Initial code drop of WebRTC r1804
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..90ec22b
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.svn
diff --git a/trunk/.gitignore b/trunk/.gitignore
new file mode 100644
index 0000000..c27b166
--- /dev/null
+++ b/trunk/.gitignore
@@ -0,0 +1,65 @@
+*.target.mk
+*.Makefile
+*.ncb
+*.ninja
+*.props
+*.pyc
+*.rules
+*.scons
+*.sdf
+*.sln
+*.suo
+*.targets
+*.user
+*.vcproj
+*.vcxproj
+*.vcxproj.filters
+*.vpj
+*.vpw
+*.vpwhistu
+*.vtg
+*.xcodeproj
+*~
+.*.sw?
+.DS_Store
+.cproject
+.gdb_history
+.gdbinit
+.metadata
+.project
+.pydevproject
+.settings
+/build
+/chromium_deps
+/gyp-mac-tool
+/Makefile
+/out
+/resources
+/src/supplement.gypi
+/testing
+/third_party/asan
+/third_party/cygwin
+/third_party/expat
+/third_party/gaeunit
+/third_party/gold
+/third_party/google-gflags/src
+/third_party/google-visualization-python
+/third_party/jsoncpp
+/third_party/libjingle
+/third_party/libjpeg
+/third_party/libjpeg_turbo
+/third_party/libsrtp
+/third_party/libvpx
+/third_party/libyuv
+/third_party/llvm-build
+/third_party/oauth2
+/third_party/protobuf
+/third_party/valgrind
+/third_party/yasm
+/tools/clang
+/tools/gyp
+/tools/python
+/tools/valgrind
+/tools/win
+/x86-generic_out/
+/xcodebuild
diff --git a/trunk/AUTHORS b/trunk/AUTHORS
new file mode 100644
index 0000000..c3d75d6
--- /dev/null
+++ b/trunk/AUTHORS
@@ -0,0 +1,6 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc.
+Mozilla Foundation
+Ben Strong <bstrong@gmail.com>
\ No newline at end of file
diff --git a/trunk/Android.mk b/trunk/Android.mk
new file mode 100644
index 0000000..b7c28e6
--- /dev/null
+++ b/trunk/Android.mk
@@ -0,0 +1,167 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+MY_WEBRTC_ROOT_PATH := $(call my-dir)
+
+# voice
+include $(MY_WEBRTC_ROOT_PATH)/src/common_audio/resampler/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/common_audio/signal_processing/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/common_audio/vad/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/neteq/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/cng/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/g711/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/g722/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/pcm16b/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/ilbc/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/iSAC/fix/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/iSAC/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_conference_mixer/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_device/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/aec/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/aecm/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/agc/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/ns/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/utility/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/media_file/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/rtp_rtcp/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/udp_transport/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/utility/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/system_wrappers/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/voice_engine/main/source/Android.mk
+
+# video
+include $(MY_WEBRTC_ROOT_PATH)/src/common_video/jpeg/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/common_video/libyuv/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_capture/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_coding/codecs/i420/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_coding/codecs/vp8/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_coding/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_processing/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_render/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/video_engine/Android.mk
+
+# third party
+include $(MY_WEBRTC_ROOT_PATH)/libvpx.mk
+
+# build .so
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+include $(LOCAL_PATH)/../../external/webrtc/android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc_audio_preprocessing
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+    libwebrtc_spl \
+    libwebrtc_resampler \
+    libwebrtc_apm \
+    libwebrtc_apm_utility \
+    libwebrtc_vad \
+    libwebrtc_ns \
+    libwebrtc_agc \
+    libwebrtc_aec \
+    libwebrtc_aecm \
+    libwebrtc_system_wrappers
+
+# Add Neon libraries.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+LOCAL_WHOLE_STATIC_LIBRARIES += \
+    libwebrtc_aecm_neon \
+    libwebrtc_ns_neon
+endif
+
+LOCAL_STATIC_LIBRARIES := \
+    libprotobuf-cpp-2.3.0-lite
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+LOCAL_PRELINK_MODULE := false
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_SHARED_LIBRARY)
+
+###
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+    libwebrtc_system_wrappers \
+    libwebrtc_audio_device \
+    libwebrtc_pcm16b \
+    libwebrtc_cng \
+    libwebrtc_audio_coding \
+    libwebrtc_rtp_rtcp \
+    libwebrtc_media_file \
+    libwebrtc_udp_transport \
+    libwebrtc_utility \
+    libwebrtc_neteq \
+    libwebrtc_audio_conference_mixer \
+    libwebrtc_isac \
+    libwebrtc_ilbc \
+    libwebrtc_isacfix \
+    libwebrtc_g722 \
+    libwebrtc_g711 \
+    libwebrtc_voe_core \
+    libwebrtc_video_render \
+    libwebrtc_video_capture \
+    libwebrtc_i420 \
+    libwebrtc_video_coding \
+    libwebrtc_video_processing \
+    libwebrtc_vp8 \
+    libwebrtc_vie_core \
+    libwebrtc_yuv \
+    libwebrtc_jpeg \
+    libwebrtc_vpx
+
+# Add Neon libraries.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+LOCAL_WHOLE_STATIC_LIBRARIES += \
+    libwebrtc_isacfix_neon
+endif
+
+LOCAL_STATIC_LIBRARIES := \
+    libyuv_static
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport \
+    libjpeg \
+    libGLESv2 \
+    libOpenSLES \
+    libwebrtc_audio_preprocessing
+
+LOCAL_PRELINK_MODULE := false
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_SHARED_LIBRARY)
+
+# test apps, they're for test only; all these test apps have LOCAL_MODULE_TAGS:=tests
+# voice engine test apps
+include $(MY_WEBRTC_ROOT_PATH)/src/voice_engine/main/test/cmd_test/Android.mk
+#include $(MY_WEBRTC_ROOT_PATH)/src/voice_engine/main/test/auto_test/Android.mk
+# video engine test apps
+include $(MY_WEBRTC_ROOT_PATH)/src/video_engine/main/test/android_test/Android.mk
+#include $(MY_WEBRTC_ROOT_PATH)/src/video_engine/test/auto_test/android/Android.mk
diff --git a/trunk/DEPS b/trunk/DEPS
new file mode 100644
index 0000000..0c8892e
--- /dev/null
+++ b/trunk/DEPS
@@ -0,0 +1,130 @@
+vars = {
+  # Use this googlecode_url variable only if there is an internal mirror for it.
+  # If you do not know, use the full path while defining your new deps entry.
+  "googlecode_url": "http://%s.googlecode.com/svn",
+  "chromium_trunk" : "http://src.chromium.org/svn/trunk",
+  "chromium_revision": "122775",
+
+  # External resources like video and audio files used for testing purposes.
+  # Downloaded on demand when needed.
+  "webrtc_resources_revision": "8",
+}
+
+# NOTE: Prefer revision numbers to tags for svn deps. Use http rather than
+# https; the latter can cause problems for users behind proxies.
+deps = {
+  "trunk/chromium_deps":
+    File(Var("chromium_trunk") + "/src/DEPS@" + Var("chromium_revision")),
+
+  "trunk/build":
+    Var("chromium_trunk") + "/src/build@" + Var("chromium_revision"),
+
+  "trunk/testing":
+    Var("chromium_trunk") + "/src/testing@" + Var("chromium_revision"),
+
+  "trunk/testing/gmock":
+    From("trunk/chromium_deps", "src/testing/gmock"),
+
+  "trunk/testing/gtest":
+    From("trunk/chromium_deps", "src/testing/gtest"),
+
+  "trunk/third_party/expat":
+    Var("chromium_trunk") + "/src/third_party/expat@" + Var("chromium_revision"),
+
+  # Used by tools/quality_tracking.
+  "trunk/third_party/gaeunit":
+    "http://code.google.com/p/gaeunit.git@e16d5bd4",
+
+  "trunk/third_party/google-gflags/src":
+    (Var("googlecode_url") % "google-gflags") + "/trunk/src@45",
+
+  # Used by tools/quality_tracking/dashboard and tools/python_charts.
+  "trunk/third_party/google-visualization-python":
+    (Var("googlecode_url") % "google-visualization-python") + "/trunk@15",
+
+  "trunk/third_party/libjpeg":
+    Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"),
+
+  "trunk/third_party/libjpeg_turbo":
+    From("trunk/chromium_deps", "src/third_party/libjpeg_turbo"),
+
+  "trunk/third_party/libvpx/source/libvpx":
+    "http://git.chromium.org/webm/libvpx.git@v1.0.0",
+
+  "trunk/third_party/libyuv":
+    (Var("googlecode_url") % "libyuv") + "/trunk@190",
+
+  "trunk/third_party/protobuf":
+    Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"),
+
+  # Used by tools/quality_tracking.
+  "trunk/third_party/oauth2":
+    "http://github.com/simplegeo/python-oauth2.git@a83f4a29",
+
+  "trunk/third_party/yasm":
+    Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"),
+
+  "trunk/third_party/yasm/source/patched-yasm":
+    From("trunk/chromium_deps", "src/third_party/yasm/source/patched-yasm"),
+
+  "trunk/tools/clang":
+    Var("chromium_trunk") + "/src/tools/clang@" + Var("chromium_revision"),
+
+  "trunk/tools/gyp":
+    From("trunk/chromium_deps", "src/tools/gyp"),
+
+  "trunk/tools/python":
+    Var("chromium_trunk") + "/src/tools/python@" + Var("chromium_revision"),
+
+  "trunk/tools/valgrind":
+    Var("chromium_trunk") + "/src/tools/valgrind@" + Var("chromium_revision"),
+
+  # Needed by build/common.gypi.
+  "trunk/tools/win/supalink":
+    Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"),
+}
+
+deps_os = {
+  "win": {
+    "trunk/third_party/cygwin":
+      Var("chromium_trunk") + "/deps/third_party/cygwin@66844",
+
+    # Used by libjpeg-turbo.
+    "trunk/third_party/yasm/binaries":
+      From("trunk/chromium_deps", "src/third_party/yasm/binaries"),
+  },
+  "unix": {
+    "trunk/third_party/gold":
+      From("trunk/chromium_deps", "src/third_party/gold"),
+  },
+}
+
+hooks = [
+  {
+    # Create a supplement.gypi file under trunk/.  This file will be picked up
+    # by gyp and we use it to set Chromium related variables (inside_chromium_build)
+    # to 0 and enable the standalone build.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/create_supplement_gypi.py", "trunk/src/supplement.gypi"],
+  },
+  {
+    # Pull clang on mac. If nothing changed, or on non-mac platforms, this takes
+    # zero seconds to run. If something changed, it downloads a prebuilt clang.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/clang/scripts/update.py", "--mac-only"],
+  },
+  {
+    # Download test resources, i.e. video and audio files. If the latest
+    # version is already downloaded, this takes zero seconds to run.
+    # If a newer version or no current download exists, it will download
+    # the resources and extract them.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/resources/update.py"],
+  },
+  {
+    # A change to a .gyp, .gypi, or to GYP itself should run the generator.
+    "pattern": ".",
+    "action": ["python", "trunk/build/gyp_chromium", "--depth=trunk", "trunk/webrtc.gyp"],
+  },
+]
+
diff --git a/trunk/LICENSE b/trunk/LICENSE
new file mode 100644
index 0000000..dd4a345
--- /dev/null
+++ b/trunk/LICENSE
@@ -0,0 +1 @@
+Refer to src/LICENSE.
diff --git a/trunk/LICENSE_THIRD_PARTY b/trunk/LICENSE_THIRD_PARTY
new file mode 100644
index 0000000..d47c055
--- /dev/null
+++ b/trunk/LICENSE_THIRD_PARTY
@@ -0,0 +1 @@
+Refer to src/LICENSE_THIRD_PARTY.
diff --git a/trunk/OWNERS b/trunk/OWNERS
new file mode 100644
index 0000000..b110a52
--- /dev/null
+++ b/trunk/OWNERS
@@ -0,0 +1,5 @@
+henrika@webrtc.org

+niklas.enbom@webrtc.org

+andrew@webrtc.org

+tina.legrand@webrtc.org

+tommi@webrtc.org
\ No newline at end of file
diff --git a/trunk/PATENTS b/trunk/PATENTS
new file mode 100644
index 0000000..5cb83ec
--- /dev/null
+++ b/trunk/PATENTS
@@ -0,0 +1 @@
+Refer to src/PATENTS.
diff --git a/trunk/PRESUBMIT.py b/trunk/PRESUBMIT.py
new file mode 100644
index 0000000..56c8d5b
--- /dev/null
+++ b/trunk/PRESUBMIT.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+def _LicenseHeader(input_api):
+  """Returns the license header regexp."""
+  license_header = (
+      r'.*? Copyright \(c\) %(year)s The WebRTC project authors\. '
+        r'All Rights Reserved\.\n'
+      r'.*?\n'
+      r'.*? Use of this source code is governed by a BSD-style license\n'
+      r'.*? that can be found in the LICENSE file in the root of the source\n'
+      r'.*? tree\. An additional intellectual property rights grant can be '
+        r'found\n'
+      r'.*? in the file PATENTS\.  All contributing project authors may\n'
+      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
+  ) % {
+      'year': input_api.time.strftime('%Y'),
+  }
+  return license_header
+
+def _CheckNoIOStreamInHeaders(input_api, output_api):
+  """Checks to make sure no .h files include <iostream>."""
+  files = []
+  pattern = input_api.re.compile(r'^#include\s*<iostream>',
+                                 input_api.re.MULTILINE)
+  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+    if not f.LocalPath().endswith('.h'):
+      continue
+    contents = input_api.ReadFile(f)
+    if pattern.search(contents):
+      files.append(f)
+
+  if len(files):
+    return [ output_api.PresubmitError(
+        'Do not #include <iostream> in header files, since it inserts static ' +
+        'initialization into every file including the header. Instead, ' +
+        '#include <ostream>. See http://crbug.com/94794',
+        files) ]
+  return []
+
+def _CheckNoFRIEND_TEST(input_api, output_api):
+  """Make sure that gtest's FRIEND_TEST() macro is not used, the
+  FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be
+  used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes."""
+  problems = []
+
+  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.h'))
+  for f in input_api.AffectedFiles(file_filter=file_filter):
+    for line_num, line in f.ChangedContents():
+      if 'FRIEND_TEST(' in line:
+        problems.append('    %s:%d' % (f.LocalPath(), line_num))
+
+  if not problems:
+    return []
+  return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use '
+      'gtest\'s FRIEND_TEST() macro. Include testsupport/gtest_prod_util.h and '
+      'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
+
+def _CheckNewFilesLintClean(input_api, output_api, source_file_filter=None):
+  """Checks that all NEW '.cc' and '.h' files pass cpplint.py.
+  This check is based on _CheckChangeLintsClean in
+  depot_tools/presubmit_canned_checks.py but has less filters and only checks
+  added files."""
+  result = []
+
+  # Initialize cpplint.
+  import cpplint
+  # Access to a protected member _XX of a client class
+  # pylint: disable=W0212
+  cpplint._cpplint_state.ResetErrorCounts()
+
+  # Justifications for each filter:
+  #
+  # - build/header_guard  : WebRTC coding style says they should be prefixed
+  #                         with WEBRTC_, which is not possible to configure in
+  #                         cpplint.py.
+  cpplint._SetFilters('-build/header_guard')
+
+  # Use the strictest verbosity level for cpplint.py (level 1) which is the
+  # default when running cpplint.py from command line.
+  # To make it possible to work with not-yet-converted code, we're only applying
+  # it to new (or moved/renamed) files.
+  verbosity_level = 1
+  files = []
+  for f in input_api.AffectedSourceFiles(source_file_filter):
+    # Note that moved/renamed files also count as added for svn.
+    if (f.Action() == 'A'):
+      files.append(f.AbsoluteLocalPath())
+  for file_name in files:
+    cpplint.ProcessFile(file_name, verbosity_level)
+
+  if cpplint._cpplint_state.error_count > 0:
+    if input_api.is_committing:
+      # TODO(kjellander): Change back to PresubmitError below when we're
+      # confident with the lint settings.
+      res_type = output_api.PresubmitPromptWarning
+    else:
+      res_type = output_api.PresubmitPromptWarning
+    result = [res_type('Changelist failed cpplint.py check.')]
+
+  return result
+
+def _CommonChecks(input_api, output_api):
+  """Checks common to both upload and commit."""
+  # TODO(kjellander): Use presubmit_canned_checks.PanProjectChecks too.
+  results = []
+  results.extend(input_api.canned_checks.CheckLongLines(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
+      input_api, output_api))
+  results.extend(_CheckNewFilesLintClean(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckLicense(
+      input_api, output_api, _LicenseHeader(input_api)))
+  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
+  results.extend(_CheckNoFRIEND_TEST(input_api, output_api))
+  return results
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckOwners(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeWasUploaded(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasDescription(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasBugField(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasTestField(
+      input_api, output_api))
+  return results
diff --git a/trunk/WATCHLISTS b/trunk/WATCHLISTS
new file mode 100644
index 0000000..1b592f5
--- /dev/null
+++ b/trunk/WATCHLISTS
@@ -0,0 +1,112 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Inspired by chromium.org:
+# http://dev.chromium.org/developers/contributing-code/watchlists
+
+{
+  'WATCHLIST_DEFINITIONS': {
+    'this_file': {
+      'filepath': '^WATCHLISTS$',
+    },
+    'all_src': {
+      'filepath': 'src/.*',
+    },
+    'root_files': {
+      # src/build/ and non-recursive contents of ./ and src/
+      'filepath': '^[^/]*$|src/[^/]*$|src/build/.*',
+    },
+    'documented_interfaces': {
+      'filepath': 'src/[^/]*\.h$|'\
+                  'src/video_engine/main/interface/.*|'\
+                  'src/voice_engine/main/interface/.*',
+    },
+    'build_files': {
+      'filepath': '\.gyp$|\.gypi$|Android\.mk$',
+    },
+    'java_files': {
+      'filepath': '\.java$|\.xml$',
+    },
+    'video_engine': {
+      'filepath': 'src/video_engine/.*',
+    },
+    'voice_engine': {
+      'filepath': 'src/voice_engine/.*',
+    },
+    'common_audio': {
+      'filepath': 'src/common_audio/.*',
+    },
+    'video_capture': {
+      'filepath': 'src/modules/video_capture/.*',
+    },
+    'video_render': {
+      'filepath': 'src/modules/video_render/.*',
+    },
+    'audio_device': {
+      'filepath': 'src/modules/audio_device/.*',
+    },
+    'audio_coding': {
+      'filepath': 'src/modules/audio_coding/.*',
+    },
+    'neteq': {
+      'filepath': 'src/modules/audio_coding/neteq/.*',
+    },
+    'audio_processing': {
+      'filepath': 'src/modules/audio_processing/.*',
+    },
+    'video_codecs': {
+      'filepath': 'src/modules/video_coding/codecs/.*',
+    },
+    'video_coding': {
+      'filepath': 'src/modules/video_coding/.*',
+    },
+    'rtp_rtcp': {
+      'filepath': 'src/modules/rtp_rtcp/.*'
+    },
+    'system_wrappers': {
+      'filepath': 'src/system_wrappers/.*',
+    },
+  },
+
+  'WATCHLISTS': {
+    'this_file': [''],
+    'all_src': ['tterriberry@mozilla.com',
+                'giles@mozilla.com'],
+    'root_files': ['andrew@webrtc.org',
+                   'niklas.enbom@webrtc.org'],
+    'documented_interfaces': ['interface-changes@webrtc.org',
+                              'rwolff@gocast.it'],
+    'build_files': ['leozwang@webrtc.org'],
+    'java_files': ['leozwang@webrtc.org'],
+    'common_audio': ['bjornv@webrtc.org',
+                     'andrew@webrtc.org'],
+    'video_engine': ['mflodman@webrtc.org',
+                     'perkj@webrtc.org'],
+    'voice_engine': ['henrika@webrtc.org'],
+    'video_capture': ['mflodman@webrtc.org',
+                      'perkj@webrtc.org',
+                      'leozwang@webrtc.org'],
+    'video_render': ['mflodman@webrtc.org',
+                     'perkj@webrtc.org',
+                     'leozwang@webrtc.org'],
+    'audio_device': ['henrika@webrtc.org',
+                     'leozwang@webrtc.org'],
+    'audio_coding': ['tina.legrand@webrtc.org'],
+    'neteq': ['henrik.lundin@webrtc.org'],
+    'audio_processing': ['andrew@webrtc.org',
+                         'bjornv@webrtc.org'],
+    'video_codecs': ['henrik.lundin@webrtc.org',
+                     'pwestin@webrtc.org'],
+    'video_coding': ['stefan@webrtc.org'],
+    'rtp_rtcp': ['mflodman@webrtc.org',
+                 'pwestin@webrtc.org'],
+    'system_wrappers': ['mflodman@webrtc.org',
+                        'henrika@webrtc.org',
+                        'andrew@webrtc.org'],
+  },
+}
diff --git a/trunk/android-webrtc.mk b/trunk/android-webrtc.mk
new file mode 100644
index 0000000..cd495b0
--- /dev/null
+++ b/trunk/android-webrtc.mk
@@ -0,0 +1,46 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# These defines will apply to all source files
+# Think again before changing it
+MY_WEBRTC_COMMON_DEFS := \
+    '-DWEBRTC_TARGET_PC' \
+    '-DWEBRTC_LINUX' \
+    '-DWEBRTC_THREAD_RR' \
+    '-DWEBRTC_CLOCK_TYPE_REALTIME' \
+    '-DWEBRTC_ANDROID'
+#    The following macros are used by modules,
+#    we might need to re-organize them
+#    '-DWEBRTC_ANDROID_OPENSLES' [module audio_device]
+#    '-DNETEQ_VOICEENGINE_CODECS' [module audio_coding neteq]
+#    '-DWEBRTC_MODULE_UTILITY_VIDEO' [module media_file] [module utility]
+ifeq ($(TARGET_ARCH),arm)
+MY_WEBRTC_COMMON_DEFS += \
+    '-DWEBRTC_ARCH_ARM'
+#    '-DWEBRTC_DETECT_ARM_NEON' # only used in a build configuration without Neon
+# TODO(kma): figure out if the above define could be moved to NDK build only.
+
+# TODO(kma): test if the code under the next two macros works with generic GCC compilers
+ifeq ($(ARCH_ARM_HAVE_NEON),true)
+MY_WEBRTC_COMMON_DEFS += \
+    '-DWEBRTC_ARCH_ARM_NEON'
+MY_ARM_CFLAGS_NEON := \
+    -flax-vector-conversions
+endif
+
+ifneq (,$(filter '-DWEBRTC_DETECT_ARM_NEON' '-DWEBRTC_ARCH_ARM_NEON', \
+    $(MY_WEBRTC_COMMON_DEFS)))
+WEBRTC_BUILD_NEON_LIBS := true
+endif
+
+ifeq ($(ARCH_ARM_HAVE_ARMV7A),true)
+MY_WEBRTC_COMMON_DEFS += \
+    '-DWEBRTC_ARCH_ARM_V7A'
+endif
+
+endif # ifeq ($(TARGET_ARCH),arm)
\ No newline at end of file
diff --git a/trunk/codereview.settings b/trunk/codereview.settings
new file mode 100644
index 0000000..b70057e
--- /dev/null
+++ b/trunk/codereview.settings
@@ -0,0 +1,9 @@
+# This file is used by gcl to get repository specific information.

+CODE_REVIEW_SERVER: webrtc-codereview.appspot.com

+#CC_LIST:

+#VIEW_VC:

+#STATUS: 

+TRY_ON_UPLOAD: False

+#TRYSERVER_SVN_URL: 

+#GITCL_PREUPLOAD: 

+#GITCL_PREDCOMMIT: 

diff --git a/trunk/libvpx.mk b/trunk/libvpx.mk
new file mode 100644
index 0000000..07c04dc
--- /dev/null
+++ b/trunk/libvpx.mk
@@ -0,0 +1,107 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+MY_LIBVPX_PATH = ../libvpx
+
+LOCAL_SRC_FILES = \
+     $(MY_LIBVPX_PATH)/vp8/common/alloccommon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/blockd.c \
+     $(MY_LIBVPX_PATH)/vp8/common/debugmodes.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropy.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropymode.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropymv.c \
+     $(MY_LIBVPX_PATH)/vp8/common/extend.c \
+     $(MY_LIBVPX_PATH)/vp8/common/filter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/findnearmv.c \
+     $(MY_LIBVPX_PATH)/vp8/common/generic/systemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/common/idctllm.c \
+     $(MY_LIBVPX_PATH)/vp8/common/invtrans.c \
+     $(MY_LIBVPX_PATH)/vp8/common/loopfilter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/loopfilter_filters.c \
+     $(MY_LIBVPX_PATH)/vp8/common/mbpitch.c \
+     $(MY_LIBVPX_PATH)/vp8/common/modecont.c \
+     $(MY_LIBVPX_PATH)/vp8/common/modecontext.c \
+     $(MY_LIBVPX_PATH)/vp8/common/quant_common.c \
+     $(MY_LIBVPX_PATH)/vp8/common/recon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconinter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconintra.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconintra4x4.c \
+     $(MY_LIBVPX_PATH)/vp8/common/setupintrarecon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/swapyv12buffer.c \
+     $(MY_LIBVPX_PATH)/vp8/common/textblit.c \
+     $(MY_LIBVPX_PATH)/vp8/common/treecoder.c \
+     $(MY_LIBVPX_PATH)/vp8/vp8_cx_iface.c \
+     $(MY_LIBVPX_PATH)/vp8/vp8_dx_iface.c \
+     $(MY_LIBVPX_PATH)/vpx_config.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_codec.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_decoder.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_image.c \
+     $(MY_LIBVPX_PATH)/vpx_mem/vpx_mem.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/vpxscale.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/yv12config.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/yv12extend.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/gen_scalers.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/scalesystemdependent.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_encoder.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/bitstream.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/boolhuff.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/dct.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodeframe.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodeintra.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodemb.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodemv.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/ethreading.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/firstpass.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/generic/csystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/mcomp.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/modecosts.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/pickinter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/picklpf.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/psnr.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/quantize.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/ratectrl.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/rdopt.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/sad_c.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/segmentation.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/tokenize.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/treewriter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/onyx_if.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/temporal_filter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/variance_c.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/dboolhuff.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/decodemv.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/decodframe.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/dequantize.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/detokenize.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/generic/dsystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/onyxd_if.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/reconintra_mt.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/threading.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/idct_blk.c \
+     $(MY_LIBVPX_PATH)/vp8/common/arm/arm_systemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/arm/arm_csystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/arm/arm_dsystemdependent.c \
+
+LOCAL_CFLAGS := \
+    -DHAVE_CONFIG_H=vpx_config.h \
+    -include $(LOCAL_PATH)/third_party/libvpx/source/config/android/vpx_config.h
+
+LOCAL_MODULE := libwebrtc_vpx
+
+LOCAL_C_INCLUDES := \
+    external/libvpx \
+    external/libvpx/vpx_ports \
+    external/libvpx/vp8/common \
+    external/libvpx/vp8/encoder \
+    external/libvpx/vp8 \
+    external/libvpx/vpx_codec 
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/license_template.txt b/trunk/license_template.txt
new file mode 100644
index 0000000..5a3e653
--- /dev/null
+++ b/trunk/license_template.txt
@@ -0,0 +1,10 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
diff --git a/trunk/peerconnection/DEPS b/trunk/peerconnection/DEPS
new file mode 100644
index 0000000..7f1e4fe
--- /dev/null
+++ b/trunk/peerconnection/DEPS
@@ -0,0 +1,132 @@
+vars = {
+  # Use this googlecode_url variable only if there is an internal mirror for it.
+  # If you do not know, use the full path while defining your new deps entry.
+  "googlecode_url": "http://%s.googlecode.com/svn",
+  "chromium_trunk" : "http://src.chromium.org/svn/trunk",
+  "chromium_revision": "120526",
+  "webrtc_revision": "1538",
+  "libjingle_revision": "115",
+}
+
+# NOTE: Prefer revision numbers to tags for svn deps. Use http rather than
+# https; the latter can cause problems for users behind proxies.
+deps = {
+  # WebRTC deps.
+  "trunk/src":
+    (Var("googlecode_url") % "webrtc") + "/trunk/src@" + Var("webrtc_revision"),
+
+  "trunk/tools":
+    (Var("googlecode_url") % "webrtc") + "/trunk/tools@" + Var("webrtc_revision"),
+
+  "trunk/test":
+    (Var("googlecode_url") % "webrtc") + "/trunk/test@" + Var("webrtc_revision"),
+
+  "trunk/third_party/google-gflags":
+    (Var("googlecode_url") % "webrtc") + "/trunk/third_party/google-gflags@" + Var("webrtc_revision"),
+
+  "trunk/third_party/libvpx":
+    (Var("googlecode_url") % "webrtc") + "/trunk/third_party/libvpx@" + Var("webrtc_revision"),
+
+  "trunk/build":
+    Var("chromium_trunk") + "/src/build@" + Var("chromium_revision"),
+
+  "trunk/testing":
+    Var("chromium_trunk") + "/src/testing@" + Var("chromium_revision"),
+
+  "trunk/testing/gtest":
+    (Var("googlecode_url") % "googletest") + "/trunk@573",
+
+  "trunk/testing/gmock":
+    (Var("googlecode_url") % "googlemock") + "/trunk@386",
+
+  "trunk/tools/gyp":
+    (Var("googlecode_url") % "gyp") + "/trunk@1187",
+
+  # Needed by build/common.gypi.
+  "trunk/tools/win/supalink":
+    Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"),
+
+  "trunk/tools/clang/scripts":
+    Var("chromium_trunk") + "/src/tools/clang/scripts@" + Var("chromium_revision"),
+
+  "trunk/tools/python":
+    Var("chromium_trunk") + "/src/tools/python@" + Var("chromium_revision"),
+
+  "trunk/tools/valgrind":
+    Var("chromium_trunk") + "/src/tools/valgrind@" + Var("chromium_revision"),
+
+  "trunk/third_party/protobuf/":
+    Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"),
+
+  "trunk/third_party/libvpx/source/libvpx":
+    "http://git.chromium.org/webm/libvpx.git@e479379a",
+
+  "trunk/third_party/libjpeg_turbo/":
+    Var("chromium_trunk") + "/deps/third_party/libjpeg_turbo@119959",
+
+  "trunk/third_party/libjpeg/":
+    Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"),
+
+  "trunk/third_party/yasm/":
+    Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"),
+
+  "trunk/third_party/expat/":
+    Var("chromium_trunk") + "/src/third_party/expat@" + Var("chromium_revision"),
+
+  "trunk/third_party/google-gflags/src":
+    (Var("googlecode_url") % "google-gflags") + "/trunk/src@45",
+
+  "trunk/third_party/yasm/source/patched-yasm":
+    Var("chromium_trunk") + "/deps/third_party/yasm/patched-yasm@73761",
+
+  # Used by libjpeg-turbo
+  "trunk/third_party/yasm/binaries":
+    Var("chromium_trunk") + "/deps/third_party/yasm/binaries@74228",
+
+  # TODO(andrew): roll to 164 after fixing:
+  # http://code.google.com/p/webrtc/issues/detail?id=267
+  "trunk/third_party/libyuv":
+    (Var("googlecode_url") % "libyuv") + "/trunk@121",
+
+  # libjingle deps.
+  "trunk/third_party/libjingle/source":
+    (Var("googlecode_url") % "libjingle") + "/trunk/@" + Var("libjingle_revision"),
+
+  "trunk/third_party/libsrtp/":
+    Var("chromium_trunk") + "/deps/third_party/libsrtp@119742",
+
+  "trunk/third_party/jsoncpp/":
+    Var("chromium_trunk") + "/src/third_party/jsoncpp@" + Var("chromium_revision"),
+
+  "trunk/third_party/jsoncpp/source":
+    "http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/jsoncpp@248",
+}
+
+deps_os = {
+  "win": {
+    "trunk/third_party/cygwin/":
+      Var("chromium_trunk") + "/deps/third_party/cygwin@66844",
+  }
+}
+
+hooks = [
+  {
+    # Create a supplement.gypi file under trunk/.  This file will be picked up
+    # by gyp and we use it to set Chromium-related variables (inside_chromium_build)
+    # to 0 and enable the standalone build.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/create_supplement_gypi.py", "trunk/src/supplement.gypi"],
+  },
+  {
+    # Pull clang on mac. If nothing changed, or on non-mac platforms, this takes
+    # zero seconds to run. If something changed, it downloads a prebuilt clang.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/clang/scripts/update.py", "--mac-only"],
+  },
+  {
+    # A change to a .gyp, .gypi, or to GYP itself should run the generator.
+    "pattern": ".",
+    "action": ["python", "trunk/build/gyp_chromium", "--depth=trunk", "trunk/peerconnection.gyp"],
+  },
+]
+
diff --git a/trunk/peerconnection/OWNERS b/trunk/peerconnection/OWNERS
new file mode 100644
index 0000000..1527445
--- /dev/null
+++ b/trunk/peerconnection/OWNERS
@@ -0,0 +1,5 @@
+henrike@webrtc.org
+mallinath@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
+tommi@webrtc.org
diff --git a/trunk/peerconnection/README b/trunk/peerconnection/README
new file mode 100644
index 0000000..bfe1234
--- /dev/null
+++ b/trunk/peerconnection/README
@@ -0,0 +1,10 @@
+This folder can be used to pull together the Chromium version of WebRTC
+and libjingle, and build the peerconnection sample client and server.
+
+Follow the instructions for WebRTC here:
+http://www.webrtc.org/reference/getting-started
+
+but substitute this "gclient config" command:
+gclient config --name trunk \
+http://webrtc.googlecode.com/svn/trunk/peerconnection
+
diff --git a/trunk/peerconnection/peerconnection.gyp b/trunk/peerconnection/peerconnection.gyp
new file mode 100644
index 0000000..e928194
--- /dev/null
+++ b/trunk/peerconnection/peerconnection.gyp
@@ -0,0 +1,113 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ 'src/build/common.gypi', ],
+  'variables': {
+    'peerconnection_sample': 'third_party/libjingle/source/talk/examples/peerconnection',
+  },  
+
+  'targets': [
+    {
+      'target_name': 'peerconnection_server',
+      'type': 'executable',
+      'sources': [
+        '<(peerconnection_sample)/server/data_socket.cc',
+        '<(peerconnection_sample)/server/data_socket.h',
+        '<(peerconnection_sample)/server/main.cc',
+        '<(peerconnection_sample)/server/peer_channel.cc',
+        '<(peerconnection_sample)/server/peer_channel.h',
+        '<(peerconnection_sample)/server/utils.cc',
+        '<(peerconnection_sample)/server/utils.h',
+      ],
+      'include_dirs': [
+        'third_party/libjingle/source',
+      ],
+    },
+  ],
+  'conditions': [
+    ['OS=="win"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_client',
+          'type': 'executable',
+          'sources': [
+            '<(peerconnection_sample)/client/conductor.cc',
+            '<(peerconnection_sample)/client/conductor.h',
+            '<(peerconnection_sample)/client/defaults.cc',
+            '<(peerconnection_sample)/client/defaults.h',
+            '<(peerconnection_sample)/client/main.cc',
+            '<(peerconnection_sample)/client/main_wnd.cc',
+            '<(peerconnection_sample)/client/main_wnd.h',
+            '<(peerconnection_sample)/client/peer_connection_client.cc',
+            '<(peerconnection_sample)/client/peer_connection_client.h',
+            'third_party/libjingle/source/talk/base/win32socketinit.cc',
+            'third_party/libjingle/source/talk/base/win32socketserver.cc',
+          ],
+          'msvs_settings': {
+            'VCLinkerTool': {
+             'SubSystem': '2',  # Windows
+            },
+          },
+          'dependencies': [
+            'third_party/libjingle/libjingle.gyp:libjingle_app',
+          ],
+          'include_dirs': [
+            'src',
+            'src/modules/interface',
+            'third_party/libjingle/source',
+          ],
+        },
+      ],  # targets
+    }, ],  # OS="win"
+    ['OS=="linux"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_client',
+          'type': 'executable',
+          'sources': [
+            '<(peerconnection_sample)/client/conductor.cc',
+            '<(peerconnection_sample)/client/conductor.h',
+            '<(peerconnection_sample)/client/defaults.cc',
+            '<(peerconnection_sample)/client/defaults.h',
+            '<(peerconnection_sample)/client/linux/main.cc',
+            '<(peerconnection_sample)/client/linux/main_wnd.cc',
+            '<(peerconnection_sample)/client/linux/main_wnd.h',
+            '<(peerconnection_sample)/client/peer_connection_client.cc',
+            '<(peerconnection_sample)/client/peer_connection_client.h',
+          ],
+          'dependencies': [
+            'third_party/libjingle/libjingle.gyp:libjingle_app',
+            # TODO(tommi): Switch to this and remove specific gtk dependency
+            # sections below for cflags and link_settings.
+            # '<(DEPTH)/build/linux/system.gyp:gtk',
+          ],
+          'include_dirs': [
+            'src',
+            'src/modules/interface',
+            'third_party/libjingle/source',
+          ],
+          'cflags': [
+            '<!@(pkg-config --cflags gtk+-2.0)',
+          ],
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)',
+              '-lX11',
+              '-lXext',
+            ],
+          },
+        },
+      ],  # targets
+    }, ],  # OS="linux"
+  ],
+
+}
diff --git a/trunk/peerconnection/third_party/libjingle/libjingle.gyp b/trunk/peerconnection/third_party/libjingle/libjingle.gyp
new file mode 100644
index 0000000..48939bf
--- /dev/null
+++ b/trunk/peerconnection/third_party/libjingle/libjingle.gyp
@@ -0,0 +1,698 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  # TODO(wu): Use the libjingle.gyp from chromium and get rid of this file.
+  'variables': {
+    'no_libjingle_logging%': 0,
+    'conditions': [
+      ['inside_chromium_build==1', {
+        'overrides': 'overrides',
+      },{
+        'overrides': 'source',
+      }],
+    ],
+  },
+  'target_defaults': {
+    'defines': [
+      'FEATURE_ENABLE_SSL',
+      'FEATURE_ENABLE_VOICEMAIL',  # TODO(ncarter): Do we really need this?
+      '_USE_32BIT_TIME_T',
+      'SAFE_TO_DEFINE_TALK_BASE_LOGGING_MACROS',
+      'EXPAT_RELATIVE_PATH',
+      'JSONCPP_RELATIVE_PATH',
+      'WEBRTC_RELATIVE_PATH',
+      'HAVE_WEBRTC',
+      'HAVE_WEBRTC_VIDEO',
+      'HAVE_WEBRTC_VOICE',
+      'HAVE_SRTP',
+      'SRTP_RELATIVE_PATH',
+    ],
+    'configurations': {
+      'Debug': {
+        'defines': [
+          # TODO(sergeyu): Fix libjingle to use NDEBUG instead of
+          # _DEBUG and remove this define. See below as well.
+          '_DEBUG',
+        ],
+      }
+    },
+    'dependencies': [
+      '../expat/expat.gyp:expat',
+    ],
+    'export_dependent_settings': [
+      '../expat/expat.gyp:expat',
+    ],
+    'include_dirs': [
+      '../../third_party/libyuv/include/',
+    ],
+    'direct_dependent_settings': {
+      'defines': [
+        'FEATURE_ENABLE_SSL',
+        'FEATURE_ENABLE_VOICEMAIL',
+        'EXPAT_RELATIVE_PATH',
+        'WEBRTC_RELATIVE_PATH',
+      ],
+      'conditions': [
+        ['OS=="win"', {
+          'link_settings': {
+            'libraries': [
+              '-lsecur32.lib',
+              '-lcrypt32.lib',
+              '-liphlpapi.lib',
+            ],
+          },
+        }],
+        ['OS=="win"', {
+          'include_dirs': [
+            '../third_party/platformsdk_win7/files/Include',
+          ],
+          'defines': [
+              '_CRT_SECURE_NO_WARNINGS',  # Suppress warnings about _vsnprintf
+          ],
+        }],
+        ['OS=="linux"', {
+          'defines': [
+            'LINUX',
+          ],
+        }],
+        ['OS=="mac"', {
+          'defines': [
+            'OSX',
+          ],
+        }],
+        ['os_posix == 1', {
+          'defines': [
+            'POSIX',
+          ],
+        }],
+        ['OS=="openbsd" or OS=="freebsd"', {
+          'defines': [
+            'BSD',
+          ],
+        }],
+        ['no_libjingle_logging==1', {
+          'defines': [
+            'NO_LIBJINGLE_LOGGING',
+          ],
+        }],
+      ],
+    },
+    'all_dependent_settings': {
+      'configurations': {
+        'Debug': {
+          'defines': [
+            # TODO(sergeyu): Fix libjingle to use NDEBUG instead of
+            # _DEBUG and remove this define. See above as well.
+            '_DEBUG',
+          ],
+        }
+      },
+    },
+    'conditions': [
+      ['inside_chromium_build==1', {
+        'defines': [
+          'NO_SOUND_SYSTEM',
+        ],
+        'include_dirs': [
+          '<(overrides)',
+          'source',
+          '../..',  # the third_party folder for webrtc includes
+          '../../third_party/expat/files',
+        ],
+        'direct_dependent_settings': {
+          'defines': [
+            'NO_SOUND_SYSTEM',
+          ],
+          'include_dirs': [
+            '<(overrides)',
+            'source',
+            '../../third_party/expat/files'
+          ],
+        },
+        'dependencies': [
+          '../../base/base.gyp:base',
+          '../../net/net.gyp:net',
+        ],
+      },{
+        'include_dirs': [
+          # the third_party folder for webrtc/ includes (non-chromium).
+          '../../src',
+          'source',
+          '../../third_party/expat/files',
+        ],
+      }],
+      ['OS=="win"', {
+        'include_dirs': [
+          '../third_party/platformsdk_win7/files/Include',
+        ],
+      }],
+      ['OS=="linux"', {
+        'defines': [
+          'LINUX',
+        ],
+      }],
+      ['OS=="mac"', {
+        'defines': [
+          'OSX',
+        ],
+      }],
+      ['os_posix == 1', {
+        'defines': [
+          'POSIX',
+        ],
+      }],
+      ['OS=="openbsd" or OS=="freebsd"', {
+        'defines': [
+          'BSD',
+        ],
+      }],
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'libjingle',
+      'type': 'static_library',
+      'sources': [
+        '<(overrides)/talk/base/basictypes.h',
+        '<(overrides)/talk/base/constructormagic.h',
+
+        # Need to override logging.h because we need
+        # SAFE_TO_DEFINE_TALK_BASE_LOGGING_MACROS to work.
+        # TODO(sergeyu): push SAFE_TO_DEFINE_TALK_BASE_LOGGING_MACROS to
+        # libjingle and remove this override.
+        '<(overrides)/talk/base/logging.h',
+
+        '<(overrides)/talk/base/scoped_ptr.h',
+
+        # Libjingle's QName is not threadsafe, so we need to use our own version
+        # here.
+        # TODO(sergeyu): Fix QName in Libjingle.
+        '<(overrides)/talk/xmllite/qname.cc',
+        '<(overrides)/talk/xmllite/qname.h',
+
+        'source/talk/base/Equifax_Secure_Global_eBusiness_CA-1.h',
+        'source/talk/base/asyncfile.cc',
+        'source/talk/base/asyncfile.h',
+        'source/talk/base/asynchttprequest.cc',
+        'source/talk/base/asynchttprequest.h',
+        'source/talk/base/asyncpacketsocket.h',
+        'source/talk/base/asyncsocket.cc',
+        'source/talk/base/asyncsocket.h',
+        'source/talk/base/asynctcpsocket.cc',
+        'source/talk/base/asynctcpsocket.h',
+        'source/talk/base/asyncudpsocket.cc',
+        'source/talk/base/asyncudpsocket.h',
+        'source/talk/base/autodetectproxy.cc',
+        'source/talk/base/autodetectproxy.h',
+        'source/talk/base/base64.cc',
+        'source/talk/base/base64.h',
+        'source/talk/base/basicdefs.h',
+        'source/talk/base/basicpacketsocketfactory.cc',
+        'source/talk/base/basicpacketsocketfactory.h',
+        'source/talk/base/bytebuffer.cc',
+        'source/talk/base/bytebuffer.h',
+        'source/talk/base/byteorder.h',
+        'source/talk/base/checks.cc',
+        'source/talk/base/checks.h',
+        'source/talk/base/common.cc',
+        'source/talk/base/common.h',
+        'source/talk/base/criticalsection.h',
+        'source/talk/base/cryptstring.h',
+        'source/talk/base/diskcache.cc',
+        'source/talk/base/diskcache.h',
+        'source/talk/base/event.cc',
+        'source/talk/base/event.h',
+        'source/talk/base/fileutils.cc',
+        'source/talk/base/fileutils.h',
+        'source/talk/base/firewallsocketserver.cc',
+        'source/talk/base/firewallsocketserver.h',
+        'source/talk/base/flags.cc',
+        'source/talk/base/flags.h',
+        'source/talk/base/helpers.cc',
+        'source/talk/base/helpers.h',
+        'source/talk/base/host.cc',
+        'source/talk/base/host.h',
+        'source/talk/base/httpbase.cc',
+        'source/talk/base/httpbase.h',
+        'source/talk/base/httpclient.h',
+        'source/talk/base/httpclient.cc',
+        'source/talk/base/httpcommon-inl.h',
+        'source/talk/base/httpcommon.cc',
+        'source/talk/base/httpcommon.h',
+        'source/talk/base/httprequest.cc',
+        'source/talk/base/httprequest.h',
+        'source/talk/base/ipaddress.cc',
+        'source/talk/base/ipaddress.h',
+        'source/talk/base/json.cc',
+        'source/talk/base/json.h',
+        'source/talk/base/linked_ptr.h',
+        'source/talk/base/logging.cc',
+        'source/talk/base/md5.h',
+        'source/talk/base/md5c.c',
+        'source/talk/base/messagehandler.cc',
+        'source/talk/base/messagehandler.h',
+        'source/talk/base/messagequeue.cc',
+        'source/talk/base/messagequeue.h',
+        'source/talk/base/nethelpers.cc',
+        'source/talk/base/nethelpers.h',
+        'source/talk/base/network.cc',
+        'source/talk/base/network.h',
+        'source/talk/base/pathutils.cc',
+        'source/talk/base/pathutils.h',
+        'source/talk/base/physicalsocketserver.cc',
+        'source/talk/base/physicalsocketserver.h',
+        'source/talk/base/proxydetect.cc',
+        'source/talk/base/proxydetect.h',
+        'source/talk/base/proxyinfo.cc',
+        'source/talk/base/proxyinfo.h',
+        'source/talk/base/ratetracker.cc',
+        'source/talk/base/ratetracker.h',
+        'source/talk/base/sec_buffer.h',
+        'source/talk/base/signalthread.cc',
+        'source/talk/base/signalthread.h',
+        'source/talk/base/sigslot.h',
+        'source/talk/base/sigslotrepeater.h',
+        'source/talk/base/socket.h',
+        'source/talk/base/socketadapters.cc',
+        'source/talk/base/socketadapters.h',
+        'source/talk/base/socketaddress.cc',
+        'source/talk/base/socketaddress.h',
+        'source/talk/base/socketaddresspair.cc',
+        'source/talk/base/socketaddresspair.h',
+        'source/talk/base/socketfactory.h',
+        'source/talk/base/socketpool.cc',
+        'source/talk/base/socketpool.h',
+        'source/talk/base/socketserver.h',
+        'source/talk/base/socketstream.cc',
+        'source/talk/base/socketstream.h',
+        'source/talk/base/ssladapter.cc',
+        'source/talk/base/ssladapter.h',
+        'source/talk/base/sslsocketfactory.cc',
+        'source/talk/base/sslsocketfactory.h',
+        'source/talk/base/stream.cc',
+        'source/talk/base/stream.h',
+        'source/talk/base/stringdigest.cc',
+        'source/talk/base/stringdigest.h',
+        'source/talk/base/stringencode.cc',
+        'source/talk/base/stringencode.h',
+        'source/talk/base/stringutils.cc',
+        'source/talk/base/stringutils.h',
+        'source/talk/base/task.cc',
+        'source/talk/base/task.h',
+        'source/talk/base/taskparent.cc',
+        'source/talk/base/taskparent.h',
+        'source/talk/base/taskrunner.cc',
+        'source/talk/base/taskrunner.h',
+        'source/talk/base/thread.cc',
+        'source/talk/base/thread.h',
+        'source/talk/base/time.cc',
+        'source/talk/base/time.h',
+        'source/talk/base/urlencode.cc',
+        'source/talk/base/urlencode.h',
+        'source/talk/base/worker.cc', 
+        'source/talk/base/worker.h', 
+        'source/talk/xmllite/xmlbuilder.cc',
+        'source/talk/xmllite/xmlbuilder.h',
+        'source/talk/xmllite/xmlconstants.cc',
+        'source/talk/xmllite/xmlconstants.h',
+        'source/talk/xmllite/xmlelement.cc',
+        'source/talk/xmllite/xmlelement.h',
+        'source/talk/xmllite/xmlnsstack.cc',
+        'source/talk/xmllite/xmlnsstack.h',
+        'source/talk/xmllite/xmlparser.cc',
+        'source/talk/xmllite/xmlparser.h',
+        'source/talk/xmllite/xmlprinter.cc',
+        'source/talk/xmllite/xmlprinter.h',
+        'source/talk/xmpp/asyncsocket.h',
+        'source/talk/xmpp/constants.cc',
+        'source/talk/xmpp/constants.h',
+        'source/talk/xmpp/jid.cc',
+        'source/talk/xmpp/jid.h',
+        'source/talk/xmpp/plainsaslhandler.h',
+        'source/talk/xmpp/prexmppauth.h',
+        'source/talk/xmpp/ratelimitmanager.cc',
+        'source/talk/xmpp/ratelimitmanager.h',
+        'source/talk/xmpp/saslcookiemechanism.h',
+        'source/talk/xmpp/saslhandler.h',
+        'source/talk/xmpp/saslmechanism.cc',
+        'source/talk/xmpp/saslmechanism.h',
+        'source/talk/xmpp/saslplainmechanism.h',
+        'source/talk/xmpp/xmppclient.cc',
+        'source/talk/xmpp/xmppclient.h',
+        'source/talk/xmpp/xmppclientsettings.h',
+        'source/talk/xmpp/xmppengine.h',
+        'source/talk/xmpp/xmppengineimpl.cc',
+        'source/talk/xmpp/xmppengineimpl.h',
+        'source/talk/xmpp/xmppengineimpl_iq.cc',
+        'source/talk/xmpp/xmpplogintask.cc',
+        'source/talk/xmpp/xmpplogintask.h',
+        'source/talk/xmpp/xmppstanzaparser.cc',
+        'source/talk/xmpp/xmppstanzaparser.h',
+        'source/talk/xmpp/xmpptask.cc',
+        'source/talk/xmpp/xmpptask.h',
+      ],
+      'conditions': [
+        ['inside_chromium_build==0', {
+          'sources': [ 
+            'source/talk/sound/automaticallychosensoundsystem.h', 
+            'source/talk/sound/platformsoundsystem.cc', 
+            'source/talk/sound/platformsoundsystem.h', 
+            'source/talk/sound/platformsoundsystemfactory.cc', 
+            'source/talk/sound/platformsoundsystemfactory.h', 
+            'source/talk/sound/sounddevicelocator.h', 
+            'source/talk/sound/soundinputstreaminterface.h', 
+            'source/talk/sound/soundoutputstreaminterface.h', 
+            'source/talk/sound/soundsystemfactory.h', 
+            'source/talk/sound/soundsysteminterface.cc', 
+            'source/talk/sound/soundsysteminterface.h', 
+            'source/talk/sound/soundsystemproxy.cc', 
+            'source/talk/sound/soundsystemproxy.h',
+          ],
+          'conditions' : [ 
+            ['OS=="linux"', {
+              'sources': [
+                'source/talk/sound/alsasoundsystem.cc',
+                'source/talk/sound/alsasoundsystem.h',
+                'source/talk/sound/alsasymboltable.cc',
+                'source/talk/sound/alsasymboltable.h',
+                'source/talk/sound/linuxsoundsystem.cc',
+                'source/talk/sound/linuxsoundsystem.h',
+                'source/talk/sound/pulseaudiosoundsystem.cc',
+                'source/talk/sound/pulseaudiosoundsystem.h',
+                'source/talk/sound/pulseaudiosymboltable.cc',
+                'source/talk/sound/pulseaudiosymboltable.h',
+              ],
+            }],
+          ],
+        }],
+        ['OS=="win"', {
+          'sources': [
+            '<(overrides)/talk/base/win32socketinit.cc',
+            'source/talk/base/schanneladapter.cc',
+            'source/talk/base/schanneladapter.h',
+            'source/talk/base/win32.h',
+            'source/talk/base/win32.cc',
+            'source/talk/base/win32filesystem.cc',
+            'source/talk/base/win32filesystem.h',
+            'source/talk/base/win32window.h',
+            'source/talk/base/win32window.cc',
+            'source/talk/base/win32securityerrors.cc',
+            'source/talk/base/winfirewall.cc',
+            'source/talk/base/winfirewall.h',
+            'source/talk/base/winping.cc',
+            'source/talk/base/winping.h',
+          ],
+        }],
+        ['os_posix == 1', {
+          'sources': [
+            'source/talk/base/latebindingsymboltable.cc',
+            'source/talk/base/latebindingsymboltable.h',
+            'source/talk/base/sslstreamadapter.cc',
+            'source/talk/base/sslstreamadapter.h',
+            'source/talk/base/unixfilesystem.cc',
+            'source/talk/base/unixfilesystem.h',
+          ],
+        }],
+        ['OS=="linux"', {
+          'sources': [
+            'source/talk/base/linux.cc',
+            'source/talk/base/linux.h',
+          ],
+        }],
+        ['OS=="mac"', {
+          'sources': [
+            'source/talk/base/macconversion.cc',
+            'source/talk/base/macconversion.h',
+            'source/talk/base/macutils.cc',
+            'source/talk/base/macutils.h',
+          ],
+        }],
+      ],
+      'dependencies': [
+        '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+      ],
+    },
+    # This has to be a separate project due to a bug in MSVS:
+    # https://connect.microsoft.com/VisualStudio/feedback/details/368272/duplicate-cpp-filename-in-c-project-visual-studio-2008
+    # We have two files named "constants.cc" and MSVS doesn't handle this
+    # properly.
+    {
+      'target_name': 'libjingle_p2p',
+      'type': 'static_library',
+      'sources': [
+        'source/talk/p2p/base/candidate.h',
+        'source/talk/p2p/base/common.h',
+        'source/talk/p2p/base/constants.cc',
+        'source/talk/p2p/base/constants.h',
+        'source/talk/p2p/base/p2ptransport.cc',
+        'source/talk/p2p/base/p2ptransport.h',
+        'source/talk/p2p/base/p2ptransportchannel.cc',
+        'source/talk/p2p/base/p2ptransportchannel.h',
+        'source/talk/p2p/base/port.cc',
+        'source/talk/p2p/base/port.h',
+        'source/talk/p2p/base/portallocator.cc',
+        'source/talk/p2p/base/portallocator.h',
+        'source/talk/p2p/base/portallocatorsessionproxy.cc',
+        'source/talk/p2p/base/portallocatorsessionproxy.h',
+        'source/talk/p2p/base/portproxy.cc',
+        'source/talk/p2p/base/portproxy.h',
+        'source/talk/p2p/base/pseudotcp.cc',
+        'source/talk/p2p/base/pseudotcp.h',
+        'source/talk/p2p/base/rawtransport.cc',
+        'source/talk/p2p/base/rawtransport.h',
+        'source/talk/p2p/base/rawtransportchannel.cc',
+        'source/talk/p2p/base/rawtransportchannel.h',
+        'source/talk/p2p/base/relayport.cc',
+        'source/talk/p2p/base/relayport.h',
+        'source/talk/p2p/base/session.cc',
+        'source/talk/p2p/base/session.h',
+        'source/talk/p2p/base/sessionclient.h',
+        'source/talk/p2p/base/sessiondescription.cc',
+        'source/talk/p2p/base/sessiondescription.h',
+        'source/talk/p2p/base/sessionid.h',
+        'source/talk/p2p/base/sessionmanager.cc',
+        'source/talk/p2p/base/sessionmanager.h',
+        'source/talk/p2p/base/sessionmessages.cc',
+        'source/talk/p2p/base/sessionmessages.h',
+        'source/talk/p2p/base/parsing.cc',
+        'source/talk/p2p/base/parsing.h',
+        'source/talk/p2p/base/stun.cc',
+        'source/talk/p2p/base/stun.h',
+        'source/talk/p2p/base/stunport.cc',
+        'source/talk/p2p/base/stunport.h',
+        'source/talk/p2p/base/stunrequest.cc',
+        'source/talk/p2p/base/stunrequest.h',
+        'source/talk/p2p/base/tcpport.cc',
+        'source/talk/p2p/base/tcpport.h',
+        'source/talk/p2p/base/transport.cc',
+        'source/talk/p2p/base/transport.h',
+        'source/talk/p2p/base/transportchannel.cc',
+        'source/talk/p2p/base/transportchannel.h',
+        'source/talk/p2p/base/transportchannelimpl.h',
+        'source/talk/p2p/base/transportchannelproxy.cc',
+        'source/talk/p2p/base/transportchannelproxy.h',
+        'source/talk/p2p/base/udpport.cc',
+        'source/talk/p2p/base/udpport.h',
+        'source/talk/p2p/client/basicportallocator.cc',
+        'source/talk/p2p/client/basicportallocator.h',
+        'source/talk/p2p/client/httpportallocator.cc',
+        'source/talk/p2p/client/httpportallocator.h',
+        'source/talk/p2p/client/sessionmanagertask.h',
+        'source/talk/p2p/client/sessionsendtask.h',
+        'source/talk/p2p/client/socketmonitor.cc',
+        'source/talk/p2p/client/socketmonitor.h',
+        'source/talk/session/phone/audiomonitor.cc',
+        'source/talk/session/phone/audiomonitor.h',
+        'source/talk/session/phone/call.cc',
+        'source/talk/session/phone/call.h',
+        'source/talk/session/phone/channel.cc',
+        'source/talk/session/phone/channel.h',
+        'source/talk/session/phone/channelmanager.cc',
+        'source/talk/session/phone/channelmanager.h',
+        'source/talk/session/phone/codec.cc',
+        'source/talk/session/phone/codec.h',
+        'source/talk/session/phone/cryptoparams.h',
+        'source/talk/session/phone/currentspeakermonitor.cc',
+        'source/talk/session/phone/currentspeakermonitor.h',
+        'source/talk/session/phone/devicemanager.cc',
+        'source/talk/session/phone/devicemanager.h',
+        'source/talk/session/phone/filemediaengine.cc',
+        'source/talk/session/phone/filemediaengine.h',   
+        'source/talk/session/phone/mediachannel.h',
+        'source/talk/session/phone/mediaengine.cc',
+        'source/talk/session/phone/mediaengine.h',
+        'source/talk/session/phone/mediamessages.cc',
+        'source/talk/session/phone/mediamessages.h',
+        'source/talk/session/phone/mediamonitor.cc',
+        'source/talk/session/phone/mediamonitor.h',
+        'source/talk/session/phone/mediasession.cc',
+        'source/talk/session/phone/mediasession.h',
+        'source/talk/session/phone/mediasessionclient.cc',
+        'source/talk/session/phone/mediasessionclient.h',
+        'source/talk/session/phone/mediasink.h',
+        'source/talk/session/phone/rtcpmuxfilter.cc',
+        'source/talk/session/phone/rtcpmuxfilter.h',        
+        'source/talk/session/phone/rtpdump.cc',
+        'source/talk/session/phone/rtpdump.h',
+        'source/talk/session/phone/rtputils.cc',
+        'source/talk/session/phone/rtputils.h',
+        'source/talk/session/phone/soundclip.cc',
+        'source/talk/session/phone/soundclip.h',
+        'source/talk/session/phone/srtpfilter.cc',
+        'source/talk/session/phone/srtpfilter.h',
+        'source/talk/session/phone/ssrcmuxfilter.cc',
+        'source/talk/session/phone/ssrcmuxfilter.h',
+        'source/talk/session/phone/streamparams.cc',
+        'source/talk/session/phone/videocapturer.cc',
+        'source/talk/session/phone/videocapturer.h',
+        'source/talk/session/phone/videocommon.cc',
+        'source/talk/session/phone/videocommon.h',
+        'source/talk/session/phone/videoframe.cc',
+        'source/talk/session/phone/videoframe.h',
+        'source/talk/session/phone/voicechannel.h',
+        'source/talk/session/phone/webrtcpassthroughrender.cc',
+        'source/talk/session/phone/webrtccommon.h',
+        'source/talk/session/phone/webrtcvideocapturer.cc',
+        'source/talk/session/phone/webrtcvideocapturer.h',
+        'source/talk/session/phone/webrtcvideoengine.cc',
+        'source/talk/session/phone/webrtcvideoengine.h',
+        'source/talk/session/phone/webrtcvideoframe.cc',
+        'source/talk/session/phone/webrtcvideoframe.h',
+        'source/talk/session/phone/webrtcvie.h',
+        'source/talk/session/phone/webrtcvoe.h',
+        'source/talk/session/phone/webrtcvoiceengine.cc',
+        'source/talk/session/phone/webrtcvoiceengine.h',
+        'source/talk/session/tunnel/pseudotcpchannel.cc',
+        'source/talk/session/tunnel/pseudotcpchannel.h',
+        'source/talk/session/tunnel/tunnelsessionclient.cc',
+        'source/talk/session/tunnel/tunnelsessionclient.h',
+      ],
+      'conditions': [
+        ['OS=="win"', {
+          'sources': [
+            'source/talk/session/phone/gdivideorenderer.cc',
+            'source/talk/session/phone/gdivideorenderer.h',
+            'source/talk/session/phone/win32devicemanager.cc',
+            'source/talk/session/phone/win32devicemanager.h',
+          ],
+        }],   
+        ['OS=="linux"', {
+          'sources': [
+            'source/talk/session/phone/gtkvideorenderer.cc',
+            'source/talk/session/phone/gtkvideorenderer.cc',
+            'source/talk/session/phone/gtkvideorenderer.h', 
+            'source/talk/session/phone/libudevsymboltable.cc',
+            'source/talk/session/phone/libudevsymboltable.h',
+            'source/talk/session/phone/linuxdevicemanager.cc',
+            'source/talk/session/phone/linuxdevicemanager.h',
+            'source/talk/session/phone/v4llookup.cc',
+            'source/talk/session/phone/v4llookup.h',
+          ],
+          'include_dirs': [
+            'source/talk/third_party/libudev',
+          ],
+          'cflags': [
+             '<!@(pkg-config --cflags gtk+-2.0)',
+          ],
+        }],        
+        ['inside_chromium_build==1', {
+          'dependencies': [
+            '../../third_party/webrtc/modules/modules.gyp:audio_device',
+            '../../third_party/webrtc/modules/modules.gyp:video_capture_module',
+            '../../third_party/webrtc/modules/modules.gyp:video_render_module',
+            '../../third_party/webrtc/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '../../third_party/webrtc/video_engine/video_engine.gyp:video_engine_core',
+            '../../third_party/webrtc/voice_engine/voice_engine.gyp:voice_engine_core',
+            '<(DEPTH)/third_party/libsrtp/libsrtp.gyp:libsrtp',
+            'libjingle',
+          ],
+        }, {
+          'dependencies': [
+            '../../src/modules/modules.gyp:audio_device',
+            '../../src/modules/modules.gyp:video_capture_module',
+            '../../src/modules/modules.gyp:video_render_module',
+            '../../src/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '../../src/video_engine/video_engine.gyp:video_engine_core',
+            '../../src/voice_engine/voice_engine.gyp:voice_engine_core',
+            '<(DEPTH)/third_party/libsrtp/libsrtp.gyp:libsrtp',
+            'libjingle',
+          ],
+        } ],  # inside_chromium_build
+      ],  # conditions
+    },
+    # Separate project for the app-layer (PeerConnection) library.
+    {
+      # App-layer library: the PeerConnection/MediaStream API implementation
+      # (ROAP signaling, SDP handling, media stream/track wrappers).
+      'target_name': 'libjingle_app',
+      'type': '<(library)',
+      'sources': [        
+        'source/talk/app/webrtc/audiotrackimpl.cc',
+        'source/talk/app/webrtc/audiotrackimpl.h',
+        'source/talk/app/webrtc/mediastream.h',
+        'source/talk/app/webrtc/mediastreamhandler.cc',
+        'source/talk/app/webrtc/mediastreamhandler.h',
+        'source/talk/app/webrtc/mediastreamimpl.cc',
+        'source/talk/app/webrtc/mediastreamimpl.h',
+        'source/talk/app/webrtc/mediastreamprovider.h',
+        'source/talk/app/webrtc/mediastreamproxy.cc',
+        'source/talk/app/webrtc/mediastreamproxy.h',
+        'source/talk/app/webrtc/mediastreamtrackproxy.cc',
+        'source/talk/app/webrtc/mediastreamtrackproxy.h',
+        'source/talk/app/webrtc/mediatrackimpl.h',
+        'source/talk/app/webrtc/notifierimpl.h',
+        'source/talk/app/webrtc/peerconnection.h',
+        'source/talk/app/webrtc/peerconnectionfactoryimpl.cc',
+        'source/talk/app/webrtc/peerconnectionfactoryimpl.h',
+        'source/talk/app/webrtc/peerconnectionimpl.cc',
+        'source/talk/app/webrtc/peerconnectionimpl.h',
+        'source/talk/app/webrtc/peerconnectionsignaling.cc',
+        'source/talk/app/webrtc/peerconnectionsignaling.h',
+        'source/talk/app/webrtc/portallocatorfactory.cc',
+        'source/talk/app/webrtc/portallocatorfactory.h',
+        'source/talk/app/webrtc/roaperrorcodes.h',
+        'source/talk/app/webrtc/roapmessages.cc',
+        'source/talk/app/webrtc/roapmessages.h',
+        'source/talk/app/webrtc/roapsession.cc',
+        'source/talk/app/webrtc/roapsession.h',
+        'source/talk/app/webrtc/sessiondescriptionprovider.h',
+        'source/talk/app/webrtc/streamcollectionimpl.h',
+        'source/talk/app/webrtc/videorendererimpl.cc',
+        'source/talk/app/webrtc/videotrackimpl.cc',
+        'source/talk/app/webrtc/videotrackimpl.h',
+        'source/talk/app/webrtc/webrtcjson.cc',
+        'source/talk/app/webrtc/webrtcjson.h',
+        'source/talk/app/webrtc/webrtcsdp.cc',
+        'source/talk/app/webrtc/webrtcsdp.h',
+        'source/talk/app/webrtc/webrtcsession.cc',
+        'source/talk/app/webrtc/webrtcsession.h',
+        'source/talk/app/webrtc/webrtcsessionobserver.h',
+      ],
+      # JSON (for webrtcjson.cc) and SRTP are required in both build modes.
+      'dependencies': [
+        '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+        '<(DEPTH)/third_party/libsrtp/libsrtp.gyp:libsrtp',
+      ],
+      # Engine/module dependencies live at different paths depending on
+      # whether we build inside the Chromium tree or standalone.
+      'conditions': [
+        ['inside_chromium_build==1', {        
+          'dependencies': [
+            '../../third_party/webrtc/modules/modules.gyp:video_capture_module',
+            '../../third_party/webrtc/modules/modules.gyp:video_render_module',
+            '../../third_party/webrtc/video_engine/video_engine.gyp:video_engine_core',
+            '../../third_party/webrtc/voice_engine/voice_engine.gyp:voice_engine_core',
+            '../../third_party/webrtc/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            'libjingle_p2p',
+          ],          
+        }, {
+          'dependencies': [
+            '../../src/modules/modules.gyp:video_capture_module',
+            '../../src/modules/modules.gyp:video_render_module',
+            '../../src/video_engine/video_engine.gyp:video_engine_core',
+            '../../src/voice_engine/voice_engine.gyp:voice_engine_core',
+            '../../src/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            'libjingle_p2p',
+          ],          
+        } ],  # inside_chromium_build
+      ],  # conditions
+    },    
+  ],
+}
diff --git a/trunk/src/LICENSE b/trunk/src/LICENSE
new file mode 100644
index 0000000..4c41b7b
--- /dev/null
+++ b/trunk/src/LICENSE
@@ -0,0 +1,29 @@
+Copyright (c) 2011, The WebRTC project authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+  * Neither the name of Google nor the names of its contributors may
+    be used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/trunk/src/LICENSE_THIRD_PARTY b/trunk/src/LICENSE_THIRD_PARTY
new file mode 100644
index 0000000..e19708a
--- /dev/null
+++ b/trunk/src/LICENSE_THIRD_PARTY
@@ -0,0 +1,27 @@
+This source tree contains third party source code which is governed by third 
+party licenses. This file contains references to files which are under other 
+licenses than the one provided in the LICENSE file in the root of the source
+tree.
+
+Files governed by third party licenses:
+common_audio/signal_processing/spl_sqrt_floor.c
+common_audio/signal_processing/spl_sqrt_floor.s
+modules/audio_coding/codecs/G711/main/source/g711.h
+modules/audio_coding/codecs/G711/main/source/g711.c
+modules/audio_coding/codecs/G722/main/source/g722_decode.h
+modules/audio_coding/codecs/G722/main/source/g722_decode.c
+modules/audio_coding/codecs/G722/main/source/g722_encode.h
+modules/audio_coding/codecs/G722/main/source/g722_encode.c
+modules/audio_coding/codecs/iSAC/main/source/fft.c
+modules/audio_device/main/source/Mac/portaudio/pa_memorybarrier.h
+modules/audio_device/main/source/Mac/portaudio/pa_ringbuffer.h
+modules/audio_device/main/source/Mac/portaudio/pa_ringbuffer.c
+modules/audio_processing/utility/fft4g.c
+modules/audio_processing/aec/aec_rdft.c
+system_wrappers/interface/fix_interlocked_exchange_pointer_windows.h
+system_wrappers/interface/scoped_ptr.h
+system_wrappers/interface/scoped_refptr.h
+system_wrappers/source/condition_variable_windows.cc
+system_wrappers/source/spreadsortlib/constants.hpp
+system_wrappers/source/spreadsortlib/spreadsort.hpp
+system_wrappers/source/thread_windows_set_name.h
diff --git a/trunk/src/PATENTS b/trunk/src/PATENTS
new file mode 100644
index 0000000..190607a
--- /dev/null
+++ b/trunk/src/PATENTS
@@ -0,0 +1,24 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the WebRTC code package.
+
+Google hereby grants to you a perpetual, worldwide, non-exclusive,
+no-charge, irrevocable (except as stated in this section) patent
+license to make, have made, use, offer to sell, sell, import,
+transfer, and otherwise run, modify and propagate the contents of this
+implementation of the WebRTC code package, where such license applies
+only to those patent claims, both currently owned by Google and
+acquired in the future, licensable by Google that are necessarily
+infringed by this implementation of the WebRTC code package. This
+grant does not include claims that would be infringed only as a
+consequence of further modification of this implementation. If you or
+your agent or exclusive licensee institute or order or agree to the
+institution of patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that this
+implementation of the WebRTC code package or any code incorporated
+within this implementation of the WebRTC code package constitutes
+direct or contributory patent infringement, or inducement of patent
+infringement, then any patent rights granted to you under this License
+for this implementation of the WebRTC code package shall terminate as
+of the date such litigation is filed.
diff --git a/trunk/src/README.chromium b/trunk/src/README.chromium
new file mode 100644
index 0000000..246c13d
--- /dev/null
+++ b/trunk/src/README.chromium
@@ -0,0 +1,13 @@
+Name: WebRTC

+URL: http://www.webrtc.org

+Version: 90

+License: BSD

+License File: LICENSE

+

+Description:

+WebRTC provides real time voice and video processing

+functionality to enable the implementation of 

+PeerConnection/MediaStream.

+

+Third party code used in this project is described 

+in the file LICENSE_THIRD_PARTY.

diff --git a/trunk/src/build/common.gypi b/trunk/src/build/common.gypi
new file mode 100644
index 0000000..74b6003
--- /dev/null
+++ b/trunk/src/build/common.gypi
@@ -0,0 +1,180 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file contains common settings for building WebRTC components.
+
+{
+  'variables': {
+    # These variables need to be nested in order to use them in a conditions
+    # block to set other variables.
+    'variables': {
+      # This will be set to zero in the supplement.gypi triggered by a gclient
+      # hook in the standalone build.
+      'build_with_chromium%': 1,
+    },
+
+    'build_with_chromium%': '<(build_with_chromium)',
+
+    # The Chromium common.gypi we use treats all gyp files without
+    # chromium_code==1 as third party code. This disables many of the
+    # preferred warning settings.
+    #
+    # We can set this here to have WebRTC code treated as Chromium code. Our
+    # third party code will still have the reduced warning settings.
+    'chromium_code': 1,
+
+    # Adds video support to dependencies shared by voice and video engine.
+    # This should normally be enabled; the intended use is to disable only
+    # when building voice engine exclusively.
+    'enable_video%': 1,
+
+    # Selects fixed-point code where possible.
+    # TODO(andrew): we'd like to set this based on the target OS/architecture.
+    'prefer_fixed_point%': 0,
+
+    # Enable data logging. Produces text files with data logged within engines
+    # which can be easily parsed for offline processing.
+    'enable_data_logging%': 0,
+
+    # Disable these to not build components which can be externally provided.
+    'build_libjpeg%': 1,
+    'build_libyuv%': 1,
+
+    'conditions': [
+      ['OS=="win"', {
+        # TODO(andrew, perkj): does this need to be here?
+        # Path needed to build Direct Show base classes on Windows.
+        # The code is included in the Windows SDK.
+        'direct_show_base_classes':
+          'C:/Program Files/Microsoft SDKs/Windows/v7.1/Samples/multimedia/directshow/baseclasses/',
+      }],
+      ['build_with_chromium==1', {
+        # Exclude pulse audio on Chromium since its prerequisites don't require
+        # pulse audio.
+        'include_pulse_audio%': 0,
+
+        # Exclude internal ADM since Chromium uses its own IO handling.
+        'include_internal_audio_device%': 0,
+
+        # Exclude internal VCM in Chromium build.
+        'include_internal_video_capture%': 0,
+
+        # Exclude internal video render module in Chromium build.
+        'include_internal_video_render%': 0,
+
+        # Disable the use of protocol buffers in production code.
+        'enable_protobuf%': 0,
+
+        'webrtc_root%': '<(DEPTH)/third_party/webrtc',
+      }, {
+        # Settings for the standalone (not-in-Chromium) build.
+
+        'include_pulse_audio%': 1,
+
+        'include_internal_audio_device%': 1,
+
+        'include_internal_video_capture%': 1,
+
+        'include_internal_video_render%': 1,
+
+        'enable_protobuf%': 1,
+
+        'webrtc_root%': '<(DEPTH)/src',
+
+        'conditions': [
+          ['OS=="mac"', {
+            # TODO(andrew): clang is the default on Mac. For now, disable the
+            # Chrome plugins, which causes a flood of chromium-style warnings.
+            # Investigate enabling the plugins:
+            # http://code.google.com/p/webrtc/issues/detail?id=163
+            'clang_use_chrome_plugins%': 0,
+          }],
+        ],
+      }],
+    ], # conditions
+  },
+  'target_defaults': {
+    'include_dirs': [
+      '..','../..', # common_types.h, typedefs.h
+    ],
+    'conditions': [
+      ['build_with_chromium==1', {
+        'defines': [
+          # Changes settings for Chromium build.
+          'WEBRTC_CHROMIUM_BUILD',
+        ],
+      }, {
+        'conditions': [
+          ['os_posix==1', {
+            'cflags': [
+              '-Wextra',
+              # We need to repeat some flags from Chromium's common.gypi here
+              # that get overridden by -Wextra.
+              '-Wno-unused-parameter',
+              '-Wno-missing-field-initializers',
+            ],
+            'cflags_cc': [
+              # This is enabled for clang; enable for gcc as well.
+              '-Woverloaded-virtual',
+            ],
+          }],
+        ],
+      }],
+      ['OS=="linux"', {
+        'defines': [
+          'WEBRTC_TARGET_PC',
+          'WEBRTC_LINUX',
+          'WEBRTC_THREAD_RR',
+          # TODO(andrew): can we select this automatically?
+          # Define this if the Linux system does not support CLOCK_MONOTONIC.
+          #'WEBRTC_CLOCK_TYPE_REALTIME',
+        ],
+      }],
+      ['OS=="mac"', {
+        # TODO(andrew): what about PowerPC?
+        # Setup for Intel
+        'defines': [
+          'WEBRTC_TARGET_MAC_INTEL',
+          'WEBRTC_MAC_INTEL',
+          'WEBRTC_MAC',
+          'WEBRTC_THREAD_RR',
+          'WEBRTC_CLOCK_TYPE_REALTIME',
+        ],
+      }],
+      ['OS=="win"', {
+        'defines': [
+          'WEBRTC_TARGET_PC',
+        ],
+        # TODO(andrew): remove this block when possible.
+        # 4389: Signed/unsigned mismatch.
+        # 4373: MSVC legacy warning for ignoring const / volatile in
+        # signatures. TODO(phoglund): get rid of the 4373 suppression when
+        # http://code.google.com/p/webrtc/issues/detail?id=261 is solved.
+        'msvs_disabled_warnings': [4389, 4373],
+      }],
+      ['OS=="android"', {
+        'defines': [
+          'WEBRTC_TARGET_PC',
+          'WEBRTC_LINUX',
+          # TODO(leozwang): Investigate CLOCK_REALTIME and CLOCK_MONOTONIC
+          # support on Android. Keep WEBRTC_CLOCK_TYPE_REALTIME for now,
+          # remove it after I verify that CLOCK_MONOTONIC is fully functional
+          # with condition and event functions in system_wrappers.
+          'WEBRTC_CLOCK_TYPE_REALTIME',
+          'WEBRTC_THREAD_RR',
+          'WEBRTC_ANDROID',
+          'WEBRTC_ARM_INLINE_CALLS',
+          # TODO(leozwang): move WEBRTC_ARCH_ARM to typedefs.h.
+          'WEBRTC_ARCH_ARM',
+          'WEBRTC_ANDROID_OPENSLES',
+         ],
+      }],
+    ], # conditions
+  }, # target_defaults
+}
+
diff --git a/trunk/src/build/merge_libs.gyp b/trunk/src/build/merge_libs.gyp
new file mode 100644
index 0000000..d23bf11
--- /dev/null
+++ b/trunk/src/build/merge_libs.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ 'common.gypi', ],
+  'targets': [
+    {
+      # Dummy executable: linking it against video_engine_core forces every
+      # WebRTC static library to be built and placed in the product dir.
+      'target_name': 'no_op',
+      'type': 'executable',
+      'dependencies': [
+        '../video_engine/video_engine.gyp:video_engine_core',
+      ],
+      'sources': [ 'no_op.cc', ],
+    },
+    {
+      # Invokes merge_libs.py over the product directory to combine the
+      # libraries built above into a single webrtc_<os>_<arch>_<config> lib.
+      'target_name': 'merged_lib',
+      'type': 'none',
+      'dependencies': [
+        'no_op',
+      ],
+      'actions': [
+        {
+          'variables': {
+            'output_lib_name': 'webrtc',
+            'output_lib': '<(PRODUCT_DIR)/<(STATIC_LIB_PREFIX)<(output_lib_name)_<(OS)_<(target_arch)_<(CONFIGURATION_NAME)<(STATIC_LIB_SUFFIX)',
+          },
+          'action_name': 'merge_libs',
+          # Using the no_op binary as the input makes this action re-run
+          # whenever any of the libraries it links against change.
+          'inputs': ['<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)no_op<(EXECUTABLE_SUFFIX)'],
+          'outputs': ['<(output_lib)'],
+          'action': ['python',
+                     'merge_libs.py',
+                     '<(PRODUCT_DIR)',
+                     '<(output_lib)',],
+        },
+      ],
+    },
+  ],
+}
diff --git a/trunk/src/build/merge_libs.py b/trunk/src/build/merge_libs.py
new file mode 100644
index 0000000..31c5efb
--- /dev/null
+++ b/trunk/src/build/merge_libs.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Searches for libraries and/or object files on the specified path and
+# merges them into a single library.
+
+import subprocess
+import sys
+
+if __name__ == '__main__':
+  if len(sys.argv) != 3:
+    sys.stderr.write('Usage: ' + sys.argv[0] + ' <search_path> <output_lib>\n')
+    sys.exit(2)
+
+  search_path = sys.argv[1]
+  output_lib = sys.argv[2]
+
+  from subprocess import call, PIPE
+  if sys.platform.startswith('linux'):
+    call(["rm -f " + output_lib], shell=True)
+    call(["rm -rf " + search_path + "/obj.target/*do_not_use*"], shell=True)
+    call(["ar crs " + output_lib + " $(find " + search_path +
+          "/obj.target -name *\.o)"], shell=True)
+    call(["ar crs " + output_lib + " $(find " + search_path +
+          "/obj/gen -name *\.o)"], shell=True)
+
+  elif sys.platform == 'darwin':
+    call(["rm -f " + output_lib], shell=True)
+    call(["rm -f " + search_path + "/*do_not_use*"], shell=True)
+    call(["libtool -static -v -o " + output_lib + " " + search_path + "/*.a"],
+         shell=True)
+
+  elif sys.platform == 'win32':
+    # We need to execute a batch file to set some environment variables for the
+    # lib command. VS 8 uses vsvars.bat and VS 9 uses vsvars32.bat. It's
+    # required that at least one of them is in the system PATH. We try both and
+    # suppress stderr and stdout to fail silently.
+    call(["vsvars.bat"], stderr=PIPE, stdout=PIPE, shell=True)
+    call(["vsvars32.bat"], stderr=PIPE, stdout=PIPE, shell=True)
+    call(["del " + output_lib], shell=True)
+    call(["del /F /S /Q " + search_path + "/lib/*do_not_use*"],
+          shell=True)
+    call(["lib /OUT:" + output_lib + " " + search_path + "/lib/*.lib"],
+         shell=True)
+
+  else:
+    sys.stderr.write('Platform not supported: %r\n\n' % sys.platform)
+    sys.exit(1)
+
+  sys.exit(0)
+
diff --git a/trunk/src/build/no_op.cc b/trunk/src/build/no_op.cc
new file mode 100644
index 0000000..4cce7f6
--- /dev/null
+++ b/trunk/src/build/no_op.cc
@@ -0,0 +1,3 @@
+// Trivial entry point for the 'no_op' target in merge_libs.gyp; the target
+// exists only so that its library dependencies get built.
+int main() {
+  return 0;
+}
diff --git a/trunk/src/build/protoc.gypi b/trunk/src/build/protoc.gypi
new file mode 100644
index 0000000..70bf71e
--- /dev/null
+++ b/trunk/src/build/protoc.gypi
@@ -0,0 +1,97 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# It was necessary to copy this file to WebRTC, because the path to
+# build/common.gypi is different for the standalone and Chromium builds. Gyp
+# doesn't permit conditional inclusion or variable expansion in include paths.
+# http://code.google.com/p/gyp/wiki/InputFormatReference#Including_Other_Files
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list or other gyp
+# dependencies.  The proto headers are guaranteed to be generated before any
+# source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'proto_in_dir%': '.',
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        '<(protoc)',
+        '--proto_path=<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+        # --proto_path is a strict prefix of the path given as an argument.
+        '<(proto_in_dir)/<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--cpp_out=<(cc_dir)',
+        '--python_out=<(py_dir)',
+        ],
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/trunk/src/common_audio/OWNERS b/trunk/src/common_audio/OWNERS
new file mode 100644
index 0000000..84582f2
--- /dev/null
+++ b/trunk/src/common_audio/OWNERS
@@ -0,0 +1,4 @@
+bjornv@webrtc.org
+tina.legrand@webrtc.org
+jan.skoglund@webrtc.org
+andrew@webrtc.org
diff --git a/trunk/src/common_audio/common_audio.gyp b/trunk/src/common_audio/common_audio.gyp
new file mode 100644
index 0000000..3d3da3f
--- /dev/null
+++ b/trunk/src/common_audio/common_audio.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # Umbrella gyp file: pulls in the shared build settings plus the three
+  # common_audio components, each of which defines its own targets.
+  'includes': [
+    '../build/common.gypi',
+    'signal_processing/signal_processing.gypi',
+    'resampler/resampler.gypi',
+    'vad/vad.gypi',
+  ],
+}
diff --git a/trunk/src/common_audio/resampler/Android.mk b/trunk/src/common_audio/resampler/Android.mk
new file mode 100644
index 0000000..b1d630a
--- /dev/null
+++ b/trunk/src/common_audio/resampler/Android.mk
@@ -0,0 +1,47 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings (defines MY_WEBRTC_COMMON_DEFS).
+include $(LOCAL_PATH)/../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_resampler
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := resampler.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../signal_processing/include 
+
+# NOTE(review): libdl is listed here unconditionally and appended again in
+# the ifneq block below for non-simulator builds; the duplicate is harmless
+# but redundant -- consider removing one of them.
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
+LOCAL_LDLIBS += -ldl -lpthread
+endif
+
+ifneq ($(TARGET_SIMULATOR),true)
+LOCAL_SHARED_LIBRARIES += libdl
+endif
+
+# STLport comes from the Android source tree when not building with the NDK.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/common_audio/resampler/include/resampler.h b/trunk/src/common_audio/resampler/include/resampler.h
new file mode 100644
index 0000000..38e6bd3
--- /dev/null
+++ b/trunk/src/common_audio/resampler/include/resampler.h
@@ -0,0 +1,116 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * A wrapper supporting resampling between a large number of sampling-rate
+ * combinations.
+ */
+
+#ifndef WEBRTC_RESAMPLER_RESAMPLER_H_
+#define WEBRTC_RESAMPLER_RESAMPLER_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// TODO(andrew): the implementation depends on the exact values of this enum.
+// It should be rewritten in a less fragile way.
+enum ResamplerType
+{
+    // 4 MSB = Number of channels
+    // 4 LSB = Synchronous or asynchronous
+
+    kResamplerSynchronous = 0x10,
+    kResamplerAsynchronous = 0x11,
+    kResamplerSynchronousStereo = 0x20,
+    kResamplerAsynchronousStereo = 0x21,
+    kResamplerInvalid = 0xff
+};
+
+// TODO(andrew): doesn't need to be part of the interface.
+enum ResamplerMode
+{
+    kResamplerMode1To1,
+    kResamplerMode1To2,
+    kResamplerMode1To3,
+    kResamplerMode1To4,
+    kResamplerMode1To6,
+    kResamplerMode1To12,
+    kResamplerMode2To3,
+    kResamplerMode2To11,
+    kResamplerMode4To11,
+    kResamplerMode8To11,
+    kResamplerMode11To16,
+    kResamplerMode11To32,
+    kResamplerMode2To1,
+    kResamplerMode3To1,
+    kResamplerMode4To1,
+    kResamplerMode6To1,
+    kResamplerMode12To1,
+    kResamplerMode3To2,
+    kResamplerMode11To2,
+    kResamplerMode11To4,
+    kResamplerMode11To8
+};
+
+// Audio resampler wrapper.  Supports synchronous operation (Push) and
+// asynchronous operation (Insert/Pull) with internal buffering.  Stereo
+// operation is handled via the two extra slave instances below.
+class Resampler
+{
+
+public:
+    Resampler();
+    // TODO(andrew): use an init function instead.
+    Resampler(int inFreq, int outFreq, ResamplerType type);
+    ~Resampler();
+
+    // Reset all states
+    int Reset(int inFreq, int outFreq, ResamplerType type);
+
+    // Reset all states if any parameter has changed
+    int ResetIfNeeded(int inFreq, int outFreq, ResamplerType type);
+
+    // Synchronous resampling, all output samples are written to samplesOut.
+    // maxLen is the capacity of samplesOut; outLen receives the number of
+    // samples actually produced.
+    int Push(const WebRtc_Word16* samplesIn, int lengthIn, WebRtc_Word16* samplesOut,
+             int maxLen, int &outLen);
+
+    // Asynchronous resampling, input
+    int Insert(WebRtc_Word16* samplesIn, int lengthIn);
+
+    // Asynchronous resampling output, remaining samples are buffered
+    int Pull(WebRtc_Word16* samplesOut, int desiredLen, int &outLen);
+
+private:
+    // Generic pointers since we don't know what states we'll need
+    void* state1_;
+    void* state2_;
+    void* state3_;
+
+    // Storage if needed
+    WebRtc_Word16* in_buffer_;
+    WebRtc_Word16* out_buffer_;
+    int in_buffer_size_;
+    int out_buffer_size_;
+    int in_buffer_size_max_;
+    int out_buffer_size_max_;
+
+    // State
+    int my_in_frequency_khz_;
+    int my_out_frequency_khz_;
+    ResamplerMode my_mode_;
+    ResamplerType my_type_;
+
+    // Extra instance for stereo
+    Resampler* slave_left_;
+    Resampler* slave_right_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_RESAMPLER_RESAMPLER_H_
diff --git a/trunk/src/common_audio/resampler/resampler.cc b/trunk/src/common_audio/resampler/resampler.cc
new file mode 100644
index 0000000..2db27b1
--- /dev/null
+++ b/trunk/src/common_audio/resampler/resampler.cc
@@ -0,0 +1,1084 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * A wrapper for resampling a numerous amount of sampling combinations.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "signal_processing_library.h"
+#include "resampler.h"
+
+
+namespace webrtc
+{
+
+// Default constructor: zero/NULL-initialize all members. The object is
+// deliberately left invalid (my_type_ = kResamplerInvalid) so that a
+// Reset() call is required before any resampling can take place.
+Resampler::Resampler()
+{
+    state1_ = NULL;
+    state2_ = NULL;
+    state3_ = NULL;
+    in_buffer_ = NULL;
+    out_buffer_ = NULL;
+    in_buffer_size_ = 0;
+    out_buffer_size_ = 0;
+    in_buffer_size_max_ = 0;
+    out_buffer_size_max_ = 0;
+    // we need a reset before we will work
+    my_in_frequency_khz_ = 0;
+    my_out_frequency_khz_ = 0;
+    my_mode_ = kResamplerMode1To1;
+    my_type_ = kResamplerInvalid;
+    slave_left_ = NULL;
+    slave_right_ = NULL;
+}
+
+// Convenience constructor: initialize members like the default constructor,
+// then immediately configure via Reset().
+// NOTE(review): the Reset() return value is discarded here, so an
+// unsupported rate combination leaves my_type_ == kResamplerInvalid
+// silently — callers relying on this constructor cannot detect failure.
+Resampler::Resampler(int inFreq, int outFreq, ResamplerType type)
+{
+    state1_ = NULL;
+    state2_ = NULL;
+    state3_ = NULL;
+    in_buffer_ = NULL;
+    out_buffer_ = NULL;
+    in_buffer_size_ = 0;
+    out_buffer_size_ = 0;
+    in_buffer_size_max_ = 0;
+    out_buffer_size_max_ = 0;
+    // we need a reset before we will work
+    my_in_frequency_khz_ = 0;
+    my_out_frequency_khz_ = 0;
+    my_mode_ = kResamplerMode1To1;
+    my_type_ = kResamplerInvalid;
+    slave_left_ = NULL;
+    slave_right_ = NULL;
+
+    Reset(inFreq, outFreq, type);
+}
+
+// Destructor: release all filter states and scratch buffers (allocated
+// with malloc in Reset(), hence free()), and delete the stereo slave
+// resamplers (allocated with new).
+Resampler::~Resampler()
+{
+    if (state1_)
+    {
+        free(state1_);
+    }
+    if (state2_)
+    {
+        free(state2_);
+    }
+    if (state3_)
+    {
+        free(state3_);
+    }
+    if (in_buffer_)
+    {
+        free(in_buffer_);
+    }
+    if (out_buffer_)
+    {
+        free(out_buffer_);
+    }
+    if (slave_left_)
+    {
+        delete slave_left_;
+    }
+    if (slave_right_)
+    {
+        delete slave_right_;
+    }
+}
+
+// Reset only when the requested configuration differs from the current one.
+// Returns 0 when no reset is needed or Reset() succeeds, -1 on failure.
+// NOTE(review): the comparison is done at kHz granularity (integer
+// division by 1000), so rates that share the same kHz value (e.g. 44000
+// vs. 44100) would not trigger a reset — confirm this is intended.
+int Resampler::ResetIfNeeded(int inFreq, int outFreq, ResamplerType type)
+{
+    int tmpInFreq_kHz = inFreq / 1000;
+    int tmpOutFreq_kHz = outFreq / 1000;
+
+    if ((tmpInFreq_kHz != my_in_frequency_khz_) || (tmpOutFreq_kHz != my_out_frequency_khz_)
+            || (type != my_type_))
+    {
+        return Reset(inFreq, outFreq, type);
+    } else
+    {
+        return 0;
+    }
+}
+
+// (Re)configure the resampler for a new inFreq -> outFreq conversion.
+// Frees every previously allocated state/buffer, reduces the rate ratio
+// by the GCD, maps the reduced ratio onto one of the supported
+// ResamplerMode values, and allocates the filter states that mode needs.
+// Returns 0 on success, -1 (with my_type_ = kResamplerInvalid) if the
+// rate combination is not supported.
+// NOTE(review): inFreq and outFreq are assumed to be positive; outFreq == 0
+// would divide by zero in the GCD loop below — confirm callers guarantee
+// this.
+int Resampler::Reset(int inFreq, int outFreq, ResamplerType type)
+{
+
+    // Release any state from a previous configuration so we start clean.
+    if (state1_)
+    {
+        free(state1_);
+        state1_ = NULL;
+    }
+    if (state2_)
+    {
+        free(state2_);
+        state2_ = NULL;
+    }
+    if (state3_)
+    {
+        free(state3_);
+        state3_ = NULL;
+    }
+    if (in_buffer_)
+    {
+        free(in_buffer_);
+        in_buffer_ = NULL;
+    }
+    if (out_buffer_)
+    {
+        free(out_buffer_);
+        out_buffer_ = NULL;
+    }
+    if (slave_left_)
+    {
+        delete slave_left_;
+        slave_left_ = NULL;
+    }
+    if (slave_right_)
+    {
+        delete slave_right_;
+        slave_right_ = NULL;
+    }
+
+    in_buffer_size_ = 0;
+    out_buffer_size_ = 0;
+    in_buffer_size_max_ = 0;
+    out_buffer_size_max_ = 0;
+
+    // This might be overridden if parameters are not accepted.
+    my_type_ = type;
+
+    // Start with a math exercise, Euclid's algorithm to find the gcd:
+
+    int a = inFreq;
+    int b = outFreq;
+    int c = a % b;
+    while (c != 0)
+    {
+        a = b;
+        b = c;
+        c = a % b;
+    }
+    // b is now the gcd;
+
+    // We need to track what domain we're in.
+    my_in_frequency_khz_ = inFreq / 1000;
+    my_out_frequency_khz_ = outFreq / 1000;
+
+    // Scale with GCD
+    inFreq = inFreq / b;
+    outFreq = outFreq / b;
+
+    // Do we need stereo?
+    // The high nibble of ResamplerType appears to encode the channel
+    // layout (0x20 == stereo) — the slaves each handle one channel as
+    // mono, using the same (reduced) rate ratio.
+    if ((my_type_ & 0xf0) == 0x20)
+    {
+        // Change type to mono
+        type = static_cast<ResamplerType>(
+            ((static_cast<int>(type) & 0x0f) + 0x10));
+        slave_left_ = new Resampler(inFreq, outFreq, type);
+        slave_right_ = new Resampler(inFreq, outFreq, type);
+    }
+
+    // Map the reduced inFreq:outFreq ratio onto a supported mode.
+    if (inFreq == outFreq)
+    {
+        my_mode_ = kResamplerMode1To1;
+    } else if (inFreq == 1)
+    {
+        switch (outFreq)
+        {
+            case 2:
+                my_mode_ = kResamplerMode1To2;
+                break;
+            case 3:
+                my_mode_ = kResamplerMode1To3;
+                break;
+            case 4:
+                my_mode_ = kResamplerMode1To4;
+                break;
+            case 6:
+                my_mode_ = kResamplerMode1To6;
+                break;
+            case 12:
+                my_mode_ = kResamplerMode1To12;
+                break;
+            default:
+                my_type_ = kResamplerInvalid;
+                return -1;
+        }
+    } else if (outFreq == 1)
+    {
+        switch (inFreq)
+        {
+            case 2:
+                my_mode_ = kResamplerMode2To1;
+                break;
+            case 3:
+                my_mode_ = kResamplerMode3To1;
+                break;
+            case 4:
+                my_mode_ = kResamplerMode4To1;
+                break;
+            case 6:
+                my_mode_ = kResamplerMode6To1;
+                break;
+            case 12:
+                my_mode_ = kResamplerMode12To1;
+                break;
+            default:
+                my_type_ = kResamplerInvalid;
+                return -1;
+        }
+    } else if ((inFreq == 2) && (outFreq == 3))
+    {
+        my_mode_ = kResamplerMode2To3;
+    } else if ((inFreq == 2) && (outFreq == 11))
+    {
+        my_mode_ = kResamplerMode2To11;
+    } else if ((inFreq == 4) && (outFreq == 11))
+    {
+        my_mode_ = kResamplerMode4To11;
+    } else if ((inFreq == 8) && (outFreq == 11))
+    {
+        my_mode_ = kResamplerMode8To11;
+    } else if ((inFreq == 3) && (outFreq == 2))
+    {
+        my_mode_ = kResamplerMode3To2;
+    } else if ((inFreq == 11) && (outFreq == 2))
+    {
+        my_mode_ = kResamplerMode11To2;
+    } else if ((inFreq == 11) && (outFreq == 4))
+    {
+        my_mode_ = kResamplerMode11To4;
+    } else if ((inFreq == 11) && (outFreq == 16))
+    {
+        my_mode_ = kResamplerMode11To16;
+    } else if ((inFreq == 11) && (outFreq == 32))
+    {
+        my_mode_ = kResamplerMode11To32;
+    } else if ((inFreq == 11) && (outFreq == 8))
+    {
+        my_mode_ = kResamplerMode11To8;
+    } else
+    {
+        my_type_ = kResamplerInvalid;
+        return -1;
+    }
+
+    // Now create the states we need
+    // Multi-stage modes chain an UpsampleBy2/DownsampleBy2 filter
+    // (8 x Word32 state) with the fixed-ratio WebRtcSpl resamplers.
+    // NOTE(review): malloc results are not checked for NULL anywhere in
+    // this switch — an allocation failure would crash in the reset calls.
+    switch (my_mode_)
+    {
+        case kResamplerMode1To1:
+            // No state needed;
+            break;
+        case kResamplerMode1To2:
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode1To3:
+            state1_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz));
+            WebRtcSpl_ResetResample16khzTo48khz((WebRtcSpl_State16khzTo48khz *)state1_);
+            break;
+        case kResamplerMode1To4:
+            // 1:2
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            // 2:4
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode1To6:
+            // 1:2
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            // 2:6
+            state2_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz));
+            WebRtcSpl_ResetResample16khzTo48khz((WebRtcSpl_State16khzTo48khz *)state2_);
+            break;
+        case kResamplerMode1To12:
+            // 1:2
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            // 2:4
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+            // 4:12
+            state3_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz));
+            WebRtcSpl_ResetResample16khzTo48khz(
+                (WebRtcSpl_State16khzTo48khz*) state3_);
+            break;
+        case kResamplerMode2To3:
+            // 2:6
+            state1_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz));
+            WebRtcSpl_ResetResample16khzTo48khz((WebRtcSpl_State16khzTo48khz *)state1_);
+            // 6:3
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode2To11:
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+
+            state2_ = malloc(sizeof(WebRtcSpl_State8khzTo22khz));
+            WebRtcSpl_ResetResample8khzTo22khz((WebRtcSpl_State8khzTo22khz *)state2_);
+            break;
+        case kResamplerMode4To11:
+            state1_ = malloc(sizeof(WebRtcSpl_State8khzTo22khz));
+            WebRtcSpl_ResetResample8khzTo22khz((WebRtcSpl_State8khzTo22khz *)state1_);
+            break;
+        case kResamplerMode8To11:
+            state1_ = malloc(sizeof(WebRtcSpl_State16khzTo22khz));
+            WebRtcSpl_ResetResample16khzTo22khz((WebRtcSpl_State16khzTo22khz *)state1_);
+            break;
+        case kResamplerMode11To16:
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+
+            state2_ = malloc(sizeof(WebRtcSpl_State22khzTo16khz));
+            WebRtcSpl_ResetResample22khzTo16khz((WebRtcSpl_State22khzTo16khz *)state2_);
+            break;
+        case kResamplerMode11To32:
+            // 11 -> 22
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+
+            // 22 -> 16
+            state2_ = malloc(sizeof(WebRtcSpl_State22khzTo16khz));
+            WebRtcSpl_ResetResample22khzTo16khz((WebRtcSpl_State22khzTo16khz *)state2_);
+
+            // 16 -> 32
+            state3_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state3_, 0, 8 * sizeof(WebRtc_Word32));
+
+            break;
+        case kResamplerMode2To1:
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode3To1:
+            state1_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz));
+            WebRtcSpl_ResetResample48khzTo16khz((WebRtcSpl_State48khzTo16khz *)state1_);
+            break;
+        case kResamplerMode4To1:
+            // 4:2
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            // 2:1
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode6To1:
+            // 6:2
+            state1_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz));
+            WebRtcSpl_ResetResample48khzTo16khz((WebRtcSpl_State48khzTo16khz *)state1_);
+            // 2:1
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode12To1:
+            // 12:4
+            state1_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz));
+            WebRtcSpl_ResetResample48khzTo16khz(
+                (WebRtcSpl_State48khzTo16khz*) state1_);
+            // 4:2
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+            // 2:1
+            state3_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state3_, 0, 8 * sizeof(WebRtc_Word32));
+            break;
+        case kResamplerMode3To2:
+            // 3:6
+            state1_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state1_, 0, 8 * sizeof(WebRtc_Word32));
+            // 6:2
+            state2_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz));
+            WebRtcSpl_ResetResample48khzTo16khz((WebRtcSpl_State48khzTo16khz *)state2_);
+            break;
+        case kResamplerMode11To2:
+            state1_ = malloc(sizeof(WebRtcSpl_State22khzTo8khz));
+            WebRtcSpl_ResetResample22khzTo8khz((WebRtcSpl_State22khzTo8khz *)state1_);
+
+            state2_ = malloc(8 * sizeof(WebRtc_Word32));
+            memset(state2_, 0, 8 * sizeof(WebRtc_Word32));
+
+            break;
+        case kResamplerMode11To4:
+            state1_ = malloc(sizeof(WebRtcSpl_State22khzTo8khz));
+            WebRtcSpl_ResetResample22khzTo8khz((WebRtcSpl_State22khzTo8khz *)state1_);
+            break;
+        case kResamplerMode11To8:
+            state1_ = malloc(sizeof(WebRtcSpl_State22khzTo16khz));
+            WebRtcSpl_ResetResample22khzTo16khz((WebRtcSpl_State22khzTo16khz *)state1_);
+            break;
+
+    }
+
+    return 0;
+}
+
+// Synchronous resampling, all output samples are written to samplesOut
+int Resampler::Push(const WebRtc_Word16 * samplesIn, int lengthIn, WebRtc_Word16* samplesOut,
+                    int maxLen, int &outLen)
+{
+    // Check that the resampler is not in asynchronous mode
+    if (my_type_ & 0x0f)
+    {
+        return -1;
+    }
+
+    // Do we have a stereo signal?
+    if ((my_type_ & 0xf0) == 0x20)
+    {
+
+        // Split up the signal and call the slave object for each channel
+
+        WebRtc_Word16* left = (WebRtc_Word16*)malloc(lengthIn * sizeof(WebRtc_Word16) / 2);
+        WebRtc_Word16* right = (WebRtc_Word16*)malloc(lengthIn * sizeof(WebRtc_Word16) / 2);
+        WebRtc_Word16* out_left = (WebRtc_Word16*)malloc(maxLen / 2 * sizeof(WebRtc_Word16));
+        WebRtc_Word16* out_right =
+                (WebRtc_Word16*)malloc(maxLen / 2 * sizeof(WebRtc_Word16));
+        int res = 0;
+        for (int i = 0; i < lengthIn; i += 2)
+        {
+            left[i >> 1] = samplesIn[i];
+            right[i >> 1] = samplesIn[i + 1];
+        }
+
+        // It's OK to overwrite the local parameter, since it's just a copy
+        lengthIn = lengthIn / 2;
+
+        int actualOutLen_left = 0;
+        int actualOutLen_right = 0;
+        // Do resampling for right channel
+        res |= slave_left_->Push(left, lengthIn, out_left, maxLen / 2, actualOutLen_left);
+        res |= slave_right_->Push(right, lengthIn, out_right, maxLen / 2, actualOutLen_right);
+        if (res || (actualOutLen_left != actualOutLen_right))
+        {
+            free(left);
+            free(right);
+            free(out_left);
+            free(out_right);
+            return -1;
+        }
+
+        // Reassemble the signal
+        for (int i = 0; i < actualOutLen_left; i++)
+        {
+            samplesOut[i * 2] = out_left[i];
+            samplesOut[i * 2 + 1] = out_right[i];
+        }
+        outLen = 2 * actualOutLen_left;
+
+        free(left);
+        free(right);
+        free(out_left);
+        free(out_right);
+
+        return 0;
+    }
+
+    // Containers for temp samples
+    WebRtc_Word16* tmp;
+    WebRtc_Word16* tmp_2;
+    // tmp data for resampling routines
+    WebRtc_Word32* tmp_mem;
+
+    switch (my_mode_)
+    {
+        case kResamplerMode1To1:
+            memcpy(samplesOut, samplesIn, lengthIn * sizeof(WebRtc_Word16));
+            outLen = lengthIn;
+            break;
+        case kResamplerMode1To2:
+            if (maxLen < (lengthIn * 2))
+            {
+                return -1;
+            }
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, samplesOut, (WebRtc_Word32*)state1_);
+            outLen = lengthIn * 2;
+            return 0;
+        case kResamplerMode1To3:
+
+            // We can only handle blocks of 160 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 160) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < (lengthIn * 3))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(336 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 160)
+            {
+                WebRtcSpl_Resample16khzTo48khz(samplesIn + i, samplesOut + i * 3,
+                                               (WebRtcSpl_State16khzTo48khz *)state1_,
+                                               tmp_mem);
+            }
+            outLen = lengthIn * 3;
+            free(tmp_mem);
+            return 0;
+        case kResamplerMode1To4:
+            if (maxLen < (lengthIn * 4))
+            {
+                return -1;
+            }
+
+            tmp = (WebRtc_Word16*)malloc(sizeof(WebRtc_Word16) * 2 * lengthIn);
+            // 1:2
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (WebRtc_Word32*)state1_);
+            // 2:4
+            WebRtcSpl_UpsampleBy2(tmp, lengthIn * 2, samplesOut, (WebRtc_Word32*)state2_);
+            outLen = lengthIn * 4;
+            free(tmp);
+            return 0;
+        case kResamplerMode1To6:
+            // We can only handle blocks of 80 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 80) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < (lengthIn * 6))
+            {
+                return -1;
+            }
+
+            //1:2
+
+            tmp_mem = (WebRtc_Word32*)malloc(336 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*)malloc(sizeof(WebRtc_Word16) * 2 * lengthIn);
+
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (WebRtc_Word32*)state1_);
+            outLen = lengthIn * 2;
+
+            for (int i = 0; i < outLen; i += 160)
+            {
+                WebRtcSpl_Resample16khzTo48khz(tmp + i, samplesOut + i * 3,
+                                               (WebRtcSpl_State16khzTo48khz *)state2_,
+                                               tmp_mem);
+            }
+            outLen = outLen * 3;
+            free(tmp_mem);
+            free(tmp);
+
+            return 0;
+        case kResamplerMode1To12:
+            // We can only handle blocks of 40 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 40) != 0) {
+              return -1;
+            }
+            if (maxLen < (lengthIn * 12)) {
+              return -1;
+            }
+
+            tmp_mem = (WebRtc_Word32*) malloc(336 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*) malloc(sizeof(WebRtc_Word16) * 4 * lengthIn);
+            //1:2
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, samplesOut,
+                                  (WebRtc_Word32*) state1_);
+            outLen = lengthIn * 2;
+            //2:4
+            WebRtcSpl_UpsampleBy2(samplesOut, outLen, tmp, (WebRtc_Word32*) state2_);
+            outLen = outLen * 2;
+            // 4:12
+            for (int i = 0; i < outLen; i += 160) {
+              // WebRtcSpl_Resample16khzTo48khz() takes a block of 160 samples
+              // as input and outputs a resampled block of 480 samples. The
+              // data is now actually in 32 kHz sampling rate, despite the
+              // function name, and with a resampling factor of three becomes
+              // 96 kHz.
+              WebRtcSpl_Resample16khzTo48khz(tmp + i, samplesOut + i * 3,
+                                             (WebRtcSpl_State16khzTo48khz*) state3_,
+                                             tmp_mem);
+            }
+            outLen = outLen * 3;
+            free(tmp_mem);
+            free(tmp);
+
+            return 0;
+        case kResamplerMode2To3:
+            if (maxLen < (lengthIn * 3 / 2))
+            {
+                return -1;
+            }
+            // 2:6
+            // We can only handle blocks of 160 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 160) != 0)
+            {
+                return -1;
+            }
+            tmp = static_cast<WebRtc_Word16*> (malloc(sizeof(WebRtc_Word16) * lengthIn * 3));
+            tmp_mem = (WebRtc_Word32*)malloc(336 * sizeof(WebRtc_Word32));
+            for (int i = 0; i < lengthIn; i += 160)
+            {
+                WebRtcSpl_Resample16khzTo48khz(samplesIn + i, tmp + i * 3,
+                                               (WebRtcSpl_State16khzTo48khz *)state1_,
+                                               tmp_mem);
+            }
+            lengthIn = lengthIn * 3;
+            // 6:3
+            WebRtcSpl_DownsampleBy2(tmp, lengthIn, samplesOut, (WebRtc_Word32*)state2_);
+            outLen = lengthIn / 2;
+            free(tmp);
+            free(tmp_mem);
+            return 0;
+        case kResamplerMode2To11:
+
+            // We can only handle blocks of 80 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 80) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 11) / 2))
+            {
+                return -1;
+            }
+            tmp = (WebRtc_Word16*)malloc(sizeof(WebRtc_Word16) * 2 * lengthIn);
+            // 1:2
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (WebRtc_Word32*)state1_);
+            lengthIn *= 2;
+
+            tmp_mem = (WebRtc_Word32*)malloc(98 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 80)
+            {
+                WebRtcSpl_Resample8khzTo22khz(tmp + i, samplesOut + (i * 11) / 4,
+                                              (WebRtcSpl_State8khzTo22khz *)state2_,
+                                              tmp_mem);
+            }
+            outLen = (lengthIn * 11) / 4;
+            free(tmp_mem);
+            free(tmp);
+            return 0;
+        case kResamplerMode4To11:
+
+            // We can only handle blocks of 80 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 80) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 11) / 4))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(98 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 80)
+            {
+                WebRtcSpl_Resample8khzTo22khz(samplesIn + i, samplesOut + (i * 11) / 4,
+                                              (WebRtcSpl_State8khzTo22khz *)state1_,
+                                              tmp_mem);
+            }
+            outLen = (lengthIn * 11) / 4;
+            free(tmp_mem);
+            return 0;
+        case kResamplerMode8To11:
+            // We can only handle blocks of 160 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 160) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 11) / 8))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(88 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 160)
+            {
+                WebRtcSpl_Resample16khzTo22khz(samplesIn + i, samplesOut + (i * 11) / 8,
+                                               (WebRtcSpl_State16khzTo22khz *)state1_,
+                                               tmp_mem);
+            }
+            outLen = (lengthIn * 11) / 8;
+            free(tmp_mem);
+            return 0;
+
+        case kResamplerMode11To16:
+            // We can only handle blocks of 110 samples
+            if ((lengthIn % 110) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 16) / 11))
+            {
+                return -1;
+            }
+
+            tmp_mem = (WebRtc_Word32*)malloc(104 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*)malloc((sizeof(WebRtc_Word16) * lengthIn * 2));
+
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (WebRtc_Word32*)state1_);
+
+            for (int i = 0; i < (lengthIn * 2); i += 220)
+            {
+                WebRtcSpl_Resample22khzTo16khz(tmp + i, samplesOut + (i / 220) * 160,
+                                               (WebRtcSpl_State22khzTo16khz *)state2_,
+                                               tmp_mem);
+            }
+
+            outLen = (lengthIn * 16) / 11;
+
+            free(tmp_mem);
+            free(tmp);
+            return 0;
+
+        case kResamplerMode11To32:
+
+            // We can only handle blocks of 110 samples
+            if ((lengthIn % 110) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 32) / 11))
+            {
+                return -1;
+            }
+
+            tmp_mem = (WebRtc_Word32*)malloc(104 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*)malloc((sizeof(WebRtc_Word16) * lengthIn * 2));
+
+            // 11 -> 22 kHz in samplesOut
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, samplesOut, (WebRtc_Word32*)state1_);
+
+            // 22 -> 16 in tmp
+            for (int i = 0; i < (lengthIn * 2); i += 220)
+            {
+                WebRtcSpl_Resample22khzTo16khz(samplesOut + i, tmp + (i / 220) * 160,
+                                               (WebRtcSpl_State22khzTo16khz *)state2_,
+                                               tmp_mem);
+            }
+
+            // 16 -> 32 in samplesOut
+            WebRtcSpl_UpsampleBy2(tmp, (lengthIn * 16) / 11, samplesOut,
+                                  (WebRtc_Word32*)state3_);
+
+            outLen = (lengthIn * 32) / 11;
+
+            free(tmp_mem);
+            free(tmp);
+            return 0;
+
+        case kResamplerMode2To1:
+            if (maxLen < (lengthIn / 2))
+            {
+                return -1;
+            }
+            WebRtcSpl_DownsampleBy2(samplesIn, lengthIn, samplesOut, (WebRtc_Word32*)state1_);
+            outLen = lengthIn / 2;
+            return 0;
+        case kResamplerMode3To1:
+            // We can only handle blocks of 480 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 480) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < (lengthIn / 3))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(496 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 480)
+            {
+                WebRtcSpl_Resample48khzTo16khz(samplesIn + i, samplesOut + i / 3,
+                                               (WebRtcSpl_State48khzTo16khz *)state1_,
+                                               tmp_mem);
+            }
+            outLen = lengthIn / 3;
+            free(tmp_mem);
+            return 0;
+        case kResamplerMode4To1:
+            if (maxLen < (lengthIn / 4))
+            {
+                return -1;
+            }
+            tmp = (WebRtc_Word16*)malloc(sizeof(WebRtc_Word16) * lengthIn / 2);
+            // 4:2
+            WebRtcSpl_DownsampleBy2(samplesIn, lengthIn, tmp, (WebRtc_Word32*)state1_);
+            // 2:1
+            WebRtcSpl_DownsampleBy2(tmp, lengthIn / 2, samplesOut, (WebRtc_Word32*)state2_);
+            outLen = lengthIn / 4;
+            free(tmp);
+            return 0;
+
+        case kResamplerMode6To1:
+            // We can only handle blocks of 480 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 480) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < (lengthIn / 6))
+            {
+                return -1;
+            }
+
+            tmp_mem = (WebRtc_Word32*)malloc(496 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*)malloc((sizeof(WebRtc_Word16) * lengthIn) / 3);
+
+            for (int i = 0; i < lengthIn; i += 480)
+            {
+                WebRtcSpl_Resample48khzTo16khz(samplesIn + i, tmp + i / 3,
+                                               (WebRtcSpl_State48khzTo16khz *)state1_,
+                                               tmp_mem);
+            }
+            outLen = lengthIn / 3;
+            free(tmp_mem);
+            WebRtcSpl_DownsampleBy2(tmp, outLen, samplesOut, (WebRtc_Word32*)state2_);
+            free(tmp);
+            outLen = outLen / 2;
+            return 0;
+        case kResamplerMode12To1:
+            // We can only handle blocks of 480 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 480) != 0) {
+              return -1;
+            }
+            if (maxLen < (lengthIn / 12)) {
+              return -1;
+            }
+
+            tmp_mem = (WebRtc_Word32*) malloc(496 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*) malloc((sizeof(WebRtc_Word16) * lengthIn) / 3);
+            tmp_2 = (WebRtc_Word16*) malloc((sizeof(WebRtc_Word16) * lengthIn) / 6);
+            // 12:4
+            for (int i = 0; i < lengthIn; i += 480) {
+              // WebRtcSpl_Resample48khzTo16khz() takes a block of 480 samples
+              // as input and outputs a resampled block of 160 samples. The
+              // data is now actually in 96 kHz sampling rate, despite the
+              // function name, and with a resampling factor of 1/3 becomes
+              // 32 kHz.
+              WebRtcSpl_Resample48khzTo16khz(samplesIn + i, tmp + i / 3,
+                                             (WebRtcSpl_State48khzTo16khz*) state1_,
+                                             tmp_mem);
+            }
+            outLen = lengthIn / 3;
+            free(tmp_mem);
+            // 4:2
+            WebRtcSpl_DownsampleBy2(tmp, outLen, tmp_2,
+                                    (WebRtc_Word32*) state2_);
+            outLen = outLen / 2;
+            free(tmp);
+            // 2:1
+            WebRtcSpl_DownsampleBy2(tmp_2, outLen, samplesOut,
+                                    (WebRtc_Word32*) state3_);
+            free(tmp_2);
+            outLen = outLen / 2;
+            return 0;
+        case kResamplerMode3To2:
+            if (maxLen < (lengthIn * 2 / 3))
+            {
+                return -1;
+            }
+            // 3:6
+            tmp = static_cast<WebRtc_Word16*> (malloc(sizeof(WebRtc_Word16) * lengthIn * 2));
+            WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (WebRtc_Word32*)state1_);
+            lengthIn *= 2;
+            // 6:2
+            // We can only handle blocks of 480 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 480) != 0)
+            {
+                free(tmp);
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(496 * sizeof(WebRtc_Word32));
+            for (int i = 0; i < lengthIn; i += 480)
+            {
+                WebRtcSpl_Resample48khzTo16khz(tmp + i, samplesOut + i / 3,
+                                               (WebRtcSpl_State48khzTo16khz *)state2_,
+                                               tmp_mem);
+            }
+            outLen = lengthIn / 3;
+            free(tmp);
+            free(tmp_mem);
+            return 0;
+        case kResamplerMode11To2:
+            // We can only handle blocks of 220 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 220) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 2) / 11))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(126 * sizeof(WebRtc_Word32));
+            tmp = (WebRtc_Word16*)malloc((lengthIn * 4) / 11 * sizeof(WebRtc_Word16));
+
+            for (int i = 0; i < lengthIn; i += 220)
+            {
+                WebRtcSpl_Resample22khzTo8khz(samplesIn + i, tmp + (i * 4) / 11,
+                                              (WebRtcSpl_State22khzTo8khz *)state1_,
+                                              tmp_mem);
+            }
+            lengthIn = (lengthIn * 4) / 11;
+
+            WebRtcSpl_DownsampleBy2(tmp, lengthIn, samplesOut, (WebRtc_Word32*)state2_);
+            outLen = lengthIn / 2;
+
+            free(tmp_mem);
+            free(tmp);
+            return 0;
+        case kResamplerMode11To4:
+            // We can only handle blocks of 220 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 220) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 4) / 11))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(126 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 220)
+            {
+                WebRtcSpl_Resample22khzTo8khz(samplesIn + i, samplesOut + (i * 4) / 11,
+                                              (WebRtcSpl_State22khzTo8khz *)state1_,
+                                              tmp_mem);
+            }
+            outLen = (lengthIn * 4) / 11;
+            free(tmp_mem);
+            return 0;
+        case kResamplerMode11To8:
+            // We can only handle blocks of 160 samples
+            // Can be fixed, but I don't think it's needed
+            if ((lengthIn % 220) != 0)
+            {
+                return -1;
+            }
+            if (maxLen < ((lengthIn * 8) / 11))
+            {
+                return -1;
+            }
+            tmp_mem = (WebRtc_Word32*)malloc(104 * sizeof(WebRtc_Word32));
+
+            for (int i = 0; i < lengthIn; i += 220)
+            {
+                WebRtcSpl_Resample22khzTo16khz(samplesIn + i, samplesOut + (i * 8) / 11,
+                                               (WebRtcSpl_State22khzTo16khz *)state1_,
+                                               tmp_mem);
+            }
+            outLen = (lengthIn * 8) / 11;
+            free(tmp_mem);
+            return 0;
+            break;
+
+    }
+    return 0;
+}
+
+// Asynchronous resampling, input: buffer samplesIn, resample every complete 10 ms block via Push(), and queue the output for Pull(). Returns 0, or -1 if not in asynchronous mode.
+int Resampler::Insert(WebRtc_Word16 * samplesIn, int lengthIn)
+{
+    if (my_type_ != kResamplerAsynchronous)
+    {
+        return -1;  // Insert()/Pull() are only valid in asynchronous mode.
+    }
+    int sizeNeeded, tenMsblock;
+
+    // Determine the required size of out_buffer_ and grow it if needed.
+    sizeNeeded = out_buffer_size_ + ((lengthIn + in_buffer_size_) * my_out_frequency_khz_)
+            / my_in_frequency_khz_;
+    if (sizeNeeded > out_buffer_size_max_)
+    {
+        // Round the value upwards to complete 10 ms blocks
+        tenMsblock = my_out_frequency_khz_ * 10;
+        sizeNeeded = (sizeNeeded / tenMsblock + 1) * tenMsblock;
+        out_buffer_ = (WebRtc_Word16*)realloc(out_buffer_, sizeNeeded * sizeof(WebRtc_Word16));  // NOTE(review): realloc result unchecked — TODO confirm OOM policy.
+        out_buffer_size_max_ = sizeNeeded;
+    }
+
+    // If we need to use inBuffer, make sure all input data fits there.
+
+    tenMsblock = my_in_frequency_khz_ * 10;
+    if (in_buffer_size_ || (lengthIn % tenMsblock))  // Leftover data, or input is not a whole number of 10 ms blocks.
+    {
+        // Check if input buffer size is enough
+        if ((in_buffer_size_ + lengthIn) > in_buffer_size_max_)
+        {
+            // Round the value upwards to complete 10 ms blocks
+            sizeNeeded = ((in_buffer_size_ + lengthIn) / tenMsblock + 1) * tenMsblock;
+            in_buffer_ = (WebRtc_Word16*)realloc(in_buffer_,
+                                                 sizeNeeded * sizeof(WebRtc_Word16));
+            in_buffer_size_max_ = sizeNeeded;
+        }
+        // Copy in data to input buffer
+        memcpy(in_buffer_ + in_buffer_size_, samplesIn, lengthIn * sizeof(WebRtc_Word16));
+        in_buffer_size_ += lengthIn;  // Fix: account for the samples just appended; without this they were silently overwritten by the next Insert().
+        // Resample all available 10 ms blocks
+        int lenOut;
+        int dataLenToResample = (in_buffer_size_ / tenMsblock) * tenMsblock;
+        Push(in_buffer_, dataLenToResample, out_buffer_ + out_buffer_size_,
+             out_buffer_size_max_ - out_buffer_size_, lenOut);
+        out_buffer_size_ += lenOut;
+
+        // Keep the incomplete tail block for the next Insert() call.
+        memmove(in_buffer_, in_buffer_ + dataLenToResample,
+                (in_buffer_size_ - dataLenToResample) * sizeof(WebRtc_Word16));
+        in_buffer_size_ -= dataLenToResample;
+    } else
+    {
+        // No buffering needed: resample the caller's data directly.
+        int lenOut;
+        Push(samplesIn, lengthIn, out_buffer_ + out_buffer_size_,  // Fix: was in_buffer_, which is empty in this branch.
+             out_buffer_size_max_ - out_buffer_size_, lenOut);
+        out_buffer_size_ += lenOut;
+    }
+
+    return 0;
+}
+
+// Asynchronous resampling output: deliver desiredLen buffered samples, keep the remainder. Returns 0 on success, -1 if not asynchronous or not enough data.
+int Resampler::Pull(WebRtc_Word16* samplesOut, int desiredLen, int &outLen)
+{
+    if (my_type_ != kResamplerAsynchronous)
+    {
+        return -1;
+    }
+
+    // Check that we have enough data
+    if (desiredLen <= out_buffer_size_)
+    {
+        // Hand the caller the requested samples.
+        memcpy(samplesOut, out_buffer_, desiredLen * sizeof(WebRtc_Word16));  // Fix: was sizeof(WebRtc_Word32) — copied twice the bytes, over-reading out_buffer_ and over-writing samplesOut.
+
+        // Shuffle down remaining
+        memmove(out_buffer_, out_buffer_ + desiredLen,
+                (out_buffer_size_ - desiredLen) * sizeof(WebRtc_Word16));
+
+        // Update remaining size
+        out_buffer_size_ -= desiredLen;
+        outLen = desiredLen;  // Fix: report the delivered length (was left unassigned).
+        return 0;
+    } else
+    {
+        return -1;
+    }
+}
+
+} // namespace webrtc
diff --git a/trunk/src/common_audio/resampler/resampler.gypi b/trunk/src/common_audio/resampler/resampler.gypi
new file mode 100644
index 0000000..69f9b0e
--- /dev/null
+++ b/trunk/src/common_audio/resampler/resampler.gypi
@@ -0,0 +1,55 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'resampler',
+      'type': '<(library)',  # static or shared, resolved by the enclosing build configuration
+      'dependencies': [
+        'signal_processing',  # provides the WebRtcSpl_* resampling kernels used by resampler.cc
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',  # dependents get resampler.h without extra -I flags
+        ],
+      },
+      'sources': [
+        'include/resampler.h',
+        'resampler.cc',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {  # unit tests are built only in standalone (non-Chromium) builds
+      'targets' : [
+        {
+          'target_name': 'resampler_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'resampler',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'resampler_unittest.cc',
+          ],
+        }, # resampler_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/common_audio/resampler/resampler_unittest.cc b/trunk/src/common_audio/resampler/resampler_unittest.cc
new file mode 100644
index 0000000..9b1061a
--- /dev/null
+++ b/trunk/src/common_audio/resampler/resampler_unittest.cc
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+#include "common_audio/resampler/include/resampler.h"
+
+// TODO(andrew): this is a work-in-progress. Many more tests are needed.
+
+namespace webrtc {
+namespace {
+const ResamplerType kTypes[] = {  // All resampler modes exercised by the tests.
+  kResamplerSynchronous,
+  kResamplerAsynchronous,
+  kResamplerSynchronousStereo,
+  kResamplerAsynchronousStereo
+  // kResamplerInvalid excluded
+};
+const size_t kTypesSize = sizeof(kTypes) / sizeof(*kTypes);
+
+// Rates we must support.
+const int kMaxRate = 96000;
+const int kRates[] = {
+  8000,
+  16000,
+  32000,
+  44000,  // NOTE(review): 44000, not 44100 — presumably mirrors the resampler's rate table; confirm.
+  48000,
+  kMaxRate
+};
+const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
+const int kMaxChannels = 2;
+const size_t kDataSize = static_cast<size_t> (kMaxChannels * kMaxRate / 100);  // 10 ms of audio at the max rate and channel count.
+
+// TODO(andrew): should we be supporting these combinations?
+bool ValidRates(int in_rate, int out_rate) {  // False for 44 kHz <-> 48/96 kHz pairs, which Reset() rejects; true otherwise.
+  // Not the most compact notation, for clarity.
+  if ((in_rate == 44000 && (out_rate == 48000 || out_rate == 96000)) ||
+      (out_rate == 44000 && (in_rate == 48000 || in_rate == 96000))) {
+    return false;
+  }
+
+  return true;
+}
+
+class ResamplerTest : public testing::Test {  // Fixture: a fresh Resampler plus max-size in/out sample buffers per test.
+ protected:
+  ResamplerTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  Resampler rs_;  // Reconfigured via Reset() inside each test.
+  int16_t data_in_[kDataSize];   // 10 ms at the max rate/channel count (kDataSize).
+  int16_t data_out_[kDataSize];  // Destination for Push(); same capacity as data_in_.
+};
+
+ResamplerTest::ResamplerTest() {}
+
+void ResamplerTest::SetUp() {
+  // Initialize input data with anything. The tests are content independent.
+  memset(data_in_, 1, sizeof(data_in_));
+}
+
+void ResamplerTest::TearDown() {}
+
+TEST_F(ResamplerTest, Reset) {
+  // The only failure mode for the constructor is if Reset() fails. For the
+  // time being then (until an Init function is added), we rely on Reset()
+  // to test the constructor.
+
+  // Check that all required combinations are supported.
+  for (size_t i = 0; i < kRatesSize; ++i) {
+    for (size_t j = 0; j < kRatesSize; ++j) {
+      for (size_t k = 0; k < kTypesSize; ++k) {
+        std::ostringstream ss;
+        ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]
+            << ", type: " << kTypes[k];
+        SCOPED_TRACE(ss.str());  // Labels any failure with the rate/type combination.
+        if (ValidRates(kRates[i], kRates[j]))
+          EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k]));  // Supported rate pair.
+        else
+          EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kTypes[k]));  // Unsupported 44 kHz <-> 48/96 kHz pair.
+      }
+    }
+  }
+}
+
+// TODO(tlegrand): Replace code inside the two tests below with a function
+// with number of channels and ResamplerType as input.
+TEST_F(ResamplerTest, Synchronous) {  // Push 10 ms of mono audio through every supported rate pair.
+  for (size_t i = 0; i < kRatesSize; ++i) {
+    for (size_t j = 0; j < kRatesSize; ++j) {
+      std::ostringstream ss;
+      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
+      SCOPED_TRACE(ss.str());
+
+      if (ValidRates(kRates[i], kRates[j])) {
+        int in_length = kRates[i] / 100;  // 10 ms of input at the input rate.
+        int out_length = 0;
+        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous));
+        EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
+                              out_length));
+        EXPECT_EQ(kRates[j] / 100, out_length);  // Expect exactly 10 ms at the output rate.
+      } else {
+        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous));
+      }
+    }
+  }
+}
+
+TEST_F(ResamplerTest, SynchronousStereo) {  // Same as Synchronous, but with interleaved two-channel data.
+  // Number of channels is 2, stereo mode.
+  const int kChannels = 2;
+  for (size_t i = 0; i < kRatesSize; ++i) {
+    for (size_t j = 0; j < kRatesSize; ++j) {
+      std::ostringstream ss;
+      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
+      SCOPED_TRACE(ss.str());
+
+      if (ValidRates(kRates[i], kRates[j])) {
+        int in_length = kChannels * kRates[i] / 100;  // 10 ms of interleaved stereo input.
+        int out_length = 0;
+        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j],
+                               kResamplerSynchronousStereo));
+        EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
+                              out_length));
+        EXPECT_EQ(kChannels * kRates[j] / 100, out_length);  // 10 ms of stereo output.
+      } else {
+        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j],
+                                kResamplerSynchronousStereo));
+      }
+    }
+  }
+}
+}  // namespace
+}  // namespace webrtc
diff --git a/trunk/src/common_audio/signal_processing/Android.mk b/trunk/src/common_audio/signal_processing/Android.mk
new file mode 100644
index 0000000..3ff066c
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/Android.mk
@@ -0,0 +1,100 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)  # Reset LOCAL_* variables from any previously defined module.
+
+include $(LOCAL_PATH)/../../../android-webrtc.mk  # Shared build settings (provides MY_WEBRTC_COMMON_DEFS, MY_ARM_CFLAGS_NEON).
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_spl
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    auto_corr_to_refl_coef.c \
+    auto_correlation.c \
+    complex_fft.c \
+    copy_set_operations.c \
+    division_operations.c \
+    dot_product_with_scale.c \
+    energy.c \
+    filter_ar.c \
+    filter_ma_fast_q12.c \
+    get_hanning_window.c \
+    get_scaling_square.c \
+    ilbc_specific_functions.c \
+    levinson_durbin.c \
+    lpc_to_refl_coef.c \
+    min_max_operations.c \
+    randomization_functions.c \
+    refl_coef_to_lpc.c \
+    resample.c \
+    resample_48khz.c \
+    resample_by_2.c \
+    resample_by_2_internal.c \
+    resample_fractional.c \
+    spl_sqrt.c \
+    spl_version.c \
+    splitting_filter.c \
+    sqrt_of_one_minus_x_squared.c \
+    vector_scaling_operations.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../..
+
+ifeq ($(ARCH_ARM_HAVE_NEON),true)  # Prefer NEON-optimized sources when the target supports them.
+LOCAL_SRC_FILES += \
+    min_max_operations_neon.c \
+    cross_correlation_neon.s \
+    downsample_fast_neon.s
+LOCAL_CFLAGS += \
+    $(MY_ARM_CFLAGS_NEON)
+else
+LOCAL_SRC_FILES += \
+    cross_correlation.c \
+    downsample_fast.c
+endif
+
+ifeq ($(ARCH_ARM_HAVE_ARMV7A),true)  # ARMv7-A gets the hand-written assembly filter.
+LOCAL_SRC_FILES += \
+    filter_ar_fast_q12_armv7.s
+else
+LOCAL_SRC_FILES += \
+    filter_ar_fast_q12.c
+endif
+
+ifeq ($(TARGET_ARCH),arm)  # ARM assembly vs. generic C implementations.
+LOCAL_SRC_FILES += \
+    complex_bit_reverse_arm.s \
+    spl_sqrt_floor.s
+else
+LOCAL_SRC_FILES += \
+    complex_bit_reverse.c \
+    spl_sqrt_floor.c
+endif
+
+LOCAL_SHARED_LIBRARIES := libstlport
+
+ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
+LOCAL_LDLIBS += -ldl -lpthread
+endif
+
+ifneq ($(TARGET_SIMULATOR),true)
+LOCAL_SHARED_LIBRARIES += libdl
+endif
+
+ifndef NDK_ROOT  # Platform (non-NDK) builds pull in the STLport makefile themselves.
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/common_audio/signal_processing/auto_corr_to_refl_coef.c b/trunk/src/common_audio/signal_processing/auto_corr_to_refl_coef.c
new file mode 100644
index 0000000..b7e8858
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/auto_corr_to_refl_coef.c
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_AutoCorrToReflCoef().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+void WebRtcSpl_AutoCorrToReflCoef(G_CONST WebRtc_Word32 *R, int use_order, WebRtc_Word16 *K)  // Schur recursion: autocorrelation R (use_order + 1 values) -> use_order reflection coefficients in K.
+{
+    int i, n;
+    WebRtc_Word16 tmp;
+    G_CONST WebRtc_Word32 *rptr;
+    WebRtc_Word32 L_num, L_den;
+    WebRtc_Word16 *acfptr, *pptr, *wptr, *p1ptr, *w1ptr, ACF[WEBRTC_SPL_MAX_LPC_ORDER],
+            P[WEBRTC_SPL_MAX_LPC_ORDER], W[WEBRTC_SPL_MAX_LPC_ORDER];
+
+    // Initialize loop and pointers.
+    acfptr = ACF;
+    rptr = R;
+    pptr = P;
+    p1ptr = &P[1];
+    w1ptr = &W[1];
+    wptr = w1ptr;
+
+    // First loop; n=0. Determine shifting.
+    tmp = WebRtcSpl_NormW32(*R);  // Left-shift that normalizes R[0].
+    *acfptr = (WebRtc_Word16)((*rptr++ << tmp) >> 16);  // Keep the top 16 bits of the normalized value.
+    *pptr++ = *acfptr++;
+
+    // Initialize ACF, P and W.
+    for (i = 1; i <= use_order; i++)
+    {
+        *acfptr = (WebRtc_Word16)((*rptr++ << tmp) >> 16);  // Same normalization applied to every R[i].
+        *wptr++ = *acfptr;
+        *pptr++ = *acfptr++;
+    }
+
+    // Compute reflection coefficients.
+    for (n = 1; n <= use_order; n++, K++)
+    {
+        tmp = WEBRTC_SPL_ABS_W16(*p1ptr);
+        if (*P < tmp)  // P[0] < |P[1]|: bail out, zeroing the remaining coefficients.
+        {
+            for (i = n; i <= use_order; i++)
+                *K++ = 0;
+
+            return;
+        }
+
+        // Division: WebRtcSpl_div(tmp, *P) — 15-step restoring division, |P[1]| / P[0].
+        *K = 0;
+        if (tmp != 0)
+        {
+            L_num = tmp;
+            L_den = *P;
+            i = 15;
+            while (i--)
+            {
+                (*K) <<= 1;
+                L_num <<= 1;
+                if (L_num >= L_den)
+                {
+                    L_num -= L_den;
+                    (*K)++;
+                }
+            }
+            if (*p1ptr > 0)  // Coefficient takes the opposite sign of P[1].
+                *K = -*K;
+        }
+
+        // Last iteration; don't do Schur recursion.
+        if (n == use_order)
+            return;
+
+        // Schur recursion.
+        pptr = P;
+        wptr = w1ptr;
+        tmp = (WebRtc_Word16)(((WebRtc_Word32)*p1ptr * (WebRtc_Word32)*K + 16384) >> 15);  // round(P[1] * K >> 15)
+        *pptr = WEBRTC_SPL_ADD_SAT_W16( *pptr, tmp );
+        pptr++;
+        for (i = 1; i <= use_order - n; i++)
+        {
+            tmp = (WebRtc_Word16)(((WebRtc_Word32)*wptr * (WebRtc_Word32)*K + 16384) >> 15);
+            *pptr = WEBRTC_SPL_ADD_SAT_W16( *(pptr+1), tmp );
+            pptr++;
+            tmp = (WebRtc_Word16)(((WebRtc_Word32)*pptr * (WebRtc_Word32)*K + 16384) >> 15);
+            *wptr = WEBRTC_SPL_ADD_SAT_W16( *wptr, tmp );
+            wptr++;
+        }
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/auto_correlation.c b/trunk/src/common_audio/signal_processing/auto_correlation.c
new file mode 100644
index 0000000..a00fde4
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/auto_correlation.c
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_AutoCorrelation().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+int WebRtcSpl_AutoCorrelation(G_CONST WebRtc_Word16* in_vector,  // [in] input samples
+                              int in_vector_length,              // [in] number of samples
+                              int order,                         // [in] max lag; < 0 means use in_vector_length
+                              WebRtc_Word32* result,             // [out] order + 1 correlation values
+                              int* scale)                        // [out] right-shift applied to each product
+{
+    WebRtc_Word32 sum;
+    int i, j;
+    WebRtc_Word16 smax; // Sample max
+    G_CONST WebRtc_Word16* xptr1;
+    G_CONST WebRtc_Word16* xptr2;
+    WebRtc_Word32* resultptr;
+    int scaling = 0;
+
+#ifdef _ARM_OPT_
+#pragma message("NOTE: _ARM_OPT_ optimizations are used")
+    WebRtc_Word16 loops4;
+#endif
+
+    if (order < 0)
+        order = in_vector_length;
+
+    // Find the max. sample
+    smax = WebRtcSpl_MaxAbsValueW16(in_vector, in_vector_length);
+
+    // In order to avoid overflow when computing the sum we should scale the samples so that
+    // (in_vector_length * smax * smax) will not overflow.
+
+    if (smax == 0)
+    {
+        scaling = 0;  // All-zero input: nothing to scale.
+    } else
+    {
+        int nbits = WebRtcSpl_GetSizeInBits(in_vector_length); // # of bits in the sum loop
+        int t = WebRtcSpl_NormW32(WEBRTC_SPL_MUL(smax, smax)); // # of bits to normalize smax
+
+        if (t > nbits)
+        {
+            scaling = 0;  // Enough headroom already.
+        } else
+        {
+            scaling = nbits - t;
+        }
+
+    }
+
+    resultptr = result;
+
+    // Perform the actual correlation calculation
+    for (i = 0; i < order + 1; i++)  // One pass per lag i.
+    {
+        int loops = (in_vector_length - i);
+        sum = 0;
+        xptr1 = in_vector;
+        xptr2 = &in_vector[i];
+#ifndef _ARM_OPT_
+        for (j = loops; j > 0; j--)
+        {
+            sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1++, *xptr2++, scaling);
+        }
+#else
+        loops4 = (loops >> 2) << 2;  // Largest multiple of 4 <= loops, for the unrolled loop.
+
+        if (scaling == 0)
+        {
+            for (j = 0; j < loops4; j = j + 4)
+            {
+                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
+                xptr1++;
+                xptr2++;
+                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
+                xptr1++;
+                xptr2++;
+                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
+                xptr1++;
+                xptr2++;
+                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
+                xptr1++;
+                xptr2++;
+            }
+
+            for (j = loops4; j < loops; j++)  // Remaining 0-3 products.
+            {
+                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
+                xptr1++;
+                xptr2++;
+            }
+        }
+        else
+        {
+            for (j = 0; j < loops4; j = j + 4)
+            {
+                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
+                xptr1++;
+                xptr2++;
+                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
+                xptr1++;
+                xptr2++;
+                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
+                xptr1++;
+                xptr2++;
+                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
+                xptr1++;
+                xptr2++;
+            }
+
+            for (j = loops4; j < loops; j++)  // Remaining 0-3 products.
+            {
+                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
+                xptr1++;
+                xptr2++;
+            }
+        }
+
+#endif
+        *resultptr++ = sum;  // Correlation at lag i.
+    }
+
+    *scale = scaling;
+
+    return order + 1;  // Number of values written to result.
+}
diff --git a/trunk/src/common_audio/signal_processing/complex_bit_reverse.c b/trunk/src/common_audio/signal_processing/complex_bit_reverse.c
new file mode 100644
index 0000000..02fde1e
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/complex_bit_reverse.c
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "signal_processing_library.h"
+
+/* Tables for data buffer indexes that are bit reversed and thus need to be
+ * swapped. Note that, index_7[{0, 2, 4, ...}] are for the left side of the swap
+ * operations, while index_7[{1, 3, 5, ...}] are for the right side of the
+ * operation. Same for index_8.
+ */
+
+/* Indexes for the case of stages == 7. */
+static const int16_t index_7[112] = {
+  1, 64, 2, 32, 3, 96, 4, 16, 5, 80, 6, 48, 7, 112, 9, 72, 10, 40, 11, 104,
+  12, 24, 13, 88, 14, 56, 15, 120, 17, 68, 18, 36, 19, 100, 21, 84, 22, 52,
+  23, 116, 25, 76, 26, 44, 27, 108, 29, 92, 30, 60, 31, 124, 33, 66, 35, 98,
+  37, 82, 38, 50, 39, 114, 41, 74, 43, 106, 45, 90, 46, 58, 47, 122, 49, 70,
+  51, 102, 53, 86, 55, 118, 57, 78, 59, 110, 61, 94, 63, 126, 67, 97, 69,
+  81, 71, 113, 75, 105, 77, 89, 79, 121, 83, 101, 87, 117, 91, 109, 95, 125,
+  103, 115, 111, 123
+};
+
+/* Indexes for the case of stages == 8. */
+static const int16_t index_8[240] = {
+  1, 128, 2, 64, 3, 192, 4, 32, 5, 160, 6, 96, 7, 224, 8, 16, 9, 144, 10, 80,
+  11, 208, 12, 48, 13, 176, 14, 112, 15, 240, 17, 136, 18, 72, 19, 200, 20,
+  40, 21, 168, 22, 104, 23, 232, 25, 152, 26, 88, 27, 216, 28, 56, 29, 184,
+  30, 120, 31, 248, 33, 132, 34, 68, 35, 196, 37, 164, 38, 100, 39, 228, 41,
+  148, 42, 84, 43, 212, 44, 52, 45, 180, 46, 116, 47, 244, 49, 140, 50, 76,
+  51, 204, 53, 172, 54, 108, 55, 236, 57, 156, 58, 92, 59, 220, 61, 188, 62,
+  124, 63, 252, 65, 130, 67, 194, 69, 162, 70, 98, 71, 226, 73, 146, 74, 82,
+  75, 210, 77, 178, 78, 114, 79, 242, 81, 138, 83, 202, 85, 170, 86, 106, 87,
+  234, 89, 154, 91, 218, 93, 186, 94, 122, 95, 250, 97, 134, 99, 198, 101,
+  166, 103, 230, 105, 150, 107, 214, 109, 182, 110, 118, 111, 246, 113, 142,
+  115, 206, 117, 174, 119, 238, 121, 158, 123, 222, 125, 190, 127, 254, 131,
+  193, 133, 161, 135, 225, 137, 145, 139, 209, 141, 177, 143, 241, 147, 201,
+  149, 169, 151, 233, 155, 217, 157, 185, 159, 249, 163, 197, 167, 229, 171,
+  213, 173, 181, 175, 245, 179, 205, 183, 237, 187, 221, 191, 253, 199, 227,
+  203, 211, 207, 243, 215, 235, 223, 251, 239, 247
+};
+
+void WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages) {
+  /* For any specific value of stages, we know exactly the indexes that are
+   * bit reversed. Currently (Feb. 2012) in WebRTC the only possible values of
+   * stages are 7 and 8, so we use tables to save unnecessary iterations and
+   * calculations for these two cases.
+   */
+  if (stages == 7 || stages == 8) {
+    int m = 0;
+    int length = 112;
+    const int16_t* index = index_7;
+
+    if (stages == 8) {
+      length = 240;
+      index = index_8;
+    }
+
+    /* Decimation in time. Swap the elements with bit-reversed indexes. */
+    for (m = 0; m < length; m += 2) {
+      /* We declare an int32_t* type pointer, to load both the 16-bit real
+       * and imaginary elements from complex_data in one instruction, reducing
+       * complexity. NOTE(review): assumes complex_data is 4-byte aligned.
+       */
+      int32_t* complex_data_ptr = (int32_t*)complex_data;
+      int32_t temp = 0;
+
+      temp = complex_data_ptr[index[m]];  /* Real and imaginary */
+      complex_data_ptr[index[m]] = complex_data_ptr[index[m + 1]];
+      complex_data_ptr[index[m + 1]] = temp;
+    }
+  }
+  else {
+    int m = 0, mr = 0, l = 0;  /* mr tracks the bit-reversed counterpart of m. */
+    int n = 1 << stages;
+    int nn = n - 1;
+
+    /* Decimation in time - re-order data */
+    for (m = 1; m <= nn; ++m) {
+      int32_t* complex_data_ptr = (int32_t*)complex_data;
+      int32_t temp = 0;
+
+      /* Find out indexes that are bit-reversed. */
+      l = n;
+      do {
+        l >>= 1;
+      } while (l > nn - mr);
+      mr = (mr & (l - 1)) + l;
+
+      if (mr <= m) {
+        continue;  /* Each pair is swapped only once (when m < mr). */
+      }
+
+      /* Swap the elements with bit-reversed indexes.
+       * This is similar to the loop in the stages == 7 or 8 cases.
+       */
+      temp = complex_data_ptr[m];  /* Real and imaginary */
+      complex_data_ptr[m] = complex_data_ptr[mr];
+      complex_data_ptr[mr] = temp;
+    }
+  }
+}
+
diff --git a/trunk/src/common_audio/signal_processing/complex_bit_reverse_arm.s b/trunk/src/common_audio/signal_processing/complex_bit_reverse_arm.s
new file mode 100644
index 0000000..4828077
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/complex_bit_reverse_arm.s
@@ -0,0 +1,126 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_ComplexBitReverse(), optimized
+@ for ARMv5 platforms.
+@ Reference C code is in file complex_bit_reverse.c. Bit-exact.
+
+.arch armv5
+
+.global WebRtcSpl_ComplexBitReverse
+
+.align  2
+
+WebRtcSpl_ComplexBitReverse:
+.fnstart
+
+  push {r4-r7}
+
+  cmp r1, #7
+  adr r3, index_7                 @ Table pointer.
+  mov r4, #112                    @ Number of iterations.
+  beq PRE_LOOP_STAGES_7_OR_8
+
+  cmp r1, #8
+  adr r3, index_8                 @ Table pointer.
+  mov r4, #240                    @ Number of iterations.
+  beq PRE_LOOP_STAGES_7_OR_8
+
+  mov r3, #1                      @ Initialize m.
+  mov r1, r3, asl r1              @ n = 1 << stages;
+  subs r6, r1, #1                 @ nn = n - 1;
+  ble END
+
+  mov r5, r0                      @ &complex_data
+  mov r4, #0                      @ mr
+
+LOOP_GENERIC:
+  rsb r12, r4, r6                 @ nn - mr, for the l > nn - mr test below.
+  mov r2, r1                      @ n
+
+LOOP_SHIFT:
+  asr r2, #1                      @ l >>= 1;
+  cmp r2, r12
+  bgt LOOP_SHIFT
+
+  sub r12, r2, #1
+  and r4, r12, r4
+  add r4, r2                      @ mr = (mr & (l - 1)) + l;
+  cmp r4, r3                      @ mr <= m ?
+  ble UPDATE_REGISTERS            @ Skip: each pair is swapped only once.
+
+  mov r12, r4, asl #2
+  ldr r7, [r5, #4]                @ complex_data[2 * m, 2 * m + 1].
+                                  @   Offset 4 due to m incrementing from 1.
+  ldr r2, [r0, r12]               @ complex_data[2 * mr, 2 * mr + 1].
+  str r7, [r0, r12]
+  str r2, [r5, #4]
+
+UPDATE_REGISTERS:
+  add r3, r3, #1
+  add r5, #4
+  cmp r3, r1
+  bne LOOP_GENERIC
+
+  b END
+
+PRE_LOOP_STAGES_7_OR_8:
+  add r4, r3, r4, asl #1          @ End-of-table address for the loop test.
+
+LOOP_STAGES_7_OR_8:
+  ldrsh r2, [r3], #2              @ index[m]
+  ldrsh r5, [r3], #2              @ index[m + 1]
+  ldr r1, [r0, r2]                @ complex_data[index[m], index[m] + 1]
+  ldr r12, [r0, r5]               @ complex_data[index[m + 1], index[m + 1] + 1]
+  cmp r3, r4
+  str r1, [r0, r5]
+  str r12, [r0, r2]
+  bne LOOP_STAGES_7_OR_8
+
+END:
+  pop {r4-r7}
+  bx lr
+
+.fnend
+
+
+@ The index tables. Note the values are doubles of the actual indexes for 16-bit
+@ elements, different from the generic C code. It actually provides byte offsets
+@ for the indexes.
+
+.align  2
+index_7:  @ Indexes for stages == 7.
+  .hword 4, 256, 8, 128, 12, 384, 16, 64, 20, 320, 24, 192, 28, 448, 36, 288
+  .hword 40, 160, 44, 416, 48, 96, 52, 352, 56, 224, 60, 480, 68, 272, 72, 144
+  .hword 76, 400, 84, 336, 88, 208, 92, 464, 100, 304, 104, 176, 108, 432, 116
+  .hword 368, 120, 240, 124, 496, 132, 264, 140, 392, 148, 328, 152, 200, 156
+  .hword 456, 164, 296, 172, 424, 180, 360, 184, 232, 188, 488, 196, 280, 204
+  .hword 408, 212, 344, 220, 472, 228, 312, 236, 440, 244, 376, 252, 504, 268
+  .hword 388, 276, 324, 284, 452, 300, 420, 308, 356, 316, 484, 332, 404, 348
+  .hword 468, 364, 436, 380, 500, 412, 460, 444, 492
+
+index_8:  @ Indexes for stages == 8.
+  .hword 4, 512, 8, 256, 12, 768, 16, 128, 20, 640, 24, 384, 28, 896, 32, 64
+  .hword 36, 576, 40, 320, 44, 832, 48, 192, 52, 704, 56, 448, 60, 960, 68, 544
+  .hword 72, 288, 76, 800, 80, 160, 84, 672, 88, 416, 92, 928, 100, 608, 104
+  .hword 352, 108, 864, 112, 224, 116, 736, 120, 480, 124, 992, 132, 528, 136
+  .hword 272, 140, 784, 148, 656, 152, 400, 156, 912, 164, 592, 168, 336, 172
+  .hword 848, 176, 208, 180, 720, 184, 464, 188, 976, 196, 560, 200, 304, 204
+  .hword 816, 212, 688, 216, 432, 220, 944, 228, 624, 232, 368, 236, 880, 244
+  .hword 752, 248, 496, 252, 1008, 260, 520, 268, 776, 276, 648, 280, 392, 284
+  .hword 904, 292, 584, 296, 328, 300, 840, 308, 712, 312, 456, 316, 968, 324
+  .hword 552, 332, 808, 340, 680, 344, 424, 348, 936, 356, 616, 364, 872, 372
+  .hword 744, 376, 488, 380, 1000, 388, 536, 396, 792, 404, 664, 412, 920, 420
+  .hword 600, 428, 856, 436, 728, 440, 472, 444, 984, 452, 568, 460, 824, 468
+  .hword 696, 476, 952, 484, 632, 492, 888, 500, 760, 508, 1016, 524, 772, 532
+  .hword 644, 540, 900, 548, 580, 556, 836, 564, 708, 572, 964, 588, 804, 596
+  .hword 676, 604, 932, 620, 868, 628, 740, 636, 996, 652, 788, 668, 916, 684
+  .hword 852, 692, 724, 700, 980, 716, 820, 732, 948, 748, 884, 764, 1012, 796
+  .hword 908, 812, 844, 828, 972, 860, 940, 892, 1004, 956, 988
diff --git a/trunk/src/common_audio/signal_processing/complex_fft.c b/trunk/src/common_audio/signal_processing/complex_fft.c
new file mode 100644
index 0000000..1e8503c
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/complex_fft.c
@@ -0,0 +1,425 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_ComplexFFT().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+/* Forward-FFT precision/rounding constants: the high-accuracy butterflies
+ * keep CFFTSFT extra fractional bits; CFFTRND is the rounding term added
+ * before the small (15 - CFFTSFT) pre-shift, and CFFTRND2 (== 1 << CFFTSFT,
+ * i.e. half of the final 1 + CFFTSFT right-shift) rounds the downscale. */
+#define CFFTSFT 14
+#define CFFTRND 1
+#define CFFTRND2 16384
+
+/* Matching constants for the inverse FFT. */
+#define CIFFTSFT 14
+#define CIFFTRND 1
+
+/* 1024-entry sine lookup table in Q15 (one full period, peak 32767).
+ * Index 256 is the quarter-period offset, so kSinTable1024[j + 256] serves
+ * as the matching cosine for the twiddle factors (the FFT code keeps
+ * 0 <= j < 512, so j + 256 never overruns the table). */
+static const WebRtc_Word16 kSinTable1024[] = {
+      0,    201,    402,    603,    804,   1005,   1206,   1406,
+   1607,   1808,   2009,   2209,   2410,   2610,   2811,   3011,
+   3211,   3411,   3611,   3811,   4011,   4210,   4409,   4608,
+   4807,   5006,   5205,   5403,   5601,   5799,   5997,   6195,
+   6392,   6589,   6786,   6982,   7179,   7375,   7571,   7766,
+   7961,   8156,   8351,   8545,   8739,   8932,   9126,   9319,
+   9511,   9703,   9895,  10087,  10278,  10469,  10659,  10849,
+  11038,  11227,  11416,  11604,  11792,  11980,  12166,  12353,
+  12539,  12724,  12909,  13094,  13278,  13462,  13645,  13827,
+  14009,  14191,  14372,  14552,  14732,  14911,  15090,  15268,
+  15446,  15623,  15799,  15975,  16150,  16325,  16499,  16672,
+  16845,  17017,  17189,  17360,  17530,  17699,  17868,  18036,
+  18204,  18371,  18537,  18702,  18867,  19031,  19194,  19357,
+  19519,  19680,  19840,  20000,  20159,  20317,  20474,  20631,
+  20787,  20942,  21096,  21249,  21402,  21554,  21705,  21855,
+  22004,  22153,  22301,  22448,  22594,  22739,  22883,  23027,
+  23169,  23311,  23452,  23592,  23731,  23869,  24006,  24143,
+  24278,  24413,  24546,  24679,  24811,  24942,  25072,  25201,
+  25329,  25456,  25582,  25707,  25831,  25954,  26077,  26198,
+  26318,  26437,  26556,  26673,  26789,  26905,  27019,  27132,
+  27244,  27355,  27466,  27575,  27683,  27790,  27896,  28001,
+  28105,  28208,  28309,  28410,  28510,  28608,  28706,  28802,
+  28897,  28992,  29085,  29177,  29268,  29358,  29446,  29534,
+  29621,  29706,  29790,  29873,  29955,  30036,  30116,  30195,
+  30272,  30349,  30424,  30498,  30571,  30643,  30713,  30783,
+  30851,  30918,  30984,  31049,
+  31113,  31175,  31236,  31297,
+  31356,  31413,  31470,  31525,  31580,  31633,  31684,  31735,
+  31785,  31833,  31880,  31926,  31970,  32014,  32056,  32097,
+  32137,  32176,  32213,  32249,  32284,  32318,  32350,  32382,
+  32412,  32441,  32468,  32495,  32520,  32544,  32567,  32588,
+  32609,  32628,  32646,  32662,  32678,  32692,  32705,  32717,
+  32727,  32736,  32744,  32751,  32757,  32761,  32764,  32766,
+  32767,  32766,  32764,  32761,  32757,  32751,  32744,  32736,
+  32727,  32717,  32705,  32692,  32678,  32662,  32646,  32628,
+  32609,  32588,  32567,  32544,  32520,  32495,  32468,  32441,
+  32412,  32382,  32350,  32318,  32284,  32249,  32213,  32176,
+  32137,  32097,  32056,  32014,  31970,  31926,  31880,  31833,
+  31785,  31735,  31684,  31633,  31580,  31525,  31470,  31413,
+  31356,  31297,  31236,  31175,  31113,  31049,  30984,  30918,
+  30851,  30783,  30713,  30643,  30571,  30498,  30424,  30349,
+  30272,  30195,  30116,  30036,  29955,  29873,  29790,  29706,
+  29621,  29534,  29446,  29358,  29268,  29177,  29085,  28992,
+  28897,  28802,  28706,  28608,  28510,  28410,  28309,  28208,
+  28105,  28001,  27896,  27790,  27683,  27575,  27466,  27355,
+  27244,  27132,  27019,  26905,  26789,  26673,  26556,  26437,
+  26318,  26198,  26077,  25954,  25831,  25707,  25582,  25456,
+  25329,  25201,  25072,  24942,  24811,  24679,  24546,  24413,
+  24278,  24143,  24006,  23869,  23731,  23592,  23452,  23311,
+  23169,  23027,  22883,  22739,  22594,  22448,  22301,  22153,
+  22004,  21855,  21705,  21554,  21402,  21249,  21096,  20942,
+  20787,  20631,  20474,  20317,  20159,  20000,  19840,  19680,
+  19519,  19357,  19194,  19031,  18867,  18702,  18537,  18371,
+  18204,  18036,  17868,  17699,  17530,  17360,  17189,  17017,
+  16845,  16672,  16499,  16325,  16150,  15975,  15799,  15623,
+  15446,  15268,  15090,  14911,  14732,  14552,  14372,  14191,
+  14009,  13827,  13645,  13462,  13278,  13094,  12909,  12724,
+  12539,  12353,  12166,  11980,  11792,  11604,  11416,  11227,
+  11038,  10849,  10659,  10469,  10278,  10087,   9895,   9703,
+   9511,   9319,   9126,   8932,   8739,   8545,   8351,   8156,
+   7961,   7766,   7571,   7375,   7179,   6982,   6786,   6589,
+   6392,   6195,   5997,   5799,   5601,   5403,   5205,   5006,
+   4807,   4608,   4409,   4210,   4011,   3811,   3611,   3411,
+   3211,   3011,   2811,   2610,   2410,   2209,   2009,   1808,
+   1607,   1406,   1206,   1005,    804,    603,    402,    201,
+      0,   -201,   -402,   -603,   -804,  -1005,  -1206,  -1406,
+  -1607,  -1808,  -2009,  -2209,  -2410,  -2610,  -2811,  -3011,
+  -3211,  -3411,  -3611,  -3811,  -4011,  -4210,  -4409,  -4608,
+  -4807,  -5006,  -5205,  -5403,  -5601,  -5799,  -5997,  -6195,
+  -6392,  -6589,  -6786,  -6982,  -7179,  -7375,  -7571,  -7766,
+  -7961,  -8156,  -8351,  -8545,  -8739,  -8932,  -9126,  -9319,
+  -9511,  -9703,  -9895, -10087, -10278, -10469, -10659, -10849,
+ -11038, -11227, -11416, -11604, -11792, -11980, -12166, -12353,
+ -12539, -12724, -12909, -13094, -13278, -13462, -13645, -13827,
+ -14009, -14191, -14372, -14552, -14732, -14911, -15090, -15268,
+ -15446, -15623, -15799, -15975, -16150, -16325, -16499, -16672,
+ -16845, -17017, -17189, -17360, -17530, -17699, -17868, -18036,
+ -18204, -18371, -18537, -18702, -18867, -19031, -19194, -19357,
+ -19519, -19680, -19840, -20000, -20159, -20317, -20474, -20631,
+ -20787, -20942, -21096, -21249, -21402, -21554, -21705, -21855,
+ -22004, -22153, -22301, -22448, -22594, -22739, -22883, -23027,
+ -23169, -23311, -23452, -23592, -23731, -23869, -24006, -24143,
+ -24278, -24413, -24546, -24679, -24811, -24942, -25072, -25201,
+ -25329, -25456, -25582, -25707, -25831, -25954, -26077, -26198,
+ -26318, -26437, -26556, -26673, -26789, -26905, -27019, -27132,
+ -27244, -27355, -27466, -27575, -27683, -27790, -27896, -28001,
+ -28105, -28208, -28309, -28410, -28510, -28608, -28706, -28802,
+ -28897, -28992, -29085, -29177, -29268, -29358, -29446, -29534,
+ -29621, -29706, -29790, -29873, -29955, -30036, -30116, -30195,
+ -30272, -30349, -30424, -30498, -30571, -30643, -30713, -30783,
+ -30851, -30918, -30984, -31049, -31113, -31175, -31236, -31297,
+ -31356, -31413, -31470, -31525, -31580, -31633, -31684, -31735,
+ -31785, -31833, -31880, -31926, -31970, -32014, -32056, -32097,
+ -32137, -32176, -32213, -32249, -32284, -32318, -32350, -32382,
+ -32412, -32441, -32468, -32495, -32520, -32544, -32567, -32588,
+ -32609, -32628, -32646, -32662, -32678, -32692, -32705, -32717,
+ -32727, -32736, -32744, -32751, -32757, -32761, -32764, -32766,
+ -32767, -32766, -32764, -32761, -32757, -32751, -32744, -32736,
+ -32727, -32717, -32705, -32692, -32678, -32662, -32646, -32628,
+ -32609, -32588, -32567, -32544, -32520, -32495, -32468, -32441,
+ -32412, -32382, -32350, -32318, -32284, -32249, -32213, -32176,
+ -32137, -32097, -32056, -32014, -31970, -31926, -31880, -31833,
+ -31785, -31735, -31684, -31633, -31580, -31525, -31470, -31413,
+ -31356, -31297, -31236, -31175, -31113, -31049, -30984, -30918,
+ -30851, -30783, -30713, -30643, -30571, -30498, -30424, -30349,
+ -30272, -30195, -30116, -30036, -29955, -29873, -29790, -29706,
+ -29621, -29534, -29446, -29358, -29268, -29177, -29085, -28992,
+ -28897, -28802, -28706, -28608, -28510, -28410, -28309, -28208,
+ -28105, -28001, -27896, -27790, -27683, -27575, -27466, -27355,
+ -27244, -27132, -27019, -26905, -26789, -26673, -26556, -26437,
+ -26318, -26198, -26077, -25954, -25831, -25707, -25582, -25456,
+ -25329, -25201, -25072, -24942, -24811, -24679, -24546, -24413,
+ -24278, -24143, -24006, -23869, -23731, -23592, -23452, -23311,
+ -23169, -23027, -22883, -22739, -22594, -22448, -22301, -22153,
+ -22004, -21855, -21705, -21554, -21402, -21249, -21096, -20942,
+ -20787, -20631, -20474, -20317, -20159, -20000, -19840, -19680,
+ -19519, -19357, -19194, -19031, -18867, -18702, -18537, -18371,
+ -18204, -18036, -17868, -17699, -17530, -17360, -17189, -17017,
+ -16845, -16672, -16499, -16325, -16150, -15975, -15799, -15623,
+ -15446, -15268, -15090, -14911, -14732, -14552, -14372, -14191,
+ -14009, -13827, -13645, -13462, -13278, -13094, -12909, -12724,
+ -12539, -12353, -12166, -11980, -11792, -11604, -11416, -11227,
+ -11038, -10849, -10659, -10469, -10278, -10087,  -9895,  -9703,
+  -9511,  -9319,  -9126,  -8932,  -8739,  -8545,  -8351,  -8156,
+  -7961,  -7766,  -7571,  -7375,  -7179,  -6982,  -6786,  -6589,
+  -6392,  -6195,  -5997,  -5799,  -5601,  -5403,  -5205,  -5006,
+  -4807,  -4608,  -4409,  -4210,  -4011,  -3811,  -3611,  -3411,
+  -3211,  -3011,  -2811,  -2610,  -2410,  -2209,  -2009,  -1808,
+  -1607,  -1406,  -1206,  -1005,   -804,   -603,   -402,   -201
+};
+
+/* In-place complex FFT of 2^stages points over interleaved {re, im} 16-bit
+ * samples in frfi[] (frfi must hold 2 * 2^stages words).
+ * mode == 0: low-complexity/low-accuracy butterflies (truncating >> 1 per
+ * stage, no rounding). mode != 0: high-accuracy butterflies that carry
+ * CFFTSFT extra fractional bits with rounding, plus an ARMv7 dual-MAC
+ * inline-assembly fast path.
+ * Returns 0 on success, or -1 when 2^stages exceeds the 1024-entry twiddle
+ * table. */
+int WebRtcSpl_ComplexFFT(WebRtc_Word16 frfi[], int stages, int mode)
+{
+    int i, j, l, k, istep, n, m;
+    WebRtc_Word16 wr, wi;  /* Twiddle factor: cosine and -sine, Q15. */
+    WebRtc_Word32 tr32, ti32, qr32, qi32;
+
+    /* The 1024-value is a constant given from the size of kSinTable1024[],
+     * and should not be changed depending on the input parameter 'stages'
+     */
+    n = 1 << stages;
+    if (n > 1024)
+        return -1;
+
+    l = 1;
+    k = 10 - 1; /* Constant for given kSinTable1024[]. Do not change
+         depending on the input parameter 'stages' */
+
+    if (mode == 0)
+    {
+        // mode==0: Low-complexity and Low-accuracy mode
+        while (l < n)
+        {
+            istep = l << 1;
+
+            for (m = 0; m < l; ++m)
+            {
+                j = m << k;
+
+                /* The 256-value is a constant given as 1/4 of the size of
+                 * kSinTable1024[], and should not be changed depending on the input
+                 * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2
+                 */
+                wr = kSinTable1024[j + 256];
+                wi = -kSinTable1024[j];
+
+                for (i = m; i < n; i += istep)
+                {
+                    j = i + l;
+
+                    /* (tr32, ti32) = twiddle * frfi[j]; >> 15 removes the
+                     * Q15 scaling of the table (truncating, no rounding). */
+                    tr32 = WEBRTC_SPL_RSHIFT_W32((WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j])
+                            - WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j + 1])), 15);
+
+                    ti32 = WEBRTC_SPL_RSHIFT_W32((WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j + 1])
+                            + WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j])), 15);
+
+                    /* Butterfly; the trailing >> 1 halves every output each
+                     * stage to guard against overflow. */
+                    qr32 = (WebRtc_Word32)frfi[2 * i];
+                    qi32 = (WebRtc_Word32)frfi[2 * i + 1];
+                    frfi[2 * j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qr32 - tr32, 1);
+                    frfi[2 * j + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qi32 - ti32, 1);
+                    frfi[2 * i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qr32 + tr32, 1);
+                    frfi[2 * i + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qi32 + ti32, 1);
+                }
+            }
+
+            --k;
+            l = istep;
+
+        }
+
+    } else
+    {
+        // mode==1: High-complexity and High-accuracy mode
+        while (l < n)
+        {
+            istep = l << 1;
+
+            for (m = 0; m < l; ++m)
+            {
+                j = m << k;
+
+                /* The 256-value is a constant given as 1/4 of the size of
+                 * kSinTable1024[], and should not be changed depending on the input
+                 * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2
+                 */
+                wr = kSinTable1024[j + 256];
+                wi = -kSinTable1024[j];
+
+#ifdef WEBRTC_ARCH_ARM_V7A
+                /* Pack {wr, wi} into a single register so the dual 16-bit
+                 * MAC instructions below can consume both halves at once. */
+                WebRtc_Word32 wri;
+                WebRtc_Word32 frfi_r;
+                __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) :
+                    "r"((WebRtc_Word32)wr), "r"((WebRtc_Word32)wi));
+#endif
+
+                for (i = m; i < n; i += istep)
+                {
+                    j = i + l;
+
+#ifdef WEBRTC_ARCH_ARM_V7A
+                    /* SMLSD/SMLADX evaluate the same two rounded dot
+                     * products as the C expressions in the #else branch. */
+                    __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(frfi_r) :
+                        "r"((WebRtc_Word32)frfi[2*j]), "r"((WebRtc_Word32)frfi[2*j +1]));
+                    __asm__("smlsd %0, %1, %2, %3" : "=r"(tr32) :
+                        "r"(wri), "r"(frfi_r), "r"(CFFTRND));
+                    __asm__("smladx %0, %1, %2, %3" : "=r"(ti32) :
+                        "r"(wri), "r"(frfi_r), "r"(CFFTRND));
+
+#else
+                    tr32 = WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j])
+                            - WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j + 1]) + CFFTRND;
+
+                    ti32 = WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j + 1])
+                            + WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j]) + CFFTRND;
+#endif
+
+                    /* Keep CFFTSFT fractional bits through the butterfly. */
+                    tr32 = WEBRTC_SPL_RSHIFT_W32(tr32, 15 - CFFTSFT);
+                    ti32 = WEBRTC_SPL_RSHIFT_W32(ti32, 15 - CFFTSFT);
+
+                    qr32 = ((WebRtc_Word32)frfi[2 * i]) << CFFTSFT;
+                    qi32 = ((WebRtc_Word32)frfi[2 * i + 1]) << CFFTSFT;
+
+                    /* Round (CFFTRND2 == half of the 1 + CFFTSFT shift) and
+                     * scale down by 2 per stage, as in mode 0. */
+                    frfi[2 * j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+                            (qr32 - tr32 + CFFTRND2), 1 + CFFTSFT);
+                    frfi[2 * j + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+                            (qi32 - ti32 + CFFTRND2), 1 + CFFTSFT);
+                    frfi[2 * i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+                            (qr32 + tr32 + CFFTRND2), 1 + CFFTSFT);
+                    frfi[2 * i + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+                            (qi32 + ti32 + CFFTRND2), 1 + CFFTSFT);
+                }
+            }
+
+            --k;
+            l = istep;
+        }
+    }
+    return 0;
+}
+
+/* In-place complex inverse FFT of 2^stages points over interleaved {re, im}
+ * 16-bit samples in frfi[]. The twiddles use +sin (conjugate of the forward
+ * transform). Unlike the forward FFT, downscaling is data-dependent: before
+ * each stage the current maximum absolute sample decides whether 0, 1 or 2
+ * guard shifts are applied for that stage.
+ * Returns the total number of such guard shifts (the caller must compensate
+ * for this scaling), or -1 when 2^stages exceeds the twiddle table size. */
+int WebRtcSpl_ComplexIFFT(WebRtc_Word16 frfi[], int stages, int mode)
+{
+    int i, j, l, k, istep, n, m, scale, shift;
+    WebRtc_Word16 wr, wi;  /* Twiddle factor: cosine and +sine, Q15. */
+    WebRtc_Word32 tr32, ti32, qr32, qi32;
+    WebRtc_Word32 tmp32, round2;
+
+    /* The 1024-value is a constant given from the size of kSinTable1024[],
+     * and should not be changed depending on the input parameter 'stages'
+     */
+    n = 1 << stages;
+    if (n > 1024)
+        return -1;
+
+    scale = 0;  /* Accumulates the guard shifts over all stages. */
+
+    l = 1;
+    k = 10 - 1; /* Constant for given kSinTable1024[]. Do not change
+         depending on the input parameter 'stages' */
+
+    while (l < n)
+    {
+        // variable scaling, depending upon data
+        shift = 0;
+        /* round2 is kept at half of the stage's final right-shift, i.e.
+         * 1 << (shift + CIFFTSFT - 1), for round-to-nearest below. */
+        round2 = 8192;
+
+        /* NOTE(review): thresholds 13573 (~0.414 * 32768) and 27146 (twice
+         * that) choose 1 or 2 guard shifts -- confirm the derivation. */
+        tmp32 = (WebRtc_Word32)WebRtcSpl_MaxAbsValueW16(frfi, 2 * n);
+        if (tmp32 > 13573)
+        {
+            shift++;
+            scale++;
+            round2 <<= 1;
+        }
+        if (tmp32 > 27146)
+        {
+            shift++;
+            scale++;
+            round2 <<= 1;
+        }
+
+        istep = l << 1;
+
+        if (mode == 0)
+        {
+            // mode==0: Low-complexity and Low-accuracy mode
+            for (m = 0; m < l; ++m)
+            {
+                j = m << k;
+
+                /* The 256-value is a constant given as 1/4 of the size of
+                 * kSinTable1024[], and should not be changed depending on the input
+                 * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2
+                 */
+                wr = kSinTable1024[j + 256];
+                wi = kSinTable1024[j];
+
+                for (i = m; i < n; i += istep)
+                {
+                    j = i + l;
+
+                    /* Twiddle multiply; >> 15 removes the table's Q15
+                     * scaling (truncating, no rounding in this mode). */
+                    tr32 = WEBRTC_SPL_RSHIFT_W32((WEBRTC_SPL_MUL_16_16_RSFT(wr, frfi[2 * j], 0)
+                            - WEBRTC_SPL_MUL_16_16_RSFT(wi, frfi[2 * j + 1], 0)), 15);
+
+                    ti32 = WEBRTC_SPL_RSHIFT_W32(
+                            (WEBRTC_SPL_MUL_16_16_RSFT(wr, frfi[2 * j + 1], 0)
+                                    + WEBRTC_SPL_MUL_16_16_RSFT(wi,frfi[2*j],0)), 15);
+
+                    /* Butterfly with the data-dependent guard shift. */
+                    qr32 = (WebRtc_Word32)frfi[2 * i];
+                    qi32 = (WebRtc_Word32)frfi[2 * i + 1];
+                    frfi[2 * j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qr32 - tr32, shift);
+                    frfi[2 * j + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qi32 - ti32, shift);
+                    frfi[2 * i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qr32 + tr32, shift);
+                    frfi[2 * i + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(qi32 + ti32, shift);
+                }
+            }
+        } else
+        {
+            // mode==1: High-complexity and High-accuracy mode
+
+            for (m = 0; m < l; ++m)
+            {
+                j = m << k;
+
+                /* The 256-value is a constant given as 1/4 of the size of
+                 * kSinTable1024[], and should not be changed depending on the input
+                 * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2
+                 */
+                wr = kSinTable1024[j + 256];
+                wi = kSinTable1024[j];
+
+#ifdef WEBRTC_ARCH_ARM_V7A
+                /* Pack {wr, wi} into a single register so the dual 16-bit
+                 * MAC instructions below can consume both halves at once. */
+                WebRtc_Word32 wri;
+                WebRtc_Word32 frfi_r;
+                __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) :
+                    "r"((WebRtc_Word32)wr), "r"((WebRtc_Word32)wi));
+#endif
+
+                for (i = m; i < n; i += istep)
+                {
+                    j = i + l;
+
+#ifdef WEBRTC_ARCH_ARM_V7A
+                    /* SMLSD/SMLADX evaluate the same two rounded dot
+                     * products as the C expressions in the #else branch. */
+                    __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(frfi_r) :
+                        "r"((WebRtc_Word32)frfi[2*j]), "r"((WebRtc_Word32)frfi[2*j +1]));
+                    __asm__("smlsd %0, %1, %2, %3" : "=r"(tr32) :
+                        "r"(wri), "r"(frfi_r), "r"(CIFFTRND));
+                    __asm__("smladx %0, %1, %2, %3" : "=r"(ti32) :
+                        "r"(wri), "r"(frfi_r), "r"(CIFFTRND));
+#else
+
+                    tr32 = WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j])
+                            - WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j + 1]) + CIFFTRND;
+
+                    ti32 = WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j + 1])
+                            + WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j]) + CIFFTRND;
+#endif
+                    /* Keep CIFFTSFT fractional bits through the butterfly,
+                     * then round via round2 when shifting back down. */
+                    tr32 = WEBRTC_SPL_RSHIFT_W32(tr32, 15 - CIFFTSFT);
+                    ti32 = WEBRTC_SPL_RSHIFT_W32(ti32, 15 - CIFFTSFT);
+
+                    qr32 = ((WebRtc_Word32)frfi[2 * i]) << CIFFTSFT;
+                    qi32 = ((WebRtc_Word32)frfi[2 * i + 1]) << CIFFTSFT;
+
+                    frfi[2 * j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((qr32 - tr32+round2),
+                                                                       shift+CIFFTSFT);
+                    frfi[2 * j + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+                            (qi32 - ti32 + round2), shift + CIFFTSFT);
+                    frfi[2 * i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((qr32 + tr32 + round2),
+                                                                       shift + CIFFTSFT);
+                    frfi[2 * i + 1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+                            (qi32 + ti32 + round2), shift + CIFFTSFT);
+                }
+            }
+
+        }
+        --k;
+        l = istep;
+    }
+    return scale;
+}
diff --git a/trunk/src/common_audio/signal_processing/copy_set_operations.c b/trunk/src/common_audio/signal_processing/copy_set_operations.c
new file mode 100644
index 0000000..8247337
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/copy_set_operations.c
@@ -0,0 +1,108 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the implementation of functions
+ * WebRtcSpl_MemSetW16()
+ * WebRtcSpl_MemSetW32()
+ * WebRtcSpl_MemCpyReversedOrder()
+ * WebRtcSpl_CopyFromEndW16()
+ * WebRtcSpl_ZerosArrayW16()
+ * WebRtcSpl_ZerosArrayW32()
+ * WebRtcSpl_OnesArrayW16()
+ * WebRtcSpl_OnesArrayW32()
+ *
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include <string.h>
+#include "signal_processing_library.h"
+
+
+void WebRtcSpl_MemSetW16(WebRtc_Word16 *ptr, WebRtc_Word16 set_value, int length)
+{
+    // Fill |length| consecutive 16-bit words starting at |ptr| with
+    // |set_value|.
+    WebRtc_Word16 *dst = ptr;
+    int remaining = length;
+
+    while (remaining > 0)
+    {
+        *dst++ = set_value;
+        remaining--;
+    }
+}
+
+void WebRtcSpl_MemSetW32(WebRtc_Word32 *ptr, WebRtc_Word32 set_value, int length)
+{
+    // Fill |length| consecutive 32-bit words starting at |ptr| with
+    // |set_value|.
+    WebRtc_Word32 *dst = ptr;
+    int remaining = length;
+
+    while (remaining > 0)
+    {
+        *dst++ = set_value;
+        remaining--;
+    }
+}
+
+void WebRtcSpl_MemCpyReversedOrder(WebRtc_Word16* dest, WebRtc_Word16* source, int length)
+{
+    // Copy |length| samples from |source| (read forwards) into |dest|
+    // (written backwards: dest[0], dest[-1], ...).
+    // NOTE(review): |dest| must point at the LAST element of the
+    // destination region, since it is walked downwards.
+    int i;
+
+    for (i = 0; i < length; i++)
+    {
+        dest[-i] = source[i];
+    }
+}
+
+WebRtc_Word16 WebRtcSpl_CopyFromEndW16(G_CONST WebRtc_Word16 *vector_in,
+                                       WebRtc_Word16 length,
+                                       WebRtc_Word16 samples,
+                                       WebRtc_Word16 *vector_out)
+{
+    // Copy the last |samples| elements of |vector_in| (total length
+    // |length|) into |vector_out|; returns the number of copied samples.
+    G_CONST WebRtc_Word16 *copy_from = vector_in + length - samples;
+
+    WEBRTC_SPL_MEMCPY_W16(vector_out, copy_from, samples);
+
+    return samples;
+}
+
+WebRtc_Word16 WebRtcSpl_ZerosArrayW16(WebRtc_Word16 *vector, WebRtc_Word16 length)
+{
+    // Zero out |length| 16-bit elements of |vector|; returns |length|.
+    WebRtcSpl_MemSetW16(vector, 0, (int)length);
+    return length;
+}
+
+WebRtc_Word16 WebRtcSpl_ZerosArrayW32(WebRtc_Word32 *vector, WebRtc_Word16 length)
+{
+    // Zero out |length| 32-bit elements of |vector|; returns |length|.
+    WebRtcSpl_MemSetW32(vector, 0, (int)length);
+    return length;
+}
+
+WebRtc_Word16 WebRtcSpl_OnesArrayW16(WebRtc_Word16 *vector, WebRtc_Word16 length)
+{
+    // Set |length| 16-bit elements of |vector| to 1; returns |length|.
+    WebRtc_Word16 n;
+
+    for (n = 0; n < length; n++)
+    {
+        vector[n] = 1;
+    }
+    return length;
+}
+
+WebRtc_Word16 WebRtcSpl_OnesArrayW32(WebRtc_Word32 *vector, WebRtc_Word16 length)
+{
+    // Set |length| 32-bit elements of |vector| to 1; returns |length|.
+    WebRtc_Word16 n;
+
+    for (n = 0; n < length; n++)
+    {
+        vector[n] = 1;
+    }
+    return length;
+}
diff --git a/trunk/src/common_audio/signal_processing/cross_correlation.c b/trunk/src/common_audio/signal_processing/cross_correlation.c
new file mode 100644
index 0000000..726a749
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/cross_correlation.c
@@ -0,0 +1,271 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_CrossCorrelation().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+/* TODO(kma): Clean up the code in this file, and break it up for
+ * various platforms (Xscale, ARM/Neon etc.).
+ */
+
+#include "signal_processing_library.h"
+
+/* Cross-correlates seq1 (held fixed, dim_seq samples) against seq2.
+ * Produces dim_cross_correlation output values; for each output, seq2 is
+ * advanced by step_seq2 samples (step_seq2 may be negative to slide
+ * backwards). In the default C build every 16x16 product is right-shifted
+ * by right_shifts before accumulation.
+ * Three compile-time variants: _XSCALE_OPT_ (XScale coprocessor 40-bit
+ * MAC; shifts the accumulator once at the end, so rounding differs from
+ * the C path), _ARM_OPT_ (8-fold unrolled C), and the portable default. */
+void WebRtcSpl_CrossCorrelation(WebRtc_Word32* cross_correlation, WebRtc_Word16* seq1,
+                                WebRtc_Word16* seq2, WebRtc_Word16 dim_seq,
+                                WebRtc_Word16 dim_cross_correlation,
+                                WebRtc_Word16 right_shifts,
+                                WebRtc_Word16 step_seq2)
+{
+    int i, j;
+    WebRtc_Word16* seq1Ptr;
+    WebRtc_Word16* seq2Ptr;
+    WebRtc_Word32* CrossCorrPtr;
+
+#ifdef _XSCALE_OPT_
+
+#ifdef _WIN32
+#pragma message("NOTE: _XSCALE_OPT_ optimizations are used (overrides _ARM_OPT_ and requires /QRxscale compiler flag)")
+#endif
+
+    __int64 macc40;  /* 40-bit multiply-accumulate register image. */
+
+    /* Packed copies of the inputs: two 16-bit samples per 32-bit word.
+     * iseq2 holds even-aligned pairs of seq2; iseq3 the odd-aligned pairs,
+     * so either even or odd lags can use word-aligned dual MACs. */
+    int iseq1[250];
+    int iseq2[250];
+    int iseq3[250];
+    int * iseq1Ptr;
+    int * iseq2Ptr;
+    int * iseq3Ptr;
+    int len, i_len;
+
+    /* Pack seq1 two samples per word. */
+    seq1Ptr = seq1;
+    iseq1Ptr = iseq1;
+    for(i = 0; i < ((dim_seq + 1) >> 1); i++)
+    {
+        *iseq1Ptr = (unsigned short)*seq1Ptr++;
+        *iseq1Ptr++ |= (WebRtc_Word32)*seq1Ptr++ << 16;
+
+    }
+
+    /* Clear the stray upper half of an odd-length pack, then zero-pad so
+     * the unrolled MAC loop below may safely over-read. */
+    if(dim_seq%2)
+    {
+        *(iseq1Ptr-1) &= 0x0000ffff;
+    }
+    *iseq1Ptr = 0;
+    iseq1Ptr++;
+    *iseq1Ptr = 0;
+    iseq1Ptr++;
+    *iseq1Ptr = 0;
+
+    /* For a negative step, start at the earliest sample and fill the
+     * output backwards so results land in the caller's expected order. */
+    if(step_seq2 < 0)
+    {
+        seq2Ptr = seq2 - dim_cross_correlation + 1;
+        CrossCorrPtr = &cross_correlation[dim_cross_correlation - 1];
+    }
+    else
+    {
+        seq2Ptr = seq2;
+        CrossCorrPtr = cross_correlation;
+    }
+
+    /* Pack the full sliding range of seq2 into both alignments. */
+    len = dim_seq + dim_cross_correlation - 1;
+    i_len = (len + 1) >> 1;
+    iseq2Ptr = iseq2;
+
+    iseq3Ptr = iseq3;
+    for(i = 0; i < i_len; i++)
+    {
+        *iseq2Ptr = (unsigned short)*seq2Ptr++;
+        *iseq3Ptr = (unsigned short)*seq2Ptr;
+        *iseq2Ptr++ |= (WebRtc_Word32)*seq2Ptr++ << 16;
+        *iseq3Ptr++ |= (WebRtc_Word32)*seq2Ptr << 16;
+    }
+
+    if(len % 2)
+    {
+        iseq2[i_len - 1] &= 0x0000ffff;
+        iseq3[i_len - 1] = 0;
+    }
+    else
+    iseq3[i_len - 1] &= 0x0000ffff;
+
+    iseq2[i_len] = 0;
+    iseq3[i_len] = 0;
+    iseq2[i_len + 1] = 0;
+    iseq3[i_len + 1] = 0;
+    iseq2[i_len + 2] = 0;
+    iseq3[i_len + 2] = 0;
+
+    // Set pointer to start value
+    iseq2Ptr = iseq2;
+    iseq3Ptr = iseq3;
+
+    i_len = (dim_seq + 7) >> 3;
+    for (i = 0; i < dim_cross_correlation; i++)
+    {
+
+        iseq1Ptr = iseq1;
+
+        macc40 = 0;
+
+        _WriteCoProcessor(macc40, 0);
+
+        /* Odd lags read the odd-aligned pack, even lags the even one;
+         * each _SmulAddPack_2SW_ACC accumulates two 16x16 products. */
+        if((i & 1))
+        {
+            iseq3Ptr = iseq3 + (i >> 1);
+            for (j = i_len; j > 0; j--)
+            {
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
+            }
+        }
+        else
+        {
+            iseq2Ptr = iseq2 + (i >> 1);
+            for (j = i_len; j > 0; j--)
+            {
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
+                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
+            }
+
+        }
+
+        /* Single final shift of the 40-bit accumulator (contrast with the
+         * per-product shift in the portable path below). */
+        macc40 = _ReadCoProcessor(0);
+        *CrossCorrPtr = (WebRtc_Word32)(macc40 >> right_shifts);
+        CrossCorrPtr += step_seq2;
+    }
+#else // #ifdef _XSCALE_OPT_
+#ifdef _ARM_OPT_
+    WebRtc_Word16 dim_seq8 = (dim_seq >> 3) << 3;  /* Length rounded down to a multiple of 8. */
+#endif
+
+    CrossCorrPtr = cross_correlation;
+
+    for (i = 0; i < dim_cross_correlation; i++)
+    {
+        // Set the pointer to the static vector, set the pointer to the sliding vector
+        // and initialize cross_correlation
+        seq1Ptr = seq1;
+        seq2Ptr = seq2 + (step_seq2 * i);
+        (*CrossCorrPtr) = 0;
+
+#ifndef _ARM_OPT_ 
+#ifdef _WIN32
+#pragma message("NOTE: default implementation is used")
+#endif
+        // Perform the cross correlation
+        for (j = 0; j < dim_seq; j++)
+        {
+            (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr), right_shifts);
+            seq1Ptr++;
+            seq2Ptr++;
+        }
+#else
+#ifdef _WIN32
+#pragma message("NOTE: _ARM_OPT_ optimizations are used")
+#endif
+        /* 8-fold unrolled version; the right_shifts == 0 case avoids the
+         * per-product shift entirely. */
+        if (right_shifts == 0)
+        {
+            // Perform the optimized cross correlation
+            for (j = 0; j < dim_seq8; j = j + 8)
+            {
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+            }
+
+            /* Remaining (dim_seq % 8) samples. */
+            for (j = dim_seq8; j < dim_seq; j++)
+            {
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+                seq1Ptr++;
+                seq2Ptr++;
+            }
+        }
+        else // right_shifts != 0
+
+        {
+            // Perform the optimized cross correlation
+            for (j = 0; j < dim_seq8; j = j + 8)
+            {
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+            }
+
+            /* Remaining (dim_seq % 8) samples. */
+            for (j = dim_seq8; j < dim_seq; j++)
+            {
+                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
+                                                             right_shifts);
+                seq1Ptr++;
+                seq2Ptr++;
+            }
+        }
+#endif
+        CrossCorrPtr++;
+    }
+#endif
+}
diff --git a/trunk/src/common_audio/signal_processing/cross_correlation_neon.s b/trunk/src/common_audio/signal_processing/cross_correlation_neon.s
new file mode 100644
index 0000000..e9b1c69
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/cross_correlation_neon.s
@@ -0,0 +1,168 @@
+@
+@ Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ cross_correlation_neon.s
+@ This file contains the function WebRtcSpl_CrossCorrelation(),
+@ optimized for ARM Neon platform.
+@
+@ Reference C code at end of this file.
+@ Output is bit-exact with the reference C code, but not with the generic
+@ C code in file cross_correlation.c, due to reduction of shift operations
+@ from using Neon registers.
+
+@ Register usage:
+@
+@ r0: *cross_correlation (function argument)
+@ r1: *seq1 (function argument)
+@ r2: *seq2 (function argument)
+@ r3: dim_seq (function argument); then, total iteration of LOOP_DIM_SEQ
+@ r4: counter for LOOP_DIM_CROSS_CORRELATION
+@ r5: seq2_ptr
+@ r6: seq1_ptr
+@ r7: Total iteration of LOOP_DIM_SEQ_RESIDUAL
+@ r8, r9, r10, r11, r12: scratch
+
+.arch armv7-a
+.fpu neon
+
+.align  2
+.global WebRtcSpl_CrossCorrelation
+
+@ Stack-passed arguments: after pushing r4-r11 (8 registers, 32 bytes),
+@ [sp, #32] = dim_cross_correlation, [sp, #36] = right_shifts,
+@ [sp, #40] = step_seq2.
+WebRtcSpl_CrossCorrelation:
+
+.fnstart
+
+.save {r4-r11}
+  push {r4-r11}
+
+  @ Put the shift value (-right_shifts) into a Neon register.
+  @ vshl.s64 shifts right when given a negative count, so negating here
+  @ turns the later vshl into the required right shift.
+  ldrsh r10, [sp, #36]
+  rsb r10, r10, #0
+  mov r8, r10, asr #31        @ Sign-extend to make a 64-bit shift count.
+  vmov.32 d16, r10, r8
+
+  @ Initialize loop counters.
+  and r7, r3, #7              @ inner_loop_len2 = dim_seq % 8;
+  asr r3, r3, #3              @ inner_loop_len1 = dim_seq / 8;
+  ldrsh r4, [sp, #32]         @ dim_cross_correlation
+
+@ One iteration per output value (per lag).
+LOOP_DIM_CROSS_CORRELATION:
+  vmov.i32 q9, #0
+  vmov.i32 q14, #0
+  movs r8, r3                 @ inner_loop_len1
+  mov r6, r1                  @ seq1_ptr
+  mov r5, r2                  @ seq2_ptr
+  ble POST_LOOP_DIM_SEQ
+
+@ Multiply-accumulate 8 sample pairs per pass: vmull.s16 produces 32-bit
+@ products, vpadal.s32 folds them pairwise into 64-bit accumulators.
+LOOP_DIM_SEQ:
+  vld1.16 {d20, d21}, [r6]!   @ seq1_ptr
+  vld1.16 {d22, d23}, [r5]!   @ seq2_ptr
+  subs r8, r8, #1
+  vmull.s16 q12, d20, d22
+  vmull.s16 q13, d21, d23
+  vpadal.s32 q9, q12
+  vpadal.s32 q14, q13
+  bgt LOOP_DIM_SEQ
+
+POST_LOOP_DIM_SEQ:
+  movs r10, r7                @ Loop counter
+  mov r12, #0
+  mov r8, #0
+  ble POST_LOOP_DIM_SEQ_RESIDUAL
+
+@ Handle the remaining 0..7 pairs; accumulate into the 64-bit pair r12:r8
+@ (adds sets carry, adc adds the sign-extended high word).
+LOOP_DIM_SEQ_RESIDUAL:
+  ldrh r11, [r6], #2
+  ldrh r9, [r5], #2
+  smulbb r11, r11, r9
+  adds r8, r8, r11
+  adc r12, r12, r11, asr #31
+  subs r10, #1
+  bgt LOOP_DIM_SEQ_RESIDUAL
+
+POST_LOOP_DIM_SEQ_RESIDUAL:   @ Sum the results up and do the shift.
+  vadd.i64 d18, d19
+  vadd.i64 d28, d29
+  vadd.i64 d18, d28
+  vmov.32 d17[0], r8
+  vmov.32 d17[1], r12
+  vadd.i64 d17, d18
+  vshl.s64 d17, d16           @ Apply -right_shifts (i.e. shift right).
+  vst1.32 d17[0], [r0]!       @ Store the output
+
+  ldr r8, [sp, #40]           @ step_seq2
+  add r2, r8, lsl #1          @ prepare for seq2_ptr(r5) in the next loop.
+
+  subs r4, #1
+  bgt LOOP_DIM_CROSS_CORRELATION
+
+  pop {r4-r11}
+  bx  lr
+
+.fnend
+
+
+@ TODO(kma): Place this piece of reference code into a C code file.
+@ void WebRtcSpl_CrossCorrelation(WebRtc_Word32* cross_correlation,
+@                                 WebRtc_Word16* seq1,
+@                                 WebRtc_Word16* seq2,
+@                                 WebRtc_Word16 dim_seq,
+@                                 WebRtc_Word16 dim_cross_correlation,
+@                                 WebRtc_Word16 right_shifts,
+@                                 WebRtc_Word16 step_seq2) {
+@   int i = 0;
+@   int j = 0;
+@   int inner_loop_len1 = dim_seq >> 3;
+@   int inner_loop_len2 = dim_seq - (inner_loop_len1 << 3);
+@ 
+@   assert(dim_cross_correlation > 0);
+@   assert(dim_seq > 0);
+@ 
+@   for (i = 0; i < dim_cross_correlation; i++) {
+@     int16_t *seq1_ptr = seq1;
+@     int16_t *seq2_ptr = seq2 + (step_seq2 * i);
+@     int64_t sum = 0;
+@ 
+@     for (j = inner_loop_len1; j > 0; j -= 1) {
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@     }
+@ 
+@     // Calculate the rest of the samples.
+@     for (j = inner_loop_len2; j > 0; j -= 1) {
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@     }
+@ 
+@     *cross_correlation++ = (int32_t)(sum >> right_shifts);
+@   }
+@ }
diff --git a/trunk/src/common_audio/signal_processing/division_operations.c b/trunk/src/common_audio/signal_processing/division_operations.c
new file mode 100644
index 0000000..b143373
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/division_operations.c
@@ -0,0 +1,144 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains implementations of the divisions
+ * WebRtcSpl_DivU32U16()
+ * WebRtcSpl_DivW32W16()
+ * WebRtcSpl_DivW32W16ResW16()
+ * WebRtcSpl_DivResultInQ31()
+ * WebRtcSpl_DivW32HiLow()
+ *
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// Unsigned 32-by-16-bit division. Returns num / den, or the saturated
+// value 0xFFFFFFFF when the denominator is zero.
+WebRtc_UWord32 WebRtcSpl_DivU32U16(WebRtc_UWord32 num, WebRtc_UWord16 den)
+{
+    // A zero denominator yields the "all ones" value instead of a
+    // division-by-zero fault.
+    if (den == 0)
+    {
+        return (WebRtc_UWord32)0xFFFFFFFF;
+    }
+    return (WebRtc_UWord32)(num / den);
+}
+
+// Signed 32-by-16-bit division. Returns num / den, or the largest
+// positive 32-bit value (0x7FFFFFFF) when the denominator is zero.
+WebRtc_Word32 WebRtcSpl_DivW32W16(WebRtc_Word32 num, WebRtc_Word16 den)
+{
+    // Guard the zero denominator with a saturated positive result.
+    if (den == 0)
+    {
+        return (WebRtc_Word32)0x7FFFFFFF;
+    }
+    return (WebRtc_Word32)(num / den);
+}
+
+// Signed 32-by-16-bit division with a 16-bit (truncated) result.
+// Returns the low 16 bits of num / den, or 0x7FFF when den is zero.
+WebRtc_Word16 WebRtcSpl_DivW32W16ResW16(WebRtc_Word32 num, WebRtc_Word16 den)
+{
+    // Guard the zero denominator with a saturated positive result.
+    if (den == 0)
+    {
+        return (WebRtc_Word16)0x7FFF;
+    }
+    // Note: the quotient is simply cast down to 16 bits, not saturated.
+    return (WebRtc_Word16)(num / den);
+}
+
+// Signed division returning the quotient in Q31, computed with a
+// 31-step shift-and-subtract long division on the magnitudes.
+WebRtc_Word32 WebRtcSpl_DivResultInQ31(WebRtc_Word32 num, WebRtc_Word32 den)
+{
+    WebRtc_Word32 remainder = num;
+    WebRtc_Word32 divisor = den;
+    WebRtc_Word32 quotient = 0;
+    int bit = 31;
+    int negative_operands = 0;
+
+    if (num == 0)
+        return 0;
+
+    // Work on magnitudes; count how many operands were negative.
+    if (num < 0)
+    {
+        negative_operands++;
+        remainder = -num;
+    }
+    if (den < 0)
+    {
+        negative_operands++;
+        divisor = -den;
+    }
+    // Classic restoring long division: one quotient bit per pass.
+    while (bit--)
+    {
+        quotient <<= 1;
+        remainder <<= 1;
+        if (remainder >= divisor)
+        {
+            remainder -= divisor;
+            quotient++;
+        }
+    }
+    // Exactly one negative operand means a negative quotient.
+    if (negative_operands == 1)
+    {
+        quotient = -quotient;
+    }
+    return quotient;
+}
+
+// Divides |num| by a 32-bit denominator supplied in split "hi/low" form
+// (|den_hi| carries the top 16 bits, |den_low| the following lower bits).
+// Starts from a coarse reciprocal estimate and refines it once via
+// approx * (2.0 - den * approx), then multiplies |num| by the refined
+// reciprocal. Returns the quotient in Q31.
+WebRtc_Word32 WebRtcSpl_DivW32HiLow(WebRtc_Word32 num, WebRtc_Word16 den_hi,
+                                    WebRtc_Word16 den_low)
+{
+    WebRtc_Word16 approx, tmp_hi, tmp_low, num_hi, num_low;
+    WebRtc_Word32 tmpW32;
+
+    approx = (WebRtc_Word16)WebRtcSpl_DivW32W16((WebRtc_Word32)0x1FFFFFFF, den_hi);
+    // result in Q14 (Note: 3FFFFFFF = 0.5 in Q30)
+    // NOTE(review): the numerator in the code is 0x1FFFFFFF, not the
+    // 0x3FFFFFFF cited above -- confirm the intended Q-format.
+
+    // tmpW32 = 1/den = approx * (2.0 - den * approx) (in Q30)
+    tmpW32 = (WEBRTC_SPL_MUL_16_16(den_hi, approx) << 1)
+            + ((WEBRTC_SPL_MUL_16_16(den_low, approx) >> 15) << 1);
+    // tmpW32 = den * approx
+
+    tmpW32 = (WebRtc_Word32)0x7fffffffL - tmpW32; // result in Q30 (tmpW32 = 2.0-(den*approx))
+
+    // Store tmpW32 in hi and low format
+    tmp_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32, 16);
+    tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+
+    // tmpW32 = 1/den in Q29
+    tmpW32 = ((WEBRTC_SPL_MUL_16_16(tmp_hi, approx) + (WEBRTC_SPL_MUL_16_16(tmp_low, approx)
+            >> 15)) << 1);
+
+    // 1/den in hi and low format
+    tmp_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32, 16);
+    tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+
+    // Store num in hi and low format
+    num_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(num, 16);
+    num_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((num
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)num_hi, 16)), 1);
+
+    // num * (1/den) by 32 bit multiplication (result in Q28)
+
+    tmpW32 = (WEBRTC_SPL_MUL_16_16(num_hi, tmp_hi) + (WEBRTC_SPL_MUL_16_16(num_hi, tmp_low)
+            >> 15) + (WEBRTC_SPL_MUL_16_16(num_low, tmp_hi) >> 15));
+
+    // Put result in Q31 (convert from Q28)
+    tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
+
+    return tmpW32;
+}
diff --git a/trunk/src/common_audio/signal_processing/dot_product_with_scale.c b/trunk/src/common_audio/signal_processing/dot_product_with_scale.c
new file mode 100644
index 0000000..6e085fd
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/dot_product_with_scale.c
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_DotProductWithScale().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// Returns the dot product of |vector1| and |vector2| over |length|
+// samples, with every 16x16-bit product right-shifted by |scaling|
+// before it is added to the accumulator.
+WebRtc_Word32 WebRtcSpl_DotProductWithScale(WebRtc_Word16 *vector1, WebRtc_Word16 *vector2,
+                                            int length, int scaling)
+{
+    WebRtc_Word32 sum = 0;
+    int i;
+#ifdef _ARM_OPT_
+#pragma message("NOTE: _ARM_OPT_ optimizations are used")
+    WebRtc_Word16 len4 = (length >> 2) << 2;  // length rounded down to x4.
+#endif
+
+#ifndef _ARM_OPT_
+    // Straightforward accumulation; each product is shifted before adding.
+    for (i = 0; i < length; i++)
+    {
+        sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1++, *vector2++, scaling);
+    }
+#else
+    if (scaling == 0)
+    {
+        // Main part, four samples per outer pass; the shift is skipped
+        // entirely when scaling == 0.
+        for (i = 0; i < len4; i += 4)
+        {
+            int k;
+            for (k = 0; k < 4; k++)
+            {
+                sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
+                vector1++;
+                vector2++;
+            }
+        }
+
+        // The remaining 0..3 samples.
+        for (i = len4; i < length; i++)
+        {
+            sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
+            vector1++;
+            vector2++;
+        }
+    }
+    else
+    {
+        // Same structure as above, with the right shift applied per product.
+        for (i = 0; i < len4; i += 4)
+        {
+            int k;
+            for (k = 0; k < 4; k++)
+            {
+                sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
+                vector1++;
+                vector2++;
+            }
+        }
+
+        for (i = len4; i < length; i++)
+        {
+            sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
+            vector1++;
+            vector2++;
+        }
+    }
+#endif
+
+    return sum;
+}
diff --git a/trunk/src/common_audio/signal_processing/downsample_fast.c b/trunk/src/common_audio/signal_processing/downsample_fast.c
new file mode 100644
index 0000000..526cdca
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/downsample_fast.c
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "signal_processing_library.h"
+
+// TODO(Bjornv): Change the function parameter order to WebRTC code style.
+// FIR-filters and decimates |data_in| by |factor|, producing
+// |data_out_length| saturated Q0 samples in |data_out|. The Q12
+// |coefficients| are applied backwards from each sample position.
+// Returns 0 on success, -1 when the arguments cannot be satisfied.
+int WebRtcSpl_DownsampleFast(const int16_t* data_in,
+                             int data_in_length,
+                             int16_t* data_out,
+                             int data_out_length,
+                             const int16_t* __restrict coefficients,
+                             int coefficients_length,
+                             int factor,
+                             int delay) {
+  int pos = 0;
+  int k = 0;
+  // Index one past the last input sample the filter will read.
+  int endpos = delay + factor * (data_out_length - 1) + 1;
+
+  // Reject empty outputs, empty filters, and inputs too short to reach
+  // the final filtered sample.
+  if (data_out_length <= 0 || coefficients_length <= 0
+                           || data_in_length < endpos) {
+    return -1;
+  }
+
+  for (pos = delay; pos < endpos; pos += factor) {
+    int32_t acc = 2048;  // Rounding term, 0.5 in Q12.
+
+    for (k = 0; k < coefficients_length; k++) {
+      acc += coefficients[k] * data_in[pos - k];  // Q12 accumulation.
+    }
+
+    // Convert back to Q0, saturate to 16 bits, and store.
+    *data_out++ = WebRtcSpl_SatW32ToW16(acc >> 12);
+  }
+
+  return 0;
+}
diff --git a/trunk/src/common_audio/signal_processing/downsample_fast_neon.s b/trunk/src/common_audio/signal_processing/downsample_fast_neon.s
new file mode 100644
index 0000000..906b0a1
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/downsample_fast_neon.s
@@ -0,0 +1,222 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_DownsampleFast(), optimized for
+@ ARM Neon platform. The description header can be found in
+@ signal_processing_library.h
+@
+@ The reference C code is in file downsample_fast.c. Bit-exact.
+
+.arch armv7-a
+.fpu neon
+
+.align  2
+.global WebRtcSpl_DownsampleFast
+
+@ Arguments (AAPCS): r0 = data_in, r1 = data_in_length, r2 = data_out,
+@ r3 = data_out_length. The rest arrive on the stack; after pushing
+@ r4-r11 (32 bytes): [sp, #32] = coefficients, [sp, #36] =
+@ coefficients_length, [sp, #40] = factor, [sp, #44] = delay.
+WebRtcSpl_DownsampleFast:
+
+.fnstart
+
+.save {r4-r11}
+  push {r4-r11}
+
+  cmp r3, #0                                @ data_out_length <= 0?
+  movle r0, #-1
+  ble END
+
+  ldrsh r12, [sp, #44]                      @ delay
+  ldr r5, [sp, #40]                         @ r5: factor
+  add r4, r12, #1                           @ r4: delay + 1
+  sub r3, r3, #1                            @ r3: data_out_length - 1
+  smulbb r3, r5, r3
+  ldr r8, [sp, #32]                         @ &coefficients[0]
+  mov r9, r12                               @ Iteration counter for outer loops.
+  add r3, r4                                @ delay + factor * (out_length-1) +1
+
+  cmp r3, r1                                @ data_in_length < endpos?
+  movgt r0, #-1
+  bgt END
+
+  @ Initializations.
+  sub r3, r5, asl #3
+  add r11, r0, r12, asl #1                  @ &data_in[delay]
+  ldr r0, [sp, #36]                         @ coefficients_length
+  add r3, r5                                @ endpos - factor * 7
+
+  cmp r0, #0                                @ coefficients_length <= 0 ?
+  movle r0, #-1
+  ble END
+
+  add r8, r0, asl #1                        @ &coefficients[coefficients_length]
+  cmp r9, r3
+  bge POST_LOOP_ENDPOS                      @ branch when Iteration < 8 times.
+
+@
+@ First part, unroll the loop 8 times, with 3 subcases (factor == 2, 4, others)
+@
+  mov r4, #-2
+
+  @ Dispatch to the loop specialized for this decimation factor.
+
+  @ r10 is an offset to &data_in[] in the loop. After an iteration, we need to
+  @ move the pointer back to original after advancing 16 bytes by a vld1, and
+  @ then move 2 bytes forward to increment one more sample.
+  cmp r5, #2
+  moveq r10, #-14
+  beq LOOP_ENDPOS_FACTOR2                   @ Branch when factor == 2
+
+  @ Similar here, for r10, we need to move the pointer back to original after
+  @ advancing 32 bytes, then move 2 bytes forward to increment one sample.
+  cmp r5, #4
+  moveq r10, #-30
+  beq LOOP_ENDPOS_FACTOR4                   @ Branch when factor == 4
+
+  @ For r10, we need to move the pointer back to original after advancing
+  @ (factor * 7 * 2) bytes, then move 2 bytes forward to increment one sample.
+  mov r10, r5, asl #4
+  rsb r10, #2
+  add r10, r5, asl #1
+  lsl r5, #1                                @ r5 = factor * sizeof(data_in)
+
+@ The general case (factor != 2 && factor != 4). Eight output samples are
+@ produced per pass, loaded one lane at a time with a stride of r5 bytes.
+LOOP_ENDPOS_GENERAL:
+  @ Initializations.
+  vmov.i32 q2, #2048                        @ Rounding term, 0.5 in Q12.
+  vmov.i32 q3, #2048
+  sub r7, r8, #2
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r1, r11, r12, asl #1                  @ &data_in[i - j]
+
+LOOP_COEFF_LENGTH_GENERAL:
+  vld1.16 {d2[], d3[]}, [r7], r4            @ coefficients[j]
+  vld1.16 d0[0], [r1], r5                   @ data_in[i - j]
+  vld1.16 d0[1], [r1], r5                   @ data_in[i + factor - j]
+  vld1.16 d0[2], [r1], r5                   @ data_in[i + factor * 2 - j]
+  vld1.16 d0[3], [r1], r5                   @ data_in[i + factor * 3 - j]
+  vld1.16 d1[0], [r1], r5                   @ data_in[i + factor * 4 - j]
+  vld1.16 d1[1], [r1], r5                   @ data_in[i + factor * 5 - j]
+  vld1.16 d1[2], [r1], r5                   @ data_in[i + factor * 6 - j]
+  vld1.16 d1[3], [r1], r10                  @ data_in[i + factor * 7 - j]
+  subs r12, #1
+  vmlal.s16 q2, d0, d2
+  vmlal.s16 q3, d1, d3
+  bge LOOP_COEFF_LENGTH_GENERAL
+
+  @ Shift, saturate, and store the result.
+  vqshrn.s32 d0, q2, #12
+  vqshrn.s32 d1, q3, #12
+  vst1.16 {d0, d1}, [r2]!
+
+  add r11, r5, asl #3                       @ r11 -> &data_in[i + factor * 8]
+  add r9, r5, asl #2                        @ Counter i = delay + factor * 8.
+  cmp r9, r3                                @ i < endpos - factor * 7 ?
+  blt LOOP_ENDPOS_GENERAL
+  asr r5, #1                                @ Restore r5 to the value of factor.
+  b POST_LOOP_ENDPOS
+
+@ The case for factor == 2: vld2 de-interleaves, so d0/d2 directly hold
+@ every second sample.
+LOOP_ENDPOS_FACTOR2:
+  @ Initializations.
+  vmov.i32 q2, #2048
+  vmov.i32 q3, #2048
+  sub r7, r8, #2
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r1, r11, r12, asl #1                  @ &data_in[i - j]
+
+LOOP_COEFF_LENGTH_FACTOR2:
+  vld1.16 {d16[], d17[]}, [r7], r4          @ coefficients[j]
+  vld2.16 {d0, d1}, [r1]!                   @ data_in[]
+  vld2.16 {d2, d3}, [r1], r10               @ data_in[]
+  subs r12, #1
+  vmlal.s16 q2, d0, d16
+  vmlal.s16 q3, d2, d17
+  bge LOOP_COEFF_LENGTH_FACTOR2
+
+  @ Shift, saturate, and store the result.
+  vqshrn.s32 d0, q2, #12
+  vqshrn.s32 d1, q3, #12
+  vst1.16 {d0, d1}, [r2]!
+
+  add r11, r5, asl #4                       @ r11 -> &data_in[i + factor * 8]
+  add r9, r5, asl #3                        @ Counter i = delay + factor * 8.
+  cmp r9, r3                                @ i < endpos - factor * 7 ?
+  blt LOOP_ENDPOS_FACTOR2
+  b POST_LOOP_ENDPOS
+
+@ The case for factor == 4: vld4 de-interleaves by four, so d0/d18 hold
+@ every fourth sample.
+LOOP_ENDPOS_FACTOR4:
+  @ Initializations.
+  vmov.i32 q2, #2048
+  vmov.i32 q3, #2048
+  sub r7, r8, #2
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r1, r11, r12, asl #1                  @ &data_in[i - j]
+
+LOOP_COEFF_LENGTH_FACTOR4:
+  vld1.16 {d16[], d17[]}, [r7], r4          @ coefficients[j]
+  vld4.16 {d0, d1, d2, d3}, [r1]!           @ data_in[]
+  vld4.16 {d18, d19, d20, d21}, [r1], r10   @ data_in[]
+  subs r12, #1
+  vmlal.s16 q2, d0, d16
+  vmlal.s16 q3, d18, d17
+  bge LOOP_COEFF_LENGTH_FACTOR4
+
+  @ Shift, saturate, and store the result.
+  vqshrn.s32 d0, q2, #12
+  vqshrn.s32 d1, q3, #12
+  vst1.16 {d0, d1}, [r2]!
+
+  add r11, r5, asl #4                       @ r11 -> &data_in[i + factor * 8]
+  add r9, r5, asl #3                        @ Counter i = delay + factor * 8.
+  cmp r9, r3                                @ i < endpos - factor * 7 ?
+  blt LOOP_ENDPOS_FACTOR4
+
+@
+@ Second part, do the rest iterations (if any).
+@
+
+POST_LOOP_ENDPOS:
+  add r3, r5, asl #3
+  sub r3, r5                                @ Restore r3 to endpos.
+  cmp r9, r3
+  movge r0, #0
+  bge END
+
+@ Scalar tail loop: one output sample per pass, mirroring the C code.
+LOOP2_ENDPOS:
+  @ Initializations.
+  mov r7, r8
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r6, r11, r12, asl #1                  @ &data_in[i - j]
+
+  mov r1, #2048                             @ Rounding term, 0.5 in Q12.
+
+LOOP2_COEFF_LENGTH:
+  ldrsh r4, [r7, #-2]!                      @ coefficients[j]
+  ldrsh r10, [r6], #2                       @ data_in[i - j]
+  smlabb r1, r4, r10, r1
+  subs r12, #1
+  bge LOOP2_COEFF_LENGTH
+
+  @ Shift, saturate, and store the result.
+  ssat r1, #16, r1, asr #12
+  strh r1, [r2], #2
+
+  add r11, r5, asl #1                       @ r11 -> &data_in[i + factor]
+  add r9, r5                                @ Counter i = delay + factor.
+  cmp r9, r3                                @ i < endpos?
+  blt LOOP2_ENDPOS
+
+  mov r0, #0
+
+END:
+  pop {r4-r11}
+  bx  lr
+
+.fnend
diff --git a/trunk/src/common_audio/signal_processing/energy.c b/trunk/src/common_audio/signal_processing/energy.c
new file mode 100644
index 0000000..e8fdf94
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/energy.c
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_Energy().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// Returns the energy (sum of squared samples) of |vector|, with each
+// squared sample right-shifted by the amount obtained from
+// WebRtcSpl_GetScalingSquare(). The shift used is reported back through
+// |scale_factor|.
+WebRtc_Word32 WebRtcSpl_Energy(WebRtc_Word16* vector, int vector_length, int* scale_factor)
+{
+    WebRtc_Word32 energy = 0;
+    int n;
+    int shift = WebRtcSpl_GetScalingSquare(vector, vector_length, vector_length);
+    WebRtc_Word16* sample_ptr = vector;
+
+    for (n = 0; n < vector_length; n++)
+    {
+        energy += WEBRTC_SPL_MUL_16_16_RSFT(*sample_ptr, *sample_ptr, shift);
+        sample_ptr++;
+    }
+    *scale_factor = shift;
+
+    return energy;
+}
diff --git a/trunk/src/common_audio/signal_processing/filter_ar.c b/trunk/src/common_audio/signal_processing/filter_ar.c
new file mode 100644
index 0000000..24e83a6
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/filter_ar.c
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_FilterAR().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// All-pole (AR) filtering of |x| with coefficients |a|, kept in a
+// double-precision "high/low" Q-format: |filtered| holds the Q12 output
+// samples and |filtered_low| the extra low-order bits. |state| and
+// |state_low| carry the filter memory between calls and are updated
+// before returning. Returns the number of samples produced (x_length).
+int WebRtcSpl_FilterAR(G_CONST WebRtc_Word16* a,
+                       int a_length,
+                       G_CONST WebRtc_Word16* x,
+                       int x_length,
+                       WebRtc_Word16* state,
+                       int state_length,
+                       WebRtc_Word16* state_low,
+                       int state_low_length,
+                       WebRtc_Word16* filtered,
+                       WebRtc_Word16* filtered_low,
+                       int filtered_low_length)
+{
+    WebRtc_Word32 o;     // High-part accumulator.
+    WebRtc_Word32 oLOW;  // Low-order-bits accumulator.
+    int i, j, stop;
+    G_CONST WebRtc_Word16* x_ptr = &x[0];
+    WebRtc_Word16* filteredFINAL_ptr = filtered;
+    WebRtc_Word16* filteredFINAL_LOW_ptr = filtered_low;
+
+    for (i = 0; i < x_length; i++)
+    {
+        // Calculate filtered[i] and filtered_low[i]
+        G_CONST WebRtc_Word16* a_ptr = &a[1];
+        WebRtc_Word16* filtered_ptr = &filtered[i - 1];
+        WebRtc_Word16* filtered_low_ptr = &filtered_low[i - 1];
+        WebRtc_Word16* state_ptr = &state[state_length - 1];
+        WebRtc_Word16* state_low_ptr = &state_low[state_length - 1];
+
+        o = (WebRtc_Word32)(*x_ptr++) << 12;
+        oLOW = (WebRtc_Word32)0;
+
+        // Feed back the outputs already produced during this call...
+        stop = (i < a_length) ? i + 1 : a_length;
+        for (j = 1; j < stop; j++)
+        {
+            o -= WEBRTC_SPL_MUL_16_16(*a_ptr, *filtered_ptr--);
+            oLOW -= WEBRTC_SPL_MUL_16_16(*a_ptr++, *filtered_low_ptr--);
+        }
+        // ...then the state saved from the previous call.
+        for (j = i + 1; j < a_length; j++)
+        {
+            o -= WEBRTC_SPL_MUL_16_16(*a_ptr, *state_ptr--);
+            oLOW -= WEBRTC_SPL_MUL_16_16(*a_ptr++, *state_low_ptr--);
+        }
+
+        // Fold in the low bits, round to Q12, and split the result into
+        // its high and low parts.
+        o += (oLOW >> 12);
+        *filteredFINAL_ptr = (WebRtc_Word16)((o + (WebRtc_Word32)2048) >> 12);
+        *filteredFINAL_LOW_ptr++ = (WebRtc_Word16)(o - ((WebRtc_Word32)(*filteredFINAL_ptr++)
+                << 12));
+    }
+
+    // Save the filter state
+    if (x_length >= state_length)
+    {
+        WebRtcSpl_CopyFromEndW16(filtered, x_length, a_length - 1, state);
+        WebRtcSpl_CopyFromEndW16(filtered_low, x_length, a_length - 1, state_low);
+    } else
+    {
+        // Shift the old state down, then append the new output samples.
+        for (i = 0; i < state_length - x_length; i++)
+        {
+            state[i] = state[i + x_length];
+            state_low[i] = state_low[i + x_length];
+        }
+        for (i = 0; i < x_length; i++)
+        {
+            state[state_length - x_length + i] = filtered[i];
+            // Bug fix: this previously wrote filtered_low[i] into state[]
+            // a second time, leaving state_low[] stale.
+            state_low[state_length - x_length + i] = filtered_low[i];
+        }
+    }
+
+    return x_length;
+}
diff --git a/trunk/src/common_audio/signal_processing/filter_ar_fast_q12.c b/trunk/src/common_audio/signal_processing/filter_ar_fast_q12.c
new file mode 100644
index 0000000..0402302
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/filter_ar_fast_q12.c
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+// TODO(bjornv): Change the return type to report errors.
+
+// Direct-form all-pole filtering in Q12:
+//   data_out[n] = (coefficients[0] * data_in[n]
+//                  - sum_k coefficients[k] * data_out[n - k]) >> 12,
+// rounded and saturated to 16 bits. Note that the inner loop reads
+// data_out[n - k] at negative indices for the first samples, so the
+// memory just before |data_out| must hold valid filter state on entry.
+void WebRtcSpl_FilterARFastQ12(const int16_t* data_in,
+                               int16_t* data_out,
+                               const int16_t* __restrict coefficients,
+                               int coefficients_length,
+                               int data_length) {
+  int n = 0;
+  int k = 0;
+
+  assert(data_length > 0);
+  assert(coefficients_length > 1);
+
+  for (n = 0; n < data_length; n++) {
+    int32_t feedback = 0;
+    int32_t output = 0;
+
+    for (k = coefficients_length - 1; k > 0; k--) {
+      feedback += coefficients[k] * data_out[n - k];
+    }
+
+    output = coefficients[0] * data_in[n] - feedback;
+
+    // Saturate and store the output.
+    output = WEBRTC_SPL_SAT(134215679, output, -134217728);
+    data_out[n] = (int16_t)((output + 2048) >> 12);
+  }
+}
+
+
diff --git a/trunk/src/common_audio/signal_processing/filter_ar_fast_q12_armv7.s b/trunk/src/common_audio/signal_processing/filter_ar_fast_q12_armv7.s
new file mode 100644
index 0000000..5591bb8
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/filter_ar_fast_q12_armv7.s
@@ -0,0 +1,223 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_FilterARFastQ12(), optimized for
+@ ARMv7  platform. The description header can be found in
+@ signal_processing_library.h
+@
+@ Output is bit-exact with the generic C code as in filter_ar_fast_q12.c, and
+@ the reference C code at end of this file.
+
+@ Assumptions:
+@ (1) data_length > 0
+@ (2) coefficients_length > 1
+
+@ Register usage:
+@
+@ r0:  &data_in[i]
+@ r1:  &data_out[i], for result output
+@ r2:  &coefficients[0]
+@ r3:  coefficients_length
+@ r4:  Iteration counter for the outer loop.
+@ r5:  data_out[j] as multiplication inputs
+@ r6:  Calculated value for output data_out[]; iteration counter for inner loop
+@ r7:  Partial sum of filtering multiplication results (sum1)
+@ r8:  Partial sum of filtering multiplication results (sum2)
+@ r9:  &data_out[], for filtering input; data_in[i]
+@ r10: coefficients[j]
+@ r11: Scratch
+@ r12: &coefficients[j]
+
+.arch armv7-a
+
+.align  2
+.global WebRtcSpl_FilterARFastQ12
+
+WebRtcSpl_FilterARFastQ12:
+
+.fnstart
+
+.save {r4-r11}
+  push {r4-r11}
+
+  ldrsh r12, [sp, #32]         @ data_length (5th argument, above the 8 saved regs)
+  subs r4, r12, #1
+  beq ODD_LENGTH               @ jump if data_length == 1
+
+LOOP_LENGTH:
+  add r12, r2, r3, lsl #1
+  sub r12, #4                  @ &coefficients[coefficients_length - 2]
+  sub r9, r1, r3, lsl #1
+  add r9, #2                   @ &data_out[i - coefficients_length + 1]
+  ldr r5, [r9], #4             @ data_out[i - coefficients_length + {1,2}]
+
+  mov r7, #0                   @ sum1
+  mov r8, #0                   @ sum2
+  subs r6, r3, #3              @ Iteration counter for inner loop.
+  beq ODD_A_LENGTH             @ branch if coefficients_length == 3
+  blt POST_LOOP_A_LENGTH       @ branch if coefficients_length == 2
+
+LOOP_A_LENGTH:
+  ldr r10, [r12], #-4          @ coefficients[j - 1], coefficients[j]
+  subs r6, #2
+  smlatt r8, r10, r5, r8       @ sum2 += coefficients[j] * data_out[i - j + 1];
+  smlatb r7, r10, r5, r7       @ sum1 += coefficients[j] * data_out[i - j];
+  smlabt r7, r10, r5, r7       @ coefficients[j - 1] * data_out[i - j + 1];
+  ldr r5, [r9], #4             @ data_out[i - j + 2],  data_out[i - j + 3]
+  smlabb r8, r10, r5, r8       @ coefficients[j - 1] * data_out[i - j + 2];
+  bgt LOOP_A_LENGTH
+  blt POST_LOOP_A_LENGTH
+
+ODD_A_LENGTH:
+  ldrsh r10, [r12, #2]         @ Filter coefficients coefficients[2]
+  sub r12, #2                  @ &coefficients[0]
+  smlabb r7, r10, r5, r7       @ sum1 += coefficients[2] * data_out[i - 2];
+  smlabt r8, r10, r5, r8       @ sum2 += coefficients[2] * data_out[i - 1];
+  ldr r5, [r9, #-2]            @ data_out[i - 1],  data_out[i]
+
+POST_LOOP_A_LENGTH:
+  ldr r10, [r12]               @ coefficients[0], coefficients[1]
+  smlatb r7, r10, r5, r7       @ sum1 += coefficients[1] * data_out[i - 1];
+
+  ldr r9, [r0], #4             @ data_in[i], data_in[i + 1]
+  smulbb r6, r10, r9           @ output1 = coefficients[0] * data_in[i];
+  sub r6, r7                   @ output1 -= sum1;
+
+  sbfx r11, r6, #12, #16       @ add the rounding term only when output1 >> 12
+  ssat r7, #16, r6, asr #12    @ already fits in 16 bits (no saturation),
+  cmp r7, r11                  @ matching WEBRTC_SPL_SAT in the C reference
+  addeq r6, r6, #2048
+  ssat r6, #16, r6, asr #12
+  strh r6, [r1], #2            @ Store data_out[i]
+
+  smlatb r8, r10, r6, r8       @ sum2 += coefficients[1] * data_out[i];
+  smulbt r6, r10, r9           @ output2 = coefficients[0] * data_in[i + 1];
+  sub r6, r8                   @ output2 -= sum2;
+
+  sbfx r11, r6, #12, #16
+  ssat r7, #16, r6, asr #12
+  cmp r7, r11
+  addeq r6, r6, #2048
+  ssat r6, #16, r6, asr #12
+  strh r6, [r1], #2            @ Store data_out[i + 1]
+
+  subs r4, #2
+  bgt LOOP_LENGTH
+  blt END                      @ For even data_length, it's done. Jump to END.
+
+@ Process i = data_length -1, for the case of an odd length.
+ODD_LENGTH:
+  add r12, r2, r3, lsl #1
+  sub r12, #4                  @ &coefficients[coefficients_length - 2]
+  sub r9, r1, r3, lsl #1
+  add r9, #2                   @ &data_out[i - coefficients_length + 1]
+  mov r7, #0                   @ sum1, first partial accumulator
+  mov r8, #0                   @ sum1, second partial accumulator
+  subs r6, r3, #2              @ inner loop counter
+  beq EVEN_A_LENGTH            @ branch if coefficients_length == 2
+
+LOOP2_A_LENGTH:
+  ldr r10, [r12], #-4          @ coefficients[j - 1], coefficients[j]
+  ldr r5, [r9], #4             @ data_out[i - j],  data_out[i - j + 1]
+  subs r6, #2
+  smlatb r7, r10, r5, r7       @ sum1 += coefficients[j] * data_out[i - j];
+  smlabt r8, r10, r5, r8       @ coefficients[j - 1] * data_out[i - j + 1];
+  bgt LOOP2_A_LENGTH
+  addlt r12, #2
+  blt POST_LOOP2_A_LENGTH
+
+EVEN_A_LENGTH:
+  ldrsh r10, [r12, #2]         @ Filter coefficients coefficients[1]
+  ldrsh r5, [r9]               @ data_out[i - 1]
+  smlabb r7, r10, r5, r7       @ sum1 += coefficients[1] * data_out[i - 1];
+
+POST_LOOP2_A_LENGTH:
+  ldrsh r10, [r12]             @ Filter coefficients coefficients[0]
+  ldrsh r9, [r0]               @ data_in[i]
+  smulbb r6, r10, r9           @ output1 = coefficients[0] * data_in[i];
+  sub r6, r7                   @ output1 -= sum1 (first partial);
+  sub r6, r8                   @ output1 -= sum1 (second partial);
+  sbfx r8, r6, #12, #16
+  ssat r7, #16, r6, asr #12
+  cmp r7, r8
+  addeq r6, r6, #2048
+  ssat r6, #16, r6, asr #12
+  strh r6, [r1]                @ Store the data_out[i]
+
+END:
+  pop {r4-r11}
+  bx  lr
+
+.fnend
+
+
+@Reference C code:
+@
+@void WebRtcSpl_FilterARFastQ12(int16_t* data_in,
+@                               int16_t* data_out,
+@                               int16_t* __restrict coefficients,
+@                               int coefficients_length,
+@                               int data_length) {
+@  int i = 0;
+@  int j = 0;
+@
+@  for (i = 0; i < data_length - 1; i += 2) {
+@    int32_t output1 = 0;
+@    int32_t sum1 = 0;
+@    int32_t output2 = 0;
+@    int32_t sum2 = 0;
+@
+@    for (j = coefficients_length - 1; j > 2; j -= 2) {
+@      sum1 += coefficients[j]      * data_out[i - j];
+@      sum1 += coefficients[j - 1]  * data_out[i - j + 1];
+@      sum2 += coefficients[j]     * data_out[i - j + 1];
+@      sum2 += coefficients[j - 1] * data_out[i - j + 2];
+@    }
+@
+@    if (j == 2) {
+@      sum1 += coefficients[2] * data_out[i - 2];
+@      sum2 += coefficients[2] * data_out[i - 1];
+@    }
+@
+@    sum1 += coefficients[1] * data_out[i - 1];
+@    output1 = coefficients[0] * data_in[i];
+@    output1 -= sum1;
+@    // Saturate and store the output.
+@    output1 = WEBRTC_SPL_SAT(134215679, output1, -134217728);
+@    data_out[i] = (int16_t)((output1 + 2048) >> 12);
+@
+@    sum2 += coefficients[1] * data_out[i];
+@    output2 = coefficients[0] * data_in[i + 1];
+@    output2 -= sum2;
+@    // Saturate and store the output.
+@    output2 = WEBRTC_SPL_SAT(134215679, output2, -134217728);
+@    data_out[i + 1] = (int16_t)((output2 + 2048) >> 12);
+@  }
+@
+@  if (i == data_length - 1) {
+@    int32_t output1 = 0;
+@    int32_t sum1 = 0;
+@
+@    for (j = coefficients_length - 1; j > 1; j -= 2) {
+@      sum1 += coefficients[j]      * data_out[i - j];
+@      sum1 += coefficients[j - 1]  * data_out[i - j + 1];
+@    }
+@
+@    if (j == 1) {
+@      sum1 += coefficients[1] * data_out[i - 1];
+@    }
+@
+@    output1 = coefficients[0] * data_in[i];
+@    output1 -= sum1;
+@    // Saturate and store the output.
+@    output1 = WEBRTC_SPL_SAT(134215679, output1, -134217728);
+@    data_out[i] = (int16_t)((output1 + 2048) >> 12);
+@  }
+@}
diff --git a/trunk/src/common_audio/signal_processing/filter_ma_fast_q12.c b/trunk/src/common_audio/signal_processing/filter_ma_fast_q12.c
new file mode 100644
index 0000000..19ad9b1
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/filter_ma_fast_q12.c
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_FilterMAFastQ12().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// All-zero (MA/FIR) filtering with Q12 coefficients:
+//   out_ptr[i] = (sum_{j=0..B_length-1} B[j] * in_ptr[i - j] + 2048) >> 12,
+// saturated to the Word16 range.
+// NOTE(review): for i < B_length - 1 this reads in_ptr[] at negative offsets,
+// so the caller appears to be required to place B_length - 1 samples of input
+// history immediately before in_ptr[0] -- confirm against callers.
+void WebRtcSpl_FilterMAFastQ12(WebRtc_Word16* in_ptr,
+                               WebRtc_Word16* out_ptr,
+                               WebRtc_Word16* B,
+                               WebRtc_Word16 B_length,
+                               WebRtc_Word16 length)
+{
+    WebRtc_Word32 o;
+    int i, j;
+    for (i = 0; i < length; i++)
+    {
+        G_CONST WebRtc_Word16* b_ptr = &B[0];
+        G_CONST WebRtc_Word16* x_ptr = &in_ptr[i];   // walks backwards through the input
+
+        o = (WebRtc_Word32)0;
+
+        for (j = 0; j < B_length; j++)
+        {
+            o += WEBRTC_SPL_MUL_16_16(*b_ptr++, *x_ptr--);
+        }
+
+        // Saturate so that, after the +2048 rounding and >> 12 below, the
+        // result still fits in Word16:
+        // 134215679 = (32767 << 12) + 2047; -134217728 = -32768 << 12.
+        o = WEBRTC_SPL_SAT((WebRtc_Word32)134215679, o, (WebRtc_Word32)-134217728);
+
+        *out_ptr++ = (WebRtc_Word16)((o + (WebRtc_Word32)2048) >> 12);
+    }
+    return;
+}
diff --git a/trunk/src/common_audio/signal_processing/get_hanning_window.c b/trunk/src/common_audio/signal_processing/get_hanning_window.c
new file mode 100644
index 0000000..6d67e60
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/get_hanning_window.c
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_GetHanningWindow().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// Hanning table with 256 entries. Values rise monotonically from 1 to 16384
+// (the rising half of a Hanning window; 16384 presumably represents 1.0 in
+// Q14 -- confirm against callers).
+static const WebRtc_Word16 kHanningTable[] = {
+    1,      2,      6,     10,     15,     22,     30,     39,
+   50,     62,     75,     89,    104,    121,    138,    157,
+  178,    199,    222,    246,    271,    297,    324,    353,
+  383,    413,    446,    479,    513,    549,    586,    624,
+  663,    703,    744,    787,    830,    875,    920,    967,
+ 1015,   1064,   1114,   1165,   1218,   1271,   1325,   1381,
+ 1437,   1494,   1553,   1612,   1673,   1734,   1796,   1859,
+ 1924,   1989,   2055,   2122,   2190,   2259,   2329,   2399,
+ 2471,   2543,   2617,   2691,   2765,   2841,   2918,   2995,
+ 3073,   3152,   3232,   3312,   3393,   3475,   3558,   3641,
+ 3725,   3809,   3895,   3980,   4067,   4154,   4242,   4330,
+ 4419,   4509,   4599,   4689,   4781,   4872,   4964,   5057,
+ 5150,   5244,   5338,   5432,   5527,   5622,   5718,   5814,
+ 5910,   6007,   6104,   6202,   6299,   6397,   6495,   6594,
+ 6693,   6791,   6891,   6990,   7090,   7189,   7289,   7389,
+ 7489,   7589,   7690,   7790,   7890,   7991,   8091,   8192,
+ 8293,   8393,   8494,   8594,   8694,   8795,   8895,   8995,
+ 9095,   9195,   9294,   9394,   9493,   9593,   9691,   9790,
+ 9889,   9987,  10085,  10182,  10280,  10377,  10474,  10570,
+10666,  10762,  10857,  10952,  11046,  11140,  11234,  11327,
+11420,  11512,  11603,  11695,  11785,  11875,  11965,  12054,
+12142,  12230,  12317,  12404,  12489,  12575,  12659,  12743,
+12826,  12909,  12991,  13072,  13152,  13232,  13311,  13389,
+13466,  13543,  13619,  13693,  13767,  13841,  13913,  13985,
+14055,  14125,  14194,  14262,  14329,  14395,  14460,  14525,
+14588,  14650,  14711,  14772,  14831,  14890,  14947,  15003,
+15059,  15113,  15166,  15219,  15270,  15320,  15369,  15417,
+15464,  15509,  15554,  15597,  15640,  15681,  15721,  15760,
+15798,  15835,  15871,  15905,  15938,  15971,  16001,  16031,
+16060,  16087,  16113,  16138,  16162,  16185,  16206,  16227,
+16246,  16263,  16280,  16295,  16309,  16322,  16334,  16345,
+16354,  16362,  16369,  16374,  16378,  16382,  16383,  16384
+};
+
+// Fills v[0..size-1] by stepping through kHanningTable with a fixed-point
+// phase: the per-sample step is 0x40000000 / size and the table index is
+// (phase >> 22), i.e. the total phase sweep maps onto the 256 table entries.
+// The negative initial phase (-0x200000 for size < 513, -0x100000 otherwise)
+// offsets the sample points within the table steps -- presumably to center
+// them and avoid overshooting the last entry; confirm against callers.
+// NOTE(review): size must be >= 1 (it is used as a divisor), and for
+// size > 1024 the first index (factor - 0x100000) >> 22 can be negative,
+// reading before the table; callers apparently use sizes <= 512 -- confirm.
+void WebRtcSpl_GetHanningWindow(WebRtc_Word16 *v, WebRtc_Word16 size)
+{
+    int jj;
+    WebRtc_Word16 *vptr1;
+
+    WebRtc_Word32 index;
+    WebRtc_Word32 factor = ((WebRtc_Word32)0x40000000);
+
+    factor = WebRtcSpl_DivW32W16(factor, size);
+    if (size < 513)
+        index = (WebRtc_Word32)-0x200000;
+    else
+        index = (WebRtc_Word32)-0x100000;
+    vptr1 = v;
+
+    for (jj = 0; jj < size; jj++)
+    {
+        index += factor;
+        (*vptr1++) = kHanningTable[index >> 22];
+    }
+
+}
diff --git a/trunk/src/common_audio/signal_processing/get_scaling_square.c b/trunk/src/common_audio/signal_processing/get_scaling_square.c
new file mode 100644
index 0000000..dccbf33
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/get_scaling_square.c
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_GetScalingSquare().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// Returns the number of right shifts needed on in_vector so that `times`
+// accumulated squares of its (shifted) elements fit in a Word32 without
+// overflow; 0 if no scaling is needed.
+int WebRtcSpl_GetScalingSquare(WebRtc_Word16 *in_vector, int in_vector_length, int times)
+{
+    int nbits = WebRtcSpl_GetSizeInBits(times);  // bits needed to hold `times` additions
+    int i;
+    WebRtc_Word16 smax = -1;                     // running max of |in_vector[i]|
+    WebRtc_Word16 sabs;
+    WebRtc_Word16 *sptr = in_vector;
+    int t;
+    int looptimes = in_vector_length;
+
+    // Find the largest absolute sample value.
+    // NOTE(review): if *sptr == -32768 the negation overflows Word16 and
+    // yields -32768 again, so such a sample never raises smax -- confirm
+    // whether callers can pass WEBRTC_SPL_WORD16_MIN.
+    for (i = looptimes; i > 0; i--)
+    {
+        sabs = (*sptr > 0 ? *sptr++ : -*sptr++);
+        smax = (sabs > smax ? sabs : smax);
+    }
+    // Headroom (leading redundant sign bits) in smax^2.
+    t = WebRtcSpl_NormW32(WEBRTC_SPL_MUL(smax, smax));
+
+    if (smax == 0)
+    {
+        return 0; // Since norm(0) returns 0
+    } else
+    {
+        return (t > nbits) ? 0 : nbits - t;
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/ilbc_specific_functions.c b/trunk/src/common_audio/signal_processing/ilbc_specific_functions.c
new file mode 100644
index 0000000..5a9e577
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/ilbc_specific_functions.c
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains implementations of the iLBC specific functions
+ * WebRtcSpl_ScaleAndAddVectorsWithRound()
+ * WebRtcSpl_ReverseOrderMultArrayElements()
+ * WebRtcSpl_ElementwiseVectorMult()
+ * WebRtcSpl_AddVectorsAndShift()
+ * WebRtcSpl_AddAffineVectorToVector()
+ * WebRtcSpl_AffineTransformVector()
+ *
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// out[i] = (vector1[i] * scale1 + vector2[i] * scale2 + round) >> right_shifts,
+// where round = 2^(right_shifts - 1) (0 when right_shifts == 0), truncated to
+// Word16 without saturation.
+void WebRtcSpl_ScaleAndAddVectorsWithRound(WebRtc_Word16 *vector1, WebRtc_Word16 scale1,
+                                           WebRtc_Word16 *vector2, WebRtc_Word16 scale2,
+                                           WebRtc_Word16 right_shifts, WebRtc_Word16 *out,
+                                           WebRtc_Word16 vector_length)
+{
+    int i;
+    WebRtc_Word16 roundVal;
+    roundVal = 1 << right_shifts;   // rounding term: half of the shift divisor
+    roundVal = roundVal >> 1;
+    for (i = 0; i < vector_length; i++)
+    {
+        out[i] = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(vector1[i], scale1)
+                + WEBRTC_SPL_MUL_16_16(vector2[i], scale2) + roundVal) >> right_shifts);
+    }
+}
+
+// out[i] = (in[i] * win[-i]) >> right_shifts, truncated to Word16 without
+// saturation. The window pointer is decremented each step, so `win`
+// presumably points at the LAST element of the window -- confirm against
+// callers.
+void WebRtcSpl_ReverseOrderMultArrayElements(WebRtc_Word16 *out, G_CONST WebRtc_Word16 *in,
+                                             G_CONST WebRtc_Word16 *win,
+                                             WebRtc_Word16 vector_length,
+                                             WebRtc_Word16 right_shifts)
+{
+    int i;
+    WebRtc_Word16 *outptr = out;
+    G_CONST WebRtc_Word16 *inptr = in;
+    G_CONST WebRtc_Word16 *winptr = win;
+    for (i = 0; i < vector_length; i++)
+    {
+        (*outptr++) = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(*inptr++,
+                                                               *winptr--, right_shifts);
+    }
+}
+
+// Element-wise windowing: out[i] = (in[i] * win[i]) >> right_shifts,
+// truncated to Word16 without saturation.
+void WebRtcSpl_ElementwiseVectorMult(WebRtc_Word16 *out, G_CONST WebRtc_Word16 *in,
+                                     G_CONST WebRtc_Word16 *win, WebRtc_Word16 vector_length,
+                                     WebRtc_Word16 right_shifts)
+{
+    int i;
+    WebRtc_Word16 *outptr = out;
+    G_CONST WebRtc_Word16 *inptr = in;
+    G_CONST WebRtc_Word16 *winptr = win;
+    for (i = 0; i < vector_length; i++)
+    {
+        (*outptr++) = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(*inptr++,
+                                                               *winptr++, right_shifts);
+    }
+}
+
+// out[i] = (in1[i] + in2[i]) >> right_shifts, truncated to Word16. The sum is
+// formed in int (after integer promotion), so no 16-bit overflow occurs
+// before the shift.
+void WebRtcSpl_AddVectorsAndShift(WebRtc_Word16 *out, G_CONST WebRtc_Word16 *in1,
+                                  G_CONST WebRtc_Word16 *in2, WebRtc_Word16 vector_length,
+                                  WebRtc_Word16 right_shifts)
+{
+    int i;
+    WebRtc_Word16 *outptr = out;
+    G_CONST WebRtc_Word16 *in1ptr = in1;
+    G_CONST WebRtc_Word16 *in2ptr = in2;
+    for (i = vector_length; i > 0; i--)
+    {
+        (*outptr++) = (WebRtc_Word16)(((*in1ptr++) + (*in2ptr++)) >> right_shifts);
+    }
+}
+
+// In-place accumulation:
+//   out[i] += (in[i] * gain + add_constant) >> right_shifts,
+// truncated to Word16 without saturation.
+void WebRtcSpl_AddAffineVectorToVector(WebRtc_Word16 *out, WebRtc_Word16 *in,
+                                       WebRtc_Word16 gain, WebRtc_Word32 add_constant,
+                                       WebRtc_Word16 right_shifts, int vector_length)
+{
+    WebRtc_Word16 *inPtr;
+    WebRtc_Word16 *outPtr;
+    int i;
+
+    inPtr = in;
+    outPtr = out;
+    for (i = 0; i < vector_length; i++)
+    {
+        (*outPtr++) += (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16((*inPtr++), gain)
+                + (WebRtc_Word32)add_constant) >> right_shifts);
+    }
+}
+
+// out[i] = (in[i] * gain + add_constant) >> right_shifts, truncated to Word16
+// without saturation. Same affine transform as
+// WebRtcSpl_AddAffineVectorToVector, but overwrites out[] instead of
+// accumulating into it.
+void WebRtcSpl_AffineTransformVector(WebRtc_Word16 *out, WebRtc_Word16 *in,
+                                     WebRtc_Word16 gain, WebRtc_Word32 add_constant,
+                                     WebRtc_Word16 right_shifts, int vector_length)
+{
+    WebRtc_Word16 *inPtr;
+    WebRtc_Word16 *outPtr;
+    int i;
+
+    inPtr = in;
+    outPtr = out;
+    for (i = 0; i < vector_length; i++)
+    {
+        (*outPtr++) = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16((*inPtr++), gain)
+                + (WebRtc_Word32)add_constant) >> right_shifts);
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/include/signal_processing_library.h b/trunk/src/common_audio/signal_processing/include/signal_processing_library.h
new file mode 100644
index 0000000..87b8f29
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/include/signal_processing_library.h
@@ -0,0 +1,1667 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This header file includes all of the fix point signal processing library (SPL) function
+ * descriptions and declarations.
+ * For specific function calls, see bottom of file.
+ */
+
+#ifndef WEBRTC_SPL_SIGNAL_PROCESSING_LIBRARY_H_
+#define WEBRTC_SPL_SIGNAL_PROCESSING_LIBRARY_H_
+
+#include <string.h>
+#include "typedefs.h"
+
+#ifdef ARM_WINM
+#include <Armintr.h> // intrinsic file for windows mobile
+#endif
+
+// Macros specific for the fixed point implementation
+#define WEBRTC_SPL_WORD16_MAX       32767
+#define WEBRTC_SPL_WORD16_MIN       -32768
+#define WEBRTC_SPL_WORD32_MAX       (WebRtc_Word32)0x7fffffff
+#define WEBRTC_SPL_WORD32_MIN       (WebRtc_Word32)0x80000000
+#define WEBRTC_SPL_MAX_LPC_ORDER    14
+#define WEBRTC_SPL_MAX_SEED_USED    0x80000000L
+#define WEBRTC_SPL_MIN(A, B)        (A < B ? A : B) // Get min value
+#define WEBRTC_SPL_MAX(A, B)        (A > B ? A : B) // Get max value
+#define WEBRTC_SPL_ABS_W16(a) \
+    (((WebRtc_Word16)a >= 0) ? ((WebRtc_Word16)a) : -((WebRtc_Word16)a))
+#define WEBRTC_SPL_ABS_W32(a) \
+    (((WebRtc_Word32)a >= 0) ? ((WebRtc_Word32)a) : -((WebRtc_Word32)a))
+
+#if (defined WEBRTC_TARGET_PC)||(defined __TARGET_XSCALE)
+#define WEBRTC_SPL_GET_BYTE(a, nr)  (((WebRtc_Word8 *)a)[nr])
+#define WEBRTC_SPL_SET_BYTE(d_ptr, val, index) \
+    (((WebRtc_Word8 *)d_ptr)[index] = (val))
+#elif defined WEBRTC_BIG_ENDIAN
+#define WEBRTC_SPL_GET_BYTE(a, nr) \
+    ((((WebRtc_Word16 *)a)[nr >> 1]) >> (((nr + 1) & 0x1) * 8) & 0x00ff)
+#define WEBRTC_SPL_SET_BYTE(d_ptr, val, index) \
+    ((WebRtc_Word16 *)d_ptr)[index >> 1] = \
+    ((((WebRtc_Word16 *)d_ptr)[index >> 1]) \
+    & (0x00ff << (8 * ((index) & 0x1)))) | (val << (8 * ((index + 1) & 0x1)))
+#else
+#define WEBRTC_SPL_GET_BYTE(a,nr) \
+    ((((WebRtc_Word16 *)(a))[(nr) >> 1]) >> (((nr) & 0x1) * 8) & 0x00ff)
+#define WEBRTC_SPL_SET_BYTE(d_ptr, val, index) \
+    ((WebRtc_Word16 *)(d_ptr))[(index) >> 1] = \
+    ((((WebRtc_Word16 *)(d_ptr))[(index) >> 1]) \
+    & (0x00ff << (8 * (((index) + 1) & 0x1)))) | \
+    ((val) << (8 * ((index) & 0x1)))
+#endif
+
+#define WEBRTC_SPL_MUL(a, b) \
+    ((WebRtc_Word32) ((WebRtc_Word32)(a) * (WebRtc_Word32)(b)))
+#define WEBRTC_SPL_UMUL(a, b) \
+    ((WebRtc_UWord32) ((WebRtc_UWord32)(a) * (WebRtc_UWord32)(b)))
+#define WEBRTC_SPL_UMUL_RSFT16(a, b) \
+    ((WebRtc_UWord32) ((WebRtc_UWord32)(a) * (WebRtc_UWord32)(b)) >> 16)
+#define WEBRTC_SPL_UMUL_16_16(a, b) \
+    ((WebRtc_UWord32) (WebRtc_UWord16)(a) * (WebRtc_UWord16)(b))
+#define WEBRTC_SPL_UMUL_16_16_RSFT16(a, b) \
+    (((WebRtc_UWord32) (WebRtc_UWord16)(a) * (WebRtc_UWord16)(b)) >> 16)
+#define WEBRTC_SPL_UMUL_32_16(a, b) \
+    ((WebRtc_UWord32) ((WebRtc_UWord32)(a) * (WebRtc_UWord16)(b)))
+#define WEBRTC_SPL_UMUL_32_16_RSFT16(a, b) \
+    ((WebRtc_UWord32) ((WebRtc_UWord32)(a) * (WebRtc_UWord16)(b)) >> 16)
+#define WEBRTC_SPL_MUL_16_U16(a, b) \
+    ((WebRtc_Word32)(WebRtc_Word16)(a) * (WebRtc_UWord16)(b))
+#define WEBRTC_SPL_DIV(a, b) \
+    ((WebRtc_Word32) ((WebRtc_Word32)(a) / (WebRtc_Word32)(b)))
+#define WEBRTC_SPL_UDIV(a, b) \
+    ((WebRtc_UWord32) ((WebRtc_UWord32)(a) / (WebRtc_UWord32)(b)))
+
+#ifndef WEBRTC_ARCH_ARM_V7A
+// For ARMv7 platforms, these are inline functions in spl_inl_armv7.h
+#define WEBRTC_SPL_MUL_16_16(a, b) \
+    ((WebRtc_Word32) (((WebRtc_Word16)(a)) * ((WebRtc_Word16)(b))))
+#define WEBRTC_SPL_MUL_16_32_RSFT16(a, b) \
+    (WEBRTC_SPL_MUL_16_16(a, b >> 16) \
+     + ((WEBRTC_SPL_MUL_16_16(a, (b & 0xffff) >> 1) + 0x4000) >> 15))
+#define WEBRTC_SPL_MUL_32_32_RSFT32(a32a, a32b, b32) \
+    ((WebRtc_Word32)(WEBRTC_SPL_MUL_16_32_RSFT16(a32a, b32) \
+    + (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32) >> 16)))
+#define WEBRTC_SPL_MUL_32_32_RSFT32BI(a32, b32) \
+    ((WebRtc_Word32)(WEBRTC_SPL_MUL_16_32_RSFT16(( \
+    (WebRtc_Word16)(a32 >> 16)), b32) + \
+    (WEBRTC_SPL_MUL_16_32_RSFT16(( \
+    (WebRtc_Word16)((a32 & 0x0000FFFF) >> 1)), b32) >> 15)))
+#endif
+
+#define WEBRTC_SPL_MUL_16_32_RSFT11(a, b) \
+    ((WEBRTC_SPL_MUL_16_16(a, (b) >> 16) << 5) \
+    + (((WEBRTC_SPL_MUL_16_U16(a, (WebRtc_UWord16)(b)) >> 1) + 0x0200) >> 10))
+#define WEBRTC_SPL_MUL_16_32_RSFT14(a, b) \
+    ((WEBRTC_SPL_MUL_16_16(a, (b) >> 16) << 2) \
+    + (((WEBRTC_SPL_MUL_16_U16(a, (WebRtc_UWord16)(b)) >> 1) + 0x1000) >> 13))
+#define WEBRTC_SPL_MUL_16_32_RSFT15(a, b) \
+    ((WEBRTC_SPL_MUL_16_16(a, (b) >> 16) << 1) \
+    + (((WEBRTC_SPL_MUL_16_U16(a, (WebRtc_UWord16)(b)) >> 1) + 0x2000) >> 14))
+
+#ifdef ARM_WINM
+#define WEBRTC_SPL_MUL_16_16(a, b) \
+    _SmulLo_SW_SL((WebRtc_Word16)(a), (WebRtc_Word16)(b))
+#endif
+
+#define WEBRTC_SPL_MUL_16_16_RSFT(a, b, c) \
+    (WEBRTC_SPL_MUL_16_16(a, b) >> (c))
+
+#define WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(a, b, c) \
+    ((WEBRTC_SPL_MUL_16_16(a, b) + ((WebRtc_Word32) \
+                                  (((WebRtc_Word32)1) << ((c) - 1)))) >> (c))
+#define WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(a, b) \
+    ((WEBRTC_SPL_MUL_16_16(a, b) + ((WebRtc_Word32) (1 << 14))) >> 15)
+
+// C + the 32 most significant bits of A * B
+#define WEBRTC_SPL_SCALEDIFF32(A, B, C) \
+    (C + (B >> 16) * A + (((WebRtc_UWord32)(0x0000FFFF & B) * A) >> 16))
+
+#define WEBRTC_SPL_ADD_SAT_W32(a, b)    WebRtcSpl_AddSatW32(a, b)
+#define WEBRTC_SPL_SAT(a, b, c)         (b > a ? a : b < c ? c : b)
+#define WEBRTC_SPL_MUL_32_16(a, b)      ((a) * (b))
+
+#define WEBRTC_SPL_SUB_SAT_W32(a, b)    WebRtcSpl_SubSatW32(a, b)
+#define WEBRTC_SPL_ADD_SAT_W16(a, b)    WebRtcSpl_AddSatW16(a, b)
+#define WEBRTC_SPL_SUB_SAT_W16(a, b)    WebRtcSpl_SubSatW16(a, b)
+
+// We cannot do casting here due to signed/unsigned problem
+#define WEBRTC_SPL_IS_NEG(a)            ((a) & 0x80000000)
+// Shifting with negative numbers allowed
+// Positive means left shift
+#define WEBRTC_SPL_SHIFT_W16(x, c) \
+    (((c) >= 0) ? ((x) << (c)) : ((x) >> (-(c))))
+#define WEBRTC_SPL_SHIFT_W32(x, c) \
+    (((c) >= 0) ? ((x) << (c)) : ((x) >> (-(c))))
+
+// Shifting with negative numbers not allowed
+// We cannot do casting here due to signed/unsigned problem
+#define WEBRTC_SPL_RSHIFT_W16(x, c)     ((x) >> (c))
+#define WEBRTC_SPL_LSHIFT_W16(x, c)     ((x) << (c))
+#define WEBRTC_SPL_RSHIFT_W32(x, c)     ((x) >> (c))
+#define WEBRTC_SPL_LSHIFT_W32(x, c)     ((x) << (c))
+
+#define WEBRTC_SPL_RSHIFT_U16(x, c)     ((WebRtc_UWord16)(x) >> (c))
+#define WEBRTC_SPL_LSHIFT_U16(x, c)     ((WebRtc_UWord16)(x) << (c))
+#define WEBRTC_SPL_RSHIFT_U32(x, c)     ((WebRtc_UWord32)(x) >> (c))
+#define WEBRTC_SPL_LSHIFT_U32(x, c)     ((WebRtc_UWord32)(x) << (c))
+
+#define WEBRTC_SPL_VNEW(t, n)           (t *) malloc (sizeof (t) * (n))
+#define WEBRTC_SPL_FREE                 free
+
+#define WEBRTC_SPL_RAND(a) \
+    ((WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT((a), 18816, 7) & 0x00007fff))
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+#define WEBRTC_SPL_MEMCPY_W8(v1, v2, length) \
+   memcpy(v1, v2, (length) * sizeof(char))
+#define WEBRTC_SPL_MEMCPY_W16(v1, v2, length) \
+   memcpy(v1, v2, (length) * sizeof(WebRtc_Word16))
+
+#define WEBRTC_SPL_MEMMOVE_W16(v1, v2, length) \
+   memmove(v1, v2, (length) * sizeof(WebRtc_Word16))
+
+// inline functions:
+#include "spl_inl.h"
+
+// Get SPL Version
+WebRtc_Word16 WebRtcSpl_get_version(char* version,
+                                    WebRtc_Word16 length_in_bytes);
+
+int WebRtcSpl_GetScalingSquare(WebRtc_Word16* in_vector,
+                               int in_vector_length,
+                               int times);
+
+// Copy and set operations. Implementation in copy_set_operations.c.
+// Descriptions at bottom of file.
+void WebRtcSpl_MemSetW16(WebRtc_Word16* vector,
+                         WebRtc_Word16 set_value,
+                         int vector_length);
+void WebRtcSpl_MemSetW32(WebRtc_Word32* vector,
+                         WebRtc_Word32 set_value,
+                         int vector_length);
+void WebRtcSpl_MemCpyReversedOrder(WebRtc_Word16* out_vector,
+                                   WebRtc_Word16* in_vector,
+                                   int vector_length);
+WebRtc_Word16 WebRtcSpl_CopyFromEndW16(G_CONST WebRtc_Word16* in_vector,
+                                       WebRtc_Word16 in_vector_length,
+                                       WebRtc_Word16 samples,
+                                       WebRtc_Word16* out_vector);
+WebRtc_Word16 WebRtcSpl_ZerosArrayW16(WebRtc_Word16* vector,
+                                      WebRtc_Word16 vector_length);
+WebRtc_Word16 WebRtcSpl_ZerosArrayW32(WebRtc_Word32* vector,
+                                      WebRtc_Word16 vector_length);
+WebRtc_Word16 WebRtcSpl_OnesArrayW16(WebRtc_Word16* vector,
+                                     WebRtc_Word16 vector_length);
+WebRtc_Word16 WebRtcSpl_OnesArrayW32(WebRtc_Word32* vector,
+                                     WebRtc_Word16 vector_length);
+// End: Copy and set operations.
+
+// Minimum and maximum operations. Implementation in min_max_operations.c.
+// Descriptions at bottom of file.
+WebRtc_Word16 WebRtcSpl_MaxAbsValueW16(const WebRtc_Word16* vector,
+                                       WebRtc_Word16 length);
+WebRtc_Word32 WebRtcSpl_MaxAbsValueW32(G_CONST WebRtc_Word32* vector,
+                                       WebRtc_Word16 length);
+WebRtc_Word16 WebRtcSpl_MinValueW16(G_CONST WebRtc_Word16* vector,
+                                    WebRtc_Word16 length);
+WebRtc_Word32 WebRtcSpl_MinValueW32(G_CONST WebRtc_Word32* vector,
+                                    WebRtc_Word16 length);
+WebRtc_Word16 WebRtcSpl_MaxValueW16(G_CONST WebRtc_Word16* vector,
+                                    WebRtc_Word16 length);
+
+WebRtc_Word16 WebRtcSpl_MaxAbsIndexW16(G_CONST WebRtc_Word16* vector,
+                                       WebRtc_Word16 length);
+WebRtc_Word32 WebRtcSpl_MaxValueW32(G_CONST WebRtc_Word32* vector,
+                                    WebRtc_Word16 length);
+WebRtc_Word16 WebRtcSpl_MinIndexW16(G_CONST WebRtc_Word16* vector,
+                                    WebRtc_Word16 length);
+WebRtc_Word16 WebRtcSpl_MinIndexW32(G_CONST WebRtc_Word32* vector,
+                                    WebRtc_Word16 length);
+WebRtc_Word16 WebRtcSpl_MaxIndexW16(G_CONST WebRtc_Word16* vector,
+                                    WebRtc_Word16 length);
+WebRtc_Word16 WebRtcSpl_MaxIndexW32(G_CONST WebRtc_Word32* vector,
+                                    WebRtc_Word16 length);
+// End: Minimum and maximum operations.
+
+// Vector scaling operations. Implementation in vector_scaling_operations.c.
+// Description at bottom of file.
+void WebRtcSpl_VectorBitShiftW16(WebRtc_Word16* out_vector,
+                                 WebRtc_Word16 vector_length,
+                                 G_CONST WebRtc_Word16* in_vector,
+                                 WebRtc_Word16 right_shifts);
+void WebRtcSpl_VectorBitShiftW32(WebRtc_Word32* out_vector,
+                                 WebRtc_Word16 vector_length,
+                                 G_CONST WebRtc_Word32* in_vector,
+                                 WebRtc_Word16 right_shifts);
+void WebRtcSpl_VectorBitShiftW32ToW16(WebRtc_Word16* out_vector,
+                                      WebRtc_Word16 vector_length,
+                                      G_CONST WebRtc_Word32* in_vector,
+                                      WebRtc_Word16 right_shifts);
+
+void WebRtcSpl_ScaleVector(G_CONST WebRtc_Word16* in_vector,
+                           WebRtc_Word16* out_vector,
+                           WebRtc_Word16 gain,
+                           WebRtc_Word16 vector_length,
+                           WebRtc_Word16 right_shifts);
+void WebRtcSpl_ScaleVectorWithSat(G_CONST WebRtc_Word16* in_vector,
+                                  WebRtc_Word16* out_vector,
+                                  WebRtc_Word16 gain,
+                                  WebRtc_Word16 vector_length,
+                                  WebRtc_Word16 right_shifts);
+void WebRtcSpl_ScaleAndAddVectors(G_CONST WebRtc_Word16* in_vector1,
+                                  WebRtc_Word16 gain1, int right_shifts1,
+                                  G_CONST WebRtc_Word16* in_vector2,
+                                  WebRtc_Word16 gain2, int right_shifts2,
+                                  WebRtc_Word16* out_vector,
+                                  int vector_length);
+// End: Vector scaling operations.
+
+// iLBC specific functions. Implementations in ilbc_specific_functions.c.
+// Description at bottom of file.
+void WebRtcSpl_ScaleAndAddVectorsWithRound(WebRtc_Word16* in_vector1,
+                                           WebRtc_Word16 scale1,
+                                           WebRtc_Word16* in_vector2,
+                                           WebRtc_Word16 scale2,
+                                           WebRtc_Word16 right_shifts,
+                                           WebRtc_Word16* out_vector,
+                                           WebRtc_Word16 vector_length);
+void WebRtcSpl_ReverseOrderMultArrayElements(WebRtc_Word16* out_vector,
+                                             G_CONST WebRtc_Word16* in_vector,
+                                             G_CONST WebRtc_Word16* window,
+                                             WebRtc_Word16 vector_length,
+                                             WebRtc_Word16 right_shifts);
+void WebRtcSpl_ElementwiseVectorMult(WebRtc_Word16* out_vector,
+                                     G_CONST WebRtc_Word16* in_vector,
+                                     G_CONST WebRtc_Word16* window,
+                                     WebRtc_Word16 vector_length,
+                                     WebRtc_Word16 right_shifts);
+void WebRtcSpl_AddVectorsAndShift(WebRtc_Word16* out_vector,
+                                  G_CONST WebRtc_Word16* in_vector1,
+                                  G_CONST WebRtc_Word16* in_vector2,
+                                  WebRtc_Word16 vector_length,
+                                  WebRtc_Word16 right_shifts);
+void WebRtcSpl_AddAffineVectorToVector(WebRtc_Word16* out_vector,
+                                       WebRtc_Word16* in_vector,
+                                       WebRtc_Word16 gain,
+                                       WebRtc_Word32 add_constant,
+                                       WebRtc_Word16 right_shifts,
+                                       int vector_length);
+void WebRtcSpl_AffineTransformVector(WebRtc_Word16* out_vector,
+                                     WebRtc_Word16* in_vector,
+                                     WebRtc_Word16 gain,
+                                     WebRtc_Word32 add_constant,
+                                     WebRtc_Word16 right_shifts,
+                                     int vector_length);
+// End: iLBC specific functions.
+
+// Signal processing operations. Descriptions at bottom of this file.
+int WebRtcSpl_AutoCorrelation(G_CONST WebRtc_Word16* vector,
+                              int vector_length, int order,
+                              WebRtc_Word32* result_vector,
+                              int* scale);
+WebRtc_Word16 WebRtcSpl_LevinsonDurbin(WebRtc_Word32* auto_corr,
+                                       WebRtc_Word16* lpc_coef,
+                                       WebRtc_Word16* refl_coef,
+                                       WebRtc_Word16 order);
+void WebRtcSpl_ReflCoefToLpc(G_CONST WebRtc_Word16* refl_coef,
+                             int use_order,
+                             WebRtc_Word16* lpc_coef);
+void WebRtcSpl_LpcToReflCoef(WebRtc_Word16* lpc_coef,
+                             int use_order,
+                             WebRtc_Word16* refl_coef);
+void WebRtcSpl_AutoCorrToReflCoef(G_CONST WebRtc_Word32* auto_corr,
+                                  int use_order,
+                                  WebRtc_Word16* refl_coef);
+void WebRtcSpl_CrossCorrelation(WebRtc_Word32* cross_corr,
+                                WebRtc_Word16* vector1,
+                                WebRtc_Word16* vector2,
+                                WebRtc_Word16 dim_vector,
+                                WebRtc_Word16 dim_cross_corr,
+                                WebRtc_Word16 right_shifts,
+                                WebRtc_Word16 step_vector2);
+void WebRtcSpl_GetHanningWindow(WebRtc_Word16* window, WebRtc_Word16 size);
+void WebRtcSpl_SqrtOfOneMinusXSquared(WebRtc_Word16* in_vector,
+                                      int vector_length,
+                                      WebRtc_Word16* out_vector);
+// End: Signal processing operations.
+
+// Randomization functions. Implementations collected in randomization_functions.c and
+// descriptions at bottom of this file.
+WebRtc_UWord32 WebRtcSpl_IncreaseSeed(WebRtc_UWord32* seed);
+WebRtc_Word16 WebRtcSpl_RandU(WebRtc_UWord32* seed);
+WebRtc_Word16 WebRtcSpl_RandN(WebRtc_UWord32* seed);
+WebRtc_Word16 WebRtcSpl_RandUArray(WebRtc_Word16* vector,
+                                   WebRtc_Word16 vector_length,
+                                   WebRtc_UWord32* seed);
+// End: Randomization functions.
+
+// Math functions
+WebRtc_Word32 WebRtcSpl_Sqrt(WebRtc_Word32 value);
+WebRtc_Word32 WebRtcSpl_SqrtFloor(WebRtc_Word32 value);
+
+// Divisions. Implementations collected in division_operations.c and
+// descriptions at bottom of this file.
+WebRtc_UWord32 WebRtcSpl_DivU32U16(WebRtc_UWord32 num, WebRtc_UWord16 den);
+WebRtc_Word32 WebRtcSpl_DivW32W16(WebRtc_Word32 num, WebRtc_Word16 den);
+WebRtc_Word16 WebRtcSpl_DivW32W16ResW16(WebRtc_Word32 num, WebRtc_Word16 den);
+WebRtc_Word32 WebRtcSpl_DivResultInQ31(WebRtc_Word32 num, WebRtc_Word32 den);
+WebRtc_Word32 WebRtcSpl_DivW32HiLow(WebRtc_Word32 num, WebRtc_Word16 den_hi,
+                                    WebRtc_Word16 den_low);
+// End: Divisions.
+
+WebRtc_Word32 WebRtcSpl_Energy(WebRtc_Word16* vector,
+                               int vector_length,
+                               int* scale_factor);
+
+WebRtc_Word32 WebRtcSpl_DotProductWithScale(WebRtc_Word16* vector1,
+                                            WebRtc_Word16* vector2,
+                                            int vector_length,
+                                            int scaling);
+
+// Filter operations.
+int WebRtcSpl_FilterAR(G_CONST WebRtc_Word16* ar_coef, int ar_coef_length,
+                       G_CONST WebRtc_Word16* in_vector, int in_vector_length,
+                       WebRtc_Word16* filter_state, int filter_state_length,
+                       WebRtc_Word16* filter_state_low,
+                       int filter_state_low_length, WebRtc_Word16* out_vector,
+                       WebRtc_Word16* out_vector_low, int out_vector_low_length);
+
+void WebRtcSpl_FilterMAFastQ12(WebRtc_Word16* in_vector,
+                               WebRtc_Word16* out_vector,
+                               WebRtc_Word16* ma_coef,
+                               WebRtc_Word16 ma_coef_length,
+                               WebRtc_Word16 vector_length);
+
+// Performs an AR filtering on a vector in Q12
+// Input:
+//      - data_in            : Input samples
+//      - data_out           : State information in positions
+//                               data_out[-order] .. data_out[-1]
+//      - coefficients       : Filter coefficients (in Q12)
+//      - coefficients_length: Number of coefficients (order+1)
+//      - data_length        : Number of samples to be filtered
+// Output:
+//      - data_out           : Filtered samples
+void WebRtcSpl_FilterARFastQ12(const int16_t* data_in,
+                               int16_t* data_out,
+                               const int16_t* __restrict coefficients,
+                               int coefficients_length,
+                               int data_length);
+
+// Performs an MA down-sampling filter on a vector
+// Input:
+//      - data_in            : Input samples (state in positions
+//                               data_in[-order] .. data_in[-1])
+//      - data_in_length     : Number of samples in |data_in| to be filtered.
+//                               This must be at least
+//                               |delay| + |factor|*(|data_out_length|-1) + 1
+//      - data_out_length    : Number of down sampled samples desired
+//      - coefficients       : Filter coefficients (in Q12)
+//      - coefficients_length: Number of coefficients (order+1)
+//      - factor             : Decimation factor
+//      - delay              : Delay of filter (compensated for in out_vector)
+// Output:
+//      - data_out           : Filtered samples
+// Return value              : 0 if OK, -1 if |data_in| is too short
+int WebRtcSpl_DownsampleFast(const int16_t* data_in,
+                             int data_in_length,
+                             int16_t* data_out,
+                             int data_out_length,
+                             const int16_t* __restrict coefficients,
+                             int coefficients_length,
+                             int factor,
+                             int delay);
+
+// End: Filter operations.
+
+// FFT operations
+
+int WebRtcSpl_ComplexFFT(WebRtc_Word16 vector[], int stages, int mode);
+int WebRtcSpl_ComplexIFFT(WebRtc_Word16 vector[], int stages, int mode);
+
+// Treat a 16-bit complex data buffer |complex_data| as an array of 32-bit
+// values, and swap elements whose indexes are bit-reverses of each other.
+//
+// Input:
+//      - complex_data  : Complex data buffer containing 2^|stages| real
+//                        elements interleaved with 2^|stages| imaginary
+//                        elements: [Re Im Re Im Re Im....]
+//      - stages        : Number of FFT stages. Must be at least 3 and at most
+//                        10, since the table WebRtcSpl_kSinTable1024[] is 1024
+//                        elements long.
+//
+// Output:
+//      - complex_data  : The complex data buffer.
+
+void WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages);
+
+// End: FFT operations
+
+/************************************************************
+ *
+ * RESAMPLING FUNCTIONS AND THEIR STRUCTS ARE DEFINED BELOW
+ *
+ ************************************************************/
+
+/*******************************************************************
+ * resample.c
+ *
+ * Includes the following resampling combinations
+ * 22 kHz -> 16 kHz
+ * 16 kHz -> 22 kHz
+ * 22 kHz ->  8 kHz
+ *  8 kHz -> 22 kHz
+ *
+ ******************************************************************/
+
+// state structure for 22 -> 16 resampler
+typedef struct
+{
+    WebRtc_Word32 S_22_44[8];
+    WebRtc_Word32 S_44_32[8];
+    WebRtc_Word32 S_32_16[8];
+} WebRtcSpl_State22khzTo16khz;
+
+void WebRtcSpl_Resample22khzTo16khz(const WebRtc_Word16* in,
+                                    WebRtc_Word16* out,
+                                    WebRtcSpl_State22khzTo16khz* state,
+                                    WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample22khzTo16khz(WebRtcSpl_State22khzTo16khz* state);
+
+// state structure for 16 -> 22 resampler
+typedef struct
+{
+    WebRtc_Word32 S_16_32[8];
+    WebRtc_Word32 S_32_22[8];
+} WebRtcSpl_State16khzTo22khz;
+
+void WebRtcSpl_Resample16khzTo22khz(const WebRtc_Word16* in,
+                                    WebRtc_Word16* out,
+                                    WebRtcSpl_State16khzTo22khz* state,
+                                    WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample16khzTo22khz(WebRtcSpl_State16khzTo22khz* state);
+
+// state structure for 22 -> 8 resampler
+typedef struct
+{
+    WebRtc_Word32 S_22_22[16];
+    WebRtc_Word32 S_22_16[8];
+    WebRtc_Word32 S_16_8[8];
+} WebRtcSpl_State22khzTo8khz;
+
+void WebRtcSpl_Resample22khzTo8khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State22khzTo8khz* state,
+                                   WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample22khzTo8khz(WebRtcSpl_State22khzTo8khz* state);
+
+// state structure for 8 -> 22 resampler
+typedef struct
+{
+    WebRtc_Word32 S_8_16[8];
+    WebRtc_Word32 S_16_11[8];
+    WebRtc_Word32 S_11_22[8];
+} WebRtcSpl_State8khzTo22khz;
+
+void WebRtcSpl_Resample8khzTo22khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State8khzTo22khz* state,
+                                   WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample8khzTo22khz(WebRtcSpl_State8khzTo22khz* state);
+
+/*******************************************************************
+ * resample_fractional.c
+ * Functions for internal use in the other resample functions
+ *
+ * Includes the following resampling combinations
+ * 48 kHz -> 32 kHz
+ * 32 kHz -> 24 kHz
+ * 44 kHz -> 32 kHz
+ *
+ ******************************************************************/
+
+void WebRtcSpl_Resample48khzTo32khz(const WebRtc_Word32* In, WebRtc_Word32* Out,
+                                    const WebRtc_Word32 K);
+
+void WebRtcSpl_Resample32khzTo24khz(const WebRtc_Word32* In, WebRtc_Word32* Out,
+                                    const WebRtc_Word32 K);
+
+void WebRtcSpl_Resample44khzTo32khz(const WebRtc_Word32* In, WebRtc_Word32* Out,
+                                    const WebRtc_Word32 K);
+
+/*******************************************************************
+ * resample_48khz.c
+ *
+ * Includes the following resampling combinations
+ * 48 kHz -> 16 kHz
+ * 16 kHz -> 48 kHz
+ * 48 kHz ->  8 kHz
+ *  8 kHz -> 48 kHz
+ *
+ ******************************************************************/
+
+// state structure for 48 -> 16 resampler
+typedef struct
+{
+    WebRtc_Word32 S_48_48[16];
+    WebRtc_Word32 S_48_32[8];
+    WebRtc_Word32 S_32_16[8];
+} WebRtcSpl_State48khzTo16khz;
+
+void WebRtcSpl_Resample48khzTo16khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                    WebRtcSpl_State48khzTo16khz* state,
+                                    WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample48khzTo16khz(WebRtcSpl_State48khzTo16khz* state);
+
+// state structure for 16 -> 48 resampler
+typedef struct
+{
+    WebRtc_Word32 S_16_32[8];
+    WebRtc_Word32 S_32_24[8];
+    WebRtc_Word32 S_24_48[8];
+} WebRtcSpl_State16khzTo48khz;
+
+void WebRtcSpl_Resample16khzTo48khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                    WebRtcSpl_State16khzTo48khz* state,
+                                    WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample16khzTo48khz(WebRtcSpl_State16khzTo48khz* state);
+
+// state structure for 48 -> 8 resampler
+typedef struct
+{
+    WebRtc_Word32 S_48_24[8];
+    WebRtc_Word32 S_24_24[16];
+    WebRtc_Word32 S_24_16[8];
+    WebRtc_Word32 S_16_8[8];
+} WebRtcSpl_State48khzTo8khz;
+
+void WebRtcSpl_Resample48khzTo8khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State48khzTo8khz* state,
+                                   WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample48khzTo8khz(WebRtcSpl_State48khzTo8khz* state);
+
+// state structure for 8 -> 48 resampler
+typedef struct
+{
+    WebRtc_Word32 S_8_16[8];
+    WebRtc_Word32 S_16_12[8];
+    WebRtc_Word32 S_12_24[8];
+    WebRtc_Word32 S_24_48[8];
+} WebRtcSpl_State8khzTo48khz;
+
+void WebRtcSpl_Resample8khzTo48khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State8khzTo48khz* state,
+                                   WebRtc_Word32* tmpmem);
+
+void WebRtcSpl_ResetResample8khzTo48khz(WebRtcSpl_State8khzTo48khz* state);
+
+/*******************************************************************
+ * resample_by_2.c
+ *
+ * Includes down and up sampling by a factor of two.
+ *
+ ******************************************************************/
+
+void WebRtcSpl_DownsampleBy2(const WebRtc_Word16* in, const WebRtc_Word16 len,
+                             WebRtc_Word16* out, WebRtc_Word32* filtState);
+
+void WebRtcSpl_UpsampleBy2(const WebRtc_Word16* in, WebRtc_Word16 len, WebRtc_Word16* out,
+                           WebRtc_Word32* filtState);
+
+/************************************************************
+ * END OF RESAMPLING FUNCTIONS
+ ************************************************************/
+void WebRtcSpl_AnalysisQMF(const WebRtc_Word16* in_data,
+                           WebRtc_Word16* low_band,
+                           WebRtc_Word16* high_band,
+                           WebRtc_Word32* filter_state1,
+                           WebRtc_Word32* filter_state2);
+void WebRtcSpl_SynthesisQMF(const WebRtc_Word16* low_band,
+                            const WebRtc_Word16* high_band,
+                            WebRtc_Word16* out_data,
+                            WebRtc_Word32* filter_state1,
+                            WebRtc_Word32* filter_state2);
+
+#ifdef __cplusplus
+}
+#endif // __cplusplus
+#endif // WEBRTC_SPL_SIGNAL_PROCESSING_LIBRARY_H_
+
+//
+// WebRtcSpl_AddSatW16(...)
+// WebRtcSpl_AddSatW32(...)
+//
+// Returns the result of a saturated 16-bit, respectively 32-bit, addition of
+// the numbers specified by the |var1| and |var2| parameters.
+//
+// Input:
+//      - var1      : Input variable 1
+//      - var2      : Input variable 2
+//
+// Return value     : Added and saturated value
+//
+
+//
+// WebRtcSpl_SubSatW16(...)
+// WebRtcSpl_SubSatW32(...)
+//
+// Returns the result of a saturated 16-bit, respectively 32-bit, subtraction
+// of the numbers specified by the |var1| and |var2| parameters.
+//
+// Input:
+//      - var1      : Input variable 1
+//      - var2      : Input variable 2
+//
+// Returned value   : Subtracted and saturated value
+//
+
+//
+// WebRtcSpl_GetSizeInBits(...)
+//
+// Returns the # of bits that are needed at the most to represent the number
+// specified by the |value| parameter.
+//
+// Input:
+//      - value     : Input value
+//
+// Return value     : Number of bits needed to represent |value|
+//
+
+//
+// WebRtcSpl_NormW32(...)
+//
+// Norm returns the # of left shifts required to 32-bit normalize the 32-bit
+// signed number specified by the |value| parameter.
+//
+// Input:
+//      - value     : Input value
+//
+// Return value     : Number of bit shifts needed to 32-bit normalize |value|
+//
+
+//
+// WebRtcSpl_NormW16(...)
+//
+// Norm returns the # of left shifts required to 16-bit normalize the 16-bit
+// signed number specified by the |value| parameter.
+//
+// Input:
+//      - value     : Input value
+//
+// Return value     : Number of bit shifts needed to 16-bit normalize |value|
+//
+
+//
+// WebRtcSpl_NormU32(...)
+//
+// Norm returns the # of left shifts required to 32-bit normalize the unsigned
+// 32-bit number specified by the |value| parameter.
+//
+// Input:
+//      - value     : Input value
+//
+// Return value     : Number of bit shifts needed to 32-bit normalize |value|
+//
+
+//
+// WebRtcSpl_GetScalingSquare(...)
+//
+// Returns the # of bits required to scale the samples specified in the
+// |in_vector| parameter so that, if the squares of the samples are added the
+// # of times specified by the |times| parameter, the 32-bit addition will not
+// overflow (result in WebRtc_Word32).
+//
+// Input:
+//      - in_vector         : Input vector to check scaling on
+//      - in_vector_length  : Samples in |in_vector|
+//      - times             : Number of additions to be performed
+//
+// Return value             : Number of right bit shifts needed to avoid
+//                            overflow in the addition calculation
+//
+
+//
+// WebRtcSpl_MemSetW16(...)
+//
+// Sets all the values in the WebRtc_Word16 vector |vector| of length
+// |vector_length| to the specified value |set_value|
+//
+// Input:
+//      - vector        : Pointer to the WebRtc_Word16 vector
+//      - set_value     : Value specified
+//      - vector_length : Length of vector
+//
+
+//
+// WebRtcSpl_MemSetW32(...)
+//
+// Sets all the values in the WebRtc_Word32 vector |vector| of length
+// |vector_length| to the specified value |set_value|
+//
+// Input:
+//      - vector        : Pointer to the WebRtc_Word32 vector
+//      - set_value     : Value specified
+//      - vector_length : Length of vector
+//
+
+//
+// WebRtcSpl_MemCpyReversedOrder(...)
+//
+// Copies all the values from the source WebRtc_Word16 vector |in_vector| to a
+// destination WebRtc_Word16 vector |out_vector|. It is done in reversed order,
+// meaning that the first sample of |in_vector| is copied to the last sample of
+// the |out_vector|. The procedure continues until the last sample of
+// |in_vector| has been copied to the first sample of |out_vector|. This
+// creates a reversed vector. Used in e.g. prediction in iLBC.
+//
+// Input:
+//      - in_vector     : Pointer to the first sample in a WebRtc_Word16 vector
+//                        of length |length|
+//      - vector_length : Number of elements to copy
+//
+// Output:
+//      - out_vector    : Pointer to the last sample in a WebRtc_Word16 vector
+//                        of length |length|
+//
+
+//
+// WebRtcSpl_CopyFromEndW16(...)
+//
+// Copies the rightmost |samples| of |in_vector| (of length |in_vector_length|)
+// to the vector |out_vector|.
+//
+// Input:
+//      - in_vector         : Input vector
+//      - in_vector_length  : Number of samples in |in_vector|
+//      - samples           : Number of samples to extract (from right side)
+//                            from |in_vector|
+//
+// Output:
+//      - out_vector        : Vector with the requested samples
+//
+// Return value             : Number of copied samples in |out_vector|
+//
+
+//
+// WebRtcSpl_ZerosArrayW16(...)
+// WebRtcSpl_ZerosArrayW32(...)
+//
+// Inserts the value "zero" in all positions of a w16 and a w32 vector
+// respectively.
+//
+// Input:
+//      - vector_length : Number of samples in vector
+//
+// Output:
+//      - vector        : Vector containing all zeros
+//
+// Return value         : Number of samples in vector
+//
+
+//
+// WebRtcSpl_OnesArrayW16(...)
+// WebRtcSpl_OnesArrayW32(...)
+//
+// Inserts the value "one" in all positions of a w16 and a w32 vector
+// respectively.
+//
+// Input:
+//      - vector_length : Number of samples in vector
+//
+// Output:
+//      - vector        : Vector containing all ones
+//
+// Return value         : Number of samples in vector
+//
+
+//
+// WebRtcSpl_MinValueW16(...)
+// WebRtcSpl_MinValueW32(...)
+//
+// Returns the minimum value of a vector
+//
+// Input:
+//      - vector        : Input vector
+//      - vector_length : Number of samples in vector
+//
+// Return value         : Minimum sample value in vector
+//
+
+//
+// WebRtcSpl_MaxValueW16(...)
+// WebRtcSpl_MaxValueW32(...)
+//
+// Returns the maximum value of a vector
+//
+// Input:
+//      - vector        : Input vector
+//      - vector_length : Number of samples in vector
+//
+// Return value         : Maximum sample value in vector
+//
+
+//
+// WebRtcSpl_MaxAbsValueW16(...)
+// WebRtcSpl_MaxAbsValueW32(...)
+//
+// Returns the largest absolute value of a vector
+//
+// Input:
+//      - vector        : Input vector
+//      - vector_length : Number of samples in vector
+//
+// Return value         : Maximum absolute value in vector
+//
+
+//
+// WebRtcSpl_MaxAbsIndexW16(...)
+//
+// Returns the vector index to the largest absolute value of a vector
+//
+// Input:
+//      - vector        : Input vector
+//      - vector_length : Number of samples in vector
+//
+// Return value         : Index to maximum absolute value in vector
+//
+
+//
+// WebRtcSpl_MinIndexW16(...)
+// WebRtcSpl_MinIndexW32(...)
+//
+// Returns the vector index to the minimum sample value of a vector
+//
+// Input:
+//      - vector        : Input vector
+//      - vector_length : Number of samples in vector
+//
+// Return value         : Index to minimum sample value in vector
+//
+
+//
+// WebRtcSpl_MaxIndexW16(...)
+// WebRtcSpl_MaxIndexW32(...)
+//
+// Returns the vector index to the maximum sample value of a vector
+//
+// Input:
+//      - vector        : Input vector
+//      - vector_length : Number of samples in vector
+//
+// Return value         : Index to maximum sample value in vector
+//
+
+//
+// WebRtcSpl_VectorBitShiftW16(...)
+// WebRtcSpl_VectorBitShiftW32(...)
+//
+// Bit shifts all the values in a vector up or downwards. Different calls for
+// WebRtc_Word16 and WebRtc_Word32 vectors respectively.
+//
+// Input:
+//      - vector_length : Length of vector
+//      - in_vector     : Pointer to the vector that should be bit shifted
+//      - right_shifts  : Number of right bit shifts (negative value gives left
+//                        shifts)
+//
+// Output:
+//      - out_vector    : Pointer to the result vector (can be the same as
+//                        |in_vector|)
+//
+
+//
+// WebRtcSpl_VectorBitShiftW32ToW16(...)
+//
+// Bit shifts all the values in a WebRtc_Word32 vector up or downwards and
+// stores the result as a WebRtc_Word16 vector
+//
+// Input:
+//      - vector_length : Length of vector
+//      - in_vector     : Pointer to the vector that should be bit shifted
+//      - right_shifts  : Number of right bit shifts (negative value gives left
+//                        shifts)
+//
+// Output:
+//      - out_vector    : Pointer to the result vector (can be the same as
+//                        |in_vector|)
+//
+
+//
+// WebRtcSpl_ScaleVector(...)
+//
+// Performs the vector operation:
+//  out_vector[k] = (gain*in_vector[k])>>right_shifts
+//
+// Input:
+//      - in_vector     : Input vector
+//      - gain          : Scaling gain
+//      - vector_length : Elements in the |in_vector|
+//      - right_shifts  : Number of right bit shifts applied
+//
+// Output:
+//      - out_vector    : Output vector (can be the same as |in_vector|)
+//
+
+//
+// WebRtcSpl_ScaleVectorWithSat(...)
+//
+// Performs the vector operation:
+//  out_vector[k] = SATURATE( (gain*in_vector[k])>>right_shifts )
+//
+// Input:
+//      - in_vector     : Input vector
+//      - gain          : Scaling gain
+//      - vector_length : Elements in the |in_vector|
+//      - right_shifts  : Number of right bit shifts applied
+//
+// Output:
+//      - out_vector    : Output vector (can be the same as |in_vector|)
+//
+
+//
+// WebRtcSpl_ScaleAndAddVectors(...)
+//
+// Performs the vector operation:
+//  out_vector[k] = (gain1*in_vector1[k])>>right_shifts1
+//                  + (gain2*in_vector2[k])>>right_shifts2
+//
+// Input:
+//      - in_vector1    : Input vector 1
+//      - gain1         : Gain to be used for vector 1
+//      - right_shifts1 : Right bit shift to be used for vector 1
+//      - in_vector2    : Input vector 2
+//      - gain2         : Gain to be used for vector 2
+//      - right_shifts2 : Right bit shift to be used for vector 2
+//      - vector_length : Elements in the input vectors
+//
+// Output:
+//      - out_vector    : Output vector
+//
+
+//
+// WebRtcSpl_ScaleAndAddVectorsWithRound(...)
+//
+// Performs the vector operation:
+//
+//  out_vector[k] = ((scale1*in_vector1[k]) + (scale2*in_vector2[k])
+//                      + round_value) >> right_shifts
+//
+//      where:
+//
+//  round_value = (1<<right_shifts)>>1
+//
+// Input:
+//      - in_vector1    : Input vector 1
+//      - scale1        : Gain to be used for vector 1
+//      - in_vector2    : Input vector 2
+//      - scale2        : Gain to be used for vector 2
+//      - right_shifts  : Number of right bit shifts to be applied
+//      - vector_length : Number of elements in the input vectors
+//
+// Output:
+//      - out_vector    : Output vector
+//
+
+//
+// WebRtcSpl_ReverseOrderMultArrayElements(...)
+//
+// Performs the vector operation:
+//  out_vector[n] = (in_vector[n]*window[-n])>>right_shifts
+//
+// Input:
+//      - in_vector     : Input vector
+//      - window        : Window vector (should be reversed). The pointer
+//                        should be set to the last value in the vector
+//      - right_shifts  : Number of right bit shift to be applied after the
+//                        multiplication
+//      - vector_length : Number of elements in |in_vector|
+//
+// Output:
+//      - out_vector    : Output vector (can be same as |in_vector|)
+//
+
+//
+// WebRtcSpl_ElementwiseVectorMult(...)
+//
+// Performs the vector operation:
+//  out_vector[n] = (in_vector[n]*window[n])>>right_shifts
+//
+// Input:
+//      - in_vector     : Input vector
+//      - window        : Window vector.
+//      - right_shifts  : Number of right bit shift to be applied after the
+//                        multiplication
+//      - vector_length : Number of elements in |in_vector|
+//
+// Output:
+//      - out_vector    : Output vector (can be same as |in_vector|)
+//
+
+//
+// WebRtcSpl_AddVectorsAndShift(...)
+//
+// Performs the vector operation:
+//  out_vector[k] = (in_vector1[k] + in_vector2[k])>>right_shifts
+//
+// Input:
+//      - in_vector1    : Input vector 1
+//      - in_vector2    : Input vector 2
+//      - right_shifts  : Number of right bit shift to be applied after the
+//                        multiplication
+//      - vector_length : Number of elements in |in_vector1| and |in_vector2|
+//
+// Output:
+//      - out_vector    : Output vector (can be same as |in_vector1|)
+//
+
+//
+// WebRtcSpl_AddAffineVectorToVector(...)
+//
+// Adds an affine transformed vector to another vector |out_vector|, i.e,
+// performs
+//  out_vector[k] += (in_vector[k]*gain+add_constant)>>right_shifts
+//
+// Input:
+//      - in_vector     : Input vector
+//      - gain          : Gain value, used to multiply the in vector with
+//      - add_constant  : Constant value to add (usually 1<<(right_shifts-1),
+//                        but others can be used as well)
+//      - right_shifts  : Number of right bit shifts (0-16)
+//      - vector_length : Number of samples in |in_vector| and |out_vector|
+//
+// Output:
+//      - out_vector    : Vector with the output
+//
+
+//
+// WebRtcSpl_AffineTransformVector(...)
+//
+// Affine transforms a vector, i.e, performs
+//  out_vector[k] = (in_vector[k]*gain+add_constant)>>right_shifts
+//
+// Input:
+//      - in_vector     : Input vector
+//      - gain          : Gain value, used to multiply the in vector with
+//      - add_constant  : Constant value to add (usually 1<<(right_shifts-1),
+//                        but others can be used as well)
+//      - right_shifts  : Number of right bit shifts (0-16)
+//      - vector_length : Number of samples in |in_vector| and |out_vector|
+//
+// Output:
+//      - out_vector    : Vector with the output
+//
+
+//
+// WebRtcSpl_AutoCorrelation(...)
+//
+// A 32-bit fix-point implementation of auto-correlation computation
+//
+// Input:
+//      - vector        : Vector to calculate autocorrelation upon
+//      - vector_length : Length (in samples) of |vector|
+//      - order         : The order up to which the autocorrelation should be
+//                        calculated
+//
+// Output:
+//      - result_vector : auto-correlation values (values should be seen
+//                        relative to each other since the absolute values
+//                        might have been down shifted to avoid overflow)
+//
+//      - scale         : The number of left shifts required to obtain the
+//                        auto-correlation in Q0
+//
+// Return value         : Number of samples in |result_vector|, i.e., (order+1)
+//
+
+//
+// WebRtcSpl_LevinsonDurbin(...)
+//
+// A 32-bit fix-point implementation of the Levinson-Durbin algorithm that
+// does NOT use the 64 bit class
+//
+// Input:
+//      - auto_corr : Vector with autocorrelation values of length >=
+//                    |use_order|+1
+//      - use_order : The LPC filter order (support up to order 20)
+//
+// Output:
+//      - lpc_coef  : lpc_coef[0..use_order] LPC coefficients in Q12
+//      - refl_coef : refl_coef[0...use_order-1]| Reflection coefficients in
+//                    Q15
+//
+// Return value     : 1 for stable 0 for unstable
+//
+
+//
+// WebRtcSpl_ReflCoefToLpc(...)
+//
+// Converts reflection coefficients |refl_coef| to LPC coefficients |lpc_coef|.
+// This version is a 16 bit operation.
+//
+// NOTE: The 16 bit refl_coef -> lpc_coef conversion might result in a
+// "slightly unstable" filter (i.e., a pole just outside the unit circle) in
+// "rare" cases even if the reflection coefficients are stable.
+//
+// Input:
+//      - refl_coef : Reflection coefficients in Q15 that should be converted
+//                    to LPC coefficients
+//      - use_order : Number of coefficients in |refl_coef|
+//
+// Output:
+//      - lpc_coef  : LPC coefficients in Q12
+//
+
+//
+// WebRtcSpl_LpcToReflCoef(...)
+//
+// Converts LPC coefficients |lpc_coef| to reflection coefficients |refl_coef|.
+// This version is a 16 bit operation.
+// The conversion is implemented by the step-down algorithm.
+//
+// Input:
+//      - lpc_coef  : LPC coefficients in Q12, that should be converted to
+//                    reflection coefficients
+//      - use_order : Number of coefficients in |lpc_coef|
+//
+// Output:
+//      - refl_coef : Reflection coefficients in Q15.
+//
+
+//
+// WebRtcSpl_AutoCorrToReflCoef(...)
+//
+// Calculates reflection coefficients (16 bit) from auto-correlation values
+//
+// Input:
+//      - auto_corr : Auto-correlation values
+//      - use_order : Number of coefficients wanted be calculated
+//
+// Output:
+//      - refl_coef : Reflection coefficients in Q15.
+//
+
+//
+// WebRtcSpl_CrossCorrelation(...)
+//
+// Calculates the cross-correlation between two sequences |vector1| and
+// |vector2|. |vector1| is fixed and |vector2| slides as the pointer is
+// increased with the amount |step_vector2|
+//
+// Input:
+//      - vector1           : First sequence (fixed throughout the correlation)
+//      - vector2           : Second sequence (slides |step_vector2| for each
+//                            new correlation)
+//      - dim_vector        : Number of samples to use in the cross-correlation
+//      - dim_cross_corr    : Number of cross-correlations to calculate (the
+//                            start position for |vector2| is updated for each
+//                            new one)
+//      - right_shifts      : Number of right bit shifts to use. This will
+//                            become the output Q-domain.
+//      - step_vector2      : How many (positive or negative) steps the
+//                            |vector2| pointer should be updated for each new
+//                            cross-correlation value.
+//
+// Output:
+//      - cross_corr        : The cross-correlation in Q(-right_shifts)
+//
+
+//
+// WebRtcSpl_GetHanningWindow(...)
+//
+// Creates (the first half of) a Hanning window. Size must be at least 1 and
+// at most 512.
+//
+// Input:
+//      - size      : Length of the requested Hanning window (1 to 512)
+//
+// Output:
+//      - window    : Hanning vector in Q14.
+//
+
+//
+// WebRtcSpl_SqrtOfOneMinusXSquared(...)
+//
+// Calculates y[k] = sqrt(1 - x[k]^2) for each element of the input vector
+// |in_vector|. Input and output values are in Q15.
+//
+// Inputs:
+//      - in_vector     : Values to calculate sqrt(1 - x^2) of
+//      - vector_length : Length of vector |in_vector|
+//
+// Output:
+//      - out_vector    : Output values in Q15
+//
+
+//
+// WebRtcSpl_IncreaseSeed(...)
+//
+// Increases the seed (and returns the new value)
+//
+// Input:
+//      - seed      : Seed for random calculation
+//
+// Output:
+//      - seed      : Updated seed value
+//
+// Return value     : The new seed value
+//
+
+//
+// WebRtcSpl_RandU(...)
+//
+// Produces a uniformly distributed value in the WebRtc_Word16 range
+//
+// Input:
+//      - seed      : Seed for random calculation
+//
+// Output:
+//      - seed      : Updated seed value
+//
+// Return value     : Uniformly distributed value in the range
+//                    [Word16_MIN...Word16_MAX]
+//
+
+//
+// WebRtcSpl_RandN(...)
+//
+// Produces a normal distributed value in the WebRtc_Word16 range
+//
+// Input:
+//      - seed      : Seed for random calculation
+//
+// Output:
+//      - seed      : Updated seed value
+//
+// Return value     : N(0,1) value in the Q13 domain
+//
+
+//
+// WebRtcSpl_RandUArray(...)
+//
+// Produces a uniformly distributed vector with elements in the WebRtc_Word16
+// range
+//
+// Input:
+//      - vector_length : Samples wanted in the vector
+//      - seed          : Seed for random calculation
+//
+// Output:
+//      - vector        : Vector with the uniform values
+//      - seed          : Updated seed value
+//
+// Return value         : Number of samples in vector, i.e., |vector_length|
+//
+
+//
+// WebRtcSpl_Sqrt(...)
+//
+// Returns the square root of the input value |value|. The precision of this
+// function is integer precision, i.e., sqrt(8) gives 2 as answer.
+// If |value| is a negative number then 0 is returned.
+//
+// Algorithm:
+//
+// A sixth order Taylor Series expansion is used here to compute the square
+// root of a number y^0.5 = (1+x)^0.5
+// where
+// x = y-1
+// y^0.5 = 1 + (x/2) - 0.5*(x/2)^2 + 0.5*(x/2)^3 - 0.625*(x/2)^4 + 0.875*(x/2)^5
+// 0.5 <= x < 1
+//
+// Input:
+//      - value     : Value to calculate sqrt of
+//
+// Return value     : Result of the sqrt calculation
+//
+
+//
+// WebRtcSpl_SqrtFloor(...)
+//
+// Returns the square root of the input value |value|. The precision of this
+// function is rounding down integer precision, i.e., sqrt(8) gives 2 as answer.
+// If |value| is a negative number then 0 is returned.
+//
+// Algorithm:
+//
+// An iterative 4 cycle/bit routine
+//
+// Input:
+//      - value     : Value to calculate sqrt of
+//
+// Return value     : Result of the sqrt calculation
+//
+
+//
+// WebRtcSpl_DivU32U16(...)
+//
+// Divides a WebRtc_UWord32 |num| by a WebRtc_UWord16 |den|.
+//
+// If |den|==0, (WebRtc_UWord32)0xFFFFFFFF is returned.
+//
+// Input:
+//      - num       : Numerator
+//      - den       : Denominator
+//
+// Return value     : Result of the division (as a WebRtc_UWord32), i.e., the
+//                    integer part of num/den.
+//
+
+//
+// WebRtcSpl_DivW32W16(...)
+//
+// Divides a WebRtc_Word32 |num| by a WebRtc_Word16 |den|.
+//
+// If |den|==0, (WebRtc_Word32)0x7FFFFFFF is returned.
+//
+// Input:
+//      - num       : Numerator
+//      - den       : Denominator
+//
+// Return value     : Result of the division (as a WebRtc_Word32), i.e., the
+//                    integer part of num/den.
+//
+
+//
+// WebRtcSpl_DivW32W16ResW16(...)
+//
+// Divides a WebRtc_Word32 |num| by a WebRtc_Word16 |den|, assuming that the
+// result is less than 32768, otherwise an unpredictable result will occur.
+//
+// If |den|==0, (WebRtc_Word16)0x7FFF is returned.
+//
+// Input:
+//      - num       : Numerator
+//      - den       : Denominator
+//
+// Return value     : Result of the division (as a WebRtc_Word16), i.e., the
+//                    integer part of num/den.
+//
+
+//
+// WebRtcSpl_DivResultInQ31(...)
+//
+// Divides a WebRtc_Word32 |num| by a WebRtc_Word16 |den|, assuming that the
+// absolute value of the denominator is larger than the numerator, otherwise
+// an unpredictable result will occur.
+//
+// Input:
+//      - num       : Numerator
+//      - den       : Denominator
+//
+// Return value     : Result of the division in Q31.
+//
+
+//
+// WebRtcSpl_DivW32HiLow(...)
+//
+// Divides a WebRtc_Word32 |num| by a denominator in hi, low format. The
+// absolute value of the denominator has to be larger (or equal to) the
+// numerator.
+//
+// Input:
+//      - num       : Numerator
+//      - den_hi    : High part of denominator
+//      - den_low   : Low part of denominator
+//
+// Return value     : Divided value in Q31
+//
+
+//
+// WebRtcSpl_Energy(...)
+//
+// Calculates the energy of a vector
+//
+// Input:
+//      - vector        : Vector which the energy should be calculated on
+//      - vector_length : Number of samples in vector
+//
+// Output:
+//      - scale_factor  : Number of left bit shifts needed to get the physical
+//                        energy value, i.e, to get the Q0 value
+//
+// Return value         : Energy value in Q(-|scale_factor|)
+//
+
+//
+// WebRtcSpl_FilterAR(...)
+//
+// Performs a 32-bit AR filtering on a vector in Q12
+//
+// Input:
+//  - ar_coef                   : AR-coefficient vector (values in Q12),
+//                                ar_coef[0] must be 4096.
+//  - ar_coef_length            : Number of coefficients in |ar_coef|.
+//  - in_vector                 : Vector to be filtered.
+//  - in_vector_length          : Number of samples in |in_vector|.
+//  - filter_state              : Current state (higher part) of the filter.
+//  - filter_state_length       : Length (in samples) of |filter_state|.
+//  - filter_state_low          : Current state (lower part) of the filter.
+//  - filter_state_low_length   : Length (in samples) of |filter_state_low|.
+//  - out_vector_low_length     : Maximum length (in samples) of
+//                                |out_vector_low|.
+//
+// Output:
+//  - filter_state              : Updated state (upper part) vector.
+//  - filter_state_low          : Updated state (lower part) vector.
+//  - out_vector                : Vector containing the upper part of the
+//                                filtered values.
+//  - out_vector_low            : Vector containing the lower part of the
+//                                filtered values.
+//
+// Return value                 : Number of samples in the |out_vector|.
+//
+
+//
+// WebRtcSpl_FilterMAFastQ12(...)
+//
+// Performs a MA filtering on a vector in Q12
+//
+// Input:
+//      - in_vector         : Input samples (state in positions
+//                            in_vector[-order] .. in_vector[-1])
+//      - ma_coef           : Filter coefficients (in Q12)
+//      - ma_coef_length    : Number of B coefficients (order+1)
+//      - vector_length     : Number of samples to be filtered
+//
+// Output:
+//      - out_vector        : Filtered samples
+//
+
+
+//
+// WebRtcSpl_DotProductWithScale(...)
+//
+// Calculates the dot product between two (WebRtc_Word16) vectors
+//
+// Input:
+//      - vector1       : Vector 1
+//      - vector2       : Vector 2
+//      - vector_length : Number of samples used in the dot product
+//      - scaling       : The number of right bit shifts to apply on each term
+//                        during calculation to avoid overflow, i.e., the
+//                        output will be in Q(-|scaling|)
+//
+// Return value         : The dot product in Q(-scaling)
+//
+
+//
+// WebRtcSpl_ComplexIFFT(...)
+//
+// Complex Inverse FFT
+//
+// Computes an inverse complex 2^|stages|-point FFT on the input vector, which
+// is in bit-reversed order. The original content of the vector is destroyed in
+// the process, since the input is overwritten by the output, normal-ordered,
+// FFT vector. With X as the input complex vector, y as the output complex
+// vector and with M = 2^|stages|, the following is computed:
+//
+//        M-1
+// y(k) = sum[X(i)*[cos(2*pi*i*k/M) + j*sin(2*pi*i*k/M)]]
+//        i=0
+//
+// The implementations are optimized for speed, not for code size. It uses the
+// decimation-in-time algorithm with radix-2 butterfly technique.
+//
+// Input:
+//      - vector    : In pointer to complex vector containing 2^|stages|
+//                    real elements interleaved with 2^|stages| imaginary
+//                    elements.
+//                    [ReImReImReIm....]
+//                    The elements are in Q(-scale) domain, see more on Return
+//                    Value below.
+//
+//      - stages    : Number of FFT stages. Must be at least 3 and at most 10,
+//                    since the table WebRtcSpl_kSinTable1024[] is 1024
+//                    elements long.
+//
+//      - mode      : This parameter lets the user choose how the FFT
+//                    should work.
+//                    mode==0: Low-complexity and Low-accuracy mode
+//                    mode==1: High-complexity and High-accuracy mode
+//
+// Output:
+//      - vector    : Out pointer to the FFT vector (the same as input).
+//
+// Return Value     : The scale value that tells the number of left bit shifts
+//                    that the elements in the |vector| should be shifted with
+//                    in order to get Q0 values, i.e. the physically correct
+//                    values. The scale parameter is always 0 or positive,
+//                    except if N>1024 (|stages|>10), which returns a scale
+//                    value of -1, indicating error.
+//
+
+//
+// WebRtcSpl_ComplexFFT(...)
+//
+// Complex FFT
+//
+// Computes a complex 2^|stages|-point FFT on the input vector, which is in
+// bit-reversed order. The original content of the vector is destroyed in
+// the process, since the input is overwritten by the output, normal-ordered,
+// FFT vector. With x as the input complex vector, Y as the output complex
+// vector and with M = 2^|stages|, the following is computed:
+//
+//              M-1
+// Y(k) = 1/M * sum[x(i)*[cos(2*pi*i*k/M) + j*sin(2*pi*i*k/M)]]
+//              i=0
+//
+// The implementations are optimized for speed, not for code size. It uses the
+// decimation-in-time algorithm with radix-2 butterfly technique.
+//
+// This routine prevents overflow by scaling by 2 before each FFT stage. This is
+// a fixed scaling, for proper normalization - there will be log2(n) passes, so
+// this results in an overall factor of 1/n, distributed to maximize arithmetic
+// accuracy.
+//
+// Input:
+//      - vector    : In pointer to complex vector containing 2^|stages| real
+//                    elements interleaved with 2^|stages| imaginary elements.
+//                    [ReImReImReIm....]
+//                    The output is in the Q0 domain.
+//
+//      - stages    : Number of FFT stages. Must be at least 3 and at most 10,
+//                    since the table WebRtcSpl_kSinTable1024[] is 1024
+//                    elements long.
+//
+//      - mode      : This parameter lets the user choose how the FFT
+//                    should work.
+//                    mode==0: Low-complexity and Low-accuracy mode
+//                    mode==1: High-complexity and High-accuracy mode
+//
+// Output:
+//      - vector    : The output FFT vector is in the Q0 domain.
+//
+// Return value     : The scale parameter is always 0, except if N>1024,
+//                    which returns a scale value of -1, indicating error.
+//
+
+//
+// WebRtcSpl_AnalysisQMF(...)
+//
+// Splits a 0-2*F Hz signal into two sub bands: 0-F Hz and F-2*F Hz. The
+// current version has F = 8000, therefore, a super-wideband audio signal is
+// split to lower-band 0-8 kHz and upper-band 8-16 kHz.
+//
+// Input:
+//      - in_data       : Wide band speech signal, 320 samples (10 ms)
+//
+// Input & Output:
+//      - filter_state1 : Filter state for first All-pass filter
+//      - filter_state2 : Filter state for second All-pass filter
+//
+// Output:
+//      - low_band      : Lower-band signal 0-8 kHz band, 160 samples (10 ms)
+//      - high_band     : Upper-band signal 8-16 kHz band (flipped in frequency
+//                        domain), 160 samples (10 ms)
+//
+
+//
+// WebRtcSpl_SynthesisQMF(...)
+//
+// Combines the two sub bands (0-F and F-2*F Hz) into a signal of 0-2*F
+// Hz, (current version has F = 8000 Hz). So the filter combines lower-band
+// (0-8 kHz) and upper-band (8-16 kHz) channels to obtain super-wideband 0-16
+// kHz audio.
+//
+// Input:
+//      - low_band      : The signal with the 0-8 kHz band, 160 samples (10 ms)
+//      - high_band     : The signal with the 8-16 kHz band, 160 samples (10 ms)
+//
+// Input & Output:
+//      - filter_state1 : Filter state for first All-pass filter
+//      - filter_state2 : Filter state for second All-pass filter
+//
+// Output:
+//      - out_data      : Super-wideband speech signal, 0-16 kHz
+//
+
+// WebRtc_Word16 WebRtcSpl_SatW32ToW16(...)
+//
+// This function saturates a 32-bit word into a 16-bit word.
+// 
+// Input:
+//      - value32   : The value of a 32-bit word.
+//
+// Output:
+//      - out16     : the saturated 16-bit word.
+//
+
+// int32_t WebRtc_MulAccumW16(...)
+//
+// This function multiplies a 16-bit word by a 16-bit word, and accumulates the
+// result into a 32-bit integer.
+// 
+// Input:
+//      - a    : The value of the first 16-bit word.
+//      - b    : The value of the second 16-bit word.
+//      - c    : The value of a 32-bit integer.
+//
+// Return Value: The value of a * b + c.
+//
+
+// WebRtc_Word16 WebRtcSpl_get_version(...)
+//
+// This function gives the version string of the Signal Processing Library.
+//
+// Input:
+//      - length_in_bytes   : The size of Allocated space (in Bytes) where
+//                            the version number is written to (in string format).
+//
+// Output:
+//      - version           : Pointer to a buffer where the version number is written to.
+//
diff --git a/trunk/src/common_audio/signal_processing/include/spl_inl.h b/trunk/src/common_audio/signal_processing/include/spl_inl.h
new file mode 100644
index 0000000..23b3209
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/include/spl_inl.h
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This header file includes the inline functions in
+// the fix point signal processing library.
+
+#ifndef WEBRTC_SPL_SPL_INL_H_
+#define WEBRTC_SPL_SPL_INL_H_
+
+#ifdef WEBRTC_ARCH_ARM_V7A
+#include "spl_inl_armv7.h"
+#else
+
+// Saturates a 32-bit value into the 16-bit range [-32768, 32767].
+static __inline WebRtc_Word16 WebRtcSpl_SatW32ToW16(WebRtc_Word32 value32) {
+  WebRtc_Word16 out16 = (WebRtc_Word16) value32;
+
+  // The truncated cast above is only kept when |value32| already fits in
+  // 16 bits; otherwise it is replaced by the clamped bound below.
+  if (value32 > 32767)
+    out16 = 32767;
+  else if (value32 < -32768)
+    out16 = -32768;
+
+  return out16;
+}
+
+// Saturated 16-bit addition: the sum is formed in 32 bits (cannot overflow
+// there) and then clamped back into the 16-bit range.
+static __inline WebRtc_Word16 WebRtcSpl_AddSatW16(WebRtc_Word16 a,
+                                                  WebRtc_Word16 b) {
+  return WebRtcSpl_SatW32ToW16((WebRtc_Word32) a + (WebRtc_Word32) b);
+}
+
+// Saturated 32-bit addition. Overflow is detected from the operand and
+// result sign bits: adding two values of the same sign can only wrap to a
+// result of the opposite sign.
+static __inline WebRtc_Word32 WebRtcSpl_AddSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_sum;
+
+  // perform long addition
+  l_sum = l_var1 + l_var2;
+
+  // check for under or overflow
+  if (WEBRTC_SPL_IS_NEG(l_var1)) {
+    // Both operands negative but sum non-negative: negative wrap-around.
+    if (WEBRTC_SPL_IS_NEG(l_var2) && !WEBRTC_SPL_IS_NEG(l_sum)) {
+        l_sum = (WebRtc_Word32)0x80000000;
+    }
+  } else {
+    // Both operands non-negative but sum negative: positive wrap-around.
+    if (!WEBRTC_SPL_IS_NEG(l_var2) && WEBRTC_SPL_IS_NEG(l_sum)) {
+        l_sum = (WebRtc_Word32)0x7FFFFFFF;
+    }
+  }
+
+  return l_sum;
+}
+
+// Saturated 16-bit subtraction: the difference is formed in 32 bits (cannot
+// overflow there) and then clamped back into the 16-bit range.
+static __inline WebRtc_Word16 WebRtcSpl_SubSatW16(WebRtc_Word16 var1,
+                                                  WebRtc_Word16 var2) {
+  return WebRtcSpl_SatW32ToW16((WebRtc_Word32) var1 - (WebRtc_Word32) var2);
+}
+
+// Saturated 32-bit subtraction. Wrap-around is detected from the operand
+// signs: (neg - pos) can only wrap to a positive result, and (pos - neg)
+// can only wrap to a negative one.
+static __inline WebRtc_Word32 WebRtcSpl_SubSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_diff;
+
+  // perform subtraction
+  l_diff = l_var1 - l_var2;
+
+  // check for underflow
+  if ((l_var1 < 0) && (l_var2 > 0) && (l_diff > 0))
+    l_diff = (WebRtc_Word32)0x80000000;
+  // check for overflow
+  if ((l_var1 > 0) && (l_var2 < 0) && (l_diff < 0))
+    l_diff = (WebRtc_Word32)0x7FFFFFFF;
+
+  return l_diff;
+}
+
+// Returns the number of bits needed to represent |n| (position of the
+// highest set bit plus one); returns 0 for n == 0. Implemented as a
+// branchless-style binary search over halves, bytes, nibbles and pairs.
+static __inline WebRtc_Word16 WebRtcSpl_GetSizeInBits(WebRtc_UWord32 n) {
+  int bits;
+
+  if (0xFFFF0000 & n) {
+    bits = 16;
+  } else {
+    bits = 0;
+  }
+  if (0x0000FF00 & (n >> bits)) bits += 8;
+  if (0x000000F0 & (n >> bits)) bits += 4;
+  if (0x0000000C & (n >> bits)) bits += 2;
+  if (0x00000002 & (n >> bits)) bits += 1;
+  if (0x00000001 & (n >> bits)) bits += 1;
+
+  return bits;
+}
+
+// Returns the number of left shifts needed to normalize the 32-bit signed
+// value |a| (i.e. the count of redundant sign bits). Negative inputs are
+// bit-complemented first so the same leading-bit search applies; for a == 0
+// the complement is all ones, so this returns 0.
+static __inline int WebRtcSpl_NormW32(WebRtc_Word32 a) {
+  int zeros;
+
+  if (a <= 0) a ^= 0xFFFFFFFF;
+
+  // Binary search for the highest set bit below the sign bit.
+  if (!(0xFFFF8000 & a)) {
+    zeros = 16;
+  } else {
+    zeros = 0;
+  }
+  if (!(0xFF800000 & (a << zeros))) zeros += 8;
+  if (!(0xF8000000 & (a << zeros))) zeros += 4;
+  if (!(0xE0000000 & (a << zeros))) zeros += 2;
+  if (!(0xC0000000 & (a << zeros))) zeros += 1;
+
+  return zeros;
+}
+
+// Returns the number of leading zeros of the unsigned 32-bit value |a|,
+// i.e. the left shifts needed to normalize it; returns 0 for a == 0.
+static __inline int WebRtcSpl_NormU32(WebRtc_UWord32 a) {
+  int zeros;
+
+  if (a == 0) return 0;
+
+  // Binary search for the highest set bit.
+  if (!(0xFFFF0000 & a)) {
+    zeros = 16;
+  } else {
+    zeros = 0;
+  }
+  if (!(0xFF000000 & (a << zeros))) zeros += 8;
+  if (!(0xF0000000 & (a << zeros))) zeros += 4;
+  if (!(0xC0000000 & (a << zeros))) zeros += 2;
+  if (!(0x80000000 & (a << zeros))) zeros += 1;
+
+  return zeros;
+}
+
+// Returns the number of left shifts needed to normalize the 16-bit signed
+// value |a| (count of redundant sign bits). Negative inputs are
+// bit-complemented first; for a == 0 the complement is all ones, so this
+// returns 0.
+static __inline int WebRtcSpl_NormW16(WebRtc_Word16 a) {
+  int zeros;
+
+  if (a <= 0) a ^= 0xFFFF;
+
+  // Binary search for the highest set bit below the sign bit.
+  if (!(0xFF80 & a)) {
+    zeros = 8;
+  } else {
+    zeros = 0;
+  }
+  if (!(0xF800 & (a << zeros))) zeros += 4;
+  if (!(0xE000 & (a << zeros))) zeros += 2;
+  if (!(0xC000 & (a << zeros))) zeros += 1;
+
+  return zeros;
+}
+
+// Multiply-accumulate: returns a * b + c. The 16-bit operands are promoted
+// to int before multiplying, so the 16x16 product itself cannot overflow.
+static __inline int32_t WebRtc_MulAccumW16(int16_t a,
+                                          int16_t b,
+                                          int32_t c) {
+  return (a * b + c);
+}
+
+#endif  // WEBRTC_ARCH_ARM_V7A
+
+#endif  // WEBRTC_SPL_SPL_INL_H_
diff --git a/trunk/src/common_audio/signal_processing/include/spl_inl_armv7.h b/trunk/src/common_audio/signal_processing/include/spl_inl_armv7.h
new file mode 100644
index 0000000..5b19c2c
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/include/spl_inl_armv7.h
@@ -0,0 +1,140 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This header file includes the inline functions for ARM processors in
+// the fix point signal processing library.
+
+#ifndef WEBRTC_SPL_SPL_INL_ARMV7_H_
+#define WEBRTC_SPL_SPL_INL_ARMV7_H_
+
+// TODO(kma): Replace some assembly code with GCC intrinsics
+// (e.g. __builtin_clz).
+
+// Computes (a * b) >> 16 using SMULWB, which multiplies the 32-bit |b| by
+// the bottom halfword of |a| and returns the top 32 bits of the 48-bit
+// product.
+static __inline WebRtc_Word32 WEBRTC_SPL_MUL_16_32_RSFT16(WebRtc_Word16 a,
+                                                          WebRtc_Word32 b) {
+  WebRtc_Word32 tmp;
+  __asm__("smulwb %0, %1, %2":"=r"(tmp):"r"(b), "r"(a));
+  return tmp;
+}
+
+// Packs |a| (high halfword) and |b| (low halfword) into one 32-bit value
+// with PKHBT, then returns the most significant 32 bits of its 64-bit
+// product with |c| (SMMUL), i.e. ((a << 16 | (b & 0xFFFF)) * c) >> 32.
+// NOTE(review): despite the "32_32" name, the first operand is supplied as
+// two 16-bit halves — confirm callers pass hi/low parts consistently with
+// the C fallback.
+static __inline WebRtc_Word32 WEBRTC_SPL_MUL_32_32_RSFT32(WebRtc_Word16 a,
+                                                          WebRtc_Word16 b,
+                                                          WebRtc_Word32 c) {
+  WebRtc_Word32 tmp;
+  __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(tmp) : "r"(b), "r"(a));
+  __asm__("smmul %0, %1, %2":"=r"(tmp):"r"(tmp), "r"(c));
+  return tmp;
+}
+
+// Computes (a * b) >> 32: SMMUL returns the most significant 32 bits of
+// the signed 64-bit product.
+static __inline WebRtc_Word32 WEBRTC_SPL_MUL_32_32_RSFT32BI(WebRtc_Word32 a,
+                                                            WebRtc_Word32 b) {
+  WebRtc_Word32 tmp;
+  __asm__("smmul %0, %1, %2":"=r"(tmp):"r"(a), "r"(b));
+  return tmp;
+}
+
+// 16x16 -> 32 signed multiply of the bottom halfwords of |a| and |b|
+// (SMULBB).
+static __inline WebRtc_Word32 WEBRTC_SPL_MUL_16_16(WebRtc_Word16 a,
+                                                   WebRtc_Word16 b) {
+  WebRtc_Word32 tmp;
+  __asm__("smulbb %0, %1, %2":"=r"(tmp):"r"(a), "r"(b));
+  return tmp;
+}
+
+// Multiply-accumulate a * b + c in a single SMLABB instruction (bottom
+// halfword multiply with 32-bit accumulate).
+static __inline int32_t WebRtc_MulAccumW16(int16_t a,
+                                          int16_t b,
+                                          int32_t c) {
+  int32_t tmp = 0;
+  __asm__("smlabb %0, %1, %2, %3":"=r"(tmp):"r"(a), "r"(b), "r"(c));
+  return tmp;
+}
+
+// Saturated 16-bit addition via QADD16 (parallel saturating halfword add);
+// only the low halfword of the result is meaningful here.
+static __inline WebRtc_Word16 WebRtcSpl_AddSatW16(WebRtc_Word16 a,
+                                                  WebRtc_Word16 b) {
+  WebRtc_Word32 s_sum;
+
+  __asm__("qadd16 %0, %1, %2":"=r"(s_sum):"r"(a), "r"(b));
+
+  return (WebRtc_Word16) s_sum;
+}
+
+// Saturated 32-bit addition via the QADD instruction.
+static __inline WebRtc_Word32 WebRtcSpl_AddSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_sum;
+
+  __asm__("qadd %0, %1, %2":"=r"(l_sum):"r"(l_var1), "r"(l_var2));
+
+  return l_sum;
+}
+
+// Saturated 16-bit subtraction via QSUB16 (parallel saturating halfword
+// subtract); only the low halfword of the result is meaningful here.
+static __inline WebRtc_Word16 WebRtcSpl_SubSatW16(WebRtc_Word16 var1,
+                                                  WebRtc_Word16 var2) {
+  WebRtc_Word32 s_sub;
+
+  __asm__("qsub16 %0, %1, %2":"=r"(s_sub):"r"(var1), "r"(var2));
+
+  return (WebRtc_Word16)s_sub;
+}
+
+// Saturated 32-bit subtraction via the QSUB instruction.
+static __inline WebRtc_Word32 WebRtcSpl_SubSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_sub;
+
+  __asm__("qsub %0, %1, %2":"=r"(l_sub):"r"(l_var1), "r"(l_var2));
+
+  return l_sub;
+}
+
+// Returns the number of bits needed to represent |n|, computed as
+// 32 - clz(n). Note clz(0) yields 32 on ARM, so n == 0 returns 0, matching
+// the portable version.
+static __inline WebRtc_Word16 WebRtcSpl_GetSizeInBits(WebRtc_UWord32 n) {
+  WebRtc_Word32 tmp;
+
+  __asm__("clz %0, %1":"=r"(tmp):"r"(n));
+
+  return (WebRtc_Word16)(32 - tmp);
+}
+
+// Returns the number of left shifts needed to normalize the 32-bit signed
+// value |a| (count of redundant sign bits), computed as clz - 1 after
+// bit-complementing negative inputs.
+static __inline int WebRtcSpl_NormW32(WebRtc_Word32 a) {
+  WebRtc_Word32 tmp;
+
+  // Guard zero explicitly: the portable implementation returns 0 for
+  // a == 0, whereas complementing 0 to 0xFFFFFFFF would make clz return 0
+  // and this function return -1. (WebRtcSpl_NormU32 below guards 0 the
+  // same way.)
+  if (a == 0) return 0;
+
+  if (a < 0) a ^= 0xFFFFFFFF;
+
+  __asm__("clz %0, %1":"=r"(tmp):"r"(a));
+
+  return tmp - 1;
+}
+
+// Returns the number of leading zeros of the unsigned 32-bit value |a|
+// via the CLZ instruction; a == 0 is special-cased to return 0, matching
+// the portable version.
+static __inline int WebRtcSpl_NormU32(WebRtc_UWord32 a) {
+  int tmp;
+
+  if (a == 0) return 0;
+
+  __asm__("clz %0, %1":"=r"(tmp):"r"(a));
+
+  return tmp;
+}
+
+// Returns the number of left shifts needed to normalize the 16-bit signed
+// value |a| (count of redundant sign bits), computed as clz - 17 on the
+// sign-extended value after bit-complementing negative inputs.
+static __inline int WebRtcSpl_NormW16(WebRtc_Word16 a) {
+  WebRtc_Word32 tmp;
+
+  // Guard zero explicitly: the portable implementation returns 0 for
+  // a == 0, whereas complementing 0 to -1 would sign-extend to 0xFFFFFFFF,
+  // make clz return 0, and this function return -17.
+  if (a == 0) return 0;
+
+  if (a < 0) a ^= 0xFFFFFFFF;
+
+  __asm__("clz %0, %1":"=r"(tmp):"r"(a));
+
+  return tmp - 17;
+}
+
+// Saturates a 32-bit value into the 16-bit range using the SSAT
+// instruction (signed saturate to 16 bits).
+// NOTE(review): the "%r0"/"%r1" operand spelling is unusual for GCC ARM
+// inline asm (plain "%0"/"%1" is conventional) — confirm it assembles on
+// all supported toolchains.
+static __inline WebRtc_Word16 WebRtcSpl_SatW32ToW16(WebRtc_Word32 value32) {
+  WebRtc_Word16 out16;
+
+  __asm__("ssat %r0, #16, %r1" : "=r"(out16) : "r"(value32));
+
+  return out16;
+}
+#endif  // WEBRTC_SPL_SPL_INL_ARMV7_H_
diff --git a/trunk/src/common_audio/signal_processing/levinson_durbin.c b/trunk/src/common_audio/signal_processing/levinson_durbin.c
new file mode 100644
index 0000000..4e11cdb
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/levinson_durbin.c
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_LevinsonDurbin().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+#define SPL_LEVINSON_MAXORDER 20
+
+WebRtc_Word16 WebRtcSpl_LevinsonDurbin(WebRtc_Word32 *R, WebRtc_Word16 *A, WebRtc_Word16 *K,
+                                       WebRtc_Word16 order)
+{
+    WebRtc_Word16 i, j;
+    // Auto-correlation coefficients in high precision
+    WebRtc_Word16 R_hi[SPL_LEVINSON_MAXORDER + 1], R_low[SPL_LEVINSON_MAXORDER + 1];
+    // LPC coefficients in high precision
+    WebRtc_Word16 A_hi[SPL_LEVINSON_MAXORDER + 1], A_low[SPL_LEVINSON_MAXORDER + 1];
+    // LPC coefficients for next iteration
+    WebRtc_Word16 A_upd_hi[SPL_LEVINSON_MAXORDER + 1], A_upd_low[SPL_LEVINSON_MAXORDER + 1];
+    // Reflection coefficient in high precision
+    WebRtc_Word16 K_hi, K_low;
+    // Prediction gain Alpha in high precision and with scale factor
+    WebRtc_Word16 Alpha_hi, Alpha_low, Alpha_exp;
+    WebRtc_Word16 tmp_hi, tmp_low;
+    WebRtc_Word32 temp1W32, temp2W32, temp3W32;
+    WebRtc_Word16 norm;
+
+    // Normalize the autocorrelation R[0]...R[order+1]
+
+    norm = WebRtcSpl_NormW32(R[0]);
+
+    for (i = order; i >= 0; i--)
+    {
+        temp1W32 = WEBRTC_SPL_LSHIFT_W32(R[i], norm);
+        // Put R in hi and low format
+        R_hi[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+        R_low[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+                - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[i], 16)), 1);
+    }
+
+    // K = A[1] = -R[1] / R[0]
+
+    temp2W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[1],16)
+            + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_low[1],1); // R[1] in Q31
+    temp3W32 = WEBRTC_SPL_ABS_W32(temp2W32); // abs R[1]
+    temp1W32 = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], R_low[0]); // abs(R[1])/R[0] in Q31
+    // Put back the sign on R[1]
+    if (temp2W32 > 0)
+    {
+        temp1W32 = -temp1W32;
+    }
+
+    // Put K in hi and low format
+    K_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    K_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)K_hi, 16)), 1);
+
+    // Store first reflection coefficient
+    K[0] = K_hi;
+
+    temp1W32 = WEBRTC_SPL_RSHIFT_W32(temp1W32, 4); // A[1] in Q27
+
+    // Put A[1] in hi and low format
+    A_hi[1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    A_low[1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[1], 16)), 1);
+
+    // Alpha = R[0] * (1-K^2)
+
+    temp1W32 = (((WEBRTC_SPL_MUL_16_16(K_hi, K_low) >> 14) + WEBRTC_SPL_MUL_16_16(K_hi, K_hi))
+            << 1); // temp1W32 = k^2 in Q31
+
+    temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); // Guard against <0
+    temp1W32 = (WebRtc_Word32)0x7fffffffL - temp1W32; // temp1W32 = (1 - K[0]*K[0]) in Q31
+
+    // Store temp1W32 = 1 - K[0]*K[0] on hi and low format
+    tmp_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+
+    // Calculate Alpha in Q31
+    temp1W32 = ((WEBRTC_SPL_MUL_16_16(R_hi[0], tmp_hi)
+            + (WEBRTC_SPL_MUL_16_16(R_hi[0], tmp_low) >> 15)
+            + (WEBRTC_SPL_MUL_16_16(R_low[0], tmp_hi) >> 15)) << 1);
+
+    // Normalize Alpha and put it in hi and low format
+
+    Alpha_exp = WebRtcSpl_NormW32(temp1W32);
+    temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, Alpha_exp);
+    Alpha_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    Alpha_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+            - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)Alpha_hi, 16)), 1);
+
+    // Perform the iterative calculations in the Levinson-Durbin algorithm
+
+    for (i = 2; i <= order; i++)
+    {
+        /*                    ----
+         temp1W32 =  R[i] + > R[j]*A[i-j]
+         /
+         ----
+         j=1..i-1
+         */
+
+        temp1W32 = 0;
+
+        for (j = 1; j < i; j++)
+        {
+            // temp1W32 is in Q31
+            temp1W32 += ((WEBRTC_SPL_MUL_16_16(R_hi[j], A_hi[i-j]) << 1)
+                    + (((WEBRTC_SPL_MUL_16_16(R_hi[j], A_low[i-j]) >> 15)
+                            + (WEBRTC_SPL_MUL_16_16(R_low[j], A_hi[i-j]) >> 15)) << 1));
+        }
+
+        temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, 4);
+        temp1W32 += (WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[i], 16)
+                + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_low[i], 1));
+
+        // K = -temp1W32 / Alpha
+        temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32); // abs(temp1W32)
+        temp3W32 = WebRtcSpl_DivW32HiLow(temp2W32, Alpha_hi, Alpha_low); // abs(temp1W32)/Alpha
+
+        // Put the sign of temp1W32 back again
+        if (temp1W32 > 0)
+        {
+            temp3W32 = -temp3W32;
+        }
+
+        // Use the Alpha shifts from earlier to de-normalize
+        norm = WebRtcSpl_NormW32(temp3W32);
+        if ((Alpha_exp <= norm) || (temp3W32 == 0))
+        {
+            temp3W32 = WEBRTC_SPL_LSHIFT_W32(temp3W32, Alpha_exp);
+        } else
+        {
+            if (temp3W32 > 0)
+            {
+                temp3W32 = (WebRtc_Word32)0x7fffffffL;
+            } else
+            {
+                temp3W32 = (WebRtc_Word32)0x80000000L;
+            }
+        }
+
+        // Put K on hi and low format
+        K_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
+        K_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp3W32
+                - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)K_hi, 16)), 1);
+
+        // Store Reflection coefficient in Q15
+        K[i - 1] = K_hi;
+
+        // Test for unstable filter.
+        // If unstable return 0 and let the user decide what to do in that case
+
+        if ((WebRtc_Word32)WEBRTC_SPL_ABS_W16(K_hi) > (WebRtc_Word32)32750)
+        {
+            return 0; // Unstable filter
+        }
+
+        /*
+         Compute updated LPC coefficient: Anew[i]
+         Anew[j]= A[j] + K*A[i-j]   for j=1..i-1
+         Anew[i]= K
+         */
+
+        for (j = 1; j < i; j++)
+        {
+            // temp1W32 = A[j] in Q27
+            temp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[j],16)
+                    + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_low[j],1);
+
+            // temp1W32 += K*A[i-j] in Q27
+            temp1W32 += ((WEBRTC_SPL_MUL_16_16(K_hi, A_hi[i-j])
+                    + (WEBRTC_SPL_MUL_16_16(K_hi, A_low[i-j]) >> 15)
+                    + (WEBRTC_SPL_MUL_16_16(K_low, A_hi[i-j]) >> 15)) << 1);
+
+            // Put Anew in hi and low format
+            A_upd_hi[j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+            A_upd_low[j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+                    - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_upd_hi[j], 16)), 1);
+        }
+
+        // temp3W32 = K in Q27 (Convert from Q31 to Q27)
+        temp3W32 = WEBRTC_SPL_RSHIFT_W32(temp3W32, 4);
+
+        // Store Anew in hi and low format
+        A_upd_hi[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
+        A_upd_low[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp3W32
+                - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_upd_hi[i], 16)), 1);
+
+        // Alpha = Alpha * (1-K^2)
+
+        temp1W32 = (((WEBRTC_SPL_MUL_16_16(K_hi, K_low) >> 14)
+                + WEBRTC_SPL_MUL_16_16(K_hi, K_hi)) << 1); // K*K in Q31
+
+        temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); // Guard against <0
+        temp1W32 = (WebRtc_Word32)0x7fffffffL - temp1W32; // 1 - K*K  in Q31
+
+        // Convert 1- K^2 in hi and low format
+        tmp_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+        tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+                - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+
+        // Calculate Alpha = Alpha * (1-K^2) in Q31
+        temp1W32 = ((WEBRTC_SPL_MUL_16_16(Alpha_hi, tmp_hi)
+                + (WEBRTC_SPL_MUL_16_16(Alpha_hi, tmp_low) >> 15)
+                + (WEBRTC_SPL_MUL_16_16(Alpha_low, tmp_hi) >> 15)) << 1);
+
+        // Normalize Alpha and store it on hi and low format
+
+        norm = WebRtcSpl_NormW32(temp1W32);
+        temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, norm);
+
+        Alpha_hi = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+        Alpha_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32
+                - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)Alpha_hi, 16)), 1);
+
+        // Update the total normalization of Alpha
+        Alpha_exp = Alpha_exp + norm;
+
+        // Update A[]
+
+        for (j = 1; j <= i; j++)
+        {
+            A_hi[j] = A_upd_hi[j];
+            A_low[j] = A_upd_low[j];
+        }
+    }
+
+    /*
+     Set A[0] to 1.0 and store the A[i] i=1...order in Q12
+     (Convert from Q27 and use rounding)
+     */
+
+    A[0] = 4096;
+
+    for (i = 1; i <= order; i++)
+    {
+        // temp1W32 in Q27
+        temp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[i], 16)
+                + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_low[i], 1);
+        // Round and store upper word
+        A[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32<<1)+(WebRtc_Word32)32768, 16);
+    }
+    return 1; // Stable filters
+}
diff --git a/trunk/src/common_audio/signal_processing/lpc_to_refl_coef.c b/trunk/src/common_audio/signal_processing/lpc_to_refl_coef.c
new file mode 100644
index 0000000..2cb83c2
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/lpc_to_refl_coef.c
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_LpcToReflCoef().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+#define SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER 50
+
+// Converts LPC (direct-form prediction) coefficients to reflection
+// coefficients using the step-down recursion.
+//
+// a16       : LPC coefficients in Q12; elements 1..use_order are read and
+//             the array is modified in place during the recursion.
+// use_order : model order; determines how many reflection coefficients
+//             are produced.
+// k16       : output reflection coefficients in Q15, k16[0..use_order-1].
+//
+// NOTE(review): use_order is assumed not to exceed
+// SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER (the tmp32 scratch array is
+// sized by it) -- confirm at the call sites.
+void WebRtcSpl_LpcToReflCoef(WebRtc_Word16* a16, int use_order, WebRtc_Word16* k16)
+{
+    int m, k;
+    WebRtc_Word32 tmp32[SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER]; // Q13 intermediates
+    WebRtc_Word32 tmp_inv_denom32;
+    WebRtc_Word16 tmp_inv_denom16;
+
+    // The highest-order LPC coefficient is itself the last reflection
+    // coefficient.
+    k16[use_order - 1] = WEBRTC_SPL_LSHIFT_W16(a16[use_order], 3); //Q12<<3 => Q15
+    for (m = use_order - 1; m > 0; m--)
+    {
+        // (1 - k^2) in Q30
+        tmp_inv_denom32 = ((WebRtc_Word32)1073741823) - WEBRTC_SPL_MUL_16_16(k16[m], k16[m]);
+        // (1 - k^2) in Q15
+        tmp_inv_denom16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp_inv_denom32, 15);
+
+        for (k = 1; k <= m; k++)
+        {
+            // tmp[k] = (a[k] - RC[m] * a[m-k+1]) / (1.0 - RC[m]*RC[m]);
+
+            // [Q12<<16 - (Q15*Q12)<<1] = [Q28 - Q28] = Q28
+            tmp32[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)a16[k], 16)
+                    - WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(k16[m], a16[m-k+1]), 1);
+
+            tmp32[k] = WebRtcSpl_DivW32W16(tmp32[k], tmp_inv_denom16); //Q28/Q15 = Q13
+        }
+
+        // Write back the order-(m-1) LPC coefficients (Q13 -> Q12).
+        for (k = 1; k < m; k++)
+        {
+            a16[k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32[k], 1); //Q13>>1 => Q12
+        }
+
+        // Clamp before the Q13 -> Q15 shift so the result fits in 16 bits.
+        tmp32[m] = WEBRTC_SPL_SAT(8191, tmp32[m], -8191);
+        k16[m - 1] = (WebRtc_Word16)WEBRTC_SPL_LSHIFT_W32(tmp32[m], 2); //Q13<<2 => Q15
+    }
+    return;
+}
diff --git a/trunk/src/common_audio/signal_processing/min_max_operations.c b/trunk/src/common_audio/signal_processing/min_max_operations.c
new file mode 100644
index 0000000..57eaff7
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/min_max_operations.c
@@ -0,0 +1,265 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the implementation of functions
+ * WebRtcSpl_MaxAbsValueW16()
+ * WebRtcSpl_MaxAbsIndexW16()
+ * WebRtcSpl_MaxAbsValueW32()
+ * WebRtcSpl_MaxValueW16()
+ * WebRtcSpl_MaxIndexW16()
+ * WebRtcSpl_MaxValueW32()
+ * WebRtcSpl_MaxIndexW32()
+ * WebRtcSpl_MinValueW16()
+ * WebRtcSpl_MinIndexW16()
+ * WebRtcSpl_MinValueW32()
+ * WebRtcSpl_MinIndexW32()
+ *
+ * The description header can be found in signal_processing_library.h.
+ *
+ */
+
+#include "signal_processing_library.h"
+
+#if !(defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM_NEON))
+
+// Maximum absolute value of word16 vector.
+WebRtc_Word16 WebRtcSpl_MaxAbsValueW16(const WebRtc_Word16 *vector, WebRtc_Word16 length)
+{
+    // Accumulate in 32 bits so that abs(-32768) does not overflow; the
+    // result is saturated to WEBRTC_SPL_WORD16_MAX (32767) on return.
+    WebRtc_Word32 max_abs = 0;
+    WebRtc_Word32 candidate;
+    int i;
+
+    for (i = 0; i < length; i++)
+    {
+        candidate = WEBRTC_SPL_ABS_W32(vector[i]);
+        if (candidate > max_abs)
+        {
+            max_abs = candidate;
+        }
+    }
+    return (WebRtc_Word16)WEBRTC_SPL_MIN(max_abs, WEBRTC_SPL_WORD16_MAX);
+}
+
+#endif
+
+// Index of maximum absolute value in a word16 vector.
+WebRtc_Word16 WebRtcSpl_MaxAbsIndexW16(G_CONST WebRtc_Word16* vector, WebRtc_Word16 length)
+{
+    WebRtc_Word16 best_value;
+    WebRtc_Word16 best_index = 0;
+    WebRtc_Word16 current;
+    WebRtc_Word16 i;
+
+    // Seed with element 0; later elements win only on a strictly larger
+    // absolute value, so the first occurrence of the maximum is returned.
+    best_value = WEBRTC_SPL_ABS_W16(vector[0]);
+    for (i = 1; i < length; i++)
+    {
+        current = WEBRTC_SPL_ABS_W16(vector[i]);
+        if (current > best_value)
+        {
+            best_value = current;
+            best_index = i;
+        }
+    }
+    return best_index;
+}
+
+// Maximum absolute value of word32 vector.
+WebRtc_Word32 WebRtcSpl_MaxAbsValueW32(G_CONST WebRtc_Word32 *vector, WebRtc_Word16 length)
+{
+    // Track the maximum as unsigned so that abs(WEBRTC_SPL_WORD32_MIN),
+    // which wraps to 0x80000000, still compares as the largest magnitude.
+    WebRtc_UWord32 max_abs = 0;
+    WebRtc_UWord32 candidate;
+    int i;
+
+    for (i = 0; i < length; i++)
+    {
+        candidate = WEBRTC_SPL_ABS_W32(vector[i]);
+        if (candidate > max_abs)
+        {
+            max_abs = candidate;
+        }
+    }
+    // Saturate to WEBRTC_SPL_WORD32_MAX before converting back to signed.
+    return (WebRtc_Word32)(WEBRTC_SPL_MIN(max_abs, WEBRTC_SPL_WORD32_MAX));
+}
+
+// Maximum value of word16 vector.
+#ifndef XSCALE_OPT
+WebRtc_Word16 WebRtcSpl_MaxValueW16(G_CONST WebRtc_Word16* vector, WebRtc_Word16 length)
+{
+    // Linear scan; the first element seeds the running maximum.
+    WebRtc_Word16 maximum = vector[0];
+    WebRtc_Word16 i;
+
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] > maximum)
+        {
+            maximum = vector[i];
+        }
+    }
+    return maximum;
+}
+#else
+#pragma message(">> WebRtcSpl_MaxValueW16 is excluded from this build")
+#endif
+
+// Index of maximum value in a word16 vector.
+WebRtc_Word16 WebRtcSpl_MaxIndexW16(G_CONST WebRtc_Word16 *vector, WebRtc_Word16 length)
+{
+    WebRtc_Word16 best = vector[0];
+    WebRtc_Word16 best_index = 0;
+    WebRtc_Word16 i;
+
+    // Strict '>' keeps the earliest index on ties.
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] > best)
+        {
+            best = vector[i];
+            best_index = i;
+        }
+    }
+    return best_index;
+}
+
+// Maximum value of word32 vector.
+#ifndef XSCALE_OPT
+WebRtc_Word32 WebRtcSpl_MaxValueW32(G_CONST WebRtc_Word32* vector, WebRtc_Word16 length)
+{
+    // Linear scan; the first element seeds the running maximum.
+    WebRtc_Word32 maximum = vector[0];
+    WebRtc_Word16 i;
+
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] > maximum)
+        {
+            maximum = vector[i];
+        }
+    }
+    return maximum;
+}
+#else
+#pragma message(">> WebRtcSpl_MaxValueW32 is excluded from this build")
+#endif
+
+// Index of maximum value in a word32 vector.
+WebRtc_Word16 WebRtcSpl_MaxIndexW32(G_CONST WebRtc_Word32* vector, WebRtc_Word16 length)
+{
+    WebRtc_Word32 best = vector[0];
+    WebRtc_Word16 best_index = 0;
+    WebRtc_Word16 i;
+
+    // Strict '>' keeps the earliest index on ties.
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] > best)
+        {
+            best = vector[i];
+            best_index = i;
+        }
+    }
+    return best_index;
+}
+
+// Minimum value of word16 vector.
+WebRtc_Word16 WebRtcSpl_MinValueW16(G_CONST WebRtc_Word16 *vector, WebRtc_Word16 length)
+{
+    // Linear scan for the smallest element.
+    WebRtc_Word16 minimum = vector[0];
+    WebRtc_Word16 i;
+
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] < minimum)
+        {
+            minimum = vector[i];
+        }
+    }
+    return minimum;
+}
+
+// Index of minimum value in a word16 vector.
+#ifndef XSCALE_OPT
+WebRtc_Word16 WebRtcSpl_MinIndexW16(G_CONST WebRtc_Word16* vector, WebRtc_Word16 length)
+{
+    WebRtc_Word16 smallest = vector[0];
+    WebRtc_Word16 smallest_index = 0;
+    WebRtc_Word16 i;
+
+    // Strict '<' keeps the earliest index on ties.
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] < smallest)
+        {
+            smallest = vector[i];
+            smallest_index = i;
+        }
+    }
+    return smallest_index;
+}
+#else
+#pragma message(">> WebRtcSpl_MinIndexW16 is excluded from this build")
+#endif
+
+// Minimum value of word32 vector.
+WebRtc_Word32 WebRtcSpl_MinValueW32(G_CONST WebRtc_Word32 *vector, WebRtc_Word16 length)
+{
+    // Linear scan for the smallest element.
+    WebRtc_Word32 minimum = vector[0];
+    WebRtc_Word16 i;
+
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] < minimum)
+        {
+            minimum = vector[i];
+        }
+    }
+    return minimum;
+}
+
+// Index of minimum value in a word32 vector.
+#ifndef XSCALE_OPT
+WebRtc_Word16 WebRtcSpl_MinIndexW32(G_CONST WebRtc_Word32* vector, WebRtc_Word16 length)
+{
+    WebRtc_Word32 smallest = vector[0];
+    WebRtc_Word16 smallest_index = 0;
+    WebRtc_Word16 i;
+
+    // Strict '<' keeps the earliest index on ties.
+    for (i = 1; i < length; i++)
+    {
+        if (vector[i] < smallest)
+        {
+            smallest = vector[i];
+            smallest_index = i;
+        }
+    }
+    return smallest_index;
+}
+#else
+#pragma message(">> WebRtcSpl_MinIndexW32 is excluded from this build")
+#endif
diff --git a/trunk/src/common_audio/signal_processing/min_max_operations_neon.c b/trunk/src/common_audio/signal_processing/min_max_operations_neon.c
new file mode 100644
index 0000000..158bcc1
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/min_max_operations_neon.c
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if (defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM_NEON))
+
+#include <arm_neon.h>
+
+#include "signal_processing_library.h"
+
+// Maximum absolute value of word16 vector.
+//
+// NEON-accelerated variant: processes eight samples per iteration, then
+// falls back to scalar code for the remaining (length % 8) samples.
+WebRtc_Word16 WebRtcSpl_MaxAbsValueW16(const WebRtc_Word16* vector,
+                                       WebRtc_Word16 length) {
+  WebRtc_Word32 temp_max = 0;
+  WebRtc_Word32 abs_val;
+  WebRtc_Word16 tot_max;
+  int i;
+
+  // d25 accumulates the running maximum across loop iterations.
+  __asm__("vmov.i16 d25, #0" : : : "d25");
+
+  for (i = 0; i < length - 7; i += 8) {
+    // Load 8 samples, take element-wise absolute values, then fold the
+    // eight lanes pairwise into d25.
+    __asm__("vld1.16 {d26, d27}, [%0]" : : "r"(&vector[i]) : "q13");
+    __asm__("vabs.s16 q13, q13" : : : "q13");
+    __asm__("vpmax.s16 d26, d27" : : : "q13");
+    __asm__("vpmax.s16 d25, d26" : : : "d25", "d26");
+  }
+  // Reduce the four lanes of d25 to a single scalar maximum.
+  __asm__("vpmax.s16 d25, d25" : : : "d25");
+  __asm__("vpmax.s16 d25, d25" : : : "d25");
+  __asm__("vmov.s16 %0, d25[0]" : "=r"(temp_max): : "d25");
+
+  // Scalar tail loop for the samples the vector loop did not cover.
+  for (; i < length; i++) {
+    abs_val = WEBRTC_SPL_ABS_W32((vector[i]));
+    if (abs_val > temp_max) {
+      temp_max = abs_val;
+    }
+  }
+  // Saturate to WEBRTC_SPL_WORD16_MAX before narrowing back to 16 bits.
+  // NOTE(review): vabs.s16 does not saturate, so -32768 keeps its sign in
+  // the NEON loop and can be ignored by vpmax, while the scalar path maps
+  // it to 32767 -- confirm the two paths are meant to agree for that input.
+  tot_max = (WebRtc_Word16)WEBRTC_SPL_MIN(temp_max, WEBRTC_SPL_WORD16_MAX);
+  return tot_max;
+}
+
+#endif
diff --git a/trunk/src/common_audio/signal_processing/randomization_functions.c b/trunk/src/common_audio/signal_processing/randomization_functions.c
new file mode 100644
index 0000000..04271ad
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/randomization_functions.c
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains implementations of the randomization functions
+ * WebRtcSpl_IncreaseSeed()
+ * WebRtcSpl_RandU()
+ * WebRtcSpl_RandN()
+ * WebRtcSpl_RandUArray()
+ *
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// 512-entry table of zero-mean 16-bit samples, indexed by (seed >> 23)
+// in WebRtcSpl_RandN() below (the shift yields indices 0..511).
+// NOTE(review): presumably drawn from a normal distribution, matching the
+// RandN name -- scale/Q-format not documented here; confirm before reuse.
+static const WebRtc_Word16 kRandNTable[] = {
+    9178,    -7260,       40,    10189,     4894,    -3531,   -13779,    14764,
+   -4008,    -8884,    -8990,     1008,     7368,     5184,     3251,    -5817,
+   -9786,     5963,     1770,     8066,    -7135,    10772,    -2298,     1361,
+    6484,     2241,    -8633,      792,      199,    -3344,     6553,   -10079,
+  -15040,       95,    11608,   -12469,    14161,    -4176,     2476,     6403,
+   13685,   -16005,     6646,     2239,    10916,    -3004,     -602,    -3141,
+    2142,    14144,    -5829,     5305,     8209,     4713,     2697,    -5112,
+   16092,    -1210,    -2891,    -6631,    -5360,   -11878,    -6781,    -2739,
+   -6392,      536,    10923,    10872,     5059,    -4748,    -7770,     5477,
+      38,    -1025,    -2892,     1638,     6304,    14375,   -11028,     1553,
+   -1565,    10762,     -393,     4040,     5257,    12310,     6554,    -4799,
+    4899,    -6354,     1603,    -1048,    -2220,     8247,     -186,    -8944,
+  -12004,     2332,     4801,    -4933,     6371,      131,     8614,    -5927,
+   -8287,   -22760,     4033,   -15162,     3385,     3246,     3153,    -5250,
+    3766,      784,     6494,      -62,     3531,    -1582,    15572,      662,
+   -3952,     -330,    -3196,      669,     7236,    -2678,    -6569,    23319,
+   -8645,     -741,    14830,   -15976,     4903,      315,   -11342,    10311,
+    1858,    -7777,     2145,     5436,     5677,     -113,   -10033,      826,
+   -1353,    17210,     7768,      986,    -1471,     8291,    -4982,     8207,
+  -14911,    -6255,    -2449,   -11881,    -7059,   -11703,    -4338,     8025,
+    7538,    -2823,   -12490,     9470,    -1613,    -2529,   -10092,    -7807,
+    9480,     6970,   -12844,     5123,     3532,     4816,     4803,    -8455,
+   -5045,    14032,    -4378,    -1643,     5756,   -11041,    -2732,   -16618,
+   -6430,   -18375,    -3320,     6098,     5131,    -4269,    -8840,     2482,
+   -7048,     1547,   -21890,    -6505,    -7414,     -424,   -11722,     7955,
+    1653,   -17299,     1823,      473,    -9232,     3337,     1111,      873,
+    4018,    -8982,     9889,     3531,   -11763,    -3799,     7373,    -4539,
+    3231,     7054,    -8537,     7616,     6244,    16635,      447,    -2915,
+   13967,      705,    -2669,    -1520,    -1771,   -16188,     5956,     5117,
+    6371,    -9936,    -1448,     2480,     5128,     7550,    -8130,     5236,
+    8213,    -6443,     7707,    -1950,   -13811,     7218,     7031,    -3883,
+      67,     5731,    -2874,    13480,    -3743,     9298,    -3280,     3552,
+   -4425,      -18,    -3785,    -9988,    -5357,     5477,   -11794,     2117,
+    1416,    -9935,     3376,      802,    -5079,    -8243,    12652,       66,
+    3653,    -2368,     6781,   -21895,    -7227,     2487,     7839,     -385,
+    6646,    -7016,    -4658,     5531,    -1705,      834,      129,     3694,
+   -1343,     2238,   -22640,    -6417,   -11139,    11301,    -2945,    -3494,
+   -5626,      185,    -3615,    -2041,    -7972,    -3106,      -60,   -23497,
+   -1566,    17064,     3519,     2518,      304,    -6805,   -10269,     2105,
+    1936,     -426,     -736,    -8122,    -1467,     4238,    -6939,   -13309,
+     360,     7402,    -7970,    12576,     3287,    12194,    -6289,   -16006,
+    9171,     4042,    -9193,     9123,    -2512,     6388,    -4734,    -8739,
+    1028,    -5406,    -1696,     5889,     -666,    -4736,     4971,     3565,
+    9362,    -6292,     3876,    -3652,   -19666,     7523,    -4061,      391,
+  -11773,     7502,    -3763,     4929,    -9478,    13278,     2805,     4496,
+    7814,    16419,    12455,   -14773,     2127,    -2746,     3763,     4847,
+    3698,     6978,     4751,    -6957,    -3581,      -45,     6252,     1513,
+   -4797,    -7925,    11270,    16188,    -2359,    -5269,     9376,   -10777,
+    7262,    20031,    -6515,    -2208,    -5353,     8085,    -1341,    -1303,
+    7333,     5576,     3625,     5763,    -7931,     9833,    -3371,   -10305,
+    6534,   -13539,    -9971,      997,     8464,    -4064,    -1495,     1857,
+   13624,     5458,     9490,   -11086,    -4524,    12022,     -550,     -198,
+     408,    -8455,    -7068,    10289,     9712,    -3366,     9028,    -7621,
+   -5243,     2362,     6909,     4672,    -4933,    -1799,     4709,    -4563,
+     -62,     -566,     1624,    -7010,    14730,   -17791,    -3697,    -2344,
+   -1741,     7099,    -9509,    -6855,    -1989,     3495,    -2289,     2031,
+   12784,      891,    14189,    -3963,    -5683,      421,   -12575,     1724,
+  -12682,    -5970,    -8169,     3143,    -1824,    -5488,    -5130,     8536,
+   12799,      794,     5738,     3459,   -11689,     -258,    -3738,    -3775,
+   -8742,     2333,     8312,    -9383,    10331,    13119,     8398,    10644,
+  -19433,    -6446,   -16277,   -11793,    16284,     9345,    15222,    15834,
+    2009,    -7349,      130,   -14547,      338,    -5998,     3337,    21492,
+    2406,     7703,     -951,    11196,     -564,     3406,     2217,     4806,
+    2374,    -5797,    11839,     8940,   -11874,    18213,     2855,    10492
+};
+
+WebRtc_UWord32 WebRtcSpl_IncreaseSeed(WebRtc_UWord32 *seed)
+{
+    // Linear congruential step (multiplier 69069, increment 1), reduced
+    // modulo WEBRTC_SPL_MAX_SEED_USED (a power of two) by masking.
+    *seed = (*seed * ((WebRtc_Word32)69069) + 1) & (WEBRTC_SPL_MAX_SEED_USED - 1);
+    return *seed;
+}
+
+WebRtc_Word16 WebRtcSpl_RandU(WebRtc_UWord32 *seed)
+{
+    // Advance the generator and hand back the top 16 bits of the state.
+    WebRtc_UWord32 new_seed = WebRtcSpl_IncreaseSeed(seed);
+    return (WebRtc_Word16)(new_seed >> 16);
+}
+
+WebRtc_Word16 WebRtcSpl_RandN(WebRtc_UWord32 *seed)
+{
+    // Advance the generator, then map the top bits of the state
+    // (seed >> 23, i.e. 0..511) into the 512-entry sample table.
+    WebRtc_UWord32 new_seed = WebRtcSpl_IncreaseSeed(seed);
+    return kRandNTable[new_seed >> 23];
+}
+
+// Creates an array of uniformly distributed variables
+WebRtc_Word16 WebRtcSpl_RandUArray(WebRtc_Word16* vector,
+                                   WebRtc_Word16 vector_length,
+                                   WebRtc_UWord32* seed)
+{
+    int idx;
+
+    // Draw one uniform sample per slot; returns the number written.
+    for (idx = 0; idx < vector_length; idx++)
+    {
+        vector[idx] = WebRtcSpl_RandU(seed);
+    }
+    return vector_length;
+}
diff --git a/trunk/src/common_audio/signal_processing/refl_coef_to_lpc.c b/trunk/src/common_audio/signal_processing/refl_coef_to_lpc.c
new file mode 100644
index 0000000..d07804d
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/refl_coef_to_lpc.c
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_ReflCoefToLpc().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// Converts reflection coefficients into direct-form LPC coefficients
+// using the step-up (Levinson) recursion.
+//
+// k         : reflection coefficients in Q15 (use_order values).
+// use_order : model order.
+// a         : output LPC coefficients in Q12; a[0] = 4096 (1.0 in Q12)
+//             and a[1..use_order] receive the predictor coefficients.
+//
+// NOTE(review): use_order is assumed to be <= WEBRTC_SPL_MAX_LPC_ORDER
+// (the scratch array any[] is indexed up to use_order) -- confirm callers.
+void WebRtcSpl_ReflCoefToLpc(G_CONST WebRtc_Word16 *k, int use_order, WebRtc_Word16 *a)
+{
+    WebRtc_Word16 any[WEBRTC_SPL_MAX_LPC_ORDER + 1]; // order-(m+1) scratch
+    WebRtc_Word16 *aptr, *aptr2, *anyptr;
+    G_CONST WebRtc_Word16 *kptr;
+    int m, i;
+
+    kptr = k;
+    *a = 4096; // i.e., (Word16_MAX >> 3)+1.
+    *any = *a;
+    a[1] = WEBRTC_SPL_RSHIFT_W16((*k), 3); // Q15 -> Q12
+
+    // Each pass extends the order-m solution in a[] to order m+1 in any[],
+    // then copies it back.
+    for (m = 1; m < use_order; m++)
+    {
+        kptr++;
+        aptr = a;
+        aptr++;        // aptr   -> a[1]
+        aptr2 = &a[m]; // aptr2  -> a[m], walked backwards
+        anyptr = any;
+        anyptr++;      // anyptr -> any[1]
+
+        any[m + 1] = WEBRTC_SPL_RSHIFT_W16((*kptr), 3); // Q15 -> Q12
+        // any[i+1] = a[i+1] + k[m] * a[m-i]  (Q12 + (Q15*Q12)>>15 = Q12)
+        for (i = 0; i < m; i++)
+        {
+            *anyptr = (*aptr)
+                    + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT((*aptr2), (*kptr), 15);
+            anyptr++;
+            aptr++;
+            aptr2--;
+        }
+
+        // Copy the order-(m+1) coefficients any[0..m+1] back into a[].
+        aptr = a;
+        anyptr = any;
+        for (i = 0; i < (m + 2); i++)
+        {
+            *aptr = *anyptr;
+            aptr++;
+            anyptr++;
+        }
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/resample.c b/trunk/src/common_audio/signal_processing/resample.c
new file mode 100644
index 0000000..19d1778
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/resample.c
@@ -0,0 +1,505 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the resampling functions for 22 kHz.
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+#include "resample_by_2_internal.h"
+
+// Declaration of internally used functions
+static void WebRtcSpl_32khzTo22khzIntToShort(const WebRtc_Word32 *In, WebRtc_Word16 *Out,
+                                             const WebRtc_Word32 K);
+
+void WebRtcSpl_32khzTo22khzIntToInt(const WebRtc_Word32 *In, WebRtc_Word32 *Out,
+                                    const WebRtc_Word32 K);
+
+// interpolation coefficients
+static const WebRtc_Word16 kCoefficients32To22[5][9] = {
+        {127, -712,  2359, -6333, 23456, 16775, -3695,  945, -154},
+        {-39,  230,  -830,  2785, 32366, -2324,   760, -218,   38},
+        {117, -663,  2222, -6133, 26634, 13070, -3174,  831, -137},
+        {-77,  457, -1677,  5958, 31175, -4136,  1405, -408,   71},
+        { 98, -560,  1900, -5406, 29240,  9423, -2480,  663, -110}
+};
+
+//////////////////////
+// 22 kHz -> 16 kHz //
+//////////////////////
+
+// number of subblocks; options: 1, 2, 4, 5, 10
+#define SUB_BLOCKS_22_16    5
+
+// 22 -> 16 resampler
+void WebRtcSpl_Resample22khzTo16khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                    WebRtcSpl_State22khzTo16khz* state, WebRtc_Word32* tmpmem)
+{
+    int k;
+
+    // process two blocks of 10/SUB_BLOCKS_22_16 ms (to reduce temp buffer size)
+    for (k = 0; k < SUB_BLOCKS_22_16; k++)
+    {
+        ///// 22 --> 44 /////
+        // WebRtc_Word16  in[220/SUB_BLOCKS_22_16]
+        // WebRtc_Word32 out[440/SUB_BLOCKS_22_16]
+        /////
+        WebRtcSpl_UpBy2ShortToInt(in, 220 / SUB_BLOCKS_22_16, tmpmem + 16, state->S_22_44);
+
+        ///// 44 --> 32 /////
+        // WebRtc_Word32  in[440/SUB_BLOCKS_22_16]
+        // WebRtc_Word32 out[320/SUB_BLOCKS_22_16]
+        /////
+        // copy state to and from input array
+        tmpmem[8] = state->S_44_32[0];
+        tmpmem[9] = state->S_44_32[1];
+        tmpmem[10] = state->S_44_32[2];
+        tmpmem[11] = state->S_44_32[3];
+        tmpmem[12] = state->S_44_32[4];
+        tmpmem[13] = state->S_44_32[5];
+        tmpmem[14] = state->S_44_32[6];
+        tmpmem[15] = state->S_44_32[7];
+        state->S_44_32[0] = tmpmem[440 / SUB_BLOCKS_22_16 + 8];
+        state->S_44_32[1] = tmpmem[440 / SUB_BLOCKS_22_16 + 9];
+        state->S_44_32[2] = tmpmem[440 / SUB_BLOCKS_22_16 + 10];
+        state->S_44_32[3] = tmpmem[440 / SUB_BLOCKS_22_16 + 11];
+        state->S_44_32[4] = tmpmem[440 / SUB_BLOCKS_22_16 + 12];
+        state->S_44_32[5] = tmpmem[440 / SUB_BLOCKS_22_16 + 13];
+        state->S_44_32[6] = tmpmem[440 / SUB_BLOCKS_22_16 + 14];
+        state->S_44_32[7] = tmpmem[440 / SUB_BLOCKS_22_16 + 15];
+
+        WebRtcSpl_Resample44khzTo32khz(tmpmem + 8, tmpmem, 40 / SUB_BLOCKS_22_16);
+
+        ///// 32 --> 16 /////
+        // WebRtc_Word32  in[320/SUB_BLOCKS_22_16]
+        // WebRtc_Word32 out[160/SUB_BLOCKS_22_16]
+        /////
+        WebRtcSpl_DownBy2IntToShort(tmpmem, 320 / SUB_BLOCKS_22_16, out, state->S_32_16);
+
+        // move input/output pointers 10/SUB_BLOCKS_22_16 ms seconds ahead
+        in += 220 / SUB_BLOCKS_22_16;
+        out += 160 / SUB_BLOCKS_22_16;
+    }
+}
+
+// initialize state of 22 -> 16 resampler
+// Zeroes all three 8-tap filter-state arrays; call once per stream before
+// the first WebRtcSpl_Resample22khzTo16khz() call.
+void WebRtcSpl_ResetResample22khzTo16khz(WebRtcSpl_State22khzTo16khz* state)
+{
+    int k;
+    for (k = 0; k < 8; k++)
+    {
+        state->S_22_44[k] = 0;
+        state->S_44_32[k] = 0;
+        state->S_32_16[k] = 0;
+    }
+}
+
+//////////////////////
+// 16 kHz -> 22 kHz //
+//////////////////////
+
+// number of subblocks; options: 1, 2, 4, 5, 10
+#define SUB_BLOCKS_16_22    4
+
+// 16 -> 22 resampler
+// Chain: 16 kHz --(x2)--> 32 kHz --(11/16)--> 22 kHz.
+// in:     160 samples (10 ms at 16 kHz);  out: 220 samples (10 ms at 22 kHz).
+// state:  reset with WebRtcSpl_ResetResample16khzTo22khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least
+//         320 / SUB_BLOCKS_16_22 + 8 WebRtc_Word32 values.
+void WebRtcSpl_Resample16khzTo22khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                    WebRtcSpl_State16khzTo22khz* state, WebRtc_Word32* tmpmem)
+{
+    int k;
+
+    // process SUB_BLOCKS_16_22 sub-blocks of 10/SUB_BLOCKS_16_22 ms each
+    // (to reduce temp buffer size)
+    for (k = 0; k < SUB_BLOCKS_16_22; k++)
+    {
+        ///// 16 --> 32 /////
+        // WebRtc_Word16  in[160/SUB_BLOCKS_16_22]
+        // WebRtc_Word32 out[320/SUB_BLOCKS_16_22]
+        /////
+        WebRtcSpl_UpBy2ShortToInt(in, 160 / SUB_BLOCKS_16_22, tmpmem + 8, state->S_16_32);
+
+        ///// 32 --> 22 /////
+        // WebRtc_Word32  in[320/SUB_BLOCKS_16_22]
+        // WebRtc_Word32 out[220/SUB_BLOCKS_16_22]
+        /////
+        // copy state to and from input array:
+        // 8 history samples are prepended in front of the block (tmpmem[0..7])
+        // and the last 8 samples of the block are saved as next call's history.
+        tmpmem[0] = state->S_32_22[0];
+        tmpmem[1] = state->S_32_22[1];
+        tmpmem[2] = state->S_32_22[2];
+        tmpmem[3] = state->S_32_22[3];
+        tmpmem[4] = state->S_32_22[4];
+        tmpmem[5] = state->S_32_22[5];
+        tmpmem[6] = state->S_32_22[6];
+        tmpmem[7] = state->S_32_22[7];
+        state->S_32_22[0] = tmpmem[320 / SUB_BLOCKS_16_22];
+        state->S_32_22[1] = tmpmem[320 / SUB_BLOCKS_16_22 + 1];
+        state->S_32_22[2] = tmpmem[320 / SUB_BLOCKS_16_22 + 2];
+        state->S_32_22[3] = tmpmem[320 / SUB_BLOCKS_16_22 + 3];
+        state->S_32_22[4] = tmpmem[320 / SUB_BLOCKS_16_22 + 4];
+        state->S_32_22[5] = tmpmem[320 / SUB_BLOCKS_16_22 + 5];
+        state->S_32_22[6] = tmpmem[320 / SUB_BLOCKS_16_22 + 6];
+        state->S_32_22[7] = tmpmem[320 / SUB_BLOCKS_16_22 + 7];
+
+        WebRtcSpl_32khzTo22khzIntToShort(tmpmem, out, 20 / SUB_BLOCKS_16_22);
+
+        // move input/output pointers 10/SUB_BLOCKS_16_22 ms ahead
+        in += 160 / SUB_BLOCKS_16_22;
+        out += 220 / SUB_BLOCKS_16_22;
+    }
+}
+
+// initialize state of 16 -> 22 resampler
+// Zeroes both 8-tap filter-state arrays; call once per stream before the
+// first WebRtcSpl_Resample16khzTo22khz() call.
+void WebRtcSpl_ResetResample16khzTo22khz(WebRtcSpl_State16khzTo22khz* state)
+{
+    int k;
+    for (k = 0; k < 8; k++)
+    {
+        state->S_16_32[k] = 0;
+        state->S_32_22[k] = 0;
+    }
+}
+
+//////////////////////
+// 22 kHz ->  8 kHz //
+//////////////////////
+
+// number of subblocks; options: 1, 2, 5, 10
+#define SUB_BLOCKS_22_8     2
+
+// 22 -> 8 resampler
+// Chain: 22 kHz --(LP)--> 22 kHz --(8/11)--> 16 kHz --(1/2)--> 8 kHz.
+// in:     220 samples (10 ms at 22 kHz);  out: 80 samples (10 ms at 8 kHz).
+// state:  reset with WebRtcSpl_ResetResample22khzTo8khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least
+//         220 / SUB_BLOCKS_22_8 + 16 WebRtc_Word32 values.
+void WebRtcSpl_Resample22khzTo8khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State22khzTo8khz* state, WebRtc_Word32* tmpmem)
+{
+    int k;
+
+    // process two blocks of 10/SUB_BLOCKS_22_8 ms (to reduce temp buffer size)
+    for (k = 0; k < SUB_BLOCKS_22_8; k++)
+    {
+        ///// 22 --> 22 lowpass /////
+        // WebRtc_Word16  in[220/SUB_BLOCKS_22_8]
+        // WebRtc_Word32 out[220/SUB_BLOCKS_22_8]
+        /////
+        // Anti-alias lowpass at the original rate before the rate change.
+        WebRtcSpl_LPBy2ShortToInt(in, 220 / SUB_BLOCKS_22_8, tmpmem + 16, state->S_22_22);
+
+        ///// 22 --> 16 /////
+        // WebRtc_Word32  in[220/SUB_BLOCKS_22_8]
+        // WebRtc_Word32 out[160/SUB_BLOCKS_22_8]
+        /////
+        // copy state to and from input array:
+        // 8 history samples are prepended in front of the block (tmpmem[8..15])
+        // and the last 8 samples of the block are saved as next call's history.
+        tmpmem[8] = state->S_22_16[0];
+        tmpmem[9] = state->S_22_16[1];
+        tmpmem[10] = state->S_22_16[2];
+        tmpmem[11] = state->S_22_16[3];
+        tmpmem[12] = state->S_22_16[4];
+        tmpmem[13] = state->S_22_16[5];
+        tmpmem[14] = state->S_22_16[6];
+        tmpmem[15] = state->S_22_16[7];
+        state->S_22_16[0] = tmpmem[220 / SUB_BLOCKS_22_8 + 8];
+        state->S_22_16[1] = tmpmem[220 / SUB_BLOCKS_22_8 + 9];
+        state->S_22_16[2] = tmpmem[220 / SUB_BLOCKS_22_8 + 10];
+        state->S_22_16[3] = tmpmem[220 / SUB_BLOCKS_22_8 + 11];
+        state->S_22_16[4] = tmpmem[220 / SUB_BLOCKS_22_8 + 12];
+        state->S_22_16[5] = tmpmem[220 / SUB_BLOCKS_22_8 + 13];
+        state->S_22_16[6] = tmpmem[220 / SUB_BLOCKS_22_8 + 14];
+        state->S_22_16[7] = tmpmem[220 / SUB_BLOCKS_22_8 + 15];
+
+        // 22->16 is the same 8/11 polyphase ratio as 44->32, reused here.
+        WebRtcSpl_Resample44khzTo32khz(tmpmem + 8, tmpmem, 20 / SUB_BLOCKS_22_8);
+
+        ///// 16 --> 8 /////
+        // WebRtc_Word32 in[160/SUB_BLOCKS_22_8]
+        // WebRtc_Word16 out[80/SUB_BLOCKS_22_8]
+        /////
+        WebRtcSpl_DownBy2IntToShort(tmpmem, 160 / SUB_BLOCKS_22_8, out, state->S_16_8);
+
+        // move input/output pointers 10/SUB_BLOCKS_22_8 ms ahead
+        in += 220 / SUB_BLOCKS_22_8;
+        out += 80 / SUB_BLOCKS_22_8;
+    }
+}
+
+// initialize state of 22 -> 8 resampler
+// Zeroes all filter-state arrays (S_22_22 holds 16 values, the rest 8 each);
+// call once per stream before the first WebRtcSpl_Resample22khzTo8khz() call.
+void WebRtcSpl_ResetResample22khzTo8khz(WebRtcSpl_State22khzTo8khz* state)
+{
+    int k;
+    for (k = 0; k < 8; k++)
+    {
+        state->S_22_22[k] = 0;
+        state->S_22_22[k + 8] = 0;
+        state->S_22_16[k] = 0;
+        state->S_16_8[k] = 0;
+    }
+}
+
+//////////////////////
+//  8 kHz -> 22 kHz //
+//////////////////////
+
+// number of subblocks; options: 1, 2, 5, 10
+#define SUB_BLOCKS_8_22     2
+
+// 8 -> 22 resampler
+// Chain: 8 kHz --(x2)--> 16 kHz --(11/16)--> 11 kHz --(x2)--> 22 kHz.
+// in:     80 samples (10 ms at 8 kHz);  out: 220 samples (10 ms at 22 kHz).
+// state:  reset with WebRtcSpl_ResetResample8khzTo22khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least
+//         160 / SUB_BLOCKS_8_22 + 18 WebRtc_Word32 values.
+void WebRtcSpl_Resample8khzTo22khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State8khzTo22khz* state, WebRtc_Word32* tmpmem)
+{
+    int k;
+
+    // process two blocks of 10/SUB_BLOCKS_8_22 ms (to reduce temp buffer size)
+    for (k = 0; k < SUB_BLOCKS_8_22; k++)
+    {
+        ///// 8 --> 16 /////
+        // WebRtc_Word16  in[80/SUB_BLOCKS_8_22]
+        // WebRtc_Word32 out[160/SUB_BLOCKS_8_22]
+        /////
+        WebRtcSpl_UpBy2ShortToInt(in, 80 / SUB_BLOCKS_8_22, tmpmem + 18, state->S_8_16);
+
+        ///// 16 --> 11 /////
+        // WebRtc_Word32  in[160/SUB_BLOCKS_8_22]
+        // WebRtc_Word32 out[110/SUB_BLOCKS_8_22]
+        /////
+        // copy state to and from input array:
+        // 8 history samples are prepended in front of the block (tmpmem[10..17])
+        // and the last 8 samples of the block are saved as next call's history.
+        tmpmem[10] = state->S_16_11[0];
+        tmpmem[11] = state->S_16_11[1];
+        tmpmem[12] = state->S_16_11[2];
+        tmpmem[13] = state->S_16_11[3];
+        tmpmem[14] = state->S_16_11[4];
+        tmpmem[15] = state->S_16_11[5];
+        tmpmem[16] = state->S_16_11[6];
+        tmpmem[17] = state->S_16_11[7];
+        state->S_16_11[0] = tmpmem[160 / SUB_BLOCKS_8_22 + 10];
+        state->S_16_11[1] = tmpmem[160 / SUB_BLOCKS_8_22 + 11];
+        state->S_16_11[2] = tmpmem[160 / SUB_BLOCKS_8_22 + 12];
+        state->S_16_11[3] = tmpmem[160 / SUB_BLOCKS_8_22 + 13];
+        state->S_16_11[4] = tmpmem[160 / SUB_BLOCKS_8_22 + 14];
+        state->S_16_11[5] = tmpmem[160 / SUB_BLOCKS_8_22 + 15];
+        state->S_16_11[6] = tmpmem[160 / SUB_BLOCKS_8_22 + 16];
+        state->S_16_11[7] = tmpmem[160 / SUB_BLOCKS_8_22 + 17];
+
+        // 16->11 is the same 11/16 polyphase ratio as 32->22, reused here.
+        WebRtcSpl_32khzTo22khzIntToInt(tmpmem + 10, tmpmem, 10 / SUB_BLOCKS_8_22);
+
+        ///// 11 --> 22 /////
+        // WebRtc_Word32  in[110/SUB_BLOCKS_8_22]
+        // WebRtc_Word16 out[220/SUB_BLOCKS_8_22]
+        /////
+        WebRtcSpl_UpBy2IntToShort(tmpmem, 110 / SUB_BLOCKS_8_22, out, state->S_11_22);
+
+        // move input/output pointers 10/SUB_BLOCKS_8_22 ms ahead
+        in += 80 / SUB_BLOCKS_8_22;
+        out += 220 / SUB_BLOCKS_8_22;
+    }
+}
+
+// initialize state of 8 -> 22 resampler
+// Zeroes all three 8-tap filter-state arrays; call once per stream before
+// the first WebRtcSpl_Resample8khzTo22khz() call.
+void WebRtcSpl_ResetResample8khzTo22khz(WebRtcSpl_State8khzTo22khz* state)
+{
+    int k;
+    for (k = 0; k < 8; k++)
+    {
+        state->S_8_16[k] = 0;
+        state->S_16_11[k] = 0;
+        state->S_11_22[k] = 0;
+    }
+}
+
+// compute two inner-products and store them to output array
+// Computes two 9-tap inner products with the same coefficient vector:
+// out1 over in1[0..8] (forward) and out2 over in2[0..-8] (backward), which
+// lets the symmetric polyphase pairs in kCoefficients32To22 share one call.
+// Both accumulators start at 16384 (1 << 14), the rounding offset for the
+// caller's "shifted 15 left + offset 16384" output format (the >>15 is NOT
+// performed here -- compare WebRtcSpl_DotProdIntToShort).
+static void WebRtcSpl_DotProdIntToInt(const WebRtc_Word32* in1, const WebRtc_Word32* in2,
+                                      const WebRtc_Word16* coef_ptr, WebRtc_Word32* out1,
+                                      WebRtc_Word32* out2)
+{
+    WebRtc_Word32 tmp1 = 16384;
+    WebRtc_Word32 tmp2 = 16384;
+    WebRtc_Word16 coef;
+
+    coef = coef_ptr[0];
+    tmp1 += coef * in1[0];
+    tmp2 += coef * in2[-0]; // in2[-0] == in2[0]; kept for the tap pattern
+
+    coef = coef_ptr[1];
+    tmp1 += coef * in1[1];
+    tmp2 += coef * in2[-1];
+
+    coef = coef_ptr[2];
+    tmp1 += coef * in1[2];
+    tmp2 += coef * in2[-2];
+
+    coef = coef_ptr[3];
+    tmp1 += coef * in1[3];
+    tmp2 += coef * in2[-3];
+
+    coef = coef_ptr[4];
+    tmp1 += coef * in1[4];
+    tmp2 += coef * in2[-4];
+
+    coef = coef_ptr[5];
+    tmp1 += coef * in1[5];
+    tmp2 += coef * in2[-5];
+
+    coef = coef_ptr[6];
+    tmp1 += coef * in1[6];
+    tmp2 += coef * in2[-6];
+
+    coef = coef_ptr[7];
+    tmp1 += coef * in1[7];
+    tmp2 += coef * in2[-7];
+
+    coef = coef_ptr[8];
+    *out1 = tmp1 + coef * in1[8];
+    *out2 = tmp2 + coef * in2[-8];
+}
+
+// compute two inner-products and store them to output array
+// Same tap layout as WebRtcSpl_DotProdIntToInt (one forward and one backward
+// 9-tap inner product sharing the coefficient vector), but the results are
+// scaled down by 15 bits -- the initial 16384 (1 << 14) provides rounding --
+// and saturated to the WebRtc_Word16 range before being stored.
+static void WebRtcSpl_DotProdIntToShort(const WebRtc_Word32* in1, const WebRtc_Word32* in2,
+                                        const WebRtc_Word16* coef_ptr, WebRtc_Word16* out1,
+                                        WebRtc_Word16* out2)
+{
+    WebRtc_Word32 tmp1 = 16384;
+    WebRtc_Word32 tmp2 = 16384;
+    WebRtc_Word16 coef;
+
+    coef = coef_ptr[0];
+    tmp1 += coef * in1[0];
+    tmp2 += coef * in2[-0]; // in2[-0] == in2[0]; kept for the tap pattern
+
+    coef = coef_ptr[1];
+    tmp1 += coef * in1[1];
+    tmp2 += coef * in2[-1];
+
+    coef = coef_ptr[2];
+    tmp1 += coef * in1[2];
+    tmp2 += coef * in2[-2];
+
+    coef = coef_ptr[3];
+    tmp1 += coef * in1[3];
+    tmp2 += coef * in2[-3];
+
+    coef = coef_ptr[4];
+    tmp1 += coef * in1[4];
+    tmp2 += coef * in2[-4];
+
+    coef = coef_ptr[5];
+    tmp1 += coef * in1[5];
+    tmp2 += coef * in2[-5];
+
+    coef = coef_ptr[6];
+    tmp1 += coef * in1[6];
+    tmp2 += coef * in2[-6];
+
+    coef = coef_ptr[7];
+    tmp1 += coef * in1[7];
+    tmp2 += coef * in2[-7];
+
+    coef = coef_ptr[8];
+    tmp1 += coef * in1[8];
+    tmp2 += coef * in2[-8];
+
+    // scale down, round and saturate
+    tmp1 >>= 15;
+    if (tmp1 > (WebRtc_Word32)0x00007FFF)
+        tmp1 = 0x00007FFF;
+    if (tmp1 < (WebRtc_Word32)0xFFFF8000)
+        tmp1 = 0xFFFF8000;
+    tmp2 >>= 15;
+    if (tmp2 > (WebRtc_Word32)0x00007FFF)
+        tmp2 = 0x00007FFF;
+    if (tmp2 < (WebRtc_Word32)0xFFFF8000)
+        tmp2 = 0xFFFF8000;
+    *out1 = (WebRtc_Word16)tmp1;
+    *out2 = (WebRtc_Word16)tmp2;
+}
+
+//   Resampling ratio: 11/16
+// input:  WebRtc_Word32 (normalized, not saturated) :: size 16 * K
+// output: WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) :: size 11 * K
+//      K: Number of blocks
+
+void WebRtcSpl_32khzTo22khzIntToInt(const WebRtc_Word32* In,
+                                    WebRtc_Word32* Out,
+                                    const WebRtc_Word32 K)
+{
+    /////////////////////////////////////////////////////////////
+    // Filter operation:
+    //
+    // Perform resampling (16 input samples -> 11 output samples);
+    // process in sub blocks of size 16 samples.
+    //
+    // Per 16-sample block: Out[0] passes In[3] straight through (scaled to
+    // the <<15 output format with a 1<<14 rounding offset); the remaining
+    // 10 outputs come from 5 symmetric 9-tap filter pairs.  Note the filters
+    // read past the 16 consumed samples (up to In[16*K + 6]), so the caller
+    // must provide extra trailing samples -- NOTE(review): callers prepend
+    // 8 history samples per sub-block; confirm buffer sizing if reused.
+    WebRtc_Word32 m;
+
+    for (m = 0; m < K; m++)
+    {
+        // first output sample
+        Out[0] = ((WebRtc_Word32)In[3] << 15) + (1 << 14);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToInt(&In[0], &In[22], kCoefficients32To22[0], &Out[1], &Out[10]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToInt(&In[2], &In[20], kCoefficients32To22[1], &Out[2], &Out[9]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToInt(&In[3], &In[19], kCoefficients32To22[2], &Out[3], &Out[8]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToInt(&In[5], &In[17], kCoefficients32To22[3], &Out[4], &Out[7]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToInt(&In[6], &In[16], kCoefficients32To22[4], &Out[5], &Out[6]);
+
+        // update pointers
+        In += 16;
+        Out += 11;
+    }
+}
+
+//   Resampling ratio: 11/16
+// input:  WebRtc_Word32 (normalized, not saturated) :: size 16 * K
+// output: WebRtc_Word16 (saturated) :: size 11 * K
+//      K: Number of blocks
+
+void WebRtcSpl_32khzTo22khzIntToShort(const WebRtc_Word32 *In,
+                                      WebRtc_Word16 *Out,
+                                      const WebRtc_Word32 K)
+{
+    /////////////////////////////////////////////////////////////
+    // Filter operation:
+    //
+    // Perform resampling (16 input samples -> 11 output samples);
+    // process in sub blocks of size 16 samples.
+    //
+    // Same filter structure as WebRtcSpl_32khzTo22khzIntToInt, but outputs
+    // are scaled down and saturated to WebRtc_Word16.  Out[0] is In[3]
+    // saturated directly; the filters read up to In[16*K + 6], so the caller
+    // must provide trailing samples beyond the 16*K that are consumed.
+    WebRtc_Word32 tmp;
+    WebRtc_Word32 m;
+
+    for (m = 0; m < K; m++)
+    {
+        // first output sample: pass-through with saturation
+        tmp = In[3];
+        if (tmp > (WebRtc_Word32)0x00007FFF)
+            tmp = 0x00007FFF;
+        if (tmp < (WebRtc_Word32)0xFFFF8000)
+            tmp = 0xFFFF8000;
+        Out[0] = (WebRtc_Word16)tmp;
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToShort(&In[0], &In[22], kCoefficients32To22[0], &Out[1], &Out[10]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToShort(&In[2], &In[20], kCoefficients32To22[1], &Out[2], &Out[9]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToShort(&In[3], &In[19], kCoefficients32To22[2], &Out[3], &Out[8]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToShort(&In[5], &In[17], kCoefficients32To22[3], &Out[4], &Out[7]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_DotProdIntToShort(&In[6], &In[16], kCoefficients32To22[4], &Out[5], &Out[6]);
+
+        // update pointers
+        In += 16;
+        Out += 11;
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/resample_48khz.c b/trunk/src/common_audio/signal_processing/resample_48khz.c
new file mode 100644
index 0000000..31cbe6b
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/resample_48khz.c
@@ -0,0 +1,186 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains resampling functions between 48 kHz and nb/wb.
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include <string.h>
+#include "signal_processing_library.h"
+#include "resample_by_2_internal.h"
+
+////////////////////////////
+///// 48 kHz -> 16 kHz /////
+////////////////////////////
+
+// 48 -> 16 resampler
+// Chain: 48 kHz --(LP)--> 48 kHz --(2/3)--> 32 kHz --(1/2)--> 16 kHz.
+// in:     480 samples (10 ms at 48 kHz);  out: 160 samples (10 ms at 16 kHz).
+// state:  reset with WebRtcSpl_ResetResample48khzTo16khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least 496 WebRtc_Word32s
+//         (480 samples + 16 slots of filter history).
+void WebRtcSpl_Resample48khzTo16khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                    WebRtcSpl_State48khzTo16khz* state, WebRtc_Word32* tmpmem)
+{
+    ///// 48 --> 48(LP) /////
+    // WebRtc_Word16  in[480]
+    // WebRtc_Word32 out[480]
+    /////
+    WebRtcSpl_LPBy2ShortToInt(in, 480, tmpmem + 16, state->S_48_48);
+
+    ///// 48 --> 32 /////
+    // WebRtc_Word32  in[480]
+    // WebRtc_Word32 out[320]
+    /////
+    // copy state to and from input array: 8 history samples are prepended at
+    // tmpmem[8..15]; the last 8 input samples become next call's history.
+    memcpy(tmpmem + 8, state->S_48_32, 8 * sizeof(WebRtc_Word32));
+    memcpy(state->S_48_32, tmpmem + 488, 8 * sizeof(WebRtc_Word32));
+    WebRtcSpl_Resample48khzTo32khz(tmpmem + 8, tmpmem, 160);
+
+    ///// 32 --> 16 /////
+    // WebRtc_Word32  in[320]
+    // WebRtc_Word16 out[160]
+    /////
+    WebRtcSpl_DownBy2IntToShort(tmpmem, 320, out, state->S_32_16);
+}
+
+// initialize state of 48 -> 16 resampler
+// Zeroes all filter-state arrays (S_48_48 holds 16 values, the others 8);
+// call once per stream before the first WebRtcSpl_Resample48khzTo16khz().
+void WebRtcSpl_ResetResample48khzTo16khz(WebRtcSpl_State48khzTo16khz* state)
+{
+    memset(state->S_48_48, 0, 16 * sizeof(WebRtc_Word32));
+    memset(state->S_48_32, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_32_16, 0, 8 * sizeof(WebRtc_Word32));
+}
+
+////////////////////////////
+///// 16 kHz -> 48 kHz /////
+////////////////////////////
+
+// 16 -> 48 resampler
+// Chain: 16 kHz --(x2)--> 32 kHz --(3/4)--> 24 kHz --(x2)--> 48 kHz.
+// in:     160 samples (10 ms at 16 kHz);  out: 480 samples (10 ms at 48 kHz).
+// state:  reset with WebRtcSpl_ResetResample16khzTo48khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least 336 WebRtc_Word32s
+//         (320 samples + 16 slots of filter history).
+void WebRtcSpl_Resample16khzTo48khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                    WebRtcSpl_State16khzTo48khz* state, WebRtc_Word32* tmpmem)
+{
+    ///// 16 --> 32 /////
+    // WebRtc_Word16  in[160]
+    // WebRtc_Word32 out[320]
+    /////
+    WebRtcSpl_UpBy2ShortToInt(in, 160, tmpmem + 16, state->S_16_32);
+
+    ///// 32 --> 24 /////
+    // WebRtc_Word32  in[320]
+    // WebRtc_Word32 out[240]
+    // copy state to and from input array: 8 history samples are prepended at
+    // tmpmem[8..15]; the last 8 input samples become next call's history.
+    /////
+    memcpy(tmpmem + 8, state->S_32_24, 8 * sizeof(WebRtc_Word32));
+    memcpy(state->S_32_24, tmpmem + 328, 8 * sizeof(WebRtc_Word32));
+    WebRtcSpl_Resample32khzTo24khz(tmpmem + 8, tmpmem, 80);
+
+    ///// 24 --> 48 /////
+    // WebRtc_Word32  in[240]
+    // WebRtc_Word16 out[480]
+    /////
+    WebRtcSpl_UpBy2IntToShort(tmpmem, 240, out, state->S_24_48);
+}
+
+// initialize state of 16 -> 48 resampler
+// Zeroes all three 8-value filter-state arrays; call once per stream before
+// the first WebRtcSpl_Resample16khzTo48khz() call.
+void WebRtcSpl_ResetResample16khzTo48khz(WebRtcSpl_State16khzTo48khz* state)
+{
+    memset(state->S_16_32, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_32_24, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_24_48, 0, 8 * sizeof(WebRtc_Word32));
+}
+
+////////////////////////////
+///// 48 kHz ->  8 kHz /////
+////////////////////////////
+
+// 48 -> 8 resampler
+// Chain: 48 kHz --(1/2)--> 24 kHz --(LP)--> 24 kHz --(2/3)--> 16 kHz
+//        --(1/2)--> 8 kHz.
+// in:     480 samples (10 ms at 48 kHz);  out: 80 samples (10 ms at 8 kHz).
+// state:  reset with WebRtcSpl_ResetResample48khzTo8khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least 496 WebRtc_Word32s
+//         (the 24 kHz block lives at tmpmem[256..495]).
+void WebRtcSpl_Resample48khzTo8khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State48khzTo8khz* state, WebRtc_Word32* tmpmem)
+{
+    ///// 48 --> 24 /////
+    // WebRtc_Word16  in[480]
+    // WebRtc_Word32 out[240]
+    /////
+    WebRtcSpl_DownBy2ShortToInt(in, 480, tmpmem + 256, state->S_48_24);
+
+    ///// 24 --> 24(LP) /////
+    // WebRtc_Word32  in[240]
+    // WebRtc_Word32 out[240]
+    /////
+    WebRtcSpl_LPBy2IntToInt(tmpmem + 256, 240, tmpmem + 16, state->S_24_24);
+
+    ///// 24 --> 16 /////
+    // WebRtc_Word32  in[240]
+    // WebRtc_Word32 out[160]
+    /////
+    // copy state to and from input array: 8 history samples are prepended at
+    // tmpmem[8..15]; the last 8 input samples become next call's history.
+    // (24->16 reuses the 2/3-ratio 48->32 polyphase filter.)
+    memcpy(tmpmem + 8, state->S_24_16, 8 * sizeof(WebRtc_Word32));
+    memcpy(state->S_24_16, tmpmem + 248, 8 * sizeof(WebRtc_Word32));
+    WebRtcSpl_Resample48khzTo32khz(tmpmem + 8, tmpmem, 80);
+
+    ///// 16 --> 8 /////
+    // WebRtc_Word32  in[160]
+    // WebRtc_Word16 out[80]
+    /////
+    WebRtcSpl_DownBy2IntToShort(tmpmem, 160, out, state->S_16_8);
+}
+
+// initialize state of 48 -> 8 resampler
+// Zeroes all filter-state arrays (S_24_24 holds 16 values, the others 8);
+// call once per stream before the first WebRtcSpl_Resample48khzTo8khz().
+void WebRtcSpl_ResetResample48khzTo8khz(WebRtcSpl_State48khzTo8khz* state)
+{
+    memset(state->S_48_24, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_24_24, 0, 16 * sizeof(WebRtc_Word32));
+    memset(state->S_24_16, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_16_8, 0, 8 * sizeof(WebRtc_Word32));
+}
+
+////////////////////////////
+/////  8 kHz -> 48 kHz /////
+////////////////////////////
+
+// 8 -> 48 resampler
+// Chain: 8 kHz --(x2)--> 16 kHz --(3/4)--> 12 kHz --(x2)--> 24 kHz
+//        --(x2)--> 48 kHz.
+// in:     80 samples (10 ms at 8 kHz);  out: 480 samples (10 ms at 48 kHz).
+// state:  reset with WebRtcSpl_ResetResample8khzTo48khz() before first use.
+// tmpmem: caller-supplied scratch; must hold at least 424 WebRtc_Word32s
+//         (the 16 kHz block lives at tmpmem[264..423]).
+void WebRtcSpl_Resample8khzTo48khz(const WebRtc_Word16* in, WebRtc_Word16* out,
+                                   WebRtcSpl_State8khzTo48khz* state, WebRtc_Word32* tmpmem)
+{
+    ///// 8 --> 16 /////
+    // WebRtc_Word16  in[80]
+    // WebRtc_Word32 out[160]
+    /////
+    WebRtcSpl_UpBy2ShortToInt(in, 80, tmpmem + 264, state->S_8_16);
+
+    ///// 16 --> 12 /////
+    // WebRtc_Word32  in[160]
+    // WebRtc_Word32 out[120]
+    /////
+    // copy state to and from input array: 8 history samples are prepended at
+    // tmpmem[256..263]; the last 8 input samples become next call's history.
+    // (16->12 reuses the 3/4-ratio 32->24 polyphase filter.)
+    memcpy(tmpmem + 256, state->S_16_12, 8 * sizeof(WebRtc_Word32));
+    memcpy(state->S_16_12, tmpmem + 416, 8 * sizeof(WebRtc_Word32));
+    WebRtcSpl_Resample32khzTo24khz(tmpmem + 256, tmpmem + 240, 40);
+
+    ///// 12 --> 24 /////
+    // WebRtc_Word32  in[120]
+    // WebRtc_Word32 out[240]
+    /////
+    WebRtcSpl_UpBy2IntToInt(tmpmem + 240, 120, tmpmem, state->S_12_24);
+
+    ///// 24 --> 48 /////
+    // WebRtc_Word32  in[240]
+    // WebRtc_Word16 out[480]
+    /////
+    WebRtcSpl_UpBy2IntToShort(tmpmem, 240, out, state->S_24_48);
+}
+
+// initialize state of 8 -> 48 resampler
+// Zeroes all four 8-value filter-state arrays; call once per stream before
+// the first WebRtcSpl_Resample8khzTo48khz() call.
+void WebRtcSpl_ResetResample8khzTo48khz(WebRtcSpl_State8khzTo48khz* state)
+{
+    memset(state->S_8_16, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_16_12, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_12_24, 0, 8 * sizeof(WebRtc_Word32));
+    memset(state->S_24_48, 0, 8 * sizeof(WebRtc_Word32));
+}
diff --git a/trunk/src/common_audio/signal_processing/resample_by_2.c b/trunk/src/common_audio/signal_processing/resample_by_2.c
new file mode 100644
index 0000000..e239db7
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/resample_by_2.c
@@ -0,0 +1,181 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the resampling by two functions.
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+#ifdef WEBRTC_ARCH_ARM_V7A
+
+// allpass filter coefficients.
+static const WebRtc_UWord32 kResampleAllpass1[3] = {3284, 24441, 49528 << 15};
+static const WebRtc_UWord32 kResampleAllpass2[3] =
+  {12199, 37471 << 15, 60255 << 15};
+
+// Multiply two 32-bit values and accumulate to another input value.
+// Return: state + ((diff * tbl_value) >> 16)
+
+static __inline WebRtc_Word32 MUL_ACCUM_1(WebRtc_Word32 tbl_value,
+                                          WebRtc_Word32 diff,
+                                          WebRtc_Word32 state) {
+  WebRtc_Word32 result;
+  // SMLAWB multiplies by the bottom halfword of its second multiply operand,
+  // so tbl_value must fit in 16 bits here (the unshifted coefficients do).
+  __asm__("smlawb %r0, %r1, %r2, %r3": "=r"(result): "r"(diff),
+                                       "r"(tbl_value), "r"(state));
+  return result;
+}
+
+// Multiply two 32-bit values and accumulate to another input value.
+// Return: state + (((diff << 1) * tbl_value) >> 32)
+//
+// The reason to introduce this function is that, in case we can't use smlawb
+// instruction (in MUL_ACCUM_1) due to input value range, we can still use 
+// smmla to save some cycles.
+
+static __inline WebRtc_Word32 MUL_ACCUM_2(WebRtc_Word32 tbl_value,
+                                          WebRtc_Word32 diff,
+                                          WebRtc_Word32 state) {
+  WebRtc_Word32 result;
+  // SMMLA takes the top 32 bits of a 32x32 product; diff is pre-shifted left
+  // by 1 so the net effect matches MUL_ACCUM_1 for the <<15 coefficients
+  // that are too large for SMLAWB's 16-bit operand.
+  __asm__("smmla %r0, %r1, %r2, %r3": "=r"(result): "r"(diff << 1),
+                                      "r"(tbl_value), "r"(state));
+  return result;
+}
+
+#else
+
+// allpass filter coefficients.
+static const WebRtc_UWord16 kResampleAllpass1[3] = {3284, 24441, 49528};
+static const WebRtc_UWord16 kResampleAllpass2[3] = {12199, 37471, 60255};
+
+// Multiply a 32-bit value with a 16-bit value and accumulate to another input:
+#define MUL_ACCUM_1(a, b, c) WEBRTC_SPL_SCALEDIFF32(a, b, c)
+#define MUL_ACCUM_2(a, b, c) WEBRTC_SPL_SCALEDIFF32(a, b, c)
+
+#endif  // WEBRTC_ARCH_ARM_V7A
+
+
+// decimator
+// Halves the sampling rate of `in`.  Even input samples feed one cascade of
+// three first-order allpass sections (kResampleAllpass2) and odd samples feed
+// the other (kResampleAllpass1); the two branch outputs are averaged with
+// rounding and saturated.  `len` is the input length (the loop consumes
+// samples in pairs, so it should be even); `out` receives len/2 samples;
+// `filtState` holds 8 WebRtc_Word32 values, zeroed before the first call.
+void WebRtcSpl_DownsampleBy2(const WebRtc_Word16* in, const WebRtc_Word16 len,
+                             WebRtc_Word16* out, WebRtc_Word32* filtState) {
+  WebRtc_Word32 tmp1, tmp2, diff, in32, out32;
+  WebRtc_Word16 i;
+
+  // Keep the 8 state values in locals for the duration of the loop.
+  register WebRtc_Word32 state0 = filtState[0];
+  register WebRtc_Word32 state1 = filtState[1];
+  register WebRtc_Word32 state2 = filtState[2];
+  register WebRtc_Word32 state3 = filtState[3];
+  register WebRtc_Word32 state4 = filtState[4];
+  register WebRtc_Word32 state5 = filtState[5];
+  register WebRtc_Word32 state6 = filtState[6];
+  register WebRtc_Word32 state7 = filtState[7];
+
+  for (i = (len >> 1); i > 0; i--) {
+    // lower allpass filter (even input sample, scaled up by 10 bits)
+    in32 = (WebRtc_Word32)(*in++) << 10;
+    diff = in32 - state1;
+    tmp1 = MUL_ACCUM_1(kResampleAllpass2[0], diff, state0);
+    state0 = in32;
+    diff = tmp1 - state2;
+    tmp2 = MUL_ACCUM_2(kResampleAllpass2[1], diff, state1);
+    state1 = tmp1;
+    diff = tmp2 - state3;
+    state3 = MUL_ACCUM_2(kResampleAllpass2[2], diff, state2);
+    state2 = tmp2;
+
+    // upper allpass filter (odd input sample)
+    in32 = (WebRtc_Word32)(*in++) << 10;
+    diff = in32 - state5;
+    tmp1 = MUL_ACCUM_1(kResampleAllpass1[0], diff, state4);
+    state4 = in32;
+    diff = tmp1 - state6;
+    tmp2 = MUL_ACCUM_1(kResampleAllpass1[1], diff, state5);
+    state5 = tmp1;
+    diff = tmp2 - state7;
+    state7 = MUL_ACCUM_2(kResampleAllpass1[2], diff, state6);
+    state6 = tmp2;
+
+    // add two allpass outputs, divide by two and round
+    // (>> 11 = >> 10 input scaling plus the /2 of the average)
+    out32 = (state3 + state7 + 1024) >> 11;
+
+    // limit amplitude to prevent wrap-around, and write to output array
+    *out++ = WebRtcSpl_SatW32ToW16(out32);
+  }
+
+  // Persist the filter state for the next call.
+  filtState[0] = state0;
+  filtState[1] = state1;
+  filtState[2] = state2;
+  filtState[3] = state3;
+  filtState[4] = state4;
+  filtState[5] = state5;
+  filtState[6] = state6;
+  filtState[7] = state7;
+}
+
+
+// Doubles the sampling rate of `in`: every input sample produces two output
+// samples, one from each of two three-section allpass cascades (the two
+// polyphase branches of a half-band interpolator).  `len` is the number of
+// input samples; `out` receives 2*len samples; `filtState` holds 8
+// WebRtc_Word32 values, zeroed before the first call.
+void WebRtcSpl_UpsampleBy2(const WebRtc_Word16* in, WebRtc_Word16 len,
+                           WebRtc_Word16* out, WebRtc_Word32* filtState) {
+  WebRtc_Word32 tmp1, tmp2, diff, in32, out32;
+  WebRtc_Word16 i;
+
+  // Keep the 8 state values in locals for the duration of the loop.
+  register WebRtc_Word32 state0 = filtState[0];
+  register WebRtc_Word32 state1 = filtState[1];
+  register WebRtc_Word32 state2 = filtState[2];
+  register WebRtc_Word32 state3 = filtState[3];
+  register WebRtc_Word32 state4 = filtState[4];
+  register WebRtc_Word32 state5 = filtState[5];
+  register WebRtc_Word32 state6 = filtState[6];
+  register WebRtc_Word32 state7 = filtState[7];
+
+  for (i = len; i > 0; i--) {
+    // lower allpass filter -- produces the even output sample
+    in32 = (WebRtc_Word32)(*in++) << 10;
+    diff = in32 - state1;
+    tmp1 = MUL_ACCUM_1(kResampleAllpass1[0], diff, state0);
+    state0 = in32;
+    diff = tmp1 - state2;
+    tmp2 = MUL_ACCUM_1(kResampleAllpass1[1], diff, state1);
+    state1 = tmp1;
+    diff = tmp2 - state3;
+    state3 = MUL_ACCUM_2(kResampleAllpass1[2], diff, state2);
+    state2 = tmp2;
+
+    // round; limit amplitude to prevent wrap-around; write to output array
+    out32 = (state3 + 512) >> 10;
+    *out++ = WebRtcSpl_SatW32ToW16(out32);
+
+    // upper allpass filter -- produces the odd output sample from the same input
+    diff = in32 - state5;
+    tmp1 = MUL_ACCUM_1(kResampleAllpass2[0], diff, state4);
+    state4 = in32;
+    diff = tmp1 - state6;
+    tmp2 = MUL_ACCUM_2(kResampleAllpass2[1], diff, state5);
+    state5 = tmp1;
+    diff = tmp2 - state7;
+    state7 = MUL_ACCUM_2(kResampleAllpass2[2], diff, state6);
+    state6 = tmp2;
+
+    // round; limit amplitude to prevent wrap-around; write to output array
+    out32 = (state7 + 512) >> 10;
+    *out++ = WebRtcSpl_SatW32ToW16(out32);
+  }
+
+  // Persist the filter state for the next call.
+  filtState[0] = state0;
+  filtState[1] = state1;
+  filtState[2] = state2;
+  filtState[3] = state3;
+  filtState[4] = state4;
+  filtState[5] = state5;
+  filtState[6] = state6;
+  filtState[7] = state7;
+}
diff --git a/trunk/src/common_audio/signal_processing/resample_by_2_internal.c b/trunk/src/common_audio/signal_processing/resample_by_2_internal.c
new file mode 100644
index 0000000..cbd2395
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/resample_by_2_internal.c
@@ -0,0 +1,679 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains some internal resampling functions.
+ *
+ */
+
+#include "resample_by_2_internal.h"
+
+// allpass filter coefficients (two branches of three cascaded first-order sections).
+static const WebRtc_Word16 kResampleAllpass[2][3] = {
+        {821, 6110, 12382},
+        {3050, 9368, 15063}
+};
+
+//
+//   decimator (down-samples by 2; in[] is also used as scratch and is overwritten)
+// input:  WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) OVERWRITTEN!
+// output: WebRtc_Word16 (saturated) (of length len/2)
+// state:  filter state array; length = 8
+
+void WebRtcSpl_DownBy2IntToShort(WebRtc_Word32 *in, WebRtc_Word32 len, WebRtc_Word16 *out,
+                                 WebRtc_Word32 *state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    len >>= 1;
+
+    // lower allpass filter (operates on even input samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i << 1];
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // divide by two and store temporarily
+        in[i << 1] = (state[3] >> 1);
+    }
+
+    in++;
+
+    // upper allpass filter (operates on odd input samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i << 1];
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // divide by two and store temporarily
+        in[i << 1] = (state[7] >> 1);
+    }
+
+    in--;
+
+    // combine allpass outputs
+    for (i = 0; i < len; i += 2)
+    {
+        // divide by two, add both allpass outputs and round
+        tmp0 = (in[i << 1] + in[(i << 1) + 1]) >> 15;
+        tmp1 = (in[(i << 1) + 2] + in[(i << 1) + 3]) >> 15;
+        if (tmp0 > (WebRtc_Word32)0x00007FFF)
+            tmp0 = 0x00007FFF;
+        if (tmp0 < (WebRtc_Word32)0xFFFF8000)
+            tmp0 = 0xFFFF8000;
+        out[i] = (WebRtc_Word16)tmp0;
+        if (tmp1 > (WebRtc_Word32)0x00007FFF)
+            tmp1 = 0x00007FFF;
+        if (tmp1 < (WebRtc_Word32)0xFFFF8000)
+            tmp1 = 0xFFFF8000;
+        out[i + 1] = (WebRtc_Word16)tmp1;
+    }
+}
+
+//
+//   decimator (down-samples by 2)
+// input:  WebRtc_Word16
+// output: WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) (of length len/2)
+// state:  filter state array; length = 8
+
+void WebRtcSpl_DownBy2ShortToInt(const WebRtc_Word16 *in,
+                                  WebRtc_Word32 len,
+                                  WebRtc_Word32 *out,
+                                  WebRtc_Word32 *state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    len >>= 1;
+
+    // lower allpass filter (operates on even input samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i << 1] << 15) + (1 << 14);
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // divide by two and store temporarily
+        out[i] = (state[3] >> 1);
+    }
+
+    in++;
+
+    // upper allpass filter (operates on odd input samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i << 1] << 15) + (1 << 14);
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // divide by two and add to the lower-branch output
+        out[i] += (state[7] >> 1);
+    }
+
+    in--;
+}
+
+//
+//   interpolator (up-samples by 2)
+// input:  WebRtc_Word16
+// output: WebRtc_Word32 (normalized, not saturated) (of length len*2)
+// state:  filter state array; length = 8
+void WebRtcSpl_UpBy2ShortToInt(const WebRtc_Word16 *in, WebRtc_Word32 len, WebRtc_Word32 *out,
+                               WebRtc_Word32 *state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    // upper allpass filter (generates odd output samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i] << 15) + (1 << 14);
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // scale down (the rounding offset was added at the input) and store
+        out[i << 1] = state[7] >> 15;
+    }
+
+    out++;
+
+    // lower allpass filter (generates even output samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i] << 15) + (1 << 14);
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // scale down (the rounding offset was added at the input) and store
+        out[i << 1] = state[3] >> 15;
+    }
+}
+
+//
+//   interpolator (up-samples by 2)
+// input:  WebRtc_Word32 (shifted 15 positions to the left, + offset 16384)
+// output: WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) (of length len*2)
+// state:  filter state array; length = 8
+void WebRtcSpl_UpBy2IntToInt(const WebRtc_Word32 *in, WebRtc_Word32 len, WebRtc_Word32 *out,
+                             WebRtc_Word32 *state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    // upper allpass filter (generates odd output samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i];
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // store (the input Q-format and offset are preserved; no scaling here)
+        out[i << 1] = state[7];
+    }
+
+    out++;
+
+    // lower allpass filter (generates even output samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i];
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // store (the input Q-format and offset are preserved; no scaling here)
+        out[i << 1] = state[3];
+    }
+}
+
+//
+//   interpolator (up-samples by 2)
+// input:  WebRtc_Word32 (shifted 15 positions to the left, + offset 16384)
+// output: WebRtc_Word16 (saturated) (of length len*2)
+// state:  filter state array; length = 8
+void WebRtcSpl_UpBy2IntToShort(const WebRtc_Word32 *in, WebRtc_Word32 len, WebRtc_Word16 *out,
+                               WebRtc_Word32 *state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    // upper allpass filter (generates odd output samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i];
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // scale down, saturate and store
+        tmp1 = state[7] >> 15;
+        if (tmp1 > (WebRtc_Word32)0x00007FFF)
+            tmp1 = 0x00007FFF;
+        if (tmp1 < (WebRtc_Word32)0xFFFF8000)
+            tmp1 = 0xFFFF8000;
+        out[i << 1] = (WebRtc_Word16)tmp1;
+    }
+
+    out++;
+
+    // lower allpass filter (generates even output samples)
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i];
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // scale down, saturate and store
+        tmp1 = state[3] >> 15;
+        if (tmp1 > (WebRtc_Word32)0x00007FFF)
+            tmp1 = 0x00007FFF;
+        if (tmp1 < (WebRtc_Word32)0xFFFF8000)
+            tmp1 = 0xFFFF8000;
+        out[i << 1] = (WebRtc_Word16)tmp1;
+    }
+}
+
+//   lowpass filter
+// input:  WebRtc_Word16
+// output: WebRtc_Word32 (normalized, not saturated)
+// state:  filter state array; length = 16
+void WebRtcSpl_LPBy2ShortToInt(const WebRtc_Word16* in, WebRtc_Word32 len, WebRtc_Word32* out,
+                               WebRtc_Word32* state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    len >>= 1;
+
+    // lower allpass filter: odd input -> even output samples
+    in++;
+    // initial state of polyphase delay element
+    tmp0 = state[12];
+    for (i = 0; i < len; i++)
+    {
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // divide by two and store temporarily
+        out[i << 1] = state[3] >> 1;
+        tmp0 = ((WebRtc_Word32)in[i << 1] << 15) + (1 << 14);
+    }
+    in--;
+
+    // upper allpass filter: even input -> even output samples
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i << 1] << 15) + (1 << 14);
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // average the two allpass outputs, scale down and store
+        out[i << 1] = (out[i << 1] + (state[7] >> 1)) >> 15;
+    }
+
+    // switch to odd output samples
+    out++;
+
+    // lower allpass filter: even input -> odd output samples
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i << 1] << 15) + (1 << 14);
+        diff = tmp0 - state[9];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[8] + diff * kResampleAllpass[1][0];
+        state[8] = tmp0;
+        diff = tmp1 - state[10];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[9] + diff * kResampleAllpass[1][1];
+        state[9] = tmp1;
+        diff = tmp0 - state[11];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[11] = state[10] + diff * kResampleAllpass[1][2];
+        state[10] = tmp0;
+
+        // divide by two and store temporarily
+        out[i << 1] = state[11] >> 1;
+    }
+
+    // upper allpass filter: odd input -> odd output samples
+    in++;
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = ((WebRtc_Word32)in[i << 1] << 15) + (1 << 14);
+        diff = tmp0 - state[13];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[12] + diff * kResampleAllpass[0][0];
+        state[12] = tmp0;
+        diff = tmp1 - state[14];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[13] + diff * kResampleAllpass[0][1];
+        state[13] = tmp1;
+        diff = tmp0 - state[15];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[15] = state[14] + diff * kResampleAllpass[0][2];
+        state[14] = tmp0;
+
+        // average the two allpass outputs, scale down and store
+        out[i << 1] = (out[i << 1] + (state[15] >> 1)) >> 15;
+    }
+}
+
+//   lowpass filter
+// input:  WebRtc_Word32 (shifted 15 positions to the left, + offset 16384)
+// output: WebRtc_Word32 (normalized, not saturated)
+// state:  filter state array; length = 16
+void WebRtcSpl_LPBy2IntToInt(const WebRtc_Word32* in, WebRtc_Word32 len, WebRtc_Word32* out,
+                             WebRtc_Word32* state)
+{
+    WebRtc_Word32 tmp0, tmp1, diff;
+    WebRtc_Word32 i;
+
+    len >>= 1;
+
+    // lower allpass filter: odd input -> even output samples
+    in++;
+    // initial state of polyphase delay element
+    tmp0 = state[12];
+    for (i = 0; i < len; i++)
+    {
+        diff = tmp0 - state[1];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[0] + diff * kResampleAllpass[1][0];
+        state[0] = tmp0;
+        diff = tmp1 - state[2];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[1] + diff * kResampleAllpass[1][1];
+        state[1] = tmp1;
+        diff = tmp0 - state[3];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[3] = state[2] + diff * kResampleAllpass[1][2];
+        state[2] = tmp0;
+
+        // divide by two and store temporarily
+        out[i << 1] = state[3] >> 1;
+        tmp0 = in[i << 1];
+    }
+    in--;
+
+    // upper allpass filter: even input -> even output samples
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i << 1];
+        diff = tmp0 - state[5];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[4] + diff * kResampleAllpass[0][0];
+        state[4] = tmp0;
+        diff = tmp1 - state[6];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[5] + diff * kResampleAllpass[0][1];
+        state[5] = tmp1;
+        diff = tmp0 - state[7];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[7] = state[6] + diff * kResampleAllpass[0][2];
+        state[6] = tmp0;
+
+        // average the two allpass outputs, scale down and store
+        out[i << 1] = (out[i << 1] + (state[7] >> 1)) >> 15;
+    }
+
+    // switch to odd output samples
+    out++;
+
+    // lower allpass filter: even input -> odd output samples
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i << 1];
+        diff = tmp0 - state[9];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[8] + diff * kResampleAllpass[1][0];
+        state[8] = tmp0;
+        diff = tmp1 - state[10];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[9] + diff * kResampleAllpass[1][1];
+        state[9] = tmp1;
+        diff = tmp0 - state[11];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[11] = state[10] + diff * kResampleAllpass[1][2];
+        state[10] = tmp0;
+
+        // divide by two and store temporarily
+        out[i << 1] = state[11] >> 1;
+    }
+
+    // upper allpass filter: odd input -> odd output samples
+    in++;
+    for (i = 0; i < len; i++)
+    {
+        tmp0 = in[i << 1];
+        diff = tmp0 - state[13];
+        // scale down and round
+        diff = (diff + (1 << 13)) >> 14;
+        tmp1 = state[12] + diff * kResampleAllpass[0][0];
+        state[12] = tmp0;
+        diff = tmp1 - state[14];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        tmp0 = state[13] + diff * kResampleAllpass[0][1];
+        state[13] = tmp1;
+        diff = tmp0 - state[15];
+        // scale down and truncate
+        diff = diff >> 14;
+        if (diff < 0)
+            diff += 1;
+        state[15] = state[14] + diff * kResampleAllpass[0][2];
+        state[14] = tmp0;
+
+        // average the two allpass outputs, scale down and store
+        out[i << 1] = (out[i << 1] + (state[15] >> 1)) >> 15;
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/resample_by_2_internal.h b/trunk/src/common_audio/signal_processing/resample_by_2_internal.h
new file mode 100644
index 0000000..b6ac9f0
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/resample_by_2_internal.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This header file contains some internal resampling functions.
+ *
+ */
+
+#ifndef WEBRTC_SPL_RESAMPLE_BY_2_INTERNAL_H_
+#define WEBRTC_SPL_RESAMPLE_BY_2_INTERNAL_H_
+
+#include "typedefs.h"
+
+/*******************************************************************
+ * resample_by_2_internal.c
+ * Functions for internal use in the other resample functions
+ ******************************************************************/
+void WebRtcSpl_DownBy2IntToShort(WebRtc_Word32 *in, WebRtc_Word32 len, WebRtc_Word16 *out,
+                                 WebRtc_Word32 *state);
+
+void WebRtcSpl_DownBy2ShortToInt(const WebRtc_Word16 *in, WebRtc_Word32 len,
+                                 WebRtc_Word32 *out, WebRtc_Word32 *state);
+
+void WebRtcSpl_UpBy2ShortToInt(const WebRtc_Word16 *in, WebRtc_Word32 len,
+                               WebRtc_Word32 *out, WebRtc_Word32 *state);
+
+void WebRtcSpl_UpBy2IntToInt(const WebRtc_Word32 *in, WebRtc_Word32 len, WebRtc_Word32 *out,
+                             WebRtc_Word32 *state);
+
+void WebRtcSpl_UpBy2IntToShort(const WebRtc_Word32 *in, WebRtc_Word32 len,
+                               WebRtc_Word16 *out, WebRtc_Word32 *state);
+
+void WebRtcSpl_LPBy2ShortToInt(const WebRtc_Word16* in, WebRtc_Word32 len,
+                               WebRtc_Word32* out, WebRtc_Word32* state);
+
+void WebRtcSpl_LPBy2IntToInt(const WebRtc_Word32* in, WebRtc_Word32 len, WebRtc_Word32* out,
+                             WebRtc_Word32* state);
+
+#endif // WEBRTC_SPL_RESAMPLE_BY_2_INTERNAL_H_
diff --git a/trunk/src/common_audio/signal_processing/resample_fractional.c b/trunk/src/common_audio/signal_processing/resample_fractional.c
new file mode 100644
index 0000000..51003d4
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/resample_fractional.c
@@ -0,0 +1,242 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the resampling functions between 48, 44, 32 and 24 kHz.
+ * The description headers can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+// polyphase interpolation filter coefficients
+static const WebRtc_Word16 kCoefficients48To32[2][8] = {
+        {778, -2050, 1087, 23285, 12903, -3783, 441, 222},
+        {222, 441, -3783, 12903, 23285, 1087, -2050, 778}
+};
+
+static const WebRtc_Word16 kCoefficients32To24[3][8] = {
+        {767, -2362, 2434, 24406, 10620, -3838, 721, 90},
+        {386, -381, -2646, 19062, 19062, -2646, -381, 386},
+        {90, 721, -3838, 10620, 24406, 2434, -2362, 767}
+};
+
+static const WebRtc_Word16 kCoefficients44To32[4][9] = {
+        {117, -669, 2245, -6183, 26267, 13529, -3245, 845, -138},
+        {-101, 612, -2283, 8532, 29790, -5138, 1789, -524, 91},
+        {50, -292, 1016, -3064, 32010, 3933, -1147, 315, -53},
+        {-156, 974, -3863, 18603, 21691, -6246, 2353, -712, 126}
+};
+
+//   Resampling ratio: 2/3
+// input:  WebRtc_Word32 (normalized, not saturated) :: size 3 * K
+// output: WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) :: size 2 * K
+//      K: number of blocks
+
+void WebRtcSpl_Resample48khzTo32khz(const WebRtc_Word32 *In, WebRtc_Word32 *Out,
+                                    const WebRtc_Word32 K)
+{
+    /////////////////////////////////////////////////////////////
+    // Filter operation:
+    //
+    // Perform resampling (3 input samples -> 2 output samples);
+    // process in sub blocks of size 3 samples (each reads In[0..8], so In must hold 3 * K + 6 samples).
+    WebRtc_Word32 tmp;
+    WebRtc_Word32 m;
+
+    for (m = 0; m < K; m++)
+    {
+        tmp = 1 << 14;
+        tmp += kCoefficients48To32[0][0] * In[0];
+        tmp += kCoefficients48To32[0][1] * In[1];
+        tmp += kCoefficients48To32[0][2] * In[2];
+        tmp += kCoefficients48To32[0][3] * In[3];
+        tmp += kCoefficients48To32[0][4] * In[4];
+        tmp += kCoefficients48To32[0][5] * In[5];
+        tmp += kCoefficients48To32[0][6] * In[6];
+        tmp += kCoefficients48To32[0][7] * In[7];
+        Out[0] = tmp;
+
+        tmp = 1 << 14;
+        tmp += kCoefficients48To32[1][0] * In[1];
+        tmp += kCoefficients48To32[1][1] * In[2];
+        tmp += kCoefficients48To32[1][2] * In[3];
+        tmp += kCoefficients48To32[1][3] * In[4];
+        tmp += kCoefficients48To32[1][4] * In[5];
+        tmp += kCoefficients48To32[1][5] * In[6];
+        tmp += kCoefficients48To32[1][6] * In[7];
+        tmp += kCoefficients48To32[1][7] * In[8];
+        Out[1] = tmp;
+
+        // update pointers
+        In += 3;
+        Out += 2;
+    }
+}
+
+//   Resampling ratio: 3/4
+// input:  WebRtc_Word32 (normalized, not saturated) :: size 4 * K
+// output: WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) :: size 3 * K
+//      K: number of blocks
+
+void WebRtcSpl_Resample32khzTo24khz(const WebRtc_Word32 *In, WebRtc_Word32 *Out,
+                                    const WebRtc_Word32 K)
+{
+    /////////////////////////////////////////////////////////////
+    // Filter operation:
+    //
+    // Perform resampling (4 input samples -> 3 output samples);
+    // process in sub blocks of size 4 samples (each reads In[0..9], so In must hold 4 * K + 6 samples).
+    WebRtc_Word32 m;
+    WebRtc_Word32 tmp;
+
+    for (m = 0; m < K; m++)
+    {
+        tmp = 1 << 14;
+        tmp += kCoefficients32To24[0][0] * In[0];
+        tmp += kCoefficients32To24[0][1] * In[1];
+        tmp += kCoefficients32To24[0][2] * In[2];
+        tmp += kCoefficients32To24[0][3] * In[3];
+        tmp += kCoefficients32To24[0][4] * In[4];
+        tmp += kCoefficients32To24[0][5] * In[5];
+        tmp += kCoefficients32To24[0][6] * In[6];
+        tmp += kCoefficients32To24[0][7] * In[7];
+        Out[0] = tmp;
+
+        tmp = 1 << 14;
+        tmp += kCoefficients32To24[1][0] * In[1];
+        tmp += kCoefficients32To24[1][1] * In[2];
+        tmp += kCoefficients32To24[1][2] * In[3];
+        tmp += kCoefficients32To24[1][3] * In[4];
+        tmp += kCoefficients32To24[1][4] * In[5];
+        tmp += kCoefficients32To24[1][5] * In[6];
+        tmp += kCoefficients32To24[1][6] * In[7];
+        tmp += kCoefficients32To24[1][7] * In[8];
+        Out[1] = tmp;
+
+        tmp = 1 << 14;
+        tmp += kCoefficients32To24[2][0] * In[2];
+        tmp += kCoefficients32To24[2][1] * In[3];
+        tmp += kCoefficients32To24[2][2] * In[4];
+        tmp += kCoefficients32To24[2][3] * In[5];
+        tmp += kCoefficients32To24[2][4] * In[6];
+        tmp += kCoefficients32To24[2][5] * In[7];
+        tmp += kCoefficients32To24[2][6] * In[8];
+        tmp += kCoefficients32To24[2][7] * In[9];
+        Out[2] = tmp;
+
+        // update pointers
+        In += 4;
+        Out += 3;
+    }
+}
+
+//
+// fractional resampling filters
+//   Fout = 11/16 * Fin
+//   Fout =  8/11 * Fin
+//
+
+// compute two 9-tap inner products (in1 read forward, in2 read backward) and store them
+static void WebRtcSpl_ResampDotProduct(const WebRtc_Word32 *in1, const WebRtc_Word32 *in2,
+                               const WebRtc_Word16 *coef_ptr, WebRtc_Word32 *out1,
+                               WebRtc_Word32 *out2)
+{
+    WebRtc_Word32 tmp1 = 16384;
+    WebRtc_Word32 tmp2 = 16384;
+    WebRtc_Word16 coef;
+
+    coef = coef_ptr[0];
+    tmp1 += coef * in1[0];
+    tmp2 += coef * in2[-0];
+
+    coef = coef_ptr[1];
+    tmp1 += coef * in1[1];
+    tmp2 += coef * in2[-1];
+
+    coef = coef_ptr[2];
+    tmp1 += coef * in1[2];
+    tmp2 += coef * in2[-2];
+
+    coef = coef_ptr[3];
+    tmp1 += coef * in1[3];
+    tmp2 += coef * in2[-3];
+
+    coef = coef_ptr[4];
+    tmp1 += coef * in1[4];
+    tmp2 += coef * in2[-4];
+
+    coef = coef_ptr[5];
+    tmp1 += coef * in1[5];
+    tmp2 += coef * in2[-5];
+
+    coef = coef_ptr[6];
+    tmp1 += coef * in1[6];
+    tmp2 += coef * in2[-6];
+
+    coef = coef_ptr[7];
+    tmp1 += coef * in1[7];
+    tmp2 += coef * in2[-7];
+
+    coef = coef_ptr[8];
+    *out1 = tmp1 + coef * in1[8];
+    *out2 = tmp2 + coef * in2[-8];
+}
+
+//   Resampling ratio: 8/11
+// input:  WebRtc_Word32 (normalized, not saturated) :: size 11 * K
+// output: WebRtc_Word32 (shifted 15 positions to the left, + offset 16384) :: size  8 * K
+//      K: number of blocks
+
+void WebRtcSpl_Resample44khzTo32khz(const WebRtc_Word32 *In, WebRtc_Word32 *Out,
+                                    const WebRtc_Word32 K)
+{
+    /////////////////////////////////////////////////////////////
+    // Filter operation:
+    //
+    // Perform resampling (11 input samples -> 8 output samples);
+    // process in sub blocks of size 11 samples (each reads In[0..17], so In must hold 11 * K + 7 samples).
+    WebRtc_Word32 tmp;
+    WebRtc_Word32 m;
+
+    for (m = 0; m < K; m++)
+    {
+        tmp = 1 << 14;
+
+        // first output sample
+        Out[0] = ((WebRtc_Word32)In[3] << 15) + tmp;
+
+        // sum and accumulate filter coefficients and input samples
+        tmp += kCoefficients44To32[3][0] * In[5];
+        tmp += kCoefficients44To32[3][1] * In[6];
+        tmp += kCoefficients44To32[3][2] * In[7];
+        tmp += kCoefficients44To32[3][3] * In[8];
+        tmp += kCoefficients44To32[3][4] * In[9];
+        tmp += kCoefficients44To32[3][5] * In[10];
+        tmp += kCoefficients44To32[3][6] * In[11];
+        tmp += kCoefficients44To32[3][7] * In[12];
+        tmp += kCoefficients44To32[3][8] * In[13];
+        Out[4] = tmp;
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_ResampDotProduct(&In[0], &In[17], kCoefficients44To32[0], &Out[1], &Out[7]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_ResampDotProduct(&In[2], &In[15], kCoefficients44To32[1], &Out[2], &Out[6]);
+
+        // sum and accumulate filter coefficients and input samples
+        WebRtcSpl_ResampDotProduct(&In[3], &In[14], kCoefficients44To32[2], &Out[3], &Out[5]);
+
+        // update pointers
+        In += 11;
+        Out += 8;
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/signal_processing.gypi b/trunk/src/common_audio/signal_processing/signal_processing.gypi
new file mode 100644
index 0000000..c67bf7c
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/signal_processing.gypi
@@ -0,0 +1,85 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'signal_processing',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/signal_processing_library.h',
+        'include/spl_inl.h',
+        'auto_corr_to_refl_coef.c',
+        'auto_correlation.c',
+        'complex_fft.c',
+        'complex_bit_reverse.c',
+        'copy_set_operations.c',
+        'cross_correlation.c',
+        'division_operations.c',
+        'dot_product_with_scale.c',
+        'downsample_fast.c',
+        'energy.c',
+        'filter_ar.c',
+        'filter_ar_fast_q12.c',
+        'filter_ma_fast_q12.c',
+        'get_hanning_window.c',
+        'get_scaling_square.c',
+        'ilbc_specific_functions.c',
+        'levinson_durbin.c',
+        'lpc_to_refl_coef.c',
+        'min_max_operations.c',
+        'randomization_functions.c',
+        'refl_coef_to_lpc.c',
+        'resample.c',
+        'resample_48khz.c',
+        'resample_by_2.c',
+        'resample_by_2_internal.c',
+        'resample_by_2_internal.h',
+        'resample_fractional.c',
+        'spl_sqrt.c',
+        'spl_sqrt_floor.c',
+        'spl_version.c',
+        'splitting_filter.c',
+        'sqrt_of_one_minus_x_squared.c',
+        'vector_scaling_operations.c',
+      ],
+    }, # signal_processing
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'signal_processing_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'signal_processing',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'signal_processing_unittest.cc',
+          ],
+        }, # signal_processing_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/common_audio/signal_processing/signal_processing_unittest.cc b/trunk/src/common_audio/signal_processing/signal_processing_unittest.cc
new file mode 100644
index 0000000..f6d1123
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/signal_processing_unittest.cc
@@ -0,0 +1,454 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "signal_processing_library.h"
+#include "gtest/gtest.h"
+
+// Shared gtest fixture for the signal processing library (SPL) tests.
+// No per-test state is required; SetUp()/TearDown() are intentionally empty.
+class SplTest : public testing::Test {
+ protected:
+  virtual ~SplTest() {
+  }
+  void SetUp() {
+  }
+  void TearDown() {
+  }
+};
+
+// Checks the arithmetic helper macros against hard-coded expected values.
+// The SPL library is fixed-point, so results must be bit-exact.
+TEST_F(SplTest, MacroTest) {
+    // Macros with inputs.
+    int A = 10;
+    int B = 21;
+    int a = -3;
+    int b = WEBRTC_SPL_WORD32_MAX;
+    int nr = 2;
+    int d_ptr2 = 0;
+
+    EXPECT_EQ(10, WEBRTC_SPL_MIN(A, B));
+    EXPECT_EQ(21, WEBRTC_SPL_MAX(A, B));
+
+    EXPECT_EQ(3, WEBRTC_SPL_ABS_W16(a));
+    EXPECT_EQ(3, WEBRTC_SPL_ABS_W32(a));
+    EXPECT_EQ(0, WEBRTC_SPL_GET_BYTE(&B, nr));
+    // Writing 1 into byte index 2 of a zeroed word yields 1 << 16.
+    WEBRTC_SPL_SET_BYTE(&d_ptr2, 1, nr);
+    EXPECT_EQ(65536, d_ptr2);
+
+    EXPECT_EQ(-63, WEBRTC_SPL_MUL(a, B));
+    // NOTE(review): -3 * WORD32_MAX overflows a signed 32-bit multiply; the
+    // expected value relies on two's-complement wraparound.
+    EXPECT_EQ(-2147483645, WEBRTC_SPL_MUL(a, b));
+    EXPECT_EQ(2147483651u, WEBRTC_SPL_UMUL(a, b));
+    b = WEBRTC_SPL_WORD16_MAX >> 1;  // b = 16383 for the remaining checks
+    EXPECT_EQ(65535u, WEBRTC_SPL_UMUL_RSFT16(a, b));
+    EXPECT_EQ(1073627139u, WEBRTC_SPL_UMUL_16_16(a, b));
+    EXPECT_EQ(16382u, WEBRTC_SPL_UMUL_16_16_RSFT16(a, b));
+    EXPECT_EQ(4294918147u, WEBRTC_SPL_UMUL_32_16(a, b));
+    EXPECT_EQ(65535u, WEBRTC_SPL_UMUL_32_16_RSFT16(a, b));
+    EXPECT_EQ(-49149, WEBRTC_SPL_MUL_16_U16(a, b));
+
+    a = b;  // a = 16383 from here on
+    b = -3;
+    EXPECT_EQ(-5461, WEBRTC_SPL_DIV(a, b));
+    EXPECT_EQ(0u, WEBRTC_SPL_UDIV(a, b));
+
+    // Mixed 16x32-bit multiplies with different right shifts.
+    EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_32_RSFT16(a, b));
+    EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_32_RSFT15(a, b));
+    EXPECT_EQ(-3, WEBRTC_SPL_MUL_16_32_RSFT14(a, b));
+    EXPECT_EQ(-24, WEBRTC_SPL_MUL_16_32_RSFT11(a, b));
+
+    // 32x32-bit multiply given either the split high/low halves or the
+    // full word; both forms must agree.
+    int a32 = WEBRTC_SPL_WORD32_MAX;
+    int a32a = (WEBRTC_SPL_WORD32_MAX >> 16);
+    int a32b = (WEBRTC_SPL_WORD32_MAX & 0x0000ffff);
+    EXPECT_EQ(5, WEBRTC_SPL_MUL_32_32_RSFT32(a32a, a32b, A));
+    EXPECT_EQ(5, WEBRTC_SPL_MUL_32_32_RSFT32BI(a32, A));
+
+    EXPECT_EQ(-49149, WEBRTC_SPL_MUL_16_16(a, b));
+    EXPECT_EQ(-12288, WEBRTC_SPL_MUL_16_16_RSFT(a, b, 2));
+
+    EXPECT_EQ(-12287, WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(a, b, 2));
+    EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(a, b));
+
+    EXPECT_EQ(16380, WEBRTC_SPL_ADD_SAT_W32(a, b));
+    EXPECT_EQ(21, WEBRTC_SPL_SAT(a, A, B));
+    EXPECT_EQ(21, WEBRTC_SPL_SAT(a, B, A));
+    EXPECT_EQ(-49149, WEBRTC_SPL_MUL_32_16(a, b));
+
+    EXPECT_EQ(16386, WEBRTC_SPL_SUB_SAT_W32(a, b));
+    EXPECT_EQ(16380, WEBRTC_SPL_ADD_SAT_W16(a, b));
+    EXPECT_EQ(16386, WEBRTC_SPL_SUB_SAT_W16(a, b));
+
+    EXPECT_TRUE(WEBRTC_SPL_IS_NEG(b));
+
+    // Shifting with negative numbers allowed
+    int shift_amount = 1;  // Workaround compiler warning using variable here.
+    // Positive means left shift
+    EXPECT_EQ(32766, WEBRTC_SPL_SHIFT_W16(a, shift_amount));
+    EXPECT_EQ(32766, WEBRTC_SPL_SHIFT_W32(a, shift_amount));
+
+    // Shifting with negative numbers not allowed
+    // We cannot do casting here due to signed/unsigned problem
+    EXPECT_EQ(8191, WEBRTC_SPL_RSHIFT_W16(a, 1));
+    EXPECT_EQ(32766, WEBRTC_SPL_LSHIFT_W16(a, 1));
+    EXPECT_EQ(8191, WEBRTC_SPL_RSHIFT_W32(a, 1));
+    EXPECT_EQ(32766, WEBRTC_SPL_LSHIFT_W32(a, 1));
+
+    EXPECT_EQ(8191, WEBRTC_SPL_RSHIFT_U16(a, 1));
+    EXPECT_EQ(32766, WEBRTC_SPL_LSHIFT_U16(a, 1));
+    EXPECT_EQ(8191u, WEBRTC_SPL_RSHIFT_U32(a, 1));
+    EXPECT_EQ(32766u, WEBRTC_SPL_LSHIFT_U32(a, 1));
+
+    // Pseudo-random macro; deterministic for a given input.
+    EXPECT_EQ(1470, WEBRTC_SPL_RAND(A));
+}
+
+// Exercises the inline helpers from spl_inl.h with fixed operands.
+TEST_F(SplTest, InlineTest) {
+    const WebRtc_Word16 kWord16Pos = 121;
+    const WebRtc_Word16 kWord16Neg = -17;
+    const WebRtc_Word32 kWord32Pos = 111121;
+    const WebRtc_Word32 kWord32Neg = -1711;
+    char version_buffer[8];
+
+    // Saturating 16-bit arithmetic.
+    EXPECT_EQ(104, WebRtcSpl_AddSatW16(kWord16Pos, kWord16Neg));
+    EXPECT_EQ(138, WebRtcSpl_SubSatW16(kWord16Pos, kWord16Neg));
+
+    // Saturating 32-bit arithmetic.
+    EXPECT_EQ(109410, WebRtcSpl_AddSatW32(kWord32Pos, kWord32Neg));
+    EXPECT_EQ(112832, WebRtcSpl_SubSatW32(kWord32Pos, kWord32Neg));
+
+    // Bit-count / normalization helpers.
+    EXPECT_EQ(17, WebRtcSpl_GetSizeInBits(kWord32Pos));
+    EXPECT_EQ(14, WebRtcSpl_NormW32(kWord32Pos));
+    EXPECT_EQ(4, WebRtcSpl_NormW16(kWord32Neg));
+    EXPECT_EQ(15, WebRtcSpl_NormU32(kWord32Pos));
+
+    EXPECT_EQ(0, WebRtcSpl_get_version(version_buffer, 8));
+}
+
+// Square roots and the fixed-point division helpers.
+TEST_F(SplTest, MathOperationsTest) {
+    const int kSqrtInput = 117;
+    const WebRtc_Word32 kNumerator = 117;
+    const WebRtc_Word32 kDenominator = -5;
+    const WebRtc_UWord16 kDenominatorU16 = 5;
+
+    EXPECT_EQ(10, WebRtcSpl_Sqrt(kSqrtInput));
+    EXPECT_EQ(10, WebRtcSpl_SqrtFloor(kSqrtInput));
+
+    // Note the argument order: DivResultInQ31(denominator, numerator).
+    EXPECT_EQ(-91772805, WebRtcSpl_DivResultInQ31(kDenominator, kNumerator));
+    EXPECT_EQ(-23,
+              WebRtcSpl_DivW32W16ResW16(kNumerator, (WebRtc_Word16)kDenominator));
+    EXPECT_EQ(-23, WebRtcSpl_DivW32W16(kNumerator, (WebRtc_Word16)kDenominator));
+    EXPECT_EQ(23u, WebRtcSpl_DivU32U16(kNumerator, kDenominatorU16));
+    EXPECT_EQ(0, WebRtcSpl_DivW32HiLow(128, 0, 256));
+}
+
+// Memset/zeros/ones/copy/bit-shift helpers on short vectors of all
+// supported element widths (8/16/32 bits).
+TEST_F(SplTest, BasicArrayOperationsTest) {
+    const int kVectorSize = 4;
+    int B[] = {4, 12, 133, 1100};
+    WebRtc_UWord8 b8[kVectorSize];
+    WebRtc_Word16 b16[kVectorSize];
+    WebRtc_Word32 b32[kVectorSize];
+
+    WebRtc_UWord8 bTmp8[kVectorSize];
+    WebRtc_Word16 bTmp16[kVectorSize];
+    WebRtc_Word32 bTmp32[kVectorSize];
+
+    WebRtcSpl_MemSetW16(b16, 3, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(3, b16[kk]);
+    }
+    // The Zeros/Ones helpers return the number of elements written.
+    EXPECT_EQ(kVectorSize, WebRtcSpl_ZerosArrayW16(b16, kVectorSize));
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(0, b16[kk]);
+    }
+    EXPECT_EQ(kVectorSize, WebRtcSpl_OnesArrayW16(b16, kVectorSize));
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(1, b16[kk]);
+    }
+    WebRtcSpl_MemSetW32(b32, 3, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(3, b32[kk]);
+    }
+    EXPECT_EQ(kVectorSize, WebRtcSpl_ZerosArrayW32(b32, kVectorSize));
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(0, b32[kk]);
+    }
+    EXPECT_EQ(kVectorSize, WebRtcSpl_OnesArrayW32(b32, kVectorSize));
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(1, b32[kk]);
+    }
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        bTmp8[kk] = (WebRtc_Word8)kk;
+        bTmp16[kk] = (WebRtc_Word16)kk;
+        bTmp32[kk] = (WebRtc_Word32)kk;
+    }
+    WEBRTC_SPL_MEMCPY_W8(b8, bTmp8, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(b8[kk], bTmp8[kk]);
+    }
+    WEBRTC_SPL_MEMCPY_W16(b16, bTmp16, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(b16[kk], bTmp16[kk]);
+    }
+    // TODO(review): the W32 copy check below is disabled; re-enable or remove.
+//    WEBRTC_SPL_MEMCPY_W32(b32, bTmp32, kVectorSize);
+//    for (int kk = 0; kk < kVectorSize; ++kk) {
+//        EXPECT_EQ(b32[kk], bTmp32[kk]);
+//    }
+    // Copies the last 2 elements (values 2, 3) from b16.
+    EXPECT_EQ(2, WebRtcSpl_CopyFromEndW16(b16, kVectorSize, 2, bTmp16));
+    for (int kk = 0; kk < 2; ++kk) {
+        EXPECT_EQ(kk+2, bTmp16[kk]);
+    }
+
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        b32[kk] = B[kk];
+        b16[kk] = (WebRtc_Word16)B[kk];
+    }
+    // Right shift by 1 in three width combinations.
+    WebRtcSpl_VectorBitShiftW32ToW16(bTmp16, kVectorSize, b32, 1);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((B[kk]>>1), bTmp16[kk]);
+    }
+    WebRtcSpl_VectorBitShiftW16(bTmp16, kVectorSize, b16, 1);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((B[kk]>>1), bTmp16[kk]);
+    }
+    WebRtcSpl_VectorBitShiftW32(bTmp32, kVectorSize, b32, 1);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((B[kk]>>1), bTmp32[kk]);
+    }
+
+    // Reversed copy: destination pointer is the END of the output buffer.
+    WebRtcSpl_MemCpyReversedOrder(&bTmp16[3], b16, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(b16[3-kk], bTmp16[kk]);
+    }
+}
+
+// Min/max value and index searches, including absolute-value variants.
+TEST_F(SplTest, MinMaxOperationsTest) {
+    const int kVectorSize = 4;
+    int B[] = {4, 12, 133, -1100};
+    WebRtc_Word16 b16[kVectorSize];
+    WebRtc_Word32 b32[kVectorSize];
+
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        b16[kk] = B[kk];
+        b32[kk] = B[kk];
+    }
+
+    // |-1100| has the largest magnitude; 133 is the largest signed value.
+    EXPECT_EQ(1100, WebRtcSpl_MaxAbsValueW16(b16, kVectorSize));
+    EXPECT_EQ(1100, WebRtcSpl_MaxAbsValueW32(b32, kVectorSize));
+    EXPECT_EQ(133, WebRtcSpl_MaxValueW16(b16, kVectorSize));
+    EXPECT_EQ(133, WebRtcSpl_MaxValueW32(b32, kVectorSize));
+    EXPECT_EQ(3, WebRtcSpl_MaxAbsIndexW16(b16, kVectorSize));
+    EXPECT_EQ(2, WebRtcSpl_MaxIndexW16(b16, kVectorSize));
+    EXPECT_EQ(2, WebRtcSpl_MaxIndexW32(b32, kVectorSize));
+
+    EXPECT_EQ(-1100, WebRtcSpl_MinValueW16(b16, kVectorSize));
+    EXPECT_EQ(-1100, WebRtcSpl_MinValueW32(b32, kVectorSize));
+    EXPECT_EQ(3, WebRtcSpl_MinIndexW16(b16, kVectorSize));
+    EXPECT_EQ(3, WebRtcSpl_MinIndexW32(b32, kVectorSize));
+
+    EXPECT_EQ(0, WebRtcSpl_GetScalingSquare(b16, kVectorSize, 1));
+}
+
+// Affine transforms, scaling, cross-correlation and element-wise vector
+// operations. NOTE: several checks build on the bTmp16 contents produced
+// by the previous call, so the statement order matters.
+TEST_F(SplTest, VectorOperationsTest) {
+    const int kVectorSize = 4;
+    int B[] = {4, 12, 133, 1100};
+    WebRtc_Word16 a16[kVectorSize];
+    WebRtc_Word16 b16[kVectorSize];
+    WebRtc_Word32 b32[kVectorSize];
+    WebRtc_Word16 bTmp16[kVectorSize];
+
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        a16[kk] = B[kk];
+        b16[kk] = B[kk];
+    }
+
+    // out = (in*gain + offset) >> shift
+    WebRtcSpl_AffineTransformVector(bTmp16, b16, 3, 7, 2, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((B[kk]*3+7)>>2, bTmp16[kk]);
+    }
+    WebRtcSpl_ScaleAndAddVectorsWithRound(b16, 3, b16, 2, 2, bTmp16, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((B[kk]*3+B[kk]*2+2)>>2, bTmp16[kk]);
+    }
+
+    // Accumulates on top of the previous bTmp16 contents.
+    WebRtcSpl_AddAffineVectorToVector(bTmp16, b16, 3, 7, 2, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(((B[kk]*3+B[kk]*2+2)>>2)+((b16[kk]*3+7)>>2), bTmp16[kk]);
+    }
+
+    WebRtcSpl_CrossCorrelation(b32, b16, bTmp16, kVectorSize, 2, 2, 0);
+    for (int kk = 0; kk < 2; ++kk) {
+        EXPECT_EQ(614236, b32[kk]);
+    }
+    // TODO(review): DotProduct check below has no expected value; complete or remove.
+//    EXPECT_EQ(, WebRtcSpl_DotProduct(b16, bTmp16, 4));
+    EXPECT_EQ(306962, WebRtcSpl_DotProductWithScale(b16, b16, kVectorSize, 2));
+
+    WebRtcSpl_ScaleVector(b16, bTmp16, 13, kVectorSize, 2);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((b16[kk]*13)>>2, bTmp16[kk]);
+    }
+    // Saturating variant; inputs are small enough that results must match
+    // the non-saturating call above.
+    WebRtcSpl_ScaleVectorWithSat(b16, bTmp16, 13, kVectorSize, 2);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((b16[kk]*13)>>2, bTmp16[kk]);
+    }
+    WebRtcSpl_ScaleAndAddVectors(a16, 13, 2, b16, 7, 2, bTmp16, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(((a16[kk]*13)>>2)+((b16[kk]*7)>>2), bTmp16[kk]);
+    }
+
+    // a16 == b16, so (a16 + b16) >> 2 == B >> 1.
+    WebRtcSpl_AddVectorsAndShift(bTmp16, a16, b16, kVectorSize, 2);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(B[kk] >> 1, bTmp16[kk]);
+    }
+    // Second input is walked backwards from its END pointer.
+    WebRtcSpl_ReverseOrderMultArrayElements(bTmp16, a16, &b16[3], kVectorSize, 2);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((a16[kk]*b16[3-kk])>>2, bTmp16[kk]);
+    }
+    WebRtcSpl_ElementwiseVectorMult(bTmp16, a16, b16, kVectorSize, 6);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ((a16[kk]*b16[kk])>>6, bTmp16[kk]);
+    }
+
+    WebRtcSpl_SqrtOfOneMinusXSquared(b16, kVectorSize, bTmp16);
+    for (int kk = 0; kk < kVectorSize - 1; ++kk) {
+        EXPECT_EQ(32767, bTmp16[kk]);
+    }
+    EXPECT_EQ(32749, bTmp16[kVectorSize - 1]);
+}
+
+// Smoke test for the Levinson-Durbin recursion (order 2); only the
+// return code is verified here.
+TEST_F(SplTest, EstimatorsTest) {
+    const int kLength = 4;
+    const int kSource[] = {4, 12, 133, 1100};
+    WebRtc_Word16 input16[kLength];
+    WebRtc_Word32 input32[kLength];
+    WebRtc_Word16 scratch16[kLength];
+
+    for (int i = 0; i < kLength; ++i) {
+        input16[i] = kSource[i];
+        input32[i] = kSource[i];
+    }
+
+    EXPECT_EQ(0, WebRtcSpl_LevinsonDurbin(input32, input16, scratch16, 2));
+}
+
+// MA/AR filter smoke tests with hard-coded Q12 coefficients.
+TEST_F(SplTest, FilterTest) {
+    const int kVectorSize = 4;
+    const int kFilterOrder = 3;
+    WebRtc_Word16 A[] = {1, 2, 33, 100};      // AR coefficients / input data
+    WebRtc_Word16 A5[] = {1, 2, 33, 100, -5}; // 5-tap AR coefficients
+    WebRtc_Word16 B[] = {4, 12, 133, 110};    // MA coefficients
+    WebRtc_Word16 data_in[kVectorSize];
+    WebRtc_Word16 data_out[kVectorSize];
+    WebRtc_Word16 bTmp16Low[kVectorSize];
+    WebRtc_Word16 bState[kVectorSize];
+    WebRtc_Word16 bStateLow[kVectorSize];
+
+    WebRtcSpl_ZerosArrayW16(bState, kVectorSize);
+    WebRtcSpl_ZerosArrayW16(bStateLow, kVectorSize);
+
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        data_in[kk] = A[kk];
+        data_out[kk] = 0;
+    }
+
+    // MA filters.
+    // Note that the input data has |kFilterOrder| states before the actual
+    // data (one sample).
+    WebRtcSpl_FilterMAFastQ12(&data_in[kFilterOrder], data_out, B,
+                              kFilterOrder + 1, 1);
+    // (4*100 + 12*33 + 133*2 + 110*1) >> 12 == 1172 >> 12 == 0.
+    EXPECT_EQ(0, data_out[0]);
+    // AR filters.
+    // Note that the output data has |kFilterOrder| states before the actual
+    // data (one sample).
+    WebRtcSpl_FilterARFastQ12(data_in, &data_out[kFilterOrder], A,
+                              kFilterOrder + 1, 1);
+    EXPECT_EQ(0, data_out[kFilterOrder]);
+
+    // Full AR filter with a separate low-precision state; the return value
+    // is the number of samples produced.
+    EXPECT_EQ(kVectorSize, WebRtcSpl_FilterAR(A5,
+                                              5,
+                                              data_in,
+                                              kVectorSize,
+                                              bState,
+                                              kVectorSize,
+                                              bStateLow,
+                                              kVectorSize,
+                                              data_out,
+                                              bTmp16Low,
+                                              kVectorSize));
+}
+
+// The PRNG must be fully deterministic for a given seed; these values pin
+// down the expected sequence starting from seed 100000.
+TEST_F(SplTest, RandTest) {
+    const int kArrayLength = 4;
+    const WebRtc_Word16 kExpected[] = {3653, 12446, 8525, 30691};
+    WebRtc_Word16 generated[kArrayLength];
+    WebRtc_UWord32 seed = 100000;
+
+    EXPECT_EQ(464449057u, WebRtcSpl_IncreaseSeed(&seed));
+    EXPECT_EQ(31565, WebRtcSpl_RandU(&seed));
+    EXPECT_EQ(-9786, WebRtcSpl_RandN(&seed));
+    EXPECT_EQ(kArrayLength, WebRtcSpl_RandUArray(generated, kArrayLength, &seed));
+    for (int i = 0; i < kArrayLength; ++i) {
+        EXPECT_EQ(kExpected[i], generated[i]);
+    }
+}
+
+// Autocorrelation, Hanning window and energy computations.
+TEST_F(SplTest, SignalProcessingTest) {
+    const int kVectorSize = 4;
+    int A[] = {1, 2, 33, 100};
+    const WebRtc_Word16 kHanning[4] = { 2399, 8192, 13985, 16384 };
+    WebRtc_Word16 b16[kVectorSize];
+
+    WebRtc_Word16 bTmp16[kVectorSize];
+    WebRtc_Word32 bTmp32[kVectorSize];
+
+    int bScale = 0;
+
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        b16[kk] = A[kk];
+    }
+
+    // Return value is the number of lags computed (order + 1 = 2).
+    EXPECT_EQ(2, WebRtcSpl_AutoCorrelation(b16, kVectorSize, 1, bTmp32, &bScale));
+    // TODO(bjornv): Activate the Reflection Coefficient tests when refactoring.
+//    WebRtcSpl_ReflCoefToLpc(b16, kVectorSize, bTmp16);
+////    for (int kk = 0; kk < kVectorSize; ++kk) {
+////        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
+////    }
+//    WebRtcSpl_LpcToReflCoef(bTmp16, kVectorSize, b16);
+////    for (int kk = 0; kk < kVectorSize; ++kk) {
+////        EXPECT_EQ(a16[kk], b16[kk]);
+////    }
+//    WebRtcSpl_AutoCorrToReflCoef(b32, kVectorSize, bTmp16);
+////    for (int kk = 0; kk < kVectorSize; ++kk) {
+////        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
+////    }
+
+    WebRtcSpl_GetHanningWindow(bTmp16, kVectorSize);
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(kHanning[kk], bTmp16[kk]);
+    }
+
+    // Re-initialize b16 before the energy check.
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        b16[kk] = A[kk];
+    }
+    // 1^2 + 2^2 + 33^2 + 100^2 = 11094; small enough that no scaling occurs.
+    EXPECT_EQ(11094 , WebRtcSpl_Energy(b16, kVectorSize, &bScale));
+    EXPECT_EQ(0, bScale);
+}
+
+// Forward and inverse complex FFT of 2^3 = 8 complex (16 real) values.
+// Defect fixed: the original ended with a 16-iteration loop whose body was
+// entirely commented out — dead code that executed nothing and suggested a
+// verification that never happened. Only the return codes are currently
+// verified; output checking against reference vectors is still a TODO.
+TEST_F(SplTest, FFTTest) {
+    WebRtc_Word16 B[] = {1, 2, 33, 100,
+            2, 3, 34, 101,
+            3, 4, 35, 102,
+            4, 5, 36, 103};
+
+    // TODO(review): compare the transformed buffer against reference
+    // vectors; only the return codes are checked for now.
+    EXPECT_EQ(0, WebRtcSpl_ComplexFFT(B, 3, 1));
+    EXPECT_EQ(0, WebRtcSpl_ComplexIFFT(B, 3, 1));
+
+    // Bit-reverse reordering; currently only exercised for crashes.
+    WebRtcSpl_ComplexBitReverse(B, 3);
+}
diff --git a/trunk/src/common_audio/signal_processing/spl_sqrt.c b/trunk/src/common_audio/signal_processing/spl_sqrt.c
new file mode 100644
index 0000000..cfe2cd3
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/spl_sqrt.c
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_Sqrt().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+WebRtc_Word32 WebRtcSpl_SqrtLocal(WebRtc_Word32 in);
+
+// Evaluates the six-term Taylor polynomial
+//   t = 1 + x/2 - 0.5*(x/2)^2 + 0.5*(x/2)^3 - 0.625*(x/2)^4 + 0.875*(x/2)^5
+// in Q31 fixed point, where x = in/2^30 - 1. Helper for WebRtcSpl_Sqrt().
+WebRtc_Word32 WebRtcSpl_SqrtLocal(WebRtc_Word32 in)
+{
+
+    WebRtc_Word16 x_half, t16;
+    WebRtc_Word32 A, B, x2;
+
+    /* The following block performs:
+     y=in/2
+     x=y-2^30
+     x_half=x/2^31
+     t = 1 + (x_half) - 0.5*((x_half)^2) + 0.5*((x_half)^3) - 0.625*((x_half)^4)
+         + 0.875*((x_half)^5)
+     */
+
+    B = in;
+
+    B = WEBRTC_SPL_RSHIFT_W32(B, 1); // B = in/2
+    B = B - ((WebRtc_Word32)0x40000000); // B = in/2 - 1/2
+    x_half = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(B, 16);// x_half = x/2 = (in-1)/2
+    B = B + ((WebRtc_Word32)0x40000000); // B = 1 + x/2
+    B = B + ((WebRtc_Word32)0x40000000); // Add 0.5 twice (since 1.0 does not exist in Q31)
+
+    x2 = ((WebRtc_Word32)x_half) * ((WebRtc_Word32)x_half) * 2; // x2 = (x/2)^2
+    A = -x2; // A = -(x/2)^2
+    B = B + (A >> 1); // B = 1 + x/2 - 0.5*(x/2)^2
+
+    A = WEBRTC_SPL_RSHIFT_W32(A, 16);
+    A = A * A * 2; // A = (x/2)^4
+    t16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(A, 16);
+    B = B + WEBRTC_SPL_MUL_16_16(-20480, t16) * 2; // B = B - 0.625*A
+    // After this, B = 1 + x/2 - 0.5*(x/2)^2 - 0.625*(x/2)^4
+
+    t16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(A, 16);
+    A = WEBRTC_SPL_MUL_16_16(x_half, t16) * 2; // A = (x/2)^5
+    t16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(A, 16);
+    B = B + WEBRTC_SPL_MUL_16_16(28672, t16) * 2; // B = B + 0.875*A
+    // After this, B = 1 + x/2 - 0.5*(x/2)^2 - 0.625*(x/2)^4 + 0.875*(x/2)^5
+
+    t16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(x2, 16);
+    A = WEBRTC_SPL_MUL_16_16(x_half, t16) * 2; // A = (x/2)^3
+
+    B = B + (A >> 1); // B = B + 0.5*A
+    // After this, B = 1 + x/2 - 0.5*(x/2)^2 + 0.5*(x/2)^3 - 0.625*(x/2)^4 + 0.875*(x/2)^5
+
+    B = B + ((WebRtc_Word32)32768); // Round off bit
+
+    return B;
+}
+
+// Computes the integer part of sqrt(value) for a non-negative 32-bit input,
+// by normalizing the input, evaluating the Taylor polynomial in
+// WebRtcSpl_SqrtLocal(), multiplying by 1/sqrt(2) when the normalization
+// shift was even, and de-normalizing the result.
+WebRtc_Word32 WebRtcSpl_Sqrt(WebRtc_Word32 value)
+{
+    /*
+     Algorithm:
+
+     Six term Taylor Series is used here to compute the square root of a number
+     y^0.5 = (1+x)^0.5 where x = y-1
+     = 1+(x/2)-0.5*((x/2)^2+0.5*((x/2)^3-0.625*((x/2)^4+0.875*((x/2)^5)
+     0.5 <= x < 1
+
+     Example of how the algorithm works, with ut=sqrt(in), and
+     with in=73632 and ut=271 (even shift value case):
+
+     in=73632
+     y= in/131072
+     x=y-1
+     t = 1 + (x/2) - 0.5*((x/2)^2) + 0.5*((x/2)^3) - 0.625*((x/2)^4) + 0.875*((x/2)^5)
+     ut=t*(1/sqrt(2))*512
+
+     or:
+
+     in=73632
+     in2=73632*2^14
+     y= in2/2^31
+     x=y-1
+     t = 1 + (x/2) - 0.5*((x/2)^2) + 0.5*((x/2)^3) - 0.625*((x/2)^4) + 0.875*((x/2)^5)
+     ut=t*(1/sqrt(2))
+     ut2=ut*2^9
+
+     which gives:
+
+     in  = 73632
+     in2 = 1206386688
+     y   = 0.56176757812500
+     x   = -0.43823242187500
+     t   = 0.74973506527313
+     ut  = 0.53014274874797
+     ut2 = 2.714330873589594e+002
+
+     or:
+
+     in=73632
+     in2=73632*2^14
+     y=in2/2
+     x=y-2^30
+     x_half=x/2^31
+     t = 1 + (x_half) - 0.5*((x_half)^2) + 0.5*((x_half)^3) - 0.625*((x_half)^4)
+         + 0.875*((x_half)^5)
+     ut=t*(1/sqrt(2))
+     ut2=ut*2^9
+
+     which gives:
+
+     in  = 73632
+     in2 = 1206386688
+     y   = 603193344
+     x   = -470548480
+     x_half =  -0.21911621093750
+     t   = 0.74973506527313
+     ut  = 0.53014274874797
+     ut2 = 2.714330873589594e+002
+
+     */
+
+    WebRtc_Word16 x_norm, nshift, t16, sh;
+    WebRtc_Word32 A;
+
+    WebRtc_Word16 k_sqrt_2 = 23170; // 1/sqrt2 (==5a82)
+
+    A = value;
+
+    if (A == 0)
+        return (WebRtc_Word32)0; // sqrt(0) = 0
+
+    sh = WebRtcSpl_NormW32(A); // # shifts to normalize A
+    A = WEBRTC_SPL_LSHIFT_W32(A, sh); // Normalize A
+    if (A < (WEBRTC_SPL_WORD32_MAX - 32767))
+    {
+        A = A + ((WebRtc_Word32)32768); // Round off bit
+    } else
+    {
+        A = WEBRTC_SPL_WORD32_MAX; // Clamp to avoid overflowing the rounding add
+    }
+
+    x_norm = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(A, 16); // x_norm = AH
+
+    nshift = WEBRTC_SPL_RSHIFT_W16(sh, 1); // nshift = sh>>1
+    nshift = -nshift; // Negate the power for later de-normalization
+
+    A = (WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x_norm, 16);
+    A = WEBRTC_SPL_ABS_W32(A); // A = abs(x_norm<<16)
+    A = WebRtcSpl_SqrtLocal(A); // A = sqrt(A)
+
+    if ((-2 * nshift) == sh)
+    { // Even shift value case
+
+        t16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(A, 16); // t16 = AH
+
+        A = WEBRTC_SPL_MUL_16_16(k_sqrt_2, t16) * 2; // A = 1/sqrt(2)*t16
+        A = A + ((WebRtc_Word32)32768); // Round off
+        A = A & ((WebRtc_Word32)0x7fff0000); // Round off
+
+        A = WEBRTC_SPL_RSHIFT_W32(A, 15); // A = A>>15
+
+    } else
+    {
+        A = WEBRTC_SPL_RSHIFT_W32(A, 16); // A = A>>16
+    }
+
+    A = A & ((WebRtc_Word32)0x0000ffff); // Keep the low 16 bits of the root
+    A = (WebRtc_Word32)WEBRTC_SPL_SHIFT_W32(A, nshift); // De-normalize the result
+
+    return A;
+}
diff --git a/trunk/src/common_audio/signal_processing/spl_sqrt_floor.c b/trunk/src/common_audio/signal_processing/spl_sqrt_floor.c
new file mode 100644
index 0000000..f0e8ae2
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/spl_sqrt_floor.c
@@ -0,0 +1,54 @@
+/*
+ * Written by Wilco Dijkstra, 1996. Refer to file LICENSE under
+ * trunk/third_party_mods/sqrt_floor.
+ *
+ * Minor modifications in code style for WebRTC, 2012.
+ */
+
+#include "signal_processing_library.h"
+
+/*
+ * Algorithm:
+ * Successive approximation of the equation (root + delta) ^ 2 = N
+ * until delta < 1. If delta < 1 we have the integer part of SQRT (N).
+ * Use delta = 2^i for i = 15 .. 0.
+ *
+ * Output precision is 16 bits. Note for large input values (close to
+ * 0x7FFFFFFF), bit 15 (the highest bit of the low 16-bit half word)
+ * contains the MSB information (a non-sign value). Do with caution
+ * if you need to cast the output to int16_t type.
+ *
+ * If the input value is negative, it returns 0.
+ */
+
+// One step of the successive approximation: if the remainder in |value|
+// still covers the trial term (root + 2^N) * 2^N, subtract it and set bit
+// 2^(N+1) in |root|. |root| accumulates 2 * floor(sqrt(value)), which is
+// why the function returns root >> 1.
+#define WEBRTC_SPL_SQRT_ITER(N)                 \
+  try1 = root + (1 << (N));                     \
+  if (value >= try1 << (N))                     \
+  {                                             \
+    value -= try1 << (N);                       \
+    root |= 2 << (N);                           \
+  }
+
+// Integer square root, rounded down. See the algorithm description above;
+// the loop is fully unrolled for i = 15 .. 0.
+int32_t WebRtcSpl_SqrtFloor(int32_t value)
+{
+  int32_t root = 0, try1;
+
+  WEBRTC_SPL_SQRT_ITER (15);
+  WEBRTC_SPL_SQRT_ITER (14);
+  WEBRTC_SPL_SQRT_ITER (13);
+  WEBRTC_SPL_SQRT_ITER (12);
+  WEBRTC_SPL_SQRT_ITER (11);
+  WEBRTC_SPL_SQRT_ITER (10);
+  WEBRTC_SPL_SQRT_ITER ( 9);
+  WEBRTC_SPL_SQRT_ITER ( 8);
+  WEBRTC_SPL_SQRT_ITER ( 7);
+  WEBRTC_SPL_SQRT_ITER ( 6);
+  WEBRTC_SPL_SQRT_ITER ( 5);
+  WEBRTC_SPL_SQRT_ITER ( 4);
+  WEBRTC_SPL_SQRT_ITER ( 3);
+  WEBRTC_SPL_SQRT_ITER ( 2);
+  WEBRTC_SPL_SQRT_ITER ( 1);
+  WEBRTC_SPL_SQRT_ITER ( 0);
+
+  // Undo the doubled encoding used during iteration.
+  return root >> 1;
+}
diff --git a/trunk/src/common_audio/signal_processing/spl_sqrt_floor.s b/trunk/src/common_audio/signal_processing/spl_sqrt_floor.s
new file mode 100644
index 0000000..cfd9ed0
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/spl_sqrt_floor.s
@@ -0,0 +1,88 @@
+@ Written by Wilco Dijkstra, 1996. Refer to file LICENSE under
+@ trunk/third_party_mods/sqrt_floor.
+@
+@ Minor modifications in code style for WebRTC, 2012.
+@ Output is bit-exact with the reference C code in spl_sqrt_floor.c.
+
+@ Input :             r0 32 bit unsigned integer
+@ Output:             r0 = INT (SQRT (r0)), precision is 16 bits
+@ Registers touched:  r1, r2
+
+.global WebRtcSpl_SqrtFloor
+
+@ Fully unrolled successive-approximation loop (16 steps, i = 0 .. 15).
+@ Every step is the same three-instruction group:
+@   cmp/subhs : conditionally subtract the rotated trial term from r0
+@   adc       : shift the partial result in r2 left and append the carry
+@               produced by the comparison as the new result bit
+@ Per the header above, the result is bit-exact with the reference C
+@ implementation in spl_sqrt_floor.c.
+.align  2
+WebRtcSpl_SqrtFloor:
+.fnstart
+  mov    r1, #3 << 30
+  mov    r2, #1 << 30
+
+  @ unroll for i = 0 .. 15
+
+  cmp    r0, r2, ror #2 * 0
+  subhs  r0, r0, r2, ror #2 * 0
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 1
+  subhs  r0, r0, r2, ror #2 * 1
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 2
+  subhs  r0, r0, r2, ror #2 * 2
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 3
+  subhs  r0, r0, r2, ror #2 * 3
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 4
+  subhs  r0, r0, r2, ror #2 * 4
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 5
+  subhs  r0, r0, r2, ror #2 * 5
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 6
+  subhs  r0, r0, r2, ror #2 * 6
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 7
+  subhs  r0, r0, r2, ror #2 * 7
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 8
+  subhs  r0, r0, r2, ror #2 * 8
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 9
+  subhs  r0, r0, r2, ror #2 * 9
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 10
+  subhs  r0, r0, r2, ror #2 * 10
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 11
+  subhs  r0, r0, r2, ror #2 * 11
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 12
+  subhs  r0, r0, r2, ror #2 * 12
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 13
+  subhs  r0, r0, r2, ror #2 * 13
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 14
+  subhs  r0, r0, r2, ror #2 * 14
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 15
+  subhs  r0, r0, r2, ror #2 * 15
+  adc    r2, r1, r2, lsl #1
+
+  @ Mask out the bookkeeping bits to leave the final root in r0.
+  bic    r0, r2, #3 << 30  @ for rounding add: cmp r0, r2  adc r2, #1
+  bx lr
+
+.fnend
diff --git a/trunk/src/common_audio/signal_processing/spl_version.c b/trunk/src/common_audio/signal_processing/spl_version.c
new file mode 100644
index 0000000..936925e
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/spl_version.c
@@ -0,0 +1,25 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_get_version().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include <string.h>
+#include "signal_processing_library.h"
+
+// Copies the library version string ("1.2.0") into |version|, truncating
+// if necessary. Always returns 0.
+//
+// Fixes two defects in the original:
+//  - strncpy() does not null-terminate when the source (5 chars + '\0')
+//    does not fit in |length_in_bytes|; callers would read past the buffer.
+//  - a non-positive |length_in_bytes| was converted to a huge size_t and
+//    passed straight to strncpy().
+// Behavior is unchanged for buffers of 6 bytes or more (strncpy pads the
+// remainder with '\0', so the forced terminator is already zero).
+WebRtc_Word16 WebRtcSpl_get_version(char* version, WebRtc_Word16 length_in_bytes)
+{
+    if (length_in_bytes <= 0)
+    {
+        return 0; // Nothing can be written into a zero-length buffer.
+    }
+    strncpy(version, "1.2.0", (size_t)length_in_bytes);
+    version[length_in_bytes - 1] = '\0'; // Guarantee termination on truncation.
+    return 0;
+}
diff --git a/trunk/src/common_audio/signal_processing/splitting_filter.c b/trunk/src/common_audio/signal_processing/splitting_filter.c
new file mode 100644
index 0000000..f1acf67
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/splitting_filter.c
@@ -0,0 +1,198 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the splitting filter functions.
+ *
+ */
+
+#include "signal_processing_library.h"
+
// Number of samples in a low/high-band frame. The analysis function below
// consumes 2 * kBandFrameLength interleaved input samples per call.
enum
{
    kBandFrameLength = 160
};

// QMF all-pass filter coefficients in Q16 (three first-order stages each).
// In the analysis direction Filter1 processes the odd-sample branch and
// Filter2 the even-sample branch; the synthesis direction swaps them.
static const WebRtc_UWord16 WebRtcSpl_kAllPassFilter1[3] = {6418, 36982, 57261};
static const WebRtc_UWord16 WebRtcSpl_kAllPassFilter2[3] = {21333, 49062, 63010};
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcSpl_AllPassQMF(...)
+//
+// Allpass filter used by the analysis and synthesis parts of the QMF filter.
+//
+// Input:
+//    - in_data             : Input data sequence (Q10)
+//    - data_length         : Length of data sequence (>2)
+//    - filter_coefficients : Filter coefficients (length 3, Q16)
+//
+// Input & Output:
+//    - filter_state        : Filter state (length 6, Q10).
+//
+// Output:
+//    - out_data            : Output data sequence (Q10), length equal to
+//                            |data_length|
+//
+
void WebRtcSpl_AllPassQMF(WebRtc_Word32* in_data, const WebRtc_Word16 data_length,
                          WebRtc_Word32* out_data, const WebRtc_UWord16* filter_coefficients,
                          WebRtc_Word32* filter_state)
{
    // The procedure is to filter the input with three first-order all-pass
    // filters (cascade operations).
    //
    //         a_3 + q^-1    a_2 + q^-1    a_1 + q^-1
    // y[n] =  -----------   -----------   -----------   x[n]
    //         1 + a_3q^-1   1 + a_2q^-1   1 + a_1q^-1
    //
    // The input vector |filter_coefficients| includes these three filter coefficients.
    // The filter state contains the in_data state, in_data[-1], followed by
    // the out_data state, out_data[-1]. This is repeated for each cascade.
    // The first cascade filter will filter the |in_data| and store the output in
    // |out_data|. The second will take the |out_data| as input and make an
    // intermediate storage in |in_data|, to save memory. The third, and final,
    // cascade filter operation takes the |in_data| (which is the output from the
    // previous cascade filter) and stores the output in |out_data|.
    // Note that the input vector values are changed during the process.
    WebRtc_Word16 k;
    WebRtc_Word32 diff;
    // First all-pass cascade; filter from in_data to out_data.

    // Let y_i[n] indicate the output of cascade filter i (with filter coefficient a_i) at
    // vector position n. Then the final output will be y[n] = y_3[n].

    // First iteration uses the states stored in memory from the previous frame.
    // "diff" should be safe from wrap around since max values are 2^25.
    diff = WEBRTC_SPL_SUB_SAT_W32(in_data[0], filter_state[1]); // = (x[0] - y_1[-1])
    // y_1[0] =  x[-1] + a_1 * (x[0] - y_1[-1])
    out_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, filter_state[0]);

    // For the remaining iterations, use values from within this frame.
    for (k = 1; k < data_length; k++)
    {
        diff = WEBRTC_SPL_SUB_SAT_W32(in_data[k], out_data[k - 1]); // = (x[n] - y_1[n-1])
        // y_1[n] =  x[n-1] + a_1 * (x[n] - y_1[n-1])
        out_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, in_data[k - 1]);
    }

    // Update states for the next frame. Note: must be read before the second
    // cascade overwrites |in_data|.
    filter_state[0] = in_data[data_length - 1]; // x[N-1], becomes x[-1] next time
    filter_state[1] = out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time

    // Second all-pass cascade; filter from out_data to in_data (in-place reuse).
    diff = WEBRTC_SPL_SUB_SAT_W32(out_data[0], filter_state[3]); // = (y_1[0] - y_2[-1])
    // y_2[0] =  y_1[-1] + a_2 * (y_1[0] - y_2[-1])
    in_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, filter_state[2]);
    for (k = 1; k < data_length; k++)
    {
        diff = WEBRTC_SPL_SUB_SAT_W32(out_data[k], in_data[k - 1]); // =(y_1[n] - y_2[n-1])
        // y_2[n] =  y_1[n-1] + a_2 * (y_1[n] - y_2[n-1])
        in_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, out_data[k-1]);
    }

    filter_state[2] = out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time
    filter_state[3] = in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time

    // Third all-pass cascade; filter from in_data back to out_data.
    diff = WEBRTC_SPL_SUB_SAT_W32(in_data[0], filter_state[5]); // = (y_2[0] - y[-1])
    // y[0] =  y_2[-1] + a_3 * (y_2[0] - y[-1])
    out_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, filter_state[4]);
    for (k = 1; k < data_length; k++)
    {
        diff = WEBRTC_SPL_SUB_SAT_W32(in_data[k], out_data[k - 1]); // = (y_2[n] - y[n-1])
        // y[n] =  y_2[n-1] + a_3 * (y_2[n] - y[n-1])
        out_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, in_data[k-1]);
    }
    filter_state[4] = in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time
    filter_state[5] = out_data[data_length - 1]; // y[N-1], becomes y[-1] next time
}
+
+void WebRtcSpl_AnalysisQMF(const WebRtc_Word16* in_data, WebRtc_Word16* low_band,
+                           WebRtc_Word16* high_band, WebRtc_Word32* filter_state1,
+                           WebRtc_Word32* filter_state2)
+{
+    WebRtc_Word16 i;
+    WebRtc_Word16 k;
+    WebRtc_Word32 tmp;
+    WebRtc_Word32 half_in1[kBandFrameLength];
+    WebRtc_Word32 half_in2[kBandFrameLength];
+    WebRtc_Word32 filter1[kBandFrameLength];
+    WebRtc_Word32 filter2[kBandFrameLength];
+
+    // Split even and odd samples. Also shift them to Q10.
+    for (i = 0, k = 0; i < kBandFrameLength; i++, k += 2)
+    {
+        half_in2[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)in_data[k], 10);
+        half_in1[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)in_data[k + 1], 10);
+    }
+
+    // All pass filter even and odd samples, independently.
+    WebRtcSpl_AllPassQMF(half_in1, kBandFrameLength, filter1, WebRtcSpl_kAllPassFilter1,
+                         filter_state1);
+    WebRtcSpl_AllPassQMF(half_in2, kBandFrameLength, filter2, WebRtcSpl_kAllPassFilter2,
+                         filter_state2);
+
+    // Take the sum and difference of filtered version of odd and even
+    // branches to get upper & lower band.
+    for (i = 0; i < kBandFrameLength; i++)
+    {
+        tmp = filter1[i] + filter2[i] + 1024;
+        tmp = WEBRTC_SPL_RSHIFT_W32(tmp, 11);
+        low_band[i] = WebRtcSpl_SatW32ToW16(tmp);
+
+        tmp = filter1[i] - filter2[i] + 1024;
+        tmp = WEBRTC_SPL_RSHIFT_W32(tmp, 11);
+        high_band[i] = WebRtcSpl_SatW32ToW16(tmp);
+    }
+}
+
+void WebRtcSpl_SynthesisQMF(const WebRtc_Word16* low_band, const WebRtc_Word16* high_band,
+                            WebRtc_Word16* out_data, WebRtc_Word32* filter_state1,
+                            WebRtc_Word32* filter_state2)
+{
+    WebRtc_Word32 tmp;
+    WebRtc_Word32 half_in1[kBandFrameLength];
+    WebRtc_Word32 half_in2[kBandFrameLength];
+    WebRtc_Word32 filter1[kBandFrameLength];
+    WebRtc_Word32 filter2[kBandFrameLength];
+    WebRtc_Word16 i;
+    WebRtc_Word16 k;
+
+    // Obtain the sum and difference channels out of upper and lower-band channels.
+    // Also shift to Q10 domain.
+    for (i = 0; i < kBandFrameLength; i++)
+    {
+        tmp = (WebRtc_Word32)low_band[i] + (WebRtc_Word32)high_band[i];
+        half_in1[i] = WEBRTC_SPL_LSHIFT_W32(tmp, 10);
+        tmp = (WebRtc_Word32)low_band[i] - (WebRtc_Word32)high_band[i];
+        half_in2[i] = WEBRTC_SPL_LSHIFT_W32(tmp, 10);
+    }
+
+    // all-pass filter the sum and difference channels
+    WebRtcSpl_AllPassQMF(half_in1, kBandFrameLength, filter1, WebRtcSpl_kAllPassFilter2,
+                         filter_state1);
+    WebRtcSpl_AllPassQMF(half_in2, kBandFrameLength, filter2, WebRtcSpl_kAllPassFilter1,
+                         filter_state2);
+
+    // The filtered signals are even and odd samples of the output. Combine
+    // them. The signals are Q10 should shift them back to Q0 and take care of
+    // saturation.
+    for (i = 0, k = 0; i < kBandFrameLength; i++)
+    {
+        tmp = WEBRTC_SPL_RSHIFT_W32(filter2[i] + 512, 10);
+        out_data[k++] = WebRtcSpl_SatW32ToW16(tmp);
+
+        tmp = WEBRTC_SPL_RSHIFT_W32(filter1[i] + 512, 10);
+        out_data[k++] = WebRtcSpl_SatW32ToW16(tmp);
+    }
+
+}
diff --git a/trunk/src/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c b/trunk/src/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c
new file mode 100644
index 0000000..9fb2c73
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the function WebRtcSpl_SqrtOfOneMinusXSquared().
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+void WebRtcSpl_SqrtOfOneMinusXSquared(WebRtc_Word16 *xQ15, int vector_length,
+                                      WebRtc_Word16 *yQ15)
+{
+    WebRtc_Word32 sq;
+    int m;
+    WebRtc_Word16 tmp;
+
+    for (m = 0; m < vector_length; m++)
+    {
+        tmp = xQ15[m];
+        sq = WEBRTC_SPL_MUL_16_16(tmp, tmp); // x^2 in Q30
+        sq = 1073741823 - sq; // 1-x^2, where 1 ~= 0.99999999906 is 1073741823 in Q30
+        sq = WebRtcSpl_Sqrt(sq); // sqrt(1-x^2) in Q15
+        yQ15[m] = (WebRtc_Word16)sq;
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/vector_scaling_operations.c b/trunk/src/common_audio/signal_processing/vector_scaling_operations.c
new file mode 100644
index 0000000..20d239c
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/vector_scaling_operations.c
@@ -0,0 +1,151 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains implementations of the functions
+ * WebRtcSpl_VectorBitShiftW16()
+ * WebRtcSpl_VectorBitShiftW32()
+ * WebRtcSpl_VectorBitShiftW32ToW16()
+ * WebRtcSpl_ScaleVector()
+ * WebRtcSpl_ScaleVectorWithSat()
+ * WebRtcSpl_ScaleAndAddVectors()
+ *
+ * The description header can be found in signal_processing_library.h
+ *
+ */
+
+#include "signal_processing_library.h"
+
+void WebRtcSpl_VectorBitShiftW16(WebRtc_Word16 *res,
+                             WebRtc_Word16 length,
+                             G_CONST WebRtc_Word16 *in,
+                             WebRtc_Word16 right_shifts)
+{
+    int i;
+
+    if (right_shifts > 0)
+    {
+        for (i = length; i > 0; i--)
+        {
+            (*res++) = ((*in++) >> right_shifts);
+        }
+    } else
+    {
+        for (i = length; i > 0; i--)
+        {
+            (*res++) = ((*in++) << (-right_shifts));
+        }
+    }
+}
+
+void WebRtcSpl_VectorBitShiftW32(WebRtc_Word32 *out_vector,
+                             WebRtc_Word16 vector_length,
+                             G_CONST WebRtc_Word32 *in_vector,
+                             WebRtc_Word16 right_shifts)
+{
+    int i;
+
+    if (right_shifts > 0)
+    {
+        for (i = vector_length; i > 0; i--)
+        {
+            (*out_vector++) = ((*in_vector++) >> right_shifts);
+        }
+    } else
+    {
+        for (i = vector_length; i > 0; i--)
+        {
+            (*out_vector++) = ((*in_vector++) << (-right_shifts));
+        }
+    }
+}
+
+void WebRtcSpl_VectorBitShiftW32ToW16(WebRtc_Word16 *res,
+                                  WebRtc_Word16 length,
+                                  G_CONST WebRtc_Word32 *in,
+                                  WebRtc_Word16 right_shifts)
+{
+    int i;
+
+    if (right_shifts >= 0)
+    {
+        for (i = length; i > 0; i--)
+        {
+            (*res++) = (WebRtc_Word16)((*in++) >> right_shifts);
+        }
+    } else
+    {
+        WebRtc_Word16 left_shifts = -right_shifts;
+        for (i = length; i > 0; i--)
+        {
+            (*res++) = (WebRtc_Word16)((*in++) << left_shifts);
+        }
+    }
+}
+
+void WebRtcSpl_ScaleVector(G_CONST WebRtc_Word16 *in_vector, WebRtc_Word16 *out_vector,
+                           WebRtc_Word16 gain, WebRtc_Word16 in_vector_length,
+                           WebRtc_Word16 right_shifts)
+{
+    // Performs vector operation: out_vector = (gain*in_vector)>>right_shifts
+    int i;
+    G_CONST WebRtc_Word16 *inptr;
+    WebRtc_Word16 *outptr;
+
+    inptr = in_vector;
+    outptr = out_vector;
+
+    for (i = 0; i < in_vector_length; i++)
+    {
+        (*outptr++) = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(*inptr++, gain, right_shifts);
+    }
+}
+
+void WebRtcSpl_ScaleVectorWithSat(G_CONST WebRtc_Word16 *in_vector, WebRtc_Word16 *out_vector,
+                                 WebRtc_Word16 gain, WebRtc_Word16 in_vector_length,
+                                 WebRtc_Word16 right_shifts)
+{
+    // Performs vector operation: out_vector = (gain*in_vector)>>right_shifts
+    int i;
+    WebRtc_Word32 tmpW32;
+    G_CONST WebRtc_Word16 *inptr;
+    WebRtc_Word16 *outptr;
+
+    inptr = in_vector;
+    outptr = out_vector;
+
+    for (i = 0; i < in_vector_length; i++)
+    {
+        tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(*inptr++, gain, right_shifts);
+        (*outptr++) = WebRtcSpl_SatW32ToW16(tmpW32);
+    }
+}
+
+void WebRtcSpl_ScaleAndAddVectors(G_CONST WebRtc_Word16 *in1, WebRtc_Word16 gain1, int shift1,
+                                  G_CONST WebRtc_Word16 *in2, WebRtc_Word16 gain2, int shift2,
+                                  WebRtc_Word16 *out, int vector_length)
+{
+    // Performs vector operation: out = (gain1*in1)>>shift1 + (gain2*in2)>>shift2
+    int i;
+    G_CONST WebRtc_Word16 *in1ptr;
+    G_CONST WebRtc_Word16 *in2ptr;
+    WebRtc_Word16 *outptr;
+
+    in1ptr = in1;
+    in2ptr = in2;
+    outptr = out;
+
+    for (i = 0; i < vector_length; i++)
+    {
+        (*outptr++) = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gain1, *in1ptr++, shift1)
+                + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gain2, *in2ptr++, shift2);
+    }
+}
diff --git a/trunk/src/common_audio/signal_processing/webrtc_fft_t_1024_8.c b/trunk/src/common_audio/signal_processing/webrtc_fft_t_1024_8.c
new file mode 100644
index 0000000..b587380
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/webrtc_fft_t_1024_8.c
@@ -0,0 +1,704 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the Q14 radix-8 tables used in ARM9e optimizations.
+ *
+ */
+
+extern const int s_Q14S_8;
+const int s_Q14S_8 = 1024;
+extern const unsigned short t_Q14S_8[2032];
+const unsigned short t_Q14S_8[2032] = {
+  0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
+  0x22a3,0x187e ,0x3249,0x0c7c ,0x11a8,0x238e ,
+  0x0000,0x2d41 ,0x22a3,0x187e ,0xdd5d,0x3b21 ,
+  0xdd5d,0x3b21 ,0x11a8,0x238e ,0xb4be,0x3ec5 ,
+  0xc000,0x4000 ,0x0000,0x2d41 ,0xa57e,0x2d41 ,
+  0xac61,0x3b21 ,0xee58,0x3537 ,0xb4be,0x0c7c ,
+  0xa57e,0x2d41 ,0xdd5d,0x3b21 ,0xdd5d,0xe782 ,
+  0xac61,0x187e ,0xcdb7,0x3ec5 ,0x11a8,0xcac9 ,
+  0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
+  0x396b,0x0646 ,0x3cc8,0x0324 ,0x35eb,0x0964 ,
+  0x3249,0x0c7c ,0x396b,0x0646 ,0x2aaa,0x1294 ,
+  0x2aaa,0x1294 ,0x35eb,0x0964 ,0x1e7e,0x1b5d ,
+  0x22a3,0x187e ,0x3249,0x0c7c ,0x11a8,0x238e ,
+  0x1a46,0x1e2b ,0x2e88,0x0f8d ,0x0471,0x2afb ,
+  0x11a8,0x238e ,0x2aaa,0x1294 ,0xf721,0x3179 ,
+  0x08df,0x289a ,0x26b3,0x1590 ,0xea02,0x36e5 ,
+  0x0000,0x2d41 ,0x22a3,0x187e ,0xdd5d,0x3b21 ,
+  0xf721,0x3179 ,0x1e7e,0x1b5d ,0xd178,0x3e15 ,
+  0xee58,0x3537 ,0x1a46,0x1e2b ,0xc695,0x3fb1 ,
+  0xe5ba,0x3871 ,0x15fe,0x20e7 ,0xbcf0,0x3fec ,
+  0xdd5d,0x3b21 ,0x11a8,0x238e ,0xb4be,0x3ec5 ,
+  0xd556,0x3d3f ,0x0d48,0x2620 ,0xae2e,0x3c42 ,
+  0xcdb7,0x3ec5 ,0x08df,0x289a ,0xa963,0x3871 ,
+  0xc695,0x3fb1 ,0x0471,0x2afb ,0xa678,0x3368 ,
+  0xc000,0x4000 ,0x0000,0x2d41 ,0xa57e,0x2d41 ,
+  0xba09,0x3fb1 ,0xfb8f,0x2f6c ,0xa678,0x2620 ,
+  0xb4be,0x3ec5 ,0xf721,0x3179 ,0xa963,0x1e2b ,
+  0xb02d,0x3d3f ,0xf2b8,0x3368 ,0xae2e,0x1590 ,
+  0xac61,0x3b21 ,0xee58,0x3537 ,0xb4be,0x0c7c ,
+  0xa963,0x3871 ,0xea02,0x36e5 ,0xbcf0,0x0324 ,
+  0xa73b,0x3537 ,0xe5ba,0x3871 ,0xc695,0xf9ba ,
+  0xa5ed,0x3179 ,0xe182,0x39db ,0xd178,0xf073 ,
+  0xa57e,0x2d41 ,0xdd5d,0x3b21 ,0xdd5d,0xe782 ,
+  0xa5ed,0x289a ,0xd94d,0x3c42 ,0xea02,0xdf19 ,
+  0xa73b,0x238e ,0xd556,0x3d3f ,0xf721,0xd766 ,
+  0xa963,0x1e2b ,0xd178,0x3e15 ,0x0471,0xd094 ,
+  0xac61,0x187e ,0xcdb7,0x3ec5 ,0x11a8,0xcac9 ,
+  0xb02d,0x1294 ,0xca15,0x3f4f ,0x1e7e,0xc625 ,
+  0xb4be,0x0c7c ,0xc695,0x3fb1 ,0x2aaa,0xc2c1 ,
+  0xba09,0x0646 ,0xc338,0x3fec ,0x35eb,0xc0b1 ,
+  0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
+  0x3e69,0x0192 ,0x3f36,0x00c9 ,0x3d9a,0x025b ,
+  0x3cc8,0x0324 ,0x3e69,0x0192 ,0x3b1e,0x04b5 ,
+  0x3b1e,0x04b5 ,0x3d9a,0x025b ,0x388e,0x070e ,
+  0x396b,0x0646 ,0x3cc8,0x0324 ,0x35eb,0x0964 ,
+  0x37af,0x07d6 ,0x3bf4,0x03ed ,0x3334,0x0bb7 ,
+  0x35eb,0x0964 ,0x3b1e,0x04b5 ,0x306c,0x0e06 ,
+  0x341e,0x0af1 ,0x3a46,0x057e ,0x2d93,0x1050 ,
+  0x3249,0x0c7c ,0x396b,0x0646 ,0x2aaa,0x1294 ,
+  0x306c,0x0e06 ,0x388e,0x070e ,0x27b3,0x14d2 ,
+  0x2e88,0x0f8d ,0x37af,0x07d6 ,0x24ae,0x1709 ,
+  0x2c9d,0x1112 ,0x36ce,0x089d ,0x219c,0x1937 ,
+  0x2aaa,0x1294 ,0x35eb,0x0964 ,0x1e7e,0x1b5d ,
+  0x28b2,0x1413 ,0x3505,0x0a2b ,0x1b56,0x1d79 ,
+  0x26b3,0x1590 ,0x341e,0x0af1 ,0x1824,0x1f8c ,
+  0x24ae,0x1709 ,0x3334,0x0bb7 ,0x14ea,0x2193 ,
+  0x22a3,0x187e ,0x3249,0x0c7c ,0x11a8,0x238e ,
+  0x2093,0x19ef ,0x315b,0x0d41 ,0x0e61,0x257e ,
+  0x1e7e,0x1b5d ,0x306c,0x0e06 ,0x0b14,0x2760 ,
+  0x1c64,0x1cc6 ,0x2f7b,0x0eca ,0x07c4,0x2935 ,
+  0x1a46,0x1e2b ,0x2e88,0x0f8d ,0x0471,0x2afb ,
+  0x1824,0x1f8c ,0x2d93,0x1050 ,0x011c,0x2cb2 ,
+  0x15fe,0x20e7 ,0x2c9d,0x1112 ,0xfdc7,0x2e5a ,
+  0x13d5,0x223d ,0x2ba4,0x11d3 ,0xfa73,0x2ff2 ,
+  0x11a8,0x238e ,0x2aaa,0x1294 ,0xf721,0x3179 ,
+  0x0f79,0x24da ,0x29af,0x1354 ,0xf3d2,0x32ef ,
+  0x0d48,0x2620 ,0x28b2,0x1413 ,0xf087,0x3453 ,
+  0x0b14,0x2760 ,0x27b3,0x14d2 ,0xed41,0x35a5 ,
+  0x08df,0x289a ,0x26b3,0x1590 ,0xea02,0x36e5 ,
+  0x06a9,0x29ce ,0x25b1,0x164c ,0xe6cb,0x3812 ,
+  0x0471,0x2afb ,0x24ae,0x1709 ,0xe39c,0x392b ,
+  0x0239,0x2c21 ,0x23a9,0x17c4 ,0xe077,0x3a30 ,
+  0x0000,0x2d41 ,0x22a3,0x187e ,0xdd5d,0x3b21 ,
+  0xfdc7,0x2e5a ,0x219c,0x1937 ,0xda4f,0x3bfd ,
+  0xfb8f,0x2f6c ,0x2093,0x19ef ,0xd74e,0x3cc5 ,
+  0xf957,0x3076 ,0x1f89,0x1aa7 ,0xd45c,0x3d78 ,
+  0xf721,0x3179 ,0x1e7e,0x1b5d ,0xd178,0x3e15 ,
+  0xf4ec,0x3274 ,0x1d72,0x1c12 ,0xcea5,0x3e9d ,
+  0xf2b8,0x3368 ,0x1c64,0x1cc6 ,0xcbe2,0x3f0f ,
+  0xf087,0x3453 ,0x1b56,0x1d79 ,0xc932,0x3f6b ,
+  0xee58,0x3537 ,0x1a46,0x1e2b ,0xc695,0x3fb1 ,
+  0xec2b,0x3612 ,0x1935,0x1edc ,0xc40c,0x3fe1 ,
+  0xea02,0x36e5 ,0x1824,0x1f8c ,0xc197,0x3ffb ,
+  0xe7dc,0x37b0 ,0x1711,0x203a ,0xbf38,0x3fff ,
+  0xe5ba,0x3871 ,0x15fe,0x20e7 ,0xbcf0,0x3fec ,
+  0xe39c,0x392b ,0x14ea,0x2193 ,0xbabf,0x3fc4 ,
+  0xe182,0x39db ,0x13d5,0x223d ,0xb8a6,0x3f85 ,
+  0xdf6d,0x3a82 ,0x12bf,0x22e7 ,0xb6a5,0x3f30 ,
+  0xdd5d,0x3b21 ,0x11a8,0x238e ,0xb4be,0x3ec5 ,
+  0xdb52,0x3bb6 ,0x1091,0x2435 ,0xb2f2,0x3e45 ,
+  0xd94d,0x3c42 ,0x0f79,0x24da ,0xb140,0x3daf ,
+  0xd74e,0x3cc5 ,0x0e61,0x257e ,0xafa9,0x3d03 ,
+  0xd556,0x3d3f ,0x0d48,0x2620 ,0xae2e,0x3c42 ,
+  0xd363,0x3daf ,0x0c2e,0x26c1 ,0xacd0,0x3b6d ,
+  0xd178,0x3e15 ,0x0b14,0x2760 ,0xab8e,0x3a82 ,
+  0xcf94,0x3e72 ,0x09fa,0x27fe ,0xaa6a,0x3984 ,
+  0xcdb7,0x3ec5 ,0x08df,0x289a ,0xa963,0x3871 ,
+  0xcbe2,0x3f0f ,0x07c4,0x2935 ,0xa87b,0x374b ,
+  0xca15,0x3f4f ,0x06a9,0x29ce ,0xa7b1,0x3612 ,
+  0xc851,0x3f85 ,0x058d,0x2a65 ,0xa705,0x34c6 ,
+  0xc695,0x3fb1 ,0x0471,0x2afb ,0xa678,0x3368 ,
+  0xc4e2,0x3fd4 ,0x0355,0x2b8f ,0xa60b,0x31f8 ,
+  0xc338,0x3fec ,0x0239,0x2c21 ,0xa5bc,0x3076 ,
+  0xc197,0x3ffb ,0x011c,0x2cb2 ,0xa58d,0x2ee4 ,
+  0xc000,0x4000 ,0x0000,0x2d41 ,0xa57e,0x2d41 ,
+  0xbe73,0x3ffb ,0xfee4,0x2dcf ,0xa58d,0x2b8f ,
+  0xbcf0,0x3fec ,0xfdc7,0x2e5a ,0xa5bc,0x29ce ,
+  0xbb77,0x3fd4 ,0xfcab,0x2ee4 ,0xa60b,0x27fe ,
+  0xba09,0x3fb1 ,0xfb8f,0x2f6c ,0xa678,0x2620 ,
+  0xb8a6,0x3f85 ,0xfa73,0x2ff2 ,0xa705,0x2435 ,
+  0xb74d,0x3f4f ,0xf957,0x3076 ,0xa7b1,0x223d ,
+  0xb600,0x3f0f ,0xf83c,0x30f9 ,0xa87b,0x203a ,
+  0xb4be,0x3ec5 ,0xf721,0x3179 ,0xa963,0x1e2b ,
+  0xb388,0x3e72 ,0xf606,0x31f8 ,0xaa6a,0x1c12 ,
+  0xb25e,0x3e15 ,0xf4ec,0x3274 ,0xab8e,0x19ef ,
+  0xb140,0x3daf ,0xf3d2,0x32ef ,0xacd0,0x17c4 ,
+  0xb02d,0x3d3f ,0xf2b8,0x3368 ,0xae2e,0x1590 ,
+  0xaf28,0x3cc5 ,0xf19f,0x33df ,0xafa9,0x1354 ,
+  0xae2e,0x3c42 ,0xf087,0x3453 ,0xb140,0x1112 ,
+  0xad41,0x3bb6 ,0xef6f,0x34c6 ,0xb2f2,0x0eca ,
+  0xac61,0x3b21 ,0xee58,0x3537 ,0xb4be,0x0c7c ,
+  0xab8e,0x3a82 ,0xed41,0x35a5 ,0xb6a5,0x0a2b ,
+  0xaac8,0x39db ,0xec2b,0x3612 ,0xb8a6,0x07d6 ,
+  0xaa0f,0x392b ,0xeb16,0x367d ,0xbabf,0x057e ,
+  0xa963,0x3871 ,0xea02,0x36e5 ,0xbcf0,0x0324 ,
+  0xa8c5,0x37b0 ,0xe8ef,0x374b ,0xbf38,0x00c9 ,
+  0xa834,0x36e5 ,0xe7dc,0x37b0 ,0xc197,0xfe6e ,
+  0xa7b1,0x3612 ,0xe6cb,0x3812 ,0xc40c,0xfc13 ,
+  0xa73b,0x3537 ,0xe5ba,0x3871 ,0xc695,0xf9ba ,
+  0xa6d3,0x3453 ,0xe4aa,0x38cf ,0xc932,0xf763 ,
+  0xa678,0x3368 ,0xe39c,0x392b ,0xcbe2,0xf50f ,
+  0xa62c,0x3274 ,0xe28e,0x3984 ,0xcea5,0xf2bf ,
+  0xa5ed,0x3179 ,0xe182,0x39db ,0xd178,0xf073 ,
+  0xa5bc,0x3076 ,0xe077,0x3a30 ,0xd45c,0xee2d ,
+  0xa599,0x2f6c ,0xdf6d,0x3a82 ,0xd74e,0xebed ,
+  0xa585,0x2e5a ,0xde64,0x3ad3 ,0xda4f,0xe9b4 ,
+  0xa57e,0x2d41 ,0xdd5d,0x3b21 ,0xdd5d,0xe782 ,
+  0xa585,0x2c21 ,0xdc57,0x3b6d ,0xe077,0xe559 ,
+  0xa599,0x2afb ,0xdb52,0x3bb6 ,0xe39c,0xe33a ,
+  0xa5bc,0x29ce ,0xda4f,0x3bfd ,0xe6cb,0xe124 ,
+  0xa5ed,0x289a ,0xd94d,0x3c42 ,0xea02,0xdf19 ,
+  0xa62c,0x2760 ,0xd84d,0x3c85 ,0xed41,0xdd19 ,
+  0xa678,0x2620 ,0xd74e,0x3cc5 ,0xf087,0xdb26 ,
+  0xa6d3,0x24da ,0xd651,0x3d03 ,0xf3d2,0xd93f ,
+  0xa73b,0x238e ,0xd556,0x3d3f ,0xf721,0xd766 ,
+  0xa7b1,0x223d ,0xd45c,0x3d78 ,0xfa73,0xd59b ,
+  0xa834,0x20e7 ,0xd363,0x3daf ,0xfdc7,0xd3df ,
+  0xa8c5,0x1f8c ,0xd26d,0x3de3 ,0x011c,0xd231 ,
+  0xa963,0x1e2b ,0xd178,0x3e15 ,0x0471,0xd094 ,
+  0xaa0f,0x1cc6 ,0xd085,0x3e45 ,0x07c4,0xcf07 ,
+  0xaac8,0x1b5d ,0xcf94,0x3e72 ,0x0b14,0xcd8c ,
+  0xab8e,0x19ef ,0xcea5,0x3e9d ,0x0e61,0xcc21 ,
+  0xac61,0x187e ,0xcdb7,0x3ec5 ,0x11a8,0xcac9 ,
+  0xad41,0x1709 ,0xcccc,0x3eeb ,0x14ea,0xc983 ,
+  0xae2e,0x1590 ,0xcbe2,0x3f0f ,0x1824,0xc850 ,
+  0xaf28,0x1413 ,0xcafb,0x3f30 ,0x1b56,0xc731 ,
+  0xb02d,0x1294 ,0xca15,0x3f4f ,0x1e7e,0xc625 ,
+  0xb140,0x1112 ,0xc932,0x3f6b ,0x219c,0xc52d ,
+  0xb25e,0x0f8d ,0xc851,0x3f85 ,0x24ae,0xc44a ,
+  0xb388,0x0e06 ,0xc772,0x3f9c ,0x27b3,0xc37b ,
+  0xb4be,0x0c7c ,0xc695,0x3fb1 ,0x2aaa,0xc2c1 ,
+  0xb600,0x0af1 ,0xc5ba,0x3fc4 ,0x2d93,0xc21d ,
+  0xb74d,0x0964 ,0xc4e2,0x3fd4 ,0x306c,0xc18e ,
+  0xb8a6,0x07d6 ,0xc40c,0x3fe1 ,0x3334,0xc115 ,
+  0xba09,0x0646 ,0xc338,0x3fec ,0x35eb,0xc0b1 ,
+  0xbb77,0x04b5 ,0xc266,0x3ff5 ,0x388e,0xc064 ,
+  0xbcf0,0x0324 ,0xc197,0x3ffb ,0x3b1e,0xc02c ,
+  0xbe73,0x0192 ,0xc0ca,0x3fff ,0x3d9a,0xc00b ,
+  0x4000,0x0000 ,0x3f9b,0x0065 ,0x3f36,0x00c9 ,
+  0x3ed0,0x012e ,0x3e69,0x0192 ,0x3e02,0x01f7 ,
+  0x3d9a,0x025b ,0x3d31,0x02c0 ,0x3cc8,0x0324 ,
+  0x3c5f,0x0388 ,0x3bf4,0x03ed ,0x3b8a,0x0451 ,
+  0x3b1e,0x04b5 ,0x3ab2,0x051a ,0x3a46,0x057e ,
+  0x39d9,0x05e2 ,0x396b,0x0646 ,0x38fd,0x06aa ,
+  0x388e,0x070e ,0x381f,0x0772 ,0x37af,0x07d6 ,
+  0x373f,0x0839 ,0x36ce,0x089d ,0x365d,0x0901 ,
+  0x35eb,0x0964 ,0x3578,0x09c7 ,0x3505,0x0a2b ,
+  0x3492,0x0a8e ,0x341e,0x0af1 ,0x33a9,0x0b54 ,
+  0x3334,0x0bb7 ,0x32bf,0x0c1a ,0x3249,0x0c7c ,
+  0x31d2,0x0cdf ,0x315b,0x0d41 ,0x30e4,0x0da4 ,
+  0x306c,0x0e06 ,0x2ff4,0x0e68 ,0x2f7b,0x0eca ,
+  0x2f02,0x0f2b ,0x2e88,0x0f8d ,0x2e0e,0x0fee ,
+  0x2d93,0x1050 ,0x2d18,0x10b1 ,0x2c9d,0x1112 ,
+  0x2c21,0x1173 ,0x2ba4,0x11d3 ,0x2b28,0x1234 ,
+  0x2aaa,0x1294 ,0x2a2d,0x12f4 ,0x29af,0x1354 ,
+  0x2931,0x13b4 ,0x28b2,0x1413 ,0x2833,0x1473 ,
+  0x27b3,0x14d2 ,0x2733,0x1531 ,0x26b3,0x1590 ,
+  0x2632,0x15ee ,0x25b1,0x164c ,0x252f,0x16ab ,
+  0x24ae,0x1709 ,0x242b,0x1766 ,0x23a9,0x17c4 ,
+  0x2326,0x1821 ,0x22a3,0x187e ,0x221f,0x18db ,
+  0x219c,0x1937 ,0x2117,0x1993 ,0x2093,0x19ef ,
+  0x200e,0x1a4b ,0x1f89,0x1aa7 ,0x1f04,0x1b02 ,
+  0x1e7e,0x1b5d ,0x1df8,0x1bb8 ,0x1d72,0x1c12 ,
+  0x1ceb,0x1c6c ,0x1c64,0x1cc6 ,0x1bdd,0x1d20 ,
+  0x1b56,0x1d79 ,0x1ace,0x1dd3 ,0x1a46,0x1e2b ,
+  0x19be,0x1e84 ,0x1935,0x1edc ,0x18ad,0x1f34 ,
+  0x1824,0x1f8c ,0x179b,0x1fe3 ,0x1711,0x203a ,
+  0x1688,0x2091 ,0x15fe,0x20e7 ,0x1574,0x213d ,
+  0x14ea,0x2193 ,0x145f,0x21e8 ,0x13d5,0x223d ,
+  0x134a,0x2292 ,0x12bf,0x22e7 ,0x1234,0x233b ,
+  0x11a8,0x238e ,0x111d,0x23e2 ,0x1091,0x2435 ,
+  0x1005,0x2488 ,0x0f79,0x24da ,0x0eed,0x252c ,
+  0x0e61,0x257e ,0x0dd4,0x25cf ,0x0d48,0x2620 ,
+  0x0cbb,0x2671 ,0x0c2e,0x26c1 ,0x0ba1,0x2711 ,
+  0x0b14,0x2760 ,0x0a87,0x27af ,0x09fa,0x27fe ,
+  0x096d,0x284c ,0x08df,0x289a ,0x0852,0x28e7 ,
+  0x07c4,0x2935 ,0x0736,0x2981 ,0x06a9,0x29ce ,
+  0x061b,0x2a1a ,0x058d,0x2a65 ,0x04ff,0x2ab0 ,
+  0x0471,0x2afb ,0x03e3,0x2b45 ,0x0355,0x2b8f ,
+  0x02c7,0x2bd8 ,0x0239,0x2c21 ,0x01aa,0x2c6a ,
+  0x011c,0x2cb2 ,0x008e,0x2cfa ,0x0000,0x2d41 ,
+  0xff72,0x2d88 ,0xfee4,0x2dcf ,0xfe56,0x2e15 ,
+  0xfdc7,0x2e5a ,0xfd39,0x2e9f ,0xfcab,0x2ee4 ,
+  0xfc1d,0x2f28 ,0xfb8f,0x2f6c ,0xfb01,0x2faf ,
+  0xfa73,0x2ff2 ,0xf9e5,0x3034 ,0xf957,0x3076 ,
+  0xf8ca,0x30b8 ,0xf83c,0x30f9 ,0xf7ae,0x3139 ,
+  0xf721,0x3179 ,0xf693,0x31b9 ,0xf606,0x31f8 ,
+  0xf579,0x3236 ,0xf4ec,0x3274 ,0xf45f,0x32b2 ,
+  0xf3d2,0x32ef ,0xf345,0x332c ,0xf2b8,0x3368 ,
+  0xf22c,0x33a3 ,0xf19f,0x33df ,0xf113,0x3419 ,
+  0xf087,0x3453 ,0xeffb,0x348d ,0xef6f,0x34c6 ,
+  0xeee3,0x34ff ,0xee58,0x3537 ,0xedcc,0x356e ,
+  0xed41,0x35a5 ,0xecb6,0x35dc ,0xec2b,0x3612 ,
+  0xeba1,0x3648 ,0xeb16,0x367d ,0xea8c,0x36b1 ,
+  0xea02,0x36e5 ,0xe978,0x3718 ,0xe8ef,0x374b ,
+  0xe865,0x377e ,0xe7dc,0x37b0 ,0xe753,0x37e1 ,
+  0xe6cb,0x3812 ,0xe642,0x3842 ,0xe5ba,0x3871 ,
+  0xe532,0x38a1 ,0xe4aa,0x38cf ,0xe423,0x38fd ,
+  0xe39c,0x392b ,0xe315,0x3958 ,0xe28e,0x3984 ,
+  0xe208,0x39b0 ,0xe182,0x39db ,0xe0fc,0x3a06 ,
+  0xe077,0x3a30 ,0xdff2,0x3a59 ,0xdf6d,0x3a82 ,
+  0xdee9,0x3aab ,0xde64,0x3ad3 ,0xdde1,0x3afa ,
+  0xdd5d,0x3b21 ,0xdcda,0x3b47 ,0xdc57,0x3b6d ,
+  0xdbd5,0x3b92 ,0xdb52,0x3bb6 ,0xdad1,0x3bda ,
+  0xda4f,0x3bfd ,0xd9ce,0x3c20 ,0xd94d,0x3c42 ,
+  0xd8cd,0x3c64 ,0xd84d,0x3c85 ,0xd7cd,0x3ca5 ,
+  0xd74e,0x3cc5 ,0xd6cf,0x3ce4 ,0xd651,0x3d03 ,
+  0xd5d3,0x3d21 ,0xd556,0x3d3f ,0xd4d8,0x3d5b ,
+  0xd45c,0x3d78 ,0xd3df,0x3d93 ,0xd363,0x3daf ,
+  0xd2e8,0x3dc9 ,0xd26d,0x3de3 ,0xd1f2,0x3dfc ,
+  0xd178,0x3e15 ,0xd0fe,0x3e2d ,0xd085,0x3e45 ,
+  0xd00c,0x3e5c ,0xcf94,0x3e72 ,0xcf1c,0x3e88 ,
+  0xcea5,0x3e9d ,0xce2e,0x3eb1 ,0xcdb7,0x3ec5 ,
+  0xcd41,0x3ed8 ,0xcccc,0x3eeb ,0xcc57,0x3efd ,
+  0xcbe2,0x3f0f ,0xcb6e,0x3f20 ,0xcafb,0x3f30 ,
+  0xca88,0x3f40 ,0xca15,0x3f4f ,0xc9a3,0x3f5d ,
+  0xc932,0x3f6b ,0xc8c1,0x3f78 ,0xc851,0x3f85 ,
+  0xc7e1,0x3f91 ,0xc772,0x3f9c ,0xc703,0x3fa7 ,
+  0xc695,0x3fb1 ,0xc627,0x3fbb ,0xc5ba,0x3fc4 ,
+  0xc54e,0x3fcc ,0xc4e2,0x3fd4 ,0xc476,0x3fdb ,
+  0xc40c,0x3fe1 ,0xc3a1,0x3fe7 ,0xc338,0x3fec ,
+  0xc2cf,0x3ff1 ,0xc266,0x3ff5 ,0xc1fe,0x3ff8 ,
+  0xc197,0x3ffb ,0xc130,0x3ffd ,0xc0ca,0x3fff ,
+  0xc065,0x4000 ,0xc000,0x4000 ,0xbf9c,0x4000 ,
+  0xbf38,0x3fff ,0xbed5,0x3ffd ,0xbe73,0x3ffb ,
+  0xbe11,0x3ff8 ,0xbdb0,0x3ff5 ,0xbd50,0x3ff1 ,
+  0xbcf0,0x3fec ,0xbc91,0x3fe7 ,0xbc32,0x3fe1 ,
+  0xbbd4,0x3fdb ,0xbb77,0x3fd4 ,0xbb1b,0x3fcc ,
+  0xbabf,0x3fc4 ,0xba64,0x3fbb ,0xba09,0x3fb1 ,
+  0xb9af,0x3fa7 ,0xb956,0x3f9c ,0xb8fd,0x3f91 ,
+  0xb8a6,0x3f85 ,0xb84f,0x3f78 ,0xb7f8,0x3f6b ,
+  0xb7a2,0x3f5d ,0xb74d,0x3f4f ,0xb6f9,0x3f40 ,
+  0xb6a5,0x3f30 ,0xb652,0x3f20 ,0xb600,0x3f0f ,
+  0xb5af,0x3efd ,0xb55e,0x3eeb ,0xb50e,0x3ed8 ,
+  0xb4be,0x3ec5 ,0xb470,0x3eb1 ,0xb422,0x3e9d ,
+  0xb3d5,0x3e88 ,0xb388,0x3e72 ,0xb33d,0x3e5c ,
+  0xb2f2,0x3e45 ,0xb2a7,0x3e2d ,0xb25e,0x3e15 ,
+  0xb215,0x3dfc ,0xb1cd,0x3de3 ,0xb186,0x3dc9 ,
+  0xb140,0x3daf ,0xb0fa,0x3d93 ,0xb0b5,0x3d78 ,
+  0xb071,0x3d5b ,0xb02d,0x3d3f ,0xafeb,0x3d21 ,
+  0xafa9,0x3d03 ,0xaf68,0x3ce4 ,0xaf28,0x3cc5 ,
+  0xaee8,0x3ca5 ,0xaea9,0x3c85 ,0xae6b,0x3c64 ,
+  0xae2e,0x3c42 ,0xadf2,0x3c20 ,0xadb6,0x3bfd ,
+  0xad7b,0x3bda ,0xad41,0x3bb6 ,0xad08,0x3b92 ,
+  0xacd0,0x3b6d ,0xac98,0x3b47 ,0xac61,0x3b21 ,
+  0xac2b,0x3afa ,0xabf6,0x3ad3 ,0xabc2,0x3aab ,
+  0xab8e,0x3a82 ,0xab5b,0x3a59 ,0xab29,0x3a30 ,
+  0xaaf8,0x3a06 ,0xaac8,0x39db ,0xaa98,0x39b0 ,
+  0xaa6a,0x3984 ,0xaa3c,0x3958 ,0xaa0f,0x392b ,
+  0xa9e3,0x38fd ,0xa9b7,0x38cf ,0xa98d,0x38a1 ,
+  0xa963,0x3871 ,0xa93a,0x3842 ,0xa912,0x3812 ,
+  0xa8eb,0x37e1 ,0xa8c5,0x37b0 ,0xa89f,0x377e ,
+  0xa87b,0x374b ,0xa857,0x3718 ,0xa834,0x36e5 ,
+  0xa812,0x36b1 ,0xa7f1,0x367d ,0xa7d0,0x3648 ,
+  0xa7b1,0x3612 ,0xa792,0x35dc ,0xa774,0x35a5 ,
+  0xa757,0x356e ,0xa73b,0x3537 ,0xa71f,0x34ff ,
+  0xa705,0x34c6 ,0xa6eb,0x348d ,0xa6d3,0x3453 ,
+  0xa6bb,0x3419 ,0xa6a4,0x33df ,0xa68e,0x33a3 ,
+  0xa678,0x3368 ,0xa664,0x332c ,0xa650,0x32ef ,
+  0xa63e,0x32b2 ,0xa62c,0x3274 ,0xa61b,0x3236 ,
+  0xa60b,0x31f8 ,0xa5fb,0x31b9 ,0xa5ed,0x3179 ,
+  0xa5e0,0x3139 ,0xa5d3,0x30f9 ,0xa5c7,0x30b8 ,
+  0xa5bc,0x3076 ,0xa5b2,0x3034 ,0xa5a9,0x2ff2 ,
+  0xa5a1,0x2faf ,0xa599,0x2f6c ,0xa593,0x2f28 ,
+  0xa58d,0x2ee4 ,0xa588,0x2e9f ,0xa585,0x2e5a ,
+  0xa581,0x2e15 ,0xa57f,0x2dcf ,0xa57e,0x2d88 ,
+  0xa57e,0x2d41 ,0xa57e,0x2cfa ,0xa57f,0x2cb2 ,
+  0xa581,0x2c6a ,0xa585,0x2c21 ,0xa588,0x2bd8 ,
+  0xa58d,0x2b8f ,0xa593,0x2b45 ,0xa599,0x2afb ,
+  0xa5a1,0x2ab0 ,0xa5a9,0x2a65 ,0xa5b2,0x2a1a ,
+  0xa5bc,0x29ce ,0xa5c7,0x2981 ,0xa5d3,0x2935 ,
+  0xa5e0,0x28e7 ,0xa5ed,0x289a ,0xa5fb,0x284c ,
+  0xa60b,0x27fe ,0xa61b,0x27af ,0xa62c,0x2760 ,
+  0xa63e,0x2711 ,0xa650,0x26c1 ,0xa664,0x2671 ,
+  0xa678,0x2620 ,0xa68e,0x25cf ,0xa6a4,0x257e ,
+  0xa6bb,0x252c ,0xa6d3,0x24da ,0xa6eb,0x2488 ,
+  0xa705,0x2435 ,0xa71f,0x23e2 ,0xa73b,0x238e ,
+  0xa757,0x233b ,0xa774,0x22e7 ,0xa792,0x2292 ,
+  0xa7b1,0x223d ,0xa7d0,0x21e8 ,0xa7f1,0x2193 ,
+  0xa812,0x213d ,0xa834,0x20e7 ,0xa857,0x2091 ,
+  0xa87b,0x203a ,0xa89f,0x1fe3 ,0xa8c5,0x1f8c ,
+  0xa8eb,0x1f34 ,0xa912,0x1edc ,0xa93a,0x1e84 ,
+  0xa963,0x1e2b ,0xa98d,0x1dd3 ,0xa9b7,0x1d79 ,
+  0xa9e3,0x1d20 ,0xaa0f,0x1cc6 ,0xaa3c,0x1c6c ,
+  0xaa6a,0x1c12 ,0xaa98,0x1bb8 ,0xaac8,0x1b5d ,
+  0xaaf8,0x1b02 ,0xab29,0x1aa7 ,0xab5b,0x1a4b ,
+  0xab8e,0x19ef ,0xabc2,0x1993 ,0xabf6,0x1937 ,
+  0xac2b,0x18db ,0xac61,0x187e ,0xac98,0x1821 ,
+  0xacd0,0x17c4 ,0xad08,0x1766 ,0xad41,0x1709 ,
+  0xad7b,0x16ab ,0xadb6,0x164c ,0xadf2,0x15ee ,
+  0xae2e,0x1590 ,0xae6b,0x1531 ,0xaea9,0x14d2 ,
+  0xaee8,0x1473 ,0xaf28,0x1413 ,0xaf68,0x13b4 ,
+  0xafa9,0x1354 ,0xafeb,0x12f4 ,0xb02d,0x1294 ,
+  0xb071,0x1234 ,0xb0b5,0x11d3 ,0xb0fa,0x1173 ,
+  0xb140,0x1112 ,0xb186,0x10b1 ,0xb1cd,0x1050 ,
+  0xb215,0x0fee ,0xb25e,0x0f8d ,0xb2a7,0x0f2b ,
+  0xb2f2,0x0eca ,0xb33d,0x0e68 ,0xb388,0x0e06 ,
+  0xb3d5,0x0da4 ,0xb422,0x0d41 ,0xb470,0x0cdf ,
+  0xb4be,0x0c7c ,0xb50e,0x0c1a ,0xb55e,0x0bb7 ,
+  0xb5af,0x0b54 ,0xb600,0x0af1 ,0xb652,0x0a8e ,
+  0xb6a5,0x0a2b ,0xb6f9,0x09c7 ,0xb74d,0x0964 ,
+  0xb7a2,0x0901 ,0xb7f8,0x089d ,0xb84f,0x0839 ,
+  0xb8a6,0x07d6 ,0xb8fd,0x0772 ,0xb956,0x070e ,
+  0xb9af,0x06aa ,0xba09,0x0646 ,0xba64,0x05e2 ,
+  0xbabf,0x057e ,0xbb1b,0x051a ,0xbb77,0x04b5 ,
+  0xbbd4,0x0451 ,0xbc32,0x03ed ,0xbc91,0x0388 ,
+  0xbcf0,0x0324 ,0xbd50,0x02c0 ,0xbdb0,0x025b ,
+  0xbe11,0x01f7 ,0xbe73,0x0192 ,0xbed5,0x012e ,
+  0xbf38,0x00c9 ,0xbf9c,0x0065 };
+
+
+extern const int s_Q14R_8;
+const int s_Q14R_8 = 1024;
+extern const unsigned short t_Q14R_8[2032];
+const unsigned short t_Q14R_8[2032] = {
+  0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
+  0x3b21,0x187e ,0x3ec5,0x0c7c ,0x3537,0x238e ,
+  0x2d41,0x2d41 ,0x3b21,0x187e ,0x187e,0x3b21 ,
+  0x187e,0x3b21 ,0x3537,0x238e ,0xf384,0x3ec5 ,
+  0x0000,0x4000 ,0x2d41,0x2d41 ,0xd2bf,0x2d41 ,
+  0xe782,0x3b21 ,0x238e,0x3537 ,0xc13b,0x0c7c ,
+  0xd2bf,0x2d41 ,0x187e,0x3b21 ,0xc4df,0xe782 ,
+  0xc4df,0x187e ,0x0c7c,0x3ec5 ,0xdc72,0xcac9 ,
+  0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
+  0x3fb1,0x0646 ,0x3fec,0x0324 ,0x3f4f,0x0964 ,
+  0x3ec5,0x0c7c ,0x3fb1,0x0646 ,0x3d3f,0x1294 ,
+  0x3d3f,0x1294 ,0x3f4f,0x0964 ,0x39db,0x1b5d ,
+  0x3b21,0x187e ,0x3ec5,0x0c7c ,0x3537,0x238e ,
+  0x3871,0x1e2b ,0x3e15,0x0f8d ,0x2f6c,0x2afb ,
+  0x3537,0x238e ,0x3d3f,0x1294 ,0x289a,0x3179 ,
+  0x3179,0x289a ,0x3c42,0x1590 ,0x20e7,0x36e5 ,
+  0x2d41,0x2d41 ,0x3b21,0x187e ,0x187e,0x3b21 ,
+  0x289a,0x3179 ,0x39db,0x1b5d ,0x0f8d,0x3e15 ,
+  0x238e,0x3537 ,0x3871,0x1e2b ,0x0646,0x3fb1 ,
+  0x1e2b,0x3871 ,0x36e5,0x20e7 ,0xfcdc,0x3fec ,
+  0x187e,0x3b21 ,0x3537,0x238e ,0xf384,0x3ec5 ,
+  0x1294,0x3d3f ,0x3368,0x2620 ,0xea70,0x3c42 ,
+  0x0c7c,0x3ec5 ,0x3179,0x289a ,0xe1d5,0x3871 ,
+  0x0646,0x3fb1 ,0x2f6c,0x2afb ,0xd9e0,0x3368 ,
+  0x0000,0x4000 ,0x2d41,0x2d41 ,0xd2bf,0x2d41 ,
+  0xf9ba,0x3fb1 ,0x2afb,0x2f6c ,0xcc98,0x2620 ,
+  0xf384,0x3ec5 ,0x289a,0x3179 ,0xc78f,0x1e2b ,
+  0xed6c,0x3d3f ,0x2620,0x3368 ,0xc3be,0x1590 ,
+  0xe782,0x3b21 ,0x238e,0x3537 ,0xc13b,0x0c7c ,
+  0xe1d5,0x3871 ,0x20e7,0x36e5 ,0xc014,0x0324 ,
+  0xdc72,0x3537 ,0x1e2b,0x3871 ,0xc04f,0xf9ba ,
+  0xd766,0x3179 ,0x1b5d,0x39db ,0xc1eb,0xf073 ,
+  0xd2bf,0x2d41 ,0x187e,0x3b21 ,0xc4df,0xe782 ,
+  0xce87,0x289a ,0x1590,0x3c42 ,0xc91b,0xdf19 ,
+  0xcac9,0x238e ,0x1294,0x3d3f ,0xce87,0xd766 ,
+  0xc78f,0x1e2b ,0x0f8d,0x3e15 ,0xd505,0xd094 ,
+  0xc4df,0x187e ,0x0c7c,0x3ec5 ,0xdc72,0xcac9 ,
+  0xc2c1,0x1294 ,0x0964,0x3f4f ,0xe4a3,0xc625 ,
+  0xc13b,0x0c7c ,0x0646,0x3fb1 ,0xed6c,0xc2c1 ,
+  0xc04f,0x0646 ,0x0324,0x3fec ,0xf69c,0xc0b1 ,
+  0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
+  0x3ffb,0x0192 ,0x3fff,0x00c9 ,0x3ff5,0x025b ,
+  0x3fec,0x0324 ,0x3ffb,0x0192 ,0x3fd4,0x04b5 ,
+  0x3fd4,0x04b5 ,0x3ff5,0x025b ,0x3f9c,0x070e ,
+  0x3fb1,0x0646 ,0x3fec,0x0324 ,0x3f4f,0x0964 ,
+  0x3f85,0x07d6 ,0x3fe1,0x03ed ,0x3eeb,0x0bb7 ,
+  0x3f4f,0x0964 ,0x3fd4,0x04b5 ,0x3e72,0x0e06 ,
+  0x3f0f,0x0af1 ,0x3fc4,0x057e ,0x3de3,0x1050 ,
+  0x3ec5,0x0c7c ,0x3fb1,0x0646 ,0x3d3f,0x1294 ,
+  0x3e72,0x0e06 ,0x3f9c,0x070e ,0x3c85,0x14d2 ,
+  0x3e15,0x0f8d ,0x3f85,0x07d6 ,0x3bb6,0x1709 ,
+  0x3daf,0x1112 ,0x3f6b,0x089d ,0x3ad3,0x1937 ,
+  0x3d3f,0x1294 ,0x3f4f,0x0964 ,0x39db,0x1b5d ,
+  0x3cc5,0x1413 ,0x3f30,0x0a2b ,0x38cf,0x1d79 ,
+  0x3c42,0x1590 ,0x3f0f,0x0af1 ,0x37b0,0x1f8c ,
+  0x3bb6,0x1709 ,0x3eeb,0x0bb7 ,0x367d,0x2193 ,
+  0x3b21,0x187e ,0x3ec5,0x0c7c ,0x3537,0x238e ,
+  0x3a82,0x19ef ,0x3e9d,0x0d41 ,0x33df,0x257e ,
+  0x39db,0x1b5d ,0x3e72,0x0e06 ,0x3274,0x2760 ,
+  0x392b,0x1cc6 ,0x3e45,0x0eca ,0x30f9,0x2935 ,
+  0x3871,0x1e2b ,0x3e15,0x0f8d ,0x2f6c,0x2afb ,
+  0x37b0,0x1f8c ,0x3de3,0x1050 ,0x2dcf,0x2cb2 ,
+  0x36e5,0x20e7 ,0x3daf,0x1112 ,0x2c21,0x2e5a ,
+  0x3612,0x223d ,0x3d78,0x11d3 ,0x2a65,0x2ff2 ,
+  0x3537,0x238e ,0x3d3f,0x1294 ,0x289a,0x3179 ,
+  0x3453,0x24da ,0x3d03,0x1354 ,0x26c1,0x32ef ,
+  0x3368,0x2620 ,0x3cc5,0x1413 ,0x24da,0x3453 ,
+  0x3274,0x2760 ,0x3c85,0x14d2 ,0x22e7,0x35a5 ,
+  0x3179,0x289a ,0x3c42,0x1590 ,0x20e7,0x36e5 ,
+  0x3076,0x29ce ,0x3bfd,0x164c ,0x1edc,0x3812 ,
+  0x2f6c,0x2afb ,0x3bb6,0x1709 ,0x1cc6,0x392b ,
+  0x2e5a,0x2c21 ,0x3b6d,0x17c4 ,0x1aa7,0x3a30 ,
+  0x2d41,0x2d41 ,0x3b21,0x187e ,0x187e,0x3b21 ,
+  0x2c21,0x2e5a ,0x3ad3,0x1937 ,0x164c,0x3bfd ,
+  0x2afb,0x2f6c ,0x3a82,0x19ef ,0x1413,0x3cc5 ,
+  0x29ce,0x3076 ,0x3a30,0x1aa7 ,0x11d3,0x3d78 ,
+  0x289a,0x3179 ,0x39db,0x1b5d ,0x0f8d,0x3e15 ,
+  0x2760,0x3274 ,0x3984,0x1c12 ,0x0d41,0x3e9d ,
+  0x2620,0x3368 ,0x392b,0x1cc6 ,0x0af1,0x3f0f ,
+  0x24da,0x3453 ,0x38cf,0x1d79 ,0x089d,0x3f6b ,
+  0x238e,0x3537 ,0x3871,0x1e2b ,0x0646,0x3fb1 ,
+  0x223d,0x3612 ,0x3812,0x1edc ,0x03ed,0x3fe1 ,
+  0x20e7,0x36e5 ,0x37b0,0x1f8c ,0x0192,0x3ffb ,
+  0x1f8c,0x37b0 ,0x374b,0x203a ,0xff37,0x3fff ,
+  0x1e2b,0x3871 ,0x36e5,0x20e7 ,0xfcdc,0x3fec ,
+  0x1cc6,0x392b ,0x367d,0x2193 ,0xfa82,0x3fc4 ,
+  0x1b5d,0x39db ,0x3612,0x223d ,0xf82a,0x3f85 ,
+  0x19ef,0x3a82 ,0x35a5,0x22e7 ,0xf5d5,0x3f30 ,
+  0x187e,0x3b21 ,0x3537,0x238e ,0xf384,0x3ec5 ,
+  0x1709,0x3bb6 ,0x34c6,0x2435 ,0xf136,0x3e45 ,
+  0x1590,0x3c42 ,0x3453,0x24da ,0xeeee,0x3daf ,
+  0x1413,0x3cc5 ,0x33df,0x257e ,0xecac,0x3d03 ,
+  0x1294,0x3d3f ,0x3368,0x2620 ,0xea70,0x3c42 ,
+  0x1112,0x3daf ,0x32ef,0x26c1 ,0xe83c,0x3b6d ,
+  0x0f8d,0x3e15 ,0x3274,0x2760 ,0xe611,0x3a82 ,
+  0x0e06,0x3e72 ,0x31f8,0x27fe ,0xe3ee,0x3984 ,
+  0x0c7c,0x3ec5 ,0x3179,0x289a ,0xe1d5,0x3871 ,
+  0x0af1,0x3f0f ,0x30f9,0x2935 ,0xdfc6,0x374b ,
+  0x0964,0x3f4f ,0x3076,0x29ce ,0xddc3,0x3612 ,
+  0x07d6,0x3f85 ,0x2ff2,0x2a65 ,0xdbcb,0x34c6 ,
+  0x0646,0x3fb1 ,0x2f6c,0x2afb ,0xd9e0,0x3368 ,
+  0x04b5,0x3fd4 ,0x2ee4,0x2b8f ,0xd802,0x31f8 ,
+  0x0324,0x3fec ,0x2e5a,0x2c21 ,0xd632,0x3076 ,
+  0x0192,0x3ffb ,0x2dcf,0x2cb2 ,0xd471,0x2ee4 ,
+  0x0000,0x4000 ,0x2d41,0x2d41 ,0xd2bf,0x2d41 ,
+  0xfe6e,0x3ffb ,0x2cb2,0x2dcf ,0xd11c,0x2b8f ,
+  0xfcdc,0x3fec ,0x2c21,0x2e5a ,0xcf8a,0x29ce ,
+  0xfb4b,0x3fd4 ,0x2b8f,0x2ee4 ,0xce08,0x27fe ,
+  0xf9ba,0x3fb1 ,0x2afb,0x2f6c ,0xcc98,0x2620 ,
+  0xf82a,0x3f85 ,0x2a65,0x2ff2 ,0xcb3a,0x2435 ,
+  0xf69c,0x3f4f ,0x29ce,0x3076 ,0xc9ee,0x223d ,
+  0xf50f,0x3f0f ,0x2935,0x30f9 ,0xc8b5,0x203a ,
+  0xf384,0x3ec5 ,0x289a,0x3179 ,0xc78f,0x1e2b ,
+  0xf1fa,0x3e72 ,0x27fe,0x31f8 ,0xc67c,0x1c12 ,
+  0xf073,0x3e15 ,0x2760,0x3274 ,0xc57e,0x19ef ,
+  0xeeee,0x3daf ,0x26c1,0x32ef ,0xc493,0x17c4 ,
+  0xed6c,0x3d3f ,0x2620,0x3368 ,0xc3be,0x1590 ,
+  0xebed,0x3cc5 ,0x257e,0x33df ,0xc2fd,0x1354 ,
+  0xea70,0x3c42 ,0x24da,0x3453 ,0xc251,0x1112 ,
+  0xe8f7,0x3bb6 ,0x2435,0x34c6 ,0xc1bb,0x0eca ,
+  0xe782,0x3b21 ,0x238e,0x3537 ,0xc13b,0x0c7c ,
+  0xe611,0x3a82 ,0x22e7,0x35a5 ,0xc0d0,0x0a2b ,
+  0xe4a3,0x39db ,0x223d,0x3612 ,0xc07b,0x07d6 ,
+  0xe33a,0x392b ,0x2193,0x367d ,0xc03c,0x057e ,
+  0xe1d5,0x3871 ,0x20e7,0x36e5 ,0xc014,0x0324 ,
+  0xe074,0x37b0 ,0x203a,0x374b ,0xc001,0x00c9 ,
+  0xdf19,0x36e5 ,0x1f8c,0x37b0 ,0xc005,0xfe6e ,
+  0xddc3,0x3612 ,0x1edc,0x3812 ,0xc01f,0xfc13 ,
+  0xdc72,0x3537 ,0x1e2b,0x3871 ,0xc04f,0xf9ba ,
+  0xdb26,0x3453 ,0x1d79,0x38cf ,0xc095,0xf763 ,
+  0xd9e0,0x3368 ,0x1cc6,0x392b ,0xc0f1,0xf50f ,
+  0xd8a0,0x3274 ,0x1c12,0x3984 ,0xc163,0xf2bf ,
+  0xd766,0x3179 ,0x1b5d,0x39db ,0xc1eb,0xf073 ,
+  0xd632,0x3076 ,0x1aa7,0x3a30 ,0xc288,0xee2d ,
+  0xd505,0x2f6c ,0x19ef,0x3a82 ,0xc33b,0xebed ,
+  0xd3df,0x2e5a ,0x1937,0x3ad3 ,0xc403,0xe9b4 ,
+  0xd2bf,0x2d41 ,0x187e,0x3b21 ,0xc4df,0xe782 ,
+  0xd1a6,0x2c21 ,0x17c4,0x3b6d ,0xc5d0,0xe559 ,
+  0xd094,0x2afb ,0x1709,0x3bb6 ,0xc6d5,0xe33a ,
+  0xcf8a,0x29ce ,0x164c,0x3bfd ,0xc7ee,0xe124 ,
+  0xce87,0x289a ,0x1590,0x3c42 ,0xc91b,0xdf19 ,
+  0xcd8c,0x2760 ,0x14d2,0x3c85 ,0xca5b,0xdd19 ,
+  0xcc98,0x2620 ,0x1413,0x3cc5 ,0xcbad,0xdb26 ,
+  0xcbad,0x24da ,0x1354,0x3d03 ,0xcd11,0xd93f ,
+  0xcac9,0x238e ,0x1294,0x3d3f ,0xce87,0xd766 ,
+  0xc9ee,0x223d ,0x11d3,0x3d78 ,0xd00e,0xd59b ,
+  0xc91b,0x20e7 ,0x1112,0x3daf ,0xd1a6,0xd3df ,
+  0xc850,0x1f8c ,0x1050,0x3de3 ,0xd34e,0xd231 ,
+  0xc78f,0x1e2b ,0x0f8d,0x3e15 ,0xd505,0xd094 ,
+  0xc6d5,0x1cc6 ,0x0eca,0x3e45 ,0xd6cb,0xcf07 ,
+  0xc625,0x1b5d ,0x0e06,0x3e72 ,0xd8a0,0xcd8c ,
+  0xc57e,0x19ef ,0x0d41,0x3e9d ,0xda82,0xcc21 ,
+  0xc4df,0x187e ,0x0c7c,0x3ec5 ,0xdc72,0xcac9 ,
+  0xc44a,0x1709 ,0x0bb7,0x3eeb ,0xde6d,0xc983 ,
+  0xc3be,0x1590 ,0x0af1,0x3f0f ,0xe074,0xc850 ,
+  0xc33b,0x1413 ,0x0a2b,0x3f30 ,0xe287,0xc731 ,
+  0xc2c1,0x1294 ,0x0964,0x3f4f ,0xe4a3,0xc625 ,
+  0xc251,0x1112 ,0x089d,0x3f6b ,0xe6c9,0xc52d ,
+  0xc1eb,0x0f8d ,0x07d6,0x3f85 ,0xe8f7,0xc44a ,
+  0xc18e,0x0e06 ,0x070e,0x3f9c ,0xeb2e,0xc37b ,
+  0xc13b,0x0c7c ,0x0646,0x3fb1 ,0xed6c,0xc2c1 ,
+  0xc0f1,0x0af1 ,0x057e,0x3fc4 ,0xefb0,0xc21d ,
+  0xc0b1,0x0964 ,0x04b5,0x3fd4 ,0xf1fa,0xc18e ,
+  0xc07b,0x07d6 ,0x03ed,0x3fe1 ,0xf449,0xc115 ,
+  0xc04f,0x0646 ,0x0324,0x3fec ,0xf69c,0xc0b1 ,
+  0xc02c,0x04b5 ,0x025b,0x3ff5 ,0xf8f2,0xc064 ,
+  0xc014,0x0324 ,0x0192,0x3ffb ,0xfb4b,0xc02c ,
+  0xc005,0x0192 ,0x00c9,0x3fff ,0xfda5,0xc00b ,
+  0x4000,0x0000 ,0x4000,0x0065 ,0x3fff,0x00c9 ,
+  0x3ffd,0x012e ,0x3ffb,0x0192 ,0x3ff8,0x01f7 ,
+  0x3ff5,0x025b ,0x3ff1,0x02c0 ,0x3fec,0x0324 ,
+  0x3fe7,0x0388 ,0x3fe1,0x03ed ,0x3fdb,0x0451 ,
+  0x3fd4,0x04b5 ,0x3fcc,0x051a ,0x3fc4,0x057e ,
+  0x3fbb,0x05e2 ,0x3fb1,0x0646 ,0x3fa7,0x06aa ,
+  0x3f9c,0x070e ,0x3f91,0x0772 ,0x3f85,0x07d6 ,
+  0x3f78,0x0839 ,0x3f6b,0x089d ,0x3f5d,0x0901 ,
+  0x3f4f,0x0964 ,0x3f40,0x09c7 ,0x3f30,0x0a2b ,
+  0x3f20,0x0a8e ,0x3f0f,0x0af1 ,0x3efd,0x0b54 ,
+  0x3eeb,0x0bb7 ,0x3ed8,0x0c1a ,0x3ec5,0x0c7c ,
+  0x3eb1,0x0cdf ,0x3e9d,0x0d41 ,0x3e88,0x0da4 ,
+  0x3e72,0x0e06 ,0x3e5c,0x0e68 ,0x3e45,0x0eca ,
+  0x3e2d,0x0f2b ,0x3e15,0x0f8d ,0x3dfc,0x0fee ,
+  0x3de3,0x1050 ,0x3dc9,0x10b1 ,0x3daf,0x1112 ,
+  0x3d93,0x1173 ,0x3d78,0x11d3 ,0x3d5b,0x1234 ,
+  0x3d3f,0x1294 ,0x3d21,0x12f4 ,0x3d03,0x1354 ,
+  0x3ce4,0x13b4 ,0x3cc5,0x1413 ,0x3ca5,0x1473 ,
+  0x3c85,0x14d2 ,0x3c64,0x1531 ,0x3c42,0x1590 ,
+  0x3c20,0x15ee ,0x3bfd,0x164c ,0x3bda,0x16ab ,
+  0x3bb6,0x1709 ,0x3b92,0x1766 ,0x3b6d,0x17c4 ,
+  0x3b47,0x1821 ,0x3b21,0x187e ,0x3afa,0x18db ,
+  0x3ad3,0x1937 ,0x3aab,0x1993 ,0x3a82,0x19ef ,
+  0x3a59,0x1a4b ,0x3a30,0x1aa7 ,0x3a06,0x1b02 ,
+  0x39db,0x1b5d ,0x39b0,0x1bb8 ,0x3984,0x1c12 ,
+  0x3958,0x1c6c ,0x392b,0x1cc6 ,0x38fd,0x1d20 ,
+  0x38cf,0x1d79 ,0x38a1,0x1dd3 ,0x3871,0x1e2b ,
+  0x3842,0x1e84 ,0x3812,0x1edc ,0x37e1,0x1f34 ,
+  0x37b0,0x1f8c ,0x377e,0x1fe3 ,0x374b,0x203a ,
+  0x3718,0x2091 ,0x36e5,0x20e7 ,0x36b1,0x213d ,
+  0x367d,0x2193 ,0x3648,0x21e8 ,0x3612,0x223d ,
+  0x35dc,0x2292 ,0x35a5,0x22e7 ,0x356e,0x233b ,
+  0x3537,0x238e ,0x34ff,0x23e2 ,0x34c6,0x2435 ,
+  0x348d,0x2488 ,0x3453,0x24da ,0x3419,0x252c ,
+  0x33df,0x257e ,0x33a3,0x25cf ,0x3368,0x2620 ,
+  0x332c,0x2671 ,0x32ef,0x26c1 ,0x32b2,0x2711 ,
+  0x3274,0x2760 ,0x3236,0x27af ,0x31f8,0x27fe ,
+  0x31b9,0x284c ,0x3179,0x289a ,0x3139,0x28e7 ,
+  0x30f9,0x2935 ,0x30b8,0x2981 ,0x3076,0x29ce ,
+  0x3034,0x2a1a ,0x2ff2,0x2a65 ,0x2faf,0x2ab0 ,
+  0x2f6c,0x2afb ,0x2f28,0x2b45 ,0x2ee4,0x2b8f ,
+  0x2e9f,0x2bd8 ,0x2e5a,0x2c21 ,0x2e15,0x2c6a ,
+  0x2dcf,0x2cb2 ,0x2d88,0x2cfa ,0x2d41,0x2d41 ,
+  0x2cfa,0x2d88 ,0x2cb2,0x2dcf ,0x2c6a,0x2e15 ,
+  0x2c21,0x2e5a ,0x2bd8,0x2e9f ,0x2b8f,0x2ee4 ,
+  0x2b45,0x2f28 ,0x2afb,0x2f6c ,0x2ab0,0x2faf ,
+  0x2a65,0x2ff2 ,0x2a1a,0x3034 ,0x29ce,0x3076 ,
+  0x2981,0x30b8 ,0x2935,0x30f9 ,0x28e7,0x3139 ,
+  0x289a,0x3179 ,0x284c,0x31b9 ,0x27fe,0x31f8 ,
+  0x27af,0x3236 ,0x2760,0x3274 ,0x2711,0x32b2 ,
+  0x26c1,0x32ef ,0x2671,0x332c ,0x2620,0x3368 ,
+  0x25cf,0x33a3 ,0x257e,0x33df ,0x252c,0x3419 ,
+  0x24da,0x3453 ,0x2488,0x348d ,0x2435,0x34c6 ,
+  0x23e2,0x34ff ,0x238e,0x3537 ,0x233b,0x356e ,
+  0x22e7,0x35a5 ,0x2292,0x35dc ,0x223d,0x3612 ,
+  0x21e8,0x3648 ,0x2193,0x367d ,0x213d,0x36b1 ,
+  0x20e7,0x36e5 ,0x2091,0x3718 ,0x203a,0x374b ,
+  0x1fe3,0x377e ,0x1f8c,0x37b0 ,0x1f34,0x37e1 ,
+  0x1edc,0x3812 ,0x1e84,0x3842 ,0x1e2b,0x3871 ,
+  0x1dd3,0x38a1 ,0x1d79,0x38cf ,0x1d20,0x38fd ,
+  0x1cc6,0x392b ,0x1c6c,0x3958 ,0x1c12,0x3984 ,
+  0x1bb8,0x39b0 ,0x1b5d,0x39db ,0x1b02,0x3a06 ,
+  0x1aa7,0x3a30 ,0x1a4b,0x3a59 ,0x19ef,0x3a82 ,
+  0x1993,0x3aab ,0x1937,0x3ad3 ,0x18db,0x3afa ,
+  0x187e,0x3b21 ,0x1821,0x3b47 ,0x17c4,0x3b6d ,
+  0x1766,0x3b92 ,0x1709,0x3bb6 ,0x16ab,0x3bda ,
+  0x164c,0x3bfd ,0x15ee,0x3c20 ,0x1590,0x3c42 ,
+  0x1531,0x3c64 ,0x14d2,0x3c85 ,0x1473,0x3ca5 ,
+  0x1413,0x3cc5 ,0x13b4,0x3ce4 ,0x1354,0x3d03 ,
+  0x12f4,0x3d21 ,0x1294,0x3d3f ,0x1234,0x3d5b ,
+  0x11d3,0x3d78 ,0x1173,0x3d93 ,0x1112,0x3daf ,
+  0x10b1,0x3dc9 ,0x1050,0x3de3 ,0x0fee,0x3dfc ,
+  0x0f8d,0x3e15 ,0x0f2b,0x3e2d ,0x0eca,0x3e45 ,
+  0x0e68,0x3e5c ,0x0e06,0x3e72 ,0x0da4,0x3e88 ,
+  0x0d41,0x3e9d ,0x0cdf,0x3eb1 ,0x0c7c,0x3ec5 ,
+  0x0c1a,0x3ed8 ,0x0bb7,0x3eeb ,0x0b54,0x3efd ,
+  0x0af1,0x3f0f ,0x0a8e,0x3f20 ,0x0a2b,0x3f30 ,
+  0x09c7,0x3f40 ,0x0964,0x3f4f ,0x0901,0x3f5d ,
+  0x089d,0x3f6b ,0x0839,0x3f78 ,0x07d6,0x3f85 ,
+  0x0772,0x3f91 ,0x070e,0x3f9c ,0x06aa,0x3fa7 ,
+  0x0646,0x3fb1 ,0x05e2,0x3fbb ,0x057e,0x3fc4 ,
+  0x051a,0x3fcc ,0x04b5,0x3fd4 ,0x0451,0x3fdb ,
+  0x03ed,0x3fe1 ,0x0388,0x3fe7 ,0x0324,0x3fec ,
+  0x02c0,0x3ff1 ,0x025b,0x3ff5 ,0x01f7,0x3ff8 ,
+  0x0192,0x3ffb ,0x012e,0x3ffd ,0x00c9,0x3fff ,
+  0x0065,0x4000 ,0x0000,0x4000 ,0xff9b,0x4000 ,
+  0xff37,0x3fff ,0xfed2,0x3ffd ,0xfe6e,0x3ffb ,
+  0xfe09,0x3ff8 ,0xfda5,0x3ff5 ,0xfd40,0x3ff1 ,
+  0xfcdc,0x3fec ,0xfc78,0x3fe7 ,0xfc13,0x3fe1 ,
+  0xfbaf,0x3fdb ,0xfb4b,0x3fd4 ,0xfae6,0x3fcc ,
+  0xfa82,0x3fc4 ,0xfa1e,0x3fbb ,0xf9ba,0x3fb1 ,
+  0xf956,0x3fa7 ,0xf8f2,0x3f9c ,0xf88e,0x3f91 ,
+  0xf82a,0x3f85 ,0xf7c7,0x3f78 ,0xf763,0x3f6b ,
+  0xf6ff,0x3f5d ,0xf69c,0x3f4f ,0xf639,0x3f40 ,
+  0xf5d5,0x3f30 ,0xf572,0x3f20 ,0xf50f,0x3f0f ,
+  0xf4ac,0x3efd ,0xf449,0x3eeb ,0xf3e6,0x3ed8 ,
+  0xf384,0x3ec5 ,0xf321,0x3eb1 ,0xf2bf,0x3e9d ,
+  0xf25c,0x3e88 ,0xf1fa,0x3e72 ,0xf198,0x3e5c ,
+  0xf136,0x3e45 ,0xf0d5,0x3e2d ,0xf073,0x3e15 ,
+  0xf012,0x3dfc ,0xefb0,0x3de3 ,0xef4f,0x3dc9 ,
+  0xeeee,0x3daf ,0xee8d,0x3d93 ,0xee2d,0x3d78 ,
+  0xedcc,0x3d5b ,0xed6c,0x3d3f ,0xed0c,0x3d21 ,
+  0xecac,0x3d03 ,0xec4c,0x3ce4 ,0xebed,0x3cc5 ,
+  0xeb8d,0x3ca5 ,0xeb2e,0x3c85 ,0xeacf,0x3c64 ,
+  0xea70,0x3c42 ,0xea12,0x3c20 ,0xe9b4,0x3bfd ,
+  0xe955,0x3bda ,0xe8f7,0x3bb6 ,0xe89a,0x3b92 ,
+  0xe83c,0x3b6d ,0xe7df,0x3b47 ,0xe782,0x3b21 ,
+  0xe725,0x3afa ,0xe6c9,0x3ad3 ,0xe66d,0x3aab ,
+  0xe611,0x3a82 ,0xe5b5,0x3a59 ,0xe559,0x3a30 ,
+  0xe4fe,0x3a06 ,0xe4a3,0x39db ,0xe448,0x39b0 ,
+  0xe3ee,0x3984 ,0xe394,0x3958 ,0xe33a,0x392b ,
+  0xe2e0,0x38fd ,0xe287,0x38cf ,0xe22d,0x38a1 ,
+  0xe1d5,0x3871 ,0xe17c,0x3842 ,0xe124,0x3812 ,
+  0xe0cc,0x37e1 ,0xe074,0x37b0 ,0xe01d,0x377e ,
+  0xdfc6,0x374b ,0xdf6f,0x3718 ,0xdf19,0x36e5 ,
+  0xdec3,0x36b1 ,0xde6d,0x367d ,0xde18,0x3648 ,
+  0xddc3,0x3612 ,0xdd6e,0x35dc ,0xdd19,0x35a5 ,
+  0xdcc5,0x356e ,0xdc72,0x3537 ,0xdc1e,0x34ff ,
+  0xdbcb,0x34c6 ,0xdb78,0x348d ,0xdb26,0x3453 ,
+  0xdad4,0x3419 ,0xda82,0x33df ,0xda31,0x33a3 ,
+  0xd9e0,0x3368 ,0xd98f,0x332c ,0xd93f,0x32ef ,
+  0xd8ef,0x32b2 ,0xd8a0,0x3274 ,0xd851,0x3236 ,
+  0xd802,0x31f8 ,0xd7b4,0x31b9 ,0xd766,0x3179 ,
+  0xd719,0x3139 ,0xd6cb,0x30f9 ,0xd67f,0x30b8 ,
+  0xd632,0x3076 ,0xd5e6,0x3034 ,0xd59b,0x2ff2 ,
+  0xd550,0x2faf ,0xd505,0x2f6c ,0xd4bb,0x2f28 ,
+  0xd471,0x2ee4 ,0xd428,0x2e9f ,0xd3df,0x2e5a ,
+  0xd396,0x2e15 ,0xd34e,0x2dcf ,0xd306,0x2d88 ,
+  0xd2bf,0x2d41 ,0xd278,0x2cfa ,0xd231,0x2cb2 ,
+  0xd1eb,0x2c6a ,0xd1a6,0x2c21 ,0xd161,0x2bd8 ,
+  0xd11c,0x2b8f ,0xd0d8,0x2b45 ,0xd094,0x2afb ,
+  0xd051,0x2ab0 ,0xd00e,0x2a65 ,0xcfcc,0x2a1a ,
+  0xcf8a,0x29ce ,0xcf48,0x2981 ,0xcf07,0x2935 ,
+  0xcec7,0x28e7 ,0xce87,0x289a ,0xce47,0x284c ,
+  0xce08,0x27fe ,0xcdca,0x27af ,0xcd8c,0x2760 ,
+  0xcd4e,0x2711 ,0xcd11,0x26c1 ,0xccd4,0x2671 ,
+  0xcc98,0x2620 ,0xcc5d,0x25cf ,0xcc21,0x257e ,
+  0xcbe7,0x252c ,0xcbad,0x24da ,0xcb73,0x2488 ,
+  0xcb3a,0x2435 ,0xcb01,0x23e2 ,0xcac9,0x238e ,
+  0xca92,0x233b ,0xca5b,0x22e7 ,0xca24,0x2292 ,
+  0xc9ee,0x223d ,0xc9b8,0x21e8 ,0xc983,0x2193 ,
+  0xc94f,0x213d ,0xc91b,0x20e7 ,0xc8e8,0x2091 ,
+  0xc8b5,0x203a ,0xc882,0x1fe3 ,0xc850,0x1f8c ,
+  0xc81f,0x1f34 ,0xc7ee,0x1edc ,0xc7be,0x1e84 ,
+  0xc78f,0x1e2b ,0xc75f,0x1dd3 ,0xc731,0x1d79 ,
+  0xc703,0x1d20 ,0xc6d5,0x1cc6 ,0xc6a8,0x1c6c ,
+  0xc67c,0x1c12 ,0xc650,0x1bb8 ,0xc625,0x1b5d ,
+  0xc5fa,0x1b02 ,0xc5d0,0x1aa7 ,0xc5a7,0x1a4b ,
+  0xc57e,0x19ef ,0xc555,0x1993 ,0xc52d,0x1937 ,
+  0xc506,0x18db ,0xc4df,0x187e ,0xc4b9,0x1821 ,
+  0xc493,0x17c4 ,0xc46e,0x1766 ,0xc44a,0x1709 ,
+  0xc426,0x16ab ,0xc403,0x164c ,0xc3e0,0x15ee ,
+  0xc3be,0x1590 ,0xc39c,0x1531 ,0xc37b,0x14d2 ,
+  0xc35b,0x1473 ,0xc33b,0x1413 ,0xc31c,0x13b4 ,
+  0xc2fd,0x1354 ,0xc2df,0x12f4 ,0xc2c1,0x1294 ,
+  0xc2a5,0x1234 ,0xc288,0x11d3 ,0xc26d,0x1173 ,
+  0xc251,0x1112 ,0xc237,0x10b1 ,0xc21d,0x1050 ,
+  0xc204,0x0fee ,0xc1eb,0x0f8d ,0xc1d3,0x0f2b ,
+  0xc1bb,0x0eca ,0xc1a4,0x0e68 ,0xc18e,0x0e06 ,
+  0xc178,0x0da4 ,0xc163,0x0d41 ,0xc14f,0x0cdf ,
+  0xc13b,0x0c7c ,0xc128,0x0c1a ,0xc115,0x0bb7 ,
+  0xc103,0x0b54 ,0xc0f1,0x0af1 ,0xc0e0,0x0a8e ,
+  0xc0d0,0x0a2b ,0xc0c0,0x09c7 ,0xc0b1,0x0964 ,
+  0xc0a3,0x0901 ,0xc095,0x089d ,0xc088,0x0839 ,
+  0xc07b,0x07d6 ,0xc06f,0x0772 ,0xc064,0x070e ,
+  0xc059,0x06aa ,0xc04f,0x0646 ,0xc045,0x05e2 ,
+  0xc03c,0x057e ,0xc034,0x051a ,0xc02c,0x04b5 ,
+  0xc025,0x0451 ,0xc01f,0x03ed ,0xc019,0x0388 ,
+  0xc014,0x0324 ,0xc00f,0x02c0 ,0xc00b,0x025b ,
+  0xc008,0x01f7 ,0xc005,0x0192 ,0xc003,0x012e ,
+  0xc001,0x00c9 ,0xc000,0x0065 };
diff --git a/trunk/src/common_audio/signal_processing/webrtc_fft_t_rad.c b/trunk/src/common_audio/signal_processing/webrtc_fft_t_rad.c
new file mode 100644
index 0000000..13fbd9f
--- /dev/null
+++ b/trunk/src/common_audio/signal_processing/webrtc_fft_t_rad.c
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains the Q14 radix-2 tables used in ARM9E optimization routines.
+ *
+ */
+
+extern const unsigned short t_Q14S_rad8[2];
+const unsigned short t_Q14S_rad8[2] = {  0x0000,0x2d41 };
+
+//extern const int t_Q30S_rad8[2];
+//const int t_Q30S_rad8[2] = {  0x00000000,0x2d413ccd };
+
+extern const unsigned short t_Q14R_rad8[2];
+const unsigned short t_Q14R_rad8[2] = {  0x2d41,0x2d41 };
+
+//extern const int t_Q30R_rad8[2];
+//const int t_Q30R_rad8[2] = {  0x2d413ccd,0x2d413ccd };
diff --git a/trunk/src/common_audio/vad/Android.mk b/trunk/src/common_audio/vad/Android.mk
new file mode 100644
index 0000000..b7be3f0
--- /dev/null
+++ b/trunk/src/common_audio/vad/Android.mk
@@ -0,0 +1,50 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_vad
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    webrtc_vad.c \
+    vad_core.c \
+    vad_filterbank.c \
+    vad_gmm.c \
+    vad_sp.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../signal_processing/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libdl \
+    libstlport
+
+ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
+LOCAL_LDLIBS += -ldl -lpthread
+endif
+
+ifneq ($(TARGET_SIMULATOR),true)
+LOCAL_SHARED_LIBRARIES += libdl
+endif
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/common_audio/vad/include/webrtc_vad.h b/trunk/src/common_audio/vad/include/webrtc_vad.h
new file mode 100644
index 0000000..3f9e402
--- /dev/null
+++ b/trunk/src/common_audio/vad/include/webrtc_vad.h
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This header file includes the VAD API calls. Specific function calls are given below.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_INCLUDE_WEBRTC_VAD_H_
+#define WEBRTC_COMMON_AUDIO_VAD_INCLUDE_WEBRTC_VAD_H_
+
+#include "typedefs.h"
+
+typedef struct WebRtcVadInst VadInst;
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/****************************************************************************
+ * WebRtcVad_AssignSize(...)
+ *
+ * This function gets the size needed for storing the instance for encoder
+ * and decoder, respectively
+ *
+ * Input/Output:
+ *      - size_in_bytes : Pointer to integer where the size is returned
+ *
+ * Return value         : 0
+ */
+WebRtc_Word16 WebRtcVad_AssignSize(int *size_in_bytes);
+
+/****************************************************************************
+ * WebRtcVad_Assign(...)
+ *
+ * This function assigns memory for the instances.
+ *
+ * Input:
+ *        - vad_inst_addr :  Address to where to assign memory
+ * Output:
+ *        - vad_inst      :  Pointer to the instance that should be created
+ *
+ * Return value           :  0 - Ok
+ *                          -1 - Error
+ */
+WebRtc_Word16 WebRtcVad_Assign(VadInst **vad_inst, void *vad_inst_addr);
+
+// Creates an instance to the VAD structure.
+//
+// - handle [o] : Pointer to the VAD instance that should be created.
+//
+// returns      : 0 - (OK), -1 - (Error)
+int WebRtcVad_Create(VadInst** handle);
+
+// Frees the dynamic memory of a specified VAD instance.
+//
+// - handle [i] : Pointer to VAD instance that should be freed.
+//
+// returns      : 0 - (OK), -1 - (NULL pointer in)
+int WebRtcVad_Free(VadInst* handle);
+
+/****************************************************************************
+ * WebRtcVad_Init(...)
+ *
+ * This function initializes a VAD instance
+ *
+ * Input:
+ *      - vad_inst      : Instance that should be initialized
+ *
+ * Output:
+ *      - vad_inst      : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcVad_Init(VadInst *vad_inst);
+
+/****************************************************************************
+ * WebRtcVad_set_mode(...)
+ *
+ * This function sets the aggressiveness mode of a VAD instance
+ *
+ * Input:
+ *      - vad_inst      : VAD instance
+ *      - mode          : Aggressiveness setting (0, 1, 2, or 3)
+ *
+ * Output:
+ *      - vad_inst      : Instance with the mode applied
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcVad_set_mode(VadInst *vad_inst, int mode);
+
+/****************************************************************************
+ * WebRtcVad_Process(...)
+ *
+ * This function calculates a VAD decision for the given speech frame
+ *
+ * Input
+ *        - vad_inst     : VAD Instance. Needs to be initiated before call.
+ *        - fs           : sampling frequency (Hz): 8000, 16000, or 32000
+ *        - speech_frame : Pointer to speech frame buffer
+ *        - frame_length : Length of speech frame buffer in number of samples
+ *
+ * Output:
+ *        - vad_inst     : Updated VAD instance
+ *
+ * Return value          :  1 - Active Voice
+ *                          0 - Non-active Voice
+ *                         -1 - Error
+ */
+WebRtc_Word16 WebRtcVad_Process(VadInst *vad_inst,
+                                WebRtc_Word16 fs,
+                                WebRtc_Word16 *speech_frame,
+                                WebRtc_Word16 frame_length);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_INCLUDE_WEBRTC_VAD_H_
diff --git a/trunk/src/common_audio/vad/vad.gypi b/trunk/src/common_audio/vad/vad.gypi
new file mode 100644
index 0000000..f15e651
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad.gypi
@@ -0,0 +1,69 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'vad',
+      'type': '<(library)',
+      'dependencies': [
+        'signal_processing',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/webrtc_vad.h',
+        'webrtc_vad.c',
+        'vad_core.c',
+        'vad_core.h',
+        'vad_defines.h',
+        'vad_filterbank.c',
+        'vad_filterbank.h',
+        'vad_gmm.c',
+        'vad_gmm.h',
+        'vad_sp.c',
+        'vad_sp.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets' : [
+        {
+          'target_name': 'vad_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'vad',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'vad_core_unittest.cc',
+            'vad_filterbank_unittest.cc',
+            'vad_gmm_unittest.cc',
+            'vad_sp_unittest.cc',
+            'vad_unittest.cc',
+            'vad_unittest.h',
+          ],
+        }, # vad_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/common_audio/vad/vad_core.c b/trunk/src/common_audio/vad/vad_core.c
new file mode 100644
index 0000000..2c6e6a9
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_core.c
@@ -0,0 +1,663 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vad_core.h"
+
+#include "signal_processing_library.h"
+#include "typedefs.h"
+#include "vad_defines.h"
+#include "vad_filterbank.h"
+#include "vad_gmm.h"
+#include "vad_sp.h"
+
+// Spectrum Weighting
+static const WebRtc_Word16 kSpectrumWeight[6] = { 6, 8, 10, 12, 14, 16 };
+static const WebRtc_Word16 kNoiseUpdateConst = 655; // Q15
+static const WebRtc_Word16 kSpeechUpdateConst = 6554; // Q15
+static const WebRtc_Word16 kBackEta = 154; // Q8
+// Minimum difference between the two models, Q5
+static const WebRtc_Word16 kMinimumDifference[6] = {
+    544, 544, 576, 576, 576, 576 };
+// Upper limit of mean value for speech model, Q7
+static const WebRtc_Word16 kMaximumSpeech[6] = {
+    11392, 11392, 11520, 11520, 11520, 11520 };
+// Minimum value for mean value
+static const WebRtc_Word16 kMinimumMean[2] = { 640, 768 };
+// Upper limit of mean value for noise model, Q7
+static const WebRtc_Word16 kMaximumNoise[6] = {
+    9216, 9088, 8960, 8832, 8704, 8576 };
+// Start values for the Gaussian models, Q7
+// Weights for the two Gaussians for the six channels (noise)
+static const WebRtc_Word16 kNoiseDataWeights[12] = {
+    34, 62, 72, 66, 53, 25, 94, 66, 56, 62, 75, 103 };
+// Weights for the two Gaussians for the six channels (speech)
+static const WebRtc_Word16 kSpeechDataWeights[12] = {
+    48, 82, 45, 87, 50, 47, 80, 46, 83, 41, 78, 81 };
+// Means for the two Gaussians for the six channels (noise)
+static const WebRtc_Word16 kNoiseDataMeans[12] = {
+    6738, 4892, 7065, 6715, 6771, 3369, 7646, 3863, 7820, 7266, 5020, 4362 };
+// Means for the two Gaussians for the six channels (speech)
+static const WebRtc_Word16 kSpeechDataMeans[12] = {
+    8306, 10085, 10078, 11823, 11843, 6309, 9473, 9571, 10879, 7581, 8180, 7483
+};
+// Stds for the two Gaussians for the six channels (noise)
+static const WebRtc_Word16 kNoiseDataStds[12] = {
+    378, 1064, 493, 582, 688, 593, 474, 697, 475, 688, 421, 455 };
+// Stds for the two Gaussians for the six channels (speech)
+static const WebRtc_Word16 kSpeechDataStds[12] = {
+    555, 505, 567, 524, 585, 1231, 509, 828, 492, 1540, 1079, 850 };
+
+// Constants used in GmmProbability().
+//
+// Maximum number of counted speech (VAD = 1) frames in a row.
+static const int16_t kMaxSpeechFrames = 6;
+// Minimum standard deviation for both speech and noise.
+static const int16_t kMinStd = 384;
+
+// Constants in WebRtcVad_InitCore().
+// Default aggressiveness mode.
+static const short kDefaultMode = 0;
+static const int kInitCheck = 42;
+
+// Calculates the probabilities for both speech and background noise using
+// Gaussian Mixture Models. A hypothesis-test is performed to decide which type
+// of signal is most probable.
+//
+// - inst           [i/o] : Pointer to VAD instance
+// - feature_vector [i]   : Feature vector = log10(energy in frequency band)
+// - total_power    [i]   : Total power in audio frame.
+// - frame_length   [i]   : Number of input samples
+//
+// - returns              : the VAD decision (0 - noise, 1 - speech).
+static int16_t GmmProbability(VadInstT *inst, WebRtc_Word16 *feature_vector,
+                              WebRtc_Word16 total_power, int frame_length)
+{
+    int n, k;
+    WebRtc_Word16 backval;
+    WebRtc_Word16 h0, h1;
+    WebRtc_Word16 ratvec, xval;
+    WebRtc_Word16 vadflag;
+    WebRtc_Word16 shifts0, shifts1;
+    WebRtc_Word16 tmp16, tmp16_1, tmp16_2;
+    WebRtc_Word16 diff, nr, pos;
+    WebRtc_Word16 nmk, nmk2, nmk3, smk, smk2, nsk, ssk;
+    WebRtc_Word16 delt, ndelt;
+    WebRtc_Word16 maxspe, maxmu;
+    WebRtc_Word16 deltaN[NUM_TABLE_VALUES], deltaS[NUM_TABLE_VALUES];
+    WebRtc_Word16 ngprvec[NUM_TABLE_VALUES], sgprvec[NUM_TABLE_VALUES];
+    WebRtc_Word32 h0test, h1test;
+    WebRtc_Word32 tmp32_1, tmp32_2;
+    WebRtc_Word32 dotVal;
+    WebRtc_Word32 nmid, smid;
+    WebRtc_Word32 probn[NUM_MODELS], probs[NUM_MODELS];
+    WebRtc_Word16 *nmean1ptr, *nmean2ptr, *smean1ptr, *smean2ptr, *nstd1ptr, *nstd2ptr,
+            *sstd1ptr, *sstd2ptr;
+    WebRtc_Word16 overhead1, overhead2, individualTest, totalTest;
+
+    // Set the thresholds to different values based on frame length
+    if (frame_length == 80)
+    {
+        // 80 input samples
+        overhead1 = inst->over_hang_max_1[0];
+        overhead2 = inst->over_hang_max_2[0];
+        individualTest = inst->individual[0];
+        totalTest = inst->total[0];
+    } else if (frame_length == 160)
+    {
+        // 160 input samples
+        overhead1 = inst->over_hang_max_1[1];
+        overhead2 = inst->over_hang_max_2[1];
+        individualTest = inst->individual[1];
+        totalTest = inst->total[1];
+    } else
+    {
+        // 240 input samples
+        overhead1 = inst->over_hang_max_1[2];
+        overhead2 = inst->over_hang_max_2[2];
+        individualTest = inst->individual[2];
+        totalTest = inst->total[2];
+    }
+
+    if (total_power > MIN_ENERGY)
+    { // If signal present at all
+
+        // Set pointers to the gaussian parameters
+        nmean1ptr = &inst->noise_means[0];
+        nmean2ptr = &inst->noise_means[NUM_CHANNELS];
+        smean1ptr = &inst->speech_means[0];
+        smean2ptr = &inst->speech_means[NUM_CHANNELS];
+        nstd1ptr = &inst->noise_stds[0];
+        nstd2ptr = &inst->noise_stds[NUM_CHANNELS];
+        sstd1ptr = &inst->speech_stds[0];
+        sstd2ptr = &inst->speech_stds[NUM_CHANNELS];
+
+        vadflag = 0;
+        dotVal = 0;
+        for (n = 0; n < NUM_CHANNELS; n++)
+        { // For all channels
+
+            pos = WEBRTC_SPL_LSHIFT_W16(n, 1);
+            xval = feature_vector[n];
+
+            // Probability for Noise, Q7 * Q20 = Q27
+            tmp32_1 = WebRtcVad_GaussianProbability(xval, *nmean1ptr++, *nstd1ptr++,
+                                                    &deltaN[pos]);
+            probn[0] = (WebRtc_Word32)(kNoiseDataWeights[n] * tmp32_1);
+            tmp32_1 = WebRtcVad_GaussianProbability(xval, *nmean2ptr++, *nstd2ptr++,
+                                                    &deltaN[pos + 1]);
+            probn[1] = (WebRtc_Word32)(kNoiseDataWeights[n + NUM_CHANNELS] * tmp32_1);
+            h0test = probn[0] + probn[1]; // Q27
+            h0 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(h0test, 12); // Q15
+
+            // Probability for Speech
+            tmp32_1 = WebRtcVad_GaussianProbability(xval, *smean1ptr++, *sstd1ptr++,
+                                                    &deltaS[pos]);
+            probs[0] = (WebRtc_Word32)(kSpeechDataWeights[n] * tmp32_1);
+            tmp32_1 = WebRtcVad_GaussianProbability(xval, *smean2ptr++, *sstd2ptr++,
+                                                    &deltaS[pos + 1]);
+            probs[1] = (WebRtc_Word32)(kSpeechDataWeights[n + NUM_CHANNELS] * tmp32_1);
+            h1test = probs[0] + probs[1]; // Q27
+            h1 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(h1test, 12); // Q15
+
+            // Get likelihood ratio. Approximate log2(H1/H0) with shifts0 - shifts1
+            shifts0 = WebRtcSpl_NormW32(h0test);
+            shifts1 = WebRtcSpl_NormW32(h1test);
+
+            if ((h0test > 0) && (h1test > 0))
+            {
+                ratvec = shifts0 - shifts1;
+            } else if (h1test > 0)
+            {
+                ratvec = 31 - shifts1;
+            } else if (h0test > 0)
+            {
+                ratvec = shifts0 - 31;
+            } else
+            {
+                ratvec = 0;
+            }
+
+            // VAD decision with spectrum weighting
+            dotVal += WEBRTC_SPL_MUL_16_16(ratvec, kSpectrumWeight[n]);
+
+            // Individual channel test
+            if ((ratvec << 2) > individualTest)
+            {
+                vadflag = 1;
+            }
+
+            // Probabilities used when updating model
+            if (h0 > 0)
+            {
+                tmp32_1 = probn[0] & 0xFFFFF000; // Q27
+                tmp32_2 = WEBRTC_SPL_LSHIFT_W32(tmp32_1, 2); // Q29
+                ngprvec[pos] = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, h0);
+                ngprvec[pos + 1] = 16384 - ngprvec[pos];
+            } else
+            {
+                ngprvec[pos] = 16384;
+                ngprvec[pos + 1] = 0;
+            }
+
+            // Probabilities used when updating model
+            if (h1 > 0)
+            {
+                tmp32_1 = probs[0] & 0xFFFFF000;
+                tmp32_2 = WEBRTC_SPL_LSHIFT_W32(tmp32_1, 2);
+                sgprvec[pos] = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, h1);
+                sgprvec[pos + 1] = 16384 - sgprvec[pos];
+            } else
+            {
+                sgprvec[pos] = 0;
+                sgprvec[pos + 1] = 0;
+            }
+        }
+
+        // Overall test
+        if (dotVal >= totalTest)
+        {
+            vadflag |= 1;
+        }
+
+        // Set pointers to the means and standard deviations.
+        nmean1ptr = &inst->noise_means[0];
+        smean1ptr = &inst->speech_means[0];
+        nstd1ptr = &inst->noise_stds[0];
+        sstd1ptr = &inst->speech_stds[0];
+
+        maxspe = 12800;
+
+        // Update the model's parameters
+        for (n = 0; n < NUM_CHANNELS; n++)
+        {
+
+            pos = WEBRTC_SPL_LSHIFT_W16(n, 1);
+
+            // Get min value in past which is used for long term correction
+            backval = WebRtcVad_FindMinimum(inst, feature_vector[n], n); // Q4
+
+            // Compute the "global" mean, that is the sum of the two means weighted
+            nmid = WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n], *nmean1ptr); // Q7 * Q7
+            nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+NUM_CHANNELS],
+                    *(nmean1ptr+NUM_CHANNELS));
+            tmp16_1 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 6); // Q8
+
+            for (k = 0; k < NUM_MODELS; k++)
+            {
+
+                nr = pos + k;
+
+                nmean2ptr = nmean1ptr + k * NUM_CHANNELS;
+                smean2ptr = smean1ptr + k * NUM_CHANNELS;
+                nstd2ptr = nstd1ptr + k * NUM_CHANNELS;
+                sstd2ptr = sstd1ptr + k * NUM_CHANNELS;
+                nmk = *nmean2ptr;
+                smk = *smean2ptr;
+                nsk = *nstd2ptr;
+                ssk = *sstd2ptr;
+
+                // Update noise mean vector if the frame consists of noise only
+                nmk2 = nmk;
+                if (!vadflag)
+                {
+                    // deltaN = (x-mu)/sigma^2
+                    // ngprvec[k] = probn[k]/(probn[0] + probn[1])
+
+                    delt = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ngprvec[nr],
+                            deltaN[nr], 11); // Q14*Q11
+                    nmk2 = nmk + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delt,
+                            kNoiseUpdateConst,
+                            22); // Q7+(Q14*Q15>>22)
+                }
+
+                // Long term correction of the noise mean
+                ndelt = WEBRTC_SPL_LSHIFT_W16(backval, 4);
+                ndelt -= tmp16_1; // Q8 - Q8
+                nmk3 = nmk2 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ndelt,
+                        kBackEta,
+                        9); // Q7+(Q8*Q8)>>9
+
+                // Control that the noise mean does not drift too much
+                tmp16 = WEBRTC_SPL_LSHIFT_W16(k+5, 7);
+                if (nmk3 < tmp16)
+                    nmk3 = tmp16;
+                tmp16 = WEBRTC_SPL_LSHIFT_W16(72+k-n, 7);
+                if (nmk3 > tmp16)
+                    nmk3 = tmp16;
+                *nmean2ptr = nmk3;
+
+                if (vadflag)
+                {
+                    // Update speech mean vector:
+                    // deltaS = (x-mu)/sigma^2
+                    // sgprvec[k] = probs[k]/(probs[0] + probs[1])
+
+                    delt = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sgprvec[nr],
+                            deltaS[nr],
+                            11); // (Q14*Q11)>>11=Q14
+                    tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delt,
+                            kSpeechUpdateConst,
+                            21) + 1;
+                    smk2 = smk + (tmp16 >> 1); // Q7 + (Q14 * Q15 >> 22)
+
+                    // Control that the speech mean does not drift too much
+                    maxmu = maxspe + 640;
+                    if (smk2 < kMinimumMean[k])
+                        smk2 = kMinimumMean[k];
+                    if (smk2 > maxmu)
+                        smk2 = maxmu;
+
+                    *smean2ptr = smk2;
+
+                    // (Q7>>3) = Q4
+                    tmp16 = WEBRTC_SPL_RSHIFT_W16((smk + 4), 3);
+
+                    tmp16 = feature_vector[n] - tmp16; // Q4
+                    tmp32_1 = WEBRTC_SPL_MUL_16_16_RSFT(deltaS[nr], tmp16, 3);
+                    tmp32_2 = tmp32_1 - (WebRtc_Word32)4096; // Q12
+                    tmp16 = WEBRTC_SPL_RSHIFT_W16((sgprvec[nr]), 2);
+                    tmp32_1 = (WebRtc_Word32)(tmp16 * tmp32_2);// (Q15>>3)*(Q14>>2)=Q12*Q12=Q24
+
+                    tmp32_2 = WEBRTC_SPL_RSHIFT_W32(tmp32_1, 4); // Q20
+
+                    // 0.1 * Q20 / Q7 = Q13
+                    if (tmp32_2 > 0)
+                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, ssk * 10);
+                    else
+                    {
+                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(-tmp32_2, ssk * 10);
+                        tmp16 = -tmp16;
+                    }
+                    // divide by 4 giving an update factor of 0.025
+                    tmp16 += 128; // Rounding
+                    ssk += WEBRTC_SPL_RSHIFT_W16(tmp16, 8);
+                    // Division with 8 plus Q7
+                    if (ssk < kMinStd)
+                        ssk = kMinStd;
+                    *sstd2ptr = ssk;
+                } else
+                {
+                    // Update GMM variance vectors
+                    // deltaN * (feature_vector[n] - nmk) - 1, Q11 * Q4
+                    tmp16 = feature_vector[n] - WEBRTC_SPL_RSHIFT_W16(nmk, 3);
+
+                    // (Q15>>3) * (Q14>>2) = Q12 * Q12 = Q24
+                    tmp32_1 = WEBRTC_SPL_MUL_16_16_RSFT(deltaN[nr], tmp16, 3) - 4096;
+                    tmp16 = WEBRTC_SPL_RSHIFT_W16((ngprvec[nr]+2), 2);
+                    tmp32_2 = (WebRtc_Word32)(tmp16 * tmp32_1);
+                    tmp32_1 = WEBRTC_SPL_RSHIFT_W32(tmp32_2, 14);
+                    // Q20  * approx 0.001 (2^-10=0.0009766)
+
+                    // Q20 / Q7 = Q13
+                    // Divide the magnitude, then restore the sign of the quotient.
+                    if (tmp32_1 > 0)
+                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_1, nsk);
+                    else
+                    {
+                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(-tmp32_1, nsk);
+                        tmp16 = -tmp16;
+                    }
+                    tmp16 += 32; // Rounding
+                    nsk += WEBRTC_SPL_RSHIFT_W16(tmp16, 6);
+
+                    if (nsk < kMinStd)
+                        nsk = kMinStd;
+
+                    *nstd2ptr = nsk;
+                }
+            }
+
+            // Separate models if they are too close - nmid in Q14
+            nmid = WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n], *nmean1ptr);
+            nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+NUM_CHANNELS], *nmean2ptr);
+
+            // smid in Q14
+            smid = WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n], *smean1ptr);
+            smid += WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n+NUM_CHANNELS], *smean2ptr);
+
+            // diff = "global" speech mean - "global" noise mean
+            diff = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(smid, 9);
+            tmp16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 9);
+            diff -= tmp16;
+
+            if (diff < kMinimumDifference[n])
+            {
+
+                tmp16 = kMinimumDifference[n] - diff; // Q5
+
+                // tmp16_1 = ~0.8 * (kMinimumDifference - diff) in Q7
+                // tmp16_2 = ~0.2 * (kMinimumDifference - diff) in Q7
+                tmp16_1 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(13, tmp16, 2);
+                tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(3, tmp16, 2);
+
+                // First Gauss, speech model
+                tmp16 = tmp16_1 + *smean1ptr;
+                *smean1ptr = tmp16;
+                smid = WEBRTC_SPL_MUL_16_16(tmp16, kSpeechDataWeights[n]);
+
+                // Second Gauss, speech model
+                tmp16 = tmp16_1 + *smean2ptr;
+                *smean2ptr = tmp16;
+                smid += WEBRTC_SPL_MUL_16_16(tmp16, kSpeechDataWeights[n+NUM_CHANNELS]);
+
+                // First Gauss, noise model
+                tmp16 = *nmean1ptr - tmp16_2;
+                *nmean1ptr = tmp16;
+
+                nmid = WEBRTC_SPL_MUL_16_16(tmp16, kNoiseDataWeights[n]);
+
+                // Second Gauss, noise model
+                tmp16 = *nmean2ptr - tmp16_2;
+                *nmean2ptr = tmp16;
+                nmid += WEBRTC_SPL_MUL_16_16(tmp16, kNoiseDataWeights[n+NUM_CHANNELS]);
+            }
+
+            // Control that the speech & noise means do not drift too much
+            maxspe = kMaximumSpeech[n];
+            tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(smid, 7);
+            if (tmp16_2 > maxspe)
+            { // Upper limit of speech model
+                tmp16_2 -= maxspe;
+
+                *smean1ptr -= tmp16_2;
+                *smean2ptr -= tmp16_2;
+            }
+
+            tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 7);
+            if (tmp16_2 > kMaximumNoise[n])
+            {
+                tmp16_2 -= kMaximumNoise[n];
+
+                *nmean1ptr -= tmp16_2;
+                *nmean2ptr -= tmp16_2;
+            }
+
+            nmean1ptr++;
+            smean1ptr++;
+            nstd1ptr++;
+            sstd1ptr++;
+        }
+        inst->frame_counter++;
+    } else
+    {
+        vadflag = 0;
+    }
+
+    // Hangover smoothing
+    if (!vadflag)
+    {
+        if (inst->over_hang > 0)
+        {
+            vadflag = 2 + inst->over_hang;
+            inst->over_hang = inst->over_hang - 1;
+        }
+        inst->num_of_speech = 0;
+    } else
+    {
+        inst->num_of_speech = inst->num_of_speech + 1;
+        if (inst->num_of_speech > kMaxSpeechFrames)
+        {
+            inst->num_of_speech = kMaxSpeechFrames;
+            inst->over_hang = overhead2;
+        } else
+            inst->over_hang = overhead1;
+    }
+    return vadflag;
+}
+
+// Initialize the VAD. Set aggressiveness mode to default value.
+int WebRtcVad_InitCore(VadInstT* self) {
+  int i;
+
+  if (self == NULL) {
+    return -1;
+  }
+
+  // Initialization of general struct variables.
+  self->vad = 1;  // Speech active (=1).
+  self->frame_counter = 0;
+  self->over_hang = 0;
+  self->num_of_speech = 0;
+
+  // Initialization of downsampling filter state.
+  memset(self->downsampling_filter_states, 0,
+         sizeof(self->downsampling_filter_states));
+
+  // Read initial PDF parameters (Q7 tables defined at the top of this file).
+  for (i = 0; i < NUM_TABLE_VALUES; i++) {
+    self->noise_means[i] = kNoiseDataMeans[i];
+    self->speech_means[i] = kSpeechDataMeans[i];
+    self->noise_stds[i] = kNoiseDataStds[i];
+    self->speech_stds[i] = kSpeechDataStds[i];
+  }
+
+  // Initialize Index and Minimum value vectors.
+  for (i = 0; i < 16 * NUM_CHANNELS; i++) {
+    self->low_value_vector[i] = 10000;  // Start high so real minima replace it.
+    self->index_vector[i] = 0;
+  }
+
+  // Initialize splitting filter states.
+  memset(self->upper_state, 0, sizeof(self->upper_state));
+  memset(self->lower_state, 0, sizeof(self->lower_state));
+
+  // Initialize high pass filter states.
+  memset(self->hp_filter_state, 0, sizeof(self->hp_filter_state));
+
+  // Initialize mean value memory, for WebRtcVad_FindMinimum().
+  for (i = 0; i < NUM_CHANNELS; i++) {
+    self->mean_value[i] = 1600;
+  }
+
+  // Set aggressiveness mode to default (=|kDefaultMode|).
+  if (WebRtcVad_set_mode_core(self, kDefaultMode) != 0) {
+    return -1;
+  }
+
+  self->init_flag = kInitCheck;  // Marks the instance as initialized.
+
+  return 0;
+}
+
+// Set aggressiveness mode: 0 (quality) up to 3 (very aggressive); -1 otherwise.
+int WebRtcVad_set_mode_core(VadInstT *inst, int mode)
+{
+
+    if (mode == 0)
+    {
+        // Quality mode
+        inst->over_hang_max_1[0] = OHMAX1_10MS_Q; // Overhang short speech burst
+        inst->over_hang_max_1[1] = OHMAX1_20MS_Q; // Overhang short speech burst
+        inst->over_hang_max_1[2] = OHMAX1_30MS_Q; // Overhang short speech burst
+        inst->over_hang_max_2[0] = OHMAX2_10MS_Q; // Overhang long speech burst
+        inst->over_hang_max_2[1] = OHMAX2_20MS_Q; // Overhang long speech burst
+        inst->over_hang_max_2[2] = OHMAX2_30MS_Q; // Overhang long speech burst
+
+        inst->individual[0] = INDIVIDUAL_10MS_Q;
+        inst->individual[1] = INDIVIDUAL_20MS_Q;
+        inst->individual[2] = INDIVIDUAL_30MS_Q;
+
+        inst->total[0] = TOTAL_10MS_Q;
+        inst->total[1] = TOTAL_20MS_Q;
+        inst->total[2] = TOTAL_30MS_Q;
+    } else if (mode == 1)
+    {
+        // Low bitrate mode
+        inst->over_hang_max_1[0] = OHMAX1_10MS_LBR; // Overhang short speech burst
+        inst->over_hang_max_1[1] = OHMAX1_20MS_LBR; // Overhang short speech burst
+        inst->over_hang_max_1[2] = OHMAX1_30MS_LBR; // Overhang short speech burst
+        inst->over_hang_max_2[0] = OHMAX2_10MS_LBR; // Overhang long speech burst
+        inst->over_hang_max_2[1] = OHMAX2_20MS_LBR; // Overhang long speech burst
+        inst->over_hang_max_2[2] = OHMAX2_30MS_LBR; // Overhang long speech burst
+
+        inst->individual[0] = INDIVIDUAL_10MS_LBR;
+        inst->individual[1] = INDIVIDUAL_20MS_LBR;
+        inst->individual[2] = INDIVIDUAL_30MS_LBR;
+
+        inst->total[0] = TOTAL_10MS_LBR;
+        inst->total[1] = TOTAL_20MS_LBR;
+        inst->total[2] = TOTAL_30MS_LBR;
+    } else if (mode == 2)
+    {
+        // Aggressive mode
+        inst->over_hang_max_1[0] = OHMAX1_10MS_AGG; // Overhang short speech burst
+        inst->over_hang_max_1[1] = OHMAX1_20MS_AGG; // Overhang short speech burst
+        inst->over_hang_max_1[2] = OHMAX1_30MS_AGG; // Overhang short speech burst
+        inst->over_hang_max_2[0] = OHMAX2_10MS_AGG; // Overhang long speech burst
+        inst->over_hang_max_2[1] = OHMAX2_20MS_AGG; // Overhang long speech burst
+        inst->over_hang_max_2[2] = OHMAX2_30MS_AGG; // Overhang long speech burst
+
+        inst->individual[0] = INDIVIDUAL_10MS_AGG;
+        inst->individual[1] = INDIVIDUAL_20MS_AGG;
+        inst->individual[2] = INDIVIDUAL_30MS_AGG;
+
+        inst->total[0] = TOTAL_10MS_AGG;
+        inst->total[1] = TOTAL_20MS_AGG;
+        inst->total[2] = TOTAL_30MS_AGG;
+    } else if (mode == 3)
+    {
+        // Very aggressive mode
+        inst->over_hang_max_1[0] = OHMAX1_10MS_VAG; // Overhang short speech burst
+        inst->over_hang_max_1[1] = OHMAX1_20MS_VAG; // Overhang short speech burst
+        inst->over_hang_max_1[2] = OHMAX1_30MS_VAG; // Overhang short speech burst
+        inst->over_hang_max_2[0] = OHMAX2_10MS_VAG; // Overhang long speech burst
+        inst->over_hang_max_2[1] = OHMAX2_20MS_VAG; // Overhang long speech burst
+        inst->over_hang_max_2[2] = OHMAX2_30MS_VAG; // Overhang long speech burst
+
+        inst->individual[0] = INDIVIDUAL_10MS_VAG;
+        inst->individual[1] = INDIVIDUAL_20MS_VAG;
+        inst->individual[2] = INDIVIDUAL_30MS_VAG;
+
+        inst->total[0] = TOTAL_10MS_VAG;
+        inst->total[1] = TOTAL_20MS_VAG;
+        inst->total[2] = TOTAL_30MS_VAG;
+    } else
+    {
+        // Unknown mode: leave the instance untouched and signal an error.
+        return -1;
+    }
+
+    return 0;
+}
+
+// Calculate VAD decision by first extracting feature values and then calculate
+// probability for both speech and background noise.
+
+WebRtc_Word16 WebRtcVad_CalcVad32khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
+                                     int frame_length)
+{
+    WebRtc_Word16 len, vad;
+    WebRtc_Word16 speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB)
+    WebRtc_Word16 speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
+
+
+    // Downsample signal 32->16->8 before doing VAD
+    WebRtcVad_Downsampling(speech_frame, speechWB, &(inst->downsampling_filter_states[2]),
+                           frame_length);
+    len = WEBRTC_SPL_RSHIFT_W16(frame_length, 1); // Half the samples: 32 kHz -> 16 kHz.
+
+    WebRtcVad_Downsampling(speechWB, speechNB, inst->downsampling_filter_states, len);
+    len = WEBRTC_SPL_RSHIFT_W16(len, 1); // Half again: 16 kHz -> 8 kHz.
+
+    // Do VAD on an 8 kHz signal
+    vad = WebRtcVad_CalcVad8khz(inst, speechNB, len);
+
+    return vad;
+}
+
+WebRtc_Word16 WebRtcVad_CalcVad16khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
+                                     int frame_length)
+{
+    WebRtc_Word16 len, vad;
+    WebRtc_Word16 speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
+
+    // Wideband: Downsample signal before doing VAD
+    WebRtcVad_Downsampling(speech_frame, speechNB, inst->downsampling_filter_states,
+                           frame_length);
+
+    len = WEBRTC_SPL_RSHIFT_W16(frame_length, 1); // Half the samples: 16 kHz -> 8 kHz.
+    vad = WebRtcVad_CalcVad8khz(inst, speechNB, len);
+
+    return vad;
+}
+
+WebRtc_Word16 WebRtcVad_CalcVad8khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
+                                    int frame_length)
+{
+    WebRtc_Word16 feature_vector[NUM_CHANNELS], total_power;
+
+    // Get power in the bands
+    total_power = WebRtcVad_CalculateFeatures(inst, speech_frame, frame_length,
+                                              feature_vector);
+
+    // Make a VAD decision (0 = noise, otherwise speech; see GmmProbability()).
+    inst->vad = GmmProbability(inst, feature_vector, total_power, frame_length);
+
+    return inst->vad;
+}
diff --git a/trunk/src/common_audio/vad/vad_core.h b/trunk/src/common_audio/vad/vad_core.h
new file mode 100644
index 0000000..c82fbce
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_core.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This header file includes the descriptions of the core VAD calls.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
+
+#include "typedefs.h"
+#include "vad_defines.h"
+
+typedef struct VadInstT_
+{
+
+    WebRtc_Word16 vad; // Latest VAD decision, set by WebRtcVad_CalcVad8khz().
+    WebRtc_Word32 downsampling_filter_states[4]; // [0..1]: 16->8 kHz, [2..3]: 32->16 kHz.
+    WebRtc_Word16 noise_means[NUM_TABLE_VALUES]; // GMM noise means, Q7.
+    WebRtc_Word16 speech_means[NUM_TABLE_VALUES]; // GMM speech means, Q7.
+    WebRtc_Word16 noise_stds[NUM_TABLE_VALUES]; // GMM noise standard deviations, Q7.
+    WebRtc_Word16 speech_stds[NUM_TABLE_VALUES]; // GMM speech standard deviations, Q7.
+    // TODO(bjornv): Change to |frame_count|.
+    WebRtc_Word32 frame_counter;
+    WebRtc_Word16 over_hang; // Hangover counter (frames).
+    WebRtc_Word16 num_of_speech; // Consecutive speech frames, capped in GmmProbability().
+    // TODO(bjornv): Change to |age_vector|.
+    WebRtc_Word16 index_vector[16 * NUM_CHANNELS];
+    WebRtc_Word16 low_value_vector[16 * NUM_CHANNELS]; // Minimum value memory per channel.
+    // TODO(bjornv): Change to |median|.
+    WebRtc_Word16 mean_value[NUM_CHANNELS]; // For WebRtcVad_FindMinimum().
+    WebRtc_Word16 upper_state[5]; // Splitting filter state.
+    WebRtc_Word16 lower_state[5]; // Splitting filter state.
+    WebRtc_Word16 hp_filter_state[4]; // High pass filter state.
+    WebRtc_Word16 over_hang_max_1[3]; // Short-burst hangover limits per frame length.
+    WebRtc_Word16 over_hang_max_2[3]; // Long-burst hangover limits per frame length.
+    WebRtc_Word16 individual[3]; // Per-channel thresholds, set by WebRtcVad_set_mode_core().
+    WebRtc_Word16 total[3]; // Overall thresholds, set by WebRtcVad_set_mode_core().
+
+    int init_flag; // Set to |kInitCheck| by WebRtcVad_InitCore().
+
+} VadInstT;
+
+// Initializes the core VAD component. The default aggressiveness mode is
+// controlled by |kDefaultMode| in vad_core.c.
+//
+// - self [i/o] : Instance that should be initialized
+//
+// returns      : 0 (OK), or -1 (NULL pointer passed in, or the default mode
+//                could not be set)
+int WebRtcVad_InitCore(VadInstT* self);
+
+/****************************************************************************
+ * WebRtcVad_set_mode_core(...)
+ *
+ * This function changes the VAD settings
+ *
+ * Input:
+ *      - inst      : VAD instance
+ *      - mode      : Aggressiveness degree
+ *                    0 (High quality) - 3 (Highly aggressive)
+ *
+ * Output:
+ *      - inst      : Changed instance
+ *
+ * Return value     :  0 - Ok
+ *                    -1 - Error
+ */
+
+int WebRtcVad_set_mode_core(VadInstT* inst, int mode);
+
+/****************************************************************************
+ * WebRtcVad_CalcVad32khz(...) 
+ * WebRtcVad_CalcVad16khz(...) 
+ * WebRtcVad_CalcVad8khz(...) 
+ *
+ * Calculate probability for active speech and make VAD decision.
+ *
+ * Input:
+ *      - inst          : Instance that should be initialized
+ *      - speech_frame  : Input speech frame
+ *      - frame_length  : Number of input samples
+ *
+ * Output:
+ *      - inst          : Updated filter states etc.
+ *
+ * Return value         : VAD decision
+ *                        0 - No active speech
+ *                        1-6 - Active speech
+ */
+WebRtc_Word16 WebRtcVad_CalcVad32khz(VadInstT* inst, WebRtc_Word16* speech_frame,
+                                     int frame_length);
+WebRtc_Word16 WebRtcVad_CalcVad16khz(VadInstT* inst, WebRtc_Word16* speech_frame,
+                                     int frame_length);
+WebRtc_Word16 WebRtcVad_CalcVad8khz(VadInstT* inst, WebRtc_Word16* speech_frame,
+                                    int frame_length);
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
diff --git a/trunk/src/common_audio/vad/vad_core_unittest.cc b/trunk/src/common_audio/vad/vad_core_unittest.cc
new file mode 100644
index 0000000..141b796
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_core_unittest.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_core.h"
+}
+
+namespace {
+
+TEST_F(VadTest, InitCore) {
+  // Test WebRtcVad_InitCore().
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+
+  // NULL pointer test.
+  EXPECT_EQ(-1, WebRtcVad_InitCore(NULL));
+
+  // Verify return = 0 for non-NULL pointer.
+  EXPECT_EQ(0, WebRtcVad_InitCore(self));
+  // Verify init_flag is set (42 == |kInitCheck| in vad_core.c).
+  EXPECT_EQ(42, self->init_flag);
+
+  free(self);
+}
+
+TEST_F(VadTest, set_mode_core) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+
+  // TODO(bjornv): Add NULL pointer check if we take care of it in
+  // vad_core.c
+
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  // Test WebRtcVad_set_mode_core() with out-of-range and valid modes.
+  // Invalid modes should return -1.
+  EXPECT_EQ(-1, WebRtcVad_set_mode_core(self, -1));
+  EXPECT_EQ(-1, WebRtcVad_set_mode_core(self, 1000));
+  // Valid modes should return 0.
+  for (size_t j = 0; j < kModesSize; ++j) {
+    EXPECT_EQ(0, WebRtcVad_set_mode_core(self, kModes[j]));
+  }
+
+  free(self);
+}
+
+TEST_F(VadTest, CalcVad) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+  int16_t speech[kMaxFrameLength];
+
+  // TODO(bjornv): Add NULL pointer check if we take care of it in
+  // vad_core.c
+
+  // Test WebRtcVad_CalcVad8khz(), -16khz() and -32khz().
+  // Verify that all zeros in gives VAD = 0 out.
+  memset(speech, 0, sizeof(speech));
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalcVad8khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(16000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalcVad16khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(32000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalcVad32khz(self, speech, kFrameLengths[j]));
+    }
+  }
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    speech[i] = (i * i);
+  }
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(1, WebRtcVad_CalcVad8khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(16000, kFrameLengths[j])) {
+      EXPECT_EQ(1, WebRtcVad_CalcVad16khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(32000, kFrameLengths[j])) {
+      EXPECT_EQ(1, WebRtcVad_CalcVad32khz(self, speech, kFrameLengths[j]));
+    }
+  }
+
+  free(self);
+}
+}  // namespace
diff --git a/trunk/src/common_audio/vad/vad_defines.h b/trunk/src/common_audio/vad/vad_defines.h
new file mode 100644
index 0000000..5d1539d
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_defines.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This header file includes the macros used in VAD.
+ */
+
+#ifndef WEBRTC_VAD_DEFINES_H_
+#define WEBRTC_VAD_DEFINES_H_
+
+// Threshold macros below are given per frame length (10/20/30 ms) and per
+// aggressiveness mode suffix: _Q (mode 0, quality), _LBR (mode 1, low
+// bitrate), _AGG (mode 2, very aggressive), _VAG (mode 3, super aggressive).
+
+#define NUM_CHANNELS        6   // Six frequency bands
+#define NUM_MODELS          2   // Number of Gaussian models
+// Parenthesized so the macro is safe inside larger expressions
+// (e.g. x / NUM_TABLE_VALUES).
+#define NUM_TABLE_VALUES    (NUM_CHANNELS * NUM_MODELS)
+
+#define MIN_ENERGY          10
+#define ALPHA1              6553    // 0.2 in Q15
+#define ALPHA2              32439   // 0.99 in Q15
+// Mode 0, Quality thresholds - Different thresholds for the different frame lengths
+#define INDIVIDUAL_10MS_Q   24
+#define INDIVIDUAL_20MS_Q   21
+#define INDIVIDUAL_30MS_Q   24
+
+#define TOTAL_10MS_Q        57
+#define TOTAL_20MS_Q        48
+#define TOTAL_30MS_Q        57
+
+#define OHMAX1_10MS_Q       8  // Max Overhang 1
+#define OHMAX2_10MS_Q       14 // Max Overhang 2
+#define OHMAX1_20MS_Q       4  // Max Overhang 1
+#define OHMAX2_20MS_Q       7  // Max Overhang 2
+#define OHMAX1_30MS_Q       3
+#define OHMAX2_30MS_Q       5
+
+// Mode 1, Low bitrate thresholds - Different thresholds for the different frame lengths
+#define INDIVIDUAL_10MS_LBR 37
+#define INDIVIDUAL_20MS_LBR 32
+#define INDIVIDUAL_30MS_LBR 37
+
+#define TOTAL_10MS_LBR      100
+#define TOTAL_20MS_LBR      80
+#define TOTAL_30MS_LBR      100
+
+#define OHMAX1_10MS_LBR     8  // Max Overhang 1
+#define OHMAX2_10MS_LBR     14 // Max Overhang 2
+#define OHMAX1_20MS_LBR     4
+#define OHMAX2_20MS_LBR     7
+
+#define OHMAX1_30MS_LBR     3
+#define OHMAX2_30MS_LBR     5
+
+// Mode 2, Very aggressive thresholds - Different thresholds for the different frame lengths
+#define INDIVIDUAL_10MS_AGG 82
+#define INDIVIDUAL_20MS_AGG 78
+#define INDIVIDUAL_30MS_AGG 82
+
+#define TOTAL_10MS_AGG      285 //580
+#define TOTAL_20MS_AGG      260
+#define TOTAL_30MS_AGG      285
+
+#define OHMAX1_10MS_AGG     6  // Max Overhang 1
+#define OHMAX2_10MS_AGG     9  // Max Overhang 2
+#define OHMAX1_20MS_AGG     3
+#define OHMAX2_20MS_AGG     5
+
+#define OHMAX1_30MS_AGG     2
+#define OHMAX2_30MS_AGG     3
+
+// Mode 3, Super aggressive thresholds - Different thresholds for the different frame lengths
+#define INDIVIDUAL_10MS_VAG 94
+#define INDIVIDUAL_20MS_VAG 94
+#define INDIVIDUAL_30MS_VAG 94
+
+#define TOTAL_10MS_VAG      1100 //1700
+#define TOTAL_20MS_VAG      1050
+#define TOTAL_30MS_VAG      1100
+
+#define OHMAX1_10MS_VAG     6  // Max Overhang 1
+#define OHMAX2_10MS_VAG     9  // Max Overhang 2
+#define OHMAX1_20MS_VAG     3
+#define OHMAX2_20MS_VAG     5
+
+#define OHMAX1_30MS_VAG     2
+#define OHMAX2_30MS_VAG     3
+
+#endif // WEBRTC_VAD_DEFINES_H_
diff --git a/trunk/src/common_audio/vad/vad_filterbank.c b/trunk/src/common_audio/vad/vad_filterbank.c
new file mode 100644
index 0000000..2f5db44
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_filterbank.c
@@ -0,0 +1,335 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vad_filterbank.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+#include "typedefs.h"
+#include "vad_defines.h"
+
+// Constants used in LogOfEnergy().
+static const int16_t kLogConst = 24660;  // 160*log10(2) in Q9.
+static const int16_t kLogEnergyIntPart = 14336;  // 14 in Q10
+
+// Coefficients used by HighPassFilter, Q14.
+static const int16_t kHpZeroCoefs[3] = { 6631, -13262, 6631 };
+static const int16_t kHpPoleCoefs[3] = { 16384, -7756, 5620 };
+
+// Allpass filter coefficients, upper and lower, in Q15.
+// Upper: 0.64, Lower: 0.17
+static const int16_t kAllPassCoefsQ15[2] = { 20972, 5571 };
+
+// Adjustment for division with two in SplitFilter.
+static const int16_t kOffsetVector[6] = { 368, 368, 272, 176, 176, 176 };
+
+// High pass filtering, with a cut-off frequency at 80 Hz, if the |data_in| is
+// sampled at 500 Hz.
+//
+// - data_in      [i]   : Input audio data sampled at 500 Hz.
+// - data_length  [i]   : Length of input and output data.
+// - filter_state [i/o] : State of the filter (4 elements: [0..1] hold the two
+//                        most recent inputs for the all-zero section, [2..3]
+//                        the two most recent outputs for the all-pole section).
+// - data_out     [o]   : Output audio data in the frequency interval
+//                        80 - 250 Hz.
+static void HighPassFilter(const int16_t* data_in, int data_length,
+                           int16_t* filter_state, int16_t* data_out) {
+  int i;
+  const int16_t* in_ptr = data_in;
+  int16_t* out_ptr = data_out;
+  int32_t tmp32 = 0;
+
+
+  // The sum of the absolute values of the impulse response:
+  // The zero/pole-filter has a max amplification of a single sample of: 1.4546
+  // Impulse response: 0.4047 -0.6179 -0.0266  0.1993  0.1035  -0.0194
+  // The all-zero section has a max amplification of a single sample of: 1.6189
+  // Impulse response: 0.4047 -0.8094  0.4047  0       0        0
+  // The all-pole section has a max amplification of a single sample of: 1.9931
+  // Impulse response: 1.0000  0.4734 -0.1189 -0.2187 -0.0627   0.04532
+
+  for (i = 0; i < data_length; i++) {
+    // All-zero section (filter coefficients in Q14).
+    tmp32 = WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[0], *in_ptr);
+    tmp32 += WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[1], filter_state[0]);
+    tmp32 += WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[2], filter_state[1]);
+    // Shift the input delay line and consume the current sample.
+    filter_state[1] = filter_state[0];
+    filter_state[0] = *in_ptr++;
+
+    // All-pole section (filter coefficients in Q14).
+    tmp32 -= WEBRTC_SPL_MUL_16_16(kHpPoleCoefs[1], filter_state[2]);
+    tmp32 -= WEBRTC_SPL_MUL_16_16(kHpPoleCoefs[2], filter_state[3]);
+    // Shift the output delay line; >> 14 scales the Q14 accumulator back to
+    // the input Q-domain before storing/emitting the sample.
+    filter_state[3] = filter_state[2];
+    filter_state[2] = (int16_t) (tmp32 >> 14);
+    *out_ptr++ = filter_state[2];
+  }
+}
+
+// All pass filtering of |data_in|, used before splitting the signal into two
+// frequency bands (low pass vs high pass).
+// Note that |data_in| and |data_out| can NOT correspond to the same address.
+//
+// - data_in            [i]   : Input audio signal given in Q0. Read with a
+//                              stride of two, i.e. every other sample is used.
+// - data_length        [i]   : Number of output samples produced (the input is
+//                              read over twice this many samples).
+// - filter_coefficient [i]   : Given in Q15.
+// - filter_state       [i/o] : State of the filter given in Q(-1).
+// - data_out           [o]   : Output audio signal given in Q(-1).
+static void AllPassFilter(const int16_t* data_in, int data_length,
+                          int16_t filter_coefficient, int16_t* filter_state,
+                          int16_t* data_out) {
+  // The filter can only cause overflow (in the w16 output variable)
+  // if more than 4 consecutive input numbers are of maximum value and
+  // has the same sign as the impulse responses first taps.
+  // First 6 taps of the impulse response:
+  // 0.6399 0.5905 -0.3779 0.2418 -0.1547 0.0990
+
+  int i;
+  int16_t tmp16 = 0;
+  int32_t tmp32 = 0;
+  int32_t state32 = ((int32_t) (*filter_state) << 16);  // Q15
+
+  for (i = 0; i < data_length; i++) {
+    // y[n] = state + coefficient * x[n]; the upper 16 bits are the Q(-1)
+    // output sample.
+    tmp32 = state32 + WEBRTC_SPL_MUL_16_16(filter_coefficient, *data_in);
+    tmp16 = (int16_t) (tmp32 >> 16);  // Q(-1)
+    *data_out++ = tmp16;
+    // New state = x[n] - coefficient * y[n], kept in the upper bits of
+    // |state32| (Q15).
+    state32 = (((int32_t) (*data_in)) << 14); // Q14
+    state32 -= WEBRTC_SPL_MUL_16_16(filter_coefficient, tmp16);  // Q14
+    state32 <<= 1;  // Q15.
+    data_in += 2;  // Skip every other input sample (downsampling by two).
+  }
+
+  *filter_state = (int16_t) (state32 >> 16);  // Q(-1)
+}
+
+// Splits |data_in| into |hp_data_out| and |lp_data_out| corresponding to
+// an upper (high pass) part and a lower (low pass) part respectively.
+//
+// - data_in      [i]   : Input audio data to be split into two frequency bands.
+// - data_length  [i]   : Length of |data_in|.
+// - upper_state  [i/o] : State of the upper filter, given in Q(-1).
+// - lower_state  [i/o] : State of the lower filter, given in Q(-1).
+// - hp_data_out  [o]   : Output audio data of the upper half of the spectrum.
+//                        The length is |data_length| / 2.
+// - lp_data_out  [o]   : Output audio data of the lower half of the spectrum.
+//                        The length is |data_length| / 2.
+static void SplitFilter(const int16_t* data_in, int data_length,
+                        int16_t* upper_state, int16_t* lower_state,
+                        int16_t* hp_data_out, int16_t* lp_data_out) {
+  int i;
+  int half_length = data_length >> 1;  // Downsampling by 2.
+  int16_t tmp_out;
+
+  // All-pass filtering upper branch (even-indexed samples of |data_in|).
+  AllPassFilter(&data_in[0], half_length, kAllPassCoefsQ15[0], upper_state,
+                hp_data_out);
+
+  // All-pass filtering lower branch (odd-indexed samples of |data_in|).
+  AllPassFilter(&data_in[1], half_length, kAllPassCoefsQ15[1], lower_state,
+                lp_data_out);
+
+  // Make LP and HP signals: the difference of the two branches forms the high
+  // band and their sum the low band. |tmp_out| saves the pre-update HP sample
+  // so both combinations are computed from the same inputs.
+  for (i = 0; i < half_length; i++) {
+    tmp_out = *hp_data_out;
+    *hp_data_out++ -= *lp_data_out;
+    *lp_data_out++ += tmp_out;
+  }
+}
+
+// Calculates the energy of |data_in| in dB, and also updates an overall
+// |total_energy| if necessary.
+//
+// - data_in      [i]   : Input audio data for energy calculation.
+// - data_length  [i]   : Length of input data.
+// - offset       [i]   : Offset value added to |log_energy|.
+// - total_energy [i/o] : An external energy updated with the energy of
+//                        |data_in|.
+//                        NOTE: |total_energy| is only updated if
+//                        |total_energy| <= MIN_ENERGY.
+// - log_energy   [o]   : 10 * log10("energy of |data_in|") given in Q4.
+static void LogOfEnergy(const int16_t* data_in, int data_length,
+                        int16_t offset, int16_t* total_energy,
+                        int16_t* log_energy) {
+  // |tot_rshifts| accumulates the number of right shifts performed on |energy|.
+  int tot_rshifts = 0;
+  // The |energy| will be normalized to 15 bits. We use unsigned integer because
+  // we eventually will mask out the fractional part.
+  uint32_t energy = 0;
+
+  assert(data_in != NULL);
+  assert(data_length > 0);
+
+  energy = (uint32_t) WebRtcSpl_Energy((int16_t*) data_in, data_length,
+                                       &tot_rshifts);
+
+  if (energy != 0) {
+    // By construction, normalizing to 15 bits is equivalent with 17 leading
+    // zeros of an unsigned 32 bit value.
+    int normalizing_rshifts = 17 - WebRtcSpl_NormU32(energy);
+    // In a 15 bit representation the leading bit is 2^14. log2(2^14) in Q10 is
+    // (14 << 10), which is what we initialize |log2_energy| with. For a more
+    // detailed derivations, see below.
+    int16_t log2_energy = kLogEnergyIntPart;
+
+    tot_rshifts += normalizing_rshifts;
+    // Normalize |energy| to 15 bits.
+    // |tot_rshifts| is now the total number of right shifts performed on
+    // |energy| after normalization. This means that |energy| is in
+    // Q(-tot_rshifts).
+    if (normalizing_rshifts < 0) {
+      energy <<= -normalizing_rshifts;
+    } else {
+      energy >>= normalizing_rshifts;
+    }
+
+    // Calculate the energy of |data_in| in dB, in Q4.
+    //
+    // 10 * log10("true energy") in Q4 = 2^4 * 10 * log10("true energy") =
+    // 160 * log10(|energy| * 2^|tot_rshifts|) =
+    // 160 * log10(2) * log2(|energy| * 2^|tot_rshifts|) =
+    // 160 * log10(2) * (log2(|energy|) + log2(2^|tot_rshifts|)) =
+    // (160 * log10(2)) * (log2(|energy|) + |tot_rshifts|) =
+    // |kLogConst| * (|log2_energy| + |tot_rshifts|)
+    //
+    // We know by construction that |energy| is normalized to 15 bits. Hence,
+    // |energy| = 2^14 + frac_Q15, where frac_Q15 is a fractional part in Q15.
+    // Further, we'd like |log2_energy| in Q10
+    // log2(|energy|) in Q10 = 2^10 * log2(2^14 + frac_Q15) =
+    // 2^10 * log2(2^14 * (1 + frac_Q15 * 2^-14)) =
+    // 2^10 * (14 + log2(1 + frac_Q15 * 2^-14)) ~=
+    // (14 << 10) + 2^10 * (frac_Q15 * 2^-14) =
+    // (14 << 10) + (frac_Q15 * 2^-4) = (14 << 10) + (frac_Q15 >> 4)
+    //
+    // Note that frac_Q15 = (|energy| & 0x00003FFF)
+
+    // Calculate and add the fractional part to |log2_energy|.
+    log2_energy += (int16_t) ((energy & 0x00003FFF) >> 4);
+
+    // |kLogConst| is in Q9, |log2_energy| in Q10 and |tot_rshifts| in Q0.
+    // Note that we in our derivation above have accounted for an output in Q4.
+    *log_energy = (int16_t) (WEBRTC_SPL_MUL_16_16_RSFT(
+        kLogConst, log2_energy, 19) +
+        WEBRTC_SPL_MUL_16_16_RSFT(tot_rshifts, kLogConst, 9));
+
+    // A log energy cannot be negative here; saturate at zero.
+    if (*log_energy < 0) {
+      *log_energy = 0;
+    }
+  } else {
+    // Zero energy: log10(0) is undefined, so output only the band offset.
+    *log_energy = offset;
+    return;
+  }
+
+  *log_energy += offset;
+
+  // Update the approximate |total_energy| with the energy of |data_in|, if
+  // |total_energy| has not exceeded MIN_ENERGY. |total_energy| is used as an
+  // energy indicator in WebRtcVad_GmmProbability() in vad_core.c.
+  if (*total_energy <= MIN_ENERGY) {
+    if (tot_rshifts >= 0) {
+      // We know by construction that the |energy| > MIN_ENERGY in Q0, so add an
+      // arbitrary value such that |total_energy| exceeds MIN_ENERGY.
+      *total_energy += MIN_ENERGY + 1;
+    } else {
+      // By construction |energy| is represented by 15 bits, hence any number of
+      // right shifted |energy| will fit in an int16_t. In addition, adding the
+      // value to |total_energy| is wrap around safe as long as
+      // MIN_ENERGY < 8192.
+      *total_energy += (int16_t) (energy >> -tot_rshifts);  // Q0.
+    }
+  }
+}
+
+int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in,
+                                    int data_length, int16_t* features) {
+  // Computes the log energies of six sub bands. |features| is filled from the
+  // highest band down: [5] 3000-4000 Hz, [4] 2000-3000 Hz, [3] 1000-2000 Hz,
+  // [2] 500-1000 Hz, [1] 250-500 Hz, [0] 80-250 Hz.
+  int16_t total_energy = 0;
+  // We expect |data_length| to be 80, 160 or 240 samples, which corresponds to
+  // 10, 20 or 30 ms in 8 kHz. Therefore, the intermediate downsampled data will
+  // have at most 120 samples after the first split and at most 60 samples after
+  // the second split.
+  int16_t hp_120[120], lp_120[120];
+  int16_t hp_60[60], lp_60[60];
+  const int half_data_length = data_length >> 1;
+  int length = half_data_length;  // |data_length| / 2, corresponds to
+                                  // bandwidth = 2000 Hz after downsampling.
+
+  // Initialize variables for the first SplitFilter().
+  // |frequency_band| selects the per-band filter state in |self|.
+  int frequency_band = 0;
+  const int16_t* in_ptr = data_in;  // [0 - 4000] Hz.
+  int16_t* hp_out_ptr = hp_120;  // [2000 - 4000] Hz.
+  int16_t* lp_out_ptr = lp_120;  // [0 - 2000] Hz.
+
+  assert(data_length >= 0);
+  assert(data_length <= 240);
+  assert(4 < NUM_CHANNELS - 1);  // Checking maximum |frequency_band|.
+
+  // Split at 2000 Hz and downsample.
+  SplitFilter(in_ptr, data_length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // For the upper band (2000 Hz - 4000 Hz) split at 3000 Hz and downsample.
+  frequency_band = 1;
+  in_ptr = hp_120;  // [2000 - 4000] Hz.
+  hp_out_ptr = hp_60;  // [3000 - 4000] Hz.
+  lp_out_ptr = lp_60;  // [2000 - 3000] Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 3000 Hz - 4000 Hz.
+  length >>= 1;  // |data_length| / 4 <=> bandwidth = 1000 Hz.
+
+  LogOfEnergy(hp_60, length, kOffsetVector[5], &total_energy, &features[5]);
+
+  // Energy in 2000 Hz - 3000 Hz.
+  LogOfEnergy(lp_60, length, kOffsetVector[4], &total_energy, &features[4]);
+
+  // For the lower band (0 Hz - 2000 Hz) split at 1000 Hz and downsample.
+  frequency_band = 2;
+  in_ptr = lp_120;  // [0 - 2000] Hz.
+  hp_out_ptr = hp_60;  // [1000 - 2000] Hz.
+  lp_out_ptr = lp_60;  // [0 - 1000] Hz.
+  length = half_data_length;  // |data_length| / 2 <=> bandwidth = 2000 Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 1000 Hz - 2000 Hz.
+  length >>= 1;  // |data_length| / 4 <=> bandwidth = 1000 Hz.
+  LogOfEnergy(hp_60, length, kOffsetVector[3], &total_energy, &features[3]);
+
+  // For the lower band (0 Hz - 1000 Hz) split at 500 Hz and downsample.
+  frequency_band = 3;
+  in_ptr = lp_60;  // [0 - 1000] Hz.
+  hp_out_ptr = hp_120;  // [500 - 1000] Hz.
+  lp_out_ptr = lp_120;  // [0 - 500] Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 500 Hz - 1000 Hz.
+  length >>= 1;  // |data_length| / 8 <=> bandwidth = 500 Hz.
+  LogOfEnergy(hp_120, length, kOffsetVector[2], &total_energy, &features[2]);
+
+  // For the lower band (0 Hz - 500 Hz) split at 250 Hz and downsample.
+  frequency_band = 4;
+  in_ptr = lp_120;  // [0 - 500] Hz.
+  hp_out_ptr = hp_60;  // [250 - 500] Hz.
+  lp_out_ptr = lp_60;  // [0 - 250] Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 250 Hz - 500 Hz.
+  length >>= 1;  // |data_length| / 16 <=> bandwidth = 250 Hz.
+  LogOfEnergy(hp_60, length, kOffsetVector[1], &total_energy, &features[1]);
+
+  // Remove 0 Hz - 80 Hz, by high pass filtering the lower band.
+  HighPassFilter(lp_60, length, self->hp_filter_state, hp_120);
+
+  // Energy in 80 Hz - 250 Hz.
+  LogOfEnergy(hp_120, length, kOffsetVector[0], &total_energy, &features[0]);
+
+  return total_energy;
+}
diff --git a/trunk/src/common_audio/vad/vad_filterbank.h b/trunk/src/common_audio/vad/vad_filterbank.h
new file mode 100644
index 0000000..0c5c00c
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_filterbank.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes feature calculating functionality used in vad_core.c.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_FILTERBANK_H_
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_FILTERBANK_H_
+
+#include "typedefs.h"
+#include "vad_core.h"
+
+// Takes |data_length| samples of |data_in| and calculates the logarithm of the
+// energy of each of the |NUM_CHANNELS| = 6 frequency bands used by the VAD:
+//        80 Hz - 250 Hz
+//        250 Hz - 500 Hz
+//        500 Hz - 1000 Hz
+//        1000 Hz - 2000 Hz
+//        2000 Hz - 3000 Hz
+//        3000 Hz - 4000 Hz
+//
+// The values are given in Q4 and written to |features|. Further, an approximate
+// overall energy is returned. The return value is used in
+// WebRtcVad_GmmProbability() as a signal indicator, hence it is arbitrary above
+// the threshold MIN_ENERGY.
+//
+// - self         [i/o] : State information of the VAD.
+// - data_in      [i]   : Input audio data, for feature extraction.
+// - data_length  [i]   : Audio data size, in number of samples.
+// - features     [o]   : 10 * log10(energy in each frequency band), Q4.
+// - returns            : Total energy of the signal (NOTE! This value is not
+//                        exact. It is only used in a comparison.)
+int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in,
+                                    int data_length, int16_t* features);
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_FILTERBANK_H_
diff --git a/trunk/src/common_audio/vad/vad_filterbank_unittest.cc b/trunk/src/common_audio/vad/vad_filterbank_unittest.cc
new file mode 100644
index 0000000..320fda9
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_filterbank_unittest.cc
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_core.h"
+#include "vad_defines.h"
+#include "vad_filterbank.h"
+}
+
+namespace {
+
+enum { kNumValidFrameLengths = 3 };
+
+TEST_F(VadTest, vad_filterbank) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+  // Expected total energies and per-band features; one row per valid frame
+  // length at 8 kHz (hence kNumValidFrameLengths rows).
+  static const int16_t kReference[kNumValidFrameLengths] = { 48, 11, 11 };
+  static const int16_t kFeatures[kNumValidFrameLengths * NUM_CHANNELS] = {
+      1213, 759, 587, 462, 434, 272,
+      1479, 1385, 1291, 1200, 1103, 1099,
+      1732, 1692, 1681, 1629, 1436, 1436
+  };
+  // Mirrors the offsets added per band in vad_filterbank.c for zero input.
+  static const int16_t kOffsetVector[NUM_CHANNELS] = {
+      368, 368, 272, 176, 176, 176 };
+  int16_t features[NUM_CHANNELS];
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  int16_t speech[kMaxFrameLength];
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    speech[i] = (i * i);
+  }
+
+  int frame_length_index = 0;
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(kReference[frame_length_index],
+                WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
+                                            features));
+      for (int k = 0; k < NUM_CHANNELS; ++k) {
+        EXPECT_EQ(kFeatures[k + frame_length_index * NUM_CHANNELS],
+                  features[k]);
+      }
+      frame_length_index++;
+    }
+  }
+  // Sanity check that exactly the expected number of rows was consumed.
+  EXPECT_EQ(kNumValidFrameLengths, frame_length_index);
+
+  // Verify that an all-zero input gives kOffsetVector out.
+  memset(speech, 0, sizeof(speech));
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
+                                               features));
+      for (int k = 0; k < NUM_CHANNELS; ++k) {
+        EXPECT_EQ(kOffsetVector[k], features[k]);
+      }
+    }
+  }
+
+  // Verify that an all-ones input gives kOffsetVector out. Any other constant
+  // input will have a small impact in the sub bands.
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    speech[i] = 1;
+  }
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      // Re-initialize per iteration so filter state does not leak between
+      // frame lengths.
+      ASSERT_EQ(0, WebRtcVad_InitCore(self));
+      EXPECT_EQ(0, WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
+                                               features));
+      for (int k = 0; k < NUM_CHANNELS; ++k) {
+        EXPECT_EQ(kOffsetVector[k], features[k]);
+      }
+    }
+  }
+
+  free(self);
+}
+}  // namespace
diff --git a/trunk/src/common_audio/vad/vad_gmm.c b/trunk/src/common_audio/vad/vad_gmm.c
new file mode 100644
index 0000000..20a703a
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_gmm.c
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vad_gmm.h"
+
+#include "signal_processing_library.h"
+#include "typedefs.h"
+
+static const int32_t kCompVar = 22005;
+static const int16_t kLog2Exp = 5909;  // log2(exp(1)) in Q12.
+
+// For a normal distribution, the probability of |input| is calculated and
+// returned (in Q20). The formula for normal distributed probability is
+//
+// 1 / s * exp(-(x - m)^2 / (2 * s^2))
+//
+// where the parameters are given in the following Q domains:
+// m = |mean| (Q7)
+// s = |std| (Q7)
+// x = |input| (Q4)
+// in addition to the probability we output |delta| (in Q11) used when updating
+// the noise/speech model.
+int32_t WebRtcVad_GaussianProbability(int16_t input,
+                                      int16_t mean,
+                                      int16_t std,
+                                      int16_t* delta) {
+  int16_t tmp16, inv_std, inv_std2, exp_value = 0;
+  int32_t tmp32;
+
+  // Calculate |inv_std| = 1 / s, in Q10.
+  // 131072 = 1 in Q17, and (|std| >> 1) is for rounding instead of truncation.
+  // Q-domain: Q17 / Q7 = Q10.
+  tmp32 = (int32_t) 131072 + (int32_t) (std >> 1);
+  inv_std = (int16_t) WebRtcSpl_DivW32W16(tmp32, std);
+
+  // Calculate |inv_std2| = 1 / s^2, in Q14.
+  tmp16 = (inv_std >> 2);  // Q10 -> Q8.
+  // Q-domain: (Q8 * Q8) >> 2 = Q14.
+  inv_std2 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp16, tmp16, 2);
+  // TODO(bjornv): Investigate if changing to
+  // |inv_std2| = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(|inv_std|, |inv_std|, 6);
+  // gives better accuracy.
+
+  tmp16 = (input << 3);  // Q4 -> Q7
+  tmp16 = tmp16 - mean;  // Q7 - Q7 = Q7
+
+  // To be used later, when updating noise/speech model.
+  // |delta| = (x - m) / s^2, in Q11.
+  // Q-domain: (Q14 * Q7) >> 10 = Q11.
+  *delta = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inv_std2, tmp16, 10);
+
+  // Calculate the exponent |tmp32| = (x - m)^2 / (2 * s^2), in Q10. Replacing
+  // division by two with one shift.
+  // Q-domain: (Q11 * Q7) >> 8 = Q10.
+  tmp32 = WEBRTC_SPL_MUL_16_16_RSFT(*delta, tmp16, 9);
+
+  // If the exponent is small enough to give a non-zero probability we calculate
+  // |exp_value| ~= exp(-(x - m)^2 / (2 * s^2))
+  //             ~= exp2(-log2(exp(1)) * |tmp32|).
+  // |kCompVar| is the exponent bound (Q10); larger exponents leave
+  // |exp_value| = 0 and hence a zero probability.
+  if (tmp32 < kCompVar) {
+    // Calculate |tmp16| = log2(exp(1)) * |tmp32|, in Q10.
+    // Q-domain: (Q12 * Q10) >> 12 = Q10.
+    tmp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(kLog2Exp, (int16_t) tmp32, 12);
+    tmp16 = -tmp16;
+    // exp2() approximation: the low 10 bits of |tmp16| form a Q10 mantissa in
+    // [1, 2), which is then shifted down by the integer part of the exponent
+    // (recovered via the bitwise negate, shift and increment below).
+    exp_value = (0x0400 | (tmp16 & 0x03FF));
+    tmp16 ^= 0xFFFF;
+    tmp16 >>= 10;
+    tmp16 += 1;
+    // Get |exp_value| = exp(-|tmp32|) in Q10.
+    exp_value >>= tmp16;
+  }
+
+  // Calculate and return (1 / s) * exp(-(x - m)^2 / (2 * s^2)), in Q20.
+  // Q-domain: Q10 * Q10 = Q20.
+  return WEBRTC_SPL_MUL_16_16(inv_std, exp_value);
+}
diff --git a/trunk/src/common_audio/vad/vad_gmm.h b/trunk/src/common_audio/vad/vad_gmm.h
new file mode 100644
index 0000000..2333af7
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_gmm.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Gaussian probability calculations internally used in vad_core.c.
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_GMM_H_
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_GMM_H_
+
+#include "typedefs.h"
+
+// Calculates the probability for |input|, given that |input| comes from a
+// normal distribution with mean and standard deviation (|mean|, |std|).
+//
+// Inputs:
+//      - input         : input sample in Q4.
+//      - mean          : mean input in the statistical model, Q7.
+//      - std           : standard deviation, Q7.
+//
+// Output:
+//
+//      - delta         : input used when updating the model, Q11.
+//                        |delta| = (|input| - |mean|) / |std|^2.
+//
+// Return:
+//   (probability for |input|) =
+//    1 / |std| * exp(-(|input| - |mean|)^2 / (2 * |std|^2));
+int32_t WebRtcVad_GaussianProbability(int16_t input,
+                                      int16_t mean,
+                                      int16_t std,
+                                      int16_t* delta);
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_GMM_H_
diff --git a/trunk/src/common_audio/vad/vad_gmm_unittest.cc b/trunk/src/common_audio/vad/vad_gmm_unittest.cc
new file mode 100644
index 0000000..205435a
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_gmm_unittest.cc
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_gmm.h"
+}
+
+namespace {
+
+TEST_F(VadTest, vad_gmm) {
+  int16_t delta = 0;
+  // Case table for WebRtcVad_GaussianProbability():
+  // {input (Q4), mean (Q7), std (Q7), expected probability (Q20),
+  //  expected delta (Q11)}.
+  static const struct {
+    int16_t input;
+    int16_t mean;
+    int16_t std;
+    int32_t probability;
+    int16_t delta;
+  } kCases[] = {
+    // Input at the distribution mean: probability is 1 / std in Q20, delta 0.
+    { 0, 0, 128, 1048576, 0 },
+    { 16, 128, 128, 1048576, 0 },
+    { -16, -128, 128, 1048576, 0 },
+    // Largest deviations from the mean still giving non-zero probability.
+    { 59, 0, 128, 1024, 7552 },
+    { 75, 128, 128, 1024, 7552 },
+    { -75, -128, 128, 1024, -7552 },
+    // Too far from the mean: probability rounds down to zero.
+    { 105, 0, 128, 0, 13440 },
+  };
+  for (size_t i = 0; i < sizeof(kCases) / sizeof(kCases[0]); ++i) {
+    EXPECT_EQ(kCases[i].probability,
+              WebRtcVad_GaussianProbability(kCases[i].input, kCases[i].mean,
+                                            kCases[i].std, &delta));
+    EXPECT_EQ(kCases[i].delta, delta);
+  }
+}
+}  // namespace
diff --git a/trunk/src/common_audio/vad/vad_sp.c b/trunk/src/common_audio/vad/vad_sp.c
new file mode 100644
index 0000000..4fface3
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_sp.c
@@ -0,0 +1,181 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vad_sp.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+#include "typedefs.h"
+#include "vad_defines.h"
+
+// Allpass filter coefficients, upper and lower, in Q13.
+// Upper: 0.64, Lower: 0.17.
+static const int16_t kAllPassCoefsQ13[2] = { 5243, 1392 };  // Q13
+
+// TODO(bjornv): Move this function to vad_filterbank.c.
+// Downsampling filter based on splitting filter and allpass functions.
+void WebRtcVad_Downsampling(int16_t* signal_in,
+                            int16_t* signal_out,
+                            int32_t* filter_state,
+                            int in_length) {
+  int16_t tmp16_1 = 0, tmp16_2 = 0;
+  int32_t tmp32_1 = filter_state[0];
+  int32_t tmp32_2 = filter_state[1];
+  int n = 0;
+  int half_length = (in_length >> 1);  // Downsampling by 2 gives half length.
+
+  // Filter coefficients in Q13, filter state in Q0.
+  for (n = 0; n < half_length; n++) {
+    // All-pass filtering upper branch.
+    tmp16_1 = (int16_t) ((tmp32_1 >> 1) +
+        WEBRTC_SPL_MUL_16_16_RSFT(kAllPassCoefsQ13[0], *signal_in, 14));
+    *signal_out = tmp16_1;
+    tmp32_1 = (int32_t) (*signal_in++) -
+        WEBRTC_SPL_MUL_16_16_RSFT(kAllPassCoefsQ13[0], tmp16_1, 12);
+
+    // All-pass filtering lower branch.
+    tmp16_2 = (int16_t) ((tmp32_2 >> 1) +
+        WEBRTC_SPL_MUL_16_16_RSFT(kAllPassCoefsQ13[1], *signal_in, 14));
+    *signal_out++ += tmp16_2;
+    tmp32_2 = (int32_t) (*signal_in++) -
+        WEBRTC_SPL_MUL_16_16_RSFT(kAllPassCoefsQ13[1], tmp16_2, 12);
+  }
+  // Store the filter states.
+  filter_state[0] = tmp32_1;
+  filter_state[1] = tmp32_2;
+}
+
+// Inserts |feature_value| into |low_value_vector|, if it is one of the 16
+// smallest values the last 100 frames. Then calculates and returns the median
+// of the five smallest values.
+int16_t WebRtcVad_FindMinimum(VadInstT* self,
+                              int16_t feature_value,
+                              int channel) {
+  int i = 0, j = 0;
+  int position = -1;
+  // Offset to beginning of the 16 minimum values in memory.
+  int offset = (channel << 4);
+  int16_t current_median = 1600;
+  int16_t alpha = 0;
+  int32_t tmp32 = 0;
+  // Pointer to memory for the 16 minimum values and the age of each value of
+  // the |channel|.
+  int16_t* age_ptr = &self->index_vector[offset];
+  int16_t* value_ptr = &self->low_value_vector[offset];
+  int16_t *p1, *p2, *p3;
+
+  assert(channel < NUM_CHANNELS);
+
+  // Each value in |low_value_vector| is getting 1 loop older.
+  // Update age of each value in |age_ptr|, and remove old values.
+  for (i = 0; i < 16; i++) {
+    p3 = age_ptr + i;
+    if (*p3 != 100) {
+      *p3 += 1;
+    } else {
+      p1 = value_ptr + i + 1;
+      p2 = p3 + 1;
+      for (j = i; j < 16; j++) {
+        *(value_ptr + j) = *p1++;
+        *(age_ptr + j) = *p2++;
+      }
+      *(age_ptr + 15) = 101;
+      *(value_ptr + 15) = 10000;
+    }
+  }
+
+  // Check if |feature_value| is smaller than any of the values in
+  // |low_value_vector|. If so, find the |position| where to insert the new
+  // value.
+  if (feature_value < *(value_ptr + 7)) {
+    if (feature_value < *(value_ptr + 3)) {
+      if (feature_value < *(value_ptr + 1)) {
+        if (feature_value < *value_ptr) {
+          position = 0;
+        } else {
+          position = 1;
+        }
+      } else if (feature_value < *(value_ptr + 2)) {
+        position = 2;
+      } else {
+        position = 3;
+      }
+    } else if (feature_value < *(value_ptr + 5)) {
+      if (feature_value < *(value_ptr + 4)) {
+        position = 4;
+      } else {
+        position = 5;
+      }
+    } else if (feature_value < *(value_ptr + 6)) {
+      position = 6;
+    } else {
+      position = 7;
+    }
+  } else if (feature_value < *(value_ptr + 15)) {
+    if (feature_value < *(value_ptr + 11)) {
+      if (feature_value < *(value_ptr + 9)) {
+        if (feature_value < *(value_ptr + 8)) {
+          position = 8;
+        } else {
+          position = 9;
+        }
+      } else if (feature_value < *(value_ptr + 10)) {
+        position = 10;
+      } else {
+        position = 11;
+      }
+    } else if (feature_value < *(value_ptr + 13)) {
+      if (feature_value < *(value_ptr + 12)) {
+        position = 12;
+      } else {
+        position = 13;
+      }
+    } else if (feature_value < *(value_ptr + 14)) {
+      position = 14;
+    } else {
+      position = 15;
+    }
+  }
+
+  // If we have a new small value, put it in the correct position and shift
+  // larger values up.
+  if (position > -1) {
+    for (i = 15; i > position; i--) {
+      j = i - 1;
+      *(value_ptr + i) = *(value_ptr + j);
+      *(age_ptr + i) = *(age_ptr + j);
+    }
+    *(value_ptr + position) = feature_value;
+    *(age_ptr + position) = 1;
+  }
+
+  // Get |current_median|.
+  if (self->frame_counter > 2) {
+    current_median = *(value_ptr + 2);
+  } else if (self->frame_counter > 0) {
+    current_median = *value_ptr;
+  }
+
+  // Smooth the median value.
+  if (self->frame_counter > 0) {
+    if (current_median < self->mean_value[channel]) {
+      alpha = (int16_t) ALPHA1;  // 0.2 in Q15.
+    } else {
+      alpha = (int16_t) ALPHA2;  // 0.99 in Q15.
+    }
+  }
+  tmp32 = WEBRTC_SPL_MUL_16_16(alpha + 1, self->mean_value[channel]);
+  tmp32 += WEBRTC_SPL_MUL_16_16(WEBRTC_SPL_WORD16_MAX - alpha, current_median);
+  tmp32 += 16384;
+  self->mean_value[channel] = (int16_t) (tmp32 >> 15);
+
+  return self->mean_value[channel];
+}
diff --git a/trunk/src/common_audio/vad/vad_sp.h b/trunk/src/common_audio/vad/vad_sp.h
new file mode 100644
index 0000000..9e8b204
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_sp.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes specific signal processing tools used in vad_core.c.
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_SP_H_
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_SP_H_
+
+#include "typedefs.h"
+#include "vad_core.h"
+
+// Downsamples the signal by a factor 2, eg. 32->16 or 16->8.
+//
+// Inputs:
+//      - signal_in     : Input signal.
+//      - in_length     : Length of input signal in samples.
+//
+// Input & Output:
+//      - filter_state  : Current filter states of the two all-pass filters. The
+//                        |filter_state| is updated after all samples have been
+//                        processed.
+//
+// Output:
+//      - signal_out    : Downsampled signal (of length |in_length| / 2).
+void WebRtcVad_Downsampling(int16_t* signal_in,
+                            int16_t* signal_out,
+                            int32_t* filter_state,
+                            int in_length);
+
+// Updates and returns the smoothed feature minimum. As minimum we use the
+// median of the five smallest feature values in a 100 frames long window.
+// As long as |handle->frame_counter| is zero, that is, we haven't received any
+// "valid" data, FindMinimum() outputs the default value of 1600.
+//
+// Inputs:
+//      - feature_value : New feature value to update with.
+//      - channel       : Channel number.
+//
+// Input & Output:
+//      - handle        : State information of the VAD.
+//
+// Returns:
+//                      : Smoothed minimum value for a moving window.
+int16_t WebRtcVad_FindMinimum(VadInstT* handle,
+                              int16_t feature_value,
+                              int channel);
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_SP_H_
diff --git a/trunk/src/common_audio/vad/vad_sp_unittest.cc b/trunk/src/common_audio/vad/vad_sp_unittest.cc
new file mode 100644
index 0000000..03c844e
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_sp_unittest.cc
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_core.h"
+#include "vad_defines.h"
+#include "vad_sp.h"
+}
+
+namespace {
+
+TEST_F(VadTest, vad_sp) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+  int16_t zeros[kMaxFrameLength] = { 0 };
+  int32_t state[2] = { 0 };
+  int16_t data_in[kMaxFrameLength];
+  int16_t data_out[kMaxFrameLength];
+
+  // We expect the first value to be 1600 as long as |frame_counter| is zero,
+  // which is true for the first iteration.
+  static const int16_t kReferenceMin[32] = {
+      1600, 720, 509, 512, 532, 552, 570, 588,
+       606, 624, 642, 659, 675, 691, 707, 723,
+      1600, 544, 502, 522, 542, 561, 579, 597,
+       615, 633, 651, 667, 683, 699, 715, 731
+  };
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    data_in[i] = (i * i);
+  }
+  // Input values all zeros, expect all zeros out.
+  WebRtcVad_Downsampling(zeros, data_out, state,
+                         static_cast<int>(kMaxFrameLength));
+  EXPECT_EQ(0, state[0]);
+  EXPECT_EQ(0, state[1]);
+  for (int16_t i = 0; i < kMaxFrameLength / 2; ++i) {
+    EXPECT_EQ(0, data_out[i]);
+  }
+  // Make a simple non-zero data test.
+  WebRtcVad_Downsampling(data_in, data_out, state,
+                         static_cast<int>(kMaxFrameLength));
+  EXPECT_EQ(207, state[0]);
+  EXPECT_EQ(2270, state[1]);
+
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  // TODO(bjornv): Replace this part of the test with taking values from an
+  // array and calculate the reference value here. Make sure the values are not
+  // ordered.
+  for (int16_t i = 0; i < 16; ++i) {
+    int16_t value = 500 * (i + 1);
+    for (int j = 0; j < NUM_CHANNELS; ++j) {
+      // Use values both above and below initialized value.
+      EXPECT_EQ(kReferenceMin[i], WebRtcVad_FindMinimum(self, value, j));
+      EXPECT_EQ(kReferenceMin[i + 16], WebRtcVad_FindMinimum(self, 12000, j));
+    }
+    self->frame_counter++;
+  }
+
+  free(self);
+}
+}  // namespace
diff --git a/trunk/src/common_audio/vad/vad_unittest.cc b/trunk/src/common_audio/vad/vad_unittest.cc
new file mode 100644
index 0000000..60ee208
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_unittest.cc
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vad_unittest.h"
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "webrtc_vad.h"
+
+VadTest::VadTest() {}
+
+void VadTest::SetUp() {}
+
+void VadTest::TearDown() {}
+
+// Returns true if the rate and frame length combination is valid.
+bool VadTest::ValidRatesAndFrameLengths(int16_t rate, int16_t frame_length) {
+  if (rate == 8000) {
+    if (frame_length == 80 || frame_length == 160 || frame_length == 240) {
+      return true;
+    }
+    return false;
+  } else if (rate == 16000) {
+    if (frame_length == 160 || frame_length == 320 || frame_length == 480) {
+      return true;
+    }
+    return false;
+  }
+  if (rate == 32000) {
+    if (frame_length == 320 || frame_length == 640 || frame_length == 960) {
+      return true;
+    }
+    return false;
+  }
+
+  return false;
+}
+
+namespace {
+
+TEST_F(VadTest, ApiTest) {
+  // This API test runs through the APIs for all possible valid and invalid
+  // combinations.
+
+  VadInst* handle = NULL;
+  int16_t zeros[kMaxFrameLength] = { 0 };
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  int16_t speech[kMaxFrameLength];
+  for (int16_t i = 0; i < kMaxFrameLength; i++) {
+    speech[i] = (i * i);
+  }
+
+  // Null instance tests
+  EXPECT_EQ(-1, WebRtcVad_Create(NULL));
+  EXPECT_EQ(-1, WebRtcVad_Init(NULL));
+  EXPECT_EQ(-1, WebRtcVad_Assign(NULL, NULL));
+  EXPECT_EQ(-1, WebRtcVad_Free(NULL));
+  EXPECT_EQ(-1, WebRtcVad_set_mode(NULL, kModes[0]));
+  EXPECT_EQ(-1, WebRtcVad_Process(NULL, kRates[0], speech, kFrameLengths[0]));
+
+  // WebRtcVad_AssignSize tests
+  int handle_size_bytes = 0;
+  EXPECT_EQ(0, WebRtcVad_AssignSize(&handle_size_bytes));
+  EXPECT_EQ(576, handle_size_bytes);
+
+  // WebRtcVad_Assign tests
+  void* tmp_handle = malloc(handle_size_bytes);
+  EXPECT_EQ(-1, WebRtcVad_Assign(&handle, NULL));
+  EXPECT_EQ(0, WebRtcVad_Assign(&handle, tmp_handle));
+  EXPECT_EQ(handle, tmp_handle);
+  free(tmp_handle);
+
+  // WebRtcVad_Create()
+  ASSERT_EQ(0, WebRtcVad_Create(&handle));
+
+  // Not initialized tests
+  EXPECT_EQ(-1, WebRtcVad_Process(handle, kRates[0], speech, kFrameLengths[0]));
+  EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[0]));
+
+  // WebRtcVad_Init() test
+  ASSERT_EQ(0, WebRtcVad_Init(handle));
+
+  // WebRtcVad_set_mode() invalid modes tests
+  EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[0] - 1));
+  EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[kModesSize - 1] + 1));
+
+  // WebRtcVad_Process() tests
+  // NULL speech pointer
+  EXPECT_EQ(-1, WebRtcVad_Process(handle, kRates[0], NULL, kFrameLengths[0]));
+  // Invalid sampling rate
+  EXPECT_EQ(-1, WebRtcVad_Process(handle, 9999, speech, kFrameLengths[0]));
+  // All zeros as input should work
+  EXPECT_EQ(0, WebRtcVad_Process(handle, kRates[0], zeros, kFrameLengths[0]));
+  for (size_t k = 0; k < kModesSize; k++) {
+    // Test valid modes
+    EXPECT_EQ(0, WebRtcVad_set_mode(handle, kModes[k]));
+    // Loop through sampling rate and frame length combinations
+    for (size_t i = 0; i < kRatesSize; i++) {
+      for (size_t j = 0; j < kFrameLengthsSize; j++) {
+        if (ValidRatesAndFrameLengths(kRates[i], kFrameLengths[j])) {
+          EXPECT_EQ(1, WebRtcVad_Process(handle,
+                                         kRates[i],
+                                         speech,
+                                         kFrameLengths[j]));
+        } else {
+          EXPECT_EQ(-1, WebRtcVad_Process(handle,
+                                          kRates[i],
+                                          speech,
+                                          kFrameLengths[j]));
+        }
+      }
+    }
+  }
+
+  EXPECT_EQ(0, WebRtcVad_Free(handle));
+}
+
+// TODO(bjornv): Add a process test, run on file.
+
+}  // namespace
diff --git a/trunk/src/common_audio/vad/vad_unittest.h b/trunk/src/common_audio/vad/vad_unittest.h
new file mode 100644
index 0000000..f68427c
--- /dev/null
+++ b/trunk/src/common_audio/vad/vad_unittest.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_UNIT_TESTS_H
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_UNIT_TESTS_H
+
+#include <stddef.h>  // size_t
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+
+namespace {
+
+// Modes we support
+const int kModes[] = { 0, 1, 2, 3 };
+const size_t kModesSize = sizeof(kModes) / sizeof(*kModes);
+
+// Rates we support.
+const int16_t kRates[] = { 8000, 12000, 16000, 24000, 32000 };
+const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
+
+// Frame lengths we support.
+const int16_t kMaxFrameLength = 960;
+const int16_t kFrameLengths[] = { 80, 120, 160, 240, 320, 480, 640,
+    kMaxFrameLength };
+const size_t kFrameLengthsSize = sizeof(kFrameLengths) / sizeof(*kFrameLengths);
+
+}  // namespace
+
+class VadTest : public ::testing::Test {
+ protected:
+  VadTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  // Returns true if the rate and frame length combination is valid.
+  bool ValidRatesAndFrameLengths(int16_t rate, int16_t frame_length);
+};
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_UNIT_TESTS_H
diff --git a/trunk/src/common_audio/vad/webrtc_vad.c b/trunk/src/common_audio/vad/webrtc_vad.c
new file mode 100644
index 0000000..26941e6
--- /dev/null
+++ b/trunk/src/common_audio/vad/webrtc_vad.c
@@ -0,0 +1,165 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes the VAD API calls. For a specific function call description,
+ * see webrtc_vad.h
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "webrtc_vad.h"
+#include "vad_core.h"
+
+static const int kInitCheck = 42;
+
+WebRtc_Word16 WebRtcVad_AssignSize(int *size_in_bytes)
+{
+    *size_in_bytes = sizeof(VadInstT) * 2 / sizeof(WebRtc_Word16);
+    return 0;
+}
+
+WebRtc_Word16 WebRtcVad_Assign(VadInst **vad_inst, void *vad_inst_addr)
+{
+
+    if (vad_inst == NULL)
+    {
+        return -1;
+    }
+
+    if (vad_inst_addr != NULL)
+    {
+        *vad_inst = (VadInst*)vad_inst_addr;
+        return 0;
+    } else
+    {
+        return -1;
+    }
+}
+
+int WebRtcVad_Create(VadInst** handle) {
+  VadInstT* self = NULL;
+
+  if (handle == NULL) {
+    return -1;
+  }
+
+  *handle = NULL;
+  self = (VadInstT*) malloc(sizeof(VadInstT));
+  *handle = (VadInst*) self;
+
+  if (self == NULL) {
+    return -1;
+  }
+
+  self->init_flag = 0;
+
+  return 0;
+}
+
+int WebRtcVad_Free(VadInst* handle) {
+  if (handle == NULL) {
+    return -1;
+  }
+
+  free(handle);
+
+  return 0;
+}
+
+int WebRtcVad_Init(VadInst* handle) {
+  // Initialize the core VAD component.
+  return WebRtcVad_InitCore((VadInstT*) handle);
+}
+
+int WebRtcVad_set_mode(VadInst *vad_inst, int mode)
+{
+    VadInstT* vad_ptr;
+
+    if (vad_inst == NULL)
+    {
+        return -1;
+    }
+
+    vad_ptr = (VadInstT*)vad_inst;
+    if (vad_ptr->init_flag != kInitCheck)
+    {
+        return -1;
+    }
+
+    return WebRtcVad_set_mode_core((VadInstT*)vad_inst, mode);
+}
+
+WebRtc_Word16 WebRtcVad_Process(VadInst *vad_inst,
+                                WebRtc_Word16 fs,
+                                WebRtc_Word16 *speech_frame,
+                                WebRtc_Word16 frame_length)
+{
+    WebRtc_Word16 vad;
+    VadInstT* vad_ptr;
+
+    if (vad_inst == NULL)
+    {
+        return -1;
+    }
+
+    vad_ptr = (VadInstT*)vad_inst;
+    if (vad_ptr->init_flag != kInitCheck)
+    {
+        return -1;
+    }
+
+    if (speech_frame == NULL)
+    {
+        return -1;
+    }
+
+    if (fs == 32000)
+    {
+        if ((frame_length != 320) && (frame_length != 640) && (frame_length != 960))
+        {
+            return -1;
+        }
+        vad = WebRtcVad_CalcVad32khz((VadInstT*)vad_inst, speech_frame, frame_length);
+
+    } else if (fs == 16000)
+    {
+        if ((frame_length != 160) && (frame_length != 320) && (frame_length != 480))
+        {
+            return -1;
+        }
+        vad = WebRtcVad_CalcVad16khz((VadInstT*)vad_inst, speech_frame, frame_length);
+
+    } else if (fs == 8000)
+    {
+        if ((frame_length != 80) && (frame_length != 160) && (frame_length != 240))
+        {
+            return -1;
+        }
+        vad = WebRtcVad_CalcVad8khz((VadInstT*)vad_inst, speech_frame, frame_length);
+
+    } else
+    {
+        return -1; // Not a supported sampling frequency
+    }
+
+    if (vad > 0)
+    {
+        return 1;
+    } else if (vad == 0)
+    {
+        return 0;
+    } else
+    {
+        return -1;
+    }
+}
diff --git a/trunk/src/common_types.h b/trunk/src/common_types.h
new file mode 100644
index 0000000..9b3a0ce
--- /dev/null
+++ b/trunk/src/common_types.h
@@ -0,0 +1,573 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_TYPES_H
+#define WEBRTC_COMMON_TYPES_H
+
+#include "typedefs.h"
+
+#ifdef WEBRTC_EXPORT
+    #define WEBRTC_DLLEXPORT _declspec(dllexport)
+#elif WEBRTC_DLL
+    #define WEBRTC_DLLEXPORT _declspec(dllimport)
+#else
+    #define WEBRTC_DLLEXPORT
+#endif
+
+#ifndef NULL
+    #define NULL 0
+#endif
+
+#define RTP_PAYLOAD_NAME_SIZE 32
+
+namespace webrtc {
+
+class InStream
+{
+public:
+    virtual int Read(void *buf,int len) = 0;
+    virtual int Rewind() {return -1;}
+    virtual ~InStream() {}
+protected:
+    InStream() {}
+};
+
+class OutStream
+{
+public:
+    virtual bool Write(const void *buf,int len) = 0;
+    virtual int Rewind() {return -1;}
+    virtual ~OutStream() {}
+protected:
+    OutStream() {}
+};
+
+enum TraceModule
+{
+    // not a module, triggered from the engine code
+    kTraceVoice              = 0x0001,
+    // not a module, triggered from the engine code
+    kTraceVideo              = 0x0002,
+    // not a module, triggered from the utility code
+    kTraceUtility            = 0x0003,
+    kTraceRtpRtcp            = 0x0004,
+    kTraceTransport          = 0x0005,
+    kTraceSrtp               = 0x0006,
+    kTraceAudioCoding        = 0x0007,
+    kTraceAudioMixerServer   = 0x0008,
+    kTraceAudioMixerClient   = 0x0009,
+    kTraceFile               = 0x000a,
+    kTraceAudioProcessing    = 0x000b,
+    kTraceVideoCoding        = 0x0010,
+    kTraceVideoMixer         = 0x0011,
+    kTraceAudioDevice        = 0x0012,
+    kTraceVideoRenderer      = 0x0014,
+    kTraceVideoCapture       = 0x0015,
+    kTraceVideoPreocessing   = 0x0016
+};
+
+enum TraceLevel
+{
+    kTraceNone               = 0x0000,    // no trace
+    kTraceStateInfo          = 0x0001,
+    kTraceWarning            = 0x0002,
+    kTraceError              = 0x0004,
+    kTraceCritical           = 0x0008,
+    kTraceApiCall            = 0x0010,
+    kTraceDefault            = 0x00ff,
+
+    kTraceModuleCall         = 0x0020,
+    kTraceMemory             = 0x0100,   // memory info
+    kTraceTimer              = 0x0200,   // timing info
+    kTraceStream             = 0x0400,   // "continuous" stream of data
+
+    // used for debug purposes
+    kTraceDebug              = 0x0800,  // debug
+    kTraceInfo               = 0x1000,  // debug info
+
+    kTraceAll                = 0xffff
+};
+
+// External Trace API
+class TraceCallback
+{
+public:
+    virtual void Print(const TraceLevel level,
+                       const char *traceString,
+                       const int length) = 0;
+protected:
+    virtual ~TraceCallback() {}
+    TraceCallback() {}
+};
+
+
+enum FileFormats
+{
+    kFileFormatWavFile        = 1,
+    kFileFormatCompressedFile = 2,
+    kFileFormatAviFile        = 3,
+    kFileFormatPreencodedFile = 4,
+    kFileFormatPcm16kHzFile   = 7,
+    kFileFormatPcm8kHzFile    = 8,
+    kFileFormatPcm32kHzFile   = 9
+};
+
+
+enum ProcessingTypes
+{
+    kPlaybackPerChannel = 0,
+    kPlaybackAllChannelsMixed,
+    kRecordingPerChannel,
+    kRecordingAllChannelsMixed
+};
+
+// Encryption enums
+enum CipherTypes
+{
+    kCipherNull               = 0,
+    kCipherAes128CounterMode  = 1
+};
+
+enum AuthenticationTypes
+{
+    kAuthNull       = 0,
+    kAuthHmacSha1   = 3
+};
+
+enum SecurityLevels
+{
+    kNoProtection                    = 0,
+    kEncryption                      = 1,
+    kAuthentication                  = 2,
+    kEncryptionAndAuthentication     = 3
+};
+
+// Interface for encrypting and decrypting regular data and rtp/rtcp packets.
+// Implement this interface if you wish to provide an encryption scheme to
+// the voice or video engines.
+class Encryption
+{
+public:
+    // Encrypt the given data.
+    //
+    // Args:
+    //   channel: The channel to encrypt data for.
+    //   in_data: The data to encrypt. This data is bytes_in bytes long.
+    //   out_data: The buffer to write the encrypted data to. You may write more
+    //       bytes of encrypted data than what you got as input, up to a maximum
+    //       of webrtc::kViEMaxMtu if you are encrypting in the video engine, or
+    //       webrtc::kVoiceEngineMaxIpPacketSizeBytes for the voice engine.
+    //   bytes_in: The number of bytes in the input buffer.
+    //   bytes_out: The number of bytes written in out_data.
+    virtual void encrypt(
+        int channel,
+        unsigned char* in_data,
+        unsigned char* out_data,
+        int bytes_in,
+        int* bytes_out) = 0;
+
+    // Decrypts the given data. This should reverse the effects of encrypt().
+    //
+    // Args:
+    //   channel_no: The channel to decrypt data for.
+    //   in_data: The data to decrypt. This data is bytes_in bytes long.
+    //   out_data: The buffer to write the decrypted data to. You may write more
+    //       bytes of decrypted data than what you got as input, up to a maximum
+    //       of webrtc::kViEMaxMtu if you are encrypting in the video engine, or
+    //       webrtc::kVoiceEngineMaxIpPacketSizeBytes for the voice engine.
+    //   bytes_in: The number of bytes in the input buffer.
+    //   bytes_out: The number of bytes written in out_data.
+    virtual void decrypt(
+        int channel,
+        unsigned char* in_data,
+        unsigned char* out_data,
+        int bytes_in,
+        int* bytes_out) = 0;
+
+    // Encrypts a RTCP packet. Otherwise, this method has the same contract as
+    // encrypt().
+    virtual void encrypt_rtcp(
+        int channel,
+        unsigned char* in_data,
+        unsigned char* out_data,
+        int bytes_in,
+        int* bytes_out) = 0;
+
+    // Decrypts a RTCP packet. Otherwise, this method has the same contract as
+    // decrypt().
+    virtual void decrypt_rtcp(
+        int channel,
+        unsigned char* in_data,
+        unsigned char* out_data,
+        int bytes_in,
+        int* bytes_out) = 0;
+
+protected:
+    virtual ~Encryption() {}
+    Encryption() {}
+};
+
+// External transport callback interface
+class Transport
+{
+public:
+    virtual int SendPacket(int channel, const void *data, int len) = 0;
+    virtual int SendRTCPPacket(int channel, const void *data, int len) = 0;
+
+protected:
+    virtual ~Transport() {}
+    Transport() {}
+};
+
+// ==================================================================
+// Voice specific types
+// ==================================================================
+
+// Each codec supported can be described by this structure.
+struct CodecInst
+{
+    int pltype;
+    char plname[RTP_PAYLOAD_NAME_SIZE];
+    int plfreq;
+    int pacsize;
+    int channels;
+    int rate;
+};
+
+enum FrameType
+{
+    kFrameEmpty            = 0,
+    kAudioFrameSpeech      = 1,
+    kAudioFrameCN          = 2,
+    kVideoFrameKey         = 3,    // independent frame
+    kVideoFrameDelta       = 4,    // depends on the previous frame
+    kVideoFrameGolden      = 5,    // depends on an old known previous frame
+    kVideoFrameAltRef      = 6
+};
+
+// RTP
+enum {kRtpCsrcSize = 15}; // RFC 3550 page 13
+
+enum RTPDirections
+{
+    kRtpIncoming = 0,
+    kRtpOutgoing
+};
+
+enum PayloadFrequencies
+{
+    kFreq8000Hz = 8000,
+    kFreq16000Hz = 16000,
+    kFreq32000Hz = 32000
+};
+
+enum VadModes                 // degree of bandwidth reduction
+{
+    kVadConventional = 0,      // lowest reduction
+    kVadAggressiveLow,
+    kVadAggressiveMid,
+    kVadAggressiveHigh         // highest reduction
+};
+
+struct NetworkStatistics           // NETEQ statistics
+{
+    // current jitter buffer size in ms
+    WebRtc_UWord16 currentBufferSize;
+    // preferred (optimal) buffer size in ms
+    WebRtc_UWord16 preferredBufferSize;
+    // adding extra delay due to "peaky jitter"
+    bool jitterPeaksFound;
+    // loss rate (network + late) in percent (in Q14)
+    WebRtc_UWord16 currentPacketLossRate;
+    // late loss rate in percent (in Q14)
+    WebRtc_UWord16 currentDiscardRate;
+    // fraction (of original stream) of synthesized speech inserted through
+    // expansion (in Q14)
+    WebRtc_UWord16 currentExpandRate;
+    // fraction of synthesized speech inserted through pre-emptive expansion
+    // (in Q14)
+    WebRtc_UWord16 currentPreemptiveRate;
+    // fraction of data removed through acceleration (in Q14)
+    WebRtc_UWord16 currentAccelerateRate;
+    // clock-drift in parts-per-million (negative or positive)
+    int32_t clockDriftPPM;
+    // average packet waiting time in the jitter buffer (ms)
+    int meanWaitingTimeMs;
+    // median packet waiting time in the jitter buffer (ms)
+    int medianWaitingTimeMs;
+    // min packet waiting time in the jitter buffer (ms)
+    int minWaitingTimeMs;
+    // max packet waiting time in the jitter buffer (ms)
+    int maxWaitingTimeMs;
+};
+
+typedef struct
+{
+    int min;              // minimum
+    int max;              // maximum
+    int average;          // average
+} StatVal;
+
+typedef struct           // All levels are reported in dBm0
+{
+    StatVal speech_rx;   // long-term speech levels on receiving side
+    StatVal speech_tx;   // long-term speech levels on transmitting side
+    StatVal noise_rx;    // long-term noise/silence levels on receiving side
+    StatVal noise_tx;    // long-term noise/silence levels on transmitting side
+} LevelStatistics;
+
+typedef struct        // All levels are reported in dB
+{
+    StatVal erl;      // Echo Return Loss
+    StatVal erle;     // Echo Return Loss Enhancement
+    StatVal rerl;     // RERL = ERL + ERLE
+    // Echo suppression inside EC at the point just before its NLP
+    StatVal a_nlp;
+} EchoStatistics;
+
+enum TelephoneEventDetectionMethods
+{
+    kInBand = 0,
+    kOutOfBand = 1,
+    kInAndOutOfBand = 2
+};
+
+enum NsModes    // type of Noise Suppression
+{
+    kNsUnchanged = 0,   // previously set mode
+    kNsDefault,         // platform default
+    kNsConference,      // conferencing default
+    kNsLowSuppression,  // lowest suppression
+    kNsModerateSuppression,
+    kNsHighSuppression,
+    kNsVeryHighSuppression,     // highest suppression
+};
+
+enum AgcModes                  // type of Automatic Gain Control
+{
+    kAgcUnchanged = 0,        // previously set mode
+    kAgcDefault,              // platform default
+    // adaptive mode for use when analog volume control exists (e.g. for
+    // PC softphone)
+    kAgcAdaptiveAnalog,
+    // scaling takes place in the digital domain (e.g. for conference servers
+    // and embedded devices)
+    kAgcAdaptiveDigital,
+    // can be used on embedded devices where the capture signal level
+    // is predictable
+    kAgcFixedDigital
+};
+
+// EC modes
+enum EcModes                   // type of Echo Control
+{
+    kEcUnchanged = 0,          // previously set mode
+    kEcDefault,                // platform default
+    kEcConference,             // conferencing default (aggressive AEC)
+    kEcAec,                    // Acoustic Echo Cancellation
+    kEcAecm,                   // AEC mobile
+};
+
+// AECM modes
+enum AecmModes                 // mode of AECM
+{
+    kAecmQuietEarpieceOrHeadset = 0,
+                               // Quiet earpiece or headset use
+    kAecmEarpiece,             // most earpiece use
+    kAecmLoudEarpiece,         // Loud earpiece or quiet speakerphone use
+    kAecmSpeakerphone,         // most speakerphone use (default)
+    kAecmLoudSpeakerphone      // Loud speakerphone
+};
+
+// AGC configuration
+typedef struct
+{
+    unsigned short targetLeveldBOv;
+    unsigned short digitalCompressionGaindB;
+    bool           limiterEnable;
+} AgcConfig;                  // AGC configuration parameters
+
+enum StereoChannel
+{
+    kStereoLeft = 0,
+    kStereoRight,
+    kStereoBoth
+};
+
+// Audio device layers
+enum AudioLayers
+{
+    kAudioPlatformDefault = 0,
+    kAudioWindowsWave = 1,
+    kAudioWindowsCore = 2,
+    kAudioLinuxAlsa = 3,
+    kAudioLinuxPulse = 4
+};
+
+enum NetEqModes             // NetEQ playout configurations
+{
+    // Optimized trade-off between low delay and jitter robustness for two-way
+    // communication.
+    kNetEqDefault = 0,
+    // Improved jitter robustness at the cost of increased delay. Can be
+    // used in one-way communication.
+    kNetEqStreaming = 1,
+    // Optimized for decodability of fax signals rather than for perceived audio
+    // quality.
+    kNetEqFax = 2,
+};
+
+enum NetEqBgnModes          // NetEQ Background Noise (BGN) configurations
+{
+    // BGN is always on and will be generated when the incoming RTP stream
+    // stops (default).
+    kBgnOn = 0,
+    // The BGN is faded to zero (complete silence) after a few seconds.
+    kBgnFade = 1,
+    // BGN is not used at all. Silence is produced after speech extrapolation
+    // has faded.
+    kBgnOff = 2,
+};
+
+enum OnHoldModes            // On Hold direction
+{
+    kHoldSendAndPlay = 0,    // Put both sending and playing in on-hold state.
+    kHoldSendOnly,           // Put only sending in on-hold state.
+    kHoldPlayOnly            // Put only playing in on-hold state.
+};
+
+enum AmrMode
+{
+    kRfc3267BwEfficient = 0,
+    kRfc3267OctetAligned = 1,
+    kRfc3267FileStorage = 2,
+};
+
+// ==================================================================
+// Video specific types
+// ==================================================================
+
+// Raw video types
+enum RawVideoType
+{
+    kVideoI420     = 0,
+    kVideoYV12     = 1,
+    kVideoYUY2     = 2,
+    kVideoUYVY     = 3,
+    kVideoIYUV     = 4,
+    kVideoARGB     = 5,
+    kVideoRGB24    = 6,
+    kVideoRGB565   = 7,
+    kVideoARGB4444 = 8,
+    kVideoARGB1555 = 9,
+    kVideoMJPEG    = 10,
+    kVideoNV12     = 11,
+    kVideoNV21     = 12,
+    kVideoBGRA     = 13,
+    kVideoUnknown  = 99
+};
+
+// Video codec
+enum { kConfigParameterSize = 128};
+enum { kPayloadNameSize = 32};
+enum { kMaxSimulcastStreams = 4};
+enum { kMaxTemporalStreams = 4};
+
+enum VideoCodecComplexity
+{
+    kComplexityNormal = 0,
+    kComplexityHigh    = 1,
+    kComplexityHigher  = 2,
+    kComplexityMax     = 3
+};
+
+enum VideoCodecProfile
+{
+    kProfileBase = 0x00,
+    kProfileMain = 0x01
+};
+
+enum VP8ResilienceMode {
+  kResilienceOff,    // The stream produced by the encoder requires a
+                     // recovery frame (typically a key frame) to be
+                     // decodable after a packet loss.
+  kResilientStream,  // A stream produced by the encoder is resilient to
+                     // packet losses, but packets within a frame subsequent
+                     // to a loss can't be decoded.
+  kResilientFrames   // Same as kResilientStream but with added resilience
+                     // within a frame.
+};
+
+// VP8 specific
+struct VideoCodecVP8
+{
+    bool                 pictureLossIndicationOn;
+    bool                 feedbackModeOn;
+    VideoCodecComplexity complexity;
+    VP8ResilienceMode    resilience;
+    unsigned char        numberOfTemporalLayers;
+};
+
+// Unknown specific
+struct VideoCodecGeneric
+{
+};
+
+// Video codec types
+enum VideoCodecType
+{
+    kVideoCodecVP8,
+    kVideoCodecI420,
+    kVideoCodecRED,
+    kVideoCodecULPFEC,
+    kVideoCodecUnknown
+};
+
+union VideoCodecUnion
+{
+    VideoCodecVP8       VP8;
+    VideoCodecGeneric   Generic;
+};
+
+
+// Simulcast is when the same stream is encoded multiple times with different
+// settings such as resolution.
+struct SimulcastStream
+{
+    unsigned short      width;
+    unsigned short      height;
+    unsigned char       numberOfTemporalLayers;
+    unsigned int        maxBitrate;
+    unsigned int        qpMax; // minimum quality
+};
+
+// Common video codec properties
+struct VideoCodec
+{
+    VideoCodecType      codecType;
+    char                plName[kPayloadNameSize];
+    unsigned char       plType;
+
+    unsigned short      width;
+    unsigned short      height;
+
+    unsigned int        startBitrate;
+    unsigned int        maxBitrate;
+    unsigned int        minBitrate;
+    unsigned char       maxFramerate;
+
+    VideoCodecUnion     codecSpecific;
+
+    unsigned int        qpMax;
+    unsigned char       numberOfSimulcastStreams;
+    SimulcastStream     simulcastStream[kMaxSimulcastStreams];
+};
+}  // namespace webrtc
+#endif  // WEBRTC_COMMON_TYPES_H
diff --git a/trunk/src/common_video/OWNERS b/trunk/src/common_video/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/trunk/src/common_video/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/trunk/src/common_video/common_video.gyp b/trunk/src/common_video/common_video.gyp
new file mode 100644
index 0000000..3adb56c
--- /dev/null
+++ b/trunk/src/common_video/common_video.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    'libyuv/libyuv.gypi',
+    'jpeg/jpeg.gypi',
+  ],
+}
diff --git a/trunk/src/common_video/interface/video_image.h b/trunk/src/common_video/interface/video_image.h
new file mode 100644
index 0000000..82e7b15
--- /dev/null
+++ b/trunk/src/common_video/interface/video_image.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
+#define COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
+
+#include "typedefs.h"
+#include <stdlib.h>
+
+namespace webrtc
+{
+
+enum VideoFrameType
+{
+    kKeyFrame = 0,
+    kDeltaFrame = 1,
+    kGoldenFrame = 2,
+    kAltRefFrame = 3,
+    kSkipFrame = 4
+};
+
+class RawImage
+{
+public:
+    RawImage() :    _width(0), _height(0), _timeStamp(0), _buffer(NULL),
+                    _length(0), _size(0) {}
+
+    RawImage(WebRtc_UWord8* buffer, WebRtc_UWord32 length,
+             WebRtc_UWord32 size) :
+                    _width(0), _height(0), _timeStamp(0),
+                    _buffer(buffer), _length(length), _size(size) {}
+
+    WebRtc_UWord32    _width;
+    WebRtc_UWord32    _height;
+    WebRtc_UWord32    _timeStamp;
+    WebRtc_UWord8*    _buffer;
+    WebRtc_UWord32    _length;
+    WebRtc_UWord32    _size;
+};
+
+class EncodedImage
+{
+public:
+    EncodedImage() :
+                     _encodedWidth(0), _encodedHeight(0), _timeStamp(0),
+                     _frameType(kDeltaFrame), _buffer(NULL), _length(0),
+                     _size(0), _completeFrame(false) {}
+
+    EncodedImage(WebRtc_UWord8* buffer,
+                 WebRtc_UWord32 length,
+                 WebRtc_UWord32 size) :
+                     _encodedWidth(0), _encodedHeight(0), _timeStamp(0),
+                     _frameType(kDeltaFrame), _buffer(buffer), _length(length),
+                     _size(size), _completeFrame(false) {}
+
+    WebRtc_UWord32              _encodedWidth;
+    WebRtc_UWord32              _encodedHeight;
+    WebRtc_UWord32              _timeStamp;
+    VideoFrameType              _frameType;
+    WebRtc_UWord8*              _buffer;
+    WebRtc_UWord32              _length;
+    WebRtc_UWord32              _size;
+    bool                        _completeFrame;
+};
+
+} // namespace webrtc
+
+#endif // COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
diff --git a/trunk/src/common_video/jpeg/Android.mk b/trunk/src/common_video/jpeg/Android.mk
new file mode 100644
index 0000000..b5bc243
--- /dev/null
+++ b/trunk/src/common_video/jpeg/Android.mk
@@ -0,0 +1,42 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_jpeg
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    jpeg.cc \
+    data_manager.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../ \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../../ \
+    external/jpeg
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/common_video/jpeg/data_manager.cc b/trunk/src/common_video/jpeg/data_manager.cc
new file mode 100644
index 0000000..a5a7a48
--- /dev/null
+++ b/trunk/src/common_video/jpeg/data_manager.cc
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/jpeg/data_manager.h"
+
+namespace webrtc
+{
+
+typedef struct
+{
+    jpeg_source_mgr  mgr;
+    JOCTET* next_input_byte;
+    size_t bytes_in_buffer;      /* # of byte spaces remaining in buffer */
+} DataSrcMgr;
+
+void
+jpegSetSrcBuffer(j_decompress_ptr cinfo, JOCTET* srcBuffer, size_t bufferSize)
+{
+    DataSrcMgr* src;
+    if (cinfo->src == NULL)
+    {  /* first time for this JPEG object? */
+        cinfo->src = (struct jpeg_source_mgr *)
+                   (*cinfo->mem->alloc_small) ((j_common_ptr) cinfo,
+                       JPOOL_PERMANENT, sizeof(DataSrcMgr));
+    }
+
+    // Install the callbacks libjpeg requires from a data source manager.
+    src = (DataSrcMgr*) cinfo->src;
+    src->mgr.init_source = initSrc;
+    src->mgr.fill_input_buffer = fillInputBuffer;
+    src->mgr.skip_input_data = skipInputData;
+    src->mgr.resync_to_restart = jpeg_resync_to_restart; // use default
+    src->mgr.term_source = termSource;
+    // Record the caller's buffer; initSrc() publishes it to the library.
+    src->bytes_in_buffer = bufferSize;
+    src->next_input_byte = srcBuffer;
+
+}
+
+
+void
+initSrc(j_decompress_ptr cinfo)  // called by jpeg_read_header() before reading
+{
+    DataSrcMgr  *src = (DataSrcMgr*)cinfo->src;
+    src->mgr.next_input_byte = src->next_input_byte;  // expose saved buffer
+    src->mgr.bytes_in_buffer = src->bytes_in_buffer;  // and its length
+}
+
+boolean
+fillInputBuffer(j_decompress_ptr cinfo)
+{
+    return false;  // whole stream was supplied up front; no more data exists
+}
+
+
+void
+skipInputData(j_decompress_ptr cinfo, long num_bytes)
+{
+    DataSrcMgr* src = (DataSrcMgr*)cinfo->src;  // in-memory source state
+    if (num_bytes > 0)
+    {
+          if ((unsigned long)num_bytes > src->mgr.bytes_in_buffer)
+              src->mgr.bytes_in_buffer = 0;  // skip past end: nothing remains
+          else
+          {
+              src->mgr.next_input_byte += num_bytes;
+              src->mgr.bytes_in_buffer -= num_bytes;
+          }
+    }
+}
+
+
+void
+termSource (j_decompress_ptr cinfo)
+{
+  // No cleanup required: the input buffer is owned by the caller.
+}
+
+} // end of namespace webrtc
diff --git a/trunk/src/common_video/jpeg/data_manager.h b/trunk/src/common_video/jpeg/data_manager.h
new file mode 100644
index 0000000..61609ec
--- /dev/null
+++ b/trunk/src/common_video/jpeg/data_manager.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Jpeg source data manager
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER
+#define WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER
+
+#include <stdio.h>
+extern "C" {
+#if defined(USE_SYSTEM_LIBJPEG)
+#include <jpeglib.h>
+#else
+#include "jpeglib.h"
+#endif
+}
+
+namespace webrtc
+{
+
+// Source manager:
+
+
+// a general function that will set these values
+void
+jpegSetSrcBuffer(j_decompress_ptr cinfo, JOCTET* srcBuffer, size_t bufferSize);
+
+
+// Initialize source.  This is called by jpeg_read_header() before any
+//  data is actually read.
+
+void
+initSrc(j_decompress_ptr cinfo);
+
+
+// Fill input buffer
+// This is called whenever bytes_in_buffer has reached zero and more
+//  data is wanted.
+
+boolean
+fillInputBuffer(j_decompress_ptr cinfo);
+
+// Skip input data
+// Skip num_bytes worth of data.
+
+void
+skipInputData(j_decompress_ptr cinfo, long num_bytes);
+
+
+
+
+// Terminate source
+void
+termSource (j_decompress_ptr cinfo);
+
+} // end of namespace webrtc
+
+
+#endif /* WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER */
diff --git a/trunk/src/common_video/jpeg/include/jpeg.h b/trunk/src/common_video/jpeg/include/jpeg.h
new file mode 100644
index 0000000..05e759c
--- /dev/null
+++ b/trunk/src/common_video/jpeg/include/jpeg.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_JPEG
+#define WEBRTC_COMMON_VIDEO_JPEG
+
+#include "typedefs.h"
+#include "video_image.h"
+
+// jpeg forward declaration
+struct jpeg_compress_struct;
+struct jpeg_decompress_struct;
+
+namespace webrtc
+{
+
+class JpegEncoder
+{
+public:
+    JpegEncoder();
+    ~JpegEncoder();
+
+// SetFileName
+// Input:
+//  - fileName - Pointer to input vector (should be less than 256) to which the
+//               compressed  file will be written to
+//    Output:
+//    - 0             : OK
+//    - (-1)          : Error
+    WebRtc_Word32 SetFileName(const char* fileName);
+
+// Encode an I420 image. The encoded image is saved to a file
+//
+// Input:
+//          - inputImage        : Image to be encoded
+//
+//    Output:
+//    - 0             : OK
+//    - (-1)          : Error
+    WebRtc_Word32 Encode(const RawImage& inputImage);
+
+private:
+
+    jpeg_compress_struct*   _cinfo;
+    char                    _fileName[256];
+};
+
+class JpegDecoder
+{
+ public:
+    JpegDecoder();
+    ~JpegDecoder();
+
+// Decodes a JPEG-stream
+// Supports 1 image component. 3 interleaved image components,
+// YCbCr sub-sampling  4:4:4, 4:2:2, 4:2:0.
+//
+// Input:
+//    - inputImage        : encoded image to be decoded.
+//    - outputImage       : RawImage to store decoded output
+//
+//    Output:
+//    - 0             : OK
+//    - (-1)          : Error
+    WebRtc_Word32 Decode(const EncodedImage& inputImage,
+                         RawImage& outputImage);
+ private:
+    jpeg_decompress_struct*    _cinfo;
+};
+
+
+}
+#endif /* WEBRTC_COMMON_VIDEO_JPEG  */
diff --git a/trunk/src/common_video/jpeg/jpeg.cc b/trunk/src/common_video/jpeg/jpeg.cc
new file mode 100644
index 0000000..00586d4
--- /dev/null
+++ b/trunk/src/common_video/jpeg/jpeg.cc
@@ -0,0 +1,375 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WIN32)
+ #include <basetsd.h>
+#endif
+#include <setjmp.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "common_video/jpeg/include/jpeg.h"
+#include "common_video/jpeg/data_manager.h"
+
+extern "C" {
+#if defined(USE_SYSTEM_LIBJPEG)
+#include <jpeglib.h>
+#else
+#include "jpeglib.h"
+#endif
+}
+
+
+namespace webrtc
+{
+
+// Error handler
+struct myErrorMgr {
+
+    struct jpeg_error_mgr pub;
+    jmp_buf setjmp_buffer;
+};
+typedef struct myErrorMgr * myErrorPtr;
+
+METHODDEF(void)
+MyErrorExit (j_common_ptr cinfo)
+{
+    myErrorPtr myerr = (myErrorPtr) cinfo->err;
+
+    // Return control to the setjmp point
+    longjmp(myerr->setjmp_buffer, 1);
+}
+
+JpegEncoder::JpegEncoder()
+{
+   _cinfo = new jpeg_compress_struct;
+    strcpy(_fileName, "Snapshot.jpg");
+}
+
+JpegEncoder::~JpegEncoder()
+{
+    if (_cinfo != NULL)
+    {
+        delete _cinfo;
+        _cinfo = NULL;
+    }
+}
+
+
+WebRtc_Word32
+JpegEncoder::SetFileName(const char* fileName)
+{
+    if (!fileName)
+    {
+        return -1;
+    }
+
+    // strncpy(dst, src, 256) leaves _fileName unterminated when fileName
+    // is 256 chars or longer; copy one less and terminate explicitly.
+    strncpy(_fileName, fileName, sizeof(_fileName) - 1);
+    _fileName[sizeof(_fileName) - 1] = '\0';
+    return 0;
+}
+
+
+WebRtc_Word32
+JpegEncoder::Encode(const RawImage& inputImage)
+{
+    if (inputImage._buffer == NULL || inputImage._size == 0)
+    {
+        return -1;
+    }
+    if (inputImage._width < 1 || inputImage._height < 1)
+    {
+        return -1;
+    }
+
+    FILE* outFile = NULL;
+
+    const WebRtc_UWord32 width = inputImage._width;
+    const WebRtc_UWord32 height = inputImage._height;
+
+    // Set error handler
+    myErrorMgr      jerr;
+    _cinfo->err = jpeg_std_error(&jerr.pub);
+    jerr.pub.error_exit = MyErrorExit;
+    // Establish the setjmp return context
+    if (setjmp(jerr.setjmp_buffer))
+    {
+        // If we get here, the JPEG code has signaled an error.
+        jpeg_destroy_compress(_cinfo);  // NOTE(review): origImagePtr leaks here
+        if (outFile != NULL)
+        {
+            fclose(outFile);
+        }
+        return -1;  // encode failed inside libjpeg
+    }
+
+    if ((outFile = fopen(_fileName, "wb")) == NULL)
+    {
+        return -2;
+    }
+    // Create a compression object
+    jpeg_create_compress(_cinfo);
+
+    // Setting destination file
+    jpeg_stdio_dest(_cinfo, outFile);
+
+    // Set parameters for compression
+    _cinfo->in_color_space = JCS_YCbCr;
+    jpeg_set_defaults(_cinfo);
+
+    _cinfo->image_width = width;
+    _cinfo->image_height = height;
+    _cinfo->input_components = 3;
+
+    _cinfo->comp_info[0].h_samp_factor = 2;   // Y
+    _cinfo->comp_info[0].v_samp_factor = 2;
+    _cinfo->comp_info[1].h_samp_factor = 1;   // U
+    _cinfo->comp_info[1].v_samp_factor = 1;
+    _cinfo->comp_info[2].h_samp_factor = 1;   // V
+    _cinfo->comp_info[2].v_samp_factor = 1;
+    _cinfo->raw_data_in = TRUE;
+
+    WebRtc_UWord32 height16 = (height + 15) & ~15;
+    WebRtc_UWord8* imgPtr = inputImage._buffer;
+    WebRtc_UWord8* origImagePtr = NULL;
+    if (height16 != height)
+    {
+        // Copy image to an adequate size buffer
+        WebRtc_UWord32 requiredSize = height16 * width * 3 >> 1;
+        origImagePtr = new WebRtc_UWord8[requiredSize];
+        memset(origImagePtr, 0, requiredSize);
+        memcpy(origImagePtr, inputImage._buffer, inputImage._length);
+        imgPtr = origImagePtr;
+    }
+
+    jpeg_start_compress(_cinfo, TRUE);
+
+    JSAMPROW y[16],u[8],v[8];
+    JSAMPARRAY data[3];
+
+    data[0] = y;
+    data[1] = u;
+    data[2] = v;
+
+    WebRtc_UWord32 i, j;
+
+    for (j = 0; j < height; j += 16)
+    {
+        for (i = 0; i < 16; i++)
+        {
+            y[i] = (JSAMPLE*)imgPtr + width * (i + j);
+
+            if (i % 2 == 0)
+            {
+                u[i / 2] = (JSAMPLE*) imgPtr + width * height +
+                            width / 2 * ((i + j) / 2);
+                v[i / 2] = (JSAMPLE*) imgPtr + width * height +
+                            width * height / 4 + width / 2 * ((i + j) / 2);
+            }
+        }
+        jpeg_write_raw_data(_cinfo, data, 16);
+    }
+
+    jpeg_finish_compress(_cinfo);
+    jpeg_destroy_compress(_cinfo);
+
+    fclose(outFile);
+
+    if (origImagePtr != NULL)
+    {
+        delete [] origImagePtr;
+    }
+
+    return 0;
+}
+
+JpegDecoder::JpegDecoder()
+{
+    _cinfo = new jpeg_decompress_struct;
+}
+
+JpegDecoder::~JpegDecoder()
+{
+    if (_cinfo != NULL)
+    {
+        delete _cinfo;
+        _cinfo = NULL;
+    }
+}
+
+WebRtc_Word32
+JpegDecoder::Decode(const EncodedImage& inputImage,
+                    RawImage& outputImage)
+{
+
+    WebRtc_UWord8* tmpBuffer = NULL;
+    // Set error handler
+    myErrorMgr    jerr;
+    _cinfo->err = jpeg_std_error(&jerr.pub);
+    jerr.pub.error_exit = MyErrorExit;
+
+    // Establish the setjmp return context
+    if (setjmp(jerr.setjmp_buffer))
+    {
+        if (_cinfo->is_decompressor)
+        {
+            jpeg_destroy_decompress(_cinfo);
+        }
+        if (tmpBuffer != NULL)
+        {
+            delete [] tmpBuffer;
+        }
+        return -1;
+    }
+
+    // Create decompression object
+    jpeg_create_decompress(_cinfo);
+
+    // Specify data source
+    jpegSetSrcBuffer(_cinfo, (JOCTET*) inputImage._buffer, inputImage._size);
+
+    // Read header data
+    jpeg_read_header(_cinfo, TRUE);
+    // Set output parameters after jpeg_read_header(), which resets them to
+    // defaults; assigning out_color_space before this point had no effect.
+    _cinfo->out_color_space = JCS_YCbCr;
+    _cinfo->raw_data_out = TRUE;
+    jpeg_start_decompress(_cinfo);
+
+    // Check header; destroy the decompression state on unsupported input
+    // so repeated Decode() calls do not leak libjpeg memory pools.
+    if (_cinfo->num_components == 4 || _cinfo->progressive_mode == 1)
+    {
+        jpeg_destroy_decompress(_cinfo);
+        return -2; // not supported
+    }
+
+
+
+
+    WebRtc_UWord32 height = _cinfo->image_height;
+    WebRtc_UWord32 width = _cinfo->image_width;
+
+    // Making sure width and height are even
+    if (height % 2)
+    {
+        height++;
+    }
+    if (width % 2)
+    {
+         width++;
+    }
+
+    WebRtc_UWord32 height16 = (height + 15) & ~15;
+    WebRtc_UWord32 stride = (width + 15) & ~15;
+    WebRtc_UWord32 uvStride = ((((stride + 1) >> 1) + 15) & ~15);
+
+    WebRtc_UWord32 tmpRequiredSize =  stride * height16 +
+                                      2 * (uvStride * ((height16 + 1) >> 1));
+    WebRtc_UWord32 requiredSize = width * height * 3 >> 1;
+
+    // verify sufficient buffer size
+    if (outputImage._buffer && outputImage._size < requiredSize)
+    {
+        delete [] outputImage._buffer;
+        outputImage._buffer = NULL;
+    }
+
+    if (outputImage._buffer == NULL)
+    {
+        outputImage._buffer = new WebRtc_UWord8[requiredSize];
+        outputImage._size = requiredSize;
+    }
+
+    WebRtc_UWord8* outPtr = outputImage._buffer;
+
+    if (tmpRequiredSize > requiredSize)
+    {
+        tmpBuffer = new WebRtc_UWord8[(int) (tmpRequiredSize)];
+        outPtr = tmpBuffer;
+    }
+
+    JSAMPROW y[16],u[8],v[8];
+    JSAMPARRAY data[3];
+    data[0] = y;
+    data[1] = u;
+    data[2] = v;
+
+    WebRtc_UWord32 hInd, i;
+    WebRtc_UWord32 numScanLines = 16;
+    WebRtc_UWord32 numLinesProcessed = 0;
+
+    while (_cinfo->output_scanline < _cinfo->output_height)
+    {
+        hInd = _cinfo->output_scanline;
+        for (i = 0; i < numScanLines; i++)
+        {
+            y[i] = outPtr + stride * (i + hInd);
+
+            if (i % 2 == 0)
+            {
+                 u[i / 2] = outPtr + stride * height16 +
+                            stride / 2 * ((i + hInd) / 2);
+                 v[i / 2] = outPtr + stride * height16 +
+                            stride * height16 / 4 +
+                            stride / 2 * ((i + hInd) / 2);
+            }
+        }
+        // Processes exactly one iMCU row per call
+        numLinesProcessed = jpeg_read_raw_data(_cinfo, data, numScanLines);
+        // Error in read
+        if (numLinesProcessed == 0)
+        {
+            jpeg_abort((j_common_ptr)_cinfo);
+            return -1;
+        }
+    }
+
+    if (tmpRequiredSize > requiredSize)
+    {
+         WebRtc_UWord8* dstFramePtr = outputImage._buffer;
+         WebRtc_UWord8* tmpPtr = outPtr;
+
+         for (WebRtc_UWord32 p = 0; p < 3; p++)
+         {
+             const WebRtc_UWord32 h = (p == 0) ? height : height >> 1;
+             const WebRtc_UWord32 h16 = (p == 0) ? height16 : height16 >> 1;
+             const WebRtc_UWord32 w = (p == 0) ? width : width >> 1;
+             const WebRtc_UWord32 s = (p == 0) ? stride : stride >> 1;
+
+             for (WebRtc_UWord32 i = 0; i < h; i++)
+             {
+                 memcpy(dstFramePtr, tmpPtr, w);
+                 dstFramePtr += w;
+                 tmpPtr += s;
+             }
+             tmpPtr += (h16 - h) * s;
+         }
+    }
+
+    if (tmpBuffer != NULL)
+    {
+        delete [] tmpBuffer;
+    }
+    // Setting output Image parameter
+    outputImage._width = width;
+    outputImage._height =  height;
+    outputImage._length = requiredSize;
+    outputImage._timeStamp = inputImage._timeStamp;
+
+    jpeg_finish_decompress(_cinfo);
+    jpeg_destroy_decompress(_cinfo);
+    return 0;
+}
+
+
+}
diff --git a/trunk/src/common_video/jpeg/jpeg.gypi b/trunk/src/common_video/jpeg/jpeg.gypi
new file mode 100644
index 0000000..1738a3d
--- /dev/null
+++ b/trunk/src/common_video/jpeg/jpeg.gypi
@@ -0,0 +1,90 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'variables': {
+    'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+  },
+  'targets': [
+    {
+      'target_name': 'webrtc_jpeg',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_libyuv',
+      ],
+      'include_dirs': [
+        'include',
+        '<(webrtc_root)',
+        '<(webrtc_root)/common_video/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+          '<(webrtc_root)/common_video/interface',
+        ],
+      },
+      'conditions': [
+        ['build_libjpeg==1', {
+          'conditions': [
+            ['build_with_chromium==1', {
+              'dependencies': [
+                '<(libjpeg_gyp_path):libjpeg',
+              ],
+            }, {
+              'conditions': [
+                ['use_libjpeg_turbo==1', {
+                  'dependencies': [
+                    '<(DEPTH)/third_party/libjpeg_turbo/libjpeg.gyp:libjpeg',
+                  ],
+                }, {
+                  'dependencies': [
+                    '<(DEPTH)/third_party/libjpeg/libjpeg.gyp:libjpeg',
+                  ],
+                }],
+              ],
+            }],
+          ],
+        }, {
+          # Need to add a directory normally exported by libjpeg.gyp.
+          'include_dirs': [ '<(DEPTH)/third_party/libjpeg', ],
+        }],
+      ],
+      'sources': [
+        'include/jpeg.h',
+        'data_manager.cc',
+        'data_manager.h',
+        'jpeg.cc',
+      ],
+    },
+  ], # targets
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'jpeg_unittests',
+          'type': 'executable',
+          'dependencies': [
+             'webrtc_jpeg',
+             '<(webrtc_root)/../testing/gtest.gyp:gtest',
+             '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'jpeg_unittest.cc',
+          ],
+        },
+      ] # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/common_video/jpeg/jpeg_unittest.cc b/trunk/src/common_video/jpeg/jpeg_unittest.cc
new file mode 100644
index 0000000..fdcbe34
--- /dev/null
+++ b/trunk/src/common_video/jpeg/jpeg_unittest.cc
@@ -0,0 +1,123 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <string>
+
+#include "common_video/jpeg/include/jpeg.h"
+#include "common_video/interface/video_image.h"
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+// Expected dimensions of the reference image webrtc_logo.jpg.
+const unsigned int kImageWidth = 640;
+const unsigned int kImageHeight = 480;
+
+// Fixture for JPEG encode/decode tests. Creates a fresh encoder/decoder per
+// test and releases any buffer obtained through ReadEncodedImage().
+class JpegTest: public testing::Test {
+ protected:
+  JpegTest()
+      : input_filename_(webrtc::test::ProjectRootPath() +
+                       "test/data/common_video/jpeg/webrtc_logo.jpg"),
+        decoded_filename_(webrtc::test::OutputPath() + "TestJpegDec.yuv"),
+        encoded_filename_(webrtc::test::OutputPath() + "TestJpegEnc.jpg"),
+        encoded_buffer_(NULL) {}
+  virtual ~JpegTest() {}
+
+  void SetUp() {
+    encoder_ = new JpegEncoder();
+    decoder_ = new JpegDecoder();
+  }
+
+  void TearDown() {
+    // Release the buffer populated by ReadEncodedImage(), if any.
+    if (encoded_buffer_ != NULL) {
+      if (encoded_buffer_->_buffer != NULL) {
+        delete [] encoded_buffer_->_buffer;
+      }
+      delete encoded_buffer_;
+    }
+    delete encoder_;
+    delete decoder_;
+  }
+
+  // Reads an encoded image into a newly allocated EncodedImage. Caller will
+  // have to deallocate the memory of this object and its _buffer byte array.
+  EncodedImage* ReadEncodedImage(std::string input_filename) {
+    FILE* open_file = fopen(input_filename.c_str(), "rb");
+    assert(open_file != NULL);
+    size_t length = webrtc::test::GetFileSize(input_filename);
+    EncodedImage* encoded_buffer = new EncodedImage();
+    encoded_buffer->_buffer = new WebRtc_UWord8[length];
+    encoded_buffer->_size = length;
+    encoded_buffer->_length = length;
+    if (fread(encoded_buffer->_buffer, 1, length, open_file) != length) {
+      ADD_FAILURE() << "Error reading file:" << input_filename;
+    }
+    fclose(open_file);
+    return encoded_buffer;
+  }
+
+  std::string input_filename_;    // Source JPEG used as test input.
+  std::string decoded_filename_;  // Output path for the decoded YUV frame.
+  std::string encoded_filename_;  // Output path for the re-encoded JPEG.
+  EncodedImage* encoded_buffer_;  // Owned; freed in TearDown().
+  JpegEncoder* encoder_;
+  JpegDecoder* decoder_;
+};
+
+// Decodes the reference JPEG and verifies the decoded frame has content and
+// the expected dimensions.
+TEST_F(JpegTest, Decode) {
+  encoded_buffer_ = ReadEncodedImage(input_filename_);
+  RawImage image_buffer;
+  EXPECT_EQ(0, decoder_->Decode(*encoded_buffer_, image_buffer));
+  EXPECT_GT(image_buffer._length, 0u);
+  EXPECT_EQ(kImageWidth, image_buffer._width);
+  EXPECT_EQ(kImageHeight, image_buffer._height);
+  delete [] image_buffer._buffer;
+}
+
+// Verifies the encoder rejects invalid inputs: NULL file name, NULL buffer,
+// zero buffer size, and zero width/height.
+TEST_F(JpegTest, EncodeInvalidInputs) {
+  RawImage empty;
+  empty._width = 164;
+  empty._height = 164;
+  // NULL file name and NULL _buffer must both fail.
+  EXPECT_EQ(-1, encoder_->SetFileName(0));
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+
+  // Non-NULL buffer but zero _size must fail.
+  empty._buffer = new WebRtc_UWord8[10];
+  empty._size = 0;
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+
+  // Zero height must fail.
+  empty._size = 10;
+  empty._height = 0;
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+
+  // Zero width must fail.
+  empty._height = 164;
+  empty._width = 0;
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+  delete[] empty._buffer;
+}
+
+// Decodes the input image, re-encodes it to |encoded_filename_|, and saves
+// the decoded raw frame to |decoded_filename_|.
+TEST_F(JpegTest, Encode) {
+  // Decode our input image then encode it again to a new file:
+  encoded_buffer_ = ReadEncodedImage(input_filename_);
+  RawImage image_buffer;
+  EXPECT_EQ(0, decoder_->Decode(*encoded_buffer_, image_buffer));
+
+  EXPECT_EQ(0, encoder_->SetFileName(encoded_filename_.c_str()));
+  EXPECT_EQ(0, encoder_->Encode(image_buffer));
+
+  // Save decoded image to file.
+  FILE* save_file = fopen(decoded_filename_.c_str(), "wb");
+  // Guard against a NULL FILE* (e.g. missing output directory); the original
+  // code passed it straight to fwrite, which is undefined behavior.
+  ASSERT_TRUE(save_file != NULL) << "Cannot open: " << decoded_filename_;
+  fwrite(image_buffer._buffer, 1, image_buffer._length, save_file);
+  fclose(save_file);
+
+  delete[] image_buffer._buffer;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/common_video/libyuv/Android.mk b/trunk/src/common_video/libyuv/Android.mk
new file mode 100644
index 0000000..90f6250
--- /dev/null
+++ b/trunk/src/common_video/libyuv/Android.mk
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Provides MY_WEBRTC_COMMON_DEFS used in LOCAL_CFLAGS below.
+include $(LOCAL_PATH)/../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_yuv
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    libyuv.cc \
+    scaler.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../libyuv/files/include
+
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# Inside the full Android source tree (no NDK) stlport must be wired up
+# manually via the platform makefile.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
\ No newline at end of file
diff --git a/trunk/src/common_video/libyuv/include/libyuv.h b/trunk/src/common_video/libyuv/include/libyuv.h
new file mode 100644
index 0000000..bf106f1
--- /dev/null
+++ b/trunk/src/common_video/libyuv/include/libyuv.h
@@ -0,0 +1,194 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * WebRTC's Wrapper to libyuv.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_LIBYUV_H_
+#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_LIBYUV_H_
+
+#include "common_types.h"  // RawVideoTypes.
+#include "typedefs.h"
+
+namespace webrtc {
+
+// TODO(mikhal): 1. Sync libyuv and WebRtc meaning of stride.
+//               2. Reorder parameters for consistency.
+
+// Supported video types.
+enum VideoType {
+  kUnknown,
+  kI420,
+  kIYUV,
+  kRGB24,
+  kABGR,
+  kARGB,
+  kARGB4444,
+  kRGB565,
+  kARGB1555,
+  kYUY2,
+  kYV12,
+  kUYVY,
+  kMJPG,
+  kNV21,
+  kNV12,
+  kBGRA,
+};
+
+// Conversion between the RawVideoType and the LibYuv videoType.
+// TODO(wu): Consolidate types into one type throughout WebRtc.
+VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
+
+// Supported rotation
+// Direction of rotation - clockwise.
+enum VideoRotationMode {
+  kRotateNone = 0,
+  kRotate90 = 90,
+  kRotate180 = 180,
+  kRotate270 = 270,
+};
+
+// Calculate the required buffer size.
+// Input:
+//   - type - The type of the designated video frame.
+//   - width - frame width in pixels.
+//   - height - frame height in pixels.
+// Return value:  The required size in bytes to accommodate the specified
+//                video frame or -1 in case of an error.
+int CalcBufferSize(VideoType type, int width, int height);
+
+// Compute required buffer size when converting from one type to another.
+// Input:
+//   - src_video_type - Type of the existing video frame.
+//   - dst_video_type - Type of the designated video frame.
+//   - length - length in bytes of the data.
+// Return value: The required size in bytes to accommodate the specified
+//               converted video frame or -1 in case of an error.
+int CalcBufferSize(VideoType src_video_type,
+                   VideoType dst_video_type,
+                   int length);
+// TODO (mikhal): Merge the two functions above.
+
+
+// Convert To I420
+// Input:
+//   - src_video_type   : Type of input video.
+//   - src_frame        : Pointer to a source frame.
+//   - crop_x/crop_y    : Starting positions for cropping (0 for no crop).
+//   - src/dst_width    : src/dst width in pixels.
+//   - src/dst_height   : src/dst height in pixels.
+//   - sample_size      : Required only for the parsing of MJPG (set to 0 else).
+//   - dst_stride       : Number of bytes in a row of the dst Y plane.
+//   - rotation         : Rotation mode of output image.
+// Output:
+//   - dst_frame        : Pointer to a destination frame.
+// Return value: 0 if OK, < 0 otherwise.
+
+int ConvertToI420(VideoType src_video_type,
+                  const uint8_t* src_frame,
+                  int crop_x, int crop_y,
+                  int src_width, int src_height,
+                  int sample_size,
+                  int dst_width, int dst_height, int dst_stride,
+                  VideoRotationMode rotation,
+                  uint8_t* dst_frame);
+
+// Convert From I420
+// Input:
+//   - src_frame        : Pointer to a source frame.
+//   - src_stride       : Number of bytes in a row of the src Y plane.
+//   - dst_video_type   : Type of output video.
+//   - dst_sample_size  : Required only for the parsing of MJPG.
+//   - width            : Width in pixels.
+//   - height           : Height in pixels.
+//   - dst_frame        : Pointer to a destination frame.
+// Return value: 0 if OK, < 0 otherwise.
+int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame);
+// ConvertFrom YV12.
+// Interface - same as above.
+int ConvertFromYV12(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame);
+
+// The following list describes designated conversion functions which
+// are not covered by the previous general functions.
+// Input and output descriptions mostly match the above descriptions, and are
+// therefore omitted.
+// Possible additional input value - dst_stride - stride of the dst frame.
+
+int ConvertI420ToARGB4444(const uint8_t* src_frame,
+                          uint8_t* dst_frame,
+                          int width,
+                          int height,
+                          int dst_stride);
+int ConvertI420ToRGB565(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width,
+                        int height);
+int ConvertI420ToARGB1555(const uint8_t* src_frame,
+                          uint8_t* dst_frame,
+                          int width,
+                          int height,
+                          int dst_stride);
+int ConvertRGB24ToARGB(const uint8_t* src_frame,
+                       uint8_t* dst_frame,
+                       int width, int height,
+                       int dst_stride);
+int ConvertNV12ToRGB565(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height);
+
+// Mirror functions
+// The following 2 functions perform mirroring on a given image
+// (LeftRight/UpDown).
+// Input:
+//    - width       : Image width in pixels.
+//    - height      : Image height in pixels.
+//    - src_frame   : Pointer to a source frame.
+//    - dst_frame   : Pointer to a destination frame.
+// Return value: 0 if OK, < 0 otherwise.
+int MirrorI420LeftRight(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height);
+int MirrorI420UpDown(const uint8_t* src_frame,
+                     uint8_t* dst_frame,
+                     int width, int height);
+
+// Mirror functions + conversion
+// Input:
+//    - src_frame       : Pointer to source frame.
+//    - dst_frame       : Pointer to destination frame.
+//    - src_width       : Width of input buffer.
+//    - src_height      : Height of input buffer.
+//    - src_video_type  : Video type to convert from, I420 if no
+//                        conversion should be done.
+// Return value: 0 if OK, < 0 otherwise.
+int ConvertToI420AndMirrorUpDown(const uint8_t* src_frame,
+                                 uint8_t* dst_frame,
+                                 int src_width,
+                                 int src_height,
+                                 VideoType src_video_type);
+
+// Compute PSNR for an I420 frame (all planes).
+double I420PSNR(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height);
+// Compute SSIM for an I420 frame (all planes).
+double I420SSIM(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height);
+}
+
+#endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_LIBYUV_H_
diff --git a/trunk/src/common_video/libyuv/include/scaler.h b/trunk/src/common_video/libyuv/include/scaler.h
new file mode 100644
index 0000000..8f432f3
--- /dev/null
+++ b/trunk/src/common_video/libyuv/include/scaler.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Interface to the LibYuv scaling functionality
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
+#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Supported scaling types
+enum ScaleMethod {
+  kScalePoint,  // no interpolation
+  kScaleBilinear,
+  kScaleBox
+};
+
+// TODO (mikhal): Have set return the expected value of the dst_frame, such
+// that the user can allocate memory for Scale().
+// Wraps libyuv scaling. Configure once with Set(), then call Scale() per
+// frame; Scale() fails with -2 until Set() has succeeded.
+class Scaler {
+ public:
+  Scaler();
+  ~Scaler();
+
+  // Set interpolation properties:
+  //
+  // Return value: 0 - OK
+  //              -1 - parameter error
+  int Set(int src_width, int src_height,
+          int dst_width, int dst_height,
+          VideoType src_video_type, VideoType dst_video_type,
+          ScaleMethod method);
+
+  // Scale frame
+  // Memory is allocated by user. If dst_frame is not of sufficient size,
+  // the frame will be reallocated to the appropriate size.
+  // Return value: 0 - OK,
+  //               -1 - parameter error
+  //               -2 - scaler not set
+  int Scale(const uint8_t* src_frame,
+            uint8_t*& dst_frame,
+            int& dst_size);
+
+ private:
+  // Determine if the VideoTypes are currently supported.
+  bool SupportedVideoType(VideoType src_video_type,
+                          VideoType dst_video_type);
+
+  ScaleMethod   method_;      // Interpolation method chosen via Set().
+  int           src_width_;   // Source dimensions in pixels.
+  int           src_height_;
+  int           dst_width_;   // Destination dimensions in pixels.
+  int           dst_height_;
+  bool          set_;         // True once Set() has completed successfully.
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
diff --git a/trunk/src/common_video/libyuv/libyuv.cc b/trunk/src/common_video/libyuv/libyuv.cc
new file mode 100644
index 0000000..d1ced7a
--- /dev/null
+++ b/trunk/src/common_video/libyuv/libyuv.cc
@@ -0,0 +1,450 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/libyuv/include/libyuv.h"
+
+#include <assert.h>
+
+// LibYuv includes
+#ifdef WEBRTC_ANDROID
+#include "libyuv/files/include/libyuv.h"
+#else
+#include "third_party/libyuv/include/libyuv.h"
+#endif
+
+namespace webrtc {
+
+// Maps webrtc::RawVideoType onto the local VideoType enum.
+// Asserts in debug builds and returns kUnknown for unmapped values.
+VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) {
+  switch (type) {
+    case kVideoI420:
+      return kI420;
+    case kVideoIYUV:
+      return kIYUV;
+    case kVideoRGB24:
+      return kRGB24;
+    case kVideoARGB:
+      return kARGB;
+    case kVideoARGB4444:
+      return kARGB4444;
+    case kVideoRGB565:
+      return kRGB565;
+    case kVideoARGB1555:
+      return kARGB1555;
+    case kVideoYUY2:
+      return kYUY2;
+    case kVideoYV12:
+      return kYV12;
+    case kVideoUYVY:
+      return kUYVY;
+    case kVideoNV21:
+      return kNV21;
+    case kVideoNV12:
+      return kNV12;
+    case kVideoBGRA:
+      return kBGRA;
+    case kVideoMJPEG:
+      return kMJPG;
+    default:
+      assert(false);
+  }
+  return kUnknown;
+}
+
+// Returns the size in bytes of a width x height frame of |type|, or -1
+// (after asserting in debug builds) for an unsupported type.
+int CalcBufferSize(VideoType type, int width, int height) {
+  int bits_per_pixel = 32;
+  switch (type) {
+    // Planar/semi-planar YUV 4:2:0 formats: 12 bits per pixel.
+    case kI420:
+    case kNV12:
+    case kNV21:
+    case kIYUV:
+    case kYV12:
+      bits_per_pixel = 12;
+      break;
+    // Packed 16-bit formats.
+    case kARGB4444:
+    case kRGB565:
+    case kARGB1555:
+    case kYUY2:
+    case kUYVY:
+      bits_per_pixel = 16;
+      break;
+    case kRGB24:
+      bits_per_pixel = 24;
+      break;
+    case kBGRA:
+    case kARGB:
+      bits_per_pixel = 32;
+      break;
+    default:
+      assert(false);
+      return -1;
+  }
+  return (width * height * bits_per_pixel) / 8;  // bytes
+}
+
+// Returns the buffer size in bytes required to hold |length| bytes of
+// |src_video_type| data after conversion to |dst_video_type|, or -1 on an
+// unsupported type. The result scales |length| by the ratio of the two
+// formats' bits per pixel.
+int CalcBufferSize(VideoType src_video_type,
+                   VideoType dst_video_type,
+                   int length) {
+  int src_bits_per_pixel = 32;
+  switch (src_video_type) {
+    case kI420:
+    case kNV12:
+    case kNV21:
+    case kIYUV:
+    case kYV12:
+      src_bits_per_pixel = 12;
+      break;
+    case kARGB4444:
+    case kRGB565:
+    case kARGB1555:
+    case kYUY2:
+    case kUYVY:
+      src_bits_per_pixel = 16;
+      break;
+    case kRGB24:
+      src_bits_per_pixel = 24;
+      break;
+    case kBGRA:
+    case kARGB:
+      src_bits_per_pixel = 32;
+      break;
+    default:
+      assert(false);
+      return -1;
+  }
+
+  int dst_bits_per_pixel = 32;
+  switch (dst_video_type) {
+    case kI420:
+    // kNV12/kNV21 added for consistency with the source switch above; both
+    // are 12 bpp and previously fell through to the assert below.
+    case kNV12:
+    case kNV21:
+    case kIYUV:
+    case kYV12:
+      dst_bits_per_pixel = 12;
+      break;
+    case kARGB4444:
+    case kRGB565:
+    case kARGB1555:
+    case kYUY2:
+    case kUYVY:
+      dst_bits_per_pixel = 16;
+      break;
+    case kRGB24:
+      dst_bits_per_pixel = 24;
+      break;
+    case kBGRA:
+    case kARGB:
+      dst_bits_per_pixel = 32;
+      break;
+    default:
+      assert(false);
+      return -1;
+  }
+  return (length * dst_bits_per_pixel) / src_bits_per_pixel;
+}
+
+// Converts a contiguous I420 frame to ARGB4444 (16 bpp). A |dst_stride| of 0
+// or equal to |width| selects the packed default of 2 * width bytes per row;
+// any other stride smaller than 2 * width is rejected to avoid writing past
+// the end of each destination row (mirrors ConvertI420ToARGB1555).
+// Returns 0 on success, < 0 otherwise.
+int ConvertI420ToARGB4444(const uint8_t* src_frame,
+                          uint8_t* dst_frame,
+                          int width, int height,
+                          int dst_stride) {
+  if (dst_stride == 0 || dst_stride == width)
+    dst_stride = 2 * width;
+  else if (dst_stride < 2 * width)
+    return -1;
+
+  const uint8_t* yplane = src_frame;
+  const uint8_t* uplane = src_frame + width * height;
+  const uint8_t* vplane = uplane + (width * height / 4);
+
+  return libyuv::I420ToARGB4444(yplane, width,
+                                uplane, width / 2,
+                                vplane, width / 2,
+                                dst_frame, dst_stride,
+                                width, height);
+}
+
+// Converts a contiguous I420 frame to RGB565. A negative |height| is allowed
+// (libyuv's convention for a vertically flipped frame); plane offsets are
+// computed from the absolute height. Returns 0 on success, < 0 otherwise.
+int ConvertI420ToRGB565(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height) {
+  int abs_height = (height < 0) ? -height : height;
+  const int y_size = width * abs_height;
+  const uint8_t* y_plane = src_frame;
+  const uint8_t* u_plane = y_plane + y_size;
+  const uint8_t* v_plane = u_plane + y_size / 4;
+  return libyuv::I420ToRGB565(y_plane, width,
+                              u_plane, width / 2,
+                              v_plane, width / 2,
+                              dst_frame, width,
+                              width, height);
+}
+
+// Converts a contiguous I420 frame to ARGB1555 (16 bpp). A |dst_stride| of 0
+// or equal to |width| means the packed default of 2 * width bytes per row;
+// any other stride below 2 * width is rejected.
+// Returns 0 on success, < 0 otherwise.
+int ConvertI420ToARGB1555(const uint8_t* src_frame,
+                          uint8_t* dst_frame,
+                          int width, int height,
+                          int dst_stride) {
+  if (dst_stride == 0 || dst_stride == width)
+    dst_stride = 2 * width;
+  else if (dst_stride < 2 * width)
+    return -1;
+
+  const int y_size = width * height;
+  const uint8_t* y_plane = src_frame;
+  const uint8_t* u_plane = y_plane + y_size;
+  const uint8_t* v_plane = u_plane + y_size / 4;
+  return libyuv::I420ToARGB1555(y_plane, width,
+                                u_plane, width / 2,
+                                v_plane, width / 2,
+                                dst_frame, dst_stride,
+                                width, height);
+}
+
+// Converts an NV12 frame (Y plane followed by interleaved UV) to RGB565.
+// Returns 0 on success, < 0 otherwise.
+int ConvertNV12ToRGB565(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height) {
+  const uint8_t* yplane = src_frame;
+  const uint8_t* uvInterlaced = src_frame + (width * height);
+
+  // NOTE(review): NV12's UV plane is interleaved, so its stride is normally
+  // |width| bytes rather than width / 2 -- confirm against libyuv's
+  // NV12ToRGB565 contract before relying on odd/narrow frames.
+  return libyuv::NV12ToRGB565(yplane, width,
+                              uvInterlaced, width / 2,
+                              dst_frame, width,
+                              width, height);
+}
+
+// Converts an RGB24 frame to ARGB. Returns 0 on success, < 0 otherwise.
+// NOTE(review): strides here follow the "webrtc style" of pixels per row;
+// libyuv::RGB24ToARGB documents its stride parameters in bytes (RGB24 rows
+// are 3 * width bytes, ARGB rows 4 * width) -- verify before changing.
+int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
+                       int width, int height, int dst_stride) {
+  if (dst_stride == 0 || dst_stride == width)
+    dst_stride = width;
+  // Stride - currently webrtc style
+  return libyuv::RGB24ToARGB(src_frame, width,
+                             dst_frame, dst_stride,
+                             width, height);
+}
+
+// Maps the local VideoRotationMode enum onto libyuv::RotationMode.
+// Asserts in debug builds and falls back to kRotate0 for unknown values.
+libyuv::RotationMode ConvertRotationMode(VideoRotationMode rotation) {
+  switch(rotation) {
+    case kRotateNone:
+      return libyuv::kRotate0;
+    case kRotate90:
+      return libyuv::kRotate90;
+    case kRotate180:
+      return libyuv::kRotate180;
+    case kRotate270:
+      return libyuv::kRotate270;
+  }
+  assert(false);
+  return libyuv::kRotate0;
+}
+
+// Maps the local VideoType enum onto a libyuv FOURCC value. Formats without
+// a libyuv FOURCC (kARGB4444, kRGB565, kARGB1555) assert and map to
+// FOURCC_ANY.
+int ConvertVideoType(VideoType video_type) {
+  switch(video_type) {
+    case kUnknown:
+      return libyuv::FOURCC_ANY;
+    case  kI420:
+      return libyuv::FOURCC_I420;
+    case kIYUV:  // same as KYV12
+    case kYV12:
+      return libyuv::FOURCC_YV12;
+    case kRGB24:
+      return libyuv::FOURCC_24BG;
+    case kABGR:
+      return libyuv::FOURCC_ABGR;
+    case kARGB4444:
+    case kRGB565:
+    case kARGB1555:
+      // TODO(mikhal): Not supported;
+      assert(false);
+      return libyuv::FOURCC_ANY;
+    case kYUY2:
+      return libyuv::FOURCC_YUY2;
+    case kUYVY:
+      return libyuv::FOURCC_UYVY;
+    case kMJPG:
+      return libyuv::FOURCC_MJPG;
+    case kNV21:
+      return libyuv::FOURCC_NV21;
+    case kNV12:
+      return libyuv::FOURCC_NV12;
+    case kARGB:
+      return libyuv::FOURCC_ARGB;
+    case kBGRA:
+      return libyuv::FOURCC_BGRA;
+  }
+  assert(false);
+  return libyuv::FOURCC_ANY;
+}
+
+// Converts (and optionally crops/rotates) a |src_video_type| frame into a
+// contiguous I420 frame in |dst_frame|. The Y plane uses |dst_stride|; each
+// chroma plane uses (dst_stride + 1) / 2.
+// Returns 0 on success, < 0 otherwise.
+int ConvertToI420(VideoType src_video_type,
+                  const uint8_t* src_frame,
+                  int crop_x, int crop_y,
+                  int src_width, int src_height,
+                  int sample_size,
+                  int dst_width, int dst_height, int dst_stride,
+                  VideoRotationMode rotation,
+                  uint8_t* dst_frame) {
+  // All sanity tests are conducted within LibYuv.
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_yplane + dst_width * dst_height;
+  uint8_t* dst_vplane = dst_uplane + (dst_width * dst_height / 4);
+  return libyuv::ConvertToI420(src_frame, sample_size,
+                               dst_yplane, dst_stride,
+                               dst_uplane, (dst_stride + 1) / 2,
+                               dst_vplane, (dst_stride + 1) / 2,
+                               crop_x, crop_y,
+                               src_width, src_height,
+                               dst_width, dst_height,
+                               ConvertRotationMode(rotation),
+                               ConvertVideoType(src_video_type));
+}
+
+// Converts a contiguous I420 frame into |dst_video_type|, writing the result
+// to |dst_frame|. Chroma planes are read with a stride of
+// (src_stride + 1) / 2. Returns 0 on success, < 0 otherwise.
+int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame) {
+  const int y_size = width * height;
+  const int uv_stride = (src_stride + 1) / 2;
+  const uint8_t* y_plane = src_frame;
+  const uint8_t* u_plane = y_plane + y_size;
+  const uint8_t* v_plane = u_plane + y_size / 4;
+  return libyuv::ConvertFromI420(y_plane, src_stride,
+                                 u_plane, uv_stride,
+                                 v_plane, uv_stride,
+                                 dst_frame, dst_sample_size,
+                                 width, height,
+                                 ConvertVideoType(dst_video_type));
+}
+
+// Converts a contiguous YV12 frame (plane order Y, V, U) into
+// |dst_video_type|. Because YV12 stores V before U, |src_uplane| below
+// actually addresses the V data and |src_vplane| the U data; they are passed
+// to libyuv::ConvertFromI420 in swapped positions to compensate.
+// Returns 0 on success, < 0 otherwise.
+int ConvertFromYV12(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame) {
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_yplane + width * height;
+  const uint8_t* src_vplane = src_uplane + (width * height / 4);
+  // YV12 = Y, V, U
+  return libyuv::ConvertFromI420(src_yplane, src_stride,
+                                 src_vplane, (src_stride + 1) / 2,
+                                 src_uplane, (src_stride + 1) / 2,
+                                 dst_frame, dst_sample_size,
+                                 width, height,
+                                 ConvertVideoType(dst_video_type));
+}
+
+// Horizontally mirrors a contiguous I420 frame from |src_frame| into
+// |dst_frame|. Returns 0 on success, < 0 otherwise.
+int MirrorI420LeftRight(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height) {
+  const int y_size = width * height;
+  const int uv_size = y_size / 4;
+  const int uv_stride = width / 2;
+  const uint8_t* src_y = src_frame;
+  const uint8_t* src_u = src_y + y_size;
+  const uint8_t* src_v = src_u + uv_size;
+  uint8_t* dst_y = dst_frame;
+  uint8_t* dst_u = dst_y + y_size;
+  uint8_t* dst_v = dst_u + uv_size;
+  return libyuv::I420Mirror(src_y, width,
+                            src_u, uv_stride,
+                            src_v, uv_stride,
+                            dst_y, width,
+                            dst_u, uv_stride,
+                            dst_v, uv_stride,
+                            width, height);
+}
+
+// Vertically mirrors a contiguous I420 frame from |src_frame| into
+// |dst_frame| using libyuv::I420Copy with a negative height (libyuv's
+// flip convention). Returns 0 on success, < 0 otherwise.
+int MirrorI420UpDown(const uint8_t* src_frame, uint8_t* dst_frame,
+                     int width, int height) {
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_frame + width * height;
+  const uint8_t* src_vplane = src_uplane + (width * height / 4);
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_frame + width * height;
+  uint8_t* dst_vplane = dst_uplane + (width * height / 4);
+
+  // Inserting negative height flips the frame.
+  return libyuv::I420Copy(src_yplane, width,
+                          src_uplane, width / 2,
+                          src_vplane, width / 2,
+                          dst_yplane, width,
+                          dst_uplane, width / 2,
+                          dst_vplane, width / 2,
+                          width, -height);
+}
+
+// Vertically mirrors |src_frame| (kI420 or kYV12 sources only) while writing
+// it out as I420, via libyuv::I420Copy with a negative height. For a kYV12
+// source the destination chroma pointers are swapped so that the V data
+// (stored second in YV12) ends up in I420's third plane and the U data
+// (stored third) in the second plane.
+// Returns 0 on success, -1 for unsupported source types.
+int ConvertToI420AndMirrorUpDown(const uint8_t* src_frame,
+                                 uint8_t* dst_frame,
+                                 int src_width, int src_height,
+                                 VideoType src_video_type) {
+  if (src_video_type != kI420 && src_video_type != kYV12)
+    return -1;
+  // TODO(mikhal): Use a more general convert function - with negative height
+  // Inserting negative height flips the frame.
+  // Using I420Copy with a negative height.
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_frame + src_width * src_height;
+  const uint8_t* src_vplane = src_uplane + (src_width * src_height / 4);
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_frame + src_width * src_height;
+  uint8_t* dst_vplane = dst_uplane + (src_width * src_height / 4);
+  if (src_video_type == kYV12) {
+    // Switch U and V
+    dst_vplane = dst_frame + src_width * src_height;
+    dst_uplane = dst_vplane + (src_width * src_height / 4);
+  }
+  // Inserting negative height flips the frame.
+  return libyuv::I420Copy(src_yplane, src_width,
+                          src_uplane, src_width / 2,
+                          src_vplane, src_width / 2,
+                          dst_yplane, src_width,
+                          dst_uplane, src_width / 2,
+                          dst_vplane, src_width / 2,
+                          src_width, -src_height);
+}
+
+// Compute PSNR for an I420 frame (all planes)
+// Returns the aggregate PSNR in dB between |ref_frame| and |test_frame|
+// (contiguous I420 buffers), capped at 48.0, or -1 on invalid arguments.
+double I420PSNR(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height) {
+  if (!ref_frame || !test_frame)
+    return -1;
+  else if (height < 0 || width < 0)
+    return -1;
+  const uint8_t* src_y_a = ref_frame;
+  const uint8_t* src_u_a = src_y_a + width * height;
+  const uint8_t* src_v_a = src_u_a + (width * height / 4);
+  const uint8_t* src_y_b = test_frame;
+  const uint8_t* src_u_b = src_y_b + width * height;
+  const uint8_t* src_v_b = src_u_b + (width * height / 4);
+  int stride_y = width;
+  int stride_uv = (width + 1) / 2;
+  // NOTE(review): the plane offsets assume even dimensions (width*height/4)
+  // while stride_uv rounds up for odd widths -- confirm callers only pass
+  // even-sized frames.
+  double psnr = libyuv::I420Psnr(src_y_a, stride_y,
+                                 src_u_a, stride_uv,
+                                 src_v_a, stride_uv,
+                                 src_y_b, stride_y,
+                                 src_u_b, stride_uv,
+                                 src_v_b, stride_uv,
+                                 width, height);
+  // LibYuv sets the max psnr value to 128, we restrict it to 48.
+  // In case of 0 mse in one frame, 128 can skew the results significantly.
+  return (psnr > 48.0) ? 48.0 : psnr;
+}
+// Compute SSIM for an I420 frame (all planes).
+// Returns the SSIM score between |ref_frame| and |test_frame| (contiguous
+// I420 buffers), or -1 on invalid arguments.
+double I420SSIM(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height) {
+  if (!ref_frame || !test_frame || width < 0 || height < 0)
+    return -1;
+  const int y_size = width * height;
+  const int stride_y = width;
+  const int stride_uv = (width + 1) / 2;
+  const uint8_t* ref_y = ref_frame;
+  const uint8_t* ref_u = ref_y + y_size;
+  const uint8_t* ref_v = ref_u + y_size / 4;
+  const uint8_t* test_y = test_frame;
+  const uint8_t* test_u = test_y + y_size;
+  const uint8_t* test_v = test_u + y_size / 4;
+  return libyuv::I420Ssim(ref_y, stride_y,
+                          ref_u, stride_uv,
+                          ref_v, stride_uv,
+                          test_y, stride_y,
+                          test_u, stride_uv,
+                          test_v, stride_uv,
+                          width, height);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/common_video/libyuv/libyuv.gypi b/trunk/src/common_video/libyuv/libyuv.gypi
new file mode 100644
index 0000000..bace5a2
--- /dev/null
+++ b/trunk/src/common_video/libyuv/libyuv.gypi
@@ -0,0 +1,55 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_libyuv',
+      'type': '<(library)',
+      'conditions': [
+        ['build_libyuv==1', {
+          'dependencies': [
+            '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv'
+          ],
+        }, {
+          # Need to add a directory normally exported by libyuv.gyp.
+          'include_dirs': [ '<(DEPTH)/third_party/libyuv/include', ],
+        }],
+      ],
+      'sources': [
+        'include/libyuv.h',
+        'include/scaler.h',
+        'libyuv.cc',
+        'scaler.cc',
+      ],
+      'include_dirs': [
+        '<(DEPTH)',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'libyuv_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'webrtc_libyuv',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'libyuv_unittest.cc',
+            'scaler_unittest.cc', 
+          ], 
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
diff --git a/trunk/src/common_video/libyuv/libyuv_unittest.cc b/trunk/src/common_video/libyuv/libyuv_unittest.cc
new file mode 100644
index 0000000..07225ec
--- /dev/null
+++ b/trunk/src/common_video/libyuv/libyuv_unittest.cc
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <string.h>
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "gtest/gtest.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+int PrintFrame(const uint8_t* frame, int width, int height) {
+  if (frame == NULL)
+    return -1;
+  int k = 0;
+  for (int i = 0; i < height; i++) {
+    for (int j = 0; j < width; j++) {
+      printf("%d ", frame[k++]);
+    }
+    printf(" \n");
+  }
+  printf(" \n");
+  return 0;
+}
+
+int PrintFrame(const uint8_t* frame, int width,
+                int height, const char* str) {
+  if (frame == NULL)
+     return -1;
+  printf("%s %dx%d \n", str, width, height);
+
+  const uint8_t* frame_y = frame;
+  const uint8_t* frame_u = frame_y + width * height;
+  const uint8_t* frame_v = frame_u + width * height / 4;
+
+  int ret = 0;
+  ret += PrintFrame(frame_y, width, height);
+  ret += PrintFrame(frame_u, width / 2, height / 2);
+  ret += PrintFrame(frame_v, width / 2, height / 2);
+
+  return ret;
+}
+
+void CreateImage(int width, int height,
+                 uint8_t* frame, int offset,
+                 int height_factor, int width_factor) {
+  if (frame == NULL)
+    return;
+  for (int i = 0; i < height; i++) {
+    for (int j = 0; j < width; j++) {
+      *frame = static_cast<uint8_t>((i + offset) * height_factor
+                                     + j * width_factor);
+      frame++;
+    }
+  }
+}
+
+class TestLibYuv : public ::testing::Test {
+ protected:
+  TestLibYuv();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  FILE* source_file_;
+  const int width_;
+  const int height_;
+  const int frame_length_;
+};
+
+// TODO (mikhal): Use scoped_ptr when handling buffers.
+TestLibYuv::TestLibYuv()
+    : source_file_(NULL),
+      width_(352),
+      height_(288),
+      frame_length_(CalcBufferSize(kI420, 352, 288)) {
+}
+
+void TestLibYuv::SetUp() {
+  const std::string input_file_name = webrtc::test::ProjectRootPath() +
+                                      "resources/foreman_cif.yuv";
+  source_file_  = fopen(input_file_name.c_str(), "rb");
+  ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
+                                       input_file_name << "\n";
+}
+
+void TestLibYuv::TearDown() {
+  if (source_file_ != NULL) {
+    ASSERT_EQ(0, fclose(source_file_));
+  }
+  source_file_ = NULL;
+}
+
+TEST_F(TestLibYuv, ConvertSanityTest) {
+  // TODO(mikhal)
+}
+
+TEST_F(TestLibYuv, ConvertTest) {
+  // Reading YUV frame - testing on the first frame of the foreman sequence
+  int j = 0;
+  std::string output_file_name = webrtc::test::OutputPath() +
+                                 "LibYuvTest_conversion.yuv";
+  FILE*  output_file = fopen(output_file_name.c_str(), "wb");
+  ASSERT_TRUE(output_file != NULL);
+
+  double psnr = 0;
+
+  uint8_t* orig_buffer = new uint8_t[frame_length_];
+  EXPECT_GT(fread(orig_buffer, 1, frame_length_, source_file_), 0U);
+
+  // printf("\nConvert #%d I420 <-> RGB24\n", j);
+  uint8_t* res_rgb_buffer2  = new uint8_t[width_ * height_ * 3];
+  uint8_t* res_i420_buffer = new uint8_t[frame_length_];
+
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kRGB24, 0,
+                               width_, height_, res_rgb_buffer2));
+
+  EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_,
+                             0, width_, height_, width_, kRotateNone,
+                             res_i420_buffer));
+
+  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  // Optimization speed-quality trade-off => 45 dB only (platform dependent).
+  EXPECT_GT(ceil(psnr), 44);
+  j++;
+  delete [] res_rgb_buffer2;
+
+  // printf("\nConvert #%d I420 <-> UYVY\n", j);
+  uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
+                               kUYVY, 0, width_, height_, out_uyvy_buffer));
+  EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_,
+            0, width_, height_, width_,kRotateNone, res_i420_buffer));
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+
+  j++;
+  delete [] out_uyvy_buffer;
+
+  // printf("\nConvert #%d I420 <-> I420 \n", j);
+  uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ];
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
+                             0, width_, height_, width_,
+                             kRotateNone, out_i420_buffer));
+  EXPECT_EQ(0, ConvertFromI420(out_i420_buffer, width_, kI420, 0,
+                               width_, height_, res_i420_buffer));
+  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  j++;
+  delete [] out_i420_buffer;
+
+  // printf("\nConvert #%d I420 <-> YV12\n", j);
+  uint8_t* outYV120Buffer = new uint8_t[frame_length_];
+
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kYV12, 0,
+                               width_, height_, outYV120Buffer));
+  EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_,
+                               kI420, 0,
+                               width_, height_,
+                               res_i420_buffer));
+  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  j++;
+  delete [] outYV120Buffer;
+
+  // printf("\nConvert #%d I420 <-> YUY2\n", j);
+  uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
+                               kYUY2, 0, width_, height_, out_yuy2_buffer));
+
+  EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_,
+                             0, width_, height_, width_,
+                             kRotateNone, res_i420_buffer));
+
+  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  ASSERT_EQ(0, fclose(output_file));
+  delete [] out_yuy2_buffer;
+  delete [] res_i420_buffer;
+  delete [] orig_buffer;
+}
+
+TEST_F(TestLibYuv, MirrorTest) {
+  // TODO(mikhal): Add an automated test to confirm output.
+  std::string str;
+  int width = 16;
+  int height = 8;
+  int factor_y = 1;
+  int factor_u = 1;
+  int factor_v = 1;
+  int start_buffer_offset = 10;
+  int length = webrtc::CalcBufferSize(kI420, width, height);
+
+  uint8_t* test_frame = new uint8_t[length];
+  memset(test_frame, 255, length);
+
+  // Create input frame
+  uint8_t* in_frame = test_frame;
+  uint8_t* in_frame_cb = in_frame + width * height;
+  uint8_t* in_frame_cr = in_frame_cb + (width * height) / 4;
+  CreateImage(width, height, in_frame, 10, factor_y, 1);  // Y
+  CreateImage(width / 2, height / 2, in_frame_cb, 100, factor_u, 1);  // Cb
+  CreateImage(width / 2, height / 2, in_frame_cr, 200, factor_v, 1);  // Cr
+  EXPECT_EQ(0, PrintFrame(test_frame, width, height, "InputFrame"));
+
+  uint8_t* test_frame2 = new uint8_t[length + start_buffer_offset * 2];
+  memset(test_frame2, 255, length + start_buffer_offset * 2);
+  uint8_t* out_frame = test_frame2;
+
+  // LeftRight
+  std::cout << "Test Mirror function: LeftRight" << std::endl;
+  EXPECT_EQ(0, MirrorI420LeftRight(in_frame, out_frame, width, height));
+  EXPECT_EQ(0, PrintFrame(test_frame2, width, height, "OutputFrame"));
+  EXPECT_EQ(0, MirrorI420LeftRight(out_frame, test_frame, width, height));
+
+  EXPECT_EQ(0, memcmp(in_frame, test_frame, length));
+
+  // UpDown
+  std::cout << "Test Mirror function: UpDown" << std::endl;
+  EXPECT_EQ(0, MirrorI420UpDown(in_frame, out_frame, width, height));
+  EXPECT_EQ(0, PrintFrame(test_frame2, width, height, "OutputFrame"));
+  EXPECT_EQ(0, MirrorI420UpDown(out_frame, test_frame, width, height));
+
+  EXPECT_EQ(0, memcmp(in_frame, test_frame, length));
+
+  // TODO(mikhal): Write to a file, and ask to look at the file.
+
+  std::cout << "Do the mirrored frames look correct?" << std::endl;
+  delete [] test_frame;
+  delete [] test_frame2;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/common_video/libyuv/scaler.cc b/trunk/src/common_video/libyuv/scaler.cc
new file mode 100644
index 0000000..a12462c
--- /dev/null
+++ b/trunk/src/common_video/libyuv/scaler.cc
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/libyuv/include/scaler.h"
+
+// LibYuv
+#ifdef WEBRTC_ANDROID
+#include "libyuv/files/include/libyuv.h"
+#else
+#include "third_party/libyuv/include/libyuv.h"
+#endif
+
+namespace webrtc {
+
+Scaler::Scaler()
+    : method_(kScaleBox),
+      src_width_(0),
+      src_height_(0),
+      dst_width_(0),
+      dst_height_(0),
+      set_(false) {}
+
+Scaler::~Scaler() {}
+
+int Scaler::Set(int src_width, int src_height,
+                int dst_width, int dst_height,
+                VideoType src_video_type, VideoType dst_video_type,
+                ScaleMethod method) {
+  set_ = false;
+  if (src_width < 1 || src_height < 1 || dst_width < 1 || dst_height < 1)
+    return -1;
+
+  if (!SupportedVideoType(src_video_type, dst_video_type))
+    return -1;
+
+  src_width_ = src_width;
+  src_height_ = src_height;
+  dst_width_ = dst_width;
+  dst_height_ = dst_height;
+  method_ = method;
+  set_ = true;
+  return 0;
+}
+
+int Scaler::Scale(const uint8_t* src_frame,
+                  uint8_t*& dst_frame,
+                  int& dst_size) {
+  if (src_frame == NULL)
+    return -1;
+  if (!set_)
+    return -2;
+
+  // Making sure that destination frame is of sufficient size
+  int required_dst_size = dst_width_ * dst_height_ * 3 / 2;
+  if (dst_frame && required_dst_size > dst_size) {
+    // allocated buffer is too small
+    delete [] dst_frame;
+    dst_frame = NULL;
+  }
+  if (dst_frame == NULL) {
+    dst_frame = new uint8_t[required_dst_size];
+    dst_size = required_dst_size;
+  }
+
+  // Converting to planes:
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_frame + src_width_ * src_height_;
+  const uint8_t* src_vplane = src_uplane + src_width_ * src_height_ / 4;
+
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_frame + dst_width_ * dst_height_;
+  uint8_t* dst_vplane = dst_uplane + dst_width_ * dst_height_ / 4;
+
+  return libyuv::I420Scale(src_yplane, src_width_,
+                           src_uplane, src_width_ / 2,
+                           src_vplane, src_width_ / 2,
+                           src_width_, src_height_,
+                           dst_yplane, dst_width_,
+                           dst_uplane, dst_width_ / 2,
+                           dst_vplane, dst_width_ / 2,
+                           dst_width_, dst_height_,
+                           libyuv::FilterMode(method_));
+}
+
+// TODO(mikhal): Add support for more types.
+bool Scaler::SupportedVideoType(VideoType src_video_type,
+                                VideoType dst_video_type) {
+  if (src_video_type != dst_video_type)
+    return false;
+
+  if ((src_video_type == kI420) || (src_video_type == kIYUV) ||
+      (src_video_type == kYV12))
+    return true;
+
+  return false;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/common_video/libyuv/scaler_unittest.cc b/trunk/src/common_video/libyuv/scaler_unittest.cc
new file mode 100644
index 0000000..899ad89
--- /dev/null
+++ b/trunk/src/common_video/libyuv/scaler_unittest.cc
@@ -0,0 +1,239 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <string.h>
+
+#include "common_video/libyuv/include/scaler.h"
+#include "gtest/gtest.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+class TestScaler : public ::testing::Test {
+ protected:
+  TestScaler();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  void ScaleSequence(ScaleMethod method,
+                     FILE* source_file, std::string out_name,
+                     int src_width, int src_height,
+                     int dst_width, int dst_height);
+
+  Scaler test_scaler_;
+  FILE* source_file_;
+  uint8_t* test_buffer_;
+  const int width_;
+  const int height_;
+  const int frame_length_;
+};
+
+
+// TODO (mikhal): Use scoped_ptr when handling buffers.
+TestScaler::TestScaler()
+    : source_file_(NULL),
+      width_(352),
+      height_(288),
+      frame_length_(CalcBufferSize(kI420, 352, 288)) {
+}
+
+void TestScaler::SetUp() {
+  const std::string input_file_name =
+      webrtc::test::ResourcePath("foreman_cif", "yuv");
+  source_file_  = fopen(input_file_name.c_str(), "rb");
+  ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
+                                       input_file_name << "\n";
+  test_buffer_ = new uint8_t[frame_length_];
+}
+
+void TestScaler::TearDown() {
+  if (source_file_ != NULL) {
+    ASSERT_EQ(0, fclose(source_file_));
+  }
+  source_file_ = NULL;
+  delete [] test_buffer_;
+}
+
+TEST_F(TestScaler, ScaleWithoutSettingValues) {
+  int size = 100;
+  EXPECT_EQ(-2, test_scaler_.Scale(test_buffer_, test_buffer_, size));
+}
+
+TEST_F(TestScaler, ScaleBadInitialValues) {
+  EXPECT_EQ(-1, test_scaler_.Set(0, 288, 352, 288, kI420, kI420, kScalePoint));
+  EXPECT_EQ(-1, test_scaler_.Set(704, 0, 352, 288, kI420, kI420, kScaleBox));
+  EXPECT_EQ(-1, test_scaler_.Set(704, 576, 352, 0, kI420, kI420,
+                                 kScaleBilinear));
+  EXPECT_EQ(-1, test_scaler_.Set(704, 576, 0, 288, kI420, kI420, kScalePoint));
+}
+
+TEST_F(TestScaler, ScaleSendingNullSourcePointer) {
+  int size = 0;
+  EXPECT_EQ(-1, test_scaler_.Scale(NULL, test_buffer_, size));
+}
+
+TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
+  // Sending a buffer which is too small (should reallocate and update size)
+  EXPECT_EQ(0, test_scaler_.Set(352, 288, 144, 288, kI420, kI420, kScalePoint));
+  uint8_t* test_buffer2 = NULL;
+  int size = 0;
+  EXPECT_GT(fread(test_buffer_, 1, frame_length_, source_file_), 0U);
+  EXPECT_EQ(0, test_scaler_.Scale(test_buffer_, test_buffer2, size));
+  EXPECT_EQ(144 * 288 * 3 / 2, size);
+  delete [] test_buffer2;
+}
+
+// TODO(mikhal): Converge the test into one function that accepts the method.
+TEST_F(TestScaler, PointScaleTest) {
+  ScaleMethod method = kScalePoint;
+  std::string out_name = webrtc::test::OutputPath() +
+                         "LibYuvTest_PointScale_176_144.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ / 2, height_ / 2);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_320_240.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                320, 240);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_704_576.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ * 2, height_ * 2);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_300_200.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                300, 200);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_400_300.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                400, 300);
+}
+
+TEST_F(TestScaler, BiLinearScaleTest) {
+  ScaleMethod method = kScaleBilinear;
+  std::string out_name = webrtc::test::OutputPath() +
+                         "LibYuvTest_BilinearScale_176_144.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ / 2, height_ / 2);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_320_240.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                320, 240);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_704_576.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ * 2, height_ * 2);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_300_200.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                300, 200);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_400_300.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                400, 300);
+}
+
+TEST_F(TestScaler, BoxScaleTest) {
+  ScaleMethod method = kScaleBox;
+  std::string out_name = webrtc::test::OutputPath() +
+                         "LibYuvTest_BoxScale_176_144.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ / 2, height_ / 2);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_320_240.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                320, 240);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_704_576.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ * 2, height_ * 2);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_300_200.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                300, 200);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_400_300.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                400, 300);
+}
+
+// TODO (mikhal): Move part to a separate scale test.
+void TestScaler::ScaleSequence(ScaleMethod method,
+                   FILE* source_file, std::string out_name,
+                   int src_width, int src_height,
+                   int dst_width, int dst_height) {
+  FILE* output_file;
+  EXPECT_EQ(0, test_scaler_.Set(src_width, src_height,
+                               dst_width, dst_height,
+                               kI420, kI420, method));
+
+  output_file = fopen(out_name.c_str(), "wb");
+  ASSERT_TRUE(output_file != NULL);
+
+  rewind(source_file);
+
+  int out_required_size = dst_width * dst_height * 3 / 2;
+  int in_required_size = src_height * src_width * 3 / 2;
+  uint8_t* input_buffer = new uint8_t[in_required_size];
+  uint8_t* output_buffer = new uint8_t[out_required_size];
+
+  int64_t start_clock, total_clock;
+  total_clock = 0;
+  int frame_count = 0;
+
+  // Running through entire sequence
+  while (feof(source_file) == 0) {
+      if ((size_t)in_required_size !=
+          fread(input_buffer, 1, in_required_size, source_file))
+        break;
+
+    start_clock = TickTime::MillisecondTimestamp();
+    EXPECT_EQ(0, test_scaler_.Scale(input_buffer, output_buffer,
+                                   out_required_size));
+    total_clock += TickTime::MillisecondTimestamp() - start_clock;
+    fwrite(output_buffer, out_required_size, 1, output_file);
+    frame_count++;
+  }
+
+  if (frame_count) {
+    printf("Scaling[%d %d] => [%d %d]: ",
+           src_width, src_height, dst_width, dst_height);
+    printf("Average time per frame[ms]: %.2lf\n",
+             (static_cast<double>(total_clock) / frame_count));
+  }
+  ASSERT_EQ(0, fclose(output_file));
+  delete [] input_buffer;
+  delete [] output_buffer;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/engine_configurations.h b/trunk/src/engine_configurations.h
new file mode 100644
index 0000000..49a2f8c
--- /dev/null
+++ b/trunk/src/engine_configurations.h
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_ENGINE_CONFIGURATIONS_H_
+#define WEBRTC_ENGINE_CONFIGURATIONS_H_
+
+// ============================================================================
+//                              Voice and Video
+// ============================================================================
+
+// Don't link in socket support in Chrome
+#ifdef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_EXTERNAL_TRANSPORT
+#endif
+
+// Optional to enable stand-alone
+// #define WEBRTC_EXTERNAL_TRANSPORT
+
+// ----------------------------------------------------------------------------
+//  [Voice] Codec settings
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_CODEC_ISAC       // floating-point iSAC implementation (default)
+// #define WEBRTC_CODEC_ISACFX  // fix-point iSAC implementation
+#define WEBRTC_CODEC_AVT
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_CODEC_ILBC
+#define WEBRTC_CODEC_G722
+#define WEBRTC_CODEC_PCM16
+#define WEBRTC_CODEC_RED
+#endif
+
+// ----------------------------------------------------------------------------
+//  [Video] Codec settings
+// ----------------------------------------------------------------------------
+
+#define VIDEOCODEC_I420
+#define VIDEOCODEC_VP8
+
+// ============================================================================
+//                                 VoiceEngine
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Settings for VoiceEngine
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VOICE_ENGINE_AGC                 // Near-end AGC
+#define WEBRTC_VOICE_ENGINE_ECHO                // Near-end AEC
+#define WEBRTC_VOICE_ENGINE_NR                  // Near-end NS
+#define WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_VOICE_ENGINE_TYPING_DETECTION    // Typing detection
+#endif
+
+// ----------------------------------------------------------------------------
+//  VoiceEngine sub-APIs
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+#define WEBRTC_VOICE_ENGINE_CODEC_API
+#define WEBRTC_VOICE_ENGINE_DTMF_API
+#define WEBRTC_VOICE_ENGINE_HARDWARE_API
+#define WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#define WEBRTC_VOICE_ENGINE_NETWORK_API
+#define WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+#define WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#define WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#define WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#define WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#define WEBRTC_VOICE_ENGINE_FILE_API
+#endif
+
+// ============================================================================
+//                                 VideoEngine
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Settings for special VideoEngine configurations
+// ----------------------------------------------------------------------------
+// ----------------------------------------------------------------------------
+//  VideoEngine sub-API:s
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VIDEO_ENGINE_CAPTURE_API
+#define WEBRTC_VIDEO_ENGINE_CODEC_API
+#define WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+#define WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+#define WEBRTC_VIDEO_ENGINE_NETWORK_API
+#define WEBRTC_VIDEO_ENGINE_RENDER_API
+#define WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+// #define WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_VIDEO_ENGINE_FILE_API
+#endif
+
+// ============================================================================
+//                       Platform specific configurations
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  VideoEngine Windows
+// ----------------------------------------------------------------------------
+
+#if defined(_WIN32)
+	// #define DIRECTDRAW_RENDERING
+	#define DIRECT3D9_RENDERING  // Requires DirectX 9.
+#endif 
+
+// ----------------------------------------------------------------------------
+//  VideoEngine MAC
+// ----------------------------------------------------------------------------
+
+#if defined(WEBRTC_MAC) && !defined(MAC_IPHONE)
+	// #define CARBON_RENDERING
+	#define COCOA_RENDERING
+#endif
+
+// ----------------------------------------------------------------------------
+//  VideoEngine Mobile iPhone
+// ----------------------------------------------------------------------------
+
+#if defined(MAC_IPHONE)
+    #define EAGL_RENDERING
+#endif
+
+// ----------------------------------------------------------------------------
+//  Deprecated
+// ----------------------------------------------------------------------------
+
+// #define WEBRTC_CODEC_G729
+// #define WEBRTC_DTMF_DETECTION
+// #define WEBRTC_SRTP
+// #define WEBRTC_SRTP_ALLOW_ROC_ITERATION
+
+#endif  // WEBRTC_ENGINE_CONFIGURATIONS_H_
diff --git a/trunk/src/modules/audio_coding/codecs/OWNERS b/trunk/src/modules/audio_coding/codecs/OWNERS
new file mode 100644
index 0000000..e1e6256
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/OWNERS
@@ -0,0 +1,3 @@
+tina.legrand@webrtc.org
+turaj@webrtc.org
+jan.skoglund@webrtc.org
diff --git a/trunk/src/modules/audio_coding/codecs/cng/Android.mk b/trunk/src/modules/audio_coding/codecs/cng/Android.mk
new file mode 100644
index 0000000..32671cc
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/Android.mk
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_cng
+LOCAL_MODULE_TAGS := optional
+LOCAL_GENERATED_SOURCES :=
+LOCAL_SRC_FILES := \
+    webrtc_cng.c \
+    cng_helpfuns.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../common_audio/signal_processing/include 
+
+LOCAL_SHARED_LIBRARIES := \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/codecs/cng/cng.gypi b/trunk/src/modules/audio_coding/codecs/cng/cng.gypi
new file mode 100644
index 0000000..2903cc2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/cng.gypi
@@ -0,0 +1,57 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'CNG',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/webrtc_cng.h',
+        'webrtc_cng.c',
+        'cng_helpfuns.c',
+        'cng_helpfuns.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'cng_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'CNG',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'cng_unittest.cc',
+          ],
+        }, # CNG_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/cng/cng_helpfuns.c b/trunk/src/modules/audio_coding/codecs/cng/cng_helpfuns.c
new file mode 100644
index 0000000..2e9029f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/cng_helpfuns.c
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "webrtc_cng.h"
+#include "signal_processing_library.h"
+#include "typedefs.h"
+#include "cng_helpfuns.h"
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+void WebRtcCng_K2a16( 
+    WebRtc_Word16 *k,           /* Q15.    */
+    int            useOrder,
+    WebRtc_Word16 *a            /* Q12.    */
+)
+{
+    WebRtc_Word16 any[WEBRTC_SPL_MAX_LPC_ORDER+1];
+    WebRtc_Word16 *aptr, *aptr2, *anyptr;
+    G_CONST WebRtc_Word16 *kptr;
+    int m, i;
+    
+    kptr = k;
+    *a   = 4096;  /* i.e., (Word16_MAX >> 3)+1 */
+     *any = *a;
+    a[1] = (*k+4) >> 3;
+    for( m=1; m<useOrder; m++ )
+    {
+        kptr++;
+        aptr = a;
+        aptr++;
+        aptr2 = &a[m];
+        anyptr = any;
+        anyptr++;
+
+        any[m+1] = (*kptr+4) >> 3;
+        for( i=0; i<m; i++ ) {
+            *anyptr++ = (*aptr++) + (WebRtc_Word16)( (( (WebRtc_Word32)(*aptr2--) * (WebRtc_Word32)*kptr )+16384) >> 15);
+        }
+
+        aptr   = a;
+        anyptr = any;
+        for( i=0; i<(m+2); i++ ){
+            *aptr++ = *anyptr++;
+        }
+    }
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/trunk/src/modules/audio_coding/codecs/cng/cng_helpfuns.h b/trunk/src/modules/audio_coding/codecs/cng/cng_helpfuns.h
new file mode 100644
index 0000000..fd8d6dc
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/cng_helpfuns.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
+
+extern WebRtc_Word32 lpc_lagwinTbl_fixw32[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+void WebRtcCng_K2a16(WebRtc_Word16 *k, int useOrder, WebRtc_Word16 *a);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
diff --git a/trunk/src/modules/audio_coding/codecs/cng/cng_unittest.cc b/trunk/src/modules/audio_coding/codecs/cng/cng_unittest.cc
new file mode 100644
index 0000000..6a4edc0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/cng_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "webrtc_cng.h"
+#include "gtest/gtest.h"
+
+TEST(CngTest, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/modules/audio_coding/codecs/cng/include/webrtc_cng.h b/trunk/src/modules/audio_coding/codecs/cng/include/webrtc_cng.h
new file mode 100644
index 0000000..d405e3a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/include/webrtc_cng.h
@@ -0,0 +1,236 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
+
+#include "typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBRTC_CNG_MAX_LPC_ORDER 12
+#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
+
+/* Define Error codes */
+
+/* 6100 Encoder */
+#define CNG_ENCODER_MEMORY_ALLOCATION_FAILED    6110
+#define CNG_ENCODER_NOT_INITIATED               6120
+#define CNG_DISALLOWED_LPC_ORDER                6130
+#define CNG_DISALLOWED_FRAME_SIZE               6140
+#define CNG_DISALLOWED_SAMPLING_FREQUENCY       6150
+/* 6200 Decoder */
+#define CNG_DECODER_MEMORY_ALLOCATION_FAILED    6210
+#define CNG_DECODER_NOT_INITIATED               6220
+
+
+typedef struct WebRtcCngEncInst         CNG_enc_inst;
+typedef struct WebRtcCngDecInst         CNG_dec_inst;
+
+
+/****************************************************************************
+ * WebRtcCng_Version(...)
+ *
+ * This function returns the version name (string must be at least
+ * 500 characters long)
+ *
+ * Output:
+ *    - version    : Pointer to character string
+ *
+ * Return value    :  0 - Ok
+ *                   -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_Version(char *version);
+
+/****************************************************************************
+ * WebRtcCng_AssignSizeEnc/Dec(...)
+ *
+ * These functions get the size needed for storing the instance for encoder
+ * and decoder, respectively
+ *
+ * Input/Output:
+ *    - sizeinbytes     : Pointer to integer where the size is returned
+ *
+ * Return value         :  0
+ */
+
+WebRtc_Word16 WebRtcCng_AssignSizeEnc(int *sizeinbytes);
+WebRtc_Word16 WebRtcCng_AssignSizeDec(int *sizeinbytes);
+
+
+/****************************************************************************
+ * WebRtcCng_AssignEnc/Dec(...)
+ *
+ * These functions assign memory for the instances.
+ *
+ * Input:
+ *    - CNG_inst_Addr :  Address where to assign the memory
+ * Output:
+ *    - inst          :  Pointer to the instance that should be created
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_AssignEnc(CNG_enc_inst **inst, void *CNG_inst_Addr);
+WebRtc_Word16 WebRtcCng_AssignDec(CNG_dec_inst **inst, void *CNG_inst_Addr);
+
+
+/****************************************************************************
+ * WebRtcCng_CreateEnc/Dec(...)
+ *
+ * These functions create an instance of the specified structure
+ *
+ * Input:
+ *    - XXX_inst      : Pointer to created instance that should be created
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_CreateEnc(CNG_enc_inst **cng_inst);
+WebRtc_Word16 WebRtcCng_CreateDec(CNG_dec_inst **cng_inst);
+
+
+/****************************************************************************
+ * WebRtcCng_InitEnc/Dec(...)
+ *
+ * This function initializes an instance
+ *
+ * Input:
+ *    - cng_inst      : Instance that should be initialized
+ *
+ *    - fs            : 8000 for narrowband and 16000 for wideband
+ *    - interval      : generate SID data every interval ms
+ *    - quality       : Number of refl. coefs, maximum allowed is 12
+ *
+ * Output:
+ *    - cng_inst      : Initialized instance
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_InitEnc(CNG_enc_inst *cng_inst,
+                                WebRtc_Word16 fs,
+                                WebRtc_Word16 interval,
+                                WebRtc_Word16 quality);
+WebRtc_Word16 WebRtcCng_InitDec(CNG_dec_inst *cng_dec_inst);
+
+ 
+/****************************************************************************
+ * WebRtcCng_FreeEnc/Dec(...)
+ *
+ * These functions free the dynamic memory of a specified instance
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+
+WebRtc_Word16 WebRtcCng_FreeEnc(CNG_enc_inst *cng_inst);
+WebRtc_Word16 WebRtcCng_FreeDec(CNG_dec_inst *cng_inst);
+
+
+
+/****************************************************************************
+ * WebRtcCng_Encode(...)
+ *
+ * This function analyzes background noise
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance
+ *    - speech        : Signal to be analyzed
+ *    - nrOfSamples   : Size of speech vector
+ *    - forceSID      : not zero to force SID frame and reset
+ *
+ * Output:
+ *    - bytesOut      : Nr of bytes to transmit, might be 0
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_Encode(CNG_enc_inst *cng_inst,
+                               WebRtc_Word16 *speech,
+                               WebRtc_Word16 nrOfSamples,
+                               WebRtc_UWord8* SIDdata,
+                               WebRtc_Word16 *bytesOut,
+                               WebRtc_Word16 forceSID);
+
+
+/****************************************************************************
+ * WebRtcCng_UpdateSid(...)
+ *
+ * This function updates the CN state when a new SID packet arrives
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - SID           : SID packet, all headers removed
+ *    - length        : Length in bytes of SID packet
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+WebRtc_Word16 WebRtcCng_UpdateSid(CNG_dec_inst *cng_inst,
+                                  WebRtc_UWord8 *SID,
+                                  WebRtc_Word16 length);
+
+
+/****************************************************************************
+ * WebRtcCng_Generate(...)
+ *
+ * This function generates CN data when needed
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - outData       : pointer to area to write CN data
+ *    - nrOfSamples   : How much data to generate
+ *    - new_period    : >0 if a new period of CNG, will reset history
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+WebRtc_Word16 WebRtcCng_Generate(CNG_dec_inst *cng_inst,
+                                 WebRtc_Word16 * outData,
+                                 WebRtc_Word16 nrOfSamples,
+                                 WebRtc_Word16 new_period);
+
+
+/*****************************************************************************
+ * WebRtcCng_GetErrorCodeEnc/Dec(...)
+ *
+ * These functions can be used to check the error code of a CNG instance. When
+ * a function returns -1, an error code will be set for that instance. The
+ * functions below extract the code of the last error that occurred in the
+ * specified instance.
+ *
+ * Input:
+ *    - CNG_inst    : CNG enc/dec instance
+ *
+ * Return value     : Error code
+ */
+
+WebRtc_Word16 WebRtcCng_GetErrorCodeEnc(CNG_enc_inst *cng_inst);
+WebRtc_Word16 WebRtcCng_GetErrorCodeDec(CNG_dec_inst *cng_inst);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
diff --git a/trunk/src/modules/audio_coding/codecs/cng/test/CNG.cc b/trunk/src/modules/audio_coding/codecs/cng/test/CNG.cc
new file mode 100644
index 0000000..b247831
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/test/CNG.cc
@@ -0,0 +1,225 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * CNG.cpp : Defines the entry point for the console application.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include "stdafx.h"
+#include "webrtc_cng.h"
+#include "webrtc_vad.h"
+
+CNG_enc_inst *e_inst; 
+CNG_dec_inst *d_inst;
+
+VadInst *vinst;
+//#define ASSIGN
+
+short anaSpeech[WEBRTC_CNG_MAX_OUTSIZE_ORDER], genSpeech[WEBRTC_CNG_MAX_OUTSIZE_ORDER], state[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+unsigned char SIDpkt[114];
+
+int main(int argc, char* argv[])
+{
+    FILE * infile, *outfile, *statefile;
+    short res=0,errtype;
+    /*float time=0.0;*/
+    
+    WebRtcVad_Create(&vinst);
+    WebRtcVad_Init(vinst);
+    
+    short size;
+    int samps=0;
+    
+    if (argc < 5){
+        printf("Usage:\n CNG.exe infile outfile samplingfreq(Hz) interval(ms) order\n\n");
+        return(0);
+    }
+    
+    infile=fopen(argv[1],"rb");
+    if (infile==NULL){
+        printf("file %s does not exist\n",argv[1]);
+        return(0);
+    }
+    outfile=fopen(argv[2],"wb");
+    statefile=fopen("CNGVAD.d","wb");
+    if (outfile==NULL){
+        printf("file %s could not be created\n",argv[2]);
+        return(0); 
+    }
+    
+    unsigned int fs=16000;
+    short frameLen=fs/50;
+    
+#ifndef ASSIGN
+    res=WebRtcCng_CreateEnc(&e_inst);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    res=WebRtcCng_CreateDec(&d_inst);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    
+#else
+    
+    // Test the Assign-functions
+    int Esize, Dsize;    
+    void *Eaddr, *Daddr;
+    
+    res=WebRtcCng_AssignSizeEnc(&Esize);
+    res=WebRtcCng_AssignSizeDec(&Dsize);
+    Eaddr=malloc(Esize);
+    Daddr=malloc(Dsize);
+    
+    res=WebRtcCng_AssignEnc(&e_inst, Eaddr);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    
+    res=WebRtcCng_AssignDec(&d_inst, Daddr);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    
+#endif
+    
+    res=WebRtcCng_InitEnc(e_inst,atoi(argv[3]),atoi(argv[4]),atoi(argv[5])); 
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    
+    res=WebRtcCng_InitDec(d_inst); 
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    
+    
+    static bool firstSilent=true;
+    
+    int numSamp=0;
+    int speech=0;
+    int silent=0;
+    long cnt=0;
+    
+    while(fread(anaSpeech,2,frameLen,infile)==frameLen){
+        
+        cnt++;
+        if (cnt==60){
+            cnt=60;
+        }
+        /*  time+=(float)frameLen/fs;
+        numSamp+=frameLen;
+        float temp[640];
+        for(unsigned int j=0;j<frameLen;j++)
+        temp[j]=(float)anaSpeech[j]; */
+        
+        //        if(!WebRtcVad_Process(vinst, fs, anaSpeech, frameLen)){
+        
+
+        if(1){ // Do CNG coding of entire file
+
+            //        if(!((anaSpeech[0]==0)&&(anaSpeech[1]==0)&&(anaSpeech[2]==0))){
+            if(firstSilent){
+                res = WebRtcCng_Encode(e_inst, anaSpeech, frameLen/2, SIDpkt,&size,1);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+                
+                
+                firstSilent=false;
+                
+                res=WebRtcCng_Encode(e_inst, &anaSpeech[frameLen/2], frameLen/2, SIDpkt,&size,1);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+                
+            }
+            else{
+                res=WebRtcCng_Encode(e_inst, anaSpeech, frameLen/2, SIDpkt,&size,0);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+                res=WebRtcCng_Encode(e_inst, &anaSpeech[frameLen/2], frameLen/2, SIDpkt,&size,0);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+            }
+            
+            if(size>0){
+                res=WebRtcCng_UpdateSid(d_inst,SIDpkt, size);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+                    fprintf(stderr,"\n\n Error in decoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+            }
+            res=WebRtcCng_Generate(d_inst,genSpeech, frameLen,0);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+                    fprintf(stderr,"\n\n Error in decoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+            memcpy(state,anaSpeech,2*frameLen);
+        }
+        else{
+            firstSilent=true;
+            memcpy(genSpeech,anaSpeech,2*frameLen);
+            
+            memset(anaSpeech,0,frameLen*2);
+            memset(state,0,frameLen*2);
+            
+        }
+        fwrite(genSpeech,2,frameLen,outfile);
+        fwrite(state,2,frameLen,statefile);
+        
+    }
+    
+    fclose(infile);
+    fclose(outfile);
+    fclose(statefile);
+    return 0;
+}   
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/cng/test/StdAfx.cc b/trunk/src/modules/audio_coding/codecs/cng/test/StdAfx.cc
new file mode 100644
index 0000000..995e510
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/test/StdAfx.cc
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.cpp : source file that includes just the standard includes
+//    CNG.pch will be the pre-compiled header
+//    stdafx.obj will contain the pre-compiled type information
+
+#include "stdafx.h"
+
+// TODO: reference any additional headers you need in STDAFX.H
+// and not in this file
diff --git a/trunk/src/modules/audio_coding/codecs/cng/test/StdAfx.h b/trunk/src/modules/audio_coding/codecs/cng/test/StdAfx.h
new file mode 100644
index 0000000..dd6c445
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/test/StdAfx.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.h : include file for standard system include files,
+//  or project specific include files that are used frequently, but
+//      are changed infrequently
+//
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif // _MSC_VER > 1000
+
+#define WIN32_LEAN_AND_MEAN        // Exclude rarely-used stuff from Windows headers
+
+#include <stdio.h>
+
+// TODO: reference additional headers your program requires here
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
diff --git a/trunk/src/modules/audio_coding/codecs/cng/webrtc_cng.c b/trunk/src/modules/audio_coding/codecs/cng/webrtc_cng.c
new file mode 100644
index 0000000..65f6672
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/cng/webrtc_cng.c
@@ -0,0 +1,732 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "webrtc_cng.h"
+#include "signal_processing_library.h"
+#include "cng_helpfuns.h"
+#include "stdio.h"
+
+
+typedef struct WebRtcCngDecInst_t_ {
+
+    WebRtc_UWord32 dec_seed;
+    WebRtc_Word32 dec_target_energy;
+    WebRtc_Word32 dec_used_energy;
+    WebRtc_Word16 dec_target_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_used_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_filtstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_Efiltstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_EfiltstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_order;
+    WebRtc_Word16 dec_target_scale_factor; /*Q29*/
+    WebRtc_Word16 dec_used_scale_factor;  /*Q29*/
+    WebRtc_Word16 target_scale_factor; /* Q13 */
+    WebRtc_Word16 errorcode;
+    WebRtc_Word16 initflag; 
+
+} WebRtcCngDecInst_t;
+
+
+typedef struct WebRtcCngEncInst_t_ {
+
+    WebRtc_Word16 enc_nrOfCoefs;
+    WebRtc_Word16 enc_sampfreq;
+    WebRtc_Word16 enc_interval;
+    WebRtc_Word16 enc_msSinceSID;
+    WebRtc_Word32 enc_Energy;
+    WebRtc_Word16 enc_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word32 enc_corrVector[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 enc_filtstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 enc_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_UWord32 enc_seed;    
+    WebRtc_Word16 errorcode;
+    WebRtc_Word16 initflag;
+
+} WebRtcCngEncInst_t;
+
+const WebRtc_Word32 WebRtcCng_kDbov[94]={
+    1081109975,  858756178,  682134279,  541838517,  430397633,  341876992,
+    271562548,  215709799,  171344384,  136103682,  108110997,   85875618,
+    68213428,   54183852,   43039763,   34187699,   27156255,   21570980,
+    17134438,   13610368,   10811100,    8587562,    6821343,    5418385,
+    4303976,    3418770,    2715625,    2157098,    1713444,    1361037,
+    1081110,     858756,     682134,     541839,     430398,     341877,
+    271563,     215710,     171344,     136104,     108111,      85876,
+    68213,      54184,      43040,      34188,      27156,      21571,
+    17134,      13610,      10811,       8588,       6821,       5418,
+    4304,       3419,       2716,       2157,       1713,       1361,
+    1081,        859,        682,        542,        430,        342,
+    272,        216,        171,        136,        108,         86, 
+    68,         54,         43,         34,         27,         22, 
+    17,         14,         11,          9,          7,          5, 
+    4,          3,          3,          2,          2,           1, 
+    1,          1,          1,          1
+};
+const WebRtc_Word16 WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
+    32702, 32636, 32570, 32505, 32439, 32374, 
+    32309, 32244, 32179, 32114, 32049, 31985
+}; 
+
+/****************************************************************************
+ * WebRtcCng_Version(...)
+ *
+ * This function returns the version name (string must be at least
+ * 500 characters long)
+ *
+ * Output:
+ *      - version       : Pointer to character string
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_Version(char *version)
+{
+    strcpy((char*)version,(const char*)"1.2.0\n");
+    return(0);
+}
+
+
+/****************************************************************************
+ * WebRtcCng_AssignSizeEnc/Dec(...)
+ *
+ * These functions get the size needed for storing the instance for encoder
+ * and decoder, respectively
+ *
+ * Input/Output:
+ *      - sizeinbytes   : Pointer to integer where the size is returned
+ *
+ * Return value         :  0
+ */
+
+WebRtc_Word16 WebRtcCng_AssignSizeEnc(int *sizeinbytes)
+{
+    *sizeinbytes=sizeof(WebRtcCngEncInst_t)*2/sizeof(WebRtc_Word16);
+    return(0);
+}
+
+WebRtc_Word16 WebRtcCng_AssignSizeDec(int *sizeinbytes)
+{
+    *sizeinbytes=sizeof(WebRtcCngDecInst_t)*2/sizeof(WebRtc_Word16);
+    return(0);
+}
+
+
+/****************************************************************************
+ * WebRtcCng_AssignEnc/Dec(...)
+ *
+ * These functions assign memory for the instances.
+ *
+ * Input:
+ *        - CNG_inst_Addr :  Address where to assign the memory
+ * Output:
+ *        - inst          :  Pointer to the instance that should be created
+ *
+ * Return value           :  0 - Ok
+ *                          -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_AssignEnc(CNG_enc_inst **inst, void *CNG_inst_Addr)
+{
+    if (CNG_inst_Addr!=NULL) {
+        *inst = (CNG_enc_inst*)CNG_inst_Addr;
+        (*(WebRtcCngEncInst_t**) inst)->errorcode = 0;
+        (*(WebRtcCngEncInst_t**) inst)->initflag = 0;
+        return(0);
+    } else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcCng_AssignDec(CNG_dec_inst **inst, void *CNG_inst_Addr)
+{
+    if (CNG_inst_Addr!=NULL) {
+        *inst = (CNG_dec_inst*)CNG_inst_Addr;
+        (*(WebRtcCngDecInst_t**) inst)->errorcode = 0;
+        (*(WebRtcCngDecInst_t**) inst)->initflag = 0;
+        return(0);
+    } else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcCng_CreateEnc/Dec(...)
+ *
+ * These functions create an instance of the specified structure
+ *
+ * Input:
+ *      - XXX_inst      : Pointer to created instance that should be created
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_CreateEnc(CNG_enc_inst **cng_inst)
+{
+    *cng_inst=(CNG_enc_inst*)malloc(sizeof(WebRtcCngEncInst_t));
+    /* Check the malloc result (*cng_inst), not the output argument itself. */
+    if(*cng_inst!=NULL) {
+        (*(WebRtcCngEncInst_t**) cng_inst)->errorcode = 0;
+        (*(WebRtcCngEncInst_t**) cng_inst)->initflag = 0;
+        return(0);
+    } else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcCng_CreateDec(CNG_dec_inst **cng_inst)
+{
+    *cng_inst=(CNG_dec_inst*)malloc(sizeof(WebRtcCngDecInst_t));
+    /* Check the malloc result (*cng_inst), not the output argument itself. */
+    if(*cng_inst!=NULL) {
+        (*(WebRtcCngDecInst_t**) cng_inst)->errorcode = 0;
+        (*(WebRtcCngDecInst_t**) cng_inst)->initflag = 0;
+        return(0);
+    } else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcCng_InitEnc/Dec(...)
+ *
+ * This function initializes an instance
+ *
+ * Input:
+ *    - cng_inst      : Instance that should be initialized
+ *
+ *    - fs            : 8000 for narrowband and 16000 for wideband
+ *    - interval      : generate SID data every interval ms
+ *    - quality       : TBD
+ *
+ * Output:
+ *    - cng_inst      : Initialized instance
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+
+WebRtc_Word16 WebRtcCng_InitEnc(CNG_enc_inst *cng_inst,
+                                WebRtc_Word16 fs,
+                                WebRtc_Word16 interval,
+                                WebRtc_Word16 quality)
+{
+    int i;
+
+    WebRtcCngEncInst_t* inst=(WebRtcCngEncInst_t*)cng_inst;
+
+    memset(inst, 0, sizeof(WebRtcCngEncInst_t));
+
+     /* Check LPC order */
+
+    if (quality>WEBRTC_CNG_MAX_LPC_ORDER) {
+        inst->errorcode = CNG_DISALLOWED_LPC_ORDER;
+        return (-1);
+    }
+
+    if (fs<=0) {
+        inst->errorcode = CNG_DISALLOWED_SAMPLING_FREQUENCY;
+        return (-1);
+    }
+
+    inst->enc_sampfreq=fs;
+    inst->enc_interval=interval;
+    inst->enc_nrOfCoefs=quality;
+    inst->enc_msSinceSID=0;
+    inst->enc_seed=7777; /*For debugging only*/
+    inst->enc_Energy=0;
+    for(i=0;i<(WEBRTC_CNG_MAX_LPC_ORDER+1);i++){
+        inst->enc_reflCoefs[i]=0;
+        inst->enc_corrVector[i]=0;
+    }
+    inst->initflag=1;
+
+    return(0);
+}
+
+WebRtc_Word16 WebRtcCng_InitDec(CNG_dec_inst *cng_inst)
+{
+    int i;
+
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+
+    memset(inst, 0, sizeof(WebRtcCngDecInst_t));
+    inst->dec_seed=7777; /*For debugging only*/
+    inst->dec_order=5;
+    inst->dec_target_scale_factor=0;
+    inst->dec_used_scale_factor=0;
+    for(i=0;i<(WEBRTC_CNG_MAX_LPC_ORDER+1);i++){
+        inst->dec_filtstate[i]=0;
+        inst->dec_target_reflCoefs[i]=0;
+        inst->dec_used_reflCoefs[i]=0;
+    }
+    inst->dec_target_reflCoefs[0]=0;
+    inst->dec_used_reflCoefs[0]=0;
+    inst ->dec_used_energy=0;
+    inst->initflag=1;
+
+    return(0);
+}
+
+/****************************************************************************
+ * WebRtcCng_FreeEnc/Dec(...)
+ *
+ * These functions free the dynamic memory of a specified instance
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+
+WebRtc_Word16 WebRtcCng_FreeEnc(CNG_enc_inst *cng_inst)
+{
+    free(cng_inst);
+    return(0);
+}
+
+WebRtc_Word16 WebRtcCng_FreeDec(CNG_dec_inst *cng_inst)
+{
+    free(cng_inst);
+    return(0);
+}
+
+
+
+/****************************************************************************
+ * WebRtcCng_Encode(...)
+ *
+ * This function analyzes background noise
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance
+ *    - speech        : Signal (noise) to be analyzed
+ *    - nrOfSamples   : Size of speech vector
+ *    - bytesOut      : Nr of bytes to transmit, might be 0
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+/* Analyze one frame of background noise and, when the SID interval has
+ * elapsed (or forceSID is set), quantize the smoothed energy and
+ * reflection coefficients into SIDdata. *bytesOut is 0 when no SID
+ * frame is produced by this call. Returns the number of SID bytes
+ * written (0 when none) or -1 on error (uninitialized instance or
+ * oversized frame). */
+WebRtc_Word16 WebRtcCng_Encode(CNG_enc_inst *cng_inst, 
+                               WebRtc_Word16 *speech,
+                               WebRtc_Word16 nrOfSamples,
+                               WebRtc_UWord8* SIDdata,
+                               WebRtc_Word16* bytesOut,
+                               WebRtc_Word16 forceSID)
+{
+    WebRtcCngEncInst_t* inst=(WebRtcCngEncInst_t*)cng_inst;
+
+    WebRtc_Word16 arCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word32 corrVector[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 refCs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 hanningW[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+    WebRtc_Word16 ReflBeta=19661; /*0.6 in q15*/
+    WebRtc_Word16 ReflBetaComp=13107; /*0.4 in q15*/ 
+    WebRtc_Word32 outEnergy;
+    int outShifts;
+    int i, stab;
+    int acorrScale;
+    int index;
+    WebRtc_Word16 ind,factor;
+    WebRtc_Word32 *bptr, blo, bhi;
+    WebRtc_Word16 negate;
+    const WebRtc_Word16 *aptr;
+
+    WebRtc_Word16 speechBuf[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+
+
+    /* check if encoder initiated */    
+    if (inst->initflag != 1) {
+        inst->errorcode = CNG_ENCODER_NOT_INITIATED;
+        return (-1);
+    }
+
+
+    /* check framesize */    
+    if (nrOfSamples>WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
+        inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
+        return (-1);
+    }
+
+
+    /* Work on a local copy: the buffer is windowed in place below and
+     * the caller's data must stay untouched. */
+    for(i=0;i<nrOfSamples;i++){
+        speechBuf[i]=speech[i];
+    }
+
+    factor=nrOfSamples;
+
+    /* Calculate energy and a coefficients */
+    outEnergy =WebRtcSpl_Energy(speechBuf, nrOfSamples, &outShifts);
+    /* Undo the scaling applied by WebRtcSpl_Energy: shift the energy
+     * back up by at most 5 steps, and absorb any remaining shifts by
+     * halving the sample-count divisor instead. */
+    while(outShifts>0){
+        if(outShifts>5){ /*We can only do 5 shifts without destroying accuracy in division factor*/
+            outEnergy<<=(outShifts-5);
+            outShifts=5;
+        }
+        else{
+            factor/=2;
+            outShifts--;
+        }
+    }
+    outEnergy=WebRtcSpl_DivW32W16(outEnergy,factor);
+
+    if (outEnergy > 1){
+        /* Create Hanning Window */
+        WebRtcSpl_GetHanningWindow(hanningW, nrOfSamples/2);
+        /* Mirror the half window to obtain a full symmetric window. */
+        for( i=0;i<(nrOfSamples/2);i++ )
+            hanningW[nrOfSamples-i-1]=hanningW[i];
+
+        WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, nrOfSamples, 14);
+
+        WebRtcSpl_AutoCorrelation( speechBuf, nrOfSamples, inst->enc_nrOfCoefs, corrVector, &acorrScale );
+
+        /* Guard: a zero lag-0 term would break the recursion below. */
+        if( *corrVector==0 )
+            *corrVector = WEBRTC_SPL_WORD16_MAX;
+
+        /* Adds the bandwidth expansion */
+        aptr = WebRtcCng_kCorrWindow;
+        bptr = corrVector;
+
+        // (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700) 
+        for( ind=0; ind<inst->enc_nrOfCoefs; ind++ )
+        {
+            // The below code multiplies the 16 b corrWindow values (Q15) with
+            // the 32 b corrvector (Q0) and shifts the result down 15 steps.
+                 
+            // Work on the magnitude; the sign is restored afterwards.
+            negate = *bptr<0;
+            if( negate )
+                *bptr = -*bptr;
+
+            // 16x32-bit multiply carried out in two 16-bit halves
+            // (blo = low product, bhi = high product plus carry).
+            blo = (WebRtc_Word32)*aptr * (*bptr & 0xffff);
+            bhi = ((blo >> 16) & 0xffff) + ((WebRtc_Word32)(*aptr++) * ((*bptr >> 16) & 0xffff));
+            blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);
+
+            *bptr = (( (bhi>>16) & 0x7fff) << 17) | ((WebRtc_UWord32)blo >> 15);
+            if( negate )
+                *bptr = -*bptr;
+            bptr++;
+        }
+
+        // end of bandwidth expansion
+
+        stab=WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs, inst->enc_nrOfCoefs);
+        
+        if(!stab){
+            // disregard from this frame
+            *bytesOut=0;
+            return(0);
+        }
+
+    }
+    else {
+        /* (Near-)silent frame: use all-zero reflection coefficients. */
+        for(i=0;i<inst->enc_nrOfCoefs; i++)
+            refCs[i]=0;
+    }
+
+    if(forceSID){
+        /*Read instantaneous values instead of averaged*/
+        for(i=0;i<inst->enc_nrOfCoefs;i++)
+            inst->enc_reflCoefs[i]=refCs[i];
+        inst->enc_Energy=outEnergy;
+    }
+    else{
+        /*Average history with new values*/
+        for(i=0;i<(inst->enc_nrOfCoefs);i++){
+            inst->enc_reflCoefs[i]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->enc_reflCoefs[i],ReflBeta,15);
+            inst->enc_reflCoefs[i]+=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(refCs[i],ReflBetaComp,15);
+        }
+        /* Energy smoothing: 0.25*new + (0.5+0.25)*old. */
+        inst->enc_Energy=(outEnergy>>2)+(inst->enc_Energy>>1)+(inst->enc_Energy>>2);
+    }
+
+
+    if(inst->enc_Energy<1){
+        inst->enc_Energy=1;
+    }
+
+    /* Emit a SID frame when the configured interval has elapsed or when
+     * explicitly forced. */
+    if((inst->enc_msSinceSID>(inst->enc_interval-1))||forceSID){
+
+        /* Search for best dbov value */
+        index=0;
+        for(i=1;i<93;i++){
+            /* Always round downwards */
+            if((inst->enc_Energy-WebRtcCng_kDbov[i])>0){
+                index=i;
+                break;
+            }
+        }
+        /* Energy did not exceed any table entry; use index 94. */
+        if((i==93)&&(index==0))
+            index=94;
+        SIDdata[0]=index;
+
+
+        /* Quantize coefs with tweak for WebRtc implementation of RFC3389 */
+        if(inst->enc_nrOfCoefs==WEBRTC_CNG_MAX_LPC_ORDER){ 
+            for(i=0;i<inst->enc_nrOfCoefs;i++){
+                SIDdata[i+1]=((inst->enc_reflCoefs[i]+128)>>8); /* Q15 to Q7*/ /* +127 */
+            }
+        }else{
+            for(i=0;i<inst->enc_nrOfCoefs;i++){
+                SIDdata[i+1]=(127+((inst->enc_reflCoefs[i]+128)>>8)); /* Q15 to Q7*/ /* +127 */
+            }
+        }
+
+        inst->enc_msSinceSID=0;
+        *bytesOut=inst->enc_nrOfCoefs+1;
+
+        inst->enc_msSinceSID+=(1000*nrOfSamples)/inst->enc_sampfreq;
+        return(inst->enc_nrOfCoefs+1);
+    }else{
+        inst->enc_msSinceSID+=(1000*nrOfSamples)/inst->enc_sampfreq;
+        *bytesOut=0;
+    return(0);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcCng_UpdateSid(...)
+ *
+ * This function updates the CN state when a new SID packet arrives
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - SID           : SID packet, all headers removed
+ *    - length        : Length in bytes of SID packet
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+/* Update the decoder's target state from a received SID frame: byte 0
+ * is an index into the WebRtcCng_kDbov energy table, bytes 1..length-1
+ * are the quantized (Q7) reflection coefficients. Returns 0, or -1 if
+ * the decoder has not been initialized. */
+WebRtc_Word16 WebRtcCng_UpdateSid(CNG_dec_inst *cng_inst,
+                                  WebRtc_UWord8 *SID,
+                                  WebRtc_Word16 length)
+{
+
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+    WebRtc_Word16 refCs[WEBRTC_CNG_MAX_LPC_ORDER];
+    WebRtc_Word32 targetEnergy;
+    int i;
+
+    if (inst->initflag != 1) {
+        inst->errorcode = CNG_DECODER_NOT_INITIATED;
+        return (-1);
+    }
+
+    /*Throw away reflection coefficients of higher order than we can handle*/
+    if(length> (WEBRTC_CNG_MAX_LPC_ORDER+1))
+        length=WEBRTC_CNG_MAX_LPC_ORDER+1;
+
+    inst->dec_order=length-1;
+
+    /* Clamp the received energy index to the table range. */
+    if(SID[0]>93)
+        SID[0]=93;
+    targetEnergy=WebRtcCng_kDbov[SID[0]];
+    /* Take down target energy to 75% */
+    /* NOTE(review): the two lines below actually yield 62.5%
+     * (1/2 + 1/8), not 75% as the comment above states — confirm
+     * which is intended before changing either. */
+    targetEnergy=targetEnergy>>1;
+    targetEnergy+=targetEnergy>>2;
+
+    inst->dec_target_energy=targetEnergy;
+
+    /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389 */
+    if(inst->dec_order==WEBRTC_CNG_MAX_LPC_ORDER){ 
+        for(i=0;i<(inst->dec_order);i++){
+            refCs[i]=SID[i+1]<<8; /* Q7 to Q15*/
+            inst->dec_target_reflCoefs[i]=refCs[i];
+        }
+    }else{
+        for(i=0;i<(inst->dec_order);i++){
+            refCs[i]=(SID[i+1]-127)<<8; /* Q7 to Q15*/
+            inst->dec_target_reflCoefs[i]=refCs[i];
+        }
+    }
+    
+    /* Zero out any coefficients not present in this SID frame. */
+    for(i=(inst->dec_order);i<WEBRTC_CNG_MAX_LPC_ORDER;i++){
+            refCs[i]=0; 
+            inst->dec_target_reflCoefs[i]=refCs[i];
+        }
+
+    return(0);
+}
+
+
+/****************************************************************************
+ * WebRtcCng_Generate(...)
+ *
+ * This function generates CN data when needed
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - outData       : pointer to area to write CN data
+ *    - nrOfSamples   : How much data to generate
+ *
+ * Return value        :  0 - Ok
+ *                       -1 - Error
+ */
+/* Generate nrOfSamples of comfort noise into outData by filtering scaled
+ * random excitation through the LPC synthesis filter derived from the
+ * smoothed reflection coefficients. new_period selects faster smoothing
+ * toward the latest SID targets. Returns 0, or -1 if the request is
+ * larger than the supported frame size. */
+WebRtc_Word16 WebRtcCng_Generate(CNG_dec_inst *cng_inst,
+                                 WebRtc_Word16 *outData,
+                                 WebRtc_Word16 nrOfSamples,
+                                 WebRtc_Word16 new_period)
+{
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+    
+    int i;
+    WebRtc_Word16 excitation[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+    WebRtc_Word16 low[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+    WebRtc_Word16 lpPoly[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 ReflBetaStd=26214; /*0.8 in q15*/
+    WebRtc_Word16 ReflBetaCompStd=6553; /*0.2 in q15*/
+    WebRtc_Word16 ReflBetaNewP=19661; /*0.6 in q15*/
+    WebRtc_Word16 ReflBetaCompNewP=13107; /*0.4 in q15*/
+    WebRtc_Word16 Beta,BetaC, tmp1, tmp2, tmp3;
+    WebRtc_Word32 targetEnergy;
+    WebRtc_Word16 En;
+    WebRtc_Word16 temp16;
+
+    if (nrOfSamples>WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
+        inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
+        return (-1);
+    }
+
+
+    /* Pick smoothing weights; adapt faster right after a new SID. */
+    if (new_period) {
+        inst->dec_used_scale_factor=inst->dec_target_scale_factor;
+        Beta=ReflBetaNewP;
+        BetaC=ReflBetaCompNewP;
+    } else {
+        Beta=ReflBetaStd;
+        BetaC=ReflBetaCompStd;
+    }
+
+    /*Here we use a 0.5 weighting, should possibly be modified to 0.6*/
+    tmp1=inst->dec_used_scale_factor<<2; /* Q13->Q15 */
+    tmp2=inst->dec_target_scale_factor<<2; /* Q13->Q15 */
+    tmp3=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp1,Beta,15);
+    tmp3+=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp2,BetaC,15);
+    inst->dec_used_scale_factor=tmp3>>2; /* Q15->Q13 */
+
+    /* Move the used energy halfway toward the target energy. */
+    inst->dec_used_energy=inst->dec_used_energy>>1;
+    inst->dec_used_energy+=inst->dec_target_energy>>1;
+
+    
+    /* Do the same for the reflection coeffs */
+    for (i=0;i<WEBRTC_CNG_MAX_LPC_ORDER;i++) {
+        inst->dec_used_reflCoefs[i]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_used_reflCoefs[i],Beta,15);
+        inst->dec_used_reflCoefs[i]+=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_target_reflCoefs[i],BetaC,15);        
+    }
+
+    /* Compute the polynomial coefficients            */
+    WebRtcCng_K2a16(inst->dec_used_reflCoefs, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);
+
+    /***/ 
+
+    targetEnergy=inst->dec_used_energy;
+
+    // Calculate scaling factor based on filter energy
+    En=8192; //1.0 in Q13
+    for (i=0; i<(WEBRTC_CNG_MAX_LPC_ORDER); i++) {
+
+        // Floating point value for reference 
+        // E*=1.0-((float)inst->dec_used_reflCoefs[i]/32768.0)*((float)inst->dec_used_reflCoefs[i]/32768.0);
+
+        // Same in fixed point
+        temp16=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_used_reflCoefs[i],inst->dec_used_reflCoefs[i],15); // K(i).^2 in Q15
+        temp16=0x7fff - temp16; // 1 - K(i).^2 in Q15
+        En=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(En,temp16,15);
+
+    }
+
+    //float scaling= sqrt(E*inst->dec_target_energy/((1<<24)));
+
+    //Calculate sqrt(En*target_energy/excitation energy)
+
+    targetEnergy=WebRtcSpl_Sqrt(inst->dec_used_energy);
+
+    En=(WebRtc_Word16)WebRtcSpl_Sqrt(En)<<6; //We are missing a factor sqrt(2) here
+    En=(En*3)>>1; //1.5 estimates sqrt(2)
+
+    /* NOTE(review): this assignment overwrites the Beta/BetaC-smoothed
+     * scale factor computed above, so that smoothing has no lasting
+     * effect — verify against upstream before changing. */
+    inst->dec_used_scale_factor=(WebRtc_Word16)((En*targetEnergy)>>12);
+
+
+    /***/
+
+    /*Generate excitation*/
+    /*Excitation energy per sample is 2.^24 - Q13 N(0,1) */
+    for(i=0;i<nrOfSamples;i++){
+        excitation[i]=WebRtcSpl_RandN(&inst->dec_seed)>>1;
+    }
+
+    /*Scale to correct energy*/
+    WebRtcSpl_ScaleVector(excitation, excitation, inst->dec_used_scale_factor, nrOfSamples, 13);
+
+    /* All-pole synthesis filtering of the scaled excitation. */
+    WebRtcSpl_FilterAR(
+        lpPoly,    /* Coefficients in Q12 */
+        WEBRTC_CNG_MAX_LPC_ORDER+1, 
+        excitation,            /* Speech samples */
+        nrOfSamples, 
+        inst->dec_filtstate,        /* State preservation */
+        WEBRTC_CNG_MAX_LPC_ORDER, 
+        inst->dec_filtstateLow,        /* State preservation */
+        WEBRTC_CNG_MAX_LPC_ORDER, 
+        outData,    /* Filtered speech samples */
+        low,
+        nrOfSamples
+    );
+
+    return(0);
+
+}
+
+
+
+/****************************************************************************
+ * WebRtcCng_GetErrorCodeEnc/Dec(...)
+ *
+ * These functions can be used to check the error code of a CNG instance. When
+ * a function returns -1, an error code is set for that instance. The
+ * functions below extract the code of the last error that occurred in the
+ * specified instance.
+ *
+ * Input:
+ *    - CNG_inst    : CNG enc/dec instance
+ *
+ * Return value     : Error code
+ */
+
+WebRtc_Word16 WebRtcCng_GetErrorCodeEnc(CNG_enc_inst *cng_inst)
+{
+    /* Interpret the opaque handle as the concrete encoder state and
+     * report the code of the last error recorded for it. */
+    WebRtcCngEncInst_t* enc = (WebRtcCngEncInst_t*) cng_inst;
+    return enc->errorcode;
+}
+
+WebRtc_Word16 WebRtcCng_GetErrorCodeDec(CNG_dec_inst *cng_inst)
+{
+    /* Interpret the opaque handle as the concrete decoder state and
+     * report the code of the last error recorded for it. */
+    WebRtcCngDecInst_t* dec = (WebRtcCngDecInst_t*) cng_inst;
+    return dec->errorcode;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/g711/Android.mk b/trunk/src/modules/audio_coding/codecs/g711/Android.mk
new file mode 100644
index 0000000..779bb7c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/Android.mk
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Directory of this makefile; relative paths below resolve against it.
+LOCAL_PATH := $(call my-dir)
+
+# Reset LOCAL_* variables left over from a previously parsed module.
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (common defines/flags).
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_g711
+LOCAL_MODULE_TAGS := optional
+LOCAL_GENERATED_SOURCES :=
+# Codec core plus its WebRTC C interface wrapper.
+LOCAL_SRC_FILES := \
+    g711_interface.c \
+    g711.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../../..
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# Full-platform builds pull in STLport here; NDK builds are expected to
+# provide it themselves.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/codecs/g711/g711.c b/trunk/src/modules/audio_coding/codecs/g711/g711.c
new file mode 100644
index 0000000..954f377
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/g711.c
@@ -0,0 +1,83 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g711.c - A-law and u-law transcoding routines
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2006 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place this code in the
+ *  public domain for the benefit of all mankind - even the slimy
+ *  ones who might try to proprietize my work and use it to my
+ *  detriment.
+ *
+ * $Id: g711.c,v 1.1 2006/06/07 15:46:39 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Removed unused include files
+ * -Changed to use WebRtc types
+ * -Added option to run encoder bitexact with ITU-T reference implementation
+ */
+
+/*! \file */
+
+#include "g711.h"
+#include "typedefs.h"
+
+/* Copied from the CCITT G.711 specification */
+/* Index: u-law codeword; value: the A-law codeword that G.711 specifies
+ * as its transcoding. Do not edit — values come from the spec. */
+static const WebRtc_UWord8 ulaw_to_alaw_table[256] =
+{
+     42,  43,  40,  41,  46,  47,  44,  45,  34,  35,  32,  33,  38,  39,  36,  37,
+     58,  59,  56,  57,  62,  63,  60,  61,  50,  51,  48,  49,  54,  55,  52,  53,
+     10,  11,   8,   9,  14,  15,  12,  13,   2,   3,   0,   1,   6,   7,   4,  26,
+     27,  24,  25,  30,  31,  28,  29,  18,  19,  16,  17,  22,  23,  20,  21, 106,
+    104, 105, 110, 111, 108, 109,  98,  99,  96,  97, 102, 103, 100, 101, 122, 120,
+    126, 127, 124, 125, 114, 115, 112, 113, 118, 119, 116, 117,  75,  73,  79,  77,
+     66,  67,  64,  65,  70,  71,  68,  69,  90,  91,  88,  89,  94,  95,  92,  93,
+     82,  82,  83,  83,  80,  80,  81,  81,  86,  86,  87,  87,  84,  84,  85,  85,
+    170, 171, 168, 169, 174, 175, 172, 173, 162, 163, 160, 161, 166, 167, 164, 165,
+    186, 187, 184, 185, 190, 191, 188, 189, 178, 179, 176, 177, 182, 183, 180, 181,
+    138, 139, 136, 137, 142, 143, 140, 141, 130, 131, 128, 129, 134, 135, 132, 154,
+    155, 152, 153, 158, 159, 156, 157, 146, 147, 144, 145, 150, 151, 148, 149, 234,
+    232, 233, 238, 239, 236, 237, 226, 227, 224, 225, 230, 231, 228, 229, 250, 248,
+    254, 255, 252, 253, 242, 243, 240, 241, 246, 247, 244, 245, 203, 201, 207, 205,
+    194, 195, 192, 193, 198, 199, 196, 197, 218, 219, 216, 217, 222, 223, 220, 221,
+    210, 210, 211, 211, 208, 208, 209, 209, 214, 214, 215, 215, 212, 212, 213, 213
+};
+
+/* These transcoding tables are copied from the CCITT G.711 specification. To achieve
+   optimal results, do not change them. */
+
+/* Index: A-law codeword; value: the u-law codeword that G.711 specifies
+ * as its transcoding. Do not edit — values come from the spec. */
+static const WebRtc_UWord8 alaw_to_ulaw_table[256] =
+{
+     42,  43,  40,  41,  46,  47,  44,  45,  34,  35,  32,  33,  38,  39,  36,  37,
+     57,  58,  55,  56,  61,  62,  59,  60,  49,  50,  47,  48,  53,  54,  51,  52,
+     10,  11,   8,   9,  14,  15,  12,  13,   2,   3,   0,   1,   6,   7,   4,   5,
+     26,  27,  24,  25,  30,  31,  28,  29,  18,  19,  16,  17,  22,  23,  20,  21,
+     98,  99,  96,  97, 102, 103, 100, 101,  93,  93,  92,  92,  95,  95,  94,  94,
+    116, 118, 112, 114, 124, 126, 120, 122, 106, 107, 104, 105, 110, 111, 108, 109,
+     72,  73,  70,  71,  76,  77,  74,  75,  64,  65,  63,  63,  68,  69,  66,  67,
+     86,  87,  84,  85,  90,  91,  88,  89,  79,  79,  78,  78,  82,  83,  80,  81,
+    170, 171, 168, 169, 174, 175, 172, 173, 162, 163, 160, 161, 166, 167, 164, 165,
+    185, 186, 183, 184, 189, 190, 187, 188, 177, 178, 175, 176, 181, 182, 179, 180,
+    138, 139, 136, 137, 142, 143, 140, 141, 130, 131, 128, 129, 134, 135, 132, 133,
+    154, 155, 152, 153, 158, 159, 156, 157, 146, 147, 144, 145, 150, 151, 148, 149,
+    226, 227, 224, 225, 230, 231, 228, 229, 221, 221, 220, 220, 223, 223, 222, 222,
+    244, 246, 240, 242, 252, 254, 248, 250, 234, 235, 232, 233, 238, 239, 236, 237,
+    200, 201, 198, 199, 204, 205, 202, 203, 192, 193, 191, 191, 196, 197, 194, 195,
+    214, 215, 212, 213, 218, 219, 216, 217, 207, 207, 206, 206, 210, 211, 208, 209
+};
+
+/* Transcode one A-law codeword to u-law via the G.711 lookup table. */
+WebRtc_UWord8 alaw_to_ulaw(WebRtc_UWord8 alaw)
+{
+    return alaw_to_ulaw_table[alaw];
+}
+/*- End of function --------------------------------------------------------*/
+
+/* Transcode one u-law codeword to A-law via the G.711 lookup table. */
+WebRtc_UWord8 ulaw_to_alaw(WebRtc_UWord8 ulaw)
+{
+    return ulaw_to_alaw_table[ulaw];
+}
+/*- End of function --------------------------------------------------------*/
+/*- End of file ------------------------------------------------------------*/
diff --git a/trunk/src/modules/audio_coding/codecs/g711/g711.gypi b/trunk/src/modules/audio_coding/codecs/g711/g711.gypi
new file mode 100644
index 0000000..763e6db
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/g711.gypi
@@ -0,0 +1,65 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      # The G.711 codec library; '<(library)' expands to static or
+      # shared depending on the build configuration.
+      'target_name': 'G711',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/g711_interface.h',
+        'g711_interface.c',
+        'g711.c',
+        'g711.h',
+      ],
+    },
+  ], # targets
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          # gtest-based unit tests for the codec.
+          'target_name': 'g711_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'G711',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'g711_unittest.cc',
+          ],
+        },
+        {
+          # Standalone command-line test application.
+          'target_name': 'g711_test',
+          'type': 'executable',
+          'dependencies': [
+            'G711',
+          ],
+          'sources': [
+            'test/testG711.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/g711/g711.h b/trunk/src/modules/audio_coding/codecs/g711/g711.h
new file mode 100644
index 0000000..cd5e3d7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/g711.h
@@ -0,0 +1,382 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g711.h - In line A-law and u-law conversion routines
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2001 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place this code in the
+ *  public domain for the benefit of all mankind - even the slimy
+ *  ones who might try to proprietize my work and use it to my
+ *  detriment.
+ *
+ * $Id: g711.h,v 1.1 2006/06/07 15:46:39 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Changed to use WebRtc types
+ * -Changed __inline__ to __inline
+ * -Two changes to make implementation bitexact with ITU-T reference implementation
+ */
+
+/*! \file */
+
+/*! \page g711_page A-law and mu-law handling
+Lookup tables for A-law and u-law look attractive, until you consider the impact
+on the CPU cache. If it causes a substantial area of your processor cache to get
+hit too often, cache sloshing will severely slow things down. The main reason
+these routines are slow in C, is the lack of direct access to the CPU's "find
+the first 1" instruction. A little in-line assembler fixes that, and the
+conversion routines can be faster than lookup tables, in most real world usage.
+A "find the first 1" instruction is available on most modern CPUs, and is a
+much underused feature. 
+
+If an assembly language method of bit searching is not available, these routines
+revert to a method that can be a little slow, so the cache thrashing might not
+seem so bad :(
+
+Feel free to submit patches to add fast "find the first 1" support for your own
+favourite processor.
+
+Look up tables are used for transcoding between A-law and u-law, since it is
+difficult to achieve the precise transcoding procedure laid down in the G.711
+specification by other means.
+*/
+
+#if !defined(_G711_H_)
+#define _G711_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "typedefs.h"
+
+#if defined(__i386__)
+/*! \brief Find the bit position of the highest set bit in a word
+    \param bits The word to be searched
+    \return The bit number of the highest set bit, or -1 if the word is zero. */
+static __inline__ int top_bit(unsigned int bits)
+{
+    int res;
+
+    /* EDX is preseeded with -1; BSR then writes the index of the
+     * highest set bit. This relies on the common x86 behavior of BSR
+     * leaving the destination unmodified when the source is zero, so a
+     * zero input yields -1 (formally undefined per Intel — verify for
+     * the targeted CPUs). */
+    __asm__ __volatile__(" movl $-1,%%edx;\n"
+                         " bsrl %%eax,%%edx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Find the bit position of the lowest set bit in a word
+    \param bits The word to be searched
+    \return The bit number of the lowest set bit, or -1 if the word is zero. */
+static __inline__ int bottom_bit(unsigned int bits)
+{
+    int res;
+
+    /* Same preseed-with--1 trick as top_bit(), but BSF scans for the
+     * lowest set bit instead. */
+    __asm__ __volatile__(" movl $-1,%%edx;\n"
+                         " bsfl %%eax,%%edx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+#elif defined(__x86_64__)
+/*! \brief Find the bit position of the highest set bit in a word (x86_64).
+    \param bits The word to be searched
+    \return The bit number of the highest set bit, or -1 if the word is zero. */
+static __inline__ int top_bit(unsigned int bits)
+{
+    int res;
+
+    /* 64-bit variant of the i386 version: RDX preseeded with -1 so a
+     * zero source yields -1 (relies on BSR leaving the destination
+     * untouched for zero input on the targeted CPUs). */
+    __asm__ __volatile__(" movq $-1,%%rdx;\n"
+                         " bsrq %%rax,%%rdx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Find the bit position of the lowest set bit in a word (x86_64).
+    \param bits The word to be searched
+    \return The bit number of the lowest set bit, or -1 if the word is zero. */
+static __inline__ int bottom_bit(unsigned int bits)
+{
+    int res;
+
+    /* 64-bit variant: BSF scans for the lowest set bit; RDX is
+     * preseeded with -1 to cover the zero-input case. */
+    __asm__ __volatile__(" movq $-1,%%rdx;\n"
+                         " bsfq %%rax,%%rdx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+#else
+/* Portable fallback: binary search for the highest set bit. Each step
+ * tests a mask selecting the "upper" stripe at that granularity; if any
+ * bit survives, the index gains that stripe's weight and the other bits
+ * are discarded. Returns -1 for a zero input. */
+static __inline int top_bit(unsigned int bits)
+{
+    int i;
+    
+    if (bits == 0)
+        return -1;
+    i = 0;
+    if (bits & 0xFFFF0000)
+    {
+        bits &= 0xFFFF0000;
+        i += 16;
+    }
+    if (bits & 0xFF00FF00)
+    {
+        bits &= 0xFF00FF00;
+        i += 8;
+    }
+    if (bits & 0xF0F0F0F0)
+    {
+        bits &= 0xF0F0F0F0;
+        i += 4;
+    }
+    if (bits & 0xCCCCCCCC)
+    {
+        bits &= 0xCCCCCCCC;
+        i += 2;
+    }
+    if (bits & 0xAAAAAAAA)
+    {
+        bits &= 0xAAAAAAAA;
+        i += 1;
+    }
+    return i;
+}
+/*- End of function --------------------------------------------------------*/
+
+/* Portable fallback: binary search for the lowest set bit. Mirror image
+ * of top_bit(): start from 32 and subtract a stripe's weight whenever a
+ * set bit survives in the "lower" stripe. Returns -1 for zero input. */
+static __inline int bottom_bit(unsigned int bits)
+{
+    int i;
+    
+    if (bits == 0)
+        return -1;
+    i = 32;
+    if (bits & 0x0000FFFF)
+    {
+        bits &= 0x0000FFFF;
+        i -= 16;
+    }
+    if (bits & 0x00FF00FF)
+    {
+        bits &= 0x00FF00FF;
+        i -= 8;
+    }
+    if (bits & 0x0F0F0F0F)
+    {
+        bits &= 0x0F0F0F0F;
+        i -= 4;
+    }
+    if (bits & 0x33333333)
+    {
+        bits &= 0x33333333;
+        i -= 2;
+    }
+    if (bits & 0x55555555)
+    {
+        bits &= 0x55555555;
+        i -= 1;
+    }
+    return i;
+}
+/*- End of function --------------------------------------------------------*/
+#endif
+
+/* N.B. It is tempting to use look-up tables for A-law and u-law conversion.
+ *      However, you should consider the cache footprint.
+ *
+ *      A 64K byte table for linear to x-law and a 512 byte table for x-law to
+ *      linear sound like peanuts these days, and shouldn't an array lookup be
+ *      real fast? No! When the cache sloshes as badly as this one will, a tight
+ *      calculation may be better. The messiest part is normally finding the
+ *      segment, but a little inline assembly can fix that on an i386, x86_64 and
+ *      many other modern processors.
+ */
+ 
+/*
+ * Mu-law is basically as follows:
+ *
+ *      Biased Linear Input Code        Compressed Code
+ *      ------------------------        ---------------
+ *      00000001wxyza                   000wxyz
+ *      0000001wxyzab                   001wxyz
+ *      000001wxyzabc                   010wxyz
+ *      00001wxyzabcd                   011wxyz
+ *      0001wxyzabcde                   100wxyz
+ *      001wxyzabcdef                   101wxyz
+ *      01wxyzabcdefg                   110wxyz
+ *      1wxyzabcdefgh                   111wxyz
+ *
+ * Each biased linear code has a leading 1 which identifies the segment
+ * number. The value of the segment number is equal to 7 minus the number
+ * of leading 0's. The quantization interval is directly available as the
+ * four bits wxyz. The trailing bits (a - h) are ignored.
+ *
+ * Ordinarily the complement of the resulting code word is used for
+ * transmission, and so the code word is complemented before it is returned.
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+
+//#define ULAW_ZEROTRAP                 /* turn on the trap as per the MIL-STD */
+#define ULAW_BIAS        0x84           /* Bias for linear code. */
+
+/*! \brief Encode a linear sample to u-law
+    \param linear The sample to encode.
+    \return The u-law value.
+*/
+static __inline WebRtc_UWord8 linear_to_ulaw(int linear)
+{
+    WebRtc_UWord8 u_val;
+    int mask;
+    int seg;
+
+    /* Get the sign and the magnitude of the value. */
+    if (linear < 0)
+    {
+        /* WebRtc, tlegrand: -1 added to get bitexact to reference implementation */
+        linear = ULAW_BIAS - linear - 1;
+        mask = 0x7F;
+    }
+    else
+    {
+        linear = ULAW_BIAS + linear;
+        mask = 0xFF;
+    }
+
+    /* OR-ing in 0xFF guarantees top_bit() >= 7, so seg is never
+     * negative (segment 0 covers the whole low byte). */
+    seg = top_bit(linear | 0xFF) - 7;
+
+    /*
+     * Combine the sign, segment, quantization bits,
+     * and complement the code word.
+     */
+    if (seg >= 8)
+        u_val = (WebRtc_UWord8) (0x7F ^ mask); /* Out of range: clamp. */
+    else
+        u_val = (WebRtc_UWord8) (((seg << 4) | ((linear >> (seg + 3)) & 0xF)) ^ mask);
+#ifdef ULAW_ZEROTRAP
+    /* Optional ITU trap */
+    if (u_val == 0)
+        u_val = 0x02;
+#endif
+    return  u_val;
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Decode an u-law sample to a linear value.
+    \param ulaw The u-law sample to decode.
+    \return The linear value.
+*/
+static __inline WebRtc_Word16 ulaw_to_linear(WebRtc_UWord8 ulaw)
+{
+    int t;
+    
+    /* Complement to obtain normal u-law value. */
+    ulaw = ~ulaw;
+    /*
+     * Extract and bias the quantization bits. Then
+     * shift up by the segment number and subtract out the bias.
+     */
+    t = (((ulaw & 0x0F) << 3) + ULAW_BIAS) << (((int) ulaw & 0x70) >> 4);
+    /* Bit 7 of the (complemented) codeword carries the sign. */
+    return  (WebRtc_Word16) ((ulaw & 0x80)  ?  (ULAW_BIAS - t)  :  (t - ULAW_BIAS));
+}
+/*- End of function --------------------------------------------------------*/
+
+/*
+ * A-law is basically as follows:
+ *
+ *      Linear Input Code        Compressed Code
+ *      -----------------        ---------------
+ *      0000000wxyza             000wxyz
+ *      0000001wxyza             001wxyz
+ *      000001wxyzab             010wxyz
+ *      00001wxyzabc             011wxyz
+ *      0001wxyzabcd             100wxyz
+ *      001wxyzabcde             101wxyz
+ *      01wxyzabcdef             110wxyz
+ *      1wxyzabcdefg             111wxyz
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+
+#define ALAW_AMI_MASK       0x55
+
+/*! \brief Encode a linear sample to A-law
+    \param linear The sample to encode.
+    \return The A-law value.
+*/
+static __inline WebRtc_UWord8 linear_to_alaw(int linear)
+{
+    int mask;
+    int seg;
+    
+    if (linear >= 0)
+    {
+        /* Sign (bit 7) bit = 1 */
+        mask = ALAW_AMI_MASK | 0x80;
+    }
+    else
+    {
+        /* Sign (bit 7) bit = 0 */
+        mask = ALAW_AMI_MASK;
+        /* WebRtc, tlegrand: Changed from -8 to -1 to get bitexact to reference
+         * implementation */
+        linear = -linear - 1;
+    }
+
+    /* Convert the scaled magnitude to segment number. */
+    /* OR-ing in 0xFF keeps top_bit() >= 7, so seg is never negative. */
+    seg = top_bit(linear | 0xFF) - 7;
+    if (seg >= 8)
+    {
+        if (linear >= 0)
+        {
+            /* Out of range. Return maximum value. */
+            return (WebRtc_UWord8) (0x7F ^ mask);
+        }
+        /* We must be just a tiny step below zero */
+        return (WebRtc_UWord8) (0x00 ^ mask);
+    }
+    /* Combine the sign, segment, and quantization bits. */
+    return (WebRtc_UWord8) (((seg << 4) | ((linear >> ((seg)  ?  (seg + 3)  :  4)) & 0x0F)) ^ mask);
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Decode an A-law sample to a linear value.
+    \param alaw The A-law sample to decode.
+    \return The linear value.
+*/
+static __inline WebRtc_Word16 alaw_to_linear(WebRtc_UWord8 alaw)
+{
+    int i;
+    int seg;
+
+    /* Undo the alternate-bit inversion applied on encode. */
+    alaw ^= ALAW_AMI_MASK;
+    i = ((alaw & 0x0F) << 4);
+    seg = (((int) alaw & 0x70) >> 4);
+    /* Segment 0 is linear (half-step offset of 8); higher segments add
+     * the segment base and double the step per segment. */
+    if (seg)
+        i = (i + 0x108) << (seg - 1);
+    else
+        i += 8;
+    return (WebRtc_Word16) ((alaw & 0x80)  ?  i  :  -i);
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Transcode from A-law to u-law, using the procedure defined in G.711.
+    \param alaw The A-law sample to transcode.
+    \return The best matching u-law value.
+*/
+WebRtc_UWord8 alaw_to_ulaw(WebRtc_UWord8 alaw);
+
+/*! \brief Transcode from u-law to A-law, using the procedure defined in G.711.
+    \param ulaw The u-law sample to transcode.
+    \return The best matching A-law value.
+*/
+WebRtc_UWord8 ulaw_to_alaw(WebRtc_UWord8 ulaw);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+/*- End of file ------------------------------------------------------------*/
diff --git a/trunk/src/modules/audio_coding/codecs/g711/g711_interface.c b/trunk/src/modules/audio_coding/codecs/g711/g711_interface.c
new file mode 100644
index 0000000..a49abdb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/g711_interface.c
@@ -0,0 +1,171 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include <string.h>
+#include "g711.h"
+#include "g711_interface.h"
+#include "typedefs.h"
+
+WebRtc_Word16 WebRtcG711_EncodeA(void *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded)
+{
+    /* Encode |len| linear PCM samples to A-law, packing two 8-bit codes
+     * into each 16-bit word of |encoded|.  Returns the number of encoded
+     * bytes, or -1 if |len| is negative.  |state| is a dummy kept only so
+     * this codec matches the common codec interface. */
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    // Loop over all samples
+    for (n = 0; n < len; n++) {
+        tempVal = (WebRtc_UWord16)linear_to_alaw(speechIn[n]);
+
+#ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            encoded[n>>1]|=((WebRtc_UWord16)tempVal);
+        } else {
+            encoded[n>>1]=((WebRtc_UWord16)tempVal)<<8;
+        }
+#else
+        /* Little endian: even samples go in the low byte, odd in the high.
+         * (A dead duplicate accumulator, tempVal2, was removed here; it was
+         * written in both branches but never read.) */
+        if ((n & 0x1) == 1) {
+            encoded[n >> 1] |= ((WebRtc_UWord16) tempVal) << 8;
+        } else {
+            encoded[n >> 1] = ((WebRtc_UWord16) tempVal);
+        }
+#endif
+    }
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_EncodeU(void  *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded)
+{
+    /* Encode |len| linear PCM samples to u-law, packing two 8-bit codes
+     * into each 16-bit word of |encoded|.  Returns the number of encoded
+     * bytes, or -1 if |len| is negative.  |state| is a dummy kept only so
+     * this codec matches the common codec interface. */
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    // Loop over all samples
+    for (n = 0; n < len; n++) {
+        tempVal = (WebRtc_UWord16)linear_to_ulaw(speechIn[n]);
+
+ #ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            encoded[n>>1]|=((WebRtc_UWord16)tempVal);
+        } else {
+            encoded[n>>1]=((WebRtc_UWord16)tempVal)<<8;
+        }
+ #else
+        /* Little endian: even samples fill the low byte, odd the high. */
+        if ((n & 0x1) == 1) {
+            encoded[n >> 1] |= ((WebRtc_UWord16) tempVal) << 8;
+        } else {
+            encoded[n >> 1] = ((WebRtc_UWord16) tempVal);
+        }
+ #endif
+    }
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_DecodeA(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType)
+{
+    /* Decode |len| A-law bytes (packed two per 16-bit word in |encoded|)
+     * into linear PCM samples in |decoded|.  Always reports speech type 1
+     * (normal speech) since G.711 has no built-in DTX/CNG.  Returns the
+     * number of decoded samples, or -1 if |len| is negative.  |state| is a
+     * dummy kept only for interface uniformity. */
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    for (n = 0; n < len; n++) {
+ #ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] & 0xFF);
+        } else {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] >> 8);
+        }
+ #else
+        /* Little endian: even samples come from the low byte, odd from the
+         * high byte (mirrors the packing done by WebRtcG711_EncodeA). */
+        if ((n & 0x1) == 1) {
+            tempVal = (encoded[n >> 1] >> 8);
+        } else {
+            tempVal = (encoded[n >> 1] & 0xFF);
+        }
+ #endif
+        decoded[n] = (WebRtc_Word16) alaw_to_linear(tempVal);
+    }
+
+    *speechType = 1;
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_DecodeU(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType)
+{
+    /* Decode |len| u-law bytes (packed two per 16-bit word in |encoded|)
+     * into linear PCM samples in |decoded|.  Always reports speech type 1
+     * (normal speech) since G.711 has no built-in DTX/CNG.  Returns the
+     * number of decoded samples, or -1 if |len| is negative.  |state| is a
+     * dummy kept only for interface uniformity. */
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    for (n = 0; n < len; n++) {
+ #ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] & 0xFF);
+        } else {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] >> 8);
+        }
+ #else
+        /* Little endian: even samples come from the low byte, odd from the
+         * high byte (mirrors the packing done by WebRtcG711_EncodeU). */
+        if ((n & 0x1) == 1) {
+            tempVal = (encoded[n >> 1] >> 8);
+        } else {
+            tempVal = (encoded[n >> 1] & 0xFF);
+        }
+ #endif
+        decoded[n] = (WebRtc_Word16) ulaw_to_linear(tempVal);
+    }
+
+    *speechType = 1;
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_Version(char* version, WebRtc_Word16 lenBytes)
+{
+    /* Copy the codec version string into |version| (capacity |lenBytes|).
+     * Returns 0 on success, -1 if no string can be written. */
+    if ((version == NULL) || (lenBytes < 1)) {
+        return (-1);
+    }
+    strncpy(version, "2.0.0", lenBytes);
+    /* strncpy() leaves the destination unterminated when it truncates,
+     * so force NUL termination. */
+    version[lenBytes - 1] = '\0';
+    return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/g711/g711_unittest.cc b/trunk/src/modules/audio_coding/codecs/g711/g711_unittest.cc
new file mode 100644
index 0000000..c903bed
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/g711_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "g711_interface.h"
+#include "gtest/gtest.h"
+
+// Placeholder test body; real exercising of the codec is done by the
+// standalone testG711 application.
+TEST(G711Test, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/modules/audio_coding/codecs/g711/include/g711_interface.h b/trunk/src/modules/audio_coding/codecs/g711/include/g711_interface.h
new file mode 100644
index 0000000..25a9903
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/include/g711_interface.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_
+#define MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_
+
+#include "typedefs.h"
+
+// Comfort noise constants
+#define G711_WEBRTC_SPEECH    1
+#define G711_WEBRTC_CNG       2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcG711_EncodeA(...)
+ *
+ * This function encodes a G711 A-law frame and inserts it into a packet.
+ * The input speech can be of any length.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - speechIn           : Input speech vector
+ *      - len                : Samples in speechIn
+ *
+ * Output:
+ *      - encoded            : The encoded data vector
+ *
+ * Return value              : >0 - Length (in bytes) of coded data
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_EncodeA(void *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded);
+
+/****************************************************************************
+ * WebRtcG711_EncodeU(...)
+ *
+ * This function encodes a G711 U-law frame and inserts it into a packet.
+ * The input speech can be of any length.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - speechIn           : Input speech vector
+ *      - len                : Samples in speechIn
+ *
+ * Output:
+ *      - encoded            : The encoded data vector
+ *
+ * Return value              : >0 - Length (in bytes) of coded data
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_EncodeU(void *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded);
+
+/****************************************************************************
+ * WebRtcG711_DecodeA(...)
+ *
+ * This function decodes a packet G711 A-law frame.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - encoded            : Encoded data
+ *      - len                : Bytes in encoded vector
+ *
+ * Output:
+ *      - decoded            : The decoded vector
+ *      - speechType         : 1 normal, 2 CNG (for G711 it should
+ *                             always return 1 since G711 does not have a
+ *                             built-in DTX/CNG scheme)
+ *
+ * Return value              : >0 - Samples in decoded vector
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_DecodeA(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType);
+
+/****************************************************************************
+ * WebRtcG711_DecodeU(...)
+ *
+ * This function decodes a packet G711 U-law frame.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - encoded            : Encoded data
+ *      - len                : Bytes in encoded vector
+ *
+ * Output:
+ *      - decoded            : The decoded vector
+ *      - speechType         : 1 normal, 2 CNG (for G711 it should
+ *                             always return 1 since G711 does not have a
+ *                             built-in DTX/CNG scheme)
+ *
+ * Return value              : >0 - Samples in decoded vector
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_DecodeU(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType);
+
+/**********************************************************************
+* WebRtcG711_Version(...)
+*
+* This function gives the version string of the G.711 codec.
+*
+* Input:
+*      - lenBytes:     the size of Allocated space (in Bytes) where
+*                      the version number is written to (in string format).
+*
+* Output:
+*      - version:      Pointer to a buffer where the version number is
+*                      written to.
+*
+*/
+
+WebRtc_Word16 WebRtcG711_Version(char* version, WebRtc_Word16 lenBytes);
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif /* MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/g711/test/testG711.cc b/trunk/src/modules/audio_coding/codecs/g711/test/testG711.cc
new file mode 100644
index 0000000..747f922
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g711/test/testG711.cc
@@ -0,0 +1,169 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * testG711.cpp : Defines the entry point for the console application.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+/* include API */
+#include "g711_interface.h"
+
+/* Runtime statistics */
+#include <time.h>
+#define CLOCKS_PER_SEC_G711  1000
+
+
+/* function for reading audio data from PCM file */
+int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
+
+    /* Read |length| 16-bit samples from |inp| into |data|.  Returns 1 when
+     * the file ran short (remaining samples are zero-padded), else 0. */
+    short k, rlen, status = 0;
+
+    rlen = (short)fread(data, sizeof(WebRtc_Word16), length, inp);
+    if (rlen < length) {
+        /* Short read: zero-pad the tail and flag end of file. */
+        for (k = rlen; k < length; k++)
+            data[k] = 0;
+        status = 1;
+    }
+
+    return status;
+}
+
+int main(int argc, char* argv[])
+{
+    /* Console driver for the G.711 codec: encodes a PCM file frame by
+     * frame with the selected law, optionally dumps the bitstream, then
+     * decodes back to PCM and reports runtime statistics. */
+    char inname[80], outname[40], bitname[40];
+    FILE *inp, *outp, *bitp = NULL;
+    int framecnt, endfile;
+
+    WebRtc_Word16 framelength = 80;
+
+    int err;
+
+    /* Runtime statistics */
+    double starttime;
+    double runtime;
+    double length_file;
+
+    WebRtc_Word16 stream_len = 0;
+    WebRtc_Word16 shortdata[480];
+    WebRtc_Word16 decoded[480];
+    WebRtc_Word16 streamdata[500];
+    WebRtc_Word16    speechType[1];
+    char law[2];
+    char versionNumber[40];
+
+    /* handling wrong input arguments in the command line */
+    if ((argc!=5) && (argc!=6))  {
+        printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("\n");
+        printf("\nG.711 test application\n\n");
+        printf("Usage:\n\n");
+        printf("./testG711.exe framelength law infile outfile \n\n");
+        printf("framelength: Framelength in samples.\n");
+        printf("law        : Coding law, A or u.\n");
+        printf("infile     : Normal speech input file\n");
+        printf("outfile    : Speech output file\n\n");
+        printf("outbits    : Output bitstream file [optional]\n\n");
+        exit(0);
+
+    }
+
+    /* Get version and print */
+    WebRtcG711_Version(versionNumber, 40);
+
+    printf("-----------------------------------\n");
+    printf("G.711 version: %s\n\n", versionNumber);
+    /* Get frame length and reject values that would overflow the
+     * 480-sample work buffers. */
+    framelength = atoi(argv[1]);
+    if ((framelength < 1) || (framelength > 480)) {
+        printf("  G.711: Invalid framelength %s.\n", argv[1]);
+        exit(1);
+    }
+
+    /* Get compression law.  Only the first character matters; a bounded
+     * copy avoids overflowing the two-byte law[] buffer (the original
+     * used an unbounded strcpy). */
+    law[0] = argv[2][0];
+    law[1] = '\0';
+
+    /* Get Input and Output files (field widths bound the reads so long
+     * paths cannot overflow the fixed-size name buffers). */
+    sscanf(argv[3], "%79s", inname);
+    sscanf(argv[4], "%39s", outname);
+    if (argc==6) {
+        sscanf(argv[5], "%39s", bitname);
+        if ((bitp = fopen(bitname,"wb")) == NULL) {
+            printf("  G.711: Cannot write file %s.\n", bitname);
+            exit(1);
+        }
+    }
+
+    if ((inp = fopen(inname,"rb")) == NULL) {
+        printf("  G.711: Cannot read file %s.\n", inname);
+        exit(1);
+    }
+    if ((outp = fopen(outname,"wb")) == NULL) {
+        printf("  G.711: Cannot write file %s.\n", outname);
+        exit(1);
+    }
+    printf("\nInput:  %s\nOutput: %s\n", inname, outname);
+    if (argc==6) {
+        printf("\nBitfile:  %s\n", bitname);
+    }
+
+    starttime = clock()/(double)CLOCKS_PER_SEC_G711; /* Runtime statistics */
+
+     /* Initialize encoder and decoder */
+    framecnt= 0;
+    endfile    = 0;
+    while (endfile == 0) {
+        framecnt++;
+        /* Read speech block */
+        endfile = readframe(shortdata, inp, framelength);
+
+        /* G.711 encoding */
+        if (!strcmp(law,"A")) {
+            /* A-law encoding */
+            stream_len = WebRtcG711_EncodeA(NULL, shortdata, framelength, streamdata);
+            if (argc==6){
+                /* Write bits to file */
+                fwrite(streamdata,sizeof(unsigned char),stream_len,bitp);
+            }
+            err = WebRtcG711_DecodeA(NULL, streamdata, stream_len, decoded, speechType);
+        } else if (!strcmp(law,"u")){
+            /* u-law encoding */
+            stream_len = WebRtcG711_EncodeU(NULL, shortdata, framelength, streamdata);
+            if (argc==6){
+                /* Write bits to file */
+                fwrite(streamdata,sizeof(unsigned char),stream_len,bitp);
+            }
+            err = WebRtcG711_DecodeU(NULL, streamdata, stream_len, decoded, speechType);
+        } else {
+            printf("Wrong law mode\n");
+            exit (1);
+        }
+        if (stream_len < 0 || err < 0) {
+            /* exit if returned with error */
+            printf("Error in encoder/decoder\n");
+        } else {
+            /* Write coded speech to file */
+            fwrite(decoded,sizeof(short),framelength,outp);
+        }
+    }
+
+
+    runtime = (double)(clock()/(double)CLOCKS_PER_SEC_G711-starttime);
+    length_file = ((double)framecnt*(double)framelength/8000);
+    printf("\n\nLength of speech file: %.1f s\n", length_file);
+    printf("Time to run G.711:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+    printf("---------------------END----------------------\n");
+
+    fclose(inp);
+    fclose(outp);
+    /* The optional bitstream file was never closed in the original. */
+    if (bitp != NULL) {
+        fclose(bitp);
+    }
+
+    return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/g722/Android.mk b/trunk/src/modules/audio_coding/codecs/g722/Android.mk
new file mode 100644
index 0000000..39dea9e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/Android.mk
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (defines MY_WEBRTC_COMMON_DEFS used below).
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_g722
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    g722_interface.c \
+    g722_encode.c \
+    g722_decode.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../../..
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# When building inside the Android tree (no NDK), pull in the stlport rules.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/codecs/g722/g722.gypi b/trunk/src/modules/audio_coding/codecs/g722/g722.gypi
new file mode 100644
index 0000000..c1e55d9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/g722.gypi
@@ -0,0 +1,65 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+  # G.722 codec library target plus, outside Chromium, its test binaries.
+  'targets': [
+    {
+      'target_name': 'G722',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/g722_interface.h',
+        'g722_interface.c',
+        'g722_encode.c',
+        'g722_decode.c',
+        'g722_enc_dec.h',
+      ],
+    },
+  ], # targets
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'g722_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'G722',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'g722_unittest.cc',
+          ],
+        },
+        {
+          # Standalone console test application (test/testG722.cc).
+          'target_name': 'G722Test',
+          'type': 'executable',
+          'dependencies': [
+            'G722',
+          ],
+          'sources': [
+            'test/testG722.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/g722/g722_decode.c b/trunk/src/modules/audio_coding/codecs/g722/g722_decode.c
new file mode 100644
index 0000000..499cc8f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/g722_decode.c
@@ -0,0 +1,410 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g722_decode.c - The ITU G.722 codec, decode part.
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2005 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place my own contributions 
+ *  to this code in the public domain for the benefit of all mankind -
+ *  even the slimy ones who might try to proprietize my work and use it
+ *  to my detriment.
+ *
+ * Based in part on a single channel G.722 codec which is:
+ *
+ * Copyright (c) CMU 1993
+ * Computer Science, Speech Group
+ * Chengxiang Lu and Alex Hauptmann
+ *
+ * $Id: g722_decode.c,v 1.15 2006/07/07 16:37:49 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Removed usage of inttypes.h and tgmath.h
+ * -Changed to use WebRtc types
+ * -Changed __inline__ to __inline
+ * -Added saturation check on output
+ */
+
+/*! \file */
+
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <stdio.h>
+#include <memory.h>
+#include <stdlib.h>
+
+#include "typedefs.h"
+#include "g722_enc_dec.h"
+
+
+#if !defined(FALSE)
+#define FALSE 0
+#endif
+#if !defined(TRUE)
+#define TRUE (!FALSE)
+#endif
+
+/* Clamp a 32-bit value into the 16-bit range [-32768, 32767]. */
+static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
+{
+    WebRtc_Word16 amp16;
+
+    /* Hopefully this is optimised for the common case - not clipping */
+    amp16 = (WebRtc_Word16) amp;
+    if (amp == amp16)
+        return amp16;
+    /* Out of range: clip to the nearer 16-bit limit. */
+    if (amp > WEBRTC_INT16_MAX)
+        return  WEBRTC_INT16_MAX;
+    return  WEBRTC_INT16_MIN;
+}
+/*- End of function --------------------------------------------------------*/
+
+static void block4(g722_decode_state_t *s, int band, int d);
+
+/* "Block 4" of the G.722 decoder: update the adaptive-predictor state of
+ * one band from the newly decoded quantizer difference signal |d|.  The
+ * section labels below (RECONS, PARREC, UPPOL2, ...) follow the block
+ * names used in the ITU-T G.722 specification. */
+static void block4(g722_decode_state_t *s, int band, int d)
+{
+    int wd1;
+    int wd2;
+    int wd3;
+    int i;
+
+    /* Block 4, RECONS */
+    s->band[band].d[0] = d;
+    s->band[band].r[0] = saturate(s->band[band].s + d);
+
+    /* Block 4, PARREC */
+    s->band[band].p[0] = saturate(s->band[band].sz + d);
+
+    /* Block 4, UPPOL2 */
+    for (i = 0;  i < 3;  i++)
+        s->band[band].sg[i] = s->band[band].p[i] >> 15;
+    wd1 = saturate(s->band[band].a[1] << 2);
+
+    wd2 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  -wd1  :  wd1;
+    if (wd2 > 32767)
+        wd2 = 32767;
+    wd3 = (s->band[band].sg[0] == s->band[band].sg[2])  ?  128  :  -128;
+    wd3 += (wd2 >> 7);
+    wd3 += (s->band[band].a[2]*32512) >> 15;
+    if (wd3 > 12288)
+        wd3 = 12288;
+    else if (wd3 < -12288)
+        wd3 = -12288;
+    s->band[band].ap[2] = wd3;
+
+    /* Block 4, UPPOL1 */
+    s->band[band].sg[0] = s->band[band].p[0] >> 15;
+    s->band[band].sg[1] = s->band[band].p[1] >> 15;
+    wd1 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  192  :  -192;
+    wd2 = (s->band[band].a[1]*32640) >> 15;
+
+    s->band[band].ap[1] = saturate(wd1 + wd2);
+    wd3 = saturate(15360 - s->band[band].ap[2]);
+    if (s->band[band].ap[1] > wd3)
+        s->band[band].ap[1] = wd3;
+    else if (s->band[band].ap[1] < -wd3)
+        s->band[band].ap[1] = -wd3;
+
+    /* Block 4, UPZERO */
+    wd1 = (d == 0)  ?  0  :  128;
+    s->band[band].sg[0] = d >> 15;
+    for (i = 1;  i < 7;  i++)
+    {
+        s->band[band].sg[i] = s->band[band].d[i] >> 15;
+        wd2 = (s->band[band].sg[i] == s->band[band].sg[0])  ?  wd1  :  -wd1;
+        wd3 = (s->band[band].b[i]*32640) >> 15;
+        s->band[band].bp[i] = saturate(wd2 + wd3);
+    }
+
+    /* Block 4, DELAYA */
+    for (i = 6;  i > 0;  i--)
+    {
+        s->band[band].d[i] = s->band[band].d[i - 1];
+        s->band[band].b[i] = s->band[band].bp[i];
+    }
+    
+    for (i = 2;  i > 0;  i--)
+    {
+        s->band[band].r[i] = s->band[band].r[i - 1];
+        s->band[band].p[i] = s->band[band].p[i - 1];
+        s->band[band].a[i] = s->band[band].ap[i];
+    }
+
+    /* Block 4, FILTEP */
+    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
+    wd1 = (s->band[band].a[1]*wd1) >> 15;
+    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
+    wd2 = (s->band[band].a[2]*wd2) >> 15;
+    s->band[band].sp = saturate(wd1 + wd2);
+
+    /* Block 4, FILTEZ */
+    s->band[band].sz = 0;
+    for (i = 6;  i > 0;  i--)
+    {
+        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
+        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
+    }
+    s->band[band].sz = saturate(s->band[band].sz);
+
+    /* Block 4, PREDIC */
+    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
+}
+/*- End of function --------------------------------------------------------*/
+
+/* Initialize (or allocate and initialize, when |s| is NULL) a G.722
+ * decoder state for the given bit |rate| and option flags.  Returns the
+ * state pointer, or NULL if allocation fails. */
+g722_decode_state_t *WebRtc_g722_decode_init(g722_decode_state_t *s,
+                                             int rate,
+                                             int options)
+{
+    if (s == NULL)
+    {
+        if ((s = (g722_decode_state_t *) malloc(sizeof(*s))) == NULL)
+            return NULL;
+    }
+    memset(s, 0, sizeof(*s));
+    /* 48000 -> 6 bits/sample, 56000 -> 7, anything else -> 8 (64 kbps). */
+    if (rate == 48000)
+        s->bits_per_sample = 6;
+    else if (rate == 56000)
+        s->bits_per_sample = 7;
+    else
+        s->bits_per_sample = 8;
+    if ((options & G722_SAMPLE_RATE_8000))
+        s->eight_k = TRUE;
+    /* Packed mode only makes sense for sub-8-bit code words. */
+    if ((options & G722_PACKED)  &&  s->bits_per_sample != 8)
+        s->packed = TRUE;
+    else
+        s->packed = FALSE;
+    s->band[0].det = 32;
+    s->band[1].det = 8;
+    return s;
+}
+/*- End of function --------------------------------------------------------*/
+
+/* Free a decoder state previously allocated by WebRtc_g722_decode_init.
+ * Always returns 0; free(NULL) is a harmless no-op. */
+int WebRtc_g722_decode_release(g722_decode_state_t *s)
+{
+    free(s);
+    return 0;
+}
+/*- End of function --------------------------------------------------------*/
+
+/* Decode |len| bytes of G.722 data into 16-bit linear PCM in |amp|.
+ * Returns the number of output samples written.  The quantizer tables and
+ * block structure follow ITU-T G.722; in 8 kHz mode only the low band is
+ * emitted, otherwise the receive QMF combines both bands. */
+int WebRtc_g722_decode(g722_decode_state_t *s, WebRtc_Word16 amp[],
+                       const WebRtc_UWord8 g722_data[], int len)
+{
+    static const int wl[8] = {-60, -30, 58, 172, 334, 538, 1198, 3042 };
+    static const int rl42[16] = {0, 7, 6, 5, 4, 3, 2, 1,
+                                 7, 6, 5, 4, 3,  2, 1, 0 };
+    static const int ilb[32] =
+    {
+        2048, 2093, 2139, 2186, 2233, 2282, 2332,
+        2383, 2435, 2489, 2543, 2599, 2656, 2714,
+        2774, 2834, 2896, 2960, 3025, 3091, 3158,
+        3228, 3298, 3371, 3444, 3520, 3597, 3676,
+        3756, 3838, 3922, 4008
+    };
+    static const int wh[3] = {0, -214, 798};
+    static const int rh2[4] = {2, 1, 2, 1};
+    static const int qm2[4] = {-7408, -1616,  7408,   1616};
+    static const int qm4[16] = 
+    {
+              0, -20456, -12896,  -8968, 
+          -6288,  -4240,  -2584,  -1200,
+          20456,  12896,   8968,   6288,
+           4240,   2584,   1200,      0
+    };
+    static const int qm5[32] =
+    {
+           -280,   -280, -23352, -17560,
+         -14120, -11664,  -9752,  -8184,
+          -6864,  -5712,  -4696,  -3784,
+          -2960,  -2208,  -1520,   -880,
+          23352,  17560,  14120,  11664,
+           9752,   8184,   6864,   5712,
+           4696,   3784,   2960,   2208,
+           1520,    880,    280,   -280
+    };
+    static const int qm6[64] =
+    {
+           -136,   -136,   -136,   -136,
+         -24808, -21904, -19008, -16704,
+         -14984, -13512, -12280, -11192,
+         -10232,  -9360,  -8576,  -7856,
+          -7192,  -6576,  -6000,  -5456,
+          -4944,  -4464,  -4008,  -3576,
+          -3168,  -2776,  -2400,  -2032,
+          -1688,  -1360,  -1040,   -728,
+          24808,  21904,  19008,  16704,
+          14984,  13512,  12280,  11192,
+          10232,   9360,   8576,   7856,
+           7192,   6576,   6000,   5456,
+           4944,   4464,   4008,   3576,
+           3168,   2776,   2400,   2032,
+           1688,   1360,   1040,    728,
+            432,    136,   -432,   -136
+    };
+    static const int qmf_coeffs[12] =
+    {
+           3,  -11,   12,   32, -210,  951, 3876, -805,  362, -156,   53,  -11,
+    };
+
+    int dlowt;
+    int rlow;
+    int ihigh;
+    int dhigh;
+    int rhigh;
+    int xout1;
+    int xout2;
+    int wd1;
+    int wd2;
+    int wd3;
+    int code;
+    int outlen;
+    int i;
+    int j;
+
+    outlen = 0;
+    rhigh = 0;
+    for (j = 0;  j < len;  )
+    {
+        if (s->packed)
+        {
+            /* Unpack the code bits */
+            if (s->in_bits < s->bits_per_sample)
+            {
+                s->in_buffer |= (g722_data[j++] << s->in_bits);
+                s->in_bits += 8;
+            }
+            code = s->in_buffer & ((1 << s->bits_per_sample) - 1);
+            s->in_buffer >>= s->bits_per_sample;
+            s->in_bits -= s->bits_per_sample;
+        }
+        else
+        {
+            code = g722_data[j++];
+        }
+
+        /* Split the code word into the low-band index (wd1) and the
+         * 2-bit high-band index (ihigh); the split point depends on the
+         * configured bits per sample. */
+        switch (s->bits_per_sample)
+        {
+        default:
+        case 8:
+            wd1 = code & 0x3F;
+            ihigh = (code >> 6) & 0x03;
+            wd2 = qm6[wd1];
+            wd1 >>= 2;
+            break;
+        case 7:
+            wd1 = code & 0x1F;
+            ihigh = (code >> 5) & 0x03;
+            wd2 = qm5[wd1];
+            wd1 >>= 1;
+            break;
+        case 6:
+            wd1 = code & 0x0F;
+            ihigh = (code >> 4) & 0x03;
+            wd2 = qm4[wd1];
+            break;
+        }
+        /* Block 5L, LOW BAND INVQBL */
+        wd2 = (s->band[0].det*wd2) >> 15;
+        /* Block 5L, RECONS */
+        rlow = s->band[0].s + wd2;
+        /* Block 6L, LIMIT */
+        if (rlow > 16383)
+            rlow = 16383;
+        else if (rlow < -16384)
+            rlow = -16384;
+
+        /* Block 2L, INVQAL */
+        wd2 = qm4[wd1];
+        dlowt = (s->band[0].det*wd2) >> 15;
+
+        /* Block 3L, LOGSCL */
+        wd2 = rl42[wd1];
+        wd1 = (s->band[0].nb*127) >> 7;
+        wd1 += wl[wd2];
+        if (wd1 < 0)
+            wd1 = 0;
+        else if (wd1 > 18432)
+            wd1 = 18432;
+        s->band[0].nb = wd1;
+            
+        /* Block 3L, SCALEL */
+        wd1 = (s->band[0].nb >> 6) & 31;
+        wd2 = 8 - (s->band[0].nb >> 11);
+        wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+        s->band[0].det = wd3 << 2;
+
+        block4(s, 0, dlowt);
+        
+        if (!s->eight_k)
+        {
+            /* Block 2H, INVQAH */
+            wd2 = qm2[ihigh];
+            dhigh = (s->band[1].det*wd2) >> 15;
+            /* Block 5H, RECONS */
+            rhigh = dhigh + s->band[1].s;
+            /* Block 6H, LIMIT */
+            if (rhigh > 16383)
+                rhigh = 16383;
+            else if (rhigh < -16384)
+                rhigh = -16384;
+
+            /* Block 2H, INVQAH */
+            wd2 = rh2[ihigh];
+            wd1 = (s->band[1].nb*127) >> 7;
+            wd1 += wh[wd2];
+            if (wd1 < 0)
+                wd1 = 0;
+            else if (wd1 > 22528)
+                wd1 = 22528;
+            s->band[1].nb = wd1;
+            
+            /* Block 3H, SCALEH */
+            wd1 = (s->band[1].nb >> 6) & 31;
+            wd2 = 10 - (s->band[1].nb >> 11);
+            wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+            s->band[1].det = wd3 << 2;
+
+            block4(s, 1, dhigh);
+        }
+
+        if (s->itu_test_mode)
+        {
+            amp[outlen++] = (WebRtc_Word16) (rlow << 1);
+            amp[outlen++] = (WebRtc_Word16) (rhigh << 1);
+        }
+        else
+        {
+            if (s->eight_k)
+            {
+                amp[outlen++] = (WebRtc_Word16) (rlow << 1);
+            }
+            else
+            {
+                /* Apply the receive QMF */
+                for (i = 0;  i < 22;  i++)
+                    s->x[i] = s->x[i + 2];
+                s->x[22] = rlow + rhigh;
+                s->x[23] = rlow - rhigh;
+
+                xout1 = 0;
+                xout2 = 0;
+                for (i = 0;  i < 12;  i++)
+                {
+                    xout2 += s->x[2*i]*qmf_coeffs[i];
+                    xout1 += s->x[2*i + 1]*qmf_coeffs[11 - i];
+                }
+                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), less 1
+                   to allow for the 15 bit input to the G.722 algorithm. */
+                /* WebRtc, tlegrand: added saturation */
+                amp[outlen++] = saturate(xout1 >> 11);
+                amp[outlen++] = saturate(xout2 >> 11);
+            }
+        }
+    }
+    return outlen;
+}
+/*- End of function --------------------------------------------------------*/
+/*- End of file ------------------------------------------------------------*/
diff --git a/trunk/src/modules/audio_coding/codecs/g722/g722_enc_dec.h b/trunk/src/modules/audio_coding/codecs/g722/g722_enc_dec.h
new file mode 100644
index 0000000..d2d19b0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/g722_enc_dec.h
@@ -0,0 +1,158 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g722.h - The ITU G.722 codec.
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2005 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place my own contributions 
+ *  to this code in the public domain for the benefit of all mankind -
+ *  even the slimy ones who might try to proprietize my work and use it
+ *  to my detriment.
+ *
+ * Based on a single channel G.722 codec which is:
+ *
+ *****    Copyright (c) CMU    1993      *****
+ * Computer Science, Speech Group
+ * Chengxiang Lu and Alex Hauptmann
+ *
+ * $Id: g722.h,v 1.10 2006/06/16 12:45:53 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Changed to use WebRtc types
+ * -Added new defines for minimum and maximum values of short int
+ */
+
+
+/*! \file */
+
+#if !defined(_G722_ENC_DEC_H_)
+#define _G722_ENC_DEC_H_
+
+/*! \page g722_page G.722 encoding and decoding
+\section g722_page_sec_1 What does it do?
+The G.722 module is a bit exact implementation of the ITU G.722 specification for all three
+specified bit rates - 64000bps, 56000bps and 48000bps. It passes the ITU tests.
+
+To allow fast and flexible interworking with narrow band telephony, the encoder and decoder
+support an option for the linear audio to be an 8k samples/second stream. In this mode the
+codec is considerably faster, and still fully compatible with wideband terminals using G.722.
+
+\section g722_page_sec_2 How does it work?
+???.
+*/
+
+#define WEBRTC_INT16_MAX 32767
+#define WEBRTC_INT16_MIN -32768
+
+enum
+{
+    G722_SAMPLE_RATE_8000 = 0x0001,
+    G722_PACKED = 0x0002
+};
+
+typedef struct
+{
+    /*! TRUE if operating in the special ITU test mode, with the band split filters
+             disabled. */
+    int itu_test_mode;
+    /*! TRUE if the G.722 data is packed */
+    int packed;
+    /*! TRUE if encode from 8k samples/second */
+    int eight_k;
+    /*! 6 for 48000bps, 7 for 56000bps, or 8 for 64000bps. */
+    int bits_per_sample;
+
+    /*! Signal history for the QMF */
+    int x[24];
+
+    struct
+    {
+        int s;
+        int sp;
+        int sz;
+        int r[3];
+        int a[3];
+        int ap[3];
+        int p[3];
+        int d[7];
+        int b[7];
+        int bp[7];
+        int sg[7];
+        int nb;
+        int det;
+    } band[2];
+
+    unsigned int in_buffer;
+    int in_bits;
+    unsigned int out_buffer;
+    int out_bits;
+} g722_encode_state_t;
+
+typedef struct
+{
+    /*! TRUE if operating in the special ITU test mode, with the band split filters
+             disabled. */
+    int itu_test_mode;
+    /*! TRUE if the G.722 data is packed */
+    int packed;
+    /*! TRUE if decode to 8k samples/second */
+    int eight_k;
+    /*! 6 for 48000bps, 7 for 56000bps, or 8 for 64000bps. */
+    int bits_per_sample;
+
+    /*! Signal history for the QMF */
+    int x[24];
+
+    struct
+    {
+        int s;
+        int sp;
+        int sz;
+        int r[3];
+        int a[3];
+        int ap[3];
+        int p[3];
+        int d[7];
+        int b[7];
+        int bp[7];
+        int sg[7];
+        int nb;
+        int det;
+    } band[2];
+    
+    unsigned int in_buffer;
+    int in_bits;
+    unsigned int out_buffer;
+    int out_bits;
+} g722_decode_state_t;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+g722_encode_state_t *WebRtc_g722_encode_init(g722_encode_state_t *s,
+                                             int rate,
+                                             int options);
+int WebRtc_g722_encode_release(g722_encode_state_t *s);
+int WebRtc_g722_encode(g722_encode_state_t *s,
+                       WebRtc_UWord8 g722_data[],
+                       const WebRtc_Word16 amp[],
+                       int len);
+
+g722_decode_state_t *WebRtc_g722_decode_init(g722_decode_state_t *s,
+                                             int rate,
+                                             int options);
+int WebRtc_g722_decode_release(g722_decode_state_t *s);
+int WebRtc_g722_decode(g722_decode_state_t *s,
+                       WebRtc_Word16 amp[],
+                       const WebRtc_UWord8 g722_data[],
+                       int len);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/g722/g722_encode.c b/trunk/src/modules/audio_coding/codecs/g722/g722_encode.c
new file mode 100644
index 0000000..7487b64
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/g722_encode.c
@@ -0,0 +1,434 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g722_encode.c - The ITU G.722 codec, encode part.
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2005 Steve Underwood
+ *
+ * All rights reserved.
+ *
+ *  Despite my general liking of the GPL, I place my own contributions 
+ *  to this code in the public domain for the benefit of all mankind -
+ *  even the slimy ones who might try to proprietize my work and use it
+ *  to my detriment.
+ *
+ * Based on a single channel 64kbps only G.722 codec which is:
+ *
+ *****    Copyright (c) CMU    1993      *****
+ * Computer Science, Speech Group
+ * Chengxiang Lu and Alex Hauptmann
+ *
+ * $Id: g722_encode.c,v 1.14 2006/07/07 16:37:49 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Removed usage of inttypes.h and tgmath.h
+ * -Changed to use WebRtc types
+ * -Added option to run encoder bitexact with ITU-T reference implementation
+ */
+
+/*! \file */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <stdio.h>
+#include <memory.h>
+#include <stdlib.h>
+
+#include "typedefs.h"
+#include "g722_enc_dec.h"
+
+#if !defined(FALSE)
+#define FALSE 0
+#endif
+#if !defined(TRUE)
+#define TRUE (!FALSE)
+#endif
+
+static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
+{
+    WebRtc_Word16 amp16;
+
+    /* Hopefully this is optimised for the common case - not clipping */
+    amp16 = (WebRtc_Word16) amp;
+    if (amp == amp16)
+        return amp16;
+    if (amp > WEBRTC_INT16_MAX)
+        return  WEBRTC_INT16_MAX;
+    return  WEBRTC_INT16_MIN;
+}
+/*- End of function --------------------------------------------------------*/
+
+static void block4(g722_encode_state_t *s, int band, int d)
+{
+    int wd1;
+    int wd2;
+    int wd3;
+    int i;
+
+    /* Block 4, RECONS */
+    s->band[band].d[0] = d;
+    s->band[band].r[0] = saturate(s->band[band].s + d);
+
+    /* Block 4, PARREC */
+    s->band[band].p[0] = saturate(s->band[band].sz + d);
+
+    /* Block 4, UPPOL2 */
+    for (i = 0;  i < 3;  i++)
+        s->band[band].sg[i] = s->band[band].p[i] >> 15;
+    wd1 = saturate(s->band[band].a[1] << 2);
+
+    wd2 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  -wd1  :  wd1;
+    if (wd2 > 32767)
+        wd2 = 32767;
+    wd3 = (wd2 >> 7) + ((s->band[band].sg[0] == s->band[band].sg[2])  ?  128  :  -128);
+    wd3 += (s->band[band].a[2]*32512) >> 15;
+    if (wd3 > 12288)
+        wd3 = 12288;
+    else if (wd3 < -12288)
+        wd3 = -12288;
+    s->band[band].ap[2] = wd3;
+
+    /* Block 4, UPPOL1 */
+    s->band[band].sg[0] = s->band[band].p[0] >> 15;
+    s->band[band].sg[1] = s->band[band].p[1] >> 15;
+    wd1 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  192  :  -192;
+    wd2 = (s->band[band].a[1]*32640) >> 15;
+
+    s->band[band].ap[1] = saturate(wd1 + wd2);
+    wd3 = saturate(15360 - s->band[band].ap[2]);
+    if (s->band[band].ap[1] > wd3)
+        s->band[band].ap[1] = wd3;
+    else if (s->band[band].ap[1] < -wd3)
+        s->band[band].ap[1] = -wd3;
+
+    /* Block 4, UPZERO */
+    wd1 = (d == 0)  ?  0  :  128;
+    s->band[band].sg[0] = d >> 15;
+    for (i = 1;  i < 7;  i++)
+    {
+        s->band[band].sg[i] = s->band[band].d[i] >> 15;
+        wd2 = (s->band[band].sg[i] == s->band[band].sg[0])  ?  wd1  :  -wd1;
+        wd3 = (s->band[band].b[i]*32640) >> 15;
+        s->band[band].bp[i] = saturate(wd2 + wd3);
+    }
+
+    /* Block 4, DELAYA */
+    for (i = 6;  i > 0;  i--)
+    {
+        s->band[band].d[i] = s->band[band].d[i - 1];
+        s->band[band].b[i] = s->band[band].bp[i];
+    }
+    
+    for (i = 2;  i > 0;  i--)
+    {
+        s->band[band].r[i] = s->band[band].r[i - 1];
+        s->band[band].p[i] = s->band[band].p[i - 1];
+        s->band[band].a[i] = s->band[band].ap[i];
+    }
+
+    /* Block 4, FILTEP */
+    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
+    wd1 = (s->band[band].a[1]*wd1) >> 15;
+    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
+    wd2 = (s->band[band].a[2]*wd2) >> 15;
+    s->band[band].sp = saturate(wd1 + wd2);
+
+    /* Block 4, FILTEZ */
+    s->band[band].sz = 0;
+    for (i = 6;  i > 0;  i--)
+    {
+        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
+        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
+    }
+    s->band[band].sz = saturate(s->band[band].sz);
+
+    /* Block 4, PREDIC */
+    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
+}
+/*- End of function --------------------------------------------------------*/
+
+g722_encode_state_t *WebRtc_g722_encode_init(g722_encode_state_t *s,
+                                             int rate, int options)
+{
+    if (s == NULL)
+    {
+        if ((s = (g722_encode_state_t *) malloc(sizeof(*s))) == NULL)
+            return NULL;
+    }
+    memset(s, 0, sizeof(*s));
+    if (rate == 48000)
+        s->bits_per_sample = 6;
+    else if (rate == 56000)
+        s->bits_per_sample = 7;
+    else
+        s->bits_per_sample = 8;
+    if ((options & G722_SAMPLE_RATE_8000))
+        s->eight_k = TRUE;
+    if ((options & G722_PACKED)  &&  s->bits_per_sample != 8)
+        s->packed = TRUE;
+    else
+        s->packed = FALSE;
+    s->band[0].det = 32;
+    s->band[1].det = 8;
+    return s;
+}
+/*- End of function --------------------------------------------------------*/
+
+int WebRtc_g722_encode_release(g722_encode_state_t *s)
+{
+    free(s);
+    return 0;
+}
+/*- End of function --------------------------------------------------------*/
+
+/* WebRtc, tlegrand:
+ * Only define the following if bit-exactness with reference implementation
+ * is needed. Will only have any effect if input signal is saturated.
+ */
+//#define RUN_LIKE_REFERENCE_G722
+#ifdef RUN_LIKE_REFERENCE_G722
+WebRtc_Word16 limitValues (WebRtc_Word16 rl)
+{
+
+    WebRtc_Word16 yl;
+
+    yl = (rl > 16383) ? 16383 : ((rl < -16384) ? -16384 : rl);
+
+    return (yl);
+}
+#endif
+
+int WebRtc_g722_encode(g722_encode_state_t *s, WebRtc_UWord8 g722_data[],
+                       const WebRtc_Word16 amp[], int len)
+{
+    static const int q6[32] =
+    {
+           0,   35,   72,  110,  150,  190,  233,  276,
+         323,  370,  422,  473,  530,  587,  650,  714,
+         786,  858,  940, 1023, 1121, 1219, 1339, 1458,
+        1612, 1765, 1980, 2195, 2557, 2919,    0,    0
+    };
+    static const int iln[32] =
+    {
+         0, 63, 62, 31, 30, 29, 28, 27,
+        26, 25, 24, 23, 22, 21, 20, 19,
+        18, 17, 16, 15, 14, 13, 12, 11,
+        10,  9,  8,  7,  6,  5,  4,  0
+    };
+    static const int ilp[32] =
+    {
+         0, 61, 60, 59, 58, 57, 56, 55,
+        54, 53, 52, 51, 50, 49, 48, 47,
+        46, 45, 44, 43, 42, 41, 40, 39,
+        38, 37, 36, 35, 34, 33, 32,  0
+    };
+    static const int wl[8] =
+    {
+        -60, -30, 58, 172, 334, 538, 1198, 3042
+    };
+    static const int rl42[16] =
+    {
+        0, 7, 6, 5, 4, 3, 2, 1, 7, 6, 5, 4, 3, 2, 1, 0
+    };
+    static const int ilb[32] =
+    {
+        2048, 2093, 2139, 2186, 2233, 2282, 2332,
+        2383, 2435, 2489, 2543, 2599, 2656, 2714,
+        2774, 2834, 2896, 2960, 3025, 3091, 3158,
+        3228, 3298, 3371, 3444, 3520, 3597, 3676,
+        3756, 3838, 3922, 4008
+    };
+    static const int qm4[16] =
+    {
+             0, -20456, -12896, -8968,
+         -6288,  -4240,  -2584, -1200,
+         20456,  12896,   8968,  6288,
+          4240,   2584,   1200,     0
+    };
+    static const int qm2[4] =
+    {
+        -7408,  -1616,   7408,   1616
+    };
+    static const int qmf_coeffs[12] =
+    {
+           3,  -11,   12,   32, -210,  951, 3876, -805,  362, -156,   53,  -11,
+    };
+    static const int ihn[3] = {0, 1, 0};
+    static const int ihp[3] = {0, 3, 2};
+    static const int wh[3] = {0, -214, 798};
+    static const int rh2[4] = {2, 1, 2, 1};
+
+    int dlow;
+    int dhigh;
+    int el;
+    int wd;
+    int wd1;
+    int ril;
+    int wd2;
+    int il4;
+    int ih2;
+    int wd3;
+    int eh;
+    int mih;
+    int i;
+    int j;
+    /* Low and high band PCM from the QMF */
+    int xlow;
+    int xhigh;
+    int g722_bytes;
+    /* Even and odd tap accumulators */
+    int sumeven;
+    int sumodd;
+    int ihigh;
+    int ilow;
+    int code;
+
+    g722_bytes = 0;
+    xhigh = 0;
+    for (j = 0;  j < len;  )
+    {
+        if (s->itu_test_mode)
+        {
+            xlow =
+            xhigh = amp[j++] >> 1;
+        }
+        else
+        {
+            if (s->eight_k)
+            {
+                /* We shift by 1 to allow for the 15 bit input to the G.722 algorithm. */
+                xlow = amp[j++] >> 1;
+            }
+            else
+            {
+                /* Apply the transmit QMF */
+                /* Shuffle the buffer down */
+                for (i = 0;  i < 22;  i++)
+                    s->x[i] = s->x[i + 2];
+                s->x[22] = amp[j++];
+                s->x[23] = amp[j++];
+    
+                /* Discard every other QMF output */
+                sumeven = 0;
+                sumodd = 0;
+                for (i = 0;  i < 12;  i++)
+                {
+                    sumodd += s->x[2*i]*qmf_coeffs[i];
+                    sumeven += s->x[2*i + 1]*qmf_coeffs[11 - i];
+                }
+                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), plus 1
+                   to allow for us summing two filters, plus 1 to allow for the 15 bit
+                   input to the G.722 algorithm. */
+                xlow = (sumeven + sumodd) >> 14;
+                xhigh = (sumeven - sumodd) >> 14;
+
+#ifdef RUN_LIKE_REFERENCE_G722
+                /* The following lines are only used to verify bit-exactness
+                 * with reference implementation of G.722. Higher precision
+                 * is achieved without limiting the values.
+                 */
+                xlow = limitValues(xlow);
+                xhigh = limitValues(xhigh);
+#endif
+            }
+        }
+        /* Block 1L, SUBTRA */
+        el = saturate(xlow - s->band[0].s);
+
+        /* Block 1L, QUANTL */
+        wd = (el >= 0)  ?  el  :  -(el + 1);
+
+        for (i = 1;  i < 30;  i++)
+        {
+            wd1 = (q6[i]*s->band[0].det) >> 12;
+            if (wd < wd1)
+                break;
+        }
+        ilow = (el < 0)  ?  iln[i]  :  ilp[i];
+
+        /* Block 2L, INVQAL */
+        ril = ilow >> 2;
+        wd2 = qm4[ril];
+        dlow = (s->band[0].det*wd2) >> 15;
+
+        /* Block 3L, LOGSCL */
+        il4 = rl42[ril];
+        wd = (s->band[0].nb*127) >> 7;
+        s->band[0].nb = wd + wl[il4];
+        if (s->band[0].nb < 0)
+            s->band[0].nb = 0;
+        else if (s->band[0].nb > 18432)
+            s->band[0].nb = 18432;
+
+        /* Block 3L, SCALEL */
+        wd1 = (s->band[0].nb >> 6) & 31;
+        wd2 = 8 - (s->band[0].nb >> 11);
+        wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+        s->band[0].det = wd3 << 2;
+
+        block4(s, 0, dlow);
+        
+        if (s->eight_k)
+        {
+            /* Just leave the high bits as zero */
+            code = (0xC0 | ilow) >> (8 - s->bits_per_sample);
+        }
+        else
+        {
+            /* Block 1H, SUBTRA */
+            eh = saturate(xhigh - s->band[1].s);
+
+            /* Block 1H, QUANTH */
+            wd = (eh >= 0)  ?  eh  :  -(eh + 1);
+            wd1 = (564*s->band[1].det) >> 12;
+            mih = (wd >= wd1)  ?  2  :  1;
+            ihigh = (eh < 0)  ?  ihn[mih]  :  ihp[mih];
+
+            /* Block 2H, INVQAH */
+            wd2 = qm2[ihigh];
+            dhigh = (s->band[1].det*wd2) >> 15;
+
+            /* Block 3H, LOGSCH */
+            ih2 = rh2[ihigh];
+            wd = (s->band[1].nb*127) >> 7;
+            s->band[1].nb = wd + wh[ih2];
+            if (s->band[1].nb < 0)
+                s->band[1].nb = 0;
+            else if (s->band[1].nb > 22528)
+                s->band[1].nb = 22528;
+
+            /* Block 3H, SCALEH */
+            wd1 = (s->band[1].nb >> 6) & 31;
+            wd2 = 10 - (s->band[1].nb >> 11);
+            wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+            s->band[1].det = wd3 << 2;
+
+            block4(s, 1, dhigh);
+            code = ((ihigh << 6) | ilow) >> (8 - s->bits_per_sample);
+        }
+
+        if (s->packed)
+        {
+            /* Pack the code bits */
+            s->out_buffer |= (code << s->out_bits);
+            s->out_bits += s->bits_per_sample;
+            if (s->out_bits >= 8)
+            {
+                g722_data[g722_bytes++] = (WebRtc_UWord8) (s->out_buffer & 0xFF);
+                s->out_bits -= 8;
+                s->out_buffer >>= 8;
+            }
+        }
+        else
+        {
+            g722_data[g722_bytes++] = (WebRtc_UWord8) code;
+        }
+    }
+    return g722_bytes;
+}
+/*- End of function --------------------------------------------------------*/
+/*- End of file ------------------------------------------------------------*/
diff --git a/trunk/src/modules/audio_coding/codecs/g722/g722_interface.c b/trunk/src/modules/audio_coding/codecs/g722/g722_interface.c
new file mode 100644
index 0000000..d559014
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/g722_interface.c
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+
+#include <stdlib.h>
+#include <string.h>
+#include "g722_interface.h"
+#include "g722_enc_dec.h"
+#include "typedefs.h"
+
+
+WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst)
+{
+    *G722enc_inst=(G722EncInst*)malloc(sizeof(g722_encode_state_t));
+    if (*G722enc_inst!=NULL) {
+      return(0);
+    } else {
+      return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
+{
+    // Create and/or reset the G.722 encoder
+    // Bitrate 64 kbps and wideband mode (2)
+    G722enc_inst = (G722EncInst *) WebRtc_g722_encode_init(
+        (g722_encode_state_t*) G722enc_inst, 64000, 2);
+    if (G722enc_inst == NULL) {
+        return -1;
+    } else {
+        return 0;
+    }
+}
+
+WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
+{
+    // Free encoder memory
+    return WebRtc_g722_encode_release((g722_encode_state_t*) G722enc_inst);
+}
+
+WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
+                                WebRtc_Word16 *speechIn,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *encoded)
+{
+    unsigned char *codechar = (unsigned char*) encoded;
+    // Encode the input speech vector
+    return WebRtc_g722_encode((g722_encode_state_t*) G722enc_inst,
+                       codechar, speechIn, len);
+}
+
+WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
+{
+    *G722dec_inst=(G722DecInst*)malloc(sizeof(g722_decode_state_t));
+    if (*G722dec_inst!=NULL) {
+      return(0);
+    } else {
+      return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst)
+{
+    // Create and/or reset the G.722 decoder
+    // Bitrate 64 kbps and wideband mode (2)
+    G722dec_inst = (G722DecInst *) WebRtc_g722_decode_init(
+        (g722_decode_state_t*) G722dec_inst, 64000, 2);
+    if (G722dec_inst == NULL) {
+        return -1;
+    } else {
+        return 0;
+    }
+}
+
+WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
+{
+    // Free decoder memory
+    return WebRtc_g722_decode_release((g722_decode_state_t*) G722dec_inst);
+}
+
+WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
+                                WebRtc_Word16 *encoded,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *decoded,
+                                WebRtc_Word16 *speechType)
+{
+    // Decode the G.722 encoder stream
+    *speechType=G722_WEBRTC_SPEECH;
+    return WebRtc_g722_decode((g722_decode_state_t*) G722dec_inst,
+                              decoded, (WebRtc_UWord8*) encoded, len);
+}
+
+WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len)
+{
+    // Get version string
+    char version[30] = "2.0.0\n";
+    if (strlen(version) < (unsigned int)len)
+    {
+        strcpy(versionStr, version);
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
diff --git a/trunk/src/modules/audio_coding/codecs/g722/g722_unittest.cc b/trunk/src/modules/audio_coding/codecs/g722/g722_unittest.cc
new file mode 100644
index 0000000..a828edd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/g722_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "g722_interface.h"
+#include "gtest/gtest.h"
+
+TEST(G722Test, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/modules/audio_coding/codecs/g722/include/g722_interface.h b/trunk/src/modules/audio_coding/codecs/g722/include/g722_interface.h
new file mode 100644
index 0000000..e50d66f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/include/g722_interface.h
@@ -0,0 +1,190 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_
+#define MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_
+
+#include "typedefs.h"
+
+/*
+ * Solution to support multiple instances
+ */
+
+typedef struct WebRtcG722EncInst    G722EncInst;
+typedef struct WebRtcG722DecInst    G722DecInst;
+
+/*
+ * Comfort noise constants
+ */
+
+#define G722_WEBRTC_SPEECH     1
+#define G722_WEBRTC_CNG        2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/****************************************************************************
+ * WebRtcG722_CreateEncoder(...)
+ *
+ * Create memory used for G722 encoder
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance for encoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_EncoderInit(...)
+ *
+ * This function initializes a G722 instance
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance, i.e. the instance that should
+ *                              be initialized
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_FreeEncoder(...)
+ *
+ * Free the memory used for G722 encoder
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance for encoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
+
+
+
+/****************************************************************************
+ * WebRtcG722_Encode(...)
+ *
+ * This function encodes input speech to G722 encoded data.
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance, i.e. the user that should encode
+ *                              a packet
+ *     - speechIn             : Input speech vector
+ *     - len                  : Samples in speechIn
+ *
+ * Output:
+ *        - encoded           : The encoded data vector
+ *
+ * Return value               : >0 - Length (in bytes) of coded data
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
+                                WebRtc_Word16 *speechIn,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *encoded);
+
+
+/****************************************************************************
+ * WebRtcG722_CreateDecoder(...)
+ *
+ * Create memory used for G722 decoder
+ *
+ * Input:
+ *     - G722dec_inst         : G722 instance for decoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_DecoderInit(...)
+ *
+ * This function initializes a G722 instance
+ *
+ * Input:
+ *     - G722dec_inst      : G722 instance, i.e. the instance that should
+ *                           be initialized
+ *
+ * Return value            :  0 - Ok
+ *                           -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_FreeDecoder(...)
+ *
+ * Free the memory used for G722 decoder
+ *
+ * Input:
+ *     - G722dec_inst         : G722 instance for decoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_Decode(...)
+ *
+ * This function decodes a packet with G722 frame(s). Output speech length
+ * will be a multiple of 80 samples (80*frames/packet).
+ *
+ * Input:
+ *     - G722dec_inst       : G722 instance, i.e. the user that should decode
+ *                            a packet
+ *     - encoded            : Encoded G722 frame(s)
+ *     - len                : Bytes in encoded vector
+ *
+ * Output:
+ *        - decoded         : The decoded vector
+ *      - speechType        : 1 normal, 2 CNG (Since G722 does not have its own
+ *                            DTX/CNG scheme it should always return 1)
+ *
+ * Return value             : >0 - Samples in decoded vector
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
+                                WebRtc_Word16 *encoded,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *decoded,
+                                WebRtc_Word16 *speechType);
+
+/****************************************************************************
+ * WebRtcG722_Version(...)
+ *
+ * Get a string with the current version of the codec
+ */
+
+WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif /* MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/g722/test/testG722.cc b/trunk/src/modules/audio_coding/codecs/g722/test/testG722.cc
new file mode 100644
index 0000000..9ef8f2d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/g722/test/testG722.cc
@@ -0,0 +1,157 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * testG722.cpp : Defines the entry point for the console application.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "typedefs.h"
+
+/* include API */
+#include "g722_interface.h"
+
+/* Runtime statistics */
+#include <time.h>
+#define CLOCKS_PER_SEC_G722  100000
+
+// Forward declaration
+typedef struct WebRtcG722EncInst    G722EncInst;
+typedef struct WebRtcG722DecInst    G722DecInst;
+
+/* function for reading audio data from PCM file */
+int readframe(WebRtc_Word16 *data, FILE *inp, int length)
+{
+    short k, rlen, status = 0;
+
+    rlen = (short)fread(data, sizeof(WebRtc_Word16), length, inp);
+    if (rlen < length) {
+        for (k = rlen; k < length; k++)
+            data[k] = 0;
+        status = 1;
+    }
+
+    return status;
+}
+
+int main(int argc, char* argv[])
+{
+    char inname[60], outbit[40], outname[40];
+    FILE *inp, *outbitp, *outp;
+
+    int framecnt, endfile;
+    WebRtc_Word16 framelength = 160;
+    G722EncInst *G722enc_inst;
+    G722DecInst *G722dec_inst;
+    int err;
+
+    /* Runtime statistics */
+    double starttime;
+    double runtime = 0;
+    double length_file;
+
+    WebRtc_Word16 stream_len = 0;
+    WebRtc_Word16 shortdata[960];
+    WebRtc_Word16 decoded[960];
+    WebRtc_Word16 streamdata[80*3];
+    WebRtc_Word16 speechType[1];
+
+    /* handling wrong input arguments in the command line */
+    if (argc!=5)  {
+        printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("\n");
+        printf("Usage:\n\n");
+        printf("./testG722.exe framelength infile outbitfile outspeechfile \n\n");
+        printf("with:\n");
+        printf("framelength  :    Framelength in samples.\n\n");
+        printf("infile       :    Normal speech input file\n\n");
+        printf("outbitfile   :    Bitstream output file\n\n");
+        printf("outspeechfile:    Speech output file\n\n");
+        exit(0);
+
+    }
+
+    /* Get frame length */
+    framelength = atoi(argv[1]);
+
+    /* Get Input and Output files */
+    sscanf(argv[2], "%s", inname);
+    sscanf(argv[3], "%s", outbit);
+    sscanf(argv[4], "%s", outname);
+
+    if ((inp = fopen(inname,"rb")) == NULL) {
+        printf("  G.722: Cannot read file %s.\n", inname);
+        exit(1);
+    }
+    if ((outbitp = fopen(outbit,"wb")) == NULL) {
+        printf("  G.722: Cannot write file %s.\n", outbit);
+        exit(1);
+    }
+    if ((outp = fopen(outname,"wb")) == NULL) {
+        printf("  G.722: Cannot write file %s.\n", outname);
+        exit(1);
+    }
+    printf("\nInput:%s\nOutput bitstream:%s\nOutput:%s\n", inname, outbit, outname);
+
+    /* Create and init */
+    WebRtcG722_CreateEncoder((G722EncInst **)&G722enc_inst);
+    WebRtcG722_CreateDecoder((G722DecInst **)&G722dec_inst);
+    WebRtcG722_EncoderInit((G722EncInst *)G722enc_inst);
+    WebRtcG722_DecoderInit((G722DecInst *)G722dec_inst);
+
+
+    /* Initialize encoder and decoder */
+    framecnt = 0;
+    endfile = 0;
+    while (endfile == 0) {
+        framecnt++;
+
+        /* Read speech block */
+        endfile = readframe(shortdata, inp, framelength);
+
+        /* Start clock before call to encoder and decoder */
+        starttime = clock()/(double)CLOCKS_PER_SEC_G722;
+
+        /* G.722 encoding + decoding */
+        stream_len = WebRtcG722_Encode((G722EncInst *)G722enc_inst, shortdata, framelength, streamdata);
+        err = WebRtcG722_Decode((G722DecInst *)G722dec_inst, streamdata, stream_len, decoded, speechType);
+
+        /* Stop clock after call to encoder and decoder */
+        runtime += (double)((clock()/(double)CLOCKS_PER_SEC_G722)-starttime);
+
+        if (stream_len < 0 || err < 0) {
+            /* exit if returned with error */
+            printf("Error in encoder/decoder\n");
+        } else {
+            /* Write coded bits to file */
+            fwrite(streamdata,sizeof(short),stream_len/2,outbitp);
+            /* Write coded speech to file */
+            fwrite(decoded,sizeof(short),framelength,outp);
+        }
+    }
+
+    WebRtcG722_FreeEncoder((G722EncInst *)G722enc_inst);
+    WebRtcG722_FreeDecoder((G722DecInst *)G722dec_inst);
+
+    length_file = ((double)framecnt*(double)framelength/16000);
+    printf("\n\nLength of speech file: %.1f s\n", length_file);
+    printf("Time to run G.722:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+    printf("---------------------END----------------------\n");
+
+    fclose(inp);
+    fclose(outbitp);
+    fclose(outp);
+
+    return 0;
+}
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/interface/isacfix.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/interface/isacfix.h
new file mode 100644
index 0000000..28e9429
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/interface/isacfix.h
@@ -0,0 +1,633 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INTERFACE_ISACFIX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INTERFACE_ISACFIX_H_
+
+/*
+ * Define the fixpoint numeric formats
+ */
+#include "typedefs.h"
+
+
+typedef struct {
+  void *dummy;
+} ISACFIX_MainStruct;
+
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+
+  /**************************************************************************
+   * WebRtcIsacfix_AssignSize(...)
+   *
+   *  Functions used when malloc is not allowed
+   *  Output the number of bytes needed to allocate for iSAC struct.
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_AssignSize(int *sizeinbytes);
+
+  /**************************************************************************
+   * WebRtcIsacfix_Assign(...)
+   *
+   * Functions used when malloc is not allowed, it
+   * places a struct at the given address.
+   *
+   * Input:
+   *      - *ISAC_main_inst   : a pointer to the coder instance.
+   *      - ISACFIX_inst_Addr : address of the memory where a space is
+   *                            for iSAC structure.
+   *
+   * Return value             : 0 - Ok
+   *                           -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_Assign(ISACFIX_MainStruct **inst,
+                                     void *ISACFIX_inst_Addr);
+
+  /****************************************************************************
+   * WebRtcIsacfix_Create(...)
+   *
+   * This function creates an ISAC instance, which will contain the state
+   * information for one coding/decoding channel.
+   *
+   * Input:
+   *      - *ISAC_main_inst   : a pointer to the coder instance.
+   *
+   * Return value             : 0 - Ok
+   *                           -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_Free(...)
+   *
+   * This function frees the ISAC instance created at the beginning.
+   *
+   * Input:
+   *      - ISAC_main_inst    : a ISAC instance.
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_EncoderInit(...)
+   *
+   * This function initializes an ISAC instance prior to the encoder calls.
+   *
+   * Input:
+   *     - ISAC_main_inst     : ISAC instance.
+   *     - CodingMode         : 0 - Bit rate and frame length are automatically
+   *                                adjusted to available bandwidth on
+   *                                transmission channel.
+   *                            1 - User sets a frame length and a target bit
+   *                                rate which is taken as the maximum short-term
+   *                                average bit rate.
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
+                                          WebRtc_Word16  CodingMode);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_Encode(...)
+   *
+   * This function encodes 10ms frame(s) and inserts it into a package.
+   * Input speech length has to be 160 samples (10ms). The encoder buffers those
+   * 10ms frames until it reaches the chosen Framesize (480 or 960 samples
+   * corresponding to 30 or 60 ms frames), and then proceeds to the encoding.
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - speechIn          : input speech vector.
+   *
+   * Output:
+   *      - encoded           : the encoded data vector
+   *
+   * Return value             : >0 - Length (in bytes) of coded data
+   *                             0 - The buffer didn't reach the chosen framesize
+   *                                 so it keeps buffering speech samples.
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
+                                     const WebRtc_Word16 *speechIn,
+                                     WebRtc_Word16 *encoded);
+
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_EncodeNb(...)
+   *
+   * This function encodes 10ms narrow band (8 kHz sampling) frame(s) and inserts
+   * it into a package. Input speech length has to be 80 samples (10ms). The encoder
+   * interpolates into wide-band (16 kHz sampling) buffers those
+   * 10ms frames until it reaches the chosen Framesize (480 or 960 wide-band samples
+   * corresponding to 30 or 60 ms frames), and then proceeds to the encoding.
+   *
+   * The function is enabled if WEBRTC_ISAC_FIX_NB_CALLS_ENABLED is defined
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - speechIn          : input speech vector.
+   *
+   * Output:
+   *      - encoded           : the encoded data vector
+   *
+   * Return value             : >0 - Length (in bytes) of coded data
+   *                             0 - The buffer didn't reach the chosen framesize
+   *                                 so it keeps buffering speech samples.
+   *                            -1 - Error
+   */
+
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  WebRtc_Word16 WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+                                       const WebRtc_Word16 *speechIn,
+                                       WebRtc_Word16 *encoded);
+#endif //  WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_DecoderInit(...)
+   *
+   * This function initializes an ISAC instance prior to the decoder calls.
+   *
+   * Input:
+   *  - ISAC_main_inst : ISAC instance.
+   *
+   * Return value
+   *       :  0 - Ok
+   *         -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_UpdateBwEstimate1(...)
+   *
+   * This function updates the estimate of the bandwidth.
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - encoded           : encoded ISAC frame(s).
+   *      - packet_size       : size of the packet.
+   *      - rtp_seq_number    : the RTP number of the packet.
+   *      - arr_ts            : the arrival time of the packet (from NetEq)
+   *                            in samples.
+   *
+   * Return value             : 0 - Ok
+   *                           -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
+                                                const WebRtc_UWord16 *encoded,
+                                                WebRtc_Word32  packet_size,
+                                                WebRtc_UWord16 rtp_seq_number,
+                                                WebRtc_UWord32 arr_ts);
+
+  /****************************************************************************
+   * WebRtcIsacfix_UpdateBwEstimate(...)
+   *
+   * This function updates the estimate of the bandwidth.
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - encoded           : encoded ISAC frame(s).
+   *      - packet_size       : size of the packet.
+   *      - rtp_seq_number    : the RTP number of the packet.
+   *      - send_ts           : the send time of the packet from RTP header,
+   *                            in samples.
+   *      - arr_ts            : the arrival time of the packet (from NetEq)
+   *                            in samples.
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
+                                               const WebRtc_UWord16   *encoded,
+                                               WebRtc_Word32          packet_size,
+                                               WebRtc_UWord16         rtp_seq_number,
+                                               WebRtc_UWord32         send_ts,
+                                               WebRtc_UWord32         arr_ts);
+
+  /****************************************************************************
+   * WebRtcIsacfix_Decode(...)
+   *
+   * This function decodes an ISAC frame. Output speech length
+   * will be a multiple of 480 samples: 480 or 960 samples,
+   * depending on the framesize (30 or 60 ms).
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - encoded           : encoded ISAC frame(s)
+   *      - len               : bytes in encoded vector
+   *
+   * Output:
+   *      - decoded           : The decoded vector
+   *
+   * Return value             : >0 - number of samples in decoded vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
+                                     const WebRtc_UWord16 *encoded,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *decoded,
+                                     WebRtc_Word16 *speechType);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_DecodeNb(...)
+   *
+   * This function decodes a ISAC frame in narrow-band (8 kHz sampling).
+   * Output speech length will be a multiple of 240 samples: 240 or 480 samples,
+   * depending on the framesize (30 or 60 ms).
+   *
+   * The function is enabled if WEBRTC_ISAC_FIX_NB_CALLS_ENABLED is defined
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - encoded           : encoded ISAC frame(s)
+   *      - len               : bytes in encoded vector
+   *
+   * Output:
+   *      - decoded           : The decoded vector
+   *
+   * Return value             : >0 - number of samples in decoded vector
+   *                            -1 - Error
+   */
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  WebRtc_Word16 WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+                                       const WebRtc_UWord16 *encoded,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 *decoded,
+                                       WebRtc_Word16 *speechType);
+#endif //  WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_DecodePlcNb(...)
+   *
+   * This function conducts PLC for ISAC frame(s) in narrow-band (8kHz sampling).
+   * Output speech length  will be "240*noOfLostFrames" samples
+   * that is equivalent to "30*noOfLostFrames" milliseconds.
+   *
+   * The function is enabled if WEBRTC_ISAC_FIX_NB_CALLS_ENABLED is defined
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - noOfLostFrames    : Number of PLC frames (240 sample=30ms) to produce
+   *                            NOTE! Maximum number is 2 (480 samples = 60ms)
+   *
+   * Output:
+   *      - decoded           : The decoded vector
+   *
+   * Return value             : >0 - number of samples in decoded PLC vector
+   *                            -1 - Error
+   */
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  WebRtc_Word16 WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
+                                          WebRtc_Word16 *decoded,
+                                          WebRtc_Word16 noOfLostFrames );
+#endif // WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+
+
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_DecodePlc(...)
+   *
+   * This function conducts PLC for ISAC frame(s) in wide-band (16kHz sampling).
+   * Output speech length  will be "480*noOfLostFrames" samples
+   * that is equivalent to "30*noOfLostFrames" milliseconds.
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - noOfLostFrames    : Number of PLC frames (480sample = 30ms)
+   *                            to produce
+   *                            NOTE! Maximum number is 2 (960 samples = 60ms)
+   *
+   * Output:
+   *      - decoded           : The decoded vector
+   *
+   * Return value             : >0 - number of samples in decoded PLC vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
+                                        WebRtc_Word16 *decoded,
+                                        WebRtc_Word16 noOfLostFrames );
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_ReadFrameLen(...)
+   *
+   * This function returns the length of the frame represented in the packet.
+   *
+   * Input:
+   *      - encoded           : Encoded bitstream
+   *
+   * Output:
+   *      - frameLength       : Length of frame in packet (in samples)
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_ReadFrameLen(const WebRtc_Word16* encoded,
+                                           WebRtc_Word16* frameLength);
+
+  /****************************************************************************
+   * WebRtcIsacfix_Control(...)
+   *
+   * This function sets the limit on the short-term average bit rate and the
+   * frame length. Should be used only in Instantaneous mode.
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - rate              : limit on the short-term average bit rate,
+   *                            in bits/second (between 10000 and 32000)
+   *      - framesize         : number of milliseconds per frame (30 or 60)
+   *
+   * Return value             : 0  - ok
+   *                           -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
+                                      WebRtc_Word16          rate,
+                                      WebRtc_Word16          framesize);
+
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_ControlBwe(...)
+   *
+   * This function sets the initial values of bottleneck and frame-size if
+   * iSAC is used in channel-adaptive mode. Through this API, users can
+   * enforce a frame-size for all values of bottleneck. Then iSAC will not
+   * automatically change the frame-size.
+   *
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - rateBPS           : initial value of bottleneck in bits/second
+   *                            10000 <= rateBPS <= 32000 is accepted
+   *      - frameSizeMs       : number of milliseconds per frame (30 or 60)
+   *      - enforceFrameSize  : 1 to enforce the given frame-size through out
+   *                            the adaptation process, 0 to let iSAC change
+   *                            the frame-size if required.
+   *
+   * Return value             : 0  - ok
+   *                           -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
+                                         WebRtc_Word16 rateBPS,
+                                         WebRtc_Word16 frameSizeMs,
+                                         WebRtc_Word16 enforceFrameSize);
+
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_version(...)
+   *
+   * This function returns the version number.
+   *
+   * Output:
+   *      - version      : Pointer to character string
+   *
+   */
+
+  void WebRtcIsacfix_version(char *version);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_GetErrorCode(...)
+   *
+   * This function can be used to check the error code of an iSAC instance. When
+   * a function returns -1, an error code will be set for that instance. The
+   * function below extracts the code of the last error that occurred in the
+   * specified instance.
+   *
+   * Input:
+   *  - ISAC_main_inst        : ISAC instance
+   *
+   * Return value             : Error code
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_GetUplinkBw(...)
+   *
+   * This function return iSAC send bitrate
+   *
+   * Input:
+   *      - ISAC_main_inst    : iSAC instance
+   *
+   * Return value             : <0 Error code
+   *                            else bitrate
+   */
+
+  WebRtc_Word32 WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_SetMaxPayloadSize(...)
+   *
+   * This function sets a limit for the maximum payload size of iSAC. The same
+   * value is used both for 30 and 60 msec packets.
+   * The absolute max will be valid until next time the function is called.
+   * NOTE! This function may override the function WebRtcIsacfix_SetMaxRate()
+   *
+   * Input:
+   *      - ISAC_main_inst    : iSAC instance
+   *      - maxPayloadBytes   : maximum size of the payload in bytes
+   *                            valid values are between 100 and 400 bytes
+   *
+   *
+   * Return value             : 0 if successful
+   *                           -1 if error happens
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst,
+                                                WebRtc_Word16 maxPayloadBytes);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_SetMaxRate(...)
+   *
+   * This function sets the maximum rate which the codec may not exceed for a
+   * single packet. The maximum rate is set in bits per second.
+   * The codec has an absolute maximum rate of 53400 bits per second (200 bytes
+   * per 30 msec).
+   * It is possible to set a maximum rate between 32000 and 53400 bits per second.
+   *
+   * The rate limit is valid until next time the function is called.
+   *
+   * NOTE! Packet size will never go above the value set if calling
+   * WebRtcIsacfix_SetMaxPayloadSize() (default max packet size is 400 bytes).
+   *
+   * Input:
+   *      - ISAC_main_inst    : iSAC instance
+   *      - maxRate           : maximum rate in bits per second,
+   *                            valid values are 32000 to 53400 bits
+   *
+   * Return value             : 0 if successful
+   *                           -1 if error happens
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst,
+                                         WebRtc_Word32 maxRate);
+
+  /****************************************************************************
+   * WebRtcIsacfix_CreateInternal(...)
+   *
+   * This function creates the memory that is used to store data in the encoder
+   *
+   * Input:
+   *      - *ISAC_main_inst   : a pointer to the coder instance.
+   *
+   * Return value             : 0 - Ok
+   *                           -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_FreeInternal(...)
+   *
+   * This function frees the internal memory for storing encoder data.
+   *
+   * Input:
+   *      - ISAC_main_inst        : an ISAC instance.
+   *
+   * Return value                 :  0 - Ok
+   *                                -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_GetNewBitStream(...)
+   *
+   * This function returns encoded data, with the received bwe-index in the
+   * stream. It should always return a complete packet, i.e. only called once
+   * even for 60 msec frames
+   *
+   * Input:
+   *      - ISAC_main_inst    : ISAC instance.
+   *      - bweIndex          : index of bandwidth estimate to put in new bitstream
+   *      - scale             : factor for rate change (0.4 ~=> half the rate, 1 no change).
+   *
+   * Output:
+   *      - encoded           : the encoded data vector
+   *
+   * Return value             : >0 - Length (in bytes) of coded data
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst,
+                                              WebRtc_Word16          bweIndex,
+                                              float              scale,
+                                              WebRtc_Word16        *encoded);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_GetDownLinkBwIndex(...)
+   *
+   * This function returns index representing the Bandwidth estimate from
+   * other side to this side.
+   *
+   * Input:
+   *      - ISAC_main_inst    : iSAC struct
+   *
+   * Output:
+   *      - rateIndex         : Bandwidth estimate to transmit to other side.
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst,
+                                                 WebRtc_Word16*     rateIndex);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_UpdateUplinkBw(...)
+   *
+   * This function takes an index representing the Bandwidth estimate from
+   * this side to other side and updates BWE.
+   *
+   * Input:
+   *      - ISAC_main_inst    : iSAC struct
+   *      - rateIndex         : Bandwidth estimate from other side.
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst,
+                                             WebRtc_Word16     rateIndex);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_ReadBwIndex(...)
+   *
+   * This function returns the index of the Bandwidth estimate from the bitstream.
+   *
+   * Input:
+   *      - encoded           : Encoded bitstream
+   *
+   * Output:
+   *      - rateIndex         : Bandwidth estimate in bitstream
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_ReadBwIndex(const WebRtc_Word16* encoded,
+                                          WebRtc_Word16* rateIndex);
+
+
+  /****************************************************************************
+   * WebRtcIsacfix_GetNewFrameLen(...)
+   *
+   * This function return the next frame length (in samples) of iSAC.
+   *
+   * Input:
+   *      -ISAC_main_inst     : iSAC instance
+   *
+   * Return value             : frame length in samples
+   */
+
+  WebRtc_Word16 WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst);
+
+
+#if defined(__cplusplus)
+}
+#endif
+
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INTERFACE_ISACFIX_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/Android.mk b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/Android.mk
new file mode 100644
index 0000000..c81d13a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/Android.mk
@@ -0,0 +1,143 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+#############################
+# Build the non-NEON iSAC fixed-point static library (libwebrtc_isacfix).
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_isacfix
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    arith_routines.c \
+    arith_routines_hist.c \
+    arith_routines_logist.c \
+    bandwidth_estimator.c \
+    decode.c \
+    decode_bwe.c \
+    decode_plc.c \
+    encode.c \
+    entropy_coding.c \
+    fft.c \
+    filterbank_tables.c \
+    filterbanks.c \
+    filters.c \
+    initialize.c \
+    isacfix.c \
+    lattice.c \
+    lpc_masking_model.c \
+    lpc_tables.c \
+    pitch_estimator.c \
+    pitch_filter.c \
+    pitch_gain_tables.c \
+    pitch_lag_tables.c \
+    spectrum_ar_model_tables.c \
+    transform.c
+
+ifeq ($(ARCH_ARM_HAVE_ARMV7A),true)
+LOCAL_SRC_FILES += \
+    lattice_armv7.S
+else
+LOCAL_SRC_FILES += \
+    lattice_c.c
+endif  # ARCH_ARM_HAVE_ARMV7A: ARMv7-A gets the assembly lattice, else portable C
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../../../.. \
+    $(LOCAL_PATH)/../../../../../../common_audio/signal_processing/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif  # NDK_ROOT
+include $(BUILD_STATIC_LIBRARY)
+
+#########################
+# Build the NEON-optimized library (only when WEBRTC_BUILD_NEON_LIBS is true).
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_isacfix_neon
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    filters_neon.c \
+    lattice_neon.S #.S extension is for including a header file in assembly.
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    -mfpu=neon \
+    -mfloat-abi=softfp \
+    -flax-vector-conversions
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../../../.. \
+    $(LOCAL_PATH)/../../../../../../common_audio/signal_processing/include
+
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif  # NDK_ROOT
+include $(BUILD_STATIC_LIBRARY)
+
+endif # ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+
+###########################
+# iSAC fixed-point test app (kenny) linked against the libraries above.
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES:= ../test/kenny.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../../../..
+
+LOCAL_STATIC_LIBRARIES := \
+    libwebrtc_isacfix \
+    libwebrtc_spl
+
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+LOCAL_STATIC_LIBRARIES += \
+    libwebrtc_isacfix_neon
+endif  # WEBRTC_BUILD_NEON_LIBS
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils
+
+LOCAL_MODULE:= webrtc_isac_test
+
+ifdef NDK_ROOT
+include $(BUILD_EXECUTABLE)
+else
+include $(BUILD_NATIVE_TEST)
+endif  # NDK_ROOT
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines.c
new file mode 100644
index 0000000..ee62bad
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines.c
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * arith_routines.c
+ *
+ * This C file contains a function for finalizing the bitstream
+ * after arithmetic coding.
+ *
+ */
+
+#include "arith_routins.h"
+
+
+/****************************************************************************
+ * WebRtcIsacfix_EncTerminate(...)
+ *
+ * Final call to the arithmetic coder for an encoder call. This function
+ * flushes the remaining coder state into the stream and returns its length.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *
+ * Return value             : number of bytes in the stream
+ */
+WebRtc_Word16 WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData)
+{
+  WebRtc_UWord16 *streamPtr;
+  WebRtc_UWord16 negCarry;
+
+  /* point to the current write position in the stream buffer */
+  streamPtr = streamData->stream + streamData->stream_index;
+
+  /* find minimum length (determined by current interval width) */
+  if ( streamData->W_upper > 0x01FFFFFF )
+  {
+    streamData->streamval += 0x01000000;
+
+    /* if result is less than the added value we must take care of the carry */
+    if (streamData->streamval < 0x01000000)
+    {
+      /* propagate carry */
+      if (streamData->full == 0) {
+        /* add carry (0x0100) into the half-filled stream word */
+        negCarry = *streamPtr;
+        negCarry += 0x0100;
+        *streamPtr = negCarry;
+
+        /* if value is too big, propagate carry to next byte, and so on */
+        while (!(negCarry))
+        {
+          negCarry = *--streamPtr;
+          negCarry++;
+          *streamPtr = negCarry;
+        }
+      } else {
+        /* propagate carry by adding one to the previous byte in the
+         * stream if that byte is 0xFFFF we need to propagate the carry
+         * further back in the stream */
+        while ( !(++(*--streamPtr)) );
+      }
+
+      /* put pointer back to the old value */
+      streamPtr = streamData->stream + streamData->stream_index;
+    }
+    /* write remaining data to bitstream, if "full == 0" first byte has data */
+    if (streamData->full == 0) {
+      *streamPtr++ += (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
+      streamData->full = 1;
+    } else {
+      *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_LSHIFT_W32(
+          WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24), 8);
+      streamData->full = 0;
+    }
+  }
+  else
+  {
+    streamData->streamval += 0x00010000;
+
+    /* if result is less than the added value we must take care of the carry */
+    if (streamData->streamval < 0x00010000)
+    {
+      /* propagate carry */
+      if (streamData->full == 0) {
+        /* add carry (0x0100) into the half-filled stream word */
+        negCarry = *streamPtr;
+        negCarry += 0x0100;
+        *streamPtr = negCarry;
+
+        /* if value is too big, propagate carry to next byte, and so on */
+        while (!(negCarry))
+        {
+          negCarry = *--streamPtr;
+          negCarry++;
+          *streamPtr = negCarry;
+        }
+      } else {
+        /* Add carry to previous byte */
+        while ( !(++(*--streamPtr)) );
+      }
+
+      /* put pointer back to the old value */
+      streamPtr = streamData->stream + streamData->stream_index;
+    }
+    /* write remaining data (2 bytes) to bitstream */
+    if (streamData->full) {
+      *streamPtr++ = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 16);
+    } else {
+      *streamPtr++ |= (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
+      *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 8)
+          & 0xFF00;
+    }
+  }
+
+  /* calculate stream length in bytes */
+  return (((streamPtr - streamData->stream)<<1) + !(streamData->full));
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines_hist.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines_hist.c
new file mode 100644
index 0000000..14f1add
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines_hist.c
@@ -0,0 +1,404 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * arith_routines_hist.c
+ *
+ * This C file contains arithmetic encoding and decoding.
+ *
+ */
+
+#include "arith_routins.h"
+
+
+/****************************************************************************
+ * WebRtcIsacfix_EncHistMulti(...)
+ *
+ * Encode the histogram interval
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - data              : data vector
+ *      - cdf               : array of cdf arrays
+ *      - lenData           : data vector length
+ *
+ * Return value             : 0 if ok
+ *                            <0 if error detected
+ */
+int WebRtcIsacfix_EncHistMulti(Bitstr_enc *streamData,
+                              const WebRtc_Word16 *data,
+                              const WebRtc_UWord16 **cdf,
+                              const WebRtc_Word16 lenData)
+{
+  WebRtc_UWord32 W_lower;
+  WebRtc_UWord32 W_upper;
+  WebRtc_UWord32 W_upper_LSB;
+  WebRtc_UWord32 W_upper_MSB;
+  WebRtc_UWord16 *streamPtr;
+  WebRtc_UWord16 negCarry;
+  WebRtc_UWord16 *maxStreamPtr;
+  WebRtc_UWord16 *streamPtrCarry;
+  WebRtc_UWord32 cdfLo;
+  WebRtc_UWord32 cdfHi;
+  int k;
+
+
+  /* point to beginning of stream buffer
+   * and set maximum streamPtr value */
+  streamPtr = streamData->stream + streamData->stream_index;
+  maxStreamPtr = streamData->stream + STREAM_MAXW16_60MS - 1;
+
+  W_upper = streamData->W_upper;
+
+  for (k = lenData; k > 0; k--)
+  {
+    /* fetch cdf_lower and cdf_upper from cdf tables */
+    cdfLo = (WebRtc_UWord32) *(*cdf + (WebRtc_UWord32)*data);
+    cdfHi = (WebRtc_UWord32) *(*cdf++ + (WebRtc_UWord32)*data++ + 1);
+
+    /* update interval */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = WEBRTC_SPL_RSHIFT_W32(W_upper, 16);
+    W_lower = WEBRTC_SPL_UMUL(W_upper_MSB, cdfLo);
+    W_lower += WEBRTC_SPL_UMUL_RSFT16(W_upper_LSB, cdfLo);
+    W_upper = WEBRTC_SPL_UMUL(W_upper_MSB, cdfHi);
+    W_upper += WEBRTC_SPL_UMUL_RSFT16(W_upper_LSB, cdfHi);
+
+    /* shift interval such that it begins at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamData->streamval += W_lower;
+
+    /* handle carry */
+    if (streamData->streamval < W_lower)
+    {
+      /* propagate carry */
+      streamPtrCarry = streamPtr;
+      if (streamData->full == 0) {
+        negCarry = *streamPtrCarry;
+        negCarry += 0x0100;
+        *streamPtrCarry = negCarry;
+        while (!(negCarry))
+        {
+          negCarry = *--streamPtrCarry;
+          negCarry++;
+          *streamPtrCarry = negCarry;
+        }
+      } else {
+        while ( !(++(*--streamPtrCarry)) );
+      }
+    }
+
+    /* renormalize interval, store most significant byte of streamval and update streamval
+     * W_upper < 2^24 */
+    while ( !(W_upper & 0xFF000000) )
+    {
+      W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
+      if (streamData->full == 0) {
+        *streamPtr++ += (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
+        streamData->full = 1;
+      } else {
+        *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_LSHIFT_W32(
+            WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24), 8);
+        streamData->full = 0;
+      }
+
+      if( streamPtr > maxStreamPtr ) {
+        return -ISAC_DISALLOWED_BITSTREAM_LENGTH;
+      }
+      streamData->streamval = WEBRTC_SPL_LSHIFT_W32(streamData->streamval, 8);
+    }
+  }
+
+  /* calculate new stream_index */
+  streamData->stream_index = streamPtr - streamData->stream;
+  streamData->W_upper = W_upper;
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecHistBisectMulti(...)
+ *
+ * Function to decode more symbols from the arithmetic bytestream, using
+ * method of bisection cdf tables should be of size 2^k-1 (which corresponds
+ * to an alphabet size of 2^k-2)
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - cdf               : array of cdf arrays
+ *      - cdfSize           : array of cdf table sizes+1 (power of two: 2^k)
+ *      - lenData           : data vector length
+ *
+ * Output:
+ *      - data              : data vector
+ *
+ * Return value             : number of bytes in the stream
+ *                            <0 if error detected
+ */
+WebRtc_Word16 WebRtcIsacfix_DecHistBisectMulti(WebRtc_Word16 *data,
+                                              Bitstr_dec *streamData,
+                                              const WebRtc_UWord16 **cdf,
+                                              const WebRtc_UWord16 *cdfSize,
+                                              const WebRtc_Word16 lenData)
+{
+  WebRtc_UWord32    W_lower = 0;
+  WebRtc_UWord32    W_upper;
+  WebRtc_UWord32    W_tmp;
+  WebRtc_UWord32    W_upper_LSB;
+  WebRtc_UWord32    W_upper_MSB;
+  WebRtc_UWord32    streamval;
+  const WebRtc_UWord16 *streamPtr;
+  const WebRtc_UWord16 *cdfPtr;
+  WebRtc_Word16     sizeTmp;
+  int             k;
+
+
+  streamPtr = streamData->stream + streamData->stream_index;
+  W_upper = streamData->W_upper;
+
+  /* Error check: should not be possible in normal operation */
+  if (W_upper == 0) {
+    return -2;
+  }
+
+  /* first time decoder is called for this stream */
+  if (streamData->stream_index == 0)
+  {
+    /* read first word from bytestream */
+    streamval = WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)*streamPtr++, 16);
+    streamval |= *streamPtr++;
+  } else {
+    streamval = streamData->streamval;
+  }
+
+  for (k = lenData; k > 0; k--)
+  {
+    /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = WEBRTC_SPL_RSHIFT_W32(W_upper, 16);
+
+    /* start halfway the cdf range */
+    sizeTmp = WEBRTC_SPL_RSHIFT_W16(*cdfSize++, 1);
+    cdfPtr = *cdf + (sizeTmp - 1);
+
+    /* method of bisection */
+    for ( ;; )
+    {
+      W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr);
+      W_tmp += WEBRTC_SPL_UMUL_32_16_RSFT16(W_upper_LSB, *cdfPtr);
+      sizeTmp = WEBRTC_SPL_RSHIFT_W16(sizeTmp, 1);
+      if (sizeTmp == 0) {
+        break;
+      }
+
+      if (streamval > W_tmp)
+      {
+        W_lower = W_tmp;
+        cdfPtr += sizeTmp;
+      } else {
+        W_upper = W_tmp;
+        cdfPtr -= sizeTmp;
+      }
+    }
+    if (streamval > W_tmp)
+    {
+      W_lower = W_tmp;
+      *data++ = cdfPtr - *cdf++;
+    } else {
+      W_upper = W_tmp;
+      *data++ = cdfPtr - *cdf++ - 1;
+    }
+
+    /* shift interval to start at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamval -= W_lower;
+
+    /* renormalize interval and update streamval */
+    /* W_upper < 2^24 */
+    while ( !(W_upper & 0xFF000000) )
+    {
+      /* read next byte from stream */
+      if (streamData->full == 0) {
+        streamval = WEBRTC_SPL_LSHIFT_W32(streamval, 8) |
+            (*streamPtr++ & 0x00FF);
+        streamData->full = 1;
+      } else {
+        streamval = WEBRTC_SPL_LSHIFT_W32(streamval, 8) |
+            WEBRTC_SPL_RSHIFT_W16(*streamPtr, 8);
+        streamData->full = 0;
+      }
+      W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
+    }
+
+
+    /* Error check: should not be possible in normal operation */
+    if (W_upper == 0) {
+      return -2;
+    }
+
+  }
+
+  streamData->stream_index = streamPtr - streamData->stream;
+  streamData->W_upper = W_upper;
+  streamData->streamval = streamval;
+
+  if ( W_upper > 0x01FFFFFF ) {
+    return (streamData->stream_index*2 - 3 + !streamData->full);
+  } else {
+    return (streamData->stream_index*2 - 2 + !streamData->full);
+  }
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecHistOneStepMulti(...)
+ *
+ * Function to decode more symbols from the arithmetic bytestream, taking
+ * single step up or down at a time.
+ * cdf tables can be of arbitrary size, but large tables may take a lot of
+ * iterations.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - cdf               : array of cdf arrays
+ *      - initIndex         : vector of initial cdf table search entries
+ *      - lenData           : data vector length
+ *
+ * Output:
+ *      - data              : data vector
+ *
+ * Return value             : number of bytes in original stream
+ *                            <0 if error detected
+ */
+WebRtc_Word16 WebRtcIsacfix_DecHistOneStepMulti(WebRtc_Word16 *data,
+                                               Bitstr_dec *streamData,
+                                               const WebRtc_UWord16 **cdf,
+                                               const WebRtc_UWord16 *initIndex,
+                                               const WebRtc_Word16 lenData)
+{
+  WebRtc_UWord32    W_lower;
+  WebRtc_UWord32    W_upper;
+  WebRtc_UWord32    W_tmp;
+  WebRtc_UWord32    W_upper_LSB;
+  WebRtc_UWord32    W_upper_MSB;
+  WebRtc_UWord32    streamval;
+  const WebRtc_UWord16 *streamPtr;
+  const WebRtc_UWord16 *cdfPtr;
+  int             k;
+
+
+  streamPtr = streamData->stream + streamData->stream_index;
+  W_upper = streamData->W_upper;
+  /* Error check: Should not be possible in normal operation */
+  if (W_upper == 0) {
+    return -2;
+  }
+
+  /* Check if it is the first time decoder is called for this stream */
+  if (streamData->stream_index == 0)
+  {
+    /* read first word from bytestream */
+    streamval = WEBRTC_SPL_LSHIFT_U32(*streamPtr++, 16);
+    streamval |= *streamPtr++;
+  } else {
+    streamval = streamData->streamval;
+  }
+
+  for (k = lenData; k > 0; k--)
+  {
+    /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
+
+    /* start at the specified table entry */
+    cdfPtr = *cdf + (*initIndex++);
+    W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr);
+    W_tmp += WEBRTC_SPL_UMUL_32_16_RSFT16(W_upper_LSB, *cdfPtr);
+
+    if (streamval > W_tmp)
+    {
+      for ( ;; )
+      {
+        W_lower = W_tmp;
+
+        /* range check */
+        if (cdfPtr[0] == 65535) {
+          return -3;
+        }
+
+        W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *++cdfPtr);
+        W_tmp += WEBRTC_SPL_UMUL_32_16_RSFT16(W_upper_LSB, *cdfPtr);
+
+        if (streamval <= W_tmp) {
+          break;
+        }
+      }
+      W_upper = W_tmp;
+      *data++ = cdfPtr - *cdf++ - 1;
+    } else {
+      for ( ;; )
+      {
+        W_upper = W_tmp;
+        --cdfPtr;
+
+        /* range check */
+        if (cdfPtr < *cdf) {
+          return -3;
+        }
+
+        W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr);
+        W_tmp += WEBRTC_SPL_UMUL_32_16_RSFT16(W_upper_LSB, *cdfPtr);
+
+        if (streamval > W_tmp) {
+          break;
+        }
+      }
+      W_lower = W_tmp;
+      *data++ = cdfPtr - *cdf++;
+    }
+
+    /* shift interval to start at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamval -= W_lower;
+
+    /* renormalize interval and update streamval */
+    /* W_upper < 2^24 */
+    while ( !(W_upper & 0xFF000000) )
+    {
+      /* read next byte from stream */
+      if (streamData->full == 0) {
+        streamval = WEBRTC_SPL_LSHIFT_W32(streamval, 8) | (*streamPtr++ & 0x00FF);
+        streamData->full = 1;
+      } else {
+        streamval = WEBRTC_SPL_LSHIFT_W32(streamval, 8) | (*streamPtr >> 8);
+        streamData->full = 0;
+      }
+      W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
+    }
+  }
+
+  streamData->stream_index = streamPtr - streamData->stream;
+  streamData->W_upper = W_upper;
+  streamData->streamval = streamval;
+
+  /* find number of bytes in original stream (determined by current interval width) */
+  if ( W_upper > 0x01FFFFFF ) {
+    return (streamData->stream_index*2 - 3 + !streamData->full);
+  } else {
+    return (streamData->stream_index*2 - 2 + !streamData->full);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines_logist.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines_logist.c
new file mode 100644
index 0000000..39c437e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routines_logist.c
@@ -0,0 +1,404 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * arith_routines_logist.c
+ *
+ * This C file contains arithmetic encode and decode logistic
+ *
+ */
+
+#include "arith_routins.h"
+
+
+/* Tables for piecewise linear cdf functions: y = k*x */
+
+/* x Points for function piecewise() in Q15 */
+static const WebRtc_Word32 kHistEdges[51] = {
+  -327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716,
+  -196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858,  -91751,  -78644,
+  -65536,  -52429,  -39322,  -26215,  -13108,       0,   13107,   26214,   39321,   52428,
+  65536,   78643,   91750,  104857,  117964,  131072,  144179,  157286,  170393,  183500,
+  196608,  209715,  222822,  235929,  249036,  262144,  275251,  288358,  301465,  314572,
+  327680
+};
+
+
+/* k Points for function piecewise() in Q0 */
+static const WebRtc_UWord16 kCdfSlope[51] = {
+  5,    5,     5,     5,     5,     5,     5,     5,    5,    5,
+  5,    5,    13,    23,    47,    87,   154,   315,  700, 1088,
+  2471, 6064, 14221, 21463, 36634, 36924, 19750, 13270, 5806, 2312,
+  1095,  660,   316,   145,    86,    41,    32,     5,    5,    5,
+  5,    5,     5,     5,     5,     5,     5,     5,    5,    2,
+  0
+};
+
+/* y Points for function piecewise() in Q0 */
+static const WebRtc_UWord16 kCdfLogistic[51] = {
+  0,     2,     4,     6,     8,    10,    12,    14,    16,    18,
+  20,    22,    24,    29,    38,    57,    92,   153,   279,   559,
+  994,  1983,  4408, 10097, 18682, 33336, 48105, 56005, 61313, 63636,
+  64560, 64998, 65262, 65389, 65447, 65481, 65497, 65510, 65512, 65514,
+  65516, 65518, 65520, 65522, 65524, 65526, 65528, 65530, 65532, 65534,
+  65535
+};
+
+
+/****************************************************************************
+ * WebRtcIsacfix_Piecewise(...)
+ *
+ * Piecewise linear function
+ *
+ * Input:
+ *      - xinQ15           : input value x in Q15
+ *
+ * Return value            : corresponding y-value in Q0
+ */
+
+
+static __inline WebRtc_UWord16 WebRtcIsacfix_Piecewise(WebRtc_Word32 xinQ15) {
+  WebRtc_Word32 ind;
+  WebRtc_Word32 qtmp1;
+  WebRtc_UWord16 qtmp2;
+
+  /* Find index for x-value */
+  qtmp1 = WEBRTC_SPL_SAT(kHistEdges[50],xinQ15,kHistEdges[0]);
+  ind = WEBRTC_SPL_MUL(5, qtmp1 - kHistEdges[0]);
+  ind =  WEBRTC_SPL_RSHIFT_W32(ind, 16);
+
+  /* Calculate corresponding y-value and return */
+  qtmp1 = qtmp1 - kHistEdges[ind];
+  qtmp2 = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(
+      WEBRTC_SPL_UMUL_32_16(qtmp1,kCdfSlope[ind]), 15);
+  return (kCdfLogistic[ind] + qtmp2);
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_EncLogisticMulti2(...)
+ *
+ * Arithmetic coding of spectrum.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - dataQ7            : data vector in Q7
+ *      - envQ8             : side info vector defining the width of the pdf
+ *                            in Q8
+ *      - lenData           : data vector length
+ *
+ * Return value             :  0 if ok,
+ *                            <0 otherwise.
+ */
+int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc *streamData,
+                                   WebRtc_Word16 *dataQ7,
+                                   const WebRtc_UWord16 *envQ8,
+                                   const WebRtc_Word16 lenData)
+{
+  WebRtc_UWord32 W_lower;
+  WebRtc_UWord32 W_upper;
+  WebRtc_UWord16 W_upper_LSB;
+  WebRtc_UWord16 W_upper_MSB;
+  WebRtc_UWord16 *streamPtr;
+  WebRtc_UWord16 *maxStreamPtr;
+  WebRtc_UWord16 *streamPtrCarry;
+  WebRtc_UWord16 negcarry;
+  WebRtc_UWord32 cdfLo;
+  WebRtc_UWord32 cdfHi;
+  int k;
+
+  /* point to beginning of stream buffer
+   * and set maximum streamPtr value */
+  streamPtr = streamData->stream + streamData->stream_index;
+  maxStreamPtr = streamData->stream + STREAM_MAXW16_60MS - 1;
+  W_upper = streamData->W_upper;
+
+  for (k = 0; k < lenData; k++)
+  {
+    /* compute cdf_lower and cdf_upper by evaluating the
+     * WebRtcIsacfix_Piecewise linear cdf */
+    cdfLo = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(*dataQ7 - 64, *envQ8));
+    cdfHi = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(*dataQ7 + 64, *envQ8));
+
+    /* test and clip if probability gets too small */
+    while ((cdfLo + 1) >= cdfHi) {
+      /* clip */
+      if (*dataQ7 > 0) {
+        *dataQ7 -= 128;
+        cdfHi = cdfLo;
+        cdfLo = WebRtcIsacfix_Piecewise(
+            WEBRTC_SPL_MUL_16_U16(*dataQ7 - 64, *envQ8));
+      } else {
+        *dataQ7 += 128;
+        cdfLo = cdfHi;
+        cdfHi = WebRtcIsacfix_Piecewise(
+            WEBRTC_SPL_MUL_16_U16(*dataQ7 + 64, *envQ8));
+      }
+    }
+
+    dataQ7++;
+    /* increment only once per 4 iterations */
+    envQ8 += (k & 1) & (k >> 1);
+
+
+    /* update interval */
+    W_upper_LSB = (WebRtc_UWord16)W_upper;
+    W_upper_MSB = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
+    W_lower = WEBRTC_SPL_UMUL_32_16(cdfLo, W_upper_MSB);
+    W_lower += WEBRTC_SPL_UMUL_32_16_RSFT16(cdfLo, W_upper_LSB);
+    W_upper = WEBRTC_SPL_UMUL_32_16(cdfHi, W_upper_MSB);
+    W_upper += WEBRTC_SPL_UMUL_32_16_RSFT16(cdfHi, W_upper_LSB);
+
+    /* shift interval such that it begins at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamData->streamval += W_lower;
+
+    /* handle carry */
+    if (streamData->streamval < W_lower)
+    {
+      /* propagate carry */
+      streamPtrCarry = streamPtr;
+      if (streamData->full == 0) {
+        negcarry = *streamPtrCarry;
+        negcarry += 0x0100;
+        *streamPtrCarry = negcarry;
+        while (!(negcarry))
+        {
+          negcarry = *--streamPtrCarry;
+          negcarry++;
+          *streamPtrCarry = negcarry;
+        }
+      } else {
+        while (!(++(*--streamPtrCarry)));
+      }
+    }
+
+    /* renormalize interval, store most significant byte of streamval and update streamval
+     * W_upper < 2^24 */
+    while ( !(W_upper & 0xFF000000) )
+    {
+      W_upper = WEBRTC_SPL_LSHIFT_U32(W_upper, 8);
+      if (streamData->full == 0) {
+        *streamPtr++ += (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_U32(
+            streamData->streamval, 24);
+        streamData->full = 1;
+      } else {
+        *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_LSHIFT_U32(
+            WEBRTC_SPL_RSHIFT_U32(streamData->streamval, 24), 8);
+        streamData->full = 0;
+      }
+
+      if( streamPtr > maxStreamPtr )
+        return -ISAC_DISALLOWED_BITSTREAM_LENGTH;
+
+      streamData->streamval = WEBRTC_SPL_LSHIFT_U32(streamData->streamval, 8);
+    }
+  }
+
+  /* calculate new stream_index */
+  streamData->stream_index = streamPtr - streamData->stream;
+  streamData->W_upper = W_upper;
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecLogisticMulti2(...)
+ *
+ * Arithmetic decoding of spectrum.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - envQ8             : side info vector defining the width of the pdf
+ *                            in Q8
+ *      - lenData           : data vector length
+ *
+ * Input/Output:
+ *      - dataQ7            : input: dither vector, output: data vector
+ *
+ * Return value             : number of bytes in the stream so far
+ *                            -1 if error detected
+ */
+WebRtc_Word16 WebRtcIsacfix_DecLogisticMulti2(WebRtc_Word16 *dataQ7,
+                                             Bitstr_dec *streamData,
+                                             const WebRtc_Word32 *envQ8,
+                                             const WebRtc_Word16 lenData)
+{
+  WebRtc_UWord32    W_lower;
+  WebRtc_UWord32    W_upper;
+  WebRtc_UWord32    W_tmp;
+  WebRtc_UWord16    W_upper_LSB;
+  WebRtc_UWord16    W_upper_MSB;
+  WebRtc_UWord32    streamVal;
+  WebRtc_UWord16    cdfTmp;
+  WebRtc_Word32     res;
+  WebRtc_Word32     inSqrt;
+  WebRtc_Word32     newRes;
+  const WebRtc_UWord16 *streamPtr;
+  WebRtc_Word16     candQ7;
+  WebRtc_Word16     envCount;
+  WebRtc_UWord16    tmpARSpecQ8 = 0;
+  int             k, i;
+
+
+  /* point to beginning of stream buffer */
+  streamPtr = streamData->stream + streamData->stream_index;
+  W_upper = streamData->W_upper;
+
+  /* Check if it is first time decoder is called for this stream */
+  if (streamData->stream_index == 0)
+  {
+    /* read first word from bytestream */
+    streamVal = WEBRTC_SPL_LSHIFT_U32(*streamPtr++, 16);
+    streamVal |= *streamPtr++;
+
+  } else {
+    streamVal = streamData->streamval;
+  }
+
+
+  res = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1,
+                               WEBRTC_SPL_RSHIFT_W16(WebRtcSpl_GetSizeInBits(envQ8[0]), 1));
+  envCount = 0;
+
+  /* code assumes lenData%4 == 0 */
+  for (k = 0; k < lenData; k += 4)
+  {
+    int k4;
+
+    /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
+    inSqrt = envQ8[envCount];
+    i = 10;
+
+    /* For safety reasons */
+    if (inSqrt < 0)
+      inSqrt=-inSqrt;
+
+    newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(inSqrt, res) + res, 1);
+    do
+    {
+      res = newRes;
+      newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(inSqrt, res) + res, 1);
+    } while (newRes != res && i-- > 0);
+
+    tmpARSpecQ8 = (WebRtc_UWord16)newRes;
+
+    for(k4 = 0; k4 < 4; k4++)
+    {
+      /* find the integer *data for which streamVal lies in [W_lower+1, W_upper] */
+      W_upper_LSB = (WebRtc_UWord16) (W_upper & 0x0000FFFF);
+      W_upper_MSB = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
+
+      /* find first candidate by inverting the logistic cdf
+       * Input dither value collected from io-stream */
+      candQ7 = - *dataQ7 + 64;
+      cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
+
+      W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
+      W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+
+      if (streamVal > W_tmp)
+      {
+        W_lower = W_tmp;
+        candQ7 += 128;
+        cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
+
+        W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
+        W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+
+        while (streamVal > W_tmp)
+        {
+          W_lower = W_tmp;
+          candQ7 += 128;
+          cdfTmp = WebRtcIsacfix_Piecewise(
+              WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
+
+          W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
+          W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+
+          /* error check */
+          if (W_lower == W_tmp) {
+            return -1;
+          }
+        }
+        W_upper = W_tmp;
+
+        /* Output value put in dataQ7: another sample decoded */
+        *dataQ7 = candQ7 - 64;
+      }
+      else
+      {
+        W_upper = W_tmp;
+        candQ7 -= 128;
+        cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
+
+        W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
+        W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+
+        while ( !(streamVal > W_tmp) )
+        {
+          W_upper = W_tmp;
+          candQ7 -= 128;
+          cdfTmp = WebRtcIsacfix_Piecewise(
+              WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
+
+          W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
+          W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+
+          /* error check */
+          if (W_upper == W_tmp){
+            return -1;
+          }
+        }
+        W_lower = W_tmp;
+
+        /* Output value put in dataQ7: another sample decoded */
+        *dataQ7 = candQ7 + 64;
+      }
+
+      dataQ7++;
+
+      /* shift interval to start at zero */
+      W_upper -= ++W_lower;
+
+      /* add integer to bitstream */
+      streamVal -= W_lower;
+
+      /* renormalize interval and update streamVal
+       * W_upper < 2^24 */
+      while ( !(W_upper & 0xFF000000) )
+      {
+        /* read next byte from stream */
+        if (streamData->full == 0) {
+          streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) | (*streamPtr++ & 0x00FF);
+          streamData->full = 1;
+        } else {
+          streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) |
+              WEBRTC_SPL_RSHIFT_U16(*streamPtr, 8);
+          streamData->full = 0;
+        }
+        W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
+      }
+    }
+    envCount++;
+  }
+
+  streamData->stream_index = streamPtr - streamData->stream;
+  streamData->W_upper = W_upper;
+  streamData->streamval = streamVal;
+
+  /* find number of bytes in original stream (determined by current interval width) */
+  if ( W_upper > 0x01FFFFFF )
+    return (streamData->stream_index*2 - 3 + !streamData->full);
+  else
+    return (streamData->stream_index*2 - 2 + !streamData->full);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routins.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routins.h
new file mode 100644
index 0000000..9aa49da
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/arith_routins.h
@@ -0,0 +1,160 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * arith_routins.h
+ *
+ * Functions for arithmetic coding.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_
+
+#include "structs.h"
+
+
+/****************************************************************************
+ * WebRtcIsacfix_EncLogisticMulti2(...)
+ *
+ * Arithmetic coding of spectrum.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - dataQ7            : data vector in Q7
+ *      - envQ8             : side info vector defining the width of the pdf
+ *                            in Q8
+ *      - lenData           : data vector length
+ *
+ * Return value             :  0 if ok,
+ *                             <0 otherwise.
+ */
+int WebRtcIsacfix_EncLogisticMulti2(
+    Bitstr_enc *streamData,
+    WebRtc_Word16 *dataQ7,
+    const WebRtc_UWord16 *env,
+    const WebRtc_Word16 lenData);
+
+
+/****************************************************************************
+ * WebRtcIsacfix_EncTerminate(...)
+ *
+ * Final call to the arithmetic coder for an encoder call. This function
+ * terminates and return byte stream.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *
+ * Return value             : number of bytes in the stream
+ */
+WebRtc_Word16 WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData);
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecLogisticMulti2(...)
+ *
+ * Arithmetic decoding of spectrum.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - envQ8             : side info vector defining the width of the pdf
+ *                            in Q8
+ *      - lenData           : data vector length
+ *
+ * Input/Output:
+ *      - dataQ7            : input: dither vector, output: data vector, in Q7
+ *
+ * Return value             : number of bytes in the stream so far
+ *                            <0 if error detected
+ */
+WebRtc_Word16 WebRtcIsacfix_DecLogisticMulti2(
+    WebRtc_Word16 *data,
+    Bitstr_dec *streamData,
+    const WebRtc_Word32 *env,
+    const WebRtc_Word16 lenData);
+
+
+/****************************************************************************
+ * WebRtcIsacfix_EncHistMulti(...)
+ *
+ * Encode the histogram interval
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - data              : data vector
+ *      - cdf               : array of cdf arrays
+ *      - lenData           : data vector length
+ *
+ * Return value             : 0 if ok
+ *                            <0 if error detected
+ */
+int WebRtcIsacfix_EncHistMulti(
+    Bitstr_enc *streamData,
+    const WebRtc_Word16 *data,
+    const WebRtc_UWord16 **cdf,
+    const WebRtc_Word16 lenData);
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecHistBisectMulti(...)
+ *
+ * Function to decode more symbols from the arithmetic bytestream, using
+ * method of bisection.
+ * Cdf tables should be of size 2^k-1 (which corresponds to an
+ * alphabet size of 2^k-2)
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - cdf               : array of cdf arrays
+ *      - cdfSize           : array of cdf table sizes+1 (power of two: 2^k)
+ *      - lenData           : data vector length
+ *
+ * Output:
+ *      - data              : data vector
+ *
+ * Return value             : number of bytes in the stream
+ *                            <0 if error detected
+ */
+WebRtc_Word16 WebRtcIsacfix_DecHistBisectMulti(
+    WebRtc_Word16 *data,
+    Bitstr_dec *streamData,
+    const WebRtc_UWord16 **cdf,
+    const WebRtc_UWord16 *cdfSize,
+    const WebRtc_Word16 lenData);
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecHistOneStepMulti(...)
+ *
+ * Function to decode more symbols from the arithmetic bytestream, taking
+ * single step up or down at a time.
+ * cdf tables can be of arbitrary size, but large tables may take a lot of
+ * iterations.
+ *
+ * Input:
+ *      - streamData        : in-/output struct containing bitstream
+ *      - cdf               : array of cdf arrays
+ *      - initIndex         : vector of initial cdf table search entries
+ *      - lenData           : data vector length
+ *
+ * Output:
+ *      - data              : data vector
+ *
+ * Return value             : number of bytes in original stream
+ *                            <0 if error detected
+ */
+WebRtc_Word16 WebRtcIsacfix_DecHistOneStepMulti(
+    WebRtc_Word16 *data,
+    Bitstr_dec *streamData,
+    const WebRtc_UWord16 **cdf,
+    const WebRtc_UWord16 *initIndex,
+    const WebRtc_Word16 lenData);
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/bandwidth_estimator.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/bandwidth_estimator.c
new file mode 100644
index 0000000..a274b66
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/bandwidth_estimator.c
@@ -0,0 +1,1022 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * bandwidth_estimator.c
+ *
+ * This file contains the code for the Bandwidth Estimator designed
+ * for iSAC.
+ *
+ * NOTE! Castings needed for C55, do not remove!
+ *
+ */
+
+#include "bandwidth_estimator.h"
+#include "settings.h"
+
+
+/* Array of quantization levels (bps) for the bottle-neck info sent to the
+ * far side.  NOTE(review): the values correspond to
+ * logspace(log10(10000), log10(32000), 12); the Matlab snippet below,
+ * quoting the range (5000, 40000), appears stale -- confirm before
+ * regenerating this table. */
+/* sprintf('%4.1ff, ', logspace(log10(5000), log10(40000), 12)) */
+static const WebRtc_Word16 kQRateTable[12] = {
+  10000, 11115, 12355, 13733, 15265, 16967,
+  18860, 20963, 23301, 25900, 28789, 32000
+};
+
+/* 0.1 times the values in the table kQRateTable */
+/* values are in Q16                                         */
+static const WebRtc_Word32 KQRate01[12] = {
+  65536000,  72843264,  80969728,  90000589,  100040704, 111194931,
+  123600896, 137383117, 152705434, 169738240, 188671590, 209715200
+};
+
+/* Bits per Bytes Seconds
+ * 8 bits/byte * 1000 msec/sec * 1/framelength (in msec)->bits/byte*sec
+ * frame length will either be 30 or 60 msec. 8738 is 1/60 in Q19 and 1/30 in Q18
+ * The following number is either in Q15 or Q14 depending on the current frame length */
+static const WebRtc_Word32 kBitsByteSec = 4369000;
+
+/* Received header rate (bps). First value is for 30 ms packets and second
+   for 60 ms packets (fewer packets/sec -> lower header overhead). */
+static const WebRtc_Word16 kRecHeaderRate[2] = {
+  9333, 4666
+};
+
+/* Inverted minimum and maximum bandwidth in Q30, i.e.
+   2^30 / (bandwidth + header rate) per frame length:
+   minBwInv 30 ms, maxBwInv 30 ms,
+   minBwInv 60 ms, maxBwInv 60 ms
+   (note: larger inverse corresponds to the smaller bandwidth)
+*/
+static const WebRtc_Word32 kInvBandwidth[4] = {
+  55539, 25978,
+  73213, 29284
+};
+
+/* Number of samples in 25 msec (400 / 25 = 16 samples per msec, i.e. the
+   16 kHz time base used for all sample-denominated timestamps here) */
+static const WebRtc_Word32 kSamplesIn25msec = 400;
+
+
+/****************************************************************************
+ * WebRtcIsacfix_InitBandwidthEstimator(...)
+ *
+ * This function initializes the struct for the bandwidth estimator.
+ * All averages are seeded from the INIT_* constants; countUpdates starts
+ * at -9 so the first 9 packets only warm up the estimator (see the
+ * countUpdates > 0 checks in WebRtcIsacfix_UpdateUplinkBwImpl).
+ *
+ * Input/Output:
+ *      - bweStr        : Struct containing bandwidth information.
+ *
+ * Return value            : 0 (always succeeds)
+ */
+WebRtc_Word32 WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bweStr)
+{
+  bweStr->prevFrameSizeMs       = INIT_FRAME_LEN;
+  bweStr->prevRtpNumber         = 0;
+  bweStr->prevSendTime          = 0;
+  bweStr->prevArrivalTime       = 0;
+  bweStr->prevRtpRate           = 1;
+  bweStr->lastUpdate            = 0;
+  bweStr->lastReduction         = 0;
+  bweStr->countUpdates          = -9;
+
+  /* INIT_BN_EST = 20000
+   * INIT_BN_EST_Q7 = 2560000
+   * INIT_HDR_RATE = 4666
+   * INIT_REC_BN_EST_Q5 = 789312
+   *
+   * recBwInv = 1/(INIT_BN_EST + INIT_HDR_RATE) in Q30
+   * recBwAvg = INIT_BN_EST + INIT_HDR_RATE in Q5
+   */
+  bweStr->recBwInv              = 43531;
+  bweStr->recBw                 = INIT_BN_EST;
+  bweStr->recBwAvgQ             = INIT_BN_EST_Q7;
+  bweStr->recBwAvg              = INIT_REC_BN_EST_Q5;
+  bweStr->recJitter             = (WebRtc_Word32) 327680;   /* 10 in Q15 */
+  bweStr->recJitterShortTerm    = 0;
+  bweStr->recJitterShortTermAbs = (WebRtc_Word32) 40960;    /* 5 in Q13 */
+  bweStr->recMaxDelay           = (WebRtc_Word32) 10;
+  bweStr->recMaxDelayAvgQ       = (WebRtc_Word32) 5120;     /* 10 in Q9 */
+  bweStr->recHeaderRate         = INIT_HDR_RATE;
+  bweStr->countRecPkts          = 0;
+  bweStr->sendBwAvg             = INIT_BN_EST_Q7;
+  bweStr->sendMaxDelayAvg       = (WebRtc_Word32) 5120;     /* 10 in Q9 */
+
+  /* High-speed-link detection state (see the countHighSpeed* logic in
+     WebRtcIsacfix_UpdateUplinkBwRec / GetDownlinkBwIndexImpl). */
+  bweStr->countHighSpeedRec     = 0;
+  bweStr->highSpeedRec          = 0;
+  bweStr->countHighSpeedSent    = 0;
+  bweStr->highSpeedSend         = 0;
+  bweStr->inWaitPeriod          = 0;
+
+  /* Find the inverse of the max bw and min bw in Q30
+   *  (1 / (MAX_ISAC_BW + INIT_HDR_RATE) in Q30
+   *  (1 / (MIN_ISAC_BW + INIT_HDR_RATE) in Q30
+   *  Indices 2 and 3 are the 60 ms entries of kInvBandwidth, matching
+   *  INIT_FRAME_LEN / INIT_HDR_RATE above.
+   */
+  bweStr->maxBwInv              = kInvBandwidth[3];
+  bweStr->minBwInv              = kInvBandwidth[2];
+
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_UpdateUplinkBwImpl(...)
+ *
+ * This function updates bottle neck rate received from other side in payload
+ * and calculates a new bottle neck to send to the other side.
+ *
+ * All timestamps (sendTime, arrivalTime) are in samples at the 16 kHz time
+ * base (SAMPLES_PER_MSEC samples per millisecond).
+ *
+ * Input/Output:
+ *      - bweStr           : struct containing bandwidth information.
+ *      - rtpNumber        : value from RTP packet, from NetEq
+ *      - frameSize        : length of signal frame in ms, from iSAC decoder
+ *      - sendTime         : value in RTP header giving send time in samples
+ *      - arrivalTime      : value given by timeGetTime() time of arrival in
+ *                           samples of packet from NetEq
+ *      - pksize           : size of packet in bytes, from NetEq
+ *      - Index            : integer (range 0...23) indicating bottle neck &
+ *                           jitter as estimated by other side
+ *
+ * Return value            : 0 if everything went fine,
+ *                           -1 otherwise
+ */
+WebRtc_Word32 WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bweStr,
+                                               const WebRtc_UWord16 rtpNumber,
+                                               const WebRtc_Word16  frameSize,
+                                               const WebRtc_UWord32 sendTime,
+                                               const WebRtc_UWord32 arrivalTime,
+                                               const WebRtc_Word16  pksize,
+                                               const WebRtc_UWord16 Index)
+{
+  WebRtc_UWord16  weight = 0;
+  WebRtc_UWord32  currBwInv = 0;
+  WebRtc_UWord16  recRtpRate;
+  WebRtc_UWord32  arrTimeProj;
+  WebRtc_Word32   arrTimeDiff;
+  WebRtc_Word32   arrTimeNoise;
+  WebRtc_Word32   arrTimeNoiseAbs;
+  WebRtc_Word32   sendTimeDiff;
+
+  WebRtc_Word32 delayCorrFactor = DELAY_CORRECTION_MED;
+  WebRtc_Word32 lateDiff = 0;
+  WebRtc_Word16 immediateSet = 0;
+  WebRtc_Word32 frameSizeSampl;
+
+  WebRtc_Word32  temp;
+  WebRtc_Word32  msec;
+  WebRtc_UWord32 exponent;
+  WebRtc_UWord32 reductionFactor;
+  WebRtc_UWord32 numBytesInv;
+  WebRtc_Word32  sign;
+
+  WebRtc_UWord32 byteSecondsPerBit;
+  WebRtc_UWord32 tempLower;
+  WebRtc_UWord32 tempUpper;
+  WebRtc_Word32 recBwAvgInv;
+  WebRtc_Word32 numPktsExpected;
+
+  WebRtc_Word16 errCode;
+
+  /* UPDATE ESTIMATES FROM OTHER SIDE */
+
+  /* The function also checks if Index has a valid value */
+  errCode = WebRtcIsacfix_UpdateUplinkBwRec(bweStr, Index);
+  if (errCode <0) {
+    return(errCode);
+  }
+
+
+  /* UPDATE ESTIMATES ON THIS SIDE */
+
+  /* Bits per second per byte * 1/30 or 1/60 */
+  if (frameSize == 60) {
+    /* If frameSize changed since last call, from 30 to 60, recalculate some values */
+    if ( (frameSize != bweStr->prevFrameSizeMs) && (bweStr->countUpdates > 0)) {
+      bweStr->countUpdates = 10;
+      bweStr->recHeaderRate = kRecHeaderRate[1];
+
+      /* 60 ms bandwidth-inverse limits; 1073741824 = 2^30 (Q30 one). */
+      bweStr->maxBwInv = kInvBandwidth[3];
+      bweStr->minBwInv = kInvBandwidth[2];
+      bweStr->recBwInv = WEBRTC_SPL_UDIV(1073741824, (bweStr->recBw + bweStr->recHeaderRate));
+    }
+
+    /* kBitsByteSec is in Q15 */
+    recRtpRate = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(kBitsByteSec,
+                                                                     (WebRtc_Word32)pksize), 15) + bweStr->recHeaderRate;
+
+  } else {
+    /* If frameSize changed since last call, from 60 to 30, recalculate some values */
+    if ( (frameSize != bweStr->prevFrameSizeMs) && (bweStr->countUpdates > 0)) {
+      bweStr->countUpdates = 10;
+      bweStr->recHeaderRate = kRecHeaderRate[0];
+
+      /* 30 ms bandwidth-inverse limits. */
+      bweStr->maxBwInv = kInvBandwidth[1];
+      bweStr->minBwInv = kInvBandwidth[0];
+      bweStr->recBwInv = WEBRTC_SPL_UDIV(1073741824, (bweStr->recBw + bweStr->recHeaderRate));
+    }
+
+    /* kBitsByteSec is in Q14 */
+    recRtpRate = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(kBitsByteSec,
+                                                                      (WebRtc_Word32)pksize), 14) + bweStr->recHeaderRate;
+  }
+
+
+  /* Check for timer wrap-around: on wrap, reset the time bookkeeping and
+     skip this update rather than computing a bogus negative interval. */
+  if (arrivalTime < bweStr->prevArrivalTime) {
+    bweStr->prevArrivalTime = arrivalTime;
+    bweStr->lastUpdate      = arrivalTime;
+    bweStr->lastReduction   = arrivalTime + FS3;
+
+    bweStr->countRecPkts      = 0;
+
+    /* store frame size */
+    bweStr->prevFrameSizeMs = frameSize;
+
+    /* store far-side transmission rate */
+    bweStr->prevRtpRate = recRtpRate;
+
+    /* store far-side RTP time stamp */
+    bweStr->prevRtpNumber = rtpNumber;
+
+    return 0;
+  }
+
+  bweStr->countRecPkts++;
+
+  /* Frame size in samples (NOT msec: SAMPLES_PER_MSEC samples per msec) */
+  frameSizeSampl = WEBRTC_SPL_MUL_16_16((WebRtc_Word16)SAMPLES_PER_MSEC, frameSize);
+
+  /* Check that it's not one of the first 9 packets */
+  if ( bweStr->countUpdates > 0 ) {
+
+    /* Stay in Wait Period for 1.5 seconds (no updates in wait period) */
+    if(bweStr->inWaitPeriod) {
+      if ((arrivalTime - bweStr->startWaitPeriod)> FS_1_HALF) {
+        bweStr->inWaitPeriod = 0;
+      }
+    }
+
+    /* If not been updated for a long time, reduce the BN estimate */
+
+    /* Check send time difference between this packet and previous received      */
+    sendTimeDiff = sendTime - bweStr->prevSendTime;
+    if (sendTimeDiff <= WEBRTC_SPL_LSHIFT_W32(frameSizeSampl, 1)) {
+
+      /* Only update if 3 seconds have passed since last update */
+      if ((arrivalTime - bweStr->lastUpdate) > FS3) {
+
+        /* Calculate expected number of received packets since last update */
+        numPktsExpected =  WEBRTC_SPL_UDIV(arrivalTime - bweStr->lastUpdate, frameSizeSampl);
+
+        /* If received number of packets is more than 90% of expected (922 = 0.9 in Q10): */
+        /* do the update, else not                                                        */
+        if(WEBRTC_SPL_LSHIFT_W32(bweStr->countRecPkts, 10)  > WEBRTC_SPL_MUL_16_16(922, numPktsExpected)) {
+          /* Q4 chosen to approx dividing by 16 */
+          /* NOTE(review): despite its name, "msec" holds a sample count
+             (16 samples per ms); 208000 samples = 13 seconds below. */
+          msec = (arrivalTime - bweStr->lastReduction);
+
+          /* the number below represents 13 seconds, highly unlikely
+             but to insure no overflow when reduction factor is multiplied by recBw inverse */
+          if (msec > 208000) {
+            msec = 208000;
+          }
+
+          /* Q20 2^(negative number: - 76/1048576) = .99995
+             product is Q24 */
+          exponent = WEBRTC_SPL_UMUL(0x0000004C, msec);
+
+          /* do the approx with positive exponent so that value is actually rf^-1
+             and multiply by bw inverse */
+          reductionFactor = WEBRTC_SPL_RSHIFT_U32(0x01000000 | (exponent & 0x00FFFFFF),
+                                                  WEBRTC_SPL_RSHIFT_U32(exponent, 24));
+
+          /* reductionFactor in Q13 */
+          reductionFactor = WEBRTC_SPL_RSHIFT_U32(reductionFactor, 11);
+
+          if ( reductionFactor != 0 ) {
+            /* Grow the inverse (i.e. shrink the BN estimate) by rf^-1. */
+            bweStr->recBwInv = WEBRTC_SPL_MUL((WebRtc_Word32)bweStr->recBwInv, (WebRtc_Word32)reductionFactor);
+            bweStr->recBwInv = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)bweStr->recBwInv, 13);
+
+          } else {
+            /* recBwInv = 1 / (INIT_BN_EST + INIT_HDR_RATE) in Q26 (Q30??)*/
+            bweStr->recBwInv = WEBRTC_SPL_DIV((1073741824 +
+                                               WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)INIT_BN_EST + INIT_HDR_RATE), 1)), INIT_BN_EST + INIT_HDR_RATE);
+          }
+
+          /* reset time-since-update counter */
+          bweStr->lastReduction = arrivalTime;
+        } else {
+          /* Delay last reduction with 3 seconds */
+          bweStr->lastReduction = arrivalTime + FS3;
+          bweStr->lastUpdate    = arrivalTime;
+          bweStr->countRecPkts  = 0;
+        }
+      }
+    } else {
+      bweStr->lastReduction = arrivalTime + FS3;
+      bweStr->lastUpdate    = arrivalTime;
+      bweStr->countRecPkts  = 0;
+    }
+
+
+    /*   update only if previous packet was not lost */
+    if ( rtpNumber == bweStr->prevRtpNumber + 1 ) {
+      arrTimeDiff = arrivalTime - bweStr->prevArrivalTime;
+
+      /* Late-packet detection is skipped once both directions are known
+         to be high-speed links. */
+      if (!(bweStr->highSpeedSend && bweStr->highSpeedRec)) {
+        if (arrTimeDiff > frameSizeSampl) {
+          if (sendTimeDiff > 0) {
+            lateDiff = arrTimeDiff - sendTimeDiff -
+                WEBRTC_SPL_LSHIFT_W32(frameSizeSampl, 1);
+          } else {
+            lateDiff = arrTimeDiff - frameSizeSampl;
+          }
+
+          /* 8000 is 1/2 second (in samples at FS) */
+          if (lateDiff > 8000) {
+            delayCorrFactor = (WebRtc_Word32) DELAY_CORRECTION_MAX;
+            bweStr->inWaitPeriod = 1;
+            bweStr->startWaitPeriod = arrivalTime;
+            immediateSet = 1;
+          } else if (lateDiff > 5120) {
+            delayCorrFactor = (WebRtc_Word32) DELAY_CORRECTION_MED;
+            immediateSet = 1;
+            bweStr->inWaitPeriod = 1;
+            bweStr->startWaitPeriod = arrivalTime;
+          }
+        }
+      }
+
+      /* Only update averages while both recent rates exceed the running
+         average (recBwAvg is Q5, hence the >> 5) and we are not waiting. */
+      if ((bweStr->prevRtpRate > WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) bweStr->recBwAvg, 5)) &&
+          (recRtpRate > WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)bweStr->recBwAvg, 5)) &&
+          !bweStr->inWaitPeriod) {
+
+        /* test if still in initiation period and increment counter */
+        if (bweStr->countUpdates++ > 99) {
+          /* constant weight after initiation part, 0.01 in Q13 */
+          weight = (WebRtc_UWord16) 82;
+        } else {
+          /* weight decreases with number of updates, 1/countUpdates in Q13  */
+          weight = (WebRtc_UWord16) WebRtcSpl_DivW32W16(
+              (WebRtc_Word32)(8192 + WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) bweStr->countUpdates, 1)),
+              (WebRtc_Word16)bweStr->countUpdates);
+        }
+
+        /* Bottle Neck Estimation */
+
+        /* limit outliers, if more than 25 ms too much */
+        if (arrTimeDiff > frameSizeSampl + kSamplesIn25msec) {
+          arrTimeDiff = frameSizeSampl + kSamplesIn25msec;
+        }
+
+        /* don't allow it to be less than frame rate - 10 ms */
+        if (arrTimeDiff < frameSizeSampl - FRAMESAMPLES_10ms) {
+          arrTimeDiff = frameSizeSampl - FRAMESAMPLES_10ms;
+        }
+
+        /* compute inverse receiving rate for last packet, in Q19
+           (the added half-denominator rounds the division to nearest) */
+        numBytesInv = (WebRtc_UWord16) WebRtcSpl_DivW32W16(
+            (WebRtc_Word32)(524288 + WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)pksize + HEADER_SIZE), 1)),
+            (WebRtc_Word16)(pksize + HEADER_SIZE));
+
+        /* 8389 is  ~ 1/128000 in Q30 */
+        byteSecondsPerBit = WEBRTC_SPL_MUL_16_16(arrTimeDiff, 8389);
+
+        /* 32x32 multiply done in two 15-bit halves to avoid overflow: */
+        /* get upper N bits */
+        tempUpper = WEBRTC_SPL_RSHIFT_U32(byteSecondsPerBit, 15);
+
+        /* get lower 15 bits */
+        tempLower = byteSecondsPerBit & 0x00007FFF;
+
+        tempUpper = WEBRTC_SPL_MUL(tempUpper, numBytesInv);
+        tempLower = WEBRTC_SPL_MUL(tempLower, numBytesInv);
+        tempLower = WEBRTC_SPL_RSHIFT_U32(tempLower, 15);
+
+        currBwInv = tempUpper + tempLower;
+        currBwInv = WEBRTC_SPL_RSHIFT_U32(currBwInv, 4);
+
+        /* Limit inv rate. Note that minBwInv > maxBwInv! */
+        if(currBwInv < bweStr->maxBwInv) {
+          currBwInv = bweStr->maxBwInv;
+        } else if(currBwInv > bweStr->minBwInv) {
+          currBwInv = bweStr->minBwInv;
+        }
+
+        /* update bottle neck rate estimate: exponential smoothing with
+           Q13 weight, recBwInv * (1 - w) + currBwInv * w */
+        bweStr->recBwInv = WEBRTC_SPL_UMUL(weight, currBwInv) +
+            WEBRTC_SPL_UMUL((WebRtc_UWord32) 8192 - weight, bweStr->recBwInv);
+
+        /* Shift back to Q30 from Q40 (actual used bits shouldn't be more than 27 based on minBwInv)
+           up to 30 bits used with Q13 weight */
+        bweStr->recBwInv = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwInv, 13);
+
+        /* reset time-since-update counter */
+        bweStr->lastUpdate    = arrivalTime;
+        bweStr->lastReduction = arrivalTime + FS3;
+        bweStr->countRecPkts  = 0;
+
+        /* to save resolution compute the inverse of recBwAvg in Q26 by left shifting numerator to 2^31
+           and NOT right shifting recBwAvg 5 bits to an integer
+           At max 13 bits are used
+           shift to Q5 */
+        recBwAvgInv = WEBRTC_SPL_UDIV((WebRtc_UWord32)(0x80000000 + WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 1)),
+                                      bweStr->recBwAvg);
+
+        /* Calculate Projected arrival time difference */
+
+        /* The numerator of the quotient can be 22 bits so right shift inv by 4 to avoid overflow
+           result in Q22 */
+        arrTimeProj = WEBRTC_SPL_MUL((WebRtc_Word32)8000, recBwAvgInv);
+        /* shift to Q22 */
+        arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 4);
+        /* complete calculation */
+        arrTimeProj = WEBRTC_SPL_MUL(((WebRtc_Word32)pksize + HEADER_SIZE), arrTimeProj);
+        /* shift to Q10 */
+        arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 12);
+
+        /* difference between projected and actual arrival time differences */
+        /* Q9 (only shift arrTimeDiff by 5 to simulate divide by 16 (need to revisit if change sampling rate) DH
+           NOTE(review): the code shifts by 6, not 5 as this comment says --
+           confirm which is intended before touching the Q-format. */
+        if (WEBRTC_SPL_LSHIFT_W32(arrTimeDiff, 6) > (WebRtc_Word32)arrTimeProj) {
+          arrTimeNoise = WEBRTC_SPL_LSHIFT_W32(arrTimeDiff, 6) -  arrTimeProj;
+          sign = 1;
+        } else {
+          arrTimeNoise = arrTimeProj - WEBRTC_SPL_LSHIFT_W32(arrTimeDiff, 6);
+          sign = -1;
+        }
+
+        /* Q9; arrTimeNoise is already non-negative by construction above */
+        arrTimeNoiseAbs = arrTimeNoise;
+
+        /* long term averaged absolute jitter, Q15 */
+        weight = WEBRTC_SPL_RSHIFT_W32(weight, 3);
+        bweStr->recJitter = WEBRTC_SPL_MUL(weight, WEBRTC_SPL_LSHIFT_W32(arrTimeNoiseAbs, 5))
+            +  WEBRTC_SPL_MUL(1024 - weight, bweStr->recJitter);
+
+        /* remove the fractional portion */
+        bweStr->recJitter = WEBRTC_SPL_RSHIFT_W32(bweStr->recJitter, 10);
+
+        /* Maximum jitter is 10 msec in Q15 */
+        if (bweStr->recJitter > (WebRtc_Word32)327680) {
+          bweStr->recJitter = (WebRtc_Word32)327680;
+        }
+
+        /* short term averaged absolute jitter */
+        /* Calculation in Q13 products in Q23 */
+        bweStr->recJitterShortTermAbs = WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32(arrTimeNoiseAbs, 3)) +
+            WEBRTC_SPL_MUL(973, bweStr->recJitterShortTermAbs);
+        bweStr->recJitterShortTermAbs = WEBRTC_SPL_RSHIFT_W32(bweStr->recJitterShortTermAbs , 10);
+
+        /* short term averaged jitter */
+        /* Calculation in Q13 products in Q23 */
+        bweStr->recJitterShortTerm = WEBRTC_SPL_MUL(205, WEBRTC_SPL_LSHIFT_W32(arrTimeNoise, 3)) * sign +
+            WEBRTC_SPL_MUL(3891, bweStr->recJitterShortTerm);
+
+        /* Shift down by 12, rounding toward zero for negative values
+           (a plain arithmetic shift would round toward -infinity). */
+        if (bweStr->recJitterShortTerm < 0) {
+          temp = -bweStr->recJitterShortTerm;
+          temp = WEBRTC_SPL_RSHIFT_W32(temp, 12);
+          bweStr->recJitterShortTerm = -temp;
+        } else {
+          bweStr->recJitterShortTerm = WEBRTC_SPL_RSHIFT_W32(bweStr->recJitterShortTerm, 12);
+        }
+      }
+    }
+  } else {
+    /* reset time-since-update counter when receiving the first 9 packets */
+    bweStr->lastUpdate    = arrivalTime;
+    bweStr->lastReduction = arrivalTime + FS3;
+    bweStr->countRecPkts  = 0;
+    bweStr->countUpdates++;
+  }
+
+  /* Limit to minimum or maximum bottle neck rate (in Q30) */
+  if (bweStr->recBwInv > bweStr->minBwInv) {
+    bweStr->recBwInv = bweStr->minBwInv;
+  } else if (bweStr->recBwInv < bweStr->maxBwInv) {
+    bweStr->recBwInv = bweStr->maxBwInv;
+  }
+
+
+  /* store frame length */
+  bweStr->prevFrameSizeMs = frameSize;
+
+  /* store far-side transmission rate */
+  bweStr->prevRtpRate = recRtpRate;
+
+  /* store far-side RTP time stamp */
+  bweStr->prevRtpNumber = rtpNumber;
+
+  /* Replace bweStr->recMaxDelay by the new value (atomic operation) */
+  if (bweStr->prevArrivalTime != 0xffffffff) {
+    bweStr->recMaxDelay = WEBRTC_SPL_MUL(3, bweStr->recJitter);
+  }
+
+  /* store arrival time stamp */
+  bweStr->prevArrivalTime = arrivalTime;
+  bweStr->prevSendTime = sendTime;
+
+  /* Replace bweStr->recBw by the new value (1073741824 = 2^30) */
+  bweStr->recBw = WEBRTC_SPL_UDIV(1073741824, bweStr->recBwInv) - bweStr->recHeaderRate;
+
+  if (immediateSet) {
+    /* A late packet was detected: apply the delay correction factor at
+       once instead of waiting for the smoothed update. */
+    /* delay correction factor is in Q10 */
+    bweStr->recBw = WEBRTC_SPL_UMUL(delayCorrFactor, bweStr->recBw);
+    bweStr->recBw = WEBRTC_SPL_RSHIFT_U32(bweStr->recBw, 10);
+
+    if (bweStr->recBw < (WebRtc_Word32) MIN_ISAC_BW) {
+      bweStr->recBw = (WebRtc_Word32) MIN_ISAC_BW;
+    }
+
+    /* Re-seed the averages from the corrected rate (Q5 and Q7). */
+    bweStr->recBwAvg = WEBRTC_SPL_LSHIFT_U32(bweStr->recBw + bweStr->recHeaderRate, 5);
+
+    bweStr->recBwAvgQ = WEBRTC_SPL_LSHIFT_U32(bweStr->recBw, 7);
+
+    bweStr->recJitterShortTerm = 0;
+
+    bweStr->recBwInv = WEBRTC_SPL_UDIV(1073741824, bweStr->recBw + bweStr->recHeaderRate);
+
+    immediateSet = 0;
+  }
+
+
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_UpdateUplinkBwRec(...)
+ *
+ * This function updates the send bottle neck rate and the send max-delay
+ * average from the far side's feedback.
+ *
+ * Input/Output:
+ *      - bweStr   : struct containing bandwidth information.
+ *      - Index    : integer (range 0...23) indicating bottle neck & jitter
+ *                   as estimated by other side; values 12..23 additionally
+ *                   signal the far side's "max delay" bit.
+ *
+ * Return value    : 0 if everything went fine,
+ *                   -ISAC_RANGE_ERROR_BW_ESTIMATOR if Index is out of range
+ */
+WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bweStr,
+                                              const WebRtc_Word16 Index)
+{
+  WebRtc_UWord16 RateInd;
+
+  if ( (Index < 0) || (Index > 23) ) {
+    return -ISAC_RANGE_ERROR_BW_ESTIMATOR;
+  }
+
+  /* UPDATE ESTIMATES FROM OTHER SIDE */
+
+  if ( Index > 11 ) {
+    RateInd = Index - 12;
+    /* compute the jitter estimate as decoded on the other side in Q9 */
+    /* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MAX_ISAC_MD
+       (461/512 ~= 0.9 and 51/512 ~= 0.1, both in Q9) */
+    bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) +
+        WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)MAX_ISAC_MD, 9));
+    bweStr->sendMaxDelayAvg = WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
+
+  } else {
+    RateInd = Index;
+    /* compute the jitter estimate as decoded on the other side in Q9 */
+    /* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MIN_ISAC_MD */
+    bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) +
+        WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)MIN_ISAC_MD,9));
+    bweStr->sendMaxDelayAvg = WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
+
+  }
+
+
+  /* compute the BN estimate as decoded on the other side */
+  /* sendBwAvg = 0.9 * sendBwAvg + 0.1 * kQRateTable[RateInd]; (Q7, weights Q9) */
+  bweStr->sendBwAvg = WEBRTC_SPL_UMUL(461, bweStr->sendBwAvg) +
+      WEBRTC_SPL_UMUL(51, WEBRTC_SPL_LSHIFT_U32(kQRateTable[RateInd], 7));
+  bweStr->sendBwAvg = WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 9);
+
+
+  /* Latch highSpeedSend once the averaged send BW (Q7 -> bps) stays above
+     28000 bps for 66 consecutive packets. */
+  if (WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 7) > 28000 && !bweStr->highSpeedSend) {
+    bweStr->countHighSpeedSent++;
+
+    /* approx 2 seconds with 30ms frames */
+    if (bweStr->countHighSpeedSent >= 66) {
+      bweStr->highSpeedSend = 1;
+    }
+  } else if (!bweStr->highSpeedSend) {
+    bweStr->countHighSpeedSent = 0;
+  }
+
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_GetDownlinkBwIndexImpl(...)
+ *
+ * This function calculates and returns the bandwidth/jitter estimation code
+ * (integer 0...23) to put in the sending iSAC payload.
+ *
+ * Input:
+ *      - bweStr       : BWE struct
+ *
+ * Return:
+ *      bandwidth and jitter index (0..23): rate index 0..11, plus 12 if
+ *      the max-delay bit is set
+ */
+WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bweStr)
+{
+  WebRtc_Word32  rate;
+  WebRtc_Word32  maxDelay;
+  WebRtc_UWord16 rateInd;
+  WebRtc_UWord16 maxDelayBit;
+  WebRtc_Word32  tempTerm1;
+  WebRtc_Word32  tempTerm2;
+  WebRtc_Word32  tempTermX;
+  WebRtc_Word32  tempTermY;
+  WebRtc_Word32  tempMin;
+  WebRtc_Word32  tempMax;
+
+  /* Get Rate Index */
+
+  /* Get unquantized rate. Always returns 10000 <= rate <= 32000 */
+  rate = WebRtcIsacfix_GetDownlinkBandwidth(bweStr);
+
+  /* Compute the averaged BN estimate on this side */
+
+  /* recBwAvg = 0.9 * recBwAvg + 0.1 * (rate + bweStr->recHeaderRate), 0.9 and 0.1 in Q9 */
+  bweStr->recBwAvg = WEBRTC_SPL_UMUL(922, bweStr->recBwAvg) +
+      WEBRTC_SPL_UMUL(102, WEBRTC_SPL_LSHIFT_U32((WebRtc_UWord32)rate + bweStr->recHeaderRate, 5));
+  bweStr->recBwAvg = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 10);
+
+  /* find quantization index that gives the closest rate after averaging;
+     loop leaves rateInd in [1, 11] (kQRateTable[11] is the max rate) */
+  for (rateInd = 1; rateInd < 12; rateInd++) {
+    if (rate <= kQRateTable[rateInd]){
+      break;
+    }
+  }
+
+  /* find closest quantization index, and update quantized average by taking: */
+  /* 0.9*recBwAvgQ + 0.1*kQRateTable[rateInd] */
+
+  /* 0.9 times recBwAvgQ in Q16 */
+  /* 461/512 - 25/65536 =0.900009 */
+  tempTerm1 = WEBRTC_SPL_MUL(bweStr->recBwAvgQ, 25);
+  tempTerm1 = WEBRTC_SPL_RSHIFT_W32(tempTerm1, 7);
+  tempTermX = WEBRTC_SPL_UMUL(461, bweStr->recBwAvgQ) - tempTerm1;
+
+  /* rate in Q16 */
+  tempTermY = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)rate, 16);
+
+  /* 0.1 * kQRateTable[rateInd] = KQRate01[rateInd] */
+  tempTerm1 = tempTermX + KQRate01[rateInd] - tempTermY;
+  tempTerm2 = tempTermY - tempTermX - KQRate01[rateInd-1];
+
+  /* Compare (0.9 * recBwAvgQ + 0.1 * kQRateTable[rateInd] - rate) >
+     (rate - 0.9 * recBwAvgQ - 0.1 * kQRateTable[rateInd-1]);
+     if so, the lower bin is closer, so step down. */
+  if (tempTerm1  > tempTerm2) {
+    rateInd--;
+  }
+
+  /* Update quantized average by taking:                  */
+  /* 0.9*recBwAvgQ + 0.1*kQRateTable[rateInd] */
+
+  /* Add 0.1 times kQRateTable[rateInd], in Q16 */
+  tempTermX += KQRate01[rateInd];
+
+  /* Shift back to Q7 */
+  bweStr->recBwAvgQ = WEBRTC_SPL_RSHIFT_W32(tempTermX, 9);
+
+  /* Count consecutive received bandwidth above 28000 kbps (28000 in Q7 = 3584000) */
+  /* If 66 high estimates in a row, set highSpeedRec to one */
+  /* 66 corresponds to ~2 seconds in 30 msec mode */
+  if ((bweStr->recBwAvgQ > 3584000) && !bweStr->highSpeedRec) {
+    bweStr->countHighSpeedRec++;
+    if (bweStr->countHighSpeedRec >= 66) {
+      bweStr->highSpeedRec = 1;
+    }
+  } else if (!bweStr->highSpeedRec)    {
+    bweStr->countHighSpeedRec = 0;
+  }
+
+  /* Get Max Delay Bit */
+
+  /* get unquantized max delay */
+  maxDelay = WebRtcIsacfix_GetDownlinkMaxDelay(bweStr);
+
+  /* Update quantized max delay average: pick whichever of MIN_ISAC_MD /
+     MAX_ISAC_MD is closer to maxDelay, analogous to the rate logic above. */
+  tempMax = 652800; /* MAX_ISAC_MD * 0.1 in Q18 */
+  tempMin = 130560; /* MIN_ISAC_MD * 0.1 in Q18 */
+  tempTermX = WEBRTC_SPL_MUL((WebRtc_Word32)bweStr->recMaxDelayAvgQ, (WebRtc_Word32)461);
+  tempTermY = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)maxDelay, 18);
+
+  tempTerm1 = tempTermX + tempMax - tempTermY;
+  tempTerm2 = tempTermY - tempTermX - tempMin;
+
+  if ( tempTerm1 > tempTerm2) {
+    maxDelayBit = 0;
+    tempTerm1 = tempTermX + tempMin;
+
+    /* update quantized average, shift back to Q9 */
+    bweStr->recMaxDelayAvgQ = WEBRTC_SPL_RSHIFT_W32(tempTerm1, 9);
+  } else {
+    maxDelayBit = 12;
+    tempTerm1 =  tempTermX + tempMax;
+
+    /* update quantized average, shift back to Q9 */
+    bweStr->recMaxDelayAvgQ = WEBRTC_SPL_RSHIFT_W32(tempTerm1, 9);
+  }
+
+  /* Return bandwidth and jitter index (0..23) */
+  return (WebRtc_UWord16)(rateInd + maxDelayBit);
+}
+
+/* Get the bottle neck rate from far side to here, as estimated on this side.
+ * Adjusts recBw down/up by up to ~15% depending on the sign of the
+ * short-term jitter average, then clamps to [MIN_ISAC_BW, MAX_ISAC_BW]. */
+WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bweStr)
+{
+  WebRtc_UWord32  recBw;
+  WebRtc_Word32   jitter_sign; /* Q8 */
+  WebRtc_Word32   bw_adjust;   /* Q16 */
+  WebRtc_Word32   rec_jitter_short_term_abs_inv; /* Q18 */
+  WebRtc_Word32   temp;
+
+  /* Q18  rec jitter short term abs is in Q13, multiply it by 2^13 to save precision
+     2^18 then needs to be shifted 13 bits to 2^31
+     NOTE(review): if recJitterShortTermAbs ever decays to 0 this divides
+     by zero -- confirm the update in UpdateUplinkBwImpl keeps it > 0. */
+  rec_jitter_short_term_abs_inv = WEBRTC_SPL_UDIV(0x80000000, bweStr->recJitterShortTermAbs);
+
+  /* Q27 = 9 + 18 */
+  jitter_sign = WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(bweStr->recJitterShortTerm, 4), (WebRtc_Word32)rec_jitter_short_term_abs_inv);
+
+  /* Shift down to Q8, rounding toward zero for negative values */
+  if (jitter_sign < 0) {
+    temp = -jitter_sign;
+    temp = WEBRTC_SPL_RSHIFT_W32(temp, 19);
+    jitter_sign = -temp;
+  } else {
+    jitter_sign = WEBRTC_SPL_RSHIFT_W32(jitter_sign, 19);
+  }
+
+  /* adjust bw proportionally to negative average jitter sign */
+  //bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign);
+  //Q8 -> Q16 .15 +.15 * jitter^2 first term is .15 in Q16 latter term is Q8*Q8*Q8
+  //38 in Q8 ~.15 9830 in Q16 ~.15
+  temp = 9830  + WEBRTC_SPL_RSHIFT_W32((WEBRTC_SPL_MUL(38, WEBRTC_SPL_MUL(jitter_sign, jitter_sign))), 8);
+
+  if (jitter_sign < 0) {
+    temp = WEBRTC_SPL_MUL(jitter_sign, temp);
+    temp = -temp;
+    temp = WEBRTC_SPL_RSHIFT_W32(temp, 8);
+    bw_adjust = (WebRtc_UWord32)65536 + temp; /* (1 << 16) + temp; */
+  } else {
+    bw_adjust = (WebRtc_UWord32)65536 - WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(jitter_sign, temp), 8);/* (1 << 16) - ((jitter_sign * temp) >> 8); */
+  }
+
+  //make sure following multiplication won't overflow
+  //bw adjust now Q14
+  bw_adjust = WEBRTC_SPL_RSHIFT_W32(bw_adjust, 2);//see if good resolution is maintained
+
+  /* adjust Rate if jitter sign is mostly constant */
+  recBw = WEBRTC_SPL_UMUL(bweStr->recBw, bw_adjust);
+
+  recBw = WEBRTC_SPL_RSHIFT_W32(recBw, 14);
+
+  /* limit range of bottle neck rate */
+  if (recBw < MIN_ISAC_BW) {
+    recBw = MIN_ISAC_BW;
+  } else if (recBw > MAX_ISAC_BW) {
+    recBw = MAX_ISAC_BW;
+  }
+
+  return  (WebRtc_UWord16) recBw;
+}
+
+/* Returns the max delay (in ms): recMaxDelay converted from Q15 and
+   clamped to [MIN_ISAC_MD, MAX_ISAC_MD]. */
+WebRtc_Word16 WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bweStr)
+{
+  WebRtc_Word16 recMaxDelay;
+
+  recMaxDelay = (WebRtc_Word16)  WEBRTC_SPL_RSHIFT_W32(bweStr->recMaxDelay, 15);
+
+  /* limit range of jitter estimate */
+  if (recMaxDelay < MIN_ISAC_MD) {
+    recMaxDelay = MIN_ISAC_MD;
+  } else if (recMaxDelay > MAX_ISAC_MD) {
+    recMaxDelay = MAX_ISAC_MD;
+  }
+
+  return recMaxDelay;
+}
+
+/* Get the bottle neck rate from here to far side, as estimated by far side:
+   sendBwAvg converted from Q7 to bps and clamped to
+   [MIN_ISAC_BW, MAX_ISAC_BW]. */
+WebRtc_Word16 WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr *bweStr)
+{
+  WebRtc_Word16 send_bw;
+
+  send_bw = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 7);
+
+  /* limit range of bottle neck rate */
+  if (send_bw < MIN_ISAC_BW) {
+    send_bw = MIN_ISAC_BW;
+  } else if (send_bw > MAX_ISAC_BW) {
+    send_bw = MAX_ISAC_BW;
+  }
+
+  return send_bw;
+}
+
+
+
+/* Returns the max delay value from the other side in ms:
+   sendMaxDelayAvg converted from Q9 and clamped to
+   [MIN_ISAC_MD, MAX_ISAC_MD]. */
+WebRtc_Word16 WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr *bweStr)
+{
+  WebRtc_Word16 send_max_delay;
+
+  send_max_delay = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
+
+  /* limit range of jitter estimate */
+  if (send_max_delay < MIN_ISAC_MD) {
+    send_max_delay = MIN_ISAC_MD;
+  } else if (send_max_delay > MAX_ISAC_MD) {
+    send_max_delay = MAX_ISAC_MD;
+  }
+
+  return send_max_delay;
+}
+
+
+
+
+/*
+ * update long-term average bitrate and amount of data in buffer
+ * returns minimum payload size (bytes)
+ */
WebRtc_UWord16 WebRtcIsacfix_GetMinBytes(RateModel *State,
                                         WebRtc_Word16 StreamSize,                    /* bytes in bitstream */
                                         const WebRtc_Word16 FrameSamples,            /* samples per frame */
                                         const WebRtc_Word16 BottleNeck,        /* bottle neck rate; excl headers (bps) */
                                         const WebRtc_Word16 DelayBuildUp)      /* max delay from bottle neck buffering (ms) */
{
  WebRtc_Word32 MinRate = 0;       /* minimum bitrate, Q9 bps; 0 = no minimum enforced */
  WebRtc_UWord16    MinBytes;      /* minimum payload size in bytes (return value) */
  WebRtc_Word16 TransmissionTime;  /* time to drain this packet through the bottle neck, ms */
  WebRtc_Word32 inv_Q12;           /* 4096 / (frame-sample count), i.e. a Q12 reciprocal */
  WebRtc_Word32 den;               /* scratch: surplus buffering expressed in samples */


  /* first 10 packets @ low rate, then INIT_BURST_LEN packets @ fixed rate of INIT_RATE bps */
  if (State->InitCounter > 0) {
    if (State->InitCounter-- <= INIT_BURST_LEN) {
      MinRate = INIT_RATE;
    } else {
      MinRate = 0;
    }
  } else {
    /* handle burst */
    if (State->BurstCounter) {
      /* compare current buffering (ms) against (1 - 1/BURST_LEN) * DelayBuildUp,
         computed in Q9 (512 = 1.0) */
      if (State->StillBuffered < WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL((512 - WEBRTC_SPL_DIV(512, BURST_LEN)), DelayBuildUp), 9)) {
        /* max bps derived from BottleNeck and DelayBuildUp values */
        inv_Q12 = WEBRTC_SPL_DIV(4096, WEBRTC_SPL_MUL(BURST_LEN, FrameSamples));
        MinRate = WEBRTC_SPL_MUL(512 + WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(DelayBuildUp, inv_Q12), 3)), BottleNeck);
      } else {
        /* max bps derived from StillBuffered and DelayBuildUp values */
        inv_Q12 = WEBRTC_SPL_DIV(4096, FrameSamples);
        if (DelayBuildUp > State->StillBuffered) {
          MinRate = WEBRTC_SPL_MUL(512 + WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(DelayBuildUp - State->StillBuffered, inv_Q12), 3)), BottleNeck);
        } else if ((den = WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, (State->StillBuffered - DelayBuildUp))) >= FrameSamples) {
          /* MinRate will be negative here */
          MinRate = 0;
        } else {
          MinRate = WEBRTC_SPL_MUL((512 - WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(den, inv_Q12), 3)), BottleNeck);
        }
        //if (MinRate < 1.04 * BottleNeck)
        //    MinRate = 1.04 * BottleNeck;
        //Q9
        /* fixed-point version of the commented-out float floor above:
           532/512 ~ 1.04, bumped by 22/512 ~ 0.043 when below the floor */
        if (MinRate < WEBRTC_SPL_MUL(532, BottleNeck)) {
          MinRate += WEBRTC_SPL_MUL(22, BottleNeck);
        }
      }

      State->BurstCounter--;
    }
  }


  /* convert rate from bits/second to bytes/packet */
  //round and shift before conversion
  MinRate += 256;
  MinRate = WEBRTC_SPL_RSHIFT_W32(MinRate, 9);
  /* bytes = bps * FrameSamples / FS8 (FS8 presumably FS*8 -- confirm in settings.h) */
  MinBytes = (WebRtc_UWord16)WEBRTC_SPL_UDIV(WEBRTC_SPL_MUL(MinRate, FrameSamples), FS8);

  /* StreamSize will be adjusted if less than MinBytes */
  if (StreamSize < MinBytes) {
    StreamSize = MinBytes;
  }

  /* keep track of when bottle neck was last exceeded by at least 1% */
  //517/512 ~ 1.01
  if (WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(StreamSize, FS8), FrameSamples) > (WEBRTC_SPL_MUL(517, BottleNeck) >> 9)) {
    if (State->PrevExceed) {
      /* bottle_neck exceeded twice in a row, decrease ExceedAgo */
      State->ExceedAgo -= WEBRTC_SPL_DIV(BURST_INTERVAL, BURST_LEN - 1);
      if (State->ExceedAgo < 0) {
        State->ExceedAgo = 0;
      }
    } else {
      State->ExceedAgo += (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4);       /* ms */
      State->PrevExceed = 1;
    }
  } else {
    State->PrevExceed = 0;
    State->ExceedAgo += (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4);           /* ms */
  }

  /* set burst flag if bottle neck not exceeded for long time */
  if ((State->ExceedAgo > BURST_INTERVAL) && (State->BurstCounter == 0)) {
    if (State->PrevExceed) {
      State->BurstCounter = BURST_LEN - 1;
    } else {
      State->BurstCounter = BURST_LEN;
    }
  }


  /* Update buffer delay */
  TransmissionTime = (WebRtc_Word16)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(StreamSize, 8000), BottleNeck);    /* ms */
  State->StillBuffered += TransmissionTime;
  State->StillBuffered -= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4);  //>>4 =  SAMPLES_PER_MSEC        /* ms */
  if (State->StillBuffered < 0) {
    State->StillBuffered = 0;
  }

  /* cap the buffering estimate at 2 seconds */
  if (State->StillBuffered > 2000) {
    State->StillBuffered = 2000;
  }

  return MinBytes;
}
+
+
+/*
+ * update long-term average bitrate and amount of data in buffer
+ */
+void WebRtcIsacfix_UpdateRateModel(RateModel *State,
+                                   WebRtc_Word16 StreamSize,                    /* bytes in bitstream */
+                                   const WebRtc_Word16 FrameSamples,            /* samples per frame */
+                                   const WebRtc_Word16 BottleNeck)        /* bottle neck rate; excl headers (bps) */
+{
+  WebRtc_Word16 TransmissionTime;
+
+  /* avoid the initial "high-rate" burst */
+  State->InitCounter = 0;
+
+  /* Update buffer delay */
+  TransmissionTime = (WebRtc_Word16)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(WEBRTC_SPL_MUL(StreamSize, 8), 1000), BottleNeck);    /* ms */
+  State->StillBuffered += TransmissionTime;
+  State->StillBuffered -= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4);            /* ms */
+  if (State->StillBuffered < 0) {
+    State->StillBuffered = 0;
+  }
+
+}
+
+
+void WebRtcIsacfix_InitRateModel(RateModel *State)
+{
+  State->PrevExceed      = 0;                        /* boolean */
+  State->ExceedAgo       = 0;                        /* ms */
+  State->BurstCounter    = 0;                        /* packets */
+  State->InitCounter     = INIT_BURST_LEN + 10;    /* packets */
+  State->StillBuffered   = 1;                    /* ms */
+}
+
+
+
+
+
+WebRtc_Word16 WebRtcIsacfix_GetNewFrameLength(WebRtc_Word16 bottle_neck, WebRtc_Word16 current_framesamples)
+{
+  WebRtc_Word16 new_framesamples;
+
+  new_framesamples = current_framesamples;
+
+  /* find new framelength */
+  switch(current_framesamples) {
+    case 480:
+      if (bottle_neck < Thld_30_60) {
+        new_framesamples = 960;
+      }
+      break;
+    case 960:
+      if (bottle_neck >= Thld_60_30) {
+        new_framesamples = 480;
+      }
+      break;
+    default:
+      new_framesamples = -1; /* Error */
+  }
+
+  return new_framesamples;
+}
+
+WebRtc_Word16 WebRtcIsacfix_GetSnr(WebRtc_Word16 bottle_neck, WebRtc_Word16 framesamples)
+{
+  WebRtc_Word16 s2nr = 0;
+
+  /* find new SNR value */
+  //consider BottleNeck to be in Q10 ( * 1 in Q10)
+  switch(framesamples) {
+    case 480:
+      /*s2nr = -1*(a_30 << 10) + ((b_30 * bottle_neck) >> 10);*/
+      s2nr = -22500 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(500, bottle_neck, 10); //* 0.001; //+ c_30 * bottle_neck * bottle_neck * 0.000001;
+      break;
+    case 960:
+      /*s2nr = -1*(a_60 << 10) + ((b_60 * bottle_neck) >> 10);*/
+      s2nr = -22500 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(500, bottle_neck, 10); //* 0.001; //+ c_30 * bottle_neck * bottle_neck * 0.000001;
+      break;
+    default:
+      s2nr = -1; /* Error */
+  }
+
+  return s2nr; //return in Q10
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/bandwidth_estimator.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/bandwidth_estimator.h
new file mode 100644
index 0000000..76a50f8
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/bandwidth_estimator.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * bandwidth_estimator.h
+ *
+ * This header file contains the API for the Bandwidth Estimator
+ * designed for iSAC.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_
+
+#include "structs.h"
+
+
+/****************************************************************************
+ * WebRtcIsacfix_InitBandwidthEstimator(...)
+ *
+ * This function initializes the struct for the bandwidth estimator
+ *
+ * Input/Output:
+ *      - bwest_str        : Struct containing bandwidth information.
+ *
+ * Return value            : 0
+ */
+
+WebRtc_Word32 WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bwest_str);
+
+
+/****************************************************************************
+ * WebRtcIsacfix_UpdateUplinkBwImpl(...)
+ *
+ * This function updates bottle neck rate received from other side in payload
+ * and calculates a new bottle neck to send to the other side.
+ *
+ * Input/Output:
+ *      - bweStr           : struct containing bandwidth information.
+ *      - rtpNumber        : value from RTP packet, from NetEq
+ *      - frameSize        : length of signal frame in ms, from iSAC decoder
+ *      - sendTime         : value in RTP header giving send time in samples
+ *      - arrivalTime      : value given by timeGetTime() time of arrival in
+ *                           samples of packet from NetEq
+ *      - pksize           : size of packet in bytes, from NetEq
+ *      - Index            : integer (range 0...23) indicating bottle neck &
+ *                           jitter as estimated by other side
+ *
+ * Return value            : 0 if everything went fine,
+ *                           -1 otherwise
+ */
+
+WebRtc_Word32 WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr            *bwest_str,
+                                               const WebRtc_UWord16        rtp_number,
+                                               const WebRtc_Word16         frameSize,
+                                               const WebRtc_UWord32    send_ts,
+                                               const WebRtc_UWord32        arr_ts,
+                                               const WebRtc_Word16         pksize,
+                                               const WebRtc_UWord16        Index);
+
+/* Update receiving estimates. Used when we only receive BWE index, no iSAC data packet. */
+WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bwest_str,
+                                              const WebRtc_Word16 Index);
+
+/****************************************************************************
+ * WebRtcIsacfix_GetDownlinkBwIndexImpl(...)
+ *
+ * This function calculates and returns the bandwidth/jitter estimation code
+ * (integer 0...23) to put in the sending iSAC payload.
+ *
+ * Input:
+ *      - bweStr       : BWE struct
+ *
+ * Return:
+ *      bandwidth and jitter index (0..23)
+ */
+WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bwest_str);
+
+/* Returns the bandwidth estimation (in bps) */
+WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bwest_str);
+
+/* Returns the bandwidth that iSAC should send with in bps */
+WebRtc_Word16 WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr *bwest_str);
+
+/* Returns the max delay (in ms) */
+WebRtc_Word16 WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str);
+
+/* Returns the max delay value from the other side in ms */
+WebRtc_Word16 WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr *bwest_str);
+
+/*
+ * update amount of data in bottle neck buffer and burst handling
+ * returns minimum payload size (bytes)
+ */
+WebRtc_UWord16 WebRtcIsacfix_GetMinBytes(RateModel *State,
+                                         WebRtc_Word16 StreamSize,     /* bytes in bitstream */
+                                         const WebRtc_Word16 FrameLen,    /* ms per frame */
+                                         const WebRtc_Word16 BottleNeck,        /* bottle neck rate; excl headers (bps) */
+                                         const WebRtc_Word16 DelayBuildUp);     /* max delay from bottle neck buffering (ms) */
+
+/*
+ * update long-term average bitrate and amount of data in buffer
+ */
+void WebRtcIsacfix_UpdateRateModel(RateModel *State,
+                                   WebRtc_Word16 StreamSize,    /* bytes in bitstream */
+                                   const WebRtc_Word16 FrameSamples,  /* samples per frame */
+                                   const WebRtc_Word16 BottleNeck);       /* bottle neck rate; excl headers (bps) */
+
+
+void WebRtcIsacfix_InitRateModel(RateModel *State);
+
+/* Returns the new framelength value (input argument: bottle_neck) */
+WebRtc_Word16 WebRtcIsacfix_GetNewFrameLength(WebRtc_Word16 bottle_neck, WebRtc_Word16 current_framelength);
+
+/* Returns the new SNR value (input argument: bottle_neck) */
+//returns snr in Q10
+WebRtc_Word16 WebRtcIsacfix_GetSnr(WebRtc_Word16 bottle_neck, WebRtc_Word16 framesamples);
+
+
+#endif /*  WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/codec.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/codec.h
new file mode 100644
index 0000000..154f2e9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/codec.h
@@ -0,0 +1,183 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * codec.h
+ *
+ * This header file contains the calls to the internal encoder
+ * and decoder functions.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_
+
+#include "structs.h"
+
+
+int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr   *bwest_str,
+                                    Bitstr_dec       *streamdata,
+                                    WebRtc_Word32      packet_size,
+                                    WebRtc_UWord16     rtp_seq_number,
+                                    WebRtc_UWord32     send_ts,
+                                    WebRtc_UWord32     arr_ts);
+
+WebRtc_Word16 WebRtcIsacfix_DecodeImpl(WebRtc_Word16   *signal_out16,
+                                       ISACFIX_DecInst_t  *ISACdec_obj,
+                                       WebRtc_Word16        *current_framesamples);
+
+WebRtc_Word16 WebRtcIsacfix_DecodePlcImpl(WebRtc_Word16       *decoded,
+                                          ISACFIX_DecInst_t *ISACdec_obj,
+                                          WebRtc_Word16       *current_framesample );
+
+int WebRtcIsacfix_EncodeImpl(WebRtc_Word16      *in,
+                             ISACFIX_EncInst_t  *ISACenc_obj,
+                             BwEstimatorstr      *bw_estimatordata,
+                             WebRtc_Word16         CodingMode);
+
+int WebRtcIsacfix_EncodeStoredData(ISACFIX_EncInst_t  *ISACenc_obj,
+                                   int     BWnumber,
+                                   float              scale);
+
+/************************** initialization functions *************************/
+
+void WebRtcIsacfix_InitMaskingEnc(MaskFiltstr_enc *maskdata);
+void WebRtcIsacfix_InitMaskingDec(MaskFiltstr_dec *maskdata);
+
+void WebRtcIsacfix_InitPreFilterbank(PreFiltBankstr *prefiltdata);
+
+void WebRtcIsacfix_InitPostFilterbank(PostFiltBankstr *postfiltdata);
+
+void WebRtcIsacfix_InitPitchFilter(PitchFiltstr *pitchfiltdata);
+
+void WebRtcIsacfix_InitPitchAnalysis(PitchAnalysisStruct *State);
+
+void WebRtcIsacfix_InitPlc( PLCstr *State );
+
+
+/**************************** transform functions ****************************/
+
+void WebRtcIsacfix_InitTransform();
+
+
+void WebRtcIsacfix_Time2Spec(WebRtc_Word16 *inre1Q9,
+                             WebRtc_Word16 *inre2Q9,
+                             WebRtc_Word16 *outre,
+                             WebRtc_Word16 *outim);
+
+
+
+void WebRtcIsacfix_Spec2Time(WebRtc_Word16 *inreQ7,
+                             WebRtc_Word16 *inimQ7,
+                             WebRtc_Word32 *outre1Q16,
+                             WebRtc_Word32 *outre2Q16);
+
+
+
+
+/***************************** filterbank functions **************************/
+
+
+
+void WebRtcIsacfix_SplitAndFilter1(WebRtc_Word16    *in,
+                                   WebRtc_Word16    *LP16,
+                                   WebRtc_Word16    *HP16,
+                                   PreFiltBankstr *prefiltdata);
+
+void WebRtcIsacfix_FilterAndCombine1(WebRtc_Word16     *tempin_ch1,
+                                     WebRtc_Word16     *tempin_ch2,
+                                     WebRtc_Word16     *out16,
+                                     PostFiltBankstr *postfiltdata);
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+
+void WebRtcIsacfix_SplitAndFilter2(WebRtc_Word16    *in,
+                                   WebRtc_Word16    *LP16,
+                                   WebRtc_Word16    *HP16,
+                                   PreFiltBankstr *prefiltdata);
+
+void WebRtcIsacfix_FilterAndCombine2(WebRtc_Word16     *tempin_ch1,
+                                     WebRtc_Word16     *tempin_ch2,
+                                     WebRtc_Word16     *out16,
+                                     PostFiltBankstr *postfiltdata,
+                                     WebRtc_Word16     len);
+
+#endif
+
+/************************* normalized lattice filters ************************/
+
+
+void WebRtcIsacfix_NormLatticeFilterMa(WebRtc_Word16 orderCoef,
+                                       WebRtc_Word32 *stateGQ15,
+                                       WebRtc_Word16 *lat_inQ0,
+                                       WebRtc_Word16 *filt_coefQ15,
+                                       WebRtc_Word32 *gain_lo_hiQ17,
+                                       WebRtc_Word16 lo_hi,
+                                       WebRtc_Word16 *lat_outQ9);
+
+void WebRtcIsacfix_NormLatticeFilterAr(WebRtc_Word16 orderCoef,
+                                       WebRtc_Word16 *stateGQ0,
+                                       WebRtc_Word32 *lat_inQ25,
+                                       WebRtc_Word16 *filt_coefQ15,
+                                       WebRtc_Word32 *gain_lo_hiQ17,
+                                       WebRtc_Word16 lo_hi,
+                                       WebRtc_Word16 *lat_outQ0);
+
+int WebRtcIsacfix_AutocorrC(WebRtc_Word32* __restrict r,
+                            const WebRtc_Word16* __restrict x,
+                            WebRtc_Word16 N,
+                            WebRtc_Word16 order,
+                            WebRtc_Word16* __restrict scale);
+
+void WebRtcIsacfix_FilterMaLoopC(int16_t input0,
+                                 int16_t input1,
+                                 int32_t input2,
+                                 int32_t* ptr0,
+                                 int32_t* ptr1,
+                                 int32_t* ptr2);
+
+// Functions for ARM-Neon platforms, in place of the above two generic C ones.
+#if (defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM_NEON))
+int WebRtcIsacfix_AutocorrNeon(WebRtc_Word32* __restrict r,
+                               const WebRtc_Word16* __restrict x,
+                               WebRtc_Word16 N,
+                               WebRtc_Word16 order,
+                               WebRtc_Word16* __restrict scale);
+
+void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0,
+                                    int16_t input1,
+                                    int32_t input2,
+                                    int32_t* ptr0,
+                                    int32_t* ptr1,
+                                    int32_t* ptr2);
+#endif
+
+/**** Function pointers associated with 
+ **** WebRtcIsacfix_AutocorrC() / WebRtcIsacfix_AutocorrNeon()
+ **** and WebRtcIsacfix_FilterMaLoopC() / WebRtcIsacfix_FilterMaLoopNeon().
+ ****/
+
+typedef int (*AutocorrFix)(WebRtc_Word32* __restrict r,
+                           const WebRtc_Word16* __restrict x,
+                           WebRtc_Word16 N,
+                           WebRtc_Word16 order,
+                           WebRtc_Word16* __restrict scale);
+extern AutocorrFix WebRtcIsacfix_AutocorrFix;
+
+typedef void (*FilterMaLoopFix)(int16_t input0,
+                                int16_t input1,
+                                int32_t input2,
+                                int32_t* ptr0,
+                                int32_t* ptr1,
+                                int32_t* ptr2);
+extern FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix;
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode.c
new file mode 100644
index 0000000..2e15e7a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode.c
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * decode.c
+ *
+ * This C file contains the internal decoding function.
+ *
+ */
+
+#include <string.h>
+
+#include "bandwidth_estimator.h"
+#include "codec.h"
+#include "entropy_coding.h"
+#include "pitch_estimator.h"
+#include "settings.h"
+#include "structs.h"
+
+
+
+
/*
 * Decode one received iSAC frame: parse pitch, LPC, and spectrum data,
 * run the inverse pitch filter and the perceptual post-filters, and
 * recombine the two sub-bands into the output signal. A 60 ms packet is
 * processed as two bundled 30 ms frames (two loop iterations).
 *
 * Output:
 *   - signal_out16          : decoded audio samples
 *   - current_framesamples  : frame length in samples, read from the stream
 *
 * Return value: value returned by the last WebRtcIsacfix_DecodeSpec()
 * call, or a negative error code from any decode step.
 */
WebRtc_Word16 WebRtcIsacfix_DecodeImpl(WebRtc_Word16       *signal_out16,
                                       ISACFIX_DecInst_t *ISACdec_obj,
                                       WebRtc_Word16       *current_framesamples)
{
  int k;
  int err;
  WebRtc_Word16 BWno;   /* bandwidth-estimate index parsed from the stream */
  WebRtc_Word16 len = 0;

  WebRtc_Word16 model;  /* LPC model number, set by WebRtcIsacfix_DecodeLpc */


  WebRtc_Word16 Vector_Word16_1[FRAMESAMPLES/2];
  WebRtc_Word16 Vector_Word16_2[FRAMESAMPLES/2];

  WebRtc_Word32 Vector_Word32_1[FRAMESAMPLES/2];
  WebRtc_Word32 Vector_Word32_2[FRAMESAMPLES/2];

  WebRtc_Word16 lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs
  WebRtc_Word16 hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. coeffs
  WebRtc_Word32 gain_lo_hiQ17[2*SUBFRAMES];

  WebRtc_Word16 PitchLags_Q7[PITCH_SUBFRAMES];
  WebRtc_Word16 PitchGains_Q12[PITCH_SUBFRAMES];
  WebRtc_Word16 AvgPitchGain_Q12;

  WebRtc_Word16 tmp_1, tmp_2;
  WebRtc_Word32 tmp32a, tmp32b;
  WebRtc_Word16 gainQ13;


  WebRtc_Word16 frame_nb; /* counter */
  WebRtc_Word16 frame_mode; /* 0 for 20ms and 30ms, 1 for 60ms */
  WebRtc_Word16 processed_samples;

  /* PLC */
  WebRtc_Word16 overlapWin[ 240 ];

  /* Reset the arithmetic-decoder state before parsing a new stream. */
  (ISACdec_obj->bitstr_obj).W_upper = 0xFFFFFFFF;
  (ISACdec_obj->bitstr_obj).streamval = 0;
  (ISACdec_obj->bitstr_obj).stream_index = 0;
  (ISACdec_obj->bitstr_obj).full = 1;


  /* decode framelength and BW estimation - not used, only for stream pointer*/
  err = WebRtcIsacfix_DecodeFrameLen(&ISACdec_obj->bitstr_obj, current_framesamples);
  if (err<0)  // error check
    return err;

  frame_mode = (WebRtc_Word16)WEBRTC_SPL_DIV(*current_framesamples, MAX_FRAMESAMPLES); /* 0, or 1 */
  processed_samples = (WebRtc_Word16)WEBRTC_SPL_DIV(*current_framesamples, frame_mode+1); /* either 320 (20ms) or 480 (30, 60 ms) */

  err = WebRtcIsacfix_DecodeSendBandwidth(&ISACdec_obj->bitstr_obj, &BWno);
  if (err<0)  // error check
    return err;

  /* one loop if it's one frame (20 or 30ms), 2 loops if 2 frames bundled together (60ms) */
  for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) {

    /* decode & dequantize pitch parameters */
    err = WebRtcIsacfix_DecodePitchGain(&(ISACdec_obj->bitstr_obj), PitchGains_Q12);
    if (err<0)  // error check
      return err;

    err = WebRtcIsacfix_DecodePitchLag(&ISACdec_obj->bitstr_obj, PitchGains_Q12, PitchLags_Q7);
    if (err<0)  // error check
      return err;

    /* mean of the four sub-frame pitch gains (still Q12) */
    AvgPitchGain_Q12 = (WebRtc_Word16)(((WebRtc_Word32)PitchGains_Q12[0] + PitchGains_Q12[1] + PitchGains_Q12[2] + PitchGains_Q12[3])>>2);

    /* decode & dequantize FiltCoef */
    err = WebRtcIsacfix_DecodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15,
                                  &ISACdec_obj->bitstr_obj, &model);

    if (err<0)  // error check
      return err;

    /* decode & dequantize spectrum */
    len = WebRtcIsacfix_DecodeSpec(&ISACdec_obj->bitstr_obj, Vector_Word16_1, Vector_Word16_2, AvgPitchGain_Q12);
    if (len < 0)  // error check
      return len;

    // Why does this need Q16 in and out? /JS
    WebRtcIsacfix_Spec2Time(Vector_Word16_1, Vector_Word16_2, Vector_Word32_1, Vector_Word32_2);

    /* round (+64) and shift the low-band result from Q16 down to Q9 */
    for (k=0; k<FRAMESAMPLES/2; k++) {
      Vector_Word16_1[k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(Vector_Word32_1[k]+64, 7); //Q16 -> Q9
    }

    /* ----  If this is recovery frame ---- */
    if( (ISACdec_obj->plcstr_obj).used == PLC_WAS_USED )
    {
      (ISACdec_obj->plcstr_obj).used = PLC_NOT_USED;
      if( (ISACdec_obj->plcstr_obj).B < 1000 )
      {
        /* NOTE(review): dead store -- decayCoeffPriodic is unconditionally
           overwritten with WEBRTC_SPL_WORD16_MAX just below, so this
           conditional assignment has no effect. Confirm which value was
           intended. */
        (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 4000;
      }

      ISACdec_obj->plcstr_obj.decayCoeffPriodic = WEBRTC_SPL_WORD16_MAX;    /* DECAY_RATE is in Q15 */
      ISACdec_obj->plcstr_obj.decayCoeffNoise = WEBRTC_SPL_WORD16_MAX;    /* DECAY_RATE is in Q15 */
      ISACdec_obj->plcstr_obj.pitchCycles = 0;

      /* scale the first pitch gain by 700/1024 (~0.68) after a concealment */
      PitchGains_Q12[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(PitchGains_Q12[0], 700, 10 );

      /* ---- Add-overlap ---- */
      /* cross-fade the stored concealment tail into the start of this frame */
      WebRtcSpl_GetHanningWindow( overlapWin, RECOVERY_OVERLAP );
      for( k = 0; k < RECOVERY_OVERLAP; k++ )
        Vector_Word16_1[k] = WEBRTC_SPL_ADD_SAT_W16(
            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( (ISACdec_obj->plcstr_obj).overlapLP[k], overlapWin[RECOVERY_OVERLAP - k - 1], 14),
            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( Vector_Word16_1[k], overlapWin[k], 14) );



    }

    /* --- Store side info --- */
    /* on the last sub-frame, save state the PLC needs for a possible loss */
    if( frame_nb == frame_mode )
    {
      /* --- LPC info */
      WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).lofilt_coefQ15, &lofilt_coefQ15[(SUBFRAMES-1)*ORDERLO], ORDERLO );
      WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).hifilt_coefQ15, &hifilt_coefQ15[(SUBFRAMES-1)*ORDERHI], ORDERHI );
      (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[0] = gain_lo_hiQ17[(SUBFRAMES-1) * 2];
      (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[1] = gain_lo_hiQ17[(SUBFRAMES-1) * 2 + 1];

      /* --- LTP info */
      (ISACdec_obj->plcstr_obj).AvgPitchGain_Q12 = PitchGains_Q12[3];
      (ISACdec_obj->plcstr_obj).lastPitchGain_Q12 = PitchGains_Q12[3];
      (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 = PitchLags_Q7[3];

      /* NOTE(review): this doubles lastPitchLag_Q7 for lags below 3000 (Q7);
         confirm this short-lag handling is intentional. */
      if( PitchLags_Q7[3] < 3000 )
        (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 += PitchLags_Q7[3];

      WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).prevPitchInvIn, Vector_Word16_1, FRAMESAMPLES/2 );

    }
    /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */

    /* inverse pitch filter */
    WebRtcIsacfix_PitchFilter(Vector_Word16_1, Vector_Word16_2, &ISACdec_obj->pitchfiltstr_obj, PitchLags_Q7, PitchGains_Q12, 4);

    if( frame_nb == frame_mode )
    {
      /* keep the tail of the pitch-filter output for possible PLC use */
      WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).prevPitchInvOut, &(Vector_Word16_2[FRAMESAMPLES/2 - (PITCH_MAX_LAG + 10)]), PITCH_MAX_LAG );
    }


    /* reduce gain to compensate for pitch enhancer */
    /* gain = 1.0f - 0.45f * AvgPitchGain; */
    tmp32a = WEBRTC_SPL_MUL_16_16_RSFT(AvgPitchGain_Q12, 29, 0); // Q18
    tmp32b = 262144 - tmp32a;  // 262144 = 1.0 in Q18
    gainQ13 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q13

    for (k = 0; k < FRAMESAMPLES/2; k++)
    {
      Vector_Word32_1[k] = (WebRtc_Word32) WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(Vector_Word16_2[k], gainQ13), 3); // Q25
    }


    /* perceptual post-filtering (using normalized lattice filter) */
    WebRtcIsacfix_NormLatticeFilterAr(ORDERLO, (ISACdec_obj->maskfiltstr_obj).PostStateLoGQ0,
                                      Vector_Word32_1, lofilt_coefQ15, gain_lo_hiQ17, 0, Vector_Word16_1);

    /* --- Store Highpass Residual --- */
    for (k = 0; k < FRAMESAMPLES/2; k++)
      Vector_Word32_1[k]    = WEBRTC_SPL_LSHIFT_W32(Vector_Word32_2[k], 9); // Q16 -> Q25

    for( k = 0; k < PITCH_MAX_LAG + 10; k++ )
      (ISACdec_obj->plcstr_obj).prevHP[k] = Vector_Word32_1[FRAMESAMPLES/2 - (PITCH_MAX_LAG + 10) + k];


    WebRtcIsacfix_NormLatticeFilterAr(ORDERHI, (ISACdec_obj->maskfiltstr_obj).PostStateHiGQ0,
                                      Vector_Word32_1, hifilt_coefQ15, gain_lo_hiQ17, 1, Vector_Word16_2);

    /* recombine the 2 bands */

    /* Form the polyphase signals, and compensate for DC offset */
    for (k=0;k<FRAMESAMPLES/2;k++) {
      tmp_1 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(((WebRtc_Word32)Vector_Word16_1[k]+Vector_Word16_2[k] + 1)); /* Construct a new upper channel signal*/
      tmp_2 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(((WebRtc_Word32)Vector_Word16_1[k]-Vector_Word16_2[k])); /* Construct a new lower channel signal*/
      Vector_Word16_1[k] = tmp_1;
      Vector_Word16_2[k] = tmp_2;
    }

    /* synthesis filterbank merges the bands into this sub-frame's output slot */
    WebRtcIsacfix_FilterAndCombine1(Vector_Word16_1, Vector_Word16_2, signal_out16 + frame_nb * processed_samples, &ISACdec_obj->postfiltbankstr_obj);

  }
  return len;
}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode_bwe.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode_bwe.c
new file mode 100644
index 0000000..68c6003
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode_bwe.c
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * decode_bwe.c
+ *
+ * This C file contains the internal decode bandwidth estimate function.
+ *
+ */
+
+
+#include "bandwidth_estimator.h"
+#include "codec.h"
+#include "entropy_coding.h"
+#include "structs.h"
+
+
+
+
+int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr *bwest_str,
+                                    Bitstr_dec  *streamdata,
+                                    WebRtc_Word32  packet_size,
+                                    WebRtc_UWord16 rtp_seq_number,
+                                    WebRtc_UWord32 send_ts,
+                                    WebRtc_UWord32 arr_ts)
+{
+  WebRtc_Word16 index;
+  WebRtc_Word16 frame_samples;
+  int err;
+
+  /* decode framelength */
+  err = WebRtcIsacfix_DecodeFrameLen(streamdata, &frame_samples);
+  /* error check */
+  if (err<0) {
+    return err;
+  }
+
+  /* decode BW estimation */
+  err = WebRtcIsacfix_DecodeSendBandwidth(streamdata, &index);
+  /* error check */
+  if (err<0) {
+    return err;
+  }
+
+  /* Update BWE with received data */
+  err = WebRtcIsacfix_UpdateUplinkBwImpl(
+      bwest_str,
+      rtp_seq_number,
+      (WebRtc_UWord16)WEBRTC_SPL_UDIV(WEBRTC_SPL_UMUL(frame_samples,1000), FS),
+      send_ts,
+      arr_ts,
+      (WebRtc_Word16) packet_size,  /* in bytes */
+      index);
+
+  /* error check */
+  if (err<0) {
+    return err;
+  }
+
+  /* Succesful */
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode_plc.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode_plc.c
new file mode 100644
index 0000000..de51658
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/decode_plc.c
@@ -0,0 +1,830 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * decode_plc.c
+ *
+ * Packet Loss Concealment.
+ *
+ */
+
+#include <string.h>
+
+#include "settings.h"
+#include "entropy_coding.h"
+#include "pitch_estimator.h"
+#include "bandwidth_estimator.h"
+#include "structs.h"
+#include "codec.h"
+
+
+#define NO_OF_PRIMES 8
+#define NOISE_FILTER_LEN 30
+
+/*
+ * function to decode the bitstream
+ * returns the total number of bytes in the stream
+ */
+
+/* Moving-average filter whose output is faded out sample by sample;
+ * used only by the packet-loss concealment. Each output sample is the
+ * rounded, saturated convolution result scaled by 'decay' (Q15), and
+ * 'decay' is reduced by 'reduceDecay' after every sample (floored at
+ * zero). Returns the decay value reached after the last sample so a
+ * caller can continue the fade-out in a subsequent call. */
+static WebRtc_Word16 plc_filterma_Fast(
+    WebRtc_Word16 *In,  /* (i)   Vector to be filtered. InOut[-orderCoef+1]
+                           to InOut[-1] contains state */
+    WebRtc_Word16 *Out,  /* (o)   Filtered vector */
+    WebRtc_Word16 *B,   /* (i)   The filter coefficients (in Q0) */
+    WebRtc_Word16 Blen,  /* (i)   Number of B coefficients */
+    WebRtc_Word16 len,   /* (i)  Number of samples to be filtered */
+    WebRtc_Word16 reduceDecay, /* (i) per-sample decrement of 'decay' */
+    WebRtc_Word16 decay,       /* (i) initial output gain in Q15 */
+    WebRtc_Word16 rshift )     /* (i) shift taking the accumulator back to Q0 */
+{
+  int i, j;
+  WebRtc_Word32 o;
+  WebRtc_Word32 lim;
+
+  /* Saturation limit matching the accumulator domain: 2^(15+rshift)-1. */
+  lim = WEBRTC_SPL_LSHIFT_W32( (WebRtc_Word32)1, 15 + rshift )-1;
+
+  for (i = 0; i < len; i++)
+  {
+    G_CONST WebRtc_Word16 *b_ptr = &B[0];
+    G_CONST WebRtc_Word16 *x_ptr = &In[i];
+
+    o = (WebRtc_Word32)0;
+
+    /* Convolution: coefficients walk forward, input walks backward. */
+    for (j = 0;j < Blen; j++)
+    {
+      o = WEBRTC_SPL_ADD_SAT_W32( o, WEBRTC_SPL_MUL_16_16( *b_ptr, *x_ptr) );
+      b_ptr++;
+      x_ptr--;
+    }
+
+    /* to round off correctly */
+    o = WEBRTC_SPL_ADD_SAT_W32( o, WEBRTC_SPL_LSHIFT_W32( 1, (rshift-1) ) );
+
+    /* saturate according to the domain of the filter coefficients */
+    o = WEBRTC_SPL_SAT((WebRtc_Word32)lim, o, (WebRtc_Word32)-lim);
+
+    /* o should be in the range of WebRtc_Word16 */
+    o = WEBRTC_SPL_RSHIFT_W32( o, rshift );
+
+    /* decay the output signal; this is specific to plc */
+    *Out++ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16)o, decay, 15); // ((o + (WebRtc_Word32)2048) >> 12);
+
+    /* change the decay */
+    decay -= reduceDecay;
+    if( decay < 0 )
+      decay = 0;
+  }
+  return( decay );
+}
+
+
+
+
+
+
+
+
+/* Approximate log2(x), result in Q8.
+ * The integer part is 31 minus the number of leading zeros of x; the
+ * fractional part is taken linearly from the 8 bits that follow the
+ * leading one of the normalized value. */
+static __inline WebRtc_Word32 log2_Q8_T( WebRtc_UWord32 x ) {
+
+  WebRtc_Word32 norm;
+  WebRtc_Word16 fracQ8;
+
+  norm = WebRtcSpl_NormU32(x);
+  /* Normalize x, drop the leading one, keep the next 8 bits as a
+   * linear approximation of the fractional part. */
+  fracQ8 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(((WebRtc_UWord32)WEBRTC_SPL_LSHIFT_W32(x, norm)&0x7FFFFFFF), 23);
+
+  /* (integer part << 8) + fraction, both in Q8. */
+  return (WEBRTC_SPL_LSHIFT_W16((31-norm), 8)+fracQ8);
+
+}
+
+/* Approximate 2^x with x in Q10; the result is also in Q10.
+ * A mantissa is formed from the fractional Q10 bits of x (with an
+ * implicit leading one) and then shifted by the integer part of x. */
+static __inline WebRtc_Word16  exp2_Q10_T(WebRtc_Word16 x) { // Both in and out in Q10
+
+  WebRtc_Word16 mantissa, negExp;
+
+  /* 1.xxx in Q10: implicit one plus the 10 fractional bits of x. */
+  mantissa = (WebRtc_Word16)(0x0400|(x&0x03FF));
+  /* Negated integer part of x (arithmetic shift keeps the sign). */
+  negExp = -(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(x,10);
+
+  if(negExp>0)
+    return (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(mantissa, negExp);
+  else
+    return (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(mantissa, -negExp);
+
+}
+
+
+/*
+  This is a fixed-point version of the above code with limLow = 700 and limHigh = 5000,
+  hard-coded. The values 700 and 5000 were experimentally obtained.
+
+  The function implements membership values for two sets. The membership functions are
+  of second order, corresponding to half-bell-shaped pulses.
+*/
+static void MemshipValQ15( WebRtc_Word16 in, WebRtc_Word16 *A, WebRtc_Word16 *B )
+{
+  WebRtc_Word16 x;
+
+  /* Translate the range so lowLim maps to 0; limHigh becomes 4300,
+   * midpoint M = 2150. */
+  in -= 700;
+
+  if( in <= 0 )
+  {
+    /* Below the region: entirely in set A. */
+    *B = 0;
+    *A = WEBRTC_SPL_WORD16_MAX;
+  }
+  else if( in <= 2150 )
+  {
+    /* Lower half of the region: b = in^2 / (2*M^2), a = 1 - b, in Q15.
+     * x = in/2150 in Q15 = in*15 + in*983/2^12 (983/2^12 = 0.23999);
+     * in <= 2150 keeps x within WebRtc_Word16. */
+    x = (WebRtc_Word16)( WEBRTC_SPL_MUL_16_16( in, 15 ) +
+                         WEBRTC_SPL_MUL_16_16_RSFT( in, 983, 12) );
+    /* x*x is Q30: shift 16 back to the Q15 domain plus one more for
+     * the division by 2, with rounding. */
+    *B = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( WEBRTC_SPL_MUL_16_16( x, x ) + 0x00010000, 17 );
+    *A = WEBRTC_SPL_WORD16_MAX - *B;
+  }
+  else if( in < 4300 )
+  {
+    /* Upper half: mirror of the lower half around the midpoint. */
+    in = 4300 - in;
+    x = (WebRtc_Word16)( WEBRTC_SPL_MUL_16_16( in, 15 ) +
+                         WEBRTC_SPL_MUL_16_16_RSFT( in, 983, 12) );
+    *A = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( WEBRTC_SPL_MUL_16_16( x, x ) + 0x00010000, 17 );
+    *B = WEBRTC_SPL_WORD16_MAX - *A;
+  }
+  else
+  {
+    /* Above the region: entirely in set B. */
+    *A = 0;
+    *B = WEBRTC_SPL_WORD16_MAX;
+  }
+}
+
+
+
+
+/* Linearly resample 'in' (lenIn samples) to 'out' (lenOut samples).
+ * Positions are tracked in fixed point, in units of 1/RESAMP_RES.
+ * When the two neighbouring input samples have opposite signs, the
+ * interpolation uses an unsigned difference so the gap cannot overflow
+ * 16 bits. NOTE(review): the last output interpolates with in[j+1];
+ * it is assumed the accumulated steps never push j past lenIn-2 --
+ * confirm against the callers. */
+static void LinearResampler( WebRtc_Word16 *in, WebRtc_Word16 *out, WebRtc_Word16 lenIn, WebRtc_Word16 lenOut )
+{
+  WebRtc_Word32 n;
+  WebRtc_Word16 resOut, i, j, relativePos, diff; /* */
+  WebRtc_UWord16 udiff;
+
+  /* Same length: plain copy, nothing to interpolate. */
+  if( lenIn == lenOut )
+  {
+    WEBRTC_SPL_MEMCPY_W16( out, in, lenIn );
+    return;
+  }
+
+  /* Input step per output sample, in units of 1/RESAMP_RES. */
+  n = WEBRTC_SPL_MUL_16_16( (WebRtc_Word16)(lenIn-1), RESAMP_RES );
+  resOut = WebRtcSpl_DivW32W16ResW16( n, (WebRtc_Word16)(lenOut-1) );
+
+  out[0] = in[0];
+  for( i = 1, j = 0, relativePos = 0; i < lenOut; i++ )
+  {
+
+    /* Advance the fractional read position; j is the integer sample
+     * index, relativePos the remaining fraction. */
+    relativePos += resOut;
+    while( relativePos > RESAMP_RES )
+    {
+      j++;
+      relativePos -= RESAMP_RES;
+    }
+
+
+    /* An overflow may happen and the difference in sample values may
+     * require more than 16 bits. We like to avoid 32-bit arithmetic
+     * as much as possible. */
+
+    if( (in[ j ] > 0) && (in[j + 1] < 0) )
+    {
+      /* Opposite signs: compute the (always positive) gap unsigned. */
+      udiff = (WebRtc_UWord16)(in[ j ] - in[j + 1]);
+      out[ i ] = in[ j ] - (WebRtc_UWord16)( ((WebRtc_Word32)( udiff * relativePos )) >> RESAMP_RES_BIT);
+    }
+    else
+    {
+      if( (in[j] < 0) && (in[j+1] > 0) )
+      {
+        udiff = (WebRtc_UWord16)( in[j + 1] - in[ j ] );
+        out[ i ] = in[ j ] + (WebRtc_UWord16)( ((WebRtc_Word32)( udiff * relativePos )) >> RESAMP_RES_BIT);
+      }
+      else
+      {
+        /* Same sign: the signed 16-bit difference cannot overflow. */
+        diff = in[ j + 1 ] - in[ j ];
+        out[ i ] = in[ j ] + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( diff, relativePos, RESAMP_RES_BIT );
+      }
+    }
+  }
+}
+
+
+
+
+
+/* Packet-loss concealment for one frame.
+ *
+ * Reconstructs the missing frame from the decoder's stored history:
+ * the previous LPC parameters and gains are reused as-is, and the
+ * excitation is rebuilt as a weighted mix of a periodic component
+ * (stretched repetitions of the last pitch cycle) and a noisy
+ * component. The complementary Q15 weights A (noise) and B (periodic)
+ * are derived from voicing evidence of the previous frame. Both
+ * components are faded out by 'myDecayRate', which grows for
+ * consecutive concealed frames.
+ *
+ * signal_out16         (o)   : reconstructed audio frame
+ * ISACdec_obj          (i/o) : decoder instance; plcstr_obj is updated
+ * current_framesamples (o)   : always set to 480
+ *
+ * Return value               : always 0 ('len' is never modified)
+ */
+WebRtc_Word16 WebRtcIsacfix_DecodePlcImpl(WebRtc_Word16 *signal_out16,
+                                          ISACFIX_DecInst_t *ISACdec_obj,
+                                          WebRtc_Word16 *current_framesamples )
+{
+  int subframecnt;
+  WebRtc_Word16 len = 0;
+
+  WebRtc_Word16* Vector_Word16_1;
+  WebRtc_Word16  Vector_Word16_Extended_1[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
+  WebRtc_Word16* Vector_Word16_2;
+  WebRtc_Word16  Vector_Word16_Extended_2[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
+
+  WebRtc_Word32 Vector_Word32_1[FRAMESAMPLES_HALF];
+  WebRtc_Word32 Vector_Word32_2[FRAMESAMPLES_HALF];
+
+  WebRtc_Word16 lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs
+  WebRtc_Word16 hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. coeffs
+
+  WebRtc_Word16 pitchLags_Q7[PITCH_SUBFRAMES];
+  WebRtc_Word16 pitchGains_Q12[PITCH_SUBFRAMES];
+
+  WebRtc_Word16 tmp_1, tmp_2;
+  WebRtc_Word32 tmp32a, tmp32b;
+  WebRtc_Word16 gainQ13;
+
+  WebRtc_Word16 myDecayRate;
+
+  /* ---------- PLC variables ------------ */
+  WebRtc_Word16 lag0, i, k, noiseIndex;
+  WebRtc_Word16 stretchPitchLP[PITCH_MAX_LAG + 10], stretchPitchLP1[PITCH_MAX_LAG + 10];
+
+  WebRtc_Word32 gain_lo_hiQ17[2*SUBFRAMES];
+
+  WebRtc_Word16 nLP, pLP, wNoisyLP, wPriodicLP, tmp16, minIdx;
+  WebRtc_Word32 nHP, pHP, wNoisyHP, wPriodicHP, corr, minCorr, maxCoeff;
+  WebRtc_Word16 noise1, rshift;
+
+
+  WebRtc_Word16 ltpGain, pitchGain, myVoiceIndicator, myAbs, maxAbs;
+  WebRtc_Word32 varIn, varOut, logVarIn, logVarOut, Q, logMaxAbs;
+  int rightShiftIn, rightShiftOut;
+
+
+  /* ------------------------------------- */
+
+
+  myDecayRate = (DECAY_RATE);
+  /* Point past the NOISE_FILTER_LEN state area of the extended buffers. */
+  Vector_Word16_1 = &Vector_Word16_Extended_1[NOISE_FILTER_LEN];
+  Vector_Word16_2 = &Vector_Word16_Extended_2[NOISE_FILTER_LEN];
+
+
+  /* ----- Simply Copy Previous LPC parameters ------ */
+  for( subframecnt = 0; subframecnt < SUBFRAMES; subframecnt++ )
+  {
+    /* lower Band */
+    WEBRTC_SPL_MEMCPY_W16(&lofilt_coefQ15[ subframecnt * ORDERLO ],
+                          (ISACdec_obj->plcstr_obj).lofilt_coefQ15, ORDERLO);
+    gain_lo_hiQ17[2*subframecnt] = (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[0];
+
+    /* Upper Band */
+    WEBRTC_SPL_MEMCPY_W16(&hifilt_coefQ15[ subframecnt * ORDERHI ],
+                          (ISACdec_obj->plcstr_obj).hifilt_coefQ15, ORDERHI);
+    gain_lo_hiQ17[2*subframecnt + 1] = (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[1];
+  }
+
+
+
+
+  /* Pitch lag in samples: round the stored Q7 lag and add one. */
+  lag0 = WEBRTC_SPL_RSHIFT_W16(
+      (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 + 64, 7 ) + 1;
+
+
+  /* First concealed frame after a correctly decoded one:
+   * (re)derive the PLC mixing state from the stored history. */
+  if( (ISACdec_obj->plcstr_obj).used != PLC_WAS_USED )
+  {
+    (ISACdec_obj->plcstr_obj).pitchCycles = 0;
+
+    (ISACdec_obj->plcstr_obj).lastPitchLP =
+        &((ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0]);
+    minCorr = WEBRTC_SPL_WORD32_MAX;
+
+    /* Search the older history for the segment best matching the last
+     * pitch cycle (minimum sum of absolute differences over a 21-point
+     * window around one lag further back). */
+    if ( (FRAMESAMPLES_HALF - 2*lag0 - 10) > 0 )
+    {
+      minIdx = 11;
+      for( i = 0; i < 21; i++ )
+      {
+        corr = 0;
+        for( k = 0; k < lag0; k++ )
+        {
+          corr = WEBRTC_SPL_ADD_SAT_W32( corr, WEBRTC_SPL_ABS_W32(
+              WEBRTC_SPL_SUB_SAT_W16(
+                  (ISACdec_obj->plcstr_obj).lastPitchLP[k],
+                  (ISACdec_obj->plcstr_obj).prevPitchInvIn[
+                      FRAMESAMPLES_HALF - 2*lag0 - 10 + i + k ] ) ) );
+        }
+        if( corr < minCorr )
+        {
+          minCorr = corr;
+          minIdx = i;
+        }
+      }
+      (ISACdec_obj->plcstr_obj).prevPitchLP =
+          &( (ISACdec_obj->plcstr_obj).prevPitchInvIn[
+              FRAMESAMPLES_HALF - lag0*2 - 10 + minIdx] );
+    }
+    else
+    {
+      /* Not enough history for the search; reuse the last cycle. */
+      (ISACdec_obj->plcstr_obj).prevPitchLP =
+          (ISACdec_obj->plcstr_obj).lastPitchLP;
+    }
+    pitchGain = (ISACdec_obj->plcstr_obj).lastPitchGain_Q12;
+
+    /* Energies (lag-0 autocorrelation) of the last pitch cycle of the
+     * pitch-filter input and output histories. */
+    WebRtcSpl_AutoCorrelation(
+        &(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0],
+        lag0, 0, &varIn, &rightShiftIn);
+    WebRtcSpl_AutoCorrelation(
+        &(ISACdec_obj->plcstr_obj).prevPitchInvOut[PITCH_MAX_LAG + 10 - lag0],
+        lag0, 0, &varOut, &rightShiftOut);
+
+    maxAbs = 0;
+    for( i = 0; i< lag0; i++)
+    {
+      myAbs = WEBRTC_SPL_ABS_W16(
+          (ISACdec_obj->plcstr_obj).prevPitchInvOut[
+              PITCH_MAX_LAG + 10 - lag0 + i] );
+      maxAbs = (myAbs > maxAbs)? myAbs:maxAbs;
+    }
+    /* log2 of the energies in Q8, compensating for the normalization
+     * shifts reported by the autocorrelation. */
+    logVarIn = log2_Q8_T( (WebRtc_UWord32)( varIn ) ) +
+        (WebRtc_Word32)(rightShiftIn << 8);
+    logVarOut = log2_Q8_T( (WebRtc_UWord32)( varOut ) ) +
+        (WebRtc_Word32)(rightShiftOut << 8);
+    logMaxAbs = log2_Q8_T( (WebRtc_UWord32)( maxAbs ) );
+
+    /* LTP gain estimate and a peak-to-energy measure, both from Q8 logs. */
+    ltpGain = (WebRtc_Word16)(logVarOut - logVarIn);
+    Q = 2 * logMaxAbs - ( logVarOut - 1512 );
+
+    /*
+     * ---
+     * We are computing sqrt( (VarIn/lag0) / var( noise ) )
+     * var( noise ) is almost 256. we have already computed log2( VarIn ) in Q8
+     * so we actually compute 2^( 0.5*(log2( VarIn ) - log2( lag0 ) - log2( var(noise ) )  ).
+     * Note that our log function is in Q8 but the exponential function is in Q10.
+     * --
+     */
+
+    logVarIn -= log2_Q8_T( (WebRtc_UWord32)( lag0 ) );
+    tmp16 = (WebRtc_Word16)((logVarIn<<1) - (4<<10) );
+    rightShiftIn = 0;
+    if( tmp16 > 4096 )
+    {
+      tmp16 -= 4096;
+      tmp16 = exp2_Q10_T( tmp16 );
+      tmp16 >>= 6;
+    }
+    else
+      tmp16 = exp2_Q10_T( tmp16 )>>10;
+
+    /* Standard deviation used by the noise generator below. */
+    (ISACdec_obj->plcstr_obj).std = tmp16 - 4;
+
+    /* Select the complementary Q15 weights A (noise) and B (periodic)
+     * from the LTP-gain and pitch-gain evidence. */
+    if( (ltpGain < 110) || (ltpGain > 230) )
+    {
+      if( ltpGain < 100 && (pitchGain < 1800) )
+      {
+        (ISACdec_obj->plcstr_obj).A = WEBRTC_SPL_WORD16_MAX;
+      }
+      else
+      {
+        (ISACdec_obj->plcstr_obj).A = ((ltpGain < 110) && (Q < 800)
+                                       )? WEBRTC_SPL_WORD16_MAX:0;
+      }
+      (ISACdec_obj->plcstr_obj).B = WEBRTC_SPL_WORD16_MAX -
+          (ISACdec_obj->plcstr_obj).A;
+    }
+    else
+    {
+      if( (pitchGain < 450) || (pitchGain > 1600) )
+      {
+        (ISACdec_obj->plcstr_obj).A = ((pitchGain < 450)
+                                       )? WEBRTC_SPL_WORD16_MAX:0;
+        (ISACdec_obj->plcstr_obj).B = WEBRTC_SPL_WORD16_MAX -
+            (ISACdec_obj->plcstr_obj).A;
+      }
+      else
+      {
+        myVoiceIndicator = ltpGain * 2 + pitchGain;
+        MemshipValQ15( myVoiceIndicator,
+                       &(ISACdec_obj->plcstr_obj).A, &(ISACdec_obj->plcstr_obj).B );
+      }
+    }
+
+
+
+    /* NOTE(review): this unconditionally overwrites the A/B weights
+     * chosen just above with a fuzzy decision on a second voicing
+     * indicator, making the previous selection dead -- presumably
+     * intentional, but worth confirming against the float decoder. */
+    myVoiceIndicator = ltpGain * 16 + pitchGain * 2 + (pitchGain >> 8);
+    MemshipValQ15( myVoiceIndicator,
+                   &(ISACdec_obj->plcstr_obj).A, &(ISACdec_obj->plcstr_obj).B );
+
+
+
+    (ISACdec_obj->plcstr_obj).stretchLag = lag0;
+    (ISACdec_obj->plcstr_obj).pitchIndex = 0;
+
+  }
+  else
+  {
+    /* Consecutive concealed frame: fade out faster. */
+    myDecayRate = (DECAY_RATE<<2);
+  }
+
+  /* Weak periodic component (B weights the periodic part below):
+   * fade even faster. */
+  if( (ISACdec_obj->plcstr_obj).B < 1000 )
+  {
+    myDecayRate += (DECAY_RATE<<3);
+  }
+
+  /* ------------ reconstructing the residual signal ------------------ */
+
+  LinearResampler( (ISACdec_obj->plcstr_obj).lastPitchLP,
+                   stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag );
+  /* inverse pitch filter */
+
+  pitchLags_Q7[0] = pitchLags_Q7[1] = pitchLags_Q7[2] = pitchLags_Q7[3] =
+      ((ISACdec_obj->plcstr_obj).stretchLag<<7);
+  pitchGains_Q12[3] = ( (ISACdec_obj->plcstr_obj).lastPitchGain_Q12);
+  /* Earlier subframes get slightly smaller gains (factor 1010/1024). */
+  pitchGains_Q12[2] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+      pitchGains_Q12[3], 1010, 10 );
+  pitchGains_Q12[1] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+      pitchGains_Q12[2], 1010, 10 );
+  pitchGains_Q12[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+      pitchGains_Q12[1], 1010, 10 );
+
+
+  /* most of the time either B or A are zero so separating */
+  if( (ISACdec_obj->plcstr_obj).B == 0 )
+  {
+    /* Purely noisy excitation: no periodic component at all. */
+    for( i = 0; i < FRAMESAMPLES_HALF; i++ )
+    {
+      /* --- Low Pass                                             */
+      (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND(
+          (ISACdec_obj->plcstr_obj).seed );
+      Vector_Word16_1[i] = WEBRTC_SPL_RSHIFT_W16(
+          (ISACdec_obj->plcstr_obj).seed, 10 ) - 16;
+
+      /* --- Highpass                                              */
+      (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND(
+          (ISACdec_obj->plcstr_obj).seed );
+      Vector_Word16_2[i] = WEBRTC_SPL_RSHIFT_W16(
+          (ISACdec_obj->plcstr_obj).seed, 10 ) - 16;
+
+    }
+    /* Fill the filter-state part of the extended buffers with noise
+     * as well (index 0 is left untouched by this loop). */
+    for( i = 1; i < NOISE_FILTER_LEN; i++ )
+    {
+      (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND(
+          (ISACdec_obj->plcstr_obj).seed );
+      Vector_Word16_Extended_1[ i ] = WEBRTC_SPL_RSHIFT_W16(
+          (ISACdec_obj->plcstr_obj).seed, 10 ) - 16;
+
+      (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND(
+          (ISACdec_obj->plcstr_obj).seed );
+      Vector_Word16_Extended_2[ i ] = WEBRTC_SPL_RSHIFT_W16(
+          (ISACdec_obj->plcstr_obj).seed, 10 ) - 16;
+    }
+    /* Shape the low-band noise with the tail of the previous residual. */
+    plc_filterma_Fast(Vector_Word16_1, Vector_Word16_Extended_1,
+                      &(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF -
+                                                                NOISE_FILTER_LEN], (WebRtc_Word16) NOISE_FILTER_LEN,
+                      (WebRtc_Word16) FRAMESAMPLES_HALF, (WebRtc_Word16)(5),
+                      (ISACdec_obj->plcstr_obj).decayCoeffNoise, (WebRtc_Word16)(6));
+
+    maxCoeff = WebRtcSpl_MaxAbsValueW32(
+        &(ISACdec_obj->plcstr_obj).prevHP[
+            PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN], NOISE_FILTER_LEN );
+
+    /* Scale the 32-bit high-band history down into 16-bit range. */
+    rshift = 0;
+    while( maxCoeff > WEBRTC_SPL_WORD16_MAX )
+    {
+      maxCoeff = WEBRTC_SPL_RSHIFT_W32(maxCoeff, 1);
+      rshift++;
+    }
+    for( i = 0; i < NOISE_FILTER_LEN; i++ ) {
+      Vector_Word16_1[ FRAMESAMPLES_HALF - NOISE_FILTER_LEN + i] =
+          (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+              (ISACdec_obj->plcstr_obj).prevHP[
+                  PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN + i], rshift);
+    }
+    /* Shape the high-band noise with the scaled high-band history and
+     * keep the decay level reached for the next concealed frame. */
+    (ISACdec_obj->plcstr_obj).decayCoeffNoise = plc_filterma_Fast(
+        Vector_Word16_2,
+        Vector_Word16_Extended_2,
+        &Vector_Word16_1[FRAMESAMPLES_HALF - NOISE_FILTER_LEN],
+        (WebRtc_Word16) NOISE_FILTER_LEN,
+        (WebRtc_Word16) FRAMESAMPLES_HALF,
+        (WebRtc_Word16) (5),
+        (ISACdec_obj->plcstr_obj).decayCoeffNoise,
+        (WebRtc_Word16) (7) );
+
+    /* Undo the scaling in the 32-bit high-band output vector. */
+    for( i = 0; i < FRAMESAMPLES_HALF; i++ )
+      Vector_Word32_2[i] = WEBRTC_SPL_LSHIFT_W32(
+          (WebRtc_Word32)Vector_Word16_Extended_2[i], rshift );
+
+    Vector_Word16_1 = Vector_Word16_Extended_1;
+  }
+  else
+  {
+    if( (ISACdec_obj->plcstr_obj).A == 0 )
+    {
+      /* ------ Periodic Vector ---                                */
+      for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ )
+      {
+        /* --- Lowpass                                               */
+        pLP = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+            stretchPitchLP[(ISACdec_obj->plcstr_obj).pitchIndex],
+            (ISACdec_obj->plcstr_obj).decayCoeffPriodic, 15 );
+
+        /* --- Highpass                                              */
+        pHP = (WebRtc_Word32)WEBRTC_SPL_MUL_16_32_RSFT15(
+            (ISACdec_obj->plcstr_obj).decayCoeffPriodic,
+            (ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 -
+                                             (ISACdec_obj->plcstr_obj).stretchLag +
+                                             (ISACdec_obj->plcstr_obj).pitchIndex] );
+
+        /* --- lower the multiplier (more decay at next sample) --- */
+        (ISACdec_obj->plcstr_obj).decayCoeffPriodic -= (myDecayRate);
+        if( (ISACdec_obj->plcstr_obj).decayCoeffPriodic < 0 )
+          (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 0;
+
+        (ISACdec_obj->plcstr_obj).pitchIndex++;
+
+        /* End of a pitch cycle: alternate the lag between lag0 and
+         * lag0+1, re-stretch the stored cycles, and over 3 cycles
+         * cross-fade from the last cycle toward the matched older one. */
+        if( (ISACdec_obj->plcstr_obj).pitchIndex ==
+            (ISACdec_obj->plcstr_obj).stretchLag )
+        {
+          (ISACdec_obj->plcstr_obj).pitchIndex = 0;
+          (ISACdec_obj->plcstr_obj).pitchCycles++;
+
+          if( (ISACdec_obj->plcstr_obj).stretchLag != (lag0 + 1) )
+          {
+            (ISACdec_obj->plcstr_obj).stretchLag = lag0 + 1;
+          }
+          else
+          {
+            (ISACdec_obj->plcstr_obj).stretchLag = lag0;
+          }
+
+          (ISACdec_obj->plcstr_obj).stretchLag = (
+              (ISACdec_obj->plcstr_obj).stretchLag > PITCH_MAX_LAG
+                                                  )? (PITCH_MAX_LAG):(ISACdec_obj->plcstr_obj).stretchLag;
+
+          LinearResampler( (ISACdec_obj->plcstr_obj).lastPitchLP,
+                           stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag );
+
+          LinearResampler( (ISACdec_obj->plcstr_obj).prevPitchLP,
+                           stretchPitchLP1, lag0, (ISACdec_obj->plcstr_obj).stretchLag );
+
+          switch( (ISACdec_obj->plcstr_obj).pitchCycles )
+          {
+            case 1:
+              {
+                for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
+                {
+                  stretchPitchLP[k] = (WebRtc_Word16)((
+                      (WebRtc_Word32)stretchPitchLP[k]* 3 +
+                      (WebRtc_Word32)stretchPitchLP1[k])>>2);
+                }
+                break;
+              }
+            case 2:
+              {
+                for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
+                {
+                  stretchPitchLP[k] = (WebRtc_Word16)((
+                      (WebRtc_Word32)stretchPitchLP[k] +
+                      (WebRtc_Word32)stretchPitchLP1[k] )>>1);
+                }
+                break;
+              }
+            case 3:
+              {
+                for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
+                {
+                  stretchPitchLP[k] = (WebRtc_Word16)((stretchPitchLP[k] +
+                                                       (WebRtc_Word32)stretchPitchLP1[k]*3 )>>2);
+                }
+                break;
+              }
+          }
+
+          if( (ISACdec_obj->plcstr_obj).pitchCycles == 3 )
+          {
+            myDecayRate += 35; //(myDecayRate>>1);
+            (ISACdec_obj->plcstr_obj).pitchCycles = 0;
+          }
+
+        }
+
+        /* ------ Sum the noisy and periodic signals  ------ */
+        Vector_Word16_1[i] = pLP;
+        Vector_Word32_2[i] = pHP;
+      }
+    }
+    else
+    {
+      /* Mixed excitation: weighted sum of noise (weight A) and the
+       * periodic component (weight B), per band. */
+      for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ )
+      {
+
+        (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND(
+            (ISACdec_obj->plcstr_obj).seed );
+
+        noise1 = WEBRTC_SPL_RSHIFT_W16(
+            (ISACdec_obj->plcstr_obj).seed, 10 ) - 16;
+
+        nLP = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+            (WebRtc_Word16)((noise1)*(ISACdec_obj->plcstr_obj).std),
+            (ISACdec_obj->plcstr_obj).decayCoeffNoise, 15 );
+
+        /* --- Highpass                                              */
+        (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND(
+            (ISACdec_obj->plcstr_obj).seed );
+        noise1 = WEBRTC_SPL_RSHIFT_W16(
+            (ISACdec_obj->plcstr_obj).seed, 11 ) - 8;
+
+        nHP = (WebRtc_Word32)WEBRTC_SPL_MUL_16_32_RSFT15(
+            (ISACdec_obj->plcstr_obj).decayCoeffNoise,
+            (WebRtc_Word32)(noise1*(ISACdec_obj->plcstr_obj).std) );
+
+        /* --- lower the multiplier (more decay at next sample) --- */
+        (ISACdec_obj->plcstr_obj).decayCoeffNoise -= (myDecayRate);
+        if( (ISACdec_obj->plcstr_obj).decayCoeffNoise < 0 )
+          (ISACdec_obj->plcstr_obj).decayCoeffNoise = 0;
+
+        /* ------ Periodic Vector ---                                */
+        /* --- Lowpass                                               */
+        pLP = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+            stretchPitchLP[(ISACdec_obj->plcstr_obj).pitchIndex],
+            (ISACdec_obj->plcstr_obj).decayCoeffPriodic, 15 );
+
+        /* --- Highpass                                              */
+        pHP = (WebRtc_Word32)WEBRTC_SPL_MUL_16_32_RSFT15(
+            (ISACdec_obj->plcstr_obj).decayCoeffPriodic,
+            (ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 -
+                                             (ISACdec_obj->plcstr_obj).stretchLag +
+                                             (ISACdec_obj->plcstr_obj).pitchIndex] );
+
+        /* --- lower the multiplier (more decay at next sample) --- */
+        (ISACdec_obj->plcstr_obj).decayCoeffPriodic -= (myDecayRate);
+        if( (ISACdec_obj->plcstr_obj).decayCoeffPriodic < 0 )
+        {
+          (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 0;
+        }
+
+        /* ------ Weighting the noisy and periodic vectors -------   */
+        wNoisyLP = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT(
+            (ISACdec_obj->plcstr_obj).A, nLP, 15 ) );
+        wNoisyHP = (WebRtc_Word32)(WEBRTC_SPL_MUL_16_32_RSFT15(
+            (ISACdec_obj->plcstr_obj).A, (nHP) ) );
+
+        wPriodicLP = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT(
+            (ISACdec_obj->plcstr_obj).B, pLP, 15));
+        wPriodicHP = (WebRtc_Word32)(WEBRTC_SPL_MUL_16_32_RSFT15(
+            (ISACdec_obj->plcstr_obj).B, pHP));
+
+        (ISACdec_obj->plcstr_obj).pitchIndex++;
+
+        /* End of a pitch cycle: same lag alternation and cycle
+         * cross-fade as in the purely periodic branch above. */
+        if((ISACdec_obj->plcstr_obj).pitchIndex ==
+           (ISACdec_obj->plcstr_obj).stretchLag)
+        {
+          (ISACdec_obj->plcstr_obj).pitchIndex = 0;
+          (ISACdec_obj->plcstr_obj).pitchCycles++;
+
+          if( (ISACdec_obj->plcstr_obj).stretchLag != (lag0 + 1) )
+            (ISACdec_obj->plcstr_obj).stretchLag = lag0 + 1;
+          else
+            (ISACdec_obj->plcstr_obj).stretchLag = lag0;
+
+          (ISACdec_obj->plcstr_obj).stretchLag = (
+              (ISACdec_obj->plcstr_obj).stretchLag > PITCH_MAX_LAG
+                                                  )? (PITCH_MAX_LAG):(ISACdec_obj->plcstr_obj).stretchLag;
+          LinearResampler(
+              (ISACdec_obj->plcstr_obj).lastPitchLP,
+              stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag );
+
+          LinearResampler((ISACdec_obj->plcstr_obj).prevPitchLP,
+                          stretchPitchLP1, lag0, (ISACdec_obj->plcstr_obj).stretchLag );
+
+          switch((ISACdec_obj->plcstr_obj).pitchCycles)
+          {
+            case 1:
+              {
+                for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
+                {
+                  stretchPitchLP[k] = (WebRtc_Word16)((
+                      (WebRtc_Word32)stretchPitchLP[k]* 3 +
+                      (WebRtc_Word32)stretchPitchLP1[k] )>>2);
+                }
+                break;
+              }
+            case 2:
+              {
+                for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
+                {
+                  stretchPitchLP[k] = (WebRtc_Word16)((
+                      (WebRtc_Word32)stretchPitchLP[k] +
+                      (WebRtc_Word32)stretchPitchLP1[k])>>1);
+                }
+                break;
+              }
+            case 3:
+              {
+                for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
+                {
+                  stretchPitchLP[k] = (WebRtc_Word16)(
+                      (stretchPitchLP[k] +
+                       (WebRtc_Word32)stretchPitchLP1[k]*3 )>>2);
+                }
+                break;
+              }
+          }
+
+          if( (ISACdec_obj->plcstr_obj).pitchCycles == 3 )
+          {
+            myDecayRate += 55; //(myDecayRate>>1);
+            (ISACdec_obj->plcstr_obj).pitchCycles = 0;
+          }
+        }
+
+        /* ------ Sum the noisy and periodic signals  ------ */
+        Vector_Word16_1[i] = (WebRtc_Word16)WEBRTC_SPL_ADD_SAT_W16(
+            wNoisyLP, wPriodicLP );
+        Vector_Word32_2[i] = (WebRtc_Word32)WEBRTC_SPL_ADD_SAT_W32(
+            wNoisyHP, wPriodicHP );
+      }
+    }
+  }
+  /* ----------------- residual signal is reconstructed ------------------ */
+
+  k = (ISACdec_obj->plcstr_obj).pitchIndex;
+  /* --- Write one pitch cycle for recovery block --- */
+
+  for( i = 0; i < RECOVERY_OVERLAP; i++ )
+  {
+    (ISACdec_obj->plcstr_obj).overlapLP[i] = (WebRtc_Word16)(
+        WEBRTC_SPL_MUL_16_16_RSFT(stretchPitchLP[k],
+                                  (ISACdec_obj->plcstr_obj).decayCoeffPriodic, 15) );
+    k = ( k < ((ISACdec_obj->plcstr_obj).stretchLag - 1) )? (k+1):0;
+  }
+
+  (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 = (ISACdec_obj->plcstr_obj).stretchLag << 7;
+
+
+  /* --- Inverse Pitch Filter --- */
+  WebRtcIsacfix_PitchFilter(Vector_Word16_1, Vector_Word16_2,
+                            &ISACdec_obj->pitchfiltstr_obj, pitchLags_Q7, pitchGains_Q12, 4);
+
+  /* reduce gain to compensate for pitch enhancer */
+  /* gain = 1.0f - 0.45f * AvgPitchGain; */
+  tmp32a = WEBRTC_SPL_MUL_16_16_RSFT((ISACdec_obj->plcstr_obj).AvgPitchGain_Q12,
+                                     29, 0); // Q18
+  tmp32b = 262144 - tmp32a;  // Q18
+  gainQ13 = (WebRtc_Word16) (tmp32b >> 5); // Q13
+
+  /* perceptual post-filtering (using normalized lattice filter) */
+  for (k = 0; k < FRAMESAMPLES_HALF; k++)
+    Vector_Word32_1[k] = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(
+        Vector_Word16_2[k], gainQ13) << 3; // Q25
+
+
+  WebRtcIsacfix_NormLatticeFilterAr(ORDERLO,
+                                    (ISACdec_obj->maskfiltstr_obj).PostStateLoGQ0,
+                                    Vector_Word32_1, lofilt_coefQ15, gain_lo_hiQ17, 0, Vector_Word16_1);
+
+  WebRtcIsacfix_NormLatticeFilterAr(ORDERHI,
+                                    (ISACdec_obj->maskfiltstr_obj).PostStateHiGQ0,
+                                    Vector_Word32_2, hifilt_coefQ15, gain_lo_hiQ17, 1, Vector_Word16_2);
+
+  /* recombine the 2 bands */
+
+  /* Form the polyphase signals, and compensate for DC offset */
+  for (k=0;k<FRAMESAMPLES_HALF;k++)
+  {
+    /* Construct a new upper channel signal*/
+    tmp_1 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(
+                                           ((WebRtc_Word32)Vector_Word16_1[k]+Vector_Word16_2[k] + 1));
+    /* Construct a new lower channel signal*/
+    tmp_2 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(
+                                           ((WebRtc_Word32)Vector_Word16_1[k]-Vector_Word16_2[k]));
+    Vector_Word16_1[k] = tmp_1;
+    Vector_Word16_2[k] = tmp_2;
+  }
+
+
+  /* Upsample and combine the two bands into the output frame. */
+  WebRtcIsacfix_FilterAndCombine1(Vector_Word16_1,
+                                  Vector_Word16_2, signal_out16, &ISACdec_obj->postfiltbankstr_obj);
+
+  /* Remember that this frame was concealed. */
+  (ISACdec_obj->plcstr_obj).used = PLC_WAS_USED;
+  *current_framesamples = 480;
+
+  return len;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/encode.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/encode.c
new file mode 100644
index 0000000..cb531e5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/encode.c
@@ -0,0 +1,626 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * encode.c
+ *
+ * Encoding function for the iSAC coder.
+ *
+ */
+
+#include "arith_routins.h"
+#include "bandwidth_estimator.h"
+#include "codec.h"
+#include "pitch_gain_tables.h"
+#include "pitch_lag_tables.h"
+#include "entropy_coding.h"
+#include "lpc_tables.h"
+#include "lpc_masking_model.h"
+#include "pitch_estimator.h"
+#include "structs.h"
+#include <stdio.h>
+
+
+/* Encode 10 ms of input speech; once a full frame (30 or 60 ms) has been
+ * buffered, produce the iSAC bitstream for that frame.
+ *
+ * Input:
+ *   - in               : 10 ms of input speech samples
+ *   - ISACenc_obj      : encoder instance (buffers, filter states, bitstream)
+ *   - bw_estimatordata : bandwidth-estimator state (read when CodingMode == 0)
+ *   - CodingMode       : 0 = channel-adaptive mode, otherwise instantaneous
+ *                        (user-controlled rate) mode
+ *
+ * Return value         : 0 when more input is needed (frame not yet complete,
+ *                        or the first 30 ms half of a 60 ms frame was just
+ *                        encoded), the number of bytes in the bitstream when
+ *                        a frame was completed, or a negative error code.
+ */
+int WebRtcIsacfix_EncodeImpl(WebRtc_Word16      *in,
+                         ISACFIX_EncInst_t  *ISACenc_obj,
+                         BwEstimatorstr      *bw_estimatordata,
+                         WebRtc_Word16         CodingMode)
+{
+  WebRtc_Word16 stream_length = 0;
+  WebRtc_Word16 usefulstr_len = 0;
+  int k;
+  WebRtc_Word16 BWno;
+
+  WebRtc_Word16 lofilt_coefQ15[(ORDERLO)*SUBFRAMES];
+  WebRtc_Word16 hifilt_coefQ15[(ORDERHI)*SUBFRAMES];
+  WebRtc_Word32 gain_lo_hiQ17[2*SUBFRAMES];
+
+  /* Scratch buffers; LPandHP is reused for several intermediate signals
+     (see the "LPandHP = ..." comments at each use). */
+  WebRtc_Word16 LPandHP[FRAMESAMPLES/2 + QLOOKAHEAD];
+  WebRtc_Word16 LP16a[FRAMESAMPLES/2 + QLOOKAHEAD];
+  WebRtc_Word16 HP16a[FRAMESAMPLES/2 + QLOOKAHEAD];
+
+  WebRtc_Word16 PitchLags_Q7[PITCH_SUBFRAMES];
+  WebRtc_Word16 PitchGains_Q12[PITCH_SUBFRAMES];
+  WebRtc_Word16 AvgPitchGain_Q12;
+
+  WebRtc_Word16 frame_mode; /* 0 for 30ms, 1 for 60ms */
+  WebRtc_Word16 processed_samples;
+  int status;
+
+  WebRtc_Word32 bits_gainsQ11;
+  WebRtc_Word16 MinBytes;
+  WebRtc_Word16 bmodel;
+
+  /* Saved bitstream state used to rewind and re-encode at a lower rate
+     when the payload exceeds its size limit. */
+  transcode_obj transcodingParam;
+  WebRtc_Word16 payloadLimitBytes;
+  WebRtc_Word16 arithLenBeforeEncodingDFT;
+  WebRtc_Word16 iterCntr;
+
+  /* copy new frame length and bottle neck rate only for the first 10 ms data */
+  if (ISACenc_obj->buffer_index == 0) {
+    /* set the framelength for the next packet */
+    ISACenc_obj->current_framesamples = ISACenc_obj->new_framelength;
+  }
+
+  frame_mode = ISACenc_obj->current_framesamples/MAX_FRAMESAMPLES; /* 0 (30 ms) or 1 (60 ms)  */
+  processed_samples = ISACenc_obj->current_framesamples/(frame_mode+1); /* 480 (30, 60 ms) */
+
+  /* buffer speech samples (by 10ms packet) until the framelength is reached (30 or 60 ms) */
+  /**************************************************************************************/
+  /* fill the buffer with 10ms input data */
+  for(k=0; k<FRAMESAMPLES_10ms; k++) {
+    ISACenc_obj->data_buffer_fix[k + ISACenc_obj->buffer_index] = in[k];
+  }
+  /* if buffersize is not equal to current framesize, and end of file is not reached yet, */
+  /* increase index and go back to main to get more speech samples */
+  if (ISACenc_obj->buffer_index + FRAMESAMPLES_10ms != processed_samples) {
+    ISACenc_obj->buffer_index = ISACenc_obj->buffer_index + FRAMESAMPLES_10ms;
+    return 0;
+  }
+  /* if buffer reached the right size, reset index and continue with encoding the frame */
+  ISACenc_obj->buffer_index = 0;
+
+  /* end of buffer function */
+  /**************************/
+
+  /* encoding */
+  /************/
+
+  if (frame_mode == 0 || ISACenc_obj->frame_nb == 0 )
+  {
+    /* reset bitstream */
+    ISACenc_obj->bitstr_obj.W_upper = 0xFFFFFFFF;
+    ISACenc_obj->bitstr_obj.streamval = 0;
+    ISACenc_obj->bitstr_obj.stream_index = 0;
+    ISACenc_obj->bitstr_obj.full = 1;
+
+    if (CodingMode == 0) {
+      /* Adaptive mode: take target rate and delay from the BW estimator. */
+      ISACenc_obj->BottleNeck =  WebRtcIsacfix_GetUplinkBandwidth(bw_estimatordata);
+      ISACenc_obj->MaxDelay =  WebRtcIsacfix_GetUplinkMaxDelay(bw_estimatordata);
+    }
+    if (CodingMode == 0 && frame_mode == 0 && (ISACenc_obj->enforceFrameSize == 0)) {
+      ISACenc_obj->new_framelength = WebRtcIsacfix_GetNewFrameLength(ISACenc_obj->BottleNeck,
+                                                                     ISACenc_obj->current_framesamples);
+    }
+
+    // multiply the bottleneck by 0.88 before computing SNR, 0.88 is tuned by experimenting on TIMIT
+    // 901/1024 is 0.87988281250000
+    ISACenc_obj->s2nr = WebRtcIsacfix_GetSnr((WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ISACenc_obj->BottleNeck, 901, 10),
+                                             ISACenc_obj->current_framesamples);
+
+    /* encode frame length */
+    status = WebRtcIsacfix_EncodeFrameLen(ISACenc_obj->current_framesamples, &ISACenc_obj->bitstr_obj);
+    if (status < 0)
+    {
+      /* Wrong frame size */
+      if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+      {
+        // If this is the second 30ms of a 60ms frame reset this such that in the next call
+        // encoder starts fresh.
+        ISACenc_obj->frame_nb = 0;
+      }
+      return status;
+    }
+
+    /* Save framelength for multiple packets memory */
+    if (ISACenc_obj->SaveEnc_ptr != NULL) {
+      (ISACenc_obj->SaveEnc_ptr)->framelength=ISACenc_obj->current_framesamples;
+    }
+
+    /* bandwidth estimation and coding */
+    BWno = WebRtcIsacfix_GetDownlinkBwIndexImpl(bw_estimatordata);
+    status = WebRtcIsacfix_EncodeReceiveBandwidth(&BWno, &ISACenc_obj->bitstr_obj);
+    if (status < 0)
+    {
+      if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+      {
+        // If this is the second 30ms of a 60ms frame reset this such that in the next call
+        // encoder starts fresh.
+        ISACenc_obj->frame_nb = 0;
+      }
+      return status;
+    }
+  }
+
+  /* split signal in two bands */
+  WebRtcIsacfix_SplitAndFilter1(ISACenc_obj->data_buffer_fix, LP16a, HP16a, &ISACenc_obj->prefiltbankstr_obj );
+
+  /* estimate pitch parameters and pitch-filter lookahead signal */
+  WebRtcIsacfix_PitchAnalysis(LP16a+QLOOKAHEAD, LPandHP,
+                              &ISACenc_obj->pitchanalysisstr_obj,  PitchLags_Q7, PitchGains_Q12); /* LPandHP = LP_lookahead_pfQ0, */
+
+  /* Set where to store data in multiple packets memory */
+  if (ISACenc_obj->SaveEnc_ptr != NULL) {
+    if (frame_mode == 0 || ISACenc_obj->frame_nb == 0)
+    {
+      (ISACenc_obj->SaveEnc_ptr)->startIdx = 0;
+    }
+    else
+    {
+      (ISACenc_obj->SaveEnc_ptr)->startIdx = 1;
+    }
+  }
+
+  /* quantize & encode pitch parameters */
+  status = WebRtcIsacfix_EncodePitchGain(PitchGains_Q12, &ISACenc_obj->bitstr_obj,  ISACenc_obj->SaveEnc_ptr);
+  if (status < 0)
+  {
+    if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+    {
+      // If this is the second 30ms of a 60ms frame reset this such that in the next call
+      // encoder starts fresh.
+      ISACenc_obj->frame_nb = 0;
+    }
+    return status;
+  }
+  status = WebRtcIsacfix_EncodePitchLag(PitchLags_Q7 , PitchGains_Q12, &ISACenc_obj->bitstr_obj,  ISACenc_obj->SaveEnc_ptr);
+  if (status < 0)
+  {
+    if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+    {
+      // If this is the second 30ms of a 60ms frame reset this such that in the next call
+      // encoder starts fresh.
+      ISACenc_obj->frame_nb = 0;
+    }
+    return status;
+  }
+  /* Average of the 4 subframe gains (divide-by-4 via right shift). */
+  AvgPitchGain_Q12 = WEBRTC_SPL_RSHIFT_W32(PitchGains_Q12[0] + PitchGains_Q12[1] + PitchGains_Q12[2] + PitchGains_Q12[3], 2);
+
+  /* find coefficients for perceptual pre-filters */
+  WebRtcIsacfix_GetLpcCoef(LPandHP, HP16a+QLOOKAHEAD, &ISACenc_obj->maskfiltstr_obj,
+                           ISACenc_obj->s2nr, PitchGains_Q12,
+                           gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15); /*LPandHP = LP_lookahead_pfQ0*/
+
+  // record LPC Gains for possible bit-rate reduction
+  for(k = 0; k < KLT_ORDER_GAIN; k++)
+  {
+    transcodingParam.lpcGains[k] = gain_lo_hiQ17[k];
+  }
+
+  /* code LPC model and shape - gains not quantized yet */
+  status = WebRtcIsacfix_EncodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15,
+                                   &bmodel, &bits_gainsQ11, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr, &transcodingParam);
+  if (status < 0)
+  {
+    if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+    {
+      // If this is the second 30ms of a 60ms frame reset this such that in the next call
+      // encoder starts fresh.
+      ISACenc_obj->frame_nb = 0;
+    }
+    return status;
+  }
+  /* Stream length in bytes before encoding the DFT coefficients
+     (stream words are 16 bits; (1-full) accounts for a half-filled word). */
+  arithLenBeforeEncodingDFT = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full);
+
+  /* low-band filtering */
+  WebRtcIsacfix_NormLatticeFilterMa(ORDERLO, ISACenc_obj->maskfiltstr_obj.PreStateLoGQ15,
+                                    LP16a, lofilt_coefQ15, gain_lo_hiQ17, 0, LPandHP);/* LPandHP = LP16b */
+
+  /* pitch filter */
+  WebRtcIsacfix_PitchFilter(LPandHP, LP16a, &ISACenc_obj->pitchfiltstr_obj, PitchLags_Q7, PitchGains_Q12, 1);/* LPandHP = LP16b */
+
+  /* high-band filtering */
+  WebRtcIsacfix_NormLatticeFilterMa(ORDERHI, ISACenc_obj->maskfiltstr_obj.PreStateHiGQ15,
+                                    HP16a, hifilt_coefQ15, gain_lo_hiQ17, 1, LPandHP);/*LPandHP = HP16b*/
+
+  /* transform */
+  WebRtcIsacfix_Time2Spec(LP16a, LPandHP, LP16a, LPandHP); /*LPandHP = HP16b*/
+
+  /* Save data for multiple packets memory */
+  if (ISACenc_obj->SaveEnc_ptr != NULL) {
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      (ISACenc_obj->SaveEnc_ptr)->fre[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LP16a[k];
+      (ISACenc_obj->SaveEnc_ptr)->fim[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LPandHP[k];
+    }
+    (ISACenc_obj->SaveEnc_ptr)->AvgPitchGain[(ISACenc_obj->SaveEnc_ptr)->startIdx] = AvgPitchGain_Q12;
+  }
+
+  /* quantization and lossless coding */
+  status = WebRtcIsacfix_EncodeSpec(LP16a, LPandHP, &ISACenc_obj->bitstr_obj, AvgPitchGain_Q12);
+  if((status <= -1) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) /*LPandHP = HP16b*/
+  {
+    if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+    {
+      // If this is the second 30ms of a 60ms frame reset this such that in the next call
+      // encoder starts fresh.
+      ISACenc_obj->frame_nb = 0;
+    }
+    return status;
+  }
+
+  if((frame_mode == 1) && (ISACenc_obj->frame_nb == 0))
+  {
+    // it is a 60ms and we are in the first 30ms
+    // then the limit at this point should be half of the assigned value
+    payloadLimitBytes = ISACenc_obj->payloadLimitBytes60 >> 1;
+  }
+  else if (frame_mode == 0)
+  {
+    // it is a 30ms frame
+    payloadLimitBytes = (ISACenc_obj->payloadLimitBytes30) - 3;
+  }
+  else
+  {
+    // this is the second half of a 60ms frame.
+    payloadLimitBytes = ISACenc_obj->payloadLimitBytes60 - 3; // subtract 3 because termination process may add 3 bytes
+  }
+
+  /* Rate-reduction loop: while the payload exceeds its limit (or the
+     bitstream overflowed), rewind to the pre-LPC-gain state, scale down
+     the spectrum and gains, and re-encode. */
+  iterCntr = 0;
+  while((((ISACenc_obj->bitstr_obj.stream_index) << 1) > payloadLimitBytes) ||
+        (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH))
+  {
+    WebRtc_Word16 arithLenDFTByte;
+    WebRtc_Word16 bytesLeftQ5;
+    WebRtc_Word16 ratioQ5[8] = {0, 6, 9, 12, 16, 19, 22, 25};
+
+    // According to experiments on TIMIT the following is proper for audio, but it is not aggressive enough for tonal inputs
+    // such as DTMF, sweep-sine, ...
+    //
+    // (0.55 - (0.8 - ratio[i]/32) * 5 / 6) * 2^14
+    // WebRtc_Word16 scaleQ14[8] = {0, 648, 1928, 3208, 4915, 6195, 7475, 8755};
+
+
+    // This is a super-aggressive scaling that passed the tests (tonal inputs) with one iteration; for a payload limit
+    // of 120 (32kbps bottleneck), the number of frames that needed a rate-reduction was 58403
+    //
+    WebRtc_Word16 scaleQ14[8] = {0, 348, 828, 1408, 2015, 3195, 3500, 3500};
+    WebRtc_Word16 idx;
+
+    if(iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION)
+    {
+      // We were not able to limit the payload size
+
+      if((frame_mode == 1) && (ISACenc_obj->frame_nb == 0))
+      {
+        // This was the first 30ms of a 60ms frame. Although the payload is larger than it
+        // should be, we let the second 30ms be encoded. Maybe together we won't exceed
+        // the limit.
+        ISACenc_obj->frame_nb = 1;
+        return 0;
+      }
+      else if((frame_mode == 1) && (ISACenc_obj->frame_nb == 1))
+      {
+        ISACenc_obj->frame_nb = 0;
+      }
+
+      if(status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)
+      {
+        return -ISAC_PAYLOAD_LARGER_THAN_LIMIT;
+      }
+      else
+      {
+        return status;
+      }
+    }
+    if(status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)
+    {
+      arithLenDFTByte = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full) - arithLenBeforeEncodingDFT;
+      bytesLeftQ5 = (payloadLimitBytes - arithLenBeforeEncodingDFT) << 5;
+
+      // bytesLeft / arithLenDFTBytes indicates how much scaling is required a rough estimate (aggressive)
+      // scale = 0.55 - (0.8 - bytesLeft / arithLenDFTBytes) * 5 / 6
+      // bytesLeft / arithLenDFTBytes below 0.2 will have a scale of zero and above 0.8 are treated as 0.8
+      // to avoid division we do more simplification.
+      //
+      // values of (bytesLeft / arithLenDFTBytes)*32 between ratioQ5[i] and ratioQ5[i+1] are rounded to ratioQ5[i]
+      // and the corresponding scale is chosen
+
+      // we compare bytesLeftQ5 with ratioQ5[]*arithLenDFTByte;
+      // (binary search over the 8-entry table; idx stays within 0..7)
+      idx = 4;
+      idx += (bytesLeftQ5 >= WEBRTC_SPL_MUL_16_16(ratioQ5[idx], arithLenDFTByte))? 2:-2;
+      idx += (bytesLeftQ5 >= WEBRTC_SPL_MUL_16_16(ratioQ5[idx], arithLenDFTByte))? 1:-1;
+      idx += (bytesLeftQ5 >= WEBRTC_SPL_MUL_16_16(ratioQ5[idx], arithLenDFTByte))? 0:-1;
+    }
+    else
+    {
+      // we are here because the bit-stream did not fit into the buffer; in this case the stream_index is not
+      // trustworthy, especially if this is the first 30ms of a packet. Therefore, we will go for the most
+      // aggressive case.
+      idx = 0;
+    }
+    // scale FFT coefficients to reduce the bit-rate
+    for(k = 0; k < FRAMESAMPLES_HALF; k++)
+    {
+      LP16a[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(LP16a[k], scaleQ14[idx], 14);
+      LPandHP[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(LPandHP[k], scaleQ14[idx], 14);
+    }
+
+    // Save data for multiple packets memory
+    if (ISACenc_obj->SaveEnc_ptr != NULL)
+    {
+      for(k = 0; k < FRAMESAMPLES_HALF; k++)
+      {
+        (ISACenc_obj->SaveEnc_ptr)->fre[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LP16a[k];
+        (ISACenc_obj->SaveEnc_ptr)->fim[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LPandHP[k];
+      }
+    }
+
+    // scale the unquantized LPC gains and save the scaled version for the future use
+    for(k = 0; k < KLT_ORDER_GAIN; k++)
+    {
+      gain_lo_hiQ17[k] = WEBRTC_SPL_MUL_16_32_RSFT14(scaleQ14[idx], transcodingParam.lpcGains[k]);//transcodingParam.lpcGains[k]; //
+      transcodingParam.lpcGains[k] = gain_lo_hiQ17[k];
+    }
+
+    // reset the bit-stream object to the state which it had before encoding LPC Gains
+    ISACenc_obj->bitstr_obj.full = transcodingParam.full;
+    ISACenc_obj->bitstr_obj.stream_index = transcodingParam.stream_index;
+    ISACenc_obj->bitstr_obj.streamval = transcodingParam.streamval;
+    ISACenc_obj->bitstr_obj.W_upper = transcodingParam.W_upper;
+    ISACenc_obj->bitstr_obj.stream[transcodingParam.stream_index-1] = transcodingParam.beforeLastWord;
+    ISACenc_obj->bitstr_obj.stream[transcodingParam.stream_index] = transcodingParam.lastWord;
+
+
+    // quantize and encode LPC gain
+    WebRtcIsacfix_EstCodeLpcGain(gain_lo_hiQ17, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr);
+    arithLenBeforeEncodingDFT = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full);
+    status = WebRtcIsacfix_EncodeSpec(LP16a, LPandHP, &ISACenc_obj->bitstr_obj, AvgPitchGain_Q12);
+    if((status <= -1) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) /*LPandHP = HP16b*/
+    {
+      if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+      {
+        // If this is the second 30ms of a 60ms frame reset this such that in the next call
+        // encoder starts fresh.
+        ISACenc_obj->frame_nb = 0;
+      }
+      return status;
+    }
+    iterCntr++;
+  }
+
+  if (frame_mode == 1 && ISACenc_obj->frame_nb == 0)
+    /* i.e. 60 ms framesize and just processed the first 30ms, */
+    /* go back to main function to buffer the other 30ms speech frame */
+  {
+    ISACenc_obj->frame_nb = 1;
+    return 0;
+  }
+  else if (frame_mode == 1 && ISACenc_obj->frame_nb == 1)
+  {
+    ISACenc_obj->frame_nb = 0;
+    /* also update the framelength for next packet, in Adaptive mode only */
+    if (CodingMode == 0 && (ISACenc_obj->enforceFrameSize == 0)) {
+      ISACenc_obj->new_framelength = WebRtcIsacfix_GetNewFrameLength(ISACenc_obj->BottleNeck,
+                                                                     ISACenc_obj->current_framesamples);
+    }
+  }
+
+
+  /* complete arithmetic coding */
+  stream_length = WebRtcIsacfix_EncTerminate(&ISACenc_obj->bitstr_obj);
+  /* can this be negative? (NOTE(review): EncTerminate's sign behavior not
+     visible from here — confirm against its definition) */
+
+  if(CodingMode == 0)
+  {
+
+    /* update rate model and get minimum number of bytes in this packet */
+    MinBytes = WebRtcIsacfix_GetMinBytes(&ISACenc_obj->rate_data_obj, (WebRtc_Word16) stream_length,
+                                         ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck, ISACenc_obj->MaxDelay);
+
+    /* if bitstream is too short, add garbage at the end */
+
+    /* Store length of coded data */
+    usefulstr_len = stream_length;
+
+    /* Make sure MinBytes does not exceed packet size limit */
+    if ((ISACenc_obj->frame_nb == 0) && (MinBytes > ISACenc_obj->payloadLimitBytes30)) {
+      MinBytes = ISACenc_obj->payloadLimitBytes30;
+    } else if ((ISACenc_obj->frame_nb == 1) && (MinBytes > ISACenc_obj->payloadLimitBytes60)) {
+      MinBytes = ISACenc_obj->payloadLimitBytes60;
+    }
+
+    /* Make sure we don't allow more than 255 bytes of garbage data.
+       We store the length of the garbage data in 8 bits in the bitstream,
+       255 is the max garbage length we can signal using 8 bits. */
+    if( MinBytes > usefulstr_len + 255 ) {
+      MinBytes = usefulstr_len + 255;
+    }
+
+    /* Save data for creation of multiple bitstreams */
+    if (ISACenc_obj->SaveEnc_ptr != NULL) {
+      (ISACenc_obj->SaveEnc_ptr)->minBytes = MinBytes;
+    }
+
+    /* Pad the stream with pseudo-random garbage bytes up to MinBytes;
+       odd byte positions fill the low half of a 16-bit stream word,
+       even positions start a new word (high half). */
+    while (stream_length < MinBytes)
+    {
+      if (stream_length & 0x0001){
+        ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
+        ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] |= (WebRtc_UWord16)(ISACenc_obj->bitstr_seed & 0xFF);
+      } else {
+        ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
+        ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] = WEBRTC_SPL_LSHIFT_U16(ISACenc_obj->bitstr_seed, 8);
+      }
+      stream_length++;
+    }
+
+    /* to get the real stream_length, without garbage */
+    if (usefulstr_len & 0x0001) {
+      ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0xFF00;
+      ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] += (MinBytes - usefulstr_len) & 0x00FF;
+    }
+    else {
+      ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0x00FF;
+      ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] += WEBRTC_SPL_LSHIFT_U16((MinBytes - usefulstr_len) & 0x00FF, 8);
+    }
+  }
+  else
+  {
+    /* update rate model */
+    WebRtcIsacfix_UpdateRateModel(&ISACenc_obj->rate_data_obj, (WebRtc_Word16) stream_length,
+                                  ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck);
+  }
+  return stream_length;
+}
+
+/* This function is used to create a new bitstream with new BWE.
+   The same data as previously encoded with the function WebRtcIsacfix_EncodeImpl()
+   is used. The data needed is taken from the struct, where it was stored
+   when calling the encoder.
+
+   Input:
+     - ISACenc_obj : encoder instance; must have SaveEnc memory attached
+     - BWnumber    : bandwidth estimate index to encode (valid range 0..23)
+     - scale       : if 0.0 < scale < 1.0, the stored DFT coefficients and
+                     LPC gains are rescaled to produce a lower-rate stream;
+                     otherwise the stored quantization indices are reused
+
+   Return value    : number of bytes in the bitstream, or a negative error
+                     code (-1 if no SaveEnc memory exists). */
+int WebRtcIsacfix_EncodeStoredData(ISACFIX_EncInst_t  *ISACenc_obj,
+                                   int     BWnumber,
+                                   float              scale)
+{
+  int ii;
+  int status;
+  WebRtc_Word16 BWno = BWnumber;
+  int stream_length = 0;
+
+  WebRtc_Word16 model;
+  const WebRtc_UWord16 *Q_PitchGain_cdf_ptr[1];
+  const WebRtc_UWord16 **cdf;
+  const ISAC_SaveEncData_t *SaveEnc_str;
+  WebRtc_Word32 tmpLPCcoeffs_g[KLT_ORDER_GAIN<<1];
+  WebRtc_Word16 tmpLPCindex_g[KLT_ORDER_GAIN<<1];
+  WebRtc_Word16 tmp_fre[FRAMESAMPLES];
+  WebRtc_Word16 tmp_fim[FRAMESAMPLES];
+
+  SaveEnc_str = ISACenc_obj->SaveEnc_ptr;
+
+  /* Check if SaveEnc memory exists */
+  if (SaveEnc_str == NULL) {
+    return (-1);
+  }
+
+  /* Sanity Check - possible values for BWnumber is 0 - 23 */
+  if ((BWnumber < 0) || (BWnumber > 23)) {
+    return -ISAC_RANGE_ERROR_BW_ESTIMATOR;
+  }
+
+  /* reset bitstream */
+  ISACenc_obj->bitstr_obj.W_upper = 0xFFFFFFFF;
+  ISACenc_obj->bitstr_obj.streamval = 0;
+  ISACenc_obj->bitstr_obj.stream_index = 0;
+  ISACenc_obj->bitstr_obj.full = 1;
+
+  /* encode frame length */
+  status = WebRtcIsacfix_EncodeFrameLen(SaveEnc_str->framelength, &ISACenc_obj->bitstr_obj);
+  if (status < 0) {
+    /* Wrong frame size */
+    return status;
+  }
+
+  /* encode bandwidth estimate */
+  status = WebRtcIsacfix_EncodeReceiveBandwidth(&BWno, &ISACenc_obj->bitstr_obj);
+  if (status < 0) {
+    return status;
+  }
+
+  /* Transcoding                                                 */
+  /* If scale < 1, rescale data to produce lower bitrate signal  */
+  if ((0.0 < scale) && (scale < 1.0)) {
+    /* Compensate LPC gain */
+    for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) {
+      tmpLPCcoeffs_g[ii] = (WebRtc_Word32) ((scale) * (float) SaveEnc_str->LPCcoeffs_g[ii]);
+    }
+
+    /* Scale DFT */
+    for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) {
+      tmp_fre[ii] = (WebRtc_Word16) ((scale) * (float) SaveEnc_str->fre[ii]) ;
+      tmp_fim[ii] = (WebRtc_Word16) ((scale) * (float) SaveEnc_str->fim[ii]) ;
+    }
+  } else {
+    /* No transcoding: reuse the stored quantization indices and spectrum. */
+    for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) {
+      tmpLPCindex_g[ii] =  SaveEnc_str->LPCindex_g[ii];
+    }
+
+    for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) {
+      tmp_fre[ii] = SaveEnc_str->fre[ii];
+      tmp_fim[ii] = SaveEnc_str->fim[ii];
+    }
+  }
+
+  /* Loop over number of 30 msec */
+  for (ii = 0; ii <= SaveEnc_str->startIdx; ii++)
+  {
+
+    /* encode pitch gains */
+    *Q_PitchGain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf;
+    status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &SaveEnc_str->pitchGain_index[ii],
+                                       Q_PitchGain_cdf_ptr, 1);
+    if (status < 0) {
+      return status;
+    }
+
+    /* entropy coding of quantization pitch lags */
+    /* voicing classification */
+    if (SaveEnc_str->meanGain[ii] <= 819) {
+      cdf = WebRtcIsacfix_kPitchLagPtrLo;
+    } else if (SaveEnc_str->meanGain[ii] <= 1638) {
+      cdf = WebRtcIsacfix_kPitchLagPtrMid;
+    } else {
+      cdf = WebRtcIsacfix_kPitchLagPtrHi;
+    }
+    status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj,
+                                       &SaveEnc_str->pitchIndex[PITCH_SUBFRAMES*ii], cdf, PITCH_SUBFRAMES);
+    if (status < 0) {
+      return status;
+    }
+
+    /* LPC */
+    /* entropy coding of model number */
+    model = 0;
+    status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj,  &model,
+                                       WebRtcIsacfix_kModelCdfPtr, 1);
+    if (status < 0) {
+      return status;
+    }
+
+    /* entropy coding of quantization indices - LPC shape only */
+    status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &SaveEnc_str->LPCindex_s[KLT_ORDER_SHAPE*ii],
+                                       WebRtcIsacfix_kCdfShapePtr[0], KLT_ORDER_SHAPE);
+    if (status < 0) {
+      return status;
+    }
+
+    /* If transcoding, get new LPC gain indices.
+       NOTE: the condition must match the fill condition above; with a bare
+       (scale < 1.0) test, scale <= 0.0 would read uninitialized
+       tmpLPCcoeffs_g. */
+    if ((0.0 < scale) && (scale < 1.0)) {
+      WebRtcIsacfix_TranscodeLpcCoef(&tmpLPCcoeffs_g[KLT_ORDER_GAIN*ii], &tmpLPCindex_g[KLT_ORDER_GAIN*ii]);
+    }
+
+    /* entropy coding of quantization indices - LPC gain */
+    status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &tmpLPCindex_g[KLT_ORDER_GAIN*ii],
+                                       WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN);
+    if (status < 0) {
+      return status;
+    }
+
+    /* quantization and lossless coding */
+    status = WebRtcIsacfix_EncodeSpec(&tmp_fre[ii*FRAMESAMPLES_HALF], &tmp_fim[ii*FRAMESAMPLES_HALF],
+                                      &ISACenc_obj->bitstr_obj, SaveEnc_str->AvgPitchGain[ii]);
+    if (status < 0) {
+      return status;
+    }
+  }
+
+  /* complete arithmetic coding */
+  stream_length = WebRtcIsacfix_EncTerminate(&ISACenc_obj->bitstr_obj);
+
+  return stream_length;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/entropy_coding.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/entropy_coding.c
new file mode 100644
index 0000000..0b64d83
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/entropy_coding.c
@@ -0,0 +1,2072 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * entropy_coding.c
+ *
+ * This file contains all functions used to arithmetically
+ * encode the iSAC bitstream.
+ *
+ */
+
+#include <stddef.h>
+
+#include "arith_routins.h"
+#include "spectrum_ar_model_tables.h"
+#include "pitch_gain_tables.h"
+#include "pitch_lag_tables.h"
+#include "entropy_coding.h"
+#include "lpc_tables.h"
+#include "settings.h"
+#include "signal_processing_library.h"
+
+
+/*
+  Fixed-point counterpart of lrint(): round a Q-domain value to the
+  nearest integer by adding half an LSB and shifting the Q-factor away.
+
+  FLP: (WebRtc_Word32)floor(flt+.499999999999)
+  FIP: (fixVal+roundVal)>>qDomain
+
+  where roundVal = 2^(qDomain-1) = 1<<(qDomain-1)
+*/
+static __inline WebRtc_Word32 CalcLrIntQ(WebRtc_Word32 fixVal, WebRtc_Word16 qDomain) {
+  /* Half an LSB of the given Q-domain, used as the rounding offset. */
+  const WebRtc_Word32 roundVal =
+      WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, qDomain - 1);
+
+  return WEBRTC_SPL_RSHIFT_W32(fixVal + roundVal, qDomain);
+}
+
+/*
+  __inline WebRtc_UWord32 stepwise(WebRtc_Word32 dinQ10) {
+
+  WebRtc_Word32 ind, diQ10, dtQ10;
+
+  diQ10 = dinQ10;
+  if (diQ10 < DPMIN_Q10)
+  diQ10 = DPMIN_Q10;
+  if (diQ10 >= DPMAX_Q10)
+  diQ10 = DPMAX_Q10 - 1;
+
+  dtQ10 = diQ10 - DPMIN_Q10;*/ /* Q10 + Q10 = Q10 */
+/* ind = (dtQ10 * 5) >> 10;  */ /* 2^10 / 5 = 0.2 in Q10  */
+/* Q10 -> Q0 */
+
+/* return rpointsFIX_Q10[ind];
+
+   }
+*/
+
+/* logN(x) = logN(2)*log2(x) = 0.6931*log2(x). Output in Q8. */
+/* The input argument X to logN(X) is 2^17 times higher than the
+   input floating point argument Y to log(Y), since the X value
+   is a Q17 value. This can be compensated for after the call, by
+   subtracting a value Z for each Q-step. One Q-step means that
+   X gets 2 times higher, i.e. Z = logN(2)*256 = 0.693147180559*256 =
+   177.445678 should be subtracted (since logN() returns a Q8 value).
+   For an X value in Q17, the value 177.445678*17 = 3017 should be
+   subtracted */
+/* Natural logarithm in fixed point: logN(x) = ln(2)*log2(x), output in Q8.
+   (See the Q17-compensation note above for interpreting the input.) */
+static WebRtc_Word16 CalcLogN(WebRtc_Word32 arg) {
+  WebRtc_Word16 zeros, log2, frac, logN;
+
+  /* Leading-zero count: 31-zeros is the integer part of log2(arg). */
+  zeros=WebRtcSpl_NormU32(arg);
+  /* Top mantissa bits of the normalized value approximate the fractional
+     part of log2, in Q8. */
+  frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(WEBRTC_SPL_LSHIFT_W32(arg, zeros)&0x7FFFFFFF, 23);
+  log2=(WebRtc_Word16)(WEBRTC_SPL_LSHIFT_W32(31-zeros, 8)+frac); // log2(x) in Q8
+  logN=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(log2,22713,15); //Q8*Q15 log(2) = 0.693147 = 22713 in Q15
+  logN=logN+11; //Scalar compensation which minimizes the (log(x)-logN(x))^2 error over all x.
+
+  return logN;
+}
+
+
+/*
+  expN(x) = 2^(a*x), where a = log2(e) ~= 1.442695
+
+  Input:  Q8  (WebRtc_Word16)
+  Output: Q17 (WebRtc_Word32)
+
+  a = log2(e) = log2(exp(1)) ~= 1.442695  ==>  a = 23637 in Q14 (1.442688)
+  To this value, 700 is added or subtracted in order to get an average error
+  nearer zero, instead of always same-sign.
+*/
+
+/* expN(x) = 2^(a*x) with a = log2(e); input x in Q8, result in Q17.
+   Positive and negative inputs take separate paths so the intermediate
+   2^axINT stays representable (Q0 for x>=0, Q15 reciprocal for x<0). */
+static WebRtc_Word32 CalcExpN(WebRtc_Word16 x) {
+  WebRtc_Word16 ax, axINT, axFRAC;
+  WebRtc_Word16 exp16;
+  WebRtc_Word32 exp;
+
+  if (x>=0) {
+    //  ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637-700, 14); //Q8
+    ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637, 14); //Q8
+    axINT = WEBRTC_SPL_RSHIFT_W16(ax, 8); //Q0
+    axFRAC = ax&0x00FF;
+    /* 2^axINT as an integer. */
+    exp16 = WEBRTC_SPL_LSHIFT_W32(1, axINT); //Q0
+    /* 1 + frac in Q8: linear approximation of 2^frac for frac in [0,1). */
+    axFRAC = axFRAC+256; //Q8
+    exp = WEBRTC_SPL_MUL_16_16(exp16, axFRAC); // Q0*Q8 = Q8
+    exp = WEBRTC_SPL_LSHIFT_W32(exp, 9); //Q17
+  } else {
+    //  ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637+700, 14); //Q8
+    ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637, 14); //Q8
+    ax = -ax;
+    /* ceil of |ax| integer part, so the fractional correction below stays in [0,1). */
+    axINT = 1 + WEBRTC_SPL_RSHIFT_W16(ax, 8); //Q0
+    axFRAC = 0x00FF - (ax&0x00FF);
+    /* 2^-axINT as a Q15 reciprocal. */
+    exp16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(32768, axINT); //Q15
+    axFRAC = axFRAC+256; //Q8
+    exp = WEBRTC_SPL_MUL_16_16(exp16, axFRAC); // Q15*Q8 = Q23
+    exp = WEBRTC_SPL_RSHIFT_W32(exp, 6); //Q17
+  }
+
+  return exp;
+}
+
+
+/* compute correlation from power spectrum */
+/* Compute correlation coefficients CorrQ7[0..AR_ORDER] from the power
+   spectrum PSpecQ12 (FRAMESAMPLES/4 bins) via cosine sums over
+   FRAMESAMPLES/8 points: the spectrum is folded into a symmetric part
+   (summ) and an antisymmetric part (diff); odd lags are built from diff,
+   even lags >= 2 from summ, and lag 0 is the plain sum. */
+static void CalcCorrelation(WebRtc_Word32 *PSpecQ12, WebRtc_Word32 *CorrQ7)
+{
+  WebRtc_Word32 summ[FRAMESAMPLES/8];
+  WebRtc_Word32 diff[FRAMESAMPLES/8];
+  WebRtc_Word32 sum;
+  int k, n;
+
+  /* Fold the spectrum: sum/difference of mirrored bins, rounded, >>5. */
+  for (k = 0; k < FRAMESAMPLES/8; k++) {
+    summ[k] = WEBRTC_SPL_RSHIFT_W32(PSpecQ12[k] + PSpecQ12[FRAMESAMPLES/4-1 - k] + 16, 5);
+    diff[k] = WEBRTC_SPL_RSHIFT_W32(PSpecQ12[k] - PSpecQ12[FRAMESAMPLES/4-1 - k] + 16, 5);
+  }
+
+  /* Lag 0: total energy (offset by 2 as a rounding bias). */
+  sum = 2;
+  for (n = 0; n < FRAMESAMPLES/8; n++)
+    sum += summ[n];
+  CorrQ7[0] = sum;
+
+  /* Odd lags from the antisymmetric part. */
+  for (k = 0; k < AR_ORDER; k += 2) {
+    sum = 0;
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      sum += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(WebRtcIsacfix_kCos[k][n], diff[n]) + 256, 9);
+    CorrQ7[k+1] = sum;
+  }
+
+  /* Even lags (>= 2) from the symmetric part. */
+  for (k=1; k<AR_ORDER; k+=2) {
+    sum = 0;
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      sum += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(WebRtcIsacfix_kCos[k][n], summ[n]) + 256, 9);
+    CorrQ7[k+1] = sum;
+  }
+}
+
+
+/* compute inverse AR power spectrum */
+/* Compute the inverse AR power spectrum CurveQ16 (FRAMESAMPLES/4 points)
+   from AR coefficients ARCoefQ12 and gain gainQ10: autocorrelations of the
+   coefficients (CorrQ11) are expanded onto cosine basis functions, with
+   dynamic down-shifting of large gains/correlations to avoid overflow. */
+static void CalcInvArSpec(const WebRtc_Word16 *ARCoefQ12,
+                          const WebRtc_Word32 gainQ10,
+                          WebRtc_Word32 *CurveQ16)
+{
+  WebRtc_Word32 CorrQ11[AR_ORDER+1];
+  WebRtc_Word32 sum, tmpGain;
+  WebRtc_Word32 diffQ16[FRAMESAMPLES/8];
+  const WebRtc_Word16 *CS_ptrQ9;
+  int k, n;
+  WebRtc_Word16 round, shftVal = 0, sh;
+
+  /* Zero-lag term: energy of the AR coefficients, scaled by the gain. */
+  sum = 0;
+  for (n = 0; n < AR_ORDER+1; n++)
+    sum += WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]);    /* Q24 */
+  sum = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(sum, 6), 65) + 32768, 16);    /* result in Q8 */
+  CorrQ11[0] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(sum, gainQ10) + 256, 9);
+
+  /* To avoid overflow, we shift down gainQ10 if it is large. We will not lose any precision */
+  if(gainQ10>400000){
+    tmpGain = WEBRTC_SPL_RSHIFT_W32(gainQ10, 3);
+    round = 32;
+    shftVal = 6;
+  } else {
+    tmpGain = gainQ10;
+    round = 256;
+    shftVal = 9;
+  }
+
+  /* Higher-lag autocorrelations of the AR coefficients. */
+  for (k = 1; k < AR_ORDER+1; k++) {
+    sum = 16384;
+    for (n = k; n < AR_ORDER+1; n++)
+      sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]);  /* Q24 */
+    sum = WEBRTC_SPL_RSHIFT_W32(sum, 15);
+    CorrQ11[k] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(sum, tmpGain) + round, shftVal);
+  }
+  /* Initialize the curve with the zero-lag (DC) contribution. */
+  sum = WEBRTC_SPL_LSHIFT_W32(CorrQ11[0], 7);
+  for (n = 0; n < FRAMESAMPLES/8; n++)
+    CurveQ16[n] = sum;
+
+  /* Add even-lag contributions (symmetric across the spectrum fold). */
+  for (k = 1; k < AR_ORDER; k += 2) {
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      CurveQ16[n] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(WebRtcIsacfix_kCos[k][n], CorrQ11[k+1]) + 2, 2);
+  }
+
+  CS_ptrQ9 = WebRtcIsacfix_kCos[0];
+
+  /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */
+  sh=WebRtcSpl_NormW32(CorrQ11[1]);
+  if (CorrQ11[1]==0) /* Use next correlation */
+    sh=WebRtcSpl_NormW32(CorrQ11[2]);
+
+  if (sh<9)
+    shftVal = 9 - sh;
+  else
+    shftVal = 0;
+
+  /* Odd-lag contributions (antisymmetric part), computed down-shifted. */
+  for (n = 0; n < FRAMESAMPLES/8; n++)
+    diffQ16[n] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CS_ptrQ9[n], WEBRTC_SPL_RSHIFT_W32(CorrQ11[1], shftVal)) + 2, 2);
+  for (k = 2; k < AR_ORDER; k += 2) {
+    CS_ptrQ9 = WebRtcIsacfix_kCos[k];
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      diffQ16[n] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CS_ptrQ9[n], WEBRTC_SPL_RSHIFT_W32(CorrQ11[k+1], shftVal)) + 2, 2);
+  }
+
+  /* Unfold: mirror-half gets sum minus diff, first half gets sum plus diff. */
+  for (k=0; k<FRAMESAMPLES/8; k++) {
+    CurveQ16[FRAMESAMPLES/4-1 - k] = CurveQ16[k] - WEBRTC_SPL_LSHIFT_W32(diffQ16[k], shftVal);
+    CurveQ16[k] += WEBRTC_SPL_LSHIFT_W32(diffQ16[k], shftVal);
+  }
+}
+
/*
 * Compute the inverse AR *magnitude* spectrum (square root of the power
 * spectrum) from AR coefficients and gain.
 *
 * ARCoefQ12 : AR coefficients, AR_ORDER+1 taps, Q12.
 * gainQ10   : spectral gain, Q10.
 * CurveQ8   : output, FRAMESAMPLES/4 magnitude samples in Q8.
 *
 * Works like CalcInvArSpec() (symmetric part in summQ16, asymmetric part
 * in diffQ16), then takes the square root of each power sample with a
 * Newton/Babylonian iteration: newRes = (x/res + res) / 2.
 */
static void CalcRootInvArSpec(const WebRtc_Word16 *ARCoefQ12,
                              const WebRtc_Word32 gainQ10,
                              WebRtc_UWord16 *CurveQ8)
{
  WebRtc_Word32 CorrQ11[AR_ORDER+1];  /* autocorrelation of the AR coefs, Q11 */
  WebRtc_Word32 sum, tmpGain;
  WebRtc_Word32 summQ16[FRAMESAMPLES/8];  /* symmetric part of the power curve */
  WebRtc_Word32 diffQ16[FRAMESAMPLES/8];  /* asymmetric part of the power curve */

  const WebRtc_Word16 *CS_ptrQ9;      /* pointer into the cosine tables, Q9 */
  int k, n, i;
  WebRtc_Word16 round, shftVal = 0, sh;
  WebRtc_Word32 res, in_sqrt, newRes;

  /* zero-lag term: energy of the AR coefficients */
  sum = 0;
  for (n = 0; n < AR_ORDER+1; n++)
    sum += WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]);    /* Q24 */
  sum = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(sum, 6), 65) + 32768, 16);    /* result in Q8 */
  CorrQ11[0] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(sum, gainQ10) + 256, 9);

  /* To avoid overflow, we shift down gainQ10 if it is large. We will not lose any precision */
  if(gainQ10>400000){
    tmpGain = WEBRTC_SPL_RSHIFT_W32(gainQ10, 3);
    round = 32;     /* rounding constant matching shftVal = 6 */
    shftVal = 6;
  } else {
    tmpGain = gainQ10;
    round = 256;    /* rounding constant matching shftVal = 9 */
    shftVal = 9;
  }

  /* higher-lag autocorrelation terms, scaled by the (possibly shifted) gain */
  for (k = 1; k < AR_ORDER+1; k++) {
    sum = 16384;
    for (n = k; n < AR_ORDER+1; n++)
      sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]);  /* Q24 */
    sum = WEBRTC_SPL_RSHIFT_W32(sum, 15);
    CorrQ11[k] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(sum, tmpGain) + round, shftVal);
  }
  /* initialize symmetric part with the DC (zero-lag) contribution */
  sum = WEBRTC_SPL_LSHIFT_W32(CorrQ11[0], 7);
  for (n = 0; n < FRAMESAMPLES/8; n++)
    summQ16[n] = sum;

  /* odd cosine rows feed the symmetric part */
  for (k = 1; k < (AR_ORDER); k += 2) {
    for (n = 0; n < FRAMESAMPLES/8; n++)
      summQ16[n] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(CorrQ11[k+1],WebRtcIsacfix_kCos[k][n]) + 2, 2);
  }

  CS_ptrQ9 = WebRtcIsacfix_kCos[0];

  /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */
  sh=WebRtcSpl_NormW32(CorrQ11[1]);
  if (CorrQ11[1]==0) /* Use next correlation */
    sh=WebRtcSpl_NormW32(CorrQ11[2]);

  if (sh<9)
    shftVal = 9 - sh;
  else
    shftVal = 0;

  /* even cosine rows feed the asymmetric part (pre-shifted by shftVal) */
  for (n = 0; n < FRAMESAMPLES/8; n++)
    diffQ16[n] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CS_ptrQ9[n], WEBRTC_SPL_RSHIFT_W32(CorrQ11[1], shftVal)) + 2, 2);
  for (k = 2; k < AR_ORDER; k += 2) {
    CS_ptrQ9 = WebRtcIsacfix_kCos[k];
    for (n = 0; n < FRAMESAMPLES/8; n++)
      diffQ16[n] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CS_ptrQ9[n], WEBRTC_SPL_RSHIFT_W32(CorrQ11[k+1], shftVal)) + 2, 2);
  }

  in_sqrt = summQ16[0] + WEBRTC_SPL_LSHIFT_W32(diffQ16[0], shftVal);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB)  */
  /* initial guess: a power of two near sqrt(in_sqrt); the converged result of
     each sample is reused as the starting guess for the next one */
  res = WEBRTC_SPL_LSHIFT_W32(1, WEBRTC_SPL_RSHIFT_W16(WebRtcSpl_GetSizeInBits(in_sqrt), 1));

  /* lower half: symmetric + asymmetric part */
  for (k = 0; k < FRAMESAMPLES/8; k++)
  {
    in_sqrt = summQ16[k] + WEBRTC_SPL_LSHIFT_W32(diffQ16[k], shftVal);
    i = 10;  /* cap on the number of Newton iterations */

    /* make in_sqrt positive to prohibit sqrt of negative values */
    if(in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
    do
    {
      res = newRes;
      newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
    } while (newRes != res && i-- > 0);

    CurveQ8[k] = (WebRtc_Word16)newRes;
  }
  /* mirrored upper half: symmetric - asymmetric part */
  for (k = FRAMESAMPLES/8; k < FRAMESAMPLES/4; k++) {

    in_sqrt = summQ16[FRAMESAMPLES/4-1 - k] - WEBRTC_SPL_LSHIFT_W32(diffQ16[FRAMESAMPLES/4-1 - k], shftVal);
    i = 10;  /* cap on the number of Newton iterations */

    /* make in_sqrt positive to prohibit sqrt of negative values */
    if(in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
    do
    {
      res = newRes;
      newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
    } while (newRes != res && i-- > 0);

    CurveQ8[k] = (WebRtc_Word16)newRes;
  }

}
+
+
+
+/* generate array of dither samples in Q7 */
+static void GenerateDitherQ7(WebRtc_Word16 *bufQ7,
+                             WebRtc_UWord32 seed,
+                             WebRtc_Word16 length,
+                             WebRtc_Word16 AvgPitchGain_Q12)
+{
+  int   k;
+  WebRtc_Word16 dither1_Q7, dither2_Q7, dither_gain_Q14, shft;
+
+  if (AvgPitchGain_Q12 < 614)  /* this threshold should be equal to that in decode_spec() */
+  {
+    for (k = 0; k < length-2; k += 3)
+    {
+      /* new random unsigned WebRtc_Word32 */
+      seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515;
+
+      /* fixed-point dither sample between -64 and 64 (Q7) */
+      dither1_Q7 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)seed + 16777216, 25); // * 128/4294967295
+
+      /* new random unsigned WebRtc_Word32 */
+      seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515;
+
+      /* fixed-point dither sample between -64 and 64 */
+      dither2_Q7 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(seed + 16777216, 25);
+
+      shft = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 15);
+      if (shft < 5)
+      {
+        bufQ7[k]   = dither1_Q7;
+        bufQ7[k+1] = dither2_Q7;
+        bufQ7[k+2] = 0;
+      }
+      else if (shft < 10)
+      {
+        bufQ7[k]   = dither1_Q7;
+        bufQ7[k+1] = 0;
+        bufQ7[k+2] = dither2_Q7;
+      }
+      else
+      {
+        bufQ7[k]   = 0;
+        bufQ7[k+1] = dither1_Q7;
+        bufQ7[k+2] = dither2_Q7;
+      }
+    }
+  }
+  else
+  {
+    dither_gain_Q14 = (WebRtc_Word16)(22528 - WEBRTC_SPL_MUL(10, AvgPitchGain_Q12));
+
+    /* dither on half of the coefficients */
+    for (k = 0; k < length-1; k += 2)
+    {
+      /* new random unsigned WebRtc_Word32 */
+      seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515;
+
+      /* fixed-point dither sample between -64 and 64 */
+      dither1_Q7 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)seed + 16777216, 25);
+
+      /* dither sample is placed in either even or odd index */
+      shft = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 1);     /* either 0 or 1 */
+
+      bufQ7[k + shft] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(dither_gain_Q14, dither1_Q7) + 8192, 14);
+      bufQ7[k + 1 - shft] = 0;
+    }
+  }
+}
+
+
+
+
+/*
+ * function to decode the complex spectrum from the bitstream
+ * returns the total number of bytes in the stream
+ */
/*
 * Decode the complex spectrum (frQ7 = real, fiQ7 = imaginary, both Q7) from
 * the bitstream. Returns the total number of bytes consumed from the stream,
 * or a negative ISAC_RANGE_ERROR_DECODE_SPECTRUM code on failure.
 */
WebRtc_Word16 WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
                                       WebRtc_Word16 *frQ7,
                                       WebRtc_Word16 *fiQ7,
                                       WebRtc_Word16 AvgPitchGain_Q12)
{
  WebRtc_Word16  data[FRAMESAMPLES];            /* dither in, decoded samples out */
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES/4]; /* inverse AR power spectrum */
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];               /* reflection coefficients */
  WebRtc_Word16  gainQ10;
  WebRtc_Word32  gain2_Q10;
  WebRtc_Word16  len;
  int          k;

  /* create dither signal */
  /* seeded from streamdata->W_upper, so the decoder reproduces the same
     dither the encoder generated */
  GenerateDitherQ7(data, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12); /* Dither is output in vector 'Data' */

  /* decode model parameters */
  if (WebRtcIsacfix_DecodeRcCoef(streamdata, RCQ15) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;


  /* reflection coefficients -> AR (LPC) coefficients */
  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  if (WebRtcIsacfix_DecodeGain2(streamdata, &gain2_Q10) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  /* compute inverse AR power spectrum */
  CalcInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* arithmetic decoding of spectrum */
  /* 'data' input and output. Input = Dither */
  len = WebRtcIsacfix_DecLogisticMulti2(data, streamdata, invARSpec2_Q16, (WebRtc_Word16)FRAMESAMPLES);

  if (len<1)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  /* subtract dither and scale down spectral samples with low SNR */
  /* one gain per group of four samples (invARSpec2_Q16[k>>2]); the two
     branches differ only in the numerator (30 vs 36) and the additive
     constant used with the inverse spectrum */
  if (AvgPitchGain_Q12 <= 614)
  {
    for (k = 0; k < FRAMESAMPLES; k += 4)
    {
      gainQ10 = WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)30, 10),
                                              (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(invARSpec2_Q16[k>>2] + (WebRtc_UWord32)2195456, 16));
      *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[ k ], gainQ10) + 512, 10);
      *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+1], gainQ10) + 512, 10);
      *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+2], gainQ10) + 512, 10);
      *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+3], gainQ10) + 512, 10);
    }
  }
  else
  {
    for (k = 0; k < FRAMESAMPLES; k += 4)
    {
      gainQ10 = WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)36, 10),
                                              (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(invARSpec2_Q16[k>>2] + (WebRtc_UWord32)2654208, 16));
      *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[ k ], gainQ10) + 512, 10);
      *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+1], gainQ10) + 512, 10);
      *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+2], gainQ10) + 512, 10);
      *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+3], gainQ10) + 512, 10);
    }
  }

  return len;
}
+
+
/*
 * Encode the complex spectrum (fr = real, fi = imaginary, Q7) into the
 * bitstream: dither+quantize the samples, fit an AR model to the power
 * spectrum, entropy-code the model and gain, then arithmetically code the
 * quantized samples against the model's inverse magnitude spectrum.
 * Returns 0 on success, a negative status code on failure.
 */
int WebRtcIsacfix_EncodeSpec(const WebRtc_Word16 *fr,
                             const WebRtc_Word16 *fi,
                             Bitstr_enc *streamdata,
                             WebRtc_Word16 AvgPitchGain_Q12)
{
  WebRtc_Word16  dataQ7[FRAMESAMPLES];          /* dither in, quantized samples out */
  WebRtc_Word32  PSpec[FRAMESAMPLES/4];         /* power spectrum, one value per 4 samples */
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES/4];   /* inverse AR magnitude spectrum */
  WebRtc_Word32  CorrQ7[AR_ORDER+1];            /* correlation from power spectrum */
  WebRtc_Word32  CorrQ7_norm[AR_ORDER+1];       /* same, normalized for the Levinson step */
  WebRtc_Word16  RCQ15[AR_ORDER];               /* reflection coefficients */
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word32  gain2_Q10;
  WebRtc_Word16  val;
  WebRtc_Word32  nrg;
  WebRtc_UWord32 sum;
  WebRtc_Word16  lft_shft;
  WebRtc_Word16  status;
  int          k, n, j;


  /* create dither_float signal */
  /* seeded from streamdata->W_upper, matching WebRtcIsacfix_DecodeSpec() */
  GenerateDitherQ7(dataQ7, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12);

  /* add dither and quantize, and compute power spectrum */
  /* Vector dataQ7 contains Dither in Q7 */
  /* (x + dither + 64) & 0xFF80 rounds to the nearest multiple of 128
     (i.e. quantizes Q7 to integer steps); subtracting the dither again
     leaves the quantized sample relative to the dither */
  for (k = 0; k < FRAMESAMPLES; k += 4)
  {
    val = ((*fr++ + dataQ7[k]   + 64) & 0xFF80) - dataQ7[k]; /* Data = Dither */
    dataQ7[k] = val;            /* New value in Data */
    sum = WEBRTC_SPL_UMUL(val, val);

    val = ((*fi++ + dataQ7[k+1] + 64) & 0xFF80) - dataQ7[k+1]; /* Data = Dither */
    dataQ7[k+1] = val;            /* New value in Data */
    sum += WEBRTC_SPL_UMUL(val, val);

    val = ((*fr++ + dataQ7[k+2] + 64) & 0xFF80) - dataQ7[k+2]; /* Data = Dither */
    dataQ7[k+2] = val;            /* New value in Data */
    sum += WEBRTC_SPL_UMUL(val, val);

    val = ((*fi++ + dataQ7[k+3] + 64) & 0xFF80) - dataQ7[k+3]; /* Data = Dither */
    dataQ7[k+3] = val;            /* New value in Data */
    sum += WEBRTC_SPL_UMUL(val, val);

    /* average energy of the group of four */
    PSpec[k>>2] = WEBRTC_SPL_RSHIFT_U32(sum, 2);
  }

  /* compute correlation from power spectrum */
  CalcCorrelation(PSpec, CorrQ7);


  /* find AR coefficients */
  /* number of bit shifts to 14-bit normalize CorrQ7[0] (leaving room for sign) */
  lft_shft = WebRtcSpl_NormW32(CorrQ7[0]) - 18;

  if (lft_shft > 0) {
    for (k=0; k<AR_ORDER+1; k++)
      CorrQ7_norm[k] = WEBRTC_SPL_LSHIFT_W32(CorrQ7[k], lft_shft);
  } else {
    for (k=0; k<AR_ORDER+1; k++)
      CorrQ7_norm[k] = WEBRTC_SPL_RSHIFT_W32(CorrQ7[k], -lft_shft);
  }

  /* find RC coefficients */
  WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15);

  /* quantize & code RC Coef */
  status = WebRtcIsacfix_EncodeRcCoef(RCQ15, streamdata);
  if (status < 0) {
    return status;
  }

  /* RC -> AR coefficients */
  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  /* compute ARCoef' * Corr * ARCoef in Q19 */
  /* quadratic form over the normalized correlation; the two inner loops
     cover the lower (j-n) and upper (n-j) triangles of the symmetric
     correlation matrix */
  nrg = 0;
  for (j = 0; j <= AR_ORDER; j++) {
    for (n = 0; n <= j; n++)
      nrg += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(ARCoefQ12[j], WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CorrQ7_norm[j-n], ARCoefQ12[n]) + 256, 9)) + 4, 3);
    for (n = j+1; n <= AR_ORDER; n++)
      nrg += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(ARCoefQ12[j], WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CorrQ7_norm[n-j], ARCoefQ12[n]) + 256, 9)) + 4, 3);
  }

  /* undo the normalization applied to CorrQ7 above */
  if (lft_shft > 0)
    nrg = WEBRTC_SPL_RSHIFT_W32(nrg, lft_shft);
  else
    nrg = WEBRTC_SPL_LSHIFT_W32(nrg, -lft_shft);

  if(nrg>131072)
    gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES >> 2, nrg);  /* also shifts 31 bits to the left! */
  else
    gain2_Q10 = WEBRTC_SPL_RSHIFT_W32(FRAMESAMPLES, 2);

  /* quantize & code gain2_Q10 */
  if (WebRtcIsacfix_EncodeGain2(&gain2_Q10, streamdata))
    return -1;

  /* compute inverse AR magnitude spectrum */
  CalcRootInvArSpec(ARCoefQ12, gain2_Q10, invARSpecQ8);


  /* arithmetic coding of spectrum */
  status = WebRtcIsacfix_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, (WebRtc_Word16)FRAMESAMPLES);
  if ( status )
    return( status );

  return 0;
}
+
+
+/* Matlab's LAR definition */
+static void Rc2LarFix(const WebRtc_Word16 *rcQ15, WebRtc_Word32 *larQ17, WebRtc_Word16 order) {
+
+  /*
+
+    This is a piece-wise implemenetation of a rc2lar-function (all values in the comment
+    are Q15 values and  are based on [0 24956/32768 30000/32768 32500/32768], i.e.
+    [0.76159667968750   0.91552734375000   0.99182128906250]
+
+    x0  x1           a                 k              x0(again)         b
+    ==================================================================================
+    0.00 0.76:   0                  2.625997508581   0                  0
+    0.76 0.91:   2.000012018559     7.284502668663   0.761596679688    -3.547841027073
+    0.91 0.99:   3.121320351712    31.115835041229   0.915527343750   -25.366077452148
+    0.99 1.00:   5.495270168700   686.663805654056   0.991821289063  -675.552510708011
+
+    The implementation is y(x)= a + (x-x0)*k, but this can be simplified to
+
+    y(x) = a-x0*k + x*k = b + x*k, where b = a-x0*k
+
+    akx=[0                 2.625997508581   0
+    2.000012018559     7.284502668663   0.761596679688
+    3.121320351712    31.115835041229   0.915527343750
+    5.495270168700   686.663805654056   0.991821289063];
+
+    b = akx(:,1) - akx(:,3).*akx(:,2)
+
+    [ 0.0
+    -3.547841027073
+    -25.366077452148
+    -675.552510708011]
+
+  */
+
+  int k;
+  WebRtc_Word16 rc;
+  WebRtc_Word32 larAbsQ17;
+
+  for (k = 0; k < order; k++) {
+
+    rc = WEBRTC_SPL_ABS_W16(rcQ15[k]); //Q15
+
+    /* Calculate larAbsQ17 in Q17 from rc in Q15 */
+
+    if (rc<24956) {  //0.7615966 in Q15
+      // (Q15*Q13)>>11 = Q17
+      larAbsQ17 = WEBRTC_SPL_MUL_16_16_RSFT(rc, 21512, 11);
+    } else if (rc<30000) { //0.91552734375 in Q15
+      // Q17 + (Q15*Q12)>>10 = Q17
+      larAbsQ17 = -465024 + WEBRTC_SPL_MUL_16_16_RSFT(rc, 29837, 10);
+    } else if (rc<32500) { //0.99182128906250 in Q15
+      // Q17 + (Q15*Q10)>>8 = Q17
+      larAbsQ17 = -3324784 + WEBRTC_SPL_MUL_16_16_RSFT(rc, 31863, 8);
+    } else  {
+      // Q17 + (Q15*Q5)>>3 = Q17
+      larAbsQ17 = -88546020 + WEBRTC_SPL_MUL_16_16_RSFT(rc, 21973, 3);
+    }
+
+    if (rcQ15[k]>0) {
+      larQ17[k] = larAbsQ17;
+    } else {
+      larQ17[k] = -larAbsQ17;
+    }
+  }
+}
+
+
+static void Lar2RcFix(const WebRtc_Word32 *larQ17, WebRtc_Word16 *rcQ15,  WebRtc_Word16 order) {
+
+  /*
+    This is a piece-wise implemenetation of a lar2rc-function
+    See comment in Rc2LarFix() about details.
+  */
+
+  int k;
+  WebRtc_Word16 larAbsQ11;
+  WebRtc_Word32 rc;
+
+  for (k = 0; k < order; k++) {
+
+    larAbsQ11 = (WebRtc_Word16) WEBRTC_SPL_ABS_W32(WEBRTC_SPL_RSHIFT_W32(larQ17[k]+32,6)); //Q11
+
+    if (larAbsQ11<4097) { //2.000012018559 in Q11
+      // Q11*Q16>>12 = Q15
+      rc = WEBRTC_SPL_MUL_16_16_RSFT(larAbsQ11, 24957, 12);
+    } else if (larAbsQ11<6393) { //3.121320351712 in Q11
+      // (Q11*Q17 + Q13)>>13 = Q15
+      rc = WEBRTC_SPL_RSHIFT_W32((WEBRTC_SPL_MUL_16_16(larAbsQ11, 17993) + 130738688), 13);
+    } else if (larAbsQ11<11255) { //5.495270168700 in Q11
+      // (Q11*Q19 + Q30)>>15 = Q15
+      rc = WEBRTC_SPL_RSHIFT_W32((WEBRTC_SPL_MUL_16_16(larAbsQ11, 16850) + 875329820), 15);
+    } else  {
+      // (Q11*Q24>>16 + Q19)>>4 = Q15
+      rc = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16_RSFT(larAbsQ11, 24433, 16)) + 515804), 4);
+    }
+
+    if (larQ17[k]<=0) {
+      rc = -rc;
+    }
+
+    rcQ15[k] = (WebRtc_Word16) rc;  // Q15
+  }
+}
+
+static void Poly2LarFix(WebRtc_Word16 *lowbandQ15,
+                        WebRtc_Word16 orderLo,
+                        WebRtc_Word16 *hibandQ15,
+                        WebRtc_Word16 orderHi,
+                        WebRtc_Word16 Nsub,
+                        WebRtc_Word32 *larsQ17) {
+
+  int k, n;
+  WebRtc_Word32 *outpQ17;
+  WebRtc_Word16 orderTot;
+  WebRtc_Word32 larQ17[MAX_ORDER];   // Size 7+6 is enough
+
+  orderTot = (orderLo + orderHi);
+  outpQ17 = larsQ17;
+  for (k = 0; k < Nsub; k++) {
+
+    Rc2LarFix(lowbandQ15, larQ17, orderLo);
+
+    for (n = 0; n < orderLo; n++)
+      outpQ17[n] = larQ17[n]; //Q17
+
+    Rc2LarFix(hibandQ15, larQ17, orderHi);
+
+    for (n = 0; n < orderHi; n++)
+      outpQ17[n + orderLo] = larQ17[n]; //Q17;
+
+    outpQ17 += orderTot;
+    lowbandQ15 += orderLo;
+    hibandQ15 += orderHi;
+  }
+}
+
+
+static void Lar2polyFix(WebRtc_Word32 *larsQ17,
+                        WebRtc_Word16 *lowbandQ15,
+                        WebRtc_Word16 orderLo,
+                        WebRtc_Word16 *hibandQ15,
+                        WebRtc_Word16 orderHi,
+                        WebRtc_Word16 Nsub) {
+
+  int k, n;
+  WebRtc_Word16 orderTot;
+  WebRtc_Word16 *outplQ15, *outphQ15;
+  WebRtc_Word32 *inpQ17;
+  WebRtc_Word16 rcQ15[7+6];
+
+  orderTot = (orderLo + orderHi);
+  outplQ15 = lowbandQ15;
+  outphQ15 = hibandQ15;
+  inpQ17 = larsQ17;
+  for (k = 0; k < Nsub; k++) {
+
+    /* gains not handled here as in the FLP version */
+
+    /* Low band */
+    Lar2RcFix(&inpQ17[0], rcQ15, orderLo);
+    for (n = 0; n < orderLo; n++)
+      outplQ15[n] = rcQ15[n]; // Refl. coeffs
+
+    /* High band */
+    Lar2RcFix(&inpQ17[orderLo], rcQ15, orderHi);
+    for (n = 0; n < orderHi; n++)
+      outphQ15[n] = rcQ15[n]; // Refl. coeffs
+
+    inpQ17 += orderTot;
+    outplQ15 += orderLo;
+    outphQ15 += orderHi;
+  }
+}
+
+int WebRtcIsacfix_DecodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
+                            WebRtc_Word16 *LPCCoef_loQ15,
+                            WebRtc_Word16 *LPCCoef_hiQ15,
+                            Bitstr_dec *streamdata,
+                            WebRtc_Word16 *outmodel) {
+
+  WebRtc_Word32 larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_GAIN+KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES
+  int err;
+
+  err = WebRtcIsacfix_DecodeLpcCoef(streamdata, larsQ17, gain_lo_hiQ17, outmodel);
+  if (err<0)  // error check
+    return -ISAC_RANGE_ERROR_DECODE_LPC;
+
+  Lar2polyFix(larsQ17, LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES);
+
+  return 0;
+}
+
/* decode & dequantize LPC Coef */
/*
 * Entropy-decode the KLT model number, shape indices, and gain indices from
 * the bitstream; dequantize them; apply the inverse KLT (left then right
 * transform); then rescale, add back the per-model means, and restore the
 * gains via CalcExpN.
 *
 * streamdata     : bitstream to decode from.
 * LPCCoefQ17     : output, KLT_ORDER_SHAPE LAR coefficients in Q17.
 * gain_lo_hiQ17  : output, KLT_ORDER_GAIN gains in Q17.
 * outmodel       : output, decoded model number.
 * Returns 0 on success, a negative error code on decode failure.
 */
int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec *streamdata,
                                WebRtc_Word32 *LPCCoefQ17,
                                WebRtc_Word32 *gain_lo_hiQ17,
                                WebRtc_Word16 *outmodel)
{
  int j, k, n;
  int err;
  WebRtc_Word16 pos, pos2, posg, poss, offsg, offss, offs2;  /* matrix walk indices */
  WebRtc_Word16 gainpos;
  WebRtc_Word16 model;
  WebRtc_Word16 index_QQ[KLT_ORDER_SHAPE];     /* decoded quantization indices */
  WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
  WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];  /* gains after left transform */
  WebRtc_Word16 tmpcoeffs_sQ10[KLT_ORDER_SHAPE];
  WebRtc_Word32 tmpcoeffs_sQ17[KLT_ORDER_SHAPE];
  WebRtc_Word32 tmpcoeffs2_sQ18[KLT_ORDER_SHAPE]; /* shapes after left transform */
  WebRtc_Word32 sumQQ;
  WebRtc_Word16 sumQQ16;
  WebRtc_Word32 tmp32;



  /* entropy decoding of model number */
  err = WebRtcIsacfix_DecHistOneStepMulti(&model, streamdata, WebRtcIsacfix_kModelCdfPtr, WebRtcIsacfix_kModelInitIndex, 1);
  if (err<0)  // error check
    return err;

  /* entropy decoding of quantization indices */
  err = WebRtcIsacfix_DecHistOneStepMulti(index_QQ, streamdata, WebRtcIsacfix_kCdfShapePtr[model], WebRtcIsacfix_kInitIndexShape[model], KLT_ORDER_SHAPE);
  if (err<0)  // error check
    return err;
  /* find quantization levels for coefficients */
  for (k=0; k<KLT_ORDER_SHAPE; k++) {
    tmpcoeffs_sQ10[WebRtcIsacfix_kSelIndShape[k]] = WebRtcIsacfix_kLevelsShapeQ10[WebRtcIsacfix_kOfLevelsShape[model]+WebRtcIsacfix_kOffsetShape[model][k] + index_QQ[k]];
  }

  err = WebRtcIsacfix_DecHistOneStepMulti(index_QQ, streamdata, WebRtcIsacfix_kCdfGainPtr[model], WebRtcIsacfix_kInitIndexGain[model], KLT_ORDER_GAIN);
  if (err<0)  // error check
    return err;
  /* find quantization levels for coefficients */
  for (k=0; k<KLT_ORDER_GAIN; k++) {
    tmpcoeffs_gQ17[WebRtcIsacfix_kSelIndGain[k]] = WebRtcIsacfix_kLevelsGainQ17[WebRtcIsacfix_kOfLevelsGain[model]+ WebRtcIsacfix_kOffsetGain[model][k] + index_QQ[k]];
  }


  /* inverse KLT  */

  /* left transform */  // Transpose matrix!
  /* per subframe: 2x2 gain transform (kT1GainQ15) and
     LPC_SHAPE_ORDER x LPC_SHAPE_ORDER shape transform (kT1ShapeQ15) */
  offsg = 0;
  offss = 0;
  posg = 0;
  poss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    offs2 = 0;
    for (k=0; k<2; k++) {
      sumQQ = 0;
      pos = offsg;
      pos2 = offs2;
      for (n=0; n<2; n++) {
        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[model][pos2], tmpcoeffs_gQ17[pos]<<5)); // (Q15*Q17)>>(16-5) = Q21
        pos++;
        pos2++;
      }
      tmpcoeffs2_gQ21[posg] = sumQQ; //Q21
      posg++;
      offs2 += 2;
    }
    offs2 = 0;

    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sumQQ = 0;
      pos = offss;
      pos2 = offs2;
      for (n=0; n<LPC_SHAPE_ORDER; n++) {
        sumQQ += WEBRTC_SPL_MUL_16_16_RSFT(tmpcoeffs_sQ10[pos], WebRtcIsacfix_kT1ShapeQ15[model][pos2], 7); // (Q10*Q15)>>7 = Q18
        pos++;
        pos2++;
      }
      tmpcoeffs2_sQ18[poss] = sumQQ; //Q18
      poss++;
      offs2 += LPC_SHAPE_ORDER;
    }
    offsg += 2;
    offss += LPC_SHAPE_ORDER;
  }

  /* right transform */ // Transpose matrix
  /* mixes across subframes via kT2GainQ15 / kT2ShapeQ15 (SUBFRAMES-long
     inner products, strided through the left-transform output) */
  offsg = 0;
  offss = 0;
  posg = 0;
  poss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    posg = offsg;
    for (k=0; k<2; k++) {
      sumQQ = 0;
      pos = k;
      pos2 = j;
      for (n=0; n<SUBFRAMES; n++) {
        sumQQ += WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2GainQ15[model][pos2], tmpcoeffs2_gQ21[pos]), 1); // (Q15*Q21)>>(16-1) = Q21
        pos += 2;
        pos2 += SUBFRAMES;

      }
      tmpcoeffs_gQ17[posg] = WEBRTC_SPL_RSHIFT_W32(sumQQ, 4);  /* Q21 -> Q17 */
      posg++;
    }
    poss = offss;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sumQQ = 0;
      pos = k;
      pos2 = j;
      for (n=0; n<SUBFRAMES; n++) {
        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2ShapeQ15[model][pos2], tmpcoeffs2_sQ18[pos])); // (Q15*Q18)>>16 = Q17
        pos += LPC_SHAPE_ORDER;
        pos2 += SUBFRAMES;
      }
      tmpcoeffs_sQ17[poss] = sumQQ;
      poss++;
    }
    offsg += 2;
    offss += LPC_SHAPE_ORDER;
  }

  /* scaling, mean addition, and gain restoration */
  gainpos = 0;
  posg = 0;poss = 0;pos=0;
  for (k=0; k<SUBFRAMES; k++) {

    /* log gains */
    /* two gains (lo/hi band) per subframe; each is mean-restored in the
       log domain and then exponentiated back via CalcExpN */
    sumQQ16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
    sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg];
    sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out
    gain_lo_hiQ17[gainpos] = sumQQ; //Q17
    gainpos++;
    posg++;

    sumQQ16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
    sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg];
    sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out
    gain_lo_hiQ17[gainpos] = sumQQ; //Q17
    gainpos++;
    posg++;

    /* lo band LAR coeffs */
    for (n=0; n<ORDERLO; n++, pos++, poss++) {
      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(31208, tmpcoeffs_sQ17[poss]); // (Q16*Q17)>>16 = Q17, with 1/2.1 = 0.47619047619 ~= 31208 in Q16
      tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[model][poss]; // Q17+Q17 = Q17
      LPCCoefQ17[pos] = tmp32;
    }

    /* hi band LAR coeffs */
    for (n=0; n<ORDERHI; n++, pos++, poss++) {
      tmp32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(18204, tmpcoeffs_sQ17[poss]), 3); // ((Q13*Q17)>>16)<<3 = Q17, with 1/0.45 = 2.222222222222 ~= 18204 in Q13
      tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[model][poss]; // Q17+Q17 = Q17
      LPCCoefQ17[pos] = tmp32;
    }
  }


  *outmodel=model;

  return 0;
}
+
+/* estimate codel length of LPC Coef */
+static int EstCodeLpcCoef(WebRtc_Word32 *LPCCoefQ17,
+                          WebRtc_Word32 *gain_lo_hiQ17,
+                          WebRtc_Word16 *model,
+                          WebRtc_Word32 *sizeQ11,
+                          Bitstr_enc *streamdata,
+                          ISAC_SaveEncData_t* encData,
+                          transcode_obj *transcodingParam) {
+  int j, k, n;
+  WebRtc_Word16 posQQ, pos2QQ, gainpos;
+  WebRtc_Word16  pos, pos2, poss, posg, offsg, offss, offs2;
+  WebRtc_Word16 index_gQQ[KLT_ORDER_GAIN], index_sQQ[KLT_ORDER_SHAPE];
+  WebRtc_Word16 index_ovr_gQQ[KLT_ORDER_GAIN], index_ovr_sQQ[KLT_ORDER_SHAPE];
+  WebRtc_Word32 BitsQQ;
+
+  WebRtc_Word16 tmpcoeffs_gQ6[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs_sQ17[KLT_ORDER_SHAPE];
+  WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs2_sQ17[KLT_ORDER_SHAPE];
+  WebRtc_Word32 sumQQ;
+  WebRtc_Word32 tmp32;
+  WebRtc_Word16 sumQQ16;
+  int status = 0;
+
+  /* write LAR coefficients to statistics file */
+  /* Save data for creation of multiple bitstreams (and transcoding) */
+  if (encData != NULL) {
+    for (k=0; k<KLT_ORDER_GAIN; k++) {
+      encData->LPCcoeffs_g[KLT_ORDER_GAIN*encData->startIdx + k] = gain_lo_hiQ17[k];
+    }
+  }
+
+  /* log gains, mean removal and scaling */
+  posg = 0;poss = 0;pos=0; gainpos=0;
+
+  for (k=0; k<SUBFRAMES; k++) {
+    /* log gains */
+
+    /* The input argument X to logN(X) is 2^17 times higher than the
+       input floating point argument Y to log(Y), since the X value
+       is a Q17 value. This can be compensated for after the call, by
+       subraction a value Z for each Q-step. One Q-step means that
+       X gets 2 times higher, i.e. Z = logN(2)*256 = 0.693147180559*256 =
+       177.445678 should be subtracted (since logN() returns a Q8 value).
+       For a X value in Q17, the value 177.445678*17 = 3017 should be
+       subtracted */
+    tmpcoeffs_gQ6[posg] = CalcLogN(gain_lo_hiQ17[gainpos])-3017; //Q8
+    tmpcoeffs_gQ6[posg] -= WebRtcIsacfix_kMeansGainQ8[0][posg]; //Q8, but Q6 after not-needed mult. by 4
+    posg++; gainpos++;
+
+    tmpcoeffs_gQ6[posg] = CalcLogN(gain_lo_hiQ17[gainpos])-3017; //Q8
+    tmpcoeffs_gQ6[posg] -= WebRtcIsacfix_kMeansGainQ8[0][posg]; //Q8, but Q6 after not-needed mult. by 4
+    posg++; gainpos++;
+
+    /* lo band LAR coeffs */
+    for (n=0; n<ORDERLO; n++, poss++, pos++) {
+      tmp32 = LPCCoefQ17[pos] - WebRtcIsacfix_kMeansShapeQ17[0][poss]; //Q17
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(17203, tmp32<<3); // tmp32 = 2.1*tmp32
+      tmpcoeffs_sQ17[poss] = tmp32; //Q17
+    }
+
+    /* hi band LAR coeffs */
+    for (n=0; n<ORDERHI; n++, poss++, pos++) {
+      tmp32 = LPCCoefQ17[pos] - WebRtcIsacfix_kMeansShapeQ17[0][poss]; //Q17
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(14746, tmp32<<1); // tmp32 = 0.45*tmp32
+      tmpcoeffs_sQ17[poss] = tmp32; //Q17
+    }
+
+  }
+
+
+  /* KLT  */
+
+  /* left transform */
+  offsg = 0;
+  offss = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = offsg;
+      pos2 = k;
+      for (n=0; n<2; n++) {
+        sumQQ += WEBRTC_SPL_MUL_16_16(tmpcoeffs_gQ6[pos], WebRtcIsacfix_kT1GainQ15[0][pos2]); //Q21 = Q6*Q15
+        pos++;
+        pos2 += 2;
+      }
+      tmpcoeffs2_gQ21[posg] = sumQQ;
+      posg++;
+    }
+    poss = offss;
+    for (k=0; k<LPC_SHAPE_ORDER; k++) {
+      sumQQ = 0;
+      pos = offss;
+      pos2 = k;
+      for (n=0; n<LPC_SHAPE_ORDER; n++) {
+        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1ShapeQ15[0][pos2], tmpcoeffs_sQ17[pos]<<1)); // (Q15*Q17)>>(16-1) = Q17
+        pos++;
+        pos2 += LPC_SHAPE_ORDER;
+      }
+      tmpcoeffs2_sQ17[poss] = sumQQ; //Q17
+      poss++;
+    }
+    offsg += 2;
+    offss += LPC_SHAPE_ORDER;
+  }
+
+  /* right transform */
+  offsg = 0;
+  offss = 0;
+  offs2 = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = k;
+      pos2 = offs2;
+      for (n=0; n<SUBFRAMES; n++) {
+        sumQQ += WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2GainQ15[0][pos2], tmpcoeffs2_gQ21[pos]), 1); // (Q15*Q21)>>(16-1) = Q21
+        pos += 2;
+        pos2++;
+      }
+      tmpcoeffs_gQ17[posg] = WEBRTC_SPL_RSHIFT_W32(sumQQ, 4);
+      posg++;
+    }
+    poss = offss;
+    for (k=0; k<LPC_SHAPE_ORDER; k++) {
+      sumQQ = 0;
+      pos = k;
+      pos2 = offs2;
+      for (n=0; n<SUBFRAMES; n++) {
+        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2ShapeQ15[0][pos2], tmpcoeffs2_sQ17[pos]<<1)); // (Q15*Q17)>>(16-1) = Q17
+        pos += LPC_SHAPE_ORDER;
+        pos2++;
+      }
+      tmpcoeffs_sQ17[poss] = sumQQ;
+      poss++;
+    }
+    offs2 += SUBFRAMES;
+    offsg += 2;
+    offss += LPC_SHAPE_ORDER;
+  }
+
+  /* quantize coefficients */
+
+  BitsQQ = 0;
+  for (k=0; k<KLT_ORDER_GAIN; k++) //ATTN: ok?
+  {
+    posQQ = WebRtcIsacfix_kSelIndGain[k];
+    pos2QQ= (WebRtc_Word16)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
+
+    index_gQQ[k] = pos2QQ + WebRtcIsacfix_kQuantMinGain[k]; //ATTN: ok?
+    if (index_gQQ[k] < 0) {
+      index_gQQ[k] = 0;
+    }
+    else if (index_gQQ[k] > WebRtcIsacfix_kMaxIndGain[k]) {
+      index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k];
+    }
+    index_ovr_gQQ[k] = WebRtcIsacfix_kOffsetGain[0][k]+index_gQQ[k];
+    posQQ = WebRtcIsacfix_kOfLevelsGain[0] + index_ovr_gQQ[k];
+
+    /* Save data for creation of multiple bitstreams */
+    if (encData != NULL) {
+      encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_gQQ[k];
+    }
+
+    /* determine number of bits */
+    sumQQ = WebRtcIsacfix_kCodeLenGainQ11[posQQ]; //Q11
+    BitsQQ += sumQQ;
+  }
+
+  for (k=0; k<KLT_ORDER_SHAPE; k++) //ATTN: ok?
+  {
+    index_sQQ[k] = (WebRtc_Word16)(CalcLrIntQ(tmpcoeffs_sQ17[WebRtcIsacfix_kSelIndShape[k]], 17) + WebRtcIsacfix_kQuantMinShape[k]); //ATTN: ok?
+
+    if (index_sQQ[k] < 0)
+      index_sQQ[k] = 0;
+    else if (index_sQQ[k] > WebRtcIsacfix_kMaxIndShape[k])
+      index_sQQ[k] = WebRtcIsacfix_kMaxIndShape[k];
+    index_ovr_sQQ[k] = WebRtcIsacfix_kOffsetShape[0][k]+index_sQQ[k];
+
+    posQQ = WebRtcIsacfix_kOfLevelsShape[0] + index_ovr_sQQ[k];
+    sumQQ = WebRtcIsacfix_kCodeLenShapeQ11[posQQ]; //Q11
+    BitsQQ += sumQQ;
+  }
+
+
+
+  *model = 0;
+  *sizeQ11=BitsQQ;
+
+  /* entropy coding of model number */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, model, WebRtcIsacfix_kModelCdfPtr, 1);
+  if (status < 0) {
+    return status;
+  }
+
+  /* entropy coding of quantization indices - shape only */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, index_sQQ, WebRtcIsacfix_kCdfShapePtr[0], KLT_ORDER_SHAPE);
+  if (status < 0) {
+    return status;
+  }
+
+  /* Save data for creation of multiple bitstreams */
+  if (encData != NULL) {
+    for (k=0; k<KLT_ORDER_SHAPE; k++)
+    {
+      encData->LPCindex_s[KLT_ORDER_SHAPE*encData->startIdx + k] = index_sQQ[k];
+    }
+  }
+  /* save the state of the bitstream object 'streamdata' for the possible bit-rate reduction */
+  transcodingParam->full         = streamdata->full;
+  transcodingParam->stream_index = streamdata->stream_index;
+  transcodingParam->streamval    = streamdata->streamval;
+  transcodingParam->W_upper      = streamdata->W_upper;
+  transcodingParam->beforeLastWord     = streamdata->stream[streamdata->stream_index-1];
+  transcodingParam->lastWord     = streamdata->stream[streamdata->stream_index];
+
+  /* entropy coding of index */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, index_gQQ, WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN);
+  if (status < 0) {
+    return status;
+  }
+
+  /* find quantization levels for shape coefficients */
+  for (k=0; k<KLT_ORDER_SHAPE; k++) {
+    tmpcoeffs_sQ17[WebRtcIsacfix_kSelIndShape[k]] = WEBRTC_SPL_MUL(128, WebRtcIsacfix_kLevelsShapeQ10[WebRtcIsacfix_kOfLevelsShape[0]+index_ovr_sQQ[k]]);
+
+  }
+  /* inverse KLT  */
+
+  /* left transform */  // Transpose matrix!
+  offss = 0;
+  poss = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    offs2 = 0;
+    for (k=0; k<LPC_SHAPE_ORDER; k++) {
+      sumQQ = 0;
+      pos = offss;
+      pos2 = offs2;
+      for (n=0; n<LPC_SHAPE_ORDER; n++) {
+        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1ShapeQ15[0][pos2], tmpcoeffs_sQ17[pos]<<1)); // (Q15*Q17)>>(16-1) = Q17
+        pos++;
+        pos2++;
+      }
+      tmpcoeffs2_sQ17[poss] = sumQQ;
+
+      poss++;
+      offs2 += LPC_SHAPE_ORDER;
+    }
+    offss += LPC_SHAPE_ORDER;
+  }
+
+
+  /* right transform */ // Transpose matrix
+  offss = 0;
+  poss = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    poss = offss;
+    for (k=0; k<LPC_SHAPE_ORDER; k++) {
+      sumQQ = 0;
+      pos = k;
+      pos2 = j;
+      for (n=0; n<SUBFRAMES; n++) {
+        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2ShapeQ15[0][pos2], tmpcoeffs2_sQ17[pos]<<1)); // (Q15*Q17)>>(16-1) = Q17
+        pos += LPC_SHAPE_ORDER;
+        pos2 += SUBFRAMES;
+      }
+      tmpcoeffs_sQ17[poss] = sumQQ;
+      poss++;
+    }
+    offss += LPC_SHAPE_ORDER;
+  }
+
+  /* scaling, mean addition, and gain restoration */
+  poss = 0;pos=0;
+  for (k=0; k<SUBFRAMES; k++) {
+
+    /* lo band LAR coeffs */
+    for (n=0; n<ORDERLO; n++, pos++, poss++) {
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(31208, tmpcoeffs_sQ17[poss]); // (Q16*Q17)>>16 = Q17, with 1/2.1 = 0.47619047619 ~= 31208 in Q16
+      tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[0][poss]; // Q17+Q17 = Q17
+      LPCCoefQ17[pos] = tmp32;
+    }
+
+    /* hi band LAR coeffs */
+    for (n=0; n<ORDERHI; n++, pos++, poss++) {
+      tmp32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(18204, tmpcoeffs_sQ17[poss]), 3); // ((Q13*Q17)>>16)<<3 = Q17, with 1/0.45 = 2.222222222222 ~= 18204 in Q13
+      tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[0][poss]; // Q17+Q17 = Q17
+      LPCCoefQ17[pos] = tmp32;
+    }
+
+  }
+
+  //to update tmpcoeffs_gQ17 to the proper state
+  for (k=0; k<KLT_ORDER_GAIN; k++) {
+    tmpcoeffs_gQ17[WebRtcIsacfix_kSelIndGain[k]] = WebRtcIsacfix_kLevelsGainQ17[WebRtcIsacfix_kOfLevelsGain[0]+index_ovr_gQQ[k]];
+  }
+
+
+
+  /* find quantization levels for coefficients */
+
+  /* left transform */
+  offsg = 0;
+  posg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    offs2 = 0;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = offsg;
+      pos2 = offs2;
+      for (n=0; n<2; n++) {
+        sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][pos2], tmpcoeffs_gQ17[pos])<<1); // (Q15*Q17)>>(16-1) = Q17
+        pos++;
+        pos2++;
+      }
+      tmpcoeffs2_gQ21[posg] = WEBRTC_SPL_LSHIFT_W32(sumQQ, 4); //Q17<<4 = Q21
+      posg++;
+      offs2 += 2;
+    }
+    offsg += 2;
+  }
+
+  /* right transform */ // Transpose matrix
+  offsg = 0;
+  posg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = k;
+      pos2 = j;
+      for (n=0; n<SUBFRAMES; n++) {
+        sumQQ += WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2GainQ15[0][pos2], tmpcoeffs2_gQ21[pos]), 1); // (Q15*Q21)>>(16-1) = Q21
+        pos += 2;
+        pos2 += SUBFRAMES;
+      }
+      tmpcoeffs_gQ17[posg] = WEBRTC_SPL_RSHIFT_W32(sumQQ, 4);
+      posg++;
+    }
+    offsg += 2;
+  }
+
+  /* scaling, mean addition, and gain restoration */
+  posg = 0;
+  gainpos = 0;
+  for (k=0; k<2*SUBFRAMES; k++) {
+
+    sumQQ16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
+    sumQQ16 += WebRtcIsacfix_kMeansGainQ8[0][posg];
+    sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out
+    gain_lo_hiQ17[gainpos] = sumQQ; //Q17
+
+    gainpos++;
+    pos++;posg++;
+  }
+
+  return 0;
+}
+
+/* Quantize and entropy-code the LPC gains only (no shape coefficients):
+ * log-domain conversion, mean removal, KLT, scalar quantization and
+ * arithmetic coding of the resulting gain indices.
+ *
+ * gain_lo_hiQ17 : in, 2*SUBFRAMES interleaved lo/hi-band gains, Q17.
+ * streamdata    : in/out, arithmetic-coder state the indices are coded into.
+ * encData       : optional; when non-NULL the raw gains and the final
+ *                 quantization indices are saved for later creation of
+ *                 multiple bitstreams (transcoding).
+ * Returns 0, or the negative error code from WebRtcIsacfix_EncHistMulti(). */
+int WebRtcIsacfix_EstCodeLpcGain(WebRtc_Word32 *gain_lo_hiQ17,
+                                 Bitstr_enc *streamdata,
+                                 ISAC_SaveEncData_t* encData) {
+  int j, k, n;
+  WebRtc_Word16 posQQ, pos2QQ, gainpos;
+  WebRtc_Word16  pos, pos2, posg, offsg, offs2;
+  WebRtc_Word16 index_gQQ[KLT_ORDER_GAIN];
+
+  WebRtc_Word16 tmpcoeffs_gQ6[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+  WebRtc_Word32 sumQQ;
+  int status = 0;
+
+  /* Save the unquantized gains for creation of multiple bitstreams
+     (and transcoding). */
+  if (encData != NULL) {
+    for (k=0; k<KLT_ORDER_GAIN; k++) {
+      encData->LPCcoeffs_g[KLT_ORDER_GAIN*encData->startIdx + k] = gain_lo_hiQ17[k];
+    }
+  }
+
+  /* log gains, mean removal and scaling */
+  posg = 0; pos = 0; gainpos = 0;
+
+  for (k=0; k<SUBFRAMES; k++) {
+    /* log gains: one lo-band and one hi-band gain per subframe */
+
+    /* The input argument X to logN(X) is 2^17 times higher than the
+       input floating point argument Y to log(Y), since the X value
+       is a Q17 value. This can be compensated for after the call, by
+       subtracting a value Z for each Q-step. One Q-step means that
+       X gets 2 times higher, i.e. Z = logN(2)*256 = 0.693147180559*256 =
+       177.445678 should be subtracted (since logN() returns a Q8 value).
+       For a X value in Q17, the value 177.445678*17 = 3017 should be
+       subtracted */
+    tmpcoeffs_gQ6[posg] = CalcLogN(gain_lo_hiQ17[gainpos])-3017; //Q8
+    tmpcoeffs_gQ6[posg] -= WebRtcIsacfix_kMeansGainQ8[0][posg]; //Q8, but Q6 after not-needed mult. by 4
+    posg++; gainpos++;
+
+    tmpcoeffs_gQ6[posg] = CalcLogN(gain_lo_hiQ17[gainpos])-3017; //Q8
+    tmpcoeffs_gQ6[posg] -= WebRtcIsacfix_kMeansGainQ8[0][posg]; //Q8, but Q6 after not-needed mult. by 4
+    posg++; gainpos++;
+  }
+
+
+  /* KLT  */
+
+  /* left transform: 2x2 transform of each subframe's (lo, hi) gain pair */
+  offsg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = offsg;
+      pos2 = k;
+      for (n=0; n<2; n++) {
+        sumQQ += WEBRTC_SPL_MUL_16_16(tmpcoeffs_gQ6[pos], WebRtcIsacfix_kT1GainQ15[0][pos2]); //Q21 = Q6*Q15
+        pos++;
+        pos2 += 2;
+      }
+      tmpcoeffs2_gQ21[posg] = sumQQ;
+      posg++;
+    }
+    offsg += 2;
+  }
+
+  /* right transform: SUBFRAMES x SUBFRAMES transform across subframes */
+  offsg = 0;
+  offs2 = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = k;
+      pos2 = offs2;
+      for (n=0; n<SUBFRAMES; n++) {
+        sumQQ += WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2GainQ15[0][pos2], tmpcoeffs2_gQ21[pos]), 1); // (Q15*Q21)>>(16-1) = Q21
+        pos += 2;
+        pos2++;
+      }
+      tmpcoeffs_gQ17[posg] = WEBRTC_SPL_RSHIFT_W32(sumQQ, 4); // Q21>>4 = Q17
+      posg++;
+    }
+    offsg += 2;
+    offs2 += SUBFRAMES;
+  }
+
+  /* quantize coefficients */
+
+  for (k=0; k<KLT_ORDER_GAIN; k++) //ATTN: ok?
+  {
+    /* Round the selected KLT coefficient to the nearest integer step
+       (Q17 -> Q0) and offset into the index range. */
+    posQQ = WebRtcIsacfix_kSelIndGain[k];
+    pos2QQ= (WebRtc_Word16)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
+
+    index_gQQ[k] = pos2QQ + WebRtcIsacfix_kQuantMinGain[k]; //ATTN: ok?
+    /* Clamp the index to the valid table range [0, kMaxIndGain[k]]. */
+    if (index_gQQ[k] < 0) {
+      index_gQQ[k] = 0;
+    }
+    else if (index_gQQ[k] > WebRtcIsacfix_kMaxIndGain[k]) {
+      index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k];
+    }
+
+    /* Save data for creation of multiple bitstreams */
+    if (encData != NULL) {
+      encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_gQQ[k];
+    }
+  }
+
+  /* entropy coding of the gain quantization indices */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, index_gQQ, WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN);
+  if (status < 0) {
+    return status;
+  }
+
+  return 0;
+}
+
+
+/* Quantize & entropy-code the LPC parameters.
+ * Converts the lo/hi-band LPC polynomials to the LAR domain, quantizes and
+ * codes them (via EstCodeLpcCoef), then converts the quantized LARs back so
+ * the encoder continues with exactly the coefficients the decoder will see.
+ * Returns 0 on success or the negative error code from EstCodeLpcCoef(). */
+int WebRtcIsacfix_EncodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
+                            WebRtc_Word16 *LPCCoef_loQ15,
+                            WebRtc_Word16 *LPCCoef_hiQ15,
+                            WebRtc_Word16 *model,
+                            WebRtc_Word32 *sizeQ11,
+                            Bitstr_enc *streamdata,
+                            ISAC_SaveEncData_t* encData,
+                            transcode_obj *transcodeParam)
+{
+  /* LAR work buffer: KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES
+     == (6+12)*6 == 108 entries, Q17. */
+  WebRtc_Word32 larsQ17[KLT_ORDER_SHAPE];
+  int err;
+
+  /* LPC polynomial coefficients -> log-area ratios. */
+  Poly2LarFix(LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES,
+              larsQ17);
+
+  /* Quantize and entropy-code; larsQ17 comes back dequantized. */
+  err = EstCodeLpcCoef(larsQ17, gain_lo_hiQ17, model, sizeQ11, streamdata,
+                       encData, transcodeParam);
+  if (err < 0) {
+    return err;
+  }
+
+  /* Log-area ratios -> LPC polynomial coefficients. */
+  Lar2polyFix(larsQ17, LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI,
+              SUBFRAMES);
+
+  return 0;
+}
+
+
+/* decode & dequantize RC */
+/* Decode & dequantize the AR_ORDER reflection coefficients (Q15).
+ * Returns 0 on success or the negative error code from the entropy
+ * decoder. */
+int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata, WebRtc_Word16 *RCQ15)
+{
+  WebRtc_Word16 idx[AR_ORDER];
+  int status, i;
+
+  /* Entropy-decode one quantization index per coefficient. */
+  status = WebRtcIsacfix_DecHistOneStepMulti(idx, streamdata,
+                                             WebRtcIsacfix_kRcCdfPtr,
+                                             WebRtcIsacfix_kRcInitInd,
+                                             AR_ORDER);
+  if (status < 0) {
+    return status;
+  }
+
+  /* Table look-up: index -> quantization level (Q15). */
+  for (i = 0; i < AR_ORDER; i++) {
+    RCQ15[i] = WebRtcIsacfix_kRcLevPtr[i][idx[i]];
+  }
+
+  return 0;
+}
+
+
+
+/* quantize & code RC */
+/* Quantize & entropy-code the AR_ORDER reflection coefficients.
+ * RCQ15 is replaced in place by the dequantized levels (Q15) so the encoder
+ * proceeds with exactly the values the decoder will reconstruct.
+ * Returns 0 on success or the negative error code from
+ * WebRtcIsacfix_EncHistMulti(). */
+int WebRtcIsacfix_EncodeRcCoef(WebRtc_Word16 *RCQ15, Bitstr_enc *streamdata)
+{
+  int k;
+  WebRtc_Word16 index[AR_ORDER];
+  int status;
+
+  /* quantize reflection coefficients (add noise feedback?) */
+  for (k=0; k<AR_ORDER; k++)
+  {
+    /* Linear search up or down from the initial index until the interval
+       in kRcBound containing RCQ15[k] is found. */
+    index[k] = WebRtcIsacfix_kRcInitInd[k];
+
+    if (RCQ15[k] > WebRtcIsacfix_kRcBound[index[k]])
+    {
+      while (RCQ15[k] > WebRtcIsacfix_kRcBound[index[k] + 1])
+        index[k]++;
+    }
+    else
+    {
+      /* NOTE(review): decrements before testing; assumes kRcBound[0] is a
+         sentinel below any representable coefficient so index[k] cannot
+         step below 0 -- confirm against the table definition. */
+      while (RCQ15[k] < WebRtcIsacfix_kRcBound[--index[k]]) ;
+    }
+
+    /* Replace the coefficient by its dequantized level. */
+    RCQ15[k] = *(WebRtcIsacfix_kRcLevPtr[k] + index[k]);
+  }
+
+
+  /* entropy coding of quantization indices */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, index, WebRtcIsacfix_kRcCdfPtr, AR_ORDER);
+
+  /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */
+  return status;
+}
+
+
+/* decode & dequantize squared Gain */
+/* Decode & dequantize the squared pitch gain (Q10).
+ * Returns 0 on success or the negative error code from the entropy
+ * decoder. */
+int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata, WebRtc_Word32 *gainQ10)
+{
+  WebRtc_Word16 idx;
+  int status;
+
+  /* Entropy-decode the single quantization index. */
+  status = WebRtcIsacfix_DecHistOneStepMulti(&idx, streamdata,
+                                             WebRtcIsacfix_kGainPtr,
+                                             WebRtcIsacfix_kGainInitInd, 1);
+  if (status < 0) {
+    return status;
+  }
+
+  /* Table look-up: index -> squared-gain level (Q10). */
+  *gainQ10 = WebRtcIsacfix_kGain2Lev[idx];
+
+  return 0;
+}
+
+
+
+/* quantize & code squared Gain */
+/* Quantize & entropy-code the squared pitch gain.
+ * *gainQ10 is replaced in place by its dequantized level (Q10) so the
+ * encoder proceeds with exactly the value the decoder will reconstruct.
+ * Returns 0 on success or the negative error code from
+ * WebRtcIsacfix_EncHistMulti(). */
+int WebRtcIsacfix_EncodeGain2(WebRtc_Word32 *gainQ10, Bitstr_enc *streamdata)
+{
+  WebRtc_Word16 index;
+  int status = 0;
+
+  /* find quantization index: linear search up or down from the initial
+     index until the interval in kGain2Bound containing *gainQ10 is found */
+  index = WebRtcIsacfix_kGainInitInd[0];
+  if (*gainQ10 > WebRtcIsacfix_kGain2Bound[index])
+  {
+    while (*gainQ10 > WebRtcIsacfix_kGain2Bound[index + 1])
+      index++;
+  }
+  else
+  {
+    /* NOTE(review): decrements before testing; assumes kGain2Bound[0] is a
+       sentinel below any representable gain so index cannot underflow --
+       confirm against the table definition. */
+    while (*gainQ10 < WebRtcIsacfix_kGain2Bound[--index]) ;
+  }
+
+  /* dequantize */
+  *gainQ10 = WebRtcIsacfix_kGain2Lev[index];
+
+  /* entropy coding of quantization index */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, &index, WebRtcIsacfix_kGainPtr, 1);
+
+  /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */
+  return status;
+}
+
+
+/* code and decode Pitch Gains and Lags functions */
+
+/* decode & dequantize Pitch Gains */
+/* Decode & dequantize the four pitch gains (Q12).
+ * Decodes one combined index and expands it to the four subframe gains by
+ * table look-up.
+ * Returns 0 on success, -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN on a decoding
+ * error or an out-of-range combined index. */
+int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata, WebRtc_Word16 *PitchGains_Q12)
+{
+  int err;
+  WebRtc_Word16 index_comb;
+  const WebRtc_UWord16 *pitch_gain_cdf_ptr[1];
+
+  /* entropy decoding of quantization indices */
+  *pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf;
+  err = WebRtcIsacfix_DecHistBisectMulti(&index_comb, streamdata, pitch_gain_cdf_ptr, WebRtcIsacfix_kCdfTableSizeGain, 1);
+  /* Error check: the WebRtcIsacfix_kPitchGain1..4 tables have 144 entries,
+     so the largest valid index is 143. The previous test (index_comb > 144)
+     let index_comb == 144 through, reading one element past the end of each
+     table; use >= 144 instead. */
+  if ((err<0) || (index_comb<0) || (index_comb >= 144))
+    return -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN;
+
+  /* unquantize back to pitch gains by table look-up */
+  PitchGains_Q12[0] = WebRtcIsacfix_kPitchGain1[index_comb];
+  PitchGains_Q12[1] = WebRtcIsacfix_kPitchGain2[index_comb];
+  PitchGains_Q12[2] = WebRtcIsacfix_kPitchGain3[index_comb];
+  PitchGains_Q12[3] = WebRtcIsacfix_kPitchGain4[index_comb];
+
+  return 0;
+}
+
+
+/* quantize & code Pitch Gains */
+/* Quantize & entropy-code the four pitch gains.
+ * PitchGains_Q12 is replaced in place by the dequantized gains so the
+ * encoder proceeds with exactly the values the decoder will reconstruct.
+ * encData, when non-NULL, receives the combined index for later creation of
+ * multiple bitstreams.
+ * Returns 0 on success or the negative error code from
+ * WebRtcIsacfix_EncHistMulti(). */
+int WebRtcIsacfix_EncodePitchGain(WebRtc_Word16 *PitchGains_Q12, Bitstr_enc *streamdata, ISAC_SaveEncData_t* encData)
+{
+  int k,j;
+  WebRtc_Word16 SQ15[PITCH_SUBFRAMES];
+  WebRtc_Word16 index[3];
+  WebRtc_Word16 index_comb;
+  const WebRtc_UWord16 *pitch_gain_cdf_ptr[1];
+  WebRtc_Word32 CQ17;
+  int status = 0;
+
+
+  /* get the approximate arcsine (almost linear)*/
+  for (k=0; k<PITCH_SUBFRAMES; k++)
+    SQ15[k] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(PitchGains_Q12[k],33,2); //Q15
+
+
+  /* find quantization index; only for the first three transform coefficients */
+  for (k=0; k<3; k++)
+  {
+    /*  transform: project the four subframe gains onto basis vector k */
+    CQ17=0;
+    for (j=0; j<PITCH_SUBFRAMES; j++) {
+      CQ17 += WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIsacfix_kTransform[k][j], SQ15[j],10); // Q17
+    }
+
+    index[k] = (WebRtc_Word16)((CQ17 + 8192)>>14); // Rounding and scaling with stepsize (=1/0.125=8)
+
+    /* check that the index is not outside the boundaries of the table */
+    if (index[k] < WebRtcIsacfix_kLowerlimiGain[k]) index[k] = WebRtcIsacfix_kLowerlimiGain[k];
+    else if (index[k] > WebRtcIsacfix_kUpperlimitGain[k]) index[k] = WebRtcIsacfix_kUpperlimitGain[k];
+    index[k] -= WebRtcIsacfix_kLowerlimiGain[k];
+  }
+
+  /* calculate unique overall index; the per-coefficient clamping above
+     presumably keeps index_comb inside the 144-entry gain tables --
+     confirm against the kMultsGain / limit table values */
+  index_comb = (WebRtc_Word16)(WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[0], index[0]) +
+                               WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[1], index[1]) + index[2]);
+
+  /* unquantize back to pitch gains by table look-up, so encoder and decoder
+     use identical gain values from here on */
+  PitchGains_Q12[0] = WebRtcIsacfix_kPitchGain1[index_comb];
+  PitchGains_Q12[1] = WebRtcIsacfix_kPitchGain2[index_comb];
+  PitchGains_Q12[2] = WebRtcIsacfix_kPitchGain3[index_comb];
+  PitchGains_Q12[3] = WebRtcIsacfix_kPitchGain4[index_comb];
+
+
+  /* entropy coding of quantization pitch gains */
+  *pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf;
+  status = WebRtcIsacfix_EncHistMulti(streamdata, &index_comb, pitch_gain_cdf_ptr, 1);
+  if (status < 0) {
+    return status;
+  }
+
+  /* Save data for creation of multiple bitstreams */
+  if (encData != NULL) {
+    encData->pitchGain_index[encData->startIdx] = index_comb;
+  }
+
+  return 0;
+}
+
+
+
+/* Pitch LAG */
+
+
+/* decode & dequantize Pitch Lags */
+/* Decode & dequantize the four pitch lags (Q7).
+ * PitchGain_Q12 : in, the four already-decoded pitch gains; their mean
+ *                 selects the voicing-dependent codebooks and step size.
+ * PitchLags_Q7  : out, PITCH_SUBFRAMES reconstructed lags, Q7.
+ * Returns 0 on success, -ISAC_RANGE_ERROR_DECODE_PITCH_LAG on error. */
+int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata,
+                                 WebRtc_Word16 *PitchGain_Q12,
+                                 WebRtc_Word16 *PitchLags_Q7)
+{
+  int k, err;
+  WebRtc_Word16 index[PITCH_SUBFRAMES];
+  const WebRtc_Word16 *mean_val2Q10, *mean_val4Q10;
+
+  const WebRtc_Word16 *lower_limit;
+  const WebRtc_UWord16 *init_index;
+  const WebRtc_UWord16 *cdf_size;
+  const WebRtc_UWord16 **cdf;
+
+  WebRtc_Word32 meangainQ12;
+  WebRtc_Word32 CQ11, CQ10,tmp32a,tmp32b;
+  WebRtc_Word16 shft,tmp16a,tmp16c;
+
+  /* Mean of the four subframe pitch gains drives the voicing decision. */
+  meangainQ12=0;
+  for (k = 0; k < 4; k++)
+    meangainQ12 += PitchGain_Q12[k];
+
+  meangainQ12 = WEBRTC_SPL_RSHIFT_W32(meangainQ12, 2);  // Get average
+
+  /* voicing classification: pick codebooks and step size by mean gain */
+  if (meangainQ12 <= 819) {                 // mean_gain < 0.2
+    shft = -1;        // StepSize=2.0;
+    cdf = WebRtcIsacfix_kPitchLagPtrLo;
+    cdf_size = WebRtcIsacfix_kPitchLagSizeLo;
+    mean_val2Q10 = WebRtcIsacfix_kMeanLag2Lo;
+    mean_val4Q10 = WebRtcIsacfix_kMeanLag4Lo;
+    lower_limit = WebRtcIsacfix_kLowerLimitLo;
+    init_index = WebRtcIsacfix_kInitIndLo;
+  } else if (meangainQ12 <= 1638) {            // mean_gain < 0.4
+    shft = 0;        // StepSize=1.0;
+    cdf = WebRtcIsacfix_kPitchLagPtrMid;
+    cdf_size = WebRtcIsacfix_kPitchLagSizeMid;
+    mean_val2Q10 = WebRtcIsacfix_kMeanLag2Mid;
+    mean_val4Q10 = WebRtcIsacfix_kMeanLag4Mid;
+    lower_limit = WebRtcIsacfix_kLowerLimitMid;
+    init_index = WebRtcIsacfix_kInitIndMid;
+  } else {
+    shft = 1;        // StepSize=0.5;
+    cdf = WebRtcIsacfix_kPitchLagPtrHi;
+    cdf_size = WebRtcIsacfix_kPitchLagSizeHi;
+    mean_val2Q10 = WebRtcIsacfix_kMeanLag2Hi;
+    mean_val4Q10 = WebRtcIsacfix_kMeanLag4Hi;
+    lower_limit = WebRtcIsacfix_kLowerLimitHi;
+    init_index = WebRtcIsacfix_kInitIndHi;
+  }
+
+  /* entropy decoding of quantization indices: index[0] by bisection,
+     index[1..3] one-step */
+  err = WebRtcIsacfix_DecHistBisectMulti(index, streamdata, cdf, cdf_size, 1);
+  if ((err<0) || (index[0]<0))  // error check
+    return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG;
+
+  err = WebRtcIsacfix_DecHistOneStepMulti(index+1, streamdata, cdf+1, init_index, 3);
+  if (err<0)  // error check
+    return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG;
+
+  /* NOTE(review): index[1] and index[3] are used below as offsets into the
+     mean_val tables without an explicit range check; presumably the cdf
+     tables bound them -- confirm against the table sizes. index[2] is
+     decoded but not used in the reconstruction below. */
+
+  /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */
+  CQ11 = ((WebRtc_Word32)index[0] + lower_limit[0]);  // Q0
+  CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11
+  for (k=0; k<PITCH_SUBFRAMES; k++) {
+    tmp32a =  WEBRTC_SPL_MUL_16_32_RSFT11(WebRtcIsacfix_kTransform[0][k], CQ11);
+    tmp16a = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32a, 5); // Q12 -> Q7
+    PitchLags_Q7[k] = tmp16a;
+  }
+
+  /* add the mean contribution of the second transform coefficient */
+  CQ10 = mean_val2Q10[index[1]];
+  for (k=0; k<PITCH_SUBFRAMES; k++) {
+    tmp32b =  (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[1][k], (WebRtc_Word16) CQ10,10);
+    tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q12 -> Q7
+    PitchLags_Q7[k] += tmp16c;
+  }
+
+  /* add the mean contribution of the fourth transform coefficient */
+  CQ10 = mean_val4Q10[index[3]];
+  for (k=0; k<PITCH_SUBFRAMES; k++) {
+    tmp32b =  (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[3][k], (WebRtc_Word16) CQ10,10);
+    tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q12 -> Q7
+    PitchLags_Q7[k] += tmp16c;
+  }
+
+  return 0;
+}
+
+
+
+/* quantize & code Pitch Lags */
+/* Quantize & entropy-code the four pitch lags.
+ * PitchLagsQ7   : in/out, PITCH_SUBFRAMES lags in Q7; replaced in place by
+ *                 the dequantized lags so the encoder proceeds with exactly
+ *                 what the decoder will reconstruct.
+ * PitchGain_Q12 : in, the four pitch gains; their mean selects the
+ *                 voicing-dependent codebooks and step size (must match the
+ *                 decoder's classification in WebRtcIsacfix_DecodePitchLag).
+ * encData       : optional; when non-NULL the mean gain and the four
+ *                 indices are saved for later creation of multiple
+ *                 bitstreams.
+ * Returns 0 on success or the negative error code from
+ * WebRtcIsacfix_EncHistMulti(). */
+int WebRtcIsacfix_EncodePitchLag(WebRtc_Word16 *PitchLagsQ7,WebRtc_Word16 *PitchGain_Q12,
+                                 Bitstr_enc *streamdata, ISAC_SaveEncData_t* encData)
+{
+  int k, j;
+  WebRtc_Word16 index[PITCH_SUBFRAMES];
+  WebRtc_Word32 meangainQ12, CQ17;
+  WebRtc_Word32 CQ11, CQ10,tmp32a;
+
+  const WebRtc_Word16 *mean_val2Q10,*mean_val4Q10;
+  const WebRtc_Word16 *lower_limit, *upper_limit;
+  const WebRtc_UWord16 **cdf;
+  WebRtc_Word16 shft, tmp16a, tmp16b, tmp16c;
+  WebRtc_Word32 tmp32b;
+  int status = 0;
+
+  /* compute mean pitch gain */
+  meangainQ12=0;
+  for (k = 0; k < 4; k++)
+    meangainQ12 += PitchGain_Q12[k];
+
+  meangainQ12 = WEBRTC_SPL_RSHIFT_W32(meangainQ12, 2);
+
+  /* Save data for creation of multiple bitstreams */
+  if (encData != NULL) {
+    encData->meanGain[encData->startIdx] = meangainQ12;
+  }
+
+  /* voicing classification: pick codebooks and step size by mean gain */
+  if (meangainQ12 <= 819) {                 // mean_gain < 0.2
+    shft = -1;        // StepSize=2.0;
+    cdf = WebRtcIsacfix_kPitchLagPtrLo;
+    mean_val2Q10 = WebRtcIsacfix_kMeanLag2Lo;
+    mean_val4Q10 = WebRtcIsacfix_kMeanLag4Lo;
+    lower_limit = WebRtcIsacfix_kLowerLimitLo;
+    upper_limit = WebRtcIsacfix_kUpperLimitLo;
+  } else if (meangainQ12 <= 1638) {            // mean_gain < 0.4
+    shft = 0;        // StepSize=1.0;
+    cdf = WebRtcIsacfix_kPitchLagPtrMid;
+    mean_val2Q10 = WebRtcIsacfix_kMeanLag2Mid;
+    mean_val4Q10 = WebRtcIsacfix_kMeanLag4Mid;
+    lower_limit = WebRtcIsacfix_kLowerLimitMid;
+    upper_limit = WebRtcIsacfix_kUpperLimitMid;
+  } else {
+    shft = 1;        // StepSize=0.5;
+    cdf = WebRtcIsacfix_kPitchLagPtrHi;
+    mean_val2Q10 = WebRtcIsacfix_kMeanLag2Hi;
+    mean_val4Q10 = WebRtcIsacfix_kMeanLag4Hi;
+    lower_limit = WebRtcIsacfix_kLowerLimitHi;
+    upper_limit = WebRtcIsacfix_kUpperLimitHi;
+  }
+
+  /* find quantization index */
+  for (k=0; k<4; k++)
+  {
+    /*  transform: project the four lags onto basis vector k */
+    CQ17=0;
+    for (j=0; j<PITCH_SUBFRAMES; j++)
+      CQ17 += WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIsacfix_kTransform[k][j], PitchLagsQ7[j],2); // Q17
+
+    CQ17 = WEBRTC_SPL_SHIFT_W32(CQ17,shft); // Scale with StepSize
+
+    /* quantize: round to nearest step (+65536 = +0.5 in Q17) */
+    tmp16b = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(CQ17 + 65536, 17 );
+    index[k] =  tmp16b;
+
+    /* check that the index is not outside the boundaries of the table */
+    if (index[k] < lower_limit[k]) index[k] = lower_limit[k];
+    else if (index[k] > upper_limit[k]) index[k] = upper_limit[k];
+    index[k] -= lower_limit[k];
+
+    /* Save data for creation of multiple bitstreams */
+    if(encData != NULL) {
+      encData->pitchIndex[PITCH_SUBFRAMES*encData->startIdx + k] = index[k];
+    }
+  }
+
+  /* unquantize back to transform coefficients and do the inverse transform:
+     S = T'*C (must mirror WebRtcIsacfix_DecodePitchLag exactly) */
+  CQ11 = (index[0] + lower_limit[0]);  // Q0
+  CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11
+
+  for (k=0; k<PITCH_SUBFRAMES; k++) {
+    tmp32a =  WEBRTC_SPL_MUL_16_32_RSFT11(WebRtcIsacfix_kTransform[0][k], CQ11); // Q12
+    tmp16a = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32a, 5);// Q7
+    PitchLagsQ7[k] = tmp16a;
+  }
+
+  /* add the mean contribution of the second transform coefficient */
+  CQ10 = mean_val2Q10[index[1]];
+  for (k=0; k<PITCH_SUBFRAMES; k++) {
+    tmp32b =  (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[1][k], (WebRtc_Word16) CQ10,10);
+    tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q7
+    PitchLagsQ7[k] += tmp16c;
+  }
+
+  /* add the mean contribution of the fourth transform coefficient */
+  CQ10 = mean_val4Q10[index[3]];
+  for (k=0; k<PITCH_SUBFRAMES; k++) {
+    tmp32b =  (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[3][k], (WebRtc_Word16) CQ10,10);
+    tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q7
+    PitchLagsQ7[k] += tmp16c;
+  }
+
+  /* entropy coding of quantization pitch lags */
+  status = WebRtcIsacfix_EncHistMulti(streamdata, index, cdf, PITCH_SUBFRAMES);
+
+  /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */
+  return status;
+}
+
+
+
+/* Routines for inband signaling of bandwidth estimation */
+/* Histograms based on uniform distribution of indices */
+/* Move global variables later! */
+
+
+/* cdf array for frame length indicator: uniform over the symbol range.
+   NOTE(review): file-scope but not static; presumably only used within this
+   file -- consider making these three tables static (confirm no external
+   references first). */
+const WebRtc_UWord16 kFrameLenCdf[4] = {
+  0, 21845, 43690, 65535};
+
+/* pointer to cdf array for frame length indicator */
+const WebRtc_UWord16 *kFrameLenCdfPtr[1] = {kFrameLenCdf};
+
+/* initial cdf index for decoder of frame length indicator */
+const WebRtc_UWord16 kFrameLenInitIndex[1] = {1};
+
+
+/* Decode the frame-length indicator and translate it to a sample count.
+ * Sets *framesamples to 480 (30 ms) or 960 (60 ms).
+ * Returns 0 on success, -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH on a decoder
+ * error, or -ISAC_DISALLOWED_FRAME_MODE_DECODER for an unknown indicator. */
+int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec *streamdata,
+                                 WebRtc_Word16 *framesamples)
+{
+
+  WebRtc_Word16 mode;
+  int status;
+
+  /* Entropy-decode the frame-length indicator [1: 30 ms, 2: 60 ms]. */
+  status = WebRtcIsacfix_DecHistOneStepMulti(&mode, streamdata,
+                                             kFrameLenCdfPtr,
+                                             kFrameLenInitIndex, 1);
+  if (status < 0)
+    return -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH;
+
+  if (mode == 1) {
+    *framesamples = 480; /* 30ms */
+    return 0;
+  }
+  if (mode == 2) {
+    *framesamples = 960; /* 60ms */
+    return 0;
+  }
+
+  /* Unknown indicator; *framesamples is left untouched. */
+  return -ISAC_DISALLOWED_FRAME_MODE_DECODER;
+}
+
+
+/* Encode the frame length (480 or 960 samples) as a one-symbol indicator.
+ * Returns 0 on success, -ISAC_DISALLOWED_FRAME_MODE_ENCODER for an invalid
+ * length, or a negative error code from the entropy coder. */
+int WebRtcIsacfix_EncodeFrameLen(WebRtc_Word16 framesamples, Bitstr_enc *streamdata) {
+
+  WebRtc_Word16 mode;
+
+  /* Map the sample count onto the indicator [1: 480 samples, 2: 960]. */
+  if (framesamples == 480) {
+    mode = 1;
+  } else if (framesamples == 960) {
+    mode = 2;
+  } else {
+    return -ISAC_DISALLOWED_FRAME_MODE_ENCODER;
+  }
+
+  /* Entropy-encode the indicator. */
+  return WebRtcIsacfix_EncHistMulti(streamdata, &mode, kFrameLenCdfPtr, 1);
+}
+
+/* cdf array for estimated bandwidth: uniform over 24 symbols.
+   NOTE(review): file-scope but not static; presumably only used within this
+   file -- consider making these three tables static (confirm no external
+   references first). */
+const WebRtc_UWord16 kBwCdf[25] = {
+  0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037,
+  32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074,
+  62804, 65535};
+
+/* pointer to cdf array for estimated bandwidth */
+const WebRtc_UWord16 *kBwCdfPtr[1] = {kBwCdf};
+
+/* initial cdf index for decoder of estimated bandwidth*/
+const WebRtc_UWord16 kBwInitIndex[1] = {7};
+
+
+/* Decode the sender's bandwidth-estimate index (range [0..23]) into *BWno.
+ * Returns 0 on success, -ISAC_RANGE_ERROR_DECODE_BANDWIDTH on error. */
+int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata, WebRtc_Word16 *BWno) {
+
+  WebRtc_Word16 decoded_index;
+  int status;
+
+  /* Entropy-decode the sender's BW estimation index. */
+  status = WebRtcIsacfix_DecHistOneStepMulti(&decoded_index, streamdata,
+                                             kBwCdfPtr, kBwInitIndex, 1);
+  if (status < 0)
+    return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH;
+
+  *BWno = decoded_index;
+  return status;
+
+}
+
+
+/* Encode the receiver's bandwidth-estimate index (range [0..23]).
+ * Returns 0 on success or a negative error code from the entropy coder. */
+int WebRtcIsacfix_EncodeReceiveBandwidth(WebRtc_Word16 *BWno, Bitstr_enc *streamdata)
+{
+  /* One-symbol entropy encoding of the receiver's BW estimate. */
+  return WebRtcIsacfix_EncHistMulti(streamdata, BWno, kBwCdfPtr, 1);
+}
+
+/* estimate code length of LPC Coef */
+/* Compute the LPC-gain quantization indices without touching any encoder
+ * stream state -- used when estimating code length for transcoding.
+ * Mirrors the quantization path of WebRtcIsacfix_EstCodeLpcGain()
+ * (log gains, mean removal, KLT, rounding, clamping) but performs no
+ * entropy coding.
+ * gain_lo_hiQ17 : in, 2*SUBFRAMES interleaved lo/hi-band gains, Q17.
+ * index_gQQ     : out, KLT_ORDER_GAIN clamped quantization indices.
+ * NOTE(review): tmpcoeffs_gQ6 is WebRtc_Word32 here but WebRtc_Word16 in
+ * WebRtcIsacfix_EstCodeLpcGain(); presumably the WEBRTC_SPL_MUL_16_16 macro
+ * truncates to 16 bits either way -- confirm the two paths quantize
+ * identically. */
+void WebRtcIsacfix_TranscodeLpcCoef(WebRtc_Word32 *gain_lo_hiQ17,
+                                    WebRtc_Word16 *index_gQQ) {
+  int j, k, n;
+  WebRtc_Word16 posQQ, pos2QQ;
+  WebRtc_Word16  pos, pos2, posg, offsg, offs2, gainpos;
+  WebRtc_Word32 tmpcoeffs_gQ6[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+  WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+  WebRtc_Word32 sumQQ;
+
+
+  /* log gains, mean removal and scaling */
+  posg = 0;pos=0; gainpos=0;
+
+  for (k=0; k<SUBFRAMES; k++) {
+    /* log gains: one lo-band and one hi-band gain per subframe */
+
+    /* The input argument X to logN(X) is 2^17 times higher than the
+       input floating point argument Y to log(Y), since the X value
+       is a Q17 value. This can be compensated for after the call, by
+       subtracting a value Z for each Q-step. One Q-step means that
+       X gets 2 times higher, i.e. Z = logN(2)*256 = 0.693147180559*256 =
+       177.445678 should be subtracted (since logN() returns a Q8 value).
+       For a X value in Q17, the value 177.445678*17 = 3017 should be
+       subtracted */
+    tmpcoeffs_gQ6[posg] = CalcLogN(gain_lo_hiQ17[gainpos])-3017; //Q8
+    tmpcoeffs_gQ6[posg] -= WebRtcIsacfix_kMeansGainQ8[0][posg]; //Q8, but Q6 after not-needed mult. by 4
+    posg++; gainpos++;
+
+    tmpcoeffs_gQ6[posg] = CalcLogN(gain_lo_hiQ17[gainpos])-3017; //Q8
+    tmpcoeffs_gQ6[posg] -= WebRtcIsacfix_kMeansGainQ8[0][posg]; //Q8, but Q6 after not-needed mult. by 4
+    posg++; gainpos++;
+
+  }
+
+
+  /* KLT  */
+
+  /* left transform: 2x2 transform of each subframe's (lo, hi) gain pair */
+  offsg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = offsg;
+      pos2 = k;
+      for (n=0; n<2; n++) {
+        sumQQ += WEBRTC_SPL_MUL_16_16(tmpcoeffs_gQ6[pos], WebRtcIsacfix_kT1GainQ15[0][pos2]); //Q21 = Q6*Q15
+        pos++;
+        pos2 += 2;
+      }
+      tmpcoeffs2_gQ21[posg] = sumQQ;
+      posg++;
+    }
+
+    offsg += 2;
+  }
+
+  /* right transform: SUBFRAMES x SUBFRAMES transform across subframes */
+  offsg = 0;
+  offs2 = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<2; k++) {
+      sumQQ = 0;
+      pos = k;
+      pos2 = offs2;
+      for (n=0; n<SUBFRAMES; n++) {
+        sumQQ += WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT2GainQ15[0][pos2], tmpcoeffs2_gQ21[pos]), 1); // (Q15*Q21)>>(16-1) = Q21
+        pos += 2;
+        pos2++;
+      }
+      tmpcoeffs_gQ17[posg] = WEBRTC_SPL_RSHIFT_W32(sumQQ, 4); // Q21>>4 = Q17
+      posg++;
+    }
+    offsg += 2;
+    offs2 += SUBFRAMES;
+  }
+
+  /* quantize coefficients */
+  for (k=0; k<KLT_ORDER_GAIN; k++) //ATTN: ok?
+  {
+    /* Round the selected KLT coefficient to the nearest integer step
+       (Q17 -> Q0) and offset into the index range. */
+    posQQ = WebRtcIsacfix_kSelIndGain[k];
+    pos2QQ= (WebRtc_Word16)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
+
+    index_gQQ[k] = pos2QQ + WebRtcIsacfix_kQuantMinGain[k]; //ATTN: ok?
+    /* Clamp the index to the valid table range [0, kMaxIndGain[k]]. */
+    if (index_gQQ[k] < 0) {
+      index_gQQ[k] = 0;
+    }
+    else if (index_gQQ[k] > WebRtcIsacfix_kMaxIndGain[k]) {
+      index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k];
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/entropy_coding.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/entropy_coding.h
new file mode 100644
index 0000000..298ea22
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/entropy_coding.h
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * entropy_coding.h
+ *
+ * This header file contains all of the functions used to arithmetically
+ * encode the iSAC bitstream
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_
+
+#include "structs.h"
+
+/* decode complex spectrum (return number of bytes in stream) */
+WebRtc_Word16 WebRtcIsacfix_DecodeSpec(Bitstr_dec  *streamdata,
+                                       WebRtc_Word16 *frQ7,
+                                       WebRtc_Word16 *fiQ7,
+                                       WebRtc_Word16 AvgPitchGain_Q12);
+
+/* encode complex spectrum */
+int WebRtcIsacfix_EncodeSpec(const WebRtc_Word16 *fr,
+                             const WebRtc_Word16 *fi,
+                             Bitstr_enc *streamdata,
+                             WebRtc_Word16 AvgPitchGain_Q12);
+
+
+/* decode & dequantize LPC Coef */
+int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec  *streamdata,
+                                WebRtc_Word32 *LPCCoefQ17,
+                                WebRtc_Word32 *gain_lo_hiQ17,
+                                WebRtc_Word16 *outmodel);
+
+int WebRtcIsacfix_DecodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
+                            WebRtc_Word16 *LPCCoef_loQ15,
+                            WebRtc_Word16 *LPCCoef_hiQ15,
+                            Bitstr_dec  *streamdata,
+                            WebRtc_Word16 *outmodel);
+
+/* quantize & code LPC Coef */
+int WebRtcIsacfix_EncodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
+                            WebRtc_Word16 *LPCCoef_loQ15,
+                            WebRtc_Word16 *LPCCoef_hiQ15,
+                            WebRtc_Word16 *model,
+                            WebRtc_Word32 *sizeQ11,
+                            Bitstr_enc *streamdata,
+                            ISAC_SaveEncData_t* encData,
+                            transcode_obj *transcodeParam);
+
+int WebRtcIsacfix_EstCodeLpcGain(WebRtc_Word32 *gain_lo_hiQ17,
+                                 Bitstr_enc *streamdata,
+                                 ISAC_SaveEncData_t* encData);
+/* decode & dequantize RC */
+int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata,
+                               WebRtc_Word16 *RCQ15);
+
+/* quantize & code RC */
+int WebRtcIsacfix_EncodeRcCoef(WebRtc_Word16 *RCQ15,
+                               Bitstr_enc *streamdata);
+
+/* decode & dequantize squared Gain */
+int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata,
+                              WebRtc_Word32 *Gain2);
+
+/* quantize & code squared Gain (input is squared gain) */
+int WebRtcIsacfix_EncodeGain2(WebRtc_Word32 *gain2,
+                              Bitstr_enc *streamdata);
+
+int WebRtcIsacfix_EncodePitchGain(WebRtc_Word16 *PitchGains_Q12,
+                                  Bitstr_enc *streamdata,
+                                  ISAC_SaveEncData_t* encData);
+
+int WebRtcIsacfix_EncodePitchLag(WebRtc_Word16 *PitchLagQ7,
+                                 WebRtc_Word16 *PitchGain_Q12,
+                                 Bitstr_enc *streamdata,
+                                 ISAC_SaveEncData_t* encData);
+
+int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata,
+                                  WebRtc_Word16 *PitchGain_Q12);
+
+int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata,
+                                 WebRtc_Word16 *PitchGain_Q12,
+                                 WebRtc_Word16 *PitchLagQ7);
+
+int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec *streamdata,
+                                 WebRtc_Word16 *framelength);
+
+
+int WebRtcIsacfix_EncodeFrameLen(WebRtc_Word16 framelength,
+                                 Bitstr_enc *streamdata);
+
+int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata,
+                                      WebRtc_Word16 *BWno);
+
+
+int WebRtcIsacfix_EncodeReceiveBandwidth(WebRtc_Word16 *BWno,
+                                         Bitstr_enc *streamdata);
+
+void WebRtcIsacfix_TranscodeLpcCoef(WebRtc_Word32 *tmpcoeffs_gQ6,
+                                    WebRtc_Word16 *index_gQQ);
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/fft.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/fft.c
new file mode 100644
index 0000000..fff35c4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/fft.c
@@ -0,0 +1,415 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * fft.c
+ *
+ * Fast Fourier Transform
+ *
+ */
+
+
+#include "fft.h"
+
+const WebRtc_Word16 kSortTabFft[240] = { /* output permutation: the final loops of WebRtcIsacfix_FftRadix16Fastest copy element kSortTabFft[ii] to position ii to restore natural order after the mixed-radix passes */
+  0, 60, 120, 180, 20, 80, 140, 200, 40, 100, 160, 220,
+  4, 64, 124, 184, 24, 84, 144, 204, 44, 104, 164, 224,
+  8, 68, 128, 188, 28, 88, 148, 208, 48, 108, 168, 228,
+  12, 72, 132, 192, 32, 92, 152, 212, 52, 112, 172, 232,
+  16, 76, 136, 196, 36, 96, 156, 216, 56, 116, 176, 236,
+  1, 61, 121, 181, 21, 81, 141, 201, 41, 101, 161, 221,
+  5, 65, 125, 185, 25, 85, 145, 205, 45, 105, 165, 225,
+  9, 69, 129, 189, 29, 89, 149, 209, 49, 109, 169, 229,
+  13, 73, 133, 193, 33, 93, 153, 213, 53, 113, 173, 233,
+  17, 77, 137, 197, 37, 97, 157, 217, 57, 117, 177, 237,
+  2, 62, 122, 182, 22, 82, 142, 202, 42, 102, 162, 222,
+  6, 66, 126, 186, 26, 86, 146, 206, 46, 106, 166, 226,
+  10, 70, 130, 190, 30, 90, 150, 210, 50, 110, 170, 230,
+  14, 74, 134, 194, 34, 94, 154, 214, 54, 114, 174, 234,
+  18, 78, 138, 198, 38, 98, 158, 218, 58, 118, 178, 238,
+  3, 63, 123, 183, 23, 83, 143, 203, 43, 103, 163, 223,
+  7, 67, 127, 187, 27, 87, 147, 207, 47, 107, 167, 227,
+  11, 71, 131, 191, 31, 91, 151, 211, 51, 111, 171, 231,
+  15, 75, 135, 195, 35, 95, 155, 215, 55, 115, 175, 235,
+  19, 79, 139, 199, 39, 99, 159, 219, 59, 119, 179, 239
+};
+
+/* Cosine table in Q14 (16384 = 1.0): one full period sampled at 240 points. An offset of +60 entries (a quarter period) yields the matching sine value; the FFT below reads sines this way, e.g. sss1Q14 = kCosTabFfftQ14[kk+60]. */
+const WebRtc_Word16 kCosTabFfftQ14[240] = {
+  16384,  16378, 16362,   16333,  16294,  16244,  16182,  16110,  16026,  15931,  15826,  15709,
+  15582,  15444, 15296,   15137,  14968,  14788,  14598,  14399,  14189,  13970,  13741,  13502,
+  13255,  12998, 12733,   12458,  12176,  11885,  11585,  11278,  10963,  10641,  10311,   9974,
+  9630,   9280,  8923,    8561,   8192,   7818,   7438,   7053,   6664,   6270,   5872,   5469,
+  5063,   4653,  4240,    3825,   3406,   2986,   2563,   2139,   1713,   1285,    857,    429,
+  0,   -429,  -857,   -1285,  -1713,  -2139,  -2563,  -2986,  -3406,  -3825,  -4240,  -4653,
+  -5063,  -5469, -5872,   -6270,  -6664,  -7053,  -7438,  -7818,  -8192,  -8561,  -8923,  -9280,
+  -9630,  -9974, -10311, -10641, -10963, -11278, -11585, -11885, -12176, -12458, -12733, -12998,
+  -13255, -13502, -13741, -13970, -14189, -14399, -14598, -14788, -14968, -15137, -15296, -15444,
+  -15582, -15709, -15826, -15931, -16026, -16110, -16182, -16244, -16294, -16333, -16362, -16378,
+  -16384, -16378, -16362, -16333, -16294, -16244, -16182, -16110, -16026, -15931, -15826, -15709,
+  -15582, -15444, -15296, -15137, -14968, -14788, -14598, -14399, -14189, -13970, -13741, -13502,
+  -13255, -12998, -12733, -12458, -12176, -11885, -11585, -11278, -10963, -10641, -10311,  -9974,
+  -9630,  -9280,  -8923,  -8561,  -8192,  -7818,  -7438,  -7053,  -6664,  -6270,  -5872,  -5469,
+  -5063,  -4653,  -4240,  -3825,  -3406,  -2986,  -2563,  -2139,  -1713,  -1285,   -857,   -429,
+  0,    429,    857,   1285,   1713,   2139,   2563,   2986,   3406,   3825,   4240,   4653,
+  5063,   5469,   5872,   6270,   6664,   7053,   7438,   7818,   8192,   8561,   8923,   9280,
+  9630,   9974,  10311,  10641,  10963,  11278,  11585,  11885,  12176,  12458,  12733,  12998,
+  13255,  13502,  13741,  13970,  14189,  14399,  14598,  14788,  14968,  15137,  15296,  15444,
+  15582,  15709,  15826,  15931,  16026,  16110,  16182,  16244,  16294,  16333,  16362,  16378
+};
+
+
+
+/* 240-point in-place FFT on RexQx[]/ImxQx[] via mixed-radix passes (4, then 3, then 5, then a final 4) plus twiddle rotations; iSign selects the transform direction (the sine terms are negated for one direction). Uses 16x16 mul, without rounding, which is faster. Uses WEBRTC_SPL_MUL_16_16_RSFT. Always returns 0. */
+WebRtc_Word16 WebRtcIsacfix_FftRadix16Fastest(WebRtc_Word16 RexQx[], WebRtc_Word16 ImxQx[], WebRtc_Word16 iSign) {
+
+  WebRtc_Word16 dd, ee, ff, gg, hh, ii;
+  WebRtc_Word16 k0, k1, k2, k3, k4, kk;
+  WebRtc_Word16 tmp116, tmp216;
+
+  WebRtc_Word16 ccc1Q14, ccc2Q14, ccc3Q14, sss1Q14, sss2Q14, sss3Q14;
+  WebRtc_Word16 sss60Q14, ccc72Q14, sss72Q14;
+  WebRtc_Word16 aaQx, ajQx, akQx, ajmQx, ajpQx, akmQx, akpQx;
+  WebRtc_Word16 bbQx, bjQx, bkQx, bjmQx, bjpQx, bkmQx, bkpQx;
+
+  WebRtc_Word16 ReDATAQx[240],  ImDATAQx[240];
+
+  sss60Q14 = kCosTabFfftQ14[20];  /* sin/cos constants for the radix-3 and radix-5 butterflies */
+  ccc72Q14 = kCosTabFfftQ14[48];
+  sss72Q14 = kCosTabFfftQ14[12];
+
+  if (iSign < 0) {
+    sss72Q14 = -sss72Q14;
+    sss60Q14 = -sss60Q14;
+  }
+  /* Complexity is: 10 cycles */
+
+  /* compute fourier transform */
+
+  // transform for factor of 4
+  for (kk=0; kk<60; kk++) {
+    k0 = kk;
+    k1 = k0 + 60;
+    k2 = k1 + 60;
+    k3 = k2 + 60;
+
+    akpQx = RexQx[k0] + RexQx[k2];
+    akmQx = RexQx[k0] - RexQx[k2];
+    ajpQx = RexQx[k1] + RexQx[k3];
+    ajmQx = RexQx[k1] - RexQx[k3];
+    bkpQx = ImxQx[k0] + ImxQx[k2];
+    bkmQx = ImxQx[k0] - ImxQx[k2];
+    bjpQx = ImxQx[k1] + ImxQx[k3];
+    bjmQx = ImxQx[k1] - ImxQx[k3];
+
+    RexQx[k0] = akpQx + ajpQx;
+    ImxQx[k0] = bkpQx + bjpQx;
+    ajpQx = akpQx - ajpQx;
+    bjpQx = bkpQx - bjpQx;
+    if (iSign < 0) {
+      akpQx = akmQx + bjmQx;
+      bkpQx = bkmQx - ajmQx;
+      akmQx -= bjmQx;
+      bkmQx += ajmQx;
+    } else {
+      akpQx = akmQx - bjmQx;
+      bkpQx = bkmQx + ajmQx;
+      akmQx += bjmQx;
+      bkmQx -= ajmQx;
+    }
+
+    ccc1Q14 = kCosTabFfftQ14[kk];
+    ccc2Q14 = kCosTabFfftQ14[WEBRTC_SPL_MUL_16_16(2, kk)];
+    ccc3Q14 = kCosTabFfftQ14[WEBRTC_SPL_MUL_16_16(3, kk)];
+    sss1Q14 = kCosTabFfftQ14[kk+60];  /* +60 entries = quarter period, i.e. the sine value */
+    sss2Q14 = kCosTabFfftQ14[WEBRTC_SPL_MUL_16_16(2, kk)+60];
+    sss3Q14 = kCosTabFfftQ14[WEBRTC_SPL_MUL_16_16(3, kk)+60];
+    if (iSign==1) {
+      sss1Q14 = -sss1Q14;
+      sss2Q14 = -sss2Q14;
+      sss3Q14 = -sss3Q14;
+    }
+
+    //Do several multiplications like Q14*Q16>>14 = Q16
+    // RexQ16[k1] = akpQ16 * ccc1Q14 - bkpQ16 * sss1Q14;
+    // RexQ16[k2] = ajpQ16 * ccc2Q14 - bjpQ16 * sss2Q14;
+    // RexQ16[k3] = akmQ16 * ccc3Q14 - bkmQ16 * sss3Q14;
+    // ImxQ16[k1] = akpQ16 * sss1Q14 + bkpQ16 * ccc1Q14;
+    // ImxQ16[k2] = ajpQ16 * sss2Q14 + bjpQ16 * ccc2Q14;
+    // ImxQ16[k3] = akmQ16 * sss3Q14 + bkmQ16 * ccc3Q14;
+
+    RexQx[k1] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, akpQx, 14) -
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, bkpQx, 14); // 6 non-mul + 2 mul cycles, i.e. 8 cycles (6+2*7=20 cycles if 16x32mul)
+    RexQx[k2] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) -
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjpQx, 14);
+    RexQx[k3] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, akmQx, 14) -
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, bkmQx, 14);
+    ImxQx[k1] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, akpQx, 14) +
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, bkpQx, 14);
+    ImxQx[k2] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajpQx, 14) +
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14);
+    ImxQx[k3] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, akmQx, 14) +
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, bkmQx, 14);
+    //This mul segment needs 6*8 = 48 cycles for 16x16 muls, but 6*20 = 120 cycles for 16x32 muls
+
+
+  }
+  /* Complexity is: 51+48 = 99 cycles for 16x16 muls, but 51+120 = 171 cycles for 16x32 muls*/
+
+  // transform for factor of 3
+  kk=0;
+  k1=20;
+  k2=40;
+
+  for (hh=0; hh<4; hh++) {
+    for (ii=0; ii<20; ii++) {
+      akQx = RexQx[kk];
+      bkQx = ImxQx[kk];
+      ajQx = RexQx[k1] + RexQx[k2];
+      bjQx = ImxQx[k1] + ImxQx[k2];
+      RexQx[kk] = akQx + ajQx;
+      ImxQx[kk] = bkQx + bjQx;
+      tmp116 = WEBRTC_SPL_RSHIFT_W16(ajQx, 1);
+      tmp216 = WEBRTC_SPL_RSHIFT_W16(bjQx, 1);
+      akQx = akQx - tmp116;
+      bkQx = bkQx - tmp216;
+      tmp116 = RexQx[k1] - RexQx[k2];
+      tmp216 = ImxQx[k1] - ImxQx[k2];
+
+      ajQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp116, 14); // Q14*Qx>>14 = Qx
+      bjQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp216, 14); // Q14*Qx>>14 = Qx
+      RexQx[k1] = akQx - bjQx;
+      RexQx[k2] = akQx + bjQx;
+      ImxQx[k1] = bkQx + ajQx;
+      ImxQx[k2] = bkQx - ajQx;
+
+      kk++;
+      k1++;
+      k2++;
+    }
+    /* Complexity : (31+6)*20 = 740 cycles for 16x16 muls, but (31+18)*20 = 980 cycles for 16x32 muls*/
+    kk=kk+40;
+    k1=k1+40;
+    k2=k2+40;
+  }
+  /* Complexity : 4*(740+3) = 2972 cycles for 16x16 muls, but 4*(980+3) = 3932 cycles for 16x32 muls*/
+
+  /* multiply by rotation factor for odd factor 3 or 5 (not for 4)
+     Same code (duplicated) for both ii=2 and ii=3 */
+  kk = 1;
+  ee = 0;
+  ff = 0;
+
+  for (gg=0; gg<19; gg++) {
+    kk += 20;
+    ff = ff+4;
+    for (hh=0; hh<2; hh++) {
+      ee = ff + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(hh, ff);
+      dd = ee + 60;
+      ccc2Q14 = kCosTabFfftQ14[ee];
+      sss2Q14 = kCosTabFfftQ14[dd];
+      if (iSign==1) {
+        sss2Q14 = -sss2Q14;
+      }
+      for (ii=0; ii<4; ii++) {
+        akQx = RexQx[kk];
+        bkQx = ImxQx[kk];
+        RexQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - // Q14*Qx>>14 = Qx
+            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14);
+        ImxQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + // Q14*Qx>>14 = Qx
+            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14);
+
+
+        kk += 60;
+      }
+      kk = kk - 220;
+    }
+    // Complexity: 2*(13+5+4*13+2) = 144 for 16x16 muls, but 2*(13+5+4*33+2) = 304 cycles for 16x32 muls
+    kk = kk - 59;
+  }
+  // Complexity: 19*144 = 2736 for 16x16 muls, but 19*304 = 5776 cycles for 16x32 muls
+
+  // transform for factor of 5
+  kk = 0;
+  ccc2Q14 = kCosTabFfftQ14[96];
+  sss2Q14 = kCosTabFfftQ14[84];
+  if (iSign==1) {
+    sss2Q14 = -sss2Q14;
+  }
+
+  for (hh=0; hh<4; hh++) {
+    for (ii=0; ii<12; ii++) {
+      k1 = kk + 4;
+      k2 = k1 + 4;
+      k3 = k2 + 4;
+      k4 = k3 + 4;
+
+      akpQx = RexQx[k1] + RexQx[k4];
+      akmQx = RexQx[k1] - RexQx[k4];
+      bkpQx = ImxQx[k1] + ImxQx[k4];
+      bkmQx = ImxQx[k1] - ImxQx[k4];
+      ajpQx = RexQx[k2] + RexQx[k3];
+      ajmQx = RexQx[k2] - RexQx[k3];
+      bjpQx = ImxQx[k2] + ImxQx[k3];
+      bjmQx = ImxQx[k2] - ImxQx[k3];
+      aaQx = RexQx[kk];
+      bbQx = ImxQx[kk];
+      RexQx[kk] = aaQx + akpQx + ajpQx;
+      ImxQx[kk] = bbQx + bkpQx + bjpQx;
+
+      akQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, akpQx, 14) +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14)  + aaQx;
+      bkQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bkpQx, 14) +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14)  + bbQx;
+      ajQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, akmQx, 14) +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajmQx, 14);
+      bjQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bkmQx, 14) +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjmQx, 14);
+      // 32+4*8=64 or 32+4*20=112
+
+      RexQx[k1] = akQx - bjQx;
+      RexQx[k4] = akQx + bjQx;
+      ImxQx[k1] = bkQx + ajQx;
+      ImxQx[k4] = bkQx - ajQx;
+
+      akQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akpQx, 14)  +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, ajpQx, 14) + aaQx;
+      bkQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkpQx, 14)  +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bjpQx, 14) + bbQx;
+      ajQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akmQx, 14) -
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, ajmQx, 14);
+      bjQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkmQx, 14) -
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bjmQx, 14);
+      // 8+4*8=40 or 8+4*20=88
+
+      RexQx[k2] = akQx - bjQx;
+      RexQx[k3] = akQx + bjQx;
+      ImxQx[k2] = bkQx + ajQx;
+      ImxQx[k3] = bkQx - ajQx;
+
+      kk = k4 + 4;
+    }
+    // Complexity: 12*(64+40+10) = 1368 for 16x16 muls, but 12*(112+88+10) = 2520 cycles for 16x32 muls
+    kk -= 239;
+  }
+  // Complexity: 4*1368 = 5472 for 16x16 muls, but 4*2520 = 10080 cycles for 16x32 muls
+
+  /* multiply by rotation factor for odd factor 3 or 5 (not for 4)
+     Same code (duplicated) for both ii=2 and ii=3 */
+  kk = 1;
+  ee=0;
+
+  for (gg=0; gg<3; gg++) {
+    kk += 4;
+    dd = 12 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(12, gg);
+    ff = 0;
+    for (hh=0; hh<4; hh++) {
+      ff = ff+dd;
+      ee = ff+60;
+      for (ii=0; ii<12; ii++) {
+        akQx = RexQx[kk];
+        bkQx = ImxQx[kk];
+
+        ccc2Q14 = kCosTabFfftQ14[ff];
+        sss2Q14 = kCosTabFfftQ14[ee];
+
+        if (iSign==1) {
+          sss2Q14 = -sss2Q14;
+        }
+
+        RexQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) -
+            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14);
+        ImxQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) +
+            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14);
+
+        kk += 20;
+      }
+      kk = kk - 236;
+      // Complexity: 12*(12+12) = 288 for 16x16 muls, but 12*(12+32) = 528 cycles for 16x32 muls
+    }
+    kk = kk - 19;
+    // Complexity: 4*288+6 for 16x16 muls, but 4*528+6 cycles for 16x32 muls
+  }
+  // Complexity: 3*4*288+6 = 3462 for 16x16 muls, but 3*4*528+6 = 6342 cycles for 16x32 muls
+
+
+  // last transform for factor of 4
+  for (kk=0; kk<240; kk=kk+4) {
+    k1 = kk + 1;
+    k2 = k1 + 1;
+    k3 = k2 + 1;
+
+    akpQx = RexQx[kk] + RexQx[k2];
+    akmQx = RexQx[kk] - RexQx[k2];
+    ajpQx = RexQx[k1] + RexQx[k3];
+    ajmQx = RexQx[k1] - RexQx[k3];
+    bkpQx = ImxQx[kk] + ImxQx[k2];
+    bkmQx = ImxQx[kk] - ImxQx[k2];
+    bjpQx = ImxQx[k1] + ImxQx[k3];
+    bjmQx = ImxQx[k1] - ImxQx[k3];
+    RexQx[kk] = akpQx + ajpQx;
+    ImxQx[kk] = bkpQx + bjpQx;
+    ajpQx = akpQx - ajpQx;
+    bjpQx = bkpQx - bjpQx;
+    if (iSign < 0) {
+      akpQx = akmQx + bjmQx;
+      bkpQx = bkmQx - ajmQx;
+      akmQx -= bjmQx;
+      bkmQx += ajmQx;
+    } else {
+      akpQx = akmQx - bjmQx;
+      bkpQx = bkmQx + ajmQx;
+      akmQx += bjmQx;
+      bkmQx -= ajmQx;
+    }
+    RexQx[k1] = akpQx;
+    RexQx[k2] = ajpQx;
+    RexQx[k3] = akmQx;
+    ImxQx[k1] = bkpQx;
+    ImxQx[k2] = bjpQx;
+    ImxQx[k3] = bkmQx;
+  }
+  // Complexity: 60*45 = 2700 for 16x16 muls, but 60*45 = 2700 cycles for 16x32 muls
+
+  /* permute the results to normal order */
+  for (ii=0; ii<240; ii++) {
+    ReDATAQx[ii]=RexQx[ii];
+    ImDATAQx[ii]=ImxQx[ii];
+  }
+  // Complexity: 240*2=480 cycles
+
+  for (ii=0; ii<240; ii++) {
+    RexQx[ii]=ReDATAQx[kSortTabFft[ii]];
+    ImxQx[ii]=ImDATAQx[kSortTabFft[ii]];
+  }
+  // Complexity: 240*2*2=960 cycles
+
+  // Total complexity:
+  //            16x16 16x32
+  // Complexity:   10    10
+  // Complexity:   99   171
+  // Complexity: 2972  3932
+  // Complexity: 2736  5776
+  // Complexity: 5472 10080
+  // Complexity: 3462  6342
+  // Complexity: 2700  2700
+  // Complexity:  480   480
+  // Complexity:  960   960
+  // =======================
+  //            18891 30451
+  //
+  // If this FFT is called 2 time each frame, i.e. 67 times per second, it will correspond to
+  // a C54 complexity of 67*18891/1000000 = 1.27 MIPS with 16x16-muls, and 67*30451/1000000 =
+  // = 2.04 MIPS with 16x32-muls. Note that this routine sometimes is called 6 times during the
+  // encoding of a frame, i.e. the max complexity would be 7/2*1.27 = 4.4 MIPS for the 16x16 mul case.
+
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/fft.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/fft.h
new file mode 100644
index 0000000..efa116e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/fft.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*--------------------------------*-C-*---------------------------------*
+ * File:
+ * fft.h
+ * ---------------------------------------------------------------------*
+ * Re[]: real value array
+ * Im[]: imaginary value array
+ * nTotal: total number of complex values
+ * nPass: number of elements involved in this pass of transform
+ * nSpan: nspan/nPass = number of bytes to increment pointer
+ *  in Re[] and Im[]
+ * isign: exponent: +1 = forward  -1 = reverse
+ * scaling: normalizing constant by which the final result is *divided*
+ * scaling == -1, normalize by total dimension of the transform
+ * scaling <  -1, normalize by the square-root of the total dimension
+ *
+ * ----------------------------------------------------------------------
+ * See the comments in the code for correct usage!
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_
+
+
+#include "structs.h"
+
+
+WebRtc_Word16 WebRtcIsacfix_FftRadix16Fastest(WebRtc_Word16 RexQx[], WebRtc_Word16 ImxQx[], WebRtc_Word16 iSign);
+
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbank_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbank_tables.c
new file mode 100644
index 0000000..87c62aa
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbank_tables.c
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filterbank_tables.c
+ *
+ * This file contains variables that are used in
+ * filterbanks.c
+ *
+ */
+
+#include "filterbank_tables.h"
+#include "settings.h"
+
+
+/* HPstcoeff_in_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2};
+ * In float, they are:
+ * {-1.94895953203325f, 0.94984516000000f, -0.05101826139794f, 0.05015484000000f};
+ */
+const WebRtc_Word16 WebRtcIsacfix_kHpStCoeffInQ30[8] = { /* each coefficient stored as a (hi, lo) pair of 16-bit words -- NOTE(review): exact hi/lo packing convention not visible here; confirm in filterbanks.c */
+  -31932,  16189,  /* Q30 hi/lo pair */
+  15562,  17243,  /* Q30 hi/lo pair */
+  -26748, -17186,  /* Q35 hi/lo pair */
+  26296, -27476   /* Q35 hi/lo pair */
+};
+
+/* HPstcoeff_out_1_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2};
+ * In float, they are:
+ * {-1.99701049409000f, 0.99714204490000f, 0.01701049409000f, -0.01704204490000f};
+ */
+const WebRtc_Word16 WebRtcIsacfix_kHPStCoeffOut1Q30[8] = { /* (hi, lo) pairs, same layout as kHpStCoeffInQ30 above */
+  -32719, -1306,  /* Q30 hi/lo pair */
+  16337, 11486,  /* Q30 hi/lo pair */
+  8918, 26078,  /* Q35 hi/lo pair */
+  -8935,  3956   /* Q35 hi/lo pair */
+};
+
+/* HPstcoeff_out_2_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2};
+ * In float, they are:
+ * {-1.98645294509837f, 0.98672435560000f, 0.00645294509837f, -0.00662435560000f};
+ */
+const WebRtc_Word16 WebRtcIsacfix_kHPStCoeffOut2Q30[8] = { /* (hi, lo) pairs, same layout as kHpStCoeffInQ30 above */
+  -32546, -2953,  /* Q30 hi/lo pair */
+  16166, 32233,  /* Q30 hi/lo pair */
+  3383, 13217,  /* Q35 hi/lo pair */
+  -3473, -4597   /* Q35 hi/lo pair */
+};
+
+/* The upper channel all-pass filter factors (Q15; one factor per all-pass section) */
+const WebRtc_Word16 WebRtcIsacfix_kUpperApFactorsQ15[2] = {
+  1137, 12537
+};
+
+/* The lower channel all-pass filter factors (Q15; one factor per all-pass section) */
+const WebRtc_Word16 WebRtcIsacfix_kLowerApFactorsQ15[2] = {
+  5059, 24379
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbank_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbank_tables.h
new file mode 100644
index 0000000..b6be4f0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbank_tables.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filterbank_tables.h
+ *
+ * Header file for variables that are defined in
+ * filterbank_tables.c.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_
+
+#include "typedefs.h"
+
+/********************* Coefficient Tables ************************/
+
+/* HPstcoeff_in_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */
+extern const WebRtc_Word16 WebRtcIsacfix_kHpStCoeffInQ30[8]; /* [Q30hi Q30lo Q30hi Q30lo Q35hi Q35lo Q35hi Q35lo] */
+
+/* HPstcoeff_out_1_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */
+extern const WebRtc_Word16 WebRtcIsacfix_kHPStCoeffOut1Q30[8]; /* [Q30hi Q30lo Q30hi Q30lo Q35hi Q35lo Q35hi Q35lo] */
+
+/* HPstcoeff_out_2_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */
+extern const WebRtc_Word16 WebRtcIsacfix_kHPStCoeffOut2Q30[8]; /* [Q30hi Q30lo Q30hi Q30lo Q35hi Q35lo Q35hi Q35lo] */
+
+/* The upper channel all-pass filter factors */
+extern const WebRtc_Word16 WebRtcIsacfix_kUpperApFactorsQ15[2];
+
+/* The lower channel all-pass filter factors */
+extern const WebRtc_Word16 WebRtcIsacfix_kLowerApFactorsQ15[2];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbanks.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbanks.c
new file mode 100644
index 0000000..a53aa66
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filterbanks.c
@@ -0,0 +1,326 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filterbanks.c
+ *
+ * This file contains the functions
+ * WebRtcIsacfix_SplitAndFilter and WebRtcIsacfix_FilterAndCombine,
+ * which implement filterbanks that produce decimated lowpass and
+ * highpass versions of a signal, and performs reconstruction.
+ *
+ */
+
+#include "codec.h"
+#include "filterbank_tables.h"
+#include "settings.h"
+
+
+/* Cascade of first-order all-pass sections, applied in place.
+ *
+ * For each section j with coefficient c_j and state s_j, each sample is
+ * updated as:
+ *   y[n] = c_j * x[n] + s_j        (output, taken from the high word of b)
+ *   s_j  = -c_j * y[n] + x[n]      (new state)
+ * Intermediate values are kept in Q16; the result is written back to
+ * InOut16 in Q0.  Saturating adds guard against 32-bit overflow.
+ *
+ * InOut16          : [in/out] signal, Q0, filtered in place.
+ * APSectionFactors : [in]     per-section coefficients c_j, Q15.
+ * lengthInOut      : [in]     number of samples in InOut16.
+ * NumberOfSections : [in]     number of cascaded sections.
+ * FilterState      : [in/out] one Q16 state word per section; carried
+ *                             across calls by the caller.
+ */
+static void AllpassFilter2FixDec16(WebRtc_Word16 *InOut16, //Q0
+                                   const WebRtc_Word16 *APSectionFactors, //Q15
+                                   WebRtc_Word16 lengthInOut,
+                                   WebRtc_Word16 NumberOfSections,
+                                   WebRtc_Word32 *FilterState) //Q16
+{
+  int n, j;
+  WebRtc_Word32 a, b;
+
+  for (j=0; j<NumberOfSections; j++) {
+    for (n=0;n<lengthInOut;n++) {
+
+
+      a = WEBRTC_SPL_MUL_16_16(APSectionFactors[j], InOut16[n]); //Q15*Q0=Q15
+      a = WEBRTC_SPL_LSHIFT_W32(a, 1); // Q15 -> Q16
+      b = WEBRTC_SPL_ADD_SAT_W32(a, FilterState[j]); //Q16+Q16=Q16
+      a = WEBRTC_SPL_MUL_16_16_RSFT(-APSectionFactors[j], (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(b, 16), 0); //Q15*Q0=Q15
+      FilterState[j] = WEBRTC_SPL_ADD_SAT_W32(WEBRTC_SPL_LSHIFT_W32(a,1), WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)InOut16[n],16)); // Q15<<1 + Q0<<16 = Q16 + Q16 = Q16
+      InOut16[n] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(b, 16); //Save as Q0
+
+    }
+  }
+
+}
+
+
+/* Second-order (biquad) high-pass filter, processing io[] in place in Q0.
+ *
+ * The coefficient table packs four 32-bit values as [hi, lo] pairs of
+ * 16-bit words, consumed pairwise by WEBRTC_SPL_MUL_32_32_RSFT32:
+ *   pairs 0-1 : a1, a2 in Q30            (state recursion)
+ *   pairs 2-3 : b1 - b0*a1, b2 - b0*a2 in Q35  (output path)
+ * (layout documented in filterbank_tables.h).
+ *
+ * state[0] is the newest state word, state[1] the older one, both in Q4.
+ * Before being rescaled to Q4 the new state is saturated to the 30-bit
+ * range [-2^29, 2^29-1] so the subsequent <<2 cannot wrap.
+ */
+static void HighpassFilterFixDec32(
+    WebRtc_Word16 *io,   /* Q0:input Q0: Output */
+    WebRtc_Word16 len, /* length of input, Input */
+    const WebRtc_Word16 *coeff, /* Coeff: [Q30hi Q30lo Q30hi Q30lo Q35hi Q35lo Q35hi Q35lo] */
+    WebRtc_Word32 *state) /* Q4:filter state Input/Output */
+{
+  int k;
+  WebRtc_Word32 a, b, c, in;
+
+
+
+  for (k=0; k<len; k++) {
+    in = (WebRtc_Word32)io[k];
+    /* Q35 * Q4 = Q39 ; shift 32 bit => Q7 */
+    a = WEBRTC_SPL_MUL_32_32_RSFT32(coeff[2*2], coeff[2*2+1], state[0]);
+    b = WEBRTC_SPL_MUL_32_32_RSFT32(coeff[2*3], coeff[2*3+1], state[1]);
+
+    /* Output: input plus the b-term contribution of the two states. */
+    c = ((WebRtc_Word32)in) + WEBRTC_SPL_RSHIFT_W32(a+b, 7); // Q0
+    //c = WEBRTC_SPL_RSHIFT_W32(c, 1); // Q-1
+    io[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(c);  // Write output as Q0
+
+    /* Q30 * Q4 = Q34 ; shift 32 bit => Q2 */
+    a = WEBRTC_SPL_MUL_32_32_RSFT32(coeff[2*0], coeff[2*0+1], state[0]);
+    b = WEBRTC_SPL_MUL_32_32_RSFT32(coeff[2*1], coeff[2*1+1], state[1]);
+
+    c = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)in, 2) - a - b; // New state in Q2
+    c= (WebRtc_Word32)WEBRTC_SPL_SAT((WebRtc_Word32)536870911, c, (WebRtc_Word32)-536870912); // Check for wrap-around
+
+    /* Shift the delay line: old newest becomes oldest. */
+    state[1] = state[0];
+    state[0] = WEBRTC_SPL_LSHIFT_W32(c, 2); // Write state as Q4
+
+  }
+}
+
+
+/* Analysis filterbank: split one FRAMESAMPLES-long frame into decimated
+ * low-pass and high-pass halves, each FRAMESAMPLES/2 + QLOOKAHEAD long.
+ *
+ * pin         : [in/out] FRAMESAMPLES input samples, Q0.  NOTE: this
+ *                        buffer is high-pass pre-filtered IN PLACE below.
+ * LP16, HP16  : [out]    decimated low-/high-pass outputs, Q0.
+ * prefiltdata : [in/out] all-pass filter states plus the QLOOKAHEAD-sample
+ *                        look-ahead buffers carried between frames.
+ */
+void WebRtcIsacfix_SplitAndFilter1(WebRtc_Word16 *pin,
+                                   WebRtc_Word16 *LP16,
+                                   WebRtc_Word16 *HP16,
+                                   PreFiltBankstr *prefiltdata)
+{
+  /* Function WebRtcIsacfix_SplitAndFilter */
+  /* This function creates low-pass and high-pass decimated versions of part of
+     the input signal, and part of the signal in the input 'lookahead buffer'. */
+
+  int k;
+
+  WebRtc_Word16 tempin_ch1[FRAMESAMPLES/2 + QLOOKAHEAD];
+  WebRtc_Word16 tempin_ch2[FRAMESAMPLES/2 + QLOOKAHEAD];
+  WebRtc_Word32 tmpState[WEBRTC_SPL_MUL_16_16(2,(QORDER-1))]; /* 4 */
+
+
+  /* High pass filter */
+  HighpassFilterFixDec32(pin, FRAMESAMPLES, WebRtcIsacfix_kHpStCoeffInQ30, prefiltdata->HPstates_fix);
+
+
+  /* First Channel */
+  /* Odd-indexed samples feed the upper (first) channel, preceded by the
+     QLOOKAHEAD samples saved from the previous frame; the tail of this
+     frame is saved for the next call. */
+  for (k=0;k<FRAMESAMPLES/2;k++) {
+    tempin_ch1[QLOOKAHEAD + k] = pin[1+WEBRTC_SPL_MUL_16_16(2, k)];
+  }
+  for (k=0;k<QLOOKAHEAD;k++) {
+    tempin_ch1[k]=prefiltdata->INLABUF1_fix[k];
+    prefiltdata->INLABUF1_fix[k]=pin[FRAMESAMPLES+1-WEBRTC_SPL_MUL_16_16(2, QLOOKAHEAD)+WEBRTC_SPL_MUL_16_16(2, k)];
+  }
+
+  /* Second Channel.  This is exactly like the first channel, except that the
+     even samples are now filtered instead (lower channel). */
+  for (k=0;k<FRAMESAMPLES/2;k++) {
+    tempin_ch2[QLOOKAHEAD+k] = pin[WEBRTC_SPL_MUL_16_16(2, k)];
+  }
+  for (k=0;k<QLOOKAHEAD;k++) {
+    tempin_ch2[k]=prefiltdata->INLABUF2_fix[k];
+    prefiltdata->INLABUF2_fix[k]=pin[FRAMESAMPLES-WEBRTC_SPL_MUL_16_16(2, QLOOKAHEAD)+WEBRTC_SPL_MUL_16_16(2, k)];
+  }
+
+
+  /*obtain polyphase components by forward all-pass filtering through each channel */
+  /* The all pass filtering automatically updates the filter states which are exported in the
+     prefiltdata structure */
+  AllpassFilter2FixDec16(tempin_ch1,WebRtcIsacfix_kUpperApFactorsQ15, FRAMESAMPLES/2 , NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTAT1_fix);
+  AllpassFilter2FixDec16(tempin_ch2,WebRtcIsacfix_kLowerApFactorsQ15, FRAMESAMPLES/2 , NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTAT2_fix);
+
+  /* Filter the look-ahead tails on a COPY of the states so the persistent
+     states are not advanced past the current frame. */
+  for (k=0;k<WEBRTC_SPL_MUL_16_16(2, (QORDER-1));k++)
+    tmpState[k] = prefiltdata->INSTAT1_fix[k];
+  AllpassFilter2FixDec16(tempin_ch1 + FRAMESAMPLES/2,WebRtcIsacfix_kUpperApFactorsQ15, QLOOKAHEAD , NUMBEROFCHANNELAPSECTIONS, tmpState);
+  for (k=0;k<WEBRTC_SPL_MUL_16_16(2, (QORDER-1));k++)
+    tmpState[k] = prefiltdata->INSTAT2_fix[k];
+  AllpassFilter2FixDec16(tempin_ch2 + FRAMESAMPLES/2,WebRtcIsacfix_kLowerApFactorsQ15, QLOOKAHEAD , NUMBEROFCHANNELAPSECTIONS, tmpState);
+
+
+  /* Now Construct low-pass and high-pass signals as combinations of polyphase components */
+  /* LP = (ch1 + ch2)/2, HP = (ch1 - ch2)/2, both saturated to 16 bits. */
+  for (k=0; k<FRAMESAMPLES/2 + QLOOKAHEAD; k++) {
+    WebRtc_Word32 tmp1, tmp2, tmp3;
+    tmp1 = (WebRtc_Word32)tempin_ch1[k]; // Q0 -> Q0
+    tmp2 = (WebRtc_Word32)tempin_ch2[k]; // Q0 -> Q0
+    tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 + tmp2), 1);/* low pass signal*/
+    LP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */
+    tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 - tmp2), 1);/* high pass signal*/
+    HP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */
+  }
+
+}/* end of WebRtcIsacfix_SplitAndFilter1 */
+
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+
+/* Without lookahead */
+/* Same analysis filterbank as WebRtcIsacfix_SplitAndFilter1, but with no
+ * look-ahead handling: only FRAMESAMPLES/2 output samples per band, and no
+ * INLABUF/tmpState bookkeeping.  Only compiled for narrowband API builds
+ * (WEBRTC_ISAC_FIX_NB_CALLS_ENABLED).
+ *
+ * pin         : [in/out] FRAMESAMPLES input samples, Q0; high-pass
+ *                        pre-filtered IN PLACE.
+ * LP16, HP16  : [out]    decimated low-/high-pass outputs, Q0.
+ * prefiltdata : [in/out] filter states carried between frames.
+ */
+void WebRtcIsacfix_SplitAndFilter2(WebRtc_Word16 *pin,
+                                   WebRtc_Word16 *LP16,
+                                   WebRtc_Word16 *HP16,
+                                   PreFiltBankstr *prefiltdata)
+{
+  /* Function WebRtcIsacfix_SplitAndFilter2 */
+  /* This function creates low-pass and high-pass decimated versions of part of
+     the input signal. */
+
+  int k;
+
+  WebRtc_Word16 tempin_ch1[FRAMESAMPLES/2];
+  WebRtc_Word16 tempin_ch2[FRAMESAMPLES/2];
+
+
+  /* High pass filter */
+  HighpassFilterFixDec32(pin, FRAMESAMPLES, WebRtcIsacfix_kHpStCoeffInQ30, prefiltdata->HPstates_fix);
+
+
+  /* First Channel: odd-indexed samples (upper channel). */
+  for (k=0;k<FRAMESAMPLES/2;k++) {
+    tempin_ch1[k] = pin[1+WEBRTC_SPL_MUL_16_16(2, k)];
+  }
+
+  /* Second Channel.  This is exactly like the first channel, except that the
+     even samples are now filtered instead (lower channel). */
+  for (k=0;k<FRAMESAMPLES/2;k++) {
+    tempin_ch2[k] = pin[WEBRTC_SPL_MUL_16_16(2, k)];
+  }
+
+
+  /*obtain polyphase components by forward all-pass filtering through each channel */
+  /* The all pass filtering automatically updates the filter states which are exported in the
+     prefiltdata structure */
+  AllpassFilter2FixDec16(tempin_ch1,WebRtcIsacfix_kUpperApFactorsQ15, FRAMESAMPLES/2 , NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTAT1_fix);
+  AllpassFilter2FixDec16(tempin_ch2,WebRtcIsacfix_kLowerApFactorsQ15, FRAMESAMPLES/2 , NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTAT2_fix);
+
+
+  /* Now Construct low-pass and high-pass signals as combinations of polyphase components */
+  /* LP = (ch1 + ch2)/2, HP = (ch1 - ch2)/2, both saturated to 16 bits. */
+  for (k=0; k<FRAMESAMPLES/2; k++) {
+    WebRtc_Word32 tmp1, tmp2, tmp3;
+    tmp1 = (WebRtc_Word32)tempin_ch1[k]; // Q0 -> Q0
+    tmp2 = (WebRtc_Word32)tempin_ch2[k]; // Q0 -> Q0
+    tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 + tmp2), 1);/* low pass signal*/
+    LP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */
+    tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 - tmp2), 1);/* high pass signal*/
+    HP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */
+  }
+
+}/* end of WebRtcIsacfix_SplitAndFilter2 */
+
+#endif
+
+
+
+//////////////////////////////////////////////////////////
+////////// Combining
+/* Function WebRtcIsacfix_FilterAndCombine1 */
+/* Decoder-side synthesis filterbank: takes two decimated
+   length-FRAMESAMPLES/2 channel signals and reconstructs a fullband
+   output frame of length FRAMESAMPLES.  Sibling function of
+   WebRtcIsacfix_SplitAndFilter1. */
+/* INPUTS:
+   tempin_ch1: length FRAMESAMPLES/2 upper-channel signal; all-pass
+   filtered IN PLACE below.
+   tempin_ch2: length FRAMESAMPLES/2 lower-channel signal; all-pass
+   filtered IN PLACE below.
+   postfiltdata: input data structure containing the filterbank
+   states from the previous decoding iteration.
+   OUTPUTS:
+   out16: a length FRAMESAMPLES array of output reconstructed
+   samples (fullband), Q0.
+   postfiltdata: the input data structure containing the filterbank
+   states is updated for the next decoding iteration */
+void WebRtcIsacfix_FilterAndCombine1(WebRtc_Word16 *tempin_ch1,
+                                     WebRtc_Word16 *tempin_ch2,
+                                     WebRtc_Word16 *out16,
+                                     PostFiltBankstr *postfiltdata)
+{
+  int k;
+  WebRtc_Word16 in[FRAMESAMPLES];
+
+  /* all-pass filter the new upper channel signal. HOWEVER, use the all-pass filter factors
+     that were used as a lower channel at the encoding side.  So at the decoder, the
+     corresponding all-pass filter factors for each channel are swapped.*/
+
+  AllpassFilter2FixDec16(tempin_ch1, WebRtcIsacfix_kLowerApFactorsQ15, FRAMESAMPLES/2, NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_UPPER_fix);
+
+  /* Now, all-pass filter the new lower channel signal. But since all-pass filter factors
+     at the decoder are swapped from the ones at the encoder, the 'upper' channel
+     all-pass filter factors (kUpperApFactors) are used to filter this new lower channel signal */
+
+  AllpassFilter2FixDec16(tempin_ch2, WebRtcIsacfix_kUpperApFactorsQ15, FRAMESAMPLES/2, NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_LOWER_fix);
+
+  /* Merge outputs to form the full length output signal.*/
+  /* Lower channel goes to even indices, upper channel to odd indices
+     (inverse of the interleave used by SplitAndFilter). */
+  for (k=0;k<FRAMESAMPLES/2;k++) {
+    in[WEBRTC_SPL_MUL_16_16(2, k)]=tempin_ch2[k];
+    in[WEBRTC_SPL_MUL_16_16(2, k)+1]=tempin_ch1[k];
+  }
+
+  /* High pass filter */
+  /* Two cascaded biquad high-pass stages, each with its own state. */
+  HighpassFilterFixDec32(in, FRAMESAMPLES, WebRtcIsacfix_kHPStCoeffOut1Q30, postfiltdata->HPstates1_fix);
+  HighpassFilterFixDec32(in, FRAMESAMPLES, WebRtcIsacfix_kHPStCoeffOut2Q30, postfiltdata->HPstates2_fix);
+
+  for (k=0;k<FRAMESAMPLES;k++) {
+    out16[k] = in[k];
+  }
+}
+
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+/* Function WebRtcIsacfix_FilterAndCombine2 */
+/* Decoder-side synthesis filterbank: takes two decimated length-len/2
+   channel signals and reconstructs a fullband output signal of length
+   len.  Sibling function of WebRtcIsacfix_SplitAndFilter2; only compiled
+   for narrowband API builds (WEBRTC_ISAC_FIX_NB_CALLS_ENABLED). */
+/* INPUTS:
+   tempin_ch1: length len/2 upper-channel signal; all-pass filtered
+   IN PLACE below.
+   tempin_ch2: length len/2 lower-channel signal; all-pass filtered
+   IN PLACE below.
+   postfiltdata: input data structure containing the filterbank
+   states from the previous decoding iteration.
+   len: fullband output length in samples.
+   NOTE(review): the scratch buffer 'in' below is a fixed
+   FRAMESAMPLES-long stack array, so this function assumes
+   len <= FRAMESAMPLES -- confirm against callers.
+   OUTPUTS:
+   out16: a length len array of output reconstructed
+   samples (fullband), Q0.
+   postfiltdata: the input data structure containing the filterbank
+   states is updated for the next decoding iteration */
+void WebRtcIsacfix_FilterAndCombine2(WebRtc_Word16 *tempin_ch1,
+                                     WebRtc_Word16 *tempin_ch2,
+                                     WebRtc_Word16 *out16,
+                                     PostFiltBankstr *postfiltdata,
+                                     WebRtc_Word16 len)
+{
+  int k;
+  WebRtc_Word16 in[FRAMESAMPLES];
+
+  /* all-pass filter the new upper channel signal. HOWEVER, use the all-pass filter factors
+     that were used as a lower channel at the encoding side.  So at the decoder, the
+     corresponding all-pass filter factors for each channel are swapped.*/
+
+  AllpassFilter2FixDec16(tempin_ch1, WebRtcIsacfix_kLowerApFactorsQ15,(WebRtc_Word16) (len/2), NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_UPPER_fix);
+
+  /* Now, all-pass filter the new lower channel signal. But since all-pass filter factors
+     at the decoder are swapped from the ones at the encoder, the 'upper' channel
+     all-pass filter factors (kUpperApFactors) are used to filter this new lower channel signal */
+
+  AllpassFilter2FixDec16(tempin_ch2, WebRtcIsacfix_kUpperApFactorsQ15, (WebRtc_Word16) (len/2), NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_LOWER_fix);
+
+  /* Merge outputs to form the full length output signal.*/
+  /* Lower channel to even indices, upper channel to odd indices. */
+  for (k=0;k<len/2;k++) {
+    in[WEBRTC_SPL_MUL_16_16(2, k)]=tempin_ch2[k];
+    in[WEBRTC_SPL_MUL_16_16(2, k)+1]=tempin_ch1[k];
+  }
+
+  /* High pass filter */
+  /* Two cascaded biquad high-pass stages, each with its own state. */
+  HighpassFilterFixDec32(in, len, WebRtcIsacfix_kHPStCoeffOut1Q30, postfiltdata->HPstates1_fix);
+  HighpassFilterFixDec32(in, len, WebRtcIsacfix_kHPStCoeffOut2Q30, postfiltdata->HPstates2_fix);
+
+  for (k=0;k<len;k++) {
+    out16[k] = in[k];
+  }
+}
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filters.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filters.c
new file mode 100644
index 0000000..6ee0477
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filters.c
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filters.c
+ *
+ * This file contains function WebRtcIsacfix_AutocorrC,
+ * AllpassFilterForDec32, and WebRtcIsacfix_DecimateAllpass32
+ *
+ */
+
+#include <string.h>
+
+#include "pitch_estimator.h"
+#include "lpc_masking_model.h"
+#include "codec.h"
+
+// Autocorrelation function in fixed point.
+// NOTE! Different from SPLIB-version in how it scales the signal.
+//
+// Computes r[lag] = sum_{j} x[j] * x[j + lag] for lag = 0..order, using a
+// 64-bit accumulator.  A right shift 'scaling' is chosen so that r[0]
+// (the largest lag by magnitude) fits in a signed 32-bit word; the same
+// shift is then applied to every lag, and reported through *scale.
+//
+// r     : [out] order+1 autocorrelation values, right-shifted by *scale.
+// x     : [in]  N input samples.
+// N     : [in]  number of samples.
+// order : [in]  highest lag to compute.
+// scale : [out] the shift applied to all results.
+// Returns order + 1 (the number of values written to r).
+int WebRtcIsacfix_AutocorrC(WebRtc_Word32* __restrict r,
+                            const WebRtc_Word16* __restrict x,
+                            WebRtc_Word16 N,
+                            WebRtc_Word16 order,
+                            WebRtc_Word16* __restrict scale) {
+  int i = 0;
+  int j = 0;
+  int16_t scaling = 0;
+  int32_t sum = 0;
+  uint32_t temp = 0;
+  int64_t prod = 0;
+
+  // Calculate r[0].
+  for (i = 0; i < N; i++) {
+    prod += WEBRTC_SPL_MUL_16_16(x[i], x[i]);
+  }
+
+  // Calculate scaling (the value of shifting).
+  // temp holds the bits of prod above bit 30; if any are set, shift just
+  // enough to bring prod back into signed 32-bit range.
+  temp = (uint32_t)(prod >> 31);
+  if(temp == 0) {
+    scaling = 0;
+  } else {
+    scaling = 32 - WebRtcSpl_NormU32(temp);
+  }
+  r[0] = (int32_t)(prod >> scaling);
+
+  // Perform the actual correlation calculation.
+  // |r[i]| <= r[0] for an autocorrelation, so the shift chosen for r[0]
+  // keeps every lag within 32 bits as well.
+  for (i = 1; i < order + 1; i++) {
+    prod = 0;
+    for (j = 0; j < N - i; j++) {
+      prod += WEBRTC_SPL_MUL_16_16(x[j], x[i + j]);
+    }
+    sum = (int32_t)(prod >> scaling);
+    r[i] = sum;
+  }
+
+  *scale = scaling;
+
+  return(order + 1);
+}
+
+/* All-pass section coefficients for the decimator, one set per phase.
+ * NOTE(review): the names and the parameter annotation below say Q15,
+ * while the inline Q-format comments in the filter talk about Q31 --
+ * these cannot both be right; confirm against the float reference
+ * implementation before relying on either. */
+static const WebRtc_Word32 kApUpperQ15[ALLPASSSECTIONS] = { 1137, 12537 };
+static const WebRtc_Word32 kApLowerQ15[ALLPASSSECTIONS] = { 5059, 24379 };
+
+
+/* Cascaded first-order all-pass sections used by
+ * WebRtcIsacfix_DecimateAllpass32.  Unlike AllpassFilter2FixDec16 in
+ * filterbanks.c, this variant steps n by 2, i.e. it filters only every
+ * SECOND sample of InOut16 -- the caller runs it twice with offsets 0
+ * and 1 to cover the two polyphase branches.
+ *
+ * InOut16          : [in/out] signal, Q0, filtered in place (even
+ *                             offsets only).
+ * APSectionFactors : [in]     per-section coefficients (see note above).
+ * lengthInOut      : [in]     number of samples spanned in InOut16.
+ * FilterState      : [in/out] one Q16 state word per section.
+ */
+static void AllpassFilterForDec32(WebRtc_Word16         *InOut16, //Q0
+                                  const WebRtc_Word32   *APSectionFactors, //Q15
+                                  WebRtc_Word16         lengthInOut,
+                                  WebRtc_Word32          *FilterState) //Q16
+{
+  int n, j;
+  WebRtc_Word32 a, b;
+
+  for (j=0; j<ALLPASSSECTIONS; j++) {
+    for (n=0;n<lengthInOut;n+=2){
+      a = WEBRTC_SPL_MUL_16_32_RSFT16(InOut16[n], APSectionFactors[j]); //Q0*Q31=Q31 shifted 16 gives Q15
+      a = WEBRTC_SPL_LSHIFT_W32(a, 1); // Q15 -> Q16
+      b = WEBRTC_SPL_ADD_SAT_W32(a, FilterState[j]); //Q16+Q16=Q16
+      a = WEBRTC_SPL_MUL_16_32_RSFT16(
+          (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(b, 16),
+          -APSectionFactors[j]); //Q0*Q31=Q31 shifted 16 gives Q15
+      FilterState[j] = WEBRTC_SPL_ADD_SAT_W32(
+          WEBRTC_SPL_LSHIFT_W32(a,1),
+          WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)InOut16[n], 16)); // Q15<<1 + Q0<<16 = Q16 + Q16 = Q16
+      InOut16[n] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(b, 16); //Save as Q0
+    }
+  }
+}
+
+
+
+
+/* Decimate the input by a factor of 2 using a two-phase all-pass
+ * (polyphase) decimator: each output sample is the saturated sum of the
+ * two all-pass-filtered phases.
+ *
+ * Note that only in[0..N-2] are filtered this call; the newest sample
+ * in[N-1] is stored (shifted up to Q16) as the z^(-1) delay element and
+ * enters the filter on the NEXT call, so the output carries a one-sample
+ * delay.
+ *
+ * NOTE(review): data_vec is a fixed PITCH_FRAME_LEN-long stack buffer
+ * indexed up to N-1, so this assumes N <= PITCH_FRAME_LEN -- confirm
+ * against callers.
+ */
+void WebRtcIsacfix_DecimateAllpass32(const WebRtc_Word16 *in,
+                                     WebRtc_Word32 *state_in,        /* array of size: 2*ALLPASSSECTIONS+1 */
+                                     WebRtc_Word16 N,                /* number of input samples */
+                                     WebRtc_Word16 *out)             /* array of size N/2 */
+{
+  int n;
+  WebRtc_Word16 data_vec[PITCH_FRAME_LEN];
+
+  /* copy input */
+  memcpy(data_vec+1, in, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), (N-1)));
+
+
+  data_vec[0] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(state_in[WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)],16);   //the z^(-1) state
+  state_in[WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)] = WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)in[N-1],16);
+
+
+
+  /* Filter the two phases: odd offsets with the upper coefficients and
+     the first half of state_in, even offsets with the lower coefficients
+     and the second half of state_in. */
+  AllpassFilterForDec32(data_vec+1, kApUpperQ15, N, state_in);
+  AllpassFilterForDec32(data_vec, kApLowerQ15, N, state_in+ALLPASSSECTIONS);
+
+  /* Each output sample is the saturated sum of an even/odd pair. */
+  for (n=0;n<N/2;n++) {
+    out[n]=WEBRTC_SPL_ADD_SAT_W16(data_vec[WEBRTC_SPL_MUL_16_16(2, n)], data_vec[WEBRTC_SPL_MUL_16_16(2, n)+1]);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filters_neon.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filters_neon.c
new file mode 100644
index 0000000..8270359
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/filters_neon.c
@@ -0,0 +1,167 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filters_neon.c
+ *
+ * This file contains function WebRtcIsacfix_AutocorrNeon, optimized for
+ * ARM Neon platform.
+ *
+ */
+
+#include <arm_neon.h>
+#include <assert.h>
+
+#include "codec.h"
+
+// Autocorrelation function in fixed point.
+// NOTE! Different from SPLIB-version in how it scales the signal.
+//
+// ARM NEON counterpart of WebRtcIsacfix_AutocorrC in filters.c: computes
+// r[lag] for lag = 0..order with 64-bit accumulation, chooses a right
+// shift so r[0] fits a signed 32-bit word, applies the same shift to all
+// lags, reports it via *scale, and returns order + 1.
+//
+// NOTE(review): the scaling here is derived from clz of the 64-bit sum
+// and may differ by a small amount from the shift computed by the C
+// version -- confirm whether callers require bit-exact parity.
+int WebRtcIsacfix_AutocorrNeon(
+    WebRtc_Word32* __restrict r,
+    const WebRtc_Word16* __restrict x,
+    WebRtc_Word16 N,
+    WebRtc_Word16 order,
+    WebRtc_Word16* __restrict scale) {
+
+  // The 1st for loop assumed N % 4 == 0.
+  assert(N % 4 == 0);
+
+  int i = 0;
+  int zeros_low = 0;
+  int zeros_high = 0;
+  int16_t scaling = 0;
+  int32_t sum = 0;
+
+  // Step 1, calculate r[0] and how much scaling is needed.
+
+  int16x4_t reg16x4;
+  int64x1_t reg64x1a;
+  int64x1_t reg64x1b;
+  int32x4_t reg32x4;
+  int64x2_t reg64x2 = vdupq_n_s64(0); // zeros
+
+  // Loop over the samples and do:
+  // sum += WEBRTC_SPL_MUL_16_16(x[i], x[i]);
+  // Four samples at a time: widen-multiply to 32 bits, then pairwise
+  // add-accumulate into two 64-bit lanes.
+  for (i = 0; i < N; i += 4) {
+    reg16x4 = vld1_s16(&x[i]);
+    reg32x4 = vmull_s16(reg16x4, reg16x4);
+    reg64x2 = vpadalq_s32(reg64x2, reg32x4);
+  }
+  // Fold the two 64-bit lanes into one scalar total.
+  reg64x1a = vget_low_s64(reg64x2);
+  reg64x1b = vget_high_s64(reg64x2);
+  reg64x1a = vadd_s64(reg64x1a, reg64x1b);
+
+  // Calculate the value of shifting (scaling).
+  // Move the 64-bit sum into two core registers and count leading zeros
+  // of each half; the counts determine how far the sum must be shifted
+  // down to fit a signed 32-bit result.
+  __asm__ __volatile__(
+    "vmov %[z_l], %[z_h], %P[reg]\n\t"
+    "clz %[z_l], %[z_l]\n\t"
+    "clz %[z_h], %[z_h]\n\t"
+    :[z_l]"+r"(zeros_low),
+     [z_h]"+r"(zeros_high)
+    :[reg]"w"(reg64x1a)
+  );
+  if (zeros_high != 32) {
+    scaling = (32 - zeros_high + 1);
+  } else if (zeros_low == 0) {
+    scaling = 1;
+  }
+  // vshl_s64 with a negative shift count performs the right shift.
+  // NOTE(review): assigning the scalar -scaling directly to an
+  // int64x1_t relies on compiler-specific vector-conversion behavior;
+  // confirm this builds cleanly on the supported toolchains.
+  reg64x1b = -scaling;
+  reg64x1a = vshl_s64(reg64x1a, reg64x1b);
+
+  // Record the result.
+  r[0] = (int32_t)vget_lane_s64(reg64x1a, 0);
+
+
+  // Step 2, perform the actual correlation calculation.
+
+  /* Original C code (for the rest of the function):
+  for (i = 1; i < order + 1; i++)  {
+    prod = 0;
+    for (j = 0; j < N - i; j++) {
+      prod += WEBRTC_SPL_MUL_16_16(x[j], x[i + j]);
+    }
+    sum = (int32_t)(prod >> scaling);
+    r[i] = sum;
+  }
+  */
+
+  // NOTE(review): ptr0/ptr1 point into the const input x but are
+  // declared non-const (discards the qualifier) -- harmless here since
+  // they are only read, but worth cleaning up.
+  for (i = 1; i < order + 1; i++) {
+    int32_t prod_lower = 0;
+    int32_t prod_upper = 0;
+    int16_t* ptr0 = &x[0];
+    int16_t* ptr1 = &x[i];
+    int32_t tmp = 0;
+
+    // Initialize the sum (q9) to zero.
+    __asm__ __volatile__("vmov.i32 q9, #0\n\t":::"q9");
+
+    // Calculate the major block of the samples (a multiple of 8).
+    // Eight lag products per iteration, pairwise-accumulated into the
+    // two 64-bit lanes of q9.
+    for (; ptr0 < &x[N - i - 7];) {
+      __asm__ __volatile__(
+        "vld1.16 {d20, d21}, [%[ptr0]]!\n\t"
+        "vld1.16 {d22, d23}, [%[ptr1]]!\n\t"
+        "vmull.s16 q12, d20, d22\n\t"
+        "vmull.s16 q13, d21, d23\n\t"
+        "vpadal.s32 q9, q12\n\t"
+        "vpadal.s32 q9, q13\n\t"
+
+        // Specify constraints.
+        :[ptr0]"+r"(ptr0),
+        [ptr1]"+r"(ptr1)
+        :
+        :"d18", "d19", "d20", "d21", "d22", "d23", "d24", "d25", "d26", "d27"
+      );
+    }
+
+    // Calculate the rest of the samples.
+    // Scalar tail: 64-bit accumulate via adds/adc with sign extension
+    // of each 32-bit product (asr #31).
+    for (; ptr0 < &x[N - i]; ptr0++, ptr1++) {
+      __asm__ __volatile__(
+        "smulbb %[tmp], %[ptr0], %[ptr1]\n\t"
+        "adds %[prod_lower], %[prod_lower], %[tmp]\n\t"
+        "adc %[prod_upper], %[prod_upper], %[tmp], asr #31\n\t"
+
+        // Specify constraints.
+        :[prod_lower]"+r"(prod_lower),
+        [prod_upper]"+r"(prod_upper),
+        [tmp]"+r"(tmp)
+        :[ptr0]"r"(*ptr0),
+        [ptr1]"r"(*ptr1)
+      );
+    }
+
+    // Sum the results up, and do shift.
+    // Combine the vector lanes with the scalar tail, then right-shift
+    // by 'scaling' (vshl with a negative count) and extract the low
+    // 32 bits.
+    __asm__ __volatile__(
+      "vadd.i64 d18, d19\n\t"
+      "vmov.32 d17[0], %[prod_lower]\n\t"
+      "vmov.32 d17[1], %[prod_upper]\n\t"
+      "vadd.i64 d17, d18\n\t"
+      "mov %[tmp], %[scaling], asr #31\n\t"
+      "vmov.32 d16, %[scaling], %[tmp]\n\t"
+      "vshl.s64 d17, d16\n\t"
+      "vmov.32 %[sum], d17[0]\n\t"
+
+      // Specify constraints.
+      :[sum]"=r"(sum),
+      [tmp]"+r"(tmp)
+      :[prod_upper]"r"(prod_upper),
+      [prod_lower]"r"(prod_lower),
+      [scaling]"r"(-scaling)
+      :"d16", "d17", "d18", "d19"
+    );
+
+    // Record the result.
+    r[i] = sum;
+  }
+
+  // Record the result.
+  *scale = scaling;
+
+  return(order + 1);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/initialize.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/initialize.c
new file mode 100644
index 0000000..4d11af5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/initialize.c
@@ -0,0 +1,175 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * initialize.c
+ *
+ * Internal initfunctions
+ *
+ */
+
+#include "codec.h"
+#include "structs.h"
+#include "pitch_estimator.h"
+
+
+/* Reset the encoder-side masking filter state to its starting values. */
+void WebRtcIsacfix_InitMaskingEnc(MaskFiltstr_enc *maskdata) {
+  int i;
+
+  /* Clear the low- and high-band analysis data buffers. */
+  for (i = 0; i < WINLEN; i++) {
+    maskdata->DataBufferLoQ0[i] = 0;
+    maskdata->DataBufferHiQ0[i] = 0;
+  }
+
+  /* Clear correlation buffers and pre-filter states, low band. */
+  for (i = 0; i <= ORDERLO; i++) {
+    maskdata->CorrBufLoQQ[i] = 0;
+    maskdata->CorrBufLoQdom[i] = 0;
+    maskdata->PreStateLoGQ15[i] = 0;
+  }
+
+  /* Clear correlation buffers and pre-filter states, high band. */
+  for (i = 0; i <= ORDERHI; i++) {
+    maskdata->CorrBufHiQQ[i] = 0;
+    maskdata->CorrBufHiQdom[i] = 0;
+    maskdata->PreStateHiGQ15[i] = 0;
+  }
+
+  /* "Previous" frame energy starts at a small nonzero value. */
+  maskdata->OldEnergy = 10;
+}
+
+/* Reset the decoder-side masking filter state to its starting values. */
+void WebRtcIsacfix_InitMaskingDec(MaskFiltstr_dec *maskdata) {
+  int i;
+
+  /* Zero the low- and high-band post-filter states. */
+  for (i = 0; i <= ORDERLO; i++) {
+    maskdata->PostStateLoGQ0[i] = 0;
+  }
+  for (i = 0; i <= ORDERHI; i++) {
+    maskdata->PostStateHiGQ0[i] = 0;
+  }
+
+  /* "Previous" frame energy starts at a small nonzero value. */
+  maskdata->OldEnergy = 10;
+}
+
+
+
+
+
+
+
+/* Zero all analysis (encoder-side) filterbank state. */
+void WebRtcIsacfix_InitPreFilterbank(PreFiltBankstr *prefiltdata)
+{
+  int i;
+
+  /* Look-ahead buffers for the two polyphase channels. */
+  for (i = 0; i < QLOOKAHEAD; i++) {
+    prefiltdata->INLABUF1_fix[i] = 0;
+    prefiltdata->INLABUF2_fix[i] = 0;
+  }
+
+  /* All-pass filter states: 2*(QORDER-1) words per channel. */
+  for (i = 0; i < WEBRTC_SPL_MUL_16_16(2, (QORDER-1)); i++) {
+    prefiltdata->INSTAT1_fix[i] = 0;
+    prefiltdata->INSTAT2_fix[i] = 0;
+  }
+
+  /* High pass filter states (two state words). */
+  prefiltdata->HPstates_fix[0] = 0;
+  prefiltdata->HPstates_fix[1] = 0;
+}
+
+/* Zero all synthesis (decoder-side) filterbank state. */
+void WebRtcIsacfix_InitPostFilterbank(PostFiltBankstr *postfiltdata)
+{
+  int i;
+
+  /* All-pass filter states: 2*POSTQORDER words per channel. */
+  for (i = 0; i < WEBRTC_SPL_MUL_16_16(2, POSTQORDER); i++) {
+    postfiltdata->STATE_0_LOWER_fix[i] = 0;
+    postfiltdata->STATE_0_UPPER_fix[i] = 0;
+  }
+
+  /* High pass filter states, one biquad state pair per stage. */
+  postfiltdata->HPstates1_fix[0] = 0;
+  postfiltdata->HPstates1_fix[1] = 0;
+  postfiltdata->HPstates2_fix[0] = 0;
+  postfiltdata->HPstates2_fix[1] = 0;
+}
+
+
+/* Reset a pitch filter: clear its buffers and restore default lag/gain. */
+void WebRtcIsacfix_InitPitchFilter(PitchFiltstr *pitchfiltdata)
+{
+  int i;
+
+  for (i = 0; i < PITCH_BUFFSIZE; i++) {
+    pitchfiltdata->ubufQQ[i] = 0;
+  }
+  for (i = 0; i < PITCH_DAMPORDER; i++) {
+    pitchfiltdata->ystateQQ[i] = 0;
+  }
+
+  pitchfiltdata->oldlagQ7 = 6400; /* 50.0 in Q7 */
+  pitchfiltdata->oldgainQ12 = 0;
+}
+
+/* Reset the pitch analysis state, including both embedded pitch filters. */
+void WebRtcIsacfix_InitPitchAnalysis(PitchAnalysisStruct *State)
+{
+  int i;
+
+  /* Decimated analysis buffer. */
+  for (i = 0; i < PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; i++) {
+    State->dec_buffer16[i] = 0;
+  }
+
+  /* Decimator all-pass states plus the extra z^(-1) word. */
+  for (i = 0; i < WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)+1; i++) {
+    State->decimator_state32[i] = 0;
+  }
+
+  /* Look-ahead input buffer. */
+  for (i = 0; i < QLOOKAHEAD; i++) {
+    State->inbuf[i] = 0;
+  }
+
+  WebRtcIsacfix_InitPitchFilter(&(State->PFstr_wght));
+  WebRtcIsacfix_InitPitchFilter(&(State->PFstr));
+}
+
+
+/* Reset the packet-loss-concealment state to its post-reset defaults. */
+void WebRtcIsacfix_InitPlc( PLCstr *State )
+{
+  /* Decay coefficients start at full scale (no decay applied yet). */
+  State->decayCoeffPriodic = WEBRTC_SPL_WORD16_MAX;
+  State->decayCoeffNoise = WEBRTC_SPL_WORD16_MAX;
+
+  State->used = PLC_WAS_USED;
+
+  /* Clear the overlap buffer and the stored LP filter coefficients. */
+  WebRtcSpl_ZerosArrayW16(State->overlapLP, RECOVERY_OVERLAP);
+  WebRtcSpl_ZerosArrayW16(State->lofilt_coefQ15, ORDERLO);
+  WebRtcSpl_ZerosArrayW16(State->hifilt_coefQ15, ORDERHI );
+
+  /* Pitch gain/lag state. */
+  State->AvgPitchGain_Q12 = 0;
+  State->lastPitchGain_Q12 = 0;
+  State->lastPitchLag_Q7 = 0;
+  State->gain_lo_hiQ17[0] = 0;
+  State->gain_lo_hiQ17[1] = 0;
+
+  /* Buffers holding the previous frame's pitch signals. */
+  WebRtcSpl_ZerosArrayW16(State->prevPitchInvIn, FRAMESAMPLES/2);
+  WebRtcSpl_ZerosArrayW16(State->prevPitchInvOut, PITCH_MAX_LAG + 10 );
+  WebRtcSpl_ZerosArrayW32(State->prevHP, PITCH_MAX_LAG + 10 );
+
+  State->pitchCycles = 0;
+  State->A = 0;
+  State->B = 0;
+  State->pitchIndex = 0;
+  State->stretchLag = 240;
+  State->seed = 4447;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.c
new file mode 100644
index 0000000..3a37785
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.c
@@ -0,0 +1,1529 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * isacfix.c
+ *
+ * This C file contains the functions for the ISAC API
+ *
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "isacfix.h"
+#include "bandwidth_estimator.h"
+#include "codec.h"
+#include "entropy_coding.h"
+#include "structs.h"
+
+
+/**************************************************************************
+ * WebRtcIsacfix_AssignSize(...)
+ *
+ * Functions used when malloc is not allowed
+ * Returns number of bytes needed to allocate for iSAC struct.
+ *
+ */
+
+/* Report how many bytes the caller must provide for one iSAC instance
+ * when using WebRtcIsacfix_Assign() instead of malloc-based creation. */
+WebRtc_Word16 WebRtcIsacfix_AssignSize(int *sizeinbytes) {
+  *sizeinbytes = (sizeof(ISACFIX_SubStruct) * 2) / sizeof(WebRtc_Word16);
+  return 0;
+}
+
+/***************************************************************************
+ * WebRtcIsacfix_Assign(...)
+ *
+ * Functions used when malloc is not allowed
+ * Place struct at given address
+ *
+ * If successful, Return 0, else Return -1
+ */
+
+/* Place an iSAC instance at caller-provided storage (no malloc) and
+ * reset its status fields.  Returns 0 on success, -1 if the address
+ * is NULL. */
+WebRtc_Word16 WebRtcIsacfix_Assign(ISACFIX_MainStruct **inst, void *ISACFIX_inst_Addr) {
+  ISACFIX_SubStruct *self;
+
+  if (ISACFIX_inst_Addr == NULL) {
+    return -1;
+  }
+
+  self = (ISACFIX_SubStruct *)ISACFIX_inst_Addr;
+  self->errorcode = 0;
+  self->initflag = 0;
+  self->ISACenc_obj.SaveEnc_ptr = NULL;
+  *inst = (ISACFIX_MainStruct *)ISACFIX_inst_Addr;
+  return 0;
+}
+
+
+#ifndef ISACFIX_NO_DYNAMIC_MEM
+
+/****************************************************************************
+ * WebRtcIsacfix_Create(...)
+ *
+ * This function creates a ISAC instance, which will contain the state
+ * information for one coding/decoding channel.
+ *
+ * Input:
+ *      - *ISAC_main_inst   : a pointer to the coder instance.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Allocate one iSAC coder instance on the heap and reset its status
+ * fields.  On allocation failure *ISAC_main_inst is set to NULL and
+ * -1 is returned; otherwise 0. */
+WebRtc_Word16 WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst)
+{
+  ISACFIX_SubStruct *self = malloc(sizeof(ISACFIX_SubStruct));
+
+  *ISAC_main_inst = (ISACFIX_MainStruct *)self;
+  if (self == NULL) {
+    return -1;
+  }
+
+  self->errorcode = 0;
+  self->initflag = 0;
+  self->ISACenc_obj.SaveEnc_ptr = NULL;
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_CreateInternal(...)
+ *
+ * This function creates the memory that is used to store data in the encoder
+ *
+ * Input:
+ *      - *ISAC_main_inst   : a pointer to the coder instance.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Allocate the encoder's internal stored-data buffer (SaveEnc_ptr).
+ * Returns 0 on success, -1 if the allocation fails. */
+WebRtc_Word16 WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  /* The opaque handle is really an ISACFIX_SubStruct. */
+  ISACFIX_SubStruct *self = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  self->ISACenc_obj.SaveEnc_ptr = malloc(sizeof(ISAC_SaveEncData_t));
+
+  return (self->ISACenc_obj.SaveEnc_ptr != NULL) ? 0 : -1;
+}
+
+
+#endif
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_Free(...)
+ *
+ * This function frees the ISAC instance created at the beginning.
+ *
+ * Input:
+ *      - ISAC_main_inst    : a ISAC instance.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Release an instance allocated by WebRtcIsacfix_Create().  Note: the
+ * encoder's SaveEnc_ptr buffer (allocated by CreateInternal) is NOT
+ * released here; call WebRtcIsacfix_FreeInternal() first or that
+ * allocation leaks. */
+WebRtc_Word16 WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  free(ISAC_main_inst);
+  return(0);
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_FreeInternal(...)
+ *
+ * This function frees the internal memory for storing encoder data.
+ *
+ * Input:
+ *       - ISAC_main_inst    : a ISAC instance.
+ *
+ * Return value              :  0 - Ok
+ *                             -1 - Error
+ */
+
+/* Release the encoder's stored-data buffer allocated by
+ * WebRtcIsacfix_CreateInternal().  Always returns 0. */
+WebRtc_Word16 WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  ISACFIX_SubStruct *self = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* free(NULL) is a no-op, so this is safe even if CreateInternal
+   * was never called. */
+  free(self->ISACenc_obj.SaveEnc_ptr);
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_EncoderInit(...)
+ *
+ * This function initializes a ISAC instance prior to the encoder calls.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - CodingMode        : 0 -> Bit rate and frame length are automatically
+ *                                 adjusted to available bandwidth on
+ *                                 transmission channel.
+ *                            1 -> User sets a frame length and a target bit
+ *                                 rate which is taken as the maximum short-term
+ *                                 average bit rate.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Initialize the encoder side of an instance.  CodingMode 0 = adaptive
+ * (channel-adaptive rate/frame length), 1 = instantaneous (user-set).
+ * Returns 0 on success, -1 for a disallowed CodingMode (note: the rest
+ * of the state is still initialized in that case). */
+WebRtc_Word16 WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
+                                        WebRtc_Word16  CodingMode)
+{
+  int k;
+  WebRtc_Word16 statusInit;
+  ISACFIX_SubStruct *ISAC_inst;
+
+  statusInit = 0;
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* flag encoder init (bit 1 of initflag) */
+  ISAC_inst->initflag |= 2;
+
+  if (CodingMode == 0)
+    /* Adaptive mode */
+    ISAC_inst->ISACenc_obj.new_framelength  = INITIAL_FRAMESAMPLES;
+  else if (CodingMode == 1)
+    /* Instantaneous mode */
+    ISAC_inst->ISACenc_obj.new_framelength = 480;    /* default for I-mode */
+  else {
+    /* Any other mode is rejected. */
+    ISAC_inst->errorcode = ISAC_DISALLOWED_CODING_MODE;
+    statusInit = -1;
+  }
+
+  ISAC_inst->CodingMode = CodingMode;
+
+  /* Reset all encoder-side analysis/filter states. */
+  WebRtcIsacfix_InitMaskingEnc(&ISAC_inst->ISACenc_obj.maskfiltstr_obj);
+  WebRtcIsacfix_InitPreFilterbank(&ISAC_inst->ISACenc_obj.prefiltbankstr_obj);
+  WebRtcIsacfix_InitPitchFilter(&ISAC_inst->ISACenc_obj.pitchfiltstr_obj);
+  WebRtcIsacfix_InitPitchAnalysis(&ISAC_inst->ISACenc_obj.pitchanalysisstr_obj);
+
+
+  /* Reset bandwidth-estimation and rate-model states. */
+  WebRtcIsacfix_InitBandwidthEstimator(&ISAC_inst->bwestimator_obj);
+  WebRtcIsacfix_InitRateModel(&ISAC_inst->ISACenc_obj.rate_data_obj);
+
+
+  /* Default bookkeeping values; BottleNeck/MaxDelay are I-mode defaults. */
+  ISAC_inst->ISACenc_obj.buffer_index   = 0;
+  ISAC_inst->ISACenc_obj.frame_nb    = 0;
+  ISAC_inst->ISACenc_obj.BottleNeck      = 32000; /* default for I-mode */
+  ISAC_inst->ISACenc_obj.MaxDelay    = 10;    /* default for I-mode */
+  ISAC_inst->ISACenc_obj.current_framesamples = 0;
+  ISAC_inst->ISACenc_obj.s2nr     = 0;
+  ISAC_inst->ISACenc_obj.MaxBits    = 0;
+  ISAC_inst->ISACenc_obj.bitstr_seed   = 4447;
+  /* Payload/rate limits stored in bytes (word counts shifted left by 1). */
+  ISAC_inst->ISACenc_obj.payloadLimitBytes30  = STREAM_MAXW16_30MS << 1;
+  ISAC_inst->ISACenc_obj.payloadLimitBytes60  = STREAM_MAXW16_60MS << 1;
+  ISAC_inst->ISACenc_obj.maxPayloadBytes      = STREAM_MAXW16_60MS << 1;
+  ISAC_inst->ISACenc_obj.maxRateInBytes       = STREAM_MAXW16_30MS << 1;
+  ISAC_inst->ISACenc_obj.enforceFrameSize     = 0;
+
+  /* Init the bitstream data area to zero */
+  for (k=0; k<STREAM_MAXW16_60MS; k++){
+    ISAC_inst->ISACenc_obj.bitstr_obj.stream[k] = 0;
+  }
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  WebRtcIsacfix_InitPostFilterbank(&ISAC_inst->ISACenc_obj.interpolatorstr_obj);
+#endif
+
+  // Initialize function pointers (overridden by NEON versions below
+  // when built for ARM NEON).
+  WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrC;
+  WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopC;
+
+#ifdef WEBRTC_ARCH_ARM_NEON
+  WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrNeon;
+  WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopNeon;
+#endif
+
+  return statusInit;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_Encode(...)
+ *
+ * This function encodes 10ms frame(s) and inserts it into a package.
+ * Input speech length has to be 160 samples (10ms). The encoder buffers those
+ * 10ms frames until it reaches the chosen Framesize (480 or 960 samples
+ * corresponding to 30 or 60 ms frames), and then proceeds to the encoding.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - speechIn          : input speech vector.
+ *
+ * Output:
+ *      - encoded           : the encoded data vector
+ *
+ * Return value:
+ *                          : >0 - Length (in bytes) of coded data
+ *                          :  0 - The buffer didn't reach the chosen framesize
+ *                            so it keeps buffering speech samples.
+ *                          : -1 - Error
+ */
+
+/* Encode one 10 ms block (160 wide-band samples); the encoder buffers
+ * blocks internally until a whole 30/60 ms frame is ready.  Returns the
+ * payload length in bytes (>0), 0 while still buffering, or -1 on error
+ * (errorcode set on the instance). */
+WebRtc_Word16 WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
+                                   const WebRtc_Word16    *speechIn,
+                                   WebRtc_Word16          *encoded)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  WebRtc_Word16 stream_len;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+
+  /* check if encoder initiated */
+  if ((ISAC_inst->initflag & 2) != 2) {
+    ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED;
+    return (-1);
+  }
+
+  /* stream_len is in bytes; negative values are negated error codes. */
+  stream_len = WebRtcIsacfix_EncodeImpl((WebRtc_Word16*)speechIn,
+                                    &ISAC_inst->ISACenc_obj,
+                                    &ISAC_inst->bwestimator_obj,
+                                    ISAC_inst->CodingMode);
+  if (stream_len<0) {
+    ISAC_inst->errorcode = - stream_len;
+    return -1;
+  }
+
+
+  /* Copy the bitstream to 'encoded'.  (stream_len+1)>>1 rounds the byte
+   * count up to whole 16-bit words; on little-endian hosts each word is
+   * byte-swapped so the output is in big-endian (network) order. */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0;k<(stream_len+1)>>1;k++) {
+    encoded[k] = (WebRtc_Word16)( ( (WebRtc_UWord16)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 )
+                                  | (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8));
+  }
+
+#else
+  WEBRTC_SPL_MEMCPY_W16(encoded, (ISAC_inst->ISACenc_obj.bitstr_obj).stream, (stream_len + 1)>>1);
+#endif
+
+
+
+  return stream_len;
+
+}
+
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_EncodeNb(...)
+ *
+ * This function encodes 10ms narrow band (8 kHz sampling) frame(s) and inserts
+ * it into a package. Input speech length has to be 80 samples (10ms). The encoder
+ * interpolates into wide-band (16 kHz sampling) buffers those
+ * 10ms frames until it reaches the chosen Framesize (480 or 960 wide-band samples
+ * corresponding to 30 or 60 ms frames), and then proceeds to the encoding.
+ *
+ * The function is enabled if WEBRTC_ISAC_FIX_NB_CALLS_ENABLED is defined
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - speechIn          : input speech vector.
+ *
+ * Output:
+ *      - encoded           : the encoded data vector
+ *
+ * Return value:
+ *                          : >0 - Length (in bytes) of coded data
+ *                          :  0 - The buffer didn't reach the chosen framesize
+ *                            so it keeps buffering speech samples.
+ *                          : -1 - Error
+ */
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+/* Narrow-band variant of WebRtcIsacfix_Encode(): takes one 10 ms block
+ * of 8 kHz speech (80 samples), up-samples it to 16 kHz and feeds it to
+ * the regular wide-band encoder.  Same return convention as Encode(). */
+WebRtc_Word16 WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+                                      const WebRtc_Word16    *speechIn,
+                                      WebRtc_Word16          *encoded)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  WebRtc_Word16 stream_len;
+  WebRtc_Word16 speechInWB[FRAMESAMPLES_10ms];
+  WebRtc_Word16 Vector_Word16_1[FRAMESAMPLES_10ms/2];
+  WebRtc_Word16 Vector_Word16_2[FRAMESAMPLES_10ms/2];
+
+  int k;
+
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+
+  /* check if encoder initiated */
+  if ((ISAC_inst->initflag & 2) != 2) {
+    ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED;
+    return (-1);
+  }
+
+
+  /* Oversample to WB */
+
+  /* Form polyphase signals, and compensate for DC offset */
+  for (k=0;k<FRAMESAMPLES_10ms/2;k++) {
+    Vector_Word16_1[k] = speechIn[k] + 1;
+    Vector_Word16_2[k] = speechIn[k];
+  }
+  /* Interpolate the two polyphase branches up to FRAMESAMPLES_10ms
+   * wide-band samples. */
+  WebRtcIsacfix_FilterAndCombine2(Vector_Word16_1, Vector_Word16_2, speechInWB, &ISAC_inst->ISACenc_obj.interpolatorstr_obj, FRAMESAMPLES_10ms);
+
+
+  /* Encode WB signal */
+  stream_len = WebRtcIsacfix_EncodeImpl((WebRtc_Word16*)speechInWB,
+                                    &ISAC_inst->ISACenc_obj,
+                                    &ISAC_inst->bwestimator_obj,
+                                    ISAC_inst->CodingMode);
+  if (stream_len<0) {
+    ISAC_inst->errorcode = - stream_len;
+    return -1;
+  }
+
+
+  /* Copy the bitstream out; byte-swap to big-endian order on
+   * little-endian hosts (stream_len is in bytes, rounded up to words). */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0;k<(stream_len+1)>>1;k++) {
+    encoded[k] = (WebRtc_Word16)(((WebRtc_UWord16)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8)
+                                 | (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8));
+  }
+
+#else
+  WEBRTC_SPL_MEMCPY_W16(encoded, (ISAC_inst->ISACenc_obj.bitstr_obj).stream, (stream_len + 1)>>1);
+#endif
+
+
+
+  return stream_len;
+}
+#endif  /* WEBRTC_ISAC_FIX_NB_CALLS_ENABLED */
+
+
+/****************************************************************************
+ * WebRtcIsacfix_GetNewBitStream(...)
+ *
+ * This function returns encoded data, with the recieved bwe-index in the
+ * stream. It should always return a complete packet, i.e. only called once
+ * even for 60 msec frames
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - bweIndex          : index of bandwidth estimate to put in new bitstream
+ *
+ * Output:
+ *      - encoded           : the encoded data vector
+ *
+ * Return value:
+ *                          : >0 - Length (in bytes) of coded data
+ *                          : -1 - Error
+ */
+
+/* Re-emit the last encoded frame as a complete packet, substituting the
+ * given bandwidth-estimate index (and rate scale factor) into the
+ * stored bitstream.  Returns the payload length in bytes, or -1 on
+ * error. */
+WebRtc_Word16 WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst,
+                                            WebRtc_Word16      bweIndex,
+                                            float              scale,
+                                            WebRtc_Word16        *encoded)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  WebRtc_Word16 stream_len;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+
+  /* check if encoder initiated */
+  if ((ISAC_inst->initflag & 2) != 2) {
+    ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED;
+    return (-1);
+  }
+
+  /* Rebuild the bitstream from the data saved in SaveEnc_ptr. */
+  stream_len = WebRtcIsacfix_EncodeStoredData(&ISAC_inst->ISACenc_obj,
+                                              bweIndex,
+                                              scale);
+  if (stream_len<0) {
+    ISAC_inst->errorcode = - stream_len;
+    return -1;
+  }
+
+  /* Copy out, byte-swapping to big-endian order on little-endian
+   * hosts; (stream_len+1)>>1 rounds bytes up to whole 16-bit words. */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0;k<(stream_len+1)>>1;k++) {
+    encoded[k] = (WebRtc_Word16)( ( (WebRtc_UWord16)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 )
+                                  | (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8));
+  }
+
+#else
+  WEBRTC_SPL_MEMCPY_W16(encoded, (ISAC_inst->ISACenc_obj.bitstr_obj).stream, (stream_len + 1)>>1);
+#endif
+
+  return stream_len;
+
+}
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecoderInit(...)
+ *
+ * This function initializes a ISAC instance prior to the decoder calls.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *
+ * Return value
+ *                          :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Initialize the decoder side of an instance: sets the decoder-init
+ * flag and resets all decoder filter/PLC states.  Always returns 0. */
+WebRtc_Word16 WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* flag decoder init (bit 0 of initflag) */
+  ISAC_inst->initflag |= 1;
+
+
+  WebRtcIsacfix_InitMaskingDec(&ISAC_inst->ISACdec_obj.maskfiltstr_obj);
+  WebRtcIsacfix_InitPostFilterbank(&ISAC_inst->ISACdec_obj.postfiltbankstr_obj);
+  WebRtcIsacfix_InitPitchFilter(&ISAC_inst->ISACdec_obj.pitchfiltstr_obj);
+
+  /* Packet-loss-concealment state. */
+  WebRtcIsacfix_InitPlc( &ISAC_inst->ISACdec_obj.plcstr_obj );
+
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  /* Decimator used by the narrow-band decode path. */
+  WebRtcIsacfix_InitPreFilterbank(&ISAC_inst->ISACdec_obj.decimatorstr_obj);
+#endif
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_UpdateBwEstimate1(...)
+ *
+ * This function updates the estimate of the bandwidth.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - encoded           : encoded ISAC frame(s).
+ *      - packet_size       : size of the packet.
+ *      - rtp_seq_number    : the RTP number of the packet.
+ *      - arr_ts            : the arrival time of the packet (from NetEq)
+ *                            in samples.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Update the bandwidth estimate from the first 5 16-bit words of a
+ * received packet (variant without a send timestamp; 0 is passed to
+ * the estimator instead).  Returns 0 on success, -1 on error with
+ * errorcode set on the instance. */
+WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
+                                     const WebRtc_UWord16   *encoded,
+                                     WebRtc_Word32          packet_size,
+                                     WebRtc_UWord16         rtp_seq_number,
+                                     WebRtc_UWord32         arr_ts)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  Bitstr_dec streamdata;
+  WebRtc_UWord16 partOfStream[5];
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+
+  /* Set stream pointer to point at partOfStream */
+  streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* Sanity check of packet length */
+  if (packet_size <= 0) {
+    /* return error code if the packet length is null or less */
+    ISAC_inst->errorcode = ISAC_EMPTY_PACKET;
+    return -1;
+  } else if (packet_size > (STREAM_MAXW16<<1)) {
+    /* return error code if length of stream is too long */
+    ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+    return -1;
+  }
+
+  /* check if decoder initiated */
+  if ((ISAC_inst->initflag & 1) != 1) {
+    ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED;
+    return (-1);
+  }
+
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+  streamdata.full = 1;
+
+  /* Copy the first 5 words of the bitstream header, byte-swapping on
+   * little-endian hosts so the local copy is in big-endian order. */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0; k<5; k++) {
+    streamdata.stream[k] = (WebRtc_UWord16) (((WebRtc_UWord16)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+  }
+#else
+  /* Bug fix: copy 5 16-bit words (10 bytes), not 5 bytes, so the
+   * estimator does not read uninitialized header data on big-endian
+   * targets. */
+  memcpy(streamdata.stream, encoded, 5 * sizeof(WebRtc_UWord16));
+#endif
+
+  /* (The former 'packet_size == 0' re-check here was unreachable after
+   * the 'packet_size <= 0' check above and has been removed.) */
+
+  err = WebRtcIsacfix_EstimateBandwidth(&ISAC_inst->bwestimator_obj,
+                                        &streamdata,
+                                        packet_size,
+                                        rtp_seq_number,
+                                        0,
+                                        arr_ts);
+
+
+  if (err < 0)
+  {
+    /* return error code if something went wrong */
+    ISAC_inst->errorcode = -err;
+    return -1;
+  }
+
+
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_UpdateBwEstimate(...)
+ *
+ * This function updates the estimate of the bandwidth.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - encoded           : encoded ISAC frame(s).
+ *      - packet_size       : size of the packet.
+ *      - rtp_seq_number    : the RTP number of the packet.
+ *      - send_ts           : Send Time Stamp from RTP header
+ *      - arr_ts            : the arrival time of the packet (from NetEq)
+ *                            in samples.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+/* Update the bandwidth estimate from the first 5 16-bit words of a
+ * received packet, using both the RTP send timestamp and the arrival
+ * time.  Returns 0 on success, -1 on error with errorcode set on the
+ * instance. */
+WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
+                                       const WebRtc_UWord16   *encoded,
+                                       WebRtc_Word32          packet_size,
+                                       WebRtc_UWord16         rtp_seq_number,
+                                       WebRtc_UWord32         send_ts,
+                                       WebRtc_UWord32         arr_ts)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  Bitstr_dec streamdata;
+  WebRtc_UWord16 partOfStream[5];
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+
+  /* Set stream pointer to point at partOfStream */
+  streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* Sanity check of packet length */
+  if (packet_size <= 0) {
+    /* return error code if the packet length is null or less */
+    ISAC_inst->errorcode = ISAC_EMPTY_PACKET;
+    return -1;
+  } else if (packet_size > (STREAM_MAXW16<<1)) {
+    /* return error code if length of stream is too long */
+    ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+    return -1;
+  }
+
+  /* check if decoder initiated */
+  if ((ISAC_inst->initflag & 1) != 1) {
+    ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED;
+    return (-1);
+  }
+
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+  streamdata.full = 1;
+
+  /* Copy the first 5 words of the bitstream header, byte-swapping on
+   * little-endian hosts so the local copy is in big-endian order. */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0; k<5; k++) {
+    streamdata.stream[k] = (WebRtc_UWord16) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+  }
+#else
+  /* Bug fix: copy 5 16-bit words (10 bytes), not 5 bytes, so the
+   * estimator does not read uninitialized header data on big-endian
+   * targets. */
+  memcpy(streamdata.stream, encoded, 5 * sizeof(WebRtc_UWord16));
+#endif
+
+  /* (The former 'packet_size == 0' re-check here was unreachable after
+   * the 'packet_size <= 0' check above and has been removed.) */
+
+  err = WebRtcIsacfix_EstimateBandwidth(&ISAC_inst->bwestimator_obj,
+                                        &streamdata,
+                                        packet_size,
+                                        rtp_seq_number,
+                                        send_ts,
+                                        arr_ts);
+
+  if (err < 0)
+  {
+    /* return error code if something went wrong */
+    ISAC_inst->errorcode = -err;
+    return -1;
+  }
+
+
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_Decode(...)
+ *
+ * This function decodes a ISAC frame. Output speech length
+ * will be a multiple of 480 samples: 480 or 960 samples,
+ * depending on the framesize (30 or 60 ms).
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - encoded           : encoded ISAC frame(s)
+ *      - len               : bytes in encoded vector
+ *
+ * Output:
+ *      - decoded           : The decoded vector
+ *
+ * Return value             : >0 - number of samples in decoded vector
+ *                            -1 - Error
+ */
+
+
+/* Decode one iSAC frame ('len' bytes) into 480 or 960 wide-band
+ * samples.  *speechType is always set to 1 (for NetEq VAD/DTX use).
+ * Returns the number of decoded samples, or -1 on error. */
+WebRtc_Word16 WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
+                                     const WebRtc_UWord16   *encoded,
+                                     WebRtc_Word16          len,
+                                     WebRtc_Word16          *decoded,
+                                     WebRtc_Word16     *speechType)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  /* number of samples (480 or 960), output from decoder */
+  /* that were actually used in the encoder/decoder (determined on the fly) */
+  WebRtc_Word16     number_of_samples;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 declen = 0;
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* check if decoder initiated */
+  if ((ISAC_inst->initflag & 1) != 1) {
+    ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED;
+    return (-1);
+  }
+
+  /* Sanity check of packet length */
+  if (len <= 0) {
+    /* return error code if the packet length is null  or less */
+    ISAC_inst->errorcode = ISAC_EMPTY_PACKET;
+    return -1;
+  } else if (len > (STREAM_MAXW16<<1)) {
+    /* return error code if length of stream is too long */
+    ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+    return -1;
+  }
+
+  /* NOTE(review): the const qualifier is cast away here, and on
+   * little-endian builds the byte-swap loop below rewrites the
+   * caller's 'encoded' buffer in place. */
+  (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (WebRtc_UWord16 *)encoded;
+
+  /* convert bitstream from WebRtc_Word16 to bytes */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0; k<(len>>1); k++) {
+    (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+  }
+  /* Odd byte count: swap the final half-filled word too. */
+  if (len & 0x0001)
+    (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] & 0xFF)<<8);
+#endif
+
+  /* added for NetEq purposes (VAD/DTX related) */
+  *speechType=1;
+
+  declen = WebRtcIsacfix_DecodeImpl(decoded,&ISAC_inst->ISACdec_obj, &number_of_samples);
+
+  if (declen < 0) {
+    /* Some error inside the decoder */
+    ISAC_inst->errorcode = -declen;
+    memset(decoded, 0, sizeof(WebRtc_Word16) * MAX_FRAMESAMPLES);
+    return -1;
+  }
+
+  /* Check that the claimed payload length ('len') matches the number of
+   * bytes the decoder consumed ('declen'), allowing for a trailing
+   * padding byte read from the stream. */
+  if (declen & 0x0001) {
+    if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) & 0x00FF) ) {
+      ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+      memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+      return -1;
+    }
+  } else {
+    if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) >> 8) ) {
+      ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+      memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+      return -1;
+    }
+  }
+
+  return number_of_samples;
+}
+
+
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecodeNb(...)
+ *
+ * This function decodes a ISAC frame in narrow-band (8 kHz sampling).
+ * Output speech length will be a multiple of 240 samples: 240 or 480 samples,
+ * depending on the framesize (30 or 60 ms).
+ *
+ * The function is enabled if WEBRTC_ISAC_FIX_NB_CALLS_ENABLED is defined
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - encoded           : encoded ISAC frame(s)
+ *      - len               : bytes in encoded vector
+ *
+ * Output:
+ *      - decoded           : The decoded vector
+ *
+ * Return value             : >0 - number of samples in decoded vector
+ *                            -1 - Error
+ */
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+/* Narrow-band variant of WebRtcIsacfix_Decode(): decodes at 16 kHz and
+ * then decimates to 8 kHz, returning 240 or 480 narrow-band samples.
+ * Returns the narrow-band sample count, or -1 on error. */
+WebRtc_Word16 WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+                                        const WebRtc_UWord16   *encoded,
+                                        WebRtc_Word16          len,
+                                        WebRtc_Word16          *decoded,
+                                        WebRtc_Word16    *speechType)
+{
+  ISACFIX_SubStruct *ISAC_inst;
+  /* twice the number of samples (480 or 960), output from decoder */
+  /* that were actually used in the encoder/decoder (determined on the fly) */
+  WebRtc_Word16     number_of_samples;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 declen = 0;
+  /* Discarded high-band output of the split filter. */
+  WebRtc_Word16 dummy[FRAMESAMPLES/2];
+
+
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* check if decoder initiated */
+  if ((ISAC_inst->initflag & 1) != 1) {
+    ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED;
+    return (-1);
+  }
+
+  if (len == 0)
+  {  /* return error code if the packet length is null */
+
+    ISAC_inst->errorcode = ISAC_EMPTY_PACKET;
+    return -1;
+  }
+
+  /* NOTE(review): const is cast away; on little-endian builds the
+   * byte-swap loop below rewrites the caller's buffer in place. */
+  (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (WebRtc_UWord16 *)encoded;
+
+  /* convert bitstream from WebRtc_Word16 to bytes */
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0; k<(len>>1); k++) {
+    (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+  }
+  /* Odd byte count: swap the final half-filled word too. */
+  if (len & 0x0001)
+    (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] & 0xFF)<<8);
+#endif
+
+  /* added for NetEq purposes (VAD/DTX related) */
+  *speechType=1;
+
+  declen = WebRtcIsacfix_DecodeImpl(decoded,&ISAC_inst->ISACdec_obj, &number_of_samples);
+
+  if (declen < 0) {
+    /* Some error inside the decoder */
+    ISAC_inst->errorcode = -declen;
+    memset(decoded, 0, sizeof(WebRtc_Word16) * FRAMESAMPLES);
+    return -1;
+  }
+
+  /* Check that 'len' matches the bytes consumed ('declen'), allowing
+   * for a trailing padding byte read from the stream. */
+  if (declen & 0x0001) {
+    if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) & 0x00FF) ) {
+      ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+      memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+      return -1;
+    }
+  } else {
+    if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) >> 8) ) {
+      ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
+      memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+      return -1;
+    }
+  }
+
+  /* Decimate the wide-band output down to narrow-band, in place;
+   * the high band goes to 'dummy' and is discarded. */
+  WebRtcIsacfix_SplitAndFilter2(decoded, decoded, dummy, &ISAC_inst->ISACdec_obj.decimatorstr_obj);
+
+  if (number_of_samples>FRAMESAMPLES) {
+    /* Two frames decoded: decimate the second frame as well. */
+    WebRtcIsacfix_SplitAndFilter2(decoded + FRAMESAMPLES, decoded + FRAMESAMPLES/2,
+                                  dummy, &ISAC_inst->ISACdec_obj.decimatorstr_obj);
+  }
+
+  return number_of_samples/2;
+}
+#endif /* WEBRTC_ISAC_FIX_NB_CALLS_ENABLED */
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecodePlcNb(...)
+ *
+ * This function conducts PLC for ISAC frame(s) in narrow-band (8kHz sampling).
+ * Output speech length  will be "240*noOfLostFrames" samples
+ * that is equevalent of "30*noOfLostFrames" millisecond.
+ *
+ * The function is enabled if WEBRTC_ISAC_FIX_NB_CALLS_ENABLED is defined
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - noOfLostFrames    : Number of PLC frames (240 sample=30ms) to produce
+ *
+ * Output:
+ *      - decoded           : The decoded vector
+ *
+ * Return value             : >0 - number of samples in decoded PLC vector
+ *                            -1 - Error
+ */
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+/* Narrow-band packet-loss concealment: generates up to two lost frames
+ * at 16 kHz and decimates each to 8 kHz (240 samples per frame).
+ * Returns the narrow-band sample count, or -1 if the PLC kernel fails. */
+WebRtc_Word16 WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
+                                         WebRtc_Word16          *decoded,
+                                         WebRtc_Word16 noOfLostFrames )
+{
+  WebRtc_Word16 no_of_samples, declen, k, ok;
+  WebRtc_Word16 outframeNB[FRAMESAMPLES];
+  WebRtc_Word16 outframeWB[FRAMESAMPLES];
+  /* Discarded high-band output of the split filter. */
+  WebRtc_Word16 dummy[FRAMESAMPLES/2];
+
+
+  ISACFIX_SubStruct *ISAC_inst;
+  /* typecast pointer to real structure */
+  ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* Limit number of frames to two = 60 msec. Otherwise we exceed data vectors */
+  if (noOfLostFrames > 2){
+    noOfLostFrames = 2;
+  }
+
+  k = 0;
+  declen = 0;
+  while( noOfLostFrames > 0 )
+  {
+    /* Conceal one wide-band frame... */
+    ok = WebRtcIsacfix_DecodePlcImpl( outframeWB, &ISAC_inst->ISACdec_obj, &no_of_samples );
+    if(ok)
+      return -1;
+
+    /* ...then decimate it into the narrow-band buffer (240 NB samples
+     * per concealed frame). */
+    WebRtcIsacfix_SplitAndFilter2(outframeWB, &(outframeNB[k*240]), dummy, &ISAC_inst->ISACdec_obj.decimatorstr_obj);
+
+    declen += no_of_samples;
+    noOfLostFrames--;
+    k++;
+  }
+
+  /* declen counted wide-band samples; halve for narrow-band. */
+  declen>>=1;
+
+  for (k=0;k<declen;k++) {
+    decoded[k] = outframeNB[k];
+  }
+
+  return declen;
+}
+#endif /* WEBRTC_ISAC_FIX_NB_CALLS_ENABLED */
+
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_DecodePlc(...)
+ *
+ * This function conducts PLC for ISAC frame(s) in wide-band (16kHz sampling).
+ * Output speech length  will be "480*noOfLostFrames" samples
+ * that is equevalent of "30*noOfLostFrames" millisecond.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - noOfLostFrames    : Number of PLC frames (480sample = 30ms)
+ *                                to produce
+ *
+ * Output:
+ *      - decoded           : The decoded vector
+ *
+ * Return value             : >0 - number of samples in decoded PLC vector
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
+                                      WebRtc_Word16          *decoded,
+                                      WebRtc_Word16 noOfLostFrames)
+{
+  /* Conceal up to two lost 30 ms wide-band frames (480 samples each).
+     Returns the number of concealed samples written to 'decoded', or -1
+     if the PLC implementation reports an error. */
+  WebRtc_Word16 samplesThisFrame, totalSamples, frameIdx, status;
+  WebRtc_Word16 plcBuffer[MAX_FRAMESAMPLES];
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* Never conceal more than two frames (60 ms): the buffers are sized for
+     that maximum. */
+  if (noOfLostFrames > 2) {
+    noOfLostFrames = 2;
+  }
+
+  totalSamples = 0;
+  for (frameIdx = 0; frameIdx < noOfLostFrames; frameIdx++) {
+    /* Each concealed frame is written at a 480-sample offset. */
+    status = WebRtcIsacfix_DecodePlcImpl(&(plcBuffer[frameIdx * 480]),
+                                         &instFix->ISACdec_obj,
+                                         &samplesThisFrame);
+    if (status) {
+      return -1;
+    }
+    totalSamples += samplesThisFrame;
+  }
+
+  /* Hand the concealed samples to the caller. */
+  memcpy(decoded, plcBuffer, totalSamples * sizeof(WebRtc_Word16));
+
+  return totalSamples;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_Control(...)
+ *
+ * This function sets the limit on the short-term average bit rate and the
+ * frame length. Should be used only in Instantaneous mode.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance.
+ *      - rate              : limit on the short-term average bit rate,
+ *                            in bits/second (between 10000 and 32000)
+ *      - framesize         : number of milliseconds per frame (30 or 60)
+ *
+ * Return value             : 0  - ok
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
+                                    WebRtc_Word16          rate,
+                                    WebRtc_Word16          framesize)
+{
+  /* Set the target bottleneck rate and frame length for instantaneous mode.
+     Returns 0 on success, -1 on error (error code stored in the instance). */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* This call is only valid in instantaneous (non-adaptive) mode. */
+  if (instFix->CodingMode == 0) {
+    instFix->errorcode = ISAC_MODE_MISMATCH;
+    return -1;
+  }
+
+  /* The bottleneck rate must lie in [10000, 32000] bits/second. */
+  if ((rate < 10000) || (rate > 32000)) {
+    instFix->errorcode = ISAC_DISALLOWED_BOTTLENECK;
+    return -1;
+  }
+  instFix->ISACenc_obj.BottleNeck = rate;
+
+  /* The frame length must be 30 or 60 ms; convert to samples using FS. */
+  if ((framesize != 30) && (framesize != 60)) {
+    instFix->errorcode = ISAC_DISALLOWED_FRAME_LENGTH;
+    return -1;
+  }
+  instFix->ISACenc_obj.new_framelength = (FS/1000) * framesize;
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_ControlBwe(...)
+ *
+ * This function sets the initial values of bottleneck and frame-size if
+ * iSAC is used in channel-adaptive mode. Through this API, users can
+ * enforce a frame-size for all values of bottleneck. Then iSAC will not
+ * automatically change the frame-size.
+ *
+ *
+ * Input:
+ *  - ISAC_main_inst : ISAC instance.
+ *      - rateBPS           : initial value of bottleneck in bits/second
+ *                            10000 <= rateBPS <= 32000 is accepted
+ *                            For default bottleneck set rateBPS = 0
+ *      - frameSizeMs       : number of milliseconds per frame (30 or 60)
+ *      - enforceFrameSize  : 1 to enforce the given frame-size through out
+ *                            the adaptation process, 0 to let iSAC change
+ *                            the frame-size if required.
+ *
+ * Return value    : 0  - ok
+ *         -1 - Error
+ */
+
+WebRtc_Word16 WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
+                                        WebRtc_Word16 rateBPS,
+                                        WebRtc_Word16 frameSizeMs,
+                                        WebRtc_Word16 enforceFrameSize)
+{
+  /* Configure initial bottleneck and frame size for channel-adaptive mode.
+     Returns 0 on success, -1 on error (error code stored in the instance). */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* The encoder must have been initialized before BWE can be configured. */
+  if ((instFix->initflag & 2) != 2) {
+    instFix->errorcode = ISAC_ENCODER_NOT_INITIATED;
+    return -1;
+  }
+
+  /* Only meaningful in channel-adaptive mode (CodingMode == 0). */
+  if (instFix->CodingMode != 0) {
+    instFix->errorcode = ISAC_MODE_MISMATCH;
+    return -1;
+  }
+
+  /* Remember whether the chosen frame size must be kept during adaptation. */
+  instFix->ISACenc_obj.enforceFrameSize = enforceFrameSize ? 1 : 0;
+
+  /* rateBPS == 0 keeps the default initial bottleneck; any other value must
+     lie in [10000, 32000] and is stored shifted left 7 bits. */
+  if (rateBPS != 0) {
+    if ((rateBPS < 10000) || (rateBPS > 32000)) {
+      instFix->errorcode = ISAC_DISALLOWED_BOTTLENECK;
+      return -1;
+    }
+    instFix->bwestimator_obj.sendBwAvg = ((WebRtc_UWord32)rateBPS) << 7;
+  }
+
+  /* The initial frame size must be 30 or 60 ms. */
+  if ((frameSizeMs != 30) && (frameSizeMs != 60)) {
+    instFix->errorcode = ISAC_DISALLOWED_FRAME_LENGTH;
+    return -1;
+  }
+  instFix->ISACenc_obj.new_framelength = (FS/1000) * frameSizeMs;
+
+  return 0;
+}
+
+
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_GetDownLinkBwIndex(...)
+ *
+ * This function returns index representing the Bandwidth estimate from
+ * other side to this side.
+ *
+ * Input:
+ *      - ISAC_main_inst: iSAC struct
+ *
+ * Output:
+ *      - rateIndex     : Bandwidth estimate to transmit to other side.
+ *
+ */
+
+WebRtc_Word16 WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst,
+                                       WebRtc_Word16*     rateIndex)
+{
+  /* Fetch the downlink bandwidth-estimate index to be transmitted to the
+     other side.  Always succeeds (returns 0). */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  *rateIndex = WebRtcIsacfix_GetDownlinkBwIndexImpl(&instFix->bwestimator_obj);
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_UpdateUplinkBw(...)
+ *
+ * This function takes an index representing the Bandwidth estimate from
+ * this side to other side and updates BWE.
+ *
+ * Input:
+ *      - ISAC_main_inst: iSAC struct
+ *      - rateIndex     : Bandwidth estimate from other side.
+ *
+ */
+
+WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst,
+                                   WebRtc_Word16     rateIndex)
+{
+  /* Feed the bandwidth-estimate index received from the other side into the
+     uplink bandwidth estimator.  Returns 0 on success, -1 on error (the
+     negated implementation error code is stored in the instance). */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+  WebRtc_Word16 status =
+      WebRtcIsacfix_UpdateUplinkBwRec(&instFix->bwestimator_obj, rateIndex);
+
+  if (status < 0) {
+    /* The implementation reports failures as negative error codes. */
+    instFix->errorcode = -status;
+    return -1;
+  }
+
+  return 0;
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_ReadFrameLen(...)
+ *
+ * This function returns the length of the frame represented in the packet.
+ *
+ * Input:
+ *      - encoded       : Encoded bitstream
+ *
+ * Output:
+ *      - frameLength   : Length of frame in packet (in samples)
+ *
+ */
+
+WebRtc_Word16 WebRtcIsacfix_ReadFrameLen(const WebRtc_Word16* encoded,
+                                        WebRtc_Word16* frameLength)
+{
+  /* Decode the frame length (in samples) from the first words of an encoded
+   * packet without touching any decoder state.  Returns 0 on success or the
+   * negative error code from WebRtcIsacfix_DecodeFrameLen. */
+  Bitstr_dec streamdata;
+  WebRtc_UWord16 partOfStream[5];
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+
+  /* Set stream pointer to point at partOfStream */
+  streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+
+  /* Fresh arithmetic-decoder state reading from the scratch buffer. */
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+  streamdata.full = 1;
+
+#ifndef WEBRTC_BIG_ENDIAN
+  /* Byte-swap the first five 16-bit words into the scratch buffer. */
+  for (k=0; k<5; k++) {
+    streamdata.stream[k] = (WebRtc_UWord16) (((WebRtc_UWord16)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+  }
+#else
+  /* Bug fix: copy five 16-bit words (10 bytes); the previous size argument
+     of 5 copied only 5 bytes, leaving half the buffer uninitialized. */
+  memcpy(streamdata.stream, encoded, 5 * sizeof(WebRtc_UWord16));
+#endif
+
+  /* decode frame length */
+  err = WebRtcIsacfix_DecodeFrameLen(&streamdata, frameLength);
+  if (err<0)  // error check
+    return err;
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_ReadBwIndex(...)
+ *
+ * This function returns the index of the Bandwidth estimate from the bitstream.
+ *
+ * Input:
+ *      - encoded       : Encoded bitstream
+ *
+ * Output:
+ *      - rateIndex     : Bandwidth estimate in bitstream (the frame length
+ *                        is decoded internally but not returned)
+ *
+ */
+
+WebRtc_Word16 WebRtcIsacfix_ReadBwIndex(const WebRtc_Word16* encoded,
+                                   WebRtc_Word16* rateIndex)
+{
+  /* Extract the bandwidth-estimate index from an encoded packet without
+   * touching any decoder state.  The frame length is decoded first (it
+   * precedes the index in the bitstream) and the same output variable is
+   * reused for it.  Returns 0 on success or a negative error code. */
+  Bitstr_dec streamdata;
+  WebRtc_UWord16 partOfStream[5];
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+
+  /* Set stream pointer to point at partOfStream */
+  streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+
+  /* Fresh arithmetic-decoder state reading from the scratch buffer. */
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+  streamdata.full = 1;
+
+#ifndef WEBRTC_BIG_ENDIAN
+  /* Byte-swap the first five 16-bit words into the scratch buffer. */
+  for (k=0; k<5; k++) {
+    streamdata.stream[k] = (WebRtc_UWord16) (((WebRtc_UWord16)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+  }
+#else
+  /* Bug fix: copy five 16-bit words (10 bytes); the previous size argument
+     of 5 copied only 5 bytes, leaving half the buffer uninitialized. */
+  memcpy(streamdata.stream, encoded, 5 * sizeof(WebRtc_UWord16));
+#endif
+
+  /* decode frame length, needed to get to the rateIndex in the bitstream */
+  err = WebRtcIsacfix_DecodeFrameLen(&streamdata, rateIndex);
+  if (err<0)  // error check
+    return err;
+
+  /* decode BW estimation */
+  err = WebRtcIsacfix_DecodeSendBandwidth(&streamdata, rateIndex);
+  if (err<0)  // error check
+    return err;
+
+  return 0;
+}
+
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_GetErrorCode(...)
+ *
+ * This function can be used to check the error code of an iSAC instance. When
+ * a function returns -1, an error code will be set for that instance. The
+ * function below extracts the code of the last error that occurred in the
+ * specified instance.
+ *
+ * Input:
+ *      - ISAC_main_inst    : ISAC instance
+ *
+ * Return value             : Error code
+ */
+
+WebRtc_Word16 WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  /* Return the error code recorded by the most recent failing call on this
+     instance. */
+  return ((ISACFIX_SubStruct *)ISAC_main_inst)->errorcode;
+}
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_GetUplinkBw(...)
+ *
+ * This function returns the inst quantized iSAC send bitrate
+ *
+ * Input:
+ *      - ISAC_main_inst    : iSAC instance
+ *
+ * Return value             : bitrate
+ */
+
+WebRtc_Word32 WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  /* Report the instantaneous quantized iSAC send bitrate from the
+     bandwidth estimator. */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  return (WebRtc_Word32) WebRtcIsacfix_GetUplinkBandwidth(&instFix->bwestimator_obj);
+}
+
+/****************************************************************************
+ * WebRtcIsacfix_GetNewFrameLen(...)
+ *
+ * This function return the next frame length (in samples) of iSAC.
+ *
+ * Input:
+ *      - ISAC_main_inst    : iSAC instance
+ *
+ * Return value             :  frame length in samples
+ */
+
+WebRtc_Word16 WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst)
+{
+  /* Return the frame length (in samples) the encoder will use for the next
+     frame. */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+  return instFix->ISACenc_obj.new_framelength;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_SetMaxPayloadSize(...)
+ *
+ * This function sets a limit for the maximum payload size of iSAC. The same
+ * value is used both for 30 and 60 msec packets.
+ * The absolute max will be valid until next time the function is called.
+ * NOTE! This function may override the function WebRtcIsacfix_SetMaxRate()
+ *
+ * Input:
+ *      - ISAC_main_inst    : iSAC instance
+ *      - maxPayloadBytes   : maximum size of the payload in bytes
+ *                            valid values are between 100 and 400 bytes
+ *
+ *
+ * Return value             : 0 if successful
+ *                           -1 if error happens
+ */
+
+WebRtc_Word16 WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst,
+                                              WebRtc_Word16 maxPayloadBytes)
+{
+  /* Cap the payload size for both 30 and 60 ms packets; the limit stays in
+     effect until the next call.  May override WebRtcIsacfix_SetMaxRate():
+     for each packet length the tighter of the two limits wins.
+     Returns 0 on success, -1 if maxPayloadBytes is out of range. */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+
+  /* The valid range is [100, 400] bytes. */
+  if ((maxPayloadBytes < 100) || (maxPayloadBytes > 400)) {
+    return -1;
+  }
+
+  /* Remember the absolute maximum until this function is called again. */
+  instFix->ISACenc_obj.maxPayloadBytes = maxPayloadBytes;
+
+  /* 30 ms packets: the effective limit is the smaller of the payload cap and
+     the rate-derived cap. */
+  instFix->ISACenc_obj.payloadLimitBytes30 =
+      (maxPayloadBytes < instFix->ISACenc_obj.maxRateInBytes)
+          ? maxPayloadBytes
+          : instFix->ISACenc_obj.maxRateInBytes;
+
+  /* 60 ms packets: the rate-derived cap allows twice as many bytes. */
+  instFix->ISACenc_obj.payloadLimitBytes60 =
+      (maxPayloadBytes < (instFix->ISACenc_obj.maxRateInBytes << 1))
+          ? maxPayloadBytes
+          : (instFix->ISACenc_obj.maxRateInBytes << 1);
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsacfix_SetMaxRate(...)
+ *
+ * This function sets the maximum rate which the codec may not exceed for a
+ * single packet. The maximum rate is set in bits per second.
+ * The codec has an absolute maximum rate of 53400 bits per second (200 bytes
+ * per 30 msec).
+ * It is possible to set a maximum rate between 32000 and 53400 bits per second.
+ *
+ * The rate limit is valid until next time the function is called.
+ *
+ * NOTE! Packet size will never go above the value set if calling
+ * WebRtcIsacfix_SetMaxPayloadSize() (default max packet size is 400 bytes).
+ *
+ * Input:
+ *      - ISAC_main_inst    : iSAC instance
+ *      - maxRateInBytes    : maximum rate in bits per second,
+ *                            valid values are 32000 to 53400 bits
+ *
+ * Return value             : 0 if successful
+ *                           -1 if error happens
+ */
+
+WebRtc_Word16 WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst,
+                                       WebRtc_Word32 maxRate)
+{
+  /* Cap the instantaneous rate for a single packet, in bits/second.
+     Valid range is [32000, 53400]; the limit stays in effect until the next
+     call.  Interacts with WebRtcIsacfix_SetMaxPayloadSize(): for each packet
+     length the tighter of the two limits wins.
+     Returns 0 on success, -1 if maxRate is out of range. */
+  ISACFIX_SubStruct *instFix = (ISACFIX_SubStruct *)ISAC_main_inst;
+  WebRtc_Word16 rateBytesPer30Ms;
+
+  if ((maxRate < 32000) || (maxRate > 53400)) {
+    /* Requested rate is outside the supported interval. */
+    return -1;
+  }
+
+  /* bytes per 30 ms = floor((maxRate * 30/1000) / 8) = maxRate * 3 / 800. */
+  rateBytesPer30Ms = (WebRtc_Word16)
+      WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_MUL(maxRate, 3), 800);
+
+  /* Keep the value for later use by WebRtcIsacfix_SetMaxPayloadSize(). */
+  instFix->ISACenc_obj.maxRateInBytes = rateBytesPer30Ms;
+
+  /* 30 ms packets: apply whichever limit is tighter. */
+  instFix->ISACenc_obj.payloadLimitBytes30 =
+      (rateBytesPer30Ms < instFix->ISACenc_obj.maxPayloadBytes)
+          ? rateBytesPer30Ms
+          : instFix->ISACenc_obj.maxPayloadBytes;
+
+  /* 60 ms packets: the rate cap doubles; the payload cap is absolute. */
+  instFix->ISACenc_obj.payloadLimitBytes60 =
+      ((rateBytesPer30Ms << 1) < instFix->ISACenc_obj.maxPayloadBytes)
+          ? (rateBytesPer30Ms << 1)
+          : instFix->ISACenc_obj.maxPayloadBytes;
+
+  return 0;
+}
+
+
+
+/****************************************************************************
+ * WebRtcIsacfix_version(...)
+ *
+ * This function returns the version number.
+ *
+ * Output:
+ *      - version  : Pointer to character string
+ *
+ */
+
+void WebRtcIsacfix_version(char *version)
+{
+  /* Write the codec version string into the caller-supplied buffer. */
+  const char kVersion[] = "3.6.0";
+  strcpy(version, kVersion);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.gypi b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.gypi
new file mode 100644
index 0000000..a13d1f5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.gypi
@@ -0,0 +1,82 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'iSACFix',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        '../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+        ],
+      },
+      'sources': [
+        '../interface/isacfix.h',
+        'arith_routines.c',
+        'arith_routines_hist.c',
+        'arith_routines_logist.c',
+        'bandwidth_estimator.c',
+        'decode.c',
+        'decode_bwe.c',
+        'decode_plc.c',
+        'encode.c',
+        'entropy_coding.c',
+        'fft.c',
+        'filterbank_tables.c',
+        'filterbanks.c',
+        'filters.c',
+        'initialize.c',
+        'isacfix.c',
+        'lattice.c',
+        'lattice_c.c',
+        'lpc_masking_model.c',
+        'lpc_tables.c',
+        'pitch_estimator.c',
+        'pitch_filter.c',
+        'pitch_gain_tables.c',
+        'pitch_lag_tables.c',
+        'spectrum_ar_model_tables.c',
+        'transform.c',
+        'arith_routins.h',
+        'bandwidth_estimator.h',
+        'codec.h',
+        'entropy_coding.h',
+        'fft.h',
+        'filterbank_tables.h',
+        'lpc_masking_model.h',
+        'lpc_tables.h',
+        'pitch_estimator.h',
+        'pitch_gain_tables.h',
+        'pitch_lag_tables.h',
+        'settings.h',
+        'spectrum_ar_model_tables.h',
+        'structs.h',
+     ],
+      'conditions': [
+        ['OS!="win"', {
+          'defines': [
+            'WEBRTC_LINUX',
+          ],
+        }],
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice.c
new file mode 100644
index 0000000..8822c6e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice.c
@@ -0,0 +1,313 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lattice.c
+ *
+ * Contains the normalized lattice filter routines (MA and AR) for iSAC codec
+ *
+ */
+
+#include "codec.h"
+#include "settings.h"
+
+#define LATTICE_MUL_32_32_RSFT16(a32a, a32b, b32)                  \
+  ((WebRtc_Word32)(WEBRTC_SPL_MUL(a32a, b32) + (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32))))
+/* This macro is FORBIDDEN to use elsewhere than in a function in this file and
+   its corresponding neon version. It might give unpredictable results, since a
+   general WebRtc_Word32*WebRtc_Word32 multiplication results in a 64 bit value.
+   The result is then shifted just 16 steps to the right, giving need for 48
+   bits, i.e. in the general case, it will NOT fit in a WebRtc_Word32. In the
+   cases used in here, the WebRtc_Word32 will be enough, since (for a good
+   reason) the involved multiplicands aren't big enough to overflow a
+   WebRtc_Word32 after shifting right 16 bits. I have compared the result of a
+   multiplication between t32 and tmp32, done in two ways:
+   1) Using (WebRtc_Word32) (((float)(tmp32))*((float)(tmp32b))/65536.0);
+   2) Using LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b);
+   By running 25 files, I haven't found any bigger diff than 64 - this was in the
+   case when  method 1) gave 650235648 and 2) gave 650235712.
+*/
+
+/* Function prototype: filtering ar_g_Q0[] and ar_f_Q0[] through an AR filter
+   with coefficients cth_Q15[] and sth_Q15[].
+   Implemented for both generic and ARMv7 platforms.
+ */
+void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0,
+                                int16_t* ar_f_Q0,
+                                int16_t* cth_Q15,
+                                int16_t* sth_Q15,
+                                int16_t order_coef);
+
+/* Inner loop used for function WebRtcIsacfix_NormLatticeFilterMa().
+   It does:
+   for 0 <= n < HALF_SUBFRAMELEN - 1:
+     *ptr2 = input2 * (*ptr2 + input0 * (*ptr0));
+     *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+*/
+// Plain-C inner loop of the normalized lattice MA filter (one reflection
+// stage).  For each of the HALF_SUBFRAMELEN - 1 samples it computes:
+//   *ptr2 = input2 * (*ptr2 + input0 * (*ptr0));
+//   *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+// input0/input1 are Q15; input2 is the Q16 inverse coefficient, which is
+// split into two 16-bit halves for LATTICE_MUL_32_32_RSFT16 below.
+void WebRtcIsacfix_FilterMaLoopC(int16_t input0,  // Filter coefficient
+                                 int16_t input1,  // Filter coefficient
+                                 int32_t input2,  // Inverse coeff. (1/input1)
+                                 int32_t* ptr0,   // Sample buffer
+                                 int32_t* ptr1,   // Sample buffer
+                                 int32_t* ptr2) { // Sample buffer
+  int n = 0;
+
+  // Separate the 32-bit variable input2 into two 16-bit integers (high 16 and
+  // low 16 bits), for using LATTICE_MUL_32_32_RSFT16 in the loop.
+  int16_t t16a = (int16_t)(input2 >> 16);
+  int16_t t16b = (int16_t)input2;
+  // Compensate the high half when the low half is negative as a signed
+  // 16-bit value, so the (t16a, t16b) pair still represents input2 in the
+  // macro's mixed 32x16 multiplication.
+  if (t16b < 0) t16a++;
+
+  // The loop filtering the samples *ptr0, *ptr1, *ptr2 with filter coefficients
+  // input0, input1, and input2.
+  for(n = 0; n < HALF_SUBFRAMELEN - 1; n++, ptr0++, ptr1++, ptr2++) {
+    int32_t tmp32a = 0;
+    int32_t tmp32b = 0;
+
+    // Calculate *ptr2 = input2 * (*ptr2 + input0 * (*ptr0));
+    tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr0); // Q15 * Q15 >> 15 = Q15
+    tmp32b = *ptr2 + tmp32a; // Q15 + Q15 = Q15
+    *ptr2 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b);
+
+    // Calculate *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+    tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input1, *ptr0); // Q15*Q15>>15 = Q15
+    tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr2); // Q15*Q15>>15 = Q15
+    *ptr1 = tmp32a + tmp32b; // Q15 + Q15 = Q15
+  }
+}
+
+// Declare a function pointer.
+FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix;
+
+/* filter the signal using normalized lattice filter */
+/* MA filter */
+/* Normalized lattice MA filter.
+ *
+ * Filters one frame through an orderCoef-stage normalized lattice MA filter,
+ * processing SUBFRAMES subframes of HALF_SUBFRAMELEN samples each.
+ *
+ * Input:
+ *  - orderCoef     : filter order (number of lattice stages)
+ *  - lat_inQ0      : input samples in Q0
+ *  - filt_coefQ15  : per-subframe sine coefficients (sth) in Q15
+ *  - gain_lo_hiQ17 : per-subframe gains in Q17, indexed via lo_hi
+ *  - lo_hi         : offset selecting the low/high band gain entry
+ * Input/Output:
+ *  - stateGQ15     : lattice g-state in Q15, carried between calls
+ * Output:
+ *  - lat_outQ9     : filtered output in Q9
+ */
+void WebRtcIsacfix_NormLatticeFilterMa(WebRtc_Word16 orderCoef,
+                                       WebRtc_Word32 *stateGQ15,
+                                       WebRtc_Word16 *lat_inQ0,
+                                       WebRtc_Word16 *filt_coefQ15,
+                                       WebRtc_Word32 *gain_lo_hiQ17,
+                                       WebRtc_Word16 lo_hi,
+                                       WebRtc_Word16 *lat_outQ9)
+{
+  WebRtc_Word16 sthQ15[MAX_AR_MODEL_ORDER];
+  WebRtc_Word16 cthQ15[MAX_AR_MODEL_ORDER];
+
+  int u, i, k, n;
+  WebRtc_Word16 temp2,temp3;
+  WebRtc_Word16 ord_1 = orderCoef+1;
+  WebRtc_Word32 inv_cthQ16[MAX_AR_MODEL_ORDER];
+
+  WebRtc_Word32 gain32, fQtmp;
+  WebRtc_Word16 gain16;
+  WebRtc_Word16 gain_sh;
+
+  WebRtc_Word32 tmp32, tmp32b;
+  WebRtc_Word32 fQ15vec[HALF_SUBFRAMELEN];
+  WebRtc_Word32 gQ15[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN];
+  WebRtc_Word16 sh;
+  WebRtc_Word16 t16a;
+  WebRtc_Word16 t16b;
+
+  for (u=0;u<SUBFRAMES;u++)
+  {
+    /* Offset of this subframe within the frame buffers. */
+    int32_t temp1 = WEBRTC_SPL_MUL_16_16(u, HALF_SUBFRAMELEN);
+
+    /* set the Direct Form coefficients */
+    temp2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(u, orderCoef);
+    temp3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, u)+lo_hi;
+
+    /* compute lattice filter coefficients */
+    memcpy(sthQ15, &filt_coefQ15[temp2], orderCoef * sizeof(WebRtc_Word16));
+
+    /* cth[k] = sqrt(1 - sth[k]^2) for each stage, in Q15. */
+    WebRtcSpl_SqrtOfOneMinusXSquared(sthQ15, orderCoef, cthQ15);
+
+    /* compute the gain */
+    gain32 = gain_lo_hiQ17[temp3];
+    gain_sh = WebRtcSpl_NormW32(gain32);
+    gain32 = WEBRTC_SPL_LSHIFT_W32(gain32, gain_sh); //Q(17+gain_sh)
+
+    for (k=0;k<orderCoef;k++)
+    {
+      gain32 = WEBRTC_SPL_MUL_16_32_RSFT15(cthQ15[k], gain32); //Q15*Q(17+gain_sh)>>15 = Q(17+gain_sh)
+      inv_cthQ16[k] = WebRtcSpl_DivW32W16((WebRtc_Word32)2147483647, cthQ15[k]); // 1/cth[k] in Q31/Q15 = Q16
+    }
+    gain16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(gain32, 16); //Q(1+gain_sh)
+
+    /* normalized lattice filter */
+    /*****************************/
+
+    /* initial conditions */
+    for (i=0;i<HALF_SUBFRAMELEN;i++)
+    {
+      fQ15vec[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)lat_inQ0[i + temp1], 15); //Q15
+      gQ15[0][i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)lat_inQ0[i + temp1], 15); //Q15
+    }
+
+
+    fQtmp = fQ15vec[0];
+
+    /* get the state of f&g for the first input, for all orders */
+    for (i=1;i<ord_1;i++)
+    {
+      // Calculate f[i][0] = inv_cth[i-1]*(f[i-1][0] + sth[i-1]*stateG[i-1]);
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT15(sthQ15[i-1], stateGQ15[i-1]);//Q15*Q15>>15 = Q15
+      tmp32b= fQtmp + tmp32; //Q15+Q15=Q15
+      tmp32 = inv_cthQ16[i-1]; //Q16
+      // Split the Q16 inverse into (high, low) 16-bit halves for
+      // LATTICE_MUL_32_32_RSFT16, adjusting the high half when the low half
+      // is negative as a signed 16-bit value.
+      t16a = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+      t16b = (WebRtc_Word16) (tmp32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)t16a), 16));
+      if (t16b<0) t16a++;
+      tmp32 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b);
+      fQtmp = tmp32; // Q15
+
+      // Calculate g[i][0] = cth[i-1]*stateG[i-1] + sth[i-1]* f[i][0];
+      tmp32  = WEBRTC_SPL_MUL_16_32_RSFT15(cthQ15[i-1], stateGQ15[i-1]); //Q15*Q15>>15 = Q15
+      tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(sthQ15[i-1], fQtmp); //Q15*Q15>>15 = Q15
+      tmp32  = tmp32 + tmp32b;//Q15+Q15 = Q15
+      gQ15[i][0] = tmp32; // Q15
+    }
+
+    /* filtering */
+    /* save the states */
+    for(k=0;k<orderCoef;k++)
+    {
+      // for 0 <= n < HALF_SUBFRAMELEN - 1:
+      //   f[k+1][n+1] = inv_cth[k]*(f[k][n+1] + sth[k]*g[k][n]);
+      //   g[k+1][n+1] = cth[k]*g[k][n] + sth[k]* f[k+1][n+1];
+      WebRtcIsacfix_FilterMaLoopFix(sthQ15[k], cthQ15[k], inv_cthQ16[k],
+                                    &gQ15[k][0], &gQ15[k+1][1], &fQ15vec[1]);
+    }
+
+    fQ15vec[0] = fQtmp;
+
+    /* Apply the gain and shift the result down to Q9 output. */
+    for(n=0;n<HALF_SUBFRAMELEN;n++)
+    {
+      //gain32 = WEBRTC_SPL_RSHIFT_W32(gain32, gain_sh); // Q(17+gain_sh) -> Q17
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(gain16, fQ15vec[n]); //Q(1+gain_sh)*Q15>>16 = Q(gain_sh)
+      sh = 9-gain_sh; //number of needed shifts to reach Q9
+      t16a = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32, sh);
+      lat_outQ9[n + temp1] = t16a;
+    }
+
+    /* save the states */
+    for (i=0;i<ord_1;i++)
+    {
+      stateGQ15[i] = gQ15[i][HALF_SUBFRAMELEN-1];
+    }
+    //process next frame
+  }
+
+  return;
+}
+
+
+
+
+
+/* ----------------AR filter-------------------------*/
+/* filter the signal using normalized lattice filter */
+/* Normalized lattice AR (synthesis) filter.
+ *
+ * Filters one frame through an orderCoef-stage normalized lattice AR filter,
+ * processing SUBFRAMES subframes of HALF_SUBFRAMELEN samples each.  The core
+ * per-sample loop is delegated to WebRtcIsacfix_FilterArLoop (C or ARMv7
+ * implementation).
+ *
+ * Input:
+ *  - orderCoef     : filter order (number of lattice stages)
+ *  - lat_inQ25     : input samples in Q25
+ *  - filt_coefQ15  : per-subframe sine coefficients (sth) in Q15
+ *  - gain_lo_hiQ17 : per-subframe gains in Q17, indexed via lo_hi
+ *  - lo_hi         : offset selecting the low/high band gain entry
+ * Input/Output:
+ *  - stateGQ0      : lattice g-state in Q0, carried between calls
+ * Output:
+ *  - lat_outQ0     : filtered output in Q0
+ */
+void WebRtcIsacfix_NormLatticeFilterAr(WebRtc_Word16 orderCoef,
+                                       WebRtc_Word16 *stateGQ0,
+                                       WebRtc_Word32 *lat_inQ25,
+                                       WebRtc_Word16 *filt_coefQ15,
+                                       WebRtc_Word32 *gain_lo_hiQ17,
+                                       WebRtc_Word16 lo_hi,
+                                       WebRtc_Word16 *lat_outQ0)
+{
+  int ii,n,k,i,u;
+  WebRtc_Word16 sthQ15[MAX_AR_MODEL_ORDER];
+  WebRtc_Word16 cthQ15[MAX_AR_MODEL_ORDER];
+  WebRtc_Word32 tmp32;
+
+
+  WebRtc_Word16 tmpAR;
+  WebRtc_Word16 ARfQ0vec[HALF_SUBFRAMELEN];
+  WebRtc_Word16 ARgQ0vec[MAX_AR_MODEL_ORDER+1];
+
+  WebRtc_Word32 inv_gain32;
+  WebRtc_Word16 inv_gain16;
+  WebRtc_Word16 den16;
+  WebRtc_Word16 sh;
+
+  WebRtc_Word16 temp2,temp3;
+  WebRtc_Word16 ord_1 = orderCoef+1;
+
+  for (u=0;u<SUBFRAMES;u++)
+  {
+    /* Offset of this subframe within the frame buffers. */
+    int32_t temp1 = WEBRTC_SPL_MUL_16_16(u, HALF_SUBFRAMELEN);
+
+    //set the denominator and numerator of the Direct Form
+    temp2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(u, orderCoef);
+    temp3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, u) + lo_hi;
+
+    for (ii=0; ii<orderCoef; ii++) {
+      sthQ15[ii] = filt_coefQ15[temp2+ii];
+    }
+
+    /* cth[k] = sqrt(1 - sth[k]^2) for each stage, in Q15. */
+    WebRtcSpl_SqrtOfOneMinusXSquared(sthQ15, orderCoef, cthQ15);
+
+    /* Simulation of the 25 files shows that maximum value in
+       the vector gain_lo_hiQ17[] is 441344, which means that
+       it is log2((2^31)/441344) = 12.2 shifting bits from
+       saturation. Therefore, it should be safe to use Q27 instead
+       of Q17. */
+
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(gain_lo_hiQ17[temp3], 10); // Q27
+
+    /* Fold the per-stage cth factors into the overall gain. */
+    for (k=0;k<orderCoef;k++) {
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT15(cthQ15[k], tmp32); // Q15*Q27>>15 = Q27
+    }
+
+    sh = WebRtcSpl_NormW32(tmp32); // tmp32 is the gain
+    den16 = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32, sh-16); //Q(27+sh-16) = Q(sh+11) (all 16 bits are value bits)
+    inv_gain32 = WebRtcSpl_DivW32W16((WebRtc_Word32)2147483647, den16); // 1/gain in Q31/Q(sh+11) = Q(20-sh)
+
+    //initial conditions
+    inv_gain16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(inv_gain32, 2); // 1/gain in Q(20-sh-2) = Q(18-sh)
+
+    /* Scale the input by 1/gain and bring it down to Q0 with saturation. */
+    for (i=0;i<HALF_SUBFRAMELEN;i++)
+    {
+
+      tmp32 = WEBRTC_SPL_LSHIFT_W32(lat_inQ25[i + temp1], 1); //Q25->Q26
+      tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(inv_gain16, tmp32); //lat_in[]*inv_gain in (Q(18-sh)*Q26)>>16 = Q(28-sh)
+      tmp32 = WEBRTC_SPL_SHIFT_W32(tmp32, -(28-sh)); // lat_in[]*inv_gain in Q0
+
+      ARfQ0vec[i] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32); // Q0
+    }
+
+    for (i=orderCoef-1;i>=0;i--) //get the state of f&g for the first input, for all orders
+    {
+      /* Rotate (f, stateG) through the stage; +16384 rounds the >>15. */
+      tmp32 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(cthQ15[i],ARfQ0vec[0])) - (WEBRTC_SPL_MUL_16_16(sthQ15[i],stateGQ0[i])) + 16384), 15);
+      tmpAR = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32); // Q0
+
+      tmp32 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(sthQ15[i],ARfQ0vec[0])) + (WEBRTC_SPL_MUL_16_16(cthQ15[i], stateGQ0[i])) + 16384), 15);
+      ARgQ0vec[i+1] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32); // Q0
+      ARfQ0vec[0] = tmpAR;
+    }
+    ARgQ0vec[0] = ARfQ0vec[0];
+
+    // Filter ARgQ0vec[] and ARfQ0vec[] through coefficients cthQ15[] and sthQ15[].
+    WebRtcIsacfix_FilterArLoop(ARgQ0vec, ARfQ0vec, cthQ15, sthQ15, orderCoef);
+
+    for(n=0;n<HALF_SUBFRAMELEN;n++)
+    {
+      lat_outQ0[n + temp1] = ARfQ0vec[n];
+    }
+
+
+    /* cannot use memcpy in the following */
+
+    for (i=0;i<ord_1;i++)
+    {
+      stateGQ0[i] = ARgQ0vec[i];
+    }
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_armv7.S b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_armv7.S
new file mode 100644
index 0000000..1cd3a76
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_armv7.S
@@ -0,0 +1,82 @@
+@
+@ Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ Contains a function for the core loop in the normalized lattice AR
+@ filter routine for iSAC codec, optimized for ARMv7 platforms.
+@
+@ Output is bit-exact with the reference C code in lattice_c.c
+@
+@ Register usage:
+@
+@ r0:  &ar_g_Q0
+@ r1:  &ar_f_Q0
+@ r2:  &cth_Q15
+@ r3:  &sth_Q15
+@ r4:  out loop counter
+@ r5:  tmpAR
+@ r9:  inner loop counter
+@ r12: constant #16384
+@ r6, r7, r8, r10, r11: scratch
+
+#include "settings.h"
+
+.arch armv7-a
+.global WebRtcIsacfix_FilterArLoop
+.align  2
+
+WebRtcIsacfix_FilterArLoop:
+.fnstart
+
+.save {r4-r11}
+  push    {r4-r11}               @ Preserve callee-saved registers.
+
+  add     r1, #2                 @ &ar_f_Q0[1]
+  mov     r12, #16384            @ Rounding constant: 0.5 in Q15.
+  mov     r4, #HALF_SUBFRAMELEN
+  sub     r4, #1                 @ Outer loop counter = HALF_SUBFRAMELEN - 1
+
+HALF_SUBFRAME_LOOP:  @ for(n = 0; n < HALF_SUBFRAMELEN - 1; n++)
+
+  ldr     r9, [sp, #32]          @ Restore the inner loop counter to order_coef
+  ldrh    r5, [r1]               @ tmpAR = ar_f_Q0[n+1]
+  add     r0, r9, asl #1         @ Restore r0 to &ar_g_Q0[order_coef]
+  add     r2, r9, asl #1         @ Restore r2 to &cth_Q15[order_coef]
+  add     r3, r9, asl #1         @ Restore r3 to &sth_Q15[order_coef]
+
+ORDER_COEF_LOOP:  @ for(k = order_coef - 1 ; k >= 0; k--)
+
+  ldrh    r7, [r3, #-2]!         @ sth_Q15[k]  (pre-decrement pointer)
+  ldrh    r6, [r2, #-2]!         @ cth_Q15[k]  (pre-decrement pointer)
+
+  ldrh    r8, [r0, #-2]          @ ar_g_Q0[k]
+  smlabb  r11, r7, r5, r12       @ sth_Q15[k] * tmpAR + 16384
+  smlabb  r10, r6, r5, r12       @ cth_Q15[k] * tmpAR + 16384
+  smulbb  r7, r7, r8             @ sth_Q15[k] * ar_g_Q0[k]
+  smlabb  r11, r6, r8, r11       @ cth_Q15[k]*ar_g_Q0[k]+(sth_Q15[k]*tmpAR+16384)
+
+  sub     r10, r10, r7           @ cth_Q15[k]*tmpAR+16384-(sth_Q15[k]*ar_g_Q0[k])
+  ssat    r11, #16, r11, asr #15 @ >>15 back to Q0, saturate to 16 bits.
+  ssat    r5, #16, r10, asr #15  @ tmpAR = saturate(r10 >> 15).
+  strh    r11, [r0], #-2         @ Output: ar_g_Q0[k+1]
+
+  subs    r9, #1                 @ k--
+  bgt     ORDER_COEF_LOOP
+
+  strh    r5, [r0]               @ Output: ar_g_Q0[0] = tmpAR;
+  strh    r5, [r1], #2           @ Output: ar_f_Q0[n+1] = tmpAR;
+
+  subs    r4, #1                 @ n++
+  bne     HALF_SUBFRAME_LOOP
+
+  pop     {r4-r11}
+  bx      lr
+
+.fnend
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_c.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_c.c
new file mode 100644
index 0000000..80ccf39
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_c.c
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Contains the core loop function for the lattice filter AR routine
+ * for iSAC codec.
+ *
+ */
+
+#include "settings.h"
+#include "signal_processing_library.h"
+#include "typedefs.h"
+
+/* Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients
+ * cth_Q15[] and sth_Q15[].
+ */
+void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0,     // In/out samples, Q0.
+                                int16_t* ar_f_Q0,     // In/out samples, Q0.
+                                int16_t* cth_Q15,     // Filter coefficients, Q15.
+                                int16_t* sth_Q15,     // Filter coefficients, Q15.
+                                int16_t order_coef) { // Order of the filter.
+  int n = 0;
+
+  for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) {
+    int k = 0;
+    int16_t tmpAR = 0;
+    int32_t tmp32 = 0;    // cth*tmpAR - sth*ar_g, rounded and >>15, back in Q0.
+    int32_t tmp32_2 = 0;  // sth*tmpAR + cth*ar_g, rounded and >>15, back in Q0.
+
+    tmpAR = ar_f_Q0[n + 1];
+    for (k = order_coef - 1; k >= 0; k--) {
+      tmp32 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(cth_Q15[k], tmpAR))
+              - (WEBRTC_SPL_MUL_16_16(sth_Q15[k], ar_g_Q0[k])) + 16384), 15);
+      tmp32_2 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(sth_Q15[k], tmpAR))
+                + (WEBRTC_SPL_MUL_16_16(cth_Q15[k], ar_g_Q0[k])) + 16384), 15);
+      tmpAR   = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32);           // Saturate to 16 bits.
+      ar_g_Q0[k + 1] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32_2);  // Saturate to 16 bits.
+    }
+    ar_f_Q0[n + 1] = tmpAR;
+    ar_g_Q0[0] = tmpAR;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_neon.S b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_neon.S
new file mode 100644
index 0000000..a59b6e3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lattice_neon.S
@@ -0,0 +1,155 @@
+@
+@ Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ lattice_neon.s
+@
+@ Contains a function for the core loop in the normalized lattice MA 
+@ filter routine for iSAC codec, optimized for ARM Neon platform.
+@ void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0,
+@                                     int16_t input1,
+@                                     int32_t input2,
+@                                     int32_t* ptr0,
+@                                     int32_t* ptr1,
+@                                     int32_t* __restrict ptr2);
+@ It calculates
+@   *ptr2 = input2 * (*ptr2 + input0 * (*ptr0));
+@   *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+@ in Q15 domain.
+@
+@ Reference code in lattice.c.
+@ Output is not bit-exact with the reference C code, due to the replacement
+@ of WEBRTC_SPL_MUL_16_32_RSFT15 and LATTICE_MUL_32_32_RSFT16 with Neon
+@ instructions, smulwb, and smull. Speech quality was not degraded by
+@ testing speech and tone vectors.
+
+.arch armv7-a
+.fpu neon
+
+#include "settings.h"
+
+.global WebRtcIsacfix_FilterMaLoopNeon
+
+.align  2
+
+WebRtcIsacfix_FilterMaLoopNeon:
+.fnstart
+
+.save {r4-r8}
+  push        {r4-r8}
+
+  vdup.32     d28, r0             @ Initialize Neon register with input0
+  vdup.32     d29, r1             @ Initialize Neon register with input1
+  vdup.32     d30, r2             @ Initialize Neon register with input2
+  ldr         r4, [sp, #20]       @ ptr1
+  ldr         r12, [sp, #24]      @ ptr2
+
+  @ Number of loop iterations after unrolling: r5 = (HALF_SUBFRAMELEN - 1) >> 2
+  @ Leftover samples after the loop, in r6:
+  @    r6 = (HALF_SUBFRAMELEN - 1) - (HALF_SUBFRAMELEN - 1) >> 2 << 2
+  mov         r6, #HALF_SUBFRAMELEN
+  sub         r6, #1
+  lsr         r5, r6, #2
+  sub         r6, r5, lsl #2
+
+  @ First r5 iterations in a loop, 4 samples per iteration.
+
+LOOP:
+  vld1.32     {d0, d1}, [r3]!     @ *ptr0
+
+  vmull.s32   q10, d0, d28        @ tmp32a = input0 * (*ptr0)
+  vmull.s32   q11, d1, d28        @ tmp32a = input0 * (*ptr0)
+  vmull.s32   q12, d0, d29        @ input1 * (*ptr0)
+  vmull.s32   q13, d1, d29        @ input1 * (*ptr0)
+
+  vrshrn.i64  d4, q10, #15        @ Round and narrow tmp32a: >> 15.
+  vrshrn.i64  d5, q11, #15        @ Round and narrow tmp32a: >> 15.
+
+  vld1.32     {d2, d3}, [r12]     @ *ptr2
+  vadd.i32    q3, q2, q1          @ tmp32b = *ptr2 + tmp32a
+
+  vrshrn.i64  d0, q12, #15        @ (input1 * (*ptr0)) >> 15, rounded.
+
+  vmull.s32   q10, d6, d30        @ input2 * tmp32b
+  vmull.s32   q11, d7, d30        @ input2 * tmp32b
+
+  vrshrn.i64  d16, q10, #16       @ New *ptr2 value: >> 16, rounded.
+  vrshrn.i64  d17, q11, #16       @ New *ptr2 value: >> 16, rounded.
+
+  vmull.s32   q10, d16, d28       @ input0 * (*ptr2)
+  vmull.s32   q11, d17, d28       @ input0 * (*ptr2)
+
+  vrshrn.i64  d1, q13, #15        @ (input1 * (*ptr0)) >> 15, rounded.
+  vrshrn.i64  d18, q10, #15       @ (input0 * (*ptr2)) >> 15, rounded.
+  vrshrn.i64  d19, q11, #15       @ (input0 * (*ptr2)) >> 15, rounded.
+
+  vst1.32     {d16, d17}, [r12]!  @ *ptr2
+
+  vadd.i32    q9, q0, q9          @ *ptr1 = input1*(*ptr0) + input0*(*ptr2)
+  subs        r5, #1
+  vst1.32     {d18, d19}, [r4]!   @ *ptr1
+
+  bgt         LOOP
+
+  @ Check how many samples still need to be processed.
+  subs        r6, #2
+  blt         LAST_SAMPLE
+
+  @ Process two more samples:
+  vld1.32     d0, [r3]!           @ *ptr0
+
+  vmull.s32   q11, d0, d28        @ tmp32a = input0 * (*ptr0)
+  vmull.s32   q13, d0, d29        @ input1 * (*ptr0)
+
+  vld1.32     d18, [r12]          @ *ptr2
+  vrshrn.i64  d4, q11, #15        @ Round and narrow tmp32a: >> 15.
+
+  vadd.i32    d7, d4, d18         @ tmp32b = *ptr2 + tmp32a
+  vmull.s32   q11, d7, d30        @ input2 * tmp32b
+  vrshrn.i64  d16, q11, #16       @ New *ptr2 value: >> 16, rounded.
+
+  vmull.s32   q11, d16, d28       @ input0 * (*ptr2)
+  vst1.32     d16, [r12]!         @ *ptr2
+
+  vrshrn.i64  d0, q13, #15        @ (input1 * (*ptr0)) >> 15, rounded.
+  vrshrn.i64  d19, q11, #15       @ (input0 * (*ptr2)) >> 15, rounded.
+  vadd.i32    d19, d0, d19        @ *ptr1 = input1*(*ptr0) + input0*(*ptr2)
+
+  vst1.32     d19, [r4]!          @ *ptr1
+
+  @ If there's still one more sample, process it here.
+LAST_SAMPLE:
+  cmp         r6, #1
+  bne         END
+
+  @ *ptr2 = input2 * (*ptr2 + input0 * (*ptr0));
+
+  ldr         r7, [r3]            @ *ptr0
+  ldr         r8, [r12]           @ *ptr2
+
+  smulwb      r5, r7, r0          @ tmp32a = *ptr0 * input0 >> 16
+  add         r8, r8, r5, lsl #1  @ tmp32b = *ptr2 + (tmp32a << 1)
+  smull       r5, r6, r8, r2      @ tmp32b * input2, in 64 bits
+  lsl         r6, #16
+  add         r6, r5, lsr #16     @ Only take the middle 32 bits
+  str         r6, [r12]           @ Output (*ptr2, as 32 bits)
+
+  @ *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+
+  smulwb      r5, r7, r1          @ tmp32a = *ptr0 * input1 >> 16
+  smulwb      r6, r6, r0          @ tmp32b = *ptr2 * input0 >> 16
+  lsl         r5, r5, #1
+  add         r5, r6, lsl #1      @ (tmp32a << 1) + (tmp32b << 1)
+  str         r5, [r4]            @ Output (*ptr1)
+
+END:
+  pop         {r4-r8}
+  bx          lr
+
+.fnend
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_masking_model.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_masking_model.c
new file mode 100644
index 0000000..4fa8ebb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_masking_model.c
@@ -0,0 +1,1035 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_masking_model.c
+ *
+ * LPC analysis and filtering functions
+ *
+ */
+
+#include "codec.h"
+#include "entropy_coding.h"
+#include "settings.h"
+
+
+/* The conversion is implemented by the step-down algorithm */
+void WebRtcSpl_AToK_JSK(
+    WebRtc_Word16 *a16, /* (i/o) LPC coefficients a[1..useOrder], Q11; overwritten in place */
+    WebRtc_Word16 useOrder,
+    WebRtc_Word16 *k16  /* (o) Reflection coefficients k[0..useOrder-1], Q15 */
+                        )
+{
+  int m, k;
+  WebRtc_Word32 tmp32[MAX_AR_MODEL_ORDER];
+  WebRtc_Word32 tmp32b;
+  WebRtc_Word32 tmp_inv_denum32;
+  WebRtc_Word16 tmp_inv_denum16;
+
+  k16[useOrder-1]= WEBRTC_SPL_LSHIFT_W16(a16[useOrder], 4); //Q11<<4 => Q15
+
+  for (m=useOrder-1; m>0; m--) {  // Step down one order per iteration.
+    tmp_inv_denum32 = ((WebRtc_Word32) 1073741823) - WEBRTC_SPL_MUL_16_16(k16[m], k16[m]); // (1 - k^2) in Q30
+    tmp_inv_denum16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp_inv_denum32, 15); // (1 - k^2) in Q15
+
+    for (k=1; k<=m; k++) {
+      // a[k] - k[m]*a[m-k+1], computed in Q27 before the division.
+      tmp32b = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)a16[k], 16) -
+          WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(k16[m], a16[m-k+1]), 1);
+
+      tmp32[k] = WebRtcSpl_DivW32W16(tmp32b, tmp_inv_denum16); //Q27/Q15 = Q12
+    }
+
+    // Write back lower-order coefficients only after all are computed.
+    for (k=1; k<m; k++) {
+      a16[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32[k], 1); //Q12>>1 => Q11
+    }
+
+    tmp32[m] = WEBRTC_SPL_SAT(4092, tmp32[m], -4092);  // Clamp to +/-4092 (~ +/-0.999 in Q12) so k16 fits in Q15.
+    k16[m-1] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(tmp32[m], 3); //Q12<<3 => Q15
+  }
+
+  return;
+}
+
+
+
+
+
+WebRtc_Word16 WebRtcSpl_LevinsonW32_JSK(
+    WebRtc_Word32 *R,  /* (i) Autocorrelation of length >= order+1 */
+    WebRtc_Word16 *A,  /* (o) A[0..order] LPC coefficients (Q11) */
+    WebRtc_Word16 *K,  /* (o) K[0...order-1] Reflection coefficients (Q15) */
+    WebRtc_Word16 order /* (i) filter order */
+                                        ) {
+  WebRtc_Word16 i, j;
+  WebRtc_Word16 R_hi[LEVINSON_MAX_ORDER+1], R_low[LEVINSON_MAX_ORDER+1];
+  /* Autocorrelation coefficients in high precision */
+  WebRtc_Word16 A_hi[LEVINSON_MAX_ORDER+1], A_low[LEVINSON_MAX_ORDER+1];
+  /* LPC coefficients in high precision */
+  WebRtc_Word16 A_upd_hi[LEVINSON_MAX_ORDER+1], A_upd_low[LEVINSON_MAX_ORDER+1];
+  /* LPC coefficients for next iteration */
+  WebRtc_Word16 K_hi, K_low;      /* reflection coefficient in high precision */
+  WebRtc_Word16 Alpha_hi, Alpha_low, Alpha_exp; /* Prediction gain Alpha in high precision
+                                                   and with scale factor */
+  WebRtc_Word16 tmp_hi, tmp_low;
+  WebRtc_Word32 temp1W32, temp2W32, temp3W32;
+  WebRtc_Word16 norm;
+
+  /* Normalize the autocorrelation R[0]...R[order] */
+
+  norm = WebRtcSpl_NormW32(R[0]);
+
+  for (i=order;i>=0;i--) {
+    temp1W32 = WEBRTC_SPL_LSHIFT_W32(R[i], norm);
+    /* Put R in hi and low format (hi word, plus low 15 bits) */
+    R_hi[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    R_low[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[i], 16)), 1);
+  }
+
+  /* K = A[1] = -R[1] / R[0] */
+
+  temp2W32  = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[1],16) +
+      WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_low[1],1);     /* R[1] in Q31      */
+  temp3W32  = WEBRTC_SPL_ABS_W32(temp2W32);      /* abs R[1]         */
+  temp1W32  = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], R_low[0]); /* abs(R[1])/R[0] in Q31 */
+  /* Negate when R[1] > 0 so that K = -R[1]/R[0] */
+  if (temp2W32 > 0) {
+    temp1W32 = -temp1W32;
+  }
+
+  /* Put K in hi and low format */
+  K_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+  K_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)K_hi, 16)), 1);
+
+  /* Store first reflection coefficient */
+  K[0] = K_hi;
+
+  temp1W32 = WEBRTC_SPL_RSHIFT_W32(temp1W32, 4);    /* A[1] in Q27      */
+
+  /* Put A[1] in hi and low format */
+  A_hi[1] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+  A_low[1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[1], 16)), 1);
+
+  /*  Alpha = R[0] * (1-K^2) */
+
+  temp1W32  = WEBRTC_SPL_LSHIFT_W32((WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(K_hi, K_low), 14) +
+                                      WEBRTC_SPL_MUL_16_16(K_hi, K_hi)), 1); /* temp1W32 = k^2 in Q31 */
+
+  temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32);    /* Guard against <0 */
+  temp1W32 = (WebRtc_Word32)0x7fffffffL - temp1W32;    /* temp1W32 = (1 - K[0]*K[0]) in Q31 */
+
+  /* Store temp1W32 = 1 - K[0]*K[0] on hi and low format */
+  tmp_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+  tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+
+  /* Calculate Alpha in Q31 (hi/low double-precision multiply) */
+  temp1W32 = WEBRTC_SPL_LSHIFT_W32((WEBRTC_SPL_MUL_16_16(R_hi[0], tmp_hi) +
+                                     WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(R_hi[0], tmp_low), 15) +
+                                     WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(R_low[0], tmp_hi), 15) ), 1);
+
+  /* Normalize Alpha and put it in hi and low format */
+
+  Alpha_exp = WebRtcSpl_NormW32(temp1W32);
+  temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, Alpha_exp);
+  Alpha_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+  Alpha_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)Alpha_hi, 16)), 1);
+
+  /* Perform the iterative calculations in the
+     Levinson Durbin algorithm */
+
+  for (i=2; i<=order; i++)
+  {
+
+    /*
+     * Compute the intermediate value
+     *
+     *   temp1W32 = R[i] + sum_{j=1..i-1} R[j] * A[i-j]
+     *
+     * accumulated in Q31.
+     */
+
+    temp1W32 = 0;
+
+    for(j=1; j<i; j++) {
+      /* temp1W32 is in Q31 */
+      temp1W32 += (WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(R_hi[j], A_hi[i-j]), 1) +
+                   WEBRTC_SPL_LSHIFT_W32(( WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(R_hi[j], A_low[i-j]), 15) +
+                                            WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(R_low[j], A_hi[i-j]), 15) ), 1));
+    }
+
+    temp1W32  = WEBRTC_SPL_LSHIFT_W32(temp1W32, 4);
+    temp1W32 += (WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[i], 16) +
+                 WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_low[i], 1));
+
+    /* K = -temp1W32 / Alpha */
+    temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32);      /* abs(temp1W32) */
+    temp3W32 = WebRtcSpl_DivW32HiLow(temp2W32, Alpha_hi, Alpha_low); /* abs(temp1W32)/Alpha */
+
+    /* Negate when temp1W32 > 0 to apply the minus sign of K */
+    if (temp1W32 > 0) {
+      temp3W32 = -temp3W32;
+    }
+
+    /* Use the Alpha shifts from earlier to denormalize; saturate on overflow */
+    norm = WebRtcSpl_NormW32(temp3W32);
+    if ((Alpha_exp <= norm)||(temp3W32==0)) {
+      temp3W32 = WEBRTC_SPL_LSHIFT_W32(temp3W32, Alpha_exp);
+    } else {
+      if (temp3W32 > 0)
+      {
+        temp3W32 = (WebRtc_Word32)0x7fffffffL;
+      } else
+      {
+        temp3W32 = (WebRtc_Word32)0x80000000L;
+      }
+    }
+
+    /* Put K on hi and low format */
+    K_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
+    K_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp3W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)K_hi, 16)), 1);
+
+    /* Store Reflection coefficient in Q15 */
+    K[i-1] = K_hi;
+
+    /* Test for unstable filter. If unstable return 0 and let the
+       user decide what to do in that case
+    */
+
+    if ((WebRtc_Word32)WEBRTC_SPL_ABS_W16(K_hi) > (WebRtc_Word32)32740) {
+      return(-i); /* Unstable filter */
+    }
+
+    /*
+      Compute updated LPC coefficient: Anew[i]
+      Anew[j]= A[j] + K*A[i-j]   for j=1..i-1
+      Anew[i]= K
+    */
+
+    for(j=1; j<i; j++)
+    {
+      temp1W32  = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[j],16) +
+          WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_low[j],1);    /* temp1W32 = A[j] in Q27 */
+
+      temp1W32 += WEBRTC_SPL_LSHIFT_W32(( WEBRTC_SPL_MUL_16_16(K_hi, A_hi[i-j]) +
+                                           WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(K_hi, A_low[i-j]), 15) +
+                                           WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(K_low, A_hi[i-j]), 15) ), 1); /* temp1W32 += K*A[i-j] in Q27 */
+
+      /* Put Anew in hi and low format */
+      A_upd_hi[j] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+      A_upd_low[j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_upd_hi[j], 16)), 1);
+    }
+
+    temp3W32 = WEBRTC_SPL_RSHIFT_W32(temp3W32, 4);     /* temp3W32 = K in Q27 (Convert from Q31 to Q27) */
+
+    /* Store Anew in hi and low format */
+    A_upd_hi[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
+    A_upd_low[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp3W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_upd_hi[i], 16)), 1);
+
+    /*  Alpha = Alpha * (1-K^2) */
+
+    temp1W32  = WEBRTC_SPL_LSHIFT_W32((WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(K_hi, K_low), 14) +
+                                        WEBRTC_SPL_MUL_16_16(K_hi, K_hi)), 1);  /* K*K in Q31 */
+
+    temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32);      /* Guard against <0 */
+    temp1W32 = (WebRtc_Word32)0x7fffffffL - temp1W32;      /* 1 - K*K  in Q31 */
+
+    /* Convert 1 - K^2 to hi and low format */
+    tmp_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+
+    /* Calculate Alpha = Alpha * (1-K^2) in Q31 */
+    temp1W32 = WEBRTC_SPL_LSHIFT_W32(( WEBRTC_SPL_MUL_16_16(Alpha_hi, tmp_hi) +
+                                        WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(Alpha_hi, tmp_low), 15) +
+                                        WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(Alpha_low, tmp_hi), 15)), 1);
+
+    /* Normalize Alpha and store it on hi and low format */
+
+    norm = WebRtcSpl_NormW32(temp1W32);
+    temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, norm);
+
+    Alpha_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+    Alpha_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)Alpha_hi, 16)), 1);
+
+    /* Update the total normalization of Alpha */
+    Alpha_exp = Alpha_exp + norm;
+
+    /* Update A[] */
+
+    for(j=1; j<=i; j++)
+    {
+      A_hi[j] =A_upd_hi[j];
+      A_low[j] =A_upd_low[j];
+    }
+  }
+
+  /*
+    Set A[0] to 1.0 and store the A[i] i=1...order in Q12
+    (Convert from Q27 and use rounding)
+  */
+
+  A[0] = 2048;
+
+  for(i=1; i<=order; i++) {
+    /* temp1W32 in Q27 */
+    temp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[i], 16) +
+        WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_low[i], 1);
+    /* Round and store upper word */
+    A[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32+(WebRtc_Word32)32768, 16);
+  }
+  return(1); /* Stable filters */
+}
+
+
+
+
+
+/* window */
+/* Matlab generation of floating point code:
+ *  t = (1:256)/257; r = 1-(1-t).^.45; w = sin(r*pi).^3; w = w/sum(w); plot((1:256)/8, w); grid;
+ *  for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end
+ * All values are multiplied with 2^21 in fixed point code.
+ */
+static const WebRtc_Word16 kWindowAutocorr[WINLEN] = {  /* Analysis window in Q21. */
+  0,     0,     0,     0,     0,     1,     1,     2,     2,     3,     5,     6,
+  8,    10,    12,    14,    17,    20,    24,    28,    33,    38,    43,    49,
+  56,    63,    71,    79,    88,    98,   108,   119,   131,   143,   157,   171,
+  186,   202,   219,   237,   256,   275,   296,   318,   341,   365,   390,   416,
+  444,   472,   502,   533,   566,   600,   635,   671,   709,   748,   789,   831,
+  875,   920,   967,  1015,  1065,  1116,  1170,  1224,  1281,  1339,  1399,  1461,
+  1525,  1590,  1657,  1726,  1797,  1870,  1945,  2021,  2100,  2181,  2263,  2348,
+  2434,  2523,  2614,  2706,  2801,  2898,  2997,  3099,  3202,  3307,  3415,  3525,
+  3637,  3751,  3867,  3986,  4106,  4229,  4354,  4481,  4611,  4742,  4876,  5012,
+  5150,  5291,  5433,  5578,  5725,  5874,  6025,  6178,  6333,  6490,  6650,  6811,
+  6974,  7140,  7307,  7476,  7647,  7820,  7995,  8171,  8349,  8529,  8711,  8894,
+  9079,  9265,  9453,  9642,  9833, 10024, 10217, 10412, 10607, 10803, 11000, 11199,
+  11398, 11597, 11797, 11998, 12200, 12401, 12603, 12805, 13008, 13210, 13412, 13614,
+  13815, 14016, 14216, 14416, 14615, 14813, 15009, 15205, 15399, 15591, 15782, 15971,
+  16157, 16342, 16524, 16704, 16881, 17056, 17227, 17395, 17559, 17720, 17877, 18030,
+  18179, 18323, 18462, 18597, 18727, 18851, 18970, 19082, 19189, 19290, 19384, 19471,
+  19551, 19623, 19689, 19746, 19795, 19835, 19867, 19890, 19904, 19908, 19902, 19886,
+  19860, 19823, 19775, 19715, 19644, 19561, 19465, 19357, 19237, 19102, 18955, 18793,
+  18618, 18428, 18223, 18004, 17769, 17518, 17252, 16970, 16672, 16357, 16025, 15677,
+  15311, 14929, 14529, 14111, 13677, 13225, 12755, 12268, 11764, 11243, 10706, 10152,
+  9583,  8998,  8399,  7787,  7162,  6527,  5883,  5231,  4576,  3919,  3265,  2620,
+  1990,  1386,   825,   333
+};
+
+
+/* By using a hearing threshold level in dB of -28 dB (higher value gives more noise),
+   the H_T_H (in float) can be calculated as:
+   H_T_H = pow(10.0, 0.05 * (-28.0)) = 0.039810717055350
+   In Q19, H_T_H becomes round(0.039810717055350*2^19) ~= 20872, i.e.
+   H_T_H = 20872/524288.0, and H_T_HQ19 = 20872;
+*/
+
+
+/* The bandwidth expansion vectors are created from:
+   kPolyVecLo=[0.900000,0.810000,0.729000,0.656100,0.590490,0.531441,0.478297,0.430467,0.387420,0.348678,0.313811,0.282430];
+   kPolyVecHi=[0.800000,0.640000,0.512000,0.409600,0.327680,0.262144];
+   round(kPolyVecLo*32768)
+   round(kPolyVecHi*32768)
+*/
+static const WebRtc_Word16 kPolyVecLo[12] = {  /* 0.9^(i+1) in Q15. */
+  29491, 26542, 23888, 21499, 19349, 17414, 15673, 14106, 12695, 11425, 10283, 9255
+};
+static const WebRtc_Word16 kPolyVecHi[6] = {  /* 0.8^(i+1) in Q15. */
+  26214, 20972, 16777, 13422, 10737, 8590
+};
+
+static __inline WebRtc_Word32 log2_Q8_LPC( WebRtc_UWord32 x ) {  /* Returns log2(x) in Q8. */
+
+  WebRtc_Word32 zeros, lg2;
+  WebRtc_Word16 frac;
+
+  zeros=WebRtcSpl_NormU32(x);  /* NOTE(review): x == 0 presumably gives a meaningless result -- confirm callers never pass 0. */
+  frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(((WebRtc_UWord32)WEBRTC_SPL_LSHIFT_W32(x, zeros)&0x7FFFFFFF), 23);  /* Fractional part in Q8: bits below the leading 1. */
+
+  /* log2(x) = integer part (31 - zeros) in Q8, plus fraction */
+
+  lg2= (WEBRTC_SPL_LSHIFT_W16((31-zeros), 8)+frac);
+  return lg2;
+
+}
+
+static const WebRtc_Word16 kMulPitchGain = -25; /* -200/256 (~ -0.78) in Q5 */
+static const WebRtc_Word16 kChngFactor = 3523; /* log10(2)*10/4*0.4/1.4=log10(2)/1.4= 0.2150 in Q14 */
+static const WebRtc_Word16 kExp2 = 11819; /* 1/log(2) ~= 1.4427 in Q13 */
+
+void WebRtcIsacfix_GetVars(const WebRtc_Word16 *input, const WebRtc_Word16 *pitchGains_Q12,
+                           WebRtc_UWord32 *oldEnergy, WebRtc_Word16 *varscale) /* oldEnergy is in/out; varscale is output (Q14-1). */
+{
+  int k;
+  WebRtc_UWord32 nrgQ[4];
+  WebRtc_Word16 nrgQlog[4];
+  WebRtc_Word16 tmp16, chng1, chng2, chng3, chng4, tmp, chngQ, oldNrgQlog, pgQ, pg3;
+  WebRtc_Word32 expPg32;
+  WebRtc_Word16 expPg, divVal;
+  WebRtc_Word16 tmp16_1, tmp16_2;
+
+  /* Calculate the energy of each of the four frame quarters */
+  nrgQ[0]=0;
+  for (k = QLOOKAHEAD/2; k < (FRAMESAMPLES/4 + QLOOKAHEAD) / 2; k++) {
+    nrgQ[0] +=WEBRTC_SPL_MUL_16_16(input[k],input[k]);
+  }
+  nrgQ[1]=0;
+  for ( ; k < (FRAMESAMPLES/2 + QLOOKAHEAD) / 2; k++) {
+    nrgQ[1] +=WEBRTC_SPL_MUL_16_16(input[k],input[k]);
+  }
+  nrgQ[2]=0;
+  for ( ; k < (WEBRTC_SPL_MUL_16_16(FRAMESAMPLES, 3)/4 + QLOOKAHEAD) / 2; k++) {
+    nrgQ[2] +=WEBRTC_SPL_MUL_16_16(input[k],input[k]);
+  }
+  nrgQ[3]=0;
+  for ( ; k < (FRAMESAMPLES + QLOOKAHEAD) / 2; k++) {
+    nrgQ[3] +=WEBRTC_SPL_MUL_16_16(input[k],input[k]);
+  }
+
+  for ( k=0; k<4; k++) {
+    nrgQlog[k] = (WebRtc_Word16)log2_Q8_LPC(nrgQ[k]); /* log2(nrgQ) in Q8 */
+  }
+  oldNrgQlog = (WebRtc_Word16)log2_Q8_LPC(*oldEnergy);
+
+  /* Calculate average level change across the four quarters */
+  chng1 = WEBRTC_SPL_ABS_W16(nrgQlog[3]-nrgQlog[2]);
+  chng2 = WEBRTC_SPL_ABS_W16(nrgQlog[2]-nrgQlog[1]);
+  chng3 = WEBRTC_SPL_ABS_W16(nrgQlog[1]-nrgQlog[0]);
+  chng4 = WEBRTC_SPL_ABS_W16(nrgQlog[0]-oldNrgQlog);
+  tmp = chng1+chng2+chng3+chng4;
+  chngQ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp, kChngFactor, 10); /* Q12 */
+  chngQ += 2926; /* + 1.0/1.4 in Q12 */
+
+  /* Find average pitch gain (sum of four Q12 gains ~= average in Q14) */
+  pgQ = 0;
+  for (k=0; k<4; k++)
+  {
+    pgQ += pitchGains_Q12[k];
+  }
+
+  pg3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(pgQ, pgQ,11); /* pgQ in Q(12+2)=Q14. Q14*Q14>>11 => Q17 */
+  pg3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(pgQ, pg3,13); /* Q17*Q14>>13 =>Q18  */
+  pg3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(pg3, kMulPitchGain ,5); /* Q10  kMulPitchGain = -25 = -200 in Q-3. */
+
+  tmp16=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,pg3,13);/* Q13*Q10>>13 => Q10*/
+  if (tmp16<0) {
+    tmp16_2 = (0x0400 | (tmp16 & 0x03FF));  /* Mantissa: 1.frac in Q10. */
+    tmp16_1 = (WEBRTC_SPL_RSHIFT_W16((WebRtc_UWord16)(tmp16 ^ 0xFFFF), 10)-3); /* Gives result in Q14 */
+    if (tmp16_1<0)
+      expPg=(WebRtc_Word16) -WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+    else
+      expPg=(WebRtc_Word16) -WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+  } else
+    expPg = (WebRtc_Word16) -16384; /* -1 in Q14 (2^0 = 1, negated like the branch results above) */
+
+  expPg32 = (WebRtc_Word32)WEBRTC_SPL_LSHIFT_W16((WebRtc_Word32)expPg, 8); /* Q22 */
+  divVal = WebRtcSpl_DivW32W16ResW16(expPg32, chngQ); /* Q22/Q12=Q10 */
+
+  tmp16=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,divVal,13);/* Q13*Q10>>13 => Q10*/
+  if (tmp16<0) {
+    tmp16_2 = (0x0400 | (tmp16 & 0x03FF));  /* Mantissa: 1.frac in Q10. */
+    tmp16_1 = (WEBRTC_SPL_RSHIFT_W16((WebRtc_UWord16)(tmp16 ^ 0xFFFF), 10)-3); /* Gives result in Q14 */
+    if (tmp16_1<0)
+      expPg=(WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+    else
+      expPg=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+  } else
+    expPg = (WebRtc_Word16) 16384; /* 1 in Q14, since 2^0=1 */
+
+  *varscale = expPg-1;
+  *oldEnergy = nrgQ[3];  /* Remember last quarter's energy for the next call. */
+}
+
+
+
+static __inline WebRtc_Word16  exp2_Q10_T(WebRtc_Word16 x) { // Both in and out in Q10. Approximates 2^x (linear mantissa approximation).
+
+  WebRtc_Word16 tmp16_1, tmp16_2;
+
+  tmp16_2=(WebRtc_Word16)(0x0400|(x&0x03FF));           // Mantissa: 1 + frac(x), in Q10.
+  tmp16_1=-(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(x,10);  // Negated integer part of x.
+  if(tmp16_1>0)
+    return (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);   // Negative exponent: shift down.
+  else
+    return (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);  // Non-negative exponent: shift up.
+
+}
+
+
+// Declare a function pointer.
+AutocorrFix WebRtcIsacfix_AutocorrFix;
+
+void WebRtcIsacfix_GetLpcCoef(WebRtc_Word16 *inLoQ0,
+                              WebRtc_Word16 *inHiQ0,
+                              MaskFiltstr_enc *maskdata,
+                              WebRtc_Word16 snrQ10,
+                              const WebRtc_Word16 *pitchGains_Q12,
+                              WebRtc_Word32 *gain_lo_hiQ17,
+                              WebRtc_Word16 *lo_coeffQ15,
+                              WebRtc_Word16 *hi_coeffQ15)
+{
+  int k, n, j, ii;
+  WebRtc_Word16 pos1, pos2;
+  WebRtc_Word16 sh_lo, sh_hi, sh, ssh, shMem;
+  WebRtc_Word16 varscaleQ14;
+
+  WebRtc_Word16 tmpQQlo, tmpQQhi;
+  WebRtc_Word32 tmp32;
+  WebRtc_Word16 tmp16,tmp16b;
+
+  WebRtc_Word16 polyHI[ORDERHI+1];
+  WebRtc_Word16 rcQ15_lo[ORDERLO], rcQ15_hi[ORDERHI];
+
+
+  WebRtc_Word16 DataLoQ6[WINLEN], DataHiQ6[WINLEN];
+  WebRtc_Word32 corrloQQ[ORDERLO+2];
+  WebRtc_Word32 corrhiQQ[ORDERHI+1];
+  WebRtc_Word32 corrlo2QQ[ORDERLO+1];
+  WebRtc_Word16 scale;
+  WebRtc_Word16 QdomLO, QdomHI, newQdomHI, newQdomLO;
+
+  WebRtc_Word32 round;
+  WebRtc_Word32 res_nrgQQ;
+  WebRtc_Word32 sqrt_nrg;
+
+  WebRtc_Word32 aSQR32;
+
+  /* less-noise-at-low-frequencies factor */
+  WebRtc_Word16 aaQ14;
+
+  /* Multiplication with 1/sqrt(12) ~= 0.28901734104046 can be done by conversion to
+     Q15, i.e. round(0.28901734104046*32768) = 9471, and use 9471/32768.0 ~= 0.289032
+  */
+  WebRtc_Word16 snrq, shft;
+
+  WebRtc_Word16 tmp16a;
+  WebRtc_Word32 tmp32a, tmp32b, tmp32c;
+
+  WebRtc_Word16 a_LOQ11[ORDERLO+1];
+  WebRtc_Word16 k_vecloQ15[ORDERLO];
+  WebRtc_Word16 a_HIQ12[ORDERHI+1];
+  WebRtc_Word16 k_vechiQ15[ORDERHI];
+
+  WebRtc_Word16 stab;
+
+  snrq=snrQ10;
+
+  /* SNR= C * 2 ^ (D * snrq) ; C=0.289, D=0.05*log2(10)=0.166 (~=172 in Q10)*/
+  tmp16 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(snrq, 172, 10); // Q10
+  tmp16b = exp2_Q10_T(tmp16); // Q10
+  snrq = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(tmp16b, 285, 10); // Q10
+
+  /* change quallevel depending on pitch gains and level fluctuations */
+  WebRtcIsacfix_GetVars(inLoQ0, pitchGains_Q12, &(maskdata->OldEnergy), &varscaleQ14);
+
+  /* less-noise-at-low-frequencies factor */
+  /* Calculation of 0.35 * (0.5 + 0.5 * varscale) in fixpoint:
+     With 0.35 in Q16 (0.35 ~= 22938/65536.0 = 0.3500061) and varscaleQ14 in Q14,
+     we get Q16*Q14>>16 = Q14
+  */
+  aaQ14 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+      (WEBRTC_SPL_MUL_16_16(22938, (8192 + WEBRTC_SPL_RSHIFT_W32(varscaleQ14, 1)))
+       + ((WebRtc_Word32)32768)), 16);
+
+  /* Calculate tmp = (1.0 + aa*aa); in Q12 */
+  tmp16 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(aaQ14, aaQ14, 15); //Q14*Q14>>15 = Q13
+  tmpQQlo = 4096 + WEBRTC_SPL_RSHIFT_W16(tmp16, 1); // Q12 + Q13>>1 = Q12
+
+  /* Calculate tmp = (1.0+aa) * (1.0+aa); */
+  tmp16 = 8192 + WEBRTC_SPL_RSHIFT_W16(aaQ14, 1); // 1+a in Q13
+  tmpQQhi = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(tmp16, tmp16, 14); //Q13*Q13>>14 = Q12
+
+  /* replace data in buffer by new look-ahead data */
+  for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++) {
+    maskdata->DataBufferLoQ0[pos1 + WINLEN - QLOOKAHEAD] = inLoQ0[pos1];
+  }
+
+  for (k = 0; k < SUBFRAMES; k++) {
+
+    /* Update input buffer and multiply signal with window */
+    for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) {
+      maskdata->DataBufferLoQ0[pos1] = maskdata->DataBufferLoQ0[pos1 + UPDATE/2];
+      maskdata->DataBufferHiQ0[pos1] = maskdata->DataBufferHiQ0[pos1 + UPDATE/2];
+      DataLoQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+          maskdata->DataBufferLoQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
+      DataHiQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+          maskdata->DataBufferHiQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
+    }
+    pos2 = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(k, UPDATE)/2);
+    for (n = 0; n < UPDATE/2; n++, pos1++) {
+      maskdata->DataBufferLoQ0[pos1] = inLoQ0[QLOOKAHEAD + pos2];
+      maskdata->DataBufferHiQ0[pos1] = inHiQ0[pos2++];
+      DataLoQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+          maskdata->DataBufferLoQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
+      DataHiQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+          maskdata->DataBufferHiQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
+    }
+
+    /* Get correlation coefficients */
+    /* The highest absolute value measured inside DataLo in the test set
+       was 307. For DataHi, the corresponding value was 160.
+
+       This means that it should be possible to represent the input values
+       to WebRtcSpl_AutoCorrelation() as Q6 values (since 307*2^6 =
+       19648). Of course, Q0 will also work, but due to the low energy in
+       DataLo and DataHi, the outputted autocorrelation will be more accurate
+       and mimic the floating point code better, by being in as high a
+       Q-domain as possible.
+    */
+
+    WebRtcIsacfix_AutocorrFix(corrloQQ,DataLoQ6,WINLEN, ORDERLO+1, &scale);
+    QdomLO = 12-scale; // QdomLO is the Q-domain of corrloQQ
+    sh_lo = WebRtcSpl_NormW32(corrloQQ[0]);
+    QdomLO += sh_lo;
+    for (ii=0; ii<ORDERLO+2; ii++) {
+      corrloQQ[ii] = WEBRTC_SPL_LSHIFT_W32(corrloQQ[ii], sh_lo);
+    }
+    /* It was investigated whether it was possible to use 16 bits for the
+       32-bit vector corrloQQ, but it didn't work. */
+
+    WebRtcIsacfix_AutocorrFix(corrhiQQ,DataHiQ6,WINLEN, ORDERHI, &scale);
+
+    QdomHI = 12-scale; // QdomHI is the Q-domain of corrhiQQ
+    sh_hi = WebRtcSpl_NormW32(corrhiQQ[0]);
+    QdomHI += sh_hi;
+    for (ii=0; ii<ORDERHI+1; ii++) {
+      corrhiQQ[ii] = WEBRTC_SPL_LSHIFT_W32(corrhiQQ[ii], sh_hi);
+    }
+
+    /* less noise for lower frequencies, by filtering/scaling autocorrelation sequences */
+
+    /* Calculate corrlo2[0] = tmpQQlo * corrlo[0] - 2.0*tmpQQlo * corrlo[1];*/
+    corrlo2QQ[0] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQlo, corrloQQ[0]), 1)- // Q(12+QdomLO-16)>>1 = Q(QdomLO-5)
+        WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(aaQ14, corrloQQ[1]), 2); // 2*Q(14+QdomLO-16)>>3 = Q(QdomLO-2)>>2 = Q(QdomLO-5)
+
+    /* Calculate corrlo2[n] = tmpQQlo * corrlo[n] - tmpQQlo * (corrlo[n-1] + corrlo[n+1]);*/
+    for (n = 1; n <= ORDERLO; n++) {
+
+      tmp32 = WEBRTC_SPL_RSHIFT_W32(corrloQQ[n-1], 1) + WEBRTC_SPL_RSHIFT_W32(corrloQQ[n+1], 1); // Q(QdomLO-1)
+      corrlo2QQ[n] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQlo, corrloQQ[n]), 1)- // Q(12+QdomLO-16)>>1 = Q(QdomLO-5)
+          WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(aaQ14, tmp32), 2); // Q(14+QdomLO-1-16)>>2 = Q(QdomLO-3)>>2 = Q(QdomLO-5)
+
+    }
+    QdomLO -= 5;
+
+    /* Calculate corrhi[n] = tmpQQhi * corrhi[n]; */
+    for (n = 0; n <= ORDERHI; n++) {
+      corrhiQQ[n] = WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQhi, corrhiQQ[n]); // Q(12+QdomHI-16) = Q(QdomHI-4)
+    }
+    QdomHI -= 4;
+
+    /* add white noise floor */
+    /* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) */
+    /* Calculate corrlo2[0] += 9.5367431640625e-7; and
+       corrhi[0]  += 9.5367431640625e-7, where the constant is 1/2^20 */
+
+    tmp32 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32) 1, QdomLO-20);
+    corrlo2QQ[0] += tmp32;
+    tmp32 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32) 1, QdomHI-20);
+    corrhiQQ[0]  += tmp32;
+
+    /* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) before the following
+       code segment, where we want to make sure we get a 1-bit margin */
+    for (n = 0; n <= ORDERLO; n++) {
+      corrlo2QQ[n] = WEBRTC_SPL_RSHIFT_W32(corrlo2QQ[n], 1); // Make sure we have a 1-bit margin
+    }
+    QdomLO -= 1; // Now, corrlo2QQ is in Q(QdomLO), with a 1-bit margin
+
+    for (n = 0; n <= ORDERHI; n++) {
+      corrhiQQ[n] = WEBRTC_SPL_RSHIFT_W32(corrhiQQ[n], 1); // Make sure we have a 1-bit margin
+    }
+    QdomHI -= 1; // Now, corrhiQQ is in Q(QdomHI), with a 1-bit margin
+
+
+    newQdomLO = QdomLO;
+
+    for (n = 0; n <= ORDERLO; n++) {
+      WebRtc_Word32 tmp, tmpB, tmpCorr;
+      WebRtc_Word16 alpha=328; //0.01 in Q15
+      WebRtc_Word16 beta=324; //(1-0.01)*0.01=0.0099 in Q15
+      WebRtc_Word16 gamma=32440; //(1-0.01)=0.99 in Q15
+
+      if (maskdata->CorrBufLoQQ[n] != 0) {
+        shMem=WebRtcSpl_NormW32(maskdata->CorrBufLoQQ[n]);
+        sh = QdomLO - maskdata->CorrBufLoQdom[n];
+        if (sh<=shMem) {
+          tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], sh); // Get CorrBufLoQQ to same domain as corrlo2
+          tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha, tmp);
+        } else if ((sh-shMem)<7){
+          tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], shMem); // Shift up CorrBufLoQQ as much as possible
+          tmp = WEBRTC_SPL_MUL_16_32_RSFT15(WEBRTC_SPL_LSHIFT_W16(alpha, (sh-shMem)), tmp); // Shift alpha the number of times required to get tmp in QdomLO
+        } else {
+          tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible
+          tmp = WEBRTC_SPL_MUL_16_32_RSFT15(WEBRTC_SPL_LSHIFT_W16(alpha, 6), tmp); // Shift alpha as much as possible without overflow the number of times required to get tmp in QdomHI
+          tmpCorr = WEBRTC_SPL_RSHIFT_W32(corrloQQ[n], sh-shMem-6);
+          tmp = tmp + tmpCorr;
+          maskdata->CorrBufLoQQ[n] = tmp;
+          newQdomLO = QdomLO-(sh-shMem-6);
+          maskdata->CorrBufLoQdom[n] = newQdomLO;
+        }
+      } else
+        tmp = 0;
+
+      tmp = tmp + corrlo2QQ[n];
+
+      maskdata->CorrBufLoQQ[n] = tmp;
+      maskdata->CorrBufLoQdom[n] = QdomLO;
+
+      tmp=WEBRTC_SPL_MUL_16_32_RSFT15(beta, tmp);
+      tmpB=WEBRTC_SPL_MUL_16_32_RSFT15(gamma, corrlo2QQ[n]);
+      corrlo2QQ[n] = tmp + tmpB;
+    }
+    if( newQdomLO!=QdomLO) {
+      for (n = 0; n <= ORDERLO; n++) {
+        if (maskdata->CorrBufLoQdom[n] != newQdomLO)
+          corrloQQ[n] = WEBRTC_SPL_RSHIFT_W32(corrloQQ[n], maskdata->CorrBufLoQdom[n]-newQdomLO);
+      }
+      QdomLO = newQdomLO;
+    }
+
+
+    newQdomHI = QdomHI;
+
+    for (n = 0; n <= ORDERHI; n++) {
+      WebRtc_Word32 tmp, tmpB, tmpCorr;
+      WebRtc_Word16 alpha=328; //0.01 in Q15
+      WebRtc_Word16 beta=324; //(1-0.01)*0.01=0.0099 in Q15
+      WebRtc_Word16 gamma=32440; //(1-0.01)=0.99 in Q15
+      if (maskdata->CorrBufHiQQ[n] != 0) {
+        shMem=WebRtcSpl_NormW32(maskdata->CorrBufHiQQ[n]);
+        sh = QdomHI - maskdata->CorrBufHiQdom[n];
+        if (sh<=shMem) {
+          tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], sh); // Get CorrBufHiQQ to same domain as corrhi
+          tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha, tmp);
+          tmpCorr = corrhiQQ[n];
+          tmp = tmp + tmpCorr;
+          maskdata->CorrBufHiQQ[n] = tmp;
+          maskdata->CorrBufHiQdom[n] = QdomHI;
+        } else if ((sh-shMem)<7) {
+          tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible
+          tmp = WEBRTC_SPL_MUL_16_32_RSFT15(WEBRTC_SPL_LSHIFT_W16(alpha, (sh-shMem)), tmp); // Shift alpha the number of times required to get tmp in QdomHI
+          tmpCorr = corrhiQQ[n];
+          tmp = tmp + tmpCorr;
+          maskdata->CorrBufHiQQ[n] = tmp;
+          maskdata->CorrBufHiQdom[n] = QdomHI;
+        } else {
+          tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible
+          tmp = WEBRTC_SPL_MUL_16_32_RSFT15(WEBRTC_SPL_LSHIFT_W16(alpha, 6), tmp); // Shift alpha as much as possible without overflow the number of times required to get tmp in QdomHI
+          tmpCorr = WEBRTC_SPL_RSHIFT_W32(corrhiQQ[n], sh-shMem-6);
+          tmp = tmp + tmpCorr;
+          maskdata->CorrBufHiQQ[n] = tmp;
+          newQdomHI = QdomHI-(sh-shMem-6);
+          maskdata->CorrBufHiQdom[n] = newQdomHI;
+        }
+      } else {
+        tmp = corrhiQQ[n];
+        tmpCorr = tmp;
+        maskdata->CorrBufHiQQ[n] = tmp;
+        maskdata->CorrBufHiQdom[n] = QdomHI;
+      }
+
+      tmp=WEBRTC_SPL_MUL_16_32_RSFT15(beta, tmp);
+      tmpB=WEBRTC_SPL_MUL_16_32_RSFT15(gamma, tmpCorr);
+      corrhiQQ[n] = tmp + tmpB;
+    }
+
+    if( newQdomHI!=QdomHI) {
+      for (n = 0; n <= ORDERHI; n++) {
+        if (maskdata->CorrBufHiQdom[n] != newQdomHI)
+          corrhiQQ[n] = WEBRTC_SPL_RSHIFT_W32(corrhiQQ[n], maskdata->CorrBufHiQdom[n]-newQdomHI);
+      }
+      QdomHI = newQdomHI;
+    }
+
+    stab=WebRtcSpl_LevinsonW32_JSK(corrlo2QQ, a_LOQ11, k_vecloQ15, ORDERLO);
+
+    if (stab<0) {  // If unstable use lower order
+      a_LOQ11[0]=2048;
+      for (n = 1; n <= ORDERLO; n++) {
+        a_LOQ11[n]=0;
+      }
+
+      stab=WebRtcSpl_LevinsonW32_JSK(corrlo2QQ, a_LOQ11, k_vecloQ15, 8);
+    }
+
+
+    WebRtcSpl_LevinsonDurbin(corrhiQQ,  a_HIQ12,  k_vechiQ15, ORDERHI);
+
+    /* bandwidth expansion */
+    for (n = 1; n <= ORDERLO; n++) {
+      a_LOQ11[n] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecLo[n-1], a_LOQ11[n]);
+    }
+
+
+    polyHI[0] = a_HIQ12[0];
+    for (n = 1; n <= ORDERHI; n++) {
+      a_HIQ12[n] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecHi[n-1], a_HIQ12[n]);
+      polyHI[n] = a_HIQ12[n];
+    }
+
+    /* Normalize the corrlo2 vector */
+    sh = WebRtcSpl_NormW32(corrlo2QQ[0]);
+    for (n = 0; n <= ORDERLO; n++) {
+      corrlo2QQ[n] = WEBRTC_SPL_LSHIFT_W32(corrlo2QQ[n], sh);
+    }
+    QdomLO += sh; /* Now, corrlo2QQ is still in Q(QdomLO) */
+
+
+    /* residual energy */
+
+    sh_lo = 31;
+    res_nrgQQ = 0;
+    for (j = 0; j <= ORDERLO; j++)
+    {
+      for (n = 0; n < j; n++)
+      {
+        WebRtc_Word16 index, diff, sh_corr;
+
+        index = j - n; //WEBRTC_SPL_ABS_W16(j-n);
+
+        /* Calculation of res_nrg += a_LO[j] * corrlo2[j-n] * a_LO[n]; */
+        /* corrlo2QQ is in Q(QdomLO) */
+        tmp32 = ((WebRtc_Word32) WEBRTC_SPL_MUL_16_16(a_LOQ11[j], a_LOQ11[n])); // Q11*Q11 = Q22
+        // Multiply by 2 as the loop only covers half of the matrix. a_LOQ11 has gone
+        // through bandwidth expansion so the following shift is safe.
+        tmp32 = WEBRTC_SPL_LSHIFT_W32(tmp32, 1);
+        sh = WebRtcSpl_NormW32(tmp32);
+        aSQR32 = WEBRTC_SPL_LSHIFT_W32(tmp32, sh); // Q(22+sh)
+        sh_corr = WebRtcSpl_NormW32(corrlo2QQ[index]);
+        tmp32 = WEBRTC_SPL_LSHIFT_W32(corrlo2QQ[index], sh_corr);
+        tmp32 = (WebRtc_Word32) WEBRTC_SPL_MUL_32_32_RSFT32BI(aSQR32, tmp32); // Q(22+sh)*Q(QdomLO+sh_corr)>>32 = Q(22+sh+QdomLO+sh_corr-32) = Q(sh+QdomLO+sh_corr-10)
+        sh = sh+QdomLO+sh_corr-10;
+
+        diff = sh_lo-sh;
+
+        round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, (WEBRTC_SPL_ABS_W32(diff)-1));
+        if (diff==0)
+          round = 0;
+        if (diff>=31) {
+          res_nrgQQ = tmp32;
+          sh_lo = sh;
+        } else if (diff>0) {
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32((res_nrgQQ+round), (diff+1)) + WEBRTC_SPL_RSHIFT_W32(tmp32, 1);
+          sh_lo = sh-1;
+        } else  if (diff>-31){
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32(res_nrgQQ, 1) + WEBRTC_SPL_SHIFT_W32((tmp32+round), -(-diff+1));
+          sh_lo = sh_lo-1;
+        }
+        sh = WebRtcSpl_NormW32(res_nrgQQ);
+        res_nrgQQ = WEBRTC_SPL_LSHIFT_W32(res_nrgQQ, sh);
+        sh_lo += sh;
+      }
+      n = j;
+      {
+        WebRtc_Word16 index, diff, sh_corr;
+
+        index = 0; //WEBRTC_SPL_ABS_W16(j-n);
+
+        /* Calculation of res_nrg += a_LO[j] * corrlo2[j-n] * a_LO[n]; */
+        /* corrlo2QQ is in Q(QdomLO) */
+        tmp32 = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(a_LOQ11[j], a_LOQ11[n]); // Q11*Q11 = Q22
+        sh = WebRtcSpl_NormW32(tmp32);
+        aSQR32 = WEBRTC_SPL_LSHIFT_W32(tmp32, sh); // Q(22+sh)
+        sh_corr = WebRtcSpl_NormW32(corrlo2QQ[index]);
+        tmp32 = WEBRTC_SPL_LSHIFT_W32(corrlo2QQ[index], sh_corr);
+        tmp32 = (WebRtc_Word32) WEBRTC_SPL_MUL_32_32_RSFT32BI(aSQR32, tmp32); // Q(22+sh)*Q(QdomLO+sh_corr)>>32 = Q(22+sh+QdomLO+sh_corr-32) = Q(sh+QdomLO+sh_corr-10)
+        sh = sh+QdomLO+sh_corr-10;
+        diff = sh_lo-sh;
+
+        round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, (WEBRTC_SPL_ABS_W32(diff)-1));
+        if (diff==0)
+          round = 0;
+        if (diff>=31) {
+          res_nrgQQ = tmp32;
+          sh_lo = sh;
+        } else if (diff>0) {
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32((res_nrgQQ+round), (diff+1)) + WEBRTC_SPL_RSHIFT_W32(tmp32, 1);
+          sh_lo = sh-1;
+        } else  if (diff>-31){
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32(res_nrgQQ, 1) + WEBRTC_SPL_SHIFT_W32((tmp32+round), -(-diff+1));
+          sh_lo = sh_lo-1;
+        }
+        sh = WebRtcSpl_NormW32(res_nrgQQ);
+        res_nrgQQ = WEBRTC_SPL_LSHIFT_W32(res_nrgQQ, sh);
+        sh_lo += sh;
+      }
+    }
+    /* Convert to reflection coefficients */
+
+
+    WebRtcSpl_AToK_JSK(a_LOQ11, ORDERLO, rcQ15_lo);
+
+    if (sh_lo & 0x0001) {
+      res_nrgQQ=WEBRTC_SPL_RSHIFT_W32(res_nrgQQ, 1);
+      sh_lo-=1;
+    }
+
+
+    if( res_nrgQQ > 0 )
+    {
+      sqrt_nrg=WebRtcSpl_Sqrt(res_nrgQQ);
+
+      /* add hearing threshold and compute the gain */
+      /* lo_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */
+
+
+      //tmp32a=WEBRTC_SPL_MUL_16_16_RSFT(varscaleQ14, H_T_HQ19, 17);  // Q14
+      tmp32a=WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) varscaleQ14,1);  // H_T_HQ19=65536 (16-17=-1)
+      ssh= WEBRTC_SPL_RSHIFT_W16(sh_lo, 1);  // sqrt_nrg is in Qssh
+      sh = ssh - 14;
+      tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh
+      tmp32c = sqrt_nrg + tmp32b;  // Qssh  (denominator)
+      tmp32a = WEBRTC_SPL_MUL_16_16_RSFT(varscaleQ14, snrq, 0);  //Q24 (numerator)
+
+      sh = WebRtcSpl_NormW32(tmp32c);
+      shft = 16 - sh;
+      tmp16a = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft)  (denominator)
+
+      tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft)
+      sh = ssh-shft-7;
+      *gain_lo_hiQ17 = WEBRTC_SPL_SHIFT_W32(tmp32b, sh);  // Gains in Q17
+    }
+    else
+    {
+      *gain_lo_hiQ17 = 100; //(WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32( (WebRtc_Word32)1, 17);  // Gains in Q17
+    }
+    gain_lo_hiQ17++;
+
+    /* copy coefficients to output array */
+    for (n = 0; n < ORDERLO; n++) {
+      *lo_coeffQ15 = (WebRtc_Word16) (rcQ15_lo[n]);
+      lo_coeffQ15++;
+    }
+    /* residual energy */
+    res_nrgQQ = 0;
+    sh_hi = 31;
+
+
+    for (j = 0; j <= ORDERHI; j++)
+    {
+      for (n = 0; n < j; n++)
+      {
+        WebRtc_Word16 index, diff, sh_corr;
+
+        index = j-n; //WEBRTC_SPL_ABS_W16(j-n);
+
+        /* Calculation of res_nrg += a_HI[j] * corrhi[j-n] * a_HI[n] * 2; for j != n */
+        /* corrhiQQ is in Q(QdomHI) */
+        tmp32 = ((WebRtc_Word32) WEBRTC_SPL_MUL_16_16(a_HIQ12[j], a_HIQ12[n])); // Q12*Q12 = Q24
+        tmp32 = WEBRTC_SPL_LSHIFT_W32(tmp32, 1);
+        sh = WebRtcSpl_NormW32(tmp32);
+        aSQR32 = WEBRTC_SPL_LSHIFT_W32(tmp32, sh); // Q(24+sh)
+        sh_corr = WebRtcSpl_NormW32(corrhiQQ[index]);
+        tmp32 = WEBRTC_SPL_LSHIFT_W32(corrhiQQ[index],sh_corr);
+        tmp32 = (WebRtc_Word32) WEBRTC_SPL_MUL_32_32_RSFT32BI(aSQR32, tmp32); // Q(24+sh)*Q(QdomHI+sh_corr)>>32 = Q(24+sh+QdomHI+sh_corr-32) = Q(sh+QdomHI+sh_corr-8)
+        sh = sh+QdomHI+sh_corr-8;
+        diff = sh_hi-sh;
+
+        round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, (WEBRTC_SPL_ABS_W32(diff)-1));
+        if (diff==0)
+          round = 0;
+        if (diff>=31) {
+          res_nrgQQ = tmp32;
+          sh_hi = sh;
+        } else if (diff>0) {
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32((res_nrgQQ+round), (diff+1)) + WEBRTC_SPL_RSHIFT_W32(tmp32, 1);
+          sh_hi = sh-1;
+        } else  if (diff>-31){
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32(res_nrgQQ, 1) + WEBRTC_SPL_SHIFT_W32((tmp32+round), -(-diff+1));
+          sh_hi = sh_hi-1;
+        }
+
+        sh = WebRtcSpl_NormW32(res_nrgQQ);
+        res_nrgQQ = WEBRTC_SPL_LSHIFT_W32(res_nrgQQ, sh);
+        sh_hi += sh;
+      }
+
+      n = j;
+      {
+        WebRtc_Word16 index, diff, sh_corr;
+
+        index = 0; //n-j; //WEBRTC_SPL_ABS_W16(j-n);
+
+        /* Calculation of res_nrg += a_HI[j] * corrhi[j-n] * a_HI[n];*/
+        /* corrhiQQ is in Q(QdomHI) */
+        tmp32 = ((WebRtc_Word32) WEBRTC_SPL_MUL_16_16(a_HIQ12[j], a_HIQ12[n])); // Q12*Q12 = Q24
+        sh = WebRtcSpl_NormW32(tmp32);
+        aSQR32 = WEBRTC_SPL_LSHIFT_W32(tmp32, sh); // Q(24+sh)
+        sh_corr = WebRtcSpl_NormW32(corrhiQQ[index]);
+        tmp32 = WEBRTC_SPL_LSHIFT_W32(corrhiQQ[index],sh_corr);
+        tmp32 = (WebRtc_Word32) WEBRTC_SPL_MUL_32_32_RSFT32BI(aSQR32, tmp32); // Q(24+sh)*Q(QdomHI+sh_corr)>>32 = Q(24+sh+QdomHI+sh_corr-32) = Q(sh+QdomHI+sh_corr-8)
+        sh = sh+QdomHI+sh_corr-8;
+        diff = sh_hi-sh;
+
+        round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, (WEBRTC_SPL_ABS_W32(diff)-1));
+        if (diff==0)
+          round = 0;
+        if (diff>=31) {
+          res_nrgQQ = tmp32;
+          sh_hi = sh;
+        } else if (diff>0) {
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32((res_nrgQQ+round), (diff+1)) + WEBRTC_SPL_RSHIFT_W32(tmp32, 1);
+          sh_hi = sh-1;
+        } else  if (diff>-31){
+          res_nrgQQ = WEBRTC_SPL_RSHIFT_W32(res_nrgQQ, 1) + WEBRTC_SPL_SHIFT_W32((tmp32+round), -(-diff+1));
+          sh_hi = sh_hi-1;
+        }
+
+        sh = WebRtcSpl_NormW32(res_nrgQQ);
+        res_nrgQQ = WEBRTC_SPL_LSHIFT_W32(res_nrgQQ, sh);
+        sh_hi += sh;
+      }
+    }
+
+    /* Convert to reflection coefficients */
+    WebRtcSpl_LpcToReflCoef(polyHI, ORDERHI, rcQ15_hi);
+
+    if (sh_hi & 0x0001) {
+      res_nrgQQ=WEBRTC_SPL_RSHIFT_W32(res_nrgQQ, 1);
+      sh_hi-=1;
+    }
+
+
+    if( res_nrgQQ > 0 )
+    {
+      sqrt_nrg=WebRtcSpl_Sqrt(res_nrgQQ);
+
+
+      /* add hearing threshold and compute the gain */
+      /* hi_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */
+
+      //tmp32a=WEBRTC_SPL_MUL_16_16_RSFT(varscaleQ14, H_T_HQ19, 17);  // Q14
+      tmp32a=WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) varscaleQ14,1);  // H_T_HQ19=65536 (16-17=-1)
+
+      ssh= WEBRTC_SPL_RSHIFT_W32(sh_hi, 1);  // sqrt_nrg is in Qssh
+      sh = ssh - 14;
+      tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh
+      tmp32c = sqrt_nrg + tmp32b;  // Qssh  (denominator)
+      tmp32a = WEBRTC_SPL_MUL_16_16_RSFT(varscaleQ14, snrq, 0);  //Q24 (numerator)
+
+      sh = WebRtcSpl_NormW32(tmp32c);
+      shft = 16 - sh;
+      tmp16a = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft)  (denominator)
+
+      tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft)
+      sh = ssh-shft-7;
+      *gain_lo_hiQ17 = WEBRTC_SPL_SHIFT_W32(tmp32b, sh);  // Gains in Q17
+    }
+    else
+    {
+      *gain_lo_hiQ17 = 100; //(WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32( (WebRtc_Word32)1, 17);  // Gains in Q17
+    }
+    gain_lo_hiQ17++;
+
+
+    /* copy coefficients to output array */
+    for (n = 0; n < ORDERHI; n++) {
+      *hi_coeffQ15 = rcQ15_hi[n];
+      hi_coeffQ15++;
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_masking_model.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_masking_model.h
new file mode 100644
index 0000000..9a64844
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_masking_model.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_masking_model.h
+ *
+ * LPC functions
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_
+
+#include "structs.h"
+
+void WebRtcIsacfix_GetVars(const WebRtc_Word16 *input,
+                           const WebRtc_Word16 *pitchGains_Q12,
+                           WebRtc_UWord32 *oldEnergy,
+                           WebRtc_Word16 *varscale);
+
+void WebRtcIsacfix_GetLpcCoef(WebRtc_Word16 *inLoQ0,
+                              WebRtc_Word16 *inHiQ0,
+                              MaskFiltstr_enc *maskdata,
+                              WebRtc_Word16 snrQ10,
+                              const WebRtc_Word16 *pitchGains_Q12,
+                              WebRtc_Word32 *gain_lo_hiQ17,
+                              WebRtc_Word16 *lo_coeffQ15,
+                              WebRtc_Word16 *hi_coeffQ15);
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_tables.c
new file mode 100644
index 0000000..90cc9af
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_tables.c
@@ -0,0 +1,1280 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_tables.c
+ *
+ * Coding tables for the KLT coefficients
+ *
+ */
+
+
+#include "settings.h"
+#include "lpc_tables.h"
+
+/* indices of KLT coefficients used */
+const WebRtc_UWord16 WebRtcIsacfix_kSelIndGain[12] = {
+  0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+  10,  11};
+
+const WebRtc_UWord16 WebRtcIsacfix_kSelIndShape[108] = {
+  0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+  10,  11,  12,  13,  14,  15,  16,  17,  18,  19,
+  20,  21,  22,  23,  24,  25,  26,  27,  28,  29,
+  30,  31,  32,  33,  34,  35,  36,  37,  38,  39,
+  40,  41,  42,  43,  44,  45,  46,  47,  48,  49,
+  50,  51,  52,  53,  54,  55,  56,  57,  58,  59,
+  60,  61,  62,  63,  64,  65,  66,  67,  68,  69,
+  70,  71,  72,  73,  74,  75,  76,  77,  78,  79,
+  80,  81,  82,  83,  84,  85,  86,  87,  88,  89,
+  90,  91,  92,  93,  94,  95,  96,  97,  98,  99,
+  100,  101,  102,  103,  104,  105,  106,  107
+};
+
+/* cdf array for model indicator */
+const WebRtc_UWord16 WebRtcIsacfix_kModelCdf[4] = {
+  0,  15434,  37548,  65535
+};
+
+/* pointer to cdf array for model indicator */
+const WebRtc_UWord16 *WebRtcIsacfix_kModelCdfPtr[1] = {
+  WebRtcIsacfix_kModelCdf
+};
+
+/* initial cdf index for decoder of model indicator */
+const WebRtc_UWord16 WebRtcIsacfix_kModelInitIndex[1] = {
+  1
+};
+
+/* offset to go from rounded value to quantization index */
+const WebRtc_Word16 WebRtcIsacfix_kQuantMinGain[12] ={
+  3,  6,  4,  6,  6,  9,  5,  16,  11,  34,  32,  47
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kQuantMinShape[108] = {
+  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,
+  1,  1,  1,  1,  2,  2,  2,  3,  0,  0,
+  0,  0,  1,  0,  0,  0,  0,  1,  1,  1,
+  1,  1,  1,  2,  2,  3,  0,  0,  0,  0,
+  1,  0,  1,  1,  1,  1,  1,  1,  1,  2,
+  2,  4,  3,  5,  0,  0,  0,  0,  1,  1,
+  1,  1,  1,  1,  2,  1,  2,  2,  3,  4,
+  4,  7,  0,  0,  1,  1,  1,  1,  1,  1,
+  1,  2,  3,  2,  3,  4,  4,  5,  7,  13,
+  0,  1,  1,  2,  3,  2,  2,  2,  4,  4,
+  5,  6,  7,  11, 9, 13, 12, 26
+};
+
+/* maximum quantization index */
+const WebRtc_UWord16 WebRtcIsacfix_kMaxIndGain[12] = {
+  6,  12,  8,  14,  10,  19,  12,  31,  22,  56,  52,  138
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kMaxIndShape[108] = {
+  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,
+  2,  2,  2,  2,  4,  4,  5,  6,  0,  0,
+  0,  0,  1,  0,  0,  0,  0,  1,  2,  2,
+  2,  2,  3,  4,  5,  7,  0,  0,  0,  0,
+  2,  0,  2,  2,  2,  2,  3,  2,  2,  4,
+  4,  6,  6,  9,  0,  0,  0,  0,  2,  2,
+  2,  2,  2,  2,  3,  2,  4,  4,  7,  7,
+  9,  13, 0,  0,  2,  2,  2,  2,  2,  2,
+  3,  4,  5,  4,  6,  8,  8, 10, 16, 25,
+  0,  2,  2,  4,  5,  4,  4,  4,  7,  8,
+  9, 10, 13, 19, 17, 23, 25, 49
+};
+
+/* index offset */
+const WebRtc_UWord16 WebRtcIsacfix_kOffsetGain[3][12] = {
+  { 0,  7,  20,  29,  44,  55,  75,  88,  120,  143,  200,  253},
+  { 0,  7,  19,  27,  42,  53,  73,  86,  117,  140,  197,  249},
+  { 0,  7,  20,  28,  44,  55,  75,  89,  121,  145,  202,  257}
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kOffsetShape[3][108] = {
+  {
+    0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+    11,  14,  17,  20,  23,  28,  33,  39,  46,  47,
+    48,  49,  50,  52,  53,  54,  55,  56,  58,  61,
+    64,  67,  70,  74,  79,  85,  93,  94,  95,  96,
+    97,  100,  101,  104,  107,  110,  113,  117,  120,  123,
+    128,  133,  140,  147,  157,  158,  159,  160,  161,  164,
+    167,  170,  173,  176,  179,  183,  186,  191,  196,  204,
+    212,  222,  236,  237,  238,  241,  244,  247,  250,  253,
+    256,  260,  265,  271,  276,  283,  292,  301,  312,  329,
+    355,  356,  359,  362,  367,  373,  378,  383,  388,  396,
+    405,  415,  426,  440,  460,  478,  502,  528
+  },
+  {
+    0,  1,  2,  3,  4,  6,  7,  8,  9,  11,
+    13,  16,  19,  22,  26,  29,  34,  39,  45,  46,
+    47,  48,  49,  50,  51,  52,  53,  55,  57,  60,
+    63,  66,  70,  73,  78,  84,  91,  92,  93,  94,
+    95,  96,  97,  99,  102,  105,  108,  111,  114,  118,
+    123,  128,  134,  141,  151,  152,  153,  154,  156,  159,
+    162,  165,  168,  171,  174,  177,  181,  186,  194,  200,
+    208,  218,  233,  234,  235,  236,  239,  242,  245,  248,
+    251,  254,  258,  263,  270,  277,  288,  297,  308,  324,
+    349,  351,  354,  357,  361,  366,  372,  378,  383,  390,
+    398,  407,  420,  431,  450,  472,  496,  524
+  },
+  {
+    0,  1,  2,  3,  4,  5,  6,  7,  8,  11,
+    14,  17,  20,  23,  26,  29,  34,  40,  47,  48,
+    49,  50,  51,  52,  53,  54,  55,  58,  61,  64,
+    67,  70,  73,  77,  82,  88,  96,  97,  98,  99,
+    101,  102,  104,  107,  110,  113,  116,  119,  122,  125,
+    129,  134,  141,  150,  160,  161,  162,  163,  166,  168,
+    171,  174,  177,  180,  183,  186,  190,  195,  201,  208,
+    216,  226,  243,  244,  245,  248,  251,  254,  257,  260,
+    263,  268,  273,  278,  284,  291,  299,  310,  323,  340,
+    366,  368,  371,  374,  379,  383,  389,  394,  399,  406,
+    414,  422,  433,  445,  461,  480,  505,  533
+  }
+};
+
+/* initial cdf index for KLT coefficients */
+const WebRtc_UWord16 WebRtcIsacfix_kInitIndexGain[3][12] = {
+  { 3,  6,  4,  7,  5,  10,  6,  16,  11,  28,  26,  69},
+  { 3,  6,  4,  7,  5,  10,  6,  15,  11,  28,  26,  69},
+  { 3,  6,  4,  8,  5,  10,  7,  16,  12,  28,  27,  70}
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kInitIndexShape[3][108] = {
+  {
+    0,  0,  0,  0,  0,  0,  0,  0,  0,  1,
+    1,  1,  1,  1,  2,  2,  3,  3,  0,  0,
+    0,  0,  1,  0,  0,  0,  0,  1,  1,  1,
+    1,  1,  2,  2,  3,  4,  0,  0,  0,  0,
+    1,  0,  1,  1,  1,  1,  2,  1,  1,  2,
+    2,  3,  3,  5,  0,  0,  0,  0,  1,  1,
+    1,  1,  1,  1,  2,  1,  2,  2,  4,  4,
+    5,  7,  0,  0,  1,  1,  1,  1,  1,  1,
+    2,  2,  3,  2,  3,  4,  4,  5,  8,  13,
+    0,  1,  1,  2,  3,  2,  2,  2,  4,  4,
+    5,  5,  7,  10,  9,  12,  13,  25
+  },
+  {
+    0,  0,  0,  0,  1,  0,  0,  0,  1,  1,
+    1,  1,  1,  2,  1,  2,  2,  3,  0,  0,
+    0,  0,  0,  0,  0,  0,  1,  1,  1,  1,
+    1,  2,  1,  2,  3,  3,  0,  0,  0,  0,
+    0,  0,  1,  1,  1,  1,  1,  1,  2,  2,
+    2,  3,  3,  5,  0,  0,  0,  1,  1,  1,
+    1,  1,  1,  1,  1,  2,  2,  4,  3,  4,
+    5,  7,  0,  0,  0,  1,  1,  1,  1,  1,
+    1,  2,  2,  3,  3,  5,  4,  5,  8,  12,
+    1,  1,  1,  2,  2,  3,  3,  2,  3,  4,
+    4,  6,  5,  9,  11,  12,  14,  25
+  },
+  {
+    0,  0,  0,  0,  0,  0,  0,  0,  1,  1,
+    1,  1,  1,  1,  1,  2,  3,  3,  0,  0,
+    0,  0,  0,  0,  0,  0,  1,  1,  1,  1,
+    1,  1,  2,  2,  3,  4,  0,  0,  0,  1,
+    0,  1,  1,  1,  1,  1,  1,  1,  1,  2,
+    2,  3,  4,  5,  0,  0,  0,  1,  1,  1,
+    1,  1,  1,  1,  1,  2,  2,  3,  3,  4,
+    5,  8,  0,  0,  1,  1,  1,  1,  1,  1,
+    2,  2,  2,  3,  3,  4,  5,  6,  8,  13,
+    1,  1,  1,  2,  2,  3,  2,  2,  3,  4,
+    4,  5,  6,  8,  9,  12,  14,  25
+  }
+};
+
+/* offsets for quantizer representation levels*/
+const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsGain[3] = {
+  0,  392,  779
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsShape[3] = {
+  0,  578,  1152
+};
+
+/* quantizer representation levels */
+
+
+
+const WebRtc_Word32 WebRtcIsacfix_kLevelsGainQ17[1176] = {
+  -364547,-231664,-102123,-573,104807,238257,368823,-758583,-640135,-510291
+  ,-377662,-252785,-113177,2627,112906,248601,389461,522691,644517,763974
+  ,-538963,-368179,-245823,-106095,-890,104299,241111,350730,493190,-800763
+  ,-646230,-510239,-382115,-248623,-111829,-2983,113852,251105,388114,519757
+  ,644048,774712,896334,1057931,-770009,-635310,-503690,-375087,-248106,-108525
+  ,-105,108259,243472,377948,519271,-1160885,-1032044,-914636,-777593,-647891
+  ,-518408,-388028,-254321,-115293,-598,117849,251296,385367,515510,652727
+  ,777432,920363,1038625,1153637,1316836,-632287,-505430,-379430,-248458,-118013
+  ,-888,118762,250266,381650,513327,652169,766471,932113,-2107480,-1971030
+  ,-1827020,-1698263,-1558670,-1436314,-1305377,-1172252,-1047355,-914202,-779651,-651001
+  ,-520999,-390394,-255761,-123490,-1893,126839,256703,385493,518607,651760
+  ,782750,908693,1044131,1163865,1311066,1424177,1582628,1709823,1831740,1955391
+  ,-1423044,-1288917,-1181281,-1043222,-911770,-780354,-646799,-522664,-386721,-258266
+  ,-128060,-1101,128233,259996,390336,519590,649290,778701,908010,1040796
+  ,1161235,1306889,1441882,-4446002,-4301031,-4194304,-4080591,-3947740,-3808975,-3686530
+  ,-3567839,-3383251,-3287089,-3136577,-3017405,-2869860,-2751321,-2619984,-2482932,-2354790
+  ,-2223147,-2090669,-1964135,-1831208,-1706697,-1570817,-1446008,-1305386,-1175773,-1046066
+  ,-915356,-785120,-653614,-524331,-393767,-260442,-130187,-799,128841,261466
+  ,393616,520542,652117,784613,914159,1045399,1181072,1308971,1442502,1570346
+  ,1693912,1843986,1966014,2090474,2224869,2364593,2475934,2628403,2752512,2856640
+  ,-4192441,-4063232,-3917821,-3799195,-3666233,-3519199,-3411021,-3269192,-3135684,-3008826
+  ,-2880875,-2747342,-2620981,-2494872,-2354979,-2229718,-2098939,-1964971,-1835399,-1703452
+  ,-1572806,-1440482,-1311794,-1179338,-1046521,-919823,-785914,-655335,-523416,-395507
+  ,-264833,-132184,-2546,131698,256217,391372,522688,651248,789964,909618
+  ,1035305,1179145,1313824,1436934,1552353,1693722,1815508,1972826,2096328,2228224
+  ,2359296,2490368,2598848,-6160384,-6029312,-5881382,-5767168,-5636096,-5505024,-5373952
+  ,-5228418,-5110384,-4954923,-4880576,-4710990,-4587364,-4471340,-4333905,-4211513,-4051293
+  ,-3907927,-3800105,-3675961,-3538640,-3413663,-3271148,-3152105,-3019103,-2869647,-2744015
+  ,-2620639,-2479385,-2364211,-2227611,-2095427,-1974497,-1834168,-1703561,-1568961,-1439826
+  ,-1309192,-1174050,-1050191,-917836,-786015,-656943,-518934,-394831,-257708,-128041
+  ,1610,128991,264442,393977,521383,653849,788164,918641,1049122,1181971
+  ,1308934,1439505,1571808,1706305,1836318,1966235,2097269,2228990,2357005,2490292
+  ,2617400,2749680,2881234,3014880,3145637,3276467,3409099,3536637,3671493,3802918
+  ,3929740,4065036,4194143,4325999,4456126,4586857,4717194,4843923,4978676,5110913
+  ,5245281,5371394,5499780,5633779,5762611,5897682,6028688,6167546,6296465,6421682
+  ,6548882,6682074,6809432,6941956,7078143,7204509,7334296,7475137,7609896,7732044
+  ,7861604,8002039,8131670,8259222,8390299,8522399,8650037,8782348,8908402,9037815
+  ,9164594,9300338,9434679,9574500,9699702,9833934,9948152,10083972,10244937,10332822
+  ,10485760,10600122,10760754,10892964,11010048,11111004,11272192,11403264,11525091,11624984
+  ,11796480,11915146,-393216,-262144,-101702,-740,100568,262144,393216,-786432
+  ,-655360,-524288,-383907,-243301,-94956,-156,95547,269629,416691,524288
+  ,655360,-393216,-262144,-88448,-37,87318,262144,393216,524288,-917504
+  ,-786432,-655360,-495894,-373308,-267503,-93211,4119,91308,250895,393216
+  ,526138,655360,786432,917504,-786432,-655360,-524288,-393216,-262144,-83497
+  ,222,86893,240922,393216,524288,-1048576,-917504,-790472,-655360,-508639
+  ,-383609,-262016,-95550,-3775,96692,256797,364847,534906,655360,786432
+  ,889679,1048576,1179648,1310720,1441792,-655360,-524288,-377684,-248408,-93690
+  ,1261,95441,227519,393216,524288,655360,786432,917504,-2097152,-1966080
+  ,-1809470,-1703936,-1572864,-1441792,-1314289,-1195149,-1056205,-917504,-809951,-657769
+  ,-521072,-383788,-248747,-106350,-2944,105550,243408,388548,521064,628732
+  ,786432,885456,1064548,1179648,1310720,1441792,1572864,1703936,1835008,-1441792
+  ,-1310720,-1179648,-1037570,-888492,-767774,-646634,-519935,-373458,-248029,-111915
+  ,760,111232,247735,379432,507672,672699,786432,917504,1048576,1179648
+  ,1310720,1441792,-4456448,-4325376,-4194304,-4063232,-3932160,-3801088,-3670016,-3538944
+  ,-3407872,-3276800,-3145728,-3014656,-2883584,-2752512,-2647002,-2490368,-2359296,-2228224
+  ,-2097152,-1951753,-1835008,-1703936,-1594177,-1462001,-1289150,-1160774,-1025917,-924928
+  ,-782509,-641294,-516191,-386630,-251910,-118886,5210,121226,253949,386008
+  ,517973,649374,780064,917783,1052462,1183856,1290593,1419389,1556641,1699884
+  ,1835008,1988314,2090470,2228224,2359296,2490368,2621440,2752512,2883584,-3801088
+  ,-3643514,-3539937,-3409931,-3263294,-3145658,-3012952,-2879230,-2752359,-2622556,-2483471
+  ,-2357556,-2226500,-2093112,-1965892,-1833664,-1701035,-1567767,-1440320,-1310556,-1178339
+  ,-1049625,-916812,-786477,-655277,-525050,-393773,-264828,-130696,-480,132126
+  ,260116,394197,527846,652294,785563,917183,1049511,1175958,1308161,1438759
+  ,1572253,1698835,1828535,1967072,2089391,2212798,2348901,2461547,2621440,2752512
+  ,2883584,-7309870,-7203780,-7062699,-6939106,-6790819,-6672036,-6553600,-6422317,-6288422
+  ,-6164694,-6026456,-5901410,-5754168,-5621459,-5502710,-5369686,-5240454,-5120712,-4976140
+  ,-4847970,-4723070,-4589083,-4450923,-4324680,-4189892,-4065551,-3931803,-3800209,-3668539
+  ,-3539395,-3404801,-3277470,-3141389,-3016710,-2885724,-2752612,-2618541,-2486762,-2354153
+  ,-2225059,-2094984,-1968194,-1830895,-1699508,-1575743,-1444516,-1308683,-1179714,-1053088
+  ,-917981,-783707,-653900,-524980,-395409,-260309,-131948,-3452,132113,263241
+  ,392185,522597,654134,788288,919810,1045795,1179210,1314201,1444235,1574447
+  ,1705193,1834009,1967332,2098102,2229019,2359147,2489859,2619878,2754966,2879671
+  ,3014438,3146143,3276733,3405958,3542196,3667493,3798815,3932961,4062458,4187125
+  ,4322346,4454875,4587752,4716809,4848274,4975027,5111957,5242215,5373085,5501158
+  ,5640140,5762918,5895358,6024008,6157906,6290628,6422713,6546339,6675888,6815606
+  ,6955288,7077501,7211630,7337893,7473635,7607175,7728310,7866475,7999658,8127888
+  ,8241758,8386483,8522550,8641582,8771915,8922139,9038632,9179385,9313426,9437184
+  ,9568256,9699328,9830400,9952933,10120004,10223616,10354688,10474645,10616832,-393216
+  ,-262144,-85425,-121,82533,262144,393216,-786432,-655360,-524288,-379928
+  ,-222821,-95200,287,95541,227093,393216,493567,655360,786432,-393216
+  ,-262144,-86805,510,86722,262144,393216,524288,-1048576,-917504,-786432
+  ,-624456,-529951,-395071,-241627,-101168,81,99975,241605,393216,524288
+  ,655360,786432,917504,-786432,-655360,-524288,-393216,-230359,-95619,-137
+  ,94425,226222,393216,524288,-1179648,-1048576,-917504,-773841,-655360,-492258
+  ,-379715,-244707,-103621,-434,104523,242680,381575,523659,650565,786432
+  ,917504,1048576,1179648,1310720,-786432,-629344,-524288,-376757,-242858,-101932
+  ,-2715,107155,239212,366480,514943,655360,786432,917504,-2228224,-2097152
+  ,-1966080,-1835008,-1703936,-1572864,-1441792,-1284584,-1179648,-1048819,-934658,-777181
+  ,-626371,-515660,-377493,-248975,-113036,436,113584,248354,379718,512475
+  ,653932,796494,917504,1048576,1179648,1310720,1441792,1572864,1703936,1835008
+  ,-1572864,-1441792,-1297608,-1161159,-1032316,-917092,-779770,-647384,-515529,-384269
+  ,-250003,-119252,1053,118111,249512,380545,512039,648101,770656,907003
+  ,1021725,1178082,1310720,1441792,-4587520,-4456448,-4325376,-4194304,-4063232,-3932160
+  ,-3801088,-3670016,-3538944,-3407872,-3276800,-3145728,-2999335,-2883584,-2752512,-2621440
+  ,-2490368,-2359296,-2228224,-2112691,-1966080,-1848781,-1709830,-1566109,-1438427,-1303530
+  ,-1176124,-1040936,-913876,-784585,-652025,-518361,-385267,-256342,-127297,-2733
+  ,125422,257792,389363,519911,651106,783805,909407,1044143,1174156,1309267
+  ,1436173,1553771,1708958,1814083,1967036,2095386,2255169,2359296,2478303,2621440
+  ,2752512,-4456448,-4325376,-4194304,-4063232,-3932160,-3797524,-3670016,-3560250,-3413217
+  ,-3257719,-3166416,-2986626,-2878000,-2781144,-2625383,-2495465,-2346792,-2230930,-2077063
+  ,-1949225,-1819274,-1697261,-1568664,-1443074,-1304302,-1175289,-1043794,-913423,-785561
+  ,-652104,-522835,-392667,-260517,-130088,-2,129509,260990,391931,522470
+  ,655770,784902,917093,1046445,1176951,1303121,1441362,1565401,1702022,1822856
+  ,1952852,2090384,2214607,2338436,2457483,2621440,-8781824,-8650752,-8519680,-8388608
+  ,-8260828,-8126464,-8003337,-7859030,-7750057,-7602176,-7471104,-7340032,-7193045,-7090588
+  ,-6946816,-6843344,-6676635,-6557575,-6447804,-6277614,-6159736,-6035729,-5884723,-5739567
+  ,-5634818,-5489867,-5372864,-5243300,-5098939,-4988639,-4856258,-4728494,-4591717,-4447428
+  ,-4322409,-4192918,-4062638,-3934141,-3797545,-3673373,-3531587,-3407391,-3277404,-3147797
+  ,-3013578,-2886548,-2749811,-2616428,-2490949,-2361301,-2228482,-2096883,-1964343,-1831754
+  ,-1702201,-1572495,-1442012,-1309242,-1182451,-1048996,-916905,-786510,-657079,-524730
+  ,-393672,-261313,-128743,166,130678,261334,393287,524155,655570,786839
+  ,917353,1052167,1179013,1309360,1442634,1571153,1703961,1832027,1965014,2097912
+  ,2224861,2355341,2490455,2623051,2753484,2877015,3015783,3144157,3273705,3405255
+  ,3542006,3669580,3802417,3935413,4065088,4190896,4333521,4456355,4579781,4713832
+  ,4845707,4978625,5113278,5243817,5382318,5500592,5638135,5761179,5900822,6029270
+  ,6186398,6297816,6436435,6559163,6666389,6806548,6950461,7086078,7195777,7350973
+  ,7480132,7614852,7743514,7847288,8014762,8126464,8257536,8388608,8519680,8650752
+  ,8781824,8912896,9043968,9175040,9306112,9437184
+};
+
+
+
+const WebRtc_Word16 WebRtcIsacfix_kLevelsShapeQ10[1735] = {
+  0,     0,    -1,     0,     0,     1,     0,     1,     0,  -821
+  ,     1,  -763,    -1,   656,  -620,     0,   633,  -636,     4,   615
+  ,  -630,     1,   649, -1773,  -670,     5,   678,  1810, -1876,  -676
+  ,     0,   691,  1843, -1806,  -743,    -1,   749,  1795,  2920, -2872
+  , -1761,  -772,    -3,   790,  1763,  2942,     0,     0,     0,     0
+  ,  -792,     2,     0,     0,     1,     0,  -854,     0,  -702,    -1
+  ,   662,  -624,    -5,   638,  -611,    -6,   638,  -647,     0,   651
+  ,  -685,    -4,   679,  2123, -1814,  -693,     0,   664,  1791, -1735
+  ,  -737,     0,   771,  1854,  2873, -2867, -1842,  -793,    -1,   821
+  ,  1826,  2805,  3922,     0,     0,     0,    -1,  -779,     1,   786
+  ,     1,  -708,     0,   789,  -799,     1,   797,  -663,     2,   646
+  ,  -600,     3,   609,  -600,     1,   658,  1807,  -627,    -3,   612
+  ,  -625,     3,   632, -1732,  -674,     1,   672,  2048, -1768,  -715
+  ,     0,   724,  1784, -3881, -3072, -1774,  -719,    -1,   730,  1811
+  , -2963, -1829,  -806,    -1,   816,  1795,  3050, -5389, -3784, -2942
+  , -1893,  -865,   -12,   867,  1885,  2945,  3928,    -2,     1,     4
+  ,     0,  -694,     2,   665,  -598,     5,   587,  -599,    -1,   661
+  ,  -656,    -7,   611,  -607,     5,   603,  -618,    -4,   620, -1794
+  ,  -645,    -2,   654,  -655,    -1,   658, -1801,  -700,     5,   707
+  ,  1927, -1752,  -745,    -8,   752,  1843, -2838, -1781,  -801,    11
+  ,   796,  1811,  2942,  3866, -3849, -3026, -1848,  -819,     2,   827
+  ,  1825,  2963, -3873, -2904, -1869,  -910,    -6,   903,  1902,  2885
+  ,  3978,  5286, -7168, -6081, -4989, -3968, -2963, -1970,  -943,    -2
+  ,   953,  1951,  2968,  3974,  5009,  6032,    -2,     3, -1024,     2
+  ,  1024,  -637,     1,   669,  -613,    -7,   630,  -603,     4,   612
+  ,  -612,     0,   590,  -645,   -11,   627,  -657,    -2,   671,  1849
+  , -1853,  -694,     2,   702,  1838, -3304, -1780,  -736,    -8,   732
+  ,  1772, -1709,  -755,    -6,   760,  1780, -2994, -1780,  -800,     8
+  ,   819,  1830,  2816, -4096, -2822, -1881,  -851,    -4,   855,  1872
+  ,  2840,  3899, -3908, -2904, -1878,  -887,     6,   897,  1872,  2942
+  ,  4008, -4992, -3881, -2933, -1915,  -928,     1,   937,  1919,  2900
+  ,  4009,  4881, -6848, -6157, -5065, -3981, -2983, -1972,  -978,    -1
+  ,   968,  1979,  2988,  4008,  5007,  6108,  7003,  8051,  9027,-13272
+  ,-12012,-11228,-10213, -9261, -8084, -7133, -6075, -5052, -4050, -3036
+  , -2014,  -996,    -4,  1007,  2031,  3038,  4049,  5074,  6134,  7069
+  ,  8094,  9069, 10212, 11049, 12104,    51, -1024,   -13,  1024,  -609
+  ,  -107,   613, -2048,  -687,   -95,   667,  2048, -3072, -1724,  -785
+  ,   -34,   732,  1819, -2048,  -703,   -26,   681,  2048, -2048,  -686
+  ,    -9,   665,  2048, -2048,  -702,    37,   748,  1723, -4096, -2786
+  , -1844,  -837,    37,   811,  1742,  3072, -4096, -2783, -1848,  -881
+  ,    39,   898,  1843,  2792,  3764, -5120, -4096, -2923, -1833,  -852
+  ,   -14,   862,  1824,  2834,  4096, -6144, -5120, -3914, -2842, -1870
+  ,  -886,   -27,   888,  1929,  2931,  4051, -7168, -6144, -5120, -3866
+  , -2933, -1915,  -927,    64,   933,  1902,  2929,  3912,  5063,  6144
+  ,-11264,-10240, -9216, -8192, -7086, -6144, -5039, -3972, -2943, -1929
+  ,  -941,     3,   938,  1942,  2959,  3933,  4905,  6088,  6983,  8192
+  , -9216, -8192, -7202, -6088, -4983, -4019, -2955, -1975,  -966,    17
+  ,   997,  1981,  2967,  3990,  4948,  6022,  6967,  8192,-13312,-12288
+  ,-11264,-10240, -9216, -8049, -6997, -6040, -5026, -4043, -3029, -2034
+  , -1015,   -23,   984,  1997,  3010,  4038,  5002,  6015,  6946,  8061
+  ,  9216, 10240,-12381,-11264,-10240, -9060, -8058, -7153, -6085, -5075
+  , -4051, -3042, -2037, -1017,    -5,  1007,  2028,  3035,  4050,  5088
+  ,  6111,  7160,  8156,  9215, 10095, 11229, 12202, 13016,-26624,-25600
+  ,-24582,-23671,-22674,-21400,-20355,-19508,-18315,-17269,-16361,-15299
+  ,-14363,-13294,-12262,-11237,-10203, -9227, -8165, -7156, -6116, -5122
+  , -4076, -3056, -2043, -1020,    -8,  1027,  2047,  3065,  4110,  5130
+  ,  6125,  7168,  8195,  9206, 10230, 11227, 12256, 13304, 14281, 15316
+  , 16374, 17382, 18428, 19388, 20361, 21468, 22448, 23781,     0,     0
+  ,    -1,     0,    -2,  1024,     0,     0,     0,    -1,  1024, -1024
+  ,     1, -1024,     4,  1024, -1024,     2,  1024, -1024,     2,  1024
+  , -2048, -1024,    -4,  1024, -1024,     2,  1024, -2048, -1024,    -3
+  ,  1024,  2048, -2048, -1024,     4,  1024,  2048, -3072, -2048, -1024
+  ,    -1,   662,  2048,     0,     1,     0,     0,     1,    -2,    -2
+  ,     0,     2,  1024,    -1,  1024, -1024,     4,  1024, -1024,     1
+  ,  1024, -1024,     1,  1024, -2048,  -781,    -4,   844,  -807,    -5
+  ,   866, -2048,  -726,   -13,   777,  2048, -2048,  -643,    -4,   617
+  ,  2048,  3072, -3072, -2048,  -629,     1,   630,  2048,  3072,     0
+  ,    -1,     1,    -2,     2,     1, -1024,     5, -1024,     6,  1024
+  , -1024,     4,  1024, -1024,     1,  1024, -1024,    -9,  1024,  -673
+  ,    -7,   655, -2048,  -665,   -15,   716, -2048,  -647,     4,   640
+  ,  2048, -2048,  -615,    -1,   635,  2048, -2048,  -613,    10,   637
+  ,  2048,  3072, -3072, -2048,  -647,    -3,   641,  2048,  3072, -5120
+  , -4096, -3072, -2048,  -681,     6,   685,  2048,  3072,  4096,     1
+  ,     1,     0,    -1,  1024, -1024,    -3,  1024, -1024,     6,  1024
+  , -1024,    -1,   769,  -733,     0,  1024,  -876,    -2,   653, -1024
+  ,    -4,   786,  -596,   -13,   595,  -634,    -2,   638,  2048, -2048
+  ,  -620,    -5,   620,  2048, -4096, -3072, -2048,  -639,    11,   655
+  ,  2048,  3072, -3072, -2048,  -659,     5,   663,  2048, -3072, -1823
+  ,  -687,    22,   695,  2048,  3072,  4096, -4096, -3072, -1848,  -715
+  ,    -3,   727,  1816,  3072,  4096,  5120, -8192, -7168, -6144, -5120
+  , -4096, -2884, -1771,  -756,   -14,   775,  1844,  3072,  4096,  5120
+  ,  6144,    -1,     1,     0, -1024,     2,   815,  -768,     2,   708
+  , -1024,    -3,   693,  -661,    -7,   607,  -643,    -5,   609,  -624
+  ,     3,   631,  -682,    -3,   691,  2048, -2048,  -640,     5,   650
+  ,  2048, -3072, -2048,  -701,     9,   704,  2048,  3072, -3072, -2048
+  ,  -670,    10,   674,  2048,  3072, -5120, -4096, -3072, -1749,  -738
+  ,     0,   733,  1811,  3072,  4096,  5120, -4096, -3072, -1873,  -753
+  ,     0,   756,  1874,  3072,  4096, -5120, -4096, -2900, -1838,  -793
+  ,    -6,   793,  1868,  2837,  4096,  5120, -7168, -6144, -5120, -4096
+  , -2832, -1891,  -828,     1,   828,  1901,  2823,  3912,  5120,  6144
+  ,  7168,  8192,-13312,-12288,-11264,-10240, -9216, -8192, -7168, -6144
+  , -5120, -3976, -3004, -1911,  -869,     7,   869,  1932,  3024,  3992
+  ,  5009,  6144,  7168,  8192,  9216, 10240, 11264,    -4,  1024,  -629
+  ,   -22,   609,  -623,     9,   640, -2048,  -768,     1,   682, -2048
+  ,  -741,    49,   722,  2048, -3072, -1706,  -808,   -20,   768,  1750
+  , -1684,  -727,   -29,   788,  1840,  3033, -1758,  -784,     0,   801
+  ,  1702, -3072, -1813,  -814,    38,   820,  1884,  2927, -4096, -3241
+  , -1839,  -922,    25,   882,  1886,  2812, -4096, -2982, -1923,  -894
+  ,    84,   912,  1869,  2778,  4096, -4928, -3965, -2902, -1920,  -883
+  ,     3,   917,  1953,  2921,  3957,  4922,  6144,  7168, -5120, -3916
+  , -2897, -1949,  -930,    31,   959,  1934,  2901,  3851,  5120, -9216
+  , -8192, -7046, -6029, -5030, -4034, -2980, -1969, -1013,   -76,   963
+  ,  1963,  2901,  3929,  4893,  6270,  7168,  8192,  9216,-12288,-11264
+  ,-10240, -9216, -8192, -6846, -6123, -5108, -4008, -3000, -1963,  -954
+  ,    -6,   958,  1992,  3009,  4020,  5085,  6097,  7168,  8192,  9216
+  ,-11264,-10139, -9194, -8127, -7156, -6102, -5053, -4049, -3036, -2025
+  , -1009,   -34,   974,  1984,  3034,  4028,  5138,  6000,  7057,  8166
+  ,  9070, 10033, 11360, 12288,-13312,-12288,-10932,-10190, -9120, -8123
+  , -7128, -6103, -5074, -4081, -3053, -2029,  -989,    -4,  1010,  2028
+  ,  3051,  4073,  5071,  6099,  7132,  8147,  9295, 10159, 11023, 12263
+  , 13312, 14336,-25600,-24576,-23552,-22529,-21504,-20480,-19456,-18637
+  ,-17425,-16165,-15316,-14327,-13606,-12135,-11182,-10107, -9153, -8144
+  , -7146, -6160, -5129, -4095, -3064, -2038, -1025,     1,  1031,  2072
+  ,  3074,  4088,  5123,  6149,  7157,  8173,  9198, 10244, 11250, 12268
+  , 13263, 14289, 15351, 16370, 17402, 18413, 19474, 20337, 21386, 22521
+  , 23367, 24350,     0,     0,     0,     0,     0,     0,     0,     0
+  , -1024,     0,  1024, -1024,     0,  1024, -1024,     0,  1024, -1024
+  ,     0,  1024, -1024,     0,  1024,  -773,     0,  1024,  -674,     0
+  ,   645, -2048,  -745,     0,   628,  2048, -2048,  -712,     0,   681
+  ,  2048,  3072, -3072, -2048,  -673,     0,   682,  1964,  3257,     0
+  ,     0,     0,     0,     0,     0,     0,     0, -1024,     0,  1024
+  , -1024,     0,  1024, -1024,     0,  1024,  -705,     0,   623,  -771
+  ,     0,  1024,  -786,     0,   688,  -631,     0,   652,  2048, -2048
+  ,  -627,    -1,   666,  2048, -3072, -1756,  -694,     0,   674,  2048
+  , -3098, -1879,  -720,     5,   694,  1886,  2958,  4096,     0,     0
+  ,     0,     0,  1024,     0,     0,  1024,  -769,     0,  1024, -1024
+  ,     0,  1024, -1024,     0,  1024,  -817,     0,   734,  -786,     0
+  ,   651,  -638,     0,   637,  -623,     0,   671,  -652,     0,   619
+  ,  2048, -2048,  -670,    -1,   663,  2048, -1908,  -680,     1,   686
+  ,  2048,  3072,  4096, -4096, -3072, -1833,  -711,     0,   727,  1747
+  ,  3072,  4096, -4096, -2971, -1826,  -762,     2,   766,  1832,  2852
+  ,  3928,  5079,     0,     0,     0, -1024,     0,  1024, -1024,     0
+  ,  -656,     0,  1024,  -599,     0,   620, -1024,     0,  1024,  -603
+  ,     0,   622,  -643,     0,   660,  -599,     0,   611,  -641,    -1
+  ,   651,  2048, -2048,  -648,    -2,   647,  1798, -3072, -2048,  -672
+  ,     2,   670,  2048, -3072, -1780,  -694,    -1,   706,  1751,  3072
+  , -3072, -1862,  -757,     7,   739,  1798,  3072,  4096, -5120, -4096
+  , -3253, -1811,  -787,     3,   782,  1887,  3123,  4096, -7252, -6144
+  , -5354, -4060, -2864, -1863,  -820,   -11,   847,  1903,  2970,  3851
+  ,  4921,  5957,  7168,  8192,  9306,     0,     0, -1024,     0,  1024
+  ,  -726,     0,   706,  -692,     0,   593,  -598,     0,   616,  -624
+  ,     0,   616,  -605,     0,   613, -2048,  -652,     1,   635,  2048
+  , -2048,  -647,    -1,   660,  2048, -1811,  -668,    -2,   685,  2048
+  , -1796,  -731,    -2,   730,  1702,  3072, -3072, -1766,  -747,    -4
+  ,   756,  1770,  3072, -4096, -3024, -1762,  -783,     4,   771,  1781
+  ,  3072, -5120, -4057, -2807, -1832,  -822,     0,   816,  1804,  2851
+  ,  3949,  5120, -6144, -4899, -3927, -2920, -1893,  -874,    -2,   868
+  ,  1881,  2905,  3960,  4912,  6144, -9216, -8192, -7168, -6225, -4963
+  , -3943, -2956, -1890,  -902,     0,   897,  1914,  2916,  3984,  4990
+  ,  6050,  7168,-11264,-10217, -9114, -8132, -7035, -5988, -4984, -4000
+  , -2980, -1962,  -927,     7,   931,  1956,  2981,  4031,  4972,  6213
+  ,  7227,  8192,  9216, 10240, 11170, 12288, 13312, 14336,     0,  1024
+  ,  -557,     1,   571,  -606,    -4,   612, -1676,  -707,    10,   673
+  ,  2048, -2048,  -727,     5,   686, -3072, -1772,  -755,    12,   716
+  ,  1877, -1856,  -786,     2,   786,  1712, -1685,  -818,   -16,   863
+  ,  1729, -3072, -1762,  -857,     3,   866,  1838,  2841, -3862, -2816
+  , -1864,  -925,    -2,   923,  1897,  2779, -2782, -1838,  -920,   -28
+  ,   931,  1951,  2835,  3804, -4815, -4001, -2940, -1934,  -959,   -22
+  ,   975,  1957,  2904,  3971,  4835, -5148, -3892, -2944, -1953,  -986
+  ,   -11,   989,  1968,  2939,  3949,  4947,  5902, -9216, -8192, -6915
+  , -6004, -4965, -4013, -3009, -1977,  -987,    -1,   982,  1972,  3000
+  ,  3960,  4939,  5814, -8976, -7888, -7084, -5955, -5043, -4009, -2991
+  , -2002, -1000,    -8,   993,  2011,  3023,  4026,  5028,  6023,  7052
+  ,  8014,  9216,-11240,-10036, -9125, -8118, -7105, -6062, -5048, -4047
+  , -3044, -2025, -1009,    -1,  1011,  2023,  3042,  4074,  5085,  6108
+  ,  7119,  8142,  9152, 10114, 11141, 12250, 13307,-15360,-14099,-13284
+  ,-12291,-11223,-10221, -9152, -8147, -7128, -6104, -5077, -4072, -3062
+  , -2033, -1020,     7,  1018,  2038,  3059,  4081,  5084,  6109,  7102
+  ,  8128,  9134, 10125, 11239, 12080,-23552,-22528,-21504,-20480,-19456
+  ,-18159,-17240,-16291,-15364,-14285,-13305,-12271,-11233,-10217, -9198
+  , -8175, -7157, -6134, -5122, -4089, -3071, -2047, -1018,     3,  1026
+  ,  2041,  3077,  4090,  5108,  6131,  7150,  8172,  9175, 10196, 11272
+  , 12303, 13273, 14328, 15332, 16334, 17381, 18409, 19423, 20423, 21451
+  , 22679, 23391, 24568, 25600, 26589
+};
+
+/* cdf tables for quantizer indices */
+const WebRtc_UWord16 WebRtcIsacfix_kCdfGain[1212] = {
+  0,  13,  301,  3730,  61784,  65167,  65489,  65535,  0,  17,
+  142,  314,  929,  2466,  7678,  56450,  63463,  64740,  65204,  65426,
+  65527,  65535,  0,  8,  100,  724,  6301,  60105,  65125,  65510,
+  65531,  65535,  0,  13,  117,  368,  1068,  3010,  11928,  53603,
+  61177,  63404,  64505,  65108,  65422,  65502,  65531,  65535,  0,  4,
+  17,  96,  410,  1859,  12125,  54361,  64103,  65305,  65497,  65535,
+  0,  4,  88,  230,  469,  950,  1746,  3228,  6092,  16592,
+  44756,  56848,  61256,  63308,  64325,  64920,  65309,  65460,  65502,  65522,
+  65535,  0,  88,  352,  1675,  6339,  20749,  46686,  59284,  63525,
+  64949,  65359,  65502,  65527,  65535,  0,  13,  38,  63,  117,
+  234,  381,  641,  929,  1407,  2043,  2809,  4032,  5753,  8792,
+  14407,  24308,  38941,  48947,  55403,  59293,  61411,  62688,  63630,  64329,
+  64840,  65188,  65376,  65472,  65506,  65527,  65531,  65535,  0,  8,
+  29,  75,  222,  615,  1327,  2801,  5623,  9931,  16094,  24966,
+  34419,  43458,  50676,  56186,  60055,  62500,  63936,  64765,  65225,  65435,
+  65514,  65535,  0,  8,  13,  15,  17,  21,  33,  59,
+  71,  92,  151,  243,  360,  456,  674,  934,  1223,  1583,
+  1989,  2504,  3031,  3617,  4354,  5154,  6163,  7411,  8780,  10747,
+  12874,  15591,  18974,  23027,  27436,  32020,  36948,  41830,  46205,  49797,
+  53042,  56094,  58418,  60360,  61763,  62818,  63559,  64103,  64509,  64798,
+  65045,  65162,  65288,  65363,  65447,  65506,  65522,  65531,  65533,  65535,
+  0,  4,  6,  25,  38,  71,  138,  264,  519,  808,
+  1227,  1825,  2516,  3408,  4279,  5560,  7092,  9197,  11420,  14108,
+  16947,  20300,  23926,  27459,  31164,  34827,  38575,  42178,  45540,  48747,
+  51444,  54090,  56426,  58460,  60080,  61595,  62734,  63668,  64275,  64673,
+  64936,  65112,  65217,  65334,  65426,  65464,  65477,  65489,  65518,  65527,
+  65529,  65531,  65533,  65535,  0,  2,  4,  8,  10,  12,
+  14,  16,  21,  33,  50,  71,  84,  92,  105,  138,
+  180,  255,  318,  377,  435,  473,  511,  590,  682,  758,
+  913,  1097,  1256,  1449,  1671,  1884,  2169,  2445,  2772,  3157,
+  3563,  3944,  4375,  4848,  5334,  5820,  6448,  7101,  7716,  8378,
+  9102,  9956,  10752,  11648,  12707,  13670,  14758,  15910,  17187,  18472,
+  19627,  20649,  21951,  23169,  24283,  25552,  26862,  28227,  29391,  30764,
+  31882,  33213,  34432,  35600,  36910,  38116,  39464,  40729,  41872,  43144,
+  44371,  45514,  46762,  47813,  48968,  50069,  51032,  51974,  52908,  53737,
+  54603,  55445,  56282,  56990,  57572,  58191,  58840,  59410,  59887,  60264,
+  60607,  60946,  61269,  61516,  61771,  61960,  62198,  62408,  62558,  62776,
+  62985,  63207,  63408,  63546,  63739,  63906,  64070,  64237,  64371,  64551,
+  64677,  64836,  64999,  65095,  65213,  65284,  65338,  65380,  65426,  65447,
+  65472,  65485,  65487,  65489,  65502,  65510,  65512,  65514,  65516,  65518,
+  65522,  65531,  65533,  65535,  0,  2,  4,  6,  65528,  65531,
+  65533,  65535,  0,  2,  4,  6,  8,  10,  222,  65321,
+  65513,  65528,  65531,  65533,  65535,  0,  2,  4,  50,  65476,
+  65529,  65531,  65533,  65535,  0,  2,  4,  6,  8,  12,
+  38,  544,  64936,  65509,  65523,  65525,  65529,  65531,  65533,  65535,
+  0,  2,  4,  6,  8,  10,  1055,  64508,  65528,  65531,
+  65533,  65535,  0,  2,  4,  6,  8,  10,  12,  123,
+  3956,  62999,  65372,  65495,  65515,  65521,  65523,  65525,  65527,  65529,
+  65531,  65533,  65535,  0,  2,  4,  12,  53,  4707,  59445,
+  65467,  65525,  65527,  65529,  65531,  65533,  65535,  0,  2,  4,
+  6,  8,  10,  12,  14,  16,  38,  40,  50,  67,
+  96,  234,  929,  14345,  55750,  64866,  65389,  65462,  65514,  65517,
+  65519,  65521,  65523,  65525,  65527,  65529,  65531,  65533,  65535,  0,
+  2,  4,  6,  8,  10,  15,  35,  91,  377,  1946,
+  13618,  52565,  63714,  65184,  65465,  65520,  65523,  65525,  65527,  65529,
+  65531,  65533,  65535,  0,  2,  4,  6,  8,  10,  12,
+  14,  16,  18,  20,  22,  24,  26,  28,  30,  32,
+  34,  36,  38,  40,  42,  44,  46,  48,  50,  52,
+  54,  82,  149,  362,  751,  1701,  4239,  12893,  38627,  55072,
+  60875,  63071,  64158,  64702,  65096,  65283,  65412,  65473,  65494,  65505,
+  65508,  65517,  65519,  65521,  65523,  65525,  65527,  65529,  65531,  65533,
+  65535,  0,  2,  15,  23,  53,  143,  260,  418,  698,
+  988,  1353,  1812,  2411,  3144,  4015,  5143,  6401,  7611,  8999,
+  10653,  12512,  14636,  16865,  19404,  22154,  24798,  27521,  30326,  33102,
+  35790,  38603,  41415,  43968,  46771,  49435,  52152,  54715,  57143,  59481,
+  61178,  62507,  63603,  64489,  64997,  65257,  65427,  65473,  65503,  65520,
+  65529,  65531,  65533,  65535,  0,  3,  6,  9,  26,  32,
+  44,  46,  64,  94,  111,  164,  205,  254,  327,  409,
+  506,  608,  733,  885,  1093,  1292,  1482,  1742,  1993,  2329,
+  2615,  3029,  3374,  3798,  4257,  4870,  5405,  5992,  6618,  7225,
+  7816,  8418,  9051,  9761,  10532,  11380,  12113,  13010,  13788,  14594,
+  15455,  16361,  17182,  18088,  18997,  20046,  20951,  21968,  22947,  24124,
+  25296,  26547,  27712,  28775,  29807,  30835,  31709,  32469,  33201,  34014,
+  34876,  35773,  36696,  37620,  38558,  39547,  40406,  41277,  42367,  43290,
+  44445,  45443,  46510,  47684,  48973,  50157,  51187,  52242,  53209,  54083,
+  55006,  55871,  56618,  57293,  57965,  58556,  59222,  59722,  60180,  60554,
+  60902,  61250,  61554,  61837,  62100,  62372,  62631,  62856,  63078,  63324,
+  63557,  63768,  63961,  64089,  64235,  64352,  64501,  64633,  64770,  64887,
+  65001,  65059,  65121,  65188,  65246,  65302,  65346,  65390,  65428,  65463,
+  65477,  65506,  65515,  65517,  65519,  65521,  65523,  65525,  65527,  65529,
+  65531,  65533,  65535,  0,  2,  4,  109,  65332,  65531,  65533,
+  65535,  0,  2,  4,  6,  8,  25,  1817,  63874,  65511,
+  65527,  65529,  65531,  65533,  65535,  0,  2,  4,  907,  65014,
+  65529,  65531,  65533,  65535,  0,  2,  4,  6,  8,  10,
+  12,  132,  2743,  62708,  65430,  65525,  65527,  65529,  65531,  65533,
+  65535,  0,  2,  4,  6,  8,  35,  3743,  61666,  65485,
+  65531,  65533,  65535,  0,  2,  4,  6,  8,  10,  23,
+  109,  683,  6905,  58417,  64911,  65398,  65497,  65518,  65525,  65527,
+  65529,  65531,  65533,  65535,  0,  2,  4,  6,  53,  510,
+  10209,  55212,  64573,  65441,  65522,  65529,  65531,  65533,  65535,  0,
+  2,  4,  6,  8,  10,  12,  14,  16,  18,  20,
+  22,  32,  90,  266,  1037,  3349,  14468,  50488,  62394,  64685,
+  65341,  65480,  65514,  65519,  65521,  65523,  65525,  65527,  65529,  65531,
+  65533,  65535,  0,  2,  4,  6,  9,  16,  37,  106,
+  296,  748,  1868,  5733,  18897,  45553,  60165,  63949,  64926,  65314,
+  65441,  65508,  65524,  65529,  65531,  65533,  65535,  0,  2,  4,
+  6,  8,  10,  12,  14,  16,  18,  20,  22,  24,
+  26,  28,  30,  32,  34,  36,  38,  40,  42,  44,
+  46,  48,  50,  83,  175,  344,  667,  1293,  2337,  4357,
+  8033,  14988,  28600,  43244,  52011,  57042,  59980,  61779,  63065,  63869,
+  64390,  64753,  64988,  65164,  65326,  65422,  65462,  65492,  65506,  65522,
+  65524,  65526,  65531,  65533,  65535,  0,  2,  4,  6,  8,
+  10,  12,  14,  16,  25,  39,  48,  55,  62,  65,
+  85,  106,  139,  169,  194,  252,  323,  485,  688,  1074,
+  1600,  2544,  3863,  5733,  8303,  11397,  15529,  20273,  25734,  31455,
+  36853,  41891,  46410,  50306,  53702,  56503,  58673,  60479,  61880,  62989,
+  63748,  64404,  64852,  65124,  65309,  65424,  65480,  65524,  65528,  65533,
+  65535,  0,  2,  4,  6,  8,  10,  12,  14,  21,
+  23,  25,  27,  29,  31,  39,  41,  43,  48,  60,
+  72,  79,  106,  136,  166,  187,  224,  252,  323,  381,
+  427,  478,  568,  660,  783,  912,  1046,  1175,  1365,  1567,
+  1768,  2024,  2347,  2659,  3049,  3529,  4033,  4623,  5281,  5925,
+  6726,  7526,  8417,  9468,  10783,  12141,  13571,  15222,  16916,  18659,
+  20350,  22020,  23725,  25497,  27201,  29026,  30867,  32632,  34323,  36062,
+  37829,  39466,  41144,  42654,  43981,  45343,  46579,  47759,  49013,  50171,
+  51249,  52283,  53245,  54148,  54938,  55669,  56421,  57109,  57791,  58464,
+  59092,  59674,  60105,  60653,  61083,  61407,  61757,  62095,  62388,  62649,
+  62873,  63157,  63358,  63540,  63725,  63884,  64046,  64155,  64278,  64426,
+  64548,  64654,  64806,  64906,  64994,  65077,  65137,  65215,  65277,  65324,
+  65354,  65409,  65437,  65455,  65462,  65490,  65495,  65499,  65508,  65511,
+  65513,  65515,  65517,  65519,  65521,  65523,  65525,  65527,  65529,  65531,
+  65533,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kCdfShape[2059] = {
+  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,
+  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  4,
+  65535,  0,  8,  65514,  65535,  0,  29,  65481,  65535,  0,
+  121,  65439,  65535,  0,  239,  65284,  65535,  0,  8,  779,
+  64999,  65527,  65535,  0,  8,  888,  64693,  65522,  65535,  0,
+  29,  2604,  62843,  65497,  65531,  65535,  0,  25,  176,  4576,
+  61164,  65275,  65527,  65535,  0,  65535,  0,  65535,  0,  65535,
+  0,  65535,  0,  4,  65535,  0,  65535,  0,  65535,  0,
+  65535,  0,  65535,  0,  4,  65535,  0,  33,  65502,  65535,
+  0,  54,  65481,  65535,  0,  251,  65309,  65535,  0,  611,
+  65074,  65535,  0,  1273,  64292,  65527,  65535,  0,  4,  1809,
+  63940,  65518,  65535,  0,  88,  4392,  60603,  65426,  65531,  65535,
+  0,  25,  419,  7046,  57756,  64961,  65514,  65531,  65535,  0,
+  65535,  0,  65535,  0,  65535,  0,  65535,  0,  4,  65531,
+  65535,  0,  65535,  0,  8,  65531,  65535,  0,  4,  65527,
+  65535,  0,  17,  65510,  65535,  0,  42,  65481,  65535,  0,
+  197,  65342,  65531,  65535,  0,  385,  65154,  65535,  0,  1005,
+  64522,  65535,  0,  8,  1985,  63469,  65533,  65535,  0,  38,
+  3119,  61884,  65514,  65535,  0,  4,  6,  67,  4961,  60804,
+  65472,  65535,  0,  17,  565,  9182,  56538,  65087,  65514,  65535,
+  0,  8,  63,  327,  2118,  14490,  52774,  63839,  65376,  65522,
+  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+  17,  65522,  65535,  0,  59,  65489,  65535,  0,  50,  65522,
+  65535,  0,  54,  65489,  65535,  0,  310,  65179,  65535,  0,
+  615,  64836,  65535,  0,  4,  1503,  63965,  65535,  0,  2780,
+  63383,  65535,  0,  21,  3919,  61051,  65527,  65535,  0,  84,
+  6674,  59929,  65435,  65535,  0,  4,  255,  7976,  55784,  65150,
+  65518,  65531,  65535,  0,  4,  8,  582,  10726,  53465,  64949,
+  65518,  65535,  0,  29,  339,  3006,  17555,  49517,  62956,  65200,
+  65497,  65531,  65535,  0,  2,  33,  138,  565,  2324,  7670,
+  22089,  45966,  58949,  63479,  64966,  65380,  65518,  65535,  0,  65535,
+  0,  65535,  0,  2,  65533,  65535,  0,  46,  65514,  65535,
+  0,  414,  65091,  65535,  0,  540,  64911,  65535,  0,  419,
+  65162,  65535,  0,  976,  64790,  65535,  0,  2977,  62495,  65531,
+  65535,  0,  4,  3852,  61034,  65527,  65535,  0,  4,  29,
+  6021,  60243,  65468,  65535,  0,  84,  6711,  58066,  65418,  65535,
+  0,  13,  281,  9550,  54917,  65125,  65506,  65535,  0,  2,
+  63,  984,  12108,  52644,  64342,  65435,  65527,  65535,  0,  29,
+  251,  2014,  14871,  47553,  62881,  65229,  65518,  65535,  0,  13,
+  142,  749,  4220,  18497,  45200,  60913,  64823,  65426,  65527,  65535,
+  0,  13,  71,  264,  1176,  3789,  10500,  24480,  43488,  56324,
+  62315,  64493,  65242,  65464,  65514,  65522,  65531,  65535,  0,  4,
+  13,  38,  109,  205,  448,  850,  1708,  3429,  6276,  11371,
+  19221,  29734,  40955,  49391,  55411,  59460,  62102,  63793,  64656,  65150,
+  65401,  65485,  65522,  65531,  65535,  0,  65535,  0,  2,  65533,
+  65535,  0,  1160,  65476,  65535,  0,  2,  6640,  64763,  65533,
+  65535,  0,  2,  38,  9923,  61009,  65527,  65535,  0,  2,
+  4949,  63092,  65533,  65535,  0,  2,  3090,  63398,  65533,  65535,
+  0,  2,  2520,  58744,  65510,  65535,  0,  2,  13,  544,
+  8784,  51403,  65148,  65533,  65535,  0,  2,  25,  1017,  10412,
+  43550,  63651,  65489,  65527,  65535,  0,  2,  4,  29,  783,
+  13377,  52462,  64524,  65495,  65533,  65535,  0,  2,  4,  6,
+  100,  1817,  18451,  52590,  63559,  65376,  65531,  65535,  0,  2,
+  4,  6,  46,  385,  2562,  11225,  37416,  60488,  65026,  65487,
+  65529,  65533,  65535,  0,  2,  4,  6,  8,  10,  12,
+  42,  222,  971,  5221,  19811,  45048,  60312,  64486,  65294,  65474,
+  65525,  65529,  65533,  65535,  0,  2,  4,  8,  71,  167,
+  666,  2533,  7875,  19622,  38082,  54359,  62108,  64633,  65290,  65495,
+  65529,  65533,  65535,  0,  2,  4,  6,  8,  10,  13,
+  109,  586,  1930,  4949,  11600,  22641,  36125,  48312,  56899,  61495,
+  63927,  64932,  65389,  65489,  65518,  65531,  65533,  65535,  0,  4,
+  6,  8,  67,  209,  712,  1838,  4195,  8432,  14432,  22834,
+  31723,  40523,  48139,  53929,  57865,  60657,  62403,  63584,  64363,  64907,
+  65167,  65372,  65472,  65514,  65535,  0,  2,  4,  13,  25,
+  42,  46,  50,  75,  113,  147,  281,  448,  657,  909,
+  1185,  1591,  1976,  2600,  3676,  5317,  7398,  9914,  12941,  16169,
+  19477,  22885,  26464,  29851,  33360,  37228,  41139,  44802,  48654,  52058,
+  55181,  57676,  59581,  61022,  62190,  63107,  63676,  64199,  64547,  64924,
+  65158,  65313,  65430,  65481,  65518,  65535,  0,  65535,  0,  65535,
+  0,  65535,  0,  65535,  0,  65533,  65535,  0,  65535,  0,
+  65535,  0,  65535,  0,  65533,  65535,  0,  2,  65535,  0,
+  2,  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,
+  65535,  0,  2,  4,  65533,  65535,  0,  2,  65533,  65535,
+  0,  2,  4,  65531,  65533,  65535,  0,  2,  4,  65531,
+  65533,  65535,  0,  2,  4,  6,  65524,  65533,  65535,  0,
+  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+  65535,  0,  65535,  0,  65535,  0,  65533,  65535,  0,  65533,
+  65535,  0,  2,  65533,  65535,  0,  2,  65533,  65535,  0,
+  2,  65533,  65535,  0,  2,  4,  65532,  65535,  0,  6,
+  65523,  65535,  0,  2,  15,  65530,  65533,  65535,  0,  2,
+  35,  65493,  65531,  65533,  65535,  0,  2,  4,  158,  65382,
+  65531,  65533,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+  65535,  0,  65535,  0,  65535,  0,  2,  65535,  0,  2,
+  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,  65535,
+  0,  2,  65533,  65535,  0,  9,  65512,  65535,  0,  2,
+  12,  65529,  65535,  0,  2,  73,  65434,  65533,  65535,  0,
+  2,  240,  65343,  65533,  65535,  0,  2,  476,  65017,  65531,
+  65533,  65535,  0,  2,  4,  1046,  64686,  65531,  65533,  65535,
+  0,  2,  4,  6,  8,  1870,  63898,  65529,  65531,  65533,
+  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65533,  65535,
+  0,  2,  65533,  65535,  0,  2,  65533,  65535,  0,  2,
+  65532,  65535,  0,  6,  65533,  65535,  0,  6,  65523,  65535,
+  0,  2,  65532,  65535,  0,  137,  65439,  65535,  0,  576,
+  64899,  65533,  65535,  0,  2,  289,  65299,  65533,  65535,  0,
+  2,  4,  6,  880,  64134,  65531,  65533,  65535,  0,  2,
+  4,  1853,  63347,  65533,  65535,  0,  2,  6,  2516,  61762,
+  65529,  65531,  65533,  65535,  0,  2,  4,  9,  3980,  61380,
+  65503,  65529,  65531,  65533,  65535,  0,  2,  4,  6,  8,
+  10,  12,  61,  6393,  59859,  65466,  65527,  65529,  65531,  65533,
+  65535,  0,  65535,  0,  65535,  0,  65535,  0,  2,  65532,
+  65535,  0,  3,  65529,  65535,  0,  2,  65529,  65535,  0,
+  61,  65453,  65535,  0,  234,  65313,  65535,  0,  503,  65138,
+  65535,  0,  155,  65402,  65533,  65535,  0,  2,  1058,  64554,
+  65533,  65535,  0,  2,  4,  3138,  62109,  65531,  65533,  65535,
+  0,  2,  4,  2031,  63339,  65531,  65533,  65535,  0,  2,
+  4,  6,  9,  4155,  60778,  65523,  65529,  65531,  65533,  65535,
+  0,  2,  4,  41,  6189,  59269,  65490,  65531,  65533,  65535,
+  0,  2,  4,  6,  210,  8789,  57043,  65400,  65528,  65531,
+  65533,  65535,  0,  2,  4,  6,  8,  26,  453,  10086,
+  55499,  64948,  65483,  65524,  65527,  65529,  65531,  65533,  65535,  0,
+  2,  4,  6,  8,  10,  12,  14,  16,  18,  20,
+  114,  1014,  11202,  52670,  64226,  65356,  65503,  65514,  65523,  65525,
+  65527,  65529,  65531,  65533,  65535,  0,  65533,  65535,  0,  15,
+  65301,  65535,  0,  152,  64807,  65535,  0,  2,  3328,  63308,
+  65535,  0,  2,  4050,  59730,  65533,  65535,  0,  2,  164,
+  10564,  61894,  65529,  65535,  0,  15,  6712,  59831,  65076,  65532,
+  65535,  0,  32,  7712,  57449,  65459,  65535,  0,  2,  210,
+  7849,  53110,  65021,  65523,  65535,  0,  2,  12,  1081,  13883,
+  48262,  62870,  65477,  65535,  0,  2,  88,  847,  6145,  37852,
+  62012,  65454,  65533,  65535,  0,  9,  47,  207,  1823,  14522,
+  45521,  61069,  64891,  65481,  65528,  65531,  65533,  65535,  0,  2,
+  9,  488,  2881,  12758,  38703,  58412,  64420,  65410,  65533,  65535,
+  0,  2,  4,  6,  61,  333,  1891,  6486,  19720,  43188,
+  57547,  62472,  64796,  65421,  65497,  65523,  65529,  65531,  65533,  65535,
+  0,  2,  4,  6,  8,  10,  12,  29,  117,  447,
+  1528,  6138,  21242,  43133,  56495,  62432,  64746,  65362,  65500,  65529,
+  65531,  65533,  65535,  0,  2,  18,  105,  301,  760,  1490,
+  3472,  7568,  15002,  26424,  40330,  53029,  60048,  62964,  64274,  64890,
+  65337,  65445,  65489,  65513,  65527,  65530,  65533,  65535,  0,  2,
+  4,  6,  41,  102,  409,  853,  2031,  4316,  7302,  11328,
+  16869,  24825,  34926,  43481,  50877,  56126,  59874,  62103,  63281,  63857,
+  64166,  64675,  65382,  65522,  65531,  65533,  65535,  0,  2,  4,
+  6,  8,  10,  12,  14,  16,  18,  29,  38,  53,
+  58,  96,  181,  503,  1183,  2849,  5590,  8600,  11379,  13942,
+  16478,  19453,  22638,  26039,  29411,  32921,  37596,  41433,  44998,  48560,
+  51979,  55106,  57666,  59892,  61485,  62616,  63484,  64018,  64375,  64685,
+  64924,  65076,  65278,  65395,  65471,  65509,  65529,  65535,  0,  65535,
+  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,
+  0,  65535,  0,  65535,  0,  2,  65533,  65535,  0,  2,
+  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,  65535,
+  0,  2,  65533,  65535,  0,  2,  65533,  65535,  0,  7,
+  65519,  65535,  0,  2,  14,  65491,  65533,  65535,  0,  2,
+  81,  65427,  65531,  65533,  65535,  0,  2,  4,  312,  65293,
+  65528,  65533,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+  2,  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,
+  65535,  0,  5,  65523,  65535,  0,  2,  65533,  65535,  0,
+  7,  65526,  65535,  0,  46,  65464,  65533,  65535,  0,  2,
+  120,  65309,  65533,  65535,  0,  2,  5,  362,  65097,  65533,
+  65535,  0,  2,  18,  1164,  64785,  65528,  65531,  65533,  65535,
+  0,  65535,  0,  65535,  0,  65535,  0,  65533,  65535,  0,
+  65535,  0,  65533,  65535,  0,  2,  65533,  65535,  0,  2,
+  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65530,  65535,
+  0,  2,  65523,  65535,  0,  69,  65477,  65535,  0,  141,
+  65459,  65535,  0,  194,  65325,  65533,  65535,  0,  2,  543,
+  64912,  65533,  65535,  0,  5,  1270,  64301,  65529,  65531,  65533,
+  65535,  0,  2,  4,  12,  2055,  63538,  65508,  65531,  65533,
+  65535,  0,  2,  7,  102,  3775,  61970,  65429,  65526,  65528,
+  65533,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  2,
+  65533,  65535,  0,  2,  65535,  0,  9,  65533,  65535,  0,
+  25,  65512,  65535,  0,  2,  65533,  65535,  0,  44,  65480,
+  65535,  0,  48,  65475,  65535,  0,  162,  65373,  65535,  0,
+  637,  64806,  65533,  65535,  0,  2,  935,  64445,  65533,  65535,
+  0,  2,  4,  1662,  64083,  65533,  65535,  0,  2,  12,
+  3036,  62469,  65521,  65533,  65535,  0,  2,  120,  5405,  60468,
+  65469,  65531,  65533,  65535,  0,  2,  4,  18,  254,  6663,
+  58999,  65272,  65528,  65533,  65535,  0,  2,  4,  9,  12,
+  67,  591,  8981,  56781,  64564,  65365,  65508,  65524,  65526,  65529,
+  65531,  65533,  65535,  0,  65535,  0,  65535,  0,  2,  65533,
+  65535,  0,  9,  65526,  65535,  0,  14,  65503,  65535,  0,
+  127,  65390,  65535,  0,  517,  64990,  65535,  0,  178,  65330,
+  65535,  0,  2,  1055,  64533,  65533,  65535,  0,  2,  1558,
+  63942,  65533,  65535,  0,  2,  2205,  63173,  65533,  65535,  0,
+  25,  4493,  60862,  65505,  65533,  65535,  0,  2,  48,  5890,
+  59442,  65482,  65533,  65535,  0,  2,  4,  127,  7532,  58191,
+  65394,  65533,  65535,  0,  2,  5,  32,  550,  10388,  54924,
+  65046,  65510,  65531,  65533,  65535,  0,  2,  4,  30,  150,
+  1685,  14340,  51375,  63619,  65288,  65503,  65528,  65533,  65535,  0,
+  2,  4,  6,  8,  28,  97,  473,  2692,  15407,  50020,
+  62880,  65064,  65445,  65508,  65531,  65533,  65535,  0,  2,  4,
+  12,  32,  79,  150,  372,  907,  2184,  5868,  18207,  45431,
+  59856,  64031,  65096,  65401,  65481,  65507,  65521,  65523,  65525,  65527,
+  65529,  65531,  65533,  65535,  0,  65533,  65535,  0,  182,  65491,
+  65535,  0,  877,  64286,  65535,  0,  9,  2708,  63612,  65533,
+  65535,  0,  2,  6038,  59532,  65535,  0,  2,  92,  5500,
+  60539,  65533,  65535,  0,  268,  8908,  56512,  65385,  65535,  0,
+  129,  13110,  52742,  65036,  65535,  0,  2,  806,  14003,  51929,
+  64732,  65523,  65535,  0,  7,  92,  2667,  18159,  47678,  62610,
+  65355,  65535,  0,  32,  1836,  19676,  48237,  61677,  64960,  65526,
+  65535,  0,  21,  159,  967,  5668,  22782,  44709,  58317,  64020,
+  65406,  65528,  65535,  0,  7,  162,  1838,  8328,  23929,  43014,
+  56394,  63374,  65216,  65484,  65521,  65535,  0,  2,  4,  6,
+  28,  268,  1120,  3613,  10688,  24185,  40989,  54917,  61684,  64510,
+  65403,  65530,  65535,  0,  2,  16,  44,  139,  492,  1739,
+  5313,  13558,  26766,  41566,  52446,  58937,  62815,  64480,  65201,  65454,
+  65524,  65533,  65535,  0,  7,  25,  76,  263,  612,  1466,
+  3325,  6832,  12366,  20152,  29466,  39255,  47360,  53506,  57740,  60726,
+  62845,  64131,  64882,  65260,  65459,  65521,  65528,  65530,  65535,  0,
+  2,  4,  14,  48,  136,  312,  653,  1240,  2369,  4327,
+  7028,  10759,  15449,  21235,  28027,  35386,  42938,  49562,  54990,  59119,
+  62086,  63916,  64863,  65249,  65445,  65493,  65523,  65535,  0,  2,
+  4,  6,  8,  10,  12,  21,  83,  208,  409,  723,
+  1152,  1868,  2951,  4463,  6460,  8979,  11831,  15195,  18863,  22657,
+  26762,  30881,  34963,  39098,  43054,  47069,  50620,  53871,  56821,  59386,
+  61340,  62670,  63512,  64023,  64429,  64750,  64944,  65126,  65279,  65366,
+  65413,  65445,  65473,  65505,  65510,  65521,  65528,  65530,  65535
+};
+
+/* pointers to cdf tables for quantizer indices */
+/* Per-model lookup table of gain-CDF start pointers: 3 models x 12 quantizer
+ * indices. Each entry points into WebRtcIsacfix_kCdfGain; the first addend
+ * (0 / 404 / 803) is the model's base offset within that array, the second
+ * addend is the offset of the CDF for that quantizer index within the model.
+ * NOTE(review): offsets look hand-generated alongside the CDF data — do not
+ * edit independently of WebRtcIsacfix_kCdfGain. */
+const WebRtc_UWord16 *WebRtcIsacfix_kCdfGainPtr[3][12] = {
+  { WebRtcIsacfix_kCdfGain +0 +0,   WebRtcIsacfix_kCdfGain +0 +8,   WebRtcIsacfix_kCdfGain +0 +22,
+    WebRtcIsacfix_kCdfGain +0 +32,  WebRtcIsacfix_kCdfGain +0 +48,  WebRtcIsacfix_kCdfGain +0 +60,
+    WebRtcIsacfix_kCdfGain +0 +81,  WebRtcIsacfix_kCdfGain +0 +95,  WebRtcIsacfix_kCdfGain +0 +128,
+    WebRtcIsacfix_kCdfGain +0 +152, WebRtcIsacfix_kCdfGain +0 +210, WebRtcIsacfix_kCdfGain +0 +264
+  },
+  { WebRtcIsacfix_kCdfGain +404 +0,   WebRtcIsacfix_kCdfGain +404 +8,   WebRtcIsacfix_kCdfGain +404 +21,
+    WebRtcIsacfix_kCdfGain +404 +30,  WebRtcIsacfix_kCdfGain +404 +46,  WebRtcIsacfix_kCdfGain +404 +58,
+    WebRtcIsacfix_kCdfGain +404 +79,  WebRtcIsacfix_kCdfGain +404 +93,  WebRtcIsacfix_kCdfGain +404 +125,
+    WebRtcIsacfix_kCdfGain +404 +149, WebRtcIsacfix_kCdfGain +404 +207, WebRtcIsacfix_kCdfGain +404 +260
+  },
+  { WebRtcIsacfix_kCdfGain +803 +0,   WebRtcIsacfix_kCdfGain +803 +8,   WebRtcIsacfix_kCdfGain +803 +22,
+    WebRtcIsacfix_kCdfGain +803 +31,  WebRtcIsacfix_kCdfGain +803 +48,  WebRtcIsacfix_kCdfGain +803 +60,
+    WebRtcIsacfix_kCdfGain +803 +81,  WebRtcIsacfix_kCdfGain +803 +96,  WebRtcIsacfix_kCdfGain +803 +129,
+    WebRtcIsacfix_kCdfGain +803 +154, WebRtcIsacfix_kCdfGain +803 +212, WebRtcIsacfix_kCdfGain +803 +268
+  }
+};
+
+/* Per-model lookup table of shape-CDF start pointers: 3 models x 108 entries.
+ * Same layout convention as WebRtcIsacfix_kCdfGainPtr: first addend
+ * (0 / 686 / 1368) is the model's base offset in WebRtcIsacfix_kCdfShape,
+ * second addend locates the individual CDF within that model's region.
+ * NOTE(review): generated data — keep in sync with WebRtcIsacfix_kCdfShape. */
+const WebRtc_UWord16 *WebRtcIsacfix_kCdfShapePtr[3][108] = {
+  /* model 0: base offset 0 */
+  { WebRtcIsacfix_kCdfShape +0 +0,   WebRtcIsacfix_kCdfShape +0 +2,   WebRtcIsacfix_kCdfShape +0 +4,
+    WebRtcIsacfix_kCdfShape +0 +6,   WebRtcIsacfix_kCdfShape +0 +8,   WebRtcIsacfix_kCdfShape +0 +10,
+    WebRtcIsacfix_kCdfShape +0 +12,  WebRtcIsacfix_kCdfShape +0 +14,  WebRtcIsacfix_kCdfShape +0 +16,
+    WebRtcIsacfix_kCdfShape +0 +18,  WebRtcIsacfix_kCdfShape +0 +21,  WebRtcIsacfix_kCdfShape +0 +25,
+    WebRtcIsacfix_kCdfShape +0 +29,  WebRtcIsacfix_kCdfShape +0 +33,  WebRtcIsacfix_kCdfShape +0 +37,
+    WebRtcIsacfix_kCdfShape +0 +43,  WebRtcIsacfix_kCdfShape +0 +49,  WebRtcIsacfix_kCdfShape +0 +56,
+    WebRtcIsacfix_kCdfShape +0 +64,  WebRtcIsacfix_kCdfShape +0 +66,  WebRtcIsacfix_kCdfShape +0 +68,
+    WebRtcIsacfix_kCdfShape +0 +70,  WebRtcIsacfix_kCdfShape +0 +72,  WebRtcIsacfix_kCdfShape +0 +75,
+    WebRtcIsacfix_kCdfShape +0 +77,  WebRtcIsacfix_kCdfShape +0 +79,  WebRtcIsacfix_kCdfShape +0 +81,
+    WebRtcIsacfix_kCdfShape +0 +83,  WebRtcIsacfix_kCdfShape +0 +86,  WebRtcIsacfix_kCdfShape +0 +90,
+    WebRtcIsacfix_kCdfShape +0 +94,  WebRtcIsacfix_kCdfShape +0 +98,  WebRtcIsacfix_kCdfShape +0 +102,
+    WebRtcIsacfix_kCdfShape +0 +107, WebRtcIsacfix_kCdfShape +0 +113, WebRtcIsacfix_kCdfShape +0 +120,
+    WebRtcIsacfix_kCdfShape +0 +129, WebRtcIsacfix_kCdfShape +0 +131, WebRtcIsacfix_kCdfShape +0 +133,
+    WebRtcIsacfix_kCdfShape +0 +135, WebRtcIsacfix_kCdfShape +0 +137, WebRtcIsacfix_kCdfShape +0 +141,
+    WebRtcIsacfix_kCdfShape +0 +143, WebRtcIsacfix_kCdfShape +0 +147, WebRtcIsacfix_kCdfShape +0 +151,
+    WebRtcIsacfix_kCdfShape +0 +155, WebRtcIsacfix_kCdfShape +0 +159, WebRtcIsacfix_kCdfShape +0 +164,
+    WebRtcIsacfix_kCdfShape +0 +168, WebRtcIsacfix_kCdfShape +0 +172, WebRtcIsacfix_kCdfShape +0 +178,
+    WebRtcIsacfix_kCdfShape +0 +184, WebRtcIsacfix_kCdfShape +0 +192, WebRtcIsacfix_kCdfShape +0 +200,
+    WebRtcIsacfix_kCdfShape +0 +211, WebRtcIsacfix_kCdfShape +0 +213, WebRtcIsacfix_kCdfShape +0 +215,
+    WebRtcIsacfix_kCdfShape +0 +217, WebRtcIsacfix_kCdfShape +0 +219, WebRtcIsacfix_kCdfShape +0 +223,
+    WebRtcIsacfix_kCdfShape +0 +227, WebRtcIsacfix_kCdfShape +0 +231, WebRtcIsacfix_kCdfShape +0 +235,
+    WebRtcIsacfix_kCdfShape +0 +239, WebRtcIsacfix_kCdfShape +0 +243, WebRtcIsacfix_kCdfShape +0 +248,
+    WebRtcIsacfix_kCdfShape +0 +252, WebRtcIsacfix_kCdfShape +0 +258, WebRtcIsacfix_kCdfShape +0 +264,
+    WebRtcIsacfix_kCdfShape +0 +273, WebRtcIsacfix_kCdfShape +0 +282, WebRtcIsacfix_kCdfShape +0 +293,
+    WebRtcIsacfix_kCdfShape +0 +308, WebRtcIsacfix_kCdfShape +0 +310, WebRtcIsacfix_kCdfShape +0 +312,
+    WebRtcIsacfix_kCdfShape +0 +316, WebRtcIsacfix_kCdfShape +0 +320, WebRtcIsacfix_kCdfShape +0 +324,
+    WebRtcIsacfix_kCdfShape +0 +328, WebRtcIsacfix_kCdfShape +0 +332, WebRtcIsacfix_kCdfShape +0 +336,
+    WebRtcIsacfix_kCdfShape +0 +341, WebRtcIsacfix_kCdfShape +0 +347, WebRtcIsacfix_kCdfShape +0 +354,
+    WebRtcIsacfix_kCdfShape +0 +360, WebRtcIsacfix_kCdfShape +0 +368, WebRtcIsacfix_kCdfShape +0 +378,
+    WebRtcIsacfix_kCdfShape +0 +388, WebRtcIsacfix_kCdfShape +0 +400, WebRtcIsacfix_kCdfShape +0 +418,
+    WebRtcIsacfix_kCdfShape +0 +445, WebRtcIsacfix_kCdfShape +0 +447, WebRtcIsacfix_kCdfShape +0 +451,
+    WebRtcIsacfix_kCdfShape +0 +455, WebRtcIsacfix_kCdfShape +0 +461, WebRtcIsacfix_kCdfShape +0 +468,
+    WebRtcIsacfix_kCdfShape +0 +474, WebRtcIsacfix_kCdfShape +0 +480, WebRtcIsacfix_kCdfShape +0 +486,
+    WebRtcIsacfix_kCdfShape +0 +495, WebRtcIsacfix_kCdfShape +0 +505, WebRtcIsacfix_kCdfShape +0 +516,
+    WebRtcIsacfix_kCdfShape +0 +528, WebRtcIsacfix_kCdfShape +0 +543, WebRtcIsacfix_kCdfShape +0 +564,
+    WebRtcIsacfix_kCdfShape +0 +583, WebRtcIsacfix_kCdfShape +0 +608, WebRtcIsacfix_kCdfShape +0 +635
+  },
+  /* model 1: base offset 686 */
+  { WebRtcIsacfix_kCdfShape +686 +0,   WebRtcIsacfix_kCdfShape +686 +2,   WebRtcIsacfix_kCdfShape +686 +4,
+    WebRtcIsacfix_kCdfShape +686 +6,   WebRtcIsacfix_kCdfShape +686 +8,   WebRtcIsacfix_kCdfShape +686 +11,
+    WebRtcIsacfix_kCdfShape +686 +13,  WebRtcIsacfix_kCdfShape +686 +15,  WebRtcIsacfix_kCdfShape +686 +17,
+    WebRtcIsacfix_kCdfShape +686 +20,  WebRtcIsacfix_kCdfShape +686 +23,  WebRtcIsacfix_kCdfShape +686 +27,
+    WebRtcIsacfix_kCdfShape +686 +31,  WebRtcIsacfix_kCdfShape +686 +35,  WebRtcIsacfix_kCdfShape +686 +40,
+    WebRtcIsacfix_kCdfShape +686 +44,  WebRtcIsacfix_kCdfShape +686 +50,  WebRtcIsacfix_kCdfShape +686 +56,
+    WebRtcIsacfix_kCdfShape +686 +63,  WebRtcIsacfix_kCdfShape +686 +65,  WebRtcIsacfix_kCdfShape +686 +67,
+    WebRtcIsacfix_kCdfShape +686 +69,  WebRtcIsacfix_kCdfShape +686 +71,  WebRtcIsacfix_kCdfShape +686 +73,
+    WebRtcIsacfix_kCdfShape +686 +75,  WebRtcIsacfix_kCdfShape +686 +77,  WebRtcIsacfix_kCdfShape +686 +79,
+    WebRtcIsacfix_kCdfShape +686 +82,  WebRtcIsacfix_kCdfShape +686 +85,  WebRtcIsacfix_kCdfShape +686 +89,
+    WebRtcIsacfix_kCdfShape +686 +93,  WebRtcIsacfix_kCdfShape +686 +97,  WebRtcIsacfix_kCdfShape +686 +102,
+    WebRtcIsacfix_kCdfShape +686 +106, WebRtcIsacfix_kCdfShape +686 +112, WebRtcIsacfix_kCdfShape +686 +119,
+    WebRtcIsacfix_kCdfShape +686 +127, WebRtcIsacfix_kCdfShape +686 +129, WebRtcIsacfix_kCdfShape +686 +131,
+    WebRtcIsacfix_kCdfShape +686 +133, WebRtcIsacfix_kCdfShape +686 +135, WebRtcIsacfix_kCdfShape +686 +137,
+    WebRtcIsacfix_kCdfShape +686 +139, WebRtcIsacfix_kCdfShape +686 +142, WebRtcIsacfix_kCdfShape +686 +146,
+    WebRtcIsacfix_kCdfShape +686 +150, WebRtcIsacfix_kCdfShape +686 +154, WebRtcIsacfix_kCdfShape +686 +158,
+    WebRtcIsacfix_kCdfShape +686 +162, WebRtcIsacfix_kCdfShape +686 +167, WebRtcIsacfix_kCdfShape +686 +173,
+    WebRtcIsacfix_kCdfShape +686 +179, WebRtcIsacfix_kCdfShape +686 +186, WebRtcIsacfix_kCdfShape +686 +194,
+    WebRtcIsacfix_kCdfShape +686 +205, WebRtcIsacfix_kCdfShape +686 +207, WebRtcIsacfix_kCdfShape +686 +209,
+    WebRtcIsacfix_kCdfShape +686 +211, WebRtcIsacfix_kCdfShape +686 +214, WebRtcIsacfix_kCdfShape +686 +218,
+    WebRtcIsacfix_kCdfShape +686 +222, WebRtcIsacfix_kCdfShape +686 +226, WebRtcIsacfix_kCdfShape +686 +230,
+    WebRtcIsacfix_kCdfShape +686 +234, WebRtcIsacfix_kCdfShape +686 +238, WebRtcIsacfix_kCdfShape +686 +242,
+    WebRtcIsacfix_kCdfShape +686 +247, WebRtcIsacfix_kCdfShape +686 +253, WebRtcIsacfix_kCdfShape +686 +262,
+    WebRtcIsacfix_kCdfShape +686 +269, WebRtcIsacfix_kCdfShape +686 +278, WebRtcIsacfix_kCdfShape +686 +289,
+    WebRtcIsacfix_kCdfShape +686 +305, WebRtcIsacfix_kCdfShape +686 +307, WebRtcIsacfix_kCdfShape +686 +309,
+    WebRtcIsacfix_kCdfShape +686 +311, WebRtcIsacfix_kCdfShape +686 +315, WebRtcIsacfix_kCdfShape +686 +319,
+    WebRtcIsacfix_kCdfShape +686 +323, WebRtcIsacfix_kCdfShape +686 +327, WebRtcIsacfix_kCdfShape +686 +331,
+    WebRtcIsacfix_kCdfShape +686 +335, WebRtcIsacfix_kCdfShape +686 +340, WebRtcIsacfix_kCdfShape +686 +346,
+    WebRtcIsacfix_kCdfShape +686 +354, WebRtcIsacfix_kCdfShape +686 +362, WebRtcIsacfix_kCdfShape +686 +374,
+    WebRtcIsacfix_kCdfShape +686 +384, WebRtcIsacfix_kCdfShape +686 +396, WebRtcIsacfix_kCdfShape +686 +413,
+    WebRtcIsacfix_kCdfShape +686 +439, WebRtcIsacfix_kCdfShape +686 +442, WebRtcIsacfix_kCdfShape +686 +446,
+    WebRtcIsacfix_kCdfShape +686 +450, WebRtcIsacfix_kCdfShape +686 +455, WebRtcIsacfix_kCdfShape +686 +461,
+    WebRtcIsacfix_kCdfShape +686 +468, WebRtcIsacfix_kCdfShape +686 +475, WebRtcIsacfix_kCdfShape +686 +481,
+    WebRtcIsacfix_kCdfShape +686 +489, WebRtcIsacfix_kCdfShape +686 +498, WebRtcIsacfix_kCdfShape +686 +508,
+    WebRtcIsacfix_kCdfShape +686 +522, WebRtcIsacfix_kCdfShape +686 +534, WebRtcIsacfix_kCdfShape +686 +554,
+    WebRtcIsacfix_kCdfShape +686 +577, WebRtcIsacfix_kCdfShape +686 +602, WebRtcIsacfix_kCdfShape +686 +631
+  },
+  /* model 2: base offset 1368 */
+  { WebRtcIsacfix_kCdfShape +1368 +0,   WebRtcIsacfix_kCdfShape +1368 +2,   WebRtcIsacfix_kCdfShape +1368 +4,
+    WebRtcIsacfix_kCdfShape +1368 +6,   WebRtcIsacfix_kCdfShape +1368 +8,   WebRtcIsacfix_kCdfShape +1368 +10,
+    WebRtcIsacfix_kCdfShape +1368 +12,  WebRtcIsacfix_kCdfShape +1368 +14,  WebRtcIsacfix_kCdfShape +1368 +16,
+    WebRtcIsacfix_kCdfShape +1368 +20,  WebRtcIsacfix_kCdfShape +1368 +24,  WebRtcIsacfix_kCdfShape +1368 +28,
+    WebRtcIsacfix_kCdfShape +1368 +32,  WebRtcIsacfix_kCdfShape +1368 +36,  WebRtcIsacfix_kCdfShape +1368 +40,
+    WebRtcIsacfix_kCdfShape +1368 +44,  WebRtcIsacfix_kCdfShape +1368 +50,  WebRtcIsacfix_kCdfShape +1368 +57,
+    WebRtcIsacfix_kCdfShape +1368 +65,  WebRtcIsacfix_kCdfShape +1368 +67,  WebRtcIsacfix_kCdfShape +1368 +69,
+    WebRtcIsacfix_kCdfShape +1368 +71,  WebRtcIsacfix_kCdfShape +1368 +73,  WebRtcIsacfix_kCdfShape +1368 +75,
+    WebRtcIsacfix_kCdfShape +1368 +77,  WebRtcIsacfix_kCdfShape +1368 +79,  WebRtcIsacfix_kCdfShape +1368 +81,
+    WebRtcIsacfix_kCdfShape +1368 +85,  WebRtcIsacfix_kCdfShape +1368 +89,  WebRtcIsacfix_kCdfShape +1368 +93,
+    WebRtcIsacfix_kCdfShape +1368 +97,  WebRtcIsacfix_kCdfShape +1368 +101, WebRtcIsacfix_kCdfShape +1368 +105,
+    WebRtcIsacfix_kCdfShape +1368 +110, WebRtcIsacfix_kCdfShape +1368 +116, WebRtcIsacfix_kCdfShape +1368 +123,
+    WebRtcIsacfix_kCdfShape +1368 +132, WebRtcIsacfix_kCdfShape +1368 +134, WebRtcIsacfix_kCdfShape +1368 +136,
+    WebRtcIsacfix_kCdfShape +1368 +138, WebRtcIsacfix_kCdfShape +1368 +141, WebRtcIsacfix_kCdfShape +1368 +143,
+    WebRtcIsacfix_kCdfShape +1368 +146, WebRtcIsacfix_kCdfShape +1368 +150, WebRtcIsacfix_kCdfShape +1368 +154,
+    WebRtcIsacfix_kCdfShape +1368 +158, WebRtcIsacfix_kCdfShape +1368 +162, WebRtcIsacfix_kCdfShape +1368 +166,
+    WebRtcIsacfix_kCdfShape +1368 +170, WebRtcIsacfix_kCdfShape +1368 +174, WebRtcIsacfix_kCdfShape +1368 +179,
+    WebRtcIsacfix_kCdfShape +1368 +185, WebRtcIsacfix_kCdfShape +1368 +193, WebRtcIsacfix_kCdfShape +1368 +203,
+    WebRtcIsacfix_kCdfShape +1368 +214, WebRtcIsacfix_kCdfShape +1368 +216, WebRtcIsacfix_kCdfShape +1368 +218,
+    WebRtcIsacfix_kCdfShape +1368 +220, WebRtcIsacfix_kCdfShape +1368 +224, WebRtcIsacfix_kCdfShape +1368 +227,
+    WebRtcIsacfix_kCdfShape +1368 +231, WebRtcIsacfix_kCdfShape +1368 +235, WebRtcIsacfix_kCdfShape +1368 +239,
+    WebRtcIsacfix_kCdfShape +1368 +243, WebRtcIsacfix_kCdfShape +1368 +247, WebRtcIsacfix_kCdfShape +1368 +251,
+    WebRtcIsacfix_kCdfShape +1368 +256, WebRtcIsacfix_kCdfShape +1368 +262, WebRtcIsacfix_kCdfShape +1368 +269,
+    WebRtcIsacfix_kCdfShape +1368 +277, WebRtcIsacfix_kCdfShape +1368 +286, WebRtcIsacfix_kCdfShape +1368 +297,
+    WebRtcIsacfix_kCdfShape +1368 +315, WebRtcIsacfix_kCdfShape +1368 +317, WebRtcIsacfix_kCdfShape +1368 +319,
+    WebRtcIsacfix_kCdfShape +1368 +323, WebRtcIsacfix_kCdfShape +1368 +327, WebRtcIsacfix_kCdfShape +1368 +331,
+    WebRtcIsacfix_kCdfShape +1368 +335, WebRtcIsacfix_kCdfShape +1368 +339, WebRtcIsacfix_kCdfShape +1368 +343,
+    WebRtcIsacfix_kCdfShape +1368 +349, WebRtcIsacfix_kCdfShape +1368 +355, WebRtcIsacfix_kCdfShape +1368 +361,
+    WebRtcIsacfix_kCdfShape +1368 +368, WebRtcIsacfix_kCdfShape +1368 +376, WebRtcIsacfix_kCdfShape +1368 +385,
+    WebRtcIsacfix_kCdfShape +1368 +397, WebRtcIsacfix_kCdfShape +1368 +411, WebRtcIsacfix_kCdfShape +1368 +429,
+    WebRtcIsacfix_kCdfShape +1368 +456, WebRtcIsacfix_kCdfShape +1368 +459, WebRtcIsacfix_kCdfShape +1368 +463,
+    WebRtcIsacfix_kCdfShape +1368 +467, WebRtcIsacfix_kCdfShape +1368 +473, WebRtcIsacfix_kCdfShape +1368 +478,
+    WebRtcIsacfix_kCdfShape +1368 +485, WebRtcIsacfix_kCdfShape +1368 +491, WebRtcIsacfix_kCdfShape +1368 +497,
+    WebRtcIsacfix_kCdfShape +1368 +505, WebRtcIsacfix_kCdfShape +1368 +514, WebRtcIsacfix_kCdfShape +1368 +523,
+    WebRtcIsacfix_kCdfShape +1368 +535, WebRtcIsacfix_kCdfShape +1368 +548, WebRtcIsacfix_kCdfShape +1368 +565,
+    WebRtcIsacfix_kCdfShape +1368 +585, WebRtcIsacfix_kCdfShape +1368 +611, WebRtcIsacfix_kCdfShape +1368 +640
+  }
+};
+
+/* code length for all coefficients using different models */
+
+/* Code lengths for the gain coefficient, 392 entries. The Q11 suffix
+ * suggests values are fixed-point with 11 fractional bits (i.e. length in
+ * bits scaled by 2048) — NOTE(review): inferred from the name; confirm
+ * against the entropy-coding code that consumes this table. Generated
+ * data; do not edit values by hand. */
+const WebRtc_Word16 WebRtcIsacfix_kCodeLenGainQ11[392] = {
+  25189, 16036,  8717,   358,  8757, 15706, 21456, 24397, 18502, 17559
+  , 13794, 11088,  7480,   873,  6603, 11636, 14627, 16805, 19132, 26624
+  , 26624, 19408, 13751,  7280,   583,  7591, 15178, 23773, 28672, 25189
+  , 19045, 16442, 13412, 10397,  5893,  1338,  6376,  9992, 12074, 13853
+  , 15781, 19821, 22819, 28672, 28672, 25189, 19858, 15781, 11262,  5477
+  ,  1298,  5632, 11814, 17234, 22020, 28672, 19677, 18125, 16587, 14521
+  , 13032, 11196,  9249,  5411,  2495,  4994,  7975, 10234, 12308, 13892
+  , 15148, 17944, 21725, 23917, 25189, 19539, 16293, 11531,  7808,  4475
+  ,  2739,  4872,  8089, 11314, 14992, 18105, 23257, 26624, 25189, 23257
+  , 23257, 20982, 18697, 18023, 16338, 16036, 14539, 13695, 13146, 11763
+  , 10754,  9074,  7260,  5584,  4430,  5553,  6848,  8344, 10141, 11636
+  , 12535, 13416, 14342, 15477, 17296, 19282, 22349, 23773, 28672, 28672
+  , 26624, 23773, 21456, 18023, 15118, 13362, 11212,  9293,  8043,  6985
+  ,  5908,  5721,  5853,  6518,  7316,  8360,  9716, 11289, 12912, 14652
+  , 16969, 19858, 23773, 26624, 28013, 30720, 30720, 28672, 25426, 23141
+  , 25426, 23773, 20720, 19408, 18697, 19282, 16859, 16338, 16026, 15377
+  , 15021, 14319, 14251, 13937, 13260, 13017, 12332, 11703, 11430, 10359
+  , 10128,  9405,  8757,  8223,  7974,  7859,  7646,  7673,  7997,  8580
+  ,  8880,  9061,  9866, 10397, 11358, 12200, 13244, 14157, 15021, 16026
+  , 16490, 18697, 18479, 20011, 19677, 20720, 24576, 26276, 30720, 30720
+  , 28672, 30720, 24068, 25189, 22437, 20345, 18479, 16396, 16026, 14928
+  , 13877, 13450, 12696, 12766, 11626, 11098, 10159,  9998,  9437,  9275
+  ,  8783,  8552,  8629,  8488,  8522,  8454,  8571,  8775,  8915,  9427
+  ,  9483,  9851, 10260, 10933, 11131, 11974, 12560, 13833, 15080, 16304
+  , 17491, 19017, 18697, 19408, 22020, 25189, 25426, 22819, 26276, 30720
+  , 30720, 30720, 30720, 30720, 30720, 28672, 30720, 30720, 30720, 30720
+  , 28013, 25426, 24397, 23773, 25189, 26624, 25189, 22437, 21725, 20011
+  , 20527, 20720, 20771, 22020, 22020, 19858, 19408, 19972, 17866, 17360
+  , 17791, 17219, 16805, 16927, 16067, 16162, 15661, 15178, 15021, 15209
+  , 14845, 14570, 14490, 14490, 13733, 13617, 13794, 13577, 13312, 12824
+  , 13032, 12683, 12189, 12469, 12109, 11940, 11636, 11617, 11932, 12294
+  , 11578, 11775, 12039, 11654, 11560, 11439, 11909, 11421, 12029, 11513
+  , 11773, 11899, 11560, 11805, 11476, 11664, 11963, 11647, 11754, 11963
+  , 11703, 12211, 11932, 12074, 12469, 12535, 12560, 12912, 12783, 12866
+  , 12884, 13378, 13957, 13775, 13635, 14019, 14545, 15240, 15520, 15554
+  , 15697, 16490, 16396, 17281, 16599, 16969, 17963, 16859, 16983, 16805
+  , 17099, 18210, 17219, 17646, 17700, 17646, 18297, 17425, 18479, 17791
+  , 17718, 19282, 18672, 20173, 20982, 21725, 21456, 23773, 23257, 25189
+  , 30720, 30720, 25189, 26624, 30720, 30720, 30720, 30720, 28672, 26276
+  , 30720, 30720
+};
+
+/* Code lengths for the shape coefficients, 577 entries; same Q11
+ * fixed-point convention as WebRtcIsacfix_kCodeLenGainQ11 above
+ * (NOTE(review): Q11 inferred from the name — confirm with the consumer).
+ * Generated data; do not edit values by hand. */
+const WebRtc_Word16 WebRtcIsacfix_kCodeLenShapeQ11[577] = {
+  0,     0,     0,     0,     0,     0,     0,     0,     0, 28672
+  ,     0, 26624,     1, 23773, 22819,     4, 20982, 18598,    10, 19282
+  , 16587,    22, 16442, 26624, 13126,    60, 14245, 26624, 26624, 12736
+  ,    79, 12912, 25189, 22819,  9563,   249,  9474, 22349, 28672, 23257
+  , 17944,  7980,   434,  8181, 16431, 26624,     0,     0,     0,     0
+  , 28672,     0,     0,     0,     0,     0, 28672,     0, 22437,     3
+  , 22437, 20982,     5, 20982, 16442,    22, 16752, 13814,    49, 14646
+  , 11645,   116, 11734, 26624, 28672, 10613,   158, 11010, 24397, 19539
+  ,  8046,   453,  7709, 19017, 28672, 23257, 15110,  6770,   758,  6523
+  , 14108, 24397, 28672,     0,     0,     0,     0, 28672,     0, 28672
+  ,     0, 26624,     1, 28672, 28672,     1, 26624, 24397,     2, 23257
+  , 21725,     4, 20982, 17158,    18, 17281, 28672, 15178,    35, 15209
+  , 12343,    92, 12320, 26624, 10344,   189, 10217, 30720, 22020,  9033
+  ,   322,  8549, 23773, 28672, 30720, 20622,  7666,   473,  7806, 20527
+  , 24397, 14135,  5995,   960,  6018, 14872, 23773, 26624, 20928, 16293
+  , 10636,  4926,  1588,  5256, 11088, 18043, 25189,     0,     0,     0
+  ,     0, 24397,     1, 25189, 20720,     5, 21456, 21209,     3, 25189
+  , 20982,     5, 21456, 15818,    30, 15410, 13794,    60, 13416, 28672
+  , 11162,   142, 11025,  9337,   231, 10094, 23773,  8338,   405,  7930
+  , 26624, 19677,  6787,   613,  7318, 19161, 28672, 16442,  6319,   932
+  ,  5748, 15312, 25189, 28672, 28672, 28672, 13998,  5513,  1263,  5146
+  , 14024, 24397, 22819, 15818,  9460,  4447,  2122,  4681,  9970, 15945
+  , 22349, 28672, 30720, 22622, 19017, 14872, 10689,  7405,  4473,  2983
+  ,  4783,  7894, 11186, 14964, 18210, 24397,     0,     0, 30720,     0
+  , 30720, 21456,     3, 23773, 14964,    39, 14757, 14179,    53, 13751
+  , 14928,    36, 15272, 12430,    79, 13228,  9135,   285,  9077, 28672
+  , 28672,  8377,   403,  7919, 26624, 28672, 23257,  7068,   560,  7473
+  , 20345, 19677,  6770,   720,  6464, 18697, 25189, 16249,  5779,  1087
+  ,  5494, 15209, 22819, 30720, 20622, 12601,  5240,  1419,  5091, 12095
+  , 19408, 26624, 22819, 16805, 10683,  4812,  2056,  4293,  9836, 16026
+  , 24397, 25189, 18409, 13833,  8681,  4503,  2653,  4220,  8329, 13853
+  , 19132, 26624, 25189, 20771, 17219, 12630,  9520,  6733,  4565,  3657
+  ,  4817,  7069, 10058, 13212, 16805, 21209, 26624, 26276, 28672, 28672
+  , 26276, 23257, 20173, 19282, 16538, 15051, 12811, 10754,  9267,  7547
+  ,  6270,  5407,  5214,  6057,  7054,  8226,  9488, 10806, 12793, 14442
+  , 16442, 19677, 22099, 26276, 28672,     0, 30720,     0, 30720, 11920
+  ,    56, 20720, 30720,  6766,   355, 13130, 30720, 30720, 22180,  5589
+  ,   736,  7902, 26624, 30720,  7634,   354,  9721, 30720, 30720,  9027
+  ,   246, 10117, 30720, 30720,  9630,   453,  6709, 23257, 30720, 25683
+  , 14228,  6127,  1271,  4615, 15178, 30720, 30720, 23504, 12382,  5739
+  ,  2015,  3492, 10560, 22020, 26624, 30720, 30720, 23257, 13192,  4873
+  ,  1527,  5001, 12445, 22020, 30720, 30720, 30720, 30720, 19344, 10761
+  ,  4051,  1927,  5281, 10594, 17866, 28672, 30720, 30720, 30720, 21869
+  , 15554, 10060,  5979,  2710,  3085,  7889, 14646, 21725, 28672, 30720
+  , 30720, 30720, 30720, 30720, 30720, 30720, 22719, 17425, 13212,  8083
+  ,  4439,  2820,  4305,  8136, 12988, 17425, 21151, 28672, 28672, 30720
+  , 30720, 30720, 28672, 20527, 19282, 14412, 10513,  7407,  5079,  3744
+  ,  4115,  6308,  9621, 13599, 17040, 22349, 28672, 30720, 30720, 30720
+  , 30720, 30720, 30720, 29522, 19282, 14545, 11485,  9093,  6760,  5262
+  ,  4672,  4970,  6005,  7852,  9732, 12343, 14672, 19161, 22819, 25189
+  , 30720, 30720, 28672, 30720, 30720, 20720, 18125, 14388, 12007,  9825
+  ,  8092,  7064,  6069,  5903,  5932,  6359,  7169,  8310,  9324, 10711
+  , 11867, 13096, 14157, 16338, 17040, 19161, 21725, 23773, 30720, 30720
+  , 26276, 25426, 24397, 28672, 28672, 23257, 22020, 22349, 18297, 17646
+  , 16983, 16431, 16162, 15021, 15178, 13751, 12142, 10895, 10193,  9632
+  ,  9086,  8896,  8823,  8735,  8591,  8754,  8649,  8361,  8329,  8522
+  ,  8373,  8739,  8993,  9657, 10454, 11279, 11899, 12614, 14024, 14273
+  , 15477, 15240, 16649, 17866, 18697, 21151, 22099
+};
+
+/* left KLT transforms */
+/* Left KLT transform for the gain: three 2x2 matrices stored row-major
+ * as {a, b, c, d}, one per model. The Q15 suffix suggests fixed-point
+ * with 15 fractional bits — NOTE(review): inferred from the name; each
+ * row has the structure {-x, y, y, x}, consistent with an orthogonal
+ * 2x2 transform, but confirm against the KLT code that applies it. */
+const WebRtc_Word16 WebRtcIsacfix_kT1GainQ15[3][4] = {
+  { -26130, 19773, 19773, 26130 },
+  { -26664, 19046, 19046, 26664 },
+  { -23538, 22797, 22797, 23538 }
+};
+
+
+
+const WebRtc_Word16 WebRtcIsacfix_kT1ShapeQ15[3][324] = {
+  { 52,16,168,7,439,-138,-89,306,671,882,
+    157,1301,291,1598,-3571,-1943,-1119,32404,96,-12,
+    379,-64,-307,345,-836,539,1045,2541,-2865,-992,
+    1683,-4717,5808,7427,30599,2319,183,-73,451,481,
+    933,-198,781,-397,1244,-777,3690,-2414,149,-1356,
+    -2593,-31140,8289,-1737,-202,-14,-214,360,501,450,
+    -245,-7,797,3638,-2804,3042,-337,22137,-22103,2264,
+    6838,-3381,305,172,263,-195,-355,351,179,513,
+    2234,3343,5509,7531,19075,-17740,-16836,2244,-629,-1505,
+    -153,108,124,-324,2694,-124,1492,-850,5347,4285,
+    7439,-10229,-22822,-12467,-12891,3645,822,-232,131,13,
+    374,565,536,4681,1294,-1935,1926,-5734,-10643,26462,
+    -12480,-5589,-1038,-2468,964,-704,-247,-106,186,-558,
+    -4050,3760,2972,2141,-7393,6294,26740,11991,-3251,5461,
+    5341,1574,2208,-51,-552,-297,-753,-154,2068,-5371,
+    3578,4106,28043,-10533,8041,2353,2389,4609,3410,1906,
+    351,-249,18,-15,1117,539,2870,9084,17585,-24528,
+    -366,-6490,2009,-3170,2942,1116,-232,1672,1065,606,
+    -399,-388,-518,38,3728,28948,-11936,4543,4104,-4441,
+    1545,-4044,1485,622,-68,186,-473,135,-280,125,
+    -546,-1813,6989,6606,23711,19376,-2636,2870,-4553,-1687,
+    878,-375,205,-208,-409,-108,-200,-45,-1670,-337,
+    8213,-5524,-2334,5240,-12939,-26205,5937,-1582,-592,-959,
+    -5374,2449,3400,559,349,-492,668,12379,-27684,3419,
+    5117,4415,-297,-8270,-1252,-3490,-1272,-1199,-3159,191,
+    630,488,-797,-3071,12912,-27783,-10249,1047,647,619,
+    111,-3722,-915,-1055,-502,5,-1384,-306,221,68,
+    5219,13173,-26474,-11663,-5626,927,806,-1127,236,-589,
+    -522,-230,-312,-315,-428,-573,426,192,-11830,-26883,
+    -14121,-2785,-1429,-109,410,-832,-302,539,-459,104,
+    1,-530,-202,-289,153,116,30082,-12944,-671,20,
+    649,98,103,215,234,0,280,-51,-169,298,
+    31,230,-73,-51
+  },
+  { -154,-7,-192,61,-739,-389,-947,-162,-60,94,
+    511,-716,1520,-1428,4168,-2214,1816,32270,-123,-77,
+    -199,-99,-42,-588,203,-240,-930,-35,1580,234,
+    3206,-5507,-1495,-10946,30000,-2667,-136,-176,-240,-175,
+    -204,-661,-1796,-1039,-1271,498,3143,734,2663,2699,
+    -8127,29333,10495,2356,-72,113,-91,118,-2840,-723,
+    -1733,-1158,-389,-2116,-3054,-3,-5179,8071,29546,6308,
+    5657,-3178,-186,-294,-473,-635,1213,-983,-1437,-1715,
+    -1094,1280,-92,-9573,948,29576,-7060,-5921,2954,1349,
+    -337,-108,-1099,962,418,-413,-1149,-334,1241,3975,
+    -6825,26725,-14377,7051,-4772,-1707,2335,2008,-150,570,
+    1371,42,-1649,-619,2039,3369,-1225,1583,-2755,-15207,
+    -27504,-4855,-4304,1495,2733,1324,15,-448,403,353,
+    3016,-1242,2338,2673,2064,-7496,-30447,-3686,5833,-1301,
+    -2455,2122,1519,608,43,-653,773,-3072,912,-1537,
+    4505,10284,30237,1549,3200,-691,205,1702,658,1014,
+    1499,148,79,-322,-1162,-4639,-813,7536,3204,29109,
+    -10747,-26,1611,2286,2114,2561,1022,372,348,207,
+    1062,-1088,-443,-9849,2381,5671,29097,-7612,-2927,3853,
+    194,1155,275,1438,1438,1312,581,888,-784,906,
+    112,-11103,25104,14438,-9311,-3068,1210,368,370,-940,
+    -2434,-1148,1925,392,657,258,-526,1475,-2281,-4265,
+    -1880,1534,2185,-1472,959,-30934,6306,3114,-4109,1768,
+    -2612,-703,45,644,2185,2033,5670,7211,19114,-22427,
+    6432,5150,-4090,-2694,3860,1245,-596,293,1829,369,
+    -319,229,-3256,2170,-6374,-26216,-4570,-16053,-5766,-262,
+    -2006,2873,-1477,147,378,-1544,-344,-544,-985,-481,
+    4210,4542,30757,-7291,-4863,1529,-2079,-628,-603,-783,
+    -408,1646,697,808,-620,-292,181,158,-13313,-29173,
+    5984,-1262,859,-1776,-558,-24,-883,-1421,739,210,
+    -531,-285,131,-160,-246,-56,29345,-13706,-2859,-2966,
+    -300,-970,-2382,-268,-103,-636,-12,-62,-691,-253,
+    -147,-127,27,66
+  },
+  { 55,-212,-198,489,-274,81,682,399,328,-934,
+    -389,-37,1357,-3632,5276,6581,-9493,-29921,29,-45,
+    2,190,172,-15,311,-130,-1085,-25,324,-684,
+    3223,-6580,4485,-5280,-29521,9933,82,-320,-530,229,
+    -705,-533,-414,848,-1842,-4473,1390,-857,6717,-6692,
+    4648,29397,576,8339,-68,-85,238,-330,264,-1012,
+    -381,-203,-3384,-3329,3906,6810,3790,-6250,28312,-8078,
+    8089,1565,160,-569,-612,-613,-1063,-1928,-1125,3421,
+    -7481,-7484,4942,-6984,4330,-25591,-10574,-6982,5682,-1781,
+    -308,89,178,-1715,-420,-3530,-5776,1219,-8617,-7137,
+    7015,4981,24875,12657,-5408,-3356,-785,-1972,326,-858,
+    -506,-3382,-986,-6258,-2259,4015,-8374,-10482,3127,23826,
+    -14126,-514,-5417,2178,-2912,-17,-587,80,67,-5881,
+    -1702,-5351,-4481,398,-10156,-225,20727,-15460,-11603,7752,
+    3660,1714,-2001,-359,499,-527,-1225,-7820,-1297,-6326,
+    -8526,7900,-18328,13311,-17488,-2926,-196,-17,2281,873,
+    480,-160,-624,471,780,-8729,1707,-14262,-20647,1721,
+    18590,-2206,-1214,-1066,312,-2602,783,-412,-113,49,
+    -119,1305,-2371,-15132,-1833,-18252,20295,-8316,2227,341,
+    -2074,-702,3082,-262,-465,-198,430,30,-70,-788,
+    2342,-25132,-4863,19783,-484,2137,2811,-1906,799,1586,
+    962,-734,-191,-30,-129,-93,-1126,1729,5860,-2030,
+    8953,603,-3338,-10869,-1144,22070,12130,10513,3191,-6881,
+    -3514,2090,711,-666,1843,-5997,-5681,2921,-17641,-2801,
+    4969,18590,7169,12214,8587,4405,3008,-1074,-371,-77,
+    253,331,-5611,5014,13152,-1985,18483,-1696,8043,20463,
+    2381,-393,1688,-1205,618,1220,457,248,-83,176,
+    7920,-13676,-22139,-3038,17402,2036,844,3258,994,719,
+    2087,-44,426,494,12,-91,46,5,-14204,22912,
+    -18156,-361,442,2298,-829,2229,386,1433,1335,1323,
+    55,-592,-139,49,-12,-57,27783,17134,350,-282,
+    552,158,142,2488,465,329,1087,118,143,10,
+    56,65,-15,-31
+  }
+};
+
+/* right KLT transforms */
+const WebRtc_Word16 WebRtcIsacfix_kT2GainQ15[3][36] = {
+  {   4775, -14892,  20313, -17104,  10533,  -3613,  -6782,  16044,  -8889,
+      -11019,  21330, -10720,  13193, -15678, -11101,  14461,  12250, -13096,
+      -16951,   2167,  16066,  15569,   -702, -16754, -19195, -12823,  -4321,
+      5128,    13348,  17825,  13232,  13404,  13494,  13490,  13383,  13261
+  },
+  {  -3725,  11408, -18493,  20031, -13097,   3865,   9344, -19294,  10740,
+     8856, -18432,   8982,  13975, -14444, -11930,  11774,  14285, -13594,
+     -16323,     -4,  16340,  15609,    359, -17220, -18401, -13471,  -4643,
+     5225,  13375,  18053,  13124,  13463,  13621,  13583,  13393,  13072
+  },
+  {  -3513,  11402, -17883,  19504, -14399,   4885,   8702, -19513,  12046,
+     8533, -18110,   8447,  12778, -14838, -12444,  13177,  14107, -12759,
+     -17268,    914,  15822,  15661,    838, -16686, -18907, -12936,  -4820,
+     4175,  12398,  18830,  12913,  13215,  13433,  13572,  13601,  13518
+  }
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kT2ShapeQ15[3][36] = {
+  {   4400, -11512,  17205, -19470,  14770,  -5345,   9784, -19222,  11228,
+      6842, -18371,   9909,  14191, -13496, -11563,  14015,  11827, -14839,
+      -15439,    948,  17802,  14827,  -2053, -17132,  18723,  14516,   4135,
+      -6822, -13869, -16016,  12975,  13341,  13563,  13603,  13478,  13296
+  },
+  {   5420, -14215,  19060, -18073,  11709,  -3911,   9645, -18335,   7717,
+      10842, -19283,   9777,  14898, -12555, -13661,  11668,  13520, -13733,
+      -15936,  -1358,  15671,  16728,    328, -17100,  17527,  13973,   5587,
+      -5194, -14165, -17677,  12970,  13446,  13693,  13660,  13462,  13015
+  },
+  {   4386, -12426,  18019, -18895,  13894,  -5034,   9713, -19270,  10283,
+      8692, -18439,   9317,  13992, -13454, -13241,  12850,  13366, -13336,
+      -16334,   -498,  15976,  16213,   -114, -16987,  18191,  13659,   4958,
+      -5116, -13444, -18021,  12911,  13424,  13718,  13674,  13464,  13054
+  }
+};
+
+/* means of log gains and LAR coefficients */
+const WebRtc_Word16 WebRtcIsacfix_kMeansGainQ8[3][12] = {
+  { -1758, -1370, -1758, -1373, -1757, -1375,
+    -1758, -1374, -1758, -1373, -1755, -1370
+  },
+  { -1569, -1224, -1569, -1225, -1569, -1227,
+    -1569, -1226, -1567, -1225, -1565, -1224
+  },
+  { -1452,  -957, -1447,  -951, -1438,  -944,
+    -1431,  -938, -1419,  -931, -1406,  -926
+  }
+};
+
+
+const WebRtc_Word32 WebRtcIsacfix_kMeansShapeQ17[3][108] = {
+  { -119581, 34418, -44193, 11112, -4428, 18906, 9222, 8068, 1953, 5425,
+    1871, 1689, 109933, 33751, 10471, -2566, 1090, 2320, -119219, 33728,
+    -43759, 11450, -4870, 19117, 9174, 8037, 1972, 5331, 1872, 1843,
+    109899, 34301, 10629, -2316, 1272, 2562, -118608, 32318, -44012, 11591,
+    -4914, 18932, 9456, 8088, 1900, 5419, 1723, 1853, 109963, 35059,
+    10745, -2335, 1161, 2520, -119174, 32107, -44462, 11635, -4694, 18611,
+    9757, 8108, 1969, 5486, 1673, 1777, 109636, 34907, 10643, -2406,
+    1034, 2420, -118597, 32320, -44590, 10854, -4569, 18821, 9701, 7866,
+    2003, 5577, 1732, 1626, 109913, 34448, 10714, -2752, 990, 2228,
+    -118138, 32996, -44352, 10334, -3772, 18488, 9464, 7865, 2208, 5540,
+    1745, 1664, 109880, 33381, 10640, -2779, 980, 2054
+  },
+  { -146328, 46370, 1047, 26431, 10035, 13933, 6415, 14359, -2368, 6661,
+    2269, 1764, 96623, 7802, 4163, 10742, 1643, 2954, -146871, 46561, 1127,
+    26225, 10113, 14096, 6771, 14323, -2037, 6788, 2297, 1761, 96324, 8382,
+    4309, 10450, 1695, 3016, -146502, 46475, 1580, 26118, 10487, 14179, 6622,
+    14439, -2034, 6757, 2342, 1761, 95869, 8966, 4347, 10358, 1999, 2855,
+    -146958, 47717, 826, 25952, 10263, 14061, 5266, 13681, -2417, 6582, 2047,
+    1608, 96257, 9107, 4452, 10301, 1792, 2676, -146992, 47123, 446, 25822,
+    10405, 14292, 5140, 13804, -2403, 6496, 1834, 1735, 97489, 9253, 4414,
+    10684, 1549, 2721, -145811, 46182, 901, 26482, 10241, 14524, 6075, 14514,
+    -2147, 6691, 2196, 1899, 97011, 8178, 4102, 10758, 1638, 2869
+  },
+  { -166617, 46969, -43908, 17726, 6330, 25615, 6913, 5450, -2301, 1984,
+    507, 2883, 149998, 28709, 19333, 16703, 11093, 8965, -168254, 46604,
+    -44315, 17862, 6474, 25746, 7018, 5373, -2343, 1930, 513, 2819, 150391,
+    28627, 19194, 16678, 10998, 8929, -169093, 46084, -44767, 17427, 6401,
+    25674, 7147, 5472, -2336, 1820, 491, 2802, 149860, 28430, 19064, 16524,
+    10898, 8875, -170205, 46189, -44877, 17403, 6190, 25209, 7035, 5673, -2173,
+    1894, 574, 2756, 148830, 28230, 18819, 16418, 10789, 8811, -171263, 45045,
+    -44834, 16858, 6103, 24726, 7014, 5713, -2103, 1877, 518, 2729, 147073,
+    27744, 18629, 16277, 10690, 8703, -171720, 44153, -45062, 15951, 5872,
+    24429, 7044, 5585, -2082, 1807, 519, 2769, 144791, 27402, 18490, 16126,
+    10548, 8635
+  }
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_tables.h
new file mode 100644
index 0000000..4f2e0e7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/lpc_tables.h
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_tables.h
+ *
+ * header file for coding tables for the LPC coefficients
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_
+
+#include "typedefs.h"
+
+
+/* indices of KLT coefficients used */
+extern const WebRtc_UWord16 WebRtcIsacfix_kSelIndGain[12];
+
+extern const WebRtc_UWord16 WebRtcIsacfix_kSelIndShape[108];
+
+/* cdf array for model indicator */
+extern const WebRtc_UWord16 WebRtcIsacfix_kModelCdf[KLT_NUM_MODELS+1];
+
+/* pointer to cdf array for model indicator */
+extern const WebRtc_UWord16 *WebRtcIsacfix_kModelCdfPtr[1];
+
+/* initial cdf index for decoder of model indicator */
+extern const WebRtc_UWord16 WebRtcIsacfix_kModelInitIndex[1];
+
+/* offset to go from rounded value to quantization index */
+extern const WebRtc_Word16 WebRtcIsacfix_kQuantMinGain[12];
+
+extern const WebRtc_Word16 WebRtcIsacfix_kQuantMinShape[108];
+
+/* maximum quantization index */
+extern const WebRtc_UWord16 WebRtcIsacfix_kMaxIndGain[12];
+
+extern const WebRtc_UWord16 WebRtcIsacfix_kMaxIndShape[108];
+
+/* index offset */
+extern const WebRtc_UWord16 WebRtcIsacfix_kOffsetGain[KLT_NUM_MODELS][12];
+
+extern const WebRtc_UWord16 WebRtcIsacfix_kOffsetShape[KLT_NUM_MODELS][108];
+
+/* initial cdf index for KLT coefficients */
+extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndexGain[KLT_NUM_MODELS][12];
+
+extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndexShape[KLT_NUM_MODELS][108];
+
+/* offsets for quantizer representation levels */
+extern const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsGain[3];
+
+extern const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsShape[3];
+
+/* quantizer representation levels */
+extern const WebRtc_Word32 WebRtcIsacfix_kLevelsGainQ17[1176];
+
+extern const WebRtc_Word16 WebRtcIsacfix_kLevelsShapeQ10[1735];
+
+/* cdf tables for quantizer indices */
+extern const WebRtc_UWord16 WebRtcIsacfix_kCdfGain[1212];
+
+extern const WebRtc_UWord16 WebRtcIsacfix_kCdfShape[2059];
+
+/* pointers to cdf tables for quantizer indices */
+extern const WebRtc_UWord16 *WebRtcIsacfix_kCdfGainPtr[KLT_NUM_MODELS][12];
+
+extern const WebRtc_UWord16 *WebRtcIsacfix_kCdfShapePtr[KLT_NUM_MODELS][108];
+
+/* code length for all coefficients using different models */
+extern const WebRtc_Word16 WebRtcIsacfix_kCodeLenGainQ11[392];
+
+extern const WebRtc_Word16 WebRtcIsacfix_kCodeLenShapeQ11[577];
+
+/* left KLT transforms */
+extern const WebRtc_Word16 WebRtcIsacfix_kT1GainQ15[KLT_NUM_MODELS][4];
+
+extern const WebRtc_Word16 WebRtcIsacfix_kT1ShapeQ15[KLT_NUM_MODELS][324];
+
+/* right KLT transforms */
+extern const WebRtc_Word16 WebRtcIsacfix_kT2GainQ15[KLT_NUM_MODELS][36];
+
+extern const WebRtc_Word16 WebRtcIsacfix_kT2ShapeQ15[KLT_NUM_MODELS][36];
+
+/* means of log gains and LAR coefficients */
+extern const WebRtc_Word16 WebRtcIsacfix_kMeansGainQ8[KLT_NUM_MODELS][12];
+
+extern const WebRtc_Word32 WebRtcIsacfix_kMeansShapeQ17[3][108];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_estimator.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_estimator.c
new file mode 100644
index 0000000..1702098
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_estimator.c
@@ -0,0 +1,519 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_estimator.c
+ *
+ * Pitch filter functions
+ *
+ */
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+#include "pitch_estimator.h"
+
+/* log2[0.2, 0.5, 0.98] in Q8 */
+static const WebRtc_Word16 kLogLagWinQ8[3] = {
+  -594, -256, -7
+};
+
+/* [1 -0.75 0.25] in Q12 */
+static const WebRtc_Word16 kACoefQ12[3] = {
+  4096, -3072, 1024
+};
+
+
+
+static __inline WebRtc_Word32 Log2Q8( WebRtc_UWord32 x ) {
+
+  WebRtc_Word32 zeros, lg2;
+  WebRtc_Word16 frac;
+
+  zeros=WebRtcSpl_NormU32(x);
+  frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(((WebRtc_UWord32)(WEBRTC_SPL_LSHIFT_W32(x, zeros))&0x7FFFFFFF), 23);
+  /* log2(magn(i)) */
+
+  lg2= (WEBRTC_SPL_LSHIFT_W32((31-zeros), 8)+frac);
+  return lg2;
+
+}
+
+static __inline WebRtc_Word16 Exp2Q10(WebRtc_Word16 x) { // Both in and out in Q10
+
+  WebRtc_Word16 tmp16_1, tmp16_2;
+
+  tmp16_2=(WebRtc_Word16)(0x0400|(x&0x03FF));
+  tmp16_1=-(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(x,10);
+  if(tmp16_1>0)
+    return (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+  else
+    return (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+
+}
+
+
+
+/* 1D parabolic interpolation. All input and output values are in Q8 */
+static __inline void Intrp1DQ8(WebRtc_Word32 *x, WebRtc_Word32 *fx, WebRtc_Word32 *y, WebRtc_Word32 *fy) {
+
+  WebRtc_Word16 sign1=1, sign2=1;
+  WebRtc_Word32 r32, q32, t32, nom32, den32;
+  WebRtc_Word16 t16, tmp16, tmp16_1;
+
+  if ((fx[0]>0) && (fx[2]>0)) {
+    r32=fx[1]-fx[2];
+    q32=fx[0]-fx[1];
+    nom32=q32+r32;
+    den32=WEBRTC_SPL_MUL_32_16((q32-r32), 2);
+    if (nom32<0)
+      sign1=-1;
+    if (den32<0)
+      sign2=-1;
+
+    /* t = (q32+r32)/(2*(q32-r32)) = (fx[0]-fx[1] + fx[1]-fx[2])/(2 * fx[0]-fx[1] - (fx[1]-fx[2]))*/
+    /* (Signs are removed because WebRtcSpl_DivResultInQ31 can't handle negative numbers) */
+    t32=WebRtcSpl_DivResultInQ31(WEBRTC_SPL_MUL_32_16(nom32, sign1),WEBRTC_SPL_MUL_32_16(den32, sign2)); /* t in Q31, without signs */
+
+    t16=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(t32, 23);  /* Q8 */
+    t16=t16*sign1*sign2;        /* t in Q8 with signs */
+
+    *y = x[0]+t16;          /* Q8 */
+    // *y = x[1]+t16;          /* Q8 */
+
+    /* The following code calculates fy in three steps */
+    /* fy = 0.5 * t * (t-1) * fx[0] + (1-t*t) * fx[1] + 0.5 * t * (t+1) * fx[2]; */
+
+    /* Part I: 0.5 * t * (t-1) * fx[0] */
+    tmp16_1=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(t16,t16); /* Q8*Q8=Q16 */
+    tmp16_1 = WEBRTC_SPL_RSHIFT_W16(tmp16_1,2);  /* Q16>>2 = Q14 */
+    t16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(t16, 64);           /* Q8<<6 = Q14  */
+    tmp16 = tmp16_1-t16;
+    *fy = WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[0]); /* (Q14 * Q8 >>15)/2 = Q8 */
+
+    /* Part II: (1-t*t) * fx[1] */
+    tmp16 = 16384-tmp16_1;        /* 1 in Q14 - Q14 */
+    *fy += WEBRTC_SPL_MUL_16_32_RSFT14(tmp16, fx[1]);/* Q14 * Q8 >> 14 = Q8 */
+
+    /* Part III: 0.5 * t * (t+1) * fx[2] */
+    tmp16 = tmp16_1+t16;
+    *fy += WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[2]);/* (Q14 * Q8 >>15)/2 = Q8 */
+  } else {
+    *y = x[0];
+    *fy= fx[1];
+  }
+}
+
+
+static void FindFour32(WebRtc_Word32 *in, WebRtc_Word16 length, WebRtc_Word16 *bestind)
+{
+  WebRtc_Word32 best[4]= {-100, -100, -100, -100};
+  WebRtc_Word16 k;
+
+  for (k=0; k<length; k++) {
+    if (in[k] > best[3]) {
+      if (in[k] > best[2]) {
+        if (in[k] > best[1]) {
+          if (in[k] > best[0]) { // The Best
+            best[3] = best[2];
+            bestind[3] = bestind[2];
+            best[2] = best[1];
+            bestind[2] = bestind[1];
+            best[1] = best[0];
+            bestind[1] = bestind[0];
+            best[0] = in[k];
+            bestind[0] = k;
+          } else { // 2nd best
+            best[3] = best[2];
+            bestind[3] = bestind[2];
+            best[2] = best[1];
+            bestind[2] = bestind[1];
+            best[1] = in[k];
+            bestind[1] = k;
+          }
+        } else { // 3rd best
+          best[3] = best[2];
+          bestind[3] = bestind[2];
+          best[2] = in[k];
+          bestind[2] = k;
+        }
+      } else {  // 4th best
+        best[3] = in[k];
+        bestind[3] = k;
+      }
+    }
+  }
+}
+
+
+
+
+
+static void PCorr2Q32(const WebRtc_Word16 *in, WebRtc_Word32 *logcorQ8)
+{
+  WebRtc_Word16 scaling,n,k;
+  WebRtc_Word32 ysum32,csum32, lys, lcs;
+  WebRtc_Word32 prod32, oneQ8;
+
+
+  const WebRtc_Word16 *x, *inptr;
+
+  oneQ8 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, 8);  // 1.00 in Q8
+
+  x = in + PITCH_MAX_LAG/2 + 2;
+  scaling = WebRtcSpl_GetScalingSquare ((WebRtc_Word16 *) in, PITCH_CORR_LEN2, PITCH_CORR_LEN2);
+  ysum32 = 1;
+  csum32 = 0;
+  x = in + PITCH_MAX_LAG/2 + 2;
+  for (n = 0; n < PITCH_CORR_LEN2; n++) {
+    ysum32 += WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) in[n],(WebRtc_Word16) in[n], scaling);  // Q0
+    csum32 += WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) x[n],(WebRtc_Word16) in[n], scaling); // Q0
+  }
+
+  logcorQ8 += PITCH_LAG_SPAN2 - 1;
+
+  lys=Log2Q8((WebRtc_UWord32) ysum32); // Q8
+  lys=WEBRTC_SPL_RSHIFT_W32(lys, 1); //sqrt(ysum);
+
+  if (csum32>0) {
+
+    lcs=Log2Q8((WebRtc_UWord32) csum32);   // 2log(csum) in Q8
+
+    if (lcs>(lys + oneQ8) ){ // csum/sqrt(ysum) > 2 in Q8
+      *logcorQ8 = lcs - lys;  // log2(csum/sqrt(ysum))
+    } else {
+      *logcorQ8 = oneQ8;  // 1.00
+    }
+
+  } else {
+    *logcorQ8 = 0;
+  }
+
+
+  for (k = 1; k < PITCH_LAG_SPAN2; k++) {
+    inptr = &in[k];
+    ysum32 -= WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) in[k-1],(WebRtc_Word16) in[k-1], scaling);
+    ysum32 += WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) in[PITCH_CORR_LEN2 + k - 1],(WebRtc_Word16) in[PITCH_CORR_LEN2 + k - 1], scaling);
+    csum32 = 0;
+    prod32 = WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) x[0],(WebRtc_Word16) inptr[0], scaling);
+
+    for (n = 1; n < PITCH_CORR_LEN2; n++) {
+      csum32 += prod32;
+      prod32 = WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) x[n],(WebRtc_Word16) inptr[n], scaling);
+    }
+
+    csum32 += prod32;
+    logcorQ8--;
+
+    lys=Log2Q8((WebRtc_UWord32)ysum32); // Q8
+    lys=WEBRTC_SPL_RSHIFT_W32(lys, 1); //sqrt(ysum);
+
+    if (csum32>0) {
+
+      lcs=Log2Q8((WebRtc_UWord32) csum32);   // 2log(csum) in Q8
+
+      if (lcs>(lys + oneQ8) ){ // csum/sqrt(ysum) > 2
+        *logcorQ8 = lcs - lys;  // log2(csum/sqrt(ysum))
+      } else {
+        *logcorQ8 = oneQ8;  // 1.00
+      }
+
+    } else {
+      *logcorQ8 = 0;
+    }
+  }
+}
+
+
+
+void WebRtcIsacfix_InitialPitch(const WebRtc_Word16 *in, /* Q0 */
+                                PitchAnalysisStruct *State,
+                                WebRtc_Word16 *lagsQ7                   /* Q7 */
+                                )
+{
+  WebRtc_Word16 buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2];
+  WebRtc_Word32 *crrvecQ8_1,*crrvecQ8_2;
+  WebRtc_Word32 cv1q[PITCH_LAG_SPAN2+2],cv2q[PITCH_LAG_SPAN2+2], peakvq[PITCH_LAG_SPAN2+2];
+  int k;
+  WebRtc_Word16 peaks_indq;
+  WebRtc_Word16 peakiq[PITCH_LAG_SPAN2];
+  WebRtc_Word32 corr;
+  WebRtc_Word32 corr32, corr_max32, corr_max_o32;
+  WebRtc_Word16 npkq;
+  WebRtc_Word16 best4q[4]={0,0,0,0};
+  WebRtc_Word32 xq[3],yq[1],fyq[1];
+  WebRtc_Word32 *fxq;
+  WebRtc_Word32 best_lag1q, best_lag2q;
+  WebRtc_Word32 tmp32a,tmp32b,lag32,ratq;
+  WebRtc_Word16 start;
+  WebRtc_Word16 oldgQ12, tmp16a, tmp16b, gain_bias16,tmp16c, tmp16d, bias16;
+  WebRtc_Word32 tmp32c,tmp32d, tmp32e;
+  WebRtc_Word16 old_lagQ;
+  WebRtc_Word32 old_lagQ8;
+  WebRtc_Word32 lagsQ8[4];
+
+  old_lagQ = State->PFstr_wght.oldlagQ7; // Q7
+  old_lagQ8= WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)old_lagQ,1); //Q8
+
+  oldgQ12= State->PFstr_wght.oldgainQ12;
+
+  crrvecQ8_1=&cv1q[1];
+  crrvecQ8_2=&cv2q[1];
+
+
+  /* copy old values from state buffer */
+  memcpy(buf_dec16, State->dec_buffer16, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2)));
+
+  /* decimation; put result after the old values */
+  WebRtcIsacfix_DecimateAllpass32(in, State->decimator_state32, PITCH_FRAME_LEN,
+                                  &buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2]);
+
+  /* low-pass filtering */
+  start= PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2;
+  WebRtcSpl_FilterARFastQ12(&buf_dec16[start],&buf_dec16[start],(WebRtc_Word16*)kACoefQ12,3, PITCH_FRAME_LEN/2);
+
+  /* copy end part back into state buffer */
+  for (k = 0; k < (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2); k++)
+    State->dec_buffer16[k] = buf_dec16[k+PITCH_FRAME_LEN/2];
+
+
+  /* compute correlation for first and second half of the frame */
+  PCorr2Q32(buf_dec16, crrvecQ8_1);
+  PCorr2Q32(buf_dec16 + PITCH_CORR_STEP2, crrvecQ8_2);
+
+
+  /* bias towards pitch lag of previous frame */
+  tmp32a = Log2Q8((WebRtc_UWord32) old_lagQ8) - 2304; // log2(0.5*oldlag) in Q8
+  tmp32b = WEBRTC_SPL_MUL_16_16_RSFT(oldgQ12,oldgQ12, 10); //Q12 & * 4.0;
+  gain_bias16 = (WebRtc_Word16) tmp32b;  //Q12
+  if (gain_bias16 > 3276) gain_bias16 = 3276; // 0.8 in Q12
+
+
+  for (k = 0; k < PITCH_LAG_SPAN2; k++)
+  {
+    if (crrvecQ8_1[k]>0) {
+      tmp32b = Log2Q8((WebRtc_UWord32) (k + (PITCH_MIN_LAG/2-2)));
+      tmp16a = (WebRtc_Word16) (tmp32b - tmp32a); // Q8 & fabs(ratio)<4
+      tmp32c = WEBRTC_SPL_MUL_16_16_RSFT(tmp16a,tmp16a, 6); //Q10
+      tmp16b = (WebRtc_Word16) tmp32c; // Q10 & <8
+      tmp32d = WEBRTC_SPL_MUL_16_16_RSFT(tmp16b, 177 , 8); // mult with ln2 in Q8
+      tmp16c = (WebRtc_Word16) tmp32d; // Q10 & <4
+      tmp16d = Exp2Q10((WebRtc_Word16) -tmp16c); //Q10
+      tmp32c = WEBRTC_SPL_MUL_16_16_RSFT(gain_bias16,tmp16d,13); // Q10  & * 0.5
+      bias16 = (WebRtc_Word16) (1024 + tmp32c); // Q10
+      tmp32b = Log2Q8((WebRtc_UWord32) bias16) - 2560; // Q10 in -> Q8 out with 10*2^8 offset
+      crrvecQ8_1[k] += tmp32b ; // -10*2^8 offset
+    }
+  }
+
+  /* taper correlation functions */
+  for (k = 0; k < 3; k++) {
+    crrvecQ8_1[k] += kLogLagWinQ8[k];
+    crrvecQ8_2[k] += kLogLagWinQ8[k];
+
+    crrvecQ8_1[PITCH_LAG_SPAN2-1-k] += kLogLagWinQ8[k];
+    crrvecQ8_2[PITCH_LAG_SPAN2-1-k] += kLogLagWinQ8[k];
+  }
+
+
+  /* Make zeropadded corr vectors */
+  cv1q[0]=0;
+  cv2q[0]=0;
+  cv1q[PITCH_LAG_SPAN2+1]=0;
+  cv2q[PITCH_LAG_SPAN2+1]=0;
+  corr_max32 = 0;
+
+  for (k = 1; k <= PITCH_LAG_SPAN2; k++)
+  {
+
+
+    corr32=crrvecQ8_1[k-1];
+    if (corr32 > corr_max32)
+      corr_max32 = corr32;
+
+    corr32=crrvecQ8_2[k-1];
+    corr32 += -4; // Compensate for later (log2(0.99))
+
+    if (corr32 > corr_max32)
+      corr_max32 = corr32;
+
+  }
+
+  /* threshold value to qualify as a peak */
+  // corr_max32 += -726; // log(0.14)/log(2.0) in Q8
+  corr_max32 += -1000; // ca. log2(0.066) in Q8 (threshold lowered from -726 = log2(0.14))
+  corr_max_o32 = corr_max32;
+
+
+  /* find peaks in corr1 */
+  peaks_indq = 0;
+  for (k = 1; k <= PITCH_LAG_SPAN2; k++)
+  {
+    corr32=cv1q[k];
+    if (corr32>corr_max32) { // Disregard small peaks
+      if ((corr32>=cv1q[k-1]) && (corr32>cv1q[k+1])) { // Peak?
+        peakvq[peaks_indq] = corr32;
+        peakiq[peaks_indq++] = k;
+      }
+    }
+  }
+
+
+  /* find highest interpolated peak */
+  corr_max32=0;
+  best_lag1q =0;
+  if (peaks_indq > 0) {
+    FindFour32(peakvq, (WebRtc_Word16) peaks_indq, best4q);
+    npkq = WEBRTC_SPL_MIN(peaks_indq, 4);
+
+    for (k=0;k<npkq;k++) {
+
+      lag32 =  peakiq[best4q[k]];
+      fxq = &cv1q[peakiq[best4q[k]]-1];
+      xq[0]= lag32;
+      xq[0] = WEBRTC_SPL_LSHIFT_W32(xq[0], 8);
+      Intrp1DQ8(xq, fxq, yq, fyq);
+
+      tmp32a= Log2Q8((WebRtc_UWord32) *yq) - 2048; // offset 8*2^8
+      /* Bias towards short lags */
+      /* log(pow(0.8, log(2.0 * *y )))/log(2.0) */
+      tmp32b= WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) tmp32a, -42, 8);
+      tmp32c= tmp32b + 256;
+      *fyq += tmp32c;
+      if (*fyq > corr_max32) {
+        corr_max32 = *fyq;
+        best_lag1q = *yq;
+      }
+    }
+    tmp32a = best_lag1q - OFFSET_Q8;
+    tmp32b = WEBRTC_SPL_LSHIFT_W32(tmp32a, 1);
+    lagsQ8[0] = tmp32b + PITCH_MIN_LAG_Q8;
+    lagsQ8[1] = lagsQ8[0];
+  } else {
+    lagsQ8[0] = old_lagQ8;
+    lagsQ8[1] = lagsQ8[0];
+  }
+
+  /* Bias towards constant pitch */
+  tmp32a = lagsQ8[0] - PITCH_MIN_LAG_Q8;
+  ratq = WEBRTC_SPL_RSHIFT_W32(tmp32a, 1) + OFFSET_Q8;
+
+  for (k = 1; k <= PITCH_LAG_SPAN2; k++)
+  {
+    tmp32a = WEBRTC_SPL_LSHIFT_W32(k, 7); // 0.5*k Q8
+    tmp32b = (WebRtc_Word32) (WEBRTC_SPL_LSHIFT_W32(tmp32a, 1)) - ratq; // Q8
+    tmp32c = WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) tmp32b, (WebRtc_Word16) tmp32b, 8); // Q8
+
+    tmp32b = (WebRtc_Word32) tmp32c + (WebRtc_Word32)  WEBRTC_SPL_RSHIFT_W32(ratq, 1); // (k-r)^2 + 0.5 * r  Q8
+    tmp32c = Log2Q8((WebRtc_UWord32) tmp32a) - 2048; // offset 8*2^8 , log2(0.5*k) Q8
+    tmp32d = Log2Q8((WebRtc_UWord32) tmp32b) - 2048; // offset 8*2^8 , log2((k-r)^2 + 0.5*r) Q8
+    tmp32e =  tmp32c -tmp32d;
+
+    cv2q[k] += WEBRTC_SPL_RSHIFT_W32(tmp32e, 1);
+
+  }
+
+  /* find peaks in corr2 */
+  corr_max32 = corr_max_o32;
+  peaks_indq = 0;
+
+  for (k = 1; k <= PITCH_LAG_SPAN2; k++)
+  {
+    corr=cv2q[k];
+    if (corr>corr_max32) { // Disregard small peaks
+      if ((corr>=cv2q[k-1]) && (corr>cv2q[k+1])) { // Peak?
+        peakvq[peaks_indq] = corr;
+        peakiq[peaks_indq++] = k;
+      }
+    }
+  }
+
+
+
+  /* find highest interpolated peak */
+  corr_max32 = 0;
+  best_lag2q =0;
+  if (peaks_indq > 0) {
+
+    FindFour32(peakvq, (WebRtc_Word16) peaks_indq, best4q);
+    npkq = WEBRTC_SPL_MIN(peaks_indq, 4);
+    for (k=0;k<npkq;k++) {
+
+      lag32 =  peakiq[best4q[k]];
+      fxq = &cv2q[peakiq[best4q[k]]-1];
+
+      xq[0]= lag32;
+      xq[0] = WEBRTC_SPL_LSHIFT_W32(xq[0], 8);
+      Intrp1DQ8(xq, fxq, yq, fyq);
+
+      /* Bias towards short lags */
+      /* log(pow(0.8, log(2.0f * *y )))/log(2.0f) */
+      tmp32a= Log2Q8((WebRtc_UWord32) *yq) - 2048; // offset 8*2^8
+      tmp32b= WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) tmp32a, -82, 8);
+      tmp32c= tmp32b + 256;
+      *fyq += tmp32c;
+      if (*fyq > corr_max32) {
+        corr_max32 = *fyq;
+        best_lag2q = *yq;
+      }
+    }
+
+    tmp32a = best_lag2q - OFFSET_Q8;
+    tmp32b = WEBRTC_SPL_LSHIFT_W32(tmp32a, 1);
+    lagsQ8[2] = tmp32b + PITCH_MIN_LAG_Q8;
+    lagsQ8[3] = lagsQ8[2];
+  } else {
+    lagsQ8[2] = lagsQ8[0];
+    lagsQ8[3] = lagsQ8[0];
+  }
+
+  lagsQ7[0]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[0], 1);
+  lagsQ7[1]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[1], 1);
+  lagsQ7[2]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[2], 1);
+  lagsQ7[3]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[3], 1);
+
+
+}
+
+
+
+void WebRtcIsacfix_PitchAnalysis(const WebRtc_Word16 *inn,               /* PITCH_FRAME_LEN samples */
+                                 WebRtc_Word16 *outQ0,                  /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
+                                 PitchAnalysisStruct *State,
+                                 WebRtc_Word16 *PitchLags_Q7,
+                                 WebRtc_Word16 *PitchGains_Q12)
+{
+  WebRtc_Word16 inbufQ0[PITCH_FRAME_LEN + QLOOKAHEAD];
+  WebRtc_Word16 k;
+
+  /* initial pitch estimate */
+  WebRtcIsacfix_InitialPitch(inn, State,  PitchLags_Q7);
+
+
+  /* Calculate gain */
+  WebRtcIsacfix_PitchFilterGains(inn, &(State->PFstr_wght), PitchLags_Q7, PitchGains_Q12);
+
+  /* concatenate previous input's end and current input */
+  for (k = 0; k < QLOOKAHEAD; k++) {
+    inbufQ0[k] = State->inbuf[k];
+  }
+  for (k = 0; k < PITCH_FRAME_LEN; k++) {
+    inbufQ0[k+QLOOKAHEAD] = (WebRtc_Word16) inn[k];
+  }
+
+  /* lookahead pitch filtering for masking analysis */
+  WebRtcIsacfix_PitchFilter(inbufQ0, outQ0, &(State->PFstr), PitchLags_Q7,PitchGains_Q12, 2);
+
+
+  /* store last part of input */
+  for (k = 0; k < QLOOKAHEAD; k++) {
+    State->inbuf[k] = inbufQ0[k + PITCH_FRAME_LEN];
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_estimator.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_estimator.h
new file mode 100644
index 0000000..afdc978
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_estimator.h
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_estimator.h
+ *
+ * Pitch functions
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_
+
+#include "structs.h"
+
+
+
+/* Top-level pitch analysis: estimates pitch lags (Q7) and gains (Q12) for
+ * one frame and produces the look-ahead pitch-filtered signal. */
+void WebRtcIsacfix_PitchAnalysis(const WebRtc_Word16 *in,               /* PITCH_FRAME_LEN samples */
+                                 WebRtc_Word16 *outQ0,                  /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
+                                 PitchAnalysisStruct *State,
+                                 WebRtc_Word16 *lagsQ7,
+                                 WebRtc_Word16 *PitchGains_Q12);
+
+
+/* Initial pitch lag estimate; writes the per-subframe lags in Q7 to qlags. */
+void WebRtcIsacfix_InitialPitch(const WebRtc_Word16 *in,
+                                PitchAnalysisStruct *State,
+                                WebRtc_Word16 *qlags);
+
+/* Fractional-lag pitch filter.  `type` selects the variant: 2 additionally
+ * filters the QLOOKAHEAD look-ahead samples; 4 negates the filter output
+ * and boosts the gains.  See pitch_filter.c for details. */
+void WebRtcIsacfix_PitchFilter(WebRtc_Word16 *indatFix,
+                               WebRtc_Word16 *outdatQQ,
+                               PitchFiltstr *pfp,
+                               WebRtc_Word16 *lagsQ7,
+                               WebRtc_Word16 *gainsQ12,
+                               WebRtc_Word16 type);
+
+/* Estimates the per-subframe pitch filter gains (Q12) for the given lags. */
+void WebRtcIsacfix_PitchFilterGains(const WebRtc_Word16 *indatQ0,
+                                    PitchFiltstr *pfp,
+                                    WebRtc_Word16 *lagsQ7,
+                                    WebRtc_Word16 *gainsQ12);
+
+
+
+/* Allpass-based 2:1 decimation of N input samples into out[0..N/2-1]. */
+void WebRtcIsacfix_DecimateAllpass32(const WebRtc_Word16 *in,
+                                     WebRtc_Word32 *state_in,        /* array of size: 2*ALLPASSSECTIONS+1 */
+                                     WebRtc_Word16 N,                   /* number of input samples */
+                                     WebRtc_Word16 *out);             /* array of size N/2 */
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_filter.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_filter.c
new file mode 100644
index 0000000..87af153
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_filter.c
@@ -0,0 +1,331 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_filter.c
+ *
+ * Pitch filter functions
+ *
+ */
+
+#include <string.h>
+
+#include "pitch_estimator.h"
+
+
+/* Damping (low-pass) filter coefficients in Q15; symmetric 5-tap FIR. */
+static const WebRtc_Word16 kDampFilter[PITCH_DAMPORDER] = {
+  -2294, 8192, 20972, 8192, -2294
+};
+
+/* Interpolation coefficients; generated by design_pitch_filter.m.
+ * One PITCH_FRACORDER-tap fractional-delay filter per fractional lag phase.
+ * Coefficients are stored in Q14.
+ */
+static const WebRtc_Word16 kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = {
+  {-367, 1090, -2706,  9945, 10596, -3318,  1626, -781,  287},
+  {-325,  953, -2292,  7301, 12963, -3320,  1570, -743,  271},
+  {-240,  693, -1622,  4634, 14809, -2782,  1262, -587,  212},
+  {-125,  358,  -817,  2144, 15982, -1668,   721, -329,  118},
+  {   0,    0,    -1,     1, 16380,     1,    -1,    0,    0},
+  { 118, -329,   721, -1668, 15982,  2144,  -817,  358, -125},
+  { 212, -587,  1262, -2782, 14809,  4634, -1622,  693, -240},
+  { 271, -743,  1570, -3320, 12963,  7301, -2292,  953, -325}
+};
+
+
+
+
+/* Round a fixed-point value in Q<qDomain> to the nearest integer,
+ * i.e. floor(fixVal / 2^qDomain + 0.5).  Assumes qDomain >= 1 so that the
+ * rounding constant's shift amount (qDomain-1) is non-negative. */
+static __inline WebRtc_Word32 CalcLrIntQ(WebRtc_Word32 fixVal, WebRtc_Word16 qDomain) {
+  WebRtc_Word32 intgr;
+  WebRtc_Word32 roundVal;
+
+  /* roundVal is 0.5 expressed in Q<qDomain>. */
+  roundVal = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1,  qDomain-1);
+  intgr = WEBRTC_SPL_RSHIFT_W32(fixVal+roundVal, qDomain);
+
+  return intgr;
+}
+
+/* Time-varying fractional-lag pitch (comb) filter.
+ *
+ * For every sample, the delayed signal is fetched from the internal delay
+ * line through a PITCH_FRACORDER-tap fractional-delay filter (kIntrpCoef),
+ * low-pass damped (kDampFilter), scaled by the interpolated gain, and
+ * subtracted from (sign = +1) or added to (type 4, sign = -1) the input.
+ * Lag and gain are linearly interpolated from their previous values with
+ * step factor DivFactor = 0.2 (Q15); the interpolated parameters and the
+ * fractional-phase filter are refreshed every 12 samples (cnt).
+ *
+ * indatQQ  : input samples; Q10 if type is 1 or 4, Q0 if type is 2.
+ * outdatQQ : output samples, same Q-domain as the input.
+ * pfp      : in/out filter state (delay buffer ubufQQ, damping filter state
+ *            ystateQQ, previous lag/gain).
+ * lagsQ7   : per-subframe pitch lags in Q7.
+ * gainsQ12 : per-subframe gains in Q12.  NOTE: modified in place (scaled by
+ *            1.3) when type == 4.
+ * type     : 2 also filters the QLOOKAHEAD look-ahead samples using the last
+ *            parameters of the main loop; 4 flips the sign of the filter
+ *            output and boosts the gains.
+ */
+void WebRtcIsacfix_PitchFilter(WebRtc_Word16 *indatQQ, /* Q10 if type is 1 or 4, Q0 if type is 2 */
+                               WebRtc_Word16 *outdatQQ,
+                               PitchFiltstr *pfp,
+                               WebRtc_Word16 *lagsQ7,
+                               WebRtc_Word16 *gainsQ12,
+                               WebRtc_Word16 type)
+{
+  int    k, n, m, ind;
+  WebRtc_Word16 sign = 1;
+  WebRtc_Word16 inystateQQ[PITCH_DAMPORDER];
+  WebRtc_Word16 ubufQQ[PITCH_INTBUFFSIZE+QLOOKAHEAD];
+  WebRtc_Word16 Gain = 21299;     /* 1.3 in Q14 */
+  WebRtc_Word16 DivFactor = 6553; /* 0.2 in Q15 */
+  WebRtc_Word16 oldLagQ7, oldGainQ12,
+      lagdeltaQ7, curLagQ7,
+      gaindeltaQ12, curGainQ12;
+  WebRtc_Word16 tmpW16, indW16=0, frcQQ, cnt=0, pos, pos2;
+  const WebRtc_Word16 *fracoeffQQ=NULL;
+  WebRtc_Word32 tmpW32;
+
+  /* Type 4 adds the periodic component instead of subtracting it. */
+  if (type==4)
+    sign = -1;
+
+  /* Set up buffer and states */
+  memcpy(ubufQQ, pfp->ubufQQ, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), PITCH_BUFFSIZE));
+  memcpy(inystateQQ, pfp->ystateQQ, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), PITCH_DAMPORDER));
+
+  /* Get old lag and gain value from memory */
+  oldLagQ7 = pfp->oldlagQ7;
+  oldGainQ12 = pfp->oldgainQ12;
+
+  if (type==4) {
+    /* make output more periodic */
+    /* Fixed 1.3 = 21299 in Q14 */
+    for (k=0;k<PITCH_SUBFRAMES;k++)
+      gainsQ12[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gainsQ12[k], Gain, 14);
+  }
+
+  /* No interpolation if pitch lag step is big (new lag outside 2/3..3/2 of
+   * the old one): jump straight to the first subframe's parameters. */
+  if ((WEBRTC_SPL_RSHIFT_W16(WEBRTC_SPL_MUL_16_16(lagsQ7[0], 3), 1) < oldLagQ7) ||
+      (lagsQ7[0] > WEBRTC_SPL_RSHIFT_W16(WEBRTC_SPL_MUL_16_16(oldLagQ7, 3), 1))) {
+    oldLagQ7 = lagsQ7[0];
+    oldGainQ12 = gainsQ12[0];
+  }
+
+  ind=0;
+  for (k=0;k<PITCH_SUBFRAMES;k++) {
+
+    /* Calculate interpolation steps (0.2 of the subframe delta per update) */
+    lagdeltaQ7 = lagsQ7[k]-oldLagQ7;
+    lagdeltaQ7 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(lagdeltaQ7,DivFactor,15);
+    curLagQ7 = oldLagQ7;
+    gaindeltaQ12 = gainsQ12[k]-oldGainQ12;
+    gaindeltaQ12 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gaindeltaQ12,DivFactor,15);
+
+    curGainQ12 = oldGainQ12;
+    oldLagQ7 = lagsQ7[k];
+    oldGainQ12 = gainsQ12[k];
+
+
+    for (n=0;n<PITCH_SUBFRAME_LEN;n++) {
+
+      if (cnt == 0) {   /* Update parameters every 12 samples */
+
+        curGainQ12 += gaindeltaQ12;
+        curLagQ7 += lagdeltaQ7;
+        /* Split the lag into an integer part (indW16) and a fractional
+         * phase index frcQQ in [0, PITCH_FRACS) selecting the
+         * fractional-delay filter. */
+        indW16 = (WebRtc_Word16)CalcLrIntQ(curLagQ7,7);
+        tmpW16 = WEBRTC_SPL_LSHIFT_W16(indW16,7);
+        tmpW16 -= curLagQ7;
+        frcQQ = WEBRTC_SPL_RSHIFT_W16(tmpW16,4);
+        frcQQ += 4;
+
+        if(frcQQ==PITCH_FRACS)
+          frcQQ=0;
+        fracoeffQQ = kIntrpCoef[frcQQ];
+
+        cnt=12;
+      }
+
+      /* shift low pass filter state */
+      for (m=PITCH_DAMPORDER-1;m>0;m--)
+        inystateQQ[m] = inystateQQ[m-1];
+
+      /* Filter to get fractional pitch: convolve the delay line around the
+       * integer lag with the selected fractional-phase filter. */
+      pos = ind + PITCH_BUFFSIZE;
+      pos2 = pos - (indW16 + 2);
+
+      tmpW32=0;
+      for (m=0;m<PITCH_FRACORDER;m++)
+        tmpW32 += WEBRTC_SPL_MUL_16_16(ubufQQ[pos2+m], fracoeffQQ[m]);
+
+      /* Saturate to avoid overflow in tmpW16: bounds are chosen so that
+       * after adding the rounding term and shifting, the result is within
+       * [-32768, 32767]. */
+      tmpW32 = WEBRTC_SPL_SAT(536862719, tmpW32, -536879104);
+      tmpW32 += 8192;
+      tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32,14);
+
+      inystateQQ[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(curGainQ12, tmpW16,12); /* Multiply with gain */
+
+      /* Low pass filter */
+      tmpW32=0;
+      for (m=0;m<PITCH_DAMPORDER;m++)
+        tmpW32 += WEBRTC_SPL_MUL_16_16(inystateQQ[m], kDampFilter[m]);
+
+      /* Saturate to avoid overflow in tmpW16 */
+      tmpW32 = WEBRTC_SPL_SAT(1073725439, tmpW32, -1073758208);
+      tmpW32 += 16384;
+      tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32,15);
+
+      /* Subtract from input and update buffer; the delay line stores
+       * input + output (saturated to 16 bits). */
+      tmpW32 = indatQQ[ind] - WEBRTC_SPL_MUL_16_16(sign, tmpW16);
+      outdatQQ[ind] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmpW32);
+      tmpW32 = indatQQ[ind] + (WebRtc_Word32)outdatQQ[ind];
+      ubufQQ[pos] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmpW32);
+
+      ind++;
+      cnt--;
+    }
+  }
+
+
+  /* Export buffer and states */
+  memcpy(pfp->ubufQQ, ubufQQ+PITCH_FRAME_LEN, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), PITCH_BUFFSIZE));
+  memcpy(pfp->ystateQQ, inystateQQ, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), PITCH_DAMPORDER));
+
+  pfp->oldlagQ7 = oldLagQ7;
+  pfp->oldgainQ12 = oldGainQ12;
+
+  if (type==2) {
+    /* Filter look-ahead segment, reusing the last curGainQ12/indW16/
+     * fracoeffQQ from the main loop.  These states are NOT exported. */
+    for (n=0;n<QLOOKAHEAD;n++) {
+      /* shift low pass filter state */
+      for (m=PITCH_DAMPORDER-1;m>0;m--)
+        inystateQQ[m] = inystateQQ[m-1];
+
+      /* Filter to get fractional pitch */
+      pos = ind + PITCH_BUFFSIZE;
+      pos2= pos - (indW16 + 2);
+
+      tmpW32=0;
+      for (m=0;m<PITCH_FRACORDER;m++)
+        tmpW32 += WEBRTC_SPL_MUL_16_16(ubufQQ[pos2+m], fracoeffQQ[m]);
+
+      /* NOTE(review): unlike the main loop's symmetric WEBRTC_SPL_SAT,
+       * only positive overflow is guarded here (and below); a large
+       * negative sum is shifted unclamped — confirm intentional. */
+      if (tmpW32<536862720) {//536870912)
+        tmpW32 += 8192;
+        tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32,14);
+      } else
+        tmpW16= 32767;
+      inystateQQ[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(curGainQ12, tmpW16,12);  /* Multiply with gain */
+
+      /* Low pass filter */
+      tmpW32=0;
+      for (m=0;m<PITCH_DAMPORDER;m++)
+        tmpW32 += WEBRTC_SPL_MUL_16_16(inystateQQ[m], kDampFilter[m]);
+      if (tmpW32<1073725440) { //1073741824)
+        tmpW32 += 16384;
+        tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32,15);
+      } else
+        tmpW16 = 32767;
+
+      /* Subtract from input and update buffer */
+      tmpW32 = indatQQ[ind] - (WebRtc_Word32)tmpW16;
+      outdatQQ[ind] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmpW32);
+      tmpW32 = indatQQ[ind] + (WebRtc_Word32)outdatQQ[ind];
+      ubufQQ[pos] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmpW32);
+
+      ind++;
+    }
+
+  }
+
+
+}
+
+
+/* Estimate the pitch filter gains for one frame.
+ *
+ * For each subframe, predicts the signal from the delay line through the
+ * fractional-delay filter (same interpolation scheme as
+ * WebRtcIsacfix_PitchFilter, but without damping or gain) and accumulates
+ *   csum1QQ : cross-correlation between input and prediction,
+ *   esumxQQ : energy of the prediction,
+ * under a shared block-floating scale to avoid 32-bit overflow.  Per the
+ * comment below, the gain is half the ratio csum1QQ/esumxQQ, clamped to
+ * [0, PITCH_MAX_GAIN_Q12]; if the ratio would be >= 1 the pre-clamp value
+ * is 1.0 (4096 in Q12).
+ *
+ * indatQ0  : input frame (Q0).
+ * pfp      : in/out filter state; delay buffer and last lag/gain updated
+ *            on exit.
+ * lagsQ7   : per-subframe pitch lags (Q7).
+ * gainsQ12 : output, per-subframe gains (Q12).
+ */
+void WebRtcIsacfix_PitchFilterGains(const WebRtc_Word16 *indatQ0,
+                                    PitchFiltstr *pfp,
+                                    WebRtc_Word16 *lagsQ7,
+                                    WebRtc_Word16 *gainsQ12)
+{
+  int  k, n, m, ind;
+
+  WebRtc_Word16 ubufQQ[PITCH_INTBUFFSIZE];
+  WebRtc_Word16 oldLagQ7,lagdeltaQ7, curLagQ7;
+  WebRtc_Word16 DivFactor = 6553;
+  const WebRtc_Word16 *fracoeffQQ = NULL;
+  WebRtc_Word16 scale;
+  WebRtc_Word16 cnt=0, pos, pos3QQ, frcQQ, indW16 = 0, tmpW16;
+  WebRtc_Word32 tmpW32, tmp2W32, csum1QQ, esumxQQ;
+
+  /* Set up buffer and states */
+  memcpy(ubufQQ, pfp->ubufQQ, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), PITCH_BUFFSIZE));
+  oldLagQ7 = pfp->oldlagQ7;
+
+  /* No interpolation if pitch lag step is big (outside 2/3..3/2 of old) */
+  if ((WEBRTC_SPL_RSHIFT_W16(WEBRTC_SPL_MUL_16_16(lagsQ7[0], 3), 1) < oldLagQ7) ||
+      (lagsQ7[0] > WEBRTC_SPL_RSHIFT_W16(WEBRTC_SPL_MUL_16_16(oldLagQ7, 3), 1))) {
+    oldLagQ7 = lagsQ7[0];
+  }
+
+  ind=0;
+  scale=0;
+  for (k=0;k<PITCH_SUBFRAMES;k++) {
+
+    /* Calculate interpolation steps */
+    lagdeltaQ7 = lagsQ7[k]-oldLagQ7;
+    lagdeltaQ7 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(lagdeltaQ7,DivFactor,15);
+    curLagQ7 = oldLagQ7;
+    oldLagQ7 = lagsQ7[k];
+
+    /* Start from 1 (not 0) to avoid a zero denominator below. */
+    csum1QQ=1;
+    esumxQQ=1;
+
+    for (n=0;n<PITCH_SUBFRAME_LEN;n++) {
+
+      if (cnt == 0) {   /* Update parameters every 12 samples */
+        curLagQ7 += lagdeltaQ7;
+        /* Integer lag and fractional phase index into kIntrpCoef. */
+        indW16 = (WebRtc_Word16)CalcLrIntQ(curLagQ7,7);
+        tmpW16 = WEBRTC_SPL_LSHIFT_W16(indW16,7);
+        tmpW16 -= curLagQ7;
+        frcQQ = WEBRTC_SPL_RSHIFT_W16(tmpW16,4);
+        frcQQ += 4;
+
+        if(frcQQ==PITCH_FRACS)
+          frcQQ=0;
+        fracoeffQQ = kIntrpCoef[frcQQ];
+
+        cnt=12;
+      }
+
+      /* Filter to get fractional pitch */
+      pos = ind + PITCH_BUFFSIZE;
+      pos3QQ = pos - (indW16 + 4);
+
+      tmpW32=0;
+      for (m=0;m<PITCH_FRACORDER;m++){
+        tmpW32 += WEBRTC_SPL_MUL_16_16(ubufQQ[pos3QQ+m], fracoeffQQ[m]);
+      }
+
+      /* Store the raw input in the delay line (no filtering here). */
+      ubufQQ[pos] = indatQ0[ind];
+
+      /* tmp2W32: input * prediction (correlation term);
+       * tmpW32 : prediction^2 (energy term, after rounding to 16 bits). */
+      tmp2W32 = WEBRTC_SPL_MUL_16_32_RSFT14(indatQ0[ind], tmpW32);
+      tmpW32 += 8192;
+      tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32,14);
+      tmpW32 = WEBRTC_SPL_MUL_16_16(tmpW16, tmpW16);
+
+      /* Block-floating point: halve both accumulators whenever any term
+       * approaches 2^30, and remember the total scaling in `scale`. */
+      if ((tmp2W32>1073700000) || (csum1QQ>1073700000) || (tmpW32>1073700000) || (esumxQQ>1073700000)) {//2^30
+        scale++;
+        csum1QQ = WEBRTC_SPL_RSHIFT_W32(csum1QQ,1);
+        esumxQQ = WEBRTC_SPL_RSHIFT_W32(esumxQQ,1);
+      }
+      tmp2W32 = WEBRTC_SPL_RSHIFT_W32(tmp2W32,scale);
+      csum1QQ += tmp2W32;
+      tmpW32 = WEBRTC_SPL_RSHIFT_W32(tmpW32,scale);
+      esumxQQ += tmpW32;
+
+      ind++;
+      cnt--;
+    }
+
+    if (csum1QQ<esumxQQ) {
+      /* Q31 ratio correlation/energy; >>20 gives ratio/2 in Q12. */
+      tmp2W32=WebRtcSpl_DivResultInQ31(csum1QQ,esumxQQ);
+
+      /* Gain should be half the correlation */
+      tmpW32=WEBRTC_SPL_RSHIFT_W32(tmp2W32,20);
+    } else
+      tmpW32=4096;
+    gainsQ12[k]=(WebRtc_Word16)WEBRTC_SPL_SAT(PITCH_MAX_GAIN_Q12, tmpW32, 0);
+
+  }
+
+  /* Export buffer and states */
+  memcpy(pfp->ubufQQ, ubufQQ+PITCH_FRAME_LEN, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), PITCH_BUFFSIZE));
+  pfp->oldlagQ7 = lagsQ7[PITCH_SUBFRAMES-1];
+  pfp->oldgainQ12 = gainsQ12[PITCH_SUBFRAMES-1];
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_gain_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_gain_tables.c
new file mode 100644
index 0000000..50ea658
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_gain_tables.c
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_gain_tables.c
+ *
+ * This file contains tables for the pitch filter side-info in the entropy coder.
+ *
+ */
+
+#include "pitch_gain_tables.h"
+
+
+/********************* Pitch Filter Gain Coefficient Tables ************************/
+
+/* cdf for quantized pitch filter gains (cumulative distribution used by the
+ * arithmetic coder; monotone non-decreasing, ends at 65535). */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchGainCdf[255] = {
+  0,  2,  4,  6,  64,  901,  903,  905,  16954,  16956,
+  16961,  17360,  17362,  17364,  17366,  17368,  17370,  17372,  17374,  17411,
+  17514,  17516,  17583,  18790,  18796,  18802,  20760,  20777,  20782,  21722,
+  21724,  21728,  21738,  21740,  21742,  21744,  21746,  21748,  22224,  22227,
+  22230,  23214,  23229,  23239,  25086,  25108,  25120,  26088,  26094,  26098,
+  26175,  26177,  26179,  26181,  26183,  26185,  26484,  26507,  26522,  27705,
+  27731,  27750,  29767,  29799,  29817,  30866,  30883,  30885,  31025,  31029,
+  31031,  31033,  31035,  31037,  31114,  31126,  31134,  32687,  32722,  32767,
+  35718,  35742,  35757,  36943,  36952,  36954,  37115,  37128,  37130,  37132,
+  37134,  37136,  37143,  37145,  37152,  38843,  38863,  38897,  47458,  47467,
+  47474,  49040,  49061,  49063,  49145,  49157,  49159,  49161,  49163,  49165,
+  49167,  49169,  49171,  49757,  49770,  49782,  61333,  61344,  61346,  62860,
+  62883,  62885,  62887,  62889,  62891,  62893,  62895,  62897,  62899,  62901,
+  62903,  62905,  62907,  62909,  65496,  65498,  65500,  65521,  65523,  65525,
+  65527,  65529,  65531,  65533,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535
+};
+
+/* index limits and ranges */
+const WebRtc_Word16 WebRtcIsacfix_kLowerlimiGain[3] = {
+  -7, -2, -1
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kUpperlimitGain[3] = {
+  0,  3,  1
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kMultsGain[2] = {
+  18,  3
+};
+
+/* size of cdf table */
+const WebRtc_UWord16 WebRtcIsacfix_kCdfTableSizeGain[1] = {
+  256
+};
+
+/* mean values of pitch filter gains in FIXED point Q12
+ * (144-entry codebooks; indexing scheme is defined by the entropy coder —
+ * presumably derived from kMultsGain, TODO confirm against the coder). */
+const WebRtc_Word16 WebRtcIsacfix_kPitchGain1[144] = {
+  843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839,
+  1843, 1843, 1843, 1843, 1843,   1843, 1843, 814, 846, 1092, 1013,
+  1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843,   1843,
+  1843, 1843, 1843, 524, 689, 777, 845, 947, 1069, 1090, 1263,
+  1380, 1447, 1559, 1676,   1645, 1749, 1843, 1843, 1843, 1843, 81,
+  477, 563, 611, 706, 806, 849, 1012, 1192, 1128,   1330, 1489,
+  1425, 1576, 1826, 1741, 1843, 1843, 0,     290, 305, 356, 488,
+  575, 602, 741,    890, 835, 1079, 1196, 1182, 1376, 1519, 1506,
+  1680, 1843, 0,     47,  97,  69,  289, 381,    385, 474, 617,
+  664, 803, 1079, 935, 1160, 1269, 1265, 1506, 1741, 0,      0,
+  0,      0,      112, 120, 190, 283, 442, 343, 526, 809, 684,
+  935, 1134, 1020, 1265, 1506, 0,      0,      0,      0,      0,      0,
+  0,      111,    256, 87,  373, 597, 430, 684, 935, 770, 1020,
+  1265
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kPitchGain2[144] = {
+  1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784,
+  1606, 1843, 1843, 1711, 1843,   1843, 1814, 1389, 1275, 1040, 1564,
+  1414, 1252, 1610, 1495, 1343, 1753, 1592, 1405, 1804, 1720,   1475,
+  1843, 1814, 1581, 1208, 1061, 856, 1349, 1148, 994, 1390, 1253,
+  1111, 1495, 1343, 1178,   1770, 1465, 1234, 1814, 1581, 1342, 1040,
+  793, 713, 1053, 895, 737, 1128, 1003, 861, 1277,   1094, 981,
+  1475, 1192, 1019, 1581, 1342, 1098, 855, 570, 483, 833, 648,
+  540, 948, 744,    572, 1009, 844, 636, 1234, 934, 685, 1342,
+  1217, 984, 537, 318, 124, 603, 423, 350,    687, 479, 322,
+  791, 581, 430, 987, 671, 488, 1098, 849, 597, 283, 27,
+  0,     397,    222, 38,  513, 271, 124, 624, 325, 157, 737,
+  484, 233, 849, 597, 343, 27,  0,      0,   141, 0,     0,
+  256, 69,  0,     370, 87,  0,     484, 229, 0,     597, 343,
+  87
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kPitchGain3[144] = {
+  1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639,
+  1393, 1760, 1525, 1285, 1656,   1419, 1176, 1835, 1718, 1475, 1841,
+  1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299,   1040,
+  1419, 1176, 928, 1773, 1461, 1128, 1532, 1355, 1202, 1429, 1260,
+  1115, 1398, 1151, 1025,   1172, 1080, 790, 1176, 928, 677, 1475,
+  1147, 1019, 1276, 1096, 922, 1214, 1010, 901, 1057,   893, 800,
+  1040, 796, 734, 928, 677, 424, 1137, 897, 753, 1120, 830,
+  710, 875, 751,    601, 795, 642, 583, 790, 544, 475, 677,
+  474, 140, 987, 750, 482, 697, 573, 450,    691, 487, 303,
+  661, 394, 332, 537, 303, 220, 424, 168, 0,     737, 484,
+  229, 624,    348, 153, 441, 261, 136, 397, 166, 51,  283,
+  27,  0,     168, 0,     0,     484, 229,    0,   370, 57,  0,
+  256, 43,  0,     141, 0,  0,   27,  0,   0,   0,   0,
+  0
+};
+
+
+const WebRtc_Word16 WebRtcIsacfix_kPitchGain4[144] = {
+  1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434,
+  1656, 843, 1092, 1336, 504,    757, 1007, 1843, 1843, 1843, 1838,
+  1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821,    1092,
+  249, 504, 757, 1783, 1819, 1843, 1351, 1567, 1727, 1096, 1268,
+  1409, 805, 961, 1131,   444, 670, 843, 0,  249, 504, 1425,
+  1655, 1743, 1096, 1324, 1448, 822, 1019, 1199, 490,    704, 867,
+  81,  450, 555, 0,     0,  249, 1247, 1428, 1530, 881, 1073,
+  1283, 610, 759,    939, 278, 464, 645, 0,     200, 270, 0,
+  0,   0,  935, 1163, 1410, 528, 790, 1068,   377, 499, 717,
+  173, 240, 274, 0,   43,  62,  0,   0,   0,   684, 935,
+  1182, 343,    551, 735, 161, 262, 423, 0,      55,  27,  0,
+  0,   0,   0,   0,   0,   430, 684,    935, 87,  377, 597,
+  0,   46,  256, 0,   0,   0,   0,   0,   0,   0,   0,
+  0
+};
+
+
+
+/* transform matrix in Q12*/
+const WebRtc_Word16 WebRtcIsacfix_kTransform[4][4] = {
+  { -2048, -2048, -2048, -2048 },
+  {  2748,   916,  -916, -2748 },
+  {  2048, -2048, -2048,  2048 },
+  {   916, -2748,  2748,  -916 }
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_gain_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_gain_tables.h
new file mode 100644
index 0000000..788e553
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_gain_tables.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_gain_tables.h
+ *
+ * This file contains tables for the pitch filter side-info in the entropy coder.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_
+
+#include "typedefs.h"
+
+
+/********************* Pitch Filter Gain Coefficient Tables ************************/
+/* cdf for quantized pitch filter gains (for the arithmetic coder) */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchGainCdf[255];
+
+/* index limits and ranges */
+extern const WebRtc_Word16 WebRtcIsacfix_kLowerlimiGain[3];
+extern const WebRtc_Word16 WebRtcIsacfix_kUpperlimitGain[3];
+extern const WebRtc_UWord16 WebRtcIsacfix_kMultsGain[2];
+
+/* mean values of pitch filter gains in Q12*/
+extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain1[144];
+extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain2[144];
+extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain3[144];
+extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain4[144];
+
+/* size of cdf table */
+extern const WebRtc_UWord16 WebRtcIsacfix_kCdfTableSizeGain[1];
+
+/* transform matrix (Q12), applied to the 4 subframe gains */
+extern const WebRtc_Word16 WebRtcIsacfix_kTransform[4][4];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_lag_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_lag_tables.c
new file mode 100644
index 0000000..81700e4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_lag_tables.c
@@ -0,0 +1,306 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_lag_tables.c
+ *
+ * This file contains tables for the pitch filter side-info in the entropy coder.
+ *
+ */
+
+#include "settings.h"
+#include "pitch_lag_tables.h"
+
+
+/********************* Pitch Filter Lag Coefficient Tables *************************/
+
+/* tables for use with small pitch gain */
+
+/* cdf for quantized pitch filter lags (monotone, ends at 65535) */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Lo[127] = {
+  0,  134,  336,  549,  778,  998,  1264,  1512,  1777,  2070,
+  2423,  2794,  3051,  3361,  3708,  3979,  4315,  4610,  4933,  5269,
+  5575,  5896,  6155,  6480,  6816,  7129,  7477,  7764,  8061,  8358,
+  8718,  9020,  9390,  9783,  10177,  10543,  10885,  11342,  11795,  12213,
+  12680,  13096,  13524,  13919,  14436,  14903,  15349,  15795,  16267,  16734,
+  17266,  17697,  18130,  18632,  19080,  19447,  19884,  20315,  20735,  21288,
+  21764,  22264,  22723,  23193,  23680,  24111,  24557,  25022,  25537,  26082,
+  26543,  27090,  27620,  28139,  28652,  29149,  29634,  30175,  30692,  31273,
+  31866,  32506,  33059,  33650,  34296,  34955,  35629,  36295,  36967,  37726,
+  38559,  39458,  40364,  41293,  42256,  43215,  44231,  45253,  46274,  47359,
+  48482,  49678,  50810,  51853,  53016,  54148,  55235,  56263,  57282,  58363,
+  59288,  60179,  61076,  61806,  62474,  63129,  63656,  64160,  64533,  64856,
+  65152,  65535,  65535,  65535,  65535,  65535,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Lo[20] = {
+  0,  429,  3558,  5861,  8558,  11639,  15210,  19502,  24773,  31983,
+  42602,  48567,  52601,  55676,  58160,  60172,  61889,  63235,  65383,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Lo[2] = {
+  0,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Lo[10] = {
+  0,  2966,  6368,  11182,  19431,  37793,  48532,  55353,  60626,  65535
+};
+
+/* one cdf per coded lag component */
+const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrLo[4] = {
+  WebRtcIsacfix_kPitchLagCdf1Lo,
+  WebRtcIsacfix_kPitchLagCdf2Lo,
+  WebRtcIsacfix_kPitchLagCdf3Lo,
+  WebRtcIsacfix_kPitchLagCdf4Lo
+};
+
+/* size of first cdf table */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeLo[1] = {
+  128
+};
+
+/* index limits and ranges */
+const WebRtc_Word16 WebRtcIsacfix_kLowerLimitLo[4] = {
+  -140, -9,  0, -4
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kUpperLimitLo[4] = {
+  -20,  9,  0,  4
+};
+
+/* initial index for arithmetic decoder */
+const WebRtc_UWord16 WebRtcIsacfix_kInitIndLo[3] = {
+  10,  1,  5
+};
+
+/* mean values of pitch filter lags in Q10 */
+
+const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Lo[19] = {
+  -17627, -16207, -14409, -12319, -10253, -8200, -6054, -3986, -1948, -19,
+  1937, 3974, 6064, 8155, 10229, 12270, 14296, 16127, 17520
+};
+
+/* NOTE(review): declared with 9 elements but only 8 initializers — the 9th
+ * is implicitly zero (C aggregate-initialization rule).  The index range
+ * [-4, 4] from kLowerLimitLo/kUpperLimitLo suggests 9 values were intended
+ * (cf. kMeanLag4Mid[19], fully populated) — confirm against upstream. */
+const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Lo[9] = {
+  -7949, -6063, -4036, -1941, 38, 1977, 4060, 6059
+};
+
+
+
+/* tables for use with medium pitch gain */
+
+/* cdf for quantized pitch filter lags (monotone, ends at 65535) */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Mid[255] = {
+  0,  28,  61,  88,  121,  149,  233,  331,  475,  559,
+  624,  661,  689,  712,  745,  791,  815,  843,  866,  922,
+  959,  1024,  1061,  1117,  1178,  1238,  1280,  1350,  1453,  1513,
+  1564,  1625,  1671,  1741,  1788,  1904,  2072,  2421,  2626,  2770,
+  2840,  2900,  2942,  3012,  3068,  3115,  3147,  3194,  3254,  3319,
+  3366,  3520,  3678,  3780,  3850,  3911,  3957,  4032,  4106,  4185,
+  4292,  4474,  4683,  4842,  5019,  5191,  5321,  5428,  5540,  5675,
+  5763,  5847,  5959,  6127,  6304,  6564,  6839,  7090,  7263,  7421,
+  7556,  7728,  7872,  7984,  8142,  8361,  8580,  8743,  8938,  9227,
+  9409,  9539,  9674,  9795,  9930,  10060,  10177,  10382,  10614,  10861,
+  11038,  11271,  11415,  11629,  11792,  12044,  12193,  12416,  12574,  12821,
+  13007,  13235,  13445,  13654,  13901,  14134,  14488,  15000,  15703,  16285,
+  16504,  16797,  17086,  17328,  17579,  17807,  17998,  18268,  18538,  18836,
+  19087,  19274,  19474,  19716,  19935,  20270,  20833,  21303,  21532,  21741,
+  21978,  22207,  22523,  22770,  23054,  23613,  23943,  24204,  24399,  24651,
+  24832,  25074,  25270,  25549,  25759,  26015,  26150,  26424,  26713,  27048,
+  27342,  27504,  27681,  27854,  28021,  28207,  28412,  28664,  28859,  29064,
+  29278,  29548,  29748,  30107,  30377,  30656,  30856,  31164,  31452,  31755,
+  32011,  32328,  32626,  32919,  33319,  33789,  34329,  34925,  35396,  35973,
+  36443,  36964,  37551,  38156,  38724,  39357,  40023,  40908,  41587,  42602,
+  43924,  45037,  45810,  46597,  47421,  48291,  49092,  50051,  51448,  52719,
+  53440,  54241,  54944,  55977,  56676,  57299,  57872,  58389,  59059,  59688,
+  60237,  60782,  61094,  61573,  61890,  62290,  62658,  63030,  63217,  63454,
+  63622,  63882,  64003,  64273,  64427,  64529,  64581,  64697,  64758,  64902,
+  65414,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Mid[36] = {
+  0,  71,  335,  581,  836,  1039,  1323,  1795,  2258,  2608,
+  3005,  3591,  4243,  5344,  7163,  10583,  16848,  28078,  49448,  57007,
+  60357,  61850,  62837,  63437,  63872,  64188,  64377,  64614,  64774,  64949,
+  65039,  65115,  65223,  65360,  65474,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Mid[2] = {
+  0,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Mid[20] = {
+  0,  28,  246,  459,  667,  1045,  1523,  2337,  4337,  11347,
+  44231,  56709,  60781,  62243,  63161,  63969,  64608,  65062,  65502,  65535
+};
+
+/* one cdf per coded lag component */
+const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrMid[4] = {
+  WebRtcIsacfix_kPitchLagCdf1Mid,
+  WebRtcIsacfix_kPitchLagCdf2Mid,
+  WebRtcIsacfix_kPitchLagCdf3Mid,
+  WebRtcIsacfix_kPitchLagCdf4Mid
+};
+
+/* size of first cdf table */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeMid[1] = {
+  256
+};
+
+/* index limits and ranges */
+const WebRtc_Word16 WebRtcIsacfix_kLowerLimitMid[4] = {
+  -280, -17,  0, -9
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kUpperLimitMid[4] = {
+  -40,  17,  0,  9
+};
+
+/* initial index for arithmetic decoder */
+const WebRtc_UWord16 WebRtcIsacfix_kInitIndMid[3] = {
+  18,  1,  10
+};
+
+/* mean values of pitch filter lags in Q10 */
+
+const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Mid[35] = {
+  -17297, -16250, -15416, -14343, -13341, -12363, -11270,
+  -10355, -9122, -8217, -7172, -6083, -5102, -4004, -3060,
+  -1982, -952, -18, 935, 1976, 3040, 4032,
+  5082, 6065, 7257, 8202, 9264, 10225, 11242,
+  12234, 13337, 14336, 15374, 16187, 17347
+};
+
+
+const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Mid[19] = {
+  -8811, -8081, -7203, -6003, -5057, -4025, -2983, -1964,
+  -891, 29, 921, 1920, 2988, 4064, 5187, 6079, 7173, 8074, 8849
+};
+
+
+/* tables for use with large pitch gain */
+
+/* cdf for quantized pitch filter lags */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Hi[511] = {
+  0,  7,  18,  33,  69,  105,  156,  228,  315,  612,
+  680,  691,  709,  724,  735,  738,  742,  746,  749,  753,
+  756,  760,  764,  774,  782,  785,  789,  796,  800,  803,
+  807,  814,  818,  822,  829,  832,  847,  854,  858,  869,
+  876,  883,  898,  908,  934,  977,  1010,  1050,  1060,  1064,
+  1075,  1078,  1086,  1089,  1093,  1104,  1111,  1122,  1133,  1136,
+  1151,  1162,  1183,  1209,  1252,  1281,  1339,  1364,  1386,  1401,
+  1411,  1415,  1426,  1430,  1433,  1440,  1448,  1455,  1462,  1477,
+  1487,  1495,  1502,  1506,  1509,  1516,  1524,  1531,  1535,  1542,
+  1553,  1556,  1578,  1589,  1611,  1625,  1639,  1643,  1654,  1665,
+  1672,  1687,  1694,  1705,  1708,  1719,  1730,  1744,  1752,  1759,
+  1791,  1795,  1820,  1867,  1886,  1915,  1936,  1943,  1965,  1987,
+  2041,  2099,  2161,  2175,  2200,  2211,  2226,  2233,  2244,  2251,
+  2266,  2280,  2287,  2298,  2309,  2316,  2331,  2342,  2356,  2378,
+  2403,  2418,  2447,  2497,  2544,  2602,  2863,  2895,  2903,  2935,
+  2950,  2971,  3004,  3011,  3018,  3029,  3040,  3062,  3087,  3127,
+  3152,  3170,  3199,  3243,  3293,  3322,  3340,  3377,  3402,  3427,
+  3474,  3518,  3543,  3579,  3601,  3637,  3659,  3706,  3731,  3760,
+  3818,  3847,  3869,  3901,  3920,  3952,  4068,  4169,  4220,  4271,
+  4524,  4571,  4604,  4632,  4672,  4730,  4777,  4806,  4857,  4904,
+  4951,  5002,  5031,  5060,  5107,  5150,  5212,  5266,  5331,  5382,
+  5432,  5490,  5544,  5610,  5700,  5762,  5812,  5874,  5972,  6022,
+  6091,  6163,  6232,  6305,  6402,  6540,  6685,  6880,  7090,  7271,
+  7379,  7452,  7542,  7625,  7687,  7770,  7843,  7911,  7966,  8024,
+  8096,  8190,  8252,  8320,  8411,  8501,  8585,  8639,  8751,  8842,
+  8918,  8986,  9066,  9127,  9203,  9269,  9345,  9406,  9464,  9536,
+  9612,  9667,  9735,  9844,  9931,  10036,  10119,  10199,  10260,  10358,
+  10441,  10514,  10666,  10734,  10872,  10951,  11053,  11125,  11223,  11324,
+  11516,  11664,  11737,  11816,  11892,  12008,  12120,  12200,  12280,  12392,
+  12490,  12576,  12685,  12812,  12917,  13003,  13108,  13210,  13300,  13384,
+  13470,  13579,  13673,  13771,  13879,  13999,  14136,  14201,  14368,  14614,
+  14759,  14867,  14958,  15030,  15121,  15189,  15280,  15385,  15461,  15555,
+  15653,  15768,  15884,  15971,  16069,  16145,  16210,  16279,  16380,  16463,
+  16539,  16615,  16688,  16818,  16919,  17017,  18041,  18338,  18523,  18649,
+  18790,  18917,  19047,  19167,  19315,  19460,  19601,  19731,  19858,  20068,
+  20173,  20318,  20466,  20625,  20741,  20911,  21045,  21201,  21396,  21588,
+  21816,  22022,  22305,  22547,  22786,  23072,  23322,  23600,  23879,  24168,
+  24433,  24769,  25120,  25511,  25895,  26289,  26792,  27219,  27683,  28077,
+  28566,  29094,  29546,  29977,  30491,  30991,  31573,  32105,  32594,  33173,
+  33788,  34497,  35181,  35833,  36488,  37255,  37921,  38645,  39275,  39894,
+  40505,  41167,  41790,  42431,  43096,  43723,  44385,  45134,  45858,  46607,
+  47349,  48091,  48768,  49405,  49955,  50555,  51167,  51985,  52611,  53078,
+  53494,  53965,  54435,  54996,  55601,  56125,  56563,  56838,  57244,  57566,
+  57967,  58297,  58771,  59093,  59419,  59647,  59886,  60143,  60461,  60693,
+  60917,  61170,  61416,  61634,  61891,  62122,  62310,  62455,  62632,  62839,
+  63103,  63436,  63639,  63805,  63906,  64015,  64192,  64355,  64475,  64558,
+  64663,  64742,  64811,  64865,  64916,  64956,  64981,  65025,  65068,  65115,
+  65195,  65314,  65419,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Hi[68] = {
+  0,  7,  11,  22,  37,  52,  56,  59,  81,  85,
+  89,  96,  115,  130,  137,  152,  170,  181,  193,  200,
+  207,  233,  237,  259,  289,  318,  363,  433,  592,  992,
+  1607,  3062,  6149,  12206,  25522,  48368,  58223,  61918,  63640,  64584,
+  64943,  65098,  65206,  65268,  65294,  65335,  65350,  65372,  65387,  65402,
+  65413,  65420,  65428,  65435,  65439,  65450,  65454,  65468,  65472,  65476,
+  65483,  65491,  65498,  65505,  65516,  65520,  65528,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Hi[2] = {  /* degenerate cdf (one interval spanning the full 0..65535 range): lag 3 carries no information in the large-gain case */
+  0,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Hi[35] = {
+  0,  7,  19,  30,  41,  48,  63,  74,  82,  96,
+  122,  152,  215,  330,  701,  2611,  10931,  48106,  61177,  64341,
+  65112,  65238,  65309,  65338,  65364,  65379,  65401,  65427,  65453,
+  65465,  65476,  65490,  65509,  65528,  65535
+};
+
+const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrHi[4] = {  /* one cdf table per quantized pitch sub-lag (large-pitch-gain case) */
+  WebRtcIsacfix_kPitchLagCdf1Hi,
+  WebRtcIsacfix_kPitchLagCdf2Hi,
+  WebRtcIsacfix_kPitchLagCdf3Hi,
+  WebRtcIsacfix_kPitchLagCdf4Hi
+};
+
+/* size of first cdf table; NOTE(review): kPitchLagCdf1Hi is declared [511], so 512 presumably counts entries + 1 or symbols — confirm against decoder usage */
+const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeHi[1] = {
+  512
+};
+
+/* lower/upper index limits per quantized pitch sub-lag (4 lags, large-pitch-gain case) */
+const WebRtc_Word16 WebRtcIsacfix_kLowerLimitHi[4] = {
+  -552, -34,  0, -16
+};
+
+const WebRtc_Word16 WebRtcIsacfix_kUpperLimitHi[4] = {
+  -80,  32,  0,  17
+};
+
+/* initial indices for the arithmetic decoder's cdf table search (3 entries; presumably lags 2-4 since lag 3's cdf is degenerate — confirm against decoder) */
+const WebRtc_UWord16 WebRtcIsacfix_kInitIndHi[3] = {
+  34,  1,  18
+};
+
+/* mean values of pitch filter lags */
+
+const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Hi[67] = {
+  -17482, -16896, -16220, -15929, -15329, -14848, -14336, -13807, -13312, -12800, -12218, -11720,
+  -11307, -10649, -10396, -9742, -9148, -8668, -8297, -7718, -7155, -6656, -6231, -5600, -5129,
+  -4610, -4110, -3521, -3040, -2525, -2016, -1506, -995, -477, -5, 469, 991, 1510, 2025, 2526, 3079,
+  3555, 4124, 4601, 5131, 5613, 6194, 6671, 7140, 7645, 8207, 8601, 9132, 9728, 10359, 10752, 11302,
+  11776, 12288, 12687, 13204, 13759, 14295, 14810, 15360, 15764, 16350
+};
+
+
+const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Hi[34] = {
+  -8175, -7659, -7205, -6684, -6215, -5651, -5180, -4566, -4087, -3536, -3096,
+  -2532, -1990, -1482, -959, -440, 11, 451, 954, 1492, 2020, 2562, 3059,
+  3577, 4113, 4618, 5134, 5724, 6060, 6758, 7015, 7716, 8066, 8741
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_lag_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_lag_tables.h
new file mode 100644
index 0000000..9517c29
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/pitch_lag_tables.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_lag_tables.h
+ *
+ * This file contains tables for the pitch filter side-info in the entropy coder.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_
+
+
+#include "typedefs.h"
+
+
+/********************* Pitch Filter Lag Coefficient Tables ************************/
+
+/* tables for use with small pitch gain */
+
+/* cdfs for quantized pitch lags */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Lo[127];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Lo[20];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Lo[2];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Lo[10];
+
+extern const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrLo[4];
+
+/* size of first cdf table */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeLo[1];
+
+/* index limits and ranges */
+extern const WebRtc_Word16 WebRtcIsacfix_kLowerLimitLo[4];
+extern const WebRtc_Word16 WebRtcIsacfix_kUpperLimitLo[4];
+
+/* initial index for arithmetic decoder */
+extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndLo[3];
+
+/* mean values of pitch filter lags */
+extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Lo[19];
+extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Lo[9];
+
+
+
+/* tables for use with medium pitch gain */
+
+/* cdfs for quantized pitch lags */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Mid[255];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Mid[36];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Mid[2];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Mid[20];
+
+extern const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrMid[4];
+
+/* size of first cdf table */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeMid[1];
+
+/* index limits and ranges */
+extern const WebRtc_Word16 WebRtcIsacfix_kLowerLimitMid[4];
+extern const WebRtc_Word16 WebRtcIsacfix_kUpperLimitMid[4];
+
+/* initial index for arithmetic decoder */
+extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndMid[3];
+
+/* mean values of pitch filter lags */
+extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Mid[35];
+extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Mid[19];
+
+
+/* tables for use with large pitch gain */
+
+/* cdfs for quantized pitch lags */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Hi[511];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Hi[68];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Hi[2];
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Hi[35];
+
+extern const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrHi[4];
+
+/* size of first cdf table */
+extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeHi[1];
+
+/* index limits and ranges */
+extern const WebRtc_Word16 WebRtcIsacfix_kLowerLimitHi[4];
+extern const WebRtc_Word16 WebRtcIsacfix_kUpperLimitHi[4];
+
+/* initial index for arithmetic decoder */
+extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndHi[3];
+
+/* mean values of pitch filter lags */
+extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Hi[67];
+extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Hi[34];
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/settings.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/settings.h
new file mode 100644
index 0000000..da88ba2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/settings.h
@@ -0,0 +1,205 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * settings.h
+ *
+ * Declaration of #defines used in the iSAC codec
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_
+
+
+/* sampling frequency (Hz) */
+#define FS                                      16000
+/* 1.5 times Sampling frequency */
+#define FS_1_HALF        (WebRtc_UWord32) 24000
+/* Three times Sampling frequency */
+#define FS3          (WebRtc_UWord32) 48000
+/* Eight times Sampling frequency */
+#define FS8          (WebRtc_UWord32) 128000
+
+/* number of samples per frame (either 480 (30ms) or 960 (60ms)) */
+#define INITIAL_FRAMESAMPLES     960
+
+/* milliseconds */
+#define FRAMESIZE                               30
+/* number of samples per frame processed in the encoder (30ms) */
+#define FRAMESAMPLES                            480     /* ((FRAMESIZE*FS)/1000) */
+#define FRAMESAMPLES_HALF       240
+/* max number of samples per frame (= 60 ms frame) */
+#define MAX_FRAMESAMPLES      960
+/* number of samples per 10ms frame */
+#define FRAMESAMPLES_10ms                       160      /* ((10*FS)/1000) */
+/* Number of samples per 1 ms */
+#define SAMPLES_PER_MSEC      16
+/* number of subframes */
+#define SUBFRAMES                               6
+/* length of a subframe */
+#define UPDATE                                  80
+/* length of half a subframe (low/high band) */
+#define HALF_SUBFRAMELEN                        40    /* (UPDATE/2) */
+/* samples of look ahead (in a half-band, so actually half the samples of look ahead @ FS) */
+#define QLOOKAHEAD                              24    /* 3 ms */
+
+/* order of AR model in spectral entropy coder */
+#define AR_ORDER                                6
+#define MAX_ORDER                               13
+#define LEVINSON_MAX_ORDER                  12
+
+/* window length (masking analysis) */
+#define WINLEN                                  256
+/* order of low-band pole filter used to approximate masking curve */
+#define ORDERLO                                 12
+/* order of hi-band pole filter used to approximate masking curve */
+#define ORDERHI                                 6
+
+#define KLT_NUM_AVG_GAIN                        0
+#define KLT_NUM_AVG_SHAPE                       0
+#define KLT_NUM_MODELS                          3
+#define LPC_SHAPE_ORDER                         18    /* (ORDERLO + ORDERHI) */
+
+#define KLT_ORDER_GAIN                          12    /* (2 * SUBFRAMES) */
+#define KLT_ORDER_SHAPE                         108   /*  (LPC_SHAPE_ORDER * SUBFRAMES) */
+
+
+
+/* order for post_filter_bank */
+#define POSTQORDER                              3
+/* order for pre-filterbank */
+#define QORDER                                  3
+/* for decimator */
+#define ALLPASSSECTIONS                         2
+/* The number of composite all-pass filter factors */
+#define NUMBEROFCOMPOSITEAPSECTIONS             4
+
+/* The number of all-pass filter factors in an upper or lower channel*/
+#define NUMBEROFCHANNELAPSECTIONS               2
+
+
+
+#define DPMIN_Q10                            -10240   /* -10.00 in Q10 */
+#define DPMAX_Q10                             10240   /* 10.00 in Q10 */
+#define MINBITS_Q10                           10240   /* 10.0 in Q10 */
+
+
+/* array size for byte stream in number of Word16. */
+#define STREAM_MAXW16       300 /* The old maximum size still needed for the decoding */
+#define STREAM_MAXW16_30MS  100 /* 100 Word16 = 200 bytes = 53.4 kbit/s @ 30 ms.framelength */
+#define STREAM_MAXW16_60MS  200 /* 200 Word16 = 400 bytes = 53.4 kbit/s @ 60 ms.framelength */
+
+
+/* storage size for bit counts */
+//#define BIT_COUNTER_SIZE                        30
+/* maximum order of any AR model or filter */
+#define MAX_AR_MODEL_ORDER                      12
+
+/* Maximum number of iterations allowed to limit payload size */
+#define MAX_PAYLOAD_LIMIT_ITERATION           1
+
+/* Bandwidth estimator */
+
+#define MIN_ISAC_BW                           10000     /* Minimum bandwidth in bits per sec */
+#define MAX_ISAC_BW                           32000     /* Maximum bandwidth in bits per sec */
+#define MIN_ISAC_MD                           5         /* Minimum Max Delay in ?? */
+#define MAX_ISAC_MD                           25        /* Maximum Max Delay in ?? */
+#define DELAY_CORRECTION_MAX      717
+#define DELAY_CORRECTION_MED      819
+#define Thld_30_60         18000
+#define Thld_60_30         27000
+
+/* assumed header size; we don't know the exact number (header compression may be used) */
+#define HEADER_SIZE                           35       /* bytes */
+#define INIT_FRAME_LEN                        60
+#define INIT_BN_EST                           20000
+#define INIT_BN_EST_Q7                        2560000  /* 20 kbps in Q7 */
+#define INIT_REC_BN_EST_Q5                    789312   /* INIT_BN_EST + INIT_HDR_RATE in Q5 */
+
+/* 8738 in Q18 is ~ 1/30 */
+/* #define INIT_HDR_RATE (((HEADER_SIZE * 8 * 1000) * 8738) >> NUM_BITS_TO_SHIFT (INIT_FRAME_LEN)) */
+#define INIT_HDR_RATE                    4666
+/* number of packets in a row for a high rate burst */
+#define BURST_LEN                             3
+/* ms, max time between two full bursts */
+#define BURST_INTERVAL                        800
+/* number of packets in a row for initial high rate burst */
+#define INIT_BURST_LEN                        5
+/* bits/s, rate for the first BURST_LEN packets */
+#define INIT_RATE                             10240000 /* INIT_BN_EST in Q9 */
+
+
+/* For pitch analysis */
+#define PITCH_FRAME_LEN                         240  /* (FRAMESAMPLES/2) 30 ms  */
+#define PITCH_MAX_LAG                           140       /* 57 Hz  */
+#define PITCH_MIN_LAG                           20                /* 400 Hz */
+#define PITCH_MIN_LAG_Q8                        5120 /* 256 * PITCH_MIN_LAG */
+#define OFFSET_Q8                               768  /* 256 * 3 */
+
+#define PITCH_MAX_GAIN_Q12      1843                  /* 0.45 */
+#define PITCH_LAG_SPAN2                         65   /* (PITCH_MAX_LAG/2-PITCH_MIN_LAG/2+5) */
+#define PITCH_CORR_LEN2                         60     /* 15 ms  */
+#define PITCH_CORR_STEP2                        60   /* (PITCH_FRAME_LEN/4) */
+#define PITCH_SUBFRAMES                         4
+#define PITCH_SUBFRAME_LEN                      60   /* (PITCH_FRAME_LEN/PITCH_SUBFRAMES) */
+
+/* For pitch filter */
+#define PITCH_BUFFSIZE                   190  /* (PITCH_MAX_LAG + 50) Extra 50 for fraction and LP filters */
+#define PITCH_INTBUFFSIZE               430  /* (PITCH_FRAME_LEN+PITCH_BUFFSIZE) */
+#define PITCH_FRACS                             8
+#define PITCH_FRACORDER                         9
+#define PITCH_DAMPORDER                         5
+
+
+/* Order of high pass filter */
+#define HPORDER                                 2
+
+
+/* PLC */
+#define DECAY_RATE               10               /* Q15, 20% of decay every lost frame applied linearly sample by sample*/
+#define PLC_WAS_USED              1
+#define PLC_NOT_USED              3
+#define RECOVERY_OVERLAP         80
+#define RESAMP_RES              256
+#define RESAMP_RES_BIT            8
+
+
+
+/* Define Error codes */
+/* 6000 General */
+#define ISAC_MEMORY_ALLOCATION_FAILED    6010
+#define ISAC_MODE_MISMATCH       6020
+#define ISAC_DISALLOWED_BOTTLENECK     6030
+#define ISAC_DISALLOWED_FRAME_LENGTH    6040
+/* 6200 Bandwidth estimator */
+#define ISAC_RANGE_ERROR_BW_ESTIMATOR    6240
+/* 6400 Encoder */
+#define ISAC_ENCODER_NOT_INITIATED     6410
+#define ISAC_DISALLOWED_CODING_MODE     6420
+#define ISAC_DISALLOWED_FRAME_MODE_ENCODER   6430
+#define ISAC_DISALLOWED_BITSTREAM_LENGTH            6440
+#define ISAC_PAYLOAD_LARGER_THAN_LIMIT              6450
+/* 6600 Decoder */
+#define ISAC_DECODER_NOT_INITIATED     6610
+#define ISAC_EMPTY_PACKET       6620
+#define ISAC_DISALLOWED_FRAME_MODE_DECODER   6630
+#define ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH  6640
+#define ISAC_RANGE_ERROR_DECODE_BANDWIDTH   6650
+#define ISAC_RANGE_ERROR_DECODE_PITCH_GAIN   6660
+#define ISAC_RANGE_ERROR_DECODE_PITCH_LAG   6670
+#define ISAC_RANGE_ERROR_DECODE_LPC     6680
+#define ISAC_RANGE_ERROR_DECODE_SPECTRUM   6690
+#define ISAC_LENGTH_MISMATCH      6730
+/* 6800 Call setup formats */
+#define ISAC_INCOMPATIBLE_FORMATS     6810
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/spectrum_ar_model_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/spectrum_ar_model_tables.c
new file mode 100644
index 0000000..81b932f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/spectrum_ar_model_tables.c
@@ -0,0 +1,193 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * spectrum_ar_model_tables.c
+ *
+ * This file contains tables with AR coefficients, Gain coefficients
+ * and cosine tables.
+ *
+ */
+
+#include "spectrum_ar_model_tables.h"
+#include "settings.h"
+
+/********************* AR Coefficient Tables ************************/
+
+/* cdf for quantized reflection coefficient 1 */
+const WebRtc_UWord16 WebRtcIsacfix_kRc1Cdf[12] = {
+  0,  2,  4,  129,  7707,  57485,  65495,  65527,  65529,  65531,
+  65533,  65535
+};
+
+/* cdf for quantized reflection coefficient 2 */
+const WebRtc_UWord16 WebRtcIsacfix_kRc2Cdf[12] = {
+  0,  2,  4,  7,  531,  25298,  64525,  65526,  65529,  65531,
+  65533,  65535
+};
+
+/* cdf for quantized reflection coefficient 3 */
+const WebRtc_UWord16 WebRtcIsacfix_kRc3Cdf[12] = {
+  0,  2,  4,  6,  620,  22898,  64843,  65527,  65529,  65531,
+  65533,  65535
+};
+
+/* cdf for quantized reflection coefficient 4 */
+const WebRtc_UWord16 WebRtcIsacfix_kRc4Cdf[12] = {
+  0,  2,  4,  6,  35,  10034,  60733,  65506,  65529,  65531,
+  65533,  65535
+};
+
+/* cdf for quantized reflection coefficient 5 */
+const WebRtc_UWord16 WebRtcIsacfix_kRc5Cdf[12] = {
+  0,  2,  4,  6,  36,  7567,  56727,  65385,  65529,  65531,
+  65533,  65535
+};
+
+/* cdf for quantized reflection coefficient 6 */
+const WebRtc_UWord16 WebRtcIsacfix_kRc6Cdf[12] = {
+  0,  2,  4,  6,  14,  6579,  57360,  65409,  65529,  65531,
+  65533,  65535
+};
+
+/* representation levels for quantized reflection coefficient 1 */
+const WebRtc_Word16 WebRtcIsacfix_kRc1Levels[11] = {
+  -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 2 */
+const WebRtc_Word16 WebRtcIsacfix_kRc2Levels[11] = {
+  -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 3 */
+const WebRtc_Word16 WebRtcIsacfix_kRc3Levels[11] = {
+  -32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 4 */
+const WebRtc_Word16 WebRtcIsacfix_kRc4Levels[11] = {
+  -32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 5 */
+const WebRtc_Word16 WebRtcIsacfix_kRc5Levels[11] = {
+  -32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 6 */
+const WebRtc_Word16 WebRtcIsacfix_kRc6Levels[11] = {
+  -32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104
+};
+
+/* quantization boundary levels for reflection coefficients */
+const WebRtc_Word16 WebRtcIsacfix_kRcBound[12] = {
+  -32768, -31441, -27566, -21458, -13612, -4663,
+  4663, 13612, 21458, 27566, 31441, 32767
+};
+
+/* initial index (one per AR_ORDER = 6 reflection coefficient) for the AR quantizer and cdf table search */
+const WebRtc_UWord16 WebRtcIsacfix_kRcInitInd[6] = {
+  5,  5,  5,  5,  5,  5
+};
+
+/* pointers to the AR_ORDER (= 6) reflection-coefficient cdf tables defined above */
+const WebRtc_UWord16 *WebRtcIsacfix_kRcCdfPtr[AR_ORDER] = {
+  WebRtcIsacfix_kRc1Cdf,
+  WebRtcIsacfix_kRc2Cdf,
+  WebRtcIsacfix_kRc3Cdf,
+  WebRtcIsacfix_kRc4Cdf,
+  WebRtcIsacfix_kRc5Cdf,
+  WebRtcIsacfix_kRc6Cdf
+};
+
+/* pointers to the AR_ORDER (= 6) reflection-coefficient representation-level tables defined above */
+const WebRtc_Word16 *WebRtcIsacfix_kRcLevPtr[AR_ORDER] = {
+  WebRtcIsacfix_kRc1Levels,
+  WebRtcIsacfix_kRc2Levels,
+  WebRtcIsacfix_kRc3Levels,
+  WebRtcIsacfix_kRc4Levels,
+  WebRtcIsacfix_kRc5Levels,
+  WebRtcIsacfix_kRc6Levels
+};
+
+
+/******************** GAIN Coefficient Tables ***********************/
+
+/* cdf for Gain coefficient */
+const WebRtc_UWord16 WebRtcIsacfix_kGainCdf[19] = {
+  0,  2,  4,  6,  8,  10,  12,  14,  16,  1172,
+  11119,  29411,  51699,  64445,  65527,  65529,  65531,  65533,  65535
+};
+
+/* representation levels for quantized squared Gain coefficient */
+const WebRtc_Word32 WebRtcIsacfix_kGain2Lev[18] = {
+  128, 128, 128, 128, 128, 215, 364, 709, 1268,
+  1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000
+};
+
+/* quantization boundary levels for squared Gain coefficient */
+const WebRtc_Word32 WebRtcIsacfix_kGain2Bound[19] = {
+  0, 21, 35, 59, 99, 166, 280, 475, 815, 1414,
+  2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF
+};
+
+/* single-entry pointer table to the gain cdf (an array, presumably to match the cdf-pointer-table interface used elsewhere) */
+const WebRtc_UWord16 *WebRtcIsacfix_kGainPtr[1] = {
+  WebRtcIsacfix_kGainCdf
+};
+
+/* initial index for the gain quantizer and cdf table search */
+const WebRtc_UWord16 WebRtcIsacfix_kGainInitInd[1] = {
+  11
+};
+
+
+/************************* Cosine Tables ****************************/
+
+/* cosine table */
+const WebRtc_Word16 WebRtcIsacfix_kCos[6][60] = {
+  { 512,   512,   511,   510,   508,   507,   505,   502,   499,   496,
+        493,   489,   485,   480,   476,   470,   465,   459,   453,   447,
+ 440,   433,   426,   418,   410,   402,   394,   385,   376,   367,
+        357,   348,   338,   327,   317,   306,   295,   284,   273,   262,
+ 250,   238,   226,   214,   202,   190,   177,   165,   152,   139,
+        126,   113,   100,   87,   73,   60,   47,   33,   20,   7       },
+  { 512,   510,   508,   503,   498,   491,   483,   473,   462,   450,
+        437,   422,   406,   389,   371,   352,   333,   312,   290,   268,
+ 244,   220,   196,   171,   145,   120,   93,   67,   40,   13,
+        -13,   -40,   -67,   -93,   -120,   -145,   -171,   -196,   -220,   -244,
+ -268,   -290,   -312,   -333,   -352,   -371,   -389,   -406,   -422,   -437,
+        -450,   -462,   -473,   -483,   -491,   -498,   -503,   -508,   -510,   -512    },
+  { 512,   508,   502,   493,   480,   465,   447,   426,   402,   376,
+        348,   317,   284,   250,   214,   177,   139,   100,   60,   20,
+ -20,   -60,   -100,   -139,   -177,   -214,   -250,   -284,   -317,   -348,
+        -376,   -402,   -426,   -447,   -465,   -480,   -493,   -502,   -508,   -512,
+ -512,   -508,   -502,   -493,   -480,   -465,   -447,   -426,   -402,   -376,
+        -348,   -317,   -284,   -250,   -214,   -177,   -139,   -100,   -60,   -20     },
+  { 511,   506,   495,   478,   456,   429,   398,   362,   322,   279,
+        232,   183,   133,   80,   27,   -27,   -80,   -133,   -183,   -232,
+ -279,   -322,   -362,   -398,   -429,   -456,   -478,   -495,   -506,   -511,
+        -511,   -506,   -495,   -478,   -456,   -429,   -398,   -362,   -322,   -279,
+ -232,   -183,   -133,   -80,   -27,   27,   80,   133,   183,   232,
+        279,   322,   362,   398,   429,   456,   478,   495,   506,   511     },
+  { 511,   502,   485,   459,   426,   385,   338,   284,   226,   165,
+        100,   33,   -33,   -100,   -165,   -226,   -284,   -338,   -385,   -426,
+ -459,   -485,   -502,   -511,   -511,   -502,   -485,   -459,   -426,   -385,
+        -338,   -284,   -226,   -165,   -100,   -33,   33,   100,   165,   226,
+ 284,   338,   385,   426,   459,   485,   502,   511,   511,   502,
+        485,   459,   426,   385,   338,   284,   226,   165,   100,   33      },
+  { 510,   498,   473,   437,   389,   333,   268,   196,   120,   40,
+        -40,   -120,   -196,   -268,   -333,   -389,   -437,   -473,   -498,   -510,
+ -510,   -498,   -473,   -437,   -389,   -333,   -268,   -196,   -120,   -40,
+        40,   120,   196,   268,   333,   389,   437,   473,   498,   510,
+ 510,   498,   473,   437,   389,   333,   268,   196,   120,   40,
+        -40,   -120,   -196,   -268,   -333,   -389,   -437,   -473,   -498,   -510    }
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/spectrum_ar_model_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/spectrum_ar_model_tables.h
new file mode 100644
index 0000000..b506d0e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/spectrum_ar_model_tables.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * spectrum_ar_model_tables.h
+ *
+ * This file contains definitions of tables with AR coefficients,
+ * Gain coefficients and cosine tables.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_
+
+#include "typedefs.h"
+#include "settings.h"
+
+
+/********************* AR Coefficient Tables ************************/
+/* cdf for quantized reflection coefficient 1 */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRc1Cdf[12];
+
+/* cdf for quantized reflection coefficient 2 */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRc2Cdf[12];
+
+/* cdf for quantized reflection coefficient 3 */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRc3Cdf[12];
+
+/* cdf for quantized reflection coefficient 4 */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRc4Cdf[12];
+
+/* cdf for quantized reflection coefficient 5 */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRc5Cdf[12];
+
+/* cdf for quantized reflection coefficient 6 */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRc6Cdf[12];
+
+/* representation levels for quantized reflection coefficient 1 */
+extern const WebRtc_Word16 WebRtcIsacfix_kRc1Levels[11];
+
+/* representation levels for quantized reflection coefficient 2 */
+extern const WebRtc_Word16 WebRtcIsacfix_kRc2Levels[11];
+
+/* representation levels for quantized reflection coefficient 3 */
+extern const WebRtc_Word16 WebRtcIsacfix_kRc3Levels[11];
+
+/* representation levels for quantized reflection coefficient 4 */
+extern const WebRtc_Word16 WebRtcIsacfix_kRc4Levels[11];
+
+/* representation levels for quantized reflection coefficient 5 */
+extern const WebRtc_Word16 WebRtcIsacfix_kRc5Levels[11];
+
+/* representation levels for quantized reflection coefficient 6 */
+extern const WebRtc_Word16 WebRtcIsacfix_kRc6Levels[11];
+
+/* quantization boundary levels for reflection coefficients */
+extern const WebRtc_Word16 WebRtcIsacfix_kRcBound[12];
+
+/* initial indices for AR reflection coefficient quantizer and cdf table search */
+extern const WebRtc_UWord16 WebRtcIsacfix_kRcInitInd[AR_ORDER];
+
+/* pointers to AR cdf tables */
+extern const WebRtc_UWord16 *WebRtcIsacfix_kRcCdfPtr[AR_ORDER];
+
+/* pointers to AR representation levels tables */
+extern const WebRtc_Word16 *WebRtcIsacfix_kRcLevPtr[AR_ORDER];
+
+
+/******************** GAIN Coefficient Tables ***********************/
+/* cdf for Gain coefficient */
+extern const WebRtc_UWord16 WebRtcIsacfix_kGainCdf[19];
+
+/* representation levels for quantized Gain coefficient */
+extern const WebRtc_Word32 WebRtcIsacfix_kGain2Lev[18];
+
+/* squared quantization boundary levels for Gain coefficient */
+extern const WebRtc_Word32 WebRtcIsacfix_kGain2Bound[19];
+
+/* pointer to Gain cdf table */
+extern const WebRtc_UWord16 *WebRtcIsacfix_kGainPtr[1];
+
+/* Gain initial index for gain quantizer and cdf table search */
+extern const WebRtc_UWord16 WebRtcIsacfix_kGainInitInd[1];
+
+/************************* Cosine Tables ****************************/
+/* Cosine table */
+extern const WebRtc_Word16 WebRtcIsacfix_kCos[6][60];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/structs.h b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/structs.h
new file mode 100644
index 0000000..54dffa9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/structs.h
@@ -0,0 +1,382 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * structs.h
+ *
+ * This header file contains all the structs used in the ISAC codec
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_
+
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+#include "settings.h"
+
+/* Bitstream struct for decoder */
+typedef struct Bitstreamstruct_dec {
+
+  WebRtc_UWord16  *stream;          /* Pointer to bytestream to decode */
+  WebRtc_UWord32  W_upper;          /* Upper boundary of interval W */
+  WebRtc_UWord32  streamval;        /* Current value read from the stream (paired with W_upper) */
+  WebRtc_UWord16  stream_index;     /* Index to the current position in bytestream */
+  WebRtc_Word16   full;             /* 0 - first byte in memory filled, second empty */
+  /* 1 - both bytes are empty (we just filled the previous memory) */
+
+} Bitstr_dec;
+
+/* Bitstream struct for encoder */
+typedef struct Bitstreamstruct_enc {
+
+  WebRtc_UWord16  stream[STREAM_MAXW16_60MS];   /* Vector for adding encoded bytestream */
+  WebRtc_UWord32  W_upper;          /* Upper boundary of interval W */
+  WebRtc_UWord32  streamval;        /* Current value written to the stream (paired with W_upper) */
+  WebRtc_UWord16  stream_index;     /* Index to the current position in bytestream */
+  WebRtc_Word16   full;             /* 0 - first byte in memory filled, second empty */
+  /* 1 - both bytes are empty (we just filled the previous memory) */
+
+} Bitstr_enc;
+
+
+typedef struct {
+  /* Encoder-side masking-filter state; field names encode band (Lo/Hi) and Q-format. */
+  WebRtc_Word16 DataBufferLoQ0[WINLEN];
+  WebRtc_Word16 DataBufferHiQ0[WINLEN];
+
+  WebRtc_Word32 CorrBufLoQQ[ORDERLO+1];    /* variable Q-domain -- see CorrBufLoQdom */
+  WebRtc_Word32 CorrBufHiQQ[ORDERHI+1];    /* variable Q-domain -- see CorrBufHiQdom */
+
+  WebRtc_Word16 CorrBufLoQdom[ORDERLO+1];  /* presumably per-entry Q-domain of CorrBufLoQQ -- TODO confirm */
+  WebRtc_Word16 CorrBufHiQdom[ORDERHI+1];  /* presumably per-entry Q-domain of CorrBufHiQQ -- TODO confirm */
+
+  WebRtc_Word32 PreStateLoGQ15[ORDERLO+1];
+  WebRtc_Word32 PreStateHiGQ15[ORDERHI+1];
+
+  WebRtc_UWord32 OldEnergy;                /* NOTE(review): looks like previous-frame energy -- confirm */
+
+} MaskFiltstr_enc;
+
+
+
+typedef struct {
+  /* Decoder-side masking-filter state; see MaskFiltstr_enc for the encoder twin. */
+  WebRtc_Word16 PostStateLoGQ0[ORDERLO+1];
+  WebRtc_Word16 PostStateHiGQ0[ORDERHI+1];
+
+  WebRtc_UWord32 OldEnergy;                /* NOTE(review): looks like previous-frame energy -- confirm */
+
+} MaskFiltstr_dec;
+
+
+
+
+
+
+
+
+typedef struct {
+  /* Encoder-side (pre-)filter-bank state -- embedded in ISACFIX_EncInst_t. */
+  //state vectors for each of the two analysis filters
+
+  WebRtc_Word32 INSTAT1_fix[2*(QORDER-1)];
+  WebRtc_Word32 INSTAT2_fix[2*(QORDER-1)];
+  WebRtc_Word16 INLABUF1_fix[QLOOKAHEAD];   /* look-ahead buffers */
+  WebRtc_Word16 INLABUF2_fix[QLOOKAHEAD];
+
+  /* High pass filter */
+  WebRtc_Word32 HPstates_fix[HPORDER];
+
+} PreFiltBankstr;
+
+
+typedef struct {
+  /* Decoder-side (post-)filter-bank state -- embedded in ISACFIX_DecInst_t. */
+  //state vectors for each of the two analysis filters
+  WebRtc_Word32 STATE_0_LOWER_fix[2*POSTQORDER];
+  WebRtc_Word32 STATE_0_UPPER_fix[2*POSTQORDER];
+
+  /* High pass filter */
+
+  WebRtc_Word32 HPstates1_fix[HPORDER];
+  WebRtc_Word32 HPstates2_fix[HPORDER];
+
+} PostFiltBankstr;
+
+typedef struct {
+
+  /* Pitch-filter state; used on both encoder and decoder sides
+     (see ISACFIX_EncInst_t / ISACFIX_DecInst_t). */
+  /* data buffer for pitch filter */
+  WebRtc_Word16 ubufQQ[PITCH_BUFFSIZE];
+
+  /* low pass state vector */
+  WebRtc_Word16 ystateQQ[PITCH_DAMPORDER];
+
+  /* old lag and gain */
+  WebRtc_Word16 oldlagQ7;
+  WebRtc_Word16 oldgainQ12;
+
+} PitchFiltstr;
+
+
+
+typedef struct {
+  /* Encoder-side pitch-analysis state. */
+  //for initial estimator
+  WebRtc_Word16   dec_buffer16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2];
+  WebRtc_Word32   decimator_state32[2*ALLPASSSECTIONS+1];
+  WebRtc_Word16   inbuf[QLOOKAHEAD];
+
+  PitchFiltstr  PFstr_wght;   /* weighted pitch-filter state */
+  PitchFiltstr  PFstr;
+
+
+} PitchAnalysisStruct;
+
+
+typedef struct {
+  /* Parameters used in PLC to avoid re-computation       */
+
+  /* --- residual signals --- */
+  WebRtc_Word16 prevPitchInvIn[FRAMESAMPLES/2];
+  WebRtc_Word16 prevPitchInvOut[PITCH_MAX_LAG + 10];            // [FRAMESAMPLES/2]; save 90
+  WebRtc_Word32 prevHP[PITCH_MAX_LAG + 10];                     // [FRAMESAMPLES/2]; save 90
+
+
+  WebRtc_Word16 decayCoeffPriodic; /* how much to suppress a sample */
+  WebRtc_Word16 decayCoeffNoise;
+  WebRtc_Word16 used;       /* if PLC is used */
+
+
+  WebRtc_Word16 *lastPitchLP;                                  // [FRAMESAMPLES/2]; saved 240;
+
+
+  /* --- LPC side info --- */
+  WebRtc_Word16 lofilt_coefQ15[ ORDERLO ];
+  WebRtc_Word16 hifilt_coefQ15[ ORDERHI ];
+  WebRtc_Word32 gain_lo_hiQ17[2];
+
+  /* --- LTP side info --- */
+  WebRtc_Word16 AvgPitchGain_Q12;
+  WebRtc_Word16 lastPitchGain_Q12;
+  WebRtc_Word16 lastPitchLag_Q7;
+
+  /* --- Add-overlap in recovery packet --- */
+  WebRtc_Word16 overlapLP[ RECOVERY_OVERLAP ];                 // [FRAMESAMPLES/2]; saved 160
+
+  WebRtc_Word16 pitchCycles;
+  WebRtc_Word16 A;
+  WebRtc_Word16 B;
+  WebRtc_Word16 pitchIndex;
+  WebRtc_Word16 stretchLag;
+  WebRtc_Word16 *prevPitchLP;                                  // [ FRAMESAMPLES/2 ]; saved 240
+  WebRtc_Word16 seed;
+
+  WebRtc_Word16 std;
+} PLCstr;
+
+
+
+/* Bandwidth-estimator state; one instance lives inside ISACFIX_SubStruct. */
+typedef struct {
+
+  WebRtc_Word16   prevFrameSizeMs;      /* Previous frame size (in ms) */
+  WebRtc_UWord16  prevRtpNumber;      /* Previous RTP timestamp from received packet */
+  /* (in samples relative beginning)  */
+  WebRtc_UWord32  prevSendTime;   /* Send time for previous packet, from RTP header */
+  WebRtc_UWord32  prevArrivalTime;      /* Arrival time for previous packet (in ms using timeGetTime()) */
+  WebRtc_UWord16  prevRtpRate;          /* rate of previous packet, derived from RTP timestamps (in bits/s) */
+  WebRtc_UWord32  lastUpdate;           /* Time since the last update of the Bottle Neck estimate (in samples) */
+  WebRtc_UWord32  lastReduction;        /* Time since the last reduction (in samples) */
+  WebRtc_Word32   countUpdates;         /* How many times the estimate was updated in the beginning */
+
+  /* The estimated bottle neck rate from there to here (in bits/s)                */
+  WebRtc_UWord32  recBw;
+  WebRtc_UWord32  recBwInv;
+  WebRtc_UWord32  recBwAvg;
+  WebRtc_UWord32  recBwAvgQ;
+
+  WebRtc_UWord32  minBwInv;
+  WebRtc_UWord32  maxBwInv;
+
+  /* The estimated mean absolute jitter value, as seen on this side (in ms)       */
+  WebRtc_Word32   recJitter;
+  WebRtc_Word32   recJitterShortTerm;
+  WebRtc_Word32   recJitterShortTermAbs;
+  WebRtc_Word32   recMaxDelay;
+  WebRtc_Word32   recMaxDelayAvgQ;
+
+
+  WebRtc_Word16   recHeaderRate;         /* (assumed) bitrate for headers (bps) */
+
+  WebRtc_UWord32  sendBwAvg;           /* The estimated bottle neck rate from here to there (in bits/s) */
+  WebRtc_Word32   sendMaxDelayAvg;    /* The estimated mean absolute jitter value, as seen on the other side (in ms)  */
+
+
+  WebRtc_Word16   countRecPkts;          /* number of packets received since last update */
+  WebRtc_Word16   highSpeedRec;        /* flag for marking that a high speed network has been detected downstream */
+
+  /* number of consecutive pkts sent during which the bwe estimate has
+     remained at a value greater than the downstream threshold for determining highspeed network */
+  WebRtc_Word16   countHighSpeedRec;
+
+  /* flag indicating bwe should not adjust down immediately for very late pckts */
+  WebRtc_Word16   inWaitPeriod;
+
+  /* variable holding the time of the start of a window of time when
+     bwe should not adjust down immediately for very late pckts */
+  WebRtc_UWord32  startWaitPeriod;
+
+  /* number of consecutive pkts sent during which the bwe estimate has
+     remained at a value greater than the upstream threshold for determining highspeed network */
+  WebRtc_Word16   countHighSpeedSent;
+
+  /* flag indicated the desired number of packets over threshold rate have been sent and
+     bwe will assume the connection is over broadband network */
+  WebRtc_Word16   highSpeedSend;
+
+
+
+
+} BwEstimatorstr;
+
+
+typedef struct {
+  /* Rate/burst model state; per-field units are noted below. */
+  /* boolean, flags if previous packet exceeded the bottleneck (B.N.) */
+  WebRtc_Word16    PrevExceed;
+  /* ms */
+  WebRtc_Word16    ExceedAgo;
+  /* packets left to send in current burst */
+  WebRtc_Word16    BurstCounter;
+  /* packets */
+  WebRtc_Word16    InitCounter;
+  /* ms remaining in buffer when next packet will be sent */
+  WebRtc_Word16    StillBuffered;
+
+} RateModel;
+
+/* The following struct is used to store data from encoding, to make it
+   fast and easy to construct a new bitstream with a different Bandwidth
+   estimate. All values (except framelength and minBytes) are double size to
+   handle 60 ms of data.
+*/
+typedef struct {
+
+  /* Used to keep track of if it is first or second part of 60 msec packet */
+  int     startIdx;
+
+  /* Frame length in samples */
+  WebRtc_Word16         framelength;
+
+  /* Pitch Gain */
+  WebRtc_Word16   pitchGain_index[2];
+
+  /* Pitch Lag */
+  WebRtc_Word32   meanGain[2];
+  WebRtc_Word16   pitchIndex[PITCH_SUBFRAMES*2];
+
+  /* LPC */
+  WebRtc_Word32         LPCcoeffs_g[12*2]; /* KLT_ORDER_GAIN = 12 */
+  WebRtc_Word16   LPCindex_s[108*2]; /* KLT_ORDER_SHAPE = 108 */
+  WebRtc_Word16   LPCindex_g[12*2];  /* KLT_ORDER_GAIN = 12 */
+
+  /* Encode Spec */
+  WebRtc_Word16   fre[FRAMESAMPLES];
+  WebRtc_Word16   fim[FRAMESAMPLES];
+  WebRtc_Word16   AvgPitchGain[2];
+
+  /* Used in adaptive mode only */
+  int     minBytes;
+
+} ISAC_SaveEncData_t;
+
+typedef struct {
+  /* Encoder instance: bitstream writer plus all encoder-side filter/analysis state. */
+  Bitstr_enc          bitstr_obj;
+  MaskFiltstr_enc     maskfiltstr_obj;
+  PreFiltBankstr      prefiltbankstr_obj;
+  PitchFiltstr        pitchfiltstr_obj;
+  PitchAnalysisStruct pitchanalysisstr_obj;
+  RateModel           rate_data_obj;
+
+  WebRtc_Word16         buffer_index;
+  WebRtc_Word16         current_framesamples;
+
+  WebRtc_Word16      data_buffer_fix[FRAMESAMPLES]; // the size was MAX_FRAMESAMPLES
+
+  WebRtc_Word16         frame_nb;
+  WebRtc_Word16         BottleNeck;
+  WebRtc_Word16         MaxDelay;
+  WebRtc_Word16         new_framelength;
+  WebRtc_Word16         s2nr;
+  WebRtc_UWord16        MaxBits;
+
+  WebRtc_Word16         bitstr_seed;
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  PostFiltBankstr     interpolatorstr_obj;
+#endif
+
+  ISAC_SaveEncData_t *SaveEnc_ptr;
+  WebRtc_Word16         payloadLimitBytes30; /* Maximum allowed number of bytes for a 30 msec packet */
+  WebRtc_Word16         payloadLimitBytes60; /* Maximum allowed number of bytes for a 60 msec packet */
+  WebRtc_Word16         maxPayloadBytes;     /* Maximum allowed number of bytes for both 30 and 60 msec packet */
+  WebRtc_Word16         maxRateInBytes;      /* Maximum allowed rate in bytes per 30 msec packet */
+  WebRtc_Word16         enforceFrameSize;    /* If set iSAC will never change packet size */
+
+} ISACFIX_EncInst_t;
+
+
+typedef struct {
+  /* Decoder instance: bitstream reader, filter states, and PLC state. */
+  Bitstr_dec          bitstr_obj;
+  MaskFiltstr_dec     maskfiltstr_obj;
+  PostFiltBankstr     postfiltbankstr_obj;
+  PitchFiltstr        pitchfiltstr_obj;
+  PLCstr              plcstr_obj;               /* TS; for packet loss concealment */
+
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+  PreFiltBankstr      decimatorstr_obj;
+#endif
+
+} ISACFIX_DecInst_t;
+
+
+
+typedef struct {
+  /* Top-level iSAC-fix instance: encoder + decoder + bandwidth estimator. */
+  ISACFIX_EncInst_t ISACenc_obj;
+  ISACFIX_DecInst_t ISACdec_obj;
+  BwEstimatorstr     bwestimator_obj;
+  WebRtc_Word16         CodingMode;       /* 0 = adaptive; 1 = instantaneous */
+  WebRtc_Word16   errorcode;
+  WebRtc_Word16   initflag;  /* 0 = nothing initiated; 1 = encoder or decoder */
+  /* not initiated; 2 = all initiated */
+} ISACFIX_SubStruct;
+
+
+typedef struct {
+  WebRtc_Word32   lpcGains[12];     /* 6 lower-band & 6 upper-band we may need to double it for 60*/
+  /* (see 60 ms note above) */
+  WebRtc_UWord32  W_upper;          /* Upper boundary of interval W */
+  WebRtc_UWord32  streamval;        /* Current stream value (paired with W_upper) */
+  WebRtc_UWord16  stream_index;     /* Index to the current position in bytestream */
+  WebRtc_Word16   full;             /* 0 - first byte in memory filled, second empty */
+  /* 1 - both bytes are empty (we just filled the previous memory) */
+  WebRtc_UWord16  beforeLastWord;
+  WebRtc_UWord16  lastWord;
+} transcode_obj;
+
+
+//Bitstr_enc myBitStr;
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/transform.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/transform.c
new file mode 100644
index 0000000..56ef9f2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/source/transform.c
@@ -0,0 +1,296 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * WebRtcIsacfix_kTransform.c
+ *
+ * Transform functions
+ *
+ */
+
+#include "fft.h"
+#include "codec.h"
+#include "settings.h"
+
+
+/* Cosine table 1 in Q14 (FRAMESAMPLES/2 = 240 entries; used for (de)modulation) */
+static const WebRtc_Word16 kCosTab1[FRAMESAMPLES/2] = {
+  16384,  16383,  16378,  16371,  16362,  16349,  16333,  16315,  16294,  16270,
+  16244,  16214,  16182,  16147,  16110,  16069,  16026,  15980,  15931,  15880,
+  15826,  15769,  15709,  15647,  15582,  15515,  15444,  15371,  15296,  15218,
+  15137,  15053,  14968,  14879,  14788,  14694,  14598,  14500,  14399,  14295,
+  14189,  14081,  13970,  13856,  13741,  13623,  13502,  13380,  13255,  13128,
+  12998,  12867,  12733,  12597,  12458,  12318,  12176,  12031,  11885,  11736,
+  11585,  11433,  11278,  11121,  10963,  10803,  10641,  10477,  10311,  10143,
+  9974,   9803,   9630,   9456,   9280,   9102,   8923,   8743,   8561,   8377,
+  8192,   8006,   7818,   7629,   7438,   7246,   7053,   6859,   6664,   6467,
+  6270,   6071,   5872,   5671,   5469,   5266,   5063,   4859,   4653,   4447,
+  4240,   4033,   3825,   3616,   3406,   3196,   2986,   2775,   2563,   2351,
+  2139,   1926,   1713,   1499,   1285,   1072,    857,    643,    429,    214,
+  0,   -214,   -429,   -643,   -857,  -1072,  -1285,  -1499,  -1713,  -1926,
+  -2139,  -2351,  -2563,  -2775,  -2986,  -3196,  -3406,  -3616,  -3825,  -4033,
+  -4240,  -4447,  -4653,  -4859,  -5063,  -5266,  -5469,  -5671,  -5872,  -6071,
+  -6270,  -6467,  -6664,  -6859,  -7053,  -7246,  -7438,  -7629,  -7818,  -8006,
+  -8192,  -8377,  -8561,  -8743,  -8923,  -9102,  -9280,  -9456,  -9630,  -9803,
+  -9974, -10143, -10311, -10477, -10641, -10803, -10963, -11121, -11278, -11433,
+  -11585, -11736, -11885, -12031, -12176, -12318, -12458, -12597, -12733, -12867,
+  -12998, -13128, -13255, -13380, -13502, -13623, -13741, -13856, -13970, -14081,
+  -14189, -14295, -14399, -14500, -14598, -14694, -14788, -14879, -14968, -15053,
+  -15137, -15218, -15296, -15371, -15444, -15515, -15582, -15647, -15709, -15769,
+  -15826, -15880, -15931, -15980, -16026, -16069, -16110, -16147, -16182, -16214,
+  -16244, -16270, -16294, -16315, -16333, -16349, -16362, -16371, -16378, -16383
+};
+
+
+/* Sine table 1 in Q14 (FRAMESAMPLES/2 = 240 entries; used for (de)modulation) */
+static const WebRtc_Word16 kSinTab1[FRAMESAMPLES/2] = {
+  0,   214,   429,   643,   857,  1072,  1285,  1499,  1713,  1926,
+  2139,  2351,  2563,  2775,  2986,  3196,  3406,  3616,  3825,  4033,
+  4240,  4447,  4653,  4859,  5063,  5266,  5469,  5671,  5872,  6071,
+  6270,  6467,  6664,  6859,  7053,  7246,  7438,  7629,  7818,  8006,
+  8192,  8377,  8561,  8743,  8923,  9102,  9280,  9456,  9630,  9803,
+  9974, 10143, 10311, 10477, 10641, 10803, 10963, 11121, 11278, 11433,
+  11585, 11736, 11885, 12031, 12176, 12318, 12458, 12597, 12733, 12867,
+  12998, 13128, 13255, 13380, 13502, 13623, 13741, 13856, 13970, 14081,
+  14189, 14295, 14399, 14500, 14598, 14694, 14788, 14879, 14968, 15053,
+  15137, 15218, 15296, 15371, 15444, 15515, 15582, 15647, 15709, 15769,
+  15826, 15880, 15931, 15980, 16026, 16069, 16110, 16147, 16182, 16214,
+  16244, 16270, 16294, 16315, 16333, 16349, 16362, 16371, 16378, 16383,
+  16384, 16383, 16378, 16371, 16362, 16349, 16333, 16315, 16294, 16270,
+  16244, 16214, 16182, 16147, 16110, 16069, 16026, 15980, 15931, 15880,
+  15826, 15769, 15709, 15647, 15582, 15515, 15444, 15371, 15296, 15218,
+  15137, 15053, 14968, 14879, 14788, 14694, 14598, 14500, 14399, 14295,
+  14189, 14081, 13970, 13856, 13741, 13623, 13502, 13380, 13255, 13128,
+  12998, 12867, 12733, 12597, 12458, 12318, 12176, 12031, 11885, 11736,
+  11585, 11433, 11278, 11121, 10963, 10803, 10641, 10477, 10311, 10143,
+  9974,  9803,  9630,  9456,  9280,  9102,  8923,  8743,  8561,  8377,
+  8192,  8006,  7818,  7629,  7438,  7246,  7053,  6859,  6664,  6467,
+  6270,  6071,  5872,  5671,  5469,  5266,  5063,  4859,  4653,  4447,
+  4240,  4033,  3825,  3616,  3406,  3196,  2986,  2775,  2563,  2351,
+  2139,  1926,  1713,  1499,  1285,  1072,   857,   643,   429,   214
+};
+
+
+/* Cosine table 2 in Q14 (FRAMESAMPLES/4 = 120 sign-alternating entries; symmetry stage) */
+static const WebRtc_Word16 kCosTab2[FRAMESAMPLES/4] = {
+  107,   -322,   536,   -750,   965,  -1179,  1392,  -1606,  1819,  -2032,
+  2245,  -2457,  2669,  -2880,  3091,  -3301,  3511,  -3720,  3929,  -4137,
+  4344,  -4550,  4756,  -4961,  5165,  -5368,  5570,  -5771,  5971,  -6171,
+  6369,  -6566,  6762,  -6957,  7150,  -7342,  7534,  -7723,  7912,  -8099,
+  8285,  -8469,  8652,  -8833,  9013,  -9191,  9368,  -9543,  9717,  -9889,
+  10059, -10227, 10394, -10559, 10722, -10883, 11042, -11200, 11356, -11509,
+  11661, -11810, 11958, -12104, 12247, -12389, 12528, -12665, 12800, -12933,
+  13063, -13192, 13318, -13441, 13563, -13682, 13799, -13913, 14025, -14135,
+  14242, -14347, 14449, -14549, 14647, -14741, 14834, -14924, 15011, -15095,
+  15178, -15257, 15334, -15408, 15480, -15549, 15615, -15679, 15739, -15798,
+  15853, -15906, 15956, -16003, 16048, -16090, 16129, -16165, 16199, -16229,
+  16257, -16283, 16305, -16325, 16342, -16356, 16367, -16375, 16381, -16384
+};
+
+
+/* Sine table 2 in Q14 (FRAMESAMPLES/4 = 120 sign-alternating entries; symmetry stage) */
+static const WebRtc_Word16 kSinTab2[FRAMESAMPLES/4] = {
+  16384, -16381, 16375, -16367, 16356, -16342, 16325, -16305, 16283, -16257,
+  16229, -16199, 16165, -16129, 16090, -16048, 16003, -15956, 15906, -15853,
+  15798, -15739, 15679, -15615, 15549, -15480, 15408, -15334, 15257, -15178,
+  15095, -15011, 14924, -14834, 14741, -14647, 14549, -14449, 14347, -14242,
+  14135, -14025, 13913, -13799, 13682, -13563, 13441, -13318, 13192, -13063,
+  12933, -12800, 12665, -12528, 12389, -12247, 12104, -11958, 11810, -11661,
+  11509, -11356, 11200, -11042, 10883, -10722, 10559, -10394, 10227, -10059,
+  9889,  -9717,  9543,  -9368,  9191,  -9013,  8833,  -8652,  8469,  -8285,
+  8099,  -7912,  7723,  -7534,  7342,  -7150,  6957,  -6762,  6566,  -6369,
+  6171,  -5971,  5771,  -5570,  5368,  -5165,  4961,  -4756,  4550,  -4344,
+  4137,  -3929,  3720,  -3511,  3301,  -3091,  2880,  -2669,  2457,  -2245,
+  2032,  -1819,  1606,  -1392,  1179,   -965,   750,   -536,   322,   -107
+};
+
+/* WebRtcIsacfix_Time2Spec(): time-to-frequency transform. The two input frames
+   inre1Q9/inre2Q9 (Q9) are clobbered (reused as FFT work buffers); outputs are outreQ7/outimQ7 (Q7). */
+void WebRtcIsacfix_Time2Spec(WebRtc_Word16 *inre1Q9,
+                             WebRtc_Word16 *inre2Q9,
+                             WebRtc_Word16 *outreQ7,
+                             WebRtc_Word16 *outimQ7)
+{
+
+  int k;
+  WebRtc_Word32 tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2];
+  WebRtc_Word16 tmp1rQ14, tmp1iQ14;
+  WebRtc_Word32 xrQ16, xiQ16, yrQ16, yiQ16;
+  WebRtc_Word32 v1Q16, v2Q16;
+  WebRtc_Word16 factQ19, sh;
+
+  /* Multiply with complex exponentials and combine into one complex vector */
+  factQ19 = 16921; // 0.5/sqrt(240) in Q19 is round(.5/sqrt(240)*(2^19)) = 16921
+  for (k = 0; k < FRAMESAMPLES/2; k++) {
+    tmp1rQ14 = kCosTab1[k];
+    tmp1iQ14 = kSinTab1[k];
+    xrQ16 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(tmp1rQ14, inre1Q9[k]) + WEBRTC_SPL_MUL_16_16(tmp1iQ14, inre2Q9[k]), 7);
+    xiQ16 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(tmp1rQ14, inre2Q9[k]) - WEBRTC_SPL_MUL_16_16(tmp1iQ14, inre1Q9[k]), 7);
+    tmpreQ16[k] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(factQ19, xrQ16)+4, 3); // (Q16*Q19>>16)>>3 = Q16
+    tmpimQ16[k] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_32_RSFT16(factQ19, xiQ16)+4, 3); // (Q16*Q19>>16)>>3 = Q16
+  }
+
+  /* Find the maximum absolute value to determine a normalization shift for the FFT input */
+  xrQ16  = WebRtcSpl_MaxAbsValueW32(tmpreQ16, FRAMESAMPLES/2);
+  yrQ16 = WebRtcSpl_MaxAbsValueW32(tmpimQ16, FRAMESAMPLES/2);
+  if (yrQ16>xrQ16) {
+    xrQ16 = yrQ16;
+  }
+
+  sh = WebRtcSpl_NormW32(xrQ16);
+  sh = sh-24; //if sh becomes >=0, then we should shift sh steps to the left, and the domain will become Q(16+sh)
+  //if sh becomes <0, then we should shift -sh steps to the right, and the domain will become Q(16+sh)
+
+  //"Fastest" vectors
+  if (sh>=0) {
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      inre1Q9[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(tmpreQ16[k], sh); //Q(16+sh)
+      inre2Q9[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(tmpimQ16[k], sh); //Q(16+sh)
+    }
+  } else {
+    WebRtc_Word32 round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, -sh-1);
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      inre1Q9[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpreQ16[k]+round, -sh); //Q(16+sh)
+      inre2Q9[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpimQ16[k]+round, -sh); //Q(16+sh)
+    }
+  }
+
+  /* Get DFT */
+  WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); // real call
+
+  //"Fastest" vectors
+  if (sh>=0) {
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      tmpreQ16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inre1Q9[k], sh); //Q(16+sh) -> Q16
+      tmpimQ16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inre2Q9[k], sh); //Q(16+sh) -> Q16
+    }
+  } else {
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      tmpreQ16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inre1Q9[k], -sh); //Q(16+sh) -> Q16
+      tmpimQ16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inre2Q9[k], -sh); //Q(16+sh) -> Q16
+    }
+  }
+
+
+  /* Use symmetry to separate into two complex vectors and center frames in time around zero */
+  for (k = 0; k < FRAMESAMPLES/4; k++) {
+    xrQ16 = tmpreQ16[k] + tmpreQ16[FRAMESAMPLES/2 - 1 - k];
+    yiQ16 = -tmpreQ16[k] + tmpreQ16[FRAMESAMPLES/2 - 1 - k];
+    xiQ16 = tmpimQ16[k] - tmpimQ16[FRAMESAMPLES/2 - 1 - k];
+    yrQ16 = tmpimQ16[k] + tmpimQ16[FRAMESAMPLES/2 - 1 - k];
+    tmp1rQ14 = kCosTab2[k];
+    tmp1iQ14 = kSinTab2[k];
+    v1Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xiQ16);
+    v2Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xiQ16);
+    outreQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(v1Q16, 9);
+    outimQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(v2Q16, 9);
+    v1Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yiQ16);
+    v2Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yiQ16);
+    outreQ7[FRAMESAMPLES/2 - 1 - k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(v1Q16, 9); //CalcLrIntQ(v1Q16, 9);
+    outimQ7[FRAMESAMPLES/2 - 1 - k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(v2Q16, 9); //CalcLrIntQ(v2Q16, 9);
+
+  }
+}
+/* WebRtcIsacfix_Spec2Time(): inverse of WebRtcIsacfix_Time2Spec. The spectrum inreQ7/inimQ7 (Q7)
+   is clobbered as scratch; the two recovered time-domain frames are written to outre1Q16/outre2Q16 (Q16). */
+void WebRtcIsacfix_Spec2Time(WebRtc_Word16 *inreQ7, WebRtc_Word16 *inimQ7, WebRtc_Word32 *outre1Q16, WebRtc_Word32 *outre2Q16)
+{
+
+  int k;
+  WebRtc_Word16 tmp1rQ14, tmp1iQ14;
+  WebRtc_Word32 xrQ16, xiQ16, yrQ16, yiQ16;
+  WebRtc_Word32 tmpInRe, tmpInIm, tmpInRe2, tmpInIm2;
+  WebRtc_Word16 factQ11;
+  WebRtc_Word16 sh;
+
+  for (k = 0; k < FRAMESAMPLES/4; k++) {
+    /* Move zero in time to beginning of frames */
+    tmp1rQ14 = kCosTab2[k];
+    tmp1iQ14 = kSinTab2[k];
+
+    tmpInRe = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inreQ7[k], 9);  // Q7 -> Q16
+    tmpInIm = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inimQ7[k], 9);  // Q7 -> Q16
+    tmpInRe2 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inreQ7[FRAMESAMPLES/2 - 1 - k], 9);  // Q7 -> Q16
+    tmpInIm2 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inimQ7[FRAMESAMPLES/2 - 1 - k], 9);  // Q7 -> Q16
+
+    xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm);
+    xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe);
+    yrQ16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm2) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe2);
+    yiQ16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe2) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm2);
+
+    /* Combine into one vector,  z = x + j * y */
+    outre1Q16[k] = xrQ16 - yiQ16;
+    outre1Q16[FRAMESAMPLES/2 - 1 - k] = xrQ16 + yiQ16;
+    outre2Q16[k] = xiQ16 + yrQ16;
+    outre2Q16[FRAMESAMPLES/2 - 1 - k] = -xiQ16 + yrQ16;
+  }
+
+  /* Get IDFT: first find a normalization shift so the FFT input fits in Word16 */
+  tmpInRe  = WebRtcSpl_MaxAbsValueW32(outre1Q16, FRAMESAMPLES/2);
+  tmpInIm = WebRtcSpl_MaxAbsValueW32(outre2Q16, FRAMESAMPLES/2);
+  if (tmpInIm>tmpInRe) {
+    tmpInRe = tmpInIm;
+  }
+
+  sh = WebRtcSpl_NormW32(tmpInRe);
+  sh = sh-24; //if sh becomes >=0, then we should shift sh steps to the left, and the domain will become Q(16+sh)
+  //if sh becomes <0, then we should shift -sh steps to the right, and the domain will become Q(16+sh)
+
+  //"Fastest" vectors
+  if (sh>=0) {
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      inreQ7[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(outre1Q16[k], sh); //Q(16+sh)
+      inimQ7[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(outre2Q16[k], sh); //Q(16+sh)
+    }
+  } else {
+    WebRtc_Word32 round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, -sh-1);
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      inreQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(outre1Q16[k]+round, -sh); //Q(16+sh)
+      inimQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(outre2Q16[k]+round, -sh); //Q(16+sh)
+    }
+  }
+
+  WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); // real call
+
+  //"Fastest" vectors: undo the normalization shift after the FFT
+  if (sh>=0) {
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      outre1Q16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inreQ7[k], sh); //Q(16+sh) -> Q16
+      outre2Q16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inimQ7[k], sh); //Q(16+sh) -> Q16
+    }
+  } else {
+    for (k=0; k<FRAMESAMPLES/2; k++) {
+      outre1Q16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inreQ7[k], -sh); //Q(16+sh) -> Q16
+      outre2Q16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inimQ7[k], -sh); //Q(16+sh) -> Q16
+    }
+  }
+
+  /* Divide through by the normalizing constant: */
+  /* scale all values with 1/240 (= 2/FRAMESAMPLES), i.e. with 273 in Q16 */
+  /* 273/65536 ~= 0.0041656                            */
+  /*     1/240 ~= 0.0041666                            */
+  for (k=0; k<FRAMESAMPLES/2; k++) {
+    outre1Q16[k] = WEBRTC_SPL_MUL_16_32_RSFT16(273, outre1Q16[k]);
+    outre2Q16[k] = WEBRTC_SPL_MUL_16_32_RSFT16(273, outre2Q16[k]);
+  }
+
+  /* Demodulate and separate */
+  factQ11 = 31727; // sqrt(240) in Q11 is round(15.49193338482967*2048) = 31727
+  for (k = 0; k < FRAMESAMPLES/2; k++) {
+    tmp1rQ14 = kCosTab1[k];
+    tmp1iQ14 = kSinTab1[k];
+    xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, outre1Q16[k]) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, outre2Q16[k]);
+    xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, outre2Q16[k]) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, outre1Q16[k]);
+    xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT11(factQ11, xrQ16);
+    xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT11(factQ11, xiQ16);
+    outre2Q16[k] = xiQ16;
+    outre1Q16[k] = xrQ16;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/ISACHist.cc b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/ISACHist.cc
new file mode 100644
index 0000000..753acd7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/ISACHist.cc
@@ -0,0 +1,173 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <ctype.h>
+
+//#include "isac_codec.h"
+//#include "isac_structs.h"
+#include "isacfix.h"
+
+
+#define NUM_CODECS 1
+
+int main(int argc, char* argv[])
+{
+    FILE *inFileList;
+    FILE *audioFile;
+    FILE *outFile;
+    char audioFileName[501];
+    short audioBuff[960];
+    short encoded[600];
+    short startAudio;
+    short encodedLen;
+    ISACFIX_MainStruct *isac_struct;
+    unsigned long int hist[601];   /* hist[L] = number of frames encoded to L bytes */
+
+    // reset the histogram
+    for(short n=0; n < 601; n++)
+    {
+        hist[n] = 0;
+    }
+    /* Usage: ISACHist <audio-file-list> <histogram-out> [frameSizeMsec] [audioOffsetSamples] */
+    /* NOTE(review): argv[1]/argv[2] are read without an argc check; too few args crashes here. */
+    inFileList = fopen(argv[1], "r");
+    if(inFileList == NULL)
+    {
+        printf("Could not open the input file.\n");
+        getchar();
+        exit(-1);
+    }
+    outFile = fopen(argv[2], "w");
+    if(outFile == NULL)
+    {
+        printf("Could not open the histogram file.\n");
+        getchar();
+        exit(-1);
+    }
+
+    short frameSizeMsec = 30;
+    if(argc > 3)
+    {
+        frameSizeMsec = atoi(argv[3]);
+    }
+
+    short audioOffset = 0;
+    if(argc > 4)
+    {
+        audioOffset = atoi(argv[4]);
+    }
+    int ok;
+    ok = WebRtcIsacfix_Create(&isac_struct);
+    // instantaneous mode
+    ok |= WebRtcIsacfix_EncoderInit(isac_struct, 1);
+    // is not used but initialize
+    ok |= WebRtcIsacfix_DecoderInit(isac_struct);
+    ok |= WebRtcIsacfix_Control(isac_struct, 32000, frameSizeMsec);
+
+    if(ok != 0)
+    {
+        printf("\nProblem in seting up iSAC\n");
+        exit(-1);
+    }
+
+    while( fgets(audioFileName, 500, inFileList) != NULL )
+    {
+        // remove trailing white-spaces and any Cntrl character
+        if(strlen(audioFileName) == 0)
+        {
+            continue;
+        }
+        short n = strlen(audioFileName) - 1;
+        while(isspace(audioFileName[n]) || iscntrl(audioFileName[n]))
+        {
+            audioFileName[n] = '\0';
+            n--;
+            if(n < 0)
+            {
+                break;
+            }
+        }
+
+        // remove leading spaces
+        if(strlen(audioFileName) == 0)
+        {
+            continue;
+        }
+        n = 0;
+        while((isspace(audioFileName[n]) || iscntrl(audioFileName[n])) &&
+            (audioFileName[n] != '\0'))
+        {
+            n++;
+        }
+        memmove(audioFileName, &audioFileName[n], 500 - n);
+        if(strlen(audioFileName) == 0)
+        {
+            continue;
+        }
+        audioFile = fopen(audioFileName, "rb");
+        if(audioFile == NULL)
+        {
+            printf("\nCannot open %s!!!!!\n", audioFileName);
+            exit(0);
+        }
+
+        if(audioOffset > 0)
+        {
+            fseek(audioFile, (audioOffset<<1), SEEK_SET);  /* offset is in 16-bit samples */
+        }
+
+        while(fread(audioBuff, sizeof(short), (480*frameSizeMsec/30), audioFile) >= (480*frameSizeMsec/30))
+        {
+            startAudio = 0;
+            do
+            {
+                encodedLen = WebRtcIsacfix_Encode(isac_struct,
+                                                  &audioBuff[startAudio], encoded);
+                startAudio += 160;   /* feed 160 samples (10 ms) per call until a packet is produced */
+            } while(encodedLen == 0);
+
+            if(encodedLen < 0)
+            {
+                printf("\nEncoding Error!!!\n");
+                exit(0);
+            }
+            hist[encodedLen]++;
+        }
+        fclose(audioFile);
+    }
+    fclose(inFileList);
+    unsigned long totalFrames = 0;
+    for(short n=0; n < 601; n++)
+    {
+        totalFrames += hist[n];
+        fprintf(outFile, "%10lu\n", hist[n]);
+    }
+    fclose(outFile);
+
+    short topTenCntr = 0;
+    printf("\nTotal number of Frames %lu\n\n", totalFrames);
+    printf("Payload Len    # occurences\n");
+    printf("===========    ============\n");
+
+    for(short n = 600; (n >= 0) && (topTenCntr < 10); n--)
+    {
+        if(hist[n] > 0)
+        {
+            topTenCntr++;
+            printf("    %3d            %3lu\n", n, hist[n]);  /* %lu: hist[] is unsigned long (was %d -> UB on LP64) */
+        }
+    }
+    WebRtcIsacfix_Free(isac_struct);
+    return 0;
+}
+    
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/Isac_test.cc b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/Isac_test.cc
new file mode 100644
index 0000000..2791db4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/Isac_test.cc
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+	Stand Alone test application for ISACFIX and ISAC LC
+
+******************************************************************/
+
+#include <string.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "typedefs.h"
+
+#include "isacfix.h"
+ISACFIX_MainStruct *ISACfix_inst;
+
+#define FS								16000
+
+
+typedef struct { /* state for the bottleneck arrival-time simulation (see get_arrival_time) */
+	WebRtc_UWord32 arrival_time;            /* samples */
+	WebRtc_UWord32 sample_count;            /* samples */
+	WebRtc_UWord16 rtp_number;              /* incremented once per simulated packet */
+} BottleNeckModel;
+
+void get_arrival_time(int current_framesamples,   /* samples */
+					  int packet_size,            /* bytes */
+					  int bottleneck,             /* excluding headers; bits/s */
+					  BottleNeckModel *BN_data)
+{ /* Advances BN_data by one packet pushed through a fixed-rate bottleneck. */
+	const int HeaderSize = 35; /* assumed per-packet header size in bytes (IP/UDP/RTP) -- TODO confirm */
+	int HeaderRate;
+
+	HeaderRate = HeaderSize * 8 * FS / current_framesamples;     /* bits/s */
+
+	/* everything in samples */
+	BN_data->sample_count = BN_data->sample_count + current_framesamples;
+
+	BN_data->arrival_time += ((packet_size + HeaderSize) * 8 * FS) / (bottleneck + HeaderRate); /* (bytes*8*FS)/(bits/s) = transmission time in samples */
+
+	if (BN_data->arrival_time < BN_data->sample_count)
+		BN_data->arrival_time = BN_data->sample_count; /* clamp: a packet cannot arrive before its audio was captured */
+
+	BN_data->rtp_number++;
+}
+
+/*
+#ifdef __cplusplus
+extern "C" {
+#endif
+*/
+int main(int argc, char* argv[]){ /* usage: Isac_test <in> <out> <chcfile|none> <frameLen> <codec> [bitrate] */
+
+	/* Parameters */
+	FILE *pInFile, *pOutFile, *pChcFile; 
+	WebRtc_Word8 inFile[40];
+	WebRtc_Word8 outFile[40];
+	WebRtc_Word8 chcFile[40];
+	WebRtc_Word8 codec[10];
+	WebRtc_Word16 bitrt, spType, size;
+	WebRtc_UWord16 frameLen;
+	WebRtc_Word16 sigOut[1000], sigIn[1000]; 
+	WebRtc_UWord16 bitStream[500]; /* double to 32 kbps for 60 ms */
+
+	WebRtc_Word16 chc, ok;
+	int noOfCalls, cdlen;
+	WebRtc_Word16 noOfLostFrames;
+	int err, errtype;
+
+	BottleNeckModel       BN_data;
+
+	int totalbits =0;
+	int totalsmpls =0;
+
+	/*End Parameters*/
+
+	/* NOTE(review): strcpy/strcat into fixed 40/10-byte buffers can overflow on long argv strings */
+	if ((argc==6)||(argc==7) ){
+
+		strcpy(codec,argv[5]);
+
+		if(argc==7){
+			if (!_stricmp("isac",codec)){
+				bitrt = atoi(argv[6]);
+				if ( (bitrt<10000)||(bitrt>32000)){ /* BUG FIX: was &&, an impossible condition -- range check never fired */
+					printf("Error: Supported bit rate in the range 10000-32000 bps!\n");
+					exit(-1);
+				}
+
+			}else{
+	      printf("Error: Codec not recognized. Check spelling!\n");
+	      exit(-1);
+			}
+
+		} else { 
+				printf("Error: Codec not recognized. Check spelling!\n");
+				exit(-1);
+		}
+	} else {
+		printf("Error: Wrong number of input parameter!\n\n");
+		exit(-1);
+	}
+	
+	frameLen = atoi(argv[4]);
+	strcpy(chcFile,argv[3]);
+	strcpy(outFile,argv[2]);
+	strcpy(inFile,argv[1]);
+
+	/*  Open file streams */
+	if( (pInFile = fopen(inFile,"rb")) == NULL ) {
+		printf( "Error: Did not find input file!\n" );
+		exit(-1);
+	}
+	strcat(outFile,"_"); /* output name encodes frame length, codec, bitrate and plc flag */
+	strcat(outFile, argv[4]);
+	strcat(outFile,"_");
+	strcat(outFile, codec);
+	
+	if (argc==7){
+		strcat(outFile,"_");
+		strcat(outFile, argv[6]);
+	}
+	if (_stricmp("none", chcFile)){ /* non-zero = channel file given, so PLC path is possible */
+		strcat(outFile,"_");
+		strcat(outFile, "plc");
+	}
+	
+	strcat(outFile, ".otp");
+	
+	if (_stricmp("none", chcFile)){
+		if( (pChcFile = fopen(chcFile,"rb")) == NULL ) {
+			printf( "Error: Did not find channel file!\n" );
+			exit(-1);
+		}
+	}
+    /******************************************************************/
+	if (!_stricmp("isac", codec)){    /*    ISAC   */
+		if ((frameLen!=480)&&(frameLen!=960)) { /* 30 ms or 60 ms at 16 kHz */
+			printf("Error: ISAC only supports 480 and 960 samples per frame (not %d)\n", frameLen);
+			exit(-1);
+		}
+		if( (pOutFile = fopen(outFile,"wb")) == NULL ) {
+			printf( "Could not open output file!\n" );
+			exit(-1);
+		}
+		ok=WebRtcIsacfix_Create(&ISACfix_inst);
+		if (ok!=0) {
+			printf("Couldn't allocate memory for iSAC fix instance\n");
+			exit(-1);
+		}
+
+		BN_data.arrival_time  = 0;
+		BN_data.sample_count  = 0;
+		BN_data.rtp_number    = 0;
+
+		WebRtcIsacfix_EncoderInit(ISACfix_inst,1);
+		WebRtcIsacfix_DecoderInit(ISACfix_inst);
+		err = WebRtcIsacfix_Control(ISACfix_inst, bitrt, (frameLen>>4)); /* frameLen>>4: samples -> ms at 16 kHz */
+		if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+				exit(EXIT_FAILURE);
+			}
+		/* loop over frame */
+		while (fread(sigIn,sizeof(WebRtc_Word16),frameLen,pInFile) == frameLen) {
+			
+			noOfCalls=0;
+			cdlen=0;
+			while (cdlen<=0) { /* encoder buffers 160-sample chunks; returns 0 until a frame is ready */
+				cdlen=WebRtcIsacfix_Encode(ISACfix_inst,&sigIn[noOfCalls*160],(WebRtc_Word16*)bitStream);
+				if(cdlen<0){ /* BUG FIX: was ==-1; any other negative error code looped forever */
+					errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
+					printf("\n\nError in encoder: %d.\n\n", errtype);
+					exit(-1);
+				}
+				noOfCalls++;
+			}
+	
+	
+			if(_stricmp("none", chcFile)){
+				if (fread(&chc,sizeof(WebRtc_Word16),1,pChcFile)!=1) /* packet may be lost */
+					break;
+			} else {
+				chc = 1; /* packets never lost */
+			}
+					
+			/* simulate packet handling through NetEq and the modem */
+			get_arrival_time(frameLen, cdlen, bitrt, &BN_data);
+			
+			if (chc){ /* decode */
+
+				err = WebRtcIsacfix_UpdateBwEstimate1(ISACfix_inst,
+								  bitStream,
+								  cdlen,
+								  BN_data.rtp_number,
+								  BN_data.arrival_time);
+
+				if (err < 0) {
+					/* exit if returned with error */
+					errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
+					printf("\n\nError in decoder: %d.\n\n", errtype);
+					exit(EXIT_FAILURE);
+				}
+				size = WebRtcIsacfix_Decode(ISACfix_inst, bitStream, cdlen, sigOut, &spType);
+				if(size<=0){
+					/* exit if returned with error */
+					errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
+					printf("\n\nError in decoder: %d.\n\n", errtype);
+					exit(-1);
+				}
+			} else { /* PLC */
+				if (frameLen == 480){ /* one lost 30 ms frame; 60 ms counts as two */
+					noOfLostFrames = 1;
+				} else{
+					noOfLostFrames = 2;
+				}
+				size = WebRtcIsacfix_DecodePlc(ISACfix_inst, sigOut, noOfLostFrames );
+				if(size<=0){
+					errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
+					printf("\n\nError in decoder: %d.\n\n", errtype);
+					exit(-1);
+				}
+			}
+				
+			/* Write decoded speech to file */
+			fwrite(sigOut,sizeof(short),size,pOutFile);
+
+		totalbits += 8 * cdlen;
+		totalsmpls += size;
+
+		}
+	/******************************************************************/
+	}
+
+//	printf("\n\ntotal bits				= %d bits", totalbits);
+	printf("\nmeasured average bitrate		= %0.3f kbits/s", (double)totalbits * 16 / totalsmpls);
+	printf("\n");
+
+
+	fclose(pInFile);
+	fclose(pOutFile);
+	if (_stricmp("none", chcFile)){
+		fclose(pChcFile);
+	}
+
+	if (!_stricmp("isac", codec)){
+		WebRtcIsacfix_Free(ISACfix_inst);
+	}
+									 			
+	return 0;
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/ChannelFiles.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/ChannelFiles.txt
new file mode 100644
index 0000000..05f7410
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/ChannelFiles.txt
@@ -0,0 +1,3 @@
+bottlenecks.txt
+lowrates.txt
+tworates.txt
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/InputFiles.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/InputFiles.txt
new file mode 100644
index 0000000..f26b7af
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/InputFiles.txt
@@ -0,0 +1,31 @@
+DTMF_16kHz_long.pcm
+DTMF_16kHz_short.pcm
+F00.INP
+F01.INP
+F02.INP
+F03.INP
+F04.INP
+F05.INP
+F06.INP
+longtest.pcm
+ltest_speech_clean.pcm
+ltest_music.pcm
+ltest_speech_noisy.pcm
+misc2.pcm
+purenb.pcm
+sawsweep_380_60.pcm
+sinesweep.pcm
+sinesweep_half.pcm
+speechmusic.pcm
+speechmusic_nb.pcm
+speechoffice0dB.pcm
+speech_and_misc_NB.pcm
+speech_and_misc_WB.pcm
+testM4.pcm
+testM4D_rev.pcm
+testM4D.pcm
+testfile.pcm
+tone_cisco.pcm
+tone_cisco_long.pcm
+wb_contspeech.pcm
+wb_speech_office25db.pcm
\ No newline at end of file
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/InputFilesFew.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/InputFilesFew.txt
new file mode 100644
index 0000000..08bbde3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/InputFilesFew.txt
@@ -0,0 +1,6 @@
+DTMF_16kHz_short.pcm
+ltest_speech_noisy.pcm
+misc2.pcm
+sinesweep.pcm
+speechmusic.pcm
+tone_cisco.pcm
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/ListOfTestCases.xls b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/ListOfTestCases.xls
new file mode 100644
index 0000000..f0889ef
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/ListOfTestCases.xls
Binary files differ
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/diffiSAC.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/diffiSAC.txt
new file mode 100644
index 0000000..96b87c0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/diffiSAC.txt
@@ -0,0 +1,481 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+diff ../dataqa350/i30_1DTMF_16kHz_long.pcm ../dataqa351/i30_1DTMF_16kHz_long.pcm
+diff ../dataqa350/i60_1DTMF_16kHz_long.pcm ../dataqa351/i60_1DTMF_16kHz_long.pcm
+diff ../dataqa350/i30_2DTMF_16kHz_long.pcm ../dataqa351/i30_2DTMF_16kHz_long.pcm
+diff ../dataqa350/i60_2DTMF_16kHz_long.pcm ../dataqa351/i60_2DTMF_16kHz_long.pcm
+diff ../dataqa350/i30_3DTMF_16kHz_long.pcm ../dataqa351/i30_3DTMF_16kHz_long.pcm
+diff ../dataqa350/i60_3DTMF_16kHz_long.pcm ../dataqa351/i60_3DTMF_16kHz_long.pcm
+diff ../dataqa350/i30_4DTMF_16kHz_long.pcm ../dataqa351/i30_4DTMF_16kHz_long.pcm
+diff ../dataqa350/i60_4DTMF_16kHz_long.pcm ../dataqa351/i60_4DTMF_16kHz_long.pcm
+diff ../dataqa350/i30_5DTMF_16kHz_long.pcm ../dataqa351/i30_5DTMF_16kHz_long.pcm
+diff ../dataqa350/i60_5DTMF_16kHz_long.pcm ../dataqa351/i60_5DTMF_16kHz_long.pcm
+diff ../dataqa350/i30_6DTMF_16kHz_long.pcm ../dataqa351/i30_6DTMF_16kHz_long.pcm
+diff ../dataqa350/i60_6DTMF_16kHz_long.pcm ../dataqa351/i60_6DTMF_16kHz_long.pcm
+diff ../dataqa350/a1DTMF_16kHz_long.pcm ../dataqa351/a1DTMF_16kHz_long.pcm
+diff ../dataqa350/a2DTMF_16kHz_long.pcm ../dataqa351/a2DTMF_16kHz_long.pcm
+diff ../dataqa350/a3DTMF_16kHz_long.pcm ../dataqa351/a3DTMF_16kHz_long.pcm
+diff ../dataqa350/i30_7DTMF_16kHz_short.pcm ../dataqa351/i30_7DTMF_16kHz_short.pcm
+diff ../dataqa350/i60_7DTMF_16kHz_short.pcm ../dataqa351/i60_7DTMF_16kHz_short.pcm
+diff ../dataqa350/i30_8DTMF_16kHz_short.pcm ../dataqa351/i30_8DTMF_16kHz_short.pcm
+diff ../dataqa350/i60_8DTMF_16kHz_short.pcm ../dataqa351/i60_8DTMF_16kHz_short.pcm
+diff ../dataqa350/i30_9DTMF_16kHz_short.pcm ../dataqa351/i30_9DTMF_16kHz_short.pcm
+diff ../dataqa350/i60_9DTMF_16kHz_short.pcm ../dataqa351/i60_9DTMF_16kHz_short.pcm
+diff ../dataqa350/i30_10DTMF_16kHz_short.pcm ../dataqa351/i30_10DTMF_16kHz_short.pcm
+diff ../dataqa350/i60_10DTMF_16kHz_short.pcm ../dataqa351/i60_10DTMF_16kHz_short.pcm
+diff ../dataqa350/i30_11DTMF_16kHz_short.pcm ../dataqa351/i30_11DTMF_16kHz_short.pcm
+diff ../dataqa350/i60_11DTMF_16kHz_short.pcm ../dataqa351/i60_11DTMF_16kHz_short.pcm
+diff ../dataqa350/i30_12DTMF_16kHz_short.pcm ../dataqa351/i30_12DTMF_16kHz_short.pcm
+diff ../dataqa350/i60_12DTMF_16kHz_short.pcm ../dataqa351/i60_12DTMF_16kHz_short.pcm
+diff ../dataqa350/a4DTMF_16kHz_short.pcm ../dataqa351/a4DTMF_16kHz_short.pcm
+diff ../dataqa350/a5DTMF_16kHz_short.pcm ../dataqa351/a5DTMF_16kHz_short.pcm
+diff ../dataqa350/a6DTMF_16kHz_short.pcm ../dataqa351/a6DTMF_16kHz_short.pcm
+diff ../dataqa350/i30_13F00.INP ../dataqa351/i30_13F00.INP # fixed: RHS was dataqa350 (self-compare, always passed) for all F00-F06 cases below
+diff ../dataqa350/i60_13F00.INP ../dataqa351/i60_13F00.INP
+diff ../dataqa350/i30_14F00.INP ../dataqa351/i30_14F00.INP
+diff ../dataqa350/i60_14F00.INP ../dataqa351/i60_14F00.INP
+diff ../dataqa350/i30_15F00.INP ../dataqa351/i30_15F00.INP
+diff ../dataqa350/i60_15F00.INP ../dataqa351/i60_15F00.INP
+diff ../dataqa350/i30_16F00.INP ../dataqa351/i30_16F00.INP
+diff ../dataqa350/i60_16F00.INP ../dataqa351/i60_16F00.INP
+diff ../dataqa350/i30_17F00.INP ../dataqa351/i30_17F00.INP
+diff ../dataqa350/i60_17F00.INP ../dataqa351/i60_17F00.INP
+diff ../dataqa350/i30_18F00.INP ../dataqa351/i30_18F00.INP
+diff ../dataqa350/i60_18F00.INP ../dataqa351/i60_18F00.INP
+diff ../dataqa350/a7F00.INP ../dataqa351/a7F00.INP
+diff ../dataqa350/a8F00.INP ../dataqa351/a8F00.INP
+diff ../dataqa350/a9F00.INP ../dataqa351/a9F00.INP
+diff ../dataqa350/i30_19F01.INP ../dataqa351/i30_19F01.INP
+diff ../dataqa350/i60_19F01.INP ../dataqa351/i60_19F01.INP
+diff ../dataqa350/i30_20F01.INP ../dataqa351/i30_20F01.INP
+diff ../dataqa350/i60_20F01.INP ../dataqa351/i60_20F01.INP
+diff ../dataqa350/i30_21F01.INP ../dataqa351/i30_21F01.INP
+diff ../dataqa350/i60_21F01.INP ../dataqa351/i60_21F01.INP
+diff ../dataqa350/i30_22F01.INP ../dataqa351/i30_22F01.INP
+diff ../dataqa350/i60_22F01.INP ../dataqa351/i60_22F01.INP
+diff ../dataqa350/i30_23F01.INP ../dataqa351/i30_23F01.INP
+diff ../dataqa350/i60_23F01.INP ../dataqa351/i60_23F01.INP
+diff ../dataqa350/i30_24F01.INP ../dataqa351/i30_24F01.INP
+diff ../dataqa350/i60_24F01.INP ../dataqa351/i60_24F01.INP
+diff ../dataqa350/a10F01.INP ../dataqa351/a10F01.INP
+diff ../dataqa350/a11F01.INP ../dataqa351/a11F01.INP
+diff ../dataqa350/a12F01.INP ../dataqa351/a12F01.INP
+diff ../dataqa350/i30_25F02.INP ../dataqa351/i30_25F02.INP
+diff ../dataqa350/i60_25F02.INP ../dataqa351/i60_25F02.INP
+diff ../dataqa350/i30_26F02.INP ../dataqa351/i30_26F02.INP
+diff ../dataqa350/i60_26F02.INP ../dataqa351/i60_26F02.INP
+diff ../dataqa350/i30_27F02.INP ../dataqa351/i30_27F02.INP
+diff ../dataqa350/i60_27F02.INP ../dataqa351/i60_27F02.INP
+diff ../dataqa350/i30_28F02.INP ../dataqa351/i30_28F02.INP
+diff ../dataqa350/i60_28F02.INP ../dataqa351/i60_28F02.INP
+diff ../dataqa350/i30_29F02.INP ../dataqa351/i30_29F02.INP
+diff ../dataqa350/i60_29F02.INP ../dataqa351/i60_29F02.INP
+diff ../dataqa350/i30_30F02.INP ../dataqa351/i30_30F02.INP
+diff ../dataqa350/i60_30F02.INP ../dataqa351/i60_30F02.INP
+diff ../dataqa350/a13F02.INP ../dataqa351/a13F02.INP
+diff ../dataqa350/a14F02.INP ../dataqa351/a14F02.INP
+diff ../dataqa350/a15F02.INP ../dataqa351/a15F02.INP
+diff ../dataqa350/i30_31F03.INP ../dataqa351/i30_31F03.INP
+diff ../dataqa350/i60_31F03.INP ../dataqa351/i60_31F03.INP
+diff ../dataqa350/i30_32F03.INP ../dataqa351/i30_32F03.INP
+diff ../dataqa350/i60_32F03.INP ../dataqa351/i60_32F03.INP
+diff ../dataqa350/i30_33F03.INP ../dataqa351/i30_33F03.INP
+diff ../dataqa350/i60_33F03.INP ../dataqa351/i60_33F03.INP
+diff ../dataqa350/i30_34F03.INP ../dataqa351/i30_34F03.INP
+diff ../dataqa350/i60_34F03.INP ../dataqa351/i60_34F03.INP
+diff ../dataqa350/i30_35F03.INP ../dataqa351/i30_35F03.INP
+diff ../dataqa350/i60_35F03.INP ../dataqa351/i60_35F03.INP
+diff ../dataqa350/i30_36F03.INP ../dataqa351/i30_36F03.INP
+diff ../dataqa350/i60_36F03.INP ../dataqa351/i60_36F03.INP
+diff ../dataqa350/a16F03.INP ../dataqa351/a16F03.INP
+diff ../dataqa350/a17F03.INP ../dataqa351/a17F03.INP
+diff ../dataqa350/a18F03.INP ../dataqa351/a18F03.INP
+diff ../dataqa350/i30_37F04.INP ../dataqa351/i30_37F04.INP
+diff ../dataqa350/i60_37F04.INP ../dataqa351/i60_37F04.INP
+diff ../dataqa350/i30_38F04.INP ../dataqa351/i30_38F04.INP
+diff ../dataqa350/i60_38F04.INP ../dataqa351/i60_38F04.INP
+diff ../dataqa350/i30_39F04.INP ../dataqa351/i30_39F04.INP
+diff ../dataqa350/i60_39F04.INP ../dataqa351/i60_39F04.INP
+diff ../dataqa350/i30_40F04.INP ../dataqa351/i30_40F04.INP
+diff ../dataqa350/i60_40F04.INP ../dataqa351/i60_40F04.INP
+diff ../dataqa350/i30_41F04.INP ../dataqa351/i30_41F04.INP
+diff ../dataqa350/i60_41F04.INP ../dataqa351/i60_41F04.INP
+diff ../dataqa350/i30_42F04.INP ../dataqa351/i30_42F04.INP
+diff ../dataqa350/i60_42F04.INP ../dataqa351/i60_42F04.INP
+diff ../dataqa350/a19F04.INP ../dataqa351/a19F04.INP
+diff ../dataqa350/a20F04.INP ../dataqa351/a20F04.INP
+diff ../dataqa350/a21F04.INP ../dataqa351/a21F04.INP
+diff ../dataqa350/i30_43F05.INP ../dataqa351/i30_43F05.INP
+diff ../dataqa350/i60_43F05.INP ../dataqa351/i60_43F05.INP
+diff ../dataqa350/i30_44F05.INP ../dataqa351/i30_44F05.INP
+diff ../dataqa350/i60_44F05.INP ../dataqa351/i60_44F05.INP
+diff ../dataqa350/i30_45F05.INP ../dataqa351/i30_45F05.INP
+diff ../dataqa350/i60_45F05.INP ../dataqa351/i60_45F05.INP
+diff ../dataqa350/i30_46F05.INP ../dataqa351/i30_46F05.INP
+diff ../dataqa350/i60_46F05.INP ../dataqa351/i60_46F05.INP
+diff ../dataqa350/i30_47F05.INP ../dataqa351/i30_47F05.INP
+diff ../dataqa350/i60_47F05.INP ../dataqa351/i60_47F05.INP
+diff ../dataqa350/i30_48F05.INP ../dataqa351/i30_48F05.INP
+diff ../dataqa350/i60_48F05.INP ../dataqa351/i60_48F05.INP
+diff ../dataqa350/a22F05.INP ../dataqa351/a22F05.INP
+diff ../dataqa350/a23F05.INP ../dataqa351/a23F05.INP
+diff ../dataqa350/a24F05.INP ../dataqa351/a24F05.INP
+diff ../dataqa350/i30_49F06.INP ../dataqa351/i30_49F06.INP
+diff ../dataqa350/i60_49F06.INP ../dataqa351/i60_49F06.INP
+diff ../dataqa350/i30_50F06.INP ../dataqa351/i30_50F06.INP
+diff ../dataqa350/i60_50F06.INP ../dataqa351/i60_50F06.INP
+diff ../dataqa350/i30_51F06.INP ../dataqa351/i30_51F06.INP
+diff ../dataqa350/i60_51F06.INP ../dataqa351/i60_51F06.INP
+diff ../dataqa350/i30_52F06.INP ../dataqa351/i30_52F06.INP
+diff ../dataqa350/i60_52F06.INP ../dataqa351/i60_52F06.INP
+diff ../dataqa350/i30_53F06.INP ../dataqa351/i30_53F06.INP
+diff ../dataqa350/i60_53F06.INP ../dataqa351/i60_53F06.INP
+diff ../dataqa350/i30_54F06.INP ../dataqa351/i30_54F06.INP
+diff ../dataqa350/i60_54F06.INP ../dataqa351/i60_54F06.INP
+diff ../dataqa350/a25F06.INP ../dataqa351/a25F06.INP
+diff ../dataqa350/a26F06.INP ../dataqa351/a26F06.INP
+diff ../dataqa350/a27F06.INP ../dataqa351/a27F06.INP
+diff ../dataqa350/i30_55longtest.pcm ../dataqa351/i30_55longtest.pcm
+diff ../dataqa350/i60_55longtest.pcm ../dataqa351/i60_55longtest.pcm
+diff ../dataqa350/i30_56longtest.pcm ../dataqa351/i30_56longtest.pcm
+diff ../dataqa350/i60_56longtest.pcm ../dataqa351/i60_56longtest.pcm
+diff ../dataqa350/i30_57longtest.pcm ../dataqa351/i30_57longtest.pcm
+diff ../dataqa350/i60_57longtest.pcm ../dataqa351/i60_57longtest.pcm
+diff ../dataqa350/i30_58longtest.pcm ../dataqa351/i30_58longtest.pcm
+diff ../dataqa350/i60_58longtest.pcm ../dataqa351/i60_58longtest.pcm
+diff ../dataqa350/i30_59longtest.pcm ../dataqa351/i30_59longtest.pcm
+diff ../dataqa350/i60_59longtest.pcm ../dataqa351/i60_59longtest.pcm
+diff ../dataqa350/i30_60longtest.pcm ../dataqa351/i30_60longtest.pcm
+diff ../dataqa350/i60_60longtest.pcm ../dataqa351/i60_60longtest.pcm
+diff ../dataqa350/a28longtest.pcm ../dataqa351/a28longtest.pcm
+diff ../dataqa350/a29longtest.pcm ../dataqa351/a29longtest.pcm
+diff ../dataqa350/a30longtest.pcm ../dataqa351/a30longtest.pcm
+diff ../dataqa350/i30_61ltest_speech_clean.pcm ../dataqa351/i30_61ltest_speech_clean.pcm
+diff ../dataqa350/i60_61ltest_speech_clean.pcm ../dataqa351/i60_61ltest_speech_clean.pcm
+diff ../dataqa350/i30_62ltest_speech_clean.pcm ../dataqa351/i30_62ltest_speech_clean.pcm
+diff ../dataqa350/i60_62ltest_speech_clean.pcm ../dataqa351/i60_62ltest_speech_clean.pcm
+diff ../dataqa350/i30_63ltest_speech_clean.pcm ../dataqa351/i30_63ltest_speech_clean.pcm
+diff ../dataqa350/i60_63ltest_speech_clean.pcm ../dataqa351/i60_63ltest_speech_clean.pcm
+diff ../dataqa350/i30_64ltest_speech_clean.pcm ../dataqa351/i30_64ltest_speech_clean.pcm
+diff ../dataqa350/i60_64ltest_speech_clean.pcm ../dataqa351/i60_64ltest_speech_clean.pcm
+diff ../dataqa350/i30_65ltest_speech_clean.pcm ../dataqa351/i30_65ltest_speech_clean.pcm
+diff ../dataqa350/i60_65ltest_speech_clean.pcm ../dataqa351/i60_65ltest_speech_clean.pcm
+diff ../dataqa350/i30_66ltest_speech_clean.pcm ../dataqa351/i30_66ltest_speech_clean.pcm
+diff ../dataqa350/i60_66ltest_speech_clean.pcm ../dataqa351/i60_66ltest_speech_clean.pcm
+diff ../dataqa350/a31ltest_speech_clean.pcm ../dataqa351/a31ltest_speech_clean.pcm
+diff ../dataqa350/a32ltest_speech_clean.pcm ../dataqa351/a32ltest_speech_clean.pcm
+diff ../dataqa350/a33ltest_speech_clean.pcm ../dataqa351/a33ltest_speech_clean.pcm
+diff ../dataqa350/i30_67ltest_music.pcm ../dataqa351/i30_67ltest_music.pcm
+diff ../dataqa350/i60_67ltest_music.pcm ../dataqa351/i60_67ltest_music.pcm
+diff ../dataqa350/i30_68ltest_music.pcm ../dataqa351/i30_68ltest_music.pcm
+diff ../dataqa350/i60_68ltest_music.pcm ../dataqa351/i60_68ltest_music.pcm
+diff ../dataqa350/i30_69ltest_music.pcm ../dataqa351/i30_69ltest_music.pcm
+diff ../dataqa350/i60_69ltest_music.pcm ../dataqa351/i60_69ltest_music.pcm
+diff ../dataqa350/i30_70ltest_music.pcm ../dataqa351/i30_70ltest_music.pcm
+diff ../dataqa350/i60_70ltest_music.pcm ../dataqa351/i60_70ltest_music.pcm
+diff ../dataqa350/i30_71ltest_music.pcm ../dataqa351/i30_71ltest_music.pcm
+diff ../dataqa350/i60_71ltest_music.pcm ../dataqa351/i60_71ltest_music.pcm
+diff ../dataqa350/i30_72ltest_music.pcm ../dataqa351/i30_72ltest_music.pcm
+diff ../dataqa350/i60_72ltest_music.pcm ../dataqa351/i60_72ltest_music.pcm
+diff ../dataqa350/a34ltest_music.pcm ../dataqa351/a34ltest_music.pcm
+diff ../dataqa350/a35ltest_music.pcm ../dataqa351/a35ltest_music.pcm
+diff ../dataqa350/a36ltest_music.pcm ../dataqa351/a36ltest_music.pcm
+diff ../dataqa350/i30_73ltest_speech_noisy.pcm ../dataqa351/i30_73ltest_speech_noisy.pcm
+diff ../dataqa350/i60_73ltest_speech_noisy.pcm ../dataqa351/i60_73ltest_speech_noisy.pcm
+diff ../dataqa350/i30_74ltest_speech_noisy.pcm ../dataqa351/i30_74ltest_speech_noisy.pcm
+diff ../dataqa350/i60_74ltest_speech_noisy.pcm ../dataqa351/i60_74ltest_speech_noisy.pcm
+diff ../dataqa350/i30_75ltest_speech_noisy.pcm ../dataqa351/i30_75ltest_speech_noisy.pcm
+diff ../dataqa350/i60_75ltest_speech_noisy.pcm ../dataqa351/i60_75ltest_speech_noisy.pcm
+diff ../dataqa350/i30_76ltest_speech_noisy.pcm ../dataqa351/i30_76ltest_speech_noisy.pcm
+diff ../dataqa350/i60_76ltest_speech_noisy.pcm ../dataqa351/i60_76ltest_speech_noisy.pcm
+diff ../dataqa350/i30_77ltest_speech_noisy.pcm ../dataqa351/i30_77ltest_speech_noisy.pcm
+diff ../dataqa350/i60_77ltest_speech_noisy.pcm ../dataqa351/i60_77ltest_speech_noisy.pcm
+diff ../dataqa350/i30_78ltest_speech_noisy.pcm ../dataqa351/i30_78ltest_speech_noisy.pcm
+diff ../dataqa350/i60_78ltest_speech_noisy.pcm ../dataqa351/i60_78ltest_speech_noisy.pcm
+diff ../dataqa350/a37ltest_speech_noisy.pcm ../dataqa351/a37ltest_speech_noisy.pcm
+diff ../dataqa350/a38ltest_speech_noisy.pcm ../dataqa351/a38ltest_speech_noisy.pcm
+diff ../dataqa350/a39ltest_speech_noisy.pcm ../dataqa351/a39ltest_speech_noisy.pcm
+diff ../dataqa350/i30_79misc2.pcm ../dataqa351/i30_79misc2.pcm
+diff ../dataqa350/i60_79misc2.pcm ../dataqa351/i60_79misc2.pcm
+diff ../dataqa350/i30_80misc2.pcm ../dataqa351/i30_80misc2.pcm
+diff ../dataqa350/i60_80misc2.pcm ../dataqa351/i60_80misc2.pcm
+diff ../dataqa350/i30_81misc2.pcm ../dataqa351/i30_81misc2.pcm
+diff ../dataqa350/i60_81misc2.pcm ../dataqa351/i60_81misc2.pcm
+diff ../dataqa350/i30_82misc2.pcm ../dataqa351/i30_82misc2.pcm
+diff ../dataqa350/i60_82misc2.pcm ../dataqa351/i60_82misc2.pcm
+diff ../dataqa350/i30_83misc2.pcm ../dataqa351/i30_83misc2.pcm
+diff ../dataqa350/i60_83misc2.pcm ../dataqa351/i60_83misc2.pcm
+diff ../dataqa350/i30_84misc2.pcm ../dataqa351/i30_84misc2.pcm
+diff ../dataqa350/i60_84misc2.pcm ../dataqa351/i60_84misc2.pcm
+diff ../dataqa350/a40misc2.pcm ../dataqa351/a40misc2.pcm
+diff ../dataqa350/a41misc2.pcm ../dataqa351/a41misc2.pcm
+diff ../dataqa350/a42misc2.pcm ../dataqa351/a42misc2.pcm
+diff ../dataqa350/i30_85purenb.pcm ../dataqa351/i30_85purenb.pcm
+diff ../dataqa350/i60_85purenb.pcm ../dataqa351/i60_85purenb.pcm
+diff ../dataqa350/i30_86purenb.pcm ../dataqa351/i30_86purenb.pcm
+diff ../dataqa350/i60_86purenb.pcm ../dataqa351/i60_86purenb.pcm
+diff ../dataqa350/i30_87purenb.pcm ../dataqa351/i30_87purenb.pcm
+diff ../dataqa350/i60_87purenb.pcm ../dataqa351/i60_87purenb.pcm
+diff ../dataqa350/i30_88purenb.pcm ../dataqa351/i30_88purenb.pcm
+diff ../dataqa350/i60_88purenb.pcm ../dataqa351/i60_88purenb.pcm
+diff ../dataqa350/i30_89purenb.pcm ../dataqa351/i30_89purenb.pcm
+diff ../dataqa350/i60_89purenb.pcm ../dataqa351/i60_89purenb.pcm
+diff ../dataqa350/i30_90purenb.pcm ../dataqa351/i30_90purenb.pcm
+diff ../dataqa350/i60_90purenb.pcm ../dataqa351/i60_90purenb.pcm
+diff ../dataqa350/a43purenb.pcm ../dataqa351/a43purenb.pcm
+diff ../dataqa350/a44purenb.pcm ../dataqa351/a44purenb.pcm
+diff ../dataqa350/a45purenb.pcm ../dataqa351/a45purenb.pcm
+diff ../dataqa350/i30_91sawsweep_380_60.pcm ../dataqa351/i30_91sawsweep_380_60.pcm
+diff ../dataqa350/i60_91sawsweep_380_60.pcm ../dataqa351/i60_91sawsweep_380_60.pcm
+diff ../dataqa350/i30_92sawsweep_380_60.pcm ../dataqa351/i30_92sawsweep_380_60.pcm
+diff ../dataqa350/i60_92sawsweep_380_60.pcm ../dataqa351/i60_92sawsweep_380_60.pcm
+diff ../dataqa350/i30_93sawsweep_380_60.pcm ../dataqa351/i30_93sawsweep_380_60.pcm
+diff ../dataqa350/i60_93sawsweep_380_60.pcm ../dataqa351/i60_93sawsweep_380_60.pcm
+diff ../dataqa350/i30_94sawsweep_380_60.pcm ../dataqa351/i30_94sawsweep_380_60.pcm
+diff ../dataqa350/i60_94sawsweep_380_60.pcm ../dataqa351/i60_94sawsweep_380_60.pcm
+diff ../dataqa350/i30_95sawsweep_380_60.pcm ../dataqa351/i30_95sawsweep_380_60.pcm
+diff ../dataqa350/i60_95sawsweep_380_60.pcm ../dataqa351/i60_95sawsweep_380_60.pcm
+diff ../dataqa350/i30_96sawsweep_380_60.pcm ../dataqa351/i30_96sawsweep_380_60.pcm
+diff ../dataqa350/i60_96sawsweep_380_60.pcm ../dataqa351/i60_96sawsweep_380_60.pcm
+diff ../dataqa350/a46sawsweep_380_60.pcm ../dataqa351/a46sawsweep_380_60.pcm
+diff ../dataqa350/a47sawsweep_380_60.pcm ../dataqa351/a47sawsweep_380_60.pcm
+diff ../dataqa350/a48sawsweep_380_60.pcm ../dataqa351/a48sawsweep_380_60.pcm
+diff ../dataqa350/i30_97sinesweep.pcm ../dataqa351/i30_97sinesweep.pcm
+diff ../dataqa350/i60_97sinesweep.pcm ../dataqa351/i60_97sinesweep.pcm
+diff ../dataqa350/i30_98sinesweep.pcm ../dataqa351/i30_98sinesweep.pcm
+diff ../dataqa350/i60_98sinesweep.pcm ../dataqa351/i60_98sinesweep.pcm
+diff ../dataqa350/i30_99sinesweep.pcm ../dataqa351/i30_99sinesweep.pcm
+diff ../dataqa350/i60_99sinesweep.pcm ../dataqa351/i60_99sinesweep.pcm
+diff ../dataqa350/i30_100sinesweep.pcm ../dataqa351/i30_100sinesweep.pcm
+diff ../dataqa350/i60_100sinesweep.pcm ../dataqa351/i60_100sinesweep.pcm
+diff ../dataqa350/i30_101sinesweep.pcm ../dataqa351/i30_101sinesweep.pcm
+diff ../dataqa350/i60_101sinesweep.pcm ../dataqa351/i60_101sinesweep.pcm
+diff ../dataqa350/i30_102sinesweep.pcm ../dataqa351/i30_102sinesweep.pcm
+diff ../dataqa350/i60_102sinesweep.pcm ../dataqa351/i60_102sinesweep.pcm
+diff ../dataqa350/a49sinesweep.pcm ../dataqa351/a49sinesweep.pcm
+diff ../dataqa350/a50sinesweep.pcm ../dataqa351/a50sinesweep.pcm
+diff ../dataqa350/a51sinesweep.pcm ../dataqa351/a51sinesweep.pcm
+diff ../dataqa350/i30_103sinesweep_half.pcm ../dataqa351/i30_103sinesweep_half.pcm
+diff ../dataqa350/i60_103sinesweep_half.pcm ../dataqa351/i60_103sinesweep_half.pcm
+diff ../dataqa350/i30_104sinesweep_half.pcm ../dataqa351/i30_104sinesweep_half.pcm
+diff ../dataqa350/i60_104sinesweep_half.pcm ../dataqa351/i60_104sinesweep_half.pcm
+diff ../dataqa350/i30_105sinesweep_half.pcm ../dataqa351/i30_105sinesweep_half.pcm
+diff ../dataqa350/i60_105sinesweep_half.pcm ../dataqa351/i60_105sinesweep_half.pcm
+diff ../dataqa350/i30_106sinesweep_half.pcm ../dataqa351/i30_106sinesweep_half.pcm
+diff ../dataqa350/i60_106sinesweep_half.pcm ../dataqa351/i60_106sinesweep_half.pcm
+diff ../dataqa350/i30_107sinesweep_half.pcm ../dataqa351/i30_107sinesweep_half.pcm
+diff ../dataqa350/i60_107sinesweep_half.pcm ../dataqa351/i60_107sinesweep_half.pcm
+diff ../dataqa350/i30_108sinesweep_half.pcm ../dataqa351/i30_108sinesweep_half.pcm
+diff ../dataqa350/i60_108sinesweep_half.pcm ../dataqa351/i60_108sinesweep_half.pcm
+diff ../dataqa350/a52sinesweep_half.pcm ../dataqa351/a52sinesweep_half.pcm
+diff ../dataqa350/a53sinesweep_half.pcm ../dataqa351/a53sinesweep_half.pcm
+diff ../dataqa350/a54sinesweep_half.pcm ../dataqa351/a54sinesweep_half.pcm
+diff ../dataqa350/i30_109speechmusic.pcm ../dataqa351/i30_109speechmusic.pcm
+diff ../dataqa350/i60_109speechmusic.pcm ../dataqa351/i60_109speechmusic.pcm
+diff ../dataqa350/i30_110speechmusic.pcm ../dataqa351/i30_110speechmusic.pcm
+diff ../dataqa350/i60_110speechmusic.pcm ../dataqa351/i60_110speechmusic.pcm
+diff ../dataqa350/i30_111speechmusic.pcm ../dataqa351/i30_111speechmusic.pcm
+diff ../dataqa350/i60_111speechmusic.pcm ../dataqa351/i60_111speechmusic.pcm
+diff ../dataqa350/i30_112speechmusic.pcm ../dataqa351/i30_112speechmusic.pcm
+diff ../dataqa350/i60_112speechmusic.pcm ../dataqa351/i60_112speechmusic.pcm
+diff ../dataqa350/i30_113speechmusic.pcm ../dataqa351/i30_113speechmusic.pcm
+diff ../dataqa350/i60_113speechmusic.pcm ../dataqa351/i60_113speechmusic.pcm
+diff ../dataqa350/i30_114speechmusic.pcm ../dataqa351/i30_114speechmusic.pcm
+diff ../dataqa350/i60_114speechmusic.pcm ../dataqa351/i60_114speechmusic.pcm
+diff ../dataqa350/a55speechmusic.pcm ../dataqa351/a55speechmusic.pcm
+diff ../dataqa350/a56speechmusic.pcm ../dataqa351/a56speechmusic.pcm
+diff ../dataqa350/a57speechmusic.pcm ../dataqa351/a57speechmusic.pcm
+diff ../dataqa350/i30_115speechmusic_nb.pcm ../dataqa351/i30_115speechmusic_nb.pcm
+diff ../dataqa350/i60_115speechmusic_nb.pcm ../dataqa351/i60_115speechmusic_nb.pcm
+diff ../dataqa350/i30_116speechmusic_nb.pcm ../dataqa351/i30_116speechmusic_nb.pcm
+diff ../dataqa350/i60_116speechmusic_nb.pcm ../dataqa351/i60_116speechmusic_nb.pcm
+diff ../dataqa350/i30_117speechmusic_nb.pcm ../dataqa351/i30_117speechmusic_nb.pcm
+diff ../dataqa350/i60_117speechmusic_nb.pcm ../dataqa351/i60_117speechmusic_nb.pcm
+diff ../dataqa350/i30_118speechmusic_nb.pcm ../dataqa351/i30_118speechmusic_nb.pcm
+diff ../dataqa350/i60_118speechmusic_nb.pcm ../dataqa351/i60_118speechmusic_nb.pcm
+diff ../dataqa350/i30_119speechmusic_nb.pcm ../dataqa351/i30_119speechmusic_nb.pcm
+diff ../dataqa350/i60_119speechmusic_nb.pcm ../dataqa351/i60_119speechmusic_nb.pcm
+diff ../dataqa350/i30_120speechmusic_nb.pcm ../dataqa351/i30_120speechmusic_nb.pcm
+diff ../dataqa350/i60_120speechmusic_nb.pcm ../dataqa351/i60_120speechmusic_nb.pcm
+diff ../dataqa350/a58speechmusic_nb.pcm ../dataqa351/a58speechmusic_nb.pcm
+diff ../dataqa350/a59speechmusic_nb.pcm ../dataqa351/a59speechmusic_nb.pcm
+diff ../dataqa350/a60speechmusic_nb.pcm ../dataqa351/a60speechmusic_nb.pcm
+diff ../dataqa350/i30_121speechoffice0dB.pcm ../dataqa351/i30_121speechoffice0dB.pcm
+diff ../dataqa350/i60_121speechoffice0dB.pcm ../dataqa351/i60_121speechoffice0dB.pcm
+diff ../dataqa350/i30_122speechoffice0dB.pcm ../dataqa351/i30_122speechoffice0dB.pcm
+diff ../dataqa350/i60_122speechoffice0dB.pcm ../dataqa351/i60_122speechoffice0dB.pcm
+diff ../dataqa350/i30_123speechoffice0dB.pcm ../dataqa351/i30_123speechoffice0dB.pcm
+diff ../dataqa350/i60_123speechoffice0dB.pcm ../dataqa351/i60_123speechoffice0dB.pcm
+diff ../dataqa350/i30_124speechoffice0dB.pcm ../dataqa351/i30_124speechoffice0dB.pcm
+diff ../dataqa350/i60_124speechoffice0dB.pcm ../dataqa351/i60_124speechoffice0dB.pcm
+diff ../dataqa350/i30_125speechoffice0dB.pcm ../dataqa351/i30_125speechoffice0dB.pcm
+diff ../dataqa350/i60_125speechoffice0dB.pcm ../dataqa351/i60_125speechoffice0dB.pcm
+diff ../dataqa350/i30_126speechoffice0dB.pcm ../dataqa351/i30_126speechoffice0dB.pcm
+diff ../dataqa350/i60_126speechoffice0dB.pcm ../dataqa351/i60_126speechoffice0dB.pcm
+diff ../dataqa350/a61speechoffice0dB.pcm ../dataqa351/a61speechoffice0dB.pcm
+diff ../dataqa350/a62speechoffice0dB.pcm ../dataqa351/a62speechoffice0dB.pcm
+diff ../dataqa350/a63speechoffice0dB.pcm ../dataqa351/a63speechoffice0dB.pcm
+diff ../dataqa350/i30_127speech_and_misc_NB.pcm ../dataqa351/i30_127speech_and_misc_NB.pcm
+diff ../dataqa350/i60_127speech_and_misc_NB.pcm ../dataqa351/i60_127speech_and_misc_NB.pcm
+diff ../dataqa350/i30_128speech_and_misc_NB.pcm ../dataqa351/i30_128speech_and_misc_NB.pcm
+diff ../dataqa350/i60_128speech_and_misc_NB.pcm ../dataqa351/i60_128speech_and_misc_NB.pcm
+diff ../dataqa350/i30_129speech_and_misc_NB.pcm ../dataqa351/i30_129speech_and_misc_NB.pcm
+diff ../dataqa350/i60_129speech_and_misc_NB.pcm ../dataqa351/i60_129speech_and_misc_NB.pcm
+diff ../dataqa350/i30_130speech_and_misc_NB.pcm ../dataqa351/i30_130speech_and_misc_NB.pcm
+diff ../dataqa350/i60_130speech_and_misc_NB.pcm ../dataqa351/i60_130speech_and_misc_NB.pcm
+diff ../dataqa350/i30_131speech_and_misc_NB.pcm ../dataqa351/i30_131speech_and_misc_NB.pcm
+diff ../dataqa350/i60_131speech_and_misc_NB.pcm ../dataqa351/i60_131speech_and_misc_NB.pcm
+diff ../dataqa350/i30_132speech_and_misc_NB.pcm ../dataqa351/i30_132speech_and_misc_NB.pcm
+diff ../dataqa350/i60_132speech_and_misc_NB.pcm ../dataqa351/i60_132speech_and_misc_NB.pcm
+diff ../dataqa350/a64speech_and_misc_NB.pcm ../dataqa351/a64speech_and_misc_NB.pcm
+diff ../dataqa350/a65speech_and_misc_NB.pcm ../dataqa351/a65speech_and_misc_NB.pcm
+diff ../dataqa350/a66speech_and_misc_NB.pcm ../dataqa351/a66speech_and_misc_NB.pcm
+diff ../dataqa350/i30_133speech_and_misc_WB.pcm ../dataqa351/i30_133speech_and_misc_WB.pcm
+diff ../dataqa350/i60_133speech_and_misc_WB.pcm ../dataqa351/i60_133speech_and_misc_WB.pcm
+diff ../dataqa350/i30_134speech_and_misc_WB.pcm ../dataqa351/i30_134speech_and_misc_WB.pcm
+diff ../dataqa350/i60_134speech_and_misc_WB.pcm ../dataqa351/i60_134speech_and_misc_WB.pcm
+diff ../dataqa350/i30_135speech_and_misc_WB.pcm ../dataqa351/i30_135speech_and_misc_WB.pcm
+diff ../dataqa350/i60_135speech_and_misc_WB.pcm ../dataqa351/i60_135speech_and_misc_WB.pcm
+diff ../dataqa350/i30_136speech_and_misc_WB.pcm ../dataqa351/i30_136speech_and_misc_WB.pcm
+diff ../dataqa350/i60_136speech_and_misc_WB.pcm ../dataqa351/i60_136speech_and_misc_WB.pcm
+diff ../dataqa350/i30_137speech_and_misc_WB.pcm ../dataqa351/i30_137speech_and_misc_WB.pcm
+diff ../dataqa350/i60_137speech_and_misc_WB.pcm ../dataqa351/i60_137speech_and_misc_WB.pcm
+diff ../dataqa350/i30_138speech_and_misc_WB.pcm ../dataqa351/i30_138speech_and_misc_WB.pcm
+diff ../dataqa350/i60_138speech_and_misc_WB.pcm ../dataqa351/i60_138speech_and_misc_WB.pcm
+diff ../dataqa350/a67speech_and_misc_WB.pcm ../dataqa351/a67speech_and_misc_WB.pcm
+diff ../dataqa350/a68speech_and_misc_WB.pcm ../dataqa351/a68speech_and_misc_WB.pcm
+diff ../dataqa350/a69speech_and_misc_WB.pcm ../dataqa351/a69speech_and_misc_WB.pcm
+diff ../dataqa350/i30_139testM4.pcm ../dataqa351/i30_139testM4.pcm
+diff ../dataqa350/i60_139testM4.pcm ../dataqa351/i60_139testM4.pcm
+diff ../dataqa350/i30_140testM4.pcm ../dataqa351/i30_140testM4.pcm
+diff ../dataqa350/i60_140testM4.pcm ../dataqa351/i60_140testM4.pcm
+diff ../dataqa350/i30_141testM4.pcm ../dataqa351/i30_141testM4.pcm
+diff ../dataqa350/i60_141testM4.pcm ../dataqa351/i60_141testM4.pcm
+diff ../dataqa350/i30_142testM4.pcm ../dataqa351/i30_142testM4.pcm
+diff ../dataqa350/i60_142testM4.pcm ../dataqa351/i60_142testM4.pcm
+diff ../dataqa350/i30_143testM4.pcm ../dataqa351/i30_143testM4.pcm
+diff ../dataqa350/i60_143testM4.pcm ../dataqa351/i60_143testM4.pcm
+diff ../dataqa350/i30_144testM4.pcm ../dataqa351/i30_144testM4.pcm
+diff ../dataqa350/i60_144testM4.pcm ../dataqa351/i60_144testM4.pcm
+diff ../dataqa350/a70testM4.pcm ../dataqa351/a70testM4.pcm
+diff ../dataqa350/a71testM4.pcm ../dataqa351/a71testM4.pcm
+diff ../dataqa350/a72testM4.pcm ../dataqa351/a72testM4.pcm
+diff ../dataqa350/i30_145testM4D_rev.pcm ../dataqa351/i30_145testM4D_rev.pcm
+diff ../dataqa350/i60_145testM4D_rev.pcm ../dataqa351/i60_145testM4D_rev.pcm
+diff ../dataqa350/i30_146testM4D_rev.pcm ../dataqa351/i30_146testM4D_rev.pcm
+diff ../dataqa350/i60_146testM4D_rev.pcm ../dataqa351/i60_146testM4D_rev.pcm
+diff ../dataqa350/i30_147testM4D_rev.pcm ../dataqa351/i30_147testM4D_rev.pcm
+diff ../dataqa350/i60_147testM4D_rev.pcm ../dataqa351/i60_147testM4D_rev.pcm
+diff ../dataqa350/i30_148testM4D_rev.pcm ../dataqa351/i30_148testM4D_rev.pcm
+diff ../dataqa350/i60_148testM4D_rev.pcm ../dataqa351/i60_148testM4D_rev.pcm
+diff ../dataqa350/i30_149testM4D_rev.pcm ../dataqa351/i30_149testM4D_rev.pcm
+diff ../dataqa350/i60_149testM4D_rev.pcm ../dataqa351/i60_149testM4D_rev.pcm
+diff ../dataqa350/i30_150testM4D_rev.pcm ../dataqa351/i30_150testM4D_rev.pcm
+diff ../dataqa350/i60_150testM4D_rev.pcm ../dataqa351/i60_150testM4D_rev.pcm
+diff ../dataqa350/a73testM4D_rev.pcm ../dataqa351/a73testM4D_rev.pcm
+diff ../dataqa350/a74testM4D_rev.pcm ../dataqa351/a74testM4D_rev.pcm
+diff ../dataqa350/a75testM4D_rev.pcm ../dataqa351/a75testM4D_rev.pcm
+diff ../dataqa350/i30_151testM4D.pcm ../dataqa351/i30_151testM4D.pcm
+diff ../dataqa350/i60_151testM4D.pcm ../dataqa351/i60_151testM4D.pcm
+diff ../dataqa350/i30_152testM4D.pcm ../dataqa351/i30_152testM4D.pcm
+diff ../dataqa350/i60_152testM4D.pcm ../dataqa351/i60_152testM4D.pcm
+diff ../dataqa350/i30_153testM4D.pcm ../dataqa351/i30_153testM4D.pcm
+diff ../dataqa350/i60_153testM4D.pcm ../dataqa351/i60_153testM4D.pcm
+diff ../dataqa350/i30_154testM4D.pcm ../dataqa351/i30_154testM4D.pcm
+diff ../dataqa350/i60_154testM4D.pcm ../dataqa351/i60_154testM4D.pcm
+diff ../dataqa350/i30_155testM4D.pcm ../dataqa351/i30_155testM4D.pcm
+diff ../dataqa350/i60_155testM4D.pcm ../dataqa351/i60_155testM4D.pcm
+diff ../dataqa350/i30_156testM4D.pcm ../dataqa351/i30_156testM4D.pcm
+diff ../dataqa350/i60_156testM4D.pcm ../dataqa351/i60_156testM4D.pcm
+diff ../dataqa350/a76testM4D.pcm ../dataqa351/a76testM4D.pcm
+diff ../dataqa350/a77testM4D.pcm ../dataqa351/a77testM4D.pcm
+diff ../dataqa350/a78testM4D.pcm ../dataqa351/a78testM4D.pcm
+diff ../dataqa350/i30_157testfile.pcm ../dataqa351/i30_157testfile.pcm
+diff ../dataqa350/i60_157testfile.pcm ../dataqa351/i60_157testfile.pcm
+diff ../dataqa350/i30_158testfile.pcm ../dataqa351/i30_158testfile.pcm
+diff ../dataqa350/i60_158testfile.pcm ../dataqa351/i60_158testfile.pcm
+diff ../dataqa350/i30_159testfile.pcm ../dataqa351/i30_159testfile.pcm
+diff ../dataqa350/i60_159testfile.pcm ../dataqa351/i60_159testfile.pcm
+diff ../dataqa350/i30_160testfile.pcm ../dataqa351/i30_160testfile.pcm
+diff ../dataqa350/i60_160testfile.pcm ../dataqa351/i60_160testfile.pcm
+diff ../dataqa350/i30_161testfile.pcm ../dataqa351/i30_161testfile.pcm
+diff ../dataqa350/i60_161testfile.pcm ../dataqa351/i60_161testfile.pcm
+diff ../dataqa350/i30_162testfile.pcm ../dataqa351/i30_162testfile.pcm
+diff ../dataqa350/i60_162testfile.pcm ../dataqa351/i60_162testfile.pcm
+diff ../dataqa350/a79testfile.pcm ../dataqa351/a79testfile.pcm
+diff ../dataqa350/a80testfile.pcm ../dataqa351/a80testfile.pcm
+diff ../dataqa350/a81testfile.pcm ../dataqa351/a81testfile.pcm
+diff ../dataqa350/i30_163tone_cisco.pcm ../dataqa351/i30_163tone_cisco.pcm
+diff ../dataqa350/i60_163tone_cisco.pcm ../dataqa351/i60_163tone_cisco.pcm
+diff ../dataqa350/i30_164tone_cisco.pcm ../dataqa351/i30_164tone_cisco.pcm
+diff ../dataqa350/i60_164tone_cisco.pcm ../dataqa351/i60_164tone_cisco.pcm
+diff ../dataqa350/i30_165tone_cisco.pcm ../dataqa351/i30_165tone_cisco.pcm
+diff ../dataqa350/i60_165tone_cisco.pcm ../dataqa351/i60_165tone_cisco.pcm
+diff ../dataqa350/i30_166tone_cisco.pcm ../dataqa351/i30_166tone_cisco.pcm
+diff ../dataqa350/i60_166tone_cisco.pcm ../dataqa351/i60_166tone_cisco.pcm
+diff ../dataqa350/i30_167tone_cisco.pcm ../dataqa351/i30_167tone_cisco.pcm
+diff ../dataqa350/i60_167tone_cisco.pcm ../dataqa351/i60_167tone_cisco.pcm
+diff ../dataqa350/i30_168tone_cisco.pcm ../dataqa351/i30_168tone_cisco.pcm
+diff ../dataqa350/i60_168tone_cisco.pcm ../dataqa351/i60_168tone_cisco.pcm
+diff ../dataqa350/a82tone_cisco.pcm ../dataqa351/a82tone_cisco.pcm
+diff ../dataqa350/a83tone_cisco.pcm ../dataqa351/a83tone_cisco.pcm
+diff ../dataqa350/a84tone_cisco.pcm ../dataqa351/a84tone_cisco.pcm
+diff ../dataqa350/i30_169tone_cisco_long.pcm ../dataqa351/i30_169tone_cisco_long.pcm
+diff ../dataqa350/i60_169tone_cisco_long.pcm ../dataqa351/i60_169tone_cisco_long.pcm
+diff ../dataqa350/i30_170tone_cisco_long.pcm ../dataqa351/i30_170tone_cisco_long.pcm
+diff ../dataqa350/i60_170tone_cisco_long.pcm ../dataqa351/i60_170tone_cisco_long.pcm
+diff ../dataqa350/i30_171tone_cisco_long.pcm ../dataqa351/i30_171tone_cisco_long.pcm
+diff ../dataqa350/i60_171tone_cisco_long.pcm ../dataqa351/i60_171tone_cisco_long.pcm
+diff ../dataqa350/i30_172tone_cisco_long.pcm ../dataqa351/i30_172tone_cisco_long.pcm
+diff ../dataqa350/i60_172tone_cisco_long.pcm ../dataqa351/i60_172tone_cisco_long.pcm
+diff ../dataqa350/i30_173tone_cisco_long.pcm ../dataqa351/i30_173tone_cisco_long.pcm
+diff ../dataqa350/i60_173tone_cisco_long.pcm ../dataqa351/i60_173tone_cisco_long.pcm
+diff ../dataqa350/i30_174tone_cisco_long.pcm ../dataqa351/i30_174tone_cisco_long.pcm
+diff ../dataqa350/i60_174tone_cisco_long.pcm ../dataqa351/i60_174tone_cisco_long.pcm
+diff ../dataqa350/a85tone_cisco_long.pcm ../dataqa351/a85tone_cisco_long.pcm
+diff ../dataqa350/a86tone_cisco_long.pcm ../dataqa351/a86tone_cisco_long.pcm
+diff ../dataqa350/a87tone_cisco_long.pcm ../dataqa351/a87tone_cisco_long.pcm
+diff ../dataqa350/i30_175wb_contspeech.pcm ../dataqa351/i30_175wb_contspeech.pcm
+diff ../dataqa350/i60_175wb_contspeech.pcm ../dataqa351/i60_175wb_contspeech.pcm
+diff ../dataqa350/i30_176wb_contspeech.pcm ../dataqa351/i30_176wb_contspeech.pcm
+diff ../dataqa350/i60_176wb_contspeech.pcm ../dataqa351/i60_176wb_contspeech.pcm
+diff ../dataqa350/i30_177wb_contspeech.pcm ../dataqa351/i30_177wb_contspeech.pcm
+diff ../dataqa350/i60_177wb_contspeech.pcm ../dataqa351/i60_177wb_contspeech.pcm
+diff ../dataqa350/i30_178wb_contspeech.pcm ../dataqa351/i30_178wb_contspeech.pcm
+diff ../dataqa350/i60_178wb_contspeech.pcm ../dataqa351/i60_178wb_contspeech.pcm
+diff ../dataqa350/i30_179wb_contspeech.pcm ../dataqa351/i30_179wb_contspeech.pcm
+diff ../dataqa350/i60_179wb_contspeech.pcm ../dataqa351/i60_179wb_contspeech.pcm
+diff ../dataqa350/i30_180wb_contspeech.pcm ../dataqa351/i30_180wb_contspeech.pcm
+diff ../dataqa350/i60_180wb_contspeech.pcm ../dataqa351/i60_180wb_contspeech.pcm
+diff ../dataqa350/a88wb_contspeech.pcm ../dataqa351/a88wb_contspeech.pcm
+diff ../dataqa350/a89wb_contspeech.pcm ../dataqa351/a89wb_contspeech.pcm
+diff ../dataqa350/a90wb_contspeech.pcm ../dataqa351/a90wb_contspeech.pcm
+diff ../dataqa350/i30_181wb_speech_office25db.pcm ../dataqa351/i30_181wb_speech_office25db.pcm
+diff ../dataqa350/i60_181wb_speech_office25db.pcm ../dataqa351/i60_181wb_speech_office25db.pcm
+diff ../dataqa350/i30_182wb_speech_office25db.pcm ../dataqa351/i30_182wb_speech_office25db.pcm
+diff ../dataqa350/i60_182wb_speech_office25db.pcm ../dataqa351/i60_182wb_speech_office25db.pcm
+diff ../dataqa350/i30_183wb_speech_office25db.pcm ../dataqa351/i30_183wb_speech_office25db.pcm
+diff ../dataqa350/i60_183wb_speech_office25db.pcm ../dataqa351/i60_183wb_speech_office25db.pcm
+diff ../dataqa350/i30_184wb_speech_office25db.pcm ../dataqa351/i30_184wb_speech_office25db.pcm
+diff ../dataqa350/i60_184wb_speech_office25db.pcm ../dataqa351/i60_184wb_speech_office25db.pcm
+diff ../dataqa350/i30_185wb_speech_office25db.pcm ../dataqa351/i30_185wb_speech_office25db.pcm
+diff ../dataqa350/i60_185wb_speech_office25db.pcm ../dataqa351/i60_185wb_speech_office25db.pcm
+diff ../dataqa350/i30_186wb_speech_office25db.pcm ../dataqa351/i30_186wb_speech_office25db.pcm
+diff ../dataqa350/i60_186wb_speech_office25db.pcm ../dataqa351/i60_186wb_speech_office25db.pcm
+diff ../dataqa350/a91wb_speech_office25db.pcm ../dataqa351/a91wb_speech_office25db.pcm
+diff ../dataqa350/a92wb_speech_office25db.pcm ../dataqa351/a92wb_speech_office25db.pcm
+diff ../dataqa350/a93wb_speech_office25db.pcm ../dataqa351/a93wb_speech_office25db.pcm
+diff ../dataqa350/a30_1DTMF_16kHz_short.pcm ../dataqa351/a30_1DTMF_16kHz_short.pcm
+diff ../dataqa350/a60_1DTMF_16kHz_short.pcm ../dataqa351/a60_1DTMF_16kHz_short.pcm
+diff ../dataqa350/a30_2ltest_speech_noisy.pcm ../dataqa351/a30_2ltest_speech_noisy.pcm
+diff ../dataqa350/a60_2ltest_speech_noisy.pcm ../dataqa351/a60_2ltest_speech_noisy.pcm
+diff ../dataqa350/a30_3misc2.pcm ../dataqa351/a30_3misc2.pcm
+diff ../dataqa350/a60_3misc2.pcm ../dataqa351/a60_3misc2.pcm
+diff ../dataqa350/a30_4sinesweep.pcm ../dataqa351/a30_4sinesweep.pcm
+diff ../dataqa350/a60_4sinesweep.pcm ../dataqa351/a60_4sinesweep.pcm
+diff ../dataqa350/a30_5speechmusic.pcm ../dataqa351/a30_5speechmusic.pcm
+diff ../dataqa350/a60_5speechmusic.pcm ../dataqa351/a60_5speechmusic.pcm
+diff ../dataqa350/a30_6tone_cisco.pcm ../dataqa351/a30_6tone_cisco.pcm
+diff ../dataqa350/a60_6tone_cisco.pcm ../dataqa351/a60_6tone_cisco.pcm
+diff ../dataqa350/a60_7tone_cisco.pcm ../dataqa351/a60_7tone_cisco.pcm
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/diffiSACPLC.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/diffiSACPLC.txt
new file mode 100644
index 0000000..9e3629b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/diffiSACPLC.txt
@@ -0,0 +1,20 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logplc.txt
+echo "START PLC TEST" > $LOGFILE
+
+OUTDIR1=../dataqaplc_0
+OUTDIR2=../dataqaplc_1
+
+diff $OUTDIR1/outplc1.pcm $OUTDIR2/outplc1.pcm 
+diff $OUTDIR1/outplc2.pcm $OUTDIR2/outplc2.pcm 
+diff $OUTDIR1/outplc3.pcm $OUTDIR2/outplc3.pcm 
+diff $OUTDIR1/outplc4.pcm $OUTDIR2/outplc4.pcm 
+diff $OUTDIR1/outplc5.pcm $OUTDIR2/outplc5.pcm 
+diff $OUTDIR1/outplc6.pcm $OUTDIR2/outplc6.pcm 
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACLongtest.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACLongtest.txt
new file mode 100644
index 0000000..eeffc0c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACLongtest.txt
@@ -0,0 +1,61 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logNormal.txt
+echo "START ISAC TEST" > $LOGFILE
+echo >> $LOGFILE
+
+ISAC=../Release/kenny.exe
+ISACFIXFLOAT=../Release/testFixFloat.exe
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+OUTDIR=../dataqa
+mkdir -p $OUTDIR
+
+TARGETRATE=(10000 15000 20000 25000 30000 32000)
+#echo ${CHANNELFILES[1]}
+
+index1=0
+index2=0
+
+for file in $INFILES # loop over all input files
+  do
+  
+  for rate in ${TARGETRATE[*]}
+	do
+	let "index1=index1+1"
+	$ISAC -I $rate -FL 30 $INDIR/"$file" $OUTDIR/i30_$index1"$file" >> $LOGFILE
+	$ISAC -I $rate -FL 60 $INDIR/"$file" $OUTDIR/i60_$index1"$file" >> $LOGFILE
+  done
+  for channel in $CHANNELFILES
+	do
+	let "index2=index2+1"
+	$ISAC $INDIR/$channel $INDIR/"$file" $OUTDIR/a$index2"$file" >> $LOGFILE
+  done
+
+done
+
+index1=0
+
+for file in $SUBSET # loop over the subset of input files
+  do
+	let "index1=index1+1"
+	$ISAC $INDIR/${CHANNELLIST[0]} -FL 30 -FIXED_FL $INDIR/"$file" $OUTDIR/a30_$index1"$file" >> $LOGFILE
+	$ISAC $INDIR/${CHANNELLIST[0]} -FL 60 -FIXED_FL $INDIR/"$file" $OUTDIR/a60_$index1"$file" >> $LOGFILE
+done
+
+let "index1=index1+1"	
+ $ISAC $INDIR/${CHANNELLIST[0]} -INITRATE 25000 -FL 30 $INDIR/"$file" $OUTDIR/a60_$index1"$file" >> $LOGFILE
+
+# Run fault test
+
+#./runiSACfault.txt
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACNB.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACNB.txt
new file mode 100644
index 0000000..605595c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACNB.txt
@@ -0,0 +1,45 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logNB.txt
+echo "START NARROWBAND TEST" > $LOGFILE
+echo >> $LOGFILE
+
+ISAC=../Release/kenny.exe
+ISACFIXFLOAT=../Release/testFixFloat.exe
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+OUTDIR=../dataqaNB
+mkdir -p $OUTDIR
+
+TARGETRATE=(10000 15000 20000 25000 30000 32000)
+#echo ${CHANNELFILES[1]}
+
+index1=0
+index2=0
+
+# Narrowband Interfaces
+
+for file in $SUBSET # loop over all input files
+  do
+  for rate in ${TARGETRATE[*]}
+	do
+	let "index1=index1+1"
+ 	$ISAC $rate -FL 30 -NB 1 $INDIR/"$file" $OUTDIR/nb130_$index1"$file" >> $LOGFILE
+	$ISAC $rate -FL 60 -NB 1 $INDIR/"$file" $OUTDIR/nb160_$index1"$file" >> $LOGFILE
+	$ISAC $rate -FL 30 -NB 2 $INDIR/"$file" $OUTDIR/nb230_$index1"$file" >> $LOGFILE
+	$ISAC $rate -FL 60 -NB 2 $INDIR/"$file" $OUTDIR/nb260_$index1"$file" >> $LOGFILE
+	$ISAC $rate -FL 30 -NB 2 -PL 10 $INDIR/"$file" $OUTDIR/nb2plc30_$index1"$file" >> $LOGFILE
+	$ISAC $rate -FL 60 -NB 2 -PL 10 $INDIR/"$file" $OUTDIR/nb2plc60_$index1"$file" >> $LOGFILE
+  done
+
+done
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACPLC.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACPLC.txt
new file mode 100644
index 0000000..6bee6f7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACPLC.txt
@@ -0,0 +1,23 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logplc.txt
+echo "START PLC TEST" > $LOGFILE
+
+ISAC=../Release/kenny.exe
+
+INDIR=../data/orig
+OUTDIR=../dataqaplc_0
+mkdir -p $OUTDIR
+
+$ISAC 12000 -PL 15 $INDIR/speechmusic.pcm $OUTDIR/outplc1.pcm 
+$ISAC 20000 -PL 15 $INDIR/speechmusic.pcm $OUTDIR/outplc2.pcm 
+$ISAC 32000 -PL 15 $INDIR/speechmusic.pcm $OUTDIR/outplc3.pcm 
+$ISAC 12000 -PL 15 $INDIR/tone_cisco.pcm $OUTDIR/outplc4.pcm 
+$ISAC 20000 -PL 15 $INDIR/tone_cisco.pcm $OUTDIR/outplc5.pcm 
+$ISAC 32000 -PL 15 $INDIR/tone_cisco.pcm $OUTDIR/outplc6.pcm 
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACRate.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACRate.txt
new file mode 100644
index 0000000..d8403e0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACRate.txt
@@ -0,0 +1,23 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGG=loggRate.txt
+OUTDIR=../dataqaRate
+mkdir -p $OUTDIR
+
+../Release/kenny.exe 13000 -FIXED_FL -FL 30 -MAX 100 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_1.pcm > $LOGG
+../Release/kenny.exe ../data/orig/bottlenecks.txt -FIXED_FL -FL 30 -MAXRATE 32000 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_2.pcm >> $LOGG
+../Release/kenny.exe 13000 -FIXED_FL -FL 30 -MAX 100 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_3.pcm >> $LOGG
+../Release/kenny.exe ../data/orig/bottlenecks.txt -FIXED_FL -FL 30 -MAXRATE 32000 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_4.pcm >> $LOGG
+../Release/kenny.exe 13000 -FIXED_FL -FL 60 -MAX 100 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_5.pcm >> $LOGG
+../Release/kenny.exe ../data/orig/bottlenecks.txt -FIXED_FL -FL 60 -MAXRATE 32000 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_6.pcm >> $LOGG
+../Release/kenny.exe 13000 -INIT_RATE 32000 -FIXED_FL -FL 60 -MAX 100 ../data/orig/sawsweep_380_60.pcm $OUTDIR/out_napi_7.pcm >> $LOGG
+
+../Release/kenny.exe 13000 -FIXED_FL -FL 30 -MAX 100 ../data/orig/longspeech.pcm $OUTDIR/out_napi_11.pcm >> $LOGG
+../Release/kenny.exe ../data/orig/bottlenecks.txt -FIXED_FL -FL 30 -MAXRATE 32000 ../data/orig/longspeech.pcm $OUTDIR/out_napi_12.pcm >> $LOGG
+../Release/kenny.exe 13000 -FIXED_FL -FL 30 -MAX 100 ../data/orig/longspeech.pcm $OUTDIR/out_napi_13.pcm >> $LOGG
+../Release/kenny.exe ../data/orig/bottlenecks.txt -FIXED_FL -FL 30 -MAXRATE 32000 ../data/orig/longspeech.pcm $OUTDIR/out_napi_14.pcm >> $LOGG
+../Release/kenny.exe 13000 -FIXED_FL -FL 60 -MAX 100 ../data/orig/longspeech.pcm $OUTDIR/out_napi_15.pcm >> $LOGG
+../Release/kenny.exe ../data/orig/bottlenecks.txt -FIXED_FL -FL 60 -MAXRATE 32000 ../data/orig/longspeech.pcm $OUTDIR/out_napi_16.pcm >> $LOGG
+../Release/kenny.exe 13000 -INIT_RATE 32000 -FIXED_FL -FL 60 -MAX 100 ../data/orig/longspeech.pcm $OUTDIR/out_napi_17.pcm >> $LOGG
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACfault.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACfault.txt
new file mode 100644
index 0000000..f4d9478
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACfault.txt
@@ -0,0 +1,40 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logfault.txt
+echo "START FAULT TEST" > $LOGFILE
+
+ISAC=../Release/kenny.exe
+ISACFIXFLOAT=../Release/testFixFloat.exe
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+OUTDIR=../dataqaft
+mkdir -p $OUTDIR
+
+TARGETRATE=(10000 15000 20000 25000 30000 32000)
+FAULTTEST=(1 2 3 4 5 6 7 9)
+
+index1=0
+
+file=wb_contspeech.pcm
+
+# Fault test
+for testnr in ${FAULTTEST[*]}
+   do
+	$ISAC 32000 -F $testnr $INDIR/"$file" $OUTDIR/ft$testnr"$file" >> $LOGFILE
+done
+
+# Fault test number 10, error in bitstream
+ $ISAC 32000 -F 10 $INDIR/"$file" $OUTDIR/ft10_"$file" >> $LOGFILE
+ $ISAC 32000 -F 10 -PL 10 $INDIR/"$file" $OUTDIR/ft10plc_"$file" >> $LOGFILE
+ $ISAC 32000 -F 10 -NB 1 $INDIR/"$file" $OUTDIR/ft10nb1_"$file" >> $LOGFILE
+ $ISAC 32000 -F 10 -NB 2 -PL 10 $INDIR/"$file" $OUTDIR/ft10nb2_"$file" >> $LOGFILE
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACfixfloat.txt b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACfixfloat.txt
new file mode 100644
index 0000000..c9e02df
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/QA/runiSACfixfloat.txt
@@ -0,0 +1,47 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logfxfl.txt
+echo "START FIX-FLOAT TEST" > $LOGFILE
+
+
+ISACFIXFLOAT=../testFixFloat.exe
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+OUTDIR=../dataqafxfl
+mkdir -p $OUTDIR
+
+index1=0
+
+for file in $INFILES # loop over all input files
+  do
+  
+  for channel in $CHANNELFILES
+	do
+	let "index1=index1+1"
+
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -PLC $INDIR/"$file" $OUTDIR/flfx$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -PLC $INDIR/"$file" $OUTDIR/fxfl$index1"$file" >> $LOGFILE
+  done
+
+done
+
+index1=0
+
+for file in $SUBSET # loop over the subset of input files
+  do
+	let "index1=index1+1"
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -NB 1 $INDIR/"$file" $OUTDIR/flfxnb1_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -NB 1 $INDIR/"$file" $OUTDIR/fxflnb1_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -NB 2 -PLC $INDIR/"$file" $OUTDIR/flfxnb2_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -NB 2 -PLC $INDIR/"$file" $OUTDIR/fxflnb2_$index1"$file" >> $LOGFILE
+done
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/kenny.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/kenny.c
new file mode 100644
index 0000000..1b9c44c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/kenny.c
@@ -0,0 +1,847 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* kenny.c  - Main function for the iSAC coder */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <ctype.h>
+
+#include "isacfix.h"
+
+
+/* Defines */
+#define SEED_FILE "randseed.txt"  /* Used when running decoder on garbage data */
+#define MAX_FRAMESAMPLES    960   /* max number of samples per frame (= 60 ms frame) */
+#define FRAMESAMPLES_10ms 160   /* number of samples per 10ms frame */
+#define FS           16000 /* sampling frequency (Hz) */
+
+/* Function for reading audio data from PCM file */
+int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
+
+  short k, rlen, status = 0;
+
+  rlen = fread(data, sizeof(WebRtc_Word16), length, inp);
+  if (rlen < length) {
+    for (k = rlen; k < length; k++)
+      data[k] = 0;
+    status = 1;
+  }
+
+  return status;
+}
+
+/* Struct for bottleneck model */
+typedef struct {
+  WebRtc_UWord32 send_time;            /* samples */
+  WebRtc_UWord32 arrival_time;         /* samples */
+  WebRtc_UWord32 sample_count;         /* samples */
+  WebRtc_UWord16 rtp_number;
+} BottleNeckModel;
+
+void get_arrival_time(int current_framesamples,   /* samples */
+                      int packet_size,            /* bytes */
+                      int bottleneck,             /* excluding headers; bits/s */
+                      BottleNeckModel *BN_data)
+{
+  const int HeaderSize = 35;
+  int HeaderRate;
+
+  HeaderRate = HeaderSize * 8 * FS / current_framesamples;     /* bits/s */
+
+  /* everything in samples */
+  BN_data->sample_count = BN_data->sample_count + current_framesamples;
+
+  BN_data->arrival_time += ((packet_size + HeaderSize) * 8 * FS) / (bottleneck + HeaderRate);
+  BN_data->send_time += current_framesamples;
+
+  if (BN_data->arrival_time < BN_data->sample_count)
+    BN_data->arrival_time = BN_data->sample_count;
+
+  BN_data->rtp_number++;
+}
+
+void get_arrival_time2(int current_framesamples,
+                       int current_delay,
+                       BottleNeckModel *BN_data)
+{
+  if (current_delay == -1)
+    //dropped packet
+  {
+    BN_data->arrival_time += current_framesamples;
+  }
+  else if (current_delay != -2)
+  {
+    //
+    BN_data->arrival_time += (current_framesamples + ((FS/1000) * current_delay));
+  }
+  //else
+  //current packet has same timestamp as previous packet
+
+  BN_data->rtp_number++;
+}
+
+int main(int argc, char* argv[])
+{
+
+  char inname[100], outname[100],  outbitsname[100], bottleneck_file[100];
+  FILE *inp, *outp, *f_bn, *outbits;
+  int endfile;
+
+  int i, errtype, h = 0, k, packetLossPercent = 0;
+  WebRtc_Word16 CodingMode;
+  WebRtc_Word16 bottleneck;
+  WebRtc_Word16 framesize = 30;           /* ms */
+  int cur_framesmpls, err = 0, lostPackets = 0;
+
+  /* Runtime statistics */
+  double starttime, runtime, length_file;
+
+  WebRtc_Word16 stream_len = 0;
+  WebRtc_Word16 framecnt, declen = 0;
+  WebRtc_Word16 shortdata[FRAMESAMPLES_10ms];
+  WebRtc_Word16 decoded[MAX_FRAMESAMPLES];
+  WebRtc_UWord16 streamdata[500];
+  WebRtc_Word16 speechType[1];
+  WebRtc_Word16 prevFrameSize = 1;
+  WebRtc_Word16 rateBPS = 0;
+  WebRtc_Word16 fixedFL = 0;
+  WebRtc_Word16 payloadSize = 0;
+  WebRtc_Word32 payloadRate = 0;
+  int setControlBWE = 0;
+  int readLoss;
+  FILE  *plFile = NULL;
+
+  char version_number[20];
+  char tmpBit[5] = ".bit";
+
+  double kbps;
+  int totalbits =0;
+  int totalsmpls =0;
+#ifdef _DEBUG
+  FILE *fy;
+#endif
+  WebRtc_Word16 testNum, testCE;
+
+  FILE *fp_gns = NULL;
+  int gns = 0;
+  int cur_delay = 0;
+  char gns_file[100];
+
+  int nbTest = 0;
+  WebRtc_Word16 lostFrame;
+  float scale = (float)0.7;
+  /* only one structure used for ISAC encoder */
+  ISACFIX_MainStruct *ISAC_main_inst;
+
+  /* For fault test 10, garbage data */
+  FILE *seedfile;
+  unsigned int random_seed = (unsigned int) time(NULL);//1196764538
+
+  BottleNeckModel       BN_data;
+  f_bn  = NULL;
+
+#ifdef _DEBUG
+  fy = fopen("bit_rate.dat", "w");
+  fclose(fy);
+  fy = fopen("bytes_frames.dat", "w");
+  fclose(fy);
+#endif
+
+  readLoss = 0;
+  packetLossPercent = 0;
+
+  /* Handling wrong input arguments in the command line */
+  if ((argc<3) || (argc>21))  {
+    printf("\n\nWrong number of arguments or flag values.\n\n");
+
+    printf("\n");
+    WebRtcIsacfix_version(version_number);
+    printf("iSAC version %s \n\n", version_number);
+
+    printf("Usage:\n\n");
+    printf("./kenny.exe [-F num][-I] bottleneck_value infile outfile \n\n");
+    printf("with:\n");
+    printf("[-I]             :if -I option is specified, the coder will use\n");
+    printf("                  an instantaneous Bottleneck value. If not, it\n");
+    printf("                  will be an adaptive Bottleneck value.\n\n");
+    printf("bottleneck_value :the value of the bottleneck provided either\n");
+    printf("                  as a fixed value (e.g. 25000) or\n");
+    printf("                  read from a file (e.g. bottleneck.txt)\n\n");
+    printf("[-INITRATE num]  :Set a new value for initial rate. Note! Only used"
+           " in adaptive mode.\n\n");
+    printf("[-FL num]        :Set (initial) frame length in msec. Valid length"
+           " are 30 and 60 msec.\n\n");
+    printf("[-FIXED_FL]      :Frame length will be fixed to initial value.\n\n");
+    printf("[-MAX num]       :Set the limit for the payload size of iSAC"
+           " in bytes. \n");
+    printf("                  Minimum 100, maximum 400.\n\n");
+    printf("[-MAXRATE num]   :Set the maxrate for iSAC in bits per second. \n");
+    printf("                  Minimum 32000, maximum 53400.\n\n");
+    printf("[-F num]         :if -F option is specified, the test function\n");
+    printf("                  will run the iSAC API fault scenario specified"
+           " by the\n");
+    printf("                  supplied number.\n");
+    printf("                  F 1 - Call encoder prior to init encoder call\n");
+    printf("                  F 2 - Call decoder prior to init decoder call\n");
+    printf("                  F 3 - Call decoder prior to encoder call\n");
+    printf("                  F 4 - Call decoder with a too short coded"
+           " sequence\n");
+    printf("                  F 5 - Call decoder with a too long coded"
+           " sequence\n");
+    printf("                  F 6 - Call decoder with random bit stream\n");
+    printf("                  F 7 - Call init encoder/decoder at random"
+           " during a call\n");
+    printf("                  F 8 - Call encoder/decoder without having"
+           " allocated memory for \n");
+    printf("                        encoder/decoder instance\n");
+    printf("                  F 9 - Call decodeB without calling decodeA\n");
+    printf("                  F 10 - Call decodeB with garbage data\n");
+    printf("[-PL num]       : if -PL option is specified 0<num<100 will "
+           "specify the\n");
+    printf("                  percentage of packet loss\n\n");
+    printf("[-G file]       : if -G option is specified the file given is"
+           " a .gns file\n");
+    printf("                  that represents a network profile\n\n");
+    printf("[-NB num]       : if -NB option, use the narrowband interfaces\n");
+    printf("                  num=1 => encode with narrowband encoder"
+           " (infile is narrowband)\n");
+    printf("                  num=2 => decode with narrowband decoder"
+           " (outfile is narrowband)\n\n");
+    printf("[-CE num]       : Test of APIs used by Conference Engine.\n");
+    printf("                  CE 1 - createInternal, freeInternal,"
+           " getNewBitstream \n");
+    printf("                  CE 2 - transcode, getBWE \n");
+    printf("                  CE 3 - getSendBWE, setSendBWE.  \n\n");
+    printf("[-RTP_INIT num] : if -RTP_INIT option is specified num will be"
+           " the initial\n");
+    printf("                  value of the rtp sequence number.\n\n");
+    printf("infile          : Normal speech input file\n\n");
+    printf("outfile         : Speech output file\n\n");
+    printf("Example usage   : \n\n");
+    printf("./kenny.exe -I bottleneck.txt speechIn.pcm speechOut.pcm\n\n");
+    exit(0);
+
+  }
+
+  /* Print version number */
+  WebRtcIsacfix_version(version_number);
+  printf("iSAC version %s \n\n", version_number);
+
+  /* Loop over all command line arguments */
+  CodingMode = 0;
+  testNum = 0;
+  testCE = 0;
+  for (i = 1; i < argc-2;i++) {
+    /* Instantaneous mode */
+    if (!strcmp ("-I", argv[i])) {
+      printf("\nInstantaneous BottleNeck\n");
+      CodingMode = 1;
+      i++;
+    }
+
+    /* Set (initial) bottleneck value */
+    if (!strcmp ("-INITRATE", argv[i])) {
+      rateBPS = atoi(argv[i + 1]);
+      setControlBWE = 1;
+      if ((rateBPS < 10000) || (rateBPS > 32000)) {
+        printf("\n%d is not a initial rate. "
+               "Valid values are in the range 10000 to 32000.\n", rateBPS);
+        exit(0);
+      }
+      printf("\nNew initial rate: %d\n", rateBPS);
+      i++;
+    }
+
+    /* Set (initial) framelength */
+    if (!strcmp ("-FL", argv[i])) {
+      framesize = atoi(argv[i + 1]);
+      if ((framesize != 30) && (framesize != 60)) {
+        printf("\n%d is not a valid frame length. "
+               "Valid length are 30 and 60 msec.\n", framesize);
+        exit(0);
+      }
+      printf("\nFrame Length: %d\n", framesize);
+      i++;
+    }
+
+    /* Fixed frame length */
+    if (!strcmp ("-FIXED_FL", argv[i])) {
+      fixedFL = 1;
+      setControlBWE = 1;
+    }
+
+    /* Set maximum allowed payload size in bytes */
+    if (!strcmp ("-MAX", argv[i])) {
+      payloadSize = atoi(argv[i + 1]);
+      printf("Maximum Payload Size: %d\n", payloadSize);
+      i++;
+    }
+
+    /* Set maximum rate in bytes */
+    if (!strcmp ("-MAXRATE", argv[i])) {
+      payloadRate = atoi(argv[i + 1]);
+      printf("Maximum Rate in kbps: %d\n", payloadRate);
+      i++;
+    }
+
+    /* Test of fault scenarios */
+    if (!strcmp ("-F", argv[i])) {
+      testNum = atoi(argv[i + 1]);
+      printf("\nFault test: %d\n", testNum);
+      if (testNum < 1 || testNum > 10) {
+        printf("\n%d is not a valid Fault Scenario number."
+               " Valid Fault Scenarios are numbered 1-10.\n", testNum);
+        exit(0);
+      }
+      i++;
+    }
+
+    /* Packet loss test */
+    if (!strcmp ("-PL", argv[i])) {
+      if( isdigit( *argv[i+1] ) ) {
+        packetLossPercent = atoi( argv[i+1] );
+        if( (packetLossPercent < 0) | (packetLossPercent > 100) ) {
+          printf( "\nInvalid packet loss perentage \n" );
+          exit( 0 );
+        }
+        if( packetLossPercent > 0 ) {
+          printf( "\nSimulating %d %% of independent packet loss\n",
+                  packetLossPercent );
+        } else {
+          printf( "\nNo Packet Loss Is Simulated \n" );
+        }
+        readLoss = 0;
+      } else {
+        readLoss = 1;
+        plFile = fopen( argv[i+1], "rb" );
+        if( plFile == NULL ) {
+          printf( "\n couldn't open the frameloss file: %s\n", argv[i+1] );
+          exit( 0 );
+        }
+        printf( "\nSimulating packet loss through the given "
+                "channel file: %s\n", argv[i+1] );
+      }
+      i++;
+    }
+
+    /* Random packetlosses */
+    if (!strcmp ("-rnd", argv[i])) {
+      srand(time(NULL) );
+      printf( "\n Random pattern in lossed packets \n" );
+    }
+
+    /* Use gns file */
+    if (!strcmp ("-G", argv[i])) {
+      sscanf(argv[i + 1], "%s", gns_file);
+      fp_gns = fopen(gns_file, "rb");
+      if (fp_gns  == NULL) {
+        printf("Cannot read file %s.\n", gns_file);
+        exit(0);
+      }
+      gns = 1;
+      i++;
+    }
+
+    /* Run Narrowband interfaces (either encoder or decoder) */
+    if (!strcmp ("-NB", argv[i])) {
+      nbTest = atoi(argv[i + 1]);
+      i++;
+    }
+
+    /* Run Conference Engine APIs */
+    if (!strcmp ("-CE", argv[i])) {
+      testCE = atoi(argv[i + 1]);
+      if (testCE==1 || testCE==2) {
+        i++;
+        scale = (float)atof( argv[i+1] );
+      } else if (testCE < 1 || testCE > 3) {
+        printf("\n%d is not a valid CE-test number, valid Fault "
+               "Scenarios are numbered 1-3\n", testCE);
+        exit(0);
+      }
+      i++;
+    }
+
+    /* Set initial RTP number */
+    if (!strcmp ("-RTP_INIT", argv[i])) {
+      i++;
+    }
+  }
+
+  /* Get Bottleneck value                                                   */
+  /* Gns files and bottleneck should not and can not be used simultaneously */
+  bottleneck = atoi(argv[CodingMode+1]);
+  if (bottleneck == 0 && gns == 0) {
+    sscanf(argv[CodingMode+1], "%s", bottleneck_file);
+    f_bn = fopen(bottleneck_file, "rb");
+    if (f_bn  == NULL) {
+      printf("No value provided for BottleNeck and cannot read file %s\n", bottleneck_file);
+      exit(0);
+    } else {
+      int aux_var;
+      printf("reading bottleneck rates from file %s\n\n",bottleneck_file);
+      if (fscanf(f_bn, "%d", &aux_var) == EOF) {
+        /* Set pointer to beginning of file */
+        fseek(f_bn, 0L, SEEK_SET);
+        if (fscanf(f_bn, "%d", &aux_var) == EOF) {
+          exit(0);
+        }
+      }
+      bottleneck = (WebRtc_Word16)aux_var;
+      /* Bottleneck is a cosine function
+       * Matlab code for writing the bottleneck file:
+       * BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi);
+       * fid = fopen('bottleneck.txt', 'wb');
+       * fprintf(fid, '%d\n', BottleNeck_10ms); fclose(fid);
+       */
+    }
+  } else {
+    f_bn = NULL;
+    printf("\nfixed bottleneck rate of %d bits/s\n\n", bottleneck);
+  }
+
+  if (CodingMode == 0) {
+    printf("\nAdaptive BottleNeck\n");
+  }
+
+  /* Get Input and Output files */
+  sscanf(argv[argc-2], "%s", inname);
+  sscanf(argv[argc-1], "%s", outname);
+
+  /* Add '.bit' to output bitstream file */
+  while ((int)outname[h] != 0) {
+    outbitsname[h] = outname[h];
+    h++;
+  }
+  for (k=0; k<5; k++) {
+    outbitsname[h] = tmpBit[k];
+    h++;
+  }
+  if ((inp = fopen(inname,"rb")) == NULL) {
+    printf("  iSAC: Cannot read file %s\n", inname);
+    exit(1);
+  }
+  if ((outp = fopen(outname,"wb")) == NULL) {
+    printf("  iSAC: Cannot write file %s\n", outname);
+    exit(1);
+  }
+
+  if ((outbits = fopen(outbitsname,"wb")) == NULL) {
+    printf("  iSAC: Cannot write file %s\n", outbitsname);
+    exit(1);
+  }
+  printf("\nInput:%s\nOutput:%s\n\n", inname, outname);
+
+  /* Error test number 10, garbage data */
+  if (testNum == 10) {
+    /* Test to run decoder with garbage data */
+    srand(random_seed);
+
+    if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+      printf("Error: Could not open file %s\n", SEED_FILE);
+    }
+    else {
+      fprintf(seedfile, "%u\n", random_seed);
+      fclose(seedfile);
+    }
+  }
+
+  /* Runtime statistics */
+  starttime = clock()/(double)CLOCKS_PER_SEC;
+
+  /* Initialize the ISAC and BN structs */
+  if (testNum != 8)
+  {
+    if(1){
+      err =WebRtcIsacfix_Create(&ISAC_main_inst);
+    }else{
+      /* Test the Assign functions */
+      int sss;
+      void *ppp;
+      err =WebRtcIsacfix_AssignSize(&sss);
+      ppp=malloc(sss);
+      err =WebRtcIsacfix_Assign(&ISAC_main_inst,ppp);
+    }
+    /* Error check */
+    if (err < 0) {
+      printf("\n\n Error in create.\n\n");
+    }
+    if (testCE == 1) {
+      err = WebRtcIsacfix_CreateInternal(ISAC_main_inst);
+      /* Error check */
+      if (err < 0) {
+        printf("\n\n Error in createInternal.\n\n");
+      }
+    }
+  }
+
+  /* Init of bandwidth data */
+  BN_data.send_time     = 0;
+  BN_data.arrival_time  = 0;
+  BN_data.sample_count  = 0;
+  BN_data.rtp_number    = 0;
+
+  /* Initialize encoder and decoder */
+  framecnt= 0;
+  endfile = 0;
+  if (testNum != 1) {
+    WebRtcIsacfix_EncoderInit(ISAC_main_inst, CodingMode);
+  }
+  if (testNum != 2) {
+    WebRtcIsacfix_DecoderInit(ISAC_main_inst);
+  }
+
+  if (CodingMode == 1) {
+    err = WebRtcIsacfix_Control(ISAC_main_inst, bottleneck, framesize);
+    if (err < 0) {
+      /* exit if returned with error */
+      errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+      printf("\n\n Error in control: %d.\n\n", errtype);
+    }
+  } else if(setControlBWE == 1) {
+    err = WebRtcIsacfix_ControlBwe(ISAC_main_inst, rateBPS, framesize, fixedFL);
+  }
+
+  if (payloadSize != 0) {
+    err = WebRtcIsacfix_SetMaxPayloadSize(ISAC_main_inst, payloadSize);
+    if (err < 0) {
+      /* exit if returned with error */
+      errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+      printf("\n\n Error in SetMaxPayloadSize: %d.\n\n", errtype);
+      exit(EXIT_FAILURE);
+    }
+  }
+  if (payloadRate != 0) {
+    err = WebRtcIsacfix_SetMaxRate(ISAC_main_inst, payloadRate);
+    if (err < 0) {
+      /* exit if returned with error */
+      errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+      printf("\n\n Error in SetMaxRateInBytes: %d.\n\n", errtype);
+      exit(EXIT_FAILURE);
+    }
+  }
+
+  *speechType = 1;
+
+
+  while (endfile == 0) {
+
+    if(testNum == 7 && (rand()%2 == 0)) {
+      err = WebRtcIsacfix_EncoderInit(ISAC_main_inst, CodingMode);
+      /* Error check */
+      if (err < 0) {
+        errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+        printf("\n\n Error in encoderinit: %d.\n\n", errtype);
+      }
+
+      err = WebRtcIsacfix_DecoderInit(ISAC_main_inst);
+      /* Error check */
+      if (err < 0) {
+        errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+        printf("\n\n Error in decoderinit: %d.\n\n", errtype);
+      }
+    }
+
+
+    cur_framesmpls = 0;
+    while (1) {
+      /* Read 10 ms speech block */
+      if (nbTest != 1) {
+        endfile = readframe(shortdata, inp, FRAMESAMPLES_10ms);
+      } else {
+        endfile = readframe(shortdata, inp, (FRAMESAMPLES_10ms/2));
+      }
+
+      if (testNum == 7) {
+        srand(time(NULL));
+      }
+
+      /* iSAC encoding */
+      if (!(testNum == 3 && framecnt == 0)) {
+        if (nbTest != 1) {
+          short bwe;
+
+          /* Encode */
+          stream_len = WebRtcIsacfix_Encode(ISAC_main_inst,
+                                            shortdata,
+                                            (WebRtc_Word16*)streamdata);
+
+          /* If packet is ready, and CE testing, call the different API functions
+             from the internal API.                       */
+          if (stream_len>0) {
+            if (testCE == 1) {
+              err = WebRtcIsacfix_ReadBwIndex((WebRtc_Word16*)streamdata, &bwe);
+              stream_len = WebRtcIsacfix_GetNewBitStream(
+                  ISAC_main_inst,
+                  bwe,
+                  scale,
+                  (WebRtc_Word16*)streamdata);
+            } else if (testCE == 2) {
+              /* transcode function not supported */
+            } else if (testCE == 3) {
+              /* Only for Function testing. The functions should normally
+                 not be used in this way                                      */
+
+              err = WebRtcIsacfix_GetDownLinkBwIndex(ISAC_main_inst, &bwe);
+              /* Error Check */
+              if (err < 0) {
+                errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+                printf("\nError in getSendBWE: %d.\n", errtype);
+              }
+
+              err = WebRtcIsacfix_UpdateUplinkBw(ISAC_main_inst, bwe);
+              /* Error Check */
+              if (err < 0) {
+                errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+                printf("\nError in setBWE: %d.\n", errtype);
+              }
+
+            }
+          }
+        } else {
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+          stream_len = WebRtcIsacfix_EncodeNb(ISAC_main_inst,
+                                              shortdata,
+                                              streamdata);
+#else
+          stream_len = -1;
+#endif
+        }
+      }
+      else
+      {
+        break;
+      }
+
+      if (stream_len < 0 || err < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+        printf("\nError in encoder: %d.\n", errtype);
+      } else {
+        fwrite(streamdata, sizeof(char), stream_len, outbits);
+      }
+
+      cur_framesmpls += FRAMESAMPLES_10ms;
+
+      /* read next bottleneck rate */
+      if (f_bn != NULL) {
+        int aux_var;
+        if (fscanf(f_bn, "%d", &aux_var) == EOF) {
+          /* Set pointer to beginning of file */
+          fseek(f_bn, 0L, SEEK_SET);
+          if (fscanf(f_bn, "%d", &aux_var) == EOF) {
+            exit(0);
+          }
+        }
+        bottleneck = (WebRtc_Word16)aux_var;
+        if (CodingMode == 1) {
+          WebRtcIsacfix_Control(ISAC_main_inst, bottleneck, framesize);
+        }
+      }
+
+      /* exit encoder loop if the encoder returned a bitstream */
+      if (stream_len != 0) break;
+    }
+
+    /* make the coded sequence too short by increasing */
+    /* the length the decoder expects */
+    if (testNum == 4) {
+      stream_len += 10;
+    }
+
+    /* make the coded sequence too long by decreasing */
+    /* the length the decoder expects */
+    if (testNum == 5) {
+      stream_len -= 10;
+    }
+
+    if (testNum == 6) {
+      srand(time(NULL));
+      for (i = 0; i < stream_len; i++ ) {
+        streamdata[i] = rand();
+      }
+    }
+
+    /* set pointer to beginning of file */
+    if (fp_gns != NULL) {
+      if (fscanf(fp_gns, "%d", &cur_delay) == EOF) {
+        fseek(fp_gns, 0L, SEEK_SET);
+        if (fscanf(fp_gns, "%d", &cur_delay) == EOF) {
+          exit(0);
+        }
+      }
+    }
+
+    /* simulate packet handling through NetEq and the modem */
+    if (!(testNum == 3 && framecnt == 0)) {
+      if (gns == 0) {
+        get_arrival_time(cur_framesmpls, stream_len, bottleneck,
+                         &BN_data);
+      } else {
+        get_arrival_time2(cur_framesmpls, cur_delay, &BN_data);
+      }
+    }
+
+    /* packet not dropped */
+    if (cur_delay != -1) {
+
+      /* Error test number 10, garbage data */
+      if (testNum == 10) {
+        for ( i = 0; i < stream_len; i++) {
+          streamdata[i] = (short) (streamdata[i] + (short) rand());
+        }
+      }
+
+      if (testNum != 9) {
+        err = WebRtcIsacfix_UpdateBwEstimate(ISAC_main_inst,
+                                             streamdata,
+                                             stream_len,
+                                             BN_data.rtp_number,
+                                             BN_data.send_time,
+                                             BN_data.arrival_time);
+
+        if (err < 0) {
+          /* exit if returned with error */
+          errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+          printf("\nError in decoder: %d.\n", errtype);
+        }
+      }
+#ifdef _DEBUG
+      fprintf(stderr,"  \rframe = %7d", framecnt);
+#endif
+
+      if( readLoss == 1 ) {
+        if( fread( &lostFrame, sizeof(WebRtc_Word16), 1, plFile ) != 1 ) {
+          rewind( plFile );
+        }
+        lostFrame = !lostFrame;
+      } else {
+        lostFrame = (rand()%100 < packetLossPercent);
+      }
+
+
+
+      /* iSAC decoding */
+      if( lostFrame && framecnt >  0) {
+        if (nbTest !=2) {
+          declen = WebRtcIsacfix_DecodePlc(ISAC_main_inst,
+                                           decoded, prevFrameSize );
+        } else {
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+          declen = WebRtcIsacfix_DecodePlcNb(ISAC_main_inst, decoded,
+                                             prevFrameSize );
+#else
+          declen = -1;
+#endif
+        }
+        lostPackets++;
+      } else {
+        if (nbTest !=2 ) {
+          short FL;
+          /* Call getFramelen, only used here for function test */
+          err = WebRtcIsacfix_ReadFrameLen((WebRtc_Word16*)streamdata, &FL);
+          declen = WebRtcIsacfix_Decode( ISAC_main_inst, streamdata, stream_len,
+                                         decoded, speechType );
+          /* Error check */
+          if (err<0 || declen<0 || FL!=declen) {
+            errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+            printf("\nError in decode_B/or getFrameLen: %d.\n", errtype);
+          }
+          prevFrameSize = declen/480;
+
+        } else {
+#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
+          declen = WebRtcIsacfix_DecodeNb( ISAC_main_inst, streamdata,
+                                           stream_len, decoded, speechType );
+#else
+          declen = -1;
+#endif
+          prevFrameSize = declen/240;
+        }
+      }
+
+      if (declen <= 0) {
+        /* exit if returned with error */
+        errtype=WebRtcIsacfix_GetErrorCode(ISAC_main_inst);
+        printf("\nError in decoder: %d.\n", errtype);
+      }
+
+      /* Write decoded speech frame to file */
+      fwrite(decoded, sizeof(WebRtc_Word16), declen, outp);
+      //   fprintf( ratefile, "%f \n", stream_len / ( ((double)declen)/
+      // ((double)FS) ) * 8 );
+    } else {
+      lostPackets++;
+    }
+    framecnt++;
+
+    totalsmpls += declen;
+    totalbits += 8 * stream_len;
+    kbps = ((double) FS) / ((double) cur_framesmpls) * 8.0 *
+        stream_len / 1000.0;// kbits/s
+
+    /* Error test number 10, garbage data */
+    if (testNum == 10) {
+      if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+        printf( "Error: Could not open file %s\n", SEED_FILE);
+      }
+      else {
+        fprintf(seedfile, "ok\n\n");
+        fclose(seedfile);
+      }
+    }
+
+#ifdef _DEBUG
+
+    fy = fopen("bit_rate.dat", "a");
+    fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
+    fclose(fy);
+
+#endif /* _DEBUG */
+
+  }
+  printf("\nLost Frames %d ~ %4.1f%%\n", lostPackets,
+         (double)lostPackets/(double)framecnt*100.0 );
+  printf("\n\ntotal bits                          = %d bits", totalbits);
+  printf("\nmeasured average bitrate              = %0.3f kbits/s",
+         (double)totalbits *(FS/1000) / totalsmpls);
+  printf("\n");
+
+#ifdef _DEBUG
+  /* fprintf(stderr,"\n\ntotal bits    = %d bits", totalbits);
+     fprintf(stderr,"\nmeasured average bitrate  = %0.3f kbits/s",
+     (double)totalbits *(FS/1000) / totalsmpls);
+     fprintf(stderr,"\n");
+  */
+#endif /* _DEBUG */
+
+  /* Runtime statistics */
+
+
+  runtime = (double)(((double)clock()/(double)CLOCKS_PER_SEC)-starttime);
+  length_file = ((double)framecnt*(double)declen/FS);
+  printf("\n\nLength of speech file: %.1f s\n", length_file);
+  printf("Time to run iSAC:      %.2f s (%.2f %% of realtime)\n\n",
+         runtime, (100*runtime/length_file));
+  printf("\n\n_______________________________________________\n");
+
+  fclose(inp);
+  fclose(outp);
+  fclose(outbits);
+
+  if ( testCE == 1) {
+    WebRtcIsacfix_FreeInternal(ISAC_main_inst);
+  }
+  WebRtcIsacfix_Free(ISAC_main_inst);
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/test_iSACfixfloat.c b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/test_iSACfixfloat.c
new file mode 100644
index 0000000..57c30ca
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/fix/test/test_iSACfixfloat.c
@@ -0,0 +1,693 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * test_iSACfixfloat.c
+ *
+ * Test compatibility and quality between floating- and fixed-point code
+ * */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+/* include API */
+#include "isac.h"
+#include "isacfix.h"
+
+
+/* max number of samples per frame (= 60 ms frame) */
+#define MAX_FRAMESAMPLES				960
+/* number of samples per 10ms frame */
+#define FRAMESAMPLES_10ms				160
+/* sampling frequency (Hz) */
+#define FS								16000
+
+
+
+/* Runtime statistics */
+#include <time.h>
+#define CLOCKS_PER_SEC  1000
+
+
+
+//	FILE *histfile, *ratefile;
+
+
+/* function for reading audio data from PCM file */
+/* Read `length` 16-bit PCM samples from `inp` into `data`.
+ * On a short read (end of file) the remainder of the buffer is
+ * zero-padded. Returns 0 while more data remains, 1 at end of file. */
+int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
+	
+	short pos, num_read, at_end = 0;
+	
+	num_read = fread(data, sizeof(WebRtc_Word16), length, inp);
+	if (num_read < length) {
+		/* Short read: pad the tail with silence and flag EOF. */
+		for (pos = num_read; pos < length; pos++)
+			data[pos] = 0;
+		at_end = 1;
+	}
+	
+	return at_end;
+}
+
+/* State of the simulated bottleneck channel. All time-related fields
+ * are expressed in samples at the codec sampling rate (FS). */
+typedef struct {
+	WebRtc_UWord32 send_time;            /* time the packet was sent (samples) */
+	WebRtc_UWord32 arrival_time;            /* time the packet arrives at the receiver (samples) */
+	WebRtc_UWord32 sample_count;            /* total audio samples produced so far */
+	WebRtc_UWord16 rtp_number;            /* RTP sequence number of the next packet */
+} BottleNeckModel;
+
+/* Advance the bottleneck-channel model by one packet.
+ * current_framesamples: audio covered by this packet (samples)
+ * packet_size:          payload size (bytes)
+ * bottleneck:           channel rate excluding headers (bits/s)
+ * BN_data:              model state, updated in place (times in samples) */
+void get_arrival_time(int current_framesamples,   /* samples */
+					  int packet_size,            /* bytes */
+					  int bottleneck,             /* excluding headers; bits/s */
+					  BottleNeckModel *BN_data)
+{
+	const int header_bytes = 35;
+	int header_bps;
+
+	/* Per-packet header overhead expressed as a bit rate for this frame size. */
+	header_bps = header_bytes * 8 * FS / current_framesamples;     /* bits/s */
+
+	/* All bookkeeping below is in samples. */
+	BN_data->sample_count += current_framesamples;
+	BN_data->send_time += current_framesamples;
+
+	/* Transmission time of payload + header over the effective channel rate. */
+	BN_data->arrival_time += ((packet_size + header_bytes) * 8 * FS) / (bottleneck + header_bps);
+
+	/* A packet cannot arrive before its audio has been produced. */
+	if (BN_data->arrival_time < BN_data->sample_count)
+		BN_data->arrival_time = BN_data->sample_count;
+
+	BN_data->rtp_number++;
+}
+
+
+
+int main(int argc, char* argv[])
+{
+
+	char inname[50], outname[50], bottleneck_file[50], bitfilename[60], bitending[10]="_bits.pcm";	
+	FILE *inp, *outp, *f_bn, *bitsp;
+	int framecnt, endfile;
+
+
+	int i,j,errtype, plc=0;
+	WebRtc_Word16 CodingMode;
+	WebRtc_Word16 bottleneck;
+
+	WebRtc_Word16 framesize = 30;           /* ms */
+    //WebRtc_Word16 framesize = 60; /* To invoke cisco complexity case at frame 2252 */
+	
+	int cur_framesmpls, err;
+	
+	/* Runtime statistics */
+	double starttime;
+	double runtime;
+	double length_file;
+	
+	WebRtc_Word16 stream_len = 0;
+	WebRtc_Word16 declen;
+	
+	WebRtc_Word16 shortdata[FRAMESAMPLES_10ms];
+	WebRtc_Word16 decoded[MAX_FRAMESAMPLES];
+	WebRtc_UWord16 streamdata[600];
+	WebRtc_Word16	speechType[1];
+	
+//	WebRtc_Word16	*iSACstruct;
+
+	char version_number[20];
+	int mode=-1, tmp, nbTest=0; /*,sss;*/
+
+#ifdef _DEBUG
+	FILE *fy;
+	double kbps;
+	int totalbits =0;
+	int totalsmpls =0;
+#endif /* _DEBUG */
+
+
+
+
+	/* only one structure used for ISAC encoder */
+	ISAC_MainStruct *ISAC_main_inst;
+	ISACFIX_MainStruct *ISACFIX_main_inst;
+
+	BottleNeckModel       BN_data;
+	f_bn  = NULL;
+
+#ifdef _DEBUG
+	fy = fopen("bit_rate.dat", "w");
+	fclose(fy);
+	fy = fopen("bytes_frames.dat", "w");
+	fclose(fy);
+#endif /* _DEBUG */
+
+
+//histfile = fopen("histo.dat", "ab");
+//ratefile = fopen("rates.dat", "ab");
+
+	/* handling wrong input arguments in the command line */
+	if ((argc<6) || (argc>10))  {
+		printf("\n\nWrong number of arguments or flag values.\n\n");
+
+		printf("\n");
+		WebRtcIsacfix_version(version_number);
+		printf("iSAC version %s \n\n", version_number);
+
+		printf("Usage:\n\n");
+		printf("./kenny.exe [-I] bottleneck_value infile outfile \n\n");
+		printf("with:\n");
+
+		printf("[-I]			:	if -I option is specified, the coder will use\n");
+		printf("				an instantaneous Bottleneck value. If not, it\n");
+		printf("				will be an adaptive Bottleneck value.\n\n");
+		printf("bottleneck_value	:	the value of the bottleneck provided either\n");
+		printf("				as a fixed value (e.g. 25000) or\n");
+		printf("				read from a file (e.g. bottleneck.txt)\n\n");
+		printf("[-m] mode		: Mode (encoder - decoder):\n");
+		printf("				:		0 - float - float \n");
+		printf("				:		1 - float - fix \n");
+		printf("				:		2 - fix - float \n");
+		printf("				:		3 - fix - fix \n");
+		printf("[-PLC]	 		:	Test PLC packetlosses\n");
+		printf("[-NB] num		:	Test NB interfaces, num=1 encNB, num=2 decNB\n");
+		printf("infile			:	Normal speech input file\n\n");
+		printf("outfile			:	Speech output file\n\n");
+		printf("Example usage:\n\n");
+		printf("./kenny.exe -I bottleneck.txt -m 1 speechIn.pcm speechOut.pcm\n\n");
+		exit(0);
+
+	} 
+	
+	
+	printf("--------------------START---------------------\n\n");
+	WebRtcIsac_version(version_number);
+	printf("iSAC FLOAT version %s \n", version_number);
+	WebRtcIsacfix_version(version_number);
+	printf("iSAC FIX version   %s \n\n", version_number);
+
+	CodingMode = 0;
+	tmp=1;
+	for (i = 1; i < argc;i++)
+	{
+		if (!strcmp ("-I", argv[i]))
+		{
+			printf("\nInstantaneous BottleNeck\n");
+			CodingMode = 1;
+			i++;
+			tmp=0;
+		} 
+
+		if (!strcmp ("-m", argv[i])) {
+			mode=atoi(argv[i+1]);
+			i++;
+		}		
+
+		if (!strcmp ("-PLC", argv[i]))
+		{
+			plc=1;
+		}
+		
+		if (!strcmp ("-NB", argv[i]))
+		{
+			nbTest = atoi(argv[i + 1]);
+			i++;
+		}
+			  
+	}
+	
+	if(mode<0) {
+		printf("\nError! Mode must be set: -m 0 \n");
+		exit(0);
+	}
+	
+	if (CodingMode == 0)
+	{
+		printf("\nAdaptive BottleNeck\n");
+	}
+
+
+
+	/* Get Bottleneck value */
+	bottleneck = atoi(argv[2-tmp]);
+	if (bottleneck == 0)
+	{
+		sscanf(argv[2-tmp], "%s", bottleneck_file);
+		f_bn = fopen(bottleneck_file, "rb");
+		if (f_bn  == NULL)
+		{
+			printf("No value provided for BottleNeck and cannot read file %s.\n", bottleneck_file);
+			exit(0);
+		}
+		else {
+			printf("reading bottleneck rates from file %s\n\n",bottleneck_file);
+			if (fscanf(f_bn, "%d", &bottleneck) == EOF) {
+					/* Set pointer to beginning of file */
+					fseek(f_bn, 0L, SEEK_SET);
+					fscanf(f_bn, "%d", &bottleneck);
+			}		
+
+			/*	Bottleneck is a cosine function 
+			*	Matlab code for writing the bottleneck file:
+			*	BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi);
+			*	fid = fopen('bottleneck.txt', 'wb');
+			*	fprintf(fid, '%d\n', BottleNeck_10ms); fclose(fid);
+			*/
+		}
+	}
+	else 
+	{
+		printf("\nfixed bottleneck rate of %d bits/s\n\n", bottleneck);
+	}
+
+
+
+	/* Get Input and Output files */
+	sscanf(argv[argc-2], "%s", inname);
+	sscanf(argv[argc-1], "%s", outname);
+	
+	if ((inp = fopen(inname,"rb")) == NULL) {
+		printf("  iSAC: Cannot read file %s.\n", inname);
+		exit(1);
+	}
+	if ((outp = fopen(outname,"wb")) == NULL) {
+		printf("  iSAC: Cannot write file %s.\n", outname);
+		exit(1);
+	}
+	printf("\nInput:%s\nOutput:%s\n", inname, outname);
+
+	i=0;
+	while (outname[i]!='\0') {
+		bitfilename[i]=outname[i];
+		i++;
+	}
+	i-=4;
+	for (j=0;j<9;j++, i++)
+		bitfilename[i]=bitending[j];
+	bitfilename[i]='\0';
+	if ((bitsp = fopen(bitfilename,"wb")) == NULL) {
+		printf("  iSAC: Cannot read file %s.\n", bitfilename);
+		exit(1);
+	}
+	printf("Bitstream:%s\n\n", bitfilename);
+
+
+	
+	starttime = clock()/(double)CLOCKS_PER_SEC; /* Runtime statistics */
+
+
+	/* Initialize the ISAC and BN structs */
+	WebRtcIsac_create(&ISAC_main_inst);
+/*	WebRtcIsacfix_AssignSize(&sss);
+	iSACstruct=malloc(sss);
+	WebRtcIsacfix_Assign(&ISACFIX_main_inst,iSACstruct);*/
+	WebRtcIsacfix_Create(&ISACFIX_main_inst);
+	
+	BN_data.send_time	  = 0;
+	BN_data.arrival_time  = 0;
+	BN_data.sample_count  = 0;
+	BN_data.rtp_number    = 0;
+	
+	/* Initialize encoder and decoder */
+    framecnt= 0;
+    endfile	= 0;
+
+	if (mode==0) { /* Encode using FLOAT, decode using FLOAT */
+
+		printf("Coding mode: Encode using FLOAT, decode using FLOAT \n\n");
+
+		/* Init iSAC FLOAT */
+		WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode);
+		WebRtcIsac_DecoderInit(ISAC_main_inst);
+		if (CodingMode == 1) {
+			err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+			//	exit(EXIT_FAILURE);
+			}
+		}
+	
+	} else if (mode==1) { /* Encode using FLOAT, decode using FIX */
+
+		printf("Coding mode: Encode using FLOAT, decode using FIX \n\n");
+
+		/* Init iSAC FLOAT */
+		WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode);
+		WebRtcIsac_DecoderInit(ISAC_main_inst);
+		if (CodingMode == 1) {
+			err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+			//	exit(EXIT_FAILURE);
+			}
+		}
+
+		/* Init iSAC FIX */
+		WebRtcIsacfix_EncoderInit(ISACFIX_main_inst, CodingMode);
+		WebRtcIsacfix_DecoderInit(ISACFIX_main_inst);
+		if (CodingMode == 1) {
+			err = WebRtcIsacfix_Control(ISACFIX_main_inst, bottleneck, framesize);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		}
+	} else if (mode==2) { /* Encode using FIX, decode using FLOAT */
+
+		printf("Coding mode: Encode using FIX, decode using FLOAT \n\n");
+
+		/* Init iSAC FLOAT */
+		WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode);
+		WebRtcIsac_DecoderInit(ISAC_main_inst);
+		if (CodingMode == 1) {
+			err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		}
+		
+		/* Init iSAC FIX */
+		WebRtcIsacfix_EncoderInit(ISACFIX_main_inst, CodingMode);
+		WebRtcIsacfix_DecoderInit(ISACFIX_main_inst);
+		if (CodingMode == 1) {
+			err = WebRtcIsacfix_Control(ISACFIX_main_inst, bottleneck, framesize);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		}
+	} else if (mode==3) {
+
+		printf("Coding mode: Encode using FIX, decode using FIX \n\n");
+
+		WebRtcIsacfix_EncoderInit(ISACFIX_main_inst, CodingMode);
+		WebRtcIsacfix_DecoderInit(ISACFIX_main_inst);
+		if (CodingMode == 1) {
+			err = WebRtcIsacfix_Control(ISACFIX_main_inst, bottleneck, framesize);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\n Error in initialization: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		}
+
+	} else
+		printf("Mode must be value between 0 and 3\n");
+	*speechType = 1;
+
+//#define BI_TEST 1
+#ifdef BI_TEST
+    err = WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_main_inst, 300);
+    if (err < 0) {
+            /* exit if returned with error */
+            errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+            printf("\n\n Error in setMaxPayloadSize: %d.\n\n", errtype);
+            fclose(inp);
+            fclose(outp);
+            fclose(bitsp);
+            return(EXIT_FAILURE);
+    }
+#endif
+
+
+    while (endfile == 0) {	
+
+		cur_framesmpls = 0;
+		while (1) {
+			/* Read 10 ms speech block */
+			if (nbTest != 1)
+				endfile = readframe(shortdata, inp, FRAMESAMPLES_10ms);
+			else
+				endfile = readframe(shortdata, inp, (FRAMESAMPLES_10ms/2));
+
+			/* iSAC encoding */
+
+			if (mode==0 || mode ==1) {
+				stream_len = WebRtcIsac_Encode(ISAC_main_inst, shortdata,	streamdata);
+				if (stream_len < 0) {
+					/* exit if returned with error */
+					errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+					printf("\n\nError in encoder: %d.\n\n", errtype);
+				//	exit(EXIT_FAILURE);
+				}
+			} else if (mode==2 || mode==3) {
+				/* iSAC encoding */
+				if (nbTest != 1)
+					stream_len = WebRtcIsacfix_Encode(ISACFIX_main_inst, shortdata,	streamdata);
+				else
+					stream_len = WebRtcIsacfix_EncodeNb(ISACFIX_main_inst, shortdata, streamdata);
+		
+				if (stream_len < 0) {
+					/* exit if returned with error */
+					errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+					printf("\n\nError in encoder: %d.\n\n", errtype);
+				//	exit(EXIT_FAILURE);
+				}
+			}			
+
+			cur_framesmpls += FRAMESAMPLES_10ms;
+
+			/* read next bottleneck rate */
+			if (f_bn != NULL) {
+				if (fscanf(f_bn, "%d", &bottleneck) == EOF) {
+					/* Set pointer to beginning of file */
+					fseek(f_bn, 0L, SEEK_SET);
+					fscanf(f_bn, "%d", &bottleneck);
+				}
+				if (CodingMode == 1) {
+					if (mode==0 || mode==1)
+					  WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+					else if	(mode==2 || mode==3)
+						WebRtcIsacfix_Control(ISACFIX_main_inst, bottleneck, framesize);
+				}
+			}
+
+			/* exit encoder loop if the encoder returned a bitstream */
+			if (stream_len != 0) break;
+		}
+		
+		fwrite(streamdata, 1, stream_len, bitsp); /* NOTE! Writes bytes to file */
+
+		/* simulate packet handling through NetEq and the modem */
+		get_arrival_time(cur_framesmpls, stream_len, bottleneck,
+						 &BN_data);
+//*****************************
+		if (1){
+		if (mode==0) {
+			err = WebRtcIsac_UpdateBwEstimate(ISAC_main_inst,
+									  streamdata,
+									  stream_len,
+									  BN_data.rtp_number,
+									  BN_data.arrival_time);
+
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+			/* iSAC decoding */
+			declen = WebRtcIsac_Decode(ISAC_main_inst,
+										  streamdata,
+									  	  stream_len,
+										  decoded,
+										  speechType);
+			if (declen <= 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		} else if (mode==1) {
+
+			err = WebRtcIsac_UpdateBwEstimate(ISAC_main_inst,
+									  streamdata,
+									  stream_len,
+									  BN_data.rtp_number,
+									  BN_data.arrival_time);
+			err = WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_main_inst,
+									  streamdata,
+									  stream_len,
+									  BN_data.rtp_number,
+									  BN_data.arrival_time);
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+
+			declen = WebRtcIsac_Decode(ISAC_main_inst,
+										  streamdata,
+									  	  stream_len,
+										  decoded,
+										  speechType);
+
+			/* iSAC decoding */
+			if (plc && (framecnt+1)%10 == 0) {
+				if (nbTest !=2 )
+					declen = WebRtcIsacfix_DecodePlc( ISACFIX_main_inst, decoded, 1 );
+				else
+					declen = WebRtcIsacfix_DecodePlcNb( ISACFIX_main_inst, decoded, 1 );
+			} else {
+				if (nbTest !=2 )
+					declen = WebRtcIsacfix_Decode(ISACFIX_main_inst,
+												  streamdata,
+											  	  stream_len,
+												  decoded,
+												  speechType);
+				else
+					declen = WebRtcIsacfix_DecodeNb(ISACFIX_main_inst,
+												  streamdata,
+											  	  stream_len,
+												  decoded,
+												  speechType);
+			}
+			
+			if (declen <= 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		} else if (mode==2) {
+			err = WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_main_inst,
+									  streamdata,
+									  stream_len,
+									  BN_data.rtp_number,
+									  BN_data.arrival_time);
+
+			err = WebRtcIsac_UpdateBwEstimate(ISAC_main_inst,
+									  streamdata,
+									  stream_len,
+									  BN_data.rtp_number,
+									  BN_data.arrival_time);
+
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+			/* iSAC decoding */
+			declen = WebRtcIsac_Decode(ISAC_main_inst,
+										  streamdata,
+									  	  stream_len,
+										  decoded,
+										  speechType);
+			if (declen <= 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		}  else if (mode==3) {
+			err = WebRtcIsacfix_UpdateBwEstimate(ISACFIX_main_inst,
+									  streamdata,
+									  stream_len,
+									  BN_data.rtp_number,
+									  BN_data.send_time,
+									  BN_data.arrival_time);
+
+			if (err < 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+			/* iSAC decoding */
+						
+			if (plc && (framecnt+1)%10 == 0) {
+				if (nbTest !=2 )
+					declen = WebRtcIsacfix_DecodePlc( ISACFIX_main_inst, decoded, 1 );
+				else
+					declen = WebRtcIsacfix_DecodePlcNb( ISACFIX_main_inst, decoded, 1 );
+			} else {
+				if (nbTest !=2 )
+					declen = WebRtcIsacfix_Decode(ISACFIX_main_inst,
+												  streamdata,
+											  	  stream_len,
+												  decoded,
+												  speechType);
+				else
+					declen = WebRtcIsacfix_DecodeNb(ISACFIX_main_inst,
+												  streamdata,
+											  	  stream_len,
+												  decoded,
+												  speechType);
+			}
+			if (declen <= 0) {
+				/* exit if returned with error */
+				errtype=WebRtcIsacfix_GetErrorCode(ISACFIX_main_inst);
+				printf("\n\nError in decoder: %d.\n\n", errtype);
+				//exit(EXIT_FAILURE);
+			}
+		}
+
+		/* Write decoded speech frame to file */
+		fwrite(decoded, sizeof(WebRtc_Word16), declen, outp);
+		}
+
+		fprintf(stderr,"  \rframe = %d", framecnt);
+		framecnt++;
+
+
+
+#ifdef _DEBUG
+		
+		totalsmpls += declen;
+		totalbits += 8 * stream_len;
+		kbps = ((double) FS) / ((double) cur_framesmpls) * 8.0 * stream_len / 1000.0;// kbits/s
+		fy = fopen("bit_rate.dat", "a");
+		fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
+		fclose(fy);
+		
+#endif /* _DEBUG */
+	
+	}
+	
+#ifdef _DEBUG
+	printf("\n\ntotal bits				= %d bits", totalbits);
+	printf("\nmeasured average bitrate		= %0.3f kbits/s", (double)totalbits *(FS/1000) / totalsmpls);
+	printf("\n");
+#endif /* _DEBUG */
+	
+	/* Runtime statistics */
+	runtime = (double)(clock()/(double)CLOCKS_PER_SEC-starttime);
+	length_file = ((double)framecnt*(double)declen/FS);
+	printf("\n\nLength of speech file: %.1f s\n", length_file);
+	printf("Time to run iSAC:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+	printf("---------------------END----------------------\n");
+	
+	fclose(inp);
+	fclose(outp);
+	
+	WebRtcIsac_Free(ISAC_main_inst);
+	WebRtcIsacfix_Free(ISACFIX_main_inst);
+
+	
+
+//	fclose(histfile);
+//	fclose(ratefile);
+	
+	return 0;
+
+}	
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/isac_test.gypi b/trunk/src/modules/audio_coding/codecs/iSAC/isac_test.gypi
new file mode 100644
index 0000000..abe2454
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/isac_test.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # simple kenny
+    {
+      'target_name': 'iSACtest',
+      'type': 'executable',
+      'dependencies': [
+        'iSAC',
+      ],
+      'include_dirs': [
+        './main/test',
+        './main/interface',
+        './main/util',
+      ],
+      'sources': [
+        './main/test/simpleKenny.c',
+        './main/util/utility.c',
+      ],
+    },
+    # ReleaseTest-API
+    {
+      'target_name': 'iSACAPITest',
+      'type': 'executable',
+      'dependencies': [
+        'iSAC',
+      ],
+      'include_dirs': [
+        './main/test',
+        './main/interface',
+        './main/util',
+      ],
+      'sources': [
+        './main/test/ReleaseTest-API/ReleaseTest-API.cc',
+        './main/util/utility.c',
+      ],
+    },
+    # SwitchingSampRate
+    {
+      'target_name': 'iSACSwitchSampRateTest',
+      'type': 'executable',
+      'dependencies': [
+        'iSAC',
+      ],
+      'include_dirs': [
+        './main/test',
+        './main/interface',
+        '../../../../common_audio/signal_processing/include',
+        './main/util',
+      ],
+      'sources': [
+        './main/test/SwitchingSampRate/SwitchingSampRate.cc',
+        './main/util/utility.c',
+      ],    
+    },
+
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/isacfix_test.gypi b/trunk/src/modules/audio_coding/codecs/iSAC/isacfix_test.gypi
new file mode 100644
index 0000000..627342f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/isacfix_test.gypi
@@ -0,0 +1,33 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # kenny
+    {
+      'target_name': 'iSACFixtest',
+      'type': 'executable',
+      'dependencies': [
+        'iSACFix',
+      ],
+      'include_dirs': [
+        './fix/test',
+        './fix/interface',
+      ],
+      'sources': [
+        './fix/test/kenny.c',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/interface/isac.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/interface/isac.h
new file mode 100644
index 0000000..03c260b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/interface/isac.h
@@ -0,0 +1,729 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INTERFACE_ISAC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INTERFACE_ISAC_H_
+
+/*
+ * Define the fixed-point numeric formats
+ */
+#include "typedefs.h"
+
+typedef struct WebRtcISACStruct    ISACStruct;
+
+enum IsacSamplingRate {kIsacWideband = 16,  kIsacSuperWideband = 32};
+
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+  /******************************************************************************
+   * WebRtcIsac_AssignSize(...)
+   *
+   * This function returns the size of the ISAC instance, so that the instance
+   * can be created outside iSAC.
+   *
+   * Input:
+   *        - samplingRate      : sampling rate of the input/output audio.
+   *
+   * Output:
+   *        - sizeinbytes       : number of bytes needed to allocate for the
+   *                              instance.
+   *
+   * Return value               : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_AssignSize(
+      int* sizeinbytes);
+
+
+  /******************************************************************************
+   * WebRtcIsac_Assign(...)
+   *
+   * This function assigns the memory already created to the ISAC instance.
+   *
+   * Input:
+   *        - *ISAC_main_inst   : a pointer to the coder instance.
+   *        - samplingRate      : sampling rate of the input/output audio.
+   *        - ISAC_inst_Addr    : the already allocated memory, where we put the
+   *                              iSAC structure.
+   *
+   * Return value               : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_Assign(
+      ISACStruct** ISAC_main_inst,
+      void*        ISAC_inst_Addr);
+
+
+  /******************************************************************************
+   * WebRtcIsac_Create(...)
+   *
+   * This function creates an ISAC instance, which will contain the state
+   * information for one coding/decoding channel.
+   *
+   * Input:
+   *        - *ISAC_main_inst   : a pointer to the coder instance.
+   *
+   * Return value               : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_Create(
+      ISACStruct** ISAC_main_inst);
+
+
+  /******************************************************************************
+   * WebRtcIsac_Free(...)
+   *
+   * This function frees the ISAC instance created at the beginning.
+   *
+   * Input:
+   *        - ISAC_main_inst    : an ISAC instance.
+   *
+   * Return value               : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_Free(
+      ISACStruct* ISAC_main_inst);
+
+
+  /******************************************************************************
+   * WebRtcIsac_EncoderInit(...)
+   *
+   * This function initializes an ISAC instance prior to the encoder calls.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - CodingMode        : 0 -> Bit rate and frame length are
+   *                                automatically adjusted to available bandwidth
+   *                                on transmission channel, just valid if codec
+   *                                is created to work in wideband mode.
+   *                              1 -> User sets a frame length and a target bit
+   *                                rate which is taken as the maximum
+   *                                short-term average bit rate.
+   *
+   * Return value               : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_EncoderInit(
+      ISACStruct* ISAC_main_inst,
+      WebRtc_Word16 CodingMode);
+
+
+  /******************************************************************************
+   * WebRtcIsac_Encode(...)
+   *
+   * This function encodes 10ms audio blocks and inserts them into a packet.
+   * Input speech length has 160 samples if operating at 16 kHz sampling
+   * rate, or 320 if operating at 32 kHz sampling rate. The encoder buffers the
+   * input audio until the whole frame is buffered then proceeds with encoding.
+   *
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - speechIn          : input speech vector.
+   *
+   * Output:
+   *        - encoded           : the encoded data vector
+   *
+   * Return value:
+   *                            : >0 - Length (in bytes) of coded data
+   *                            :  0 - The buffer didn't reach the chosen
+   *                               frame-size so it keeps buffering speech
+   *                               samples.
+   *                            : -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_Encode(
+      ISACStruct*        ISAC_main_inst,
+      const WebRtc_Word16* speechIn,
+      WebRtc_Word16*       encoded);
+
+
+  /******************************************************************************
+   * WebRtcIsac_DecoderInit(...)
+   *
+   * This function initializes an ISAC instance prior to the decoder calls.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *
+   * Return value
+   *                            : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_DecoderInit(
+      ISACStruct* ISAC_main_inst);
+
+
+  /******************************************************************************
+   * WebRtcIsac_UpdateBwEstimate(...)
+   *
+   * This function updates the estimate of the bandwidth.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - encoded           : encoded ISAC frame(s).
+   *        - packet_size       : size of the packet.
+   *        - rtp_seq_number    : the RTP number of the packet.
+   *        - send_ts           : the RTP send timestamp, given in samples
+   *        - arr_ts            : the arrival time of the packet (from NetEq)
+   *                              in samples.
+   *
+   * Return value               : 0 - Ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_UpdateBwEstimate(
+      ISACStruct*         ISAC_main_inst,
+      const WebRtc_UWord16* encoded,
+      WebRtc_Word32         packet_size,
+      WebRtc_UWord16        rtp_seq_number,
+      WebRtc_UWord32        send_ts,
+      WebRtc_UWord32        arr_ts);
+
+
+  /******************************************************************************
+   * WebRtcIsac_Decode(...)
+   *
+   * This function decodes an ISAC frame. At 16 kHz sampling rate, the length
+   * of the output audio could be either 480 or 960 samples, equivalent to
+   * 30 or 60 ms respectively. At 32 kHz sampling rate, the length of the
+   * output audio is 960 samples, which is 30 ms.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - encoded           : encoded ISAC frame(s).
+   *        - len               : bytes in encoded vector.
+   *
+   * Output:
+   *        - decoded           : The decoded vector.
+   *
+   * Return value               : >0 - number of samples in decoded vector.
+   *                              -1 - Error.
+   */
+
+  WebRtc_Word16 WebRtcIsac_Decode(
+      ISACStruct*           ISAC_main_inst,
+      const WebRtc_UWord16* encoded,
+      WebRtc_Word16         len,
+      WebRtc_Word16*        decoded,
+      WebRtc_Word16*        speechType);
+
+
+  /******************************************************************************
+   * WebRtcIsac_DecodePlc(...)
+   *
+   * This function conducts PLC for ISAC frame(s). Output speech length
+   * will be a multiple of frames, i.e. multiples of 30 ms audio. Therefore,
+   * the output is multiple of 480 samples if operating at 16 kHz and multiple
+   * of 960 if operating at 32 kHz.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - noOfLostFrames    : Number of PLC frames to produce.
+   *
+   * Output:
+   *        - decoded           : The decoded vector.
+   *
+   * Return value               : >0 - number of samples in decoded PLC vector
+   *                              -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_DecodePlc(
+      ISACStruct*  ISAC_main_inst,
+      WebRtc_Word16* decoded,
+      WebRtc_Word16  noOfLostFrames);
+
+
+  /******************************************************************************
+   * WebRtcIsac_Control(...)
+   *
+   * This function sets the limit on the short-term average bit-rate and the
+   * frame length. Should be used only in Instantaneous mode. At 16 kHz sampling
+   * rate, an average bit-rate between 10000 and 32000 bps is valid and a
+   * frame-size of 30 or 60 ms is acceptable. At 32 kHz, an average bit-rate
+   * between 10000 and 56000 is acceptable, and the valid frame-size is 30 ms.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - rate              : limit on the short-term average bit rate,
+   *                              in bits/second.
+   *        - framesize         : frame-size in millisecond.
+   *
+   * Return value               : 0  - ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_Control(
+      ISACStruct*   ISAC_main_inst,
+      WebRtc_Word32 rate,
+      WebRtc_Word16 framesize);
+
+
+  /******************************************************************************
+   * WebRtcIsac_ControlBwe(...)
+   *
+   * This function sets the initial values of bottleneck and frame-size if
+   * iSAC is used in channel-adaptive mode. Therefore, this API is not
+   * applicable if the codec is created to operate in super-wideband mode.
+   *
+   * Through this API, users can enforce a frame-size for all values of
+   * bottleneck. Then iSAC will not automatically change the frame-size.
+   *
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - rateBPS           : initial value of bottleneck in bits/second
+   *                              10000 <= rateBPS <= 56000 is accepted
+   *                              For default bottleneck set rateBPS = 0
+   *        - frameSizeMs       : number of milliseconds per frame (30 or 60)
+   *        - enforceFrameSize  : 1 to enforce the given frame-size throughout
+   *                              the adaptation process, 0 to let iSAC
+   *                              change the frame-size if required.
+   *
+   * Return value               : 0  - ok
+   *                             -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIsac_ControlBwe(
+      ISACStruct* ISAC_main_inst,
+      WebRtc_Word32 rateBPS,
+      WebRtc_Word16 frameSizeMs,
+      WebRtc_Word16 enforceFrameSize);
+
+
+  /******************************************************************************
+   * WebRtcIsac_ReadFrameLen(...)
+   *
+   * This function returns the length of the frame represented in the packet.
+   *
+   * Input:
+   *        - encoded           : Encoded bit-stream
+   *
+   * Output:
+   *        - frameLength       : Length of frame in packet (in samples)
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsac_ReadFrameLen(
+      ISACStruct*          ISAC_main_inst,
+      const WebRtc_Word16* encoded,
+      WebRtc_Word16*       frameLength);
+
+
+  /******************************************************************************
+   * WebRtcIsac_version(...)
+   *
+   * This function returns the version number.
+   *
+   * Output:
+   *        - version      : Pointer to character string
+   *
+   */
+
+  void WebRtcIsac_version(
+      char *version);
+
+
+  /******************************************************************************
+   * WebRtcIsac_GetErrorCode(...)
+   *
+   * This function can be used to check the error code of an iSAC instance. When
+   * a function returns -1, an error code will be set for that instance. The
+   * function below extracts the code of the last error that occurred in the
+   * specified instance.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance
+   *
+   * Return value               : Error code
+   */
+
+  WebRtc_Word16 WebRtcIsac_GetErrorCode(
+      ISACStruct* ISAC_main_inst);
+
+
+  /****************************************************************************
+   * WebRtcIsac_GetUplinkBw(...)
+   *
+   * This function outputs the target bottleneck of the codec. In
+   * channel-adaptive mode, the target bottleneck is specified through in-band
+   * signalling retrieved by the bandwidth estimator.
+   * In channel-independent, also called instantaneous mode, the target
+   * bottleneck is provided to the encoder by calling xxx_control(...). If
+   * xxx_control is never called the default value is returned. The default
+   * value for bottleneck at 16 kHz encoder sampling rate is 32000 bits/sec,
+   * and it is 56000 bits/sec for 32 kHz sampling rate.
+   * Note that the output is the iSAC internal operating bottleneck which might
+   * differ slightly from the one provided through xxx_control().
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *
+   * Output:
+   *        - *bottleneck       : bottleneck in bits/sec
+   *
+   * Return value               : -1 if error happens
+   *                               0 bit-rates computed correctly.
+   */
+
+  WebRtc_Word16 WebRtcIsac_GetUplinkBw(
+      ISACStruct*    ISAC_main_inst,
+      WebRtc_Word32* bottleneck);
+
+
+  /******************************************************************************
+   * WebRtcIsac_SetMaxPayloadSize(...)
+   *
+   * This function sets a limit for the maximum payload size of iSAC. The same
+   * value is used both for 30 and 60 ms packets. If the encoder sampling rate
+   * is 16 kHz the maximum payload size is between 120 and 400 bytes. If the
+   * encoder sampling rate is 32 kHz the maximum payload size is between 120
+   * and 600 bytes.
+   *
+   * If an out of range limit is used, the function returns -1, but the closest
+   * valid value will be applied.
+   *
+   * ---------------
+   * IMPORTANT NOTES
+   * ---------------
+   * The size of a packet is limited to the minimum of 'max-payload-size' and
+   * 'max-rate.' For instance, let's assume the max-payload-size is set to
+   * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps
+   * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms
+   * frame-size. Then a packet with a frame-size of 30 ms is limited to 150,
+   * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to
+   * 170 bytes, i.e. min(170, 300).
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *        - maxPayloadBytes   : maximum size of the payload in bytes
+   *                              valid values are between 120 and 400 bytes
+   *                              if encoder sampling rate is 16 kHz. For
+   *                              32 kHz encoder sampling rate valid values
+   *                              are between 120 and 600 bytes.
+   *
+   * Return value               : 0 if successful
+   *                             -1 if error happens
+   */
+
+  WebRtc_Word16 WebRtcIsac_SetMaxPayloadSize(
+      ISACStruct* ISAC_main_inst,
+      WebRtc_Word16 maxPayloadBytes);
+
+
+  /******************************************************************************
+   * WebRtcIsac_SetMaxRate(...)
+   *
+   * This function sets the maximum rate which the codec may not exceed for
+   * any signal packet. The maximum rate is defined as payload-size per
+   * frame-size in bits per second.
+   *
+   * The codec has a maximum rate of 53400 bits per second (200 bytes per 30
+   * ms) if the encoder sampling rate is 16kHz, and 160 kbps (600 bytes/30 ms)
+   * if the encoder sampling rate is 32 kHz.
+   *
+   * It is possible to set a maximum rate between 32000 and 53400 bits/sec
+   * in wideband mode, and 32000 to 160000 bits/sec in super-wideband mode.
+   *
+   * If an out of range limit is used, the function returns -1, but the closest
+   * valid value will be applied.
+   *
+   * ---------------
+   * IMPORTANT NOTES
+   * ---------------
+   * The size of a packet is limited to the minimum of 'max-payload-size' and
+   * 'max-rate.' For instance, let's assume the max-payload-size is set to
+   * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps
+   * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms
+   * frame-size. Then a packet with a frame-size of 30 ms is limited to 150,
+   * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to
+   * 170 bytes, min(170, 300).
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *        - maxRate           : maximum rate in bits per second,
+   *                              valid values are 32000 to 53400 bits/sec in
+   *                              wideband mode, and 32000 to 160000 bits/sec in
+   *                              super-wideband mode.
+   *
+   * Return value               : 0 if successful
+   *                             -1 if error happens
+   */
+
+  WebRtc_Word16 WebRtcIsac_SetMaxRate(
+      ISACStruct* ISAC_main_inst,
+      WebRtc_Word32 maxRate);
+
+
+  /******************************************************************************
+   * WebRtcIsac_DecSampRate()
+   * Return the sampling rate of the decoded audio.
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *
+   * Return value               : enumerator representing sampling frequency
+   *                              associated with the decoder, i.e. the
+   *                              sampling rate of the decoded audio.
+   *
+   */
+
+  enum IsacSamplingRate WebRtcIsac_DecSampRate(
+      ISACStruct*                ISAC_main_inst);
+
+
+  /******************************************************************************
+   * WebRtcIsac_EncSampRate()
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *
+   * Return value               : enumerator representing sampling frequency
+   *                              associated with the encoder, the input audio
+   *                              is expected to be sampled at this rate.
+   *
+   */
+
+  enum IsacSamplingRate WebRtcIsac_EncSampRate(
+      ISACStruct*                ISAC_main_inst);
+
+
+  /******************************************************************************
+   * WebRtcIsac_SetDecSampRate()
+   * Set the sampling rate of the decoder.  Initialization of the decoder WILL
+   * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz
+   * which is set when the instance is created.
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *        - sampRate          : enumerator specifying the sampling rate.
+   *
+   * Return value               : 0 if successful
+   *                             -1 if failed.
+   */
+
+  WebRtc_Word16 WebRtcIsac_SetDecSampRate(
+      ISACStruct*           ISAC_main_inst,
+      enum IsacSamplingRate sampRate);
+
+
+  /******************************************************************************
+   * WebRtcIsac_SetEncSampRate()
+   * Set the sampling rate of the encoder. Initialization of the encoder WILL
+   * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz
+   * which is set when the instance is created. The encoding-mode and the
+   * bottleneck remain unchanged by this call, however, the maximum rate and
+   * maximum payload-size will reset to their default value.
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC instance
+   *        - sampRate          : enumerator specifying the sampling rate.
+   *
+   * Return value               : 0 if successful
+   *                             -1 if failed.
+   */
+
+  WebRtc_Word16 WebRtcIsac_SetEncSampRate(
+      ISACStruct*           ISAC_main_inst,
+      enum IsacSamplingRate sampRate);
+
+
+
+  /******************************************************************************
+   * WebRtcIsac_GetNewBitStream(...)
+   *
+   * This function returns encoded data, with the received bwe-index in the
+   * stream. If the rate is set to a value less than the bottleneck of the
+   * codec, the new bitstream will be re-encoded with the given target rate.
+   * It should always return a complete packet, i.e. only called once
+   * even for 60 msec frames.
+   *
+   * NOTE 1! This function does not write in the ISACStruct, it is not allowed.
+   * NOTE 2! Currently not implemented for SWB mode.
+   * NOTE 3! Rates larger than the bottleneck of the codec will be limited
+   *         to the current bottleneck.
+   *
+   * Input:
+   *        - ISAC_main_inst    : ISAC instance.
+   *        - bweIndex          : Index of bandwidth estimate to put in new
+   *                              bitstream
+   *        - rate              : target rate of the transcoder is bits/sec.
+   *                              Valid values are the accepted rate in iSAC,
+   *                              i.e. 10000 to 56000.
+   *        - isRCU             : if the new bit-stream is an RCU stream.
+   *                              Note that the rate parameter always indicates
+   *                              the target rate of the main payload, regardless
+   *                              of 'isRCU' value.
+   *
+   * Output:
+   *        - encoded           : The encoded data vector
+   *
+   * Return value               : >0 - Length (in bytes) of coded data
+   *                              -1 - Error  or called in SWB mode
+   *                                 NOTE! No error code is written to
+   *                                 the struct since it is only allowed to read
+   *                                 the struct.
+   */
+  WebRtc_Word16 WebRtcIsac_GetNewBitStream(
+      ISACStruct*    ISAC_main_inst,
+      WebRtc_Word16  bweIndex,
+      WebRtc_Word16  jitterInfo,
+      WebRtc_Word32  rate,
+      WebRtc_Word16* encoded,
+      WebRtc_Word16  isRCU);
+
+
+
+  /****************************************************************************
+   * WebRtcIsac_GetDownLinkBwIndex(...)
+   *
+   * This function returns index representing the Bandwidth estimate from
+   * other side to this side.
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC struct
+   *
+   * Output:
+   *        - bweIndex          : Bandwidth estimate to transmit to other side.
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsac_GetDownLinkBwIndex(
+      ISACStruct*  ISAC_main_inst,
+      WebRtc_Word16* bweIndex,
+      WebRtc_Word16* jitterInfo);
+
+
+  /****************************************************************************
+   * WebRtcIsac_UpdateUplinkBw(...)
+   *
+   * This function takes an index representing the Bandwidth estimate from
+   * this side to other side and updates BWE.
+   *
+   * Input:
+   *        - ISAC_main_inst    : iSAC struct
+   *        - bweIndex          : Bandwidth estimate from other side.
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsac_UpdateUplinkBw(
+      ISACStruct* ISAC_main_inst,
+      WebRtc_Word16 bweIndex);
+
+
+  /****************************************************************************
+   * WebRtcIsac_ReadBwIndex(...)
+   *
+   * This function returns the index of the Bandwidth estimate from the bitstream.
+   *
+   * Input:
+   *        - encoded           : Encoded bitstream
+   *
+   * Output:
+   *        - frameLength       : Length of frame in packet (in samples)
+   *        - bweIndex         : Bandwidth estimate in bitstream
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsac_ReadBwIndex(
+      const WebRtc_Word16* encoded,
+      WebRtc_Word16*       bweIndex);
+
+
+
+  /*******************************************************************************
+   * WebRtcIsac_GetNewFrameLen(...)
+   *
+   * returns the frame length (in samples) of the next packet. In the case of channel-adaptive
+   * mode, iSAC decides on its frame length based on the estimated bottleneck;
+   * this allows a user to prepare for the next packet (at the encoder).
+   *
+   * The primary usage is in CE to make iSAC work in channel-adaptive mode.
+   *
+   * Input:
+   *        - ISAC_main_inst     : iSAC struct
+   *
+   * Return Value                : frame length in samples
+   *
+   */
+
+  WebRtc_Word16 WebRtcIsac_GetNewFrameLen(
+      ISACStruct* ISAC_main_inst);
+
+
+  /****************************************************************************
+   *  WebRtcIsac_GetRedPayload(...)
+   *
+   *  Populates "encoded" with the redundant payload of the recently encoded
+   *  frame. This function has to be called once that WebRtcIsac_Encode(...)
+   *  returns a positive value. Regardless of the frame-size this function will
+   *  be called only once after encoding is completed.
+   *
+   * Input:
+   *      - ISAC_main_inst    : iSAC struct
+   *
+   * Output:
+   *        - encoded            : the encoded data vector
+   *
+   *
+   * Return value:
+   *                              : >0 - Length (in bytes) of coded data
+   *                              : -1 - Error
+   *
+   *
+   */
+  WebRtc_Word16 WebRtcIsac_GetRedPayload(
+      ISACStruct*    ISAC_main_inst,
+      WebRtc_Word16* encoded);
+
+
+  /****************************************************************************
+   * WebRtcIsac_DecodeRcu(...)
+   *
+   * This function decodes a redundant (RCU) iSAC frame. Function is called in
+ * NetEq with a stored RCU payload in case of packet loss. Output speech length
+   * will be a multiple of 480 samples: 480 or 960 samples,
+   * depending on the framesize (30 or 60 ms).
+   *
+   * Input:
+   *      - ISAC_main_inst     : ISAC instance.
+   *      - encoded            : encoded ISAC RCU frame(s)
+   *      - len                : bytes in encoded vector
+   *
+   * Output:
+   *      - decoded            : The decoded vector
+   *
+   * Return value              : >0 - number of samples in decoded vector
+   *                             -1 - Error
+   */
+  WebRtc_Word16 WebRtcIsac_DecodeRcu(
+      ISACStruct*           ISAC_main_inst,
+      const WebRtc_UWord16* encoded,
+      WebRtc_Word16         len,
+      WebRtc_Word16*        decoded,
+      WebRtc_Word16*        speechType);
+
+
+#if defined(__cplusplus)
+}
+#endif
+
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INTERFACE_ISAC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/Android.mk b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/Android.mk
new file mode 100644
index 0000000..07b2a31
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/Android.mk
@@ -0,0 +1,66 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_isac
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    arith_routines.c \
+    arith_routines_hist.c \
+    arith_routines_logist.c \
+    bandwidth_estimator.c \
+    crc.c \
+    decode.c \
+    decode_bwe.c \
+    encode.c \
+    encode_lpc_swb.c \
+    entropy_coding.c \
+    fft.c \
+    filter_functions.c \
+    filterbank_tables.c \
+    intialize.c \
+    isac.c \
+    filterbanks.c \
+    pitch_lag_tables.c \
+    lattice.c \
+    lpc_gain_swb_tables.c \
+    lpc_analysis.c \
+    lpc_shape_swb12_tables.c \
+    lpc_shape_swb16_tables.c \
+    lpc_tables.c \
+    pitch_estimator.c \
+    pitch_filter.c \
+    pitch_gain_tables.c \
+    spectrum_ar_model_tables.c \
+    transform.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../../../.. \
+    $(LOCAL_PATH)/../../../../../../common_audio/signal_processing/include 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines.c
new file mode 100644
index 0000000..31c441a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines.c
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "arith_routines.h"
+#include "settings.h"
+
+
+/*
+ * terminate and return byte stream;
+ * returns the number of bytes in the stream
+ */
+int WebRtcIsac_EncTerminate(Bitstr *streamdata) /* in-/output struct containing bitstream */
+{
+  WebRtc_UWord8 *stream_ptr;
+
+
+  /* point to the right place in the stream buffer */
+  stream_ptr = streamdata->stream + streamdata->stream_index;
+
+  /* find minimum length (determined by current interval width) */
+  if ( streamdata->W_upper > 0x01FFFFFF )
+  {
+    streamdata->streamval += 0x01000000;
+    /* add carry to buffer */
+    if (streamdata->streamval < 0x01000000)
+    {
+      /* propagate carry */
+      while ( !(++(*--stream_ptr)) );
+      /* put pointer back to the old value */
+      stream_ptr = streamdata->stream + streamdata->stream_index;
+    }
+    /* write remaining data to bitstream */
+    *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+  }
+  else
+  {
+    streamdata->streamval += 0x00010000;
+    /* add carry to buffer */
+    if (streamdata->streamval < 0x00010000)
+    {
+      /* propagate carry */
+      while ( !(++(*--stream_ptr)) );
+      /* put pointer back to the old value */
+      stream_ptr = streamdata->stream + streamdata->stream_index;
+    }
+    /* write remaining data to bitstream */
+    *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+    *stream_ptr++ = (WebRtc_UWord8) ((streamdata->streamval >> 16) & 0x00FF);
+  }
+
+  /* calculate stream length */
+  return (int)(stream_ptr - streamdata->stream);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines.h
new file mode 100644
index 0000000..8e5f496
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * arith_routines.h
+ *
+ * Functions for arithmetic coding.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_
+
+#include "structs.h"
+
+
+int WebRtcIsac_EncLogisticMulti2(
+    Bitstr *streamdata,              /* in-/output struct containing bitstream */
+    WebRtc_Word16 *dataQ7,           /* input: data vector */
+    const WebRtc_UWord16 *env,       /* input: side info vector defining the width of the pdf */
+    const int N,                     /* input: data vector length */
+    const WebRtc_Word16 isSWB12kHz); /* if the codec is working in 12kHz bandwidth */
+
+/* returns the number of bytes in the stream */
+int WebRtcIsac_EncTerminate(Bitstr *streamdata); /* in-/output struct containing bitstream */
+
+/* returns the number of bytes in the stream so far */
+int WebRtcIsac_DecLogisticMulti2(
+    WebRtc_Word16 *data,             /* output: data vector */
+    Bitstr *streamdata,              /* in-/output struct containing bitstream */
+    const WebRtc_UWord16 *env,       /* input: side info vector defining the width of the pdf */
+    const WebRtc_Word16 *dither,     /* input: dither vector */
+    const int N,                     /* input: data vector length */
+    const WebRtc_Word16 isSWB12kHz); /* if the codec is working in 12kHz bandwidth */
+
+void WebRtcIsac_EncHistMulti(
+    Bitstr *streamdata,         /* in-/output struct containing bitstream */
+    const int *data,            /* input: data vector */
+    const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
+    const int N);               /* input: data vector length */
+
+int WebRtcIsac_DecHistBisectMulti(
+    int *data,                      /* output: data vector */
+    Bitstr *streamdata,             /* in-/output struct containing bitstream */
+    const WebRtc_UWord16 **cdf,     /* input: array of cdf arrays */
+    const WebRtc_UWord16 *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */
+    const int N);                   /* input: data vector length */
+
+int WebRtcIsac_DecHistOneStepMulti(
+    int *data,                       /* output: data vector */
+    Bitstr *streamdata,              /* in-/output struct containing bitstream */
+    const WebRtc_UWord16 **cdf,      /* input: array of cdf arrays */
+    const WebRtc_UWord16 *init_index,/* input: vector of initial cdf table search entries */
+    const int N);                    /* input: data vector length */
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines_hist.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines_hist.c
new file mode 100644
index 0000000..f4a13d6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines_hist.c
@@ -0,0 +1,291 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "settings.h"
+#include "arith_routines.h"
+
+
+/*
+ * code symbols into arithmetic bytestream
+ */
+void WebRtcIsac_EncHistMulti(Bitstr *streamdata, /* in-/output struct containing bitstream */
+                             const int *data,  /* input: data vector */
+                             const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
+                             const int N)   /* input: data vector length */
+{
+  WebRtc_UWord32 W_lower, W_upper;
+  WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
+  WebRtc_UWord8 *stream_ptr;
+  WebRtc_UWord8 *stream_ptr_carry;
+  WebRtc_UWord32 cdf_lo, cdf_hi;
+  int k;
+
+
+  /* point to beginning of stream buffer */
+  stream_ptr = streamdata->stream + streamdata->stream_index;
+  W_upper = streamdata->W_upper;
+
+  for (k=N; k>0; k--)
+  {
+    /* fetch cdf_lower and cdf_upper from cdf tables */
+    cdf_lo = (WebRtc_UWord32) *(*cdf + *data);
+    cdf_hi = (WebRtc_UWord32) *(*cdf++ + *data++ + 1);
+
+    /* update interval */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = W_upper >> 16;
+    W_lower = W_upper_MSB * cdf_lo;
+    W_lower += (W_upper_LSB * cdf_lo) >> 16;
+    W_upper = W_upper_MSB * cdf_hi;
+    W_upper += (W_upper_LSB * cdf_hi) >> 16;
+
+    /* shift interval such that it begins at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamdata->streamval += W_lower;
+
+    /* handle carry */
+    if (streamdata->streamval < W_lower)
+    {
+      /* propagate carry */
+      stream_ptr_carry = stream_ptr;
+      while (!(++(*--stream_ptr_carry)));
+    }
+
+    /* renormalize interval, store most significant byte of streamval and update streamval */
+    while ( !(W_upper & 0xFF000000) )      /* W_upper < 2^24 */
+    {
+      W_upper <<= 8;
+      *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+      streamdata->streamval <<= 8;
+    }
+  }
+
+  /* calculate new stream_index */
+  streamdata->stream_index = (int)(stream_ptr - streamdata->stream);
+  streamdata->W_upper = W_upper;
+
+  return;
+}
+
+
+
+/*
+ * function to decode more symbols from the arithmetic bytestream, using method of bisection
+ * cdf tables should be of size 2^k-1 (which corresponds to an alphabet size of 2^k-2)
+ */
+int WebRtcIsac_DecHistBisectMulti(int *data,     /* output: data vector */
+                                  Bitstr *streamdata,   /* in-/output struct containing bitstream */
+                                  const WebRtc_UWord16 **cdf,  /* input: array of cdf arrays */
+                                  const WebRtc_UWord16 *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */
+                                  const int N)    /* input: data vector length */
+{
+  WebRtc_UWord32    W_lower, W_upper;
+  WebRtc_UWord32    W_tmp;
+  WebRtc_UWord32    W_upper_LSB, W_upper_MSB;
+  WebRtc_UWord32    streamval;
+  const   WebRtc_UWord8 *stream_ptr;
+  const   WebRtc_UWord16 *cdf_ptr;
+  int     size_tmp;
+  int     k;
+
+  W_lower = 0; //to remove warning -DH
+  stream_ptr = streamdata->stream + streamdata->stream_index;
+  W_upper = streamdata->W_upper;
+  if (W_upper == 0)
+    /* Should not be possible in normal operation */
+    return -2;
+
+  if (streamdata->stream_index == 0)   /* first time decoder is called for this stream */
+  {
+    /* read first word from bytestream */
+    streamval = *stream_ptr << 24;
+    streamval |= *++stream_ptr << 16;
+    streamval |= *++stream_ptr << 8;
+    streamval |= *++stream_ptr;
+  } else {
+    streamval = streamdata->streamval;
+  }
+
+  for (k=N; k>0; k--)
+  {
+    /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = W_upper >> 16;
+
+    /* start halfway the cdf range */
+    size_tmp = *cdf_size++ >> 1;
+    cdf_ptr = *cdf + (size_tmp - 1);
+
+    /* method of bisection */
+    for ( ;; )
+    {
+      W_tmp = W_upper_MSB * *cdf_ptr;
+      W_tmp += (W_upper_LSB * *cdf_ptr) >> 16;
+      size_tmp >>= 1;
+      if (size_tmp == 0) break;
+      if (streamval > W_tmp)
+      {
+        W_lower = W_tmp;
+        cdf_ptr += size_tmp;
+      } else {
+        W_upper = W_tmp;
+        cdf_ptr -= size_tmp;
+      }
+    }
+    if (streamval > W_tmp)
+    {
+      W_lower = W_tmp;
+      *data++ = (int)(cdf_ptr - *cdf++);
+    } else {
+      W_upper = W_tmp;
+      *data++ = (int)(cdf_ptr - *cdf++ - 1);
+    }
+
+    /* shift interval to start at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamval -= W_lower;
+
+    /* renormalize interval and update streamval */
+    while ( !(W_upper & 0xFF000000) )    /* W_upper < 2^24 */
+    {
+      /* read next byte from stream */
+      streamval = (streamval << 8) | *++stream_ptr;
+      W_upper <<= 8;
+    }
+
+    if (W_upper == 0)
+      /* Should not be possible in normal operation */
+      return -2;
+
+
+  }
+
+  streamdata->stream_index = (int)(stream_ptr - streamdata->stream);
+  streamdata->W_upper = W_upper;
+  streamdata->streamval = streamval;
+
+
+  /* find number of bytes in original stream (determined by current interval width) */
+  if ( W_upper > 0x01FFFFFF )
+    return streamdata->stream_index - 2;
+  else
+    return streamdata->stream_index - 1;
+}
+
+
+
+/*
+ * function to decode more symbols from the arithmetic bytestream, taking single step up or
+ * down at a time
+ * cdf tables can be of arbitrary size, but large tables may take a lot of iterations
+ */
+int WebRtcIsac_DecHistOneStepMulti(int *data,        /* output: data vector */
+                                   Bitstr *streamdata,      /* in-/output struct containing bitstream */
+                                   const WebRtc_UWord16 **cdf,   /* input: array of cdf arrays */
+                                   const WebRtc_UWord16 *init_index, /* input: vector of initial cdf table search entries */
+                                   const int N)     /* input: data vector length */
+{
+  WebRtc_UWord32    W_lower, W_upper;
+  WebRtc_UWord32    W_tmp;
+  WebRtc_UWord32    W_upper_LSB, W_upper_MSB;
+  WebRtc_UWord32    streamval;
+  const   WebRtc_UWord8 *stream_ptr;
+  const   WebRtc_UWord16 *cdf_ptr;
+  int     k;
+
+
+  stream_ptr = streamdata->stream + streamdata->stream_index;
+  W_upper = streamdata->W_upper;
+  if (W_upper == 0)
+    /* Should not be possible in normal operation */
+    return -2;
+
+  if (streamdata->stream_index == 0)   /* first time decoder is called for this stream */
+  {
+    /* read first word from bytestream */
+    streamval = *stream_ptr << 24;
+    streamval |= *++stream_ptr << 16;
+    streamval |= *++stream_ptr << 8;
+    streamval |= *++stream_ptr;
+  } else {
+    streamval = streamdata->streamval;
+  }
+
+
+  for (k=N; k>0; k--)
+  {
+    /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = W_upper >> 16;
+
+    /* start at the specified table entry */
+    cdf_ptr = *cdf + (*init_index++);
+    W_tmp = W_upper_MSB * *cdf_ptr;
+    W_tmp += (W_upper_LSB * *cdf_ptr) >> 16;
+    if (streamval > W_tmp)
+    {
+      for ( ;; )
+      {
+        W_lower = W_tmp;
+        if (cdf_ptr[0]==65535)
+          /* range check */
+          return -3;
+        W_tmp = W_upper_MSB * *++cdf_ptr;
+        W_tmp += (W_upper_LSB * *cdf_ptr) >> 16;
+        if (streamval <= W_tmp) break;
+      }
+      W_upper = W_tmp;
+      *data++ = (int)(cdf_ptr - *cdf++ - 1);
+    } else {
+      for ( ;; )
+      {
+        W_upper = W_tmp;
+        --cdf_ptr;
+        if (cdf_ptr<*cdf) {
+          /* range check */
+          return -3;
+        }
+        W_tmp = W_upper_MSB * *cdf_ptr;
+        W_tmp += (W_upper_LSB * *cdf_ptr) >> 16;
+        if (streamval > W_tmp) break;
+      }
+      W_lower = W_tmp;
+      *data++ = (int)(cdf_ptr - *cdf++);
+    }
+
+    /* shift interval to start at zero */
+    W_upper -= ++W_lower;
+    /* add integer to bitstream */
+    streamval -= W_lower;
+
+    /* renormalize interval and update streamval */
+    while ( !(W_upper & 0xFF000000) )    /* W_upper < 2^24 */
+    {
+      /* read next byte from stream */
+      streamval = (streamval << 8) | *++stream_ptr;
+      W_upper <<= 8;
+    }
+  }
+
+  streamdata->stream_index = (int)(stream_ptr - streamdata->stream);
+  streamdata->W_upper = W_upper;
+  streamdata->streamval = streamval;
+
+
+  /* find number of bytes in original stream (determined by current interval width) */
+  if ( W_upper > 0x01FFFFFF )
+    return streamdata->stream_index - 2;
+  else
+    return streamdata->stream_index - 1;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines_logist.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines_logist.c
new file mode 100644
index 0000000..422855a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/arith_routines_logist.c
@@ -0,0 +1,294 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * arith_routines_logist.c
+ *
+ * This file contains functions for arithmetically encoding and
+ * decoding DFT coefficients.
+ *
+ */
+
+
+#include "arith_routines.h"
+
+
+
+static const WebRtc_Word32 kHistEdgesQ15[51] = {
+  -327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716,
+  -196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644,
+  -65536, -52429, -39322, -26215, -13108,  0,  13107,  26214,  39321,  52428,
+  65536,  78643,  91750,  104857,  117964,  131072,  144179,  157286,  170393,  183500,
+  196608,  209715,  222822,  235929,  249036,  262144,  275251,  288358,  301465,  314572,
+  327680};
+
+
+static const int kCdfSlopeQ0[51] = {  /* Q0 */
+  5,  5,  5,  5,  5,  5,  5,  5,  5,  5,
+  5,  5,  13,  23,  47,  87,  154,  315,  700,  1088,
+  2471,  6064,  14221,  21463,  36634,  36924,  19750,  13270,  5806,  2312,
+  1095,  660,  316,  145,  86,  41,  32,  5,  5,  5,
+  5,  5,  5,  5,  5,  5,  5,  5,  5,  2, 0};
+
+
+static const int kCdfQ16[51] = {  /* Q16 */
+  0,  2,  4,  6,  8,  10,  12,  14,  16,  18,
+  20,  22,  24,  29,  38,  57,  92,  153,  279,  559,
+  994,  1983,  4408,  10097,  18682,  33336,  48105,  56005,  61313,  63636,
+  64560,  64998,  65262,  65389,  65447,  65481,  65497,  65510,  65512,  65514,
+  65516,  65518,  65520,  65522,  65524,  65526,  65528,  65530,  65532,  65534,
+  65535};
+
+
+
+/* function to be converted to fixed point */
+static __inline WebRtc_UWord32 piecewise(WebRtc_Word32 xinQ15) {
+
+  WebRtc_Word32 ind, qtmp1, qtmp2, qtmp3;
+  WebRtc_UWord32 tmpUW32;
+
+
+  qtmp2 = xinQ15;
+
+  if (qtmp2 < kHistEdgesQ15[0]) {
+    qtmp2 = kHistEdgesQ15[0];
+  }
+  if (qtmp2 > kHistEdgesQ15[50]) {
+    qtmp2 = kHistEdgesQ15[50];
+  }
+
+  qtmp1 = qtmp2 - kHistEdgesQ15[0];       /* Q15 - Q15 = Q15        */
+  ind = (qtmp1 * 5) >> 16;              /* 2^16 / 5 = 0.4 in Q15  */
+  /* Q15 -> Q0              */
+  qtmp1 = qtmp2 - kHistEdgesQ15[ind];     /* Q15 - Q15 = Q15        */
+  qtmp2 = kCdfSlopeQ0[ind] * qtmp1;      /* Q0 * Q15 = Q15         */
+  qtmp3 = qtmp2>>15;                    /* Q15 -> Q0              */
+
+  tmpUW32 = kCdfQ16[ind] + qtmp3;    /* Q0 + Q0 = Q0           */
+  return tmpUW32;
+}
+
+
+
+int WebRtcIsac_EncLogisticMulti2(
+    Bitstr *streamdata,      /* in-/output struct containing bitstream */
+    WebRtc_Word16 *dataQ7,    /* input: data vector */
+    const WebRtc_UWord16 *envQ8, /* input: side info vector defining the width of the pdf */
+    const int N,       /* input: data vector length / 2 */
+    const WebRtc_Word16 isSWB12kHz)
+{
+  WebRtc_UWord32 W_lower, W_upper;
+  WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
+  WebRtc_UWord8 *stream_ptr;
+  WebRtc_UWord8 *maxStreamPtr;
+  WebRtc_UWord8 *stream_ptr_carry;
+  WebRtc_UWord32 cdf_lo, cdf_hi;
+  int k;
+
+  /* point to beginning of stream buffer */
+  stream_ptr = streamdata->stream + streamdata->stream_index;
+  W_upper = streamdata->W_upper;
+
+  maxStreamPtr = streamdata->stream + STREAM_SIZE_MAX_60 - 1;
+  for (k = 0; k < N; k++)
+  {
+    /* compute cdf_lower and cdf_upper by evaluating the piecewise linear cdf */
+    cdf_lo = piecewise((*dataQ7 - 64) * *envQ8);
+    cdf_hi = piecewise((*dataQ7 + 64) * *envQ8);
+
+    /* test and clip if probability gets too small */
+    while (cdf_lo+1 >= cdf_hi) {
+      /* clip */
+      if (*dataQ7 > 0) {
+        *dataQ7 -= 128;
+        cdf_hi = cdf_lo;
+        cdf_lo = piecewise((*dataQ7 - 64) * *envQ8);
+      } else {
+        *dataQ7 += 128;
+        cdf_lo = cdf_hi;
+        cdf_hi = piecewise((*dataQ7 + 64) * *envQ8);
+      }
+    }
+
+    dataQ7++;
+    // increment only once per 4 iterations for SWB-16kHz or WB
+    // increment only once per 2 iterations for SWB-12kHz
+    envQ8 += (isSWB12kHz)? (k & 1):((k & 1) & (k >> 1));
+
+
+    /* update interval */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = W_upper >> 16;
+    W_lower = W_upper_MSB * cdf_lo;
+    W_lower += (W_upper_LSB * cdf_lo) >> 16;
+    W_upper = W_upper_MSB * cdf_hi;
+    W_upper += (W_upper_LSB * cdf_hi) >> 16;
+
+    /* shift interval such that it begins at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamdata->streamval += W_lower;
+
+    /* handle carry */
+    if (streamdata->streamval < W_lower)
+    {
+      /* propagate carry */
+      stream_ptr_carry = stream_ptr;
+      while (!(++(*--stream_ptr_carry)));
+    }
+
+    /* renormalize interval, store most significant byte of streamval and update streamval */
+    while ( !(W_upper & 0xFF000000) )      /* W_upper < 2^24 */
+    {
+      W_upper <<= 8;
+      *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+
+      if(stream_ptr > maxStreamPtr)
+      {
+        return -ISAC_DISALLOWED_BITSTREAM_LENGTH;
+      }
+      streamdata->streamval <<= 8;
+    }
+  }
+
+  /* calculate new stream_index */
+  streamdata->stream_index = (int)(stream_ptr - streamdata->stream);
+  streamdata->W_upper = W_upper;
+
+  return 0;
+}
+
+
+
+int WebRtcIsac_DecLogisticMulti2(
+    WebRtc_Word16 *dataQ7,       /* output: data vector */
+    Bitstr *streamdata,      /* in-/output struct containing bitstream */
+    const WebRtc_UWord16 *envQ8, /* input: side info vector defining the width of the pdf */
+    const WebRtc_Word16 *ditherQ7,/* input: dither vector */
+    const int N,         /* input: data vector length */
+    const WebRtc_Word16 isSWB12kHz)
+{
+  WebRtc_UWord32    W_lower, W_upper;
+  WebRtc_UWord32    W_tmp;
+  WebRtc_UWord32    W_upper_LSB, W_upper_MSB;
+  WebRtc_UWord32    streamval;
+  const WebRtc_UWord8 *stream_ptr;
+  WebRtc_UWord32    cdf_tmp;
+  WebRtc_Word16     candQ7;
+  int             k;
+
+  stream_ptr = streamdata->stream + streamdata->stream_index;
+  W_upper = streamdata->W_upper;
+  if (streamdata->stream_index == 0)   /* first time decoder is called for this stream */
+  {
+    /* read first word from bytestream */
+    streamval = *stream_ptr << 24;
+    streamval |= *++stream_ptr << 16;
+    streamval |= *++stream_ptr << 8;
+    streamval |= *++stream_ptr;
+  } else {
+    streamval = streamdata->streamval;
+  }
+
+
+  for (k = 0; k < N; k++)
+  {
+    /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */
+    W_upper_LSB = W_upper & 0x0000FFFF;
+    W_upper_MSB = W_upper >> 16;
+
+    /* find first candidate by inverting the logistic cdf */
+    candQ7 = - *ditherQ7 + 64;
+    cdf_tmp = piecewise(candQ7 * *envQ8);
+
+    W_tmp = W_upper_MSB * cdf_tmp;
+    W_tmp += (W_upper_LSB * cdf_tmp) >> 16;
+    if (streamval > W_tmp)
+    {
+      W_lower = W_tmp;
+      candQ7 += 128;
+      cdf_tmp = piecewise(candQ7 * *envQ8);
+
+      W_tmp = W_upper_MSB * cdf_tmp;
+      W_tmp += (W_upper_LSB * cdf_tmp) >> 16;
+      while (streamval > W_tmp)
+      {
+        W_lower = W_tmp;
+        candQ7 += 128;
+        cdf_tmp = piecewise(candQ7 * *envQ8);
+
+        W_tmp = W_upper_MSB * cdf_tmp;
+        W_tmp += (W_upper_LSB * cdf_tmp) >> 16;
+
+        /* error check */
+        if (W_lower == W_tmp) return -1;
+      }
+      W_upper = W_tmp;
+
+      /* another sample decoded */
+      *dataQ7 = candQ7 - 64;
+    }
+    else
+    {
+      W_upper = W_tmp;
+      candQ7 -= 128;
+      cdf_tmp = piecewise(candQ7 * *envQ8);
+
+      W_tmp = W_upper_MSB * cdf_tmp;
+      W_tmp += (W_upper_LSB * cdf_tmp) >> 16;
+      while ( !(streamval > W_tmp) )
+      {
+        W_upper = W_tmp;
+        candQ7 -= 128;
+        cdf_tmp = piecewise(candQ7 * *envQ8);
+
+        W_tmp = W_upper_MSB * cdf_tmp;
+        W_tmp += (W_upper_LSB * cdf_tmp) >> 16;
+
+        /* error check */
+        if (W_upper == W_tmp) return -1;
+      }
+      W_lower = W_tmp;
+
+      /* another sample decoded */
+      *dataQ7 = candQ7 + 64;
+    }
+    ditherQ7++;
+    dataQ7++;
+    // increment only once per 4 iterations for SWB-16kHz or WB
+    // increment only once per 2 iterations for SWB-12kHz
+    envQ8 += (isSWB12kHz)? (k & 1):((k & 1) & (k >> 1));
+
+    /* shift interval to start at zero */
+    W_upper -= ++W_lower;
+
+    /* add integer to bitstream */
+    streamval -= W_lower;
+
+    /* renormalize interval and update streamval */
+    while ( !(W_upper & 0xFF000000) )    /* W_upper < 2^24 */
+    {
+      /* read next byte from stream */
+      streamval = (streamval << 8) | *++stream_ptr;
+      W_upper <<= 8;
+    }
+  }
+
+  streamdata->stream_index = (int)(stream_ptr - streamdata->stream);
+  streamdata->W_upper = W_upper;
+  streamdata->streamval = streamval;
+
+  /* find number of bytes in original stream (determined by current interval width) */
+  if ( W_upper > 0x01FFFFFF )
+    return streamdata->stream_index - 2;
+  else
+    return streamdata->stream_index - 1;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/bandwidth_estimator.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/bandwidth_estimator.c
new file mode 100644
index 0000000..d0a50c5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/bandwidth_estimator.c
@@ -0,0 +1,1020 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * BwEstimator.c
+ *
+ * This file contains the code for the Bandwidth Estimator designed
+ * for iSAC.
+ *
+ */
+
+#include "bandwidth_estimator.h"
+#include "settings.h"
+#include "isac.h"
+
+#include <math.h>
+
/* Array of quantization levels for the bottleneck info, in bits/s.
 * Wideband table: 12 log-spaced levels from 10000 to 32000 bit/s.
 * Matlab: sprintf('%4.1ff, ', logspace(log10(10000), log10(32000), 12))
 * NOTE(review): the original note said logspace(log10(5000), log10(40000), 12),
 * which does not match the values below -- presumed stale. */
static const float kQRateTableWb[12] =
{
  10000.0f, 11115.3f, 12355.1f, 13733.1f, 15264.8f, 16967.3f,
  18859.8f, 20963.3f, 23301.4f, 25900.3f, 28789.0f, 32000.0f};


/* Super-wideband table: 24 levels from 10000 to 56000 bit/s.  The first
 * eight entries match the wideband table; the remainder increase in
 * uniform steps of ~2189.8 bit/s up to 56000. */
static const float kQRateTableSwb[24] =
{
  10000.0f, 11115.3f, 12355.1f, 13733.1f, 15264.8f, 16967.3f,
  18859.8f, 20963.3f, 23153.1f, 25342.9f, 27532.7f, 29722.5f,
  31912.3f, 34102.1f, 36291.9f, 38481.7f, 40671.4f, 42861.2f,
  45051.0f, 47240.8f, 49430.6f, 51620.4f, 53810.2f, 56000.0f,
};
+
+
+
+
+WebRtc_Word32 WebRtcIsac_InitBandwidthEstimator(
+    BwEstimatorstr*              bwest_str,
+    enum IsacSamplingRate encoderSampRate,
+    enum IsacSamplingRate decoderSampRate)
+{
+  switch(encoderSampRate)
+  {
+    case kIsacWideband:
+      {
+        bwest_str->send_bw_avg       = INIT_BN_EST_WB;
+        break;
+      }
+    case kIsacSuperWideband:
+      {
+        bwest_str->send_bw_avg       = INIT_BN_EST_SWB;
+        break;
+      }
+  }
+
+  switch(decoderSampRate)
+  {
+    case kIsacWideband:
+      {
+        bwest_str->prev_frame_length = INIT_FRAME_LEN_WB;
+        bwest_str->rec_bw_inv        = 1.0f /
+            (INIT_BN_EST_WB + INIT_HDR_RATE_WB);
+        bwest_str->rec_bw            = (WebRtc_Word32)INIT_BN_EST_WB;
+        bwest_str->rec_bw_avg_Q      = INIT_BN_EST_WB;
+        bwest_str->rec_bw_avg        = INIT_BN_EST_WB + INIT_HDR_RATE_WB;
+        bwest_str->rec_header_rate   = INIT_HDR_RATE_WB;
+        break;
+      }
+    case kIsacSuperWideband:
+      {
+        bwest_str->prev_frame_length = INIT_FRAME_LEN_SWB;
+        bwest_str->rec_bw_inv        = 1.0f /
+            (INIT_BN_EST_SWB + INIT_HDR_RATE_SWB);
+        bwest_str->rec_bw            = (WebRtc_Word32)INIT_BN_EST_SWB;
+        bwest_str->rec_bw_avg_Q      = INIT_BN_EST_SWB;
+        bwest_str->rec_bw_avg        = INIT_BN_EST_SWB + INIT_HDR_RATE_SWB;
+        bwest_str->rec_header_rate   = INIT_HDR_RATE_SWB;
+        break;
+      }
+  }
+
+  bwest_str->prev_rec_rtp_number       = 0;
+  bwest_str->prev_rec_arr_ts           = 0;
+  bwest_str->prev_rec_send_ts          = 0;
+  bwest_str->prev_rec_rtp_rate         = 1.0f;
+  bwest_str->last_update_ts            = 0;
+  bwest_str->last_reduction_ts         = 0;
+  bwest_str->count_tot_updates_rec     = -9;
+  bwest_str->rec_jitter                = 10.0f;
+  bwest_str->rec_jitter_short_term     = 0.0f;
+  bwest_str->rec_jitter_short_term_abs = 5.0f;
+  bwest_str->rec_max_delay             = 10.0f;
+  bwest_str->rec_max_delay_avg_Q       = 10.0f;
+  bwest_str->num_pkts_rec              = 0;
+
+  bwest_str->send_max_delay_avg        = 10.0f;
+
+  bwest_str->hsn_detect_rec = 0;
+
+  bwest_str->num_consec_rec_pkts_over_30k = 0;
+
+  bwest_str->hsn_detect_snd = 0;
+
+  bwest_str->num_consec_snt_pkts_over_30k = 0;
+
+  bwest_str->in_wait_period = 0;
+
+  bwest_str->change_to_WB = 0;
+
+  bwest_str->numConsecLatePkts = 0;
+  bwest_str->consecLatency = 0;
+  bwest_str->inWaitLatePkts = 0;
+  bwest_str->senderTimestamp = 0;
+  bwest_str->receiverTimestamp = 0;
+  return 0;
+}
+
/* This function updates both bottleneck rate estimates (rec_bw /
 * rec_bw_inv) and the receive-side jitter statistics from one packet.
 *
 * Parameters:
 * rtp_number    - sequence number from the RTP packet, from NetEq
 * frame_length  - length of signal frame in ms, from iSAC decoder
 * send_ts       - value in RTP header giving send time in samples
 * arr_ts        - value given by timeGetTime() time of arrival in samples of packet from NetEq
 * pksize        - size of packet in bytes, from NetEq
 * returns 0 if everything went fine, -1 otherwise
 */
WebRtc_Word16 WebRtcIsac_UpdateBandwidthEstimator(
    BwEstimatorstr *bwest_str,
    const WebRtc_UWord16 rtp_number,
    const WebRtc_Word32  frame_length,
    const WebRtc_UWord32 send_ts,
    const WebRtc_UWord32 arr_ts,
    const WebRtc_Word32  pksize
    /*,    const WebRtc_UWord16 Index*/)
{
  float weight = 0.0f;
  float curr_bw_inv = 0.0f;
  float rec_rtp_rate;
  float t_diff_proj;
  float arr_ts_diff;
  float send_ts_diff;
  float arr_time_noise;
  float arr_time_noise_abs;

  float delay_correction_factor = 1;
  float late_diff = 0.0f;
  int immediate_set = 0;
  int num_pkts_expected;


  // We have to adjust the header-rate if the first packet has a
  // frame-size different than the initialized value.
  if ( frame_length != bwest_str->prev_frame_length )
  {
    bwest_str->rec_header_rate = (float)HEADER_SIZE * 8.0f *
        1000.0f / (float)frame_length;     /* bits/s */
  }

  /* UPDATE ESTIMATES ON THIS SIDE */
  /* compute far-side transmission rate */
  rec_rtp_rate = ((float)pksize * 8.0f * 1000.0f / (float)frame_length) +
      bwest_str->rec_header_rate;
  // rec_rtp_rate packet bits/s + header bits/s

  /* check for timer wrap-around; on wrap, reset the bookkeeping and
   * skip the update for this packet */
  if (arr_ts < bwest_str->prev_rec_arr_ts)
  {
    bwest_str->prev_rec_arr_ts   = arr_ts;
    bwest_str->last_update_ts    = arr_ts;
    bwest_str->last_reduction_ts = arr_ts + 3*FS;
    bwest_str->num_pkts_rec      = 0;

    /* store frame length */
    bwest_str->prev_frame_length = frame_length;

    /* store far-side transmission rate */
    bwest_str->prev_rec_rtp_rate = rec_rtp_rate;

    /* store far-side RTP time stamp */
    bwest_str->prev_rec_rtp_number = rtp_number;

    return 0;
  }

  bwest_str->num_pkts_rec++;

  /* check that it's not one of the first 9 packets */
  if ( bwest_str->count_tot_updates_rec > 0 )
  {
    if(bwest_str->in_wait_period > 0 )
    {
      bwest_str->in_wait_period--;
    }

    bwest_str->inWaitLatePkts -= ((bwest_str->inWaitLatePkts > 0)? 1:0);
    send_ts_diff = (float)(send_ts - bwest_str->prev_rec_send_ts);

    if (send_ts_diff <= (16 * frame_length)*2)
      //doesn't allow for a dropped packet, not sure necessary to be
      // that strict -DH
    {
      /* if not been updated for a long time (> 3 s), reduce the BN estimate */
      if((WebRtc_UWord32)(arr_ts - bwest_str->last_update_ts) *
         1000.0f / FS > 3000)
      {
        //how many frames should have been received since the last
        // update if too many have been dropped or there have been
        // big delays won't allow this reduction may no longer need
        // the send_ts_diff here
        num_pkts_expected = (int)(((float)(arr_ts -
                                           bwest_str->last_update_ts) * 1000.0f /(float) FS) /
                                  (float)frame_length);

        /* only decay the estimate if at least 90% of the expected
         * packets actually arrived */
        if(((float)bwest_str->num_pkts_rec/(float)num_pkts_expected) >
           0.9)
        {
          /* exponential decay factor based on time since the last
           * reduction, in ms */
          float inv_bitrate = (float) pow( 0.99995,
                                           (double)((WebRtc_UWord32)(arr_ts -
                                                                     bwest_str->last_reduction_ts)*1000.0f/FS) );

          if ( inv_bitrate )
          {
            bwest_str->rec_bw_inv /= inv_bitrate;

            //precautionary, likely never necessary
            if (bwest_str->hsn_detect_snd &&
                bwest_str->hsn_detect_rec)
            {
              if (bwest_str->rec_bw_inv > 0.000066f)
              {
                bwest_str->rec_bw_inv = 0.000066f;
              }
            }
          }
          else
          {
            bwest_str->rec_bw_inv = 1.0f /
                (INIT_BN_EST_WB + INIT_HDR_RATE_WB);
          }
          /* reset time-since-update counter */
          bwest_str->last_reduction_ts = arr_ts;
        }
        else
          //reset here?
        {
          bwest_str->last_reduction_ts = arr_ts + 3*FS;
          bwest_str->last_update_ts = arr_ts;
          bwest_str->num_pkts_rec = 0;
        }
      }
    }
    else
    {
      /* send-timestamp gap too large (dropped packets); restart the
       * no-update timers instead of decaying */
      bwest_str->last_reduction_ts = arr_ts + 3*FS;
      bwest_str->last_update_ts = arr_ts;
      bwest_str->num_pkts_rec = 0;
    }


    /* temporarily speed up adaptation if frame length has changed */
    if ( frame_length != bwest_str->prev_frame_length )
    {
      bwest_str->count_tot_updates_rec = 10;
      bwest_str->rec_header_rate = (float)HEADER_SIZE * 8.0f *
          1000.0f / (float)frame_length;     /* bits/s */

      bwest_str->rec_bw_inv = 1.0f /((float)bwest_str->rec_bw +
                                     bwest_str->rec_header_rate);
    }

    //////////////////////// late-packet (latency build-up) detection
    arr_ts_diff = (float)(arr_ts - bwest_str->prev_rec_arr_ts);

    if (send_ts_diff > 0 )
    {
      late_diff = arr_ts_diff - send_ts_diff;
    }
    else
    {
      late_diff = arr_ts_diff - (float)(16 * frame_length);
    }

    if((late_diff > 0) && !bwest_str->inWaitLatePkts)
    {
      bwest_str->numConsecLatePkts++;
      bwest_str->consecLatency += late_diff;
    }
    else
    {
      bwest_str->numConsecLatePkts = 0;
      bwest_str->consecLatency = 0;
    }
    /* more than 50 consecutively late packets: force a rate cut
     * proportional to the average per-packet lateness */
    if(bwest_str->numConsecLatePkts > 50)
    {
      float latencyMs = bwest_str->consecLatency/(FS/1000);
      float averageLatencyMs = latencyMs / bwest_str->numConsecLatePkts;
      delay_correction_factor = frame_length / (frame_length + averageLatencyMs);
      immediate_set = 1;
      bwest_str->inWaitLatePkts = (WebRtc_Word16)((bwest_str->consecLatency/(FS/1000)) / 30);// + 150;
      bwest_str->start_wait_period = arr_ts;
    }
    ///////////////////////////////////////////////



    /*   update only if previous packet was not lost */
    if ( rtp_number == bwest_str->prev_rec_rtp_number + 1 )
    {


      if (!(bwest_str->hsn_detect_snd && bwest_str->hsn_detect_rec))
      {
        if ((arr_ts_diff > (float)(16 * frame_length)))
        {
          //1/2 second
          if ((late_diff > 8000.0f) && !bwest_str->in_wait_period)
          {
            delay_correction_factor = 0.7f;
            bwest_str->in_wait_period = 55;
            bwest_str->start_wait_period = arr_ts;
            immediate_set = 1;
          }
          //320 ms
          else if (late_diff > 5120.0f && !bwest_str->in_wait_period)
          {
            delay_correction_factor = 0.8f;
            immediate_set = 1;
            bwest_str->in_wait_period = 44;
            bwest_str->start_wait_period = arr_ts;
          }
        }
      }

      /* only adapt when both the previous and the current packet
       * arrived faster than the running average rate */
      if ((bwest_str->prev_rec_rtp_rate > bwest_str->rec_bw_avg) &&
          (rec_rtp_rate > bwest_str->rec_bw_avg)                 &&
          !bwest_str->in_wait_period)
      {
        /* test if still in initiation period and increment counter */
        if (bwest_str->count_tot_updates_rec++ > 99)
        {
          /* constant weight after initiation part */
          weight = 0.01f;
        }
        else
        {
          /* weight decreases with number of updates */
          weight = 1.0f / (float) bwest_str->count_tot_updates_rec;
        }
        /* Bottle Neck Estimation */

        /* limit outliers */
        /* if more than 25 ms too much */
        if (arr_ts_diff > frame_length * FS/1000 + 400.0f)
        {
          // in samples,  why 25ms??
          arr_ts_diff = frame_length * FS/1000 + 400.0f;
        }
        if(arr_ts_diff < (frame_length * FS/1000) - 160.0f)
        {
          /* don't allow it to be less than frame rate - 10 ms */
          arr_ts_diff = (float)frame_length * FS/1000 - 160.0f;
        }

        /* compute inverse receiving rate for last packet */
        curr_bw_inv = arr_ts_diff / ((float)(pksize + HEADER_SIZE) *
                                     8.0f * FS); // (180+35)*8*16000 = 27.5 Mbit....


        if(curr_bw_inv <
           (1.0f / (MAX_ISAC_BW + bwest_str->rec_header_rate)))
        {
          // don't allow inv rate to be larger than MAX
          curr_bw_inv = (1.0f /
                         (MAX_ISAC_BW + bwest_str->rec_header_rate));
        }

        /* update bottle neck rate estimate (exponential average) */
        bwest_str->rec_bw_inv = weight * curr_bw_inv +
            (1.0f - weight) * bwest_str->rec_bw_inv;

        /* reset time-since-update counter */
        bwest_str->last_update_ts    = arr_ts;
        bwest_str->last_reduction_ts = arr_ts + 3 * FS;
        bwest_str->num_pkts_rec = 0;

        /* Jitter Estimation */
        /* projected difference between arrival times */
        t_diff_proj = ((float)(pksize + HEADER_SIZE) * 8.0f *
                       1000.0f) / bwest_str->rec_bw_avg;


        // difference between projected and actual
        //   arrival time differences
        arr_time_noise = (float)(arr_ts_diff*1000.0f/FS) -
            t_diff_proj;
        arr_time_noise_abs = (float) fabs( arr_time_noise );

        /* long term averaged absolute jitter, capped at 10 */
        bwest_str->rec_jitter = weight * arr_time_noise_abs +
            (1.0f - weight) * bwest_str->rec_jitter;
        if (bwest_str->rec_jitter > 10.0f)
        {
          bwest_str->rec_jitter = 10.0f;
        }
        /* short term averaged absolute jitter */
        bwest_str->rec_jitter_short_term_abs = 0.05f *
            arr_time_noise_abs + 0.95f *
            bwest_str->rec_jitter_short_term_abs;

        /* short term averaged jitter */
        bwest_str->rec_jitter_short_term = 0.05f * arr_time_noise +
            0.95f * bwest_str->rec_jitter_short_term;
      }
    }
  }
  else
  {
    // reset time-since-update counter when
    // receiving the first 9 packets
    bwest_str->last_update_ts    = arr_ts;
    bwest_str->last_reduction_ts = arr_ts + 3*FS;
    bwest_str->num_pkts_rec = 0;

    bwest_str->count_tot_updates_rec++;
  }

  /* limit minimum bottle neck rate */
  if (bwest_str->rec_bw_inv > 1.0f / ((float)MIN_ISAC_BW +
                                      bwest_str->rec_header_rate))
  {
    bwest_str->rec_bw_inv = 1.0f / ((float)MIN_ISAC_BW +
                                    bwest_str->rec_header_rate);
  }

  // limit maximum bitrate
  if (bwest_str->rec_bw_inv < 1.0f / ((float)MAX_ISAC_BW +
                                      bwest_str->rec_header_rate))
  {
    bwest_str->rec_bw_inv = 1.0f / ((float)MAX_ISAC_BW +
                                    bwest_str->rec_header_rate);
  }

  /* store frame length */
  bwest_str->prev_frame_length = frame_length;

  /* store far-side transmission rate */
  bwest_str->prev_rec_rtp_rate = rec_rtp_rate;

  /* store far-side RTP time stamp */
  bwest_str->prev_rec_rtp_number = rtp_number;

  // Replace bwest_str->rec_max_delay by the new
  // value (atomic operation)
  bwest_str->rec_max_delay = 3.0f * bwest_str->rec_jitter;

  /* store send and arrival time stamp */
  bwest_str->prev_rec_arr_ts = arr_ts ;
  bwest_str->prev_rec_send_ts = send_ts;

  /* Replace bwest_str->rec_bw by the new value (atomic operation) */
  bwest_str->rec_bw = (WebRtc_Word32)(1.0f / bwest_str->rec_bw_inv -
                                      bwest_str->rec_header_rate);

  /* apply the immediate rate cut requested by the late-packet logic */
  if (immediate_set)
  {
    bwest_str->rec_bw = (WebRtc_Word32) (delay_correction_factor *
                                         (float) bwest_str->rec_bw);

    if (bwest_str->rec_bw < (WebRtc_Word32) MIN_ISAC_BW)
    {
      bwest_str->rec_bw = (WebRtc_Word32) MIN_ISAC_BW;
    }

    bwest_str->rec_bw_avg = bwest_str->rec_bw +
        bwest_str->rec_header_rate;

    bwest_str->rec_bw_avg_Q = (float) bwest_str->rec_bw;

    bwest_str->rec_jitter_short_term = 0.0f;

    bwest_str->rec_bw_inv = 1.0f / (bwest_str->rec_bw +
                                    bwest_str->rec_header_rate);

    bwest_str->count_tot_updates_rec = 1;

    immediate_set = 0;
    bwest_str->consecLatency = 0;
    bwest_str->numConsecLatePkts = 0;
  }

  return 0;
}
+
+
+/* This function updates the send bottle neck rate                                                   */
+/* Index         - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
+/* returns 0 if everything went fine, -1 otherwise                                                   */
+WebRtc_Word16 WebRtcIsac_UpdateUplinkBwImpl(
+    BwEstimatorstr*           bwest_str,
+    WebRtc_Word16               index,
+    enum IsacSamplingRate encoderSamplingFreq)
+{
+  if((index < 0) || (index > 23))
+  {
+    return -ISAC_RANGE_ERROR_BW_ESTIMATOR;
+  }
+
+  /* UPDATE ESTIMATES FROM OTHER SIDE */
+  if(encoderSamplingFreq == kIsacWideband)
+  {
+    if(index > 11)
+    {
+      index -= 12;   
+      /* compute the jitter estimate as decoded on the other side */
+      bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg +
+          0.1f * (float)MAX_ISAC_MD;
+    }
+    else
+    {
+      /* compute the jitter estimate as decoded on the other side */
+      bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg +
+          0.1f * (float)MIN_ISAC_MD;
+    }
+
+    /* compute the BN estimate as decoded on the other side */
+    bwest_str->send_bw_avg = 0.9f * bwest_str->send_bw_avg +
+        0.1f * kQRateTableWb[index];
+  }
+  else
+  {
+    /* compute the BN estimate as decoded on the other side */
+    bwest_str->send_bw_avg = 0.9f * bwest_str->send_bw_avg +
+        0.1f * kQRateTableSwb[index];
+  }
+
+  if (bwest_str->send_bw_avg > (float) 28000 && !bwest_str->hsn_detect_snd)
+  {
+    bwest_str->num_consec_snt_pkts_over_30k++;
+
+    if (bwest_str->num_consec_snt_pkts_over_30k >= 66)
+    {
+      //approx 2 seconds with 30ms frames
+      bwest_str->hsn_detect_snd = 1;
+    }
+  }
+  else if (!bwest_str->hsn_detect_snd)
+  {
+    bwest_str->num_consec_snt_pkts_over_30k = 0;
+  }
+  return 0;
+}
+
+// called when there is upper-band bit-stream to update jitter
+// statistics.
+WebRtc_Word16 WebRtcIsac_UpdateUplinkJitter(
+    BwEstimatorstr*              bwest_str,
+    WebRtc_Word32                  index)
+{
+  if((index < 0) || (index > 23))
+  {
+    return -ISAC_RANGE_ERROR_BW_ESTIMATOR;
+  }
+
+  if(index > 0)
+  {
+    /* compute the jitter estimate as decoded on the other side */
+    bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg +
+        0.1f * (float)MAX_ISAC_MD;
+  }
+  else
+  {
+    /* compute the jitter estimate as decoded on the other side */
+    bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg +
+        0.1f * (float)MIN_ISAC_MD;
+  }
+
+  return 0;
+}
+
+
+
// Computes the bandwidth/jitter estimation code (integer 0...23)
// to put in the sending iSAC payload.  The chosen rate level is written
// to bottleneckIndex[0] and the one-bit jitter flag to jitterInfo[0];
// in wideband mode the jitter bit is additionally folded into
// bottleneckIndex[0] (adds 12).  Both quantized running averages in
// bwest_str are updated.  Always returns 0.
WebRtc_UWord16
WebRtcIsac_GetDownlinkBwJitIndexImpl(
    BwEstimatorstr*           bwest_str,
    WebRtc_Word16*              bottleneckIndex,
    WebRtc_Word16*              jitterInfo,
    enum IsacSamplingRate decoderSamplingFreq)
{
  float MaxDelay;
  //WebRtc_UWord16 MaxDelayBit;

  float rate;
  float r;
  float e1, e2;
  const float weight = 0.1f;
  const float* ptrQuantizationTable;
  WebRtc_Word16 addJitterInfo;
  WebRtc_Word16 minInd;
  WebRtc_Word16 maxInd;
  WebRtc_Word16 midInd;

  /* Get Max Delay Bit */
  /* get unquantized max delay */
  MaxDelay = (float)WebRtcIsac_GetDownlinkMaxDelay(bwest_str);

  /* Quantize the max delay to one bit: pick whichever of MIN_ISAC_MD /
   * MAX_ISAC_MD keeps the running quantized average closest to the
   * measured MaxDelay, then fold the chosen level into that average. */
  if ( ((1.f - weight) * bwest_str->rec_max_delay_avg_Q + weight *
        MAX_ISAC_MD - MaxDelay) > (MaxDelay - (1.f-weight) *
                                   bwest_str->rec_max_delay_avg_Q - weight * MIN_ISAC_MD) )
  {
    jitterInfo[0] = 0;
    /* update quantized average */
    bwest_str->rec_max_delay_avg_Q =
        (1.f - weight) * bwest_str->rec_max_delay_avg_Q + weight *
        (float)MIN_ISAC_MD;
  }
  else
  {
    jitterInfo[0] = 1;
    /* update quantized average */
    bwest_str->rec_max_delay_avg_Q =
        (1.f-weight) * bwest_str->rec_max_delay_avg_Q + weight *
        (float)MAX_ISAC_MD;
  }

  // Get unquantized rate.
  rate = (float)WebRtcIsac_GetDownlinkBandwidth(bwest_str);

  /* Get Rate Index: wideband uses the 12-entry table plus the jitter
   * bit; super-wideband uses all 24 entries and no jitter bit. */
  if(decoderSamplingFreq == kIsacWideband)
  {
    ptrQuantizationTable = kQRateTableWb;
    addJitterInfo = 1;
    maxInd = 11;
  }
  else
  {
    ptrQuantizationTable = kQRateTableSwb;
    addJitterInfo = 0;
    maxInd = 23;
  }

  /* Binary search for the two adjacent table entries bracketing rate. */
  minInd = 0;
  while(maxInd > minInd + 1)
  {
    midInd = (maxInd + minInd) >> 1;
    if(rate > ptrQuantizationTable[midInd])
    {
      minInd = midInd;
    }
    else
    {
      maxInd = midInd;
    }
  }
  // Chose the index which gives results an average which is closest
  // to rate
  r = (1 - weight) * bwest_str->rec_bw_avg_Q - rate;
  e1 = weight * ptrQuantizationTable[minInd] + r;
  e2 = weight * ptrQuantizationTable[maxInd] + r;
  e1 = (e1 > 0)? e1:-e1;
  e2 = (e2 > 0)? e2:-e2;
  if(e1 < e2)
  {
    bottleneckIndex[0] = minInd;
  }
  else
  {
    bottleneckIndex[0] = maxInd;
  }

  /* Fold the chosen level into the quantized average, add the jitter
   * bit (wideband only), and update the unquantized average. */
  bwest_str->rec_bw_avg_Q = (1 - weight) * bwest_str->rec_bw_avg_Q +
      weight * ptrQuantizationTable[bottleneckIndex[0]];
  bottleneckIndex[0] += jitterInfo[0] * 12 * addJitterInfo;

  bwest_str->rec_bw_avg = (1 - weight) * bwest_str->rec_bw_avg + weight *
      (rate + bwest_str->rec_header_rate);

  return 0;
}
+
+
+
+/* get the bottle neck rate from far side to here, as estimated on this side */
+WebRtc_Word32 WebRtcIsac_GetDownlinkBandwidth( const BwEstimatorstr *bwest_str)
+{
+  WebRtc_Word32  rec_bw;
+  float   jitter_sign;
+  float   bw_adjust;
+
+  /* create a value between -1.0 and 1.0 indicating "average sign" of jitter */
+  jitter_sign = bwest_str->rec_jitter_short_term /
+      bwest_str->rec_jitter_short_term_abs;
+
+  /* adjust bw proportionally to negative average jitter sign */
+  bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign);
+
+  /* adjust Rate if jitter sign is mostly constant */
+  rec_bw = (WebRtc_Word32)(bwest_str->rec_bw * bw_adjust);
+
+  /* limit range of bottle neck rate */
+  if (rec_bw < MIN_ISAC_BW)
+  {
+    rec_bw = MIN_ISAC_BW;
+  }
+  else if (rec_bw > MAX_ISAC_BW)
+  {
+    rec_bw = MAX_ISAC_BW;
+  }
+  return rec_bw;
+}
+
+/* Returns the max delay (in ms) */
+WebRtc_Word32
+WebRtcIsac_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str)
+{
+  WebRtc_Word32 rec_max_delay;
+
+  rec_max_delay = (WebRtc_Word32)(bwest_str->rec_max_delay);
+
+  /* limit range of jitter estimate */
+  if (rec_max_delay < MIN_ISAC_MD)
+  {
+    rec_max_delay = MIN_ISAC_MD;
+  }
+  else if (rec_max_delay > MAX_ISAC_MD)
+  {
+    rec_max_delay = MAX_ISAC_MD;
+  }
+  return rec_max_delay;
+}
+
+/* get the bottle neck rate from here to far side, as estimated by far side */
+void
+WebRtcIsac_GetUplinkBandwidth(
+    const BwEstimatorstr* bwest_str,
+    WebRtc_Word32*          bitRate)
+{
+  /* limit range of bottle neck rate */
+  if (bwest_str->send_bw_avg < MIN_ISAC_BW)
+  {
+    *bitRate = MIN_ISAC_BW;
+  }
+  else if (bwest_str->send_bw_avg > MAX_ISAC_BW)
+  {
+    *bitRate = MAX_ISAC_BW;
+  }
+  else
+  {
+    *bitRate = (WebRtc_Word32)(bwest_str->send_bw_avg);
+  }
+  return;
+}
+
+/* Returns the max delay value from the other side in ms */
+WebRtc_Word32
+WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr *bwest_str)
+{
+  WebRtc_Word32 send_max_delay;
+
+  send_max_delay = (WebRtc_Word32)(bwest_str->send_max_delay_avg);
+
+  /* limit range of jitter estimate */
+  if (send_max_delay < MIN_ISAC_MD)
+  {
+    send_max_delay = MIN_ISAC_MD;
+  }
+  else if (send_max_delay > MAX_ISAC_MD)
+  {
+    send_max_delay = MAX_ISAC_MD;
+  }
+  return send_max_delay;
+}
+
+
/*
 * Update the long-term average bitrate and the amount of data queued at
 * the bottleneck, and return the minimum payload size (bytes) the
 * encoder must produce for this frame.
 *
 * State        : rate-model state, updated in place.
 * StreamSize   : bytes in bitstream.
 * FrameSamples : samples per frame.
 * BottleNeck   : bottleneck rate, excluding headers (bps).
 * DelayBuildUp : max delay from bottleneck buffering (ms).
 * bandwidth    : codec band; isac8kHz selects the wideband init rate.
 */
int WebRtcIsac_GetMinBytes(
    RateModel*         State,
    int                StreamSize,    /* bytes in bitstream */
    const int          FrameSamples,  /* samples per frame */
    const double       BottleNeck,    /* bottle neck rate; excl headers (bps) */
    const double       DelayBuildUp,  /* max delay from bottleneck buffering (ms) */
    enum ISACBandwidth bandwidth
    /*,WebRtc_Word16        frequentLargePackets*/)
{
  double MinRate = 0.0;
  int    MinBytes;
  double TransmissionTime;
  int    burstInterval = BURST_INTERVAL;

  // first 10 packets @ low rate, then INIT_BURST_LEN packets @
  // fixed rate of INIT_RATE bps
  if (State->InitCounter > 0)
  {
    /* note: post-decrement, so the counter runs down on every call */
    if (State->InitCounter-- <= INIT_BURST_LEN)
    {
      if(bandwidth == isac8kHz)
      {
        MinRate = INIT_RATE_WB;
      }
      else
      {
        MinRate = INIT_RATE_SWB;
      }
    }
    else
    {
      MinRate = 0;
    }
  }
  else
  {
    /* handle burst */
    if (State->BurstCounter)
    {
      if (State->StillBuffered < (1.0 - 1.0/BURST_LEN) * DelayBuildUp)
      {
        /* max bps derived from BottleNeck and DelayBuildUp values */
        MinRate = (1.0 + (FS/1000) * DelayBuildUp /
                   (double)(BURST_LEN * FrameSamples)) * BottleNeck;
      }
      else
      {
        // max bps derived from StillBuffered and DelayBuildUp
        // values
        MinRate = (1.0 + (FS/1000) * (DelayBuildUp -
                                      State->StillBuffered) / (double)FrameSamples) * BottleNeck;
        /* always burst at least 4% above the bottleneck */
        if (MinRate < 1.04 * BottleNeck)
        {
          MinRate = 1.04 * BottleNeck;
        }
      }
      State->BurstCounter--;
    }
  }


  /* convert rate from bits/second to bytes/packet */
  MinBytes = (int) (MinRate * FrameSamples / (8.0 * FS));

  /* StreamSize will be adjusted if less than MinBytes */
  if (StreamSize < MinBytes)
  {
    StreamSize = MinBytes;
  }

  /* keep track of when bottle neck was last exceeded by at least 1% */
  if (StreamSize * 8.0 * FS / FrameSamples > 1.01 * BottleNeck) {
    if (State->PrevExceed) {
      /* bottle_neck exceded twice in a row, decrease ExceedAgo */
      State->ExceedAgo -= /*BURST_INTERVAL*/ burstInterval / (BURST_LEN - 1);
      if (State->ExceedAgo < 0)
        State->ExceedAgo = 0;
    }
    else
    {
      State->ExceedAgo += (FrameSamples * 1000) / FS; /* ms */
      State->PrevExceed = 1;
    }
  }
  else
  {
    State->PrevExceed = 0;
    State->ExceedAgo += (FrameSamples * 1000) / FS;     /* ms */
  }

  /* set burst flag if bottle neck not exceeded for long time */
  if ((State->ExceedAgo > burstInterval) &&
      (State->BurstCounter == 0))
  {
    if (State->PrevExceed)
    {
      State->BurstCounter = BURST_LEN - 1;
    }
    else
    {
      State->BurstCounter = BURST_LEN;
    }
  }


  /* Update buffer delay */
  TransmissionTime = StreamSize * 8.0 * 1000.0 / BottleNeck;  /* ms */
  State->StillBuffered += TransmissionTime;
  State->StillBuffered -= (FrameSamples * 1000) / FS;     /* ms */
  if (State->StillBuffered < 0.0)
  {
    State->StillBuffered = 0.0;
  }

  return MinBytes;
}
+
+
+/*
+ * update long-term average bitrate and amount of data in buffer
+ */
+void WebRtcIsac_UpdateRateModel(
+    RateModel *State,
+    int StreamSize,                    /* bytes in bitstream */
+    const int FrameSamples,            /* samples per frame */
+    const double BottleNeck)        /* bottle neck rate; excl headers (bps) */
+{
+  double TransmissionTime;
+
+  /* avoid the initial "high-rate" burst */
+  State->InitCounter = 0;
+
+  /* Update buffer delay */
+  TransmissionTime = StreamSize * 8.0 * 1000.0 / BottleNeck;  /* ms */
+  State->StillBuffered += TransmissionTime;
+  State->StillBuffered -= (FrameSamples * 1000) / FS;     /* ms */
+  if (State->StillBuffered < 0.0)
+    State->StillBuffered = 0.0;
+
+}
+
+
+void WebRtcIsac_InitRateModel(
+    RateModel *State)
+{
+  State->PrevExceed      = 0;                        /* boolean */
+  State->ExceedAgo       = 0;                        /* ms */
+  State->BurstCounter    = 0;                        /* packets */
+  State->InitCounter     = INIT_BURST_LEN + 10;    /* packets */
+  State->StillBuffered   = 1.0;                    /* ms */
+}
+
/*
 * Choose the frame length, in samples, for the next frame given the
 * current bottleneck estimate.
 *
 * bottle_neck          : bottleneck rate in bits/s.
 * current_framesamples : current frame length in samples (320/480/960).
 *
 * Returns the new frame length in samples; any other current length is
 * returned unchanged.  Switching from 30 ms back to 20 ms frames is
 * effectively disabled via a 1 Mbit/s threshold.
 */
int WebRtcIsac_GetNewFrameLength(
    double bottle_neck,
    int    current_framesamples)
{
  /* rate thresholds (bits/s) that trigger a frame-length change */
  const int Thld_20_30 = 20000;
  const int Thld_30_20 = 1000000;   /* so high that 20 ms frames are disabled */
  const int Thld_30_60 = 18000;
  const int Thld_60_30 = 27000;

  int new_framesamples = current_framesamples;

  /* find new framelength */
  if (current_framesamples == 320)        /* 20 ms frames */
  {
    if (bottle_neck < Thld_20_30)
      new_framesamples = 480;
  }
  else if (current_framesamples == 480)   /* 30 ms frames */
  {
    if (bottle_neck < Thld_30_60)
      new_framesamples = 960;
    else if (bottle_neck > Thld_30_20)
      new_framesamples = 320;
  }
  else if (current_framesamples == 960)   /* 60 ms frames */
  {
    if (bottle_neck >= Thld_60_30)
      new_framesamples = 480;
  }

  return new_framesamples;
}
+
/*
 * Target SNR value for the given bottleneck rate and frame length,
 * from a per-frame-length quadratic fit:
 *     a + b * (rate/1000) + c * (rate/1000)^2
 * (c is currently 0 for every supported frame length).
 *
 * bottle_neck  : bottleneck rate in bits/s.
 * framesamples : frame length in samples (320/480/960).
 *
 * Returns the fitted SNR, or 0.0 for an unsupported frame length.
 */
double WebRtcIsac_GetSnr(
    double bottle_neck,
    int    framesamples)
{
  /* fit coefficients for the current frame length */
  double a, b, c;

  if (framesamples == 320)
  {
    a = -30.0; b = 0.8;  c = 0.0;
  }
  else if (framesamples == 480)
  {
    a = -23.0; b = 0.48; c = 0.0;
  }
  else if (framesamples == 960)
  {
    a = -23.0; b = 0.53; c = 0.0;
  }
  else
  {
    return 0;
  }

  return a + b * bottle_neck * 0.001 + c * bottle_neck *
      bottle_neck * 0.000001;
}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/bandwidth_estimator.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/bandwidth_estimator.h
new file mode 100644
index 0000000..5604d7b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/bandwidth_estimator.h
@@ -0,0 +1,177 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * bandwidth_estimator.h
+ *
+ * This header file contains the API for the Bandwidth Estimator
+ * designed for iSAC.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_
+
+#include "structs.h"
+#include "settings.h"
+
+
+#define MIN_ISAC_BW     10000
+#define MIN_ISAC_BW_LB  10000
+#define MIN_ISAC_BW_UB  25000
+
+#define MAX_ISAC_BW     56000
+#define MAX_ISAC_BW_UB  32000
+#define MAX_ISAC_BW_LB  32000
+
+#define MIN_ISAC_MD     5
+#define MAX_ISAC_MD     25
+
+// assumed header size, in bytes; we don't know the exact number
+// (header compression may be used)
+#define HEADER_SIZE        35
+
+// Initial Frame-Size, in ms, for Wideband & Super-Wideband Mode
+#define INIT_FRAME_LEN_WB  60
+#define INIT_FRAME_LEN_SWB 30
+
+// Initial Bottleneck Estimate, in bits/sec, for
+// Wideband & Super-wideband mode
+#define INIT_BN_EST_WB     20e3f
+#define INIT_BN_EST_SWB    56e3f
+
+// Initial Header rate (header rate depends on frame-size),
+// in bits/sec, for Wideband & Super-Wideband mode.
+#define INIT_HDR_RATE_WB                                                \
+  ((float)HEADER_SIZE * 8.0f * 1000.0f / (float)INIT_FRAME_LEN_WB)
+#define INIT_HDR_RATE_SWB                                               \
+  ((float)HEADER_SIZE * 8.0f * 1000.0f / (float)INIT_FRAME_LEN_SWB)
+
+// number of packets in a row for a high rate burst
+#define BURST_LEN       3
+
+// ms, max time between two full bursts
+#define BURST_INTERVAL  500
+
+// number of packets in a row for initial high rate burst
+#define INIT_BURST_LEN  5
+
+// bits/s, rate for the first BURST_LEN packets
+#define INIT_RATE_WB       INIT_BN_EST_WB
+#define INIT_RATE_SWB      INIT_BN_EST_SWB
+
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+  /* This function initializes the struct                    */
+  /* to be called before using the struct for anything else  */
+  /* returns 0 if everything went fine, -1 otherwise         */
+  WebRtc_Word32 WebRtcIsac_InitBandwidthEstimator(
+      BwEstimatorstr*           bwest_str,
+      enum IsacSamplingRate encoderSampRate,
+      enum IsacSamplingRate decoderSampRate);
+
+  /* This function updates the receiving estimate                                                      */
+  /* Parameters:                                                                                       */
+  /* rtp_number    - value from RTP packet, from NetEq                                                 */
+  /* frame length  - length of signal frame in ms, from iSAC decoder                                   */
+  /* send_ts       - value in RTP header giving send time in samples                                   */
+  /* arr_ts        - value given by timeGetTime() time of arrival in samples of packet from NetEq      */
+  /* pksize        - size of packet in bytes, from NetEq                                               */
+  /* Index         - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
+  /* returns 0 if everything went fine, -1 otherwise                                                   */
+  WebRtc_Word16 WebRtcIsac_UpdateBandwidthEstimator(
+      BwEstimatorstr*    bwest_str,
+      const WebRtc_UWord16 rtp_number,
+      const WebRtc_Word32  frame_length,
+      const WebRtc_UWord32 send_ts,
+      const WebRtc_UWord32 arr_ts,
+      const WebRtc_Word32  pksize);
+
+  /* Update receiving estimates. Used when we only receive BWE index, no iSAC data packet. */
+  WebRtc_Word16 WebRtcIsac_UpdateUplinkBwImpl(
+      BwEstimatorstr*           bwest_str,
+      WebRtc_Word16               Index,
+      enum IsacSamplingRate encoderSamplingFreq);
+
+  /* Returns the bandwidth/jitter estimation code (integer 0...23) to put in the sending iSAC payload */
+  WebRtc_UWord16 WebRtcIsac_GetDownlinkBwJitIndexImpl(
+      BwEstimatorstr*           bwest_str,
+      WebRtc_Word16*              bottleneckIndex,
+      WebRtc_Word16*              jitterInfo,
+      enum IsacSamplingRate decoderSamplingFreq);
+
+  /* Returns the bandwidth estimation (in bps) */
+  WebRtc_Word32 WebRtcIsac_GetDownlinkBandwidth(
+      const BwEstimatorstr *bwest_str);
+
+  /* Returns the max delay (in ms) */
+  WebRtc_Word32 WebRtcIsac_GetDownlinkMaxDelay(
+      const BwEstimatorstr *bwest_str);
+
+  /* Returns the bandwidth that iSAC should send with in bps */
+  void WebRtcIsac_GetUplinkBandwidth(
+      const BwEstimatorstr* bwest_str,
+      WebRtc_Word32*          bitRate);
+
+  /* Returns the max delay value from the other side in ms */
+  WebRtc_Word32 WebRtcIsac_GetUplinkMaxDelay(
+      const BwEstimatorstr *bwest_str);
+
+
+  /*
+   * update amount of data in bottle neck buffer and burst handling
+   * returns minimum payload size (bytes)
+   */
+  int WebRtcIsac_GetMinBytes(
+      RateModel*         State,
+      int                StreamSize,    /* bytes in bitstream */
+      const int          FrameLen,      /* ms per frame */
+      const double       BottleNeck,    /* bottle neck rate; excl headers (bps) */
+      const double       DelayBuildUp,  /* max delay from bottleneck buffering (ms) */
+      enum ISACBandwidth bandwidth
+      /*,WebRtc_Word16        frequentLargePackets*/);
+
+  /*
+   * update long-term average bitrate and amount of data in buffer
+   */
+  void WebRtcIsac_UpdateRateModel(
+      RateModel*   State,
+      int          StreamSize,                /* bytes in bitstream */
+      const int    FrameSamples,        /* samples per frame */
+      const double BottleNeck);       /* bottle neck rate; excl headers (bps) */
+
+
+  void WebRtcIsac_InitRateModel(
+      RateModel *State);
+
+  /* Returns the new frame length, in samples (input argument: bottle_neck in bits/s) */
+  int WebRtcIsac_GetNewFrameLength(
+      double bottle_neck,
+      int    current_framelength);
+
+  /* Returns the new SNR value (input argument: bottle_neck) */
+  double WebRtcIsac_GetSnr(
+      double bottle_neck,
+      int    new_framelength);
+
+
+  WebRtc_Word16 WebRtcIsac_UpdateUplinkJitter(
+      BwEstimatorstr*              bwest_str,
+      WebRtc_Word32                  index);
+
+#if defined(__cplusplus)
+}
+#endif
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/codec.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/codec.h
new file mode 100644
index 0000000..6af27ea
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/codec.h
@@ -0,0 +1,292 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * codec.h
+ *
+ * This header file contains the calls to the internal encoder
+ * and decoder functions.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_
+
+#include "structs.h"
+
+int WebRtcIsac_EstimateBandwidth(
+    BwEstimatorstr*           bwest_str,
+    Bitstr*                   streamdata,
+    WebRtc_Word32               packet_size,
+    WebRtc_UWord16              rtp_seq_number,
+    WebRtc_UWord32              send_ts,
+    WebRtc_UWord32              arr_ts,
+    enum IsacSamplingRate encoderSampRate,
+    enum IsacSamplingRate decoderSampRate);
+
+int WebRtcIsac_DecodeLb(
+    float*           signal_out,
+    ISACLBDecStruct* ISACdec_obj,
+    WebRtc_Word16*     current_framesamples,
+    WebRtc_Word16      isRCUPayload);
+
+int WebRtcIsac_DecodeRcuLb(
+    float*           signal_out,
+    ISACLBDecStruct* ISACdec_obj,
+    WebRtc_Word16*     current_framesamples);
+
+int WebRtcIsac_EncodeLb(
+    float*           in,
+    ISACLBEncStruct* ISACencLB_obj,
+    WebRtc_Word16      codingMode,
+    WebRtc_Word16      bottleneckIndex);
+
+int WebRtcIsac_EncodeStoredDataLb(
+    const ISAC_SaveEncData_t* ISACSavedEnc_obj,
+    Bitstr*                   ISACBitStr_obj,
+    int                       BWnumber,
+    float                     scale);
+
+
+int WebRtcIsac_EncodeStoredDataUb12(
+    const ISACUBSaveEncDataStruct* ISACSavedEnc_obj,
+    Bitstr*                        bitStream,
+    WebRtc_Word32                    jitterInfo,
+    float                          scale);
+
+int WebRtcIsac_EncodeStoredDataUb16(
+    const ISACUBSaveEncDataStruct* ISACSavedEnc_obj,
+    Bitstr*                        bitStream,
+    WebRtc_Word32                    jitterInfo,
+    float                          scale);
+
+
+WebRtc_Word16 WebRtcIsac_GetRedPayloadUb(
+    const ISACUBSaveEncDataStruct* ISACSavedEncObj,
+    Bitstr*                        bitStreamObj,
+    enum ISACBandwidth             bandwidth);
+/******************************************************************************
+ * WebRtcIsac_RateAllocation()
+ * Internal function to perform a rate-allocation for upper and lower-band,
+ * given a total rate.
+ *
+ * Input:
+ *   - inRateBitPerSec           : a total bit-rate in bits/sec.
+ *
+ * Output:
+ *   - rateLBBitPerSec           : a bit-rate allocated to the lower-band
+ *                                 in bits/sec.
+ *   - rateUBBitPerSec           : a bit-rate allocated to the upper-band
+ *                                 in bits/sec.
+ *
+ * Return value                  : 0 if rate allocation has been successful.
+ *                                -1 if failed to allocate rates.
+ */
+
+WebRtc_Word16
+WebRtcIsac_RateAllocation(
+    WebRtc_Word32         inRateBitPerSec,
+    double*             rateLBBitPerSec,
+    double*             rateUBBitPerSec,
+    enum ISACBandwidth* bandwidthKHz);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeUb16()
+ *
+ * Decode the upper-band if the codec is in 0-16 kHz mode.
+ *
+ * Input/Output:
+ *       -ISACdec_obj        : pointer to the upper-band decoder object. The
+ *                             bit-stream is stored inside the decoder object.
+ *
+ * Output:
+ *       -signal_out         : decoded audio, 480 samples 30 ms.
+ *
+ * Return value              : >0 number of decoded bytes.
+ *                             <0 if an error occurred.
+ */
+int WebRtcIsac_DecodeUb16(
+    float*           signal_out,
+    ISACUBDecStruct* ISACdec_obj,
+    WebRtc_Word16      isRCUPayload);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeUb12()
+ *
+ * Decode the upper-band if the codec is in 0-12 kHz mode.
+ *
+ * Input/Output:
+ *       -ISACdec_obj        : pointer to the upper-band decoder object. The
+ *                             bit-stream is stored inside the decoder object.
+ *
+ * Output:
+ *       -signal_out         : decoded audio, 480 samples 30 ms.
+ *
+ * Return value              : >0 number of decoded bytes.
+ *                             <0 if an error occurred.
+ */
+int WebRtcIsac_DecodeUb12(
+    float*           signal_out,
+    ISACUBDecStruct* ISACdec_obj,
+    WebRtc_Word16      isRCUPayload);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeUb16()
+ *
+ * Encode the upper-band if the codec is in 0-16 kHz mode.
+ *
+ * Input:
+ *       -in                 : upper-band audio, 160 samples (10 ms).
+ *
+ * Input/Output:
+ *       -ISACenc_obj        : pointer to the upper-band encoder object. The
+ *                             bit-stream is stored inside the encoder object.
+ *
+ * Return value              : >0 number of encoded bytes.
+ *                             <0 if an error occurred.
+ */
+int WebRtcIsac_EncodeUb16(
+    float*           in,
+    ISACUBEncStruct* ISACenc_obj,
+    WebRtc_Word32    jitterInfo);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeUb12()
+ *
+ * Encode the upper-band if the codec is in 0-12 kHz mode.
+ *
+ * Input:
+ *       -in                 : upper-band audio, 160 samples (10 ms).
+ *
+ * Input/Output:
+ *       -ISACenc_obj        : pointer to the upper-band encoder object. The
+ *                             bit-stream is stored inside the encoder object.
+ *
+ * Return value              : >0 number of encoded bytes.
+ *                             <0 if an error occurred.
+ */
+int WebRtcIsac_EncodeUb12(
+    float*           in,
+    ISACUBEncStruct* ISACenc_obj,
+    WebRtc_Word32    jitterInfo);
+
+/************************** initialization functions *************************/
+
+void WebRtcIsac_InitMasking(MaskFiltstr *maskdata);
+
+void WebRtcIsac_InitPreFilterbank(PreFiltBankstr *prefiltdata);
+
+void WebRtcIsac_InitPostFilterbank(PostFiltBankstr *postfiltdata);
+
+void WebRtcIsac_InitPitchFilter(PitchFiltstr *pitchfiltdata);
+
+void WebRtcIsac_InitPitchAnalysis(PitchAnalysisStruct *State);
+
+
+/**************************** transform functions ****************************/
+
+void WebRtcIsac_InitTransform();
+
+void WebRtcIsac_Time2Spec(double *inre1,
+                         double *inre2,
+                         WebRtc_Word16 *outre,
+                         WebRtc_Word16 *outim,
+                         FFTstr *fftstr_obj);
+
+void WebRtcIsac_Spec2time(double *inre,
+                         double *inim,
+                         double *outre1,
+                         double *outre2,
+                         FFTstr *fftstr_obj);
+
+
+/******************************* filter functions ****************************/
+
+void WebRtcIsac_AllPoleFilter(double  *InOut,
+                             double  *Coef,
+                             int     lengthInOut,
+                             int     orderCoef);
+
+void WebRtcIsac_AllZeroFilter(double *In,
+                             double *Coef,
+                             int    lengthInOut,
+                             int    orderCoef,
+                             double *Out);
+
+void WebRtcIsac_ZeroPoleFilter(double *In,
+                              double *ZeroCoef,
+                              double *PoleCoef,
+                              int    lengthInOut,
+                              int     orderCoef,
+                              double *Out);
+
+
+/***************************** filterbank functions **************************/
+
+void WebRtcIsac_SplitAndFilter(double         *in,
+                              double         *LP,
+                              double         *HP,
+                              double         *LP_la,
+                              double         *HP_la,
+                              PreFiltBankstr *prefiltdata);
+
+
+void WebRtcIsac_FilterAndCombine(double          *InLP,
+                                double          *InHP,
+                                double          *Out,
+                                PostFiltBankstr *postfiltdata);
+
+
+
+void WebRtcIsac_SplitAndFilterFloat(float         *in,
+                                    float         *LP,
+                                    float         *HP,
+                                    double         *LP_la,
+                                    double         *HP_la,
+                                    PreFiltBankstr *prefiltdata);
+
+
+void WebRtcIsac_FilterAndCombineFloat(float          *InLP,
+                                      float           *InHP,
+                                      float          *Out,
+                                      PostFiltBankstr *postfiltdata);
+
+
+/************************* normalized lattice filters ************************/
+
+void WebRtcIsac_NormLatticeFilterMa(int    orderCoef,
+                                     float *stateF,
+                                     float *stateG,
+                                     float *lat_in,
+                                     double *filtcoeflo,
+                                     double *lat_out);
+
+void WebRtcIsac_NormLatticeFilterAr(int    orderCoef,
+                                     float  *stateF,
+                                     float *stateG,
+                                     double *lat_in,
+                                     double *lo_filt_coef,
+                                     float *lat_out);
+
+void WebRtcIsac_Dir2Lat(double *a,
+                        int    orderCoef,
+                        float *sth,
+                        float *cth);
+
+void WebRtcIsac_AutoCorr(double *r,
+                        const double *x,
+                        int N,
+                        int order);
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/crc.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/crc.c
new file mode 100644
index 0000000..098e4b7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/crc.c
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "crc.h"
+#include <stdlib.h>
+#include "signal_processing_library.h"
+
+#define POLYNOMIAL 0x04c11db7L  /* CRC-32 generator; not referenced below */
+
+/* 256-entry lookup table of CRC remainders (precomputed from POLYNOMIAL). */
+static const WebRtc_UWord32 kCrcTable[256] = {
+  0,          0x4c11db7,  0x9823b6e,  0xd4326d9,  0x130476dc, 0x17c56b6b,
+  0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
+  0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7,
+  0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
+  0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3,
+  0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
+  0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef,
+  0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
+  0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb,
+  0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
+  0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0,
+  0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
+  0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x18aeb13,  0x54bf6a4,
+  0x808d07d,  0xcc9cdca,  0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
+  0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08,
+  0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
+  0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc,
+  0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
+  0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050,
+  0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
+  0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34,
+  0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
+  0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1,
+  0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
+  0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5,
+  0x3f9b762c, 0x3b5a6b9b,  0x315d626, 0x7d4cb91,  0xa97ed48,  0xe56f0ff,
+  0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9,
+  0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
+  0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd,
+  0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
+  0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71,
+  0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
+  0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2,
+  0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
+  0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e,
+  0x18197087, 0x1cd86d30, 0x29f3d35,  0x65e2082,  0xb1d065b,  0xfdc1bec,
+  0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a,
+  0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
+  0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676,
+  0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
+  0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662,
+  0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
+  0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
+};
+
+
+
+
+/****************************************************************************
+ * WebRtcIsac_GetCrc(...)
+ *
+ * This function returns a 32 bit CRC checksum of a bit stream
+ *
+ * Input:
+ *  - bitstream              : payload bitstream
+ *  - len_bitstream_in_bytes : number of 8-bit words in the bit stream
+ *
+ * Output:
+ *  - crc                    : checksum
+ *
+ * Return value              :  0 - Ok
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcIsac_GetCrc(const WebRtc_Word16* bitstream,
+                                WebRtc_Word16        len_bitstream_in_bytes,
+                                WebRtc_UWord32*      crc)
+{
+  WebRtc_UWord8* bitstream_ptr_uw8;
+  WebRtc_UWord32 crc_state;
+  int byte_cntr;
+  int crc_tbl_indx;
+
+  /* Sanity Check (note: the output pointer crc is not NULL-checked). */
+  if (bitstream == NULL) {
+    return -1;
+  }
+  /* cast to UWord8 pointer to walk the stream one byte at a time */
+  bitstream_ptr_uw8 = (WebRtc_UWord8 *)bitstream;
+
+  /* initialize the CRC state with all bits set */
+  crc_state = 0xFFFFFFFF;
+
+  for (byte_cntr = 0; byte_cntr < len_bitstream_in_bytes; byte_cntr++) {
+    crc_tbl_indx = (WEBRTC_SPL_RSHIFT_U32(crc_state, 24) ^
+                       bitstream_ptr_uw8[byte_cntr]) & 0xFF;
+    crc_state = WEBRTC_SPL_LSHIFT_U32(crc_state, 8) ^ kCrcTable[crc_tbl_indx];
+  }
+
+  *crc = ~crc_state;  /* final bit inversion */
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/crc.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/crc.h
new file mode 100644
index 0000000..0151278
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/crc.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * crc.h
+ *
+ * Checksum functions
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_
+
+#include "typedefs.h"
+
+/****************************************************************************
+ * WebRtcIsac_GetCrc(...)
+ *
+ * This function returns a 32 bit CRC checksum of a bit stream
+ *
+ * Input:
+ *  - encoded      : payload bit stream
+ *  - no_of_word8s : number of 8-bit words in the bit stream
+ *
+ * Output:
+ *  - crc          : checksum
+ *
+ * Return value    :  0 - Ok
+ *                   -1 - Error
+ */
+
+WebRtc_Word16 WebRtcIsac_GetCrc(
+    const WebRtc_Word16* encoded,
+    WebRtc_Word16        no_of_word8s,
+    WebRtc_UWord32*      crc);
+
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/decode.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/decode.c
new file mode 100644
index 0000000..25634b0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/decode.c
@@ -0,0 +1,330 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * decode_B.c
+ *
+ * This file contains definitions of functions for decoding.
+ * Decoding of lower-band, including normal-decoding and RCU decoding.
+ * Decoding of upper-band, including 8-12 kHz, when the bandwidth is
+ * 0-12 kHz, and 8-16 kHz, when the bandwidth is 0-16 kHz.
+ *
+ */
+
+
+#include "codec.h"
+#include "entropy_coding.h"
+#include "pitch_estimator.h"
+#include "bandwidth_estimator.h"
+#include "structs.h"
+#include "settings.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+/*
+ * function to decode the bitstream
+ * returns the total number of bytes in the stream
+ */
+int
+WebRtcIsac_DecodeLb(
+    float*          signal_out,
+    ISACLBDecStruct* ISACdecLB_obj,
+    WebRtc_Word16*    current_framesamples,
+    WebRtc_Word16     isRCUPayload)
+{
+  int k, model;
+  int len, err;
+  WebRtc_Word16 bandwidthInd;
+
+  float LP_dec_float[FRAMESAMPLES_HALF];
+  float HP_dec_float[FRAMESAMPLES_HALF];
+
+  double LPw[FRAMESAMPLES_HALF];
+  double HPw[FRAMESAMPLES_HALF];
+  double LPw_pf[FRAMESAMPLES_HALF];
+
+  double lo_filt_coef[(ORDERLO+1)*SUBFRAMES];
+  double hi_filt_coef[(ORDERHI+1)*SUBFRAMES];
+
+  double real_f[FRAMESAMPLES_HALF];
+  double imag_f[FRAMESAMPLES_HALF];
+
+  double PitchLags[4];
+  double PitchGains[4];
+  double AvgPitchGain;
+  WebRtc_Word16 PitchGains_Q12[4];
+  WebRtc_Word16 AvgPitchGain_Q12;
+
+  float gain;
+
+  int frame_nb; /* counter */
+  int frame_mode; /* 0 for 20ms and 30ms, 1 for 60ms */
+  int processed_samples;
+
+  /* reset the bit-stream decoder state before reading the payload */
+  (ISACdecLB_obj->bitstr_obj).W_upper = 0xFFFFFFFF;
+  (ISACdecLB_obj->bitstr_obj).streamval = 0;
+  (ISACdecLB_obj->bitstr_obj).stream_index = 0;
+
+  len = 0;
+
+  /* decode frame length (used below) and BW estimation index (read
+     here only to advance the stream pointer; bandwidthInd is unused) */
+  err = WebRtcIsac_DecodeFrameLen(&ISACdecLB_obj->bitstr_obj,
+                                  current_framesamples);
+  if (err < 0) { // error check
+    return err;
+  }
+
+  /* frame_mode: 0, or 1 */
+  frame_mode = *current_framesamples/MAX_FRAMESAMPLES;
+  /* processed_samples: either 320 (20ms) or 480 (30, 60 ms) */
+  processed_samples = *current_framesamples/(frame_mode+1);
+
+  err = WebRtcIsac_DecodeSendBW(&ISACdecLB_obj->bitstr_obj, &bandwidthInd);
+  if (err < 0) { // error check
+    return err;
+  }
+
+  /* one loop if it's one frame (20 or 30ms), 2 loops if 2 frames
+     bundled together (60ms) */
+  for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) {
+    /* decode & dequantize pitch parameters */
+    err = WebRtcIsac_DecodePitchGain(&(ISACdecLB_obj->bitstr_obj),
+                                     PitchGains_Q12);
+    if (err < 0) { // error check
+      return err;
+    }
+
+    err = WebRtcIsac_DecodePitchLag(&ISACdecLB_obj->bitstr_obj,
+                                    PitchGains_Q12, PitchLags);
+    if (err < 0) { // error check
+      return err;
+    }
+
+    AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] +
+                        PitchGains_Q12[2] + PitchGains_Q12[3])>>2;  /* mean of 4 subframe gains, Q12 */
+
+    /* decode & dequantize FiltCoef */
+    err = WebRtcIsac_DecodeLpc(&ISACdecLB_obj->bitstr_obj,
+                               lo_filt_coef,hi_filt_coef, &model);
+    if (err < 0) { // error check
+      return err;
+    }
+    /* decode & dequantize spectrum */
+    len = WebRtcIsac_DecodeSpecLb(&ISACdecLB_obj->bitstr_obj,
+                                  real_f, imag_f, AvgPitchGain_Q12);
+    if (len < 0) { // error check
+      return len;
+    }
+
+    /* inverse transform */
+    WebRtcIsac_Spec2time(real_f, imag_f, LPw, HPw,
+                        &ISACdecLB_obj->fftstr_obj);
+
+    /* convert PitchGains back to FLOAT for pitchfilter_post */
+    for (k = 0; k < 4; k++) {
+      PitchGains[k] = ((float)PitchGains_Q12[k])/4096;
+    }
+
+    if(isRCUPayload)
+    {
+      for (k = 0; k < 240; k++) {  /* NOTE(review): 240 -- presumably FRAMESAMPLES_HALF; confirm */
+        LPw[k] *= RCU_TRANSCODING_SCALE_INVERSE;
+        HPw[k] *= RCU_TRANSCODING_SCALE_INVERSE;
+      }
+    }
+
+    /* inverse pitch filter */
+    WebRtcIsac_PitchfilterPost(LPw, LPw_pf,
+                                &ISACdecLB_obj->pitchfiltstr_obj, PitchLags, PitchGains);
+    /* convert AvgPitchGain back to FLOAT for computation of gain */
+    AvgPitchGain = ((float)AvgPitchGain_Q12)/4096;
+    gain = 1.0f - 0.45f * (float)AvgPitchGain;
+
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      /* reduce gain to compensate for pitch enhancer */
+      LPw_pf[ k ] *= gain;
+    }
+
+    if(isRCUPayload)
+    {
+      for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+        /* compensation for transcoding gain changes*/
+        LPw_pf[k] *= RCU_TRANSCODING_SCALE;
+        HPw[k]    *= RCU_TRANSCODING_SCALE;
+      }
+    }
+
+    /* perceptual post-filtering (using normalized lattice filter) */
+    WebRtcIsac_NormLatticeFilterAr(ORDERLO,
+                                    ISACdecLB_obj->maskfiltstr_obj.PostStateLoF,
+                                    (ISACdecLB_obj->maskfiltstr_obj).PostStateLoG,
+                                    LPw_pf, lo_filt_coef, LP_dec_float);
+    WebRtcIsac_NormLatticeFilterAr(ORDERHI,
+                                    ISACdecLB_obj->maskfiltstr_obj.PostStateHiF,
+                                    (ISACdecLB_obj->maskfiltstr_obj).PostStateHiG,
+                                    HPw, hi_filt_coef, HP_dec_float);
+
+    /* recombine the 2 bands */
+    WebRtcIsac_FilterAndCombineFloat( LP_dec_float, HP_dec_float,
+                                      signal_out + frame_nb * processed_samples,
+                                      &ISACdecLB_obj->postfiltbankstr_obj);
+  }
+
+  return len;
+}
+
+
+/*
+ * This decode function is called when the codec is operating in 16 kHz
+ * bandwidth to decode the upperband, i.e. 8-16 kHz.
+ *
+ * Unlike the lower-band, the upper-band (8-16 kHz) is not split in
+ * frequency; it is split into 12 sub-frames, twice as many as the lower-band.
+ */
+int
+WebRtcIsac_DecodeUb16(
+    float*           signal_out,
+    ISACUBDecStruct* ISACdecUB_obj,
+    WebRtc_Word16      isRCUPayload)
+{
+  int len, err;
+
+  double halfFrameFirst[FRAMESAMPLES_HALF];
+  double halfFrameSecond[FRAMESAMPLES_HALF];
+
+  double percepFilterParam[(UB_LPC_ORDER+1) * (SUBFRAMES<<1) +
+                           (UB_LPC_ORDER+1)];
+
+  double real_f[FRAMESAMPLES_HALF];
+  double imag_f[FRAMESAMPLES_HALF];
+
+  len = 0;
+
+  /* decode & dequantize FiltCoef */
+  memset(percepFilterParam, 0, sizeof(percepFilterParam));
+  err = WebRtcIsac_DecodeInterpolLpcUb(&ISACdecUB_obj->bitstr_obj,
+                                       percepFilterParam, isac16kHz);
+  if (err < 0) { // error check
+    return err;
+  }
+
+  /* decode & dequantize spectrum */
+  len = WebRtcIsac_DecodeSpecUB16(&ISACdecUB_obj->bitstr_obj, real_f,
+                                    imag_f);
+  if (len < 0) {  // error check
+    return len;
+  }
+  if(isRCUPayload)
+  {
+    int n;
+    for(n = 0; n < 240; n++)  /* NOTE(review): 240 -- presumably FRAMESAMPLES_HALF; confirm */
+    {
+      real_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE;
+      imag_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE;
+    }
+  }
+
+  /* inverse transform */
+  WebRtcIsac_Spec2time(real_f, imag_f, halfFrameFirst, halfFrameSecond,
+                      &ISACdecUB_obj->fftstr_obj);
+
+  /* perceptual post-filtering (using normalized lattice filter) */
+  WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER,
+                                  ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
+                                  (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameFirst,
+                                  &percepFilterParam[(UB_LPC_ORDER+1)], signal_out);
+
+  /* second half-frame, reusing the same PostStateLo state buffers */
+  WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER,
+                                  ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
+                                  (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameSecond,
+                                  &percepFilterParam[(UB_LPC_ORDER + 1) * SUBFRAMES + (UB_LPC_ORDER+1)],
+                                  &signal_out[FRAMESAMPLES_HALF]);
+
+  return len;
+}
+
+/*
+ * This decode function is called when the codec operates at 0-12 kHz
+ * bandwidth to decode the upperband, i.e. 8-12 kHz.
+ *
+ * At the encoder the upper-band is split into two bands, 8-12 kHz & 12-16
+ * kHz, and only 8-12 kHz is encoded. At the decoder, 8-12 kHz band is
+ * reconstructed and 12-16 kHz replaced with zeros. Then two bands
+ * are combined, to reconstruct the upperband 8-16 kHz.
+ */
+int
+WebRtcIsac_DecodeUb12(
+    float*           signal_out,
+    ISACUBDecStruct* ISACdecUB_obj,
+    WebRtc_Word16      isRCUPayload)
+{
+  int len, err;
+
+  float LP_dec_float[FRAMESAMPLES_HALF];
+  float HP_dec_float[FRAMESAMPLES_HALF];
+
+  double LPw[FRAMESAMPLES_HALF];
+  double HPw[FRAMESAMPLES_HALF];
+
+  double percepFilterParam[(UB_LPC_ORDER+1)*SUBFRAMES];
+
+  double real_f[FRAMESAMPLES_HALF];
+  double imag_f[FRAMESAMPLES_HALF];
+
+  len = 0;
+
+  /* decode & dequantize FiltCoef */
+  err = WebRtcIsac_DecodeInterpolLpcUb(&ISACdecUB_obj->bitstr_obj,
+                                       percepFilterParam, isac12kHz);
+  if(err < 0) { // error check
+    return err;
+  }
+
+  /* decode & dequantize spectrum */
+  len = WebRtcIsac_DecodeSpecUB12(&ISACdecUB_obj->bitstr_obj,
+                                    real_f, imag_f);
+  if(len < 0) { // error check
+    return len;
+  }
+
+  if(isRCUPayload)
+  {
+    int n;
+    for(n = 0; n < 240; n++)  /* NOTE(review): 240 -- presumably FRAMESAMPLES_HALF; confirm */
+    {
+      real_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE;
+      imag_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE;
+    }
+  }
+
+  /* inverse transform */
+  WebRtcIsac_Spec2time(real_f, imag_f, LPw, HPw, &ISACdecUB_obj->fftstr_obj);
+
+  /* perceptual post-filtering (using normalized lattice filter) */
+  WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER,
+                                  ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
+                                  (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, LPw,
+                                  percepFilterParam, LP_dec_float);
+
+  /* Zero the 12-16 kHz half; only the 8-12 kHz band is coded in this mode */
+  memset(HP_dec_float, 0, sizeof(float) * (FRAMESAMPLES_HALF));
+
+  /* recombine the 2 bands (zeroed buffer passed first; verify arg order) */
+  WebRtcIsac_FilterAndCombineFloat(HP_dec_float, LP_dec_float, signal_out,
+                                   &ISACdecUB_obj->postfiltbankstr_obj);
+
+
+
+  return len;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/decode_bwe.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/decode_bwe.c
new file mode 100644
index 0000000..cdac7fa
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/decode_bwe.c
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "structs.h"
+#include "bandwidth_estimator.h"
+#include "entropy_coding.h"
+#include "codec.h"
+
+
+/******************************************************************************
+ * WebRtcIsac_EstimateBandwidth()
+ *
+ * Decode the frame length and the sender's bandwidth-estimate index from
+ * the incoming bitstream, then update the receive-side bandwidth estimator
+ * with this packet's size, RTP sequence number and timestamps. The
+ * timestamps are first translated to the 16 kHz domain, since the BWE
+ * always operates at a 16 kHz sampling rate.
+ *
+ * Input:
+ *   - bwest_str       : bandwidth-estimator instance to update.
+ *   - streamdata      : bitstream to decode the header fields from.
+ *   - packet_size     : payload size of the packet.
+ *   - rtp_seq_number  : RTP sequence number of the packet.
+ *   - send_ts         : send timestamp, in the sender's sample-rate domain.
+ *   - arr_ts          : arrival timestamp, in the receiver's domain.
+ *   - encoderSampRate : sampling rate of the remote (sending) encoder.
+ *   - decoderSampRate : sampling rate of the local decoder.
+ *
+ * Return value        : 0 on success, or the negative error code returned
+ *                       by the failing decode/update call.
+ */
+int
+WebRtcIsac_EstimateBandwidth(
+    BwEstimatorstr*           bwest_str,
+    Bitstr*                   streamdata,
+    WebRtc_Word32               packet_size,
+    WebRtc_UWord16              rtp_seq_number,
+    WebRtc_UWord32              send_ts,
+    WebRtc_UWord32              arr_ts,
+    enum IsacSamplingRate encoderSampRate,
+    enum IsacSamplingRate decoderSampRate)
+{
+  WebRtc_Word16  index;
+  WebRtc_Word16  frame_samples;
+  WebRtc_UWord32 sendTimestampIn16kHz;
+  WebRtc_UWord32 arrivalTimestampIn16kHz;
+  WebRtc_UWord32 diffSendTime;
+  WebRtc_UWord32 diffArrivalTime;
+  int err;
+
+  /* decode framelength and BW estimation */
+  err = WebRtcIsac_DecodeFrameLen(streamdata, &frame_samples);
+  if(err < 0)  // error check
+  {
+    return err;
+  }
+  err = WebRtcIsac_DecodeSendBW(streamdata, &index);
+  if(err < 0)  // error check
+  {
+    return err;
+  }
+
+  /* UPDATE ESTIMATES FROM OTHER SIDE */
+  err = WebRtcIsac_UpdateUplinkBwImpl(bwest_str, index, encoderSampRate);
+  if(err < 0)
+  {
+    return err;
+  }
+
+  // We like BWE to work at 16 kHz sampling rate,
+  // therefore, we have to change the timestamps accordingly.
+  // translate the send timestamp if required
+  // (unsigned subtraction keeps the difference correct across
+  // timestamp wrap-around)
+  diffSendTime = (WebRtc_UWord32)((WebRtc_UWord32)send_ts -
+                                  (WebRtc_UWord32)bwest_str->senderTimestamp);
+  bwest_str->senderTimestamp = send_ts;
+
+  diffArrivalTime = (WebRtc_UWord32)((WebRtc_UWord32)arr_ts -
+                                     (WebRtc_UWord32)bwest_str->receiverTimestamp);
+  bwest_str->receiverTimestamp = arr_ts;
+
+  // Super-wideband timestamps run at twice the rate; halve the
+  // differences to map them to the 16 kHz domain.
+  if(decoderSampRate == kIsacSuperWideband)
+  {
+    diffArrivalTime = (WebRtc_UWord32)diffArrivalTime >> 1;
+    diffSendTime = (WebRtc_UWord32)diffSendTime >> 1;
+  }
+  // arrival timestamp in 16 kHz
+  // (accumulated onto prev_rec_arr_ts/prev_rec_send_ts, which are
+  // presumably advanced inside the estimator update below -- confirm)
+  arrivalTimestampIn16kHz = (WebRtc_UWord32)((WebRtc_UWord32)
+                                             bwest_str->prev_rec_arr_ts + (WebRtc_UWord32)diffArrivalTime);
+  // send timestamp in 16 kHz
+  sendTimestampIn16kHz = (WebRtc_UWord32)((WebRtc_UWord32)
+                                          bwest_str->prev_rec_send_ts + (WebRtc_UWord32)diffSendTime);
+
+  /* (frame_samples * 1000) / FS is the frame duration in ms */
+  err = WebRtcIsac_UpdateBandwidthEstimator(bwest_str, rtp_seq_number,
+                                            (frame_samples * 1000) / FS, sendTimestampIn16kHz,
+                                            arrivalTimestampIn16kHz, packet_size);
+  // error check
+  if(err < 0)
+  {
+    return err;
+  }
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode.c
new file mode 100644
index 0000000..75cd726
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode.c
@@ -0,0 +1,1451 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * encode.c
+ *
+ * This file contains definitions of functions for encoding.
+ * Encoding of the upper-band, i.e. 8-12 kHz when the bandwidth is
+ * 0-12 kHz, and 8-16 kHz when the bandwidth is 0-16 kHz.
+ *
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "structs.h"
+#include "codec.h"
+#include "pitch_estimator.h"
+#include "entropy_coding.h"
+#include "arith_routines.h"
+#include "pitch_gain_tables.h"
+#include "pitch_lag_tables.h"
+#include "spectrum_ar_model_tables.h"
+#include "lpc_tables.h"
+#include "lpc_analysis.h"
+#include "bandwidth_estimator.h"
+#include "lpc_shape_swb12_tables.h"
+#include "lpc_shape_swb16_tables.h"
+#include "lpc_gain_swb_tables.h"
+
+
+/* Look-ahead offset, in samples, into the upper-band input buffer. */
+#define UB_LOOKAHEAD 24
+
+/*
+  Rate allocation tables of lower and upper-band bottleneck for
+  12kHz & 16kHz bandwidth.
+
+  12 kHz bandwidth
+  -----------------
+  The overall bottleneck of the coder is between 38 kbps and 45 kbps. We have
+  considered 7 entries, uniformly distributed in this interval, i.e. 38,
+  39.17, 40.33, 41.5, 42.67, 43.83 and 45. For every entry, the lower-band
+  and the upper-band bottlenecks are specified in
+  'kLowerBandBitRate12' and 'kUpperBandBitRate12'
+  tables, respectively. E.g. the overall rate of 41.5 kbps corresponds to a
+  bottleneck of 31 kbps for lower-band and 27 kbps for upper-band. Given an
+  overall bottleneck of the codec, we use linear interpolation to get
+  lower-band and upper-band bottlenecks.
+
+  16 kHz bandwidth
+  -----------------
+  The overall bottleneck of the coder is between 50 kbps and 56 kbps. We have
+  considered 6 entries, uniformly distributed in this interval, i.e. 50, 51.2,
+  52.4, 53.6, 54.8 and 56. For every entry, the lower-band and the upper-band
+  bottlenecks are specified in 'kLowerBandBitRate16' and
+  'kUpperBandBitRate16' tables, respectively. E.g. the overall rate
+  of 53.6 kbps corresponds to a bottleneck of 32 kbps for lower-band and 30
+  kbps for upper-band. Given an overall bottleneck of the codec, we use linear
+  interpolation to get lower-band and upper-band bottlenecks.
+
+*/
+
+// Lower/upper-band bottlenecks (bits/sec) for the 12 kHz mode; one entry
+// per overall bottleneck of:
+//     38  39.17  40.33   41.5  42.67  43.83     45   (kbps)
+static const WebRtc_Word16 kLowerBandBitRate12[7] = {
+  29000, 30000, 30000, 31000, 31000, 32000, 32000};
+static const WebRtc_Word16 kUpperBandBitRate12[7] = {
+  25000, 25000, 27000, 27000, 29000, 29000, 32000};
+
+// Lower/upper-band bottlenecks (bits/sec) for the 16 kHz mode; one entry
+// per overall bottleneck of:
+//    50     51.2  52.4   53.6   54.8    56   (kbps)
+static const WebRtc_Word16 kLowerBandBitRate16[6] = {
+  31000, 31000, 32000, 32000, 32000, 32000};
+static const WebRtc_Word16 kUpperBandBitRate16[6] = {
+  28000, 29000, 29000, 30000, 31000, 32000};
+
+/******************************************************************************
+ * WebRtcIsac_RateAllocation()
+ * Internal function to perform a rate-allocation for upper and lower-band,
+ * given a total rate.
+ *
+ * Input:
+ *   - inRateBitPerSec           : a total bottleneck in bits/sec.
+ *
+ * Output:
+ *   - rateLBBitPerSec           : a bottleneck allocated to the lower-band
+ *                                 in bits/sec.
+ *   - rateUBBitPerSec           : a bottleneck allocated to the upper-band
+ *                                 in bits/sec.
+ *
+ * Return value                  : 0 if rate allocation has been successful.
+ *                                -1 if failed to allocate rates.
+ */
+
+WebRtc_Word16
+WebRtcIsac_RateAllocation(
+    WebRtc_Word32         inRateBitPerSec,
+    double*             rateLBBitPerSec,
+    double*             rateUBBitPerSec,
+    enum ISACBandwidth* bandwidthKHz)
+{
+  WebRtc_Word16 idx;
+  double idxD;     /* fractional index into the rate tables */
+  double idxErr;   /* fractional part, used for interpolation */
+  if(inRateBitPerSec < 38000)
+  {
+    // If the given overall bottleneck is less than 38000 then
+    // the codec has to operate in wideband mode, i.e. 8 kHz
+    // bandwidth. All of the rate goes to the lower band.
+    *rateLBBitPerSec = (WebRtc_Word16)((inRateBitPerSec > 32000)?
+                                       32000:inRateBitPerSec);
+    *rateUBBitPerSec = 0;
+    *bandwidthKHz = isac8kHz;
+  }
+  else if((inRateBitPerSec >= 38000) && (inRateBitPerSec < 50000))
+  {
+    // At a bottleneck between 38 and 50 kbps the codec is operating
+    // at 12 kHz bandwidth. Using xxxBandBitRate12[] to calculate the
+    // upper/lower bottleneck
+
+    // find the bottlenecks by linear interpolation
+    // step is (45000 - 38000)/6.0 we use the inverse of it.
+    const double stepSizeInv = 8.5714286e-4;
+    idxD = (inRateBitPerSec - 38000) * stepSizeInv;
+    idx = (idxD >= 6)? 6:((WebRtc_Word16)idxD);
+    idxErr = idxD - idx;
+    *rateLBBitPerSec = kLowerBandBitRate12[idx];
+    *rateUBBitPerSec = kUpperBandBitRate12[idx];
+
+    // interpolate towards the next table entry (skip at the last one)
+    if(idx < 6)
+    {
+      *rateLBBitPerSec += (WebRtc_Word16)(idxErr *
+                                          (kLowerBandBitRate12[idx + 1] -
+                                           kLowerBandBitRate12[idx]));
+      *rateUBBitPerSec += (WebRtc_Word16)(idxErr *
+                                          (kUpperBandBitRate12[idx + 1] -
+                                           kUpperBandBitRate12[idx]));
+    }
+
+    *bandwidthKHz = isac12kHz;
+  }
+  else if((inRateBitPerSec >= 50000) && (inRateBitPerSec <= 56000))
+  {
+    // A bottleneck between 50 and 56 kbps corresponds to bandwidth
+    // of 16 kHz. Using xxxBandBitRate16[] to calculate the
+    // upper/lower bottleneck
+
+    // find the bottlenecks by linear interpolation
+    // step is (56000 - 50000)/5 we use the inverse of it
+    const double stepSizeInv = 8.3333333e-4;
+    idxD = (inRateBitPerSec - 50000) * stepSizeInv;
+    idx = (idxD >= 5)? 5:((WebRtc_Word16)idxD);
+    idxErr = idxD - idx;
+    *rateLBBitPerSec = kLowerBandBitRate16[idx];
+    *rateUBBitPerSec  = kUpperBandBitRate16[idx];
+
+    // interpolate towards the next table entry (skip at the last one)
+    if(idx < 5)
+    {
+      *rateLBBitPerSec += (WebRtc_Word16)(idxErr *
+                                          (kLowerBandBitRate16[idx + 1] -
+                                           kLowerBandBitRate16[idx]));
+
+      *rateUBBitPerSec += (WebRtc_Word16)(idxErr *
+                                          (kUpperBandBitRate16[idx + 1] -
+                                           kUpperBandBitRate16[idx]));
+    }
+
+    *bandwidthKHz = isac16kHz;
+  }
+  else
+  {
+    // Out-of-range bottleneck value.
+    return -1;
+  }
+
+  // limit the values; each band's bottleneck is capped at 32 kbps.
+  *rateLBBitPerSec = (*rateLBBitPerSec > 32000)? 32000:*rateLBBitPerSec;
+  *rateUBBitPerSec = (*rateUBBitPerSec > 32000)? 32000:*rateUBBitPerSec;
+
+  return 0;
+}
+
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeLb()
+ *
+ * Buffer 10 ms of lower-band input and, once a full frame (30 or 60 ms)
+ * has been collected, encode it: frame length, receive-bandwidth index,
+ * pitch parameters, LPC model/shape/gains and the spectrum. If the
+ * resulting payload exceeds the payload limit, the LPC gains and DFT
+ * coefficients are iteratively scaled down and re-encoded until it fits.
+ *
+ * Input:
+ *   - in              : 10 ms of input audio (FRAMESAMPLES_10ms samples).
+ *   - ISACencLB_obj   : lower-band encoder instance.
+ *   - codingMode      : 0 for adaptive mode (frame length may change),
+ *                       otherwise channel-independent mode.
+ *   - bottleneckIndex : receive-bandwidth index to embed in the stream.
+ *
+ * Return value        : 0 while buffering (or after the first 30 ms half of
+ *                       a 60 ms frame), the stream length in bytes when a
+ *                       frame is completed, or a negative error code.
+ */
+int
+WebRtcIsac_EncodeLb(
+    float*           in,
+    ISACLBEncStruct* ISACencLB_obj,
+    WebRtc_Word16      codingMode,
+    WebRtc_Word16      bottleneckIndex)
+{
+  int stream_length = 0;
+  int err;
+  int k;
+  int iterCntr;
+
+  double lofilt_coef[(ORDERLO+1)*SUBFRAMES];
+  double hifilt_coef[(ORDERHI+1)*SUBFRAMES];
+  float LP[FRAMESAMPLES_HALF];
+  float HP[FRAMESAMPLES_HALF];
+
+  double LP_lookahead[FRAMESAMPLES_HALF];
+  double HP_lookahead[FRAMESAMPLES_HALF];
+  double LP_lookahead_pf[FRAMESAMPLES_HALF + QLOOKAHEAD];
+  double LPw[FRAMESAMPLES_HALF];
+
+  double HPw[FRAMESAMPLES_HALF];
+  double LPw_pf[FRAMESAMPLES_HALF];
+  WebRtc_Word16 fre[FRAMESAMPLES_HALF];   /* Q7 */
+  WebRtc_Word16 fim[FRAMESAMPLES_HALF];   /* Q7 */
+
+  double PitchLags[4];
+  double PitchGains[4];
+  WebRtc_Word16 PitchGains_Q12[4];
+  WebRtc_Word16 AvgPitchGain_Q12;
+
+  int frame_mode; /* 0 for 30ms, 1 for 60ms */
+  int processed_samples, status = 0;
+
+  double bits_gains;
+  int bmodel;
+
+  /* saved arithmetic-coder state, restored on each transcoding pass */
+  transcode_obj transcodingParam;
+  double bytesLeftSpecCoding;
+  WebRtc_UWord16 payloadLimitBytes;
+
+  /* copy new frame length and bottle neck rate only for the first
+     10 ms data */
+  if (ISACencLB_obj->buffer_index == 0) {
+    /* set the framelength for the next packet */
+    ISACencLB_obj->current_framesamples = ISACencLB_obj->new_framelength;
+  }
+  /* frame_mode is 0 (30 ms) or 1 (60 ms)  */
+  frame_mode = ISACencLB_obj->current_framesamples/MAX_FRAMESAMPLES;
+  /* processed_samples: 480 (30, 60 ms) */
+  processed_samples = ISACencLB_obj->current_framesamples/(frame_mode+1);
+
+  /* buffer speech samples (by 10ms packet) until the framelength */
+  /* is reached (30 or 60 ms)                                     */
+  /****************************************************************/
+
+  /* fill the buffer with 10ms input data */
+  for (k = 0; k < FRAMESAMPLES_10ms; k++) {
+    ISACencLB_obj->data_buffer_float[k + ISACencLB_obj->buffer_index] =
+        in[k];
+  }
+
+  /* if buffersize is not equal to current framesize then increase index
+     and return. We do no encoding until we have enough audio.  */
+  if (ISACencLB_obj->buffer_index + FRAMESAMPLES_10ms != processed_samples) {
+    ISACencLB_obj->buffer_index += FRAMESAMPLES_10ms;
+    return 0;
+  }
+  /* if buffer reached the right size, reset index and continue with
+     encoding the frame */
+  ISACencLB_obj->buffer_index = 0;
+
+  /* end of buffer function */
+  /**************************/
+
+  /* encoding */
+  /************/
+
+  /* Stream header fields are written only at the start of a packet:
+     always for 30 ms frames, and only on the first half of 60 ms frames */
+  if (frame_mode == 0 || ISACencLB_obj->frame_nb == 0 ) {
+    // This is to avoid Linux warnings until we change 'int' to 'Word32'
+    // at all places.
+    int intVar;
+    /* reset bitstream */
+    ISACencLB_obj->bitstr_obj.W_upper = 0xFFFFFFFF;
+    ISACencLB_obj->bitstr_obj.streamval = 0;
+    ISACencLB_obj->bitstr_obj.stream_index = 0;
+
+    /* in adaptive mode, pick the frame length for the next packet */
+    if((codingMode == 0) && (frame_mode == 0) &&
+       (ISACencLB_obj->enforceFrameSize == 0)) {
+      ISACencLB_obj->new_framelength =
+          WebRtcIsac_GetNewFrameLength(ISACencLB_obj->bottleneck,
+                                       ISACencLB_obj->current_framesamples);
+    }
+
+    ISACencLB_obj->s2nr = WebRtcIsac_GetSnr(
+        ISACencLB_obj->bottleneck, ISACencLB_obj->current_framesamples);
+
+    /* encode frame length */
+    status = WebRtcIsac_EncodeFrameLen(
+        ISACencLB_obj->current_framesamples, &ISACencLB_obj->bitstr_obj);
+    if (status < 0) {
+      /* Wrong frame size */
+      return status;
+    }
+    /* Save framelength for multiple packets memory */
+    ISACencLB_obj->SaveEnc_obj.framelength =
+        ISACencLB_obj->current_framesamples;
+
+    /* To be used for Redundant Coding */
+    ISACencLB_obj->lastBWIdx = bottleneckIndex;
+    intVar = (int)bottleneckIndex;
+    WebRtcIsac_EncodeReceiveBw(&intVar, &ISACencLB_obj->bitstr_obj);
+  }
+
+  /* split signal in two bands */
+  WebRtcIsac_SplitAndFilterFloat(ISACencLB_obj->data_buffer_float, LP, HP,
+                                 LP_lookahead, HP_lookahead,    &ISACencLB_obj->prefiltbankstr_obj );
+
+  /* estimate pitch parameters and pitch-filter lookahead signal */
+  WebRtcIsac_PitchAnalysis(LP_lookahead, LP_lookahead_pf,
+                           &ISACencLB_obj->pitchanalysisstr_obj, PitchLags, PitchGains);
+
+  /* encode in FIX Q12 */
+
+  /* convert PitchGain to Fixed point */
+  for (k=0;k<PITCH_SUBFRAMES;k++) {
+    PitchGains_Q12[k] = (WebRtc_Word16)(PitchGains[k] * 4096.0);
+  }
+
+  /* Set where to store data in multiple packets memory */
+  if (frame_mode == 0 || ISACencLB_obj->frame_nb == 0)
+  {
+    ISACencLB_obj->SaveEnc_obj.startIdx = 0;
+  } else {
+    ISACencLB_obj->SaveEnc_obj.startIdx = 1;
+  }
+
+  /* quantize & encode pitch parameters */
+  WebRtcIsac_EncodePitchGain(PitchGains_Q12, &ISACencLB_obj->bitstr_obj,
+                             &ISACencLB_obj->SaveEnc_obj);
+  WebRtcIsac_EncodePitchLag(PitchLags, PitchGains_Q12,
+                            &ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
+
+  /* mean of the 4 sub-frame gains (Q12, >>2 divides by 4) */
+  AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] +
+                      PitchGains_Q12[2] + PitchGains_Q12[3])>>2;
+
+  /* find coefficients for perceptual pre-filters */
+  WebRtcIsac_GetLpcCoefLb(LP_lookahead_pf, HP_lookahead,
+                          &ISACencLB_obj->maskfiltstr_obj, ISACencLB_obj->s2nr,
+                          PitchGains_Q12, lofilt_coef, hifilt_coef);
+
+  /* code LPC model and shape - gains not quantized yet */
+  WebRtcIsac_EncodeLpcLb(lofilt_coef, hifilt_coef,  &bmodel, &bits_gains,
+                         &ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
+
+  /* convert PitchGains back to FLOAT for pitchfilter_pre */
+  for (k = 0; k < 4; k++) {
+    PitchGains[k] = ((float)PitchGains_Q12[k])/4096;
+  }
+
+  /* Store the state of arithmetic coder before coding LPC gains,
+     so the gain+spectrum coding can be redone during transcoding.
+     The last three stream bytes are saved as well since the coder
+     may partially overwrite them. */
+  transcodingParam.W_upper      = ISACencLB_obj->bitstr_obj.W_upper;
+  transcodingParam.stream_index = ISACencLB_obj->bitstr_obj.stream_index;
+  transcodingParam.streamval    = ISACencLB_obj->bitstr_obj.streamval;
+  transcodingParam.stream[0]    = ISACencLB_obj->bitstr_obj.stream[
+      ISACencLB_obj->bitstr_obj.stream_index - 2];
+  transcodingParam.stream[1]    = ISACencLB_obj->bitstr_obj.stream[
+      ISACencLB_obj->bitstr_obj.stream_index - 1];
+  transcodingParam.stream[2]    = ISACencLB_obj->bitstr_obj.stream[
+      ISACencLB_obj->bitstr_obj.stream_index];
+
+  /* Store LPC Gains before encoding them */
+  for(k = 0; k < SUBFRAMES; k++) {
+    transcodingParam.loFiltGain[k] = lofilt_coef[(LPC_LOBAND_ORDER+1)*k];
+    transcodingParam.hiFiltGain[k] = hifilt_coef[(LPC_HIBAND_ORDER+1)*k];
+  }
+
+  /* Code gains */
+  WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef,  bmodel,
+                             &ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
+
+  /* Get the correct value for the payload limit and calculate the
+     number of bytes left for coding the spectrum.*/
+  if((frame_mode == 1) && (ISACencLB_obj->frame_nb == 0)) {
+    /* It is a 60ms and we are in the first 30ms then the limit at
+       this point should be half of the assigned value */
+    payloadLimitBytes = ISACencLB_obj->payloadLimitBytes60 >> 1;
+  }
+  else if (frame_mode == 0) {
+    /* It is a 30ms frame */
+    /* Subtract 3 because termination process may add 3 bytes */
+    payloadLimitBytes = ISACencLB_obj->payloadLimitBytes30 - 3;
+  } else {
+    /* This is the second half of a 60ms frame. */
+    /* Subtract 3 because termination process may add 3 bytes */
+    payloadLimitBytes = ISACencLB_obj->payloadLimitBytes60 - 3;
+  }
+  bytesLeftSpecCoding = payloadLimitBytes - transcodingParam.stream_index;
+
+  /* perceptual pre-filtering (using normalized lattice filter) */
+  /* low-band filtering */
+  WebRtcIsac_NormLatticeFilterMa(ORDERLO,
+                                  ISACencLB_obj->maskfiltstr_obj.PreStateLoF,
+                                  ISACencLB_obj->maskfiltstr_obj.PreStateLoG, LP, lofilt_coef, LPw);
+  /* high-band filtering */
+  WebRtcIsac_NormLatticeFilterMa(ORDERHI,
+                                  ISACencLB_obj->maskfiltstr_obj.PreStateHiF,
+                                  ISACencLB_obj->maskfiltstr_obj.PreStateHiG, HP, hifilt_coef, HPw);
+
+
+  /* pitch filter */
+  WebRtcIsac_PitchfilterPre(LPw, LPw_pf, &ISACencLB_obj->pitchfiltstr_obj,
+                             PitchLags, PitchGains);
+
+  /* transform */
+  WebRtcIsac_Time2Spec(LPw_pf, HPw, fre, fim, &ISACencLB_obj->fftstr_obj);
+
+
+  /* Save data for multiple packets memory */
+  for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+    ISACencLB_obj->SaveEnc_obj.fre[k +
+                                   ISACencLB_obj->SaveEnc_obj.startIdx*FRAMESAMPLES_HALF] = fre[k];
+    ISACencLB_obj->SaveEnc_obj.fim[k +
+                                   ISACencLB_obj->SaveEnc_obj.startIdx*FRAMESAMPLES_HALF] = fim[k];
+  }
+  ISACencLB_obj->SaveEnc_obj.AvgPitchGain[
+      ISACencLB_obj->SaveEnc_obj.startIdx] = AvgPitchGain_Q12;
+
+  /* quantization and lossless coding */
+  err = WebRtcIsac_EncodeSpecLb(fre, fim, &ISACencLB_obj->bitstr_obj,
+                                AvgPitchGain_Q12);
+  if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+    /* There has been an error but it was not too large payload
+       (we can cure too large payload) */
+    if (frame_mode == 1 && ISACencLB_obj->frame_nb == 1) {
+      /* If this is the second 30ms of a 60ms frame reset
+         this such that in the next call encoder starts fresh. */
+      ISACencLB_obj->frame_nb = 0;
+    }
+    return err;
+  }
+
+  /* Transcoding loop: while the payload is too large, restore the coder
+     state, scale down the LPC gains and DFT coefficients and re-encode. */
+  iterCntr = 0;
+  while((ISACencLB_obj->bitstr_obj.stream_index > payloadLimitBytes) ||
+        (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+    double bytesSpecCoderUsed;
+    double transcodeScale;
+
+    if(iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) {
+      /* We were not able to limit the payload size */
+      if((frame_mode == 1) && (ISACencLB_obj->frame_nb == 0)) {
+        /* This was the first 30ms of a 60ms frame. Although
+           the payload is larger than it should be, we let
+           the second 30ms be encoded. Maybe together we
+           won't exceed the limit. */
+        ISACencLB_obj->frame_nb = 1;
+        return 0;
+      } else if((frame_mode == 1) && (ISACencLB_obj->frame_nb == 1)) {
+        ISACencLB_obj->frame_nb = 0;
+      }
+
+      if(err != -ISAC_DISALLOWED_BITSTREAM_LENGTH) {
+        return -ISAC_PAYLOAD_LARGER_THAN_LIMIT;
+      } else {
+        return status;
+      }
+    }
+
+    if(err == -ISAC_DISALLOWED_BITSTREAM_LENGTH) {
+      /* actual usage unknown; assume the maximum stream size */
+      bytesSpecCoderUsed = STREAM_SIZE_MAX;
+      // being conservative
+      transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5;
+    } else {
+      bytesSpecCoderUsed = ISACencLB_obj->bitstr_obj.stream_index -
+          transcodingParam.stream_index;
+      transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed;
+    }
+
+    /* To be safe, we reduce the scale depending on
+       the number of iterations. */
+    transcodeScale *= (1.0 - (0.9 * (double)iterCntr /
+                              (double)MAX_PAYLOAD_LIMIT_ITERATION));
+
+    /* Scale the LPC Gains */
+    for (k = 0; k < SUBFRAMES; k++) {
+      lofilt_coef[(LPC_LOBAND_ORDER+1) * k] =
+          transcodingParam.loFiltGain[k] * transcodeScale;
+      hifilt_coef[(LPC_HIBAND_ORDER+1) * k] =
+          transcodingParam.hiFiltGain[k] * transcodeScale;
+      transcodingParam.loFiltGain[k] =
+          lofilt_coef[(LPC_LOBAND_ORDER+1) * k];
+      transcodingParam.hiFiltGain[k] =
+          hifilt_coef[(LPC_HIBAND_ORDER+1) * k];
+    }
+
+    /* Scale DFT coefficients */
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      fre[k] = (WebRtc_Word16)(fre[k] * transcodeScale);
+      fim[k] = (WebRtc_Word16)(fim[k] * transcodeScale);
+    }
+
+    /* Save data for multiple packets memory */
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      ISACencLB_obj->SaveEnc_obj.fre[k +
+                                     ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF] =
+          fre[k];
+      ISACencLB_obj->SaveEnc_obj.fim[k +
+                                     ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF] =
+          fim[k];
+    }
+
+    /* Re-store the state of arithmetic coder before coding LPC gains */
+    ISACencLB_obj->bitstr_obj.W_upper = transcodingParam.W_upper;
+    ISACencLB_obj->bitstr_obj.stream_index = transcodingParam.stream_index;
+    ISACencLB_obj->bitstr_obj.streamval = transcodingParam.streamval;
+    ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] =
+        transcodingParam.stream[0];
+    ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] =
+        transcodingParam.stream[1];
+    ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index] =
+        transcodingParam.stream[2];
+
+    /* Code gains */
+    WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef,  bmodel,
+                               &ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
+
+    /* Update the number of bytes left for encoding the spectrum */
+    bytesLeftSpecCoding = payloadLimitBytes -
+        transcodingParam.stream_index;
+
+    /* Encode the spectrum */
+    err = WebRtcIsac_EncodeSpecLb(fre, fim, &ISACencLB_obj->bitstr_obj,
+                                  AvgPitchGain_Q12);
+    if((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+      /* There has been an error but it was not too large
+         payload (we can cure too large payload) */
+      if (frame_mode == 1 && ISACencLB_obj->frame_nb == 1) {
+        /* If this is the second 30ms of a 60ms frame reset
+           this such that in the next call encoder starts fresh. */
+        ISACencLB_obj->frame_nb = 0;
+      }
+      return err;
+    }
+    iterCntr++;
+  }
+
+  /* i.e. 60 ms framesize and just processed the first 30ms, */
+  /* go back to main function to buffer the other 30ms speech frame */
+  if (frame_mode == 1)
+  {
+    if(ISACencLB_obj->frame_nb == 0)
+    {
+      ISACencLB_obj->frame_nb = 1;
+      return 0;
+    }
+    else if(ISACencLB_obj->frame_nb == 1)
+    {
+      ISACencLB_obj->frame_nb = 0;
+      /* also update the framelength for next packet,
+         in Adaptive mode only */
+      if (codingMode == 0 && (ISACencLB_obj->enforceFrameSize == 0))
+      {
+        ISACencLB_obj->new_framelength =
+            WebRtcIsac_GetNewFrameLength(ISACencLB_obj->bottleneck,
+                                         ISACencLB_obj->current_framesamples);
+      }
+    }
+  }
+  else
+  {
+    ISACencLB_obj->frame_nb = 0;
+  }
+
+  /* complete arithmetic coding */
+  stream_length = WebRtcIsac_EncTerminate(&ISACencLB_obj->bitstr_obj);
+
+  return stream_length;
+}
+
+int
+WebRtcIsac_EncodeUb16(
+    float*           in,
+    ISACUBEncStruct* ISACencUB_obj,
+    WebRtc_Word32      jitterInfo)
+{
+  int err;
+  int k;
+
+  double lpcVecs[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+  double percepFilterParams[(1 + UB_LPC_ORDER) * (SUBFRAMES<<1) +
+                            (1 + UB_LPC_ORDER)];
+
+  double LP_lookahead[FRAMESAMPLES];
+  WebRtc_Word16 fre[FRAMESAMPLES_HALF];   /* Q7 */
+  WebRtc_Word16 fim[FRAMESAMPLES_HALF];   /* Q7 */
+
+  int status = 0;
+
+  double varscale[2];
+  double corr[SUBFRAMES<<1][UB_LPC_ORDER + 1];
+  double lpcGains[SUBFRAMES<<1];
+  transcode_obj transcodingParam;
+  double bytesLeftSpecCoding;
+  WebRtc_UWord16 payloadLimitBytes;
+  WebRtc_UWord16 iterCntr;
+  double s2nr;
+
+  /* buffer speech samples (by 10ms packet) until the framelength is   */
+  /* reached (30 or 60 ms)                                             */
+  /*********************************************************************/
+
+  /* fill the buffer with 10ms input data */
+  for (k = 0; k < FRAMESAMPLES_10ms; k++) {
+    ISACencUB_obj->data_buffer_float[k + ISACencUB_obj->buffer_index] =
+        in[k];
+  }
+
+  /* if buffersize is not equal to current framesize, and end of file is
+     not reached yet, we don't do encoding unless we have the whole frame */
+  if (ISACencUB_obj->buffer_index + FRAMESAMPLES_10ms < FRAMESAMPLES) {
+    ISACencUB_obj->buffer_index += FRAMESAMPLES_10ms;
+    return 0;
+  }
+
+  /* end of buffer function */
+  /**************************/
+
+  /* encoding */
+  /************/
+
+  /* reset bitstream */
+  ISACencUB_obj->bitstr_obj.W_upper = 0xFFFFFFFF;
+  ISACencUB_obj->bitstr_obj.streamval = 0;
+  ISACencUB_obj->bitstr_obj.stream_index = 0;
+
+  /* bandwidth estimation and coding */
+  /* To be used for Redundant Coding */
+  WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj);
+
+  status = WebRtcIsac_EncodeBandwidth(isac16kHz,
+                                      &ISACencUB_obj->bitstr_obj);
+  if (status < 0) {
+    return status;
+  }
+
+  s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck,
+                                 FRAMESAMPLES);
+
+  memcpy(lpcVecs, ISACencUB_obj->lastLPCVec, UB_LPC_ORDER * sizeof(double));
+
+  for (k = 0; k < FRAMESAMPLES; k++) {
+    LP_lookahead[k] = ISACencUB_obj->data_buffer_float[UB_LOOKAHEAD + k];
+  }
+
+  /* find coefficients for perceptual pre-filters */
+  WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj,
+                            &lpcVecs[UB_LPC_ORDER], corr, varscale, isac16kHz);
+
+  memcpy(ISACencUB_obj->lastLPCVec,
+         &lpcVecs[(UB16_LPC_VEC_PER_FRAME - 1) * (UB_LPC_ORDER)],
+         sizeof(double) * UB_LPC_ORDER);
+
+  /* code LPC model and shape - gains not quantized yet */
+  WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj,
+                           percepFilterParams, isac16kHz, &ISACencUB_obj->SaveEnc_obj);
+
+
+  // the first set of lpc parameters are from the last sub-frame of
+  // the previous frame. so we don't care about them
+  WebRtcIsac_GetLpcGain(s2nr, &percepFilterParams[UB_LPC_ORDER + 1],
+                       (SUBFRAMES<<1), lpcGains, corr, varscale);
+
+  /* Store the state of arithmetic coder before coding LPC gains */
+  transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index;
+  transcodingParam.W_upper      = ISACencUB_obj->bitstr_obj.W_upper;
+  transcodingParam.streamval    = ISACencUB_obj->bitstr_obj.streamval;
+  transcodingParam.stream[0]    = ISACencUB_obj->bitstr_obj.stream[
+      ISACencUB_obj->bitstr_obj.stream_index - 2];
+  transcodingParam.stream[1]    = ISACencUB_obj->bitstr_obj.stream[
+      ISACencUB_obj->bitstr_obj.stream_index - 1];
+  transcodingParam.stream[2]    = ISACencUB_obj->bitstr_obj.stream[
+      ISACencUB_obj->bitstr_obj.stream_index];
+
+  /* Store LPC Gains before encoding them */
+  for(k = 0; k < SUBFRAMES; k++) {
+    transcodingParam.loFiltGain[k] = lpcGains[k];
+    transcodingParam.hiFiltGain[k] = lpcGains[SUBFRAMES + k];
+  }
+
+  // Store the gains for multiple encoding
+  memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, (SUBFRAMES << 1) * sizeof(double));
+
+  WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj,
+                             ISACencUB_obj->SaveEnc_obj.lpcGainIndex);
+  WebRtcIsac_EncodeLpcGainUb(&lpcGains[SUBFRAMES], &ISACencUB_obj->bitstr_obj,
+                             &ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]);
+
+  /* Get the correct value for the payload limit and calculate the number of
+     bytes left for coding the spectrum. It is a 30ms frame
+     Subract 3 because termination process may add 3 bytes */
+  payloadLimitBytes = ISACencUB_obj->maxPayloadSizeBytes -
+      ISACencUB_obj->numBytesUsed - 3;
+  bytesLeftSpecCoding = payloadLimitBytes -
+      ISACencUB_obj->bitstr_obj.stream_index;
+
+  for (k = 0; k < (SUBFRAMES<<1); k++) {
+    percepFilterParams[k*(UB_LPC_ORDER + 1) + (UB_LPC_ORDER + 1)] =
+        lpcGains[k];
+  }
+
+  /* perceptual pre-filtering (using normalized lattice filter) */
+  /* first half-frame filtering */
+  WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER,
+                                  ISACencUB_obj->maskfiltstr_obj.PreStateLoF,
+                                  ISACencUB_obj->maskfiltstr_obj.PreStateLoG,
+                                  &ISACencUB_obj->data_buffer_float[0],
+                                  &percepFilterParams[UB_LPC_ORDER + 1],
+                                  &LP_lookahead[0]);
+
+  /* Second half-frame filtering */
+  WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER,
+                                  ISACencUB_obj->maskfiltstr_obj.PreStateLoF,
+                                  ISACencUB_obj->maskfiltstr_obj.PreStateLoG,
+                                  &ISACencUB_obj->data_buffer_float[FRAMESAMPLES_HALF],
+                                  &percepFilterParams[(UB_LPC_ORDER + 1) + SUBFRAMES *
+                                                      (UB_LPC_ORDER + 1)], &LP_lookahead[FRAMESAMPLES_HALF]);
+
+  WebRtcIsac_Time2Spec(&LP_lookahead[0], &LP_lookahead[FRAMESAMPLES_HALF],
+                      fre, fim, &ISACencUB_obj->fftstr_obj);
+
+  //Store FFT coefficients for multiple encoding
+  memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
+         FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+  memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
+         FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+  // Prepare the audio buffer for the next packet
+  // move the last 3 ms to the beginning of the buffer
+  memcpy(ISACencUB_obj->data_buffer_float,
+         &ISACencUB_obj->data_buffer_float[FRAMESAMPLES],
+         LB_TOTAL_DELAY_SAMPLES * sizeof(float));
+  // start writing with 3 ms delay to compensate for the delay
+  // of the lower-band.
+  ISACencUB_obj->buffer_index = LB_TOTAL_DELAY_SAMPLES;
+
+  // Save the bit-stream object at this point for FEC.
+  memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj,
+         &ISACencUB_obj->bitstr_obj, sizeof(Bitstr));
+
+  /* quantization and lossless coding */
+  err = WebRtcIsac_EncodeSpecUB16(fre, fim, &ISACencUB_obj->bitstr_obj);
+  if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+    return err;
+  }
+
+  iterCntr = 0;
+  while((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) ||
+        (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+    double bytesSpecCoderUsed;
+    double transcodeScale;
+
+    if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) {
+      /* We were not able to limit the payload size */
+      return -ISAC_PAYLOAD_LARGER_THAN_LIMIT;
+    }
+
+    if (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH) {
+      bytesSpecCoderUsed = STREAM_SIZE_MAX;
+      // being conservative
+      transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5;
+    } else {
+      bytesSpecCoderUsed = ISACencUB_obj->bitstr_obj.stream_index -
+          transcodingParam.stream_index;
+      transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed;
+    }
+
+    /* To be safe, we reduce the scale depending on the
+       number of iterations. */
+    transcodeScale *= (1.0 - (0.9 * (double)iterCntr/
+                              (double)MAX_PAYLOAD_LIMIT_ITERATION));
+
+    /* Scale the LPC Gains */
+    for (k = 0; k < SUBFRAMES; k++) {
+      transcodingParam.loFiltGain[k] *= transcodeScale;
+      transcodingParam.hiFiltGain[k] *= transcodeScale;
+    }
+
+    /* Scale DFT coefficients */
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      fre[k] = (WebRtc_Word16)(fre[k] * transcodeScale + 0.5);
+      fim[k] = (WebRtc_Word16)(fim[k] * transcodeScale + 0.5);
+    }
+
+    //Store FFT coefficients for multiple encoding
+    memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
+           FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+    memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
+           FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+
+    /* Store the state of arithmetic coder before coding LPC gains */
+    ISACencUB_obj->bitstr_obj.W_upper = transcodingParam.W_upper;
+
+    ISACencUB_obj->bitstr_obj.stream_index = transcodingParam.stream_index;
+
+    ISACencUB_obj->bitstr_obj.streamval = transcodingParam.streamval;
+
+    ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] =
+        transcodingParam.stream[0];
+
+    ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] =
+        transcodingParam.stream[1];
+
+    ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index] =
+        transcodingParam.stream[2];
+
+    // Store the gains for multiple encoding
+    memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains,
+           (SUBFRAMES << 1) * sizeof(double));
+
+    WebRtcIsac_EncodeLpcGainUb(transcodingParam.loFiltGain,
+                               &ISACencUB_obj->bitstr_obj,
+                               ISACencUB_obj->SaveEnc_obj.lpcGainIndex);
+    WebRtcIsac_EncodeLpcGainUb(transcodingParam.hiFiltGain,
+                               &ISACencUB_obj->bitstr_obj,
+                               &ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]);
+
+    /* Update the number of bytes left for encoding the spectrum */
+    bytesLeftSpecCoding = payloadLimitBytes -
+        ISACencUB_obj->bitstr_obj.stream_index;
+
+    // Save the bit-stream object at this point for FEC.
+    memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj,
+           &ISACencUB_obj->bitstr_obj, sizeof(Bitstr));
+
+    /* Encode the spectrum */
+    err = WebRtcIsac_EncodeSpecUB16(fre, fim, &ISACencUB_obj->bitstr_obj);
+    if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+      /* There has been an error but it was not too large payload
+         (we can cure too large payload) */
+      return err;
+    }
+    iterCntr++;
+  }
+
+  /* complete arithmetic coding */
+  return WebRtcIsac_EncTerminate(&ISACencUB_obj->bitstr_obj);
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeUb12()
+ *
+ * Encode one frame of the upper band in SWB-12 kHz mode. Input audio is
+ * buffered in 10 ms chunks; once a full frame (FRAMESAMPLES) has been
+ * collected the frame is encoded: jitter info and bandwidth are written,
+ * LPC shape and gains are quantized and coded, the low band is perceptually
+ * pre-filtered and transformed, and the spectrum is entropy coded. If the
+ * resulting payload exceeds the limit, LPC gains and DFT coefficients are
+ * scaled down and the spectrum re-encoded, for at most
+ * MAX_PAYLOAD_LIMIT_ITERATION iterations.
+ *
+ * Input:
+ *      -in                 : pointer to FRAMESAMPLES_10ms new audio samples.
+ *      -ISACencUB_obj      : upper-band encoder instance (sample buffer,
+ *                            filter states, bit-stream object, and the
+ *                            saved-encoding data used for FEC/transcoding).
+ *      -jitterInfo         : jitter index coded into the stream (used for
+ *                            redundant coding).
+ *
+ * Return value             : 0 while still buffering (frame not complete),
+ *                            a negative error code on failure, otherwise the
+ *                            value returned by WebRtcIsac_EncTerminate() --
+ *                            presumably the stream length; TODO confirm.
+ */
+int
+WebRtcIsac_EncodeUb12(
+    float*           in,
+    ISACUBEncStruct* ISACencUB_obj,
+    WebRtc_Word32      jitterInfo)
+{
+  int err;
+  int k;
+  int iterCntr;
+
+  double lpcVecs[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
+
+  double percepFilterParams[(1 + UB_LPC_ORDER) * SUBFRAMES];
+  float LP[FRAMESAMPLES_HALF];
+  float HP[FRAMESAMPLES_HALF];
+
+  double LP_lookahead[FRAMESAMPLES_HALF];
+  double HP_lookahead[FRAMESAMPLES_HALF];
+  double LPw[FRAMESAMPLES_HALF];
+
+  double HPw[FRAMESAMPLES_HALF];
+  WebRtc_Word16 fre[FRAMESAMPLES_HALF];   /* Q7 */
+  WebRtc_Word16 fim[FRAMESAMPLES_HALF];   /* Q7 */
+
+  int status = 0;
+
+  double varscale[1];
+
+  double corr[UB_LPC_GAIN_DIM][UB_LPC_ORDER + 1];
+  double lpcGains[SUBFRAMES];
+  transcode_obj transcodingParam;
+  double bytesLeftSpecCoding;
+  WebRtc_UWord16 payloadLimitBytes;
+  double s2nr;
+
+  /* buffer speech samples (by 10ms packet) until the framelength is  */
+  /* reached (30 or 60 ms)                                            */
+  /********************************************************************/
+
+  /* fill the buffer with 10ms input data */
+  for (k=0; k<FRAMESAMPLES_10ms; k++) {
+    ISACencUB_obj->data_buffer_float[k + ISACencUB_obj->buffer_index] =
+        in[k];
+  }
+
+  /* if buffer-size is not equal to current frame-size then increase the
+     index and return. We do the encoding when we have enough audio.     */
+  if (ISACencUB_obj->buffer_index + FRAMESAMPLES_10ms < FRAMESAMPLES) {
+    ISACencUB_obj->buffer_index += FRAMESAMPLES_10ms;
+    return 0;
+  }
+  /* if buffer reached the right size, reset index and continue
+     with encoding the frame */
+  ISACencUB_obj->buffer_index = 0;
+
+  /* end of buffer function */
+  /**************************/
+
+  /* encoding */
+  /************/
+
+  /* reset bitstream */
+  ISACencUB_obj->bitstr_obj.W_upper = 0xFFFFFFFF;
+  ISACencUB_obj->bitstr_obj.streamval = 0;
+  ISACencUB_obj->bitstr_obj.stream_index = 0;
+
+  /* bandwidth estimation and coding */
+  /* To be used for Redundant Coding */
+  WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj);
+
+  status = WebRtcIsac_EncodeBandwidth(isac12kHz,
+                                      &ISACencUB_obj->bitstr_obj);
+  if (status < 0) {
+    return status;
+  }
+
+
+  s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck,
+                                 FRAMESAMPLES);
+
+  /* split signal in two bands */
+  WebRtcIsac_SplitAndFilterFloat(ISACencUB_obj->data_buffer_float, HP, LP,
+                                 HP_lookahead, LP_lookahead, &ISACencUB_obj->prefiltbankstr_obj);
+
+  /* find coefficients for perceptual pre-filters */
+  WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj,
+                            lpcVecs, corr, varscale, isac12kHz);
+
+  /* code LPC model and shape - gains not quantized yet */
+  WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj,
+                           percepFilterParams, isac12kHz, &ISACencUB_obj->SaveEnc_obj);
+
+  WebRtcIsac_GetLpcGain(s2nr, percepFilterParams, SUBFRAMES, lpcGains,
+                       corr, varscale);
+
+  /* Save the arithmetic-coder state (W_upper, streamval, stream_index)
+     and the last three stream bytes, so the coder can be rolled back to
+     this exact point if the payload has to be transcoded below. */
+  /* Store the state of arithmetic coder before coding LPC gains */
+  transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper;
+
+  transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index;
+
+  transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval;
+
+  transcodingParam.stream[0] = ISACencUB_obj->bitstr_obj.stream[
+      ISACencUB_obj->bitstr_obj.stream_index - 2];
+
+  transcodingParam.stream[1] = ISACencUB_obj->bitstr_obj.stream[
+      ISACencUB_obj->bitstr_obj.stream_index - 1];
+
+  transcodingParam.stream[2] = ISACencUB_obj->bitstr_obj.stream[
+      ISACencUB_obj->bitstr_obj.stream_index];
+
+  /* Store LPC Gains before encoding them */
+  for(k = 0; k < SUBFRAMES; k++) {
+    transcodingParam.loFiltGain[k] = lpcGains[k];
+  }
+
+  // Store the gains for multiple encoding
+  memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, SUBFRAMES *
+         sizeof(double));
+
+  WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj,
+                             ISACencUB_obj->SaveEnc_obj.lpcGainIndex);
+
+  /* The gain occupies the first slot of each (UB_LPC_ORDER + 1)-sized
+     coefficient set of the perceptual filter. */
+  for(k = 0; k < SUBFRAMES; k++) {
+    percepFilterParams[k*(UB_LPC_ORDER + 1)] = lpcGains[k];
+  }
+
+  /* perceptual pre-filtering (using normalized lattice filter) */
+  /* low-band filtering */
+  WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER,
+                                  ISACencUB_obj->maskfiltstr_obj.PreStateLoF,
+                                  ISACencUB_obj->maskfiltstr_obj.PreStateLoG, LP, percepFilterParams,
+                                  LPw);
+
+  /* Get the correct value for the payload limit and calculate the number
+     of bytes left for coding the spectrum. It is a 30 ms frame; subtract 3
+     because the termination process may add 3 bytes */
+  payloadLimitBytes = ISACencUB_obj->maxPayloadSizeBytes -
+      ISACencUB_obj->numBytesUsed - 3;
+  bytesLeftSpecCoding = payloadLimitBytes -
+      ISACencUB_obj->bitstr_obj.stream_index;
+
+  /* In 12 kHz mode the high-band input to the transform is all zeros. */
+  memset(HPw, 0, sizeof(double) * FRAMESAMPLES_HALF);
+
+  /* transform */
+  WebRtcIsac_Time2Spec(LPw, HPw, fre, fim, &ISACencUB_obj->fftstr_obj);
+
+  //Store real FFT coefficients for multiple encoding
+  memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
+         FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+  //Store imaginary FFT coefficients for multiple encoding
+  memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
+         FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+  // Save the bit-stream object at this point for FEC.
+  memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj,
+         &ISACencUB_obj->bitstr_obj, sizeof(Bitstr));
+
+  /* quantization and lossless coding */
+  err = WebRtcIsac_EncodeSpecUB12(fre, fim, &ISACencUB_obj->bitstr_obj);
+  if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+    /* There has been an error but it was not too large
+       payload (we can cure too large payload) */
+    return err;
+  }
+  iterCntr = 0;
+  /* Iteratively scale down the gains and the spectrum until the payload
+     fits into payloadLimitBytes. */
+  while((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) ||
+        (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+    double bytesSpecCoderUsed;
+    double transcodeScale;
+
+    if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) {
+      /* We were not able to limit the payload size */
+      return -ISAC_PAYLOAD_LARGER_THAN_LIMIT;
+    }
+
+    if (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH) {
+      bytesSpecCoderUsed = STREAM_SIZE_MAX;
+      // being conservative
+      transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5;
+    } else {
+      bytesSpecCoderUsed = ISACencUB_obj->bitstr_obj.stream_index -
+          transcodingParam.stream_index;
+      transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed;
+    }
+
+    /* To be safe, we reduce the scale depending on the
+       number of iterations. */
+    transcodeScale *= (1.0 - (0.9 * (double)iterCntr/
+                              (double)MAX_PAYLOAD_LIMIT_ITERATION));
+
+    /* Scale the LPC Gains */
+    for (k = 0; k < SUBFRAMES; k++) {
+      transcodingParam.loFiltGain[k] *= transcodeScale;
+    }
+
+    /* Scale DFT coefficients */
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      fre[k] = (WebRtc_Word16)(fre[k] * transcodeScale + 0.5);
+      fim[k] = (WebRtc_Word16)(fim[k] * transcodeScale + 0.5);
+    }
+
+    //Store real FFT coefficients for multiple encoding
+    memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
+           FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+    //Store imaginary FFT coefficients for multiple encoding
+    memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
+           FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
+
+
+    /* Re-store the state of arithmetic coder before coding LPC gains */
+    ISACencUB_obj->bitstr_obj.W_upper = transcodingParam.W_upper;
+
+    ISACencUB_obj->bitstr_obj.stream_index = transcodingParam.stream_index;
+
+    ISACencUB_obj->bitstr_obj.streamval = transcodingParam.streamval;
+
+    ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] =
+        transcodingParam.stream[0];
+
+    ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] =
+        transcodingParam.stream[1];
+
+    ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index] =
+        transcodingParam.stream[2];
+
+    // Store the gains for multiple encoding.
+    // NOTE(review): the unscaled gains (lpcGains) are re-stored here while
+    // the scaled gains (transcodingParam.loFiltGain) are what get encoded
+    // below -- confirm this is intended.
+    memcpy(&ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, SUBFRAMES *
+           sizeof(double));
+
+    // encode LPC gain and store quantization indices. Having quantization
+    // indices reduces transcoding complexity if 'scale factor' is 1.
+    WebRtcIsac_EncodeLpcGainUb(transcodingParam.loFiltGain,
+                               &ISACencUB_obj->bitstr_obj,
+                               ISACencUB_obj->SaveEnc_obj.lpcGainIndex);
+
+    // Save the bit-stream object at this point for FEC.
+    memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj,
+           &ISACencUB_obj->bitstr_obj, sizeof(Bitstr));
+
+    /* Update the number of bytes left for encoding the spectrum */
+    bytesLeftSpecCoding = payloadLimitBytes -
+        ISACencUB_obj->bitstr_obj.stream_index;
+
+    /* Encode the spectrum */
+    err = WebRtcIsac_EncodeSpecUB12(fre, fim,
+                                      &ISACencUB_obj->bitstr_obj);
+    if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) {
+      /* There has been an error but it was not too large payload
+         (we can cure too large payload) */
+      return err;
+    }
+    iterCntr++;
+  }
+
+  /* complete arithmetic coding */
+  return WebRtcIsac_EncTerminate(&ISACencUB_obj->bitstr_obj);
+}
+
+
+
+
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeStoredDataLb()
+ *
+ * Create a new lower-band bit-stream, with a new bandwidth estimate, from
+ * the data saved in the encoder during a previous call to
+ * WebRtcIsac_Encoder(). Optionally the frame is transcoded to a lower rate
+ * by scaling the LPC coefficients and the DFT coefficients
+ * (0.0 < scale < 1.0); any other scale value means "re-encode unchanged".
+ *
+ * Input:
+ *      -ISACSavedEnc_obj   : data saved during the original encoding
+ *                            (pitch, LPC, DFT coefficients, indices).
+ *      -BWnumber           : bandwidth-estimate index to encode; valid
+ *                            range is 0 - 23.
+ *      -scale              : transcoding scale factor.
+ *
+ * Output:
+ *      -ISACBitStr_obj     : the newly built bit-stream.
+ *
+ * Return value             : a negative error code on failure, otherwise
+ *                            the result of WebRtcIsac_EncTerminate().
+ */
+int WebRtcIsac_EncodeStoredDataLb(
+    const ISAC_SaveEncData_t* ISACSavedEnc_obj,
+    Bitstr*                   ISACBitStr_obj,
+    int                       BWnumber,
+    float                     scale)
+{
+  int ii;
+  int status;
+  int BWno = BWnumber;
+
+  const WebRtc_UWord16 *WebRtcIsac_kQPitchGainCdf_ptr[1];
+  const WebRtc_UWord16 **cdf;
+
+  double tmpLPCcoeffs_lo[(ORDERLO+1)*SUBFRAMES*2];
+  double tmpLPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*2];
+  int tmpLPCindex_g[12*2];
+  WebRtc_Word16 tmp_fre[FRAMESAMPLES], tmp_fim[FRAMESAMPLES];
+
+  /* Sanity Check - possible values for BWnumber is 0 - 23 */
+  if ((BWnumber < 0) || (BWnumber > 23)) {
+    return -ISAC_RANGE_ERROR_BW_ESTIMATOR;
+  }
+
+  /* reset bitstream */
+  ISACBitStr_obj->W_upper = 0xFFFFFFFF;
+  ISACBitStr_obj->streamval = 0;
+  ISACBitStr_obj->stream_index = 0;
+
+  /* encode frame length */
+  status = WebRtcIsac_EncodeFrameLen(ISACSavedEnc_obj->framelength,
+                                     ISACBitStr_obj);
+  if (status < 0) {
+    /* Wrong frame size */
+    return status;
+  }
+
+  /* Transcoding: scale the LPC coefficients and the DFT when a valid
+     down-scale factor is given; otherwise reuse the stored data as-is. */
+  if ((scale > 0.0) && (scale < 1.0)) {
+    /* Compensate LPC gain */
+    for (ii = 0;
+         ii < ((ORDERLO + 1)* SUBFRAMES * (1 + ISACSavedEnc_obj->startIdx));
+         ii++) {
+      tmpLPCcoeffs_lo[ii] = scale *  ISACSavedEnc_obj->LPCcoeffs_lo[ii];
+    }
+    for (ii = 0;
+         ii < ((ORDERHI + 1) * SUBFRAMES *(1 + ISACSavedEnc_obj->startIdx));
+         ii++) {
+      tmpLPCcoeffs_hi[ii] = scale *  ISACSavedEnc_obj->LPCcoeffs_hi[ii];
+    }
+    /* Scale DFT */
+    for (ii = 0;
+         ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx));
+         ii++) {
+      tmp_fre[ii] = (WebRtc_Word16)((scale) *
+                                    (float)ISACSavedEnc_obj->fre[ii]) ;
+      tmp_fim[ii] = (WebRtc_Word16)((scale) *
+                                    (float)ISACSavedEnc_obj->fim[ii]) ;
+    }
+  } else {
+    for (ii = 0;
+         ii < (KLT_ORDER_GAIN * (1 + ISACSavedEnc_obj->startIdx));
+         ii++) {
+      tmpLPCindex_g[ii] =  ISACSavedEnc_obj->LPCindex_g[ii];
+    }
+    for (ii = 0;
+         ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx));
+         ii++) {
+      tmp_fre[ii] = ISACSavedEnc_obj->fre[ii];
+      tmp_fim[ii] = ISACSavedEnc_obj->fim[ii];
+    }
+  }
+
+  /* encode bandwidth estimate */
+  WebRtcIsac_EncodeReceiveBw(&BWno, ISACBitStr_obj);
+
+  /* Loop over number of 30 msec */
+  for (ii = 0; ii <= ISACSavedEnc_obj->startIdx; ii++) {
+    /* encode pitch gains */
+    *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf;
+    WebRtcIsac_EncHistMulti(ISACBitStr_obj,
+                            &ISACSavedEnc_obj->pitchGain_index[ii], WebRtcIsac_kQPitchGainCdf_ptr, 1);
+
+    /* entropy coding of quantization pitch lags */
+    /* voicing classification selects the lag CDF table */
+    if (ISACSavedEnc_obj->meanGain[ii] < 0.2) {
+      cdf = WebRtcIsac_kQPitchLagCdfPtrLo;
+    } else if (ISACSavedEnc_obj->meanGain[ii] < 0.4) {
+      cdf = WebRtcIsac_kQPitchLagCdfPtrMid;
+    } else {
+      cdf = WebRtcIsac_kQPitchLagCdfPtrHi;
+    }
+    WebRtcIsac_EncHistMulti(ISACBitStr_obj,
+                            &ISACSavedEnc_obj->pitchIndex[PITCH_SUBFRAMES*ii], cdf,
+                            PITCH_SUBFRAMES);
+
+    /* LPC */
+    /* entropy coding of model number */
+    WebRtcIsac_EncHistMulti(ISACBitStr_obj,
+                            &ISACSavedEnc_obj->LPCmodel[ii], WebRtcIsac_kQKltModelCdfPtr, 1);
+
+    /* entropy coding of quantization indices - LPC shape only */
+    WebRtcIsac_EncHistMulti(ISACBitStr_obj,
+                            &ISACSavedEnc_obj->LPCindex_s[KLT_ORDER_SHAPE*ii],
+                            WebRtcIsac_kQKltCdfPtrShape[ISACSavedEnc_obj->LPCmodel[ii]],
+                            KLT_ORDER_SHAPE);
+
+    /* If transcoding, compute new LPC gain indices from the scaled
+       coefficients. The guard must match the branch above: for
+       scale <= 0.0 the tmpLPCcoeffs_* arrays were never filled, so
+       entering here would read uninitialized data. */
+    if ((scale > 0.0) && (scale < 1.0)) {
+      WebRtcIsac_TranscodeLPCCoef(&tmpLPCcoeffs_lo[(ORDERLO+1) *
+                                                   SUBFRAMES*ii], &tmpLPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*ii],
+                                  ISACSavedEnc_obj->LPCmodel[ii],
+                                  &tmpLPCindex_g[KLT_ORDER_GAIN * ii]);
+    }
+
+    /* entropy coding of quantization indices - LPC gain */
+    WebRtcIsac_EncHistMulti(ISACBitStr_obj,
+                            &tmpLPCindex_g[KLT_ORDER_GAIN*ii], WebRtcIsac_kQKltCdfPtrGain[
+                                ISACSavedEnc_obj->LPCmodel[ii]], KLT_ORDER_GAIN);
+
+    /* quantization and lossless coding */
+    status = WebRtcIsac_EncodeSpecLb(&tmp_fre[ii*FRAMESAMPLES_HALF],
+                                     &tmp_fim[ii*FRAMESAMPLES_HALF], ISACBitStr_obj,
+                                     ISACSavedEnc_obj->AvgPitchGain[ii]);
+    if (status < 0) {
+      return status;
+    }
+  }
+
+  /* complete arithmetic coding */
+  return WebRtcIsac_EncTerminate(ISACBitStr_obj);
+}
+
+
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeStoredDataUb12()
+ *
+ * Re-encode a previously encoded SWB-12 kHz frame from the data saved in
+ * the encoder, optionally transcoded to a lower rate by scaling the LPC
+ * gains and the FFT coefficients (0.0 < scale < 1.0). Any scale outside
+ * (0, 1] is treated as 1.0, i.e. no transcoding.
+ *
+ * Input:
+ *      -ISACSavedEnc_obj   : saved encoder data (LPC-shape indices, gains,
+ *                            gain indices, FFT coefficients).
+ *      -jitterInfo         : jitter index coded into the stream.
+ *      -scale              : transcoding scale factor.
+ *
+ * Output:
+ *      -bitStream          : the re-encoded bit-stream.
+ *
+ * Return value             : a negative error code on failure, otherwise
+ *                            the result of WebRtcIsac_EncTerminate().
+ */
+int WebRtcIsac_EncodeStoredDataUb12(
+    const ISACUBSaveEncDataStruct* ISACSavedEnc_obj,
+    Bitstr*                        bitStream,
+    WebRtc_Word32                    jitterInfo,
+    float                          scale)
+{
+  int k;
+  int err;
+  double scaledGain[SUBFRAMES];
+  WebRtc_Word16 scaledRe[FRAMESAMPLES_HALF];
+  WebRtc_Word16 scaledIm[FRAMESAMPLES_HALF];
+
+  /* Start from an empty bit-stream. */
+  bitStream->W_upper = 0xFFFFFFFF;
+  bitStream->streamval = 0;
+  bitStream->stream_index = 0;
+
+  /* Jitter index and bandwidth are coded first, as in the live encoder. */
+  WebRtcIsac_EncodeJitterInfo(jitterInfo, bitStream);
+
+  err = WebRtcIsac_EncodeBandwidth(isac12kHz, bitStream);
+  if (err < 0) {
+    return err;
+  }
+
+  /* The LPC shape is never transcoded; reuse the stored indices. */
+  WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->indexLPCShape,
+                          WebRtcIsac_kLpcShapeCdfMatUb12,
+                          UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME);
+
+  /* Only scales in (0, 1] are meaningful; anything else means no scaling. */
+  if ((scale <= 0.0) || (scale > 1.0)) {
+    scale = 1.0f;
+  }
+
+  if (scale == 1.0f) {
+    /* No transcoding: reuse the stored gain indices and FFT coefficients. */
+    WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->lpcGainIndex,
+                            WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
+    err = WebRtcIsac_EncodeSpecUB12(ISACSavedEnc_obj->realFFT,
+                                    ISACSavedEnc_obj->imagFFT, bitStream);
+  } else {
+    /* Scale the LPC gains, then quantize and code them. */
+    for (k = 0; k < SUBFRAMES; k++) {
+      scaledGain[k] = scale * ISACSavedEnc_obj->lpcGain[k];
+    }
+    WebRtcIsac_StoreLpcGainUb(scaledGain, bitStream);
+    /* Scale the FFT coefficients accordingly, then code the spectrum. */
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      scaledRe[k] =
+          (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[k] + 0.5f);
+      scaledIm[k] =
+          (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[k] + 0.5f);
+    }
+    err = WebRtcIsac_EncodeSpecUB12(scaledRe, scaledIm, bitStream);
+  }
+
+  if (err < 0) {
+    /* Encoding of the FFT coefficients failed. */
+    return err;
+  }
+
+  /* complete arithmetic coding */
+  return WebRtcIsac_EncTerminate(bitStream);
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeStoredDataUb16()
+ *
+ * Re-encode a previously encoded SWB-16 kHz frame from the data saved in
+ * the encoder, optionally transcoded to a lower rate by scaling the two
+ * sets of LPC gains and the FFT coefficients (0.0 < scale < 1.0). Any
+ * scale outside (0, 1] is treated as 1.0, i.e. no transcoding.
+ *
+ * Input:
+ *      -ISACSavedEnc_obj   : saved encoder data (LPC-shape indices, gains,
+ *                            gain indices, FFT coefficients).
+ *      -jitterInfo         : jitter index coded into the stream.
+ *      -scale              : transcoding scale factor.
+ *
+ * Output:
+ *      -bitStream          : the re-encoded bit-stream.
+ *
+ * Return value             : a negative error code on failure, otherwise
+ *                            the result of WebRtcIsac_EncTerminate().
+ */
+int
+WebRtcIsac_EncodeStoredDataUb16(
+    const ISACUBSaveEncDataStruct* ISACSavedEnc_obj,
+    Bitstr*                        bitStream,
+    WebRtc_Word32                    jitterInfo,
+    float                          scale)
+{
+  int k;
+  int err;
+  /* 16 kHz mode carries two gain sets of SUBFRAMES entries each. */
+  double scaledGain[SUBFRAMES << 1];
+  WebRtc_Word16 scaledRe[FRAMESAMPLES_HALF];
+  WebRtc_Word16 scaledIm[FRAMESAMPLES_HALF];
+
+  /* Start from an empty bit-stream. */
+  bitStream->W_upper = 0xFFFFFFFF;
+  bitStream->streamval = 0;
+  bitStream->stream_index = 0;
+
+  /* Jitter index and bandwidth are coded first, as in the live encoder. */
+  WebRtcIsac_EncodeJitterInfo(jitterInfo, bitStream);
+
+  err = WebRtcIsac_EncodeBandwidth(isac16kHz, bitStream);
+  if (err < 0) {
+    return err;
+  }
+
+  /* The LPC shape is never transcoded; reuse the stored indices. */
+  WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->indexLPCShape,
+                          WebRtcIsac_kLpcShapeCdfMatUb16,
+                          UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME);
+
+  /* Only scales in (0, 1] are meaningful; anything else means no scaling. */
+  if ((scale <= 0.0) || (scale > 1.0)) {
+    scale = 1.0f;
+  }
+
+  if (scale == 1.0f) {
+    /* No transcoding: reuse both sets of stored gain indices. */
+    WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->lpcGainIndex,
+                            WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
+    WebRtcIsac_EncHistMulti(bitStream,
+                            &ISACSavedEnc_obj->lpcGainIndex[SUBFRAMES],
+                            WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
+    /* ... and the stored FFT coefficients. */
+    err = WebRtcIsac_EncodeSpecUB16(ISACSavedEnc_obj->realFFT,
+                                    ISACSavedEnc_obj->imagFFT, bitStream);
+  } else {
+    /* Scale both halves of the gain vector. */
+    for (k = 0; k < SUBFRAMES; k++) {
+      scaledGain[k] = scale * ISACSavedEnc_obj->lpcGain[k];
+      scaledGain[k + SUBFRAMES] =
+          scale * ISACSavedEnc_obj->lpcGain[k + SUBFRAMES];
+    }
+    /* Quantize and code the two gain sets. */
+    WebRtcIsac_StoreLpcGainUb(scaledGain, bitStream);
+    WebRtcIsac_StoreLpcGainUb(&scaledGain[SUBFRAMES], bitStream);
+    /* Scale the FFT coefficients accordingly, then code the spectrum. */
+    for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+      scaledRe[k] =
+          (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[k] + 0.5f);
+      scaledIm[k] =
+          (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[k] + 0.5f);
+    }
+    err = WebRtcIsac_EncodeSpecUB16(scaledRe, scaledIm, bitStream);
+  }
+
+  if (err < 0) {
+    /* Encoding of the FFT coefficients failed. */
+    return err;
+  }
+
+  /* complete arithmetic coding */
+  return WebRtcIsac_EncTerminate(bitStream);
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_GetRedPayloadUb()
+ *
+ * Build the redundant (RCU) upper-band payload: restart from the bit-stream
+ * state saved right before spectrum coding, scale the stored FFT
+ * coefficients by RCU_TRANSCODING_SCALE_UB, and re-encode the spectrum for
+ * the given bandwidth.
+ *
+ * Input:
+ *      -ISACSavedEncObj    : saved encoder data (bit-stream snapshot and
+ *                            FFT coefficients).
+ *      -bandwidth          : isac12kHz or isac16kHz.
+ *
+ * Output:
+ *      -bitStreamObj       : the redundant bit-stream.
+ *
+ * Return value             : -1 for an unknown bandwidth, a negative error
+ *                            code if spectrum coding fails, otherwise the
+ *                            result of WebRtcIsac_EncTerminate().
+ */
+WebRtc_Word16
+WebRtcIsac_GetRedPayloadUb(
+    const ISACUBSaveEncDataStruct* ISACSavedEncObj,
+    Bitstr*                        bitStreamObj,
+    enum ISACBandwidth             bandwidth)
+{
+  int k;
+  WebRtc_Word16 status;
+  WebRtc_Word16 scaledRe[FRAMESAMPLES_HALF];
+  WebRtc_Word16 scaledIm[FRAMESAMPLES_HALF];
+
+  /* Resume from the bit-stream state saved before spectrum coding. */
+  memcpy(bitStreamObj, &ISACSavedEncObj->bitStreamObj, sizeof(Bitstr));
+
+  /* Down-scale the stored FFT coefficients for the redundant encoding. */
+  for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+    scaledRe[k] = (WebRtc_Word16)((float)ISACSavedEncObj->realFFT[k] *
+                                  RCU_TRANSCODING_SCALE_UB + 0.5);
+    scaledIm[k] = (WebRtc_Word16)((float)ISACSavedEncObj->imagFFT[k] *
+                                  RCU_TRANSCODING_SCALE_UB + 0.5);
+  }
+
+  /* Re-encode the spectrum with the coder matching the bandwidth. */
+  if (bandwidth == isac12kHz) {
+    status = WebRtcIsac_EncodeSpecUB12(scaledRe, scaledIm, bitStreamObj);
+  } else if (bandwidth == isac16kHz) {
+    status = WebRtcIsac_EncodeSpecUB16(scaledRe, scaledIm, bitStreamObj);
+  } else {
+    return -1;
+  }
+
+  if (status < 0) {
+    /* Spectrum coding failed. */
+    return status;
+  }
+
+  /* terminate entropy coding */
+  return WebRtcIsac_EncTerminate(bitStreamObj);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode_lpc_swb.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode_lpc_swb.c
new file mode 100644
index 0000000..2bf4c36
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode_lpc_swb.c
@@ -0,0 +1,708 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * code_LPC_UB.c
+ *
+ * This file contains definition of functions used to
+ * encode LPC parameters (Shape & gain) of the upper band.
+ *
+ */
+
+#include "encode_lpc_swb.h"
+#include "typedefs.h"
+#include "settings.h"
+
+#include "lpc_shape_swb12_tables.h"
+#include "lpc_shape_swb16_tables.h"
+#include "lpc_gain_swb_tables.h"
+
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+
+/******************************************************************************
+ * WebRtcIsac_RemoveLarMean()
+ *
+ * Subtract the fixed per-coefficient means from a set of concatenated LAR
+ * vectors, in place.
+ *
+ * Input:
+ *      -lar                : concatenated LAR vectors, UB_LPC_ORDER
+ *                            coefficients each.
+ *      -bandwidth          : isac12kHz or isac16kHz; selects the mean table
+ *                            and the number of vectors per frame.
+ *
+ * Output:
+ *      -lar                : mean-removed LARs.
+ *
+ * Return value             : 0 on success, -1 for an unknown bandwidth.
+ */
+WebRtc_Word16
+WebRtcIsac_RemoveLarMean(
+    double* lar,
+    WebRtc_Word16 bandwidth)
+{
+  WebRtc_Word16 vec;
+  WebRtc_Word16 coeff;
+  WebRtc_Word16 numVec;
+  const double* meanLAR;
+
+  /* Pick the mean table and vector count for the given bandwidth. */
+  if (bandwidth == isac12kHz) {
+    numVec = UB_LPC_VEC_PER_FRAME;
+    meanLAR = WebRtcIsac_kMeanLarUb12;
+  } else if (bandwidth == isac16kHz) {
+    numVec = UB16_LPC_VEC_PER_FRAME;
+    meanLAR = WebRtcIsac_kMeanLarUb16;
+  } else {
+    return -1;
+  }
+
+  /* The same mean vector is subtracted from every sub-frame vector. */
+  for (vec = 0; vec < numVec; vec++) {
+    for (coeff = 0; coeff < UB_LPC_ORDER; coeff++) {
+      lar[vec * UB_LPC_ORDER + coeff] -= meanLAR[coeff];
+    }
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_DecorrelateIntraVec()
+ *
+ * Remove the correlation among the components of each LAR vector. If the
+ * LAR vectors of one frame are put in a matrix where each column is a LAR
+ * vector of a sub-frame, this is equivalent to multiplying the LAR matrix
+ * by a decorrelating matrix from the left.
+ *
+ * Input:
+ *      -data               : mean-removed LAR vectors, 'numVec' vectors of
+ *                            dimension UB_LPC_ORDER, concatenated.
+ *      -bandwidth          : isac12kHz or isac16kHz; selects the
+ *                            decorrelation matrix and vector count.
+ *
+ * Output:
+ *      -out                : decorrelated LAR vectors.
+ *
+ * Return value             : 0 on success, -1 for an unknown bandwidth.
+ */
+WebRtc_Word16
+WebRtcIsac_DecorrelateIntraVec(
+    const double* data,
+    double*       out,
+    WebRtc_Word16 bandwidth)
+{
+  WebRtc_Word16 vec;
+  WebRtc_Word16 row;
+  WebRtc_Word16 col;
+  WebRtc_Word16 numVec;
+  const double* decorrMat;
+
+  if (bandwidth == isac12kHz) {
+    decorrMat = &WebRtcIsac_kIntraVecDecorrMatUb12[0][0];
+    numVec = UB_LPC_VEC_PER_FRAME;
+  } else if (bandwidth == isac16kHz) {
+    decorrMat = &WebRtcIsac_kIintraVecDecorrMatUb16[0][0];
+    numVec = UB16_LPC_VEC_PER_FRAME;
+  } else {
+    return -1;
+  }
+
+  /* out = decorrMat * data, applied to one LAR vector at a time. */
+  for (vec = 0; vec < numVec; vec++) {
+    const double* vecData = &data[vec * UB_LPC_ORDER];
+    for (row = 0; row < UB_LPC_ORDER; row++) {
+      double sum = 0;
+      for (col = 0; col < UB_LPC_ORDER; col++) {
+        sum += vecData[col] * decorrMat[row * UB_LPC_ORDER + col];
+      }
+      out[vec * UB_LPC_ORDER + row] = sum;
+    }
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_DecorrelateInterVec()
+ *
+ * Remove the correlation among the mean-removed LAR vectors. If the LAR
+ * vectors of one frame are put in a matrix where each column is a LAR
+ * vector of a sub-frame, this is equivalent to multiplying the LAR matrix
+ * by a decorrelating matrix from the right.
+ *
+ * Input:
+ *      -data               : matrix of LAR vectors, stored column-wise
+ *                            ('interVecDim' columns of UB_LPC_ORDER each).
+ *      -bandwidth          : isac12kHz or isac16kHz; selects the
+ *                            decorrelation matrix and its dimension.
+ *
+ * Output:
+ *      -out                : decorrelated LAR vectors (column-wise).
+ *
+ * Return value             : 0 on success, -1 for an unknown bandwidth.
+ */
+WebRtc_Word16
+WebRtcIsac_DecorrelateInterVec(
+    const double* data,
+    double* out,
+    WebRtc_Word16 bandwidth)
+{
+  WebRtc_Word16 coeff;
+  WebRtc_Word16 row;
+  WebRtc_Word16 col;
+  WebRtc_Word16 interVecDim;
+  const double* decorrMat;
+
+  if (bandwidth == isac12kHz) {
+    decorrMat = &WebRtcIsac_kInterVecDecorrMatUb12[0][0];
+    interVecDim = UB_LPC_VEC_PER_FRAME;
+  } else if (bandwidth == isac16kHz) {
+    decorrMat = &WebRtcIsac_kInterVecDecorrMatUb16[0][0];
+    interVecDim = UB16_LPC_VEC_PER_FRAME;
+  } else {
+    return -1;
+  }
+
+  /* out = data * decorrMat, with both matrices stored column-wise. */
+  for (coeff = 0; coeff < UB_LPC_ORDER; coeff++) {
+    for (col = 0; col < interVecDim; col++) {
+      double acc = 0;
+      for (row = 0; row < interVecDim; row++) {
+        acc += data[coeff + row * UB_LPC_ORDER] *
+            decorrMat[row * interVecDim + col];
+      }
+      out[coeff + col * UB_LPC_ORDER] = acc;
+    }
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_QuantizeUncorrLar()
+ *
+ * Uniformly quantize the decorrelated LAR parameters, in place, and record
+ * the quantization indices.
+ *
+ * Input:
+ *      -data               : uncorrelated LAR vectors.
+ *      -bandwidth          : isac12kHz or isac16kHz; selects the codebook
+ *                            (left reconstruction points, step size, cell
+ *                            counts) and the vector count.
+ *
+ * Output:
+ *      -data               : quantized version of the input.
+ *      -recIdx             : quantization indices, one per coefficient.
+ *
+ * Return value             : 0 on success, -1 for an unknown bandwidth
+ *                            (the return type is double in the original
+ *                            interface and is kept unchanged).
+ */
+double
+WebRtcIsac_QuantizeUncorrLar(
+    double* data,
+    int* recIdx,
+    WebRtc_Word16 bandwidth)
+{
+  WebRtc_Word16 cntr;
+  WebRtc_Word32 idx;
+  WebRtc_Word16 interVecDim;
+  const double* leftRecPoint;
+  double quantizationStepSize;
+  const WebRtc_Word16* numQuantCell;
+
+  if (bandwidth == isac12kHz) {
+    leftRecPoint         = WebRtcIsac_kLpcShapeLeftRecPointUb12;
+    quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb12;
+    numQuantCell         = WebRtcIsac_kLpcShapeNumRecPointUb12;
+    interVecDim          = UB_LPC_VEC_PER_FRAME;
+  } else if (bandwidth == isac16kHz) {
+    leftRecPoint         = WebRtcIsac_kLpcShapeLeftRecPointUb16;
+    quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb16;
+    numQuantCell         = WebRtcIsac_kLpcShapeNumRecPointUb16;
+    interVecDim          = UB16_LPC_VEC_PER_FRAME;
+  } else {
+    return -1;
+  }
+
+  for (cntr = 0; cntr < UB_LPC_ORDER * interVecDim; cntr++) {
+    /* Nearest reconstruction point, clamped to the codebook range. */
+    idx = (WebRtc_Word32)floor((data[cntr] - leftRecPoint[cntr]) /
+                               quantizationStepSize + 0.5);
+    if (idx < 0) {
+      idx = 0;
+    } else if (idx >= numQuantCell[cntr]) {
+      idx = numQuantCell[cntr] - 1;
+    }
+
+    /* Write back the reconstructed value and record the index. */
+    data[cntr] = leftRecPoint[cntr] + idx * quantizationStepSize;
+    recIdx[cntr] = idx;
+  }
+  return 0;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_DequantizeLpcParam()
+ *
+ * Get the quantized value of uncorrelated LARs given the quantization indices.
+ *
+ * Input:
+ *      -idx                : pointer to quantization indices.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : pointer to quantized values.
+ */
+WebRtc_Word16
+WebRtcIsac_DequantizeLpcParam(
+    const int* idx,
+    double*    out,
+    WebRtc_Word16 bandwidth)
+{
+  /* Reconstruct the uncorrelated LAR values from quantization indices:
+   * out[i] = leftRecPoint[i] + idx[i] * stepSize, where the table and
+   * step size are selected by the bandwidth (SWB-12 vs. SWB-16 kHz).
+   * Returns 0 on success, -1 for an unsupported bandwidth. */
+  WebRtc_Word16 i;
+  WebRtc_Word16 interVecDim;
+  const double* leftRecPoint;
+  double quantizationStepSize;
+
+  switch(bandwidth)
+  {
+    case isac12kHz:
+      leftRecPoint         = WebRtcIsac_kLpcShapeLeftRecPointUb12;
+      quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb12;
+      interVecDim          = UB_LPC_VEC_PER_FRAME;
+      break;
+    case isac16kHz:
+      leftRecPoint         = WebRtcIsac_kLpcShapeLeftRecPointUb16;
+      quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb16;
+      interVecDim          = UB16_LPC_VEC_PER_FRAME;
+      break;
+    default:
+      return -1;
+  }
+
+  //
+  // Dequantize all UB_LPC_ORDER * interVecDim coefficients.
+  //
+  for(i = 0; i < UB_LPC_ORDER * interVecDim; i++)
+  {
+    out[i] = leftRecPoint[i] + idx[i] * quantizationStepSize;
+  }
+  return 0;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_CorrelateIntraVec()
+ *
+ * This is the inverse of WebRtcIsac_DecorrelateIntraVec().
+ *
+ * Input:
+ *      -data               : uncorrelated parameters.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : correlated parameters.
+ */
+WebRtc_Word16
+WebRtcIsac_CorrelateIntraVec(
+    const double* data,
+    double*       out,
+    WebRtc_Word16 bandwidth)
+{
+  /* Inverse of WebRtcIsac_DecorrelateIntraVec(): transform each
+   * sub-frame LAR vector (length UB_LPC_ORDER) by the intra-vector
+   * decorrelation matrix of the given bandwidth. Returns 0 on
+   * success, -1 for an unsupported bandwidth. */
+  WebRtc_Word16 vec;
+  WebRtc_Word16 row;
+  WebRtc_Word16 col;
+  WebRtc_Word16 numVec;
+  double acc;
+  const double* mat;
+
+  switch(bandwidth)
+  {
+    case isac12kHz:
+      numVec = UB_LPC_VEC_PER_FRAME;
+      mat    = &WebRtcIsac_kIntraVecDecorrMatUb12[0][0];
+      break;
+    case isac16kHz:
+      numVec = UB16_LPC_VEC_PER_FRAME;
+      mat    = &WebRtcIsac_kIintraVecDecorrMatUb16[0][0];
+      break;
+    default:
+      return -1;
+  }
+
+  /* For every sub-frame vector: out-vector = data-vector (as a row)
+   * multiplied by the matrix (stored row-major, UB_LPC_ORDER^2). */
+  for(vec = 0; vec < numVec; vec++)
+  {
+    for(col = 0; col < UB_LPC_ORDER; col++)
+    {
+      acc = 0;
+      for(row = 0; row < UB_LPC_ORDER; row++)
+      {
+        acc += data[vec * UB_LPC_ORDER + row] *
+            mat[row * UB_LPC_ORDER + col];
+      }
+      out[vec * UB_LPC_ORDER + col] = acc;
+    }
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_CorrelateInterVec()
+ *
+ * This is the inverse of WebRtcIsac_DecorrelateInterVec().
+ *
+ * Input:
+ *      -data
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : correlated parameters.
+ */
+WebRtc_Word16
+WebRtcIsac_CorrelateInterVec(
+    const double* data,
+    double*       out,
+    WebRtc_Word16 bandwidth)
+{
+  /* Inverse of WebRtcIsac_DecorrelateInterVec(). For every LAR
+   * coefficient, the values it takes across the sub-frame vectors
+   * (one "track") are multiplied by the inter-vector decorrelation
+   * matrix of the given bandwidth.
+   *
+   * Input:
+   *   -data      : decorrelated LAR vectors stored column-wise
+   *                (interVecDim vectors of UB_LPC_ORDER coefficients).
+   *   -bandwidth : isac12kHz or isac16kHz.
+   * Output:
+   *   -out       : correlated LAR vectors, same layout as 'data'.
+   * Return value : 0 on success, -1 for an unsupported bandwidth.
+   */
+  WebRtc_Word16 coeffCntr;
+  WebRtc_Word16 rowCntr;
+  WebRtc_Word16 colCntr;
+  WebRtc_Word16 interVecDim;
+  /* Scratch buffer for one coefficient track; sized for the larger
+     (16 kHz) frame so it fits both bandwidths. */
+  double myVec[UB16_LPC_VEC_PER_FRAME];
+  const double* interVecDecorrMat;
+
+  switch(bandwidth)
+  {
+    case isac12kHz:
+      {
+        interVecDim       = UB_LPC_VEC_PER_FRAME;
+        interVecDecorrMat = &WebRtcIsac_kInterVecDecorrMatUb12[0][0];
+        break;
+      }
+    case isac16kHz:
+      {
+        interVecDim       = UB16_LPC_VEC_PER_FRAME;
+        interVecDecorrMat = &WebRtcIsac_kInterVecDecorrMatUb16[0][0];
+        break;
+      }
+    default:
+      return -1;
+  }
+
+  for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++)
+  {
+    /* myVec = decorrMat * (track of coefficient 'coeffCntr'). All
+       reads of this track finish before 'out' is written below, so
+       the track is transformed correctly even if 'out' overlaps
+       'data'. */
+    for(rowCntr = 0; rowCntr < interVecDim; rowCntr++)
+    {
+      myVec[rowCntr] = 0;
+      for(colCntr = 0; colCntr < interVecDim; colCntr++)
+      {
+        myVec[rowCntr] += data[coeffCntr + colCntr * UB_LPC_ORDER] *
+            interVecDecorrMat[rowCntr * interVecDim + colCntr];
+      }
+    }
+
+    /* Scatter the transformed track back into column-wise storage. */
+    for(rowCntr = 0; rowCntr < interVecDim; rowCntr++)
+    {
+      out[coeffCntr + rowCntr * UB_LPC_ORDER] = myVec[rowCntr];
+    }
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_AddLarMean()
+ *
+ * This is the inverse of WebRtcIsac_RemoveLarMean()
+ *
+ * Input:
+ *      -data               : pointer to mean-removed LAR:s.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -data               : pointer to LARs.
+ */
+WebRtc_Word16
+WebRtcIsac_AddLarMean(
+    double* data,
+    WebRtc_Word16 bandwidth)
+{
+  /* Add the bandwidth-dependent LAR mean back to every coefficient of
+   * every sub-frame vector (inverse of WebRtcIsac_RemoveLarMean()).
+   * Returns 0 on success, -1 for an unsupported bandwidth. */
+  WebRtc_Word16 vec;
+  WebRtc_Word16 coeff;
+  WebRtc_Word16 numVec;
+  const double* meanLAR;
+
+  switch(bandwidth)
+  {
+    case isac12kHz:
+      numVec = UB_LPC_VEC_PER_FRAME;
+      meanLAR = WebRtcIsac_kMeanLarUb12;
+      break;
+    case isac16kHz:
+      numVec = UB16_LPC_VEC_PER_FRAME;
+      meanLAR = WebRtcIsac_kMeanLarUb16;
+      break;
+    default:
+      return -1;
+  }
+
+  /* The same mean vector is added to each sub-frame LAR vector. */
+  for(vec = 0; vec < numVec; vec++)
+  {
+    for(coeff = 0; coeff < UB_LPC_ORDER; coeff++)
+    {
+      data[vec * UB_LPC_ORDER + coeff] += meanLAR[coeff];
+    }
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_ToLogDomainRemoveMean()
+ *
+ * Transform the LPC gain to log domain then remove the mean value.
+ *
+ * Input:
+ *      -lpcGain            : pointer to LPC Gain, expecting 6 LPC gains
+ *
+ * Output:
+ *      -lpcGain            : mean-removed in log domain.
+ */
+WebRtc_Word16
+WebRtcIsac_ToLogDomainRemoveMean(
+    double* data)
+{
+  /* Take the natural log of each of the UB_LPC_GAIN_DIM LPC gains and
+   * subtract the pre-computed gain mean, in place. Always returns 0. */
+  WebRtc_Word16 k;
+  double* g = data;
+
+  for(k = 0; k < UB_LPC_GAIN_DIM; k++, g++)
+  {
+    *g = log(*g) - WebRtcIsac_kMeanLpcGain;
+  }
+  return 0;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_DecorrelateLPGain()
+ *
+ * Decorrelate LPC gains. There are 6 LPC Gains per frame. This is like
+ * multiplying gain vector with decorrelating matrix.
+ *
+ * Input:
+ *      -data               : LPC gain in log-domain with mean removed.
+ *
+ * Output:
+ *      -out                : decorrelated parameters.
+ */
+WebRtc_Word16 WebRtcIsac_DecorrelateLPGain(
+    const double* data,
+    double* out)
+{
+  /* Multiply the UB_LPC_GAIN_DIM log-domain gain vector (as a row) by
+   * the gain decorrelation matrix: out[c] = sum_r data[r] * M[r][c].
+   * Always returns 0. */
+  WebRtc_Word16 row;
+  WebRtc_Word16 col;
+  double acc;
+
+  for(col = 0; col < UB_LPC_GAIN_DIM; col++)
+  {
+    acc = 0;
+    for(row = 0; row < UB_LPC_GAIN_DIM; row++)
+    {
+      acc += data[row] * WebRtcIsac_kLpcGainDecorrMat[row][col];
+    }
+    out[col] = acc;
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_QuantizeLpcGain()
+ *
+ * Quantize the decorrelated log-domain gains.
+ *
+ * Input:
+ *      -lpcGain            : uncorrelated LPC gains.
+ *
+ * Output:
+ *      -idx                : quantization indices
+ *      -lpcGain            : quantized value of the input.
+ */
+double WebRtcIsac_QuantizeLpcGain(
+    double* data,
+    int*    idx)
+{
+  /* Uniformly quantize the UB_LPC_GAIN_DIM decorrelated log-domain
+   * gains. 'data' is overwritten with the reconstructed (quantized)
+   * values and 'idx' receives the clamped quantization indices.
+   * Always returns 0. */
+  WebRtc_Word16 k;
+
+  for(k = 0; k < UB_LPC_GAIN_DIM; k++)
+  {
+    /* Nearest reconstruction level for this coefficient. */
+    idx[k] = (int)floor((data[k] - WebRtcIsac_kLeftRecPointLpcGain[k]) /
+                        WebRtcIsac_kQSizeLpcGain + 0.5);
+
+    /* Clamp the index to the valid range of the codebook. */
+    if(idx[k] < 0)
+    {
+      idx[k] = 0;
+    }
+    else if(idx[k] >= WebRtcIsac_kNumQCellLpcGain[k])
+    {
+      idx[k] = WebRtcIsac_kNumQCellLpcGain[k] - 1;
+    }
+
+    /* Replace the input with its reconstruction. */
+    data[k] = WebRtcIsac_kLeftRecPointLpcGain[k] + idx[k] *
+        WebRtcIsac_kQSizeLpcGain;
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_DequantizeLpcGain()
+ *
+ * Get the quantized values given the quantization indices.
+ *
+ * Input:
+ *      -idx                : pointer to quantization indices.
+ *
+ * Output:
+ *      -lpcGains           : quantized values of the given parameters.
+ */
+WebRtc_Word16 WebRtcIsac_DequantizeLpcGain(
+    const int* idx,
+    double*    out)
+{
+  /* Map quantization indices back to reconstructed log-domain gains:
+   * out[k] = leftRecPoint[k] + idx[k] * stepSize. Always returns 0. */
+  WebRtc_Word16 k;
+
+  for(k = 0; k < UB_LPC_GAIN_DIM; k++)
+  {
+    out[k] = WebRtcIsac_kLeftRecPointLpcGain[k] + idx[k] *
+        WebRtcIsac_kQSizeLpcGain;
+  }
+  return 0;
+}
+
+/******************************************************************************
+ * WebRtcIsac_CorrelateLpcGain()
+ *
+ * This is the inverse of WebRtcIsac_DecorrelateLPGain().
+ *
+ * Input:
+ *      -data               : decorrelated parameters.
+ *
+ * Output:
+ *      -out                : correlated parameters.
+ */
+WebRtc_Word16 WebRtcIsac_CorrelateLpcGain(
+    const double* data,
+    double* out)
+{
+  /* Inverse of WebRtcIsac_DecorrelateLPGain(): multiply the gain
+   * decorrelation matrix by the decorrelated vector (as a column):
+   * out[r] = sum_c M[r][c] * data[c]. Always returns 0. */
+  WebRtc_Word16 row;
+  WebRtc_Word16 col;
+  double acc;
+
+  for(row = 0; row < UB_LPC_GAIN_DIM; row++)
+  {
+    acc = 0;
+    for(col = 0; col < UB_LPC_GAIN_DIM; col++)
+    {
+      acc += WebRtcIsac_kLpcGainDecorrMat[row][col] * data[col];
+    }
+    out[row] = acc;
+  }
+
+  return 0;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_AddMeanToLinearDomain()
+ *
+ * This is the inverse of WebRtcIsac_ToLogDomainRemoveMean().
+ *
+ * Input:
+ *      -lpcGain            : LPC gain in log-domain & mean removed
+ *
+ * Output:
+ *      -lpcGain            : LPC gain in normal domain.
+ */
+WebRtc_Word16 WebRtcIsac_AddMeanToLinearDomain(
+    double* lpcGains)
+{
+  /* Inverse of WebRtcIsac_ToLogDomainRemoveMean(): add the gain mean
+   * back in the log domain, then exponentiate to return the gains to
+   * the linear domain, in place. Always returns 0. */
+  WebRtc_Word16 k;
+  double* g = lpcGains;
+
+  for(k = 0; k < UB_LPC_GAIN_DIM; k++, g++)
+  {
+    *g = exp(*g + WebRtcIsac_kMeanLpcGain);
+  }
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode_lpc_swb.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode_lpc_swb.h
new file mode 100644
index 0000000..e7f1a76
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/encode_lpc_swb.h
@@ -0,0 +1,283 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * encode_lpc_swb.h
+ *
+ * This file contains declaration of functions used to
+ * encode LPC parameters (Shape & gain) of the upper band.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_
+
+#include "typedefs.h"
+#include "settings.h"
+#include "structs.h"
+
+
+/******************************************************************************
+ * WebRtcIsac_RemoveLarMean()
+ *
+ * Remove the means from LAR coefficients.
+ *
+ * Input:
+ *      -lar                : pointer to lar vectors. LAR vectors are
+ *                            concatenated.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -lar                : pointer to mean-removed LAR:s.
+ *
+ *
+ */
+WebRtc_Word16 WebRtcIsac_RemoveLarMean(
+    double*     lar,
+    WebRtc_Word16 bandwidth);
+
+/******************************************************************************
+ * WebRtcIsac_DecorrelateIntraVec()
+ *
+ * Remove the correlation among the components of LAR vectors. If LAR vectors
+ * of one frame are put in a matrix where each column is a LAR vector of a
+ * sub-frame, then this is equivalent to multiplying the LAR matrix with
+ * a decorrelating matrix from the left.
+ *
+ * Input:
+ *      -inLar              : pointer to mean-removed LAR vecrtors.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : decorrelated LAR vectors.
+ */
+WebRtc_Word16 WebRtcIsac_DecorrelateIntraVec(
+    const double* inLAR,
+    double*       out,
+    WebRtc_Word16   bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecorrelateInterVec()
+ *
+ * Remove the correlation among mean-removed LAR vectors. If LAR vectors
+ * of one frame are put in a matrix where each column is a LAR vector of a
+ * sub-frame, then this is equivalent to multiplying the LAR matrix with
+ * a decorrelating matrix from the right.
+ *
+ * Input:
+ *      -data               : pointer to matrix of LAR vectors. The matrix
+ *                            is stored column-wise.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : decorrelated LAR vectors.
+ */
+WebRtc_Word16 WebRtcIsac_DecorrelateInterVec(
+    const double* data,
+    double*       out,
+    WebRtc_Word16   bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_QuantizeUncorrLar()
+ *
+ * Quantize the uncorrelated parameters.
+ *
+ * Input:
+ *      -data               : uncorrelated LAR vectors.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -data               : quantized version of the input.
+ *      -idx                : pointer to quantization indices.
+ */
+double WebRtcIsac_QuantizeUncorrLar(
+    double*     data,
+    int*        idx,
+    WebRtc_Word16 bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_CorrelateIntraVec()
+ *
+ * This is the inverse of WebRtcIsac_DecorrelateIntraVec().
+ *
+ * Input:
+ *      -data               : uncorrelated parameters.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : correlated parameters.
+ */
+WebRtc_Word16 WebRtcIsac_CorrelateIntraVec(
+    const double* data,
+    double*       out,
+    WebRtc_Word16   bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_CorrelateInterVec()
+ *
+ * This is the inverse of WebRtcIsac_DecorrelateInterVec().
+ *
+ * Input:
+ *      -data
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : correlated parameters.
+ */
+WebRtc_Word16 WebRtcIsac_CorrelateInterVec(
+    const double* data,
+    double*       out,
+    WebRtc_Word16   bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_AddLarMean()
+ *
+ * This is the inverse of WebRtcIsac_RemoveLarMean()
+ * 
+ * Input:
+ *      -data               : pointer to mean-removed LAR:s.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -data               : pointer to LARs.
+ */
+WebRtc_Word16 WebRtcIsac_AddLarMean(
+    double*     data,
+    WebRtc_Word16 bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_DequantizeLpcParam()
+ *
+ * Get the quantized value of uncorrelated LARs given the quantization indices.
+ *
+ * Input:
+ *      -idx                : pointer to quantization indices.
+ *      -bandwidth          : indicates if the given LAR vectors belong
+ *                            to SWB-12kHz or SWB-16kHz.
+ *
+ * Output:
+ *      -out                : pointer to quantized values.
+ */
+WebRtc_Word16 WebRtcIsac_DequantizeLpcParam(
+    const int*  idx,
+    double*     out,
+    WebRtc_Word16 bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_ToLogDomainRemoveMean()
+ *
+ * Transform the LPC gain to log domain then remove the mean value.
+ *
+ * Input:
+ *      -lpcGain            : pointer to LPC Gain, expecting 6 LPC gains
+ *
+ * Output:
+ *      -lpcGain            : mean-removed in log domain.
+ */
+WebRtc_Word16 WebRtcIsac_ToLogDomainRemoveMean(
+    double* lpGains);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecorrelateLPGain()
+ *
+ * Decorrelate LPC gains. There are 6 LPC Gains per frame. This is like
+ * multiplying gain vector with decorrelating matrix.
+ *
+ * Input:
+ *      -data               : LPC gain in log-domain with mean removed.
+ *
+ * Output:
+ *      -out                : decorrelated parameters.
+ */
+WebRtc_Word16 WebRtcIsac_DecorrelateLPGain(
+    const double* data,
+    double*       out);
+
+
+/******************************************************************************
+ * WebRtcIsac_QuantizeLpcGain()
+ *
+ * Quantize the decorrelated log-domain gains.
+ * 
+ * Input:
+ *      -lpcGain            : uncorrelated LPC gains.
+ *
+ * Output:
+ *      -idx                : quantization indices
+ *      -lpcGain            : quantized value of the input.
+ */
+double WebRtcIsac_QuantizeLpcGain(
+    double* lpGains,
+    int*    idx);
+
+
+/******************************************************************************
+ * WebRtcIsac_DequantizeLpcGain()
+ *
+ * Get the quantized values given the quantization indices.
+ *
+ * Input:
+ *      -idx                : pointer to quantization indices.
+ *
+ * Output:
+ *      -lpcGains           : quantized values of the given parameters.
+ */
+WebRtc_Word16 WebRtcIsac_DequantizeLpcGain(
+    const int* idx,
+    double*    lpGains);
+
+
+/******************************************************************************
+ * WebRtcIsac_CorrelateLpcGain()
+ *
+ * This is the inverse of WebRtcIsac_DecorrelateLPGain().
+ *
+ * Input:
+ *      -data               : decorrelated parameters.
+ *
+ * Output:
+ *      -out                : correlated parameters.
+ */
+WebRtc_Word16 WebRtcIsac_CorrelateLpcGain(
+    const double* data,
+    double*       out);
+
+
+/******************************************************************************
+ * WebRtcIsac_AddMeanToLinearDomain()
+ *
+ * This is the inverse of WebRtcIsac_ToLogDomainRemoveMean().
+ *
+ * Input:
+ *      -lpcGain            : LPC gain in log-domain & mean removed
+ *
+ * Output:
+ *      -lpcGain            : LPC gain in normal domain.
+ */
+WebRtc_Word16 WebRtcIsac_AddMeanToLinearDomain(
+    double* lpcGains);
+
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/entropy_coding.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/entropy_coding.c
new file mode 100644
index 0000000..a729944
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/entropy_coding.c
@@ -0,0 +1,2748 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * entropy_coding.c
+ *
+ * This file defines all of the functions used to arithmetically
+ * encode the iSAC bitstream.
+ *
+ */
+
+
+#include "entropy_coding.h"
+#include "settings.h"
+#include "arith_routines.h"
+#include "signal_processing_library.h"
+#include "spectrum_ar_model_tables.h"
+#include "lpc_tables.h"
+#include "pitch_gain_tables.h"
+#include "pitch_lag_tables.h"
+#include "encode_lpc_swb.h"
+#include "lpc_shape_swb12_tables.h"
+#include "lpc_shape_swb16_tables.h"
+#include "lpc_gain_swb_tables.h"
+#include "os_specific_inline.h"
+
+#include <math.h>
+#include <string.h>
+
+static const WebRtc_UWord16 kLpcVecPerSegmentUb12 = 5;
+static const WebRtc_UWord16 kLpcVecPerSegmentUb16 = 4;
+
+/* coefficients for the stepwise rate estimation */
+static const WebRtc_Word32 kRPointsQ10[100] = {
+  14495,  14295,  14112,  13944,  13788,  13643,  13459,  13276,  13195,  13239,
+  13243,  13191,  13133,  13216,  13263,  13330,  13316,  13242,  13191,  13106,
+  12942,  12669,  12291,  11840,  11361,  10795,  10192,  9561,  8934,  8335,
+  7750,  7161,  6589,  6062,  5570,  5048,  4548,  4069,  3587,  3143,
+  2717,  2305,  1915,  1557,  1235,  963,  720,  541,  423,  366,
+  369,  435,  561,  750,  1001,  1304,  1626,  1989,  2381,  2793,
+  3219,  3656,  4134,  4612,  5106,  5629,  6122,  6644,  7216,  7801,
+  8386,  8987,  9630,  10255,  10897,  11490,  11950,  12397,  12752,  12999,
+  13175,  13258,  13323,  13290,  13296,  13335,  13113,  13255,  13347,  13355,
+  13298,  13247,  13313,  13155,  13267,  13313,  13374,  13446,  13525,  13609};
+
+
+/* cdf array for encoder bandwidth (12 vs 16 kHz) indicator */
+static const WebRtc_UWord16 kOneBitEqualProbCdf[3] = {
+  0, 32768, 65535 };
+
+/* pointer to cdf array for encoder bandwidth (12 vs 16 kHz) indicator */
+static const WebRtc_UWord16 *kOneBitEqualProbCdf_ptr[1] = {
+    kOneBitEqualProbCdf };
+
+/* initial cdf index for decoder of encoded bandwidth (12 vs 16 kHz) indicator */
+static const WebRtc_UWord16 kOneBitEqualProbInitIndex[1] = {1};
+
+
+/* coefficients for the stepwise rate estimation */
+
+
+static const WebRtc_Word32 acnQ10 =  426;
+static const WebRtc_Word32 bcnQ10 = -581224;
+static const WebRtc_Word32 ccnQ10 =  722631;
+static const WebRtc_Word32 lbcnQ10 = -402874;
+#define DPMIN_Q10     -10240 // -10.00 in Q10
+#define DPMAX_Q10      10240 // 10.00 in Q10
+#define MINBITS_Q10    10240  /* 10.0 in Q10 */
+#define IS_SWB_12KHZ       1
+
+__inline WebRtc_UWord32 stepwise(WebRtc_Word32 dinQ10) {
+
+  /* Clamp the Q10 input to [DPMIN_Q10, DPMAX_Q10) and look up the
+   * corresponding entry of the 100-point rate table kRPointsQ10.
+   * The index is (d - DPMIN_Q10) * 5 / 1024, i.e. one table step per
+   * 0.2 (in Q10), mapping the clamped range onto [0, 99]. */
+  WebRtc_Word32 dQ10 = dinQ10;
+
+  if (dQ10 < DPMIN_Q10) {
+    dQ10 = DPMIN_Q10;
+  } else if (dQ10 >= DPMAX_Q10) {
+    dQ10 = DPMAX_Q10 - 1;
+  }
+
+  return kRPointsQ10[((dQ10 - DPMIN_Q10) * 5) >> 10];
+}
+
+
+__inline short log2_Q10_B( int x )
+{
+  /* Approximate log2(x) in Q10. The integer part is the bit position
+   * of the leading one (31 minus the normalization shift); the
+   * fraction is the top 10 bits of the normalized mantissa. Assumes
+   * x > 0 — the normalization count is not meaningful otherwise. */
+  int zeros = WebRtcSpl_NormU32( x );
+  unsigned int mant = (unsigned int)(x << zeros) & 0x7FFFFFFF;
+
+  return (short) (((31 - zeros) << 10) + (short)(mant >> 21));
+}
+
+
+
+/* compute correlation from power spectrum */
+static void WebRtcIsac_FindCorrelation(WebRtc_Word32 *PSpecQ12, WebRtc_Word32 *CorrQ7)
+{
+  /* Derive AR_ORDER+1 correlation terms (Q7) from a power spectrum
+   * (Q12). The spectrum is first split into a symmetric part ('summ')
+   * and an anti-symmetric part ('diff') around its midpoint; odd-index
+   * correlation terms are cosine sums over 'diff', even-index terms
+   * (beyond lag 0) over 'summ'. WebRtcIsac_kCos is an external cosine
+   * table in Q9. */
+  WebRtc_Word32 summ[FRAMESAMPLES/8];
+  WebRtc_Word32 diff[FRAMESAMPLES/8];
+  const WebRtc_Word16 *CS_ptrQ9;
+  WebRtc_Word32 sum;
+  int k, n;
+
+  /* Split spectrum into symmetric/anti-symmetric halves, with rounding
+     and a 5-bit down-shift (Q12 -> Q7). */
+  for (k = 0; k < FRAMESAMPLES/8; k++) {
+    summ[k] = (PSpecQ12[k] + PSpecQ12[FRAMESAMPLES_QUARTER-1 - k] + 16) >> 5;
+    diff[k] = (PSpecQ12[k] - PSpecQ12[FRAMESAMPLES_QUARTER-1 - k] + 16) >> 5;
+  }
+
+  /* Lag 0: plain sum of the symmetric part (small +2 bias). */
+  sum = 2;
+  for (n = 0; n < FRAMESAMPLES/8; n++)
+    sum += summ[n];
+  CorrQ7[0] = sum;
+
+  /* Odd correlation terms CorrQ7[1], CorrQ7[3], ... from 'diff'. */
+  for (k = 0; k < AR_ORDER; k += 2) {
+    sum = 0;
+    CS_ptrQ9 = WebRtcIsac_kCos[k];
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      sum += (CS_ptrQ9[n] * diff[n] + 256) >> 9;
+    CorrQ7[k+1] = sum;
+  }
+
+  /* Even correlation terms CorrQ7[2], CorrQ7[4], ... from 'summ'. */
+  for (k=1; k<AR_ORDER; k+=2) {
+    sum = 0;
+    CS_ptrQ9 = WebRtcIsac_kCos[k];
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      sum += (CS_ptrQ9[n] * summ[n] + 256) >> 9;
+    CorrQ7[k+1] = sum;
+  }
+}
+
+/* compute inverse AR power spectrum */
+/* Changed to the function used in iSAC FIX for compatibility reasons */
+static void WebRtcIsac_FindInvArSpec(const WebRtc_Word16 *ARCoefQ12,
+                                     const WebRtc_Word32 gainQ10,
+                                     WebRtc_Word32 *CurveQ16)
+{
+  /* Expand AR coefficients (Q12) and a gain (Q10) into the inverse AR
+   * power spectrum over FRAMESAMPLES/4 bins (Q16). The coefficient
+   * autocorrelation CorrQ11 is projected onto the cosine table
+   * WebRtcIsac_kCos; even-lag terms build the symmetric half of the
+   * curve and odd-lag terms the anti-symmetric half ('diffQ16'),
+   * combined mirror-wise at the end. All shift bookkeeping exists to
+   * avoid 32-bit overflow, so keep the statement order as is. */
+  WebRtc_Word32 CorrQ11[AR_ORDER+1];
+  WebRtc_Word32 sum, tmpGain;
+  WebRtc_Word32 diffQ16[FRAMESAMPLES/8];
+  const WebRtc_Word16 *CS_ptrQ9;
+  int k, n;
+  WebRtc_Word16 round, shftVal = 0, sh;
+
+  /* Zero-lag term: energy of the coefficient vector, scaled by gain. */
+  sum = 0;
+  for (n = 0; n < AR_ORDER+1; n++)
+    sum += WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]);   /* Q24 */
+  sum = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(sum, 6), 65) + 32768, 16); /* result in Q8 */
+  CorrQ11[0] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(sum, gainQ10) + 256, 9);
+
+  /* To avoid overflow, we shift down gainQ10 if it is large. We will not lose any precision */
+  if(gainQ10>400000){
+    tmpGain = WEBRTC_SPL_RSHIFT_W32(gainQ10, 3);
+    round = 32;
+    shftVal = 6;
+  } else {
+    tmpGain = gainQ10;
+    round = 256;
+    shftVal = 9;
+  }
+
+  /* Higher-lag autocorrelation of the AR coefficients, gain-scaled. */
+  for (k = 1; k < AR_ORDER+1; k++) {
+    sum = 16384;
+    for (n = k; n < AR_ORDER+1; n++)
+      sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]); /* Q24 */
+    sum = WEBRTC_SPL_RSHIFT_W32(sum, 15);
+    CorrQ11[k] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(sum, tmpGain) + round, shftVal);
+  }
+
+  /* Initialize the curve with the DC (lag-0) contribution. */
+  sum = WEBRTC_SPL_LSHIFT_W32(CorrQ11[0], 7);
+  for (n = 0; n < FRAMESAMPLES/8; n++)
+    CurveQ16[n] = sum;
+
+  /* Add the even-lag (symmetric) cosine contributions. */
+  for (k = 1; k < AR_ORDER; k += 2) {
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      CurveQ16[n] += WEBRTC_SPL_RSHIFT_W32(
+          WEBRTC_SPL_MUL(WebRtcIsac_kCos[k][n], CorrQ11[k+1]) + 2, 2);
+  }
+
+  CS_ptrQ9 = WebRtcIsac_kCos[0];
+
+  /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */
+  sh=WebRtcSpl_NormW32(CorrQ11[1]);
+  if (CorrQ11[1]==0) /* Use next correlation */
+    sh=WebRtcSpl_NormW32(CorrQ11[2]);
+
+  if (sh<9)
+    shftVal = 9 - sh;
+  else
+    shftVal = 0;
+
+  /* Accumulate the odd-lag (anti-symmetric) contributions in diffQ16,
+     pre-shifted down by shftVal to stay within 32 bits. */
+  for (n = 0; n < FRAMESAMPLES/8; n++)
+    diffQ16[n] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CS_ptrQ9[n], WEBRTC_SPL_RSHIFT_W32(CorrQ11[1], shftVal)) + 2, 2);
+  for (k = 2; k < AR_ORDER; k += 2) {
+    CS_ptrQ9 = WebRtcIsac_kCos[k];
+    for (n = 0; n < FRAMESAMPLES/8; n++)
+      diffQ16[n] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(CS_ptrQ9[n], WEBRTC_SPL_RSHIFT_W32(CorrQ11[k+1], shftVal)) + 2, 2);
+  }
+
+  /* Combine: lower half gets sym + antisym, mirrored upper half gets
+     sym - antisym (undoing the pre-shift). */
+  for (k=0; k<FRAMESAMPLES/8; k++) {
+    CurveQ16[FRAMESAMPLES_QUARTER-1 - k] = CurveQ16[k] - WEBRTC_SPL_LSHIFT_W32(diffQ16[k], shftVal);
+    CurveQ16[k] += WEBRTC_SPL_LSHIFT_W32(diffQ16[k], shftVal);
+  }
+}
+
+/* generate array of dither samples in Q7 */
+static void GenerateDitherQ7Lb(WebRtc_Word16 *bufQ7,
+                               WebRtc_UWord32 seed,
+                               int length,
+                               WebRtc_Word16 AvgPitchGain_Q12)
+{
+  /* Fill 'bufQ7' with 'length' dither samples in Q7, driven by a
+   * linear congruential generator starting from 'seed'. For low
+   * average pitch gain (< 614 in Q12, i.e. ~0.15) two of every three
+   * samples carry dither; for high pitch gain only every other sample
+   * does, with an amplitude that shrinks as the pitch gain grows.
+   * NOTE: the generator must stay bit-exact — encoder and decoder
+   * regenerate the identical dither from the same stream state. */
+  int   k, shft;
+  WebRtc_Word16 dither1_Q7, dither2_Q7, dither_gain_Q14;
+
+  if (AvgPitchGain_Q12 < 614)  /* this threshold should be equal to that in decode_spec() */
+  {
+    for (k = 0; k < length-2; k += 3)
+    {
+      /* new random unsigned int */
+      seed = (seed * 196314165) + 907633515;
+
+      /* fixed-point dither sample between -64 and 64 (Q7) */
+      dither1_Q7 = (WebRtc_Word16)(((int)seed + 16777216)>>25); // * 128/4294967295
+
+      /* new random unsigned int */
+      seed = (seed * 196314165) + 907633515;
+
+      /* fixed-point dither sample between -64 and 64 */
+      dither2_Q7 = (WebRtc_Word16)(((int)seed + 16777216)>>25);
+
+      /* Randomly choose which one of the three positions stays zero. */
+      shft = (seed >> 25) & 15;
+      if (shft < 5)
+      {
+        bufQ7[k]   = dither1_Q7;
+        bufQ7[k+1] = dither2_Q7;
+        bufQ7[k+2] = 0;
+      }
+      else if (shft < 10)
+      {
+        bufQ7[k]   = dither1_Q7;
+        bufQ7[k+1] = 0;
+        bufQ7[k+2] = dither2_Q7;
+      }
+      else
+      {
+        bufQ7[k]   = 0;
+        bufQ7[k+1] = dither1_Q7;
+        bufQ7[k+2] = dither2_Q7;
+      }
+    }
+  }
+  else
+  {
+    /* Dither gain decreases linearly with the average pitch gain. */
+    dither_gain_Q14 = (WebRtc_Word16)(22528 - 10 * AvgPitchGain_Q12);
+
+    /* dither on half of the coefficients */
+    for (k = 0; k < length-1; k += 2)
+    {
+      /* new random unsigned int */
+      seed = (seed * 196314165) + 907633515;
+
+      /* fixed-point dither sample between -64 and 64 */
+      dither1_Q7 = (WebRtc_Word16)(((int)seed + 16777216)>>25);
+
+      /* dither sample is placed in either even or odd index */
+      shft = (seed >> 25) & 1;     /* either 0 or 1 */
+
+      bufQ7[k + shft] = (((dither_gain_Q14 * dither1_Q7) + 8192)>>14);
+      bufQ7[k + 1 - shft] = 0;
+    }
+  }
+}
+
+
+
+/******************************************************************************
+ * GenerateDitherQ7LbUB()
+ *
+ * generate array of dither samples in Q7 There are less zeros in dither
+ * vector compared to GenerateDitherQ7Lb.
+ *
+ * A uniform random number generator with the range of [-64 64] is employed
+ * but the generated dithers are scaled by 0.35, a heuristic scaling.
+ *
+ * Input:
+ *      -seed               : the initial seed for the random number generator.
+ *      -length             : the number of dither values to be generated.
+ *
+ * Output:
+ *      -bufQ7              : pointer to a buffer where dithers are written to.
+ */
+static void GenerateDitherQ7LbUB(
+    WebRtc_Word16 *bufQ7,
+    WebRtc_UWord32 seed,
+    int length)
+{
+  /* Fill 'bufQ7' with 'length' dither samples in Q7, generated by a
+   * linear congruential generator and scaled by 0.35 (2048 / 2^13). */
+  int n;
+  WebRtc_Word16 dither;
+
+  for (n = 0; n < length; n++) {
+    /* Advance the LCG state. */
+    seed = (seed * 196314165) + 907633515;
+
+    /* Uniform fixed-point sample between -64 and 64 (Q7). */
+    // * 128/4294967295
+    dither = (WebRtc_Word16)(((int)seed + 16777216)>>25);
+
+    /* Heuristic scaling by 0.35. */
+    bufQ7[n] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(dither,
+                                                           2048, 13);
+  }
+}
+
+
+
+/*
+ * function to decode the complex spectrum from the bit stream
+ * returns the total number of bytes in the stream
+ */
/******************************************************************************
 * WebRtcIsac_DecodeSpecLb()
 * Decode the complex DFT spectrum of the lower band from the bit stream.
 *
 * Input:
 *  - streamdata        : encoded data and entropy-coder state.
 *  - AvgPitchGain_Q12  : average pitch gain in Q12; shapes the dither and
 *                        selects the low-SNR scaling branch below.
 *
 * Output:
 *  - fr / fi           : real / imaginary DFT coefficients as doubles.
 *
 * Return value         : total number of bytes in the stream, or
 *                        -ISAC_RANGE_ERROR_DECODE_SPECTRUM on failure.
 */
int WebRtcIsac_DecodeSpecLb(Bitstr *streamdata,
                            double *fr,
                            double *fi,
                            WebRtc_Word16 AvgPitchGain_Q12)
{
  WebRtc_Word16  DitherQ7[FRAMESAMPLES];
  WebRtc_Word16  data[FRAMESAMPLES];
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES_QUARTER];
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];
  WebRtc_Word16  gainQ10;
  WebRtc_Word32  gain2_Q10, res;
  WebRtc_Word32  in_sqrt;
  WebRtc_Word32  newRes;
  int            k, len, i;

  /* create dither signal; seeded with the arithmetic-coder state so the
     decoder regenerates exactly the dither the encoder added */
  GenerateDitherQ7Lb(DitherQ7, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12);

  /* decode model parameters (reflection coefficients of the AR model, Q15) */
  if (WebRtcIsac_DecodeRc(streamdata, RCQ15) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  if (WebRtcIsac_DecodeGain2(streamdata, &gain2_Q10) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  /* compute inverse AR power spectrum */
  WebRtcIsac_FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
  /* Babylonian (Newton) integer sqrt; 'res' warm-starts each bin with the
     previous bin's root and is refined at most 10 times ('i' counts down).
     NOTE(review): if newRes ever became 0 the next division would be by
     zero -- presumably invARSpec2_Q16 values never get that small; confirm. */
  res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1);
  for (k = 0; k < FRAMESAMPLES_QUARTER; k++)
  {
    in_sqrt = invARSpec2_Q16[k];
    i = 10;

    /* Negative values make no sense for a real sqrt-function. */
    if (in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);

    invARSpecQ8[k] = (WebRtc_Word16)newRes;
  }

  /* arithmetic decoding of spectrum (fills 'data' with Q7 samples) */
  if ((len = WebRtcIsac_DecLogisticMulti2(data, streamdata, invARSpecQ8, DitherQ7,
                                          FRAMESAMPLES, !IS_SWB_12KHZ)) <1)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  /* subtract dither and scale down spectral samples with low SNR */
  /* 614 in Q12 is ~0.15; the branches differ only in the numerator
     (30 vs. 36) and the additive floor (33 vs. 40) of the gain */
  if (AvgPitchGain_Q12 <= 614)
  {
    for (k = 0; k < FRAMESAMPLES; k += 4)
    {
      /* gainQ10 ~= 30 / (invARSpec2 + 33) with rounding; the trailing
         /128.0 converts the Q7 samples to floating point */
      gainQ10 = WebRtcSpl_DivW32W16ResW16(30 << 10,
                                              (WebRtc_Word16)((invARSpec2_Q16[k>>2] + (32768 + (33 << 16))) >> 16));
      *fr++ = (double)((data[ k ] * gainQ10 + 512) >> 10) / 128.0;
      *fi++ = (double)((data[k+1] * gainQ10 + 512) >> 10) / 128.0;
      *fr++ = (double)((data[k+2] * gainQ10 + 512) >> 10) / 128.0;
      *fi++ = (double)((data[k+3] * gainQ10 + 512) >> 10) / 128.0;
    }
  }
  else
  {
    for (k = 0; k < FRAMESAMPLES; k += 4)
    {
      gainQ10 = WebRtcSpl_DivW32W16ResW16(36 << 10,
                                              (WebRtc_Word16)((invARSpec2_Q16[k>>2] + (32768 + (40 << 16))) >> 16));
      *fr++ = (double)((data[ k ] * gainQ10 + 512) >> 10) / 128.0;
      *fi++ = (double)((data[k+1] * gainQ10 + 512) >> 10) / 128.0;
      *fr++ = (double)((data[k+2] * gainQ10 + 512) >> 10) / 128.0;
      *fi++ = (double)((data[k+3] * gainQ10 + 512) >> 10) / 128.0;
    }
  }

  return len;
}
+
+/******************************************************************************
+ * WebRtcIsac_DecodeSpecUB16()
+ * Decode real and imaginary part of the DFT coefficients, given a bit-stream.
+ * This function is called when the codec is in 0-16 kHz bandwidth.
+ * The decoded DFT coefficient can be transformed to time domain by
+ * WebRtcIsac_Time2Spec().
+ *
+ * Input:
 *  - streamdata            : pointer to a structure containing the encoded
 *                            data and the parameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are written to.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are written to.
+ *
 * Return value             : < 0 if an error occurs, otherwise the
 *                            number of bytes in the stream (len).
+ */
int WebRtcIsac_DecodeSpecUB16(
    Bitstr*     streamdata,
    double*     fr,
    double*     fi)
{
  WebRtc_Word16  DitherQ7[FRAMESAMPLES];
  WebRtc_Word16  data[FRAMESAMPLES];
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES_QUARTER];
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];
  WebRtc_Word32  gain2_Q10, res;
  WebRtc_Word32  in_sqrt;
  WebRtc_Word32  newRes;
  int            k, len, i, j;

  /* create dither signal; seeded with the arithmetic-coder state so the
     decoder regenerates exactly the dither the encoder added */
  GenerateDitherQ7LbUB(DitherQ7, streamdata->W_upper, FRAMESAMPLES);

  /* decode model parameters (reflection coefficients of the AR model, Q15) */
  if (WebRtcIsac_DecodeRc(streamdata, RCQ15) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  if (WebRtcIsac_DecodeGain2(streamdata, &gain2_Q10) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  /* compute inverse AR power spectrum */
  WebRtcIsac_FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
  /* Babylonian (Newton) integer sqrt; 'res' warm-starts each bin with the
     previous bin's root, at most 10 refinements per bin */
  res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1);
  for (k = 0; k < FRAMESAMPLES_QUARTER; k++)
  {
    in_sqrt = invARSpec2_Q16[k];
    i = 10;

    /* Negative values make no sense for a real sqrt-function. */
    if (in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);

    invARSpecQ8[k] = (WebRtc_Word16)newRes;
  }

  /* arithmetic decoding of spectrum (fills 'data' with Q7 samples) */
  if ((len = WebRtcIsac_DecLogisticMulti2(data, streamdata, invARSpecQ8,
                                          DitherQ7, FRAMESAMPLES, !IS_SWB_12KHZ)) <1)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  /* re-arrange DFT coefficients and scale down */
  /* the stream interleaves two coefficients from the front of the output
     arrays and two from the back (mirroring WebRtcIsac_EncodeSpecUB16);
     samples are Q7, hence the /128.0 */
  for (j = 0, k = 0; k < FRAMESAMPLES; k += 4, j++)
  {
    fr[j] = (double)data[ k ] / 128.0;
    fi[j] = (double)data[k+1] / 128.0;
    fr[(FRAMESAMPLES_HALF) - 1 - j] = (double)data[k+2] / 128.0;
    fi[(FRAMESAMPLES_HALF) - 1 - j] = (double)data[k+3] / 128.0;

  }
  return len;
}
+
+
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeSpecUB12()
+ * Decode real and imaginary part of the DFT coefficients, given a bit-stream.
+ * This function is called when the codec is in 0-12 kHz bandwidth.
+ * The decoded DFT coefficient can be transformed to time domain by
+ * WebRtcIsac_Time2Spec().
+ *
+ * Input:
 *  - streamdata            : pointer to a structure containing the encoded
 *                            data and the parameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are written to.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are written to.
+ *
 * Return value             : < 0 if an error occurs, otherwise the
 *                            number of bytes in the stream (len).
+ */
int WebRtcIsac_DecodeSpecUB12(
    Bitstr *streamdata,
    double *fr,
    double *fi)
{
  WebRtc_Word16  DitherQ7[FRAMESAMPLES];
  WebRtc_Word16  data[FRAMESAMPLES];
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES_QUARTER];
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];
  WebRtc_Word32  gain2_Q10;
  WebRtc_Word32  res;
  WebRtc_Word32  in_sqrt;
  WebRtc_Word32  newRes;
  int            k, len, i;

  /* create dither signal; seeded with the arithmetic-coder state so the
     decoder regenerates exactly the dither the encoder added */
  GenerateDitherQ7LbUB(DitherQ7, streamdata->W_upper, FRAMESAMPLES);

  /* decode model parameters (reflection coefficients of the AR model, Q15) */
  if (WebRtcIsac_DecodeRc(streamdata, RCQ15) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;

  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  if (WebRtcIsac_DecodeGain2(streamdata, &gain2_Q10) < 0)
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;


  /* compute inverse AR power spectrum */
  WebRtcIsac_FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
  /* Babylonian (Newton) integer sqrt; 'res' warm-starts each bin with the
     previous bin's root, at most 10 refinements per bin */
  res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1);
  for (k = 0; k < FRAMESAMPLES_QUARTER; k++)
  {
    in_sqrt = invARSpec2_Q16[k];
    i = 10;

    /* Negative values make no sense for a real sqrt-function. */
    if (in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);

    invARSpecQ8[k] = (WebRtc_Word16)newRes;
  }

  /* arithmetic decoding of spectrum; only FRAMESAMPLES_HALF samples are
     coded in the 0-12 kHz mode */
  if ((len = WebRtcIsac_DecLogisticMulti2(data, streamdata,
                                          invARSpecQ8, DitherQ7, (FRAMESAMPLES_HALF), IS_SWB_12KHZ)) < 1)
  {
    return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;
  }

  /* de-interleave the Q7 samples into the first quarter of fr/fi */
  for (k = 0, i = 0; k < FRAMESAMPLES_HALF; k += 4)
  {
    fr[i] = (double)data[ k ] / 128.0;
    fi[i] = (double)data[k+1] / 128.0;
    i++;
    fr[i] = (double)data[k+2] / 128.0;
    fi[i] = (double)data[k+3] / 128.0;
    i++;
  }

  // The second half of the real and imaginary coefficients is zero. This is
  // because the old FFT module requires two signals as input, while in the
  // 0-12 kHz mode we only have the 8-12 kHz band, so the second signal is
  // set to zero.
  memset(&fr[FRAMESAMPLES_QUARTER], 0, FRAMESAMPLES_QUARTER * sizeof(double));
  memset(&fi[FRAMESAMPLES_QUARTER], 0, FRAMESAMPLES_QUARTER * sizeof(double));

  return len;
}
+
+
+
+
+
/******************************************************************************
 * WebRtcIsac_EncodeSpecLb()
 * Quantize and entropy-encode the lower-band DFT coefficients (given in Q7)
 * into 'streamdata'.
 *
 * Input:
 *  - fr / fi           : real / imaginary DFT coefficients, Q7.
 *  - AvgPitchGain_Q12  : average pitch gain in Q12; shapes the dither.
 *
 * Output:
 *  - streamdata        : encoded data and entropy-coder state.
 *
 * Return value         : 0 on success, < 0 on error.
 */
int WebRtcIsac_EncodeSpecLb(const WebRtc_Word16 *fr,
                            const WebRtc_Word16 *fi,
                            Bitstr *streamdata,
                            WebRtc_Word16 AvgPitchGain_Q12)
{
  WebRtc_Word16  ditherQ7[FRAMESAMPLES];
  WebRtc_Word16  dataQ7[FRAMESAMPLES];
  WebRtc_Word32  PSpec[FRAMESAMPLES_QUARTER];
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES_QUARTER];
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
  WebRtc_Word32  CorrQ7[AR_ORDER+1];
  WebRtc_Word32  CorrQ7_norm[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word32  gain2_Q10;
  WebRtc_Word16  val;
  WebRtc_Word32  nrg, res;
  WebRtc_UWord32 sum;
  WebRtc_Word32  in_sqrt;
  WebRtc_Word32  newRes;
  WebRtc_Word16  err;
  WebRtc_UWord32  nrg_u32;
  int            shift_var;
  int          k, n, j, i;


  /* create dither signal; seeded with the arithmetic-coder state so the
     decoder can regenerate the identical dither */
  GenerateDitherQ7Lb(ditherQ7, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12);

  /* add dither and quantize, and compute power spectrum */
  /* (x + dither + 64) & 0xFF80 rounds x+dither to the nearest multiple of
     128 (an integer in Q7); subtracting the dither again gives subtractive
     dithering.  Each PSpec bin is the mean power of 4 samples (sum >> 2). */
  for (k = 0; k < FRAMESAMPLES; k += 4)
  {
    val = ((*fr++ + ditherQ7[k]   + 64) & 0xFF80) - ditherQ7[k];
    dataQ7[k] = val;
    sum = val * val;

    val = ((*fi++ + ditherQ7[k+1] + 64) & 0xFF80) - ditherQ7[k+1];
    dataQ7[k+1] = val;
    sum += val * val;

    val = ((*fr++ + ditherQ7[k+2] + 64) & 0xFF80) - ditherQ7[k+2];
    dataQ7[k+2] = val;
    sum += val * val;

    val = ((*fi++ + ditherQ7[k+3] + 64) & 0xFF80) - ditherQ7[k+3];
    dataQ7[k+3] = val;
    sum += val * val;

    PSpec[k>>2] = sum >> 2;
  }

  /* compute correlation from power spectrum */
  WebRtcIsac_FindCorrelation(PSpec, CorrQ7);


  /* find AR coefficients */
  /* number of bit shifts to 14-bit normalize CorrQ7[0] (leaving room for sign) */
  shift_var = WebRtcSpl_NormW32(CorrQ7[0]) - 18;

  if (shift_var > 0) {
    for (k=0; k<AR_ORDER+1; k++) {
      CorrQ7_norm[k] = CorrQ7[k] << shift_var;
    }
  } else {
    for (k=0; k<AR_ORDER+1; k++) {
      CorrQ7_norm[k] = CorrQ7[k] >> (-shift_var);
    }
  }

  /* find RC coefficients */
  WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15);

  /* quantize & code RC Coefficient */
  WebRtcIsac_EncodeRc(RCQ15, streamdata);

  /* RC -> AR coefficients */
  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  /* compute ARCoef' * Corr * ARCoef in Q19 */
  /* the inner loops split the symmetric Toeplitz product into the lower
     (j-n) and upper (n-j) triangles of the correlation matrix */
  nrg = 0;
  for (j = 0; j <= AR_ORDER; j++) {
    for (n = 0; n <= j; n++) {
      nrg += ( ARCoefQ12[j] * ((CorrQ7_norm[j-n] * ARCoefQ12[n] + 256) >> 9) + 4 ) >> 3;
    }
    for (n = j+1; n <= AR_ORDER; n++) {
      nrg += ( ARCoefQ12[j] * ((CorrQ7_norm[n-j] * ARCoefQ12[n] + 256) >> 9) + 4 ) >> 3;
    }
  }

  /* undo the normalization shift and saturate to a positive Word32 */
  nrg_u32 = (WebRtc_UWord32)nrg;
  if (shift_var > 0) {
    nrg_u32 = nrg_u32 >> shift_var;
  } else {
    nrg_u32 = nrg_u32 << (-shift_var);
  }

  if (nrg_u32 > 0x7FFFFFFF)
    nrg = 0x7FFFFFFF;
  else
    nrg = (WebRtc_Word32)nrg_u32;

  gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES_QUARTER, nrg);  /* also shifts 31 bits to the left! */

  /* quantize & code gain2_Q10 */
  if (WebRtcIsac_EncodeGain2(&gain2_Q10, streamdata)) {
    return -1;
  }

  /* compute inverse AR power spectrum */
  WebRtcIsac_FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
  /* Babylonian (Newton) integer sqrt; 'res' warm-starts each bin with the
     previous bin's root, at most 10 refinements per bin */
  res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1);
  for (k = 0; k < FRAMESAMPLES_QUARTER; k++)
  {
    in_sqrt = invARSpec2_Q16[k];
    i = 10;

    /* Negative values make no sense for a real sqrt-function. */
    if (in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);

    invARSpecQ8[k] = (WebRtc_Word16)newRes;
  }

  /* arithmetic coding of spectrum */
  err = WebRtcIsac_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8,
                                     FRAMESAMPLES, !IS_SWB_12KHZ);
  if (err < 0)
  {
    return (err);
  }

  return 0;
}
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeSpecUB16()
+ * Quantize and encode real and imaginary part of the DFT coefficients.
+ * This function is called when the codec is in 0-16 kHz bandwidth.
+ * The real and imaginary part are computed by calling WebRtcIsac_Time2Spec().
+ *
+ *
+ * Input:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are stored.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are stored.
+ *
+ * Output:
 *  - streamdata            : pointer to a structure containing the encoded
 *                            data and the parameters needed for entropy
+ *                            coding.
+ *
 * Return value             : < 0 if an error occurs
+ *                              0 if succeeded.
+ */
int WebRtcIsac_EncodeSpecUB16(
    const WebRtc_Word16* fr,
    const WebRtc_Word16* fi,
    Bitstr*            streamdata)
{
  WebRtc_Word16  ditherQ7[FRAMESAMPLES];
  WebRtc_Word16  dataQ7[FRAMESAMPLES];
  WebRtc_Word32  PSpec[FRAMESAMPLES_QUARTER];
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES_QUARTER];
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
  WebRtc_Word32  CorrQ7[AR_ORDER+1];
  WebRtc_Word32  CorrQ7_norm[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word32  gain2_Q10;
  WebRtc_Word16  val;
  WebRtc_Word32  nrg, res;
  WebRtc_UWord32 sum;
  WebRtc_Word32  in_sqrt;
  WebRtc_Word32  newRes;
  WebRtc_Word16  err;
  WebRtc_UWord32 nrg_u32;
  int            shift_var;
  int          k, n, j, i;

  /* create dither signal; seeded with the arithmetic-coder state so the
     decoder can regenerate the identical dither */
  GenerateDitherQ7LbUB(ditherQ7, streamdata->W_upper, FRAMESAMPLES);

  /* add dither and quantize, and compute power spectrum */
  /* each group of 4 interleaves two coefficients from the front (index j)
     and two from the back (index FRAMESAMPLES_HALF-1-j) of fr/fi; the
     decoder's re-arrange loop mirrors this.  (x + dither + 64) & 0xFF80
     rounds to the nearest multiple of 128 (an integer in Q7). */
  for (j = 0, k = 0; k < FRAMESAMPLES; k += 4, j++)
  {
    val = ((fr[j] + ditherQ7[k]   + 64) & 0xFF80) - ditherQ7[k];
    dataQ7[k] = val;
    sum = val * val;

    val = ((fi[j] + ditherQ7[k+1] + 64) & 0xFF80) - ditherQ7[k+1];
    dataQ7[k+1] = val;
    sum += val * val;

    val = ((fr[(FRAMESAMPLES_HALF) - 1 - j] + ditherQ7[k+2] + 64) &
           0xFF80) - ditherQ7[k+2];
    dataQ7[k+2] = val;
    sum += val * val;

    val = ((fi[(FRAMESAMPLES_HALF) - 1 - j] + ditherQ7[k+3] + 64) &
           0xFF80) - ditherQ7[k+3];
    dataQ7[k+3] = val;
    sum += val * val;

    /* mean power of the 4 samples */
    PSpec[k>>2] = sum >> 2;
  }

  /* compute correlation from power spectrum */
  WebRtcIsac_FindCorrelation(PSpec, CorrQ7);


  /* find AR coefficients
     number of bit shifts to 14-bit normalize CorrQ7[0]
     (leaving room for sign) */
  shift_var = WebRtcSpl_NormW32(CorrQ7[0]) - 18;

  if (shift_var > 0) {
    for (k=0; k<AR_ORDER+1; k++) {
      CorrQ7_norm[k] = CorrQ7[k] << shift_var;
    }
  } else {
    for (k=0; k<AR_ORDER+1; k++) {
      CorrQ7_norm[k] = CorrQ7[k] >> (-shift_var);
    }
  }

  /* find RC coefficients */
  WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15);

  /* quantize & code RC Coef */
  WebRtcIsac_EncodeRc(RCQ15, streamdata);

  /* RC -> AR coefficients */
  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);

  /* compute ARCoef' * Corr * ARCoef in Q19 */
  /* lower (j-n) and upper (n-j) triangles of the symmetric Toeplitz
     correlation matrix are handled by the two inner loops */
  nrg = 0;
  for (j = 0; j <= AR_ORDER; j++) {
    for (n = 0; n <= j; n++) {
      nrg += ( ARCoefQ12[j] * ((CorrQ7_norm[j-n] * ARCoefQ12[n] +
                                256) >> 9) + 4 ) >> 3;
    }
    for (n = j+1; n <= AR_ORDER; n++) {
      nrg += ( ARCoefQ12[j] * ((CorrQ7_norm[n-j] * ARCoefQ12[n] +
                                256) >> 9) + 4 ) >> 3;
    }
  }
  /* undo the normalization shift and saturate to a positive Word32 */
  nrg_u32 = (WebRtc_UWord32)nrg;
  if (shift_var > 0) {
    nrg_u32 = nrg_u32 >> shift_var;
  } else {
    nrg_u32 = nrg_u32 << (-shift_var);
  }

  if (nrg_u32 > 0x7FFFFFFF)
    nrg = 0x7FFFFFFF;
  else
    nrg = (WebRtc_Word32)nrg_u32;

  gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES_QUARTER, nrg);  /* also shifts 31 bits to the left! */

  /* quantize & code gain2_Q10 */
  if (WebRtcIsac_EncodeGain2(&gain2_Q10, streamdata)) {
    return -1;
  }

  /* compute inverse AR power spectrum */
  WebRtcIsac_FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
  /* Babylonian (Newton) integer sqrt; 'res' warm-starts each bin with the
     previous bin's root, at most 10 refinements per bin */
  res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1);
  for (k = 0; k < FRAMESAMPLES_QUARTER; k++)
  {
    in_sqrt = invARSpec2_Q16[k];
    i = 10;

    /* Negative values make no sense for a real sqrt-function. */
    if (in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);

    invARSpecQ8[k] = (WebRtc_Word16)newRes;
  }

  /* arithmetic coding of spectrum */
  err = WebRtcIsac_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8,
                                     FRAMESAMPLES, !IS_SWB_12KHZ);
  if (err < 0)
  {
    return (err);
  }

  return 0;
}
+
+
+
+
/******************************************************************************
 * WebRtcIsac_EncodeSpecUB12()
 * Quantize and entropy-encode the upper-band DFT coefficients (Q7) when the
 * codec runs in 0-12 kHz bandwidth; only FRAMESAMPLES_HALF samples are coded.
 *
 * Input:
 *  - fr / fi           : real / imaginary DFT coefficients, Q7.
 *
 * Output:
 *  - streamdata        : encoded data and entropy-coder state.
 *
 * Return value         : 0 on success, < 0 on error.
 */
int WebRtcIsac_EncodeSpecUB12(const WebRtc_Word16 *fr,
                                const WebRtc_Word16 *fi,
                                Bitstr *streamdata)
{
  WebRtc_Word16  ditherQ7[FRAMESAMPLES];
  WebRtc_Word16  dataQ7[FRAMESAMPLES];
  WebRtc_Word32  PSpec[FRAMESAMPLES_QUARTER];
  WebRtc_Word32  invARSpec2_Q16[FRAMESAMPLES_QUARTER];
  WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
  WebRtc_Word32  CorrQ7[AR_ORDER+1];
  WebRtc_Word32  CorrQ7_norm[AR_ORDER+1];
  WebRtc_Word16  RCQ15[AR_ORDER];
  WebRtc_Word16  ARCoefQ12[AR_ORDER+1];
  WebRtc_Word32  gain2_Q10;
  WebRtc_Word16  val;
  WebRtc_Word32  nrg, res;
  WebRtc_UWord32 sum;
  WebRtc_Word32  in_sqrt;
  WebRtc_Word32  newRes;
  WebRtc_Word16  err;
  int            shift_var;
  int          k, n, j, i;
  WebRtc_UWord32 nrg_u32;

  /* create dither signal; seeded with the arithmetic-coder state so the
     decoder can regenerate the identical dither */
  GenerateDitherQ7LbUB(ditherQ7, streamdata->W_upper, FRAMESAMPLES);

  /* add dither and quantize, and compute power spectrum */
  /* only FRAMESAMPLES_HALF samples exist here, so each PSpec bin covers
     2 samples (sum >> 1), not 4 as in the other encoders; the j-guards
     prevent writing past FRAMESAMPLES_QUARTER bins */
  for (k = 0, j = 0; k < (FRAMESAMPLES_HALF); k += 4)
  {
    /* (x + dither + 64) & 0xFF80 rounds to the nearest multiple of 128 */
    val = ((*fr++ + ditherQ7[k]   + 64) & 0xFF80) - ditherQ7[k];
    dataQ7[k] = val;
    sum = (val) * (val);

    val = ((*fi++ + ditherQ7[k+1] + 64) & 0xFF80) - ditherQ7[k+1];
    dataQ7[k+1] = val;
    sum += (val) * (val);

    if(j < FRAMESAMPLES_QUARTER)
    {
      PSpec[j] = sum >> 1;
      j++;
    }

    val = ((*fr++ + ditherQ7[k+2] + 64) & 0xFF80) - ditherQ7[k+2];
    dataQ7[k+2] = val;
    sum = (val) * (val);

    val = ((*fi++ + ditherQ7[k+3] + 64) & 0xFF80) - ditherQ7[k+3];
    dataQ7[k+3] = val;
    sum += (val) * (val);

    if(j < FRAMESAMPLES_QUARTER)
    {
      PSpec[j] = sum >> 1;
      j++;
    }
  }
  /* compute correlation from power spectrum */
  WebRtcIsac_FindCorrelation(PSpec, CorrQ7);


  /* find AR coefficients */
  /* number of bit shifts to 14-bit normalize CorrQ7[0] (leaving room for sign) */
  shift_var = WebRtcSpl_NormW32(CorrQ7[0]) - 18;

  if (shift_var > 0) {
    for (k=0; k<AR_ORDER+1; k++) {
      CorrQ7_norm[k] = CorrQ7[k] << shift_var;
    }
  } else {
    for (k=0; k<AR_ORDER+1; k++) {
      CorrQ7_norm[k] = CorrQ7[k] >> (-shift_var);
    }
  }

  /* find RC coefficients */
  WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15);

  /* quantize & code RC Coef */
  WebRtcIsac_EncodeRc(RCQ15, streamdata);


  /* RC -> AR coefficients */
  WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12);


  /* compute ARCoef' * Corr * ARCoef in Q19 */
  /* lower (j-n) and upper (n-j) triangles of the symmetric Toeplitz
     correlation matrix are handled by the two inner loops */
  nrg = 0;
  for (j = 0; j <= AR_ORDER; j++) {
    for (n = 0; n <= j; n++) {
      nrg += ( ARCoefQ12[j] * ((CorrQ7_norm[j-n] * ARCoefQ12[n] + 256) >> 9) + 4 ) >> 3;
    }
    for (n = j+1; n <= AR_ORDER; n++) {
      nrg += ( ARCoefQ12[j] * ((CorrQ7_norm[n-j] * ARCoefQ12[n] + 256) >> 9) + 4 ) >> 3;
    }
  }

  /* undo the normalization shift and saturate to a positive Word32 */
  nrg_u32 = (WebRtc_UWord32)nrg;
  if (shift_var > 0) {
    nrg_u32 = nrg_u32 >> shift_var;
  } else {
    nrg_u32 = nrg_u32 << (-shift_var);
  }

  if (nrg_u32 > 0x7FFFFFFF) {
    nrg = 0x7FFFFFFF;
  } else {
    nrg = (WebRtc_Word32)nrg_u32;
  }

  gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES_QUARTER, nrg);  /* also shifts 31 bits to the left! */

  /* quantize & code gain2_Q10 */
  if (WebRtcIsac_EncodeGain2(&gain2_Q10, streamdata)) {
    return -1;
  }

  /* compute inverse AR power spectrum */
  WebRtcIsac_FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16);

  /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */
  /* Babylonian (Newton) integer sqrt; 'res' warm-starts each bin with the
     previous bin's root, at most 10 refinements per bin */
  res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1);
  for (k = 0; k < FRAMESAMPLES_QUARTER; k++)
  {
    in_sqrt = invARSpec2_Q16[k];
    i = 10;

    /* Negative values make no sense for a real sqrt-function. */
    if (in_sqrt<0)
      in_sqrt=-in_sqrt;

    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);

    invARSpecQ8[k] = (WebRtc_Word16)newRes;
  }

  /* arithmetic coding of spectrum */
  err = WebRtcIsac_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8,
                                     (FRAMESAMPLES_HALF), IS_SWB_12KHZ);
  if (err < 0)
  {
    return (err);
  }

  return 0;
}
+
+
+
+/* step-up */
+void WebRtcIsac_Rc2Poly(double *RC, int N, double *a)
+{
+  int m, k;
+  double tmp[MAX_AR_MODEL_ORDER];
+
+  a[0] = 1.0;
+  tmp[0] = 1.0;
+  for (m=1; m<=N; m++) {
+    /* copy */
+    for (k=1; k<m; k++)
+      tmp[k] = a[k];
+
+    a[m] = RC[m-1];
+    for (k=1; k<m; k++)
+      a[k] += RC[m-1] * tmp[m-k];
+  }
+  return;
+}
+
/* Step-down recursion: convert the AR polynomial a[0..N] into reflection
 * coefficients RC[0..N-1] (inverse of WebRtcIsac_Rc2Poly).
 * NOTE: a[1..N-1] is overwritten in place, so the caller's polynomial is
 * destroyed.  There is no guard against |RC[m]| == 1 (division by zero);
 * presumably inputs are always strictly stable -- TODO(review): confirm. */
void WebRtcIsac_Poly2Rc(double *a, int N, double *RC)
{
  int m, k;
  double tmp[MAX_AR_MODEL_ORDER];
  double tmp_inv;

  RC[N-1] = a[N];
  for (m=N-1; m>0; m--) {
    /* remove the contribution of reflection coefficient m */
    tmp_inv = 1.0 / (1.0 - RC[m]*RC[m]);
    for (k=1; k<=m; k++)
      tmp[k] = (a[k] - RC[m] * a[m-k+1]) * tmp_inv;

    for (k=1; k<m; k++)
      a[k] = tmp[k];

    RC[m-1] = tmp[m];
  }
  return;
}
+
+
+#define MAX_ORDER 100
+
+
+void WebRtcIsac_Rc2Lar(const double *refc, double *lar, int order) {  /* Matlab's LAR definition */
+
+  int k;
+
+  for (k = 0; k < order; k++) {
+    lar[k] = log((1 + refc[k]) / (1 - refc[k]));
+  }
+
+}
+
+
+void WebRtcIsac_Lar2Rc(const double *lar, double *refc,  int order) {
+
+  int k;
+  double tmp;
+
+  for (k = 0; k < order; k++) {
+    tmp = exp(lar[k]);
+    refc[k] = (tmp - 1) / (tmp + 1);
+  }
+}
+
+void WebRtcIsac_Poly2Lar(double *lowband, int orderLo, double *hiband, int orderHi, int Nsub, double *lars) {
+
+  int k, n, orderTot;
+  double poly[MAX_ORDER], lar[MAX_ORDER], rc[MAX_ORDER], *inpl, *inph, *outp;
+
+  orderTot = (orderLo + orderHi + 2);
+  inpl = lowband;
+  inph = hiband;
+  outp = lars;
+  poly[0] = 1.0;
+  for (k = 0; k < Nsub; k++) {
+    /* gains */
+    outp[0] = inpl[0];
+    outp[1] = inph[0];
+
+    /* Low band */
+    for (n = 1; n <= orderLo; n++)
+      poly[n] = inpl[n];
+    WebRtcIsac_Poly2Rc(poly, orderLo, rc);
+    WebRtcIsac_Rc2Lar(rc, lar, orderLo);
+    for (n = 0; n < orderLo; n++)
+      outp[n + 2] = lar[n];
+
+    /* High band */
+    for (n = 1; n <= orderHi; n++)
+      poly[n] = inph[n];
+    WebRtcIsac_Poly2Rc(poly, orderHi, rc);
+    WebRtcIsac_Rc2Lar(rc, lar, orderHi);
+    for (n = 0; n < orderHi; n++)
+      outp[n + orderLo + 2] = lar[n];
+
+    inpl += orderLo + 1;
+    inph += orderHi + 1;
+    outp += orderTot;
+  }
+}
+
+
+
+WebRtc_Word16
+WebRtcIsac_Poly2LarUB(
+    double* lpcVecs,
+    WebRtc_Word16 bandwidth)
+{
+  double      poly[MAX_ORDER];
+  double      rc[MAX_ORDER];
+  double*     ptrIO;
+  WebRtc_Word16 vecCntr;
+  WebRtc_Word16 vecSize;
+  WebRtc_Word16 numVec;
+
+  vecSize = UB_LPC_ORDER;
+  switch(bandwidth)
+  {
+    case isac12kHz:
+      {
+        numVec  = UB_LPC_VEC_PER_FRAME;
+        break;
+      }
+    case isac16kHz:
+      {
+        numVec  = UB16_LPC_VEC_PER_FRAME;
+        break;
+      }
+    default:
+      return -1;
+  }
+
+  ptrIO = lpcVecs;
+  poly[0] = 1.0;
+  for(vecCntr = 0; vecCntr < numVec; vecCntr++)
+  {
+    memcpy(&poly[1], ptrIO, sizeof(double) * vecSize);
+    WebRtcIsac_Poly2Rc(poly, vecSize, rc);
+    WebRtcIsac_Rc2Lar(rc, ptrIO, vecSize);
+    ptrIO += vecSize;
+  }
+  return 0;
+}
+
+
+
+void WebRtcIsac_Lar2Poly(double *lars, double *lowband, int orderLo, double *hiband, int orderHi, int Nsub) {
+
+  int k, n, orderTot;
+  double poly[MAX_ORDER], lar[MAX_ORDER], rc[MAX_ORDER], *outpl, *outph, *inp;
+
+  orderTot = (orderLo + orderHi + 2);
+  outpl = lowband;
+  outph = hiband;
+  inp = lars;
+  for (k = 0; k < Nsub; k++) {
+    /* gains */
+    outpl[0] = inp[0];
+    outph[0] = inp[1];
+
+    /* Low band */
+    for (n = 0; n < orderLo; n++)
+      lar[n] = inp[n + 2];
+    WebRtcIsac_Lar2Rc(lar, rc, orderLo);
+    WebRtcIsac_Rc2Poly(rc, orderLo, poly);
+    for (n = 1; n <= orderLo; n++)
+      outpl[n] = poly[n];
+
+    /* High band */
+    for (n = 0; n < orderHi; n++)
+      lar[n] = inp[n + orderLo + 2];
+    WebRtcIsac_Lar2Rc(lar, rc, orderHi);
+    WebRtcIsac_Rc2Poly(rc, orderHi, poly);
+    for (n = 1; n <= orderHi; n++)
+      outph[n] = poly[n];
+
+    outpl += orderLo + 1;
+    outph += orderHi + 1;
+    inp += orderTot;
+  }
+}
+
+// assumes 2 LAR vectors interpolates to 'numPolyVec' A-polynomials
+void
+WebRtcIsac_Lar2PolyInterpolUB(
+    double* larVecs,
+    double* percepFilterParams,
+    int     numPolyVecs) // includes the first and the last point of the interval
+{
+
+  int polyCntr, coeffCntr;
+  double larInterpol[UB_LPC_ORDER];
+  double rc[UB_LPC_ORDER];
+  double delta[UB_LPC_ORDER];
+
+  // calculate the step-size for linear interpolation coefficients
+  for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++)
+  {
+    delta[coeffCntr] = (larVecs[UB_LPC_ORDER + coeffCntr] -
+                        larVecs[coeffCntr]) / (numPolyVecs - 1);
+  }
+
+  for(polyCntr = 0; polyCntr < numPolyVecs; polyCntr++)
+  {
+    for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++)
+    {
+      larInterpol[coeffCntr] = larVecs[coeffCntr] +
+          delta[coeffCntr] * polyCntr;
+    }
+    WebRtcIsac_Lar2Rc(larInterpol, rc, UB_LPC_ORDER);
+
+    // convert to A-polynomial, the following function returns A[0] = 1;
+    // which is written where gains had to be written. Then we write the
+    // gain (outside this function). This way we say a memcpy
+    WebRtcIsac_Rc2Poly(rc, UB_LPC_ORDER, percepFilterParams);
+    percepFilterParams += (UB_LPC_ORDER + 1);
+  }
+}
+
+int WebRtcIsac_DecodeLpc(Bitstr *streamdata, double *LPCCoef_lo, double *LPCCoef_hi, int *outmodel) {
+
+  double lars[KLT_ORDER_GAIN + KLT_ORDER_SHAPE];
+  int err;
+
+  err = WebRtcIsac_DecodeLpcCoef(streamdata, lars, outmodel);
+  if (err<0)  // error check
+    return -ISAC_RANGE_ERROR_DECODE_LPC;
+
+  WebRtcIsac_Lar2Poly(lars, LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, SUBFRAMES);
+
+  return 0;
+}
+
/* Decode the upper-band LPC parameters and expand them, by interpolation,
 * into per-subframe perceptual-filter parameter blocks of
 * (UB_LPC_ORDER + 1) doubles each (gain followed by AR coefficients).
 * Returns 0 on success, -ISAC_RANGE_ERROR_DECODE_LPC on a stream error,
 * -1 for an unknown bandwidth. */
WebRtc_Word16
WebRtcIsac_DecodeInterpolLpcUb(
    Bitstr*     streamdata,
    double*     percepFilterParams,
    WebRtc_Word16 bandwidth)
{

  double lpcCoeff[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
  int err;
  int interpolCntr;
  int subframeCntr;
  WebRtc_Word16 numSegments;
  WebRtc_Word16 numVecPerSegment;
  WebRtc_Word16 numGains;

  double percepFilterGains[SUBFRAMES<<1];
  double* ptrOutParam = percepFilterParams;

  /* decode the LAR vectors and the per-subframe gains */
  err = WebRtcIsac_DecodeLpcCoefUB(streamdata, lpcCoeff, percepFilterGains,
                                   bandwidth);

  // error check
  if (err<0)
  {
    return -ISAC_RANGE_ERROR_DECODE_LPC;
  }

  /* a segment is the interval between two consecutive decoded LAR
     vectors; each segment is expanded to numVecPerSegment polynomials */
  switch(bandwidth)
  {
    case isac12kHz:
      {
        numGains = SUBFRAMES;
        numSegments = UB_LPC_VEC_PER_FRAME - 1;
        numVecPerSegment = kLpcVecPerSegmentUb12;
        break;
      }
    case isac16kHz:
      {
        numGains = SUBFRAMES << 1;
        numSegments = UB16_LPC_VEC_PER_FRAME - 1;
        numVecPerSegment = kLpcVecPerSegmentUb16;
        break;
      }
    default:
      return -1;
  }



  /* interpolate each segment; Lar2PolyInterpolUB writes
     numVecPerSegment + 1 vectors but the output pointer only advances by
     numVecPerSegment blocks, so the shared boundary vector is overwritten
     by the first vector of the next segment */
  for(interpolCntr = 0; interpolCntr < numSegments; interpolCntr++)
  {
    WebRtcIsac_Lar2PolyInterpolUB(
        &lpcCoeff[interpolCntr * UB_LPC_ORDER], ptrOutParam,
        numVecPerSegment + 1);

    ptrOutParam += ((numVecPerSegment) *
                    (UB_LPC_ORDER + 1));
  }

  ptrOutParam = percepFilterParams;

  /* in 16 kHz mode the gains start at the second block --
     NOTE(review): presumably this matches the filter layout expected
     downstream; confirm against the callers */
  if(bandwidth == isac16kHz)
  {
    ptrOutParam += (1 + UB_LPC_ORDER);
  }

  /* write each gain into slot 0 of its block, replacing the A[0] = 1
     placeholder left there by WebRtcIsac_Rc2Poly() */
  for(subframeCntr = 0; subframeCntr < numGains; subframeCntr++)
  {
    *ptrOutParam = percepFilterGains[subframeCntr];
    ptrOutParam += (1 + UB_LPC_ORDER);
  }

  return 0;
}
+
+
/* Decode & dequantize the lower-band LPC coefficients.
 *
 * Reads the KLT model number and the shape/gain quantization indices from
 * the bit stream, maps them to quantization levels, applies the inverse
 * KLT (a left and a right matrix multiply, both using the transform
 * matrices transposed), and finally undoes the encoder's scaling and mean
 * removal.  Gains are returned in the linear domain (exp of the decoded
 * log-gain).
 *
 * Output layout of LPCCoef, repeated once per subframe:
 *   [gain, gain, LPC_LOBAND_ORDER lo-band LARs, LPC_HIBAND_ORDER hi-band LARs]
 *
 * Returns 0 on success or a negative error code on bit-stream error;
 * *outmodel receives the decoded model number. */
int WebRtcIsac_DecodeLpcCoef(Bitstr *streamdata, double *LPCCoef, int *outmodel)
{
  int j, k, n, model, pos, pos2, posg, poss, offsg, offss, offs2;
  int index_g[KLT_ORDER_GAIN], index_s[KLT_ORDER_SHAPE];
  double tmpcoeffs_g[KLT_ORDER_GAIN],tmpcoeffs_s[KLT_ORDER_SHAPE];
  double tmpcoeffs2_g[KLT_ORDER_GAIN], tmpcoeffs2_s[KLT_ORDER_SHAPE];
  double sum;
  int err;


  /* entropy decoding of model number */
  err = WebRtcIsac_DecHistOneStepMulti(&model, streamdata, WebRtcIsac_kQKltModelCdfPtr, WebRtcIsac_kQKltModelInitIndex, 1);
  if (err<0)  // error check
    return err;

  /* entropy decoding of quantization indices (shape first, then gain) */
  err = WebRtcIsac_DecHistOneStepMulti(index_s, streamdata, WebRtcIsac_kQKltCdfPtrShape[model], WebRtcIsac_kQKltInitIndexShape[model], KLT_ORDER_SHAPE);
  if (err<0)  // error check
    return err;
  err = WebRtcIsac_DecHistOneStepMulti(index_g, streamdata, WebRtcIsac_kQKltCdfPtrGain[model], WebRtcIsac_kQKltInitIndexGain[model], KLT_ORDER_GAIN);
  if (err<0)  // error check
    return err;


  /* find quantization levels for coefficients; the SelInd tables map
     decode order back to coefficient order */
  for (k=0; k<KLT_ORDER_SHAPE; k++) {
    tmpcoeffs_s[WebRtcIsac_kQKltSelIndShape[k]] = WebRtcIsac_kQKltLevelsShape[WebRtcIsac_kQKltOfLevelsShape[model]+WebRtcIsac_kQKltOffsetShape[model][k] + index_s[k]];
  }
  for (k=0; k<KLT_ORDER_GAIN; k++) {
    tmpcoeffs_g[WebRtcIsac_kQKltSelIndGain[k]] = WebRtcIsac_kQKltLevelsGain[WebRtcIsac_kQKltOfLevelsGain[model]+ WebRtcIsac_kQKltOffsetGain[model][k] + index_g[k]];
  }


  /* inverse KLT  */

  /* left transform */  // Transpose matrix!
  offsg = 0;
  offss = 0;
  posg = 0;
  poss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    offs2 = 0;
    for (k=0; k<LPC_GAIN_ORDER; k++) {
      sum = 0;
      pos = offsg;
      pos2 = offs2;
      for (n=0; n<LPC_GAIN_ORDER; n++)
        sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[model][pos2++];
      tmpcoeffs2_g[posg++] = sum;
      offs2 += LPC_GAIN_ORDER;
    }
    offs2 = 0;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sum = 0;
      pos = offss;
      pos2 = offs2;
      for (n=0; n<LPC_SHAPE_ORDER; n++)
        sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[model][pos2++];
      tmpcoeffs2_s[poss++] = sum;
      offs2 += LPC_SHAPE_ORDER;
    }
    offsg += LPC_GAIN_ORDER;
    offss += LPC_SHAPE_ORDER;
  }


  /* right transform */ // Transpose matrix
  offsg = 0;
  offss = 0;
  posg = 0;
  poss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    posg = offsg;
    for (k=0; k<LPC_GAIN_ORDER; k++) {
      sum = 0;
      pos = k;
      pos2 = j;
      for (n=0; n<SUBFRAMES; n++) {
        sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[model][pos2];
        pos += LPC_GAIN_ORDER;
        pos2 += SUBFRAMES;

      }
      tmpcoeffs_g[posg++] = sum;
    }
    poss = offss;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sum = 0;
      pos = k;
      pos2 = j;
      for (n=0; n<SUBFRAMES; n++) {
        sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[model][pos2];
        pos += LPC_SHAPE_ORDER;
        pos2 += SUBFRAMES;
      }
      tmpcoeffs_s[poss++] = sum;
    }
    offsg += LPC_GAIN_ORDER;
    offss += LPC_SHAPE_ORDER;
  }


  /* scaling, mean addition, and gain restoration */
  posg = 0;poss = 0;pos=0;
  for (k=0; k<SUBFRAMES; k++) {

    /* log gains: unscale, add the mean back, return to linear domain */
    LPCCoef[pos] = tmpcoeffs_g[posg] / LPC_GAIN_SCALE;
    LPCCoef[pos] += WebRtcIsac_kLpcMeansGain[model][posg];
    LPCCoef[pos] = exp(LPCCoef[pos]);
    pos++;posg++;
    LPCCoef[pos] = tmpcoeffs_g[posg] / LPC_GAIN_SCALE;
    LPCCoef[pos] += WebRtcIsac_kLpcMeansGain[model][posg];
    LPCCoef[pos] = exp(LPCCoef[pos]);
    pos++;posg++;

    /* lo band LAR coeffs: unscale and add the mean back */
    for (n=0; n<LPC_LOBAND_ORDER; n++, pos++, poss++) {
      LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_LOBAND_SCALE;
      LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[model][poss];
    }

    /* hi band LAR coeffs: unscale and add the mean back */
    for (n=0; n<LPC_HIBAND_ORDER; n++, pos++, poss++) {
      LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_HIBAND_SCALE;
      LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[model][poss];
    }
  }


  *outmodel=model;

  return 0;
}
+
/* estimate code length of LPC Coef */
/* Quantize & entropy-code the lower-band LARs.
 *
 * Input LPCCoef holds, per subframe, [2 gains (linear domain),
 * LPC_LOBAND_ORDER lo-band LARs, LPC_HIBAND_ORDER hi-band LARs].
 * Gains are moved to the log domain; all coefficients are mean-removed,
 * scaled and KLT-transformed before scalar quantization.  The total code
 * length estimated from the code-length tables is returned in *size, the
 * model number (always 0 in this version) in *model.  Only the SHAPE
 * indices are entropy-coded here; gain indices are coded separately (see
 * WebRtcIsac_EncodeLpcGainLb).  On return the LAR part of LPCCoef is
 * replaced by its quantized reconstruction; the gain slots are untouched.
 * Shape indices are also saved in encData for multiple-bit-stream use. */
void WebRtcIsac_EncodeLar(double *LPCCoef, int *model, double *size, Bitstr *streamdata, ISAC_SaveEncData_t* encData) {
  int j, k, n, bmodel, pos, pos2, poss, posg, offsg, offss, offs2;
  int index_g[KLT_ORDER_GAIN], index_s[KLT_ORDER_SHAPE];
  int index_ovr_g[KLT_ORDER_GAIN], index_ovr_s[KLT_ORDER_SHAPE];
  double Bits;
  double tmpcoeffs_g[KLT_ORDER_GAIN], tmpcoeffs_s[KLT_ORDER_SHAPE];
  double tmpcoeffs2_g[KLT_ORDER_GAIN], tmpcoeffs2_s[KLT_ORDER_SHAPE];
  double sum;

  /* Only one LPC model remains in iSAC. Tables for other models are saved for compatibility reasons. */
  bmodel = 0;

  /* log gains, mean removal and scaling */
  posg = 0;poss = 0;pos=0;

  for (k=0; k<SUBFRAMES; k++) {
    /* log gains (two per subframe) */
    tmpcoeffs_g[posg] = log(LPCCoef[pos]);
    tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[bmodel][posg];
    tmpcoeffs_g[posg] *= LPC_GAIN_SCALE;
    posg++;pos++;

    tmpcoeffs_g[posg] = log(LPCCoef[pos]);
    tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[bmodel][posg];
    tmpcoeffs_g[posg] *= LPC_GAIN_SCALE;
    posg++;pos++;

    /* lo band LAR coeffs */
    for (n=0; n<LPC_LOBAND_ORDER; n++, poss++, pos++) {
      tmpcoeffs_s[poss] = LPCCoef[pos] - WebRtcIsac_kLpcMeansShape[bmodel][poss];
      tmpcoeffs_s[poss] *= LPC_LOBAND_SCALE;
    }

    /* hi band LAR coeffs */
    for (n=0; n<LPC_HIBAND_ORDER; n++, poss++, pos++) {
      tmpcoeffs_s[poss] = LPCCoef[pos] - WebRtcIsac_kLpcMeansShape[bmodel][poss];
      tmpcoeffs_s[poss] *= LPC_HIBAND_SCALE;
    }
  }

  /* KLT  */

  /* left transform */
  offsg = 0;
  offss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    posg = offsg;
    for (k=0; k<LPC_GAIN_ORDER; k++) {
      sum = 0;
      pos = offsg;
      pos2 = k;
      for (n=0; n<LPC_GAIN_ORDER; n++) {
        sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[bmodel][pos2];
        pos2 += LPC_GAIN_ORDER;
      }
      tmpcoeffs2_g[posg++] = sum;
    }
    poss = offss;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sum = 0;
      pos = offss;
      pos2 = k;
      for (n=0; n<LPC_SHAPE_ORDER; n++) {
        sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[bmodel][pos2];
        pos2 += LPC_SHAPE_ORDER;
      }
      tmpcoeffs2_s[poss++] = sum;
    }
    offsg += LPC_GAIN_ORDER;
    offss += LPC_SHAPE_ORDER;
  }

  /* right transform */
  offsg = 0;
  offss = 0;
  offs2 = 0;
  for (j=0; j<SUBFRAMES; j++) {
    posg = offsg;
    for (k=0; k<LPC_GAIN_ORDER; k++) {
      sum = 0;
      pos = k;
      pos2 = offs2;
      for (n=0; n<SUBFRAMES; n++) {
        sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[bmodel][pos2++];
        pos += LPC_GAIN_ORDER;
      }
      tmpcoeffs_g[posg++] = sum;
    }
    poss = offss;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sum = 0;
      pos = k;
      pos2 = offs2;
      for (n=0; n<SUBFRAMES; n++) {
        sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[bmodel][pos2++];
        pos += LPC_SHAPE_ORDER;
      }
      tmpcoeffs_s[poss++] = sum;
    }
    offs2 += SUBFRAMES;
    offsg += LPC_GAIN_ORDER;
    offss += LPC_SHAPE_ORDER;
  }

  /* quantize coefficients */

  Bits = 0.0;
  /* gains: quantize in decode order (SelInd mapping), clamp to the table
     range, and accumulate the table-driven code-length estimate */
  for (k=0; k<KLT_ORDER_GAIN; k++)
  {
    pos = WebRtcIsac_kQKltSelIndGain[k];
    pos2= WebRtcIsac_lrint(tmpcoeffs_g[pos] / KLT_STEPSIZE);
    index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k];
    if (index_g[k] < 0) {
      index_g[k] = 0;
    }
    else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k])
      index_g[k] = WebRtcIsac_kQKltMaxIndGain[k];
    index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[bmodel][k]+index_g[k];
    pos = WebRtcIsac_kQKltOfLevelsGain[bmodel] + index_ovr_g[k];

    /* determine number of bits */
    sum = WebRtcIsac_kQKltCodeLenGain[pos];
    Bits += sum;
  }

  /* shape: same quantize/clamp/code-length accumulation as the gains */
  for (k=0; k<KLT_ORDER_SHAPE; k++)
  {
    index_s[k] = (WebRtcIsac_lrint(tmpcoeffs_s[WebRtcIsac_kQKltSelIndShape[k]] / KLT_STEPSIZE)) + WebRtcIsac_kQKltQuantMinShape[k];
    if (index_s[k] < 0)
      index_s[k] = 0;
    else if (index_s[k] > WebRtcIsac_kQKltMaxIndShape[k])
      index_s[k] = WebRtcIsac_kQKltMaxIndShape[k];
    index_ovr_s[k] = WebRtcIsac_kQKltOffsetShape[bmodel][k]+index_s[k];
    pos = WebRtcIsac_kQKltOfLevelsShape[bmodel] + index_ovr_s[k];
    sum = WebRtcIsac_kQKltCodeLenShape[pos];
    Bits += sum;
  }


  /* Only one model remains in this version of the code, model = 0 */
  *model=bmodel;
  *size=Bits;

  /* entropy coding of model number */
  WebRtcIsac_EncHistMulti(streamdata, model, WebRtcIsac_kQKltModelCdfPtr, 1);

  /* entropy coding of quantization indices - shape only */
  WebRtcIsac_EncHistMulti(streamdata, index_s, WebRtcIsac_kQKltCdfPtrShape[bmodel], KLT_ORDER_SHAPE);

  /* Save data for creation of multiple bit streams */
  encData->LPCmodel[encData->startIdx] = 0;
  for (k=0; k<KLT_ORDER_SHAPE; k++)
  {
    encData->LPCindex_s[KLT_ORDER_SHAPE*encData->startIdx + k] = index_s[k];
  }

  /* find quantization levels for shape coefficients */
  for (k=0; k<KLT_ORDER_SHAPE; k++) {
    tmpcoeffs_s[WebRtcIsac_kQKltSelIndShape[k]] = WebRtcIsac_kQKltLevelsShape[WebRtcIsac_kQKltOfLevelsShape[bmodel]+index_ovr_s[k]];
  }
  /* inverse KLT (shape only)  */
  /* left transform */  // Transpose matrix!
  offss = 0;
  poss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    offs2 = 0;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sum = 0;
      pos = offss;
      pos2 = offs2;
      for (n=0; n<LPC_SHAPE_ORDER; n++)
        sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[bmodel][pos2++];
      tmpcoeffs2_s[poss++] = sum;
      offs2 += LPC_SHAPE_ORDER;
    }
    offss += LPC_SHAPE_ORDER;
  }


  /* right transform */ // Transpose matrix
  offss = 0;
  poss = 0;
  for (j=0; j<SUBFRAMES; j++) {
    poss = offss;
    for (k=0; k<LPC_SHAPE_ORDER; k++) {
      sum = 0;
      pos = k;
      pos2 = j;
      for (n=0; n<SUBFRAMES; n++) {
        sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[bmodel][pos2];
        pos += LPC_SHAPE_ORDER;
        pos2 += SUBFRAMES;
      }
      tmpcoeffs_s[poss++] = sum;
    }
    offss += LPC_SHAPE_ORDER;
  }

  /* scaling and mean addition: write the quantized LARs back into LPCCoef */
  poss = 0;pos=0;
  for (k=0; k<SUBFRAMES; k++) {

    /* skip the two gain slots (coded in WebRtcIsac_EncodeLpcGainLb) */
    pos+=2;

    /* lo band LAR coeffs */
    for (n=0; n<LPC_LOBAND_ORDER; n++, pos++, poss++) {
      LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_LOBAND_SCALE;
      LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[bmodel][poss];
    }

    /* hi band LAR coeffs */
    for (n=0; n<LPC_HIBAND_ORDER; n++, pos++, poss++) {
      LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_HIBAND_SCALE;
      LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[bmodel][poss];
    }

  }

}
+
+
+void WebRtcIsac_EncodeLpcLb(double *LPCCoef_lo, double *LPCCoef_hi, int *model,
+                            double *size, Bitstr *streamdata, ISAC_SaveEncData_t* encData) {
+
+  double lars[KLT_ORDER_GAIN+KLT_ORDER_SHAPE];
+  int k;
+
+  WebRtcIsac_Poly2Lar(LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, SUBFRAMES, lars);
+  WebRtcIsac_EncodeLar(lars, model, size, streamdata, encData);
+  WebRtcIsac_Lar2Poly(lars, LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, SUBFRAMES);
+  /* Save data for creation of multiple bit streams (and transcoding) */
+  for (k=0; k<(ORDERLO+1)*SUBFRAMES; k++) {
+    encData->LPCcoeffs_lo[(ORDERLO+1)*SUBFRAMES*encData->startIdx + k] = LPCCoef_lo[k];
+  }
+  for (k=0; k<(ORDERHI+1)*SUBFRAMES; k++) {
+    encData->LPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*encData->startIdx + k] = LPCCoef_hi[k];
+  }
+}
+
+
+WebRtc_Word16
+WebRtcIsac_EncodeLpcUB(
+    double*                  lpcVecs,
+    Bitstr*                  streamdata,
+    double*                  interpolLPCCoeff,
+    WebRtc_Word16              bandwidth,
+    ISACUBSaveEncDataStruct* encData)
+{
+
+  double    U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+  int     idx[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+  int interpolCntr;
+
+  WebRtcIsac_Poly2LarUB(lpcVecs, bandwidth);
+  WebRtcIsac_RemoveLarMean(lpcVecs, bandwidth);
+  WebRtcIsac_DecorrelateIntraVec(lpcVecs, U, bandwidth);
+  WebRtcIsac_DecorrelateInterVec(U, lpcVecs, bandwidth);
+  WebRtcIsac_QuantizeUncorrLar(lpcVecs, idx, bandwidth);
+
+  WebRtcIsac_CorrelateInterVec(lpcVecs, U, bandwidth);
+  WebRtcIsac_CorrelateIntraVec(U, lpcVecs, bandwidth);
+  WebRtcIsac_AddLarMean(lpcVecs, bandwidth);
+
+  switch(bandwidth)
+  {
+    case isac12kHz:
+      {
+        // Stor the indices to be used for multiple encoding.
+        memcpy(encData->indexLPCShape, idx, UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME *
+               sizeof(int));
+        WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcShapeCdfMatUb12,
+                                UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME);
+        for(interpolCntr = 0; interpolCntr < UB_INTERPOL_SEGMENTS; interpolCntr++)
+        {
+          WebRtcIsac_Lar2PolyInterpolUB(lpcVecs,
+                                        interpolLPCCoeff, kLpcVecPerSegmentUb12 + 1);
+          lpcVecs += UB_LPC_ORDER;
+          interpolLPCCoeff += (kLpcVecPerSegmentUb12 * (UB_LPC_ORDER + 1));
+        }
+        break;
+      }
+    case isac16kHz:
+      {
+        // Stor the indices to be used for multiple encoding.
+        memcpy(encData->indexLPCShape, idx, UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME *
+               sizeof(int));
+        WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcShapeCdfMatUb16,
+                                UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME);
+        for(interpolCntr = 0; interpolCntr < UB16_INTERPOL_SEGMENTS; interpolCntr++)
+        {
+          WebRtcIsac_Lar2PolyInterpolUB(lpcVecs,
+                                        interpolLPCCoeff, kLpcVecPerSegmentUb16 + 1);
+          lpcVecs += UB_LPC_ORDER;
+          interpolLPCCoeff += (kLpcVecPerSegmentUb16 * (UB_LPC_ORDER + 1));
+        }
+        break;
+      }
+    default:
+      return -1;
+  }
+  return 0;
+}
+
+void WebRtcIsac_EncodeLpcGainLb(double *LPCCoef_lo, double *LPCCoef_hi, int model, Bitstr *streamdata, ISAC_SaveEncData_t* encData) {
+
+  int j, k, n, pos, pos2, posg, offsg, offs2;
+  int index_g[KLT_ORDER_GAIN];
+  int index_ovr_g[KLT_ORDER_GAIN];
+  double tmpcoeffs_g[KLT_ORDER_GAIN];
+  double tmpcoeffs2_g[KLT_ORDER_GAIN];
+  double sum;
+
+  /* log gains, mean removal and scaling */
+  posg = 0;
+  for (k=0; k<SUBFRAMES; k++) {
+    tmpcoeffs_g[posg] = log(LPCCoef_lo[(LPC_LOBAND_ORDER+1)*k]);
+    tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[model][posg];
+    tmpcoeffs_g[posg] *= LPC_GAIN_SCALE;
+    posg++;
+    tmpcoeffs_g[posg] = log(LPCCoef_hi[(LPC_HIBAND_ORDER+1)*k]);
+    tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[model][posg];
+    tmpcoeffs_g[posg] *= LPC_GAIN_SCALE;
+    posg++;
+  }
+
+  /* KLT  */
+
+  /* left transform */
+  offsg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<LPC_GAIN_ORDER; k++) {
+      sum = 0;
+      pos = offsg;
+      pos2 = k;
+      for (n=0; n<LPC_GAIN_ORDER; n++) {
+        sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[model][pos2];
+        pos2 += LPC_GAIN_ORDER;
+      }
+      tmpcoeffs2_g[posg++] = sum;
+    }
+    offsg += LPC_GAIN_ORDER;
+  }
+
+  /* right transform */
+  offsg = 0;
+  offs2 = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<LPC_GAIN_ORDER; k++) {
+      sum = 0;
+      pos = k;
+      pos2 = offs2;
+      for (n=0; n<SUBFRAMES; n++) {
+        sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[model][pos2++];
+        pos += LPC_GAIN_ORDER;
+      }
+      tmpcoeffs_g[posg++] = sum;
+    }
+    offs2 += SUBFRAMES;
+    offsg += LPC_GAIN_ORDER;
+  }
+
+
+  /* quantize coefficients */
+  for (k=0; k<KLT_ORDER_GAIN; k++) {
+
+    /* get index */
+    pos = WebRtcIsac_kQKltSelIndGain[k];
+    pos2= WebRtcIsac_lrint(tmpcoeffs_g[pos] / KLT_STEPSIZE);
+    index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k];
+    if (index_g[k] < 0) {
+      index_g[k] = 0;
+    }
+    else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k]) {
+      index_g[k] = WebRtcIsac_kQKltMaxIndGain[k];
+    }
+    index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[model][k]+index_g[k];
+
+    /* find quantization levels for coefficients */
+    tmpcoeffs_g[WebRtcIsac_kQKltSelIndGain[k]] = WebRtcIsac_kQKltLevelsGain[WebRtcIsac_kQKltOfLevelsGain[model]+index_ovr_g[k]];
+
+    /* Save data for creation of multiple bit streams */
+    encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_g[k];
+  }
+
+
+  /* entropy coding of quantization indices - gain */
+  WebRtcIsac_EncHistMulti(streamdata, index_g, WebRtcIsac_kQKltCdfPtrGain[model], KLT_ORDER_GAIN);
+
+  /* find quantization levels for coefficients */
+
+  /* left transform */
+  offsg = 0;
+  posg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    offs2 = 0;
+    for (k=0; k<LPC_GAIN_ORDER; k++) {
+      sum = 0;
+      pos = offsg;
+      pos2 = offs2;
+      for (n=0; n<LPC_GAIN_ORDER; n++)
+        sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[model][pos2++];
+      tmpcoeffs2_g[posg++] = sum;
+      offs2 += LPC_GAIN_ORDER;
+    }
+    offsg += LPC_GAIN_ORDER;
+  }
+
+  /* right transform */ // Transpose matrix
+  offsg = 0;
+  posg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<LPC_GAIN_ORDER; k++) {
+      sum = 0;
+      pos = k;
+      pos2 = j;
+      for (n=0; n<SUBFRAMES; n++) {
+        sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[model][pos2];
+        pos += LPC_GAIN_ORDER;
+        pos2 += SUBFRAMES;
+      }
+      tmpcoeffs_g[posg++] = sum;
+    }
+    offsg += LPC_GAIN_ORDER;
+  }
+
+
+  /* scaling, mean addition, and gain restoration */
+  posg = 0;
+  for (k=0; k<SUBFRAMES; k++) {
+    sum = tmpcoeffs_g[posg] / LPC_GAIN_SCALE;
+    sum += WebRtcIsac_kLpcMeansGain[model][posg];
+    LPCCoef_lo[k*(LPC_LOBAND_ORDER+1)] = exp(sum);
+    pos++;posg++;
+    sum = tmpcoeffs_g[posg] / LPC_GAIN_SCALE;
+    sum += WebRtcIsac_kLpcMeansGain[model][posg];
+    LPCCoef_hi[k*(LPC_HIBAND_ORDER+1)] = exp(sum);
+    pos++;posg++;
+  }
+
+}
+
+void
+WebRtcIsac_EncodeLpcGainUb(
+    double* lpGains,
+    Bitstr* streamdata,
+    int*    lpcGainIndex)
+{
+  double U[UB_LPC_GAIN_DIM];
+  int idx[UB_LPC_GAIN_DIM];
+  WebRtcIsac_ToLogDomainRemoveMean(lpGains);
+  WebRtcIsac_DecorrelateLPGain(lpGains, U);
+  WebRtcIsac_QuantizeLpcGain(U, idx);
+  // Store the index for re-encoding for FEC.
+  memcpy(lpcGainIndex, idx, UB_LPC_GAIN_DIM * sizeof(int));
+  WebRtcIsac_CorrelateLpcGain(U, lpGains);
+  WebRtcIsac_AddMeanToLinearDomain(lpGains);
+  WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
+}
+
+
+void
+WebRtcIsac_StoreLpcGainUb(
+    double* lpGains,
+    Bitstr* streamdata)
+{
+  double U[UB_LPC_GAIN_DIM];
+  int idx[UB_LPC_GAIN_DIM];
+  WebRtcIsac_ToLogDomainRemoveMean(lpGains);
+  WebRtcIsac_DecorrelateLPGain(lpGains, U);
+  WebRtcIsac_QuantizeLpcGain(U, idx);
+  WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
+}
+
+
+
+WebRtc_Word16
+WebRtcIsac_DecodeLpcGainUb(
+    double* lpGains,
+    Bitstr* streamdata)
+{
+  double U[UB_LPC_GAIN_DIM];
+  int idx[UB_LPC_GAIN_DIM];
+  int err;
+  err = WebRtcIsac_DecHistOneStepMulti(idx, streamdata,
+                                       WebRtcIsac_kLpcGainCdfMat, WebRtcIsac_kLpcGainEntropySearch,
+                                       UB_LPC_GAIN_DIM);
+  if(err < 0)
+  {
+    return -1;
+  }
+  WebRtcIsac_DequantizeLpcGain(idx, U);
+  WebRtcIsac_CorrelateLpcGain(U, lpGains);
+  WebRtcIsac_AddMeanToLinearDomain(lpGains);
+  return 0;
+}
+
+
+
+/* decode & dequantize RC */
+int WebRtcIsac_DecodeRc(Bitstr *streamdata, WebRtc_Word16 *RCQ15)
+{
+  int k, err;
+  int index[AR_ORDER];
+
+  /* entropy decoding of quantization indices */
+  err = WebRtcIsac_DecHistOneStepMulti(index, streamdata, WebRtcIsac_kQArRcCdfPtr,
+                                       WebRtcIsac_kQArRcInitIndex, AR_ORDER);
+  if (err<0)  // error check
+    return err;
+
+  /* find quantization levels for reflection coefficients */
+  for (k=0; k<AR_ORDER; k++)
+  {
+    RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]);
+  }
+
+  return 0;
+}
+
+
+
+/* quantize & code RC */
+void WebRtcIsac_EncodeRc(WebRtc_Word16 *RCQ15, Bitstr *streamdata)
+{
+  int k;
+  int index[AR_ORDER];
+
+  /* quantize reflection coefficients (add noise feedback?) */
+  for (k=0; k<AR_ORDER; k++)
+  {
+    index[k] = WebRtcIsac_kQArRcInitIndex[k];
+
+    if (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k]])
+    {
+      while (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1])
+        index[k]++;
+    }
+    else
+    {
+      while (RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ;
+    }
+
+    RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]);
+  }
+
+
+  /* entropy coding of quantization indices */
+  WebRtcIsac_EncHistMulti(streamdata, index, WebRtcIsac_kQArRcCdfPtr, AR_ORDER);
+}
+
+
+/* decode & dequantize squared Gain */
+int WebRtcIsac_DecodeGain2(Bitstr *streamdata, WebRtc_Word32 *gainQ10)
+{
+  int index, err;
+
+  /* entropy decoding of quantization index */
+  err = WebRtcIsac_DecHistOneStepMulti(&index, streamdata, WebRtcIsac_kQGainCdf_ptr,
+                                       WebRtcIsac_kQGainInitIndex, 1);
+  if (err<0)  // error check
+    return err;
+
+  /* find quantization level */
+  *gainQ10 = WebRtcIsac_kQGain2Levels[index];
+
+  return 0;
+}
+
+
+
+/* quantize & code squared Gain */
+int WebRtcIsac_EncodeGain2(WebRtc_Word32 *gainQ10, Bitstr *streamdata)
+{
+  int index;
+
+
+  /* find quantization index */
+  index = WebRtcIsac_kQGainInitIndex[0];
+  if (*gainQ10 > WebRtcIsac_kQGain2BoundaryLevels[index])
+  {
+    while (*gainQ10 > WebRtcIsac_kQGain2BoundaryLevels[index + 1])
+      index++;
+  }
+  else
+  {
+    while (*gainQ10 < WebRtcIsac_kQGain2BoundaryLevels[--index]) ;
+  }
+
+  /* dequantize */
+  *gainQ10 = WebRtcIsac_kQGain2Levels[index];
+
+
+  /* entropy coding of quantization index */
+  WebRtcIsac_EncHistMulti(streamdata, &index, WebRtcIsac_kQGainCdf_ptr, 1);
+
+  return 0;
+}
+
+
+/* code and decode Pitch Gains and Lags functions */
+
+/* decode & dequantize Pitch Gains */
+int WebRtcIsac_DecodePitchGain(Bitstr *streamdata, WebRtc_Word16 *PitchGains_Q12)
+{
+  int index_comb, err;
+  const WebRtc_UWord16 *WebRtcIsac_kQPitchGainCdf_ptr[1];
+
+  /* entropy decoding of quantization indices */
+  *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf;
+  err = WebRtcIsac_DecHistBisectMulti(&index_comb, streamdata, WebRtcIsac_kQPitchGainCdf_ptr, WebRtcIsac_kQCdfTableSizeGain, 1);
+  /* error check, Q_mean_Gain.. tables are of size 144 */
+  if ((err<0) || (index_comb<0) || (index_comb>144))
+    return -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN;
+
+  /* unquantize back to pitch gains by table look-up */
+  PitchGains_Q12[0] = WebRtcIsac_kQMeanGain1Q12[index_comb];
+  PitchGains_Q12[1] = WebRtcIsac_kQMeanGain2Q12[index_comb];
+  PitchGains_Q12[2] = WebRtcIsac_kQMeanGain3Q12[index_comb];
+  PitchGains_Q12[3] = WebRtcIsac_kQMeanGain4Q12[index_comb];
+
+  return 0;
+}
+
+
+/* quantize & code Pitch Gains */
+void WebRtcIsac_EncodePitchGain(WebRtc_Word16 *PitchGains_Q12, Bitstr *streamdata, ISAC_SaveEncData_t* encData)
+{
+  int k,j;
+  double C;
+  double S[PITCH_SUBFRAMES];
+  int index[3];
+  int index_comb;
+  const WebRtc_UWord16 *WebRtcIsac_kQPitchGainCdf_ptr[1];
+  double PitchGains[PITCH_SUBFRAMES] = {0,0,0,0};
+
+  /* take the asin */
+  for (k=0; k<PITCH_SUBFRAMES; k++)
+  {
+    PitchGains[k] = ((float)PitchGains_Q12[k])/4096;
+    S[k] = asin(PitchGains[k]);
+  }
+
+
+  /* find quantization index; only for the first three transform coefficients */
+  for (k=0; k<3; k++)
+  {
+    /*  transform */
+    C = 0.0;
+    for (j=0; j<PITCH_SUBFRAMES; j++)
+      C += WebRtcIsac_kTransform[k][j] * S[j];
+
+    /* quantize */
+    index[k] = WebRtcIsac_lrint(C / PITCH_GAIN_STEPSIZE);
+
+    /* check that the index is not outside the boundaries of the table */
+    if (index[k] < WebRtcIsac_kIndexLowerLimitGain[k]) index[k] = WebRtcIsac_kIndexLowerLimitGain[k];
+    else if (index[k] > WebRtcIsac_kIndexUpperLimitGain[k]) index[k] = WebRtcIsac_kIndexUpperLimitGain[k];
+    index[k] -= WebRtcIsac_kIndexLowerLimitGain[k];
+  }
+
+  /* calculate unique overall index */
+  index_comb = WebRtcIsac_kIndexMultsGain[0] * index[0] + WebRtcIsac_kIndexMultsGain[1] * index[1] + index[2];
+
+  /* unquantize back to pitch gains by table look-up */
+  PitchGains_Q12[0] = WebRtcIsac_kQMeanGain1Q12[index_comb];
+  PitchGains_Q12[1] = WebRtcIsac_kQMeanGain2Q12[index_comb];
+  PitchGains_Q12[2] = WebRtcIsac_kQMeanGain3Q12[index_comb];
+  PitchGains_Q12[3] = WebRtcIsac_kQMeanGain4Q12[index_comb];
+
+  /* entropy coding of quantization pitch gains */
+  *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf;
+  WebRtcIsac_EncHistMulti(streamdata, &index_comb, WebRtcIsac_kQPitchGainCdf_ptr, 1);
+  encData->pitchGain_index[encData->startIdx] = index_comb;
+
+}
+
+
+
+/* Pitch LAG */
+
+
/* Decode & dequantize the pitch lags.
 *
 * The already-decoded pitch gains select one of three lag codebooks
 * (low/mid/high voicing, split at mean-gain thresholds 0.2 and 0.4).
 * The first transform coefficient is decoded with
 * WebRtcIsac_DecHistBisectMulti and scaled by the step size; the
 * remaining three are decoded with WebRtcIsac_DecHistOneStepMulti and
 * mapped through per-codebook mean tables.  The four lags are then
 * reconstructed via the inverse transform S = T' * C.
 *
 * Returns 0 on success, -ISAC_RANGE_ERROR_DECODE_PITCH_LAG on error. */
int WebRtcIsac_DecodePitchLag(Bitstr *streamdata, WebRtc_Word16 *PitchGain_Q12, double *PitchLags)
{
  int k, err;
  double StepSize;
  double C;
  int index[PITCH_SUBFRAMES];
  double mean_gain;
  const double *mean_val2, *mean_val3, *mean_val4;
  const WebRtc_Word16 *lower_limit;
  const WebRtc_UWord16 *init_index;
  const WebRtc_UWord16 *cdf_size;
  const WebRtc_UWord16 **cdf;

  double PitchGain[4]={0,0,0,0};

  /* compute mean pitch gain */
  mean_gain = 0.0;
  for (k = 0; k < 4; k++)
  {
    /* Q12 -> float, matching the encoder side */
    PitchGain[k] = ((float)PitchGain_Q12[k])/4096;
    mean_gain += PitchGain[k];
  }
  mean_gain /= 4.0;

  /* voicing classification: pick the codebook for this voicing level */
  if (mean_gain < 0.2) {
    StepSize = WebRtcIsac_kQPitchLagStepsizeLo;
    cdf = WebRtcIsac_kQPitchLagCdfPtrLo;
    cdf_size = WebRtcIsac_kQPitchLagCdfSizeLo;
    mean_val2 = WebRtcIsac_kQMeanLag2Lo;
    mean_val3 = WebRtcIsac_kQMeanLag3Lo;
    mean_val4 = WebRtcIsac_kQMeanLag4Lo;
    lower_limit = WebRtcIsac_kQIndexLowerLimitLagLo;
    init_index = WebRtcIsac_kQInitIndexLagLo;
  } else if (mean_gain < 0.4) {
    StepSize = WebRtcIsac_kQPitchLagStepsizeMid;
    cdf = WebRtcIsac_kQPitchLagCdfPtrMid;
    cdf_size = WebRtcIsac_kQPitchLagCdfSizeMid;
    mean_val2 = WebRtcIsac_kQMeanLag2Mid;
    mean_val3 = WebRtcIsac_kQMeanLag3Mid;
    mean_val4 = WebRtcIsac_kQMeanLag4Mid;
    lower_limit = WebRtcIsac_kQIndexLowerLimitLagMid;
    init_index = WebRtcIsac_kQInitIndexLagMid;
  } else {
    StepSize = WebRtcIsac_kQPitchLagStepsizeHi;
    cdf = WebRtcIsac_kQPitchLagCdfPtrHi;
    cdf_size = WebRtcIsac_kQPitchLagCdfSizeHi;
    mean_val2 = WebRtcIsac_kQMeanLag2Hi;
    mean_val3 = WebRtcIsac_kQMeanLag3Hi;
    mean_val4 = WebRtcIsac_kQMeanLag4Hi;
    lower_limit = WebRtcIsac_kQindexLowerLimitLagHi;
    init_index = WebRtcIsac_kQInitIndexLagHi;
  }

  /* entropy decoding of quantization indices: first coefficient by
     bisection, the remaining three in one step */
  err = WebRtcIsac_DecHistBisectMulti(index, streamdata, cdf, cdf_size, 1);
  if ((err<0) || (index[0]<0))  // error check
    return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG;

  err = WebRtcIsac_DecHistOneStepMulti(index+1, streamdata, cdf+1, init_index, 3);
  if (err<0)  // error check
    return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG;


  /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */
  C = (index[0] + lower_limit[0]) * StepSize;
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] = WebRtcIsac_kTransformTranspose[k][0] * C;
  C = mean_val2[index[1]];
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] += WebRtcIsac_kTransformTranspose[k][1] * C;
  C = mean_val3[index[2]];
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] += WebRtcIsac_kTransformTranspose[k][2] * C;
  C = mean_val4[index[3]];
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] += WebRtcIsac_kTransformTranspose[k][3] * C;

  return 0;
}
+
+
+
/* Quantize & entropy-code the pitch lags.
 *
 * The pitch gains select the lag codebook and step size (voicing classes
 * split at mean-gain thresholds 0.2 and 0.4; the same classification as
 * WebRtcIsac_DecodePitchLag).  The four lags are transformed, the
 * coefficients quantized and clamped to the table limits, the quantized
 * lags are reconstructed in place via S = T' * C, and the indices are
 * entropy coded.  The mean gain and the indices are saved in encData for
 * creation of multiple bit streams. */
void WebRtcIsac_EncodePitchLag(double* PitchLags, WebRtc_Word16* PitchGain_Q12, Bitstr* streamdata, ISAC_SaveEncData_t* encData)
{
  int k, j;
  double StepSize;
  double C;
  int index[PITCH_SUBFRAMES];
  double mean_gain;
  const double *mean_val2, *mean_val3, *mean_val4;
  const WebRtc_Word16 *lower_limit, *upper_limit;
  const WebRtc_UWord16 **cdf;

  double PitchGain[4]={0,0,0,0};

  /* compute mean pitch gain */
  mean_gain = 0.0;
  for (k = 0; k < 4; k++)
  {
    /* Q12 -> float, matching the decoder side */
    PitchGain[k] = ((float)PitchGain_Q12[k])/4096;
    mean_gain += PitchGain[k];
  }
  mean_gain /= 4.0;

  /* Save data for creation of multiple bit streams */
  encData->meanGain[encData->startIdx] = mean_gain;

  /* voicing classification: pick the codebook for this voicing level */
  if (mean_gain < 0.2) {
    StepSize = WebRtcIsac_kQPitchLagStepsizeLo;
    cdf = WebRtcIsac_kQPitchLagCdfPtrLo;
    mean_val2 = WebRtcIsac_kQMeanLag2Lo;
    mean_val3 = WebRtcIsac_kQMeanLag3Lo;
    mean_val4 = WebRtcIsac_kQMeanLag4Lo;
    lower_limit = WebRtcIsac_kQIndexLowerLimitLagLo;
    upper_limit = WebRtcIsac_kQIndexUpperLimitLagLo;
  } else if (mean_gain < 0.4) {
    StepSize = WebRtcIsac_kQPitchLagStepsizeMid;
    cdf = WebRtcIsac_kQPitchLagCdfPtrMid;
    mean_val2 = WebRtcIsac_kQMeanLag2Mid;
    mean_val3 = WebRtcIsac_kQMeanLag3Mid;
    mean_val4 = WebRtcIsac_kQMeanLag4Mid;
    lower_limit = WebRtcIsac_kQIndexLowerLimitLagMid;
    upper_limit = WebRtcIsac_kQIndexUpperLimitLagMid;
  } else {
    StepSize = WebRtcIsac_kQPitchLagStepsizeHi;
    cdf = WebRtcIsac_kQPitchLagCdfPtrHi;
    mean_val2 = WebRtcIsac_kQMeanLag2Hi;
    mean_val3 = WebRtcIsac_kQMeanLag3Hi;
    mean_val4 = WebRtcIsac_kQMeanLag4Hi;
    lower_limit = WebRtcIsac_kQindexLowerLimitLagHi;
    upper_limit = WebRtcIsac_kQindexUpperLimitLagHi;
  }


  /* find quantization index */
  for (k=0; k<4; k++)
  {
    /*  transform */
    C = 0.0;
    for (j=0; j<PITCH_SUBFRAMES; j++)
      C += WebRtcIsac_kTransform[k][j] * PitchLags[j];

    /* quantize */
    index[k] = WebRtcIsac_lrint(C / StepSize);

    /* check that the index is not outside the boundaries of the table,
       then shift to a zero-based index */
    if (index[k] < lower_limit[k]) index[k] = lower_limit[k];
    else if (index[k] > upper_limit[k]) index[k] = upper_limit[k];
    index[k] -= lower_limit[k];

    /* Save data for creation of multiple bit streams */
    encData->pitchIndex[PITCH_SUBFRAMES*encData->startIdx + k] = index[k];
  }

  /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */
  C = (index[0] + lower_limit[0]) * StepSize;
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] = WebRtcIsac_kTransformTranspose[k][0] * C;
  C = mean_val2[index[1]];
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] += WebRtcIsac_kTransformTranspose[k][1] * C;
  C = mean_val3[index[2]];
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] += WebRtcIsac_kTransformTranspose[k][2] * C;
  C = mean_val4[index[3]];
  for (k=0; k<PITCH_SUBFRAMES; k++)
    PitchLags[k] += WebRtcIsac_kTransformTranspose[k][3] * C;


  /* entropy coding of quantization pitch lags */
  WebRtcIsac_EncHistMulti(streamdata, index, cdf, PITCH_SUBFRAMES);

}
+
+
+
+/* Routines for in-band signaling of bandwidth estimation */
+/* Histograms based on uniform distribution of indices */
+/* Move global variables later! */
+
+
/* CDF array for the frame-length indicator: 3 symbols, uniform
   (4 evenly spaced boundaries over the 16-bit range). */
const WebRtc_UWord16 WebRtcIsac_kFrameLengthCdf[4] = {
  0, 21845, 43690, 65535};

/* Pointer to the CDF array for the frame-length indicator. */
const WebRtc_UWord16 *WebRtcIsac_kFrameLengthCdf_ptr[1] = {WebRtcIsac_kFrameLengthCdf};

/* Initial CDF index for the decoder of the frame-length indicator. */
const WebRtc_UWord16 WebRtcIsac_kFrameLengthInitIndex[1] = {1};
+
+
+int WebRtcIsac_DecodeFrameLen(Bitstr *streamdata,
+                              WebRtc_Word16 *framesamples)
+{
+
+  int frame_mode, err;
+
+  err = 0;
+  /* entropy decoding of frame length [1:30ms,2:60ms] */
+  err = WebRtcIsac_DecHistOneStepMulti(&frame_mode, streamdata, WebRtcIsac_kFrameLengthCdf_ptr, WebRtcIsac_kFrameLengthInitIndex, 1);
+  if (err<0)  // error check
+    return -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH;
+
+  switch(frame_mode) {
+    case 1:
+      *framesamples = 480; /* 30ms */
+      break;
+    case 2:
+      *framesamples = 960; /* 60ms */
+      break;
+    default:
+      err = -ISAC_DISALLOWED_FRAME_MODE_DECODER;
+  }
+
+  return err;
+}
+
+int WebRtcIsac_EncodeFrameLen(WebRtc_Word16 framesamples, Bitstr *streamdata) {
+
+  int frame_mode, status;
+
+  status = 0;
+  frame_mode = 0;
+  /* entropy coding of frame length [1:480 samples,2:960 samples] */
+  switch(framesamples) {
+    case 480:
+      frame_mode = 1;
+      break;
+    case 960:
+      frame_mode = 2;
+      break;
+    default:
+      status = - ISAC_DISALLOWED_FRAME_MODE_ENCODER;
+  }
+
+  if (status < 0)
+    return status;
+
+  WebRtcIsac_EncHistMulti(streamdata, &frame_mode, WebRtcIsac_kFrameLengthCdf_ptr, 1);
+
+  return status;
+}
+
/* CDF array for the estimated bandwidth: 24 symbols, uniform
   (25 evenly spaced boundaries over the 16-bit range). */
static const WebRtc_UWord16 kBwCdf[25] = {
  0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037,
  32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074,
  62804, 65535};

/* Pointer to the CDF array for the estimated bandwidth. */
static const WebRtc_UWord16 *kBwCdfPtr[1] = { kBwCdf };

/* Initial CDF index for the decoder of the estimated bandwidth. */
static const WebRtc_UWord16 kBwInitIndex[1] = { 7 };
+
+
+/* Decode the sender's bandwidth-estimate index [0..23] from the bit
+ * stream. On success writes the index to |BWno| and returns the entropy
+ * decoder's non-negative result; returns a negative error otherwise. */
+int WebRtcIsac_DecodeSendBW(Bitstr *streamdata, WebRtc_Word16 *BWno) {
+  int bw_index;
+  int err;
+
+  err = WebRtcIsac_DecHistOneStepMulti(&bw_index, streamdata, kBwCdfPtr,
+                                       kBwInitIndex, 1);
+  if (err < 0) {
+    return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH;
+  }
+  *BWno = (WebRtc_Word16)bw_index;
+  return err;
+}
+
+/* Entropy-encode the receiver's bandwidth-estimate index [0..23] into
+ * the bit stream using the uniform bandwidth cdf above. */
+void WebRtcIsac_EncodeReceiveBw(int *BWno, Bitstr *streamdata) {
+  WebRtcIsac_EncHistMulti(streamdata, BWno, kBwCdfPtr, 1);
+}
+
+
+/* Re-quantize the lower-band LPC gains (used for transcoding) and
+ * return the quantization indices.
+ *
+ * Input:
+ *  - LPCCoef_lo, LPCCoef_hi : split-band LPC coefficients per sub-frame;
+ *                             element (order + 1) * k holds the gain of
+ *                             sub-frame k.
+ *  - model                  : model number selecting the KLT matrices,
+ *                             means, offsets and code books.
+ * Output:
+ *  - index_g                : clamped quantization indices of the
+ *                             KLT-transformed log gains.
+ */
+void WebRtcIsac_TranscodeLPCCoef(double *LPCCoef_lo, double *LPCCoef_hi, int model,
+                                 int *index_g) {
+
+  int j, k, n, pos, pos2, posg, offsg, offs2;
+  int index_ovr_g[KLT_ORDER_GAIN];
+  double tmpcoeffs_g[KLT_ORDER_GAIN];
+  double tmpcoeffs2_g[KLT_ORDER_GAIN];
+  double sum;
+
+  /* log gains, mean removal and scaling */
+  /* Interleave the lo- and hi-band log gains: two entries per sub-frame. */
+  posg = 0;
+  for (k=0; k<SUBFRAMES; k++) {
+    tmpcoeffs_g[posg] = log(LPCCoef_lo[(LPC_LOBAND_ORDER+1)*k]);
+    tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[model][posg];
+    tmpcoeffs_g[posg] *= LPC_GAIN_SCALE;
+    posg++;
+    tmpcoeffs_g[posg] = log(LPCCoef_hi[(LPC_HIBAND_ORDER+1)*k]);
+    tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[model][posg];
+    tmpcoeffs_g[posg] *= LPC_GAIN_SCALE;
+    posg++;
+  }
+
+  /* KLT  */
+
+  /* left transform */
+  /* Matrix multiply with kKltT1Gain within each block of
+   * LPC_GAIN_ORDER coefficients; result goes to tmpcoeffs2_g. */
+  offsg = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<LPC_GAIN_ORDER; k++) {
+      sum = 0;
+      pos = offsg;
+      pos2 = k;
+      for (n=0; n<LPC_GAIN_ORDER; n++) {
+        sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[model][pos2];
+        pos2 += LPC_GAIN_ORDER;
+      }
+      tmpcoeffs2_g[posg++] = sum;
+    }
+    offsg += LPC_GAIN_ORDER;
+  }
+
+  /* right transform */
+  /* Matrix multiply with kKltT2Gain across sub-frames (stride
+   * LPC_GAIN_ORDER); result goes back into tmpcoeffs_g. */
+  offsg = 0;
+  offs2 = 0;
+  for (j=0; j<SUBFRAMES; j++) {
+    posg = offsg;
+    for (k=0; k<LPC_GAIN_ORDER; k++) {
+      sum = 0;
+      pos = k;
+      pos2 = offs2;
+      for (n=0; n<SUBFRAMES; n++) {
+        sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[model][pos2++];
+        pos += LPC_GAIN_ORDER;
+      }
+      tmpcoeffs_g[posg++] = sum;
+    }
+    offs2 += SUBFRAMES;
+    offsg += LPC_GAIN_ORDER;
+  }
+
+
+  /* quantize coefficients */
+  for (k=0; k<KLT_ORDER_GAIN; k++) {
+
+    /* get index */
+    /* Round to the nearest quantization step, shift by the minimum
+     * index, and clamp to [0, max index]. */
+    pos = WebRtcIsac_kQKltSelIndGain[k];
+    pos2= WebRtcIsac_lrint(tmpcoeffs_g[pos] / KLT_STEPSIZE);
+    index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k];
+    if (index_g[k] < 0) {
+      index_g[k] = 0;
+    }
+    else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k]) {
+      index_g[k] = WebRtcIsac_kQKltMaxIndGain[k];
+    }
+    index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[model][k]+index_g[k];
+
+    /* find quantization levels for coefficients */
+    /* (written back into the local tmpcoeffs_g; only index_g is
+     *  returned to the caller) */
+    tmpcoeffs_g[WebRtcIsac_kQKltSelIndGain[k]] = WebRtcIsac_kQKltLevelsGain[WebRtcIsac_kQKltOfLevelsGain[model]+index_ovr_g[k]];
+  }
+}
+
+
+/* Decode and dequantize the upper-band LPC parameters.
+ * Entropy-decodes the shape indices (table set depends on |bandwidth|),
+ * reconstructs the LAR vectors in |lpcVecs|, then decodes the LPC gains
+ * into |percepFilterGains| (two gain sets in 16 kHz mode).
+ * Returns 0 on success, a negative error code otherwise. */
+int
+WebRtcIsac_DecodeLpcCoefUB(
+    Bitstr*     streamdata,
+    double*     lpcVecs,
+    double*     percepFilterGains,
+    WebRtc_Word16 bandwidth)
+{
+  int index_s[KLT_ORDER_SHAPE];
+  double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+  int err;
+
+  /* Entropy decoding of the quantization indices; cdf tables and the
+   * number of indices depend on the bandwidth mode. */
+  if (bandwidth == isac12kHz) {
+    err = WebRtcIsac_DecHistOneStepMulti(
+        index_s, streamdata, WebRtcIsac_kLpcShapeCdfMatUb12,
+        WebRtcIsac_kLpcShapeEntropySearchUb12,
+        UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME);
+  } else if (bandwidth == isac16kHz) {
+    err = WebRtcIsac_DecHistOneStepMulti(
+        index_s, streamdata, WebRtcIsac_kLpcShapeCdfMatUb16,
+        WebRtcIsac_kLpcShapeEntropySearchUb16,
+        UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME);
+  } else {
+    return -1;
+  }
+
+  if (err < 0) {
+    return err;
+  }
+
+  /* Rebuild the LAR vectors: dequantize, undo the inter- and
+   * intra-vector transforms, and add back the LAR means. */
+  WebRtcIsac_DequantizeLpcParam(index_s, lpcVecs, bandwidth);
+  WebRtcIsac_CorrelateInterVec(lpcVecs, U, bandwidth);
+  WebRtcIsac_CorrelateIntraVec(U, lpcVecs, bandwidth);
+  WebRtcIsac_AddLarMean(lpcVecs, bandwidth);
+
+  WebRtcIsac_DecodeLpcGainUb(percepFilterGains, streamdata);
+
+  if (bandwidth == isac16kHz) {
+    /* The 16 kHz mode carries a second set of gains. */
+    WebRtcIsac_DecodeLpcGainUb(&percepFilterGains[SUBFRAMES], streamdata);
+  }
+
+  return 0;
+}
+
+/* Encode one bit indicating whether the encoded audio bandwidth is
+ * 0-12 kHz (symbol 0) or 0-16 kHz (symbol 1).
+ * Returns 0 on success, a negative error for any other bandwidth. */
+WebRtc_Word16
+WebRtcIsac_EncodeBandwidth(
+    enum ISACBandwidth bandwidth,
+    Bitstr*            streamData)
+{
+  int bandwidthMode;
+
+  if (bandwidth == isac12kHz) {
+    bandwidthMode = 0;
+  } else if (bandwidth == isac16kHz) {
+    bandwidthMode = 1;
+  } else {
+    return -ISAC_DISALLOWED_ENCODER_BANDWIDTH;
+  }
+
+  WebRtcIsac_EncHistMulti(streamData, &bandwidthMode,
+                          kOneBitEqualProbCdf_ptr, 1);
+  return 0;
+}
+
+/* Decode the one-bit bandwidth indicator and set |bandwidth| to
+ * isac12kHz or isac16kHz accordingly.
+ * Returns 0 on success, a negative error code otherwise. */
+WebRtc_Word16
+WebRtcIsac_DecodeBandwidth(
+    Bitstr*             streamData,
+    enum ISACBandwidth* bandwidth)
+{
+  int bandwidthMode;
+
+  if (WebRtcIsac_DecHistOneStepMulti(&bandwidthMode, streamData,
+                                     kOneBitEqualProbCdf_ptr,
+                                     kOneBitEqualProbInitIndex, 1) < 0) {
+    /* error check */
+    return -ISAC_RANGE_ERROR_DECODE_BANDWITH;
+  }
+
+  if (bandwidthMode == 0) {
+    *bandwidth = isac12kHz;
+  } else if (bandwidthMode == 1) {
+    *bandwidth = isac16kHz;
+  } else {
+    return -ISAC_DISALLOWED_BANDWIDTH_MODE_DECODER;
+  }
+  return 0;
+}
+
+/* Encode one bit of jitter information (0 = low jitter, 1 = high
+ * jitter). Returns 0 on success, -1 if |jitterIndex| is out of range. */
+WebRtc_Word16
+WebRtcIsac_EncodeJitterInfo(
+    WebRtc_Word32 jitterIndex,
+    Bitstr*     streamData)
+{
+  /* A plain 'int' is used to match the entropy coder's prototype and
+   * avoid a compiler warning on Linux. */
+  int intVar;
+
+  if ((jitterIndex < 0) || (jitterIndex > 1)) {
+    return -1;
+  }
+  intVar = (int)(jitterIndex);
+
+  /* The bandwidth cdf table is reused here; both fields are a single
+   * symbol taking two values with equal probability. */
+  WebRtcIsac_EncHistMulti(streamData, &intVar,
+                          kOneBitEqualProbCdf_ptr, 1);
+  return 0;
+}
+
+/* Decode one bit of jitter information (0 = low jitter, 1 = high
+ * jitter). Returns 0 on success, a negative error code otherwise. */
+WebRtc_Word16
+WebRtcIsac_DecodeJitterInfo(
+    Bitstr*      streamData,
+    WebRtc_Word32* jitterInfo)
+{
+  int intVar;
+
+  /* The bandwidth cdf table is reused here; both fields are a single
+   * symbol taking two values with equal probability. */
+  if (WebRtcIsac_DecHistOneStepMulti(&intVar, streamData,
+                                     kOneBitEqualProbCdf_ptr,
+                                     kOneBitEqualProbInitIndex, 1) < 0) {
+    /* error check */
+    return -ISAC_RANGE_ERROR_DECODE_BANDWITH;
+  }
+  *jitterInfo = (WebRtc_Word16)(intVar);
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/entropy_coding.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/entropy_coding.h
new file mode 100644
index 0000000..8446bcf
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/entropy_coding.h
@@ -0,0 +1,412 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * entropy_coding.h
+ *
+ * This header file declares all of the functions used to arithmetically
+ * encode the iSAC bitstream.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_
+
+#include "structs.h"
+
+/* decode complex spectrum (return number of bytes in stream) */
+int WebRtcIsac_DecodeSpecLb(Bitstr *streamdata,
+                            double *fr,
+                            double *fi,
+                            WebRtc_Word16 AvgPitchGain_Q12);
+
+/******************************************************************************
+ * WebRtcIsac_DecodeSpecUB16()
+ * Decode real and imaginary part of the DFT coefficients, given a bit-stream.
+ * This function is called when the codec is in 0-16 kHz bandwidth.
+ * The decoded DFT coefficient can be transformed to time domain by
+ * WebRtcIsac_Time2Spec().
+ *
+ * Input:
+ *  - streamdata            : pointer to a structure containing the encoded
+ *                            data and the parameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are written to.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are written to.
+ *
+ * Return value             : < 0 if an error occurs
+ *                              0 if succeeded.
+ */
+int WebRtcIsac_DecodeSpecUB16(
+    Bitstr* streamdata,
+    double* fr,
+    double* fi);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeSpecUB12()
+ * Decode real and imaginary part of the DFT coefficients, given a bit-stream.
+ * This function is called when the codec is in 0-12 kHz bandwidth.
+ * The decoded DFT coefficient can be transformed to time domain by
+ * WebRtcIsac_Time2Spec().
+ *
+ * Input:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are written to.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are written to.
+ *
+ * Return value             : < 0 if an error occures
+ *                              0 if succeeded.
+ */
+int WebRtcIsac_DecodeSpecUB12(
+    Bitstr* streamdata,
+    double* fr,
+    double* fi);
+
+
+/* encode complex spectrum */
+int WebRtcIsac_EncodeSpecLb(const WebRtc_Word16* fr,
+                            const WebRtc_Word16* fi,
+                            Bitstr* streamdata,
+                            WebRtc_Word16 AvgPitchGain_Q12);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeSpecUB16()
+ * Quantize and encode real and imaginary part of the DFT coefficients.
+ * This function is called when the codec is in 0-16 kHz bandwidth.
+ * The real and imaginary part are computed by calling WebRtcIsac_Time2Spec().
+ *
+ *
+ * Input:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are stored.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are stored.
+ *
+ * Output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Return value             : < 0 if an error occures
+ *                              0 if succeeded.
+ */
+int WebRtcIsac_EncodeSpecUB16(
+    const WebRtc_Word16* fr,
+    const WebRtc_Word16* fi,
+    Bitstr*            streamdata);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeSpecUB12()
+ * Quantize and encode real and imaginary part of the DFT coefficients.
+ * This function is called when the codec is in 0-12 kHz bandwidth.
+ * The real and imaginary part are computed by calling WebRtcIsac_Time2Spec().
+ *
+ *
+ * Input:
+ *      -*fr                : pointer to a buffer where the real part of DFT
+ *                            coefficients are stored.
+ *      -*fi                : pointer to a buffer where the imaginary part
+ *                            of DFT coefficients are stored.
+ *
+ * Output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Return value             : < 0 if an error occures
+ *                              0 if succeeded.
+ */
+int WebRtcIsac_EncodeSpecUB12(
+    const WebRtc_Word16* fr,
+    const WebRtc_Word16* fi,
+    Bitstr*            streamdata);
+
+
+/* decode & dequantize LPC Coef */
+int WebRtcIsac_DecodeLpcCoef(Bitstr *streamdata, double *LPCCoef, int *outmodel);
+int WebRtcIsac_DecodeLpcCoefUB(
+    Bitstr*     streamdata,
+    double*     lpcVecs,
+    double*     percepFilterGains,
+    WebRtc_Word16 bandwidth);
+
+int WebRtcIsac_DecodeLpc(Bitstr *streamdata, double *LPCCoef_lo, double *LPCCoef_hi, int *outmodel);
+
+/* quantize & code LPC Coef */
+void WebRtcIsac_EncodeLpcLb(double *LPCCoef_lo, double *LPCCoef_hi, int *model, double *size, Bitstr *streamdata, ISAC_SaveEncData_t* encData);
+void WebRtcIsac_EncodeLpcGainLb(double *LPCCoef_lo, double *LPCCoef_hi, int model, Bitstr *streamdata, ISAC_SaveEncData_t* encData);
+
+/******************************************************************************
+ * WebRtcIsac_EncodeLpcUB()
+ * Encode LPC parameters, given as A-polynomial, of upper-band. The encoding
+ * is performed in LAR domain.
+ * For the upper-band, we compute and encode LPC of some sub-frames, LPC of
+ * other sub-frames are computed by linear interpolation, in LAR domain. This
+ * function performs the interpolation and returns the LPC of all sub-frames.
+ *
+ * Inputs:
+ *  - lpcCoef               : a buffer containing A-polynomials of sub-frames
+ *                            (excluding first coefficient that is 1).
+ *  - bandwidth             : specifies if the codec is operating at 0-12 kHz
+ *                            or 0-16 kHz mode.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *  - interpolLPCCoeff      : Decoded and interpolated LPC (A-polynomial)
+ *                            of all sub-frames.
+ *                            If LP analysis is of order K, and there are N
+ *                            sub-frames then this is a buffer of size
+ *                            (k + 1) * N, each vector starts with the LPC gain
+ *                            of the corresponding sub-frame. The LPC gains
+ *                            are encoded and inserted after this function is
+ *                            called. The first A-coefficient which is 1 is not
+ *                            included.
+ *
+ * Return value             : 0 if encoding is successful,
+ *                           <0 if failed to encode.
+ */
+WebRtc_Word16 WebRtcIsac_EncodeLpcUB(
+    double*                  lpcCoeff,
+    Bitstr*                  streamdata,
+    double*                  interpolLPCCoeff,
+    WebRtc_Word16              bandwidth,
+    ISACUBSaveEncDataStruct* encData);
+
+/******************************************************************************
+ * WebRtcIsac_DecodeInterpolLpcUb()
+ * Decode LPC coefficients and interpolate to get the coefficients for all
+ * sub-frames.
+ *
+ * Inputs:
+ *  - bandwidth             : specifies if the codec is in 0-12 kHz or
+ *                            0-16 kHz mode.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *  - percepFilterParam     : Decoded and interpolated LPC (A-polynomial) of
+ *                            all sub-frames.
+ *                            If LP analysis is of order K, and there are N
+ *                            sub-frames then this is a buffer of size
+ *                            (k + 1) * N, each vector starts with the LPC gain
+ *                            of the corresponding sub-frame. The LPC gains
+ *                            are encoded and inserted after this function is
+ *                            called. The first A-coefficient which is 1 is not
+ *                            included.
+ *
+ * Return value             : 0 if encoding is successful,
+ *                           <0 if failed to encode.
+ */
+WebRtc_Word16 WebRtcIsac_DecodeInterpolLpcUb(
+    Bitstr*     streamdata,
+    double*     percepFilterParam,
+    WebRtc_Word16 bandwidth);
+
+/* decode & dequantize RC */
+int WebRtcIsac_DecodeRc(Bitstr *streamdata, WebRtc_Word16 *RCQ15);
+
+/* quantize & code RC */
+void WebRtcIsac_EncodeRc(WebRtc_Word16 *RCQ15, Bitstr *streamdata);
+
+/* decode & dequantize squared Gain */
+int WebRtcIsac_DecodeGain2(Bitstr *streamdata, WebRtc_Word32 *Gain2);
+
+/* quantize & code squared Gain (input is squared gain) */
+int WebRtcIsac_EncodeGain2(WebRtc_Word32 *gain2, Bitstr *streamdata);
+
+void WebRtcIsac_EncodePitchGain(WebRtc_Word16* PitchGains_Q12, Bitstr* streamdata,  ISAC_SaveEncData_t* encData);
+
+void WebRtcIsac_EncodePitchLag(double* PitchLags, WebRtc_Word16* PitchGain_Q12, Bitstr* streamdata, ISAC_SaveEncData_t* encData);
+
+int WebRtcIsac_DecodePitchGain(Bitstr *streamdata, WebRtc_Word16 *PitchGain_Q12);
+int WebRtcIsac_DecodePitchLag(Bitstr *streamdata, WebRtc_Word16 *PitchGain_Q12, double *PitchLag);
+
+int WebRtcIsac_DecodeFrameLen(Bitstr *streamdata, WebRtc_Word16 *framelength);
+int WebRtcIsac_EncodeFrameLen(WebRtc_Word16 framelength, Bitstr *streamdata);
+int WebRtcIsac_DecodeSendBW(Bitstr *streamdata, WebRtc_Word16 *BWno);
+void WebRtcIsac_EncodeReceiveBw(int *BWno, Bitstr *streamdata);
+
+/* step-down */
+void WebRtcIsac_Poly2Rc(double *a, int N, double *RC);
+
+/* step-up */
+void WebRtcIsac_Rc2Poly(double *RC, int N, double *a);
+
+void WebRtcIsac_TranscodeLPCCoef(double *LPCCoef_lo, double *LPCCoef_hi, int model,
+                                 int *index_g);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeLpcGainUb()
+ * Encode LPC gains of sub-Frames.
+ *
+ * Input/outputs:
+ *  - lpGains               : a buffer which contains 'SUBFRAME' number of
+ *                            LP gains to be encoded. The input values are
+ *                            overwritten by the quantized values.
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *  - lpcGainIndex          : quantization indices for lpc gains, these will
+ *                            be stored to be used  for FEC.
+ */
+void WebRtcIsac_EncodeLpcGainUb(
+    double* lpGains,
+    Bitstr* streamdata,
+    int*    lpcGainIndex);
+
+
+/******************************************************************************
+ * WebRtcIsac_StoreLpcGainUb()
+ * Store LPC gains of sub-frames in 'streamdata'.
+ *
+ * Input:
+ *  - lpGains               : a buffer which contains 'SUBFRAME' number of
+ *                            LP gains to be encoded.
+ * Input/outputs:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ */
+void WebRtcIsac_StoreLpcGainUb(
+    double* lpGains,
+    Bitstr* streamdata);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeLpcGainUb()
+ * Decode the LPC gain of sub-frames.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *  - lpGains               : a buffer where decoded LPC gains will be stored.
+ *
+ * Return value             : 0 if succeeded.
+ *                           <0 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_DecodeLpcGainUb(
+    double* lpGains,
+    Bitstr* streamdata);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeBandwidth()
+ * Encode if the bandwidth of encoded audio is 0-12 kHz or 0-16 kHz.
+ *
+ * Input:
+ *  - bandwidth             : an enumerator specifying if the codec in is
+ *                            0-12 kHz or 0-16 kHz mode.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Return value             : 0 if succeeded.
+ *                           <0 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_EncodeBandwidth(
+    enum ISACBandwidth bandwidth,
+    Bitstr*            streamData);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeBandwidth()
+ * Decode the bandwidth of the encoded audio, i.e. if the bandwidth is 0-12 kHz
+ * or 0-16 kHz.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *  - bandwidth             : an enumerator specifying if the codec is in
+ *                            0-12 kHz or 0-16 kHz mode.
+ *
+ * Return value             : 0 if succeeded.
+ *                           <0 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_DecodeBandwidth(
+    Bitstr*             streamData,
+    enum ISACBandwidth* bandwidth);
+
+
+/******************************************************************************
+ * WebRtcIsac_EncodeJitterInfo()
+ * Encode the jitter information.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Input:
+ *  - jitterInfo            : one bit of info specifying if the channel is
+ *                            in high/low jitter. Zero indicates low jitter
+ *                            and one indicates high jitter.
+ *
+ * Return value             : 0 if succeeded.
+ *                           <0 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_EncodeJitterInfo(
+    WebRtc_Word32 jitterIndex,
+    Bitstr*     streamData);
+
+
+/******************************************************************************
+ * WebRtcIsac_DecodeJitterInfo()
+ * Decode the jitter information.
+ *
+ * Input/output:
+ *  - streamdata            : pointer to a stucture containg the encoded
+ *                            data and theparameters needed for entropy
+ *                            coding.
+ *
+ * Output:
+ *  - jitterInfo            : one bit of info specifying if the channel is
+ *                            in high/low jitter. Zero indicates low jitter
+ *                            and one indicates high jitter.
+ *
+ * Return value             : 0 if succeeded.
+ *                           <0 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_DecodeJitterInfo(
+    Bitstr*      streamData,
+    WebRtc_Word32* jitterInfo);
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/fft.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/fft.c
new file mode 100644
index 0000000..c824798
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/fft.c
@@ -0,0 +1,947 @@
+/*
+ * Copyright(c)1995,97 Mark Olesen <olesen@me.QueensU.CA>
+ *    Queen's Univ at Kingston (Canada)
+ *
+ * Permission to use, copy, modify, and distribute this software for
+ * any purpose without fee is hereby granted, provided that this
+ * entire notice is included in all copies of any software which is
+ * or includes a copy or modification of this software and in all
+ * copies of the supporting documentation for such software.
+ *
+ * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
+ * IMPLIED WARRANTY.  IN PARTICULAR, NEITHER THE AUTHOR NOR QUEEN'S
+ * UNIVERSITY AT KINGSTON MAKES ANY REPRESENTATION OR WARRANTY OF ANY
+ * KIND CONCERNING THE MERCHANTABILITY OF THIS SOFTWARE OR ITS
+ * FITNESS FOR ANY PARTICULAR PURPOSE.
+ *
+ * All of which is to say that you can do what you like with this
+ * source code provided you don't try to sell it as your own and you
+ * include an unaltered copy of this message (including the
+ * copyright).
+ *
+ * It is also implicitly understood that bug fixes and improvements
+ * should make their way back to the general Internet community so
+ * that everyone benefits.
+ *
+ * Changes:
+ *   Trivial type modifications by the WebRTC authors.
+ */
+
+
+/*
+ * File:
+ * WebRtcIsac_Fftn.c
+ *
+ * Public:
+ * WebRtcIsac_Fftn / fftnf ();
+ *
+ * Private:
+ * WebRtcIsac_Fftradix / fftradixf ();
+ *
+ * Descript:
+ * multivariate complex Fourier transform, computed in place
+ * using mixed-radix Fast Fourier Transform algorithm.
+ *
+ * Fortran code by:
+ * RC Singleton, Stanford Research Institute, Sept. 1968
+ *
+ * translated by f2c (version 19950721).
+ *
+ * int WebRtcIsac_Fftn (int ndim, const int dims[], REAL Re[], REAL Im[],
+ *     int iSign, double scaling);
+ *
+ * NDIM = the total number of dimensions
+ * DIMS = a vector of array sizes
+ * if NDIM is zero then DIMS must be zero-terminated
+ *
+ * RE and IM hold the real and imaginary components of the data, and return
+ * the resulting real and imaginary Fourier coefficients.  Multidimensional
+ * data *must* be allocated contiguously.  There is no limit on the number
+ * of dimensions.
+ *
+ * ISIGN = the sign of the complex exponential (ie, forward or inverse FFT)
+ * the magnitude of ISIGN (normally 1) is used to determine the
+ * correct indexing increment (see below).
+ *
+ * SCALING = normalizing constant by which the final result is *divided*
+ * if SCALING == -1, normalize by total dimension of the transform
+ * if SCALING <  -1, normalize by the square-root of the total dimension
+ *
+ * example:
+ * tri-variate transform with Re[n1][n2][n3], Im[n1][n2][n3]
+ *
+ * int dims[3] = {n1,n2,n3}
+ * WebRtcIsac_Fftn (3, dims, Re, Im, 1, scaling);
+ *
+ *-----------------------------------------------------------------------*
+ * int WebRtcIsac_Fftradix (REAL Re[], REAL Im[], size_t nTotal, size_t nPass,
+ *   size_t nSpan, int iSign, size_t max_factors,
+ *   size_t max_perm);
+ *
+ * RE, IM - see above documentation
+ *
+ * Although there is no limit on the number of dimensions, WebRtcIsac_Fftradix() must
+ * be called once for each dimension, but the calls may be in any order.
+ *
+ * NTOTAL = the total number of complex data values
+ * NPASS  = the dimension of the current variable
+ * NSPAN/NPASS = the spacing of consecutive data values while indexing the
+ * current variable
+ * ISIGN - see above documentation
+ *
+ * example:
+ * tri-variate transform with Re[n1][n2][n3], Im[n1][n2][n3]
+ *
+ * WebRtcIsac_Fftradix (Re, Im, n1*n2*n3, n1,       n1, 1, maxf, maxp);
+ * WebRtcIsac_Fftradix (Re, Im, n1*n2*n3, n2,    n1*n2, 1, maxf, maxp);
+ * WebRtcIsac_Fftradix (Re, Im, n1*n2*n3, n3, n1*n2*n3, 1, maxf, maxp);
+ *
+ * single-variate transform,
+ *    NTOTAL = N = NSPAN = (number of complex data values),
+ *
+ * WebRtcIsac_Fftradix (Re, Im, n, n, n, 1, maxf, maxp);
+ *
+ * The data can also be stored in a single array with alternating real and
+ * imaginary parts, the magnitude of ISIGN is changed to 2 to give correct
+ * indexing increment, and data [0] and data [1] used to pass the initial
+ * addresses for the sequences of real and imaginary values,
+ *
+ * example:
+ * REAL data [2*NTOTAL];
+ * WebRtcIsac_Fftradix ( &data[0], &data[1], NTOTAL, nPass, nSpan, 2, maxf, maxp);
+ *
+ * for temporary allocation:
+ *
+ * MAX_FACTORS >= the maximum prime factor of NPASS
+ * MAX_PERM >= the number of prime factors of NPASS.  In addition,
+ * if the square-free portion K of NPASS has two or more prime
+ * factors, then MAX_PERM >= (K-1)
+ *
+ * storage in FACTOR for a maximum of 15 prime factors of NPASS. if NPASS
+ * has more than one square-free factor, the product of the square-free
+ * factors must be <= 210 array storage for maximum prime factor of 23 the
+ * following two constants should agree with the array dimensions.
+ *
+ *----------------------------------------------------------------------*/
+#include "fft.h"
+
+#include <stdlib.h>
+#include <math.h>
+
+
+
+/* double precision routine */
+static int
+WebRtcIsac_Fftradix (double Re[], double Im[],
+                    size_t nTotal, size_t nPass, size_t nSpan, int isign,
+                    int max_factors, unsigned int max_perm,
+                    FFTstr *fftstate);
+
+
+
+#ifndef M_PI
+# define M_PI 3.14159265358979323846264338327950288
+#endif
+
+#ifndef SIN60
+# define SIN60 0.86602540378443865 /* sin(60 deg) */
+# define COS72 0.30901699437494742 /* cos(72 deg) */
+# define SIN72 0.95105651629515357 /* sin(72 deg) */
+#endif
+
+# define REAL  double
+# define FFTN  WebRtcIsac_Fftn
+# define FFTNS  "fftn"
+# define FFTRADIX WebRtcIsac_Fftradix
+# define FFTRADIXS "fftradix"
+
+
+/*
+ * Multivariate complex Fourier transform, computed in place.
+ *
+ * Input:
+ *  - ndim     : number of dimensions (0 => dims[] is zero-terminated).
+ *  - dims     : size of each dimension; entries must be positive.
+ *  - iSign    : sign of the complex exponential; its magnitude is the
+ *               indexing increment (2 for interleaved real/imag data).
+ *  - scaling  : the final result is divided by this constant;
+ *               == -1 => divide by the total transform dimension,
+ *               <  -1 => divide by the square root of that dimension,
+ *               0 or 1 => no scaling.
+ * Input/output:
+ *  - Re, Im   : real/imaginary data, overwritten with the coefficients.
+ *  - fftstate : scratch state used by the radix routine.
+ *
+ * Return value : 0 on success, negative on error.
+ */
+int  WebRtcIsac_Fftns(unsigned int ndim, const int dims[],
+                     double Re[],
+                     double Im[],
+                     int iSign,
+                     double scaling,
+                     FFTstr *fftstate)
+{
+
+  size_t nSpan, nPass, nTotal;
+  unsigned int i;
+  int ret, max_factors, max_perm;
+
+  /*
+   * tally the number of elements in the data array
+   * and determine the number of dimensions
+   */
+  nTotal = 1;
+  if (ndim && dims [0])
+  {
+    for (i = 0; i < ndim; i++)
+    {
+      if (dims [i] <= 0)
+      {
+        return -1;
+      }
+      nTotal *= dims [i];
+    }
+  }
+  else
+  {
+    /* zero-terminated dims[]: count the dimensions ourselves */
+    ndim = 0;
+    for (i = 0; dims [i]; i++)
+    {
+      if (dims [i] <= 0)
+      {
+        return -1;
+      }
+      nTotal *= dims [i];
+      ndim++;
+    }
+  }
+
+  /* determine maximum number of factors and permutations */
+#if 1
+  /*
+   * follow John Beale's example, just use the largest dimension and don't
+   * worry about excess allocation.  May be someone else will do it?
+   */
+  max_factors = max_perm = 1;
+  for (i = 0; i < ndim; i++)
+  {
+    nSpan = dims [i];
+    if ((int)nSpan > max_factors)
+    {
+      max_factors = (int)nSpan;
+    }
+    if ((int)nSpan > max_perm)
+    {
+      max_perm = (int)nSpan;
+    }
+  }
+#else
+  /* use the constants used in the original Fortran code */
+  max_factors = 23;
+  max_perm = 209;
+#endif
+  /* loop over the dimensions: */
+  nPass = 1;
+  for (i = 0; i < ndim; i++)
+  {
+    nSpan = dims [i];
+    nPass *= nSpan;
+    ret = FFTRADIX (Re, Im, nTotal, nSpan, nPass, iSign,
+                    max_factors, max_perm, fftstate);
+    /* exit, clean-up already done */
+    if (ret)
+      return ret;
+  }
+
+  /* Divide through by the normalizing constant: */
+  if (scaling && scaling != 1.0)
+  {
+    if (iSign < 0) iSign = -iSign;
+    if (scaling < 0.0)
+    {
+      /* Bug fix: test the caller-supplied scaling *before* overwriting
+       * it, so scaling < -1 selects the documented sqrt normalization.
+       * The old code compared the already-assigned (positive) nTotal
+       * against -1, making the sqrt branch unreachable. */
+      if (scaling < -1.0)
+        scaling = sqrt ((double)nTotal);
+      else
+        scaling = (double)nTotal;
+    }
+    scaling = 1.0 / scaling; /* multiply is often faster */
+    for (i = 0; i < nTotal; i += iSign)
+    {
+      Re [i] *= scaling;
+      Im [i] *= scaling;
+    }
+  }
+  return 0;
+}
+
+/*
+ * singleton's mixed radix routine
+ *
+ * could move allocation out to WebRtcIsac_Fftns(), but leave it here so that it's
+ * possible to make this a standalone function
+ */
+
+static int   FFTRADIX (REAL Re[],
+                       REAL Im[],
+                       size_t nTotal,
+                       size_t nPass,
+                       size_t nSpan,
+                       int iSign,
+                       int max_factors,
+                       unsigned int max_perm,
+                       FFTstr *fftstate)
+{
+  int ii, mfactor, kspan, ispan, inc;
+  int j, jc, jf, jj, k, k1, k2, k3, k4, kk, kt, nn, ns, nt;
+
+
+  REAL radf;
+  REAL c1, c2, c3, cd, aa, aj, ak, ajm, ajp, akm, akp;
+  REAL s1, s2, s3, sd, bb, bj, bk, bjm, bjp, bkm, bkp;
+
+  REAL *Rtmp = NULL; /* temp space for real part*/
+  REAL *Itmp = NULL; /* temp space for imaginary part */
+  REAL *Cos = NULL; /* Cosine values */
+  REAL *Sin = NULL; /* Sine values */
+
+  REAL s60 = SIN60;  /* sin(60 deg) */
+  REAL c72 = COS72;  /* cos(72 deg) */
+  REAL s72 = SIN72;  /* sin(72 deg) */
+  REAL pi2 = M_PI;  /* use PI first, 2 PI later */
+
+
+  fftstate->SpaceAlloced = 0;
+  fftstate->MaxPermAlloced = 0;
+
+
+  // initialize to avoid warnings
+  k3 = c2 = c3 = s2 = s3 = 0.0;
+
+  if (nPass < 2)
+    return 0;
+
+  /*  allocate storage */
+  if (fftstate->SpaceAlloced < max_factors * sizeof (REAL))
+  {
+#ifdef SUN_BROKEN_REALLOC
+    if (!fftstate->SpaceAlloced) /* first time */
+    {
+      fftstate->SpaceAlloced = max_factors * sizeof (REAL);
+    }
+    else
+    {
+#endif
+      fftstate->SpaceAlloced = max_factors * sizeof (REAL);
+#ifdef SUN_BROKEN_REALLOC
+    }
+#endif
+  }
+  else
+  {
+    /* allow full use of alloc'd space */
+    max_factors = fftstate->SpaceAlloced / sizeof (REAL);
+  }
+  if (fftstate->MaxPermAlloced < max_perm)
+  {
+#ifdef SUN_BROKEN_REALLOC
+    if (!fftstate->MaxPermAlloced) /* first time */
+    else
+#endif
+      fftstate->MaxPermAlloced = max_perm;
+  }
+  else
+  {
+    /* allow full use of alloc'd space */
+    max_perm = fftstate->MaxPermAlloced;
+  }
+  if (fftstate->Tmp0 == NULL || fftstate->Tmp1 == NULL || fftstate->Tmp2 == NULL || fftstate->Tmp3 == NULL
+      || fftstate->Perm == NULL) {
+    return -1;
+  }
+
+  /* assign pointers */
+  Rtmp = (REAL *) fftstate->Tmp0;
+  Itmp = (REAL *) fftstate->Tmp1;
+  Cos  = (REAL *) fftstate->Tmp2;
+  Sin  = (REAL *) fftstate->Tmp3;
+
+  /*
+   * Function Body
+   */
+  inc = iSign;
+  if (iSign < 0) {
+    s72 = -s72;
+    s60 = -s60;
+    pi2 = -pi2;
+    inc = -inc;  /* absolute value */
+  }
+
+  /* adjust for strange increments */
+  nt = inc * (int)nTotal;
+  ns = inc * (int)nSpan;
+  kspan = ns;
+
+  nn = nt - inc;
+  jc = ns / (int)nPass;
+  radf = pi2 * (double) jc;
+  pi2 *= 2.0;   /* use 2 PI from here on */
+
+  ii = 0;
+  jf = 0;
+  /*  determine the factors of n */
+  mfactor = 0;
+  k = (int)nPass;
+  while (k % 16 == 0) {
+    mfactor++;
+    fftstate->factor [mfactor - 1] = 4;
+    k /= 16;
+  }
+  j = 3;
+  jj = 9;
+  do {
+    while (k % jj == 0) {
+      mfactor++;
+      fftstate->factor [mfactor - 1] = j;
+      k /= jj;
+    }
+    j += 2;
+    jj = j * j;
+  } while (jj <= k);
+  if (k <= 4) {
+    kt = mfactor;
+    fftstate->factor [mfactor] = k;
+    if (k != 1)
+      mfactor++;
+  } else {
+    if (k - (k / 4 << 2) == 0) {
+      mfactor++;
+      fftstate->factor [mfactor - 1] = 2;
+      k /= 4;
+    }
+    kt = mfactor;
+    j = 2;
+    do {
+      if (k % j == 0) {
+        mfactor++;
+        fftstate->factor [mfactor - 1] = j;
+        k /= j;
+      }
+      j = ((j + 1) / 2 << 1) + 1;
+    } while (j <= k);
+  }
+  if (kt) {
+    j = kt;
+    do {
+      mfactor++;
+      fftstate->factor [mfactor - 1] = fftstate->factor [j - 1];
+      j--;
+    } while (j);
+  }
+
+  /* test that mfactor is in range */
+  if (mfactor > NFACTOR)
+  {
+    return -1;
+  }
+
+  /* compute fourier transform */
+  for (;;) {
+    sd = radf / (double) kspan;
+    cd = sin(sd);
+    cd = 2.0 * cd * cd;
+    sd = sin(sd + sd);
+    kk = 0;
+    ii++;
+
+    switch (fftstate->factor [ii - 1]) {
+      case 2:
+        /* transform for factor of 2 (including rotation factor) */
+        kspan /= 2;
+        k1 = kspan + 2;
+        do {
+          do {
+            k2 = kk + kspan;
+            ak = Re [k2];
+            bk = Im [k2];
+            Re [k2] = Re [kk] - ak;
+            Im [k2] = Im [kk] - bk;
+            Re [kk] += ak;
+            Im [kk] += bk;
+            kk = k2 + kspan;
+          } while (kk < nn);
+          kk -= nn;
+        } while (kk < jc);
+        if (kk >= kspan)
+          goto Permute_Results_Label;  /* exit infinite loop */
+        do {
+          c1 = 1.0 - cd;
+          s1 = sd;
+          do {
+            do {
+              do {
+                k2 = kk + kspan;
+                ak = Re [kk] - Re [k2];
+                bk = Im [kk] - Im [k2];
+                Re [kk] += Re [k2];
+                Im [kk] += Im [k2];
+                Re [k2] = c1 * ak - s1 * bk;
+                Im [k2] = s1 * ak + c1 * bk;
+                kk = k2 + kspan;
+              } while (kk < (nt-1));
+              k2 = kk - nt;
+              c1 = -c1;
+              kk = k1 - k2;
+            } while (kk > k2);
+            ak = c1 - (cd * c1 + sd * s1);
+            s1 = sd * c1 - cd * s1 + s1;
+            c1 = 2.0 - (ak * ak + s1 * s1);
+            s1 *= c1;
+            c1 *= ak;
+            kk += jc;
+          } while (kk < k2);
+          k1 += inc + inc;
+          kk = (k1 - kspan + 1) / 2 + jc - 1;
+        } while (kk < (jc + jc));
+        break;
+
+      case 4:   /* transform for factor of 4 */
+        ispan = kspan;
+        kspan /= 4;
+
+        do {
+          c1 = 1.0;
+          s1 = 0.0;
+          do {
+            do {
+              k1 = kk + kspan;
+              k2 = k1 + kspan;
+              k3 = k2 + kspan;
+              akp = Re [kk] + Re [k2];
+              akm = Re [kk] - Re [k2];
+              ajp = Re [k1] + Re [k3];
+              ajm = Re [k1] - Re [k3];
+              bkp = Im [kk] + Im [k2];
+              bkm = Im [kk] - Im [k2];
+              bjp = Im [k1] + Im [k3];
+              bjm = Im [k1] - Im [k3];
+              Re [kk] = akp + ajp;
+              Im [kk] = bkp + bjp;
+              ajp = akp - ajp;
+              bjp = bkp - bjp;
+              if (iSign < 0) {
+                akp = akm + bjm;
+                bkp = bkm - ajm;
+                akm -= bjm;
+                bkm += ajm;
+              } else {
+                akp = akm - bjm;
+                bkp = bkm + ajm;
+                akm += bjm;
+                bkm -= ajm;
+              }
+              /* avoid useless multiplies */
+              if (s1 == 0.0) {
+                Re [k1] = akp;
+                Re [k2] = ajp;
+                Re [k3] = akm;
+                Im [k1] = bkp;
+                Im [k2] = bjp;
+                Im [k3] = bkm;
+              } else {
+                Re [k1] = akp * c1 - bkp * s1;
+                Re [k2] = ajp * c2 - bjp * s2;
+                Re [k3] = akm * c3 - bkm * s3;
+                Im [k1] = akp * s1 + bkp * c1;
+                Im [k2] = ajp * s2 + bjp * c2;
+                Im [k3] = akm * s3 + bkm * c3;
+              }
+              kk = k3 + kspan;
+            } while (kk < nt);
+
+            c2 = c1 - (cd * c1 + sd * s1);
+            s1 = sd * c1 - cd * s1 + s1;
+            c1 = 2.0 - (c2 * c2 + s1 * s1);
+            s1 *= c1;
+            c1 *= c2;
+            /* values of c2, c3, s2, s3 that will get used next time */
+            c2 = c1 * c1 - s1 * s1;
+            s2 = 2.0 * c1 * s1;
+            c3 = c2 * c1 - s2 * s1;
+            s3 = c2 * s1 + s2 * c1;
+            kk = kk - nt + jc;
+          } while (kk < kspan);
+          kk = kk - kspan + inc;
+        } while (kk < jc);
+        if (kspan == jc)
+          goto Permute_Results_Label;  /* exit infinite loop */
+        break;
+
+      default:
+        /*  transform for odd factors */
+#ifdef FFT_RADIX4
+        return -1;
+        break;
+#else /* FFT_RADIX4 */
+        k = fftstate->factor [ii - 1];
+        ispan = kspan;
+        kspan /= k;
+
+        switch (k) {
+          case 3: /* transform for factor of 3 (optional code) */
+            do {
+              do {
+                k1 = kk + kspan;
+                k2 = k1 + kspan;
+                ak = Re [kk];
+                bk = Im [kk];
+                aj = Re [k1] + Re [k2];
+                bj = Im [k1] + Im [k2];
+                Re [kk] = ak + aj;
+                Im [kk] = bk + bj;
+                ak -= 0.5 * aj;
+                bk -= 0.5 * bj;
+                aj = (Re [k1] - Re [k2]) * s60;
+                bj = (Im [k1] - Im [k2]) * s60;
+                Re [k1] = ak - bj;
+                Re [k2] = ak + bj;
+                Im [k1] = bk + aj;
+                Im [k2] = bk - aj;
+                kk = k2 + kspan;
+              } while (kk < (nn - 1));
+              kk -= nn;
+            } while (kk < kspan);
+            break;
+
+          case 5: /*  transform for factor of 5 (optional code) */
+            c2 = c72 * c72 - s72 * s72;
+            s2 = 2.0 * c72 * s72;
+            do {
+              do {
+                k1 = kk + kspan;
+                k2 = k1 + kspan;
+                k3 = k2 + kspan;
+                k4 = k3 + kspan;
+                akp = Re [k1] + Re [k4];
+                akm = Re [k1] - Re [k4];
+                bkp = Im [k1] + Im [k4];
+                bkm = Im [k1] - Im [k4];
+                ajp = Re [k2] + Re [k3];
+                ajm = Re [k2] - Re [k3];
+                bjp = Im [k2] + Im [k3];
+                bjm = Im [k2] - Im [k3];
+                aa = Re [kk];
+                bb = Im [kk];
+                Re [kk] = aa + akp + ajp;
+                Im [kk] = bb + bkp + bjp;
+                ak = akp * c72 + ajp * c2 + aa;
+                bk = bkp * c72 + bjp * c2 + bb;
+                aj = akm * s72 + ajm * s2;
+                bj = bkm * s72 + bjm * s2;
+                Re [k1] = ak - bj;
+                Re [k4] = ak + bj;
+                Im [k1] = bk + aj;
+                Im [k4] = bk - aj;
+                ak = akp * c2 + ajp * c72 + aa;
+                bk = bkp * c2 + bjp * c72 + bb;
+                aj = akm * s2 - ajm * s72;
+                bj = bkm * s2 - bjm * s72;
+                Re [k2] = ak - bj;
+                Re [k3] = ak + bj;
+                Im [k2] = bk + aj;
+                Im [k3] = bk - aj;
+                kk = k4 + kspan;
+              } while (kk < (nn-1));
+              kk -= nn;
+            } while (kk < kspan);
+            break;
+
+          default:
+            if (k != jf) {
+              jf = k;
+              s1 = pi2 / (double) k;
+              c1 = cos(s1);
+              s1 = sin(s1);
+              if (jf > max_factors){
+                return -1;
+              }
+              Cos [jf - 1] = 1.0;
+              Sin [jf - 1] = 0.0;
+              j = 1;
+              do {
+                Cos [j - 1] = Cos [k - 1] * c1 + Sin [k - 1] * s1;
+                Sin [j - 1] = Cos [k - 1] * s1 - Sin [k - 1] * c1;
+                k--;
+                Cos [k - 1] = Cos [j - 1];
+                Sin [k - 1] = -Sin [j - 1];
+                j++;
+              } while (j < k);
+            }
+            do {
+              do {
+                k1 = kk;
+                k2 = kk + ispan;
+                ak = aa = Re [kk];
+                bk = bb = Im [kk];
+                j = 1;
+                k1 += kspan;
+                do {
+                  k2 -= kspan;
+                  j++;
+                  Rtmp [j - 1] = Re [k1] + Re [k2];
+                  ak += Rtmp [j - 1];
+                  Itmp [j - 1] = Im [k1] + Im [k2];
+                  bk += Itmp [j - 1];
+                  j++;
+                  Rtmp [j - 1] = Re [k1] - Re [k2];
+                  Itmp [j - 1] = Im [k1] - Im [k2];
+                  k1 += kspan;
+                } while (k1 < k2);
+                Re [kk] = ak;
+                Im [kk] = bk;
+                k1 = kk;
+                k2 = kk + ispan;
+                j = 1;
+                do {
+                  k1 += kspan;
+                  k2 -= kspan;
+                  jj = j;
+                  ak = aa;
+                  bk = bb;
+                  aj = 0.0;
+                  bj = 0.0;
+                  k = 1;
+                  do {
+                    k++;
+                    ak += Rtmp [k - 1] * Cos [jj - 1];
+                    bk += Itmp [k - 1] * Cos [jj - 1];
+                    k++;
+                    aj += Rtmp [k - 1] * Sin [jj - 1];
+                    bj += Itmp [k - 1] * Sin [jj - 1];
+                    jj += j;
+                    if (jj > jf) {
+                      jj -= jf;
+                    }
+                  } while (k < jf);
+                  k = jf - j;
+                  Re [k1] = ak - bj;
+                  Im [k1] = bk + aj;
+                  Re [k2] = ak + bj;
+                  Im [k2] = bk - aj;
+                  j++;
+                } while (j < k);
+                kk += ispan;
+              } while (kk < nn);
+              kk -= nn;
+            } while (kk < kspan);
+            break;
+        }
+
+        /*  multiply by rotation factor (except for factors of 2 and 4) */
+        if (ii == mfactor)
+          goto Permute_Results_Label;  /* exit infinite loop */
+        kk = jc;
+        do {
+          c2 = 1.0 - cd;
+          s1 = sd;
+          do {
+            c1 = c2;
+            s2 = s1;
+            kk += kspan;
+            do {
+              do {
+                ak = Re [kk];
+                Re [kk] = c2 * ak - s2 * Im [kk];
+                Im [kk] = s2 * ak + c2 * Im [kk];
+                kk += ispan;
+              } while (kk < nt);
+              ak = s1 * s2;
+              s2 = s1 * c2 + c1 * s2;
+              c2 = c1 * c2 - ak;
+              kk = kk - nt + kspan;
+            } while (kk < ispan);
+            c2 = c1 - (cd * c1 + sd * s1);
+            s1 += sd * c1 - cd * s1;
+            c1 = 2.0 - (c2 * c2 + s1 * s1);
+            s1 *= c1;
+            c2 *= c1;
+            kk = kk - ispan + jc;
+          } while (kk < kspan);
+          kk = kk - kspan + jc + inc;
+        } while (kk < (jc + jc));
+        break;
+#endif /* FFT_RADIX4 */
+    }
+  }
+
+  /*  permute the results to normal order---done in two stages */
+  /*  permutation for square factors of n */
+Permute_Results_Label:
+  fftstate->Perm [0] = ns;
+  if (kt) {
+    k = kt + kt + 1;
+    if (mfactor < k)
+      k--;
+    j = 1;
+    fftstate->Perm [k] = jc;
+    do {
+      fftstate->Perm [j] = fftstate->Perm [j - 1] / fftstate->factor [j - 1];
+      fftstate->Perm [k - 1] = fftstate->Perm [k] * fftstate->factor [j - 1];
+      j++;
+      k--;
+    } while (j < k);
+    k3 = fftstate->Perm [k];
+    kspan = fftstate->Perm [1];
+    kk = jc;
+    k2 = kspan;
+    j = 1;
+    if (nPass != nTotal) {
+      /*  permutation for multivariate transform */
+   Permute_Multi_Label:
+      do {
+        do {
+          k = kk + jc;
+          do {
+            /* swap Re [kk] <> Re [k2], Im [kk] <> Im [k2] */
+            ak = Re [kk]; Re [kk] = Re [k2]; Re [k2] = ak;
+            bk = Im [kk]; Im [kk] = Im [k2]; Im [k2] = bk;
+            kk += inc;
+            k2 += inc;
+          } while (kk < (k-1));
+          kk += ns - jc;
+          k2 += ns - jc;
+        } while (kk < (nt-1));
+        k2 = k2 - nt + kspan;
+        kk = kk - nt + jc;
+      } while (k2 < (ns-1));
+      do {
+        do {
+          k2 -= fftstate->Perm [j - 1];
+          j++;
+          k2 = fftstate->Perm [j] + k2;
+        } while (k2 > fftstate->Perm [j - 1]);
+        j = 1;
+        do {
+          if (kk < (k2-1))
+            goto Permute_Multi_Label;
+          kk += jc;
+          k2 += kspan;
+        } while (k2 < (ns-1));
+      } while (kk < (ns-1));
+    } else {
+      /*  permutation for single-variate transform (optional code) */
+   Permute_Single_Label:
+      do {
+        /* swap Re [kk] <> Re [k2], Im [kk] <> Im [k2] */
+        ak = Re [kk]; Re [kk] = Re [k2]; Re [k2] = ak;
+        bk = Im [kk]; Im [kk] = Im [k2]; Im [k2] = bk;
+        kk += inc;
+        k2 += kspan;
+      } while (k2 < (ns-1));
+      do {
+        do {
+          k2 -= fftstate->Perm [j - 1];
+          j++;
+          k2 = fftstate->Perm [j] + k2;
+        } while (k2 >= fftstate->Perm [j - 1]);
+        j = 1;
+        do {
+          if (kk < k2)
+            goto Permute_Single_Label;
+          kk += inc;
+          k2 += kspan;
+        } while (k2 < (ns-1));
+      } while (kk < (ns-1));
+    }
+    jc = k3;
+  }
+
+  if ((kt << 1) + 1 >= mfactor)
+    return 0;
+  ispan = fftstate->Perm [kt];
+  /* permutation for square-free factors of n */
+  j = mfactor - kt;
+  fftstate->factor [j] = 1;
+  do {
+    fftstate->factor [j - 1] *= fftstate->factor [j];
+    j--;
+  } while (j != kt);
+  kt++;
+  nn = fftstate->factor [kt - 1] - 1;
+  if (nn > (int) max_perm) {
+    return -1;
+  }
+  j = jj = 0;
+  for (;;) {
+    k = kt + 1;
+    k2 = fftstate->factor [kt - 1];
+    kk = fftstate->factor [k - 1];
+    j++;
+    if (j > nn)
+      break;    /* exit infinite loop */
+    jj += kk;
+    while (jj >= k2) {
+      jj -= k2;
+      k2 = kk;
+      k++;
+      kk = fftstate->factor [k - 1];
+      jj += kk;
+    }
+    fftstate->Perm [j - 1] = jj;
+  }
+  /*  determine the permutation cycles of length greater than 1 */
+  j = 0;
+  for (;;) {
+    do {
+      j++;
+      kk = fftstate->Perm [j - 1];
+    } while (kk < 0);
+    if (kk != j) {
+      do {
+        k = kk;
+        kk = fftstate->Perm [k - 1];
+        fftstate->Perm [k - 1] = -kk;
+      } while (kk != j);
+      k3 = kk;
+    } else {
+      fftstate->Perm [j - 1] = -j;
+      if (j == nn)
+        break;  /* exit infinite loop */
+    }
+  }
+  max_factors *= inc;
+  /*  reorder a and b, following the permutation cycles */
+  for (;;) {
+    j = k3 + 1;
+    nt -= ispan;
+    ii = nt - inc + 1;
+    if (nt < 0)
+      break;   /* exit infinite loop */
+    do {
+      do {
+        j--;
+      } while (fftstate->Perm [j - 1] < 0);
+      jj = jc;
+      do {
+        kspan = jj;
+        if (jj > max_factors) {
+          kspan = max_factors;
+        }
+        jj -= kspan;
+        k = fftstate->Perm [j - 1];
+        kk = jc * k + ii + jj;
+        k1 = kk + kspan - 1;
+        k2 = 0;
+        do {
+          k2++;
+          Rtmp [k2 - 1] = Re [k1];
+          Itmp [k2 - 1] = Im [k1];
+          k1 -= inc;
+        } while (k1 != (kk-1));
+        do {
+          k1 = kk + kspan - 1;
+          k2 = k1 - jc * (k + fftstate->Perm [k - 1]);
+          k = -fftstate->Perm [k - 1];
+          do {
+            Re [k1] = Re [k2];
+            Im [k1] = Im [k2];
+            k1 -= inc;
+            k2 -= inc;
+          } while (k1 != (kk-1));
+          kk = k2 + 1;
+        } while (k != j);
+        k1 = kk + kspan - 1;
+        k2 = 0;
+        do {
+          k2++;
+          Re [k1] = Rtmp [k2 - 1];
+          Im [k1] = Itmp [k2 - 1];
+          k1 -= inc;
+        } while (k1 != (kk-1));
+      } while (jj);
+    } while (j != 1);
+  }
+  return 0;   /* exit point here */
+}
+/* ---------------------- end-of-file (c source) ---------------------- */
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/fft.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/fft.h
new file mode 100644
index 0000000..a42f57b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/fft.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*--------------------------------*-C-*---------------------------------*
+ * File:
+ *       fftn.h
+ * ---------------------------------------------------------------------*
+ * Re[]:        real value array
+ * Im[]:        imaginary value array
+ * nTotal:      total number of complex values
+ * nPass:       number of elements involved in this pass of transform
+ * nSpan:       nSpan/nPass = number of bytes to increment pointer
+ *              in Re[] and Im[]
+ * isign: exponent: +1 = forward  -1 = reverse
+ * scaling: normalizing constant by which the final result is *divided*
+ * scaling == -1, normalize by total dimension of the transform
+ * scaling <  -1, normalize by the square-root of the total dimension
+ *
+ * ----------------------------------------------------------------------
+ * See the comments in the code for correct usage!
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FFT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FFT_H_
+
+
+#include "structs.h"
+
+
+/* double precision routine */
+
+
+int WebRtcIsac_Fftns (unsigned int ndim, const int dims[], double Re[], double Im[],
+                     int isign, double scaling, FFTstr *fftstate);
+
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FFT_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filter_functions.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filter_functions.c
new file mode 100644
index 0000000..33024a3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filter_functions.c
@@ -0,0 +1,271 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory.h>
+#ifdef WEBRTC_ANDROID
+#include <stdlib.h>
+#endif
+#include "pitch_estimator.h"
+#include "lpc_analysis.h"
+#include "codec.h"
+
+
+
+void WebRtcIsac_AllPoleFilter(double *InOut, double *Coef, int lengthInOut, int orderCoef){
+
+  /* the state of filter is assumed to be in InOut[-1] to InOut[-orderCoef] */
+  double scal;
+  double sum;
+  int n,k;
+
+  //if (fabs(Coef[0]-1.0)<0.001) {
+  if ( (Coef[0] > 0.9999) && (Coef[0] < 1.0001) )
+  {
+    for(n = 0; n < lengthInOut; n++)
+    {
+      sum = Coef[1] * InOut[-1];
+      for(k = 2; k <= orderCoef; k++){
+        sum += Coef[k] * InOut[-k];
+      }
+      *InOut++ -= sum;
+    }
+  }
+  else
+  {
+    scal = 1.0 / Coef[0];
+    for(n=0;n<lengthInOut;n++)
+    {
+      *InOut *= scal;
+      for(k=1;k<=orderCoef;k++){
+        *InOut -= scal*Coef[k]*InOut[-k];
+      }
+      InOut++;
+    }
+  }
+}
+
+
+void WebRtcIsac_AllZeroFilter(double *In, double *Coef, int lengthInOut, int orderCoef, double *Out){
+
+  /* the state of filter is assumed to be in In[-1] to In[-orderCoef] */
+
+  int n, k;
+  double tmp;
+
+  for(n = 0; n < lengthInOut; n++)
+  {
+    tmp = In[0] * Coef[0];
+
+    for(k = 1; k <= orderCoef; k++){
+      tmp += Coef[k] * In[-k];
+    }
+
+    *Out++ = tmp;
+    In++;
+  }
+}
+
+
+
+void WebRtcIsac_ZeroPoleFilter(double *In, double *ZeroCoef, double *PoleCoef, int lengthInOut, int orderCoef, double *Out){
+
+  /* the state of the zero section is assumed to be in In[-1] to In[-orderCoef] */
+  /* the state of the pole section is assumed to be in Out[-1] to Out[-orderCoef] */
+
+  WebRtcIsac_AllZeroFilter(In,ZeroCoef,lengthInOut,orderCoef,Out);
+  WebRtcIsac_AllPoleFilter(Out,PoleCoef,lengthInOut,orderCoef);
+}
+
+
+void WebRtcIsac_AutoCorr(
+    double *r,
+    const double *x,
+    int N,
+    int order
+                        )
+{
+  int  lag, n;
+  double sum, prod;
+  const double *x_lag;
+
+  for (lag = 0; lag <= order; lag++)
+  {
+    sum = 0.0f;
+    x_lag = &x[lag];
+    prod = x[0] * x_lag[0];
+    for (n = 1; n < N - lag; n++) {
+      sum += prod;
+      prod = x[n] * x_lag[n];
+    }
+    sum += prod;
+    r[lag] = sum;
+  }
+
+}
+
+
+void WebRtcIsac_BwExpand(double *out, double *in, double coef, short length) {
+  int i;
+  double  chirp;
+
+  chirp = coef;
+
+  out[0] = in[0];
+  for (i = 1; i < length; i++) {
+    out[i] = chirp * in[i];
+    chirp *= coef;
+  }
+}
+
+void WebRtcIsac_WeightingFilter(const double *in, double *weiout, double *whiout, WeightFiltstr *wfdata) {
+
+  double  tmpbuffer[PITCH_FRAME_LEN + PITCH_WLPCBUFLEN];
+  double  corr[PITCH_WLPCORDER+1], rc[PITCH_WLPCORDER+1];
+  double apol[PITCH_WLPCORDER+1], apolr[PITCH_WLPCORDER+1];
+  double  rho=0.9, *inp, *dp, *dp2;
+  double  whoutbuf[PITCH_WLPCBUFLEN + PITCH_WLPCORDER];
+  double  weoutbuf[PITCH_WLPCBUFLEN + PITCH_WLPCORDER];
+  double  *weo, *who, opol[PITCH_WLPCORDER+1], ext[PITCH_WLPCWINLEN];
+  int     k, n, endpos, start;
+
+  /* Set up buffer and states */
+  memcpy(tmpbuffer, wfdata->buffer, sizeof(double) * PITCH_WLPCBUFLEN);
+  memcpy(tmpbuffer+PITCH_WLPCBUFLEN, in, sizeof(double) * PITCH_FRAME_LEN);
+  memcpy(wfdata->buffer, tmpbuffer+PITCH_FRAME_LEN, sizeof(double) * PITCH_WLPCBUFLEN);
+
+  dp=weoutbuf;
+  dp2=whoutbuf;
+  for (k=0;k<PITCH_WLPCORDER;k++) {
+    *dp++ = wfdata->weostate[k];
+    *dp2++ = wfdata->whostate[k];
+    opol[k]=0.0;
+  }
+  opol[0]=1.0;
+  opol[PITCH_WLPCORDER]=0.0;
+  weo=dp;
+  who=dp2;
+
+  endpos=PITCH_WLPCBUFLEN + PITCH_SUBFRAME_LEN;
+  inp=tmpbuffer + PITCH_WLPCBUFLEN;
+
+  for (n=0; n<PITCH_SUBFRAMES; n++) {
+    /* Windowing */
+    start=endpos-PITCH_WLPCWINLEN;
+    for (k=0; k<PITCH_WLPCWINLEN; k++) {
+      ext[k]=wfdata->window[k]*tmpbuffer[start+k];
+    }
+
+    /* Get LPC polynomial */
+    WebRtcIsac_AutoCorr(corr, ext, PITCH_WLPCWINLEN, PITCH_WLPCORDER);
+    corr[0]=1.01*corr[0]+1.0; /* White noise correction */
+    WebRtcIsac_LevDurb(apol, rc, corr, PITCH_WLPCORDER);
+    WebRtcIsac_BwExpand(apolr, apol, rho, PITCH_WLPCORDER+1);
+
+    /* Filtering */
+    WebRtcIsac_ZeroPoleFilter(inp, apol, apolr, PITCH_SUBFRAME_LEN, PITCH_WLPCORDER, weo);
+    WebRtcIsac_ZeroPoleFilter(inp, apolr, opol, PITCH_SUBFRAME_LEN, PITCH_WLPCORDER, who);
+
+    inp+=PITCH_SUBFRAME_LEN;
+    endpos+=PITCH_SUBFRAME_LEN;
+    weo+=PITCH_SUBFRAME_LEN;
+    who+=PITCH_SUBFRAME_LEN;
+  }
+
+  /* Export filter states */
+  for (k=0;k<PITCH_WLPCORDER;k++) {
+    wfdata->weostate[k]=weoutbuf[PITCH_FRAME_LEN+k];
+    wfdata->whostate[k]=whoutbuf[PITCH_FRAME_LEN+k];
+  }
+
+  /* Export output data */
+  memcpy(weiout, weoutbuf+PITCH_WLPCORDER, sizeof(double) * PITCH_FRAME_LEN);
+  memcpy(whiout, whoutbuf+PITCH_WLPCORDER, sizeof(double) * PITCH_FRAME_LEN);
+}
+
+
+static const double APupper[ALLPASSSECTIONS] = {0.0347, 0.3826};
+static const double APlower[ALLPASSSECTIONS] = {0.1544, 0.744};
+
+
+
+void WebRtcIsac_AllpassFilterForDec(double *InOut,
+                                   const double *APSectionFactors,
+                                   int lengthInOut,
+                                   double *FilterState)
+{
+  //This performs all-pass filtering--a series of first order all-pass sections are used
+  //to filter the input in a cascade manner.
+  int n,j;
+  double temp;
+  for (j=0; j<ALLPASSSECTIONS; j++){
+    for (n=0;n<lengthInOut;n+=2){
+      temp = InOut[n]; //store input
+      InOut[n] = FilterState[j] + APSectionFactors[j]*temp;
+      FilterState[j] = -APSectionFactors[j]*InOut[n] + temp;
+    }
+  }
+}
+
+void WebRtcIsac_DecimateAllpass(const double *in,
+                                double *state_in,        /* array of size: 2*ALLPASSSECTIONS+1 */
+                                int N,                   /* number of input samples */
+                                double *out)             /* array of size N/2 */
+{
+  int n;
+  double data_vec[PITCH_FRAME_LEN];
+
+  /* copy input */
+  memcpy(data_vec+1, in, sizeof(double) * (N-1));
+
+  data_vec[0] = state_in[2*ALLPASSSECTIONS];   //the z^(-1) state
+  state_in[2*ALLPASSSECTIONS] = in[N-1];
+
+  WebRtcIsac_AllpassFilterForDec(data_vec+1, APupper, N, state_in);
+  WebRtcIsac_AllpassFilterForDec(data_vec, APlower, N, state_in+ALLPASSSECTIONS);
+
+  for (n=0;n<N/2;n++)
+    out[n] = data_vec[2*n] + data_vec[2*n+1];
+
+}
+
+
+
+/* create high-pass filter coefficients
+ * z = 0.998 * exp(j*2*pi*35/8000);
+ * p = 0.94 * exp(j*2*pi*140/8000);
+ * HP_b = [1, -2*real(z), abs(z)^2];
+ * HP_a = [1, -2*real(p), abs(p)^2]; */
+static const double a_coef[2] = { 1.86864659625574, -0.88360000000000};
+static const double b_coef[2] = {-1.99524591718270,  0.99600400000000};
+static const float a_coef_float[2] = { 1.86864659625574f, -0.88360000000000f};
+static const float b_coef_float[2] = {-1.99524591718270f,  0.99600400000000f};
+
+/* second order high-pass filter */
+void WebRtcIsac_Highpass(const double *in, double *out, double *state, int N)
+{
+  int k;
+
+  for (k=0; k<N; k++) {
+    *out = *in + state[1];
+    state[1] = state[0] + b_coef[0] * *in + a_coef[0] * *out;
+    state[0] = b_coef[1] * *in++ + a_coef[1] * *out++;
+  }
+}
+
+void WebRtcIsac_Highpass_float(const float *in, double *out, double *state, int N)
+{
+  int k;
+
+  for (k=0; k<N; k++) {
+    *out = (double)*in + state[1];
+    state[1] = state[0] + b_coef_float[0] * *in + a_coef_float[0] * *out;
+    state[0] = b_coef_float[1] * (double)*in++ + a_coef_float[1] * *out++;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbank_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbank_tables.c
new file mode 100644
index 0000000..0f844af
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbank_tables.c
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* filterbank_tables.c*/
+/* This file contains variables that are used in filterbanks.c*/
+
+#include "filterbank_tables.h"
+#include "settings.h"
+
+/* The composite all-pass filter factors */
+const float WebRtcIsac_kCompositeApFactorsFloat[4] = {
+ 0.03470000000000f,  0.15440000000000f,  0.38260000000000f,  0.74400000000000f};
+
+/* The upper channel all-pass filter factors */
+const float WebRtcIsac_kUpperApFactorsFloat[2] = {
+ 0.03470000000000f,  0.38260000000000f};
+
+/* The lower channel all-pass filter factors */
+const float WebRtcIsac_kLowerApFactorsFloat[2] = {
+ 0.15440000000000f,  0.74400000000000f};
+
+/* The matrix for transforming the backward composite state to upper channel state */
+const float WebRtcIsac_kTransform1Float[8] = {
+  -0.00158678506084f,  0.00127157815343f, -0.00104805672709f,  0.00084837248079f,
+  0.00134467983258f, -0.00107756549387f,  0.00088814793277f, -0.00071893072525f};
+
+/* The matrix for transforming the backward composite state to lower channel state */
+const float WebRtcIsac_kTransform2Float[8] = {
+ -0.00170686041697f,  0.00136780109829f, -0.00112736532350f,  0.00091257055385f,
+  0.00103094281812f, -0.00082615076557f,  0.00068092756088f, -0.00055119165484f};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbank_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbank_tables.h
new file mode 100644
index 0000000..e8fda5e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbank_tables.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filterbank_tables.h
+ *
+ * Header file for variables that are defined in
+ * filterbank_tables.c.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FILTERBANK_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FILTERBANK_TABLES_H_
+
+#include "structs.h"
+
+/********************* Coefficient Tables ************************/
+/* The number of composite all-pass filter factors (upper + lower sections) */
+#define NUMBEROFCOMPOSITEAPSECTIONS 4
+
+/* The number of all-pass filter factors in an upper or lower channel */
+#define NUMBEROFCHANNELAPSECTIONS 2
+
+/* The composite all-pass filter factors; union of the two channel filters
+ * in ascending order (defined in filterbank_tables.c). */
+extern const float WebRtcIsac_kCompositeApFactorsFloat[4];
+
+/* The upper channel all-pass filter factors */
+extern const float WebRtcIsac_kUpperApFactorsFloat[2];
+
+/* The lower channel all-pass filter factors */
+extern const float WebRtcIsac_kLowerApFactorsFloat[2];
+
+/* The matrix for transforming the backward composite state to upper channel
+ * state; 2x4, row-major. */
+extern const float WebRtcIsac_kTransform1Float[8];
+
+/* The matrix for transforming the backward composite state to lower channel
+ * state; 2x4, row-major. */
+extern const float WebRtcIsac_kTransform2Float[8];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FILTERBANK_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbanks.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbanks.c
new file mode 100644
index 0000000..671fd32
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/filterbanks.c
@@ -0,0 +1,346 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * filterbanks.c
+ *
+ * This file contains function WebRtcIsac_AllPassFilter2Float,
+ * WebRtcIsac_SplitAndFilter, and WebRtcIsac_FilterAndCombine
+ * which implement filterbanks that produce decimated lowpass and
+ * highpass versions of a signal, and performs reconstruction.
+ *
+ */
+
+#include "settings.h"
+#include "filterbank_tables.h"
+#include "codec.h"
+
+/* This function performs all-pass filtering--a series of first order all-pass
+ * sections are used to filter the input in a cascade manner.
+ * The input is overwritten!!
+ *
+ * InOut            : [in/out] signal of lengthInOut samples; replaced by the
+ *                    filtered output in place.
+ * APSectionFactors : [in] one all-pass coefficient per section
+ *                    (NumberOfSections entries).
+ * lengthInOut      : number of samples in InOut.
+ * NumberOfSections : number of cascaded first-order sections.
+ * FilterState      : [in/out] one state value per section; carried across
+ *                    calls so consecutive frames filter continuously.
+ */
+static void WebRtcIsac_AllPassFilter2Float(float *InOut, const float *APSectionFactors,
+                                           int lengthInOut, int NumberOfSections,
+                                           float *FilterState)
+{
+  int n, j;
+  float temp;
+  for (j=0; j<NumberOfSections; j++){
+    for (n=0;n<lengthInOut;n++){
+      /* First-order all-pass section: y = s + a*x; s' = x - a*y */
+      temp = FilterState[j] + APSectionFactors[j] * InOut[n];
+      FilterState[j] = -APSectionFactors[j] * temp + InOut[n];
+      InOut[n] = temp;
+    }
+  }
+}
+
+/* Input high-pass filter coefficients, direct-form II transposed layout:
+ * HPstcoeff_in = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */
+static const float kHpStCoefInFloat[4] =
+{-1.94895953203325f, 0.94984516000000f, -0.05101826139794f, 0.05015484000000f};
+
+/* Function WebRtcIsac_SplitAndFilter
+ * This function creates low-pass and high-pass decimated versions of part of
+ the input signal, and part of the signal in the input 'lookahead buffer'.
+
+ INPUTS:
+ in: a length FRAMESAMPLES array of input samples
+ prefiltdata: input data structure containing the filterbank states
+ and lookahead samples from the previous encoding
+ iteration.
+ OUTPUTS:
+ LP: a FRAMESAMPLES_HALF array of low-pass filtered samples that
+ have been phase equalized.  The first QLOOKAHEAD samples are
+ based on the samples in the two prefiltdata->INLABUFx arrays
+ each of length QLOOKAHEAD.
+ The remaining FRAMESAMPLES_HALF-QLOOKAHEAD samples are based
+ on the first FRAMESAMPLES_HALF-QLOOKAHEAD samples of the input
+ array in[].
+ HP: a FRAMESAMPLES_HALF array of high-pass filtered samples that
+ have been phase equalized.  The first QLOOKAHEAD samples are
+ based on the samples in the two prefiltdata->INLABUFx arrays
+ each of length QLOOKAHEAD.
+ The remaining FRAMESAMPLES_HALF-QLOOKAHEAD samples are based
+ on the first FRAMESAMPLES_HALF-QLOOKAHEAD samples of the input
+ array in[].
+
+ LP_la: a FRAMESAMPLES_HALF array of low-pass filtered samples.
+ These samples are not phase equalized. They are computed
+ from the samples in the in[] array.
+ HP_la: a FRAMESAMPLES_HALF array of high-pass filtered samples
+ that are not phase equalized. They are computed from
+ the in[] vector.
+ prefiltdata: this input data structure's filterbank state and
+ lookahead sample buffers are updated for the next
+ encoding iteration.
+*/
+void WebRtcIsac_SplitAndFilterFloat(float *pin, float *LP, float *HP,
+                                    double *LP_la, double *HP_la,
+                                    PreFiltBankstr *prefiltdata)
+{
+  int k,n;
+  float CompositeAPFilterState[NUMBEROFCOMPOSITEAPSECTIONS];
+  float ForTransform_CompositeAPFilterState[NUMBEROFCOMPOSITEAPSECTIONS];
+  float ForTransform_CompositeAPFilterState2[NUMBEROFCOMPOSITEAPSECTIONS];
+  float tempinoutvec[FRAMESAMPLES+MAX_AR_MODEL_ORDER];
+  float tempin_ch1[FRAMESAMPLES+MAX_AR_MODEL_ORDER];
+  float tempin_ch2[FRAMESAMPLES+MAX_AR_MODEL_ORDER];
+  float in[FRAMESAMPLES];
+  float ftmp;
+
+
+  /* High pass filter (direct-form II transposed; coefficients in
+     kHpStCoefInFloat, two state values carried in HPstates_float) */
+
+  for (k=0;k<FRAMESAMPLES;k++) {
+    in[k] = pin[k] + kHpStCoefInFloat[2] * prefiltdata->HPstates_float[0] +
+        kHpStCoefInFloat[3] * prefiltdata->HPstates_float[1];
+    ftmp = pin[k] - kHpStCoefInFloat[0] * prefiltdata->HPstates_float[0] -
+        kHpStCoefInFloat[1] * prefiltdata->HPstates_float[1];
+    prefiltdata->HPstates_float[1] = prefiltdata->HPstates_float[0];
+    prefiltdata->HPstates_float[0] = ftmp;
+  }
+
+  /*
+    % backwards all-pass filtering to obtain zero-phase
+    [tmp1(N2+LA:-1:LA+1, 1), state1] = filter(Q.coef, Q.coef(end:-1:1), in(N:-2:2));
+    tmp1(LA:-1:1) = filter(Q.coef, Q.coef(end:-1:1), Q.LookAheadBuf1, state1);
+    Q.LookAheadBuf1 = in(N:-2:N-2*LA+2);
+  */
+  /*Backwards all-pass filter the odd samples of the input (upper channel)
+    to eventually obtain zero phase.  The composite all-pass filter (comprised of both
+    the upper and lower channel all-pass filters in series) is used for the
+    filtering. */
+
+  /* First Channel */
+
+  /*initial state of composite filter is zero */
+  for (k=0;k<NUMBEROFCOMPOSITEAPSECTIONS;k++){
+    CompositeAPFilterState[k] = 0.0;
+  }
+  /* put every other sample of input into a temporary vector in reverse (backward) order*/
+  for (k=0;k<FRAMESAMPLES_HALF;k++) {
+    tempinoutvec[k] = in[FRAMESAMPLES-1-2*k];
+  }
+
+  /* now all-pass filter the backwards vector.  Output values overwrite the input vector. */
+  WebRtcIsac_AllPassFilter2Float(tempinoutvec, WebRtcIsac_kCompositeApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCOMPOSITEAPSECTIONS, CompositeAPFilterState);
+
+  /* save the backwards filtered output for later forward filtering,
+     but write it in forward order*/
+  for (k=0;k<FRAMESAMPLES_HALF;k++) {
+    tempin_ch1[FRAMESAMPLES_HALF+QLOOKAHEAD-1-k] = tempinoutvec[k];
+  }
+
+  /* save the backwards filter state  because it will be transformed
+     later into a forward state */
+  for (k=0; k<NUMBEROFCOMPOSITEAPSECTIONS; k++) {
+    ForTransform_CompositeAPFilterState[k] = CompositeAPFilterState[k];
+  }
+
+  /* now backwards filter the samples in the lookahead buffer. The samples were
+     placed there in the encoding of the previous frame.  The output samples
+     overwrite the input samples */
+  WebRtcIsac_AllPassFilter2Float(prefiltdata->INLABUF1_float,
+                                 WebRtcIsac_kCompositeApFactorsFloat, QLOOKAHEAD,
+                                 NUMBEROFCOMPOSITEAPSECTIONS, CompositeAPFilterState);
+
+  /* save the output, but write it in forward order */
+  /* write the lookahead samples for the next encoding iteration. Every other
+     sample at the end of the input frame is written in reverse order for the
+     lookahead length. Exported in the prefiltdata structure. */
+  for (k=0;k<QLOOKAHEAD;k++) {
+    tempin_ch1[QLOOKAHEAD-1-k]=prefiltdata->INLABUF1_float[k];
+    prefiltdata->INLABUF1_float[k]=in[FRAMESAMPLES-1-2*k];
+  }
+
+  /* Second Channel.  This is exactly like the first channel, except that the
+     even samples are now filtered instead (lower channel). */
+  for (k=0;k<NUMBEROFCOMPOSITEAPSECTIONS;k++){
+    CompositeAPFilterState[k] = 0.0;
+  }
+
+  for (k=0;k<FRAMESAMPLES_HALF;k++) {
+    tempinoutvec[k] = in[FRAMESAMPLES-2-2*k];
+  }
+
+  WebRtcIsac_AllPassFilter2Float(tempinoutvec, WebRtcIsac_kCompositeApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCOMPOSITEAPSECTIONS, CompositeAPFilterState);
+
+  for (k=0;k<FRAMESAMPLES_HALF;k++) {
+    tempin_ch2[FRAMESAMPLES_HALF+QLOOKAHEAD-1-k] = tempinoutvec[k];
+  }
+
+  for (k=0; k<NUMBEROFCOMPOSITEAPSECTIONS; k++) {
+    ForTransform_CompositeAPFilterState2[k] = CompositeAPFilterState[k];
+  }
+
+
+  WebRtcIsac_AllPassFilter2Float(prefiltdata->INLABUF2_float,
+                                 WebRtcIsac_kCompositeApFactorsFloat, QLOOKAHEAD,NUMBEROFCOMPOSITEAPSECTIONS,
+                                 CompositeAPFilterState);
+
+  for (k=0;k<QLOOKAHEAD;k++) {
+    tempin_ch2[QLOOKAHEAD-1-k]=prefiltdata->INLABUF2_float[k];
+    prefiltdata->INLABUF2_float[k]=in[FRAMESAMPLES-2-2*k];
+  }
+
+  /* Transform filter states from backward to forward */
+  /*At this point, each of the states of the backwards composite filters for the
+    two channels are transformed into forward filtering states for the corresponding
+    forward channel filters.  Each channel's forward filtering state from the previous
+    encoding iteration is added to the transformed state to get a proper forward state */
+
+  /* So the existing NUMBEROFCOMPOSITEAPSECTIONS x 1 (4x1) state vector is multiplied by a
+     NUMBEROFCHANNELAPSECTIONSxNUMBEROFCOMPOSITEAPSECTIONS (2x4) transform matrix to get the
+     new state that is added to the previous 2x1 input state */
+
+  /* NOTE(review): the row stride below is NUMBEROFCHANNELAPSECTIONS (2),
+     but the comment above describes a 2x4 row-major matrix, which would
+     imply a row stride of NUMBEROFCOMPOSITEAPSECTIONS (4). As written,
+     row k=1 reads elements 2..5 (overlapping row 0) and elements 6..7 of
+     each transform table are never used — verify against upstream whether
+     the intended stride is 4. Not changed here to preserve bit-exactness. */
+  for (k=0;k<NUMBEROFCHANNELAPSECTIONS;k++){ /* k is row variable */
+    for (n=0; n<NUMBEROFCOMPOSITEAPSECTIONS;n++){/* n is column variable */
+      prefiltdata->INSTAT1_float[k] += ForTransform_CompositeAPFilterState[n]*
+          WebRtcIsac_kTransform1Float[k*NUMBEROFCHANNELAPSECTIONS+n];
+      prefiltdata->INSTAT2_float[k] += ForTransform_CompositeAPFilterState2[n]*
+          WebRtcIsac_kTransform2Float[k*NUMBEROFCHANNELAPSECTIONS+n];
+    }
+  }
+
+  /*obtain polyphase components by forward all-pass filtering through each channel */
+  /* the backward filtered samples are now forward filtered with the corresponding channel filters */
+  /* The all pass filtering automatically updates the filter states which are exported in the
+     prefiltdata structure */
+  WebRtcIsac_AllPassFilter2Float(tempin_ch1,WebRtcIsac_kUpperApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTAT1_float);
+  WebRtcIsac_AllPassFilter2Float(tempin_ch2,WebRtcIsac_kLowerApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTAT2_float);
+
+  /* Now Construct low-pass and high-pass signals as combinations of polyphase components */
+  for (k=0; k<FRAMESAMPLES_HALF; k++) {
+    LP[k] = 0.5f*(tempin_ch1[k] + tempin_ch2[k]);/* low pass signal*/
+    HP[k] = 0.5f*(tempin_ch1[k] - tempin_ch2[k]);/* high pass signal*/
+  }
+
+  /* Lookahead LP and HP signals */
+  /* now create low pass and high pass signals of the input vector.  However, no
+     backwards filtering is performed, and hence no phase equalization is involved.
+     Also, the input contains some samples that are lookahead samples.  The high pass
+     and low pass signals that are created are used outside this function for analysis
+     (not encoding) purposes */
+
+  /* set up input */
+  for (k=0; k<FRAMESAMPLES_HALF; k++) {
+    tempin_ch1[k]=in[2*k+1];
+    tempin_ch2[k]=in[2*k];
+  }
+
+  /* the input filter states are passed in and updated by the all-pass filtering routine and
+     exported in the prefiltdata structure*/
+  WebRtcIsac_AllPassFilter2Float(tempin_ch1,WebRtcIsac_kUpperApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTATLA1_float);
+  WebRtcIsac_AllPassFilter2Float(tempin_ch2,WebRtcIsac_kLowerApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS, prefiltdata->INSTATLA2_float);
+
+  /* NOTE(review): LP_la/HP_la are double*, yet the LP_la value is first cast
+     to float (the HP_la one to double); the (float) cast discards precision
+     before widening back to double. Harmless numerically at float input
+     precision, but the asymmetry looks unintentional. */
+  for (k=0; k<FRAMESAMPLES_HALF; k++) {
+    LP_la[k] = (float)(0.5f*(tempin_ch1[k] + tempin_ch2[k])); /*low pass */
+    HP_la[k] = (double)(0.5f*(tempin_ch1[k] - tempin_ch2[k])); /* high pass */
+  }
+
+
+}/*end of WebRtcIsac_SplitAndFilter */
+
+
+/* Combining (decoder-side output high-pass filters) */
+
+/* First output high-pass filter coefficients, same {a1, a2, b1-b0*a1, b2-b0*a2}
+   layout as the encoder-side kHpStCoefInFloat:
+   HPstcoeff_out_1 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */
+static const float kHpStCoefOut1Float[4] =
+{-1.99701049409000f, 0.99714204490000f, 0.01701049409000f, -0.01704204490000f};
+
+/* Second output high-pass filter coefficients:
+   HPstcoeff_out_2 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */
+static const float kHpStCoefOut2Float[4] =
+{-1.98645294509837f, 0.98672435560000f, 0.00645294509837f, -0.00662435560000f};
+
+
+/* Function WebRtcIsac_FilterAndCombine */
+/* This is a decoder function that takes the decimated
+   length FRAMESAMPLES_HALF input low-pass and
+   high-pass signals and creates a reconstructed fullband
+   output signal of length FRAMESAMPLES. WebRtcIsac_FilterAndCombine
+   is the sibling function of WebRtcIsac_SplitAndFilter */
+/* INPUTS:
+   inLP: a length FRAMESAMPLES_HALF array of input low-pass
+   samples.
+   inHP: a length FRAMESAMPLES_HALF array of input high-pass
+   samples.
+   postfiltdata: input data structure containing the filterbank
+   states from the previous decoding iteration.
+   OUTPUTS:
+   Out: a length FRAMESAMPLES array of output reconstructed
+   samples (fullband) based on the input low-pass and
+   high-pass signals.
+   postfiltdata: the input data structure containing the filterbank
+   states is updated for the next decoding iteration */
+void WebRtcIsac_FilterAndCombineFloat(float *InLP,
+                                      float *InHP,
+                                      float *Out,
+                                      PostFiltBankstr *postfiltdata)
+{
+  int k;
+  float tempin_ch1[FRAMESAMPLES+MAX_AR_MODEL_ORDER];
+  float tempin_ch2[FRAMESAMPLES+MAX_AR_MODEL_ORDER];
+  float ftmp, ftmp2;
+
+  /* Form the polyphase signals*/
+  for (k=0;k<FRAMESAMPLES_HALF;k++) {
+    tempin_ch1[k]=InLP[k]+InHP[k]; /* Construct a new upper channel signal*/
+    tempin_ch2[k]=InLP[k]-InHP[k]; /* Construct a new lower channel signal*/
+  }
+
+
+  /* all-pass filter the new upper channel signal. HOWEVER, use the all-pass filter factors
+     that were used as a lower channel at the encoding side.  So at the decoder, the
+     corresponding all-pass filter factors for each channel are swapped.*/
+  WebRtcIsac_AllPassFilter2Float(tempin_ch1, WebRtcIsac_kLowerApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_UPPER_float);
+
+  /* Now, all-pass filter the new lower channel signal. But since all-pass filter factors
+     at the decoder are swapped from the ones at the encoder, the 'upper' channel
+     all-pass filter factors (WebRtcIsac_kUpperApFactorsFloat) are used to filter this new
+     lower channel signal */
+  WebRtcIsac_AllPassFilter2Float(tempin_ch2, WebRtcIsac_kUpperApFactorsFloat,
+                                 FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_LOWER_float);
+
+
+  /* Merge outputs to form the full length output signal.*/
+  for (k=0;k<FRAMESAMPLES_HALF;k++) {
+    Out[2*k]=tempin_ch2[k];
+    Out[2*k+1]=tempin_ch1[k];
+  }
+
+
+  /* High pass filter */
+
+  for (k=0;k<FRAMESAMPLES;k++) {
+    ftmp2 = Out[k] + kHpStCoefOut1Float[2] * postfiltdata->HPstates1_float[0] +
+        kHpStCoefOut1Float[3] * postfiltdata->HPstates1_float[1];
+    ftmp = Out[k] - kHpStCoefOut1Float[0] * postfiltdata->HPstates1_float[0] -
+        kHpStCoefOut1Float[1] * postfiltdata->HPstates1_float[1];
+    postfiltdata->HPstates1_float[1] = postfiltdata->HPstates1_float[0];
+    postfiltdata->HPstates1_float[0] = ftmp;
+    Out[k] = ftmp2;
+  }
+
+  for (k=0;k<FRAMESAMPLES;k++) {
+    ftmp2 = Out[k] + kHpStCoefOut2Float[2] * postfiltdata->HPstates2_float[0] +
+        kHpStCoefOut2Float[3] * postfiltdata->HPstates2_float[1];
+    ftmp = Out[k] - kHpStCoefOut2Float[0] * postfiltdata->HPstates2_float[0] -
+        kHpStCoefOut2Float[1] * postfiltdata->HPstates2_float[1];
+    postfiltdata->HPstates2_float[1] = postfiltdata->HPstates2_float[0];
+    postfiltdata->HPstates2_float[0] = ftmp;
+    Out[k] = ftmp2;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/intialize.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/intialize.c
new file mode 100644
index 0000000..6df034d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/intialize.c
@@ -0,0 +1,175 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* encode.c  - Encoding function for the iSAC coder */
+
+#include "structs.h"
+#include "codec.h"
+#include "pitch_estimator.h"
+
+#include <math.h>
+
+/* Zero all masking-filter data buffers, correlation buffers and pre/post
+ * filter states, seed OldEnergy, and initialize the transform tables.
+ * Called once per encoder (re)initialization. */
+void WebRtcIsac_InitMasking(MaskFiltstr *maskdata) {
+
+  int k;
+
+  for (k = 0; k < WINLEN; k++) {
+    maskdata->DataBufferLo[k] = 0.0;
+    maskdata->DataBufferHi[k] = 0.0;
+  }
+  for (k = 0; k < ORDERLO+1; k++) {
+    maskdata->CorrBufLo[k] = 0.0;
+    maskdata->PreStateLoF[k] = 0.0;
+    maskdata->PreStateLoG[k] = 0.0;
+    maskdata->PostStateLoF[k] = 0.0;
+    maskdata->PostStateLoG[k] = 0.0;
+  }
+  for (k = 0; k < ORDERHI+1; k++) {
+    maskdata->CorrBufHi[k] = 0.0;
+    maskdata->PreStateHiF[k] = 0.0;
+    maskdata->PreStateHiG[k] = 0.0;
+    maskdata->PostStateHiF[k] = 0.0;
+    maskdata->PostStateHiG[k] = 0.0;
+  }
+
+  /* non-zero seed so the first energy smoothing step has a sane baseline */
+  maskdata->OldEnergy = 10.0;
+
+  /* fill tables for transforms */
+  WebRtcIsac_InitTransform();
+
+  return;
+}
+
+/* Zero the encoder-side (analysis) filterbank state: lookahead buffers,
+ * all-pass filter states (both fixed-point and float variants), and the
+ * input high-pass filter state. */
+void WebRtcIsac_InitPreFilterbank(PreFiltBankstr *prefiltdata)
+{
+  int k;
+
+  for (k = 0; k < QLOOKAHEAD; k++) {
+    prefiltdata->INLABUF1[k] = 0;
+    prefiltdata->INLABUF2[k] = 0;
+
+    prefiltdata->INLABUF1_float[k] = 0;
+    prefiltdata->INLABUF2_float[k] = 0;
+  }
+  for (k = 0; k < 2*(QORDER-1); k++) {
+    prefiltdata->INSTAT1[k] = 0;
+    prefiltdata->INSTAT2[k] = 0;
+    prefiltdata->INSTATLA1[k] = 0;
+    prefiltdata->INSTATLA2[k] = 0;
+
+    prefiltdata->INSTAT1_float[k] = 0;
+    prefiltdata->INSTAT2_float[k] = 0;
+    prefiltdata->INSTATLA1_float[k] = 0;
+    prefiltdata->INSTATLA2_float[k] = 0;
+  }
+
+  /* High pass filter states */
+  prefiltdata->HPstates[0] = 0.0;
+  prefiltdata->HPstates[1] = 0.0;
+
+  prefiltdata->HPstates_float[0] = 0.0f;
+  prefiltdata->HPstates_float[1] = 0.0f;
+
+  return;
+}
+
+/* Zero the decoder-side (synthesis) filterbank state: upper/lower channel
+ * all-pass states (fixed-point and float variants) and both output
+ * high-pass filter states. */
+void WebRtcIsac_InitPostFilterbank(PostFiltBankstr *postfiltdata)
+{
+  int k;
+
+  for (k = 0; k < 2*POSTQORDER; k++) {
+    postfiltdata->STATE_0_LOWER[k] = 0;
+    postfiltdata->STATE_0_UPPER[k] = 0;
+
+    postfiltdata->STATE_0_LOWER_float[k] = 0;
+    postfiltdata->STATE_0_UPPER_float[k] = 0;
+  }
+
+  /* High pass filter states */
+  postfiltdata->HPstates1[0] = 0.0;
+  postfiltdata->HPstates1[1] = 0.0;
+
+  postfiltdata->HPstates2[0] = 0.0;
+  postfiltdata->HPstates2[1] = 0.0;
+
+  postfiltdata->HPstates1_float[0] = 0.0f;
+  postfiltdata->HPstates1_float[1] = 0.0f;
+
+  postfiltdata->HPstates2_float[0] = 0.0f;
+  postfiltdata->HPstates2_float[1] = 0.0f;
+
+  return;
+}
+
+
+/* Reset the pitch filter: clear the sample buffer and damping states,
+ * and seed the previous lag/gain with neutral defaults (lag 50, gain 0). */
+void WebRtcIsac_InitPitchFilter(PitchFiltstr *pitchfiltdata)
+{
+  int k;
+
+  for (k = 0; k < PITCH_BUFFSIZE; k++) {
+    pitchfiltdata->ubuf[k] = 0.0;
+  }
+  pitchfiltdata->ystate[0] = 0.0;
+  for (k = 1; k < (PITCH_DAMPORDER); k++) {
+    pitchfiltdata->ystate[k] = 0.0;
+  }
+  pitchfiltdata->oldlagp[0] = 50.0;
+  pitchfiltdata->oldgainp[0] = 0.0;
+}
+
+/* Reset the pitch-analysis weighting filter states and (re)compute its
+ * asymmetric sin^2 analysis window of length PITCH_WLPCWINLEN.
+ * Window shape is controlled by PITCH_WLPCASYM: the sine argument is a
+ * blend of a linear and a quadratic ramp in t, squared to stay positive. */
+void WebRtcIsac_InitWeightingFilter(WeightFiltstr *wfdata)
+{
+  int k;
+  double t, dtmp, dtmp2, denum, denum2;
+
+  for (k=0;k<PITCH_WLPCBUFLEN;k++)
+    wfdata->buffer[k]=0.0;
+
+  for (k=0;k<PITCH_WLPCORDER;k++) {
+    wfdata->istate[k]=0.0;
+    wfdata->weostate[k]=0.0;
+    wfdata->whostate[k]=0.0;
+  }
+
+  /* next part should be in Matlab, writing to a global table */
+  t = 0.5; /* sample window at half-integer points */
+  denum = 1.0 / ((double) PITCH_WLPCWINLEN);
+  denum2 = denum * denum;
+  for (k=0;k<PITCH_WLPCWINLEN;k++) {
+    dtmp = PITCH_WLPCASYM * t * denum + (1-PITCH_WLPCASYM) * t * t * denum2;
+    dtmp *= 3.14159265; /* pi */
+    dtmp2 = sin(dtmp);
+    wfdata->window[k] = dtmp2 * dtmp2;
+    t++;
+  }
+}
+
+/* clear all buffers */
+/* Reset the full pitch-analysis state: decimation/HP/whitening buffers,
+ * then delegate to the pitch-filter and weighting-filter initializers for
+ * the two embedded pitch filters and the weighting filter. */
+void WebRtcIsac_InitPitchAnalysis(PitchAnalysisStruct *State)
+{
+  int k;
+
+  for (k = 0; k < PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; k++)
+    State->dec_buffer[k] = 0.0;
+  for (k = 0; k < 2*ALLPASSSECTIONS+1; k++)
+    State->decimator_state[k] = 0.0;
+  for (k = 0; k < 2; k++)
+    State->hp_state[k] = 0.0;
+  for (k = 0; k < QLOOKAHEAD; k++)
+    State->whitened_buf[k] = 0.0;
+  for (k = 0; k < QLOOKAHEAD; k++)
+    State->inbuf[k] = 0.0;
+
+  WebRtcIsac_InitPitchFilter(&(State->PFstr_wght));
+
+  WebRtcIsac_InitPitchFilter(&(State->PFstr));
+
+  WebRtcIsac_InitWeightingFilter(&(State->Wghtstr));
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/isac.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/isac.c
new file mode 100644
index 0000000..67db1e4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/isac.c
@@ -0,0 +1,2793 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * isac.c
+ *
+ * This C file contains the functions for the ISAC API
+ *
+ */
+
+#include "isac.h"
+#include "bandwidth_estimator.h"
+#include "crc.h"
+#include "entropy_coding.h"
+#include "codec.h"
+#include "structs.h"
+#include "signal_processing_library.h"
+#include "lpc_shape_swb16_tables.h"
+#include "os_specific_inline.h"
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <math.h>
+
+#define BIT_MASK_DEC_INIT 0x0001
+#define BIT_MASK_ENC_INIT 0x0002
+
+#define LEN_CHECK_SUM_WORD8     4
+#define MAX_NUM_LAYERS         10
+
+
+/****************************************************************************
+ * UpdatePayloadSizeLimit()
+ *
+ * Call this function to update the limit on the payload size. The limit on
+ * payload size might change i) if a user directly changes the limit by
+ * calling xxx_setMaxPayloadSize() or xxx_setMaxRate(), or ii) indirectly
+ * when bandwidth is changing. The latter might be the result of bandwidth
+ * adaptation, or direct change of the bottleneck in instantaneous mode.
+ *
+ * This function takes the current overall limit on payload, and translates it
+ * to the limits on lower and upper-band. If the codec is in wideband mode
+ * then the overall limit and the limit on the lower-band is the same.
+ * Otherwise, a fraction of the limit should be allocated to lower-band
+ * leaving some room for the upper-band bit-stream. That is why an update
+ * of limit is required every time that the bandwidth is changing.
+ *
+ */
+static void UpdatePayloadSizeLimit(
+				   ISACMainStruct *instISAC)
+{
+  WebRtc_Word16 lim30MsPayloadBytes;
+  WebRtc_Word16 lim60MsPayloadBytes;
+
+  /* 30 ms limit: the tighter of the user-set payload cap and the max rate. */
+  lim30MsPayloadBytes = WEBRTC_SPL_MIN(
+				       (instISAC->maxPayloadSizeBytes),
+				       (instISAC->maxRateBytesPer30Ms));
+
+  /* 60 ms limit: max rate doubled (two 30 ms intervals), still capped by
+     the per-payload limit. */
+  lim60MsPayloadBytes = WEBRTC_SPL_MIN(
+				       (instISAC->maxPayloadSizeBytes),
+				       (instISAC->maxRateBytesPer30Ms << 1));
+
+  // The only time that iSAC will have 60 ms
+  // frame-size is when operating in wideband so
+  // there is no upper-band bit-stream
+
+  if(instISAC->bandwidthKHz == isac8kHz)
+    {
+      // at 8 kHz there is no upper-band bit-stream
+      // therefore the lower-band limit is as the overall
+      // limit.
+      instISAC->instLB.ISACencLB_obj.payloadLimitBytes60 =
+        lim60MsPayloadBytes;
+      instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 =
+        lim30MsPayloadBytes;
+    }
+  else
+    {
+      // when in super-wideband, we only have 30 ms frames
+      // Do a rate allocation for the given limit.
+      if(lim30MsPayloadBytes > 250)
+	{
+	  // 4/5 to lower-band the rest for upper-band
+	  instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 =
+	    (lim30MsPayloadBytes << 2) / 5;
+	}
+      else if(lim30MsPayloadBytes > 200)
+	{
+	  // for the interval of 200 to 250 the share of
+	  // upper-band linearly grows from 20 to 50;
+	  instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 =
+	    (lim30MsPayloadBytes << 1) / 5 + 100;
+	}
+      else
+	{
+	  // allocate only 20 for upper-band
+	  instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 =
+	    lim30MsPayloadBytes - 20;
+	}
+      instISAC->instUB.ISACencUB_obj.maxPayloadSizeBytes =
+        lim30MsPayloadBytes;
+    }
+}
+
+
+/****************************************************************************
+ * UpdateBottleneck()
+ *
+ * This function updates the bottleneck only if the codec is operating in
+ * channel-adaptive mode. Furthermore, as the update of bottleneck might
+ * result in an update of bandwidth, the bottleneck should be
+ * updated just right before the first 10ms of a frame is pushed into encoder.
+ *
+ */
+static void UpdateBottleneck(
+			     ISACMainStruct *instISAC)
+{
+  // read the bottleneck from bandwidth estimator for the
+  // first 10 ms audio. This way, if there is a change
+  // in bandwidth upper and lower-band will be in sync.
+  // (codingMode == 0 means channel-adaptive mode; buffer_index == 0 and
+  // frame_nb == 0 identify the very start of a frame.)
+  if((instISAC->codingMode == 0) &&
+     (instISAC->instLB.ISACencLB_obj.buffer_index == 0) &&
+     (instISAC->instLB.ISACencLB_obj.frame_nb == 0))
+    {
+      WebRtc_Word32 bottleneck;
+      WebRtcIsac_GetUplinkBandwidth(&(instISAC->bwestimator_obj),
+				    &bottleneck);
+
+      // Adding hysteresis when increasing signal bandwidth
+      if((instISAC->bandwidthKHz == isac8kHz)
+	 && (bottleneck > 37000)
+	 && (bottleneck < 41000))
+	{
+	  bottleneck = 37000;
+	}
+
+      // switching from 12 kHz to 16 kHz is not allowed at this revision
+      // If we let this happen, we have to take care of buffer_index and
+      // the last LPC vector.
+      if((instISAC->bandwidthKHz != isac16kHz) &&
+	 (bottleneck > 46000))
+	{
+	  bottleneck = 46000;
+	}
+
+      // we might need a rate allocation.
+      if(instISAC->encoderSamplingRateKHz == kIsacWideband)
+	{
+	  // wideband is the only choice we have here.
+	  instISAC->instLB.ISACencLB_obj.bottleneck =
+	    (bottleneck > 32000)? 32000:bottleneck;
+	  instISAC->bandwidthKHz = isac8kHz;
+	}
+      else
+	{
+	  // do the rate-allocation and get the new bandwidth.
+	  enum ISACBandwidth bandwidth;
+	  WebRtcIsac_RateAllocation(bottleneck,
+				    &(instISAC->instLB.ISACencLB_obj.bottleneck),
+				    &(instISAC->instUB.ISACencUB_obj.bottleneck),
+				    &bandwidth);
+	  if(bandwidth != isac8kHz)
+	    {
+	      /* super-wideband uses 30 ms (480-sample) lower-band frames */
+	      instISAC->instLB.ISACencLB_obj.new_framelength = 480;
+	    }
+	  if(bandwidth != instISAC->bandwidthKHz)
+	    {
+	      // bandwidth is changing.
+	      instISAC->bandwidthKHz = bandwidth;
+	      UpdatePayloadSizeLimit(instISAC);
+	      if(bandwidth == isac12kHz)
+		{
+		  instISAC->instLB.ISACencLB_obj.buffer_index = 0;
+		}
+	      // currently we don't let the bandwidth switch to 16 kHz
+	      // if in adaptive mode. If we let this happen, we have to take
+	      // care of buffer_index and the last LPC vector.
+	    }
+	}
+    }
+}
+
+
+/****************************************************************************
+ * GetSendBandwidthInfo()
+ *
+ * This is called to get the bandwidth info. This info is the bandwidth and
+ * the jitter of the 'there-to-here' channel, estimated 'here.' This info
+ * is signaled in an in-band fashion to the other side.
+ *
+ * The call to the bandwidth estimator triggers a recursive averaging which
+ * has to be synchronized between encoder & decoder, therefore, the call to
+ * BWE should be once per packet. As the BWE info is inserted into bit-stream
+ * we need a valid info right before the encodeLB function is going to
+ * generate a bit-stream. That is when the lower-band buffer already has 20ms
+ * of audio, and the 3rd block of 10ms is going to be injected into encoder.
+ *
+ * Inputs:
+ *         - instISAC          : iSAC instance.
+ *
+ * Outputs:
+ *         - bandwidthIndex    : an index which has to be encoded in
+ *                               lower-band bit-stream, indicating the
+ *                               bandwidth of there-to-here channel.
+ *         - jitterInfo        : this indicates if the jitter is high
+ *                               or low and it is encoded in upper-band
+ *                               bit-stream.
+ *
+ * Note: outputs are written only when the 20 ms / first-subframe condition
+ * below holds; otherwise they are left untouched.
+ */
+static void GetSendBandwidthInfo(
+				 ISACMainStruct* instISAC,
+				 WebRtc_Word16*    bandwidthIndex,
+				 WebRtc_Word16*    jitterInfo)
+{
+  if((instISAC->instLB.ISACencLB_obj.buffer_index ==
+      (FRAMESAMPLES_10ms << 1)) &&
+     (instISAC->instLB.ISACencLB_obj.frame_nb == 0))
+    {
+      /* bandwidth estimation and coding */
+      WebRtcIsac_GetDownlinkBwJitIndexImpl(&(instISAC->bwestimator_obj),
+				       bandwidthIndex, jitterInfo, instISAC->decoderSamplingRateKHz);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_AssignSize(...)
+ *
+ * This function returns the size of the ISAC instance, so that the instance
+ * can be created outside iSAC.
+ *
+ * Output:
+ *        - sizeinbytes       : number of bytes needed to allocate for the
+ *                              instance.
+ *
+ * Return value               : 0 - Ok
+ *                             -1 - Error
+ */
+WebRtc_Word16 WebRtcIsac_AssignSize(
+				   int *sizeInBytes)
+{
+  /* sizeof(struct) * 2 / sizeof(WebRtc_Word16) equals sizeof(struct) in
+     bytes when WebRtc_Word16 is a 2-byte type, which the name implies —
+     the expression states the size in Word16-pair units explicitly. */
+  *sizeInBytes = sizeof(ISACMainStruct) * 2 / sizeof(WebRtc_Word16);
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_Assign(...)
+ *
+ * This function assigns the memory already created to the ISAC instance.
+ *
+ * Input:
+ *        - ISAC_main_inst    : address of the pointer to the coder instance.
+ *        - instISAC_Addr     : the already allocated memory, where we put the
+ *                              iSAC struct
+ *
+ * Return value               : 0 - Ok
+ *                             -1 - Error (instISAC_Addr was NULL)
+ */
+WebRtc_Word16 WebRtcIsac_Assign(
+			       ISACStruct** ISAC_main_inst,
+			       void*        instISAC_Addr)
+{
+  if(instISAC_Addr != NULL)
+    {
+      ISACMainStruct* instISAC = (ISACMainStruct*)instISAC_Addr;
+      instISAC->errorCode = 0;
+      instISAC->initFlag = 0; /* neither encoder nor decoder initialized yet */
+
+      // Assign the address
+      *ISAC_main_inst = (ISACStruct*)instISAC_Addr;
+
+      // Default is wideband.
+      instISAC->encoderSamplingRateKHz = kIsacWideband;
+      instISAC->decoderSamplingRateKHz = kIsacWideband;
+      instISAC->bandwidthKHz           = isac8kHz;
+      return 0;
+    }
+  else
+    {
+      return -1;
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_Create(...)
+ *
+ * This function creates an ISAC instance, which will contain the state
+ * information for one coding/decoding channel.
+ *
+ * Input:
+ *        - ISAC_main_inst    : address of the pointer to the coder instance.
+ *
+ * Return value               : 0 - Ok
+ *                             -1 - Error
+ */
+WebRtc_Word16 WebRtcIsac_Create(
+			       ISACStruct** ISAC_main_inst)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)WEBRTC_SPL_VNEW(ISACMainStruct, 1);
+  *ISAC_main_inst = (ISACStruct*)instISAC;
+  if(*ISAC_main_inst != NULL)
+    {
+      instISAC->errorCode = 0;
+      instISAC->initFlag = 0;
+      // Default is wideband
+      instISAC->bandwidthKHz           = isac8kHz;
+      instISAC->encoderSamplingRateKHz = kIsacWideband;
+      instISAC->decoderSamplingRateKHz = kIsacWideband;
+      return 0;
+    }
+  else
+    {
+      return -1;
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_Free(...)
+ *
+ * This function frees the ISAC instance created at the beginning.
+ *
+ * Input:
+ *        - ISAC_main_inst    : a ISAC instance.
+ *
+ * Return value               : 0 - Ok
+ *                             -1 - Error
+ */
+WebRtc_Word16 WebRtcIsac_Free(
+			     ISACStruct* ISAC_main_inst)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+  WEBRTC_SPL_FREE(instISAC);
+  return 0;
+}
+
+
+/****************************************************************************
+ * EncoderInitLb(...) - internal function for initialization of
+ *                                Lower Band
+ * EncoderInitUb(...) - internal function for initialization of
+ *                                Upper Band
+ * WebRtcIsac_EncoderInit(...) - API function
+ *
+ * This function initializes a ISAC instance prior to the encoder calls.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - CodingMode        : 0 -> Bit rate and frame length are automatically
+ *                                 adjusted to available bandwidth on
+ *                                 transmission channel, applicable just to
+ *                                 wideband mode.
+ *                              1 -> User sets a frame length and a target bit
+ *                                 rate which is taken as the maximum
+ *                                 short-term average bit rate.
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+static WebRtc_Word16 EncoderInitLb(
+				   ISACLBStruct*             instLB,
+				   WebRtc_Word16               codingMode,
+				   enum IsacSamplingRate sampRate)
+{
+  WebRtc_Word16 statusInit = 0;
+  int k;
+
+  /* Init stream vector to zero */
+  for(k=0; k < STREAM_SIZE_MAX_60; k++)
+    {
+      instLB->ISACencLB_obj.bitstr_obj.stream[k] = 0;
+    }
+
+  if((codingMode == 1) || (sampRate == kIsacSuperWideband))
+    {
+      // 30 ms frame-size if either in super-wideband or
+      // instanteneous mode (I-mode)
+      instLB->ISACencLB_obj.new_framelength = 480;
+    }
+  else
+    {
+      instLB->ISACencLB_obj.new_framelength = INITIAL_FRAMESAMPLES;
+    }
+
+  WebRtcIsac_InitMasking(&instLB->ISACencLB_obj.maskfiltstr_obj);
+  WebRtcIsac_InitPreFilterbank(&instLB->ISACencLB_obj.prefiltbankstr_obj);
+  WebRtcIsac_InitPitchFilter(&instLB->ISACencLB_obj.pitchfiltstr_obj);
+  WebRtcIsac_InitPitchAnalysis(
+			       &instLB->ISACencLB_obj.pitchanalysisstr_obj);
+
+
+  instLB->ISACencLB_obj.buffer_index         = 0;
+  instLB->ISACencLB_obj.frame_nb             = 0;
+  /* default for I-mode */
+  instLB->ISACencLB_obj.bottleneck           = 32000;
+  instLB->ISACencLB_obj.current_framesamples = 0;
+  instLB->ISACencLB_obj.s2nr                 = 0;
+  instLB->ISACencLB_obj.payloadLimitBytes30  = STREAM_SIZE_MAX_30;
+  instLB->ISACencLB_obj.payloadLimitBytes60  = STREAM_SIZE_MAX_60;
+  instLB->ISACencLB_obj.maxPayloadBytes      = STREAM_SIZE_MAX_60;
+  instLB->ISACencLB_obj.maxRateInBytes       = STREAM_SIZE_MAX_30;
+  instLB->ISACencLB_obj.enforceFrameSize     = 0;
+  /* invalid value prevents getRedPayload to
+     run before encoder is called */
+  instLB->ISACencLB_obj.lastBWIdx            = -1;
+  return statusInit;
+}
+
+static WebRtc_Word16 EncoderInitUb(
+				   ISACUBStruct* instUB,
+				   WebRtc_Word16   bandwidth)
+{
+  WebRtc_Word16 statusInit = 0;
+  int k;
+
+  /* Init stream vector to zero */
+  for(k = 0; k < STREAM_SIZE_MAX_60; k++)
+    {
+      instUB->ISACencUB_obj.bitstr_obj.stream[k] = 0;
+    }
+
+  WebRtcIsac_InitMasking(&instUB->ISACencUB_obj.maskfiltstr_obj);
+  WebRtcIsac_InitPreFilterbank(&instUB->ISACencUB_obj.prefiltbankstr_obj);
+
+  if(bandwidth == isac16kHz)
+    {
+      instUB->ISACencUB_obj.buffer_index = LB_TOTAL_DELAY_SAMPLES;
+    }
+  else
+    {
+      instUB->ISACencUB_obj.buffer_index        = 0;
+    }
+  /* default for I-mode */
+  instUB->ISACencUB_obj.bottleneck            = 32000;
+  // These store the limits for the wideband + super-wideband bit-stream.
+  instUB->ISACencUB_obj.maxPayloadSizeBytes    = STREAM_SIZE_MAX_30 << 1;
+  // This has to be updated after each lower-band encoding to guarantee
+  // a correct payload-limitation.
+  instUB->ISACencUB_obj.numBytesUsed         = 0;
+  memset(instUB->ISACencUB_obj.data_buffer_float, 0,
+         (MAX_FRAMESAMPLES + LB_TOTAL_DELAY_SAMPLES) * sizeof(float));
+
+  memcpy(&(instUB->ISACencUB_obj.lastLPCVec),
+         WebRtcIsac_kMeanLarUb16, sizeof(double) * UB_LPC_ORDER);
+
+  return statusInit;
+}
+
+
+WebRtc_Word16 WebRtcIsac_EncoderInit(
+				    ISACStruct* ISAC_main_inst,
+				    WebRtc_Word16 codingMode)
+{
+  ISACMainStruct *instISAC;
+  WebRtc_Word16 status;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  if((codingMode != 0) && (codingMode != 1))
+    {
+      instISAC->errorCode = ISAC_DISALLOWED_CODING_MODE;
+      return -1;
+    }
+  // default bottleneck
+  instISAC->bottleneck = MAX_ISAC_BW;
+
+  if(instISAC->encoderSamplingRateKHz == kIsacWideband)
+    {
+      instISAC->bandwidthKHz = isac8kHz;
+      instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX_60;
+      instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX_30;
+    }
+  else
+    {
+      instISAC->bandwidthKHz = isac16kHz;
+      instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX;
+      instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX;
+    }
+
+  // Channel-adaptive = 0; Instantaneous (Channel-independent) = 1;
+  instISAC->codingMode = codingMode;
+
+  WebRtcIsac_InitBandwidthEstimator(&instISAC->bwestimator_obj,
+                                    instISAC->encoderSamplingRateKHz,
+                                    instISAC->decoderSamplingRateKHz);
+
+  WebRtcIsac_InitRateModel(&instISAC->rate_data_obj);
+  /* default for I-mode */
+  instISAC->MaxDelay = 10.0;
+
+  status = EncoderInitLb(&instISAC->instLB, codingMode,
+			 instISAC->encoderSamplingRateKHz);
+  if(status < 0)
+    {
+      instISAC->errorCode = -status;
+      return -1;
+    }
+
+  if(instISAC->encoderSamplingRateKHz == kIsacSuperWideband)
+    {
+      // Initialize encoder filter-bank.
+      memset(instISAC->analysisFBState1, 0,
+	     FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+      memset(instISAC->analysisFBState2, 0,
+	     FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+
+      status = EncoderInitUb(&(instISAC->instUB),
+			     instISAC->bandwidthKHz);
+      if(status < 0)
+	{
+	  instISAC->errorCode = -status;
+	  return -1;
+	}
+    }
+  // Initializtion is successful, set the flag
+  instISAC->initFlag |= BIT_MASK_ENC_INIT;
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_Encode(...)
+ *
+ * This function encodes 10ms frame(s) and inserts it into a package.
+ * Input speech length has to be 160 samples (10ms). The encoder buffers those
+ * 10ms frames until it reaches the chosen Framesize (480 or 960 samples
+ * corresponding to 30 or 60 ms frames), and then proceeds to the encoding.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - speechIn          : input speech vector.
+ *
+ * Output:
+ *        - encoded           : the encoded data vector
+ *
+ * Return value:
+ *                            : >0 - Length (in bytes) of coded data
+ *                            :  0 - The buffer didn't reach the chosen
+ *                                  frameSize so it keeps buffering speech
+ *                                 samples.
+ *                            : -1 - Error
+ */
WebRtc_Word16 WebRtcIsac_Encode(
			       ISACStruct*        ISAC_main_inst,
			       const WebRtc_Word16* speechIn,
			       WebRtc_Word16*       encoded)
{
  ISACMainStruct* instISAC;
  ISACLBStruct*   instLB;
  ISACUBStruct*   instUB;

  float        inFrame[FRAMESAMPLES_10ms];
  WebRtc_Word16  speechInLB[FRAMESAMPLES_10ms];
  WebRtc_Word16  speechInUB[FRAMESAMPLES_10ms];
  WebRtc_Word16  streamLenLB = 0;
  WebRtc_Word16  streamLenUB = 0;
  WebRtc_Word16  streamLen = 0;
  WebRtc_Word16  k = 0;
  /* The payload is assembled byte-by-byte; view the output as bytes. */
  WebRtc_UWord8* ptrEncodedUW8 = (WebRtc_UWord8*)encoded;
  int          garbageLen = 0;
  WebRtc_Word32  bottleneck = 0;
  WebRtc_Word16  bottleneckIdx = 0;
  WebRtc_Word16  jitterInfo = 0;

  instISAC = (ISACMainStruct*)ISAC_main_inst;
  instLB = &(instISAC->instLB);
  instUB = &(instISAC->instUB);

  /* check if encoder initiated */
  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
     BIT_MASK_ENC_INIT)
    {
      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
      return -1;
    }

  if(instISAC->encoderSamplingRateKHz == kIsacSuperWideband)
    {
      /* In super-wideband mode, split the 10 ms input into a lower and
         an upper band with the analysis QMF before encoding. */
      WebRtcSpl_AnalysisQMF(speechIn, speechInLB, speechInUB,
			     instISAC->analysisFBState1, instISAC->analysisFBState2);

      /* convert from fixed to floating point */
      for(k = 0; k < FRAMESAMPLES_10ms; k++)
	{
	  inFrame[k] = (float)speechInLB[k];
	}
    }
  else
    {
      /* Wideband: encode the input directly as the lower band. */
      for(k = 0; k < FRAMESAMPLES_10ms; k++)
	{
	  inFrame[k] = (float) speechIn[k];
	}
    }

  /* add some noise to avoid denormal numbers */
  inFrame[0] += (float)1.23455334e-3;
  inFrame[1] -= (float)2.04324239e-3;
  inFrame[2] += (float)1.90854954e-3;
  inFrame[9] += (float)1.84854878e-3;


  // This function will update the bottleneck if required
  UpdateBottleneck(instISAC);

  // Get the bandwidth information which has to be sent to the other side
  GetSendBandwidthInfo(instISAC, &bottleneckIdx, &jitterInfo);

  //
  // ENCODE LOWER-BAND
  //
  streamLenLB = WebRtcIsac_EncodeLb(inFrame, &instLB->ISACencLB_obj,
                                    instISAC->codingMode, bottleneckIdx);

  if(streamLenLB < 0)
    {
      return -1;
    }

  if(instISAC->encoderSamplingRateKHz == kIsacSuperWideband)
    {
      instUB = &(instISAC->instUB);

      // convert to float
      for(k = 0; k < FRAMESAMPLES_10ms; k++)
	{
	  inFrame[k] = (float) speechInUB[k];
	}

      /* add some noise to avoid denormal numbers */
      inFrame[0] += (float)1.23455334e-3;
      inFrame[1] -= (float)2.04324239e-3;
      inFrame[2] += (float)1.90854954e-3;
      inFrame[9] += (float)1.84854878e-3;

      // Tell to upper-band the number of bytes used so far.
      // This is for payload limitation.
      instUB->ISACencUB_obj.numBytesUsed = streamLenLB + 1 +
        LEN_CHECK_SUM_WORD8;

      //
      // ENCODE UPPER-BAND
      //
      switch(instISAC->bandwidthKHz)
	{
	case isac12kHz:
	  {
	    streamLenUB = WebRtcIsac_EncodeUb12(inFrame,
						&instUB->ISACencUB_obj,
						jitterInfo);
	    break;
	  }
	case isac16kHz:
	  {
	    streamLenUB = WebRtcIsac_EncodeUb16(inFrame,
						&instUB->ISACencUB_obj,
						jitterInfo);
	    break;
	  }
	case isac8kHz:
	  {
	    // No upper-band bit-stream in 0-8 kHz operation.
	    streamLenUB = 0;
	    break;
	  }
	}

      if((streamLenUB < 0) &&
	 (streamLenUB != -ISAC_PAYLOAD_LARGER_THAN_LIMIT))
	{
	  // an error has happened but this is not the error due to a
	  // bit-stream larger than the limit
	  return -1;
	}

      if(streamLenLB == 0)
	{
	  // Lower band is still buffering 10 ms blocks; no packet yet.
	  return 0;
	}

      // One byte is allocated for the length. For compatibility with
      // older decoders, the upper-band bit-stream length plus one byte
      // for size and LEN_CHECK_SUM_WORD8 for the checksum must be less
      // than or equal to 255.
      if((streamLenUB > (255 - (LEN_CHECK_SUM_WORD8 + 1))) ||
	 (streamLenUB == -ISAC_PAYLOAD_LARGER_THAN_LIMIT))
	{
	  // we have got a too long bit-stream we skip the upper-band
	  // bit-stream for this frame.
	  streamLenUB = 0;
	}

      // Payload layout: [LB stream][UB length byte][UB stream][checksum].
      memcpy(ptrEncodedUW8, instLB->ISACencLB_obj.bitstr_obj.stream,
	     streamLenLB);
      streamLen = streamLenLB;
      if(streamLenUB > 0)
	{
	  ptrEncodedUW8[streamLenLB] = (WebRtc_UWord8)(streamLenUB + 1 +
						       LEN_CHECK_SUM_WORD8);
	  memcpy(&ptrEncodedUW8[streamLenLB + 1],
		 instUB->ISACencUB_obj.bitstr_obj.stream, streamLenUB);
	  streamLen += ptrEncodedUW8[streamLenLB];
	}
      else
	{
	  // A zero length byte signals "no upper-band layer".
	  ptrEncodedUW8[streamLenLB] = 0;
	}
    }
  else
    {
      if(streamLenLB == 0)
	{
	  return 0;
	}
      memcpy(ptrEncodedUW8, instLB->ISACencLB_obj.bitstr_obj.stream,
	     streamLenLB);
      streamLenUB = 0;
      streamLen = streamLenLB;
    }

  // Add Garbage if required.
  WebRtcIsac_GetUplinkBandwidth(&instISAC->bwestimator_obj, &bottleneck);
  if(instISAC->codingMode == 0)
    {
      // Channel-adaptive mode: pad the packet up to the rate-model's
      // minimum size so the channel estimate stays meaningful.
      int          minBytes;
      int          limit;
      WebRtc_UWord8* ptrGarbage;

      instISAC->MaxDelay = (double)WebRtcIsac_GetUplinkMaxDelay(
								&instISAC->bwestimator_obj);

      /* update rate model and get minimum number of bytes in this packet */
      minBytes = WebRtcIsac_GetMinBytes(&(instISAC->rate_data_obj),
					streamLen, instISAC->instLB.ISACencLB_obj.current_framesamples,
					bottleneck, instISAC->MaxDelay, instISAC->bandwidthKHz);

      /* Make sure MinBytes does not exceed packet size limit */
      if(instISAC->bandwidthKHz == isac8kHz)
	{
	  if(instLB->ISACencLB_obj.current_framesamples == FRAMESAMPLES)
	    {
	      limit = instLB->ISACencLB_obj.payloadLimitBytes30;
	    }
	  else
	    {
	      limit = instLB->ISACencLB_obj.payloadLimitBytes60;
	    }
	}
      else
	{
	  limit = instUB->ISACencUB_obj.maxPayloadSizeBytes;
	}
      minBytes = (minBytes > limit)? limit:minBytes;

      /* Make sure we don't allow more than 255 bytes of garbage data.
	 We store the length of the garbage data in 8 bits in the bitstream,
	 255 is the max garbage length we can signal using 8 bits. */
      if((instISAC->bandwidthKHz == isac8kHz) ||
	 (streamLenUB == 0))
	{
	  ptrGarbage = &ptrEncodedUW8[streamLenLB];
	  limit = streamLen + 255;
	}
      else
	{
	  ptrGarbage = &ptrEncodedUW8[streamLenLB + 1 + streamLenUB];
	  limit = streamLen + (255 - ptrEncodedUW8[streamLenLB]);
	}
      minBytes = (minBytes > limit)? limit:minBytes;

      garbageLen = (minBytes > streamLen)? (minBytes - streamLen):0;

      /* Save data for creation of multiple bitstreams */
      //ISACencLB_obj->SaveEnc_obj.minBytes = MinBytes;

      /* if bitstream is too short, add garbage at the end */
      if(garbageLen > 0)
	{
	  // The garbage bytes are filler only; their values carry no
	  // information for the decoder.
	  for(k = 0; k < garbageLen; k++)
	    {
	      ptrGarbage[k] = (WebRtc_UWord8)(rand() & 0xFF);
	    }

	  // for a correct length of the upper-band bit-stream together
	  // with the garbage. Garbage is embedded in upper-band bit-stream.
	  //    That is the only way to preserve backward compatibility.
	  if((instISAC->bandwidthKHz == isac8kHz) ||
	     (streamLenUB == 0))
	    {
	      ptrEncodedUW8[streamLenLB] = (WebRtc_UWord8)garbageLen;
	    }
	  else
	    {
	      ptrEncodedUW8[streamLenLB] += (WebRtc_UWord8)garbageLen;
	      // write the length of the garbage at the end of the upper-band
	      // bit-stream, if exists. This helps for sanity check.
	      ptrEncodedUW8[streamLenLB + 1 + streamLenUB] = (WebRtc_UWord8)garbageLen;

	    }

	  streamLen += garbageLen;
	}
    }
  else
    {
      /* I-mode: no padding, just update the rate model. */
      WebRtcIsac_UpdateRateModel(&instISAC->rate_data_obj, streamLen,
				 instISAC->instLB.ISACencLB_obj.current_framesamples, bottleneck);
      garbageLen = 0;
    }

  // Generate CRC if required.
  if((instISAC->bandwidthKHz != isac8kHz) &&
     (streamLenUB > 0))
    {
      WebRtc_UWord32 crc;

      // The checksum covers the upper-band bit-stream plus any garbage.
      WebRtcIsac_GetCrc((WebRtc_Word16*)(&(ptrEncodedUW8[streamLenLB + 1])),
			streamLenUB + garbageLen, &crc);
#ifndef WEBRTC_BIG_ENDIAN
      // Store the CRC big-endian, byte by byte.
      for(k = 0; k < LEN_CHECK_SUM_WORD8; k++)
	{
	  ptrEncodedUW8[streamLen - LEN_CHECK_SUM_WORD8 + k] =
	    (WebRtc_UWord8)((crc >> (24 - k * 8)) & 0xFF);
	}
#else
      memcpy(&ptrEncodedUW8[streamLenLB + streamLenUB + 1], &crc,
	     LEN_CHECK_SUM_WORD8);
#endif
    }

  return streamLen;
}
+
+
+/******************************************************************************
+ * WebRtcIsac_GetNewBitStream(...)
+ *
+ * This function returns encoded data, with the received bwe-index in the
+ * stream. If the rate is set to a value less than the bottleneck of the codec
+ * the new bitstream will be re-encoded with the given target rate.
+ * It should always return a complete packet, i.e. only called once
+ * even for 60 msec frames.
+ *
+ * NOTE 1! This function does not write in the ISACStruct, it is not allowed.
+ * NOTE 2! Rates larger than the bottleneck of the codec will be limited
+ *         to the current bottleneck.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - bweIndex          : Index of bandwidth estimate to put in new
+ *                              bitstream
+ *        - rate              : target rate of the transcoder is bits/sec.
+ *                              Valid values are the accepted rate in iSAC,
+ *                              i.e. 10000 to 56000.
+ *
+ * Output:
+ *        - encoded           : The encoded data vector
+ *
+ * Return value               : >0 - Length (in bytes) of coded data
+ *                              -1 - Error  or called in SWB mode
+ *                                 NOTE! No error code is written to
+ *                                 the struct since it is only allowed to read
+ *                                 the struct.
+ */
WebRtc_Word16 WebRtcIsac_GetNewBitStream(
					ISACStruct*  ISAC_main_inst,
					WebRtc_Word16  bweIndex,
					WebRtc_Word16  jitterInfo,
					WebRtc_Word32  rate,
					WebRtc_Word16* encoded,
					WebRtc_Word16  isRCU)
{
  Bitstr iSACBitStreamInst;   /* Local struct for bitstream handling */
  WebRtc_Word16 streamLenLB;
  WebRtc_Word16 streamLenUB;
  WebRtc_Word16 totalStreamLen;
  double gain2;
  double gain1;
  float scale;
  enum ISACBandwidth bandwidthKHz;
  double rateLB;
  double rateUB;
  WebRtc_Word32 currentBN;
  ISACMainStruct* instISAC;
  WebRtc_UWord8* encodedPtrUW8 = (WebRtc_UWord8*)encoded;
  WebRtc_UWord32 crc;
#ifndef WEBRTC_BIG_ENDIAN
  WebRtc_Word16  k;
#endif

  instISAC = (ISACMainStruct*)ISAC_main_inst;

  /* The encoder must be initialized. Note that no error code is written
     to the instance; this function must not modify the struct. */
  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
     BIT_MASK_ENC_INIT)
    {
      return -1;
    }

  // Get the bottleneck of this iSAC and limit the
  // given rate to the current bottleneck.
  WebRtcIsac_GetUplinkBw(ISAC_main_inst, &currentBN);
  if(rate > currentBN)
    {
      rate = currentBN;
    }

  /* Split the target rate between lower and upper band. */
  if(WebRtcIsac_RateAllocation(rate, &rateLB, &rateUB, &bandwidthKHz) < 0)
    {
      return -1;
    }

  // Cannot transcode from 16 kHz to 12 kHz
  if((bandwidthKHz == isac12kHz) &&
     (instISAC->bandwidthKHz == isac16kHz))
    {
      return -1;
    }

  // These gains are in dB
  // gain for the given rate.
  gain1 = WebRtcIsac_GetSnr(rateLB,
			    instISAC->instLB.ISACencLB_obj.current_framesamples);
  // gain of this iSAC
  gain2 = WebRtcIsac_GetSnr(
			    instISAC->instLB.ISACencLB_obj.bottleneck,
			    instISAC->instLB.ISACencLB_obj.current_framesamples);

  // scale is the ratio of two gains in normal domain.
  scale = (float)pow(10, (gain1 - gain2) / 20.0);
  // change the scale if this is a RCU bit-stream.
  scale = (isRCU)? (scale * RCU_TRANSCODING_SCALE):scale;

  /* Re-encode the stored lower-band data with the requested BWE index
     and the computed scaling. */
  streamLenLB = WebRtcIsac_EncodeStoredDataLb(
					      &instISAC->instLB.ISACencLB_obj.SaveEnc_obj, &iSACBitStreamInst,
					      bweIndex, scale);

  if(streamLenLB < 0)
    {
      return -1;
    }

  /* convert from bytes to WebRtc_Word16 */
  memcpy(encoded, iSACBitStreamInst.stream, streamLenLB);

  if(bandwidthKHz == isac8kHz)
    {
      /* Wideband: the lower-band bit-stream is the whole payload. */
      return streamLenLB;
    }

  totalStreamLen = streamLenLB;
  // super-wideband is always at 30ms.
  // These gains are in dB
  // gain for the given rate.
  gain1 = WebRtcIsac_GetSnr(rateUB, FRAMESAMPLES);
  // gain of this iSAC
  gain2 = WebRtcIsac_GetSnr(
			    instISAC->instUB.ISACencUB_obj.bottleneck, FRAMESAMPLES);

  // scale is the ratio of two gains in normal domain.
  scale = (float)pow(10, (gain1 - gain2) / 20.0);

  // change the scale if this is a RCU bit-stream.
  scale = (isRCU)? (scale * RCU_TRANSCODING_SCALE_UB):scale;

  switch(instISAC->bandwidthKHz)
    {
    case isac12kHz:
      {
        streamLenUB = WebRtcIsac_EncodeStoredDataUb12(
						      &(instISAC->instUB.ISACencUB_obj.SaveEnc_obj),
						      &iSACBitStreamInst, jitterInfo, scale);
        break;
      }
    case isac16kHz:
      {
        streamLenUB = WebRtcIsac_EncodeStoredDataUb16(
						      &(instISAC->instUB.ISACencUB_obj.SaveEnc_obj),
						      &iSACBitStreamInst, jitterInfo, scale);
        break;
      }
    default:
      // Called in wideband mode while a super-wideband layer was
      // requested; not supported.
      return -1;
    }

  if(streamLenUB < 0)
    {
      return -1;
    }

  /* The upper-band layer (stream + size byte + checksum) must fit in the
     8-bit length field; otherwise ship the lower band only. */
  if(streamLenUB + 1 + LEN_CHECK_SUM_WORD8 > 255)
    {
      return streamLenLB;
    }

  /* Payload layout: [LB stream][UB length byte][UB stream][checksum]. */
  totalStreamLen = streamLenLB + streamLenUB + 1 + LEN_CHECK_SUM_WORD8;
  encodedPtrUW8[streamLenLB] = streamLenUB + 1 + LEN_CHECK_SUM_WORD8;

  memcpy(&encodedPtrUW8[streamLenLB+1], iSACBitStreamInst.stream,
         streamLenUB);

  WebRtcIsac_GetCrc((WebRtc_Word16*)(&(encodedPtrUW8[streamLenLB + 1])),
                    streamLenUB, &crc);
#ifndef WEBRTC_BIG_ENDIAN
  /* Store the CRC big-endian, byte by byte. */
  for(k = 0; k < LEN_CHECK_SUM_WORD8; k++)
    {
      encodedPtrUW8[totalStreamLen - LEN_CHECK_SUM_WORD8 + k] =
        (WebRtc_UWord8)((crc >> (24 - k * 8)) & 0xFF);
    }
#else
  memcpy(&encodedPtrUW8[streamLenLB + streamLenUB + 1], &crc,
         LEN_CHECK_SUM_WORD8);
#endif


  return totalStreamLen;
}
+
+
+/****************************************************************************
+ * DecoderInitLb(...) - internal function for initialization of
+ *                                Lower Band
+ * DecoderInitUb(...) - internal function for initialization of
+ *                                Upper Band
+ * WebRtcIsac_DecoderInit(...) - API function
+ *
+ * This function initializes a ISAC instance prior to the decoder calls.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *
+ * Return value
+ *                            :  0 - Ok
+ *                              -1 - Error
+ */
+static WebRtc_Word16 DecoderInitLb(
+				   ISACLBStruct* instISAC)
+{
+  int i;
+  /* Init stream vector to zero */
+  for (i=0; i<STREAM_SIZE_MAX_60; i++)
+    {
+      instISAC->ISACdecLB_obj.bitstr_obj.stream[i] = 0;
+    }
+
+  WebRtcIsac_InitMasking(&instISAC->ISACdecLB_obj.maskfiltstr_obj);
+  WebRtcIsac_InitPostFilterbank(
+				&instISAC->ISACdecLB_obj.postfiltbankstr_obj);
+  WebRtcIsac_InitPitchFilter(&instISAC->ISACdecLB_obj.pitchfiltstr_obj);
+
+  return (0);
+}
+
+static WebRtc_Word16 DecoderInitUb(
+				   ISACUBStruct* instISAC)
+{
+  int i;
+  /* Init stream vector to zero */
+  for (i = 0; i < STREAM_SIZE_MAX_60; i++)
+    {
+      instISAC->ISACdecUB_obj.bitstr_obj.stream[i] = 0;
+    }
+
+  WebRtcIsac_InitMasking(&instISAC->ISACdecUB_obj.maskfiltstr_obj);
+  WebRtcIsac_InitPostFilterbank(
+				&instISAC->ISACdecUB_obj.postfiltbankstr_obj);
+  return (0);
+}
+
+WebRtc_Word16 WebRtcIsac_DecoderInit(
+				    ISACStruct *ISAC_main_inst)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  if(DecoderInitLb(&instISAC->instLB) < 0)
+    {
+      return -1;
+    }
+
+  if(instISAC->decoderSamplingRateKHz == kIsacSuperWideband)
+    {
+      memset(instISAC->synthesisFBState1, 0,
+	     FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+      memset(instISAC->synthesisFBState2, 0,
+	     FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+
+      if(DecoderInitUb(&(instISAC->instUB)) < 0)
+	{
+	  return -1;
+	}
+    }
+
+  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+     BIT_MASK_ENC_INIT)
+    {
+      WebRtcIsac_InitBandwidthEstimator(&instISAC->bwestimator_obj,
+					instISAC->encoderSamplingRateKHz,
+					instISAC->decoderSamplingRateKHz);
+    }
+
+  instISAC->initFlag |= BIT_MASK_DEC_INIT;
+
+  instISAC->resetFlag_8kHz = 0;
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_UpdateBwEstimate(...)
+ *
+ * This function updates the estimate of the bandwidth.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - encoded           : encoded ISAC frame(s).
+ *        - packet_size       : size of the packet.
+ *        - rtp_seq_number    : the RTP number of the packet.
+ *        - arr_ts            : the arrival time of the packet (from NetEq)
+ *                              in samples.
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcIsac_UpdateBwEstimate(
+				  ISACStruct*         ISAC_main_inst,
+				  const WebRtc_UWord16* encoded,
+				  WebRtc_Word32         packet_size,
+				  WebRtc_UWord16        rtp_seq_number,
+				  WebRtc_UWord32        send_ts,
+				  WebRtc_UWord32        arr_ts)
+{
+  ISACMainStruct *instISAC;
+  Bitstr streamdata;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  /* check if decoder initiated */
+  if((instISAC->initFlag & BIT_MASK_DEC_INIT) !=
+     BIT_MASK_DEC_INIT)
+    {
+      instISAC->errorCode = ISAC_DECODER_NOT_INITIATED;
+      return -1;
+    }
+
+  if(packet_size <= 0)
+    {
+      /* return error code if the packet length is null */
+      instISAC->errorCode = ISAC_EMPTY_PACKET;
+      return -1;
+    }
+
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+
+#ifndef WEBRTC_BIG_ENDIAN
+  for(k = 0; k < 10; k++)
+    {
+      streamdata.stream[k] = (WebRtc_UWord8) ((encoded[k>>1] >>
+					       ((k&1) << 3)) & 0xFF);
+    }
+#else
+  memcpy(streamdata.stream, encoded, 10);
+#endif
+
+  err = WebRtcIsac_EstimateBandwidth(&instISAC->bwestimator_obj, &streamdata,
+                                     packet_size, rtp_seq_number, send_ts, arr_ts,
+                                     instISAC->encoderSamplingRateKHz,
+                                     instISAC->decoderSamplingRateKHz);
+
+  if(err < 0)
+    {
+      /* return error code if something went wrong */
+      instISAC->errorCode = -err;
+      return -1;
+    }
+
+  return 0;
+}
+
+static WebRtc_Word16 Decode(
+			    ISACStruct*         ISAC_main_inst,
+			    const WebRtc_UWord16* encoded,
+			    WebRtc_Word16         lenEncodedBytes,
+			    WebRtc_Word16*        decoded,
+			    WebRtc_Word16*        speechType,
+			    WebRtc_Word16         isRCUPayload)
+{
+  /* number of samples (480 or 960), output from decoder
+     that were actually used in the encoder/decoder
+     (determined on the fly) */
+  ISACMainStruct* instISAC;
+  ISACUBDecStruct* decInstUB;
+  ISACLBDecStruct*    decInstLB;
+
+  WebRtc_Word16  numSamplesLB;
+  WebRtc_Word16  numSamplesUB;
+  WebRtc_Word16  speechIdx;
+  float        outFrame[MAX_FRAMESAMPLES];
+  WebRtc_Word16  outFrameLB[MAX_FRAMESAMPLES];
+  WebRtc_Word16  outFrameUB[MAX_FRAMESAMPLES];
+  WebRtc_Word16  numDecodedBytesLB;
+  WebRtc_Word16  numDecodedBytesUB;
+  WebRtc_Word16  lenEncodedLBBytes;
+  WebRtc_Word16  validChecksum = 1;
+  WebRtc_Word16  k;
+  WebRtc_UWord8* ptrEncodedUW8 = (WebRtc_UWord8*)encoded;
+  WebRtc_UWord16 numLayer;
+  WebRtc_Word16  totSizeBytes;
+  WebRtc_Word16  err;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+  decInstUB = &(instISAC->instUB.ISACdecUB_obj);
+  decInstLB = &(instISAC->instLB.ISACdecLB_obj);
+
+  /* check if decoder initiated */
+  if((instISAC->initFlag & BIT_MASK_DEC_INIT) !=
+     BIT_MASK_DEC_INIT)
+    {
+      instISAC->errorCode = ISAC_DECODER_NOT_INITIATED;
+      return -1;
+    }
+
+  if(lenEncodedBytes <= 0)
+    {
+      /* return error code if the packet length is null */
+      instISAC->errorCode = ISAC_EMPTY_PACKET;
+      return -1;
+    }
+
+  // the size of the rncoded lower-band is bounded by
+  // STREAM_SIZE_MAX,
+  // If a payload with the size larger than STREAM_SIZE_MAX
+  // is received, it is not considered erroneous.
+  lenEncodedLBBytes = (lenEncodedBytes > STREAM_SIZE_MAX)
+    ?  STREAM_SIZE_MAX:lenEncodedBytes;
+
+  // Copy to lower-band bit-stream structure
+  memcpy(instISAC->instLB.ISACdecLB_obj.bitstr_obj.stream, ptrEncodedUW8,
+         lenEncodedLBBytes);
+
+  // Regardless of that the current codec is setup to work in
+  // wideband or super-wideband, the decoding of the lower-band
+  // has to be performed.
+  numDecodedBytesLB = WebRtcIsac_DecodeLb(outFrame, decInstLB,
+                                          &numSamplesLB, isRCUPayload);
+
+  // Check for error
+  if((numDecodedBytesLB < 0) ||
+     (numDecodedBytesLB > lenEncodedLBBytes) ||
+     (numSamplesLB > MAX_FRAMESAMPLES))
+    {
+      instISAC->errorCode = ISAC_LENGTH_MISMATCH;
+      return -1;
+    }
+
+  // Error Check, we accept multi-layer bit-stream
+  // This will limit number of iterations of the
+  // while loop. Even withouut this the number of iterations
+  // is limited.
+  numLayer = 1;
+  totSizeBytes = numDecodedBytesLB;
+  while(totSizeBytes != lenEncodedBytes)
+    {
+      if((totSizeBytes > lenEncodedBytes) ||
+	 (ptrEncodedUW8[totSizeBytes] == 0) ||
+	 (numLayer > MAX_NUM_LAYERS))
+	{
+	  instISAC->errorCode = ISAC_LENGTH_MISMATCH;
+	  return -1;
+	}
+      totSizeBytes += ptrEncodedUW8[totSizeBytes];
+      numLayer++;
+    }
+
+  if(instISAC->decoderSamplingRateKHz == kIsacWideband)
+    {
+      for(k = 0; k < numSamplesLB; k++)
+	{
+	  if(outFrame[k] > 32767)
+	    {
+	      decoded[k] = 32767;
+	    }
+	  else if(outFrame[k] < -32768)
+	    {
+	      decoded[k] = -32768;
+	    }
+	  else
+	    {
+              decoded[k] = (WebRtc_Word16)WebRtcIsac_lrint(outFrame[k]);
+	    }
+	}
+      numSamplesUB = 0;
+    }
+  else
+    {
+      WebRtc_UWord32 crc;
+      // We don't accept larger than 30ms (480 samples at lower-band)
+      // frame-size.
+      for(k = 0; k < numSamplesLB; k++)
+	{
+	  if(outFrame[k] > 32767)
+	    {
+	      outFrameLB[k] = 32767;
+	    }
+	  else if(outFrame[k] < -32768)
+	    {
+	      outFrameLB[k] = -32768;
+	    }
+	  else
+	    {
+              outFrameLB[k] = (WebRtc_Word16)WebRtcIsac_lrint(outFrame[k]);
+	    }
+	}
+
+      //numSamplesUB = numSamplesLB;
+
+      // Check for possible error, and if upper-band stream exist.
+      if(numDecodedBytesLB == lenEncodedBytes)
+	{
+	  // Decoding was successful. No super-wideband bitstream
+	  // exists.
+	  numSamplesUB = numSamplesLB;
+	  memset(outFrameUB, 0, sizeof(WebRtc_Word16) *  numSamplesUB);
+
+	  // Prepare for the potential increase of signal bandwidth
+	  instISAC->resetFlag_8kHz = 2;
+	}
+      else
+	{
+	  // this includes the check sum and the bytes that stores the
+	  // length
+	  WebRtc_Word16 lenNextStream = ptrEncodedUW8[numDecodedBytesLB];
+
+	  // Is this garbage or valid super-wideband bit-stream?
+	  // Check if checksum is valid
+	  if(lenNextStream <= (LEN_CHECK_SUM_WORD8 + 1))
+	    {
+	      // such a small second layer cannot be super-wideband layer.
+	      // It must be a short garbage.
+	      validChecksum = 0;
+	    }
+	  else
+	    {
+	      // Run CRC to see if the checksum match.
+	      WebRtcIsac_GetCrc((WebRtc_Word16*)(
+						 &ptrEncodedUW8[numDecodedBytesLB + 1]),
+				lenNextStream - LEN_CHECK_SUM_WORD8 - 1, &crc);
+
+	      validChecksum = 1;
+	      for(k = 0; k < LEN_CHECK_SUM_WORD8; k++)
+		{
+		  validChecksum &= (((crc >> (24 - k * 8)) & 0xFF) ==
+				    ptrEncodedUW8[numDecodedBytesLB + lenNextStream -
+						  LEN_CHECK_SUM_WORD8 + k]);
+		}
+	    }
+
+	  if(!validChecksum)
+	    {
+	      // this is a garbage, we have received a wideband
+	      // bit-stream with garbage
+	      numSamplesUB = numSamplesLB;
+	      memset(outFrameUB, 0, sizeof(WebRtc_Word16) * numSamplesUB);
+	    }
+	  else
+	    {
+	      // A valid super-wideband biststream exists.
+	      enum ISACBandwidth bandwidthKHz;
+	      WebRtc_Word32 maxDelayBit;
+
+	      //instISAC->bwestimator_obj.incomingStreamSampFreq =
+	      //    kIsacSuperWideband;
+	      // If we have super-wideband bit-stream, we cannot
+	      // have 60 ms frame-size.
+	      if(numSamplesLB > FRAMESAMPLES)
+		{
+		  instISAC->errorCode = ISAC_LENGTH_MISMATCH;
+		  return -1;
+		}
+
+	      // the rest of the bit-stream contains the upper-band
+	      // bit-stream curently this is the only thing there,
+	      // however, we might add more layers.
+
+	      // Have to exclude one byte where the length is stored
+	      // and last 'LEN_CHECK_SUM_WORD8' bytes where the
+	      // checksum is stored.
+	      lenNextStream -= (LEN_CHECK_SUM_WORD8 + 1);
+
+	      memcpy(decInstUB->bitstr_obj.stream,
+		     &ptrEncodedUW8[numDecodedBytesLB + 1], lenNextStream);
+
+	      // THIS IS THE FIRST DECODING
+	      decInstUB->bitstr_obj.W_upper      = 0xFFFFFFFF;
+	      decInstUB->bitstr_obj.streamval    = 0;
+	      decInstUB->bitstr_obj.stream_index = 0;
+
+	      // Decode jitter infotmation
+	      err = WebRtcIsac_DecodeJitterInfo(&decInstUB->bitstr_obj,
+						&maxDelayBit);
+	      // error check
+	      if(err < 0)
+		{
+		  instISAC->errorCode = -err;
+		  return -1;
+		}
+
+	      // Update jitter info which is in the upper-band bit-stream
+	      // only if the encoder is in super-wideband. Otherwise,
+	      // the jitter info is already embeded in bandwidth index
+	      // and has been updated.
+	      if(instISAC->encoderSamplingRateKHz == kIsacSuperWideband)
+		{
+		  err = WebRtcIsac_UpdateUplinkJitter(
+						      &(instISAC->bwestimator_obj), maxDelayBit);
+		  if(err < 0)
+		    {
+		      instISAC->errorCode = -err;
+		      return -1;
+		    }
+		}
+
+	      // decode bandwidth information
+	      err = WebRtcIsac_DecodeBandwidth(&decInstUB->bitstr_obj,
+					       &bandwidthKHz);
+	      if(err < 0)
+		{
+		  instISAC->errorCode = -err;
+		  return -1;
+		}
+
+	      switch(bandwidthKHz)
+		{
+		case isac12kHz:
+		  {
+		    numDecodedBytesUB = WebRtcIsac_DecodeUb12(outFrame,
+							      decInstUB, isRCUPayload);
+
+		    // Hang-over for transient alleviation -
+		    // wait two frames to add the upper band going up from 8 kHz
+		    if (instISAC->resetFlag_8kHz > 0)
+		      {
+			if (instISAC->resetFlag_8kHz == 2)
+			  {
+			    // Silence first and a half frame
+			    memset(outFrame, 0, MAX_FRAMESAMPLES *
+				   sizeof(float));
+			  }
+			else
+			  {
+			    const float rampStep = 2.0f / MAX_FRAMESAMPLES;
+			    float rampVal = 0;
+			    memset(outFrame, 0, (MAX_FRAMESAMPLES>>1) *
+				   sizeof(float));
+
+			    // Ramp up second half of second frame
+			    for(k = MAX_FRAMESAMPLES/2; k < MAX_FRAMESAMPLES; k++)
+			      {
+				outFrame[k] *= rampVal;
+				rampVal += rampStep;
+			      }
+			  }
+			instISAC->resetFlag_8kHz -= 1;
+		      }
+
+		    break;
+		  }
+		case isac16kHz:
+		  {
+		    numDecodedBytesUB = WebRtcIsac_DecodeUb16(outFrame,
+							      decInstUB, isRCUPayload);
+		    break;
+		  }
+		default:
+		  return -1;
+		}
+
+	      // it might be less due to garbage.
+	      if((numDecodedBytesUB != lenNextStream) &&
+		 (numDecodedBytesUB != (lenNextStream - ptrEncodedUW8[
+								      numDecodedBytesLB + 1 + numDecodedBytesUB])))
+		{
+		  instISAC->errorCode = ISAC_LENGTH_MISMATCH;
+		  return -1;
+		}
+
+	      // If there is no error Upper-band always decodes
+	      // 30 ms (480 samples)
+	      numSamplesUB = FRAMESAMPLES;
+
+	      // Convert to W16
+	      for(k = 0; k < numSamplesUB; k++)
+		{
+		  if(outFrame[k] > 32767)
+		    {
+		      outFrameUB[k] = 32767;
+		    }
+		  else if(outFrame[k] < -32768)
+		    {
+		      outFrameUB[k] = -32768;
+		    }
+		  else
+		    {
+                      outFrameUB[k] = (WebRtc_Word16)WebRtcIsac_lrint(
+                          outFrame[k]);
+		    }
+		}
+	    }
+	}
+
+      speechIdx = 0;
+      while(speechIdx < numSamplesLB)
+	{
+	  WebRtcSpl_SynthesisQMF(&outFrameLB[speechIdx],
+				  &outFrameUB[speechIdx], &decoded[(speechIdx<<1)],
+				  instISAC->synthesisFBState1, instISAC->synthesisFBState2);
+
+	  speechIdx += FRAMESAMPLES_10ms;
+	}
+    }
+  *speechType = 0;
+  return (numSamplesLB + numSamplesUB);
+}
+
+
+
+
+
+
+
+/****************************************************************************
+ * WebRtcIsac_Decode(...)
+ *
+ * This function decodes a ISAC frame. Output speech length
+ * will be a multiple of 480 samples: 480 or 960 samples,
+ * depending on the  frameSize (30 or 60 ms).
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - encoded           : encoded ISAC frame(s)
+ *        - len               : bytes in encoded vector
+ *
+ * Output:
+ *        - decoded           : The decoded vector
+ *
+ * Return value               : >0 - number of samples in decoded vector
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcIsac_Decode(
+			       ISACStruct*         ISAC_main_inst,
+			       const WebRtc_UWord16* encoded,
+			       WebRtc_Word16         lenEncodedBytes,
+			       WebRtc_Word16*        decoded,
+			       WebRtc_Word16*        speechType)
+{
+  WebRtc_Word16 isRCUPayload = 0;
+  return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded,
+		speechType, isRCUPayload);
+}
+
+/****************************************************************************
+ * WebRtcIsac_DecodeRcu(...)
+ *
+ * This function decodes a redundant (RCU) iSAC frame. Function is called in
+ * NetEq with a stored RCU payload in case of packet loss. Output speech length
+ * will be a multiple of 480 samples: 480 or 960 samples,
+ * depending on the framesize (30 or 60 ms).
+ *
+ * Input:
+ *      - ISAC_main_inst     : ISAC instance.
+ *      - encoded            : encoded ISAC RCU frame(s)
+ *      - len                : bytes in encoded vector
+ *
+ * Output:
+ *      - decoded            : The decoded vector
+ *
+ * Return value              : >0 - number of samples in decoded vector
+ *                             -1 - Error
+ */
+
+
+
+WebRtc_Word16 WebRtcIsac_DecodeRcu(
+				  ISACStruct*         ISAC_main_inst,
+				  const WebRtc_UWord16* encoded,
+				  WebRtc_Word16         lenEncodedBytes,
+				  WebRtc_Word16*        decoded,
+				  WebRtc_Word16*        speechType)
+{
+  WebRtc_Word16 isRCUPayload = 1;
+  return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded,
+		speechType, isRCUPayload);
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_DecodePlc(...)
+ *
+ * This function conducts PLC for ISAC frame(s). Output speech length
+ * will be a multiple of 480 samples: 480 or 960 samples,
+ * depending on the  frameSize (30 or 60 ms).
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - noOfLostFrames    : Number of PLC frames to produce
+ *
+ * Output:
+ *        - decoded           : The decoded vector
+ *
+ * Return value               : >0 - number of samples in decoded PLC vector
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcIsac_DecodePlc(
+				  ISACStruct*         ISAC_main_inst,
+				  WebRtc_Word16*        decoded,
+				  WebRtc_Word16         noOfLostFrames)
+{
+  WebRtc_Word16 numSamples = 0;
+  ISACMainStruct* instISAC;
+
+
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  /* Limit number of frames to two = 60 msec. Otherwise we exceed data vectors */
+  if(noOfLostFrames > 2)
+    {
+      noOfLostFrames = 2;
+    }
+
+  /* Get the number of samples per frame */
+  switch(instISAC->decoderSamplingRateKHz)
+    {
+    case kIsacWideband:
+      {
+        numSamples = 480 * noOfLostFrames;
+        break;
+      }
+    case kIsacSuperWideband:
+      {
+        numSamples = 960 * noOfLostFrames;
+        break;
+      }
+    }
+
+  /* Set output samples to zero */
+  memset(decoded, 0, numSamples * sizeof(WebRtc_Word16));
+  return numSamples;
+}
+
+
+/****************************************************************************
+ * ControlLb(...) - Internal function for controlling the Lower Band
+ * ControlUb(...) - Internal function for controlling the Upper Band
+ * WebRtcIsac_Control(...) - API function
+ *
+ * This function sets the limit on the short-term average bit rate and the
+ * frame length. Should be used only in Instantaneous mode.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - rate              : limit on the short-term average bit rate,
+ *                              in bits/second (between 10000 and 32000)
+ *        -  frameSize         : number of milliseconds per frame (30 or 60)
+ *
+ * Return value               : 0  - ok
+ *                             -1 - Error
+ */
+static WebRtc_Word16 ControlLb(
+					  ISACLBStruct* instISAC,
+					  double        rate,
+					  WebRtc_Word16   frameSize)
+{
+  if((rate >= 10000) && (rate <= 32000))
+    {
+      instISAC->ISACencLB_obj.bottleneck = rate;
+    }
+  else
+    {
+      return -ISAC_DISALLOWED_BOTTLENECK;
+    }
+
+  if((frameSize == 30) ||  (frameSize == 60))
+    {
+      instISAC->ISACencLB_obj.new_framelength = (FS/1000) *  frameSize;
+    }
+  else
+    {
+      return -ISAC_DISALLOWED_FRAME_LENGTH;
+    }
+
+  return 0;
+}
+
+static WebRtc_Word16 ControlUb(
+					  ISACUBStruct* instISAC,
+					  double        rate)
+{
+  if((rate >= 10000) && (rate <= 32000))
+    {
+      instISAC->ISACencUB_obj.bottleneck = rate;
+    }
+  else
+    {
+      return -ISAC_DISALLOWED_BOTTLENECK;
+    }
+  return 0;
+}
+
/* Sets the target bottleneck (bits/sec) and frame length (ms) when iSAC
 * runs in instantaneous (channel-independent) mode. Splits the bottleneck
 * between lower and upper band, reconfigures both band encoders, and keeps
 * the upper-band data buffer in sync when the coded bandwidth grows beyond
 * 8 kHz. Returns 0 on success, -1 on error (instISAC->errorCode is set). */
WebRtc_Word16 WebRtcIsac_Control(
				ISACStruct* ISAC_main_inst,
				WebRtc_Word32 bottleneckBPS,
				WebRtc_Word16 frameSize)
{
  ISACMainStruct *instISAC;
  WebRtc_Word16 status;
  double rateLB;
  double rateUB;
  enum ISACBandwidth bandwidthKHz;


  /* Typecast pointer to real structure */
  instISAC = (ISACMainStruct*)ISAC_main_inst;

  /* This API is only valid in instantaneous mode; codingMode == 0 means
     channel-adaptive mode, where the bottleneck is adapted internally. */
  if(instISAC->codingMode == 0)
    {
      /* in adaptive mode */
      instISAC->errorCode = ISAC_MODE_MISMATCH;
      return -1;
    }

  /* check if encoder initiated */
  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
     BIT_MASK_ENC_INIT)
    {
      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
      return -1;
    }

  if(instISAC->encoderSamplingRateKHz == kIsacWideband)
    {
      // if the sampling rate is 16kHz then bandwidth should be 8kHz,
      // regardless of bottleneck: the whole (capped) rate goes to the
      // lower band and the upper band is unused.
      bandwidthKHz = isac8kHz;
      rateLB = (bottleneckBPS > 32000)? 32000:bottleneckBPS;
      rateUB = 0;
    }
  else
    {
      /* Super-wideband: split the overall bottleneck between lower and
         upper band and determine the coded bandwidth (8/12/16 kHz). */
      if(WebRtcIsac_RateAllocation(bottleneckBPS, &rateLB, &rateUB,
				   &bandwidthKHz) < 0)
	{
	  return -1;
	}
    }

  /* 60 ms frames are only allowed in super-wideband when the coded
     bandwidth stays at 8 kHz. */
  if((instISAC->encoderSamplingRateKHz == kIsacSuperWideband) &&
     (frameSize != 30)                                      &&
     (bandwidthKHz != isac8kHz))
    {
      // Cannot have 60 ms in super-wideband
      instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH;
      return -1;
    }

  /* ControlLb/ControlUb return negated error codes on failure. */
  status = ControlLb(&instISAC->instLB, rateLB, frameSize);
  if(status < 0)
    {
      instISAC->errorCode = -status;
      return -1;
    }
  if(bandwidthKHz != isac8kHz)
    {
      /* Upper-band encoder is only active above 8 kHz coded bandwidth. */
      status = ControlUb(&(instISAC->instUB), rateUB);
      if(status < 0)
	{
	  instISAC->errorCode = -status;
	  return -1;
	}
    }

  //
  // Check if bandwidth is changing from wideband to super-wideband
  // then we have to synch data buffer of lower & upper-band. Also
  // clean up the upper-band data buffer.
  //
  if((instISAC->bandwidthKHz == isac8kHz) &&
     (bandwidthKHz != isac8kHz))
    {
      memset(instISAC->instUB.ISACencUB_obj.data_buffer_float, 0,
	     sizeof(float) * (MAX_FRAMESAMPLES + LB_TOTAL_DELAY_SAMPLES));

      if(bandwidthKHz == isac12kHz)
	{
	  /* 12 kHz band: upper-band buffer index mirrors the lower band. */
	  instISAC->instUB.ISACencUB_obj.buffer_index =
	    instISAC->instLB.ISACencLB_obj.buffer_index;
	}
      else
	{
	  /* 16 kHz band: offset by the lower-band delay and seed the last
	     LPC vector with the 16 kHz mean LAR values. */
	  instISAC->instUB.ISACencUB_obj.buffer_index = LB_TOTAL_DELAY_SAMPLES +
	    instISAC->instLB.ISACencLB_obj.buffer_index;

	  memcpy(&(instISAC->instUB.ISACencUB_obj.lastLPCVec),
            WebRtcIsac_kMeanLarUb16, sizeof(double) * UB_LPC_ORDER);
	}
    }

  // update the payload limit if the bandwidth is changing.
  if(instISAC->bandwidthKHz != bandwidthKHz)
    {
      instISAC->bandwidthKHz = bandwidthKHz;
      UpdatePayloadSizeLimit(instISAC);
    }
  instISAC->bottleneck = bottleneckBPS;
  return 0;
}
+
+
+/****************************************************************************
+ * WebRtcIsac_ControlBwe(...)
+ *
+ * This function sets the initial values of bottleneck and frame-size if
+ * iSAC is used in channel-adaptive mode. Through this API, users can
+ * enforce a frame-size for all values of bottleneck. Then iSAC will not
+ * automatically change the frame-size.
+ *
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance.
+ *        - rateBPS           : initial value of bottleneck in bits/second
+ *                              10000 <= rateBPS <= 32000 is accepted
+ *                              For default bottleneck set rateBPS = 0
+ *        - frameSizeMs       : number of milliseconds per frame (30 or 60)
+ *        - enforceFrameSize  : 1 to enforce the given frame-size through out
+ *                              the adaptation process, 0 to let iSAC change
+ *                              the frame-size if required.
+ *
+ * Return value               : 0  - ok
+ *                             -1 - Error
+ */
+WebRtc_Word16 WebRtcIsac_ControlBwe(
+				   ISACStruct* ISAC_main_inst,
+				   WebRtc_Word32 bottleneckBPS,
+				   WebRtc_Word16 frameSizeMs,
+				   WebRtc_Word16 enforceFrameSize)
+{
+  ISACMainStruct *instISAC;
+  enum ISACBandwidth bandwidth;
+
+  /* Typecast pointer to real structure */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  /* check if encoder initiated */
+  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+     BIT_MASK_ENC_INIT)
+    {
+      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
+      return -1;
+    }
+
+  /* Check that we are in channel-adaptive mode, otherwise, return (-1) */
+  if(instISAC->codingMode != 0)
+    {
+      instISAC->errorCode = ISAC_MODE_MISMATCH;
+      return -1;
+    }
+  if((frameSizeMs != 30) &&
+     (instISAC->encoderSamplingRateKHz == kIsacSuperWideband))
+    {
+      return -1;
+    }
+
+  /* Set struct variable if enforceFrameSize is set. ISAC will then */
+  /* keep the chosen frame size. */
+  if((enforceFrameSize != 0) /*||
+                               (instISAC->samplingRateKHz == kIsacSuperWideband)*/)
+    {
+      instISAC->instLB.ISACencLB_obj.enforceFrameSize = 1;
+    }
+  else
+    {
+      instISAC->instLB.ISACencLB_obj.enforceFrameSize = 0;
+    }
+
+  /* Set initial rate, if value between 10000 and 32000,                */
+  /* if rateBPS is 0, keep the default initial bottleneck value (15000) */
+  if(bottleneckBPS != 0)
+    {
+      double rateLB;
+      double rateUB;
+      if(WebRtcIsac_RateAllocation(bottleneckBPS, &rateLB, &rateUB, &bandwidth) < 0)
+	{
+	  return -1;
+	}
+      instISAC->bwestimator_obj.send_bw_avg = (float)bottleneckBPS;
+      instISAC->bandwidthKHz = bandwidth;
+    }
+
+  /* Set initial  frameSize. If enforceFrameSize is set the frame size will
+     not change */
+  if(frameSizeMs != 0)
+    {
+      if((frameSizeMs  == 30) || (frameSizeMs == 60))
+	{
+	  instISAC->instLB.ISACencLB_obj.new_framelength = (FS/1000) *
+	    frameSizeMs;
+	  //instISAC->bwestimator_obj.rec_header_rate = ((float)HEADER_SIZE *
+	  //    8.0f * 1000.0f / (float)frameSizeMs);
+	}
+      else
+	{
+	  instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH;
+	  return -1;
+	}
+    }
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_GetDownLinkBwIndex(...)
+ *
+ * This function returns index representing the Bandwidth estimate from
+ * other side to this side.
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC struct
+ *
+ * Output:
+ *        - bweIndex         : Bandwidth estimate to transmit to other side.
+ *
+ */
/* Returns the receive-side (down-link) bandwidth-estimate index and jitter
 * information, to be transmitted back to the sending side.
 * Returns 0 on success, -1 if the decoder has not been initialized. */
WebRtc_Word16 WebRtcIsac_GetDownLinkBwIndex(
				   ISACStruct*  ISAC_main_inst,
				   WebRtc_Word16* bweIndex,
				   WebRtc_Word16* jitterInfo)
{
  ISACMainStruct *instISAC;

  /* typecast pointer to real structure */
  instISAC = (ISACMainStruct*)ISAC_main_inst;

  /* check if decoder initiated */
  if((instISAC->initFlag & BIT_MASK_DEC_INIT) !=
     BIT_MASK_DEC_INIT)
    {
      /* NOTE(review): the mask tests decoder initialization, but the error
         code reported is ISAC_ENCODER_NOT_INITIATED — looks like a
         copy-paste from the encoder checks. Confirm the intended error
         code against the iSAC error-code definitions. */
      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
      return -1;
    }

  /* Call function to get Bandwidth Estimate */
  WebRtcIsac_GetDownlinkBwJitIndexImpl(&(instISAC->bwestimator_obj),
                                   bweIndex, jitterInfo, instISAC->decoderSamplingRateKHz);
  return 0;
}
+
+
+/****************************************************************************
+ * WebRtcIsac_UpdateUplinkBw(...)
+ *
+ * This function takes an index representing the Bandwidth estimate from
+ * this side to other side and updates BWE.
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC struct
+ *        - rateIndex         : Bandwidth estimate from other side.
+ *
+ * Return value               : 0 - ok
+ *                             -1 - index out of range
+ */
+WebRtc_Word16 WebRtcIsac_UpdateUplinkBw(
+			       ISACStruct*   ISAC_main_inst,
+			       WebRtc_Word16   bweIndex)
+{
+  ISACMainStruct *instISAC;
+  WebRtc_Word16 returnVal;
+
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  /* check if encoder initiated */
+  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+     BIT_MASK_ENC_INIT)
+    {
+      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
+      return -1;
+    }
+
+  /* Call function to get Bandwidth Estimate */
+  returnVal = WebRtcIsac_UpdateUplinkBwImpl(
+					&(instISAC->bwestimator_obj), bweIndex,
+					instISAC->encoderSamplingRateKHz);
+
+  if(returnVal < 0)
+    {
+      instISAC->errorCode = -returnVal;
+      return -1;
+    }
+  else
+    {
+      return 0;
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_ReadBwIndex(...)
+ *
+ * This function returns the index of the Bandwidth estimate from the
+ * bitstream.
+ *
+ * Input:
+ *        - encoded           : Encoded bitstream
+ *
+ * Output:
+ *        - frameLength       : Length of frame in packet (in samples)
+ *        - bweIndex         : Bandwidth estimate in bitstream
+ *
+ */
+WebRtc_Word16 WebRtcIsac_ReadBwIndex(
+			       const WebRtc_Word16* encoded,
+			       WebRtc_Word16*       bweIndex)
+{
+  Bitstr streamdata;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+
+#ifndef WEBRTC_BIG_ENDIAN
+  for(k = 0; k < 10; k++)
+    {
+      streamdata.stream[k] = (WebRtc_UWord8) ((encoded[k>>1] >>
+					       ((k&1) << 3)) & 0xFF);
+    }
+#else
+  memcpy(streamdata.stream, encoded, 10);
+#endif
+
+  /* decode frame length */
+  err = WebRtcIsac_DecodeFrameLen(&streamdata, bweIndex);
+  if(err < 0)
+    {
+      return err;
+    }
+
+  /* decode BW estimation */
+  err = WebRtcIsac_DecodeSendBW(&streamdata, bweIndex);
+  if(err < 0)
+    {
+      return err;
+    }
+
+  return 0;
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_ReadFrameLen(...)
+ *
+ * This function returns the length of the frame represented in the packet.
+ *
+ * Input:
+ *        - encoded           : Encoded bitstream
+ *
+ * Output:
+ *        - frameLength       : Length of frame in packet (in samples)
+ *
+ */
+WebRtc_Word16 WebRtcIsac_ReadFrameLen(
+				    ISACStruct*        ISAC_main_inst,
+				    const WebRtc_Word16* encoded,
+				    WebRtc_Word16*       frameLength)
+{
+  Bitstr streamdata;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+  WebRtc_Word16 err;
+  ISACMainStruct* instISAC;
+
+  streamdata.W_upper = 0xFFFFFFFF;
+  streamdata.streamval = 0;
+  streamdata.stream_index = 0;
+
+#ifndef WEBRTC_BIG_ENDIAN
+  for (k=0; k<10; k++) {
+    streamdata.stream[k] = (WebRtc_UWord8) ((encoded[k>>1] >>
+                                             ((k&1) << 3)) & 0xFF);
+  }
+#else
+  memcpy(streamdata.stream, encoded, 10);
+#endif
+
+  /* decode frame length */
+  err = WebRtcIsac_DecodeFrameLen(&streamdata, frameLength);
+  if(err < 0) {
+    return -1;
+  }
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  if(instISAC->decoderSamplingRateKHz == kIsacSuperWideband)
+    {
+      // the decoded frame length indicates the number of samples in
+      // lower-band in this case, multiply by 2 to get the total number
+      // of samples.
+      *frameLength <<= 1;
+    }
+
+  return 0;
+}
+
+
+/*******************************************************************************
+ * WebRtcIsac_GetNewFrameLen(...)
+ *
+ * returns the frame length (in samples) of the next packet. In the case of
+ * channel-adaptive mode, iSAC decides on its frame length based on the
+ * estimated bottleneck; this allows a user to prepare for the next packet
+ * (at the encoder).
+ *
+ * The primary usage is in CE to make iSAC work in channel-adaptive mode
+ *
+ * Input:
+ *        - ISAC_main_inst     : iSAC struct
+ *
+ * Return Value                : frame length in samples
+ *
+ */
+WebRtc_Word16 WebRtcIsac_GetNewFrameLen(
+				       ISACStruct *ISAC_main_inst)
+{
+  ISACMainStruct *instISAC;
+
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  /* Return new frame length */
+  if(instISAC->encoderSamplingRateKHz == kIsacWideband)
+    {
+      return (instISAC->instLB.ISACencLB_obj.new_framelength);
+    }
+  else
+    {
+      return ((instISAC->instLB.ISACencLB_obj.new_framelength) << 1);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_GetErrorCode(...)
+ *
+ * This function can be used to check the error code of an iSAC instance.
+ * When a function returns -1, an error code will be set for that instance.
+ * The function below extracts the code of the last error that occurred in
+ * the specified instance.
+ *
+ * Input:
+ *        - ISAC_main_inst    : ISAC instance
+ *
+ * Return value               : Error code
+ */
+WebRtc_Word16 WebRtcIsac_GetErrorCode(
+				     ISACStruct *ISAC_main_inst)
+{
+  ISACMainStruct *instISAC;
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  return (instISAC->errorCode);
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_GetUplinkBw(...)
+ *
+ * This function outputs the target bottleneck of the codec. In
+ * channel-adaptive mode, the target bottleneck is specified through in-band
+ * signaling retrieved by the bandwidth estimator.
+ * In channel-independent, also called instantaneous mode, the target
+ * bottleneck is provided to the encoder by calling xxx_control(...) (if
+ * xxx_control is never called, the default value is used).
+ * Note that the output is the iSAC internal operating bottleneck, which might
+ * differ slightly from the one provided through xxx_control().
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *
+ * Output:
+ *        - *bottleneck       : bottleneck in bits/sec
+ *
+ * Return value               : -1 if error happens
+ *                               0 bit-rates computed correctly.
+ */
+WebRtc_Word16 WebRtcIsac_GetUplinkBw(
+				       ISACStruct*  ISAC_main_inst,
+				       WebRtc_Word32* bottleneck)
+{
+  ISACMainStruct* instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  if(instISAC->codingMode == 0)
+    {
+      // we are in adaptive mode then get the bottleneck from BWE
+      *bottleneck = (WebRtc_Word32)instISAC->bwestimator_obj.send_bw_avg;
+    }
+  else
+    {
+      *bottleneck = instISAC->bottleneck;
+    }
+
+  if((*bottleneck > 32000) && (*bottleneck < 38000))
+    {
+      *bottleneck = 32000;
+    }
+  else if((*bottleneck > 45000) && (*bottleneck < 50000))
+    {
+      *bottleneck = 45000;
+    }
+  else if(*bottleneck > 56000)
+    {
+      *bottleneck = 56000;
+    }
+
+  return 0;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_SetMaxPayloadSize(...)
+ *
+ * This function sets a limit for the maximum payload size of iSAC. The same
+ * value is used both for 30 and 60 ms packets. If the encoder sampling rate
+ * is 16 kHz the maximum payload size is between 120 and 400 bytes. If the
+ * encoder sampling rate is 32 kHz the maximum payload size is between 120
+ * and 600 bytes.
+ *
+ * ---------------
+ * IMPORTANT NOTES
+ * ---------------
+ * The size of a packet is limited to the minimum of 'max-payload-size' and
+ * 'max-rate.' For instance, let's assume the max-payload-size is set to
+ * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps
+ * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms
+ * frame-size. Then a packet with a frame-size of 30 ms is limited to 150,
+ * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to
+ * 170 bytes, i.e. min(170, 300).
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *        - maxPayloadBytes   : maximum size of the payload in bytes
+ *                              valid values are between 100 and 400 bytes
+ *                              if encoder sampling rate is 16 kHz. For
+ *                              32 kHz encoder sampling rate valid values
+ *                              are between 100 and 600 bytes.
+ *
+ * Return value               : 0 if successful
+ *                             -1 if error happens
+ */
+WebRtc_Word16 WebRtcIsac_SetMaxPayloadSize(
+					  ISACStruct* ISAC_main_inst,
+					  WebRtc_Word16 maxPayloadBytes)
+{
+  ISACMainStruct *instISAC;
+  WebRtc_Word16 status = 0;
+
+  /* typecast pointer to real structure  */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  /* check if encoder initiated */
+  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+     BIT_MASK_ENC_INIT)
+    {
+      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
+      return -1;
+    }
+
+  if(instISAC->encoderSamplingRateKHz == kIsacSuperWideband)
+    {
+      // sanity check
+      if(maxPayloadBytes < 120)
+	{
+	  // maxRate is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxPayloadBytes = 120;
+	  status = -1;
+	}
+
+      /* sanity check */
+      if(maxPayloadBytes > STREAM_SIZE_MAX)
+	{
+	  // maxRate is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxPayloadBytes = STREAM_SIZE_MAX;
+	  status = -1;
+	}
+    }
+  else
+    {
+      if(maxPayloadBytes < 120)
+	{
+	  // max payload-size is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxPayloadBytes = 120;
+	  status = -1;
+	}
+      if(maxPayloadBytes > STREAM_SIZE_MAX_60)
+	{
+	  // max payload-size is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxPayloadBytes = STREAM_SIZE_MAX_60;
+	  status = -1;
+	}
+    }
+  instISAC->maxPayloadSizeBytes = maxPayloadBytes;
+  UpdatePayloadSizeLimit(instISAC);
+  return status;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_SetMaxRate(...)
+ *
+ * This function sets the maximum rate which the codec may not exceed for
+ * any signal packet. The maximum rate is defined and payload-size per
+ * frame-size in bits per second.
+ *
+ * The codec has a maximum rate of 53400 bits per second (200 bytes per 30
+ * ms) if the encoder sampling rate is 16kHz, and 160 kbps (600 bytes/30 ms)
+ * if the encoder sampling rate is 32 kHz.
+ *
+ * It is possible to set a maximum rate between 32000 and 53400 bits/sec
+ * in wideband mode, and 32000 to 160000 bits/sec in super-wideband mode.
+ *
+ * ---------------
+ * IMPORTANT NOTES
+ * ---------------
+ * The size of a packet is limited to the minimum of 'max-payload-size' and
+ * 'max-rate.' For instance, let's assume the max-payload-size is set to
+ * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps
+ * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms
+ * frame-size. Then a packet with a frame-size of 30 ms is limited to 150,
+ * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to
+ * 170 bytes, min(170, 300).
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *        - maxRate           : maximum rate in bits per second,
+ *                              valid values are 32000 to 53400 bits/sec in
+ *                              wideband mode, and 32000 to 160000 bits/sec in
+ *                              super-wideband mode.
+ *
+ * Return value               : 0 if successful
+ *                             -1 if error happens
+ */
+WebRtc_Word16 WebRtcIsac_SetMaxRate(
+				   ISACStruct* ISAC_main_inst,
+				   WebRtc_Word32 maxRate)
+{
+  ISACMainStruct *instISAC;
+  WebRtc_Word16 maxRateInBytesPer30Ms;
+  WebRtc_Word16 status = 0;
+
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct *)ISAC_main_inst;
+
+  /* check if encoder initiated */
+  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+     BIT_MASK_ENC_INIT)
+    {
+      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
+      return -1;
+    }
+  /*
+    Calculate maximum number of bytes per 30 msec packets for the
+    given maximum rate. Multiply with 30/1000 to get number of
+    bits per 30 ms, divide by 8 to get number of bytes per 30 ms:
+    maxRateInBytes = floor((maxRate * 30/1000) / 8);
+  */
+  maxRateInBytesPer30Ms = (WebRtc_Word16)(maxRate*3/800);
+
+  if(instISAC->encoderSamplingRateKHz == kIsacWideband)
+    {
+      if(maxRate < 32000)
+	{
+	  // max rate is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxRateInBytesPer30Ms = 120;
+	  status = -1;
+	}
+
+      if(maxRate > 53400)
+	{
+	  // max rate is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxRateInBytesPer30Ms = 200;
+	  status = -1;
+	}
+    }
+  else
+    {
+      if(maxRateInBytesPer30Ms < 120)
+	{
+	  // maxRate is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxRateInBytesPer30Ms = 120;
+	  status = -1;
+	}
+
+      if(maxRateInBytesPer30Ms > STREAM_SIZE_MAX)
+	{
+	  // maxRate is out of valid range
+	  // set to the acceptable value and return -1.
+	  maxRateInBytesPer30Ms = STREAM_SIZE_MAX;
+	  status = -1;
+	}
+    }
+  instISAC->maxRateBytesPer30Ms = maxRateInBytesPer30Ms;
+  UpdatePayloadSizeLimit(instISAC);
+  return status;
+}
+
+
+/****************************************************************************
+ * WebRtcIsac_GetRedPayload(...)
+ *
+ * Populates "encoded" with the redundant payload of the recently encoded
 * frame. This function has to be called after WebRtcIsac_Encode(...)
+ * returns a positive value. Regardless of the frame-size this function will
+ * be called only once after encoding is completed. The bit-stream is
+ * targeted for 16000 bit/sec.
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC struct
+ *
+ * Output:
+ *        - encoded           : the encoded data vector
+ *
+ *
+ * Return value               : >0 - Length (in bytes) of coded data
+ *                            : -1 - Error
+ *
+ *
+ */
+WebRtc_Word16 WebRtcIsac_GetRedPayload(
+				      ISACStruct*  ISAC_main_inst,
+				      WebRtc_Word16* encoded)
+{
+  ISACMainStruct* instISAC;
+  Bitstr          iSACBitStreamInst;
+  WebRtc_Word16     streamLenLB;
+  WebRtc_Word16     streamLenUB;
+  WebRtc_Word16     streamLen;
+  WebRtc_Word16     totalLenUB;
+  WebRtc_UWord8*    ptrEncodedUW8 = (WebRtc_UWord8*)encoded;
+#ifndef WEBRTC_BIG_ENDIAN
+  int k;
+#endif
+
+  /* typecast pointer to real structure */
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+
+  if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+     BIT_MASK_ENC_INIT)
+    {
+      instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED;
+    }
+
+
+  iSACBitStreamInst.W_upper = 0xFFFFFFFF;
+  iSACBitStreamInst.streamval = 0;
+  iSACBitStreamInst.stream_index = 0;
+
+
+  streamLenLB = WebRtcIsac_EncodeStoredDataLb(
+					      &instISAC->instLB.ISACencLB_obj.SaveEnc_obj,
+					      &iSACBitStreamInst,
+					      instISAC->instLB.ISACencLB_obj.lastBWIdx,
+					      RCU_TRANSCODING_SCALE);
+
+  if(streamLenLB < 0)
+    {
+      return -1;
+    }
+
+  /* convert from bytes to WebRtc_Word16 */
+  memcpy(ptrEncodedUW8, iSACBitStreamInst.stream, streamLenLB);
+
+  streamLen = streamLenLB;
+
+  if(instISAC->bandwidthKHz == isac8kHz)
+    {
+      return streamLenLB;
+    }
+
+  streamLenUB = WebRtcIsac_GetRedPayloadUb(
+					   &instISAC->instUB.ISACencUB_obj.SaveEnc_obj,
+					   &iSACBitStreamInst, instISAC->bandwidthKHz);
+
+  if(streamLenUB < 0)
+    {
+      // an error has happened but this is not the error due to a
+      // bit-stream larger than the limit
+      return -1;
+    }
+
+  // We have one byte to write the total length of the upper band
+  // the length include the bitstream length, check-sum and the
+  // single byte where the length is written to. This is according to
+  // iSAC wideband and how the "garbage" is dealt.
+  totalLenUB = streamLenUB + 1 + LEN_CHECK_SUM_WORD8;
+  if(totalLenUB > 255)
+    {
+      streamLenUB = 0;
+    }
+
+  // Generate CRC if required.
+  if((instISAC->bandwidthKHz != isac8kHz) &&
+     (streamLenUB > 0))
+    {
+      WebRtc_UWord32 crc;
+      streamLen += totalLenUB;
+      ptrEncodedUW8[streamLenLB] = (WebRtc_UWord8)totalLenUB;
+      memcpy(&ptrEncodedUW8[streamLenLB+1], iSACBitStreamInst.stream, streamLenUB);
+
+      WebRtcIsac_GetCrc((WebRtc_Word16*)(&(ptrEncodedUW8[streamLenLB + 1])),
+			streamLenUB, &crc);
+#ifndef WEBRTC_BIG_ENDIAN
+      for(k = 0; k < LEN_CHECK_SUM_WORD8; k++)
+	{
+	  ptrEncodedUW8[streamLen - LEN_CHECK_SUM_WORD8 + k] =
+	    (WebRtc_UWord8)((crc >> (24 - k * 8)) & 0xFF);
+	}
+#else
+      memcpy(&ptrEncodedUW8[streamLenLB + streamLenUB + 1], &crc,
+	     LEN_CHECK_SUM_WORD8);
+#endif
+    }
+
+
+  return streamLen;
+}
+
+
/****************************************************************************
 * WebRtcIsac_version(...)
 *
 * Writes the codec version string "4.3.0" (NUL-terminated, 6 bytes total)
 * into the caller-supplied buffer.
 *
 * Output:
 *        - version      : Pointer to a buffer of at least 6 chars.
 */
void WebRtcIsac_version(char *version)
{
  /* Fixed-length copy: 5 characters plus the terminating NUL. */
  memcpy(version, "4.3.0", 6);
}
+
+
+/******************************************************************************
+ * WebRtcIsac_SetEncSampRate()
+ * Set the sampling rate of the encoder. Initialization of the encoder WILL
+ * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz
+ * which is set when the instance is created. The encoding-mode and the
+ * bottleneck remain unchanged by this call, however, the maximum rate and
+ * maximum payload-size will reset to their default value.
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *        - sampRate          : enumerator specifying the sampling rate.
+ *
+ * Return value               : 0 if successful
+ *                             -1 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_SetEncSampRate(
+				       ISACStruct*               ISAC_main_inst,
+				       enum IsacSamplingRate sampRate)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  if((sampRate != kIsacWideband) &&
+     (sampRate != kIsacSuperWideband))
+    {
+      // Sampling Frequency is not supported
+      instISAC->errorCode = ISAC_UNSUPPORTED_SAMPLING_FREQUENCY;
+      return -1;
+    }
+  else if((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
+          BIT_MASK_ENC_INIT)
+    {
+      if(sampRate == kIsacWideband)
+      {
+        instISAC->bandwidthKHz = isac8kHz;
+      }
+      else
+      {
+        instISAC->bandwidthKHz = isac16kHz;
+      }
+      instISAC->encoderSamplingRateKHz = sampRate;
+      return 0;
+    }
+  else
+    {
+      ISACUBStruct* instUB = &(instISAC->instUB);
+      ISACLBStruct* instLB = &(instISAC->instLB);
+      double bottleneckLB;
+      double bottleneckUB;
+      WebRtc_Word32 bottleneck = instISAC->bottleneck;
+      WebRtc_Word16 codingMode = instISAC->codingMode;
+      WebRtc_Word16 frameSizeMs = instLB->ISACencLB_obj.new_framelength / (FS / 1000);
+
+      if((sampRate == kIsacWideband) &&
+	 (instISAC->encoderSamplingRateKHz == kIsacSuperWideband))
+	{
+	  // changing from super-wideband to wideband.
+	  // we don't need to re-initialize the encoder of the
+	  // lower-band.
+	  instISAC->bandwidthKHz = isac8kHz;
+	  if(codingMode == 1)
+	    {
+	      ControlLb(instLB,
+				   (bottleneck > 32000)? 32000:bottleneck, FRAMESIZE);
+	    }
+	  instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX_60;
+	  instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX_30;
+	}
+      else if((sampRate == kIsacSuperWideband) &&
+	      (instISAC->encoderSamplingRateKHz == kIsacWideband))
+	{
+	  if(codingMode == 1)
+	    {
+	      WebRtcIsac_RateAllocation(bottleneck, &bottleneckLB, &bottleneckUB,
+					&(instISAC->bandwidthKHz));
+	    }
+
+          instISAC->bandwidthKHz = isac16kHz;
+	  instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX;
+	  instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX;
+
+	  EncoderInitLb(instLB, codingMode, sampRate);
+	  EncoderInitUb(instUB, instISAC->bandwidthKHz);
+
+	  memset(instISAC->analysisFBState1, 0,
+		 FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+	  memset(instISAC->analysisFBState2, 0,
+		 FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+
+	  if(codingMode == 1)
+	    {
+	      instISAC->bottleneck = bottleneck;
+	      ControlLb(instLB, bottleneckLB,
+				   (instISAC->bandwidthKHz == isac8kHz)? frameSizeMs:FRAMESIZE);
+	      if(instISAC->bandwidthKHz > isac8kHz)
+		{
+		  ControlUb(instUB, bottleneckUB);
+		}
+	    }
+	  else
+	    {
+	      instLB->ISACencLB_obj.enforceFrameSize = 0;
+	      instLB->ISACencLB_obj.new_framelength = FRAMESAMPLES;
+	    }
+	}
+      instISAC->encoderSamplingRateKHz = sampRate;
+      return 0;
+    }
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_SetDecSampRate()
+ * Set the sampling rate of the decoder.  Initialization of the decoder WILL
+ * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz
+ * which is set when the instance is created.
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *        - sampRate          : enumerator specifying the sampling rate.
+ *
+ * Return value               : 0 if successful
+ *                             -1 if failed.
+ */
+WebRtc_Word16 WebRtcIsac_SetDecSampRate(
+				       ISACStruct*               ISAC_main_inst,
+				       enum IsacSamplingRate sampRate)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  if((sampRate != kIsacWideband) &&
+     (sampRate != kIsacSuperWideband))
+    {
+      // Sampling Frequency is not supported
+      instISAC->errorCode = ISAC_UNSUPPORTED_SAMPLING_FREQUENCY;
+      return -1;
+    }
+  else
+    {
+      if((instISAC->decoderSamplingRateKHz == kIsacWideband) &&
+	 (sampRate == kIsacSuperWideband))
+	{
+	  // switching from wideband to super-wideband at the decoder
+	  // we need to reset the filter-bank and initialize
+	  // upper-band decoder.
+	  memset(instISAC->synthesisFBState1, 0,
+		 FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+	  memset(instISAC->synthesisFBState2, 0,
+		 FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+
+	  if(DecoderInitUb(&(instISAC->instUB)) < 0)
+	    {
+	      return -1;
+	    }
+	}
+      instISAC->decoderSamplingRateKHz = sampRate;
+      return 0;
+    }
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_EncSampRate()
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *
+ * Return value               : enumerator representing sampling frequency
+ *                              associated with the encoder, the input audio
+ *                              is expected to be sampled at this rate.
+ *
+ */
+enum IsacSamplingRate WebRtcIsac_EncSampRate(
+					       ISACStruct*                ISAC_main_inst)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  return instISAC->encoderSamplingRateKHz;
+}
+
+
+/******************************************************************************
+ * WebRtcIsac_DecSampRate()
+ * Return the sampling rate of the decoded audio.
+ *
+ * Input:
+ *        - ISAC_main_inst    : iSAC instance
+ *
+ * Return value               : enumerator representing sampling frequency
+ *                              associated with the decoder, i.e. the
+ *                              sampling rate of the decoded audio.
+ *
+ */
+enum IsacSamplingRate WebRtcIsac_DecSampRate(
+					       ISACStruct*                ISAC_main_inst)
+{
+  ISACMainStruct* instISAC;
+
+  instISAC = (ISACMainStruct*)ISAC_main_inst;
+
+  return instISAC->decoderSamplingRateKHz;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/isac.gypi b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/isac.gypi
new file mode 100644
index 0000000..d30be55
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/isac.gypi
@@ -0,0 +1,91 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
{
  # GYP build description for the floating-point iSAC codec library.
  # The target pulls in the shared signal-processing library and exports
  # '../interface' as an include directory to its dependents.
  'targets': [
    {
      'target_name': 'iSAC',
      'type': '<(library)',
      'dependencies': [
        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
      ],
      'include_dirs': [
        '../interface',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          '../interface',
        ],
      },
      'sources': [
        '../interface/isac.h',
        'arith_routines.c',
        'arith_routines_hist.c',
        'arith_routines_logist.c',
        'bandwidth_estimator.c',
        'crc.c',
        'decode.c',
        'decode_bwe.c',
        'encode.c',
        'encode_lpc_swb.c',
        'entropy_coding.c',
        'fft.c',
        'filter_functions.c',
        'filterbank_tables.c',
        # NOTE(review): 'intialize.c' looks like a typo for 'initialize.c' —
        # confirm it matches the on-disk filename before "fixing" it.
        'intialize.c',
        'isac.c',
        'filterbanks.c',
        'pitch_lag_tables.c',
        'lattice.c',
        'lpc_gain_swb_tables.c',
        'lpc_analysis.c',
        'lpc_shape_swb12_tables.c',
        'lpc_shape_swb16_tables.c',
        'lpc_tables.c',
        'pitch_estimator.c',
        'pitch_filter.c',
        'pitch_gain_tables.c',
        'spectrum_ar_model_tables.c',
        'transform.c',
        'arith_routines.h',
        'bandwidth_estimator.h',
        'codec.h',
        'crc.h',
        'encode_lpc_swb.h',
        'entropy_coding.h',
        'fft.h',
        'filterbank_tables.h',
        'lpc_gain_swb_tables.h',
        'lpc_analysis.h',
        'lpc_shape_swb12_tables.h',
        'lpc_shape_swb16_tables.h',
        'lpc_tables.h',
        'pitch_estimator.h',
        'pitch_gain_tables.h',
        'pitch_lag_tables.h',
        'settings.h',
        'spectrum_ar_model_tables.h',
        'structs.h',
        'os_specific_inline.h',
     ],
      # NOTE(review): this defines WEBRTC_LINUX on every non-Windows OS,
      # including Mac — confirm that is intended.
      'conditions': [
        ['OS!="win"', {
          'defines': [
            'WEBRTC_LINUX',
          ],
        }],
      ],
    },
  ],
}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lattice.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lattice.c
new file mode 100644
index 0000000..a46135a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lattice.c
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lattice.c
+ *
+ * contains the normalized lattice filter routines (MA and AR) for iSAC codec
+ *
+ */
+#include "settings.h"
+#include "codec.h"
+
+#include <math.h>
+#include <memory.h>
+#ifdef WEBRTC_ANDROID
+#include <stdlib.h>
+#endif
+
+/* filter the signal using normalized lattice filter */
+/* MA filter */
/* Normalized lattice MA (all-zero) filtering of one frame, processed in
 * SUBFRAMES blocks of HALF_SUBFRAMELEN samples.
 *
 * Inputs:
 *   orderCoef  - filter order (number of reflection coefficients).
 *   lat_in     - input signal, SUBFRAMES * HALF_SUBFRAMELEN samples.
 *   filtcoeflo - per-sub-frame coefficient blocks of (orderCoef + 1)
 *                doubles; entry 0 is the gain, entries 1..orderCoef the
 *                direct-form A-polynomial.
 * Output:
 *   lat_out    - filtered signal, same length as lat_in.
 * In/out:
 *   stateF, stateG - lattice state carried across calls.
 *                    NOTE(review): only stateG feeds the recursion below;
 *                    stateF is written but never read in this function.
 */
void WebRtcIsac_NormLatticeFilterMa(int orderCoef,
                                     float *stateF,
                                     float *stateG,
                                     float *lat_in,
                                     double *filtcoeflo,
                                     double *lat_out)
{
  int n,k,i,u,temp1;
  int ord_1 = orderCoef+1;
  float sth[MAX_AR_MODEL_ORDER];
  float cth[MAX_AR_MODEL_ORDER];
  float inv_cth[MAX_AR_MODEL_ORDER];
  double a[MAX_AR_MODEL_ORDER+1];
  float f[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN], g[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN];
  float gain1;

  for (u=0;u<SUBFRAMES;u++)
  {
    /* set the Direct Form coefficients */
    temp1 = u*ord_1;
    a[0] = 1;
    memcpy(a+1, filtcoeflo+temp1+1, sizeof(double) * (ord_1-1));

    /* compute lattice filter coefficients (sin/cos reflection pairs) */
    WebRtcIsac_Dir2Lat(a,orderCoef,sth,cth);

    /* compute the gain: sub-frame gain times all cos factors */
    gain1 = (float)filtcoeflo[temp1];
    for (k=0;k<orderCoef;k++)
    {
      gain1 *= cth[k];
      inv_cth[k] = 1/cth[k];
    }

    /* normalized lattice filter */
    /*****************************/

    /* initial conditions: stage 0 of both paths is the input itself */
    for (i=0;i<HALF_SUBFRAMELEN;i++)
    {
      f[0][i] = lat_in[i + u * HALF_SUBFRAMELEN];
      g[0][i] = lat_in[i + u * HALF_SUBFRAMELEN];
    }

    /* get the state of f&g for the first input, for all orders */
    for (i=1;i<ord_1;i++)
    {
      f[i][0] = inv_cth[i-1]*(f[i-1][0] + sth[i-1]*stateG[i-1]);
      g[i][0] = cth[i-1]*stateG[i-1] + sth[i-1]* f[i][0];
    }

    /* filtering: propagate each sample through all lattice stages */
    for(k=0;k<orderCoef;k++)
    {
      for(n=0;n<(HALF_SUBFRAMELEN-1);n++)
      {
        f[k+1][n+1] = inv_cth[k]*(f[k][n+1] + sth[k]*g[k][n]);
        g[k+1][n+1] = cth[k]*g[k][n] + sth[k]* f[k+1][n+1];
      }
    }

    /* output is the gain-scaled highest-order forward path */
    for(n=0;n<HALF_SUBFRAMELEN;n++)
    {
      lat_out[n + u * HALF_SUBFRAMELEN] = gain1 * f[orderCoef][n];
    }

    /* save the states */
    for (i=0;i<ord_1;i++)
    {
      stateF[i] = f[i][HALF_SUBFRAMELEN-1];
      stateG[i] = g[i][HALF_SUBFRAMELEN-1];
    }
    /* process next frame */
  }

  return;
}
+
+
+/*///////////////////AR filter ///////////////////////////////*/
+/* filter the signal using normalized lattice filter */
/* Normalized lattice AR (all-pole) synthesis filtering of one frame,
 * processed in SUBFRAMES blocks of HALF_SUBFRAMELEN samples.
 *
 * Inputs:
 *   orderCoef    - filter order (number of reflection coefficients).
 *   lat_in       - residual input, SUBFRAMES * HALF_SUBFRAMELEN samples.
 *   lo_filt_coef - per-sub-frame coefficient blocks of (orderCoef + 1)
 *                  doubles; entry 0 is the gain, entries 1..orderCoef the
 *                  direct-form A-polynomial.
 * Output:
 *   lat_out      - synthesized signal, same length as lat_in.
 * In/out:
 *   stateF, stateG - lattice state carried across calls.
 *                    NOTE(review): only stateG is read by the recursion
 *                    below; stateF is written but never read here.
 */
void WebRtcIsac_NormLatticeFilterAr(int orderCoef,
                                     float *stateF,
                                     float *stateG,
                                     double *lat_in,
                                     double *lo_filt_coef,
                                     float *lat_out)
{
  int n,k,i,u,temp1;
  int ord_1 = orderCoef+1;
  float sth[MAX_AR_MODEL_ORDER];
  float cth[MAX_AR_MODEL_ORDER];
  double a[MAX_AR_MODEL_ORDER+1];
  float ARf[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN], ARg[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN];
  float gain1,inv_gain1;

  for (u=0;u<SUBFRAMES;u++)
  {
    /* set the denominator and numerator of the Direct Form */
    temp1 = u*ord_1;
    a[0] = 1;

    memcpy(a+1, lo_filt_coef+temp1+1, sizeof(double) * (ord_1-1));

    /* direct-form to lattice (sin/cos reflection pairs) */
    WebRtcIsac_Dir2Lat(a,orderCoef,sth,cth);

    /* overall gain: sub-frame gain times all cos factors */
    gain1 = (float)lo_filt_coef[temp1];
    for (k=0;k<orderCoef;k++)
    {
      gain1 = cth[k]*gain1;
    }

    /* initial conditions: inverse-gain-scaled input enters at the top stage */
    inv_gain1 = 1/gain1;
    for (i=0;i<HALF_SUBFRAMELEN;i++)
    {
      ARf[orderCoef][i] = (float)lat_in[i + u * HALF_SUBFRAMELEN]*inv_gain1;
    }


    for (i=orderCoef-1;i>=0;i--) //get the state of f&g for the first input, for all orders
    {
      ARf[i][0] = cth[i]*ARf[i+1][0] - sth[i]*stateG[i];
      ARg[i+1][0] = sth[i]*ARf[i+1][0] + cth[i]* stateG[i];
    }
    ARg[0][0] = ARf[0][0];

    /* run the remaining samples down through the lattice stages */
    for(n=0;n<(HALF_SUBFRAMELEN-1);n++)
    {
      for(k=orderCoef-1;k>=0;k--)
      {
        ARf[k][n+1] = cth[k]*ARf[k+1][n+1] - sth[k]*ARg[k][n];
        ARg[k+1][n+1] = sth[k]*ARf[k+1][n+1] + cth[k]* ARg[k][n];
      }
      ARg[0][n+1] = ARf[0][n+1];
    }

    /* stage-0 forward path is the synthesized output */
    memcpy(lat_out+u * HALF_SUBFRAMELEN, &(ARf[0][0]), sizeof(float) * HALF_SUBFRAMELEN);

    /* cannot use memcpy in the following (strided 2-D column copy) */
    for (i=0;i<ord_1;i++)
    {
      stateF[i] = ARf[i][HALF_SUBFRAMELEN-1];
      stateG[i] = ARg[i][HALF_SUBFRAMELEN-1];
    }

  }

  return;
}
+
+
+/* compute the reflection coefficients using the step-down procedure*/
+/* converts the direct form parameters to lattice form.*/
+/* a and b are vectors which contain the direct form coefficients,
+   according to
+   A(z) = a(1) + a(2)*z + a(3)*z^2 + ... + a(M+1)*z^M
+   B(z) = b(1) + b(2)*z + b(3)*z^2 + ... + b(M+1)*z^M
+*/
+
+void WebRtcIsac_Dir2Lat(double *a,
+                        int orderCoef,
+                        float *sth,
+                        float *cth)
+{
+  int m, k;
+  float tmp[MAX_AR_MODEL_ORDER];
+  float tmp_inv, cth2;
+
+  sth[orderCoef-1] = (float)a[orderCoef];
+  cth2 = 1.0f - sth[orderCoef-1] * sth[orderCoef-1];
+  cth[orderCoef-1] = (float)sqrt(cth2);
+  for (m=orderCoef-1; m>0; m--)
+  {
+    tmp_inv = 1.0f / cth2;
+    for (k=1; k<=m; k++)
+    {
+      tmp[k] = ((float)a[k] - sth[m] * (float)a[m-k+1]) * tmp_inv;
+    }
+
+    for (k=1; k<m; k++)
+    {
+      a[k] = tmp[k];
+    }
+
+    sth[m-1] = tmp[m];
+    cth2 = 1 - sth[m-1] * sth[m-1];
+    cth[m-1] = (float)sqrt(cth2);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_analysis.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_analysis.c
new file mode 100644
index 0000000..854b2d7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_analysis.c
@@ -0,0 +1,535 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "lpc_analysis.h"
+#include "settings.h"
+#include "codec.h"
+#include "entropy_coding.h"
+
+#include <math.h>
+#include <string.h>
+
+#define LEVINSON_EPS    1.0e-10
+
+
+/* window */
+/* Matlab generation code:
+ *  t = (1:256)/257; r = 1-(1-t).^.45; w = sin(r*pi).^3; w = w/sum(w); plot((1:256)/8, w); grid;
+ *  for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end
+ */
/* Correlation window applied before autocorrelation in GetLpcCoefLb;
   generated by the Matlab snippet above and normalized to sum to 1.
   NOTE(review): assumes WINLEN equals the number of initializers (the
   Matlab code uses 256 points) — confirm against settings.h. */
static const double kLpcCorrWindow[WINLEN] = {
  0.00000000, 0.00000001, 0.00000004, 0.00000010, 0.00000020,
  0.00000035, 0.00000055, 0.00000083, 0.00000118, 0.00000163,
  0.00000218, 0.00000283, 0.00000361, 0.00000453, 0.00000558, 0.00000679,
  0.00000817, 0.00000973, 0.00001147, 0.00001342, 0.00001558,
  0.00001796, 0.00002058, 0.00002344, 0.00002657, 0.00002997,
  0.00003365, 0.00003762, 0.00004190, 0.00004651, 0.00005144, 0.00005673,
  0.00006236, 0.00006837, 0.00007476, 0.00008155, 0.00008875,
  0.00009636, 0.00010441, 0.00011290, 0.00012186, 0.00013128,
  0.00014119, 0.00015160, 0.00016252, 0.00017396, 0.00018594, 0.00019846,
  0.00021155, 0.00022521, 0.00023946, 0.00025432, 0.00026978,
  0.00028587, 0.00030260, 0.00031998, 0.00033802, 0.00035674,
  0.00037615, 0.00039626, 0.00041708, 0.00043863, 0.00046092, 0.00048396,
  0.00050775, 0.00053233, 0.00055768, 0.00058384, 0.00061080,
  0.00063858, 0.00066720, 0.00069665, 0.00072696, 0.00075813,
  0.00079017, 0.00082310, 0.00085692, 0.00089164, 0.00092728, 0.00096384,
  0.00100133, 0.00103976, 0.00107914, 0.00111947, 0.00116077,
  0.00120304, 0.00124630, 0.00129053, 0.00133577, 0.00138200,
  0.00142924, 0.00147749, 0.00152676, 0.00157705, 0.00162836, 0.00168070,
  0.00173408, 0.00178850, 0.00184395, 0.00190045, 0.00195799,
  0.00201658, 0.00207621, 0.00213688, 0.00219860, 0.00226137,
  0.00232518, 0.00239003, 0.00245591, 0.00252284, 0.00259079, 0.00265977,
  0.00272977, 0.00280078, 0.00287280, 0.00294582, 0.00301984,
  0.00309484, 0.00317081, 0.00324774, 0.00332563, 0.00340446,
  0.00348421, 0.00356488, 0.00364644, 0.00372889, 0.00381220, 0.00389636,
  0.00398135, 0.00406715, 0.00415374, 0.00424109, 0.00432920,
  0.00441802, 0.00450754, 0.00459773, 0.00468857, 0.00478001,
  0.00487205, 0.00496464, 0.00505775, 0.00515136, 0.00524542, 0.00533990,
  0.00543476, 0.00552997, 0.00562548, 0.00572125, 0.00581725,
  0.00591342, 0.00600973, 0.00610612, 0.00620254, 0.00629895,
  0.00639530, 0.00649153, 0.00658758, 0.00668341, 0.00677894, 0.00687413,
  0.00696891, 0.00706322, 0.00715699, 0.00725016, 0.00734266,
  0.00743441, 0.00752535, 0.00761540, 0.00770449, 0.00779254,
  0.00787947, 0.00796519, 0.00804963, 0.00813270, 0.00821431, 0.00829437,
  0.00837280, 0.00844949, 0.00852436, 0.00859730, 0.00866822,
  0.00873701, 0.00880358, 0.00886781, 0.00892960, 0.00898884,
  0.00904542, 0.00909923, 0.00915014, 0.00919805, 0.00924283, 0.00928436,
  0.00932252, 0.00935718, 0.00938821, 0.00941550, 0.00943890,
  0.00945828, 0.00947351, 0.00948446, 0.00949098, 0.00949294,
  0.00949020, 0.00948262, 0.00947005, 0.00945235, 0.00942938, 0.00940099,
  0.00936704, 0.00932738, 0.00928186, 0.00923034, 0.00917268,
  0.00910872, 0.00903832, 0.00896134, 0.00887763, 0.00878706,
  0.00868949, 0.00858478, 0.00847280, 0.00835343, 0.00822653, 0.00809199,
  0.00794970, 0.00779956, 0.00764145, 0.00747530, 0.00730103,
  0.00711857, 0.00692787, 0.00672888, 0.00652158, 0.00630597,
  0.00608208, 0.00584994, 0.00560962, 0.00536124, 0.00510493, 0.00484089,
  0.00456935, 0.00429062, 0.00400505, 0.00371310, 0.00341532,
  0.00311238, 0.00280511, 0.00249452, 0.00218184, 0.00186864,
  0.00155690, 0.00124918, 0.00094895, 0.00066112, 0.00039320, 0.00015881
};
+
+double WebRtcIsac_LevDurb(double *a, double *k, double *r, int order)
+{
+
+  double  sum, alpha;
+  int     m, m_h, i;
+  alpha = 0; //warning -DH
+  a[0] = 1.0;
+  if (r[0] < LEVINSON_EPS) { /* if r[0] <= 0, set LPC coeff. to zero */
+    for (i = 0; i < order; i++) {
+      k[i] = 0;
+      a[i+1] = 0;
+    }
+  } else {
+    a[1] = k[0] = -r[1]/r[0];
+    alpha = r[0] + r[1] * k[0];
+    for (m = 1; m < order; m++){
+      sum = r[m + 1];
+      for (i = 0; i < m; i++){
+        sum += a[i+1] * r[m - i];
+      }
+      k[m] = -sum / alpha;
+      alpha += k[m] * sum;
+      m_h = (m + 1) >> 1;
+      for (i = 0; i < m_h; i++){
+        sum = a[i+1] + k[m] * a[m - i];
+        a[m - i] += k[m] * a[i+1];
+        a[i+1] = sum;
+      }
+      a[m+1] = k[m];
+    }
+  }
+  return alpha;
+}
+
+
+//was static before, but didn't work with MEX file
+void WebRtcIsac_GetVars(const double *input, const WebRtc_Word16 *pitchGains_Q12,
+                       double *oldEnergy, double *varscale)
+{
+  double nrg[4], chng, pg;
+  int k;
+
+  double pitchGains[4]={0,0,0,0};;
+
+  /* Calculate energies of first and second frame halfs */
+  nrg[0] = 0.0001;
+  for (k = QLOOKAHEAD/2; k < (FRAMESAMPLES_QUARTER + QLOOKAHEAD) / 2; k++) {
+    nrg[0] += input[k]*input[k];
+  }
+  nrg[1] = 0.0001;
+  for ( ; k < (FRAMESAMPLES_HALF + QLOOKAHEAD) / 2; k++) {
+    nrg[1] += input[k]*input[k];
+  }
+  nrg[2] = 0.0001;
+  for ( ; k < (FRAMESAMPLES*3/4 + QLOOKAHEAD) / 2; k++) {
+    nrg[2] += input[k]*input[k];
+  }
+  nrg[3] = 0.0001;
+  for ( ; k < (FRAMESAMPLES + QLOOKAHEAD) / 2; k++) {
+    nrg[3] += input[k]*input[k];
+  }
+
+  /* Calculate average level change */
+  chng = 0.25 * (fabs(10.0 * log10(nrg[3] / nrg[2])) +
+                 fabs(10.0 * log10(nrg[2] / nrg[1])) +
+                 fabs(10.0 * log10(nrg[1] / nrg[0])) +
+                 fabs(10.0 * log10(nrg[0] / *oldEnergy)));
+
+
+  /* Find average pitch gain */
+  pg = 0.0;
+  for (k=0; k<4; k++)
+  {
+    pitchGains[k] = ((float)pitchGains_Q12[k])/4096;
+    pg += pitchGains[k];
+  }
+  pg *= 0.25;
+
+  /* If pitch gain is low and energy constant - increase noise level*/
+  /* Matlab code:
+     pg = 0:.01:.45; plot(pg, 0.0 + 1.0 * exp( -1.0 * exp(-200.0 * pg.*pg.*pg) / (1.0 + 0.4 * 0) ))
+  */
+  *varscale = 0.0 + 1.0 * exp( -1.4 * exp(-200.0 * pg*pg*pg) / (1.0 + 0.4 * chng) );
+
+  *oldEnergy = nrg[3];
+}
+
+void
+WebRtcIsac_GetVarsUB(
+    const double* input,
+    double*       oldEnergy,
+    double*       varscale)
+{
+  double nrg[4], chng;
+  int k;
+
+  /* Calculate energies of first and second frame halfs */
+  nrg[0] = 0.0001;
+  for (k = 0; k < (FRAMESAMPLES_QUARTER) / 2; k++) {
+    nrg[0] += input[k]*input[k];
+  }
+  nrg[1] = 0.0001;
+  for ( ; k < (FRAMESAMPLES_HALF) / 2; k++) {
+    nrg[1] += input[k]*input[k];
+  }
+  nrg[2] = 0.0001;
+  for ( ; k < (FRAMESAMPLES*3/4) / 2; k++) {
+    nrg[2] += input[k]*input[k];
+  }
+  nrg[3] = 0.0001;
+  for ( ; k < (FRAMESAMPLES) / 2; k++) {
+    nrg[3] += input[k]*input[k];
+  }
+
+  /* Calculate average level change */
+  chng = 0.25 * (fabs(10.0 * log10(nrg[3] / nrg[2])) +
+                 fabs(10.0 * log10(nrg[2] / nrg[1])) +
+                 fabs(10.0 * log10(nrg[1] / nrg[0])) +
+                 fabs(10.0 * log10(nrg[0] / *oldEnergy)));
+
+
+  /* If pitch gain is low and energy constant - increase noise level*/
+  /* Matlab code:
+     pg = 0:.01:.45; plot(pg, 0.0 + 1.0 * exp( -1.0 * exp(-200.0 * pg.*pg.*pg) / (1.0 + 0.4 * 0) ))
+  */
+  *varscale = exp( -1.4 / (1.0 + 0.4 * chng) );
+
+  *oldEnergy = nrg[3];
+}
+
+void WebRtcIsac_GetLpcCoefLb(double *inLo, double *inHi, MaskFiltstr *maskdata,
+                             double signal_noise_ratio, const WebRtc_Word16 *pitchGains_Q12,
+                             double *lo_coeff, double *hi_coeff)
+{
+  int k, n, j, pos1, pos2;
+  double varscale;
+
+  double DataLo[WINLEN], DataHi[WINLEN];
+  double corrlo[ORDERLO+2], corrlo2[ORDERLO+1];
+  double corrhi[ORDERHI+1];
+  double k_veclo[ORDERLO], k_vechi[ORDERHI];
+
+  double a_LO[ORDERLO+1], a_HI[ORDERHI+1];
+  double tmp, res_nrg;
+
+  double FwdA, FwdB;
+
+  /* hearing threshold level in dB; higher value gives more noise */
+  const double HearThresOffset = -28.0;
+
+  /* bandwdith expansion factors for low- and high band */
+  const double gammaLo = 0.9;
+  const double gammaHi = 0.8;
+
+  /* less-noise-at-low-frequencies factor */
+  double aa;
+
+
+  /* convert from dB to signal level */
+  const double H_T_H = pow(10.0, 0.05 * HearThresOffset);
+  double S_N_R = pow(10.0, 0.05 * signal_noise_ratio) / 3.46;    /* divide by sqrt(12) */
+
+  /* change quallevel depending on pitch gains and level fluctuations */
+  WebRtcIsac_GetVars(inLo, pitchGains_Q12, &(maskdata->OldEnergy), &varscale);
+
+  /* less-noise-at-low-frequencies factor */
+  aa = 0.35 * (0.5 + 0.5 * varscale);
+
+  /* replace data in buffer by new look-ahead data */
+  for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++)
+    maskdata->DataBufferLo[pos1 + WINLEN - QLOOKAHEAD] = inLo[pos1];
+
+  for (k = 0; k < SUBFRAMES; k++) {
+
+    /* Update input buffer and multiply signal with window */
+    for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) {
+      maskdata->DataBufferLo[pos1] = maskdata->DataBufferLo[pos1 + UPDATE/2];
+      maskdata->DataBufferHi[pos1] = maskdata->DataBufferHi[pos1 + UPDATE/2];
+      DataLo[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1];
+      DataHi[pos1] = maskdata->DataBufferHi[pos1] * kLpcCorrWindow[pos1];
+    }
+    pos2 = k * UPDATE/2;
+    for (n = 0; n < UPDATE/2; n++, pos1++) {
+      maskdata->DataBufferLo[pos1] = inLo[QLOOKAHEAD + pos2];
+      maskdata->DataBufferHi[pos1] = inHi[pos2++];
+      DataLo[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1];
+      DataHi[pos1] = maskdata->DataBufferHi[pos1] * kLpcCorrWindow[pos1];
+    }
+
+    /* Get correlation coefficients */
+    WebRtcIsac_AutoCorr(corrlo, DataLo, WINLEN, ORDERLO+1); /* computing autocorrelation */
+    WebRtcIsac_AutoCorr(corrhi, DataHi, WINLEN, ORDERHI);
+
+
+    /* less noise for lower frequencies, by filtering/scaling autocorrelation sequences */
+    corrlo2[0] = (1.0+aa*aa) * corrlo[0] - 2.0*aa * corrlo[1];
+    tmp = (1.0 + aa*aa);
+    for (n = 1; n <= ORDERLO; n++) {
+      corrlo2[n] = tmp * corrlo[n] - aa * (corrlo[n-1] + corrlo[n+1]);
+    }
+    tmp = (1.0+aa) * (1.0+aa);
+    for (n = 0; n <= ORDERHI; n++) {
+      corrhi[n] = tmp * corrhi[n];
+    }
+
+    /* add white noise floor */
+    corrlo2[0] += 1e-6;
+    corrhi[0] += 1e-6;
+
+
+    FwdA = 0.01;
+    FwdB = 0.01;
+
+    /* recursive filtering of correlation over subframes */
+    for (n = 0; n <= ORDERLO; n++) {
+      maskdata->CorrBufLo[n] = FwdA * maskdata->CorrBufLo[n] + corrlo2[n];
+      corrlo2[n] = ((1.0-FwdA)*FwdB) * maskdata->CorrBufLo[n] + (1.0-FwdB) * corrlo2[n];
+    }
+    for (n = 0; n <= ORDERHI; n++) {
+      maskdata->CorrBufHi[n] = FwdA * maskdata->CorrBufHi[n] + corrhi[n];
+      corrhi[n] = ((1.0-FwdA)*FwdB) * maskdata->CorrBufHi[n] + (1.0-FwdB) * corrhi[n];
+    }
+
+    /* compute prediction coefficients */
+    WebRtcIsac_LevDurb(a_LO, k_veclo, corrlo2, ORDERLO);
+    WebRtcIsac_LevDurb(a_HI, k_vechi, corrhi, ORDERHI);
+
+    /* bandwidth expansion */
+    tmp = gammaLo;
+    for (n = 1; n <= ORDERLO; n++) {
+      a_LO[n] *= tmp;
+      tmp *= gammaLo;
+    }
+
+    /* residual energy */
+    res_nrg = 0.0;
+    for (j = 0; j <= ORDERLO; j++) {
+      for (n = 0; n <= j; n++) {
+        res_nrg += a_LO[j] * corrlo2[j-n] * a_LO[n];
+      }
+      for (n = j+1; n <= ORDERLO; n++) {
+        res_nrg += a_LO[j] * corrlo2[n-j] * a_LO[n];
+      }
+    }
+
+    /* add hearing threshold and compute the gain */
+    *lo_coeff++ = S_N_R / (sqrt(res_nrg) / varscale + H_T_H);
+
+    /* copy coefficients to output array */
+    for (n = 1; n <= ORDERLO; n++) {
+      *lo_coeff++ = a_LO[n];
+    }
+
+
+    /* bandwidth expansion */
+    tmp = gammaHi;
+    for (n = 1; n <= ORDERHI; n++) {
+      a_HI[n] *= tmp;
+      tmp *= gammaHi;
+    }
+
+    /* residual energy */
+    res_nrg = 0.0;
+    for (j = 0; j <= ORDERHI; j++) {
+      for (n = 0; n <= j; n++) {
+        res_nrg += a_HI[j] * corrhi[j-n] * a_HI[n];
+      }
+      for (n = j+1; n <= ORDERHI; n++) {
+        res_nrg += a_HI[j] * corrhi[n-j] * a_HI[n];
+      }
+    }
+
+    /* add hearing threshold and compute the gain */
+    *hi_coeff++ = S_N_R / (sqrt(res_nrg) / varscale + H_T_H);
+
+    /* copy coefficients to output array */
+    for (n = 1; n <= ORDERHI; n++) {
+      *hi_coeff++ = a_HI[n];
+    }
+  }
+}
+
+
+
+/******************************************************************************
+ * WebRtcIsac_GetLpcCoefUb()
+ *
+ * Compute LP coefficients and correlation coefficients. At 12 kHz LP
+ * coefficients of the first and the last sub-frame is computed. At 16 kHz
+ * LP coefficients of 4th, 8th and 12th sub-frames are computed. We always
+ * compute correlation coefficients of all sub-frames.
+ *
+ * Inputs:
+ *       -inSignal           : Input signal
+ *       -maskdata           : a structure keeping signal from previous frame.
+ *       -bandwidth          : specifies if the codec is in 0-16 kHz mode or
+ *                             0-12 kHz mode.
+ *
+ * Outputs:
+ *       -lpCoeff            : pointer to a buffer where A-polynomials are
+ *                             written to (first coeff is 1 and it is not
+ *                             written)
+ *       -corrMat            : a matrix where correlation coefficients of each
+ *                             sub-frame are written to one row.
+ *       -varscale           : a scale used to compute LPC gains.
+ */
+void
+WebRtcIsac_GetLpcCoefUb(
+    double*      inSignal,
+    MaskFiltstr* maskdata,
+    double*      lpCoeff,
+    double       corrMat[][UB_LPC_ORDER + 1],
+    double*      varscale,
+    WebRtc_Word16  bandwidth)
+{
+  int frameCntr, activeFrameCntr, n, pos1, pos2;
+  WebRtc_Word16 criterion1;
+  WebRtc_Word16 criterion2;
+  /* 16 kHz mode processes twice as many sub-frames as 12 kHz mode. */
+  WebRtc_Word16 numSubFrames = SUBFRAMES * (1 + (bandwidth == isac16kHz));
+  double data[WINLEN];
+  double corrSubFrame[UB_LPC_ORDER+2];
+  double reflecCoeff[UB_LPC_ORDER];
+
+  double aPolynom[UB_LPC_ORDER+1];
+  double tmp;
+
+  /* bandwidth expansion factor */
+  const double gamma = 0.9;
+
+  /* Compute the variance scale for the (first half of the) frame;
+     for 16 kHz a second scale is computed inside the loop below. */
+  WebRtcIsac_GetVarsUB(inSignal, &(maskdata->OldEnergy), varscale);
+
+  /* replace data in buffer by new look-ahead data */
+  for(frameCntr = 0, activeFrameCntr = 0; frameCntr < numSubFrames;
+      frameCntr++)
+  {
+    if(frameCntr == SUBFRAMES)
+    {
+      /* We are in 16 kHz mode: the second half of the frame gets its
+         own variance scale (written to the next varscale slot). */
+      varscale++;
+      WebRtcIsac_GetVarsUB(&inSignal[FRAMESAMPLES_HALF],
+                          &(maskdata->OldEnergy), varscale);
+    }
+    /* Update input buffer and multiply signal with window:
+       shift the buffer left by UPDATE/2 samples... */
+    for(pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++)
+    {
+      maskdata->DataBufferLo[pos1] = maskdata->DataBufferLo[pos1 +
+                                                            UPDATE/2];
+      data[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1];
+    }
+    /* ...and append UPDATE/2 new input samples, windowing as we go. */
+    pos2 = frameCntr * UPDATE/2;
+    for(n = 0; n < UPDATE/2; n++, pos1++, pos2++)
+    {
+      maskdata->DataBufferLo[pos1] = inSignal[pos2];
+      data[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1];
+    }
+
+    /* Autocorrelation of the windowed sub-frame; one row per sub-frame
+       is always stored in corrMat, for every sub-frame. */
+    WebRtcIsac_AutoCorr(corrSubFrame, data, WINLEN, UB_LPC_ORDER+1);
+    memcpy(corrMat[frameCntr], corrSubFrame,
+           (UB_LPC_ORDER+1)*sizeof(double));
+
+    /* LP coefficients are only computed for selected sub-frames:
+       first and last sub-frame at 12 kHz; every 4th at 16 kHz. */
+    criterion1 = ((frameCntr == 0) || (frameCntr == (SUBFRAMES - 1))) &&
+        (bandwidth == isac12kHz);
+    criterion2 = (((frameCntr+1) % 4) == 0) &&
+        (bandwidth == isac16kHz);
+    if(criterion1 || criterion2)
+    {
+      /* add a white-noise floor to the autocorrelation */
+      corrSubFrame[0] += 1e-6;
+      /* compute prediction coefficients (Levinson-Durbin) */
+      WebRtcIsac_LevDurb(aPolynom, reflecCoeff, corrSubFrame,
+                        UB_LPC_ORDER);
+
+      /* Bandwidth expansion: write gamma^n * a[n] to the output.
+         The leading coefficient (always 1) is not written. */
+      tmp = gamma;
+      for (n = 1; n <= UB_LPC_ORDER; n++)
+      {
+        *lpCoeff++ = aPolynom[n] * tmp;
+        tmp *= gamma;
+      }
+      /* NOTE(review): activeFrameCntr is incremented but never read
+         anywhere in this function — looks like dead state; confirm
+         before removing. */
+      activeFrameCntr++;
+    }
+  }
+}
+
+
+
+/******************************************************************************
+ * WebRtcIsac_GetLpcGain()
+ *
+ * Compute the LPC gains for each sub-frame, given the LPC of each sub-frame
+ * and the corresponding correlation coefficients.
+ *
+ * Inputs:
+ *       -signal_noise_ratio : the desired SNR in dB.
+ *       -numVecs            : number of sub-frames
+ *       -corrMat             : a matrix of correlation coefficients where
+ *                             each row is a set of correlation coefficients of
+ *                             one sub-frame.
+ *       -varscale           : a scale computed when WebRtcIsac_GetLpcCoefUb()
+ *                             is called.
+ *
+ * Outputs:
+ *       -gain               : pointer to a buffer where LP gains are written.
+ *
+ */
+void
+WebRtcIsac_GetLpcGain(
+    double        signal_noise_ratio,
+    const double* filtCoeffVecs,
+    int           numVecs,
+    double*       gain,
+    double        corrMat[][UB_LPC_ORDER + 1],
+    const double* varscale)
+{
+  WebRtc_Word16 j, n;
+  WebRtc_Word16 subFrameCntr;
+  double aPolynom[ORDERLO + 1];
+  double res_nrg;
+
+  /* Hearing threshold: -28 dB converted to linear amplitude. */
+  const double HearThresOffset = -28.0;
+  const double H_T_H = pow(10.0, 0.05 * HearThresOffset);
+  /* Target SNR in linear amplitude; divide by sqrt(12) = 3.46 */
+  const double S_N_R = pow(10.0, 0.05 * signal_noise_ratio) / 3.46;
+
+  /* Leading LP coefficient is always 1. */
+  aPolynom[0] = 1;
+  for(subFrameCntr = 0; subFrameCntr < numVecs; subFrameCntr++)
+  {
+    if(subFrameCntr == SUBFRAMES)
+    {
+      // we are in second half of a SWB frame. use new varscale
+      varscale++;
+    }
+    /* Copy this sub-frame's LP coefficients; vectors are laid out with
+       stride UB_LPC_ORDER + 1 and the element at offset 0 is skipped. */
+    memcpy(&aPolynom[1], &filtCoeffVecs[(subFrameCntr * (UB_LPC_ORDER + 1)) +
+                                        1], sizeof(double) * UB_LPC_ORDER);
+
+    /* Residual energy a'*R*a, exploiting symmetry of the correlation
+       row (corrMat[i][|j-n|]). */
+    res_nrg = 0.0;
+    for(j = 0; j <= UB_LPC_ORDER; j++)
+    {
+      for(n = 0; n <= j; n++)
+      {
+        res_nrg += aPolynom[j] * corrMat[subFrameCntr][j-n] *
+            aPolynom[n];
+      }
+      for(n = j+1; n <= UB_LPC_ORDER; n++)
+      {
+        res_nrg += aPolynom[j] * corrMat[subFrameCntr][n-j] *
+            aPolynom[n];
+      }
+    }
+
+    /* add hearing threshold and compute the gain */
+    gain[subFrameCntr] = S_N_R / (sqrt(res_nrg) / *varscale + H_T_H);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_analysis.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_analysis.h
new file mode 100644
index 0000000..4eafeac
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_analysis.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_analysis.h
+ *
+ * LPC functions
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_
+
+#include "settings.h"
+#include "structs.h"
+
+/* Levinson-Durbin recursion: LP coefficients a[] and reflection
+   coefficients k[] from autocorrelation r[]. */
+double WebRtcIsac_LevDurb(double *a, double *k, double *r, int order);
+
+/* Variance scale from signal energy and pitch gains (lower band). */
+void WebRtcIsac_GetVars(const double *input, const WebRtc_Word16 *pitchGains_Q12,
+                       double *oldEnergy, double *varscale);
+
+/* LP analysis of the lower band (lo/hi split signals). */
+void WebRtcIsac_GetLpcCoefLb(double *inLo, double *inHi, MaskFiltstr *maskdata,
+                             double signal_noise_ratio, const WebRtc_Word16 *pitchGains_Q12,
+                             double *lo_coeff, double *hi_coeff);
+
+
+/* LPC gains per sub-frame from LP coefficients and correlations. */
+void WebRtcIsac_GetLpcGain(
+    double         signal_noise_ratio,
+    const double*  filtCoeffVecs,
+    int            numVecs,
+    double*        gain,
+    double         corrLo[][UB_LPC_ORDER + 1],
+    const double*  varscale);
+
+/* LP analysis of the upper band (12 or 16 kHz bandwidth). */
+void WebRtcIsac_GetLpcCoefUb(
+    double*      inSignal,
+    MaskFiltstr* maskdata,
+    double*      lpCoeff,
+    double       corr[][UB_LPC_ORDER + 1],
+    double*      varscale,
+    WebRtc_Word16  bandwidth);
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_gain_swb_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_gain_swb_tables.c
new file mode 100644
index 0000000..25c69cb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_gain_swb_tables.c
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * SWB_KLT_Tables_LPCGain.c
+ *
+ * This file defines tables used for entropy coding of LPC Gain
+ * of upper-band.
+ *
+ */
+
+#include "lpc_gain_swb_tables.h"
+#include "settings.h"
+#include "typedefs.h"
+
+const double WebRtcIsac_kQSizeLpcGain = 0.100000;
+
+const double WebRtcIsac_kMeanLpcGain = -3.3822;
+
+/*
+* The smallest reconstruction points for quantization of
+* LPC gains.
+*/
+const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES] =
+{
+   -0.800000, -1.000000, -1.200000, -2.200000, -3.000000, -12.700000
+};
+
+/*
+* Number of reconstruction points of quantizers for LPC Gains.
+*/
+const WebRtc_Word16 WebRtcIsac_kNumQCellLpcGain[SUBFRAMES] =
+{
+    17,  20,  25,  45,  77, 170
+};
+/*
+* Starting index for entropy decoder to search for the right interval,
+* one entry per LAR coefficient
+*/
+const WebRtc_UWord16 WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES] =
+{
+     8,  10,  12,  22,  38,  85
+};
+
+/*
+* The following 6 vectors define CDF of 6 decorrelated LPC
+* gains.
+*/
+const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec0[18] =
+{
+     0,    10,    27,    83,   234,   568,  1601,  4683, 16830, 57534, 63437,
+ 64767, 65229, 65408, 65483, 65514, 65527, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec1[21] =
+{
+     0,    15,    33,    84,   185,   385,   807,  1619,  3529,  7850, 19488,
+ 51365, 62437, 64548, 65088, 65304, 65409, 65484, 65507, 65522, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec2[26] =
+{
+     0,    15,    29,    54,    89,   145,   228,   380,   652,  1493,  4260,
+ 12359, 34133, 50749, 57224, 60814, 62927, 64078, 64742, 65103, 65311, 65418,
+ 65473, 65509, 65521, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec3[46] =
+{
+     0,     8,    12,    16,    26,    42,    56,    76,   111,   164,   247,
+   366,   508,   693,  1000,  1442,  2155,  3188,  4854,  7387, 11249, 17617,
+ 30079, 46711, 56291, 60127, 62140, 63258, 63954, 64384, 64690, 64891, 65031,
+ 65139, 65227, 65293, 65351, 65399, 65438, 65467, 65492, 65504, 65510, 65518,
+ 65523, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec4[78] =
+{
+     0,    17,    29,    39,    51,    70,   104,   154,   234,   324,   443,
+   590,   760,   971,  1202,  1494,  1845,  2274,  2797,  3366,  4088,  4905,
+  5899,  7142,  8683, 10625, 12983, 16095, 20637, 28216, 38859, 47237, 51537,
+ 54150, 56066, 57583, 58756, 59685, 60458, 61103, 61659, 62144, 62550, 62886,
+ 63186, 63480, 63743, 63954, 64148, 64320, 64467, 64600, 64719, 64837, 64939,
+ 65014, 65098, 65160, 65211, 65250, 65290, 65325, 65344, 65366, 65391, 65410,
+ 65430, 65447, 65460, 65474, 65487, 65494, 65501, 65509, 65513, 65518, 65520,
+ 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec5[171] =
+{
+     0,    10,    12,    14,    16,    18,    23,    29,    35,    42,    51,
+    58,    65,    72,    78,    87,    96,   103,   111,   122,   134,   150,
+   167,   184,   202,   223,   244,   265,   289,   315,   346,   379,   414,
+   450,   491,   532,   572,   613,   656,   700,   751,   802,   853,   905,
+   957,  1021,  1098,  1174,  1250,  1331,  1413,  1490,  1565,  1647,  1730,
+  1821,  1913,  2004,  2100,  2207,  2314,  2420,  2532,  2652,  2783,  2921,
+  3056,  3189,  3327,  3468,  3640,  3817,  3993,  4171,  4362,  4554,  4751,
+  4948,  5142,  5346,  5566,  5799,  6044,  6301,  6565,  6852,  7150,  7470,
+  7797,  8143,  8492,  8835,  9181,  9547,  9919, 10315, 10718, 11136, 11566,
+ 12015, 12482, 12967, 13458, 13953, 14432, 14903, 15416, 15936, 16452, 16967,
+ 17492, 18024, 18600, 19173, 19736, 20311, 20911, 21490, 22041, 22597, 23157,
+ 23768, 24405, 25034, 25660, 26280, 26899, 27614, 28331, 29015, 29702, 30403,
+ 31107, 31817, 32566, 33381, 34224, 35099, 36112, 37222, 38375, 39549, 40801,
+ 42074, 43350, 44626, 45982, 47354, 48860, 50361, 51845, 53312, 54739, 56026,
+ 57116, 58104, 58996, 59842, 60658, 61488, 62324, 63057, 63769, 64285, 64779,
+ 65076, 65344, 65430, 65500, 65517, 65535
+};
+
+/*
+* An array of pointers to CDFs of decorrelated LPC Gains
+*/
+const WebRtc_UWord16* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES] =
+{
+    WebRtcIsac_kLpcGainCdfVec0, WebRtcIsac_kLpcGainCdfVec1,
+    WebRtcIsac_kLpcGainCdfVec2, WebRtcIsac_kLpcGainCdfVec3,
+    WebRtcIsac_kLpcGainCdfVec4, WebRtcIsac_kLpcGainCdfVec5
+};
+
+/*
+* A matrix to decorrelate LPC gains of subframes.
+*/
+const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES] =
+{
+    {-0.150860,  0.327872,  0.367220,  0.504613,  0.559270,  0.409234},
+    { 0.457128, -0.613591, -0.289283, -0.029734,  0.393760,  0.418240},
+    {-0.626043,  0.136489, -0.439118, -0.448323,  0.135987,  0.420869},
+    { 0.526617,  0.480187,  0.242552, -0.488754, -0.158713,  0.411331},
+    {-0.302587, -0.494953,  0.588112, -0.063035, -0.404290,  0.387510},
+    { 0.086378,  0.147714, -0.428875,  0.548300, -0.570121,  0.401391}
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_gain_swb_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_gain_swb_tables.h
new file mode 100644
index 0000000..1eba97c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_gain_swb_tables.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * SWB_KLT_Tables_LPCGain.h
+ *
+ * This file declares tables used for entropy coding of LPC Gain
+ * of upper-band.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_
+
+#include "settings.h"
+#include "typedefs.h"
+
+extern const double WebRtcIsac_kQSizeLpcGain;
+
+extern const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES];
+
+extern const WebRtc_Word16 WebRtcIsac_kNumQCellLpcGain[SUBFRAMES];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec0[18];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec1[21];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec2[26];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec3[46];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec4[78];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec5[171];
+
+extern const WebRtc_UWord16* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES];
+
+extern const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES];
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb12_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb12_tables.c
new file mode 100644
index 0000000..695d583
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb12_tables.c
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * SWB_KLT_Tables.c
+ *
+ * This file defines tables used for entropy coding of LPC shape of
+ * upper-band signal if the bandwidth is 12 kHz.
+ *
+ */
+
+#include "lpc_shape_swb12_tables.h"
+#include "settings.h"
+#include "typedefs.h"
+
+/*
+* Mean value of LAR
+*/
+const double WebRtcIsac_kMeanLarUb12[UB_LPC_ORDER] =
+{
+  0.03748928306641, 0.09453441192543, -0.01112522344398, 0.03800237516842
+};
+
+/*
+* A rotation matrix to decorrelate intra-vector correlation,
+* i.e. correlation among components of LAR vector.
+*/
+const double WebRtcIsac_kIntraVecDecorrMatUb12[UB_LPC_ORDER][UB_LPC_ORDER] =
+{
+    {-0.00075365493856,  -0.05809964887743,  -0.23397966154116,   0.97050367376411},
+    { 0.00625021257734,  -0.17299965610679,   0.95977735920651,   0.22104179375008},
+    { 0.20543384258374,  -0.96202143495696,  -0.15301870801552,  -0.09432375099565},
+    {-0.97865075648479,  -0.20300322280841,  -0.02581111653779,  -0.01913568980258}
+};
+
+/*
+* A rotation matrix to remove correlation among LAR coefficients
+* of different LAR vectors. One might guess that decorrelation matrix
+* for the first component should differ from the second component
+* but we haven't observed a significant benefit of having different
+* decorrelation matrices for different components.
+*/
+const double WebRtcIsac_kInterVecDecorrMatUb12
+[UB_LPC_VEC_PER_FRAME][UB_LPC_VEC_PER_FRAME] =
+{
+    { 0.70650597970460,  -0.70770707262373},
+    {-0.70770707262373,  -0.70650597970460}
+};
+
+/*
+* LAR quantization step-size.
+*/
+const double WebRtcIsac_kLpcShapeQStepSizeUb12 = 0.150000;
+
+/*
+* The smallest reconstruction points for quantization of LAR coefficients.
+*/
+const double WebRtcIsac_kLpcShapeLeftRecPointUb12
+[UB_LPC_ORDER*UB_LPC_VEC_PER_FRAME] =
+{
+    -0.900000, -1.050000, -1.350000, -1.800000, -1.350000, -1.650000,
+    -2.250000, -3.450000
+};
+
+/*
+* Number of reconstruction points of quantizers for LAR coefficients.
+*/
+const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb12
+[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] =
+{
+    13, 15, 19, 27, 19, 24, 32, 48
+};
+
+/*
+* Starting index for entropy decoder to search for the right interval,
+* one entry per LAR coefficient
+*/
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb12
+[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] =
+{
+     6,  7,  9, 13,  9, 12, 16, 24
+};
+
+/*
+* The following 8 vectors define CDF of 8 decorrelated LAR
+* coefficients.
+*/
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec0Ub12[14] =
+{
+     0,    13,    95,   418,  1687,  6498, 21317, 44200, 59029, 63849, 65147,
+ 65449, 65525, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub12[16] =
+{
+     0,    10,    59,   255,   858,  2667,  8200, 22609, 42988, 57202, 62947,
+ 64743, 65308, 65476, 65522, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub12[20] =
+{
+     0,    18,    40,   118,   332,   857,  2017,  4822, 11321, 24330, 41279,
+ 54342, 60637, 63394, 64659, 65184, 65398, 65482, 65518, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub12[28] =
+{
+     0,    21,    38,    90,   196,   398,   770,  1400,  2589,  4650,  8211,
+ 14933, 26044, 39592, 50814, 57452, 60971, 62884, 63995, 64621, 65019, 65273,
+ 65410, 65480, 65514, 65522, 65531, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub12[20] =
+{
+     0,     7,    46,   141,   403,   969,  2132,  4649, 10633, 24902, 43254,
+ 54665, 59928, 62674, 64173, 64938, 65293, 65464, 65523, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub12[25] =
+{
+     0,     7,    22,    72,   174,   411,   854,  1737,  3545,  6774, 13165,
+ 25221, 40980, 52821, 58714, 61706, 63472, 64437, 64989, 65287, 65430, 65503,
+ 65525, 65529, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub12[33] =
+{
+     0,    11,    21,    36,    65,   128,   228,   401,   707,  1241,  2126,
+  3589,  6060, 10517, 18853, 31114, 42477, 49770, 54271, 57467, 59838, 61569,
+ 62831, 63772, 64433, 64833, 65123, 65306, 65419, 65466, 65499, 65519, 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub12[49] =
+{
+     0,    14,    34,    67,   107,   167,   245,   326,   449,   645,   861,
+  1155,  1508,  2003,  2669,  3544,  4592,  5961,  7583,  9887, 13256, 18765,
+ 26519, 34077, 40034, 44349, 47795, 50663, 53262, 55473, 57458, 59122, 60592,
+ 61742, 62690, 63391, 63997, 64463, 64794, 65045, 65207, 65309, 65394, 65443,
+ 65478, 65504, 65514, 65523, 65535
+};
+
+/*
+* An array of pointers to CDFs of decorrelated LARs
+*/
+const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb12
+[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] =
+{
+    WebRtcIsac_kLpcShapeCdfVec0Ub12, WebRtcIsac_kLpcShapeCdfVec1Ub12,
+    WebRtcIsac_kLpcShapeCdfVec2Ub12, WebRtcIsac_kLpcShapeCdfVec3Ub12,
+    WebRtcIsac_kLpcShapeCdfVec4Ub12, WebRtcIsac_kLpcShapeCdfVec5Ub12,
+    WebRtcIsac_kLpcShapeCdfVec6Ub12, WebRtcIsac_kLpcShapeCdfVec7Ub12
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb12_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb12_tables.h
new file mode 100644
index 0000000..1e93847
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb12_tables.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_shape_swb12_tables.h
+ *
+ * This file declares tables used for entropy coding of LPC shape of
+ * upper-band signal if the bandwidth is 12 kHz.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_
+
+#include "settings.h"
+#include "typedefs.h"
+
+extern const double WebRtcIsac_kMeanLarUb12[UB_LPC_ORDER];
+
+extern const double WebRtcIsac_kMeanLpcGain;
+
+extern const double WebRtcIsac_kIntraVecDecorrMatUb12[UB_LPC_ORDER][UB_LPC_ORDER];
+
+extern const double WebRtcIsac_kInterVecDecorrMatUb12
+[UB_LPC_VEC_PER_FRAME][UB_LPC_VEC_PER_FRAME];
+
+extern const double WebRtcIsac_kLpcShapeQStepSizeUb12;
+
+extern const double WebRtcIsac_kLpcShapeLeftRecPointUb12
+[UB_LPC_ORDER*UB_LPC_VEC_PER_FRAME];
+
+
+extern const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb12
+[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb12
+[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec0Ub12[14];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub12[16];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub12[20];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub12[28];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub12[20];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub12[25];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub12[33];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub12[49];
+
+extern const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb12
+[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb16_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb16_tables.c
new file mode 100644
index 0000000..89f4523
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb16_tables.c
@@ -0,0 +1,248 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * SWB16_KLT_Tables.c
+ *
+ * This file defines tables used for entropy coding of LPC shape of
+ * upper-band signal if the bandwidth is 16 kHz.
+ *
+ */
+
+#include "lpc_shape_swb16_tables.h"
+#include "settings.h"
+#include "typedefs.h"
+
+/*
+* Mean value of LAR
+*/
+const double WebRtcIsac_kMeanLarUb16[UB_LPC_ORDER] =
+{
+0.454978, 0.364747, 0.102999, 0.104523
+};
+
+/*
+* A rotation matrix to decorrelate intra-vector correlation,
+* i.e. correlation among components of LAR vector.
+*/
+const double WebRtcIsac_kIintraVecDecorrMatUb16[UB_LPC_ORDER][UB_LPC_ORDER] =
+{
+    {-0.020528, -0.085858, -0.002431,  0.996093},
+    {-0.033155,  0.036102,  0.998786,  0.004866},
+    { 0.202627,  0.974853, -0.028940,  0.088132},
+    {-0.978479,  0.202454, -0.039785, -0.002811}
+};
+
+/*
+* A rotation matrix to remove correlation among LAR coefficients
+* of different LAR vectors. One might guess that decorrelation matrix
+* for the first component should differ from the second component
+* but we haven't observed a significant benefit of having different
+* decorrelation matrices for different components.
+*/
+const double WebRtcIsac_kInterVecDecorrMatUb16
+[UB16_LPC_VEC_PER_FRAME][UB16_LPC_VEC_PER_FRAME] =
+{
+    { 0.291675, -0.515786,  0.644927,  0.482658},
+    {-0.647220,  0.479712,  0.289556,  0.516856},
+    { 0.643084,  0.485489, -0.289307,  0.516763},
+    {-0.287185, -0.517823, -0.645389,  0.482553}
+};
+
+/*
+* The following 16 vectors define CDF of 16 decorrelated LAR
+* coefficients.
+*/
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub16[14] =
+{
+     0,      2,     20,    159,   1034,   5688,  20892,  44653,
+ 59849,  64485,  65383,  65518,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub16[16] =
+{
+     0,      1,      7,     43,    276,   1496,   6681,  21653,
+ 43891,  58859,  64022,  65248,  65489,  65529,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub16[18] =
+{
+     0,      1,      9,     54,    238,    933,   3192,   9461,
+ 23226,  42146,  56138,  62413,  64623,  65300,  65473,  65521,
+ 65533,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub16[30] =
+{
+     0,      2,      4,      8,     17,     36,     75,    155,
+   329,    683,   1376,   2662,   5047,   9508,  17526,  29027,
+ 40363,  48997,  55096,  59180,  61789,  63407,  64400,  64967,
+ 65273,  65429,  65497,  65526,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub16[16] =
+{
+     0,      1,     10,     63,    361,   1785,   7407,  22242,
+ 43337,  58125,  63729,  65181,  65472,  65527,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub16[17] =
+{
+     0,      1,      7,     29,    134,    599,   2443,   8590,
+ 22962,  42635,  56911,  63060,  64940,  65408,  65513,  65531,
+ 65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub16[21] =
+{
+     0,      1,      5,     16,     57,    191,    611,   1808,
+  4847,  11755,  24612,  40910,  53789,  60698,  63729,  64924,
+ 65346,  65486,  65523,  65532,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub16[36] =
+{
+     0,      1,      4,     12,     25,     55,    104,    184,
+   314,    539,    926,   1550,   2479,   3861,   5892,   8845,
+ 13281,  20018,  29019,  38029,  45581,  51557,  56057,  59284,
+ 61517,  63047,  64030,  64648,  65031,  65261,  65402,  65480,
+ 65518,  65530,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec8Ub16[21] =
+{
+     0,      1,      2,      7,     26,    103,    351,   1149,
+  3583,  10204,  23846,  41711,  55361,  61917,  64382,  65186,
+ 65433,  65506,  65528,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub160[21] =
+{
+     0,      6,     19,     63,    205,    638,   1799,   4784,
+ 11721,  24494,  40803,  53805,  60886,  63822,  64931,  65333,
+ 65472,  65517,  65530,  65533,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub161[28] =
+{
+     0,      1,      3,     11,     31,     86,    221,    506,
+  1101,   2296,   4486,   8477,  15356,  26079,  38941,  49952,
+ 57165,  61257,  63426,  64549,  65097,  65351,  65463,  65510,
+ 65526,  65532,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub162[55] =
+{
+     0,      3,     12,     23,     42,     65,     89,    115,
+   150,    195,    248,    327,    430,    580,    784,   1099,
+  1586,   2358,   3651,   5899,   9568,  14312,  19158,  23776,
+ 28267,  32663,  36991,  41153,  45098,  48680,  51870,  54729,
+ 57141,  59158,  60772,  62029,  63000,  63761,  64322,  64728,
+ 65000,  65192,  65321,  65411,  65463,  65496,  65514,  65523,
+ 65527,  65529,  65531,  65532,  65533,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub163[26] =
+{
+     0,      2,      4,     10,     21,     48,    114,    280,
+   701,   1765,   4555,  11270,  24267,  41213,  54285,  61003,
+ 63767,  64840,  65254,  65421,  65489,  65514,  65526,  65532,
+ 65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub164[28] =
+{
+     0,      1,      3,      6,     15,     36,     82,    196,
+   453,   1087,   2557,   5923,  13016,  25366,  40449,  52582,
+ 59539,  62896,  64389,  65033,  65316,  65442,  65494,  65519,
+ 65529,  65533,  65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub165[34] =
+{
+     0,      2,      4,      8,     18,     35,     73,    146,
+   279,    524,    980,   1789,   3235,   5784,  10040,  16998,
+ 27070,  38543,  48499,  55421,  59712,  62257,  63748,  64591,
+ 65041,  65278,  65410,  65474,  65508,  65522,  65530,  65533,
+ 65534,  65535
+};
+
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub166[71] =
+{
+     0,      1,      2,      6,     13,     26,     55,     92,
+   141,    191,    242,    296,    355,    429,    522,    636,
+   777,    947,   1162,   1428,   1753,   2137,   2605,   3140,
+  3743,   4409,   5164,   6016,   6982,   8118,   9451,  10993,
+ 12754,  14810,  17130,  19780,  22864,  26424,  30547,  35222,
+ 40140,  44716,  48698,  52056,  54850,  57162,  59068,  60643,
+ 61877,  62827,  63561,  64113,  64519,  64807,  65019,  65167,
+ 65272,  65343,  65399,  65440,  65471,  65487,  65500,  65509,
+ 65518,  65524,  65527,  65531,  65533,  65534,  65535
+};
+
+/*
+* An array of pointers to CDFs of decorrelated LARs
+*/
+const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = {
+     WebRtcIsac_kLpcShapeCdfVec01Ub16,
+     WebRtcIsac_kLpcShapeCdfVec1Ub16,
+     WebRtcIsac_kLpcShapeCdfVec2Ub16,
+     WebRtcIsac_kLpcShapeCdfVec3Ub16,
+     WebRtcIsac_kLpcShapeCdfVec4Ub16,
+     WebRtcIsac_kLpcShapeCdfVec5Ub16,
+     WebRtcIsac_kLpcShapeCdfVec6Ub16,
+     WebRtcIsac_kLpcShapeCdfVec7Ub16,
+     WebRtcIsac_kLpcShapeCdfVec8Ub16,
+     WebRtcIsac_kLpcShapeCdfVec01Ub160,
+     WebRtcIsac_kLpcShapeCdfVec01Ub161,
+     WebRtcIsac_kLpcShapeCdfVec01Ub162,
+     WebRtcIsac_kLpcShapeCdfVec01Ub163,
+     WebRtcIsac_kLpcShapeCdfVec01Ub164,
+     WebRtcIsac_kLpcShapeCdfVec01Ub165,
+     WebRtcIsac_kLpcShapeCdfVec01Ub166
+};
+
+/*
+* The smallest reconstruction points for quantization of LAR coefficients.
+*/
+const double WebRtcIsac_kLpcShapeLeftRecPointUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] =
+{
+ -0.8250,  -0.9750,  -1.1250,  -2.1750,  -0.9750,  -1.1250,  -1.4250,
+ -2.6250,  -1.4250,  -1.2750,  -1.8750,  -3.6750,  -1.7250,  -1.8750,
+ -2.3250,  -5.4750
+};
+
+/*
+* Number of reconstruction points of quantizers for LAR coefficients.
+*/
+const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] =
+{
+   13,    15,    17,    29,    15,    16,    20,    35,    20,
+   20,    27,    54,    25,    27,    33,    70
+};
+
+/*
+* Starting index for entropy decoder to search for the right interval,
+* one entry per LAR coefficient
+*/
+const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] =
+{
+    6,     7,     8,    14,     7,     8,    10,    17,    10,
+   10,    13,    27,    12,    13,    16,    35
+};
+
+/*
+* LAR quantization step-size.
+*/
+const double WebRtcIsac_kLpcShapeQStepSizeUb16 = 0.150000;
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb16_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb16_tables.h
new file mode 100644
index 0000000..68d08b2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_shape_swb16_tables.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_shape_swb16_tables.h
+ *
+ * This file declares tables used for entropy coding of LPC shape of
+ * upper-band signal if the bandwidth is 16 kHz.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_
+
+#include "settings.h"
+#include "typedefs.h"
+
+
+extern const double WebRtcIsac_kMeanLarUb16[UB_LPC_ORDER]; /* mean LAR value per coefficient */
+
+extern const double WebRtcIsac_kIintraVecDecorrMatUb16[UB_LPC_ORDER][UB_LPC_ORDER]; /* NOTE(review): "Iintra" spelling matches the out-of-view definition; rename in both places or not at all */
+
+extern const double WebRtcIsac_kInterVecDecorrMatUb16
+[UB16_LPC_VEC_PER_FRAME][UB16_LPC_VEC_PER_FRAME];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub16[14];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub16[16];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub16[18];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub16[30];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub16[16];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub16[17];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub16[21];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub16[36];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec8Ub16[21];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub160[21];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub161[28];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub162[55];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub163[26];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub164[28];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub165[34];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub166[71];
+
+extern const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; /* one cdf-table pointer per LAR coefficient */
+
+extern const double WebRtcIsac_kLpcShapeLeftRecPointUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+
+extern const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+
+extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb16
+[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+
+extern const double WebRtcIsac_kLpcShapeQStepSizeUb16;
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_tables.c
new file mode 100644
index 0000000..7df6121
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_tables.c
@@ -0,0 +1,1129 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* coding tables for the KLT coefficients */
+
+#include "lpc_tables.h"
+#include "settings.h"
+
+/* indices of KLT coefficients used (identity map: all 12 gain coefficients are kept) */
+const WebRtc_UWord16 WebRtcIsac_kQKltSelIndGain[12] = {
+ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+ 10,  11};
+
+const WebRtc_UWord16 WebRtcIsac_kQKltSelIndShape[108] = { /* identity map: all 108 shape coefficients are kept */
+ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+ 10,  11,  12,  13,  14,  15,  16,  17,  18,  19,
+ 20,  21,  22,  23,  24,  25,  26,  27,  28,  29,
+ 30,  31,  32,  33,  34,  35,  36,  37,  38,  39,
+ 40,  41,  42,  43,  44,  45,  46,  47,  48,  49,
+ 50,  51,  52,  53,  54,  55,  56,  57,  58,  59,
+ 60,  61,  62,  63,  64,  65,  66,  67,  68,  69,
+ 70,  71,  72,  73,  74,  75,  76,  77,  78,  79,
+ 80,  81,  82,  83,  84,  85,  86,  87,  88,  89,
+ 90,  91,  92,  93,  94,  95,  96,  97,  98,  99,
+ 100,  101,  102,  103,  104,  105,  106,  107};
+
+/* cdf array for model indicator; monotone non-decreasing, last entry 65535 */
+const WebRtc_UWord16 WebRtcIsac_kQKltModelCdf[4] = {
+ 0,  15434,  37548,  65535};
+
+/* pointer to cdf array for model indicator (single-entry pointer table) */
+const WebRtc_UWord16 *WebRtcIsac_kQKltModelCdfPtr[1] = {WebRtcIsac_kQKltModelCdf};
+
+/* initial cdf table index for the decoder of the model indicator */
+const WebRtc_UWord16 WebRtcIsac_kQKltModelInitIndex[1] = {1};
+
+/* offset to go from rounded value to quantization index (gain, one per coefficient) */
+const short WebRtcIsac_kQKltQuantMinGain[12] = {
+ 3,  6,  4,  6,  6,  9,  5,  16,  11,  34, 32,  47};
+
+
+const short WebRtcIsac_kQKltQuantMinShape[108] = { /* rounded-value -> quantization-index offsets, one per shape coefficient */
+ 0,  0,  0,  0,  0,  0,  0,  0,  0,  1,
+ 1,  1,  1,  1,  2,  2,  2,  3,  0,  0,
+ 0,  0,  1,  0,  0,  0,  0,  1,  1,  1,
+ 1,  1,  1,  2,  2,  3,  0,  0,  0,  0,
+ 1,  0,  1,  1,  1,  1,  1,  1,  1,  2,
+ 2,  4,  3,  5,  0,  0,  0,  0,  1,  1,
+ 1,  1,  1,  1,  2,  1,  2,  2,  3,  4,
+ 4,  7,  0,  0,  1,  1,  1,  1,  1,  1,
+ 1,  2,  3,  2,  3,  4,  4,  5,  7,  13,
+ 0,  1,  1,  2,  3,  2,  2,  2,  4,  4,
+ 5,  6,  7,  11,  9,  13,  12,  26};
+
+/* maximum quantization index (gain coefficients) */
+const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndGain[12] = {
+ 6,  12,  8,  14,  10,  19,  12,  31,  22,  56, 52,  138};
+
+const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndShape[108] = { /* maximum quantization index per shape coefficient */
+ 0,  0,  0,  0,  0,  0,  0,  0,  0,  1,
+ 2,  2,  2,  2,  4,  4,  5,  6,  0,  0,
+ 0,  0,  1,  0,  0,  0,  0,  1,  2,  2,
+ 2,  2,  3,  4,  5,  7,  0,  0,  0,  0,
+ 2,  0,  2,  2,  2,  2,  3,  2,  2,  4,
+ 4,  6,  6,  9,  0,  0,  0,  0,  2,  2,
+ 2,  2,  2,  2,  3,  2,  4,  4,  7,  7,
+ 9,  13,  0,  0,  2,  2,  2,  2,  2,  2,
+ 3,  4,  5,  4,  6,  8,  8,  10,  16,  25,
+ 0,  2,  2,  4,  5,  4,  4,  4,  7,  8,
+ 9,  10,  13,  19,  17,  23,  25,  49};
+
+/* index offsets, one row per model (3 models) */
+const WebRtc_UWord16 WebRtcIsac_kQKltOffsetGain[3][12] = {
+{ 0,  7,  20,  29,  44,  55,  75,  88,  120,  143,
+ 200,  253},
+{ 0,  7,  19,  27,  42,  53,  73,  86,  117,  140,
+ 197,  249},
+{ 0,  7,  20,  28,  44,  55,  75,  89,  121,  145,
+ 202,  257}};
+
+const WebRtc_UWord16 WebRtcIsac_kQKltOffsetShape[3][108] = { /* index offsets for the 108 shape coefficients, one row per model */
+{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+ 11,  14,  17,  20,  23,  28,  33,  39,  46,  47,
+ 48,  49,  50,  52,  53,  54,  55,  56,  58,  61,
+ 64,  67,  70,  74,  79,  85,  93,  94,  95,  96,
+ 97,  100,  101,  104,  107,  110,  113,  117,  120,  123,
+ 128,  133,  140,  147,  157,  158,  159,  160,  161,  164,
+ 167,  170,  173,  176,  179,  183,  186,  191,  196,  204,
+ 212,  222,  236,  237,  238,  241,  244,  247,  250,  253,
+ 256,  260,  265,  271,  276,  283,  292,  301,  312,  329,
+ 355,  356,  359,  362,  367,  373,  378,  383,  388,  396,
+ 405,  415,  426,  440,  460,  478,  502,  528},
+{ 0,  1,  2,  3,  4,  6,  7,  8,  9,  11,
+ 13,  16,  19,  22,  26,  29,  34,  39,  45,  46,
+ 47,  48,  49,  50,  51,  52,  53,  55,  57,  60,
+ 63,  66,  70,  73,  78,  84,  91,  92,  93,  94,
+ 95,  96,  97,  99,  102,  105,  108,  111,  114,  118,
+ 123,  128,  134,  141,  151,  152,  153,  154,  156,  159,
+ 162,  165,  168,  171,  174,  177,  181,  186,  194,  200,
+ 208,  218,  233,  234,  235,  236,  239,  242,  245,  248,
+ 251,  254,  258,  263,  270,  277,  288,  297,  308,  324,
+ 349,  351,  354,  357,  361,  366,  372,  378,  383,  390,
+ 398,  407,  420,  431,  450,  472,  496,  524},
+{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  11,
+ 14,  17,  20,  23,  26,  29,  34,  40,  47,  48,
+ 49,  50,  51,  52,  53,  54,  55,  58,  61,  64,
+ 67,  70,  73,  77,  82,  88,  96,  97,  98,  99,
+ 101,  102,  104,  107,  110,  113,  116,  119,  122,  125,
+ 129,  134,  141,  150,  160,  161,  162,  163,  166,  168,
+ 171,  174,  177,  180,  183,  186,  190,  195,  201,  208,
+ 216,  226,  243,  244,  245,  248,  251,  254,  257,  260,
+ 263,  268,  273,  278,  284,  291,  299,  310,  323,  340,
+ 366,  368,  371,  374,  379,  383,  389,  394,  399,  406,
+ 414,  422,  433,  445,  461,  480,  505,  533}};
+
+/* initial cdf index for KLT gain coefficients, one row per model */
+const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexGain[3][12] = {
+{ 3,  6,  4,  7,  5,  10,  6,  16,  11,  28,
+ 26,  69},
+{ 3,  6,  4,  7,  5,  10,  6,  15,  11,  28,
+ 26,  69},
+{ 3,  6,  4,  8,  5,  10,  7,  16,  12,  28,
+ 27,  70}};
+
+const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexShape[3][108] = { /* initial cdf index for KLT shape coefficients, one row per model */
+{ 0,  0,  0,  0,  0,  0,  0,  0,  0,  1,
+ 1,  1,  1,  1,  2,  2,  3,  3,  0,  0,
+ 0,  0,  1,  0,  0,  0,  0,  1,  1,  1,
+ 1,  1,  2,  2,  3,  4,  0,  0,  0,  0,
+ 1,  0,  1,  1,  1,  1,  2,  1,  1,  2,
+ 2,  3,  3,  5,  0,  0,  0,  0,  1,  1,
+ 1,  1,  1,  1,  2,  1,  2,  2,  4,  4,
+ 5,  7,  0,  0,  1,  1,  1,  1,  1,  1,
+ 2,  2,  3,  2,  3,  4,  4,  5,  8,  13,
+ 0,  1,  1,  2,  3,  2,  2,  2,  4,  4,
+ 5,  5,  7,  10,  9,  12,  13,  25},
+{ 0,  0,  0,  0,  1,  0,  0,  0,  1,  1,
+ 1,  1,  1,  2,  1,  2,  2,  3,  0,  0,
+ 0,  0,  0,  0,  0,  0,  1,  1,  1,  1,
+ 1,  2,  1,  2,  3,  3,  0,  0,  0,  0,
+ 0,  0,  1,  1,  1,  1,  1,  1,  2,  2,
+ 2,  3,  3,  5,  0,  0,  0,  1,  1,  1,
+ 1,  1,  1,  1,  1,  2,  2,  4,  3,  4,
+ 5,  7,  0,  0,  0,  1,  1,  1,  1,  1,
+ 1,  2,  2,  3,  3,  5,  4,  5,  8,  12,
+ 1,  1,  1,  2,  2,  3,  3,  2,  3,  4,
+ 4,  6,  5,  9,  11,  12,  14,  25},
+{ 0,  0,  0,  0,  0,  0,  0,  0,  1,  1,
+ 1,  1,  1,  1,  1,  2,  3,  3,  0,  0,
+ 0,  0,  0,  0,  0,  0,  1,  1,  1,  1,
+ 1,  1,  2,  2,  3,  4,  0,  0,  0,  1,
+ 0,  1,  1,  1,  1,  1,  1,  1,  1,  2,
+ 2,  3,  4,  5,  0,  0,  0,  1,  1,  1,
+ 1,  1,  1,  1,  1,  2,  2,  3,  3,  4,
+ 5,  8,  0,  0,  1,  1,  1,  1,  1,  1,
+ 2,  2,  2,  3,  3,  4,  5,  6,  8,  13,
+ 1,  1,  1,  2,  2,  3,  2,  2,  3,  4,
+ 4,  5,  6,  8,  9,  12,  14,  25}};
+
+/* offsets for quantizer representation levels, one entry per model */
+const WebRtc_UWord16 WebRtcIsac_kQKltOfLevelsGain[3] = {
+ 0,  392,  779};
+
+const WebRtc_UWord16 WebRtcIsac_kQKltOfLevelsShape[3] = { /* per-model offsets into WebRtcIsac_kQKltLevelsShape */
+ 0,  578,  1152};
+
+/* quantizer representation levels (gain; the models' tables are concatenated — see WebRtcIsac_kQKltOfLevelsGain above) */
+const double WebRtcIsac_kQKltLevelsGain[1176] = {
+-2.78127126, -1.76745590, -0.77913790, -0.00437329,  0.79961206,  1.81775776,  2.81389782, -5.78753143, -4.88384084, -3.89320940,
+-2.88133610, -1.92859977, -0.86347396,  0.02003888,  0.86140400,  1.89667156,  2.97134967,  3.98781964,  4.91727277,  5.82865898,
+-4.11195874, -2.80898424, -1.87547977, -0.80943825, -0.00679084,  0.79573851,  1.83953397,  2.67586037,  3.76274082, -6.10933968,
+-4.93034581, -3.89281296, -2.91530625, -1.89684163, -0.85319130, -0.02275767,  0.86862017,  1.91578276,  2.96107339,  3.96543056,
+ 4.91369908,  5.91058154,  6.83848343,  8.07136925, -5.87470395, -4.84703049, -3.84284597, -2.86168446, -1.89290192, -0.82798145,
+-0.00080013,  0.82594974,  1.85754329,  2.88351798,  3.96172628, -8.85684885, -7.87387461, -6.97811862, -5.93256270, -4.94301439,
+-3.95513701, -2.96041544, -1.94031192, -0.87961478, -0.00456201,  0.89911505,  1.91723376,  2.94011511,  3.93302540,  4.97990967,
+ 5.93133404,  7.02181199,  7.92407762,  8.80155440,  10.04665814, -4.82396678, -3.85612158, -2.89482244, -1.89558408, -0.90036978,
+-0.00677823,  0.90607989,  1.90937981,  2.91175777,  3.91637730,  4.97565723,  5.84771228,  7.11145863, -16.07879840, -15.03776309,
+-13.93905670, -12.95671800, -11.89171202, -10.95820934, -9.95923714, -8.94357334, -7.99068299, -6.97481009, -5.94826231, -4.96673988,
+-3.97490466, -2.97846970, -1.95130435, -0.94215262, -0.01444043,  0.96770704,  1.95848598,  2.94107862,  3.95666119,  4.97253085,
+ 5.97191122,  6.93277360,  7.96608727,  8.87958779,  10.00264269,  10.86560820,  12.07449071,  13.04491775,  13.97507061,  14.91845261,
+-10.85696295, -9.83365357, -9.01245635, -7.95915145, -6.95625003, -5.95362618, -4.93468444, -3.98760978, -2.95044407, -1.97041277,
+-0.97701799, -0.00840234,  0.97834289,  1.98361415,  2.97802439,  3.96415871,  4.95369042,  5.94101770,  6.92756798,  7.94063998,
+ 8.85951828,  9.97077022,  11.00068503, -33.92030406, -32.81426422, -32.00000000, -31.13243639, -30.11886909, -29.06017570, -28.12598824,
+-27.22045482, -25.81215858, -25.07849962, -23.93018013, -23.02097643, -21.89529725, -20.99091085, -19.98889048, -18.94327044, -17.96562071,
+-16.96126218, -15.95054062, -14.98516200, -13.97101012, -13.02106500, -11.98438006, -11.03216748, -9.95930286, -8.97043946, -7.98085082,
+-6.98360995, -5.98998802, -4.98668173, -4.00032906, -3.00420619, -1.98701132, -0.99324682, -0.00609324,  0.98297834,  1.99483076,
+ 3.00305044,  3.97142097,  4.97525759,  5.98612258,  6.97448236,  7.97575900,  9.01086211,  9.98665542,  11.00541438,  11.98078628,
+ 12.92352471,  14.06849675,  14.99949430,  15.94904834,  16.97440321,  18.04040916,  18.88987609,  20.05312391,  21.00000000,  21.79443341,
+-31.98578825, -31.00000000, -29.89060567, -28.98555686, -27.97114102, -26.84935410, -26.02402230, -24.94195278, -23.92336849, -22.95552382,
+-21.97932836, -20.96055470, -19.99649553, -19.03436122, -17.96706525, -17.01139515, -16.01363516, -14.99154248, -14.00298333, -12.99630613,
+-11.99955519, -10.99000421, -10.00819092, -8.99763648, -7.98431793, -7.01769025, -5.99604690, -4.99980697, -3.99334671, -3.01748192,
+-2.02051217, -1.00848371, -0.01942358,  1.00477757,  1.95477872,  2.98593031,  3.98779079,  4.96862849,  6.02694771,  6.93983733,
+ 7.89874717,  8.99615862,  10.02367921,  10.96293452,  11.84351528,  12.92207187,  13.85122329,  15.05146877,  15.99371264,  17.00000000,
+ 18.00000000,  19.00000000,  19.82763573, -47.00000000, -46.00000000, -44.87138498, -44.00000000, -43.00000000, -42.00000000, -41.00000000,
+-39.88966612, -38.98913239, -37.80306486, -37.23584325, -35.94200288, -34.99881301, -34.11361858, -33.06507360, -32.13129135, -30.90891364,
+-29.81511907, -28.99250380, -28.04535391, -26.99767800, -26.04418164, -24.95687851, -24.04865595, -23.03392645, -21.89366707, -20.93517364,
+-19.99388660, -18.91620943, -18.03749683, -16.99532379, -15.98683813, -15.06421479, -13.99359211, -12.99714098, -11.97022520, -10.98500279,
+-9.98834422, -8.95729330, -8.01232284, -7.00253661, -5.99681626, -5.01207817, -3.95914904, -3.01232178, -1.96615919, -0.97687670,
+ 0.01228030,  0.98412288,  2.01753544,  3.00580570,  3.97783510,  4.98846894,  6.01321400,  7.00867732,  8.00416375,  9.01771966,
+ 9.98637729,  10.98255180,  11.99194163,  13.01807333,  14.00999545,  15.00118556,  16.00089224,  17.00584148,  17.98251763,  18.99942091,
+ 19.96917690,  20.97839265,  21.98207297,  23.00171271,  23.99930737,  24.99746061,  26.00936304,  26.98240132,  28.01126868,  29.01395915,
+ 29.98153507,  31.01376711,  31.99876818,  33.00475317,  33.99753994,  34.99493913,  35.98933585,  36.95620160,  37.98428461,  38.99317544,
+ 40.01832073,  40.98048133,  41.95999283,  42.98232091,  43.96523612,  44.99574268,  45.99524194,  47.05464025,  48.03821548,  48.99354366,
+ 49.96400411,  50.98017973,  51.95184408,  52.96291806,  54.00194392,  54.96603783,  55.95623778,  57.03076595,  58.05889901,  58.99081551,
+ 59.97928121,  61.05071612,  62.03971580,  63.01286038,  64.01290338,  65.02074503,  65.99454594,  67.00399425,  67.96571257,  68.95305727,
+ 69.92030664,  70.95594862,  71.98088567,  73.04764124,  74.00285480,  75.02696330,  75.89837673,  76.93459997,  78.16266309,  78.83317543,
+ 80.00000000,  80.87251574,  82.09803524,  83.10671664,  84.00000000,  84.77023523,  86.00000000,  87.00000000,  87.92946897,  88.69159118,
+ 90.00000000,  90.90535270, -3.00000000, -2.00000000, -0.77592424, -0.00564307,  0.76727305,  2.00000000,  3.00000000, -6.00000000,
+-5.00000000, -4.00000000, -2.92897924, -1.85623684, -0.72445303, -0.00119184,  0.72896652,  2.05710416,  3.17909894,  4.00000000,
+ 5.00000000, -3.00000000, -2.00000000, -0.67480586, -0.00028016,  0.66618169,  2.00000000,  3.00000000,  4.00000000, -7.00000000,
+-6.00000000, -5.00000000, -3.78336783, -2.84811556, -2.04088844, -0.71114371,  0.03142493,  0.69662772,  1.91417930,  3.00000000,
+ 4.01411062,  5.00000000,  6.00000000,  7.00000000, -6.00000000, -5.00000000, -4.00000000, -3.00000000, -2.00000000, -0.63703469,
+ 0.00169604,  0.66294191,  1.83808563,  3.00000000,  4.00000000, -8.00000000, -7.00000000, -6.03082300, -5.00000000, -3.88061019,
+-2.92670084, -1.99902336, -0.72898996, -0.02880170,  0.73769927,  1.95920233,  2.78356263,  4.08100921,  5.00000000,  6.00000000,
+ 6.78771437,  8.00000000,  9.00000000,  10.00000000,  11.00000000, -5.00000000, -4.00000000, -2.88150384, -1.89520024, -0.71479482,
+ 0.00962397,  0.72816030,  1.73583550,  3.00000000,  4.00000000,  5.00000000,  6.00000000,  7.00000000, -16.00000000, -15.00000000,
+-13.80516401, -13.00000000, -12.00000000, -11.00000000, -10.02723144, -9.11825995, -8.05820112, -7.00000000, -6.17943541, -5.01837980,
+-3.97546169, -2.92806857, -1.89778775, -0.81138893, -0.02246016,  0.80528415,  1.85705214,  2.96438524,  3.97540151,  4.79684246,
+ 6.00000000,  6.75549513,  8.12185828,  9.00000000,  10.00000000,  11.00000000,  12.00000000,  13.00000000,  14.00000000, -11.00000000,
+-10.00000000, -9.00000000, -7.91603344, -6.77865892, -5.85765006, -4.93342332, -3.96679157, -2.84925552, -1.89230732, -0.85384229,
+ 0.00579591,  0.84863246,  1.89006713,  2.89483818,  3.87322971,  5.13228411,  6.00000000,  7.00000000,  8.00000000,  9.00000000,
+ 10.00000000,  11.00000000, -34.00000000, -33.00000000, -32.00000000, -31.00000000, -30.00000000, -29.00000000, -28.00000000, -27.00000000,
+-26.00000000, -25.00000000, -24.00000000, -23.00000000, -22.00000000, -21.00000000, -20.19501953, -19.00000000, -18.00000000, -17.00000000,
+-16.00000000, -14.89069633, -14.00000000, -13.00000000, -12.16260304, -11.15418282, -9.83543570, -8.85600407, -7.82712677, -7.05664308,
+-5.97007352, -4.89268438, -3.93822771, -2.94975269, -1.92192127, -0.90702480,  0.03974847,  0.92488359,  1.93747579,  2.94500522,
+ 3.95181797,  4.95433087,  5.95141808,  7.00212920,  8.02964757,  9.03210585,  9.84644504,  10.82907720,  11.87622530,  12.96908371,
+ 14.00000000,  15.16963413,  15.94902025,  17.00000000,  18.00000000,  19.00000000,  20.00000000,  21.00000000,  22.00000000, -29.00000000,
+-27.79780781, -27.00757888, -26.01571026, -24.89695568, -23.99946491, -22.98699614, -21.96678139, -20.99883532, -20.00851529, -18.94738054,
+-17.98672566, -16.98684787, -15.96917397, -14.99856852, -13.98974852, -12.97786927, -11.96110939, -10.98877093, -9.99875257, -8.99001359,
+-8.00799989, -6.99471760, -6.00034670, -4.99936372, -4.00581479, -3.00424577, -2.02047620, -0.99713266, -0.00366397,  1.00803955,
+ 1.98452687,  3.00748501,  4.02714611,  4.97661026,  5.99337271,  6.99754716,  8.00713602,  8.97184974,  9.98047901,  10.97685939,
+ 11.99533975,  12.96107876,  13.95061478,  15.00756776,  15.94078690,  16.88231059,  17.92069248,  18.78011047,  20.00000000,  21.00000000,
+ 22.00000000, -55.76988333, -54.96048193, -53.88411581, -52.94117980, -51.80983449, -50.90359699, -50.00000000, -48.99838741, -47.97685542,
+-47.03288597, -45.97820919, -45.02418374, -43.90081897, -42.88832512, -41.98234549, -40.96745512, -39.98148729, -39.06792854, -37.96493755,
+-36.98707870, -36.03416079, -35.01192444, -33.95785029, -32.99469087, -31.96633807, -31.01769053, -29.99727691, -28.99329690, -27.98873019,
+-27.00344273, -25.97657141, -25.00511074, -23.96689479, -23.01566842, -22.01632643, -21.00076343, -19.97788007, -18.97248680, -17.96076284,
+-16.97585453, -15.98345587, -15.01612745, -13.96862118, -12.96622055, -12.02196641, -11.02078103, -9.98445656, -9.00050060, -8.03442387,
+-7.00363761, -5.97921358, -4.98886269, -4.00528221, -3.01672947, -1.98599795, -1.00668518, -0.02633490,  1.00794139,  2.00837138,
+ 2.99213287,  3.98710216,  4.99064334,  6.01416391,  7.01759708,  7.97878151,  8.99665730,  10.02656114,  11.01863887,  12.01207901,
+ 13.00958725,  13.99237829,  15.00954971,  16.00724653,  17.00606559,  17.99886292,  18.99611967,  19.98808171,  21.01871930,  21.97014763,
+ 22.99833843,  24.00316842,  24.99949142,  25.98539601,  27.02480733,  27.98075377,  28.98266019,  30.00611445,  30.99409128,  31.94523141,
+ 32.97688339,  33.98800206,  35.00177074,  35.98639997,  36.98939428,  37.95644255,  39.00114054,  39.99492439,  40.99338254,  41.97050844,
+ 43.03085663,  43.96757668,  44.97800970,  45.95953358,  46.98109551,  47.99368477,  49.00141209,  49.94459923,  50.93298108,  51.99894661,
+ 53.06463883,  53.99704669,  55.02037199,  55.98368047,  57.01930954,  58.03813852,  58.96232502,  60.01644186,  61.03254711,  62.01086576,
+ 62.87962247,  63.98378413,  65.02189831,  65.93003954,  66.92439900,  68.07051633,  68.95928756,  70.03315022,  71.05579859,  72.00000000,
+ 73.00000000,  74.00000000,  75.00000000,  75.93485291,  77.20950456,  78.00000000,  79.00000000,  79.91519960,  81.00000000, -3.00000000,
+-2.00000000, -0.65174074, -0.00092112,  0.62967387,  2.00000000,  3.00000000, -6.00000000, -5.00000000, -4.00000000, -2.89861729,
+-1.69999061, -0.72632201,  0.00219241,  0.72891750,  1.73257865,  3.00000000,  3.76561508,  5.00000000,  6.00000000, -3.00000000,
+-2.00000000, -0.66227013,  0.00389373,  0.66163500,  2.00000000,  3.00000000,  4.00000000, -8.00000000, -7.00000000, -6.00000000,
+-4.76421796, -4.04320264, -3.01415201, -1.84346485, -0.77185048,  0.00061977,  0.76274524,  1.84330156,  3.00000000,  4.00000000,
+ 5.00000000,  6.00000000,  7.00000000, -6.00000000, -5.00000000, -4.00000000, -3.00000000, -1.75749611, -0.72951347, -0.00104394,
+ 0.72040315,  1.72594036,  3.00000000,  4.00000000, -9.00000000, -8.00000000, -7.00000000, -5.90394062, -5.00000000, -3.75562807,
+-2.89699407, -1.86696610, -0.79056636, -0.00330943,  0.79744554,  1.85149941,  2.91118681,  3.99520311,  4.96341987,  6.00000000,
+ 7.00000000,  8.00000000,  9.00000000,  10.00000000, -6.00000000, -4.80151529, -4.00000000, -2.87442856, -1.85285815, -0.77767592,
+-0.02071301,  0.81752572,  1.82503940,  2.79602150,  3.92870203,  5.00000000,  6.00000000,  7.00000000, -17.00000000, -16.00000000,
+-15.00000000, -14.00000000, -13.00000000, -12.00000000, -11.00000000, -9.80059874, -9.00000000, -8.00185204, -7.13087808, -5.92942149,
+-4.77883243, -3.93417708, -2.88004618, -1.89952522, -0.86239337,  0.00332274,  0.86657548,  1.89479279,  2.89701813,  3.90987417,
+ 4.98910145,  6.07676766,  7.00000000,  8.00000000,  9.00000000,  10.00000000,  11.00000000,  12.00000000,  13.00000000,  14.00000000,
+-12.00000000, -11.00000000, -9.89996262, -8.85894205, -7.87594823, -6.99685317, -5.94917589, -4.93914916, -3.93317670, -2.93174244,
+-1.90737478, -0.90982242,  0.00803316,  0.90111563,  1.90362879,  2.90332432,  3.90654662,  4.94461954,  5.87963665,  6.91988113,
+ 7.79514004,  8.98805413,  10.00000000,  11.00000000, -35.00000000, -34.00000000, -33.00000000, -32.00000000, -31.00000000, -30.00000000,
+-29.00000000, -28.00000000, -27.00000000, -26.00000000, -25.00000000, -24.00000000, -22.88310970, -22.00000000, -21.00000000, -20.00000000,
+-19.00000000, -18.00000000, -17.00000000, -16.11854974, -15.00000000, -14.10507667, -13.04497040, -11.94846700, -10.97432494, -9.94514368,
+-8.97311414, -7.94171496, -6.97232122, -5.98590548, -4.97455572, -3.95477903, -2.93935454, -1.95573532, -0.97120273, -0.02084826,
+ 0.95689153,  1.96679781,  2.97060165,  3.96660892,  4.96754331,  5.97996089,  6.93822411,  7.96618014,  8.95809791,  9.98891474,
+ 10.95713402,  11.85433084,  13.03831696,  13.84035295,  15.00729606,  15.98652872,  17.20557599,  18.00000000,  18.90794805,  20.00000000,
+ 21.00000000, -34.00000000, -33.00000000, -32.00000000, -31.00000000, -30.00000000, -28.97280602, -28.00000000, -27.16255057, -26.04078092,
+-24.85442050, -24.15783484, -22.78614956, -21.95739865, -21.21844626, -20.03008104, -19.03888543, -17.90460490, -17.02064693, -15.84673652,
+-14.87140709, -13.87996048, -12.94907251, -11.96795995, -11.00977925, -9.95103238, -8.96674655, -7.96351667, -6.96886200, -5.99335494,
+-4.97515534, -3.98891694, -2.99581150, -1.98758360, -0.99249128, -0.00001403,  0.98807868,  1.99119869,  2.99019366,  3.98612953,
+ 5.00312941,  5.98833080,  6.99686651,  7.98373889,  8.97942222,  9.94202752,  10.99671622,  11.94306164,  12.98539825,  13.90728690,
+ 14.89907642,  15.94836675,  16.89611342,  17.84084949,  18.74910958,  20.00000000, -67.00000000, -66.00000000, -65.00000000, -64.00000000,
+-63.02511977, -62.00000000, -61.06061493, -59.95964043, -59.12824439, -58.00000000, -57.00000000, -56.00000000, -54.87857996, -54.09689334,
+-53.00000000, -52.21057366, -50.93867921, -50.03032952, -49.19283867, -47.89439051, -46.99505692, -46.04895543, -44.89687413, -43.78942208,
+-42.99025156, -41.88436155, -40.99169704, -40.00320429, -38.90181498, -38.06029271, -37.05030818, -36.07554573, -35.03202233, -33.93117946,
+-32.97736655, -31.98942819, -30.99546798, -30.01511004, -28.97296525, -28.02561164, -26.94386985, -25.99632704, -25.00461143, -24.01578192,
+-22.99177609, -22.02261094, -20.97939001, -19.96176066, -19.00442980, -18.01529434, -17.00196902, -15.99794828, -14.98675055, -13.97517657,
+-12.98676283, -11.99718760, -11.00167809, -9.98872268, -9.02138474, -8.00320338, -6.99542797, -6.00059136, -5.01311763, -4.00336943,
+-3.00348281, -1.99365875, -0.98223019,  0.00126343,  0.99699237,  1.99381968,  3.00054436,  3.99898305,  5.00160508,  6.00310399,
+ 6.99885096,  8.02740039,  8.99515550,  9.98962151,  11.00642302,  11.98694516,  13.00018933,  13.97726018,  14.99186645,  16.00580131,
+ 16.97434224,  17.96982658,  19.00066438,  20.01228749,  21.00741822,  21.94988312,  23.00860212,  23.98801542,  24.97638417,  25.98003521,
+ 27.02336188,  27.99667029,  29.01014125,  30.02481912,  31.01415797,  31.97399854,  33.06214485,  33.99929330,  34.94095386,  35.96368372,
+ 36.96980925,  37.98389244,  39.01121235,  40.00715026,  41.06382894,  41.96618280,  43.01555590,  43.95430436,  45.01970038,  45.99967821,
+ 47.19847394,  48.04852502,  49.10609965,  50.04244122,  50.86051406,  51.92983796,  53.02781107,  54.06248545,  54.89942009,  56.08347165,
+ 57.06887956,  58.09671115,  59.07832400,  59.87005277,  61.14778499,  62.00000000,  63.00000000,  64.00000000,  65.00000000,  66.00000000,
+ 67.00000000,  68.00000000,  69.00000000,  70.00000000,  71.00000000,  72.00000000};
+
+const double WebRtcIsac_kQKltLevelsShape[1735] = {
+ 0.00032397,  0.00008053, -0.00061202, -0.00012620,  0.00030437,  0.00054764, -0.00027902,  0.00069360,  0.00029449, -0.80219239,
+ 0.00091089, -0.74514927, -0.00094283,  0.64030631, -0.60509119,  0.00035575,  0.61851665, -0.62129957,  0.00375219,  0.60054900,
+-0.61554359,  0.00054977,  0.63362016, -1.73118727, -0.65422341,  0.00524568,  0.66165298,  1.76785515, -1.83182018, -0.65997434,
+-0.00011887,  0.67524299,  1.79933938, -1.76344480, -0.72547708, -0.00133017,  0.73104704,  1.75305377,  2.85164534, -2.80423916,
+-1.71959639, -0.75419722, -0.00329945,  0.77196760,  1.72211069,  2.87339653,  0.00031089, -0.00015311,  0.00018201, -0.00035035,
+-0.77357251,  0.00154647, -0.00047625, -0.00045299,  0.00086590,  0.00044762, -0.83383829,  0.00024787, -0.68526258, -0.00122472,
+ 0.64643255, -0.60904942, -0.00448987,  0.62309184, -0.59626442, -0.00574132,  0.62296546, -0.63222115,  0.00013441,  0.63609545,
+-0.66911055, -0.00369971,  0.66346095,  2.07281301, -1.77184694, -0.67640425, -0.00010145,  0.64818392,  1.74948973, -1.69420224,
+-0.71943894, -0.00004680,  0.75303493,  1.81075983,  2.80610041, -2.80005755, -1.79866753, -0.77409777, -0.00084220,  0.80141293,
+ 1.78291081,  2.73954236,  3.82994169,  0.00015140, -0.00012766, -0.00034241, -0.00119125, -0.76113497,  0.00069246,  0.76722027,
+ 0.00132862, -0.69107530,  0.00010656,  0.77061578, -0.78012970,  0.00095947,  0.77828502, -0.64787758,  0.00217168,  0.63050167,
+-0.58601125,  0.00306596,  0.59466308, -0.58603410,  0.00059779,  0.64257970,  1.76512766, -0.61193600, -0.00259517,  0.59767574,
+-0.61026273,  0.00315811,  0.61725479, -1.69169719, -0.65816029,  0.00067575,  0.65576890,  2.00000000, -1.72689193, -0.69780808,
+-0.00040990,  0.70668487,  1.74198458, -3.79028154, -3.00000000, -1.73194459, -0.70179341, -0.00106695,  0.71302629,  1.76849782,
+-2.89332364, -1.78585007, -0.78731491, -0.00132610,  0.79692976,  1.75247009,  2.97828682, -5.26238694, -3.69559829, -2.87286122,
+-1.84908818, -0.84434577, -0.01167975,  0.84641753,  1.84087672,  2.87628156,  3.83556679, -0.00190204,  0.00092642,  0.00354385,
+-0.00012982, -0.67742785,  0.00229509,  0.64935672, -0.58444751,  0.00470733,  0.57299534, -0.58456202, -0.00097715,  0.64593607,
+-0.64060330, -0.00638534,  0.59680157, -0.59287537,  0.00490772,  0.58919707, -0.60306173, -0.00417464,  0.60562100, -1.75218757,
+-0.63018569, -0.00225922,  0.63863300, -0.63949939, -0.00126421,  0.64268914, -1.75851182, -0.68318060,  0.00510418,  0.69049211,
+ 1.88178506, -1.71136148, -0.72710534, -0.00815559,  0.73412917,  1.79996711, -2.77111145, -1.73940498, -0.78212945,  0.01074476,
+ 0.77688916,  1.76873972,  2.87281379,  3.77554698, -3.75832725, -2.95463235, -1.80451491, -0.80017226,  0.00149902,  0.80729206,
+ 1.78265046,  2.89391793, -3.78236148, -2.83640598, -1.82532067, -0.88844327, -0.00620952,  0.88208030,  1.85757631,  2.81712391,
+ 3.88430176,  5.16179367, -7.00000000, -5.93805408, -4.87172597, -3.87524433, -2.89399744, -1.92359563, -0.92136341, -0.00172725,
+ 0.93087018,  1.90528280,  2.89809686,  3.88085708,  4.89147740,  5.89078692, -0.00239502,  0.00312564, -1.00000000,  0.00178325,
+ 1.00000000, -0.62198029,  0.00143254,  0.65344051, -0.59851220, -0.00676987,  0.61510140, -0.58894151,  0.00385055,  0.59794203,
+-0.59808568, -0.00038214,  0.57625703, -0.63009713, -0.01107985,  0.61278758, -0.64206758, -0.00154369,  0.65480598,  1.80604162,
+-1.80909286, -0.67810514,  0.00205762,  0.68571097,  1.79453891, -3.22682422, -1.73808453, -0.71870305, -0.00738594,  0.71486172,
+ 1.73005326, -1.66891897, -0.73689615, -0.00616203,  0.74262409,  1.73807899, -2.92417482, -1.73866741, -0.78133871,  0.00764425,
+ 0.80027264,  1.78668732,  2.74992588, -4.00000000, -2.75578740, -1.83697516, -0.83117035, -0.00355191,  0.83527172,  1.82814700,
+ 2.77377675,  3.80718693, -3.81667698, -2.83575471, -1.83372350, -0.86579471,  0.00547578,  0.87582281,  1.82858793,  2.87265007,
+ 3.91405377, -4.87521600, -3.78999094, -2.86437014, -1.86964365, -0.90618018,  0.00128243,  0.91497811,  1.87374952,  2.83199819,
+ 3.91519130,  4.76632822, -6.68713448, -6.01252467, -4.94587936, -3.88795368, -2.91299088, -1.92592211, -0.95504570, -0.00089980,
+ 0.94565200,  1.93239633,  2.91832808,  3.91363475,  4.88920034,  5.96471415,  6.83905252,  7.86195009,  8.81571018, -12.96141759,
+-11.73039516, -10.96459719, -9.97382433, -9.04414433, -7.89460619, -6.96628608, -5.93236595, -4.93337924, -3.95479990, -2.96451499,
+-1.96635876, -0.97271229, -0.00402238,  0.98343930,  1.98348291,  2.96641164,  3.95456471,  4.95517089,  5.98975714,  6.90322073,
+ 7.90468849,  8.85639467,  9.97255498,  10.79006309,  11.81988596,  0.04950500, -1.00000000, -0.01226628,  1.00000000, -0.59479469,
+-0.10438305,  0.59822144, -2.00000000, -0.67109149, -0.09256692,  0.65171621,  2.00000000, -3.00000000, -1.68391999, -0.76681039,
+-0.03354151,  0.71509146,  1.77615472, -2.00000000, -0.68661511, -0.02497881,  0.66478398,  2.00000000, -2.00000000, -0.67032784,
+-0.00920582,  0.64892756,  2.00000000, -2.00000000, -0.68561894,  0.03641869,  0.73021611,  1.68293863, -4.00000000, -2.72024184,
+-1.80096059, -0.81696185,  0.03604685,  0.79232033,  1.70070730,  3.00000000, -4.00000000, -2.71795670, -1.80482986, -0.86001162,
+ 0.03764903,  0.87723968,  1.79970771,  2.72685932,  3.67589143, -5.00000000, -4.00000000, -2.85492548, -1.78996365, -0.83250358,
+-0.01376828,  0.84195506,  1.78161105,  2.76754458,  4.00000000, -6.00000000, -5.00000000, -3.82268811, -2.77563624, -1.82608163,
+-0.86486114, -0.02671886,  0.86693165,  1.88422879,  2.86248347,  3.95632216, -7.00000000, -6.00000000, -5.00000000, -3.77533988,
+-2.86391432, -1.87052039, -0.90513658,  0.06271236,  0.91083620,  1.85734756,  2.86031688,  3.82019418,  4.94420394,  6.00000000,
+-11.00000000, -10.00000000, -9.00000000, -8.00000000, -6.91952415, -6.00000000, -4.92044374, -3.87845165, -2.87392362, -1.88413020,
+-0.91915740,  0.00318517,  0.91602800,  1.89664838,  2.88925058,  3.84123856,  4.78988651,  5.94526812,  6.81953917,  8.00000000,
+-9.00000000, -8.00000000, -7.03319143, -5.94530963, -4.86669720, -3.92438007, -2.88620396, -1.92848070, -0.94365985,  0.01671855,
+ 0.97349410,  1.93419878,  2.89740109,  3.89662823,  4.83235583,  5.88106535,  6.80328232,  8.00000000, -13.00000000, -12.00000000,
+-11.00000000, -10.00000000, -9.00000000, -7.86033489, -6.83344055, -5.89844215, -4.90811454, -3.94841298, -2.95820490, -1.98627966,
+-0.99161468, -0.02286136,  0.96055651,  1.95052433,  2.93969396,  3.94304346,  4.88522624,  5.87434241,  6.78309433,  7.87244101,
+ 9.00000000,  10.00000000, -12.09117356, -11.00000000, -10.00000000, -8.84766108, -7.86934236, -6.98544896, -5.94233429, -4.95583292,
+-3.95575986, -2.97085529, -1.98955811, -0.99359873, -0.00485413,  0.98298870,  1.98093258,  2.96430203,  3.95540216,  4.96915010,
+ 5.96775124,  6.99236918,  7.96503302,  8.99864542,  9.85857723,  10.96541926,  11.91647197,  12.71060069, -26.00000000, -25.00000000,
+-24.00585596, -23.11642573, -22.14271284, -20.89800711, -19.87815799, -19.05036354, -17.88555651, -16.86471209, -15.97711073, -14.94012359,
+-14.02661226, -12.98243228, -11.97489256, -10.97402777, -9.96425624, -9.01085220, -7.97372506, -6.98795002, -5.97271328, -5.00191694,
+-3.98055849, -2.98458048, -1.99470442, -0.99656768, -0.00825666,  1.00272004,  1.99922218,  2.99357669,  4.01407905,  5.01003897,
+ 5.98115528,  7.00018958,  8.00338125,  8.98981046,  9.98990318,  10.96341479,  11.96866930,  12.99175139,  13.94580443,  14.95745083,
+ 15.98992869,  16.97484646,  17.99630043,  18.93396897,  19.88347741,  20.96532482,  21.92191032,  23.22314702,  0.00006846,  0.00014352,
+-0.00056203,  0.00027588, -0.00147678,  1.00000000,  0.00003823,  0.00001975, -0.00033710, -0.00096712,  1.00000000, -1.00000000,
+ 0.00067511, -1.00000000,  0.00342065,  1.00000000, -1.00000000,  0.00196254,  1.00000000, -1.00000000,  0.00201173,  1.00000000,
+-2.00000000, -1.00000000, -0.00381686,  1.00000000, -1.00000000,  0.00178037,  1.00000000, -2.00000000, -1.00000000, -0.00320274,
+ 1.00000000,  2.00000000, -2.00000000, -1.00000000,  0.00426519,  1.00000000,  2.00000000, -3.00000000, -2.00000000, -1.00000000,
+-0.00074072,  0.64654602,  2.00000000,  0.00031217,  0.00063348,  0.00020247,  0.00047891,  0.00122893, -0.00150669, -0.00148276,
+ 0.00016848,  0.00147085,  1.00000000, -0.00088160,  1.00000000, -1.00000000,  0.00381641,  1.00000000, -1.00000000,  0.00129816,
+ 1.00000000, -1.00000000,  0.00074903,  1.00000000, -2.00000000, -0.76230566, -0.00370764,  0.82467977, -0.78769346, -0.00492670,
+ 0.84532630, -2.00000000, -0.70943195, -0.01257613,  0.75905385,  2.00000000, -2.00000000, -0.62780445, -0.00408633,  0.60272506,
+ 2.00000000,  3.00000000, -3.00000000, -2.00000000, -0.61412985,  0.00102833,  0.61527589,  2.00000000,  3.00000000,  0.00012115,
+-0.00080909,  0.00071061, -0.00227957,  0.00179794,  0.00103827, -1.00000000,  0.00444757, -1.00000000,  0.00604068,  1.00000000,
+-1.00000000,  0.00427327,  1.00000000, -1.00000000,  0.00086662,  1.00000000, -1.00000000, -0.00837492,  1.00000000, -0.65715934,
+-0.00645342,  0.64004630, -2.00000000, -0.64987682, -0.01449567,  0.69893373, -2.00000000, -0.63221961,  0.00421765,  0.62452105,
+ 2.00000000, -2.00000000, -0.60027006, -0.00110630,  0.62033821,  2.00000000, -2.00000000, -0.59823932,  0.00928313,  0.62188520,
+ 2.00000000,  3.00000000, -3.00000000, -2.00000000, -0.63230286, -0.00248555,  0.62632575,  2.00000000,  3.00000000, -5.00000000,
+-4.00000000, -3.00000000, -2.00000000, -0.66521143,  0.00544305,  0.66930486,  2.00000000,  3.00000000,  4.00000000,  0.00077008,
+ 0.00061140, -0.00009317, -0.00049643,  1.00000000, -1.00000000, -0.00285084,  1.00000000, -1.00000000,  0.00601784,  1.00000000,
+-1.00000000, -0.00091887,  0.75122772, -0.71579859, -0.00043545,  1.00000000, -0.85571363, -0.00227654,  0.63816873, -1.00000000,
+-0.00393484,  0.76748004, -0.58223659, -0.01229777,  0.58080322, -0.61945902, -0.00232238,  0.62277938,  2.00000000, -2.00000000,
+-0.60595489, -0.00535702,  0.60547736,  2.00000000, -4.00000000, -3.00000000, -2.00000000, -0.62368122,  0.01112097,  0.63997294,
+ 2.00000000,  3.00000000, -3.00000000, -2.00000000, -0.64318217,  0.00515139,  0.64781184,  2.00000000, -3.00000000, -1.78031579,
+-0.67122588,  0.02153711,  0.67899877,  2.00000000,  3.00000000,  4.00000000, -4.00000000, -3.00000000, -1.80503233, -0.69835727,
+-0.00270770,  0.70999554,  1.77332849,  3.00000000,  4.00000000,  5.00000000, -8.00000000, -7.00000000, -6.00000000, -5.00000000,
+-4.00000000, -2.81600693, -1.72970368, -0.73779413, -0.01384841,  0.75694606,  1.80042618,  3.00000000,  4.00000000,  5.00000000,
+ 6.00000000, -0.00051787,  0.00059593, -0.00023319, -1.00000000,  0.00191861,  0.79547197, -0.75020995,  0.00217840,  0.69165833,
+-1.00000000, -0.00304964,  0.67698951, -0.64516943, -0.00657667,  0.59260129, -0.62819301, -0.00456626,  0.59426260, -0.60909519,
+ 0.00256476,  0.61660408, -0.66560131, -0.00293463,  0.67477566,  2.00000000, -2.00000000, -0.62484067,  0.00505116,  0.63491494,
+ 2.00000000, -3.00000000, -2.00000000, -0.68427246,  0.00924353,  0.68755774,  2.00000000,  3.00000000, -3.00000000, -2.00000000,
+-0.65390928,  0.01008025,  0.65849449,  2.00000000,  3.00000000, -5.00000000, -4.00000000, -3.00000000, -1.70848232, -0.72079538,
+-0.00007674,  0.71556176,  1.76815351,  3.00000000,  4.00000000,  5.00000000, -4.00000000, -3.00000000, -1.82887466, -0.73529886,
+ 0.00033458,  0.73847588,  1.83009515,  3.00000000,  4.00000000, -5.00000000, -4.00000000, -2.83203553, -1.79500085, -0.77452749,
+-0.00614320,  0.77416943,  1.82469471,  2.77034612,  4.00000000,  5.00000000, -7.00000000, -6.00000000, -5.00000000, -4.00000000,
+-2.76574798, -1.84700836, -0.80822297,  0.00054165,  0.80901445,  1.85687331,  2.75680191,  3.81986695,  5.00000000,  6.00000000,
+ 7.00000000,  8.00000000, -13.00000000, -12.00000000, -11.00000000, -10.00000000, -9.00000000, -8.00000000, -7.00000000, -6.00000000,
+-5.00000000, -3.88304817, -2.93396067, -1.86645989, -0.84825410,  0.00666207,  0.84853252,  1.88634684,  2.95282618,  3.89813287,
+ 4.89189079,  6.00000000,  7.00000000,  8.00000000,  9.00000000,  10.00000000,  11.00000000, -0.00344877,  1.00000000, -0.61413659,
+-0.02115630,  0.59438887, -0.60873054,  0.00844993,  0.62510557, -2.00000000, -0.75002947,  0.00120913,  0.66616051, -2.00000000,
+-0.72324691,  0.04760499,  0.70532533,  2.00000000, -3.00000000, -1.66577589, -0.78941380, -0.01909714,  0.74993685,  1.70945570,
+-1.64422308, -0.70992006, -0.02795108,  0.76990363,  1.79682243,  2.96233315, -1.71686461, -0.76572785, -0.00041846,  0.78174132,
+ 1.66217596, -3.00000000, -1.77033369, -0.79475091,  0.03709740,  0.80097076,  1.83947400,  2.85879773, -4.00000000, -3.16528651,
+-1.79564411, -0.90078981,  0.02403102,  0.86138856,  1.84207433,  2.74584048, -4.00000000, -2.91249347, -1.87804769, -0.87323549,
+ 0.08164382,  0.89037056,  1.82505263,  2.71336163,  4.00000000, -4.81262228, -3.87173565, -2.83424209, -1.87517938, -0.86199960,
+ 0.00268598,  0.89547657,  1.90713511,  2.85219071,  3.86417171,  4.80711781,  6.00000000,  7.00000000, -5.00000000, -3.82388480,
+-2.82875808, -1.90350457, -0.90795818,  0.03047007,  0.93676836,  1.88844957,  2.83269711,  3.76109686,  5.00000000, -9.00000000,
+-8.00000000, -6.88037957, -5.88776398, -4.91209139, -3.93902541, -2.90989221, -1.92281230, -0.98960535, -0.07440511,  0.94023957,
+ 1.91666262,  2.83340828,  3.83651295,  4.77839424,  6.12284019,  7.00000000,  8.00000000,  9.00000000, -12.00000000, -11.00000000,
+-10.00000000, -9.00000000, -8.00000000, -6.68554513, -5.97994708, -4.98789075, -3.91383581, -2.92952795, -1.91727195, -0.93148075,
+-0.00568870,  0.93515148,  1.94580068,  2.93838956,  3.92567644,  4.96573603,  5.95402763,  7.00000000,  8.00000000,  9.00000000,
+-11.00000000, -9.90096030, -8.97868124, -7.93663988, -6.98806055, -5.95937864, -4.93473664, -3.95454756, -2.96518446, -1.97711766,
+-0.98552111, -0.03317271,  0.95115775,  1.93785086,  2.96310779,  3.93322450,  5.01716212,  5.85909823,  6.89163669,  7.97492693,
+ 8.85698897,  9.79802946,  11.09373957,  12.00000000, -13.00000000, -12.00000000, -10.67579109, -9.95079100, -8.90576592, -7.93254656,
+-6.96112672, -5.96015798, -4.95493809, -3.98556269, -2.98182856, -1.98150255, -0.96551153, -0.00399791,  0.98644875,  1.98043830,
+ 2.97969033,  3.97728257,  4.95173541,  5.95649050,  6.96447378,  7.95591513,  9.07680954,  9.92093070,  10.76496555,  11.97525735,
+ 13.00000000,  14.00000000, -25.00000000, -24.00000000, -23.00000000, -22.00072357, -21.00000000, -20.00000000, -19.00000000, -18.20003462,
+-17.01648407, -15.78651996, -14.95660266, -13.99167850, -13.28722978, -11.85013840, -10.92025302, -9.87055810, -8.93841040, -7.95329867,
+-6.97819441, -6.01593394, -5.00905213, -3.99905285, -2.99171810, -1.99062796, -1.00112466,  0.00140492,  1.00701091,  2.02327185,
+ 3.00194633,  3.99188294,  5.00313145,  6.00448038,  6.98904951,  7.98158293,  8.98212774,  10.00363404,  10.98641678,  11.98034311,
+ 12.95176779,  13.95383703,  14.99084578,  15.98600642,  16.99406826,  17.98134623,  19.01793961,  19.86072639,  20.88465474,  21.99287082,
+ 22.81916620,  23.77946383,  0.00000234,  0.00000298,  0.00000048,  0.00002408, -0.00000165, -0.00001831, -0.00005703, -0.00000184,
+-1.00000000,  0.00001977,  1.00000000, -1.00000000,  0.00000010,  1.00000000, -1.00000000, -0.00001152,  1.00000000, -1.00000000,
+ 0.00000840,  1.00000000, -1.00000000,  0.00002353,  1.00000000, -0.75455603, -0.00001433,  1.00000000, -0.65859705, -0.00000703,
+ 0.62995860, -2.00000000, -0.72724652, -0.00033969,  0.61359174,  2.00000000, -2.00000000, -0.69510998, -0.00031410,  0.66467605,
+ 2.00000000,  3.00000000, -3.00000000, -2.00000000, -0.65738683,  0.00039019,  0.66554720,  1.91774106,  3.18089124,  0.00000070,
+ 0.00001152, -0.00000795, -0.00000058, -0.00003502, -0.00001508, -0.00004225, -0.00002165, -1.00000000,  0.00004391,  1.00000000,
+-1.00000000,  0.00001784,  1.00000000, -1.00000000, -0.00003678,  1.00000000, -0.68878314, -0.00013166,  0.60880149, -0.75291978,
+ 0.00006493,  1.00000000, -0.76757316,  0.00003057,  0.67140524, -0.61602267, -0.00014495,  0.63625803,  2.00000000, -2.00000000,
+-0.61253314, -0.00116483,  0.65071851,  2.00000000, -3.00000000, -1.71451667, -0.67799909, -0.00048294,  0.65846019,  2.00000000,
+-3.02497593, -1.83515395, -0.70317981,  0.00519701,  0.67780009,  1.84218153,  2.88846262,  4.00000000,  0.00001124,  0.00000588,
+-0.00000172,  0.00002835,  1.00000000,  0.00001012, -0.00008644,  1.00000000, -0.75115901,  0.00004347,  1.00000000, -1.00000000,
+ 0.00002800,  1.00000000, -1.00000000, -0.00006039,  1.00000000, -0.79763258, -0.00011907,  0.71713616, -0.76791870, -0.00007113,
+ 0.63583609, -0.62337806,  0.00012891,  0.62242094, -0.60837055,  0.00043216,  0.65515705, -0.63637782, -0.00019749,  0.60423967,
+ 2.00000000, -2.00000000, -0.65404827, -0.00089304,  0.64706660,  2.00000000, -1.86334076, -0.66410366,  0.00063219,  0.66968004,
+ 2.00000000,  3.00000000,  4.00000000, -4.00000000, -3.00000000, -1.79048834, -0.69451890,  0.00030677,  0.71009333,  1.70591343,
+ 3.00000000,  4.00000000, -4.00000000, -2.90176499, -1.78368781, -0.74425178,  0.00234068,  0.74847325,  1.78886822,  2.78478854,
+ 3.83608985,  4.95996151,  0.00002170,  0.00001281,  0.00002162, -1.00000000, -0.00007266,  1.00000000, -1.00000000, -0.00003250,
+-0.64088804,  0.00015239,  1.00000000, -0.58450370, -0.00008410,  0.60567186, -1.00000000, -0.00010752,  1.00000000, -0.58922508,
+-0.00017378,  0.60755779, -0.62797206, -0.00001016,  0.64432847, -0.58497934, -0.00001851,  0.59716791, -0.62642499, -0.00097386,
+ 0.63568558,  2.00000000, -2.00000000, -0.63236390, -0.00173361,  0.63142762,  1.75629192, -3.00000000, -2.00000000, -0.65596684,
+ 0.00209364,  0.65419742,  2.00000000, -3.00000000, -1.73856625, -0.67767521, -0.00119512,  0.68973603,  1.70985573,  3.00000000,
+-3.00000000, -1.81820220, -0.73974134,  0.00695869,  0.72216179,  1.75624461,  3.00000000,  4.00000000, -5.00000000, -4.00000000,
+-3.17718593, -1.76857567, -0.76822322,  0.00267400,  0.76414602,  1.84309221,  3.04940652,  4.00000000, -7.08189123, -6.00000000,
+-5.22882249, -3.96477958, -2.79653492, -1.81923435, -0.80050253, -0.01086663,  0.82708565,  1.85804900,  2.89996354,  3.76028554,
+ 4.80518081,  5.81738096,  7.00000000,  8.00000000,  9.08816091, -0.00002979, -0.00000333, -1.00000000, -0.00011532,  1.00000000,
+-0.70921122, -0.00005325,  0.68933188, -0.67581263, -0.00023107,  0.57868212, -0.58388312, -0.00020850,  0.60149012, -0.60912457,
+ 0.00001567,  0.60180554, -0.59130091, -0.00038863,  0.59908653, -2.00000000, -0.63697707,  0.00083913,  0.62040514,  2.00000000,
+-2.00000000, -0.63216238, -0.00081100,  0.64411071,  2.00000000, -1.76856259, -0.65266989, -0.00243486,  0.66888899,  2.00000000,
+-1.75427214, -0.71415385, -0.00226376,  0.71296778,  1.66182947,  3.00000000, -3.00000000, -1.72505821, -0.72920134, -0.00360424,
+ 0.73800767,  1.72848281,  3.00000000, -4.00000000, -2.95284408, -1.72025758, -0.76503859,  0.00418761,  0.75297139,  1.73959808,
+ 3.00000000, -5.00000000, -3.96232791, -2.74080544, -1.78897123, -0.80233505, -0.00002050,  0.79693417,  1.76182598,  2.78434458,
+ 3.85693287,  5.00000000, -6.00000000, -4.78439284, -3.83501790, -2.85203629, -1.84909573, -0.85382658, -0.00181019,  0.84735145,
+ 1.83676575,  2.83656843,  3.86722376,  4.79702431,  6.00000000, -9.00000000, -8.00000000, -7.00000000, -6.07957292, -4.84677515,
+-3.85093972, -2.88683139, -1.84596391, -0.88058034, -0.00008692,  0.87554746,  1.86933183,  2.84729990,  3.89029797,  4.87311773,
+ 5.90844023,  7.00000000, -11.00000000, -9.97745420, -8.90015761, -7.94187517, -6.86987726, -5.84795335, -4.86693435, -3.90601819,
+-2.91031804, -1.91620096, -0.90497055,  0.00659199,  0.90926869,  1.90980821,  2.91070850,  3.93685967,  4.85581177,  6.06727337,
+ 7.05801043,  8.00000000,  9.00000000,  10.00000000,  10.90825787,  12.00000000,  13.00000000,  14.00000000, -0.00008918,  1.00000000,
+-0.54405938,  0.00120348,  0.55781920, -0.59227786, -0.00349602,  0.59777231, -1.63717598, -0.69048065,  0.00999281,  0.65770558,
+ 2.00000000, -2.00000000, -0.71013571,  0.00454518,  0.66991065, -3.00000000, -1.73004867, -0.73743921,  0.01162454,  0.69964842,
+ 1.83319587, -1.81225491, -0.76806000,  0.00164742,  0.76780397,  1.67168896, -1.64564794, -0.79903361, -0.01522880,  0.84277926,
+ 1.68873752, -3.00000000, -1.72063244, -0.83687428,  0.00246724,  0.84618697,  1.79464483,  2.77447025, -3.77118426, -2.75025539,
+-1.82050448, -0.90373722, -0.00187780,  0.90102245,  1.85249394,  2.71364180, -2.71720889, -1.79466125, -0.89860801, -0.02725825,
+ 0.90877329,  1.90542096,  2.76847902,  3.71496428, -4.70257302, -3.90746659, -2.87078421, -1.88858709, -0.93608993, -0.02157425,
+ 0.95181182,  1.91155682,  2.83614575,  3.87820801,  4.72172277, -5.02764544, -3.80066801, -2.87484378, -1.90707477, -0.96326017,
+-0.01060091,  0.96558851,  1.92191548,  2.86970759,  3.85655474,  4.83135970,  5.76387469, -9.00000000, -8.00000000, -6.75261776,
+-5.86333393, -4.84846871, -3.91871758, -2.93827286, -1.93050320, -0.96359634, -0.00141931,  0.95926312,  1.92541870,  2.93009411,
+ 3.86699087,  4.82315929,  5.67815206, -8.76594345, -7.70350451, -6.91784020, -5.81539490, -4.92526872, -3.91513203, -2.92134949,
+-1.95465646, -0.97638102, -0.00742564,  0.96948714,  1.96401112,  2.95256722,  3.93146353,  4.90991357,  5.88139022,  6.88640588,
+ 7.82610489,  9.00000000, -10.97611369, -9.80036760, -8.91109518, -7.92809404, -6.93865353, -5.91965899, -4.92957669, -3.95206224,
+-2.97308718, -1.97778214, -0.98552568, -0.00063212,  0.98686014,  1.97511867,  2.97114218,  3.97854244,  4.96578513,  5.96457765,
+ 6.95180187,  7.95163483,  8.93760897,  9.87666900,  10.88024562,  11.96270158,  12.99519291, -15.00000000, -13.76826291, -12.97229116,
+-12.00334834, -10.95980884, -9.98190891, -8.93798503, -7.95621309, -6.96109479, -5.96056649, -4.95843419, -3.97688640, -2.98989576,
+-1.98533395, -0.99580972,  0.00694370,  0.99421120,  1.99033132,  2.98751217,  3.98549580,  4.96482394,  5.96623233,  6.93564626,
+ 7.93772467,  8.92015276,  9.88785129,  10.97606096,  11.79686057, -23.00000000, -22.00000000, -21.00000000, -20.00000000, -19.00000000,
+-17.73310977, -16.83574096, -15.90889480, -15.00437366, -13.95007272, -12.99296117, -11.98334751, -10.96970820, -9.97775151, -8.98193840,
+-7.98378966, -6.98887770, -5.99059477, -5.00228769, -3.99355850, -2.99947486, -1.99897483, -0.99375857,  0.00324880,  1.00215912,
+ 1.99277083,  3.00503747,  3.99390482,  4.98854283,  5.98753219,  6.98245347,  7.98089893,  8.95960522,  9.95663648,  11.00810285,
+ 12.01421617,  12.96208687,  13.99227766,  14.97230040,  15.95114804,  16.97347393,  17.97794884,  18.96777118,  19.94446034,  20.94799029,
+ 22.14740083,  22.84288347,  23.99212109,  25.00000000,  25.96562658};
+
+/* cdf tables for quantizer indices */
+const WebRtc_UWord16 WebRtcIsac_kQKltCdfGain[1212] = {
+ 0,  13,  301,  3730,  61784,  65167,  65489,  65535,  0,  17,
+ 142,  314,  929,  2466,  7678,  56450,  63463,  64740,  65204,  65426,
+ 65527,  65535,  0,  8,  100,  724,  6301,  60105,  65125,  65510,
+ 65531,  65535,  0,  13,  117,  368,  1068,  3010,  11928,  53603,
+ 61177,  63404,  64505,  65108,  65422,  65502,  65531,  65535,  0,  4,
+ 17,  96,  410,  1859,  12125,  54361,  64103,  65305,  65497,  65535,
+ 0,  4,  88,  230,  469,  950,  1746,  3228,  6092,  16592,
+ 44756,  56848,  61256,  63308,  64325,  64920,  65309,  65460,  65502,  65522,
+ 65535,  0,  88,  352,  1675,  6339,  20749,  46686,  59284,  63525,
+ 64949,  65359,  65502,  65527,  65535,  0,  13,  38,  63,  117,
+ 234,  381,  641,  929,  1407,  2043,  2809,  4032,  5753,  8792,
+ 14407,  24308,  38941,  48947,  55403,  59293,  61411,  62688,  63630,  64329,
+ 64840,  65188,  65376,  65472,  65506,  65527,  65531,  65535,  0,  8,
+ 29,  75,  222,  615,  1327,  2801,  5623,  9931,  16094,  24966,
+ 34419,  43458,  50676,  56186,  60055,  62500,  63936,  64765,  65225,  65435,
+ 65514,  65535,  0,  8,  13,  15,  17,  21,  33,  59,
+ 71,  92,  151,  243,  360,  456,  674,  934,  1223,  1583,
+ 1989,  2504,  3031,  3617,  4354,  5154,  6163,  7411,  8780,  10747,
+ 12874,  15591,  18974,  23027,  27436,  32020,  36948,  41830,  46205,  49797,
+ 53042,  56094,  58418,  60360,  61763,  62818,  63559,  64103,  64509,  64798,
+ 65045,  65162,  65288,  65363,  65447,  65506,  65522,  65531,  65533,  65535,
+ 0,  4,  6,  25,  38,  71,  138,  264,  519,  808,
+ 1227,  1825,  2516,  3408,  4279,  5560,  7092,  9197,  11420,  14108,
+ 16947,  20300,  23926,  27459,  31164,  34827,  38575,  42178,  45540,  48747,
+ 51444,  54090,  56426,  58460,  60080,  61595,  62734,  63668,  64275,  64673,
+ 64936,  65112,  65217,  65334,  65426,  65464,  65477,  65489,  65518,  65527,
+ 65529,  65531,  65533,  65535,  0,  2,  4,  8,  10,  12,
+ 14,  16,  21,  33,  50,  71,  84,  92,  105,  138,
+ 180,  255,  318,  377,  435,  473,  511,  590,  682,  758,
+ 913,  1097,  1256,  1449,  1671,  1884,  2169,  2445,  2772,  3157,
+ 3563,  3944,  4375,  4848,  5334,  5820,  6448,  7101,  7716,  8378,
+ 9102,  9956,  10752,  11648,  12707,  13670,  14758,  15910,  17187,  18472,
+ 19627,  20649,  21951,  23169,  24283,  25552,  26862,  28227,  29391,  30764,
+ 31882,  33213,  34432,  35600,  36910,  38116,  39464,  40729,  41872,  43144,
+ 44371,  45514,  46762,  47813,  48968,  50069,  51032,  51974,  52908,  53737,
+ 54603,  55445,  56282,  56990,  57572,  58191,  58840,  59410,  59887,  60264,
+ 60607,  60946,  61269,  61516,  61771,  61960,  62198,  62408,  62558,  62776,
+ 62985,  63207,  63408,  63546,  63739,  63906,  64070,  64237,  64371,  64551,
+ 64677,  64836,  64999,  65095,  65213,  65284,  65338,  65380,  65426,  65447,
+ 65472,  65485,  65487,  65489,  65502,  65510,  65512,  65514,  65516,  65518,
+ 65522,  65531,  65533,  65535,  0,  2,  4,  6,  65528,  65531,
+ 65533,  65535,  0,  2,  4,  6,  8,  10,  222,  65321,
+ 65513,  65528,  65531,  65533,  65535,  0,  2,  4,  50,  65476,
+ 65529,  65531,  65533,  65535,  0,  2,  4,  6,  8,  12,
+ 38,  544,  64936,  65509,  65523,  65525,  65529,  65531,  65533,  65535,
+ 0,  2,  4,  6,  8,  10,  1055,  64508,  65528,  65531,
+ 65533,  65535,  0,  2,  4,  6,  8,  10,  12,  123,
+ 3956,  62999,  65372,  65495,  65515,  65521,  65523,  65525,  65527,  65529,
+ 65531,  65533,  65535,  0,  2,  4,  12,  53,  4707,  59445,
+ 65467,  65525,  65527,  65529,  65531,  65533,  65535,  0,  2,  4,
+ 6,  8,  10,  12,  14,  16,  38,  40,  50,  67,
+ 96,  234,  929,  14345,  55750,  64866,  65389,  65462,  65514,  65517,
+ 65519,  65521,  65523,  65525,  65527,  65529,  65531,  65533,  65535,  0,
+ 2,  4,  6,  8,  10,  15,  35,  91,  377,  1946,
+ 13618,  52565,  63714,  65184,  65465,  65520,  65523,  65525,  65527,  65529,
+ 65531,  65533,  65535,  0,  2,  4,  6,  8,  10,  12,
+ 14,  16,  18,  20,  22,  24,  26,  28,  30,  32,
+ 34,  36,  38,  40,  42,  44,  46,  48,  50,  52,
+ 54,  82,  149,  362,  751,  1701,  4239,  12893,  38627,  55072,
+ 60875,  63071,  64158,  64702,  65096,  65283,  65412,  65473,  65494,  65505,
+ 65508,  65517,  65519,  65521,  65523,  65525,  65527,  65529,  65531,  65533,
+ 65535,  0,  2,  15,  23,  53,  143,  260,  418,  698,
+ 988,  1353,  1812,  2411,  3144,  4015,  5143,  6401,  7611,  8999,
+ 10653,  12512,  14636,  16865,  19404,  22154,  24798,  27521,  30326,  33102,
+ 35790,  38603,  41415,  43968,  46771,  49435,  52152,  54715,  57143,  59481,
+ 61178,  62507,  63603,  64489,  64997,  65257,  65427,  65473,  65503,  65520,
+ 65529,  65531,  65533,  65535,  0,  3,  6,  9,  26,  32,
+ 44,  46,  64,  94,  111,  164,  205,  254,  327,  409,
+ 506,  608,  733,  885,  1093,  1292,  1482,  1742,  1993,  2329,
+ 2615,  3029,  3374,  3798,  4257,  4870,  5405,  5992,  6618,  7225,
+ 7816,  8418,  9051,  9761,  10532,  11380,  12113,  13010,  13788,  14594,
+ 15455,  16361,  17182,  18088,  18997,  20046,  20951,  21968,  22947,  24124,
+ 25296,  26547,  27712,  28775,  29807,  30835,  31709,  32469,  33201,  34014,
+ 34876,  35773,  36696,  37620,  38558,  39547,  40406,  41277,  42367,  43290,
+ 44445,  45443,  46510,  47684,  48973,  50157,  51187,  52242,  53209,  54083,
+ 55006,  55871,  56618,  57293,  57965,  58556,  59222,  59722,  60180,  60554,
+ 60902,  61250,  61554,  61837,  62100,  62372,  62631,  62856,  63078,  63324,
+ 63557,  63768,  63961,  64089,  64235,  64352,  64501,  64633,  64770,  64887,
+ 65001,  65059,  65121,  65188,  65246,  65302,  65346,  65390,  65428,  65463,
+ 65477,  65506,  65515,  65517,  65519,  65521,  65523,  65525,  65527,  65529,
+ 65531,  65533,  65535,  0,  2,  4,  109,  65332,  65531,  65533,
+ 65535,  0,  2,  4,  6,  8,  25,  1817,  63874,  65511,
+ 65527,  65529,  65531,  65533,  65535,  0,  2,  4,  907,  65014,
+ 65529,  65531,  65533,  65535,  0,  2,  4,  6,  8,  10,
+ 12,  132,  2743,  62708,  65430,  65525,  65527,  65529,  65531,  65533,
+ 65535,  0,  2,  4,  6,  8,  35,  3743,  61666,  65485,
+ 65531,  65533,  65535,  0,  2,  4,  6,  8,  10,  23,
+ 109,  683,  6905,  58417,  64911,  65398,  65497,  65518,  65525,  65527,
+ 65529,  65531,  65533,  65535,  0,  2,  4,  6,  53,  510,
+ 10209,  55212,  64573,  65441,  65522,  65529,  65531,  65533,  65535,  0,
+ 2,  4,  6,  8,  10,  12,  14,  16,  18,  20,
+ 22,  32,  90,  266,  1037,  3349,  14468,  50488,  62394,  64685,
+ 65341,  65480,  65514,  65519,  65521,  65523,  65525,  65527,  65529,  65531,
+ 65533,  65535,  0,  2,  4,  6,  9,  16,  37,  106,
+ 296,  748,  1868,  5733,  18897,  45553,  60165,  63949,  64926,  65314,
+ 65441,  65508,  65524,  65529,  65531,  65533,  65535,  0,  2,  4,
+ 6,  8,  10,  12,  14,  16,  18,  20,  22,  24,
+ 26,  28,  30,  32,  34,  36,  38,  40,  42,  44,
+ 46,  48,  50,  83,  175,  344,  667,  1293,  2337,  4357,
+ 8033,  14988,  28600,  43244,  52011,  57042,  59980,  61779,  63065,  63869,
+ 64390,  64753,  64988,  65164,  65326,  65422,  65462,  65492,  65506,  65522,
+ 65524,  65526,  65531,  65533,  65535,  0,  2,  4,  6,  8,
+ 10,  12,  14,  16,  25,  39,  48,  55,  62,  65,
+ 85,  106,  139,  169,  194,  252,  323,  485,  688,  1074,
+ 1600,  2544,  3863,  5733,  8303,  11397,  15529,  20273,  25734,  31455,
+ 36853,  41891,  46410,  50306,  53702,  56503,  58673,  60479,  61880,  62989,
+ 63748,  64404,  64852,  65124,  65309,  65424,  65480,  65524,  65528,  65533,
+ 65535,  0,  2,  4,  6,  8,  10,  12,  14,  21,
+ 23,  25,  27,  29,  31,  39,  41,  43,  48,  60,
+ 72,  79,  106,  136,  166,  187,  224,  252,  323,  381,
+ 427,  478,  568,  660,  783,  912,  1046,  1175,  1365,  1567,
+ 1768,  2024,  2347,  2659,  3049,  3529,  4033,  4623,  5281,  5925,
+ 6726,  7526,  8417,  9468,  10783,  12141,  13571,  15222,  16916,  18659,
+ 20350,  22020,  23725,  25497,  27201,  29026,  30867,  32632,  34323,  36062,
+ 37829,  39466,  41144,  42654,  43981,  45343,  46579,  47759,  49013,  50171,
+ 51249,  52283,  53245,  54148,  54938,  55669,  56421,  57109,  57791,  58464,
+ 59092,  59674,  60105,  60653,  61083,  61407,  61757,  62095,  62388,  62649,
+ 62873,  63157,  63358,  63540,  63725,  63884,  64046,  64155,  64278,  64426,
+ 64548,  64654,  64806,  64906,  64994,  65077,  65137,  65215,  65277,  65324,
+ 65354,  65409,  65437,  65455,  65462,  65490,  65495,  65499,  65508,  65511,
+ 65513,  65515,  65517,  65519,  65521,  65523,  65525,  65527,  65529,  65531,
+ 65533,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQKltCdfShape[2059] = {
+ 0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,
+ 0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  4,
+ 65535,  0,  8,  65514,  65535,  0,  29,  65481,  65535,  0,
+ 121,  65439,  65535,  0,  239,  65284,  65535,  0,  8,  779,
+ 64999,  65527,  65535,  0,  8,  888,  64693,  65522,  65535,  0,
+ 29,  2604,  62843,  65497,  65531,  65535,  0,  25,  176,  4576,
+ 61164,  65275,  65527,  65535,  0,  65535,  0,  65535,  0,  65535,
+ 0,  65535,  0,  4,  65535,  0,  65535,  0,  65535,  0,
+ 65535,  0,  65535,  0,  4,  65535,  0,  33,  65502,  65535,
+ 0,  54,  65481,  65535,  0,  251,  65309,  65535,  0,  611,
+ 65074,  65535,  0,  1273,  64292,  65527,  65535,  0,  4,  1809,
+ 63940,  65518,  65535,  0,  88,  4392,  60603,  65426,  65531,  65535,
+ 0,  25,  419,  7046,  57756,  64961,  65514,  65531,  65535,  0,
+ 65535,  0,  65535,  0,  65535,  0,  65535,  0,  4,  65531,
+ 65535,  0,  65535,  0,  8,  65531,  65535,  0,  4,  65527,
+ 65535,  0,  17,  65510,  65535,  0,  42,  65481,  65535,  0,
+ 197,  65342,  65531,  65535,  0,  385,  65154,  65535,  0,  1005,
+ 64522,  65535,  0,  8,  1985,  63469,  65533,  65535,  0,  38,
+ 3119,  61884,  65514,  65535,  0,  4,  6,  67,  4961,  60804,
+ 65472,  65535,  0,  17,  565,  9182,  56538,  65087,  65514,  65535,
+ 0,  8,  63,  327,  2118,  14490,  52774,  63839,  65376,  65522,
+ 65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+ 17,  65522,  65535,  0,  59,  65489,  65535,  0,  50,  65522,
+ 65535,  0,  54,  65489,  65535,  0,  310,  65179,  65535,  0,
+ 615,  64836,  65535,  0,  4,  1503,  63965,  65535,  0,  2780,
+ 63383,  65535,  0,  21,  3919,  61051,  65527,  65535,  0,  84,
+ 6674,  59929,  65435,  65535,  0,  4,  255,  7976,  55784,  65150,
+ 65518,  65531,  65535,  0,  4,  8,  582,  10726,  53465,  64949,
+ 65518,  65535,  0,  29,  339,  3006,  17555,  49517,  62956,  65200,
+ 65497,  65531,  65535,  0,  2,  33,  138,  565,  2324,  7670,
+ 22089,  45966,  58949,  63479,  64966,  65380,  65518,  65535,  0,  65535,
+ 0,  65535,  0,  2,  65533,  65535,  0,  46,  65514,  65535,
+ 0,  414,  65091,  65535,  0,  540,  64911,  65535,  0,  419,
+ 65162,  65535,  0,  976,  64790,  65535,  0,  2977,  62495,  65531,
+ 65535,  0,  4,  3852,  61034,  65527,  65535,  0,  4,  29,
+ 6021,  60243,  65468,  65535,  0,  84,  6711,  58066,  65418,  65535,
+ 0,  13,  281,  9550,  54917,  65125,  65506,  65535,  0,  2,
+ 63,  984,  12108,  52644,  64342,  65435,  65527,  65535,  0,  29,
+ 251,  2014,  14871,  47553,  62881,  65229,  65518,  65535,  0,  13,
+ 142,  749,  4220,  18497,  45200,  60913,  64823,  65426,  65527,  65535,
+ 0,  13,  71,  264,  1176,  3789,  10500,  24480,  43488,  56324,
+ 62315,  64493,  65242,  65464,  65514,  65522,  65531,  65535,  0,  4,
+ 13,  38,  109,  205,  448,  850,  1708,  3429,  6276,  11371,
+ 19221,  29734,  40955,  49391,  55411,  59460,  62102,  63793,  64656,  65150,
+ 65401,  65485,  65522,  65531,  65535,  0,  65535,  0,  2,  65533,
+ 65535,  0,  1160,  65476,  65535,  0,  2,  6640,  64763,  65533,
+ 65535,  0,  2,  38,  9923,  61009,  65527,  65535,  0,  2,
+ 4949,  63092,  65533,  65535,  0,  2,  3090,  63398,  65533,  65535,
+ 0,  2,  2520,  58744,  65510,  65535,  0,  2,  13,  544,
+ 8784,  51403,  65148,  65533,  65535,  0,  2,  25,  1017,  10412,
+ 43550,  63651,  65489,  65527,  65535,  0,  2,  4,  29,  783,
+ 13377,  52462,  64524,  65495,  65533,  65535,  0,  2,  4,  6,
+ 100,  1817,  18451,  52590,  63559,  65376,  65531,  65535,  0,  2,
+ 4,  6,  46,  385,  2562,  11225,  37416,  60488,  65026,  65487,
+ 65529,  65533,  65535,  0,  2,  4,  6,  8,  10,  12,
+ 42,  222,  971,  5221,  19811,  45048,  60312,  64486,  65294,  65474,
+ 65525,  65529,  65533,  65535,  0,  2,  4,  8,  71,  167,
+ 666,  2533,  7875,  19622,  38082,  54359,  62108,  64633,  65290,  65495,
+ 65529,  65533,  65535,  0,  2,  4,  6,  8,  10,  13,
+ 109,  586,  1930,  4949,  11600,  22641,  36125,  48312,  56899,  61495,
+ 63927,  64932,  65389,  65489,  65518,  65531,  65533,  65535,  0,  4,
+ 6,  8,  67,  209,  712,  1838,  4195,  8432,  14432,  22834,
+ 31723,  40523,  48139,  53929,  57865,  60657,  62403,  63584,  64363,  64907,
+ 65167,  65372,  65472,  65514,  65535,  0,  2,  4,  13,  25,
+ 42,  46,  50,  75,  113,  147,  281,  448,  657,  909,
+ 1185,  1591,  1976,  2600,  3676,  5317,  7398,  9914,  12941,  16169,
+ 19477,  22885,  26464,  29851,  33360,  37228,  41139,  44802,  48654,  52058,
+ 55181,  57676,  59581,  61022,  62190,  63107,  63676,  64199,  64547,  64924,
+ 65158,  65313,  65430,  65481,  65518,  65535,  0,  65535,  0,  65535,
+ 0,  65535,  0,  65535,  0,  65533,  65535,  0,  65535,  0,
+ 65535,  0,  65535,  0,  65533,  65535,  0,  2,  65535,  0,
+ 2,  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,
+ 65535,  0,  2,  4,  65533,  65535,  0,  2,  65533,  65535,
+ 0,  2,  4,  65531,  65533,  65535,  0,  2,  4,  65531,
+ 65533,  65535,  0,  2,  4,  6,  65524,  65533,  65535,  0,
+ 65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+ 65535,  0,  65535,  0,  65535,  0,  65533,  65535,  0,  65533,
+ 65535,  0,  2,  65533,  65535,  0,  2,  65533,  65535,  0,
+ 2,  65533,  65535,  0,  2,  4,  65532,  65535,  0,  6,
+ 65523,  65535,  0,  2,  15,  65530,  65533,  65535,  0,  2,
+ 35,  65493,  65531,  65533,  65535,  0,  2,  4,  158,  65382,
+ 65531,  65533,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+ 65535,  0,  65535,  0,  65535,  0,  2,  65535,  0,  2,
+ 65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,  65535,
+ 0,  2,  65533,  65535,  0,  9,  65512,  65535,  0,  2,
+ 12,  65529,  65535,  0,  2,  73,  65434,  65533,  65535,  0,
+ 2,  240,  65343,  65533,  65535,  0,  2,  476,  65017,  65531,
+ 65533,  65535,  0,  2,  4,  1046,  64686,  65531,  65533,  65535,
+ 0,  2,  4,  6,  8,  1870,  63898,  65529,  65531,  65533,
+ 65535,  0,  65535,  0,  65535,  0,  65535,  0,  65533,  65535,
+ 0,  2,  65533,  65535,  0,  2,  65533,  65535,  0,  2,
+ 65532,  65535,  0,  6,  65533,  65535,  0,  6,  65523,  65535,
+ 0,  2,  65532,  65535,  0,  137,  65439,  65535,  0,  576,
+ 64899,  65533,  65535,  0,  2,  289,  65299,  65533,  65535,  0,
+ 2,  4,  6,  880,  64134,  65531,  65533,  65535,  0,  2,
+ 4,  1853,  63347,  65533,  65535,  0,  2,  6,  2516,  61762,
+ 65529,  65531,  65533,  65535,  0,  2,  4,  9,  3980,  61380,
+ 65503,  65529,  65531,  65533,  65535,  0,  2,  4,  6,  8,
+ 10,  12,  61,  6393,  59859,  65466,  65527,  65529,  65531,  65533,
+ 65535,  0,  65535,  0,  65535,  0,  65535,  0,  2,  65532,
+ 65535,  0,  3,  65529,  65535,  0,  2,  65529,  65535,  0,
+ 61,  65453,  65535,  0,  234,  65313,  65535,  0,  503,  65138,
+ 65535,  0,  155,  65402,  65533,  65535,  0,  2,  1058,  64554,
+ 65533,  65535,  0,  2,  4,  3138,  62109,  65531,  65533,  65535,
+ 0,  2,  4,  2031,  63339,  65531,  65533,  65535,  0,  2,
+ 4,  6,  9,  4155,  60778,  65523,  65529,  65531,  65533,  65535,
+ 0,  2,  4,  41,  6189,  59269,  65490,  65531,  65533,  65535,
+ 0,  2,  4,  6,  210,  8789,  57043,  65400,  65528,  65531,
+ 65533,  65535,  0,  2,  4,  6,  8,  26,  453,  10086,
+ 55499,  64948,  65483,  65524,  65527,  65529,  65531,  65533,  65535,  0,
+ 2,  4,  6,  8,  10,  12,  14,  16,  18,  20,
+ 114,  1014,  11202,  52670,  64226,  65356,  65503,  65514,  65523,  65525,
+ 65527,  65529,  65531,  65533,  65535,  0,  65533,  65535,  0,  15,
+ 65301,  65535,  0,  152,  64807,  65535,  0,  2,  3328,  63308,
+ 65535,  0,  2,  4050,  59730,  65533,  65535,  0,  2,  164,
+ 10564,  61894,  65529,  65535,  0,  15,  6712,  59831,  65076,  65532,
+ 65535,  0,  32,  7712,  57449,  65459,  65535,  0,  2,  210,
+ 7849,  53110,  65021,  65523,  65535,  0,  2,  12,  1081,  13883,
+ 48262,  62870,  65477,  65535,  0,  2,  88,  847,  6145,  37852,
+ 62012,  65454,  65533,  65535,  0,  9,  47,  207,  1823,  14522,
+ 45521,  61069,  64891,  65481,  65528,  65531,  65533,  65535,  0,  2,
+ 9,  488,  2881,  12758,  38703,  58412,  64420,  65410,  65533,  65535,
+ 0,  2,  4,  6,  61,  333,  1891,  6486,  19720,  43188,
+ 57547,  62472,  64796,  65421,  65497,  65523,  65529,  65531,  65533,  65535,
+ 0,  2,  4,  6,  8,  10,  12,  29,  117,  447,
+ 1528,  6138,  21242,  43133,  56495,  62432,  64746,  65362,  65500,  65529,
+ 65531,  65533,  65535,  0,  2,  18,  105,  301,  760,  1490,
+ 3472,  7568,  15002,  26424,  40330,  53029,  60048,  62964,  64274,  64890,
+ 65337,  65445,  65489,  65513,  65527,  65530,  65533,  65535,  0,  2,
+ 4,  6,  41,  102,  409,  853,  2031,  4316,  7302,  11328,
+ 16869,  24825,  34926,  43481,  50877,  56126,  59874,  62103,  63281,  63857,
+ 64166,  64675,  65382,  65522,  65531,  65533,  65535,  0,  2,  4,
+ 6,  8,  10,  12,  14,  16,  18,  29,  38,  53,
+ 58,  96,  181,  503,  1183,  2849,  5590,  8600,  11379,  13942,
+ 16478,  19453,  22638,  26039,  29411,  32921,  37596,  41433,  44998,  48560,
+ 51979,  55106,  57666,  59892,  61485,  62616,  63484,  64018,  64375,  64685,
+ 64924,  65076,  65278,  65395,  65471,  65509,  65529,  65535,  0,  65535,
+ 0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,
+ 0,  65535,  0,  65535,  0,  2,  65533,  65535,  0,  2,
+ 65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,  65535,
+ 0,  2,  65533,  65535,  0,  2,  65533,  65535,  0,  7,
+ 65519,  65535,  0,  2,  14,  65491,  65533,  65535,  0,  2,
+ 81,  65427,  65531,  65533,  65535,  0,  2,  4,  312,  65293,
+ 65528,  65533,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+ 65535,  0,  65535,  0,  65535,  0,  65535,  0,  65535,  0,
+ 2,  65533,  65535,  0,  2,  65533,  65535,  0,  2,  65533,
+ 65535,  0,  5,  65523,  65535,  0,  2,  65533,  65535,  0,
+ 7,  65526,  65535,  0,  46,  65464,  65533,  65535,  0,  2,
+ 120,  65309,  65533,  65535,  0,  2,  5,  362,  65097,  65533,
+ 65535,  0,  2,  18,  1164,  64785,  65528,  65531,  65533,  65535,
+ 0,  65535,  0,  65535,  0,  65535,  0,  65533,  65535,  0,
+ 65535,  0,  65533,  65535,  0,  2,  65533,  65535,  0,  2,
+ 65533,  65535,  0,  2,  65533,  65535,  0,  2,  65530,  65535,
+ 0,  2,  65523,  65535,  0,  69,  65477,  65535,  0,  141,
+ 65459,  65535,  0,  194,  65325,  65533,  65535,  0,  2,  543,
+ 64912,  65533,  65535,  0,  5,  1270,  64301,  65529,  65531,  65533,
+ 65535,  0,  2,  4,  12,  2055,  63538,  65508,  65531,  65533,
+ 65535,  0,  2,  7,  102,  3775,  61970,  65429,  65526,  65528,
+ 65533,  65535,  0,  65535,  0,  65535,  0,  65535,  0,  2,
+ 65533,  65535,  0,  2,  65535,  0,  9,  65533,  65535,  0,
+ 25,  65512,  65535,  0,  2,  65533,  65535,  0,  44,  65480,
+ 65535,  0,  48,  65475,  65535,  0,  162,  65373,  65535,  0,
+ 637,  64806,  65533,  65535,  0,  2,  935,  64445,  65533,  65535,
+ 0,  2,  4,  1662,  64083,  65533,  65535,  0,  2,  12,
+ 3036,  62469,  65521,  65533,  65535,  0,  2,  120,  5405,  60468,
+ 65469,  65531,  65533,  65535,  0,  2,  4,  18,  254,  6663,
+ 58999,  65272,  65528,  65533,  65535,  0,  2,  4,  9,  12,
+ 67,  591,  8981,  56781,  64564,  65365,  65508,  65524,  65526,  65529,
+ 65531,  65533,  65535,  0,  65535,  0,  65535,  0,  2,  65533,
+ 65535,  0,  9,  65526,  65535,  0,  14,  65503,  65535,  0,
+ 127,  65390,  65535,  0,  517,  64990,  65535,  0,  178,  65330,
+ 65535,  0,  2,  1055,  64533,  65533,  65535,  0,  2,  1558,
+ 63942,  65533,  65535,  0,  2,  2205,  63173,  65533,  65535,  0,
+ 25,  4493,  60862,  65505,  65533,  65535,  0,  2,  48,  5890,
+ 59442,  65482,  65533,  65535,  0,  2,  4,  127,  7532,  58191,
+ 65394,  65533,  65535,  0,  2,  5,  32,  550,  10388,  54924,
+ 65046,  65510,  65531,  65533,  65535,  0,  2,  4,  30,  150,
+ 1685,  14340,  51375,  63619,  65288,  65503,  65528,  65533,  65535,  0,
+ 2,  4,  6,  8,  28,  97,  473,  2692,  15407,  50020,
+ 62880,  65064,  65445,  65508,  65531,  65533,  65535,  0,  2,  4,
+ 12,  32,  79,  150,  372,  907,  2184,  5868,  18207,  45431,
+ 59856,  64031,  65096,  65401,  65481,  65507,  65521,  65523,  65525,  65527,
+ 65529,  65531,  65533,  65535,  0,  65533,  65535,  0,  182,  65491,
+ 65535,  0,  877,  64286,  65535,  0,  9,  2708,  63612,  65533,
+ 65535,  0,  2,  6038,  59532,  65535,  0,  2,  92,  5500,
+ 60539,  65533,  65535,  0,  268,  8908,  56512,  65385,  65535,  0,
+ 129,  13110,  52742,  65036,  65535,  0,  2,  806,  14003,  51929,
+ 64732,  65523,  65535,  0,  7,  92,  2667,  18159,  47678,  62610,
+ 65355,  65535,  0,  32,  1836,  19676,  48237,  61677,  64960,  65526,
+ 65535,  0,  21,  159,  967,  5668,  22782,  44709,  58317,  64020,
+ 65406,  65528,  65535,  0,  7,  162,  1838,  8328,  23929,  43014,
+ 56394,  63374,  65216,  65484,  65521,  65535,  0,  2,  4,  6,
+ 28,  268,  1120,  3613,  10688,  24185,  40989,  54917,  61684,  64510,
+ 65403,  65530,  65535,  0,  2,  16,  44,  139,  492,  1739,
+ 5313,  13558,  26766,  41566,  52446,  58937,  62815,  64480,  65201,  65454,
+ 65524,  65533,  65535,  0,  7,  25,  76,  263,  612,  1466,
+ 3325,  6832,  12366,  20152,  29466,  39255,  47360,  53506,  57740,  60726,
+ 62845,  64131,  64882,  65260,  65459,  65521,  65528,  65530,  65535,  0,
+ 2,  4,  14,  48,  136,  312,  653,  1240,  2369,  4327,
+ 7028,  10759,  15449,  21235,  28027,  35386,  42938,  49562,  54990,  59119,
+ 62086,  63916,  64863,  65249,  65445,  65493,  65523,  65535,  0,  2,
+ 4,  6,  8,  10,  12,  21,  83,  208,  409,  723,
+ 1152,  1868,  2951,  4463,  6460,  8979,  11831,  15195,  18863,  22657,
+ 26762,  30881,  34963,  39098,  43054,  47069,  50620,  53871,  56821,  59386,
+ 61340,  62670,  63512,  64023,  64429,  64750,  64944,  65126,  65279,  65366,
+ 65413,  65445,  65473,  65505,  65510,  65521,  65528,  65530,  65535};
+
+/* pointers to cdf tables for quantizer indices */
+const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrGain[3][12] = {
+{WebRtcIsac_kQKltCdfGain +0 +0, WebRtcIsac_kQKltCdfGain +0 +8, WebRtcIsac_kQKltCdfGain +0 +22, WebRtcIsac_kQKltCdfGain +0 +32, WebRtcIsac_kQKltCdfGain +0 +48, WebRtcIsac_kQKltCdfGain +0 +60, WebRtcIsac_kQKltCdfGain +0 +81, WebRtcIsac_kQKltCdfGain +0 +95, WebRtcIsac_kQKltCdfGain +0 +128, WebRtcIsac_kQKltCdfGain +0 +152,
+WebRtcIsac_kQKltCdfGain +0 +210, WebRtcIsac_kQKltCdfGain +0 +264},
+{WebRtcIsac_kQKltCdfGain +404 +0, WebRtcIsac_kQKltCdfGain +404 +8, WebRtcIsac_kQKltCdfGain +404 +21, WebRtcIsac_kQKltCdfGain +404 +30, WebRtcIsac_kQKltCdfGain +404 +46, WebRtcIsac_kQKltCdfGain +404 +58, WebRtcIsac_kQKltCdfGain +404 +79, WebRtcIsac_kQKltCdfGain +404 +93, WebRtcIsac_kQKltCdfGain +404 +125, WebRtcIsac_kQKltCdfGain +404 +149,
+WebRtcIsac_kQKltCdfGain +404 +207, WebRtcIsac_kQKltCdfGain +404 +260},
+{WebRtcIsac_kQKltCdfGain +803 +0, WebRtcIsac_kQKltCdfGain +803 +8, WebRtcIsac_kQKltCdfGain +803 +22, WebRtcIsac_kQKltCdfGain +803 +31, WebRtcIsac_kQKltCdfGain +803 +48, WebRtcIsac_kQKltCdfGain +803 +60, WebRtcIsac_kQKltCdfGain +803 +81, WebRtcIsac_kQKltCdfGain +803 +96, WebRtcIsac_kQKltCdfGain +803 +129, WebRtcIsac_kQKltCdfGain +803 +154,
+WebRtcIsac_kQKltCdfGain +803 +212, WebRtcIsac_kQKltCdfGain +803 +268}};
+
+const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrShape[3][108] = {
+{WebRtcIsac_kQKltCdfShape +0 +0, WebRtcIsac_kQKltCdfShape +0 +2, WebRtcIsac_kQKltCdfShape +0 +4, WebRtcIsac_kQKltCdfShape +0 +6, WebRtcIsac_kQKltCdfShape +0 +8, WebRtcIsac_kQKltCdfShape +0 +10, WebRtcIsac_kQKltCdfShape +0 +12, WebRtcIsac_kQKltCdfShape +0 +14, WebRtcIsac_kQKltCdfShape +0 +16, WebRtcIsac_kQKltCdfShape +0 +18,
+WebRtcIsac_kQKltCdfShape +0 +21, WebRtcIsac_kQKltCdfShape +0 +25, WebRtcIsac_kQKltCdfShape +0 +29, WebRtcIsac_kQKltCdfShape +0 +33, WebRtcIsac_kQKltCdfShape +0 +37, WebRtcIsac_kQKltCdfShape +0 +43, WebRtcIsac_kQKltCdfShape +0 +49, WebRtcIsac_kQKltCdfShape +0 +56, WebRtcIsac_kQKltCdfShape +0 +64, WebRtcIsac_kQKltCdfShape +0 +66,
+WebRtcIsac_kQKltCdfShape +0 +68, WebRtcIsac_kQKltCdfShape +0 +70, WebRtcIsac_kQKltCdfShape +0 +72, WebRtcIsac_kQKltCdfShape +0 +75, WebRtcIsac_kQKltCdfShape +0 +77, WebRtcIsac_kQKltCdfShape +0 +79, WebRtcIsac_kQKltCdfShape +0 +81, WebRtcIsac_kQKltCdfShape +0 +83, WebRtcIsac_kQKltCdfShape +0 +86, WebRtcIsac_kQKltCdfShape +0 +90,
+WebRtcIsac_kQKltCdfShape +0 +94, WebRtcIsac_kQKltCdfShape +0 +98, WebRtcIsac_kQKltCdfShape +0 +102, WebRtcIsac_kQKltCdfShape +0 +107, WebRtcIsac_kQKltCdfShape +0 +113, WebRtcIsac_kQKltCdfShape +0 +120, WebRtcIsac_kQKltCdfShape +0 +129, WebRtcIsac_kQKltCdfShape +0 +131, WebRtcIsac_kQKltCdfShape +0 +133, WebRtcIsac_kQKltCdfShape +0 +135,
+WebRtcIsac_kQKltCdfShape +0 +137, WebRtcIsac_kQKltCdfShape +0 +141, WebRtcIsac_kQKltCdfShape +0 +143, WebRtcIsac_kQKltCdfShape +0 +147, WebRtcIsac_kQKltCdfShape +0 +151, WebRtcIsac_kQKltCdfShape +0 +155, WebRtcIsac_kQKltCdfShape +0 +159, WebRtcIsac_kQKltCdfShape +0 +164, WebRtcIsac_kQKltCdfShape +0 +168, WebRtcIsac_kQKltCdfShape +0 +172,
+WebRtcIsac_kQKltCdfShape +0 +178, WebRtcIsac_kQKltCdfShape +0 +184, WebRtcIsac_kQKltCdfShape +0 +192, WebRtcIsac_kQKltCdfShape +0 +200, WebRtcIsac_kQKltCdfShape +0 +211, WebRtcIsac_kQKltCdfShape +0 +213, WebRtcIsac_kQKltCdfShape +0 +215, WebRtcIsac_kQKltCdfShape +0 +217, WebRtcIsac_kQKltCdfShape +0 +219, WebRtcIsac_kQKltCdfShape +0 +223,
+WebRtcIsac_kQKltCdfShape +0 +227, WebRtcIsac_kQKltCdfShape +0 +231, WebRtcIsac_kQKltCdfShape +0 +235, WebRtcIsac_kQKltCdfShape +0 +239, WebRtcIsac_kQKltCdfShape +0 +243, WebRtcIsac_kQKltCdfShape +0 +248, WebRtcIsac_kQKltCdfShape +0 +252, WebRtcIsac_kQKltCdfShape +0 +258, WebRtcIsac_kQKltCdfShape +0 +264, WebRtcIsac_kQKltCdfShape +0 +273,
+WebRtcIsac_kQKltCdfShape +0 +282, WebRtcIsac_kQKltCdfShape +0 +293, WebRtcIsac_kQKltCdfShape +0 +308, WebRtcIsac_kQKltCdfShape +0 +310, WebRtcIsac_kQKltCdfShape +0 +312, WebRtcIsac_kQKltCdfShape +0 +316, WebRtcIsac_kQKltCdfShape +0 +320, WebRtcIsac_kQKltCdfShape +0 +324, WebRtcIsac_kQKltCdfShape +0 +328, WebRtcIsac_kQKltCdfShape +0 +332,
+WebRtcIsac_kQKltCdfShape +0 +336, WebRtcIsac_kQKltCdfShape +0 +341, WebRtcIsac_kQKltCdfShape +0 +347, WebRtcIsac_kQKltCdfShape +0 +354, WebRtcIsac_kQKltCdfShape +0 +360, WebRtcIsac_kQKltCdfShape +0 +368, WebRtcIsac_kQKltCdfShape +0 +378, WebRtcIsac_kQKltCdfShape +0 +388, WebRtcIsac_kQKltCdfShape +0 +400, WebRtcIsac_kQKltCdfShape +0 +418,
+WebRtcIsac_kQKltCdfShape +0 +445, WebRtcIsac_kQKltCdfShape +0 +447, WebRtcIsac_kQKltCdfShape +0 +451, WebRtcIsac_kQKltCdfShape +0 +455, WebRtcIsac_kQKltCdfShape +0 +461, WebRtcIsac_kQKltCdfShape +0 +468, WebRtcIsac_kQKltCdfShape +0 +474, WebRtcIsac_kQKltCdfShape +0 +480, WebRtcIsac_kQKltCdfShape +0 +486, WebRtcIsac_kQKltCdfShape +0 +495,
+WebRtcIsac_kQKltCdfShape +0 +505, WebRtcIsac_kQKltCdfShape +0 +516, WebRtcIsac_kQKltCdfShape +0 +528, WebRtcIsac_kQKltCdfShape +0 +543, WebRtcIsac_kQKltCdfShape +0 +564, WebRtcIsac_kQKltCdfShape +0 +583, WebRtcIsac_kQKltCdfShape +0 +608, WebRtcIsac_kQKltCdfShape +0 +635},
+{WebRtcIsac_kQKltCdfShape +686 +0, WebRtcIsac_kQKltCdfShape +686 +2, WebRtcIsac_kQKltCdfShape +686 +4, WebRtcIsac_kQKltCdfShape +686 +6, WebRtcIsac_kQKltCdfShape +686 +8, WebRtcIsac_kQKltCdfShape +686 +11, WebRtcIsac_kQKltCdfShape +686 +13, WebRtcIsac_kQKltCdfShape +686 +15, WebRtcIsac_kQKltCdfShape +686 +17, WebRtcIsac_kQKltCdfShape +686 +20,
+WebRtcIsac_kQKltCdfShape +686 +23, WebRtcIsac_kQKltCdfShape +686 +27, WebRtcIsac_kQKltCdfShape +686 +31, WebRtcIsac_kQKltCdfShape +686 +35, WebRtcIsac_kQKltCdfShape +686 +40, WebRtcIsac_kQKltCdfShape +686 +44, WebRtcIsac_kQKltCdfShape +686 +50, WebRtcIsac_kQKltCdfShape +686 +56, WebRtcIsac_kQKltCdfShape +686 +63, WebRtcIsac_kQKltCdfShape +686 +65,
+WebRtcIsac_kQKltCdfShape +686 +67, WebRtcIsac_kQKltCdfShape +686 +69, WebRtcIsac_kQKltCdfShape +686 +71, WebRtcIsac_kQKltCdfShape +686 +73, WebRtcIsac_kQKltCdfShape +686 +75, WebRtcIsac_kQKltCdfShape +686 +77, WebRtcIsac_kQKltCdfShape +686 +79, WebRtcIsac_kQKltCdfShape +686 +82, WebRtcIsac_kQKltCdfShape +686 +85, WebRtcIsac_kQKltCdfShape +686 +89,
+WebRtcIsac_kQKltCdfShape +686 +93, WebRtcIsac_kQKltCdfShape +686 +97, WebRtcIsac_kQKltCdfShape +686 +102, WebRtcIsac_kQKltCdfShape +686 +106, WebRtcIsac_kQKltCdfShape +686 +112, WebRtcIsac_kQKltCdfShape +686 +119, WebRtcIsac_kQKltCdfShape +686 +127, WebRtcIsac_kQKltCdfShape +686 +129, WebRtcIsac_kQKltCdfShape +686 +131, WebRtcIsac_kQKltCdfShape +686 +133,
+WebRtcIsac_kQKltCdfShape +686 +135, WebRtcIsac_kQKltCdfShape +686 +137, WebRtcIsac_kQKltCdfShape +686 +139, WebRtcIsac_kQKltCdfShape +686 +142, WebRtcIsac_kQKltCdfShape +686 +146, WebRtcIsac_kQKltCdfShape +686 +150, WebRtcIsac_kQKltCdfShape +686 +154, WebRtcIsac_kQKltCdfShape +686 +158, WebRtcIsac_kQKltCdfShape +686 +162, WebRtcIsac_kQKltCdfShape +686 +167,
+WebRtcIsac_kQKltCdfShape +686 +173, WebRtcIsac_kQKltCdfShape +686 +179, WebRtcIsac_kQKltCdfShape +686 +186, WebRtcIsac_kQKltCdfShape +686 +194, WebRtcIsac_kQKltCdfShape +686 +205, WebRtcIsac_kQKltCdfShape +686 +207, WebRtcIsac_kQKltCdfShape +686 +209, WebRtcIsac_kQKltCdfShape +686 +211, WebRtcIsac_kQKltCdfShape +686 +214, WebRtcIsac_kQKltCdfShape +686 +218,
+WebRtcIsac_kQKltCdfShape +686 +222, WebRtcIsac_kQKltCdfShape +686 +226, WebRtcIsac_kQKltCdfShape +686 +230, WebRtcIsac_kQKltCdfShape +686 +234, WebRtcIsac_kQKltCdfShape +686 +238, WebRtcIsac_kQKltCdfShape +686 +242, WebRtcIsac_kQKltCdfShape +686 +247, WebRtcIsac_kQKltCdfShape +686 +253, WebRtcIsac_kQKltCdfShape +686 +262, WebRtcIsac_kQKltCdfShape +686 +269,
+WebRtcIsac_kQKltCdfShape +686 +278, WebRtcIsac_kQKltCdfShape +686 +289, WebRtcIsac_kQKltCdfShape +686 +305, WebRtcIsac_kQKltCdfShape +686 +307, WebRtcIsac_kQKltCdfShape +686 +309, WebRtcIsac_kQKltCdfShape +686 +311, WebRtcIsac_kQKltCdfShape +686 +315, WebRtcIsac_kQKltCdfShape +686 +319, WebRtcIsac_kQKltCdfShape +686 +323, WebRtcIsac_kQKltCdfShape +686 +327,
+WebRtcIsac_kQKltCdfShape +686 +331, WebRtcIsac_kQKltCdfShape +686 +335, WebRtcIsac_kQKltCdfShape +686 +340, WebRtcIsac_kQKltCdfShape +686 +346, WebRtcIsac_kQKltCdfShape +686 +354, WebRtcIsac_kQKltCdfShape +686 +362, WebRtcIsac_kQKltCdfShape +686 +374, WebRtcIsac_kQKltCdfShape +686 +384, WebRtcIsac_kQKltCdfShape +686 +396, WebRtcIsac_kQKltCdfShape +686 +413,
+WebRtcIsac_kQKltCdfShape +686 +439, WebRtcIsac_kQKltCdfShape +686 +442, WebRtcIsac_kQKltCdfShape +686 +446, WebRtcIsac_kQKltCdfShape +686 +450, WebRtcIsac_kQKltCdfShape +686 +455, WebRtcIsac_kQKltCdfShape +686 +461, WebRtcIsac_kQKltCdfShape +686 +468, WebRtcIsac_kQKltCdfShape +686 +475, WebRtcIsac_kQKltCdfShape +686 +481, WebRtcIsac_kQKltCdfShape +686 +489,
+WebRtcIsac_kQKltCdfShape +686 +498, WebRtcIsac_kQKltCdfShape +686 +508, WebRtcIsac_kQKltCdfShape +686 +522, WebRtcIsac_kQKltCdfShape +686 +534, WebRtcIsac_kQKltCdfShape +686 +554, WebRtcIsac_kQKltCdfShape +686 +577, WebRtcIsac_kQKltCdfShape +686 +602, WebRtcIsac_kQKltCdfShape +686 +631},
+{WebRtcIsac_kQKltCdfShape +1368 +0, WebRtcIsac_kQKltCdfShape +1368 +2, WebRtcIsac_kQKltCdfShape +1368 +4, WebRtcIsac_kQKltCdfShape +1368 +6, WebRtcIsac_kQKltCdfShape +1368 +8, WebRtcIsac_kQKltCdfShape +1368 +10, WebRtcIsac_kQKltCdfShape +1368 +12, WebRtcIsac_kQKltCdfShape +1368 +14, WebRtcIsac_kQKltCdfShape +1368 +16, WebRtcIsac_kQKltCdfShape +1368 +20,
+WebRtcIsac_kQKltCdfShape +1368 +24, WebRtcIsac_kQKltCdfShape +1368 +28, WebRtcIsac_kQKltCdfShape +1368 +32, WebRtcIsac_kQKltCdfShape +1368 +36, WebRtcIsac_kQKltCdfShape +1368 +40, WebRtcIsac_kQKltCdfShape +1368 +44, WebRtcIsac_kQKltCdfShape +1368 +50, WebRtcIsac_kQKltCdfShape +1368 +57, WebRtcIsac_kQKltCdfShape +1368 +65, WebRtcIsac_kQKltCdfShape +1368 +67,
+WebRtcIsac_kQKltCdfShape +1368 +69, WebRtcIsac_kQKltCdfShape +1368 +71, WebRtcIsac_kQKltCdfShape +1368 +73, WebRtcIsac_kQKltCdfShape +1368 +75, WebRtcIsac_kQKltCdfShape +1368 +77, WebRtcIsac_kQKltCdfShape +1368 +79, WebRtcIsac_kQKltCdfShape +1368 +81, WebRtcIsac_kQKltCdfShape +1368 +85, WebRtcIsac_kQKltCdfShape +1368 +89, WebRtcIsac_kQKltCdfShape +1368 +93,
+WebRtcIsac_kQKltCdfShape +1368 +97, WebRtcIsac_kQKltCdfShape +1368 +101, WebRtcIsac_kQKltCdfShape +1368 +105, WebRtcIsac_kQKltCdfShape +1368 +110, WebRtcIsac_kQKltCdfShape +1368 +116, WebRtcIsac_kQKltCdfShape +1368 +123, WebRtcIsac_kQKltCdfShape +1368 +132, WebRtcIsac_kQKltCdfShape +1368 +134, WebRtcIsac_kQKltCdfShape +1368 +136, WebRtcIsac_kQKltCdfShape +1368 +138,
+WebRtcIsac_kQKltCdfShape +1368 +141, WebRtcIsac_kQKltCdfShape +1368 +143, WebRtcIsac_kQKltCdfShape +1368 +146, WebRtcIsac_kQKltCdfShape +1368 +150, WebRtcIsac_kQKltCdfShape +1368 +154, WebRtcIsac_kQKltCdfShape +1368 +158, WebRtcIsac_kQKltCdfShape +1368 +162, WebRtcIsac_kQKltCdfShape +1368 +166, WebRtcIsac_kQKltCdfShape +1368 +170, WebRtcIsac_kQKltCdfShape +1368 +174,
+WebRtcIsac_kQKltCdfShape +1368 +179, WebRtcIsac_kQKltCdfShape +1368 +185, WebRtcIsac_kQKltCdfShape +1368 +193, WebRtcIsac_kQKltCdfShape +1368 +203, WebRtcIsac_kQKltCdfShape +1368 +214, WebRtcIsac_kQKltCdfShape +1368 +216, WebRtcIsac_kQKltCdfShape +1368 +218, WebRtcIsac_kQKltCdfShape +1368 +220, WebRtcIsac_kQKltCdfShape +1368 +224, WebRtcIsac_kQKltCdfShape +1368 +227,
+WebRtcIsac_kQKltCdfShape +1368 +231, WebRtcIsac_kQKltCdfShape +1368 +235, WebRtcIsac_kQKltCdfShape +1368 +239, WebRtcIsac_kQKltCdfShape +1368 +243, WebRtcIsac_kQKltCdfShape +1368 +247, WebRtcIsac_kQKltCdfShape +1368 +251, WebRtcIsac_kQKltCdfShape +1368 +256, WebRtcIsac_kQKltCdfShape +1368 +262, WebRtcIsac_kQKltCdfShape +1368 +269, WebRtcIsac_kQKltCdfShape +1368 +277,
+WebRtcIsac_kQKltCdfShape +1368 +286, WebRtcIsac_kQKltCdfShape +1368 +297, WebRtcIsac_kQKltCdfShape +1368 +315, WebRtcIsac_kQKltCdfShape +1368 +317, WebRtcIsac_kQKltCdfShape +1368 +319, WebRtcIsac_kQKltCdfShape +1368 +323, WebRtcIsac_kQKltCdfShape +1368 +327, WebRtcIsac_kQKltCdfShape +1368 +331, WebRtcIsac_kQKltCdfShape +1368 +335, WebRtcIsac_kQKltCdfShape +1368 +339,
+WebRtcIsac_kQKltCdfShape +1368 +343, WebRtcIsac_kQKltCdfShape +1368 +349, WebRtcIsac_kQKltCdfShape +1368 +355, WebRtcIsac_kQKltCdfShape +1368 +361, WebRtcIsac_kQKltCdfShape +1368 +368, WebRtcIsac_kQKltCdfShape +1368 +376, WebRtcIsac_kQKltCdfShape +1368 +385, WebRtcIsac_kQKltCdfShape +1368 +397, WebRtcIsac_kQKltCdfShape +1368 +411, WebRtcIsac_kQKltCdfShape +1368 +429,
+WebRtcIsac_kQKltCdfShape +1368 +456, WebRtcIsac_kQKltCdfShape +1368 +459, WebRtcIsac_kQKltCdfShape +1368 +463, WebRtcIsac_kQKltCdfShape +1368 +467, WebRtcIsac_kQKltCdfShape +1368 +473, WebRtcIsac_kQKltCdfShape +1368 +478, WebRtcIsac_kQKltCdfShape +1368 +485, WebRtcIsac_kQKltCdfShape +1368 +491, WebRtcIsac_kQKltCdfShape +1368 +497, WebRtcIsac_kQKltCdfShape +1368 +505,
+WebRtcIsac_kQKltCdfShape +1368 +514, WebRtcIsac_kQKltCdfShape +1368 +523, WebRtcIsac_kQKltCdfShape +1368 +535, WebRtcIsac_kQKltCdfShape +1368 +548, WebRtcIsac_kQKltCdfShape +1368 +565, WebRtcIsac_kQKltCdfShape +1368 +585, WebRtcIsac_kQKltCdfShape +1368 +611, WebRtcIsac_kQKltCdfShape +1368 +640}};
+
+/* code length for all coefficients using different models */
+const double WebRtcIsac_kQKltCodeLenGain[392] = {
+ 12.29956028,  7.83007500,  4.25642781,  0.17489215,  4.27591254,  7.66908312,  10.47643804, 11.91253716, 9.03421572,  8.57373525,
+ 6.73555740,   5.41409855,  3.65237863,  0.42623449,  3.22418399,  5.68145719,  7.14201900,  8.20558413,  9.34178852,  13.00000000,
+ 13.00000000,  9.47643804,  6.71459778,  3.55472644,  0.28457419,  3.70652835,  7.41128536,  11.60768258, 14.00000000, 12.29956028,
+ 9.29956028,   8.02845645,  6.54878889,  5.07667251,  2.87749552,  0.65310542,  3.11316029,  4.87911416,  5.89540125,  6.76398581,
+ 7.70537925,   9.67807191,  11.14201900, 14.00000000, 14.00000000, 12.29956028, 9.69621925,  7.70537925,  5.49915812,  2.67441345,
+ 0.63381441,   2.74999773,  5.76877882,  8.41503750,  10.75207249, 14.00000000, 9.60768258,  8.85025288,  8.09913319,  7.09010692,
+ 6.36337538,   5.46667027,  4.51618422,  2.64189829,  1.21843537,  2.43823474,  3.89409149,  4.99718498,  6.00989604,  6.78325414,
+ 7.39637366,   8.76159526,  10.60768258, 11.67807191, 12.29956028, 9.54056838,  7.95560588,  5.63040265,  3.81264793,  2.18521728,
+ 1.33727600,   2.37909290,  3.94981123,  5.52426657,  7.32051990,  8.84012866,  11.35614381, 13.00000000, 12.29956028, 11.35614381,
+ 11.35614381,  10.24511250, 9.12963528,  8.80032766,  7.97763219,  7.83007500,  7.09913319,  6.68711704,  6.41879942,  5.74379131,
+ 5.25096862,   4.43061904,  3.54492969,  2.72664147,  2.16306204,  2.71142226,  3.34357514,  4.07444556,  4.95151313,  5.68145719,
+ 6.12041675,   6.55085135,  7.00282052,  7.55705650,  8.44541115,  9.41503750,  10.91253716, 11.60768258, 14.00000000, 14.00000000,
+ 13.00000000,  11.60768258, 10.47643804, 8.80032766,  7.38161450,  6.52426657,  5.47447919,  4.53749773,  3.92719747,  3.41058292,
+ 2.88495635,   2.79344346,  2.85805254,  3.18261657,  3.57216340,  4.08225499,  4.74438125,  5.51215997,  6.30477171,  7.15450995,
+ 8.28575448,   9.69621925,  11.60768258, 13.00000000, 13.67807191, 15.00000000, 15.00000000, 14.00000000, 12.41503750, 11.29956028,
+ 12.41503750,  11.60768258, 10.11735695, 9.47643804,  9.12963528,  9.41503750,  8.23181568,  7.97763219,  7.82507432,  7.50814690,
+ 7.33466408,   6.99157138,  6.95834085,  6.80524315,  6.47447919,  6.35614381,  6.02128954,  5.71459778,  5.58109327,  5.05821876,
+ 4.94539568,   4.59220115,  4.27591254,  4.01522554,  3.89376424,  3.83760867,  3.73321346,  3.74674342,  3.90493270,  4.18942837,
+ 4.33599724,   4.42446075,  4.81760565,  5.07667251,  5.54570071,  5.95697272,  6.46667027,  6.91253716,  7.33466408,  7.82507432,
+ 8.05163277,   9.12963528,  9.02272008,  9.77118131,  9.60768258,  10.11735695, 12.00000000, 12.83007500, 15.00000000, 15.00000000,
+ 14.00000000,  15.00000000, 11.75207249, 12.29956028, 10.95560588, 9.93391081,  9.02272008,  8.00564656,  7.82507432,  7.28919357,
+ 6.77599833,   6.56745810,  6.19910010,  6.23347109,  5.67694524,  5.41879942,  4.96039548,  4.88170777,  4.60768258,  4.52883287,
+ 4.28876323,   4.17583679,  4.21332197,  4.14474217,  4.16119001,  4.12809476,  4.18501706,  4.28489599,  4.35299136,  4.60286019,
+ 4.63040265,   4.81017544,  5.00989604,  5.33822190,  5.43489792,  5.84644797,  6.13272126,  6.75444729,  7.36337538,  7.96108101,
+ 8.54056838,   9.28575448,  9.12963528,  9.47643804,  10.75207249, 12.29956028, 12.41503750, 11.14201900, 12.83007500, 15.00000000,
+ 15.00000000,  15.00000000, 15.00000000, 15.00000000, 15.00000000, 14.00000000, 15.00000000, 15.00000000, 15.00000000, 15.00000000,
+ 13.67807191,  12.41503750, 11.91253716, 11.60768258, 12.29956028, 13.00000000, 12.29956028, 10.95560588, 10.60768258, 9.77118131,
+ 10.02272008,  10.11735695, 10.14201900, 10.75207249, 10.75207249, 9.69621925,  9.47643804,  9.75207249,  8.72387559,  8.47643804,
+ 8.68711704,   8.40754296,  8.20558413,  8.26529038,  7.84518189,  7.89147554,  7.64685317,  7.41128536,  7.33466408,  7.42635281,
+ 7.24845594,   7.11430363,  7.07518750,  7.07518750,  6.70537925,  6.64906082,  6.73555740,  6.62931259,  6.50015411,  6.26190774,
+ 6.36337538,   6.19264508,  5.95151313,  6.08860801,  5.91253716,  5.83007500,  5.68145719,  5.67244736,  5.82632286,  6.00282052,
+ 5.65348627,   5.74970158,  5.87846648,  5.69052365,  5.64464890,  5.58531476,  5.81512466,  5.57688409,  5.87329553,  5.62170514,
+ 5.74851759,   5.81017544,  5.64464890,  5.76398581,  5.60339522,  5.69507833,  5.84139031,  5.68711704,  5.73908047,  5.84139031,
+ 5.71459778,   5.96245305,  5.82632286,  5.89540125,  6.08860801,  6.12041675,  6.13272126,  6.30477171,  6.24177679,  6.28232358,
+ 6.29091619,   6.53239445,  6.81512466,  6.72620440,  6.65792533,  6.84518189,  7.10215454,  7.44157929,  7.57793523,  7.59485854,
+ 7.66460965,   8.05163277,  8.00564656,  8.43775758,  8.10518224,  8.28575448,  8.77118131,  8.23181568,  8.29264087,  8.20558413,
+ 8.34894831,   8.89147554,  8.40754296,  8.61629571,  8.64244800,  8.61629571,  8.93391081,  8.50814690,  9.02272008,  8.68711704,
+ 8.65127185,   9.41503750,  9.11735695,  9.85025288,  10.24511250, 10.60768258, 10.47643804, 11.60768258, 11.35614381, 12.29956028,
+ 15.00000000,  15.00000000, 12.29956028, 13.00000000, 15.00000000, 15.00000000, 15.00000000, 15.00000000, 14.00000000, 12.83007500,
+ 15.00000000,  15.00000000};
+
+const double WebRtcIsac_kQKltCodeLenShape[578] = {
+ 0.00002201,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  14.00000000,
+ 0.00011007,  13.00000000,  0.00066056,  11.60768258,  11.14201900,  0.00185034,  10.24511250,  9.08113676,  0.00480700,  9.41503750,
+ 8.09913319,  0.01084946,  8.02845645,  13.00000000,  6.40941295,  0.02926496,  6.95560588,  13.00000000,  13.00000000,  6.21864029,
+ 0.03861814,  6.30477171,  12.29956028,  11.14201900,  4.66964328,  0.12158980,  4.62604734,  10.91253716,  14.00000000,  11.35614381,
+ 8.76159526,  3.89671219,  0.21179147,  3.99472634,  8.02272008,  13.00000000,  0.00002201,  0.00002201,  0.00002201,  0.00002201,
+ 14.00000000,  0.00011007,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  14.00000000,  0.00011007,  10.95560588,  0.00147568,
+ 10.95560588,  10.24511250,  0.00240150,  10.24511250,  8.02845645,  0.01056115,  8.17982104,  6.74497143,  0.02381629,  7.15137706,
+ 5.68598330,  0.05650076,  5.72970467,  13.00000000,  14.00000000,  5.18221688,  0.07697435,  5.37611851,  11.91253716,  9.54056838,
+ 3.92853764,  0.22143514,  3.76428491,  9.28575448,  14.00000000,  11.35614381,  7.37794818,  3.30585980,  0.37001735,  3.18521728,
+ 6.88886433,  11.91253716,  14.00000000,  0.00002201,  0.00002201,  0.00002201,  0.00002201,  14.00000000,  0.00019814,  14.00000000,
+ 0.00002201,  13.00000000,  0.00028621,  14.00000000,  14.00000000,  0.00028621,  13.00000000,  11.91253716,  0.00094690,  11.35614381,
+ 10.60768258,  0.00213692,  10.24511250,  8.37794818,  0.00863317,  8.43775758,  14.00000000,  7.41128536,  0.01698415,  7.42635281,
+ 6.02702021,  0.04514485,  6.01558154,  13.00000000,  5.05090284,  0.09207659,  4.98877274,  15.00000000,  10.75207249,  4.41081703,
+ 0.15733047,  4.17424617,  11.60768258,  14.00000000,  15.00000000,  10.06926266,  3.74320161,  0.23091117,  3.81141115,  10.02272008,
+ 11.91253716,  6.90196792,  2.92703003,  0.46874039,  2.93846004,  7.26190774,  11.60768258,  13.00000000,  10.21864029,  7.95560588,
+ 5.19345038,  2.40520888,  0.77554605,  2.56628417,  5.41409855,  8.81017544,  12.29956028,  0.00002201,  0.00002201,  0.00002201,
+ 0.00002201,  11.91253716,  0.00068259,  12.29956028,  10.11735695,  0.00233535,  10.47643804,  10.35614381,  0.00140957,  12.29956028,
+ 10.24511250,  0.00222511,  10.47643804,  7.72387559,  0.01475842,  7.52426657,  6.73555740,  0.02924249,  6.55085135,  14.00000000,
+ 5.45021533,  0.06930886,  5.38345116,  4.55913083,  0.11289841,  4.92853764,  11.60768258,  4.07148162,  0.19798859,  3.87200568,
+ 13.00000000,  9.60768258,  3.31393725,  0.29937064,  3.57321111,  9.35614381,  14.00000000,  8.02845645,  3.08542800,  0.45503557,
+ 2.80678268,  7.47643804,  12.29956028,  14.00000000,  14.00000000,  14.00000000,  6.83509307,  2.69166097,  0.61673447,  2.51266238,
+ 6.84771516,  11.91253716,  11.14201900,  7.72387559,  4.61899789,  2.17136763,  1.03592993,  2.28586183,  4.86814304,  7.78568088,
+ 10.91253716,  14.00000000,  15.00000000,  11.04580369,  9.28575448,  7.26190774,  5.21946023,  3.61575588,  2.18431651,  1.45666604,
+ 2.33566383,  3.85470467,  5.46181107,  7.30651304,  8.89147554,  11.91253716,  0.00002201,  0.00002201,  15.00000000,  0.00011007,
+ 15.00000000,  10.47643804,  0.00149771,  11.60768258,  7.30651304,  0.01903486,  7.20558413,  6.92318440,  0.02587674,  6.71459778,
+ 7.28919357,  0.01756340,  7.45696818,  6.06926266,  0.03841465,  6.45890338,  4.46035649,  0.13896157,  4.43204392,  14.00000000,
+ 14.00000000,  4.09010692,  0.19672654,  3.86653665,  13.00000000,  14.00000000,  11.35614381,  3.45117809,  0.27340929,  3.64878468,
+ 9.93391081,  9.60768258,  3.30585980,  0.35178287,  3.15607895,  9.12963528,  12.29956028,  7.93391081,  2.82180202,  0.53064436,
+ 2.68258739,  7.42635281,  11.14201900,  15.00000000,  10.06926266,  6.15294265,  2.55861197,  0.69308389,  2.48602573,  5.90592231,
+ 9.47643804,  13.00000000,  11.14201900,  8.20558413,  5.21618324,  2.34973357,  1.00379135,  2.09611815,  4.80278331,  7.82507432,
+ 11.91253716,  12.29956028,  8.98877274,  6.75444729,  4.23886435,  2.19859476,  1.29528579,  2.06032897,  4.06704711,  6.76398581,
+ 9.34178852,  13.00000000,  12.29956028,  10.14201900,  8.40754296,  6.16710999,  4.64850859,  3.28768796,  2.22892326,  1.78568088,
+ 2.35209193,  3.45141888,  4.91121176,  6.45117809,  8.20558413,  10.35614381,  13.00000000,  12.83007500,  14.00000000,  14.00000000,
+ 12.83007500,  11.35614381,  9.85025288,  9.41503750,  8.07518750,  7.34894831,  6.25516616,  5.25096862,  4.52477322,  3.68513357,
+ 3.06152306,  2.64011320,  2.54608637,  2.95765662,  3.44445223,  4.01665007,  4.63258525,  5.27633906,  6.24678325,  7.05163277,
+ 8.02845645,  9.60768258,  10.79054663,  12.83007500,  14.00000000,  0.00002201,  15.00000000,  0.00011007,  15.00000000,  5.82009091,
+ 0.02710994,  10.11735695,  15.00000000,  3.30346709,  0.17317845,  6.41128536,  15.00000000,  15.00000000,  10.83007500,  2.72897475,
+ 0.35935964,  3.85853144,  13.00000000,  15.00000000,  3.72766182,  0.17268211,  4.74674342,  15.00000000,  15.00000000,  4.40754296,
+ 0.11993823,  4.93997965,  15.00000000,  15.00000000,  4.70193743,  0.22110152,  3.27591254,  11.35614381,  15.00000000,  12.54056838,
+ 6.94743195,  2.99157138,  0.62079088,  2.25338071,  7.41128536,  15.00000000,  15.00000000,  11.47643804,  6.04580369,  2.80232255,
+ 0.98380109,  1.70502034,  5.15607895,  10.75207249,  13.00000000,  15.00000000,  15.00000000,  11.35614381,  6.44157929,  2.37955105,
+ 0.74567258,  2.44181848,  6.07667251,  10.75207249,  15.00000000,  15.00000000,  15.00000000,  15.00000000,  9.44541115,  5.25432568,
+ 1.97815248,  0.94086682,  2.57885561,  5.17265730,  8.72387559,  14.00000000,  15.00000000,  15.00000000,  15.00000000,  10.67807191,
+ 7.59485854,  4.91187431,  2.91934900,  1.32321648,  1.50614455,  3.85215911,  7.15137706,  10.60768258,  14.00000000,  15.00000000,
+ 15.00000000,  15.00000000,  15.00000000,  15.00000000,  15.00000000,  15.00000000,  11.09310940,  8.50814690,  6.45117809,  3.94675287,
+ 2.16730774,  1.37674720,  2.10215454,  3.97278511,  6.34178852,  8.50814690,  10.32757466,  14.00000000,  14.00000000,  15.00000000,
+ 15.00000000,  15.00000000,  14.00000000,  10.02272008,  9.41503750,  7.03710399,  5.13349379,  3.61683574,  2.47999526,  1.82788507,
+ 2.00945280,  3.08020557,  4.69793233,  6.64025044,  8.32051990,  10.91253716,  14.00000000,  15.00000000,  15.00000000,  15.00000000,
+ 15.00000000,  15.00000000,  15.00000000,  14.41503750,  9.41503750,  7.10215454,  5.60768258,  4.44014496,  3.30064444,  2.56941678,
+ 2.28103909,  2.42694459,  2.93206152,  3.83383692,  4.75207249,  6.02702021,  7.16394964,  9.35614381,  11.14201900,  12.29956028,
+ 15.00000000,  15.00000000,  14.00000000,  15.00000000,  15.00000000,  10.11735695,  8.85025288,  7.02558541,  5.86300889,  4.79726396,
+ 3.95117259,  3.44925321,  2.96348293,  2.88219459,  2.89671219,  3.10518224,  3.50065237,  4.05748549,  4.55291677,  5.23016216,
+ 5.79420675,  6.39452048,  6.91253716,  7.97763219,  8.32051990,  9.35614381,  10.60768258,  11.60768258,  15.00000000,  15.00000000,
+ 12.83007500,  12.41503750,  11.91253716,  14.00000000,  14.00000000,  11.35614381,  10.75207249,  10.91253716,  8.93391081,  8.61629571,
+ 8.29264087,  8.02272008,  7.89147554,  7.33466408,  7.41128536,  6.71459778,  5.92853764,  5.31964048,  4.97693875,  4.70308379,
+ 4.43632704,  4.34357514,  4.30825648,  4.26529038,  4.19465917,  4.27420773,  4.22315577,  4.08262792,  4.06667818,  4.16119001,
+ 4.08860801,  4.26698468,  4.39128315,  4.71517590,  5.10442472,  5.50714538,  5.81017544,  6.15922208};
+
+/* left KLT transforms */
+/* Left KLT transform for the gain coefficients: one 4-entry matrix per
+   model (KLT_NUM_MODELS == 3).  Each row appears to be a 2x2 symmetric
+   orthonormal transform stored row-major ([-a b; b a] pattern visible in
+   the data) -- TODO(review): confirm orientation against the code that
+   applies it. */
+const double WebRtcIsac_kKltT1Gain[3][4] = {
+{-0.79742827,  0.60341375,  0.60341375,  0.79742827},
+{-0.81372390,  0.58125159,  0.58125159,  0.81372390},
+{-0.71832547,  0.69570721,  0.69570721,  0.71832547}};
+
+/* Left KLT transform for the shape coefficients: one 324-entry matrix per
+   model.  Presumably an 18x18 basis stored row-major (18*18 == 324, with
+   18 == LPC_SHAPE_ORDER) -- TODO(review): confirm dimensions against the
+   transform code. */
+const double WebRtcIsac_kKltT1Shape[3][324] = {
+{ 0.00159597,  0.00049320,  0.00513821,  0.00021066,  0.01338581, -0.00422367, -0.00272072,  0.00935107,  0.02047622,  0.02691189,
+ 0.00478236,  0.03969702,  0.00886698,  0.04877604, -0.10898362, -0.05930891, -0.03415047,  0.98889721,  0.00293558, -0.00035282,
+ 0.01156321, -0.00195341, -0.00937631,  0.01052213, -0.02551163,  0.01644059,  0.03189927,  0.07754773, -0.08742313, -0.03026338,
+ 0.05136248, -0.14395974,  0.17725040,  0.22664856,  0.93380230,  0.07076411,  0.00557890, -0.00222834,  0.01377569,  0.01466808,
+ 0.02847361, -0.00603178,  0.02382480, -0.01210452,  0.03797267, -0.02371480,  0.11260335, -0.07366682,  0.00453436, -0.04136941,
+-0.07912843, -0.95031418,  0.25295337, -0.05302216, -0.00617554, -0.00044040, -0.00653778,  0.01097838,  0.01529174,  0.01374431,
+-0.00748512, -0.00020034,  0.02432713,  0.11101570, -0.08556891,  0.09282249, -0.01029446,  0.67556443, -0.67454300,  0.06910063,
+ 0.20866865, -0.10318050,  0.00932175,  0.00524058,  0.00803610, -0.00594676, -0.01082578,  0.01069906,  0.00546768,  0.01565291,
+ 0.06816200,  0.10201227,  0.16812734,  0.22984074,  0.58213170, -0.54138651, -0.51379962,  0.06847390, -0.01920037, -0.04592324,
+-0.00467394,  0.00328858,  0.00377424, -0.00987448,  0.08222096, -0.00377301,  0.04551941, -0.02592517,  0.16317082,  0.13077530,
+ 0.22702921, -0.31215289, -0.69645962, -0.38047101, -0.39339411,  0.11124777,  0.02508035, -0.00708074,  0.00400344,  0.00040331,
+ 0.01142402,  0.01725406,  0.01635170,  0.14285366,  0.03949233, -0.05905676,  0.05877154, -0.17497577, -0.32479440,  0.80754464,
+-0.38085603, -0.17055430, -0.03168622, -0.07531451,  0.02942002, -0.02148095, -0.00754114, -0.00322372,  0.00567812, -0.01701521,
+-0.12358320,  0.11473564,  0.09070136,  0.06533068, -0.22560802,  0.19209022,  0.81605094,  0.36592275, -0.09919829,  0.16667122,
+ 0.16300725,  0.04803807,  0.06739263, -0.00156752, -0.01685302, -0.00905240, -0.02297836, -0.00469939,  0.06310613, -0.16391930,
+ 0.10919511,  0.12529293,  0.85581322, -0.32145522,  0.24539076,  0.07181839,  0.07289591,  0.14066759,  0.10406711,  0.05815518,
+ 0.01072680, -0.00759339,  0.00053486, -0.00044865,  0.03407361,  0.01645348,  0.08758579,  0.27722240,  0.53665485, -0.74853376,
+-0.01118192, -0.19805430,  0.06130619, -0.09675299,  0.08978480,  0.03405255, -0.00706867,  0.05102045,  0.03250746,  0.01849966,
+-0.01216314, -0.01184187, -0.01579288,  0.00114807,  0.11376166,  0.88342114, -0.36425379,  0.13863190,  0.12524180, -0.13553892,
+ 0.04715856, -0.12341103,  0.04531568,  0.01899360, -0.00206897,  0.00567768, -0.01444163,  0.00411946, -0.00855896,  0.00381663,
+-0.01664861, -0.05534280,  0.21328278,  0.20161162,  0.72360394,  0.59130708, -0.08043791,  0.08757349, -0.13893918, -0.05147377,
+ 0.02680690, -0.01144070,  0.00625162, -0.00634215, -0.01248947, -0.00329455, -0.00609625, -0.00136305, -0.05097048, -0.01029851,
+ 0.25065384, -0.16856837, -0.07123372,  0.15992623, -0.39487617, -0.79972301,  0.18118185, -0.04826639, -0.01805578, -0.02927253,
+-0.16400618,  0.07472763,  0.10376449,  0.01705406,  0.01065801, -0.01500498,  0.02039914,  0.37776349, -0.84484186,  0.10434286,
+ 0.15616990,  0.13474456, -0.00906238, -0.25238368, -0.03820885, -0.10650905, -0.03880833, -0.03660028, -0.09640894,  0.00583314,
+ 0.01922097,  0.01489911, -0.02431117, -0.09372217,  0.39404721, -0.84786223, -0.31277121,  0.03193850,  0.01974060,  0.01887901,
+ 0.00337911, -0.11359599, -0.02792521, -0.03220184, -0.01533311,  0.00015962, -0.04225043, -0.00933965,  0.00675311,  0.00206060,
+ 0.15926771,  0.40199829, -0.80792558, -0.35591604, -0.17169764,  0.02830436,  0.02459982, -0.03438589,  0.00718705, -0.01798329,
+-0.01594508, -0.00702430, -0.00952419, -0.00962701, -0.01307212, -0.01749740,  0.01299602,  0.00587270, -0.36103108, -0.82039266,
+-0.43092844, -0.08500097, -0.04361674, -0.00333482,  0.01250434, -0.02538295, -0.00921797,  0.01645071, -0.01400872,  0.00317607,
+ 0.00003277, -0.01617646, -0.00616863, -0.00882661,  0.00466157,  0.00353237,  0.91803104, -0.39503305, -0.02048964,  0.00060125,
+ 0.01980634,  0.00300109,  0.00313880,  0.00657337,  0.00715163,  0.00000261,  0.00854276, -0.00154825, -0.00516128,  0.00909527,
+ 0.00095609,  0.00701196, -0.00221867, -0.00156741},
+{-0.00469582, -0.00020403, -0.00587134,  0.00185153, -0.02256479, -0.01185761, -0.02891481, -0.00493792, -0.00182344,  0.00285962,
+ 0.01558059, -0.02185140,  0.04639438, -0.04357142,  0.12718613, -0.06756136,  0.05542227,  0.98480184, -0.00374376, -0.00236433,
+-0.00607169, -0.00303290, -0.00127243, -0.01794845,  0.00620033, -0.00732704, -0.02837749, -0.00107164,  0.04820548,  0.00713300,
+ 0.09784244, -0.16806261, -0.04563341, -0.33406041,  0.91554083, -0.08139655, -0.00415851, -0.00538193, -0.00731198, -0.00534534,
+-0.00623075, -0.02016943, -0.05480133, -0.03172290, -0.03879603,  0.01518441,  0.09591688,  0.02238470,  0.08126640,  0.08236821,
+-0.24802119,  0.89516402,  0.32029647,  0.07188887, -0.00220366,  0.00344025, -0.00277284,  0.00358963, -0.08668007, -0.02205910,
+-0.05289669, -0.03535201, -0.01188017, -0.06456872, -0.09321006, -0.00009617, -0.15804070,  0.24632041,  0.90166119,  0.19250690,
+ 0.17264619, -0.09699155, -0.00567329, -0.00897700, -0.01442565, -0.01939390,  0.03702127, -0.02999862, -0.04385696, -0.05232394,
+-0.03339177,  0.03905964, -0.00281424, -0.29213275,  0.02892968,  0.90257613, -0.21546058, -0.18070946,  0.09014567,  0.04117230,
+-0.01029696, -0.00329116, -0.03354346,  0.02937079,  0.01274208, -0.01260649, -0.03505571, -0.01020645,  0.03787209,  0.12132165,
+-0.20826840,  0.81556933, -0.43874351,  0.21518682, -0.14564290, -0.05210031,  0.07124563,  0.06127983, -0.00457321,  0.01740496,
+ 0.04185176,  0.00128036, -0.05033693, -0.01890046,  0.06221734,  0.10280078, -0.03738531,  0.04830209, -0.08408293, -0.46409009,
+-0.83936263, -0.14817619, -0.13135927,  0.04563506,  0.08340661,  0.04040200,  0.00044396, -0.01365972,  0.01228951,  0.01078273,
+ 0.09205406, -0.03791500,  0.07135889,  0.08158339,  0.06298278, -0.22875755, -0.92917558, -0.11248260,  0.17801883, -0.03971674,
+-0.07491915,  0.06477287,  0.04635713,  0.01856159,  0.00130895, -0.01991604,  0.02358176, -0.09376056,  0.02782280, -0.04691559,
+ 0.13749249,  0.31383132,  0.92274602,  0.04727419,  0.09765196, -0.02108945,  0.00626005,  0.05193322,  0.02009133,  0.03094066,
+ 0.04573470,  0.00451733,  0.00240169, -0.00982355, -0.03546208, -0.14156875, -0.02480689,  0.22997442,  0.09778317,  0.88834235,
+-0.32797611, -0.00079977,  0.04917079,  0.06977359,  0.06451185,  0.07816204,  0.03119314,  0.01136506,  0.01062006,  0.00632783,
+ 0.03241828, -0.03318847, -0.01350502, -0.30055361,  0.07265375,  0.17308022,  0.88795796, -0.23231020, -0.08932700,  0.11759604,
+ 0.00590705,  0.03525351,  0.00840466,  0.04389942,  0.04387629,  0.04003275,  0.01772966,  0.02709780, -0.02393282,  0.02766178,
+ 0.00342983, -0.33882220,  0.76612668,  0.44061716, -0.28414784, -0.09364014,  0.03694060,  0.01124120,  0.01130268, -0.02869682,
+-0.07428963, -0.03504754,  0.05874942,  0.01196795,  0.02003875,  0.00787152, -0.01605561,  0.04501257, -0.06959958, -0.13015784,
+-0.05738065,  0.04681625,  0.06668700, -0.04492094,  0.02927765, -0.94404277,  0.19243952,  0.09504337, -0.12540826,  0.05394317,
+-0.07972638, -0.02145188,  0.00136427,  0.01964678,  0.06667373,  0.06204535,  0.17302394,  0.22005905,  0.58329964, -0.68440447,
+ 0.19628796,  0.15718011, -0.12481840, -0.08222507,  0.11780870,  0.03798206, -0.01818866,  0.00892766,  0.05582263,  0.01126832,
+-0.00973589,  0.00697442, -0.09937902,  0.06621185, -0.19452202, -0.80004569, -0.13946094, -0.48990700, -0.17595191, -0.00798873,
+-0.06121856,  0.08768040, -0.04507631,  0.00448896,  0.01153941, -0.04711652, -0.01050749, -0.01660047, -0.03007159, -0.01468906,
+ 0.12848053,  0.13859838,  0.93863771, -0.22250065, -0.14841278,  0.04666032, -0.06344813, -0.01915105, -0.01840150, -0.02389410,
+-0.01245496,  0.05023402,  0.02125840,  0.02467318, -0.01893022, -0.00889647,  0.00551817,  0.00481915, -0.40626968, -0.89028236,
+ 0.18261687, -0.03852330,  0.02621926, -0.05420122, -0.01704117, -0.00072893, -0.02694170, -0.04335124,  0.02256467,  0.00642301,
+-0.01619484, -0.00871160,  0.00400065, -0.00488820, -0.00752173, -0.00170603,  0.89554989, -0.41825934, -0.08725803, -0.09051404,
+-0.00916236, -0.02959065, -0.07268075, -0.00816626, -0.00314215, -0.01941078, -0.00036782, -0.00188655, -0.02107724, -0.00771657,
+-0.00448194, -0.00387517,  0.00082998,  0.00202471},
+{ 0.00167296, -0.00647772, -0.00604394,  0.01490810, -0.00837664,  0.00246438,  0.02082153,  0.01216715,  0.01001396, -0.02850860,
+-0.01187868, -0.00113289,  0.04140237, -0.11084998,  0.16102260,  0.20084170, -0.28969446, -0.91312256,  0.00087788, -0.00136895,
+ 0.00004622,  0.00578894,  0.00524119, -0.00044550,  0.00948906, -0.00396910, -0.03312197, -0.00075487,  0.00987494, -0.02088734,
+ 0.09835550, -0.20080342,  0.13687782, -0.16111863, -0.90089988,  0.30312999,  0.00248784, -0.00975419, -0.01617200,  0.00699371,
+-0.02151635, -0.01625774, -0.01262800,  0.02588781, -0.05620764, -0.13651454,  0.04242442, -0.02615307,  0.20497288, -0.20422909,
+ 0.14184406,  0.89712919,  0.01758042,  0.25447787, -0.00207668, -0.00260329,  0.00724812, -0.01007749,  0.00806242, -0.03089729,
+-0.01161934, -0.00618676, -0.10327342, -0.10160272,  0.11919283,  0.20781533,  0.11564869, -0.19072476,  0.86402008, -0.24650846,
+ 0.24684161,  0.04775750,  0.00486888, -0.01735569, -0.01868000, -0.01870386, -0.03243262, -0.05883701, -0.03433371,  0.10441236,
+-0.22831067, -0.22837988,  0.15082544, -0.21313767,  0.13215611, -0.78096079, -0.32270595, -0.21307018,  0.17339271, -0.05435742,
+-0.00940813,  0.00272520,  0.00542917, -0.05232991, -0.01280809, -0.10773627, -0.17626479,  0.03719285, -0.26297104, -0.21780618,
+ 0.21406665,  0.15202177,  0.75911044,  0.38627481, -0.16504189, -0.10242997, -0.02394939, -0.06018959,  0.00994733, -0.02617197,
+-0.01543723, -0.10320051, -0.03010481, -0.19098072, -0.06893233,  0.12253174, -0.25556092, -0.31989059,  0.09542655,  0.72712041,
+-0.43108921, -0.01568072, -0.16532685,  0.06646835, -0.08885408, -0.00050364, -0.01791050,  0.00245405,  0.00204794, -0.17948691,
+-0.05193881, -0.16329387, -0.13676259,  0.01214133, -0.30994612, -0.00687734,  0.63254090, -0.47180795, -0.35409214,  0.23658315,
+ 0.11170294,  0.05229887, -0.06107035, -0.01094212,  0.01523854, -0.01608284, -0.03739206, -0.23864328, -0.03958494, -0.19305719,
+-0.26019058,  0.24108257, -0.55933566,  0.40623396, -0.53367968, -0.08930957, -0.00599383, -0.00050845,  0.06960811,  0.02664961,
+ 0.01464197, -0.00486781, -0.01905736,  0.01437578,  0.02379930, -0.26639588,  0.05208876, -0.43525002, -0.63009424,  0.05251889,
+ 0.56732782, -0.06731164, -0.03705909, -0.03253946,  0.00950673, -0.07941760,  0.02388267, -0.01258409, -0.00343524,  0.00148711,
+-0.00362107,  0.03981813, -0.07235214, -0.46180041, -0.05595288, -0.55699317,  0.61935853, -0.25379716,  0.06796783,  0.01039267,
+-0.06329171, -0.02143024,  0.09406929, -0.00799203, -0.01419805, -0.00603024,  0.01313145,  0.00091161, -0.00212107, -0.02405340,
+ 0.07146405, -0.76695326, -0.14841817,  0.60372663, -0.01478424,  0.06522462,  0.08580016, -0.05817981,  0.02438942,  0.04840904,
+ 0.02934363, -0.02239678, -0.00582247, -0.00091312, -0.00394148, -0.00285276, -0.03435745,  0.05277435,  0.17882781, -0.06194164,
+ 0.27321118,  0.01840179, -0.10188148, -0.33168524, -0.03491221,  0.67351789,  0.37017376,  0.32083717,  0.09737800, -0.20998084,
+-0.10725041,  0.06379186,  0.02169903, -0.02031584,  0.05623799, -0.18300962, -0.17337803,  0.08915172, -0.53835537, -0.08547263,
+ 0.15163321,  0.56732906,  0.21878115,  0.37274266,  0.26206918,  0.13443927,  0.09178695, -0.03276324, -0.01131664, -0.00236369,
+ 0.00772568,  0.01008805, -0.17122615,  0.15301569,  0.40135484, -0.06058913,  0.56405128, -0.05176853,  0.24544337,  0.62448073,
+ 0.07265009, -0.01198695,  0.05151774, -0.03678498,  0.01886154,  0.03724094,  0.01393667,  0.00758055, -0.00254297,  0.00537118,
+ 0.24169707, -0.41735970, -0.67564355, -0.09270478,  0.53106033,  0.06214579,  0.02574404,  0.09943837,  0.03032542,  0.02194476,
+ 0.06369772, -0.00133741,  0.01301113,  0.01508494,  0.00036111, -0.00278870,  0.00139205,  0.00015792, -0.43347887,  0.69923146,
+-0.55406563, -0.01102231,  0.01347767,  0.07012139, -0.02530164,  0.06803192,  0.01177196,  0.04374491,  0.04073027,  0.04037438,
+ 0.00167330, -0.01807065, -0.00425562,  0.00149653, -0.00035119, -0.00172888,  0.84785495,  0.52289580,  0.01067734, -0.00859194,
+ 0.01685964,  0.00481442,  0.00434738,  0.07592695,  0.01419942,  0.01005336,  0.03316937,  0.00360465,  0.00435039,  0.00029122,
+ 0.00171268,  0.00198919, -0.00046889, -0.00094176}};
+
+/* right KLT transforms */
+/* Right KLT transform for the gain coefficients: one 36-entry matrix per
+   model.  Presumably a 6x6 basis stored row-major (6*6 == 36, with 6
+   matching SUBFRAMES) -- TODO(review): confirm dimensions against the
+   transform code. */
+const double WebRtcIsac_kKltT2Gain[3][36] = {
+{ 0.14572837, -0.45446306,  0.61990621, -0.52197033,  0.32145074, -0.11026900, -0.20698282,  0.48962182, -0.27127933, -0.33627476,
+ 0.65094037, -0.32715751,  0.40262573, -0.47844405, -0.33876075,  0.44130653,  0.37383966, -0.39964662, -0.51730480,  0.06611973,
+ 0.49030187,  0.47512886, -0.02141226, -0.51129451, -0.58578569, -0.39132064, -0.13187771,  0.15649421,  0.40735596,  0.54396897,
+ 0.40381276,  0.40904942,  0.41179766,  0.41167576,  0.40840251,  0.40468132},
+{-0.11368135,  0.34815515, -0.56434996,  0.61130763, -0.39970336,  0.11795708,  0.28514257, -0.58879243,  0.32775812,  0.27024886,
+-0.56251299,  0.27411037,  0.42649186, -0.44080232, -0.36408215,  0.35932457,  0.43592895, -0.41484213, -0.49813030, -0.00012592,
+ 0.49865688,  0.47634953,  0.01094246, -0.52552726, -0.56154082, -0.41110686, -0.14170764,  0.15946614,  0.40818082,  0.55094554,
+ 0.40051601,  0.41084781,  0.41567800,  0.41450700,  0.40871872,  0.39891823},
+{-0.10719481,  0.34796287, -0.54573957,  0.59521001, -0.43943367,  0.14907223,  0.26554957, -0.59549939,  0.36760692,  0.26040652,
+-0.55268701,  0.25778784,  0.38994096, -0.45282773, -0.37975656,  0.40213055,  0.43052647, -0.38937904, -0.52698359,  0.02788094,
+ 0.48284286,  0.47792474,  0.02557759, -0.50922240, -0.57699826, -0.39476779, -0.14708238,  0.12742149,  0.37835245,  0.57464021,
+ 0.39408127,  0.40327462,  0.40993655,  0.41419345,  0.41506301,  0.41253853}};
+
+/* Right KLT transform for the shape coefficients: one 36-entry matrix per
+   model, same presumed 6x6 row-major layout as WebRtcIsac_kKltT2Gain --
+   TODO(review): confirm against the transform code. */
+const double WebRtcIsac_kKltT2Shape[3][36] = {
+{ 0.13427386, -0.35132558,  0.52506528, -0.59419077,  0.45075085, -0.16312057,  0.29857439, -0.58660147,  0.34265431,  0.20879510,
+-0.56063262,  0.30238345,  0.43308283, -0.41186999, -0.35288681,  0.42768996,  0.36094634, -0.45284910, -0.47116680,  0.02893449,
+ 0.54326135,  0.45249040, -0.06264420, -0.52283830,  0.57137758,  0.44298139,  0.12617554, -0.20819946, -0.42324603, -0.48876443,
+ 0.39597050,  0.40713935,  0.41389880,  0.41512486,  0.41130400,  0.40575001},
+{ 0.16540737, -0.43379435,  0.58165221, -0.55154773,  0.35734028, -0.11935912,  0.29434254, -0.55954817,  0.23549804,  0.33087258,
+-0.58848503,  0.29835834,  0.45464789, -0.38316155, -0.41689708,  0.35607296,  0.41260747, -0.41910198, -0.48633899, -0.04144955,
+ 0.47824583,  0.51050942,  0.01000345, -0.52184032,  0.53488229,  0.42641051,  0.17049774, -0.15849613, -0.43229355, -0.53945045,
+ 0.39582002,  0.41033103,  0.41788713,  0.41688080,  0.41081697,  0.39719658},
+{ 0.13386268, -0.37919915,  0.54989123, -0.57663572,  0.42402636, -0.15362720,  0.29641577, -0.58806770,  0.31381040,  0.26524954,
+-0.56271012,  0.28431868,  0.42699898, -0.41058922, -0.40408270,  0.39215865,  0.40788513, -0.40699735, -0.49846482, -0.01521208,
+ 0.48756040,  0.49479418, -0.00347672, -0.51841384,  0.55513106,  0.41683793,  0.15131217, -0.15613621, -0.41029341, -0.54996461,
+ 0.39402116,  0.40965305,  0.41862791,  0.41730770,  0.41089648,  0.39837262}};
+
+/* means of log gains and LAR coefficients */
+/* Per-model means of the log gains: 12 entries per model.  The alternating
+   value pattern suggests LPC_GAIN_ORDER (2) gains for each of 6 subframes,
+   interleaved -- TODO(review): confirm layout against the quantizer code. */
+const double WebRtcIsac_kLpcMeansGain[3][12] = {
+{-6.86881911, -5.35075273, -6.86792680, -5.36200897, -6.86401538, -5.36921533, -6.86802969, -5.36893966, -6.86538097, -5.36315063,
+-6.85535304, -5.35155315},
+{-6.12914600, -4.78070092, -6.12971780, -4.78382183, -6.12858525, -4.79362198, -6.12926491, -4.79017481, -6.12102401, -4.78346122,
+-6.11441152, -4.78019228},
+{-5.67273484, -3.73876311, -5.65246094, -3.71407895, -5.61716443, -3.68814580, -5.58804560, -3.66334094, -5.54189577, -3.63845640,
+-5.49293185, -3.61760203}};
+
+/* Per-model means of the LAR (shape) coefficients: 108 entries per model,
+   presumably 18 coefficients (LPC_SHAPE_ORDER) for each of 6 subframes --
+   TODO(review): confirm layout against the quantizer code. */
+const double WebRtcIsac_kLpcMeansShape[3][108] = {
+{-0.91232981,  0.26258634, -0.33716701,  0.08477430, -0.03378426,  0.14423909,  0.07036185,  0.06155019,  0.01490385,  0.04138740,
+ 0.01427317,  0.01288970,  0.83872106,  0.25750199,  0.07988929, -0.01957923,  0.00831390,  0.01770300, -0.90957164,  0.25732216,
+-0.33385344,  0.08735740, -0.03715332,  0.14584917,  0.06998990,  0.06131968,  0.01504379,  0.04067339,  0.01428039,  0.01406460,
+ 0.83846243,  0.26169862,  0.08109025, -0.01767055,  0.00970539,  0.01954310, -0.90490803,  0.24656405, -0.33578607,  0.08843286,
+-0.03749139,  0.14443959,  0.07214669,  0.06170993,  0.01449947,  0.04134309,  0.01314762,  0.01413471,  0.83895203,  0.26748062,
+ 0.08197507, -0.01781298,  0.00885967,  0.01922394, -0.90922472,  0.24495889, -0.33921540,  0.08877169, -0.03581332,  0.14199172,
+ 0.07444032,  0.06185940,  0.01502054,  0.04185113,  0.01276579,  0.01355457,  0.83645358,  0.26631720,  0.08119697, -0.01835449,
+ 0.00788512,  0.01846446, -0.90482253,  0.24658310, -0.34019734,  0.08281090, -0.03486038,  0.14359248,  0.07401336,  0.06001471,
+ 0.01528421,  0.04254560,  0.01321472,  0.01240799,  0.83857127,  0.26281654,  0.08174380, -0.02099842,  0.00755176,  0.01699448,
+-0.90132307,  0.25174308, -0.33838268,  0.07883863, -0.02877906,  0.14105407,  0.07220290,  0.06000352,  0.01684879,  0.04226844,
+ 0.01331331,  0.01269244,  0.83832138,  0.25467485,  0.08118028, -0.02120528,  0.00747832,  0.01567212},
+{-1.11639718,  0.35377266,  0.00798929,  0.20165280,  0.07656104,  0.10629964,  0.04894160,  0.10955305, -0.01806405,  0.05082282,
+ 0.01730794,  0.01345957,  0.73717782,  0.05952284,  0.03176204,  0.08195122,  0.01253148,  0.02253385, -1.12053537,  0.35523538,
+ 0.00859646,  0.20007706,  0.07715852,  0.10754596,  0.05165976,  0.10927703, -0.01554395,  0.05178866,  0.01752534,  0.01343468,
+ 0.73489046,  0.06395167,  0.03287798,  0.07972374,  0.01293550,  0.02300929, -1.11772179,  0.35457623,  0.01205524,  0.19926481,
+ 0.08000866,  0.10817921,  0.05052481,  0.11016167, -0.01552091,  0.05155510,  0.01787163,  0.01343778,  0.73142568,  0.06840830,
+ 0.03316828,  0.07902608,  0.01525042,  0.02178127, -1.12120164,  0.36405233,  0.00630305,  0.19799738,  0.07829690,  0.10727588,
+ 0.04017317,  0.10437949, -0.01844109,  0.05021700,  0.01561726,  0.01226571,  0.73438044,  0.06947982,  0.03396317,  0.07858683,
+ 0.01367105,  0.02041955, -1.12146187,  0.35952226,  0.00340090,  0.19700813,  0.07938222,  0.10904137,  0.03921216,  0.10531403,
+-0.01833415,  0.04956231,  0.01399539,  0.01323582,  0.74378099,  0.07059589,  0.03367692,  0.08151462,  0.01182040,  0.02075577,
+-1.11245254,  0.35234230,  0.00687490,  0.20204252,  0.07813186,  0.11081259,  0.04634665,  0.11073238, -0.01637954,  0.05104577,
+ 0.01675122,  0.01448696,  0.74013627,  0.06239059,  0.03129412,  0.08207461,  0.01249475,  0.02189238},
+{-1.27118948,  0.35834331, -0.33499347,  0.13524073,  0.04829079,  0.19542773,  0.05273835,  0.04157974, -0.01755227,  0.01513442,
+ 0.00386630,  0.02199463,  1.14439142,  0.21903073,  0.14750213,  0.12743356,  0.08463334,  0.06839691, -1.28367777,  0.35556287,
+-0.33809405,  0.13627881,  0.04939309,  0.19642571,  0.05354373,  0.04099247, -0.01787481,  0.01472425,  0.00391474,  0.02150716,
+ 1.14739079,  0.21840872,  0.14643624,  0.12724347,  0.08390642,  0.06811938, -1.29007667,  0.35159558, -0.34154267,  0.13295849,
+ 0.04883602,  0.19587595,  0.05452759,  0.04174703, -0.01782110,  0.01388270,  0.00374754,  0.02138105,  1.14333767,  0.21690116,
+ 0.14544599,  0.12606728,  0.08314168,  0.06771389, -1.29856471,  0.35239315, -0.34238732,  0.13277553,  0.04722712,  0.19233156,
+ 0.05366901,  0.04328110, -0.01657749,  0.01444736,  0.00438108,  0.02102563,  1.13548397,  0.21537812,  0.14357377,  0.12525845,
+ 0.08230994,  0.06722511, -1.30663540,  0.34366563, -0.34205544,  0.12861679,  0.04655851,  0.18864359,  0.05351285,  0.04358693,
+-0.01604498,  0.01431907,  0.00395326,  0.02082299,  1.12207794,  0.21167325,  0.14212491,  0.12418671,  0.08155467,  0.06639789,
+-1.31011673,  0.33686271, -0.34379843,  0.12169569,  0.04480323,  0.18637557,  0.05374078,  0.04260827, -0.01588226,  0.01378294,
+ 0.00396009,  0.02112406,  1.10466984,  0.20905894,  0.14107033,  0.12303074,  0.08047136,  0.06588031}};
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_tables.h
new file mode 100644
index 0000000..604d963
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/lpc_tables.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * lpc_tables.h
+ *
+ * header file for coding tables for the LPC coefficients
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_
+
+#include "structs.h"
+
+#include "settings.h"
+
+/* KLT/quantizer configuration.  ORDERLO, ORDERHI and SUBFRAMES are
+   defined in settings.h. */
+#define KLT_STEPSIZE         1.00000000
+#define KLT_NUM_AVG_GAIN     0
+#define KLT_NUM_AVG_SHAPE    0
+#define KLT_NUM_MODELS  3
+#define LPC_GAIN_SCALE     4.000f
+#define LPC_LOBAND_SCALE   2.100f
+#define LPC_LOBAND_ORDER   ORDERLO
+#define LPC_HIBAND_SCALE   0.450f
+#define LPC_HIBAND_ORDER   ORDERHI
+#define LPC_GAIN_ORDER     2
+
+#define LPC_SHAPE_ORDER    (LPC_LOBAND_ORDER + LPC_HIBAND_ORDER)
+
+/* Number of gain/shape KLT coefficients per frame.  The literal bounds
+   12 and 108 used in the declarations below presumably equal
+   KLT_ORDER_GAIN and KLT_ORDER_SHAPE -- TODO(review): confirm. */
+#define KLT_ORDER_GAIN     (LPC_GAIN_ORDER * SUBFRAMES)
+#define KLT_ORDER_SHAPE    (LPC_SHAPE_ORDER * SUBFRAMES)
+/* indices of KLT coefficients used */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltSelIndGain[12];
+
+extern const WebRtc_UWord16 WebRtcIsac_kQKltSelIndShape[108];
+
+/* cdf array for model indicator */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltModelCdf[KLT_NUM_MODELS+1];
+
+/* pointer to cdf array for model indicator */
+extern const WebRtc_UWord16 *WebRtcIsac_kQKltModelCdfPtr[1];
+
+/* initial cdf index for decoder of model indicator */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltModelInitIndex[1];
+
+/* offset to go from rounded value to quantization index */
+extern const short WebRtcIsac_kQKltQuantMinGain[12];
+
+extern const short WebRtcIsac_kQKltQuantMinShape[108];
+
+/* maximum quantization index */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndGain[12];
+
+extern const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndShape[108];
+
+/* index offset */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetGain[KLT_NUM_MODELS][12];
+
+extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetShape[KLT_NUM_MODELS][108];
+
+/* initial cdf index for KLT coefficients */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexGain[KLT_NUM_MODELS][12];
+
+extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexShape[KLT_NUM_MODELS][108];
+
+/* offsets for quantizer representation levels */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltOfLevelsGain[3];
+
+extern const WebRtc_UWord16 WebRtcIsac_kQKltOfLevelsShape[3];
+
+/* quantizer representation levels */
+extern const double WebRtcIsac_kQKltLevelsGain[1176];
+
+extern const double WebRtcIsac_kQKltLevelsShape[1735];
+
+/* cdf tables for quantizer indices */
+extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfGain[1212];
+
+extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfShape[2059];
+
+/* pointers to cdf tables for quantizer indices */
+extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrGain[KLT_NUM_MODELS][12];
+
+extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrShape[KLT_NUM_MODELS][108];
+
+/* code length for all coefficients using different models */
+extern const double WebRtcIsac_kQKltCodeLenGain[392];
+
+extern const double WebRtcIsac_kQKltCodeLenShape[578];
+
+/* left KLT transforms */
+extern const double WebRtcIsac_kKltT1Gain[KLT_NUM_MODELS][4];
+
+extern const double WebRtcIsac_kKltT1Shape[KLT_NUM_MODELS][324];
+
+/* right KLT transforms */
+extern const double WebRtcIsac_kKltT2Gain[KLT_NUM_MODELS][36];
+
+extern const double WebRtcIsac_kKltT2Shape[KLT_NUM_MODELS][36];
+
+/* means of log gains and LAR coefficients */
+extern const double WebRtcIsac_kLpcMeansGain[KLT_NUM_MODELS][12];
+
+extern const double WebRtcIsac_kLpcMeansShape[KLT_NUM_MODELS][108];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/os_specific_inline.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/os_specific_inline.h
new file mode 100644
index 0000000..c469c2e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/os_specific_inline.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_OS_SPECIFIC_INLINE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_OS_SPECIFIC_INLINE_H_
+
+#include <math.h>
+#include "typedefs.h"
+
+// WebRtcIsac_lrint: round a double to the nearest long int.  Three
+// platform-specific implementations are selected below.
+// TODO(turaj): switch to WEBRTC_POSIX when available
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+// POSIX platforms: use the C99 lrint() from <math.h> directly.
+#define WebRtcIsac_lrint lrint
+#elif (defined(WEBRTC_ARCH_X86) && defined(WIN32))
+// 32-bit x86 Windows (MSVC inline asm): x87 FISTP stores the value using
+// the FPU's current rounding mode -- round-to-nearest-even by default.
+static __inline long int WebRtcIsac_lrint(double x_dbl) {
+  long int x_int;
+
+  __asm {
+    fld x_dbl
+    fistp x_int
+  };
+
+  return x_int;
+}
+#else // Do a slow but correct implementation of lrint
+
+// Portable fallback: floor(x + 0.499999999999) approximates
+// round-to-nearest.  NOTE(review): the 0.499999999999 bias means exact .5
+// values round down, which differs from C99 lrint's round-to-nearest-even;
+// confirm callers only need approximate nearest-integer behavior.
+static __inline long int WebRtcIsac_lrint(double x_dbl) {
+  long int x_int;
+  x_int = (long int)floor(x_dbl + 0.499999999999);
+  return x_int;
+}
+
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_OS_SPECIFIC_INLINE_H_
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_estimator.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_estimator.c
new file mode 100644
index 0000000..75525f6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_estimator.c
@@ -0,0 +1,622 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pitch_estimator.h"
+
+#include <math.h>
+#include <memory.h>
+#ifdef WEBRTC_ANDROID
+#include <stdlib.h>
+#endif
+
+/* Symmetric 8-tap interpolation window; coefficients are mirrored around
+ * the center pair, so the filter interpolates halfway between taps 0 and 1. */
+static const double kInterpolWin[8] = {-0.00067556028640,  0.02184247643159, -0.12203175715679,  0.60086484101160,
+                                       0.60086484101160, -0.12203175715679,  0.02184247643159, -0.00067556028640};
+
+/* interpolation filter: writes into *intrp the dot product of kInterpolWin
+ * with data_ptr[-3..4], i.e. the input interpolated at the half-sample
+ * point between data_ptr[0] and data_ptr[1]. */
+__inline static void IntrepolFilter(double *data_ptr, double *intrp)
+{
+  *intrp = kInterpolWin[0] * data_ptr[-3];
+  *intrp += kInterpolWin[1] * data_ptr[-2];
+  *intrp += kInterpolWin[2] * data_ptr[-1];
+  *intrp += kInterpolWin[3] * data_ptr[0];
+  *intrp += kInterpolWin[4] * data_ptr[1];
+  *intrp += kInterpolWin[5] * data_ptr[2];
+  *intrp += kInterpolWin[6] * data_ptr[3];
+  *intrp += kInterpolWin[7] * data_ptr[4];
+}
+
+
+/* 2D parabolic interpolation */
+/* Fits a 2-D quadratic to the 3x3 grid of surface values T around a peak,
+ * refines the peak position (offsets are *added* to *x and *y) and stores
+ * the interpolated peak height in *peak_val.  Falls back to the center
+ * value T[1][1] when the quadratic is singular or ill-conditioned. */
+/* probably some 0.5 factors can be eliminated, and the square-roots can be removed from the Cholesky fact. */
+__inline static void Intrpol2D(double T[3][3], double *x, double *y, double *peak_val)
+{
+  double c, b[2], A[2][2];
+  double t1, t2, d;
+  double delta1, delta2;
+
+
+  // double T[3][3] = {{-1.25, -.25,-.25}, {-.25, .75, .75}, {-.25, .75, .75}};
+  // should result in: delta1 = 0.5;  delta2 = 0.0;  peak_val = 1.0
+
+  /* build gradient b and (negated) curvature matrix A from the 3x3 grid */
+  c = T[1][1];
+  b[0] = 0.5 * (T[1][2] + T[2][1] - T[0][1] - T[1][0]);
+  b[1] = 0.5 * (T[1][0] + T[2][1] - T[0][1] - T[1][2]);
+  A[0][1] = -0.5 * (T[0][1] + T[2][1] - T[1][0] - T[1][2]);
+  t1 = 0.5 * (T[0][0] + T[2][2]) - c;
+  t2 = 0.5 * (T[2][0] + T[0][2]) - c;
+  d = (T[0][1] + T[1][2] + T[1][0] + T[2][1]) - 4.0 * c - t1 - t2;
+  A[0][0] = -t1 - 0.5 * d;
+  A[1][1] = -t2 - 0.5 * d;
+
+  /* deal with singularities or ill-conditioned cases */
+  if ( (A[0][0] < 1e-7) || ((A[0][0] * A[1][1] - A[0][1] * A[0][1]) < 1e-7) ) {
+    *peak_val = T[1][1];
+    return;
+  }
+
+  /* Cholesky decomposition: replace A by upper-triangular factor */
+  A[0][0] = sqrt(A[0][0]);
+  A[0][1] = A[0][1] / A[0][0];
+  A[1][1] = sqrt(A[1][1] - A[0][1] * A[0][1]);
+
+  /* compute [x; y] = -0.5 * inv(A) * b */
+  t1 = b[0] / A[0][0];
+  t2 = (b[1] - t1 * A[0][1]) / A[1][1];
+  delta2 = t2 / A[1][1];
+  delta1 = 0.5 * (t1 - delta2 * A[0][1]) / A[0][0];
+  delta2 *= 0.5;
+
+  /* limit norm: keep the refined position close to the grid center */
+  t1 = delta1 * delta1 + delta2 * delta2;
+  if (t1 > 1.0) {
+    delta1 /= t1;
+    delta2 /= t1;
+  }
+
+  *peak_val = 0.5 * (b[0] * delta1 + b[1] * delta2) + c;
+
+  *x += delta1;
+  *y += delta2;
+}
+
+
+/* Normalized correlation of the input with itself shifted by
+ * PITCH_MAX_LAG/2 + 2 samples, over PITCH_LAG_SPAN2 lags.
+ * outcorr is filled back-to-front (outcorr[PITCH_LAG_SPAN2-1] first);
+ * each value is divided by sqrt of the running energy `ysum` of the
+ * corresponding PITCH_CORR_LEN2-sample window of `in`, which is updated
+ * incrementally as the window slides (add newest sample, drop oldest). */
+static void PCorr(const double *in, double *outcorr)
+{
+  double sum, ysum, prod;
+  const double *x, *inptr;
+  int k, n;
+
+  //ysum = 1e-6;          /* use this with float (i.s.o. double)! */
+  ysum = 1e-13;           /* small floor avoids division by zero below */
+  sum = 0.0;
+  x = in + PITCH_MAX_LAG/2 + 2;
+  for (n = 0; n < PITCH_CORR_LEN2; n++) {
+    ysum += in[n] * in[n];
+    sum += x[n] * in[n];
+  }
+
+  outcorr += PITCH_LAG_SPAN2 - 1;     /* index of last element in array */
+  *outcorr = sum / sqrt(ysum);
+
+  for (k = 1; k < PITCH_LAG_SPAN2; k++) {
+    /* slide energy window one sample forward */
+    ysum -= in[k-1] * in[k-1];
+    ysum += in[PITCH_CORR_LEN2 + k - 1] * in[PITCH_CORR_LEN2 + k - 1];
+    sum = 0.0;
+    inptr = &in[k];
+    prod = x[0] * inptr[0];
+    for (n = 1; n < PITCH_CORR_LEN2; n++) {
+      sum += prod;
+      prod = x[n] * inptr[n];
+    }
+    sum += prod;
+    outcorr--;
+    *outcorr = sum / sqrt(ysum);
+  }
+}
+
+
+/* Initial pitch-lag estimation.
+ * The input is decimated, correlated at two half-frame offsets (PCorr),
+ * and the two correlation vectors are combined into a 2-D correlation
+ * surface over (lag, lag-change).  The surface is searched for peaks;
+ * the best peak — after biasing towards the previous frame's lag and,
+ * later, towards short lags — is refined with 2-D parabolic interpolation.
+ *
+ *  in       : PITCH_FRAME_LEN input samples.
+ *  old_lag  : pitch lag estimated for the previous frame (biases search).
+ *  old_gain : previous pitch gain (scales the old-lag bias).
+ *  State    : decimation/buffer state, updated for the next frame.
+ *  lags     : output, 4 entries: [0]=[1]=first-half lag, [2]=[3]=second-half
+ *             lag, both clamped to [PITCH_MIN_LAG, PITCH_MAX_LAG].
+ */
+void WebRtcIsac_InitializePitch(const double *in,
+                                const double old_lag,
+                                const double old_gain,
+                                PitchAnalysisStruct *State,
+                                double *lags)
+{
+  double buf_dec[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2];
+  double ratio, log_lag, gain_bias;
+  double bias;
+  double corrvec1[PITCH_LAG_SPAN2];
+  double corrvec2[PITCH_LAG_SPAN2];
+  int m, k;
+  // Allocating 10 extra entries at the beginning of the CorrSurf
+  // (IntrepolFilter below reads a few samples before a peak's index).
+  double corrSurfBuff[10 + (2*PITCH_BW+3)*(PITCH_LAG_SPAN2+4)];
+  double* CorrSurf[2*PITCH_BW+3];
+  double *CorrSurfPtr1, *CorrSurfPtr2;
+  double LagWin[3] = {0.2, 0.5, 0.98};
+  int ind1, ind2, peaks_ind, peak, max_ind;
+  int peaks[PITCH_MAX_NUM_PEAKS];
+  double adj, gain_tmp;
+  double corr, corr_max;
+  double intrp_a, intrp_b, intrp_c, intrp_d;
+  double peak_vals[PITCH_MAX_NUM_PEAKS];
+  double lags1[PITCH_MAX_NUM_PEAKS];
+  double lags2[PITCH_MAX_NUM_PEAKS];
+  double T[3][3];
+  int row;
+
+  /* set up row pointers into the flat surface buffer */
+  for(k = 0; k < 2*PITCH_BW+3; k++)
+  {
+    CorrSurf[k] = &corrSurfBuff[10 + k * (PITCH_LAG_SPAN2+4)];
+  }
+  /* reset CorrSurf matrix */
+  memset(corrSurfBuff, 0, sizeof(double) * (10 + (2*PITCH_BW+3) * (PITCH_LAG_SPAN2+4)));
+
+  /* initialized to silence compiler warnings; max_ind is also the
+   * fallback result when no peak qualifies (see else-branch at the end) */
+  max_ind = 0;
+  peak = 0;
+
+  /* copy old values from state buffer */
+  memcpy(buf_dec, State->dec_buffer, sizeof(double) * (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2));
+
+  /* decimation; put result after the old values */
+  WebRtcIsac_DecimateAllpass(in, State->decimator_state, PITCH_FRAME_LEN,
+                             &buf_dec[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2]);
+
+  /* low-pass filtering */
+  for (k = PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; k < PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2; k++)
+    buf_dec[k] += 0.75 * buf_dec[k-1] - 0.25 * buf_dec[k-2];
+
+  /* copy end part back into state buffer */
+  memcpy(State->dec_buffer, buf_dec+PITCH_FRAME_LEN/2, sizeof(double) * (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2));
+
+  /* compute correlation for first and second half of the frame */
+  PCorr(buf_dec, corrvec1);
+  PCorr(buf_dec + PITCH_CORR_STEP2, corrvec2);
+
+  /* bias towards pitch lag of previous frame */
+  log_lag = log(0.5 * old_lag);
+  gain_bias = 4.0 * old_gain * old_gain;
+  if (gain_bias > 0.8) gain_bias = 0.8;
+  for (k = 0; k < PITCH_LAG_SPAN2; k++)
+  {
+    /* Gaussian bump (in log-lag) centered on the previous lag */
+    ratio = log((double) (k + (PITCH_MIN_LAG/2-2))) - log_lag;
+    bias = 1.0 + gain_bias * exp(-5.0 * ratio * ratio);
+    corrvec1[k] *= bias;
+  }
+
+  /* taper correlation functions at both ends */
+  for (k = 0; k < 3; k++) {
+    gain_tmp = LagWin[k];
+    corrvec1[k] *= gain_tmp;
+    corrvec2[k] *= gain_tmp;
+    corrvec1[PITCH_LAG_SPAN2-1-k] *= gain_tmp;
+    corrvec2[PITCH_LAG_SPAN2-1-k] *= gain_tmp;
+  }
+
+  corr_max = 0.0;
+  /* fill middle row of correlation surface (no lag change between halves) */
+  ind1 = 0;
+  ind2 = 0;
+  CorrSurfPtr1 = &CorrSurf[PITCH_BW][2];
+  for (k = 0; k < PITCH_LAG_SPAN2; k++) {
+    corr = corrvec1[ind1++] + corrvec2[ind2++];
+    CorrSurfPtr1[k] = corr;
+    if (corr > corr_max) {
+      corr_max = corr;  /* update maximum */
+      max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]);
+    }
+  }
+  /* fill first and last rows of correlation surface (largest lag change) */
+  ind1 = 0;
+  ind2 = PITCH_BW;
+  CorrSurfPtr1 = &CorrSurf[0][2];
+  CorrSurfPtr2 = &CorrSurf[2*PITCH_BW][PITCH_BW+2];
+  for (k = 0; k < PITCH_LAG_SPAN2-PITCH_BW; k++) {
+    ratio = ((double) (ind1 + 12)) / ((double) (ind2 + 12));
+    adj = 0.2 * ratio * (2.0 - ratio);   /* adjustment factor; inverse parabola as a function of ratio */
+    corr = adj * (corrvec1[ind1] + corrvec2[ind2]);
+    CorrSurfPtr1[k] = corr;
+    if (corr > corr_max) {
+      corr_max = corr;  /* update maximum */
+      max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]);
+    }
+    corr = adj * (corrvec1[ind2++] + corrvec2[ind1++]);
+    CorrSurfPtr2[k] = corr;
+    if (corr > corr_max) {
+      corr_max = corr;  /* update maximum */
+      max_ind = (int)(&CorrSurfPtr2[k] - &CorrSurf[0][0]);
+    }
+  }
+  /* fill second and next to last rows of correlation surface */
+  ind1 = 0;
+  ind2 = PITCH_BW-1;
+  CorrSurfPtr1 = &CorrSurf[1][2];
+  CorrSurfPtr2 = &CorrSurf[2*PITCH_BW-1][PITCH_BW+1];
+  for (k = 0; k < PITCH_LAG_SPAN2-PITCH_BW+1; k++) {
+    ratio = ((double) (ind1 + 12)) / ((double) (ind2 + 12));
+    adj = 0.9 * ratio * (2.0 - ratio);   /* adjustment factor; inverse parabola as a function of ratio */
+    corr = adj * (corrvec1[ind1] + corrvec2[ind2]);
+    CorrSurfPtr1[k] = corr;
+    if (corr > corr_max) {
+      corr_max = corr;  /* update maximum */
+      max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]);
+    }
+    corr = adj * (corrvec1[ind2++] + corrvec2[ind1++]);
+    CorrSurfPtr2[k] = corr;
+    if (corr > corr_max) {
+      corr_max = corr;  /* update maximum */
+      max_ind = (int)(&CorrSurfPtr2[k] - &CorrSurf[0][0]);
+    }
+  }
+  /* fill remainder of correlation surface */
+  for (m = 2; m < PITCH_BW; m++) {
+    ind1 = 0;
+    ind2 = PITCH_BW - m;         /* always larger than ind1 */
+    CorrSurfPtr1 = &CorrSurf[m][2];
+    CorrSurfPtr2 = &CorrSurf[2*PITCH_BW-m][PITCH_BW+2-m];
+    for (k = 0; k < PITCH_LAG_SPAN2-PITCH_BW+m; k++) {
+      ratio = ((double) (ind1 + 12)) / ((double) (ind2 + 12));
+      adj = ratio * (2.0 - ratio);    /* adjustment factor; inverse parabola as a function of ratio */
+      corr = adj * (corrvec1[ind1] + corrvec2[ind2]);
+      CorrSurfPtr1[k] = corr;
+      if (corr > corr_max) {
+        corr_max = corr;  /* update maximum */
+        max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]);
+      }
+      corr = adj * (corrvec1[ind2++] + corrvec2[ind1++]);
+      CorrSurfPtr2[k] = corr;
+      if (corr > corr_max) {
+        corr_max = corr;  /* update maximum */
+        max_ind = (int)(&CorrSurfPtr2[k] - &CorrSurf[0][0]);
+      }
+    }
+  }
+
+  /* threshold value to qualify as a peak */
+  corr_max *= 0.6;
+
+  peaks_ind = 0;
+  /* find peaks: a cell qualifies if above threshold and larger than its
+   * two horizontal neighbors in the flattened surface (row stride is
+   * PITCH_LAG_SPAN2+4, so +/-(PITCH_LAG_SPAN2+4) and +/-(PITCH_LAG_SPAN2+5)
+   * are the adjacent-row neighbors) */
+  for (m = 1; m < PITCH_BW+1; m++) {
+    if (peaks_ind == PITCH_MAX_NUM_PEAKS) break;
+    CorrSurfPtr1 = &CorrSurf[m][2];
+    for (k = 2; k < PITCH_LAG_SPAN2-PITCH_BW-2+m; k++) {
+      corr = CorrSurfPtr1[k];
+      if (corr > corr_max) {
+        if ( (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+5)]) && (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+4)]) ) {
+          if ( (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+4)]) && (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+5)]) ) {
+            /* found a peak; store index into matrix */
+            peaks[peaks_ind++] = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]);
+            if (peaks_ind == PITCH_MAX_NUM_PEAKS) break;
+          }
+        }
+      }
+    }
+  }
+  for (m = PITCH_BW+1; m < 2*PITCH_BW; m++) {
+    if (peaks_ind == PITCH_MAX_NUM_PEAKS) break;
+    CorrSurfPtr1 = &CorrSurf[m][2];
+    for (k = 2+m-PITCH_BW; k < PITCH_LAG_SPAN2-2; k++) {
+      corr = CorrSurfPtr1[k];
+      if (corr > corr_max) {
+        if ( (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+5)]) && (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+4)]) ) {
+          if ( (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+4)]) && (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+5)]) ) {
+            /* found a peak; store index into matrix */
+            peaks[peaks_ind++] = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]);
+            if (peaks_ind == PITCH_MAX_NUM_PEAKS) break;
+          }
+        }
+      }
+    }
+  }
+
+  if (peaks_ind > 0) {
+    /* examine each peak */
+    CorrSurfPtr1 = &CorrSurf[0][0];
+    for (k = 0; k < peaks_ind; k++) {
+      peak = peaks[k];
+
+      /* compute four interpolated values around current peak */
+      IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)], &intrp_a);
+      IntrepolFilter(&CorrSurfPtr1[peak - 1            ], &intrp_b);
+      IntrepolFilter(&CorrSurfPtr1[peak                ], &intrp_c);
+      IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)], &intrp_d);
+
+      /* determine maximum of the interpolated values */
+      corr = CorrSurfPtr1[peak];
+      corr_max = intrp_a;
+      if (intrp_b > corr_max) corr_max = intrp_b;
+      if (intrp_c > corr_max) corr_max = intrp_c;
+      if (intrp_d > corr_max) corr_max = intrp_d;
+
+      /* determine where the peak sits and fill a 3x3 matrix around it
+       * (re-centered on whichever half-sample point is largest, with the
+       * half-frame lags lags1/lags2 adjusted by +/-0.5 accordingly) */
+      row = peak / (PITCH_LAG_SPAN2+4);
+      lags1[k] = (double) ((peak - row * (PITCH_LAG_SPAN2+4)) + PITCH_MIN_LAG/2 - 4);
+      lags2[k] = (double) (lags1[k] + PITCH_BW - row);
+      if ( corr > corr_max ) {
+        T[0][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)];
+        T[2][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)];
+        T[1][1] = corr;
+        T[0][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)];
+        T[2][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)];
+        T[1][0] = intrp_a;
+        T[0][1] = intrp_b;
+        T[2][1] = intrp_c;
+        T[1][2] = intrp_d;
+      } else {
+        if (intrp_a == corr_max) {
+          lags1[k] -= 0.5;
+          lags2[k] += 0.5;
+          IntrepolFilter(&CorrSurfPtr1[peak - 2*(PITCH_LAG_SPAN2+5)], &T[0][0]);
+          IntrepolFilter(&CorrSurfPtr1[peak - (2*PITCH_LAG_SPAN2+9)], &T[2][0]);
+          T[1][1] = intrp_a;
+          T[0][2] = intrp_b;
+          T[2][2] = intrp_c;
+          T[1][0] = CorrSurfPtr1[peak - (2*PITCH_LAG_SPAN2+9)];
+          T[0][1] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)];
+          T[2][1] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)];
+          T[1][2] = corr;
+        } else if (intrp_b == corr_max) {
+          lags1[k] -= 0.5;
+          lags2[k] -= 0.5;
+          IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+6)], &T[0][0]);
+          T[2][0] = intrp_a;
+          T[1][1] = intrp_b;
+          IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+3)], &T[0][2]);
+          T[2][2] = intrp_d;
+          T[1][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)];
+          T[0][1] = CorrSurfPtr1[peak - 1];
+          T[2][1] = corr;
+          T[1][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)];
+        } else if (intrp_c == corr_max) {
+          lags1[k] += 0.5;
+          lags2[k] += 0.5;
+          T[0][0] = intrp_a;
+          IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)], &T[2][0]);
+          T[1][1] = intrp_c;
+          T[0][2] = intrp_d;
+          IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)], &T[2][2]);
+          T[1][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)];
+          T[0][1] = corr;
+          T[2][1] = CorrSurfPtr1[peak + 1];
+          T[1][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)];
+        } else {
+          lags1[k] += 0.5;
+          lags2[k] -= 0.5;
+          T[0][0] = intrp_b;
+          T[2][0] = intrp_c;
+          T[1][1] = intrp_d;
+          IntrepolFilter(&CorrSurfPtr1[peak + 2*(PITCH_LAG_SPAN2+4)], &T[0][2]);
+          IntrepolFilter(&CorrSurfPtr1[peak + (2*PITCH_LAG_SPAN2+9)], &T[2][2]);
+          T[1][0] = corr;
+          T[0][1] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)];
+          T[2][1] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)];
+          T[1][2] = CorrSurfPtr1[peak + (2*PITCH_LAG_SPAN2+9)];
+        }
+      }
+
+      /* 2D parabolic interpolation gives more accurate lags and peak value */
+      Intrpol2D(T, &lags1[k], &lags2[k], &peak_vals[k]);
+    }
+
+    /* determine the highest peak, after applying a bias towards short lags */
+    corr_max = 0.0;
+    for (k = 0; k < peaks_ind; k++) {
+      corr = peak_vals[k] * pow(PITCH_PEAK_DECAY, log(lags1[k] + lags2[k]));
+      if (corr > corr_max) {
+        corr_max = corr;
+        peak = k;
+      }
+    }
+
+    /* undo the factor-2 decimation and clamp to the valid lag range */
+    lags1[peak] *= 2.0;
+    lags2[peak] *= 2.0;
+
+    if (lags1[peak] < (double) PITCH_MIN_LAG) lags1[peak] = (double) PITCH_MIN_LAG;
+    if (lags2[peak] < (double) PITCH_MIN_LAG) lags2[peak] = (double) PITCH_MIN_LAG;
+    if (lags1[peak] > (double) PITCH_MAX_LAG) lags1[peak] = (double) PITCH_MAX_LAG;
+    if (lags2[peak] > (double) PITCH_MAX_LAG) lags2[peak] = (double) PITCH_MAX_LAG;
+
+    /* store lags of highest peak in output array */
+    lags[0] = lags1[peak];
+    lags[1] = lags1[peak];
+    lags[2] = lags2[peak];
+    lags[3] = lags2[peak];
+  }
+  else
+  {
+    /* no qualifying peak found: fall back to the surface's global maximum */
+    row = max_ind / (PITCH_LAG_SPAN2+4);
+    lags1[0] = (double) ((max_ind - row * (PITCH_LAG_SPAN2+4)) + PITCH_MIN_LAG/2 - 4);
+    lags2[0] = (double) (lags1[0] + PITCH_BW - row);
+
+    if (lags1[0] < (double) PITCH_MIN_LAG) lags1[0] = (double) PITCH_MIN_LAG;
+    if (lags2[0] < (double) PITCH_MIN_LAG) lags2[0] = (double) PITCH_MIN_LAG;
+    if (lags1[0] > (double) PITCH_MAX_LAG) lags1[0] = (double) PITCH_MAX_LAG;
+    if (lags2[0] > (double) PITCH_MAX_LAG) lags2[0] = (double) PITCH_MAX_LAG;
+
+    /* store lags of highest peak in output array */
+    lags[0] = lags1[0];
+    lags[1] = lags1[0];
+    lags[2] = lags2[0];
+    lags[3] = lags2[0];
+  }
+}
+
+
+
+/* create weighting matrix by orthogonalizing a basis of polynomials of increasing order
+ * t = (0:4)';
+ * A = [t.^0, t.^1, t.^2, t.^3, t.^4];
+ * [Q, dummy] = qr(A);
+ * P.Weight = Q * diag([0, .1, .5, 1, 1]) * Q'; */
+/* 5x5 matrix penalizing higher-order variation across the 5-point gain
+ * trajectory [old_gain, gains[0..3]]; used by WebRtcIsac_PitchAnalysis
+ * for the gain-fluctuation term of the cost function. */
+static const double kWeight[5][5] = {
+  { 0.29714285714286,  -0.30857142857143,  -0.05714285714286,   0.05142857142857,  0.01714285714286},
+  {-0.30857142857143,   0.67428571428571,  -0.27142857142857,  -0.14571428571429,  0.05142857142857},
+  {-0.05714285714286,  -0.27142857142857,   0.65714285714286,  -0.27142857142857, -0.05714285714286},
+  { 0.05142857142857,  -0.14571428571429,  -0.27142857142857,   0.67428571428571, -0.30857142857143},
+  { 0.01714285714286,   0.05142857142857,  -0.05714285714286,  -0.30857142857143,  0.29714285714286}
+};
+
+
+/* Full pitch analysis of one frame: estimates four pitch lags (via
+ * WebRtcIsac_InitializePitch) and four pitch gains, and produces the
+ * pitch-filtered lookahead output in `out`.  The gains are found with two
+ * Newton iterations minimizing a cost with three terms: whitened-output
+ * energy (weight Wnrg), gain-fluctuation damping (Wfluct, via kWeight),
+ * and a barrier penalty pushing gains below 1 (Wgain). */
+void WebRtcIsac_PitchAnalysis(const double *in,               /* PITCH_FRAME_LEN samples */
+                              double *out,                    /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
+                              PitchAnalysisStruct *State,
+                              double *lags,
+                              double *gains)
+{
+  double HPin[PITCH_FRAME_LEN];
+  double Weighted[PITCH_FRAME_LEN];
+  double Whitened[PITCH_FRAME_LEN + QLOOKAHEAD];
+  double inbuf[PITCH_FRAME_LEN + QLOOKAHEAD];
+  double out_G[PITCH_FRAME_LEN + QLOOKAHEAD];          // could be removed by using out instead
+  double out_dG[4][PITCH_FRAME_LEN + QLOOKAHEAD];
+  double old_lag, old_gain;
+  double nrg_wht, tmp;
+  double Wnrg, Wfluct, Wgain;
+  double H[4][4];
+  double grad[4];
+  double dG[4];
+  int k, m, n, iter;
+
+  /* high pass filtering using second order pole-zero filter */
+  WebRtcIsac_Highpass(in, HPin, State->hp_state, PITCH_FRAME_LEN);
+
+  /* copy from state into buffer */
+  memcpy(Whitened, State->whitened_buf, sizeof(double) * QLOOKAHEAD);
+
+  /* compute weighted and whitened signals */
+  WebRtcIsac_WeightingFilter(HPin, &Weighted[0], &Whitened[QLOOKAHEAD], &(State->Wghtstr));
+
+  /* copy from buffer into state */
+  memcpy(State->whitened_buf, Whitened+PITCH_FRAME_LEN, sizeof(double) * QLOOKAHEAD);
+
+  old_lag = State->PFstr_wght.oldlagp[0];
+  old_gain = State->PFstr_wght.oldgainp[0];
+
+  /* initial pitch estimate */
+  WebRtcIsac_InitializePitch(Weighted, old_lag, old_gain, State, lags);
+
+
+  /* Iterative optimization of lags - to be done */
+
+  /* compute energy of whitened signal */
+  nrg_wht = 0.0;
+  for (k = 0; k < PITCH_FRAME_LEN + QLOOKAHEAD; k++)
+    nrg_wht += Whitened[k] * Whitened[k];
+
+
+  /* Iterative optimization of gains */
+
+  /* set weights for energy, gain fluctuation, and spectral gain penalty functions */
+  Wnrg = 1.0 / nrg_wht;
+  Wgain = 0.005;
+  Wfluct = 3.0;
+
+  /* set initial gains */
+  for (k = 0; k < 4; k++)
+    gains[k] = PITCH_MAX_GAIN_06;
+
+  /* two iterations should be enough */
+  for (iter = 0; iter < 2; iter++) {
+    /* compute Jacobian of pre-filter output towards gains */
+    WebRtcIsac_PitchfilterPre_gains(Whitened, out_G, out_dG, &(State->PFstr_wght), lags, gains);
+
+    /* gradient and approximate Hessian (lower triangle) for minimizing the filter's output power */
+    for (k = 0; k < 4; k++) {
+      tmp = 0.0;
+      for (n = 0; n < PITCH_FRAME_LEN + QLOOKAHEAD; n++)
+        tmp += out_G[n] * out_dG[k][n];
+      grad[k] = tmp * Wnrg;
+    }
+    /* Gauss-Newton Hessian approximation: H ~= J' * J */
+    for (k = 0; k < 4; k++) {
+      for (m = 0; m <= k; m++) {
+        tmp = 0.0;
+        for (n = 0; n < PITCH_FRAME_LEN + QLOOKAHEAD; n++)
+          tmp += out_dG[m][n] * out_dG[k][n];
+        H[k][m] = tmp * Wnrg;
+      }
+    }
+
+    /* add gradient and Hessian (lower triangle) for dampening fast gain changes */
+    for (k = 0; k < 4; k++) {
+      tmp = kWeight[k+1][0] * old_gain;
+      for (m = 0; m < 4; m++)
+        tmp += kWeight[k+1][m+1] * gains[m];
+      grad[k] += tmp * Wfluct;
+    }
+    for (k = 0; k < 4; k++) {
+      for (m = 0; m <= k; m++) {
+        H[k][m] += kWeight[k+1][m+1] * Wfluct;
+      }
+    }
+
+    /* add gradient and Hessian for dampening gain (barrier at gain = 1;
+     * the 4th gain uses a stronger 1.33x / 2.66x weighting) */
+    for (k = 0; k < 3; k++) {
+      tmp = 1.0 / (1 - gains[k]);
+      grad[k] += tmp * tmp * Wgain;
+      H[k][k] += 2.0 * tmp * (tmp * tmp * Wgain);
+    }
+    tmp = 1.0 / (1 - gains[3]);
+    grad[3] += 1.33 * (tmp * tmp * Wgain);
+    H[3][3] += 2.66 * tmp * (tmp * tmp * Wgain);
+
+
+    /* compute Cholesky factorization of Hessian (LDL'-style)
+     * by overwriting the upper triangle; scale factors on diagonal
+     * (for non pc-platforms store the inverse of the diagonals separately to minimize divisions) */
+    H[0][1] = H[1][0] / H[0][0];
+    H[0][2] = H[2][0] / H[0][0];
+    H[0][3] = H[3][0] / H[0][0];
+    H[1][1] -= H[0][0] * H[0][1] * H[0][1];
+    H[1][2] = (H[2][1] - H[0][1] * H[2][0]) / H[1][1];
+    H[1][3] = (H[3][1] - H[0][1] * H[3][0]) / H[1][1];
+    H[2][2] -= H[0][0] * H[0][2] * H[0][2] + H[1][1] * H[1][2] * H[1][2];
+    H[2][3] = (H[3][2] - H[0][2] * H[3][0] - H[1][2] * H[1][1] * H[1][3]) / H[2][2];
+    H[3][3] -= H[0][0] * H[0][3] * H[0][3] + H[1][1] * H[1][3] * H[1][3] + H[2][2] * H[2][3] * H[2][3];
+
+    /* Compute update as  delta_gains = -inv(H) * grad */
+    /* copy and negate */
+    for (k = 0; k < 4; k++)
+      dG[k] = -grad[k];
+    /* forward substitution */
+    dG[1] -= dG[0] * H[0][1];
+    dG[2] -= dG[0] * H[0][2] + dG[1] * H[1][2];
+    dG[3] -= dG[0] * H[0][3] + dG[1] * H[1][3] + dG[2] * H[2][3];
+    /* scale by inverse diagonal */
+    for (k = 0; k < 4; k++)
+      dG[k] /= H[k][k];
+    /* back substitution */
+    dG[2] -= dG[3] * H[2][3];
+    dG[1] -= dG[3] * H[1][3] + dG[2] * H[1][2];
+    dG[0] -= dG[3] * H[0][3] + dG[2] * H[0][2] + dG[1] * H[0][1];
+
+    /* update gains and clamp to [0, PITCH_MAX_GAIN] */
+    for (k = 0; k < 4; k++) {
+      gains[k] += dG[k];
+      if (gains[k] > PITCH_MAX_GAIN)
+        gains[k] = PITCH_MAX_GAIN;
+      else if (gains[k] < 0.0)
+        gains[k] = 0.0;
+    }
+  }
+
+  /* update state for next frame */
+  WebRtcIsac_PitchfilterPre(Whitened, out, &(State->PFstr_wght), lags, gains);
+
+  /* concatenate previous input's end and current input */
+  memcpy(inbuf, State->inbuf, sizeof(double) * QLOOKAHEAD);
+  memcpy(inbuf+QLOOKAHEAD, in, sizeof(double) * PITCH_FRAME_LEN);
+
+  /* lookahead pitch filtering for masking analysis */
+  WebRtcIsac_PitchfilterPre_la(inbuf, out, &(State->PFstr), lags, gains);
+
+  /* store last part of input */
+  for (k = 0; k < QLOOKAHEAD; k++)
+    State->inbuf[k] = inbuf[k + PITCH_FRAME_LEN];
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_estimator.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_estimator.h
new file mode 100644
index 0000000..f5d9356
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_estimator.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_estimator.h
+ *
+ * Pitch functions
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_ESTIMATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_ESTIMATOR_H_
+
+#include "structs.h"
+
+
+
+/* Full pitch analysis of one frame: estimates 4 pitch lags and 4 pitch
+ * gains and produces the pitch-filtered lookahead output. */
+void WebRtcIsac_PitchAnalysis(const double *in,               /* PITCH_FRAME_LEN samples */
+                              double *out,                    /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
+                              PitchAnalysisStruct *State,
+                              double *lags,
+                              double *gains);
+
+/* Initial pitch-lag estimate from a correlation-surface search, biased
+ * towards the previous frame's lag/gain.  Fills lags[0..3]. */
+void WebRtcIsac_InitializePitch(const double *in,
+                                const double old_lag,
+                                const double old_gain,
+                                PitchAnalysisStruct *State,
+                                double *lags);
+
+/* Pitch pre-filter (defined in pitch_filter.c). */
+void WebRtcIsac_PitchfilterPre(double *indat,
+                               double *outdat,
+                               PitchFiltstr *pfp,
+                               double *lags,
+                               double *gains);
+
+/* Pitch post-filter; counterpart of the pre-filter (see pitch_filter.c). */
+void WebRtcIsac_PitchfilterPost(double *indat,
+                                double *outdat,
+                                PitchFiltstr *pfp,
+                                double *lags,
+                                double *gains);
+
+/* Pitch pre-filter variant with lookahead, used for masking analysis. */
+void WebRtcIsac_PitchfilterPre_la(double *indat,
+                                  double *outdat,
+                                  PitchFiltstr *pfp,
+                                  double *lags,
+                                  double *gains);
+
+/* Pitch pre-filter that also returns the Jacobian of the output with
+ * respect to the four gains (out_dG), used by the gain optimization. */
+void WebRtcIsac_PitchfilterPre_gains(double *indat,
+                                     double *outdat,
+                                     double out_dG[][PITCH_FRAME_LEN + QLOOKAHEAD],
+                                     PitchFiltstr *pfp,
+                                     double *lags,
+                                     double *gains);
+
+/* Produces perceptually weighted (weiout) and whitened (whiout) signals. */
+void WebRtcIsac_WeightingFilter(const double *in, double *weiout, double *whiout, WeightFiltstr *wfdata);
+
+/* Second-order pole-zero high-pass filter over N samples. */
+void WebRtcIsac_Highpass(const double *in, double *out, double *state, int N);
+
+/* Allpass-based 2:1 decimation. */
+void WebRtcIsac_DecimateAllpass(const double *in,
+                                double *state_in,        /* array of size: 2*ALLPASSSECTIONS+1 */
+                                int N,                   /* number of input samples */
+                                double *out);            /* array of size N/2 */
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_ESTIMATOR_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_filter.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_filter.c
new file mode 100644
index 0000000..ccc8d21
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_filter.c
@@ -0,0 +1,469 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pitch_estimator.h"
+#include "os_specific_inline.h"
+
+#include <stdlib.h>
+#include <memory.h>
+#include <math.h>
+
/* 5-tap symmetric low-pass ("damping") filter applied to the gained
   pitch prediction before it is subtracted from / added to the input. */
static const double kDampFilter[PITCH_DAMPORDER] = {-0.07, 0.25, 0.64, 0.25, -0.07};

/* interpolation coefficients; generated by design_pitch_filter.m */
/* Each row is a PITCH_FRACORDER-tap fractional-delay interpolation filter;
   the row index selects one of PITCH_FRACS fractional lag offsets.  The
   middle row is numerically a unit impulse (zero fractional delay). */
static const double kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = {
  {-0.02239172458614,  0.06653315052934, -0.16515880017569,  0.60701333734125, 0.64671399919202, -0.20249000396417,  0.09926548334755, -0.04765933793109,  0.01754159521746},
  {-0.01985640750434,  0.05816126837866, -0.13991265473714,  0.44560418147643, 0.79117042386876, -0.20266133815188,  0.09585268418555, -0.04533310458084,  0.01654127246314},
  {-0.01463300534216,  0.04229888475060, -0.09897034715253,  0.28284326017787, 0.90385267956632, -0.16976950138649,  0.07704272393639, -0.03584218578311,  0.01295781500709},
  {-0.00764851320885,  0.02184035544377, -0.04985561057281,  0.13083306574393, 0.97545011664662, -0.10177807997561,  0.04400901776474, -0.02010737175166,  0.00719783432422},
  {-0.00000000000000,  0.00000000000000, -0.00000000000001,  0.00000000000001, 0.99999999999999,  0.00000000000001, -0.00000000000001,  0.00000000000000, -0.00000000000000},
  { 0.00719783432422, -0.02010737175166,  0.04400901776474, -0.10177807997562, 0.97545011664663,  0.13083306574393, -0.04985561057280,  0.02184035544377, -0.00764851320885},
  { 0.01295781500710, -0.03584218578312,  0.07704272393640, -0.16976950138650, 0.90385267956634,  0.28284326017785, -0.09897034715252,  0.04229888475059, -0.01463300534216},
  { 0.01654127246315, -0.04533310458085,  0.09585268418557, -0.20266133815190, 0.79117042386878,  0.44560418147640, -0.13991265473712,  0.05816126837865, -0.01985640750433}
};
+
+
+void WebRtcIsac_PitchfilterPre(double *indat,
+                                double *outdat,
+                                PitchFiltstr *pfp,
+                                double *lags,
+                                double *gains)
+{
+
+  double ubuf[PITCH_INTBUFFSIZE];
+  const double *fracoeff = NULL;
+  double curgain, curlag, gaindelta, lagdelta;
+  double sum, inystate[PITCH_DAMPORDER];
+  double ftmp, oldlag, oldgain;
+  int    k, n, m, pos, ind, pos2, Li, frc;
+
+  Li = 0;
+  /* Set up buffer and states */
+  memcpy(ubuf, pfp->ubuf, sizeof(double) * PITCH_BUFFSIZE);
+  memcpy(inystate, pfp->ystate, sizeof(double) * PITCH_DAMPORDER);
+
+  oldlag = *pfp->oldlagp;
+  oldgain = *pfp->oldgainp;
+
+  /* No interpolation if pitch lag step is big */
+  if ((lags[0] > (PITCH_UPSTEP * oldlag)) || (lags[0] < (PITCH_DOWNSTEP * oldlag))) {
+    oldlag = lags[0];
+    oldgain = gains[0];
+  }
+
+  ind=0;
+  for (k=0;k<PITCH_SUBFRAMES;k++) {
+
+    /* Calculate interpolation steps */
+    lagdelta=(lags[k]-oldlag) / PITCH_GRAN_PER_SUBFRAME;
+    curlag=oldlag ;
+    gaindelta=(gains[k]-oldgain) / PITCH_GRAN_PER_SUBFRAME;
+    curgain=oldgain ;
+    oldlag=lags[k];
+    oldgain=gains[k];
+
+    for (n=0;n<PITCH_SUBFRAME_LEN;n++) {
+      if ((ind % PITCH_UPDATE) == 0) { /* Update parameters */
+        curgain += gaindelta;
+        curlag += lagdelta;
+        Li = WebRtcIsac_lrint(curlag+PITCH_FILTDELAY + 0.5);
+        ftmp = Li - (curlag+PITCH_FILTDELAY);
+        frc = WebRtcIsac_lrint(PITCH_FRACS * ftmp - 0.5);
+        fracoeff = kIntrpCoef[frc];
+      }
+
+      /* shift low pass filter state */
+      for (m=PITCH_DAMPORDER-1;m>0;m--)
+        inystate[m] = inystate[m-1];
+
+      /* Filter to get fractional pitch */
+      pos = ind + PITCH_BUFFSIZE;
+      pos2 = pos - Li;
+      sum=0;
+      for (m=0;m<PITCH_FRACORDER;m++)
+        sum += ubuf[pos2+m] * fracoeff[m];
+      inystate[0] = curgain * sum;  /* Multiply with gain */
+
+      /* Low pass filter */
+      sum=0;
+      for (m=0;m<PITCH_DAMPORDER;m++)
+        sum += inystate[m] * kDampFilter[m];
+
+      /* Subtract from input and update buffer */
+      outdat[ind] = indat[ind] - sum;
+      ubuf[pos] = indat[ind] + outdat[ind];
+      ind++;
+    }
+  }
+
+  /* Export buffer and states */
+  memcpy(pfp->ubuf, ubuf+PITCH_FRAME_LEN, sizeof(double) * PITCH_BUFFSIZE);
+  memcpy(pfp->ystate, inystate, sizeof(double) * PITCH_DAMPORDER);
+
+  *pfp->oldlagp = oldlag;
+  *pfp->oldgainp = oldgain;
+
+}
+
+
/*
 * Pitch pre-filter with look-ahead.
 *
 * Identical to WebRtcIsac_PitchfilterPre() for the first PITCH_FRAME_LEN
 * samples, then filters QLOOKAHEAD additional samples reusing the LAST
 * interpolated lag/gain and fractional filter from the main loop.
 * The persistent state in *pfp is exported BEFORE the look-ahead loop,
 * so look-ahead processing does not leak into the next call's state.
 * NOTE(review): indat/outdat are assumed to hold
 * PITCH_FRAME_LEN + QLOOKAHEAD samples -- confirm against the caller.
 */
void WebRtcIsac_PitchfilterPre_la(double *indat,
                                   double *outdat,
                                   PitchFiltstr *pfp,
                                   double *lags,
                                   double *gains)
{
  double ubuf[PITCH_INTBUFFSIZE+QLOOKAHEAD];
  const double *fracoeff = NULL;  /* current fractional-delay filter row */
  double curgain, curlag, gaindelta, lagdelta;
  double sum, inystate[PITCH_DAMPORDER];
  double ftmp;
  double oldlag, oldgain;
  int    k, n, m, pos, ind, pos2, Li, frc;

  Li = 0;
  /* Set up buffer and states */
  memcpy(ubuf, pfp->ubuf, sizeof(double) * PITCH_BUFFSIZE);
  memcpy(inystate, pfp->ystate, sizeof(double) * PITCH_DAMPORDER);

  oldlag = *pfp->oldlagp;
  oldgain = *pfp->oldgainp;

  /* No interpolation if pitch lag step is big */
  if ((lags[0] > (PITCH_UPSTEP * oldlag)) || (lags[0] < (PITCH_DOWNSTEP * oldlag))) {
    oldlag = lags[0];
    oldgain = gains[0];
  }


  ind=0;
  for (k=0;k<PITCH_SUBFRAMES;k++) {

    /* Calculate interpolation steps */
    lagdelta=(lags[k]-oldlag) / PITCH_GRAN_PER_SUBFRAME;
    curlag=oldlag ;
    gaindelta=(gains[k]-oldgain) / PITCH_GRAN_PER_SUBFRAME;
    curgain=oldgain ;
    oldlag=lags[k];
    oldgain=gains[k];

    for (n=0;n<PITCH_SUBFRAME_LEN;n++) {
      if ((ind % PITCH_UPDATE) == 0) {   /* Update parameters */
        /* fracoeff is guaranteed non-NULL below because this branch
           runs on the very first iteration (ind == 0).
           NOTE(review): frc is assumed to stay in [0, PITCH_FRACS-1];
           no bounds check before indexing kIntrpCoef -- verify. */
        curgain += gaindelta;
        curlag += lagdelta;
        Li = WebRtcIsac_lrint(curlag+PITCH_FILTDELAY + 0.5);
        ftmp = Li - (curlag+PITCH_FILTDELAY);
        frc = WebRtcIsac_lrint(PITCH_FRACS * ftmp - 0.5);
        fracoeff = kIntrpCoef[frc];
      }

      /* shift low pass filter state */
      for (m=PITCH_DAMPORDER-1;m>0;m--)
        inystate[m] = inystate[m-1];

      /* Filter to get fractional pitch */
      pos = ind + PITCH_BUFFSIZE;
      pos2 = pos - Li;
      sum=0.0;
      for (m=0;m<PITCH_FRACORDER;m++)
        sum += ubuf[pos2+m] * fracoeff[m];
      inystate[0] = curgain * sum; /* Multiply with gain */

      /* Low pass filter */
      sum=0.0;
      for (m=0;m<PITCH_DAMPORDER;m++)
        sum += inystate[m] * kDampFilter[m];

      /* Subtract from input and update buffer */
      outdat[ind] = indat[ind] - sum;
      ubuf[pos] = indat[ind] + outdat[ind];
      ind++;
    }
  }

  /* Export buffer and states (done before the look-ahead loop on
     purpose: the look-ahead samples must not affect the next frame). */
  memcpy(pfp->ubuf, ubuf+PITCH_FRAME_LEN, sizeof(double) * PITCH_BUFFSIZE);
  memcpy(pfp->ystate, inystate, sizeof(double) * PITCH_DAMPORDER);

  *pfp->oldlagp = oldlag;
  *pfp->oldgainp = oldgain;


  /* Filter look-ahead segment with the last curgain/Li/fracoeff from
     the loop above; lag and gain are no longer updated here. */
  for (n=0;n<QLOOKAHEAD;n++) {
    /* shift low pass filter state */
    for (m=PITCH_DAMPORDER-1;m>0;m--)
      inystate[m] = inystate[m-1];

    /* Filter to get fractional pitch */
    pos = ind + PITCH_BUFFSIZE;
    pos2 = pos - Li;
    sum=0.0;
    for (m=0;m<PITCH_FRACORDER;m++)
      sum += ubuf[pos2+m] * fracoeff[m];
    inystate[0] = curgain * sum; /* Multiply with gain */

    /* Low pass filter */
    sum=0.0;
    for (m=0;m<PITCH_DAMPORDER;m++)
      sum += inystate[m] * kDampFilter[m];

    /* Subtract from input and update buffer */
    outdat[ind] = indat[ind] - sum;
    ubuf[pos] = indat[ind] + outdat[ind];
    ind++;
  }
}
+
+
/*
 * Pitch pre-filter that additionally computes gain sensitivities.
 *
 * Produces the same residual as the plain pre-filter and, in addition,
 * fills out_dG[j][i] with what appears to be the partial derivative of
 * outdat[i] with respect to the j-th subframe gain (inferred from the
 * gain_mult ramp / sum2 recursion -- confirm against the gain-search
 * caller).  Processes PITCH_FRAME_LEN + QLOOKAHEAD samples.
 * NOTE(review): unlike the Pre/Post variants this function never writes
 * back to *pfp, so the persistent state is left untouched.
 */
void WebRtcIsac_PitchfilterPre_gains(double *indat,
                                      double *outdat,
                                      double out_dG[][PITCH_FRAME_LEN + QLOOKAHEAD],
                                      PitchFiltstr *pfp,
                                      double *lags,
                                      double *gains)
{
  double ubuf[PITCH_INTBUFFSIZE+QLOOKAHEAD];
  double inystate_dG[4][PITCH_DAMPORDER];  /* damping-filter memory per gain derivative */
  double gain_mult[4];                     /* interpolation weight of each subframe gain */
  const double *fracoeff = NULL;
  double curgain, curlag, gaindelta, lagdelta;
  double sum, sum2, inystate[PITCH_DAMPORDER];
  double ftmp, oldlag, oldgain;
  int    k, n, m, m_tmp, j, pos, ind, pos2, Li, frc;

  Li = 0;

  /* Set up buffer and states */
  memcpy(ubuf, pfp->ubuf, sizeof(double) * PITCH_BUFFSIZE);
  memcpy(inystate, pfp->ystate, sizeof(double) * PITCH_DAMPORDER);

  /* clear some buffers */
  for (k = 0; k < 4; k++) {
    gain_mult[k] = 0.0;
    for (n = 0; n < PITCH_DAMPORDER; n++)
      inystate_dG[k][n] = 0.0;
  }

  oldlag = *pfp->oldlagp;
  oldgain = *pfp->oldgainp;

  /* No interpolation if pitch lag step is big */
  if ((lags[0] > (PITCH_UPSTEP * oldlag)) || (lags[0] < (PITCH_DOWNSTEP * oldlag))) {
    oldlag = lags[0];
    oldgain = gains[0];
    gain_mult[0] = 1.0;  /* first subframe gain is used at full weight */
  }


  ind=0;
  for (k=0;k<PITCH_SUBFRAMES;k++) {

    /* Calculate interpolation steps */
    lagdelta=(lags[k]-oldlag) / PITCH_GRAN_PER_SUBFRAME;
    curlag=oldlag ;
    gaindelta=(gains[k]-oldgain) / PITCH_GRAN_PER_SUBFRAME;
    curgain=oldgain ;
    oldlag=lags[k];
    oldgain=gains[k];

    for (n=0;n<PITCH_SUBFRAME_LEN;n++) {
      if ((ind % PITCH_UPDATE) == 0) {   /* Update parameters */
        /* NOTE(review): frc assumed in [0, PITCH_FRACS-1]; no bounds
           check before indexing kIntrpCoef -- verify. */
        curgain += gaindelta;
        curlag += lagdelta;
        Li = WebRtcIsac_lrint(curlag+PITCH_FILTDELAY + 0.5);
        ftmp = Li - (curlag+PITCH_FILTDELAY);
        frc = WebRtcIsac_lrint(PITCH_FRACS * ftmp - 0.5);
        fracoeff = kIntrpCoef[frc];
        /* Ramp the weight of the current subframe gain up (and the
           previous one down) in 0.2 steps, clamped to [., 1.0]. */
        gain_mult[k] += 0.2;
        if (gain_mult[k] > 1.0) gain_mult[k] = 1.0;
        if (k > 0) gain_mult[k-1] -= 0.2;
      }

      /* shift low pass filter states */
      for (m=PITCH_DAMPORDER-1;m>0;m--) {
        inystate[m] = inystate[m-1];
        for (j = 0; j < 4; j++)
          inystate_dG[j][m] = inystate_dG[j][m-1];
      }

      pos = ind + PITCH_BUFFSIZE;
      pos2 = pos - Li;

      /* Filter to get fractional pitch */
      sum=0.0;
      for (m=0;m<PITCH_FRACORDER;m++)
        sum += ubuf[pos2+m] * fracoeff[m];
      inystate[0] = curgain * sum;  /* Multiply with gain */
      /* m_tmp clamps the derivative filter so out_dG is never indexed
         below 0 at the start of the frame (ind < Li). */
      m_tmp = (Li-ind > 0) ? Li-ind : 0;
      for (j = 0; j < k+1; j++) {
        /* filter: derivative recursion -- direct term (gain weight
           times prediction) plus feedback through earlier out_dG. */
        sum2 = 0.0;
        for (m = PITCH_FRACORDER-1; m >= m_tmp; m--)
          sum2 += out_dG[j][ind-Li + m] * fracoeff[m];
        inystate_dG[j][0] = gain_mult[j] * sum + curgain * sum2;
      }

      /* Low pass filter */
      sum=0.0;
      for (m=0;m<PITCH_DAMPORDER;m++)
        sum += inystate[m] * kDampFilter[m];

      /* Subtract from input and update buffer */
      outdat[ind] = indat[ind] - sum;
      ubuf[pos] = indat[ind] + outdat[ind];

      for (j = 0; j < k+1; j++) {
        sum = 0.0;
        for (m=0;m<PITCH_DAMPORDER;m++)
          sum -= inystate_dG[j][m] * kDampFilter[m];
        out_dG[j][ind] = sum;
      }
      /* Gains of later subframes have not influenced the output yet. */
      for (j = k+1; j < 4; j++)
        out_dG[j][ind] = 0.0;


      ind++;
    }
  }

  /* Filter look-ahead segment with the last curgain/Li/fracoeff.
     Here k == PITCH_SUBFRAMES, hence the extra (j<4) guard below. */
  for (n=0;n<QLOOKAHEAD;n++) {
    /* shift low pass filter states */
    for (m=PITCH_DAMPORDER-1;m>0;m--) {
      inystate[m] = inystate[m-1];
      for (j = 0; j < 4; j++)
        inystate_dG[j][m] = inystate_dG[j][m-1];
    }

    pos = ind + PITCH_BUFFSIZE;
    pos2 = pos - Li;

    /* Filter to get fractional pitch */
    sum=0.0;
    for (m=0;m<PITCH_FRACORDER;m++)
      sum += ubuf[pos2+m] * fracoeff[m];
    inystate[0] = curgain * sum;  /* Multiply with gain */
    m_tmp = (Li-ind > 0) ? Li-ind : 0;
    for (j = 0; (j<k+1)&&(j<4); j++) {
      /* filter */
      sum2 = 0.0;
      for (m = PITCH_FRACORDER-1; m >= m_tmp; m--)
        sum2 += out_dG[j][ind-Li + m] * fracoeff[m];
      inystate_dG[j][0] = gain_mult[j] * sum + curgain * sum2;
    }

    /* Low pass filter */
    sum=0.0;
    for (m=0;m<PITCH_DAMPORDER;m++)
      sum += inystate[m] * kDampFilter[m];

    /* Subtract from input and update buffer */
    outdat[ind] = indat[ind] - sum;
    ubuf[pos] = indat[ind] + outdat[ind];

    for (j = 0; (j<k+1)&&(j<4); j++) {
      sum = 0.0;
      for (m=0;m<PITCH_DAMPORDER;m++)
        sum -= inystate_dG[j][m] * kDampFilter[m];
      out_dG[j][ind] = sum;
    }

    ind++;
  }
}
+
+
+void WebRtcIsac_PitchfilterPost(double *indat,
+                                 double *outdat,
+                                 PitchFiltstr *pfp,
+                                 double *lags,
+                                 double *gains)
+{
+
+  double ubuf[PITCH_INTBUFFSIZE];
+  const double *fracoeff = NULL;
+  double curgain, curlag, gaindelta, lagdelta;
+  double sum, inystate[PITCH_DAMPORDER];
+  double ftmp, oldlag, oldgain;
+  int    k, n, m, pos, ind, pos2, Li, frc;
+
+  Li = 0;
+
+  /* Set up buffer and states */
+  memcpy(ubuf, pfp->ubuf, sizeof(double) * PITCH_BUFFSIZE);
+  memcpy(inystate, pfp->ystate, sizeof(double) * PITCH_DAMPORDER);
+
+  oldlag = *pfp->oldlagp;
+  oldgain = *pfp->oldgainp;
+
+  /* make output more periodic */
+  for (k=0;k<PITCH_SUBFRAMES;k++)
+    gains[k] *= 1.3;
+
+  /* No interpolation if pitch lag step is big */
+  if ((lags[0] > (PITCH_UPSTEP * oldlag)) || (lags[0] < (PITCH_DOWNSTEP * oldlag))) {
+    oldlag = lags[0];
+    oldgain = gains[0];
+  }
+
+
+  ind=0;
+  for (k=0;k<PITCH_SUBFRAMES;k++) {
+
+    /* Calculate interpolation steps */
+    lagdelta=(lags[k]-oldlag) / PITCH_GRAN_PER_SUBFRAME;
+    curlag=oldlag ;
+    gaindelta=(gains[k]-oldgain) / PITCH_GRAN_PER_SUBFRAME;
+    curgain=oldgain ;
+    oldlag=lags[k];
+    oldgain=gains[k];
+
+    for (n=0;n<PITCH_SUBFRAME_LEN;n++) {
+      if ((ind % PITCH_UPDATE) == 0) {   /* Update parameters */
+        curgain += gaindelta;
+        curlag += lagdelta;
+        Li = WebRtcIsac_lrint(curlag+PITCH_FILTDELAY + 0.5);
+        ftmp = Li - (curlag+PITCH_FILTDELAY);
+        frc = WebRtcIsac_lrint(PITCH_FRACS * ftmp - 0.5);
+        fracoeff = kIntrpCoef[frc];
+      }
+
+      /* shift low pass filter state */
+      for (m=PITCH_DAMPORDER-1;m>0;m--)
+        inystate[m] = inystate[m-1];
+
+      /* Filter to get fractional pitch */
+      pos = ind + PITCH_BUFFSIZE;
+      pos2 = pos - Li;
+      sum=0.0;
+      for (m=0;m<PITCH_FRACORDER;m++)
+        sum += ubuf[pos2+m] * fracoeff[m];
+      inystate[0] = curgain * sum; /* Multiply with gain */
+
+      /* Low pass filter */
+      sum=0.0;
+      for (m=0;m<PITCH_DAMPORDER;m++)
+        sum += inystate[m] * kDampFilter[m];
+
+      /* Add to input and update buffer */
+      outdat[ind] = indat[ind] + sum;
+      ubuf[pos] = indat[ind] + outdat[ind];
+      ind++;
+    }
+  }
+
+  /* Export buffer and states */
+  memcpy(pfp->ubuf, ubuf+PITCH_FRAME_LEN, sizeof(double) * PITCH_BUFFSIZE);
+  memcpy(pfp->ystate, inystate, sizeof(double) * PITCH_DAMPORDER);
+
+  *pfp->oldlagp = oldlag;
+  *pfp->oldgainp = oldgain;
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_gain_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_gain_tables.c
new file mode 100644
index 0000000..5d998a2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_gain_tables.c
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
#include "pitch_gain_tables.h"

#include "settings.h"

/* Coding tables for the pitch filter gain side-info in the entropy coder. */
/********************* Pitch Filter Gain Coefficient Tables ************************/
/* cdf for quantized pitch filter gains */
/* Cumulative distribution, monotone non-decreasing, saturating at 65535.
   NOTE(review): the array holds 255 entries while
   WebRtcIsac_kQCdfTableSizeGain reports 256 -- verify how the entropy
   coder indexes this table before changing either. */
const WebRtc_UWord16 WebRtcIsac_kQPitchGainCdf[255] = {
  0,  2,  4,  6,  64,  901,  903,  905,  16954,  16956,
  16961,  17360,  17362,  17364,  17366,  17368,  17370,  17372,  17374,  17411,
  17514,  17516,  17583,  18790,  18796,  18802,  20760,  20777,  20782,  21722,
  21724,  21728,  21738,  21740,  21742,  21744,  21746,  21748,  22224,  22227,
  22230,  23214,  23229,  23239,  25086,  25108,  25120,  26088,  26094,  26098,
  26175,  26177,  26179,  26181,  26183,  26185,  26484,  26507,  26522,  27705,
  27731,  27750,  29767,  29799,  29817,  30866,  30883,  30885,  31025,  31029,
  31031,  31033,  31035,  31037,  31114,  31126,  31134,  32687,  32722,  32767,
  35718,  35742,  35757,  36943,  36952,  36954,  37115,  37128,  37130,  37132,
  37134,  37136,  37143,  37145,  37152,  38843,  38863,  38897,  47458,  47467,
  47474,  49040,  49061,  49063,  49145,  49157,  49159,  49161,  49163,  49165,
  49167,  49169,  49171,  49757,  49770,  49782,  61333,  61344,  61346,  62860,
  62883,  62885,  62887,  62889,  62891,  62893,  62895,  62897,  62899,  62901,
  62903,  62905,  62907,  62909,  65496,  65498,  65500,  65521,  65523,  65525,
  65527,  65529,  65531,  65533,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
  65535,  65535,  65535,  65535,  65535};

/* index limits and ranges */
const WebRtc_Word16 WebRtcIsac_kIndexLowerLimitGain[3] = {
  -7, -2, -1};

const WebRtc_Word16 WebRtcIsac_kIndexUpperLimitGain[3] = {
  0,  3,  1};

const WebRtc_UWord16 WebRtcIsac_kIndexMultsGain[2] = {
  18,  3};

/* size of cdf table */
const WebRtc_UWord16 WebRtcIsac_kQCdfTableSizeGain[1] = {
  256};
+
/* ------------------------------ FIXED POINT ------------------------- */
/* Mean values of pitch filter gains, Q12 fixed point.  Four 144-entry
   tables (Q12 per the names); presumably one per subframe position --
   confirm against the entropy-coding code that reads them. */
const WebRtc_Word16 WebRtcIsac_kQMeanGain1Q12[144] = {
   843,    1092,    1336,    1222,    1405,    1656,    1500,    1815,    1843,    1838,    1839,    1843,    1843,    1843,    1843,    1843,
  1843,    1843,     814,     846,    1092,    1013,    1174,    1383,    1391,    1511,    1584,    1734,    1753,    1843,    1843,    1843,
  1843,    1843,    1843,    1843,     524,     689,     777,     845,     947,    1069,    1090,    1263,    1380,    1447,    1559,    1676,
  1645,    1749,    1843,    1843,    1843,    1843,      81,     477,     563,     611,     706,     806,     849,    1012,    1192,    1128,
  1330,    1489,    1425,    1576,    1826,    1741,    1843,    1843,       0,     290,     305,     356,     488,     575,     602,     741,
   890,     835,    1079,    1196,    1182,    1376,    1519,    1506,    1680,    1843,       0,      47,      97,      69,     289,     381,
   385,     474,     617,     664,     803,    1079,     935,    1160,    1269,    1265,    1506,    1741,       0,       0,       0,       0,
   112,     120,     190,     283,     442,     343,     526,     809,     684,     935,    1134,    1020,    1265,    1506,       0,       0,
     0,       0,       0,       0,       0,     111,     256,      87,     373,     597,     430,     684,     935,     770,    1020,    1265};

const WebRtc_Word16 WebRtcIsac_kQMeanGain2Q12[144] = {
  1760,    1525,    1285,    1747,    1671,    1393,    1843,    1826,    1555,    1843,    1784,    1606,    1843,    1843,    1711,    1843,
  1843,    1814,    1389,    1275,    1040,    1564,    1414,    1252,    1610,    1495,    1343,    1753,    1592,    1405,    1804,    1720,
  1475,    1843,    1814,    1581,    1208,    1061,    856,    1349,    1148,    994,    1390,    1253,    1111,    1495,    1343,    1178,
  1770,    1465,    1234,    1814,    1581,    1342,    1040,    793,    713,    1053,    895,    737,    1128,    1003,    861,    1277,
  1094,    981,    1475,    1192,    1019,    1581,    1342,    1098,    855,    570,    483,    833,    648,    540,    948,    744,
  572,    1009,    844,    636,    1234,    934,    685,    1342,    1217,    984,    537,    318,    124,    603,    423,    350,
  687,    479,    322,    791,    581,    430,    987,    671,    488,    1098,    849,    597,    283,    27,        0,    397,
  222,    38,        513,    271,    124,    624,    325,    157,    737,    484,    233,    849,    597,    343,    27,        0,
  0,    141,    0,    0,    256,    69,        0,    370,    87,        0,    484,    229,    0,    597,    343,    87};

const WebRtc_Word16 WebRtcIsac_kQMeanGain3Q12[144] = {
  1843,    1843,    1711,    1843,    1818,    1606,    1843,    1827,    1511,    1814,    1639,    1393,    1760,    1525,    1285,    1656,
  1419,    1176,    1835,    1718,    1475,    1841,    1650,    1387,    1648,    1498,    1287,    1600,    1411,    1176,    1522,    1299,
  1040,    1419,    1176,    928,    1773,    1461,    1128,    1532,    1355,    1202,    1429,    1260,    1115,    1398,    1151,    1025,
  1172,    1080,    790,    1176,    928,    677,    1475,    1147,    1019,    1276,    1096,    922,    1214,    1010,    901,    1057,
  893,    800,    1040,    796,    734,    928,    677,    424,    1137,    897,    753,    1120,    830,    710,    875,    751,
  601,    795,    642,    583,    790,    544,    475,    677,    474,    140,    987,    750,    482,    697,    573,    450,
  691,    487,    303,    661,    394,    332,    537,    303,    220,    424,    168,    0,    737,    484,    229,    624,
  348,    153,    441,    261,    136,    397,    166,    51,        283,    27,        0,    168,    0,    0,    484,    229,
  0,    370,    57,        0,    256,    43,        0,    141,    0,        0,    27,        0,    0,    0,    0,    0};


const WebRtc_Word16 WebRtcIsac_kQMeanGain4Q12[144] = {
  1843,    1843,    1843,    1843,    1841,    1843,    1500,    1821,    1843,    1222,    1434,    1656,    843,    1092,    1336,    504,
  757,    1007,    1843,    1843,    1843,    1838,    1791,    1843,    1265,    1505,    1599,    965,    1219,    1425,    730,    821,
  1092,    249,    504,    757,    1783,    1819,    1843,    1351,    1567,    1727,    1096,    1268,    1409,    805,    961,    1131,
  444,    670,    843,    0,        249,    504,    1425,    1655,    1743,    1096,    1324,    1448,    822,    1019,    1199,    490,
  704,    867,    81,        450,    555,    0,    0,        249,    1247,    1428,    1530,    881,    1073,    1283,    610,    759,
  939,    278,    464,    645,    0,    200,    270,    0,    0,    0,        935,    1163,    1410,    528,    790,    1068,
  377,    499,    717,    173,    240,    274,    0,    43,        62,        0,    0,    0,    684,    935,    1182,    343,
  551,    735,    161,    262,    423,    0,    55,        27,        0,    0,    0,    0,    0,    0,    430,    684,
  935,    87,        377,    597,    0,    46,        256,    0,    0,    0,    0,    0,    0,    0,    0,    0};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_gain_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_gain_tables.h
new file mode 100644
index 0000000..f958f5d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_gain_tables.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_gain_tables.h
+ *
+ * This file contains tables for the pitch filter side-info in the entropy coder.
+ *
+ */
+
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_

#include "typedefs.h"

/* Coding tables for the pitch filter gain side-info in the entropy coder. */
/********************* Pitch Filter Gain Coefficient Tables ************************/

/* cdf for quantized pitch filter gains */
extern const WebRtc_UWord16 WebRtcIsac_kQPitchGainCdf[255];

/* index limits and ranges */
extern const WebRtc_Word16 WebRtcIsac_kIndexLowerLimitGain[3];

extern const WebRtc_Word16 WebRtcIsac_kIndexUpperLimitGain[3];
extern const WebRtc_UWord16 WebRtcIsac_kIndexMultsGain[2];

/* Mean values of pitch filter gains, Q12 fixed point (four 144-entry
   tables; presumably one per subframe position -- confirm in the
   entropy-coding code that reads them). */
extern const WebRtc_Word16 WebRtcIsac_kQMeanGain1Q12[144];
extern const WebRtc_Word16 WebRtcIsac_kQMeanGain2Q12[144];
extern const WebRtc_Word16 WebRtcIsac_kQMeanGain3Q12[144];
extern const WebRtc_Word16 WebRtcIsac_kQMeanGain4Q12[144];

/* size of cdf table */
extern const WebRtc_UWord16 WebRtcIsac_kQCdfTableSizeGain[1];

#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_lag_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_lag_tables.c
new file mode 100644
index 0000000..72a031e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_lag_tables.c
@@ -0,0 +1,277 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pitch_lag_tables.h"
+#include "settings.h"
+
+/* coding tables for the pitch filter side-info in the entropy coder */
+/********************* Pitch Filter Lag Coefficient Tables ************************/
+
+/* tables for use with small pitch gain */
+
+/* cdf for quantized pitch filter lags */
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Lo[127] = {
+ 0,  134,  336,  549,  778,  998,  1264,  1512,  1777,  2070,
+ 2423,  2794,  3051,  3361,  3708,  3979,  4315,  4610,  4933,  5269,
+ 5575,  5896,  6155,  6480,  6816,  7129,  7477,  7764,  8061,  8358,
+ 8718,  9020,  9390,  9783,  10177,  10543,  10885,  11342,  11795,  12213,
+ 12680,  13096,  13524,  13919,  14436,  14903,  15349,  15795,  16267,  16734,
+ 17266,  17697,  18130,  18632,  19080,  19447,  19884,  20315,  20735,  21288,
+ 21764,  22264,  22723,  23193,  23680,  24111,  24557,  25022,  25537,  26082,
+ 26543,  27090,  27620,  28139,  28652,  29149,  29634,  30175,  30692,  31273,
+ 31866,  32506,  33059,  33650,  34296,  34955,  35629,  36295,  36967,  37726,
+ 38559,  39458,  40364,  41293,  42256,  43215,  44231,  45253,  46274,  47359,
+ 48482,  49678,  50810,  51853,  53016,  54148,  55235,  56263,  57282,  58363,
+ 59288,  60179,  61076,  61806,  62474,  63129,  63656,  64160,  64533,  64856,
+ 65152,  65535,  65535,  65535,  65535,  65535,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Lo[20] = {
+ 0,  429,  3558,  5861,  8558,  11639,  15210,  19502,  24773,  31983,
+ 42602,  48567,  52601,  55676,  58160,  60172,  61889,  63235,  65383,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Lo[2] = {
+ 0,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Lo[10] = {
+ 0,  2966,  6368,  11182,  19431,  37793,  48532,  55353,  60626,  65535};
+
+const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrLo[4] = {WebRtcIsac_kQPitchLagCdf1Lo, WebRtcIsac_kQPitchLagCdf2Lo, WebRtcIsac_kQPitchLagCdf3Lo, WebRtcIsac_kQPitchLagCdf4Lo};
+
+/* size of first cdf table */
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeLo[1] = {128};
+
+/* index limits and ranges */
+const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagLo[4] = {
+-140, -9,  0, -4};
+
+const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagLo[4] = {
+-20,  9,  0,  4};
+
+/* initial index for arithmetic decoder */
+const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagLo[3] = {
+ 10,  1,  5};
+
+/* mean values of pitch filter lags */
+const double WebRtcIsac_kQMeanLag2Lo[19] = {
+-17.21385070, -15.82678944, -14.07123081, -12.03003877, -10.01311864, -8.00794627, -5.91162987, -3.89231876, -1.90220980, -0.01879275,
+ 1.89144232,  3.88123171,  5.92146992,  7.96435361,  9.98923648,  11.98266347,  13.96101002,  15.74855713,  17.10976611};
+
+const double WebRtcIsac_kQMeanLag3Lo[1] = {
+ 0.00000000};
+
+const double WebRtcIsac_kQMeanLag4Lo[9] = {
+-7.76246496, -5.92083980, -3.94095226, -1.89502305,  0.03724681,  1.93054221,  3.96443467,  5.91726366,  7.78434291};
+
+const double WebRtcIsac_kQPitchLagStepsizeLo = 2.000000;
+
+
+/* tables for use with medium pitch gain */
+
+/* cdf for quantized pitch filter lags */
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Mid[255] = {
+ 0,  28,  61,  88,  121,  149,  233,  331,  475,  559,
+ 624,  661,  689,  712,  745,  791,  815,  843,  866,  922,
+ 959,  1024,  1061,  1117,  1178,  1238,  1280,  1350,  1453,  1513,
+ 1564,  1625,  1671,  1741,  1788,  1904,  2072,  2421,  2626,  2770,
+ 2840,  2900,  2942,  3012,  3068,  3115,  3147,  3194,  3254,  3319,
+ 3366,  3520,  3678,  3780,  3850,  3911,  3957,  4032,  4106,  4185,
+ 4292,  4474,  4683,  4842,  5019,  5191,  5321,  5428,  5540,  5675,
+ 5763,  5847,  5959,  6127,  6304,  6564,  6839,  7090,  7263,  7421,
+ 7556,  7728,  7872,  7984,  8142,  8361,  8580,  8743,  8938,  9227,
+ 9409,  9539,  9674,  9795,  9930,  10060,  10177,  10382,  10614,  10861,
+ 11038,  11271,  11415,  11629,  11792,  12044,  12193,  12416,  12574,  12821,
+ 13007,  13235,  13445,  13654,  13901,  14134,  14488,  15000,  15703,  16285,
+ 16504,  16797,  17086,  17328,  17579,  17807,  17998,  18268,  18538,  18836,
+ 19087,  19274,  19474,  19716,  19935,  20270,  20833,  21303,  21532,  21741,
+ 21978,  22207,  22523,  22770,  23054,  23613,  23943,  24204,  24399,  24651,
+ 24832,  25074,  25270,  25549,  25759,  26015,  26150,  26424,  26713,  27048,
+ 27342,  27504,  27681,  27854,  28021,  28207,  28412,  28664,  28859,  29064,
+ 29278,  29548,  29748,  30107,  30377,  30656,  30856,  31164,  31452,  31755,
+ 32011,  32328,  32626,  32919,  33319,  33789,  34329,  34925,  35396,  35973,
+ 36443,  36964,  37551,  38156,  38724,  39357,  40023,  40908,  41587,  42602,
+ 43924,  45037,  45810,  46597,  47421,  48291,  49092,  50051,  51448,  52719,
+ 53440,  54241,  54944,  55977,  56676,  57299,  57872,  58389,  59059,  59688,
+ 60237,  60782,  61094,  61573,  61890,  62290,  62658,  63030,  63217,  63454,
+ 63622,  63882,  64003,  64273,  64427,  64529,  64581,  64697,  64758,  64902,
+ 65414,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+ 65535,  65535,  65535,  65535,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Mid[36] = {
+ 0,  71,  335,  581,  836,  1039,  1323,  1795,  2258,  2608,
+ 3005,  3591,  4243,  5344,  7163,  10583,  16848,  28078,  49448,  57007,
+ 60357,  61850,  62837,  63437,  63872,  64188,  64377,  64614,  64774,  64949,
+ 65039,  65115,  65223,  65360,  65474,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Mid[2] = {
+ 0,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Mid[20] = {
+ 0,  28,  246,  459,  667,  1045,  1523,  2337,  4337,  11347,
+ 44231,  56709,  60781,  62243,  63161,  63969,  64608,  65062,  65502,  65535};
+
+const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrMid[4] = {WebRtcIsac_kQPitchLagCdf1Mid, WebRtcIsac_kQPitchLagCdf2Mid, WebRtcIsac_kQPitchLagCdf3Mid, WebRtcIsac_kQPitchLagCdf4Mid};
+
+/* size of first cdf table */
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeMid[1] = {256};
+
+/* index limits and ranges */
+const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagMid[4] = {
+-280, -17,  0, -9};
+
+const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagMid[4] = {
+-40,  17,  0,  9};
+
+/* initial index for arithmetic decoder */
+const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagMid[3] = {
+ 18,  1,  10};
+
+/* mean values of pitch filter lags */
+const double WebRtcIsac_kQMeanLag2Mid[35] = {
+-16.89183900, -15.86949778, -15.05476653, -14.00664348, -13.02793036, -12.07324237, -11.00542532, -10.11250602, -8.90792971, -8.02474753,
+-7.00426767, -5.94055287, -4.98251338, -3.91053158, -2.98820425, -1.93524245, -0.92978085, -0.01722509,  0.91317387,  1.92973955,
+ 2.96908851,  3.93728974,  4.96308471,  5.92244151,  7.08673497,  8.00993708,  9.04656316,  9.98538742,  10.97851694,  11.94772884,
+ 13.02426166,  14.00039951,  15.01347042,  15.80758023,  16.94086895};
+
+const double WebRtcIsac_kQMeanLag3Mid[1] = {
+ 0.00000000};
+
+const double WebRtcIsac_kQMeanLag4Mid[19] = {
+-8.60409403, -7.89198395, -7.03450280, -5.86260421, -4.93822322, -3.93078706, -2.91302322, -1.91824007, -0.87003282,  0.02822649,
+ 0.89951758,  1.87495484,  2.91802604,  3.96874074,  5.06571703,  5.93618227,  7.00520185,  7.88497726,  8.64160364};
+
+const double WebRtcIsac_kQPitchLagStepsizeMid = 1.000000;
+
+
+/* tables for use with large pitch gain */
+
+/* cdf for quantized pitch filter lags */
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Hi[511] = {
+ 0,  7,  18,  33,  69,  105,  156,  228,  315,  612,
+ 680,  691,  709,  724,  735,  738,  742,  746,  749,  753,
+ 756,  760,  764,  774,  782,  785,  789,  796,  800,  803,
+ 807,  814,  818,  822,  829,  832,  847,  854,  858,  869,
+ 876,  883,  898,  908,  934,  977,  1010,  1050,  1060,  1064,
+ 1075,  1078,  1086,  1089,  1093,  1104,  1111,  1122,  1133,  1136,
+ 1151,  1162,  1183,  1209,  1252,  1281,  1339,  1364,  1386,  1401,
+ 1411,  1415,  1426,  1430,  1433,  1440,  1448,  1455,  1462,  1477,
+ 1487,  1495,  1502,  1506,  1509,  1516,  1524,  1531,  1535,  1542,
+ 1553,  1556,  1578,  1589,  1611,  1625,  1639,  1643,  1654,  1665,
+ 1672,  1687,  1694,  1705,  1708,  1719,  1730,  1744,  1752,  1759,
+ 1791,  1795,  1820,  1867,  1886,  1915,  1936,  1943,  1965,  1987,
+ 2041,  2099,  2161,  2175,  2200,  2211,  2226,  2233,  2244,  2251,
+ 2266,  2280,  2287,  2298,  2309,  2316,  2331,  2342,  2356,  2378,
+ 2403,  2418,  2447,  2497,  2544,  2602,  2863,  2895,  2903,  2935,
+ 2950,  2971,  3004,  3011,  3018,  3029,  3040,  3062,  3087,  3127,
+ 3152,  3170,  3199,  3243,  3293,  3322,  3340,  3377,  3402,  3427,
+ 3474,  3518,  3543,  3579,  3601,  3637,  3659,  3706,  3731,  3760,
+ 3818,  3847,  3869,  3901,  3920,  3952,  4068,  4169,  4220,  4271,
+ 4524,  4571,  4604,  4632,  4672,  4730,  4777,  4806,  4857,  4904,
+ 4951,  5002,  5031,  5060,  5107,  5150,  5212,  5266,  5331,  5382,
+ 5432,  5490,  5544,  5610,  5700,  5762,  5812,  5874,  5972,  6022,
+ 6091,  6163,  6232,  6305,  6402,  6540,  6685,  6880,  7090,  7271,
+ 7379,  7452,  7542,  7625,  7687,  7770,  7843,  7911,  7966,  8024,
+ 8096,  8190,  8252,  8320,  8411,  8501,  8585,  8639,  8751,  8842,
+ 8918,  8986,  9066,  9127,  9203,  9269,  9345,  9406,  9464,  9536,
+ 9612,  9667,  9735,  9844,  9931,  10036,  10119,  10199,  10260,  10358,
+ 10441,  10514,  10666,  10734,  10872,  10951,  11053,  11125,  11223,  11324,
+ 11516,  11664,  11737,  11816,  11892,  12008,  12120,  12200,  12280,  12392,
+ 12490,  12576,  12685,  12812,  12917,  13003,  13108,  13210,  13300,  13384,
+ 13470,  13579,  13673,  13771,  13879,  13999,  14136,  14201,  14368,  14614,
+ 14759,  14867,  14958,  15030,  15121,  15189,  15280,  15385,  15461,  15555,
+ 15653,  15768,  15884,  15971,  16069,  16145,  16210,  16279,  16380,  16463,
+ 16539,  16615,  16688,  16818,  16919,  17017,  18041,  18338,  18523,  18649,
+ 18790,  18917,  19047,  19167,  19315,  19460,  19601,  19731,  19858,  20068,
+ 20173,  20318,  20466,  20625,  20741,  20911,  21045,  21201,  21396,  21588,
+ 21816,  22022,  22305,  22547,  22786,  23072,  23322,  23600,  23879,  24168,
+ 24433,  24769,  25120,  25511,  25895,  26289,  26792,  27219,  27683,  28077,
+ 28566,  29094,  29546,  29977,  30491,  30991,  31573,  32105,  32594,  33173,
+ 33788,  34497,  35181,  35833,  36488,  37255,  37921,  38645,  39275,  39894,
+ 40505,  41167,  41790,  42431,  43096,  43723,  44385,  45134,  45858,  46607,
+ 47349,  48091,  48768,  49405,  49955,  50555,  51167,  51985,  52611,  53078,
+ 53494,  53965,  54435,  54996,  55601,  56125,  56563,  56838,  57244,  57566,
+ 57967,  58297,  58771,  59093,  59419,  59647,  59886,  60143,  60461,  60693,
+ 60917,  61170,  61416,  61634,  61891,  62122,  62310,  62455,  62632,  62839,
+ 63103,  63436,  63639,  63805,  63906,  64015,  64192,  64355,  64475,  64558,
+ 64663,  64742,  64811,  64865,  64916,  64956,  64981,  65025,  65068,  65115,
+ 65195,  65314,  65419,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+ 65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+ 65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+ 65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,  65535,
+ 65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Hi[68] = {
+ 0,  7,  11,  22,  37,  52,  56,  59,  81,  85,
+ 89,  96,  115,  130,  137,  152,  170,  181,  193,  200,
+ 207,  233,  237,  259,  289,  318,  363,  433,  592,  992,
+ 1607,  3062,  6149,  12206,  25522,  48368,  58223,  61918,  63640,  64584,
+ 64943,  65098,  65206,  65268,  65294,  65335,  65350,  65372,  65387,  65402,
+ 65413,  65420,  65428,  65435,  65439,  65450,  65454,  65468,  65472,  65476,
+ 65483,  65491,  65498,  65505,  65516,  65520,  65528,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Hi[2] = {
+ 0,  65535};
+
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Hi[35] = {
+ 0,  7,  19,  30,  41,  48,  63,  74,  82,  96,
+ 122,  152,  215,  330,  701,  2611,  10931,  48106,  61177,  64341,
+ 65112,  65238,  65309,  65338,  65364,  65379,  65401,  65427,  65453,  65465,
+ 65476,  65490,  65509,  65528,  65535};
+
+const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrHi[4] = {WebRtcIsac_kQPitchLagCdf1Hi, WebRtcIsac_kQPitchLagCdf2Hi, WebRtcIsac_kQPitchLagCdf3Hi, WebRtcIsac_kQPitchLagCdf4Hi};
+
+/* size of first cdf table */
+const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeHi[1] = {512};
+
+/* index limits and ranges */
+const WebRtc_Word16 WebRtcIsac_kQindexLowerLimitLagHi[4] = {
+-552, -34,  0, -16};
+
+const WebRtc_Word16 WebRtcIsac_kQindexUpperLimitLagHi[4] = {
+-80,  32,  0,  17};
+
+/* initial index for arithmetic decoder */
+const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagHi[3] = {
+ 34,  1,  18};
+
+/* mean values of pitch filter lags */
+const double WebRtcIsac_kQMeanLag2Hi[67] = {
+-17.07263295, -16.50000000, -15.83966081, -15.55613708, -14.96948007, -14.50000000, -14.00000000, -13.48377986, -13.00000000, -12.50000000,
+-11.93199636, -11.44530414, -11.04197641, -10.39910301, -10.15202337, -9.51322461, -8.93357741, -8.46456632, -8.10270672, -7.53751847,
+-6.98686404, -6.50000000, -6.08463150, -5.46872991, -5.00864717, -4.50163760, -4.01382410, -3.43856708, -2.96898001, -2.46554810,
+-1.96861004, -1.47106701, -0.97197237, -0.46561654, -0.00531409,  0.45767857,  0.96777907,  1.47507903,  1.97740425,  2.46695420,
+ 3.00695774,  3.47167185,  4.02712538,  4.49280007,  5.01087640,  5.48191963,  6.04916550,  6.51511058,  6.97297819,  7.46565499,
+ 8.01489405,  8.39912001,  8.91819757,  9.50000000,  10.11654065,  10.50000000,  11.03712583,  11.50000000,  12.00000000,  12.38964346,
+ 12.89466127,  13.43657881,  13.96013840,  14.46279912,  15.00000000,  15.39412269,  15.96662441};
+
+const double WebRtcIsac_kQMeanLag3Hi[1] = {
+ 0.00000000};
+
+const double WebRtcIsac_kQMeanLag4Hi[34] = {
+-7.98331221, -7.47988769, -7.03626557, -6.52708003, -6.06982173, -5.51856292, -5.05827033, -4.45909878, -3.99125864, -3.45308135,
+-3.02328139, -2.47297273, -1.94341995, -1.44699056, -0.93612243, -0.43012406,  0.01120357,  0.44054812,  0.93199883,  1.45669587,
+ 1.97218322,  2.50187419,  2.98748690,  3.49343202,  4.01660147,  4.50984306,  5.01402683,  5.58936797,  5.91787793,  6.59998900,
+ 6.85034315,  7.53503316,  7.87711194,  8.53631648};
+
+const double WebRtcIsac_kQPitchLagStepsizeHi = 0.500000;
+
+/* transform matrix */
+const double WebRtcIsac_kTransform[4][4] = {
+{-0.50000000, -0.50000000, -0.50000000, -0.50000000},
+{ 0.67082039,  0.22360680, -0.22360680, -0.67082039},
+{ 0.50000000, -0.50000000, -0.50000000,  0.50000000},
+{ 0.22360680, -0.67082039,  0.67082039, -0.22360680}};
+
+/* transpose transform matrix */
+const double WebRtcIsac_kTransformTranspose[4][4] = {
+{-0.50000000,  0.67082039,  0.50000000,  0.22360680},
+{-0.50000000,  0.22360680, -0.50000000, -0.67082039},
+{-0.50000000, -0.22360680, -0.50000000,  0.67082039},
+{-0.50000000, -0.67082039,  0.50000000, -0.22360680}};
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_lag_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_lag_tables.h
new file mode 100644
index 0000000..67b02e5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/pitch_lag_tables.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * pitch_lag_tables.h
+ *
+ * This file contains tables for the pitch filter side-info in the entropy coder.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_
+
+#include "typedefs.h"
+/* header file for coding tables for the pitch filter side-info in the entropy coder */
+/********************* Pitch Filter Lag Coefficient Tables ************************/
+
+/* tables for use with small pitch gain (the "Lo" tables) */
+
+/* cdfs for quantized pitch lags */
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Lo[127];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Lo[20];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Lo[2];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Lo[10];
+
+extern const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrLo[4];
+
+/* size of first cdf table */
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeLo[1];
+
+/* lower/upper index limits for each of the four lag components */
+extern const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagLo[4];
+extern const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagLo[4];
+
+/* initial index for arithmetic decoder */
+extern const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagLo[3];
+
+/* mean values of pitch filter lags */
+extern const double WebRtcIsac_kQMeanLag2Lo[19];
+extern const double WebRtcIsac_kQMeanLag3Lo[1];
+extern const double WebRtcIsac_kQMeanLag4Lo[9];
+
+extern const double WebRtcIsac_kQPitchLagStepsizeLo;
+
+
+/* tables for use with medium pitch gain (the "Mid" tables) */
+
+/* cdfs for quantized pitch lags */
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Mid[255];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Mid[36];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Mid[2];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Mid[20];
+
+extern const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrMid[4];
+
+/* size of first cdf table */
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeMid[1];
+
+/* lower/upper index limits for each of the four lag components */
+extern const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagMid[4];
+extern const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagMid[4];
+
+/* initial index for arithmetic decoder */
+extern const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagMid[3];
+
+/* mean values of pitch filter lags */
+extern const double WebRtcIsac_kQMeanLag2Mid[35];
+extern const double WebRtcIsac_kQMeanLag3Mid[1];
+extern const double WebRtcIsac_kQMeanLag4Mid[19];
+
+extern const double WebRtcIsac_kQPitchLagStepsizeMid;
+
+
+/* tables for use with large pitch gain (the "Hi" tables) */
+
+/* cdfs for quantized pitch lags */
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Hi[511];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Hi[68];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Hi[2];
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Hi[35];
+
+extern const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrHi[4];
+
+/* size of first cdf table */
+extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeHi[1];
+
+/* lower/upper index limits for each of the four lag components */
+/* NOTE(review): "kQindex..." breaks the "kQIndex..." casing used by the Lo/Mid tables */
+extern const WebRtc_Word16 WebRtcIsac_kQindexLowerLimitLagHi[4];
+extern const WebRtc_Word16 WebRtcIsac_kQindexUpperLimitLagHi[4];
+
+/* initial index for arithmetic decoder */
+extern const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagHi[3];
+
+/* mean values of pitch filter lags */
+extern const double WebRtcIsac_kQMeanLag2Hi[67];
+extern const double WebRtcIsac_kQMeanLag3Hi[1];
+extern const double WebRtcIsac_kQMeanLag4Hi[34];
+
+extern const double WebRtcIsac_kQPitchLagStepsizeHi;
+
+/* 4x4 transform matrix */
+extern const double WebRtcIsac_kTransform[4][4];
+
+/* transpose of the 4x4 transform matrix above */
+extern const double WebRtcIsac_kTransformTranspose[4][4];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/settings.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/settings.h
new file mode 100644
index 0000000..b7aed77
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/settings.h
@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * settings.h
+ *
+ * Declaration of #defines used in the iSAC codec
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SETTINGS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SETTINGS_H_
+
+/* sampling frequency (Hz) */
+#define FS                                      16000
+
+/* number of samples per frame (either 320 (20ms), 480 (30ms) or 960 (60ms)) */
+#define INITIAL_FRAMESAMPLES     960
+
+
+#define MAXFFTSIZE 2048
+#define NFACTOR 11
+
+
+
+/* do not modify the following; this will have to be modified if we have a 20ms framesize option */
+/*************************************************************************************************/
+/* milliseconds */
+#define FRAMESIZE                               30
+/* number of samples per frame processed in the encoder, 480 */
+#define FRAMESAMPLES                            480 /* ((FRAMESIZE*FS)/1000) */
+#define FRAMESAMPLES_HALF      240
+#define FRAMESAMPLES_QUARTER                    120
+/*************************************************************************************************/
+
+
+
+/* max number of samples per frame (= 60 ms frame) */
+#define MAX_FRAMESAMPLES      960
+#define MAX_SWBFRAMESAMPLES                     (MAX_FRAMESAMPLES * 2)
+/* number of samples per 10ms frame */
+#define FRAMESAMPLES_10ms                       ((10*FS)/1000)
+#define SWBFRAMESAMPLES_10ms                    (FRAMESAMPLES_10ms * 2)
+/* number of samples in 30 ms frame */
+#define FRAMESAMPLES_30ms            480
+/* number of subframes */
+#define SUBFRAMES                               6
+/* length of a subframe */
+#define UPDATE                                  80
+/* length of half a subframe (low/high band) */
+#define HALF_SUBFRAMELEN                        (UPDATE/2)
+/* samples of look ahead (in a half-band, so actually half the samples of look ahead @ FS) */
+#define QLOOKAHEAD                              24    /* 3 ms */
+/* order of AR model in spectral entropy coder */
+#define AR_ORDER                                6
+/* order of LP model in spectral entropy coder */
+#define LP_ORDER                                0
+
+/* window length (masking analysis) */
+#define WINLEN                                  256
+/* order of low-band pole filter used to approximate masking curve */
+#define ORDERLO                                 12
+/* order of hi-band pole filter used to approximate masking curve */
+#define ORDERHI                                 6
+
+#define UB_LPC_ORDER                            4
+#define UB_LPC_VEC_PER_FRAME                    2
+#define UB16_LPC_VEC_PER_FRAME                  4
+#define UB_ACTIVE_SUBFRAMES                     2
+#define UB_MAX_LPC_ORDER                        6
+#define UB_INTERPOL_SEGMENTS                    1
+#define UB16_INTERPOL_SEGMENTS                  3
+#define LB_TOTAL_DELAY_SAMPLES                 48
+enum ISACBandwidth {isac8kHz = 8, isac12kHz = 12, isac16kHz = 16};
+enum ISACBand{isacLowerBand = 0, isacUpperBand = 1};
+#define UB_LPC_GAIN_DIM                 SUBFRAMES
+#define FB_STATE_SIZE_WORD32                    6
+
+
+/* order for post_filter_bank */
+#define POSTQORDER                              3
+/* order for pre-filterbank */
+#define QORDER                                  3
+/* another order */
+#define QORDER_ALL                              (POSTQORDER+QORDER-1)
+/* for decimator */
+#define ALLPASSSECTIONS                         2
+
+
+/* array size for byte stream in number of bytes. */
+#define STREAM_SIZE_MAX     600 /* The old maximum size still needed for the decoding */
+#define STREAM_SIZE_MAX_30  200 /* 200 bytes = 53.4 kbit/s @ 30 ms frame length */
+#define STREAM_SIZE_MAX_60  400 /* 400 bytes = 53.4 kbit/s @ 60 ms frame length */
+
+/* storage size for bit counts */
+#define BIT_COUNTER_SIZE                        30
+/* maximum order of any AR model or filter */
+#define MAX_AR_MODEL_ORDER                      12//50
+
+
+/* For pitch analysis */
+#define PITCH_FRAME_LEN                         (FRAMESAMPLES_HALF) /* 30 ms  */
+#define PITCH_MAX_LAG                           140     /* 57 Hz  */
+#define PITCH_MIN_LAG                           20              /* 400 Hz */
+#define PITCH_MAX_GAIN                          0.45
+#define PITCH_MAX_GAIN_06                       0.27                /* PITCH_MAX_GAIN*0.6 */
+#define PITCH_MAX_GAIN_Q12      1843
+#define PITCH_LAG_SPAN2                         (PITCH_MAX_LAG/2-PITCH_MIN_LAG/2+5)
+#define PITCH_CORR_LEN2                         60     /* 15 ms  */
+#define PITCH_CORR_STEP2                        (PITCH_FRAME_LEN/4)
+#define PITCH_BW                 11     /* half the band width of correlation surface */
+#define PITCH_SUBFRAMES                         4
+#define PITCH_GRAN_PER_SUBFRAME                 5
+#define PITCH_SUBFRAME_LEN                      (PITCH_FRAME_LEN/PITCH_SUBFRAMES)
+#define PITCH_UPDATE                            (PITCH_SUBFRAME_LEN/PITCH_GRAN_PER_SUBFRAME)
+/* maximum number of peaks to be examined in correlation surface */
+#define PITCH_MAX_NUM_PEAKS                  10
+#define PITCH_PEAK_DECAY               0.85
+/* For weighting filter */
+#define PITCH_WLPCORDER                   6
+#define PITCH_WLPCWINLEN               PITCH_FRAME_LEN
+#define PITCH_WLPCASYM                   0.3                 /* asymmetry parameter */
+#define PITCH_WLPCBUFLEN               PITCH_WLPCWINLEN
+/* For pitch filter */
+#define PITCH_BUFFSIZE                   (PITCH_MAX_LAG + 50)  /* Extra 50 for fraction and LP filters */
+#define PITCH_INTBUFFSIZE               (PITCH_FRAME_LEN+PITCH_BUFFSIZE)
+/* Max rel. step for interpolation */
+#define PITCH_UPSTEP                1.5
+/* Max rel. step for interpolation */
+#define PITCH_DOWNSTEP                   0.67
+#define PITCH_FRACS                             8
+#define PITCH_FRACORDER                         9
+#define PITCH_DAMPORDER                         5
+#define PITCH_FILTDELAY                         1.5f
+/* stepsize for quantization of the pitch Gain */
+#define PITCH_GAIN_STEPSIZE                     0.125
+
+
+
+/* Order of high pass filter */
+#define HPORDER                                 2
+
+/* some mathematical constants */
+#define LOG2EXP                                 1.44269504088896       /* log2(exp) */
+#define PI                                      3.14159265358979
+
+/* Maximum number of iterations allowed to limit payload size */
+#define MAX_PAYLOAD_LIMIT_ITERATION             5
+
+/* Redundant Coding */
+#define RCU_BOTTLENECK_BPS                      16000
+#define RCU_TRANSCODING_SCALE                   0.40f
+#define RCU_TRANSCODING_SCALE_INVERSE           2.5f
+
+#define RCU_TRANSCODING_SCALE_UB                0.50f
+#define RCU_TRANSCODING_SCALE_UB_INVERSE        2.0f
+
+
+/* Define Error codes */
+/* 6000 General */
+#define ISAC_MEMORY_ALLOCATION_FAILED    6010
+#define ISAC_MODE_MISMATCH       6020
+#define ISAC_DISALLOWED_BOTTLENECK     6030
+#define ISAC_DISALLOWED_FRAME_LENGTH    6040
+#define ISAC_UNSUPPORTED_SAMPLING_FREQUENCY         6050
+
+/* 6200 Bandwidth estimator */
+#define ISAC_RANGE_ERROR_BW_ESTIMATOR    6240
+/* 6400 Encoder */
+#define ISAC_ENCODER_NOT_INITIATED     6410
+#define ISAC_DISALLOWED_CODING_MODE     6420
+#define ISAC_DISALLOWED_FRAME_MODE_ENCODER   6430
+#define ISAC_DISALLOWED_BITSTREAM_LENGTH            6440
+#define ISAC_PAYLOAD_LARGER_THAN_LIMIT              6450
+#define ISAC_DISALLOWED_ENCODER_BANDWIDTH           6460
+/* 6600 Decoder */
+#define ISAC_DECODER_NOT_INITIATED     6610
+#define ISAC_EMPTY_PACKET       6620
+#define ISAC_DISALLOWED_FRAME_MODE_DECODER   6630
+#define ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH  6640
+#define ISAC_RANGE_ERROR_DECODE_BANDWIDTH   6650
+#define ISAC_RANGE_ERROR_DECODE_PITCH_GAIN   6660
+#define ISAC_RANGE_ERROR_DECODE_PITCH_LAG   6670
+#define ISAC_RANGE_ERROR_DECODE_LPC     6680
+#define ISAC_RANGE_ERROR_DECODE_SPECTRUM   6690
+#define ISAC_LENGTH_MISMATCH      6730
+#define ISAC_RANGE_ERROR_DECODE_BANDWITH            6740
+#define ISAC_DISALLOWED_BANDWIDTH_MODE_DECODER      6750
+/* 6800 Call setup formats */
+#define ISAC_INCOMPATIBLE_FORMATS     6810
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SETTINGS_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/spectrum_ar_model_tables.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/spectrum_ar_model_tables.c
new file mode 100644
index 0000000..92b9c4d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/spectrum_ar_model_tables.c
@@ -0,0 +1,138 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "spectrum_ar_model_tables.h"
+#include "settings.h"
+
+/********************* AR Coefficient Tables ************************/
+/* All cdf tables below are cumulative distributions for the entropy coder,
+   scaled to 16 bits: entries rise monotonically from 0 to 65535. */
+
+/* cdf for quantized reflection coefficient 1 */
+const WebRtc_UWord16 WebRtcIsac_kQArRc1Cdf[12] = {
+ 0,  2,  4,  129,  7707,  57485,  65495,  65527,  65529,  65531,
+ 65533,  65535};
+
+/* cdf for quantized reflection coefficient 2 */
+const WebRtc_UWord16 WebRtcIsac_kQArRc2Cdf[12] = {
+ 0,  2,  4,  7,  531,  25298,  64525,  65526,  65529,  65531,
+ 65533,  65535};
+
+/* cdf for quantized reflection coefficient 3 */
+const WebRtc_UWord16 WebRtcIsac_kQArRc3Cdf[12] = {
+ 0,  2,  4,  6,  620,  22898,  64843,  65527,  65529,  65531,
+ 65533,  65535};
+
+/* cdf for quantized reflection coefficient 4 */
+const WebRtc_UWord16 WebRtcIsac_kQArRc4Cdf[12] = {
+ 0,  2,  4,  6,  35,  10034,  60733,  65506,  65529,  65531,
+ 65533,  65535};
+
+/* cdf for quantized reflection coefficient 5 */
+const WebRtc_UWord16 WebRtcIsac_kQArRc5Cdf[12] = {
+ 0,  2,  4,  6,  36,  7567,  56727,  65385,  65529,  65531,
+ 65533,  65535};
+
+/* cdf for quantized reflection coefficient 6 */
+const WebRtc_UWord16 WebRtcIsac_kQArRc6Cdf[12] = {
+ 0,  2,  4,  6,  14,  6579,  57360,  65409,  65529,  65531,
+ 65533,  65535};
+
+/* representation levels for quantized reflection coefficient 1
+   (11 levels, one per interval between the 12 cdf entries above) */
+const WebRtc_Word16 WebRtcIsac_kQArRc1Levels[11] = {
+ -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 2 */
+const WebRtc_Word16 WebRtcIsac_kQArRc2Levels[11] = {
+ -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 3 */
+const WebRtc_Word16 WebRtcIsac_kQArRc3Levels[11] = {
+-32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 4 */
+const WebRtc_Word16 WebRtcIsac_kQArRc4Levels[11] = {
+-32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 5 */
+const WebRtc_Word16 WebRtcIsac_kQArRc5Levels[11] = {
+-32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104
+};
+
+/* representation levels for quantized reflection coefficient 6 */
+const WebRtc_Word16 WebRtcIsac_kQArRc6Levels[11] = {
+-32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104
+};
+
+/* quantization boundary levels for reflection coefficients
+   (12 boundaries delimiting the 11 representation levels) */
+const WebRtc_Word16 WebRtcIsac_kQArBoundaryLevels[12] = {
+-32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, 32767
+};
+
+/* initial index for AR reflection coefficient quantizer and cdf table search */
+const WebRtc_UWord16 WebRtcIsac_kQArRcInitIndex[6] = {
+ 5,  5,  5,  5,  5,  5};
+
+/* pointers to AR cdf tables, indexed by reflection-coefficient order */
+const WebRtc_UWord16 *WebRtcIsac_kQArRcCdfPtr[AR_ORDER] = {
+  WebRtcIsac_kQArRc1Cdf, WebRtcIsac_kQArRc2Cdf, WebRtcIsac_kQArRc3Cdf,
+  WebRtcIsac_kQArRc4Cdf, WebRtcIsac_kQArRc5Cdf, WebRtcIsac_kQArRc6Cdf
+};
+
+/* pointers to AR representation levels tables */
+const WebRtc_Word16 *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER] = {
+  WebRtcIsac_kQArRc1Levels, WebRtcIsac_kQArRc2Levels, WebRtcIsac_kQArRc3Levels,
+  WebRtcIsac_kQArRc4Levels, WebRtcIsac_kQArRc5Levels, WebRtcIsac_kQArRc6Levels
+};
+
+
+/******************** GAIN Coefficient Tables ***********************/
+/* cdf for Gain coefficient */
+const WebRtc_UWord16 WebRtcIsac_kQGainCdf[19] = {
+ 0,  2,  4,  6,  8,  10,  12,  14,  16,  1172,
+ 11119,  29411,  51699,  64445,  65527,  65529,  65531,  65533,  65535};
+
+/* representation levels for quantized squared Gain coefficient */
+const WebRtc_Word32 WebRtcIsac_kQGain2Levels[18] = {
+// 17, 28, 46, 76, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000};
+/* NOTE(review): the first five entries are clamped to 128 relative to the
+   commented-out original table above — presumably a deliberate floor on the
+   smallest gains; confirm before changing. */
+ 128, 128, 128, 128, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000};
+/* quantization boundary levels for squared Gain coefficient */
+const WebRtc_Word32 WebRtcIsac_kQGain2BoundaryLevels[19] = {
+0, 21, 35, 59, 99, 166, 280, 475, 815, 1414, 2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF};
+
+/* pointers to Gain cdf table */
+const WebRtc_UWord16 *WebRtcIsac_kQGainCdf_ptr[1] = {WebRtcIsac_kQGainCdf};
+
+/* Gain initial index for gain quantizer and cdf table search */
+const WebRtc_UWord16 WebRtcIsac_kQGainInitIndex[1] = {11};
+
+/************************* Cosine Tables ****************************/
+/* Cosine table: 6 rows of 60 sampled cosines at increasing frequencies.
+   Peak amplitude is 512, which suggests Q9 fixed point — confirm against
+   the code that consumes this table. */
+const WebRtc_Word16 WebRtcIsac_kCos[6][60] = {
+{512,  512,  511,  510,  508,  507,  505,  502,  499,  496,  493,  489,  485,  480,  476,  470,  465,  459,  453,  447,
+440,  433,  426,  418,  410,  402,  394,  385,  376,  367,  357,  348,  338,  327,  317,  306,  295,  284,  273,  262,
+250,  238,  226,  214,  202,  190,  177,  165,  152,  139,  126,  113,  100,  87,  73,  60,  47,  33,  20,  7},
+{512,  510,  508,  503,  498,  491,  483,  473,  462,  450,  437,  422,  406,  389,  371,  352,  333,  312,  290,  268,
+244,  220,  196,  171,  145,  120,  93,  67,  40,  13,  -13,  -40,  -67,  -93,  -120,  -145,  -171,  -196,  -220,  -244,
+-268,  -290,  -312,  -333,  -352,  -371,  -389,  -406,  -422,  -437,  -450,  -462,  -473,  -483,  -491,  -498,  -503,  -508,  -510,  -512},
+{512,  508,  502,  493,  480,  465,  447,  426,  402,  376,  348,  317,  284,  250,  214,  177,  139,  100,  60,  20,
+-20,  -60,  -100,  -139,  -177,  -214,  -250,  -284,  -317,  -348,  -376,  -402,  -426,  -447,  -465,  -480,  -493,  -502,  -508,  -512,
+-512,  -508,  -502,  -493,  -480,  -465,  -447,  -426,  -402,  -376,  -348,  -317,  -284,  -250,  -214,  -177,  -139,  -100,  -60,  -20},
+{511,  506,  495,  478,  456,  429,  398,  362,  322,  279,  232,  183,  133,  80,  27,  -27,  -80,  -133,  -183,  -232,
+-279,  -322,  -362,  -398,  -429,  -456,  -478,  -495,  -506,  -511,  -511,  -506,  -495,  -478,  -456,  -429,  -398,  -362,  -322,  -279,
+-232,  -183,  -133,  -80,  -27,  27,  80,  133,  183,  232,  279,  322,  362,  398,  429,  456,  478,  495,  506,  511},
+{511,  502,  485,  459,  426,  385,  338,  284,  226,  165,  100,  33,  -33,  -100,  -165,  -226,  -284,  -338,  -385,  -426,
+-459,  -485,  -502,  -511,  -511,  -502,  -485,  -459,  -426,  -385,  -338,  -284,  -226,  -165,  -100,  -33,  33,  100,  165,  226,
+284,  338,  385,  426,  459,  485,  502,  511,  511,  502,  485,  459,  426,  385,  338,  284,  226,  165,  100,  33},
+{510,  498,  473,  437,  389,  333,  268,  196,  120,  40,  -40,  -120,  -196,  -268,  -333,  -389,  -437,  -473,  -498,  -510,
+-510,  -498,  -473,  -437,  -389,  -333,  -268,  -196,  -120,  -40,  40,  120,  196,  268,  333,  389,  437,  473,  498,  510,
+510,  498,  473,  437,  389,  333,  268,  196,  120,  40,  -40,  -120,  -196,  -268,  -333,  -389,  -437,  -473,  -498,  -510}
+};
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/spectrum_ar_model_tables.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/spectrum_ar_model_tables.h
new file mode 100644
index 0000000..159245b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/spectrum_ar_model_tables.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * spectrum_ar_model_tables.h
+ *
+ * This file contains declarations of tables with AR coefficients,
+ * Gain coefficients and cosine tables, defined in
+ * spectrum_ar_model_tables.c.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_
+
+#include "structs.h"
+
+/********************* AR Coefficient Tables ************************/
+/* cdf for quantized reflection coefficient 1 */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRc1Cdf[12];
+
+/* cdf for quantized reflection coefficient 2 */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRc2Cdf[12];
+
+/* cdf for quantized reflection coefficient 3 */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRc3Cdf[12];
+
+/* cdf for quantized reflection coefficient 4 */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRc4Cdf[12];
+
+/* cdf for quantized reflection coefficient 5 */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRc5Cdf[12];
+
+/* cdf for quantized reflection coefficient 6 */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRc6Cdf[12];
+
+/* quantization boundary levels for reflection coefficients */
+extern const WebRtc_Word16 WebRtcIsac_kQArBoundaryLevels[12];
+
+/* initial indices for AR reflection coefficient quantizer and cdf table search */
+extern const WebRtc_UWord16 WebRtcIsac_kQArRcInitIndex[AR_ORDER];
+
+/* pointers to AR cdf tables */
+extern const WebRtc_UWord16 *WebRtcIsac_kQArRcCdfPtr[AR_ORDER];
+
+/* pointers to AR representation levels tables
+   (the per-coefficient level tables are reached only through this array) */
+extern const WebRtc_Word16 *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER];
+
+
+/******************** GAIN Coefficient Tables ***********************/
+/* cdf for Gain coefficient */
+extern const WebRtc_UWord16 WebRtcIsac_kQGainCdf[19];
+
+/* representation levels for quantized squared Gain coefficient */
+extern const WebRtc_Word32 WebRtcIsac_kQGain2Levels[18];
+
+/* quantization boundary levels for squared Gain coefficient */
+extern const WebRtc_Word32 WebRtcIsac_kQGain2BoundaryLevels[19];
+
+/* pointer to Gain cdf table */
+extern const WebRtc_UWord16 *WebRtcIsac_kQGainCdf_ptr[1];
+
+/* Gain initial index for gain quantizer and cdf table search */
+extern const WebRtc_UWord16 WebRtcIsac_kQGainInitIndex[1];
+
+/************************* Cosine Tables ****************************/
+/* Cosine table */
+extern const WebRtc_Word16 WebRtcIsac_kCos[6][60];
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/structs.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/structs.h
new file mode 100644
index 0000000..7523ad6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/structs.h
@@ -0,0 +1,478 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * structs.h
+ *
+ * This header file contains all the structs used in the ISAC codec
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_
+
+
+#include "typedefs.h"
+#include "settings.h"
+#include "isac.h"
+
+/* Entropy-coder bitstream state: the encoded bytes plus the coder's
+   interval state (W_upper = upper boundary of interval W, streamval)
+   and the current position in the byte stream. */
+typedef struct Bitstreamstruct {
+
+  WebRtc_UWord8   stream[STREAM_SIZE_MAX];
+  WebRtc_UWord32  W_upper;
+  WebRtc_UWord32  streamval;
+  WebRtc_UWord32  stream_index;
+
+} Bitstr;
+
+/* Masking-filter state for the low (Lo) and high (Hi) band: analysis data
+   buffers, correlation buffers, and pre/post filter states (F and G parts). */
+typedef struct {
+
+  double    DataBufferLo[WINLEN];
+  double    DataBufferHi[WINLEN];
+
+  double    CorrBufLo[ORDERLO+1];
+  double    CorrBufHi[ORDERHI+1];
+
+  float    PreStateLoF[ORDERLO+1];
+  float    PreStateLoG[ORDERLO+1];
+  float    PreStateHiF[ORDERHI+1];
+  float    PreStateHiG[ORDERHI+1];
+  float    PostStateLoF[ORDERLO+1];
+  float    PostStateLoG[ORDERLO+1];
+  float    PostStateHiF[ORDERHI+1];
+  float    PostStateHiG[ORDERHI+1];
+
+  double    OldEnergy;
+
+} MaskFiltstr;
+
+
+/* Analysis (pre) filter-bank state. Double and float copies of the same
+   vectors are kept, presumably for separate double/float code paths —
+   confirm against the filter-bank implementation. */
+typedef struct {
+
+  //state vectors for each of the two analysis filters
+  double    INSTAT1[2*(QORDER-1)];
+  double    INSTAT2[2*(QORDER-1)];
+  double    INSTATLA1[2*(QORDER-1)];
+  double    INSTATLA2[2*(QORDER-1)];
+  double    INLABUF1[QLOOKAHEAD];
+  double    INLABUF2[QLOOKAHEAD];
+
+  float    INSTAT1_float[2*(QORDER-1)];
+  float    INSTAT2_float[2*(QORDER-1)];
+  float    INSTATLA1_float[2*(QORDER-1)];
+  float    INSTATLA2_float[2*(QORDER-1)];
+  float    INLABUF1_float[QLOOKAHEAD];
+  float    INLABUF2_float[QLOOKAHEAD];
+
+  /* High pass filter */
+  double    HPstates[HPORDER];
+  float    HPstates_float[HPORDER];
+
+} PreFiltBankstr;
+
+
+/* Synthesis (post) filter-bank state. */
+typedef struct {
+
+  //state vectors for each of the two filters
+  double    STATE_0_LOWER[2*POSTQORDER];
+  double    STATE_0_UPPER[2*POSTQORDER];
+
+  /* High pass filter */
+  double    HPstates1[HPORDER];
+  double    HPstates2[HPORDER];
+
+  float    STATE_0_LOWER_float[2*POSTQORDER];
+  float    STATE_0_UPPER_float[2*POSTQORDER];
+
+  float    HPstates1_float[HPORDER];
+  float    HPstates2_float[HPORDER];
+
+} PostFiltBankstr;
+
+/* Pitch-filter state. */
+typedef struct {
+
+  //data buffer for pitch filter
+  double    ubuf[PITCH_BUFFSIZE];
+
+  //low pass state vector
+  double    ystate[PITCH_DAMPORDER];
+
+  //old lag and gain
+  double    oldlagp[1];
+  double    oldgainp[1];
+
+} PitchFiltstr;
+
+/* Weighting-filter state used by the pitch analysis. */
+typedef struct {
+
+  //data buffer
+  double    buffer[PITCH_WLPCBUFLEN];
+
+  //state vectors
+  double    istate[PITCH_WLPCORDER];
+  double    weostate[PITCH_WLPCORDER];
+  double    whostate[PITCH_WLPCORDER];
+
+  //LPC window   -> should be a global array because constant
+  double    window[PITCH_WLPCWINLEN];
+
+} WeightFiltstr;
+
+/* Top-level pitch-analysis state: decimator/whitening buffers plus the
+   embedded pitch-filter and weighting-filter states. */
+typedef struct {
+
+  //for initial estimator
+  double         dec_buffer[PITCH_CORR_LEN2 + PITCH_CORR_STEP2 +
+                            PITCH_MAX_LAG/2 - PITCH_FRAME_LEN/2+2];
+  double        decimator_state[2*ALLPASSSECTIONS+1];
+  double        hp_state[2];
+
+  double        whitened_buf[QLOOKAHEAD];
+
+  double        inbuf[QLOOKAHEAD];
+
+  PitchFiltstr  PFstr_wght;
+  PitchFiltstr  PFstr;
+  WeightFiltstr Wghtstr;
+
+} PitchAnalysisStruct;
+
+
+
+/* Bandwidth-estimator state; kept as an instance together with the other
+   iSAC structs (see ISACMainStruct). */
+typedef struct {
+
+  /* Previous frame length (in ms)                                    */
+  WebRtc_Word32    prev_frame_length;
+
+  /* Previous RTP timestamp from received
+     packet (in samples relative beginning)                           */
+  WebRtc_Word32    prev_rec_rtp_number;
+
+  /* Send timestamp for previous packet (in ms using timeGetTime())   */
+  WebRtc_UWord32    prev_rec_send_ts;
+
+  /* Arrival time for previous packet (in ms using timeGetTime())     */
+  WebRtc_UWord32    prev_rec_arr_ts;
+
+  /* rate of previous packet, derived from RTP timestamps (in bits/s) */
+  float   prev_rec_rtp_rate;
+
+  /* Time since the last update of the BN estimate (in ms)            */
+  WebRtc_UWord32    last_update_ts;
+
+  /* Time since the last reduction (in ms)                            */
+  WebRtc_UWord32    last_reduction_ts;
+
+  /* How many times the estimate was updated in the beginning         */
+  WebRtc_Word32    count_tot_updates_rec;
+
+  /* The estimated bottle neck rate from there to here (in bits/s)    */
+  WebRtc_Word32  rec_bw;
+  float   rec_bw_inv;
+  float   rec_bw_avg;
+  float   rec_bw_avg_Q;
+
+  /* The estimated mean absolute jitter value,
+     as seen on this side (in ms)                                     */
+  float   rec_jitter;
+  float   rec_jitter_short_term;
+  float   rec_jitter_short_term_abs;
+  float   rec_max_delay;
+  float   rec_max_delay_avg_Q;
+
+  /* (assumed) bitrate for headers (bps)                              */
+  float   rec_header_rate;
+
+  /* The estimated bottle neck rate from here to there (in bits/s)    */
+  float    send_bw_avg;
+
+  /* The estimated mean absolute jitter value, as seen on
+     the other side (in ms)                                           */
+  float   send_max_delay_avg;
+
+  // number of packets received since last update
+  int num_pkts_rec;
+
+  int num_consec_rec_pkts_over_30k;
+
+  // flag for marking that a high speed network has been
+  // detected downstream
+  int hsn_detect_rec;
+
+  int num_consec_snt_pkts_over_30k;
+
+  // flag for marking that a high speed network has
+  // been detected upstream
+  int hsn_detect_snd;
+
+  WebRtc_UWord32 start_wait_period;
+
+  int in_wait_period;
+
+  int change_to_WB;
+
+  WebRtc_UWord32                 senderTimestamp;
+  WebRtc_UWord32                 receiverTimestamp;
+  //enum IsacSamplingRate incomingStreamSampFreq;
+  WebRtc_UWord16                 numConsecLatePkts;
+  float                        consecLatency;
+  WebRtc_Word16                  inWaitLatePkts;
+} BwEstimatorstr;
+
+
+/* Rate model used in channel-adaptive mode. */
+typedef struct {
+
+  /* boolean, flags if previous packet exceeded B.N. */
+  int    PrevExceed;
+  /* ms */
+  int    ExceedAgo;
+  /* packets left to send in current burst */
+  int    BurstCounter;
+  /* packets */
+  int    InitCounter;
+  /* ms remaining in buffer when next packet will be sent */
+  double StillBuffered;
+
+} RateModel;
+
+
+/* Scratch/permutation state for the FFT routines. */
+typedef struct {
+
+  unsigned int SpaceAlloced;
+  unsigned int MaxPermAlloced;
+  double Tmp0[MAXFFTSIZE];
+  double Tmp1[MAXFFTSIZE];
+  double Tmp2[MAXFFTSIZE];
+  double Tmp3[MAXFFTSIZE];
+  int Perm[MAXFFTSIZE];
+  int factor [NFACTOR];
+
+} FFTstr;
+
+
+/* The following struct is used to store data from encoding, to make it
+   fast and easy to construct a new bitstream with a different Bandwidth
+   estimate. All values (except framelength and minBytes) are double size to
+   handle 60 ms of data.
+*/
+typedef struct {
+
+  /* Used to keep track of if it is first or second part of 60 msec packet */
+  int         startIdx;
+
+  /* Frame length in samples */
+  WebRtc_Word16 framelength;
+
+  /* Pitch Gain */
+  int         pitchGain_index[2];
+
+  /* Pitch Lag */
+  double      meanGain[2];
+  int         pitchIndex[PITCH_SUBFRAMES*2];
+
+  /* LPC */
+  int         LPCmodel[2];
+  int         LPCindex_s[108*2]; /* KLT_ORDER_SHAPE = 108 */
+  int         LPCindex_g[12*2];  /* KLT_ORDER_GAIN = 12 */
+  double      LPCcoeffs_lo[(ORDERLO+1)*SUBFRAMES*2];
+  double      LPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*2];
+
+  /* Encode Spec */
+  WebRtc_Word16 fre[FRAMESAMPLES];
+  WebRtc_Word16 fim[FRAMESAMPLES];
+  WebRtc_Word16 AvgPitchGain[2];
+
+  /* Used in adaptive mode only */
+  int         minBytes;
+
+} ISAC_SaveEncData_t;
+
+
+/* Saved upper-band encoder data, serving the same purpose as
+   ISAC_SaveEncData_t above. */
+typedef struct {
+
+  int         indexLPCShape[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
+  double      lpcGain[SUBFRAMES<<1];
+  int         lpcGainIndex[SUBFRAMES<<1];
+
+  Bitstr      bitStreamObj;
+
+  WebRtc_Word16 realFFT[FRAMESAMPLES_HALF];
+  WebRtc_Word16 imagFFT[FRAMESAMPLES_HALF];
+} ISACUBSaveEncDataStruct;
+
+
+
+/* Lower-band encoder instance. */
+typedef struct {
+
+  Bitstr              bitstr_obj;
+  MaskFiltstr         maskfiltstr_obj;
+  PreFiltBankstr      prefiltbankstr_obj;
+  PitchFiltstr        pitchfiltstr_obj;
+  PitchAnalysisStruct pitchanalysisstr_obj;
+  FFTstr              fftstr_obj;
+  ISAC_SaveEncData_t  SaveEnc_obj;
+
+  int                 buffer_index;
+  WebRtc_Word16         current_framesamples;
+
+  float               data_buffer_float[FRAMESAMPLES_30ms];
+
+  int                 frame_nb;
+  double              bottleneck;
+  WebRtc_Word16         new_framelength;
+  double              s2nr;
+
+  /* Maximum allowed number of bits for a 30 msec packet */
+  WebRtc_Word16         payloadLimitBytes30;
+  /* Maximum allowed number of bits for a 60 msec packet */
+  WebRtc_Word16         payloadLimitBytes60;
+  /* Maximum allowed number of bits for both 30 and 60 msec packet */
+  WebRtc_Word16         maxPayloadBytes;
+  /* Maximum allowed rate in bytes per 30 msec packet */
+  WebRtc_Word16         maxRateInBytes;
+
+  /*---
+    If set to 1 iSAC will not adapt the frame-size, if used in
+    channel-adaptive mode. The initial value will be used for all rates.
+    ---*/
+  WebRtc_Word16         enforceFrameSize;
+
+  /*-----
+    This records the BWE index the encoder injected into the bit-stream.
+    It will be used in RCU. The same BWE index of the main payload will be in
+    the redundant payload. We can not retrieve it from BWE because it is
+    a recursive procedure (WebRtcIsac_GetDownlinkBwJitIndexImpl) and has to be
+    called only once per each encode.
+    -----*/
+  WebRtc_Word16         lastBWIdx;
+} ISACLBEncStruct;
+
+/* Upper-band encoder instance. */
+typedef struct {
+
+  Bitstr                  bitstr_obj;
+  MaskFiltstr             maskfiltstr_obj;
+  PreFiltBankstr          prefiltbankstr_obj;
+  FFTstr                  fftstr_obj;
+  ISACUBSaveEncDataStruct SaveEnc_obj;
+
+  int                     buffer_index;
+  float                   data_buffer_float[MAX_FRAMESAMPLES +
+                                            LB_TOTAL_DELAY_SAMPLES];
+  double                  bottleneck;
+  /* Maximum allowed number of bits for a 30 msec packet */
+  //WebRtc_Word16        payloadLimitBytes30;
+  /* Maximum allowed number of bits for both 30 and 60 msec packet */
+  //WebRtc_Word16        maxPayloadBytes;
+  WebRtc_Word16             maxPayloadSizeBytes;
+
+  double                  lastLPCVec[UB_LPC_ORDER];
+  WebRtc_Word16             numBytesUsed;
+  WebRtc_Word16             lastJitterInfo;
+} ISACUBEncStruct;
+
+
+
+/* Lower-band decoder instance. */
+typedef struct {
+
+  Bitstr          bitstr_obj;
+  MaskFiltstr     maskfiltstr_obj;
+  PostFiltBankstr postfiltbankstr_obj;
+  PitchFiltstr    pitchfiltstr_obj;
+  FFTstr          fftstr_obj;
+
+} ISACLBDecStruct;
+
+/* Upper-band decoder instance (no pitch filter, unlike the lower band). */
+typedef struct {
+
+  Bitstr          bitstr_obj;
+  MaskFiltstr     maskfiltstr_obj;
+  PostFiltBankstr postfiltbankstr_obj;
+  FFTstr          fftstr_obj;
+
+} ISACUBDecStruct;
+
+
+
+/* Lower-band codec: encoder/decoder pair. */
+typedef struct {
+
+  ISACLBEncStruct ISACencLB_obj;
+  ISACLBDecStruct ISACdecLB_obj;
+} ISACLBStruct;
+
+
+/* Upper-band codec: encoder/decoder pair. */
+typedef struct {
+
+  ISACUBEncStruct ISACencUB_obj;
+  ISACUBDecStruct ISACdecUB_obj;
+} ISACUBStruct;
+
+/*
+  This struct is used to take a snapshot of the entropy coder and LPC gains
+  right before encoding LPC gains. This allows us to go back to that state
+  if we like to limit the payload size.
+*/
+typedef struct {
+  /* 6 lower-band & 6 upper-band */
+  double       loFiltGain[SUBFRAMES];
+  double       hiFiltGain[SUBFRAMES];
+  /* Upper boundary of interval W */
+  WebRtc_UWord32 W_upper;
+  WebRtc_UWord32 streamval;
+  /* Index to the current position in bytestream */
+  WebRtc_UWord32 stream_index;
+  WebRtc_UWord8  stream[3];
+} transcode_obj;
+
+
+/* Top-level iSAC codec instance: lower/upper-band codecs, bandwidth
+   estimator, rate model and global configuration/state. */
+typedef struct {
+  // lower-band codec instance
+  ISACLBStruct              instLB;
+  // upper-band codec instance
+  ISACUBStruct              instUB;
+
+  // Bandwidth Estimator and model for the rate.
+  BwEstimatorstr            bwestimator_obj;
+  RateModel                 rate_data_obj;
+  double                    MaxDelay;
+
+  /* 0 = adaptive; 1 = instantaneous */
+  WebRtc_Word16               codingMode;
+
+  // overall bottleneck of the codec
+  WebRtc_Word32               bottleneck;
+
+  // QMF Filter state
+  WebRtc_Word32               analysisFBState1[FB_STATE_SIZE_WORD32];
+  WebRtc_Word32               analysisFBState2[FB_STATE_SIZE_WORD32];
+  WebRtc_Word32               synthesisFBState1[FB_STATE_SIZE_WORD32];
+  WebRtc_Word32               synthesisFBState2[FB_STATE_SIZE_WORD32];
+
+  // Error Code
+  WebRtc_Word16               errorCode;
+
+  // bandwidth of the encoded audio 8, 12 or 16 kHz
+  enum ISACBandwidth        bandwidthKHz;
+  // Sampling rate of audio, encoder and decoder, 8 or 16 kHz
+  enum IsacSamplingRate encoderSamplingRateKHz;
+  enum IsacSamplingRate decoderSamplingRateKHz;
+  // Flag to keep track of initializations, lower & upper-band
+  // encoder and decoder.
+  WebRtc_Word16               initFlag;
+
+  // Flag to indicate signal bandwidth switch
+  WebRtc_Word16               resetFlag_8kHz;
+
+  // Maximum allowed rate, measured in Bytes per 30 ms.
+  WebRtc_Word16               maxRateBytesPer30Ms;
+  // Maximum allowed payload-size, measured in Bytes.
+  WebRtc_Word16               maxPayloadSizeBytes;
+} ISACMainStruct;
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/source/transform.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/transform.c
new file mode 100644
index 0000000..97b801a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/source/transform.c
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "settings.h"
+#include "fft.h"
+#include "codec.h"
+#include "os_specific_inline.h"
+#include <math.h>
+
+/* Modulation tables used by the time<->frequency transforms below;
+   filled in once by WebRtcIsac_InitTransform(). */
+static double costab1[FRAMESAMPLES_HALF];
+static double sintab1[FRAMESAMPLES_HALF];
+static double costab2[FRAMESAMPLES_QUARTER];
+static double sintab2[FRAMESAMPLES_QUARTER];
+
+/* Fill the file-scope cosine/sine modulation tables used by
+ * WebRtcIsac_Time2Spec() and WebRtcIsac_Spec2time(). Must be called once
+ * before either transform is used.
+ *
+ * Declared with (void): an empty parameter list is a pre-ANSI
+ * unspecified-arguments declarator, not a prototype. */
+void WebRtcIsac_InitTransform(void)
+{
+  int k;
+  double fact, phase;
+
+  /* Half-length tables: phase step PI/FRAMESAMPLES_HALF starting at 0. */
+  fact = PI / (FRAMESAMPLES_HALF);
+  phase = 0.0;
+  for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+    costab1[k] = cos(phase);
+    sintab1[k] = sin(phase);
+    phase += fact;
+  }
+
+  /* Quarter-length tables: slightly smaller step, offset by half a step. */
+  fact = PI * ((double) (FRAMESAMPLES_HALF - 1)) / ((double) FRAMESAMPLES_HALF);
+  phase = 0.5 * fact;
+  for (k = 0; k < FRAMESAMPLES_QUARTER; k++) {
+    costab2[k] = cos(phase);
+    sintab2[k] = sin(phase);
+    phase += fact;
+  }
+}
+
+
+/* Transform two real time-domain signals into a spectrum in Q7.
+ *
+ * inre1, inre2       : inputs, FRAMESAMPLES_HALF samples each.
+ * outreQ7, outimQ7   : outputs, real/imag spectrum, FRAMESAMPLES_HALF
+ *                      values each, scaled by 128 (Q7) and rounded with
+ *                      WebRtcIsac_lrint.
+ * fftstr_obj         : FFT scratch state.
+ *
+ * Requires WebRtcIsac_InitTransform() to have filled costab/sintab.
+ * NOTE(review): floating-point evaluation order matters for bit-exact
+ * output, so the arithmetic is intentionally left exactly as written. */
+void WebRtcIsac_Time2Spec(double *inre1,
+                         double *inre2,
+                         WebRtc_Word16 *outreQ7,
+                         WebRtc_Word16 *outimQ7,
+                         FFTstr *fftstr_obj)
+{
+
+  int k;
+  int dims[1];
+  double tmp1r, tmp1i, xr, xi, yr, yi, fact;
+  double tmpre[FRAMESAMPLES_HALF], tmpim[FRAMESAMPLES_HALF];
+
+
+  dims[0] = FRAMESAMPLES_HALF;
+
+
+  /* Multiply with complex exponentials and combine into one complex vector */
+  fact = 0.5 / sqrt(FRAMESAMPLES_HALF);
+  for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+    tmp1r = costab1[k];
+    tmp1i = sintab1[k];
+    tmpre[k] = (inre1[k] * tmp1r + inre2[k] * tmp1i) * fact;
+    tmpim[k] = (inre2[k] * tmp1r - inre1[k] * tmp1i) * fact;
+  }
+
+
+  /* Get DFT (isign = -1: forward transform) */
+  WebRtcIsac_Fftns(1, dims, tmpre, tmpim, -1, 1.0, fftstr_obj);
+
+  /* Use symmetry to separate into two complex vectors and center frames in time around zero */
+  for (k = 0; k < FRAMESAMPLES_QUARTER; k++) {
+    xr = tmpre[k] + tmpre[FRAMESAMPLES_HALF - 1 - k];
+    yi = -tmpre[k] + tmpre[FRAMESAMPLES_HALF - 1 - k];
+    xi = tmpim[k] - tmpim[FRAMESAMPLES_HALF - 1 - k];
+    yr = tmpim[k] + tmpim[FRAMESAMPLES_HALF - 1 - k];
+
+    tmp1r = costab2[k];
+    tmp1i = sintab2[k];
+    /* 128.0 = 2^7: convert to Q7 with rounding. */
+    outreQ7[k] = (WebRtc_Word16)WebRtcIsac_lrint((xr * tmp1r - xi * tmp1i) * 128.0);
+    outimQ7[k] = (WebRtc_Word16)WebRtcIsac_lrint((xr * tmp1i + xi * tmp1r) * 128.0);
+    outreQ7[FRAMESAMPLES_HALF - 1 - k] = (WebRtc_Word16)WebRtcIsac_lrint((-yr * tmp1i - yi * tmp1r) * 128.0);
+    outimQ7[FRAMESAMPLES_HALF - 1 - k] = (WebRtc_Word16)WebRtcIsac_lrint((-yr * tmp1r + yi * tmp1i) * 128.0);
+  }
+}
+
+
+/* Inverse of WebRtcIsac_Time2Spec: transform a complex spectrum back into
+ * two real time-domain signals.
+ *
+ * inre, inim         : inputs, real/imag spectrum, FRAMESAMPLES_HALF
+ *                      values each.
+ * outre1, outre2     : outputs, FRAMESAMPLES_HALF samples each; also used
+ *                      in place as the IDFT work buffers.
+ * fftstr_obj         : FFT scratch state.
+ *
+ * Requires WebRtcIsac_InitTransform() to have filled costab/sintab.
+ * NOTE(review): floating-point evaluation order matters for bit-exact
+ * output, so the arithmetic is intentionally left exactly as written. */
+void WebRtcIsac_Spec2time(double *inre, double *inim, double *outre1, double *outre2, FFTstr *fftstr_obj)
+{
+
+  int k;
+  double tmp1r, tmp1i, xr, xi, yr, yi, fact;
+
+  int dims;
+
+  dims = FRAMESAMPLES_HALF;
+
+  for (k = 0; k < FRAMESAMPLES_QUARTER; k++) {
+    /* Move zero in time to beginning of frames */
+    tmp1r = costab2[k];
+    tmp1i = sintab2[k];
+    xr = inre[k] * tmp1r + inim[k] * tmp1i;
+    xi = inim[k] * tmp1r - inre[k] * tmp1i;
+    yr = -inim[FRAMESAMPLES_HALF - 1 - k] * tmp1r - inre[FRAMESAMPLES_HALF - 1 - k] * tmp1i;
+    yi = -inre[FRAMESAMPLES_HALF - 1 - k] * tmp1r + inim[FRAMESAMPLES_HALF - 1 - k] * tmp1i;
+
+    /* Combine into one vector,  z = x + j * y */
+    outre1[k] = xr - yi;
+    outre1[FRAMESAMPLES_HALF - 1 - k] = xr + yi;
+    outre2[k] = xi + yr;
+    outre2[FRAMESAMPLES_HALF - 1 - k] = -xi + yr;
+  }
+
+
+  /* Get IDFT (isign = 1: inverse transform, scaled by FRAMESAMPLES_HALF) */
+  WebRtcIsac_Fftns(1, &dims, outre1, outre2, 1, FRAMESAMPLES_HALF, fftstr_obj);
+
+
+  /* Demodulate and separate */
+  fact = sqrt(FRAMESAMPLES_HALF);
+  for (k = 0; k < FRAMESAMPLES_HALF; k++) {
+    tmp1r = costab1[k];
+    tmp1i = sintab1[k];
+    xr = (outre1[k] * tmp1r - outre2[k] * tmp1i) * fact;
+    outre2[k] = (outre2[k] * tmp1r + outre1[k] * tmp1i) * fact;
+    outre1[k] = xr;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACLongtest.txt b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACLongtest.txt
new file mode 100644
index 0000000..3f05224
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACLongtest.txt
@@ -0,0 +1,433 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+# Long-running QA driver for the iSAC codec test binary.  Arguments select the
+# platform build (x64 / LINUX / default win32 "2005") and the suite to run
+# ("wb", "swb", "API"; no argument or "all" runs everything).
+
+# Pick the test binary; the platform token is accepted in $1 or $2 because the
+# other position may carry the suite selector.  No arguments defaults to x64.
+if  [ "$1" = "x64" ] || [ "$2" = "x64" ] || [ "$#" -eq 0 ]
+    then
+    PLATFORM=_X64
+    ISAC=../x64/Release/ReleaseTest-API_2005.exe
+elif [ "$1" = "LINUX" ] || [ "$2" = "LINUX" ]
+    then
+    PLATFORM=_linux
+    ISAC=../ReleaseTest-API/isacswtest
+else
+    PLATFORM=_2005
+    ISAC=../win32/Release/ReleaseTest-API_2005.exe
+fi
+
+if  [ "$#" -eq 0 ] || [ "$1" = "all" ] || [ "$1" = "wb" ]
+    then
+    LOGFILE=logNormal"$PLATFORM".txt
+    echo "START ISAC WB TEST" > $LOGFILE
+    echo >> $LOGFILE
+
+    INFILES=$(cat InputFiles.txt)
+    SUBSET=$(cat InputFilesFew.txt)
+    CHANNELFILES=$(cat ChannelFiles.txt)
+    CHANNELLIST=($(cat ChannelFiles.txt))
+    INDIR=../data/orig
+    OUTDIR=../dataqa"$PLATFORM"
+    mkdir -p $OUTDIR
+    rm -f $OUTDIR/*
+    
+    idx=0
+    RATE=10000
+    FRAMESIZE=30
+    
+
+# For every input file: one instantaneous-mode run (rate/frame size rotate per
+# iteration) and one adaptive-mode run driven by a cycling channel file.
+    for file in $INFILES # loop over all input files
+      do
+      
+      echo "Input file: " $file
+      echo "-----------------------------------"
+      echo "Instantaneous with RATE " $RATE ", and Frame-size " $FRAMESIZE
+      $ISAC -I -B $RATE -FL $FRAMESIZE -FS 16 $INDIR/"$file" $OUTDIR/i_"$FRAMESIZE"_"$RATE"_"$file" >> $LOGFILE
+      echo
+      
+      name="${CHANNELLIST[$idx]}"
+      echo "Adaptive with channel file: " $name 
+      
+      $ISAC -B $INDIR/${CHANNELLIST[$idx]} -FS 16 $INDIR/"$file" $OUTDIR/a_${name%.*}_"$file" >> $LOGFILE
+      
+      echo
+      echo
+      
+#     alternate between 30 & 60 ms.
+      if [ $FRAMESIZE -eq 30 ]
+	  then
+	  FRAMESIZE=60
+      else
+	  FRAMESIZE=30
+      fi
+      
+#     sweep rate from 10000 to 32000 bits/sec in 2000 bps steps, then wrap
+      if [ $RATE -le 30000 ]
+	  then
+	  let "RATE=RATE+2000"
+      else
+	  let "RATE=10000"
+      fi
+      
+#     there are only three channel files; cycle idx over 0..2
+      if [ $idx -ge 2 ]; then
+	  idx=0
+      else
+	  let "idx=idx+1"
+      fi
+      
+    done
+
+    idx=0
+    
+#   loop over the subset of input files, alternating fixed 30 ms / 60 ms frames
+    for file in $SUBSET 
+      do
+      
+      if [ $idx -eq 0 ]; then
+	  $ISAC -B $INDIR/${CHANNELLIST[0]} -FL 30 -FIXED_FL -FS 16 $INDIR/"$file" $OUTDIR/a30_"$file" >> $LOGFILE
+	  idx=1
+      else
+	  $ISAC -B $INDIR/${CHANNELLIST[0]} -FL 60 -FIXED_FL -FS 16 $INDIR/"$file" $OUTDIR/a60_"$file" >> $LOGFILE
+	  idx=0
+      fi
+    done
+
+# NOTE(review): "$file" below still holds the last file from the loop above,
+# and the output is named a60_* while -FL is 30 -- confirm this is intended.
+    $ISAC -B $INDIR/${CHANNELLIST[0]} -INITRATE 25000 -FL 30 -FS 16 $INDIR/"$file" $OUTDIR/a60_Init25kbps_"$file" >> $LOGFILE
+
+    echo
+    echo WIDEBAND DONE!
+    echo
+    echo
+fi
+
+if  [ "$#" -eq 0 ] || [ "$1" = "all" ] || [ "$1" = "swb" ]
+    then
+
+    LOGFILE=logNormal_SWB"$PLATFORM".txt
+    echo "START ISAC SWB TEST" > $LOGFILE
+    echo >> $LOGFILE
+
+    echo STARTING TO TEST SUPER-WIDEBAND
+    
+    INFILES=$(cat InputFilesSWB.txt)
+    INDIR=../data/origswb
+    OUTDIR=../dataqaswb"$PLATFORM"
+    mkdir -p $OUTDIR
+    rm -f $OUTDIR/*
+    
+# Instantaneous-mode (-I) coding of every super-wideband (32 kHz) input file
+# at a fixed sweep of target rates, 30 ms frames.
+    for file in $INFILES
+      do
+      echo
+      echo "Input file: " $file
+      echo "--------------------------------"
+      for RATE in 12000 20000 32000 38000 45000 50000 56000  
+	do
+	
+	echo "Rate " $RATE
+	$ISAC -I -B $RATE -FL 30 -FS 32 $INDIR/"$file" $OUTDIR/swb_"$RATE"_"$file" >> $LOGFILE
+	echo
+	
+      done
+  
+    done
+fi
+
+if  [ "$#" -eq 0 ] || [ "$1" = "all" ] || [ "$1" = "API" ]
+    then
+
+    LOGFILE_API=logNormal_API"$PLATFORM".txt
+    echo
+    echo
+    echo "START ISAC API TEST" > $LOGFILE_API
+    echo >> $LOGFILE_API
+    idx=1
+    echo "                            Test Enforcement of frame-size"
+    echo "========================================================================================"
+    mkdir -p ../FrameSizeLim"$PLATFORM"
+    rm -f ../FrameSizeLim"$PLATFORM"/*
+    echo
+    echo "-- No enforcement; BN 10000"
+    echo
+    $ISAC -B 10000 -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Now Enforce 30 ms frame size with the same bottleneck"
+    echo "There should not be any 60 ms frame"
+    echo
+    $ISAC -B 10000 -FL 30 -FIXED_FL -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- No enforcement; BN 32000"
+    echo
+    $ISAC -B 32000 -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Now Enforce 60 ms frame size with the same bottleneck"
+    echo "There should not be any 30 ms frame"
+    echo 
+    $ISAC -B 32000 -FL 60 -FIXED_FL -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo
+    echo
+    echo
+
+    echo "                           Test Limiting of Payload Size and Rate"
+    echo "========================================================================================"
+    mkdir -p ../PayloadLim"$PLATFORM"
+    rm -f ../PayloadLim"$PLATFORM"/*
+    echo
+    echo
+    echo "-- No Limit, frame-size 60 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 60 -FS 16                ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload-size limit of 250, frame-size 60 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 60 -FS 16 -MAX 250       ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 33 kbps for 60 ms frame-size"
+    echo
+    $ISAC -I -B 32000 -FL 60 -FS 16 -MAXRATE 33000 ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo 
+    echo "________________________________________________________"
+    echo
+    echo
+
+    echo "-- No Limit, frame-size 30 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 30 -FS 16                ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo    
+    echo "-- Payload-size limit of 130, frame-size 30 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 30 -FS 16 -MAX 130       ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 33 kbps for 30 ms frame-size, wideband"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 16 -MAXRATE 33000 ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+
+    echo "-- No limit for 32 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload limit of 130 bytes for 32 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 32 -MAX 130 ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+
+    echo "-- No limit, Rate 45 kbps, 30 ms, SUPER-WIDEBAND, 12 kHz"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32               ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 46 kbps for 42 kbps, 30 ms, SUPER-WIDEBAND, 12 kHz"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32 -MAXRATE 46000 ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload limit of 170 bytes for 45 kbps, 30 ms, SUPER-WIDEBAND, 12 kHz"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32 -MAX 170       ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+  
+    echo "-- No limit for 56 kbps, 30 ms, SUPER-WIDEBAND, 16 kHz"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32                ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload limit of 200 bytes for 56 kbps 30 ms, SUPER-WIDEBAND, 16 kHz"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32 -MAX 200       ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 57 kbps for 56 kbps 30 ms, SUPER-WIDEBAND, 16 kHz"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32 -MAXRATE 57000 ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+    echo
+    echo
+    echo
+    
+    echo "                                    Test Trans-Coding"
+    echo "========================================================================================"
+    mkdir -p ../Transcoding"$PLATFORM"
+    rm -f ../Transcoding"$PLATFORM"/*
+    echo
+    echo
+    echo "-- 20 kbps, 30 ms, WIDEBAND"
+    echo
+    $ISAC -I -B 20000 -FL 30 -FS 16          ../data/orig/speech_and_misc_WB.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans20WB.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 32 kbps trans-coding to 20 kbps, 30 ms, WIDEBAND"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 16  -T 20000  ../Transcoding"$PLATFORM"/APITest_32T20.pcm \
+	../data/orig/speech_and_misc_WB.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+
+    echo
+    echo
+    echo "-- 38 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 38000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans38.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 45 kbps trans-coding to 38 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32  -T 38000  ../Transcoding"$PLATFORM"/APITest_45T38.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+ 
+    echo
+    echo
+    echo "-- 20 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 20000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans20SWB.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+
+    echo
+    echo
+    
+    echo "-- 45 kbps trans-coding to 20 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32  -T 20000  ../Transcoding"$PLATFORM"/APITest_45T20.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+    echo "-- 50 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 50000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans50.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 56 kbps trans-coding to 50 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32  -T 50000  ../Transcoding"$PLATFORM"/APITest_56T50.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 56 kbps trans-coding to 20 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32  -T 20000 ../Transcoding"$PLATFORM"/APITest_56T20.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+    echo 
+    echo
+    echo
+    
+    echo "                                         Test FEC"
+    echo "========================================================================================"
+    mkdir -p ../FEC"$PLATFORM"
+    rm -f ../FEC"$PLATFORM"/*
+    echo
+    echo
+    echo "-- 32 kbps with transcoding to 20kbps, 30 ms, WIDEBAND, 10% packet loss"
+    $ISAC -I -B 32000 -FL 30 -FS 16 -PL 10 -T 20000 ../FEC"$PLATFORM"/APITest_PL10_WB30_T20.pcm \
+	../data/orig/speech_and_misc_WB.pcm ../FEC"$PLATFORM"/APITest_PL10_WB30.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 32 kbps, 60 ms, WIDEBAND, 10% packet loss"
+    $ISAC -I -B 32000 -FL 60 -FS 16 -PL 10 ../data/orig/speech_and_misc_WB.pcm \
+	../FEC"$PLATFORM"/APITest_PL10_WB60.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 32 kbps with transcoding to 20 kbps, 30 ms, SUPER-WIDEBAND, 10% packet loss"
+    $ISAC -I -B 32000 -FL 30 -FS 32 -PL 10 -T 20000 ../FEC"$PLATFORM"/APITest_PL10_SWB_8kHz_T20.pcm \
+	../data/origswb/jstest_32.pcm ../FEC"$PLATFORM"/APITest_PL10_SWB_8kHz.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+
+    echo "-- 45 kbps with Trascoding to 38 kbps, 30 ms, SUPER-WIDEBAND, 10% packet loss"
+    $ISAC -I -B 45000 -FL 30 -FS 32 -PL 10 -T 38000 ../FEC"$PLATFORM"/APITest_PL10_SWB_12kHz_T38.pcm \
+	../data/origswb/jstest_32.pcm ../FEC"$PLATFORM"/APITest_PL10_SWB_12kHz.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+
+    echo "-- 56 kbps with transcoding to 50 kbps, 30 ms, SUPER-WIDEBAND, 10% packet loss"
+    $ISAC -I -B 56000 -FL 30 -FS 32 -PL 10 -T 50000 ../FEC"$PLATFORM"/APITest_PL10_SWB_16kHz_T50.pcm \
+	../data/origswb/jstest_32.pcm ../FEC"$PLATFORM"/APITest_PL10_SWB_16kHz.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+fi
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACfault.txt b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACfault.txt
new file mode 100644
index 0000000..63829a4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACfault.txt
@@ -0,0 +1,80 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+# Fault-injection QA script for iSAC: runs the ReleaseTest-API binary with the
+# -F fault scenarios over WB (16 kHz) and SWB (32 kHz) input, each at the
+# maximum target rate for its bandwidth.
+
+# Select the test binary for the requested platform (default: x64).
+if  [ "$1" = "x64" ] || [ "$#" -eq 0 ]
+    then
+    PLATFORM=_X64
+    ISAC=../x64/Release/ReleaseTest-API_2005.exe
+elif [ "$1" = "2005" ]
+    then
+    PLATFORM=_2005
+    ISAC=../win32/Release/ReleaseTest-API_2005.exe
+elif [ "$1" = "LINUX" ]
+    then
+    PLATFORM=_linux
+    ISAC=../ReleaseTest-API/isacswtest
+else
+    echo Unknown Platform
+    exit 2
+fi
+
+LOGFILE=logfault$PLATFORM.txt
+echo "START FAULT TEST" > $LOGFILE
+
+
+# NOTE(review): INFILES/SUBSET/CHANNELFILES/CHANNELLIST/INDIR/INDIRSWB/index1
+# are set here but never used below; kept for parity with the companion
+# QA scripts.
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+INDIRSWB=../data/origswb
+OUTDIR=../dataqaft$PLATFORM
+mkdir -p $OUTDIR
+
+# maximum Target rate for different bandwidths, indexed in step with SAMPFREQ
+TARGETRATE=( 32000 32000 44000 56000 )
+SAMPFREQ=( 16 32 32 32 )
+# Fault scenarios run in the inner loop; 8 is absent (presumably deliberate --
+# TODO confirm) and 10 is handled separately below.
+FAULTTEST=(1 2 3 4 5 6 7 9)
+
+index1=0
+
+file_wb=../data/orig/16kHz.pcm
+file_swb=../data/origswb/32kHz.pcm
+
+for idx in 0 1 2 3
+  do
+# Pick the input file matching this iteration's sampling frequency.
+  echo
+  echo "Sampling Frequency " ${SAMPFREQ[idx]} "kHz, Rate " ${TARGETRATE[idx]} "bps."
+  echo "---------------------------------------------------"
+  if [ ${SAMPFREQ[idx]} -eq 16 ]; then
+    file=$file_wb
+  else
+    file=$file_swb
+  fi
+
+  for testnr in ${FAULTTEST[*]}
+    do
+    echo "Running Fault Test " $testnr
+# BUGFIX: was ">> LOGFILE" (a literal file named LOGFILE); append to $LOGFILE.
+    $ISAC -I -B "${TARGETRATE[idx]}" -F $testnr -FS "${SAMPFREQ[idx]}" "$file" \
+	$OUTDIR/ft"$testnr"_"${TARGETRATE[idx]}"_"${SAMPFREQ[idx]}".pcm >> $LOGFILE
+    echo
+
+  done
+
+# Fault test number 10, error in bitstream
+  echo "Running Fault Test 10"
+  $ISAC -I -B "${TARGETRATE[idx]}" -F 10        -FS "${SAMPFREQ[idx]}" "$file" \
+    $OUTDIR/ft10_"${TARGETRATE[idx]}"_"${SAMPFREQ[idx]}".pcm >> $LOGFILE
+  echo
+  echo "Running Fault Test 10 with packetloss"
+  $ISAC -I -B "${TARGETRATE[idx]}" -F 10 -PL 10 -FS "${SAMPFREQ[idx]}" "$file" \
+    $OUTDIR/ft10plc_"${TARGETRATE[idx]}"_"${SAMPFREQ[idx]}".pcm >> $LOGFILE
+  echo
+done
+
+echo
+echo
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACfixfloat.txt b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACfixfloat.txt
new file mode 100644
index 0000000..4cda78e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/QA/runiSACfixfloat.txt
@@ -0,0 +1,47 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+# Fixed-point vs floating-point cross-test for iSAC: runs testFixFloat in
+# both directions (-m 1 / -m 2 -- TODO confirm which is fix->float vs
+# float->fix) over the input and channel file lists, logging to logfxfl.txt.
+
+LOGFILE=logfxfl.txt
+echo "START FIX-FLOAT TEST" > $LOGFILE
+
+
+ISACFIXFLOAT=../../../fix/test/testFixFloat.exe
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+OUTDIR=../dataqafxfl
+mkdir -p $OUTDIR
+
+index1=0
+
+# Full matrix: every input file against every channel file, with PLC enabled,
+# once in each -m direction.
+for file in $INFILES # loop over all input files
+  do
+  
+  for channel in $CHANNELFILES
+	do
+	let "index1=index1+1"
+
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -PLC $INDIR/"$file" $OUTDIR/flfx$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -PLC $INDIR/"$file" $OUTDIR/fxfl$index1"$file" >> $LOGFILE
+  done
+
+done
+
+index1=0
+
+# Narrowband (-NB 1/2) runs over the smaller input subset.
+# NOTE(review): "$channel" below is left over from the loop above, so every
+# narrowband run uses the LAST channel file -- confirm this is intended.
+for file in $SUBSET # loop over the subset of input files
+  do
+	let "index1=index1+1"
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -NB 1 $INDIR/"$file" $OUTDIR/flfxnb1_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -NB 1 $INDIR/"$file" $OUTDIR/fxflnb1_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -NB 2 -PLC $INDIR/"$file" $OUTDIR/flfxnb2_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -NB 2 -PLC $INDIR/"$file" $OUTDIR/fxflnb2_$index1"$file" >> $LOGFILE
+done
+
+echo DONE!
+
+
+
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/ReleaseTest-API/ReleaseTest-API.cc b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/ReleaseTest-API/ReleaseTest-API.cc
new file mode 100644
index 0000000..19cff9e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/ReleaseTest-API/ReleaseTest-API.cc
@@ -0,0 +1,1050 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ReleaseTest-API.cpp : Defines the entry point for the console application.
+//
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <ctype.h>
+#include <iostream>
+
+/* include API */
+#include "isac.h"
+#include "utility.h"
+
+/* Defines */
+#define SEED_FILE "randseed.txt"  /* Used when running decoder on garbage data  */
+#define MAX_FRAMESAMPLES     960  /* max number of samples per frame
+                                      (= 60 ms frame & 16 kHz) or
+                                      (= 30 ms frame & 32 kHz)                  */
+#define FRAMESAMPLES_10ms	 160   /* number of samples per 10ms frame          */
+#define SWBFRAMESAMPLES_10ms 320
+//#define FS		        	16000 /* sampling frequency (Hz) */
+
+#ifdef WIN32
+#define CLOCKS_PER_SEC      1000  /* Runtime statistics */
+#endif
+
+
+
+
+using namespace std;
+
+int main(int argc, char* argv[])
+{
+
+    char inname[100], outname[100], bottleneck_file[100], vadfile[100];
+	FILE *inp, *outp, *f_bn=NULL, *vadp, *bandwidthp;
+	int framecnt, endfile;
+
+	int i, errtype, VADusage = 0, packetLossPercent = 0;
+	WebRtc_Word16 CodingMode;
+	WebRtc_Word32 bottleneck;
+	WebRtc_Word16 framesize = 30;           /* ms */
+	int cur_framesmpls, err;
+
+	/* Runtime statistics */
+	double starttime, runtime, length_file;
+
+	WebRtc_Word16 stream_len = 0;
+	WebRtc_Word16 declen, lostFrame = 0, declenTC = 0;
+
+	WebRtc_Word16 shortdata[SWBFRAMESAMPLES_10ms];
+	WebRtc_Word16 vaddata[SWBFRAMESAMPLES_10ms*3];
+	WebRtc_Word16 decoded[MAX_FRAMESAMPLES << 1];
+	WebRtc_Word16 decodedTC[MAX_FRAMESAMPLES << 1];
+	WebRtc_UWord16 streamdata[500];
+	WebRtc_Word16	speechType[1];
+    WebRtc_Word16 rateBPS = 0;
+    WebRtc_Word16 fixedFL = 0;
+    WebRtc_Word16 payloadSize = 0;
+    WebRtc_Word32 payloadRate = 0;
+    int setControlBWE = 0;
+    short FL, testNum;
+	char version_number[20];
+    FILE  *plFile;
+    WebRtc_Word32 sendBN;
+
+#ifdef _DEBUG
+	FILE *fy;
+	double kbps;
+#endif /* _DEBUG */
+	int totalbits =0;
+	int totalsmpls =0;
+
+    /* If use GNS file */
+    FILE *fp_gns = NULL;
+	char gns_file[100];
+    short maxStreamLen30 = 0;
+    short maxStreamLen60 = 0;
+    short sampFreqKHz = 32;
+    short samplesIn10Ms;
+    short useAssign = 0;
+    //FILE logFile;
+    bool doTransCoding = false;
+    WebRtc_Word32 rateTransCoding = 0;
+    WebRtc_UWord16 streamDataTransCoding[600];
+    WebRtc_Word16 streamLenTransCoding = 0;
+    FILE* transCodingFile = NULL;
+    FILE* transcodingBitstream = NULL;
+    WebRtc_UWord32 numTransCodingBytes = 0;
+
+	/* only one structure used for ISAC encoder */
+	ISACStruct* ISAC_main_inst;
+    ISACStruct* decoderTransCoding;
+
+	BottleNeckModel       BN_data;
+
+#ifdef _DEBUG
+	fy = fopen("bit_rate.dat", "w");
+	fclose(fy);
+	fy = fopen("bytes_frames.dat", "w");
+	fclose(fy);
+#endif /* _DEBUG */
+
+	/* Handling wrong input arguments in the command line */
+	if((argc<3) || (argc>17))  {
+		printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("\n");
+        WebRtcIsac_version(version_number);
+        printf("iSAC-swb version %s \n\n", version_number);
+
+        printf("Usage:\n\n");
+        printf("./kenny.exe [-I] bottleneck_value infile outfile \n\n");
+        printf("with:\n");
+        printf("[-FS num]       :   sampling frequency in kHz, valid values are 16 & 32,\n");
+        printf("                    with 16 as default.\n");
+        printf("[-I]            :   if -I option is specified, the coder will use\n");
+        printf("                    an instantaneous Bottleneck value. If not, it\n");
+        printf("                    will be an adaptive Bottleneck value.\n\n");
+        printf("[-assign]       :   Use Assign API.\n");
+        printf("[-B num]        :   the value of the bottleneck provided either\n");
+        printf("                    as a fixed value in bits/sec (e.g. 25000) or\n");
+        printf("                    read from a file (e.g. bottleneck.txt)\n\n");
+        printf("[-INITRATE num] :   Set a new value for initial rate. Note! Only used in \n");
+        printf("                    adaptive mode.\n\n");
+        printf("[-FL num]       :   Set (initial) frame length in msec. Valid length are \n");
+        printf("                    30 and 60 msec.\n\n");
+        printf("[-FIXED_FL]     :   Frame length will be fixed to initial value.\n\n");
+        printf("[-MAX num]      :   Set the limit for the payload size of iSAC in bytes. \n");
+        printf("                    Minimum 100 maximum 400.\n\n");
+        printf("[-MAXRATE num]  :   Set the maxrate for iSAC in bits per second. \n");
+        printf("                    Minimum 32000, maximum 53400.\n\n");
+        printf("[-F num]        :   if -F option is specified, the test function\n");
+        printf("                    will run the iSAC API fault scenario specified by the\n");
+        printf("                    supplied number.\n");
+        printf("                    F 1 - Call encoder prior to init encoder call\n");
+        printf("                    F 2 - Call decoder prior to init decoder call\n");
+        printf("                    F 3 - Call decoder prior to encoder call\n");
+        printf("                    F 4 - Call decoder with a too short coded sequence\n");
+        printf("                    F 5 - Call decoder with a too long coded sequence\n");
+        printf("                    F 6 - Call decoder with random bit stream\n");
+        printf("                    F 7 - Call init encoder/decoder at random during a call\n");
+        printf("                    F 8 - Call encoder/decoder without having allocated memory \n");
+        printf("                          for encoder/decoder instance\n");
+        printf("                    F 9 - Call decodeB without calling decodeA\n");
+        printf("                    F 10 - Call decodeB with garbage data\n");
+        printf("[-PL num]       :   if -PL option is specified \n");
+        printf("[-T rate file]  :   test trans-coding with target bottleneck 'rate' bits/sec\n");
+        printf("                    the output file is written to 'file'\n");
+        printf("[-LOOP num]     :   number of times to repeat coding the input file for stress testing\n");
+        //printf("[-CE num]       :   Test of APIs used by Conference Engine.\n");
+        //printf("                    CE 1 - getNewBitstream, getBWE \n");
+        //printf("                    (CE 2 - RESERVED for transcoding)\n");
+        //printf("                    CE 3 - getSendBWE, setSendBWE.  \n\n");
+        //printf("-L filename     :   write the logging info into file (appending)\n");
+        printf("infile          :   Normal speech input file\n\n");
+        printf("outfile         :   Speech output file\n\n");
+    	exit(0);
+	}
+
+    /* Print version number */
+    printf("-------------------------------------------------\n");
+    WebRtcIsac_version(version_number);
+    printf("iSAC version %s \n\n", version_number);
+
+    /* Loop over all command line arguments */
+	CodingMode = 0;
+	testNum = 0;
+    useAssign = 0;
+    //logFile = NULL;
+    char transCodingFileName[500];
+    WebRtc_Word16 totFileLoop = 0;
+    WebRtc_Word16 numFileLoop = 0;
+	for (i = 1; i < argc-2;i++)
+    {
+        if(!strcmp("-LOOP", argv[i]))
+        {
+            i++;
+            totFileLoop = (WebRtc_Word16)atol(argv[i]);
+            if(totFileLoop <= 0)
+            {
+                fprintf(stderr, "Invalid number of runs for the given input file, %d.", totFileLoop);
+                exit(0);
+            }
+        }
+
+        if(!strcmp("-T", argv[i]))
+        {
+            doTransCoding = true;
+            i++;
+            rateTransCoding = atoi(argv[i]);
+            i++;
+            strcpy(transCodingFileName, argv[i]);
+        }
+
+        /*Should we use assign API*/
+        if(!strcmp("-assign", argv[i]))
+        {
+            useAssign = 1;
+        }
+
+        /* Set Sampling Rate */
+        if(!strcmp("-FS", argv[i]))
+        {
+            i++;
+            sampFreqKHz = atoi(argv[i]);
+        }
+
+        /* Instantaneous mode */
+		if(!strcmp ("-I", argv[i]))
+        {
+			printf("Instantaneous BottleNeck\n");
+			CodingMode = 1;
+		}
+
+        /* Set (initial) bottleneck value */
+        if(!strcmp ("-INITRATE", argv[i]))	{
+			rateBPS = atoi(argv[i + 1]);
+            setControlBWE = 1;
+            if((rateBPS < 10000) || (rateBPS > 32000))
+            {
+				printf("\n%d is not a initial rate. Valid values are in the range 10000 to 32000.\n", rateBPS);
+				exit(0);
+            }
+			printf("New initial rate: %d\n", rateBPS);
+			i++;
+		}
+
+        /* Set (initial) framelength */
+        if(!strcmp ("-FL", argv[i]))	{
+			framesize = atoi(argv[i + 1]);
+            if((framesize != 30) && (framesize != 60))
+            {
+				printf("\n%d is not a valid frame length. Valid length are 30 and 60 msec.\n", framesize);
+				exit(0);
+            }
+            setControlBWE = 1;
+			printf("Frame Length: %d\n", framesize);
+			i++;
+		}
+
+        /* Fixed frame length */
+        if(!strcmp ("-FIXED_FL", argv[i]))
+        {
+			fixedFL = 1;
+            setControlBWE = 1;
+			printf("Fixed Frame Length\n");
+		}
+
+        /* Set maximum allowed payload size in bytes */
+        if(!strcmp ("-MAX", argv[i]))	{
+			payloadSize = atoi(argv[i + 1]);
+            printf("Maximum Payload Size: %d\n", payloadSize);
+			i++;
+		}
+
+        /* Set maximum rate in bytes */
+        if(!strcmp ("-MAXRATE", argv[i]))	{
+			payloadRate = atoi(argv[i + 1]);
+            printf("Maximum Rate in kbps: %d\n", payloadRate);
+			i++;
+		}
+
+        /* Test of fault scenarious */
+        if(!strcmp ("-F", argv[i]))
+        {
+			testNum = atoi(argv[i + 1]);
+			printf("Fault test: %d\n", testNum);
+			if(testNum < 1 || testNum > 10)
+            {
+				printf("\n%d is not a valid Fault Scenario number. Valid Fault Scenarios are numbered 1-10.\n", testNum);
+				exit(0);
+			}
+			i++;
+		}
+
+        /* Packet loss test */
+		if(!strcmp ("-PL", argv[i]))
+        {
+			if( isdigit( *argv[i+1] ) )
+            {
+				packetLossPercent = atoi( argv[i+1] );
+				if( (packetLossPercent < 0) | (packetLossPercent > 100) )
+                {
+					printf( "\nInvalid packet loss perentage \n" );
+					exit( 0 );
+				}
+                if( packetLossPercent > 0 )
+                {
+					printf( "Simulating %d %% of independent packet loss\n", packetLossPercent );
+                }
+                else
+                {
+					printf( "\nNo Packet Loss Is Simulated \n" );
+                }
+            }
+            else
+            {
+				plFile = fopen( argv[i+1], "rb" );
+				if( plFile == NULL )
+                {
+					printf( "\n couldn't open the frameloss file: %s\n", argv[i+1] );
+					exit( 0 );
+				}
+				printf( "Simulating packet loss through the given channel file: %s\n", argv[i+1] );
+			}
+			i++;
+		}
+
+        /* Random packetlosses */
+		if(!strcmp ("-rnd", argv[i]))
+        {
+			srand((unsigned int)time(NULL) );
+			printf( "Random pattern in lossed packets \n" );
+		}
+
+        /* Use gns file */
+		if(!strcmp ("-G", argv[i]))
+        {
+			sscanf(argv[i + 1], "%s", gns_file);
+			fp_gns = fopen(gns_file, "rb");
+			if(fp_gns  == NULL)
+            {
+				printf("Cannot read file %s.\n", gns_file);
+				exit(0);
+			}
+			i++;
+		}
+
+
+        // make it with '-B'
+        /* Get Bottleneck value */
+        if(!strcmp("-B", argv[i]))
+        {
+            i++;
+            bottleneck = atoi(argv[i]);
+            if(bottleneck == 0)
+            {
+                sscanf(argv[i], "%s", bottleneck_file);
+                f_bn = fopen(bottleneck_file, "rb");
+                if(f_bn  == NULL)
+                {
+                    printf("Error No value provided for BottleNeck and cannot read file %s.\n", bottleneck_file);
+                    exit(0);
+                }
+                else
+                {
+                    printf("reading bottleneck rates from file %s\n\n",bottleneck_file);
+                    if(fscanf(f_bn, "%d", &bottleneck) == EOF)
+                    {
+                        /* Set pointer to beginning of file */
+                        fseek(f_bn, 0L, SEEK_SET);
+                        if (fscanf(f_bn, "%d", &bottleneck) == EOF) {
+                            exit(0);
+                        }
+                    }
+
+                    /*	Bottleneck is a cosine function
+                    *	Matlab code for writing the bottleneck file:
+                    *	BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi);
+                    *	fid = fopen('bottleneck.txt', 'wb');
+                    *	fprintf(fid, '%d\n', BottleNeck_10ms); fclose(fid);
+                    */
+                }
+            }
+            else
+            {
+                printf("\nfixed bottleneck rate of %d bits/s\n\n", bottleneck);
+            }
+        }
+        /* Run Conference Engine APIs */
+        //     Do not test it in the first release
+        //
+        //     if(!strcmp ("-CE", argv[i]))
+        //     {
+        //         testCE = atoi(argv[i + 1]);
+        //         if(testCE==1)
+        //         {
+        //             i++;
+        //             scale = (float)atof( argv[i+1] );
+        //         }
+        //         else if(testCE == 2)
+        //         {
+        //             printf("\nCE-test 2 (transcoding) not implemented.\n");
+        //             exit(0);
+        //         }
+        //         else if(testCE < 1 || testCE > 3)
+        //         {
+        //             printf("\n%d is not a valid CE-test number. Valid CE tests are 1-3.\n", testCE);
+        //             exit(0);
+        //         }
+        //         printf("CE-test number: %d\n", testCE);
+        //         i++;
+        //     }
+    }
+
+	if(CodingMode == 0)
+	{
+		printf("\nAdaptive BottleNeck\n");
+	}
+
+    switch(sampFreqKHz)
+    {
+    case 16:
+        {
+            printf("iSAC Wideband.\n");
+            samplesIn10Ms = FRAMESAMPLES_10ms;
+            break;
+        }
+    case 32:
+        {
+            printf("iSAC Supper-Wideband.\n");
+            samplesIn10Ms = SWBFRAMESAMPLES_10ms;
+            break;
+        }
+    default:
+            printf("Unsupported sampling frequency %d kHz", sampFreqKHz);
+            exit(0);
+    }
+
+
+
+
+	/* Get Input and Output files */
+	sscanf(argv[argc-2], "%s", inname);
+	sscanf(argv[argc-1], "%s", outname);
+    printf("\nInput file: %s\n", inname);
+    printf("Output file: %s\n\n", outname);
+	if((inp = fopen(inname,"rb")) == NULL)
+    {
+		printf("  Error iSAC Cannot read file %s.\n", inname);
+        cout << flush;
+		exit(1);
+	}
+
+	if((outp = fopen(outname,"wb")) == NULL)
+    {
+		printf("  Error iSAC Cannot write file %s.\n", outname);
+        cout << flush;
+        getchar();
+		exit(1);
+	}
+	if(VADusage)
+    {
+		if((vadp = fopen(vadfile,"rb")) == NULL)
+        {
+			printf("  Error iSAC Cannot read file %s.\n", vadfile);
+            cout << flush;
+			exit(1);
+		}
+	}
+
+    if((bandwidthp = fopen("bwe.pcm","wb")) == NULL)
+    {
+            printf("  Error iSAC Cannot read file %s.\n", "bwe.pcm");
+            cout << flush;
+            exit(1);
+    }
+
+
+	starttime = clock()/(double)CLOCKS_PER_SEC; /* Runtime statistics */
+
+    /* Initialize the ISAC and BN structs */
+    if(testNum != 8)
+    {
+        if(!useAssign)
+        {
+            err =WebRtcIsac_Create(&ISAC_main_inst);
+            WebRtcIsac_SetEncSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+            WebRtcIsac_SetDecSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        }
+        else
+        {
+            /* Test the Assign functions */
+            int sss;
+            void *ppp;
+            err = WebRtcIsac_AssignSize(&sss);
+            ppp = malloc(sss);
+            err = WebRtcIsac_Assign(&ISAC_main_inst, ppp);
+            WebRtcIsac_SetEncSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+            WebRtcIsac_SetDecSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        }
+        /* Error check */
+        if(err < 0)
+        {
+            printf("\n\n Error in create.\n\n");
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+	BN_data.arrival_time  = 0;
+	BN_data.sample_count  = 0;
+	BN_data.rtp_number    = 0;
+
+	/* Initialize encoder and decoder */
+    framecnt= 0;
+    endfile	= 0;
+
+    if(doTransCoding)
+    {
+        WebRtcIsac_Create(&decoderTransCoding);
+        WebRtcIsac_SetEncSampRate(decoderTransCoding, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        WebRtcIsac_SetDecSampRate(decoderTransCoding, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        WebRtcIsac_DecoderInit(decoderTransCoding);
+        transCodingFile = fopen(transCodingFileName, "wb");
+        if(transCodingFile == NULL)
+        {
+            printf("Could not open %s to output trans-coding.\n", transCodingFileName);
+            exit(0);
+        }
+        strcat(transCodingFileName, ".bit");
+        transcodingBitstream = fopen(transCodingFileName, "wb");
+        if(transcodingBitstream == NULL)
+        {
+            printf("Could not open %s to write the bit-stream of transcoder.\n", transCodingFileName);
+            exit(0);
+        }
+    }
+
+    if(testNum != 1)
+    {
+		if(WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode) < 0)
+        {
+            printf("Error could not initialize the encoder \n");
+            cout << flush;
+            return 0;
+        }
+	}
+    if(testNum != 2)
+    {
+        if(WebRtcIsac_DecoderInit(ISAC_main_inst) < 0)
+        {
+            printf("Error could not initialize the decoder \n");
+            cout << flush;
+            return 0;
+        }
+	}
+	if(CodingMode == 1)
+    {
+        err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+            printf("\n\n Error in initialization (control): %d.\n\n", errtype);
+            cout << flush;
+            if(testNum == 0)
+            {
+                exit(EXIT_FAILURE);
+            }
+        }
+	}
+
+    if((setControlBWE) && (CodingMode == 0))
+    {
+        err = WebRtcIsac_ControlBwe(ISAC_main_inst, rateBPS, framesize, fixedFL);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+
+            printf("\n\n Error in Control BWE: %d.\n\n", errtype);
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+
+    if(payloadSize != 0)
+    {
+        err = WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadSize);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+            printf("\n\n Error in SetMaxPayloadSize: %d.\n\n", errtype);
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+    if(payloadRate != 0)
+    {
+        err = WebRtcIsac_SetMaxRate(ISAC_main_inst, payloadRate);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+            printf("\n\n Error in SetMaxRateInBytes: %d.\n\n", errtype);
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+
+	*speechType = 1;
+
+    cout << "\n" << flush;
+
+    length_file = 0;
+    WebRtc_Word16 bnIdxTC;
+    WebRtc_Word16 jitterInfoTC;
+    while (endfile == 0)
+    {
+        /* Call init functions at random, fault test number 7 */
+		if(testNum == 7 && (rand()%2 == 0))
+        {
+            err = WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode);
+            /* Error check */
+            if(err < 0)
+            {
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                printf("\n\n Error in encoderinit: %d.\n\n", errtype);
+                cout << flush;
+            }
+
+            err = WebRtcIsac_DecoderInit(ISAC_main_inst);
+            /* Error check */
+            if(err < 0)
+            {
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                printf("\n\n Error in decoderinit: %d.\n\n", errtype);
+                cout << flush;
+            }
+        }
+
+		cur_framesmpls = 0;
+		while (1)
+        {
+            /* Read 10 ms speech block */
+            endfile = readframe(shortdata, inp, samplesIn10Ms);
+
+            if(endfile)
+            {
+                numFileLoop++;
+                if(numFileLoop < totFileLoop)
+                {
+                    rewind(inp);
+                    framecnt = 0;
+                    fprintf(stderr, "\n");
+                    endfile = readframe(shortdata, inp, samplesIn10Ms);
+                }
+            }
+
+            if(testNum == 7)
+            {
+		    	srand((unsigned int)time(NULL));
+		    }
+
+            /* iSAC encoding */
+            if(!(testNum == 3 && framecnt == 0))
+            {
+                stream_len = WebRtcIsac_Encode(ISAC_main_inst,
+                    shortdata,
+                    (WebRtc_Word16*)streamdata);
+                if((payloadSize != 0) && (stream_len > payloadSize))
+                {
+                    if(testNum == 0)
+                    {
+                        printf("\n\n");
+                    }
+
+                    printf("\nError: Streamsize out of range %d\n", stream_len - payloadSize);
+                    cout << flush;
+                }
+
+                WebRtcIsac_GetUplinkBw(ISAC_main_inst, &sendBN);
+
+                if(stream_len>0)
+                {
+                    if(doTransCoding)
+                    {
+                        WebRtc_Word16 indexStream;
+                        WebRtc_UWord8 auxUW8;
+
+                        /************************* Main Transcoding stream *******************************/
+                        WebRtcIsac_GetDownLinkBwIndex(ISAC_main_inst, &bnIdxTC, &jitterInfoTC);
+                        streamLenTransCoding = WebRtcIsac_GetNewBitStream(
+                            ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding,
+                            (WebRtc_Word16*)streamDataTransCoding, false);
+                        if(streamLenTransCoding < 0)
+                        {
+                            fprintf(stderr, "Error in trans-coding\n");
+                            exit(0);
+                        }
+                        auxUW8 = (WebRtc_UWord8)(((streamLenTransCoding & 0xFF00) >> 8) &  0x00FF);
+                        fwrite(&auxUW8, sizeof(WebRtc_UWord8),
+                            1, transcodingBitstream);
+
+                        auxUW8 = (WebRtc_UWord8)(streamLenTransCoding & 0x00FF);
+                        fwrite(&auxUW8, sizeof(WebRtc_UWord8),
+                            1, transcodingBitstream);
+
+                        fwrite((WebRtc_UWord8*)streamDataTransCoding, sizeof(WebRtc_UWord8),
+                            streamLenTransCoding, transcodingBitstream);
+
+                        WebRtcIsac_ReadBwIndex((WebRtc_Word16*)streamDataTransCoding, &indexStream);
+                        if(indexStream != bnIdxTC)
+                        {
+                            fprintf(stderr, "Error in inserting Bandwidth index into transcoding stream.\n");
+                            exit(0);
+                        }
+                        numTransCodingBytes += streamLenTransCoding;
+                    }
+                }
+            }
+            else
+            {
+                break;
+            }
+
+			if(stream_len < 0)
+            {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                printf("\n\nError in encoder: %d.\n\n", errtype);
+                cout << flush;
+			}
+			cur_framesmpls += samplesIn10Ms;
+			/* exit encoder loop if the encoder returned a bitstream */
+			if(stream_len != 0) break;
+		}
+
+        /* read next bottleneck rate */
+        if(f_bn != NULL)
+        {
+            if(fscanf(f_bn, "%d", &bottleneck) == EOF)
+            {
+                /* Set pointer to beginning of file */
+                fseek(f_bn, 0L, SEEK_SET);
+                if (fscanf(f_bn, "%d", &bottleneck) == EOF) {
+                    exit(0);
+                }
+            }
+            if(CodingMode == 1)
+            {
+                WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+            }
+        }
+
+        length_file += cur_framesmpls;
+        if(cur_framesmpls == (3 * samplesIn10Ms))
+        {
+            maxStreamLen30 = (stream_len > maxStreamLen30)? stream_len:maxStreamLen30;
+        }
+        else
+        {
+            maxStreamLen60 = (stream_len > maxStreamLen60)? stream_len:maxStreamLen60;
+        }
+
+        if(!lostFrame)
+        {
+            lostFrame = ((rand()%100) < packetLossPercent);
+        }
+        else
+        {
+            lostFrame = 0;
+        }
+
+        // RED.
+        if(lostFrame)
+        {
+            stream_len = WebRtcIsac_GetRedPayload(ISAC_main_inst,
+                (WebRtc_Word16*)streamdata);
+
+            if(doTransCoding)
+            {
+                streamLenTransCoding = WebRtcIsac_GetNewBitStream(
+                    ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding,
+                    (WebRtc_Word16*)streamDataTransCoding, true);
+                if(streamLenTransCoding < 0)
+                {
+                    fprintf(stderr, "Error in RED trans-coding\n");
+                    exit(0);
+                }
+            }
+        }
+
+        /* make coded sequence too short by increasing */
+		/* the length the decoder expects */
+		if(testNum == 4)
+        {
+			stream_len += 10;
+		}
+
+		/* make coded sequence too long by decreasing */
+		/* the length the decoder expects */
+		if(testNum == 5)
+        {
+			stream_len -= 10;
+		}
+
+        if(testNum == 6)
+        {
+			srand((unsigned int)time(NULL));
+            for(i = 0; i < stream_len; i++)
+            {
+				streamdata[i] = rand();
+            }
+		}
+
+        if(VADusage){
+            readframe(vaddata, vadp, samplesIn10Ms*3);
+        }
+
+		/* simulate packet handling through NetEq and the modem */
+		if(!(testNum == 3 && framecnt == 0))
+        {
+            get_arrival_time(cur_framesmpls, stream_len, bottleneck, &BN_data,
+                sampFreqKHz*1000, sampFreqKHz*1000);
+        }
+
+		if(VADusage && (framecnt>10 && vaddata[0]==0))
+        {
+			BN_data.rtp_number--;
+		}
+        else
+        {
+            /* Error test number 10, garbage data */
+            if(testNum == 10)
+            {
+                /* Test to run decoder with garbage data */
+                for(i = 0; i < stream_len; i++)
+                {
+                    streamdata[i] = (short) (streamdata[i]) + (short) rand();
+                }
+            }
+
+            if(testNum != 9)
+            {
+                err = WebRtcIsac_UpdateBwEstimate(ISAC_main_inst, streamdata,
+                    stream_len, BN_data.rtp_number, BN_data.sample_count,
+                    BN_data.arrival_time);
+
+                if(err < 0)
+                {
+                    /* exit if returned with error */
+                    errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                    if(testNum == 0)
+                    {
+                        printf("\n\n");
+                    }
+
+                    printf("Error: in decoder: %d.", errtype);
+                    cout << flush;
+                    if(testNum == 0)
+                    {
+                        printf("\n\n");
+                    }
+
+                }
+            }
+
+            /* Call getFramelen, only used here for function test */
+            err = WebRtcIsac_ReadFrameLen(ISAC_main_inst,
+                (WebRtc_Word16*)streamdata, &FL);
+            if(err < 0)
+            {
+                /* exit if returned with error */
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+                printf("    Error: in getFrameLen %d.", errtype);
+                cout << flush;
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+            }
+
+            // iSAC decoding
+
+            if(lostFrame)
+            {
+                declen = WebRtcIsac_DecodeRcu(ISAC_main_inst, streamdata,
+                    stream_len, decoded, speechType);
+
+                if(doTransCoding)
+                {
+                    declenTC = WebRtcIsac_DecodeRcu(decoderTransCoding,
+                        streamDataTransCoding, streamLenTransCoding,
+                        decodedTC, speechType);
+                }
+            }
+            else
+            {
+                declen = WebRtcIsac_Decode(ISAC_main_inst, streamdata,
+                    stream_len, decoded, speechType);
+
+                if(doTransCoding)
+                {
+                    declenTC = WebRtcIsac_Decode(decoderTransCoding,
+                        streamDataTransCoding, streamLenTransCoding,
+                        decodedTC, speechType);
+                }
+            }
+
+            if(declen < 0)
+            {
+                /* exit if returned with error */
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+                printf("    Error: in decoder %d.", errtype);
+                cout << flush;
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+            }
+
+            if(declenTC < 0)
+            {
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+                printf("    Error: in decoding the transcoded stream");
+                cout << flush;
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+
+            }
+        }
+        /* Write decoded speech frame to file */
+        if((declen > 0) && (numFileLoop == 0))
+        {
+            fwrite(decoded, sizeof(WebRtc_Word16), declen, outp);
+        }
+
+        if((declenTC > 0) && (numFileLoop == 0))
+        {
+            fwrite(decodedTC, sizeof(WebRtc_Word16), declen, transCodingFile);
+        }
+
+
+		fprintf(stderr, "\rframe = %5d  ", framecnt);
+        fflush(stderr);
+		framecnt++;
+
+        /* Error test number 10, garbage data */
+        //if(testNum == 10)
+        //{
+        //    /* Test to run decoder with garbage data */
+        //    if( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL )
+        //    {
+        //        fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+        //    }
+        //    else
+        //    {
+        //        fprintf(seedfile, "ok\n\n");
+        //        fclose(seedfile);
+        //    }
+        //}
+        /* Error test number 10, garbage data */
+        //if(testNum == 10)
+        //{
+        //    /* Test to run decoder with garbage data */
+        //    for ( i = 0; i < stream_len; i++)
+        //    {
+        //        streamdata[i] = (short) (streamdata[i] + (short) rand());
+        //    }
+        //}
+
+
+		totalsmpls += declen;
+		totalbits += 8 * stream_len;
+#ifdef _DEBUG
+        kbps = ((double) sampFreqKHz * 1000.) / ((double) cur_framesmpls) * 8.0 * stream_len / 1000.0;// kbits/s
+		fy = fopen("bit_rate.dat", "a");
+		fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
+		fclose(fy);
+
+#endif /* _DEBUG */
+
+	}
+	printf("\n");
+	printf("total bits               = %d bits\n", totalbits);
+	printf("measured average bitrate = %0.3f kbits/s\n",
+        (double)totalbits *(sampFreqKHz) / totalsmpls);
+    if(doTransCoding)
+    {
+        printf("Transcoding average bit-rate = %0.3f kbps\n",
+            (double)numTransCodingBytes * 8.0 *(sampFreqKHz) / totalsmpls);
+        fclose(transCodingFile);
+    }
+	printf("\n");
+
+	/* Runtime statistics */
+	runtime = (double)(clock()/(double)CLOCKS_PER_SEC-starttime);
+	length_file = length_file /(sampFreqKHz * 1000.);
+
+    printf("\n\nLength of speech file: %.1f s\n", length_file);
+	printf("Time to run iSAC:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+
+    if(maxStreamLen30 != 0)
+    {
+        printf("Maximum payload size 30ms Frames %d bytes (%0.3f kbps)\n",
+            maxStreamLen30,
+            maxStreamLen30 * 8 / 30.);
+    }
+    if(maxStreamLen60 != 0)
+    {
+        printf("Maximum payload size 60ms Frames %d bytes (%0.3f kbps)\n",
+            maxStreamLen60,
+            maxStreamLen60 * 8 / 60.);
+    }
+    //fprintf(stderr, "\n");
+
+	fprintf(stderr, "   %.1f s", length_file);
+    fprintf(stderr, "   %0.1f kbps", (double)totalbits *(sampFreqKHz) / totalsmpls);
+    if(maxStreamLen30 != 0)
+    {
+        fprintf(stderr, "   plmax-30ms %d bytes (%0.0f kbps)",
+            maxStreamLen30,
+            maxStreamLen30 * 8 / 30.);
+    }
+    if(maxStreamLen60 != 0)
+    {
+        fprintf(stderr, "   plmax-60ms %d bytes (%0.0f kbps)",
+            maxStreamLen60,
+            maxStreamLen60 * 8 / 60.);
+    }
+    if(doTransCoding)
+    {
+        fprintf(stderr, "  transcoding rate %.0f kbps",
+            (double)numTransCodingBytes * 8.0 *(sampFreqKHz) / totalsmpls);
+    }
+
+    fclose(inp);
+	fclose(outp);
+	WebRtcIsac_Free(ISAC_main_inst);
+
+
+	exit(0);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/SwitchingSampRate/SwitchingSampRate.cc b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/SwitchingSampRate/SwitchingSampRate.cc
new file mode 100644
index 0000000..e0eed12
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/SwitchingSampRate/SwitchingSampRate.cc
@@ -0,0 +1,443 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// SwitchingSampRate.cpp : Defines the entry point for the console
+// application.
+//
+
+#include <iostream>
+#include "isac.h"
+#include "utility.h"
+#include "signal_processing_library.h"
+
+#define MAX_FILE_NAME  500
+#define MAX_NUM_CLIENTS 2
+
+
+#define NUM_CLIENTS 2
+
+using namespace std;
+
+int main(int argc, char* argv[])
+{
+  char fileNameWB[MAX_FILE_NAME];
+  char fileNameSWB[MAX_FILE_NAME];
+
+  char outFileName[MAX_NUM_CLIENTS][MAX_FILE_NAME];
+
+  FILE* inFile[MAX_NUM_CLIENTS];
+  FILE* outFile[MAX_NUM_CLIENTS];
+
+  ISACStruct* codecInstance[MAX_NUM_CLIENTS];
+  WebRtc_Word32 resamplerState[MAX_NUM_CLIENTS][8];
+
+  enum IsacSamplingRate encoderSampRate[MAX_NUM_CLIENTS];
+
+  int minBn = 16000;
+  int maxBn = 56000;
+
+  int bnWB = 32000;
+  int bnSWB = 56000;
+
+  strcpy(outFileName[0], "switchSampRate_out1.pcm");
+  strcpy(outFileName[1], "switchSampRate_out2.pcm");
+
+  short clientCntr;
+
+  unsigned int lenEncodedInBytes[MAX_NUM_CLIENTS];
+  unsigned int lenAudioIn10ms[MAX_NUM_CLIENTS];
+  unsigned int lenEncodedInBytesTmp[MAX_NUM_CLIENTS];
+  unsigned int lenAudioIn10msTmp[MAX_NUM_CLIENTS];
+  BottleNeckModel* packetData[MAX_NUM_CLIENTS];
+
+  char versionNumber[100];
+  short samplesIn10ms[MAX_NUM_CLIENTS];
+  int bottleneck[MAX_NUM_CLIENTS];
+
+  printf("\n\n");
+  printf("____________________________________________\n\n");
+  WebRtcIsac_version(versionNumber);
+  printf("    iSAC-swb version %s\n", versionNumber);
+  printf("____________________________________________\n");
+
+
+  fileNameWB[0]  = '\0';
+  fileNameSWB[0] = '\0';
+
+  char myFlag[20];
+  strcpy(myFlag, "-wb");
+  // READ THE WIDEBAND AND SUPER-WIDEBAND FILE NAMES
+  if(readParamString(argc, argv, myFlag, fileNameWB, MAX_FILE_NAME) <= 0)
+  {
+    printf("No wideband file is specified");
+  }
+
+  strcpy(myFlag, "-swb");
+  if(readParamString(argc, argv, myFlag, fileNameSWB, MAX_FILE_NAME) <= 0)
+  {
+    printf("No super-wideband file is specified");
+  }
+
+  // THE FIRST CLIENT STARTS IN WIDEBAND
+  encoderSampRate[0] = kIsacWideband;
+  OPEN_FILE_RB(inFile[0], fileNameWB);
+
+  // THE SECOND CLIENT STARTS IN SUPER-WIDEBAND
+  encoderSampRate[1] = kIsacSuperWideband;
+  OPEN_FILE_RB(inFile[1], fileNameSWB);
+
+  strcpy(myFlag, "-I");
+  short codingMode = readSwitch(argc, argv, myFlag);
+
+  for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
+  {
+    codecInstance[clientCntr] = NULL;
+
+    printf("\n");
+    printf("Client %d\n", clientCntr + 1);
+    printf("---------\n");
+    printf("Starting %s",
+           (encoderSampRate[clientCntr] == kIsacWideband)
+           ? "wideband":"super-wideband");
+
+    // Open output File Name
+    OPEN_FILE_WB(outFile[clientCntr], outFileName[clientCntr]);
+    printf("Output File...................... %s\n", outFileName[clientCntr]);
+
+    samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10;
+
+    if(codingMode == 1)
+    {
+      bottleneck[clientCntr] = (clientCntr)? bnSWB:bnWB;
+    }
+    else
+    {
+      bottleneck[clientCntr] = (clientCntr)? minBn:maxBn;
+    }
+
+    printf("Bottleneck....................... %0.3f kbits/sec \n",
+           bottleneck[clientCntr] / 1000.0);
+
+    // coding-mode
+    printf("Encoding Mode.................... %s\n",
+           (codingMode == 1)? "Channel-Independent (Instantaneous)":"Adaptive");
+
+    lenEncodedInBytes[clientCntr] = 0;
+    lenAudioIn10ms[clientCntr] = 0;
+    lenEncodedInBytesTmp[clientCntr] = 0;
+    lenAudioIn10msTmp[clientCntr] = 0;
+
+    packetData[clientCntr] = (BottleNeckModel*)new(BottleNeckModel);
+    if(packetData[clientCntr] == NULL)
+    {
+      printf("Could not allocate memory for packetData \n");
+      return -1;
+    }
+    memset(packetData[clientCntr], 0, sizeof(BottleNeckModel));
+    memset(resamplerState[clientCntr], 0, sizeof(WebRtc_Word32) * 8);
+  }
+
+  for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
+  {
+    // Create
+    if(WebRtcIsac_Create(&codecInstance[clientCntr]))
+    {
+      printf("Could not creat client %d\n", clientCntr + 1);
+      return -1;
+    }
+
+    WebRtcIsac_SetEncSampRate(codecInstance[clientCntr], encoderSampRate[clientCntr]);
+
+    WebRtcIsac_SetDecSampRate(codecInstance[clientCntr],
+                              encoderSampRate[clientCntr + (1 - ((clientCntr & 1)<<1))]);
+
+    // Initialize Encoder
+    if(WebRtcIsac_EncoderInit(codecInstance[clientCntr],
+                              codingMode) < 0)
+    {
+      printf("Could not initialize client, %d\n", clientCntr + 1);
+      return -1;
+    }
+
+    // Initialize Decoder
+    if(WebRtcIsac_DecoderInit(codecInstance[clientCntr]) < 0)
+    {
+      printf("Could not initialize decoder of client %d\n",
+             clientCntr + 1);
+      return -1;
+    }
+
+    // setup Rate if in Instantaneous mode
+    if(codingMode != 0)
+    {
+      // ONLY Clients who are not in Adaptive mode
+      if(WebRtcIsac_Control(codecInstance[clientCntr],
+                            bottleneck[clientCntr], 30) < 0)
+      {
+        printf("Could not setup bottleneck and frame-size for client %d\n",
+               clientCntr + 1);
+        return -1;
+      }
+    }
+  }
+
+
+  short streamLen;
+  short numSamplesRead;
+  short lenDecodedAudio;
+  short senderIdx;
+  short receiverIdx;
+
+  printf("\n");
+  short num10ms[MAX_NUM_CLIENTS];
+  memset(num10ms, 0, sizeof(short)*MAX_NUM_CLIENTS);
+  FILE* arrivalTimeFile1 = fopen("arrivalTime1.dat", "wb");
+  FILE* arrivalTimeFile2 = fopen("arrivalTime2.dat", "wb");
+  short numPrint[MAX_NUM_CLIENTS];
+  memset(numPrint, 0, sizeof(short) * MAX_NUM_CLIENTS);
+
+  // Audio Buffers
+  short silence10ms[10 * 32];
+  memset(silence10ms, 0, 320 * sizeof(short));
+  short audioBuff10ms[10 * 32];
+  short audioBuff60ms[60 * 32];
+  short resampledAudio60ms[60 * 32];
+
+  unsigned short bitStream[600+600];
+  short speechType[1];
+
+  short numSampFreqChanged = 0;
+  while(numSampFreqChanged < 10)
+  {
+    for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
+    {
+      // Encoding/decoding for this pair of clients, if there is
+      // audio for any of them
+      //if(audioLeft[clientCntr] || audioLeft[clientCntr + 1])
+      //{
+      //for(pairCntr = 0; pairCntr < 2; pairCntr++)
+      //{
+      senderIdx = clientCntr; // + pairCntr;
+      receiverIdx = 1 - clientCntr;//  + (1 - pairCntr);
+
+      //if(num10ms[senderIdx] > 6)
+      //{
+      //    printf("Too many frames read for client %d",
+      //        senderIdx + 1);
+      //    return -1;
+      //}
+
+      numSamplesRead = (short)fread(audioBuff10ms, sizeof(short),
+                                    samplesIn10ms[senderIdx], inFile[senderIdx]);
+      if(numSamplesRead != samplesIn10ms[senderIdx])
+      {
+        // file finished switch encoder sampling frequency.
+        printf("Changing Encoder Sampling frequency in client %d to ", senderIdx+1);
+        fclose(inFile[senderIdx]);
+        numSampFreqChanged++;
+        if(encoderSampRate[senderIdx] == kIsacWideband)
+        {
+          printf("super-wideband.\n");
+          OPEN_FILE_RB(inFile[senderIdx], fileNameSWB);
+          encoderSampRate[senderIdx] = kIsacSuperWideband;
+        }
+        else
+        {
+          printf("wideband.\n");
+          OPEN_FILE_RB(inFile[senderIdx], fileNameWB);
+          encoderSampRate[senderIdx] = kIsacWideband;
+        }
+        WebRtcIsac_SetEncSampRate(codecInstance[senderIdx], encoderSampRate[senderIdx]);
+        WebRtcIsac_SetDecSampRate(codecInstance[receiverIdx], encoderSampRate[senderIdx]);
+
+        samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10;
+
+        numSamplesRead = (short)fread(audioBuff10ms, sizeof(short),
+                                      samplesIn10ms[senderIdx], inFile[senderIdx]);
+        if(numSamplesRead != samplesIn10ms[senderIdx])
+        {
+          printf(" File %s for client %d has not enough audio\n",
+                 (encoderSampRate[senderIdx]==kIsacWideband)? "wideband":"super-wideband",
+                 senderIdx + 1);
+          return -1;
+        }
+      }
+      num10ms[senderIdx]++;
+
+      // sanity check
+      //if(num10ms[senderIdx] > 6)
+      //{
+      //    printf("Client %d has got more than 60 ms audio and encoded no packet.\n",
+      //        senderIdx);
+      //    return -1;
+      //}
+
+      // Encode
+
+
+      streamLen = WebRtcIsac_Encode(codecInstance[senderIdx],
+                                    audioBuff10ms, (short*)bitStream);
+      WebRtc_Word16 ggg;
+      if (streamLen > 0) {
+        if((  WebRtcIsac_ReadFrameLen(codecInstance[receiverIdx],
+                                      (short *) bitStream, &ggg))<0)
+          printf("ERROR\n");
+      }
+
+      // Sanity check
+      if(streamLen < 0)
+      {
+        printf(" Encoder error in client %d \n", senderIdx + 1);
+        return -1;
+      }
+
+
+      if(streamLen > 0)
+      {
+        // Packet generated; model sending through a channel, do bandwidth
+        // estimation at the receiver and decode.
+        lenEncodedInBytes[senderIdx] += streamLen;
+        lenAudioIn10ms[senderIdx] += (unsigned int)num10ms[senderIdx];
+        lenEncodedInBytesTmp[senderIdx] += streamLen;
+        lenAudioIn10msTmp[senderIdx] += (unsigned int)num10ms[senderIdx];
+
+        // Print after ~5 sec.
+        if(lenAudioIn10msTmp[senderIdx] >= 100)
+        {
+          numPrint[senderIdx]++;
+          printf("  %d,  %6.3f => %6.3f ", senderIdx+1,
+                 bottleneck[senderIdx] / 1000.0,
+                 lenEncodedInBytesTmp[senderIdx] * 0.8 /
+                 lenAudioIn10msTmp[senderIdx]);
+
+          if(codingMode == 0)
+          {
+            WebRtc_Word32 bn;
+            WebRtcIsac_GetUplinkBw(codecInstance[senderIdx], &bn);
+            printf("[%d] ", bn);
+          }
+          //WebRtc_Word16 rateIndexLB;
+          //WebRtc_Word16 rateIndexUB;
+          //WebRtcIsac_GetDownLinkBwIndex(codecInstance[receiverIdx],
+          //    &rateIndexLB, &rateIndexUB);
+          //printf(" (%2d, %2d) ", rateIndexLB, rateIndexUB);
+
+          cout << flush;
+          lenEncodedInBytesTmp[senderIdx] = 0;
+          lenAudioIn10msTmp[senderIdx]    = 0;
+          //if(senderIdx == (NUM_CLIENTS - 1))
+          //{
+          printf("  %0.1f \n", lenAudioIn10ms[senderIdx] * 10. /1000);
+          //}
+
+          // After ~20 sec change the bottleneck.
+          //    if((numPrint[senderIdx] == 4) && (codingMode == 0))
+          //    {
+          //        numPrint[senderIdx] = 0;
+          //        if(codingMode == 0)
+          //        {
+          //            int newBottleneck = bottleneck[senderIdx] +
+          //                (bottleneckChange[senderIdx] * 1000);
+
+          //            if(bottleneckChange[senderIdx] > 0)
+          //            {
+          //                if(newBottleneck >maxBn)
+          //                {
+          //                    bottleneckChange[senderIdx] = -1;
+          //                    newBottleneck = bottleneck[senderIdx] +
+          //                        (bottleneckChange[senderIdx] * 1000);
+          //                    if(newBottleneck > minBn)
+          //                    {
+          //                        bottleneck[senderIdx] = newBottleneck;
+          //                    }
+          //                }
+          //                else
+          //                {
+          //                    bottleneck[senderIdx] = newBottleneck;
+          //                }
+          //            }
+          //            else
+          //            {
+          //                if(newBottleneck < minBn)
+          //                {
+          //                    bottleneckChange[senderIdx] = 1;
+          //                    newBottleneck = bottleneck[senderIdx] +
+          //                        (bottleneckChange[senderIdx] * 1000);
+          //                    if(newBottleneck < maxBn)
+          //                    {
+          //                        bottleneck[senderIdx] = newBottleneck;
+          //                    }
+          //                }
+          //                else
+          //                {
+          //                    bottleneck[senderIdx] = newBottleneck;
+          //                }
+          //            }
+          //        }
+          //    }
+        }
+
+        // model a channel of given bottleneck, to get the receive timestamp
+        get_arrival_time(num10ms[senderIdx] * samplesIn10ms[senderIdx],
+                         streamLen, bottleneck[senderIdx], packetData[senderIdx],
+                         encoderSampRate[senderIdx]*1000, encoderSampRate[senderIdx]*1000);
+
+        // Write the arrival time.
+        if(senderIdx == 0)
+        {
+          fwrite(&(packetData[senderIdx]->arrival_time), sizeof(unsigned int), 1,
+                 arrivalTimeFile1);
+        }
+        else
+        {
+          fwrite(&(packetData[senderIdx]->arrival_time), sizeof(unsigned int), 1,
+                 arrivalTimeFile2);
+        }
+
+        // BWE
+        if(WebRtcIsac_UpdateBwEstimate(codecInstance[receiverIdx],
+                                       bitStream,  streamLen, packetData[senderIdx]->rtp_number,
+                                       packetData[senderIdx]->sample_count,
+                                       packetData[senderIdx]->arrival_time) < 0)
+        {
+          printf(" BWE Error at client %d \n", receiverIdx + 1);
+          return -1;
+        }
+        /**/
+        // Decode
+        lenDecodedAudio = WebRtcIsac_Decode(
+            codecInstance[receiverIdx], bitStream, streamLen,
+            audioBuff60ms, speechType);
+        if(lenDecodedAudio < 0)
+        {
+          printf(" Decoder error in client %d \n", receiverIdx + 1);
+          return -1;
+        }
+
+
+        if(encoderSampRate[senderIdx] == kIsacWideband)
+        {
+          WebRtcSpl_UpsampleBy2(audioBuff60ms, lenDecodedAudio, resampledAudio60ms,
+                                resamplerState[receiverIdx]);
+          fwrite(resampledAudio60ms, sizeof(short), lenDecodedAudio << 1,
+                 outFile[receiverIdx]);
+        }
+        else
+        {
+          fwrite(audioBuff60ms, sizeof(short), lenDecodedAudio,
+                 outFile[receiverIdx]);
+        }
+        num10ms[senderIdx] = 0;
+      }
+      //}
+      //}
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/debugUtility.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/debugUtility.h
new file mode 100644
index 0000000..d708ad1
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/debugUtility.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_TEST_DEBUGUTILITY_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_TEST_DEBUGUTILITY_H_

#include <stdio.h>
#include <string.h>
#include "utility.h"

/* Debug-dump state for the iSAC test programs (only used when built with
   HAVE_DEBUG_INFO).  Groups the dump-file handles for per-band signals
   together with some packet-loss and payload-size bookkeeping. */
typedef struct 
{
    /* Encoder-side residual signal, one file per frequency band (kHz). */
    FILE*  res0to4FilePtr;
    FILE*  res4to8FilePtr;
    FILE*  res8to12FilePtr;
    FILE*  res8to16FilePtr;

    /* Decoder-side residual signal, one file per frequency band. */
    FILE*  res0to4DecFilePtr;
    FILE*  res4to8DecFilePtr;
    FILE*  res8to12DecFilePtr;
    FILE*  res8to16DecFilePtr;

    /* Encoder input, one file per frequency band. */
    FILE*  in0to4FilePtr;
    FILE*  in4to8FilePtr;
    FILE*  in8to12FilePtr;
    FILE*  in8to16FilePtr;

    /* Decoder output, one file per frequency band. */
    FILE*  out0to4FilePtr;
    FILE*  out4to8FilePtr;
    FILE*  out8to12FilePtr;
    FILE*  out8to16FilePtr;

    /* FFT coefficient dumps, encoder and decoder side. */
    FILE*  fftFilePtr;
    FILE*  fftDecFilePtr;

    /* Packet arrival-time dump and the last value written to it. */
    FILE*  arrivalTime;
    
    float  lastArrivalTime;

    /* Loss status of the previous / current / next packet. */
    int    prevPacketLost;
    int    currPacketLost;
    int    nextPacketLost;

    /* Configured packet-loss percentage of the simulation. */
    int    packetLossPercent;

    /* Largest payload seen and accumulated byte counts, split into
       lower-band (LB) and upper-band (UB) contributions. */
    int maxPayloadLB;
    int maxPayloadUB;
    int lbBytes;
    int ubBytes;
    

}debugStruct;

/* Prints the entropy-coder state (stream value and stream index) of `obj`;
   `obj` must be a pointer to a struct with a `bitstr_obj` member. */
#define PRINT_ENTROPY_INFO(obj)                                         \
    do                                                                  \
    {                                                                   \
        printf("%10u, %u; ",                                            \
            obj->bitstr_obj.streamval, obj->bitstr_obj.stream_index);   \
    } while(0)  

/* Opens all dump files and resets the fields of `str`; returns 0 on
   success, -1 when a file cannot be opened (see OPEN_FILE_WB). */
int setupDebugStruct(debugStruct* str);

#endif  // WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_TEST_DEBUGUTILITY_H_
\ No newline at end of file
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/test/simpleKenny.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/simpleKenny.c
new file mode 100644
index 0000000..d92ab89
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/test/simpleKenny.c
@@ -0,0 +1,634 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* kenny.c  - Main function for the iSAC coder */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#ifdef WIN32
+#include "windows.h"
+#define CLOCKS_PER_SEC  1000
+#endif
+
+#include <ctype.h>
+#include <math.h>
+
+/* include API */
+#include "isac.h"
+#include "utility.h"
+//#include "commonDefs.h"
+
+/* max number of samples per frame (= 60 ms frame) */
+#define MAX_FRAMESAMPLES_SWB                1920
+/* number of samples per 10ms frame */
+#define FRAMESAMPLES_SWB_10ms               320
+#define FRAMESAMPLES_WB_10ms                160
+
+/* sampling frequency (Hz) */
+#define FS_SWB                               32000
+#define FS_WB                                16000
+
+//#define CHANGE_OUTPUT_NAME
+
#ifdef HAVE_DEBUG_INFO
    #include "debugUtility.h"
    debugStruct debugInfo;  /* global debug dump-file state, see debugUtility.h */
#endif

/* Number of packets processed so far; printed as progress in the main
   loop and used for the final run statistics. */
unsigned long framecnt = 0;
+
+int main(int argc, char* argv[])
+{
+    //--- File IO ----
+    FILE* inp;
+    FILE* outp;
+    char inname[500];
+    char outname[500];
+
+    /* Runtime statistics */
+    double        rate;
+    double        rateRCU;
+    unsigned long totalbits = 0;
+    unsigned long totalBitsRCU = 0;
+    unsigned long totalsmpls =0;
+
+    WebRtc_Word32   bottleneck = 39;
+    WebRtc_Word16   frameSize = 30;           /* ms */
+    WebRtc_Word16   codingMode = 1;
+    WebRtc_Word16   shortdata[FRAMESAMPLES_SWB_10ms];
+    WebRtc_Word16   decoded[MAX_FRAMESAMPLES_SWB];
+    //WebRtc_UWord16  streamdata[1000];
+    WebRtc_Word16   speechType[1];
+    WebRtc_Word16   payloadLimit;
+    WebRtc_Word32   rateLimit;
+    ISACStruct*   ISAC_main_inst;
+
+    WebRtc_Word16   stream_len = 0;
+    WebRtc_Word16   declen;
+    WebRtc_Word16   err;
+    WebRtc_Word16   cur_framesmpls;
+    int           endfile;
+#ifdef WIN32
+    double        length_file;
+    double        runtime;
+    char          outDrive[10];
+    char          outPath[500];
+    char          outPrefix[500];
+    char          outSuffix[500];
+    char          bitrateFileName[500];
+    FILE*         bitrateFile;
+    double        starttime;
+    double        rateLB = 0;
+    double        rateUB = 0;
+#endif
+    FILE*         histFile;
+    FILE*         averageFile;
+    int           sampFreqKHz;
+    int           samplesIn10Ms;
+    WebRtc_Word16   maxStreamLen = 0;
+    char          histFileName[500];
+    char          averageFileName[500];
+    unsigned int  hist[600];
+    unsigned int  tmpSumStreamLen = 0;
+    unsigned int  packetCntr = 0;
+    unsigned int  lostPacketCntr = 0;
+    WebRtc_UWord16  payload[600];
+    WebRtc_UWord16  payloadRCU[600];
+    WebRtc_UWord16  packetLossPercent = 0;
+    WebRtc_Word16   rcuStreamLen = 0;
+	int onlyEncode;
+	int onlyDecode;
+
+
+    BottleNeckModel packetData;
+	packetData.arrival_time  = 0;
+	packetData.sample_count  = 0;
+	packetData.rtp_number    = 0;
+    memset(hist, 0, sizeof(hist));
+
+    /* handling wrong input arguments in the command line */
+    if(argc < 5)
+    {
+		int size;
+		WebRtcIsac_AssignSize(&size);
+
+        printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("Usage:\n\n");
+        printf("%s infile outfile -bn bottelneck [options] \n\n", argv[0]);
+        printf("with:\n");
+        printf("-I................... indicates encoding in instantaneous mode.\n");
+        printf("-bn bottleneck....... the value of the bottleneck in bit/sec, e.g. 39742,\n");
+		printf("                      in instantaneous (channel-independent) mode.\n\n");
+        printf("infile............... Normal speech input file\n\n");
+        printf("outfile.............. Speech output file\n\n");
+        printf("OPTIONS\n");
+        printf("-------\n");
+        printf("-fs sampFreq......... sampling frequency of codec 16 or 32 (default) kHz.\n");
+        printf("-plim payloadLim..... payload limit in bytes,\n");
+        printf("                      default is the maximum possible.\n");
+        printf("-rlim rateLim........ rate limit in bits/sec, \n");
+        printf("                      default is the maimum possible.\n");
+        printf("-h file.............. record histogram and *append* to 'file'.\n");
+        printf("-ave file............ record average rate of 3 sec intervales and *append* to 'file'.\n");
+        printf("-ploss............... packet-loss percentage.\n");
+		printf("-enc................. do only encoding and store the bit-stream\n");
+		printf("-dec................. the input file is a bit-stream, decode it.\n");
+
+        printf("\n");
+        printf("Example usage:\n\n");
+        printf("%s speechIn.pcm speechOut.pcm -B 40000 -fs 32 \n\n", argv[0]);
+
+		printf("structure size %d bytes\n", size);
+
+        exit(0);
+    }
+
+
+
+    /* Get Bottleneck value */
+    bottleneck = readParamInt(argc, argv, "-bn", 50000);
+    fprintf(stderr,"\nfixed bottleneck rate of %d bits/s\n\n", bottleneck);
+
+    /* Get Input and Output files */
+    sscanf(argv[1], "%s", inname);
+    sscanf(argv[2], "%s", outname);
+    codingMode = readSwitch(argc, argv, "-I");
+    sampFreqKHz = (WebRtc_Word16)readParamInt(argc, argv, "-fs", 32);
+    if(readParamString(argc, argv, "-h", histFileName, 500) > 0)
+    {
+        histFile = fopen(histFileName, "a");
+        if(histFile == NULL)
+        {
+            printf("cannot open hist file %s", histFileName);
+            exit(0);
+        }
+    }
+    else
+    {
+        // NO recording of hitstogram
+        histFile = NULL;
+    }
+
+
+    packetLossPercent = readParamInt(argc, argv, "-ploss", 0);
+
+    if(readParamString(argc, argv, "-ave", averageFileName, 500) > 0)
+    {
+        averageFile = fopen(averageFileName, "a");
+        if(averageFile == NULL)
+        {
+            printf("cannot open file to write rate %s", averageFileName);
+            exit(0);
+        }
+    }
+    else
+    {
+        averageFile = NULL;
+    }
+
+	onlyEncode = readSwitch(argc, argv, "-enc");
+	onlyDecode = readSwitch(argc, argv, "-dec");
+
+
+    switch(sampFreqKHz)
+    {
+    case 16:
+        {
+            samplesIn10Ms = 160;
+            break;
+        }
+    case 32:
+        {
+            samplesIn10Ms = 320;
+            break;
+        }
+    default:
+        printf("A sampling frequency of %d kHz is not supported,\
+valid values are 8 and 16.\n", sampFreqKHz);
+        exit(-1);
+    }
+    payloadLimit = (WebRtc_Word16)readParamInt(argc, argv, "-plim", 400);
+    rateLimit = readParamInt(argc, argv, "-rlim", 106800);
+
+    if ((inp = fopen(inname,"rb")) == NULL) {
+        printf("  iSAC: Cannot read file %s.\n", inname);
+        exit(1);
+    }
+    if ((outp = fopen(outname,"wb")) == NULL) {
+        printf("  iSAC: Cannot write file %s.\n", outname);
+        exit(1);
+    }
+
+#ifdef WIN32
+    _splitpath(outname, outDrive, outPath, outPrefix, outSuffix);
+    _makepath(bitrateFileName, outDrive, outPath, "bitrate", ".txt");
+
+    bitrateFile = fopen(bitrateFileName, "a");
+    fprintf(bitrateFile, "%  %%s  \n", inname);
+#endif
+
+    printf("\n");
+    printf("Input.................... %s\n", inname);
+    printf("Output................... %s\n", outname);
+    printf("Encoding Mode............ %s\n",
+        (codingMode == 1)? "Channel-Independent":"Channel-Adaptive");
+    printf("Bottleneck............... %d bits/sec\n", bottleneck);
+    printf("Packet-loss Percentage... %d\n", packetLossPercent);
+    printf("\n");
+
+#ifdef WIN32
+    starttime = clock()/(double)CLOCKS_PER_SEC; /* Runtime statistics */
+#endif
+
+    /* Initialize the ISAC and BN structs */
+    err = WebRtcIsac_Create(&ISAC_main_inst);
+
+    WebRtcIsac_SetEncSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband: kIsacSuperWideband);
+    WebRtcIsac_SetDecSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband: kIsacSuperWideband);
+    /* Error check */
+    if (err < 0) {
+        fprintf(stderr,"\n\n Error in create.\n\n");
+        exit(EXIT_FAILURE);
+    }
+
+    framecnt = 0;
+    endfile     = 0;
+
+    /* Initialize encoder and decoder */
+    if(WebRtcIsac_EncoderInit(ISAC_main_inst, codingMode) < 0)
+    {
+        printf("cannot initialize encoder\n");
+        return -1;
+    }
+    if(WebRtcIsac_DecoderInit(ISAC_main_inst) < 0)
+    {
+        printf("cannot initialize decoder\n");
+        return -1;
+    }
+
+    //{
+    //    WebRtc_Word32 b1, b2;
+    //    FILE* fileID = fopen("GetBNTest.txt", "w");
+    //    b2 = 32100;
+    //    while(b2 <= 52000)
+    //    {
+    //        WebRtcIsac_Control(ISAC_main_inst, b2, frameSize);
+    //        WebRtcIsac_GetUplinkBw(ISAC_main_inst, &b1);
+    //        fprintf(fileID, "%5d %5d\n", b2, b1);
+    //        b2 += 10;
+    //    }
+    //}
+
+    if(codingMode == 1)
+    {
+        if(WebRtcIsac_Control(ISAC_main_inst, bottleneck, frameSize) < 0)
+        {
+            printf("cannot set bottleneck\n");
+            return -1;
+        }
+    }
+    else
+    {
+        if(WebRtcIsac_ControlBwe(ISAC_main_inst, 15000, 30, 1) < 0)
+        {
+            printf("cannot configure BWE\n");
+            return -1;
+        }
+    }
+
+    if(WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadLimit) < 0)
+    {
+        printf("cannot set maximum payload size %d.\n", payloadLimit);
+        return -1;
+    }
+
+    if (rateLimit < 106800) {
+        if(WebRtcIsac_SetMaxRate(ISAC_main_inst, rateLimit) < 0)
+        {
+            printf("cannot set the maximum rate %d.\n", rateLimit);
+            return -1;
+        }
+    }
+
+    //=====================================
+//#ifdef HAVE_DEBUG_INFO
+//    if(setupDebugStruct(&debugInfo) < 0)
+//    {
+//        exit(1);
+//    }
+//#endif
+
+    while (endfile == 0)
+    {
+        fprintf(stderr,"  \rframe = %7li", framecnt);
+
+        //============== Readind from the file and encoding =================
+        cur_framesmpls = 0;
+        stream_len = 0;
+
+
+		if(onlyDecode)
+		{
+			WebRtc_UWord8 auxUW8;
+                        size_t auxSizet;
+			if(fread(&auxUW8, sizeof(WebRtc_UWord8), 1, inp) < 1)
+			{
+				break;
+			}
+			stream_len = ((WebRtc_UWord8)auxUW8) << 8;
+			if(fread(&auxUW8, sizeof(WebRtc_UWord8), 1, inp) < 1)
+			{
+				break;
+			}
+			stream_len |= (WebRtc_UWord16)auxUW8;
+                        auxSizet = (size_t)stream_len;
+                        if(fread(payload, 1, auxSizet, inp) < auxSizet)
+			{
+				printf("last payload is corrupted\n");
+				break;
+			}
+		}
+		else
+		{
+			while(stream_len == 0)
+			{
+				// Read 10 ms speech block
+				endfile = readframe(shortdata, inp, samplesIn10Ms);
+				if(endfile)
+				{
+					break;
+				}
+				cur_framesmpls += samplesIn10Ms;
+
+				//-------- iSAC encoding ---------
+				stream_len = WebRtcIsac_Encode(ISAC_main_inst, shortdata,
+					(WebRtc_Word16*)payload);
+
+				if(stream_len < 0)
+				{
+					// exit if returned with error
+					//errType=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+					fprintf(stderr,"\nError in encoder\n");
+					getchar();
+					exit(EXIT_FAILURE);
+				}
+
+
+			}
+			//===================================================================
+			if(endfile)
+			{
+				break;
+			}
+
+			rcuStreamLen = WebRtcIsac_GetRedPayload(ISAC_main_inst, (WebRtc_Word16*)payloadRCU);
+
+			get_arrival_time(cur_framesmpls, stream_len, bottleneck, &packetData,
+				sampFreqKHz * 1000, sampFreqKHz * 1000);
+			if(WebRtcIsac_UpdateBwEstimate(ISAC_main_inst,
+				payload,  stream_len, packetData.rtp_number,
+				packetData.sample_count,
+				packetData.arrival_time) < 0)
+			{
+				printf(" BWE Error at client\n");
+				return -1;
+			}
+		}
+
+        if(endfile)
+        {
+            break;
+        }
+
+        maxStreamLen = (stream_len > maxStreamLen)? stream_len:maxStreamLen;
+        packetCntr++;
+
+        hist[stream_len]++;
+        if(averageFile != NULL)
+        {
+            tmpSumStreamLen += stream_len;
+            if(packetCntr == 100)
+            {
+                // kbps
+                fprintf(averageFile, "%8.3f ", (double)tmpSumStreamLen * 8.0 / (30.0 * packetCntr));
+                packetCntr = 0;
+                tmpSumStreamLen = 0;
+            }
+        }
+
+		if(onlyEncode)
+		{
+			WebRtc_UWord8 auxUW8;
+			auxUW8 = (WebRtc_UWord8)(((stream_len & 0x7F00) >> 8) & 0xFF);
+			fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1, outp);
+
+			auxUW8 = (WebRtc_UWord8)(stream_len & 0xFF);
+			fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1, outp);
+			fwrite(payload, 1, stream_len, outp);
+		}
+		else
+		{
+
+			//======================= iSAC decoding ===========================
+
+			if((rand() % 100) < packetLossPercent)
+			{
+				declen = WebRtcIsac_DecodeRcu(ISAC_main_inst, payloadRCU,
+					rcuStreamLen, decoded, speechType);
+				lostPacketCntr++;
+			}
+			else
+			{
+				declen = WebRtcIsac_Decode(ISAC_main_inst, payload,
+					stream_len, decoded, speechType);
+			}
+			if(declen <= 0)
+			{
+				//errType=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				fprintf(stderr,"\nError in decoder.\n");
+				getchar();
+				exit(1);
+			}
+
+			// Write decoded speech frame to file
+			fwrite(decoded, sizeof(WebRtc_Word16), declen, outp);
+			cur_framesmpls = declen;
+		}
+        // Update Statistics
+        framecnt++;
+        totalsmpls += cur_framesmpls;
+        if(stream_len > 0)
+        {
+            totalbits += 8 * stream_len;
+        }
+        if(rcuStreamLen > 0)
+        {
+            totalBitsRCU += 8 * rcuStreamLen;
+        }
+    }
+
+    rate =    ((double)totalbits    * (sampFreqKHz)) / (double)totalsmpls;
+    rateRCU = ((double)totalBitsRCU * (sampFreqKHz)) / (double)totalsmpls;
+
+    printf("\n\n");
+    printf("Sampling Rate......................... %d kHz\n", sampFreqKHz);
+    printf("Payload Limit......................... %d bytes \n", payloadLimit);
+    printf("Rate Limit............................ %d bits/sec \n", rateLimit);
+
+#ifdef WIN32
+#ifdef HAVE_DEBUG_INFO
+    rateLB = ((double)debugInfo.lbBytes * 8. *
+              (sampFreqKHz)) / (double)totalsmpls;
+    rateUB = ((double)debugInfo.ubBytes * 8. *
+              (sampFreqKHz)) / (double)totalsmpls;
+#endif
+
+    fprintf(bitrateFile, "%d  %10u     %d     %6.3f  %6.3f    %6.3f\n",
+        sampFreqKHz,
+        framecnt,
+        bottleneck,
+        rateLB,
+        rateUB,
+        rate);
+    fclose(bitrateFile);
+#endif   // WIN32
+
+    printf("\n");
+    printf("Measured bit-rate..................... %0.3f kbps\n", rate);
+    printf("Measured RCU bit-ratre................ %0.3f kbps\n", rateRCU);
+    printf("Maximum bit-rate/payloadsize.......... %0.3f / %d\n",
+        maxStreamLen * 8 / 0.03, maxStreamLen);
+    printf("Measured packet-loss.................. %0.1f%% \n",
+        100.0f * (float)lostPacketCntr / (float)packetCntr);
+
+//#ifdef HAVE_DEBUG_INFO
+//    printf("Measured lower-band bit-rate.......... %0.3f kbps (%.0f%%)\n",
+//        rateLB, (double)(rateLB) * 100. /(double)(rate));
+//    printf("Measured upper-band bit-rate.......... %0.3f kbps (%.0f%%)\n",
+//        rateUB, (double)(rateUB) * 100. /(double)(rate));
+//
+//    printf("Maximum payload lower-band............ %d bytes (%0.3f kbps)\n",
+//        debugInfo.maxPayloadLB, debugInfo.maxPayloadLB * 8.0 / 0.03);
+//    printf("Maximum payload upper-band............ %d bytes (%0.3f kbps)\n",
+//        debugInfo.maxPayloadUB, debugInfo.maxPayloadUB * 8.0 / 0.03);
+//#endif
+
+    printf("\n");
+
+    /* Runtime statistics */
+#ifdef WIN32
+    runtime = (double)(clock()/(double)CLOCKS_PER_SEC-starttime);
+    length_file = ((double)framecnt*(double)declen/(sampFreqKHz*1000));
+    printf("Length of speech file................ %.1f s\n", length_file);
+    printf("Time to run iSAC..................... %.2f s (%.2f %% of realtime)\n\n",
+        runtime, (100*runtime/length_file));
+#endif
+    printf("\n\n_______________________________________________\n");
+
+    if(histFile != NULL)
+    {
+        int n;
+        for(n = 0; n < 600; n++)
+        {
+            fprintf(histFile, "%6d ", hist[n]);
+        }
+        fprintf(histFile, "\n");
+        fclose(histFile);
+    }
+    if(averageFile != NULL)
+    {
+        if(packetCntr > 0)
+        {
+            fprintf(averageFile, "%8.3f ", (double)tmpSumStreamLen * 8.0 / (30.0 * packetCntr));
+        }
+        fprintf(averageFile, "\n");
+        fclose(averageFile);
+    }
+
+    fclose(inp);
+    fclose(outp);
+
+    WebRtcIsac_Free(ISAC_main_inst);
+
+
+#ifdef CHANGE_OUTPUT_NAME
+    {
+        char* p;
+        char myExt[50];
+        char bitRateStr[10];
+        char newOutName[500];
+        strcpy(newOutName, outname);
+
+        myExt[0] = '\0';
+        p = strchr(newOutName, '.');
+        if(p != NULL)
+        {
+            strcpy(myExt, p);
+            *p = '_';
+            p++;
+            *p = '\0';
+        }
+        else
+        {
+            strcat(newOutName, "_");
+        }
+        sprintf(bitRateStr, "%0.0fkbps", rate);
+        strcat(newOutName, bitRateStr);
+        strcat(newOutName, myExt);
+        rename(outname, newOutName);
+    }
+#endif
+    exit(0);
+}
+
+
#ifdef HAVE_DEBUG_INFO
/* Opens every debug dump file referenced by `str` and resets its
 * bookkeeping fields.  Returns 0 on success; the OPEN_FILE_WB() macro
 * makes this function return -1 when a file cannot be opened. */
int setupDebugStruct(debugStruct* str)
{
    str->prevPacketLost = 0;
    str->currPacketLost = 0;
    /* Fixed: nextPacketLost was left uninitialized. */
    str->nextPacketLost = 0;

    OPEN_FILE_WB(str->res0to4FilePtr,     "Res0to4.dat");
    OPEN_FILE_WB(str->res4to8FilePtr,     "Res4to8.dat");
    OPEN_FILE_WB(str->res8to12FilePtr,    "Res8to12.dat");
    OPEN_FILE_WB(str->res8to16FilePtr,    "Res8to16.dat");

    OPEN_FILE_WB(str->res0to4DecFilePtr,  "Res0to4Dec.dat");
    OPEN_FILE_WB(str->res4to8DecFilePtr,  "Res4to8Dec.dat");
    OPEN_FILE_WB(str->res8to12DecFilePtr, "Res8to12Dec.dat");
    OPEN_FILE_WB(str->res8to16DecFilePtr, "Res8to16Dec.dat");

    OPEN_FILE_WB(str->in0to4FilePtr,      "in0to4.dat");
    OPEN_FILE_WB(str->in4to8FilePtr,      "in4to8.dat");
    OPEN_FILE_WB(str->in8to12FilePtr,     "in8to12.dat");
    OPEN_FILE_WB(str->in8to16FilePtr,     "in8to16.dat");

    OPEN_FILE_WB(str->out0to4FilePtr,     "out0to4.dat");
    OPEN_FILE_WB(str->out4to8FilePtr,     "out4to8.dat");
    OPEN_FILE_WB(str->out8to12FilePtr,    "out8to12.dat");
    OPEN_FILE_WB(str->out8to16FilePtr,    "out8to16.dat");
    OPEN_FILE_WB(str->fftFilePtr,         "riFFT.dat");
    OPEN_FILE_WB(str->fftDecFilePtr,      "riFFTDec.dat");

    /* Arrival-time dump is disabled by default (NULL path). */
    OPEN_FILE_WB(str->arrivalTime,        NULL/*"ArivalTime.dat"*/);
    str->lastArrivalTime = 0;

    str->maxPayloadLB = 0;
    str->maxPayloadUB = 0;
    str->lbBytes = 0;
    str->ubBytes = 0;

    return 0;
}  /* Fixed: removed the stray ';' after the body (invalid in strict ISO C). */
#endif
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/util/utility.c b/trunk/src/modules/audio_coding/codecs/iSAC/main/util/utility.c
new file mode 100644
index 0000000..0a2256a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/util/utility.c
@@ -0,0 +1,178 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+#include "utility.h"
+
+/* function for reading audio data from PCM file */
/* Reads `length` 16-bit samples from the little-endian PCM file `inp`
   into `data`, zero-padding a short read so the caller always receives a
   full frame.  Returns 1 when end-of-file was reached, otherwise 0. */
int
readframe(
    short* data,
    FILE*  inp,
    int    length)
{
    unsigned char* raw = (unsigned char*)data;
    short idx;
    int   hitEof = 0;
    short nRead = (short)fread(data, sizeof(short), length, inp);

    /* Pad the remainder of a short read with silence. */
    if (nRead < length)
    {
        hitEof = 1;
        for (idx = nRead; idx < length; idx++)
        {
            data[idx] = 0;
        }
    }

    /* Re-assemble each sample from its two bytes so the result is correct
       on any host; the files themselves are little-endian ("written in
       Intel machines"). */
    for (idx = 0; idx < length; idx++)
    {
        data[idx] = (short)(raw[2 * idx] | ((raw[2 * idx + 1] << 8) & 0xFF00));
    }

    return hitEof;
}
+
/* Returns 1 if the flag `strID` appears anywhere in argv, otherwise 0. */
short
readSwitch(
    int   argc,
    char* argv[],
    char* strID)
{
    short pos = 0;
    while(pos < argc)
    {
        if(!strcmp(argv[pos], strID))
        {
            return 1;
        }
        pos++;
    }
    return 0;
}
+
/* Scans argv for the flag `strID` and parses the token that follows it as
   a double.  Returns `defaultVal` when the flag is absent or is the last
   argument. */
double
readParamDouble(
    int    argc,
    char*  argv[],
    char*  strID,
    double defaultVal)
{
    short pos;
    for(pos = 0; pos < argc; pos++)
    {
        if(strcmp(argv[pos], strID) != 0)
        {
            continue;
        }
        if((pos + 1) < argc)
        {
            return atof(argv[pos + 1]);
        }
        break;
    }
    return defaultVal;
}
+
/* Scans argv for the flag `strID` and parses the token that follows it as
   an int.  Returns `defaultVal` when the flag is absent or is the last
   argument. */
int
readParamInt(
    int   argc,
    char* argv[],
    char* strID,
    int   defaultVal)
{
    int result = defaultVal;
    short pos = 0;
    while(pos < argc)
    {
        if(strcmp(argv[pos], strID) == 0)
        {
            if((pos + 1) < argc)
            {
                result = atoi(argv[pos + 1]);
            }
            break;
        }
        pos++;
    }
    return result;
}
+
/* Scans argv for the flag `strID` and copies the argument that follows it
   into `stringParam`, a buffer of `maxSize` bytes (including the NUL).
   Returns the length of the argument as given on the command line — which
   may exceed the copied, truncated length — or 0 when the flag is absent
   or has no value. */
int
readParamString(
    int   argc,
    char* argv[],
    char* strID,
    char* stringParam,
    int   maxSize)
{
    int paramLength = 0;
    short n;
    for(n = 0; n < argc; n++)
    {
        if(strcmp(argv[n], strID) == 0)
        {
            n++;
            if(n < argc)
            {
                /* Fixed: strncpy() does not NUL-terminate when the source
                   is longer than the buffer; terminate explicitly so the
                   caller always receives a valid C string. */
                strncpy(stringParam, argv[n], maxSize);
                if(maxSize > 0)
                {
                    stringParam[maxSize - 1] = '\0';
                }
                paramLength = (int)strlen(argv[n]);
            }
            break;
        }
    }
    return paramLength;
}
+
/* Advances the bottleneck-channel model `BN_data` by one packet and
 * computes the packet's simulated arrival time.
 *
 * current_framesamples : audio samples carried by this packet.
 * packet_size          : payload size in bytes.
 * bottleneck           : channel rate, excluding headers, in bits/s.
 * BN_data              : channel state; sample_count, arrival_time and
 *                        rtp_number are updated for the caller.
 * senderSampFreqHz / receiverSampFreqHz : sample rates used to convert
 *                        between samples and milliseconds.
 *                        NOTE(review): the integer divisions by 1000
 *                        assume both rates are multiples of 1000 Hz, and
 *                        current_framesamples must be non-zero — confirm.
 */
void
get_arrival_time(
    int              current_framesamples,   /* samples */
    int              packet_size,            /* bytes */
    int              bottleneck,             /* excluding headers; bits/s */
    BottleNeckModel* BN_data,
    short            senderSampFreqHz,
    short            receiverSampFreqHz)
{
    unsigned int travelTimeMs;
    /* Fixed per-packet header overhead (presumably IP/UDP/RTP), in bytes. */
    const int headerSizeByte = 35;

    int headerRate;

    /* Generation time advances by the packet's duration in milliseconds. */
    BN_data->whenPackGeneratedMs += (current_framesamples / (senderSampFreqHz / 1000));

    /* Header overhead expressed as a bit rate over this frame duration. */
    headerRate = headerSizeByte * 8 * senderSampFreqHz / current_framesamples;     /* bits/s */

    /* everything in samples */
    BN_data->sample_count = BN_data->sample_count + current_framesamples;

    /* Transmission time of payload + header through the bottleneck, in ms,
       rounded to nearest:
       (packet_size + headerSizeByte) * 8 * 1000 / (bottleneck + headerRate). */
    travelTimeMs = (unsigned int)floor((double)((packet_size + headerSizeByte) * 8 * 1000)
        / (double)(bottleneck + headerRate) + 0.5);

    /* The channel is serial: a packet cannot depart before the previous
       one has left, so queueing delay accumulates. */
    if(BN_data->whenPrevPackLeftMs > BN_data->whenPackGeneratedMs)
    {
        BN_data->whenPrevPackLeftMs += travelTimeMs;
    }
    else
    {
        BN_data->whenPrevPackLeftMs = BN_data->whenPackGeneratedMs +
            travelTimeMs;
    }

    /* Arrival time expressed in receiver-side samples. */
    BN_data->arrival_time = (BN_data->whenPrevPackLeftMs *
        (receiverSampFreqHz / 1000));

    BN_data->rtp_number++;
}
diff --git a/trunk/src/modules/audio_coding/codecs/iSAC/main/util/utility.h b/trunk/src/modules/audio_coding/codecs/iSAC/main/util/utility.h
new file mode 100644
index 0000000..f9fba94
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/iSAC/main/util/utility.h
@@ -0,0 +1,144 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_
+
+#include <stdlib.h>
+#include <stdio.h>
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+/* File-handling helper macros for the iSAC command-line test tools.
+ *
+ * NOTE(review): each OPEN_FILE_* macro executes `return -1;` when
+ * fopen() fails, so they may only be used inside functions whose
+ * return type accepts an int.  A NULL path is not an error: filePtr
+ * is simply set to NULL. */
+
+/* Open `fullPath` for binary writing ("wb") into `filePtr`. */
+#define OPEN_FILE_WB(filePtr, fullPath)                         \
+  do                                                            \
+  {                                                             \
+    if(fullPath != NULL)                                        \
+    {                                                           \
+      filePtr = fopen(fullPath, "wb");                          \
+      if(filePtr == NULL)                                       \
+      {                                                         \
+        printf("could not open %s to write to.", fullPath);     \
+        return -1;                                              \
+      }                                                         \
+    }                                                           \
+    else                                                        \
+    {                                                           \
+      filePtr = NULL;                                           \
+    }                                                           \
+  }while(0)
+
+/* Open `fullPath` for binary appending ("ab") into `filePtr`. */
+#define OPEN_FILE_AB(filePtr, fullPath)                         \
+  do                                                            \
+  {                                                             \
+    if(fullPath != NULL)                                        \
+    {                                                           \
+      filePtr = fopen(fullPath, "ab");                          \
+      if(filePtr == NULL)                                       \
+      {                                                         \
+        printf("could not open %s to write to.", fullPath);     \
+        return -1;                                              \
+      }                                                         \
+    }                                                           \
+    else                                                        \
+    {                                                           \
+      filePtr = NULL;                                           \
+    }                                                           \
+  }while(0)
+
+/* Open `fullPath` for binary reading ("rb") into `filePtr`. */
+#define OPEN_FILE_RB(filePtr, fullPath)                         \
+  do                                                            \
+  {                                                             \
+    if(fullPath != NULL)                                        \
+    {                                                           \
+      filePtr = fopen(fullPath, "rb");                          \
+      if(filePtr == NULL)                                       \
+      {                                                         \
+        printf("could not open %s to read from.", fullPath);    \
+        return -1;                                              \
+      }                                                         \
+    }                                                           \
+    else                                                        \
+    {                                                           \
+      filePtr = NULL;                                           \
+    }                                                           \
+  }while(0)
+
+/* Write `len` values from bufferPtr to filePtr as doubles, then
+   flush.  No-op when filePtr is NULL.
+   WARNING(review): converts through a fixed 1000-element stack
+   buffer; behavior is undefined for len > 1000. */
+#define WRITE_FILE_D(bufferPtr, len, filePtr)           \
+  do                                                    \
+  {                                                     \
+    if(filePtr != NULL)                                 \
+    {                                                   \
+      double dummy[1000];                               \
+      int cntr;                                         \
+      for(cntr = 0; cntr < (len); cntr++)               \
+      {                                                 \
+        dummy[cntr] = (double)bufferPtr[cntr];          \
+      }                                                 \
+      fwrite(dummy, sizeof(double), len, filePtr);      \
+      fflush(filePtr);                                  \
+    }                                                   \
+  } while(0)
+
+  /* State of the simulated bottleneck channel used by
+     get_arrival_time(). */
+  typedef struct {
+    unsigned int whenPackGeneratedMs;  /* sender clock, milliseconds */
+    unsigned int whenPrevPackLeftMs;   /* departure time of last packet, ms */
+    unsigned int sendTimeMs ;          /* millisecond */
+    unsigned int arrival_time;         /* samples */
+    unsigned int sample_count;         /* samples, also used as "send time stamp" */
+    unsigned int rtp_number;
+  } BottleNeckModel;
+
+  /* Simulates the arrival time (in receiver samples) of the current
+     packet over a bottleneck link; updates BN_data in place. */
+  void get_arrival_time(
+      int              current_framesamples,   /* samples */
+      int              packet_size,            /* bytes */
+      int              bottleneck,             /* excluding headers; bits/s */
+      BottleNeckModel* BN_data,
+      short            senderSampFreqHz,
+      short            receiverSampFreqHz);
+
+  /* function for reading audio data from PCM file */
+  int readframe(
+      short* data,
+      FILE*  inp,
+      int    length);
+
+  /* Command-line switch parser; presumably returns non-zero when
+     strID is present in argv -- confirm in utility.c. */
+  short readSwitch(
+      int   argc,
+      char* argv[],
+      char* strID);
+
+  /* Reads the double argument following switch strID; presumably
+     returns defaultVal when absent -- confirm in utility.c. */
+  double readParamDouble(
+      int    argc,
+      char*  argv[],
+      char*  strID,
+      double defaultVal);
+
+  /* Reads the int argument following switch strID; presumably
+     returns defaultVal when absent -- confirm in utility.c. */
+  int readParamInt(
+      int   argc,
+      char* argv[],
+      char* strID,
+      int   defaultVal);
+
+  /* Copies (up to maxSize chars of) the string argument following
+     switch strID into stringParam and returns the argument's length
+     (return value when absent: see utility.c). */
+  int readParamString(
+      int   argc,
+      char* argv[],
+      char* strID,
+      char* stringParam,
+      int   maxSize);
+
+#if defined(__cplusplus)
+}
+#endif
+
+
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/Android.mk b/trunk/src/modules/audio_coding/codecs/ilbc/Android.mk
new file mode 100644
index 0000000..cbadcab
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/Android.mk
@@ -0,0 +1,165 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (defines MY_WEBRTC_COMMON_DEFS).
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+# Static library with the fixed-point iLBC codec implementation.
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_ilbc
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    abs_quant.c \
+    abs_quant_loop.c \
+    augmented_cb_corr.c \
+    bw_expand.c \
+    cb_construct.c \
+    cb_mem_energy.c \
+    cb_mem_energy_augmentation.c \
+    cb_mem_energy_calc.c \
+    cb_search.c \
+    cb_search_core.c \
+    cb_update_best_index.c \
+    chebyshev.c \
+    comp_corr.c \
+    constants.c \
+    create_augmented_vec.c \
+    decode.c \
+    decode_residual.c \
+    decoder_interpolate_lsf.c \
+    do_plc.c \
+    encode.c \
+    energy_inverse.c \
+    enh_upsample.c \
+    enhancer.c \
+    enhancer_interface.c \
+    filtered_cb_vecs.c \
+    frame_classify.c \
+    gain_dequant.c \
+    gain_quant.c \
+    get_cd_vec.c \
+    get_lsp_poly.c \
+    get_sync_seq.c \
+    hp_input.c \
+    hp_output.c \
+    ilbc.c \
+    index_conv_dec.c \
+    index_conv_enc.c \
+    init_decode.c \
+    init_encode.c \
+    interpolate.c \
+    interpolate_samples.c \
+    lpc_encode.c \
+    lsf_check.c \
+    lsf_interpolate_to_poly_dec.c \
+    lsf_interpolate_to_poly_enc.c \
+    lsf_to_lsp.c \
+    lsf_to_poly.c \
+    lsp_to_lsf.c \
+    my_corr.c \
+    nearest_neighbor.c \
+    pack_bits.c \
+    poly_to_lsf.c \
+    poly_to_lsp.c \
+    refiner.c \
+    simple_interpolate_lsf.c \
+    simple_lpc_analysis.c \
+    simple_lsf_dequant.c \
+    simple_lsf_quant.c \
+    smooth.c \
+    smooth_out_data.c \
+    sort_sq.c \
+    split_vq.c \
+    state_construct.c \
+    state_search.c \
+    swap_bytes.c \
+    unpack_bits.c \
+    vq3.c \
+    vq4.c \
+    window32_w32.c \
+    xcorr_coef.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+# NOTE: keep the last path free of trailing whitespace -- in make it
+# would become part of the value.
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../common_audio/signal_processing/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# Platform builds wire in stlport here; the NDK provides it itself.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+
+
+# iLBC test app
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := test/iLBC_test.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/../../../..
+
+LOCAL_STATIC_LIBRARIES := \
+    libwebrtc_ilbc \
+    libwebrtc_spl
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils
+
+LOCAL_MODULE := webrtc_ilbc_test
+
+ifdef NDK_ROOT
+include $(BUILD_EXECUTABLE)
+else
+include $(BUILD_NATIVE_TEST)
+endif
+
+# iLBC_testLib test app
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := test/iLBC_testLib.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/../../../..
+
+LOCAL_STATIC_LIBRARIES := \
+    libwebrtc_ilbc \
+    libwebrtc_spl
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils
+
+LOCAL_MODULE := webrtc_ilbc_testLib
+
+ifdef NDK_ROOT
+include $(BUILD_EXECUTABLE)
+else
+include $(BUILD_NATIVE_TEST)
+endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant.c b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant.c
new file mode 100644
index 0000000..4a70c8b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant.c
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuant.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "abs_quant_loop.h"
+
+
+/*----------------------------------------------------------------*
+ *  predictive noise shaping encoding of scaled start state
+ *  (subroutine for WebRtcIlbcfix_StateSearch)
+ *---------------------------------------------------------------*/
+
+/* Quantize the scaled start state with predictive noise shaping.
+ * The state is processed in two sections whose lengths/order depend
+ * on iLBC_encbits->state_first; the perceptual weighting filter is
+ * switched at the subframe border.  The chosen quantization indices
+ * are written to iLBC_encbits->idxVec via WebRtcIlbcfix_AbsQuantLoop. */
+void WebRtcIlbcfix_AbsQuant(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax
+                                   and idxVec, uses state_first as
+                                   input) */
+    WebRtc_Word16 *in,     /* (i) vector to encode */
+    WebRtc_Word16 *weightDenum   /* (i) denominator of synthesis filter */
+                            ) {
+  WebRtc_Word16 *syntOut;
+  WebRtc_Word16 quantLen[2];
+
+  /* Stack based */
+  WebRtc_Word16 syntOutBuf[LPC_FILTERORDER+STATE_SHORT_LEN_30MS];
+  WebRtc_Word16 in_weightedVec[STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 *in_weighted = &in_weightedVec[LPC_FILTERORDER];
+
+  /* Initialize the buffers; the first LPC_FILTERORDER entries are
+     filter history and stay zero. */
+  WebRtcSpl_MemSetW16(syntOutBuf, 0, LPC_FILTERORDER+STATE_SHORT_LEN_30MS);
+  syntOut = &syntOutBuf[LPC_FILTERORDER];
+  /* Start with zero state */
+  WebRtcSpl_MemSetW16(in_weightedVec, 0, LPC_FILTERORDER);
+
+  /* Perform the quantization loop in two sections of length quantLen[i],
+     where the perceptual weighting filter is updated at the subframe
+     border */
+
+  /* Section lengths depend on state_first: one section is SUBL
+     samples, the other is the remainder of the short state. */
+  if (iLBC_encbits->state_first) {
+    quantLen[0]=SUBL;
+    quantLen[1]=iLBCenc_inst->state_short_len-SUBL;
+  } else {
+    quantLen[0]=iLBCenc_inst->state_short_len-SUBL;
+    quantLen[1]=SUBL;
+  }
+
+  /* Calculate the weighted residual, switch perceptual weighting
+     filter at the subframe border */
+  WebRtcSpl_FilterARFastQ12(
+      in, in_weighted,
+      weightDenum, LPC_FILTERORDER+1, quantLen[0]);
+  /* Second section: the next subframe's filter coefficients start
+     LPC_FILTERORDER+1 entries further into weightDenum. */
+  WebRtcSpl_FilterARFastQ12(
+      &in[quantLen[0]], &in_weighted[quantLen[0]],
+      &weightDenum[LPC_FILTERORDER+1], LPC_FILTERORDER+1, quantLen[1]);
+
+  WebRtcIlbcfix_AbsQuantLoop(
+      syntOut,
+      in_weighted,
+      weightDenum,
+      quantLen,
+      iLBC_encbits->idxVec);
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant.h b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant.h
new file mode 100644
index 0000000..fa59593
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuant.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  predictive noise shaping encoding of scaled start state
+ *  (subroutine for WebRtcIlbcfix_StateSearch)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AbsQuant(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax
+                                   and idxVec, uses state_first as
+                                   input) */
+    WebRtc_Word16 *in,     /* (i) vector to encode */
+    WebRtc_Word16 *weightDenum   /* (i) denominator of synthesis filter */
+                            );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.c b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
new file mode 100644
index 0000000..4eebc3e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuantLoop.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "sort_sq.h"
+
+/*----------------------------------------------------------------*
+ *  Predictive noise-shaping quantization loop for the scaled start
+ *  state (subroutine for WebRtcIlbcfix_StateSearch).  Quantizes the
+ *  weighted target in two sections of lengths quantLenIN[0] and
+ *  quantLenIN[1], switching the perceptual weighting filter at the
+ *  section border.
+ *
+ *  syntOutIN     : (i/o) synthesis filter state; each decoded sample
+ *                  is written back so the predictor tracks the
+ *                  quantizer
+ *  in_weightedIN : (i) perceptually weighted target samples
+ *  weightDenumIN : (i) weighting-filter denominators,
+ *                  LPC_FILTERORDER+1 coefficients per subframe
+ *  quantLenIN    : (i) the two section lengths
+ *  idxVecIN      : (o) chosen quantization indices, one per sample
+ *---------------------------------------------------------------*/
+void WebRtcIlbcfix_AbsQuantLoop(
+    WebRtc_Word16 *syntOutIN,
+    WebRtc_Word16 *in_weightedIN,
+    WebRtc_Word16 *weightDenumIN,
+    WebRtc_Word16 *quantLenIN,
+    WebRtc_Word16 *idxVecIN
+                                )
+{
+  /* NOTE(review): dead counter `n` removed (it was only ever
+     incremented, never read). */
+  int k1, k2;
+  WebRtc_Word16 index;
+  WebRtc_Word32 toQW32;
+  WebRtc_Word32 toQ32;
+  WebRtc_Word16 tmp16a;
+  WebRtc_Word16 xq;
+
+  WebRtc_Word16 *syntOut   = syntOutIN;
+  WebRtc_Word16 *in_weighted  = in_weightedIN;
+  WebRtc_Word16 *weightDenum  = weightDenumIN;
+  WebRtc_Word16 *quantLen  = quantLenIN;
+  WebRtc_Word16 *idxVec   = idxVecIN;
+
+  for(k1=0;k1<2;k1++) {
+    for(k2=0;k2<quantLen[k1];k2++){
+
+      /* Filter to get the predicted value */
+      WebRtcSpl_FilterARFastQ12(
+          syntOut, syntOut,
+          weightDenum, LPC_FILTERORDER+1, 1);
+
+      /* the quantizer: prediction error */
+      toQW32 = (WebRtc_Word32)(*in_weighted) - (WebRtc_Word32)(*syntOut);
+
+      toQ32 = (((WebRtc_Word32)toQW32)<<2);
+
+      /* Saturate to Word16 range before the table search */
+      if (toQ32 > 32767) {
+        toQ32 = (WebRtc_Word32) 32767;
+      } else if (toQ32 < -32768) {
+        toQ32 = (WebRtc_Word32) -32768;
+      }
+
+      /* Quantize the state */
+      if (toQW32<(-7577)) {
+        /* To prevent negative overflow */
+        index=0;
+      } else if (toQW32>8151) {
+        /* To prevent positive overflow */
+        index=7;
+      } else {
+        /* Find the best quantization index
+           (state_sq3Tbl is in Q13 and toQ is in Q11)
+        */
+        WebRtcIlbcfix_SortSq(&xq, &index,
+                             (WebRtc_Word16)toQ32,
+                             WebRtcIlbcfix_kStateSq3, 8);
+      }
+
+      /* Store selected index */
+      (*idxVec++) = index;
+
+      /* Compute decoded sample and update of the prediction filter */
+      tmp16a = ((WebRtcIlbcfix_kStateSq3[index] + 2 ) >> 2);
+
+      *syntOut     = (WebRtc_Word16) (tmp16a + (WebRtc_Word32)(*in_weighted) - toQW32);
+
+      syntOut++; in_weighted++;
+    }
+    /* Update perceptual weighting filter at subframe border
+       (11 == LPC_FILTERORDER + 1 coefficients per subframe) */
+    weightDenum += 11;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.h b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
new file mode 100644
index 0000000..f506e8e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuantLoop.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  predictive noise shaping encoding of scaled start state
+ *  (subroutine for WebRtcIlbcfix_StateSearch)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AbsQuantLoop(
+    WebRtc_Word16 *syntOutIN,    /* (i/o) synthesis filter state */
+    WebRtc_Word16 *in_weightedIN,  /* (i) perceptually weighted target */
+    WebRtc_Word16 *weightDenumIN,  /* (i) weighting filter denominators */
+    WebRtc_Word16 *quantLenIN,   /* (i) lengths of the two sections */
+    WebRtc_Word16 *idxVecIN    /* (o) selected quantization indices */
+                                );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c b/trunk/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
new file mode 100644
index 0000000..6011e92
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AugmentedCbCorr.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "augmented_cb_corr.h"
+
+/* Cross-correlate the target with the augmented codebook vectors for
+ * lags `low`..`high`, writing one correlation per lag to crossDot.
+ * Each augmented vector is pieced together from the codebook memory
+ * and 4 interpolated samples, so the dot product is computed in
+ * three sections. */
+void WebRtcIlbcfix_AugmentedCbCorr(
+    WebRtc_Word16 *target,   /* (i) Target vector */
+    WebRtc_Word16 *buffer,   /* (i) Memory buffer */
+    WebRtc_Word16 *interpSamples, /* (i) buffer with
+                                     interpolated samples */
+    WebRtc_Word32 *crossDot,  /* (o) The cross correlation between
+                                 the target and the Augmented
+                                 vector */
+    WebRtc_Word16 low,    /* (i) Lag to start from (typically
+                             20) */
+    WebRtc_Word16 high,   /* (i) Lag to end at (typically 39) */
+    WebRtc_Word16 scale)   /* (i) Scale factor to use for
+                              the crossDot */
+{
+  int lagcount;
+  WebRtc_Word16 ilow;
+  WebRtc_Word16 *targetPtr;
+  WebRtc_Word32 *crossDotPtr;
+  WebRtc_Word16 *iSPtr=interpSamples;
+
+  /* Calculate the correlation between the target and the
+     interpolated codebook. The correlation is calculated in
+     3 sections with the interpolated part in the middle */
+  crossDotPtr=crossDot;
+  for (lagcount=low; lagcount<=high; lagcount++) {
+
+    ilow = (WebRtc_Word16) (lagcount-4);
+
+    /* Compute dot product for the first (lagcount-4) samples */
+    (*crossDotPtr) = WebRtcSpl_DotProductWithScale(target, buffer-lagcount, ilow, scale);
+
+    /* Compute dot product on the interpolated samples */
+    (*crossDotPtr) += WebRtcSpl_DotProductWithScale(target+ilow, iSPtr, 4, scale);
+    targetPtr = target + lagcount;
+    iSPtr += lagcount-ilow;  /* advance past the 4 interpolated samples */
+
+    /* Compute dot product for the remaining samples; the augmented
+       vector wraps back to the start of the lag-length history here */
+    (*crossDotPtr) += WebRtcSpl_DotProductWithScale(targetPtr, buffer-lagcount, SUBL-lagcount, scale);
+    crossDotPtr++;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h b/trunk/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
new file mode 100644
index 0000000..8e097fe
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AugmentedCbCorr.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Calculate correlation between target and Augmented codebooks
+ *  (one WebRtc_Word32 result per lag in [low, high]).
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AugmentedCbCorr(
+    WebRtc_Word16 *target,   /* (i) Target vector */
+    WebRtc_Word16 *buffer,   /* (i) Memory buffer */
+    WebRtc_Word16 *interpSamples, /* (i) buffer with
+                                           interpolated samples */
+    WebRtc_Word32 *crossDot,  /* (o) The cross correlation between
+                                           the target and the Augmented
+                                           vector */
+    WebRtc_Word16 low,    /* (i) Lag to start from (typically
+                                                   20) */
+    WebRtc_Word16 high,   /* (i) Lag to end at (typically 39) */
+    WebRtc_Word16 scale);   /* (i) Scale factor to use for
+                                                   the crossDot */
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/bw_expand.c b/trunk/src/modules/audio_coding/codecs/ilbc/bw_expand.c
new file mode 100644
index 0000000..a2287aa
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/bw_expand.c
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_BwExpand.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc bandwidth expansion
+ *---------------------------------------------------------------*/
+
+/* The output is in the same domain as the input */
+/* The output is in the same domain as the input */
+void WebRtcIlbcfix_BwExpand(
+    WebRtc_Word16 *out, /* (o) the bandwidth expanded lpc coefficients */
+    WebRtc_Word16 *in,  /* (i) the lpc coefficients before bandwidth
+                                   expansion */
+    WebRtc_Word16 *coef, /* (i) the bandwidth expansion factor Q15 */
+    WebRtc_Word16 length /* (i) the length of lpc coefficient vectors */
+                            ) {
+  int k;
+
+  /* The leading coefficient is passed through unmodified. */
+  out[0] = in[0];
+
+  /* Scale each remaining coefficient by its Q15 expansion factor,
+     rounding the Q12*Q15 product back down to Q12. */
+  for (k = length - 1; k > 0; k--) {
+    out[k] = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(coef[k], in[k]) + 16384) >> 15);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/bw_expand.h b/trunk/src/modules/audio_coding/codecs/ilbc/bw_expand.h
new file mode 100644
index 0000000..c9f3fab
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/bw_expand.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_BwExpand.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc bandwidth expansion
+ *
+ *  out[0] = in[0]; for i > 0, out[i] = round(coef[i] * in[i] >> 15),
+ *  i.e. each coefficient is scaled by a Q15 factor.  The output is
+ *  in the same Q-domain as the input.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_BwExpand(
+    WebRtc_Word16 *out, /* (o) the bandwidth expanded lpc coefficients */
+    WebRtc_Word16 *in,  /* (i) the lpc coefficients before bandwidth
+                                   expansion */
+    WebRtc_Word16 *coef, /* (i) the bandwidth expansion factor Q15 */
+    WebRtc_Word16 length /* (i) the length of lpc coefficient vectors */
+                            );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_construct.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_construct.c
new file mode 100644
index 0000000..094a7e4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_construct.c
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbConstruct.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "gain_dequant.h"
+#include "get_cd_vec.h"
+
+/*----------------------------------------------------------------*
+ *  Construct decoded vector from codebook and gains.
+ *---------------------------------------------------------------*/
+
+/* Reassemble the decoded excitation vector: de-quantize the three
+   stage gains, fetch the three codebook vectors, and form their
+   gain-weighted, rounded sum. */
+void WebRtcIlbcfix_CbConstruct(
+    WebRtc_Word16 *decvector,  /* (o) Decoded vector */
+    WebRtc_Word16 *index,   /* (i) Codebook indices */
+    WebRtc_Word16 *gain_index,  /* (i) Gain quantization indices */
+    WebRtc_Word16 *mem,   /* (i) Buffer for codevector construction */
+    WebRtc_Word16 lMem,   /* (i) Length of buffer */
+    WebRtc_Word16 veclen   /* (i) Length of vector */
+                               ){
+  int n;
+  WebRtc_Word16 gain[CB_NSTAGES];
+  /* Stack based */
+  WebRtc_Word16 cbvec0[SUBL];
+  WebRtc_Word16 cbvec1[SUBL];
+  WebRtc_Word16 cbvec2[SUBL];
+  WebRtc_Word32 acc;
+
+  /* Gain de-quantization: each stage is de-quantized relative to the
+     previous stage's gain (the first relative to the fixed value
+     16384). */
+  gain[0] = WebRtcIlbcfix_GainDequant(gain_index[0], 16384, 0);
+  gain[1] = WebRtcIlbcfix_GainDequant(gain_index[1], gain[0], 1);
+  gain[2] = WebRtcIlbcfix_GainDequant(gain_index[2], gain[1], 2);
+
+  /* Fetch the three codebook vectors addressed by the indices. */
+  WebRtcIlbcfix_GetCbVec(cbvec0, mem, index[0], lMem, veclen);
+  WebRtcIlbcfix_GetCbVec(cbvec1, mem, index[1], lMem, veclen);
+  WebRtcIlbcfix_GetCbVec(cbvec2, mem, index[2], lMem, veclen);
+
+  /* Gain-weighted sum of the three stages, rounded from Q14 down to
+     the output domain. */
+  for (n = 0; n < veclen; n++) {
+    acc  = WEBRTC_SPL_MUL_16_16(gain[0], cbvec0[n]);
+    acc += WEBRTC_SPL_MUL_16_16(gain[1], cbvec1[n]);
+    acc += WEBRTC_SPL_MUL_16_16(gain[2], cbvec2[n]);
+    decvector[n] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(acc + 8192, 14);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_construct.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_construct.h
new file mode 100644
index 0000000..bec759f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_construct.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbConstruct.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Construct decoded vector from codebook and gains.
+ *  NOTE(review): index and gain_index are read for three stages --
+ *  confirm CB_NSTAGES == 3 against defines.h.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbConstruct(
+    WebRtc_Word16 *decvector,  /* (o) Decoded vector */
+    WebRtc_Word16 *index,   /* (i) Codebook indices */
+    WebRtc_Word16 *gain_index,  /* (i) Gain quantization indices */
+    WebRtc_Word16 *mem,   /* (i) Buffer for codevector construction */
+    WebRtc_Word16 lMem,   /* (i) Length of buffer */
+    WebRtc_Word16 veclen   /* (i) Length of vector */
+                               );
+
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
new file mode 100644
index 0000000..8613fa2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergy.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "cb_mem_energy_calc.h"
+
+/*----------------------------------------------------------------*
+ *  Function WebRtcIlbcfix_CbMemEnergy computes the energy of all
+ * the vectors in the codebook memory that will be used in the
+ * following search for the best match.
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbMemEnergy(
+    WebRtc_Word16 range,  /* (i) Number of CB vectors to compute energies for */
+    WebRtc_Word16 *CB,   /* (i) The CB memory (1:st section) */
+    WebRtc_Word16 *filteredCB,  /* (i) The filtered CB memory (2:nd section) */
+    WebRtc_Word16 lMem,   /* (i) Length of the CB memory */
+    WebRtc_Word16 lTarget,   /* (i) Length of the target vector */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                               ) {
+  WebRtc_Word16 *ppi, *ppo, *pp;
+  WebRtc_Word32 energy, tmp32;
+
+  /* Compute the energy and store it in a vector. Also the
+   * corresponding shift values are stored. The energy values
+   * are reused in all three stages. */
+
+  /* Calculate the energy in the first block of 'lTarget' samples. */
+  ppi = CB+lMem-lTarget-1;  /* sample entering the window as it slides toward the start */
+  ppo = CB+lMem-1;  /* sample leaving the window as it slides toward the start */
+
+  pp=CB+lMem-lTarget;
+  energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale);
+
+  /* Normalize the energy and store the number of shifts */
+  energyShifts[0] = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+  tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, energyShifts[0]);
+  energyW16[0] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+
+  /* Compute the energy of the rest of the cb memory
+   * by step wise adding and subtracting the next
+   * sample and the last sample respectively. */
+  WebRtcIlbcfix_CbMemEnergyCalc(energy, range, ppi, ppo, energyW16, energyShifts, scale, 0);
+
+  /* Next, precompute the energy values for the filtered cb section */
+  energy=0;
+  pp=filteredCB+lMem-lTarget;
+
+  energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale);
+
+  /* Normalize the energy and store the number of shifts */
+  energyShifts[base_size] = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+  tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, energyShifts[base_size]);
+  energyW16[base_size] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+
+  ppi = filteredCB + lMem - 1 - lTarget;  /* same sliding-window update for the filtered section */
+  ppo = filteredCB + lMem - 1;
+
+  WebRtcIlbcfix_CbMemEnergyCalc(energy, range, ppi, ppo, energyW16, energyShifts, scale, base_size);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
new file mode 100644
index 0000000..1aa2b7b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergy.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_
+
+/* NOTE(review): uses WebRtc_Word16 but does not include "defines.h" itself;
+   callers must include it first — consider making this header self-contained. */
+void WebRtcIlbcfix_CbMemEnergy(
+    WebRtc_Word16 range,
+    WebRtc_Word16 *CB,   /* (i) The CB memory (1:st section) */
+    WebRtc_Word16 *filteredCB,  /* (i) The filtered CB memory (2:nd section) */
+    WebRtc_Word16 lMem,   /* (i) Length of the CB memory */
+    WebRtc_Word16 lTarget,   /* (i) Length of the target vector */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                               );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
new file mode 100644
index 0000000..0c6f479
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyAugmentation.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+/* Compute the energies (and their normalization shifts) of the augmented CB vectors, lags 20..39. */
+void WebRtcIlbcfix_CbMemEnergyAugmentation(
+    WebRtc_Word16 *interpSamples, /* (i) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size,  /* (i) Index to where the energy values should be stored */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts /* (o) Shift value of the energy */
+                                           ){
+  WebRtc_Word32 energy, tmp32;
+  WebRtc_Word16 *ppe, *pp, *interpSamplesPtr;
+  WebRtc_Word16 *CBmemPtr, lagcount;
+  WebRtc_Word16 *enPtr=&energyW16[base_size-20];  /* energies for lags 20..39 end just before base_size */
+  WebRtc_Word16 *enShPtr=&energyShifts[base_size-20];
+  WebRtc_Word32 nrjRecursive;
+
+  CBmemPtr = CBmem+147;  /* one past the last CB memory sample used (147 looks like the CB memory length — TODO confirm) */
+  interpSamplesPtr = interpSamples;
+
+  /* Seed the recursion: energy of 15 non-interpolated samples (CBmemPtr[-19..-5]) */
+  nrjRecursive = WebRtcSpl_DotProductWithScale( CBmemPtr-19, CBmemPtr-19, 15, scale);
+  ppe = CBmemPtr - 20;
+
+  for (lagcount=20; lagcount<=39; lagcount++) {
+
+    /* Update the energy recursively to save complexity */
+    nrjRecursive = nrjRecursive +
+        WEBRTC_SPL_MUL_16_16_RSFT(*ppe, *ppe, scale);
+    ppe--;
+    energy = nrjRecursive;
+
+    /* interpolation */
+    energy += WebRtcSpl_DotProductWithScale(interpSamplesPtr, interpSamplesPtr, 4, scale);
+    interpSamplesPtr += 4;
+
+    /* Compute energy for the remaining samples */
+    pp = CBmemPtr - lagcount;
+    energy += WebRtcSpl_DotProductWithScale(pp, pp, SUBL-lagcount, scale);
+
+    /* Normalize the energy and store the number of shifts */
+    (*enShPtr) = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, (*enShPtr));
+    (*enPtr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+    enShPtr++;
+    enPtr++;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
new file mode 100644
index 0000000..938b87e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyAugmentation.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_
+
+/* NOTE(review): uses WebRtc_Word16 but does not include "defines.h" itself;
+   callers must include it first — consider making this header self-contained. */
+void WebRtcIlbcfix_CbMemEnergyAugmentation(
+    WebRtc_Word16 *interpSamples, /* (i) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size,  /* (i) Index to where the energy values should be stored */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts /* (o) Shift value of the energy */
+                                           );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
new file mode 100644
index 0000000..40bb708
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyCalc.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/* Compute the energy of the rest of the cb memory
+ * by step wise adding and subtracting the next
+ * sample and the last sample respectively */
+void WebRtcIlbcfix_CbMemEnergyCalc(
+    WebRtc_Word32 energy,   /* (i) input start energy */
+    WebRtc_Word16 range,   /* (i) number of iterations */
+    WebRtc_Word16 *ppi,   /* (i) input pointer 1 */
+    WebRtc_Word16 *ppo,   /* (i) input pointer 2 */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                                   )
+{
+  WebRtc_Word16 j,shft;
+  WebRtc_Word32 tmp;
+  WebRtc_Word16 *eSh_ptr;
+  WebRtc_Word16 *eW16_ptr;
+
+
+  eSh_ptr  = &energyShifts[1+base_size];  /* position base_size (the seed energy) was written by the caller */
+  eW16_ptr = &energyW16[1+base_size];
+
+  for(j=0;j<range-1;j++) {
+
+    /* Calculate next energy by a +/-
+       operation on the edge samples */
+    tmp  = WEBRTC_SPL_MUL_16_16(*ppi, *ppi);
+    tmp -= WEBRTC_SPL_MUL_16_16(*ppo, *ppo);
+    energy += WEBRTC_SPL_RSHIFT_W32(tmp, scale);
+    energy = WEBRTC_SPL_MAX(energy, 0);  /* keep the running energy estimate non-negative */
+
+    ppi--;
+    ppo--;
+
+    /* Normalize the energy into a WebRtc_Word16 and store
+       the number of shifts */
+
+    shft = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+    *eSh_ptr++ = shft;
+
+    tmp = WEBRTC_SPL_LSHIFT_W32(energy, shft);
+    *eW16_ptr++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp, 16);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
new file mode 100644
index 0000000..ee2e285
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyCalc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_
+
+/* NOTE(review): uses WebRtc_Word16/WebRtc_Word32 but does not include "defines.h"
+   itself; callers must include it first — consider making this header self-contained. */
+void WebRtcIlbcfix_CbMemEnergyCalc(
+    WebRtc_Word32 energy,   /* (i) input start energy */
+    WebRtc_Word16 range,   /* (i) number of iterations */
+    WebRtc_Word16 *ppi,   /* (i) input pointer 1 */
+    WebRtc_Word16 *ppo,   /* (i) input pointer 2 */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                                   );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_search.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search.c
new file mode 100644
index 0000000..c51ccf7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search.c
@@ -0,0 +1,396 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearch.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "gain_quant.h"
+#include "filtered_cb_vecs.h"
+#include "constants.h"
+#include "cb_mem_energy.h"
+#include "interpolate_samples.h"
+#include "cb_mem_energy_augmentation.h"
+#include "cb_search_core.h"
+#include "energy_inverse.h"
+#include "augmented_cb_corr.h"
+#include "cb_update_best_index.h"
+#include "create_augmented_vec.h"
+
+/*----------------------------------------------------------------*
+ *  Search routine for codebook encoding and gain quantization.
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) the encoder state structure */
+    WebRtc_Word16 *index,  /* (o) Codebook indices */
+    WebRtc_Word16 *gain_index, /* (o) Gain quantization indices */
+    WebRtc_Word16 *intarget, /* (i) Target vector for encoding */
+    WebRtc_Word16 *decResidual,/* (i) Decoded residual for codebook construction */
+    WebRtc_Word16 lMem,  /* (i) Length of buffer */
+    WebRtc_Word16 lTarget,  /* (i) Length of vector */
+    WebRtc_Word16 *weightDenum,/* (i) weighting filter coefficients in Q12 */
+    WebRtc_Word16 block  /* (i) the subblock number */
+                            ) {
+  WebRtc_Word16 i, j, stage, range;
+  WebRtc_Word16 *pp, scale, tmp;
+  WebRtc_Word16 bits, temp1, temp2;
+  WebRtc_Word16 base_size;
+  WebRtc_Word32 codedEner, targetEner;
+  WebRtc_Word16 gains[CB_NSTAGES+1];
+  WebRtc_Word16 *cb_vecPtr;
+  WebRtc_Word16 indexOffset, sInd, eInd;
+  WebRtc_Word32 CritMax=0;
+  WebRtc_Word16 shTotMax=WEBRTC_SPL_WORD16_MIN;
+  WebRtc_Word16 bestIndex=0;
+  WebRtc_Word16 bestGain=0;
+  WebRtc_Word16 indexNew, CritNewSh;
+  WebRtc_Word32 CritNew;
+  WebRtc_Word32 *cDotPtr;
+  WebRtc_Word16 noOfZeros;
+  WebRtc_Word16 *gainPtr;
+  WebRtc_Word32 t32, tmpW32;
+  WebRtc_Word16 *WebRtcIlbcfix_kGainSq5_ptr;
+  /* Stack based */
+  WebRtc_Word16 CBbuf[CB_MEML+LPC_FILTERORDER+CB_HALFFILTERLEN];
+  WebRtc_Word32 cDot[128];
+  WebRtc_Word32 Crit[128];
+  WebRtc_Word16 targetVec[SUBL+LPC_FILTERORDER];
+  WebRtc_Word16 cbvectors[CB_MEML];
+  WebRtc_Word16 codedVec[SUBL];
+  WebRtc_Word16 interpSamples[20*4];
+  WebRtc_Word16 interpSamplesFilt[20*4];
+  WebRtc_Word16 energyW16[CB_EXPAND*128];
+  WebRtc_Word16 energyShifts[CB_EXPAND*128];
+  WebRtc_Word16 *inverseEnergy=energyW16;   /* Reuse memory */
+  WebRtc_Word16 *inverseEnergyShifts=energyShifts; /* Reuse memory */
+  WebRtc_Word16 *buf = &CBbuf[LPC_FILTERORDER];
+  WebRtc_Word16 *target = &targetVec[LPC_FILTERORDER];
+  WebRtc_Word16 *aug_vec = (WebRtc_Word16*)cDot;   /* length [SUBL], reuse memory */
+
+  /* Determine size of codebook sections */
+
+  base_size=lMem-lTarget+1;
+  if (lTarget==SUBL) {
+    base_size=lMem-19;
+  }
+
+  /* weighting of the CB memory */
+  noOfZeros=lMem-WebRtcIlbcfix_kFilterRange[block];
+  WebRtcSpl_MemSetW16(&buf[-LPC_FILTERORDER], 0, noOfZeros+LPC_FILTERORDER);
+  WebRtcSpl_FilterARFastQ12(
+      decResidual+noOfZeros, buf+noOfZeros,
+      weightDenum, LPC_FILTERORDER+1, WebRtcIlbcfix_kFilterRange[block]);
+
+  /* weighting of the target vector */
+  WEBRTC_SPL_MEMCPY_W16(&target[-LPC_FILTERORDER], buf+noOfZeros+WebRtcIlbcfix_kFilterRange[block]-LPC_FILTERORDER, LPC_FILTERORDER);
+  WebRtcSpl_FilterARFastQ12(
+      intarget, target,
+      weightDenum, LPC_FILTERORDER+1, lTarget);
+
+  /* Store target, towards the end codedVec is calculated as
+     the initial target minus the remaining target */
+  WEBRTC_SPL_MEMCPY_W16(codedVec, target, lTarget);
+
+  /* Find the highest absolute value to calculate proper
+     vector scale factor (so that it uses 12 bits) */
+  temp1 = WebRtcSpl_MaxAbsValueW16(buf, (WebRtc_Word16)lMem);
+  temp2 = WebRtcSpl_MaxAbsValueW16(target, (WebRtc_Word16)lTarget);
+
+  if ((temp1>0)&&(temp2>0)) {
+    temp1 = WEBRTC_SPL_MAX(temp1, temp2);
+    scale = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(temp1, temp1));
+  } else {
+    /* temp1 or temp2 is negative (maximum was -32768) */
+    scale = 30;
+  }
+
+  /* Scale so that a multiply-accumulate over 40 samples does not overflow */
+  scale = scale - 25;
+  scale = WEBRTC_SPL_MAX(0, scale);
+
+  /* Compute energy of the original target */
+  targetEner = WebRtcSpl_DotProductWithScale(target, target, lTarget, scale);
+
+  /* Prepare search over one more codebook section. This section
+     is created by filtering the original buffer with a filter. */
+  WebRtcIlbcfix_FilteredCbVecs(cbvectors, buf, lMem, WebRtcIlbcfix_kFilterRange[block]);
+
+  range = WebRtcIlbcfix_kSearchRange[block][0];
+
+  if(lTarget == SUBL) {
+    /* Create the interpolated samples and store them for use in all stages */
+
+    /* First section, non-filtered half of the cb */
+    WebRtcIlbcfix_InterpolateSamples(interpSamples, buf, lMem);
+
+    /* Second section, filtered half of the cb */
+    WebRtcIlbcfix_InterpolateSamples(interpSamplesFilt, cbvectors, lMem);
+
+    /* Compute the CB vectors' energies for the first cb section (non-filtered) */
+    WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamples, buf,
+                                          scale, 20, energyW16, energyShifts);
+
+    /* Compute the CB vectors' energies for the second cb section (filtered cb) */
+    WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamplesFilt, cbvectors,
+                                          scale, (WebRtc_Word16)(base_size+20), energyW16, energyShifts);
+
+    /* Compute the CB vectors' energies and store them in the vector
+     * energyW16. Also the corresponding shift values are stored. The
+     * energy values are used in all three stages. */
+    WebRtcIlbcfix_CbMemEnergy(range, buf, cbvectors, lMem,
+                              lTarget, energyW16+20, energyShifts+20, scale, base_size);
+
+  } else {
+    /* Compute the CB vectors' energies and store them in the vector
+     * energyW16. Also the corresponding shift values are stored. The
+     * energy values are used in all three stages. */
+    WebRtcIlbcfix_CbMemEnergy(range, buf, cbvectors, lMem,
+                              lTarget, energyW16, energyShifts, scale, base_size);
+
+    /* Set the energy positions 58-63 and 122-127 to zero
+       (otherwise they are uninitialized) */
+    WebRtcSpl_MemSetW16(energyW16+range, 0, (base_size-range));
+    WebRtcSpl_MemSetW16(energyW16+range+base_size, 0, (base_size-range));
+  }
+
+  /* Calculate Inverse Energy (energyW16 is already normalized
+     and will contain the inverse energy in Q29 after this call) */
+  WebRtcIlbcfix_EnergyInverse(energyW16, base_size*CB_EXPAND);
+
+  /* The gain value computed in the previous stage is used
+   * as an upper limit to what the next stage gain value
+   * is allowed to be. In stage 0, 16384 (1.0 in Q14) is used as
+   * the upper limit. */
+  gains[0] = 16384;
+
+  for (stage=0; stage<CB_NSTAGES; stage++) {
+
+    /* Set up memories */
+    range = WebRtcIlbcfix_kSearchRange[block][stage];
+
+    /* initialize search measures */
+    CritMax=0;
+    shTotMax=-100;
+    bestIndex=0;
+    bestGain=0;
+
+    /* loop over lags 40+ in the first codebook section, full search */
+    cb_vecPtr = buf+lMem-lTarget;
+
+    /* Calculate all the cross correlations (augmented part of CB) */
+    if (lTarget==SUBL) {
+      WebRtcIlbcfix_AugmentedCbCorr(target, buf+lMem,
+                                    interpSamples, cDot,
+                                    20, 39, scale);
+      cDotPtr=&cDot[20];
+    } else {
+      cDotPtr=cDot;
+    }
+    /* Calculate all the cross correlations (main part of CB) */
+    WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, range, scale, -1);
+
+    /* Adjust the search range for the augmented vectors */
+    if (lTarget==SUBL) {
+      range=WebRtcIlbcfix_kSearchRange[block][stage]+20;
+    } else {
+      range=WebRtcIlbcfix_kSearchRange[block][stage];
+    }
+
+    indexOffset=0;
+
+    /* Search for best index in this part of the vector */
+    WebRtcIlbcfix_CbSearchCore(
+        cDot, range, stage, inverseEnergy,
+        inverseEnergyShifts, Crit,
+        &indexNew, &CritNew, &CritNewSh);
+
+    /* Update the global best index and the corresponding gain */
+    WebRtcIlbcfix_CbUpdateBestIndex(
+        CritNew, CritNewSh, (WebRtc_Word16)(indexNew+indexOffset), cDot[indexNew+indexOffset],
+        inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
+        &CritMax, &shTotMax, &bestIndex, &bestGain);
+
+    sInd=bestIndex-(WebRtc_Word16)(CB_RESRANGE>>1);  /* refined search window of width CB_RESRANGE centered on bestIndex */
+    eInd=sInd+CB_RESRANGE;
+    if (sInd<0) {
+      eInd-=sInd;
+      sInd=0;
+    }
+    if (eInd>=range) {
+      eInd=range-1;
+      sInd=eInd-CB_RESRANGE;
+    }
+
+    range = WebRtcIlbcfix_kSearchRange[block][stage];
+
+    if (lTarget==SUBL) {
+      i=sInd;
+      if (sInd<20) {
+        WebRtcIlbcfix_AugmentedCbCorr(target, cbvectors+lMem,
+                                      interpSamplesFilt, cDot,
+                                      (WebRtc_Word16)(sInd+20), (WebRtc_Word16)(WEBRTC_SPL_MIN(39, (eInd+20))), scale);
+        i=20;
+      }
+
+      cDotPtr=&cDot[WEBRTC_SPL_MAX(0,(20-sInd))];
+      cb_vecPtr = cbvectors+lMem-20-i;
+
+      /* Calculate the cross correlations (main part of the filtered CB) */
+      WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (WebRtc_Word16)(eInd-i+1), scale, -1);
+
+    } else {
+      cDotPtr = cDot;
+      cb_vecPtr = cbvectors+lMem-lTarget-sInd;
+
+      /* Calculate the cross correlations (main part of the filtered CB) */
+      WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (WebRtc_Word16)(eInd-sInd+1), scale, -1);
+
+    }
+
+    /* Adjust the search range for the augmented vectors */
+    indexOffset=base_size+sInd;
+
+    /* Search for best index in this part of the vector */
+    WebRtcIlbcfix_CbSearchCore(
+        cDot, (WebRtc_Word16)(eInd-sInd+1), stage, inverseEnergy+indexOffset,
+        inverseEnergyShifts+indexOffset, Crit,
+        &indexNew, &CritNew, &CritNewSh);
+
+    /* Update the global best index and the corresponding gain */
+    WebRtcIlbcfix_CbUpdateBestIndex(
+        CritNew, CritNewSh, (WebRtc_Word16)(indexNew+indexOffset), cDot[indexNew],
+        inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
+        &CritMax, &shTotMax, &bestIndex, &bestGain);
+
+    index[stage] = bestIndex;
+
+
+    bestGain = WebRtcIlbcfix_GainQuant(bestGain,
+                                       (WebRtc_Word16)WEBRTC_SPL_ABS_W16(gains[stage]), stage, &gain_index[stage]);
+
+    /* Extract the best (according to measure) codebook vector
+       Also adjust the index, so that the augmented vectors are last.
+       Above these vectors were first...
+    */
+
+    if(lTarget==(STATE_LEN-iLBCenc_inst->state_short_len)) {
+
+      if(index[stage]<base_size) {
+        pp=buf+lMem-lTarget-index[stage];
+      } else {
+        pp=cbvectors+lMem-lTarget-
+            index[stage]+base_size;
+      }
+
+    } else {
+
+      if (index[stage]<base_size) {
+        if (index[stage]>=20) {
+          /* Adjust index and extract vector */
+          index[stage]-=20;
+          pp=buf+lMem-lTarget-index[stage];
+        } else {
+          /* Adjust index and extract vector */
+          index[stage]+=(base_size-20);
+
+          WebRtcIlbcfix_CreateAugmentedVec((WebRtc_Word16)(index[stage]-base_size+40),
+                                           buf+lMem, aug_vec);
+          pp = aug_vec;
+
+        }
+      } else {
+
+        if ((index[stage] - base_size) >= 20) {
+          /* Adjust index and extract vector */
+          index[stage]-=20;
+          pp=cbvectors+lMem-lTarget-
+              index[stage]+base_size;
+        } else {
+          /* Adjust index and extract vector */
+          index[stage]+=(base_size-20);
+          WebRtcIlbcfix_CreateAugmentedVec((WebRtc_Word16)(index[stage]-2*base_size+40),
+                                           cbvectors+lMem, aug_vec);
+          pp = aug_vec;
+        }
+      }
+    }
+
+    /* Subtract the best codebook vector, according
+       to measure, from the target vector */
+
+    WebRtcSpl_AddAffineVectorToVector(target, pp, (WebRtc_Word16)(-bestGain), (WebRtc_Word32)8192, (WebRtc_Word16)14, (int)lTarget);
+
+    /* record quantized gain */
+    gains[stage+1] = bestGain;
+
+  } /* end of Main Loop. for (stage=0;... */
+
+  /* Calculate the coded vector (original target - what's left) */
+  for (i=0;i<lTarget;i++) {
+    codedVec[i]-=target[i];
+  }
+
+  /* Gain adjustment for energy matching */
+  codedEner = WebRtcSpl_DotProductWithScale(codedVec, codedVec, lTarget, scale);
+
+  j=gain_index[0];
+
+  temp1 = (WebRtc_Word16)WebRtcSpl_NormW32(codedEner);
+  temp2 = (WebRtc_Word16)WebRtcSpl_NormW32(targetEner);
+
+  if(temp1 < temp2) {
+    bits = 16 - temp1;
+  } else {
+    bits = 16 - temp2;
+  }
+
+  tmp = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(gains[1],gains[1], 14);
+
+  targetEner = WEBRTC_SPL_MUL_16_16(
+      WEBRTC_SPL_SHIFT_W32(targetEner, -bits), tmp);
+
+  tmpW32 = ((WebRtc_Word32)(gains[1]-1))<<1;  /* approx 2*gain[0]; upper bound used in the loop below */
+
+  /* Pointer to the table that contains
+     gain_sq5TblFIX * gain_sq5TblFIX in Q14 */
+  gainPtr=(WebRtc_Word16*)WebRtcIlbcfix_kGainSq5Sq+gain_index[0];
+  temp1 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(codedEner, -bits);
+
+  WebRtcIlbcfix_kGainSq5_ptr = (WebRtc_Word16*)&WebRtcIlbcfix_kGainSq5[j];
+
+  /* targetEner and codedEner are in Q(-2*scale) */
+  for (i=gain_index[0];i<32;i++) {
+
+    /* Change the index if
+       (codedEnergy*gainTbl[i]*gainTbl[i])<(targetEn*gain[0]*gain[0]) AND
+       gainTbl[i] < 2*gain[0]
+    */
+
+    t32 = WEBRTC_SPL_MUL_16_16(temp1, (*gainPtr));
+    t32 = t32 - targetEner;
+    if (t32 < 0) {
+      if ((*WebRtcIlbcfix_kGainSq5_ptr) < tmpW32) {
+        j=i;
+        WebRtcIlbcfix_kGainSq5_ptr = (WebRtc_Word16*)&WebRtcIlbcfix_kGainSq5[i];
+      }
+    }
+    gainPtr++;
+  }
+  gain_index[0]=j;
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_search.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search.h
new file mode 100644
index 0000000..e4ad4b5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearch.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_
+
+/* NOTE(review): uses iLBC_Enc_Inst_t and WebRtc_Word16 but does not include
+   "defines.h" itself; callers must include it first — consider making this header self-contained. */
+void WebRtcIlbcfix_CbSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) the encoder state structure */
+    WebRtc_Word16 *index,  /* (o) Codebook indices */
+    WebRtc_Word16 *gain_index, /* (o) Gain quantization indices */
+    WebRtc_Word16 *intarget, /* (i) Target vector for encoding */
+    WebRtc_Word16 *decResidual,/* (i) Decoded residual for codebook construction */
+    WebRtc_Word16 lMem,  /* (i) Length of buffer */
+    WebRtc_Word16 lTarget,  /* (i) Length of vector */
+    WebRtc_Word16 *weightDenum,/* (i) weighting filter coefficients in Q12 */
+    WebRtc_Word16 block  /* (i) the subblock number */
+                            );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_search_core.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search_core.c
new file mode 100644
index 0000000..711e2df
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search_core.c
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearchCore.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_CbSearchCore(
+    WebRtc_Word32 *cDot,    /* (i) Cross Correlation */
+    WebRtc_Word16 range,    /* (i) Search range */
+    WebRtc_Word16 stage,    /* (i) Stage of this search */
+    WebRtc_Word16 *inverseEnergy,  /* (i) Inversed energy */
+    WebRtc_Word16 *inverseEnergyShift, /* (i) Shifts of inversed energy
+                                           with the offset 2*16-29 */
+    WebRtc_Word32 *Crit,    /* (o) The criteria */
+    WebRtc_Word16 *bestIndex,   /* (o) Index that corresponds to
+                                                   maximum criteria (in this
+                                                   vector) */
+    WebRtc_Word32 *bestCrit,   /* (o) Value of criteria for the
+                                                   chosen index */
+    WebRtc_Word16 *bestCritSh)   /* (o) The domain of the chosen
+                                                   criteria */
+{
+  WebRtc_Word32 maxW32, tmp32;
+  WebRtc_Word16 max, sh, tmp16;
+  int i;
+  WebRtc_Word32 *cDotPtr;
+  WebRtc_Word16 cDotSqW16;
+  WebRtc_Word16 *inverseEnergyPtr;
+  WebRtc_Word32 *critPtr;
+  WebRtc_Word16 *inverseEnergyShiftPtr;
+
+  /* Don't allow negative values for stage 0 */
+  if (stage==0) {
+    cDotPtr=cDot;
+    for (i=0;i<range;i++) {
+      *cDotPtr=WEBRTC_SPL_MAX(0, (*cDotPtr));
+      cDotPtr++;
+    }
+  }
+
+  /* Normalize cDot to WebRtc_Word16, calculate the square of cDot and store the upper WebRtc_Word16 */
+  maxW32 = WebRtcSpl_MaxAbsValueW32(cDot, range);
+
+  sh = (WebRtc_Word16)WebRtcSpl_NormW32(maxW32);
+  cDotPtr = cDot;
+  inverseEnergyPtr = inverseEnergy;
+  critPtr = Crit;
+  inverseEnergyShiftPtr=inverseEnergyShift;
+  max=WEBRTC_SPL_WORD16_MIN;
+
+  for (i=0;i<range;i++) {
+    /* Calculate cDot*cDot and put the result in a WebRtc_Word16 */
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(*cDotPtr,sh);
+    tmp16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32,16);
+    cDotSqW16 = (WebRtc_Word16)(((WebRtc_Word32)(tmp16)*(tmp16))>>16);
+
+    /* Calculate the criteria (cDot*cDot/energy) */
+    *critPtr=WEBRTC_SPL_MUL_16_16(cDotSqW16, (*inverseEnergyPtr));
+
+    /* Extract the maximum shift value under the constraint
+       that the criteria is not zero */
+    if ((*critPtr)!=0) {
+      max = WEBRTC_SPL_MAX((*inverseEnergyShiftPtr), max);
+    }
+
+    inverseEnergyPtr++;
+    inverseEnergyShiftPtr++;
+    critPtr++;
+    cDotPtr++;
+  }
+
+  /* If no max shifts still at initialization value, set shift to zero */
+  if (max==WEBRTC_SPL_WORD16_MIN) {
+    max = 0;
+  }
+
+  /* Modify the criteria, so that all of them use the same Q domain */
+  critPtr=Crit;
+  inverseEnergyShiftPtr=inverseEnergyShift;
+  for (i=0;i<range;i++) {
+    /* Guarantee that the shift value is less than 16
+       in order to simplify for DSP's (and guard against >31) */
+    tmp16 = WEBRTC_SPL_MIN(16, max-(*inverseEnergyShiftPtr));
+
+    (*critPtr)=WEBRTC_SPL_SHIFT_W32((*critPtr),-tmp16);
+    critPtr++;
+    inverseEnergyShiftPtr++;
+  }
+
+  /* Find the index of the best value */
+  *bestIndex = WebRtcSpl_MaxIndexW32(Crit, range);
+  *bestCrit = Crit[*bestIndex];
+
+  /* Calculate total shifts of this criteria */
+  *bestCritSh = 32 - 2*sh + max;
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_search_core.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search_core.h
new file mode 100644
index 0000000..e074c52
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_search_core.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearchCore.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_CbSearchCore(
+    WebRtc_Word32 *cDot,    /* (i) Cross Correlation */
+    WebRtc_Word16 range,    /* (i) Search range */
+    WebRtc_Word16 stage,    /* (i) Stage of this search */
+    WebRtc_Word16 *inverseEnergy,  /* (i) Inversed energy */
+    WebRtc_Word16 *inverseEnergyShift, /* (i) Shifts of inversed energy
+                                          with the offset 2*16-29 */
+    WebRtc_Word32 *Crit,    /* (o) The criteria */
+    WebRtc_Word16 *bestIndex,   /* (o) Index that corresponds to
+                                   maximum criteria (in this
+                                   vector) */
+    WebRtc_Word32 *bestCrit,   /* (o) Value of criteria for the
+                                  chosen index */
+    WebRtc_Word16 *bestCritSh);  /* (o) The domain of the chosen
+                                    criteria */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.c b/trunk/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
new file mode 100644
index 0000000..bf85408
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbUpdateBestIndex.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "cb_update_best_index.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_CbUpdateBestIndex(
+    WebRtc_Word32 CritNew,    /* (i) New Potentially best Criteria */
+    WebRtc_Word16 CritNewSh,   /* (i) Shift value of above Criteria */
+    WebRtc_Word16 IndexNew,   /* (i) Index of new Criteria */
+    WebRtc_Word32 cDotNew,    /* (i) Cross dot of new index */
+    WebRtc_Word16 invEnergyNew,  /* (i) Inversed energy new index */
+    WebRtc_Word16 energyShiftNew,  /* (i) Energy shifts of new index */
+    WebRtc_Word32 *CritMax,   /* (i/o) Maximum Criteria (so far) */
+    WebRtc_Word16 *shTotMax,   /* (i/o) Shifts of maximum criteria */
+    WebRtc_Word16 *bestIndex,   /* (i/o) Index that corresponds to
+                                                   maximum criteria */
+    WebRtc_Word16 *bestGain)   /* (i/o) Gain in Q14 that corresponds
+                                                   to maximum criteria */
+{
+  WebRtc_Word16 shOld, shNew, tmp16;
+  WebRtc_Word16 scaleTmp;
+  WebRtc_Word32 gainW32;
+
+  /* Normalize the new and old Criteria to the same domain */
+  if (CritNewSh>(*shTotMax)) {
+    shOld=WEBRTC_SPL_MIN(31,CritNewSh-(*shTotMax));
+    shNew=0;
+  } else {
+    shOld=0;
+    shNew=WEBRTC_SPL_MIN(31,(*shTotMax)-CritNewSh);
+  }
+
+  /* Compare the two criteria. If the new one is better,
+     calculate the gain and store this index as the new best one
+  */
+
+  if (WEBRTC_SPL_RSHIFT_W32(CritNew, shNew)>
+      WEBRTC_SPL_RSHIFT_W32((*CritMax),shOld)) {
+
+    tmp16 = (WebRtc_Word16)WebRtcSpl_NormW32(cDotNew);
+    tmp16 = 16 - tmp16;
+
+    /* Calculate the gain in Q14
+       Compensate for inverseEnergyshift in Q29 and that the energy
+       value was stored in a WebRtc_Word16 (shifted down 16 steps)
+       => 29-14+16 = 31 */
+
+    scaleTmp = -energyShiftNew-tmp16+31;
+    scaleTmp = WEBRTC_SPL_MIN(31, scaleTmp);
+
+    gainW32 = WEBRTC_SPL_MUL_16_16_RSFT(
+        ((WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cDotNew, -tmp16)), invEnergyNew, scaleTmp);
+
+    /* Check if criteria satisfies Gain criteria (max 1.3)
+       if it is larger set the gain to 1.3
+       (slightly different from FLP version)
+    */
+    if (gainW32>21299) {
+      *bestGain=21299;
+    } else if (gainW32<-21299) {
+      *bestGain=-21299;
+    } else {
+      *bestGain=(WebRtc_Word16)gainW32;
+    }
+
+    *CritMax=CritNew;
+    *shTotMax=CritNewSh;
+    *bestIndex = IndexNew;
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.h b/trunk/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
new file mode 100644
index 0000000..9015187
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbUpdateBestIndex.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_CbUpdateBestIndex(
+    WebRtc_Word32 CritNew,    /* (i) New Potentially best Criteria */
+    WebRtc_Word16 CritNewSh,   /* (i) Shift value of above Criteria */
+    WebRtc_Word16 IndexNew,   /* (i) Index of new Criteria */
+    WebRtc_Word32 cDotNew,    /* (i) Cross dot of new index */
+    WebRtc_Word16 invEnergyNew,  /* (i) Inversed energy new index */
+    WebRtc_Word16 energyShiftNew,  /* (i) Energy shifts of new index */
+    WebRtc_Word32 *CritMax,   /* (i/o) Maximum Criteria (so far) */
+    WebRtc_Word16 *shTotMax,   /* (i/o) Shifts of maximum criteria */
+    WebRtc_Word16 *bestIndex,   /* (i/o) Index that corresponds to
+                                   maximum criteria */
+    WebRtc_Word16 *bestGain);   /* (i/o) Gain in Q14 that corresponds
+                                   to maximum criteria */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/chebyshev.c b/trunk/src/modules/audio_coding/codecs/ilbc/chebyshev.c
new file mode 100644
index 0000000..90108ff
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/chebyshev.c
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Chebyshev.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*------------------------------------------------------------------*
+ *  Calculate the Chebyshev polynomial series
+ *  F(w) = 2*exp(-j5w)*C(x)
+ *   C(x) = (T_0(x) + f(1)T_1(x) + ... + f(4)T_1(x) + f(5)/2)
+ *   T_i(x) is the i:th order Chebyshev polynomial
+ *------------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_Chebyshev(
+    /* (o) Result of C(x) */
+    WebRtc_Word16 x,  /* (i) Value to the Chebyshev polynomial */
+    WebRtc_Word16 *f  /* (i) The coefficients in the polynomial */
+                                      ) {
+  WebRtc_Word16 b1_high, b1_low; /* Use the high, low format to increase the accuracy */
+  WebRtc_Word32 b2;
+  WebRtc_Word32 tmp1W32;
+  WebRtc_Word32 tmp2W32;
+  int i;
+
+  b2 = (WebRtc_Word32)0x1000000; /* b2 = 1.0 (Q23) */
+  /* Calculate b1 = 2*x + f[1] */
+  tmp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x, 10);
+  tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[1], 14);
+
+  for (i = 2; i < 5; i++) {
+    tmp2W32 = tmp1W32;
+
+    /* Split b1 (in tmp1W32) into a high and low part */
+    b1_high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
+    b1_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)b1_high),16), 1);
+
+    /* Calculate 2*x*b1-b2+f[i] */
+    tmp1W32 = WEBRTC_SPL_LSHIFT_W32( (WEBRTC_SPL_MUL_16_16(b1_high, x) +
+                                      WEBRTC_SPL_MUL_16_16_RSFT(b1_low, x, 15)), 2);
+
+    tmp1W32 -= b2;
+    tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[i], 14);
+
+    /* Update b2 for next round */
+    b2 = tmp2W32;
+  }
+
+  /* Split b1 (in tmp1W32) into a high and low part */
+  b1_high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
+  b1_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)b1_high),16), 1);
+
+  /* tmp1W32 = x*b1 - b2 + f[i]/2 */
+  tmp1W32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(b1_high, x), 1) +
+      WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16_RSFT(b1_low, x, 15), 1);
+
+  tmp1W32 -= b2;
+  tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[i], 13);
+
+  /* Handle overflows and set to maximum or minimum WebRtc_Word16 instead */
+  if (tmp1W32>((WebRtc_Word32)33553408)) {
+    return(WEBRTC_SPL_WORD16_MAX);
+  } else if (tmp1W32<((WebRtc_Word32)-33554432)) {
+    return(WEBRTC_SPL_WORD16_MIN);
+  } else {
+    return((WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 10));
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/chebyshev.h b/trunk/src/modules/audio_coding/codecs/ilbc/chebyshev.h
new file mode 100644
index 0000000..57aab99
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/chebyshev.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Chebyshev.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_
+
+#include "defines.h"
+
+/*------------------------------------------------------------------*
+ *  Calculate the Chebyshev polynomial series
+ *  F(w) = 2*exp(-j5w)*C(x)
+ *   C(x) = (T_0(x) + f(1)T_1(x) + ... + f(4)T_1(x) + f(5)/2)
+ *   T_i(x) is the i:th order Chebyshev polynomial
+ *------------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_Chebyshev(
+    /* (o) Result of C(x) */
+    WebRtc_Word16 x,  /* (i) Value to the Chebyshev polynomial */
+    WebRtc_Word16 *f  /* (i) The coefficients in the polynomial */
+                                      );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/comp_corr.c b/trunk/src/modules/audio_coding/codecs/ilbc/comp_corr.c
new file mode 100644
index 0000000..3d7f93e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/comp_corr.c
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CompCorr.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Compute cross correlation and pitch gain for pitch prediction
+ *  of last subframe at given lag.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CompCorr(
+    WebRtc_Word32 *corr, /* (o) cross correlation */
+    WebRtc_Word32 *ener, /* (o) energy */
+    WebRtc_Word16 *buffer, /* (i) signal buffer */
+    WebRtc_Word16 lag,  /* (i) pitch lag */
+    WebRtc_Word16 bLen, /* (i) length of buffer */
+    WebRtc_Word16 sRange, /* (i) correlation search length */
+    WebRtc_Word16 scale /* (i) number of rightshifts to use */
+                            ){
+  WebRtc_Word16 *w16ptr;
+
+  w16ptr=&buffer[bLen-sRange-lag];
+
+  /* Calculate correlation and energy */
+  (*corr)=WebRtcSpl_DotProductWithScale(&buffer[bLen-sRange], w16ptr, sRange, scale);
+  (*ener)=WebRtcSpl_DotProductWithScale(w16ptr, w16ptr, sRange, scale);
+
+  /* For zero energy set the energy to 0 in order to avoid potential
+     problems for coming divisions */
+  if (*ener == 0) {
+    *corr = 0;
+    *ener = 1;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/comp_corr.h b/trunk/src/modules/audio_coding/codecs/ilbc/comp_corr.h
new file mode 100644
index 0000000..cd46532
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/comp_corr.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CompCorr.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Compute cross correlation and pitch gain for pitch prediction
+ *  of last subframe at given lag.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CompCorr(
+    WebRtc_Word32 *corr, /* (o) cross correlation */
+    WebRtc_Word32 *ener, /* (o) energy */
+    WebRtc_Word16 *buffer, /* (i) signal buffer */
+    WebRtc_Word16 lag,  /* (i) pitch lag */
+    WebRtc_Word16 bLen, /* (i) length of buffer */
+    WebRtc_Word16 sRange, /* (i) correlation search length */
+    WebRtc_Word16 scale /* (i) number of rightshifts to use */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/complexityMeasures.m b/trunk/src/modules/audio_coding/codecs/ilbc/complexityMeasures.m
new file mode 100644
index 0000000..f768194
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/complexityMeasures.m
@@ -0,0 +1,49 @@
+clear;
+pack;
+%
+% Enter the path to YOUR executable and remember to define the preprocessor
+% variable PRINT_MIPS to get the instructions printed to the screen.
+%
+command = '!iLBCtest.exe 30 speechAndBGnoise.pcm out1.bit out1.pcm tlm10_30ms.dat';
+cout=' > st.txt';   %saves to matlab variable 'st'
+eval(strcat(command,cout));
+if(length(cout)>3)
+    load st.txt
+else
+    disp('No cout file to load')
+end
+
+% initialize vector to zero
+index = find(st(1:end,1)==-1);
+indexnonzero = find(st(1:end,1)>0);
+frames = length(index)-indexnonzero(1)+1;
+start = indexnonzero(1) - 1;
+functionOrder=max(st(:,2));
+new=zeros(frames,functionOrder);
+
+for i = 1:frames,
+    for j = index(start-1+i)+1:(index(start+i)-1),
+        new(i,st(j,2)) = new(i,st(j,2)) + st(j,1);
+    end
+end
+
+result=zeros(functionOrder,3);
+for i=1:functionOrder
+    nonzeroelements = find(new(1:end,i)>0);
+    result(i,1)=i;
+    
+    % Compute each function's mean complexity
+    % result(i,2)=(sum(new(nonzeroelements,i))/(length(nonzeroelements)*0.03))/1000000;
+    
+    % Compute each function's maximum complexity in encoding
+    % and decoding respectively and then add it together:
+    % result(i,3)=(max(new(1:end,i))/0.03)/1000000;
+    result(i,3)=(max(new(1:size(new,1)/2,i))/0.03)/1000000 + (max(new(size(new,1)/2+1:end,i))/0.03)/1000000;
+end
+
+result
+
+% Compute maximum complexity for a single frame (enc/dec separately and together)
+maxEncComplexityInAFrame = (max(sum(new(1:size(new,1)/2,:),2))/0.03)/1000000
+maxDecComplexityInAFrame = (max(sum(new(size(new,1)/2+1:end,:),2))/0.03)/1000000
+totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame
\ No newline at end of file
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/constants.c b/trunk/src/modules/audio_coding/codecs/ilbc/constants.c
new file mode 100644
index 0000000..5ebe9be
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/constants.c
@@ -0,0 +1,666 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ constants.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/* HP Filters {b[0] b[1] b[2] -a[1] -a[2]} */
+
+const WebRtc_Word16 WebRtcIlbcfix_kHpInCoefs[5] = {3798, -7596, 3798, 7807, -3733};
+const WebRtc_Word16 WebRtcIlbcfix_kHpOutCoefs[5] = {3849, -7699, 3849, 7918, -3833};
+
+/* Window in Q11 to window the energies of the 5 choices (3 for 20ms) in the choice for
+   the 80 sample start state
+*/
+const WebRtc_Word16 WebRtcIlbcfix_kStartSequenceEnrgWin[NSUB_MAX-1]= {
+  1638, 1843, 2048, 1843, 1638
+};
+
+/* LP Filter coeffs used for downsampling */
+const WebRtc_Word16 WebRtcIlbcfix_kLpFiltCoefs[FILTERORDER_DS_PLUS1]= {
+  -273, 512, 1297, 1696, 1297, 512, -273
+};
+
+/* Constants used in the LPC calculations */
+
+/* Hanning LPC window (in Q15) */
+const WebRtc_Word16 WebRtcIlbcfix_kLpcWin[BLOCKL_MAX] = {
+  6, 22, 50, 89, 139, 200, 272, 355, 449, 554, 669, 795,
+  932, 1079, 1237, 1405, 1583, 1771, 1969, 2177, 2395, 2622, 2858, 3104,
+  3359, 3622, 3894, 4175, 4464, 4761, 5066, 5379, 5699, 6026, 6361, 6702,
+  7050, 7404, 7764, 8130, 8502, 8879, 9262, 9649, 10040, 10436, 10836, 11240,
+  11647, 12058, 12471, 12887, 13306, 13726, 14148, 14572, 14997, 15423, 15850, 16277,
+  16704, 17131, 17558, 17983, 18408, 18831, 19252, 19672, 20089, 20504, 20916, 21325,
+  21730, 22132, 22530, 22924, 23314, 23698, 24078, 24452, 24821, 25185, 25542, 25893,
+  26238, 26575, 26906, 27230, 27547, 27855, 28156, 28450, 28734, 29011, 29279, 29538,
+  29788, 30029, 30261, 30483, 30696, 30899, 31092, 31275, 31448, 31611, 31764, 31906,
+  32037, 32158, 32268, 32367, 32456, 32533, 32600, 32655, 32700, 32733, 32755, 32767,
+  32767, 32755, 32733, 32700, 32655, 32600, 32533, 32456, 32367, 32268, 32158, 32037,
+  31906, 31764, 31611, 31448, 31275, 31092, 30899, 30696, 30483, 30261, 30029, 29788,
+  29538, 29279, 29011, 28734, 28450, 28156, 27855, 27547, 27230, 26906, 26575, 26238,
+  25893, 25542, 25185, 24821, 24452, 24078, 23698, 23314, 22924, 22530, 22132, 21730,
+  21325, 20916, 20504, 20089, 19672, 19252, 18831, 18408, 17983, 17558, 17131, 16704,
+  16277, 15850, 15423, 14997, 14572, 14148, 13726, 13306, 12887, 12471, 12058, 11647,
+  11240, 10836, 10436, 10040, 9649, 9262, 8879, 8502, 8130, 7764, 7404, 7050,
+  6702, 6361, 6026, 5699, 5379, 5066, 4761, 4464, 4175, 3894, 3622, 3359,
+  3104, 2858, 2622, 2395, 2177, 1969, 1771, 1583, 1405, 1237, 1079, 932,
+  795, 669, 554, 449, 355, 272, 200, 139, 89, 50, 22, 6
+};
+
+/* Asymmetric LPC window (in Q15)*/
+const WebRtc_Word16 WebRtcIlbcfix_kLpcAsymWin[BLOCKL_MAX] = {
+  2, 7, 15, 27, 42, 60, 81, 106, 135, 166, 201, 239,
+  280, 325, 373, 424, 478, 536, 597, 661, 728, 798, 872, 949,
+  1028, 1111, 1197, 1287, 1379, 1474, 1572, 1674, 1778, 1885, 1995, 2108,
+  2224, 2343, 2465, 2589, 2717, 2847, 2980, 3115, 3254, 3395, 3538, 3684,
+  3833, 3984, 4138, 4295, 4453, 4615, 4778, 4944, 5112, 5283, 5456, 5631,
+  5808, 5987, 6169, 6352, 6538, 6725, 6915, 7106, 7300, 7495, 7692, 7891,
+  8091, 8293, 8497, 8702, 8909, 9118, 9328, 9539, 9752, 9966, 10182, 10398,
+  10616, 10835, 11055, 11277, 11499, 11722, 11947, 12172, 12398, 12625, 12852, 13080,
+  13309, 13539, 13769, 14000, 14231, 14463, 14695, 14927, 15160, 15393, 15626, 15859,
+  16092, 16326, 16559, 16792, 17026, 17259, 17492, 17725, 17957, 18189, 18421, 18653,
+  18884, 19114, 19344, 19573, 19802, 20030, 20257, 20483, 20709, 20934, 21157, 21380,
+  21602, 21823, 22042, 22261, 22478, 22694, 22909, 23123, 23335, 23545, 23755, 23962,
+  24168, 24373, 24576, 24777, 24977, 25175, 25371, 25565, 25758, 25948, 26137, 26323,
+  26508, 26690, 26871, 27049, 27225, 27399, 27571, 27740, 27907, 28072, 28234, 28394,
+  28552, 28707, 28860, 29010, 29157, 29302, 29444, 29584, 29721, 29855, 29987, 30115,
+  30241, 30364, 30485, 30602, 30717, 30828, 30937, 31043, 31145, 31245, 31342, 31436,
+  31526, 31614, 31699, 31780, 31858, 31933, 32005, 32074, 32140, 32202, 32261, 32317,
+  32370, 32420, 32466, 32509, 32549, 32585, 32618, 32648, 32675, 32698, 32718, 32734,
+  32748, 32758, 32764, 32767, 32767, 32667, 32365, 31863, 31164, 30274, 29197, 27939,
+  26510, 24917, 23170, 21281, 19261, 17121, 14876, 12540, 10126, 7650, 5126, 2571
+};
+
+/* Lag window for LPC (Q31) */
+const WebRtc_Word32 WebRtcIlbcfix_kLpcLagWin[LPC_FILTERORDER + 1]={
+  2147483647,   2144885453,   2137754373,   2125918626,   2109459810,
+  2088483140,   2063130336,   2033564590,   1999977009,   1962580174,
+  1921610283};
+
+/* WebRtcIlbcfix_kLpcChirpSyntDenum vector in Q15 corresponding
+ * floating point vector {1 0.9025 0.9025^2 0.9025^3 ...}
+ */
+const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpSyntDenum[LPC_FILTERORDER + 1] = {
+  32767, 29573, 26690, 24087,
+  21739, 19619, 17707, 15980,
+  14422, 13016, 11747};
+
+/* WebRtcIlbcfix_kLpcChirpWeightDenum in Q15 corresponding to
+ * floating point vector {1 0.4222 0.4222^2... }
+ */
+const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpWeightDenum[LPC_FILTERORDER + 1] = {
+  32767, 13835, 5841, 2466, 1041, 440,
+  186, 78,  33,  14,  6};
+
+/* LSF quantization Q13 domain */
+const WebRtc_Word16 WebRtcIlbcfix_kLsfCb[64 * 3 + 128 * 3 + 128 * 4] = {
+  1273,       2238,       3696,
+  3199,       5309,       8209,
+  3606,       5671,       7829,
+  2815,       5262,       8778,
+  2608,       4027,       5493,
+  1582,       3076,       5945,
+  2983,       4181,       5396,
+  2437,       4322,       6902,
+  1861,       2998,       4613,
+  2007,       3250,       5214,
+  1388,       2459,       4262,
+  2563,       3805,       5269,
+  2036,       3522,       5129,
+  1935,       4025,       6694,
+  2744,       5121,       7338,
+  2810,       4248,       5723,
+  3054,       5405,       7745,
+  1449,       2593,       4763,
+  3411,       5128,       6596,
+  2484,       4659,       7496,
+  1668,       2879,       4818,
+  1812,       3072,       5036,
+  1638,       2649,       3900,
+  2464,       3550,       4644,
+  1853,       2900,       4158,
+  2458,       4163,       5830,
+  2556,       4036,       6254,
+  2703,       4432,       6519,
+  3062,       4953,       7609,
+  1725,       3703,       6187,
+  2221,       3877,       5427,
+  2339,       3579,       5197,
+  2021,       4633,       7037,
+  2216,       3328,       4535,
+  2961,       4739,       6667,
+  2807,       3955,       5099,
+  2788,       4501,       6088,
+  1642,       2755,       4431,
+  3341,       5282,       7333,
+  2414,       3726,       5727,
+  1582,       2822,       5269,
+  2259,       3447,       4905,
+  3117,       4986,       7054,
+  1825,       3491,       5542,
+  3338,       5736,       8627,
+  1789,       3090,       5488,
+  2566,       3720,       4923,
+  2846,       4682,       7161,
+  1950,       3321,       5976,
+  1834,       3383,       6734,
+  3238,       4769,       6094,
+  2031,       3978,       5903,
+  1877,       4068,       7436,
+  2131,       4644,       8296,
+  2764,       5010,       8013,
+  2194,       3667,       6302,
+  2053,       3127,       4342,
+  3523,       6595,      10010,
+  3134,       4457,       5748,
+  3142,       5819,       9414,
+  2223,       4334,       6353,
+  2022,       3224,       4822,
+  2186,       3458,       5544,
+  2552,       4757,       6870,
+  10905,      12917,      14578,
+  9503,      11485,      14485,
+  9518,      12494,      14052,
+  6222,       7487,       9174,
+  7759,       9186,      10506,
+  8315,      12755,      14786,
+  9609,      11486,      13866,
+  8909,      12077,      13643,
+  7369,       9054,      11520,
+  9408,      12163,      14715,
+  6436,       9911,      12843,
+  7109,       9556,      11884,
+  7557,      10075,      11640,
+  6482,       9202,      11547,
+  6463,       7914,      10980,
+  8611,      10427,      12752,
+  7101,       9676,      12606,
+  7428,      11252,      13172,
+  10197,      12955,      15842,
+  7487,      10955,      12613,
+  5575,       7858,      13621,
+  7268,      11719,      14752,
+  7476,      11744,      13795,
+  7049,       8686,      11922,
+  8234,      11314,      13983,
+  6560,      11173,      14984,
+  6405,       9211,      12337,
+  8222,      12054,      13801,
+  8039,      10728,      13255,
+  10066,      12733,      14389,
+  6016,       7338,      10040,
+  6896,       8648,      10234,
+  7538,       9170,      12175,
+  7327,      12608,      14983,
+  10516,      12643,      15223,
+  5538,       7644,      12213,
+  6728,      12221,      14253,
+  7563,       9377,      12948,
+  8661,      11023,      13401,
+  7280,       8806,      11085,
+  7723,       9793,      12333,
+  12225,      14648,      16709,
+  8768,      13389,      15245,
+  10267,      12197,      13812,
+  5301,       7078,      11484,
+  7100,      10280,      11906,
+  8716,      12555,      14183,
+  9567,      12464,      15434,
+  7832,      12305,      14300,
+  7608,      10556,      12121,
+  8913,      11311,      12868,
+  7414,       9722,      11239,
+  8666,      11641,      13250,
+  9079,      10752,      12300,
+  8024,      11608,      13306,
+  10453,      13607,      16449,
+  8135,       9573,      10909,
+  6375,       7741,      10125,
+  10025,      12217,      14874,
+  6985,      11063,      14109,
+  9296,      13051,      14642,
+  8613,      10975,      12542,
+  6583,      10414,      13534,
+  6191,       9368,      13430,
+  5742,       6859,       9260,
+  7723,       9813,      13679,
+  8137,      11291,      12833,
+  6562,       8973,      10641,
+  6062,       8462,      11335,
+  6928,       8784,      12647,
+  7501,       8784,      10031,
+  8372,      10045,      12135,
+  8191,       9864,      12746,
+  5917,       7487,      10979,
+  5516,       6848,      10318,
+  6819,       9899,      11421,
+  7882,      12912,      15670,
+  9558,      11230,      12753,
+  7752,       9327,      11472,
+  8479,       9980,      11358,
+  11418,      14072,      16386,
+  7968,      10330,      14423,
+  8423,      10555,      12162,
+  6337,      10306,      14391,
+  8850,      10879,      14276,
+  6750,      11885,      15710,
+  7037,       8328,       9764,
+  6914,       9266,      13476,
+  9746,      13949,      15519,
+  11032,      14444,      16925,
+  8032,      10271,      11810,
+  10962,      13451,      15833,
+  10021,      11667,      13324,
+  6273,       8226,      12936,
+  8543,      10397,      13496,
+  7936,      10302,      12745,
+  6769,       8138,      10446,
+  6081,       7786,      11719,
+  8637,      11795,      14975,
+  8790,      10336,      11812,
+  7040,       8490,      10771,
+  7338,      10381,      13153,
+  6598,       7888,       9358,
+  6518,       8237,      12030,
+  9055,      10763,      12983,
+  6490,      10009,      12007,
+  9589,      12023,      13632,
+  6867,       9447,      10995,
+  7930,       9816,      11397,
+  10241,      13300,      14939,
+  5830,       8670,      12387,
+  9870,      11915,      14247,
+  9318,      11647,      13272,
+  6721,      10836,      12929,
+  6543,       8233,       9944,
+  8034,      10854,      12394,
+  9112,      11787,      14218,
+  9302,      11114,      13400,
+  9022,      11366,      13816,
+  6962,      10461,      12480,
+  11288,      13333,      15222,
+  7249,       8974,      10547,
+  10566,      12336,      14390,
+  6697,      11339,      13521,
+  11851,      13944,      15826,
+  6847,       8381,      11349,
+  7509,       9331,      10939,
+  8029,       9618,      11909,
+  13973,      17644,      19647,      22474,
+  14722,      16522,      20035,      22134,
+  16305,      18179,      21106,      23048,
+  15150,      17948,      21394,      23225,
+  13582,      15191,      17687,      22333,
+  11778,      15546,      18458,      21753,
+  16619,      18410,      20827,      23559,
+  14229,      15746,      17907,      22474,
+  12465,      15327,      20700,      22831,
+  15085,      16799,      20182,      23410,
+  13026,      16935,      19890,      22892,
+  14310,      16854,      19007,      22944,
+  14210,      15897,      18891,      23154,
+  14633,      18059,      20132,      22899,
+  15246,      17781,      19780,      22640,
+  16396,      18904,      20912,      23035,
+  14618,      17401,      19510,      21672,
+  15473,      17497,      19813,      23439,
+  18851,      20736,      22323,      23864,
+  15055,      16804,      18530,      20916,
+  16490,      18196,      19990,      21939,
+  11711,      15223,      21154,      23312,
+  13294,      15546,      19393,      21472,
+  12956,      16060,      20610,      22417,
+  11628,      15843,      19617,      22501,
+  14106,      16872,      19839,      22689,
+  15655,      18192,      20161,      22452,
+  12953,      15244,      20619,      23549,
+  15322,      17193,      19926,      21762,
+  16873,      18676,      20444,      22359,
+  14874,      17871,      20083,      21959,
+  11534,      14486,      19194,      21857,
+  17766,      19617,      21338,      23178,
+  13404,      15284,      19080,      23136,
+  15392,      17527,      19470,      21953,
+  14462,      16153,      17985,      21192,
+  17734,      19750,      21903,      23783,
+  16973,      19096,      21675,      23815,
+  16597,      18936,      21257,      23461,
+  15966,      17865,      20602,      22920,
+  15416,      17456,      20301,      22972,
+  18335,      20093,      21732,      23497,
+  15548,      17217,      20679,      23594,
+  15208,      16995,      20816,      22870,
+  13890,      18015,      20531,      22468,
+  13211,      15377,      19951,      22388,
+  12852,      14635,      17978,      22680,
+  16002,      17732,      20373,      23544,
+  11373,      14134,      19534,      22707,
+  17329,      19151,      21241,      23462,
+  15612,      17296,      19362,      22850,
+  15422,      19104,      21285,      23164,
+  13792,      17111,      19349,      21370,
+  15352,      17876,      20776,      22667,
+  15253,      16961,      18921,      22123,
+  14108,      17264,      20294,      23246,
+  15785,      17897,      20010,      21822,
+  17399,      19147,      20915,      22753,
+  13010,      15659,      18127,      20840,
+  16826,      19422,      22218,      24084,
+  18108,      20641,      22695,      24237,
+  18018,      20273,      22268,      23920,
+  16057,      17821,      21365,      23665,
+  16005,      17901,      19892,      23016,
+  13232,      16683,      21107,      23221,
+  13280,      16615,      19915,      21829,
+  14950,      18575,      20599,      22511,
+  16337,      18261,      20277,      23216,
+  14306,      16477,      21203,      23158,
+  12803,      17498,      20248,      22014,
+  14327,      17068,      20160,      22006,
+  14402,      17461,      21599,      23688,
+  16968,      18834,      20896,      23055,
+  15070,      17157,      20451,      22315,
+  15419,      17107,      21601,      23946,
+  16039,      17639,      19533,      21424,
+  16326,      19261,      21745,      23673,
+  16489,      18534,      21658,      23782,
+  16594,      18471,      20549,      22807,
+  18973,      21212,      22890,      24278,
+  14264,      18674,      21123,      23071,
+  15117,      16841,      19239,      23118,
+  13762,      15782,      20478,      23230,
+  14111,      15949,      20058,      22354,
+  14990,      16738,      21139,      23492,
+  13735,      16971,      19026,      22158,
+  14676,      17314,      20232,      22807,
+  16196,      18146,      20459,      22339,
+  14747,      17258,      19315,      22437,
+  14973,      17778,      20692,      23367,
+  15715,      17472,      20385,      22349,
+  15702,      18228,      20829,      23410,
+  14428,      16188,      20541,      23630,
+  16824,      19394,      21365,      23246,
+  13069,      16392,      18900,      21121,
+  12047,      16640,      19463,      21689,
+  14757,      17433,      19659,      23125,
+  15185,      16930,      19900,      22540,
+  16026,      17725,      19618,      22399,
+  16086,      18643,      21179,      23472,
+  15462,      17248,      19102,      21196,
+  17368,      20016,      22396,      24096,
+  12340,      14475,      19665,      23362,
+  13636,      16229,      19462,      22728,
+  14096,      16211,      19591,      21635,
+  12152,      14867,      19943,      22301,
+  14492,      17503,      21002,      22728,
+  14834,      16788,      19447,      21411,
+  14650,      16433,      19326,      22308,
+  14624,      16328,      19659,      23204,
+  13888,      16572,      20665,      22488,
+  12977,      16102,      18841,      22246,
+  15523,      18431,      21757,      23738,
+  14095,      16349,      18837,      20947,
+  13266,      17809,      21088,      22839,
+  15427,      18190,      20270,      23143,
+  11859,      16753,      20935,      22486,
+  12310,      17667,      21736,      23319,
+  14021,      15926,      18702,      22002,
+  12286,      15299,      19178,      21126,
+  15703,      17491,      21039,      23151,
+  12272,      14018,      18213,      22570,
+  14817,      16364,      18485,      22598,
+  17109,      19683,      21851,      23677,
+  12657,      14903,      19039,      22061,
+  14713,      16487,      20527,      22814,
+  14635,      16726,      18763,      21715,
+  15878,      18550,      20718,      22906
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kLsfDimCb[LSF_NSPLIT] = {3, 3, 4};
+const WebRtc_Word16 WebRtcIlbcfix_kLsfSizeCb[LSF_NSPLIT] = {64,128,128};
+
+const WebRtc_Word16 WebRtcIlbcfix_kLsfMean[LPC_FILTERORDER] = {
+  2308,       3652,       5434,       7885,
+  10255,      12559,      15160,      17513,
+  20328,      22752};
+
+const WebRtc_Word16 WebRtcIlbcfix_kLspMean[LPC_FILTERORDER] = {
+  31476, 29565, 25819, 18725, 10276,
+  1236, -9049, -17600, -25884, -30618
+};
+
+/* Q14 */
+const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight20ms[4] = {12288, 8192, 4096, 0};
+const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight30ms[6] = {8192, 16384, 10923, 5461, 0, 0};
+
+/*
+   cos(x) in Q15
+   WebRtcIlbcfix_kCos[i] = cos(pi*i/64.0)
+   used in WebRtcIlbcfix_Lsp2Lsf()
+*/
+
+const WebRtc_Word16 WebRtcIlbcfix_kCos[64] = {
+  32767,  32729,  32610,  32413,  32138,  31786,  31357,  30853,
+  30274,  29622,  28899,  28106,  27246,  26320,  25330,  24279,
+  23170,  22006,  20788,  19520,  18205,  16846,  15447,  14010,
+  12540,  11039,   9512,   7962,   6393,   4808,   3212,   1608,
+  0,  -1608,  -3212,  -4808,  -6393,  -7962,  -9512, -11039,
+  -12540, -14010, -15447, -16846, -18205, -19520, -20788, -22006,
+  -23170, -24279, -25330, -26320, -27246, -28106, -28899, -29622,
+  -30274, -30853, -31357, -31786, -32138, -32413, -32610, -32729
+};
+
+/*
+   Derivative in Q19, used to interpolate between the
+   WebRtcIlbcfix_kCos[] values to get a more exact y = cos(x)
+*/
+const WebRtc_Word16 WebRtcIlbcfix_kCosDerivative[64] = {
+  -632,  -1893,  -3150,  -4399,  -5638,  -6863,  -8072,  -9261,
+  -10428, -11570, -12684, -13767, -14817, -15832, -16808, -17744,
+  -18637, -19486, -20287, -21039, -21741, -22390, -22986, -23526,
+  -24009, -24435, -24801, -25108, -25354, -25540, -25664, -25726,
+  -25726, -25664, -25540, -25354, -25108, -24801, -24435, -24009,
+  -23526, -22986, -22390, -21741, -21039, -20287, -19486, -18637,
+  -17744, -16808, -15832, -14817, -13767, -12684, -11570, -10428,
+  -9261,  -8072,  -6863,  -5638,  -4399,  -3150,  -1893,   -632};
+
+/*
+  Table in Q15, used for a2lsf conversion
+  WebRtcIlbcfix_kCosGrid[i] = cos((2*pi*i)/(float)(2*COS_GRID_POINTS));
+*/
+
+const WebRtc_Word16 WebRtcIlbcfix_kCosGrid[COS_GRID_POINTS + 1] = {
+  32760, 32723, 32588, 32364, 32051, 31651, 31164, 30591,
+  29935, 29196, 28377, 27481, 26509, 25465, 24351, 23170,
+  21926, 20621, 19260, 17846, 16384, 14876, 13327, 11743,
+  10125, 8480, 6812, 5126, 3425, 1714, 0, -1714, -3425,
+  -5126, -6812, -8480, -10125, -11743, -13327, -14876,
+  -16384, -17846, -19260, -20621, -21926, -23170, -24351,
+  -25465, -26509, -27481, -28377, -29196, -29935, -30591,
+  -31164, -31651, -32051, -32364, -32588, -32723, -32760
+};
+
+/*
+   Derivative of y = acos(x) in Q12
+   used in WebRtcIlbcfix_Lsp2Lsf()
+*/
+
+const WebRtc_Word16 WebRtcIlbcfix_kAcosDerivative[64] = {
+  -26887, -8812, -5323, -3813, -2979, -2444, -2081, -1811,
+  -1608, -1450, -1322, -1219, -1132, -1059, -998, -946,
+  -901, -861, -827, -797, -772, -750, -730, -713,
+  -699, -687, -677, -668, -662, -657, -654, -652,
+  -652, -654, -657, -662, -668, -677, -687, -699,
+  -713, -730, -750, -772, -797, -827, -861, -901,
+  -946, -998, -1059, -1132, -1219, -1322, -1450, -1608,
+  -1811, -2081, -2444, -2979, -3813, -5323, -8812, -26887
+};
+
+
+/* Tables for quantization of start state */
+
+/* State quantization tables */
+const WebRtc_Word16 WebRtcIlbcfix_kStateSq3[8] = { /* Values in Q13 */
+  -30473, -17838, -9257, -2537,
+  3639, 10893, 19958, 32636
+};
+
+/* This table defines the limits for the selection of the freqg
+   less than or equal to value 0 => index = 0
+   less than or equal to value k => index = k
+*/
+const WebRtc_Word32 WebRtcIlbcfix_kChooseFrgQuant[64] = {
+  118, 163, 222, 305, 425, 604,
+  851, 1174, 1617, 2222, 3080, 4191,
+  5525, 7215, 9193, 11540, 14397, 17604,
+  21204, 25209, 29863, 35720, 42531, 50375,
+  59162, 68845, 80108, 93754, 110326, 129488,
+  150654, 174328, 201962, 233195, 267843, 308239,
+  354503, 405988, 464251, 531550, 608652, 697516,
+  802526, 928793, 1080145, 1258120, 1481106, 1760881,
+  2111111, 2546619, 3078825, 3748642, 4563142, 5573115,
+  6887601, 8582108, 10797296, 14014513, 18625760, 25529599,
+  37302935, 58819185, 109782723, WEBRTC_SPL_WORD32_MAX
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kScale[64] = {
+  /* Values in Q16 */
+  29485, 25003, 21345, 18316, 15578, 13128, 10973, 9310, 7955,
+  6762, 5789, 4877, 4255, 3699, 3258, 2904, 2595, 2328,
+  2123, 1932, 1785, 1631, 1493, 1370, 1260, 1167, 1083,
+  /* Values in Q21 */
+  32081, 29611, 27262, 25229, 23432, 21803, 20226, 18883, 17609,
+  16408, 15311, 14327, 13390, 12513, 11693, 10919, 10163, 9435,
+  8739, 8100, 7424, 6813, 6192, 5648, 5122, 4639, 4207, 3798,
+  3404, 3048, 2706, 2348, 2036, 1713, 1393, 1087, 747
+};
+
+/*frgq in fixpoint, but already computed like this:
+  for(i=0; i<64; i++){
+  a = (pow(10,frgq[i])/4.5);
+  WebRtcIlbcfix_kFrgQuantMod[i] = round(a);
+  }
+
+  Value 0 :36 in Q8
+  37:58 in Q5
+  59:63 in Q3
+*/
+const WebRtc_Word16 WebRtcIlbcfix_kFrgQuantMod[64] = {
+  /* First 37 values in Q8 */
+  569, 671, 786, 916, 1077, 1278,
+  1529, 1802, 2109, 2481, 2898, 3440,
+  3943, 4535, 5149, 5778, 6464, 7208,
+  7904, 8682, 9397, 10285, 11240, 12246,
+  13313, 14382, 15492, 16735, 18131, 19693,
+  21280, 22912, 24624, 26544, 28432, 30488,
+  32720,
+  /* 22 values in Q5 */
+  4383, 4684, 5012, 5363, 5739, 6146,
+  6603, 7113, 7679, 8285, 9040, 9850,
+  10838, 11882, 13103, 14467, 15950, 17669,
+  19712, 22016, 24800, 28576,
+  /* 5 values in Q3 */
+  8240, 9792, 12040, 15440, 22472
+};
+
+/* Constants for codebook search and creation */
+
+/* Expansion filter to get additional cb section.
+ * Q12 and reversed compared to flp
+ */
+const WebRtc_Word16 WebRtcIlbcfix_kCbFiltersRev[CB_FILTERLEN]={
+  -140, 446, -755, 3302, 2922, -590, 343, -138};
+
+/* Weighting coefficients for short lags.
+ * [0.2 0.4 0.6 0.8] in Q15 */
+const WebRtc_Word16 WebRtcIlbcfix_kAlpha[4]={
+  6554, 13107, 19661, 26214};
+
+/* Ranges for search and filters at different subframes */
+
+const WebRtc_Word16 WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES]={
+  {58,58,58}, {108,44,44}, {108,108,108}, {108,108,108}, {108,108,108}};
+
+const WebRtc_Word16 WebRtcIlbcfix_kFilterRange[5]={63, 85, 125, 147, 147};
+
+/* Gain Quantization for the codebook gains of the 3 stages */
+
+/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq3[9]={
+  -16384, -10813, -5407, 0, 4096, 8192,
+  12288, 16384, 32767};
+
+/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq4[17]={
+  -17203, -14746, -12288, -9830, -7373, -4915,
+  -2458, 0, 2458, 4915, 7373, 9830,
+  12288, 14746, 17203, 19661, 32767};
+
+/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq5[33]={
+  614,        1229,        1843,        2458,        3072,       3686,
+  4301,        4915,        5530,        6144,        6758,        7373,
+  7987,        8602,        9216,        9830,       10445,       11059,
+  11674,       12288,       12902,       13517,       14131,       14746,
+  15360,       15974,       16589,       17203,       17818,       18432,
+  19046,       19661,    32767};
+
+/* Q14 gain_sq5Tbl squared in Q14 */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq5Sq[32] = {
+  23,   92,    207,  368,  576,  829,
+  1129,  1474,   1866,  2304,  2787,  3317,
+  3893,  4516,   5184,  5897,  6658,  7464,
+  8318,  9216,   10160,  11151,  12187,  13271,
+  14400,  15574,   16796,  18062,  19377,  20736,
+  22140,  23593
+};
+
+const WebRtc_Word16* const WebRtcIlbcfix_kGain[3] =
+{WebRtcIlbcfix_kGainSq5, WebRtcIlbcfix_kGainSq4, WebRtcIlbcfix_kGainSq3};
+
+
+/* Tables for the Enhancer, using upsampling factor 4 (ENH_UPS0 = 4) */
+
+const WebRtc_Word16 WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1]={
+  {0,    0,    0, 4096,    0,  0,   0},
+  {64, -315, 1181, 3531, -436, 77, -64},
+  {97, -509, 2464, 2464, -509, 97, -97},
+  {77, -436, 3531, 1181, -315, 64, -77}
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kEnhWt[3] = {
+  4800, 16384, 27968 /* Q16 */
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kEnhPlocs[ENH_NBLOCKS_TOT] = {
+  160, 480, 800, 1120, 1440, 1760, 2080, 2400  /* Q(-2) */
+};
+
+/* PLC table */
+
+const WebRtc_Word16 WebRtcIlbcfix_kPlcPerSqr[6] = { /* Grid points for square of periodicity in Q15 */
+  839, 1343, 2048, 2998, 4247, 5849
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kPlcPitchFact[6] = { /* Value of y=(x^4-0.4)/(0.7-0.4) in grid points in Q15 */
+  0, 5462, 10922, 16384, 21846, 27306
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kPlcPfSlope[6] = { /* Slope of y=(x^4-0.4)/(0.7-0.4) in Q11 */
+  26667, 18729, 13653, 10258, 7901, 6214
+};
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/constants.h b/trunk/src/modules/audio_coding/codecs/ilbc/constants.h
new file mode 100644
index 0000000..f787f74
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/constants.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ constants.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_
+
+#include "defines.h"
+#include "typedefs.h"
+
+/* high pass filters */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kHpInCoefs[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kHpOutCoefs[];
+
+/* Window for start state decision */
+extern const WebRtc_Word16 WebRtcIlbcfix_kStartSequenceEnrgWin[];
+
+/* low pass filter used for downsampling */
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpFiltCoefs[];
+
+/* LPC analysis and quantization */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcWin[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcAsymWin[];
+extern const WebRtc_Word32 WebRtcIlbcfix_kLpcLagWin[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpSyntDenum[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpWeightDenum[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfDimCb[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfSizeCb[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfCb[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight20ms[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight30ms[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfMean[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLspMean[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kCos[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kCosDerivative[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kCosGrid[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kAcosDerivative[];
+
+/* state quantization tables */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kStateSq3[];
+extern const WebRtc_Word32 WebRtcIlbcfix_kChooseFrgQuant[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kScale[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kFrgQuantMod[];
+
+/* Ranges for search and filters at different subframes */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES];
+extern const WebRtc_Word16 WebRtcIlbcfix_kFilterRange[];
+
+/* gain quantization tables */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq3[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq4[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq5[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq5Sq[];
+extern const WebRtc_Word16* const WebRtcIlbcfix_kGain[];
+
+/* adaptive codebook definitions */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kCbFiltersRev[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kAlpha[];
+
+/* enhancer definitions */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1];
+extern const WebRtc_Word16 WebRtcIlbcfix_kEnhWt[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kEnhPlocs[];
+
+/* PLC tables */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPerSqr[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPitchFact[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPfSlope[];
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.c b/trunk/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
new file mode 100644
index 0000000..f021c4d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CreateAugmentedVec.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Recreate a specific codebook vector from the augmented part.
+ *
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CreateAugmentedVec(
+    WebRtc_Word16 index,  /* (i) Index for the augmented vector to be created */
+    WebRtc_Word16 *buffer,  /* (i) Pointer to the end of the codebook memory that
+                                           is used for creation of the augmented codebook */
+    WebRtc_Word16 *cbVec  /* (o) The constructed codebook vector */
+                                      ) {
+  WebRtc_Word16 ilow;
+  WebRtc_Word16 *ppo, *ppi;
+  WebRtc_Word16 cbVecTmp[4];
+
+  ilow = index-4;
+
+  /* copy the first noninterpolated part */
+  ppo = buffer-index;
+  WEBRTC_SPL_MEMCPY_W16(cbVec, ppo, index);
+
+  /* interpolation */
+  ppo = buffer - 4;
+  ppi = buffer - index - 4;
+
+  /* perform cbVec[ilow+k] = ((ppi[k]*alphaTbl[k])>>15) + ((ppo[k]*alphaTbl[3-k])>>15);
+     for k = 0..3
+  */
+  WebRtcSpl_ElementwiseVectorMult(&cbVec[ilow], ppi, WebRtcIlbcfix_kAlpha, 4, 15);
+  WebRtcSpl_ReverseOrderMultArrayElements(cbVecTmp, ppo, &WebRtcIlbcfix_kAlpha[3], 4, 15);
+  WebRtcSpl_AddVectorsAndShift(&cbVec[ilow], &cbVec[ilow], cbVecTmp, 4, 0);
+
+  /* copy the second noninterpolated part */
+  ppo = buffer - index;
+  WEBRTC_SPL_MEMCPY_W16(cbVec+index,ppo,(SUBL-index));
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.h b/trunk/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
new file mode 100644
index 0000000..970a9be
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CreateAugmentedVec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Recreate a specific codebook vector from the augmented part.
+ *
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CreateAugmentedVec(
+    WebRtc_Word16 index,  /* (i) Index for the augmented vector to be created */
+    WebRtc_Word16 *buffer,  /* (i) Pointer to the end of the codebook memory that
+                                           is used for creation of the augmented codebook */
+    WebRtc_Word16 *cbVec  /* (o) The constructed codebook vector */
+                                      );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/decode.c b/trunk/src/modules/audio_coding/codecs/ilbc/decode.c
new file mode 100644
index 0000000..827532c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/decode.c
@@ -0,0 +1,244 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Decode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "simple_lsf_dequant.h"
+#include "decoder_interpolate_lsf.h"
+#include "index_conv_dec.h"
+#include "do_plc.h"
+#include "constants.h"
+#include "enhancer_interface.h"
+#include "xcorr_coef.h"
+#include "lsf_check.h"
+#include "decode_residual.h"
+#include "unpack_bits.h"
+#include "hp_output.h"
+#ifndef WEBRTC_BIG_ENDIAN
+#include "swap_bytes.h"
+#endif
+
+/*----------------------------------------------------------------*
+ *  main decoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeImpl(
+    WebRtc_Word16 *decblock,    /* (o) decoded signal block */
+    WebRtc_UWord16 *bytes,     /* (i) encoded signal bits */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) the decoder state
+                                           structure */
+    WebRtc_Word16 mode      /* (i) 0: bad packet, PLC,
+                                                                   1: normal */
+                           ) {
+  int i;
+  WebRtc_Word16 order_plus_one;
+
+  WebRtc_Word16 last_bit;
+  WebRtc_Word16 *data;
+  /* Stack based */
+  WebRtc_Word16 decresidual[BLOCKL_MAX];
+  WebRtc_Word16 PLCresidual[BLOCKL_MAX + LPC_FILTERORDER];
+  WebRtc_Word16 syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
+  WebRtc_Word16 PLClpc[LPC_FILTERORDER + 1];
+  iLBC_bits *iLBCbits_inst = (iLBC_bits*)PLCresidual;
+
+  /* Reuse some buffers that are non overlapping in order to save stack memory */
+  data = &PLCresidual[LPC_FILTERORDER];
+
+  if (mode>0) { /* the data are good */
+
+    /* decode data */
+
+#ifndef WEBRTC_BIG_ENDIAN
+    WebRtcIlbcfix_SwapBytes((WebRtc_UWord16*)bytes, iLBCdec_inst->no_of_words);
+#endif
+
+    /* Unpacketize bits into parameters */
+
+    last_bit = WebRtcIlbcfix_UnpackBits(bytes, iLBCbits_inst, iLBCdec_inst->mode);
+
+#ifndef WEBRTC_BIG_ENDIAN
+    /* Swap back so that the input vector "bytes" is unchanged */
+    WebRtcIlbcfix_SwapBytes((WebRtc_UWord16*)bytes, iLBCdec_inst->no_of_words);
+#endif
+
+    /* Check for bit errors */
+    if (iLBCbits_inst->startIdx<1)
+      mode = 0;
+    if ((iLBCdec_inst->mode==20) && (iLBCbits_inst->startIdx>3))
+      mode = 0;
+    if ((iLBCdec_inst->mode==30) && (iLBCbits_inst->startIdx>5))
+      mode = 0;
+    if (last_bit==1)
+      mode = 0;
+
+    if (mode==1) { /* No bit errors was detected, continue decoding */
+      /* Stack based */
+      WebRtc_Word16 lsfdeq[LPC_FILTERORDER*LPC_N_MAX];
+      WebRtc_Word16 weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+
+      /* adjust index */
+      WebRtcIlbcfix_IndexConvDec(iLBCbits_inst->cb_index);
+
+      /* decode the lsf */
+      WebRtcIlbcfix_SimpleLsfDeQ(lsfdeq, (WebRtc_Word16*)(iLBCbits_inst->lsf), iLBCdec_inst->lpc_n);
+      WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCdec_inst->lpc_n);
+      WebRtcIlbcfix_DecoderInterpolateLsp(syntdenum, weightdenum,
+                                          lsfdeq, LPC_FILTERORDER, iLBCdec_inst);
+
+      /* Decode the residual using the cb and gain indexes */
+      WebRtcIlbcfix_DecodeResidual(iLBCdec_inst, iLBCbits_inst, decresidual, syntdenum);
+
+      /* preparing the plc for a future loss! */
+      WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 0,
+                              decresidual, syntdenum + (LPC_FILTERORDER + 1)*(iLBCdec_inst->nsub - 1),
+                              (WebRtc_Word16)(iLBCdec_inst->last_lag), iLBCdec_inst);
+
+      /* Use the output from doThePLC */
+      WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
+    }
+
+  }
+
+  if (mode == 0) {
+    /* the data is bad (either a PLC call
+     * was made or a bit error was detected)
+     */
+
+    /* packet loss conceal */
+
+    WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 1,
+                            decresidual, syntdenum, (WebRtc_Word16)(iLBCdec_inst->last_lag), iLBCdec_inst);
+
+    WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
+
+    order_plus_one = LPC_FILTERORDER + 1;
+
+    for (i = 0; i < iLBCdec_inst->nsub; i++) {
+      WEBRTC_SPL_MEMCPY_W16(syntdenum+(i*order_plus_one),
+                            PLClpc, order_plus_one);
+    }
+  }
+
+  if ((*iLBCdec_inst).use_enhancer == 1) { /* Enhancer activated */
+
+    /* Update the filter and filter coefficients if there was a packet loss */
+    if (iLBCdec_inst->prev_enh_pl==2) {
+      for (i=0;i<iLBCdec_inst->nsub;i++) {
+        WEBRTC_SPL_MEMCPY_W16(&(iLBCdec_inst->old_syntdenum[i*(LPC_FILTERORDER+1)]),
+                              syntdenum, (LPC_FILTERORDER+1));
+      }
+    }
+
+    /* post filtering */
+    (*iLBCdec_inst).last_lag =
+        WebRtcIlbcfix_EnhancerInterface(data, decresidual, iLBCdec_inst);
+
+    /* synthesis filtering */
+
+    /* Set up the filter state */
+    WEBRTC_SPL_MEMCPY_W16(&data[-LPC_FILTERORDER], iLBCdec_inst->syntMem, LPC_FILTERORDER);
+
+    if (iLBCdec_inst->mode==20) {
+      /* Enhancer has 40 samples delay */
+      i=0;
+      WebRtcSpl_FilterARFastQ12(
+          data, data,
+          iLBCdec_inst->old_syntdenum + (i+iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1),
+          LPC_FILTERORDER+1, SUBL);
+
+      for (i=1; i < iLBCdec_inst->nsub; i++) {
+        WebRtcSpl_FilterARFastQ12(
+            data+i*SUBL, data+i*SUBL,
+            syntdenum+(i-1)*(LPC_FILTERORDER+1),
+            LPC_FILTERORDER+1, SUBL);
+      }
+
+    } else if (iLBCdec_inst->mode==30) {
+      /* Enhancer has 80 samples delay */
+      for (i=0; i < 2; i++) {
+        WebRtcSpl_FilterARFastQ12(
+            data+i*SUBL, data+i*SUBL,
+            iLBCdec_inst->old_syntdenum + (i+4)*(LPC_FILTERORDER+1),
+            LPC_FILTERORDER+1, SUBL);
+      }
+      for (i=2; i < iLBCdec_inst->nsub; i++) {
+        WebRtcSpl_FilterARFastQ12(
+            data+i*SUBL, data+i*SUBL,
+            syntdenum+(i-2)*(LPC_FILTERORDER+1),
+            LPC_FILTERORDER+1, SUBL);
+      }
+    }
+
+    /* Save the filter state */
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
+
+  } else { /* Enhancer not activated */
+    WebRtc_Word16 lag;
+
+    /* Find last lag (since the enhancer is not called to give this info) */
+    lag = 20;
+    if (iLBCdec_inst->mode==20) {
+      lag = (WebRtc_Word16)WebRtcIlbcfix_XcorrCoef(
+          &decresidual[iLBCdec_inst->blockl-60],
+          &decresidual[iLBCdec_inst->blockl-60-lag],
+          60,
+          80, lag, -1);
+    } else {
+      lag = (WebRtc_Word16)WebRtcIlbcfix_XcorrCoef(
+          &decresidual[iLBCdec_inst->blockl-ENH_BLOCKL],
+          &decresidual[iLBCdec_inst->blockl-ENH_BLOCKL-lag],
+          ENH_BLOCKL,
+          100, lag, -1);
+    }
+
+    /* Store lag (it is needed if next packet is lost) */
+    (*iLBCdec_inst).last_lag = (int)lag;
+
+    /* copy data and run synthesis filter */
+    WEBRTC_SPL_MEMCPY_W16(data, decresidual, iLBCdec_inst->blockl);
+
+    /* Set up the filter state */
+    WEBRTC_SPL_MEMCPY_W16(&data[-LPC_FILTERORDER], iLBCdec_inst->syntMem, LPC_FILTERORDER);
+
+    for (i=0; i < iLBCdec_inst->nsub; i++) {
+      WebRtcSpl_FilterARFastQ12(
+          data+i*SUBL, data+i*SUBL,
+          syntdenum + i*(LPC_FILTERORDER+1),
+          LPC_FILTERORDER+1, SUBL);
+    }
+
+    /* Save the filter state */
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
+  }
+
+  WEBRTC_SPL_MEMCPY_W16(decblock,data,iLBCdec_inst->blockl);
+
+  /* High pass filter the signal (with upscaling a factor 2 and saturation) */
+  WebRtcIlbcfix_HpOutput(decblock, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+                         iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
+                         iLBCdec_inst->blockl);
+
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->old_syntdenum,
+                        syntdenum, iLBCdec_inst->nsub*(LPC_FILTERORDER+1));
+
+  iLBCdec_inst->prev_enh_pl=0;
+
+  if (mode==0) { /* PLC was used */
+    iLBCdec_inst->prev_enh_pl=1;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/decode.h b/trunk/src/modules/audio_coding/codecs/ilbc/decode.h
new file mode 100644
index 0000000..0252d9c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/decode.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Decode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  main decoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeImpl(
+    WebRtc_Word16 *decblock,    /* (o) decoded signal block */
+    WebRtc_UWord16 *bytes,     /* (i) encoded signal bits */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) the decoder state
+                                           structure */
+    WebRtc_Word16 mode      /* (i) 0: bad packet, PLC,
+                                                                   1: normal */
+                           );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/decode_residual.c b/trunk/src/modules/audio_coding/codecs/ilbc/decode_residual.c
new file mode 100644
index 0000000..4bc1cd3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/decode_residual.c
@@ -0,0 +1,189 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecodeResidual.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "state_construct.h"
+#include "cb_construct.h"
+#include "index_conv_dec.h"
+#include "do_plc.h"
+#include "constants.h"
+#include "enhancer_interface.h"
+#include "xcorr_coef.h"
+#include "lsf_check.h"
+
+
+/*----------------------------------------------------------------*
+ *  frame residual decoder function (subroutine to iLBC_decode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeResidual(
+    iLBC_Dec_Inst_t *iLBCdec_inst,
+    /* (i/o) the decoder state structure */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used
+                                for the decoding  */
+    WebRtc_Word16 *decresidual,  /* (o) decoded residual frame */
+    WebRtc_Word16 *syntdenum   /* (i) the decoded synthesis filter
+                                  coefficients */
+                                  ) {
+  WebRtc_Word16 meml_gotten, Nfor, Nback, diff, start_pos;
+  WebRtc_Word16 subcount, subframe;
+  WebRtc_Word16 *reverseDecresidual = iLBCdec_inst->enh_buf; /* Reversed decoded data, used for decoding backwards in time (reuse memory in state) */
+  WebRtc_Word16 *memVec = iLBCdec_inst->prevResidual;  /* Memory for codebook and filter state (reuse memory in state) */
+  WebRtc_Word16 *mem = &memVec[CB_HALFFILTERLEN];   /* Memory for codebook */
+
+  /* Number of start-state samples that are NOT scalar quantized, i.e.
+     the part of the two start subframes that is coded with the
+     adaptive codebook instead */
+  diff = STATE_LEN - iLBCdec_inst->state_short_len;
+
+  /* Locate the scalar-coded part of the start state: it is placed
+     either first or last within the start subframes, as signalled
+     by state_first */
+  if (iLBC_encbits->state_first == 1) {
+    start_pos = (iLBC_encbits->startIdx-1)*SUBL;
+  } else {
+    start_pos = (iLBC_encbits->startIdx-1)*SUBL + diff;
+  }
+
+  /* decode scalar part of start state */
+
+  WebRtcIlbcfix_StateConstruct(iLBC_encbits->idxForMax,
+                               iLBC_encbits->idxVec, &syntdenum[(iLBC_encbits->startIdx-1)*(LPC_FILTERORDER+1)],
+                               &decresidual[start_pos], iLBCdec_inst->state_short_len
+                               );
+
+  if (iLBC_encbits->state_first) { /* put adaptive part in the end */
+
+    /* setup memory: zero-fill, then place the just-decoded scalar
+       state at the end of the codebook memory */
+
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCdec_inst->state_short_len));
+    WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCdec_inst->state_short_len, decresidual+start_pos,
+                          iLBCdec_inst->state_short_len);
+
+    /* construct the remaining diff samples of the start state from
+       the adaptive codebook */
+
+    WebRtcIlbcfix_CbConstruct(
+        &decresidual[start_pos+iLBCdec_inst->state_short_len],
+        iLBC_encbits->cb_index, iLBC_encbits->gain_index,
+        mem+CB_MEML-ST_MEM_L_TBL,
+        ST_MEM_L_TBL, (WebRtc_Word16)diff
+                              );
+
+  }
+  else {/* put adaptive part in the beginning */
+
+    /* create reversed vectors for prediction */
+
+    WebRtcSpl_MemCpyReversedOrder(reverseDecresidual+diff,
+                                  &decresidual[(iLBC_encbits->startIdx+1)*SUBL-1-STATE_LEN], diff);
+
+    /* setup memory: the scalar state is copied, time-reversed, to the
+       end of the codebook memory */
+
+    meml_gotten = iLBCdec_inst->state_short_len;
+    WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1,
+                                  decresidual+start_pos, meml_gotten);
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+
+    /* construct decoded vector in reversed time order */
+
+    WebRtcIlbcfix_CbConstruct(
+        reverseDecresidual,
+        iLBC_encbits->cb_index, iLBC_encbits->gain_index,
+        mem+CB_MEML-ST_MEM_L_TBL,
+        ST_MEM_L_TBL, diff
+                              );
+
+    /* get decoded residual from reversed vector */
+
+    WebRtcSpl_MemCpyReversedOrder(&decresidual[start_pos-1],
+                                  reverseDecresidual, diff);
+  }
+
+  /* counter for predicted subframes; index 0 of cb_index/gain_index
+     was consumed by the start state above */
+
+  subcount=1;
+
+  /* forward prediction of subframes (subframes after the start state) */
+
+  Nfor = iLBCdec_inst->nsub-iLBC_encbits->startIdx-1;
+
+  if( Nfor > 0 ) {
+
+    /* setup memory */
+    WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN);
+    WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-STATE_LEN,
+                          decresidual+(iLBC_encbits->startIdx-1)*SUBL, STATE_LEN);
+
+    /* loop over subframes to decode */
+
+    for (subframe=0; subframe<Nfor; subframe++) {
+
+      /* construct decoded vector */
+      WebRtcIlbcfix_CbConstruct(
+          &decresidual[(iLBC_encbits->startIdx+1+subframe)*SUBL],
+          iLBC_encbits->cb_index+subcount*CB_NSTAGES,
+          iLBC_encbits->gain_index+subcount*CB_NSTAGES,
+          mem, MEM_LF_TBL, SUBL
+                                );
+
+      /* update memory: shift in the newly decoded subframe */
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, CB_MEML-SUBL);
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &decresidual[(iLBC_encbits->startIdx+1+subframe)*SUBL], SUBL);
+
+      subcount++;
+    }
+
+  }
+
+  /* backward prediction of subframes (subframes before the start
+     state, decoded in reversed time order) */
+
+  Nback = iLBC_encbits->startIdx-1;
+
+  if( Nback > 0 ){
+
+    /* setup memory, clamped to the codebook memory size */
+
+    meml_gotten = SUBL*(iLBCdec_inst->nsub+1-iLBC_encbits->startIdx);
+    if( meml_gotten > CB_MEML ) {
+      meml_gotten=CB_MEML;
+    }
+
+    WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1,
+                                  decresidual+(iLBC_encbits->startIdx-1)*SUBL, meml_gotten);
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+
+    /* loop over subframes to decode */
+
+    for (subframe=0; subframe<Nback; subframe++) {
+
+      /* construct decoded vector */
+      WebRtcIlbcfix_CbConstruct(
+          &reverseDecresidual[subframe*SUBL],
+          iLBC_encbits->cb_index+subcount*CB_NSTAGES,
+          iLBC_encbits->gain_index+subcount*CB_NSTAGES,
+          mem, MEM_LF_TBL, SUBL
+                                );
+
+      /* update memory: shift in the newly decoded subframe */
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, CB_MEML-SUBL);
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &reverseDecresidual[subframe*SUBL], SUBL);
+
+      subcount++;
+    }
+
+    /* get decoded residual from reversed vector */
+    WebRtcSpl_MemCpyReversedOrder(decresidual+SUBL*Nback-1,
+                                  reverseDecresidual, SUBL*Nback);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/decode_residual.h b/trunk/src/modules/audio_coding/codecs/ilbc/decode_residual.h
new file mode 100644
index 0000000..ea7208a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/decode_residual.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecodeResidual.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  frame residual decoder function (subroutine to iLBC_decode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeResidual(
+    iLBC_Dec_Inst_t *iLBCdec_inst,
+    /* (i/o) the decoder state structure */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used
+                                   for the decoding  */
+    WebRtc_Word16 *decresidual,  /* (o) decoded residual frame */
+    WebRtc_Word16 *syntdenum   /* (i) the decoded synthesis filter
+                                                   coefficients */
+                                  );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c b/trunk/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
new file mode 100644
index 0000000..eee3105
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecoderInterpolateLsp.c
+
+******************************************************************/
+
+#include "lsf_interpolate_to_poly_dec.h"
+#include "bw_expand.h"
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  obtain synthesis and weighting filters from lsf coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecoderInterpolateLsp(
+    WebRtc_Word16 *syntdenum,  /* (o) synthesis filter coefficients */
+    WebRtc_Word16 *weightdenum, /* (o) weighting denumerator
+                                   coefficients */
+    WebRtc_Word16 *lsfdeq,   /* (i) dequantized lsf coefficients */
+    WebRtc_Word16 length,   /* (i) length of lsf coefficient vector */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i) the decoder state structure */
+                                          ){
+  int  i, pos, lp_length;
+  WebRtc_Word16  lp[LPC_FILTERORDER + 1], *lsfdeq2;
+
+  /* lsfdeq2 points at the second LSF vector in lsfdeq (used in
+     30 ms mode, where two LSF vectors are transmitted per frame) */
+  lsfdeq2 = lsfdeq + length;
+  lp_length = length + 1;
+
+  if (iLBCdec_inst->mode==30) {
+    /* subframe 1: Interpolation between old and first LSF */
+
+    WebRtcIlbcfix_LspInterpolate2PolyDec(lp, (*iLBCdec_inst).lsfdeqold, lsfdeq,
+                                         WebRtcIlbcfix_kLsfWeight30ms[0], length);
+    WEBRTC_SPL_MEMCPY_W16(syntdenum,lp,lp_length);
+    WebRtcIlbcfix_BwExpand(weightdenum, lp, (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+
+    /* subframes 2 to 6: interpolation between first and last LSF */
+
+    pos = lp_length;
+    for (i = 1; i < 6; i++) {
+      WebRtcIlbcfix_LspInterpolate2PolyDec(lp, lsfdeq, lsfdeq2,
+                                           WebRtcIlbcfix_kLsfWeight30ms[i], length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum + pos,lp,lp_length);
+      WebRtcIlbcfix_BwExpand(weightdenum + pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+      pos += lp_length;
+    }
+  } else { /* iLBCdec_inst->mode=20 */
+    /* subframes 1 to 4: interpolation between old and new LSF */
+    pos = 0;
+    for (i = 0; i < iLBCdec_inst->nsub; i++) {
+      WebRtcIlbcfix_LspInterpolate2PolyDec(lp, iLBCdec_inst->lsfdeqold, lsfdeq,
+                                           WebRtcIlbcfix_kLsfWeight20ms[i], length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum+pos,lp,lp_length);
+      WebRtcIlbcfix_BwExpand(weightdenum+pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+      pos += lp_length;
+    }
+  }
+
+  /* update memory: remember the LSF vector that applies at the end of
+     this frame, as the interpolation starting point for the next one */
+
+  if (iLBCdec_inst->mode==30) {
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, lsfdeq2, length);
+  } else {
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, lsfdeq, length);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h b/trunk/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
new file mode 100644
index 0000000..3896ca9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecoderInterpolateLsp.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  obtain synthesis and weighting filters from lsf coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecoderInterpolateLsp(
+    WebRtc_Word16 *syntdenum,  /* (o) synthesis filter coefficients */
+    WebRtc_Word16 *weightdenum, /* (o) weighting denumerator
+                                   coefficients */
+    WebRtc_Word16 *lsfdeq,   /* (i) dequantized lsf coefficients */
+    WebRtc_Word16 length,   /* (i) length of lsf coefficient vector */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i) the decoder state structure */
+                                          );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/defines.h b/trunk/src/modules/audio_coding/codecs/ilbc/defines.h
new file mode 100644
index 0000000..bdeba01
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/defines.h
@@ -0,0 +1,219 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ define.h
+
+******************************************************************/
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+#include <string.h>
+
+/* general codec settings */
+
+#define FS       8000
+#define BLOCKL_20MS     160
+#define BLOCKL_30MS     240
+#define BLOCKL_MAX     240
+#define NSUB_20MS     4
+#define NSUB_30MS     6
+#define NSUB_MAX     6
+#define NASUB_20MS     2
+#define NASUB_30MS     4
+#define NASUB_MAX     4
+#define SUBL      40
+#define STATE_LEN     80
+#define STATE_SHORT_LEN_30MS  58
+#define STATE_SHORT_LEN_20MS  57
+
+/* LPC settings */
+
+#define LPC_FILTERORDER    10
+#define LPC_LOOKBACK    60
+#define LPC_N_20MS     1
+#define LPC_N_30MS     2
+#define LPC_N_MAX     2
+#define LPC_ASYMDIFF    20
+#define LSF_NSPLIT     3
+#define LSF_NUMBER_OF_STEPS   4
+#define LPC_HALFORDER    5
+#define COS_GRID_POINTS 60
+
+/* cb settings */
+
+#define CB_NSTAGES     3
+#define CB_EXPAND     2
+#define CB_MEML      147
+#define CB_FILTERLEN    (2*4)
+#define CB_HALFFILTERLEN   4
+#define CB_RESRANGE     34
+#define CB_MAXGAIN_FIXQ6   83 /* error = -0.24% */
+#define CB_MAXGAIN_FIXQ14   21299
+
+/* enhancer */
+
+#define ENH_BLOCKL     80  /* block length */
+#define ENH_BLOCKL_HALF    (ENH_BLOCKL/2)
+#define ENH_HL      3  /* 2*ENH_HL+1 is number blocks
+                                                                           in said second sequence */
+#define ENH_SLOP     2  /* max difference estimated and
+                                                                           correct pitch period */
+#define ENH_PLOCSL     8  /* pitch-estimates and
+                                                                           pitch-locations buffer length */
+#define ENH_OVERHANG    2
+#define ENH_UPS0     4  /* upsampling rate */
+#define ENH_FL0      3  /* 2*FLO+1 is the length of each filter */
+#define ENH_FLO_MULT2_PLUS1   7
+#define ENH_VECTL     (ENH_BLOCKL+2*ENH_FL0)
+#define ENH_CORRDIM     (2*ENH_SLOP+1)
+/* NOTE(review): BLOCKL is not defined in this header (only
+   BLOCKL_20MS/_30MS/_MAX are); ENH_NBLOCKS cannot expand as written,
+   so it is presumably unused -- confirm before relying on it */
+#define ENH_NBLOCKS     (BLOCKL/ENH_BLOCKL)
+#define ENH_NBLOCKS_EXTRA   5
+#define ENH_NBLOCKS_TOT    8 /* ENH_NBLOCKS+ENH_NBLOCKS_EXTRA */
+#define ENH_BUFL     (ENH_NBLOCKS_TOT)*ENH_BLOCKL
+#define ENH_BUFL_FILTEROVERHEAD  3
+#define ENH_A0      819   /* Q14 */
+#define ENH_A0_MINUS_A0A0DIV4  848256041 /* Q34 */
+#define ENH_A0DIV2     26843546 /* Q30 */
+
+/* PLC */
+
+/* Down sampling */
+
+#define FILTERORDER_DS_PLUS1  7
+#define DELAY_DS     3
+#define FACTOR_DS     2
+
+/* bit stream defs */
+
+#define NO_OF_BYTES_20MS   38
+#define NO_OF_BYTES_30MS   50
+#define NO_OF_WORDS_20MS   19
+#define NO_OF_WORDS_30MS   25
+#define STATE_BITS     3
+#define BYTE_LEN     8
+#define ULP_CLASSES     3
+
+/* help parameters */
+
+#define TWO_PI_FIX     25736 /* Q12 */
+
+/* Constants for codebook search and creation */
+
+#define ST_MEM_L_TBL  85
+#define MEM_LF_TBL  147
+
+
+/* Struct for the bits (the unpacked parameters of one encoded frame) */
+typedef struct iLBC_bits_t_ {
+  WebRtc_Word16 lsf[LSF_NSPLIT*LPC_N_MAX];
+  WebRtc_Word16 cb_index[CB_NSTAGES*(NASUB_MAX+1)];  /* First CB_NSTAGES values contains extra CB index */
+  WebRtc_Word16 gain_index[CB_NSTAGES*(NASUB_MAX+1)]; /* First CB_NSTAGES values contains extra CB gain */
+  WebRtc_Word16 idxForMax;
+  /* 1 if the scalar-coded start state precedes the adaptive part
+     (see WebRtcIlbcfix_DecodeResidual), 0 otherwise */
+  WebRtc_Word16 state_first;
+  WebRtc_Word16 idxVec[STATE_SHORT_LEN_30MS];
+  WebRtc_Word16 firstbits;
+  WebRtc_Word16 startIdx;
+} iLBC_bits;
+
+/* type definition encoder instance */
+typedef struct iLBC_Enc_Inst_t_ {
+
+  /* flag for frame size mode */
+  WebRtc_Word16 mode;
+
+  /* basic parameters for different frame sizes */
+  WebRtc_Word16 blockl;
+  WebRtc_Word16 nsub;
+  WebRtc_Word16 nasub;
+  WebRtc_Word16 no_of_bytes, no_of_words;
+  WebRtc_Word16 lpc_n;
+  WebRtc_Word16 state_short_len;
+
+  /* analysis filter state */
+  WebRtc_Word16 anaMem[LPC_FILTERORDER];
+
+  /* Fix-point old lsf parameters for interpolation */
+  WebRtc_Word16 lsfold[LPC_FILTERORDER];
+  WebRtc_Word16 lsfdeqold[LPC_FILTERORDER];
+
+  /* signal buffer for LP analysis */
+  WebRtc_Word16 lpc_buffer[LPC_LOOKBACK + BLOCKL_MAX];
+
+  /* state of input HP filter */
+  WebRtc_Word16 hpimemx[2];
+  WebRtc_Word16 hpimemy[4];
+
+#ifdef SPLIT_10MS
+  WebRtc_Word16 weightdenumbuf[66];
+  WebRtc_Word16 past_samples[160];
+  WebRtc_UWord16 bytes[25];
+  WebRtc_Word16 section;
+  WebRtc_Word16 Nfor_flag;
+  WebRtc_Word16 Nback_flag;
+  WebRtc_Word16 start_pos;
+  WebRtc_Word16 diff;
+#endif
+
+} iLBC_Enc_Inst_t;
+
+/* type definition decoder instance */
+typedef struct iLBC_Dec_Inst_t_ {
+
+  /* flag for frame size mode */
+  WebRtc_Word16 mode;
+
+  /* basic parameters for different frame sizes */
+  WebRtc_Word16 blockl;
+  WebRtc_Word16 nsub;
+  WebRtc_Word16 nasub;
+  WebRtc_Word16 no_of_bytes, no_of_words;
+  WebRtc_Word16 lpc_n;
+  WebRtc_Word16 state_short_len;
+
+  /* synthesis filter state */
+  WebRtc_Word16 syntMem[LPC_FILTERORDER];
+
+  /* old LSF for interpolation */
+  WebRtc_Word16 lsfdeqold[LPC_FILTERORDER];
+
+  /* pitch lag estimated in enhancer and used in PLC */
+  int last_lag;
+
+  /* PLC state information */
+  int consPLICount, prev_enh_pl;
+  WebRtc_Word16 perSquare;
+
+  WebRtc_Word16 prevScale, prevPLI;
+  WebRtc_Word16 prevLag, prevLpc[LPC_FILTERORDER+1];
+  /* previous residual; also reused as codebook/filter scratch memory
+     by WebRtcIlbcfix_DecodeResidual */
+  WebRtc_Word16 prevResidual[NSUB_MAX*SUBL];
+  WebRtc_Word16 seed;
+
+  /* previous synthesis filter parameters */
+
+  WebRtc_Word16 old_syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+
+  /* state of output HP filter */
+  WebRtc_Word16 hpimemx[2];
+  WebRtc_Word16 hpimemy[4];
+
+  /* enhancer state information */
+  int use_enhancer;
+  /* enhancer buffer; also reused as reversed-residual scratch by
+     WebRtcIlbcfix_DecodeResidual */
+  WebRtc_Word16 enh_buf[ENH_BUFL+ENH_BUFL_FILTEROVERHEAD];
+  WebRtc_Word16 enh_period[ENH_NBLOCKS_TOT];
+
+} iLBC_Dec_Inst_t;
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/do_plc.c b/trunk/src/modules/audio_coding/codecs/ilbc/do_plc.c
new file mode 100644
index 0000000..0dfae2b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/do_plc.c
@@ -0,0 +1,308 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DoThePlc.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "comp_corr.h"
+#include "bw_expand.h"
+
+/*----------------------------------------------------------------*
+ *  Packet loss concealment routine. Conceals a residual signal
+ *  and LP parameters. If no packet loss, update state.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DoThePlc(
+    WebRtc_Word16 *PLCresidual,  /* (o) concealed residual */
+    WebRtc_Word16 *PLClpc,    /* (o) concealed LP parameters */
+    WebRtc_Word16 PLI,     /* (i) packet loss indicator
+                                                           0 - no PL, 1 = PL */
+    WebRtc_Word16 *decresidual,  /* (i) decoded residual */
+    WebRtc_Word16 *lpc,    /* (i) decoded LPC (only used for no PL) */
+    WebRtc_Word16 inlag,    /* (i) pitch lag */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i/o) decoder instance */
+                            ){
+  WebRtc_Word16 i, pick;
+  WebRtc_Word32 cross, ener, cross_comp, ener_comp = 0;
+  WebRtc_Word32 measure, maxMeasure, energy;
+  WebRtc_Word16 max, crossSquareMax, crossSquare;
+  WebRtc_Word16 j, lag, tmp1, tmp2, randlag;
+  WebRtc_Word16 shift1, shift2, shift3, shiftMax;
+  WebRtc_Word16 scale3;
+  WebRtc_Word16 corrLen;
+  WebRtc_Word32 tmpW32, tmp2W32;
+  WebRtc_Word16 use_gain;
+  WebRtc_Word16 tot_gain;
+  WebRtc_Word16 max_perSquare;
+  WebRtc_Word16 scale1, scale2;
+  WebRtc_Word16 totscale;
+  WebRtc_Word32 nom;
+  WebRtc_Word16 denom;
+  WebRtc_Word16 pitchfact;
+  WebRtc_Word16 use_lag;
+  int ind;
+  WebRtc_Word16 randvec[BLOCKL_MAX];
+
+  /* Packet Loss */
+  if (PLI == 1) {
+
+    (*iLBCdec_inst).consPLICount += 1;
+
+    /* if previous frame not lost,
+       determine pitch pred. gain */
+
+    if (iLBCdec_inst->prevPLI != 1) {
+
+      /* Maximum 60 samples are correlated, preserve as high accuracy
+         as possible without getting overflow */
+      max = WebRtcSpl_MaxAbsValueW16((*iLBCdec_inst).prevResidual, (WebRtc_Word16)iLBCdec_inst->blockl);
+      scale3 = (WebRtcSpl_GetSizeInBits(max)<<1) - 25;
+      if (scale3 < 0) {
+        scale3 = 0;
+      }
+
+      /* Store scale for use when interpolating between the
+       * concealment and the received packet */
+      iLBCdec_inst->prevScale = scale3;
+
+      /* Search around the previous lag +/-3 to find the
+         best pitch period */
+      lag = inlag - 3;
+
+      /* Guard against getting outside the frame */
+      corrLen = WEBRTC_SPL_MIN(60, iLBCdec_inst->blockl-(inlag+3));
+
+      WebRtcIlbcfix_CompCorr( &cross, &ener,
+                              iLBCdec_inst->prevResidual, lag, iLBCdec_inst->blockl, corrLen, scale3);
+
+      /* Normalize and store cross^2 and the number of shifts */
+      shiftMax = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross))-15;
+      crossSquareMax = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross, -shiftMax),
+                                                                WEBRTC_SPL_SHIFT_W32(cross, -shiftMax), 15);
+
+      for (j=inlag-2;j<=inlag+3;j++) {
+        WebRtcIlbcfix_CompCorr( &cross_comp, &ener_comp,
+                                iLBCdec_inst->prevResidual, j, iLBCdec_inst->blockl, corrLen, scale3);
+
+        /* Use the criteria (corr*corr)/energy to compare if
+           this lag is better or not. To avoid the division,
+           do a cross multiplication */
+        shift1 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross_comp))-15;
+        crossSquare = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1),
+                                                               WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1), 15);
+
+        shift2 = WebRtcSpl_GetSizeInBits(ener)-15;
+        measure = WEBRTC_SPL_MUL_16_16(WEBRTC_SPL_SHIFT_W32(ener, -shift2),
+                                       crossSquare);
+
+        shift3 = WebRtcSpl_GetSizeInBits(ener_comp)-15;
+        maxMeasure = WEBRTC_SPL_MUL_16_16(WEBRTC_SPL_SHIFT_W32(ener_comp, -shift3),
+                                          crossSquareMax);
+
+        /* Calculate shift value, so that the two measures can
+           be put in the same Q domain */
+        if(((shiftMax<<1)+shift3) > ((shift1<<1)+shift2)) {
+          tmp1 = WEBRTC_SPL_MIN(31, (shiftMax<<1)+shift3-(shift1<<1)-shift2);
+          tmp2 = 0;
+        } else {
+          tmp1 = 0;
+          tmp2 = WEBRTC_SPL_MIN(31, (shift1<<1)+shift2-(shiftMax<<1)-shift3);
+        }
+
+        if ((measure>>tmp1) > (maxMeasure>>tmp2)) {
+          /* New lag is better => record lag, measure and domain */
+          lag = j;
+          crossSquareMax = crossSquare;
+          cross = cross_comp;
+          shiftMax = shift1;
+          ener = ener_comp;
+        }
+      }
+
+      /* Calculate the periodicity for the lag with the maximum correlation.
+
+         Definition of the periodicity:
+         abs(corr(vec1, vec2))/(sqrt(energy(vec1))*sqrt(energy(vec2)))
+
+         Work in the Square domain to simplify the calculations
+         max_perSquare is less than 1 (in Q15)
+      */
+      tmp2W32=WebRtcSpl_DotProductWithScale(&iLBCdec_inst->prevResidual[iLBCdec_inst->blockl-corrLen],
+                                            &iLBCdec_inst->prevResidual[iLBCdec_inst->blockl-corrLen],
+                                            corrLen, scale3);
+
+      /* NOTE(review): the guard tests ener_comp (energy of the last lag
+         tried in the loop above) while the denominator below uses ener
+         (energy of the best lag) -- looks inconsistent; confirm intent. */
+      if ((tmp2W32>0)&&(ener_comp>0)) {
+        /* norm energies to WebRtc_Word16, compute the product of the energies and
+           use the upper WebRtc_Word16 as the denominator */
+
+        scale1=(WebRtc_Word16)WebRtcSpl_NormW32(tmp2W32)-16;
+        tmp1=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(tmp2W32, scale1);
+
+        scale2=(WebRtc_Word16)WebRtcSpl_NormW32(ener)-16;
+        tmp2=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(ener, scale2);
+        denom=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp1, tmp2, 16); /* denom in Q(scale1+scale2-16) */
+
+        /* Square the cross correlation and norm it such that max_perSquare
+           will be in Q15 after the division */
+
+        totscale = scale1+scale2-1;
+        tmp1 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cross, (totscale>>1));
+        tmp2 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cross, totscale-(totscale>>1));
+
+        nom = WEBRTC_SPL_MUL_16_16(tmp1, tmp2);
+        max_perSquare = (WebRtc_Word16)WebRtcSpl_DivW32W16(nom, denom);
+
+      } else {
+        max_perSquare = 0;
+      }
+    }
+
+    /* previous frame lost, use recorded lag and gain */
+
+    else {
+      lag = iLBCdec_inst->prevLag;
+      max_perSquare = iLBCdec_inst->perSquare;
+    }
+
+    /* Attenuate signal and scale down pitch pred gain if
+       several frames lost consecutively.
+       FIX: test the longest-loss thresholds first; the original
+       ascending order (>320 first) made the >640/>960/>1280
+       branches unreachable, so attenuation never went below 0.9 */
+
+    use_gain = 32767;   /* 1.0 in Q15 */
+
+    if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>1280) {
+      use_gain = 0;   /* 0.0 in Q15 */
+    } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>960) {
+      use_gain = 16384;  /* 0.5 in Q15 */
+    } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>640) {
+      use_gain = 22938;  /* 0.7 in Q15 */
+    } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>320) {
+      use_gain = 29491;  /* 0.9 in Q15 */
+    }
+
+    /* Compute mixing factor of pitch repetition and noise:
+       for max_per>0.7 set periodicity to 1.0
+       0.4<max_per<0.7 set periodicity to (max_per-0.4)/(0.7-0.4)
+       max_per<0.4 set periodicity to 0.0
+    */
+
+    if (max_perSquare>7868) { /* periodicity > 0.7  (0.7^4=0.2401 in Q15) */
+      pitchfact = 32767;
+    } else if (max_perSquare>839) { /* 0.4 < periodicity < 0.7 (0.4^4=0.0256 in Q15) */
+      /* find best index and interpolate from that */
+      ind = 5;
+      while ((max_perSquare<WebRtcIlbcfix_kPlcPerSqr[ind])&&(ind>0)) {
+        ind--;
+      }
+      /* pitch fact is approximated by first order */
+      tmpW32 = (WebRtc_Word32)WebRtcIlbcfix_kPlcPitchFact[ind] +
+          WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kPlcPfSlope[ind], (max_perSquare-WebRtcIlbcfix_kPlcPerSqr[ind]), 11);
+
+      pitchfact = (WebRtc_Word16)WEBRTC_SPL_MIN(tmpW32, 32767); /* guard against overflow */
+
+    } else { /* periodicity < 0.4 */
+      pitchfact = 0;
+    }
+
+    /* avoid repetition of same pitch cycle (buzziness) */
+    use_lag = lag;
+    if (lag<80) {
+      use_lag = 2*lag;
+    }
+
+    /* compute concealed residual */
+    energy = 0;
+
+    for (i=0; i<iLBCdec_inst->blockl; i++) {
+
+      /* noise component -  52 < randlagFIX < 117 */
+      iLBCdec_inst->seed = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(iLBCdec_inst->seed, 31821)+(WebRtc_Word32)13849);
+      randlag = 53 + (WebRtc_Word16)(iLBCdec_inst->seed & 63);
+
+      pick = i - randlag;
+
+      if (pick < 0) {
+        randvec[i] = iLBCdec_inst->prevResidual[iLBCdec_inst->blockl+pick];
+      } else {
+        randvec[i] = iLBCdec_inst->prevResidual[pick];
+      }
+
+      /* pitch repetition component */
+      pick = i - use_lag;
+
+      if (pick < 0) {
+        PLCresidual[i] = iLBCdec_inst->prevResidual[iLBCdec_inst->blockl+pick];
+      } else {
+        PLCresidual[i] = PLCresidual[pick];
+      }
+
+      /* Attenuate total gain for each 10 ms */
+      if (i<80) {
+        tot_gain=use_gain;
+      } else if (i<160) {
+        tot_gain=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(31130, use_gain, 15); /* 0.95*use_gain */
+      } else {
+        tot_gain=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(29491, use_gain, 15); /* 0.9*use_gain */
+      }
+
+
+      /* mix noise and pitch repetition */
+
+      PLCresidual[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tot_gain,
+                                                                (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( (WEBRTC_SPL_MUL_16_16(pitchfact, PLCresidual[i]) +
+                                                                                                       WEBRTC_SPL_MUL_16_16((32767-pitchfact), randvec[i]) + 16384),
+                                                                                                      15),
+                                                                15);
+
+      /* Shifting down the result one step extra to ensure that no overflow
+         will occur */
+      energy += WEBRTC_SPL_MUL_16_16_RSFT(PLCresidual[i],
+                                          PLCresidual[i], (iLBCdec_inst->prevScale+1));
+
+    }
+
+    /* less than 30 dB, use only noise */
+    if (energy < (WEBRTC_SPL_SHIFT_W32(((WebRtc_Word32)iLBCdec_inst->blockl*900),-(iLBCdec_inst->prevScale+1)))) {
+      energy = 0;
+      for (i=0; i<iLBCdec_inst->blockl; i++) {
+        PLCresidual[i] = randvec[i];
+      }
+    }
+
+    /* use the old LPC */
+    WEBRTC_SPL_MEMCPY_W16(PLClpc, (*iLBCdec_inst).prevLpc, LPC_FILTERORDER+1);
+
+    /* Update state in case there are multiple frame losses */
+    iLBCdec_inst->prevLag = lag;
+    iLBCdec_inst->perSquare = max_perSquare;
+  }
+
+  /* no packet loss, copy input */
+
+  else {
+    WEBRTC_SPL_MEMCPY_W16(PLCresidual, decresidual, iLBCdec_inst->blockl);
+    WEBRTC_SPL_MEMCPY_W16(PLClpc, lpc, (LPC_FILTERORDER+1));
+    iLBCdec_inst->consPLICount = 0;
+  }
+
+  /* update state */
+  iLBCdec_inst->prevPLI = PLI;
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->prevLpc, PLClpc, (LPC_FILTERORDER+1));
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->prevResidual, PLCresidual, iLBCdec_inst->blockl);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/do_plc.h b/trunk/src/modules/audio_coding/codecs/ilbc/do_plc.h
new file mode 100644
index 0000000..c5bcc52
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/do_plc.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DoThePlc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Packet loss concealment routine. Conceals a residual signal
+ *  and LP parameters. If no packet loss, update state.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DoThePlc(
+    WebRtc_Word16 *PLCresidual,  /* (o) concealed residual */
+    WebRtc_Word16 *PLClpc,    /* (o) concealed LP parameters */
+    WebRtc_Word16 PLI,     /* (i) packet loss indicator
+                                                           0 = no PL, 1 = PL */
+    WebRtc_Word16 *decresidual,  /* (i) decoded residual */
+    WebRtc_Word16 *lpc,    /* (i) decoded LPC (only used for no PL) */
+    WebRtc_Word16 inlag,    /* (i) pitch lag */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i/o) decoder instance */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/documentation/rfc3951.txt b/trunk/src/modules/audio_coding/codecs/ilbc/documentation/rfc3951.txt
new file mode 100644
index 0000000..d4fba08
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/documentation/rfc3951.txt
@@ -0,0 +1,10867 @@
+
+
+
+
+
+
+Network Working Group                                        S. Andersen
+Request for Comments: 3951                            Aalborg University
+Category: Experimental                                          A. Duric
+                                                                   Telio
+                                                               H. Astrom
+                                                                R. Hagen
+                                                               W. Kleijn
+                                                               J. Linden
+                                                         Global IP Sound
+                                                           December 2004
+
+
+                   Internet Low Bit Rate Codec (iLBC)
+
+Status of this Memo
+
+   This memo defines an Experimental Protocol for the Internet
+   community.  It does not specify an Internet standard of any kind.
+   Discussion and suggestions for improvement are requested.
+   Distribution of this memo is unlimited.
+
+Copyright Notice
+
+   Copyright (C) The Internet Society (2004).
+
+Abstract
+
+   This document specifies a speech codec suitable for robust voice
+   communication over IP.  The codec is developed by Global IP Sound
+   (GIPS).  It is designed for narrow band speech and results in a
+   payload bit rate of 13.33 kbit/s for 30 ms frames and 15.20 kbit/s
+   for 20 ms frames.  The codec enables graceful speech quality
+   degradation in the case of lost frames, which occurs in connection
+   with lost or delayed IP packets.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                      [Page 1]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+Table of Contents
+
+   1.  Introduction . . . . . . . . . . . . . . . . . . . . . . . . .  4
+   2.  Outline of the Codec . . . . . . . . . . . . . . . . . . . . .  5
+       2.1.  Encoder. . . . . . . . . . . . . . . . . . . . . . . . .  5
+       2.2.  Decoder. . . . . . . . . . . . . . . . . . . . . . . . .  7
+   3.  Encoder Principles . . . . . . . . . . . . . . . . . . . . . .  7
+       3.1.  Pre-processing . . . . . . . . . . . . . . . . . . . . .  9
+       3.2.  LPC Analysis and Quantization. . . . . . . . . . . . . .  9
+             3.2.1.  Computation of Autocorrelation Coefficients. . . 10
+             3.2.2.  Computation of LPC Coefficients. . . . . . . . . 11
+             3.2.3.  Computation of LSF Coefficients from LPC
+                     Coefficients . . . . . . . . . . . . . . . . . . 11
+             3.2.4.  Quantization of LSF Coefficients . . . . . . . . 12
+             3.2.5.  Stability Check of LSF Coefficients. . . . . . . 13
+             3.2.6.  Interpolation of LSF Coefficients. . . . . . . . 13
+             3.2.7.  LPC Analysis and Quantization for 20 ms Frames . 14
+       3.3.  Calculation of the Residual. . . . . . . . . . . . . . . 15
+       3.4.  Perceptual Weighting Filter. . . . . . . . . . . . . . . 15
+       3.5.  Start State Encoder. . . . . . . . . . . . . . . . . . . 15
+             3.5.1.  Start State Estimation . . . . . . . . . . . . . 16
+             3.5.2.  All-Pass Filtering and Scale Quantization. . . . 17
+             3.5.3.  Scalar Quantization. . . . . . . . . . . . . . . 18
+       3.6.  Encoding the Remaining Samples . . . . . . . . . . . . . 19
+             3.6.1.  Codebook Memory. . . . . . . . . . . . . . . . . 20
+             3.6.2.  Perceptual Weighting of Codebook Memory
+                     and Target . . . . . . . . . . . . . . . . . . . 22
+             3.6.3.  Codebook Creation. . . . . . . . . . . . . . . . 23
+                     3.6.3.1. Creation of a Base Codebook . . . . . . 23
+                     3.6.3.2. Codebook Expansion. . . . . . . . . . . 24
+                     3.6.3.3. Codebook Augmentation . . . . . . . . . 24
+             3.6.4.  Codebook Search. . . . . . . . . . . . . . . . . 26
+                     3.6.4.1. Codebook Search at Each Stage . . . . . 26
+                     3.6.4.2. Gain Quantization at Each Stage . . . . 27
+                     3.6.4.3. Preparation of Target for Next Stage. . 28
+       3.7.  Gain Correction Encoding . . . . . . . . . . . . . . . . 28
+       3.8.  Bitstream Definition . . . . . . . . . . . . . . . . . . 29
+   4.  Decoder Principles . . . . . . . . . . . . . . . . . . . . . . 32
+       4.1.  LPC Filter Reconstruction. . . . . . . . . . . . . . . . 33
+       4.2.  Start State Reconstruction . . . . . . . . . . . . . . . 33
+       4.3.  Excitation Decoding Loop . . . . . . . . . . . . . . . . 34
+       4.4.  Multistage Adaptive Codebook Decoding. . . . . . . . . . 35
+             4.4.1.  Construction of the Decoded Excitation Signal. . 35
+       4.5.  Packet Loss Concealment. . . . . . . . . . . . . . . . . 35
+             4.5.1.  Block Received Correctly and Previous Block
+                     Also Received. . . . . . . . . . . . . . . . . . 35
+             4.5.2.  Block Not Received . . . . . . . . . . . . . . . 36
+
+
+
+
+Andersen, et al.              Experimental                      [Page 2]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+             4.5.3.  Block Received Correctly When Previous Block
+                     Not Received . . . . . . . . . . . . . . . . . . 36
+       4.6.  Enhancement. . . . . . . . . . . . . . . . . . . . . . . 37
+             4.6.1.  Estimating the Pitch . . . . . . . . . . . . . . 39
+             4.6.2.  Determination of the Pitch-Synchronous
+                     Sequences. . . . . . . . . . . . . . . . . . . . 39
+             4.6.3.  Calculation of the Smoothed Excitation . . . . . 41
+             4.6.4.  Enhancer Criterion . . . . . . . . . . . . . . . 41
+             4.6.5.  Enhancing the Excitation . . . . . . . . . . . . 42
+       4.7.  Synthesis Filtering. . . . . . . . . . . . . . . . . . . 43
+       4.8.  Post Filtering . . . . . . . . . . . . . . . . . . . . . 43
+   5.  Security Considerations. . . . . . . . . . . . . . . . . . . . 43
+   6.  Evaluation of the iLBC Implementations . . . . . . . . . . . . 43
+   7.  References . . . . . . . . . . . . . . . . . . . . . . . . . . 43
+       7.1.  Normative References . . . . . . . . . . . . . . . . . . 43
+       7.2.  Informative References . . . . . . . . . . . . . . . . . 44
+   8.  ACKNOWLEDGEMENTS . . . . . . . . . . . . . . . . . . . . . . . 44
+   APPENDIX A: Reference Implementation . . . . . . . . . . . . . . . 45
+       A.1.  iLBC_test.c. . . . . . . . . . . . . . . . . . . . . . . 46
+       A.2   iLBC_encode.h. . . . . . . . . . . . . . . . . . . . . . 52
+       A.3.  iLBC_encode.c. . . . . . . . . . . . . . . . . . . . . . 53
+       A.4.  iLBC_decode.h. . . . . . . . . . . . . . . . . . . . . . 63
+       A.5.  iLBC_decode.c. . . . . . . . . . . . . . . . . . . . . . 64
+       A.6.  iLBC_define.h. . . . . . . . . . . . . . . . . . . . . . 76
+       A.7.  constants.h. . . . . . . . . . . . . . . . . . . . . . . 80
+       A.8.  constants.c. . . . . . . . . . . . . . . . . . . . . . . 82
+       A.9.  anaFilter.h. . . . . . . . . . . . . . . . . . . . . . . 96
+       A.10. anaFilter.c. . . . . . . . . . . . . . . . . . . . . . . 97
+       A.11. createCB.h . . . . . . . . . . . . . . . . . . . . . . . 98
+       A.12. createCB.c . . . . . . . . . . . . . . . . . . . . . . . 99
+       A.13. doCPLC.h . . . . . . . . . . . . . . . . . . . . . . . .104
+       A.14. doCPLC.c . . . . . . . . . . . . . . . . . . . . . . . .104
+       A.15. enhancer.h . . . . . . . . . . . . . . . . . . . . . . .109
+       A.16. enhancer.c . . . . . . . . . . . . . . . . . . . . . . .110
+       A.17. filter.h . . . . . . . . . . . . . . . . . . . . . . . .123
+       A.18. filter.c . . . . . . . . . . . . . . . . . . . . . . . .125
+       A.19. FrameClassify.h. . . . . . . . . . . . . . . . . . . . .128
+       A.20. FrameClassify.c. . . . . . . . . . . . . . . . . . . . .129
+       A.21. gainquant.h. . . . . . . . . . . . . . . . . . . . . . .131
+       A.22. gainquant.c. . . . . . . . . . . . . . . . . . . . . . .131
+       A.23. getCBvec.h . . . . . . . . . . . . . . . . . . . . . . .134
+       A.24. getCBvec.c . . . . . . . . . . . . . . . . . . . . . . .134
+       A.25. helpfun.h. . . . . . . . . . . . . . . . . . . . . . . .138
+       A.26. helpfun.c. . . . . . . . . . . . . . . . . . . . . . . .140
+       A.27. hpInput.h. . . . . . . . . . . . . . . . . . . . . . . .146
+       A.28. hpInput.c. . . . . . . . . . . . . . . . . . . . . . . .146
+       A.29. hpOutput.h . . . . . . . . . . . . . . . . . . . . . . .148
+       A.30. hpOutput.c . . . . . . . . . . . . . . . . . . . . . . .148
+
+
+
+Andersen, et al.              Experimental                      [Page 3]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       A.31. iCBConstruct.h . . . . . . . . . . . . . . . . . . . . .149
+       A.32. iCBConstruct.c . . . . . . . . . . . . . . . . . . . . .150
+       A.33. iCBSearch.h. . . . . . . . . . . . . . . . . . . . . . .152
+       A.34. iCBSearch.c. . . . . . . . . . . . . . . . . . . . . . .153
+       A.35. LPCdecode.h. . . . . . . . . . . . . . . . . . . . . . .163
+       A.36. LPCdecode.c. . . . . . . . . . . . . . . . . . . . . . .164
+       A.37. LPCencode.h. . . . . . . . . . . . . . . . . . . . . . .167
+       A.38. LPCencode.c. . . . . . . . . . . . . . . . . . . . . . .167
+       A.39. lsf.h. . . . . . . . . . . . . . . . . . . . . . . . . .172
+       A.40. lsf.c. . . . . . . . . . . . . . . . . . . . . . . . . .172
+       A.41. packing.h. . . . . . . . . . . . . . . . . . . . . . . .178
+       A.42. packing.c. . . . . . . . . . . . . . . . . . . . . . . .179
+       A.43. StateConstructW.h. . . . . . . . . . . . . . . . . . . .182
+       A.44. StateConstructW.c. . . . . . . . . . . . . . . . . . . .183
+       A.45. StateSearchW.h . . . . . . . . . . . . . . . . . . . . .185
+       A.46. StateSearchW.c . . . . . . . . . . . . . . . . . . . . .186
+       A.47. syntFilter.h . . . . . . . . . . . . . . . . . . . . . .190
+       A.48. syntFilter.c . . . . . . . . . . . . . . . . . . . . . .190
+   Authors' Addresses . . . . . . . . . . . . . . . . . . . . . . . .192
+   Full Copyright Statement . . . . . . . . . . . . . . . . . . . . .194
+
+1.  Introduction
+
+   This document contains the description of an algorithm for the coding
+   of speech signals sampled at 8 kHz.  The algorithm, called iLBC, uses
+   a block-independent linear-predictive coding (LPC) algorithm and has
+   support for two basic frame lengths: 20 ms at 15.2 kbit/s and 30 ms
+   at 13.33 kbit/s.  When the codec operates at block lengths of 20 ms,
+   it produces 304 bits per block, which SHOULD be packetized as in [1].
+   Similarly, for block lengths of 30 ms it produces 400 bits per block,
+   which SHOULD be packetized as in [1].  The two modes for the
+   different frame sizes operate in a very similar way.  When they
+   differ it is explicitly stated in the text, usually with the notation
+   x/y, where x refers to the 20 ms mode and y refers to the 30 ms mode.
+
+   The described algorithm results in a speech coding system with a
+   controlled response to packet losses similar to what is known from
+   pulse code modulation (PCM) with packet loss concealment (PLC), such
+   as the ITU-T G.711 standard [4], which operates at a fixed bit rate
+   of 64 kbit/s.  At the same time, the described algorithm enables
+   fixed bit rate coding with a quality-versus-bit rate tradeoff close
+   to state-of-the-art.  A suitable RTP payload format for the iLBC
+   codec is specified in [1].
+
+   Some of the applications for which this coder is suitable are real
+   time communications such as telephony and videoconferencing,
+   streaming audio, archival, and messaging.
+
+
+
+
+Andersen, et al.              Experimental                      [Page 4]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   Cable Television Laboratories (CableLabs(R)) has adopted iLBC as a
+   mandatory PacketCable(TM) audio codec standard for VoIP over Cable
+   applications [3].
+
+   This document is organized as follows.  Section 2 gives a brief
+   outline of the codec.  The specific encoder and decoder algorithms
+   are explained in sections 3 and 4, respectively.  Appendix A provides
+   a c-code reference implementation.
+
+   The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
+   "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
+   document are to be interpreted as described in BCP 14, RFC 2119 [2].
+
+2.  Outline of the Codec
+
+   The codec consists of an encoder and a decoder as described in
+   sections 2.1 and 2.2, respectively.
+
+   The essence of the codec is LPC and block-based coding of the LPC
+   residual signal.  For each 160/240 (20 ms/30 ms) sample block, the
+   following major steps are performed: A set of LPC filters are
+   computed, and the speech signal is filtered through them to produce
+   the residual signal.  The codec uses scalar quantization of the
+   dominant part, in terms of energy, of the residual signal for the
+   block.  The dominant state is of length 57/58 (20 ms/30 ms) samples
+   and forms a start state for dynamic codebooks constructed from the
+   already coded parts of the residual signal.  These dynamic codebooks
+   are used to code the remaining parts of the residual signal.  By this
+   method, coding independence between blocks is achieved, resulting in
+   elimination of propagation of perceptual degradations due to packet
+   loss.  The method facilitates high-quality packet loss concealment
+   (PLC).
+
+2.1.  Encoder
+
+   The input to the encoder SHOULD be 16 bit uniform PCM sampled at 8
+   kHz.  It SHOULD be partitioned into blocks of BLOCKL=160/240 samples
+   for the 20/30 ms frame size.  Each block is divided into NSUB=4/6
+   consecutive sub-blocks of SUBL=40 samples each.  For 30 ms frame
+   size, the encoder performs two LPC_FILTERORDER=10 linear-predictive
+   coding (LPC) analyses.  The first analysis applies a smooth window
+   centered over the second sub-block and extending to the middle of the
+   fifth sub-block.  The second LPC analysis applies a smooth asymmetric
+   window centered over the fifth sub-block and extending to the end of
+   the sixth sub-block.  For 20 ms frame size, one LPC_FILTERORDER=10
+   linear-predictive coding (LPC) analysis is performed with a smooth
+   window centered over the third sub-frame.
+
+
+
+
+Andersen, et al.              Experimental                      [Page 5]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   For each of the LPC analyses, a set of line-spectral frequencies
+   (LSFs) are obtained, quantized, and interpolated to obtain LSF
+   coefficients for each sub-block.  Subsequently, the LPC residual is
+   computed by using the quantized and interpolated LPC analysis
+   filters.
+
+   The two consecutive sub-blocks of the residual exhibiting the maximal
+   weighted energy are identified.  Within these two sub-blocks, the
+   start state (segment) is selected from two choices: the first 57/58
+   samples or the last 57/58 samples of the two consecutive sub-blocks.
+   The selected segment is the one of higher energy.  The start state is
+   encoded with scalar quantization.
+
+   A dynamic codebook encoding procedure is used to encode 1) the 23/22
+   (20 ms/30 ms) remaining samples in the two sub-blocks containing the
+   start state; 2) the sub-blocks after the start state in time; and 3)
+   the sub-blocks before the start state in time.  Thus, the encoding
+   target can be either the 23/22 samples remaining of the two sub-
+   blocks containing the start state or a 40-sample sub-block.  This
+   target can consist of samples indexed forward in time or backward in
+   time, depending on the location of the start state.
+
+   The codebook coding is based on an adaptive codebook built from a
+   codebook memory that contains decoded LPC excitation samples from the
+   already encoded part of the block.  These samples are indexed in the
+   same time direction as the target vector, ending at the sample
+   instant prior to the first sample instant represented in the target
+   vector.  The codebook is used in CB_NSTAGES=3 stages in a successive
+   refinement approach, and the resulting three code vector gains are
+   encoded with 5-, 4-, and 3-bit scalar quantization, respectively.
+
+   The codebook search method employs noise shaping derived from the LPC
+   filters, and the main decision criterion is to minimize the squared
+   error between the target vector and the code vectors.  Each code
+   vector in this codebook comes from one of CB_EXPAND=2 codebook
+   sections.  The first section is filled with delayed, already encoded
+   residual vectors.  The code vectors of the second codebook section
+   are constructed by predefined linear combinations of vectors in the
+   first section of the codebook.
+
+   As codebook encoding with squared-error matching is known to produce
+   a coded signal of less power than does the scalar quantized start
+   state signal, a gain re-scaling method is implemented by a refined
+   search for a better set of codebook gains in terms of power matching
+   after encoding.  This is done by searching for a higher value of the
+   gain factor for the first stage codebook, as the subsequent stage
+   codebook gains are scaled by the first stage gain.
+
+
+
+
+Andersen, et al.              Experimental                      [Page 6]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+2.2.  Decoder
+
+   Typically for packet communications, a jitter buffer placed at the
+   receiving end decides whether the packet containing an encoded signal
+   block has been received or lost.  This logic is not part of the codec
+   described here.  For each encoded signal block received the decoder
+   performs a decoding.  For each lost signal block, the decoder
+   performs a PLC operation.
+
+   The decoding for each block starts by decoding and interpolating the
+   LPC coefficients.  Subsequently the start state is decoded.
+
+   For codebook-encoded segments, each segment is decoded by
+   constructing the three code vectors given by the received codebook
+   indices in the same way that the code vectors were constructed in the
+   encoder.  The three gain factors are also decoded and the resulting
+   decoded signal is given by the sum of the three codebook vectors
+   scaled with respective gain.
+
+   An enhancement algorithm is applied to the reconstructed excitation
+   signal.  This enhancement augments the periodicity of voiced speech
+   regions.  The enhancement is optimized under the constraint that the
+   modification signal (defined as the difference between the enhanced
+   excitation and the excitation signal prior to enhancement) has a
+   short-time energy that does not exceed a preset fraction of the
+   short-time energy of the excitation signal prior to enhancement.
+
+   A packet loss concealment (PLC) operation is easily embedded in the
+   decoder.  The PLC operation can, e.g., be based on repeating LPC
+   filters and obtaining the LPC residual signal by using a long-term
+   prediction estimate from previous residual blocks.
+
+3.  Encoder Principles
+
+   The following block diagram is an overview of all the components of
+   the iLBC encoding procedure.  The description of the blocks contains
+   references to the section where that particular procedure is further
+   described.
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                      [Page 7]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+             +-----------+    +---------+    +---------+
+   speech -> | 1. Pre P  | -> | 2. LPC  | -> | 3. Ana  | ->
+             +-----------+    +---------+    +---------+
+
+             +---------------+   +--------------+
+          -> | 4. Start Sel  | ->| 5. Scalar Qu | ->
+             +---------------+   +--------------+
+
+             +--------------+    +---------------+
+          -> |6. CB Search  | -> | 7. Packetize  | -> payload
+          |  +--------------+ |  +---------------+
+          ----<---------<------
+       sub-frame 0..2/4 (20 ms/30 ms)
+
+   Figure 3.1. Flow chart of the iLBC encoder
+
+   1. Pre-process speech with a HP filter, if needed (section 3.1).
+
+   2. Compute LPC parameters, quantize, and interpolate (section 3.2).
+
+   3. Use analysis filters on speech to compute residual (section 3.3).
+
+   4. Select position of 57/58-sample start state (section 3.5).
+
+   5. Quantize the 57/58-sample start state with scalar quantization
+      (section 3.5).
+
+   6. Search the codebook for each sub-frame.  Start with 23/22 sample
+      block, then encode sub-blocks forward in time, and then encode
+      sub-blocks backward in time.  For each block, the steps in Figure
+      3.4 are performed (section 3.6).
+
+   7. Packetize the bits into the payload specified in Table 3.2.
+
+   The input to the encoder SHOULD be 16-bit uniform PCM sampled at 8
+   kHz.  Also it SHOULD be partitioned into blocks of BLOCKL=160/240
+   samples.  Each block input to the encoder is divided into NSUB=4/6
+   consecutive sub-blocks of SUBL=40 samples each.
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                      [Page 8]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+             0        39        79       119       159
+             +---------------------------------------+
+             |    1    |    2    |    3    |    4    |
+             +---------------------------------------+
+                            20 ms frame
+
+   0        39        79       119       159       199       239
+   +-----------------------------------------------------------+
+   |    1    |    2    |    3    |    4    |    5    |    6    |
+   +-----------------------------------------------------------+
+                                  30 ms frame
+   Figure 3.2. One input block to the encoder for 20 ms (with four sub-
+   frames) and 30 ms (with six sub-frames).
+
+3.1.  Pre-processing
+
+   In some applications, the recorded speech signal contains DC level
+   and/or 50/60 Hz noise.  If these components have not been removed
+   prior to the encoder call, they should be removed by a high-pass
+   filter.  A reference implementation of this, using a filter with a
+   cutoff frequency of 90 Hz, can be found in Appendix A.28.
+
+3.2.  LPC Analysis and Quantization
+
+   The input to the LPC analysis module is a possibly high-pass filtered
+   speech buffer, speech_hp, that contains 240/300 (LPC_LOOKBACK +
+   BLOCKL = 80/60 + 160/240 = 240/300) speech samples, where samples 0
+   through 79/59 are from the previous block and samples 80/60 through
+   239/299 are from the current block.  No look-ahead into the next
+   block is used.  For the very first block processed, the look-back
+   samples are assumed to be zeros.
+
+   For each input block, the LPC analysis calculates one/two set(s) of
+   LPC_FILTERORDER=10 LPC filter coefficients using the autocorrelation
+   method and the Levinson-Durbin recursion.  These coefficients are
+   converted to the Line Spectrum Frequency representation.  In the 20
+   ms case, the single lsf set represents the spectral characteristics
+   as measured at the center of the third sub-block.  For 30 ms frames,
+   the first set, lsf1, represents the spectral properties of the input
+   signal at the center of the second sub-block, and the other set,
+   lsf2, represents the spectral characteristics as measured at the
+   center of the fifth sub-block.  The details of the computation for 30
+   ms frames are described in sections 3.2.1 through 3.2.6.  Section
+   3.2.7 explains how the LPC Analysis and Quantization differs for 20
+   ms frames.
+
+
+
+
+
+
+Andersen, et al.              Experimental                      [Page 9]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+3.2.1.  Computation of Autocorrelation Coefficients
+
+   The first step in the LPC analysis procedure is to calculate
+   autocorrelation coefficients by using windowed speech samples.  This
+   windowing is the only difference in the LPC analysis procedure for
+   the two sets of coefficients.  For the first set, a 240-sample-long
+   standard symmetric Hanning window is applied to samples 0 through 239
+   of the input data.  The first window, lpc_winTbl, is defined as
+
+      lpc_winTbl[i]= 0.5 * (1.0 - cos((2*PI*(i+1))/(BLOCKL+1)));
+               i=0,...,119
+      lpc_winTbl[i] = winTbl[BLOCKL - i - 1]; i=120,...,239
+
+   The windowed speech speech_hp_win1 is then obtained by multiplying
+   the first 240 samples of the input speech buffer with the window
+   coefficients:
+
+      speech_hp_win1[i] = speech_hp[i] * lpc_winTbl[i];
+               i=0,...,BLOCKL-1
+
+   From these 240 windowed speech samples, 11 (LPC_FILTERORDER + 1)
+   autocorrelation coefficients, acf1, are calculated:
+
+      acf1[lag] += speech_hp_win1[n] * speech_hp_win1[n + lag];
+               lag=0,...,LPC_FILTERORDER; n=0,...,BLOCKL-lag-1
+
+   In order to make the analysis more robust against numerical precision
+   problems, a spectral smoothing procedure is applied by windowing the
+   autocorrelation coefficients before the LPC coefficients are
+   computed.  Also, a white noise floor is added to the autocorrelation
+   function by multiplying coefficient zero by 1.0001 (40dB below the
+   energy of the windowed speech signal).  These two steps are
+   implemented by multiplying the autocorrelation coefficients with the
+   following window:
+
+      lpc_lagwinTbl[0] = 1.0001;
+      lpc_lagwinTbl[i] = exp(-0.5 * ((2 * PI * 60.0 * i) /FS)^2);
+               i=1,...,LPC_FILTERORDER
+               where FS=8000 is the sampling frequency
+
+   Then, the windowed acf function acf1_win is obtained by
+
+      acf1_win[i] = acf1[i] * lpc_lagwinTbl[i];
+               i=0,...,LPC_FILTERORDER
+
+   The second set of autocorrelation coefficients, acf2_win, are
+   obtained in a similar manner.  The window, lpc_asymwinTbl, is applied
+   to samples 60 through 299, i.e., the entire current block.  The
+
+
+
+Andersen, et al.              Experimental                     [Page 10]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   window consists of two segments, the first (samples 0 to 219) being
+   half a Hanning window with length 440 and the second a quarter of a
+   cycle of a cosine wave.  By using this asymmetric window, an LPC
+   analysis centered in the fifth sub-block is obtained without the need
+   for any look-ahead, which would add delay.  The asymmetric window is
+   defined as
+
+      lpc_asymwinTbl[i] = (sin(PI * (i + 1) / 441))^2; i=0,...,219
+
+      lpc_asymwinTbl[i] = cos((i - 220) * PI / 40); i=220,...,239
+
+   and the windowed speech is computed by
+
+      speech_hp_win2[i] = speech_hp[i + LPC_LOOKBACK] *
+               lpc_asymwinTbl[i];  i=0,....BLOCKL-1
+
+   The windowed autocorrelation coefficients are then obtained in
+   exactly the same way as for the first analysis instance.
+
+   The generation of the windows lpc_winTbl, lpc_asymwinTbl, and
+   lpc_lagwinTbl are typically done in advance, and the arrays are
+   stored in ROM rather than repeating the calculation for every block.
+
+3.2.2.  Computation of LPC Coefficients
+
+   From the 2 x 11 smoothed autocorrelation coefficients, acf1_win and
+   acf2_win, the 2 x 11 LPC coefficients, lp1 and lp2, are calculated
+   in the same way for both analysis locations by using the well known
+   Levinson-Durbin recursion.  The first LPC coefficient is always 1.0,
+   resulting in ten unique coefficients.
+
+   After determining the LPC coefficients, a bandwidth expansion
+   procedure is applied to smooth the spectral peaks in the
+   short-term spectrum.  The bandwidth addition is obtained by the
+   following modification of the LPC coefficients:
+
+      lp1_bw[i] = lp1[i] * chirp^i; i=0,...,LPC_FILTERORDER
+      lp2_bw[i] = lp2[i] * chirp^i; i=0,...,LPC_FILTERORDER
+
+   where "chirp" is a real number between 0 and 1.  It is RECOMMENDED to
+   use a value of 0.9.
+
+3.2.3.  Computation of LSF Coefficients from LPC Coefficients
+
+   Thus far, two sets of LPC coefficients that represent the short-term
+   spectral characteristics of the speech signal for two different time
+   locations within the current block have been determined.  These
+   coefficients SHOULD be quantized and interpolated.  Before this is
+
+
+
+Andersen, et al.              Experimental                     [Page 11]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   done, it is advantageous to convert the LPC parameters into another
+   type of representation called Line Spectral Frequencies (LSF).  The
+   LSF parameters are used because they are better suited for
+   quantization and interpolation than the regular LPC coefficients.
+   Many computationally efficient methods for calculating the LSFs from
+   the LPC coefficients have been proposed in the literature.  The
+   detailed implementation of one applicable method can be found in
+   Appendix A.26.  The two arrays of LSF coefficients obtained, lsf1 and
+   lsf2, are of dimension 10 (LPC_FILTERORDER).
+
+3.2.4.  Quantization of LSF Coefficients
+
+   Because the LPC filters defined by the two sets of LSFs are also
+   needed in the decoder, the LSF parameters need to be quantized and
+   transmitted as side information.  The total number of bits required
+   to represent the quantization of the two LSF representations for one
+   block of speech is 40, with 20 bits used for each of lsf1 and lsf2.
+
+   For computational and storage reasons, the LSF vectors are quantized
+   using three-split vector quantization (VQ).  That is, the LSF vectors
+   are split into three sub-vectors that are each quantized with a
+   regular VQ.  The quantized versions of lsf1 and lsf2, qlsf1 and
+   qlsf2, are obtained by using the same memoryless split VQ.  The
+   length of each of these two LSF vectors is 10, and they are split
+   into three sub-vectors containing 3, 3, and 4 values, respectively.
+
+   For each of the sub-vectors, a separate codebook of quantized values
+   has been designed with a standard VQ training method for a large
+   database containing speech from a large number of speakers recorded
+   under various conditions.  The size of each of the three codebooks
+   associated with the split definitions above is
+
+      int size_lsfCbTbl[LSF_NSPLIT] = {64,128,128};
+
+   The actual values of the vector quantization codebook that must be
+   used can be found in the reference code of Appendix A.  Both sets of
+   LSF coefficients, lsf1 and lsf2, are quantized with a standard
+   memoryless split vector quantization (VQ) structure using the squared
+   error criterion in the LSF domain.  The split VQ quantization
+   consists of the following steps:
+
+   1) Quantize the first three LSF coefficients (1 - 3) with a VQ
+      codebook of size 64.
+   2) Quantize the next three LSF coefficients (4 - 6) with a VQ
+      codebook of size 128.
+   3) Quantize the last four LSF coefficients (7 - 10) with a VQ
+      codebook of size 128.
+
+
+
+
+Andersen, et al.              Experimental                     [Page 12]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   This procedure, repeated for lsf1 and lsf2, gives six quantization
+   indices and the quantized sets of LSF coefficients qlsf1 and qlsf2.
+   Each set of three indices is encoded with 6 + 7 + 7 = 20 bits.  The
+   total number of bits used for LSF quantization in a block is thus 40
+   bits.
+
+3.2.5.  Stability Check of LSF Coefficients
+
+   The LSF representation of the LPC filter has the convenient property
+   that the coefficients are ordered by increasing value, i.e., lsf(n-1)
+   < lsf(n), 0 < n < 10, if the corresponding synthesis filter is
+   stable.  As we are employing a split VQ scheme, it is possible that
+   at the split boundaries the LSF coefficients are not ordered
+   correctly and hence that the corresponding LP filter is unstable.  To
+   ensure that the filter used is stable, a stability check is performed
+   for the quantized LSF vectors.  If it turns out that the coefficients
+   are not ordered appropriately (with a safety margin of 50 Hz to
+   ensure that formant peaks are not too narrow), they will be moved
+   apart.  The detailed method for this can be found in Appendix A.40.
+   The same procedure is performed in the decoder.  This ensures that
+   exactly the same LSF representations are used in both encoder and
+   decoder.
+
+3.2.6.  Interpolation of LSF Coefficients
+
+   From the two sets of LSF coefficients that are computed for each
+   block of speech, different LSFs are obtained for each sub-block by
+   means of interpolation.  This procedure is performed for the original
+   LSFs (lsf1 and lsf2), as well as the quantized versions qlsf1 and
+   qlsf2, as both versions are used in the encoder.  Here follows a
+   brief summary of the interpolation scheme; the details are found in
+   the c-code of Appendix A.  In the first sub-block, the average of the
+   second LSF vector from the previous block and the first LSF vector in
+   the current block is used.  For sub-blocks two through five, the LSFs
+   used are obtained by linear interpolation from lsf1 (and qlsf1) to
+   lsf2 (and qlsf2), with lsf1 used in sub-block two and lsf2 in sub-
+   block five.  In the last sub-block, lsf2 is used.  For the very first
+   block it is assumed that the last LSF vector of the previous block is
+   equal to a predefined vector, lsfmeanTbl, obtained by calculating the
+   mean LSF vector of the LSF design database.
+
+   lsfmeanTbl[LPC_FILTERORDER] = {0.281738, 0.445801, 0.663330,
+                  0.962524, 1.251831, 1.533081, 1.850586, 2.137817,
+                  2.481445, 2.777344}
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 13]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   The interpolation method is standard linear interpolation in the LSF
+   domain.  The interpolated LSF values are converted to LPC
+   coefficients for each sub-block.  The unquantized and quantized LPC
+   coefficients form two sets of filters respectively.  The unquantized
+   analysis filter for sub-block k is defined as follows
+
+                ___
+                \
+      Ak(z)= 1 + > ak(i)*z^(-i)
+                /__
+             i=1...LPC_FILTERORDER
+
+   The quantized analysis filter for sub-block k is defined as follows
+                 ___
+                 \
+      A~k(z)= 1 + > a~k(i)*z^(-i)
+                 /__
+             i=1...LPC_FILTERORDER
+
+   A reference implementation of the lsf encoding is given in Appendix
+   A.38.  A reference implementation of the corresponding decoding can
+   be found in Appendix A.36.
+
+3.2.7.  LPC Analysis and Quantization for 20 ms Frames
+
+   As previously stated, the codec only calculates one set of LPC
+   parameters for the 20 ms frame size as opposed to two sets for 30 ms
+   frames.  A single set of autocorrelation coefficients is calculated
+   on the LPC_LOOKBACK + BLOCKL = 80 + 160 = 240 samples.  These samples
+   are windowed with the asymmetric window lpc_asymwinTbl, centered over
+   the third sub-frame, to form speech_hp_win.  Autocorrelation
+   coefficients, acf, are calculated on the 240 samples in speech_hp_win
+   and then windowed exactly as in section 3.2.1 (resulting in
+   acf_win).
+
+   This single set of windowed autocorrelation coefficients is used to
+   calculate LPC coefficients, LSF coefficients, and quantized LSF
+   coefficients in exactly the same manner as in sections 3.2.3 through
+   3.2.4.  As for the 30 ms frame size, the ten LSF coefficients are
+   divided into three sub-vectors of size 3, 3, and 4 and quantized by
+   using the same scheme and codebook as in section 3.2.4 to finally get
+   3 quantization indices.  The quantized LSF coefficients are
+   stabilized with the algorithm described in section 3.2.5.
+
+   From the set of LSF coefficients computed for this block and those
+   from the previous block, different LSFs are obtained for each sub-
+   block by means of interpolation.  The interpolation is done linearly
+   in the LSF domain over the four sub-blocks, so that the n-th sub-
+
+
+
+Andersen, et al.              Experimental                     [Page 14]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   frame uses the weight (4-n)/4 for the LSF from old frame and the
+   weight n/4 of the LSF from the current frame.  For the very first
+   block the mean LSF, lsfmeanTbl, is used as the LSF from the previous
+   block.  Similarly as seen in section 3.2.6, both unquantized, A(z),
+   and quantized, A~(z), analysis filters are calculated for each of the
+   four sub-blocks.
+
+3.3.  Calculation of the Residual
+
+   The block of speech samples is filtered by the quantized and
+   interpolated LPC analysis filters to yield the residual signal.  In
+   particular, the corresponding LPC analysis filter for each 40 sample
+   sub-block is used to filter the speech samples for the same sub-
+   block.  The filter memory at the end of each sub-block is carried
+   over to the LPC filter of the next sub-block.  The signal at the
+   output of each LP analysis filter constitutes the residual signal for
+   the corresponding sub-block.
+
+   A reference implementation of the LPC analysis filters is given in
+   Appendix A.10.
+
+3.4.  Perceptual Weighting Filter
+
+   In principle any good design of a perceptual weighting filter can be
+   applied in the encoder without compromising this codec definition.
+   However, it is RECOMMENDED to use the perceptual weighting filter Wk
+   for sub-block k specified below:
+
+      Wk(z)=1/Ak(z/LPC_CHIRP_WEIGHTDENUM), where
+                               LPC_CHIRP_WEIGHTDENUM = 0.4222
+
+   This is a simple design with low complexity that is applied in the
+   LPC residual domain.  Here Ak(z) is the filter obtained for sub-block
+   k from unquantized but interpolated LSF coefficients.
+
+3.5.  Start State Encoder
+
+   The start state is quantized by using a common 6-bit scalar quantizer
+   for the block and a 3-bit scalar quantizer operating on scaled
+   samples in the weighted speech domain.  In the following we describe
+   the state encoding in greater detail.
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 15]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+3.5.1.  Start State Estimation
+
+   The two sub-blocks containing the start state are determined by
+   finding the two consecutive sub-blocks in the block having the
+   highest power.  Advantageously, down-weighting is used in the
+   beginning and end of the sub-frames, i.e., the following measure is
+   computed (NSUB=4/6 for 20/30 ms frame size):
+
+      nsub=1,...,NSUB-1
+      ssqn[nsub] = 0.0;
+      for (i=(nsub-1)*SUBL; i<(nsub-1)*SUBL+5; i++)
+               ssqn[nsub] += sampEn_win[i-(nsub-1)*SUBL]*
+                                 residual[i]*residual[i];
+      for (i=(nsub-1)*SUBL+5; i<(nsub+1)*SUBL-5; i++)
+               ssqn[nsub] += residual[i]*residual[i];
+      for (i=(nsub+1)*SUBL-5; i<(nsub+1)*SUBL; i++)
+               ssqn[nsub] += sampEn_win[(nsub+1)*SUBL-i-1]*
+                                 residual[i]*residual[i];
+
+   where sampEn_win[5]={1/6, 2/6, 3/6, 4/6, 5/6} MAY be used.  The
+   sub-frame number corresponding to the maximum value of
+   ssqEn_win[nsub-1]*ssqn[nsub] is selected as the start state
+   indicator.  A weighting of ssqEn_win[]={0.8,0.9,1.0,0.9,0.8} for 30
+   ms frames and ssqEn_win[]={0.9,1.0,0.9} for 20 ms frames MAY
+   advantageously be used to bias the start state towards the middle of
+   the frame.
+
+   For 20 ms frames there are three possible positions for the two-sub-
+   block length maximum power segment; the start state position is
+   encoded with 2 bits.  The start state position, start, MUST be
+   encoded as
+
+      start=1: start state in sub-frame 0 and 1
+      start=2: start state in sub-frame 1 and 2
+      start=3: start state in sub-frame 2 and 3
+
+   For 30 ms frames there are five possible positions of the two-sub-
+   block length maximum power segment, the start state position is
+   encoded with 3 bits.  The start state position, start, MUST be
+   encoded as
+
+      start=1: start state in sub-frame 0 and 1
+      start=2: start state in sub-frame 1 and 2
+      start=3: start state in sub-frame 2 and 3
+      start=4: start state in sub-frame 3 and 4
+      start=5: start state in sub-frame 4 and 5
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 16]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   Hence, in both cases, index 0 is not used.  In order to shorten the
+   start state for bit rate efficiency, the start state is brought down
+   to STATE_SHORT_LEN=57 samples for 20 ms frames and STATE_SHORT_LEN=58
+   samples for 30 ms frames.  The power of the first 23/22 and last
+   23/22 samples of the two sub-frame blocks identified above is
+   computed as the sum of the squared signal sample values, and the
+   23/22-sample segment with the lowest power is excluded from the start
+   state.  One bit is transmitted to indicate which of the two possible
+   57/58 sample segments is used.  The start state position within the
+   two sub-frames determined above, state_first, MUST be encoded as
+
+      state_first=1: start state is first STATE_SHORT_LEN samples
+      state_first=0: start state is last STATE_SHORT_LEN samples
+
+3.5.2.  All-Pass Filtering and Scale Quantization
+
+   The block of residual samples in the start state is first filtered by
+   an all-pass filter with the quantized LPC coefficients as denominator
+   and reversed quantized LPC coefficients as numerator.  The purpose of
+   this phase-dispersion filter is to get a more even distribution of
+   the sample values in the residual signal.  The filtering is performed
+   by circular convolution, where the initial filter memory is set to
+   zero.
+
+      res(0..(STATE_SHORT_LEN-1))   = uncoded start state residual
+      res((STATE_SHORT_LEN)..(2*STATE_SHORT_LEN-1)) = 0
+
+      Pk(z) = A~rk(z)/A~k(z), where
+                                   ___
+                                   \
+      A~rk(z)= z^(-LPC_FILTERORDER)+>a~k(i+1)*z^(i-(LPC_FILTERORDER-1))
+                                   /__
+                               i=0...(LPC_FILTERORDER-1)
+
+      and A~k(z) is taken from the block where the start state begins
+
+      res -> Pk(z) -> filtered
+
+      ccres(k) = filtered(k) + filtered(k+STATE_SHORT_LEN),
+                                        k=0..(STATE_SHORT_LEN-1)
+
+   The all-pass filtered block is searched for its largest magnitude
+   sample.  The 10-logarithm of this magnitude is quantized with a 6-bit
+   quantizer, state_frgqTbl, by finding the nearest representation.
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 17]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   This results in an index, idxForMax, corresponding to a quantized
+   value, qmax.  The all-pass filtered residual samples in the block are
+   then multiplied with a scaling factor scal=4.5/(10^qmax) to yield
+   normalized samples.
+
+   state_frgqTbl[64] = {1.000085, 1.071695, 1.140395, 1.206868,
+                  1.277188, 1.351503, 1.429380, 1.500727, 1.569049,
+                  1.639599, 1.707071, 1.781531, 1.840799, 1.901550,
+                  1.956695, 2.006750, 2.055474, 2.102787, 2.142819,
+                  2.183592, 2.217962, 2.257177, 2.295739, 2.332967,
+                  2.369248, 2.402792, 2.435080, 2.468598, 2.503394,
+                  2.539284, 2.572944, 2.605036, 2.636331, 2.668939,
+                  2.698780, 2.729101, 2.759786, 2.789834, 2.818679,
+                  2.848074, 2.877470, 2.906899, 2.936655, 2.967804,
+                  3.000115, 3.033367, 3.066355, 3.104231, 3.141499,
+                  3.183012, 3.222952, 3.265433, 3.308441, 3.350823,
+                  3.395275, 3.442793, 3.490801, 3.542514, 3.604064,
+                  3.666050, 3.740994, 3.830749, 3.938770, 4.101764}
+
+3.5.3.  Scalar Quantization
+
+   The normalized samples are quantized in the perceptually weighted
+   speech domain by a sample-by-sample scalar DPCM quantization as
+   depicted in Figure 3.3.  Each sample in the block is filtered by a
+   weighting filter Wk(z), specified in section 3.4, to form a weighted
+   speech sample x[n].  The target sample d[n] is formed by subtracting
+   a predicted sample y[n], where the prediction filter is given by
+
+           Pk(z) = 1 - 1 / Wk(z).
+
+               +-------+  x[n] +    d[n] +-----------+ u[n]
+   residual -->| Wk(z) |-------->(+)---->| Quantizer |------> quantized
+               +-------+       - /|\     +-----------+    |   residual
+                                  |                      \|/
+                             y[n] +--------------------->(+)
+                                  |                       |
+                                  |        +------+       |
+                                  +--------| Pk(z)|<------+
+                                           +------+
+
+   Figure 3.3.  Quantization of start state samples by DPCM in weighted
+   speech domain.
+
+   The coded state sample u[n] is obtained by quantizing d[n] with a 3-
+   bit quantizer with quantization table state_sq3Tbl.
+
+   state_sq3Tbl[8] = {-3.719849, -2.177490, -1.130005, -0.309692,
+                  0.444214, 1.329712, 2.436279, 3.983887}
+
+
+
+Andersen, et al.              Experimental                     [Page 18]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   The quantized samples are transformed back to the residual domain by
+   1) scaling with 1/scal; 2) time-reversing the scaled samples; 3)
+   filtering the time-reversed samples by the same all-pass filter, as
+   in section 3.5.2, by using circular convolution; and 4) time-
+   reversing the filtered samples.  (More detail is in section 4.2.)
+
+   A reference implementation of the start-state encoding can be found
+   in Appendix A.46.
+
+3.6.  Encoding the Remaining Samples
+
+   A dynamic codebook is used to encode 1) the 23/22 remaining samples
+   in the two sub-blocks containing the start state; 2) the sub-blocks
+   after the start state in time; and 3) the sub-blocks before the start
+   state in time.  Thus, the encoding target can be either the 23/22
+   samples remaining of the 2 sub-blocks containing the start state, or
+   a 40-sample sub-block.  This target can consist of samples that are
+   indexed forward in time or backward in time, depending on the
+   location of the start state.  The length of the target is denoted by
+   lTarget.
+
+   The coding is based on an adaptive codebook that is built from a
+   codebook memory that contains decoded LPC excitation samples from the
+   already encoded part of the block.  These samples are indexed in the
+   same time direction as is the target vector and end at the sample
+   instant prior to the first sample instant represented in the target
+   vector.  The codebook memory has length lMem, which is equal to
+   CB_MEML=147 for the two/four 40-sample sub-blocks and 85 for the
+   23/22-sample sub-block.
+
+   The following figure shows an overview of the encoding procedure.
+
+         +------------+    +---------------+    +-------------+
+      -> | 1. Decode  | -> | 2. Mem setup  | -> | 3. Perc. W. | ->
+         +------------+    +---------------+    +-------------+
+
+         +------------+    +-----------------+
+      -> | 4. Search  | -> | 5. Upd. Target  | ------------------>
+       | +------------+    +-----------------+ |
+       ----<-------------<-----------<----------
+                     stage=0..2
+
+         +----------------+
+      -> | 6. Recalc G[0] | ---------------> gains and CB indices
+         +----------------+
+
+   Figure 3.4.  Flow chart of the codebook search in the iLBC encoder.
+
+
+
+
+Andersen, et al.              Experimental                     [Page 19]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   1. Decode the part of the residual that has been encoded so far,
+      using the codebook without perceptual weighting.
+
+   2. Set up the memory by taking data from the decoded residual.  This
+      memory is used to construct codebooks.  For blocks preceding the
+      start state, both the decoded residual and the target are time
+      reversed (section 3.6.1).
+   3. Filter the memory + target with the perceptual weighting filter
+      (section 3.6.2).
+
+   4. Search for the best match between the target and the codebook
+      vector.  Compute the optimal gain for this match and quantize that
+      gain (section 3.6.4).
+
+   5. Update the perceptually weighted target by subtracting the
+      contribution from the selected codebook vector from the
+      perceptually weighted memory (quantized gain times selected
+      vector).  Repeat 4 and 5 for the two additional stages.
+
+   6. Calculate the energy loss due to encoding of the residual.  If
+      needed, compensate for this loss by an upscaling and
+      requantization of the gain for the first stage (section 3.7).
+
+   The following sections provide an in-depth description of the
+   different blocks of Figure 3.4.
+
+3.6.1.  Codebook Memory
+
+   The codebook memory is based on the already encoded sub-blocks, so
+   the available data for encoding increases for each new sub-block that
+   has been encoded.  Until enough sub-blocks have been encoded to fill
+   the codebook memory with data, it is padded with zeros.  The
+   following figure shows an example of the order in which the sub-
+   blocks are encoded for the 30 ms frame size if the start state is
+   located in the last 58 samples of sub-block 2 and 3.
+
+   +-----------------------------------------------------+
+   |  5     | 1  |///|////////|    2   |    3   |    4   |
+   +-----------------------------------------------------+
+
+   Figure 3.5.  The order from 1 to 5 in which the sub-blocks are
+   encoded.  The slashed area is the start state.
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 20]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   The first target sub-block to be encoded is number 1, and the
+   corresponding codebook memory is shown in the following figure.  As
+   the target vector comes before the start state in time, the codebook
+   memory and target vector are time reversed; thus, after the block has
+   been time reversed the search algorithm can be reused.  As only the
+   start state has been encoded so far, the last samples of the codebook
+   memory are padded with zeros.
+
+   +-------------------------
+   |zeros|\\\\\\\\|\\\\|  1 |
+   +-------------------------
+
+   Figure 3.6.  The codebook memory, length lMem=85 samples, and the
+   target vector 1, length 22 samples.
+
+   The next step is to encode sub-block 2 by using the memory that now
+   has increased since sub-block 1 has been encoded.  The following
+   figure shows the codebook memory for encoding of sub-block 2.
+
+   +-----------------------------------
+   | zeros | 1  |///|////////|    2   |
+   +-----------------------------------
+
+   Figure 3.7.  The codebook memory, length lMem=147 samples, and the
+   target vector 2, length 40 samples.
+
+   The next step is to encode sub-block 3 by using the memory which has
+   been increased yet again since sub-blocks 1 and 2 have been encoded,
+   but the sub-block still has to be padded with a few zeros.  The
+   following figure shows the codebook memory for encoding of sub-block
+   3.
+
+   +------------------------------------------
+   |zeros| 1  |///|////////|    2   |   3    |
+   +------------------------------------------
+
+   Figure 3.8.  The codebook memory, length lMem=147 samples, and the
+   target vector 3, length 40 samples.
+
+   The next step is to encode sub-block 4 by using the memory which now
+   has increased yet again since sub-blocks 1, 2, and 3 have been
+   encoded.  This time, the memory does not have to be padded with
+   zeros.  The following figure shows the codebook memory for encoding
+   of sub-block 4.
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 21]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   +------------------------------------------
+   |1|///|////////|    2   |   3    |   4    |
+   +------------------------------------------
+
+   Figure 3.9.  The codebook memory, length lMem=147 samples, and the
+   target vector 4, length 40 samples.
+
+   The final target sub-block to be encoded is number 5, and the
+   following figure shows the corresponding codebook memory.  As the
+   target vector comes before the start state in time, the codebook
+   memory and target vector are time reversed.
+
+   +-------------------------------------------
+   |  3  |   2    |\\\\\\\\|\\\\|  1 |   5    |
+   +-------------------------------------------
+
+   Figure 3.10.  The codebook memory, length lMem=147 samples, and the
+   target vector 5, length 40 samples.
+
+   For the case of 20 ms frames, the encoding procedure looks almost
+   exactly the same.  The only difference is that the size of the start
+   state is 57 samples and that there are only three sub-blocks to be
+   encoded.  The encoding order is the same as above, starting with the
+   23-sample target and then encoding the two remaining 40-sample sub-
+   blocks, first going forward in time and then going backward in time
+   relative to the start state.
+
+3.6.2.  Perceptual Weighting of Codebook Memory and Target
+
+   To provide a perceptual weighting of the coding error, a
+   concatenation of the codebook memory and the target to be coded is
+   all-pole filtered with the perceptual weighting filter specified in
+   section 3.4.  The filter state of the weighting filter is set to
+   zero.
+
+      in(0..(lMem-1))            = unweighted codebook memory
+      in(lMem..(lMem+lTarget-1)) = unweighted target signal
+
+
+      in -> Wk(z) -> filtered,
+          where Wk(z) is taken from the sub-block of the target
+
+      weighted codebook memory = filtered(0..(lMem-1))
+      weighted target signal = filtered(lMem..(lMem+lTarget-1))
+
+   The codebook search is done with the weighted codebook memory and the
+   weighted target, whereas the decoding and the codebook memory update
+   uses the unweighted codebook memory.
+
+
+
+Andersen, et al.              Experimental                     [Page 22]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+3.6.3.  Codebook Creation
+
+   The codebook for the search is created from the perceptually weighted
+   codebook memory.  It consists of two sections, where the first is
+   referred to as the base codebook and the second as the expanded
+   codebook, as it is created by linear combinations of the first.  Each
+   of these two sections also has a subsection referred to as the
+   augmented codebook.  The augmented codebook is only created and used
+   for the coding of the 40-sample sub-blocks and not for the 23/22-
+   sample sub-block case.  The codebook size used for the different
+   sub-blocks and different stages are summarized in the table below.
+
+                              Stage
+                        1               2 & 3
+           --------------------------------------------
+                22     128  (64+0)*2     128 (64+0)*2
+   Sub-    1:st 40     256  (108+20)*2   128 (44+20)*2
+   Blocks  2:nd 40     256  (108+20)*2   256 (108+20)*2
+           3:rd 40     256  (108+20)*2   256 (108+20)*2
+           4:th 40     256  (108+20)*2   256 (108+20)*2
+
+   Table 3.1.  Codebook sizes for the 30 ms mode.
+
+   Table 3.1 shows the codebook size for the different sub-blocks and
+   stages for 30 ms frames.  Inside the parentheses it shows how the
+   number of codebook vectors is distributed, within the two sections,
+   between the base/expanded codebook and the augmented base/expanded
+   codebook.  It should be interpreted in the following way:
+   (base/expanded cb + augmented base/expanded cb).  The total number of
+   codebook vectors for a specific sub-block and stage is given by the
+   following formula:
+
+   Tot. cb vectors = base cb + aug. base cb + exp. cb + aug. exp. cb
+
+   The corresponding values to Table 3.1 for 20 ms frames are only
+   slightly modified.  The short sub-block is 23 instead of 22 samples,
+   and the 3:rd and 4:th sub-frame are not present.
+
+3.6.3.1.  Creation of a Base Codebook
+
+   The base codebook is given by the perceptually weighted codebook
+   memory that is mentioned in section 3.5.3.  The different codebook
+   vectors are given by sliding a window of length 23/22 or 40, given by
+   variable lTarget, over the lMem-long perceptually weighted codebook
+   memory.  The indices are ordered so that the codebook vector
+   containing sample (lMem-lTarget-n) to (lMem-n-1) of the codebook
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 23]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   memory vector has index n, where n=0..lMem-lTarget.  Thus the total
+   number of base codebook vectors is lMem-lTarget+1, and the indices
+   are ordered from sample delay lTarget (23/22 or 40) to lMem+1 (86 or
+   148).
+
+3.6.3.2.  Codebook Expansion
+
+   The base codebook is expanded by a factor of 2, creating an
+   additional section in the codebook.  This new section is obtained by
+   filtering the base codebook, base_cb, with a FIR filter with filter
+   length CB_FILTERLEN=8.  The construction of the expanded codebook
+   compensates for the delay of four samples introduced by the FIR
+   filter.
+
+   cbfiltersTbl[CB_FILTERLEN]={-0.033691, 0.083740, -0.144043,
+                  0.713379, 0.806152, -0.184326,
+                  0.108887, -0.034180};
+
+                   ___
+                   \
+      exp_cb(k)=  + > cbfiltersTbl(i)*x(k-i+4)
+                   /__
+             i=0...(CB_FILTERLEN-1)
+
+      where x(j) = base_cb(j) for j=0..lMem-1 and 0 otherwise
+
+   The individual codebook vectors of the new filtered codebook, exp_cb,
+   and their indices are obtained in the same fashion as described above
+   for the base codebook.
+
+3.6.3.3.  Codebook Augmentation
+
+   For cases where entire sub-blocks are encoded, i.e., cbveclen=40,
+   base and expanded codebooks are augmented to increase codebook
+   richness.  The codebooks are augmented by vectors produced by
+   interpolation of segments.  The base and expanded codebook,
+   constructed above, consists of vectors corresponding to sample delays
+   in the range from cbveclen to lMem.  The codebook augmentation
+   attempts to augment these codebooks with vectors corresponding to
+   sample delays from 20 to 39.  However, not all of these samples are
+   present in the base codebook and expanded codebook, respectively.
+   Therefore, the augmentation vectors are constructed as linear
+   combinations between samples corresponding to sample delays in the
+   range 20 to 39.  The general idea of this procedure is presented in
+   the following figures and text.  The procedure is performed for both
+   the base codebook and the expanded codebook.
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 24]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       - - ------------------------|
+    codebook memory                |
+       - - ------------------------|
+                  |-5-|---15---|-5-|
+                  pi  pp       po
+
+                      |        |                       Codebook vector
+                      |---15---|-5-|-----20-----|   <- corresponding to
+                          i     ii      iii            sample delay 20
+
+   Figure 3.11.  Generation of the first augmented codebook.
+
+   Figure 3.11 shows the codebook memory with pointers pi, pp, and po,
+   where pi points to sample 25, pp to sample 20, and po to sample 5.
+   Below the codebook memory, the augmented codebook vector
+   corresponding to sample delay 20 is drawn.  Segment i consists of
+   fifteen samples from pointer pp and forward in time.  Segment ii
+   consists of five interpolated samples from pi and forward and from po
+   and forward.  The samples are linearly interpolated with weights
+   [0.0, 0.2, 0.4, 0.6, 0.8] for pi and weights [1.0, 0.8, 0.6, 0.4,
+   0.2] for po.  Segment iii consists of twenty samples from pp and
+   forward.  The augmented codebook vector corresponding to sample delay
+   21 is produced by moving pointers pp and pi one sample backward in
+   time.  This gives us the following figure.
+
+       - - ------------------------|
+    codebook memory                |
+       - - ------------------------|
+                  |-5-|---16---|-5-|
+                  pi  pp       po
+
+                      |        |                       Codebook vector
+                      |---16---|-5-|-----19-----|   <- corresponding to
+                          i     ii      iii            sample delay 21
+
+   Figure 3.12.  Generation of the second augmented codebook.
+
+   Figure 3.12 shows the codebook memory with pointers pi, pp and po
+   where pi points to sample 26, pp to sample 21, and po to sample 5.
+   Below the codebook memory, the augmented codebook vector
+   corresponding to sample delay 21 is drawn.  Segment i now consists of
+   sixteen samples from pp and forward.  Segment ii consists of five
+   interpolated samples from pi and forward and from po and forward, and
+   the interpolation weights are the same throughout the procedure.
+   Segment iii consists of nineteen samples from pp and forward.  The
+   same procedure of moving the two pointers is continued until the last
+   augmented vector corresponding to sample delay 39 has been created.
+   This gives a total of twenty new codebook vectors to each of the two
+
+
+
+Andersen, et al.              Experimental                     [Page 25]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   sections.  Thus the total number of codebook vectors for each of the
+   two sections, when including the augmented codebook, becomes lMem-
+   SUBL+1+SUBL/2.  This is provided that augmentation is evoked, i.e.,
+   that lTarget=SUBL.
+
+3.6.4.  Codebook Search
+
+   The codebook search uses the codebooks described in the sections
+   above to find the best match of the perceptually weighted target, see
+   section 3.6.2.  The search method is a multi-stage gain-shape
+   matching performed as follows.  At each stage the best shape vector
+   is identified, then the gain is calculated and quantized, and finally
+   the target is updated in preparation for the next codebook search
+   stage.  The number of stages is CB_NSTAGES=3.
+
+   If the target is the 23/22-sample vector the codebooks are indexed so
+   that the base codebook is followed by the expanded codebook.  If the
+   target is 40 samples the order is as follows: base codebook,
+   augmented base codebook, expanded codebook, and augmented expanded
+   codebook.  The size of each codebook section and its corresponding
+   augmented section is given by Table 3.1 in section 3.6.3.
+
+   For example, when the second 40-sample sub-block is coded, indices 0
+   - 107 correspond to the base codebook, 108 - 127 correspond to the
+   augmented base codebook, 128 - 235 correspond to the expanded
+   codebook, and indices 236 - 255 correspond to the augmented expanded
+   codebook.  The indices are divided in the same fashion for all stages
+   in the example.  Only in the case of coding the first 40-sample sub-
+   block is there a difference between stages (see Table 3.1).
+
+3.6.4.1.  Codebook Search at Each Stage
+
+   The codebooks are searched to find the best match to the target at
+   each stage.  When the best match is found, the target is updated and
+   the next-stage search is started.  The three chosen codebook vectors
+   and their corresponding gains constitute the encoded sub-block.  The
+   best match is decided by the following three criteria:
+
+   1. Compute the measure
+
+      (target*cbvec)^2 / ||cbvec||^2
+
+   for all codebook vectors, cbvec, and choose the codebook vector
+   maximizing the measure.  The expression (target*cbvec) is the dot
+   product between the target vector to be coded and the codebook vector
+   for which we compute the measure.  The norm, ||x||, is defined as the
+   square root of (x*x).
+
+
+
+
+Andersen, et al.              Experimental                     [Page 26]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   2. The absolute value of the gain, corresponding to the chosen
+      codebook vector, cbvec, must be smaller than a fixed limit,
+      CB_MAXGAIN=1.3:
+
+            |gain| < CB_MAXGAIN
+
+      where the gain is computed in the following way:
+
+            gain = (target*cbvec) / ||cbvec||^2
+
+   3. For the first stage, the dot product of the chosen codebook vector
+      and target must be positive:
+
+      target*cbvec > 0
+
+   In practice the above criteria are used in a sequential search
+   through all codebook vectors.  The best match is found by registering
+   a new max measure and index whenever the previously registered max
+   measure is surpassed and all other criteria are fulfilled.  If none
+   of the codebook vectors fulfill (2) and (3), the first codebook
+   vector is selected.
+
+3.6.4.2.  Gain Quantization at Each Stage
+
+   The gain follows as a result of the computation
+
+      gain = (target*cbvec) / ||cbvec||^2
+
+   for the optimal codebook vector found by the procedure in section
+   3.6.4.1.
+
+   The three stages quantize the gain, using 5, 4, and 3 bits,
+   respectively.  In the first stage, the gain is limited to positive
+   values.  This gain is quantized by finding the nearest value in the
+   quantization table gain_sq5Tbl.
+
+   gain_sq5Tbl[32]={0.037476, 0.075012, 0.112488, 0.150024, 0.187500,
+                  0.224976, 0.262512, 0.299988, 0.337524, 0.375000,
+                  0.412476, 0.450012, 0.487488, 0.525024, 0.562500,
+                  0.599976, 0.637512, 0.674988, 0.712524, 0.750000,
+                  0.787476, 0.825012, 0.862488, 0.900024, 0.937500,
+                  0.974976, 1.012512, 1.049988, 1.087524, 1.125000,
+                  1.162476, 1.200012}
+
+   The gains of the subsequent two stages can be either positive or
+   negative.  The gains are quantized by using a quantization table
+   times a scale factor.  The second stage uses the table gain_sq4Tbl,
+   and the third stage uses gain_sq3Tbl.  The scale factor equates 0.1
+
+
+
+Andersen, et al.              Experimental                     [Page 27]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   or the absolute value of the quantized gain representation value
+   obtained in the previous stage, whichever is larger.  Again, the
+   resulting gain index is the index to the nearest value of the
+   quantization table times the scale factor.
+
+        gainQ = scaleFact * gain_sqXTbl[index]
+
+   gain_sq4Tbl[16]={-1.049988, -0.900024, -0.750000, -0.599976,
+                  -0.450012, -0.299988, -0.150024, 0.000000, 0.150024,
+                  0.299988, 0.450012, 0.599976, 0.750000, 0.900024,
+                  1.049988, 1.200012}
+
+   gain_sq3Tbl[8]={-1.000000, -0.659973, -0.330017, 0.000000,
+                  0.250000, 0.500000, 0.750000, 1.000000}
+
+3.6.4.3.  Preparation of Target for Next Stage
+
+   Before performing the search for the next stage, the perceptually
+   weighted target vector is updated by subtracting from it the selected
+   codebook vector (from the perceptually weighted codebook) times the
+   corresponding quantized gain.
+
+      target[i] = target[i] - gainQ * selected_vec[i];
+
+   A reference implementation of the codebook encoding is found in
+   Appendix A.34.
+
+3.7.  Gain Correction Encoding
+
+   The start state is quantized in a relatively model independent manner
+   using 3 bits per sample.  In contrast, the remaining parts of the
+   block are encoded by using an adaptive codebook.  This codebook will
+   produce high matching accuracy whenever there is a high correlation
+   between the target and the best codebook vector.  For unvoiced speech
+   segments and background noises, this is not necessarily so, which,
+   due to the nature of the squared error criterion, results in a coded
+   signal with less power than the target signal.  As the coded start
+   state has good power matching to the target, the result is a power
+   fluctuation within the encoded frame.  Perceptually, the main problem
+   with this is that the time envelope of the signal energy becomes
+   unsteady.  To overcome this problem, the gains for the codebooks are
+   re-scaled after the codebook encoding by searching for a new gain
+   factor for the first stage codebook that provides better power
+   matching.
+
+   First, the energy for the target signal, tene, is computed along with
+   the energy for the coded signal, cene, given by the addition of the
+   three gain scaled codebook vectors.  Because the gains of the second
+
+
+
+Andersen, et al.              Experimental                     [Page 28]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   and third stage scale with the gain of the first stage, when the
+   first stage gain is changed from gain[0] to gain_sq5Tbl[i] the energy
+   of the coded signal changes from cene to
+
+      cene*(gain_sq5Tbl[i]*gain_sq5Tbl[i])/(gain[0]*gain[0])
+
+   where gain[0] is the gain for the first stage found in the original
+   codebook search.  A refined search is performed by testing the gain
+   indices i=0 to 31, and as long as the new codebook energy as given
+   above is less than tene, the gain index for stage 1 is increased.  A
+   restriction is applied so that the new gain value for stage 1 cannot
+   be more than two times higher than the original value found in the
+   codebook search.  Note that by using this method we do not change the
+   shape of the encoded vector, only the gain or amplitude.
+
+3.8.  Bitstream Definition
+
+   The total number of bits used to describe one frame of 20 ms speech
+   is 304, which fits in 38 bytes and results in a bit rate of 15.20
+   kbit/s.  For the case of a frame length of 30 ms speech, the total
+   number of bits used is 400, which fits in 50 bytes and results in a
+   bit rate of 13.33 kbit/s.  In the bitstream definition, the bits are
+   distributed into three classes according to their bit error or loss
+   sensitivity.  The most sensitive bits (class 1) are placed first in
+   the bitstream for each frame.  The less sensitive bits (class 2) are
+   placed after the class 1 bits.  The least sensitive bits (class 3)
+   are placed at the end of the bitstream for each frame.
+
+   In the 20/30 ms frame length cases for each class, the following hold
+   true: The class 1 bits occupy a total of 6/8 bytes (48/64 bits), the
+   class 2 bits occupy 8/12 bytes (64/96 bits), and the class 3 bits
+   occupy 24/30 bytes (192/240 bits).  This distribution of the bits
+   enables the use of uneven level protection (ULP) as is exploited in
+   the payload format definition for iLBC [1].  The detailed bit
+   allocation is shown in the table below.  When a quantization index is
+   distributed between more classes, the more significant bits belong to
+   the lowest class.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 29]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   Bitstream structure:
+
+   ------------------------------------------------------------------+
+   Parameter                         |       Bits Class <1,2,3>      |
+                                     |  20 ms frame  |  30 ms frame  |
+   ----------------------------------+---------------+---------------+
+                            Split 1  |   6 <6,0,0>   |   6 <6,0,0>   |
+                   LSF 1    Split 2  |   7 <7,0,0>   |   7 <7,0,0>   |
+   LSF                      Split 3  |   7 <7,0,0>   |   7 <7,0,0>   |
+                   ------------------+---------------+---------------+
+                            Split 1  | NA (Not Appl.)|   6 <6,0,0>   |
+                   LSF 2    Split 2  |      NA       |   7 <7,0,0>   |
+                            Split 3  |      NA       |   7 <7,0,0>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  20 <20,0,0>  |  40 <40,0,0>  |
+   ----------------------------------+---------------+---------------+
+   Block Class                       |   2 <2,0,0>   |   3 <3,0,0>   |
+   ----------------------------------+---------------+---------------+
+   Position 22 sample segment        |   1 <1,0,0>   |   1 <1,0,0>   |
+   ----------------------------------+---------------+---------------+
+   Scale Factor State Coder          |   6 <6,0,0>   |   6 <6,0,0>   |
+   ----------------------------------+---------------+---------------+
+                   Sample 0          |   3 <0,1,2>   |   3 <0,1,2>   |
+   Quantized       Sample 1          |   3 <0,1,2>   |   3 <0,1,2>   |
+   Residual           :              |   :    :      |   :    :      |
+   State              :              |   :    :      |   :    :      |
+   Samples            :              |   :    :      |   :    :      |
+                   Sample 56         |   3 <0,1,2>   |   3 <0,1,2>   |
+                   Sample 57         |      NA       |   3 <0,1,2>   |
+                   ------------------+---------------+---------------+
+                   Sum               | 171 <0,57,114>| 174 <0,58,116>|
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   7 <6,0,1>   |   7 <4,2,1>   |
+   CB for 22/23             Stage 2  |   7 <0,0,7>   |   7 <0,0,7>   |
+   sample block             Stage 3  |   7 <0,0,7>   |   7 <0,0,7>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  21 <6,0,15>  |  21 <4,2,15>  |
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   5 <2,0,3>   |   5 <1,1,3>   |
+   Gain for 22/23           Stage 2  |   4 <1,1,2>   |   4 <1,1,2>   |
+   sample block             Stage 3  |   3 <0,0,3>   |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  12 <3,1,8>   |  12 <2,2,8>   |
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   8 <7,0,1>   |   8 <6,1,1>   |
+               sub-block 1  Stage 2  |   7 <0,0,7>   |   7 <0,0,7>   |
+                            Stage 3  |   7 <0,0,7>   |   7 <0,0,7>   |
+                   ------------------+---------------+---------------+
+
+
+
+Andersen, et al.              Experimental                     [Page 30]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                            Stage 1  |   8 <0,0,8>   |   8 <0,7,1>   |
+               sub-block 2  Stage 2  |   8 <0,0,8>   |   8 <0,0,8>   |
+   Indices                  Stage 3  |   8 <0,0,8>   |   8 <0,0,8>   |
+   for CB          ------------------+---------------+---------------+
+   sub-blocks               Stage 1  |      NA       |   8 <0,7,1>   |
+               sub-block 3  Stage 2  |      NA       |   8 <0,0,8>   |
+                            Stage 3  |      NA       |   8 <0,0,8>   |
+                   ------------------+---------------+---------------+
+                            Stage 1  |      NA       |   8 <0,7,1>   |
+               sub-block 4  Stage 2  |      NA       |   8 <0,0,8>   |
+                            Stage 3  |      NA       |   8 <0,0,8>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  46 <7,0,39>  |  94 <6,22,66> |
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   5 <1,2,2>   |   5 <1,2,2>   |
+               sub-block 1  Stage 2  |   4 <1,1,2>   |   4 <1,2,1>   |
+                            Stage 3  |   3 <0,0,3>   |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                            Stage 1  |   5 <1,1,3>   |   5 <0,2,3>   |
+               sub-block 2  Stage 2  |   4 <0,2,2>   |   4 <0,2,2>   |
+                            Stage 3  |   3 <0,0,3>   |   3 <0,0,3>   |
+   Gains for       ------------------+---------------+---------------+
+   sub-blocks               Stage 1  |      NA       |   5 <0,1,4>   |
+               sub-block 3  Stage 2  |      NA       |   4 <0,1,3>   |
+                            Stage 3  |      NA       |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                            Stage 1  |      NA       |   5 <0,1,4>   |
+               sub-block 4  Stage 2  |      NA       |   4 <0,1,3>   |
+                            Stage 3  |      NA       |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  24 <3,6,15>  |  48 <2,12,34> |
+   ----------------------------------+---------------+---------------+
+   Empty frame indicator             |   1 <0,0,1>   |   1 <0,0,1>   |
+   -------------------------------------------------------------------
+   SUM                                 304 <48,64,192> 400 <64,96,240>
+
+   Table 3.2.  The bitstream definition for iLBC for both the 20 ms
+   frame size mode and the 30 ms frame size mode.
+
+   When packetized into the payload, the bits MUST be sorted as follows:
+   All the class 1 bits in the order (from top to bottom) as specified
+   in the table, all the class 2 bits (from top to bottom), and all the
+   class 3 bits in the same sequential order.  The last bit, the empty
+   frame indicator, SHOULD be set to zero by the encoder.  If this bit
+   is set to 1 the decoder SHOULD treat the data as a lost frame.  For
+   example, this bit can be set to 1 to indicate lost frame for file
+   storage format, as in [1].
+
+
+
+
+Andersen, et al.              Experimental                     [Page 31]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+4.  Decoder Principles
+
+   This section describes the principles of each component of the
+   decoder algorithm.
+
+              +-------------+    +--------+    +---------------+
+   payload -> | 1. Get para | -> | 2. LPC | -> | 3. Sc Dequant | ->
+              +-------------+    +--------+    +---------------+
+
+              +-------------+    +------------------+
+           -> | 4. Mem setup| -> | 5. Construct res |------->
+           |  +-------------+    +------------------+   |
+           ---------<-----------<-----------<------------
+                     Sub-frame 0...2/4 (20 ms/30 ms)
+
+              +----------------+    +----------+
+           -> | 6. Enhance res | -> | 7. Synth | ------------>
+              +----------------+    +----------+
+
+              +-----------------+
+           -> | 8. Post Process | ----------------> decoded speech
+              +-----------------+
+
+   Figure 4.1.  Flow chart of the iLBC decoder.  If a frame was lost,
+   steps 1 to 5 SHOULD be replaced by a PLC algorithm.
+
+   1. Extract the parameters from the bitstream.
+
+   2. Decode the LPC and interpolate (section 4.1).
+
+   3. Construct the 57/58-sample start state (section 4.2).
+
+   4. Set up the memory by using data from the decoded residual.  This
+      memory is used for codebook construction.  For blocks preceding
+      the start state, both the decoded residual and the target are time
+      reversed.  Sub-frames are decoded in the same order as they were
+      encoded.
+
+   5. Construct the residuals of this sub-frame (gain[0]*cbvec[0] +
+      gain[1]*cbvec[1] + gain[2]*cbvec[2]).  Repeat 4 and 5 until the
+      residual of all sub-blocks has been constructed.
+
+   6. Enhance the residual with the post filter (section 4.6).
+
+   7. Synthesis of the residual (section 4.7).
+
+   8. Post process with HP filter, if desired (section 4.8).
+
+
+
+
+Andersen, et al.              Experimental                     [Page 32]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+4.1.  LPC Filter Reconstruction
+
+   The decoding of the LP filter parameters is very straightforward.
+   For a set of three/six indices, the corresponding LSF vector(s) are
+   found by simple table lookup.  For each of the LSF vectors, the three
+   split vectors are concatenated to obtain qlsf1 and qlsf2,
+   respectively (in the 20 ms mode only one LSF vector, qlsf, is
+   constructed).  The next step is the stability check described in
+   section 3.2.5 followed by the interpolation scheme described in
+   section 3.2.6 (3.2.7 for 20 ms frames).  The only difference is that
+   only the quantized LSFs are known at the decoder, and hence the
+   unquantized LSFs are not processed.
+
+   A reference implementation of the LPC filter reconstruction is given
+   in Appendix A.36.
+
+4.2.  Start State Reconstruction
+
+   The scalar encoded STATE_SHORT_LEN=58 (STATE_SHORT_LEN=57 in the 20
+   ms mode) state samples are reconstructed by 1) forming a set of
+   samples (by table lookup) from the index stream idxVec[n], 2)
+   multiplying the set with 1/scal=(10^qmax)/4.5, 3) time reversing the
+   57/58 samples, 4) filtering the time reversed block with the
+   dispersion (all-pass) filter used in the encoder (as described in
+   section 3.5.2); this compensates for the phase distortion of the
+   earlier filter operation, and 5) reversing the 57/58 samples from the
+   previous step.
+
+   in(0..(STATE_SHORT_LEN-1)) = time reversed samples from table
+                                look-up,
+                                idxVecDec((STATE_SHORT_LEN-1)..0)
+
+   in(STATE_SHORT_LEN..(2*STATE_SHORT_LEN-1)) = 0
+
+   Pk(z) = A~rk(z)/A~k(z), where
+                                  ___
+                                  \
+   A~rk(z)= z^(-LPC_FILTERORDER) + > a~ki*z^(i-(LPC_FILTERORDER-1))
+                                  /__
+                              i=0...(LPC_FILTERORDER-1)
+
+   and A~k(z) is taken from the block where the start state begins
+
+   in -> Pk(z) -> filtered
+
+   out(k) = filtered(STATE_SHORT_LEN-1-k) +
+                           filtered(2*STATE_SHORT_LEN-1-k),
+                                         k=0..(STATE_SHORT_LEN-1)
+
+
+
+Andersen, et al.              Experimental                     [Page 33]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   The remaining 23/22 samples in the state are reconstructed by the
+   same adaptive codebook technique described in section 4.3.  The
+   location bit determines whether these are the first or the last 23/22
+   samples of the 80-sample state vector.  If the remaining 23/22
+   samples are the first samples, then the scalar encoded
+   STATE_SHORT_LEN state samples are time-reversed before initialization
+   of the adaptive codebook memory vector.
+
+   A reference implementation of the start state reconstruction is given
+   in Appendix A.44.
+
+4.3.  Excitation Decoding Loop
+
+   The decoding of the LPC excitation vector proceeds in the same order
+   in which the residual was encoded at the encoder.  That is, after the
+   decoding of the entire 80-sample state vector, the forward sub-blocks
+   (corresponding to samples occurring after the state vector samples)
+   are decoded, and then the backward sub-blocks (corresponding to
+   samples occurring before the state vector) are decoded, resulting in
+   a fully decoded block of excitation signal samples.
+
+   In particular, each sub-block is decoded by using the multistage
+   adaptive codebook decoding module described in section 4.4.  This
+   module relies upon an adaptive codebook memory constructed before
+   each run of the adaptive codebook decoding.  The construction of the
+   adaptive codebook memory in the decoder is identical to the method
+   outlined in section 3.6.3, except that it is done on the codebook
+   memory without perceptual weighting.
+
+   For the initial forward sub-block, the last STATE_LEN=80 samples of
+   the length CB_LMEM=147 adaptive codebook memory are filled with the
+   samples of the state vector.  For subsequent forward sub-blocks, the
+   first SUBL=40 samples of the adaptive codebook memory are discarded,
+   the remaining samples are shifted by SUBL samples toward the
+   beginning of the vector, and the newly decoded SUBL=40 samples are
+   placed at the end of the adaptive codebook memory.  For backward
+   sub-blocks, the construction is similar, except that every vector of
+   samples involved is first time reversed.
+
+   A reference implementation of the excitation decoding loop is found
+   in Appendix A.5.
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 34]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+4.4.  Multistage Adaptive Codebook Decoding
+
+   The Multistage Adaptive Codebook Decoding module is used at both the
+   sender (encoder) and the receiver (decoder) ends to produce a
+   synthetic signal in the residual domain that is eventually used to
+   produce synthetic speech.  The module takes the index values used to
+   construct vectors that are scaled and summed together to produce a
+   synthetic signal that is the output of the module.
+
+4.4.1.  Construction of the Decoded Excitation Signal
+
+   The unpacked index values provided at the input to the module are
+   references to extended codebooks, which are constructed as described
+   in section 3.6.3, except that they are based on the codebook memory
+   without the perceptual weighting.  The unpacked three indices are
+   used to look up three codebook vectors.  The unpacked three gain
+   indices are used to decode the corresponding 3 gains.  In this
+   decoding, the successive rescaling, as described in section 3.6.4.2,
+   is applied.
+
+   A reference implementation of the adaptive codebook decoding is
+   listed in Appendix A.32.
+
+4.5.  Packet Loss Concealment
+
+   If packet loss occurs, the decoder receives a signal saying that
+   information regarding a block is lost.  For such blocks it is
+   RECOMMENDED to use a Packet Loss Concealment (PLC) unit to create a
+   decoded signal that masks the effect of that packet loss.  In the
+   following we will describe an example of a PLC unit that can be used
+   with the iLBC codec.  As the PLC unit is used only at the decoder,
+   the PLC unit does not affect interoperability between
+   implementations.  Other PLC implementations MAY therefore be used.
+
+   The PLC described operates on the LP filters and the excitation
+   signals and is based on the following principles:
+
+4.5.1.  Block Received Correctly and Previous Block Also Received
+
+   If the block is received correctly, the PLC only records state
+   information of the current block that can be used in case the next
+   block is lost.  The LP filter coefficients for each sub-block and the
+   entire decoded excitation signal are all saved in the decoder state
+   structure.  All of this information will be needed if the following
+   block is lost.
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 35]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+4.5.2.  Block Not Received
+
+   If the block is not received, the block substitution is based on a
+   pitch-synchronous repetition of the excitation signal, which is
+   filtered by the last LP filter of the previous block.  The previous
+   block's information is stored in the decoder state structure.
+
+   A correlation analysis is performed on the previous block's
+   excitation signal in order to detect the amount of pitch periodicity
+   and a pitch value.  The correlation measure is also used to decide on
+   the voicing level (the degree to which the previous block's
+   excitation was a voiced or roughly periodic signal).  The excitation
+   in the previous block is used to create an excitation for the block
+   to be substituted, such that the pitch of the previous block is
+   maintained.  Therefore, the new excitation is constructed in a
+   pitch-synchronous manner.  In order to avoid a buzzy-sounding
+   substituted block, a random excitation is mixed with the new pitch
+   periodic excitation, and the relative use of the two components is
+   computed from the correlation measure (voicing level).
+
+   For the block to be substituted, the newly constructed excitation
+   signal is then passed through the LP filter to produce the speech
+   that will be substituted for the lost block.
+
+   For several consecutive lost blocks, the packet loss concealment
+   continues in a similar manner.  The correlation measure of the last
+   block received is still used along with the same pitch value.  The LP
+   filters of the last block received are also used again.  The energy
+   of the substituted excitation for consecutive lost blocks is
+   decreased, leading to a dampened excitation, and therefore to
+   dampened speech.
+
+4.5.3.  Block Received Correctly When Previous Block Not Received
+
+   For the case in which a block is received correctly when the previous
+   block was not, the correctly received block's directly decoded speech
+   (based solely on the received block) is not used as the actual
+   output.  The reason for this is that the directly decoded speech does
+   not necessarily smoothly merge into the synthetic speech generated
+   for the previous lost block.  If the two signals are not smoothly
+   merged, an audible discontinuity is accidentally produced.
+   Therefore, a correlation analysis between the two blocks of
+   excitation signal (the excitation of the previous concealed block and
+   that of the current received block) is performed to find the best
+   phase match.  Then a simple overlap-add procedure is performed to
+   merge the previous excitation smoothly into the current block's
+   excitation.
+
+
+
+
+Andersen, et al.              Experimental                     [Page 36]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   The exact implementation of the packet loss concealment does not
+   influence interoperability of the codec.
+
+   A reference implementation of the packet loss concealment is
+   suggested in Appendix A.14.  Exact compliance with this suggested
+   algorithm is not needed for a reference implementation to be fully
+   compatible with the overall codec specification.
+
+4.6.  Enhancement
+
+   The decoder contains an enhancement unit that operates on the
+   reconstructed excitation signal.  The enhancement unit increases the
+   perceptual quality of the reconstructed signal by reducing the
+   speech-correlated noise in the voiced speech segments.  Compared to
+   traditional postfilters, the enhancer has an advantage in that it can
+   only modify the excitation signal slightly.  This means that there is
+   no risk of over enhancement.  The enhancer works very similarly for
+   both the 20 ms frame size mode and the 30 ms frame size mode.
+
+   For the mode with 20 ms frame size, the enhancer uses a memory of six
+   80-sample excitation blocks prior in time plus the two new 80-sample
+   excitation blocks.  For each block of 160 new unenhanced excitation
+   samples, 160 enhanced excitation samples are produced.  The enhanced
+   excitation is 40-sample delayed compared to the unenhanced
+   excitation, as the enhancer algorithm uses lookahead.
+
+   For the mode with 30 ms frame size, the enhancer uses a memory of
+   five 80-sample excitation blocks prior in time plus the three new
+   80-sample excitation blocks.  For each block of 240 new unenhanced
+   excitation samples, 240 enhanced excitation samples are produced.
+   The enhanced excitation is 80-sample delayed compared to the
+   unenhanced excitation, as the enhancer algorithm uses lookahead.
+
+   Outline of Enhancer
+
+   The speech enhancement unit operates on sub-blocks of 80 samples,
+   which means that there are two/three 80 sample sub-blocks per frame.
+   Each of these two/three sub-blocks is enhanced separately, but in an
+   analogous manner.
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 37]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   unenhanced residual
+           |
+           |   +---------------+    +--------------+
+           +-> | 1. Pitch Est  | -> | 2. Find PSSQ | -------->
+               +---------------+  | +--------------+
+                                  +-----<-------<------<--+
+               +------------+         enh block 0..1/2    |
+            -> | 3. Smooth  |                             |
+               +------------+                             |
+                 \                                        |
+                 /\                                       |
+                /  \   Already                            |
+               / 4. \----------->----------->-----------+ |
+               \Crit/ Fulfilled                         | |
+                \? /                                    v |
+                 \/                                     | |
+                  \  +-----------------+    +---------+ | |
+              Not +->| 5. Use Constr.  | -> | 6. Mix  | ----->
+           Fulfilled +-----------------+    +---------+
+
+            ---------------> enhanced residual
+
+   Figure 4.2.  Flow chart of the enhancer.
+
+   1. Pitch estimation of each of the two/three new 80-sample blocks.
+
+   2. Find the pitch-period-synchronous sequence n (for block k) by a
+      search around the estimated pitch value.  Do this for n=1,2,3,
+      -1,-2,-3.
+
+   3. Calculate the smoothed residual generated by the six pitch-
+      period-synchronous sequences from prior step.
+
+   4. Check if the smoothed residual satisfies the criterion (section
+      4.6.4).
+
+   5. Use constraint to calculate mixing factor (section 4.6.5).
+
+   6. Mix smoothed signal with unenhanced residual (pssq(n) n=0).
+
+   The main idea of the enhancer is to find three 80 sample blocks
+   before and three 80-sample blocks after the analyzed unenhanced sub-
+   block and to use these to improve the quality of the excitation in
+   that sub-block.  The six blocks are chosen so that they have the
+   highest possible correlation with the unenhanced sub-block that is
+   being enhanced.  In other words, the six blocks are pitch-period-
+   synchronous sequences to the unenhanced sub-block.
+
+
+
+
+Andersen, et al.              Experimental                     [Page 38]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   A linear combination of the six pitch-period-synchronous sequences is
+   calculated that approximates the sub-block.  If the squared error
+   between the approximation and the unenhanced sub-block is small
+   enough, the enhanced residual is set equal to this approximation.
+   For the cases when the squared error criterion is not fulfilled, a
+   linear combination of the approximation and the unenhanced residual
+   forms the enhanced residual.
+
+4.6.1.  Estimating the Pitch
+
+   Pitch estimates are needed to determine the locations of the pitch-
+   period-synchronous sequences in a complexity-efficient way.  For each
+   of the new two/three sub-blocks, a pitch estimate is calculated by
+   finding the maximum correlation in the range from lag 20 to lag 120.
+   These pitch estimates are used to narrow down the search for the best
+   possible pitch-period-synchronous sequences.
+
+4.6.2.  Determination of the Pitch-Synchronous Sequences
+
+   Upon receiving the pitch estimates from the prior step, the enhancer
+   analyzes and enhances one 80-sample sub-block at a time.  The pitch-
+   period-synchronous-sequences pssq(n) can be viewed as vectors of
+   length 80 samples each shifted n*lag samples from the current sub-
+   block.  The six pitch-period-synchronous-sequences, pssq(-3) to
+   pssq(-1) and pssq(1) to pssq(3), are found one at a time by the steps
+   below:
+
+   1) Calculate the estimate of the position of the pssq(n).  For
+      pssq(n) in front of pssq(0) (n > 0), the location of the pssq(n)
+      is estimated by moving one pitch estimate forward in time from the
+      exact location of pssq(n-1).  Similarly, pssq(n) behind pssq(0) (n
+      < 0) is estimated by moving one pitch estimate backward in time
+      from the exact location of pssq(n+1).  If the estimated pssq(n)
+      vector location is totally within the enhancer memory (Figure
+      4.3), steps 2, 3, and 4 are performed, otherwise the pssq(n) is
+      set to zeros.
+
+   2) Compute the correlation between the unenhanced excitation and
+      vectors around the estimated location interval of pssq(n).  The
+      correlation is calculated in the interval estimated location +/- 2
+      samples.  This results in five correlation values.
+
+   3) The five correlation values are upsampled by a factor of 4, by
+      using four simple upsampling filters (MA filters with coefficients
+      upsFilter1.. upsFilter4).  Within these the maximum value is
+      found, which specifies the best pitch-period with a resolution of
+      a quarter of a sample.
+
+
+
+
+Andersen, et al.              Experimental                     [Page 39]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+      upsFilter1[7]={0.000000 0.000000 0.000000 1.000000
+             0.000000 0.000000 0.000000}
+      upsFilter2[7]={0.015625 -0.076904 0.288330 0.862061
+            -0.106445 0.018799 -0.015625}
+      upsFilter3[7]={0.023682 -0.124268 0.601563 0.601563
+            -0.124268 0.023682 -0.023682}
+      upsFilter4[7]={0.018799 -0.106445 0.862061 0.288330
+            -0.076904 0.015625 -0.018799}
+
+   4) Generate the pssq(n) vector by upsampling of the excitation memory
+      and extracting the sequence that corresponds to the lag delay that
+      was calculated in prior step.
+
+   With the steps above, all the pssq(n) can be found in an iterative
+   manner, first moving backward in time from pssq(0) and then forward
+   in time from pssq(0).
+
+
+   0              159             319             479             639
+   +---------------------------------------------------------------+
+   |  -5   |  -4   |  -3   |  -2   |  -1   |   0   |   1   |   2   |
+   +---------------------------------------------------------------+
+                                               |pssq 0 |
+                                          |pssq -1| |pssq 1 |
+                                       |pssq -2|       |pssq 2 |
+                                    |pssq -3|             |pssq 3 |
+
+   Figure 4.3.  Enhancement for 20 ms frame size.
+
+   Figure 4.3 depicts pitch-period-synchronous sequences in the
+   enhancement of the first 80 sample block in the 20 ms frame size
+   mode.  The unenhanced signal input is stored in the last two sub-
+   blocks (1 - 2), and the six other sub-blocks contain unenhanced
+   residual prior-in-time.  We perform the enhancement algorithm on two
+   blocks of 80 samples, where the first of the two blocks consists of
+   the last 40 samples of sub-block 0 and the first 40 samples of sub-
+   block 1.  The second 80-sample block consists of the last 40 samples
+   of sub-block 1 and the first 40 samples of sub-block 2.
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 40]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   0              159             319             479             639
+   +---------------------------------------------------------------+
+   |  -4   |  -3   |  -2   |  -1   |   0   |   1   |   2   |   3   |
+   +---------------------------------------------------------------+
+                                   |pssq 0 |
+                              |pssq -1| |pssq 1 |
+                           |pssq -2|       |pssq 2 |
+                        |pssq -3|             |pssq 3 |
+
+   Figure 4.4.  Enhancement for 30 ms frame size.
+
+   Figure 4.4 depicts pitch-period-synchronous sequences in the
+   enhancement of the first 80-sample block in the 30 ms frame size
+   mode.  The unenhanced signal input is stored in the last three sub-
+   blocks (1 - 3).  The five other sub-blocks contain unenhanced
+   residual prior-in-time.  The enhancement algorithm is performed on
+   the three 80 sample sub-blocks 0, 1, and 2.
+
+4.6.3.  Calculation of the Smoothed Excitation
+
+   A linear combination of the six pssq(n) (n!=0) form a smoothed
+   approximation, z, of pssq(0).  Most of the weight is put on the
+   sequences that are close to pssq(0), as these are likely to be most
+   similar to pssq(0).  The smoothed vector is also rescaled so that the
+   energy of z is the same as the energy of pssq(0).
+
+      ___
+      \
+   y = > pssq(i) * pssq_weight(i)
+      /__
+   i=-3,-2,-1,1,2,3
+
+   pssq_weight(i) = 0.5*(1-cos(2*pi*(i+4)/(2*3+2)))
+
+   z = C * y, where C = ||pssq(0)||/||y||
+
+4.6.4.  Enhancer Criterion
+
+   The criterion of the enhancer is that the enhanced excitation is not
+   allowed to differ much from the unenhanced excitation.  This
+   criterion is checked for each 80-sample sub-block.
+
+   e < (b * ||pssq(0)||^2), where b=0.05 and   (Constraint 1)
+
+   e = (pssq(0)-z)*(pssq(0)-z), and "*" means the dot product
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 41]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+4.6.5.  Enhancing the excitation
+
+   From the criterion in the previous section, it is clear that the
+   excitation is not allowed to change much.  The purpose of this
+   constraint is to prevent the creation of an enhanced signal
+   significantly different from the original signal.  This also means
+   that the constraint limits the numerical size of the errors that the
+   enhancement procedure can make.  That is especially important in
+   unvoiced segments and background noise segments for which increased
+   periodicity could lead to lower perceived quality.
+
+   When the constraint in the prior section is not met, the enhanced
+   residual is instead calculated through a constrained optimization by
+   using the Lagrange multiplier technique.  The new constraint is that
+
+      e = (b * ||pssq(0)||^2)                     (Constraint 2)
+
+   We distinguish two solution regions for the optimization: 1) the
+   region where the first constraint is fulfilled and 2) the region
+   where the first constraint is not fulfilled and the second constraint
+   must be used.
+
+   In the first case, where the second constraint is not needed, the
+   optimized re-estimated vector is simply z, the energy-scaled version
+   of y.
+
+   In the second case, where the second constraint is activated and
+   becomes an equality constraint, we have
+
+      z= A*y + B*pssq(0)
+
+   where
+
+      A = sqrt((b-b^2/4)*(w00*w00)/ (w11*w00 + w10*w10)) and
+
+      w11 = pssq(0)*pssq(0)
+      w00 = y*y
+      w10 = y*pssq(0)    (* symbolizes the dot product)
+
+   and
+
+      B = 1 - b/2 - A * w10/w00
+
+   Appendix A.16 contains a listing of a reference implementation for
+   the enhancement method.
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 42]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+4.7.  Synthesis Filtering
+
+   Upon decoding or PLC of the LP excitation block, the decoded speech
+   block is obtained by running the decoded LP synthesis filter,
+   1/A~k(z), over the block.  The synthesis filters have to be shifted
+   to compensate for the delay in the enhancer.  For 20 ms frame size
+   mode, they SHOULD be shifted one 40-sample sub-block, and for 30 ms
+   frame size mode, they SHOULD be shifted two 40-sample sub-blocks.
+   The LP coefficients SHOULD be changed at the first sample of every
+   sub-block while keeping the filter state.  For PLC blocks, one
+   solution is to apply the last LP coefficients of the last decoded
+   speech block for all sub-blocks.
+
+   The reference implementation for the synthesis filtering can be found
+   in Appendix A.48.
+
+4.8.  Post Filtering
+
+   If desired, the decoded block can be filtered by a high-pass filter.
+   This removes the low frequencies of the decoded signal.  A reference
+   implementation of this, with cutoff at 65 Hz, is shown in Appendix
+   A.30.
+
+5.  Security Considerations
+
+   This algorithm for the coding of speech signals is not subject to any
+   known security consideration; however, its RTP payload format [1] is
+   subject to several considerations, which are addressed there.
+   Confidentiality of the media streams is achieved by encryption;
+   therefore external mechanisms, such as SRTP [5], MAY be used for that
+   purpose.
+
+6.  Evaluation of the iLBC Implementations
+
+   It is possible and suggested to evaluate certain iLBC implementation
+   by utilizing methodology and tools available at
+   http://www.ilbcfreeware.org/evaluation.html
+
+7.  References
+
+7.1.  Normative References
+
+   [1] Duric, A. and S. Andersen, "Real-time Transport Protocol (RTP)
+       Payload Format for internet Low Bit Rate Codec (iLBC) Speech",
+       RFC 3952, December 2004.
+
+   [2] Bradner, S., "Key words for use in RFCs to Indicate Requirement
+       Levels", BCP 14, RFC 2119, March 1997.
+
+
+
+Andersen, et al.              Experimental                     [Page 43]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   [3] PacketCable(TM) Audio/Video Codecs Specification, Cable
+       Television Laboratories, Inc.
+
+7.2.  Informative References
+
+   [4] ITU-T Recommendation G.711, available online from the ITU
+       bookstore at http://www.itu.int.
+
+   [5] Baugher, M., McGrew, D., Naslund, M., Carrara, E., and K. Norman,
+       "The Secure Real Time Transport Protocol (SRTP)", RFC 3711, March
+       2004.
+
+8.  Acknowledgements
+
+   This extensive work, besides listed authors, has the following
+   authors, who could not have been listed among "official" authors (due
+   to IESG restrictions in the number of authors who can be listed):
+
+      Manohar N. Murthi (Department of Electrical and Computer
+      Engineering, University of Miami), Fredrik Galschiodt, Julian
+      Spittka, and Jan Skoglund (Global IP Sound).
+
+   The authors are deeply indebted to the following people and thank
+   them sincerely:
+
+      Henry Sinnreich, Patrik Faltstrom, Alan Johnston, and Jean-
+      Francois Mule for great support of the iLBC initiative and for
+      valuable feedback and comments.
+
+      Peter Vary, Frank Mertz, and Christoph Erdmann (RWTH Aachen);
+      Vladimir Cuperman (Niftybox LLC); Thomas Eriksson (Chalmers Univ
+      of Tech), and Gernot Kubin (TU Graz), for thorough review of the
+      iLBC document and their valuable feedback and remarks.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 44]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+APPENDIX A.  Reference Implementation
+
+   This appendix contains the complete c-code for a reference
+   implementation of encoder and decoder for the specified codec.
+
+   The c-code consists of the following files with highest-level
+   functions:
+
+         iLBC_test.c: main function for evaluation purpose
+         iLBC_encode.h: encoder header
+         iLBC_encode.c: encoder function
+         iLBC_decode.h: decoder header
+         iLBC_decode.c: decoder function
+
+   The following files contain global defines and constants:
+
+         iLBC_define.h: global defines
+         constants.h: global constants header
+         constants.c: global constants memory allocations
+
+   The following files contain subroutines:
+
+         anaFilter.h: lpc analysis filter header
+         anaFilter.c: lpc analysis filter function
+         createCB.h: codebook construction header
+         createCB.c: codebook construction function
+         doCPLC.h: packet loss concealment header
+         doCPLC.c: packet loss concealment function
+         enhancer.h: signal enhancement header
+         enhancer.c: signal enhancement function
+         filter.h: general filter header
+         filter.c: general filter functions
+         FrameClassify.h: start state classification header
+         FrameClassify.c: start state classification function
+         gainquant.h: gain quantization header
+         gainquant.c: gain quantization function
+         getCBvec.h: codebook vector construction header
+         getCBvec.c: codebook vector construction function
+         helpfun.h: general purpose header
+         helpfun.c: general purpose functions
+         hpInput.h: input high pass filter header
+         hpInput.c: input high pass filter function
+         hpOutput.h: output high pass filter header
+         hpOutput.c: output high pass filter function
+         iCBConstruct.h: excitation decoding header
+         iCBConstruct.c: excitation decoding function
+         iCBSearch.h: excitation encoding header
+         iCBSearch.c: excitation encoding function
+
+
+
+Andersen, et al.              Experimental                     [Page 45]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+         LPCdecode.h: lpc decoding header
+         LPCdecode.c: lpc decoding function
+         LPCencode.h: lpc encoding header
+         LPCencode.c: lpc encoding function
+         lsf.h: line spectral frequencies header
+         lsf.c: line spectral frequencies functions
+         packing.h: bitstream packetization header
+         packing.c: bitstream packetization functions
+         StateConstructW.h: state decoding header
+         StateConstructW.c: state decoding functions
+         StateSearchW.h: state encoding header
+         StateSearchW.c: state encoding function
+         syntFilter.h: lpc synthesis filter header
+         syntFilter.c: lpc synthesis filter function
+
+   The implementation is portable and should work on many different
+   platforms.  However, it is not difficult to optimize the
+   implementation on particular platforms, an exercise left to the
+   reader.
+
+A.1.  iLBC_test.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iLBC_test.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <stdlib.h>
+   #include <stdio.h>
+   #include <string.h>
+   #include "iLBC_define.h"
+   #include "iLBC_encode.h"
+   #include "iLBC_decode.h"
+
+   /* Runtime statistics */
+   #include <time.h>
+
+   #define ILBCNOOFWORDS_MAX   (NO_OF_BYTES_30MS/2)
+
+   /*----------------------------------------------------------------*
+    *  Encoder interface function
+
+
+
+Andersen, et al.              Experimental                     [Page 46]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+    *---------------------------------------------------------------*/
+
+   short encode(   /* (o) Number of bytes encoded */
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                                   /* (i/o) Encoder instance */
+       short *encoded_data,    /* (o) The encoded bytes */
+       short *data                 /* (i) The signal block to encode*/
+   ){
+       float block[BLOCKL_MAX];
+       int k;
+
+       /* convert signal to float */
+
+       for (k=0; k<iLBCenc_inst->blockl; k++)
+           block[k] = (float)data[k];
+
+       /* do the actual encoding */
+
+       iLBC_encode((unsigned char *)encoded_data, block, iLBCenc_inst);
+
+
+       return (iLBCenc_inst->no_of_bytes);
+   }
+
+   /*----------------------------------------------------------------*
+    *  Decoder interface function
+    *---------------------------------------------------------------*/
+
+   short decode(       /* (o) Number of decoded samples */
+       iLBC_Dec_Inst_t *iLBCdec_inst,  /* (i/o) Decoder instance */
+       short *decoded_data,        /* (o) Decoded signal block*/
+       short *encoded_data,        /* (i) Encoded bytes */
+       short mode                       /* (i) 0=PL, 1=Normal */
+   ){
+       int k;
+       float decblock[BLOCKL_MAX], dtmp;
+
+       /* check if mode is valid */
+
+       if (mode<0 || mode>1) {
+           printf("\nERROR - Wrong mode - 0, 1 allowed\n"); exit(3);}
+
+       /* do actual decoding of block */
+
+       iLBC_decode(decblock, (unsigned char *)encoded_data,
+           iLBCdec_inst, mode);
+
+       /* convert to short */
+
+
+
+Andersen, et al.              Experimental                     [Page 47]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       for (k=0; k<iLBCdec_inst->blockl; k++){
+           dtmp=decblock[k];
+
+           if (dtmp<MIN_SAMPLE)
+               dtmp=MIN_SAMPLE;
+           else if (dtmp>MAX_SAMPLE)
+               dtmp=MAX_SAMPLE;
+           decoded_data[k] = (short) dtmp;
+       }
+
+       return (iLBCdec_inst->blockl);
+   }
+
+   /*---------------------------------------------------------------*
+    *  Main program to test iLBC encoding and decoding
+    *
+    *  Usage:
+    *    exefile_name.exe <infile> <bytefile> <outfile> <channel>
+    *
+    *    <infile>   : Input file, speech for encoder (16-bit pcm file)
+    *    <bytefile> : Bit stream output from the encoder
+    *    <outfile>  : Output file, decoded speech (16-bit pcm file)
+    *    <channel>  : Bit error file, optional (16-bit)
+    *                     1 - Packet received correctly
+    *                     0 - Packet Lost
+    *
+    *--------------------------------------------------------------*/
+
+   int main(int argc, char* argv[])
+   {
+
+       /* Runtime statistics */
+
+       float starttime;
+       float runtime;
+       float outtime;
+
+       FILE *ifileid,*efileid,*ofileid, *cfileid;
+       short data[BLOCKL_MAX];
+       short encoded_data[ILBCNOOFWORDS_MAX], decoded_data[BLOCKL_MAX];
+       int len;
+       short pli, mode;
+       int blockcount = 0;
+       int packetlosscount = 0;
+
+       /* Create structs */
+       iLBC_Enc_Inst_t Enc_Inst;
+       iLBC_Dec_Inst_t Dec_Inst;
+
+
+
+Andersen, et al.              Experimental                     [Page 48]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* get arguments and open files */
+
+       if ((argc!=5) && (argc!=6)) {
+           fprintf(stderr,
+           "\n*-----------------------------------------------*\n");
+           fprintf(stderr,
+           "   %s <20,30> input encoded decoded (channel)\n\n",
+               argv[0]);
+           fprintf(stderr,
+           "   mode    : Frame size for the encoding/decoding\n");
+           fprintf(stderr,
+           "                 20 - 20 ms\n");
+           fprintf(stderr,
+           "                 30 - 30 ms\n");
+           fprintf(stderr,
+           "   input   : Speech for encoder (16-bit pcm file)\n");
+           fprintf(stderr,
+           "   encoded : Encoded bit stream\n");
+           fprintf(stderr,
+           "   decoded : Decoded speech (16-bit pcm file)\n");
+           fprintf(stderr,
+           "   channel : Packet loss pattern, optional (16-bit)\n");
+           fprintf(stderr,
+           "                  1 - Packet received correctly\n");
+           fprintf(stderr,
+           "                  0 - Packet Lost\n");
+           fprintf(stderr,
+           "*-----------------------------------------------*\n\n");
+           exit(1);
+       }
+       mode=atoi(argv[1]);
+       if (mode != 20 && mode != 30) {
+           fprintf(stderr,"Wrong mode %s, must be 20, or 30\n",
+               argv[1]);
+           exit(2);
+       }
+       if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+           fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+           exit(2);}
+       if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+           fprintf(stderr, "Cannot open encoded file %s\n",
+               argv[3]); exit(1);}
+       if ( (ofileid=fopen(argv[4],"wb")) == NULL) {
+           fprintf(stderr, "Cannot open decoded file %s\n",
+               argv[4]); exit(1);}
+       if (argc==6) {
+           if( (cfileid=fopen(argv[5],"rb")) == NULL) {
+               fprintf(stderr, "Cannot open channel file %s\n",
+
+
+
+Andersen, et al.              Experimental                     [Page 49]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   argv[5]);
+               exit(1);
+           }
+       } else {
+           cfileid=NULL;
+       }
+
+       /* print info */
+
+       fprintf(stderr, "\n");
+       fprintf(stderr,
+           "*---------------------------------------------------*\n");
+       fprintf(stderr,
+           "*                                                   *\n");
+       fprintf(stderr,
+           "*      iLBC test program                            *\n");
+       fprintf(stderr,
+           "*                                                   *\n");
+       fprintf(stderr,
+           "*                                                   *\n");
+       fprintf(stderr,
+           "*---------------------------------------------------*\n");
+       fprintf(stderr,"\nMode           : %2d ms\n", mode);
+       fprintf(stderr,"Input file     : %s\n", argv[2]);
+       fprintf(stderr,"Encoded file   : %s\n", argv[3]);
+       fprintf(stderr,"Output file    : %s\n", argv[4]);
+       if (argc==6) {
+           fprintf(stderr,"Channel file   : %s\n", argv[5]);
+       }
+       fprintf(stderr,"\n");
+
+       /* Initialization */
+
+       initEncode(&Enc_Inst, mode);
+       initDecode(&Dec_Inst, mode, 1);
+
+       /* Runtime statistics */
+
+       starttime=clock()/(float)CLOCKS_PER_SEC;
+
+       /* loop over input blocks */
+
+       while (fread(data,sizeof(short),Enc_Inst.blockl,ifileid)==
+               Enc_Inst.blockl) {
+
+           blockcount++;
+
+           /* encoding */
+
+
+
+Andersen, et al.              Experimental                     [Page 50]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           fprintf(stderr, "--- Encoding block %i --- ",blockcount);
+           len=encode(&Enc_Inst, encoded_data, data);
+           fprintf(stderr, "\r");
+
+           /* write byte file */
+
+           fwrite(encoded_data, sizeof(unsigned char), len, efileid);
+
+           /* get channel data if provided */
+           if (argc==6) {
+               if (fread(&pli, sizeof(short), 1, cfileid)) {
+                   if ((pli!=0)&&(pli!=1)) {
+                       fprintf(stderr, "Error in channel file\n");
+                       exit(0);
+                   }
+                   if (pli==0) {
+                       /* Packet loss -> remove info from frame */
+                       memset(encoded_data, 0,
+                           sizeof(short)*ILBCNOOFWORDS_MAX);
+                       packetlosscount++;
+                   }
+               } else {
+                   fprintf(stderr, "Error. Channel file too short\n");
+                   exit(0);
+               }
+           } else {
+               pli=1;
+           }
+
+           /* decoding */
+
+           fprintf(stderr, "--- Decoding block %i --- ",blockcount);
+
+           len=decode(&Dec_Inst, decoded_data, encoded_data, pli);
+           fprintf(stderr, "\r");
+
+           /* write output file */
+
+           fwrite(decoded_data,sizeof(short),len,ofileid);
+       }
+
+       /* Runtime statistics */
+
+       runtime = (float)(clock()/(float)CLOCKS_PER_SEC-starttime);
+       outtime = (float)((float)blockcount*(float)mode/1000.0);
+       printf("\n\nLength of speech file: %.1f s\n", outtime);
+       printf("Packet loss          : %.1f%%\n",
+           100.0*(float)packetlosscount/(float)blockcount);
+
+
+
+Andersen, et al.              Experimental                     [Page 51]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       printf("Time to run iLBC     :");
+       printf(" %.1f s (%.1f %% of realtime)\n\n", runtime,
+           (100*runtime/outtime));
+
+       /* close files */
+
+       fclose(ifileid);  fclose(efileid); fclose(ofileid);
+       if (argc==6) {
+           fclose(cfileid);
+       }
+       return(0);
+   }
+
+A.2.  iLBC_encode.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iLBC_encode.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_ILBCENCODE_H
+   #define __iLBC_ILBCENCODE_H
+
+   #include "iLBC_define.h"
+
+   short initEncode(                   /* (o) Number of bytes
+                                              encoded */
+       iLBC_Enc_Inst_t *iLBCenc_inst,  /* (i/o) Encoder instance */
+       int mode                    /* (i) frame size mode */
+   );
+
+   void iLBC_encode(
+
+       unsigned char *bytes,           /* (o) encoded data bits iLBC */
+       float *block,                   /* (o) speech vector to
+                                              encode */
+       iLBC_Enc_Inst_t *iLBCenc_inst   /* (i/o) the general encoder
+                                              state */
+   );
+
+   #endif
+
+
+
+
+Andersen, et al.              Experimental                     [Page 52]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.3.  iLBC_encode.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iLBC_encode.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <stdlib.h>
+   #include <string.h>
+
+   #include "iLBC_define.h"
+   #include "LPCencode.h"
+   #include "FrameClassify.h"
+   #include "StateSearchW.h"
+   #include "StateConstructW.h"
+   #include "helpfun.h"
+   #include "constants.h"
+   #include "packing.h"
+   #include "iCBSearch.h"
+   #include "iCBConstruct.h"
+   #include "hpInput.h"
+   #include "anaFilter.h"
+   #include "syntFilter.h"
+
+   /*----------------------------------------------------------------*
+    *  Initiation of encoder instance.
+    *---------------------------------------------------------------*/
+
+   short initEncode(                   /* (o) Number of bytes
+                                              encoded */
+       iLBC_Enc_Inst_t *iLBCenc_inst,  /* (i/o) Encoder instance */
+       int mode                    /* (i) frame size mode */
+   ){
+       iLBCenc_inst->mode = mode;
+       if (mode==30) {
+           iLBCenc_inst->blockl = BLOCKL_30MS;
+           iLBCenc_inst->nsub = NSUB_30MS;
+           iLBCenc_inst->nasub = NASUB_30MS;
+           iLBCenc_inst->lpc_n = LPC_N_30MS;
+           iLBCenc_inst->no_of_bytes = NO_OF_BYTES_30MS;
+           iLBCenc_inst->no_of_words = NO_OF_WORDS_30MS;
+
+
+
+Andersen, et al.              Experimental                     [Page 53]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           iLBCenc_inst->state_short_len=STATE_SHORT_LEN_30MS;
+           /* ULP init */
+           iLBCenc_inst->ULP_inst=&ULP_30msTbl;
+       }
+       else if (mode==20) {
+           iLBCenc_inst->blockl = BLOCKL_20MS;
+           iLBCenc_inst->nsub = NSUB_20MS;
+           iLBCenc_inst->nasub = NASUB_20MS;
+           iLBCenc_inst->lpc_n = LPC_N_20MS;
+           iLBCenc_inst->no_of_bytes = NO_OF_BYTES_20MS;
+           iLBCenc_inst->no_of_words = NO_OF_WORDS_20MS;
+           iLBCenc_inst->state_short_len=STATE_SHORT_LEN_20MS;
+           /* ULP init */
+           iLBCenc_inst->ULP_inst=&ULP_20msTbl;
+       }
+       else {
+           exit(2);
+       }
+
+       memset((*iLBCenc_inst).anaMem, 0,
+           LPC_FILTERORDER*sizeof(float));
+       memcpy((*iLBCenc_inst).lsfold, lsfmeanTbl,
+           LPC_FILTERORDER*sizeof(float));
+       memcpy((*iLBCenc_inst).lsfdeqold, lsfmeanTbl,
+           LPC_FILTERORDER*sizeof(float));
+       memset((*iLBCenc_inst).lpc_buffer, 0,
+           (LPC_LOOKBACK+BLOCKL_MAX)*sizeof(float));
+       memset((*iLBCenc_inst).hpimem, 0, 4*sizeof(float));
+
+       return (iLBCenc_inst->no_of_bytes);
+   }
+
+   /*----------------------------------------------------------------*
+    *  main encoder function
+    *---------------------------------------------------------------*/
+
+   void iLBC_encode(
+       unsigned char *bytes,           /* (o) encoded data bits iLBC */
+       float *block,                   /* (o) speech vector to
+                                              encode */
+       iLBC_Enc_Inst_t *iLBCenc_inst   /* (i/o) the general encoder
+                                              state */
+   ){
+
+       float data[BLOCKL_MAX];
+       float residual[BLOCKL_MAX], reverseResidual[BLOCKL_MAX];
+
+       int start, idxForMax, idxVec[STATE_LEN];
+
+
+
+Andersen, et al.              Experimental                     [Page 54]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float reverseDecresidual[BLOCKL_MAX], mem[CB_MEML];
+       int n, k, meml_gotten, Nfor, Nback, i, pos;
+       int gain_index[CB_NSTAGES*NASUB_MAX],
+           extra_gain_index[CB_NSTAGES];
+       int cb_index[CB_NSTAGES*NASUB_MAX],extra_cb_index[CB_NSTAGES];
+       int lsf_i[LSF_NSPLIT*LPC_N_MAX];
+       unsigned char *pbytes;
+       int diff, start_pos, state_first;
+       float en1, en2;
+       int index, ulp, firstpart;
+       int subcount, subframe;
+       float weightState[LPC_FILTERORDER];
+       float syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
+       float weightdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
+       float decresidual[BLOCKL_MAX];
+
+       /* high pass filtering of input signal if such is not done
+              prior to calling this function */
+
+       hpInput(block, iLBCenc_inst->blockl,
+                   data, (*iLBCenc_inst).hpimem);
+
+       /* otherwise simply copy */
+
+       /*memcpy(data,block,iLBCenc_inst->blockl*sizeof(float));*/
+
+       /* LPC of hp filtered input data */
+
+       LPCencode(syntdenum, weightdenum, lsf_i, data, iLBCenc_inst);
+
+
+       /* inverse filter to get residual */
+
+       for (n=0; n<iLBCenc_inst->nsub; n++) {
+           anaFilter(&data[n*SUBL], &syntdenum[n*(LPC_FILTERORDER+1)],
+               SUBL, &residual[n*SUBL], iLBCenc_inst->anaMem);
+       }
+
+       /* find state location */
+
+       start = FrameClassify(iLBCenc_inst, residual);
+
+       /* check if state should be in first or last part of the
+       two subframes */
+
+       diff = STATE_LEN - iLBCenc_inst->state_short_len;
+       en1 = 0;
+       index = (start-1)*SUBL;
+
+
+
+Andersen, et al.              Experimental                     [Page 55]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       for (i = 0; i < iLBCenc_inst->state_short_len; i++) {
+           en1 += residual[index+i]*residual[index+i];
+       }
+       en2 = 0;
+       index = (start-1)*SUBL+diff;
+       for (i = 0; i < iLBCenc_inst->state_short_len; i++) {
+           en2 += residual[index+i]*residual[index+i];
+       }
+
+
+       if (en1 > en2) {
+           state_first = 1;
+           start_pos = (start-1)*SUBL;
+       } else {
+           state_first = 0;
+           start_pos = (start-1)*SUBL + diff;
+       }
+
+       /* scalar quantization of state */
+
+       StateSearchW(iLBCenc_inst, &residual[start_pos],
+           &syntdenum[(start-1)*(LPC_FILTERORDER+1)],
+           &weightdenum[(start-1)*(LPC_FILTERORDER+1)], &idxForMax,
+           idxVec, iLBCenc_inst->state_short_len, state_first);
+
+       StateConstructW(idxForMax, idxVec,
+           &syntdenum[(start-1)*(LPC_FILTERORDER+1)],
+           &decresidual[start_pos], iLBCenc_inst->state_short_len);
+
+       /* predictive quantization in state */
+
+       if (state_first) { /* put adaptive part in the end */
+
+           /* setup memory */
+
+           memset(mem, 0,
+               (CB_MEML-iLBCenc_inst->state_short_len)*sizeof(float));
+           memcpy(mem+CB_MEML-iLBCenc_inst->state_short_len,
+               decresidual+start_pos,
+               iLBCenc_inst->state_short_len*sizeof(float));
+           memset(weightState, 0, LPC_FILTERORDER*sizeof(float));
+
+           /* encode sub-frames */
+
+           iCBSearch(iLBCenc_inst, extra_cb_index, extra_gain_index,
+               &residual[start_pos+iLBCenc_inst->state_short_len],
+               mem+CB_MEML-stMemLTbl,
+               stMemLTbl, diff, CB_NSTAGES,
+
+
+
+Andersen, et al.              Experimental                     [Page 56]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               &weightdenum[start*(LPC_FILTERORDER+1)],
+               weightState, 0);
+
+           /* construct decoded vector */
+
+           iCBConstruct(
+               &decresidual[start_pos+iLBCenc_inst->state_short_len],
+               extra_cb_index, extra_gain_index,
+               mem+CB_MEML-stMemLTbl,
+               stMemLTbl, diff, CB_NSTAGES);
+
+       }
+       else { /* put adaptive part in the beginning */
+
+           /* create reversed vectors for prediction */
+
+           for (k=0; k<diff; k++) {
+               reverseResidual[k] = residual[(start+1)*SUBL-1
+                   -(k+iLBCenc_inst->state_short_len)];
+           }
+
+           /* setup memory */
+
+           meml_gotten = iLBCenc_inst->state_short_len;
+           for (k=0; k<meml_gotten; k++) {
+               mem[CB_MEML-1-k] = decresidual[start_pos + k];
+           }
+           memset(mem, 0, (CB_MEML-k)*sizeof(float));
+           memset(weightState, 0, LPC_FILTERORDER*sizeof(float));
+
+           /* encode sub-frames */
+
+           iCBSearch(iLBCenc_inst, extra_cb_index, extra_gain_index,
+               reverseResidual, mem+CB_MEML-stMemLTbl, stMemLTbl,
+               diff, CB_NSTAGES,
+               &weightdenum[(start-1)*(LPC_FILTERORDER+1)],
+               weightState, 0);
+
+           /* construct decoded vector */
+
+           iCBConstruct(reverseDecresidual, extra_cb_index,
+               extra_gain_index, mem+CB_MEML-stMemLTbl, stMemLTbl,
+               diff, CB_NSTAGES);
+
+           /* get decoded residual from reversed vector */
+
+           for (k=0; k<diff; k++) {
+               decresidual[start_pos-1-k] = reverseDecresidual[k];
+
+
+
+Andersen, et al.              Experimental                     [Page 57]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           }
+       }
+
+       /* counter for predicted sub-frames */
+
+       subcount=0;
+
+       /* forward prediction of sub-frames */
+
+       Nfor = iLBCenc_inst->nsub-start-1;
+
+
+       if ( Nfor > 0 ) {
+
+           /* setup memory */
+
+           memset(mem, 0, (CB_MEML-STATE_LEN)*sizeof(float));
+           memcpy(mem+CB_MEML-STATE_LEN, decresidual+(start-1)*SUBL,
+               STATE_LEN*sizeof(float));
+           memset(weightState, 0, LPC_FILTERORDER*sizeof(float));
+
+           /* loop over sub-frames to encode */
+
+           for (subframe=0; subframe<Nfor; subframe++) {
+
+               /* encode sub-frame */
+
+               iCBSearch(iLBCenc_inst, cb_index+subcount*CB_NSTAGES,
+                   gain_index+subcount*CB_NSTAGES,
+                   &residual[(start+1+subframe)*SUBL],
+                   mem+CB_MEML-memLfTbl[subcount],
+                   memLfTbl[subcount], SUBL, CB_NSTAGES,
+                   &weightdenum[(start+1+subframe)*
+                               (LPC_FILTERORDER+1)],
+                   weightState, subcount+1);
+
+               /* construct decoded vector */
+
+               iCBConstruct(&decresidual[(start+1+subframe)*SUBL],
+                   cb_index+subcount*CB_NSTAGES,
+                   gain_index+subcount*CB_NSTAGES,
+                   mem+CB_MEML-memLfTbl[subcount],
+                   memLfTbl[subcount], SUBL, CB_NSTAGES);
+
+               /* update memory */
+
+               memcpy(mem, mem+SUBL, (CB_MEML-SUBL)*sizeof(float));
+               memcpy(mem+CB_MEML-SUBL,
+
+
+
+Andersen, et al.              Experimental                     [Page 58]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   &decresidual[(start+1+subframe)*SUBL],
+                   SUBL*sizeof(float));
+               memset(weightState, 0, LPC_FILTERORDER*sizeof(float));
+
+               subcount++;
+           }
+       }
+
+
+       /* backward prediction of sub-frames */
+
+       Nback = start-1;
+
+
+       if ( Nback > 0 ) {
+
+           /* create reverse order vectors */
+
+           for (n=0; n<Nback; n++) {
+               for (k=0; k<SUBL; k++) {
+                   reverseResidual[n*SUBL+k] =
+                       residual[(start-1)*SUBL-1-n*SUBL-k];
+                   reverseDecresidual[n*SUBL+k] =
+                       decresidual[(start-1)*SUBL-1-n*SUBL-k];
+               }
+           }
+
+           /* setup memory */
+
+           meml_gotten = SUBL*(iLBCenc_inst->nsub+1-start);
+
+
+           if ( meml_gotten > CB_MEML ) {
+               meml_gotten=CB_MEML;
+           }
+           for (k=0; k<meml_gotten; k++) {
+               mem[CB_MEML-1-k] = decresidual[(start-1)*SUBL + k];
+           }
+           memset(mem, 0, (CB_MEML-k)*sizeof(float));
+           memset(weightState, 0, LPC_FILTERORDER*sizeof(float));
+
+           /* loop over sub-frames to encode */
+
+           for (subframe=0; subframe<Nback; subframe++) {
+
+               /* encode sub-frame */
+
+               iCBSearch(iLBCenc_inst, cb_index+subcount*CB_NSTAGES,
+
+
+
+Andersen, et al.              Experimental                     [Page 59]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   gain_index+subcount*CB_NSTAGES,
+                   &reverseResidual[subframe*SUBL],
+                   mem+CB_MEML-memLfTbl[subcount],
+                   memLfTbl[subcount], SUBL, CB_NSTAGES,
+                   &weightdenum[(start-2-subframe)*
+                               (LPC_FILTERORDER+1)],
+                   weightState, subcount+1);
+
+               /* construct decoded vector */
+
+               iCBConstruct(&reverseDecresidual[subframe*SUBL],
+                   cb_index+subcount*CB_NSTAGES,
+                   gain_index+subcount*CB_NSTAGES,
+                   mem+CB_MEML-memLfTbl[subcount],
+                   memLfTbl[subcount], SUBL, CB_NSTAGES);
+
+               /* update memory */
+
+               memcpy(mem, mem+SUBL, (CB_MEML-SUBL)*sizeof(float));
+               memcpy(mem+CB_MEML-SUBL,
+                   &reverseDecresidual[subframe*SUBL],
+                   SUBL*sizeof(float));
+               memset(weightState, 0, LPC_FILTERORDER*sizeof(float));
+
+               subcount++;
+
+           }
+
+           /* get decoded residual from reversed vector */
+
+           for (i=0; i<SUBL*Nback; i++) {
+               decresidual[SUBL*Nback - i - 1] =
+                   reverseDecresidual[i];
+           }
+       }
+       /* end encoding part */
+
+       /* adjust index */
+       index_conv_enc(cb_index);
+
+       /* pack bytes */
+
+       pbytes=bytes;
+       pos=0;
+
+       /* loop over the 3 ULP classes */
+
+       for (ulp=0; ulp<3; ulp++) {
+
+
+
+Andersen, et al.              Experimental                     [Page 60]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+
+           /* LSF */
+           for (k=0; k<LSF_NSPLIT*iLBCenc_inst->lpc_n; k++) {
+               packsplit(&lsf_i[k], &firstpart, &lsf_i[k],
+                   iLBCenc_inst->ULP_inst->lsf_bits[k][ulp],
+                   iLBCenc_inst->ULP_inst->lsf_bits[k][ulp]+
+                   iLBCenc_inst->ULP_inst->lsf_bits[k][ulp+1]+
+                   iLBCenc_inst->ULP_inst->lsf_bits[k][ulp+2]);
+               dopack( &pbytes, firstpart,
+                   iLBCenc_inst->ULP_inst->lsf_bits[k][ulp], &pos);
+           }
+
+           /* Start block info */
+
+           packsplit(&start, &firstpart, &start,
+               iLBCenc_inst->ULP_inst->start_bits[ulp],
+               iLBCenc_inst->ULP_inst->start_bits[ulp]+
+               iLBCenc_inst->ULP_inst->start_bits[ulp+1]+
+               iLBCenc_inst->ULP_inst->start_bits[ulp+2]);
+           dopack( &pbytes, firstpart,
+               iLBCenc_inst->ULP_inst->start_bits[ulp], &pos);
+
+           packsplit(&state_first, &firstpart, &state_first,
+               iLBCenc_inst->ULP_inst->startfirst_bits[ulp],
+               iLBCenc_inst->ULP_inst->startfirst_bits[ulp]+
+               iLBCenc_inst->ULP_inst->startfirst_bits[ulp+1]+
+               iLBCenc_inst->ULP_inst->startfirst_bits[ulp+2]);
+           dopack( &pbytes, firstpart,
+               iLBCenc_inst->ULP_inst->startfirst_bits[ulp], &pos);
+
+           packsplit(&idxForMax, &firstpart, &idxForMax,
+               iLBCenc_inst->ULP_inst->scale_bits[ulp],
+               iLBCenc_inst->ULP_inst->scale_bits[ulp]+
+               iLBCenc_inst->ULP_inst->scale_bits[ulp+1]+
+               iLBCenc_inst->ULP_inst->scale_bits[ulp+2]);
+           dopack( &pbytes, firstpart,
+               iLBCenc_inst->ULP_inst->scale_bits[ulp], &pos);
+
+           for (k=0; k<iLBCenc_inst->state_short_len; k++) {
+               packsplit(idxVec+k, &firstpart, idxVec+k,
+                   iLBCenc_inst->ULP_inst->state_bits[ulp],
+                   iLBCenc_inst->ULP_inst->state_bits[ulp]+
+                   iLBCenc_inst->ULP_inst->state_bits[ulp+1]+
+                   iLBCenc_inst->ULP_inst->state_bits[ulp+2]);
+               dopack( &pbytes, firstpart,
+                   iLBCenc_inst->ULP_inst->state_bits[ulp], &pos);
+           }
+
+
+
+
+Andersen, et al.              Experimental                     [Page 61]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           /* 23/22 (20ms/30ms) sample block */
+
+           for (k=0;k<CB_NSTAGES;k++) {
+               packsplit(extra_cb_index+k, &firstpart,
+                   extra_cb_index+k,
+                   iLBCenc_inst->ULP_inst->extra_cb_index[k][ulp],
+                   iLBCenc_inst->ULP_inst->extra_cb_index[k][ulp]+
+                   iLBCenc_inst->ULP_inst->extra_cb_index[k][ulp+1]+
+                   iLBCenc_inst->ULP_inst->extra_cb_index[k][ulp+2]);
+               dopack( &pbytes, firstpart,
+                   iLBCenc_inst->ULP_inst->extra_cb_index[k][ulp],
+                   &pos);
+           }
+
+           for (k=0;k<CB_NSTAGES;k++) {
+               packsplit(extra_gain_index+k, &firstpart,
+                   extra_gain_index+k,
+                   iLBCenc_inst->ULP_inst->extra_cb_gain[k][ulp],
+                   iLBCenc_inst->ULP_inst->extra_cb_gain[k][ulp]+
+                   iLBCenc_inst->ULP_inst->extra_cb_gain[k][ulp+1]+
+                   iLBCenc_inst->ULP_inst->extra_cb_gain[k][ulp+2]);
+               dopack( &pbytes, firstpart,
+                   iLBCenc_inst->ULP_inst->extra_cb_gain[k][ulp],
+                   &pos);
+           }
+
+           /* The two/four (20ms/30ms) 40 sample sub-blocks */
+
+           for (i=0; i<iLBCenc_inst->nasub; i++) {
+               for (k=0; k<CB_NSTAGES; k++) {
+                   packsplit(cb_index+i*CB_NSTAGES+k, &firstpart,
+                       cb_index+i*CB_NSTAGES+k,
+                       iLBCenc_inst->ULP_inst->cb_index[i][k][ulp],
+                       iLBCenc_inst->ULP_inst->cb_index[i][k][ulp]+
+                       iLBCenc_inst->ULP_inst->cb_index[i][k][ulp+1]+
+                       iLBCenc_inst->ULP_inst->cb_index[i][k][ulp+2]);
+                   dopack( &pbytes, firstpart,
+                       iLBCenc_inst->ULP_inst->cb_index[i][k][ulp],
+                       &pos);
+               }
+           }
+
+           for (i=0; i<iLBCenc_inst->nasub; i++) {
+               for (k=0; k<CB_NSTAGES; k++) {
+                   packsplit(gain_index+i*CB_NSTAGES+k, &firstpart,
+                       gain_index+i*CB_NSTAGES+k,
+                       iLBCenc_inst->ULP_inst->cb_gain[i][k][ulp],
+                       iLBCenc_inst->ULP_inst->cb_gain[i][k][ulp]+
+
+
+
+Andersen, et al.              Experimental                     [Page 62]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                       iLBCenc_inst->ULP_inst->cb_gain[i][k][ulp+1]+
+                       iLBCenc_inst->ULP_inst->cb_gain[i][k][ulp+2]);
+                   dopack( &pbytes, firstpart,
+                       iLBCenc_inst->ULP_inst->cb_gain[i][k][ulp],
+                       &pos);
+               }
+           }
+       }
+
+       /* set the last bit to zero (otherwise the decoder
+          will treat it as a lost frame) */
+       dopack( &pbytes, 0, 1, &pos);
+   }
+
+A.4.  iLBC_decode.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iLBC_decode.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_ILBCDECODE_H
+   #define __iLBC_ILBCDECODE_H
+
+   #include "iLBC_define.h"
+
+   short initDecode(                   /* (o) Number of decoded
+                                              samples */
+       iLBC_Dec_Inst_t *iLBCdec_inst,  /* (i/o) Decoder instance */
+       int mode,                       /* (i) frame size mode */
+       int use_enhancer                /* (i) 1 to use enhancer
+                                              0 to run without
+                                                enhancer */
+   );
+
+   void iLBC_decode(
+       float *decblock,            /* (o) decoded signal block */
+       unsigned char *bytes,           /* (i) encoded signal bits */
+       iLBC_Dec_Inst_t *iLBCdec_inst,  /* (i/o) the decoder state
+                                                structure */
+       int mode                    /* (i) 0: bad packet, PLC,
+                                              1: normal */
+
+
+
+Andersen, et al.              Experimental                     [Page 63]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   );
+
+   #endif
+
+A.5.  iLBC_decode.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iLBC_decode.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <stdlib.h>
+
+   #include "iLBC_define.h"
+   #include "StateConstructW.h"
+   #include "LPCdecode.h"
+   #include "iCBConstruct.h"
+   #include "doCPLC.h"
+   #include "helpfun.h"
+   #include "constants.h"
+   #include "packing.h"
+   #include "string.h"
+   #include "enhancer.h"
+   #include "hpOutput.h"
+   #include "syntFilter.h"
+
+   /*----------------------------------------------------------------*
+    *  Initiation of decoder instance.
+    *---------------------------------------------------------------*/
+
+   short initDecode(                   /* (o) Number of decoded
+                                              samples */
+       iLBC_Dec_Inst_t *iLBCdec_inst,  /* (i/o) Decoder instance */
+       int mode,                       /* (i) frame size mode */
+       int use_enhancer                /* (i) 1 to use enhancer
+                                              0 to run without
+                                                enhancer */
+   ){
+       int i;
+
+       iLBCdec_inst->mode = mode;
+
+
+
+Andersen, et al.              Experimental                     [Page 64]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       if (mode==30) {
+           iLBCdec_inst->blockl = BLOCKL_30MS;
+           iLBCdec_inst->nsub = NSUB_30MS;
+           iLBCdec_inst->nasub = NASUB_30MS;
+           iLBCdec_inst->lpc_n = LPC_N_30MS;
+           iLBCdec_inst->no_of_bytes = NO_OF_BYTES_30MS;
+           iLBCdec_inst->no_of_words = NO_OF_WORDS_30MS;
+           iLBCdec_inst->state_short_len=STATE_SHORT_LEN_30MS;
+           /* ULP init */
+           iLBCdec_inst->ULP_inst=&ULP_30msTbl;
+       }
+       else if (mode==20) {
+           iLBCdec_inst->blockl = BLOCKL_20MS;
+           iLBCdec_inst->nsub = NSUB_20MS;
+           iLBCdec_inst->nasub = NASUB_20MS;
+           iLBCdec_inst->lpc_n = LPC_N_20MS;
+           iLBCdec_inst->no_of_bytes = NO_OF_BYTES_20MS;
+           iLBCdec_inst->no_of_words = NO_OF_WORDS_20MS;
+           iLBCdec_inst->state_short_len=STATE_SHORT_LEN_20MS;
+           /* ULP init */
+           iLBCdec_inst->ULP_inst=&ULP_20msTbl;
+       }
+       else {
+           exit(2);
+       }
+
+       memset(iLBCdec_inst->syntMem, 0,
+           LPC_FILTERORDER*sizeof(float));
+       memcpy((*iLBCdec_inst).lsfdeqold, lsfmeanTbl,
+           LPC_FILTERORDER*sizeof(float));
+
+       memset(iLBCdec_inst->old_syntdenum, 0,
+           ((LPC_FILTERORDER + 1)*NSUB_MAX)*sizeof(float));
+       for (i=0; i<NSUB_MAX; i++)
+           iLBCdec_inst->old_syntdenum[i*(LPC_FILTERORDER+1)]=1.0;
+
+       iLBCdec_inst->last_lag = 20;
+
+       iLBCdec_inst->prevLag = 120;
+       iLBCdec_inst->per = 0.0;
+       iLBCdec_inst->consPLICount = 0;
+       iLBCdec_inst->prevPLI = 0;
+       iLBCdec_inst->prevLpc[0] = 1.0;
+       memset(iLBCdec_inst->prevLpc+1,0,
+           LPC_FILTERORDER*sizeof(float));
+       memset(iLBCdec_inst->prevResidual, 0, BLOCKL_MAX*sizeof(float));
+       iLBCdec_inst->seed=777;
+
+
+
+
+Andersen, et al.              Experimental                     [Page 65]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       memset(iLBCdec_inst->hpomem, 0, 4*sizeof(float));
+
+       iLBCdec_inst->use_enhancer = use_enhancer;
+       memset(iLBCdec_inst->enh_buf, 0, ENH_BUFL*sizeof(float));
+       for (i=0;i<ENH_NBLOCKS_TOT;i++)
+           iLBCdec_inst->enh_period[i]=(float)40.0;
+
+       iLBCdec_inst->prev_enh_pl = 0;
+
+       return (iLBCdec_inst->blockl);
+   }
+
+   /*----------------------------------------------------------------*
+    *  frame residual decoder function (subroutine to iLBC_decode)
+    *---------------------------------------------------------------*/
+
+   void Decode(
+       iLBC_Dec_Inst_t *iLBCdec_inst,  /* (i/o) the decoder state
+                                                structure */
+       float *decresidual,             /* (o) decoded residual frame */
+       int start,                      /* (i) location of start
+                                              state */
+       int idxForMax,                  /* (i) codebook index for the
+                                              maximum value */
+       int *idxVec,                /* (i) codebook indexes for the
+                                              samples  in the start
+                                              state */
+       float *syntdenum,               /* (i) the decoded synthesis
+                                              filter coefficients */
+       int *cb_index,                  /* (i) the indexes for the
+                                              adaptive codebook */
+       int *gain_index,            /* (i) the indexes for the
+                                              corresponding gains */
+       int *extra_cb_index,        /* (i) the indexes for the
+                                              adaptive codebook part
+                                              of start state */
+       int *extra_gain_index,          /* (i) the indexes for the
+                                              corresponding gains */
+       int state_first                 /* (i) 1 if non adaptive part
+                                              of start state comes
+                                              first 0 if that part
+                                              comes last */
+   ){
+       float reverseDecresidual[BLOCKL_MAX], mem[CB_MEML];
+       int k, meml_gotten, Nfor, Nback, i;
+       int diff, start_pos;
+       int subcount, subframe;
+
+
+
+
+Andersen, et al.              Experimental                     [Page 66]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       diff = STATE_LEN - iLBCdec_inst->state_short_len;
+
+       if (state_first == 1) {
+           start_pos = (start-1)*SUBL;
+       } else {
+           start_pos = (start-1)*SUBL + diff;
+       }
+
+       /* decode scalar part of start state */
+
+       StateConstructW(idxForMax, idxVec,
+           &syntdenum[(start-1)*(LPC_FILTERORDER+1)],
+           &decresidual[start_pos], iLBCdec_inst->state_short_len);
+
+
+       if (state_first) { /* put adaptive part in the end */
+
+           /* setup memory */
+
+           memset(mem, 0,
+               (CB_MEML-iLBCdec_inst->state_short_len)*sizeof(float));
+           memcpy(mem+CB_MEML-iLBCdec_inst->state_short_len,
+               decresidual+start_pos,
+               iLBCdec_inst->state_short_len*sizeof(float));
+
+           /* construct decoded vector */
+
+           iCBConstruct(
+               &decresidual[start_pos+iLBCdec_inst->state_short_len],
+               extra_cb_index, extra_gain_index, mem+CB_MEML-stMemLTbl,
+               stMemLTbl, diff, CB_NSTAGES);
+
+       }
+       else {/* put adaptive part in the beginning */
+
+           /* create reversed vectors for prediction */
+
+           for (k=0; k<diff; k++) {
+               reverseDecresidual[k] =
+                   decresidual[(start+1)*SUBL-1-
+                           (k+iLBCdec_inst->state_short_len)];
+           }
+
+           /* setup memory */
+
+           meml_gotten = iLBCdec_inst->state_short_len;
+           for (k=0; k<meml_gotten; k++){
+               mem[CB_MEML-1-k] = decresidual[start_pos + k];
+
+
+
+Andersen, et al.              Experimental                     [Page 67]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           }
+           memset(mem, 0, (CB_MEML-k)*sizeof(float));
+
+           /* construct decoded vector */
+
+           iCBConstruct(reverseDecresidual, extra_cb_index,
+               extra_gain_index, mem+CB_MEML-stMemLTbl, stMemLTbl,
+               diff, CB_NSTAGES);
+
+           /* get decoded residual from reversed vector */
+
+           for (k=0; k<diff; k++) {
+               decresidual[start_pos-1-k] = reverseDecresidual[k];
+           }
+       }
+
+       /* counter for predicted sub-frames */
+
+       subcount=0;
+
+       /* forward prediction of sub-frames */
+
+       Nfor = iLBCdec_inst->nsub-start-1;
+
+       if ( Nfor > 0 ){
+
+           /* setup memory */
+
+           memset(mem, 0, (CB_MEML-STATE_LEN)*sizeof(float));
+           memcpy(mem+CB_MEML-STATE_LEN, decresidual+(start-1)*SUBL,
+               STATE_LEN*sizeof(float));
+
+           /* loop over sub-frames to encode */
+
+           for (subframe=0; subframe<Nfor; subframe++) {
+
+               /* construct decoded vector */
+
+               iCBConstruct(&decresidual[(start+1+subframe)*SUBL],
+                   cb_index+subcount*CB_NSTAGES,
+                   gain_index+subcount*CB_NSTAGES,
+                   mem+CB_MEML-memLfTbl[subcount],
+                   memLfTbl[subcount], SUBL, CB_NSTAGES);
+
+               /* update memory */
+
+               memcpy(mem, mem+SUBL, (CB_MEML-SUBL)*sizeof(float));
+               memcpy(mem+CB_MEML-SUBL,
+
+
+
+Andersen, et al.              Experimental                     [Page 68]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   &decresidual[(start+1+subframe)*SUBL],
+                   SUBL*sizeof(float));
+
+               subcount++;
+
+           }
+
+       }
+
+       /* backward prediction of sub-frames */
+
+       Nback = start-1;
+
+       if ( Nback > 0 ) {
+
+           /* setup memory */
+
+           meml_gotten = SUBL*(iLBCdec_inst->nsub+1-start);
+
+           if ( meml_gotten > CB_MEML ) {
+               meml_gotten=CB_MEML;
+           }
+           for (k=0; k<meml_gotten; k++) {
+               mem[CB_MEML-1-k] = decresidual[(start-1)*SUBL + k];
+           }
+           memset(mem, 0, (CB_MEML-k)*sizeof(float));
+
+           /* loop over subframes to decode */
+
+           for (subframe=0; subframe<Nback; subframe++) {
+
+               /* construct decoded vector */
+
+               iCBConstruct(&reverseDecresidual[subframe*SUBL],
+                   cb_index+subcount*CB_NSTAGES,
+                   gain_index+subcount*CB_NSTAGES,
+                   mem+CB_MEML-memLfTbl[subcount], memLfTbl[subcount],
+                   SUBL, CB_NSTAGES);
+
+               /* update memory */
+
+               memcpy(mem, mem+SUBL, (CB_MEML-SUBL)*sizeof(float));
+               memcpy(mem+CB_MEML-SUBL,
+                   &reverseDecresidual[subframe*SUBL],
+                   SUBL*sizeof(float));
+
+               subcount++;
+           }
+
+
+
+Andersen, et al.              Experimental                     [Page 69]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           /* get decoded residual from reversed vector */
+
+           for (i=0; i<SUBL*Nback; i++)
+               decresidual[SUBL*Nback - i - 1] =
+               reverseDecresidual[i];
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  main decoder function
+    *---------------------------------------------------------------*/
+
+   void iLBC_decode(
+       float *decblock,            /* (o) decoded signal block */
+       unsigned char *bytes,           /* (i) encoded signal bits */
+       iLBC_Dec_Inst_t *iLBCdec_inst,  /* (i/o) the decoder state
+                                                structure */
+       int mode                    /* (i) 0: bad packet, PLC,
+                                              1: normal */
+   ){
+       float data[BLOCKL_MAX];
+       float lsfdeq[LPC_FILTERORDER*LPC_N_MAX];
+       float PLCresidual[BLOCKL_MAX], PLClpc[LPC_FILTERORDER + 1];
+       float zeros[BLOCKL_MAX], one[LPC_FILTERORDER + 1];
+       int k, i, start, idxForMax, pos, lastpart, ulp;
+       int lag, ilag;
+       float cc, maxcc;
+       int idxVec[STATE_LEN];
+       int check;
+       int gain_index[NASUB_MAX*CB_NSTAGES],
+           extra_gain_index[CB_NSTAGES];
+       int cb_index[CB_NSTAGES*NASUB_MAX], extra_cb_index[CB_NSTAGES];
+       int lsf_i[LSF_NSPLIT*LPC_N_MAX];
+       int state_first;
+       int last_bit;
+       unsigned char *pbytes;
+       float weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+       int order_plus_one;
+       float syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
+       float decresidual[BLOCKL_MAX];
+
+       if (mode>0) { /* the data are good */
+
+           /* decode data */
+
+           pbytes=bytes;
+           pos=0;
+
+
+
+
+Andersen, et al.              Experimental                     [Page 70]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           /* Set everything to zero before decoding */
+
+           for (k=0; k<LSF_NSPLIT*LPC_N_MAX; k++) {
+               lsf_i[k]=0;
+           }
+           start=0;
+           state_first=0;
+           idxForMax=0;
+           for (k=0; k<iLBCdec_inst->state_short_len; k++) {
+               idxVec[k]=0;
+           }
+           for (k=0; k<CB_NSTAGES; k++) {
+               extra_cb_index[k]=0;
+           }
+           for (k=0; k<CB_NSTAGES; k++) {
+               extra_gain_index[k]=0;
+           }
+           for (i=0; i<iLBCdec_inst->nasub; i++) {
+               for (k=0; k<CB_NSTAGES; k++) {
+                   cb_index[i*CB_NSTAGES+k]=0;
+               }
+           }
+           for (i=0; i<iLBCdec_inst->nasub; i++) {
+               for (k=0; k<CB_NSTAGES; k++) {
+                   gain_index[i*CB_NSTAGES+k]=0;
+               }
+           }
+
+           /* loop over ULP classes */
+
+           for (ulp=0; ulp<3; ulp++) {
+
+               /* LSF */
+               for (k=0; k<LSF_NSPLIT*iLBCdec_inst->lpc_n; k++){
+                   unpack( &pbytes, &lastpart,
+                       iLBCdec_inst->ULP_inst->lsf_bits[k][ulp], &pos);
+                   packcombine(&lsf_i[k], lastpart,
+                       iLBCdec_inst->ULP_inst->lsf_bits[k][ulp]);
+               }
+
+               /* Start block info */
+
+               unpack( &pbytes, &lastpart,
+                   iLBCdec_inst->ULP_inst->start_bits[ulp], &pos);
+               packcombine(&start, lastpart,
+                   iLBCdec_inst->ULP_inst->start_bits[ulp]);
+
+               unpack( &pbytes, &lastpart,
+
+
+
+Andersen, et al.              Experimental                     [Page 71]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   iLBCdec_inst->ULP_inst->startfirst_bits[ulp], &pos);
+               packcombine(&state_first, lastpart,
+                   iLBCdec_inst->ULP_inst->startfirst_bits[ulp]);
+
+               unpack( &pbytes, &lastpart,
+                   iLBCdec_inst->ULP_inst->scale_bits[ulp], &pos);
+               packcombine(&idxForMax, lastpart,
+                   iLBCdec_inst->ULP_inst->scale_bits[ulp]);
+
+               for (k=0; k<iLBCdec_inst->state_short_len; k++) {
+                   unpack( &pbytes, &lastpart,
+                       iLBCdec_inst->ULP_inst->state_bits[ulp], &pos);
+                   packcombine(idxVec+k, lastpart,
+                       iLBCdec_inst->ULP_inst->state_bits[ulp]);
+               }
+
+               /* 23/22 (20ms/30ms) sample block */
+
+               for (k=0; k<CB_NSTAGES; k++) {
+                   unpack( &pbytes, &lastpart,
+                       iLBCdec_inst->ULP_inst->extra_cb_index[k][ulp],
+                       &pos);
+                   packcombine(extra_cb_index+k, lastpart,
+                       iLBCdec_inst->ULP_inst->extra_cb_index[k][ulp]);
+               }
+               for (k=0; k<CB_NSTAGES; k++) {
+                   unpack( &pbytes, &lastpart,
+                       iLBCdec_inst->ULP_inst->extra_cb_gain[k][ulp],
+                       &pos);
+                   packcombine(extra_gain_index+k, lastpart,
+                       iLBCdec_inst->ULP_inst->extra_cb_gain[k][ulp]);
+               }
+
+               /* The two/four (20ms/30ms) 40 sample sub-blocks */
+
+               for (i=0; i<iLBCdec_inst->nasub; i++) {
+                   for (k=0; k<CB_NSTAGES; k++) {
+                       unpack( &pbytes, &lastpart,
+                       iLBCdec_inst->ULP_inst->cb_index[i][k][ulp],
+                           &pos);
+                       packcombine(cb_index+i*CB_NSTAGES+k, lastpart,
+                       iLBCdec_inst->ULP_inst->cb_index[i][k][ulp]);
+                   }
+               }
+
+               for (i=0; i<iLBCdec_inst->nasub; i++) {
+                   for (k=0; k<CB_NSTAGES; k++) {
+                       unpack( &pbytes, &lastpart,
+
+
+
+Andersen, et al.              Experimental                     [Page 72]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                       iLBCdec_inst->ULP_inst->cb_gain[i][k][ulp],
+                           &pos);
+                       packcombine(gain_index+i*CB_NSTAGES+k, lastpart,
+                           iLBCdec_inst->ULP_inst->cb_gain[i][k][ulp]);
+                   }
+               }
+           }
+           /* Extract last bit. If it is 1 this indicates an
+              empty/lost frame */
+           unpack( &pbytes, &last_bit, 1, &pos);
+
+           /* Check for bit errors or empty/lost frames */
+           if (start<1)
+               mode = 0;
+           if (iLBCdec_inst->mode==20 && start>3)
+               mode = 0;
+           if (iLBCdec_inst->mode==30 && start>5)
+               mode = 0;
+           if (last_bit==1)
+               mode = 0;
+
+           if (mode==1) { /* No bit errors was detected,
+                             continue decoding */
+
+               /* adjust index */
+               index_conv_dec(cb_index);
+
+               /* decode the lsf */
+
+               SimplelsfDEQ(lsfdeq, lsf_i, iLBCdec_inst->lpc_n);
+               check=LSF_check(lsfdeq, LPC_FILTERORDER,
+                   iLBCdec_inst->lpc_n);
+               DecoderInterpolateLSF(syntdenum, weightdenum,
+                   lsfdeq, LPC_FILTERORDER, iLBCdec_inst);
+
+               Decode(iLBCdec_inst, decresidual, start, idxForMax,
+                   idxVec, syntdenum, cb_index, gain_index,
+                   extra_cb_index, extra_gain_index,
+                   state_first);
+
+               /* preparing the plc for a future loss! */
+
+               doThePLC(PLCresidual, PLClpc, 0, decresidual,
+                   syntdenum +
+                   (LPC_FILTERORDER + 1)*(iLBCdec_inst->nsub - 1),
+                   (*iLBCdec_inst).last_lag, iLBCdec_inst);
+
+
+
+
+
+Andersen, et al.              Experimental                     [Page 73]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               memcpy(decresidual, PLCresidual,
+                   iLBCdec_inst->blockl*sizeof(float));
+           }
+
+       }
+
+       if (mode == 0) {
+           /* the data is bad (either a PLC call
+            * was made or a severe bit error was detected)
+            */
+
+           /* packet loss conceal */
+
+           memset(zeros, 0, BLOCKL_MAX*sizeof(float));
+
+           one[0] = 1;
+           memset(one+1, 0, LPC_FILTERORDER*sizeof(float));
+
+           start=0;
+
+           doThePLC(PLCresidual, PLClpc, 1, zeros, one,
+               (*iLBCdec_inst).last_lag, iLBCdec_inst);
+           memcpy(decresidual, PLCresidual,
+               iLBCdec_inst->blockl*sizeof(float));
+
+           order_plus_one = LPC_FILTERORDER + 1;
+           for (i = 0; i < iLBCdec_inst->nsub; i++) {
+               memcpy(syntdenum+(i*order_plus_one), PLClpc,
+                   order_plus_one*sizeof(float));
+           }
+       }
+
+       if (iLBCdec_inst->use_enhancer == 1) {
+
+           /* post filtering */
+
+           iLBCdec_inst->last_lag =
+               enhancerInterface(data, decresidual, iLBCdec_inst);
+
+           /* synthesis filtering */
+
+           if (iLBCdec_inst->mode==20) {
+               /* Enhancer has 40 samples delay */
+               i=0;
+               syntFilter(data + i*SUBL,
+                   iLBCdec_inst->old_syntdenum +
+                   (i+iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1),
+                   SUBL, iLBCdec_inst->syntMem);
+
+
+
+Andersen, et al.              Experimental                     [Page 74]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               for (i=1; i < iLBCdec_inst->nsub; i++) {
+                   syntFilter(data + i*SUBL,
+                       syntdenum + (i-1)*(LPC_FILTERORDER+1),
+                       SUBL, iLBCdec_inst->syntMem);
+               }
+           } else if (iLBCdec_inst->mode==30) {
+               /* Enhancer has 80 samples delay */
+               for (i=0; i < 2; i++) {
+                   syntFilter(data + i*SUBL,
+                       iLBCdec_inst->old_syntdenum +
+                       (i+iLBCdec_inst->nsub-2)*(LPC_FILTERORDER+1),
+                       SUBL, iLBCdec_inst->syntMem);
+               }
+               for (i=2; i < iLBCdec_inst->nsub; i++) {
+                   syntFilter(data + i*SUBL,
+                       syntdenum + (i-2)*(LPC_FILTERORDER+1), SUBL,
+                       iLBCdec_inst->syntMem);
+               }
+           }
+
+       } else {
+
+           /* Find last lag */
+           lag = 20;
+           maxcc = xCorrCoef(&decresidual[BLOCKL_MAX-ENH_BLOCKL],
+               &decresidual[BLOCKL_MAX-ENH_BLOCKL-lag], ENH_BLOCKL);
+
+           for (ilag=21; ilag<120; ilag++) {
+               cc = xCorrCoef(&decresidual[BLOCKL_MAX-ENH_BLOCKL],
+                   &decresidual[BLOCKL_MAX-ENH_BLOCKL-ilag],
+                   ENH_BLOCKL);
+
+               if (cc > maxcc) {
+                   maxcc = cc;
+                   lag = ilag;
+               }
+           }
+           iLBCdec_inst->last_lag = lag;
+
+           /* copy data and run synthesis filter */
+
+           memcpy(data, decresidual,
+               iLBCdec_inst->blockl*sizeof(float));
+           for (i=0; i < iLBCdec_inst->nsub; i++) {
+               syntFilter(data + i*SUBL,
+                   syntdenum + i*(LPC_FILTERORDER+1), SUBL,
+                   iLBCdec_inst->syntMem);
+           }
+
+
+
+Andersen, et al.              Experimental                     [Page 75]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       }
+
+       /* high pass filtering on output if desired, otherwise
+          copy to out */
+
+       hpOutput(data, iLBCdec_inst->blockl,
+                   decblock,iLBCdec_inst->hpomem);
+
+       /* memcpy(decblock,data,iLBCdec_inst->blockl*sizeof(float));*/
+
+       memcpy(iLBCdec_inst->old_syntdenum, syntdenum,
+
+           iLBCdec_inst->nsub*(LPC_FILTERORDER+1)*sizeof(float));
+
+       iLBCdec_inst->prev_enh_pl=0;
+
+       if (mode==0) { /* PLC was used */
+           iLBCdec_inst->prev_enh_pl=1;
+       }
+   }
+
+A.6.  iLBC_define.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iLBC_define.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+   #include <string.h>
+
+   #ifndef __iLBC_ILBCDEFINE_H
+   #define __iLBC_ILBCDEFINE_H
+
+   /* general codec settings */
+
+   #define FS                      (float)8000.0
+   #define BLOCKL_20MS             160
+   #define BLOCKL_30MS             240
+   #define BLOCKL_MAX              240
+   #define NSUB_20MS               4
+   #define NSUB_30MS               6
+   #define NSUB_MAX            6
+   #define NASUB_20MS              2
+
+
+
+Andersen, et al.              Experimental                     [Page 76]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #define NASUB_30MS              4
+   #define NASUB_MAX               4
+   #define SUBL                40
+   #define STATE_LEN               80
+   #define STATE_SHORT_LEN_30MS    58
+   #define STATE_SHORT_LEN_20MS    57
+
+   /* LPC settings */
+
+   #define LPC_FILTERORDER         10
+   #define LPC_CHIRP_SYNTDENUM     (float)0.9025
+   #define LPC_CHIRP_WEIGHTDENUM   (float)0.4222
+   #define LPC_LOOKBACK        60
+   #define LPC_N_20MS              1
+   #define LPC_N_30MS              2
+   #define LPC_N_MAX               2
+   #define LPC_ASYMDIFF        20
+   #define LPC_BW                  (float)60.0
+   #define LPC_WN                  (float)1.0001
+   #define LSF_NSPLIT              3
+   #define LSF_NUMBER_OF_STEPS     4
+   #define LPC_HALFORDER           (LPC_FILTERORDER/2)
+
+   /* cb settings */
+
+   #define CB_NSTAGES              3
+   #define CB_EXPAND               2
+   #define CB_MEML                 147
+   #define CB_FILTERLEN        2*4
+   #define CB_HALFFILTERLEN    4
+   #define CB_RESRANGE             34
+   #define CB_MAXGAIN              (float)1.3
+
+   /* enhancer */
+
+   #define ENH_BLOCKL              80  /* block length */
+   #define ENH_BLOCKL_HALF         (ENH_BLOCKL/2)
+   #define ENH_HL                  3   /* 2*ENH_HL+1 is number blocks
+                                          in said second sequence */
+   #define ENH_SLOP            2   /* max difference estimated and
+                                          correct pitch period */
+   #define ENH_PLOCSL              20  /* pitch-estimates and pitch-
+                                          locations buffer length */
+   #define ENH_OVERHANG        2
+   #define ENH_UPS0            4   /* upsampling rate */
+   #define ENH_FL0                 3   /* 2*FLO+1 is the length of
+                                          each filter */
+   #define ENH_VECTL               (ENH_BLOCKL+2*ENH_FL0)
+
+
+
+Andersen, et al.              Experimental                     [Page 77]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #define ENH_CORRDIM             (2*ENH_SLOP+1)
+   #define ENH_NBLOCKS             (BLOCKL_MAX/ENH_BLOCKL)
+   #define ENH_NBLOCKS_EXTRA       5
+   #define ENH_NBLOCKS_TOT         8   /* ENH_NBLOCKS +
+                                          ENH_NBLOCKS_EXTRA */
+   #define ENH_BUFL            (ENH_NBLOCKS_TOT)*ENH_BLOCKL
+   #define ENH_ALPHA0              (float)0.05
+
+   /* Down sampling */
+
+   #define FILTERORDER_DS          7
+   #define DELAY_DS            3
+   #define FACTOR_DS               2
+
+   /* bit stream defs */
+
+   #define NO_OF_BYTES_20MS    38
+   #define NO_OF_BYTES_30MS    50
+   #define NO_OF_WORDS_20MS    19
+   #define NO_OF_WORDS_30MS    25
+   #define STATE_BITS              3
+   #define BYTE_LEN            8
+   #define ULP_CLASSES             3
+
+   /* help parameters */
+
+   #define FLOAT_MAX               (float)1.0e37
+   #define EPS                     (float)2.220446049250313e-016
+   #define PI                      (float)3.14159265358979323846
+   #define MIN_SAMPLE              -32768
+   #define MAX_SAMPLE              32767
+   #define TWO_PI                  (float)6.283185307
+   #define PI2                     (float)0.159154943
+
+   /* type definition encoder instance */
+   typedef struct iLBC_ULP_Inst_t_ {
+       int lsf_bits[6][ULP_CLASSES+2];
+       int start_bits[ULP_CLASSES+2];
+       int startfirst_bits[ULP_CLASSES+2];
+       int scale_bits[ULP_CLASSES+2];
+       int state_bits[ULP_CLASSES+2];
+       int extra_cb_index[CB_NSTAGES][ULP_CLASSES+2];
+       int extra_cb_gain[CB_NSTAGES][ULP_CLASSES+2];
+       int cb_index[NSUB_MAX][CB_NSTAGES][ULP_CLASSES+2];
+       int cb_gain[NSUB_MAX][CB_NSTAGES][ULP_CLASSES+2];
+   } iLBC_ULP_Inst_t;
+
+   /* type definition encoder instance */
+
+
+
+Andersen, et al.              Experimental                     [Page 78]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   typedef struct iLBC_Enc_Inst_t_ {
+
+       /* flag for frame size mode */
+       int mode;
+
+       /* basic parameters for different frame sizes */
+       int blockl;
+       int nsub;
+       int nasub;
+       int no_of_bytes, no_of_words;
+       int lpc_n;
+       int state_short_len;
+       const iLBC_ULP_Inst_t *ULP_inst;
+
+       /* analysis filter state */
+       float anaMem[LPC_FILTERORDER];
+
+       /* old lsf parameters for interpolation */
+       float lsfold[LPC_FILTERORDER];
+       float lsfdeqold[LPC_FILTERORDER];
+
+       /* signal buffer for LP analysis */
+       float lpc_buffer[LPC_LOOKBACK + BLOCKL_MAX];
+
+       /* state of input HP filter */
+       float hpimem[4];
+
+   } iLBC_Enc_Inst_t;
+
+   /* type definition decoder instance */
+   typedef struct iLBC_Dec_Inst_t_ {
+
+       /* flag for frame size mode */
+       int mode;
+
+       /* basic parameters for different frame sizes */
+       int blockl;
+       int nsub;
+       int nasub;
+       int no_of_bytes, no_of_words;
+       int lpc_n;
+       int state_short_len;
+       const iLBC_ULP_Inst_t *ULP_inst;
+
+       /* synthesis filter state */
+       float syntMem[LPC_FILTERORDER];
+
+       /* old LSF for interpolation */
+
+
+
+Andersen, et al.              Experimental                     [Page 79]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float lsfdeqold[LPC_FILTERORDER];
+
+       /* pitch lag estimated in enhancer and used in PLC */
+       int last_lag;
+
+       /* PLC state information */
+       int prevLag, consPLICount, prevPLI, prev_enh_pl;
+       float prevLpc[LPC_FILTERORDER+1];
+       float prevResidual[NSUB_MAX*SUBL];
+       float per;
+       unsigned long seed;
+
+       /* previous synthesis filter parameters */
+       float old_syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+
+       /* state of output HP filter */
+       float hpomem[4];
+
+       /* enhancer state information */
+       int use_enhancer;
+       float enh_buf[ENH_BUFL];
+       float enh_period[ENH_NBLOCKS_TOT];
+
+   } iLBC_Dec_Inst_t;
+
+   #endif
+
+A.7.  constants.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       constants.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_CONSTANTS_H
+   #define __iLBC_CONSTANTS_H
+
+   #include "iLBC_define.h"
+
+
+   /* ULP bit allocation */
+
+
+
+
+Andersen, et al.              Experimental                     [Page 80]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   extern const iLBC_ULP_Inst_t ULP_20msTbl;
+   extern const iLBC_ULP_Inst_t ULP_30msTbl;
+
+   /* high pass filters */
+
+   extern float hpi_zero_coefsTbl[];
+   extern float hpi_pole_coefsTbl[];
+   extern float hpo_zero_coefsTbl[];
+   extern float hpo_pole_coefsTbl[];
+
+   /* low pass filters */
+   extern float lpFilt_coefsTbl[];
+
+   /* LPC analysis and quantization */
+
+   extern float lpc_winTbl[];
+   extern float lpc_asymwinTbl[];
+   extern float lpc_lagwinTbl[];
+   extern float lsfCbTbl[];
+   extern float lsfmeanTbl[];
+   extern int   dim_lsfCbTbl[];
+   extern int   size_lsfCbTbl[];
+   extern float lsf_weightTbl_30ms[];
+   extern float lsf_weightTbl_20ms[];
+
+   /* state quantization tables */
+
+   extern float state_sq3Tbl[];
+   extern float state_frgqTbl[];
+
+   /* gain quantization tables */
+
+   extern float gain_sq3Tbl[];
+   extern float gain_sq4Tbl[];
+   extern float gain_sq5Tbl[];
+
+   /* adaptive codebook definitions */
+
+   extern int search_rangeTbl[5][CB_NSTAGES];
+   extern int memLfTbl[];
+   extern int stMemLTbl;
+   extern float cbfiltersTbl[CB_FILTERLEN];
+
+   /* enhancer definitions */
+
+   extern float polyphaserTbl[];
+   extern float enh_plocsTbl[];
+
+
+
+
+Andersen, et al.              Experimental                     [Page 81]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #endif
+
+A.8.  constants.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       constants.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "iLBC_define.h"
+
+   /* ULP bit allocation */
+
+       /* 20 ms frame */
+
+   const iLBC_ULP_Inst_t ULP_20msTbl = {
+       /* LSF */
+       {   {6,0,0,0,0}, {7,0,0,0,0}, {7,0,0,0,0},
+           {0,0,0,0,0}, {0,0,0,0,0}, {0,0,0,0,0}},
+       /* Start state location, gain and samples */
+       {2,0,0,0,0},
+       {1,0,0,0,0},
+       {6,0,0,0,0},
+       {0,1,2,0,0},
+       /* extra CB index and extra CB gain */
+       {{6,0,1,0,0}, {0,0,7,0,0}, {0,0,7,0,0}},
+       {{2,0,3,0,0}, {1,1,2,0,0}, {0,0,3,0,0}},
+       /* CB index and CB gain */
+       {   {{7,0,1,0,0}, {0,0,7,0,0}, {0,0,7,0,0}},
+           {{0,0,8,0,0}, {0,0,8,0,0}, {0,0,8,0,0}},
+           {{0,0,0,0,0}, {0,0,0,0,0}, {0,0,0,0,0}},
+           {{0,0,0,0,0}, {0,0,0,0,0}, {0,0,0,0,0}}},
+       {   {{1,2,2,0,0}, {1,1,2,0,0}, {0,0,3,0,0}},
+           {{1,1,3,0,0}, {0,2,2,0,0}, {0,0,3,0,0}},
+           {{0,0,0,0,0}, {0,0,0,0,0}, {0,0,0,0,0}},
+           {{0,0,0,0,0}, {0,0,0,0,0}, {0,0,0,0,0}}}
+   };
+
+       /* 30 ms frame */
+
+   const iLBC_ULP_Inst_t ULP_30msTbl = {
+       /* LSF */
+
+
+
+Andersen, et al.              Experimental                     [Page 82]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       {   {6,0,0,0,0}, {7,0,0,0,0}, {7,0,0,0,0},
+           {6,0,0,0,0}, {7,0,0,0,0}, {7,0,0,0,0}},
+       /* Start state location, gain and samples */
+       {3,0,0,0,0},
+       {1,0,0,0,0},
+       {6,0,0,0,0},
+       {0,1,2,0,0},
+       /* extra CB index and extra CB gain */
+       {{4,2,1,0,0}, {0,0,7,0,0}, {0,0,7,0,0}},
+       {{1,1,3,0,0}, {1,1,2,0,0}, {0,0,3,0,0}},
+       /* CB index and CB gain */
+       {   {{6,1,1,0,0}, {0,0,7,0,0}, {0,0,7,0,0}},
+           {{0,7,1,0,0}, {0,0,8,0,0}, {0,0,8,0,0}},
+           {{0,7,1,0,0}, {0,0,8,0,0}, {0,0,8,0,0}},
+           {{0,7,1,0,0}, {0,0,8,0,0}, {0,0,8,0,0}}},
+       {   {{1,2,2,0,0}, {1,2,1,0,0}, {0,0,3,0,0}},
+           {{0,2,3,0,0}, {0,2,2,0,0}, {0,0,3,0,0}},
+           {{0,1,4,0,0}, {0,1,3,0,0}, {0,0,3,0,0}},
+           {{0,1,4,0,0}, {0,1,3,0,0}, {0,0,3,0,0}}}
+   };
+
+   /* HP Filters */
+
+   float hpi_zero_coefsTbl[3] = {
+       (float)0.92727436, (float)-1.8544941, (float)0.92727436
+   };
+   float hpi_pole_coefsTbl[3] = {
+       (float)1.0, (float)-1.9059465, (float)0.9114024
+   };
+   float hpo_zero_coefsTbl[3] = {
+       (float)0.93980581, (float)-1.8795834, (float)0.93980581
+   };
+   float hpo_pole_coefsTbl[3] = {
+       (float)1.0, (float)-1.9330735, (float)0.93589199
+   };
+
+   /* LP Filter */
+
+   float lpFilt_coefsTbl[FILTERORDER_DS]={
+       (float)-0.066650, (float)0.125000, (float)0.316650,
+       (float)0.414063, (float)0.316650,
+       (float)0.125000, (float)-0.066650
+   };
+
+   /* State quantization tables */
+
+   float state_sq3Tbl[8] = {
+       (float)-3.719849, (float)-2.177490, (float)-1.130005,
+
+
+
+Andersen, et al.              Experimental                     [Page 83]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       (float)-0.309692, (float)0.444214, (float)1.329712,
+       (float)2.436279, (float)3.983887
+   };
+
+   float state_frgqTbl[64] = {
+       (float)1.000085, (float)1.071695, (float)1.140395,
+       (float)1.206868, (float)1.277188, (float)1.351503,
+       (float)1.429380, (float)1.500727, (float)1.569049,
+       (float)1.639599, (float)1.707071, (float)1.781531,
+       (float)1.840799, (float)1.901550, (float)1.956695,
+       (float)2.006750, (float)2.055474, (float)2.102787,
+       (float)2.142819, (float)2.183592, (float)2.217962,
+       (float)2.257177, (float)2.295739, (float)2.332967,
+       (float)2.369248, (float)2.402792, (float)2.435080,
+       (float)2.468598, (float)2.503394, (float)2.539284,
+       (float)2.572944, (float)2.605036, (float)2.636331,
+       (float)2.668939, (float)2.698780, (float)2.729101,
+       (float)2.759786, (float)2.789834, (float)2.818679,
+       (float)2.848074, (float)2.877470, (float)2.906899,
+       (float)2.936655, (float)2.967804, (float)3.000115,
+       (float)3.033367, (float)3.066355, (float)3.104231,
+       (float)3.141499, (float)3.183012, (float)3.222952,
+       (float)3.265433, (float)3.308441, (float)3.350823,
+       (float)3.395275, (float)3.442793, (float)3.490801,
+       (float)3.542514, (float)3.604064, (float)3.666050,
+       (float)3.740994, (float)3.830749, (float)3.938770,
+       (float)4.101764
+   };
+
+   /* CB tables */
+
+   int search_rangeTbl[5][CB_NSTAGES]={{58,58,58}, {108,44,44},
+               {108,108,108}, {108,108,108}, {108,108,108}};
+   int stMemLTbl=85;
+   int memLfTbl[NASUB_MAX]={147,147,147,147};
+
+   /* expansion filter(s) */
+
+   float cbfiltersTbl[CB_FILTERLEN]={
+       (float)-0.034180, (float)0.108887, (float)-0.184326,
+       (float)0.806152,  (float)0.713379, (float)-0.144043,
+       (float)0.083740,  (float)-0.033691
+   };
+
+   /* Gain Quantization */
+
+   float gain_sq3Tbl[8]={
+       (float)-1.000000,  (float)-0.659973,  (float)-0.330017,
+
+
+
+Andersen, et al.              Experimental                     [Page 84]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       (float)0.000000, (float)0.250000, (float)0.500000,
+       (float)0.750000, (float)1.00000};
+
+   float gain_sq4Tbl[16]={
+       (float)-1.049988, (float)-0.900024, (float)-0.750000,
+       (float)-0.599976, (float)-0.450012, (float)-0.299988,
+       (float)-0.150024, (float)0.000000, (float)0.150024,
+       (float)0.299988, (float)0.450012, (float)0.599976,
+       (float)0.750000, (float)0.900024, (float)1.049988,
+       (float)1.200012};
+
+   float gain_sq5Tbl[32]={
+       (float)0.037476, (float)0.075012, (float)0.112488,
+       (float)0.150024, (float)0.187500, (float)0.224976,
+       (float)0.262512, (float)0.299988, (float)0.337524,
+       (float)0.375000, (float)0.412476, (float)0.450012,
+       (float)0.487488, (float)0.525024, (float)0.562500,
+       (float)0.599976, (float)0.637512, (float)0.674988,
+       (float)0.712524, (float)0.750000, (float)0.787476,
+       (float)0.825012, (float)0.862488, (float)0.900024,
+       (float)0.937500, (float)0.974976, (float)1.012512,
+       (float)1.049988, (float)1.087524, (float)1.125000,
+       (float)1.162476, (float)1.200012};
+
+   /* Enhancer - Upsamling a factor 4 (ENH_UPS0 = 4) */
+   float polyphaserTbl[ENH_UPS0*(2*ENH_FL0+1)]={
+       (float)0.000000, (float)0.000000, (float)0.000000,
+   (float)1.000000,
+           (float)0.000000, (float)0.000000, (float)0.000000,
+       (float)0.015625, (float)-0.076904, (float)0.288330,
+   (float)0.862061,
+           (float)-0.106445, (float)0.018799, (float)-0.015625,
+       (float)0.023682, (float)-0.124268, (float)0.601563,
+   (float)0.601563,
+           (float)-0.124268, (float)0.023682, (float)-0.023682,
+       (float)0.018799, (float)-0.106445, (float)0.862061,
+   (float)0.288330,
+           (float)-0.076904, (float)0.015625, (float)-0.018799};
+
+   float enh_plocsTbl[ENH_NBLOCKS_TOT] = {(float)40.0, (float)120.0,
+               (float)200.0, (float)280.0, (float)360.0,
+               (float)440.0, (float)520.0, (float)600.0};
+
+   /* LPC analysis and quantization */
+
+   int dim_lsfCbTbl[LSF_NSPLIT] = {3, 3, 4};
+   int size_lsfCbTbl[LSF_NSPLIT] = {64,128,128};
+
+
+
+
+Andersen, et al.              Experimental                     [Page 85]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   float lsfmeanTbl[LPC_FILTERORDER] = {
+       (float)0.281738, (float)0.445801, (float)0.663330,
+       (float)0.962524, (float)1.251831, (float)1.533081,
+       (float)1.850586, (float)2.137817, (float)2.481445,
+       (float)2.777344};
+
+   float lsf_weightTbl_30ms[6] = {(float)(1.0/2.0), (float)1.0,
+   (float)(2.0/3.0),
+       (float)(1.0/3.0), (float)0.0, (float)0.0};
+
+   float lsf_weightTbl_20ms[4] = {(float)(3.0/4.0), (float)(2.0/4.0),
+       (float)(1.0/4.0), (float)(0.0)};
+
+   /* Hanning LPC window */
+   float lpc_winTbl[BLOCKL_MAX]={
+       (float)0.000183, (float)0.000671, (float)0.001526,
+       (float)0.002716, (float)0.004242, (float)0.006104,
+       (float)0.008301, (float)0.010834, (float)0.013702,
+       (float)0.016907, (float)0.020416, (float)0.024261,
+       (float)0.028442, (float)0.032928, (float)0.037750,
+       (float)0.042877, (float)0.048309, (float)0.054047,
+       (float)0.060089, (float)0.066437, (float)0.073090,
+       (float)0.080017, (float)0.087219, (float)0.094727,
+       (float)0.102509, (float)0.110535, (float)0.118835,
+       (float)0.127411, (float)0.136230, (float)0.145294,
+       (float)0.154602, (float)0.164154, (float)0.173920,
+       (float)0.183899, (float)0.194122, (float)0.204529,
+       (float)0.215149, (float)0.225952, (float)0.236938,
+       (float)0.248108, (float)0.259460, (float)0.270966,
+       (float)0.282654, (float)0.294464, (float)0.306396,
+       (float)0.318481, (float)0.330688, (float)0.343018,
+       (float)0.355438, (float)0.367981, (float)0.380585,
+       (float)0.393280, (float)0.406067, (float)0.418884,
+       (float)0.431763, (float)0.444702, (float)0.457672,
+       (float)0.470673, (float)0.483704, (float)0.496735,
+       (float)0.509766, (float)0.522797, (float)0.535828,
+       (float)0.548798, (float)0.561768, (float)0.574677,
+       (float)0.587524, (float)0.600342, (float)0.613068,
+       (float)0.625732, (float)0.638306, (float)0.650787,
+       (float)0.663147, (float)0.675415, (float)0.687561,
+       (float)0.699585, (float)0.711487, (float)0.723206,
+       (float)0.734802, (float)0.746216, (float)0.757477,
+       (float)0.768585, (float)0.779480, (float)0.790192,
+       (float)0.800720, (float)0.811005, (float)0.821106,
+       (float)0.830994, (float)0.840668, (float)0.850067,
+       (float)0.859253, (float)0.868225, (float)0.876892,
+       (float)0.885345, (float)0.893524, (float)0.901428,
+       (float)0.909058, (float)0.916412, (float)0.923492,
+
+
+
+Andersen, et al.              Experimental                     [Page 86]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       (float)0.930267, (float)0.936768, (float)0.942963,
+       (float)0.948853, (float)0.954437, (float)0.959717,
+       (float)0.964691, (float)0.969360, (float)0.973694,
+       (float)0.977692, (float)0.981384, (float)0.984741,
+       (float)0.987762, (float)0.990479, (float)0.992828,
+       (float)0.994873, (float)0.996552, (float)0.997925,
+       (float)0.998932, (float)0.999603, (float)0.999969,
+       (float)0.999969, (float)0.999603, (float)0.998932,
+       (float)0.997925, (float)0.996552, (float)0.994873,
+       (float)0.992828, (float)0.990479, (float)0.987762,
+       (float)0.984741, (float)0.981384, (float)0.977692,
+       (float)0.973694, (float)0.969360, (float)0.964691,
+       (float)0.959717, (float)0.954437, (float)0.948853,
+       (float)0.942963, (float)0.936768, (float)0.930267,
+       (float)0.923492, (float)0.916412, (float)0.909058,
+       (float)0.901428, (float)0.893524, (float)0.885345,
+       (float)0.876892, (float)0.868225, (float)0.859253,
+       (float)0.850067, (float)0.840668, (float)0.830994,
+       (float)0.821106, (float)0.811005, (float)0.800720,
+       (float)0.790192, (float)0.779480, (float)0.768585,
+       (float)0.757477, (float)0.746216, (float)0.734802,
+       (float)0.723206, (float)0.711487, (float)0.699585,
+       (float)0.687561, (float)0.675415, (float)0.663147,
+       (float)0.650787, (float)0.638306, (float)0.625732,
+       (float)0.613068, (float)0.600342, (float)0.587524,
+       (float)0.574677, (float)0.561768, (float)0.548798,
+       (float)0.535828, (float)0.522797, (float)0.509766,
+       (float)0.496735, (float)0.483704, (float)0.470673,
+       (float)0.457672, (float)0.444702, (float)0.431763,
+       (float)0.418884, (float)0.406067, (float)0.393280,
+       (float)0.380585, (float)0.367981, (float)0.355438,
+       (float)0.343018, (float)0.330688, (float)0.318481,
+       (float)0.306396, (float)0.294464, (float)0.282654,
+       (float)0.270966, (float)0.259460, (float)0.248108,
+       (float)0.236938, (float)0.225952, (float)0.215149,
+       (float)0.204529, (float)0.194122, (float)0.183899,
+       (float)0.173920, (float)0.164154, (float)0.154602,
+       (float)0.145294, (float)0.136230, (float)0.127411,
+       (float)0.118835, (float)0.110535, (float)0.102509,
+       (float)0.094727, (float)0.087219, (float)0.080017,
+       (float)0.073090, (float)0.066437, (float)0.060089,
+       (float)0.054047, (float)0.048309, (float)0.042877,
+       (float)0.037750, (float)0.032928, (float)0.028442,
+       (float)0.024261, (float)0.020416, (float)0.016907,
+       (float)0.013702, (float)0.010834, (float)0.008301,
+       (float)0.006104, (float)0.004242, (float)0.002716,
+       (float)0.001526, (float)0.000671, (float)0.000183
+   };
+
+
+
+Andersen, et al.              Experimental                     [Page 87]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   /* Asymmetric LPC window */
+   float lpc_asymwinTbl[BLOCKL_MAX]={
+       (float)0.000061, (float)0.000214, (float)0.000458,
+       (float)0.000824, (float)0.001282, (float)0.001831,
+       (float)0.002472, (float)0.003235, (float)0.004120,
+       (float)0.005066, (float)0.006134, (float)0.007294,
+       (float)0.008545, (float)0.009918, (float)0.011383,
+       (float)0.012939, (float)0.014587, (float)0.016357,
+       (float)0.018219, (float)0.020172, (float)0.022217,
+       (float)0.024353, (float)0.026611, (float)0.028961,
+       (float)0.031372, (float)0.033905, (float)0.036530,
+       (float)0.039276, (float)0.042084, (float)0.044983,
+       (float)0.047974, (float)0.051086, (float)0.054260,
+       (float)0.057526, (float)0.060883, (float)0.064331,
+       (float)0.067871, (float)0.071503, (float)0.075226,
+       (float)0.079010, (float)0.082916, (float)0.086884,
+       (float)0.090942, (float)0.095062, (float)0.099304,
+       (float)0.103607, (float)0.107971, (float)0.112427,
+       (float)0.116974, (float)0.121582, (float)0.126282,
+       (float)0.131073, (float)0.135895, (float)0.140839,
+       (float)0.145813, (float)0.150879, (float)0.156006,
+       (float)0.161224, (float)0.166504, (float)0.171844,
+       (float)0.177246, (float)0.182709, (float)0.188263,
+       (float)0.193848, (float)0.199524, (float)0.205231,
+       (float)0.211029, (float)0.216858, (float)0.222778,
+       (float)0.228729, (float)0.234741, (float)0.240814,
+       (float)0.246918, (float)0.253082, (float)0.259308,
+       (float)0.265564, (float)0.271881, (float)0.278259,
+       (float)0.284668, (float)0.291107, (float)0.297607,
+       (float)0.304138, (float)0.310730, (float)0.317322,
+       (float)0.323975, (float)0.330658, (float)0.337372,
+       (float)0.344147, (float)0.350922, (float)0.357727,
+       (float)0.364594, (float)0.371460, (float)0.378357,
+       (float)0.385284, (float)0.392212, (float)0.399170,
+       (float)0.406158, (float)0.413177, (float)0.420197,
+       (float)0.427246, (float)0.434296, (float)0.441376,
+       (float)0.448456, (float)0.455536, (float)0.462646,
+       (float)0.469757, (float)0.476868, (float)0.483978,
+       (float)0.491089, (float)0.498230, (float)0.505341,
+       (float)0.512451, (float)0.519592, (float)0.526703,
+       (float)0.533813, (float)0.540924, (float)0.548004,
+       (float)0.555084, (float)0.562164, (float)0.569244,
+       (float)0.576294, (float)0.583313, (float)0.590332,
+       (float)0.597321, (float)0.604309, (float)0.611267,
+       (float)0.618195, (float)0.625092, (float)0.631989,
+       (float)0.638855, (float)0.645660, (float)0.652466,
+       (float)0.659241, (float)0.665985, (float)0.672668,
+       (float)0.679352, (float)0.685974, (float)0.692566,
+
+
+
+Andersen, et al.              Experimental                     [Page 88]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       (float)0.699127, (float)0.705658, (float)0.712128,
+       (float)0.718536, (float)0.724945, (float)0.731262,
+       (float)0.737549, (float)0.743805, (float)0.750000,
+       (float)0.756134, (float)0.762238, (float)0.768280,
+       (float)0.774261, (float)0.780182, (float)0.786072,
+       (float)0.791870, (float)0.797638, (float)0.803314,
+       (float)0.808960, (float)0.814514, (float)0.820038,
+       (float)0.825470, (float)0.830841, (float)0.836151,
+       (float)0.841400, (float)0.846558, (float)0.851654,
+       (float)0.856689, (float)0.861633, (float)0.866516,
+       (float)0.871338, (float)0.876068, (float)0.880737,
+       (float)0.885315, (float)0.889801, (float)0.894226,
+       (float)0.898560, (float)0.902832, (float)0.907013,
+       (float)0.911102, (float)0.915100, (float)0.919037,
+       (float)0.922882, (float)0.926636, (float)0.930328,
+       (float)0.933899, (float)0.937408, (float)0.940796,
+       (float)0.944122, (float)0.947357, (float)0.950470,
+       (float)0.953522, (float)0.956482, (float)0.959351,
+       (float)0.962097, (float)0.964783, (float)0.967377,
+       (float)0.969849, (float)0.972229, (float)0.974518,
+       (float)0.976715, (float)0.978821, (float)0.980835,
+       (float)0.982727, (float)0.984528, (float)0.986237,
+       (float)0.987854, (float)0.989380, (float)0.990784,
+       (float)0.992096, (float)0.993317, (float)0.994415,
+       (float)0.995422, (float)0.996338, (float)0.997162,
+       (float)0.997864, (float)0.998474, (float)0.998962,
+       (float)0.999390, (float)0.999695, (float)0.999878,
+       (float)0.999969, (float)0.999969, (float)0.996918,
+       (float)0.987701, (float)0.972382, (float)0.951050,
+       (float)0.923889, (float)0.891022, (float)0.852631,
+       (float)0.809021, (float)0.760406, (float)0.707092,
+       (float)0.649445, (float)0.587799, (float)0.522491,
+       (float)0.453979, (float)0.382690, (float)0.309021,
+       (float)0.233459, (float)0.156433, (float)0.078461
+   };
+
+   /* Lag window for LPC */
+   float lpc_lagwinTbl[LPC_FILTERORDER + 1]={
+       (float)1.000100, (float)0.998890, (float)0.995569,
+           (float)0.990057, (float)0.982392,
+       (float)0.972623, (float)0.960816, (float)0.947047,
+           (float)0.931405, (float)0.913989, (float)0.894909};
+
+   /* LSF quantization*/
+   float lsfCbTbl[64 * 3 + 128 * 3 + 128 * 4] = {
+   (float)0.155396, (float)0.273193, (float)0.451172,
+   (float)0.390503, (float)0.648071, (float)1.002075,
+   (float)0.440186, (float)0.692261, (float)0.955688,
+
+
+
+Andersen, et al.              Experimental                     [Page 89]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)0.343628, (float)0.642334, (float)1.071533,
+   (float)0.318359, (float)0.491577, (float)0.670532,
+   (float)0.193115, (float)0.375488, (float)0.725708,
+   (float)0.364136, (float)0.510376, (float)0.658691,
+   (float)0.297485, (float)0.527588, (float)0.842529,
+   (float)0.227173, (float)0.365967, (float)0.563110,
+   (float)0.244995, (float)0.396729, (float)0.636475,
+   (float)0.169434, (float)0.300171, (float)0.520264,
+   (float)0.312866, (float)0.464478, (float)0.643188,
+   (float)0.248535, (float)0.429932, (float)0.626099,
+   (float)0.236206, (float)0.491333, (float)0.817139,
+   (float)0.334961, (float)0.625122, (float)0.895752,
+   (float)0.343018, (float)0.518555, (float)0.698608,
+   (float)0.372803, (float)0.659790, (float)0.945435,
+   (float)0.176880, (float)0.316528, (float)0.581421,
+   (float)0.416382, (float)0.625977, (float)0.805176,
+   (float)0.303223, (float)0.568726, (float)0.915039,
+   (float)0.203613, (float)0.351440, (float)0.588135,
+   (float)0.221191, (float)0.375000, (float)0.614746,
+   (float)0.199951, (float)0.323364, (float)0.476074,
+   (float)0.300781, (float)0.433350, (float)0.566895,
+   (float)0.226196, (float)0.354004, (float)0.507568,
+   (float)0.300049, (float)0.508179, (float)0.711670,
+   (float)0.312012, (float)0.492676, (float)0.763428,
+   (float)0.329956, (float)0.541016, (float)0.795776,
+   (float)0.373779, (float)0.604614, (float)0.928833,
+   (float)0.210571, (float)0.452026, (float)0.755249,
+   (float)0.271118, (float)0.473267, (float)0.662476,
+   (float)0.285522, (float)0.436890, (float)0.634399,
+   (float)0.246704, (float)0.565552, (float)0.859009,
+   (float)0.270508, (float)0.406250, (float)0.553589,
+   (float)0.361450, (float)0.578491, (float)0.813843,
+   (float)0.342651, (float)0.482788, (float)0.622437,
+   (float)0.340332, (float)0.549438, (float)0.743164,
+   (float)0.200439, (float)0.336304, (float)0.540894,
+   (float)0.407837, (float)0.644775, (float)0.895142,
+   (float)0.294678, (float)0.454834, (float)0.699097,
+   (float)0.193115, (float)0.344482, (float)0.643188,
+   (float)0.275757, (float)0.420776, (float)0.598755,
+   (float)0.380493, (float)0.608643, (float)0.861084,
+   (float)0.222778, (float)0.426147, (float)0.676514,
+   (float)0.407471, (float)0.700195, (float)1.053101,
+   (float)0.218384, (float)0.377197, (float)0.669922,
+   (float)0.313232, (float)0.454102, (float)0.600952,
+   (float)0.347412, (float)0.571533, (float)0.874146,
+   (float)0.238037, (float)0.405396, (float)0.729492,
+   (float)0.223877, (float)0.412964, (float)0.822021,
+   (float)0.395264, (float)0.582153, (float)0.743896,
+
+
+
+Andersen, et al.              Experimental                     [Page 90]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)0.247925, (float)0.485596, (float)0.720581,
+   (float)0.229126, (float)0.496582, (float)0.907715,
+   (float)0.260132, (float)0.566895, (float)1.012695,
+   (float)0.337402, (float)0.611572, (float)0.978149,
+   (float)0.267822, (float)0.447632, (float)0.769287,
+   (float)0.250610, (float)0.381714, (float)0.530029,
+   (float)0.430054, (float)0.805054, (float)1.221924,
+   (float)0.382568, (float)0.544067, (float)0.701660,
+   (float)0.383545, (float)0.710327, (float)1.149170,
+   (float)0.271362, (float)0.529053, (float)0.775513,
+   (float)0.246826, (float)0.393555, (float)0.588623,
+   (float)0.266846, (float)0.422119, (float)0.676758,
+   (float)0.311523, (float)0.580688, (float)0.838623,
+   (float)1.331177, (float)1.576782, (float)1.779541,
+   (float)1.160034, (float)1.401978, (float)1.768188,
+   (float)1.161865, (float)1.525146, (float)1.715332,
+   (float)0.759521, (float)0.913940, (float)1.119873,
+   (float)0.947144, (float)1.121338, (float)1.282471,
+   (float)1.015015, (float)1.557007, (float)1.804932,
+   (float)1.172974, (float)1.402100, (float)1.692627,
+   (float)1.087524, (float)1.474243, (float)1.665405,
+   (float)0.899536, (float)1.105225, (float)1.406250,
+   (float)1.148438, (float)1.484741, (float)1.796265,
+   (float)0.785645, (float)1.209839, (float)1.567749,
+   (float)0.867798, (float)1.166504, (float)1.450684,
+   (float)0.922485, (float)1.229858, (float)1.420898,
+   (float)0.791260, (float)1.123291, (float)1.409546,
+   (float)0.788940, (float)0.966064, (float)1.340332,
+   (float)1.051147, (float)1.272827, (float)1.556641,
+   (float)0.866821, (float)1.181152, (float)1.538818,
+   (float)0.906738, (float)1.373535, (float)1.607910,
+   (float)1.244751, (float)1.581421, (float)1.933838,
+   (float)0.913940, (float)1.337280, (float)1.539673,
+   (float)0.680542, (float)0.959229, (float)1.662720,
+   (float)0.887207, (float)1.430542, (float)1.800781,
+   (float)0.912598, (float)1.433594, (float)1.683960,
+   (float)0.860474, (float)1.060303, (float)1.455322,
+   (float)1.005127, (float)1.381104, (float)1.706909,
+   (float)0.800781, (float)1.363892, (float)1.829102,
+   (float)0.781860, (float)1.124390, (float)1.505981,
+   (float)1.003662, (float)1.471436, (float)1.684692,
+   (float)0.981323, (float)1.309570, (float)1.618042,
+   (float)1.228760, (float)1.554321, (float)1.756470,
+   (float)0.734375, (float)0.895752, (float)1.225586,
+   (float)0.841797, (float)1.055664, (float)1.249268,
+   (float)0.920166, (float)1.119385, (float)1.486206,
+   (float)0.894409, (float)1.539063, (float)1.828979,
+   (float)1.283691, (float)1.543335, (float)1.858276,
+
+
+
+Andersen, et al.              Experimental                     [Page 91]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)0.676025, (float)0.933105, (float)1.490845,
+   (float)0.821289, (float)1.491821, (float)1.739868,
+   (float)0.923218, (float)1.144653, (float)1.580566,
+   (float)1.057251, (float)1.345581, (float)1.635864,
+   (float)0.888672, (float)1.074951, (float)1.353149,
+   (float)0.942749, (float)1.195435, (float)1.505493,
+   (float)1.492310, (float)1.788086, (float)2.039673,
+   (float)1.070313, (float)1.634399, (float)1.860962,
+   (float)1.253296, (float)1.488892, (float)1.686035,
+   (float)0.647095, (float)0.864014, (float)1.401855,
+   (float)0.866699, (float)1.254883, (float)1.453369,
+   (float)1.063965, (float)1.532593, (float)1.731323,
+   (float)1.167847, (float)1.521484, (float)1.884033,
+   (float)0.956055, (float)1.502075, (float)1.745605,
+   (float)0.928711, (float)1.288574, (float)1.479614,
+   (float)1.088013, (float)1.380737, (float)1.570801,
+   (float)0.905029, (float)1.186768, (float)1.371948,
+   (float)1.057861, (float)1.421021, (float)1.617432,
+   (float)1.108276, (float)1.312500, (float)1.501465,
+   (float)0.979492, (float)1.416992, (float)1.624268,
+   (float)1.276001, (float)1.661011, (float)2.007935,
+   (float)0.993042, (float)1.168579, (float)1.331665,
+   (float)0.778198, (float)0.944946, (float)1.235962,
+   (float)1.223755, (float)1.491333, (float)1.815674,
+   (float)0.852661, (float)1.350464, (float)1.722290,
+   (float)1.134766, (float)1.593140, (float)1.787354,
+   (float)1.051392, (float)1.339722, (float)1.531006,
+   (float)0.803589, (float)1.271240, (float)1.652100,
+   (float)0.755737, (float)1.143555, (float)1.639404,
+   (float)0.700928, (float)0.837280, (float)1.130371,
+   (float)0.942749, (float)1.197876, (float)1.669800,
+   (float)0.993286, (float)1.378296, (float)1.566528,
+   (float)0.801025, (float)1.095337, (float)1.298950,
+   (float)0.739990, (float)1.032959, (float)1.383667,
+   (float)0.845703, (float)1.072266, (float)1.543823,
+   (float)0.915649, (float)1.072266, (float)1.224487,
+   (float)1.021973, (float)1.226196, (float)1.481323,
+   (float)0.999878, (float)1.204102, (float)1.555908,
+   (float)0.722290, (float)0.913940, (float)1.340210,
+   (float)0.673340, (float)0.835938, (float)1.259521,
+   (float)0.832397, (float)1.208374, (float)1.394165,
+   (float)0.962158, (float)1.576172, (float)1.912842,
+   (float)1.166748, (float)1.370850, (float)1.556763,
+   (float)0.946289, (float)1.138550, (float)1.400391,
+   (float)1.035034, (float)1.218262, (float)1.386475,
+   (float)1.393799, (float)1.717773, (float)2.000244,
+   (float)0.972656, (float)1.260986, (float)1.760620,
+   (float)1.028198, (float)1.288452, (float)1.484619,
+
+
+
+Andersen, et al.              Experimental                     [Page 92]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)0.773560, (float)1.258057, (float)1.756714,
+   (float)1.080322, (float)1.328003, (float)1.742676,
+   (float)0.823975, (float)1.450806, (float)1.917725,
+   (float)0.859009, (float)1.016602, (float)1.191895,
+   (float)0.843994, (float)1.131104, (float)1.645020,
+   (float)1.189697, (float)1.702759, (float)1.894409,
+   (float)1.346680, (float)1.763184, (float)2.066040,
+   (float)0.980469, (float)1.253784, (float)1.441650,
+   (float)1.338135, (float)1.641968, (float)1.932739,
+   (float)1.223267, (float)1.424194, (float)1.626465,
+   (float)0.765747, (float)1.004150, (float)1.579102,
+   (float)1.042847, (float)1.269165, (float)1.647461,
+   (float)0.968750, (float)1.257568, (float)1.555786,
+   (float)0.826294, (float)0.993408, (float)1.275146,
+   (float)0.742310, (float)0.950439, (float)1.430542,
+   (float)1.054321, (float)1.439819, (float)1.828003,
+   (float)1.072998, (float)1.261719, (float)1.441895,
+   (float)0.859375, (float)1.036377, (float)1.314819,
+   (float)0.895752, (float)1.267212, (float)1.605591,
+   (float)0.805420, (float)0.962891, (float)1.142334,
+   (float)0.795654, (float)1.005493, (float)1.468506,
+   (float)1.105347, (float)1.313843, (float)1.584839,
+   (float)0.792236, (float)1.221802, (float)1.465698,
+   (float)1.170532, (float)1.467651, (float)1.664063,
+   (float)0.838257, (float)1.153198, (float)1.342163,
+   (float)0.968018, (float)1.198242, (float)1.391235,
+   (float)1.250122, (float)1.623535, (float)1.823608,
+   (float)0.711670, (float)1.058350, (float)1.512085,
+   (float)1.204834, (float)1.454468, (float)1.739136,
+   (float)1.137451, (float)1.421753, (float)1.620117,
+   (float)0.820435, (float)1.322754, (float)1.578247,
+   (float)0.798706, (float)1.005005, (float)1.213867,
+   (float)0.980713, (float)1.324951, (float)1.512939,
+   (float)1.112305, (float)1.438843, (float)1.735596,
+   (float)1.135498, (float)1.356689, (float)1.635742,
+   (float)1.101318, (float)1.387451, (float)1.686523,
+   (float)0.849854, (float)1.276978, (float)1.523438,
+   (float)1.377930, (float)1.627563, (float)1.858154,
+   (float)0.884888, (float)1.095459, (float)1.287476,
+   (float)1.289795, (float)1.505859, (float)1.756592,
+   (float)0.817505, (float)1.384155, (float)1.650513,
+   (float)1.446655, (float)1.702148, (float)1.931885,
+   (float)0.835815, (float)1.023071, (float)1.385376,
+   (float)0.916626, (float)1.139038, (float)1.335327,
+   (float)0.980103, (float)1.174072, (float)1.453735,
+   (float)1.705688, (float)2.153809, (float)2.398315, (float)2.743408,
+   (float)1.797119, (float)2.016846, (float)2.445679, (float)2.701904,
+   (float)1.990356, (float)2.219116, (float)2.576416, (float)2.813477,
+
+
+
+Andersen, et al.              Experimental                     [Page 93]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)1.849365, (float)2.190918, (float)2.611572, (float)2.835083,
+   (float)1.657959, (float)1.854370, (float)2.159058, (float)2.726196,
+   (float)1.437744, (float)1.897705, (float)2.253174, (float)2.655396,
+   (float)2.028687, (float)2.247314, (float)2.542358, (float)2.875854,
+   (float)1.736938, (float)1.922119, (float)2.185913, (float)2.743408,
+   (float)1.521606, (float)1.870972, (float)2.526855, (float)2.786987,
+   (float)1.841431, (float)2.050659, (float)2.463623, (float)2.857666,
+   (float)1.590088, (float)2.067261, (float)2.427979, (float)2.794434,
+   (float)1.746826, (float)2.057373, (float)2.320190, (float)2.800781,
+   (float)1.734619, (float)1.940552, (float)2.306030, (float)2.826416,
+   (float)1.786255, (float)2.204468, (float)2.457520, (float)2.795288,
+   (float)1.861084, (float)2.170532, (float)2.414551, (float)2.763672,
+   (float)2.001465, (float)2.307617, (float)2.552734, (float)2.811890,
+   (float)1.784424, (float)2.124146, (float)2.381592, (float)2.645508,
+   (float)1.888794, (float)2.135864, (float)2.418579, (float)2.861206,
+   (float)2.301147, (float)2.531250, (float)2.724976, (float)2.913086,
+   (float)1.837769, (float)2.051270, (float)2.261963, (float)2.553223,
+   (float)2.012939, (float)2.221191, (float)2.440186, (float)2.678101,
+   (float)1.429565, (float)1.858276, (float)2.582275, (float)2.845703,
+   (float)1.622803, (float)1.897705, (float)2.367310, (float)2.621094,
+   (float)1.581543, (float)1.960449, (float)2.515869, (float)2.736450,
+   (float)1.419434, (float)1.933960, (float)2.394653, (float)2.746704,
+   (float)1.721924, (float)2.059570, (float)2.421753, (float)2.769653,
+   (float)1.911011, (float)2.220703, (float)2.461060, (float)2.740723,
+   (float)1.581177, (float)1.860840, (float)2.516968, (float)2.874634,
+   (float)1.870361, (float)2.098755, (float)2.432373, (float)2.656494,
+   (float)2.059692, (float)2.279785, (float)2.495605, (float)2.729370,
+   (float)1.815674, (float)2.181519, (float)2.451538, (float)2.680542,
+   (float)1.407959, (float)1.768311, (float)2.343018, (float)2.668091,
+   (float)2.168701, (float)2.394653, (float)2.604736, (float)2.829346,
+   (float)1.636230, (float)1.865723, (float)2.329102, (float)2.824219,
+   (float)1.878906, (float)2.139526, (float)2.376709, (float)2.679810,
+   (float)1.765381, (float)1.971802, (float)2.195435, (float)2.586914,
+   (float)2.164795, (float)2.410889, (float)2.673706, (float)2.903198,
+   (float)2.071899, (float)2.331055, (float)2.645874, (float)2.907104,
+   (float)2.026001, (float)2.311523, (float)2.594849, (float)2.863892,
+   (float)1.948975, (float)2.180786, (float)2.514893, (float)2.797852,
+   (float)1.881836, (float)2.130859, (float)2.478149, (float)2.804199,
+   (float)2.238159, (float)2.452759, (float)2.652832, (float)2.868286,
+   (float)1.897949, (float)2.101685, (float)2.524292, (float)2.880127,
+   (float)1.856445, (float)2.074585, (float)2.541016, (float)2.791748,
+   (float)1.695557, (float)2.199097, (float)2.506226, (float)2.742676,
+   (float)1.612671, (float)1.877075, (float)2.435425, (float)2.732910,
+   (float)1.568848, (float)1.786499, (float)2.194580, (float)2.768555,
+   (float)1.953369, (float)2.164551, (float)2.486938, (float)2.874023,
+   (float)1.388306, (float)1.725342, (float)2.384521, (float)2.771851,
+   (float)2.115356, (float)2.337769, (float)2.592896, (float)2.864014,
+   (float)1.905762, (float)2.111328, (float)2.363525, (float)2.789307,
+
+
+
+Andersen, et al.              Experimental                     [Page 94]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)1.882568, (float)2.332031, (float)2.598267, (float)2.827637,
+   (float)1.683594, (float)2.088745, (float)2.361938, (float)2.608643,
+   (float)1.874023, (float)2.182129, (float)2.536133, (float)2.766968,
+   (float)1.861938, (float)2.070435, (float)2.309692, (float)2.700562,
+   (float)1.722168, (float)2.107422, (float)2.477295, (float)2.837646,
+   (float)1.926880, (float)2.184692, (float)2.442627, (float)2.663818,
+   (float)2.123901, (float)2.337280, (float)2.553101, (float)2.777466,
+   (float)1.588135, (float)1.911499, (float)2.212769, (float)2.543945,
+   (float)2.053955, (float)2.370850, (float)2.712158, (float)2.939941,
+   (float)2.210449, (float)2.519653, (float)2.770386, (float)2.958618,
+   (float)2.199463, (float)2.474731, (float)2.718262, (float)2.919922,
+   (float)1.960083, (float)2.175415, (float)2.608032, (float)2.888794,
+   (float)1.953735, (float)2.185181, (float)2.428223, (float)2.809570,
+   (float)1.615234, (float)2.036499, (float)2.576538, (float)2.834595,
+   (float)1.621094, (float)2.028198, (float)2.431030, (float)2.664673,
+   (float)1.824951, (float)2.267456, (float)2.514526, (float)2.747925,
+   (float)1.994263, (float)2.229126, (float)2.475220, (float)2.833984,
+   (float)1.746338, (float)2.011353, (float)2.588257, (float)2.826904,
+   (float)1.562866, (float)2.135986, (float)2.471680, (float)2.687256,
+   (float)1.748901, (float)2.083496, (float)2.460938, (float)2.686279,
+   (float)1.758057, (float)2.131470, (float)2.636597, (float)2.891602,
+   (float)2.071289, (float)2.299072, (float)2.550781, (float)2.814331,
+   (float)1.839600, (float)2.094360, (float)2.496460, (float)2.723999,
+   (float)1.882202, (float)2.088257, (float)2.636841, (float)2.923096,
+   (float)1.957886, (float)2.153198, (float)2.384399, (float)2.615234,
+   (float)1.992920, (float)2.351196, (float)2.654419, (float)2.889771,
+   (float)2.012817, (float)2.262451, (float)2.643799, (float)2.903076,
+   (float)2.025635, (float)2.254761, (float)2.508423, (float)2.784058,
+   (float)2.316040, (float)2.589355, (float)2.794189, (float)2.963623,
+   (float)1.741211, (float)2.279541, (float)2.578491, (float)2.816284,
+   (float)1.845337, (float)2.055786, (float)2.348511, (float)2.822021,
+   (float)1.679932, (float)1.926514, (float)2.499756, (float)2.835693,
+   (float)1.722534, (float)1.946899, (float)2.448486, (float)2.728760,
+   (float)1.829834, (float)2.043213, (float)2.580444, (float)2.867676,
+   (float)1.676636, (float)2.071655, (float)2.322510, (float)2.704834,
+   (float)1.791504, (float)2.113525, (float)2.469727, (float)2.784058,
+   (float)1.977051, (float)2.215088, (float)2.497437, (float)2.726929,
+   (float)1.800171, (float)2.106689, (float)2.357788, (float)2.738892,
+   (float)1.827759, (float)2.170166, (float)2.525879, (float)2.852417,
+   (float)1.918335, (float)2.132813, (float)2.488403, (float)2.728149,
+   (float)1.916748, (float)2.225098, (float)2.542603, (float)2.857666,
+   (float)1.761230, (float)1.976074, (float)2.507446, (float)2.884521,
+   (float)2.053711, (float)2.367432, (float)2.608032, (float)2.837646,
+   (float)1.595337, (float)2.000977, (float)2.307129, (float)2.578247,
+   (float)1.470581, (float)2.031250, (float)2.375854, (float)2.647583,
+   (float)1.801392, (float)2.128052, (float)2.399780, (float)2.822876,
+   (float)1.853638, (float)2.066650, (float)2.429199, (float)2.751465,
+   (float)1.956299, (float)2.163696, (float)2.394775, (float)2.734253,
+
+
+
+Andersen, et al.              Experimental                     [Page 95]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   (float)1.963623, (float)2.275757, (float)2.585327, (float)2.865234,
+   (float)1.887451, (float)2.105469, (float)2.331787, (float)2.587402,
+   (float)2.120117, (float)2.443359, (float)2.733887, (float)2.941406,
+   (float)1.506348, (float)1.766968, (float)2.400513, (float)2.851807,
+   (float)1.664551, (float)1.981079, (float)2.375732, (float)2.774414,
+   (float)1.720703, (float)1.978882, (float)2.391479, (float)2.640991,
+   (float)1.483398, (float)1.814819, (float)2.434448, (float)2.722290,
+   (float)1.769043, (float)2.136597, (float)2.563721, (float)2.774414,
+   (float)1.810791, (float)2.049316, (float)2.373901, (float)2.613647,
+   (float)1.788330, (float)2.005981, (float)2.359131, (float)2.723145,
+   (float)1.785156, (float)1.993164, (float)2.399780, (float)2.832520,
+   (float)1.695313, (float)2.022949, (float)2.522583, (float)2.745117,
+   (float)1.584106, (float)1.965576, (float)2.299927, (float)2.715576,
+   (float)1.894897, (float)2.249878, (float)2.655884, (float)2.897705,
+   (float)1.720581, (float)1.995728, (float)2.299438, (float)2.557007,
+   (float)1.619385, (float)2.173950, (float)2.574219, (float)2.787964,
+   (float)1.883179, (float)2.220459, (float)2.474365, (float)2.825073,
+   (float)1.447632, (float)2.045044, (float)2.555542, (float)2.744873,
+   (float)1.502686, (float)2.156616, (float)2.653320, (float)2.846558,
+   (float)1.711548, (float)1.944092, (float)2.282959, (float)2.685791,
+   (float)1.499756, (float)1.867554, (float)2.341064, (float)2.578857,
+   (float)1.916870, (float)2.135132, (float)2.568237, (float)2.826050,
+   (float)1.498047, (float)1.711182, (float)2.223267, (float)2.755127,
+   (float)1.808716, (float)1.997559, (float)2.256470, (float)2.758545,
+   (float)2.088501, (float)2.402710, (float)2.667358, (float)2.890259,
+   (float)1.545044, (float)1.819214, (float)2.324097, (float)2.692993,
+   (float)1.796021, (float)2.012573, (float)2.505737, (float)2.784912,
+   (float)1.786499, (float)2.041748, (float)2.290405, (float)2.650757,
+   (float)1.938232, (float)2.264404, (float)2.529053, (float)2.796143
+   };
+
+A.9.  anaFilter.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       anaFilter.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_ANAFILTER_H
+   #define __iLBC_ANAFILTER_H
+
+   void anaFilter(
+
+
+
+Andersen, et al.              Experimental                     [Page 96]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *In,  /* (i) Signal to be filtered */
+       float *a,   /* (i) LP parameters */
+       int len,/* (i) Length of signal */
+       float *Out, /* (o) Filtered signal */
+       float *mem  /* (i/o) Filter state */
+   );
+
+   #endif
+
+A.10.  anaFilter.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       anaFilter.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <string.h>
+   #include "iLBC_define.h"
+
+   /*----------------------------------------------------------------*
+    *  LP analysis filter.
+    *---------------------------------------------------------------*/
+
+   void anaFilter(
+       float *In,  /* (i) Signal to be filtered */
+       float *a,   /* (i) LP parameters */
+       int len,/* (i) Length of signal */
+       float *Out, /* (o) Filtered signal */
+       float *mem  /* (i/o) Filter state */
+   ){
+       int i, j;
+       float *po, *pi, *pm, *pa;
+
+       po = Out;
+
+       /* Filter first part using memory from past */
+
+       for (i=0; i<LPC_FILTERORDER; i++) {
+           pi = &In[i];
+           pm = &mem[LPC_FILTERORDER-1];
+           pa = a;
+           *po=0.0;
+
+
+
+Andersen, et al.              Experimental                     [Page 97]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           for (j=0; j<=i; j++) {
+               *po+=(*pa++)*(*pi--);
+           }
+           for (j=i+1; j<LPC_FILTERORDER+1; j++) {
+
+               *po+=(*pa++)*(*pm--);
+           }
+           po++;
+       }
+
+       /* Filter last part where the state is entirely
+          in the input vector */
+
+       for (i=LPC_FILTERORDER; i<len; i++) {
+           pi = &In[i];
+           pa = a;
+           *po=0.0;
+           for (j=0; j<LPC_FILTERORDER+1; j++) {
+               *po+=(*pa++)*(*pi--);
+           }
+           po++;
+       }
+
+       /* Update state vector */
+
+       memcpy(mem, &In[len-LPC_FILTERORDER],
+           LPC_FILTERORDER*sizeof(float));
+   }
+
+A.11.  createCB.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       createCB.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_CREATECB_H
+   #define __iLBC_CREATECB_H
+
+   void filteredCBvecs(
+       float *cbvectors,   /* (o) Codebook vector for the
+                                  higher section */
+
+
+
+Andersen, et al.              Experimental                     [Page 98]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *mem,         /* (i) Buffer to create codebook
+                                  vectors from */
+       int lMem        /* (i) Length of buffer */
+   );
+
+   void searchAugmentedCB(
+       int low,        /* (i) Start index for the search */
+       int high,           /* (i) End index for the search */
+       int stage,          /* (i) Current stage */
+       int startIndex,     /* (i) CB index for the first
+                                  augmented vector */
+       float *target,      /* (i) Target vector for encoding */
+       float *buffer,      /* (i) Pointer to the end of the
+                                  buffer for augmented codebook
+                                  construction */
+       float *max_measure, /* (i/o) Currently maximum measure */
+       int *best_index,/* (o) Currently the best index */
+       float *gain,    /* (o) Currently the best gain */
+       float *energy,      /* (o) Energy of augmented
+                                  codebook vectors */
+       float *invenergy/* (o) Inv energy of aug codebook
+                                  vectors */
+   );
+
+   void createAugmentedVec(
+       int index,          /* (i) Index for the aug vector
+                                  to be created */
+       float *buffer,      /* (i) Pointer to the end of the
+                                  buffer for augmented codebook
+                                  construction */
+       float *cbVec    /* (o) The constructed codebook vector */
+   );
+
+   #endif
+
+A.12.  createCB.c
+
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       createCB.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+
+
+Andersen, et al.              Experimental                     [Page 99]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #include "iLBC_define.h"
+   #include "constants.h"
+   #include <string.h>
+   #include <math.h>
+
+   /*----------------------------------------------------------------*
+    *  Construct an additional codebook vector by filtering the
+    *  initial codebook buffer. This vector is then used to expand
+    *  the codebook with an additional section.
+    *---------------------------------------------------------------*/
+
+   void filteredCBvecs(
+       float *cbvectors,   /* (o) Codebook vectors for the
+                                  higher section */
+       float *mem,         /* (i) Buffer to create codebook
+                                  vector from */
+       int lMem        /* (i) Length of buffer */
+   ){
+       int j, k;
+       float *pp, *pp1;
+       float tempbuff2[CB_MEML+CB_FILTERLEN];
+       float *pos;
+
+       memset(tempbuff2, 0, (CB_HALFFILTERLEN-1)*sizeof(float));
+       memcpy(&tempbuff2[CB_HALFFILTERLEN-1], mem, lMem*sizeof(float));
+       memset(&tempbuff2[lMem+CB_HALFFILTERLEN-1], 0,
+           (CB_HALFFILTERLEN+1)*sizeof(float));
+
+       /* Create codebook vector for higher section by filtering */
+
+       /* do filtering */
+       pos=cbvectors;
+       memset(pos, 0, lMem*sizeof(float));
+       for (k=0; k<lMem; k++) {
+           pp=&tempbuff2[k];
+           pp1=&cbfiltersTbl[CB_FILTERLEN-1];
+           for (j=0;j<CB_FILTERLEN;j++) {
+               (*pos)+=(*pp++)*(*pp1--);
+           }
+           pos++;
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  Search the augmented part of the codebook to find the best
+    *  measure.
+    *----------------------------------------------------------------*/
+
+
+
+
+Andersen, et al.              Experimental                    [Page 100]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void searchAugmentedCB(
+       int low,        /* (i) Start index for the search */
+       int high,           /* (i) End index for the search */
+       int stage,          /* (i) Current stage */
+       int startIndex,     /* (i) Codebook index for the first
+                                  aug vector */
+       float *target,      /* (i) Target vector for encoding */
+       float *buffer,      /* (i) Pointer to the end of the buffer for
+                                  augmented codebook construction */
+       float *max_measure, /* (i/o) Currently maximum measure */
+       int *best_index,/* (o) Currently the best index */
+       float *gain,    /* (o) Currently the best gain */
+       float *energy,      /* (o) Energy of augmented codebook
+                                  vectors */
+       float *invenergy/* (o) Inv energy of augmented codebook
+                                  vectors */
+   ) {
+       int icount, ilow, j, tmpIndex;
+       float *pp, *ppo, *ppi, *ppe, crossDot, alfa;
+       float weighted, measure, nrjRecursive;
+       float ftmp;
+
+       /* Compute the energy for the first (low-5)
+          noninterpolated samples */
+       nrjRecursive = (float) 0.0;
+       pp = buffer - low + 1;
+       for (j=0; j<(low-5); j++) {
+           nrjRecursive += ( (*pp)*(*pp) );
+           pp++;
+       }
+       ppe = buffer - low;
+
+
+       for (icount=low; icount<=high; icount++) {
+
+           /* Index of the codebook vector used for retrieving
+              energy values */
+           tmpIndex = startIndex+icount-20;
+
+           ilow = icount-4;
+
+           /* Update the energy recursively to save complexity */
+           nrjRecursive = nrjRecursive + (*ppe)*(*ppe);
+           ppe--;
+           energy[tmpIndex] = nrjRecursive;
+
+           /* Compute cross dot product for the first (low-5)
+              samples */
+
+
+
+Andersen, et al.              Experimental                    [Page 101]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           crossDot = (float) 0.0;
+           pp = buffer-icount;
+           for (j=0; j<ilow; j++) {
+               crossDot += target[j]*(*pp++);
+           }
+
+           /* interpolation */
+           alfa = (float) 0.2;
+           ppo = buffer-4;
+           ppi = buffer-icount-4;
+           for (j=ilow; j<icount; j++) {
+               weighted = ((float)1.0-alfa)*(*ppo)+alfa*(*ppi);
+               ppo++;
+               ppi++;
+               energy[tmpIndex] += weighted*weighted;
+               crossDot += target[j]*weighted;
+               alfa += (float)0.2;
+           }
+
+           /* Compute energy and cross dot product for the
+              remaining samples */
+           pp = buffer - icount;
+           for (j=icount; j<SUBL; j++) {
+               energy[tmpIndex] += (*pp)*(*pp);
+               crossDot += target[j]*(*pp++);
+           }
+
+           if (energy[tmpIndex]>0.0) {
+               invenergy[tmpIndex]=(float)1.0/(energy[tmpIndex]+EPS);
+           } else {
+               invenergy[tmpIndex] = (float) 0.0;
+           }
+
+           if (stage==0) {
+               measure = (float)-10000000.0;
+
+               if (crossDot > 0.0) {
+                   measure = crossDot*crossDot*invenergy[tmpIndex];
+               }
+           }
+           else {
+               measure = crossDot*crossDot*invenergy[tmpIndex];
+           }
+
+           /* check if measure is better */
+           ftmp = crossDot*invenergy[tmpIndex];
+
+           if ((measure>*max_measure) && (fabs(ftmp)<CB_MAXGAIN)) {
+
+
+
+Andersen, et al.              Experimental                    [Page 102]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               *best_index = tmpIndex;
+               *max_measure = measure;
+               *gain = ftmp;
+           }
+       }
+   }
+
+
+   /*----------------------------------------------------------------*
+    *  Recreate a specific codebook vector from the augmented part.
+    *
+    *----------------------------------------------------------------*/
+
+   void createAugmentedVec(
+       int index,      /* (i) Index for the augmented vector
+                              to be created */
+       float *buffer,  /* (i) Pointer to the end of the buffer for
+                              augmented codebook construction */
+       float *cbVec/* (o) The constructed codebook vector */
+   ) {
+       int ilow, j;
+       float *pp, *ppo, *ppi, alfa, alfa1, weighted;
+
+       ilow = index-5;
+
+       /* copy the first noninterpolated part */
+
+       pp = buffer-index;
+       memcpy(cbVec,pp,sizeof(float)*index);
+
+       /* interpolation */
+
+       alfa1 = (float)0.2;
+       alfa = 0.0;
+       ppo = buffer-5;
+       ppi = buffer-index-5;
+       for (j=ilow; j<index; j++) {
+           weighted = ((float)1.0-alfa)*(*ppo)+alfa*(*ppi);
+           ppo++;
+           ppi++;
+           cbVec[j] = weighted;
+           alfa += alfa1;
+       }
+
+       /* copy the second noninterpolated part */
+
+       pp = buffer - index;
+       memcpy(cbVec+index,pp,sizeof(float)*(SUBL-index));
+
+
+
+Andersen, et al.              Experimental                    [Page 103]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   }
+
+A.13.  doCPLC.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       doCPLC.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_DOLPC_H
+   #define __iLBC_DOLPC_H
+
+   void doThePLC(
+       float *PLCresidual, /* (o) concealed residual */
+       float *PLClpc,      /* (o) concealed LP parameters */
+       int PLI,        /* (i) packet loss indicator
+                                  0 = no PL, 1 = PL */
+       float *decresidual, /* (i) decoded residual */
+       float *lpc,         /* (i) decoded LPC (only used for no PL) */
+       int inlag,          /* (i) pitch lag */
+       iLBC_Dec_Inst_t *iLBCdec_inst
+                           /* (i/o) decoder instance */
+   );
+
+   #endif
+
+A.14.  doCPLC.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       doCPLC.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <string.h>
+   #include <stdio.h>
+
+
+
+Andersen, et al.              Experimental                    [Page 104]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #include "iLBC_define.h"
+
+   /*----------------------------------------------------------------*
+    *  Compute cross correlation and pitch gain for pitch prediction
+    *  of last subframe at given lag.
+    *---------------------------------------------------------------*/
+
+   void compCorr(
+       float *cc,      /* (o) cross correlation coefficient */
+       float *gc,      /* (o) gain */
+       float *pm,
+       float *buffer,  /* (i) signal buffer */
+       int lag,    /* (i) pitch lag */
+       int bLen,       /* (i) length of buffer */
+       int sRange      /* (i) correlation search length */
+   ){
+       int i;
+       float ftmp1, ftmp2, ftmp3;
+
+       /* Guard against getting outside buffer */
+       if ((bLen-sRange-lag)<0) {
+           sRange=bLen-lag;
+       }
+
+       ftmp1 = 0.0;
+       ftmp2 = 0.0;
+       ftmp3 = 0.0;
+       for (i=0; i<sRange; i++) {
+           ftmp1 += buffer[bLen-sRange+i] *
+               buffer[bLen-sRange+i-lag];
+           ftmp2 += buffer[bLen-sRange+i-lag] *
+                   buffer[bLen-sRange+i-lag];
+           ftmp3 += buffer[bLen-sRange+i] *
+                   buffer[bLen-sRange+i];
+       }
+
+       if (ftmp2 > 0.0) {
+           *cc = ftmp1*ftmp1/ftmp2;
+           *gc = (float)fabs(ftmp1/ftmp2);
+           *pm=(float)fabs(ftmp1)/
+               ((float)sqrt(ftmp2)*(float)sqrt(ftmp3));
+       }
+       else {
+           *cc = 0.0;
+           *gc = 0.0;
+           *pm=0.0;
+       }
+   }
+
+
+
+Andersen, et al.              Experimental                    [Page 105]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   /*----------------------------------------------------------------*
+    *  Packet loss concealment routine. Conceals a residual signal
+    *  and LP parameters. If no packet loss, update state.
+    *---------------------------------------------------------------*/
+
+   void doThePLC(
+       float *PLCresidual, /* (o) concealed residual */
+       float *PLClpc,      /* (o) concealed LP parameters */
+       int PLI,        /* (i) packet loss indicator
+                                  0 = no PL, 1 = PL */
+       float *decresidual, /* (i) decoded residual */
+       float *lpc,         /* (i) decoded LPC (only used for no PL) */
+       int inlag,          /* (i) pitch lag */
+       iLBC_Dec_Inst_t *iLBCdec_inst
+                           /* (i/o) decoder instance */
+   ){
+       int lag=20, randlag;
+       float gain, maxcc;
+       float use_gain;
+       float gain_comp, maxcc_comp, per, max_per;
+       int i, pick, use_lag;
+       float ftmp, randvec[BLOCKL_MAX], pitchfact, energy;
+
+       /* Packet Loss */
+
+       if (PLI == 1) {
+
+           iLBCdec_inst->consPLICount += 1;
+
+           /* if previous frame not lost,
+              determine pitch pred. gain */
+
+           if (iLBCdec_inst->prevPLI != 1) {
+
+               /* Search around the previous lag to find the
+                  best pitch period */
+
+               lag=inlag-3;
+               compCorr(&maxcc, &gain, &max_per,
+                   iLBCdec_inst->prevResidual,
+                   lag, iLBCdec_inst->blockl, 60);
+               for (i=inlag-2;i<=inlag+3;i++) {
+                   compCorr(&maxcc_comp, &gain_comp, &per,
+                       iLBCdec_inst->prevResidual,
+                       i, iLBCdec_inst->blockl, 60);
+
+                   if (maxcc_comp>maxcc) {
+                       maxcc=maxcc_comp;
+
+
+
+Andersen, et al.              Experimental                    [Page 106]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                       gain=gain_comp;
+                       lag=i;
+                       max_per=per;
+                   }
+               }
+
+           }
+
+           /* previous frame lost, use recorded lag and periodicity */
+
+           else {
+               lag=iLBCdec_inst->prevLag;
+               max_per=iLBCdec_inst->per;
+           }
+
+           /* downscaling */
+
+           use_gain=1.0;
+           if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>320)
+               use_gain=(float)0.9;
+           else if (iLBCdec_inst->consPLICount*
+                           iLBCdec_inst->blockl>2*320)
+               use_gain=(float)0.7;
+           else if (iLBCdec_inst->consPLICount*
+                           iLBCdec_inst->blockl>3*320)
+               use_gain=(float)0.5;
+           else if (iLBCdec_inst->consPLICount*
+                           iLBCdec_inst->blockl>4*320)
+               use_gain=(float)0.0;
+
+           /* mix noise and pitch repetition */
+           ftmp=(float)sqrt(max_per);
+           if (ftmp>(float)0.7)
+               pitchfact=(float)1.0;
+           else if (ftmp>(float)0.4)
+               pitchfact=(ftmp-(float)0.4)/((float)0.7-(float)0.4);
+           else
+               pitchfact=0.0;
+
+
+           /* avoid repetition of same pitch cycle */
+           use_lag=lag;
+           if (lag<80) {
+               use_lag=2*lag;
+           }
+
+           /* compute concealed residual */
+
+
+
+
+Andersen, et al.              Experimental                    [Page 107]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           energy = 0.0;
+           for (i=0; i<iLBCdec_inst->blockl; i++) {
+
+               /* noise component */
+
+               iLBCdec_inst->seed=(iLBCdec_inst->seed*69069L+1) &
+                   (0x80000000L-1);
+               randlag = 50 + ((signed long) iLBCdec_inst->seed)%70;
+               pick = i - randlag;
+
+               if (pick < 0) {
+                   randvec[i] =
+                       iLBCdec_inst->prevResidual[
+                                   iLBCdec_inst->blockl+pick];
+               } else {
+                   randvec[i] =  randvec[pick];
+               }
+
+               /* pitch repetition component */
+               pick = i - use_lag;
+
+               if (pick < 0) {
+                   PLCresidual[i] =
+                       iLBCdec_inst->prevResidual[
+                                   iLBCdec_inst->blockl+pick];
+               } else {
+                   PLCresidual[i] = PLCresidual[pick];
+               }
+
+               /* mix random and periodicity component */
+
+               if (i<80)
+                   PLCresidual[i] = use_gain*(pitchfact *
+                               PLCresidual[i] +
+                               ((float)1.0 - pitchfact) * randvec[i]);
+               else if (i<160)
+                   PLCresidual[i] = (float)0.95*use_gain*(pitchfact *
+                               PLCresidual[i] +
+                               ((float)1.0 - pitchfact) * randvec[i]);
+               else
+                   PLCresidual[i] = (float)0.9*use_gain*(pitchfact *
+                               PLCresidual[i] +
+                               ((float)1.0 - pitchfact) * randvec[i]);
+
+               energy += PLCresidual[i] * PLCresidual[i];
+           }
+
+           /* less than 30 dB, use only noise */
+
+
+
+Andersen, et al.              Experimental                    [Page 108]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+
+           if (sqrt(energy/(float)iLBCdec_inst->blockl) < 30.0) {
+               gain=0.0;
+               for (i=0; i<iLBCdec_inst->blockl; i++) {
+                   PLCresidual[i] = randvec[i];
+               }
+           }
+
+           /* use old LPC */
+
+           memcpy(PLClpc,iLBCdec_inst->prevLpc,
+               (LPC_FILTERORDER+1)*sizeof(float));
+
+       }
+
+       /* no packet loss, copy input */
+
+       else {
+           memcpy(PLCresidual, decresidual,
+               iLBCdec_inst->blockl*sizeof(float));
+           memcpy(PLClpc, lpc, (LPC_FILTERORDER+1)*sizeof(float));
+           iLBCdec_inst->consPLICount = 0;
+       }
+
+       /* update state */
+
+       if (PLI) {
+           iLBCdec_inst->prevLag = lag;
+           iLBCdec_inst->per=max_per;
+       }
+
+       iLBCdec_inst->prevPLI = PLI;
+       memcpy(iLBCdec_inst->prevLpc, PLClpc,
+           (LPC_FILTERORDER+1)*sizeof(float));
+       memcpy(iLBCdec_inst->prevResidual, PLCresidual,
+           iLBCdec_inst->blockl*sizeof(float));
+   }
+
+A.15.  enhancer.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       enhancer.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+
+
+Andersen, et al.              Experimental                    [Page 109]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   ******************************************************************/
+
+   #ifndef __ENHANCER_H
+   #define __ENHANCER_H
+
+   #include "iLBC_define.h"
+
+   float xCorrCoef(
+       float *target,      /* (i) first array */
+       float *regressor,   /* (i) second array */
+       int subl        /* (i) dimension arrays */
+   );
+
+   int enhancerInterface(
+       float *out,         /* (o) the enhanced residual signal */
+       float *in,          /* (i) the residual signal to enhance */
+       iLBC_Dec_Inst_t *iLBCdec_inst
+                           /* (i/o) the decoder state structure */
+   );
+
+   #endif
+
+A.16.  enhancer.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       enhancer.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <string.h>
+   #include "iLBC_define.h"
+   #include "constants.h"
+   #include "filter.h"
+
+   /*----------------------------------------------------------------*
+    * Find index in array such that the array element with said
+    * index is the element of said array closest to "value"
+    * according to the squared-error criterion
+    *---------------------------------------------------------------*/
+
+   void NearestNeighbor(
+
+
+
+Andersen, et al.              Experimental                    [Page 110]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       int   *index,   /* (o) index of array element closest
+                              to value */
+       float *array,   /* (i) data array */
+       float value,/* (i) value */
+       int arlength/* (i) dimension of data array */
+   ){
+       int i;
+       float bestcrit,crit;
+
+       crit=array[0]-value;
+       bestcrit=crit*crit;
+       *index=0;
+       for (i=1; i<arlength; i++) {
+           crit=array[i]-value;
+           crit=crit*crit;
+
+           if (crit<bestcrit) {
+               bestcrit=crit;
+               *index=i;
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    * compute cross correlation between sequences
+    *---------------------------------------------------------------*/
+
+   void mycorr1(
+       float* corr,    /* (o) correlation of seq1 and seq2 */
+       float* seq1,    /* (i) first sequence */
+       int dim1,           /* (i) dimension first seq1 */
+       const float *seq2,  /* (i) second sequence */
+       int dim2        /* (i) dimension seq2 */
+   ){
+       int i,j;
+
+       for (i=0; i<=dim1-dim2; i++) {
+           corr[i]=0.0;
+           for (j=0; j<dim2; j++) {
+               corr[i] += seq1[i+j] * seq2[j];
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    * upsample finite array assuming zeros outside bounds
+    *---------------------------------------------------------------*/
+
+
+
+
+Andersen, et al.              Experimental                    [Page 111]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void enh_upsample(
+       float* useq1,   /* (o) upsampled output sequence */
+       float* seq1,/* (i) sequence to be upsampled */
+       int dim1,       /* (i) dimension seq1 */
+       int hfl         /* (i) polyphase filter length=2*hfl+1 */
+   ){
+       float *pu,*ps;
+       int i,j,k,q,filterlength,hfl2;
+       const float *polyp[ENH_UPS0]; /* pointers to
+                                        polyphase columns */
+       const float *pp;
+
+       /* define pointers for filter */
+
+       filterlength=2*hfl+1;
+
+       if ( filterlength > dim1 ) {
+           hfl2=(int) (dim1/2);
+           for (j=0; j<ENH_UPS0; j++) {
+               polyp[j]=polyphaserTbl+j*filterlength+hfl-hfl2;
+           }
+           hfl=hfl2;
+           filterlength=2*hfl+1;
+       }
+       else {
+           for (j=0; j<ENH_UPS0; j++) {
+               polyp[j]=polyphaserTbl+j*filterlength;
+           }
+       }
+
+       /* filtering: filter overhangs left side of sequence */
+
+       pu=useq1;
+       for (i=hfl; i<filterlength; i++) {
+           for (j=0; j<ENH_UPS0; j++) {
+               *pu=0.0;
+               pp = polyp[j];
+               ps = seq1+i;
+               for (k=0; k<=i; k++) {
+                   *pu += *ps-- * *pp++;
+               }
+               pu++;
+           }
+       }
+
+       /* filtering: simple convolution=inner products */
+
+       for (i=filterlength; i<dim1; i++) {
+
+
+
+Andersen, et al.              Experimental                    [Page 112]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           for (j=0;j<ENH_UPS0; j++){
+               *pu=0.0;
+               pp = polyp[j];
+               ps = seq1+i;
+               for (k=0; k<filterlength; k++) {
+                   *pu += *ps-- * *pp++;
+               }
+               pu++;
+           }
+       }
+
+       /* filtering: filter overhangs right side of sequence */
+
+       for (q=1; q<=hfl; q++) {
+           for (j=0; j<ENH_UPS0; j++) {
+               *pu=0.0;
+               pp = polyp[j]+q;
+               ps = seq1+dim1-1;
+               for (k=0; k<filterlength-q; k++) {
+                   *pu += *ps-- * *pp++;
+               }
+               pu++;
+           }
+       }
+   }
+
+
+   /*----------------------------------------------------------------*
+    * find segment starting near idata+estSegPos that has highest
+    * correlation with idata+centerStartPos through
+    * idata+centerStartPos+ENH_BLOCKL-1. The segment is found at a
+    * resolution of ENH_UPS0 times the original
+    * sampling rate
+    *---------------------------------------------------------------*/
+
+   void refiner(
+       float *seg,         /* (o) segment array */
+       float *updStartPos, /* (o) updated start point */
+       float* idata,       /* (i) original data buffer */
+       int idatal,         /* (i) dimension of idata */
+       int centerStartPos, /* (i) beginning center segment */
+       float estSegPos,/* (i) estimated beginning other segment */
+       float period    /* (i) estimated pitch period */
+   ){
+       int estSegPosRounded,searchSegStartPos,searchSegEndPos,corrdim;
+       int tloc,tloc2,i,st,en,fraction;
+       float vect[ENH_VECTL],corrVec[ENH_CORRDIM],maxv;
+       float corrVecUps[ENH_CORRDIM*ENH_UPS0];
+
+
+
+Andersen, et al.              Experimental                    [Page 113]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* defining array bounds */
+
+       estSegPosRounded=(int)(estSegPos - 0.5);
+
+       searchSegStartPos=estSegPosRounded-ENH_SLOP;
+
+       if (searchSegStartPos<0) {
+           searchSegStartPos=0;
+       }
+       searchSegEndPos=estSegPosRounded+ENH_SLOP;
+
+       if (searchSegEndPos+ENH_BLOCKL >= idatal) {
+           searchSegEndPos=idatal-ENH_BLOCKL-1;
+       }
+       corrdim=searchSegEndPos-searchSegStartPos+1;
+
+       /* compute upsampled correlation (corr33) and find
+          location of max */
+
+       mycorr1(corrVec,idata+searchSegStartPos,
+           corrdim+ENH_BLOCKL-1,idata+centerStartPos,ENH_BLOCKL);
+       enh_upsample(corrVecUps,corrVec,corrdim,ENH_FL0);
+       tloc=0; maxv=corrVecUps[0];
+       for (i=1; i<ENH_UPS0*corrdim; i++) {
+
+           if (corrVecUps[i]>maxv) {
+               tloc=i;
+               maxv=corrVecUps[i];
+           }
+       }
+
+       /* make vector can be upsampled without ever running outside
+          bounds */
+
+       *updStartPos= (float)searchSegStartPos +
+           (float)tloc/(float)ENH_UPS0+(float)1.0;
+       tloc2=(int)(tloc/ENH_UPS0);
+
+       if (tloc>tloc2*ENH_UPS0) {
+           tloc2++;
+       }
+       st=searchSegStartPos+tloc2-ENH_FL0;
+
+       if (st<0) {
+           memset(vect,0,-st*sizeof(float));
+           memcpy(&vect[-st],idata, (ENH_VECTL+st)*sizeof(float));
+       }
+       else {
+
+
+
+Andersen, et al.              Experimental                    [Page 114]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           en=st+ENH_VECTL;
+
+           if (en>idatal) {
+               memcpy(vect, &idata[st],
+                   (ENH_VECTL-(en-idatal))*sizeof(float));
+               memset(&vect[ENH_VECTL-(en-idatal)], 0,
+                   (en-idatal)*sizeof(float));
+           }
+           else {
+               memcpy(vect, &idata[st], ENH_VECTL*sizeof(float));
+           }
+       }
+       fraction=tloc2*ENH_UPS0-tloc;
+
+       /* compute the segment (this is actually a convolution) */
+
+       mycorr1(seg,vect,ENH_VECTL,polyphaserTbl+(2*ENH_FL0+1)*fraction,
+           2*ENH_FL0+1);
+   }
+
+   /*----------------------------------------------------------------*
+    * find the smoothed output data
+    *---------------------------------------------------------------*/
+
+   void smath(
+       float *odata,   /* (o) smoothed output */
+       float *sseq,/* (i) said second sequence of waveforms */
+       int hl,         /* (i) 2*hl+1 is sseq dimension */
+       float alpha0/* (i) max smoothing energy fraction */
+   ){
+       int i,k;
+       float w00,w10,w11,A,B,C,*psseq,err,errs;
+       float surround[BLOCKL_MAX]; /* shape contributed by other than
+                                      current */
+       float wt[2*ENH_HL+1];       /* waveform weighting to get
+                                      surround shape */
+       float denom;
+
+       /* create shape of contribution from all waveforms except the
+          current one */
+
+       for (i=1; i<=2*hl+1; i++) {
+           wt[i-1] = (float)0.5*(1 - (float)cos(2*PI*i/(2*hl+2)));
+       }
+       wt[hl]=0.0; /* for clarity, not used */
+       for (i=0; i<ENH_BLOCKL; i++) {
+           surround[i]=sseq[i]*wt[0];
+       }
+
+
+
+Andersen, et al.              Experimental                    [Page 115]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       for (k=1; k<hl; k++) {
+           psseq=sseq+k*ENH_BLOCKL;
+           for(i=0;i<ENH_BLOCKL; i++) {
+               surround[i]+=psseq[i]*wt[k];
+           }
+       }
+       for (k=hl+1; k<=2*hl; k++) {
+           psseq=sseq+k*ENH_BLOCKL;
+           for(i=0;i<ENH_BLOCKL; i++) {
+               surround[i]+=psseq[i]*wt[k];
+           }
+       }
+
+       /* compute some inner products */
+
+       w00 = w10 = w11 = 0.0;
+       psseq=sseq+hl*ENH_BLOCKL; /* current block  */
+       for (i=0; i<ENH_BLOCKL;i++) {
+           w00+=psseq[i]*psseq[i];
+           w11+=surround[i]*surround[i];
+           w10+=surround[i]*psseq[i];
+       }
+
+       if (fabs(w11) < 1.0) {
+           w11=1.0;
+       }
+       C = (float)sqrt( w00/w11);
+
+       /* first try enhancement without power-constraint */
+
+       errs=0.0;
+       psseq=sseq+hl*ENH_BLOCKL;
+       for (i=0; i<ENH_BLOCKL; i++) {
+           odata[i]=C*surround[i];
+           err=psseq[i]-odata[i];
+           errs+=err*err;
+       }
+
+       /* if constraint violated by first try, add constraint */
+
+       if (errs > alpha0 * w00) {
+           if ( w00 < 1) {
+               w00=1;
+           }
+           denom = (w11*w00-w10*w10)/(w00*w00);
+
+           if (denom > 0.0001) { /* eliminates numerical problems
+                                    for if smooth */
+
+
+
+Andersen, et al.              Experimental                    [Page 116]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               A = (float)sqrt( (alpha0- alpha0*alpha0/4)/denom);
+               B = -alpha0/2 - A * w10/w00;
+               B = B+1;
+           }
+           else { /* essentially no difference between cycles;
+                     smoothing not needed */
+               A= 0.0;
+               B= 1.0;
+           }
+
+           /* create smoothed sequence */
+
+           psseq=sseq+hl*ENH_BLOCKL;
+           for (i=0; i<ENH_BLOCKL; i++) {
+               odata[i]=A*surround[i]+B*psseq[i];
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    * get the pitch-synchronous sample sequence
+    *---------------------------------------------------------------*/
+
+   void getsseq(
+       float *sseq,    /* (o) the pitch-synchronous sequence */
+       float *idata,       /* (i) original data */
+       int idatal,         /* (i) dimension of data */
+       int centerStartPos, /* (i) where current block starts */
+       float *period,      /* (i) rough-pitch-period array */
+       float *plocs,       /* (i) where periods of period array
+                                  are taken */
+       int periodl,    /* (i) dimension period array */
+       int hl              /* (i) 2*hl+1 is the number of sequences */
+   ){
+       /* sseq is filled with 2*hl+1 consecutive blocks of ENH_BLOCKL
+          samples: hl "past" blocks, the "present" block, and hl
+          "future" blocks, each roughly one pitch period apart */
+       int i,centerEndPos,q;
+       float blockStartPos[2*ENH_HL+1];
+       int lagBlock[2*ENH_HL+1];
+       float plocs2[ENH_PLOCSL];
+       float *psseq;
+
+       centerEndPos=centerStartPos+ENH_BLOCKL-1;
+
+       /* present */
+
+       NearestNeighbor(lagBlock+hl,plocs,
+           (float)0.5*(centerStartPos+centerEndPos),periodl);
+
+       blockStartPos[hl]=(float)centerStartPos;
+
+
+
+Andersen, et al.              Experimental                    [Page 117]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* the present block is copied unmodified from idata */
+       psseq=sseq+ENH_BLOCKL*hl;
+       memcpy(psseq, idata+centerStartPos, ENH_BLOCKL*sizeof(float));
+
+       /* past */
+
+       for (q=hl-1; q>=0; q--) {
+           blockStartPos[q]=blockStartPos[q+1]-period[lagBlock[q+1]];
+           NearestNeighbor(lagBlock+q,plocs,
+               blockStartPos[q]+
+               ENH_BLOCKL_HALF-period[lagBlock[q+1]], periodl);
+
+
+           /* refine only if the block (incl. search overhang) lies
+              inside idata; otherwise output zeros for this block */
+           if (blockStartPos[q]-ENH_OVERHANG>=0) {
+               refiner(sseq+q*ENH_BLOCKL, blockStartPos+q, idata,
+                   idatal, centerStartPos, blockStartPos[q],
+                   period[lagBlock[q+1]]);
+           } else {
+               psseq=sseq+q*ENH_BLOCKL;
+               memset(psseq, 0, ENH_BLOCKL*sizeof(float));
+           }
+       }
+
+       /* future */
+
+       /* plocs2: period locations shifted back one period each, so
+          the nearest-neighbor search works forward in time */
+       for (i=0; i<periodl; i++) {
+           plocs2[i]=plocs[i]-period[i];
+       }
+       for (q=hl+1; q<=2*hl; q++) {
+           NearestNeighbor(lagBlock+q,plocs2,
+               blockStartPos[q-1]+ENH_BLOCKL_HALF,periodl);
+
+           blockStartPos[q]=blockStartPos[q-1]+period[lagBlock[q]];
+           if (blockStartPos[q]+ENH_BLOCKL+ENH_OVERHANG<idatal) {
+               refiner(sseq+ENH_BLOCKL*q, blockStartPos+q, idata,
+                   idatal, centerStartPos, blockStartPos[q],
+                   period[lagBlock[q]]);
+           }
+           else {
+               psseq=sseq+q*ENH_BLOCKL;
+               memset(psseq, 0, ENH_BLOCKL*sizeof(float));
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    * perform enhancement on idata+centerStartPos through
+    * idata+centerStartPos+ENH_BLOCKL-1
+    *---------------------------------------------------------------*/
+
+
+
+Andersen, et al.              Experimental                    [Page 118]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void enhancer(
+       float *odata,       /* (o) smoothed block, dimension blockl */
+       float *idata,       /* (i) data buffer used for enhancing */
+       int idatal,         /* (i) dimension idata */
+       int centerStartPos, /* (i) first sample current block
+                                  within idata */
+       float alpha0,       /* (i) max correction-energy-fraction
+                                 (in [0,1]) */
+       float *period,      /* (i) pitch period array */
+       float *plocs,       /* (i) locations where period array
+                                  values valid */
+       int periodl         /* (i) dimension of period and plocs */
+   ){
+       /* workspace for the 2*ENH_HL+1 pitch-synchronous blocks */
+       float sseq[(2*ENH_HL+1)*ENH_BLOCKL];
+
+       /* get said second sequence of segments */
+
+       getsseq(sseq,idata,idatal,centerStartPos,period,
+           plocs,periodl,ENH_HL);
+
+       /* compute the smoothed output from said second sequence */
+
+       smath(odata,sseq,ENH_HL,alpha0);
+
+   }
+
+   /*----------------------------------------------------------------*
+    * cross correlation
+    *---------------------------------------------------------------*/
+
+   float xCorrCoef(
+       float *target,      /* (i) first array */
+       float *regressor,   /* (i) second array */
+       int subl        /* (i) dimension arrays */
+   ){
+       int i;
+       float ftmp1, ftmp2;
+
+       /* ftmp1: cross correlation, ftmp2: regressor energy */
+       ftmp1 = 0.0;
+       ftmp2 = 0.0;
+       for (i=0; i<subl; i++) {
+           ftmp1 += target[i]*regressor[i];
+           ftmp2 += regressor[i]*regressor[i];
+       }
+
+       /* squared, energy-normalized correlation; ftmp1>0 implies at
+          least one nonzero regressor sample, so ftmp2>0 and the
+          division is safe; non-positive correlations return 0 */
+       if (ftmp1 > 0.0) {
+           return (float)(ftmp1*ftmp1/ftmp2);
+       }
+
+
+
+Andersen, et al.              Experimental                    [Page 119]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       else {
+           return (float)0.0;
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    * interface for enhancer
+    *---------------------------------------------------------------*/
+
+   int enhancerInterface(
+       float *out,                     /* (o) enhanced signal */
+       float *in,                      /* (i) unenhanced signal */
+       iLBC_Dec_Inst_t *iLBCdec_inst   /* (i) buffers etc */
+   ){
+       float *enh_buf, *enh_period;
+       int iblock, isample;
+       int lag=0, ilag, i, ioffset;
+       float cc, maxcc;
+       float ftmp1, ftmp2;
+       float *inPtr, *enh_bufPtr1, *enh_bufPtr2;
+       float plc_pred[ENH_BLOCKL];
+
+       float lpState[6], downsampled[(ENH_NBLOCKS*ENH_BLOCKL+120)/2];
+       int inLen=ENH_NBLOCKS*ENH_BLOCKL+120;
+       int start, plc_blockl, inlag;
+
+       enh_buf=iLBCdec_inst->enh_buf;
+       enh_period=iLBCdec_inst->enh_period;
+
+       /* shift the enhancer history buffer and append the new frame */
+       memmove(enh_buf, &enh_buf[iLBCdec_inst->blockl],
+           (ENH_BUFL-iLBCdec_inst->blockl)*sizeof(float));
+
+       memcpy(&enh_buf[ENH_BUFL-iLBCdec_inst->blockl], in,
+           iLBCdec_inst->blockl*sizeof(float));
+
+       if (iLBCdec_inst->mode==30)
+           plc_blockl=ENH_BLOCKL;
+       else
+           plc_blockl=40;
+
+       /* when 20 ms frame, move processing one block */
+       ioffset=0;
+       if (iLBCdec_inst->mode==20) ioffset=1;
+
+       i=3-ioffset;
+       memmove(enh_period, &enh_period[i],
+           (ENH_NBLOCKS_TOT-i)*sizeof(float));
+
+
+
+
+Andersen, et al.              Experimental                    [Page 120]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* Set state information to the 6 samples right before
+          the samples to be downsampled. */
+
+       memcpy(lpState,
+           enh_buf+(ENH_NBLOCKS_EXTRA+ioffset)*ENH_BLOCKL-126,
+           6*sizeof(float));
+
+       /* Down sample a factor 2 to save computations */
+
+       DownSample(enh_buf+(ENH_NBLOCKS_EXTRA+ioffset)*ENH_BLOCKL-120,
+                   lpFilt_coefsTbl, inLen-ioffset*ENH_BLOCKL,
+                   lpState, downsampled);
+
+       /* Estimate the pitch in the down sampled domain. */
+       for (iblock = 0; iblock<ENH_NBLOCKS-ioffset; iblock++) {
+
+           /* exhaustive search over downsampled lags 10..59 for the
+              lag with the highest normalized correlation */
+           lag = 10;
+           maxcc = xCorrCoef(downsampled+60+iblock*
+               ENH_BLOCKL_HALF, downsampled+60+iblock*
+               ENH_BLOCKL_HALF-lag, ENH_BLOCKL_HALF);
+           for (ilag=11; ilag<60; ilag++) {
+               cc = xCorrCoef(downsampled+60+iblock*
+                   ENH_BLOCKL_HALF, downsampled+60+iblock*
+                   ENH_BLOCKL_HALF-ilag, ENH_BLOCKL_HALF);
+
+               if (cc > maxcc) {
+                   maxcc = cc;
+                   lag = ilag;
+               }
+           }
+
+           /* Store the estimated lag in the non-downsampled domain */
+           enh_period[iblock+ENH_NBLOCKS_EXTRA+ioffset] = (float)lag*2;
+
+
+       }
+
+
+       /* PLC was performed on the previous packet */
+       if (iLBCdec_inst->prev_enh_pl==1) {
+
+           inlag=(int)enh_period[ENH_NBLOCKS_EXTRA+ioffset];
+
+           /* re-search the lag at full rate around the downsampled
+              estimate (inlag-1 .. inlag+1) */
+           lag = inlag-1;
+           maxcc = xCorrCoef(in, in+lag, plc_blockl);
+           for (ilag=inlag; ilag<=inlag+1; ilag++) {
+               cc = xCorrCoef(in, in+ilag, plc_blockl);
+
+
+
+
+Andersen, et al.              Experimental                    [Page 121]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               if (cc > maxcc) {
+                   maxcc = cc;
+                   lag = ilag;
+               }
+           }
+
+           enh_period[ENH_NBLOCKS_EXTRA+ioffset-1]=(float)lag;
+
+           /* compute new concealed residual for the old lookahead,
+              mix the forward PLC with a backward PLC from
+              the new frame */
+
+           inPtr=&in[lag-1];
+
+           enh_bufPtr1=&plc_pred[plc_blockl-1];
+
+           if (lag>plc_blockl) {
+               start=plc_blockl;
+           } else {
+               start=lag;
+           }
+
+           for (isample = start; isample>0; isample--) {
+               *enh_bufPtr1-- = *inPtr--;
+           }
+
+           /* if lag<plc_blockl, fill the remainder of the backward
+              prediction from the end of the enhancer buffer */
+           enh_bufPtr2=&enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl];
+           for (isample = (plc_blockl-1-lag); isample>=0; isample--) {
+               *enh_bufPtr1-- = *enh_bufPtr2--;
+           }
+
+           /* limit energy change */
+           ftmp2=0.0;
+           ftmp1=0.0;
+           for (i=0;i<plc_blockl;i++) {
+               ftmp2+=enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl-i]*
+                   enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl-i];
+               ftmp1+=plc_pred[i]*plc_pred[i];
+           }
+           ftmp1=(float)sqrt(ftmp1/(float)plc_blockl);
+           ftmp2=(float)sqrt(ftmp2/(float)plc_blockl);
+           if (ftmp1>(float)2.0*ftmp2 && ftmp1>0.0) {
+               /* scale down, with a 10-sample linear ramp at the end
+                  to avoid a discontinuity */
+               for (i=0;i<plc_blockl-10;i++) {
+                   plc_pred[i]*=(float)2.0*ftmp2/ftmp1;
+               }
+               for (i=plc_blockl-10;i<plc_blockl;i++) {
+                   plc_pred[i]*=(float)(i-plc_blockl+10)*
+                       ((float)1.0-(float)2.0*ftmp2/ftmp1)/(float)(10)+
+
+
+
+Andersen, et al.              Experimental                    [Page 122]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                       (float)2.0*ftmp2/ftmp1;
+               }
+           }
+
+           /* cross-fade the end of the enhancer buffer into the
+              backward-predicted signal */
+           enh_bufPtr1=&enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl];
+           for (i=0; i<plc_blockl; i++) {
+               ftmp1 = (float) (i+1) / (float) (plc_blockl+1);
+               *enh_bufPtr1 *= ftmp1;
+               *enh_bufPtr1 += ((float)1.0-ftmp1)*
+                                   plc_pred[plc_blockl-1-i];
+               enh_bufPtr1--;
+           }
+       }
+
+       if (iLBCdec_inst->mode==20) {
+           /* Enhancer with 40 samples delay */
+           for (iblock = 0; iblock<2; iblock++) {
+               enhancer(out+iblock*ENH_BLOCKL, enh_buf,
+                   ENH_BUFL, (5+iblock)*ENH_BLOCKL+40,
+                   ENH_ALPHA0, enh_period, enh_plocsTbl,
+                       ENH_NBLOCKS_TOT);
+           }
+       } else if (iLBCdec_inst->mode==30) {
+           /* Enhancer with 80 samples delay */
+           for (iblock = 0; iblock<3; iblock++) {
+               enhancer(out+iblock*ENH_BLOCKL, enh_buf,
+                   ENH_BUFL, (4+iblock)*ENH_BLOCKL,
+                   ENH_ALPHA0, enh_period, enh_plocsTbl,
+                       ENH_NBLOCKS_TOT);
+           }
+       }
+
+       return (lag*2);
+   }
+
+A.17.  filter.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       filter.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+
+
+
+Andersen, et al.              Experimental                    [Page 123]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #ifndef __iLBC_FILTER_H
+   #define __iLBC_FILTER_H
+
+   /* all-pole, all-zero, and cascaded pole-zero filters plus a
+      decimating low-pass filter (DownSample) */
+   void AllPoleFilter(
+       float *InOut,   /* (i/o) on entrance InOut[-orderCoef] to
+                              InOut[-1] contain the state of the
+                              filter (delayed samples). InOut[0] to
+                              InOut[lengthInOut-1] contain the filter
+                              input, on exit InOut[-orderCoef] to
+                              InOut[-1] is unchanged and InOut[0] to
+                              InOut[lengthInOut-1] contain filtered
+                              samples */
+       float *Coef,/* (i) filter coefficients, Coef[0] is assumed
+                              to be 1.0 */
+       int lengthInOut,/* (i) number of input/output samples */
+       int orderCoef   /* (i) number of filter coefficients */
+   );
+
+   void AllZeroFilter(
+       float *In,      /* (i) In[0] to In[lengthInOut-1] contain
+                              filter input samples */
+       float *Coef,/* (i) filter coefficients (Coef[0] is assumed
+                              to be 1.0) */
+       int lengthInOut,/* (i) number of input/output samples */
+       int orderCoef,  /* (i) number of filter coefficients */
+       float *Out      /* (i/o) on entrance Out[-orderCoef] to Out[-1]
+                              contain the filter state, on exit Out[0]
+                              to Out[lengthInOut-1] contain filtered
+                              samples */
+   );
+
+   void ZeroPoleFilter(
+       float *In,      /* (i) In[0] to In[lengthInOut-1] contain filter
+                              input samples In[-orderCoef] to In[-1]
+                              contain state of all-zero section */
+       float *ZeroCoef,/* (i) filter coefficients for all-zero
+                              section (ZeroCoef[0] is assumed to
+                              be 1.0) */
+       float *PoleCoef,/* (i) filter coefficients for all-pole section
+                              (PoleCoef[0] is assumed to be 1.0) */
+       int lengthInOut,/* (i) number of input/output samples */
+       int orderCoef,  /* (i) number of filter coefficients */
+       float *Out      /* (i/o) on entrance Out[-orderCoef] to Out[-1]
+                              contain state of all-pole section. On
+                              exit Out[0] to Out[lengthInOut-1]
+                              contain filtered samples */
+   );
+
+
+
+
+Andersen, et al.              Experimental                    [Page 124]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void DownSample (
+       float  *In,     /* (i) input samples */
+       float  *Coef,   /* (i) filter coefficients */
+       int lengthIn,   /* (i) number of input samples */
+       float  *state,  /* (i) filter state */
+       float  *Out     /* (o) downsampled output */
+   );
+
+   #endif
+
+A.18.  filter.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       filter.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "iLBC_define.h"
+
+   /*----------------------------------------------------------------*
+    *  all-pole filter
+    *---------------------------------------------------------------*/
+
+   void AllPoleFilter(
+       float *InOut,   /* (i/o) on entrance InOut[-orderCoef] to
+                              InOut[-1] contain the state of the
+                              filter (delayed samples). InOut[0] to
+                              InOut[lengthInOut-1] contain the filter
+                              input, on exit InOut[-orderCoef] to
+                              InOut[-1] is unchanged and InOut[0] to
+                              InOut[lengthInOut-1] contain filtered
+                              samples */
+       float *Coef,/* (i) filter coefficients, Coef[0] is assumed
+                              to be 1.0 */
+       int lengthInOut,/* (i) number of input/output samples */
+       int orderCoef   /* (i) number of filter coefficients */
+   ){
+       int n,k;
+
+       /* in-place direct-form recursion:
+          InOut[n] -= sum_{k=1..orderCoef} Coef[k]*InOut[n-k] */
+       for(n=0;n<lengthInOut;n++){
+           for(k=1;k<=orderCoef;k++){
+               *InOut -= Coef[k]*InOut[-k];
+
+
+
+Andersen, et al.              Experimental                    [Page 125]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           }
+           InOut++;
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  all-zero filter
+    *---------------------------------------------------------------*/
+
+   void AllZeroFilter(
+       float *In,      /* (i) In[0] to In[lengthInOut-1] contain
+                              filter input samples */
+       float *Coef,/* (i) filter coefficients (Coef[0] is assumed
+                              to be 1.0) */
+       int lengthInOut,/* (i) number of input/output samples */
+       int orderCoef,  /* (i) number of filter coefficients */
+       float *Out      /* (i/o) on entrance Out[-orderCoef] to Out[-1]
+                              contain the filter state, on exit Out[0]
+                              to Out[lengthInOut-1] contain filtered
+                              samples */
+   ){
+       int n,k;
+
+       /* FIR convolution:
+          Out[n] = sum_{k=0..orderCoef} Coef[k]*In[n-k] */
+       for(n=0;n<lengthInOut;n++){
+           *Out = Coef[0]*In[0];
+           for(k=1;k<=orderCoef;k++){
+               *Out += Coef[k]*In[-k];
+           }
+           Out++;
+           In++;
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  pole-zero filter
+    *---------------------------------------------------------------*/
+
+   void ZeroPoleFilter(
+       float *In,      /* (i) In[0] to In[lengthInOut-1] contain
+                              filter input samples In[-orderCoef] to
+                              In[-1] contain state of all-zero
+                              section */
+       float *ZeroCoef,/* (i) filter coefficients for all-zero
+                              section (ZeroCoef[0] is assumed to
+                              be 1.0) */
+       float *PoleCoef,/* (i) filter coefficients for all-pole section
+                              (PoleCoef[0] is assumed to be 1.0) */
+       int lengthInOut,/* (i) number of input/output samples */
+
+
+
+Andersen, et al.              Experimental                    [Page 126]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       int orderCoef,  /* (i) number of filter coefficients */
+       float *Out      /* (i/o) on entrance Out[-orderCoef] to Out[-1]
+                              contain state of all-pole section. On
+                              exit Out[0] to Out[lengthInOut-1]
+                              contain filtered samples */
+   ){
+       /* cascade: the FIR section writes Out, the IIR section then
+          filters Out in place */
+       AllZeroFilter(In,ZeroCoef,lengthInOut,orderCoef,Out);
+       AllPoleFilter(Out,PoleCoef,lengthInOut,orderCoef);
+   }
+
+   /*----------------------------------------------------------------*
+    * downsample (LP filter and decimation)
+    *---------------------------------------------------------------*/
+
+   void DownSample (
+       float  *In,     /* (i) input samples */
+       float  *Coef,   /* (i) filter coefficients */
+       int lengthIn,   /* (i) number of input samples */
+       float  *state,  /* (i) filter state */
+       float  *Out     /* (o) downsampled output */
+   ){
+       float   o;
+       float *Out_ptr = Out;
+       float *Coef_ptr, *In_ptr;
+       float *state_ptr;
+       int i, j, stop;
+
+       /* LP filter and decimate at the same time */
+
+       for (i = DELAY_DS; i < lengthIn; i+=FACTOR_DS)
+       {
+           Coef_ptr = &Coef[0];
+           In_ptr = &In[i];
+           state_ptr = &state[FILTERORDER_DS-2];
+
+           o = (float)0.0;
+
+           stop = (i < FILTERORDER_DS) ? i + 1 : FILTERORDER_DS;
+
+           /* taps that overlap the available input samples */
+           for (j = 0; j < stop; j++)
+           {
+               o += *Coef_ptr++ * (*In_ptr--);
+           }
+           /* remaining taps are fed from the saved filter state */
+           for (j = i + 1; j < FILTERORDER_DS; j++)
+           {
+               o += *Coef_ptr++ * (*state_ptr--);
+           }
+
+
+
+
+Andersen, et al.              Experimental                    [Page 127]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           *Out_ptr++ = o;
+       }
+
+       /* Get the last part (use zeros as input for the future) */
+
+       for (i=(lengthIn+FACTOR_DS); i<(lengthIn+DELAY_DS);
+               i+=FACTOR_DS) {
+
+           o=(float)0.0;
+
+           if (i<lengthIn) {
+               Coef_ptr = &Coef[0];
+               In_ptr = &In[i];
+               /* NOTE(review): In_ptr is set just above but the loop
+                  below reads *Out_ptr-- instead; this looks like a
+                  typo for *In_ptr-- (cf. the matching loop in the
+                  else branch). Kept verbatim since this is the RFC
+                  3951 reference listing -- confirm against the RFC
+                  errata before changing. */
+               for (j=0; j<FILTERORDER_DS; j++) {
+                       o += *Coef_ptr++ * (*Out_ptr--);
+               }
+           } else {
+               Coef_ptr = &Coef[i-lengthIn];
+               In_ptr = &In[lengthIn-1];
+               for (j=0; j<FILTERORDER_DS-(i-lengthIn); j++) {
+                       o += *Coef_ptr++ * (*In_ptr--);
+               }
+           }
+           *Out_ptr++ = o;
+       }
+   }
+
+A.19.  FrameClassify.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       FrameClassify.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_FRAMECLASSIFY_H
+   #define __iLBC_FRAMECLASSIFY_H
+
+   /* locates the start state: returns the index of the 80-sample
+      segment of the residual with the most weighted energy */
+   int FrameClassify(      /* index to the max-energy sub-frame */
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i/o) the encoder state structure */
+       float *residual     /* (i) lpc residual signal */
+   );
+
+
+
+Andersen, et al.              Experimental                    [Page 128]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #endif
+
+A.20.  FrameClassify.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       FrameClassify.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "iLBC_define.h"
+
+   /*---------------------------------------------------------------*
+    *  Classification of subframes to localize start state
+    *--------------------------------------------------------------*/
+
+   int FrameClassify(      /* index to the max-energy sub-frame */
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i/o) the encoder state structure */
+       float *residual     /* (i) lpc residual signal */
+   ) {
+       float max_ssqEn, fssqEn[NSUB_MAX], bssqEn[NSUB_MAX], *pp;
+       int n, l, max_ssqEn_n;
+       /* position weights favouring the middle of the frame */
+       const float ssqEn_win[NSUB_MAX-1]={(float)0.8,(float)0.9,
+           (float)1.0,(float)0.9,(float)0.8};
+       /* 5-sample linear ramp used to taper window edges */
+       const float sampEn_win[5]={(float)1.0/(float)6.0,
+           (float)2.0/(float)6.0, (float)3.0/(float)6.0,
+           (float)4.0/(float)6.0, (float)5.0/(float)6.0};
+
+       /* init the front and back energies to zero */
+
+       memset(fssqEn, 0, NSUB_MAX*sizeof(float));
+       memset(bssqEn, 0, NSUB_MAX*sizeof(float));
+
+       /* Calculate front of first sequence */
+
+       n=0;
+       pp=residual;
+       for (l=0; l<5; l++) {
+           fssqEn[n] += sampEn_win[l] * (*pp) * (*pp);
+           pp++;
+       }
+       for (l=5; l<SUBL; l++) {
+
+
+
+Andersen, et al.              Experimental                    [Page 129]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           fssqEn[n] += (*pp) * (*pp);
+           pp++;
+       }
+
+       /* Calculate front and back of all middle sequences */
+
+       for (n=1; n<iLBCenc_inst->nsub-1; n++) {
+           pp=residual+n*SUBL;
+           for (l=0; l<5; l++) {
+               fssqEn[n] += sampEn_win[l] * (*pp) * (*pp);
+               bssqEn[n] += (*pp) * (*pp);
+               pp++;
+           }
+           for (l=5; l<SUBL-5; l++) {
+               fssqEn[n] += (*pp) * (*pp);
+               bssqEn[n] += (*pp) * (*pp);
+               pp++;
+           }
+           for (l=SUBL-5; l<SUBL; l++) {
+               fssqEn[n] += (*pp) * (*pp);
+               bssqEn[n] += sampEn_win[SUBL-l-1] * (*pp) * (*pp);
+               pp++;
+           }
+       }
+
+       /* Calculate back of last sequence */
+
+       n=iLBCenc_inst->nsub-1;
+       pp=residual+n*SUBL;
+       for (l=0; l<SUBL-5; l++) {
+           bssqEn[n] += (*pp) * (*pp);
+           pp++;
+       }
+       for (l=SUBL-5; l<SUBL; l++) {
+           bssqEn[n] += sampEn_win[SUBL-l-1] * (*pp) * (*pp);
+           pp++;
+       }
+
+       /* find the index to the weighted 80 sample with
+          most energy */
+
+       if (iLBCenc_inst->mode==20) l=1;
+       else                        l=0;
+
+       /* candidate n covers sub-frames n-1 and n, tapered at the
+          outer window edges and weighted by position */
+       max_ssqEn=(fssqEn[0]+bssqEn[1])*ssqEn_win[l];
+       max_ssqEn_n=1;
+       for (n=2; n<iLBCenc_inst->nsub; n++) {
+
+
+
+
+Andersen, et al.              Experimental                    [Page 130]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           l++;
+           if ((fssqEn[n-1]+bssqEn[n])*ssqEn_win[l] > max_ssqEn) {
+               max_ssqEn=(fssqEn[n-1]+bssqEn[n]) *
+                               ssqEn_win[l];
+               max_ssqEn_n=n;
+           }
+       }
+
+       return max_ssqEn_n;
+   }
+
+A.21.  gainquant.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       gainquant.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_GAINQUANT_H
+   #define __iLBC_GAINQUANT_H
+
+   /* scalar gain quantizer/dequantizer for the gain-shape coding of
+      the residual; the table is selected by codebook size (cblen) */
+   float gainquant(/* (o) quantized gain value */
+       float in,       /* (i) gain value */
+       float maxIn,/* (i) maximum of gain value */
+       int cblen,      /* (i) number of quantization indices */
+       int *index      /* (o) quantization index */
+   );
+
+   float gaindequant(  /* (o) quantized gain value */
+       int index,      /* (i) quantization index */
+       float maxIn,/* (i) maximum of unquantized gain */
+       int cblen       /* (i) number of quantization indices */
+   );
+
+   #endif
+
+A.22.  gainquant.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+
+
+
+Andersen, et al.              Experimental                    [Page 131]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       gainquant.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <string.h>
+   #include <math.h>
+   #include "constants.h"
+   #include "filter.h"
+
+   /*----------------------------------------------------------------*
+    *  quantizer for the gain in the gain-shape coding of residual
+    *---------------------------------------------------------------*/
+
+   float gainquant(/* (o) quantized gain value */
+       float in,       /* (i) gain value */
+       float maxIn,/* (i) maximum of gain value */
+       int cblen,      /* (i) number of quantization indices */
+       int *index      /* (o) quantization index */
+   ){
+       int i, tindex;
+       float minmeasure,measure, *cb, scale;
+
+       /* ensure a lower bound on the scaling factor */
+
+       scale=maxIn;
+
+       if (scale<0.1) {
+           scale=(float)0.1;
+       }
+
+       /* select the quantization table */
+
+       /* cblen 8/16 picks the small tables; any other size falls
+          through to gain_sq5Tbl (cf. gaindequant, which checks 32) */
+       if (cblen == 8) {
+           cb = gain_sq3Tbl;
+       } else if (cblen == 16) {
+           cb = gain_sq4Tbl;
+       } else  {
+           cb = gain_sq5Tbl;
+       }
+
+       /* select the best index in the quantization table */
+
+       /* exhaustive search for the entry that minimizes the squared
+          quantization error against the scaled table value */
+       minmeasure=10000000.0;
+       tindex=0;
+       for (i=0; i<cblen; i++) {
+
+
+
+Andersen, et al.              Experimental                    [Page 132]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           measure=(in-scale*cb[i])*(in-scale*cb[i]);
+
+           if (measure<minmeasure) {
+               tindex=i;
+               minmeasure=measure;
+           }
+       }
+       *index=tindex;
+
+       /* return the quantized value */
+
+       return scale*cb[tindex];
+   }
+
+   /*----------------------------------------------------------------*
+    *  decoder for quantized gains in the gain-shape coding of
+    *  residual
+    *---------------------------------------------------------------*/
+
+   float gaindequant(  /* (o) quantized gain value */
+       int index,      /* (i) quantization index */
+       float maxIn,/* (i) maximum of unquantized gain */
+       int cblen       /* (i) number of quantization indices */
+   ){
+       float scale;
+
+       /* obtain correct scale factor */
+
+       /* note: unlike gainquant, the magnitude of maxIn is used */
+       scale=(float)fabs(maxIn);
+
+       if (scale<0.1) {
+           scale=(float)0.1;
+       }
+
+       /* select the quantization table and return the decoded value */
+
+       if (cblen==8) {
+           return scale*gain_sq3Tbl[index];
+       } else if (cblen==16) {
+           return scale*gain_sq4Tbl[index];
+       }
+       else if (cblen==32) {
+           return scale*gain_sq5Tbl[index];
+       }
+
+       /* unsupported codebook size */
+       return 0.0;
+   }
+
+
+
+
+Andersen, et al.              Experimental                    [Page 133]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.23.  getCBvec.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       getCBvec.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_GETCBVEC_H
+   #define __iLBC_GETCBVEC_H
+
+   void getCBvec(
+       float *cbvec,   /* (o) Constructed codebook vector */
+       float *mem,     /* (i) Codebook buffer */
+       int index,      /* (i) Codebook index */
+       int lMem,       /* (i) Length of codebook buffer */
+       int cbveclen/* (i) Codebook vector length */
+   );
+
+   #endif
+
+A.24.  getCBvec.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       getCBvec.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "iLBC_define.h"
+   #include "constants.h"
+   #include <string.h>
+
+   /*----------------------------------------------------------------*
+    *  Construct codebook vector for given index.
+    *---------------------------------------------------------------*/
+
+   void getCBvec(
+
+
+
+Andersen, et al.              Experimental                    [Page 134]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *cbvec,   /* (o) Constructed codebook vector */
+       float *mem,     /* (i) Codebook buffer */
+       int index,      /* (i) Codebook index */
+       int lMem,       /* (i) Length of codebook buffer */
+       int cbveclen/* (i) Codebook vector length */
+   ){
+       int j, k, n, memInd, sFilt;
+       float tmpbuf[CB_MEML];
+       int base_size;
+       int ilow, ihigh;
+       float alfa, alfa1;
+
+       /* Determine size of codebook sections */
+
+       base_size=lMem-cbveclen+1;
+
+       if (cbveclen==SUBL) {
+           base_size+=cbveclen/2;
+       }
+
+       /* No filter -> First codebook section */
+
+       if (index<lMem-cbveclen+1) {
+
+           /* first non-interpolated vectors */
+
+           k=index+cbveclen;
+           /* get vector */
+           memcpy(cbvec, mem+lMem-k, cbveclen*sizeof(float));
+
+       } else if (index < base_size) {
+
+           k=2*(index-(lMem-cbveclen+1))+cbveclen;
+
+           ihigh=k/2;
+           ilow=ihigh-5;
+
+           /* Copy first noninterpolated part */
+
+           memcpy(cbvec, mem+lMem-k/2, ilow*sizeof(float));
+
+           /* interpolation */
+
+           alfa1=(float)0.2;
+           alfa=0.0;
+           for (j=ilow; j<ihigh; j++) {
+               cbvec[j]=((float)1.0-alfa)*mem[lMem-k/2+j]+
+                   alfa*mem[lMem-k+j];
+
+
+
+Andersen, et al.              Experimental                    [Page 135]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               alfa+=alfa1;
+           }
+
+           /* Copy second noninterpolated part */
+
+           memcpy(cbvec+ihigh, mem+lMem-k+ihigh,
+               (cbveclen-ihigh)*sizeof(float));
+
+       }
+
+       /* Higher codebook section based on filtering */
+
+       else {
+
+           /* first non-interpolated vectors */
+
+           if (index-base_size<lMem-cbveclen+1) {
+               float tempbuff2[CB_MEML+CB_FILTERLEN+1];
+               float *pos;
+               float *pp, *pp1;
+
+               memset(tempbuff2, 0,
+                   CB_HALFFILTERLEN*sizeof(float));
+               memcpy(&tempbuff2[CB_HALFFILTERLEN], mem,
+                   lMem*sizeof(float));
+               memset(&tempbuff2[lMem+CB_HALFFILTERLEN], 0,
+                   (CB_HALFFILTERLEN+1)*sizeof(float));
+
+               k=index-base_size+cbveclen;
+               sFilt=lMem-k;
+               memInd=sFilt+1-CB_HALFFILTERLEN;
+
+               /* do filtering */
+               pos=cbvec;
+               memset(pos, 0, cbveclen*sizeof(float));
+               for (n=0; n<cbveclen; n++) {
+                   pp=&tempbuff2[memInd+n+CB_HALFFILTERLEN];
+                   pp1=&cbfiltersTbl[CB_FILTERLEN-1];
+                   for (j=0; j<CB_FILTERLEN; j++) {
+                       (*pos)+=(*pp++)*(*pp1--);
+                   }
+                   pos++;
+               }
+           }
+
+           /* interpolated vectors */
+
+           else {
+
+
+
+Andersen, et al.              Experimental                    [Page 136]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               float tempbuff2[CB_MEML+CB_FILTERLEN+1];
+
+               float *pos;
+               float *pp, *pp1;
+               int i;
+
+               memset(tempbuff2, 0,
+                   CB_HALFFILTERLEN*sizeof(float));
+               memcpy(&tempbuff2[CB_HALFFILTERLEN], mem,
+                   lMem*sizeof(float));
+               memset(&tempbuff2[lMem+CB_HALFFILTERLEN], 0,
+                   (CB_HALFFILTERLEN+1)*sizeof(float));
+
+               k=2*(index-base_size-
+                   (lMem-cbveclen+1))+cbveclen;
+               sFilt=lMem-k;
+               memInd=sFilt+1-CB_HALFFILTERLEN;
+
+               /* do filtering */
+               pos=&tmpbuf[sFilt];
+               memset(pos, 0, k*sizeof(float));
+               for (i=0; i<k; i++) {
+                   pp=&tempbuff2[memInd+i+CB_HALFFILTERLEN];
+                   pp1=&cbfiltersTbl[CB_FILTERLEN-1];
+                   for (j=0; j<CB_FILTERLEN; j++) {
+                       (*pos)+=(*pp++)*(*pp1--);
+                   }
+                   pos++;
+               }
+
+               ihigh=k/2;
+               ilow=ihigh-5;
+
+               /* Copy first noninterpolated part */
+
+               memcpy(cbvec, tmpbuf+lMem-k/2,
+                   ilow*sizeof(float));
+
+               /* interpolation */
+
+               alfa1=(float)0.2;
+               alfa=0.0;
+               for (j=ilow; j<ihigh; j++) {
+                   cbvec[j]=((float)1.0-alfa)*
+                       tmpbuf[lMem-k/2+j]+alfa*tmpbuf[lMem-k+j];
+                   alfa+=alfa1;
+               }
+
+
+
+
+Andersen, et al.              Experimental                    [Page 137]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               /* Copy second noninterpolated part */
+
+               memcpy(cbvec+ihigh, tmpbuf+lMem-k+ihigh,
+                   (cbveclen-ihigh)*sizeof(float));
+           }
+       }
+   }
+
+A.25.  helpfun.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       helpfun.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_HELPFUN_H
+   #define __iLBC_HELPFUN_H
+
+   void autocorr(
+       float *r,       /* (o) autocorrelation vector */
+       const float *x, /* (i) data vector */
+       int N,          /* (i) length of data vector */
+       int order       /* largest lag for calculated
+                          autocorrelations */
+   );
+
+   void window(
+       float *z,       /* (o) the windowed data */
+       const float *x, /* (i) the original data vector */
+       const float *y, /* (i) the window */
+       int N           /* (i) length of all vectors */
+   );
+
+   void levdurb(
+       float *a,       /* (o) lpc coefficient vector starting
+                              with 1.0 */
+       float *k,       /* (o) reflection coefficients */
+       float *r,       /* (i) autocorrelation vector */
+       int order       /* (i) order of lpc filter */
+   );
+
+   void interpolate(
+
+
+
+Andersen, et al.              Experimental                    [Page 138]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *out,     /* (o) the interpolated vector */
+       float *in1,     /* (i) the first vector for the
+                              interpolation */
+       float *in2,     /* (i) the second vector for the
+                              interpolation */
+       float coef,     /* (i) interpolation weights */
+       int length      /* (i) length of all vectors */
+   );
+
+   void bwexpand(
+       float *out,     /* (o) the bandwidth expanded lpc
+                              coefficients */
+       float *in,      /* (i) the lpc coefficients before bandwidth
+                              expansion */
+       float coef,     /* (i) the bandwidth expansion factor */
+       int length      /* (i) the length of lpc coefficient vectors */
+   );
+
+   void vq(
+       float *Xq,      /* (o) the quantized vector */
+       int *index,     /* (o) the quantization index */
+       const float *CB,/* (i) the vector quantization codebook */
+       float *X,       /* (i) the vector to quantize */
+       int n_cb,       /* (i) the number of vectors in the codebook */
+       int dim         /* (i) the dimension of all vectors */
+   );
+
+   void SplitVQ(
+       float *qX,      /* (o) the quantized vector */
+       int *index,     /* (o) a vector of indexes for all vector
+                              codebooks in the split */
+       float *X,       /* (i) the vector to quantize */
+       const float *CB,/* (i) the quantizer codebook */
+       int nsplit,     /* the number of vector splits */
+       const int *dim, /* the dimension of X and qX */
+       const int *cbsize /* the number of vectors in the codebook */
+   );
+
+
+   void sort_sq(
+       float *xq,      /* (o) the quantized value */
+       int *index,     /* (o) the quantization index */
+       float x,    /* (i) the value to quantize */
+       const float *cb,/* (i) the quantization codebook */
+       int cb_size     /* (i) the size of the quantization codebook */
+   );
+
+   int LSF_check(      /* (o) 1 for stable lsf vectors and 0 for
+
+
+
+Andersen, et al.              Experimental                    [Page 139]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                              nonstable ones */
+       float *lsf,     /* (i) a table of lsf vectors */
+       int dim,    /* (i) the dimension of each lsf vector */
+       int NoAn    /* (i) the number of lsf vectors in the
+                              table */
+   );
+
+   #endif
+
+A.26.  helpfun.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       helpfun.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+
+   #include "iLBC_define.h"
+   #include "constants.h"
+
+   /*----------------------------------------------------------------*
+    *  calculation of auto correlation
+    *---------------------------------------------------------------*/
+
+   void autocorr(
+       float *r,       /* (o) autocorrelation vector */
+       const float *x, /* (i) data vector */
+       int N,          /* (i) length of data vector */
+       int order       /* largest lag for calculated
+                          autocorrelations */
+   ){
+       int     lag, n;
+       float   sum;
+
+       for (lag = 0; lag <= order; lag++) {
+           sum = 0;
+           for (n = 0; n < N - lag; n++) {
+               sum += x[n] * x[n+lag];
+           }
+           r[lag] = sum;
+       }
+
+
+
+Andersen, et al.              Experimental                    [Page 140]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   }
+
+   /*----------------------------------------------------------------*
+    *  window multiplication
+    *---------------------------------------------------------------*/
+
+   void window(
+       float *z,       /* (o) the windowed data */
+       const float *x, /* (i) the original data vector */
+       const float *y, /* (i) the window */
+       int N           /* (i) length of all vectors */
+   ){
+       int     i;
+
+       for (i = 0; i < N; i++) {
+           z[i] = x[i] * y[i];
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  levinson-durbin solution for lpc coefficients
+    *---------------------------------------------------------------*/
+
+   void levdurb(
+       float *a,       /* (o) lpc coefficient vector starting
+                              with 1.0 */
+       float *k,       /* (o) reflection coefficients */
+       float *r,       /* (i) autocorrelation vector */
+       int order       /* (i) order of lpc filter */
+   ){
+       float  sum, alpha;
+       int     m, m_h, i;
+
+       a[0] = 1.0;
+
+       if (r[0] < EPS) { /* if r[0] <= 0, set LPC coeff. to zero */
+           for (i = 0; i < order; i++) {
+               k[i] = 0;
+               a[i+1] = 0;
+           }
+       } else {
+           a[1] = k[0] = -r[1]/r[0];
+           alpha = r[0] + r[1] * k[0];
+           for (m = 1; m < order; m++){
+               sum = r[m + 1];
+               for (i = 0; i < m; i++){
+                   sum += a[i+1] * r[m - i];
+               }
+
+
+
+Andersen, et al.              Experimental                    [Page 141]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               k[m] = -sum / alpha;
+               alpha += k[m] * sum;
+               m_h = (m + 1) >> 1;
+               for (i = 0; i < m_h; i++){
+                   sum = a[i+1] + k[m] * a[m - i];
+                   a[m - i] += k[m] * a[i+1];
+                   a[i+1] = sum;
+               }
+               a[m+1] = k[m];
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  interpolation between vectors
+    *---------------------------------------------------------------*/
+
+   void interpolate(
+       float *out,      /* (o) the interpolated vector */
+       float *in1,     /* (i) the first vector for the
+                              interpolation */
+       float *in2,     /* (i) the second vector for the
+                              interpolation */
+       float coef,      /* (i) interpolation weights */
+       int length      /* (i) length of all vectors */
+   ){
+       int i;
+       float invcoef;
+
+       invcoef = (float)1.0 - coef;
+       for (i = 0; i < length; i++) {
+           out[i] = coef * in1[i] + invcoef * in2[i];
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  lpc bandwidth expansion
+    *---------------------------------------------------------------*/
+
+   void bwexpand(
+       float *out,      /* (o) the bandwidth expanded lpc
+                              coefficients */
+       float *in,      /* (i) the lpc coefficients before bandwidth
+                              expansion */
+       float coef,     /* (i) the bandwidth expansion factor */
+       int length      /* (i) the length of lpc coefficient vectors */
+   ){
+       int i;
+
+
+
+Andersen, et al.              Experimental                    [Page 142]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float  chirp;
+
+       chirp = coef;
+
+       out[0] = in[0];
+       for (i = 1; i < length; i++) {
+           out[i] = chirp * in[i];
+           chirp *= coef;
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  vector quantization
+    *---------------------------------------------------------------*/
+
+   void vq(
+       float *Xq,      /* (o) the quantized vector */
+       int *index,     /* (o) the quantization index */
+       const float *CB,/* (i) the vector quantization codebook */
+       float *X,       /* (i) the vector to quantize */
+       int n_cb,       /* (i) the number of vectors in the codebook */
+       int dim         /* (i) the dimension of all vectors */
+   ){
+       int     i, j;
+       int     pos, minindex;
+       float   dist, tmp, mindist;
+
+       pos = 0;
+       mindist = FLOAT_MAX;
+       minindex = 0;
+       for (j = 0; j < n_cb; j++) {
+           dist = X[0] - CB[pos];
+           dist *= dist;
+           for (i = 1; i < dim; i++) {
+               tmp = X[i] - CB[pos + i];
+               dist += tmp*tmp;
+           }
+
+           if (dist < mindist) {
+               mindist = dist;
+               minindex = j;
+           }
+           pos += dim;
+       }
+       for (i = 0; i < dim; i++) {
+           Xq[i] = CB[minindex*dim + i];
+       }
+       *index = minindex;
+
+
+
+Andersen, et al.              Experimental                    [Page 143]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   }
+
+   /*----------------------------------------------------------------*
+    *  split vector quantization
+    *---------------------------------------------------------------*/
+
+   void SplitVQ(
+       float *qX,      /* (o) the quantized vector */
+       int *index,     /* (o) a vector of indexes for all vector
+                              codebooks in the split */
+       float *X,       /* (i) the vector to quantize */
+       const float *CB,/* (i) the quantizer codebook */
+       int nsplit,     /* the number of vector splits */
+       const int *dim, /* the dimension of X and qX */
+       const int *cbsize /* the number of vectors in the codebook */
+   ){
+       int    cb_pos, X_pos, i;
+
+       cb_pos = 0;
+       X_pos= 0;
+       for (i = 0; i < nsplit; i++) {
+           vq(qX + X_pos, index + i, CB + cb_pos, X + X_pos,
+               cbsize[i], dim[i]);
+           X_pos += dim[i];
+           cb_pos += dim[i] * cbsize[i];
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  scalar quantization
+    *---------------------------------------------------------------*/
+
+   void sort_sq(
+       float *xq,      /* (o) the quantized value */
+       int *index,     /* (o) the quantization index */
+       float x,    /* (i) the value to quantize */
+       const float *cb,/* (i) the quantization codebook */
+       int cb_size      /* (i) the size of the quantization codebook */
+   ){
+       int i;
+
+       if (x <= cb[0]) {
+           *index = 0;
+           *xq = cb[0];
+       } else {
+           i = 0;
+           while ((x > cb[i]) && i < cb_size - 1) {
+               i++;
+
+
+
+Andersen, et al.              Experimental                    [Page 144]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           }
+
+           if (x > ((cb[i] + cb[i - 1])/2)) {
+               *index = i;
+               *xq = cb[i];
+           } else {
+               *index = i - 1;
+               *xq = cb[i - 1];
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  check for stability of lsf coefficients
+    *---------------------------------------------------------------*/
+
+   int LSF_check(    /* (o) 1 for stable lsf vectors and 0 for
+                              nonstable ones */
+       float *lsf,     /* (i) a table of lsf vectors */
+       int dim,    /* (i) the dimension of each lsf vector */
+       int NoAn    /* (i) the number of lsf vectors in the
+                              table */
+   ){
+       int k,n,m, Nit=2, change=0,pos;
+       float tmp;
+       static float eps=(float)0.039; /* 50 Hz */
+       static float eps2=(float)0.0195;
+       static float maxlsf=(float)3.14; /* 4000 Hz */
+       static float minlsf=(float)0.01; /* 0 Hz */
+
+       /* LSF separation check*/
+
+       for (n=0; n<Nit; n++) { /* Run through a couple of times */
+           for (m=0; m<NoAn; m++) { /* Number of analyses per frame */
+               for (k=0; k<(dim-1); k++) {
+                   pos=m*dim+k;
+
+                   if ((lsf[pos+1]-lsf[pos])<eps) {
+
+                       if (lsf[pos+1]<lsf[pos]) {
+                           tmp=lsf[pos+1];
+                           lsf[pos+1]= lsf[pos]+eps2;
+                           lsf[pos]= lsf[pos+1]-eps2;
+                       } else {
+                           lsf[pos]-=eps2;
+                           lsf[pos+1]+=eps2;
+                       }
+                       change=1;
+
+
+
+Andersen, et al.              Experimental                    [Page 145]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   }
+
+                   if (lsf[pos]<minlsf) {
+                       lsf[pos]=minlsf;
+                       change=1;
+                   }
+
+                   if (lsf[pos]>maxlsf) {
+                       lsf[pos]=maxlsf;
+                       change=1;
+                   }
+               }
+           }
+       }
+
+       return change;
+   }
+
+A.27.  hpInput.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       hpInput.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_HPINPUT_H
+   #define __iLBC_HPINPUT_H
+
+   void hpInput(
+       float *In,  /* (i) vector to filter */
+       int len,    /* (i) length of vector to filter */
+       float *Out, /* (o) the resulting filtered vector */
+       float *mem  /* (i/o) the filter state */
+   );
+
+   #endif
+
+A.28.  hpInput.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+
+
+Andersen, et al.              Experimental                    [Page 146]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       hpInput.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "constants.h"
+
+   /*----------------------------------------------------------------*
+    *  Input high-pass filter
+    *---------------------------------------------------------------*/
+
+   void hpInput(
+       float *In,  /* (i) vector to filter */
+       int len,    /* (i) length of vector to filter */
+       float *Out, /* (o) the resulting filtered vector */
+       float *mem  /* (i/o) the filter state */
+   ){
+       int i;
+       float *pi, *po;
+
+       /* all-zero section*/
+
+       pi = &In[0];
+       po = &Out[0];
+       for (i=0; i<len; i++) {
+           *po = hpi_zero_coefsTbl[0] * (*pi);
+           *po += hpi_zero_coefsTbl[1] * mem[0];
+           *po += hpi_zero_coefsTbl[2] * mem[1];
+
+           mem[1] = mem[0];
+           mem[0] = *pi;
+           po++;
+           pi++;
+
+       }
+
+       /* all-pole section*/
+
+       po = &Out[0];
+       for (i=0; i<len; i++) {
+           *po -= hpi_pole_coefsTbl[1] * mem[2];
+           *po -= hpi_pole_coefsTbl[2] * mem[3];
+
+           mem[3] = mem[2];
+           mem[2] = *po;
+           po++;
+
+
+
+Andersen, et al.              Experimental                    [Page 147]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       }
+   }
+
+A.29.  hpOutput.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       hpOutput.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_HPOUTPUT_H
+   #define __iLBC_HPOUTPUT_H
+
+   void hpOutput(
+       float *In,  /* (i) vector to filter */
+       int len,/* (i) length of vector to filter */
+       float *Out, /* (o) the resulting filtered vector */
+       float *mem  /* (i/o) the filter state */
+   );
+
+   #endif
+
+A.30.  hpOutput.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       hpOutput.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "constants.h"
+
+   /*----------------------------------------------------------------*
+    *  Output high-pass filter
+    *---------------------------------------------------------------*/
+
+   void hpOutput(
+
+
+
+Andersen, et al.              Experimental                    [Page 148]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *In,  /* (i) vector to filter */
+       int len,/* (i) length of vector to filter */
+       float *Out, /* (o) the resulting filtered vector */
+       float *mem  /* (i/o) the filter state */
+   ){
+       int i;
+       float *pi, *po;
+
+       /* all-zero section*/
+
+       pi = &In[0];
+       po = &Out[0];
+       for (i=0; i<len; i++) {
+           *po = hpo_zero_coefsTbl[0] * (*pi);
+           *po += hpo_zero_coefsTbl[1] * mem[0];
+           *po += hpo_zero_coefsTbl[2] * mem[1];
+
+           mem[1] = mem[0];
+           mem[0] = *pi;
+           po++;
+           pi++;
+
+       }
+
+       /* all-pole section*/
+
+       po = &Out[0];
+       for (i=0; i<len; i++) {
+           *po -= hpo_pole_coefsTbl[1] * mem[2];
+           *po -= hpo_pole_coefsTbl[2] * mem[3];
+
+           mem[3] = mem[2];
+           mem[2] = *po;
+           po++;
+       }
+   }
+
+A.31.  iCBConstruct.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iCBConstruct.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+
+
+
+Andersen, et al.              Experimental                    [Page 149]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   ******************************************************************/
+
+   #ifndef __iLBC_ICBCONSTRUCT_H
+   #define __iLBC_ICBCONSTRUCT_H
+
+   void index_conv_enc(
+       int *index          /* (i/o) Codebook indexes */
+   );
+
+   void index_conv_dec(
+       int *index          /* (i/o) Codebook indexes */
+   );
+
+   void iCBConstruct(
+       float *decvector,   /* (o) Decoded vector */
+       int *index,         /* (i) Codebook indices */
+       int *gain_index,/* (i) Gain quantization indices */
+       float *mem,         /* (i) Buffer for codevector construction */
+       int lMem,           /* (i) Length of buffer */
+       int veclen,         /* (i) Length of vector */
+       int nStages         /* (i) Number of codebook stages */
+   );
+
+   #endif
+
+A.32.  iCBConstruct.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iCBConstruct.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+
+   #include "iLBC_define.h"
+   #include "gainquant.h"
+   #include "getCBvec.h"
+
+   /*----------------------------------------------------------------*
+    *  Convert the codebook indexes to make the search easier
+    *---------------------------------------------------------------*/
+
+
+
+
+Andersen, et al.              Experimental                    [Page 150]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void index_conv_enc(
+       int *index          /* (i/o) Codebook indexes */
+   ){
+       int k;
+
+       for (k=1; k<CB_NSTAGES; k++) {
+
+           if ((index[k]>=108)&&(index[k]<172)) {
+               index[k]-=64;
+           } else if (index[k]>=236) {
+               index[k]-=128;
+           } else {
+               /* ERROR */
+           }
+       }
+   }
+
+   void index_conv_dec(
+       int *index          /* (i/o) Codebook indexes */
+   ){
+       int k;
+
+       for (k=1; k<CB_NSTAGES; k++) {
+
+           if ((index[k]>=44)&&(index[k]<108)) {
+               index[k]+=64;
+           } else if ((index[k]>=108)&&(index[k]<128)) {
+               index[k]+=128;
+           } else {
+               /* ERROR */
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  Construct decoded vector from codebook and gains.
+    *---------------------------------------------------------------*/
+
+   void iCBConstruct(
+       float *decvector,   /* (o) Decoded vector */
+       int *index,         /* (i) Codebook indices */
+       int *gain_index,/* (i) Gain quantization indices */
+       float *mem,         /* (i) Buffer for codevector construction */
+       int lMem,           /* (i) Length of buffer */
+       int veclen,         /* (i) Length of vector */
+       int nStages         /* (i) Number of codebook stages */
+   ){
+       int j,k;
+
+
+
+Andersen, et al.              Experimental                    [Page 151]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float gain[CB_NSTAGES];
+       float cbvec[SUBL];
+
+       /* gain de-quantization */
+
+       gain[0] = gaindequant(gain_index[0], 1.0, 32);
+       if (nStages > 1) {
+           gain[1] = gaindequant(gain_index[1],
+               (float)fabs(gain[0]), 16);
+       }
+       if (nStages > 2) {
+           gain[2] = gaindequant(gain_index[2],
+               (float)fabs(gain[1]), 8);
+       }
+
+       /* codebook vector construction and construction of
+       total vector */
+
+       getCBvec(cbvec, mem, index[0], lMem, veclen);
+       for (j=0;j<veclen;j++){
+           decvector[j] = gain[0]*cbvec[j];
+       }
+       if (nStages > 1) {
+           for (k=1; k<nStages; k++) {
+               getCBvec(cbvec, mem, index[k], lMem, veclen);
+               for (j=0;j<veclen;j++) {
+                   decvector[j] += gain[k]*cbvec[j];
+               }
+           }
+       }
+   }
+
+A.33.  iCBSearch.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iCBSearch.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_ICBSEARCH_H
+   #define __iLBC_ICBSEARCH_H
+
+
+
+
+Andersen, et al.              Experimental                    [Page 152]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void iCBSearch(
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i) the encoder state structure */
+       int *index,         /* (o) Codebook indices */
+       int *gain_index,/* (o) Gain quantization indices */
+       float *intarget,/* (i) Target vector for encoding */
+       float *mem,         /* (i) Buffer for codebook construction */
+       int lMem,           /* (i) Length of buffer */
+       int lTarget,    /* (i) Length of vector */
+       int nStages,    /* (i) Number of codebook stages */
+       float *weightDenum, /* (i) weighting filter coefficients */
+       float *weightState, /* (i) weighting filter state */
+       int block           /* (i) the sub-block number */
+   );
+
+   #endif
+
+A.34.  iCBSearch.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       iCBSearch.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <string.h>
+
+   #include "iLBC_define.h"
+   #include "gainquant.h"
+   #include "createCB.h"
+   #include "filter.h"
+   #include "constants.h"
+
+   /*----------------------------------------------------------------*
+    *  Search routine for codebook encoding and gain quantization.
+    *---------------------------------------------------------------*/
+
+   void iCBSearch(
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i) the encoder state structure */
+       int *index,         /* (o) Codebook indices */
+       int *gain_index,/* (o) Gain quantization indices */
+
+
+
+Andersen, et al.              Experimental                    [Page 153]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *intarget,/* (i) Target vector for encoding */
+       float *mem,         /* (i) Buffer for codebook construction */
+       int lMem,           /* (i) Length of buffer */
+       int lTarget,    /* (i) Length of vector */
+       int nStages,    /* (i) Number of codebook stages */
+       float *weightDenum, /* (i) weighting filter coefficients */
+       float *weightState, /* (i) weighting filter state */
+       int block           /* (i) the sub-block number */
+   ){
+       int i, j, icount, stage, best_index, range, counter;
+       float max_measure, gain, measure, crossDot, ftmp;
+       float gains[CB_NSTAGES];
+       float target[SUBL];
+       int base_index, sInd, eInd, base_size;
+       int sIndAug=0, eIndAug=0;
+       float buf[CB_MEML+SUBL+2*LPC_FILTERORDER];
+       float invenergy[CB_EXPAND*128], energy[CB_EXPAND*128];
+       float *pp, *ppi=0, *ppo=0, *ppe=0;
+       float cbvectors[CB_MEML];
+       float tene, cene, cvec[SUBL];
+       float aug_vec[SUBL];
+
+       memset(cvec,0,SUBL*sizeof(float));
+
+       /* Determine size of codebook sections */
+
+       base_size=lMem-lTarget+1;
+
+       if (lTarget==SUBL) {
+           base_size=lMem-lTarget+1+lTarget/2;
+       }
+
+       /* setup buffer for weighting */
+
+       memcpy(buf,weightState,sizeof(float)*LPC_FILTERORDER);
+       memcpy(buf+LPC_FILTERORDER,mem,lMem*sizeof(float));
+       memcpy(buf+LPC_FILTERORDER+lMem,intarget,lTarget*sizeof(float));
+
+       /* weighting */
+
+       AllPoleFilter(buf+LPC_FILTERORDER, weightDenum,
+           lMem+lTarget, LPC_FILTERORDER);
+
+       /* Construct the codebook and target needed */
+
+       memcpy(target, buf+LPC_FILTERORDER+lMem, lTarget*sizeof(float));
+
+       tene=0.0;
+
+
+
+Andersen, et al.              Experimental                    [Page 154]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       for (i=0; i<lTarget; i++) {
+           tene+=target[i]*target[i];
+       }
+
+       /* Prepare search over one more codebook section. This section
+          is created by filtering the original buffer with a filter. */
+
+       filteredCBvecs(cbvectors, buf+LPC_FILTERORDER, lMem);
+
+       /* The Main Loop over stages */
+
+       for (stage=0; stage<nStages; stage++) {
+
+           range = search_rangeTbl[block][stage];
+
+           /* initialize search measure */
+
+           max_measure = (float)-10000000.0;
+           gain = (float)0.0;
+           best_index = 0;
+
+           /* Compute cross dot product between the target
+              and the CB memory */
+
+           crossDot=0.0;
+           pp=buf+LPC_FILTERORDER+lMem-lTarget;
+           for (j=0; j<lTarget; j++) {
+               crossDot += target[j]*(*pp++);
+           }
+
+           if (stage==0) {
+
+               /* Calculate energy in the first block of
+                 'lTarget' samples. */
+               ppe = energy;
+               ppi = buf+LPC_FILTERORDER+lMem-lTarget-1;
+               ppo = buf+LPC_FILTERORDER+lMem-1;
+
+               *ppe=0.0;
+               pp=buf+LPC_FILTERORDER+lMem-lTarget;
+               for (j=0; j<lTarget; j++) {
+                   *ppe+=(*pp)*(*pp++);
+               }
+
+               if (*ppe>0.0) {
+                   invenergy[0] = (float) 1.0 / (*ppe + EPS);
+               } else {
+                   invenergy[0] = (float) 0.0;
+
+
+
+Andersen, et al.              Experimental                    [Page 155]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+               }
+               ppe++;
+
+               measure=(float)-10000000.0;
+
+               if (crossDot > 0.0) {
+                      measure = crossDot*crossDot*invenergy[0];
+               }
+           }
+           else {
+               measure = crossDot*crossDot*invenergy[0];
+           }
+
+           /* check if measure is better */
+           ftmp = crossDot*invenergy[0];
+
+           if ((measure>max_measure) && (fabs(ftmp)<CB_MAXGAIN)) {
+               best_index = 0;
+               max_measure = measure;
+               gain = ftmp;
+           }
+
+           /* loop over the main first codebook section,
+              full search */
+
+           for (icount=1; icount<range; icount++) {
+
+               /* calculate measure */
+
+               crossDot=0.0;
+               pp = buf+LPC_FILTERORDER+lMem-lTarget-icount;
+
+               for (j=0; j<lTarget; j++) {
+                   crossDot += target[j]*(*pp++);
+               }
+
+               if (stage==0) {
+                   *ppe++ = energy[icount-1] + (*ppi)*(*ppi) -
+                       (*ppo)*(*ppo);
+                   ppo--;
+                   ppi--;
+
+                   if (energy[icount]>0.0) {
+                       invenergy[icount] =
+                           (float)1.0/(energy[icount]+EPS);
+                   } else {
+                       invenergy[icount] = (float) 0.0;
+                   }
+
+
+
+Andersen, et al.              Experimental                    [Page 156]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   measure=(float)-10000000.0;
+
+                   if (crossDot > 0.0) {
+                       measure = crossDot*crossDot*invenergy[icount];
+                   }
+               }
+               else {
+                   measure = crossDot*crossDot*invenergy[icount];
+               }
+
+               /* check if measure is better */
+               ftmp = crossDot*invenergy[icount];
+
+               if ((measure>max_measure) && (fabs(ftmp)<CB_MAXGAIN)) {
+                   best_index = icount;
+                   max_measure = measure;
+                   gain = ftmp;
+               }
+           }
+
+           /* Loop over augmented part in the first codebook
+            * section, full search.
+            * The vectors are interpolated.
+            */
+
+           if (lTarget==SUBL) {
+
+               /* Search for best possible cb vector and
+                  compute the CB-vectors' energy. */
+               searchAugmentedCB(20, 39, stage, base_size-lTarget/2,
+                   target, buf+LPC_FILTERORDER+lMem,
+                   &max_measure, &best_index, &gain, energy,
+                   invenergy);
+           }
+
+           /* set search range for following codebook sections */
+
+           base_index=best_index;
+
+           /* unrestricted search */
+
+           if (CB_RESRANGE == -1) {
+               sInd=0;
+               eInd=range-1;
+               sIndAug=20;
+               eIndAug=39;
+           }
+
+
+
+
+Andersen, et al.              Experimental                    [Page 157]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           /* restricted search around best index from first
+           codebook section */
+
+           else {
+               /* Initialize search indices */
+               sIndAug=0;
+               eIndAug=0;
+               sInd=base_index-CB_RESRANGE/2;
+               eInd=sInd+CB_RESRANGE;
+
+               if (lTarget==SUBL) {
+
+                   if (sInd<0) {
+
+                       sIndAug = 40 + sInd;
+                       eIndAug = 39;
+                       sInd=0;
+
+                   } else if ( base_index < (base_size-20) ) {
+
+                       if (eInd > range) {
+                           sInd -= (eInd-range);
+                           eInd = range;
+                       }
+                   } else { /* base_index >= (base_size-20) */
+
+                       if (sInd < (base_size-20)) {
+                           sIndAug = 20;
+                           sInd = 0;
+                           eInd = 0;
+                           eIndAug = 19 + CB_RESRANGE;
+
+                           if(eIndAug > 39) {
+                               eInd = eIndAug-39;
+                               eIndAug = 39;
+                           }
+                       } else {
+                           sIndAug = 20 + sInd - (base_size-20);
+                           eIndAug = 39;
+                           sInd = 0;
+                           eInd = CB_RESRANGE - (eIndAug-sIndAug+1);
+                       }
+                   }
+
+               } else { /* lTarget = 22 or 23 */
+
+                   if (sInd < 0) {
+                       eInd -= sInd;
+
+
+
+Andersen, et al.              Experimental                    [Page 158]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                       sInd = 0;
+                   }
+
+                   if(eInd > range) {
+                       sInd -= (eInd - range);
+                       eInd = range;
+                   }
+               }
+           }
+
+           /* search of higher codebook section */
+
+           /* index search range */
+           counter = sInd;
+           sInd += base_size;
+           eInd += base_size;
+
+
+           if (stage==0) {
+               ppe = energy+base_size;
+               *ppe=0.0;
+
+               pp=cbvectors+lMem-lTarget;
+               for (j=0; j<lTarget; j++) {
+                   *ppe+=(*pp)*(*pp++);
+               }
+
+               ppi = cbvectors + lMem - 1 - lTarget;
+               ppo = cbvectors + lMem - 1;
+
+               for (j=0; j<(range-1); j++) {
+                   *(ppe+1) = *ppe + (*ppi)*(*ppi) - (*ppo)*(*ppo);
+                   ppo--;
+                   ppi--;
+                   ppe++;
+               }
+           }
+
+           /* loop over search range */
+
+           for (icount=sInd; icount<eInd; icount++) {
+
+               /* calculate measure */
+
+               crossDot=0.0;
+               pp=cbvectors + lMem - (counter++) - lTarget;
+
+               for (j=0;j<lTarget;j++) {
+
+
+
+Andersen, et al.              Experimental                    [Page 159]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+                   crossDot += target[j]*(*pp++);
+               }
+
+               if (energy[icount]>0.0) {
+                   invenergy[icount] =(float)1.0/(energy[icount]+EPS);
+               } else {
+                   invenergy[icount] =(float)0.0;
+               }
+
+               if (stage==0) {
+
+                   measure=(float)-10000000.0;
+
+                   if (crossDot > 0.0) {
+                       measure = crossDot*crossDot*
+                           invenergy[icount];
+                   }
+               }
+               else {
+                   measure = crossDot*crossDot*invenergy[icount];
+               }
+
+               /* check if measure is better */
+               ftmp = crossDot*invenergy[icount];
+
+               if ((measure>max_measure) && (fabs(ftmp)<CB_MAXGAIN)) {
+                   best_index = icount;
+                   max_measure = measure;
+                   gain = ftmp;
+               }
+           }
+
+           /* Search the augmented CB inside the limited range. */
+
+           if ((lTarget==SUBL)&&(sIndAug!=0)) {
+               searchAugmentedCB(sIndAug, eIndAug, stage,
+                   2*base_size-20, target, cbvectors+lMem,
+                   &max_measure, &best_index, &gain, energy,
+                   invenergy);
+           }
+
+           /* record best index */
+
+           index[stage] = best_index;
+
+           /* gain quantization */
+
+           if (stage==0){
+
+
+
+Andersen, et al.              Experimental                    [Page 160]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+
+               if (gain<0.0){
+                   gain = 0.0;
+               }
+
+               if (gain>CB_MAXGAIN) {
+                   gain = (float)CB_MAXGAIN;
+               }
+               gain = gainquant(gain, 1.0, 32, &gain_index[stage]);
+           }
+           else {
+               if (stage==1) {
+                   gain = gainquant(gain, (float)fabs(gains[stage-1]),
+                       16, &gain_index[stage]);
+               } else {
+                   gain = gainquant(gain, (float)fabs(gains[stage-1]),
+                       8, &gain_index[stage]);
+               }
+           }
+
+           /* Extract the best (according to measure)
+              codebook vector */
+
+           if (lTarget==(STATE_LEN-iLBCenc_inst->state_short_len)) {
+
+               if (index[stage]<base_size) {
+                   pp=buf+LPC_FILTERORDER+lMem-lTarget-index[stage];
+               } else {
+                   pp=cbvectors+lMem-lTarget-
+                       index[stage]+base_size;
+               }
+           } else {
+
+               if (index[stage]<base_size) {
+                   if (index[stage]<(base_size-20)) {
+                       pp=buf+LPC_FILTERORDER+lMem-
+                           lTarget-index[stage];
+                   } else {
+                       createAugmentedVec(index[stage]-base_size+40,
+                               buf+LPC_FILTERORDER+lMem,aug_vec);
+                       pp=aug_vec;
+                   }
+               } else {
+                   int filterno, position;
+
+                   filterno=index[stage]/base_size;
+                   position=index[stage]-filterno*base_size;
+
+
+
+
+Andersen, et al.              Experimental                    [Page 161]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+
+                   if (position<(base_size-20)) {
+                       pp=cbvectors+filterno*lMem-lTarget-
+                           index[stage]+filterno*base_size;
+                   } else {
+                       createAugmentedVec(
+                           index[stage]-(filterno+1)*base_size+40,
+                           cbvectors+filterno*lMem,aug_vec);
+                       pp=aug_vec;
+                   }
+               }
+           }
+
+           /* Subtract the best codebook vector, according
+              to measure, from the target vector */
+
+           for (j=0;j<lTarget;j++) {
+               cvec[j] += gain*(*pp);
+               target[j] -= gain*(*pp++);
+           }
+
+           /* record quantized gain */
+
+           gains[stage]=gain;
+
+       }/* end of Main Loop. for (stage=0;... */
+
+       /* Gain adjustment for energy matching */
+       cene=0.0;
+       for (i=0; i<lTarget; i++) {
+           cene+=cvec[i]*cvec[i];
+       }
+       j=gain_index[0];
+
+       for (i=gain_index[0]; i<32; i++) {
+           ftmp=cene*gain_sq5Tbl[i]*gain_sq5Tbl[i];
+
+           if ((ftmp<(tene*gains[0]*gains[0])) &&
+               (gain_sq5Tbl[j]<(2.0*gains[0]))) {
+               j=i;
+           }
+       }
+       gain_index[0]=j;
+   }
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 162]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.35.  LPCdecode.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       LPC_decode.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_LPC_DECODE_H
+   #define __iLBC_LPC_DECODE_H
+
+   void LSFinterpolate2a_dec(
+       float *a,           /* (o) lpc coefficients for a sub-frame */
+       float *lsf1,    /* (i) first lsf coefficient vector */
+       float *lsf2,    /* (i) second lsf coefficient vector */
+       float coef,         /* (i) interpolation weight */
+       int length          /* (i) length of lsf vectors */
+   );
+
+   void SimplelsfDEQ(
+       float *lsfdeq,      /* (o) dequantized lsf coefficients */
+       int *index,         /* (i) quantization index */
+       int lpc_n           /* (i) number of LPCs */
+   );
+
+   void DecoderInterpolateLSF(
+       float *syntdenum,   /* (o) synthesis filter coefficients */
+       float *weightdenum, /* (o) weighting denumerator
+                                  coefficients */
+       float *lsfdeq,      /* (i) dequantized lsf coefficients */
+       int length,         /* (i) length of lsf coefficient vector */
+       iLBC_Dec_Inst_t *iLBCdec_inst
+                           /* (i) the decoder state structure */
+   );
+
+   #endif
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 163]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.36.  LPCdecode.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       LPC_decode.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <string.h>
+
+   #include "helpfun.h"
+   #include "lsf.h"
+   #include "iLBC_define.h"
+   #include "constants.h"
+
+   /*---------------------------------------------------------------*
+    *  interpolation of lsf coefficients for the decoder
+    *--------------------------------------------------------------*/
+
+   void LSFinterpolate2a_dec(
+       float *a,           /* (o) lpc coefficients for a sub-frame */
+       float *lsf1,    /* (i) first lsf coefficient vector */
+       float *lsf2,    /* (i) second lsf coefficient vector */
+       float coef,         /* (i) interpolation weight */
+       int length          /* (i) length of lsf vectors */
+   ){
+       float  lsftmp[LPC_FILTERORDER];
+
+       interpolate(lsftmp, lsf1, lsf2, coef, length);
+       lsf2a(a, lsftmp);
+   }
+
+   /*---------------------------------------------------------------*
+    *  obtain dequantized lsf coefficients from quantization index
+    *--------------------------------------------------------------*/
+
+   void SimplelsfDEQ(
+       float *lsfdeq,    /* (o) dequantized lsf coefficients */
+       int *index,         /* (i) quantization index */
+       int lpc_n           /* (i) number of LPCs */
+   ){
+       int i, j, pos, cb_pos;
+
+
+
+Andersen, et al.              Experimental                    [Page 164]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* decode first LSF */
+
+       pos = 0;
+       cb_pos = 0;
+       for (i = 0; i < LSF_NSPLIT; i++) {
+           for (j = 0; j < dim_lsfCbTbl[i]; j++) {
+               lsfdeq[pos + j] = lsfCbTbl[cb_pos +
+                   (long)(index[i])*dim_lsfCbTbl[i] + j];
+           }
+           pos += dim_lsfCbTbl[i];
+           cb_pos += size_lsfCbTbl[i]*dim_lsfCbTbl[i];
+       }
+
+       if (lpc_n>1) {
+
+           /* decode last LSF */
+
+           pos = 0;
+           cb_pos = 0;
+           for (i = 0; i < LSF_NSPLIT; i++) {
+               for (j = 0; j < dim_lsfCbTbl[i]; j++) {
+                   lsfdeq[LPC_FILTERORDER + pos + j] =
+                       lsfCbTbl[cb_pos +
+                       (long)(index[LSF_NSPLIT + i])*
+                       dim_lsfCbTbl[i] + j];
+               }
+               pos += dim_lsfCbTbl[i];
+               cb_pos += size_lsfCbTbl[i]*dim_lsfCbTbl[i];
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  obtain synthesis and weighting filters from lsf coefficients
+    *---------------------------------------------------------------*/
+
+   void DecoderInterpolateLSF(
+       float *syntdenum, /* (o) synthesis filter coefficients */
+       float *weightdenum, /* (o) weighting denumerator
+                                  coefficients */
+       float *lsfdeq,       /* (i) dequantized lsf coefficients */
+       int length,         /* (i) length of lsf coefficient vector */
+       iLBC_Dec_Inst_t *iLBCdec_inst
+                           /* (i) the decoder state structure */
+   ){
+       int    i, pos, lp_length;
+       float  lp[LPC_FILTERORDER + 1], *lsfdeq2;
+
+
+
+
+Andersen, et al.              Experimental                    [Page 165]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       lsfdeq2 = lsfdeq + length;
+       lp_length = length + 1;
+
+       if (iLBCdec_inst->mode==30) {
+           /* sub-frame 1: Interpolation between old and first */
+
+           LSFinterpolate2a_dec(lp, iLBCdec_inst->lsfdeqold, lsfdeq,
+               lsf_weightTbl_30ms[0], length);
+           memcpy(syntdenum,lp,lp_length*sizeof(float));
+           bwexpand(weightdenum, lp, LPC_CHIRP_WEIGHTDENUM,
+               lp_length);
+
+           /* sub-frames 2 to 6: interpolation between first
+              and last LSF */
+
+           pos = lp_length;
+           for (i = 1; i < 6; i++) {
+               LSFinterpolate2a_dec(lp, lsfdeq, lsfdeq2,
+                   lsf_weightTbl_30ms[i], length);
+               memcpy(syntdenum + pos,lp,lp_length*sizeof(float));
+               bwexpand(weightdenum + pos, lp,
+                   LPC_CHIRP_WEIGHTDENUM, lp_length);
+               pos += lp_length;
+           }
+       }
+       else {
+           pos = 0;
+           for (i = 0; i < iLBCdec_inst->nsub; i++) {
+               LSFinterpolate2a_dec(lp, iLBCdec_inst->lsfdeqold,
+                   lsfdeq, lsf_weightTbl_20ms[i], length);
+               memcpy(syntdenum+pos,lp,lp_length*sizeof(float));
+               bwexpand(weightdenum+pos, lp, LPC_CHIRP_WEIGHTDENUM,
+                   lp_length);
+               pos += lp_length;
+           }
+       }
+
+       /* update memory */
+
+       if (iLBCdec_inst->mode==30)
+           memcpy(iLBCdec_inst->lsfdeqold, lsfdeq2,
+                       length*sizeof(float));
+       else
+           memcpy(iLBCdec_inst->lsfdeqold, lsfdeq,
+                       length*sizeof(float));
+
+   }
+
+
+
+
+Andersen, et al.              Experimental                    [Page 166]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.37.  LPCencode.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       LPCencode.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_LPCENCOD_H
+   #define __iLBC_LPCENCOD_H
+
+   void LPCencode(
+       float *syntdenum,   /* (i/o) synthesis filter coefficients
+                                  before/after encoding */
+       float *weightdenum, /* (i/o) weighting denumerator coefficients
+                                  before/after encoding */
+       int *lsf_index,     /* (o) lsf quantization index */
+       float *data,    /* (i) lsf coefficients to quantize */
+       iLBC_Enc_Inst_t *iLBCenc_inst
+                           /* (i/o) the encoder state structure */
+   );
+
+   #endif
+
+A.38.  LPCencode.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       LPCencode.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <string.h>
+
+   #include "iLBC_define.h"
+   #include "helpfun.h"
+   #include "lsf.h"
+   #include "constants.h"
+
+
+
+Andersen, et al.              Experimental                    [Page 167]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   /*----------------------------------------------------------------*
+    *  lpc analysis (subroutine to LPCencode)
+    *---------------------------------------------------------------*/
+
+   void SimpleAnalysis(
+       float *lsf,         /* (o) lsf coefficients */
+       float *data,    /* (i) new data vector */
+       iLBC_Enc_Inst_t *iLBCenc_inst
+                           /* (i/o) the encoder state structure */
+   ){
+       int k, is;
+       float temp[BLOCKL_MAX], lp[LPC_FILTERORDER + 1];
+       float lp2[LPC_FILTERORDER + 1];
+       float r[LPC_FILTERORDER + 1];
+
+       is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
+       memcpy(iLBCenc_inst->lpc_buffer+is,data,
+           iLBCenc_inst->blockl*sizeof(float));
+
+       /* No lookahead, last window is asymmetric */
+
+       for (k = 0; k < iLBCenc_inst->lpc_n; k++) {
+
+           is = LPC_LOOKBACK;
+
+           if (k < (iLBCenc_inst->lpc_n - 1)) {
+               window(temp, lpc_winTbl,
+                   iLBCenc_inst->lpc_buffer, BLOCKL_MAX);
+           } else {
+               window(temp, lpc_asymwinTbl,
+                   iLBCenc_inst->lpc_buffer + is, BLOCKL_MAX);
+           }
+
+           autocorr(r, temp, BLOCKL_MAX, LPC_FILTERORDER);
+           window(r, r, lpc_lagwinTbl, LPC_FILTERORDER + 1);
+
+           levdurb(lp, temp, r, LPC_FILTERORDER);
+           bwexpand(lp2, lp, LPC_CHIRP_SYNTDENUM, LPC_FILTERORDER+1);
+
+           a2lsf(lsf + k*LPC_FILTERORDER, lp2);
+       }
+       is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
+       memmove(iLBCenc_inst->lpc_buffer,
+           iLBCenc_inst->lpc_buffer+LPC_LOOKBACK+BLOCKL_MAX-is,
+           is*sizeof(float));
+   }
+
+   /*----------------------------------------------------------------*
+
+
+
+Andersen, et al.              Experimental                    [Page 168]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+    *  lsf interpolator and conversion from lsf to a coefficients
+    *  (subroutine to SimpleInterpolateLSF)
+    *---------------------------------------------------------------*/
+
+   void LSFinterpolate2a_enc(
+       float *a,       /* (o) lpc coefficients */
+       float *lsf1,/* (i) first set of lsf coefficients */
+       float *lsf2,/* (i) second set of lsf coefficients */
+       float coef,     /* (i) weighting coefficient to use between
+                              lsf1 and lsf2 */
+       long length      /* (i) length of coefficient vectors */
+   ){
+       float  lsftmp[LPC_FILTERORDER];
+
+       interpolate(lsftmp, lsf1, lsf2, coef, length);
+       lsf2a(a, lsftmp);
+   }
+
+   /*----------------------------------------------------------------*
+    *  lsf interpolator (subroutine to LPCencode)
+    *---------------------------------------------------------------*/
+
+   void SimpleInterpolateLSF(
+       float *syntdenum,   /* (o) the synthesis filter denominator
+                                  resulting from the quantized
+                                  interpolated lsf */
+       float *weightdenum, /* (o) the weighting filter denominator
+                                  resulting from the unquantized
+                                  interpolated lsf */
+       float *lsf,         /* (i) the unquantized lsf coefficients */
+       float *lsfdeq,      /* (i) the dequantized lsf coefficients */
+       float *lsfold,      /* (i) the unquantized lsf coefficients of
+                                  the previous signal frame */
+       float *lsfdeqold, /* (i) the dequantized lsf coefficients of
+                                  the previous signal frame */
+       int length,         /* (i) should equate LPC_FILTERORDER */
+       iLBC_Enc_Inst_t *iLBCenc_inst
+                           /* (i/o) the encoder state structure */
+   ){
+       int    i, pos, lp_length;
+       float  lp[LPC_FILTERORDER + 1], *lsf2, *lsfdeq2;
+
+       lsf2 = lsf + length;
+       lsfdeq2 = lsfdeq + length;
+       lp_length = length + 1;
+
+       if (iLBCenc_inst->mode==30) {
+           /* sub-frame 1: Interpolation between old and first
+
+
+
+Andersen, et al.              Experimental                    [Page 169]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+              set of lsf coefficients */
+
+           LSFinterpolate2a_enc(lp, lsfdeqold, lsfdeq,
+               lsf_weightTbl_30ms[0], length);
+           memcpy(syntdenum,lp,lp_length*sizeof(float));
+           LSFinterpolate2a_enc(lp, lsfold, lsf,
+               lsf_weightTbl_30ms[0], length);
+           bwexpand(weightdenum, lp, LPC_CHIRP_WEIGHTDENUM, lp_length);
+
+           /* sub-frame 2 to 6: Interpolation between first
+              and second set of lsf coefficients */
+
+           pos = lp_length;
+           for (i = 1; i < iLBCenc_inst->nsub; i++) {
+               LSFinterpolate2a_enc(lp, lsfdeq, lsfdeq2,
+                   lsf_weightTbl_30ms[i], length);
+               memcpy(syntdenum + pos,lp,lp_length*sizeof(float));
+
+               LSFinterpolate2a_enc(lp, lsf, lsf2,
+                   lsf_weightTbl_30ms[i], length);
+               bwexpand(weightdenum + pos, lp,
+                   LPC_CHIRP_WEIGHTDENUM, lp_length);
+               pos += lp_length;
+           }
+       }
+       else {
+           pos = 0;
+           for (i = 0; i < iLBCenc_inst->nsub; i++) {
+               LSFinterpolate2a_enc(lp, lsfdeqold, lsfdeq,
+                   lsf_weightTbl_20ms[i], length);
+               memcpy(syntdenum+pos,lp,lp_length*sizeof(float));
+               LSFinterpolate2a_enc(lp, lsfold, lsf,
+                   lsf_weightTbl_20ms[i], length);
+               bwexpand(weightdenum+pos, lp,
+                   LPC_CHIRP_WEIGHTDENUM, lp_length);
+               pos += lp_length;
+           }
+       }
+
+       /* update memory */
+
+       if (iLBCenc_inst->mode==30) {
+           memcpy(lsfold, lsf2, length*sizeof(float));
+           memcpy(lsfdeqold, lsfdeq2, length*sizeof(float));
+       }
+       else {
+           memcpy(lsfold, lsf, length*sizeof(float));
+           memcpy(lsfdeqold, lsfdeq, length*sizeof(float));
+
+
+
+Andersen, et al.              Experimental                    [Page 170]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  lsf quantizer (subroutine to LPCencode)
+    *---------------------------------------------------------------*/
+
+   void SimplelsfQ(
+       float *lsfdeq,    /* (o) dequantized lsf coefficients
+                              (dimension FILTERORDER) */
+       int *index,     /* (o) quantization index */
+       float *lsf,      /* (i) the lsf coefficient vector to be
+                              quantized (dimension FILTERORDER ) */
+       int lpc_n     /* (i) number of lsf sets to quantize */
+   ){
+       /* Quantize first LSF with memoryless split VQ */
+       SplitVQ(lsfdeq, index, lsf, lsfCbTbl, LSF_NSPLIT,
+           dim_lsfCbTbl, size_lsfCbTbl);
+
+       if (lpc_n==2) {
+           /* Quantize second LSF with memoryless split VQ */
+           SplitVQ(lsfdeq + LPC_FILTERORDER, index + LSF_NSPLIT,
+               lsf + LPC_FILTERORDER, lsfCbTbl, LSF_NSPLIT,
+               dim_lsfCbTbl, size_lsfCbTbl);
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  lpc encoder
+    *---------------------------------------------------------------*/
+
+   void LPCencode(
+       float *syntdenum, /* (i/o) synthesis filter coefficients
+                                  before/after encoding */
+       float *weightdenum, /* (i/o) weighting denumerator
+                                  coefficients before/after
+                                  encoding */
+       int *lsf_index,     /* (o) lsf quantization index */
+       float *data,    /* (i) lsf coefficients to quantize */
+       iLBC_Enc_Inst_t *iLBCenc_inst
+                           /* (i/o) the encoder state structure */
+   ){
+       float lsf[LPC_FILTERORDER * LPC_N_MAX];
+       float lsfdeq[LPC_FILTERORDER * LPC_N_MAX];
+       int change=0;
+
+       SimpleAnalysis(lsf, data, iLBCenc_inst);
+       SimplelsfQ(lsfdeq, lsf_index, lsf, iLBCenc_inst->lpc_n);
+
+
+
+Andersen, et al.              Experimental                    [Page 171]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       change=LSF_check(lsfdeq, LPC_FILTERORDER, iLBCenc_inst->lpc_n);
+       SimpleInterpolateLSF(syntdenum, weightdenum,
+           lsf, lsfdeq, iLBCenc_inst->lsfold,
+           iLBCenc_inst->lsfdeqold, LPC_FILTERORDER, iLBCenc_inst);
+   }
+
+A.39.  lsf.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       lsf.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_LSF_H
+   #define __iLBC_LSF_H
+
+   void a2lsf(
+       float *freq,/* (o) lsf coefficients */
+       float *a    /* (i) lpc coefficients */
+   );
+
+   void lsf2a(
+       float *a_coef,  /* (o) lpc coefficients */
+       float *freq     /* (i) lsf coefficients */
+   );
+
+   #endif
+
+A.40.  lsf.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       lsf.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <string.h>
+
+
+
+Andersen, et al.              Experimental                    [Page 172]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   #include <math.h>
+
+   #include "iLBC_define.h"
+
+   /*----------------------------------------------------------------*
+    *  conversion from lpc coefficients to lsf coefficients
+    *---------------------------------------------------------------*/
+
+   void a2lsf(
+       float *freq,/* (o) lsf coefficients */
+       float *a    /* (i) lpc coefficients */
+   ){
+       float steps[LSF_NUMBER_OF_STEPS] =
+           {(float)0.00635, (float)0.003175, (float)0.0015875,
+           (float)0.00079375};
+       float step;
+       int step_idx;
+       int lsp_index;
+       float p[LPC_HALFORDER];
+       float q[LPC_HALFORDER];
+       float p_pre[LPC_HALFORDER];
+       float q_pre[LPC_HALFORDER];
+       float old_p, old_q, *old;
+       float *pq_coef;
+       float omega, old_omega;
+       int i;
+       float hlp, hlp1, hlp2, hlp3, hlp4, hlp5;
+
+       for (i=0; i<LPC_HALFORDER; i++) {
+           p[i] = (float)-1.0 * (a[i + 1] + a[LPC_FILTERORDER - i]);
+           q[i] = a[LPC_FILTERORDER - i] - a[i + 1];
+       }
+
+       p_pre[0] = (float)-1.0 - p[0];
+       p_pre[1] = - p_pre[0] - p[1];
+       p_pre[2] = - p_pre[1] - p[2];
+       p_pre[3] = - p_pre[2] - p[3];
+       p_pre[4] = - p_pre[3] - p[4];
+       p_pre[4] = p_pre[4] / 2;
+
+       q_pre[0] = (float)1.0 - q[0];
+       q_pre[1] = q_pre[0] - q[1];
+       q_pre[2] = q_pre[1] - q[2];
+       q_pre[3] = q_pre[2] - q[3];
+       q_pre[4] = q_pre[3] - q[4];
+       q_pre[4] = q_pre[4] / 2;
+
+       omega = 0.0;
+
+
+
+Andersen, et al.              Experimental                    [Page 173]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       old_omega = 0.0;
+
+       old_p = FLOAT_MAX;
+       old_q = FLOAT_MAX;
+
+       /* Here we loop through lsp_index to find all the
+          LPC_FILTERORDER roots for omega. */
+
+       for (lsp_index = 0; lsp_index<LPC_FILTERORDER; lsp_index++) {
+
+           /* Depending on lsp_index being even or odd, we
+           alternatively solve the roots for the two LSP equations. */
+
+
+           if ((lsp_index & 0x1) == 0) {
+               pq_coef = p_pre;
+               old = &old_p;
+           } else {
+               pq_coef = q_pre;
+               old = &old_q;
+           }
+
+           /* Start with low resolution grid */
+
+           for (step_idx = 0, step = steps[step_idx];
+               step_idx < LSF_NUMBER_OF_STEPS;){
+
+               /*  cos(10piw) + pq(0)cos(8piw) + pq(1)cos(6piw) +
+               pq(2)cos(4piw) + pq(3)cos(2piw) + pq(4) */
+
+               hlp = (float)cos(omega * TWO_PI);
+               hlp1 = (float)2.0 * hlp + pq_coef[0];
+               hlp2 = (float)2.0 * hlp * hlp1 - (float)1.0 +
+                   pq_coef[1];
+               hlp3 = (float)2.0 * hlp * hlp2 - hlp1 + pq_coef[2];
+               hlp4 = (float)2.0 * hlp * hlp3 - hlp2 + pq_coef[3];
+               hlp5 = hlp * hlp4 - hlp3 + pq_coef[4];
+
+
+               if (((hlp5 * (*old)) <= 0.0) || (omega >= 0.5)){
+
+                   if (step_idx == (LSF_NUMBER_OF_STEPS - 1)){
+
+                       if (fabs(hlp5) >= fabs(*old)) {
+                           freq[lsp_index] = omega - step;
+                       } else {
+                           freq[lsp_index] = omega;
+                       }
+
+
+
+Andersen, et al.              Experimental                    [Page 174]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+
+
+                       if ((*old) >= 0.0){
+                           *old = (float)-1.0 * FLOAT_MAX;
+                       } else {
+                           *old = FLOAT_MAX;
+                       }
+
+                       omega = old_omega;
+                       step_idx = 0;
+
+                       step_idx = LSF_NUMBER_OF_STEPS;
+                   } else {
+
+                       if (step_idx == 0) {
+                           old_omega = omega;
+                       }
+
+                       step_idx++;
+                       omega -= steps[step_idx];
+
+                       /* Go back one grid step */
+
+                       step = steps[step_idx];
+                   }
+               } else {
+
+               /* increment omega until they are of different sign,
+               and we know there is at least one root between omega
+               and old_omega */
+                   *old = hlp5;
+                   omega += step;
+               }
+           }
+       }
+
+       for (i = 0; i<LPC_FILTERORDER; i++) {
+           freq[i] = freq[i] * TWO_PI;
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  conversion from lsf coefficients to lpc coefficients
+    *---------------------------------------------------------------*/
+
+   void lsf2a(
+       float *a_coef,  /* (o) lpc coefficients */
+       float *freq     /* (i) lsf coefficients */
+
+
+
+Andersen, et al.              Experimental                    [Page 175]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   ){
+       int i, j;
+       float hlp;
+       float p[LPC_HALFORDER], q[LPC_HALFORDER];
+       float a[LPC_HALFORDER + 1], a1[LPC_HALFORDER],
+           a2[LPC_HALFORDER];
+       float b[LPC_HALFORDER + 1], b1[LPC_HALFORDER],
+           b2[LPC_HALFORDER];
+
+       for (i=0; i<LPC_FILTERORDER; i++) {
+           freq[i] = freq[i] * PI2;
+       }
+
+       /* Check input for ill-conditioned cases.  This part is not
+       found in the TIA standard.  It involves the following 2 IF
+       blocks.  If "freq" is judged ill-conditioned, then we first
+       modify freq[0] and freq[LPC_HALFORDER-1] (normally
+       LPC_HALFORDER = 10 for LPC applications), then we adjust
+       the other "freq" values slightly */
+
+
+       if ((freq[0] <= 0.0) || (freq[LPC_FILTERORDER - 1] >= 0.5)){
+
+
+           if (freq[0] <= 0.0) {
+               freq[0] = (float)0.022;
+           }
+
+
+           if (freq[LPC_FILTERORDER - 1] >= 0.5) {
+               freq[LPC_FILTERORDER - 1] = (float)0.499;
+           }
+
+           hlp = (freq[LPC_FILTERORDER - 1] - freq[0]) /
+               (float) (LPC_FILTERORDER - 1);
+
+           for (i=1; i<LPC_FILTERORDER; i++) {
+               freq[i] = freq[i - 1] + hlp;
+           }
+       }
+
+       memset(a1, 0, LPC_HALFORDER*sizeof(float));
+       memset(a2, 0, LPC_HALFORDER*sizeof(float));
+       memset(b1, 0, LPC_HALFORDER*sizeof(float));
+       memset(b2, 0, LPC_HALFORDER*sizeof(float));
+       memset(a, 0, (LPC_HALFORDER+1)*sizeof(float));
+       memset(b, 0, (LPC_HALFORDER+1)*sizeof(float));
+
+
+
+
+Andersen, et al.              Experimental                    [Page 176]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* p[i] and q[i] compute cos(2*pi*omega_{2j}) and
+       cos(2*pi*omega_{2j-1}) in eqs. 4.2.2.2-1 and 4.2.2.2-2.
+       Note that for this code p[i] specifies the coefficients
+       used in .Q_A(z) while q[i] specifies the coefficients used
+       in .P_A(z) */
+
+       for (i=0; i<LPC_HALFORDER; i++) {
+           p[i] = (float)cos(TWO_PI * freq[2 * i]);
+           q[i] = (float)cos(TWO_PI * freq[2 * i + 1]);
+       }
+
+       a[0] = 0.25;
+       b[0] = 0.25;
+
+       for (i= 0; i<LPC_HALFORDER; i++) {
+           a[i + 1] = a[i] - 2 * p[i] * a1[i] + a2[i];
+           b[i + 1] = b[i] - 2 * q[i] * b1[i] + b2[i];
+           a2[i] = a1[i];
+           a1[i] = a[i];
+           b2[i] = b1[i];
+           b1[i] = b[i];
+       }
+
+       for (j=0; j<LPC_FILTERORDER; j++) {
+
+           if (j == 0) {
+               a[0] = 0.25;
+               b[0] = -0.25;
+           } else {
+               a[0] = b[0] = 0.0;
+           }
+
+           for (i=0; i<LPC_HALFORDER; i++) {
+               a[i + 1] = a[i] - 2 * p[i] * a1[i] + a2[i];
+               b[i + 1] = b[i] - 2 * q[i] * b1[i] + b2[i];
+               a2[i] = a1[i];
+               a1[i] = a[i];
+               b2[i] = b1[i];
+               b1[i] = b[i];
+           }
+
+           a_coef[j + 1] = 2 * (a[LPC_HALFORDER] + b[LPC_HALFORDER]);
+       }
+
+       a_coef[0] = 1.0;
+   }
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 177]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.41.  packing.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       packing.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __PACKING_H
+   #define __PACKING_H
+
+   void packsplit(
+       int *index,                 /* (i) the value to split */
+       int *firstpart,             /* (o) the value specified by most
+                                          significant bits */
+       int *rest,                  /* (o) the value specified by least
+                                          significant bits */
+       int bitno_firstpart,    /* (i) number of bits in most
+                                          significant part */
+       int bitno_total             /* (i) number of bits in full range
+                                          of value */
+   );
+
+   void packcombine(
+       int *index,                 /* (i/o) the msb value in the
+                                          combined value out */
+       int rest,                   /* (i) the lsb value */
+       int bitno_rest              /* (i) the number of bits in the
+                                          lsb part */
+   );
+
+   void dopack(
+       unsigned char **bitstream,  /* (i/o) on entrance pointer to
+                                          place in bitstream to pack
+                                          new data, on exit pointer
+                                          to place in bitstream to
+                                          pack future data */
+       int index,                  /* (i) the value to pack */
+       int bitno,                  /* (i) the number of bits that the
+                                          value will fit within */
+       int *pos                /* (i/o) write position in the
+                                          current byte */
+   );
+
+
+
+Andersen, et al.              Experimental                    [Page 178]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   void unpack(
+       unsigned char **bitstream,  /* (i/o) on entrance pointer to
+                                          place in bitstream to
+                                          unpack new data from, on
+                                          exit pointer to place in
+                                          bitstream to unpack future
+                                          data from */
+       int *index,                 /* (o) resulting value */
+       int bitno,                  /* (i) number of bits used to
+                                          represent the value */
+       int *pos                /* (i/o) read position in the
+                                          current byte */
+   );
+
+   #endif
+
+A.42.  packing.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       packing.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <stdlib.h>
+
+   #include "iLBC_define.h"
+   #include "constants.h"
+   #include "helpfun.h"
+   #include "string.h"
+
+   /*----------------------------------------------------------------*
+    *  splitting an integer into first most significant bits and
+    *  remaining least significant bits
+    *---------------------------------------------------------------*/
+
+   void packsplit(
+       int *index,                 /* (i) the value to split */
+       int *firstpart,             /* (o) the value specified by most
+                                          significant bits */
+       int *rest,                  /* (o) the value specified by least
+                                          significant bits */
+
+
+
+Andersen, et al.              Experimental                    [Page 179]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       int bitno_firstpart,    /* (i) number of bits in most
+                                          significant part */
+       int bitno_total             /* (i) number of bits in full range
+                                          of value */
+   ){
+       int bitno_rest = bitno_total-bitno_firstpart;
+
+       *firstpart = *index>>(bitno_rest);
+       *rest = *index-(*firstpart<<(bitno_rest));
+   }
+
+   /*----------------------------------------------------------------*
+    *  combining a value corresponding to msb's with a value
+    *  corresponding to lsb's
+    *---------------------------------------------------------------*/
+
+   void packcombine(
+       int *index,                 /* (i/o) the msb value in the
+                                          combined value out */
+       int rest,                   /* (i) the lsb value */
+       int bitno_rest              /* (i) the number of bits in the
+                                          lsb part */
+   ){
+       *index = *index<<bitno_rest;
+       *index += rest;
+   }
+
+   /*----------------------------------------------------------------*
+    *  packing of bits into bitstream, i.e., vector of bytes
+    *---------------------------------------------------------------*/
+
+   void dopack(
+       unsigned char **bitstream,  /* (i/o) on entrance pointer to
+                                          place in bitstream to pack
+                                          new data, on exit pointer
+                                          to place in bitstream to
+                                          pack future data */
+       int index,                  /* (i) the value to pack */
+       int bitno,                  /* (i) the number of bits that the
+                                          value will fit within */
+       int *pos                /* (i/o) write position in the
+                                          current byte */
+   ){
+       int posLeft;
+
+       /* Clear the bits before starting in a new byte */
+
+       if ((*pos)==0) {
+
+
+
+Andersen, et al.              Experimental                    [Page 180]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           **bitstream=0;
+       }
+
+       while (bitno>0) {
+
+           /* Jump to the next byte if end of this byte is reached*/
+
+           if (*pos==8) {
+               *pos=0;
+               (*bitstream)++;
+               **bitstream=0;
+           }
+
+           posLeft=8-(*pos);
+
+           /* Insert index into the bitstream */
+
+           if (bitno <= posLeft) {
+               **bitstream |= (unsigned char)(index<<(posLeft-bitno));
+               *pos+=bitno;
+               bitno=0;
+           } else {
+               **bitstream |= (unsigned char)(index>>(bitno-posLeft));
+
+               *pos=8;
+               index-=((index>>(bitno-posLeft))<<(bitno-posLeft));
+
+               bitno-=posLeft;
+           }
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  unpacking of bits from bitstream, i.e., vector of bytes
+    *---------------------------------------------------------------*/
+
+   void unpack(
+       unsigned char **bitstream,  /* (i/o) on entrance pointer to
+                                          place in bitstream to
+                                          unpack new data from, on
+                                          exit pointer to place in
+                                          bitstream to unpack future
+                                          data from */
+       int *index,                 /* (o) resulting value */
+       int bitno,                  /* (i) number of bits used to
+                                          represent the value */
+       int *pos                /* (i/o) read position in the
+                                          current byte */
+
+
+
+Andersen, et al.              Experimental                    [Page 181]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   ){
+       int BitsLeft;
+
+       *index=0;
+
+       while (bitno>0) {
+
+           /* move forward in bitstream when the end of the
+              byte is reached */
+
+           if (*pos==8) {
+               *pos=0;
+               (*bitstream)++;
+           }
+
+           BitsLeft=8-(*pos);
+
+           /* Extract bits to index */
+
+           if (BitsLeft>=bitno) {
+               *index+=((((**bitstream)<<(*pos)) & 0xFF)>>(8-bitno));
+
+               *pos+=bitno;
+               bitno=0;
+           } else {
+
+               if ((8-bitno)>0) {
+                   *index+=((((**bitstream)<<(*pos)) & 0xFF)>>
+                       (8-bitno));
+                   *pos=8;
+               } else {
+                   *index+=(((int)(((**bitstream)<<(*pos)) & 0xFF))<<
+                       (bitno-8));
+                   *pos=8;
+               }
+               bitno-=BitsLeft;
+           }
+       }
+   }
+
+A.43.  StateConstructW.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       StateConstructW.h
+
+
+
+
+Andersen, et al.              Experimental                    [Page 182]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_STATECONSTRUCTW_H
+   #define __iLBC_STATECONSTRUCTW_H
+
+   void StateConstructW(
+       int idxForMax,      /* (i) 6-bit index for the quantization of
+                                  max amplitude */
+       int *idxVec,    /* (i) vector of quantization indexes */
+       float *syntDenum,   /* (i) synthesis filter denumerator */
+       float *out,         /* (o) the decoded state vector */
+       int len             /* (i) length of a state vector */
+   );
+
+   #endif
+
+A.44.  StateConstructW.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       StateConstructW.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <string.h>
+
+   #include "iLBC_define.h"
+   #include "constants.h"
+   #include "filter.h"
+
+   /*----------------------------------------------------------------*
+    *  decoding of the start state
+    *---------------------------------------------------------------*/
+
+   void StateConstructW(
+       int idxForMax,      /* (i) 6-bit index for the quantization of
+                                  max amplitude */
+       int *idxVec,    /* (i) vector of quantization indexes */
+       float *syntDenum,   /* (i) synthesis filter denumerator */
+
+
+
+Andersen, et al.              Experimental                    [Page 183]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *out,         /* (o) the decoded state vector */
+       int len             /* (i) length of a state vector */
+   ){
+       float maxVal, tmpbuf[LPC_FILTERORDER+2*STATE_LEN], *tmp,
+           numerator[LPC_FILTERORDER+1];
+       float foutbuf[LPC_FILTERORDER+2*STATE_LEN], *fout;
+       int k,tmpi;
+
+       /* decoding of the maximum value */
+
+       maxVal = state_frgqTbl[idxForMax];
+       maxVal = (float)pow(10,maxVal)/(float)4.5;
+
+       /* initialization of buffers and coefficients */
+
+       memset(tmpbuf, 0, LPC_FILTERORDER*sizeof(float));
+       memset(foutbuf, 0, LPC_FILTERORDER*sizeof(float));
+       for (k=0; k<LPC_FILTERORDER; k++) {
+           numerator[k]=syntDenum[LPC_FILTERORDER-k];
+       }
+       numerator[LPC_FILTERORDER]=syntDenum[0];
+       tmp = &tmpbuf[LPC_FILTERORDER];
+       fout = &foutbuf[LPC_FILTERORDER];
+
+       /* decoding of the sample values */
+
+       for (k=0; k<len; k++) {
+           tmpi = len-1-k;
+           /* maxVal = 1/scal */
+           tmp[k] = maxVal*state_sq3Tbl[idxVec[tmpi]];
+       }
+
+       /* circular convolution with all-pass filter */
+
+       memset(tmp+len, 0, len*sizeof(float));
+       ZeroPoleFilter(tmp, numerator, syntDenum, 2*len,
+           LPC_FILTERORDER, fout);
+       for (k=0;k<len;k++) {
+           out[k] = fout[len-1-k]+fout[2*len-1-k];
+       }
+   }
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 184]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.45.  StateSearchW.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       StateSearchW.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_STATESEARCHW_H
+   #define __iLBC_STATESEARCHW_H
+
+   void AbsQuantW(
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i) Encoder instance */
+       float *in,          /* (i) vector to encode */
+       float *syntDenum,   /* (i) denominator of synthesis filter */
+       float *weightDenum, /* (i) denominator of weighting filter */
+       int *out,           /* (o) vector of quantizer indexes */
+       int len,        /* (i) length of vector to encode and
+                                  vector of quantizer indexes */
+       int state_first     /* (i) position of start state in the
+                                  80 vec */
+   );
+
+   void StateSearchW(
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i) Encoder instance */
+       float *residual,/* (i) target residual vector */
+       float *syntDenum,   /* (i) lpc synthesis filter */
+       float *weightDenum, /* (i) weighting filter denominator */
+       int *idxForMax,     /* (o) quantizer index for maximum
+                                  amplitude */
+       int *idxVec,    /* (o) vector of quantization indexes */
+       int len,        /* (i) length of all vectors */
+       int state_first     /* (i) position of start state in the
+                                  80 vec */
+   );
+
+
+   #endif
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 185]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.46.  StateSearchW.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       StateSearchW.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include <math.h>
+   #include <string.h>
+
+   #include "iLBC_define.h"
+   #include "constants.h"
+   #include "filter.h"
+   #include "helpfun.h"
+
+   /*----------------------------------------------------------------*
+    *  predictive noise shaping encoding of scaled start state
+    *  (subroutine for StateSearchW)
+    *---------------------------------------------------------------*/
+
+   void AbsQuantW(
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i) Encoder instance */
+       float *in,          /* (i) vector to encode */
+       float *syntDenum,   /* (i) denominator of synthesis filter */
+       float *weightDenum, /* (i) denominator of weighting filter */
+       int *out,           /* (o) vector of quantizer indexes */
+       int len,        /* (i) length of vector to encode and
+                                  vector of quantizer indexes */
+       int state_first     /* (i) position of start state in the
+                                  80 vec */
+   ){
+       float *syntOut;
+       float syntOutBuf[LPC_FILTERORDER+STATE_SHORT_LEN_30MS];
+       float toQ, xq;
+       int n;
+       int index;
+
+       /* initialization of buffer for filtering */
+
+       memset(syntOutBuf, 0, LPC_FILTERORDER*sizeof(float));
+
+
+
+
+Andersen, et al.              Experimental                    [Page 186]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       /* initialization of pointer for filtering */
+
+       syntOut = &syntOutBuf[LPC_FILTERORDER];
+
+       /* synthesis and weighting filters on input */
+
+       if (state_first) {
+           AllPoleFilter (in, weightDenum, SUBL, LPC_FILTERORDER);
+       } else {
+           AllPoleFilter (in, weightDenum,
+               iLBCenc_inst->state_short_len-SUBL,
+               LPC_FILTERORDER);
+       }
+
+       /* encoding loop */
+
+       for (n=0; n<len; n++) {
+
+           /* time update of filter coefficients */
+
+           if ((state_first)&&(n==SUBL)){
+               syntDenum += (LPC_FILTERORDER+1);
+               weightDenum += (LPC_FILTERORDER+1);
+
+               /* synthesis and weighting filters on input */
+               AllPoleFilter (&in[n], weightDenum, len-n,
+                   LPC_FILTERORDER);
+
+           } else if ((state_first==0)&&
+               (n==(iLBCenc_inst->state_short_len-SUBL))) {
+               syntDenum += (LPC_FILTERORDER+1);
+               weightDenum += (LPC_FILTERORDER+1);
+
+               /* synthesis and weighting filters on input */
+               AllPoleFilter (&in[n], weightDenum, len-n,
+                   LPC_FILTERORDER);
+
+           }
+
+           /* prediction of synthesized and weighted input */
+
+           syntOut[n] = 0.0;
+           AllPoleFilter (&syntOut[n], weightDenum, 1,
+               LPC_FILTERORDER);
+
+           /* quantization */
+
+           toQ = in[n]-syntOut[n];
+
+
+
+Andersen, et al.              Experimental                    [Page 187]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+           sort_sq(&xq, &index, toQ, state_sq3Tbl, 8);
+           out[n]=index;
+           syntOut[n] = state_sq3Tbl[out[n]];
+
+           /* update of the prediction filter */
+
+           AllPoleFilter(&syntOut[n], weightDenum, 1,
+               LPC_FILTERORDER);
+       }
+   }
+
+   /*----------------------------------------------------------------*
+    *  encoding of start state
+    *---------------------------------------------------------------*/
+
+   void StateSearchW(
+       iLBC_Enc_Inst_t *iLBCenc_inst,
+                           /* (i) Encoder instance */
+       float *residual,/* (i) target residual vector */
+       float *syntDenum,   /* (i) lpc synthesis filter */
+       float *weightDenum, /* (i) weighting filter denominator */
+       int *idxForMax,     /* (o) quantizer index for maximum
+                                  amplitude */
+       int *idxVec,    /* (o) vector of quantization indexes */
+       int len,        /* (i) length of all vectors */
+       int state_first     /* (i) position of start state in the
+                                  80 vec */
+   ){
+       float dtmp, maxVal;
+       float tmpbuf[LPC_FILTERORDER+2*STATE_SHORT_LEN_30MS];
+       float *tmp, numerator[1+LPC_FILTERORDER];
+       float foutbuf[LPC_FILTERORDER+2*STATE_SHORT_LEN_30MS], *fout;
+       int k;
+       float qmax, scal;
+
+       /* initialization of buffers and filter coefficients */
+
+       memset(tmpbuf, 0, LPC_FILTERORDER*sizeof(float));
+       memset(foutbuf, 0, LPC_FILTERORDER*sizeof(float));
+       for (k=0; k<LPC_FILTERORDER; k++) {
+           numerator[k]=syntDenum[LPC_FILTERORDER-k];
+       }
+       numerator[LPC_FILTERORDER]=syntDenum[0];
+       tmp = &tmpbuf[LPC_FILTERORDER];
+       fout = &foutbuf[LPC_FILTERORDER];
+
+       /* circular convolution with the all-pass filter */
+
+
+
+
+Andersen, et al.              Experimental                    [Page 188]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       memcpy(tmp, residual, len*sizeof(float));
+       memset(tmp+len, 0, len*sizeof(float));
+       ZeroPoleFilter(tmp, numerator, syntDenum, 2*len,
+           LPC_FILTERORDER, fout);
+       for (k=0; k<len; k++) {
+           fout[k] += fout[k+len];
+       }
+
+       /* identification of the maximum amplitude value */
+
+       maxVal = fout[0];
+       for (k=1; k<len; k++) {
+
+           if (fout[k]*fout[k] > maxVal*maxVal){
+               maxVal = fout[k];
+           }
+       }
+       maxVal=(float)fabs(maxVal);
+
+       /* encoding of the maximum amplitude value */
+
+       if (maxVal < 10.0) {
+           maxVal = 10.0;
+       }
+       maxVal = (float)log10(maxVal);
+       sort_sq(&dtmp, idxForMax, maxVal, state_frgqTbl, 64);
+
+       /* decoding of the maximum amplitude representation value,
+          and corresponding scaling of start state */
+
+       maxVal=state_frgqTbl[*idxForMax];
+       qmax = (float)pow(10,maxVal);
+       scal = (float)(4.5)/qmax;
+       for (k=0; k<len; k++){
+           fout[k] *= scal;
+       }
+
+       /* predictive noise shaping encoding of scaled start state */
+
+       AbsQuantW(iLBCenc_inst, fout,syntDenum,
+           weightDenum,idxVec, len, state_first);
+   }
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 189]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+A.47.  syntFilter.h
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       syntFilter.h
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #ifndef __iLBC_SYNTFILTER_H
+   #define __iLBC_SYNTFILTER_H
+
+   void syntFilter(
+       float *Out,     /* (i/o) Signal to be filtered */
+       float *a,       /* (i) LP parameters */
+       int len,    /* (i) Length of signal */
+       float *mem      /* (i/o) Filter state */
+   );
+
+   #endif
+
+A.48.  syntFilter.c
+
+   /******************************************************************
+
+       iLBC Speech Coder ANSI-C Source Code
+
+       syntFilter.c
+
+       Copyright (C) The Internet Society (2004).
+       All Rights Reserved.
+
+   ******************************************************************/
+
+   #include "iLBC_define.h"
+
+   /*----------------------------------------------------------------*
+    *  LP synthesis filter.
+    *---------------------------------------------------------------*/
+
+   void syntFilter(
+       float *Out,     /* (i/o) Signal to be filtered */
+       float *a,       /* (i) LP parameters */
+       int len,    /* (i) Length of signal */
+
+
+
+Andersen, et al.              Experimental                    [Page 190]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+       float *mem      /* (i/o) Filter state */
+   ){
+       int i, j;
+       float *po, *pi, *pa, *pm;
+
+       po=Out;
+
+       /* Filter first part using memory from past */
+
+       for (i=0; i<LPC_FILTERORDER; i++) {
+           pi=&Out[i-1];
+           pa=&a[1];
+           pm=&mem[LPC_FILTERORDER-1];
+           for (j=1; j<=i; j++) {
+               *po-=(*pa++)*(*pi--);
+           }
+           for (j=i+1; j<LPC_FILTERORDER+1; j++) {
+               *po-=(*pa++)*(*pm--);
+           }
+           po++;
+       }
+
+       /* Filter last part where the state is entirely in
+          the output vector */
+
+       for (i=LPC_FILTERORDER; i<len; i++) {
+           pi=&Out[i-1];
+           pa=&a[1];
+           for (j=1; j<LPC_FILTERORDER+1; j++) {
+               *po-=(*pa++)*(*pi--);
+           }
+           po++;
+       }
+
+       /* Update state vector */
+
+       memcpy(mem, &Out[len-LPC_FILTERORDER],
+           LPC_FILTERORDER*sizeof(float));
+   }
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 191]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+Authors' Addresses
+
+   Soren Vang Andersen
+   Department of Communication Technology
+   Aalborg University
+   Fredrik Bajers Vej 7A
+   9200 Aalborg
+   Denmark
+
+   Phone:  ++45 9 6358627
+   EMail:  sva@kom.auc.dk
+
+
+   Alan Duric
+   Telio AS
+   Stoperigt. 2
+   Oslo, N-0250
+   Norway
+
+   Phone:  +47 21673555
+   EMail:  alan.duric@telio.no
+
+
+   Henrik Astrom
+   Global IP Sound AB
+   Olandsgatan 42
+   Stockholm, S-11663
+   Sweden
+
+   Phone:  +46 8 54553040
+   EMail:  henrik.astrom@globalipsound.com
+
+
+   Roar Hagen
+   Global IP Sound AB
+   Olandsgatan 42
+   Stockholm, S-11663
+   Sweden
+
+   Phone:  +46 8 54553040
+   EMail:  roar.hagen@globalipsound.com
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 192]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+   W. Bastiaan Kleijn
+   Global IP Sound AB
+   Olandsgatan 42
+   Stockholm, S-11663
+   Sweden
+
+   Phone:  +46 8 54553040
+   EMail:  bastiaan.kleijn@globalipsound.com
+
+
+   Jan Linden
+   Global IP Sound Inc.
+   900 Kearny Street, suite 500
+   San Francisco, CA-94133
+   USA
+
+   Phone: +1 415 397 2555
+   EMail: jan.linden@globalipsound.com
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 193]
+
+RFC 3951              Internet Low Bit Rate Codec          December 2004
+
+
+Full Copyright Statement
+
+   Copyright (C) The Internet Society (2004).
+
+   This document is subject to the rights, licenses and restrictions
+   contained in BCP 78, and except as set forth therein, the authors
+   retain all their rights.
+
+   This document and the information contained herein are provided on an
+   "AS IS" basis and THE CONTRIBUTOR, THE ORGANIZATION HE/SHE REPRESENTS
+   OR IS SPONSORED BY (IF ANY), THE INTERNET SOCIETY AND THE INTERNET
+   ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED,
+   INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE
+   INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED
+   WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
+
+Intellectual Property
+
+   The IETF takes no position regarding the validity or scope of any
+   Intellectual Property Rights or other rights that might be claimed to
+   pertain to the implementation or use of the technology described in
+   this document or the extent to which any license under such rights
+   might or might not be available; nor does it represent that it has
+   made any independent effort to identify any such rights.  Information
+   on the IETF's procedures with respect to rights in IETF Documents can
+   be found in BCP 78 and BCP 79.
+
+   Copies of IPR disclosures made to the IETF Secretariat and any
+   assurances of licenses to be made available, or the result of an
+   attempt made to obtain a general license or permission for the use of
+   such proprietary rights by implementers or users of this
+   specification can be obtained from the IETF on-line IPR repository at
+   http://www.ietf.org/ipr.
+
+   The IETF invites any interested party to bring to its attention any
+   copyrights, patents or patent applications, or other proprietary
+   rights that may cover technology that may be required to implement
+   this standard.  Please address the information to the IETF at ietf-
+   ipr@ietf.org.
+
+
+Acknowledgement
+
+   Funding for the RFC Editor function is currently provided by the
+   Internet Society.
+
+
+
+
+
+
+Andersen, et al.              Experimental                    [Page 194]
+
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/documentation/rfc3952.txt b/trunk/src/modules/audio_coding/codecs/ilbc/documentation/rfc3952.txt
new file mode 100644
index 0000000..eade7cd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/documentation/rfc3952.txt
@@ -0,0 +1,731 @@
+
+
+
+
+
+
+Network Working Group                                           A. Duric
+Request for Comments: 3952                                         Telio
+Category: Experimental                                       S. Andersen
+                                                      Aalborg University
+                                                           December 2004
+
+
+           Real-time Transport Protocol (RTP) Payload Format
+             for internet Low Bit Rate Codec (iLBC) Speech
+
+Status of this Memo
+
+   This memo defines an Experimental Protocol for the Internet
+   community.  It does not specify an Internet standard of any kind.
+   Discussion and suggestions for improvement are requested.
+   Distribution of this memo is unlimited.
+
+Copyright Notice
+
+   Copyright (C) The Internet Society (2004).
+
+Abstract
+
+   This document describes the Real-time Transport Protocol (RTP)
+   payload format for the internet Low Bit Rate Codec (iLBC) Speech
+   developed by Global IP Sound (GIPS).  Also, within the document there
+   are included necessary details for the use of iLBC with MIME and
+   Session Description Protocol (SDP).
+
+Table of Contents
+
+   1. Introduction. . . . . . . . . . . . . . . . . . . . . . . . . .  2
+   2. Background. . . . . . . . . . . . . . . . . . . . . . . . . . .  2
+   3. RTP Payload Format. . . . . . . . . . . . . . . . . . . . . . .  3
+      3.1. Bitstream definition . . . . . . . . . . . . . . . . . . .  3
+      3.2. Multiple iLBC frames in a RTP packet . . . . . . . . . . .  6
+   4. IANA Considerations . . . . . . . . . . . . . . . . . . . . . .  7
+      4.1. Storage Mode . . . . . . . . . . . . . . . . . . . . . . .  7
+      4.2. MIME registration of iLBC. . . . . . . . . . . . . . . . .  8
+   5. Mapping to SDP Parameters . . . . . . . . . . . . . . . . . . .  9
+   6. Security Considerations . . . . . . . . . . . . . . . . . . . . 11
+   7. References. . . . . . . . . . . . . . . . . . . . . . . . . . . 11
+      7.1. Normative References . . . . . . . . . . . . . . . . . . . 11
+      7.2. Informative References . . . . . . . . . . . . . . . . . . 12
+   8. Acknowledgements. . . . . . . . . . . . . . . . . . . . . . . . 12
+   Authors' Addresses . . . . . . . . . . . . . . . . . . . . . . . . 12
+   Full Copyright Statement . . . . . . . . . . . . . . . . . . . . . 13
+
+
+
+
+Duric & Andersen              Experimental                      [Page 1]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+1.  Introduction
+
+   This document describes how compressed iLBC speech, as produced by
+   the iLBC codec [1], may be formatted for use as an RTP payload type.
+   Methods are provided to packetize the codec data frames into RTP
+   packets.  The sender may send one or more codec data frames per
+   packet depending on the application scenario or based on the
+   transport network condition, bandwidth restriction, delay
+   requirements and packet-loss tolerance.
+
+   The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
+   "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
+   document are to be interpreted as described in BCP 14, RFC 2119 [2].
+
+2.  Background
+
+   Global IP Sound (GIPS) has developed a speech compression algorithm
+   for use in IP based communications [1].  The iLBC codec enables
+   graceful speech quality degradation in the case of lost frames, which
+   occurs in connection with lost or delayed IP packets.
+
+   This codec is suitable for real time communications such as,
+   telephony and videoconferencing, streaming audio, archival and
+   messaging.
+
+   The iLBC codec [1] is an algorithm that compresses each basic frame
+   (20 ms or 30 ms) of 8000 Hz, 16-bit sampled input speech, into output
+   frames with rate of 400 bits for 30 ms basic frame size and 304 bits
+   for 20 ms basic frame size.
+
+   The codec supports two basic frame lengths: 30 ms at 13.33 kbit/s and
+   20 ms at 15.2 kbit/s, using a block independent linear-predictive
+   coding (LPC) algorithm.  When the codec operates at block lengths of
+   20 ms, it produces 304 bits per block which MUST be packetized in 38
+   bytes.  Similarly, for block lengths of 30 ms it produces 400 bits
+   per block which MUST be packetized in 50 bytes.  This algorithm
+   results in a speech coding system with a controlled response to
+   packet losses similar to what is known from pulse code modulation
+   (PCM) with a packet loss concealment (PLC), such as ITU-T G711
+   standard [7], which operates at a fixed bit rate of 64 kbit/s.  At
+   the same time, this algorithm enables fixed bit rate coding with a
+   quality-versus-bit rate tradeoff close to what is known from code-
+   excited linear prediction (CELP).
+
+
+
+
+
+
+
+
+Duric & Andersen              Experimental                      [Page 2]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+3.  RTP Payload Format
+
+   The iLBC codec uses 20 or 30 ms frames and a sampling rate clock of 8
+   kHz, so the RTP timestamp MUST be in units of 1/8000 of a second. The
+   RTP payload for iLBC has the format shown in the figure below.  No
+   additional header specific to this payload format is required.
+
+   This format is intended for the situations where the sender and the
+   receiver send one or more codec data frames per packet.  The RTP
+   packet looks as follows:
+
+   0                   1                   2                   3
+   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |                      RTP Header [3]                           |
+   +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+   |                                                               |
+   +                 one or more frames of iLBC [1]                |
+   |                                                               |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+   Figure 1, Packet format diagram
+
+   The RTP header of the packetized encoded iLBC speech has the expected
+   values as described in [3].  The usage of M bit SHOULD be as
+   specified in the applicable RTP profile, for example, RFC 3551 [4]
+   specifies that if the sender does not suppress silence (i.e., sends a
+   frame on every frame interval), the M bit will always be zero.  When
+   more than one codec data frame is present in a single RTP packet, the
+   timestamp is, as always, the oldest data frame represented in the RTP
+   packet.
+
+   The assignment of an RTP payload type for this new packet format is
+   outside the scope of this document, and will not be specified here.
+   It is expected that the RTP profile for a particular class of
+   applications will assign a payload type for this encoding, or if that
+   is not done, then a payload type in the dynamic range shall be chosen
+   by the sender.
+
+3.1.  Bitstream definition
+
+   The total number of bits used to describe one frame of 20 ms speech
+   is 304, which fits in 38 bytes and results in a bit rate of 15.20
+   kbit/s.  For the case with a frame length of 30 ms speech the total
+   number of bits used is 400, which fits in 50 bytes and results in a
+   bit rate of 13.33 kbit/s.  In the bitstream definition, the bits are
+   distributed into three classes according to their bit error or loss
+   sensitivity.  The most sensitive bits (class 1) are placed first in
+
+
+
+Duric & Andersen              Experimental                      [Page 3]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   the bitstream for each frame.  The less sensitive bits (class 2) are
+   placed after the class 1 bits.  The least sensitive bits (class 3)
+   are placed at the end of the bitstream for each frame.
+
+   Looking at the 20/30 ms frame length cases for each class: The class
+   1 bits occupy a total of 6/8 bytes (48/64 bits), the class 2 bits
+   occupy 8/12 bytes (64/96 bits), and the class 3 bits occupy 24/30
+   bytes (191/239 bits).  This distribution of the bits enables the use
+   of uneven level protection (ULP).  The detailed bit allocation is
+   shown in the table below.  When a quantization index is distributed
+   between more classes the more significant bits belong to the lowest
+   class.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Duric & Andersen              Experimental                      [Page 4]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   Bitstream structure:
+
+   ------------------------------------------------------------------+
+   Parameter                         |       Bits Class <1,2,3>      |
+                                     |  20 ms frame  |  30 ms frame  |
+   ----------------------------------+---------------+---------------+
+                            Split 1  |   6 <6,0,0>   |   6 <6,0,0>   |
+                   LSF 1    Split 2  |   7 <7,0,0>   |   7 <7,0,0>   |
+   LSF                      Split 3  |   7 <7,0,0>   |   7 <7,0,0>   |
+                   ------------------+---------------+---------------+
+                            Split 1  | NA (Not Appl.)|   6 <6,0,0>   |
+                   LSF 2    Split 2  |      NA       |   7 <7,0,0>   |
+                            Split 3  |      NA       |   7 <7,0,0>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  20 <20,0,0>  |  40 <40,0,0>  |
+   ----------------------------------+---------------+---------------+
+   Block Class.                      |   2 <2,0,0>   |   3 <3,0,0>   |
+   ----------------------------------+---------------+---------------+
+   Position 22 sample segment        |   1 <1,0,0>   |   1 <1,0,0>   |
+   ----------------------------------+---------------+---------------+
+   Scale Factor State Coder          |   6 <6,0,0>   |   6 <6,0,0>   |
+   ----------------------------------+---------------+---------------+
+                   Sample 0          |   3 <0,1,2>   |   3 <0,1,2>   |
+   Quantized       Sample 1          |   3 <0,1,2>   |   3 <0,1,2>   |
+   Residual           :              |   :    :      |   :    :      |
+   State              :              |   :    :      |   :    :      |
+   Samples            :              |   :    :      |   :    :      |
+                   Sample 56         |   3 <0,1,2>   |   3 <0,1,2>   |
+                   Sample 57         |      NA       |   3 <0,1,2>   |
+                   ------------------+---------------+---------------+
+                   Sum               | 171 <0,57,114>| 174 <0,58,116>|
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   7 <6,0,1>   |   7 <4,2,1>   |
+   CB for 22/23             Stage 2  |   7 <0,0,7>   |   7 <0,0,7>   |
+   sample block             Stage 3  |   7 <0,0,7>   |   7 <0,0,7>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  21 <6,0,15>  |  21 <4,2,15>  |
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   5 <2,0,3>   |   5 <1,1,3>   |
+   Gain for 22/23           Stage 2  |   4 <1,1,2>   |   4 <1,1,2>   |
+   sample block             Stage 3  |   3 <0,0,3>   |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  12 <3,1,8>   |  12 <2,2,8>   |
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   8 <7,0,1>   |   8 <6,1,1>   |
+               sub-block 1  Stage 2  |   7 <0,0,7>   |   7 <0,0,7>   |
+                            Stage 3  |   7 <0,0,7>   |   7 <0,0,7>   |
+                   ------------------+---------------+---------------+
+
+
+
+Duric & Andersen              Experimental                      [Page 5]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+                            Stage 1  |   8 <0,0,8>   |   8 <0,7,1>   |
+               sub-block 2  Stage 2  |   8 <0,0,8>   |   8 <0,0,8>   |
+   Indices                  Stage 3  |   8 <0,0,8>   |   8 <0,0,8>   |
+   for CB          ------------------+---------------+---------------+
+   sub-blocks               Stage 1  |      NA       |   8 <0,7,1>   |
+               sub-block 3  Stage 2  |      NA       |   8 <0,0,8>   |
+                            Stage 3  |      NA       |   8 <0,0,8>   |
+                   ------------------+---------------+---------------+
+                            Stage 1  |      NA       |   8 <0,7,1>   |
+               sub-block 4  Stage 2  |      NA       |   8 <0,0,8>   |
+                            Stage 3  |      NA       |   8 <0,0,8>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  46 <7,0,39>  |  94 <6,22,66> |
+   ----------------------------------+---------------+---------------+
+                            Stage 1  |   5 <1,2,2>   |   5 <1,2,2>   |
+               sub-block 1  Stage 2  |   4 <1,1,2>   |   4 <1,2,1>   |
+                            Stage 3  |   3 <0,0,3>   |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                            Stage 1  |   5 <1,1,3>   |   5 <0,2,3>   |
+               sub-block 2  Stage 2  |   4 <0,2,2>   |   4 <0,2,2>   |
+                            Stage 3  |   3 <0,0,3>   |   3 <0,0,3>   |
+   Gains for       ------------------+---------------+---------------+
+   sub-blocks               Stage 1  |      NA       |   5 <0,1,4>   |
+               sub-block 3  Stage 2  |      NA       |   4 <0,1,3>   |
+                            Stage 3  |      NA       |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                            Stage 1  |      NA       |   5 <0,1,4>   |
+               sub-block 4  Stage 2  |      NA       |   4 <0,1,3>   |
+                            Stage 3  |      NA       |   3 <0,0,3>   |
+                   ------------------+---------------+---------------+
+                   Sum               |  24 <3,6,15>  |  48 <2,12,34> |
+   -------------------------------------------------------------------
+   Empty frame indicator             |   1 <0,0,1>   |   1 <0,0,1>   |
+   -------------------------------------------------------------------
+   SUM                                 304 <48,64,192> 400 <64,96,240>
+
+   Table 3.1 The bitstream definition for iLBC.
+
+   When packetized into the payload, all the class 1 bits MUST be sorted
+   in order (from top and down) as they were specified in the table.
+   Additionally, all the class 2 bits MUST be sorted (from top and down)
+   and all the class 3 bits MUST be sorted in the same sequential order.
+
+3.2.  Multiple iLBC frames in a RTP packet
+
+   More than one iLBC frame may be included in a single RTP packet by a
+   sender.
+
+
+
+
+Duric & Andersen              Experimental                      [Page 6]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   It is important to observe that senders have the following additional
+   restrictions:
+
+   o  SHOULD NOT include more iLBC frames in a single RTP packet than
+      will fit in the MTU of the RTP transport protocol.
+
+   o  Frames MUST NOT be split between RTP packets.
+
+   o  Frames of the different modes (20 ms and 30 ms) MUST NOT be
+      included within the same packet.
+
+   It is RECOMMENDED that the number of frames contained within an RTP
+   packet are consistent with the application.  For example, in
+   telephony and other real time applications where delay is important,
+   the delay is lower depending on the amount of frames per packet
+   (i.e., fewer frames per packet, the lower the delay).  Whereas for
+   bandwidth constrained links or delay insensitive streaming messaging
+   application, one or more frames per packet would be acceptable.
+
+   Information describing the number of frames contained in an RTP
+   packet is not transmitted as part of the RTP payload.  The way to
+   determine the number of iLBC frames is to count the total number of
+   octets within the RTP packet, and divide the octet count by the
+   number of expected octets per frame (38/50 per frame).
+
+4.  IANA Considerations
+
+   One new MIME sub-type as described in this section has been
+   registered.
+
+4.1.  Storage Mode
+
+   The storage mode is used for storing speech frames (e.g., as a file
+   or email attachment).
+
+   +------------------+
+   | Header           |
+   +------------------+
+   | Speech frame 1   |
+   +------------------+
+   :                  :
+   +------------------+
+   | Speech frame n   |
+   +------------------+
+
+   Figure 2, Storage format diagram
+
+
+
+
+
+Duric & Andersen              Experimental                      [Page 7]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   The file begins with a header that includes only a magic number to
+   identify that it is an iLBC file.
+
+   The magic number for iLBC file MUST correspond to the ASCII character
+   string:
+
+      o for 30 ms frame size mode:"#!iLBC30\n", or "0x23 0x21 0x69
+      0x4C 0x42 0x43 0x33 0x30 0x0A" in hexadecimal form,
+
+      o for 20 ms frame size mode:"#!iLBC20\n", or "0x23 0x21 0x69
+      0x4C 0x42 0x43 0x32 0x30 0x0A" in hexadecimal form.
+
+   After the header, follow the speech frames in consecutive order.
+
+   Speech frames lost in transmission MUST be stored as "empty frames",
+   as defined in [1].
+
+4.2.  MIME Registration of iLBC
+
+   MIME media type name: audio
+
+   MIME subtype: iLBC
+
+   Optional parameters:
+
+   All of the parameters apply to RTP transfer only.
+
+   maxptime:The maximum amount of media which can be encapsulated in
+            each packet, expressed as time in milliseconds.  The time
+            SHALL be calculated as the sum of the time the media present
+            in the packet represents.  The time SHOULD be a multiple of
+            the frame size.  This attribute is probably only meaningful
+            for audio data, but may be used with other media types if it
+            makes sense.  It is a media attribute, and is not dependent
+            on charset.  Note that this attribute was introduced after
+            RFC 2327, and non updated implementations will ignore this
+            attribute.
+
+   mode:    The iLBC operating frame mode (20 or 30 ms) that will be
+            encapsulated in each packet.  Values can be 0, 20 and 30
+            (where 0 is reserved, 20 stands for preferred 20 ms frame
+            size and 30 stands for preferred 30 ms frame size).
+
+   ptime:   Defined as usual for RTP audio (see [5]).
+
+   Encoding considerations:
+            This type is defined for transfer via both RTP (RFC 3550)
+            and stored-file methods as described in Section 4.1, of RFC
+
+
+
+Duric & Andersen              Experimental                      [Page 8]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+            3952.  Audio data is binary data, and must be encoded for
+            non-binary transport; the Base64 encoding is suitable for
+            email.
+
+   Security considerations:
+            See Section 6 of RFC 3952.
+
+   Public specification:
+            Please refer to RFC 3951 [1].
+
+   Additional information:
+            The following applies to stored-file transfer methods:
+
+            Magic number:
+            ASCII character string for:
+            o 30 ms frame size mode "#!iLBC30\n" (or 0x23 0x21
+            0x69 0x4C 0x42 0x43 0x33 0x30 0x0A in hexadecimal)
+            o 20 ms frame size mode "#!iLBC20\n" (or 0x23 0x21
+            0x69 0x4C 0x42 0x43 0x32 0x30 0x0A in hexadecimal)
+
+            File extensions: lbc, LBC
+            Macintosh file type code: none
+            Object identifier or OID: none
+
+   Person & email address to contact for further information:
+            alan.duric@telio.no
+
+   Intended usage: COMMON.
+            It is expected that many VoIP applications will use this
+            type.
+
+   Author/Change controller:
+            alan.duric@telio.no
+            IETF Audio/Video transport working group
+
+5.  Mapping To SDP Parameters
+
+   The information carried in the MIME media type specification has a
+   specific mapping to fields in the Session Description Protocol (SDP)
+   [5], which is commonly used to describe RTP sessions.  When SDP is
+   used to specify sessions employing the iLBC codec, the mapping is as
+   follows:
+
+   o  The MIME type ("audio") goes in SDP "m=" as the media name.
+
+   o  The MIME subtype (payload format name) goes in SDP "a=rtpmap" as
+      the encoding name.
+
+
+
+
+Duric & Andersen              Experimental                      [Page 9]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   o  The parameters "ptime" and "maxptime" go in the SDP "a=ptime" and
+      "a=maxptime" attributes, respectively.
+
+   o  The parameter "mode" goes in the SDP "a=fmtp" attribute by copying
+      it directly from the MIME media type string as "mode=value".
+
+   When conveying information by SDP, the encoding name SHALL be "iLBC"
+   (the same as the MIME subtype).
+
+   An example of the media representation in SDP for describing iLBC
+   might be:
+
+      m=audio 49120 RTP/AVP 97
+      a=rtpmap:97 iLBC/8000
+
+   If 20 ms frame size mode is used, remote iLBC encoder SHALL receive
+   "mode" parameter in the SDP "a=fmtp" attribute by copying them
+   directly from the MIME media type string as a semicolon separated
+   with parameter=value, where parameter is "mode", and values can be 0
+   and 20 (where 0 is reserved and 20 stands for preferred 20 ms frame
+   size).  An example of the media representation in SDP for describing
+   iLBC when 20 ms frame size mode is used might be:
+
+      m=audio 49120 RTP/AVP 97
+      a=rtpmap:97 iLBC/8000
+      a=fmtp:97 mode=20
+
+   It is important to emphasize the bi-directional character of the
+   "mode" parameter - both sides of a bi-directional session MUST use
+   the same "mode" value.
+
+   The offer contains the preferred mode of the offerer.  The answerer
+   may agree to that mode by including the same mode in the answer, or
+   may include a different mode.  The resulting mode used by both
+   parties SHALL be the lower of the bandwidth modes in the offer and
+   answer.
+
+   That is, an offer of "mode=20" receiving an answer of "mode=30" will
+   result in "mode=30" being used by both participants.  Similarly, an
+   offer of "mode=30" and an answer of "mode=20" will result in
+   "mode=30" being used by both participants.
+
+   This is important when one end point utilizes a bandwidth constrained
+   link (e.g., 28.8k modem link or slower), where only the lower frame
+   size will work.
+
+
+
+
+
+
+Duric & Andersen              Experimental                     [Page 10]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   Parameter ptime can not be used for the purpose of specifying iLBC
+   operating mode, due to the fact that for certain values it will be
+   impossible to distinguish which mode is about to be used (e.g., when
+   ptime=60, it would be impossible to distinguish if packet is carrying
+   2 frames of 30 ms or 3 frames of 20 ms, etc.).
+
+   Note that the payload format (encoding) names are commonly shown in
+   upper case.  MIME subtypes are commonly shown in lower case.  These
+   names are case-insensitive in both places.  Similarly, parameter
+   names are case-insensitive both in MIME types and in the default
+   mapping to the SDP a=fmtp attribute
+
+6.  Security Considerations
+
+   RTP packets using the payload format defined in this specification
+   are subject to the general security considerations discussed in [3]
+   and any appropriate profile (e.g., [4]).
+
+   As this format transports encoded speech, the main security issues
+   include confidentiality and authentication of the speech itself.  The
+   payload format itself does not have any built-in security mechanisms.
+   Confidentiality of the media streams is achieved by encryption,
+   therefore external mechanisms, such as SRTP [6], MAY be used for that
+   purpose.  The data compression used with this payload format is
+   applied end-to-end; hence encryption may be performed after
+   compression with no conflict between the two operations.
+
+   A potential denial-of-service threat exists for data encoding using
+   compression techniques that have non-uniform receiver-end
+   computational load.  The attacker can inject pathological datagrams
+   into the stream which are complex to decode and cause the receiver to
+   become overloaded.  However, the encodings covered in this document
+   do not exhibit any significant non-uniformity.
+
+7.  References
+
+7.1.  Normative References
+
+   [1]  Andersen, S., Duric, A., Astrom, H., Hagen, R., Kleijn, W., and
+        J. Linden, "Internet Low Bit Rate Codec (iLBC)", RFC 3951,
+        December 2004.
+
+   [2]  Bradner, S., "Key words for use in RFCs to Indicate Requirement
+        Levels", BCP 14, RFC 2119, March 1997.
+
+   [3]  Schulzrinne, H., Casner, S., Frederick, R., and V. Jacobson,
+        "RTP: A Transport Protocol for Real-Time Applications", STD 64,
+        RFC 3550, July 2003.
+
+
+
+Duric & Andersen              Experimental                     [Page 11]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+   [4]  Schulzrinne, H. and S. Casner, "RTP Profile for Audio and Video
+        Conferences with Minimal Control", STD 65, RFC 3551, July 2003.
+
+   [5]  Handley, M. and V. Jacobson, "SDP: Session Description
+        Protocol", RFC 2327, April 1998.
+
+   [6]  Baugher, M., McGrew, D., Naslund, M., Carrara, E., and K.
+        Norrman, "The Secure Real-time Transport Protocol", RFC 3711,
+        March 2004.
+
+7.2.  Informative References
+
+   [7]  ITU-T Recommendation G.711, available online from the ITU
+        bookstore at http://www.itu.int.
+
+8.  Acknowledgements
+
+   Henry Sinnreich, Patrik Faltstrom, Alan Johnston and Jean-Francois
+   Mule for great support of the iLBC initiative and for valuable
+   feedback and comments.
+
+Authors' Addresses
+
+   Alan Duric
+   Telio AS
+   Stoperigt. 2
+   Oslo, N-0250
+   Norway
+
+   Phone:  +47 21673505
+   EMail:  alan.duric@telio.no
+
+
+   Soren Vang Andersen
+   Department of Communication Technology
+   Aalborg University
+   Fredrik Bajers Vej 7A
+   9200 Aalborg
+   Denmark
+
+   Phone:  ++45 9 6358627
+   EMail:  sva@kom.auc.dk
+
+
+
+
+
+
+
+
+
+Duric & Andersen              Experimental                     [Page 12]
+
+RFC 3952           RTP Payload Format for iLBC Speech      December 2004
+
+
+Full Copyright Statement
+
+   Copyright (C) The Internet Society (2004).
+
+   This document is subject to the rights, licenses and restrictions
+   contained in BCP 78, and except as set forth therein, the authors
+   retain all their rights.
+
+   This document and the information contained herein are provided on an
+   "AS IS" basis and THE CONTRIBUTOR, THE ORGANIZATION HE/SHE REPRESENTS
+   OR IS SPONSORED BY (IF ANY), THE INTERNET SOCIETY AND THE INTERNET
+   ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED,
+   INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE
+   INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED
+   WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
+
+Intellectual Property
+
+   The IETF takes no position regarding the validity or scope of any
+   Intellectual Property Rights or other rights that might be claimed to
+   pertain to the implementation or use of the technology described in
+   this document or the extent to which any license under such rights
+   might or might not be available; nor does it represent that it has
+   made any independent effort to identify any such rights.  Information
+   on the IETF's procedures with respect to rights in IETF Documents can
+   be found in BCP 78 and BCP 79.
+
+   Copies of IPR disclosures made to the IETF Secretariat and any
+   assurances of licenses to be made available, or the result of an
+   attempt made to obtain a general license or permission for the use of
+   such proprietary rights by implementers or users of this
+   specification can be obtained from the IETF on-line IPR repository at
+   http://www.ietf.org/ipr.
+
+   The IETF invites any interested party to bring to its attention any
+   copyrights, patents or patent applications, or other proprietary
+   rights that may cover technology that may be required to implement
+   this standard.  Please address the information to the IETF at ietf-
+   ipr@ietf.org.
+
+
+Acknowledgement
+
+   Funding for the RFC Editor function is currently provided by the
+   Internet Society.
+
+
+
+
+
+
+Duric & Andersen              Experimental                     [Page 13]
+
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/encode.c b/trunk/src/modules/audio_coding/codecs/ilbc/encode.c
new file mode 100644
index 0000000..64e9eab
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/encode.c
@@ -0,0 +1,518 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Encode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "lpc_encode.h"
+#include "frame_classify.h"
+#include "state_search.h"
+#include "state_construct.h"
+#include "constants.h"
+#include "cb_search.h"
+#include "cb_construct.h"
+#include "index_conv_enc.h"
+#include "pack_bits.h"
+#include "hp_input.h"
+
+#ifdef SPLIT_10MS
+#include "unpack_bits.h"
+#include "index_conv_dec.h"
+#endif
+#ifndef WEBRTC_BIG_ENDIAN
+#include "swap_bytes.h"
+#endif
+
+/*----------------------------------------------------------------*
+ *  main encoder function
+ *---------------------------------------------------------------*/
+
+/*
+ * Encode one iLBC frame.
+ *
+ * Without SPLIT_10MS the caller supplies a complete 160/240-sample
+ * frame (20/30 ms mode) and one call produces the packed bitstream.
+ * With SPLIT_10MS the encoder is fed 80-sample (10 ms) chunks;
+ * iLBCenc_inst->section tracks progress through the frame and the
+ * partially encoded parameters are carried between calls inside the
+ * encoder state (iLBCenc_inst->bytes / weightdenumbuf).
+ */
+void WebRtcIlbcfix_EncodeImpl(
+    WebRtc_UWord16 *bytes,     /* (o) encoded data bits iLBC */
+    WebRtc_Word16 *block,     /* (i) speech vector to encode */
+    iLBC_Enc_Inst_t *iLBCenc_inst /* (i/o) the general encoder
+                                     state */
+                          ){
+  int n, meml_gotten, Nfor, Nback;
+  WebRtc_Word16 diff, start_pos;
+  int index;
+  int subcount, subframe;
+  WebRtc_Word16 start_count, end_count;
+  WebRtc_Word16 *residual;
+  WebRtc_Word32 en1, en2;
+  WebRtc_Word16 scale, max;
+  WebRtc_Word16 *syntdenum;
+  WebRtc_Word16 *decresidual;
+  WebRtc_Word16 *reverseResidual;
+  WebRtc_Word16 *reverseDecresidual;
+  /* Stack based */
+  WebRtc_Word16 weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+  WebRtc_Word16 dataVec[BLOCKL_MAX + LPC_FILTERORDER];
+  WebRtc_Word16 memVec[CB_MEML+CB_FILTERLEN];
+  /* iLBC_bits overlaid on a plain Word16 array to save stack space */
+  WebRtc_Word16 bitsMemory[sizeof(iLBC_bits)/sizeof(WebRtc_Word16)];
+  iLBC_bits *iLBCbits_inst = (iLBC_bits*)bitsMemory;
+
+
+#ifdef SPLIT_10MS
+  WebRtc_Word16 *weightdenumbuf = iLBCenc_inst->weightdenumbuf;
+  WebRtc_Word16 last_bit;
+#endif
+
+  WebRtc_Word16 *data = &dataVec[LPC_FILTERORDER];
+  WebRtc_Word16 *mem = &memVec[CB_HALFFILTERLEN];
+
+  /* Reuse some buffers to save stack memory */
+  residual = &iLBCenc_inst->lpc_buffer[LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl];
+  syntdenum = mem;      /* syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX] and mem are used non overlapping in the code */
+  decresidual = residual;     /* Already encoded residual is overwritten by the decoded version */
+  reverseResidual = data;     /* data and reverseResidual are used non overlapping in the code */
+  reverseDecresidual = reverseResidual; /* Already encoded residual is overwritten by the decoded version */
+
+#ifdef SPLIT_10MS
+
+  WebRtcSpl_MemSetW16 (  (WebRtc_Word16 *) iLBCbits_inst, 0,
+                         (WebRtc_Word16) (sizeof(iLBC_bits) / sizeof(WebRtc_Word16))  );
+
+  start_pos = iLBCenc_inst->start_pos;
+  diff = iLBCenc_inst->diff;
+
+  if (iLBCenc_inst->section != 0){
+    /* Mid-frame call: restore the weighting filters and re-read the
+       parameters encoded by the previous 10 ms call */
+    WEBRTC_SPL_MEMCPY_W16 (weightdenum, weightdenumbuf,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+    /* Un-Packetize the frame into parameters */
+    last_bit = WebRtcIlbcfix_UnpackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+    if (last_bit)
+      return;
+    /* adjust index */
+    WebRtcIlbcfix_IndexConvDec (iLBCbits_inst->cb_index);
+
+    if (iLBCenc_inst->section == 1){
+      /* Save first 80 samples of a 160/240 sample frame for 20/30msec */
+      WEBRTC_SPL_MEMCPY_W16 (iLBCenc_inst->past_samples, block, 80);
+    }
+    else{ // iLBCenc_inst->section == 2 AND mode = 30ms
+      /* Save second 80 samples of a 240 sample frame for 30msec */
+      WEBRTC_SPL_MEMCPY_W16 (iLBCenc_inst->past_samples + 80, block, 80);
+    }
+  }
+  else{ // iLBCenc_inst->section == 0
+    /* NOTE: when SPLIT_10MS is defined this brace is closed only just
+       before the matching #endif much further down, so the whole
+       analysis/state-encode pass below runs inside this else branch. */
+    /* form a complete frame of 160/240 for 20msec/30msec mode */
+    WEBRTC_SPL_MEMCPY_W16 (data + (iLBCenc_inst->mode * 8) - 80, block, 80);
+    WEBRTC_SPL_MEMCPY_W16 (data, iLBCenc_inst->past_samples,
+                           (iLBCenc_inst->mode * 8) - 80);
+    iLBCenc_inst->Nfor_flag = 0;
+    iLBCenc_inst->Nback_flag = 0;
+#else
+    /* copy input block to data*/
+    WEBRTC_SPL_MEMCPY_W16(data,block,iLBCenc_inst->blockl);
+#endif
+
+    /* high pass filtering of input signal and scale down the residual (*0.5) */
+    WebRtcIlbcfix_HpInput(data, (WebRtc_Word16*)WebRtcIlbcfix_kHpInCoefs,
+                          iLBCenc_inst->hpimemy, iLBCenc_inst->hpimemx,
+                          iLBCenc_inst->blockl);
+
+    /* LPC of hp filtered input data */
+    WebRtcIlbcfix_LpcEncode(syntdenum, weightdenum, iLBCbits_inst->lsf, data,
+                            iLBCenc_inst);
+
+    /* Set up state (analysis-filter memory precedes the data buffer) */
+    WEBRTC_SPL_MEMCPY_W16(dataVec, iLBCenc_inst->anaMem, LPC_FILTERORDER);
+
+    /* inverse filter to get residual */
+    for (n=0; n<iLBCenc_inst->nsub; n++ ) {
+      WebRtcSpl_FilterMAFastQ12(
+          &data[n*SUBL], &residual[n*SUBL],
+          &syntdenum[n*(LPC_FILTERORDER+1)],
+          LPC_FILTERORDER+1, SUBL);
+    }
+
+    /* Copy the state for next frame */
+    WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->anaMem, &data[iLBCenc_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
+
+    /* find state location */
+
+    iLBCbits_inst->startIdx = WebRtcIlbcfix_FrameClassify(iLBCenc_inst,residual);
+
+    /* check if state should be in first or last part of the
+       two subframes */
+
+    index = (iLBCbits_inst->startIdx-1)*SUBL;
+    max=WebRtcSpl_MaxAbsValueW16(&residual[index], 2*SUBL);
+    scale=WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max,max));
+
+    /* Scale to maximum 25 bits so that the MAC won't cause overflow */
+    scale = scale - 25;
+    if(scale < 0) {
+      scale = 0;
+    }
+
+    /* diff = number of samples of STATE_LEN not covered by the short
+       state; used to place the state at the higher-energy end */
+    diff = STATE_LEN - iLBCenc_inst->state_short_len;
+    en1=WebRtcSpl_DotProductWithScale(&residual[index], &residual[index],
+                                      iLBCenc_inst->state_short_len, scale);
+    index += diff;
+    en2=WebRtcSpl_DotProductWithScale(&residual[index], &residual[index],
+                                      iLBCenc_inst->state_short_len, scale);
+    if (en1 > en2) {
+      iLBCbits_inst->state_first = 1;
+      start_pos = (iLBCbits_inst->startIdx-1)*SUBL;
+    } else {
+      iLBCbits_inst->state_first = 0;
+      start_pos = (iLBCbits_inst->startIdx-1)*SUBL + diff;
+    }
+
+    /* scalar quantization of state */
+
+    WebRtcIlbcfix_StateSearch(iLBCenc_inst, iLBCbits_inst, &residual[start_pos],
+                              &syntdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)],
+                              &weightdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)]);
+
+    WebRtcIlbcfix_StateConstruct(iLBCbits_inst->idxForMax, iLBCbits_inst->idxVec,
+                                 &syntdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)],
+                                 &decresidual[start_pos], iLBCenc_inst->state_short_len
+                                 );
+
+    /* predictive quantization in state */
+
+    if (iLBCbits_inst->state_first) { /* put adaptive part in the end */
+
+      /* setup memory */
+
+      WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCenc_inst->state_short_len));
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCenc_inst->state_short_len,
+                            decresidual+start_pos, iLBCenc_inst->state_short_len);
+
+      /* encode subframes */
+
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                             &residual[start_pos+iLBCenc_inst->state_short_len],
+                             mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL, diff,
+                             &weightdenum[iLBCbits_inst->startIdx*(LPC_FILTERORDER+1)], 0);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(&decresidual[start_pos+iLBCenc_inst->state_short_len],
+                                iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                                mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL,
+                                diff
+                                );
+
+    }
+    else { /* put adaptive part in the beginning */
+
+      /* create reversed vectors for prediction */
+
+      WebRtcSpl_MemCpyReversedOrder(&reverseResidual[diff-1],
+                                    &residual[(iLBCbits_inst->startIdx+1)*SUBL-STATE_LEN], diff);
+
+      /* setup memory */
+
+      meml_gotten = iLBCenc_inst->state_short_len;
+      WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[start_pos], meml_gotten);
+      WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCenc_inst->state_short_len));
+
+      /* encode subframes */
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                             reverseResidual, mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL, diff,
+                             &weightdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)],
+                             0);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(reverseDecresidual,
+                                iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                                mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL,
+                                diff
+                                );
+
+      /* get decoded residual from reversed vector */
+
+      WebRtcSpl_MemCpyReversedOrder(&decresidual[start_pos-1], reverseDecresidual, diff);
+    }
+
+#ifdef SPLIT_10MS
+    /* First 10 ms chunk done: stash progress in the encoder state and
+       hand the partially packed frame back until the next call */
+    iLBCenc_inst->start_pos = start_pos;
+    iLBCenc_inst->diff = diff;
+    iLBCenc_inst->section++;
+    /* adjust index */
+    WebRtcIlbcfix_IndexConvEnc (iLBCbits_inst->cb_index);
+    /* Packetize the parameters into the frame */
+    WebRtcIlbcfix_PackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+    WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+    return;
+  }
+#endif
+
+  /* forward prediction of subframes */
+
+  Nfor = iLBCenc_inst->nsub-iLBCbits_inst->startIdx-1;
+
+  /* counter for predicted subframes */
+#ifdef SPLIT_10MS
+  /* NOTE(review): subcount (and start_count/end_count below) are only
+     assigned when mode is exactly 20 or 30; any other mode would leave
+     them uninitialized -- presumably mode is validated upstream, TODO
+     confirm against the encoder init code. */
+  if (iLBCenc_inst->mode == 20)
+  {
+    subcount = 1;
+  }
+  if (iLBCenc_inst->mode == 30)
+  {
+    if (iLBCenc_inst->section == 1)
+    {
+      subcount = 1;
+    }
+    if (iLBCenc_inst->section == 2)
+    {
+      subcount = 3;
+    }
+  }
+#else
+  subcount=1;
+#endif
+
+  if( Nfor > 0 ){
+
+    /* setup memory */
+
+    WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN);
+    WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-STATE_LEN,
+                          decresidual+(iLBCbits_inst->startIdx-1)*SUBL, STATE_LEN);
+
+#ifdef SPLIT_10MS
+    if (iLBCenc_inst->Nfor_flag > 0)
+    {
+      /* Re-run the memory updates for subframes already encoded in a
+         previous 10 ms section so mem matches where we left off */
+      for (subframe = 0; subframe < WEBRTC_SPL_MIN (Nfor, 2); subframe++)
+      {
+        /* update memory */
+        /* NOTE(review): source and destination overlap here; the
+           non-split path below uses WEBRTC_SPL_MEMMOVE_W16 for the same
+           shift -- confirm MEMCPY_W16 is overlap-safe on all targets. */
+        WEBRTC_SPL_MEMCPY_W16 (mem, mem + SUBL, (CB_MEML - SUBL));
+        WEBRTC_SPL_MEMCPY_W16 (mem + CB_MEML - SUBL,
+                               &decresidual[(iLBCbits_inst->startIdx + 1 +
+                                             subframe) * SUBL], SUBL);
+      }
+    }
+
+    iLBCenc_inst->Nfor_flag++;
+
+    if (iLBCenc_inst->mode == 20)
+    {
+      start_count = 0;
+      end_count = Nfor;
+    }
+    if (iLBCenc_inst->mode == 30)
+    {
+      if (iLBCenc_inst->section == 1)
+      {
+        start_count = 0;
+        end_count = WEBRTC_SPL_MIN (Nfor, 2);
+      }
+      if (iLBCenc_inst->section == 2)
+      {
+        start_count = WEBRTC_SPL_MIN (Nfor, 2);
+        end_count = Nfor;
+      }
+    }
+#else
+    start_count = 0;
+    end_count = (WebRtc_Word16)Nfor;
+#endif
+
+    /* loop over subframes to encode */
+
+    for (subframe = start_count; subframe < end_count; subframe++){
+
+      /* encode subframe */
+
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                             iLBCbits_inst->gain_index+subcount*CB_NSTAGES,
+                             &residual[(iLBCbits_inst->startIdx+1+subframe)*SUBL],
+                             mem, MEM_LF_TBL, SUBL,
+                             &weightdenum[(iLBCbits_inst->startIdx+1+subframe)*(LPC_FILTERORDER+1)],
+                             (WebRtc_Word16)subcount);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(&decresidual[(iLBCbits_inst->startIdx+1+subframe)*SUBL],
+                                iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                                iLBCbits_inst->gain_index+subcount*CB_NSTAGES,
+                                mem, MEM_LF_TBL,
+                                SUBL
+                                );
+
+      /* update memory (overlapping shift, hence MEMMOVE) */
+
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, (CB_MEML-SUBL));
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &decresidual[(iLBCbits_inst->startIdx+1+subframe)*SUBL], SUBL);
+
+      subcount++;
+    }
+  }
+
+#ifdef SPLIT_10MS
+  /* Second 10 ms section of a 30 ms frame complete: save and return */
+  if ((iLBCenc_inst->section == 1) &&
+      (iLBCenc_inst->mode == 30) && (Nfor > 0) && (end_count == 2))
+  {
+    iLBCenc_inst->section++;
+    /* adjust index */
+    WebRtcIlbcfix_IndexConvEnc (iLBCbits_inst->cb_index);
+    /* Packetize the parameters into the frame */
+    WebRtcIlbcfix_PackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+    WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+    return;
+  }
+#endif
+
+  /* backward prediction of subframes */
+
+  Nback = iLBCbits_inst->startIdx-1;
+
+  if( Nback > 0 ){
+
+    /* create reverse order vectors
+       (The decresidual does not need to be copied since it is
+       contained in the same vector as the residual)
+    */
+
+    WebRtcSpl_MemCpyReversedOrder(&reverseResidual[Nback*SUBL-1], residual, Nback*SUBL);
+
+    /* setup memory */
+
+    meml_gotten = SUBL*(iLBCenc_inst->nsub+1-iLBCbits_inst->startIdx);
+    if( meml_gotten > CB_MEML ) {
+      meml_gotten=CB_MEML;
+    }
+
+    WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[Nback*SUBL], meml_gotten);
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+
+#ifdef SPLIT_10MS
+    if (iLBCenc_inst->Nback_flag > 0)
+    {
+      /* Replay memory updates for backward subframes already encoded
+         in a previous 10 ms section */
+      for (subframe = 0; subframe < WEBRTC_SPL_MAX (2 - Nfor, 0); subframe++)
+      {
+        /* update memory */
+        /* NOTE(review): overlapping copy via MEMCPY_W16 again -- see the
+           forward-prediction section above; confirm overlap safety. */
+        WEBRTC_SPL_MEMCPY_W16 (mem, mem + SUBL, (CB_MEML - SUBL));
+        WEBRTC_SPL_MEMCPY_W16 (mem + CB_MEML - SUBL,
+                               &reverseDecresidual[subframe * SUBL], SUBL);
+      }
+    }
+
+    iLBCenc_inst->Nback_flag++;
+
+
+    if (iLBCenc_inst->mode == 20)
+    {
+      start_count = 0;
+      end_count = Nback;
+    }
+    if (iLBCenc_inst->mode == 30)
+    {
+      if (iLBCenc_inst->section == 1)
+      {
+        start_count = 0;
+        end_count = WEBRTC_SPL_MAX (2 - Nfor, 0);
+      }
+      if (iLBCenc_inst->section == 2)
+      {
+        start_count = WEBRTC_SPL_MAX (2 - Nfor, 0);
+        end_count = Nback;
+      }
+    }
+#else
+    start_count = 0;
+    end_count = (WebRtc_Word16)Nback;
+#endif
+
+    /* loop over subframes to encode */
+
+    for (subframe = start_count; subframe < end_count; subframe++){
+
+      /* encode subframe */
+
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                             iLBCbits_inst->gain_index+subcount*CB_NSTAGES, &reverseResidual[subframe*SUBL],
+                             mem, MEM_LF_TBL, SUBL,
+                             &weightdenum[(iLBCbits_inst->startIdx-2-subframe)*(LPC_FILTERORDER+1)],
+                             (WebRtc_Word16)subcount);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(&reverseDecresidual[subframe*SUBL],
+                                iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                                iLBCbits_inst->gain_index+subcount*CB_NSTAGES,
+                                mem, MEM_LF_TBL, SUBL
+                                );
+
+      /* update memory (overlapping shift, hence MEMMOVE) */
+
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, (CB_MEML-SUBL));
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &reverseDecresidual[subframe*SUBL], SUBL);
+
+      subcount++;
+
+    }
+
+    /* get decoded residual from reversed vector */
+
+    WebRtcSpl_MemCpyReversedOrder(&decresidual[SUBL*Nback-1], reverseDecresidual, SUBL*Nback);
+  }
+  /* end encoding part */
+
+  /* adjust index */
+
+  WebRtcIlbcfix_IndexConvEnc(iLBCbits_inst->cb_index);
+
+  /* Packetize the parameters into the frame */
+
+#ifdef SPLIT_10MS
+  if( (iLBCenc_inst->mode==30) && (iLBCenc_inst->section==1) ){
+    /* frame not finished yet: keep packing into the internal buffer */
+    WebRtcIlbcfix_PackBits(iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+  }
+  else{
+    WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode);
+  }
+#else
+  WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode);
+#endif
+
+#ifndef WEBRTC_BIG_ENDIAN
+  /* Swap bytes for LITTLE ENDIAN since the packbits()
+     function assumes BIG_ENDIAN machine */
+#ifdef SPLIT_10MS
+  if (( (iLBCenc_inst->section == 1) && (iLBCenc_inst->mode == 20) ) ||
+      ( (iLBCenc_inst->section == 2) && (iLBCenc_inst->mode == 30) )){
+    WebRtcIlbcfix_SwapBytes(bytes, iLBCenc_inst->no_of_words);
+  }
+#else
+  WebRtcIlbcfix_SwapBytes(bytes, iLBCenc_inst->no_of_words);
+#endif
+#endif
+
+#ifdef SPLIT_10MS
+  /* Advance (or reset) the section counter for the next 10 ms call */
+  if (subcount == (iLBCenc_inst->nsub - 1))
+  {
+    iLBCenc_inst->section = 0;
+  }
+  else
+  {
+    iLBCenc_inst->section++;
+    WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+  }
+#endif
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/encode.h b/trunk/src/modules/audio_coding/codecs/ilbc/encode.h
new file mode 100644
index 0000000..b553f0c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/encode.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Encode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  main encoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EncodeImpl(
+    WebRtc_UWord16 *bytes,     /* (o) encoded data bits iLBC */
+    WebRtc_Word16 *block,     /* (i) speech vector to encode */
+    iLBC_Enc_Inst_t *iLBCenc_inst /* (i/o) the general encoder
+                                           state */
+                          );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/energy_inverse.c b/trunk/src/modules/audio_coding/codecs/ilbc/energy_inverse.c
new file mode 100644
index 0000000..d56069b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/energy_inverse.c
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnergyInverse.c
+
+******************************************************************/
+
+/* Inverses the in vector in into Q29 domain */
+
+#include "energy_inverse.h"
+
+void WebRtcIlbcfix_EnergyInverse(
+    WebRtc_Word16 *energy,    /* (i/o) Energy and inverse
+                                                           energy (in Q29) */
+    int noOfEnergies)  /* (i)   The length of the energy
+                                   vector */
+{
+  /* Numerator 0x1FFFFFFF is 2^29 - 1, i.e. just below 1.0 in Q29 */
+  WebRtc_Word32 Nom=(WebRtc_Word32)0x1FFFFFFF;
+  WebRtc_Word16 *energyPtr;
+  int i;
+
+  /* Set the minimum energy value to 16384 to avoid overflow:
+     with energy >= 16384 the quotient 0x1FFFFFFF/energy is at most
+     32767 and therefore fits in a WebRtc_Word16 */
+  energyPtr=energy;
+  for (i=0; i<noOfEnergies; i++) {
+    (*energyPtr)=WEBRTC_SPL_MAX((*energyPtr),16384);
+    energyPtr++;
+  }
+
+  /* Calculate inverse energy in Q29, replacing each entry in place */
+  energyPtr=energy;
+  for (i=0; i<noOfEnergies; i++) {
+    (*energyPtr) = (WebRtc_Word16)WebRtcSpl_DivW32W16(Nom, (*energyPtr));
+    energyPtr++;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/energy_inverse.h b/trunk/src/modules/audio_coding/codecs/ilbc/energy_inverse.h
new file mode 100644
index 0000000..db13589
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/energy_inverse.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnergyInverse.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENERGY_INVERSE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENERGY_INVERSE_H_
+
+#include "defines.h"
+
+/* Inverses the in vector in into Q29 domain */
+
+void WebRtcIlbcfix_EnergyInverse(
+    WebRtc_Word16 *energy,     /* (i/o) Energy and inverse
+                                                                   energy (in Q29) */
+    int noOfEnergies);   /* (i)   The length of the energy
+                                   vector */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/enh_upsample.c b/trunk/src/modules/audio_coding/codecs/ilbc/enh_upsample.c
new file mode 100644
index 0000000..3343816
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/enh_upsample.c
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhUpsample.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ * upsample finite array assuming zeros outside bounds
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EnhUpsample(
+    WebRtc_Word32 *useq1, /* (o) upsampled output sequence */
+    WebRtc_Word16 *seq1 /* (i) unupsampled sequence */
+                                ){
+  int j;
+  WebRtc_Word32 *pu1, *pu11;
+  WebRtc_Word16 *ps, *w16tmp;
+  const WebRtc_Word16 *pp;
+
+  /* Polyphase upsampling by a factor of ENH_UPS0: for each phase j the
+     output is an inner product with WebRtcIlbcfix_kEnhPolyPhaser[j].
+     The loops are fully unrolled; output rows are ENH_UPS0 apart. */
+
+  /* filtering: filter overhangs left side of sequence, so the
+     convolutions are shortened (3, 4 and 5 taps for rows i = 2, 3, 4)
+     since samples outside the sequence are taken as zero */
+  pu1=useq1;
+  for (j=0;j<ENH_UPS0; j++) {
+    pu11=pu1;
+    /* i = 2 */
+    pp=WebRtcIlbcfix_kEnhPolyPhaser[j]+1;
+    ps=seq1+2;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    pu11+=ENH_UPS0;
+    /* i = 3 */
+    pp=WebRtcIlbcfix_kEnhPolyPhaser[j]+1;
+    ps=seq1+3;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    pu11+=ENH_UPS0;
+    /* i = 4 */
+    pp=WebRtcIlbcfix_kEnhPolyPhaser[j]+1;
+    ps=seq1+4;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    pu1++;
+  }
+
+  /* filtering: simple convolution=inner products
+     (not needed since the sequence is so short)
+  */
+
+  /* filtering: filter overhangs right side of sequence */
+
+  /* Code with loops, which is equivalent to the expanded version below
+
+     filterlength = 5;
+     hfl = 2;
+     for(j=0;j<ENH_UPS0; j++){
+     pu = useq1 + (filterlength-hfl)*ENH_UPS0 + j;
+     for(i=1; i<=hfl; i++){
+     *pu=0;
+     pp = polyp[j]+i;
+     ps = seq1+dim1-1;
+     for(k=0;k<filterlength-i;k++) {
+     *pu += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+     }
+     pu+=ENH_UPS0;
+     }
+     }
+  */
+  pu1 = useq1 + 12;
+  w16tmp = seq1+4;
+  for (j=0;j<ENH_UPS0; j++) {
+    pu11 = pu1;
+    /* i = 1 */
+    pp = WebRtcIlbcfix_kEnhPolyPhaser[j]+2;
+    ps = w16tmp;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    pu11+=ENH_UPS0;
+    /* i = 2 */
+    pp = WebRtcIlbcfix_kEnhPolyPhaser[j]+3;
+    ps = w16tmp;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    pu11+=ENH_UPS0;
+
+    pu1++;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/enh_upsample.h b/trunk/src/modules/audio_coding/codecs/ilbc/enh_upsample.h
new file mode 100644
index 0000000..53534cc
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/enh_upsample.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhUpsample.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENH_UPSAMPLE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENH_UPSAMPLE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * upsample finite array assuming zeros outside bounds
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EnhUpsample(
+    WebRtc_Word32 *useq1, /* (o) upsampled output sequence */
+    WebRtc_Word16 *seq1 /* (i) unupsampled sequence */
+                                );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/enhancer.c b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer.c
new file mode 100644
index 0000000..b8f3335
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Enhancer.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "get_sync_seq.h"
+#include "smooth.h"
+
+/*----------------------------------------------------------------*
+ * perform enhancement on idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Enhancer(
+    WebRtc_Word16 *odata,   /* (o) smoothed block, dimension blockl */
+    WebRtc_Word16 *idata,   /* (i) data buffer used for enhancing */
+    WebRtc_Word16 idatal,   /* (i) dimension idata */
+    WebRtc_Word16 centerStartPos, /* (i) first sample current block within idata */
+    WebRtc_Word16 *period,   /* (i) pitch period array (pitch bward-in time) */
+    WebRtc_Word16 *plocs,   /* (i) locations where period array values valid */
+    WebRtc_Word16 periodl   /* (i) dimension of period and plocs */
+                            ){
+  /* Stack based work buffer, cleared before WebRtcIlbcfix_GetSyncSeq
+     fills it below */
+  WebRtc_Word16 surround[ENH_BLOCKL];
+
+  WebRtcSpl_MemSetW16(surround, 0, ENH_BLOCKL);
+
+  /* get said second sequence of segments (pitch-synchronous average
+     around the current block, built by GetSyncSeq) */
+
+  WebRtcIlbcfix_GetSyncSeq(idata, idatal, centerStartPos, period, plocs,
+                           periodl, ENH_HL, surround);
+
+  /* compute the smoothed output from said second sequence */
+
+  WebRtcIlbcfix_Smooth(odata, idata+centerStartPos, surround);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/enhancer.h b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer.h
new file mode 100644
index 0000000..e14f559
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Enhancer.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * perform enhancement on idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Enhancer(
+    WebRtc_Word16 *odata,   /* (o) smoothed block, dimension blockl */
+    WebRtc_Word16 *idata,   /* (i) data buffer used for enhancing */
+    WebRtc_Word16 idatal,   /* (i) dimension idata */
+    WebRtc_Word16 centerStartPos, /* (i) first sample current block within idata */
+    WebRtc_Word16 *period,   /* (i) pitch period array (pitch bward-in time) */
+    WebRtc_Word16 *plocs,   /* (i) locations where period array values valid */
+    WebRtc_Word16 periodl   /* (i) dimension of period and plocs */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/enhancer_interface.c b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer_interface.c
new file mode 100644
index 0000000..61b71d1
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer_interface.c
@@ -0,0 +1,381 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhancerInterface.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "xcorr_coef.h"
+#include "enhancer.h"
+#include "hp_output.h"
+
+
+
+/*----------------------------------------------------------------*
+ * interface for enhancer
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
+    WebRtc_Word16 *out,     /* (o) enhanced signal */
+    WebRtc_Word16 *in,      /* (i) unenhanced signal */
+    iLBC_Dec_Inst_t *iLBCdec_inst /* (i) buffers etc */
+                                        ){
+  int iblock;
+  int lag=20, tlag=20;
+  int inLen=iLBCdec_inst->blockl+120;
+  WebRtc_Word16 scale, scale1, plc_blockl;
+  WebRtc_Word16 *enh_buf, *enh_period;
+  WebRtc_Word32 tmp1, tmp2, max, new_blocks;
+  WebRtc_Word16 *enh_bufPtr1;
+  int i, k;
+  WebRtc_Word16 EnChange;
+  WebRtc_Word16 SqrtEnChange;
+  WebRtc_Word16 inc;
+  WebRtc_Word16 win;
+  WebRtc_Word16 *tmpW16ptr;
+  WebRtc_Word16 startPos;
+  WebRtc_Word16 *plc_pred;
+  WebRtc_Word16 *target, *regressor;
+  WebRtc_Word16 max16;
+  int shifts;
+  WebRtc_Word32 ener;
+  WebRtc_Word16 enerSh;
+  WebRtc_Word16 corrSh;
+  WebRtc_Word16 ind, sh;
+  WebRtc_Word16 start, stop;
+  /* Stack based */
+  WebRtc_Word16 totsh[3];
+  WebRtc_Word16 downsampled[(BLOCKL_MAX+120)>>1]; /* length 180 */
+  WebRtc_Word32 corr32[50];
+  WebRtc_Word32 corrmax[3];
+  WebRtc_Word16 corr16[3];
+  WebRtc_Word16 en16[3];
+  WebRtc_Word16 lagmax[3];
+
+  plc_pred = downsampled; /* Reuse memory since plc_pred[ENH_BLOCKL] and
+                              downsampled are non overlapping */
+  enh_buf=iLBCdec_inst->enh_buf;
+  enh_period=iLBCdec_inst->enh_period;
+
+  /* Copy in the new data into the enhancer buffer */
+
+  WEBRTC_SPL_MEMMOVE_W16(enh_buf, &enh_buf[iLBCdec_inst->blockl],
+                         ENH_BUFL-iLBCdec_inst->blockl);
+
+  WEBRTC_SPL_MEMCPY_W16(&enh_buf[ENH_BUFL-iLBCdec_inst->blockl], in,
+                        iLBCdec_inst->blockl);
+
+  /* Set variables that are dependent on frame size */
+  if (iLBCdec_inst->mode==30) {
+    plc_blockl=ENH_BLOCKL;
+    new_blocks=3;
+    startPos=320;  /* Start position for enhancement
+                     (640-new_blocks*ENH_BLOCKL-80) */
+  } else {
+    plc_blockl=40;
+    new_blocks=2;
+    startPos=440;  /* Start position for enhancement
+                    (640-new_blocks*ENH_BLOCKL-40) */
+  }
+
+  /* Update the pitch prediction for each enhancer block, move the old ones */
+  WEBRTC_SPL_MEMMOVE_W16(enh_period, &enh_period[new_blocks],
+                         (ENH_NBLOCKS_TOT-new_blocks));
+
+  k=WebRtcSpl_DownsampleFast(
+      enh_buf+ENH_BUFL-inLen,    /* Input samples */
+      (WebRtc_Word16)(inLen+ENH_BUFL_FILTEROVERHEAD),
+      downsampled,
+      (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(inLen, 1),
+      (WebRtc_Word16*)WebRtcIlbcfix_kLpFiltCoefs,  /* Coefficients in Q12 */
+      FILTERORDER_DS_PLUS1,    /* Length of filter (order-1) */
+      FACTOR_DS,
+      DELAY_DS);
+
+  /* Estimate the pitch in the down sampled domain. */
+  for(iblock = 0; iblock<new_blocks; iblock++){
+
+    /* references */
+    i=60+WEBRTC_SPL_MUL_16_16(iblock,ENH_BLOCKL_HALF);
+    target=downsampled+i;
+    regressor=downsampled+i-10;
+
+    /* scaling */
+    max16=WebRtcSpl_MaxAbsValueW16(&regressor[-50],
+                                   (WebRtc_Word16)(ENH_BLOCKL_HALF+50-1));
+    shifts = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max16, max16)) - 25;
+    shifts = WEBRTC_SPL_MAX(0, shifts);
+
+    /* compute cross correlation */
+    WebRtcSpl_CrossCorrelation(corr32, target, regressor,
+                               ENH_BLOCKL_HALF, 50, (WebRtc_Word16)shifts, -1);
+
+    /* Find 3 highest correlations that should be compared for the
+       highest (corr*corr)/ener */
+
+    for (i=0;i<2;i++) {
+      lagmax[i] = WebRtcSpl_MaxIndexW32(corr32, 50);
+      corrmax[i] = corr32[lagmax[i]];
+      start = lagmax[i] - 2;
+      stop = lagmax[i] + 2;
+      start = WEBRTC_SPL_MAX(0,  start);
+      stop  = WEBRTC_SPL_MIN(49, stop);
+      for (k=start; k<=stop; k++) {
+        corr32[k] = 0;
+      }
+    }
+    lagmax[2] = WebRtcSpl_MaxIndexW32(corr32, 50);
+    corrmax[2] = corr32[lagmax[2]];
+
+    /* Calculate normalized corr^2 and ener */
+    for (i=0;i<3;i++) {
+      corrSh = 15-WebRtcSpl_GetSizeInBits(corrmax[i]);
+      ener = WebRtcSpl_DotProductWithScale(&regressor[-lagmax[i]],
+                                           &regressor[-lagmax[i]],
+                                           ENH_BLOCKL_HALF, shifts);
+      enerSh = 15-WebRtcSpl_GetSizeInBits(ener);
+      corr16[i] = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(corrmax[i], corrSh);
+      corr16[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(corr16[i],
+                                                           corr16[i], 16);
+      en16[i] = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(ener, enerSh);
+      totsh[i] = enerSh - WEBRTC_SPL_LSHIFT_W32(corrSh, 1);
+    }
+
+    /* Compare lagmax[0..3] for the (corr^2)/ener criteria */
+    ind = 0;
+    for (i=1; i<3; i++) {
+      if (totsh[ind] > totsh[i]) {
+        sh = WEBRTC_SPL_MIN(31, totsh[ind]-totsh[i]);
+        if ( WEBRTC_SPL_MUL_16_16(corr16[ind], en16[i]) <
+            WEBRTC_SPL_MUL_16_16_RSFT(corr16[i], en16[ind], sh)) {
+          ind = i;
+        }
+      } else {
+        sh = WEBRTC_SPL_MIN(31, totsh[i]-totsh[ind]);
+        if (WEBRTC_SPL_MUL_16_16_RSFT(corr16[ind], en16[i], sh) <
+            WEBRTC_SPL_MUL_16_16(corr16[i], en16[ind])) {
+          ind = i;
+        }
+      }
+    }
+
+    lag = lagmax[ind] + 10;
+
+    /* Store the estimated lag in the non-downsampled domain */
+    enh_period[ENH_NBLOCKS_TOT-new_blocks+iblock] =
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(lag, 8);
+
+    /* Store the estimated lag for backward PLC */
+    if (iLBCdec_inst->prev_enh_pl==1) {
+      if (!iblock) {
+        tlag = WEBRTC_SPL_MUL_16_16(lag, 2);
+      }
+    } else {
+      if (iblock==1) {
+        tlag = WEBRTC_SPL_MUL_16_16(lag, 2);
+      }
+    }
+
+    lag = WEBRTC_SPL_MUL_16_16(lag, 2);
+  }
+
+  if ((iLBCdec_inst->prev_enh_pl==1)||(iLBCdec_inst->prev_enh_pl==2)) {
+
+    /* Calculate the best lag of the new frame
+       This is used to interpolate backwards and mix with the PLC'd data
+    */
+
+    /* references */
+    target=in;
+    regressor=in+tlag-1;
+
+    /* scaling */
+    max16=WebRtcSpl_MaxAbsValueW16(regressor, (WebRtc_Word16)(plc_blockl+3-1));
+    if (max16>5000)
+      shifts=2;
+    else
+      shifts=0;
+
+    /* compute cross correlation */
+    WebRtcSpl_CrossCorrelation(corr32, target, regressor,
+                               plc_blockl, 3, (WebRtc_Word16)shifts, 1);
+
+    /* find lag */
+    lag=WebRtcSpl_MaxIndexW32(corr32, 3);
+    lag+=tlag-1;
+
+    /* Copy the backward PLC to plc_pred */
+
+    if (iLBCdec_inst->prev_enh_pl==1) {
+      if (lag>plc_blockl) {
+        WEBRTC_SPL_MEMCPY_W16(plc_pred, &in[lag-plc_blockl], plc_blockl);
+      } else {
+        WEBRTC_SPL_MEMCPY_W16(&plc_pred[plc_blockl-lag], in, lag);
+        WEBRTC_SPL_MEMCPY_W16(
+            plc_pred, &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl+lag],
+            (plc_blockl-lag));
+      }
+    } else {
+      int pos;
+
+      pos = plc_blockl;
+
+      while (lag<pos) {
+        WEBRTC_SPL_MEMCPY_W16(&plc_pred[pos-lag], in, lag);
+        pos = pos - lag;
+      }
+      WEBRTC_SPL_MEMCPY_W16(plc_pred, &in[lag-pos], pos);
+
+    }
+
+    if (iLBCdec_inst->prev_enh_pl==1) {
+      /* limit energy change
+         if energy in backward PLC is more than 4 times higher than the forward
+         PLC, then reduce the energy in the backward PLC vector:
+         sample 1...len-16 set energy of the vector to 4 times the forward PLC
+         sample len-15..len interpolate between 4 times fw PLC and bw PLC energy
+
+         Note: Compared to floating point code there is a slight change,
+         the window is 16 samples long instead of 10 samples to simplify the
+         calculations
+      */
+
+      max=WebRtcSpl_MaxAbsValueW16(
+          &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl], plc_blockl);
+      max16=WebRtcSpl_MaxAbsValueW16(plc_pred, plc_blockl);
+      max = WEBRTC_SPL_MAX(max, max16);
+      scale=22-(WebRtc_Word16)WebRtcSpl_NormW32(max);
+      scale=WEBRTC_SPL_MAX(scale,0);
+
+      tmp2 = WebRtcSpl_DotProductWithScale(
+          &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl],
+          &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl],
+          plc_blockl, scale);
+      tmp1 = WebRtcSpl_DotProductWithScale(plc_pred, plc_pred,
+                                           plc_blockl, scale);
+
+      /* Check the energy difference */
+      if ((tmp1>0)&&((tmp1>>2)>tmp2)) {
+        /* EnChange is now guaranteed to be <0.5
+           Calculate EnChange=tmp2/tmp1 in Q16
+        */
+
+        scale1=(WebRtc_Word16)WebRtcSpl_NormW32(tmp1);
+        tmp1=WEBRTC_SPL_SHIFT_W32(tmp1, (scale1-16)); /* using 15 bits */
+
+        tmp2=WEBRTC_SPL_SHIFT_W32(tmp2, (scale1));
+        EnChange = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp2,
+                                                      (WebRtc_Word16)tmp1);
+
+        /* Calculate the Sqrt of the energy in Q15 ((14+16)/2) */
+        SqrtEnChange = (WebRtc_Word16)WebRtcSpl_SqrtFloor(
+            WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)EnChange, 14));
+
+
+        /* Multiply first part of vector with 2*SqrtEnChange */
+        WebRtcSpl_ScaleVector(plc_pred, plc_pred, SqrtEnChange,
+                              (WebRtc_Word16)(plc_blockl-16), 14);
+
+        /* Calculate increase parameter for window part (16 last samples) */
+        /* (1-2*SqrtEnChange)/16 in Q15 */
+        inc=(2048-WEBRTC_SPL_RSHIFT_W16(SqrtEnChange, 3));
+
+        win=0;
+        tmpW16ptr=&plc_pred[plc_blockl-16];
+
+        for (i=16;i>0;i--) {
+          (*tmpW16ptr)=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+              (*tmpW16ptr), (SqrtEnChange+(win>>1)), 14);
+          /* multiply by (2.0*SqrtEnChange+win) */
+
+          win += inc;
+          tmpW16ptr++;
+        }
+      }
+
+      /* Make the linear interpolation between the forward PLC'd data
+         and the backward PLC'd data (from the new frame)
+      */
+
+      if (plc_blockl==40) {
+        inc=400; /* 1/41 in Q14 */
+      } else { /* plc_blockl==80 */
+        inc=202; /* 1/81 in Q14 */
+      }
+      win=0;
+      enh_bufPtr1=&enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl];
+      for (i=0; i<plc_blockl; i++) {
+        win+=inc;
+        *enh_bufPtr1 =
+            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT((*enh_bufPtr1), win, 14);
+        *enh_bufPtr1 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+                (16384-win), plc_pred[plc_blockl-1-i], 14);
+        enh_bufPtr1--;
+      }
+    } else {
+      WebRtc_Word16 *synt = &downsampled[LPC_FILTERORDER];
+
+      enh_bufPtr1=&enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl];
+      WEBRTC_SPL_MEMCPY_W16(enh_bufPtr1, plc_pred, plc_blockl);
+
+      /* Clear filter memory */
+      WebRtcSpl_MemSetW16(iLBCdec_inst->syntMem, 0, LPC_FILTERORDER);
+      WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemy, 0, 4);
+      WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemx, 0, 2);
+
+      /* Initialize filter memory by filtering through 2 lags */
+      WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], iLBCdec_inst->syntMem,
+                            LPC_FILTERORDER);
+      WebRtcSpl_FilterARFastQ12(
+          enh_bufPtr1,
+          synt,
+          &iLBCdec_inst->old_syntdenum[
+                                       (iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
+                                       LPC_FILTERORDER+1, (WebRtc_Word16)lag);
+
+      WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], &synt[lag-LPC_FILTERORDER],
+                            LPC_FILTERORDER);
+      WebRtcIlbcfix_HpOutput(synt, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+                             iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
+                             (WebRtc_Word16)lag);
+      WebRtcSpl_FilterARFastQ12(
+          enh_bufPtr1, synt,
+          &iLBCdec_inst->old_syntdenum[
+                                       (iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
+                                       LPC_FILTERORDER+1, (WebRtc_Word16)lag);
+
+      WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &synt[lag-LPC_FILTERORDER],
+                            LPC_FILTERORDER);
+      WebRtcIlbcfix_HpOutput(synt, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+                             iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
+                             (WebRtc_Word16)lag);
+    }
+  }
+
+
+  /* Perform enhancement block by block */
+
+  for (iblock = 0; iblock<new_blocks; iblock++) {
+    WebRtcIlbcfix_Enhancer(out+WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL),
+                           enh_buf,
+                           ENH_BUFL,
+                           (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL)+startPos),
+                           enh_period,
+                           (WebRtc_Word16*)WebRtcIlbcfix_kEnhPlocs, ENH_NBLOCKS_TOT);
+  }
+
+  return (lag);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/enhancer_interface.h b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer_interface.h
new file mode 100644
index 0000000..37b27e2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/enhancer_interface.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhancerInterface.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_INTERFACE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_INTERFACE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * interface for enhancer
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
+    WebRtc_Word16 *out,     /* (o) enhanced signal */
+    WebRtc_Word16 *in,      /* (i) unenhanced signal */
+    iLBC_Dec_Inst_t *iLBCdec_inst /* (i) buffers etc */
+                                        );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c b/trunk/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
new file mode 100644
index 0000000..7cece26
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FilteredCbVecs.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Construct an additional codebook vector by filtering the
+ *  initial codebook buffer. This vector is then used to expand
+ *  the codebook with an additional section.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_FilteredCbVecs(
+    WebRtc_Word16 *cbvectors, /* (o) Codebook vector for the higher section */
+    WebRtc_Word16 *CBmem,  /* (i) Codebook memory that is filtered to create a
+                                           second CB section */
+    int lMem,  /* (i) Length of codebook memory */
+    WebRtc_Word16 samples    /* (i) Number of samples to filter */
+                                  ) {
+
+  /* Set up the memory, start with zero state */
+  WebRtcSpl_MemSetW16(CBmem+lMem, 0, CB_HALFFILTERLEN);
+  WebRtcSpl_MemSetW16(CBmem-CB_HALFFILTERLEN, 0, CB_HALFFILTERLEN);
+  WebRtcSpl_MemSetW16(cbvectors, 0, lMem-samples);
+
+  /* Filter to obtain the filtered CB memory */
+
+  WebRtcSpl_FilterMAFastQ12(
+      CBmem+CB_HALFFILTERLEN+lMem-samples, cbvectors+lMem-samples,
+      (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev, CB_FILTERLEN, samples);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h b/trunk/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
new file mode 100644
index 0000000..c502e8f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FilteredCbVecs.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Construct an additional codebook vector by filtering the
+ *  initial codebook buffer. This vector is then used to expand
+ *  the codebook with an additional section.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_FilteredCbVecs(
+    WebRtc_Word16 *cbvectors, /* (o) Codebook vector for the higher section */
+    WebRtc_Word16 *CBmem,  /* (i) Codebook memory that is filtered to create a
+                                           second CB section */
+    int lMem,  /* (i) Length of codebook memory */
+    WebRtc_Word16 samples    /* (i) Number of samples to filter */
+                                  );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/frame_classify.c b/trunk/src/modules/audio_coding/codecs/ilbc/frame_classify.c
new file mode 100644
index 0000000..ea3675e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/frame_classify.c
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FrameClassify.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Classification of subframes to localize start state
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_FrameClassify(
+    /* (o) Index to the max-energy sub frame */
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i/o) the encoder state structure */
+    WebRtc_Word16 *residualFIX /* (i) lpc residual signal */
+                                                ){
+  WebRtc_Word16 max, scale;
+  WebRtc_Word32 ssqEn[NSUB_MAX-1];
+  WebRtc_Word16 *ssqPtr;
+  WebRtc_Word32 *seqEnPtr;
+  WebRtc_Word32 maxW32;
+  WebRtc_Word16 scale1;
+  WebRtc_Word16 pos;
+  int n;
+
+  /*
+    Calculate the energy of each of the 80 sample blocks
+    in the draft the 4 first and last samples are windowed with 1/5...4/5
+    and 4/5...1/5 respectively. To simplify for the fixpoint we have changed
+    this to 0 0 1 1 and 1 1 0 0
+  */
+
+  max = WebRtcSpl_MaxAbsValueW16(residualFIX, iLBCenc_inst->blockl);
+  scale=WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max,max));
+
+  /* Scale to maximum 24 bits so that it won't overflow for 76 samples */
+  scale = scale-24;
+  scale1 = WEBRTC_SPL_MAX(0, scale);
+
+  /* Calculate energies */
+  ssqPtr=residualFIX + 2;
+  seqEnPtr=ssqEn;
+  for (n=(iLBCenc_inst->nsub-1); n>0; n--) {
+    (*seqEnPtr) = WebRtcSpl_DotProductWithScale(ssqPtr, ssqPtr, 76, scale1);
+    ssqPtr += 40;
+    seqEnPtr++;
+  }
+
+  /* Scale to maximum 20 bits in order to allow for the 11 bit window */
+  maxW32 = WebRtcSpl_MaxValueW32(ssqEn, (WebRtc_Word16)(iLBCenc_inst->nsub-1));
+  scale = WebRtcSpl_GetSizeInBits(maxW32) - 20;
+  scale1 = WEBRTC_SPL_MAX(0, scale);
+
+  /* Window each 80 block with the ssqEn_winTbl window to give higher probability for
+     the blocks in the middle
+  */
+  seqEnPtr=ssqEn;
+  if (iLBCenc_inst->mode==20) {
+    ssqPtr=(WebRtc_Word16*)WebRtcIlbcfix_kStartSequenceEnrgWin+1;
+  } else {
+    ssqPtr=(WebRtc_Word16*)WebRtcIlbcfix_kStartSequenceEnrgWin;
+  }
+  for (n=(iLBCenc_inst->nsub-1); n>0; n--) {
+    (*seqEnPtr)=WEBRTC_SPL_MUL(((*seqEnPtr)>>scale1), (*ssqPtr));
+    seqEnPtr++;
+    ssqPtr++;
+  }
+
+  /* Extract the best choice of start state */
+  pos = WebRtcSpl_MaxIndexW32(ssqEn, (WebRtc_Word16)(iLBCenc_inst->nsub-1)) + 1;
+
+  return(pos);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/frame_classify.h b/trunk/src/modules/audio_coding/codecs/ilbc/frame_classify.h
new file mode 100644
index 0000000..faf4666
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/frame_classify.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FrameClassify.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_
+
+WebRtc_Word16 WebRtcIlbcfix_FrameClassify(
+    /* (o) Index to the max-energy sub frame */
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i/o) the encoder state structure */
+    WebRtc_Word16 *residualFIX /* (i) lpc residual signal */
+                                                );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/gain_dequant.c b/trunk/src/modules/audio_coding/codecs/ilbc/gain_dequant.c
new file mode 100644
index 0000000..9450a80
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/gain_dequant.c
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainDequant.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  decoder for quantized gains in the gain-shape coding of
+ *  residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainDequant(
+    /* (o) quantized gain value (Q14) */
+    WebRtc_Word16 index, /* (i) quantization index */
+    WebRtc_Word16 maxIn, /* (i) maximum of unquantized gain (Q14) */
+    WebRtc_Word16 stage /* (i) The stage of the search */
+                                                ){
+  WebRtc_Word16 scale;
+  const WebRtc_Word16 *gain;
+
+  /* obtain correct scale factor */
+
+  scale=WEBRTC_SPL_ABS_W16(maxIn);
+  scale = WEBRTC_SPL_MAX(1638, scale);  /* if lower than 0.1, set it to 0.1 */
+
+  /* select the quantization table and return the decoded value */
+  gain = WebRtcIlbcfix_kGain[stage];
+
+  return((WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(scale, gain[index])+8192)>>14));
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/gain_dequant.h b/trunk/src/modules/audio_coding/codecs/ilbc/gain_dequant.h
new file mode 100644
index 0000000..28f2ceb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/gain_dequant.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainDequant.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  decoder for quantized gains in the gain-shape coding of
+ *  residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainDequant(
+    /* (o) quantized gain value (Q14) */
+    WebRtc_Word16 index, /* (i) quantization index */
+    WebRtc_Word16 maxIn, /* (i) maximum of unquantized gain (Q14) */
+    WebRtc_Word16 stage /* (i) The stage of the search */
+                                         );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/gain_quant.c b/trunk/src/modules/audio_coding/codecs/ilbc/gain_quant.c
new file mode 100644
index 0000000..bdf88a5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/gain_quant.c
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainQuant.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  quantizer for the gain in the gain-shape coding of residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
+    WebRtc_Word16 gain, /* (i) gain value Q14 */
+    WebRtc_Word16 maxIn, /* (i) maximum of gain value Q14 */
+    WebRtc_Word16 stage, /* (i) The stage of the search */
+    WebRtc_Word16 *index /* (o) quantization index */
+                                        ) {
+
+  WebRtc_Word16 scale, returnVal, cblen;
+  WebRtc_Word32 gainW32, measure1, measure2;
+  const WebRtc_Word16 *cbPtr, *cb;
+  int loc, noMoves, noChecks, i;
+
+  /* ensure a lower bound (0.1) on the scaling factor */
+
+  scale = WEBRTC_SPL_MAX(1638, maxIn);
+
+  /* select the quantization table and calculate
+     the length of the table and the number of
+     steps in the binary search that are needed */
+  cb = WebRtcIlbcfix_kGain[stage];
+  cblen = 32>>stage;
+  noChecks = 4-stage;
+
+  /* Multiply the gain with 2^14 to make the comparison
+     easier and with higher precision */
+  gainW32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)gain, 14);
+
+  /* Do a binary search, starting in the middle of the CB
+     loc - defines the current position in the table
+     noMoves - defines the number of steps to move in the CB in order
+     to get next CB location
+  */
+
+  loc = cblen>>1;
+  noMoves = loc;
+  cbPtr = cb + loc; /* Centre of CB */
+
+  for (i=noChecks;i>0;i--) {
+    noMoves>>=1;
+    measure1=WEBRTC_SPL_MUL_16_16(scale, (*cbPtr));
+
+    /* Move up if gain is larger, otherwise move down in table */
+    measure1 = measure1 - gainW32;
+
+    if (0>measure1) {
+      cbPtr+=noMoves;
+      loc+=noMoves;
+    } else {
+      cbPtr-=noMoves;
+      loc-=noMoves;
+    }
+  }
+
+  /* Check which value is the closest one: loc-1, loc or loc+1 */
+
+  measure1=WEBRTC_SPL_MUL_16_16(scale, (*cbPtr));
+  if (gainW32>measure1) {
+    /* Check against value above loc */
+    measure2=WEBRTC_SPL_MUL_16_16(scale, (*(cbPtr+1)));
+    if ((measure2-gainW32)<(gainW32-measure1)) {
+      loc+=1;
+    }
+  } else {
+    /* Check against value below loc */
+    measure2=WEBRTC_SPL_MUL_16_16(scale, (*(cbPtr-1)));
+    if ((gainW32-measure2)<=(measure1-gainW32)) {
+      loc-=1;
+    }
+  }
+
+  /* Guard against getting outside the table. The calculation above can give a location
+     which is one above the maximum value (in very rare cases) */
+  loc=WEBRTC_SPL_MIN(loc, (cblen-1));
+  *index=loc;
+
+  /* Calculate the quantized gain value (in Q14) */
+  returnVal=(WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(scale, cb[loc])+8192)>>14);
+
+  /* return the quantized value */
+  return(returnVal);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/gain_quant.h b/trunk/src/modules/audio_coding/codecs/ilbc/gain_quant.h
new file mode 100644
index 0000000..a2f0596
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/gain_quant.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainQuant.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  quantizer for the gain in the gain-shape coding of residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
+    WebRtc_Word16 gain, /* (i) gain value Q14 */
+    WebRtc_Word16 maxIn, /* (i) maximum of gain value Q14 */
+    WebRtc_Word16 stage, /* (i) The stage of the search */
+    WebRtc_Word16 *index /* (o) quantization index */
+                                       );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/get_cd_vec.c b/trunk/src/modules/audio_coding/codecs/ilbc/get_cd_vec.c
new file mode 100644
index 0000000..aba3e31
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/get_cd_vec.c
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetCbVec.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "create_augmented_vec.h"
+
+/*----------------------------------------------------------------*
+ *  Construct codebook vector for given index.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetCbVec(
+    WebRtc_Word16 *cbvec,   /* (o) Constructed codebook vector */
+    WebRtc_Word16 *mem,   /* (i) Codebook buffer */
+    WebRtc_Word16 index,   /* (i) Codebook index */
+    WebRtc_Word16 lMem,   /* (i) Length of codebook buffer */
+    WebRtc_Word16 cbveclen   /* (i) Codebook vector length */
+                            ){
+  WebRtc_Word16 k, base_size;
+  WebRtc_Word16 lag;
+  /* Stack based */
+  WebRtc_Word16 tempbuff2[SUBL+5];
+
+  /* Determine size of codebook sections */
+
+  base_size=lMem-cbveclen+1;
+
+  if (cbveclen==SUBL) {
+    base_size+=WEBRTC_SPL_RSHIFT_W16(cbveclen,1);
+  }
+
+  /* No filter -> First codebook section */
+
+  if (index<lMem-cbveclen+1) {
+
+    /* first non-interpolated vectors */
+
+    k=index+cbveclen;
+    /* get vector */
+    WEBRTC_SPL_MEMCPY_W16(cbvec, mem+lMem-k, cbveclen);
+
+  } else if (index < base_size) {
+
+    /* Calculate lag */
+
+    k=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, (index-(lMem-cbveclen+1)))+cbveclen;
+
+    lag=WEBRTC_SPL_RSHIFT_W16(k, 1);
+
+    WebRtcIlbcfix_CreateAugmentedVec(lag, mem+lMem, cbvec);
+
+  }
+
+  /* Higher codebook section based on filtering */
+
+  else {
+
+    WebRtc_Word16 memIndTest;
+
+    /* first non-interpolated vectors */
+
+    if (index-base_size<lMem-cbveclen+1) {
+
+      /* Set up filter memory, stuff zeros outside memory buffer */
+
+      memIndTest = lMem-(index-base_size+cbveclen);
+
+      WebRtcSpl_MemSetW16(mem-CB_HALFFILTERLEN, 0, CB_HALFFILTERLEN);
+      WebRtcSpl_MemSetW16(mem+lMem, 0, CB_HALFFILTERLEN);
+
+      /* do filtering to get the codebook vector */
+
+      WebRtcSpl_FilterMAFastQ12(
+          &mem[memIndTest+4], cbvec, (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev,
+          CB_FILTERLEN, cbveclen);
+    }
+
+    /* interpolated vectors */
+
+    else {
+      /* Stuff zeros outside memory buffer  */
+      memIndTest = lMem-cbveclen-CB_FILTERLEN;
+      WebRtcSpl_MemSetW16(mem+lMem, 0, CB_HALFFILTERLEN);
+
+      /* do filtering */
+      WebRtcSpl_FilterMAFastQ12(
+          &mem[memIndTest+7], tempbuff2, (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev,
+          CB_FILTERLEN, (WebRtc_Word16)(cbveclen+5));
+
+      /* Calculate lag index */
+      lag = (cbveclen<<1)-20+index-base_size-lMem-1;
+
+      WebRtcIlbcfix_CreateAugmentedVec(lag, tempbuff2+SUBL+5, cbvec);
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/get_cd_vec.h b/trunk/src/modules/audio_coding/codecs/ilbc/get_cd_vec.h
new file mode 100644
index 0000000..99b5d4e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/get_cd_vec.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetCbVec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_CD_VEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_CD_VEC_H_
+
+void WebRtcIlbcfix_GetCbVec(
+    WebRtc_Word16 *cbvec,   /* (o) Constructed codebook vector */
+    WebRtc_Word16 *mem,   /* (i) Codebook buffer */
+    WebRtc_Word16 index,   /* (i) Codebook index */
+    WebRtc_Word16 lMem,   /* (i) Length of codebook buffer */
+    WebRtc_Word16 cbveclen   /* (i) Codebook vector length */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/trunk/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
new file mode 100644
index 0000000..c55e918
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetLspPoly.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Construct the polynomials F1(z) and F2(z) from the LSP
+ * (Computations are done in Q24)
+ *
+ * The expansion is performed using the following recursion:
+ *
+ * f[0] = 1;
+ * tmp = -2.0 * lsp[0];
+ * f[1] = tmp;
+ * for (i=2; i<=5; i++) {
+ *    b = -2.0 * lsp[2*i-2];
+ *    f[i] = tmp*f[i-1] + 2.0*f[i-2];
+ *    for (j=i; j>=2; j--) {
+ *       f[j] = f[j] + tmp*f[j-1] + f[j-2];
+ *    }
+ *    f[i] = f[i] + tmp;
+ * }
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetLspPoly(
+    WebRtc_Word16 *lsp, /* (i) LSP in Q15 */
+    WebRtc_Word32 *f)  /* (o) polynomial in Q24 */
+{
+  WebRtc_Word32 tmpW32;
+  int i, j;
+  WebRtc_Word16 high, low;
+  WebRtc_Word16 *lspPtr;
+  WebRtc_Word32 *fPtr;
+
+  lspPtr = lsp;
+  fPtr = f;
+  /* f[0] = 1.0 (Q24) */
+  (*fPtr) = (WebRtc_Word32)16777216;
+  fPtr++;
+
+  (*fPtr) = WEBRTC_SPL_MUL((*lspPtr), -1024);
+  fPtr++;
+  lspPtr+=2;
+
+  for(i=2; i<=5; i++)
+  {
+    (*fPtr) = fPtr[-2];
+
+    for(j=i; j>1; j--)
+    {
+      /* Compute f[j] = f[j] + tmp*f[j-1] + f[j-2]; */
+      high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(fPtr[-1], 16);
+      low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(fPtr[-1]-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)high),16), 1);
+
+      tmpW32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(high, (*lspPtr)), 2) +
+          WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16_RSFT(low, (*lspPtr), 15), 2);
+
+      (*fPtr) += fPtr[-2];
+      (*fPtr) -= tmpW32;
+      fPtr--;
+    }
+    (*fPtr) -= (WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)(*lspPtr), 10);
+
+    fPtr+=i;
+    lspPtr+=2;
+  }
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.h b/trunk/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
new file mode 100644
index 0000000..b0520b4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetLspPoly.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Construct the polynomials F1(z) and F2(z) from the LSP
+ * (Computations are done in Q24)
+ *
+ * The expansion is performed using the following recursion:
+ *
+ * f[0] = 1;
+ * tmp = -2.0 * lsp[0];
+ * f[1] = tmp;
+ * for (i=2; i<=5; i++) {
+ *    b = -2.0 * lsp[2*i-2];
+ *    f[i] = tmp*f[i-1] + 2.0*f[i-2];
+ *    for (j=i; j>=2; j--) {
+ *       f[j] = f[j] + tmp*f[j-1] + f[j-2];
+ *    }
+ *    f[i] = f[i] + tmp;
+ * }
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetLspPoly(
+    WebRtc_Word16 *lsp, /* (i) LSP in Q15 */
+    WebRtc_Word32 *f);  /* (o) polynomial in Q24 */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/get_sync_seq.c b/trunk/src/modules/audio_coding/codecs/ilbc/get_sync_seq.c
new file mode 100644
index 0000000..ce72865
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/get_sync_seq.c
@@ -0,0 +1,108 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetSyncSeq.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "refiner.h"
+#include "nearest_neighbor.h"
+
+/*----------------------------------------------------------------*
+ * get the pitch-synchronous sample sequence
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetSyncSeq(
+    WebRtc_Word16 *idata,   /* (i) original data */
+    WebRtc_Word16 idatal,   /* (i) dimension of data */
+    WebRtc_Word16 centerStartPos, /* (i) where current block starts */
+    WebRtc_Word16 *period,   /* (i) rough-pitch-period array       (Q-2) */
+    WebRtc_Word16 *plocs,   /* (i) where periods of period array are taken (Q-2) */
+    WebRtc_Word16 periodl,   /* (i) dimension period array */
+    WebRtc_Word16 hl,    /* (i) 2*hl+1 is the number of sequences */
+    WebRtc_Word16 *surround  /* (i/o) The contribution from this sequence
+                                summed with earlier contributions */
+                              ){
+  WebRtc_Word16 i,centerEndPos,q;
+  /* Stack based */
+  WebRtc_Word16 lagBlock[2*ENH_HL+1];
+  WebRtc_Word16 blockStartPos[2*ENH_HL+1]; /* Defines the position to search around (Q2) */
+  WebRtc_Word16 plocs2[ENH_PLOCSL];
+
+  centerEndPos=centerStartPos+ENH_BLOCKL-1;
+
+  /* present (find predicted lag from this position) */
+
+  WebRtcIlbcfix_NearestNeighbor(lagBlock+hl,plocs,
+                                (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, (centerStartPos+centerEndPos)),
+                                periodl);
+
+  blockStartPos[hl]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, centerStartPos);
+
+  /* past (find predicted position and perform a refined
+     search to find the best sequence) */
+
+  for(q=hl-1;q>=0;q--) {
+    blockStartPos[q]=blockStartPos[q+1]-period[lagBlock[q+1]];
+
+    WebRtcIlbcfix_NearestNeighbor(lagBlock+q, plocs,
+                                  (WebRtc_Word16)(blockStartPos[q] + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)-period[lagBlock[q+1]]),
+                                  periodl);
+
+    if((blockStartPos[q]-(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_OVERHANG))>=0) {
+
+      /* Find the best possible sequence in the 4 times upsampled
+         domain around blockStartPos+q */
+      WebRtcIlbcfix_Refiner(blockStartPos+q,idata,idatal,
+                            centerStartPos,blockStartPos[q],surround,WebRtcIlbcfix_kEnhWt[q]);
+
+    } else {
+      /* Don't add anything since this sequence would
+         be outside the buffer */
+    }
+  }
+
+  /* future (find predicted position and perform a refined
+     search to find the best sequence) */
+
+  for(i=0;i<periodl;i++) {
+    plocs2[i]=(plocs[i]-period[i]);
+  }
+
+  for(q=hl+1;q<=WEBRTC_SPL_MUL_16_16(2, hl);q++) {
+
+    WebRtcIlbcfix_NearestNeighbor(lagBlock+q,plocs2,
+                                  (WebRtc_Word16)(blockStartPos[q-1]+
+                                                  (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)),periodl);
+
+    blockStartPos[q]=blockStartPos[q-1]+period[lagBlock[q]];
+
+    if( (blockStartPos[q]+(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, (ENH_BLOCKL+ENH_OVERHANG)))
+        <
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, idatal)) {
+
+      /* Find the best possible sequence in the 4 times upsampled
+         domain around blockStartPos+q */
+      WebRtcIlbcfix_Refiner(blockStartPos+q, idata, idatal,
+                            centerStartPos,blockStartPos[q],surround,WebRtcIlbcfix_kEnhWt[2*hl-q]);
+
+    }
+    else {
+      /* Don't add anything since this sequence would
+         be outside the buffer */
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/get_sync_seq.h b/trunk/src/modules/audio_coding/codecs/ilbc/get_sync_seq.h
new file mode 100644
index 0000000..a0ffd39
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/get_sync_seq.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetSyncSeq.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * get the pitch-synchronous sample sequence
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetSyncSeq(
+    WebRtc_Word16 *idata,   /* (i) original data */
+    WebRtc_Word16 idatal,   /* (i) dimension of data */
+    WebRtc_Word16 centerStartPos, /* (i) where current block starts */
+    WebRtc_Word16 *period,   /* (i) rough-pitch-period array       (Q-2) */
+    WebRtc_Word16 *plocs,   /* (i) where periods of period array are taken (Q-2) */
+    WebRtc_Word16 periodl,   /* (i) dimension period array */
+    WebRtc_Word16 hl,    /* (i) 2*hl+1 is the number of sequences */
+    WebRtc_Word16 *surround  /* (i/o) The contribution from this sequence
+                                summed with earlier contributions */
+                              );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/hp_input.c b/trunk/src/modules/audio_coding/codecs/ilbc/hp_input.c
new file mode 100644
index 0000000..f202f62
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/hp_input.c
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpInput.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  high-pass filter of input with *0.5 and saturation
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_HpInput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2nd order)
+                                                                   {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                                                                   is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                                                                   yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len)      /* (i)   Number of samples to filter */
+{
+  int i;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word32 tmpW32b;
+
+  for (i=0; i<len; i++) {
+
+    /*
+        y[i] = b[0]*x[i] + b[1]*x[i-1] + b[2]*x[i-2]
+        + (-a[1])*y[i-1] + (-a[2])*y[i-2];
+    */
+
+    tmpW32  = WEBRTC_SPL_MUL_16_16(y[1], ba[3]);     /* (-a[1])*y[i-1] (low part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[3], ba[4]);     /* (-a[2])*y[i-2] (low part) */
+    tmpW32 = (tmpW32>>15);
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[0], ba[3]);     /* (-a[1])*y[i-1] (high part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[2], ba[4]);     /* (-a[2])*y[i-2] (high part) */
+    tmpW32 = (tmpW32<<1);
+
+    tmpW32 += WEBRTC_SPL_MUL_16_16(signal[i], ba[0]);   /* b[0]*x[0] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[0],      ba[1]);   /* b[1]*x[i-1] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[1],      ba[2]);   /* b[2]*x[i-2] */
+
+    /* Update state (input part) */
+    x[1] = x[0];
+    x[0] = signal[i];
+
+    /* Rounding in Q(12+1), i.e. add 2^12 */
+    tmpW32b = tmpW32 + 4096;
+
+    /* Saturate (to 2^28) so that the HP filtered signal does not overflow */
+    tmpW32b = WEBRTC_SPL_SAT((WebRtc_Word32)268435455, tmpW32b, (WebRtc_Word32)-268435456);
+
+    /* Convert back to Q0 and multiply with 0.5 */
+    signal[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 13);
+
+    /* Update state (filtered part) */
+    y[2] = y[0];
+    y[3] = y[1];
+
+    /* upshift tmpW32 by 3 with saturation */
+    if (tmpW32>268435455) {
+      tmpW32 = WEBRTC_SPL_WORD32_MAX;
+    } else if (tmpW32<-268435456) {
+      tmpW32 = WEBRTC_SPL_WORD32_MIN;
+    } else {
+      tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
+    }
+
+    y[0] = (WebRtc_Word16)(tmpW32 >> 16);
+    y[1] = (WebRtc_Word16)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y[0], 16))>>1);
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/hp_input.h b/trunk/src/modules/audio_coding/codecs/ilbc/hp_input.h
new file mode 100644
index 0000000..f56c4f7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/hp_input.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpInput.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_HpInput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2nd order)
+                                                                   {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                                                                   is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                                                                   yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len);     /* (i)   Number of samples to filter */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/hp_output.c b/trunk/src/modules/audio_coding/codecs/ilbc/hp_output.c
new file mode 100644
index 0000000..8e1c919
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/hp_output.c
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpOutput.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  high-pass filter of output and *2 with saturation
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_HpOutput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2nd order)
+                                                                   {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                                                                   is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                                                                   yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len)      /* (i)   Number of samples to filter */
+{
+  int i;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word32 tmpW32b;
+
+  for (i=0; i<len; i++) {
+
+    /*
+      y[i] = b[0]*x[i] + b[1]*x[i-1] + b[2]*x[i-2]
+      + (-a[1])*y[i-1] + (-a[2])*y[i-2];
+    */
+
+    tmpW32  = WEBRTC_SPL_MUL_16_16(y[1], ba[3]);     /* (-a[1])*y[i-1] (low part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[3], ba[4]);     /* (-a[2])*y[i-2] (low part) */
+    tmpW32 = (tmpW32>>15);
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[0], ba[3]);     /* (-a[1])*y[i-1] (high part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[2], ba[4]);     /* (-a[2])*y[i-2] (high part) */
+    tmpW32 = (tmpW32<<1);
+
+    tmpW32 += WEBRTC_SPL_MUL_16_16(signal[i], ba[0]);   /* b[0]*x[0] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[0],      ba[1]);   /* b[1]*x[i-1] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[1],      ba[2]);   /* b[2]*x[i-2] */
+
+    /* Update state (input part) */
+    x[1] = x[0];
+    x[0] = signal[i];
+
+    /* Rounding in Q(12-1), i.e. add 2^10 */
+    tmpW32b = tmpW32 + 1024;
+
+    /* Saturate (to 2^26) so that the HP filtered signal does not overflow */
+    tmpW32b = WEBRTC_SPL_SAT((WebRtc_Word32)67108863, tmpW32b, (WebRtc_Word32)-67108864);
+
+    /* Convert back to Q0 and multiply with 2 */
+    signal[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 11);
+
+    /* Update state (filtered part) */
+    y[2] = y[0];
+    y[3] = y[1];
+
+    /* upshift tmpW32 by 3 with saturation */
+    if (tmpW32>268435455) {
+      tmpW32 = WEBRTC_SPL_WORD32_MAX;
+    } else if (tmpW32<-268435456) {
+      tmpW32 = WEBRTC_SPL_WORD32_MIN;
+    } else {
+      tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
+    }
+
+    y[0] = (WebRtc_Word16)(tmpW32 >> 16);
+    y[1] = (WebRtc_Word16)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y[0], 16))>>1);
+
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/hp_output.h b/trunk/src/modules/audio_coding/codecs/ilbc/hp_output.h
new file mode 100644
index 0000000..c9a7426
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/hp_output.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpOutput.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_HpOutput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2nd order)
+                               {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                               is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                              yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len);      /* (i)   Number of samples to filter */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/ilbc.c b/trunk/src/modules/audio_coding/codecs/ilbc/ilbc.c
new file mode 100644
index 0000000..75b64c4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/ilbc.c
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ iLBCInterface.c
+
+******************************************************************/
+
+#include "ilbc.h"
+#include "defines.h"
+#include "init_encode.h"
+#include "encode.h"
+#include "init_decode.h"
+#include "decode.h"
+#include <stdlib.h>
+
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst, WebRtc_Word16 *ILBCENC_inst_Addr, WebRtc_Word16 *size) {
+  *iLBC_encinst=(iLBC_encinst_t*)ILBCENC_inst_Addr;
+  *size=sizeof(iLBC_Enc_Inst_t)/sizeof(WebRtc_Word16);
+  if (*iLBC_encinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst, WebRtc_Word16 *ILBCDEC_inst_Addr, WebRtc_Word16 *size) {
+  *iLBC_decinst=(iLBC_decinst_t*)ILBCDEC_inst_Addr;
+  *size=sizeof(iLBC_Dec_Inst_t)/sizeof(WebRtc_Word16);
+  if (*iLBC_decinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst) {
+  *iLBC_encinst=(iLBC_encinst_t*)malloc(sizeof(iLBC_Enc_Inst_t));
+  if (*iLBC_encinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst) {
+  *iLBC_decinst=(iLBC_decinst_t*)malloc(sizeof(iLBC_Dec_Inst_t));
+  if (*iLBC_decinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst) {
+  free(iLBC_encinst);
+  return(0);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst) {
+  free(iLBC_decinst);
+  return(0);
+}
+
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst, WebRtc_Word16 mode)
+{
+  if ((mode==20)||(mode==30)) {
+    WebRtcIlbcfix_InitEncode((iLBC_Enc_Inst_t*) iLBCenc_inst, mode);
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst, WebRtc_Word16 *speechIn, WebRtc_Word16 len, WebRtc_Word16 *encoded) {
+
+  WebRtc_Word16 pos = 0;
+  WebRtc_Word16 encpos = 0;
+
+  if ((len != ((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl) &&
+#ifdef SPLIT_10MS
+      (len != 80) &&
+#endif
+      (len != 2*((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl) &&
+      (len != 3*((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl))
+  {
+    /* A maximum of 3 frames/packet is allowed */
+    return(-1);
+  } else {
+
+    /* call encoder */
+    while (pos<len) {
+      WebRtcIlbcfix_EncodeImpl((WebRtc_UWord16*) &encoded[encpos], &speechIn[pos], (iLBC_Enc_Inst_t*) iLBCenc_inst);
+#ifdef SPLIT_10MS
+      pos += 80;
+      if(((iLBC_Enc_Inst_t*)iLBCenc_inst)->section == 0)
+#else
+        pos += ((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl;
+#endif
+      encpos += ((iLBC_Enc_Inst_t*)iLBCenc_inst)->no_of_words;
+    }
+    return (encpos*2);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 mode) {
+  if ((mode==20)||(mode==30)) {
+    WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, mode, 1);
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+WebRtc_Word16 WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst) {
+  WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, 20, 1);
+  return(0);
+}
+WebRtc_Word16 WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst) {
+  WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, 30, 1);
+  return(0);
+}
+
+
+WebRtc_Word16 WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
+                                  WebRtc_Word16 *encoded,
+                                  WebRtc_Word16 len,
+                                  WebRtc_Word16 *decoded,
+                                  WebRtc_Word16 *speechType)
+{
+  /* Decode a payload of 1-3 iLBC frames. If len matches the other
+     frame-size mode (20 ms vs 30 ms) instead, the decoder is
+     re-initialized for that mode first -- which also resets its internal
+     state (see WebRtcIlbcfix_InitDecode). Returns the number of decoded
+     samples, or -1 for an unsupported payload length. */
+  int i=0;  /* number of frames decoded so far */
+  /* Allow for automatic switching between the frame sizes
+     (although you do get some discontinuity) */
+  if ((len==((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==2*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==3*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)) {
+    /* ok, do nothing */
+  } else {
+    /* Test if the mode has changed */
+    if (((iLBC_Dec_Inst_t*)iLBCdec_inst)->mode==20) {
+      if ((len==NO_OF_BYTES_30MS)||
+          (len==2*NO_OF_BYTES_30MS)||
+          (len==3*NO_OF_BYTES_30MS)) {
+        WebRtcIlbcfix_InitDecode(((iLBC_Dec_Inst_t*)iLBCdec_inst), 30, ((iLBC_Dec_Inst_t*)iLBCdec_inst)->use_enhancer);
+      } else {
+        /* Unsupported frame length */
+        return(-1);
+      }
+    } else {
+      if ((len==NO_OF_BYTES_20MS)||
+          (len==2*NO_OF_BYTES_20MS)||
+          (len==3*NO_OF_BYTES_20MS)) {
+        WebRtcIlbcfix_InitDecode(((iLBC_Dec_Inst_t*)iLBCdec_inst), 20, ((iLBC_Dec_Inst_t*)iLBCdec_inst)->use_enhancer);
+      } else {
+        /* Unsupported frame length */
+        return(-1);
+      }
+    }
+  }
+
+  /* Decode the frames back-to-back: each frame consumes no_of_words
+     16-bit words of payload and produces blockl samples. */
+  while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+    i++;
+  }
+  /* iLBC does not support VAD/CNG yet */
+  *speechType=1;
+  return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
+                                       WebRtc_Word16 *encoded,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 *decoded,
+                                       WebRtc_Word16 *speechType)
+{
+  /* Decode 1-3 frames at the instance's configured frame size; len must
+     be exactly 1x, 2x or 3x no_of_bytes, otherwise -1 is returned. */
+  iLBC_Dec_Inst_t *inst = (iLBC_Dec_Inst_t *) iLBCdec_inst;
+  int frames = 0;
+
+  if ((len != inst->no_of_bytes) &&
+      (len != 2 * inst->no_of_bytes) &&
+      (len != 3 * inst->no_of_bytes)) {
+    return -1;
+  }
+
+  /* Each frame consumes no_of_words payload words and yields blockl samples. */
+  while (frames * inst->no_of_bytes < len) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[frames * inst->blockl],
+                             (WebRtc_UWord16 *) &encoded[frames * inst->no_of_words],
+                             inst, 1);
+    frames++;
+  }
+
+  /* iLBC does not support VAD/CNG yet */
+  *speechType = 1;
+  return frames * inst->blockl;
+}
+
+WebRtc_Word16 WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
+                                       WebRtc_Word16 *encoded,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 *decoded,
+                                       WebRtc_Word16 *speechType)
+{
+  /* Decode 1-3 frames at the instance's configured frame size; len must
+     be exactly 1x, 2x or 3x no_of_bytes, otherwise -1 is returned. */
+  iLBC_Dec_Inst_t *dec = (iLBC_Dec_Inst_t *) iLBCdec_inst;
+  int numFrames = 0;
+
+  if ((len != dec->no_of_bytes) &&
+      (len != 2 * dec->no_of_bytes) &&
+      (len != 3 * dec->no_of_bytes)) {
+    return -1;
+  }
+
+  /* Each frame consumes no_of_words payload words and yields blockl samples. */
+  while (numFrames * dec->no_of_bytes < len) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[numFrames * dec->blockl],
+                             (WebRtc_UWord16 *) &encoded[numFrames * dec->no_of_words],
+                             dec, 1);
+    numFrames++;
+  }
+
+  /* iLBC does not support VAD/CNG yet */
+  *speechType = 1;
+  return numFrames * dec->blockl;
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 *decoded, WebRtc_Word16 noOfLostFrames) {
+  /* Produce noOfLostFrames frames of concealment audio by running the
+     decoder in PLC mode (last argument 0 = no payload available).
+     Returns the number of samples written to decoded. */
+  iLBC_Dec_Inst_t *inst = (iLBC_Dec_Inst_t *) iLBCdec_inst;
+  WebRtc_UWord16 dummyPayload;  /* presumably unread when mode == 0 */
+  int frame;
+
+  for (frame = 0; frame < noOfLostFrames; frame++) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[frame * inst->blockl], &dummyPayload, inst, 0);
+  }
+  return noOfLostFrames * inst->blockl;
+}
+
+WebRtc_Word16 WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 *decoded, WebRtc_Word16 noOfLostFrames) {
+
+  /* Update the decoder after a loss without producing concealment data
+     (an external PLC such as NetEQ generates the audio). The two unused
+     parameters are kept to satisfy NetEQ's function-pointer signature. */
+  (void)decoded;
+  (void)noOfLostFrames;
+
+  /* Clear the enhancer buffer and flag the loss (prev_enh_pl = 2;
+     exact semantics of the value 2 defined by the enhancer code). */
+  WebRtcSpl_MemSetW16(((iLBC_Dec_Inst_t*)iLBCdec_inst)->enh_buf, 0, ENH_BUFL);
+  ((iLBC_Dec_Inst_t*)iLBCdec_inst)->prev_enh_pl = 2;
+
+  return 0;
+}
+
+void WebRtcIlbcfix_version(char *version)
+{
+  /* Copy the iLBC version string (including the terminator) into the
+     caller's buffer; the API doc in ilbc.h allows up to 20 characters. */
+  static const char kVersionString[] = "1.1.1";
+  strcpy(version, kVersionString);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/ilbc.gypi b/trunk/src/modules/audio_coding/codecs/ilbc/ilbc.gypi
new file mode 100644
index 0000000..57410c5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/ilbc.gypi
@@ -0,0 +1,190 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # Fixed-point iLBC codec build description: the 'iLBC' target builds the
+  # encoder/decoder library; a standalone 'iLBCtest' command-line binary is
+  # added only when building outside of Chromium.
+  'targets': [
+    {
+      'target_name': 'iLBC',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        'interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'interface',
+        ],
+      },
+      'sources': [
+        'interface/ilbc.h',
+        'abs_quant.c',
+        'abs_quant_loop.c',
+        'augmented_cb_corr.c',
+        'bw_expand.c',
+        'cb_construct.c',
+        'cb_mem_energy.c',
+        'cb_mem_energy_augmentation.c',
+        'cb_mem_energy_calc.c',
+        'cb_search.c',
+        'cb_search_core.c',
+        'cb_update_best_index.c',
+        'chebyshev.c',
+        'comp_corr.c',
+        'constants.c',
+        'create_augmented_vec.c',
+        'decode.c',
+        'decode_residual.c',
+        'decoder_interpolate_lsf.c',
+        'do_plc.c',
+        'encode.c',
+        'energy_inverse.c',
+        'enh_upsample.c',
+        'enhancer.c',
+        'enhancer_interface.c',
+        'filtered_cb_vecs.c',
+        'frame_classify.c',
+        'gain_dequant.c',
+        'gain_quant.c',
+        'get_cd_vec.c',
+        'get_lsp_poly.c',
+        'get_sync_seq.c',
+        'hp_input.c',
+        'hp_output.c',
+        'ilbc.c',
+        'index_conv_dec.c',
+        'index_conv_enc.c',
+        'init_decode.c',
+        'init_encode.c',
+        'interpolate.c',
+        'interpolate_samples.c',
+        'lpc_encode.c',
+        'lsf_check.c',
+        'lsf_interpolate_to_poly_dec.c',
+        'lsf_interpolate_to_poly_enc.c',
+        'lsf_to_lsp.c',
+        'lsf_to_poly.c',
+        'lsp_to_lsf.c',
+        'my_corr.c',
+        'nearest_neighbor.c',
+        'pack_bits.c',
+        'poly_to_lsf.c',
+        'poly_to_lsp.c',
+        'refiner.c',
+        'simple_interpolate_lsf.c',
+        'simple_lpc_analysis.c',
+        'simple_lsf_dequant.c',
+        'simple_lsf_quant.c',
+        'smooth.c',
+        'smooth_out_data.c',
+        'sort_sq.c',
+        'split_vq.c',
+        'state_construct.c',
+        'state_search.c',
+        'swap_bytes.c',
+        'unpack_bits.c',
+        'vq3.c',
+        'vq4.c',
+        'window32_w32.c',
+        'xcorr_coef.c',
+        'abs_quant.h',
+        'abs_quant_loop.h',
+        'augmented_cb_corr.h',
+        'bw_expand.h',
+        'cb_construct.h',
+        'cb_mem_energy.h',
+        'cb_mem_energy_augmentation.h',
+        'cb_mem_energy_calc.h',
+        'cb_search.h',
+        'cb_search_core.h',
+        'cb_update_best_index.h',
+        'chebyshev.h',
+        'comp_corr.h',
+        'constants.h',
+        'create_augmented_vec.h',
+        'decode.h',
+        'decode_residual.h',
+        'decoder_interpolate_lsf.h',
+        'do_plc.h',
+        'encode.h',
+        'energy_inverse.h',
+        'enh_upsample.h',
+        'enhancer.h',
+        'enhancer_interface.h',
+        'filtered_cb_vecs.h',
+        'frame_classify.h',
+        'gain_dequant.h',
+        'gain_quant.h',
+        'get_cd_vec.h',
+        'get_lsp_poly.h',
+        'get_sync_seq.h',
+        'hp_input.h',
+        'hp_output.h',
+        'defines.h',
+        'index_conv_dec.h',
+        'index_conv_enc.h',
+        'init_decode.h',
+        'init_encode.h',
+        'interpolate.h',
+        'interpolate_samples.h',
+        'lpc_encode.h',
+        'lsf_check.h',
+        'lsf_interpolate_to_poly_dec.h',
+        'lsf_interpolate_to_poly_enc.h',
+        'lsf_to_lsp.h',
+        'lsf_to_poly.h',
+        'lsp_to_lsf.h',
+        'my_corr.h',
+        'nearest_neighbor.h',
+        'pack_bits.h',
+        'poly_to_lsf.h',
+        'poly_to_lsp.h',
+        'refiner.h',
+        'simple_interpolate_lsf.h',
+        'simple_lpc_analysis.h',
+        'simple_lsf_dequant.h',
+        'simple_lsf_quant.h',
+        'smooth.h',
+        'smooth_out_data.h',
+        'sort_sq.h',
+        'split_vq.h',
+        'state_construct.h',
+        'state_search.h',
+        'swap_bytes.h',
+        'unpack_bits.h',
+        'vq3.h',
+        'vq4.h',
+        'window32_w32.h',
+        'xcorr_coef.h',
+     ], # sources
+    }, # iLBC
+  ], # targets
+  # The test binary is only built in standalone (non-Chromium) configurations.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [  
+        {
+          'target_name': 'iLBCtest',
+          'type': 'executable',
+          'dependencies': [
+            'iLBC',
+          ],
+          'sources': [
+            'test/iLBC_test.c',
+          ],
+        }, # iLBCtest
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_dec.c b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_dec.c
new file mode 100644
index 0000000..0d6346a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_dec.c
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvDec.c
+
+******************************************************************/
+
+#include "defines.h"
+
+void WebRtcIlbcfix_IndexConvDec(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                ){
+  int k;
+
+  /* Indexes 4 and 5 hold the second and third stage codebook entries for
+     the first 40-sample sub-block; map them back onto the same lag range
+     as the first stage. Inverse of WebRtcIlbcfix_IndexConvEnc. */
+  for (k = 4; k < 6; k++) {
+    WebRtc_Word16 idx = index[k];
+    if ((idx >= 44) && (idx < 108)) {
+      index[k] = (WebRtc_Word16)(idx + 64);
+    } else if ((idx >= 108) && (idx < 128)) {
+      index[k] = (WebRtc_Word16)(idx + 128);
+    } else {
+      /* Index out of expected range: left unchanged (error case). */
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_dec.h b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_dec.h
new file mode 100644
index 0000000..f29ee23
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_dec.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvDec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Convert the packed codebook indexes back to decoder form
+ *  (inverse of WebRtcIlbcfix_IndexConvEnc).
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_IndexConvDec(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_enc.c b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_enc.c
new file mode 100644
index 0000000..cbc04b6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_enc.c
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvEnc.c
+
+******************************************************************/
+
+#include "defines.h"
+/*----------------------------------------------------------------*
+ *  Convert the codebook indexes to make the search easier
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_IndexConvEnc(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                ){
+  int k;
+
+  /* Indexes 4 and 5 (second/third stage of the first sub-block) are stored
+     lag-wise like the 8-bit first stage; remap them into a 7-bit range so
+     they pack into 7 bits. Inverse of WebRtcIlbcfix_IndexConvDec. */
+  for (k = 4; k < 6; k++) {
+    WebRtc_Word16 idx = index[k];
+    if ((idx >= 108) && (idx < 172)) {
+      index[k] = (WebRtc_Word16)(idx - 64);
+    } else if (idx >= 236) {
+      index[k] = (WebRtc_Word16)(idx - 128);
+    } else {
+      /* Index out of expected range: left unchanged (error case). */
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_enc.h b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_enc.h
new file mode 100644
index 0000000..d28a6e2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/index_conv_enc.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvEnc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Convert the codebook indexes to make the search easier
+ *  (inverse of WebRtcIlbcfix_IndexConvDec).
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_IndexConvEnc(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/init_decode.c b/trunk/src/modules/audio_coding/codecs/ilbc/init_decode.c
new file mode 100644
index 0000000..b654f1e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/init_decode.c
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+	iLBC Speech Coder ANSI-C Source Code
+
+	WebRtcIlbcfix_InitDecode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of decoder instance.
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitDecode(		/* (o) Number of decoded samples */
+    iLBC_Dec_Inst_t *iLBCdec_inst,	/* (i/o) Decoder instance */
+    WebRtc_Word16 mode,					/* (i) frame size mode */
+    int use_enhancer           /* (i) 1 to use enhancer
+                                  0 to run without enhancer */
+                                                ) {
+  int n;
+
+  iLBCdec_inst->mode = mode;
+
+  /* Frame-size (20/30 ms) dependent parameters. */
+  switch (mode) {
+    case 30:
+      iLBCdec_inst->blockl = BLOCKL_30MS;
+      iLBCdec_inst->nsub = NSUB_30MS;
+      iLBCdec_inst->nasub = NASUB_30MS;
+      iLBCdec_inst->lpc_n = LPC_N_30MS;
+      iLBCdec_inst->no_of_bytes = NO_OF_BYTES_30MS;
+      iLBCdec_inst->no_of_words = NO_OF_WORDS_30MS;
+      iLBCdec_inst->state_short_len = STATE_SHORT_LEN_30MS;
+      break;
+    case 20:
+      iLBCdec_inst->blockl = BLOCKL_20MS;
+      iLBCdec_inst->nsub = NSUB_20MS;
+      iLBCdec_inst->nasub = NASUB_20MS;
+      iLBCdec_inst->lpc_n = LPC_N_20MS;
+      iLBCdec_inst->no_of_bytes = NO_OF_BYTES_20MS;
+      iLBCdec_inst->no_of_words = NO_OF_WORDS_20MS;
+      iLBCdec_inst->state_short_len = STATE_SHORT_LEN_20MS;
+      break;
+    default:
+      return -1;  /* unsupported frame-size mode */
+  }
+
+  /* Start from the mean LSF representation. */
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER);
+
+  /* Clear the synthesis filter memory. */
+  WebRtcSpl_MemSetW16(iLBCdec_inst->syntMem, 0, LPC_FILTERORDER);
+
+  /* Old synthesis filters start as {1.0 0.0 ... 0.0}, where 4096
+     represents the leading 1.0 coefficient. */
+  WebRtcSpl_MemSetW16(iLBCdec_inst->old_syntdenum, 0, ((LPC_FILTERORDER + 1)*NSUB_MAX));
+  for (n = 0; n < NSUB_MAX; n++) {
+    iLBCdec_inst->old_syntdenum[n*(LPC_FILTERORDER+1)] = 4096;
+  }
+
+  /* Neutral starting state for the packet-loss concealment. */
+  iLBCdec_inst->last_lag = 20;
+  iLBCdec_inst->consPLICount = 0;
+  iLBCdec_inst->prevPLI = 0;
+  iLBCdec_inst->perSquare = 0;
+  iLBCdec_inst->prevLag = 120;
+  iLBCdec_inst->prevLpc[0] = 4096;
+  WebRtcSpl_MemSetW16(iLBCdec_inst->prevLpc+1, 0, LPC_FILTERORDER);
+  WebRtcSpl_MemSetW16(iLBCdec_inst->prevResidual, 0, BLOCKL_MAX);
+
+  /* Fixed seed for the random number generator. */
+  iLBCdec_inst->seed = 777;
+
+  /* Zero the high-pass filter state. */
+  WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemx, 0, 2);
+  WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemy, 0, 4);
+
+  /* Enhancer configuration and state. */
+  iLBCdec_inst->use_enhancer = use_enhancer;
+  WebRtcSpl_MemSetW16(iLBCdec_inst->enh_buf, 0, (ENH_BUFL+ENH_BUFL_FILTEROVERHEAD));
+  for (n = 0; n < ENH_NBLOCKS_TOT; n++) {
+    iLBCdec_inst->enh_period[n] = 160; /* Q(-4) */
+  }
+
+  iLBCdec_inst->prev_enh_pl = 0;
+
+  return (iLBCdec_inst->blockl);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/init_decode.h b/trunk/src/modules/audio_coding/codecs/ilbc/init_decode.h
new file mode 100644
index 0000000..3452f34
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/init_decode.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InitDecode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of decoder instance.
+ *  Returns the block length in samples for the chosen mode, or -1
+ *  if the mode is not 20 or 30 (ms).
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitDecode(  /* (o) Number of decoded samples */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) Decoder instance */
+    WebRtc_Word16 mode,     /* (i) frame size mode */
+    int use_enhancer           /* (i) 1 to use enhancer
+                                  0 to run without enhancer */
+                                         );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/init_encode.c b/trunk/src/modules/audio_coding/codecs/ilbc/init_encode.c
new file mode 100644
index 0000000..e034bb0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/init_encode.c
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InitEncode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of encoder instance.
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
+    iLBC_Enc_Inst_t *iLBCenc_inst,     /* (i/o) Encoder instance */
+    WebRtc_Word16 mode     /* (i) frame size mode */
+                                        ){
+  iLBCenc_inst->mode = mode;
+
+  /* Frame-size (20/30 ms) dependent parameters. */
+  switch (mode) {
+    case 30:
+      iLBCenc_inst->blockl = BLOCKL_30MS;
+      iLBCenc_inst->nsub = NSUB_30MS;
+      iLBCenc_inst->nasub = NASUB_30MS;
+      iLBCenc_inst->lpc_n = LPC_N_30MS;
+      iLBCenc_inst->no_of_bytes = NO_OF_BYTES_30MS;
+      iLBCenc_inst->no_of_words = NO_OF_WORDS_30MS;
+      iLBCenc_inst->state_short_len = STATE_SHORT_LEN_30MS;
+      break;
+    case 20:
+      iLBCenc_inst->blockl = BLOCKL_20MS;
+      iLBCenc_inst->nsub = NSUB_20MS;
+      iLBCenc_inst->nasub = NASUB_20MS;
+      iLBCenc_inst->lpc_n = LPC_N_20MS;
+      iLBCenc_inst->no_of_bytes = NO_OF_BYTES_20MS;
+      iLBCenc_inst->no_of_words = NO_OF_WORDS_20MS;
+      iLBCenc_inst->state_short_len = STATE_SHORT_LEN_20MS;
+      break;
+    default:
+      return -1;  /* unsupported frame-size mode */
+  }
+
+  /* Clear the analysis buffers; previous LSF and LSP start at the mean LSF. */
+  WebRtcSpl_MemSetW16(iLBCenc_inst->anaMem, 0, LPC_FILTERORDER);
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lsfold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER);
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lsfdeqold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER);
+  WebRtcSpl_MemSetW16(iLBCenc_inst->lpc_buffer, 0, LPC_LOOKBACK + BLOCKL_MAX);
+
+  /* Zero the high-pass filter state. */
+  WebRtcSpl_MemSetW16(iLBCenc_inst->hpimemx, 0, 2);
+  WebRtcSpl_MemSetW16(iLBCenc_inst->hpimemy, 0, 4);
+
+#ifdef SPLIT_10MS
+  /* Clear the history used when feeding the encoder in 10 ms pieces. */
+  WebRtcSpl_MemSetW16(iLBCenc_inst->past_samples, 0, 160);
+  iLBCenc_inst->section = 0;
+#endif
+
+  return (iLBCenc_inst->no_of_bytes);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/init_encode.h b/trunk/src/modules/audio_coding/codecs/ilbc/init_encode.h
new file mode 100644
index 0000000..f1d1858
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/init_encode.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InitEncode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of encoder instance.
+ *  Returns the number of bytes per encoded frame for the chosen
+ *  mode, or -1 if the mode is not 20 or 30 (ms).
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitEncode(  /* (o) Number of bytes encoded */
+    iLBC_Enc_Inst_t *iLBCenc_inst, /* (i/o) Encoder instance */
+    WebRtc_Word16 mode     /* (i) frame size mode */
+                                         );
+
+#endif  /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/interface/ilbc.h b/trunk/src/modules/audio_coding/codecs/ilbc/interface/ilbc.h
new file mode 100644
index 0000000..ca1d39e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/interface/ilbc.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * ilbc.h
+ *
+ * This header file contains all of the API's for iLBC.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_INTERFACE_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_INTERFACE_ILBC_H_
+
+/*
+ * Define the fixpoint numeric formats
+ */
+
+#include "typedefs.h"
+
+/*
+ * Solution to support multiple instances
+ * Customer has to cast instance to proper type
+ */
+
+typedef struct iLBC_encinst_t_ iLBC_encinst_t;
+
+typedef struct iLBC_decinst_t_ iLBC_decinst_t;
+
+/*
+ * Comfort noise constants
+ */
+
+#define ILBC_SPEECH 1
+#define ILBC_CNG  2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+  /****************************************************************************
+   * WebRtcIlbcfix_XxxAssign(...)
+   *
+   * These functions assigns the encoder/decoder instance to the specified
+   * memory location
+   *
+   * Input:
+   *      - XXX_xxxinst       : Pointer to created instance that should be
+   *                            assigned
+   *      - ILBCXXX_inst_Addr : Pointer to the desired memory space
+   *      - size              : The size that this structure occupies (in Word16)
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst,
+					    WebRtc_Word16 *ILBCENC_inst_Addr,
+					    WebRtc_Word16 *size);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst,
+					    WebRtc_Word16 *ILBCDEC_inst_Addr,
+					    WebRtc_Word16 *size);
+
+
+  /****************************************************************************
+   * WebRtcIlbcfix_XxxCreate(...)
+   *
+   * These functions create an instance of the specified structure
+   *
+   * Input:
+   *      - XXX_inst          : Pointer to created instance that should be created
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_XxxFree(...)
+   *
+   * These functions frees the dynamic memory of a specified instance
+   *
+   * Input:
+   *      - XXX_inst          : Pointer to created instance that should be freed
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst);
+
+
+  /****************************************************************************
+   * WebRtcIlbcfix_EncoderInit(...)
+   *
+   * This function initializes a iLBC instance
+   *
+   * Input:
+   *      - iLBCenc_inst      : iLBC encoder instance that should be
+   *                            initialized
+   *      - frameLen          : The frame length of the codec 20/30 (ms)
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst,
+					  WebRtc_Word16 frameLen);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_Encode(...)
+   *
+   * This function encodes one or more iLBC frames. Input speech length
+   * has to be a multiple of the frame length.
+   *
+   * Input:
+   *      - iLBCenc_inst      : iLBC instance, i.e. the user that should encode
+   *                            a package
+   *      - speechIn          : Input speech vector
+   *      - len               : Samples in speechIn (160, 240, 320 or 480)
+   *
+   * Output:
+   *  - encoded               : The encoded data vector
+   *
+   * Return value             : >0 - Length (in bytes) of coded data
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst,
+				     WebRtc_Word16 *speechIn,
+				     WebRtc_Word16 len,
+				     WebRtc_Word16 *encoded);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_DecoderInit(...)
+   *
+   * This function initializes a iLBC instance with either 20 or 30 ms frames
+   * Alternatively the WebRtcIlbcfix_DecoderInit_XXms can be used. Then it's
+   * not needed to specify the frame length with a variable.
+   *
+   * Input:
+   *      - iLBC_decinst_t    : iLBC decoder instance that should be
+   *                            initialized
+   *      - frameLen          : The frame length of the codec 20/30 (ms)
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst,
+					  WebRtc_Word16 frameLen);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst);
+  WebRtc_Word16 WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_Decode(...)
+   *
+   * This function decodes a packet with iLBC frame(s). Output speech length
+   * will be a multiple of 160 or 240 samples ((160 or 240)*frames/packet).
+   *
+   * Input:
+   *      - iLBCdec_inst      : iLBC instance, i.e. the user that should decode
+   *                            a packet
+   *      - encoded           : Encoded iLBC frame(s)
+   *      - len               : Bytes in encoded vector
+   *
+   * Output:
+   *      - decoded           : The decoded vector
+   *      - speechType        : 1 normal, 2 CNG
+   *
+   * Return value             : >0 - Samples in decoded vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
+				     WebRtc_Word16* encoded,
+				     WebRtc_Word16 len,
+				     WebRtc_Word16 *decoded,
+				     WebRtc_Word16 *speechType);
+  WebRtc_Word16 WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
+					 WebRtc_Word16 *encoded,
+					 WebRtc_Word16 len,
+					 WebRtc_Word16 *decoded,
+					 WebRtc_Word16 *speechType);
+  WebRtc_Word16 WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
+					 WebRtc_Word16 *encoded,
+					 WebRtc_Word16 len,
+					 WebRtc_Word16 *decoded,
+					 WebRtc_Word16 *speechType);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_DecodePlc(...)
+   *
+   * This function conducts PLC for iLBC frame(s). Output speech length
+   * will be a multiple of 160 or 240 samples.
+   *
+   * Input:
+   *      - iLBCdec_inst      : iLBC instance, i.e. the user that should perform
+   *                            a PLC
+   *      - noOfLostFrames    : Number of PLC frames to produce
+   *
+   * Output:
+   *      - decoded           : The "decoded" vector
+   *
+   * Return value             : >0 - Samples in decoded PLC vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst,
+					WebRtc_Word16 *decoded,
+					WebRtc_Word16 noOfLostFrames);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_NetEqPlc(...)
+   *
+   * This function updates the decoder when a packet loss has occurred, but it
+   * does not produce any PLC data. Function can be used if another PLC method
+   * is used (i.e NetEq).
+   *
+   * Input:
+   *      - iLBCdec_inst      : iLBC instance that should be updated
+   *      - noOfLostFrames    : Number of lost frames
+   *
+   * Output:
+   *      - decoded           : The "decoded" vector (nothing in this case)
+   *
+   * Return value             : >0 - Samples in decoded PLC vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst,
+				       WebRtc_Word16 *decoded,
+				       WebRtc_Word16 noOfLostFrames);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_version(...)
+   *
+   * This function returns the version number of iLBC
+   *
+   * Output:
+   *      - version           : Version number of iLBC (maximum 20 char)
+   */
+
+  void WebRtcIlbcfix_version(char *version);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/interpolate.c b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate.c
new file mode 100644
index 0000000..11cb33c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate.c
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Interpolate.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation between vectors
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Interpolate(
+    WebRtc_Word16 *out, /* (o) output vector */
+    WebRtc_Word16 *in1, /* (i) first input vector */
+    WebRtc_Word16 *in2, /* (i) second input vector */
+    WebRtc_Word16 coef, /* (i) weight coefficient in Q14 */
+    WebRtc_Word16 length)  /* (i) number of samples in vectors */
+{
+  int i;
+  WebRtc_Word16 invcoef;
+
+  /*
+    Performs the operation out[i] = in1[i]*coef + (1-coef)*in2[i] (with rounding)
+  */
+
+  invcoef = 16384 - coef; /* 16384 = 1.0 (Q14)*/
+  for (i = 0; i < length; i++) {
+    out[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+        (WEBRTC_SPL_MUL_16_16(coef, in1[i]) + WEBRTC_SPL_MUL_16_16(invcoef, in2[i]))+8192,
+        14);  /* 8192 = 0.5 in Q14, added for rounding before the >>14 */
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/interpolate.h b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate.h
new file mode 100644
index 0000000..a12021c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Interpolate.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation between vectors: out[i] = coef*in1[i] + (1-coef)*in2[i]
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Interpolate(
+    WebRtc_Word16 *out, /* (o) output vector */
+    WebRtc_Word16 *in1, /* (i) first input vector */
+    WebRtc_Word16 *in2, /* (i) second input vector */
+    WebRtc_Word16 coef, /* (i) weight coefficient in Q14 */
+    WebRtc_Word16 length); /* (i) number of samples in vectors */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/interpolate_samples.c b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate_samples.c
new file mode 100644
index 0000000..31eb52e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate_samples.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InterpolateSamples.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_InterpolateSamples(
+    WebRtc_Word16 *interpSamples, /* (o) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 lMem    /* (i) Length of the CB memory */
+                                      ) {
+  WebRtc_Word16 *ppi, *ppo, i, j, temp1, temp2;
+  WebRtc_Word16 *tmpPtr;
+
+  /* Calculate the 20 vectors of interpolated samples (4 samples each) used
+     in the augmented codebooks for lag 20 to 39 (kAlpha weights in Q15) */
+  tmpPtr = interpSamples;
+  for (j=0; j<20; j++) {
+    temp1 = 0;
+    temp2 = 3;
+    ppo = CBmem+lMem-4;  /* last 4 samples of the CB memory */
+    ppi = CBmem+lMem-j-24;  /* same 4 positions, shifted back by lag j+20 */
+    for (i=0; i<4; i++) {
+
+      /* Weighted sum of one sample from each segment (kAlpha in Q15) */
+      *tmpPtr++ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp2],*ppo, 15) +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp1], *ppi, 15);
+
+      ppo++;
+      ppi++;
+      temp1++;
+      temp2--;
+    }
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/interpolate_samples.h b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate_samples.h
new file mode 100644
index 0000000..5c98aaf
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/interpolate_samples.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InterpolateSamples.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Construct the interpolated samples (20 groups of 4, lags 20-39) for the Augmented CB
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_InterpolateSamples(
+    WebRtc_Word16 *interpSamples, /* (o) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 lMem    /* (i) Length of the CB memory */
+                                      );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lpc_encode.c b/trunk/src/modules/audio_coding/codecs/ilbc/lpc_encode.c
new file mode 100644
index 0000000..73d67a0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lpc_encode.c
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LpcEncode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "simple_lpc_analysis.h"
+#include "simple_interpolate_lsf.h"
+#include "simple_lsf_quant.h"
+#include "lsf_check.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lpc encoder
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LpcEncode(
+    WebRtc_Word16 *syntdenum,  /* (i/o) synthesis filter coefficients
+                                           before/after encoding */
+    WebRtc_Word16 *weightdenum, /* (i/o) weighting denumerator coefficients
+                                   before/after encoding */
+    WebRtc_Word16 *lsf_index,  /* (o) lsf quantization index */
+    WebRtc_Word16 *data,   /* (i) Speech to do LPC analysis on */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                              ) {
+  /* Stack based */
+  WebRtc_Word16 lsf[LPC_FILTERORDER * LPC_N_MAX];
+  WebRtc_Word16 lsfdeq[LPC_FILTERORDER * LPC_N_MAX];
+
+  /* Calculate LSF's from the input speech */
+  WebRtcIlbcfix_SimpleLpcAnalysis(lsf, data, iLBCenc_inst);
+
+  /* Quantize the LSF's */
+  WebRtcIlbcfix_SimpleLsfQ(lsfdeq, lsf_index, lsf, iLBCenc_inst->lpc_n);
+
+  /* Stabilize the dequantized LSF's if needed */
+  WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCenc_inst->lpc_n);
+
+  /* Calculate the synthesis and weighting filter coefficients from
+     the optimal LSF and the dequantized LSF */
+  WebRtcIlbcfix_SimpleInterpolateLsf(syntdenum, weightdenum,
+                                     lsf, lsfdeq, iLBCenc_inst->lsfold,
+                                     iLBCenc_inst->lsfdeqold, LPC_FILTERORDER, iLBCenc_inst);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lpc_encode.h b/trunk/src/modules/audio_coding/codecs/ilbc/lpc_encode.h
new file mode 100644
index 0000000..36967a3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lpc_encode.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LpcEncode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc encoder (LPC analysis + LSF quantization/stabilization + interpolation)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LpcEncode(
+    WebRtc_Word16 *syntdenum,  /* (i/o) synthesis filter coefficients
+                                  before/after encoding */
+    WebRtc_Word16 *weightdenum, /* (i/o) weighting denumerator coefficients
+                                   before/after encoding */
+    WebRtc_Word16 *lsf_index,  /* (o) lsf quantization index */
+    WebRtc_Word16 *data,   /* (i) Speech to do LPC analysis on */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                             );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_check.c b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_check.c
new file mode 100644
index 0000000..7097d74
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_check.c
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfCheck.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  check for stability of lsf coefficients
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_LsfCheck(
+    WebRtc_Word16 *lsf, /* LSF parameters */
+    int dim, /* dimension of LSF */
+    int NoAn)  /* No of analysis per frame */
+{
+  int k,n,m, Nit=2, change=0,pos;
+  const WebRtc_Word16 eps=319;  /* 0.039 in Q13 (50 Hz)*/
+  const WebRtc_Word16 eps2=160;  /* eps/2.0 in Q13;*/
+  const WebRtc_Word16 maxlsf=25723; /* 3.14; (4000 Hz)*/
+  const WebRtc_Word16 minlsf=82;  /* 0.01; (0 Hz)*/
+
+  /* LSF separation check*/
+  for (n=0;n<Nit;n++) {  /* Run the separation check Nit (=2) times */
+    for (m=0;m<NoAn;m++) { /* Number of analyses per frame */
+      for (k=0;k<(dim-1);k++) {
+        pos=m*dim+k;
+
+        /* Separate coefficients with a safety margin of 50 Hz */
+        if ((lsf[pos+1]-lsf[pos])<eps) {
+
+          if (lsf[pos+1]<lsf[pos]) {
+            lsf[pos+1]= lsf[pos]+eps2;
+            lsf[pos]= lsf[pos+1]-eps2;
+          } else {
+            lsf[pos]-=eps2;
+            lsf[pos+1]+=eps2;
+          }
+          change=1;
+        }
+
+        /* Limit minimum and maximum LSF */
+        if (lsf[pos]<minlsf) {
+          lsf[pos]=minlsf;
+          change=1;
+        }
+
+        if (lsf[pos]>maxlsf) {
+          lsf[pos]=maxlsf;
+          change=1;
+        }
+      }
+    }
+  }
+
+  return change;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_check.h b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_check.h
new file mode 100644
index 0000000..830bbed
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_check.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfCheck.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  enforce LSF stability (min separation and range); returns 1 if any lsf changed
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_LsfCheck(
+    WebRtc_Word16 *lsf, /* LSF parameters */
+    int dim, /* dimension of LSF */
+    int NoAn); /* No of analysis per frame */
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
new file mode 100644
index 0000000..3bb23d0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LspInterpolate2PolyDec.c
+
+******************************************************************/
+
+#include "interpolate.h"
+#include "lsf_to_poly.h"
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation of lsf coefficients for the decoder
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LspInterpolate2PolyDec(
+    WebRtc_Word16 *a,   /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1,  /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2,  /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef,  /* (i) weighting coefficient to use between
+                                   lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length  /* (i) length of coefficient vectors */
+                                          ){
+  WebRtc_Word16 lsftmp[LPC_FILTERORDER];
+
+  /* Interpolate LSF: lsftmp = coef*lsf1 + (1-coef)*lsf2 (Q13) */
+  WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length);
+
+  /* Convert the interpolated LSF to lpc coefficients a (Q12) */
+  WebRtcIlbcfix_Lsf2Poly(a, lsftmp);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
new file mode 100644
index 0000000..23fe3a7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LspInterpolate2PolyDec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation of lsf coefficients for the decoder, then lsf-to-lpc conversion
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LspInterpolate2PolyDec(
+    WebRtc_Word16 *a,   /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1,  /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2,  /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef,  /* (i) weighting coefficient to use between
+                                   lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length  /* (i) length of coefficient vectors */
+                                          );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
new file mode 100644
index 0000000..3b0a34d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfInterpolate2PloyEnc.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "interpolate.h"
+#include "lsf_to_poly.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator and conversion from lsf to a coefficients
+ *  (subroutine to SimpleInterpolateLSF)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
+    WebRtc_Word16 *a,  /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+                           lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length /* (i) length of coefficient vectors */
+                                          ) {
+  /* Stack based */
+  WebRtc_Word16 lsftmp[LPC_FILTERORDER];
+
+  /* Interpolate LSF: lsftmp = coef*lsf1 + (1-coef)*lsf2 (Q13) */
+  WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length);
+
+  /* Compute the filter coefficients from the LSF */
+  WebRtcIlbcfix_Lsf2Poly(a, lsftmp);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
new file mode 100644
index 0000000..1bbbb80
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfInterpolate2PloyEnc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator and conversion from lsf to a coefficients
+ *  (subroutine to SimpleInterpolateLSF)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
+    WebRtc_Word16 *a,  /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+                           lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length /* (i) length of coefficient vectors */
+                                          );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
new file mode 100644
index 0000000..84278a4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Lsp.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from lsf to lsp coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsf2Lsp(
+    WebRtc_Word16 *lsf, /* (i) lsf in Q13 values between 0 and pi */
+    WebRtc_Word16 *lsp, /* (o) lsp in Q15 values between -1 and 1 */
+    WebRtc_Word16 m  /* (i) number of coefficients */
+                           ) {
+  WebRtc_Word16 i, k;
+  WebRtc_Word16 diff; /* difference, which is used for the
+                           linear approximation (Q8) */
+  WebRtc_Word16 freq; /* normalized frequency in Q15 (0..1) */
+  WebRtc_Word32 tmpW32;
+
+  for(i=0; i<m; i++)
+  {
+    freq = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(lsf[i], 20861, 15);
+    /* 20861: 1.0/(2.0*PI) in Q17 */
+    /*
+       Upper 8 bits give the index k and
+       Lower 8 bits give the difference, which needs
+       to be approximated linearly
+    */
+    k = WEBRTC_SPL_RSHIFT_W16(freq, 8);
+    diff = (freq&0x00ff);
+
+    /* Guard against reading outside the 64-entry cosine table */
+
+    if (k>63) {
+      k = 63;
+    }
+
+    /* Linear approximation: lsp[i] = kCos[k] + kCosDerivative[k]*diff */
+    tmpW32 = WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kCosDerivative[k], diff);
+    lsp[i] = WebRtcIlbcfix_kCos[k]+(WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(tmpW32, 12));
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
new file mode 100644
index 0000000..db6549b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Lsp.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from lsf (Q13, 0..pi) to lsp (Q15, -1..1) coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsf2Lsp(
+    WebRtc_Word16 *lsf, /* (i) lsf in Q13 values between 0 and pi */
+    WebRtc_Word16 *lsp, /* (o) lsp in Q15 values between -1 and 1 */
+    WebRtc_Word16 m     /* (i) number of coefficients */
+                           );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.c b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
new file mode 100644
index 0000000..f1c4a9e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Poly.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "lsf_to_lsp.h"
+#include "get_lsp_poly.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_Lsf2Poly(
+    WebRtc_Word16 *a,     /* (o) predictor coefficients (order = 10) in Q12 */
+    WebRtc_Word16 *lsf    /* (i) line spectral frequencies in Q13 */
+                            ) {
+  WebRtc_Word32 f[2][6]; /* f[0][] and f[1][] corresponds to
+                            F1(z) and F2(z) respectively */
+  WebRtc_Word32 *f1ptr, *f2ptr;
+  WebRtc_Word16 *a1ptr, *a2ptr;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word16 lsp[10];
+  int i;
+
+  /* Convert lsf to lsp */
+  WebRtcIlbcfix_Lsf2Lsp(lsf, lsp, LPC_FILTERORDER);
+
+  /* Get F1(z) and F2(z) from the lsp */
+  f1ptr=f[0];
+  f2ptr=f[1];
+  WebRtcIlbcfix_GetLspPoly(&lsp[0],f1ptr);
+  WebRtcIlbcfix_GetLspPoly(&lsp[1],f2ptr);
+
+  /* for i = 5 down to 1
+     Compute f1[i] += f1[i-1];  (F1(z) carries the (1+z^-1) factor)
+     and     f2[i] -= f2[i-1];  (F2(z) carries the (1-z^-1) factor)
+  */
+  f1ptr=&f[0][5];
+  f2ptr=&f[1][5];
+  for (i=5; i>0; i--)
+  {
+    (*f1ptr) += (*(f1ptr-1));
+    (*f2ptr) -= (*(f2ptr-1));
+    f1ptr--;
+    f2ptr--;
+  }
+
+  /* Get the A(z) coefficients
+     a[0] = 1.0
+     for i = 1 to 5
+     a[i] = (f1[i] + f2[i] + round)>>13;
+     for i = 1 to 5
+     a[11-i] = (f1[i] - f2[i] + round)>>13;
+  */
+  a[0]=4096;  /* 1.0 in Q12 */
+  a1ptr=&a[1];
+  a2ptr=&a[10];
+  f1ptr=&f[0][1];
+  f2ptr=&f[1][1];
+  for (i=5; i>0; i--)
+  {
+    tmpW32 = (*f1ptr) + (*f2ptr);
+    (*a1ptr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
+
+    tmpW32 = (*f1ptr) - (*f2ptr);
+    (*a2ptr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
+
+    a1ptr++;
+    a2ptr--;
+    f1ptr++;
+    f2ptr++;
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.h b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
new file mode 100644
index 0000000..a00693b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Poly.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Convert from LSF coefficients (Q13) to A coefficients (Q12)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsf2Poly(
+    WebRtc_Word16 *a,     /* (o) predictor coefficients (order = 10) in Q12 */
+    WebRtc_Word16 *lsf    /* (i) line spectral frequencies in Q13 */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c b/trunk/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
new file mode 100644
index 0000000..134afbb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsp2Lsf.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from LSP coefficients to LSF coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsp2Lsf(
+    WebRtc_Word16 *lsp, /* (i) lsp vector -1...+1 in Q15 */
+    WebRtc_Word16 *lsf, /* (o) Lsf vector 0...Pi in Q13
+                           (ordered, so that lsf[i]<lsf[i+1]) */
+    WebRtc_Word16 m  /* (i) Number of coefficients */
+                           )
+{
+  WebRtc_Word16 i, k;
+  WebRtc_Word16 diff; /* diff between table value and desired value (Q15) */
+  WebRtc_Word16 freq; /* lsf/(2*pi) (Q16) */
+  WebRtc_Word16 *lspPtr, *lsfPtr, *cosTblPtr;
+  WebRtc_Word16 tmp;
+
+  /* set the index to maximum index value in WebRtcIlbcfix_kCos */
+  k = 63;
+
+  /*
+     Start with the highest LSP and then work the way down
+     For each LSP the lsf is calculated by first order approximation
+     of the acos(x) function
+  */
+  lspPtr = &lsp[9]; /* NOTE(review): start offsets hard-code order 10 although m is a parameter -- confirm m==10 for all callers */
+  lsfPtr = &lsf[9];
+  cosTblPtr=(WebRtc_Word16*)&WebRtcIlbcfix_kCos[k];
+  for(i=m-1; i>=0; i--)
+  {
+    /*
+       locate value in the table, which is just above lsp[i],
+       basically an approximation to acos(x)
+    */
+    while( (((WebRtc_Word32)(*cosTblPtr)-(*lspPtr)) < 0)&&(k>0) )
+    {
+      k-=1;
+      cosTblPtr--;
+    }
+
+    /* Calculate diff, which is used in the linear approximation of acos(x) */
+    diff = (*lspPtr)-(*cosTblPtr);
+
+    /*
+       The linear approximation of acos(lsp[i]) :
+       acos(lsp[i])= k*512 + (WebRtcIlbcfix_kAcosDerivative[ind]*offset >> 11)
+    */
+
+    /* tmp (linear offset) in Q16 */
+    tmp = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAcosDerivative[k],diff, 11);
+
+    /* freq in Q16 */
+    freq = (WebRtc_Word16)WEBRTC_SPL_LSHIFT_W16(k,9)+tmp;
+
+    /* lsf = freq*2*pi */
+    (*lsfPtr) = (WebRtc_Word16)(((WebRtc_Word32)freq*25736)>>15); /* 25736 = 2*pi in Q12; Q16*Q12>>15 -> Q13 */
+
+    lsfPtr--;
+    lspPtr--;
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h b/trunk/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
new file mode 100644
index 0000000..97ba7e4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsp2Lsf.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from LSP coefficients to LSF coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsp2Lsf(
+    WebRtc_Word16 *lsp, /* (i) lsp vector -1...+1 in Q15 */
+    WebRtc_Word16 *lsf, /* (o) Lsf vector 0...Pi in Q13
+                           (ordered, so that lsf[i]<lsf[i+1]) */
+    WebRtc_Word16 m  /* (i) Number of coefficients */
+                           );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/my_corr.c b/trunk/src/modules/audio_coding/codecs/ilbc/my_corr.c
new file mode 100644
index 0000000..2162205
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/my_corr.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_MyCorr.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * compute cross correlation between sequences
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_MyCorr(
+    WebRtc_Word32 *corr,  /* (o) correlation of seq1 and seq2 */
+    WebRtc_Word16 *seq1,  /* (i) first sequence */
+    WebRtc_Word16 dim1,  /* (i) dimension first seq1 */
+    const WebRtc_Word16 *seq2, /* (i) second sequence */
+    WebRtc_Word16 dim2   /* (i) dimension seq2 */
+                          ){
+  WebRtc_Word16 max, scale, loops;
+
+  /* Calculate correlation between the two sequences. Scale the
+     result of the multiplication to maximum 26 bits in order
+     to avoid overflow */
+  max=WebRtcSpl_MaxAbsValueW16(seq1, dim1); /* largest |sample| in seq1 */
+  scale=WebRtcSpl_GetSizeInBits(max);
+
+  scale = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(2,scale)-26); /* 2*bits(max)-26: right-shift needed to keep products within 26 bits */
+  if (scale<0) {
+    scale=0;
+  }
+
+  loops=dim1-dim2+1; /* number of correlation lags computed */
+
+  /* Calculate the cross correlations */
+  WebRtcSpl_CrossCorrelation(corr, (WebRtc_Word16*)seq2, seq1, dim2, loops, scale, 1);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/my_corr.h b/trunk/src/modules/audio_coding/codecs/ilbc/my_corr.h
new file mode 100644
index 0000000..f588c53
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/my_corr.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_MyCorr.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_MY_CORR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_MY_CORR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * compute cross correlation between sequences
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_MyCorr(
+    WebRtc_Word32 *corr,  /* (o) correlation of seq1 and seq2 */
+    WebRtc_Word16 *seq1,  /* (i) first sequence */
+    WebRtc_Word16 dim1,  /* (i) dimension first seq1 */
+    const WebRtc_Word16 *seq2, /* (i) second sequence */
+    WebRtc_Word16 dim2   /* (i) dimension seq2 */
+                          );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.c b/trunk/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
new file mode 100644
index 0000000..ea9e1eb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_NearestNeighbor.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Find index in array such that the array element with said
+ * index is the element of said array closest to "value"
+ * according to the squared-error criterion
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_NearestNeighbor(
+    WebRtc_Word16 *index, /* (o) index of array element closest to value */
+    WebRtc_Word16 *array, /* (i) data array (Q2) */
+    WebRtc_Word16 value, /* (i) value (Q2) */
+    WebRtc_Word16 arlength /* (i) dimension of data array (==8) */
+                                   ){
+  int i;
+  WebRtc_Word16 diff;
+  /* Stack based */
+  WebRtc_Word32 crit[8]; /* squared distances; caps arlength at 8 (see param comment) */
+
+  /* Calculate square distance */
+  for(i=0;i<arlength;i++){
+    diff=array[i]-value;
+    crit[i]=WEBRTC_SPL_MUL_16_16(diff, diff);
+  }
+
+  /* Find the minimum square distance */
+  *index=WebRtcSpl_MinIndexW32(crit, (WebRtc_Word16)arlength);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.h b/trunk/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
new file mode 100644
index 0000000..705e17a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_NearestNeighbor.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Find index in array such that the array element with said
+ * index is the element of said array closest to "value"
+ * according to the squared-error criterion
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_NearestNeighbor(
+    WebRtc_Word16 *index, /* (o) index of array element closest to value */
+    WebRtc_Word16 *array, /* (i) data array (Q2) */
+    WebRtc_Word16 value, /* (i) value (Q2) */
+    WebRtc_Word16 arlength /* (i) dimension of data array (==8) */
+                                   );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/pack_bits.c b/trunk/src/modules/audio_coding/codecs/ilbc/pack_bits.c
new file mode 100644
index 0000000..3990fbe
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/pack_bits.c
@@ -0,0 +1,251 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_PackBits.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  packing of bits into the bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_PackBits(
+    WebRtc_UWord16 *bitstream,   /* (o) The packetized bitstream */
+    iLBC_bits *enc_bits,  /* (i) Encoded bits */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                             ){
+  WebRtc_UWord16 *bitstreamPtr;
+  int i, k;
+  WebRtc_Word16 *tmpPtr;
+
+  bitstreamPtr=bitstream;
+
+  /* Class 1 bits of ULP (unequal level protection, RFC 3951) */
+  /* First WebRtc_Word16 */
+  (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->lsf[0])<<10;   /* Bit 0..5  */
+  (*bitstreamPtr) |= (enc_bits->lsf[1])<<3;     /* Bit 6..12 */
+  (*bitstreamPtr) |= (enc_bits->lsf[2]&0x70)>>4;    /* Bit 13..15 */
+  bitstreamPtr++;
+  /* Second WebRtc_Word16 */
+  (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->lsf[2]&0xF)<<12;  /* Bit 0..3  */
+
+  if (mode==20) {
+    (*bitstreamPtr) |= (enc_bits->startIdx)<<10;    /* Bit 4..5  */
+    (*bitstreamPtr) |= (enc_bits->state_first)<<9;    /* Bit 6  */
+    (*bitstreamPtr) |= (enc_bits->idxForMax)<<3;    /* Bit 7..12 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[0])&0x70)>>4;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    (*bitstreamPtr) = ((enc_bits->cb_index[0])&0xE)<<12;  /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x18)<<8;  /* Bit 3..4  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x8)<<7;  /* Bit 5  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0xFE)<<2;  /* Bit 6..12 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[3])&0x10)>>2;  /* Bit 13  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x8)>>2;  /* Bit 14  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x10)>>4;  /* Bit 15  */
+  } else { /* mode==30 */
+    (*bitstreamPtr) |= (enc_bits->lsf[3])<<6;     /* Bit 4..9  */
+    (*bitstreamPtr) |= (enc_bits->lsf[4]&0x7E)>>1;    /* Bit 10..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->lsf[4]&0x1)<<15;  /* Bit 0  */
+    (*bitstreamPtr) |= (enc_bits->lsf[5])<<8;     /* Bit 1..7  */
+    (*bitstreamPtr) |= (enc_bits->startIdx)<<5;     /* Bit 8..10 */
+    (*bitstreamPtr) |= (enc_bits->state_first)<<4;    /* Bit 11  */
+    (*bitstreamPtr) |= ((enc_bits->idxForMax)&0x3C)>>2;   /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 4:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->idxForMax&0x3)<<14; /* Bit 0..1  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[0]&0x78)<<7;   /* Bit 2..5  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[0]&0x10)<<5;  /* Bit 6  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x8)<<5;  /* Bit 7  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[3]&0xFC);   /* Bit 8..13 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[3]&0x10)>>3;  /* Bit 14  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x8)>>3;  /* Bit 15  */
+  }
+  /* Class 2 bits of ULP */
+  /* 4:th to 6:th WebRtc_Word16 for 20 ms case
+     5:th to 7:th WebRtc_Word16 for 30 ms case */
+  bitstreamPtr++;
+  tmpPtr=enc_bits->idxVec; /* bit 2 of each idxVec entry is packed into class 2 */
+  for (k=0; k<3; k++) {
+    (*bitstreamPtr) = 0;
+    for (i=15; i>=0; i--) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 7:th WebRtc_Word16 */
+    (*bitstreamPtr) = 0;
+    for (i=15; i>6; i--) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x4)<<4;  /* Bit 9  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<2;  /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x4)<<1;  /* Bit 12  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[6]&0x8)>>1;  /* Bit 13  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)>>2;  /* Bit 14..15 */
+
+  } else { /* mode==30 */
+    /* 8:th WebRtc_Word16 */
+    (*bitstreamPtr) = 0;
+    for (i=15; i>5; i--) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    (*bitstreamPtr) |= (enc_bits->cb_index[0]&0x6)<<3;   /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[0]&0x8);   /* Bit 12  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x4);   /* Bit 13  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[3]&0x2);    /* Bit 14  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[6]&0x80)>>7;   /* Bit 15  */
+    bitstreamPtr++;
+    /* 9:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->cb_index[6]&0x7E)<<9;/* Bit 0..5  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[9]&0xFE)<<2;   /* Bit 6..12 */
+    (*bitstreamPtr) |= (enc_bits->cb_index[12]&0xE0)>>5;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 10:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->cb_index[12]&0x1E)<<11;/* Bit 0..3 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<8;  /* Bit 4..5  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x6)<<7;  /* Bit 6..7  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[6]&0x18)<<3;  /* Bit 8..9  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)<<2;  /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[9]&0x10)>>1;  /* Bit 12  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[10]&0x8)>>1;  /* Bit 13  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[12]&0x10)>>3;  /* Bit 14  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[13]&0x8)>>3;  /* Bit 15  */
+  }
+  bitstreamPtr++;
+  /* Class 3 bits of ULP */
+  /*  8:th to 14:th WebRtc_Word16 for 20 ms case
+      11:th to 17:th WebRtc_Word16 for 30 ms case */
+  tmpPtr=enc_bits->idxVec; /* the two LSBs of each idxVec entry are packed into class 3 */
+  for (k=0; k<7; k++) {
+    (*bitstreamPtr) = 0;
+    for (i=14; i>=0; i-=2) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x3))<<i; /* Bit 15-i..14-i*/
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 15:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
+    (*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<13;  /* Bit 2  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<6;   /* Bit 3..9  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x7E)>>1;  /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 16:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->cb_index[2])&0x1))<<15;
+    /* Bit 0  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<12;  /* Bit 1..3  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<10;  /* Bit 4..5  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[2]))<<7;   /* Bit 6..8  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<6;  /* Bit 9  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x7E)>>1;  /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 17:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->cb_index[4])&0x1))<<15;
+    /* Bit 0  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[5])<<8;    /* Bit 1..7  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[6]);     /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 18:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[7]))<<8; /* Bit 0..7  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[8]);     /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->gain_index[3])&0x3))<<14;
+    /* Bit 0..1  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x3)<<12;  /* Bit 2..3  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[5]))<<9;   /* Bit 4..6  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<6;  /* Bit 7..9  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<4;  /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[8])<<1;   /* Bit 12..14 */
+  } else { /* mode==30 */
+    /* 18:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
+    (*bitstreamPtr) |= (((enc_bits->idxVec[57])&0x3))<<12;  /* Bit 2..3  */
+    (*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<11;  /* Bit 4  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<4;   /* Bit 5..11 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x78)>>3;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[2])&0x7)<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<10;  /* Bit 3..5  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<8;  /* Bit 6..7  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[2])&0x7)<<5;  /* Bit 8..10 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<4;  /* Bit 11  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x78)>>3;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 20:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[4])&0x7)<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[5]))<<6;   /* Bit 3..9  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[6])&0x1)<<5;  /* Bit 10  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[7])&0xF8)>>3;  /* Bit 11..15 */
+    bitstreamPtr++;
+    /* 21:st WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[7])&0x7)<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[8]))<<5;   /* Bit 3..10 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[9])&0x1)<<4;  /* Bit 11  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[10])&0xF0)>>4;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 22:nd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[10])&0xF)<<12;
+    /* Bit 0..3  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[11]))<<4;   /* Bit 4..11 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[12])&0x1)<<3;  /* Bit 12  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[13])&0xE0)>>5;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 23:rd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[13])&0x1F)<<11;
+    /* Bit 0..4  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[14]))<<3;   /* Bit 5..12 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[3])&0x3)<<1;  /* Bit 13..14 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x1);   /* Bit 15  */
+    bitstreamPtr++;
+    /* 24:rd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->gain_index[5]))<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<10;  /* Bit 3..5  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<8;  /* Bit 6..7  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[8]))<<5;   /* Bit 8..10 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[9])&0xF)<<1;  /* Bit 11..14 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[10])&0x4)>>2;  /* Bit 15  */
+    bitstreamPtr++;
+    /* 25:rd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->gain_index[10])&0x3)<<14;
+    /* Bit 0..1  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[11]))<<11;  /* Bit 2..4  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[12])&0xF)<<7;  /* Bit 5..8  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[13])&0x7)<<4;  /* Bit 9..11 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[14]))<<1;   /* Bit 12..14 */
+  }
+  /* Last bit is automatically zero */
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/pack_bits.h b/trunk/src/modules/audio_coding/codecs/ilbc/pack_bits.h
new file mode 100644
index 0000000..ed3f224
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/pack_bits.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_PackBits.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_PACK_BITS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_PACK_BITS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  packing of bits into the bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_PackBits( 
+    WebRtc_UWord16 *bitstream,   /* (o) The packetized bitstream */
+    iLBC_bits *enc_bits,  /* (i) Encoded bits */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                             );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.c b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
new file mode 100644
index 0000000..fe91851
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsf.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "poly_to_lsp.h"
+#include "lsp_to_lsf.h"
+
+void WebRtcIlbcfix_Poly2Lsf(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients (Q13) */
+    WebRtc_Word16 *a    /* (i) A coefficients (Q12) */
+                            ) {
+  WebRtc_Word16 lsp[10]; /* intermediate LSP representation (Q15) */
+  WebRtcIlbcfix_Poly2Lsp(a, lsp, (WebRtc_Word16*)WebRtcIlbcfix_kLspMean); /* mean LSP table is the fallback if root search fails */
+  WebRtcIlbcfix_Lsp2Lsf(lsp, lsf, 10);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.h b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
new file mode 100644
index 0000000..0ea595e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsf.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from lpc coefficients to lsf coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Poly2Lsf(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients (Q13) */
+    WebRtc_Word16 *a    /* (i) A coefficients (Q12) */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.c b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
new file mode 100644
index 0000000..29b4213
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
@@ -0,0 +1,156 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsp.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "chebyshev.h"
+
+/*----------------------------------------------------------------*
+ * conversion from lpc coefficients to lsp coefficients
+ * function is only for 10:th order LPC
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Poly2Lsp(
+    WebRtc_Word16 *a,  /* (i) A coefficients in Q12 */
+    WebRtc_Word16 *lsp, /* (o) LSP coefficients in Q15 */
+    WebRtc_Word16 *old_lsp /* (i) old LSP coefficients that are used if the new
+                              coefficients turn out to be unstable */
+                            ) {
+  WebRtc_Word16 f[2][6]; /* f[0][] represents f1 and f[1][] represents f2 */
+  WebRtc_Word16 *a_i_ptr, *a_10mi_ptr;
+  WebRtc_Word16 *f1ptr, *f2ptr;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word16 x, y, xlow, ylow, xmid, ymid, xhigh, yhigh, xint;
+  WebRtc_Word16 shifts, sign;
+  int i, j;
+  int foundFreqs;
+  int fi_select;
+
+  /*
+     Calculate the two polynomials f1(z) and f2(z)
+     (the sum and the diff polynomial)
+     f1[0] = f2[0] = 1.0;
+     f1[i+1] = a[i+1] + a[10-i] - f1[i];
+     f2[i+1] = a[i+1] - a[10-i] + f2[i];
+  */
+
+  a_i_ptr = a + 1;
+  a_10mi_ptr = a + 10;
+  f1ptr = f[0];
+  f2ptr = f[1];
+  (*f1ptr) = 1024; /* 1.0 in Q10 */
+  (*f2ptr) = 1024; /* 1.0 in Q10 */
+  for (i = 0; i < 5; i++) {
+    (*(f1ptr+1)) = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)(*a_i_ptr)+(*a_10mi_ptr)), 2) - (*f1ptr));
+    (*(f2ptr+1)) = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)(*a_i_ptr)-(*a_10mi_ptr)), 2) + (*f2ptr));
+    a_i_ptr++;
+    a_10mi_ptr--;
+    f1ptr++;
+    f2ptr++;
+  }
+
+  /*
+    find the LSPs using the Chebyshev pol. evaluation
+  */
+
+  fi_select = 0; /* selector between f1 and f2, start with f1 */
+
+  foundFreqs = 0;
+
+  xlow = WebRtcIlbcfix_kCosGrid[0];
+  ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]);
+
+  /*
+     Iterate until all the 10 LSP's have been found or
+     all the grid points have been tried. If the 10 LSP's can
+     not be found, set the LSP vector to previous LSP
+  */
+
+  for (j = 1; j < COS_GRID_POINTS && foundFreqs < 10; j++) {
+    xhigh = xlow;
+    yhigh = ylow;
+    xlow = WebRtcIlbcfix_kCosGrid[j];
+    ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]);
+
+    if (WEBRTC_SPL_MUL_16_16(ylow, yhigh) <= 0) {
+      /* Run 4 times to reduce the interval */
+      for (i = 0; i < 4; i++) {
+        /* xmid =(xlow + xhigh)/2 */
+        xmid = WEBRTC_SPL_RSHIFT_W16(xlow, 1) + WEBRTC_SPL_RSHIFT_W16(xhigh, 1);
+        ymid = WebRtcIlbcfix_Chebyshev(xmid, f[fi_select]);
+
+        if (WEBRTC_SPL_MUL_16_16(ylow, ymid) <= 0) {
+          yhigh = ymid;
+          xhigh = xmid;
+        } else {
+          ylow = ymid;
+          xlow = xmid;
+        }
+      }
+
+      /*
+        Calculate xint by linear interpolation:
+        xint = xlow - ylow*(xhigh-xlow)/(yhigh-ylow);
+      */
+
+      x = xhigh - xlow;
+      y = yhigh - ylow;
+
+      if (y == 0) {
+        xint = xlow;
+      } else {
+        sign = y;
+        y = WEBRTC_SPL_ABS_W16(y);
+        shifts = (WebRtc_Word16)WebRtcSpl_NormW32(y)-16;
+        y = WEBRTC_SPL_LSHIFT_W16(y, shifts);
+        y = (WebRtc_Word16)WebRtcSpl_DivW32W16(536838144, y); /* 1/(yhigh-ylow) */
+
+        tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(x, y, (19-shifts));
+
+        /* y=(xhigh-xlow)/(yhigh-ylow) */
+        y = (WebRtc_Word16)(tmpW32&0xFFFF);
+
+        if (sign < 0) {
+          y = -y;
+        }
+        /* tmpW32 = ylow*(xhigh-xlow)/(yhigh-ylow) */
+        tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(ylow, y, 10);
+        xint = xlow-(WebRtc_Word16)(tmpW32&0xFFFF);
+      }
+
+      /* Store the calculated lsp */
+      lsp[foundFreqs] = (WebRtc_Word16)xint;
+      foundFreqs++;
+
+      /* if needed, set xlow and ylow for the next iteration */
+      if (foundFreqs<10) {
+        xlow = xint;
+        /* Swap between f1 and f2 (f[0][] and f[1][]) */
+        fi_select = ((fi_select+1)&0x1);
+
+        ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]);
+      }
+    }
+  }
+
+  /* Check if M roots found, if not then use the old LSP */
+  if (foundFreqs < 10) {
+    WEBRTC_SPL_MEMCPY_W16(lsp, old_lsp, 10);
+  }
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.h b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
new file mode 100644
index 0000000..7eebb25
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsp.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * conversion from lpc coefficients to lsp coefficients
+ * function is only for 10:th order LPC
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Poly2Lsp(
+    WebRtc_Word16 *a,  /* (i) A coefficients in Q12 */
+    WebRtc_Word16 *lsp, /* (o) LSP coefficients in Q15 */
+    WebRtc_Word16 *old_lsp /* (i) old LSP coefficients that are used if the new
+                              coefficients turn out to be unstable */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/refiner.c b/trunk/src/modules/audio_coding/codecs/ilbc/refiner.c
new file mode 100644
index 0000000..9210092
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/refiner.c
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Refiner.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "enh_upsample.h"
+#include "my_corr.h"
+
+/*----------------------------------------------------------------*
+ * find segment starting near idata+estSegPos that has highest
+ * correlation with idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1. The segment is found at a
+ * resolution of ENH_UPS0 times the original
+ * sampling rate
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Refiner(
+    WebRtc_Word16 *updStartPos, /* (o) updated start point (Q-2) */
+    WebRtc_Word16 *idata,   /* (i) original data buffer */
+    WebRtc_Word16 idatal,   /* (i) dimension of idata */
+    WebRtc_Word16 centerStartPos, /* (i) beginning center segment */
+    WebRtc_Word16 estSegPos,  /* (i) estimated beginning other segment (Q-2) */
+    WebRtc_Word16 *surround,  /* (i/o) The contribution from this sequence
+                                           summed with earlier contributions */
+    WebRtc_Word16 gain    /* (i) Gain to use for this sequence */
+                           ){
+  WebRtc_Word16 estSegPosRounded,searchSegStartPos,searchSegEndPos,corrdim;
+  WebRtc_Word16 tloc,tloc2,i,st,en,fraction;
+
+  WebRtc_Word32 maxtemp, scalefact;
+  WebRtc_Word16 *filtStatePtr, *polyPtr;
+  /* Stack based */
+  WebRtc_Word16 filt[7];
+  WebRtc_Word32 corrVecUps[ENH_CORRDIM*ENH_UPS0];
+  WebRtc_Word32 corrVecTemp[ENH_CORRDIM];
+  WebRtc_Word16 vect[ENH_VECTL];
+  WebRtc_Word16 corrVec[ENH_CORRDIM];
+
+  /* define the search range bounds (estSegPos is in Q-2; convert to a sample index) */
+
+  estSegPosRounded=WEBRTC_SPL_RSHIFT_W16((estSegPos - 2),2);
+
+  searchSegStartPos=estSegPosRounded-ENH_SLOP;
+
+  if (searchSegStartPos<0) {
+    searchSegStartPos=0;
+  }
+  searchSegEndPos=estSegPosRounded+ENH_SLOP;
+
+  if(searchSegEndPos+ENH_BLOCKL >= idatal) {
+    searchSegEndPos=idatal-ENH_BLOCKL-1;
+  }
+  corrdim=searchSegEndPos-searchSegStartPos+1;
+
+  /* compute upsampled correlation and find
+     location of max */
+
+  WebRtcIlbcfix_MyCorr(corrVecTemp,idata+searchSegStartPos,
+                       (WebRtc_Word16)(corrdim+ENH_BLOCKL-1),idata+centerStartPos,ENH_BLOCKL);
+
+  /* Calculate the rescaling factor for the correlation in order to
+     put the correlation in a WebRtc_Word16 vector instead */
+  maxtemp=WebRtcSpl_MaxAbsValueW32(corrVecTemp, (WebRtc_Word16)corrdim);
+
+  scalefact=WebRtcSpl_GetSizeInBits(maxtemp)-15;
+
+  if (scalefact>0) {
+    for (i=0;i<corrdim;i++) {
+      corrVec[i]=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(corrVecTemp[i], scalefact);
+    }
+  } else {
+    for (i=0;i<corrdim;i++) {
+      corrVec[i]=(WebRtc_Word16)corrVecTemp[i];
+    }
+  }
+  /* In order to guarantee that all values are initialized */
+  for (i=corrdim;i<ENH_CORRDIM;i++) {
+    corrVec[i]=0;
+  }
+
+  /* Upsample the correlation */
+  WebRtcIlbcfix_EnhUpsample(corrVecUps,corrVec);
+
+  /* Find maximum */
+  tloc=WebRtcSpl_MaxIndexW32(corrVecUps, (WebRtc_Word16) (ENH_UPS0*corrdim));
+
+  /* make sure the vector can be upsampled without ever running outside
+     the bounds */
+  *updStartPos = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(searchSegStartPos,4) + tloc + 4;
+
+  tloc2 = WEBRTC_SPL_RSHIFT_W16((tloc+3), 2);
+
+  st=searchSegStartPos+tloc2-ENH_FL0;
+
+  /* initialize the vector to be filtered, stuff with zeros
+     when data is outside idata buffer */
+  if(st<0){
+    WebRtcSpl_MemSetW16(vect, 0, (WebRtc_Word16)(-st));
+    WEBRTC_SPL_MEMCPY_W16(&vect[-st], idata, (ENH_VECTL+st));
+  }
+  else{
+    en=st+ENH_VECTL;
+
+    if(en>idatal){
+      WEBRTC_SPL_MEMCPY_W16(vect, &idata[st],
+                            (ENH_VECTL-(en-idatal)));
+      WebRtcSpl_MemSetW16(&vect[ENH_VECTL-(en-idatal)], 0,
+                          (WebRtc_Word16)(en-idatal));
+    }
+    else {
+      WEBRTC_SPL_MEMCPY_W16(vect, &idata[st], ENH_VECTL);
+    }
+  }
+  /* Calculate which of the ENH_UPS0 (4) polyphase fractions to use */
+  fraction=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(tloc2,ENH_UPS0)-tloc;
+
+  /* compute the segment (this is actually a convolution); the filter taps are copied in reverse order */
+
+  filtStatePtr = filt + 6;
+  polyPtr = (WebRtc_Word16*)WebRtcIlbcfix_kEnhPolyPhaser[fraction];
+  for (i=0;i<7;i++) {
+    *filtStatePtr-- = *polyPtr++;
+  }
+
+  WebRtcSpl_FilterMAFastQ12(
+      &vect[6], vect, filt,
+      ENH_FLO_MULT2_PLUS1, ENH_BLOCKL);
+
+  /* Add the contribution from this vector (scaled with gain) to the total surround vector */
+  WebRtcSpl_AddAffineVectorToVector(
+      surround, vect, gain,
+      (WebRtc_Word32)32768, 16, ENH_BLOCKL);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/refiner.h b/trunk/src/modules/audio_coding/codecs/ilbc/refiner.h
new file mode 100644
index 0000000..559555c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/refiner.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Refiner.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * find segment starting near idata+estSegPos that has highest
+ * correlation with idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1. The segment is found at a
+ * resolution of ENH_UPS0 times the original
+ * sampling rate
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Refiner(
+    WebRtc_Word16 *updStartPos, /* (o) updated start point (Q-2) */
+    WebRtc_Word16 *idata,   /* (i) original data buffer */
+    WebRtc_Word16 idatal,   /* (i) dimension of idata */
+    WebRtc_Word16 centerStartPos, /* (i) beginning center segment */
+    WebRtc_Word16 estSegPos,  /* (i) estimated beginning other segment (Q-2) */
+    WebRtc_Word16 *surround,  /* (i/o) The contribution from this sequence
+                                 summed with earlier contributions */
+    WebRtc_Word16 gain    /* (i) Gain to use for this sequence */
+                           );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c b/trunk/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
new file mode 100644
index 0000000..ee5e643
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleInterpolateLsf.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "lsf_interpolate_to_poly_enc.h"
+#include "bw_expand.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator (subroutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleInterpolateLsf(
+    WebRtc_Word16 *syntdenum, /* (o) the synthesis filter denominator
+                                   resulting from the quantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *weightdenum, /* (o) the weighting filter denominator
+                                   resulting from the unquantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *lsf,  /* (i) the unquantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfdeq,  /* (i) the dequantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfold,  /* (i) the unquantized lsf coefficients of
+                                           the previous signal frame Q13 */
+    WebRtc_Word16 *lsfdeqold, /* (i) the dequantized lsf coefficients of the
+                                   previous signal frame Q13 */
+    WebRtc_Word16 length,  /* (i) should equal LPC_FILTERORDER */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                        ) {
+  int i, pos, lp_length;
+
+  WebRtc_Word16 *lsf2, *lsfdeq2;
+  /* Stack based */
+  WebRtc_Word16 lp[LPC_FILTERORDER + 1];
+
+  lsf2 = lsf + length;
+  lsfdeq2 = lsfdeq + length;
+  lp_length = length + 1;
+
+  if (iLBCenc_inst->mode==30) {
+    /* subframe 1: Interpolation between old and first set of
+       lsf coefficients */
+
+    /* Calculate Analysis/Synthesis filter from quantized LSF */
+    WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeqold, lsfdeq,
+                                         WebRtcIlbcfix_kLsfWeight30ms[0],
+                                         length);
+    WEBRTC_SPL_MEMCPY_W16(syntdenum, lp, lp_length);
+
+    /* Calculate Weighting filter from unquantized LSF */
+    WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfold, lsf,
+                                         WebRtcIlbcfix_kLsfWeight30ms[0],
+                                         length);
+    WebRtcIlbcfix_BwExpand(weightdenum, lp,
+                           (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+                           (WebRtc_Word16)lp_length);
+
+    /* subframe 2 to 6: Interpolation between first and second
+       set of lsf coefficients */
+
+    pos = lp_length;
+    for (i = 1; i < iLBCenc_inst->nsub; i++) {
+
+      /* Calculate Analysis/Synthesis filter from quantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeq, lsfdeq2,
+                                           WebRtcIlbcfix_kLsfWeight30ms[i],
+                                           length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum + pos, lp, lp_length);
+
+      /* Calculate Weighting filter from unquantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsf, lsf2,
+                                           WebRtcIlbcfix_kLsfWeight30ms[i],
+                                           length);
+      WebRtcIlbcfix_BwExpand(weightdenum + pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+                             (WebRtc_Word16)lp_length);
+
+      pos += lp_length;
+    }
+
+    /* update memory */
+
+    WEBRTC_SPL_MEMCPY_W16(lsfold, lsf2, length);
+    WEBRTC_SPL_MEMCPY_W16(lsfdeqold, lsfdeq2, length);
+
+  } else { /* iLBCenc_inst->mode==20 */
+    pos = 0;
+    for (i = 0; i < iLBCenc_inst->nsub; i++) {
+
+      /* Calculate Analysis/Synthesis filter from quantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeqold, lsfdeq,
+                                           WebRtcIlbcfix_kLsfWeight20ms[i],
+                                           length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum + pos, lp, lp_length);
+
+      /* Calculate Weighting filter from unquantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfold, lsf,
+                                           WebRtcIlbcfix_kLsfWeight20ms[i],
+                                           length);
+      WebRtcIlbcfix_BwExpand(weightdenum+pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+                             (WebRtc_Word16)lp_length);
+
+      pos += lp_length;
+    }
+
+    /* update memory */
+
+    WEBRTC_SPL_MEMCPY_W16(lsfold, lsf, length);
+    WEBRTC_SPL_MEMCPY_W16(lsfdeqold, lsfdeq, length);
+
+  }
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h b/trunk/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
new file mode 100644
index 0000000..8cdd7da
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleInterpolateLsf.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator (subroutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleInterpolateLsf(
+    WebRtc_Word16 *syntdenum, /* (o) the synthesis filter denominator
+                                   resulting from the quantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *weightdenum, /* (o) the weighting filter denominator
+                                   resulting from the unquantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *lsf,  /* (i) the unquantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfdeq,  /* (i) the dequantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfold,  /* (i) the unquantized lsf coefficients of
+                                           the previous signal frame Q13 */
+    WebRtc_Word16 *lsfdeqold, /* (i) the dequantized lsf coefficients of the
+                                   previous signal frame Q13 */
+    WebRtc_Word16 length,  /* (i) should equal LPC_FILTERORDER */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                        );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
new file mode 100644
index 0000000..2d19edd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLpcAnalysis.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "window32_w32.h"
+#include "bw_expand.h"
+#include "poly_to_lsf.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lpc analysis (subroutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLpcAnalysis(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients */
+    WebRtc_Word16 *data,   /* (i) new block of speech */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                     ) {
+  int k;
+  int scale;
+  WebRtc_Word16 is;
+  WebRtc_Word16 stability;
+  /* Stack based */
+  WebRtc_Word16 A[LPC_FILTERORDER + 1];
+  WebRtc_Word32 R[LPC_FILTERORDER + 1];
+  WebRtc_Word16 windowedData[BLOCKL_MAX];
+  WebRtc_Word16 rc[LPC_FILTERORDER];
+
+  is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer+is,data,iLBCenc_inst->blockl);
+
+  /* No lookahead, last window is asymmetric */
+
+  for (k = 0; k < iLBCenc_inst->lpc_n; k++) {
+
+    is = LPC_LOOKBACK;
+
+    if (k < (iLBCenc_inst->lpc_n - 1)) {
+
+      /* Hanning table WebRtcIlbcfix_kLpcWin[] is in Q15-domain so the output is right-shifted 15 */
+      WebRtcSpl_ElementwiseVectorMult(windowedData, iLBCenc_inst->lpc_buffer, WebRtcIlbcfix_kLpcWin, BLOCKL_MAX, 15);
+    } else {
+
+      /* Hanning table WebRtcIlbcfix_kLpcAsymWin[] is in Q15-domain so the output is right-shifted 15 */
+      WebRtcSpl_ElementwiseVectorMult(windowedData, iLBCenc_inst->lpc_buffer+is, WebRtcIlbcfix_kLpcAsymWin, BLOCKL_MAX, 15);
+    }
+
+    /* Compute autocorrelation (lags 0..LPC_FILTERORDER) of the windowed block */
+    WebRtcSpl_AutoCorrelation(windowedData, BLOCKL_MAX, LPC_FILTERORDER, R, &scale);
+
+    /* Apply lag window to the autocorrelation vector */
+    WebRtcIlbcfix_Window32W32(R, R, WebRtcIlbcfix_kLpcLagWin, LPC_FILTERORDER + 1 );
+
+    /* Calculate the A coefficients from the Autocorrelation using Levinson Durbin algorithm */
+    stability=WebRtcSpl_LevinsonDurbin(R, A, rc, LPC_FILTERORDER);
+
+    /*
+       Set the filter to {1.0, 0.0, 0.0,...} if filter from Levinson Durbin algorithm is unstable
+       This should basically never happen...
+    */
+    if (stability!=1) {
+      A[0]=4096;
+      WebRtcSpl_MemSetW16(&A[1], 0, LPC_FILTERORDER);
+    }
+
+    /* Bandwidth expand the filter coefficients */
+    WebRtcIlbcfix_BwExpand(A, A, (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, LPC_FILTERORDER+1);
+
+    /* Convert from A to LSF representation */
+    WebRtcIlbcfix_Poly2Lsf(lsf + k*LPC_FILTERORDER, A);
+  }
+
+  is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer,
+                        iLBCenc_inst->lpc_buffer+LPC_LOOKBACK+BLOCKL_MAX-is, is);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
new file mode 100644
index 0000000..83c1e5b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLpcAnalysis.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc analysis (subroutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLpcAnalysis(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients */
+    WebRtc_Word16 *data,   /* (i) new block of speech */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                     );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
new file mode 100644
index 0000000..7b5efa0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfDeQ.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  obtain dequantized lsf coefficients from quantization index
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfDeQ(
+    WebRtc_Word16 *lsfdeq,  /* (o) dequantized lsf coefficients */
+    WebRtc_Word16 *index,  /* (i) quantization index */
+    WebRtc_Word16 lpc_n  /* (i) number of LSF sets to decode (1 or 2) */
+                                ){
+  int i, j, pos, cb_pos;
+
+  /* decode the first LSF vector, one split sub-vector at a time */
+
+  pos = 0;
+  cb_pos = 0;
+  for (i = 0; i < LSF_NSPLIT; i++) {
+    for (j = 0; j < WebRtcIlbcfix_kLsfDimCb[i]; j++) {
+      lsfdeq[pos + j] = WebRtcIlbcfix_kLsfCb[cb_pos +
+                                             WEBRTC_SPL_MUL_16_16(index[i], WebRtcIlbcfix_kLsfDimCb[i]) + j];
+    }
+    pos += WebRtcIlbcfix_kLsfDimCb[i];
+    cb_pos += WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kLsfSizeCb[i], WebRtcIlbcfix_kLsfDimCb[i]);
+  }
+
+  if (lpc_n>1) {
+    /* decode the second LSF vector */
+    pos = 0;
+    cb_pos = 0;
+    for (i = 0; i < LSF_NSPLIT; i++) {
+      for (j = 0; j < WebRtcIlbcfix_kLsfDimCb[i]; j++) {
+        lsfdeq[LPC_FILTERORDER + pos + j] = WebRtcIlbcfix_kLsfCb[cb_pos +
+                                                                 WEBRTC_SPL_MUL_16_16(index[LSF_NSPLIT + i], WebRtcIlbcfix_kLsfDimCb[i]) + j];
+      }
+      pos += WebRtcIlbcfix_kLsfDimCb[i];
+      cb_pos += WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kLsfSizeCb[i], WebRtcIlbcfix_kLsfDimCb[i]);
+    }
+  }
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
new file mode 100644
index 0000000..efd3103
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfDeQ.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  obtain dequantized lsf coefficients from quantization index
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfDeQ(
+    WebRtc_Word16 *lsfdeq,  /* (o) dequantized lsf coefficients */
+    WebRtc_Word16 *index,  /* (i) quantization index */
+    WebRtc_Word16 lpc_n  /* (i) number of LSF sets to decode (1 or 2) */
+                                );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
new file mode 100644
index 0000000..aa27fb4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfQ.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "split_vq.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lsf quantizer (subroutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfQ(
+    WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients
+                                   (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 *index, /* (o) quantization index */
+    WebRtc_Word16 *lsf, /* (i) the lsf coefficient vector to be
+                           quantized (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 lpc_n /* (i) number of lsf sets to quantize */
+                              ){
+
+  /* Quantize first LSF set with memoryless split VQ (writes LSF_NSPLIT indices) */
+  WebRtcIlbcfix_SplitVq( lsfdeq, index, lsf,
+                         (WebRtc_Word16*)WebRtcIlbcfix_kLsfCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfDimCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfSizeCb);
+
+  if (lpc_n==2) {
+    /* Quantize second LSF set with memoryless split VQ */
+    WebRtcIlbcfix_SplitVq( lsfdeq + LPC_FILTERORDER, index + LSF_NSPLIT,
+                           lsf + LPC_FILTERORDER, (WebRtc_Word16*)WebRtcIlbcfix_kLsfCb,
+                           (WebRtc_Word16*)WebRtcIlbcfix_kLsfDimCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfSizeCb);
+  }
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
new file mode 100644
index 0000000..fd17b2e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfQ.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lsf quantizer (subroutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfQ(
+    WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients
+                                   (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 *index, /* (o) quantization index */
+    WebRtc_Word16 *lsf, /* (i) the lsf coefficient vector to be
+                           quantized (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 lpc_n /* (i) number of lsf sets to quantize */
+                              );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/smooth.c b/trunk/src/modules/audio_coding/codecs/ilbc/smooth.c
new file mode 100644
index 0000000..b606077
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/smooth.c
@@ -0,0 +1,211 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "smooth_out_data.h"
+
+/*----------------------------------------------------------------*
+ * find the smoothed output data
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Smooth(
+    WebRtc_Word16 *odata,   /* (o) smoothed output */
+    WebRtc_Word16 *current,  /* (i) the unenhanced residual for
+                                this block */
+    WebRtc_Word16 *surround  /* (i) The approximation from the
+                                surrounding sequences */
+                          ) {
+  WebRtc_Word16 maxtot, scale, scale1, scale2;
+  WebRtc_Word16 A, B, C, denomW16;
+  WebRtc_Word32 B_W32, denom, num;
+  WebRtc_Word32 errs;
+  WebRtc_Word32 w00,w10,w11, endiff, crit;
+  WebRtc_Word32 w00prim, w10prim, w11_div_w00;
+  WebRtc_Word16 w11prim;
+  WebRtc_Word16 bitsw00, bitsw10, bitsw11;
+  WebRtc_Word32 w11w00, w10w10, w00w00;
+  WebRtc_Word16 max1, max2;
+
+  /* compute some inner products (ensure no overflow by first calculating proper scale factor) */
+
+  w00 = w10 = w11 = 0;
+
+  max1=WebRtcSpl_MaxAbsValueW16(current, ENH_BLOCKL);
+  max2=WebRtcSpl_MaxAbsValueW16(surround, ENH_BLOCKL);
+  maxtot=WEBRTC_SPL_MAX(max1, max2);
+
+  scale=WebRtcSpl_GetSizeInBits(maxtot);
+  scale = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2,scale)-26;
+  scale=WEBRTC_SPL_MAX(0, scale);
+
+  w00=WebRtcSpl_DotProductWithScale(current,current,ENH_BLOCKL,scale);
+  w11=WebRtcSpl_DotProductWithScale(surround,surround,ENH_BLOCKL,scale);
+  w10=WebRtcSpl_DotProductWithScale(surround,current,ENH_BLOCKL,scale);
+
+  if (w00<0) w00 = WEBRTC_SPL_WORD32_MAX; /* saturate if the accumulation wrapped negative */
+  if (w11<0) w11 = WEBRTC_SPL_WORD32_MAX; /* saturate if the accumulation wrapped negative */
+
+  /* Rescale w00 and w11 to w00prim and w11prim, so that w00prim/w11prim
+     is in Q16 */
+
+  bitsw00 = WebRtcSpl_GetSizeInBits(w00);
+  bitsw11 = WebRtcSpl_GetSizeInBits(w11);
+  bitsw10 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(w10));
+  scale1 = 31 - bitsw00;
+  scale2 = 15 - bitsw11;
+
+  if (scale2>(scale1-16)) {
+    scale2 = scale1 - 16;
+  } else {
+    scale1 = scale2 + 16;
+  }
+
+  w00prim = WEBRTC_SPL_LSHIFT_W32(w00, scale1);
+  w11prim = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w11, scale2);
+
+  /* C = sqrt((w00prim/w11prim)<<6), the energy-matching gain in Q11 ((16+6)/2=11) */
+  if (w11prim>64) {
+    endiff = WEBRTC_SPL_LSHIFT_W32(
+        (WebRtc_Word32)WebRtcSpl_DivW32W16(w00prim, w11prim), 6);
+    C = (WebRtc_Word16)WebRtcSpl_SqrtFloor(endiff); /* C is in Q11 */
+  } else {
+    C = 1;
+  }
+
+  /* first try enhancement without power-constraint */
+
+  errs = WebRtcIlbcfix_Smooth_odata(odata, current, surround, C);
+
+
+
+  /* if constraint violated by first try, add constraint */
+
+  if ( (6-scale+scale1) > 31) {
+    crit=0;
+  } else {
+    /* crit = ENH_A0 * w00 (presumably 0.05*w00 -- confirm ENH_A0; result in Q-6) */
+    crit = WEBRTC_SPL_SHIFT_W32(
+        WEBRTC_SPL_MUL(ENH_A0, WEBRTC_SPL_RSHIFT_W32(w00prim, 14)),
+        -(6-scale+scale1));
+  }
+
+  if (errs > crit) {
+
+    if( w00 < 1) { /* guard the divisions below */
+      w00=1;
+    }
+
+    /* Calculate w11*w00, w10*w10 and w00*w00 in the same Q domain */
+
+    scale1 = bitsw00-15;
+    scale2 = bitsw11-15;
+
+    if (scale2>scale1) {
+      scale = scale2;
+    } else {
+      scale = scale1;
+    }
+
+    w11w00 = WEBRTC_SPL_MUL_16_16(
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w11, -scale),
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale));
+
+    w10w10 = WEBRTC_SPL_MUL_16_16(
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w10, -scale),
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w10, -scale));
+
+    w00w00 = WEBRTC_SPL_MUL_16_16(
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale),
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale));
+
+    /* Calculate (w11*w00-w10*w10)/(w00*w00) in Q16 */
+    if (w00w00>65536) {
+      endiff = (w11w00-w10w10);
+      endiff = WEBRTC_SPL_MAX(0, endiff);
+      /* denom is in Q16 */
+      denom = WebRtcSpl_DivW32W16(endiff, (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(w00w00, 16));
+    } else {
+      denom = 65536;
+    }
+
+    if( denom > 7){ /* guard against numerical problems
+                       when the denominator is very small */
+
+      scale=WebRtcSpl_GetSizeInBits(denom)-15;
+
+      if (scale>0) {
+        /* denomW16 is in Q(16+scale) */
+        denomW16=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(denom, scale);
+
+        /* num in Q(34-scale) */
+        num=WEBRTC_SPL_RSHIFT_W32(ENH_A0_MINUS_A0A0DIV4, scale);
+      } else {
+        /* denomW16 is in Q16 */
+        denomW16=(WebRtc_Word16)denom;
+
+        /* num in Q34 */
+        num=ENH_A0_MINUS_A0A0DIV4;
+      }
+
+      /* A sqrt( (ENH_A0-(ENH_A0^2)/4)*(w00*w00)/(w11*w00 + w10*w10) ) in Q9 */
+      A = (WebRtc_Word16)WebRtcSpl_SqrtFloor(WebRtcSpl_DivW32W16(num, denomW16));
+
+      /* B_W32 is in Q30 ( B = 1 - ENH_A0/2 - A * w10/w00 ) */
+      scale1 = 31-bitsw10;
+      scale2 = 21-scale1;
+      w10prim = WEBRTC_SPL_LSHIFT_W32(w10, scale1);
+      w00prim = WEBRTC_SPL_SHIFT_W32(w00, -scale2);
+      scale = bitsw00-scale2-15;
+
+      if (scale>0) {
+        w10prim=WEBRTC_SPL_RSHIFT_W32(w10prim, scale);
+        w00prim=WEBRTC_SPL_RSHIFT_W32(w00prim, scale);
+      }
+
+      if ((w00prim>0)&&(w10prim>0)) {
+        w11_div_w00=WebRtcSpl_DivW32W16(w10prim, (WebRtc_Word16)w00prim);
+
+        if (WebRtcSpl_GetSizeInBits(w11_div_w00)+WebRtcSpl_GetSizeInBits(A)>31) {
+          B_W32 = 0;
+        } else {
+          B_W32 = (WebRtc_Word32)1073741824 - (WebRtc_Word32)ENH_A0DIV2 -
+              WEBRTC_SPL_MUL(A, w11_div_w00);
+        }
+        B = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(B_W32, 16); /* B in Q14 */
+      } else {
+        /* No smoothing */
+        A = 0;
+        B = 16384; /* 1 in Q14 */
+      }
+    }
+    else{ /* essentially no difference between cycles;
+             smoothing not needed */
+
+      A = 0;
+      B = 16384; /* 1 in Q14 */
+    }
+
+    /* create smoothed sequence: odata = A(Q9)*surround + B(Q14)*current */
+
+    WebRtcSpl_ScaleAndAddVectors(surround, A, 9,
+                                current, B, 14,
+                                odata, ENH_BLOCKL);
+  }
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/smooth.h b/trunk/src/modules/audio_coding/codecs/ilbc/smooth.h
new file mode 100644
index 0000000..88ce805
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/smooth.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * find the smoothed output data
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Smooth(
+    WebRtc_Word16 *odata,   /* (o) smoothed output */
+    WebRtc_Word16 *current,  /* (i) the unenhanced residual for
+                                this block */
+    WebRtc_Word16 *surround  /* (i) The approximation from the
+                                surrounding sequences */
+                          );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/smooth_out_data.c b/trunk/src/modules/audio_coding/codecs/ilbc/smooth_out_data.c
new file mode 100644
index 0000000..9bacd85
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/smooth_out_data.c
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth_odata.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+WebRtc_Word32 WebRtcIlbcfix_Smooth_odata(
+    WebRtc_Word16 *odata, /* (o) output: C * surround, rounded */
+    WebRtc_Word16 *psseq, /* (i) reference sequence the output is compared against */
+    WebRtc_Word16 *surround, /* (i) the surround vector to be scaled */
+    WebRtc_Word16 C) /* (i) scale factor in Q11 (the +1024 >> 11 rounding below implies Q11) */
+{
+  int i;
+
+  WebRtc_Word16 err;
+  WebRtc_Word32 errs;
+
+  for(i=0;i<80;i++) { /* 80 samples per block (presumably ENH_BLOCKL -- confirm) */
+    odata[i]= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+        (WEBRTC_SPL_MUL_16_16(C, surround[i])+1024), 11); /* round C(Q11)*surround back to the input Q domain */
+  }
+
+  errs=0;
+  for(i=0;i<80;i++) { /* accumulate the squared error vs. psseq */
+    err=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16((psseq[i]-odata[i]), 3);
+    errs+=WEBRTC_SPL_MUL_16_16(err, err); /* errs in Q-6 */
+  }
+
+  return errs;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/smooth_out_data.h b/trunk/src/modules/audio_coding/codecs/ilbc/smooth_out_data.h
new file mode 100644
index 0000000..6fbe694
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/smooth_out_data.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth_odata.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * help function to WebRtcIlbcfix_Smooth()
+ *---------------------------------------------------------------*/
+
+WebRtc_Word32 WebRtcIlbcfix_Smooth_odata(
+    WebRtc_Word16 *odata,
+    WebRtc_Word16 *psseq,
+    WebRtc_Word16 *surround,
+    WebRtc_Word16 C);
+
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/sort_sq.c b/trunk/src/modules/audio_coding/codecs/ilbc/sort_sq.c
new file mode 100644
index 0000000..9276a7b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/sort_sq.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SortSq.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  scalar quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SortSq(
+    WebRtc_Word16 *xq,   /* (o) the quantized value */
+    WebRtc_Word16 *index,  /* (o) the quantization index */
+    WebRtc_Word16 x,   /* (i) the value to quantize */
+    const WebRtc_Word16 *cb, /* (i) the quantization codebook (assumed sorted ascending -- the linear search below relies on it) */
+    WebRtc_Word16 cb_size  /* (i) the size of the quantization codebook */
+                          ){
+  int i;
+
+  if (x <= cb[0]) { /* below the codebook range: clamp to the first entry */
+    *index = 0;
+    *xq = cb[0];
+  } else {
+    i = 0;
+    while ((x > cb[i]) && (i < (cb_size-1))) { /* find first entry >= x (or the last entry) */
+      i++;
+    }
+
+    if (x > WEBRTC_SPL_RSHIFT_W32(( (WebRtc_Word32)cb[i] + cb[i - 1] + 1),1)) { /* rounded midpoint test: nearer cb[i]? */
+      *index = i;
+      *xq = cb[i];
+    } else {
+      *index = i - 1;
+      *xq = cb[i - 1];
+    }
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/sort_sq.h b/trunk/src/modules/audio_coding/codecs/ilbc/sort_sq.h
new file mode 100644
index 0000000..2863dc5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/sort_sq.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SortSq.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  scalar quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SortSq(
+    WebRtc_Word16 *xq,   /* (o) the quantized value */
+    WebRtc_Word16 *index,  /* (o) the quantization index */
+    WebRtc_Word16 x,   /* (i) the value to quantize */
+    const WebRtc_Word16 *cb, /* (i) the quantization codebook */
+    WebRtc_Word16 cb_size  /* (i) the size of the quantization codebook */
+                           );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/split_vq.c b/trunk/src/modules/audio_coding/codecs/ilbc/split_vq.c
new file mode 100644
index 0000000..d908fa2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/split_vq.c
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SplitVq.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "vq3.h"
+#include "vq4.h"
+
+/*----------------------------------------------------------------*
+ *  split vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SplitVq(
+    WebRtc_Word16 *qX,  /* (o) the quantized vector in Q13 */
+    WebRtc_Word16 *index, /* (o) a vector of indexes for all vector
+                                   codebooks in the split */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize */
+    WebRtc_Word16 *CB,  /* (i) the quantizer codebook in Q13 */
+    WebRtc_Word16 *dim, /* (i) the dimension of X and qX */
+    WebRtc_Word16 *cbsize /* (i) the number of vectors in the codebook */
+                           ) {
+
+  WebRtc_Word16 *qXPtr, *indexPtr, *CBPtr, *XPtr;
+
+  /* Quantize X with the three split vector-quantization codebooks (splits of 3, 3 and 4 elements) */
+
+  qXPtr=qX;
+  indexPtr=index;
+  CBPtr=CB;
+  XPtr=X;
+  WebRtcIlbcfix_Vq3(qXPtr, indexPtr, CBPtr, XPtr, cbsize[0]); /* first 3-dim split */
+
+  qXPtr+=3;
+  indexPtr+=1;
+  CBPtr+=(dim[0]*cbsize[0]); /* skip past the first sub-codebook */
+  XPtr+=3;
+  WebRtcIlbcfix_Vq3(qXPtr, indexPtr, CBPtr, XPtr, cbsize[1]); /* second 3-dim split */
+
+  qXPtr+=3;
+  indexPtr+=1;
+  CBPtr+=(dim[1]*cbsize[1]); /* skip past the second sub-codebook */
+  XPtr+=3;
+  WebRtcIlbcfix_Vq4(qXPtr, indexPtr, CBPtr, XPtr, cbsize[2]); /* final 4-dim split */
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/split_vq.h b/trunk/src/modules/audio_coding/codecs/ilbc/split_vq.h
new file mode 100644
index 0000000..7264a21
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/split_vq.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SplitVq.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  split vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SplitVq(
+    WebRtc_Word16 *qX,  /* (o) the quantized vector in Q13 */
+    WebRtc_Word16 *index, /* (o) a vector of indexes for all vector
+                                   codebooks in the split */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize */
+    WebRtc_Word16 *CB,  /* (i) the quantizer codebook in Q13 */
+    WebRtc_Word16 *dim, /* (i) the dimension of X and qX */
+    WebRtc_Word16 *cbsize /* (i) the number of vectors in the codebook */
+                           );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/state_construct.c b/trunk/src/modules/audio_coding/codecs/ilbc/state_construct.c
new file mode 100644
index 0000000..9d03cc3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/state_construct.c
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateConstruct.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  decoding of the start state
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateConstruct(
+    WebRtc_Word16 idxForMax,   /* (i) 6-bit index for the quantization of
+                                           max amplitude */
+    WebRtc_Word16 *idxVec,   /* (i) vector of quantization indexes */
+    WebRtc_Word16 *syntDenum,  /* (i) synthesis filter denominator */
+    WebRtc_Word16 *Out_fix,  /* (o) the decoded state vector */
+    WebRtc_Word16 len    /* (i) length of a state vector */
+                                  ) {
+  int k;
+  WebRtc_Word16 maxVal;
+  WebRtc_Word16 *tmp1, *tmp2, *tmp3;
+  /* Stack based */
+  WebRtc_Word16 numerator[1+LPC_FILTERORDER];
+  WebRtc_Word16 sampleValVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 sampleMaVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 *sampleVal = &sampleValVec[LPC_FILTERORDER];
+  WebRtc_Word16 *sampleMa = &sampleMaVec[LPC_FILTERORDER];
+  WebRtc_Word16 *sampleAr = &sampleValVec[LPC_FILTERORDER]; /* aliases sampleVal: the AR output reuses the MA input buffer */
+
+  /* initialization of coefficients: numerator = time-reversed syntDenum */
+
+  for (k=0; k<LPC_FILTERORDER+1; k++){
+    numerator[k] = syntDenum[LPC_FILTERORDER-k];
+  }
+
+  /* decoding of the maximum value */
+
+  maxVal = WebRtcIlbcfix_kFrgQuantMod[idxForMax];
+
+  /* decoding of the sample values (idxVec is consumed in reverse order) */
+  tmp1 = sampleVal;
+  tmp2 = &idxVec[len-1];
+
+  if (idxForMax<37) {
+    for(k=0; k<len; k++){
+      /*the shifting is due to the Q13 in WebRtcIlbcfix_kStateSq3[i], also the adding of 2097152 (= 0.5 << 22)
+        maxVal is in Q8 and result is in Q(-1) */
+      (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)2097152) >> 22);
+      tmp1++;
+      tmp2--;
+    }
+  } else if (idxForMax<59) {
+    for(k=0; k<len; k++){
+      /*the shifting is due to the Q13 in WebRtcIlbcfix_kStateSq3[i], also the adding of 262144 (= 0.5 << 19)
+        maxVal is in Q5 and result is in Q(-1) */
+      (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)262144) >> 19);
+      tmp1++;
+      tmp2--;
+    }
+  } else {
+    for(k=0; k<len; k++){
+      /*the shifting is due to the Q13 in WebRtcIlbcfix_kStateSq3[i], also the adding of 65536 (= 0.5 << 17)
+        maxVal is in Q3 and result is in Q(-1) */
+      (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)65536) >> 17);
+      tmp1++;
+      tmp2--;
+    }
+  }
+
+  /* Set the rest of the data to zero */
+  WebRtcSpl_MemSetW16(&sampleVal[len], 0, len);
+
+  /* circular convolution with all-pass filter */
+
+  /* Set the state to zero */
+  WebRtcSpl_MemSetW16(sampleValVec, 0, (LPC_FILTERORDER));
+
+  /* Run MA filter + AR filter */
+  WebRtcSpl_FilterMAFastQ12(
+      sampleVal, sampleMa,
+      numerator, LPC_FILTERORDER+1, (WebRtc_Word16)(len + LPC_FILTERORDER));
+  WebRtcSpl_MemSetW16(&sampleMa[len + LPC_FILTERORDER], 0, (len - LPC_FILTERORDER)); /* zero-pad the tail before AR filtering */
+  WebRtcSpl_FilterARFastQ12(
+      sampleMa, sampleAr,
+      syntDenum, LPC_FILTERORDER+1, (WebRtc_Word16)(2*len));
+
+  tmp1 = &sampleAr[len-1];
+  tmp2 = &sampleAr[2*len-1];
+  tmp3 = Out_fix;
+  for(k=0;k<len;k++){ /* time-reverse and add the two halves of the convolution */
+    (*tmp3) = (*tmp1) + (*tmp2);
+    tmp1--;
+    tmp2--;
+    tmp3++;
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/state_construct.h b/trunk/src/modules/audio_coding/codecs/ilbc/state_construct.h
new file mode 100644
index 0000000..465699b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/state_construct.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateConstruct.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_CONSTRUCT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_CONSTRUCT_H_
+
+/*----------------------------------------------------------------*
+ *  Generate the start state from the quantized indexes
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateConstruct(
+    WebRtc_Word16 idxForMax,   /* (i) 6-bit index for the quantization of
+                                           max amplitude */
+    WebRtc_Word16 *idxVec,   /* (i) vector of quantization indexes */
+    WebRtc_Word16 *syntDenum,  /* (i) synthesis filter denominator */
+    WebRtc_Word16 *Out_fix,  /* (o) the decoded state vector */
+    WebRtc_Word16 len    /* (i) length of a state vector */
+                                  );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/state_search.c b/trunk/src/modules/audio_coding/codecs/ilbc/state_search.c
new file mode 100644
index 0000000..824a0ba
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/state_search.c
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateSearch.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "abs_quant.h"
+
+/*----------------------------------------------------------------*
+ *  encoding of start state
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax
+                               and idxVec, input state_first) */
+    WebRtc_Word16 *residual,   /* (i) target residual vector */
+    WebRtc_Word16 *syntDenum,  /* (i) lpc synthesis filter */
+    WebRtc_Word16 *weightDenum  /* (i) weighting filter denominator */
+                               ) {
+  WebRtc_Word16 k, index;
+  WebRtc_Word16 maxVal;
+  WebRtc_Word16 scale, shift;
+  WebRtc_Word32 maxValsq;
+  WebRtc_Word16 scaleRes;
+  WebRtc_Word16 max;
+  int i;
+  /* Stack based */
+  WebRtc_Word16 numerator[1+LPC_FILTERORDER];
+  WebRtc_Word16 residualLongVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 sampleMa[2*STATE_SHORT_LEN_30MS];
+  WebRtc_Word16 *residualLong = &residualLongVec[LPC_FILTERORDER];
+  WebRtc_Word16 *sampleAr = residualLong; /* aliases residualLong: the AR output overwrites the input buffer */
+
+  /* Scale to maximum 12 bits to avoid saturation in circular convolution filter */
+  max = WebRtcSpl_MaxAbsValueW16(residual, iLBCenc_inst->state_short_len);
+  scaleRes = WebRtcSpl_GetSizeInBits(max)-12;
+  scaleRes = WEBRTC_SPL_MAX(0, scaleRes);
+  /* Set up the filter coefficients for the circular convolution (time-reversed, downscaled syntDenum) */
+  for (i=0; i<LPC_FILTERORDER+1; i++) {
+    numerator[i] = (syntDenum[LPC_FILTERORDER-i]>>scaleRes);
+  }
+
+  /* Copy the residual to a temporary buffer that we can filter
+   * and set the remaining samples to zero.
+   */
+  WEBRTC_SPL_MEMCPY_W16(residualLong, residual, iLBCenc_inst->state_short_len);
+  WebRtcSpl_MemSetW16(residualLong + iLBCenc_inst->state_short_len, 0, iLBCenc_inst->state_short_len);
+
+  /* Run the Zero-Pole filter (circular convolution) */
+  WebRtcSpl_MemSetW16(residualLongVec, 0, LPC_FILTERORDER);
+  WebRtcSpl_FilterMAFastQ12(
+      residualLong, sampleMa,
+      numerator, LPC_FILTERORDER+1, (WebRtc_Word16)(iLBCenc_inst->state_short_len + LPC_FILTERORDER));
+  WebRtcSpl_MemSetW16(&sampleMa[iLBCenc_inst->state_short_len + LPC_FILTERORDER], 0, iLBCenc_inst->state_short_len - LPC_FILTERORDER); /* zero-pad the tail before AR filtering */
+
+  WebRtcSpl_FilterARFastQ12(
+      sampleMa, sampleAr,
+      syntDenum, LPC_FILTERORDER+1, (WebRtc_Word16)(2*iLBCenc_inst->state_short_len));
+
+  for(k=0;k<iLBCenc_inst->state_short_len;k++){ /* fold the two halves together */
+    sampleAr[k] += sampleAr[k+iLBCenc_inst->state_short_len];
+  }
+
+  /* Find maximum absolute value in the vector */
+  maxVal=WebRtcSpl_MaxAbsValueW16(sampleAr, iLBCenc_inst->state_short_len);
+
+  /* Find the best index */
+
+  if ((((WebRtc_Word32)maxVal)<<scaleRes)<23170) { /* 23170 ~ sqrt(2^30): square would overflow above this */
+    maxValsq=((WebRtc_Word32)maxVal*maxVal)<<(2+2*scaleRes);
+  } else {
+    maxValsq=(WebRtc_Word32)WEBRTC_SPL_WORD32_MAX;
+  }
+
+  index=0;
+  for (i=0;i<63;i++) {
+
+    if (maxValsq>=WebRtcIlbcfix_kChooseFrgQuant[i]) {
+      index=i+1;
+    } else {
+      i=63; /* terminates the search loop (in place of a break) */
+    }
+  }
+  iLBC_encbits->idxForMax=index;
+
+  /* Rescale the vector before quantization */
+  scale=WebRtcIlbcfix_kScale[index];
+
+  if (index<27) { /* scale table is in Q16, fout[] is in Q(-1) and we want the result to be in Q11 */
+    shift=4;
+  } else { /* scale table is in Q21, fout[] is in Q(-1) and we want the result to be in Q11 */
+    shift=9;
+  }
+
+  /* Set up vectors for AbsQuant and rescale it with the scale factor */
+  WebRtcSpl_ScaleVectorWithSat(sampleAr, sampleAr, scale,
+                              iLBCenc_inst->state_short_len, (WebRtc_Word16)(shift-scaleRes));
+
+  /* Quantize the values in fout[] */
+  WebRtcIlbcfix_AbsQuant(iLBCenc_inst, iLBC_encbits, sampleAr, weightDenum);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/state_search.h b/trunk/src/modules/audio_coding/codecs/ilbc/state_search.h
new file mode 100644
index 0000000..8b7f298
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/state_search.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateSearch.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  encoding of start state
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax
+                               and idxVec, input state_first) */
+    WebRtc_Word16 *residual,   /* (i) target residual vector */
+    WebRtc_Word16 *syntDenum,  /* (i) lpc synthesis filter */
+    WebRtc_Word16 *weightDenum  /* (i) weighting filter denominator */
+                               );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/swap_bytes.c b/trunk/src/modules/audio_coding/codecs/ilbc/swap_bytes.c
new file mode 100644
index 0000000..61b8b7b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/swap_bytes.c
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SwapBytes.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Swap bytes (to simplify operations on Little Endian machines)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SwapBytes(
+    WebRtc_UWord16 *sequence,   /* (i/o) the sequence to swap */
+    WebRtc_Word16 wordLength   /* (i) number of WebRtc_UWord16 to swap */
+                              ) {
+  int k;
+  WebRtc_UWord16 temp=0;
+  for( k=wordLength; k>0; k-- ) {
+    temp = (*sequence >> 8)|(*sequence << 8);
+    *sequence++ = temp;
+    //*sequence++ = (*sequence >> 8) | (*sequence << 8);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/swap_bytes.h b/trunk/src/modules/audio_coding/codecs/ilbc/swap_bytes.h
new file mode 100644
index 0000000..2f2b3eb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/swap_bytes.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SwapBytes.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Swap bytes (to simplify operations on Little Endian machines)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SwapBytes(
+    WebRtc_UWord16 *sequence,   /* (i/o) the sequence to swap */
+    WebRtc_Word16 wordLength   /* (i) number of WebRtc_UWord16 to swap */
+                              );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_test.c b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
new file mode 100644
index 0000000..d2c9ba5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
@@ -0,0 +1,225 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+	iLBC Speech Coder ANSI-C Source Code
+
+        iLBC_test.c
+
+******************************************************************/
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include "ilbc.h"
+
+/*---------------------------------------------------------------*
+ *  Main program to test iLBC encoding and decoding
+ *
+ *  Usage:
+ *	  exefile_name.exe <infile> <bytefile> <outfile> <channel>
+ *
+ *    <infile>   : Input file, speech for encoder (16-bit pcm file)
+ *    <bytefile> : Bit stream output from the encoder
+ *    <outfile>  : Output file, decoded speech (16-bit pcm file)
+ *    <channel>  : Bit error file, optional (16-bit)
+ *                     1 - Packet received correctly
+ *                     0 - Packet Lost
+ *
+ *--------------------------------------------------------------*/
+
+#define BLOCKL_MAX			240
+#define ILBCNOOFWORDS_MAX	25
+
+
+int main(int argc, char* argv[])
+{
+
+  FILE *ifileid,*efileid,*ofileid, *cfileid;
+  WebRtc_Word16 data[BLOCKL_MAX];
+  WebRtc_Word16 encoded_data[ILBCNOOFWORDS_MAX], decoded_data[BLOCKL_MAX];
+  int len;
+  short pli, mode;
+  int blockcount = 0;
+  int packetlosscount = 0;
+  int frameLen;
+  WebRtc_Word16 speechType;
+  iLBC_encinst_t *Enc_Inst;
+  iLBC_decinst_t *Dec_Inst;
+
+#ifdef __ILBC_WITH_40BITACC
+  /* Doublecheck that long long exists */
+  if (sizeof(long)>=sizeof(long long)) {
+    fprintf(stderr, "40-bit simulation is not be supported on this platform\n");
+    exit(0);
+  }
+#endif
+
+  /* get arguments and open files */
+
+  if ((argc!=5) && (argc!=6)) {
+    fprintf(stderr,
+            "\n*-----------------------------------------------*\n");
+    fprintf(stderr,
+            "   %s <20,30> input encoded decoded (channel)\n\n",
+            argv[0]);
+    fprintf(stderr,
+            "   mode    : Frame size for the encoding/decoding\n");
+    fprintf(stderr,
+            "                 20 - 20 ms\n");
+    fprintf(stderr,
+            "                 30 - 30 ms\n");
+    fprintf(stderr,
+            "   input   : Speech for encoder (16-bit pcm file)\n");
+    fprintf(stderr,
+            "   encoded : Encoded bit stream\n");
+    fprintf(stderr,
+            "   decoded : Decoded speech (16-bit pcm file)\n");
+    fprintf(stderr,
+            "   channel : Packet loss pattern, optional (16-bit)\n");
+    fprintf(stderr,
+            "                  1 - Packet received correctly\n");
+    fprintf(stderr,
+            "                  0 - Packet Lost\n");
+    fprintf(stderr,
+            "*-----------------------------------------------*\n\n");
+    exit(1);
+  }
+  mode=atoi(argv[1]);
+  if (mode != 20 && mode != 30) {
+    fprintf(stderr,"Wrong mode %s, must be 20, or 30\n",
+            argv[1]);
+    exit(2);
+  }
+  if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+    exit(2);}
+  if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open encoded file file %s\n",
+            argv[3]); exit(1);}
+  if ( (ofileid=fopen(argv[4],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open decoded file %s\n",
+            argv[4]); exit(1);}
+  if (argc==6) {
+    if( (cfileid=fopen(argv[5],"rb")) == NULL) {
+      fprintf(stderr, "Cannot open channel file %s\n",
+              argv[5]);
+      exit(1);
+    }
+  } else {
+    cfileid=NULL;
+  }
+
+  /* print info */
+
+  fprintf(stderr, "\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*      iLBC test program                            *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,"\nMode           : %2d ms\n", mode);
+  fprintf(stderr,"Input file     : %s\n", argv[2]);
+  fprintf(stderr,"Encoded file   : %s\n", argv[3]);
+  fprintf(stderr,"Output file    : %s\n", argv[4]);
+  if (argc==6) {
+    fprintf(stderr,"Channel file   : %s\n", argv[5]);
+  }
+  fprintf(stderr,"\n");
+
+  /* Create structs */
+  WebRtcIlbcfix_EncoderCreate(&Enc_Inst);
+  WebRtcIlbcfix_DecoderCreate(&Dec_Inst);
+
+
+  /* Initialization */
+
+  WebRtcIlbcfix_EncoderInit(Enc_Inst, mode);
+  WebRtcIlbcfix_DecoderInit(Dec_Inst, mode);
+  frameLen = mode*8;
+
+  /* loop over input blocks */
+
+  while (((WebRtc_Word16)fread(data,sizeof(WebRtc_Word16),frameLen,ifileid))==
+         frameLen) {
+
+    blockcount++;
+
+    /* encoding */
+
+    fprintf(stderr, "--- Encoding block %i --- ",blockcount);
+    len=WebRtcIlbcfix_Encode(Enc_Inst, data, (WebRtc_Word16)frameLen, encoded_data);
+    fprintf(stderr, "\r");
+
+    /* write byte file */
+
+    fwrite(encoded_data, sizeof(WebRtc_Word16), ((len+1)/sizeof(WebRtc_Word16)), efileid);
+
+    /* get channel data if provided */
+    if (argc==6) {
+      if (fread(&pli, sizeof(WebRtc_Word16), 1, cfileid)) {
+        if ((pli!=0)&&(pli!=1)) {
+          fprintf(stderr, "Error in channel file\n");
+          exit(0);
+        }
+        if (pli==0) {
+          /* Packet loss -> remove info from frame */
+          memset(encoded_data, 0,
+                 sizeof(WebRtc_Word16)*ILBCNOOFWORDS_MAX);
+          packetlosscount++;
+        }
+      } else {
+        fprintf(stderr, "Error. Channel file too short\n");
+        exit(0);
+      }
+    } else {
+      pli=1;
+    }
+
+    /* decoding */
+
+    fprintf(stderr, "--- Decoding block %i --- ",blockcount);
+    if (pli==1) {
+      len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data,
+                               (WebRtc_Word16)len, decoded_data,&speechType);
+    } else {
+      len=WebRtcIlbcfix_DecodePlc(Dec_Inst, decoded_data, 1);
+    }
+    fprintf(stderr, "\r");
+
+    /* write output file */
+
+    fwrite(decoded_data,sizeof(WebRtc_Word16),len,ofileid);
+  }
+
+  /* close files */
+
+  fclose(ifileid);  fclose(efileid); fclose(ofileid);
+  if (argc==6) {
+    fclose(cfileid);
+  }
+
+  /* Free structs */
+  WebRtcIlbcfix_EncoderFree(Enc_Inst);
+  WebRtcIlbcfix_DecoderFree(Dec_Inst);
+
+
+  printf("\nDone with simulation\n\n");
+
+  return(0);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
new file mode 100644
index 0000000..ee5e484
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
@@ -0,0 +1,207 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+iLBC Speech Coder ANSI-C Source Code
+
+iLBC_test.c
+
+******************************************************************/
+
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include "ilbc.h"
+
+//#define JUNK_DATA
+#ifdef JUNK_DATA
+#define SEED_FILE "randseed.txt"
+#endif
+
+
+/*----------------------------------------------------------------*
+*  Main program to test iLBC encoding and decoding
+*
+*  Usage:
+*		exefile_name.exe <infile> <bytefile> <outfile>
+*
+*---------------------------------------------------------------*/
+
+int main(int argc, char* argv[])
+{
+  FILE *ifileid,*efileid,*ofileid, *chfileid;
+  short encoded_data[55], data[240], speechType;
+  short len, mode, pli;
+  int blockcount = 0;
+
+  iLBC_encinst_t *Enc_Inst;
+  iLBC_decinst_t *Dec_Inst;
+#ifdef JUNK_DATA
+  int i;
+  FILE *seedfile;
+  unsigned int random_seed = (unsigned int) time(NULL);//1196764538
+#endif
+
+  /* Create structs */
+  WebRtcIlbcfix_EncoderCreate(&Enc_Inst);
+  WebRtcIlbcfix_DecoderCreate(&Dec_Inst);
+
+  /* get arguments and open files */
+
+  if (argc != 6 ) {
+    fprintf(stderr, "%s mode inputfile bytefile outputfile channelfile\n",
+            argv[0]);
+    fprintf(stderr, "Example:\n");
+    fprintf(stderr, "%s <30,20> in.pcm byte.dat out.pcm T30.0.dat\n", argv[0]);
+    exit(1);
+  }
+  mode=atoi(argv[1]);
+  if (mode != 20 && mode != 30) {
+    fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", argv[1]);
+    exit(2);
+  }
+  if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+    exit(2);}
+  if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open channelfile file %s\n",
+            argv[3]); exit(3);}
+  if( (ofileid=fopen(argv[4],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open output file %s\n",
+            argv[4]); exit(3);}
+  if ( (chfileid=fopen(argv[5],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open channel file file %s\n", argv[5]);
+    exit(2);
+  }
+  /* print info */
+  fprintf(stderr, "\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*      iLBCtest                                     *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+		"*---------------------------------------------------*\n");
+#ifdef SPLIT_10MS
+  fprintf(stderr,"\n10ms split with raw mode: %2d ms\n", mode);
+#else
+  fprintf(stderr,"\nMode          : %2d ms\n", mode);
+#endif
+  fprintf(stderr,"\nInput file    : %s\n", argv[2]);
+  fprintf(stderr,"Coded file    : %s\n", argv[3]);
+  fprintf(stderr,"Output file   : %s\n\n", argv[4]);
+  fprintf(stderr,"Channel file  : %s\n\n", argv[5]);
+
+#ifdef JUNK_DATA
+  srand(random_seed);
+
+  if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+    fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+  }
+  else {
+    fprintf(seedfile, "%u\n", random_seed);
+    fclose(seedfile);
+  }
+#endif
+
+  /* Initialization */
+  WebRtcIlbcfix_EncoderInit(Enc_Inst, mode);
+  WebRtcIlbcfix_DecoderInit(Dec_Inst, mode);
+
+  /* loop over input blocks */
+#ifdef SPLIT_10MS
+  while(fread(data, sizeof(short), 80, ifileid) == 80) {
+#else
+  while((short)fread(data,sizeof(short),(mode<<3),ifileid)==(mode<<3)) {
+#endif
+    blockcount++;
+
+    /* encoding */
+    fprintf(stderr, "--- Encoding block %i --- ",blockcount);
+#ifdef SPLIT_10MS
+    len=WebRtcIlbcfix_Encode(Enc_Inst, data, 80, encoded_data);
+#else
+    len=WebRtcIlbcfix_Encode(Enc_Inst, data, (short)(mode<<3), encoded_data);
+#endif
+    fprintf(stderr, "\r");
+
+#ifdef JUNK_DATA
+    for ( i = 0; i < len; i++) {
+      encoded_data[i] = (short) (encoded_data[i] + (short) rand());
+    }
+#endif
+    /* write byte file */
+    if(len != 0){ //len may be 0 in 10ms split case
+      fwrite(encoded_data,1,len,efileid);
+    }
+
+    if(len != 0){ //len may be 0 in 10ms split case
+      /* get channel data if provided */
+      if (argc==6) {
+        if (fread(&pli, sizeof(WebRtc_Word16), 1, chfileid)) {
+          if ((pli!=0)&&(pli!=1)) {
+            fprintf(stderr, "Error in channel file\n");
+            exit(0);
+          }
+          if (pli==0) {
+            /* Packet loss -> remove info from frame */
+            memset(encoded_data, 0, sizeof(WebRtc_Word16)*25);
+          }
+        } else {
+          fprintf(stderr, "Error. Channel file too short\n");
+          exit(0);
+        }
+      } else {
+        pli=1;
+      }
+
+      /* decoding */
+      fprintf(stderr, "--- Decoding block %i --- ",blockcount);
+      if (pli==1) {
+        len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, len, data, &speechType);
+      } else {
+        len=WebRtcIlbcfix_DecodePlc(Dec_Inst, data, 1);
+      }
+      fprintf(stderr, "\r");
+
+      /* write output file */
+      fwrite(data,sizeof(short),len,ofileid);
+    }
+  }
+
+#ifdef JUNK_DATA
+  if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+    fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+  }
+  else {
+    fprintf(seedfile, "ok\n\n");
+    fclose(seedfile);
+  }
+#endif
+
+  /* free structs */
+  WebRtcIlbcfix_EncoderFree(Enc_Inst);
+  WebRtcIlbcfix_DecoderFree(Dec_Inst);
+
+  /* close files */
+  fclose(ifileid);
+  fclose(efileid);
+  fclose(ofileid);
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
new file mode 100644
index 0000000..f67945e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
@@ -0,0 +1,343 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+	iLBC Speech Coder ANSI-C Source Code
+
+        iLBC_test.c
+
+******************************************************************/
+
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "defines.h"
+#include "nit_encode.h"
+#include "encode.h"
+#include "init_decode.h"
+#include "decode.h"
+#include "constants.h"
+#include "ilbc.h"
+
+#define ILBCNOOFWORDS_MAX (NO_OF_BYTES_30MS)/2
+
+/* Runtime statistics */
+#include <time.h>
+/* #define CLOCKS_PER_SEC  1000 */
+
+/*----------------------------------------------------------------*
+ *  Encoder interface function
+ *---------------------------------------------------------------*/
+
+short encode(                         /* (o) Number of bytes encoded */
+    iLBC_Enc_Inst_t *iLBCenc_inst,    /* (i/o) Encoder instance */
+    WebRtc_Word16 *encoded_data,      /* (o) The encoded bytes */
+    WebRtc_Word16 *data               /* (i) The signal block to encode */
+                                                        ){
+
+  /* do the actual encoding */
+  WebRtcIlbcfix_Encode((WebRtc_UWord16 *)encoded_data, data, iLBCenc_inst);
+
+  return (iLBCenc_inst->no_of_bytes);
+}
+
+/*----------------------------------------------------------------*
+ *  Decoder interface function
+ *---------------------------------------------------------------*/
+
+short decode( /* (o) Number of decoded samples */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) Decoder instance */
+    short *decoded_data, /* (o) Decoded signal block */
+    short *encoded_data, /* (i) Encoded bytes */
+    short mode           /* (i) 0=PL, 1=Normal */
+              ){
+
+  /* check if mode is valid */
+
+  if (mode<0 || mode>1) {
+    printf("\nERROR - Wrong mode - 0, 1 allowed\n"); exit(3);}
+
+  /* do actual decoding of block */
+
+  WebRtcIlbcfix_Decode(decoded_data, (WebRtc_UWord16 *)encoded_data,
+                       iLBCdec_inst, mode);
+
+  return (iLBCdec_inst->blockl);
+}
+
+/*----------------------------------------------------------------*
+ *  Main program to test iLBC encoding and decoding
+ *
+ *  Usage:
+ *		exefile_name.exe <infile> <bytefile> <outfile> <channelfile>
+ *
+ *---------------------------------------------------------------*/
+
+#define MAXFRAMES   10000
+#define MAXFILELEN (BLOCKL_MAX*MAXFRAMES)
+
+int main(int argc, char* argv[])
+{
+
+  /* Runtime statistics */
+
+  float starttime1, starttime2;
+  float runtime1, runtime2;
+  float outtime;
+
+  FILE *ifileid,*efileid,*ofileid, *chfileid;
+  short *inputdata, *encodeddata, *decodeddata;
+  short *channeldata;
+  int blockcount = 0, noOfBlocks=0, i, noOfLostBlocks=0;
+  short mode;
+  iLBC_Enc_Inst_t Enc_Inst;
+  iLBC_Dec_Inst_t Dec_Inst;
+
+  short frameLen;
+  short count;
+#ifdef SPLIT_10MS
+  short size;
+#endif
+
+  inputdata=(short*) malloc(MAXFILELEN*sizeof(short));
+  if (inputdata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    exit(0);
+  }
+  encodeddata=(short*) malloc(ILBCNOOFWORDS_MAX*MAXFRAMES*sizeof(short));
+  if (encodeddata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    free(inputdata);
+    exit(0);
+  }
+  decodeddata=(short*) malloc(MAXFILELEN*sizeof(short));
+  if (decodeddata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    free(inputdata);
+    free(encodeddata);
+    exit(0);
+  }
+  channeldata=(short*) malloc(MAXFRAMES*sizeof(short));
+  if (channeldata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    free(inputdata);
+    free(encodeddata);
+    free(decodeddata);
+    exit(0);
+  }
+
+  /* get arguments and open files */
+
+  if (argc != 6 ) {
+    fprintf(stderr, "%s mode inputfile bytefile outputfile channelfile\n",
+            argv[0]);
+    fprintf(stderr, "Example:\n");
+    fprintf(stderr, "%s <30,20> in.pcm byte.dat out.pcm T30.0.dat\n", argv[0]);
+    exit(1);
+  }
+  mode=atoi(argv[1]);
+  if (mode != 20 && mode != 30) {
+    fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", argv[1]);
+    exit(2);
+  }
+  if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+    exit(2);}
+  if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open channelfile file %s\n",
+            argv[3]); exit(3);}
+  if( (ofileid=fopen(argv[4],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open output file %s\n",
+            argv[4]); exit(3);}
+  if ( (chfileid=fopen(argv[5],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open channel file file %s\n", argv[5]);
+    exit(2);}
+
+
+  /* print info */
+#ifndef PRINT_MIPS
+  fprintf(stderr, "\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*      iLBCtest                                     *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+#ifdef SPLIT_10MS
+  fprintf(stderr,"\n10ms split with raw mode: %2d ms\n", mode);
+#else
+  fprintf(stderr,"\nMode          : %2d ms\n", mode);
+#endif
+  fprintf(stderr,"\nInput file    : %s\n", argv[2]);
+  fprintf(stderr,"Coded file    : %s\n", argv[3]);
+  fprintf(stderr,"Output file   : %s\n\n", argv[4]);
+  fprintf(stderr,"Channel file  : %s\n\n", argv[5]);
+#endif
+
+  /* Initialization */
+
+  WebRtcIlbcfix_EncoderInit(&Enc_Inst, mode);
+  WebRtcIlbcfix_DecoderInit(&Dec_Inst, mode, 1);
+
+  /* extract the input file and channel file */
+
+#ifdef SPLIT_10MS
+  frameLen = (mode==20)? 80:160;
+  fread(Enc_Inst.past_samples, sizeof(short), frameLen, ifileid);
+  Enc_Inst.section = 0;
+
+  while( fread(&inputdata[noOfBlocks*80], sizeof(short),
+               80, ifileid) == 80 ) {
+    noOfBlocks++;
+  }
+
+  noOfBlocks += frameLen/80;
+  frameLen = 80;
+#else
+  frameLen = Enc_Inst.blockl;
+
+  while( fread(&inputdata[noOfBlocks*Enc_Inst.blockl],sizeof(short),
+               Enc_Inst.blockl,ifileid)==(WebRtc_UWord16)Enc_Inst.blockl){
+    noOfBlocks++;
+  }
+#endif
+
+
+  while ((fread(&channeldata[blockcount],sizeof(short), 1,chfileid)==1)
+            && ( blockcount < noOfBlocks/(Enc_Inst.blockl/frameLen) )) {
+    blockcount++;
+  }
+
+  if ( blockcount < noOfBlocks/(Enc_Inst.blockl/frameLen) ) {
+    fprintf(stderr,"Channel file %s is too short\n", argv[4]);
+    free(inputdata);
+    free(encodeddata);
+    free(decodeddata);
+    free(channeldata);
+    exit(0);
+  }
+
+  count=0;
+
+  /* Runtime statistics */
+
+  starttime1 = clock()/(float)CLOCKS_PER_SEC;
+
+  /* Encoding loop */
+#ifdef PRINT_MIPS
+  printf("-1 -1\n");
+#endif
+
+#ifdef SPLIT_10MS
+  /* "Enc_Inst.section != 0" is to make sure we run through full
+     lengths of all vectors for 10ms split mode.
+  */
+  //   while( (count < noOfBlocks) || (Enc_Inst.section != 0) )    {
+  while( count < blockcount * (Enc_Inst.blockl/frameLen) )    {
+
+    encode(&Enc_Inst, &encodeddata[Enc_Inst.no_of_words *
+                                   (count/(Enc_Inst.nsub/2))],
+           &inputdata[frameLen * count] );
+#else
+    while (count < noOfBlocks) {
+      encode( &Enc_Inst, &encodeddata[Enc_Inst.no_of_words * count],
+              &inputdata[frameLen * count] );
+#endif
+
+#ifdef PRINT_MIPS
+      printf("-1 -1\n");
+#endif
+
+      count++;
+    }
+
+    count=0;
+
+    /* Runtime statistics */
+
+    starttime2=clock()/(float)CLOCKS_PER_SEC;
+    runtime1 = (float)(starttime2-starttime1);
+
+    /* Decoding loop */
+
+    while (count < blockcount) {
+      if (channeldata[count]==1) {
+        /* Normal decoding */
+        decode(&Dec_Inst, &decodeddata[count * Dec_Inst.blockl],
+               &encodeddata[Dec_Inst.no_of_words * count], 1);
+      } else if (channeldata[count]==0) {
+        /* PLC */
+        short emptydata[ILBCNOOFWORDS_MAX];
+        memset(emptydata, 0, Dec_Inst.no_of_words*sizeof(short));
+        decode(&Dec_Inst, &decodeddata[count*Dec_Inst.blockl],
+               emptydata, 0);
+        noOfLostBlocks++;
+      } else {
+        printf("Error in channel file (values have to be either 1 or 0)\n");
+        exit(0);
+      }
+#ifdef PRINT_MIPS
+      printf("-1 -1\n");
+#endif
+
+      count++;
+    }
+
+    /* Runtime statistics */
+
+    runtime2 = (float)(clock()/(float)CLOCKS_PER_SEC-starttime2);
+
+    outtime = (float)((float)blockcount*
+                      (float)mode/1000.0);
+
+#ifndef PRINT_MIPS
+    printf("\nLength of speech file: %.1f s\n", outtime);
+    printf("Lost frames          : %.1f%%\n\n", 100*(float)noOfLostBlocks/(float)blockcount);
+
+    printf("Time to run iLBC_encode+iLBC_decode:");
+    printf(" %.1f s (%.1f%% of realtime)\n", runtime1+runtime2,
+           (100*(runtime1+runtime2)/outtime));
+
+    printf("Time in iLBC_encode                :");
+    printf(" %.1f s (%.1f%% of total runtime)\n",
+           runtime1, 100.0*runtime1/(runtime1+runtime2));
+
+    printf("Time in iLBC_decode                :");
+    printf(" %.1f s (%.1f%% of total runtime)\n\n",
+           runtime2, 100.0*runtime2/(runtime1+runtime2));
+#endif
+
+    /* Write data to files */
+    for (i=0; i<blockcount; i++) {
+      fwrite(&encodeddata[i*Enc_Inst.no_of_words], sizeof(short),
+             Enc_Inst.no_of_words, efileid);
+    }
+    for (i=0;i<blockcount;i++) {
+      fwrite(&decodeddata[i*Enc_Inst.blockl],sizeof(short),Enc_Inst.blockl,ofileid);
+    }
+
+    /* return memory and close files */
+
+    free(inputdata);
+    free(encodeddata);
+    free(decodeddata);
+    free(channeldata);
+    fclose(ifileid);  fclose(efileid); fclose(ofileid);
+    return(0);
+  }
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBCtestscript.txt b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBCtestscript.txt
new file mode 100644
index 0000000..a52f7c1
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/test/iLBCtestscript.txt
@@ -0,0 +1,73 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+#
+# This script can be used to verify the bit exactness of iLBC fixed-point version 1.0.6
+#
+
+INP=../../../../../../../test/data/audio_coding
+EXEP=../../../../../../../out/Release
+OUTP=./GeneratedFiles
+mkdir ./GeneratedFiles
+
+$EXEP/iLBCtest 20 $INP/F00.INP $OUTP/F00.BIT20 $OUTP/F00.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F01.INP $OUTP/F01.BIT20 $OUTP/F01.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F02.INP $OUTP/F02.BIT20 $OUTP/F02.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F03.INP $OUTP/F03.BIT20 $OUTP/F03.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F04.INP $OUTP/F04.BIT20 $OUTP/F04.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F05.INP $OUTP/F05.BIT20 $OUTP/F05.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F06.INP $OUTP/F06.BIT20 $OUTP/F06.OUT20 $INP/clean.chn
+
+$EXEP/iLBCtest 30 $INP/F00.INP $OUTP/F00.BIT30 $OUTP/F00.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F01.INP $OUTP/F01.BIT30 $OUTP/F01.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F02.INP $OUTP/F02.BIT30 $OUTP/F02.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F03.INP $OUTP/F03.BIT30 $OUTP/F03.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F04.INP $OUTP/F04.BIT30 $OUTP/F04.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F05.INP $OUTP/F05.BIT30 $OUTP/F05.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F06.INP $OUTP/F06.BIT30 $OUTP/F06.OUT30 $INP/clean.chn
+
+$EXEP/iLBCtest 20 $INP/F00.INP $OUTP/F00.BIT20 $OUTP/F00_tlm10.OUT20 $INP/tlm10.chn
+$EXEP/iLBCtest 20 $INP/F01.INP $OUTP/F01.BIT20 $OUTP/F01_tlm10.OUT20 $INP/tlm10.chn
+$EXEP/iLBCtest 20 $INP/F02.INP $OUTP/F02.BIT20 $OUTP/F02_tlm10.OUT20 $INP/tlm10.chn
+$EXEP/iLBCtest 30 $INP/F00.INP $OUTP/F00.BIT30 $OUTP/F00_tlm10.OUT30 $INP/tlm10.chn
+$EXEP/iLBCtest 30 $INP/F01.INP $OUTP/F01.BIT30 $OUTP/F01_tlm10.OUT30 $INP/tlm10.chn
+$EXEP/iLBCtest 30 $INP/F02.INP $OUTP/F02.BIT30 $OUTP/F02_tlm10.OUT30 $INP/tlm10.chn
+
+
+diff $OUTP/F00.BIT20 $INP/F00.BIT20
+diff $OUTP/F01.BIT20 $INP/F01.BIT20
+diff $OUTP/F02.BIT20 $INP/F02.BIT20
+diff $OUTP/F03.BIT20 $INP/F03.BIT20
+diff $OUTP/F04.BIT20 $INP/F04.BIT20
+diff $OUTP/F05.BIT20 $INP/F05.BIT20
+diff $OUTP/F06.BIT20 $INP/F06.BIT20
+diff $OUTP/F00.OUT20 $INP/F00.OUT20
+diff $OUTP/F01.OUT20 $INP/F01.OUT20
+diff $OUTP/F02.OUT20 $INP/F02.OUT20
+diff $OUTP/F03.OUT20 $INP/F03.OUT20
+diff $OUTP/F04.OUT20 $INP/F04.OUT20
+diff $OUTP/F05.OUT20 $INP/F05.OUT20
+diff $OUTP/F06.OUT20 $INP/F06.OUT20
+
+diff $OUTP/F00.BIT30 $INP/F00.BIT30
+diff $OUTP/F01.BIT30 $INP/F01.BIT30
+diff $OUTP/F02.BIT30 $INP/F02.BIT30
+diff $OUTP/F03.BIT30 $INP/F03.BIT30
+diff $OUTP/F04.BIT30 $INP/F04.BIT30
+diff $OUTP/F05.BIT30 $INP/F05.BIT30
+diff $OUTP/F06.BIT30 $INP/F06.BIT30
+diff $OUTP/F00.OUT30 $INP/F00.OUT30
+diff $OUTP/F01.OUT30 $INP/F01.OUT30
+diff $OUTP/F02.OUT30 $INP/F02.OUT30
+diff $OUTP/F03.OUT30 $INP/F03.OUT30
+diff $OUTP/F04.OUT30 $INP/F04.OUT30
+diff $OUTP/F05.OUT30 $INP/F05.OUT30
+diff $OUTP/F06.OUT30 $INP/F06.OUT30
+
+diff $OUTP/F00_tlm10.OUT20 $INP/F00_tlm10.OUT20
+diff $OUTP/F01_tlm10.OUT20 $INP/F01_tlm10.OUT20
+diff $OUTP/F02_tlm10.OUT20 $INP/F02_tlm10.OUT20
+diff $OUTP/F00_tlm10.OUT30 $INP/F00_tlm10.OUT30
+diff $OUTP/F01_tlm10.OUT30 $INP/F01_tlm10.OUT30
+diff $OUTP/F02_tlm10.OUT30 $INP/F02_tlm10.OUT30
+
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/unpack_bits.c b/trunk/src/modules/audio_coding/codecs/ilbc/unpack_bits.c
new file mode 100644
index 0000000..6c883a7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/unpack_bits.c
@@ -0,0 +1,239 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_UnpackBits.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  unpacking of bits from bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
+    WebRtc_UWord16 *bitstream,   /* (i) The packetized bitstream */
+    iLBC_bits *enc_bits,  /* (o) Parameters from bitstream */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                                        ) {
+  WebRtc_UWord16 *bitstreamPtr;
+  int i, k;
+  WebRtc_Word16 *tmpPtr;
+
+  bitstreamPtr=bitstream;
+
+  /* First WebRtc_Word16 */
+  enc_bits->lsf[0]  =  (*bitstreamPtr)>>10;       /* Bit 0..5  */
+  enc_bits->lsf[1]  = ((*bitstreamPtr)>>3)&0x7F;      /* Bit 6..12 */
+  enc_bits->lsf[2]  = ((*bitstreamPtr)&0x7)<<4;      /* Bit 13..15 */
+  bitstreamPtr++;
+  /* Second WebRtc_Word16 */
+  enc_bits->lsf[2] |= ((*bitstreamPtr)>>12)&0xF;      /* Bit 0..3  */
+
+  if (mode==20) {
+    enc_bits->startIdx             = ((*bitstreamPtr)>>10)&0x3;  /* Bit 4..5  */
+    enc_bits->state_first          = ((*bitstreamPtr)>>9)&0x1;  /* Bit 6  */
+    enc_bits->idxForMax            = ((*bitstreamPtr)>>3)&0x3F;  /* Bit 7..12 */
+    enc_bits->cb_index[0]          = ((*bitstreamPtr)&0x7)<<4;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>12)&0xE;  /* Bit 0..2  */
+    enc_bits->gain_index[0]        = ((*bitstreamPtr)>>8)&0x18;  /* Bit 3..4  */
+    enc_bits->gain_index[1]        = ((*bitstreamPtr)>>7)&0x8;  /* Bit 5  */
+    enc_bits->cb_index[3]          = ((*bitstreamPtr)>>2)&0xFE;  /* Bit 6..12 */
+    enc_bits->gain_index[3]        = ((*bitstreamPtr)<<2)&0x10;  /* Bit 13  */
+    enc_bits->gain_index[4]        = ((*bitstreamPtr)<<2)&0x8;  /* Bit 14  */
+    enc_bits->gain_index[6]        = ((*bitstreamPtr)<<4)&0x10;  /* Bit 15  */
+  } else { /* mode==30 */
+    enc_bits->lsf[3]               = ((*bitstreamPtr)>>6)&0x3F;  /* Bit 4..9  */
+    enc_bits->lsf[4]               = ((*bitstreamPtr)<<1)&0x7E;  /* Bit 10..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    enc_bits->lsf[4]              |= ((*bitstreamPtr)>>15)&0x1;  /* Bit 0  */
+    enc_bits->lsf[5]               = ((*bitstreamPtr)>>8)&0x7F;  /* Bit 1..7  */
+    enc_bits->startIdx             = ((*bitstreamPtr)>>5)&0x7;  /* Bit 8..10 */
+    enc_bits->state_first          = ((*bitstreamPtr)>>4)&0x1;  /* Bit 11  */
+    enc_bits->idxForMax            = ((*bitstreamPtr)<<2)&0x3C;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 4:th WebRtc_Word16 */
+    enc_bits->idxForMax           |= ((*bitstreamPtr)>>14)&0x3;  /* Bit 0..1  */
+    enc_bits->cb_index[0]        = ((*bitstreamPtr)>>7)&0x78;  /* Bit 2..5  */
+    enc_bits->gain_index[0]        = ((*bitstreamPtr)>>5)&0x10;  /* Bit 6  */
+    enc_bits->gain_index[1]        = ((*bitstreamPtr)>>5)&0x8;  /* Bit 7  */
+    enc_bits->cb_index[3]          = ((*bitstreamPtr))&0xFC;  /* Bit 8..13 */
+    enc_bits->gain_index[3]        = ((*bitstreamPtr)<<3)&0x10;  /* Bit 14  */
+    enc_bits->gain_index[4]        = ((*bitstreamPtr)<<3)&0x8;  /* Bit 15  */
+  }
+  /* Class 2 bits of ULP */
+  /* 4:th to 6:th WebRtc_Word16 for 20 ms case
+     5:th to 7:th WebRtc_Word16 for 30 ms case */
+  bitstreamPtr++;
+  tmpPtr=enc_bits->idxVec;
+  for (k=0; k<3; k++) {
+    for (i=15; i>=0; i--) {
+      (*tmpPtr)                  = (((*bitstreamPtr)>>i)<<2)&0x4;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 7:th WebRtc_Word16 */
+    for (i=15; i>6; i--) {
+      (*tmpPtr)                  = (((*bitstreamPtr)>>i)<<2)&0x4;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr)>>4)&0x4; /* Bit 9  */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>2)&0xC; /* Bit 10..11 */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)>>1)&0x4; /* Bit 12  */
+    enc_bits->gain_index[6]       |= ((*bitstreamPtr)<<1)&0x8; /* Bit 13  */
+    enc_bits->gain_index[7]        = ((*bitstreamPtr)<<2)&0xC; /* Bit 14..15 */
+
+  } else { /* mode==30 */
+    /* 8:th WebRtc_Word16 */
+    for (i=15; i>5; i--) {
+      (*tmpPtr)                  = (((*bitstreamPtr)>>i)<<2)&0x4;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>3)&0x6; /* Bit 10..11 */
+    enc_bits->gain_index[0]       |= ((*bitstreamPtr))&0x8;  /* Bit 12  */
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr))&0x4;  /* Bit 13  */
+    enc_bits->cb_index[3]         |= ((*bitstreamPtr))&0x2;  /* Bit 14  */
+    enc_bits->cb_index[6]          = ((*bitstreamPtr)<<7)&0x80; /* Bit 15  */
+    bitstreamPtr++;
+    /* 9:th WebRtc_Word16 */
+    enc_bits->cb_index[6]         |= ((*bitstreamPtr)>>9)&0x7E; /* Bit 0..5  */
+    enc_bits->cb_index[9]          = ((*bitstreamPtr)>>2)&0xFE; /* Bit 6..12 */
+    enc_bits->cb_index[12]         = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 10:th WebRtc_Word16 */
+    enc_bits->cb_index[12]         |= ((*bitstreamPtr)>>11)&0x1E;/* Bit 0..3 */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>8)&0xC; /* Bit 4..5  */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)>>7)&0x6; /* Bit 6..7  */
+    enc_bits->gain_index[6]        = ((*bitstreamPtr)>>3)&0x18; /* Bit 8..9  */
+    enc_bits->gain_index[7]        = ((*bitstreamPtr)>>2)&0xC; /* Bit 10..11 */
+    enc_bits->gain_index[9]        = ((*bitstreamPtr)<<1)&0x10; /* Bit 12  */
+    enc_bits->gain_index[10]       = ((*bitstreamPtr)<<1)&0x8; /* Bit 13  */
+    enc_bits->gain_index[12]       = ((*bitstreamPtr)<<3)&0x10; /* Bit 14  */
+    enc_bits->gain_index[13]       = ((*bitstreamPtr)<<3)&0x8; /* Bit 15  */
+  }
+  bitstreamPtr++;
+  /* Class 3 bits of ULP */
+  /*  8:th to 14:th WebRtc_Word16 for 20 ms case
+      11:th to 17:th WebRtc_Word16 for 30 ms case */
+  tmpPtr=enc_bits->idxVec;
+  for (k=0; k<7; k++) {
+    for (i=14; i>=0; i-=2) {
+      (*tmpPtr)                 |= ((*bitstreamPtr)>>i)&0x3; /* Bit 15-i..14-i*/
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 15:th WebRtc_Word16 */
+    enc_bits->idxVec[56]          |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>13)&0x1; /* Bit 2  */
+    enc_bits->cb_index[1]          = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9  */
+    enc_bits->cb_index[2]          = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 16:th WebRtc_Word16 */
+    enc_bits->cb_index[2]         |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0  */
+    enc_bits->gain_index[0]       |= ((*bitstreamPtr)>>12)&0x7; /* Bit 1..3  */
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr)>>10)&0x3; /* Bit 4..5  */
+    enc_bits->gain_index[2]        = ((*bitstreamPtr)>>7)&0x7; /* Bit 6..8  */
+    enc_bits->cb_index[3]         |= ((*bitstreamPtr)>>6)&0x1; /* Bit 9  */
+    enc_bits->cb_index[4]          = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 17:th WebRtc_Word16 */
+    enc_bits->cb_index[4]         |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0  */
+    enc_bits->cb_index[5]          = ((*bitstreamPtr)>>8)&0x7F; /* Bit 1..7  */
+    enc_bits->cb_index[6]          = ((*bitstreamPtr))&0xFF; /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 18:th WebRtc_Word16 */
+    enc_bits->cb_index[7]          = (*bitstreamPtr)>>8;  /* Bit 0..7  */
+    enc_bits->cb_index[8]          = (*bitstreamPtr)&0xFF;  /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3  */
+    enc_bits->gain_index[5]        = ((*bitstreamPtr)>>9)&0x7; /* Bit 4..6  */
+    enc_bits->gain_index[6]       |= ((*bitstreamPtr)>>6)&0x7; /* Bit 7..9  */
+    enc_bits->gain_index[7]       |= ((*bitstreamPtr)>>4)&0x3; /* Bit 10..11 */
+    enc_bits->gain_index[8]        = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */
+  } else { /* mode==30 */
+    /* 18:th WebRtc_Word16 */
+    enc_bits->idxVec[56]          |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->idxVec[57]          |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3  */
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>11)&1; /* Bit 4  */
+    enc_bits->cb_index[1]          = ((*bitstreamPtr)>>4)&0x7F; /* Bit 5..11 */
+    enc_bits->cb_index[2]          = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    enc_bits->cb_index[2]         |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->gain_index[0]       |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5  */
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7  */
+    enc_bits->gain_index[2]        = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */
+    enc_bits->cb_index[3]         |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11  */
+    enc_bits->cb_index[4]          = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 20:th WebRtc_Word16 */
+    enc_bits->cb_index[4]         |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->cb_index[5]          = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9  */
+    enc_bits->cb_index[6]         |= ((*bitstreamPtr)>>5)&0x1; /* Bit 10  */
+    enc_bits->cb_index[7]          = ((*bitstreamPtr)<<3)&0xF8; /* Bit 11..15 */
+    bitstreamPtr++;
+    /* 21:st WebRtc_Word16 */
+    enc_bits->cb_index[7]         |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->cb_index[8]          = ((*bitstreamPtr)>>5)&0xFF; /* Bit 3..10 */
+    enc_bits->cb_index[9]         |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11  */
+    enc_bits->cb_index[10]         = ((*bitstreamPtr)<<4)&0xF0; /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 22:nd WebRtc_Word16 */
+    enc_bits->cb_index[10]        |= ((*bitstreamPtr)>>12)&0xF; /* Bit 0..3  */
+    enc_bits->cb_index[11]         = ((*bitstreamPtr)>>4)&0xFF; /* Bit 4..11 */
+    enc_bits->cb_index[12]        |= ((*bitstreamPtr)>>3)&0x1; /* Bit 12  */
+    enc_bits->cb_index[13]         = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 23:rd WebRtc_Word16 */
+    enc_bits->cb_index[13]        |= ((*bitstreamPtr)>>11)&0x1F;/* Bit 0..4  */
+    enc_bits->cb_index[14]         = ((*bitstreamPtr)>>3)&0xFF; /* Bit 5..12 */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>1)&0x3; /* Bit 13..14 */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)&0x1);  /* Bit 15  */
+    bitstreamPtr++;
+    /* 24:th WebRtc_Word16 */
+    enc_bits->gain_index[5]        = ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->gain_index[6]       |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5  */
+    enc_bits->gain_index[7]       |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7  */
+    enc_bits->gain_index[8]        = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */
+    enc_bits->gain_index[9]       |= ((*bitstreamPtr)>>1)&0xF; /* Bit 11..14 */
+    enc_bits->gain_index[10]      |= ((*bitstreamPtr)<<2)&0x4; /* Bit 15  */
+    bitstreamPtr++;
+    /* 25:th WebRtc_Word16 */
+    enc_bits->gain_index[10]      |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->gain_index[11]       = ((*bitstreamPtr)>>11)&0x7; /* Bit 2..4  */
+    enc_bits->gain_index[12]      |= ((*bitstreamPtr)>>7)&0xF; /* Bit 5..8  */
+    enc_bits->gain_index[13]      |= ((*bitstreamPtr)>>4)&0x7; /* Bit 9..11 */
+    enc_bits->gain_index[14]       = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */
+  }
+  /* Last bit should be zero, otherwise it's an "empty" frame */
+  if (((*bitstreamPtr)&0x1) == 1) {
+    return(1);
+  } else {
+    return(0);
+  }
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/unpack_bits.h b/trunk/src/modules/audio_coding/codecs/ilbc/unpack_bits.h
new file mode 100644
index 0000000..864865f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/unpack_bits.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_UnpackBits.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  unpacking of bits from bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
+    WebRtc_UWord16 *bitstream,   /* (i) The packetized bitstream */
+    iLBC_bits *enc_bits,  /* (o) Parameters from bitstream */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                                        );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/vq3.c b/trunk/src/modules/audio_coding/codecs/ilbc/vq3.c
new file mode 100644
index 0000000..81d1bfa
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/vq3.c
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq3.c
+
+******************************************************************/
+
+#include "vq3.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq3(
+    WebRtc_Word16 *Xq, /* quantized vector (Q13) */
+    WebRtc_Word16 *index,
+    WebRtc_Word16 *CB, /* codebook in Q13 */
+    WebRtc_Word16 *X,  /* vector to quantize (Q13) */
+    WebRtc_Word16 n_cb
+                       ){
+  WebRtc_Word16 i, j;
+  WebRtc_Word16 pos, minindex=0;
+  WebRtc_Word16 tmp;
+  WebRtc_Word32 dist, mindist;
+
+  pos = 0;
+  mindist = WEBRTC_SPL_WORD32_MAX; /* start value */
+
+  /* Find the codebook with the lowest square distance */
+  for (j = 0; j < n_cb; j++) {
+    tmp = X[0] - CB[pos];
+    dist = WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    for (i = 1; i < 3; i++) {
+      tmp = X[i] - CB[pos + i];
+      dist += WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    }
+
+    if (dist < mindist) {
+      mindist = dist;
+      minindex = j;
+    }
+    pos += 3;
+  }
+
+  /* Store the quantized codebook and the index */
+  for (i = 0; i < 3; i++) {
+    Xq[i] = CB[minindex*3 + i];
+  }
+  *index = minindex;
+
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/vq3.h b/trunk/src/modules/audio_coding/codecs/ilbc/vq3.h
new file mode 100644
index 0000000..f2628e0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/vq3.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq3.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ3_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ3_H_
+
+#include "typedefs.h"
+
+/*----------------------------------------------------------------*
+ *  Vector quantization of order 3 (based on MSE)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq3(
+    WebRtc_Word16 *Xq,  /* (o) the quantized vector (Q13) */
+    WebRtc_Word16 *index, /* (o) the quantization index */
+    WebRtc_Word16 *CB,  /* (i) the vector quantization codebook (Q13) */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize (Q13) */
+    WebRtc_Word16 n_cb  /* (i) the number of vectors in the codebook */
+                       );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/vq4.c b/trunk/src/modules/audio_coding/codecs/ilbc/vq4.c
new file mode 100644
index 0000000..3d4c26d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/vq4.c
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq4.c
+
+******************************************************************/
+
+#include "vq4.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq4(
+    WebRtc_Word16 *Xq, /* quantized vector (Q13) */
+    WebRtc_Word16 *index,
+    WebRtc_Word16 *CB, /* codebook in Q13 */
+    WebRtc_Word16 *X,  /* vector to quantize (Q13) */
+    WebRtc_Word16 n_cb
+                       ){
+  WebRtc_Word16 i, j;
+  WebRtc_Word16 pos, minindex=0;
+  WebRtc_Word16 tmp;
+  WebRtc_Word32 dist, mindist;
+
+  pos = 0;
+  mindist = WEBRTC_SPL_WORD32_MAX; /* start value */
+
+  /* Find the codebook with the lowest square distance */
+  for (j = 0; j < n_cb; j++) {
+    tmp = X[0] - CB[pos];
+    dist = WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    for (i = 1; i < 4; i++) {
+      tmp = X[i] - CB[pos + i];
+      dist += WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    }
+
+    if (dist < mindist) {
+      mindist = dist;
+      minindex = j;
+    }
+    pos += 4;
+  }
+
+  /* Store the quantized codebook and the index */
+  for (i = 0; i < 4; i++) {
+    Xq[i] = CB[minindex*4 + i];
+  }
+  *index = minindex;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/vq4.h b/trunk/src/modules/audio_coding/codecs/ilbc/vq4.h
new file mode 100644
index 0000000..1b8cff2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/vq4.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq4.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ4_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ4_H_
+
+#include "typedefs.h"
+
+/*----------------------------------------------------------------*
+ *  Vector quantization of order 4 (based on MSE)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq4(
+    WebRtc_Word16 *Xq,  /* (o) the quantized vector (Q13) */
+    WebRtc_Word16 *index, /* (o) the quantization index */
+    WebRtc_Word16 *CB,  /* (i) the vector quantization codebook (Q13) */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize (Q13) */
+    WebRtc_Word16 n_cb  /* (i) the number of vectors in the codebook */
+                       );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/window32_w32.c b/trunk/src/modules/audio_coding/codecs/ilbc/window32_w32.c
new file mode 100644
index 0000000..b0e8406
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/window32_w32.c
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Window32W32.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  window multiplication
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Window32W32(
+    WebRtc_Word32 *z,    /* Output */
+    WebRtc_Word32 *x,    /* Input (same domain as Output)*/
+    const WebRtc_Word32  *y,  /* Q31 Window */
+    WebRtc_Word16 N     /* length to process */
+                               ) {
+  WebRtc_Word16 i;
+  WebRtc_Word16 x_low, x_hi, y_low, y_hi;
+  WebRtc_Word16 left_shifts;
+  WebRtc_Word32 temp;
+
+  left_shifts = (WebRtc_Word16)WebRtcSpl_NormW32(x[0]);
+  WebRtcSpl_VectorBitShiftW32(x, N, x, (WebRtc_Word16)(-left_shifts));
+
+
+  /* The double precision numbers use a special representation:
+   * w32 = hi<<16 + lo<<1
+   */
+  for (i = 0; i < N; i++) {
+    /* Extract higher bytes */
+    x_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(x[i], 16);
+    y_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(y[i], 16);
+
+    /* Extract lower bytes, defined as (w32 - hi<<16)>>1 */
+    temp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x_hi, 16);
+    x_low = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((x[i] - temp), 1);
+
+    temp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y_hi, 16);
+    y_low = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((y[i] - temp), 1);
+
+    /* Calculate z by a 32 bit multiplication using both low and high from x and y */
+    temp = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(x_hi, y_hi), 1);
+    temp = (temp + (WEBRTC_SPL_MUL_16_16_RSFT(x_hi, y_low, 14)));
+
+    z[i] = (temp + (WEBRTC_SPL_MUL_16_16_RSFT(x_low, y_hi, 14)));
+  }
+
+  WebRtcSpl_VectorBitShiftW32(z, N, z, left_shifts);
+
+  return;
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/window32_w32.h b/trunk/src/modules/audio_coding/codecs/ilbc/window32_w32.h
new file mode 100644
index 0000000..121188a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/window32_w32.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Window32W32.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  window multiplication
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Window32W32(
+    WebRtc_Word32 *z,    /* Output */
+    WebRtc_Word32 *x,    /* Input (same domain as Output)*/
+    const WebRtc_Word32  *y,  /* Q31 Window */
+    WebRtc_Word16 N     /* length to process */
+                               );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/xcorr_coef.c b/trunk/src/modules/audio_coding/codecs/ilbc/xcorr_coef.c
new file mode 100644
index 0000000..04170ad
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/xcorr_coef.c
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_XcorrCoef.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * cross correlation which finds the optimal lag for the
+ * crossCorr*crossCorr/(energy) criteria
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_XcorrCoef(
+    WebRtc_Word16 *target,  /* (i) first array */
+    WebRtc_Word16 *regressor, /* (i) second array */
+    WebRtc_Word16 subl,  /* (i) dimension arrays */
+    WebRtc_Word16 searchLen, /* (i) the search length */
+    WebRtc_Word16 offset,  /* (i) samples offset between arrays */
+    WebRtc_Word16 step   /* (i) +1 or -1 */
+                            ){
+  int k;
+  WebRtc_Word16 maxlag;
+  WebRtc_Word16 pos;
+  WebRtc_Word16 max;
+  WebRtc_Word16 crossCorrScale, Energyscale;
+  WebRtc_Word16 crossCorrSqMod, crossCorrSqMod_Max;
+  WebRtc_Word32 crossCorr, Energy;
+  WebRtc_Word16 crossCorrmod, EnergyMod, EnergyMod_Max;
+  WebRtc_Word16 *tp, *rp;
+  WebRtc_Word16 *rp_beg, *rp_end;
+  WebRtc_Word16 totscale, totscale_max;
+  WebRtc_Word16 scalediff;
+  WebRtc_Word32 newCrit, maxCrit;
+  int shifts;
+
+  /* Initializations, to make sure that the first one is selected */
+  crossCorrSqMod_Max=0;
+  EnergyMod_Max=WEBRTC_SPL_WORD16_MAX;
+  totscale_max=-500;
+  maxlag=0;
+  pos=0;
+
+  /* Find scale value and start position */
+  if (step==1) {
+    max=WebRtcSpl_MaxAbsValueW16(regressor, (WebRtc_Word16)(subl+searchLen-1));
+    rp_beg = regressor;
+    rp_end = &regressor[subl];
+  } else { /* step==-1 */
+    max=WebRtcSpl_MaxAbsValueW16(&regressor[-searchLen], (WebRtc_Word16)(subl+searchLen-1));
+    rp_beg = &regressor[-1];
+    rp_end = &regressor[subl-1];
+  }
+
+  /* Introduce a scale factor on the Energy in WebRtc_Word32 in
+     order to make sure that the calculation does not
+     overflow */
+
+  if (max>5000) {
+    shifts=2;
+  } else {
+    shifts=0;
+  }
+
+  /* Calculate the first energy, then do a +/- to get the other energies */
+  Energy=WebRtcSpl_DotProductWithScale(regressor, regressor, subl, shifts);
+
+  for (k=0;k<searchLen;k++) {
+    tp = target;
+    rp = &regressor[pos];
+
+    crossCorr=WebRtcSpl_DotProductWithScale(tp, rp, subl, shifts);
+
+    if ((Energy>0)&&(crossCorr>0)) {
+
+      /* Put cross correlation and energy on 16 bit word */
+      crossCorrScale=(WebRtc_Word16)WebRtcSpl_NormW32(crossCorr)-16;
+      crossCorrmod=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(crossCorr, crossCorrScale);
+      Energyscale=(WebRtc_Word16)WebRtcSpl_NormW32(Energy)-16;
+      EnergyMod=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(Energy, Energyscale);
+
+      /* Square cross correlation and store upper WebRtc_Word16 */
+      crossCorrSqMod=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(crossCorrmod, crossCorrmod, 16);
+
+      /* Calculate the total number of (dynamic) right shifts that have
+         been performed on (crossCorr*crossCorr)/energy
+      */
+      totscale=Energyscale-(crossCorrScale<<1);
+
+      /* Calculate the shift difference in order to be able to compare the two
+         (crossCorr*crossCorr)/energy in the same domain
+      */
+      scalediff=totscale-totscale_max;
+      scalediff=WEBRTC_SPL_MIN(scalediff,31);
+      scalediff=WEBRTC_SPL_MAX(scalediff,-31);
+
+      /* Compute the cross multiplication between the old best criteria
+         and the new one to be able to compare them without using a
+         division */
+
+      if (scalediff<0) {
+        newCrit = ((WebRtc_Word32)crossCorrSqMod*EnergyMod_Max)>>(-scalediff);
+        maxCrit = ((WebRtc_Word32)crossCorrSqMod_Max*EnergyMod);
+      } else {
+        newCrit = ((WebRtc_Word32)crossCorrSqMod*EnergyMod_Max);
+        maxCrit = ((WebRtc_Word32)crossCorrSqMod_Max*EnergyMod)>>scalediff;
+      }
+
+      /* Store the new lag value if the new criteria is larger
+         than previous largest criteria */
+
+      if (newCrit > maxCrit) {
+        crossCorrSqMod_Max = crossCorrSqMod;
+        EnergyMod_Max = EnergyMod;
+        totscale_max = totscale;
+        maxlag = k;
+      }
+    }
+    pos+=step;
+
+    /* Do a +/- to get the next energy */
+    Energy += step*(WEBRTC_SPL_RSHIFT_W32(
+        ((WebRtc_Word32)(*rp_end)*(*rp_end)) - ((WebRtc_Word32)(*rp_beg)*(*rp_beg)),
+        shifts));
+    rp_beg+=step;
+    rp_end+=step;
+  }
+
+  return(maxlag+offset);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/ilbc/xcorr_coef.h b/trunk/src/modules/audio_coding/codecs/ilbc/xcorr_coef.h
new file mode 100644
index 0000000..ac885c4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/ilbc/xcorr_coef.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_XcorrCoef.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * cross correlation which finds the optimal lag for the
+ * crossCorr*crossCorr/(energy) criteria
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_XcorrCoef(
+    WebRtc_Word16 *target,  /* (i) first array */
+    WebRtc_Word16 *regressor, /* (i) second array */
+    WebRtc_Word16 subl,  /* (i) dimension arrays */
+    WebRtc_Word16 searchLen, /* (i) the search length */
+    WebRtc_Word16 offset,  /* (i) samples offset between arrays */
+    WebRtc_Word16 step   /* (i) +1 or -1 */
+                            );
+
+#endif
diff --git a/trunk/src/modules/audio_coding/codecs/pcm16b/Android.mk b/trunk/src/modules/audio_coding/codecs/pcm16b/Android.mk
new file mode 100644
index 0000000..2e88fa6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/pcm16b/Android.mk
@@ -0,0 +1,37 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Build libwebrtc_pcm16b as a static library for Android.
+LOCAL_PATH := $(call my-dir)
+
+# Reset all LOCAL_* variables left over from a previous module.
+include $(CLEAR_VARS)
+
+# Pull in the shared WebRTC Android build settings (defines MY_WEBRTC_COMMON_DEFS).
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_pcm16b
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := pcm16b.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+# 'include' holds the public pcm16b.h; the deep relative path exposes src/.
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../../..
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# When building inside the Android platform tree (no NDK), use platform STLport.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/codecs/pcm16b/include/pcm16b.h b/trunk/src/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
new file mode 100644
index 0000000..e3cac4d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_
+/*
+ * Define the fixpoint numeric formats
+ */
+
+#include "typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcPcm16b_EncodeW16(...)
+ *
+ * "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speechIn16b	: Input speech vector
+ *		- len			: Number of samples in speech vector
+ *
+ * Output:
+ *		- speechOut16b	: Encoded data vector (big endian 16 bit)
+ *
+ * Returned value		: Size in bytes of speechOut16b
+ */
+
+WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b);
+
+/****************************************************************************
+ * WebRtcPcm16b_Encode(...)
+ *
+ * "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speech16b		: Input speech vector
+ *		- len			: Number of samples in speech vector
+ *
+ * Output:
+ *		- speech8b		: Encoded data vector (big endian 16 bit)
+ *
+ * Returned value		: Size in bytes of speech8b
+ */
+
+WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
+                                  WebRtc_Word16 len,
+                                  unsigned char *speech8b);
+
+/****************************************************************************
+ * WebRtcPcm16b_DecodeW16(...)
+ *
+ * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speechIn16b	: Encoded data vector (big endian 16 bit)
+ *		- len			: Number of bytes in speechIn16b
+ *
+ * Output:
+ *		- speechOut16b	: Decoded speech vector
+ *
+ * Returned value		: Samples in speechOut16b
+ */
+
+WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
+                                     WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b,
+                                     WebRtc_Word16* speechType);
+
+/****************************************************************************
+ * WebRtcPcm16b_Decode(...)
+ *
+ * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speech8b		: Encoded data vector (big endian 16 bit)
+ *		- len			: Number of bytes in speech8b
+ *
+ * Output:
+ *		- speech16b		: Decoded speech vector
+ *
+ * Returned value		: Samples in speech16b
+ */
+
+
+WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
+                                  WebRtc_Word16 len,
+                                  WebRtc_Word16 *speech16b);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_ */
diff --git a/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b.c b/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b.c
new file mode 100644
index 0000000..0cff5dd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b.c
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "pcm16b.h"
+
+#include <stdlib.h>
+
+#include "typedefs.h"
+
+#ifdef WEBRTC_BIG_ENDIAN
+#include "signal_processing_library.h"
+#endif
+
+#define HIGHEND 0xFF00
+#define LOWEND    0xFF
+
+
+
+/* Encoder with WebRtc_Word16 Output.
+ * "Encodes" len samples into big-endian 16-bit PCM. On big-endian targets
+ * this is a verbatim word copy; on little-endian targets each sample's two
+ * bytes are swapped. Returns the encoded size in bytes (len * 2). */
+WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b)
+{
+#ifdef WEBRTC_BIG_ENDIAN
+    /* Host order is already big endian: copy len 16-bit words as-is. */
+    WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len);
+#else
+    int i;
+    /* Swap the bytes of each sample: high byte -> low position and back. */
+    for (i=0;i<len;i++) {
+        speechOut16b[i]=(((WebRtc_UWord16)speechIn16b[i])>>8)|((((WebRtc_UWord16)speechIn16b[i])<<8)&0xFF00);
+    }
+#endif
+    return(len<<1);  /* Two output bytes per input sample. */
+}
+
+
+/* Encoder with char Output (old version).
+ * Writes each of the len input samples as a big-endian byte pair into
+ * speech8b. Returns the number of bytes written (len * 2). */
+WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
+                                  WebRtc_Word16 len,
+                                  unsigned char *speech8b)
+{
+    /* NOTE: despite its name, 'samples' holds the OUTPUT size in bytes
+     * (two bytes per sample), matching the documented return value. */
+    WebRtc_Word16 samples=len*2;
+    WebRtc_Word16 pos;
+    WebRtc_Word16 short1;
+    WebRtc_Word16 short2;
+    for (pos=0;pos<len;pos++) {
+        short1=HIGHEND & speech16b[pos];  /* High byte (0xFF00 mask). */
+        short2=LOWEND & speech16b[pos];   /* Low byte (0x00FF mask). */
+        short1=short1>>8;
+        speech8b[pos*2]=(unsigned char) short1;    /* Big endian: high first. */
+        speech8b[pos*2+1]=(unsigned char) short2;
+    }
+    return(samples);
+}
+
+
+/* Decoder with WebRtc_Word16 Input instead of char when the WebRtc_Word16 Encoder is used.
+ * Converts a big-endian encoded buffer back to host-order samples.
+ * 'len' is the input length in BYTES (per the header docs); the return value
+ * is the number of decoded samples, len / 2. 'inst' is unused. */
+WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
+                                     WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b,
+                                     WebRtc_Word16* speechType)
+{
+#ifdef WEBRTC_BIG_ENDIAN
+    /* Host order already matches the wire format: byte copy, no swapping.
+     * ((len*2+1)>>1) rounds the byte count, effectively copying len bytes. */
+    WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(WebRtc_Word16)+1)>>1));
+#else
+    int i;
+    int samples=len>>1;  /* len is in bytes; two bytes per sample. */
+
+    /* Swap each sample's bytes back to little-endian host order. */
+    for (i=0;i<samples;i++) {
+        speechOut16b[i]=(((WebRtc_UWord16)speechIn16b[i])>>8)|(((WebRtc_UWord16)(speechIn16b[i]&0xFF))<<8);
+    }
+#endif
+
+    /* Always reported as active speech (PCM16B has no DTX/CNG frames). */
+    *speechType=1;
+
+    // Avoid warning.
+    (void)(inst = NULL);
+
+    return(len>>1);
+}
+
+/* "old" version of the decoder that uses char as input (not used in NetEq any more).
+ * Reassembles len bytes of big-endian byte pairs into len/2 host-order
+ * samples. Returns the number of samples written to speech16b. */
+WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
+                                  WebRtc_Word16 len,
+                                  WebRtc_Word16 *speech16b)
+{
+    WebRtc_Word16 samples=len>>1;  /* Two input bytes per output sample. */
+    WebRtc_Word16 pos;
+    WebRtc_Word16 shortval;
+    for (pos=0;pos<samples;pos++) {
+        /* Big endian on the wire: first byte is the high byte. */
+        shortval=((unsigned short) speech8b[pos*2]);
+        shortval=(shortval<<8)&HIGHEND;
+        shortval=shortval|(((unsigned short) speech8b[pos*2+1])&LOWEND);
+        speech16b[pos]=shortval;
+    }
+    return(samples);
+}
diff --git a/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi b/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
new file mode 100644
index 0000000..38754e6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
@@ -0,0 +1,52 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # GYP include: defines the PCM16B codec library and its unit-test binary.
+  'targets': [
+    {
+      'target_name': 'PCM16B',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      # Dependents get the public header directory automatically.
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/pcm16b.h',
+        'pcm16b.c',
+      ],
+    },
+  ], # targets
+  # Test executables are only built in standalone (non-Chromium) builds.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'pcm16b_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'PCM16B',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'pcm16b_unittest.cc',
+          ],
+        }, # PCM16B_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b_unittest.cc b/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b_unittest.cc
new file mode 100644
index 0000000..eb910b3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/codecs/pcm16b/pcm16b_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "pcm16b.h"
+#include "gtest/gtest.h"
+
+// Intentionally empty: compiling and linking this target is the point.
+TEST(Pcm16bTest, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/modules/audio_coding/main/OWNERS b/trunk/src/modules/audio_coding/main/OWNERS
new file mode 100644
index 0000000..e1e6256
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/OWNERS
@@ -0,0 +1,3 @@
+tina.legrand@webrtc.org
+turaj@webrtc.org
+jan.skoglund@webrtc.org
diff --git a/trunk/src/modules/audio_coding/main/interface/audio_coding_module.h b/trunk/src/modules/audio_coding/main/interface/audio_coding_module.h
new file mode 100644
index 0000000..ed13689
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/interface/audio_coding_module.h
@@ -0,0 +1,888 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H
+
+#include "audio_coding_module_typedefs.h"
+#include "module.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+// forward declarations
+struct CodecInst;
+
+#define WEBRTC_10MS_PCM_AUDIO 960 // 16 bits super wideband 48 Khz
+
+// Callback class used for sending data ready to be packetized
+class AudioPacketizationCallback {
+ public:
+  virtual ~AudioPacketizationCallback() {}
+
+  virtual WebRtc_Word32 SendData(
+      FrameType frameType, WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
+      const WebRtc_UWord8* payloadData, WebRtc_UWord16 payloadSize,
+      const RTPFragmentationHeader* fragmentation) = 0;
+};
+
+// Callback class used for inband Dtmf detection
+class AudioCodingFeedback {
+ public:
+  virtual ~AudioCodingFeedback() {}
+
+  virtual WebRtc_Word32 IncomingDtmf(const WebRtc_UWord8 digitDtmf,
+                                     const bool end) = 0;
+};
+
+// Callback class used for reporting VAD decision
+class ACMVADCallback {
+ public:
+  virtual ~ACMVADCallback() {}
+
+  virtual WebRtc_Word32 InFrameType(WebRtc_Word16 frameType) = 0;
+};
+
+// Callback class used for reporting receiver statistics
+class ACMVQMonCallback {
+ public:
+  virtual ~ACMVQMonCallback() {}
+
+  virtual WebRtc_Word32 NetEqStatistics(
+      const WebRtc_Word32 id, // current ACM id
+      const WebRtc_UWord16 MIUsValid, // valid voice duration in ms
+      const WebRtc_UWord16 MIUsReplaced, // concealed voice duration in ms
+      const WebRtc_UWord8 eventFlags, // concealed voice flags
+      const WebRtc_UWord16 delayMS) = 0; // average delay in ms
+};
+
+class AudioCodingModule: public Module {
+ protected:
+  AudioCodingModule() {}
+  virtual ~AudioCodingModule() {}
+
+ public:
+  ///////////////////////////////////////////////////////////////////////////
+  // Creation and destruction of a ACM
+  //
+  static AudioCodingModule* Create(const WebRtc_Word32 id);
+
+  static void Destroy(AudioCodingModule* module);
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Utility functions
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_UWord8 NumberOfCodecs()
+  // Returns number of supported codecs.
+  //
+  // Return value:
+  //   number of supported codecs.
+  ///
+  static WebRtc_UWord8 NumberOfCodecs();
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Codec()
+  // Get supported codec with list number.
+  //
+  // Input:
+  //   -listId             : list number.
+  //
+  // Output:
+  //   -codec              : a structure where the parameters of the codec,
+  //                         given by list number is written to.
+  //
+  // Return value:
+  //   -1 if the list number (listId) is invalid.
+  //    0 if succeeded.
+  //
+  static WebRtc_Word32 Codec(const WebRtc_UWord8 listId, CodecInst& codec);
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Codec()
+  // Get supported codec with the given codec name and sampling frequency.
+  // If the sampling frequency is -1 then the search will be only based on
+  // codec name.
+  //
+  // Input:
+  //   -payloadName        : name of the codec.
+  //   -samplingFreqHz     : sampling frequency of the codec.
+  //
+  // Output:
+  //   -codec              : a structure where the parameters of the codec,
+  //                         given by name is written to.
+  //
+  // Return value:
+  //   -1 if the list number (listId) is invalid.
+  //    0 if succeeded.
+  //
+  static WebRtc_Word32 Codec(const char* payloadName, CodecInst& codec,
+                             const WebRtc_Word32 samplingFreqHz = -1);
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Codec()
+  //
+  // Returns the list number of the given codec name and sampling frequency.
+  // If the sampling frequency is -1 then the search will be only based on
+  // codec name.
+  //
+  // Input:
+  //   -payloadName        : name of the codec.
+  //   -samplingFreqHz     : sampling frequency of the codec.
+  //
+  // Return value:
+  //   if the codec is found, the index of the codec in the list,
+  //   -1 if the codec is not found.
+  //
+  static WebRtc_Word32 Codec(const char* payloadName,
+                             const WebRtc_Word32 samplingFreqHz = -1);
+
+  ///////////////////////////////////////////////////////////////////////////
+  // bool IsCodecValid()
+  // Checks the validity of the parameters of the given codec.
+  //
+  // Input:
+  //   -codec              : the structure which keeps the parameters of the
+  //                         codec.
+  //
+  // Return value:
+  //   true if the parameters are valid,
+  //   false if any parameter is not valid.
+  //
+  static bool IsCodecValid(const CodecInst& codec);
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Sender
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 InitializeSender()
+  // Any encoder-related state of ACM will be initialized to the
+  // same state when ACM is created. This will not interrupt or
+  // effect decoding functionality of ACM. ACM will lose all the
+  // encoding-related settings by calling this function.
+  // For instance, a send codec has to be registered again.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 InitializeSender() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ResetEncoder()
+  // This API resets the states of encoder. All the encoder settings, such as
+  // send-codec or VAD/DTX, will be preserved.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 ResetEncoder() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterSendCodec()
+  // Registers a codec, specified by "sendCodec," as sending codec.
+  // This API can be called multiple times to register codecs. The last codec
+  // registered overwrites the previous ones.
+  // The API can also be used to change payload type for CNG and RED, which are
+  // registered by default to default payload types.
+  // Note that registering CNG and RED won't overwrite speech codecs.
+  // This API can be called to set/change the send payload-type, frame-size
+  // or encoding rate (if applicable for the codec).
+  //
+  // Input:
+  //   -sendCodec          : Parameters of the codec to be registered, c.f.
+  //                         common_types.h for the definition of
+  //                         CodecInst.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 RegisterSendCodec(const CodecInst& sendCodec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SendCodec()
+  // Get parameters for the codec currently registered as send codec.
+  //
+  // Output:
+  //   -currentSendCodec          : parameters of the send codec.
+  //
+  // Return value:
+  //   -1 if failed to get send codec,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 SendCodec(CodecInst& currentSendCodec) const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SendFrequency()
+  // Get the sampling frequency of the current encoder in Hertz.
+  //
+  // Return value:
+  //   positive; sampling frequency [Hz] of the current encoder.
+  //   -1 if an error has happened.
+  //
+  virtual WebRtc_Word32 SendFrequency() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Bitrate()
+  // Get encoding bit-rate in bits per second.
+  //
+  // Return value:
+  //   positive; encoding rate in bits/sec,
+  //   -1 if an error has happened.
+  //
+  virtual WebRtc_Word32 SendBitrate() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetReceivedEstimatedBandwidth()
+  // Set available bandwidth [bits/sec] of the up-link channel.
+  // This information is used for traffic shaping, and is currently only
+  // supported if iSAC is the send codec.
+  //
+  // Input:
+  //   -bw                 : bandwidth in bits/sec estimated for
+  //                         up-link.
+  // Return value
+  //   -1 if error occurred in setting the bandwidth,
+  //    0 bandwidth is set successfully.
+  //
+  virtual WebRtc_Word32 SetReceivedEstimatedBandwidth(
+      const WebRtc_Word32 bw) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterTransportCallback()
+  // Register a transport callback which will be called to deliver
+  // the encoded buffers whenever Process() is called and a
+  // bit-stream is ready.
+  //
+  // Input:
+  //   -transport          : pointer to the callback class
+  //                         transport->SendData() is called whenever
+  //                         Process() is called and bit-stream is ready
+  //                         to deliver.
+  //
+  // Return value:
+  //   -1 if the transport callback could not be registered
+  //    0 if registration is successful.
+  //
+  virtual WebRtc_Word32 RegisterTransportCallback(
+      AudioPacketizationCallback* transport) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Add10MsData()
+  // Add 10MS of raw (PCM) audio data to the encoder. If the sampling
+  // frequency of the audio does not match the sampling frequency of the
+  // current encoder ACM will resample the audio.
+  //
+  // Input:
+  //   -audioFrame         : the input audio frame, containing raw audio
+  //                         sampling frequency etc.,
+  //                         c.f. module_common_types.h for definition of
+  //                         AudioFrame.
+  //
+  // Return value:
+  //      0   successfully added the frame.
+  //     -1   some error occurred and data is not added.
+  //   < -1   to add the frame to the buffer n samples had to be
+  //          overwritten, -n is the return value in this case.
+  //
+  virtual WebRtc_Word32 Add10MsData(const AudioFrame& audioFrame) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // (FEC) Forward Error Correction
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetFECStatus(const bool enable)
+  // configure FEC status i.e. on/off.
+  //
+  // RFC 2198 describes a solution which has a single payload type which
+  // signifies a packet with redundancy. That packet then becomes a container,
+  // encapsulating multiple payloads into a single RTP packet.
+  // Such a scheme is flexible, since any amount of redundancy may be
+  // encapsulated within a single packet.  There is, however, a small overhead
+  // since each encapsulated payload must be preceded by a header indicating
+  // the type of data enclosed.
+  //
+  // This means that FEC is actually a RED scheme.
+  //
+  // Input:
+  //   -enableFEC          : if true FEC is enabled, otherwise FEC is
+  //                         disabled.
+  //
+  // Return value:
+  //   -1 if failed to set FEC status,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 SetFECStatus(const bool enableFEC) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // bool FECStatus()
+  // Get FEC status
+  //
+  // Return value
+  //   true if FEC is enabled,
+  //   false if FEC is disabled.
+  //
+  virtual bool FECStatus() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   (VAD) Voice Activity Detection
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetVAD()
+  // If DTX is enabled & the codec does not have internal DTX/VAD
+  // WebRtc VAD will be automatically enabled and 'enableVAD' is ignored.
+  //
+  // If DTX is disabled but VAD is enabled no DTX packets are send,
+  // regardless of whether the codec has internal DTX/VAD or not. In this
+  // case, WebRtc VAD is running to label frames as active/in-active.
+  //
+  // Inputs:
+  //   -enableDTX          : if true DTX is enabled,
+  //                         otherwise DTX is disabled.
+  //   -enableVAD          : if true VAD is enabled,
+  //                         otherwise VAD is disabled.
+  //   -vadMode            : determines the aggressiveness of VAD. A more
+  //                         aggressive mode results in more frames labeled
+  //                         as in-active, c.f. definition of
+  //                         ACMVADMode in audio_coding_module_typedefs.h
+  //                         for valid values.
+  //
+  // Return value:
+  //   -1 if failed to set up VAD/DTX,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 SetVAD(const bool enableDTX = true,
+                               const bool enableVAD = false,
+                               const ACMVADMode vadMode = VADNormal) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 VAD()
+  // Get VAD status.
+  //
+  // Outputs:
+  //   -dtxEnabled         : is set to true if DTX is enabled, otherwise
+  //                         is set to false.
+  //   -vadEnabled         : is set to true if VAD is enabled, otherwise
+  //                         is set to false.
+  //   -vadMode            : is set to the current aggressiveness of VAD.
+  //
+  // Return value:
+  //   -1 if fails to retrieve the setting of DTX/VAD,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 VAD(bool& dtxEnabled, bool& vadEnabled,
+                            ACMVADMode& vadMode) const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ReplaceInternalDTXWithWebRtc()
+  // Used to replace codec internal DTX scheme with WebRtc. This is only
+  // supported for G729, where this call replaces AnnexB with WebRtc DTX.
+  //
+  // Input:
+  //   -useWebRtcDTX         : if false (default) the codec built-in DTX/VAD
+  //                         scheme is used, otherwise the internal DTX is
+  //                         replaced with WebRtc DTX/VAD.
+  //
+  // Return value:
+  //   -1 if failed to replace codec internal DTX with WebRtc,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 ReplaceInternalDTXWithWebRtc(
+      const bool useWebRtcDTX = false) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 IsInternalDTXReplacedWithWebRtc()
+  // Get status if the codec internal DTX (when such exists) is replaced with
+  // WebRtc DTX. This is only supported for G729.
+  //
+  // Output:
+  //   -usesWebRtcDTX        : is set to true if the codec internal DTX is
+  //                         replaced with WebRtc DTX/VAD, otherwise it is set
+  //                         to false.
+  //
+  // Return value:
+  //   -1 if failed to determine if codec internal DTX is replaced with WebRtc,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 IsInternalDTXReplacedWithWebRtc(
+      bool& usesWebRtcDTX) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterVADCallback()
+  // Call this method to register a callback function which is called
+  // any time that ACM encounters an empty frame. That is a frame which is
+  // recognized inactive. Depending on the codec WebRtc VAD or internal codec
+  // VAD is employed to identify a frame as active/inactive.
+  //
+  // Input:
+  //   -vadCallback        : pointer to a callback function.
+  //
+  // Return value:
+  //   -1 if failed to register the callback function.
+  //    0 if the callback function is registered successfully.
+  //
+  virtual WebRtc_Word32 RegisterVADCallback(ACMVADCallback* vadCallback) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Receiver
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 InitializeReceiver()
+  // Any decoder-related state of ACM will be initialized to the
+  // same state when ACM is created. This will not interrupt or
+  // effect encoding functionality of ACM. ACM would lose all the
+  // decoding-related settings by calling this function.
+  // For instance, all registered codecs are deleted and have to be
+  // registered again.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 InitializeReceiver() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ResetDecoder()
+  // This API resets the states of decoders. ACM will not lose any
+  // decoder-related settings, such as registered codecs.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 ResetDecoder() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ReceiveFrequency()
+  // Get sampling frequency of the last received payload.
+  //
+  // Return value:
+  //   non-negative the sampling frequency in Hertz.
+  //   -1 if an error has occurred.
+  //
+  virtual WebRtc_Word32 ReceiveFrequency() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 PlayoutFrequency()
+  // Get sampling frequency of audio played out.
+  //
+  // Return value:
+  //   the sampling frequency in Hertz.
+  //
+  virtual WebRtc_Word32 PlayoutFrequency() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterReceiveCodec()
+  // Register possible decoders, can be called multiple times for
+  // codecs, CNG-NB, CNG-WB, CNG-SWB, AVT and RED.
+  //
+  // Input:
+  //   -receiveCodec       : parameters of the codec to be registered, c.f.
+  //                         common_types.h for the definition of
+  //                         CodecInst.
+  //
+  // Return value:
+  //   -1 if failed to register the codec
+  //    0 if the codec registered successfully.
+  //
+  virtual WebRtc_Word32 RegisterReceiveCodec(const CodecInst& receiveCodec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 UnregisterReceiveCodec()
+  // Unregister the codec currently registered with a specific payload type
+  // from the list of possible receive codecs.
+  //
+  // Input:
+  //   -payloadType        : The number representing the payload type to
+  //                         unregister.
+  //
+  // Output:
+  //   -1 if the unregistration fails.
+  //    0 if the given codec is successfully unregistered.
+  //
+  virtual WebRtc_Word32 UnregisterReceiveCodec(
+      const WebRtc_Word16 receiveCodec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ReceiveCodec()
+  // Get the codec associated with last received payload.
+  //
+  // Output:
+  //   -currRcvCodec       : parameters of the codec associated with the last
+  //                         received payload, c.f. common_types.h for
+  //                         the definition of CodecInst.
+  //
+  // Return value:
+  //   -1 if failed to retrieve the codec,
+  //    0 if the codec is successfully retrieved.
+  //
+  virtual WebRtc_Word32 ReceiveCodec(CodecInst& currRcvCodec) const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 IncomingPacket()
+  // Call this function to insert a parsed RTP packet into ACM.
+  //
+  // Inputs:
+  //   -incomingPayload    : received payload.
+  //   -payloadLengthByte  : the length of payload in bytes.
+  //   -rtpInfo            : the relevant information retrieved from RTP
+  //                         header.
+  //
+  // Return value:
+  //   -1 if failed to push in the payload
+  //    0 if payload is successfully pushed in.
+  //
+  virtual WebRtc_Word32 IncomingPacket(const WebRtc_Word8* incomingPayload,
+                                       const WebRtc_Word32 payloadLengthByte,
+                                       const WebRtcRTPHeader& rtpInfo) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 IncomingPayload()
+  // Call this API to push incoming payloads when there is no rtp-info.
+  // The rtp-info will be created in ACM. One usage for this API is when
+  // pre-encoded files are pushed in ACM
+  //
+  // Inputs:
+  //   -incomingPayload    : received payload.
+  //   -payloadLengthByte  : the length, in bytes, of the received payload.
+  //   -payloadType        : the payload-type. This specifies which codec has
+  //                         to be used to decode the payload.
+  //   -timestamp          : send timestamp of the payload. ACM starts with
+  //                         a random value and increments it by the
+  //                         packet-size, which is given when the codec in
+  //                         question is registered by RegisterReceiveCodec().
+  //                         Therefore, it is essential to have the timestamp
+  //                         if the frame-size differs from the registered
+  //                         value or if the incoming payload contains DTX
+  //                         packets.
+  //
+  // Return value:
+  //   -1 if failed to push in the payload
+  //    0 if payload is successfully pushed in.
+  //
+  virtual WebRtc_Word32 IncomingPayload(const WebRtc_Word8* incomingPayload,
+                                        const WebRtc_Word32 payloadLengthByte,
+                                        const WebRtc_UWord8 payloadType,
+                                        const WebRtc_UWord32 timestamp = 0) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetMinimumPlayoutDelay()
+  // Set Minimum playout delay, used for lip-sync.
+  //
+  // Input:
+  //   -timeMs             : minimum delay in milliseconds.
+  //
+  // Return value:
+  //   -1 if failed to set the delay,
+  //    0 if the minimum delay is set.
+  //
+  virtual WebRtc_Word32 SetMinimumPlayoutDelay(const WebRtc_Word32 timeMs) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterIncomingMessagesCallback()
+  // Used by the module to deliver messages to the codec module/application
+  // when a Dtmf tone is detected, as well as when it stopped.
+  //
+  // Inputs:
+  //   -inMsgCallback      : pointer to callback function which will be called
+  //                         if Dtmf is detected.
+  //   -cpt                : enables CPT (Call Progress Tone) detection for the
+  //                         specified country. c.f. definition of ACMCountries
+  //                         in audio_coding_module_typedefs.h for valid
+  //                         entries. The default value disables CPT
+  //                         detection.
+  //
+  // Return value:
+  //   -1 if the message callback could not be registered
+  //    0 if registration is successful.
+  //
+  virtual WebRtc_Word32
+      RegisterIncomingMessagesCallback(
+          AudioCodingFeedback* inMsgCallback,
+          const ACMCountries cpt = ACMDisableCountryDetection) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetDtmfPlayoutStatus()
+  // Configure Dtmf playout, i.e. whether out-of-band
+  // Dtmf tones are played or not.
+  //
+  // Input:
+  //   -enable             : if true to enable playout out-of-band Dtmf tones,
+  //                         false to disable.
+  //
+  // Return value:
+  //   -1 if the method fails, e.g. Dtmf playout is not supported.
+  //    0 if the status is set successfully.
+  //
+  virtual WebRtc_Word32 SetDtmfPlayoutStatus(const bool enable) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // bool DtmfPlayoutStatus()
+  // Get Dtmf playout status.
+  //
+  // Return value:
+  //   true if out-of-band Dtmf tones are played,
+  //   false if playout of Dtmf tones is disabled.
+  //
+  virtual bool DtmfPlayoutStatus() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetBackgroundNoiseMode()
+  // Sets the mode of the background noise playout in an event of long
+  // packetloss burst. For the valid modes see the declaration of
+  // ACMBackgroundNoiseMode in audio_coding_module_typedefs.h.
+  //
+  // Input:
+  //   -mode               : the mode for the background noise playout.
+  //
+  // Return value:
+  //   -1 if failed to set the mode.
+  //    0 if succeeded in setting the mode.
+  //
+  virtual WebRtc_Word32 SetBackgroundNoiseMode(
+      const ACMBackgroundNoiseMode mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 BackgroundNoiseMode()
+  // Call this method to get the mode of the background noise playout.
+  // Playout of background noise is a result of a long packetloss burst.
+  // See ACMBackgroundNoiseMode in audio_coding_module_typedefs.h for
+  // possible modes.
+  //
+  // Output:
+  //   -mode             : a reference to ACMBackgroundNoiseMode enumerator.
+  //
+  // Return value:
+  //    0 if the output is a valid mode.
+  //   -1 if ACM failed to output a valid mode.
+  //
+  virtual WebRtc_Word32 BackgroundNoiseMode(ACMBackgroundNoiseMode& mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 PlayoutTimestamp()
+  // The send timestamp of an RTP packet is associated with the decoded
+  // audio of the packet in question. This function returns the timestamp of
+  // the latest audio obtained by calling PlayoutData10ms().
+  //
+  // Input:
+  //   -timestamp          : a reference to a WebRtc_UWord32 to receive the
+  //                         timestamp.
+  // Return value:
+  //    0 if the output is a correct timestamp.
+  //   -1 if failed to output the correct timestamp.
+  //
+  //
+  virtual WebRtc_Word32 PlayoutTimestamp(WebRtc_UWord32& timestamp) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 DecoderEstimatedBandwidth()
+  // Get the estimate of the Bandwidth, in bits/second, based on the incoming
+  // stream. This API is useful in one-way communication scenarios, where
+  // the bandwidth information is sent in an out-of-band fashion.
+  // Currently only supported if iSAC is registered as a receiver.
+  //
+  // Return value:
+  //   >0 bandwidth in bits/second.
+  //   -1 if failed to get a bandwidth estimate.
+  //
+  virtual WebRtc_Word32 DecoderEstimatedBandwidth() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetPlayoutMode()
+  // Call this API to set the playout mode. Playout mode could be optimized
+  // for i) voice, ii) FAX or iii) streaming. In Voice mode, NetEQ is
+  // optimized to deliver highest audio quality while maintaining a minimum
+  // delay. In FAX mode, NetEQ is optimized to have as few delay changes as
+  // possible and maintain a constant delay, perhaps large relative to voice
+  // mode, to avoid PLC. In streaming mode, we tolerate a little more delay
+  // to achieve better jitter robustness.
+  //
+  // Input:
+  //   -mode               : playout mode. Possible inputs are:
+  //                         "voice",
+  //                         "fax" and
+  //                         "streaming".
+  //
+  // Return value:
+  //   -1 if failed to set the mode,
+  //    0 if succeeding.
+  //
+  virtual WebRtc_Word32 SetPlayoutMode(const AudioPlayoutMode mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // AudioPlayoutMode PlayoutMode()
+  // Get playout mode, i.e. whether it is speech, FAX or streaming. See
+  // audio_coding_module_typedefs.h for definition of AudioPlayoutMode.
+  //
+  // Return value:
+  //   voice:       is for voice output,
+  //   fax:         a mode that is optimized for receiving FAX signals.
+  //                In this mode NetEq tries to maintain a constant high
+  //                delay to avoid PLC if possible.
+  //   streaming:   a mode that is suitable for streaming. In this mode we
+  //                accept longer delay to improve jitter robustness.
+  //
+  virtual AudioPlayoutMode PlayoutMode() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 PlayoutData10Ms(
+  // Get 10 milliseconds of raw audio data for playout, at the given sampling
+  // frequency. ACM will perform a resampling if required.
+  //
+  // Input:
+  //   -desiredFreqHz      : the desired sampling frequency, in Hertz, of the
+  //                         output audio. If set to -1, the function returns the
+  //                         audio at the current sampling frequency.
+  //
+  // Output:
+  //   -audioFrame         : output audio frame which contains raw audio data
+  //                         and other relevant parameters, c.f.
+  //                         module_common_types.h for the definition of
+  //                         AudioFrame.
+  //
+  // Return value:
+  //   -1 if the function fails,
+  //    0 if the function succeeds.
+  //
+  virtual WebRtc_Word32
+      PlayoutData10Ms(const WebRtc_Word32 desiredFreqHz,
+                      AudioFrame &audioFrame) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   (CNG) Comfort Noise Generation
+  //   Generate comfort noise when receiving DTX packets
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word16 SetReceiveVADMode()
+  // Configure VAD aggressiveness on the incoming stream.
+  //
+  // Input:
+  //   -mode               : aggressiveness of the VAD on incoming stream.
+  //                         See audio_coding_module_typedefs.h for the
+  //                         definition of ACMVADMode, and possible
+  //                         values for aggressiveness.
+  //
+  // Return value:
+  //   -1 if fails to set the mode,
+  //    0 if the mode is set successfully.
+  //
+  virtual WebRtc_Word16 SetReceiveVADMode(const ACMVADMode mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // ACMVADMode ReceiveVADMode()
+  // Get VAD aggressiveness on the incoming stream.
+  //
+  // Return value:
+  //   aggressiveness of VAD, running on the incoming stream. A more
+  //   aggressive mode means more audio frames will be labeled as in-active.
+  //   See audio_coding_module_typedefs.h for the definition of
+  //   ACMVADMode.
+  //
+  virtual ACMVADMode ReceiveVADMode() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Codec specific
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetISACMaxRate()
+  // Set the maximum instantaneous rate of iSAC. For a payload of B bits
+  // with a frame-size of T sec the instantaneous rate is B/T bits per
+  // second. Therefore, (B/T < maxRateBitPerSec) and
+  // (B < maxPayloadLenBytes * 8) are always satisfied for iSAC payloads,
+  // c.f SetISACMaxPayloadSize().
+  //
+  // Input:
+  //   -maxRateBitPerSec   : maximum instantaneous bit-rate given in bits/sec.
+  //
+  // Return value:
+  //   -1 if failed to set the maximum rate.
+  //    0 if the maximum rate is set successfully.
+  //
+  virtual WebRtc_Word32 SetISACMaxRate(
+      const WebRtc_UWord32 maxRateBitPerSec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetISACMaxPayloadSize()
+  // Set the maximum payload size of iSAC packets. No iSAC payload,
+  // regardless of its frame-size, may exceed the given limit. For
+  // an iSAC payload of size B bits and frame-size T sec we have;
+  // (B < maxPayloadLenBytes * 8) and (B/T < maxRateBitPerSec), c.f.
+  // SetISACMaxRate().
+  //
+  // Input:
+  //   -maxPayloadLenBytes : maximum payload size in bytes.
+  //
+  // Return value:
+  //   -1 if failed to set the maximum payload-size.
+  //    0 if the given limit is set successfully.
+  //
+  virtual WebRtc_Word32 SetISACMaxPayloadSize(
+      const WebRtc_UWord16 maxPayloadLenBytes) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ConfigISACBandwidthEstimator()
+  // Call this function to configure the bandwidth estimator of ISAC.
+  // During the adaptation of bit-rate, iSAC automatically adjusts the
+  // frame-size (either 30 or 60 ms) to save on RTP header. The initial
+  // frame-size can be specified by the first argument. The configuration also
+  // regards the initial estimate of bandwidths. The estimator starts from
+  // this point and converges to the actual bottleneck. This is given by the
+  // second parameter. Furthermore, it is also possible to control the
+  // adaptation of frame-size. This is specified by the last parameter.
+  //
+  // Input:
+  //   -initFrameSizeMsec  : initial frame-size in milliseconds. For iSAC-wb
+  //                         30 ms and 60 ms (default) are acceptable values,
+  //                         and for iSAC-swb 30 ms is the only acceptable
+  //                         value. Zero indicates default value.
+  //   -initRateBitPerSec  : initial estimate of the bandwidth. Values
+  //                         between 10000 and 58000 are acceptable.
+  //   -enforceFrameSize   : if true, the frame-size will not be adapted.
+  //
+  // Return value:
+  //   -1 if failed to configure the bandwidth estimator,
+  //    0 if the configuration was successfully applied.
+  //
+  virtual WebRtc_Word32 ConfigISACBandwidthEstimator(
+      const WebRtc_UWord8 initFrameSizeMsec,
+      const WebRtc_UWord16 initRateBitPerSec,
+      const bool enforceFrameSize = false) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   statistics
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32  NetworkStatistics()
+  // Get network statistics.
+  //
+  // Output:
+  //   -networkStatistics  : a structure that contains network statistics.
+  //
+  // Return value:
+  //   -1 if failed to set the network statistics,
+  //    0 if statistics are set successfully.
+  //
+  virtual WebRtc_Word32 NetworkStatistics(
+      ACMNetworkStatistics& networkStatistics) const = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H
diff --git a/trunk/src/modules/audio_coding/main/interface/audio_coding_module_typedefs.h b/trunk/src/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
new file mode 100644
index 0000000..c0e06ef
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
@@ -0,0 +1,197 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_TYPEDEFS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_TYPEDEFS_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+///////////////////////////////////////////////////////////////////////////
+// enum AudioPlayoutMode
+// An enumerator for different playout modes.
+//
+// -voice       : This is the standard mode for VoIP calls. The trade-off
+//                between low delay and jitter robustness is optimized
+//                for high-quality two-way communication.
+//                NetEQs packet loss concealment and signal processing
+//                capabilities are fully employed.
+// -fax         : The fax mode is optimized for decodability of fax signals
+//                rather than for perceived audio quality. When this mode
+//                is selected, NetEQ will do as few delay changes as possible,
+//                trying to maintain a high and constant delay. Meanwhile,
+//                the packet loss concealment efforts are reduced.
+//
+// -streaming   : In the case of one-way communication such as passive
+//                conference participant, a webinar, or a streaming application,
+//                this mode can be used to improve the jitter robustness at
+//                the cost of increased delay.
+//
+enum AudioPlayoutMode {
+  voice = 0,
+  fax = 1,
+  streaming = 2
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMSpeechType
+// An enumerator for possible labels of a decoded frame.
+//
+// -normal      : a normal speech frame. If VAD is enabled on the
+//                incoming stream this label indicates that the
+//                frame is active.
+// -PLC         : a PLC frame. The corresponding packet was lost
+//                and this frame was generated by PLC techniques.
+// -CNG         : the frame is comfort noise. This happens if VAD
+//                is enabled at the sender and we have received
+//                SID.
+// -PLCCNG      : PLC will fade to comfort noise if the duration
+//                of PLC is long. This labels such a case.
+// -VADPassive  : the VAD at the receiver recognizes this frame as
+//                passive.
+//
+enum ACMSpeechType {
+  normal = 0,
+  PLC = 1,
+  CNG = 2,
+  PLCCNG = 3,
+  VADPassive = 4
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMVADMode
+// An enumerator for aggressiveness of VAD
+// -VADNormal                : least aggressive mode.
+// -VADLowBitrate            : more aggressive than "VADNormal" to save on
+//                             bit-rate.
+// -VADAggr                  : an aggressive mode.
+// -VADVeryAggr              : the most aggressive mode.
+//
+enum ACMVADMode {
+  VADNormal = 0,
+  VADLowBitrate = 1,
+  VADAggr = 2,
+  VADVeryAggr = 3
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMCountries
+// An enumerator for countries, used when enabling CPT for a specific country.
+//
+enum ACMCountries {
+  ACMDisableCountryDetection = -1, // disable CPT detection
+  ACMUSA = 0,
+  ACMJapan,
+  ACMCanada,
+  ACMFrance,
+  ACMGermany,
+  ACMAustria,
+  ACMBelgium,
+  ACMUK,
+  ACMCzech,
+  ACMDenmark,
+  ACMFinland,
+  ACMGreece,
+  ACMHungary,
+  ACMIceland,
+  ACMIreland,
+  ACMItaly,
+  ACMLuxembourg,
+  ACMMexico,
+  ACMNorway,
+  ACMPoland,
+  ACMPortugal,
+  ACMSpain,
+  ACMSweden,
+  ACMTurkey,
+  ACMChina,
+  ACMHongkong,
+  ACMTaiwan,
+  ACMKorea,
+  ACMSingapore,
+  ACMNonStandard1
+// non-standard countries
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMAMRPackingFormat
+// An enumerator for different bit-packing format of AMR codec according to
+// RFC 3267.
+//
+// -AMRUndefined           : undefined.
+// -AMRBandwidthEfficient  : bandwidth-efficient mode.
+// -AMROctetAlligned       : Octet-aligned mode.
+// -AMRFileStorage         : file-storage mode.
+//
+enum ACMAMRPackingFormat {
+  AMRUndefined = -1,
+  AMRBandwidthEfficient = 0,
+  AMROctetAlligned = 1,
+  AMRFileStorage = 2
+};
+
+
+///////////////////////////////////////////////////////////////////////////
+//
+//   Struct containing network statistics
+//
+// -currentBufferSize      : current jitter buffer size in ms
+// -preferredBufferSize    : preferred (optimal) buffer size in ms
+// -jitterPeaksFound       : indicate if peaky-jitter mode is engaged, that is,
+//                           if severe but sparse network delays have occurred.
+// -currentPacketLossRate  : loss rate (network + late) (in Q14)
+// -currentDiscardRate     : late loss rate (in Q14)
+// -currentExpandRate      : fraction (of original stream) of synthesized
+//                           speech inserted through expansion (in Q14)
+// -currentPreemptiveRate  : fraction of synthesized speech inserted through
+//                           pre-emptive expansion (in Q14)
+// -currentAccelerateRate  : fraction of data removed through acceleration
+//                           (in Q14)
+// -clockDriftPPM          : clock-drift between sender and receiver in parts-
+//                           per-million. Positive means that receiver sample
+//                           rate is higher than sender sample rate.
+// -meanWaitingTimeMs      : average packet waiting time in the buffer
+// -medianWaitingTimeMs    : median packet waiting time in the buffer
+// -minWaitingTimeMs       : min packet waiting time in the buffer
+// -maxWaitingTimeMs       : max packet waiting time in the buffer
+typedef struct {
+  WebRtc_UWord16 currentBufferSize;
+  WebRtc_UWord16 preferredBufferSize;
+  bool jitterPeaksFound;
+  WebRtc_UWord16 currentPacketLossRate;
+  WebRtc_UWord16 currentDiscardRate;
+  WebRtc_UWord16 currentExpandRate;
+  WebRtc_UWord16 currentPreemptiveRate;
+  WebRtc_UWord16 currentAccelerateRate;
+  int32_t clockDriftPPM;
+  int meanWaitingTimeMs;
+  int medianWaitingTimeMs;
+  int minWaitingTimeMs;
+  int maxWaitingTimeMs;
+} ACMNetworkStatistics;
+
+///////////////////////////////////////////////////////////////////////////
+//
+// Enumeration of background noise modes; a mapping from the NetEQ interface.
+//
+// -On                  : default "normal" behavior with eternal noise
+// -Fade                : noise fades to zero after some time
+// -Off                 : background noise is always zero
+//
+enum ACMBackgroundNoiseMode {
+  On,
+  Fade,
+  Off
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_TYPEDEFS_H_
diff --git a/trunk/src/modules/audio_coding/main/source/Android.mk b/trunk/src/modules/audio_coding/main/source/Android.mk
new file mode 100644
index 0000000..90214a9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/Android.mk
@@ -0,0 +1,67 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_audio_coding
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    acm_cng.cc \
+    acm_codec_database.cc \
+    acm_dtmf_detection.cc \
+    acm_dtmf_playout.cc \
+    acm_g722.cc \
+    acm_generic_codec.cc \
+    acm_ilbc.cc \
+    acm_isac.cc \
+    acm_neteq.cc \
+    acm_pcm16b.cc \
+    acm_pcma.cc \
+    acm_pcmu.cc \
+    acm_red.cc \
+    acm_resampler.cc \
+    audio_coding_module.cc \
+    audio_coding_module_impl.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../codecs/cng/include \
+    $(LOCAL_PATH)/../../codecs/g711/include \
+    $(LOCAL_PATH)/../../codecs/g722/include \
+    $(LOCAL_PATH)/../../codecs/ilbc/interface \
+    $(LOCAL_PATH)/../../codecs/iSAC/main/interface \
+    $(LOCAL_PATH)/../../codecs/iSAC/fix/interface \
+    $(LOCAL_PATH)/../../codecs/pcm16b/include \
+    $(LOCAL_PATH)/../../neteq/interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../../common_audio/resampler/include \
+    $(LOCAL_PATH)/../../../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../../../common_audio/vad/include \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/main/source/acm_amr.cc b/trunk/src/modules/audio_coding/main/source/acm_amr.cc
new file mode 100644
index 0000000..71cf603
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_amr.cc
@@ -0,0 +1,444 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_amr.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "audio_coding_module_typedefs.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_AMR
+// NOTE! GSM AMR is not included in the open-source package. The following
+// interface file is needed:
+//
+// /modules/audio_coding/codecs/amr/main/interface/amr_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcAmr_CreateEnc(AMR_encinst_t_** encInst);
+// int16_t WebRtcAmr_CreateDec(AMR_decinst_t_** decInst);
+// int16_t WebRtcAmr_FreeEnc(AMR_encinst_t_* encInst);
+// int16_t WebRtcAmr_FreeDec(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_Encode(AMR_encinst_t_* encInst,
+//                          int16_t* input,
+//                          int16_t len,
+//                          int16_t* output,
+//                          int16_t mode);
+//  int16_t WebRtcAmr_EncoderInit(AMR_encinst_t_* encInst,
+//                               int16_t dtxMode);
+// int16_t WebRtcAmr_EncodeBitmode(AMR_encinst_t_* encInst,
+//                                 int format);
+// int16_t WebRtcAmr_Decode(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_DecodePlc(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_DecoderInit(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_DecodeBitmode(AMR_decinst_t_* decInst,
+//                                 int format);
+// void WebRtcAmr_Version(char *versionStr, short len);
+#include "amr_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_AMR
+ACMAMR::ACMAMR(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1),  // Invalid value.
+      _encodingRate(0),  // Invalid value.
+      _encoderPackingFormat(AMRBandwidthEfficient),
+      _decoderPackingFormat(AMRBandwidthEfficient) {
+  return;
+}
+
+ACMAMR::~ACMAMR() {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                     WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                 WebRtc_Word16 /* bitStreamLenByte */,
+                                 WebRtc_Word16* /* audio */,
+                                 WebRtc_Word16* /* audioSamples */,
+                                 WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::EnableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::DisableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                               const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMAMR::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMAMR::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMAMR::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMAMR::DestructDecoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+  return -1;
+}
+
+void ACMAMR::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::SetAMREncoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+WebRtc_Word16 ACMAMR::SetAMRDecoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+WebRtc_Word16 ACMAMR::UnregisterFromNetEqSafe(ACMNetEQ* /* netEq */,
+                                              WebRtc_Word16 /* payloadType */) {
+  return -1;
+}
+
+
+#else     //===================== Actual Implementation =======================
+
+#define WEBRTC_AMR_MR475  0
+#define WEBRTC_AMR_MR515  1
+#define WEBRTC_AMR_MR59   2
+#define WEBRTC_AMR_MR67   3
+#define WEBRTC_AMR_MR74   4
+#define WEBRTC_AMR_MR795  5
+#define WEBRTC_AMR_MR102  6
+#define WEBRTC_AMR_MR122  7
+
+ACMAMR::ACMAMR(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1), // invalid value
+      _encodingRate(0) { // invalid value
+  _codecID = codecID;
+  _hasInternalDTX = true;
+  _encoderPackingFormat = AMRBandwidthEfficient;
+  _decoderPackingFormat = AMRBandwidthEfficient;
+  return;
+}
+
+ACMAMR::~ACMAMR() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmr_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmr_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  return;
+}
+
+WebRtc_Word16 ACMAMR::InternalEncode(WebRtc_UWord8* bitStream,
+                                     WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 vadDecision = 1;
+  // Sanity check that the rate is set correctly. We might skip this
+  // sanity check; if the rate is not set correctly, the initialization
+  // flag should be false and we should not be here.
+  if ((_encodingMode < WEBRTC_AMR_MR475) ||
+      (_encodingMode > WEBRTC_AMR_MR122)) {
+    *bitStreamLenByte = 0;
+    return -1;
+  }
+  *bitStreamLenByte = WebRtcAmr_Encode(_encoderInstPtr,
+                                       &_inAudio[_inAudioIxRead],
+                                       _frameLenSmpl,
+                                       (WebRtc_Word16*) bitStream,
+                                       _encodingMode);
+
+  // Update VAD, if internal DTX is used
+  if (_hasInternalDTX && _dtxEnabled) {
+    if (*bitStreamLenByte <= (7 * _frameLenSmpl / 160)) {
+      vadDecision = 0;
+    }
+    for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+      _vadLabel[n] = vadDecision;
+    }
+  }
+  // increment the read index
+  _inAudioIxRead += _frameLenSmpl;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMAMR::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                 WebRtc_Word16 /* bitStreamLenByte */,
+                                 WebRtc_Word16* /* audio */,
+                                 WebRtc_Word16* /* audioSamples */,
+                                 WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// Enables AMR's internal DTX by re-initializing the encoder with DTX on.
+// Returns 0 on success (or if already enabled), -1 otherwise.
+WebRtc_Word16 ACMAMR::EnableDTX() {
+  // Already enabled: nothing to do.
+  if (_dtxEnabled) {
+    return 0;
+  }
+  // DTX cannot be enabled without an encoder instance.
+  if (!_encoderExist) {
+    return -1;
+  }
+  // Re-initialize the encoder with DTX switched on.
+  if (WebRtcAmr_EncoderInit(_encoderInstPtr, 1) < 0) {
+    return -1;
+  }
+  _dtxEnabled = true;
+  return 0;
+}
+
+// Disables AMR's internal DTX by re-initializing the encoder with DTX off.
+// Returns 0 on success (including when there is nothing to disable).
+WebRtc_Word16 ACMAMR::DisableDTX() {
+  // Already disabled: nothing to do.
+  if (!_dtxEnabled) {
+    return 0;
+  }
+  // The encoder doesn't exist, therefore disabling is harmless.
+  if (!_encoderExist) {
+    return 0;
+  }
+  // Re-initialize the encoder with DTX switched off.
+  if (WebRtcAmr_EncoderInit(_encoderInstPtr, 0) < 0) {
+    return -1;
+  }
+  _dtxEnabled = false;
+  return 0;
+}
+
+// Initializes the encoder: bit-rate, DTX mode, and packing format.
+// Returns 0 only if every step succeeded.
+WebRtc_Word16 ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  // Sanity check, mirroring ACMAMRwb::InternalInitEncoder: without this a
+  // NULL encoder instance would be handed to the AMR API calls below.
+  if (_encoderInstPtr == NULL) {
+    return -1;
+  }
+
+  // Each step's result is collapsed to -1/0; any failure fails the init.
+  WebRtc_Word16 status = SetBitRateSafe((codecParams->codecInstant).rate);
+  status += (WebRtcAmr_EncoderInit(
+      _encoderInstPtr, ((codecParams->enableDTX) ? 1 : 0)) < 0) ? -1 : 0;
+  status += (WebRtcAmr_EncodeBitmode(
+      _encoderInstPtr, _encoderPackingFormat) < 0) ? -1 : 0;
+  return (status < 0) ? -1 : 0;
+}
+
+// Initializes the decoder and its packing format. Returns 0 only if both
+// steps succeeded.
+WebRtc_Word16 ACMAMR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  WebRtc_Word16 status =
+      ((WebRtcAmr_DecoderInit(_decoderInstPtr) < 0) ? -1 : 0);
+  // Collapse the bit-mode result to -1/0 as well: adding the raw return
+  // value (previous behavior) could be positive and mask a failed
+  // DecoderInit above.
+  status += (WebRtcAmr_DecodeBitmode(
+      _decoderInstPtr, _decoderPackingFormat) < 0) ? -1 : 0;
+  return (status < 0) ? -1 : 0;
+}
+
+// Fills |codecDef| with what NetEQ needs to register the AMR decoder
+// (payload type, decoder instance, 8000 Hz). Fails if the decoder has not
+// been initialized.
+WebRtc_Word32 ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                               const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // Todo:
+    // log error
+    return -1;
+  }
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_AMR_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  SET_CODEC_PAR((codecDef), kDecoderAMR, codecInst.pltype, _decoderInstPtr,
+                8000);
+  SET_AMR_FUNCTIONS((codecDef));
+  return 0;
+}
+
+// For FEC: no shadow instance is provided for AMR.
+ACMGenericCodec* ACMAMR::CreateInstance(void) {
+  return NULL;
+}
+
+// Allocates the AMR encoder instance; returns the WebRtcAmr_CreateEnc
+// result (negative on failure).
+WebRtc_Word16 ACMAMR::InternalCreateEncoder() {
+  return WebRtcAmr_CreateEnc(&_encoderInstPtr);
+}
+
+// Frees the encoder instance and resets all encoder-related state to the
+// "no encoder" invariants.
+void ACMAMR::DestructEncoderSafe() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmr_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  // there is no encoder; set the following
+  _encoderExist = false;
+  _encoderInitialized = false;
+  _encodingMode = -1; // invalid value
+  _encodingRate = 0; // invalid value
+}
+
+// Allocates the AMR decoder instance; returns the WebRtcAmr_CreateDec
+// result (negative on failure).
+WebRtc_Word16 ACMAMR::InternalCreateDecoder() {
+  return WebRtcAmr_CreateDec(&_decoderInstPtr);
+}
+
+// Frees the decoder instance and resets decoder-related state.
+void ACMAMR::DestructDecoderSafe() {
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmr_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  // there is no decoder instance; set the following
+  _decoderExist = false;
+  _decoderInitialized = false;
+}
+
+// Maps the requested bit-rate onto the matching AMR-NB mode. Any rate
+// other than the eight defined AMR-NB rates is rejected with -1.
+WebRtc_Word16 ACMAMR::SetBitRateSafe(const WebRtc_Word32 rate) {
+  switch (rate) {
+    case 4750:
+      _encodingMode = WEBRTC_AMR_MR475;
+      break;
+    case 5150:
+      _encodingMode = WEBRTC_AMR_MR515;
+      break;
+    case 5900:
+      _encodingMode = WEBRTC_AMR_MR59;
+      break;
+    case 6700:
+      _encodingMode = WEBRTC_AMR_MR67;
+      break;
+    case 7400:
+      _encodingMode = WEBRTC_AMR_MR74;
+      break;
+    case 7950:
+      _encodingMode = WEBRTC_AMR_MR795;
+      break;
+    case 10200:
+      _encodingMode = WEBRTC_AMR_MR102;
+      break;
+    case 12200:
+      _encodingMode = WEBRTC_AMR_MR122;
+      break;
+    default:
+      return -1;
+  }
+  // Every accepted rate is stored verbatim (all fit in 16 bits).
+  _encodingRate = static_cast<WebRtc_Word16>(rate);
+  return 0;
+}
+
+// Frees an encoder instance handed over as an opaque pointer.
+void ACMAMR::InternalDestructEncoderInst(void* ptrInst) {
+  // Free the memory where ptrInst is pointing to. static_cast is the
+  // correct (and sufficient) conversion from void*; this also matches the
+  // AMR-WB and CELT implementations, which use static_cast here.
+  if (ptrInst != NULL) {
+    WebRtcAmr_FreeEnc(static_cast<AMR_encinst_t_*>(ptrInst));
+  }
+  return;
+}
+
+// Applies |packingFormat| to the encoder; only the three defined AMR
+// packing formats are accepted. Returns 0 on success, -1 otherwise.
+WebRtc_Word16 ACMAMR::SetAMREncoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat == AMRBandwidthEfficient) ||
+      (packingFormat == AMROctetAlligned) ||
+      (packingFormat == AMRFileStorage)) {
+    if (WebRtcAmr_EncodeBitmode(_encoderInstPtr, packingFormat) < 0) {
+      return -1;
+    }
+    _encoderPackingFormat = packingFormat;
+    return 0;
+  }
+  WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+               "Invalid AMR Encoder packing-format.");
+  return -1;
+}
+
+// Returns the packing format currently configured for the encoder.
+ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
+  return _encoderPackingFormat;
+}
+
+// Applies |packingFormat| to the decoder; only the three defined AMR
+// packing formats are accepted. Returns 0 on success, -1 otherwise.
+WebRtc_Word16 ACMAMR::SetAMRDecoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat == AMRBandwidthEfficient) ||
+      (packingFormat == AMROctetAlligned) ||
+      (packingFormat == AMRFileStorage)) {
+    if (WebRtcAmr_DecodeBitmode(_decoderInstPtr, packingFormat) < 0) {
+      return -1;
+    }
+    _decoderPackingFormat = packingFormat;
+    return 0;
+  }
+  WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+               "Invalid AMR decoder packing-format.");
+  return -1;
+}
+
+// Returns the packing format currently configured for the decoder.
+ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
+  return _decoderPackingFormat;
+}
+
+// Removes the AMR decoder from NetEQ, but only when |payloadType| matches
+// the payload type the decoder was registered with.
+WebRtc_Word16 ACMAMR::UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                              WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    // Three arguments are passed, so the format string needs three
+    // conversion specifiers (the stored payload type was dropped before).
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type %d",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  return netEq->RemoveCodec(kDecoderAMR);
+}
+
+#endif
+
+}
diff --git a/trunk/src/modules/audio_coding/main/source/acm_amr.h b/trunk/src/modules/audio_coding/main/source/acm_amr.h
new file mode 100644
index 0000000..cc342e7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_amr.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct AMR_encinst_t_;
+struct AMR_decinst_t_;
+
+namespace webrtc {
+
+enum ACMAMRPackingFormat;
+
+// ACM wrapper for the GSM AMR narrow-band codec (the codec library itself
+// is supplied separately; see the matching .cc file).
+class ACMAMR: public ACMGenericCodec {
+ public:
+  ACMAMR(WebRtc_Word16 codecID);
+  ~ACMAMR();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+  // Packing-format accessors; only the defined ACMAMRPackingFormat values
+  // are accepted by the setters.
+  WebRtc_Word16 SetAMREncoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMREncoderPackingFormat() const;
+
+  WebRtc_Word16 SetAMRDecoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMRDecoderPackingFormat() const;
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+
+  WebRtc_Word16 EnableDTX();
+
+  WebRtc_Word16 DisableDTX();
+
+  WebRtc_Word16 UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                        WebRtc_Word16 payloadType);
+
+  AMR_encinst_t_* _encoderInstPtr;  // Owned encoder instance (may be NULL).
+  AMR_decinst_t_* _decoderInstPtr;  // Owned decoder instance (may be NULL).
+  WebRtc_Word16 _encodingMode;  // AMR mode; -1 while unset.
+  WebRtc_Word16 _encodingRate;  // Bit-rate in bits/s; 0 while unset.
+  ACMAMRPackingFormat _encoderPackingFormat;
+  ACMAMRPackingFormat _decoderPackingFormat;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_amrwb.cc b/trunk/src/modules/audio_coding/main/source/acm_amrwb.cc
new file mode 100644
index 0000000..fed2e3f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_amrwb.cc
@@ -0,0 +1,450 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_amrwb.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "audio_coding_module_typedefs.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_AMRWB
+// NOTE! GSM AMR-wb is not included in the open-source package. The
+// following interface file is needed:
+//
+// /modules/audio_coding/codecs/amrwb/main/interface/amrwb_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcAmrWb_CreateEnc(AMRWB_encinst_t_** encInst);
+// int16_t WebRtcAmrWb_CreateDec(AMRWB_decinst_t_** decInst);
+// int16_t WebRtcAmrWb_FreeEnc(AMRWB_encinst_t_* encInst);
+// int16_t WebRtcAmrWb_FreeDec(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_Encode(AMRWB_encinst_t_* encInst, int16_t* input,
+//                            int16_t len, int16_t* output, int16_t mode);
+// int16_t WebRtcAmrWb_EncoderInit(AMRWB_encinst_t_* encInst,
+//                                 int16_t dtxMode);
+// int16_t WebRtcAmrWb_EncodeBitmode(AMRWB_encinst_t_* encInst,
+//                                    int format);
+// int16_t WebRtcAmrWb_Decode(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_DecodePlc(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_DecoderInit(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_DecodeBitmode(AMRWB_decinst_t_* decInst,
+//                                   int format);
+// void WebRtcAmrWb_Version(char *versionStr, short len);
+#include "amrwb_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_AMRWB
+// Stub implementation used when WEBRTC_CODEC_AMRWB is not compiled in:
+// construction succeeds, but every operation fails (or is a no-op), so the
+// codec is unusable without the separately-supplied AMR-WB library.
+ACMAMRwb::ACMAMRwb(WebRtc_Word16 /* codecID*/)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1),  // invalid value
+      _encodingRate(0),  // invalid value
+      _encoderPackingFormat(AMRBandwidthEfficient),
+      _decoderPackingFormat(AMRBandwidthEfficient) {
+  return;
+}
+
+ACMAMRwb::~ACMAMRwb() {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                       WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                   WebRtc_Word16 /* bitStreamLenByte */,
+                                   WebRtc_Word16* /* audio */,
+                                   WebRtc_Word16* /* audioSamples */,
+                                   WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::EnableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::DisableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                 const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec*
+ACMAMRwb::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMAMRwb::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMAMRwb::DestructDecoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+  return -1;
+}
+
+void ACMAMRwb::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::SetAMRwbEncoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+WebRtc_Word16 ACMAMRwb::SetAMRwbDecoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+WebRtc_Word16 ACMAMRwb::UnregisterFromNetEqSafe(
+    ACMNetEQ* /* netEq */,
+    WebRtc_Word16 /* payloadType */) {
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+#define AMRWB_MODE_7k       0
+#define AMRWB_MODE_9k       1
+#define AMRWB_MODE_12k      2
+#define AMRWB_MODE_14k      3
+#define AMRWB_MODE_16k      4
+#define AMRWB_MODE_18k      5
+#define AMRWB_MODE_20k      6
+#define AMRWB_MODE_23k      7
+#define AMRWB_MODE_24k      8
+
+// Real AMR-WB wrapper. Encoder/decoder instances are allocated later via
+// InternalCreateEncoder()/InternalCreateDecoder().
+ACMAMRwb::ACMAMRwb(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1), // invalid value
+      _encodingRate(0) { // invalid value
+  _codecID = codecID;
+  _hasInternalDTX = true;
+  _encoderPackingFormat = AMRBandwidthEfficient;
+  _decoderPackingFormat = AMRBandwidthEfficient;
+  return;
+}
+
+// Destructor: releases the AMR-WB encoder/decoder instances if allocated.
+ACMAMRwb::~ACMAMRwb() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  return;
+}
+
+// Encodes one frame of audio, starting at _inAudio[_inAudioIxRead], into
+// |bitStream|. On success returns the payload size in bytes (also written
+// to |bitStreamLenByte|) and advances the read index by one frame; if the
+// encoding mode is out of range, returns -1 with *bitStreamLenByte = 0.
+WebRtc_Word16 ACMAMRwb::InternalEncode(WebRtc_UWord8* bitStream,
+                                       WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 vadDecision = 1;
+  // sanity check, if the rate is set correctly. we might skip this
+  // sanity check. if rate is not set correctly, initialization flag
+  // should be false and should not be here.
+  if ((_encodingMode < AMRWB_MODE_7k) || (_encodingMode > AMRWB_MODE_24k)) {
+    *bitStreamLenByte = 0;
+    return -1;
+  }
+  *bitStreamLenByte = WebRtcAmrWb_Encode(_encoderInstPtr,
+                                         &_inAudio[_inAudioIxRead],
+                                         _frameLenSmpl,
+                                         (WebRtc_Word16*) bitStream,
+                                         _encodingMode);
+
+  // Update VAD, if internal DTX is used
+  if (_hasInternalDTX && _dtxEnabled) {
+    // A payload of at most 7 bytes per 10 ms is treated as non-speech,
+    // presumably a SID/no-data frame -- TODO(review): confirm against
+    // the AMR-WB encoder API.
+    if (*bitStreamLenByte <= (7 * _frameLenSmpl / 160)) {
+      vadDecision = 0;
+    }
+    for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+      _vadLabel[n] = vadDecision;
+    }
+  }
+  // Advance the read index; this tells the caller how far we have gone
+  // forward in reading the audio buffer.
+  _inAudioIxRead += _frameLenSmpl;
+  return *bitStreamLenByte;
+}
+
+// No-op: every parameter is unused and 0 (success) is always returned.
+// NOTE(review): decoding appears to be performed elsewhere (NetEQ, see
+// CodecDef below) -- confirm.
+WebRtc_Word16 ACMAMRwb::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                   WebRtc_Word16 /* bitStreamLenByte */,
+                                   WebRtc_Word16* /* audio */,
+                                   WebRtc_Word16* /* audioSamples */,
+                                   WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// Enables AMR-WB's internal DTX by re-initializing the encoder with DTX
+// on. Returns 0 on success (or if already enabled), -1 otherwise.
+WebRtc_Word16 ACMAMRwb::EnableDTX() {
+  // Already enabled: nothing to do.
+  if (_dtxEnabled) {
+    return 0;
+  }
+  // DTX cannot be enabled without an encoder instance.
+  if (!_encoderExist) {
+    return -1;
+  }
+  // Re-initialize the encoder with DTX switched on.
+  if (WebRtcAmrWb_EncoderInit(_encoderInstPtr, 1) < 0) {
+    return -1;
+  }
+  _dtxEnabled = true;
+  return 0;
+}
+
+// Disables internal DTX by re-initializing the encoder with DTX off.
+// Returns 0 on success (including when there is nothing to disable).
+WebRtc_Word16 ACMAMRwb::DisableDTX() {
+  // Already disabled: nothing to do.
+  if (!_dtxEnabled) {
+    return 0;
+  }
+  // The encoder doesn't exist, therefore disabling is harmless.
+  if (!_encoderExist) {
+    return 0;
+  }
+  // Re-initialize the encoder with DTX switched off.
+  if (WebRtcAmrWb_EncoderInit(_encoderInstPtr, 0) < 0) {
+    return -1;
+  }
+  _dtxEnabled = false;
+  return 0;
+}
+
+// Initializes the encoder: bit-rate, DTX mode, and packing format.
+// Each step's result is collapsed to -1/0; returns 0 only if every step
+// succeeded.
+WebRtc_Word16 ACMAMRwb::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  // sanity check
+  if (_encoderInstPtr == NULL) {
+    return -1;
+  }
+
+  WebRtc_Word16 status = SetBitRateSafe((codecParams->codecInstant).rate);
+  status += (WebRtcAmrWb_EncoderInit(
+      _encoderInstPtr, ((codecParams->enableDTX) ? 1 : 0)) < 0) ? -1 : 0;
+  status += (WebRtcAmrWb_EncodeBitmode(
+      _encoderInstPtr, _encoderPackingFormat) < 0) ? -1 : 0;
+  return (status < 0) ? -1 : 0;
+}
+
+// Initializes the decoder's packing format and the decoder itself.
+// Returns 0 only if both steps succeeded.
+WebRtc_Word16 ACMAMRwb::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // Collapse the bit-mode result to -1/0: using the raw return value
+  // (previous behavior) could be positive and mask a failed DecoderInit
+  // below.
+  WebRtc_Word16 status = (WebRtcAmrWb_DecodeBitmode(
+      _decoderInstPtr, _decoderPackingFormat) < 0) ? -1 : 0;
+  status += ((WebRtcAmrWb_DecoderInit(_decoderInstPtr) < 0) ? -1 : 0);
+  return (status < 0) ? -1 : 0;
+}
+
+// Fills |codecDef| with what NetEQ needs to register the AMR-WB decoder
+// (payload type, decoder instance, 16000 Hz). Fails if the decoder has
+// not been initialized.
+WebRtc_Word32 ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                 const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    return -1;
+  }
+
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_AMRWB_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  SET_CODEC_PAR((codecDef), kDecoderAMRWB, codecInst.pltype, _decoderInstPtr,
+                16000);
+  SET_AMRWB_FUNCTIONS((codecDef));
+  return 0;
+}
+
+// For FEC: no shadow instance is provided for AMR-WB.
+ACMGenericCodec* ACMAMRwb::CreateInstance(void) {
+  return NULL;
+}
+
+// Allocates the AMR-WB encoder instance; returns the API result
+// (negative on failure).
+WebRtc_Word16 ACMAMRwb::InternalCreateEncoder() {
+  return WebRtcAmrWb_CreateEnc(&_encoderInstPtr);
+}
+
+// Frees the encoder instance and resets all encoder-related state.
+void ACMAMRwb::DestructEncoderSafe() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  // there is no encoder; set the following
+  _encoderExist = false;
+  _encoderInitialized = false;
+  _encodingMode = -1; // invalid value
+  _encodingRate = 0;
+}
+
+// Allocates the AMR-WB decoder instance; returns the API result
+// (negative on failure).
+WebRtc_Word16 ACMAMRwb::InternalCreateDecoder() {
+  return WebRtcAmrWb_CreateDec(&_decoderInstPtr);
+}
+
+// Frees the decoder instance and resets decoder-related state.
+void ACMAMRwb::DestructDecoderSafe() {
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  // there is no decoder instance; set the following
+  _decoderExist = false;
+  _decoderInitialized = false;
+}
+
+// Maps the requested bit-rate onto the matching AMR-WB mode. Any rate
+// other than the nine defined AMR-WB rates is rejected with -1.
+WebRtc_Word16 ACMAMRwb::SetBitRateSafe(const WebRtc_Word32 rate) {
+  switch (rate) {
+    case 7000:
+      _encodingMode = AMRWB_MODE_7k;
+      break;
+    case 9000:
+      _encodingMode = AMRWB_MODE_9k;
+      break;
+    case 12000:
+      _encodingMode = AMRWB_MODE_12k;
+      break;
+    case 14000:
+      _encodingMode = AMRWB_MODE_14k;
+      break;
+    case 16000:
+      _encodingMode = AMRWB_MODE_16k;
+      break;
+    case 18000:
+      _encodingMode = AMRWB_MODE_18k;
+      break;
+    case 20000:
+      _encodingMode = AMRWB_MODE_20k;
+      break;
+    case 23000:
+      _encodingMode = AMRWB_MODE_23k;
+      break;
+    case 24000:
+      _encodingMode = AMRWB_MODE_24k;
+      break;
+    default:
+      return -1;
+  }
+  // Every accepted rate is stored verbatim (all fit in 16 bits).
+  _encodingRate = static_cast<WebRtc_Word16>(rate);
+  return 0;
+}
+
+// Frees an encoder instance handed over as an opaque pointer.
+void ACMAMRwb::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcAmrWb_FreeEnc(static_cast<AMRWB_encinst_t_*>(ptrInst));
+  }
+  return;
+}
+
+// Applies |packingFormat| to the encoder; only the three defined AMR
+// packing formats are accepted. Returns 0 on success, -1 otherwise.
+WebRtc_Word16 ACMAMRwb::SetAMRwbEncoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat != AMRBandwidthEfficient) &&
+      (packingFormat != AMROctetAlligned) &&
+      (packingFormat != AMRFileStorage)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Invalid AMRwb encoder packing-format.");
+    return -1;
+  } else {
+    if (WebRtcAmrWb_EncodeBitmode(_encoderInstPtr, packingFormat) < 0) {
+      return -1;
+    } else {
+      _encoderPackingFormat = packingFormat;
+      return 0;
+    }
+  }
+}
+
+// Returns the packing format currently configured for the encoder.
+ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
+  return _encoderPackingFormat;
+}
+
+// Applies |packingFormat| to the decoder; only the three defined AMR
+// packing formats are accepted. Returns 0 on success, -1 otherwise.
+WebRtc_Word16 ACMAMRwb::SetAMRwbDecoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat != AMRBandwidthEfficient) &&
+      (packingFormat != AMROctetAlligned) &&
+      (packingFormat != AMRFileStorage)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Invalid AMRwb decoder packing-format.");
+    return -1;
+  } else {
+    if (WebRtcAmrWb_DecodeBitmode(_decoderInstPtr, packingFormat) < 0) {
+      return -1;
+    } else {
+      _decoderPackingFormat = packingFormat;
+      return 0;
+    }
+  }
+}
+
+// Returns the packing format currently configured for the decoder.
+ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
+  return _decoderPackingFormat;
+}
+
+// Removes the AMR-WB decoder from NetEQ, but only when |payloadType|
+// matches the payload type the decoder was registered with.
+WebRtc_Word16 ACMAMRwb::UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                                WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    // Fixed: the adjacent string literals were missing a separating space
+    // ("does not""match"), and three arguments were passed with only two
+    // conversion specifiers in the format string.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type %d",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  return netEq->RemoveCodec(kDecoderAMRWB);
+}
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_amrwb.h b/trunk/src/modules/audio_coding/main/source/acm_amrwb.h
new file mode 100644
index 0000000..775f07f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_amrwb.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct AMRWB_encinst_t_;
+struct AMRWB_decinst_t_;
+
+namespace webrtc {
+
+enum ACMAMRPackingFormat;
+
+// ACM wrapper for the GSM AMR wide-band codec (the codec library itself
+// is supplied separately; see the matching .cc file).
+class ACMAMRwb: public ACMGenericCodec {
+ public:
+  ACMAMRwb(WebRtc_Word16 codecID);
+  ~ACMAMRwb();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams* codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams* codecParams);
+
+  // Packing-format accessors; only the defined ACMAMRPackingFormat values
+  // are accepted by the setters.
+  WebRtc_Word16 SetAMRwbEncoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMRwbEncoderPackingFormat() const;
+
+  WebRtc_Word16 SetAMRwbDecoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMRwbDecoderPackingFormat() const;
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+
+  WebRtc_Word16 EnableDTX();
+
+  WebRtc_Word16 DisableDTX();
+
+  WebRtc_Word16 UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                        WebRtc_Word16 payloadType);
+
+  AMRWB_encinst_t_* _encoderInstPtr;  // Owned encoder instance (may be NULL).
+  AMRWB_decinst_t_* _decoderInstPtr;  // Owned decoder instance (may be NULL).
+
+  WebRtc_Word16 _encodingMode;  // AMR-WB mode; -1 while unset.
+  WebRtc_Word16 _encodingRate;  // Bit-rate in bits/s; 0 while unset.
+  ACMAMRPackingFormat _encoderPackingFormat;
+  ACMAMRPackingFormat _decoderPackingFormat;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_celt.cc b/trunk/src/modules/audio_coding/main/source/acm_celt.cc
new file mode 100644
index 0000000..5b12f32
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_celt.cc
@@ -0,0 +1,317 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_celt.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+// TODO(tlegrand): Add full paths.
+
+#ifdef WEBRTC_CODEC_CELT
+// NOTE! Celt is not included in the open-source package. Modify this file or
+// your codec API to match the function call and name of used Celt API file.
+#include "celt_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_CELT
+
+// Stub implementation used when WEBRTC_CODEC_CELT is not compiled in:
+// construction succeeds, but every operation fails (or is a no-op).
+ACMCELT::ACMCELT(int16_t /* codecID */)
+    : enc_inst_ptr_(NULL),
+      dec_inst_ptr_(NULL),
+      sampling_freq_(0),
+      bitrate_(0),
+      channels_(1),
+      dec_channels_(1) {
+  return;
+}
+
+ACMCELT::~ACMCELT() {
+  return;
+}
+
+int16_t ACMCELT::InternalEncode(uint8_t* /* bitStream */,
+                                int16_t* /* bitStreamLenByte */) {
+  return -1;
+}
+
+int16_t ACMCELT::DecodeSafe(uint8_t* /* bitStream */,
+                            int16_t /* bitStreamLenByte */,
+                            int16_t* /* audio */,
+                            int16_t* /* audioSamples */,
+                            WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef  */,
+                          const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMCELT::CreateInstance(void) {
+  return NULL;
+}
+
+int16_t ACMCELT::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMCELT::DestructEncoderSafe() {
+  return;
+}
+
+int16_t ACMCELT::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMCELT::DestructDecoderSafe() {
+  return;
+}
+
+void ACMCELT::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+int16_t ACMCELT::UnregisterFromNetEqSafe(ACMNetEQ* /* netEq */,
+                                         int16_t /* payloadType */) {
+  return -1;
+}
+
+bool ACMCELT::IsTrueStereoCodec() {
+  return true;
+}
+
+int16_t ACMCELT::SetBitRateSafe(const int32_t /*rate*/) {
+  return -1;
+}
+
+#else  //===================== Actual Implementation =======================
+
+// Real CELT wrapper. Encoder/decoder instances are allocated later via
+// InternalCreateEncoder()/InternalCreateDecoder().
+ACMCELT::ACMCELT(int16_t codecID)
+    : enc_inst_ptr_(NULL),
+      dec_inst_ptr_(NULL),
+      sampling_freq_(32000),  // Default sampling frequency.
+      bitrate_(64000),  // Default rate.
+      channels_(1),  // Default send mono.
+      dec_channels_(1) {  // Default receive mono.
+  // TODO(tlegrand): remove later when ACMGenericCodec has a new constructor.
+  _codecID = codecID;
+
+  return;
+}
+
+// Destructor: releases the CELT encoder/decoder instances if allocated.
+ACMCELT::~ACMCELT() {
+  if (enc_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeEnc(enc_inst_ptr_);
+    enc_inst_ptr_ = NULL;
+  }
+  if (dec_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeDec(dec_inst_ptr_);
+    dec_inst_ptr_ = NULL;
+  }
+  return;
+}
+
+// Encodes one frame (all channels) from the internal audio buffer into
+// |bitStream|. Returns the payload size in bytes, or -1 on encoder error
+// (with *bitStreamLenByte set to 0).
+int16_t ACMCELT::InternalEncode(uint8_t* bitStream, int16_t* bitStreamLenByte) {
+  *bitStreamLenByte = 0;
+
+  // Call Encoder.
+  *bitStreamLenByte = WebRtcCelt_Encode(enc_inst_ptr_,
+                                        &_inAudio[_inAudioIxRead],
+                                        bitStream);
+
+  // Increment the read index; this tells the caller how far we have gone
+  // forward in reading the audio buffer.
+  // NOTE(review): the index is advanced even when the encoder reports an
+  // error below -- confirm this is intentional (the failed frame's audio
+  // is dropped rather than retried).
+  _inAudioIxRead += _frameLenSmpl * channels_;
+
+  if (*bitStreamLenByte < 0) {
+    // Error reported from the encoder.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalEncode: Encode error for Celt");
+    *bitStreamLenByte = 0;
+    return -1;
+  }
+
+  return *bitStreamLenByte;
+}
+
+// No-op: every parameter is unused and 0 (success) is always returned.
+// NOTE(review): decoding appears to be performed elsewhere (NetEQ, see
+// CodecDef below) -- confirm.
+int16_t ACMCELT::DecodeSafe(uint8_t* /* bitStream */,
+                            int16_t /* bitStreamLenByte */,
+                            int16_t* /* audio */,
+                            int16_t* /* audioSamples */,
+                            WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// Initializes the encoder: validates/sets the bit-rate, re-creates the
+// encoder instance if the channel count changed, then initializes it.
+// Returns 0 on success, -1 on any failure.
+int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  // Set bitrate and check that it is within the valid range.
+  int16_t status = SetBitRateSafe((codecParams->codecInstant).rate);
+  if (status < 0) {
+    return -1;
+  }
+
+  // If number of channels changed we need to re-create memory.
+  if (codecParams->codecInstant.channels != channels_) {
+    WebRtcCelt_FreeEnc(enc_inst_ptr_);
+    enc_inst_ptr_ = NULL;
+    // Store new number of channels.
+    channels_ = codecParams->codecInstant.channels;
+    if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, channels_) < 0) {
+       return -1;
+    }
+  }
+
+  // Initiate encoder.
+  if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+// Initializes the decoder, re-creating the decoder instance first if the
+// channel count changed. Returns 0 on success, -1 on any failure.
+int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* codecParams) {
+  // If number of channels changed we need to re-create memory.
+  if (codecParams->codecInstant.channels != dec_channels_) {
+    WebRtcCelt_FreeDec(dec_inst_ptr_);
+    dec_inst_ptr_ = NULL;
+    // Store new number of channels.
+    dec_channels_ = codecParams->codecInstant.channels;
+    if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
+       return -1;
+    }
+  }
+
+  // Initiate decoder.
+  if (WebRtcCelt_DecoderInit(dec_inst_ptr_) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalInitDecoder: init decoder failed for Celt.");
+    return -1;
+  }
+  return 0;
+}
+
+// Fills |codecDef| with what NetEQ needs to register the CELT decoder
+// (payload type, decoder instance, 32000 Hz). The master NetEQ instance
+// gets the regular decoder functions; a slave gets the slave functions.
+int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                          const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "CodecDef: Decoder uninitialized for Celt");
+    return -1;
+  }
+
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" and "SET_CELT_FUNCTIONS" or "SET_CELTSLAVE_FUNCTIONS".
+  // Then call NetEQ to add the codec to it's
+  // database.
+  SET_CODEC_PAR((codecDef), kDecoderCELT_32, codecInst.pltype, dec_inst_ptr_,
+                32000);
+  // If this is the master of NetEQ, regular decoder will be added, otherwise
+  // the slave decoder will be used.
+  if (_isMaster) {
+    SET_CELT_FUNCTIONS((codecDef));
+  } else {
+    SET_CELTSLAVE_FUNCTIONS((codecDef));
+  }
+  return 0;
+}
+
+// For FEC: no shadow instance is provided for CELT.
+ACMGenericCodec* ACMCELT::CreateInstance(void) {
+  return NULL;
+}
+
+// Allocates the CELT encoder for the currently requested channel count.
+// Returns 0 on success, -1 (with a trace) on failure.
+int16_t ACMCELT::InternalCreateEncoder() {
+  if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, _noChannels) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalCreateEncoder: create encoder failed for Celt");
+    return -1;
+  }
+  channels_ = _noChannels;
+  return 0;
+}
+
+// Frees the encoder instance and resets encoder-related state.
+void ACMCELT::DestructEncoderSafe() {
+  _encoderExist = false;
+  _encoderInitialized = false;
+  if (enc_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeEnc(enc_inst_ptr_);
+    enc_inst_ptr_ = NULL;
+  }
+}
+
+// Allocates the CELT decoder for the current decode channel count.
+// Returns 0 on success, -1 (with a trace) on failure.
+int16_t ACMCELT::InternalCreateDecoder() {
+  if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalCreateDecoder: create decoder failed for Celt");
+    return -1;
+  }
+
+  return 0;
+}
+
+// Frees the decoder instance and resets decoder-related state.
+void ACMCELT::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (dec_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeDec(dec_inst_ptr_);
+    dec_inst_ptr_ = NULL;
+  }
+}
+
+// Frees an encoder instance handed over as an opaque pointer.
+void ACMCELT::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcCelt_FreeEnc(static_cast<CELT_encinst_t*>(ptrInst));
+  }
+  return;
+}
+
+// Removes the CELT decoder from NetEQ, but only when |payloadType| matches
+// the payload type the decoder was registered with.
+int16_t ACMCELT::UnregisterFromNetEqSafe(ACMNetEQ* netEq, int16_t payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    // Fixed: three arguments were passed to WEBRTC_TRACE but the format
+    // string contained no conversion specifiers for them.
+    WEBRTC_TRACE(
+        webrtc::kTraceError,
+        webrtc::kTraceAudioCoding,
+        _uniqueID,
+        "Cannot unregister codec %s: given payload-type %d does not match "
+        "the stored payload type %d",
+        _decoderParams.codecInstant.plname, payloadType,
+        _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  return netEq->RemoveCodec(kDecoderCELT_32);
+}
+
+// Reports CELT as a true stereo codec to the ACM framework.
+bool ACMCELT::IsTrueStereoCodec() {
+  return true;
+}
+
+// Accepts any rate in [48000, 128000] bits/s; anything else is rejected
+// with a trace message.
+int16_t ACMCELT::SetBitRateSafe(const int32_t rate) {
+  // Check that rate is in the valid range.
+  if ((rate >= 48000) && (rate <= 128000)) {
+    bitrate_ = rate;
+    return 0;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "SetBitRateSafe: Invalid rate Celt");
+    return -1;
+  }
+}
+
+#endif
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_celt.h b/trunk/src/modules/audio_coding/main/source/acm_celt.h
new file mode 100644
index 0000000..776bd7f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_celt.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct CELT_encinst_t_;
+struct CELT_decinst_t_;
+
+namespace webrtc {
+
+class ACMCELT : public ACMGenericCodec {
+ public:
+  ACMCELT(int16_t codecID);
+  ~ACMCELT();
+
+  ACMGenericCodec* CreateInstance(void);
+
+  int16_t InternalEncode(uint8_t* bitstream, int16_t* bitStreamLenByte);
+
+  int16_t InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  int16_t InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+
+  WebRtc_Word16 DecodeSafe(
+      uint8_t* /* bitStream */,
+      int16_t /* bitStreamLenByte */,
+      int16_t* /* audio */,
+      int16_t* /* audioSamples */,
+      // TODO(leozwang): use int8_t here when WebRtc_Word8 is properly typed.
+      // http://code.google.com/p/webrtc/issues/detail?id=311
+      WebRtc_Word8* /* speechType */);
+
+  int32_t CodecDef(WebRtcNetEQ_CodecDef& codecDef, const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  int16_t InternalCreateEncoder();
+
+  int16_t InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  int16_t UnregisterFromNetEqSafe(ACMNetEQ* netEq, int16_t payloadType);
+
+  bool IsTrueStereoCodec();
+
+  int16_t SetBitRateSafe(const int32_t rate);
+
+  CELT_encinst_t_* enc_inst_ptr_;
+  CELT_decinst_t_* dec_inst_ptr_;
+  uint16_t sampling_freq_;
+  int32_t bitrate_;
+  uint16_t channels_;
+  uint16_t dec_channels_;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_cng.cc b/trunk/src/modules/audio_coding/main/source/acm_cng.cc
new file mode 100644
index 0000000..1bda752
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_cng.cc
@@ -0,0 +1,157 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_cng.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_cng.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+namespace webrtc {
+
+ACMCNG::ACMCNG(WebRtc_Word16 codecID) {
+  _encoderInstPtr = NULL;
+  _decoderInstPtr = NULL;
+  _codecID = codecID;
+  _sampFreqHz = ACMCodecDB::CodecFreq(_codecID);
+  return;
+}
+
+ACMCNG::~ACMCNG() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcCng_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    WebRtcCng_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  return;
+}
+
+// CNG is not like a regular encoder, this function
+// should not be called normally
+// instead the following function is called from inside
+// ACMGenericCodec::ProcessFrameVADDTX
+WebRtc_Word16 ACMCNG::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                     WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMCNG::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                 WebRtc_Word16 /* bitStreamLenByte */,
+                                 WebRtc_Word16* /* audio */,
+                                 WebRtc_Word16* /* audioSamples */,
+                                 WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// CNG is not like a regular encoder,
+// this function should not be called normally
+// instead the following function is called from inside
+// ACMGenericCodec::ProcessFrameVADDTX
+WebRtc_Word16 ACMCNG::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMCNG::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return WebRtcCng_InitDec(_decoderInstPtr);
+}
+
+WebRtc_Word32 ACMCNG::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                               const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // TODO (tlegrand): log error
+    return -1;
+  }
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_CNG_FUNCTION."
+  // Then return the structure back to NetEQ to add the codec to it's
+  // database.
+
+  if (_sampFreqHz == 8000 || _sampFreqHz == 16000 || _sampFreqHz == 32000) {
+    SET_CODEC_PAR((codecDef), kDecoderCNG, codecInst.pltype,
+        _decoderInstPtr, _sampFreqHz);
+    SET_CNG_FUNCTIONS((codecDef));
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+ACMGenericCodec* ACMCNG::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMCNG::InternalCreateEncoder() {
+  if (WebRtcCng_CreateEnc(&_encoderInstPtr) < 0) {
+    _encoderInstPtr = NULL;
+    return -1;
+  } else {
+    return 0;
+  }
+}
+
+void ACMCNG::DestructEncoderSafe() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcCng_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  _encoderExist = false;
+  _encoderInitialized = false;
+}
+
+WebRtc_Word16 ACMCNG::InternalCreateDecoder() {
+  if (WebRtcCng_CreateDec(&_decoderInstPtr) < 0) {
+    _decoderInstPtr = NULL;
+    return -1;
+  } else {
+    return 0;
+  }
+}
+
+void ACMCNG::DestructDecoderSafe() {
+  if (_decoderInstPtr != NULL) {
+    WebRtcCng_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  _decoderExist = false;
+  _decoderInitialized = false;
+}
+
+void ACMCNG::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcCng_FreeEnc(static_cast<CNG_enc_inst*>(ptrInst));
+  }
+  return;
+}
+
+WebRtc_Word16 ACMCNG::UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                              WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    WEBRTC_TRACE(
+                 webrtc::kTraceError,
+                 webrtc::kTraceAudioCoding,
+                 _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  return netEq->RemoveCodec(kDecoderCNG);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_cng.h b/trunk/src/modules/audio_coding/main/source/acm_cng.h
new file mode 100644
index 0000000..1781b2b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_cng.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct WebRtcCngEncInst;
+struct WebRtcCngDecInst;
+
+namespace webrtc {
+
+class ACMCNG: public ACMGenericCodec {
+ public:
+  ACMCNG(WebRtc_Word16 codecID);
+  ~ACMCNG();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 EnableDTX() {
+    return -1;
+  }
+
+  WebRtc_Word16 DisableDTX() {
+    return -1;
+  }
+
+  WebRtc_Word16 UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                        WebRtc_Word16 payloadType);
+
+  WebRtcCngEncInst* _encoderInstPtr;
+  WebRtcCngDecInst* _decoderInstPtr;
+  WebRtc_Word16 _sampFreqHz;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_codec_database.cc b/trunk/src/modules/audio_coding/main/source/acm_codec_database.cc
new file mode 100644
index 0000000..c623d29
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_codec_database.cc
@@ -0,0 +1,998 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file generates databases with information about all supported audio
+ * codecs.
+ */
+
+// TODO(tlegrand): Change constant input pointers in all functions to constant
+// references, where appropriate.
+#include "acm_codec_database.h"
+
+#include <stdio.h>
+
+#include "acm_common_defs.h"
+#include "trace.h"
+
+// Includes needed to get version info and to create the codecs.
+// G.711, PCM mu-law and A-law.
+#include "acm_pcma.h"
+#include "acm_pcmu.h"
+#include "g711_interface.h"
+// CNG.
+#include "acm_cng.h"
+#include "webrtc_cng.h"
+// NetEQ.
+#include "webrtc_neteq.h"
+#ifdef WEBRTC_CODEC_ISAC
+    #include "acm_isac.h"
+    #include "acm_isac_macros.h"
+    #include "isac.h"
+#endif
+#ifdef WEBRTC_CODEC_ISACFX
+    #include "acm_isac.h"
+    #include "acm_isac_macros.h"
+    #include "isacfix.h"
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    #include "pcm16b.h"
+    #include "acm_pcm16b.h"
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    #include "acm_ilbc.h"
+    #include "ilbc.h"
+#endif
+#ifdef WEBRTC_CODEC_AMR
+    #include "acm_amr.h"
+    #include "amr_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+    #include "acm_amrwb.h"
+    #include "amrwb_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    #include "acm_celt.h"
+    #include "celt_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G722
+    #include "acm_g722.h"
+    #include "g722_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+    #include "acm_g7221.h"
+    #include "g7221_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+    #include "acm_g7221c.h"
+    #include "g7221c_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G729
+    #include "acm_g729.h"
+    #include "g729_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    #include "acm_g7291.h"
+    #include "g7291_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+    #include "acm_gsmfr.h"
+    #include "gsmfr_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+    #include "acm_speex.h"
+    #include "speex_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_AVT
+    #include "acm_dtmf_playout.h"
+#endif
+#ifdef WEBRTC_CODEC_RED
+    #include "acm_red.h"
+#endif
+
+namespace webrtc {
+
+// We dynamically allocate some of the dynamic payload types to the defined
+// codecs. Note! There are a limited number of payload types. If more codecs
+// are defined they will receive reserved fixed payload types (values 67-95).
+const int kDynamicPayloadtypes[ACMCodecDB::kMaxNumCodecs] = {
+  105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+  120, 121, 122, 123, 124, 125, 126,  95,  94,  93,  92,  91,  90,  89,
+  88,  87,  86,  85,  84,  83,  82,  81,  80,  79,  78,  77,  76,  75,
+  74,  73,  72,  71,  70,  69,  68,  67
+};
+
+// Creates database with all supported codec at compile time.
+// Each entry needs the following parameters in the given order:
+// payload type, name, sampling frequency, packet size in samples,
+// default channel support, and default rate.
+#if (defined(WEBRTC_CODEC_PCM16) || \
+     defined(WEBRTC_CODEC_AMR) || defined(WEBRTC_CODEC_AMRWB) || \
+     defined(WEBRTC_CODEC_CELT) || defined(WEBRTC_CODEC_G729_1) || \
+     defined(WEBRTC_CODEC_SPEEX) || defined(WEBRTC_CODEC_G722_1) || \
+     defined(WEBRTC_CODEC_G722_1C))
+static int count_database = 0;
+#endif
+
+const CodecInst ACMCodecDB::database_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  {103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
+# if (defined(WEBRTC_CODEC_ISAC))
+  {104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  {kDynamicPayloadtypes[count_database++], "L16", 8000, 80, 1, 128000},
+  {kDynamicPayloadtypes[count_database++], "L16", 16000, 160, 1, 256000},
+  {kDynamicPayloadtypes[count_database++], "L16", 32000, 320, 1, 512000},
+#endif
+  // G.711, PCM mu-law and A-law.
+  {0, "PCMU", 8000, 160, 1, 64000},
+  {8, "PCMA", 8000, 160, 1, 64000},
+#ifdef WEBRTC_CODEC_ILBC
+  {102, "ILBC", 8000, 240, 1, 13300},
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  {kDynamicPayloadtypes[count_database++], "AMR", 8000, 160, 1, 12200},
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  {kDynamicPayloadtypes[count_database++], "AMR-WB", 16000, 320, 1, 20000},
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  {kDynamicPayloadtypes[count_database++], "CELT", 32000, 320, 2, 64000},
+#endif
+#ifdef WEBRTC_CODEC_G722
+  {9, "G722", 16000, 320, 1, 64000},
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  {kDynamicPayloadtypes[count_database++], "G7221", 16000, 320, 1, 32000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 16000, 320, 1, 24000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 16000, 320, 1, 16000},
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  {kDynamicPayloadtypes[count_database++], "G7221", 32000, 640, 1, 48000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 32000, 640, 1, 32000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 32000, 640, 1, 24000},
+#endif
+#ifdef WEBRTC_CODEC_G729
+  {18, "G729", 8000, 240, 1, 8000},
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  {kDynamicPayloadtypes[count_database++], "G7291", 16000, 320, 1, 32000},
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  {3, "GSM", 8000, 160, 1, 13200},
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  {kDynamicPayloadtypes[count_database++], "speex", 8000, 160, 1, 11000},
+  {kDynamicPayloadtypes[count_database++], "speex", 16000, 320, 1, 22000},
+#endif
+  // Comfort noise for three different sampling frequencies.
+  {13, "CN", 8000, 240, 1, 0},
+  {98, "CN", 16000, 480, 1, 0},
+  {99, "CN", 32000, 960, 1, 0},
+#ifdef WEBRTC_CODEC_AVT
+  {106, "telephone-event", 8000, 240, 1, 0},
+#endif
+#ifdef WEBRTC_CODEC_RED
+  {127, "red", 8000, 0, 1, 0},
+#endif
+  // To prevent compile errors due to trailing commas.
+  {-1, "Null", -1, -1, -1, -1}
+};
+
+// Create database with all codec settings at compile time.
+// Each entry needs the following parameters in the given order:
+// Number of allowed packet sizes, a vector with the allowed packet sizes,
+// Basic block samples, max number of channels that are supported.
+const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1},
+# if (defined(WEBRTC_CODEC_ISAC))
+  {1, {kIsacPacSize960}, 0, 1},
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  {4, {80, 160, 240, 320}, 0, 2},
+  {4, {160, 320, 480, 640}, 0, 2},
+  {2, {320, 640}, 0, 2},
+#endif
+  // G.711, PCM mu-law and A-law.
+  {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+  {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+#ifdef WEBRTC_CODEC_ILBC
+  {4, {160, 240, 320, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  {3, {160, 320, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  {3, {320, 640, 960}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  {1, {320}, 0, 2},
+#endif
+#ifdef WEBRTC_CODEC_G722
+  {6, {160, 320, 480, 640, 800, 960}, 0, 2},
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  {1, {320}, 320, 2},
+  {1, {320}, 320, 2},
+  {1, {320}, 320, 2},
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  {1, {640}, 640, 2},
+  {1, {640}, 640, 2},
+  {1, {640}, 640, 2},
+#endif
+#ifdef WEBRTC_CODEC_G729
+  {6, {80, 160, 240, 320, 400, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  {3, {320, 640, 960}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  {3, {160, 320, 480}, 160, 1},
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  {3, {160, 320, 480}, 0, 1},
+  {3, {320, 640, 960}, 0, 1},
+#endif
+  // Comfort noise for three different sampling frequencies.
+  {1, {240}, 240, 1},
+  {1, {480}, 480, 1},
+  {1, {960}, 960, 1},
+#ifdef WEBRTC_CODEC_AVT
+  {1, {240}, 240, 1},
+#endif
+#ifdef WEBRTC_CODEC_RED
+  {1, {0}, 0, 1},
+#endif
+  // To prevent compile errors due to trailing commas.
+  {-1, {-1}, -1, -1}
+};
+
+// Create a database of all NetEQ decoders at compile time.
+const WebRtcNetEQDecoder ACMCodecDB::neteq_decoders_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  kDecoderISAC,
+# if (defined(WEBRTC_CODEC_ISAC))
+  kDecoderISACswb,
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  kDecoderPCM16B,
+  kDecoderPCM16Bwb,
+  kDecoderPCM16Bswb32kHz,
+#endif
+  // G.711, PCM mu-las and A-law.
+  kDecoderPCMu,
+  kDecoderPCMa,
+#ifdef WEBRTC_CODEC_ILBC
+  kDecoderILBC,
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  kDecoderAMR,
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  kDecoderAMRWB,
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  kDecoderCELT_32,
+#endif
+#ifdef WEBRTC_CODEC_G722
+  kDecoderG722,
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  kDecoderG722_1_32,
+  kDecoderG722_1_24,
+  kDecoderG722_1_16,
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  kDecoderG722_1C_48,
+  kDecoderG722_1C_32,
+  kDecoderG722_1C_24,
+#endif
+#ifdef WEBRTC_CODEC_G729
+  kDecoderG729,
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  kDecoderG729_1,
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  kDecoderGSMFR,
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  kDecoderSPEEX_8,
+  kDecoderSPEEX_16,
+#endif
+  // Comfort noise for three different sampling frequencies.
+  kDecoderCNG,
+  kDecoderCNG,
+  kDecoderCNG,
+#ifdef WEBRTC_CODEC_AVT
+  kDecoderAVT,
+#endif
+#ifdef WEBRTC_CODEC_RED
+  kDecoderRED,
+#endif
+  kDecoderReservedEnd
+};
+
+// Get codec information from database.
+// TODO(tlegrand): replace memcpy with a pointer to the data base memory.
+int ACMCodecDB::Codec(int codec_id, CodecInst* codec_inst) {
+  // Error check to see that codec_id is not out of bounds.
+  if ((codec_id < 0) || (codec_id >= kNumCodecs)) {
+    return -1;
+  }
+
+  // Copy database information for the codec to the output.
+  memcpy(codec_inst, &database_[codec_id], sizeof(CodecInst));
+
+  return 0;
+}
+
+// Enumerator for error codes when asking for codec database id.
+enum {
+  kInvalidCodec = -10,
+  kInvalidFrequency = -20,
+  kInvalidPayloadtype = -30,
+  kInvalidPacketSize = -40,
+  kInvalidRate = -50
+};
+
+// Gets the codec id number from the database. If there is some mismatch in
+// the codec settings, an error message will be recorded in the error string.
+// NOTE! Only the first mismatch found will be recorded in the error string.
+int ACMCodecDB::CodecNumber(const CodecInst* codec_inst, int* mirror_id,
+                            char* err_message, int max_message_len_byte) {
+  int codec_id = ACMCodecDB::CodecNumber(codec_inst, mirror_id);
+
+  // Write error message if ACMCodecDB::CodecNumber() returned error.
+  if ((codec_id < 0) && (err_message != NULL)) {
+    char my_err_msg[1000];
+
+    if (codec_id == kInvalidCodec) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, plname=%s "
+              "is not a valid codec", codec_inst->plname);
+    } else if (codec_id == kInvalidFrequency) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, plfreq=%d "
+              "is not a valid frequency for the codec %s", codec_inst->plfreq,
+              codec_inst->plname);
+    } else if (codec_id == kInvalidPayloadtype) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, payload "
+              "number %d is out of range for %s", codec_inst->pltype,
+              codec_inst->plname);
+    } else if (codec_id == kInvalidPacketSize) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, Packet "
+              "size is out of range for %s", codec_inst->plname);
+    } else if (codec_id == kInvalidRate) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, rate=%d "
+              "is not a valid rate for %s", codec_inst->rate,
+              codec_inst->plname);
+    } else {
+      // Other error
+      sprintf(my_err_msg, "invalid codec parameters to be registered, "
+              "ACMCodecDB::CodecNumber failed");
+    }
+
+    strncpy(err_message, my_err_msg, max_message_len_byte - 1);
+    // make sure that the message is null-terminated.
+    err_message[max_message_len_byte - 1] = '\0';
+  }
+
+  return codec_id;
+}
+
+// Gets the codec id number from the database. If there is some mismatch in
+// the codec settings, the function will return an error code.
+// NOTE! The first mismatch found will generate the return value.
+int ACMCodecDB::CodecNumber(const CodecInst* codec_inst, int* mirror_id) {
+  int codec_number = -1;
+  bool name_match = false;
+
+  // Looks for a matching payload name and frequency in the codec list.
+  // Need to check both since some codecs have several codec entries with
+  // different frequencies (like iSAC).
+  for (int i = 0; i < kNumCodecs; i++) {
+    if (STR_CASE_CMP(database_[i].plname, codec_inst->plname) == 0) {
+      // We have found a matching codec name in the list.
+      name_match = true;
+
+      // Checks if frequency match.
+      if (codec_inst->plfreq == database_[i].plfreq) {
+        codec_number = i;
+        break;
+      }
+    }
+  }
+
+  // Checks if the error is in the name or in the frequency.
+  if (codec_number == -1) {
+    if (!name_match) {
+      return kInvalidCodec;
+    } else {
+      return kInvalidFrequency;
+    }
+  }
+
+  // Checks the validity of payload type
+  if (!ValidPayloadType(codec_inst->pltype)) {
+    return kInvalidPayloadtype;
+  }
+
+  // Comfort Noise is special case, packet-size & rate is not checked.
+  if (STR_CASE_CMP(database_[codec_number].plname, "CN") == 0) {
+    *mirror_id = codec_number;
+    return codec_number;
+  }
+
+  // RED is special case, packet-size & rate is not checked.
+  if (STR_CASE_CMP(database_[codec_number].plname, "red") == 0) {
+    *mirror_id = codec_number;
+    return codec_number;
+  }
+
+  // Checks the validity of packet size.
+  if (codec_settings_[codec_number].num_packet_sizes > 0) {
+    bool packet_size_ok = false;
+    int i;
+    int packet_size_samples;
+    for (i = 0; i < codec_settings_[codec_number].num_packet_sizes; i++) {
+      packet_size_samples =
+          codec_settings_[codec_number].packet_sizes_samples[i];
+      if (codec_inst->pacsize == packet_size_samples) {
+        packet_size_ok = true;
+        break;
+      }
+    }
+
+    if (!packet_size_ok) {
+      return kInvalidPacketSize;
+    }
+  }
+
+  if (codec_inst->pacsize < 1) {
+    return kInvalidPacketSize;
+  }
+
+
+  // Check the validity of rate. Codecs with multiple rates have their own
+  // function for this.
+  *mirror_id = codec_number;
+  if (STR_CASE_CMP("isac", codec_inst->plname) == 0) {
+    if (IsISACRateValid(codec_inst->rate)) {
+      // Set mirrorID to iSAC WB which is only created once to be used both for
+      // iSAC WB and SWB, because they need to share struct.
+      *mirror_id = kISAC;
+      return  codec_number;
+    } else {
+      return kInvalidRate;
+    }
+  } else if (STR_CASE_CMP("ilbc", codec_inst->plname) == 0) {
+    return IsILBCRateValid(codec_inst->rate, codec_inst->pacsize)
+        ? codec_number : kInvalidRate;
+  } else if (STR_CASE_CMP("amr", codec_inst->plname) == 0) {
+    return IsAMRRateValid(codec_inst->rate)
+        ? codec_number : kInvalidRate;
+  } else if (STR_CASE_CMP("amr-wb", codec_inst->plname) == 0) {
+    return IsAMRwbRateValid(codec_inst->rate)
+        ? codec_number : kInvalidRate;
+  } else if (STR_CASE_CMP("g7291", codec_inst->plname) == 0) {
+    return IsG7291RateValid(codec_inst->rate)
+        ? codec_number : kInvalidRate;
+  } else if (STR_CASE_CMP("speex", codec_inst->plname) == 0) {
+    return IsSpeexRateValid(codec_inst->rate)
+        ? codec_number : kInvalidRate;
+  } else if (STR_CASE_CMP("celt", codec_inst->plname) == 0) {
+    return IsCeltRateValid(codec_inst->rate)
+        ? codec_number : kInvalidRate;
+  }
+
+  return IsRateValid(codec_number, codec_inst->rate) ?
+      codec_number : kInvalidRate;
+}
+
+// Gets codec id number, and mirror id, from database for the receiver.
+int ACMCodecDB::ReceiverCodecNumber(const CodecInst* codec_inst,
+    int* mirror_id) {
+  int codec_number = -1;
+
+  // Looks for a matching payload name and frequency in the codec list.
+  // Need to check both since some codecs have several codec entries with
+  // different frequencies (like iSAC).
+  for (int i = 0; i < kNumCodecs; i++) {
+    if (STR_CASE_CMP(database_[i].plname, codec_inst->plname) == 0) {
+      // We have found a matching codec name in the list.
+
+      // Check if frequency match.
+      if (codec_inst->plfreq == database_[i].plfreq) {
+        codec_number = i;
+        *mirror_id = codec_number;
+
+        // Check if codec is iSAC, set mirrorID to iSAC WB which is only
+        // created once to be used both for iSAC WB and SWB, because they need
+        // to share struct.
+        if (STR_CASE_CMP(codec_inst->plname, "ISAC") == 0) {
+          *mirror_id = kISAC;
+        }
+
+        break;
+      }
+    }
+  }
+
+  return codec_number;
+}
+
+// Returns the codec sampling frequency for codec with id = "codec_id" in
+// database.
+int ACMCodecDB::CodecFreq(int codec_id) {
+  // Error check to see that codec_id is not out of bounds.
+  if (codec_id < 0 || codec_id >= kNumCodecs) {
+    return -1;
+  }
+
+  return database_[codec_id].plfreq;
+}
+
+// Returns the codec's basic coding block size in samples.
+int ACMCodecDB::BasicCodingBlock(int codec_id) {
+  // Error check to see that codec_id is not out of bounds.
+  if (codec_id < 0 || codec_id >= kNumCodecs) {
+      return -1;
+  }
+
+  return codec_settings_[codec_id].basic_block_samples;
+}
+
+// Returns the NetEQ decoder database.
+const WebRtcNetEQDecoder* ACMCodecDB::NetEQDecoders() {
+  return neteq_decoders_;
+}
+
+// All version numbers for the codecs in the database are listed in text.
+// TODO(tlegrand): change to use std::string.
+int ACMCodecDB::CodecsVersion(char* version, size_t* remaining_buffer_bytes,
+                              size_t* position) {
+  const size_t kTemporaryBufferSize = 500;
+  const size_t kVersionBufferSize = 1000;
+  char versions_buffer[kVersionBufferSize];
+  char version_num_buf[kTemporaryBufferSize];
+  size_t len = *position;
+  size_t remaining_size = kVersionBufferSize;
+
+  versions_buffer[0] = '\0';
+
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  ACM_ISAC_VERSION(version_num_buf);
+  strncat(versions_buffer, "ISAC\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, "\n", remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, "L16\t\t1.0.0\n", remaining_size);
+#endif
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcG711_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "G.711\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, "\n", remaining_size);
+#ifdef WEBRTC_CODEC_ILBC
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcIlbcfix_version(version_num_buf);
+  strncat(versions_buffer, "ILBC\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, "\n", remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcAmr_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "AMR\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcAmrWb_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "AMR-WB\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_G722
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcG722_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "G.722\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcG7221_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "G.722.1\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcG7221c_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "G.722.1C\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_G729
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcG729_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "G.729\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcG7291_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "G.729.1\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcGSMFR_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "GSM-FR\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcSpeex_Version(version_num_buf, kTemporaryBufferSize);
+  strncat(versions_buffer, "Speex\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#endif
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  WebRtcCng_Version(version_num_buf);
+  strncat(versions_buffer, "CNG\t\t", remaining_size);
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, version_num_buf, remaining_size);
+#ifdef WEBRTC_CODEC_AVT
+  remaining_size = kVersionBufferSize - strlen(versions_buffer);
+  strncat(versions_buffer, "Tone Generation\t1.0.0\n", remaining_size);
+#endif
+  strncpy(&version[len], versions_buffer, *remaining_buffer_bytes);
+  *position = strlen(version);
+  *remaining_buffer_bytes -= (*position - len);
+  if (*remaining_buffer_bytes < strlen(versions_buffer)) {
+    return -1;
+  }
+
+  return 0;
+}
+
+// Gets mirror id. The Id is used for codecs sharing struct for settings that
+// need different payload types.
+int ACMCodecDB::MirrorID(int codec_id) {
+  // Every iSAC entry (16 kHz and 32 kHz) shares the instance stored at the
+  // kISAC slot; all other codecs own their own slot.
+  return (STR_CASE_CMP(database_[codec_id].plname, "isac") == 0)
+      ? kISAC : codec_id;
+}
+
+// Creates memory/instance for storing codec state. Only "plname" (and, for
+// multi-rate/multi-frequency codecs, "plfreq"/"rate") of |codec_inst| is used.
+// Returns NULL if the codec is unknown, not compiled in, or configured with
+// an unsupported frequency/rate combination.
+ACMGenericCodec* ACMCodecDB::CreateCodecInstance(const CodecInst* codec_inst) {
+  // All we have support for right now.
+  if (!STR_CASE_CMP(codec_inst->plname, "ISAC")) {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    return new ACMISAC(kISAC);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "PCMU")) {
+    return new ACMPCMU(kPCMU);
+  } else if (!STR_CASE_CMP(codec_inst->plname, "PCMA")) {
+    return new ACMPCMA(kPCMA);
+  } else if (!STR_CASE_CMP(codec_inst->plname, "ILBC")) {
+#ifdef WEBRTC_CODEC_ILBC
+    return new ACMILBC(kILBC);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "AMR")) {
+#ifdef WEBRTC_CODEC_AMR
+    return new ACMAMR(kGSMAMR);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "AMR-WB")) {
+#ifdef WEBRTC_CODEC_AMRWB
+    return new ACMAMRwb(kGSMAMRWB);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "CELT")) {
+#ifdef WEBRTC_CODEC_CELT
+    return new ACMCELT(kCELT32);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G722")) {
+#ifdef WEBRTC_CODEC_G722
+    return new ACMG722(kG722);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G7221")) {
+    // For G.722.1 the sampling frequency selects between the 16 kHz (G.722.1)
+    // and 32 kHz (G.722.1C) variants; the rate selects the database entry.
+    switch (codec_inst->plfreq) {
+      case 16000: {
+#ifdef WEBRTC_CODEC_G722_1
+        int codec_id;
+        switch (codec_inst->rate) {
+          case 16000: {
+            codec_id = kG722_1_16;
+            break;
+          }
+          case 24000: {
+            codec_id = kG722_1_24;
+            break;
+          }
+          case 32000: {
+            codec_id = kG722_1_32;
+            break;
+          }
+          default: {
+            return NULL;
+          }
+        }
+        // BUG FIX: this return used to sit inside the switch above, after the
+        // default case, where no control path could ever reach it.
+        return new ACMG722_1(codec_id);
+#endif
+        // BUG FIX: without this break a 16 kHz request fell through into the
+        // 32 kHz (G.722.1C) case when G.722.1 was not compiled in.
+        break;
+      }
+      case 32000: {
+#ifdef WEBRTC_CODEC_G722_1C
+        int codec_id;
+        switch (codec_inst->rate) {
+          case 24000: {
+            codec_id = kG722_1C_24;
+            break;
+          }
+          case 32000: {
+            codec_id = kG722_1C_32;
+            break;
+          }
+          case 48000: {
+            codec_id = kG722_1C_48;
+            break;
+          }
+          default: {
+            return NULL;
+          }
+        }
+        // BUG FIX: moved out of the inner switch where it was unreachable.
+        return new ACMG722_1C(codec_id);
+#endif
+        break;
+      }
+    }
+  } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
+    // For CN we need to check sampling frequency to know what codec to create.
+    int codec_id;
+    switch (codec_inst->plfreq) {
+      case 8000: {
+        codec_id = kCNNB;
+        break;
+      }
+      case 16000: {
+        codec_id = kCNWB;
+        break;
+      }
+      case 32000: {
+        codec_id = kCNSWB;
+        break;
+      }
+      default: {
+        return NULL;
+      }
+    }
+    return new ACMCNG(codec_id);
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G729")) {
+#ifdef WEBRTC_CODEC_G729
+    return new ACMG729(kG729);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G7291")) {
+#ifdef WEBRTC_CODEC_G729_1
+    return new ACMG729_1(kG729_1);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "speex")) {
+#ifdef WEBRTC_CODEC_SPEEX
+    int codec_id;
+    switch (codec_inst->plfreq) {
+      case 8000: {
+        codec_id = kSPEEX8;
+        break;
+      }
+      case 16000: {
+        codec_id = kSPEEX16;
+        break;
+      }
+      default: {
+        return NULL;
+      }
+    }
+    return new ACMSPEEX(codec_id);
+#endif
+    // NOTE: a second, identical "CN" branch used to follow here. It was dead
+    // code -- the earlier "CN" branch always matches first -- and was removed.
+  } else if (!STR_CASE_CMP(codec_inst->plname, "L16")) {
+#ifdef WEBRTC_CODEC_PCM16
+    // For L16 we need to check sampling frequency to know what codec to create.
+    int codec_id;
+    switch (codec_inst->plfreq) {
+      case 8000: {
+        codec_id = kPCM16B;
+        break;
+      }
+      case 16000: {
+        codec_id = kPCM16Bwb;
+        break;
+      }
+      case 32000: {
+        codec_id = kPCM16Bswb32kHz;
+        break;
+      }
+      default: {
+        return NULL;
+      }
+    }
+    return new ACMPCM16B(codec_id);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "telephone-event")) {
+#ifdef WEBRTC_CODEC_AVT
+    return new ACMDTMFPlayout(kAVT);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "red")) {
+#ifdef WEBRTC_CODEC_RED
+    return new ACMRED(kRED);
+#endif
+  }
+  return NULL;
+}
+
+// Checks if the bitrate is valid for the codec.
+bool ACMCodecDB::IsRateValid(int codec_id, int rate) {
+  // A codec accepts exactly the single rate stored in its database entry.
+  return database_[codec_id].rate == rate;
+}
+
+// Checks if the bitrate is valid for iSAC: either the adaptive mode (-1) or
+// a fixed rate in the inclusive range [10000, 56000] bits per second.
+bool ACMCodecDB::IsISACRateValid(int rate) {
+  return (rate == -1) || ((rate >= 10000) && (rate <= 56000));
+}
+
+// Checks if the bitrate is valid for iLBC. The valid rate depends on the
+// frame size: 13300 bps for 30 ms frames (240/480 samples), 15200 bps for
+// 20 ms frames (160/320 samples).
+bool ACMCodecDB::IsILBCRateValid(int rate, int frame_size_samples) {
+  const bool is_30ms_frame =
+      (frame_size_samples == 240) || (frame_size_samples == 480);
+  const bool is_20ms_frame =
+      (frame_size_samples == 160) || (frame_size_samples == 320);
+  return (is_30ms_frame && (rate == 13300)) ||
+         (is_20ms_frame && (rate == 15200));
+}
+
+// Check if the bitrate is valid for the GSM-AMR. GSM-AMR supports exactly
+// eight discrete modes, listed below in bits per second.
+bool ACMCodecDB::IsAMRRateValid(int rate) {
+  static const int kValidRates[] = {
+      4750, 5150, 5900, 6700, 7400, 7950, 10200, 12200};
+  const int num_rates = sizeof(kValidRates) / sizeof(kValidRates[0]);
+  for (int i = 0; i < num_rates; i++) {
+    if (rate == kValidRates[i]) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Check if the bitrate is valid for GSM-AMR-WB. GSM-AMR-WB supports exactly
+// nine discrete modes, listed below in bits per second.
+bool ACMCodecDB::IsAMRwbRateValid(int rate) {
+  static const int kValidRates[] = {
+      7000, 9000, 12000, 14000, 16000, 18000, 20000, 23000, 24000};
+  const int num_rates = sizeof(kValidRates) / sizeof(kValidRates[0]);
+  for (int i = 0; i < num_rates; i++) {
+    if (rate == kValidRates[i]) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Check if the bitrate is valid for G.729.1. Valid rates are 8000 bps (the
+// G.729 core layer) and every 2000 bps step from 12000 to 32000 inclusive.
+bool ACMCodecDB::IsG7291RateValid(int rate) {
+  if (rate == 8000) {
+    return true;
+  }
+  return (rate >= 12000) && (rate <= 32000) && ((rate % 2000) == 0);
+}
+
+// Checks if the bitrate is valid for Speex: anything above 2000 bps.
+bool ACMCodecDB::IsSpeexRateValid(int rate) {
+  return rate > 2000;
+}
+
+// Checks if the bitrate is valid for Celt: the inclusive range
+// [48000, 128000] bits per second.
+bool ACMCodecDB::IsCeltRateValid(int rate) {
+  return (rate >= 48000) && (rate <= 128000);
+}
+
+// Checks if the payload type is in the valid RTP range, 0 through 127.
+bool ACMCodecDB::ValidPayloadType(int payload_type) {
+  return (payload_type >= 0) && (payload_type <= 127);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_codec_database.h b/trunk/src/modules/audio_coding/main/source/acm_codec_database.h
new file mode 100644
index 0000000..6830e65
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_codec_database.h
@@ -0,0 +1,308 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file generates databases with information about all supported audio
+ * codecs.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
+
+#include "acm_generic_codec.h"
+#include "common_types.h"
+#include "webrtc_neteq.h"
+
+namespace webrtc {
+
+// TODO(tlegrand): replace class ACMCodecDB with a namespace.
+class ACMCodecDB {
+ public:
+  // Enum with array indexes for the supported codecs. NOTE! The order MUST
+  // be the same as when creating the database in acm_codec_database.cc.
+  enum {
+    kNone = -1
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    , kISAC
+# if (defined(WEBRTC_CODEC_ISAC))
+    , kISACSWB
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    , kPCM16B
+    , kPCM16Bwb
+    , kPCM16Bswb32kHz
+#endif
+    , kPCMU
+    , kPCMA
+#ifdef WEBRTC_CODEC_ILBC
+    , kILBC
+#endif
+#ifdef WEBRTC_CODEC_AMR
+    , kGSMAMR
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+    , kGSMAMRWB
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    , kCELT32
+#endif
+#ifdef WEBRTC_CODEC_G722
+    , kG722
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+    , kG722_1_32
+    , kG722_1_24
+    , kG722_1_16
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+    , kG722_1C_48
+    , kG722_1C_32
+    , kG722_1C_24
+#endif
+#ifdef WEBRTC_CODEC_G729
+    , kG729
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    , kG729_1
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+    , kGSMFR
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+    , kSPEEX8
+    , kSPEEX16
+#endif
+    , kCNNB
+    , kCNWB
+    , kCNSWB
+#ifdef WEBRTC_CODEC_AVT
+    , kAVT
+#endif
+#ifdef WEBRTC_CODEC_RED
+    , kRED
+#endif
+    , kNumCodecs
+  };
+
+  // Set unsupported codecs to -1
+#ifndef WEBRTC_CODEC_ISAC
+  enum {kISACSWB = -1};
+# ifndef WEBRTC_CODEC_ISACFX
+  enum {kISAC = -1};
+# endif
+#endif
+#ifndef WEBRTC_CODEC_PCM16
+  enum {kPCM16B = -1};
+  enum {kPCM16Bwb = -1};
+  enum {kPCM16Bswb32kHz = -1};
+#endif
+  // 48 kHz not supported, always set to -1.
+  enum {kPCM16Bswb48kHz = -1};
+#ifndef WEBRTC_CODEC_ILBC
+  enum {kILBC = -1};
+#endif
+#ifndef WEBRTC_CODEC_AMR
+  enum {kGSMAMR = -1};
+#endif
+#ifndef WEBRTC_CODEC_AMRWB
+  enum {kGSMAMRWB = -1};
+#endif
+#ifndef WEBRTC_CODEC_CELT
+  enum {kCELT32 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G722
+  enum {kG722 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G722_1
+  enum {kG722_1_32 = -1};
+  enum {kG722_1_24 = -1};
+  enum {kG722_1_16 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G722_1C
+  enum {kG722_1C_48 = -1};
+  enum {kG722_1C_32 = -1};
+  enum {kG722_1C_24 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G729
+  enum {kG729 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G729_1
+  enum {kG729_1 = -1};
+#endif
+#ifndef WEBRTC_CODEC_GSMFR
+  enum {kGSMFR = -1};
+#endif
+#ifndef WEBRTC_CODEC_SPEEX
+  enum {kSPEEX8 = -1};
+  enum {kSPEEX16 = -1};
+#endif
+#ifndef WEBRTC_CODEC_AVT
+  enum {kAVT = -1};
+#endif
+#ifndef WEBRTC_CODEC_RED
+  enum {kRED = -1};
+#endif
+
+  // kMaxNumCodecs - Maximum number of codecs that can be activated in one
+  //                 build.
+  // kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
+  // These might need to be increased if adding a new codec to the database
+  static const int kMaxNumCodecs =  50;
+  static const int kMaxNumPacketSize = 6;
+
+  // Codec specific settings
+  //
+  // num_packet_sizes     - number of allowed packet sizes.
+  // packet_sizes_samples - list of the allowed packet sizes.
+  // basic_block_samples  - assigned a value different from 0 if the codec
+  //                        requires to be fed with a specific number of samples
+  //                        that can be different from packet size.
+  // channel_support      - number of channels supported to encode;
+  //                        1 = mono, 2 = stereo, etc.
+  struct CodecSettings {
+    int num_packet_sizes;
+    int packet_sizes_samples[kMaxNumPacketSize];
+    int basic_block_samples;
+    int channel_support;
+  };
+
+  // Gets codec information from database at the position in database given by
+  // [codec_id].
+  // Input:
+  //   [codec_id] - number that specifies at what position in the database to
+  //                get the information.
+  // Output:
+  //   [codec_inst] - filled with information about the codec.
+  // Return:
+  //   0 if successful, otherwise -1.
+  static int Codec(int codec_id, CodecInst* codec_inst);
+
+  // Returns codec id and mirror id from database, given the information
+  // received in the input [codec_inst]. Mirror id is a number that tells
+  // where to find the codec's memory (instance). The number is either the
+  // same as codec id (most common), or a number pointing at a different
+  // entry in the database, if the codec has several entries with different
+  // payload types. This is used for codecs that must share one struct even if
+  // the payload type differs.
+  // One example is the codec iSAC which has the same struct for both 16 and
+  // 32 khz, but they have different entries in the database. Let's say the
+  // function is called with iSAC 32kHz. The function will return 1 as that is
+  // the entry in the data base, and [mirror_id] = 0, as that is the entry for
+  // iSAC 16 kHz, which holds the shared memory.
+  // Input:
+  //   [codec_inst] - Information about the codec for which we require the
+  //                  database id.
+  // Output:
+  //   [mirror_id] - mirror id, which most often is the same as the return
+  //                 value, see above.
+  //   [err_message] - if present, in the event of a mismatch found between the
+  //                   input and the database, a descriptive error message is
+  //                   written here.
+  //   [max_message_len_byte] - the size, in bytes, of the err_message buffer.
+  // Return:
+  //   codec id if successful, otherwise < 0.
+  static int CodecNumber(const CodecInst* codec_inst, int* mirror_id,
+                         char* err_message, int max_message_len_byte);
+  static int CodecNumber(const CodecInst* codec_inst, int* mirror_id);
+  static int ReceiverCodecNumber(const CodecInst* codec_inst, int* mirror_id);
+
+  // Returns the codec sampling frequency for codec with id = "codec_id" in
+  // database.
+  // TODO(tlegrand): Check if function is needed, or if we can change
+  // to access database directly.
+  // Input:
+  //   [codec_id] - number that specifies at what position in the database to
+  //                get the information.
+  // Return:
+  //   codec sampling frequency if successful, otherwise -1.
+  static int CodecFreq(int codec_id);
+
+  // Return the codec's basic coding block size in samples.
+  // TODO(tlegrand): Check if function is needed, or if we can change
+  // to access database directly.
+  // Input:
+  //   [codec_id] - number that specifies at what position in the database to
+  //                get the information.
+  // Return:
+  //   codec basic block size if successful, otherwise -1.
+  static int BasicCodingBlock(int codec_id);
+
+  // Returns the NetEQ decoder database.
+  static const WebRtcNetEQDecoder* NetEQDecoders();
+
+  // All version numbers for the codecs in the database are listed in text.
+  // Input/Output:
+  //   [version] - pointer to a char vector with minimum size 1000 bytes.
+  //               Audio coding module's and all component's versions is
+  //               written here.
+  //   [remaining_buffer_bytes] - remaining space in buffer.
+  //   [position] - current position to write at in buffer.
+  // Return:
+  //   -1 if version information doesn't fit, 0 on success.
+  static int CodecsVersion(char* version, size_t* remaining_buffer_bytes,
+                           size_t* position);
+
+  // Returns mirror id, which is a number that tells where to find the codec's
+  // memory (instance). It is either the same as codec id (most common), or a
+  // number pointing at a different entry in the database, if the codec have
+  // several entries with different payload types. This is used for codecs that
+  // must share struct even if the payload type differs.
+  // TODO(tlegrand): Check if function is needed, or if we can change
+  // to access database directly.
+  // Input:
+  //   [codec_id] - number that specifies codec's position in the database.
+  // Return:
+  //   Mirror id on success, otherwise -1.
+  static int MirrorID(int codec_id);
+
+  // Create memory/instance for storing codec state.
+  // Input:
+  //   [codec_inst] - information about codec. Only name of codec, "plname", is
+  //                  used in this function.
+  static ACMGenericCodec* CreateCodecInstance(const CodecInst* codec_inst);
+
+  // Checks if the bitrate is valid for the codec.
+  // Input:
+  //   [codec_id] - number that specifies codec's position in the database.
+  //   [rate] - bitrate to check.
+  //   [frame_size_samples] - (used for iLBC) specifies which frame size to go
+  //                          with the rate.
+  static bool IsRateValid(int codec_id, int rate);
+  static bool IsISACRateValid(int rate);
+  static bool IsILBCRateValid(int rate, int frame_size_samples);
+  static bool IsAMRRateValid(int rate);
+  static bool IsAMRwbRateValid(int rate);
+  static bool IsG7291RateValid(int rate);
+  static bool IsSpeexRateValid(int rate);
+  static bool IsCeltRateValid(int rate);
+
+  // Check if the payload type is valid, meaning that it is in the valid range
+  // of 0 to 127.
+  // Input:
+  //   [payload_type] - payload type.
+  static bool ValidPayloadType(int payload_type);
+
+  // Databases with information about the supported codecs
+  // database_ - stored information about all codecs: payload type, name,
+  //             sampling frequency, packet size in samples, default channel
+  //             support, and default rate.
+  // codec_settings_ - stored codec settings: number of allowed packet sizes,
+  //                   a vector with the allowed packet sizes, basic block
+  //                   samples, and max number of channels that are supported.
+  // neteq_decoders_ - list of supported decoders in NetEQ.
+  static const CodecInst database_[kMaxNumCodecs];
+  static const CodecSettings codec_settings_[kMaxNumCodecs];
+  static const WebRtcNetEQDecoder neteq_decoders_[kMaxNumCodecs];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_common_defs.h b/trunk/src/modules/audio_coding/main/source/acm_common_defs.h
new file mode 100644
index 0000000..fd8dbd6
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_common_defs.h
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
+
+#include <string.h>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "typedefs.h"
+
+// Checks for enabled codecs, we prevent enabling codecs which are not
+// compatible.
+#if ((defined WEBRTC_CODEC_ISAC) && (defined WEBRTC_CODEC_ISACFX))
+#error iSAC and iSACFX codecs cannot be enabled at the same time
+#endif
+
+#ifdef WIN32
+// OS-dependent case-insensitive string comparison
+#define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+// OS-dependent case-insensitive string comparison
+#define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+
+// 60 ms is the maximum block size we support. An extra 20 ms is considered
+// for safety if process() method is not called when it should be, i.e. we
+// accept 20 ms of jitter. 80 ms @ 32 kHz (super wide-band) is 2560 samples.
+#define AUDIO_BUFFER_SIZE_W16  2560
+
+// There is one timestamp per each 10 ms of audio
+// the audio buffer, at max, may contain 32 blocks of 10ms
+// audio if the sampling frequency is 8000 Hz (80 samples per block).
+// Therefore, The size of the buffer where we keep timestamps
+// is defined as follows
+#define TIMESTAMP_BUFFER_SIZE_W32  (AUDIO_BUFFER_SIZE_W16/80)
+
+// The maximum size of a payload, that is 60 ms of PCM-16 @ 32 kHz stereo
+#define MAX_PAYLOAD_SIZE_BYTE   7680
+
+// General codec specific defines
+const int kIsacWbDefaultRate = 32000;
+const int kIsacSwbDefaultRate = 56000;
+const int kIsacPacSize480 = 480;
+const int kIsacPacSize960 = 960;
+
+// An encoded bit-stream is labeled by one of the following enumerators.
+//
+//   kNoEncoding              : There has been no encoding.
+//   kActiveNormalEncoded     : Active audio frame coded by the codec.
+//   kPassiveNormalEncoded    : Passive audio frame coded by the codec.
+//   kPassiveDTXNB            : Passive audio frame coded by narrow-band CN.
+//   kPassiveDTXWB            : Passive audio frame coded by wide-band CN.
+//   kPassiveDTXSWB           : Passive audio frame coded by super-wide-band CN.
+//
+enum WebRtcACMEncodingType {
+  kNoEncoding,
+  kActiveNormalEncoded,
+  kPassiveNormalEncoded,
+  kPassiveDTXNB,
+  kPassiveDTXWB,
+  kPassiveDTXSWB
+};
+
+// A structure which contains codec parameters. For instance, used when
+// initializing encoder and decoder.
+//
+//   codecInstant            : c.f. common_types.h
+//   enableDTX               : set true to enable DTX. If codec does not have
+//                             internal DTX, this will enable VAD.
+//   enableVAD               : set true to enable VAD.
+//   vadMode                 : VAD mode, c.f. audio_coding_module_typedefs.h
+//                             for possible values.
+struct WebRtcACMCodecParams {
+  CodecInst codecInstant;
+  bool enableDTX;
+  bool enableVAD;
+  ACMVADMode vadMode;
+};
+
+// A structure that encapsulates audio buffer and related parameters
+// used for synchronization of audio of two ACMs.
+//
+//   inAudio                 : same as ACMGenericCodec::_inAudio
+//   inAudioIxRead           : same as ACMGenericCodec::_inAudioIxRead
+//   inAudioIxWrite          : same as ACMGenericCodec::_inAudioIxWrite
+//   inTimestamp             : same as ACMGenericCodec::_inTimestamp
+//   inTimestampIxWrite      : same as ACMGenericCodec::_inTImestampIxWrite
+//   lastTimestamp           : same as ACMGenericCodec::_lastTimestamp
+//   lastInTimestamp         : same as AudioCodingModuleImpl::_lastInTimestamp
+//
+struct WebRtcACMAudioBuff {
+  WebRtc_Word16 inAudio[AUDIO_BUFFER_SIZE_W16];
+  WebRtc_Word16 inAudioIxRead;
+  WebRtc_Word16 inAudioIxWrite;
+  WebRtc_UWord32 inTimestamp[TIMESTAMP_BUFFER_SIZE_W32];
+  WebRtc_Word16 inTimestampIxWrite;
+  WebRtc_UWord32 lastTimestamp;
+  WebRtc_UWord32 lastInTimestamp;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_dtmf_detection.cc b/trunk/src/modules/audio_coding/main/source/acm_dtmf_detection.cc
new file mode 100644
index 0000000..7669b07
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_dtmf_detection.cc
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_dtmf_detection.h"
+#include "audio_coding_module_typedefs.h"
+
+namespace webrtc {
+
+// Constructs an inactive detector. DTMF detection is not implemented in this
+// build; _init is cleared here and nothing in this class ever sets it.
+ACMDTMFDetection::ACMDTMFDetection()
+    : _init(0) {}
+
+ACMDTMFDetection::~ACMDTMFDetection() {}
+
+// DTMF detection is not implemented; enabling always fails with -1.
+WebRtc_Word16 ACMDTMFDetection::Enable(ACMCountries /* cpt */) {
+  return -1;
+}
+
+// DTMF detection is not implemented; disabling always fails with -1.
+WebRtc_Word16 ACMDTMFDetection::Disable() {
+  return -1;
+}
+
+// DTMF detection is not implemented; always fails with -1 and leaves the
+// toneDetected/tone output parameters untouched.
+WebRtc_Word16 ACMDTMFDetection::Detect(
+    const WebRtc_Word16* /* inAudioBuff */,
+    const WebRtc_UWord16 /* inBuffLenWord16 */,
+    const WebRtc_Word32 /* inFreqHz */,
+    bool& /* toneDetected */,
+    WebRtc_Word16& /* tone  */) {
+  return -1;
+}
+
+// No version string is available for this stub; always fails with -1 and
+// writes nothing to the supplied buffer.
+WebRtc_Word16 ACMDTMFDetection::GetVersion(
+    WebRtc_Word8* /* version */,
+    WebRtc_UWord32& /* remainingBufferInBytes */,
+    WebRtc_UWord32& /* position */) {
+  return -1;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_dtmf_detection.h b/trunk/src/modules/audio_coding/main/source/acm_dtmf_detection.h
new file mode 100644
index 0000000..16aba84
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_dtmf_detection.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
+
+#include "acm_resampler.h"
+#include "audio_coding_module_typedefs.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class ACMDTMFDetection {
+ public:
+  ACMDTMFDetection();
+  ~ACMDTMFDetection();
+  WebRtc_Word16 Enable(ACMCountries cpt = ACMDisableCountryDetection);
+  WebRtc_Word16 Disable();
+  WebRtc_Word16 Detect(const WebRtc_Word16* inAudioBuff,
+                       const WebRtc_UWord16 inBuffLenWord16,
+                       const WebRtc_Word32 inFreqHz,
+                       bool& toneDetected,
+                       WebRtc_Word16& tone);
+
+  static WebRtc_Word16 GetVersion(WebRtc_Word8* version,
+                                  WebRtc_UWord32& remainingBufferInBytes,
+                                  WebRtc_UWord32& position);
+
+ private:
+  ACMResampler _resampler;
+  bool _init;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_dtmf_playout.cc b/trunk/src/modules/audio_coding/main/source/acm_dtmf_playout.cc
new file mode 100644
index 0000000..e748f8a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_dtmf_playout.cc
@@ -0,0 +1,185 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_dtmf_playout.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_AVT
+
+ACMDTMFPlayout::ACMDTMFPlayout(
+    WebRtc_Word16 /* codecID */) {
+  return;
+}
+
+ACMDTMFPlayout::~ACMDTMFPlayout() {
+  return;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalEncode(
+    WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                         WebRtc_Word16 /* bitStreamLenByte */,
+                                         WebRtc_Word16* /* audio */,
+                                         WebRtc_Word16* /* audioSamples */,
+                                         WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                       const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateEncoder() {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+void ACMDTMFPlayout::DestructEncoderSafe() {
+  return;
+}
+
+void ACMDTMFPlayout::DestructDecoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::UnregisterFromNetEqSafe(
+    ACMNetEQ* /* netEq */,
+    WebRtc_Word16 /* payloadType */) {
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+ACMDTMFPlayout::ACMDTMFPlayout(WebRtc_Word16 codecID) {
+  _codecID = codecID;
+}
+
+ACMDTMFPlayout::~ACMDTMFPlayout() {
+  return;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalEncode(
+    WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16* /* bitStreamLenByte */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                         WebRtc_Word16 /* bitStreamLenByte */,
+                                         WebRtc_Word16* /* audio */,
+                                         WebRtc_Word16* /* audioSamples */,
+                                         WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization,
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization,
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+// Fills |codecDef| with the AVT (telephone-event) decoder definition so the
+// caller can register it with NetEQ.
+WebRtc_Word32 ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                       const CodecInst& codecInst) {
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_AVT_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  // AVT has no decoder state, hence the NULL instance; it is fixed at 8 kHz.
+  SET_CODEC_PAR((codecDef), kDecoderAVT, codecInst.pltype, NULL, 8000);
+  SET_AVT_FUNCTIONS((codecDef));
+  return 0;
+}
+
+ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateEncoder() {
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateDecoder() {
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptrInst */) {
+  // DTMFPlayout has no instance
+  return;
+}
+
+void ACMDTMFPlayout::DestructEncoderSafe() {
+  // DTMFPlayout has no instance
+  return;
+}
+
+void ACMDTMFPlayout::DestructDecoderSafe() {
+  // DTMFPlayout has no instance
+  return;
+}
+
+// Removes the AVT decoder from NetEQ, but only if |payloadType| matches the
+// payload type this codec was registered with; otherwise logs an error and
+// returns -1 without touching NetEQ.
+WebRtc_Word16 ACMDTMFPlayout::UnregisterFromNetEqSafe(
+    ACMNetEQ* netEq,
+    WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    // BUG FIX: the format string had only two conversion specifiers for three
+    // arguments; the stored payload type was passed but never printed.
+    WEBRTC_TRACE(webrtc::kTraceError,
+                 webrtc::kTraceAudioCoding,
+                 _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type %d",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  return netEq->RemoveCodec(kDecoderAVT);
+}
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_dtmf_playout.h b/trunk/src/modules/audio_coding/main/source/acm_dtmf_playout.h
new file mode 100644
index 0000000..a75d691
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_dtmf_playout.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc {
+
+// ACMGenericCodec wrapper for AVT/DTMF playout. Keeps no encoder/decoder
+// state of its own; most overrides are no-ops in the implementation file.
+class ACMDTMFPlayout: public ACMGenericCodec {
+ public:
+  ACMDTMFPlayout(WebRtc_Word16 codecID);
+  ~ACMDTMFPlayout();
+  // for FEC; always returns NULL for this codec.
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  // Fills the NetEQ codec definition for the AVT payload.
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  WebRtc_Word16 UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                        WebRtc_Word16 payloadType);
+
+  void InternalDestructEncoderInst(void* ptrInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g722.cc b/trunk/src/modules/audio_coding/main/source/acm_g722.cc
new file mode 100644
index 0000000..40e431c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g722.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g722.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+#include "g722_interface.h"
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G722
+
+// Stub implementations compiled when G.722 is excluded from the build
+// (WEBRTC_CODEC_G722 undefined). Every operation fails with -1 / NULL so
+// accidental use of the disabled codec is caught at runtime.
+ACMG722::ACMG722(WebRtc_Word16 /* codecID */)
+    : _ptrEncStr(NULL),
+      _ptrDecStr(NULL),
+      _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+ACMG722::~ACMG722() {
+  return;
+}
+
+WebRtc_Word32 ACMG722::Add10MsDataSafe(const WebRtc_UWord32 /* timestamp */,
+                                       const WebRtc_Word16* /* data */,
+                                       const WebRtc_UWord16 /* lengthSmpl */,
+                                       const WebRtc_UWord8 /* audioChannel */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                      WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                  WebRtc_Word16 /* bitStreamLenByte */,
+                                  WebRtc_Word16* /* audio */,
+                                  WebRtc_Word16* /* audioSamples */,
+                                  WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMG722::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMG722::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMG722::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMG722::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMG722::DestructDecoderSafe() {
+  return;
+}
+
+void ACMG722::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMG722::UnregisterFromNetEqSafe(
+    ACMNetEQ* /* netEq */,
+    WebRtc_Word16 /* payloadType */) {
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+// Encoder and decoder memory
+// Holds the raw G.722 codec instances. A second instance exists so that
+// left and right channels can be encoded independently in stereo mode.
+struct ACMG722EncStr {
+  G722EncInst* inst; // instance for left channel in case of stereo
+  G722EncInst* instRight; // instance for right channel in case of stereo
+};
+struct ACMG722DecStr {
+  G722DecInst* inst; // instance for left channel in case of stereo
+  G722DecInst* instRight; // instance for right channel in case of stereo
+};
+
+// Allocates the encoder/decoder holder structs; the actual codec instances
+// are created lazily in InternalCreateEncoder()/InternalCreateDecoder().
+ACMG722::ACMG722(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL) {
+  // Encoder. (NULL checks are defensive: a standard `new` would throw on
+  // failure rather than return NULL.)
+  _ptrEncStr = new ACMG722EncStr;
+  if (_ptrEncStr != NULL) {
+    _ptrEncStr->inst = NULL;
+    _ptrEncStr->instRight = NULL;
+  }
+  // Decoder
+  _ptrDecStr = new ACMG722DecStr;
+  if (_ptrDecStr != NULL) {
+    _ptrDecStr->inst = NULL;
+    _ptrDecStr->instRight = NULL; // Not used
+  }
+  _codecID = codecID;
+  return;
+}
+
+// Frees both channel instances (left and right) of encoder and decoder,
+// then the holder structs themselves. Pointers are nulled after freeing.
+ACMG722::~ACMG722() {
+  // Encoder
+  if (_ptrEncStr != NULL) {
+    if (_ptrEncStr->inst != NULL) {
+      WebRtcG722_FreeEncoder(_ptrEncStr->inst);
+      _ptrEncStr->inst = NULL;
+    }
+    if (_ptrEncStr->instRight != NULL) {
+      WebRtcG722_FreeEncoder(_ptrEncStr->instRight);
+      _ptrEncStr->instRight = NULL;
+    }
+    delete _ptrEncStr;
+    _ptrEncStr = NULL;
+  }
+  // Decoder
+  if (_ptrDecStr != NULL) {
+    if (_ptrDecStr->inst != NULL) {
+      WebRtcG722_FreeDecoder(_ptrDecStr->inst);
+      _ptrDecStr->inst = NULL;
+    }
+    if (_ptrDecStr->instRight != NULL) {
+      WebRtcG722_FreeDecoder(_ptrDecStr->instRight);
+      _ptrDecStr->instRight = NULL;
+    }
+    delete _ptrDecStr;
+    _ptrDecStr = NULL;
+  }
+  return;
+}
+
+WebRtc_Word32 ACMG722::Add10MsDataSafe(const WebRtc_UWord32 timestamp,
+                                       const WebRtc_Word16* data,
+                                       const WebRtc_UWord16 lengthSmpl,
+                                       const WebRtc_UWord8 audioChannel) {
+  // The timestamp is halved before being stored: G.722's RTP timestamp
+  // clock runs at 8 kHz even though the codec samples at 16 kHz (the
+  // historical RFC 3551 quirk).
+  return ACMGenericCodec::Add10MsDataSafe((timestamp >> 1), data, lengthSmpl,
+                                          audioChannel);
+}
+
+// Encodes one frame from the shared input buffer into `bitStream` and
+// reports the payload size via `bitStreamLenByte`. Returns the payload
+// size in bytes. Stereo input is de-interleaved, each channel is encoded
+// separately, and the two 4-bit/sample streams are re-interleaved.
+WebRtc_Word16 ACMG722::InternalEncode(WebRtc_UWord8* bitStream,
+                                      WebRtc_Word16* bitStreamLenByte) {
+  // If stereo, split input signal in left and right channel before encoding
+  if (_noChannels == 2) {
+    // Fixed-size scratch buffers; assumes _frameLenSmpl <= 960 samples
+    // per channel -- TODO(review): confirm the upper bound.
+    WebRtc_Word16 leftChannel[960];
+    WebRtc_Word16 rightChannel[960];
+    WebRtc_UWord8 outLeft[480];
+    WebRtc_UWord8 outRight[480];
+    WebRtc_Word16 lenInBytes;
+    for (int i = 0, j = 0; i < _frameLenSmpl * 2; i += 2, j++) {
+      leftChannel[j] = _inAudio[_inAudioIxRead + i];
+      rightChannel[j] = _inAudio[_inAudioIxRead + i + 1];
+    }
+    // NOTE(review): the byte buffers are passed through a WebRtc_Word16*
+    // cast as required by the encoder API; presumably alignment is
+    // acceptable on target platforms -- confirm.
+    lenInBytes = WebRtcG722_Encode(_encoderInstPtr, leftChannel, _frameLenSmpl,
+                                   (WebRtc_Word16*) outLeft);
+    lenInBytes += WebRtcG722_Encode(_encoderInstPtrRight, rightChannel,
+                                    _frameLenSmpl, (WebRtc_Word16*) outRight);
+    *bitStreamLenByte = lenInBytes;
+
+    // Interleave the 4 bits per sample from left and right channel:
+    // right channel occupies the high nibble, left the low nibble.
+    for (int i = 0, j = 0; i < lenInBytes; i += 2, j++) {
+      bitStream[i] = (outRight[j] & 0xF0) + (outLeft[j] >> 4);
+      bitStream[i + 1] = ((outRight[j] & 0x0F) << 4) + (outLeft[j] & 0x0F);
+    }
+  } else {
+    *bitStreamLenByte = WebRtcG722_Encode(_encoderInstPtr,
+                                          &_inAudio[_inAudioIxRead],
+                                          _frameLenSmpl,
+                                          (WebRtc_Word16*) bitStream);
+  }
+
+  // Increment the read index; this tells the caller how far
+  // we have gone forward in reading the audio buffer.
+  _inAudioIxRead += _frameLenSmpl * _noChannels;
+  return *bitStreamLenByte;
+}
+
+// Decoding is performed inside NetEQ (see CodecDef()), so this override
+// is intentionally a no-op that reports success.
+WebRtc_Word16 ACMG722::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                  WebRtc_Word16 /* bitStreamLenByte */,
+                                  WebRtc_Word16* /* audio */,
+                                  WebRtc_Word16* /* audioSamples */,
+                                  WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// (Re)initializes the left-channel encoder and, for stereo, lazily
+// creates and initializes the right-channel encoder as well.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word16 ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  if (codecParams->codecInstant.channels == 2) {
+    // Create codec struct for right channel, only if not already present.
+    if (_ptrEncStr->instRight == NULL) {
+      WebRtcG722_CreateEncoder(&_ptrEncStr->instRight);
+      if (_ptrEncStr->instRight == NULL) {
+        return -1;
+      }
+    }
+    _encoderInstPtrRight = _ptrEncStr->instRight;
+    if (WebRtcG722_EncoderInit(_encoderInstPtrRight) < 0) {
+      return -1;
+    }
+  }
+
+  return WebRtcG722_EncoderInit(_encoderInstPtr);
+}
+
+// Resets the decoder instance; codec parameters are not needed for G.722.
+WebRtc_Word16 ACMG722::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return WebRtcG722_DecoderInit(_decoderInstPtr);
+}
+
+// Fills the NetEQ codec-definition entry for G.722 (16 kHz) so NetEQ can
+// add the decoder to its database. Fails if the decoder is uninitialized.
+WebRtc_Word32 ACMG722::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // TODO: log error
+    return -1;
+  }
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_G722_FUNCTIONS",
+  // then NetEQ adds the codec to its database.
+  SET_CODEC_PAR((codecDef), kDecoderG722, codecInst.pltype, _decoderInstPtr,
+                16000);
+  SET_G722_FUNCTIONS((codecDef));
+  return 0;
+}
+
+// No duplicate instance is created for the FEC path.
+ACMGenericCodec* ACMG722::CreateInstance(void) {
+  return NULL;
+}
+
+// Creates the left-channel encoder instance inside the holder struct.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word16 ACMG722::InternalCreateEncoder() {
+  if (_ptrEncStr == NULL) {
+    // This structure must be created in the constructor;
+    // if it is still NULL then there is a problem and
+    // we don't continue.
+    return -1;
+  }
+  WebRtcG722_CreateEncoder(&_ptrEncStr->inst);
+  if (_ptrEncStr->inst == NULL) {
+    return -1;
+  }
+  _encoderInstPtr = _ptrEncStr->inst;
+  return 0;
+}
+
+// Frees the left-channel encoder and clears the encoder state flags.
+// NOTE(review): the right-channel instance (instRight) is intentionally
+// left alone here -- it is re-checked in InternalInitEncoder() and freed
+// by the destructor -- confirm this is the intended lifetime.
+void ACMG722::DestructEncoderSafe() {
+  if (_ptrEncStr != NULL) {
+    if (_ptrEncStr->inst != NULL) {
+      WebRtcG722_FreeEncoder(_ptrEncStr->inst);
+      _ptrEncStr->inst = NULL;
+    }
+  }
+  _encoderExist = false;
+  _encoderInitialized = false;
+}
+
+// Creates the decoder instance inside the holder struct.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word16 ACMG722::InternalCreateDecoder() {
+  if (_ptrDecStr == NULL) {
+    // This structure must be created in the constructor;
+    // if it is still NULL then there is a problem and
+    // we don't continue.
+    return -1;
+  }
+
+  WebRtcG722_CreateDecoder(&_ptrDecStr->inst);
+  if (_ptrDecStr->inst == NULL) {
+    return -1;
+  }
+  _decoderInstPtr = _ptrDecStr->inst;
+  return 0;
+}
+
+// Clears decoder state flags and frees the decoder instance.
+void ACMG722::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (_ptrDecStr != NULL) {
+    if (_ptrDecStr->inst != NULL) {
+      WebRtcG722_FreeDecoder(_ptrDecStr->inst);
+      _ptrDecStr->inst = NULL;
+    }
+  }
+}
+
+// Frees an encoder instance passed in by the generic-codec base class.
+void ACMG722::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcG722_FreeEncoder(static_cast<G722EncInst*>(ptrInst));
+  }
+  return;
+}
+
+// Removes the G.722 decoder from NetEQ after verifying that the payload
+// type being unregistered matches the registered one.
+WebRtc_Word16 ACMG722::UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                               WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    // Bug fix: the format string had only two conversions (%s, %d) for
+    // three arguments; a trailing "%d" is added for the stored type.
+    WEBRTC_TRACE(webrtc::kTraceError,
+                 webrtc::kTraceAudioCoding,
+                 _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type %d",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  return netEq->RemoveCodec(kDecoderG722);
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g722.h b/trunk/src/modules/audio_coding/main/source/acm_g722.h
new file mode 100644
index 0000000..776d188
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g722.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
+
+#include "acm_generic_codec.h"
+
+typedef struct WebRtcG722EncInst G722EncInst;
+typedef struct WebRtcG722DecInst G722DecInst;
+
+namespace webrtc {
+
+// forward declaration
+struct ACMG722EncStr;
+struct ACMG722DecStr;
+
+// ACMGenericCodec wrapper for the G.722 codec (16 kHz, mono or stereo).
+// Stereo is handled with two independent encoder instances whose 4-bit
+// codewords are interleaved by InternalEncode().
+class ACMG722: public ACMGenericCodec {
+ public:
+  ACMG722(WebRtc_Word16 codecID);
+  ~ACMG722();
+  // for FEC; always returns NULL for this codec.
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  // Halves the RTP timestamp before storing (G.722's 8 kHz RTP clock).
+  WebRtc_Word32 Add10MsDataSafe(const WebRtc_UWord32 timestamp,
+                                const WebRtc_Word16* data,
+                                const WebRtc_UWord16 lengthSmpl,
+                                const WebRtc_UWord8 audioChannel);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                        WebRtc_Word16 payloadType);
+
+  // Holder structs owning the raw codec instances (left/right channel).
+  ACMG722EncStr* _ptrEncStr;
+  ACMG722DecStr* _ptrDecStr;
+
+  // Convenience aliases into the holder structs above.
+  G722EncInst* _encoderInstPtr;
+  G722EncInst* _encoderInstPtrRight; // Prepared for stereo
+  G722DecInst* _decoderInstPtr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g7221.cc b/trunk/src/modules/audio_coding/main/source/acm_g7221.cc
new file mode 100644
index 0000000..b82d931
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g7221.cc
@@ -0,0 +1,528 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g7221.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_G722_1
+// NOTE! G.722.1 is not included in the open-source package. The following
+// interface file is needed:
+//
+// /modules/audio_coding/codecs/g7221/main/interface/g7221_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcG7221_CreateEnc16(G722_1_16_encinst_t_** encInst);
+// int16_t WebRtcG7221_CreateEnc24(G722_1_24_encinst_t_** encInst);
+// int16_t WebRtcG7221_CreateEnc32(G722_1_32_encinst_t_** encInst);
+// int16_t WebRtcG7221_CreateDec16(G722_1_16_decinst_t_** decInst);
+// int16_t WebRtcG7221_CreateDec24(G722_1_24_decinst_t_** decInst);
+// int16_t WebRtcG7221_CreateDec32(G722_1_32_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221_FreeEnc16(G722_1_16_encinst_t_** encInst);
+// int16_t WebRtcG7221_FreeEnc24(G722_1_24_encinst_t_** encInst);
+// int16_t WebRtcG7221_FreeEnc32(G722_1_32_encinst_t_** encInst);
+// int16_t WebRtcG7221_FreeDec16(G722_1_16_decinst_t_** decInst);
+// int16_t WebRtcG7221_FreeDec24(G722_1_24_decinst_t_** decInst);
+// int16_t WebRtcG7221_FreeDec32(G722_1_32_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221_EncoderInit16(G722_1_16_encinst_t_* encInst);
+// int16_t WebRtcG7221_EncoderInit24(G722_1_24_encinst_t_* encInst);
+// int16_t WebRtcG7221_EncoderInit32(G722_1_32_encinst_t_* encInst);
+// int16_t WebRtcG7221_DecoderInit16(G722_1_16_decinst_t_* decInst);
+// int16_t WebRtcG7221_DecoderInit24(G722_1_24_decinst_t_* decInst);
+// int16_t WebRtcG7221_DecoderInit32(G722_1_32_decinst_t_* decInst);
+//
+// int16_t WebRtcG7221_Encode16(G722_1_16_encinst_t_* encInst,
+//                              int16_t* input,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Encode24(G722_1_24_encinst_t_* encInst,
+//                              int16_t* input,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Encode32(G722_1_32_encinst_t_* encInst,
+//                              int16_t* input,
+//                              int16_t len,
+//                              int16_t* output);
+//
+// int16_t WebRtcG7221_Decode16(G722_1_16_decinst_t_* decInst,
+//                              int16_t* bitstream,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Decode24(G722_1_24_decinst_t_* decInst,
+//                              int16_t* bitstream,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Decode32(G722_1_32_decinst_t_* decInst,
+//                              int16_t* bitstream,
+//                              int16_t len,
+//                              int16_t* output);
+//
+// int16_t WebRtcG7221_DecodePlc16(G722_1_16_decinst_t_* decInst,
+//                                 int16_t* output,
+//                                 int16_t nrLostFrames);
+// int16_t WebRtcG7221_DecodePlc24(G722_1_24_decinst_t_* decInst,
+//                                 int16_t* output,
+//                                 int16_t nrLostFrames);
+// int16_t WebRtcG7221_DecodePlc32(G722_1_32_decinst_t_* decInst,
+//                                 int16_t* output,
+//                                 int16_t nrLostFrames);
+// void WebRtcG7221_Version(char *versionStr, short len);
+#include "g7221_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G722_1
+
+// Stub implementations compiled when G.722.1 is excluded from the build
+// (WEBRTC_CODEC_G722_1 undefined; the codec is not in the open-source
+// package). Every operation fails with -1 / NULL.
+ACMG722_1::ACMG722_1(WebRtc_Word16 /* codecID */)
+    : _operationalRate(-1),
+      _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL),
+      _encoderInst16Ptr(NULL),
+      _encoderInst16PtrR(NULL),
+      _encoderInst24Ptr(NULL),
+      _encoderInst24PtrR(NULL),
+      _encoderInst32Ptr(NULL),
+      _encoderInst32PtrR(NULL),
+      _decoderInst16Ptr(NULL),
+      _decoderInst24Ptr(NULL),
+      _decoderInst32Ptr(NULL) {
+  return;
+}
+
+ACMG722_1::~ACMG722_1() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                        WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1::DecodeSafe(WebRtc_UWord8* /* bitStream  */,
+                                    WebRtc_Word16 /* bitStreamLenByte */,
+                                    WebRtc_Word16* /* audio */,
+                                    WebRtc_Word16* /* audioSamples */,
+                                    WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef  */,
+                                  const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMG722_1::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMG722_1::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMG722_1::DestructDecoderSafe() {
+  return;
+}
+
+void ACMG722_1::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1::UnregisterFromNetEqSafe(
+    ACMNetEQ* /* netEq */,  WebRtc_Word16 /* payloadType */) {
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+// Maps the codec-database ID to the G.722.1 operational bitrate
+// (16/24/32 kbit/s); -1 marks an unsupported ID and makes every
+// rate-dispatching method below fail.
+ACMG722_1::ACMG722_1(
+    WebRtc_Word16 codecID):
+    _encoderInstPtr(NULL),
+    _encoderInstPtrRight(NULL),
+    _decoderInstPtr(NULL),
+    _encoderInst16Ptr(NULL),
+    _encoderInst16PtrR(NULL),
+    _encoderInst24Ptr(NULL),
+    _encoderInst24PtrR(NULL),
+    _encoderInst32Ptr(NULL),
+    _encoderInst32PtrR(NULL),
+    _decoderInst16Ptr(NULL),
+    _decoderInst24Ptr(NULL),
+    _decoderInst32Ptr(NULL) {
+  _codecID = codecID;
+  if (_codecID == ACMCodecDB::kG722_1_16) {
+    _operationalRate = 16000;
+  } else if (_codecID == ACMCodecDB::kG722_1_24) {
+    _operationalRate = 24000;
+  } else if (_codecID == ACMCodecDB::kG722_1_32) {
+    _operationalRate = 32000;
+  } else {
+    _operationalRate = -1;
+  }
+  return;
+}
+
+// Deletes the generic instance pointers and clears the rate-specific
+// aliases for the active rate.
+// NOTE(review): the rate-specific instances created via
+// WebRtcG7221_CreateEncXX/DecXX are only nulled here, never passed to the
+// corresponding WebRtcG7221_FreeXX functions -- confirm ownership, this
+// looks like a potential leak unless they alias the deleted pointers.
+ACMG722_1::~ACMG722_1() {
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+
+  switch (_operationalRate) {
+    case 16000: {
+      _encoderInst16Ptr = NULL;
+      _encoderInst16PtrR = NULL;
+      _decoderInst16Ptr = NULL;
+      break;
+    }
+    case 24000: {
+      _encoderInst24Ptr = NULL;
+      _encoderInst24PtrR = NULL;
+      _decoderInst24Ptr = NULL;
+      break;
+    }
+    case 32000: {
+      _encoderInst32Ptr = NULL;
+      _encoderInst32PtrR = NULL;
+      _decoderInst32Ptr = NULL;
+      break;
+    }
+    default: {
+      break;
+    }
+  }
+  return;
+}
+
+// Encodes one 20 ms frame (320 samples/channel at 16 kHz) at the
+// configured bitrate. Stereo input is de-interleaved and the two encoded
+// payloads are written back to back into outB. Returns payload bytes.
+WebRtc_Word16 ACMG722_1::InternalEncode(WebRtc_UWord8* bitStream,
+                                        WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 leftChannel[320];
+  WebRtc_Word16 rightChannel[320];
+  WebRtc_Word16 lenInBytes;
+  WebRtc_Word16 outB[160];
+
+  // If stereo, split input signal in left and right channel before encoding
+  if (_noChannels == 2) {
+    for (int i = 0, j = 0; i < _frameLenSmpl * 2; i += 2, j++) {
+      leftChannel[j] = _inAudio[_inAudioIxRead + i];
+      rightChannel[j] = _inAudio[_inAudioIxRead + i + 1];
+    }
+  } else {
+    // Bug fix: copy 320 samples (320 * sizeof(WebRtc_Word16) bytes); the
+    // previous byte count of 320 only filled half of the frame buffer.
+    memcpy(leftChannel, &_inAudio[_inAudioIxRead],
+           320 * sizeof(WebRtc_Word16));
+  }
+
+  switch (_operationalRate) {
+    case 16000: {
+      // Bug fix: a stray "Inst" token here redeclared lenInBytes inside
+      // the case block, leaving the outer variable uninitialized.
+      lenInBytes = WebRtcG7221_Encode16(_encoderInst16Ptr, leftChannel,
+                                        320, &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221_Encode16(_encoderInst16PtrR, rightChannel,
+                                           320, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 24000: {
+      lenInBytes = WebRtcG7221_Encode24(_encoderInst24Ptr, leftChannel, 320,
+                                        &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221_Encode24(_encoderInst24PtrR, rightChannel,
+                                           320, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 32000: {
+      lenInBytes = WebRtcG7221_Encode32(_encoderInst32Ptr, leftChannel, 320,
+                                        &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221_Encode32(_encoderInst32PtrR, rightChannel,
+                                           320, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitEncode: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  memcpy(bitStream, outB, lenInBytes);
+  *bitStreamLenByte = lenInBytes;
+
+  // Increment the read index; this tells the caller how far
+  // we have gone forward in reading the audio buffer.
+  _inAudioIxRead += 320 * _noChannels;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMG722_1::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                    WebRtc_Word16 /* bitStreamLenByte */,
+                                    WebRtc_Word16* /* audio */,
+                                    WebRtc_Word16* /* audioSamples */,
+                                    WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// Initializes both channel encoders for the configured bitrate (the
+// right-channel instance first, then the left, whose status is returned).
+// Returns 0 on success, negative on failure.
+WebRtc_Word16 ACMG722_1::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams) {
+  WebRtc_Word16 ret;
+
+  switch (_operationalRate) {
+    case 16000: {
+      ret = WebRtcG7221_EncoderInit16(_encoderInst16PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221_EncoderInit16(_encoderInst16Ptr);
+    }
+    case 24000: {
+      ret = WebRtcG7221_EncoderInit24(_encoderInst24PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221_EncoderInit24(_encoderInst24Ptr);
+    }
+    case 32000: {
+      ret = WebRtcG7221_EncoderInit32(_encoderInst32PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221_EncoderInit32(_encoderInst32Ptr);
+    }
+    default: {
+      // Bug fix: a stray "Inst" token inside the argument list made this
+      // call syntactically invalid.
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
+                   _uniqueID, "InternalInitEncoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+}
+
+// Initializes the decoder instance matching the configured bitrate.
+// Returns the codec's init status, or -1 for an unsupported rate.
+WebRtc_Word16 ACMG722_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  switch (_operationalRate) {
+    case 16000: {
+      return WebRtcG7221_DecoderInit16(_decoderInst16Ptr);
+    }
+    case 24000: {
+      return WebRtcG7221_DecoderInit24(_decoderInst24Ptr);
+    }
+    case 32000: {
+      return WebRtcG7221_DecoderInit32(_decoderInst32Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitDecoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+}
+
+// Fills the NetEQ codec-definition entry for the configured G.722.1
+// bitrate. The sampling frequency is 16000 Hz for all three bitrates
+// (16000/24000/32000 denote bit rates, not sample rates).
+WebRtc_Word32 ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                  const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // Todo:
+    // log error
+    return -1;
+  }
+  // NetEq has an array of pointers to WebRtcNetEQ_CodecDef.
+  // Get an entry of that array (neteq wrapper will allocate memory)
+  // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
+  // be the index of the entry.
+  // Fill up the given structure by calling
+  // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTION."
+  // Then return the structure back to NetEQ to add the codec to it's
+  // database.
+  switch (_operationalRate) {
+    case 16000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1_16, codecInst.pltype,
+          _decoderInst16Ptr, 16000);
+      SET_G722_1_16_FUNCTIONS((codecDef));
+      break;
+    }
+    case 24000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1_24, codecInst.pltype,
+          _decoderInst24Ptr, 16000);
+      SET_G722_1_24_FUNCTIONS((codecDef));
+      break;
+    }
+    case 32000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1_32, codecInst.pltype,
+          _decoderInst32Ptr, 16000);
+      SET_G722_1_32_FUNCTIONS((codecDef));
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "CodecDef: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+// No duplicate instance is created for the FEC path.
+ACMGenericCodec* ACMG722_1::CreateInstance(void) {
+  return NULL;
+}
+
+// Creates both channel encoder instances for the configured bitrate.
+// Requires the generic instance pointers to have been allocated first.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word16 ACMG722_1::InternalCreateEncoder() {
+  if ((_encoderInstPtr == NULL) || (_encoderInstPtrRight == NULL)) {
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 16000: {
+      WebRtcG7221_CreateEnc16(&_encoderInst16Ptr);
+      WebRtcG7221_CreateEnc16(&_encoderInst16PtrR);
+      break;
+    }
+    case 24000: {
+      WebRtcG7221_CreateEnc24(&_encoderInst24Ptr);
+      WebRtcG7221_CreateEnc24(&_encoderInst24PtrR);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221_CreateEnc32(&_encoderInst32Ptr);
+      WebRtcG7221_CreateEnc32(&_encoderInst32PtrR);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateEncoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+// Clears encoder state flags, deletes the generic instance pointers, and
+// nulls the rate-specific aliases.
+// NOTE(review): the 16/24/32 instances are nulled without calling
+// WebRtcG7221_FreeEncXX, and the right-channel aliases (_encoderInstXXPtrR)
+// are not nulled at all -- confirm ownership and whether this leaks.
+void ACMG722_1::DestructEncoderSafe() {
+  _encoderExist = false;
+  _encoderInitialized = false;
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  _encoderInst16Ptr = NULL;
+  _encoderInst24Ptr = NULL;
+  _encoderInst32Ptr = NULL;
+}
+
+// Creates the decoder instance for the configured bitrate. Requires the
+// generic decoder pointer to have been allocated first.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word16 ACMG722_1::InternalCreateDecoder() {
+  if (_decoderInstPtr == NULL) {
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 16000: {
+      WebRtcG7221_CreateDec16(&_decoderInst16Ptr);
+      break;
+    }
+    case 24000: {
+      WebRtcG7221_CreateDec24(&_decoderInst24Ptr);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221_CreateDec32(&_decoderInst32Ptr);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateDecoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+// Clears decoder state flags, deletes the generic decoder pointer, and
+// nulls the rate-specific aliases (see ownership NOTE in the destructor).
+void ACMG722_1::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+  _decoderInst16Ptr = NULL;
+  _decoderInst24Ptr = NULL;
+  _decoderInst32Ptr = NULL;
+}
+
+// Frees an encoder instance handed back by the generic-codec base class.
+void ACMG722_1::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    // Bug fix: `delete` on a void* is undefined behavior. Cast to the
+    // instance type first, matching the sibling pointer members.
+    // NOTE(review): assumes the object was allocated as G722_1_Inst_t_ --
+    // confirm against the (closed-source) allocation site.
+    delete static_cast<G722_1_Inst_t_*>(ptrInst);
+  }
+  return;
+}
+
+// Removes the G.722.1 decoder (at the configured bitrate) from NetEQ
+// after verifying the payload type matches the registered one.
+WebRtc_Word16 ACMG722_1::UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                                 WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    // Bug fix: the format string had only two conversions (%s, %d) for
+    // three arguments; a trailing "%d" is added for the stored type.
+    WEBRTC_TRACE(webrtc::kTraceError,
+                 webrtc::kTraceAudioCoding,
+                 _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type %d",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 16000: {
+      return netEq->RemoveCodec(kDecoderG722_1_16);
+    }
+    case 24000: {
+      return netEq->RemoveCodec(kDecoderG722_1_24);
+    }
+    case 32000: {
+      return netEq->RemoveCodec(kDecoderG722_1_32);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "UnregisterFromNetEqSafe: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g7221.h b/trunk/src/modules/audio_coding/main/source/acm_g7221.h
new file mode 100644
index 0000000..3130afd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g7221.h
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G722_1_16_encinst_t_;
+struct G722_1_16_decinst_t_;
+struct G722_1_24_encinst_t_;
+struct G722_1_24_decinst_t_;
+struct G722_1_32_encinst_t_;
+struct G722_1_32_decinst_t_;
+struct G722_1_Inst_t_;
+
+namespace webrtc {
+
+class ACMG722_1: public ACMGenericCodec {
+ public:
+  ACMG722_1(WebRtc_Word16 codecID);
+  ~ACMG722_1();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                        WebRtc_Word16 payloadType);
+
+  WebRtc_Word32 _operationalRate;
+
+  G722_1_Inst_t_* _encoderInstPtr;
+  G722_1_Inst_t_* _encoderInstPtrRight; //Used in stereo mode
+  G722_1_Inst_t_* _decoderInstPtr;
+
+  // Only one set of these pointer is valid at any instance
+  G722_1_16_encinst_t_* _encoderInst16Ptr;
+  G722_1_16_encinst_t_* _encoderInst16PtrR;
+  G722_1_24_encinst_t_* _encoderInst24Ptr;
+  G722_1_24_encinst_t_* _encoderInst24PtrR;
+  G722_1_32_encinst_t_* _encoderInst32Ptr;
+  G722_1_32_encinst_t_* _encoderInst32PtrR;
+
+  // Only one of these pointer is valid at any instance
+  G722_1_16_decinst_t_* _decoderInst16Ptr;
+  G722_1_24_decinst_t_* _decoderInst24Ptr;
+  G722_1_32_decinst_t_* _decoderInst32Ptr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g7221c.cc b/trunk/src/modules/audio_coding/main/source/acm_g7221c.cc
new file mode 100644
index 0000000..920965e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g7221c.cc
@@ -0,0 +1,532 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g7221c.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+#include "trace.h"
+
+#ifdef WEBRTC_CODEC_G722_1C
+// NOTE! G.722.1C is not included in the open-source package. The following
+// interface file is needed:
+//
+// /modules/audio_coding/codecs/g7221c/main/interface/g7221c_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcG7221C_CreateEnc24(G722_1C_24_encinst_t_** encInst);
+// int16_t WebRtcG7221C_CreateEnc32(G722_1C_32_encinst_t_** encInst);
+// int16_t WebRtcG7221C_CreateEnc48(G722_1C_48_encinst_t_** encInst);
+// int16_t WebRtcG7221C_CreateDec24(G722_1C_24_decinst_t_** decInst);
+// int16_t WebRtcG7221C_CreateDec32(G722_1C_32_decinst_t_** decInst);
+// int16_t WebRtcG7221C_CreateDec48(G722_1C_48_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221C_FreeEnc24(G722_1C_24_encinst_t_** encInst);
+// int16_t WebRtcG7221C_FreeEnc32(G722_1C_32_encinst_t_** encInst);
+// int16_t WebRtcG7221C_FreeEnc48(G722_1C_48_encinst_t_** encInst);
+// int16_t WebRtcG7221C_FreeDec24(G722_1C_24_decinst_t_** decInst);
+// int16_t WebRtcG7221C_FreeDec32(G722_1C_32_decinst_t_** decInst);
+// int16_t WebRtcG7221C_FreeDec48(G722_1C_48_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221C_EncoderInit24(G722_1C_24_encinst_t_* encInst);
+// int16_t WebRtcG7221C_EncoderInit32(G722_1C_32_encinst_t_* encInst);
+// int16_t WebRtcG7221C_EncoderInit48(G722_1C_48_encinst_t_* encInst);
+// int16_t WebRtcG7221C_DecoderInit24(G722_1C_24_decinst_t_* decInst);
+// int16_t WebRtcG7221C_DecoderInit32(G722_1C_32_decinst_t_* decInst);
+// int16_t WebRtcG7221C_DecoderInit48(G722_1C_48_decinst_t_* decInst);
+//
+// int16_t WebRtcG7221C_Encode24(G722_1C_24_encinst_t_* encInst,
+//                               int16_t* input,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Encode32(G722_1C_32_encinst_t_* encInst,
+//                               int16_t* input,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Encode48(G722_1C_48_encinst_t_* encInst,
+//                               int16_t* input,
+//                               int16_t len,
+//                               int16_t* output);
+//
+// int16_t WebRtcG7221C_Decode24(G722_1C_24_decinst_t_* decInst,
+//                               int16_t* bitstream,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Decode32(G722_1C_32_decinst_t_* decInst,
+//                               int16_t* bitstream,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Decode48(G722_1C_48_decinst_t_* decInst,
+//                               int16_t* bitstream,
+//                               int16_t len,
+//                               int16_t* output);
+//
+// int16_t WebRtcG7221C_DecodePlc24(G722_1C_24_decinst_t_* decInst,
+//                                  int16_t* output,
+//                                  int16_t nrLostFrames);
+// int16_t WebRtcG7221C_DecodePlc32(G722_1C_32_decinst_t_* decInst,
+//                                  int16_t* output,
+//                                  int16_t nrLostFrames);
+// int16_t WebRtcG7221C_DecodePlc48(G722_1C_48_decinst_t_* decInst,
+//                                  int16_t* output,
+//                                  int16_t nrLostFrames);
+// void WebRtcG7221C_Version(char *versionStr, short len);
+#include "g7221c_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G722_1C
+
+ACMG722_1C::ACMG722_1C(WebRtc_Word16 /* codecID */)
+    : _operationalRate(-1),
+      _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL),
+      _encoderInst24Ptr(NULL),
+      _encoderInst24PtrR(NULL),
+      _encoderInst32Ptr(NULL),
+      _encoderInst32PtrR(NULL),
+      _encoderInst48Ptr(NULL),
+      _encoderInst48PtrR(NULL),
+      _decoderInst24Ptr(NULL),
+      _decoderInst32Ptr(NULL),
+      _decoderInst48Ptr(NULL) {
+  return;
+}
+
+ACMG722_1C::~ACMG722_1C() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalEncode(
+    WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1C::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16 /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio */,
+    WebRtc_Word16* /* audioSamples */,
+    WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+    const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMG722_1C::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMG722_1C::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMG722_1C::DestructDecoderSafe() {
+  return;
+}
+
+void ACMG722_1C::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::UnregisterFromNetEqSafe(
+    ACMNetEQ* /* netEq */,
+    WebRtc_Word16 /* payloadType */) {
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+ACMG722_1C::ACMG722_1C(WebRtc_Word16 codecID) :
+  _encoderInstPtr(NULL), _encoderInstPtrRight(NULL), _decoderInstPtr(NULL),
+      _encoderInst24Ptr(NULL), _encoderInst24PtrR(NULL), _encoderInst32Ptr(NULL),
+      _encoderInst32PtrR(NULL), _encoderInst48Ptr(NULL), _encoderInst48PtrR(NULL),
+      _decoderInst24Ptr(NULL), _decoderInst32Ptr(NULL), _decoderInst48Ptr(NULL) {
+  _codecID = codecID;
+  if (_codecID == ACMCodecDB::kG722_1C_24) {
+    _operationalRate = 24000;
+  } else if (_codecID == ACMCodecDB::kG722_1C_32) {
+    _operationalRate = 32000;
+  } else if (_codecID == ACMCodecDB::kG722_1C_48) {
+    _operationalRate = 48000;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Wrong codec id for G722_1c.");
+    _operationalRate = -1;
+  }
+  return;
+}
+
+ACMG722_1C::~ACMG722_1C() {
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+
+  switch (_operationalRate) {
+    case 24000: {
+      _encoderInst24Ptr = NULL;
+      _encoderInst24PtrR = NULL;
+      _decoderInst24Ptr = NULL;
+      break;
+    }
+    case 32000: {
+      _encoderInst32Ptr = NULL;
+      _encoderInst32PtrR = NULL;
+      _decoderInst32Ptr = NULL;
+      break;
+    }
+    case 48000: {
+      _encoderInst48Ptr = NULL;
+      _encoderInst48PtrR = NULL;
+      _decoderInst48Ptr = NULL;
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "Wrong rate for G722_1c.");
+      break;
+    }
+  }
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalEncode(WebRtc_UWord8* bitStream,
+                                         WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 leftChannel[640];
+  WebRtc_Word16 rightChannel[640];
+  WebRtc_Word16 lenInBytes;
+  WebRtc_Word16 outB[240];
+
+  // If stereo, split input signal in left and right channel before encoding
+  if (_noChannels == 2) {
+    for (int i = 0, j = 0; i < _frameLenSmpl * 2; i += 2, j++) {
+      leftChannel[j] = _inAudio[_inAudioIxRead + i];
+      rightChannel[j] = _inAudio[_inAudioIxRead + i + 1];
+    }
+  } else {
+    memcpy(leftChannel, &_inAudio[_inAudioIxRead], sizeof(leftChannel));
+  }
+
+  switch (_operationalRate) {
+    case 24000: {
+      lenInBytes = WebRtcG7221C_Encode24(_encoderInst24Ptr, leftChannel, 640,
+                                         &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221C_Encode24(_encoderInst24PtrR, rightChannel,
+                                            640, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 32000: {
+      lenInBytes = WebRtcG7221C_Encode32(_encoderInst32Ptr, leftChannel, 640,
+                                         &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221C_Encode32(_encoderInst32PtrR, rightChannel,
+                                            640, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 48000: {
+      lenInBytes = WebRtcG7221C_Encode48(_encoderInst48Ptr, leftChannel, 640,
+                                         &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221C_Encode48(_encoderInst48PtrR, rightChannel,
+                                            640, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalEncode: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+
+  memcpy(bitStream, outB, lenInBytes);
+  *bitStreamLenByte = lenInBytes;
+
+  // Increment the read index to tell the caller how far we have
+  // advanced in reading the audio buffer.
+  _inAudioIxRead += 640 * _noChannels;
+
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMG722_1C::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                     WebRtc_Word16 /* bitStreamLenByte */,
+                                     WebRtc_Word16* /* audio */,
+                                     WebRtc_Word16* /* audioSamples */,
+                                     WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams) {
+  WebRtc_Word16 ret;
+
+  switch (_operationalRate) {
+    case 24000: {
+      ret = WebRtcG7221C_EncoderInit24(_encoderInst24PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221C_EncoderInit24(_encoderInst24Ptr);
+    }
+    case 32000: {
+      ret = WebRtcG7221C_EncoderInit32(_encoderInst32PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221C_EncoderInit32(_encoderInst32Ptr);
+    }
+    case 48000: {
+      ret = WebRtcG7221C_EncoderInit48(_encoderInst48PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221C_EncoderInit48(_encoderInst48Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitEncode: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  switch (_operationalRate) {
+    case 24000: {
+      return WebRtcG7221C_DecoderInit24(_decoderInst24Ptr);
+    }
+    case 32000: {
+      return WebRtcG7221C_DecoderInit32(_decoderInst32Ptr);
+    }
+    case 48000: {
+      return WebRtcG7221C_DecoderInit48(_decoderInst48Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitDecoder: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+}
+
+WebRtc_Word32 ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                   const CodecInst& codecInst) {
+
+  if (!_decoderInitialized) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "CodeDef: decoder not initialized for G722_1c");
+    return -1;
+  }
+  // NetEq has an array of pointers to WebRtcNetEQ_CodecDef.
+  // get an entry of that array (neteq wrapper will allocate memory)
+  // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
+  // be the index of the entry.
+  // Fill up the given structure by calling
+  // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTION."
+  // Then return the structure back to NetEQ to add the codec to it's
+  // database.
+  // Note: G.722.1C always samples at 32 kHz, so 32000 is the correct
+  // frequency for all three bitrates (24/32/48 kbit/s).
+  switch (_operationalRate) {
+    case 24000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1C_24, codecInst.pltype,
+          _decoderInst24Ptr, 32000);
+      SET_G722_1C_24_FUNCTIONS((codecDef));
+      break;
+    }
+    case 32000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1C_32, codecInst.pltype,
+          _decoderInst32Ptr, 32000);
+      SET_G722_1C_32_FUNCTIONS((codecDef));
+      break;
+    }
+    case 48000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1C_48, codecInst.pltype,
+          _decoderInst48Ptr, 32000);
+      SET_G722_1C_48_FUNCTIONS((codecDef));
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "CodeDef: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+ACMGenericCodec*
+ACMG722_1C::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateEncoder() {
+  if ((_encoderInstPtr == NULL) || (_encoderInstPtrRight == NULL)) {
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 24000: {
+      WebRtcG7221C_CreateEnc24(&_encoderInst24Ptr);
+      WebRtcG7221C_CreateEnc24(&_encoderInst24PtrR);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221C_CreateEnc32(&_encoderInst32Ptr);
+      WebRtcG7221C_CreateEnc32(&_encoderInst32PtrR);
+      break;
+    }
+    case 48000: {
+      WebRtcG7221C_CreateEnc48(&_encoderInst48Ptr);
+      WebRtcG7221C_CreateEnc48(&_encoderInst48PtrR);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateEncoder: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMG722_1C::DestructEncoderSafe() {
+  _encoderExist = false;
+  _encoderInitialized = false;
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  _encoderInst24Ptr = NULL;
+  _encoderInst32Ptr = NULL;
+  _encoderInst48Ptr = NULL;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateDecoder() {
+  if (_decoderInstPtr == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalCreateDecoder: cannot create decoder");
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 24000: {
+      WebRtcG7221C_CreateDec24(&_decoderInst24Ptr);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221C_CreateDec32(&_decoderInst32Ptr);
+      break;
+    }
+    case 48000: {
+      WebRtcG7221C_CreateDec48(&_decoderInst48Ptr);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateDecoder: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMG722_1C::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+  _decoderInst24Ptr = NULL;
+  _decoderInst32Ptr = NULL;
+  _decoderInst48Ptr = NULL;
+}
+
+void ACMG722_1C::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    delete ptrInst;
+  }
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::UnregisterFromNetEqSafe(ACMNetEQ* netEq,
+                                                  WebRtc_Word16 payloadType) {
+  if (payloadType != _decoderParams.codecInstant.pltype) {
+    WEBRTC_TRACE(webrtc::kTraceError,
+                 webrtc::kTraceAudioCoding,
+                 _uniqueID,
+                 "Cannot unregister codec %s given payload-type %d does not "
+                 "match the stored payload type %d",
+                 _decoderParams.codecInstant.plname, payloadType,
+                 _decoderParams.codecInstant.pltype);
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 24000: {
+      return netEq->RemoveCodec(kDecoderG722_1C_24);
+    }
+    case 32000: {
+      return netEq->RemoveCodec(kDecoderG722_1C_32);
+    }
+    case 48000: {
+      return netEq->RemoveCodec(kDecoderG722_1C_48);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "Could not remove codec from NetEQ for G722_1c. "
+                   "Sampling frequency doesn't match");
+      return -1;
+    }
+  }
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g7221c.h b/trunk/src/modules/audio_coding/main/source/acm_g7221c.h
new file mode 100644
index 0000000..5129e4c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g7221c.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1C_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1C_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G722_1C_24_encinst_t_;
+struct G722_1C_24_decinst_t_;
+struct G722_1C_32_encinst_t_;
+struct G722_1C_32_decinst_t_;
+struct G722_1C_48_encinst_t_;
+struct G722_1C_48_decinst_t_;
+struct G722_1_Inst_t_;
+
+namespace webrtc {
+
+class ACMG722_1C : public ACMGenericCodec
+{
+public:
+    ACMG722_1C(WebRtc_Word16 codecID);
+    ~ACMG722_1C();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*     netEq,
+        WebRtc_Word16 payloadType);
+
+    WebRtc_Word32    _operationalRate;
+
+    G722_1_Inst_t_*  _encoderInstPtr;
+    G722_1_Inst_t_*  _encoderInstPtrRight; //Used in stereo mode
+    G722_1_Inst_t_*  _decoderInstPtr;
+
+    // Only one set of these pointer is valid at any instance
+    G722_1C_24_encinst_t_* _encoderInst24Ptr;
+    G722_1C_24_encinst_t_* _encoderInst24PtrR;
+    G722_1C_32_encinst_t_* _encoderInst32Ptr;
+    G722_1C_32_encinst_t_* _encoderInst32PtrR;
+    G722_1C_48_encinst_t_* _encoderInst48Ptr;
+    G722_1C_48_encinst_t_* _encoderInst48PtrR;
+
+    // Only one of these pointer is valid at any instance
+    G722_1C_24_decinst_t_* _decoderInst24Ptr;
+    G722_1C_32_decinst_t_* _decoderInst32Ptr;
+    G722_1C_48_decinst_t_* _decoderInst48Ptr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1C_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g729.cc b/trunk/src/modules/audio_coding/main/source/acm_g729.cc
new file mode 100644
index 0000000..bca2d72
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g729.cc
@@ -0,0 +1,546 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g729.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_G729
+    // NOTE! G.729 is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/g729/main/interface/g729_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcG729_CreateEnc(G729_encinst_t_** inst);
+    // int16_t WebRtcG729_CreateDec(G729_decinst_t_** inst);
+    // int16_t WebRtcG729_FreeEnc(G729_encinst_t_* inst);
+    // int16_t WebRtcG729_FreeDec(G729_decinst_t_* inst);
+    // int16_t WebRtcG729_Encode(G729_encinst_t_* encInst, int16_t* input,
+    //                                       int16_t len, int16_t* output);
+    // int16_t WebRtcG729_EncoderInit(G729_encinst_t_* encInst, int16_t mode);
+    // int16_t WebRtcG729_Decode(G729_decinst_t_* decInst);
+    // int16_t WebRtcG729_DecodeBwe(G729_decinst_t_* decInst, int16_t* input);
+    // int16_t WebRtcG729_DecodePlc(G729_decinst_t_* decInst);
+    // int16_t WebRtcG729_DecoderInit(G729_decinst_t_* decInst);
+    // void WebRtcG729_Version(char *versionStr, short len);
+    #include "g729_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G729
+
+ACMG729::ACMG729(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+
+ACMG729::~ACMG729()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::EnableDTX()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::DisableDTX()
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMG729::ReplaceInternalDTXSafe(
+    const bool /*replaceInternalDTX*/)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMG729::IsInternalDTXReplacedSafe(
+    bool* /* internalDTXReplaced */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMG729::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMG729::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMG729::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::UnregisterFromNetEqSafe(
+    ACMNetEQ*     /* netEq       */,
+    WebRtc_Word16 /* payloadType */)
+{
+    return -1;
+}
+
+
+#else     //===================== Actual Implementation =======================
+
+ACMG729::ACMG729(
+    WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+    _hasInternalDTX = true;
+    return;
+}
+
+
+ACMG729::~ACMG729()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        // Delete encoder memory
+        WebRtcG729_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        // Delete decoder memory
+        WebRtcG729_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    // Initialize before entering the loop
+    WebRtc_Word16 noEncodedSamples = 0;
+    WebRtc_Word16 tmpLenByte = 0;
+    WebRtc_Word16 vadDecision = 0;
+    *bitStreamLenByte = 0;
+    while(noEncodedSamples < _frameLenSmpl)
+    {
+        // Call G.729 encoder with pointer to encoder memory, input
+        // audio, number of samples and bitsream
+        tmpLenByte = WebRtcG729_Encode(_encoderInstPtr,
+            &_inAudio[_inAudioIxRead], 80,
+            (WebRtc_Word16*)(&(bitStream[*bitStreamLenByte])));
+
+        // increment the read index this tell the caller that how far
+        // we have gone forward in reading the audio buffer
+        _inAudioIxRead += 80;
+
+        // sanity check
+        if(tmpLenByte < 0)
+        {
+            // error has happened
+            *bitStreamLenByte = 0;
+            return -1;
+        }
+
+        // increment number of written bytes
+        *bitStreamLenByte += tmpLenByte;
+        switch(tmpLenByte)
+        {
+        case 0:
+            {
+                if(0 == noEncodedSamples)
+                {
+                    // this is the first 10 ms in this packet and there is
+                    // no data generated, perhaps DTX is enabled and the
+                    // codec is not generating any bit-stream for this 10 ms.
+                    // we do not continue encoding this frame.
+                    return 0;
+                }
+                break;
+            }
+        case 2:
+            {
+                // check if G.729 internal DTX is enabled
+                if(_hasInternalDTX && _dtxEnabled)
+                {
+                    vadDecision = 0;
+                    for(WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++)
+                    {
+                        _vadLabel[n] = vadDecision;
+                    }
+                }
+                // we got a SID and have to send out this packet no matter
+                // how much audio we have encoded
+                return *bitStreamLenByte;
+            }
+        case 10:
+            {
+                vadDecision = 1;
+                // this is a valid length just continue encoding
+                break;
+            }
+        default:
+            {
+                return -1;
+            }
+        }
+
+        // update number of encoded samples
+        noEncodedSamples += 80;
+    }
+
+    // update VAD decision vector
+    if(_hasInternalDTX && !vadDecision && _dtxEnabled)
+    {
+        for(WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++)
+        {
+            _vadLabel[n] = vadDecision;
+        }
+    }
+
+    // done encoding, return number of encoded bytes
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMG729::EnableDTX()
+{
+    if(_dtxEnabled)
+    {
+        // DTX already enabled, do nothing
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        // Re-init the G.729 encoder to turn on DTX
+        if(WebRtcG729_EncoderInit(_encoderInstPtr, 1) < 0)
+        {
+            return -1;
+        }
+        _dtxEnabled = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+WebRtc_Word16
+ACMG729::DisableDTX()
+{
+    if(!_dtxEnabled)
+    {
+        // DTX already dissabled, do nothing
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        // Re-init the G.729 decoder to turn off DTX
+        if(WebRtcG729_EncoderInit(_encoderInstPtr, 0) < 0)
+        {
+            return -1;
+        }
+        _dtxEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exists, therefore disabling is harmless
+        return 0;
+    }
+}
+
+
+WebRtc_Word32
+ACMG729::ReplaceInternalDTXSafe(
+    const bool replaceInternalDTX)
+{
+    // This function is used to disable the G.729 built-in DTX and use an
+    // external one instead (or to restore the built-in DTX when
+    // replaceInternalDTX is false).
+    //
+    // _hasInternalDTX is flipped below, so equality here means the
+    // requested state differs from the current one and a swap is needed.
+    if(replaceInternalDTX == _hasInternalDTX)
+    {
+        // Make sure we keep the DTX/VAD setting if possible
+        bool oldEnableDTX = _dtxEnabled;
+        bool oldEnableVAD = _vadEnabled;
+        ACMVADMode oldMode = _vadMode;
+        if (replaceInternalDTX)
+        {
+            // Disable internal DTX before enabling external DTX
+            DisableDTX();
+        }
+        else
+        {
+            // Disable external DTX before enabling internal
+            ACMGenericCodec::DisableDTX();
+        }
+        _hasInternalDTX = !replaceInternalDTX;
+        WebRtc_Word16 status = SetVADSafe(oldEnableDTX, oldEnableVAD, oldMode);
+        // Check if VAD status has changed from inactive to active, or if error was
+        // reported
+        if (status == 1) {
+            _vadEnabled = true;
+            return status;
+        } else if (status < 0) {
+            // Roll back the DTX-ownership flag on failure.
+            _hasInternalDTX = replaceInternalDTX;
+            return -1;
+        }
+    }
+    return 0;
+}
+
+
+WebRtc_Word32
+ACMG729::IsInternalDTXReplacedSafe(
+    bool* internalDTXReplaced)
+{
+    // Report whether the built-in DTX has been replaced by an external one.
+    // Output is true when the internal DTX is NOT in use.
+    *internalDTXReplaced = !_hasInternalDTX;
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMG729::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    // Intentional no-op: the G.729 decoder is invoked from inside NetEQ,
+    // not through this path.
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // Initialize the G.729 encoder; the second argument enables (1) or
+    // disables (0) the codec's built-in DTX.
+    WebRtc_Word16 dtxFlag = codecParams->enableDTX ? 1 : 0;
+    return WebRtcG729_EncoderInit(_encoderInstPtr, dtxFlag);
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // Initialize the G.729 decoder; no codec parameters are needed.
+    return WebRtcG729_DecoderInit(_decoderInstPtr);
+}
+
+
+WebRtc_Word32
+ACMG729::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    // Fill in the NetEQ codec definition for G.729 (8 kHz).
+    // Requires an initialized decoder instance.
+    if (!_decoderInitialized)
+    {
+        // TODO: log error
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_G729_FUNCTIONS."
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderG729, codecInst.pltype,
+        _decoderInstPtr, 8000);
+    SET_G729_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMG729::CreateInstance(void)
+{
+    // FEC hook; not used for G.729, so no instance is ever created.
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateEncoder()
+{
+    // Allocate the G.729 encoder instance; stores the pointer in
+    // _encoderInstPtr. Returns the allocator's status code.
+    return WebRtcG729_CreateEnc(&_encoderInstPtr);
+}
+
+
+void
+ACMG729::DestructEncoderSafe()
+{
+    // Free encoder memory and clear the state flags; safe to call when
+    // no encoder was ever created (_encoderInstPtr == NULL).
+    _encoderExist = false;
+    _encoderInitialized = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcG729_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateDecoder()
+{
+    // Allocate the G.729 decoder instance; stores the pointer in
+    // _decoderInstPtr. Returns the allocator's status code.
+    return WebRtcG729_CreateDec(&_decoderInstPtr);
+}
+
+
+void
+ACMG729::DestructDecoderSafe()
+{
+    // Free decoder memory and clear the state flags; safe to call when
+    // no decoder was ever created (_decoderInstPtr == NULL).
+    _decoderExist = false;
+    _decoderInitialized = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcG729_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMG729::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    // Free an encoder instance passed in by the caller; NULL is a no-op.
+    if(ptrInst == NULL)
+    {
+        return;
+    }
+    WebRtcG729_FreeEnc((G729_encinst_t_*)ptrInst);
+}
+
+
+WebRtc_Word16
+ACMG729::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    // Remove the G.729 decoder from the NetEQ database. The supplied
+    // payload type must match the one the decoder was registered with.
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: the format string had two conversion specifiers but
+        // three arguments; the stored payload type was passed to the
+        // trace call without a matching "%d" and was never printed.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+    return netEq->RemoveCodec(kDecoderG729);
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g729.h b/trunk/src/modules/audio_coding/main/source/acm_g729.h
new file mode 100644
index 0000000..50b648a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g729.h
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G729_encinst_t_;
+struct G729_decinst_t_;
+
+namespace webrtc {
+
+// ACM wrapper for the G.729 speech codec (8 kHz narrowband).
+// Implements the ACMGenericCodec interface; the codec library itself
+// (WebRtcG729_*) is only available when WEBRTC_CODEC_G729 is defined.
+class ACMG729 : public ACMGenericCodec
+{
+public:
+    ACMG729(WebRtc_Word16 codecID);
+    ~ACMG729();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    // No-op: decoding happens inside NetEQ, not through this path.
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    // Fills in the NetEQ codec definition (payload type, instance, 8000 Hz).
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    // G.729 has built-in DTX; these toggle it by re-initializing the encoder.
+    WebRtc_Word16 EnableDTX();
+
+    WebRtc_Word16 DisableDTX();
+
+    // Switch between the codec's internal DTX and the ACM's external DTX.
+    WebRtc_Word32 ReplaceInternalDTXSafe(
+        const bool replaceInternalDTX);
+
+    WebRtc_Word32 IsInternalDTXReplacedSafe(
+        bool* internalDTXReplaced);
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*     netEq,
+        WebRtc_Word16 payloadType);
+
+    // Owned codec-library instances (freed in the destructor / Destruct*Safe).
+    G729_encinst_t_* _encoderInstPtr;
+    G729_decinst_t_* _decoderInstPtr;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g7291.cc b/trunk/src/modules/audio_coding/main/source/acm_g7291.cc
new file mode 100644
index 0000000..d832e9c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g7291.cc
@@ -0,0 +1,500 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g7291.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_G729_1
+    // NOTE! G.729.1 is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/g7291/main/interface/g7291_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcG7291_Create(G729_1_inst_t_** inst);
+    // int16_t WebRtcG7291_Free(G729_1_inst_t_* inst);
+    // int16_t WebRtcG7291_Encode(G729_1_inst_t_* encInst, int16_t* input,
+    //                            int16_t* output, int16_t myRate,
+    //                            int16_t nrFrames);
+    // int16_t WebRtcG7291_EncoderInit(G729_1_inst_t_* encInst, int16_t myRate,
+    //                                 int16_t flag8kHz, int16_t flagG729mode);
+    // int16_t WebRtcG7291_Decode(G729_1_inst_t_* decInst);
+    // int16_t WebRtcG7291_DecodeBwe(G729_1_inst_t_* decInst, int16_t* input);
+    // int16_t WebRtcG7291_DecodePlc(G729_1_inst_t_* decInst);
+    // int16_t WebRtcG7291_DecoderInit(G729_1_inst_t_* decInst);
+    // void WebRtcG7291_Version(char *versionStr, short len);
+    #include "g7291_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G729_1
+
+// Stub implementation, compiled when the (proprietary) G.729.1 codec is
+// not part of the build (WEBRTC_CODEC_G729_1 undefined). Every method
+// fails (-1 / NULL) or does nothing, so the codec is unavailable but the
+// class still links.
+ACMG729_1::ACMG729_1( WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _myRate(32000),
+      _flag8kHz(0),
+      _flagG729mode(0) {
+  return;
+}
+
+
+ACMG729_1::~ACMG729_1()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729_1::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMG729_1::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMG729_1::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729_1::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729_1::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMG729_1::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::UnregisterFromNetEqSafe(
+    ACMNetEQ*     /* netEq       */,
+    WebRtc_Word16 /* payloadType */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMG729_1::SetBitRateSafe(
+    const WebRtc_Word32 /*rate*/ )
+{
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+struct G729_1_inst_t_;
+
+// Construct the G.729.1 wrapper with no codec instances allocated yet
+// and the default configuration (32 kbps, wideband input, G.729.1 mode).
+ACMG729_1::ACMG729_1(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _myRate(32000),  // Default rate.
+      _flag8kHz(0),
+      _flagG729mode(0) {
+  // TODO(tlegrand): We should add codecID as a input variable to the
+  // constructor of ACMGenericCodec.
+  _codecID = codecID;
+  return;
+}
+
+// Release any codec instances still owned by this wrapper.
+ACMG729_1::~ACMG729_1()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+// Encode one full frame (_frameLenSmpl samples) of buffered audio into
+// bitStream. Returns the payload length in bytes, or -1 on error (in
+// which case *bitStreamLenByte is zeroed). Advances _inAudioIxRead so
+// the caller knows how much of the audio buffer was consumed.
+WebRtc_Word16
+ACMG729_1::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+
+    // Initialize before entering the loop.
+    WebRtc_Word16 noEncodedSamples = 0;
+    *bitStreamLenByte = 0;
+
+  WebRtc_Word16 byteLengthFrame = 0;
+
+    // Derive number of 20ms frames per encoded packet.
+  // [1,2,3] <=> [20,40,60]ms <=> [320,640,960] samples
+    WebRtc_Word16 n20msFrames = (_frameLenSmpl / 320);
+    // Expected byte length for the frame at _myRate bps (50 packets/sec
+    // per 20ms frame). The +1 byte carries rate information, present only
+    // outside plain-G.729 mode.
+    byteLengthFrame = _myRate/(8*50) * n20msFrames + (1 - _flagG729mode);
+
+    // The following might be revised if we have G729.1 Annex C (support for DTX);
+    do
+    {
+        *bitStreamLenByte = WebRtcG7291_Encode(_encoderInstPtr, &_inAudio[_inAudioIxRead],
+       (WebRtc_Word16*)bitStream, _myRate, n20msFrames);
+
+        // Advance the read index by 160 samples per iteration (10 ms;
+        // presumably at 16 kHz — confirm against EncoderSampFreq) so the
+        // caller knows how far we have read into the audio buffer.
+    _inAudioIxRead += 160;
+
+        // sanity check: negative length means the encoder failed
+        if(*bitStreamLenByte < 0)
+        {
+      // error has happened
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "InternalEncode: Encode error for G729_1");
+            *bitStreamLenByte = 0;
+            return -1;
+        }
+
+    noEncodedSamples += 160;
+    } while(*bitStreamLenByte == 0);  // 0 bytes => encoder still buffering
+
+
+    // The encoder must emit exactly one packet of the expected size.
+    // This criteria will change if we have Annex C.
+    if(*bitStreamLenByte != byteLengthFrame)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalEncode: Encode error for G729_1");
+        *bitStreamLenByte = 0;
+        return -1;
+    }
+
+    // All samples of the frame must have been consumed.
+    if(noEncodedSamples != _frameLenSmpl)
+    {
+        *bitStreamLenByte = 0;
+        return -1;
+    }
+
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMG729_1::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    // Intentional no-op: decoding is performed inside NetEQ.
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+  // Cache the configured bit rate, then (re)initialize the encoder via
+  // SetBitRateSafe(), which also validates the rate.
+  _myRate = codecParams->codecInstant.rate;
+    return SetBitRateSafe( (WebRtc_UWord32)_myRate);
+}
+
+
+// Initialize the G.729.1 decoder instance. Returns 0 on success, -1 on
+// failure (logged via WEBRTC_TRACE).
+WebRtc_Word16
+ACMG729_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    if (WebRtcG7291_DecoderInit(_decoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "InternalInitDecoder: init decoder failed for G729_1");
+    return -1;
+  }
+  return 0;
+}
+
+
+WebRtc_Word32
+ACMG729_1::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    // Fill in the NetEQ codec definition for G.729.1 (16 kHz).
+    // Requires an initialized decoder instance.
+    if (!_decoderInitialized)
+    {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "CodeDef: Decoder uninitialized for G729_1");
+      return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_G729_1_FUNCTIONS."
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderG729_1, codecInst.pltype,
+        _decoderInstPtr, 16000);
+    SET_G729_1_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMG729_1::CreateInstance(void)
+{
+    // FEC hook; not used for G.729.1, so no instance is ever created.
+    return NULL;
+}
+
+
+// Allocate the G.729.1 encoder instance into _encoderInstPtr.
+// Returns 0 on success, -1 on failure (logged).
+WebRtc_Word16
+ACMG729_1::InternalCreateEncoder()
+{
+    if (WebRtcG7291_Create(&_encoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "InternalCreateEncoder: create encoder failed for G729_1");
+    return -1;
+  }
+  return 0;
+}
+
+
+void
+ACMG729_1::DestructEncoderSafe()
+{
+    // Free encoder memory and clear state flags; safe when no encoder
+    // was ever created.
+    _encoderExist = false;
+    _encoderInitialized = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+// Allocate the G.729.1 decoder instance into _decoderInstPtr.
+// Returns 0 on success, -1 on failure (logged). The same Create call is
+// used for encoder and decoder instances.
+WebRtc_Word16
+ACMG729_1::InternalCreateDecoder()
+{
+   if (WebRtcG7291_Create(&_decoderInstPtr) < 0)
+   {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+    "InternalCreateDecoder: create decoder failed for G729_1");
+     return -1;
+   }
+   return 0;
+}
+
+
+void
+ACMG729_1::DestructDecoderSafe()
+{
+    // Free decoder memory and clear state flags; safe when no decoder
+    // was ever created.
+    _decoderExist = false;
+    _decoderInitialized = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMG729_1::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        // NOTE(review): the free call is commented out, so the instance
+        // passed in is never released here — looks like a potential leak;
+        // confirm whether ownership is handled elsewhere.
+        //WebRtcG7291_Free((G729_1_inst_t*)ptrInst);
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    // Remove the G.729.1 decoder from the NetEQ database. The supplied
+    // payload type must match the one the decoder was registered with.
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: the format string contained no conversion specifiers,
+        // yet three arguments were passed and silently dropped. Use the
+        // same "%s ... %d ... %d" format as ACMG729's implementation.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+    return netEq->RemoveCodec(kDecoderG729_1);
+}
+
+// Validate and apply a new G.729.1 bit rate, then re-initialize the
+// encoder with it. Returns 0 on success, -1 on an invalid rate or a
+// failed encoder re-init. On success the encoder params are updated.
+WebRtc_Word16
+ACMG729_1::SetBitRateSafe(
+    const WebRtc_Word32 rate)
+{
+    // Allowed rates: { 8000, 12000, 14000, 16000, 18000, 20000,
+    //                 22000, 24000, 26000, 28000, 30000, 32000 }.
+    // The original 12-case switch assigned the matching constant in every
+    // branch; it collapses to "8000, or any multiple of 2000 in
+    // [12000, 32000]" (note 10000 is NOT valid).
+    // TODO(tlegrand): This check exists in one other place too. Should be
+    // possible to reuse code.
+    bool validRate = (rate == 8000) ||
+        ((rate >= 12000) && (rate <= 32000) && ((rate % 2000) == 0));
+    if(!validRate)
+    {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "SetBitRateSafe: Invalid rate G729_1");
+        return -1;
+    }
+    _myRate = (WebRtc_UWord16)rate;
+
+    // Re-init with new rate
+    if (WebRtcG7291_EncoderInit(_encoderInstPtr, _myRate, _flag8kHz,
+                                _flagG729mode) >= 0)
+    {
+        _encoderParams.codecInstant.rate = _myRate;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_g7291.h b/trunk/src/modules/audio_coding/main/source/acm_g7291.h
new file mode 100644
index 0000000..cb27dc9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_g7291.h
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_1_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_1_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G729_1_inst_t_;
+struct G729_1_inst_t_;
+
+namespace webrtc {
+
+// ACM wrapper for the G.729.1 wideband codec (16 kHz, 8-32 kbps).
+// The codec library (WebRtcG7291_*) is proprietary; when
+// WEBRTC_CODEC_G729_1 is undefined only failing stubs are compiled.
+class ACMG729_1: public ACMGenericCodec
+{
+public:
+    ACMG729_1(WebRtc_Word16 codecID);
+    ~ACMG729_1();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    // No-op: decoding happens inside NetEQ, not through this path.
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    // Fills in the NetEQ codec definition (payload type, instance, 16000 Hz).
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst& codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    //WebRtc_Word16 EnableDTX();
+    //
+    //WebRtc_Word16 DisableDTX();
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ* netEq,
+        WebRtc_Word16   payloadType);
+
+  // Validates the rate and re-initializes the encoder with it.
+  WebRtc_Word16 SetBitRateSafe(
+    const WebRtc_Word32 rate);
+
+    // Owned codec-library instances (freed in the destructor / Destruct*Safe).
+    G729_1_inst_t_* _encoderInstPtr;
+    G729_1_inst_t_* _decoderInstPtr;
+
+    // Encoder configuration passed to WebRtcG7291_EncoderInit.
+    WebRtc_UWord16     _myRate;
+  WebRtc_Word16     _flag8kHz;
+    WebRtc_Word16     _flagG729mode;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_1_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_generic_codec.cc b/trunk/src/modules/audio_coding/main/source/acm_generic_codec.cc
new file mode 100644
index 0000000..0e212fb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_generic_codec.cc
@@ -0,0 +1,1554 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <string.h>
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_generic_codec.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_vad.h"
+#include "webrtc_cng.h"
+
+namespace webrtc
+{
+
+// Comfort-noise-generation (CNG) constants.
+enum
+{
+    // Upper bound on PLC parameters, taken from the CNG library.
+    kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER,
+    kNewCNGNumPLCParams = 8
+};
+
+// Interval (ms) between SID frames during DTX.
+#define ACM_SID_INTERVAL_MSEC 100
+
+// We set some of the variables to invalid values as a check point
+// if a proper initialization has happened. Another approach is
+// to initialize to a default codec that we are sure is always included.
+ACMGenericCodec::ACMGenericCodec()
+    : _inAudioIxWrite(0),
+      _inAudioIxRead(0),
+      _inTimestampIxWrite(0),
+      _inAudio(NULL),
+      _inTimestamp(NULL),
+      _frameLenSmpl(-1),  // invalid value
+      _noChannels(1),
+      _codecID(-1),  // invalid value
+      _noMissedSamples(0),
+      _encoderExist(false),
+      _decoderExist(false),
+      _encoderInitialized(false),
+      _decoderInitialized(false),
+      _registeredInNetEq(false),
+      _hasInternalDTX(false),
+      _ptrVADInst(NULL),
+      _vadEnabled(false),
+      _vadMode(VADNormal),
+      _dtxEnabled(false),
+      _ptrDTXInst(NULL),
+      _numLPCParams(kNewCNGNumPLCParams),
+      _sentCNPrevious(false),
+      _isMaster(true),
+      _netEqDecodeLock(NULL),
+      // The lock is heap-allocated here and deleted in the destructor.
+      _codecWrapperLock(*RWLockWrapper::CreateRWLock()),
+      _lastEncodedTimestamp(0),
+      // Arbitrary sentinel so a real first timestamp is unlikely to match
+      // (Add10MsDataSafe treats an equal timestamp as a re-send).
+      _lastTimestamp(0xD87F3F9F),
+      _isAudioBuffFresh(true),
+      _uniqueID(0) {
+  // Initialize VAD vector.
+  for (int i = 0; i < MAX_FRAME_SIZE_10MSEC; i++) {
+    _vadLabel[i] = 0;
+  }
+
+  // Nullify memory for encoder and decoder, and set payload type to an
+  // invalid value.
+  memset(&_encoderParams, 0, sizeof(WebRtcACMCodecParams));
+  _encoderParams.codecInstant.pltype = -1;
+  memset(&_decoderParams, 0, sizeof(WebRtcACMCodecParams));
+  _decoderParams.codecInstant.pltype = -1;
+}
+
+ACMGenericCodec::~ACMGenericCodec()
+{
+    // Check all the members which are pointers and
+    // if they are not NULL delete/free them.
+
+    if(_ptrVADInst != NULL)
+    {
+        WebRtcVad_Free(_ptrVADInst);
+        _ptrVADInst = NULL;
+    }
+
+    if (_inAudio != NULL)
+    {
+        delete [] _inAudio;
+        _inAudio = NULL;
+    }
+
+    if (_inTimestamp != NULL)
+    {
+        delete [] _inTimestamp;
+        _inTimestamp = NULL;
+    }
+    if(_ptrDTXInst != NULL)
+    {
+        WebRtcCng_FreeEnc(_ptrDTXInst);
+        _ptrDTXInst = NULL;
+    }
+    // The lock was allocated in the constructor's initializer list.
+    delete &_codecWrapperLock;
+}
+
+// Thread-safe entry point: takes the codec write lock, then delegates
+// the actual buffering to Add10MsDataSafe.
+WebRtc_Word32
+ACMGenericCodec::Add10MsData(
+    const WebRtc_UWord32 timestamp,
+    const WebRtc_Word16* data,
+    const WebRtc_UWord16 lengthSmpl,
+    const WebRtc_UWord8  audioChannel)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return Add10MsDataSafe(timestamp, data, lengthSmpl, audioChannel);
+}
+
+// Append 10 ms of audio (lengthSmpl samples per channel) to the internal
+// buffer, tracking one timestamp per 10 ms block. Returns 0 on success,
+// a negative count of overwritten samples when the buffer overflows, and
+// -1 on invalid input. Caller must hold _codecWrapperLock.
+WebRtc_Word32
+ACMGenericCodec::Add10MsDataSafe(
+    const WebRtc_UWord32 timestamp,
+    const WebRtc_Word16* data,
+    const WebRtc_UWord16 lengthSmpl,
+    const WebRtc_UWord8  audioChannel)
+{
+    // The codec expects to get data in correct sampling rate.
+    // get the sampling frequency of the codec
+    WebRtc_UWord16 plFreqHz;
+
+    if(EncoderSampFreq(plFreqHz) < 0)
+    {
+        // _codecID is not correct, perhaps the codec is not initialized yet.
+        return -1;
+    }
+
+    // Sanity check, if the length of the input corresponds to 10 ms.
+    if((plFreqHz / 100) != lengthSmpl)
+    {
+        // This is not 10 ms of audio, given the sampling frequency of the
+        // codec
+        return -1;
+    }
+    if(_lastTimestamp == timestamp)
+    {
+        // Same timestamp as the last time: treat as a re-send and overwrite
+        // the previously added 10 ms block when one is present.
+        if((_inAudioIxWrite >= lengthSmpl) && (_inTimestampIxWrite > 0))
+        {
+            _inAudioIxWrite -= lengthSmpl;
+            _inTimestampIxWrite--;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _uniqueID,
+                "Adding 10ms with previous timestamp, \
+overwriting the previous 10ms");
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _uniqueID,
+                "Adding 10ms with previous timestamp, this will sound bad");
+        }
+    }
+
+    _lastTimestamp = timestamp;
+
+    // Overflow path: the new block does not fit, so drop the oldest
+    // samples and report how many were lost.
+    if ((_inAudioIxWrite + lengthSmpl*audioChannel) > AUDIO_BUFFER_SIZE_W16)
+    {
+        // Get the number of samples to be overwritten
+        WebRtc_Word16 missedSamples = _inAudioIxWrite + lengthSmpl*audioChannel -
+            AUDIO_BUFFER_SIZE_W16;
+
+        // Move the data (overwrite the old data)
+        memmove(_inAudio, _inAudio + missedSamples,
+            (AUDIO_BUFFER_SIZE_W16 - lengthSmpl*audioChannel)*sizeof(WebRtc_Word16));
+        // Copy the new data
+        memcpy(_inAudio + (AUDIO_BUFFER_SIZE_W16 - lengthSmpl*audioChannel), data,
+            lengthSmpl*audioChannel * sizeof(WebRtc_Word16));
+
+        // Get the number of 10 ms blocks which are overwritten
+        WebRtc_Word16 missed10MsecBlocks =
+            (WebRtc_Word16)((missedSamples/audioChannel * 100) / plFreqHz);
+
+        // Move the timestamps to keep them aligned with the shifted audio.
+        memmove(_inTimestamp, _inTimestamp + missed10MsecBlocks,
+            (_inTimestampIxWrite - missed10MsecBlocks) * sizeof(WebRtc_UWord32));
+        _inTimestampIxWrite -= missed10MsecBlocks;
+        _inTimestamp[_inTimestampIxWrite] = timestamp;
+        _inTimestampIxWrite++;
+
+        // Buffer is full
+        _inAudioIxWrite = AUDIO_BUFFER_SIZE_W16;
+        IncreaseNoMissedSamples(missedSamples);
+        _isAudioBuffFresh = false;
+        return -missedSamples;
+    }
+    // Normal path: append samples and the block's timestamp.
+    memcpy(_inAudio + _inAudioIxWrite, data, lengthSmpl*audioChannel * sizeof(WebRtc_Word16));
+    _inAudioIxWrite += lengthSmpl*audioChannel;
+
+    assert(_inTimestampIxWrite < TIMESTAMP_BUFFER_SIZE_W32);
+    assert(_inTimestampIxWrite >= 0);
+
+    _inTimestamp[_inTimestampIxWrite] = timestamp;
+    _inTimestampIxWrite++;
+    _isAudioBuffFresh = false;
+    return 0;
+}
+
+// Thread-safe entry point for encoding: takes the codec write lock and
+// the NetEQ decode read lock, then delegates to EncodeSafe.
+WebRtc_Word16
+ACMGenericCodec::Encode(
+    WebRtc_UWord8*         bitStream,
+    WebRtc_Word16*         bitStreamLenByte,
+    WebRtc_UWord32*        timeStamp,
+    WebRtcACMEncodingType* encodingType)
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    ReadLockScoped lockNetEq(*_netEqDecodeLock);
+    return EncodeSafe(bitStream, bitStreamLenByte,
+        timeStamp, encodingType);
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::EncodeSafe(
+    WebRtc_UWord8*         bitStream,
+    WebRtc_Word16*         bitStreamLenByte,
+    WebRtc_UWord32*        timeStamp,
+    WebRtcACMEncodingType* encodingType)
+{
+    // Do we have enough data to encode?
+    // we wait until we have a full frame to encode.
+    if(_inAudioIxWrite < _frameLenSmpl*_noChannels)
+    {
+        // There is not enough audio
+        *timeStamp = 0;
+        *bitStreamLenByte = 0;
+        // Doesn't really matter what this parameter set to
+        *encodingType = kNoEncoding;
+        return 0;
+    }
+
+    // Not all codecs accept the whole frame to be pushed into
+    // encoder at once.
+    const WebRtc_Word16 myBasicCodingBlockSmpl =
+        ACMCodecDB::BasicCodingBlock(_codecID);
+    if((myBasicCodingBlockSmpl < 0) ||
+        (!_encoderInitialized) ||
+        (!_encoderExist))
+    {
+        // This should not happen
+        *timeStamp = 0;
+        *bitStreamLenByte = 0;
+        *encodingType = kNoEncoding;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "EncodeSafe: error, basic coding sample block is negative");
+        return -1;
+    }
+
+    // This makes the internal encoder read from the begining of the buffer
+    _inAudioIxRead = 0;
+    *timeStamp = _inTimestamp[0];
+
+    // Process the audio through VAD the function doesn't set _vadLabels.
+    // If VAD is disabled all labels are set to ONE (active)
+    WebRtc_Word16 status = 0;
+    WebRtc_Word16 dtxProcessedSamples = 0;
+
+    status = ProcessFrameVADDTX(bitStream, bitStreamLenByte,
+        &dtxProcessedSamples);
+
+    if(status < 0)
+    {
+        *timeStamp = 0;
+        *bitStreamLenByte = 0;
+        *encodingType = kNoEncoding;
+    }
+    else
+    {
+        if(dtxProcessedSamples > 0)
+        {
+            // Dtx have processed some samples may or may not a bit-stream
+            // is generated we should not do any encoding (normally there
+            // will be not enough data)
+
+            // Setting the following makes that the move of audio data
+            // and timestamps happen correctly
+            _inAudioIxRead = dtxProcessedSamples;
+            // This will let the owner of ACMGenericCodec to know that the
+            // generated bit-stream is DTX to use correct payload type
+            WebRtc_UWord16 sampFreqHz;
+            EncoderSampFreq(sampFreqHz);
+            if (sampFreqHz == 8000) {
+                *encodingType = kPassiveDTXNB;
+            } else if (sampFreqHz == 16000) {
+                *encodingType = kPassiveDTXWB;
+            } else if (sampFreqHz == 32000) {
+                *encodingType = kPassiveDTXSWB;
+            } else {
+                status = -1;
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "EncodeSafe: Wrong sampling frequency for DTX.");
+            }
+
+            // Transport empty frame if we have an empty bitstream
+            if ((*bitStreamLenByte == 0)
+                && (_sentCNPrevious || ((_inAudioIxWrite - _inAudioIxRead) <= 0))
+                )
+            {
+                // Makes sure we transmit an empty frame
+                *bitStreamLenByte = 1;
+                *encodingType = kNoEncoding;
+            }
+            _sentCNPrevious = true;
+        }
+        else
+        {
+            _sentCNPrevious = false;
+            // This will let the caller of the method to know if the frame is
+            // Active or non-Active The caller of the method knows that the
+            // stream is encoded by codec and can use the info for callbacks,
+            // if any registered.
+            if(myBasicCodingBlockSmpl == 0)
+            {
+                // This codec can handle all allowed frame sizes as basic
+                // coding block
+                status = InternalEncode(bitStream, bitStreamLenByte);
+
+                if(status < 0)
+                {
+                    // TODO:
+                    // Maybe reseting the encoder to be fresh for the next
+                    // frame
+                    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                        "EncodeSafe: error in internalEncode");
+                    *bitStreamLenByte = 0;
+                    *encodingType = kNoEncoding;
+                }
+            }
+            else
+            {
+                // A basic-coding-block for this codec is defined so we loop
+                // over the audio with the steps of the basic-coding-block.
+                // It is not necessary that in each itteration
+                WebRtc_Word16 tmpBitStreamLenByte;
+
+                // Reset the variables which will be increamented in the loop
+                *bitStreamLenByte = 0;
+                bool done = false;
+                while(!done)
+                {
+                    status = InternalEncode(&bitStream[*bitStreamLenByte],
+                        &tmpBitStreamLenByte);
+                    *bitStreamLenByte += tmpBitStreamLenByte;
+
+                    // Guard Against errors and too large payloads
+                    if((status < 0) ||
+                        (*bitStreamLenByte > MAX_PAYLOAD_SIZE_BYTE))
+                    {
+                        // Error has happened if we are in the middle of a full
+                        // frame we have to exit. Before exiting, whatever bits
+                        // are in the buffer are probably corruptred. Anyways
+                        // we ignore them.
+                        *bitStreamLenByte = 0;
+                        *encodingType = kNoEncoding;
+                        // We might have come here because of the second
+                        // condition.
+                        status = -1;
+                         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
+                            _uniqueID, "EncodeSafe: error in InternalEncode");
+                        // break from the loop
+                        break;
+                    }
+
+                    done = _inAudioIxRead >= _frameLenSmpl;
+                }
+            }
+            if(status >= 0)
+            {
+                *encodingType = (_vadLabel[0] == 1)?
+                kActiveNormalEncoded:kPassiveNormalEncoded;
+                // Transport empty frame if we have an empty bitsteram
+                if ((*bitStreamLenByte == 0) && ((_inAudioIxWrite - _inAudioIxRead) <= 0))
+                {
+                    // Makes sure we transmit an empty frame
+                    *bitStreamLenByte = 1;
+                    *encodingType = kNoEncoding;
+                }
+            }
+        }
+    }
+
+    // Move the timestampe buffer according to the number of 10 ms blocks
+    // which are read.
+    WebRtc_UWord16 sampFreqHz;
+    EncoderSampFreq(sampFreqHz);
+
+    WebRtc_Word16 num10MsecBlocks =
+            (WebRtc_Word16)((_inAudioIxRead/_noChannels * 100) / sampFreqHz);
+    if(_inTimestampIxWrite > num10MsecBlocks)
+    {
+        memmove(_inTimestamp, _inTimestamp + num10MsecBlocks,
+            (_inTimestampIxWrite - num10MsecBlocks) * sizeof(WebRtc_Word32));
+    }
+    _inTimestampIxWrite -= num10MsecBlocks;
+
+    // We have to move the audio that is not encoded to the beginning
+    // of the buffer and accordingly adjust the read and write indices.
+    if(_inAudioIxRead < _inAudioIxWrite)
+    {
+        memmove(_inAudio, &_inAudio[_inAudioIxRead],
+            (_inAudioIxWrite - _inAudioIxRead)*sizeof(WebRtc_Word16));
+    }
+
+    _inAudioIxWrite -= _inAudioIxRead;
+
+    _inAudioIxRead = 0;
+    _lastEncodedTimestamp = *timeStamp;
+    return (status < 0) ? (-1):(*bitStreamLenByte);
+}
+
+WebRtc_Word16
+ACMGenericCodec::Decode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16  bitStreamLenByte,
+    WebRtc_Word16* audio,
+    WebRtc_Word16* audioSamples,
+    WebRtc_Word8*  speechType)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return DecodeSafe(bitStream, bitStreamLenByte, audio,
+        audioSamples, speechType);
+}
+
+bool
+ACMGenericCodec::EncoderInitialized()
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return _encoderInitialized;
+}
+
+bool
+ACMGenericCodec::DecoderInitialized()
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return _decoderInitialized;
+}
+
+
+WebRtc_Word32
+ACMGenericCodec::RegisterInNetEq(
+    ACMNetEQ*   netEq,
+    const CodecInst& codecInst)
+{
+    WebRtcNetEQ_CodecDef codecDef;
+    WriteLockScoped wl(_codecWrapperLock);
+
+    if(CodecDef(codecDef, codecInst) < 0)
+    {
+        // Failed to register
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "RegisterInNetEq: error, failed to register");
+        _registeredInNetEq = false;
+        return -1;
+    }
+    else
+    {
+        if(netEq->AddCodec(&codecDef, _isMaster) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "RegisterInNetEq: error, failed to add codec");
+            _registeredInNetEq = false;
+            return -1;
+        }
+        // Registered
+        _registeredInNetEq = true;
+        return 0;
+    }
+}
+
+WebRtc_Word16
+ACMGenericCodec::EncoderParams(
+    WebRtcACMCodecParams* encParams)
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return EncoderParamsSafe(encParams);
+}
+
+WebRtc_Word16
+ACMGenericCodec::EncoderParamsSafe(
+    WebRtcACMCodecParams* encParams)
+{
+    // Codec parameters are valid only if the encoder is initialized
+    if(_encoderInitialized)
+    {
+        WebRtc_Word32 currentRate;
+        memcpy(encParams, &_encoderParams, sizeof(WebRtcACMCodecParams));
+        currentRate = encParams->codecInstant.rate;
+        CurrentRate(currentRate);
+        encParams->codecInstant.rate = currentRate;
+        return 0;
+    }
+    else
+    {
+        encParams->codecInstant.plname[0] = '\0';
+        encParams->codecInstant.pltype    = -1;
+        encParams->codecInstant.pacsize   = 0;
+        encParams->codecInstant.rate      = 0;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "EncoderParamsSafe: error, encoder not initialized");
+        return -1;
+    }
+}
+
+bool
+ACMGenericCodec::DecoderParams(
+    WebRtcACMCodecParams* decParams,
+    const WebRtc_UWord8   payloadType)
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return DecoderParamsSafe(decParams, payloadType);
+}
+
+bool
+ACMGenericCodec::DecoderParamsSafe(
+    WebRtcACMCodecParams* decParams,
+    const WebRtc_UWord8   payloadType)
+{
+    // Decoder parameters are valid only if decoder is initialized
+    if(_decoderInitialized)
+    {
+        if(payloadType == _decoderParams.codecInstant.pltype)
+        {
+            memcpy(decParams, &_decoderParams, sizeof(WebRtcACMCodecParams));
+            return true;
+        }
+    }
+
+    decParams->codecInstant.plname[0] = '\0';
+    decParams->codecInstant.pltype    = -1;
+    decParams->codecInstant.pacsize   = 0;
+    decParams->codecInstant.rate      = 0;
+    return false;
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetEncoder()
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    ReadLockScoped lockNetEq(*_netEqDecodeLock);
+    return ResetEncoderSafe();
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetEncoderSafe()
+{
+    if(!_encoderExist || !_encoderInitialized)
+    {
+        // We don't reset if doesn't exists or not initialized yet
+        return 0;
+    }
+
+    _inAudioIxWrite     = 0;
+    _inAudioIxRead      = 0;
+    _inTimestampIxWrite = 0;
+    _noMissedSamples    = 0;
+    _isAudioBuffFresh   = true;
+    memset(_inAudio, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+    memset(_inTimestamp, 0, TIMESTAMP_BUFFER_SIZE_W32 * sizeof(WebRtc_Word32));
+
+    // Store DTX/VAD params
+    bool enableVAD = _vadEnabled;
+    bool enableDTX = _dtxEnabled;
+    ACMVADMode mode = _vadMode;
+
+    // Reset the encoder
+    if(InternalResetEncoder() < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "ResetEncoderSafe: error in reset encoder");
+        return -1;
+    }
+
+    // Disable DTX & VAD this deletes the states
+    // we like to have fresh start
+    DisableDTX();
+    DisableVAD();
+
+    // Set DTX/VAD
+    return SetVADSafe(enableDTX, enableVAD, mode);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InternalResetEncoder()
+{
+    // For most of the codecs it is sufficient to
+    // call their internal initialization.
+    // There are some exceptions.
+    // ----
+    // For iSAC we don't want to lose BWE history,
+    // so for iSAC we have to over-write this function.
+    // ----
+    return InternalInitEncoder(&_encoderParams);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitEncoder(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    ReadLockScoped lockNetEq(*_netEqDecodeLock);
+    return InitEncoderSafe(codecParams, forceInitialization);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitEncoderSafe(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    // Check if we got a valid set of parameters
+    int mirrorID;
+    int codecNumber =
+        ACMCodecDB::CodecNumber(&(codecParams->codecInstant), &mirrorID);
+
+    if(codecNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: error, codec number negative");
+        return -1;
+    }
+    // Check if the parameters are for this codec
+    if((_codecID >= 0) && (_codecID != codecNumber) && (_codecID != mirrorID))
+    {
+        // The current codec is not the same as the one given by codecParams
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: current codec is not the same as the one given by codecParams");
+        return -1;
+    }
+
+    if(!CanChangeEncodingParam(codecParams->codecInstant))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: cannot change encoding parameters");
+        return -1;
+    }
+
+    if(_encoderInitialized && !forceInitialization)
+    {
+        // The encoder is already initialized
+        return 0;
+    }
+    WebRtc_Word16 status;
+    if(!_encoderExist)
+    {
+        _encoderInitialized = false;
+        status = CreateEncoder();
+        if(status < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: cannot create encoder");
+            return -1;
+        }
+        else
+        {
+            _encoderExist = true;
+        }
+    }
+    _frameLenSmpl = (codecParams->codecInstant).pacsize;
+    _noChannels = codecParams->codecInstant.channels;
+    status = InternalInitEncoder(codecParams);
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: error in init encoder");
+        _encoderInitialized = false;
+        return -1;
+    }
+    else
+    {
+        memcpy(&_encoderParams, codecParams, sizeof(WebRtcACMCodecParams));
+        _encoderInitialized = true;
+        if(_inAudio == NULL)
+        {
+            _inAudio = new WebRtc_Word16[AUDIO_BUFFER_SIZE_W16];
+            if(_inAudio == NULL)
+            {
+                return -1;
+            }
+            memset(_inAudio, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+        }
+        if(_inTimestamp == NULL)
+        {
+            _inTimestamp = new WebRtc_UWord32[TIMESTAMP_BUFFER_SIZE_W32];
+            if(_inTimestamp == NULL)
+            {
+                return -1;
+            }
+            memset(_inTimestamp, 0, sizeof(WebRtc_UWord32) *
+                TIMESTAMP_BUFFER_SIZE_W32);
+        }
+        _isAudioBuffFresh = true;
+    }
+    status = SetVADSafe(codecParams->enableDTX, codecParams->enableVAD,
+        codecParams->vadMode);
+
+    return status;
+}
+
// Hook allowing codec subclasses to veto a change of encoding
// parameters (called from InitEncoderSafe before re-initialization).
// The base implementation accepts any change unconditionally.
bool
ACMGenericCodec::CanChangeEncodingParam(
    CodecInst& /*codecInst*/)
{
    return true;
}
+
+WebRtc_Word16
+ACMGenericCodec::InitDecoder(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    WriteLockScoped lockCodc(_codecWrapperLock);
+    WriteLockScoped lockNetEq(*_netEqDecodeLock);
+    return InitDecoderSafe(codecParams, forceInitialization);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitDecoderSafe(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    int mirrorID;
+    // Check if we got a valid set of parameters
+    int codecNumber =
+        ACMCodecDB::ReceiverCodecNumber(&codecParams->codecInstant, &mirrorID);
+
+    if(codecNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "InitDecoderSafe: error, invalid codec number");
+        return -1;
+    }
+    // Check if the parameters are for this codec
+    if((_codecID >= 0) && (_codecID != codecNumber) && (_codecID != mirrorID))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "InitDecoderSafe: current codec is not the same as the one given "
+                    "by codecParams");
+        // The current codec is not the same as the one given by codecParams
+        return -1;
+    }
+
+
+    if(_decoderInitialized && !forceInitialization)
+    {
+        // The encoder is already initialized
+        return 0;
+    }
+
+    WebRtc_Word16 status;
+    if(!_decoderExist)
+    {
+        _decoderInitialized = false;
+        status = CreateDecoder();
+        if(status < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "InitDecoderSafe: cannot create decoder");
+            return -1;
+        }
+        else
+        {
+            _decoderExist = true;
+        }
+    }
+
+    status = InternalInitDecoder(codecParams);
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "InitDecoderSafe: cannot init decoder");
+        _decoderInitialized = false;
+        return -1;
+    }
+    else
+    {
+        // Store the parameters
+        SaveDecoderParamSafe(codecParams);
+        _decoderInitialized = true;
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetDecoder(WebRtc_Word16 payloadType)
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    WriteLockScoped lockNetEq(*_netEqDecodeLock);
+    return ResetDecoderSafe(payloadType);
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetDecoderSafe(WebRtc_Word16 payloadType)
+{
+    WebRtcACMCodecParams decoderParams;
+    if(!_decoderExist || !_decoderInitialized)
+    {
+        return 0;
+    }
+    // Initialization of the decoder should work for all
+    // the codec. If there is a codec that has to keep
+    // some states then we need to define a virtual and
+    // overwrite in that codec
+    DecoderParamsSafe(&decoderParams, (WebRtc_UWord8) payloadType);
+    return InternalInitDecoder(&decoderParams);
+}
+
+void
+ACMGenericCodec::ResetNoMissedSamples()
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    _noMissedSamples = 0;
+}
+
// Adds noSamples to the running count of missed input samples.
// NOTE(review): unlike ResetNoMissedSamples()/NoMissedSamples() this
// takes no lock — presumably callers already hold _codecWrapperLock;
// confirm at call sites.
void
ACMGenericCodec::IncreaseNoMissedSamples(
    const WebRtc_Word16 noSamples)
{
    _noMissedSamples += noSamples;
}
+
+// Get the number of missed samples, this can be public
+WebRtc_UWord32
+ACMGenericCodec::NoMissedSamples() const
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    return _noMissedSamples;
+}
+void
+ACMGenericCodec::DestructEncoder()
+{
+    WriteLockScoped wl(_codecWrapperLock);
+
+    // Disable VAD and delete the instance
+    if(_ptrVADInst != NULL)
+    {
+        WebRtcVad_Free(_ptrVADInst);
+        _ptrVADInst = NULL;
+    }
+    _vadEnabled = false;
+    _vadMode = VADNormal;
+
+    //Disable DTX and delete the instance
+    _dtxEnabled = false;
+    if(_ptrDTXInst != NULL)
+    {
+        WebRtcCng_FreeEnc(_ptrDTXInst);
+        _ptrDTXInst = NULL;
+    }
+    _numLPCParams = kNewCNGNumPLCParams;
+
+    DestructEncoderSafe();
+}
+
+void
+ACMGenericCodec::DestructDecoder()
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    _decoderParams.codecInstant.pltype = -1;
+    DestructDecoderSafe();
+}
+
+WebRtc_Word16
+ACMGenericCodec::SetBitRate(
+    const WebRtc_Word32 bitRateBPS)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return SetBitRateSafe(bitRateBPS);
+}
+
+WebRtc_Word16
+ACMGenericCodec::SetBitRateSafe(
+    const WebRtc_Word32 bitRateBPS)
+{
+    // If the codec can change the bit-rate this function
+    // should be overwritten, otherewise the only acceptable
+    // value is the one that is in database.
+    CodecInst codecParams;
+    if(ACMCodecDB::Codec(_codecID, &codecParams) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "SetBitRateSafe: error in ACMCodecDB::Codec");
+        return -1;
+    }
+    if(codecParams.rate != bitRateBPS)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "SetBitRateSafe: rate value is not acceptable");
+        return -1;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+WebRtc_Word32
+ACMGenericCodec::GetEstimatedBandwidth()
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return GetEstimatedBandwidthSafe();
+}
+
// Default implementation: bandwidth estimation is unsupported.
// Only iSAC overrides this to report an estimate; all other codecs
// return -1.
WebRtc_Word32
ACMGenericCodec::GetEstimatedBandwidthSafe()
{
    // All codecs but iSAC will return -1
    return -1;
}
+
+WebRtc_Word32
+ACMGenericCodec::SetEstimatedBandwidth(
+    WebRtc_Word32 estimatedBandwidth)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return SetEstimatedBandwidthSafe(estimatedBandwidth);
+}
+
// Default implementation: setting an estimated bandwidth is
// unsupported. Only iSAC overrides this; all other codecs return -1.
WebRtc_Word32
ACMGenericCodec::SetEstimatedBandwidthSafe(
    WebRtc_Word32 /*estimatedBandwidth*/)
{
    // All codecs but iSAC will return -1
    return -1;
}
+
+WebRtc_Word32
+ACMGenericCodec::GetRedPayload(
+    WebRtc_UWord8* redPayload,
+    WebRtc_Word16* payloadBytes)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return GetRedPayloadSafe(redPayload, payloadBytes);
+}
+
// Default implementation: RED (redundant coding) is unsupported; codecs
// that produce a RED payload override this.
WebRtc_Word32
ACMGenericCodec::GetRedPayloadSafe(
    WebRtc_UWord8* /* redPayload   */,
    WebRtc_Word16* /* payloadBytes */)
{
    return -1; // Do nothing by default
}
+
+WebRtc_Word16
+ACMGenericCodec::CreateEncoder()
+{
+    WebRtc_Word16 status = 0;
+    if(!_encoderExist)
+    {
+        status = InternalCreateEncoder();
+        // We just created the codec and obviously it is not initialized
+        _encoderInitialized = false;
+    }
+
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CreateEncoder: error in internal create encoder");
+        _encoderExist = false;
+    }
+    else
+    {
+        _encoderExist = true;
+    }
+    return status;
+}
+
+WebRtc_Word16
+ACMGenericCodec::CreateDecoder()
+{
+    WebRtc_Word16 status = 0;
+    if(!_decoderExist)
+    {
+        status = InternalCreateDecoder();
+        // Decoder just created and obviously it is not initialized
+        _decoderInitialized = false;
+    }
+
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CreateDecoder: error in internal create decoder");
+        _decoderExist = false;
+    }
+    else
+    {
+        _decoderExist = true;
+    }
+    return status;
+}
+
+
+void ACMGenericCodec::DestructEncoderInst(void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WriteLockScoped lockCodec(_codecWrapperLock);
+        ReadLockScoped lockNetEq(*_netEqDecodeLock);
+        InternalDestructEncoderInst(ptrInst);
+    }
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::AudioBuffer(
+    WebRtcACMAudioBuff& audioBuff)
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    memcpy(audioBuff.inAudio, _inAudio,
+        AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+    audioBuff.inAudioIxRead = _inAudioIxRead;
+    audioBuff.inAudioIxWrite = _inAudioIxWrite;
+    memcpy(audioBuff.inTimestamp, _inTimestamp,
+        TIMESTAMP_BUFFER_SIZE_W32*sizeof(WebRtc_UWord32));
+    audioBuff.inTimestampIxWrite = _inTimestampIxWrite;
+    audioBuff.lastTimestamp = _lastTimestamp;
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::SetAudioBuffer(
+    WebRtcACMAudioBuff& audioBuff)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    memcpy(_inAudio, audioBuff.inAudio,
+        AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+    _inAudioIxRead = audioBuff.inAudioIxRead;
+    _inAudioIxWrite = audioBuff.inAudioIxWrite;
+    memcpy(_inTimestamp, audioBuff.inTimestamp,
+        TIMESTAMP_BUFFER_SIZE_W32*sizeof(WebRtc_UWord32));
+    _inTimestampIxWrite = audioBuff.inTimestampIxWrite;
+    _lastTimestamp = audioBuff.lastTimestamp;
+    _isAudioBuffFresh = false;
+    return 0;
+}
+
+
+WebRtc_UWord32
+ACMGenericCodec::LastEncodedTimestamp() const
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    return _lastEncodedTimestamp;
+}
+
+
+WebRtc_UWord32
+ACMGenericCodec::EarliestTimestamp() const
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    return _inTimestamp[0];
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::SetVAD(
+    const bool       enableDTX,
+    const bool       enableVAD,
+    const ACMVADMode mode)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    return SetVADSafe(enableDTX, enableVAD, mode);
+}
+
+
// Applies the requested DTX/VAD configuration, with special handling
// for codecs that ship their own (internal) DTX and for G.729 Annex B.
//
// Return values (callers rely on all three):
//   0  — configuration applied as requested,
//   1  — DTX was requested without internal DTX, so VAD was force-
//        enabled even though the caller passed enableVAD == false,
//  -1  — enabling DTX or VAD failed.
WebRtc_Word16
ACMGenericCodec::SetVADSafe(
    const bool       enableDTX,
    const bool       enableVAD,
    const ACMVADMode mode)
{
    if(enableDTX)
    {
        // Make G729 AnnexB a special case
        // For G.729 without internal DTX, call the base-class EnableDTX
        // explicitly (bypassing any virtual override).
        if (!STR_CASE_CMP(_encoderParams.codecInstant.plname, "G729") && !_hasInternalDTX)
        {
            if (ACMGenericCodec::EnableDTX() < 0)
            {
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
                "SetVADSafe: error in enable DTX");
                return -1;
            }
        }
        else
        {
            // All other codecs go through the (possibly overridden)
            // virtual EnableDTX.
            if(EnableDTX() < 0)
            {
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
                    "SetVADSafe: error in enable DTX");
                return -1;
            }
        }

        if(_hasInternalDTX)
        {
            // Codec has internal DTX, practically we don't need WebRtc VAD,
            // however, we let the user to turn it on if they need call-backs
            // on silence. Store VAD mode for future even if VAD is off.
            _vadMode = mode;
            return (enableVAD)? EnableVAD(mode):DisableVAD();
        }
        else
        {
            // Codec does not have internal DTX so enabling DTX requires an
            // active VAD. 'enableDTX == true' overwrites VAD status.
            if(EnableVAD(mode) < 0)
            {
                // If we cannot create VAD we have to disable DTX
                if(!_vadEnabled)
                {
                    DisableDTX();
                }
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
                    "SetVADSafe: error in enable VAD");
                return -1;
            }

            // Return '1', to let the caller know VAD was turned on, even if the
            // function was called with VAD='false'
            if (enableVAD == false) {
                return 1;
            } else {
                return 0;
            }
        }
    }
    else
    {
        // DTX off. Make G729 AnnexB a special case
        // (base-class DisableDTX for G.729 without internal DTX).
        if (!STR_CASE_CMP(_encoderParams.codecInstant.plname, "G729") && !_hasInternalDTX)
        {
            ACMGenericCodec::DisableDTX();
        }
        else
        {
            DisableDTX();
        }
        // VAD may still be toggled independently of DTX.
        return (enableVAD)? EnableVAD(mode):DisableVAD();
    }
}
+
+WebRtc_Word16
+ACMGenericCodec::EnableDTX()
+{
+    if(_hasInternalDTX)
+    {
+        // We should not be here if we have internal DTX
+        // this function should be overwritten by the derived
+        // class in this case
+        return -1;
+    }
+    if(!_dtxEnabled)
+    {
+        if(WebRtcCng_CreateEnc(&_ptrDTXInst) < 0)
+        {
+            _ptrDTXInst = NULL;
+            return -1;
+        }
+        WebRtc_UWord16 freqHz;
+        EncoderSampFreq(freqHz);
+        if(WebRtcCng_InitEnc(_ptrDTXInst, (WebRtc_Word16)freqHz,
+            ACM_SID_INTERVAL_MSEC, _numLPCParams) < 0)
+        {
+            // Couldn't initialize, has to return -1, and free the memory
+            WebRtcCng_FreeEnc(_ptrDTXInst);
+            _ptrDTXInst = NULL;
+            return -1;
+        }
+        _dtxEnabled = true;
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::DisableDTX()
+{
+    if(_hasInternalDTX)
+    {
+        // We should not be here if we have internal DTX
+        // this function should be overwritten by the derived
+        // class in this case
+        return -1;
+    }
+    if(_ptrDTXInst != NULL)
+    {
+        WebRtcCng_FreeEnc(_ptrDTXInst);
+        _ptrDTXInst = NULL;
+    }
+    _dtxEnabled = false;
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::EnableVAD(
+    ACMVADMode mode)
+{
+    if((mode < VADNormal) || (mode > VADVeryAggr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "EnableVAD: error in VAD mode range");
+        return -1;
+    }
+
+    if(!_vadEnabled)
+    {
+        if(WebRtcVad_Create(&_ptrVADInst) < 0)
+        {
+            _ptrVADInst = NULL;
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "EnableVAD: error in create VAD");
+            return -1;
+        }
+        if(WebRtcVad_Init(_ptrVADInst) < 0)
+        {
+            WebRtcVad_Free(_ptrVADInst);
+            _ptrVADInst = NULL;
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "EnableVAD: error in init VAD");
+            return -1;
+        }
+    }
+
+    // Set the vad mode to the given value
+    if(WebRtcVad_set_mode(_ptrVADInst, mode) < 0)
+    {
+        // We failed to set the mode and we have to return -1. If
+        // we already have a working VAD (_vadEnabled == true) then
+        // we leave it to work. otherwise, the following will be
+        // executed.
+        if(!_vadEnabled)
+        {
+            // We just created the instance but cannot set the mode
+            // we have to free the memomry.
+            WebRtcVad_Free(_ptrVADInst);
+            _ptrVADInst = NULL;
+        }
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _uniqueID,
+            "EnableVAD: failed to set the VAD mode");
+        return -1;
+    }
+    _vadMode = mode;
+    _vadEnabled = true;
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::DisableVAD()
+{
+    if(_ptrVADInst != NULL)
+    {
+        WebRtcVad_Free(_ptrVADInst);
+        _ptrVADInst = NULL;
+    }
+    _vadEnabled = false;
+    return 0;
+}
+
+WebRtc_Word32
+ACMGenericCodec::ReplaceInternalDTX(
+    const bool replaceInternalDTX)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    return ReplaceInternalDTXSafe(replaceInternalDTX);
+}
+
// Default implementation: replacing a codec's internal DTX with the
// generic WebRTC DTX is unsupported; codecs that support it override
// this method.
WebRtc_Word32
ACMGenericCodec::ReplaceInternalDTXSafe(
    const bool /* replaceInternalDTX */)
{
    return -1;
}
+
+WebRtc_Word32
+ACMGenericCodec::IsInternalDTXReplaced(
+    bool* internalDTXReplaced)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    return IsInternalDTXReplacedSafe(internalDTXReplaced);
+}
+
// Default implementation: reports that internal DTX has not been
// replaced. Codecs supporting DTX replacement override this.
WebRtc_Word32
ACMGenericCodec::IsInternalDTXReplacedSafe(
    bool* internalDTXReplaced)
{
    *internalDTXReplaced = false;
    return 0;
}
+
+WebRtc_Word16
+ACMGenericCodec::ProcessFrameVADDTX(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte,
+    WebRtc_Word16* samplesProcessed)
+{
+    if(!_vadEnabled)
+    {
+        // VAD not enabled, set all vadLable[] to 1 (speech detected)
+        for(WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++)
+        {
+            _vadLabel[n] = 1;
+        }
+        *samplesProcessed = 0;
+        return 0;
+    }
+    WebRtc_UWord16 freqHz;
+    EncoderSampFreq(freqHz);
+
+    // Calculate number of samples in 10 ms blocks, and number ms in one frame
+    WebRtc_Word16 samplesIn10Msec = (WebRtc_Word16)(freqHz / 100);
+    WebRtc_Word32 frameLenMsec = (((WebRtc_Word32)_frameLenSmpl * 1000) / freqHz);
+    WebRtc_Word16 status;
+
+    // Vector for storing maximum 30 ms of mono audio at 32 kHz
+    WebRtc_Word16 audio[960];
+
+    // Calculate number of VAD-blocks to process, and number of samples in each block.
+    int noSamplesToProcess[2];
+    if (frameLenMsec == 40)
+    {
+        // 20 ms in each VAD block
+        noSamplesToProcess[0] = noSamplesToProcess[1] = 2*samplesIn10Msec;
+    }
+    else
+    {
+        // For 10-30 ms framesizes, second VAD block will be size zero ms,
+        // for 50 and 60 ms first VAD block will be 30 ms.
+        noSamplesToProcess[0] = (frameLenMsec > 30)? 3*samplesIn10Msec : _frameLenSmpl;
+        noSamplesToProcess[1] = _frameLenSmpl-noSamplesToProcess[0];
+    }
+
+    int offSet = 0;
+    int loops = (noSamplesToProcess[1]>0) ? 2 : 1;
+    for (int i=0; i<loops; i++) {
+        // If stereo, calculate mean of the two channels
+        if(_noChannels == 2) {
+            for (int j=0; j<noSamplesToProcess[i]; j++) {
+                audio[j] = (_inAudio[(offSet+j)*2]+_inAudio[(offSet+j)*2+1])/2;
+        }
+        offSet = noSamplesToProcess[0];
+        } else {
+            // Mono, copy data from _inAudio to continue work on
+            memcpy(audio, _inAudio, sizeof(WebRtc_Word16)*noSamplesToProcess[i]);
+        }
+
+        // Call VAD
+        status = WebRtcVad_Process(_ptrVADInst, (WebRtc_Word16)freqHz,
+            audio, noSamplesToProcess[i]);
+
+        _vadLabel[i] = status;
+
+        if(status < 0)
+        {
+            // This will force that the data be removed from the buffer
+            *samplesProcessed += noSamplesToProcess[i];
+            return -1;
+        }
+
+        // If VAD decision non-active, update DTX. NOTE! We only do this if the first part of
+        // a frame gets the VAD decision "inactive". Otherwise DTX might say it is time to
+        // transmit SID frame, but we will encode the whole frame, because the first part is
+        // active.
+        *samplesProcessed = 0;
+        if((status == 0) && (i==0) && _dtxEnabled && !_hasInternalDTX)
+        {
+            WebRtc_Word16 bitStreamLen;
+            WebRtc_Word16 num10MsecFrames = noSamplesToProcess[i] / samplesIn10Msec;
+            *bitStreamLenByte = 0;
+            for(WebRtc_Word16 n = 0; n < num10MsecFrames; n++)
+            {
+                // This block is (passive) && (vad enabled)
+                status = WebRtcCng_Encode(_ptrDTXInst, &audio[n*samplesIn10Msec],
+                    samplesIn10Msec, bitStream, &bitStreamLen, 0);
+                if (status < 0) {
+                    return -1;
+                }
+
+                *samplesProcessed += samplesIn10Msec*_noChannels;
+
+                // bitStreamLen will only be > 0 once per 100 ms
+                *bitStreamLenByte += bitStreamLen;
+            }
+
+
+            // Check if all samples got processed by the DTX
+            if(*samplesProcessed != noSamplesToProcess[i]*_noChannels) {
+                // Set to zero since something went wrong. Shouldn't happen.
+                *samplesProcessed = 0;
+            }
+        }
+
+        if(*samplesProcessed > 0)
+        {
+            // The block contains inactive speech, and is processed by DTX.
+            // Discontinue running VAD.
+            break;
+        }
+    }
+
+    return status;
+}
+
+// Returns how many more input samples are required before a full frame
+// can be encoded; zero once the buffer already holds at least one frame.
+WebRtc_Word16
+ACMGenericCodec::SamplesLeftToEncode()
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    if(_inAudioIxWrite >= _frameLenSmpl)
+    {
+        return 0;
+    }
+    return _frameLenSmpl - _inAudioIxWrite;
+}
+
+// Removes this codec's decoder from the given NetEQ instance. Calling it
+// while not registered is a harmless no-op. On failure the registration
+// flag stays set, so unregistration can be retried later.
+WebRtc_Word32
+ACMGenericCodec::UnregisterFromNetEq(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    if(!_registeredInNetEq)
+    {
+        // Never registered (or already removed); nothing to do.
+        return 0;
+    }
+    const bool failed = (UnregisterFromNetEqSafe(netEq, payloadType) < 0);
+    _registeredInNetEq = failed;
+    if(failed)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "UnregisterFromNetEq: error, cannot unregister from NetEq");
+        return -1;
+    }
+    return 0;
+}
+
+// Stores the identifier used to tag trace output from this codec instance.
+void ACMGenericCodec::SetUniqueID(const WebRtc_UWord32 id)
+{
+    _uniqueID = id;
+}
+
+// True as long as no audio has ever been pushed into this codec's buffer.
+bool ACMGenericCodec::IsAudioBufferFresh() const
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    const bool fresh = _isAudioBuffFresh;
+    return fresh;
+}
+
+// This function is replaced by codec specific functions for some codecs.
+// Reports, through |sampFreqHz|, the sampling frequency the encoder
+// expects, as recorded for this codec in the codec database.
+WebRtc_Word16
+ACMGenericCodec::EncoderSampFreq(WebRtc_UWord16& sampFreqHz)
+{
+    const WebRtc_Word32 codecFreq = ACMCodecDB::CodecFreq(_codecID);
+    if(codecFreq < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                     "EncoderSampFreq: codec frequency is negative");
+        return -1;
+    }
+    sampFreqHz = (WebRtc_UWord16)codecFreq;
+    return 0;
+}
+
+
+// Base-class stub: configuring the bandwidth estimator only applies to
+// iSAC, so the generic implementation just warns and reports failure.
+WebRtc_Word32
+ACMGenericCodec::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8  /* initFrameSizeMsec */,
+    const WebRtc_UWord16 /* initRateBitPerSec */,
+    const bool           /* enforceFrameSize  */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+        "The send-codec is not iSAC, failed to config iSAC bandwidth estimator.");
+    return -1;
+}
+
+// Base-class stub: the maximum instantaneous rate is an iSAC-only
+// setting, so the generic implementation just warns and reports failure.
+WebRtc_Word32
+ACMGenericCodec::SetISACMaxRate(
+    const WebRtc_UWord32 /* maxRateBitPerSec */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+        "The send-codec is not iSAC, failed to set iSAC max rate.");
+    return -1;
+}
+
+// Base-class stub: the maximum payload size is an iSAC-only setting,
+// so the generic implementation just warns and reports failure.
+WebRtc_Word32
+ACMGenericCodec::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 /* maxPayloadLenBytes */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+        "The send-codec is not iSAC, failed to set iSAC max payload-size.");
+    return -1;
+}
+
+
+// Thread-safe wrapper: takes the codec write lock, then stores the given
+// decoder parameters via SaveDecoderParamSafe().
+void
+ACMGenericCodec::SaveDecoderParam(
+    const WebRtcACMCodecParams* codecParams)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    SaveDecoderParamSafe(codecParams);
+}
+
+
+// Stores the given decoder parameters into _decoderParams. "Safe" suffix
+// follows the file convention: the caller must already hold the codec
+// write lock (see SaveDecoderParam()).
+void
+ACMGenericCodec::SaveDecoderParamSafe(
+    const WebRtcACMCodecParams* codecParams)
+{
+    // Plain struct assignment: equivalent to the previous memcpy of
+    // sizeof(WebRtcACMCodecParams) bytes, but type-checked by the compiler.
+    _decoderParams = *codecParams;
+}
+
+// Base-class stub: most codecs support exactly one sampling rate, so a
+// request to change it is an error here. Codecs that support several
+// rates (e.g. iSAC) override this method; see the header documentation.
+WebRtc_Word16
+ACMGenericCodec::UpdateEncoderSampFreq(
+    WebRtc_UWord16 /* encoderSampFreqHz */)
+{
+    // Typo fixed ("smapling" -> "sampling"); adjacent string literals are
+    // concatenated, replacing the fragile backslash line-continuation that
+    // depended on the next line starting at column zero.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "It is asked for a change in sampling frequency while the "
+        "current send-codec supports only one sampling rate.");
+    return -1;
+}
+
+
+// Sets the _isMaster flag under the codec write lock.
+// NOTE(review): presumably marks this instance as the master (vs. slave)
+// in dual-instance operation -- confirm against the callers.
+void
+ACMGenericCodec::SetIsMaster(
+    bool isMaster)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    _isMaster = isMaster;
+}
+
+
+
+// Base-class stub: building RED payloads is an iSAC-specific feature, so
+// the generic implementation traces an error and reports failure.
+WebRtc_Word16
+ACMGenericCodec::REDPayloadISAC(
+    const WebRtc_Word32  /* isacRate        */,
+    const WebRtc_Word16  /* isacBwEstimate  */,
+    WebRtc_UWord8*       /* payload         */,
+    WebRtc_Word16*       /* payloadLenBytes */)
+{
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Error: REDPayloadISAC is an iSAC specific function");
+    return -1;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_generic_codec.h b/trunk/src/modules/audio_coding/main/source/acm_generic_codec.h
new file mode 100644
index 0000000..7f8ef8c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_generic_codec.h
@@ -0,0 +1,1343 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
+
+#include "acm_common_defs.h"
+#include "audio_coding_module_typedefs.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+
+#define MAX_FRAME_SIZE_10MSEC 6
+
+// forward declaration
+struct WebRtcVadInst;
+struct WebRtcCngEncInst;
+
+namespace webrtc
+{
+
+// forward declaration
+struct CodecInst;
+class  ACMNetEQ;
+
+class ACMGenericCodec
+{
+public:
+    ///////////////////////////////////////////////////////////////////////////
+    // Constructor of the class
+    //
+    ACMGenericCodec();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // Destructor of the class.
+    //
+    virtual ~ACMGenericCodec();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // ACMGenericCodec* CreateInstance();
+    // The function will be used for FEC. It is not implemented yet.
+    //
+    virtual ACMGenericCodec* CreateInstance() = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 Encode()
+    // The function is called to perform an encoding of the audio stored in
+    // audio buffer. An encoding is performed only if enough audio, i.e. equal
+    // to the frame-size of the codec, exist. The audio frame will be processed
+    // by VAD and CN/DTX if required. There are few different cases.
+    //
+    // A) Neither VAD nor DTX is active; the frame is encoded by the encoder.
+    //
+    // B) VAD is enabled but not DTX; in this case the audio is processed by VAD
+    //    and encoded by the encoder. The "*encodingType" will be either
+    //    "activeNormalEncode" or "passiveNormalEncode" if frame is active or
+    //    passive, respectively.
+    //
+    // C) DTX is enabled; if the codec has internal VAD/DTX we just encode the
+    //    frame by the encoder. Otherwise, the frame is passed through VAD and
+    //    if identified as passive, then it will be processed by CN/DTX. If the
+    //    frame is active it will be encoded by the encoder.
+    //
+    // This function acquires the appropriate locks and calls EncodeSafe() for
+    // the actual processing.
+    //
+    // Outputs:
+    //   -bitStream          : a buffer where bit-stream will be written to.
+    //   -bitStreamLenByte   : contains the length of the bit-stream in
+    //                         bytes.
+    //   -timeStamp          : contains the RTP timestamp, this is the
+    //                         sampling time of the first sample encoded
+    //                         (measured in number of samples).
+    //   -encodingType       : contains the type of encoding applied on the
+    //                         audio samples. The alternatives are
+    //                         (c.f. acm_common_types.h)
+    //                         -kNoEncoding:
+    //                            there was not enough data to encode. or
+    //                            some error has happened that we could
+    //                            not do encoding.
+    //                         -kActiveNormalEncoded:
+    //                            the audio frame is active and encoded by
+    //                            the given codec.
+    //                         -kPassiveNormalEncoded:
+    //                            the audio frame is passive but coded with
+    //                            the given codec (NO DTX).
+    //                         -kPassiveDTXWB:
+    //                            The audio frame is passive and used
+    //                            wide-band CN to encode.
+    //                         -kPassiveDTXNB:
+    //                            The audio frame is passive and used
+    //                            narrow-band CN to encode.
+    //
+    // Return value:
+    //   -1 if error is occurred, otherwise the length of the bit-stream in
+    //      bytes.
+    //
+    WebRtc_Word16 Encode(
+        WebRtc_UWord8*         bitStream,
+        WebRtc_Word16*         bitStreamLenByte,
+        WebRtc_UWord32*        timeStamp,
+        WebRtcACMEncodingType* encodingType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 Decode()
+    // This function is used to decode a given bit-stream, without engaging
+    // NetEQ.
+    //
+    // This function acquires the appropriate locks and calls DecodeSafe() for
+    // the actual processing. Please note that this is not functional yet.
+    //
+    // Inputs:
+    //   -bitStream          : a buffer where bit-stream will be read.
+    //   -bitStreamLenByte   : the length of the bit-stream in bytes.
+    //
+    // Outputs:
+    //   -audio              : pointer to a buffer where the audio will written.
+    //   -audioSamples       : number of audio samples out of decoding the given
+    //                         bit-stream.
+    //   -speechType         : speech type (for future use).
+    //
+    // Return value:
+    //   -1 if failed to decode,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 Decode(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // bool EncoderInitialized();
+    //
+    // Return value:
+    //   True if the encoder is successfully initialized,
+    //   false otherwise.
+    //
+    bool EncoderInitialized();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // bool DecoderInitialized();
+    //
+    // Return value:
+    //   True if the decoder is successfully initialized,
+    //   false otherwise.
+    //
+    bool DecoderInitialized();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 EncoderParams()
+    // It is called to get encoder parameters. It will call
+    // EncoderParamsSafe() in turn.
+    //
+    // Output:
+    //   -encParams          : a buffer where the encoder parameters is
+    //                         written to. If the encoder is not
+    //                         initialized this buffer is filled with
+    //                         invalid values
+    // Return value:
+    //   -1 if the encoder is not initialized,
+    //    0 otherwise.
+    //
+    //
+    WebRtc_Word16 EncoderParams(
+        WebRtcACMCodecParams *encParams);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 DecoderParams(...)
+    // It is called to get decoder parameters. It will call DecoderParamsSafe()
+    // in turn.
+    //
+    // Output:
+    //   -decParams          : a buffer where the decoder parameters is
+    //                         written to. If the decoder is not initialized
+    //                         this buffer is filled with invalid values
+    //
+    // Return value:
+    //   -1 if the decoder is not initialized,
+    //    0 otherwise.
+    //
+    //
+    bool DecoderParams(
+        WebRtcACMCodecParams *decParams,
+        const WebRtc_UWord8  payloadType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InitEncoder(...)
+    // This function is called to initialize the encoder with the given
+    // parameters.
+    //
+    // Input:
+    //   -codecParams        : parameters of encoder.
+    //   -forceInitialization: if false the initialization is invoked only if
+    //                         the encoder is not initialized. If true the
+    //                         encoder is forced to (re)initialize.
+    //
+    // Return value:
+    //   0 if could initialize successfully,
+    //  -1 if failed to initialize.
+    //
+    //
+    WebRtc_Word16 InitEncoder(
+        WebRtcACMCodecParams* codecParams,
+        bool                  forceInitialization);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InitDecoder()
+    // This function is called to initialize the decoder with the given
+    // parameters. (c.f. acm_common_defs.h & common_types.h for the
+    // definition of the structure)
+    //
+    // Input:
+    //   -codecParams        : parameters of decoder.
+    //   -forceInitialization: if false the initialization is invoked only
+    //                         if the decoder is not initialized. If true
+    //                         the encoder is forced to(re)initialize.
+    //
+    // Return value:
+    //   0 if could initialize successfully,
+    //  -1 if failed to initialize.
+    //
+    //
+    WebRtc_Word16 InitDecoder(
+        WebRtcACMCodecParams* codecParams,
+        bool                 forceInitialization);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 RegisterInNetEq(...)
+    // This function is called to register the decoder in NetEq, with the given
+    // payload-type.
+    //
+    // Inputs:
+    //   -netEq              : pointer to NetEq Instance
+    //   -codecInst          : instance with of the codec settings of the codec
+    //
+    // Return values
+    //   -1 if failed to register,
+    //    0 if successfully initialized.
+    //
+    WebRtc_Word32 RegisterInNetEq(
+        ACMNetEQ*             netEq,
+        const CodecInst& codecInst);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 Add10MsData(...)
+    // This function is called to add 10 ms of audio to the audio buffer of
+    // the codec.
+    //
+    // Inputs:
+    //   -timeStamp          : the timestamp of the 10 ms audio. the timestamp
+    //                         is the sampling time of the
+    //                         first sample measured in number of samples.
+    //   -data               : a buffer that contains the audio. The codec
+    //                         expects to get the audio in correct sampling
+    //                         frequency
+    //   -length             : the length of the audio buffer
+    //   -audioChannel       : 0 for mono, 1 for stereo (not supported yet)
+    //
+    // Return values:
+    //   -1 if failed
+    //    0 otherwise.
+    //
+    WebRtc_Word32 Add10MsData(
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_Word16* data,
+        const WebRtc_UWord16 length,
+        const WebRtc_UWord8  audioChannel);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_UWord32 NoMissedSamples()
+    // This function returns the number of samples which are overwritten in
+    // the audio buffer. The audio samples are overwritten if the input audio
+    // buffer is full, but Add10MsData() is called. (We might remove this
+    // function if it is not used)
+    //
+    // Return Value:
+    //   Number of samples which are overwritten.
+    //
+    WebRtc_UWord32 NoMissedSamples() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void ResetNoMissedSamples()
+    // This function resets the number of overwritten samples to zero.
+    // (We might remove this function if we remove NoMissedSamples())
+    //
+    void ResetNoMissedSamples();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SetBitRate()
+    // The function is called to set the encoding rate.
+    //
+    // Input:
+    //   -bitRateBPS         : encoding rate in bits per second
+    //
+    // Return value:
+    //   -1 if failed to set the rate, due to invalid input or given
+    //      codec is not rate-adjustable.
+    //    0 if the rate is adjusted successfully
+    //
+    WebRtc_Word16 SetBitRate(const WebRtc_Word32 bitRateBPS);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // DestructEncoderInst()
+    // This API is used in conferencing. It will free the memory that is pointed
+    // by "ptrInst". "ptrInst" is a pointer to encoder instance, created and
+    // filled up by calling EncoderInst(...).
+    //
+    // Inputs:
+    //   -ptrInst            : pointer to an encoder instance to be deleted.
+    //
+    //
+    void DestructEncoderInst(
+        void* ptrInst);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 AudioBuffer()
+    // This is used when synchronization of codecs is required. There are cases
+    // that the audio buffers of two codecs have to be synched. By calling this
+    // function on can get the audio buffer and other related parameters, such
+    // as timestamps...
+    //
+    // Output:
+    //   -audioBuff          : a pointer to WebRtcACMAudioBuff where the audio
+    //                         buffer of this codec will be written to.
+    //
+    // Return value:
+    //   -1 if fails to copy the audio buffer,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 AudioBuffer(
+        WebRtcACMAudioBuff& audioBuff);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_UWord32 EarliestTimestamp()
+    // Returns the timestamp of the first 10 ms in audio buffer. This is used
+    // to identify if a synchronization of two encoders is required.
+    //
+    // Return value:
+    //   timestamp of the first 10 ms audio in the audio buffer.
+    //
+    WebRtc_UWord32 EarliestTimestamp() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SetAudioBuffer()
+    // This function is called to set the audio buffer and the associated
+    // parameters to a given value.
+    //
+    // Return value:
+    //   -1 if fails to copy the audio buffer,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 SetAudioBuffer(WebRtcACMAudioBuff& audioBuff);
+
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SetVAD()
+    // This is called to set VAD & DTX. If the codec has internal DTX that will
+    // be used. If DTX is enabled and the codec does not have internal DTX,
+    // WebRtc-VAD will be used to decide if the frame is active. If DTX is
+    // disabled but VAD is enabled. The audio is passed through VAD to label it
+    // as active or passive, but the frame is  encoded normally. However the
+    // bit-stream is labeled properly so that ACM::Process() can use this
+    // information. In case of failure, the previous states of the VAD & DTX
+    // are kept.
+    //
+    // Inputs:
+    //   -enableDTX          : if true DTX will be enabled otherwise the DTX is
+    //                         disabled. If codec has internal DTX that will be
+    //                         used, otherwise WebRtc-CNG is used. In the latter
+    //                         case VAD is automatically activated.
+    //   -enableVAD          : if true WebRtc-VAD is enabled, otherwise VAD is
+    //                         disabled, except for the case that DTX is enabled
+    //                         but codec doesn't have internal DTX. In this case
+    //                         VAD is enabled regardless of the value of
+    //                         "enableVAD."
+    //   -mode               : this specifies the aggressiveness of VAD.
+    //
+    // Return value
+    //   -1 if failed to set DTX & VAD as specified,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 SetVAD(
+        const bool             enableDTX = true,
+        const bool             enableVAD = false,
+        const ACMVADMode mode      = VADNormal);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 ReplaceInternalDTX()
+    // This is called to replace the codec internal DTX with WebRtc DTX.
+    // This is only valid for G729 where the user has possibility to replace
+    // AnnexB with WebRtc DTX. For other codecs this function has no effect.
+    //
+    // Input:
+    //   -replaceInternalDTX : if true the internal DTX is replaced with WebRtc.
+    //
+    // Return value
+    //   -1 if failed to replace internal DTX,
+    //    0 if succeeded.
+    //
+    WebRtc_Word32 ReplaceInternalDTX(const bool replaceInternalDTX);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 IsInternalDTXReplaced()
+    // This is called to check if the codec internal DTX is replaced by WebRtc DTX.
+    // This is only valid for G729 where the user has possibility to replace
+    // AnnexB with WebRtc DTX. For other codecs this function has no effect.
+    //
+    // Output:
+    //   -internalDTXReplaced    : if true the internal DTX is replaced with WebRtc.
+    //
+    // Return value
+    //   -1 if failed to check if replace internal DTX or replacement not feasible,
+    //    0 if succeeded.
+    //
+    WebRtc_Word32 IsInternalDTXReplaced(bool* internalDTXReplaced);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void SetNetEqDecodeLock()
+    // Passes the NetEq lock to the codec.
+    //
+    // Input:
+    //   -netEqDecodeLock    : pointer to the lock associated with NetEQ of ACM.
+    //
+    void SetNetEqDecodeLock(
+        RWLockWrapper* netEqDecodeLock)
+    {
+        // Only stores the pointer; no locking is performed here.
+        _netEqDecodeLock = netEqDecodeLock;
+    }
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // bool HasInternalDTX()
+    // Used to check if the codec has internal DTX.
+    //
+    // Return value:
+    //   true if the codec has an internal DTX, e.g. G729,
+    //   false otherwise.
+    //
+    bool HasInternalDTX() const
+    {
+        // Plain accessor for the internal-DTX capability flag.
+        return _hasInternalDTX;
+    }
+
+
+   ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 GetEstimatedBandwidth()
+    // Used to get decoder estimated bandwidth. Only iSAC will provide a value.
+    //
+    //
+    // Return value:
+    //   -1 if fails to get decoder estimated bandwidth,
+    //    >0 estimated bandwidth in bits/sec.
+    //
+    WebRtc_Word32 GetEstimatedBandwidth();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 SetEstimatedBandwidth()
+    // Used to set estimated bandwidth sent out of band from other side. Only
+    // iSAC will have use for the value.
+    //
+    // Input:
+    //       -estimatedBandwidth:    estimated bandwidth in bits/sec
+    //
+    // Return value:
+    //   -1 if fails to set estimated bandwidth,
+    //    0 on success.
+    //
+    WebRtc_Word32 SetEstimatedBandwidth(WebRtc_Word32 estimatedBandwidth);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 GetRedPayload()
+    // Used to get codec specific RED payload (if such is implemented).
+    // Currently only done in iSAC.
+    //
+    // Outputs:
+    //   -redPayload        : a pointer to the data for RED payload.
+    //   -payloadBytes      : number of bytes in RED payload.
+    //
+    // Return value:
+    //   -1 if fails to get codec specific RED,
+    //    0 if succeeded.
+    //
+    WebRtc_Word32 GetRedPayload(
+        WebRtc_UWord8* redPayload,
+        WebRtc_Word16* payloadBytes);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 ResetEncoder()
+    // By calling this function you would re-initialize the encoder with the
+    // current parameters. All the settings, e.g. VAD/DTX, frame-size... should
+    // remain unchanged. (In case of iSAC we don't want to lose BWE history.)
+    //
+    // Return value
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 ResetEncoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 ResetEncoder()
+    // By calling this function you would re-initialize the decoder with the
+    // current parameters.
+    //
+    // Return value
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 ResetDecoder(
+        WebRtc_Word16 payloadType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void DestructEncoder()
+    // This function is called to delete the encoder instance, if possible, to
+    // have a fresh start. For codecs where encoder and decoder share the same
+    // instance we cannot delete the encoder and instead we will initialize the
+    // encoder. We also delete VAD and DTX if they have been created.
+    //
+    void DestructEncoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void DestructDecoder()
+    // This function is called to delete the decoder instance, if possible, to
+    // have a fresh start. For codecs where encoder and decoder share the same
+    // instance we cannot delete the encoder and instead we will initialize the
+    // decoder. Before deleting decoder instance it has to be removed from the
+    // NetEq list.
+    //
+    void DestructDecoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SamplesLeftToEncode()
+    // Returns the number of samples required to be able to do encoding.
+    //
+    // Return value:
+    //   Number of samples.
+    //
+    WebRtc_Word16 SamplesLeftToEncode();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_UWord32 LastEncodedTimestamp()
+    // Returns the timestamp of the last frame it encoded.
+    //
+    // Return value:
+    //   Timestamp.
+    //
+    WebRtc_UWord32 LastEncodedTimestamp() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 UnregisterFromNetEq()
+    // To remove the codec from NetEQ. If the codec (or the decoder instance)
+    // is going to be deleted, first the codec has to be removed from NetEq
+    // by calling this function.
+    //
+    // Input:
+    //   -netEq              : pointer to a NetEq instance that the codec
+    //                         has to be unregistered from.
+    //
+    // Output:
+    //   -1 if failed to unregister the codec,
+    //    0 if the codec is successfully unregistered.
+    //
+    WebRtc_Word32 UnregisterFromNetEq(
+        ACMNetEQ*       netEq,
+        WebRtc_Word16   payloadType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SetUniqueID()
+    // Set a unique ID for the codec to be used for tracing and debugging
+    //
+    // Input
+    //   -id                 : A number to identify the codec.
+    //
+    void SetUniqueID(
+        const WebRtc_UWord32 id);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // IsAudioBufferFresh()
+    // Specifies if ever audio is injected to this codec.
+    //
+    // Return value
+    //   -true; no audio is feed into this codec
+    //   -false; audio has already been  fed to the codec.
+    //
+    bool IsAudioBufferFresh() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // UpdateDecoderSampFreq()
+    // For most of the codecs this function does nothing. It must be
+    // implemented for those codecs that one codec instance serves as the
+    // decoder for different flavors of the codec. One example is iSAC. There,
+    // iSAC 16 kHz and iSAC 32 kHz are treated as two different codecs with
+    // different payload types, however, there is only one iSAC instance to
+    // decode. The reason for that is we would like to decode and encode with
+    // the same codec instance for bandwidth estimator to work.
+    //
+    // Each time that we receive a new payload type, we call this function to
+    // prepare the decoder associated with the new payload. Normally, decoders
+    // don't have to do anything. For iSAC the decoder has to change its
+    // sampling rate. The input parameter specifies the current flavor of the
+    // codec in codec database. For instance, if we just got a SWB payload then
+    // the input parameter is ACMCodecDB::isacswb.
+    //
+    // Input:
+    //   -codecId            : the ID of the codec associated with the
+    //                         payload type that we just received.
+    //
+    // Return value:
+    //    0 if succeeded in updating the decoder.
+    //   -1 if failed to update.
+    //
+    virtual WebRtc_Word16 UpdateDecoderSampFreq(
+        WebRtc_Word16 /* codecId */)
+    {
+        // Default: nothing to update. Multi-rate codecs (e.g. iSAC)
+        // override this to switch the decoder's sampling rate.
+        return 0;
+    }
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // UpdateEncoderSampFreq()
+    // Call this function to update the encoder sampling frequency. This
+    // is for codecs where one payload-name supports several encoder sampling
+    // frequencies. Otherwise, to change the sampling frequency we need to
+    // register new codec. ACM will consider that as registration of a new
+    // codec, not a change in parameter. For iSAC, switching from WB to SWB
+    // is treated as a change in parameter. Therefore, we need this function.
+    //
+    // Input:
+    //   -encoderSampFreqHz  : encoder sampling frequency.
+    //
+    // Return value:
+    //   -1 if failed, or if this is meaningless for the given codec.
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 UpdateEncoderSampFreq(
+        WebRtc_UWord16 encoderSampFreqHz);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // EncoderSampFreq()
+    // Get the sampling frequency that the encoder (WebRtc wrapper) expects.
+    //
+    // Output:
+    //   -sampFreqHz         : sampling frequency, in Hertz, which the encoder
+    //                         should be fed with.
+    //
+    // Return value:
+    //   -1 if failed to output sampling rate.
+    //    0 if the sample rate is returned successfully.
+    //
+    virtual WebRtc_Word16 EncoderSampFreq(
+        WebRtc_UWord16& sampFreqHz);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 ConfigISACBandwidthEstimator()
+    // Call this function to configure the bandwidth estimator of ISAC.
+    // During the adaptation of bit-rate, iSAC automatically adjusts the
+    // frame-size (either 30 or 60 ms) to save on RTP header. The initial
+    // frame-size can be specified by the first argument. The configuration also
+    // regards the initial estimate of bandwidths. The estimator starts from
+    // this point and converges to the actual bottleneck. This is given by the
+    // second parameter. Furthermore, it is also possible to control the
+    // adaptation of frame-size. This is specified by the last parameter.
+    //
+    // Input:
+    //   -initFrameSizeMsec  : initial frame-size in milliseconds. For iSAC-wb
+    //                         30 ms and 60 ms (default) are acceptable values,
+    //                         and for iSAC-swb 30 ms is the only acceptable
+    //                         value. Zero indicates default value.
+    //   -initRateBitPerSec  : initial estimate of the bandwidth. Values
+    //                         between 10000 and 58000 are acceptable.
+    //   -enforceFrameSize   : if true, the frame-size will not be adapted.
+    //
+    // Return value:
+    //   -1 if failed to configure the bandwidth estimator,
+    //    0 if the configuration was successfully applied.
+    //
+    virtual WebRtc_Word32 ConfigISACBandwidthEstimator(
+        const WebRtc_UWord8  initFrameSizeMsec,
+        const WebRtc_UWord16 initRateBitPerSec,
+        const bool           enforceFrameSize);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SetISACMaxPayloadSize()
+    // Set the maximum payload size of iSAC packets. No iSAC payload,
+    // regardless of its frame-size, may exceed the given limit. For
+    // an iSAC payload of size B bits and frame-size T sec we have;
+    // (B < maxPayloadLenBytes * 8) and (B/T < maxRateBitPerSec), c.f.
+    // SetISACMaxRate().
+    //
+    // Input:
+    //   -maxPayloadLenBytes : maximum payload size in bytes.
+    //
+    // Return value:
+    //   -1 if failed to set the maximum payload-size.
+    //    0 if the given limit is set successfully.
+    //
+    virtual WebRtc_Word32 SetISACMaxPayloadSize(
+        const WebRtc_UWord16 maxPayloadLenBytes);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SetISACMaxRate()
+    // Set the maximum instantaneous rate of iSAC. For a payload of B bits
+    // with a frame-size of T sec the instantaneous rate is B/T bits per
+    // second. Therefore, (B/T < maxRateBitPerSec) and
+    // (B < maxPayloadLenBytes * 8) are always satisfied for iSAC payloads,
+    // c.f SetISACMaxPayloadSize().
+    //
+    // Input:
+    //   -maxRateBitPerSec   : maximum instantaneous bit-rate given in bits/sec.
+    //
+    // Return value:
+    //   -1 if failed to set the maximum rate.
+    //    0 if the maximum rate is set successfully.
+    //
+    virtual WebRtc_Word32 SetISACMaxRate(
+        const WebRtc_UWord32 maxRateBitPerSec);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SaveDecoderParam()
+    // Save the parameters of decoder.
+    //
+    // Input:
+    //   -codecParams        : pointer to a struct where the parameters of
+    //                         decoder is stored in.
+    //
+    void SaveDecoderParam(
+        const WebRtcACMCodecParams* codecParams);
+
+
+    WebRtc_Word32 FrameSize()
+    {
+        return _frameLenSmpl;
+    }
+
+    void SetIsMaster(bool isMaster);
+
+
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // REDPayloadISAC()
+    // This is an iSAC-specific function. The function is called to get RED
+    // payload from a default-encoder.
+    //
+    // Inputs:
+    //   -isacRate           : the target rate of the main payload. A RED
+    //                         payload is generated according to the rate of
+    //                         main payload. Note that we are not specifying the
+    //                         rate of RED payload, but the main payload.
+    //   -isacBwEstimate     : bandwidth information should be inserted in
+    //                         RED payload.
+    //
+    // Output:
+    //   -payload            : pointer to a buffer where the RED payload will
+    //                         be written to.
+    //   -payloadLenBytes    : a place-holder to write the length of the RED
+    //                         payload in Bytes.
+    //
+    // Return value:
+    //   -1 if an error occurs, otherwise the length of the payload (in Bytes)
+    //   is returned.
+    //
+    //
+    virtual WebRtc_Word16 REDPayloadISAC(
+        const WebRtc_Word32 isacRate,
+        const WebRtc_Word16 isacBwEstimate,
+        WebRtc_UWord8*      payload,
+        WebRtc_Word16*      payloadLenBytes);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // IsTrueStereoCodec()
+    // Call to see if current encoder is a true stereo codec. This function
+    // should be overwritten for codecs which are true stereo codecs
+    // Return value:
+    //   -true  if stereo codec
+    //   -false if not stereo codec.
+    //
+    virtual bool IsTrueStereoCodec() {
+      return false;
+    }
+
+protected:
+    ///////////////////////////////////////////////////////////////////////////
+    // All the functions with FunctionNameSafe(...) contain the actual
+    // implementation of FunctionName(...). FunctionName() acquires an
+    // appropriate lock and calls FunctionNameSafe() to do the actual work.
+    // Therefore, for the description of functionality, input/output arguments
+    // and return value we refer to FunctionName()
+    //
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See Encode() for the description of function, input(s)/output(s) and
+    // return value.
+    //
+    WebRtc_Word16 EncodeSafe(
+        WebRtc_UWord8*         bitStream,
+        WebRtc_Word16*         bitStreamLenByte,
+        WebRtc_UWord32*        timeStamp,
+        WebRtcACMEncodingType* encodingType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See Decode() for the description of function, input(s)/output(s) and
+    // return value.
+    //
+    virtual WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType) = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See Add10MsSafe() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 Add10MsDataSafe(
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_Word16* data,
+        const WebRtc_UWord16 length,
+        const WebRtc_UWord8  audioChannel);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See RegisterInNetEq() for the description of function,
+    // input(s)/output(s) and  return value.
+    //
+    virtual WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&  codecInst) = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See EncoderParam() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 EncoderParamsSafe(
+        WebRtcACMCodecParams *encParams);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See DecoderParam for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    // Note:
+    // Any class where a single instance handles several flavors of the
+    // same codec (so that several payload types are associated with
+    // the same instance) has to implement this function.
+    //
+    // Currently only iSAC is implementing it. A single iSAC instance is
+    // used for decoding both WB & SWB stream. At one moment both WB & SWB
+    // can be registered as receive codec. Hence two payloads are associated
+    // with a single codec instance.
+    //
+    virtual bool  DecoderParamsSafe(
+        WebRtcACMCodecParams *decParams,
+        const WebRtc_UWord8  payloadType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See ResetEncoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 ResetEncoderSafe();
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See InitEncoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 InitEncoderSafe(
+        WebRtcACMCodecParams *codecParams,
+        bool                 forceInitialization);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See InitDecoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 InitDecoderSafe(
+        WebRtcACMCodecParams *codecParams,
+        bool                 forceInitialization);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See ResetDecoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 ResetDecoderSafe(
+        WebRtc_Word16 payloadType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See DestructEncoder() for the description of function,
+    // input(s)/output(s) and return value.
+    //
+    virtual void DestructEncoderSafe() = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See DestructDecoder() for the description of function,
+    // input(s)/output(s) and return value.
+    //
+    virtual void DestructDecoderSafe() = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See SetBitRate() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    // Any codec that can change the bit-rate has to implement this.
+    //
+    virtual WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 bitRateBPS);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See GetEstimatedBandwidth() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 GetEstimatedBandwidthSafe();
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See SetEstimatedBandwidth() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 SetEstimatedBandwidthSafe(WebRtc_Word32 estimatedBandwidth);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See GetRedPayload() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 GetRedPayloadSafe(
+        WebRtc_UWord8* redPayload,
+        WebRtc_Word16* payloadBytes);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See SetVAD() for the description of function, input(s)/output(s) and
+    // return value.
+    //
+    WebRtc_Word16 SetVADSafe(
+        const bool       enableDTX = true,
+        const bool       enableVAD = false,
+        const ACMVADMode mode      = VADNormal);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See ReplaceInternalDTX() for the description of function, input and
+    // return value.
+    //
+    virtual WebRtc_Word32 ReplaceInternalDTXSafe(
+        const bool replaceInternalDTX);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See IsInternalDTXReplaced() for the description of function, input and
+    // return value.
+    //
+    virtual WebRtc_Word32 IsInternalDTXReplacedSafe(
+        bool* internalDTXReplaced);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See UnregisterFromNetEq() for the description of function,
+    // input(s)/output(s) and return value.
+    //
+    virtual WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*     netEq,
+        WebRtc_Word16 payloadType) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 CreateEncoder()
+    // Creates the encoder instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 CreateEncoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 CreateDecoder()
+    // Creates the decoder instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 CreateDecoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 EnableVAD();
+    // Enables VAD with the given mode. The VAD instance will be created if
+    // it does not exist.
+    //
+    // Input:
+    //   -mode               : VAD mode c.f. audio_coding_module_typedefs.h for
+    //                         the options.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 EnableVAD(ACMVADMode mode);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 DisableVAD()
+    // Disables VAD.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 DisableVAD();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 EnableDTX()
+    // Enables DTX. This method should be overwritten for codecs which have
+    // internal DTX.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 EnableDTX();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 DisableDTX()
+    // Disables usage of DTX. This method should be overwritten for codecs which
+    // have internal DTX.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 DisableDTX();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalEncode()
+    // This is a codec-specific function called in EncodeSafe() to actually
+    // encode a frame of audio.
+    //
+    // Outputs:
+    //   -bitStream          : pointer to a buffer where the bit-stream is
+    //                         written to.
+    //   -bitStreamLenByte   : the length of the bit-stream in byte, a negative
+    //                         value indicates error.
+    //
+    // Return value:
+    //   -1 if failed,
+    //   otherwise the length of the bit-stream is returned.
+    //
+    virtual WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16* bitStreamLenByte) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalInitEncoder()
+    // This is a codec-specific function called in InitEncoderSafe(), it has to
+    // do all codec-specific operation to initialize the encoder given the
+    // encoder parameters.
+    //
+    // Input:
+    //   -codecParams        : pointer to a structure that contains parameters to
+    //                         initialize encoder.
+    //                         Set codecParam->CodecInst.rate to -1 for
+    //                         iSAC to operate in adaptive mode.
+    //                         (to do: if frame-length is -1 frame-length will be
+    //                         automatically adjusted, otherwise, given
+    //                         frame-length is forced)
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalInitDecoder()
+    // This is a codec-specific function called in InitDecoderSafe(), it has to
+    // do all codec-specific operation to initialize the decoder given the
+    // decoder parameters.
+    //
+    // Input:
+    //   -codecParams        : pointer to a structure that contains parameters to
+    //                         initialize encoder.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void IncreaseNoMissedSamples()
+    // This method is called to increase the number of samples that are
+    // overwritten in the audio buffer.
+    //
+    // Input:
+    //   -noSamples          : the number of overwritten samples is incremented
+    //                         by this value.
+    //
+    void IncreaseNoMissedSamples(
+        const WebRtc_Word16 noSamples);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalCreateEncoder()
+    // This is a codec-specific method called in CreateEncoderSafe() it is
+    // supposed to perform all codec-specific operations to create encoder
+    // instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalCreateEncoder() = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalCreateDecoder()
+    // This is a codec-specific method called in CreateDecoderSafe() it is
+    // supposed to perform all codec-specific operations to create decoder
+    // instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalCreateDecoder() = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void InternalDestructEncoderInst()
+    // This is a codec-specific method, used in conferencing, called from
+    // DestructEncoderInst(). The input argument is pointer to encoder instance
+    // (codec instance for codecs that encoder and decoder share the same
+    // instance). This method is called to free the memory that "ptrInst" is
+    // pointing to.
+    //
+    // Input:
+    //   -ptrInst            : pointer to encoder instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual void InternalDestructEncoderInst(
+        void* ptrInst) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalResetEncoder()
+    // This method is called to reset the states of encoder. However, the
+    // current parameters, e.g. frame-length, should remain as they are. For
+    // most of the codecs a re-initialization of the encoder is what needs to
+    // be done. But for iSAC we like to keep the BWE history so we cannot
+    // re-initialize. As soon as such an API is implemented in iSAC this method
+    // has to be overwritten in ACMISAC class.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalResetEncoder();
+
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 ProcessFrameVADDTX()
+    // This function is called when a full frame of audio is available. It will
+    // break the audio frame into blocks such that each block could be processed
+    // by VAD & CN/DTX. If a frame is divided into two blocks then there are two
+    // cases. First, the first block is active, the second block will not be
+    // processed by CN/DTX but only by VAD and return to caller with
+    // '*samplesProcessed' set to zero. There, the audio frame will be encoded
+    // by the encoder. Second, the first block is inactive and is processed by
+    // CN/DTX, then we stop processing the next block and return to the caller
+    // which is EncodeSafe(), with "*samplesProcessed" equal to the number of
+    // samples in first block.
+    //
+    // Output:
+    //   -bitStream          : pointer to a buffer where DTX frame, if
+    //                         generated, will be written to.
+    //   -bitStreamLenByte   : contains the length of bit-stream in bytes, if
+    //                         generated. Zero if no bit-stream is generated.
+    //   -noSamplesProcessed : contains no of samples that actually CN has
+    //                         processed. Those samples processed by CN will not
+    //                         be encoded by the encoder, obviously. If
+    //                         contains zero, it means that the frame has been
+    //                         identified as active by VAD. Note that
+    //                         "*noSamplesProcessed" might be non-zero but
+    //                         "*bitStreamLenByte" be zero.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 ProcessFrameVADDTX(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16* bitStreamLenByte,
+        WebRtc_Word16* samplesProcessed);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // CanChangeEncodingParam()
+    // Check if the codec parameters can be changed. In conferencing normally
+    // codec parameters cannot be changed. The exception is the bit-rate of iSAC.
+    //
+    // return value:
+    //   -true  if codec parameters are allowed to change.
+    //   -false otherwise.
+    //
+    virtual bool CanChangeEncodingParam(CodecInst& codecInst);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // CurrentRate()
+    // Call to get the current encoding rate of the encoder. This function
+    // should be overwritten for codecs which automatically change their
+    // target rate. One example is iSAC. The output of the function is the
+    // current target rate.
+    //
+    // Output:
+    //   -rateBitPerSec      : the current target rate of the codec.
+    //
+    virtual void CurrentRate(
+        WebRtc_Word32& /* rateBitPerSec */)
+    {
+        return;
+    }
+
+    virtual void SaveDecoderParamSafe(
+        const WebRtcACMCodecParams* codecParams);
+
+    // &_inAudio[_inAudioIxWrite] always point to where new audio can be
+    // written to
+    WebRtc_Word16         _inAudioIxWrite;
+
+    // &_inAudio[_inAudioIxRead] points to where audio has to be read from
+    WebRtc_Word16         _inAudioIxRead;
+
+    WebRtc_Word16         _inTimestampIxWrite;
+
+    // Where the audio is stored before encoding,
+    // To save memory the following buffer can be allocated
+    // dynamically for 80ms depending on the sampling frequency
+    // of the codec.
+    WebRtc_Word16*        _inAudio;
+    WebRtc_UWord32*       _inTimestamp;
+
+    WebRtc_Word16         _frameLenSmpl;
+    WebRtc_UWord16        _noChannels;
+
+    // This will point to a static database of the supported codecs
+    WebRtc_Word16         _codecID;
+
+    // This will account for the number of samples that were not encoded.
+    // The case is rare: either samples are missed due to overwrite
+    // at the input buffer or due to an encoding error.
+    WebRtc_UWord32        _noMissedSamples;
+
+    // True if the encoder instance created
+    bool                  _encoderExist;
+    bool                  _decoderExist;
+    // True if the encoder instance initialized
+    bool                  _encoderInitialized;
+    bool                  _decoderInitialized;
+
+    bool                  _registeredInNetEq;
+
+    // VAD/DTX
+    bool                  _hasInternalDTX;
+    WebRtcVadInst*        _ptrVADInst;
+    bool                  _vadEnabled;
+    ACMVADMode            _vadMode;
+    WebRtc_Word16         _vadLabel[MAX_FRAME_SIZE_10MSEC];
+    bool                  _dtxEnabled;
+    WebRtcCngEncInst*     _ptrDTXInst;
+    WebRtc_UWord8         _numLPCParams;
+    bool                  _sentCNPrevious;
+    bool                  _isMaster;
+
+    WebRtcACMCodecParams  _encoderParams;
+    WebRtcACMCodecParams  _decoderParams;
+
+    // Used as a global lock for all available decoders
+    // so that no decoder is used when NetEQ decodes.
+    RWLockWrapper*        _netEqDecodeLock;
+    // Used to lock wrapper internal data
+    // such as buffers and state variables.
+    RWLockWrapper&        _codecWrapperLock;
+
+    WebRtc_UWord32        _lastEncodedTimestamp;
+    WebRtc_UWord32        _lastTimestamp;
+    bool                  _isAudioBuffFresh;
+    WebRtc_UWord32        _uniqueID;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_gsmfr.cc b/trunk/src/modules/audio_coding/main/source/acm_gsmfr.cc
new file mode 100644
index 0000000..c1f817c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_gsmfr.cc
@@ -0,0 +1,416 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_gsmfr.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_GSMFR
+    // NOTE! GSM-FR is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/gsmfr/main/interface/gsmfr_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcGSMFR_CreateEnc(GSMFR_encinst_t_** inst);
+    // int16_t WebRtcGSMFR_CreateDec(GSMFR_decinst_t_** inst);
+    // int16_t WebRtcGSMFR_FreeEnc(GSMFR_encinst_t_* inst);
+    // int16_t WebRtcGSMFR_FreeDec(GSMFR_decinst_t_* inst);
+    // int16_t WebRtcGSMFR_Encode(GSMFR_encinst_t_* encInst, int16_t* input,
+    //                            int16_t len, int16_t* output);
+    // int16_t WebRtcGSMFR_EncoderInit(GSMFR_encinst_t_* encInst, int16_t mode);
+    // int16_t WebRtcGSMFR_Decode(GSMFR_decinst_t_* decInst);
+    // int16_t WebRtcGSMFR_DecodeBwe(GSMFR_decinst_t_* decInst, int16_t* input);
+    // int16_t WebRtcGSMFR_DecodePlc(GSMFR_decinst_t_* decInst);
+    // int16_t WebRtcGSMFR_DecoderInit(GSMFR_decinst_t_* decInst);
+    // void WebRtcGSMFR_Version(char *versionStr, short len);
+    #include "gsmfr_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_GSMFR
+
+// Stub implementation, compiled when the (non-open-source) GSM-FR codec
+// is excluded from the build. Every operation below either fails (-1)
+// or is a no-op, so the codec is effectively unavailable at run time.
+ACMGSMFR::ACMGSMFR(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+
+ACMGSMFR::~ACMGSMFR()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::EnableDTX()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DisableDTX()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMGSMFR::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMGSMFR::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMGSMFR::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMGSMFR::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMGSMFR::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::UnregisterFromNetEqSafe(
+    ACMNetEQ*     /* netEq       */,
+    WebRtc_Word16 /* payloadType */)
+{
+    return -1;
+}
+
+
+#else     //===================== Actual Implementation =======================
+
+ACMGSMFR::ACMGSMFR(
+    WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+    _hasInternalDTX = true;
+    return;
+}
+
+
+ACMGSMFR::~ACMGSMFR()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+// Encode one frame (_frameLenSmpl samples) starting at the current read
+// position of the input audio buffer. The encoder's return value is stored
+// in *bitStreamLenByte and also returned, so whatever WebRtcGSMFR_Encode
+// reports (length or error code) is propagated unchanged to the caller.
+WebRtc_Word16
+ACMGSMFR::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    *bitStreamLenByte = WebRtcGSMFR_Encode(_encoderInstPtr,
+        &_inAudio[_inAudioIxRead], _frameLenSmpl, (WebRtc_Word16*)bitStream);
+    // Increment the read index; this tells the caller how far
+    // we have gone forward in reading the audio buffer.
+    _inAudioIxRead += _frameLenSmpl;
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+// Turn on the codec's internal DTX by re-initializing the encoder with
+// DTX mode 1. A no-op (success) if DTX is already enabled; fails if no
+// encoder instance exists to reconfigure.
+WebRtc_Word16
+ACMGSMFR::EnableDTX()
+{
+    if(_dtxEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        if(WebRtcGSMFR_EncoderInit(_encoderInstPtr, 1) < 0)
+        {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "EnableDTX: cannot init encoder for GSMFR");
+            return -1;
+        }
+        _dtxEnabled = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+// Turn off the codec's internal DTX by re-initializing the encoder with
+// DTX mode 0. A no-op (success) if DTX is already disabled or if no
+// encoder instance exists.
+WebRtc_Word16
+ACMGSMFR::DisableDTX()
+{
+    if(!_dtxEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        if(WebRtcGSMFR_EncoderInit(_encoderInstPtr, 0) < 0)
+        {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "DisableDTX: cannot init encoder for GSMFR");
+            return -1;
+        }
+        _dtxEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exist, therefore disabling is harmless
+        return 0;
+    }
+}
+
+
+// Initialize the GSM-FR encoder; the second argument to the init call
+// enables (1) or disables (0) internal DTX according to codecParams.
+//
+// Returns 0 on success, -1 on failure.
+WebRtc_Word16
+ACMGSMFR::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    if (WebRtcGSMFR_EncoderInit(_encoderInstPtr,
+        ((codecParams->enableDTX)? 1:0)) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalInitEncoder: cannot init encoder for GSMFR");
+        // Bug fix: the original fell through and returned 0 (success) even
+        // when initialization failed; InternalInitDecoder returns -1 in the
+        // analogous case, and so must this function.
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    if (WebRtcGSMFR_DecoderInit(_decoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "InternalInitDecoder: cannot init decoder for GSMFR");
+    return -1;
+  }
+  return 0;
+}
+
+
+// Fill in the NetEQ codec definition for GSM-FR (8000 Hz) so NetEQ can
+// add this decoder to its database. Fails if the decoder has not been
+// initialized.
+WebRtc_Word32
+ACMGSMFR::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "CodecDef: decoder is not initialized for GSMFR");
+        return -1;
+    }
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_GSMFR_FUNCTION."
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderGSMFR, codecInst.pltype,
+        _decoderInstPtr, 8000);
+    SET_GSMFR_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMGSMFR::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateEncoder()
+{
+    if (WebRtcGSMFR_CreateEnc(&_encoderInstPtr) < 0)
+  {
+     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "InternalCreateEncoder: cannot create instance for GSMFR encoder");
+    return -1;
+  }
+  return 0;
+}
+
+
+void
+ACMGSMFR::DestructEncoderSafe()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    _encoderExist = false;
+    _encoderInitialized = false;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateDecoder()
+{
+    if (WebRtcGSMFR_CreateDec(&_decoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "InternalCreateDecoder: cannot create instance for GSMFR decoder");
+    return -1;
+  }
+  return 0;
+}
+
+
+void
+ACMGSMFR::DestructDecoderSafe()
+{
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    _decoderExist = false;
+    _decoderInitialized = false;
+}
+
+
+void
+ACMGSMFR::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcGSMFR_FreeEnc((GSMFR_encinst_t_*)ptrInst);
+    }
+    return;
+}
+
+
+// Remove this codec from NetEQ's database, but only if the given payload
+// type matches the one this decoder instance was registered with.
+//
+// Returns the result of ACMNetEQ::RemoveCodec() on success, -1 on
+// payload-type mismatch.
+WebRtc_Word16
+ACMGSMFR::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: the trace message previously contained no conversion
+        // specifiers, so the three arguments passed to the printf-style
+        // trace macro (codec name, given and stored payload types) were
+        // silently dropped from the diagnostic.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s: given payload-type %d does not "
+            "match the stored payload-type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+    return netEq->RemoveCodec(kDecoderGSMFR);
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_gsmfr.h b/trunk/src/modules/audio_coding/main/source/acm_gsmfr.h
new file mode 100644
index 0000000..43e05fc
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_gsmfr.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct GSMFR_encinst_t_;
+struct GSMFR_decinst_t_;
+
+namespace webrtc {
+
// ACM wrapper for the GSM full-rate codec. Owns one encoder and one
// decoder instance and implements the ACMGenericCodec hooks for
// creating, initializing, and destroying them.
class ACMGSMFR : public ACMGenericCodec
{
public:
    ACMGSMFR(WebRtc_Word16 codecID);
    ~ACMGSMFR();
    // for FEC
    ACMGenericCodec* CreateInstance(void);

    // Encodes one frame from the inherited input-audio buffer.
    // Returns the bit-stream length in bytes, or -1 on error.
    WebRtc_Word16 InternalEncode(
        WebRtc_UWord8* bitstream,
        WebRtc_Word16* bitStreamLenByte);

    WebRtc_Word16 InternalInitEncoder(
        WebRtcACMCodecParams *codecParams);

    WebRtc_Word16 InternalInitDecoder(
        WebRtcACMCodecParams *codecParams);

protected:
    WebRtc_Word16 DecodeSafe(
        WebRtc_UWord8* bitStream,
        WebRtc_Word16  bitStreamLenByte,
        WebRtc_Word16* audio,
        WebRtc_Word16* audioSamples,
        WebRtc_Word8*  speechType);

    // Fills in the NetEQ codec definition for this decoder.
    WebRtc_Word32 CodecDef(
        WebRtcNetEQ_CodecDef& codecDef,
        const CodecInst&      codecInst);

    void DestructEncoderSafe();

    void DestructDecoderSafe();

    WebRtc_Word16 InternalCreateEncoder();

    WebRtc_Word16 InternalCreateDecoder();

    // Frees an encoder instance passed as an opaque pointer.
    void InternalDestructEncoderInst(
        void* ptrInst);

    // Discontinuous-transmission (DTX) control.
    WebRtc_Word16 EnableDTX();

    WebRtc_Word16 DisableDTX();

    // Removes this decoder from NetEQ if payloadType matches the
    // registered payload type.
    WebRtc_Word16 UnregisterFromNetEqSafe(
        ACMNetEQ*     netEq,
        WebRtc_Word16 payloadType);

    // Owned codec instances; NULL until the create hooks run.
    GSMFR_encinst_t_* _encoderInstPtr;
    GSMFR_decinst_t_* _decoderInstPtr;
};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_ilbc.cc b/trunk/src/modules/audio_coding/main/source/acm_ilbc.cc
new file mode 100644
index 0000000..f16e2f8
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_ilbc.cc
@@ -0,0 +1,390 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_ilbc.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_ILBC
+    #include "ilbc.h"
+#endif
+
+namespace webrtc
+{
+
+#ifndef WEBRTC_CODEC_ILBC
+
// ===== Stub implementation (WEBRTC_CODEC_ILBC not compiled in) =====
// Every method below is a placeholder: fallible operations return -1,
// CreateInstance returns NULL, and void methods do nothing, so a build
// without iLBC still links.
ACMILBC::ACMILBC(WebRtc_Word16 /* codecID */)
    : _encoderInstPtr(NULL),
      _decoderInstPtr(NULL) {
  return;
}


ACMILBC::~ACMILBC()
{
    return;
}


WebRtc_Word16
ACMILBC::InternalEncode(
    WebRtc_UWord8* /* bitStream        */,
    WebRtc_Word16* /* bitStreamLenByte */)
{
    return -1;
}


WebRtc_Word16
ACMILBC::DecodeSafe(
    WebRtc_UWord8* /* bitStream        */,
    WebRtc_Word16  /* bitStreamLenByte */,
    WebRtc_Word16* /* audio            */,
    WebRtc_Word16* /* audioSamples     */,
    WebRtc_Word8*  /* speechType       */)
{
    return -1;
}


WebRtc_Word16
ACMILBC::InternalInitEncoder(
    WebRtcACMCodecParams* /* codecParams */)
{
    return -1;
}


WebRtc_Word16
ACMILBC::InternalInitDecoder(
    WebRtcACMCodecParams* /* codecParams */)
{
    return -1;
}


WebRtc_Word32
ACMILBC::CodecDef(
    WebRtcNetEQ_CodecDef& /* codecDef  */,
    const CodecInst&      /* codecInst */)
{
    return -1;
}


ACMGenericCodec*
ACMILBC::CreateInstance(void)
{
    return NULL;
}


WebRtc_Word16
ACMILBC::InternalCreateEncoder()
{
    return -1;
}


void
ACMILBC::DestructEncoderSafe()
{
    return;
}


WebRtc_Word16
ACMILBC::InternalCreateDecoder()
{
    return -1;
}


void
ACMILBC::DestructDecoderSafe()
{
    return;
}


void
ACMILBC::InternalDestructEncoderInst(
    void* /* ptrInst */)
{
    return;
}

WebRtc_Word16
ACMILBC::SetBitRateSafe(const WebRtc_Word32 /* rate */)
{
    return -1;
}


WebRtc_Word16
ACMILBC::UnregisterFromNetEqSafe(
    ACMNetEQ*     /* netEq       */,
    WebRtc_Word16 /* payloadType */)
{
    return -1;
}
+
+
+#else     //===================== Actual Implementation =======================
+
+
+ACMILBC::ACMILBC(
+    WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+    return;
+}
+
+
+ACMILBC::~ACMILBC()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_EncoderFree(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_DecoderFree(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
// Encodes one iLBC frame from the inherited input-audio buffer,
// starting at the current read index.
// Returns the bit-stream length in bytes, or -1 on encoder error.
WebRtc_Word16
ACMILBC::InternalEncode(
    WebRtc_UWord8* bitStream,
    WebRtc_Word16* bitStreamLenByte)
{
    *bitStreamLenByte = WebRtcIlbcfix_Encode(_encoderInstPtr,
        &_inAudio[_inAudioIxRead], _frameLenSmpl, (WebRtc_Word16*)bitStream);
    if (*bitStreamLenByte < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
            "InternalEncode: error in encode for ILBC");
        return -1;
    }
    // Advance the read index so the caller can see how much of the
    // audio buffer has been consumed.
    _inAudioIxRead += _frameLenSmpl;
    return *bitStreamLenByte;
}


// Decoding is performed by NetEQ; nothing to do here.
WebRtc_Word16
ACMILBC::DecodeSafe(
    WebRtc_UWord8* /* bitStream        */,
    WebRtc_Word16  /* bitStreamLenByte */,
    WebRtc_Word16* /* audio            */,
    WebRtc_Word16* /* audioSamples     */,
    WebRtc_Word8*  /* speechType       */)
{
    return 0;
}
+
+
+WebRtc_Word16
+ACMILBC::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // initialize with a correct processing block length
+    if((160 == (codecParams->codecInstant).pacsize) ||
+        (320 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 20ms
+        return WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 20);
+    }
+    else if((240 == (codecParams->codecInstant).pacsize) ||
+        (480 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 30ms
+        return WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 30);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalInitEncoder: invalid processing block");
+        return -1;
+    }
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalInitDecoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // initialize with a correct processing block length
+    if((160 == (codecParams->codecInstant).pacsize) ||
+        (320 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 20ms
+        return WebRtcIlbcfix_DecoderInit(_decoderInstPtr, 20);
+    }
+    else if((240 == (codecParams->codecInstant).pacsize) ||
+        (480 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 30ms
+        return WebRtcIlbcfix_DecoderInit(_decoderInstPtr, 30);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalInitDecoder: invalid processing block");
+        return -1;
+    }
+}
+
+
// Fills in the NetEQ codec definition (parameters + function pointers)
// for the iLBC decoder. Fails if the decoder is not yet initialized.
WebRtc_Word32
ACMILBC::CodecDef(
    WebRtcNetEQ_CodecDef& codecDef,
    const CodecInst&      codecInst)
{
    if (!_decoderInitialized)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
            "CodeDef: decoder not initialized for ILBC");
        return -1;
    }
    // Fill up the structure by calling
    // "SET_CODEC_PAR" & "SET_ILBC_FUNCTION."
    // Then return the structure back to NetEQ to add the codec to it's
    // database. iLBC always runs at 8 kHz.
    SET_CODEC_PAR((codecDef), kDecoderILBC, codecInst.pltype,
        _decoderInstPtr, 8000);
    SET_ILBC_FUNCTIONS((codecDef));
    return 0;
}


// FEC hook: iLBC does not provide a secondary encoder instance.
ACMGenericCodec*
ACMILBC::CreateInstance(void)
{
    return NULL;
}
+
+
+WebRtc_Word16
+ACMILBC::InternalCreateEncoder()
+{
+    if (WebRtcIlbcfix_EncoderCreate(&_encoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalCreateEncoder: cannot create instance for ILBC encoder");
+        return -1;
+    }
+    return 0;
+}
+
+
+void
+ACMILBC::DestructEncoderSafe()
+{
+    _encoderInitialized = false;
+    _encoderExist = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_EncoderFree(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalCreateDecoder()
+{
+    if (WebRtcIlbcfix_DecoderCreate(&_decoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalCreateDecoder: cannot create instance for ILBC decoder");
+        return -1;
+    }
+    return 0;
+}
+
+
+void
+ACMILBC::DestructDecoderSafe()
+{
+    _decoderInitialized = false;
+    _decoderExist = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_DecoderFree(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMILBC::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcIlbcfix_EncoderFree((iLBC_encinst_t_*)ptrInst);
+    }
+    return;
+}
+
+WebRtc_Word16
+ACMILBC::SetBitRateSafe(const WebRtc_Word32 rate)
+{
+    // Check that rate is valid. No need to store the value
+    if (rate == 13300)
+    {
+        WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 30);
+    }
+    else if (rate == 15200)
+    {
+        WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 20);
+    }
+    else
+    {
+        return -1;
+    }
+    _encoderParams.codecInstant.rate = rate;
+
+    return 0;
+}
+
+WebRtc_Word16
+ACMILBC::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec: given payload-type does not match \
+the stored payload type",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+    return netEq->RemoveCodec(kDecoderILBC);
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_ilbc.h b/trunk/src/modules/audio_coding/main/source/acm_ilbc.h
new file mode 100644
index 0000000..2d3e420
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_ilbc.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct iLBC_encinst_t_;
+struct iLBC_decinst_t_;
+
+namespace webrtc
+{
+
// ACM wrapper for the iLBC codec (WebRtcIlbcfix_*). Owns one encoder
// and one decoder instance and implements the ACMGenericCodec hooks.
class ACMILBC : public ACMGenericCodec
{
public:
    ACMILBC(WebRtc_Word16 codecID);
    ~ACMILBC();
    // for FEC
    ACMGenericCodec* CreateInstance(void);

    // Encodes one frame from the inherited input-audio buffer.
    // Returns the bit-stream length in bytes, or -1 on error.
    WebRtc_Word16 InternalEncode(
        WebRtc_UWord8* bitstream,
        WebRtc_Word16* bitStreamLenByte);

    WebRtc_Word16 InternalInitEncoder(
        WebRtcACMCodecParams *codecParams);

    WebRtc_Word16 InternalInitDecoder(
        WebRtcACMCodecParams *codecParams);

protected:
    WebRtc_Word16 DecodeSafe(
        WebRtc_UWord8* bitStream,
        WebRtc_Word16  bitStreamLenByte,
        WebRtc_Word16* audio,
        WebRtc_Word16* audioSamples,
        WebRtc_Word8*  speechType);

    // Fills in the NetEQ codec definition for this decoder.
    WebRtc_Word32 CodecDef(
        WebRtcNetEQ_CodecDef& codecDef,
        const CodecInst&      codecInst);


    // Switches between the 13.3 kbit/s (30 ms) and 15.2 kbit/s (20 ms)
    // operating points.
    WebRtc_Word16 SetBitRateSafe(
        const WebRtc_Word32 rate);

    void DestructEncoderSafe();

    void DestructDecoderSafe();

    WebRtc_Word16 InternalCreateEncoder();

    WebRtc_Word16 InternalCreateDecoder();

    // Frees an encoder instance passed as an opaque pointer.
    void InternalDestructEncoderInst(
        void* ptrInst);

    // Removes this decoder from NetEQ if payloadType matches the
    // registered payload type.
    WebRtc_Word16 UnregisterFromNetEqSafe(
        ACMNetEQ*       netEq,
        WebRtc_Word16   payloadType);

    // Owned codec instances; NULL until the create hooks run.
    iLBC_encinst_t_* _encoderInstPtr;
    iLBC_decinst_t_* _decoderInstPtr;
};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_isac.cc b/trunk/src/modules/audio_coding/main/source/acm_isac.cc
new file mode 100644
index 0000000..adbb9c5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_isac.cc
@@ -0,0 +1,1241 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_isac.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+
+#ifdef WEBRTC_CODEC_ISAC
+    #include "acm_isac_macros.h"
+    #include "isac.h"
+#endif
+
+#ifdef WEBRTC_CODEC_ISACFX
+    #include "acm_isac_macros.h"
+    #include "isacfix.h"
+#endif
+
+namespace webrtc
+{
+
// we need this otherwise we cannot use forward declaration
// in the header file
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
struct ACMISACInst
{
    // Single instance shared between encoder and decoder (iSAC uses
    // one codec struct for both directions).
    ACM_ISAC_STRUCT *inst;
};
#endif

// Valid channel-independent bottleneck range in bit/s.
#define ISAC_MIN_RATE 10000
#define ISAC_MAX_RATE 56000


// How the scaling is computed. iSAC computes a gain based on the
// bottleneck. It follows the following expression for that
//
// G(BN_kbps) = pow(10, (a + b * BN_kbps + c * BN_kbps * BN_kbps) / 20.0)
//              / 3.4641;
//
// Where for 30 ms framelength we have,
//
// a = -23; b = 0.48; c = 0;
//
// As the default encoder is operating at 32kbps we have the scale as
//
// S(BN_kbps) = G(BN_kbps) / G(32);

#define ISAC_NUM_SUPPORTED_RATES 9
// NOTE(review): "Suported" is a misspelling kept as-is; renaming would
// touch every use site in this file.
// Rates are ordered high-to-low; isacScale[i] is the transcoding scale
// for isacSuportedRates[i] (see the derivation above).
const WebRtc_UWord16 isacSuportedRates[ISAC_NUM_SUPPORTED_RATES] = {
    32000,    30000,    26000,   23000,   21000,
    19000,    17000,   15000,    12000};

const float isacScale[ISAC_NUM_SUPPORTED_RATES] = {
     1.0f,    0.8954f,  0.7178f, 0.6081f, 0.5445f,
     0.4875f, 0.4365f,  0.3908f, 0.3311f};

// Tables for bandwidth estimates
#define NR_ISAC_BANDWIDTHS 24
// Wideband (16 kHz) rate table indexed by bandwidth-estimate index.
const WebRtc_Word32 isacRatesWB[NR_ISAC_BANDWIDTHS] =
{
    10000, 11100, 12300, 13700, 15200, 16900,
    18800, 20900, 23300, 25900, 28700, 31900,
    10100, 11200, 12400, 13800, 15300, 17000,
    18900, 21000, 23400, 26000, 28800, 32000};


// Super-wideband (32 kHz) rate table indexed by bandwidth-estimate index.
const WebRtc_Word32 isacRatesSWB[NR_ISAC_BANDWIDTHS] =
{
    10000, 11000, 12400, 13800, 15300, 17000,
    18900, 21000, 23200, 25400, 27600, 29800,
    32000, 34100, 36300, 38500, 40700, 42900,
    45100, 47300, 49500, 51700, 53900, 56000,
};
+
+#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
+
// ===== Stub implementation (neither WEBRTC_CODEC_ISAC nor
// WEBRTC_CODEC_ISACFX compiled in) =====
// Placeholders so a build without iSAC still links: fallible
// operations return -1, queries return NULL/false, void methods do
// nothing.
ACMISAC::ACMISAC(WebRtc_Word16 /* codecID */)
    : _codecInstPtr(NULL),
      _isEncInitialized(false),
      _isacCodingMode(CHANNEL_INDEPENDENT),
      _enforceFrameSize(false),
      _isacCurrentBN(32000),
      _samplesIn10MsAudio(160) {  // Initiates to 16 kHz mode.
  // Initiate decoder parameters for the 32 kHz mode.
  memset(&_decoderParams32kHz, 0, sizeof(WebRtcACMCodecParams));
  _decoderParams32kHz.codecInstant.pltype = -1;

  return;
}


ACMISAC::~ACMISAC()
{
    return;
}


ACMGenericCodec*
ACMISAC::CreateInstance(void)
{
    return NULL;
}


WebRtc_Word16
ACMISAC::InternalEncode(
    WebRtc_UWord8* /* bitstream        */,
    WebRtc_Word16* /* bitStreamLenByte */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::DecodeSafe(
    WebRtc_UWord8* /* bitStream        */,
    WebRtc_Word16  /* bitStreamLenByte */,
    WebRtc_Word16* /* audio            */,
    WebRtc_Word16* /* audioSamples     */,
    WebRtc_Word8*  /* speechType       */)
{
    return 0;
}


WebRtc_Word16
ACMISAC::InternalInitEncoder(
    WebRtcACMCodecParams* /* codecParams */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::InternalInitDecoder(
    WebRtcACMCodecParams* /* codecParams */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::InternalCreateDecoder()
{
    return -1;
}


void
ACMISAC::DestructDecoderSafe()
{
    return;
}


WebRtc_Word16
ACMISAC::InternalCreateEncoder()
{
    return -1;
}


void
ACMISAC::DestructEncoderSafe()
{
    return;
}


WebRtc_Word32
ACMISAC::CodecDef(
    WebRtcNetEQ_CodecDef& /* codecDef  */,
    const CodecInst&      /* codecInst */)
{
    return -1;
}


void
ACMISAC::InternalDestructEncoderInst(
    void* /* ptrInst */)
{
    return;
}

WebRtc_Word16
ACMISAC::DeliverCachedIsacData(
    WebRtc_UWord8*         /* bitStream        */,
    WebRtc_Word16*         /* bitStreamLenByte */,
    WebRtc_UWord32*        /* timestamp        */,
    WebRtcACMEncodingType* /* encodingType     */,
    const WebRtc_UWord16   /* isacRate         */,
    const WebRtc_UWord8    /* isacBWestimate   */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::Transcode(
    WebRtc_UWord8* /* bitStream        */,
    WebRtc_Word16* /* bitStreamLenByte */,
    WebRtc_Word16  /* qBWE             */,
    WebRtc_Word32  /* scale            */,
    bool           /* isRED            */)
{
    return -1;
}

WebRtc_Word16
ACMISAC::SetBitRateSafe(
    WebRtc_Word32 /* bitRate */)
{
    return -1;
}

WebRtc_Word32
ACMISAC::GetEstimatedBandwidthSafe()
{
    return -1;
}

WebRtc_Word32
ACMISAC::SetEstimatedBandwidthSafe(
    WebRtc_Word32 /* estimatedBandwidth */)
{
    return -1;
}

WebRtc_Word32
ACMISAC::GetRedPayloadSafe(
    WebRtc_UWord8* /* redPayload   */,
    WebRtc_Word16* /* payloadBytes */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::UnregisterFromNetEqSafe(
    ACMNetEQ*     /* netEq       */,
    WebRtc_Word16 /* payloadType */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::UpdateDecoderSampFreq(
    WebRtc_Word16 /* codecId */)
{
    return -1;
}


WebRtc_Word16
ACMISAC::UpdateEncoderSampFreq(
    WebRtc_UWord16 /* encoderSampFreqHz */)
{
    return -1;
}

WebRtc_Word16
ACMISAC::EncoderSampFreq(
        WebRtc_UWord16& /* sampFreqHz */)
{
    return -1;
}

WebRtc_Word32
ACMISAC::ConfigISACBandwidthEstimator(
    const WebRtc_UWord8  /* initFrameSizeMsec */,
    const WebRtc_UWord16 /* initRateBitPerSec */,
    const bool           /* enforceFrameSize  */)
{
    return -1;
}

WebRtc_Word32
ACMISAC::SetISACMaxPayloadSize(
    const WebRtc_UWord16 /* maxPayloadLenBytes */)
{
    return -1;
}

WebRtc_Word32
ACMISAC::SetISACMaxRate(
        const WebRtc_UWord32 /* maxRateBitPerSec */)
{
    return -1;
}


void
ACMISAC::UpdateFrameLen()
{
    return;
}

void
ACMISAC::CurrentRate(
    WebRtc_Word32& /*rateBitPerSec */)
{
    return;
}

bool
ACMISAC::DecoderParamsSafe(
    WebRtcACMCodecParams* /* decParams   */,
    const WebRtc_UWord8   /* payloadType */)
{
    return false;
}

void
ACMISAC::SaveDecoderParamSafe(
    const WebRtcACMCodecParams* /* codecParams */)
{
    return;
}

WebRtc_Word16
ACMISAC::REDPayloadISAC(
        const WebRtc_Word32 /* isacRate        */,
        const WebRtc_Word16 /* isacBwEstimate  */,
        WebRtc_UWord8*      /* payload         */,
        WebRtc_Word16*      /* payloadLenBytes */)
{
    return -1;
}
+
+
+#else     //===================== Actual Implementation =======================
+
+
+
+#ifdef WEBRTC_CODEC_ISACFX
+
// Sampling rate in kHz used by the iSACFIX adapter functions below.
enum IsacSamplingRate
{
    kIsacWideband = 16,
    kIsacSuperWideband = 32
};
+
+static float
+ACMISACFixTranscodingScale(
+    WebRtc_UWord16 rate)
+{
+    // find the scale for transcoding, the scale is rounded
+    // downward
+    float  scale = -1;
+    for(WebRtc_Word16 n=0; n < ISAC_NUM_SUPPORTED_RATES; n++)
+    {
+        if(rate >= isacSuportedRates[n])
+        {
+            scale = isacScale[n];
+            break;
+        }
+    }
+    return scale;
+}
+
// Adapter matching the floating-point iSAC signature: reads the
// current up-link bottleneck (send bit-rate) from the codec instance.
static void
ACMISACFixGetSendBitrate(
    ACM_ISAC_STRUCT* inst,
    WebRtc_Word32*   bottleNeck)
{
    *bottleNeck = WebRtcIsacfix_GetUplinkBw(inst);
}
+
+static WebRtc_Word16
+ACMISACFixGetNewBitstream(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word16    BWEIndex,
+    WebRtc_Word16    /* jitterIndex */,
+    WebRtc_Word32    rate,
+    WebRtc_Word16*   bitStream,
+    bool             isRED)
+{
+    if (isRED)
+    {
+        // RED not supported with iSACFIX
+        return -1;
+    }
+    float scale = ACMISACFixTranscodingScale((WebRtc_UWord16)rate);
+    return WebRtcIsacfix_GetNewBitStream(inst, BWEIndex, scale, bitStream);
+}
+
+
+static WebRtc_Word16
+ACMISACFixGetSendBWE(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word16*   rateIndex,
+    WebRtc_Word16*   /* dummy */)
+{
+    WebRtc_Word16 localRateIndex;
+    WebRtc_Word16 status = WebRtcIsacfix_GetDownLinkBwIndex(inst, &localRateIndex);
+    if(status < 0)
+    {
+        return -1;
+    }
+    else
+    {
+        *rateIndex = localRateIndex;
+        return 0;
+    }
+}
+
// Adapter: forwards rate/frame-size control to iSACFIX's adaptive-mode
// BWE control.
// NOTE(review): rateBPS is narrowed to WebRtc_Word16; rates above
// 32767 bit/s would overflow — confirm callers stay within the
// wideband range here.
static WebRtc_Word16
ACMISACFixControlBWE(
    ACM_ISAC_STRUCT* inst,
    WebRtc_Word32    rateBPS,
    WebRtc_Word16    frameSizeMs,
    WebRtc_Word16    enforceFrameSize)
{
    return WebRtcIsacfix_ControlBwe(inst, (WebRtc_Word16)rateBPS,
        frameSizeMs, enforceFrameSize);
}

// Adapter: forwards rate/frame-size control for channel-independent
// mode (same Word16 narrowing caveat as above).
static WebRtc_Word16
ACMISACFixControl(
    ACM_ISAC_STRUCT* inst,
    WebRtc_Word32    rateBPS,
    WebRtc_Word16    frameSizeMs)
{
    return WebRtcIsacfix_Control(inst, (WebRtc_Word16)rateBPS,
        frameSizeMs);
}

// iSACFIX encodes only wideband (16 kHz).
static IsacSamplingRate
ACMISACFixGetEncSampRate(
    ACM_ISAC_STRUCT* /* inst */)
{
    return kIsacWideband;
}


// iSACFIX decodes only wideband (16 kHz).
static IsacSamplingRate
ACMISACFixGetDecSampRate(
    ACM_ISAC_STRUCT* /* inst */)
{
    return kIsacWideband;
}
+
+#endif
+
+
+
+
+
+
// Real-implementation constructor: allocates the wrapper struct that
// holds the shared iSAC instance (the instance itself is created
// lazily) and seeds decoder parameters for both 16 and 32 kHz modes.
ACMISAC::ACMISAC(WebRtc_Word16 codecID)
    : _isEncInitialized(false),
      _isacCodingMode(CHANNEL_INDEPENDENT),
      _enforceFrameSize(false),
      _isacCurrentBN(32000),
      _samplesIn10MsAudio(160) {  // Initiates to 16 kHz mode.
  _codecID = codecID;

  // Create codec instance.
  // NOTE(review): plain `new` throws std::bad_alloc rather than
  // returning NULL on standard toolchains, so this guard is likely
  // dead code — confirm whether the build uses -fno-exceptions.
  _codecInstPtr = new ACMISACInst;
  if (_codecInstPtr == NULL) {
    return;
  }
  _codecInstPtr->inst = NULL;

  // Initiate decoder parameters for the 32 kHz mode.
  memset(&_decoderParams32kHz, 0, sizeof(WebRtcACMCodecParams));
  _decoderParams32kHz.codecInstant.pltype = -1;

  // TODO(tlegrand): Check if the following is really needed, now that
  // ACMGenericCodec has been updated to initialize this value.
  // Initialize values that can be used uninitialized otherwise
  _decoderParams.codecInstant.pltype = -1;
}
+
+
+ACMISAC::~ACMISAC()
+{
+    if (_codecInstPtr != NULL)
+    {
+        if(_codecInstPtr->inst != NULL)
+        {
+            ACM_ISAC_FREE(_codecInstPtr->inst);
+            _codecInstPtr->inst = NULL;
+        }
+        delete _codecInstPtr;
+        _codecInstPtr = NULL;
+    }
+    return;
+}
+
+
// FEC hook: iSAC shares a single instance between encoder and decoder,
// so no secondary instance is handed out.
ACMGenericCodec*
ACMISAC::CreateInstance(void)
{
    return NULL;
}
+
+
// Feeds 10 ms chunks of buffered audio into the iSAC encoder until a
// packet is produced or the expected frame length is consumed.
// Returns the bit-stream length in bytes (0 if no packet was
// produced), or -1 on error.
WebRtc_Word16
ACMISAC::InternalEncode(
    WebRtc_UWord8* bitstream,
    WebRtc_Word16* bitStreamLenByte)
{
    // ISAC takes 10ms audio everytime we call encoder, therefor,
    // it should be treated like codecs with 'basic coding block'
    // non-zero, and the following 'while-loop' should not be necessary.
    // However, due to a mistake in the codec the frame-size might change
    // at the first 10ms pushed in to iSAC if the bit-rate is low, this is
    // sort of a bug in iSAC. to address this we treat iSAC as the
    // following.

    if (_codecInstPtr == NULL)
    {
        return -1;
    }
    *bitStreamLenByte = 0;
    while((*bitStreamLenByte == 0) && (_inAudioIxRead < _frameLenSmpl))
    {
        // The read index overtaking the write index means the actual
        // iSAC frame is larger than _frameLenSmpl predicted.
        if(_inAudioIxRead > _inAudioIxWrite)
        {
            // something is wrong.
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
                "The actual fram-size of iSAC appears to be larger that expected. All audio \
pushed in but no bit-stream is generated.");
            return -1;
        }
        *bitStreamLenByte = ACM_ISAC_ENCODE(_codecInstPtr->inst,
            &_inAudio[_inAudioIxRead], (WebRtc_Word16*)bitstream);
        // Advance the read index so the caller can see how much of the
        // audio buffer has been consumed.
        _inAudioIxRead += _samplesIn10MsAudio;
    }
    if(*bitStreamLenByte == 0)
    {
        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
            "ISAC Has encoded the whole frame but no bit-stream is generated.");
    }

    // a packet is generated iSAC, is set in adaptive mode may change
    // the frame length and we like to update the bottleneck value as
    // well, although updating bottleneck is not crucial
    if((*bitStreamLenByte > 0) && (_isacCodingMode == ADAPTIVE))
    {
        //_frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
        ACM_ISAC_GETSENDBITRATE(_codecInstPtr->inst, &_isacCurrentBN);
    }
    UpdateFrameLen();
    return *bitStreamLenByte;
}


// Decoding is performed by NetEQ; nothing to do here.
WebRtc_Word16
ACMISAC::DecodeSafe(
    WebRtc_UWord8* /* bitStream        */,
    WebRtc_Word16  /* bitStreamLenByte */,
    WebRtc_Word16* /* audio            */,
    WebRtc_Word16* /* audioSamples     */,
    WebRtc_Word8*  /* speechType       */)
{
    return 0;
}
+
+
// Initializes the iSAC encoder: picks adaptive vs channel-independent
// mode from the requested rate, sets the sampling frequency, and
// applies rate/frame-size (or reads back the adaptive bottleneck).
// Returns 0 on success, -1 on any failure or out-of-range rate.
WebRtc_Word16
ACMISAC::InternalInitEncoder(
    WebRtcACMCodecParams* codecParams)
{
    // if rate is set to -1 then iSAC has to be in adaptive mode
    if(codecParams->codecInstant.rate == -1)
    {
        _isacCodingMode = ADAPTIVE;
    }

    // sanity check that rate is in acceptable range
    else if((codecParams->codecInstant.rate >= ISAC_MIN_RATE) &&
        (codecParams->codecInstant.rate <= ISAC_MAX_RATE))
    {
        _isacCodingMode = CHANNEL_INDEPENDENT;
        _isacCurrentBN = codecParams->codecInstant.rate;
    }
    else
    {
        return -1;
    }

    // we need to set the encoder sampling frequency.
    if(UpdateEncoderSampFreq((WebRtc_UWord16)codecParams->codecInstant.plfreq) < 0)
    {
        return -1;
    }
    if(ACM_ISAC_ENCODERINIT(_codecInstPtr->inst, _isacCodingMode) < 0)
    {
        return -1;
    }

    // apply the frame-size and rate if operating in
    // channel-independent mode
    if(_isacCodingMode == CHANNEL_INDEPENDENT)
    {
        // frame size in ms = pacsize (samples) / (plfreq / 1000)
        if(ACM_ISAC_CONTROL(_codecInstPtr->inst,
            codecParams->codecInstant.rate,
            codecParams->codecInstant.pacsize /
            (codecParams->codecInstant.plfreq / 1000)) < 0)
        {
            return -1;
        }
    }
    else
    {
        // We need this for adaptive case and has to be called
        // after initialization
        ACM_ISAC_GETSENDBITRATE(
            _codecInstPtr->inst, &_isacCurrentBN);
    }
    _frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
    return 0;
}

// Initializes the iSAC decoder. Because iSAC shares one instance for
// both directions, the encoder (and with it the bandwidth estimator)
// is initialized here too if it has not been already.
// NOTE(review): when the encoder is not yet initialized this method
// MUTATES codecParams (rate and pacsize are overwritten with
// defaults) — confirm callers do not rely on those fields afterwards.
WebRtc_Word16
ACMISAC::InternalInitDecoder(
    WebRtcACMCodecParams*  codecParams)
{
    if (_codecInstPtr == NULL)
    {
        return -1;
    }

    // set decoder sampling frequency.
    if(codecParams->codecInstant.plfreq == 32000)
    {
        UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
    }
    else
    {
        UpdateDecoderSampFreq(ACMCodecDB::kISAC);
    }

    // in a one-way communication we may never register send-codec.
    // However we like that the BWE to work properly so it has to
    // be initialized. The BWE is initialized when iSAC encoder is initialized.
    // Therefore, we need this.
    if(!_encoderInitialized)
    {
        // Since we don't require a valid rate or a valid packet size when initializing
        // the decoder, we set valid values before initializing encoder
        codecParams->codecInstant.rate = kIsacWbDefaultRate;
        codecParams->codecInstant.pacsize = kIsacPacSize960;
        if(InternalInitEncoder(codecParams) < 0)
        {
            return -1;
        }
        _encoderInitialized = true;
    }

    return ACM_ISAC_DECODERINIT(_codecInstPtr->inst);
}
+
+
+WebRtc_Word16
+ACMISAC::InternalCreateDecoder()
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    WebRtc_Word16 status = ACM_ISAC_CREATE (&(_codecInstPtr->inst));
+
+    // specific to codecs with one instance for encoding and decoding
+    _encoderInitialized = false;
+    if(status < 0)
+    {
+        _encoderExist = false;
+    }
+    else
+    {
+        _encoderExist = true;
+    }
+    return status;
+}
+
+
// Only clears the initialized flag: the shared encoder/decoder
// instance is owned jointly, so it cannot be freed from here.
void
ACMISAC::DestructDecoderSafe()
{
    // codec with shared instance cannot delete.
    _decoderInitialized = false;
    return;
}
+
+
+WebRtc_Word16
+ACMISAC::InternalCreateEncoder()
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    WebRtc_Word16 status = ACM_ISAC_CREATE(&(_codecInstPtr->inst));
+
+    // specific to codecs with one instance for encoding and decoding
+    _decoderInitialized = false;
+    if(status < 0)
+    {
+        _decoderExist = false;
+    }
+    else
+    {
+        _decoderExist = true;
+    }
+    return status;
+}
+
+
// Only clears the initialized flag: the shared encoder/decoder
// instance is owned jointly, so it cannot be freed from here.
void
ACMISAC::DestructEncoderSafe()
{
    // codec with shared instance cannot delete.
    _encoderInitialized = false;
    return;
}


// Fills in the NetEQ codec definition for iSAC: wideband (16 kHz) for
// either build flavor; super-wideband (32 kHz) only with the
// floating-point codec.
// Returns 0 on success, -1 when the decoder is not ready or SWB is
// requested from the fixed-point build.
WebRtc_Word32
ACMISAC::CodecDef(
    WebRtcNetEQ_CodecDef& codecDef,
    const CodecInst&      codecInst)
{
    // Sanity checks
    if (_codecInstPtr == NULL)
    {
        return -1;
    }
    if (!_decoderInitialized || !_decoderExist)
    {
        // Todo:
        // log error
        return -1;
    }
    // Fill up the structure by calling
    // "SET_CODEC_PAR" & "SET_ISAC_FUNCTION."
    // Then call NetEQ to add the codec to it's
    // database.
    if(codecInst.plfreq == 16000)
    {
        SET_CODEC_PAR((codecDef), kDecoderISAC, codecInst.pltype,
            _codecInstPtr->inst, 16000);
#ifdef WEBRTC_CODEC_ISAC
        SET_ISAC_FUNCTIONS((codecDef));
#else
        SET_ISACfix_FUNCTIONS((codecDef));
#endif
    }
    else
    {
#ifdef WEBRTC_CODEC_ISAC
        SET_CODEC_PAR((codecDef), kDecoderISACswb, codecInst.pltype,
            _codecInstPtr->inst, 32000);
        SET_ISACSWB_FUNCTIONS((codecDef));
#else
        // iSACFIX cannot decode super-wideband.
        return -1;
#endif
    }

    return 0;
}
+
+
+void
+ACMISAC::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        ACM_ISAC_FREE((ACM_ISAC_STRUCT *)ptrInst);
+    }
+    return;
+}
+
+// Re-encodes the last encoded frame into a (lower-rate) bit-stream at
+// the given target rate, optionally as a RED payload. On success returns
+// the payload length in bytes (also written to *bitStreamLenByte); on
+// failure sets *bitStreamLenByte to 0 and returns -1.
+// NOTE(review): bitStream is cast to WebRtc_Word16* for the iSAC API —
+// presumably callers guarantee 16-bit alignment; verify.
+WebRtc_Word16
+ACMISAC::Transcode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte,
+    WebRtc_Word16  qBWE,
+    WebRtc_Word32  rate,
+    bool           isRED)
+{
+    WebRtc_Word16 jitterInfo = 0;
+    // transcode from a higher rate to lower rate
+    // sanity check
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+
+    *bitStreamLenByte = ACM_ISAC_GETNEWBITSTREAM(_codecInstPtr->inst,
+        qBWE, jitterInfo, rate, (WebRtc_Word16*)bitStream, (isRED)? 1:0);
+
+    if(*bitStreamLenByte < 0)
+    {
+        // error happened
+        *bitStreamLenByte = 0;
+        return -1;
+    }
+    else
+    {
+        return *bitStreamLenByte;
+    }
+}
+
+// Sets the iSAC target bit-rate. bitRate == -1 selects ADAPTIVE
+// (channel-adaptive) mode; a value in [ISAC_MIN_RATE, ISAC_MAX_RATE]
+// selects CHANNEL_INDEPENDENT mode with that bottleneck. A mode change
+// triggers encoder re-initialization. Returns 0 on success, -1 on an
+// invalid rate or a failed encoder call.
+WebRtc_Word16
+ACMISAC::SetBitRateSafe(
+    WebRtc_Word32 bitRate)
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    WebRtc_UWord16 encoderSampFreq;
+    EncoderSampFreq(encoderSampFreq);
+    bool reinit = false;
+    // change the BN of iSAC
+    if(bitRate == -1)
+    {
+        // ADAPTIVE MODE
+        // Check if it was already in adaptive mode
+        if(_isacCodingMode != ADAPTIVE)
+        {
+            // was not in adaptive, then set the mode to adaptive
+            // and flag for re-initialization
+            _isacCodingMode = ADAPTIVE;
+            reinit = true;
+        }
+    }
+    // Sanity check if the rate valid
+    else if((bitRate >= ISAC_MIN_RATE) &&
+        (bitRate <= ISAC_MAX_RATE))
+    {
+        //check if it was in channel-independent mode before
+        if(_isacCodingMode != CHANNEL_INDEPENDENT)
+        {
+            // was not in channel independent, set the mode to
+            // channel-independent and flag for re-initialization
+            _isacCodingMode = CHANNEL_INDEPENDENT;
+            reinit = true;
+        }
+        // store the bottleneck
+        _isacCurrentBN = (WebRtc_UWord16)bitRate;
+    }
+    else
+    {
+        // invalid rate
+        return -1;
+    }
+
+    WebRtc_Word16 status = 0;
+    if(reinit)
+    {
+        // initialize and check if it is successful
+        if(ACM_ISAC_ENCODERINIT(_codecInstPtr->inst, _isacCodingMode) < 0)
+        {
+            // failed initialization
+            return -1;
+        }
+    }
+    if(_isacCodingMode == CHANNEL_INDEPENDENT)
+    {
+
+        // Frame size argument is in ms: 30 ms at 32 kHz, otherwise
+        // _frameLenSmpl / 16 converts 16 kHz samples to milliseconds.
+        status = ACM_ISAC_CONTROL(_codecInstPtr->inst, _isacCurrentBN,
+            (encoderSampFreq == 32000)? 30:(_frameLenSmpl / 16));
+        if(status < 0)
+        {
+            status = -1;
+        }
+    }
+
+    // Update encoder parameters
+    _encoderParams.codecInstant.rate = bitRate;
+
+    UpdateFrameLen();
+    return status;
+}
+
+
+// Returns the bandwidth estimate, in bits/sec, that iSAC has derived for
+// the down-link, by mapping the estimator's index through the rate table
+// matching the decoder sampling rate. Returns -1 on error.
+WebRtc_Word32
+ACMISAC::GetEstimatedBandwidthSafe()
+{
+    WebRtc_Word16 bandwidthIndex;
+    WebRtc_Word16 delayIndex;
+    IsacSamplingRate sampRate;
+
+    // Sanity check, consistent with the other ACMISAC methods; the
+    // original dereferenced _codecInstPtr without checking.
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+
+    // Get bandwidth information
+    ACM_ISAC_GETSENDBWE(_codecInstPtr->inst, &bandwidthIndex, &delayIndex);
+
+    // Validity check of index
+    if ((bandwidthIndex < 0) || (bandwidthIndex >= NR_ISAC_BANDWIDTHS))
+    {
+        return -1;
+    }
+
+    // Check sample frequency and pick the matching rate table.
+    sampRate = ACM_ISAC_GETDECSAMPRATE(_codecInstPtr->inst);
+    if(sampRate == kIsacWideband)
+    {
+        return isacRatesWB[bandwidthIndex];
+    }
+    else
+    {
+        return isacRatesSWB[bandwidthIndex];
+    }
+}
+
+// Feeds an externally estimated bandwidth (bits/sec) back into iSAC's
+// bandwidth estimator by locating the nearest index in the rate table
+// that matches the encoder sampling rate. Returns 0 on success, -1 on
+// error.
+WebRtc_Word32
+ACMISAC::SetEstimatedBandwidthSafe(
+    WebRtc_Word32 estimatedBandwidth)
+{
+    IsacSamplingRate sampRate;
+    WebRtc_Word16 bandwidthIndex;
+
+    // Sanity check, consistent with the other ACMISAC methods; the
+    // original dereferenced _codecInstPtr without checking.
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+
+    // Check sample frequency and choose appropriate table
+    sampRate = ACM_ISAC_GETENCSAMPRATE(_codecInstPtr->inst);
+
+    if(sampRate == kIsacWideband)
+    {
+        // Search through the WB rate table to find the index. The WB
+        // table is laid out as two halves of NR_ISAC_BANDWIDTHS/2
+        // entries each; both halves are probed per iteration.
+        bandwidthIndex = NR_ISAC_BANDWIDTHS/2 - 1;
+        for (int i=0; i<(NR_ISAC_BANDWIDTHS/2); i++)
+        {
+            if (estimatedBandwidth == isacRatesWB[i])
+            {
+                bandwidthIndex = i;
+                break;
+            } else if (estimatedBandwidth == isacRatesWB[i+NR_ISAC_BANDWIDTHS/2])
+            {
+                bandwidthIndex = i + NR_ISAC_BANDWIDTHS/2;
+                break;
+            } else if (estimatedBandwidth < isacRatesWB[i])
+            {
+                bandwidthIndex = i;
+                break;
+             }
+        }
+    }
+    else
+    {
+        // Search through the SWB rate table to find the first entry at
+        // or above the estimate.
+        bandwidthIndex = NR_ISAC_BANDWIDTHS - 1;
+        for (int i=0; i<NR_ISAC_BANDWIDTHS; i++)
+        {
+            if(estimatedBandwidth <= isacRatesSWB[i])
+            {
+                bandwidthIndex = i;
+                break;
+            }
+        }
+    }
+
+    // Set iSAC Bandwidth Estimate
+    ACM_ISAC_SETBWE(_codecInstPtr->inst, bandwidthIndex);
+
+    return 0;
+}
+
+// Retrieves the redundant (RED) payload for the most recent frame.
+// Note the preprocessor split of the PARAMETER LIST: without the
+// floating-point iSAC build the parameters are unused and the method
+// simply fails with -1. Otherwise writes the payload into redPayload,
+// stores its length in *payloadBytes, and returns 0 (-1 on error).
+WebRtc_Word32
+ACMISAC::GetRedPayloadSafe(
+#if (!defined(WEBRTC_CODEC_ISAC))
+    WebRtc_UWord8* /* redPayload   */,
+    WebRtc_Word16* /* payloadBytes */)
+{
+    return -1;
+#else
+    WebRtc_UWord8* redPayload,
+    WebRtc_Word16* payloadBytes)
+{
+
+    WebRtc_Word16 bytes = WebRtcIsac_GetRedPayload(_codecInstPtr->inst, (WebRtc_Word16*)redPayload);
+    if (bytes < 0)
+    {
+        return -1;
+    }
+    *payloadBytes = bytes;
+    return 0;
+#endif
+}
+
+
+// Removes the iSAC decoder (wideband or super-wideband, selected by
+// which stored payload type matches) from the given NetEQ instance.
+// Returns NetEQ's RemoveCodec result, or -1 with a trace if the payload
+// type matches neither stored decoder parameter set.
+WebRtc_Word16
+ACMISAC::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType == _decoderParams.codecInstant.pltype)
+    {
+        return netEq->RemoveCodec(kDecoderISAC);
+    }
+    else if(payloadType == _decoderParams32kHz.codecInstant.pltype)
+    {
+        return netEq->RemoveCodec(kDecoderISACswb);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d or %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype,
+            _decoderParams32kHz.codecInstant.pltype);
+
+        return -1;
+    }
+}
+
+
+// Switches the iSAC decoder sampling rate according to the codec id
+// (kISAC -> 16 kHz, kISACSWB -> 32 kHz). Note the preprocessor split of
+// the PARAMETER LIST: in fixed-point builds there is nothing to switch
+// and the method is a no-op returning 0.
+WebRtc_Word16
+ACMISAC::UpdateDecoderSampFreq(
+#ifdef WEBRTC_CODEC_ISAC
+    WebRtc_Word16 codecId)
+{
+    if(ACMCodecDB::kISAC == codecId)
+    {
+        return WebRtcIsac_SetDecSampRate(_codecInstPtr->inst, kIsacWideband);
+    }
+    else if(ACMCodecDB::kISACSWB == codecId)
+    {
+        return WebRtcIsac_SetDecSampRate(_codecInstPtr->inst, kIsacSuperWideband);
+    }
+    else
+    {
+        // Unknown codec id for iSAC.
+        return -1;
+    }
+
+#else
+    WebRtc_Word16 /* codecId */)
+{
+    return 0;
+#endif
+}
+
+
+// Switches the iSAC encoder sampling rate (16 kHz or 32 kHz), resetting
+// the input-audio buffer indices and refreshing the frame length and
+// encoder parameters. A no-op when the rate is unchanged, and (via the
+// preprocessor-split parameter list) in fixed-point builds. Returns 0 on
+// success, -1 on an unsupported rate or a failed iSAC call.
+WebRtc_Word16
+ACMISAC::UpdateEncoderSampFreq(
+#ifdef WEBRTC_CODEC_ISAC
+    WebRtc_UWord16 encoderSampFreqHz)
+{
+    WebRtc_UWord16 currentSampRateHz;
+    EncoderSampFreq(currentSampRateHz);
+
+    if(currentSampRateHz != encoderSampFreqHz)
+    {
+        if((encoderSampFreqHz != 16000) && (encoderSampFreqHz != 32000))
+        {
+            return -1;
+        }
+        else
+        {
+            // Rate change invalidates any buffered input audio.
+            _inAudioIxRead = 0;
+            _inAudioIxWrite = 0;
+            _inTimestampIxWrite = 0;
+            if(encoderSampFreqHz == 16000)
+            {
+                if(WebRtcIsac_SetEncSampRate(_codecInstPtr->inst, kIsacWideband) < 0)
+                {
+                    return -1;
+                }
+                _samplesIn10MsAudio = 160;
+            }
+            else
+            {
+
+                if(WebRtcIsac_SetEncSampRate(_codecInstPtr->inst, kIsacSuperWideband) < 0)
+                {
+                    return -1;
+                }
+                _samplesIn10MsAudio = 320;
+            }
+            _frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
+            _encoderParams.codecInstant.pacsize = _frameLenSmpl;
+            _encoderParams.codecInstant.plfreq = encoderSampFreqHz;
+            return 0;
+        }
+    }
+#else
+    WebRtc_UWord16 /* encoderSampFreqHz */)
+{
+#endif
+    return 0;
+}
+
+// Reports the encoder's current sampling frequency in Hz through
+// sampFreqHz (32000 for super-wideband, 16000 otherwise). Always
+// returns 0.
+WebRtc_Word16
+ACMISAC::EncoderSampFreq(
+    WebRtc_UWord16& sampFreqHz)
+{
+    const IsacSamplingRate encoderRate =
+        ACM_ISAC_GETENCSAMPRATE(_codecInstPtr->inst);
+    sampFreqHz = (encoderRate == kIsacSuperWideband) ? 32000 : 16000;
+    return 0;
+}
+
+// Configures iSAC's bandwidth estimator with an initial rate and frame
+// size. Returns 0 on success, -1 on error (with a trace). On success the
+// cached frame length and current bottleneck are refreshed.
+WebRtc_Word32
+ACMISAC::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8  initFrameSizeMsec,
+    const WebRtc_UWord16 initRateBitPerSec,
+    const bool           enforceFrameSize)
+{
+    WebRtc_Word16 status;
+    {
+        WebRtc_UWord16 sampFreqHz;
+        EncoderSampFreq(sampFreqHz);
+        // @TODO: at 32kHz we hardcode calling with 30ms and enforce
+        // the frame-size otherwise we might get error. Revise if
+        // control-bwe is changed.
+        if(sampFreqHz == 32000)
+        {
+            status = ACM_ISAC_CONTROL_BWE(_codecInstPtr->inst,
+                initRateBitPerSec, 30, 1);
+        }
+        else
+        {
+            status = ACM_ISAC_CONTROL_BWE(_codecInstPtr->inst,
+                initRateBitPerSec, initFrameSizeMsec, enforceFrameSize? 1:0);
+        }
+    }
+    if(status < 0)
+    {
+        // Fixed garbled error message ("Coutn't config").
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Couldn't configure iSAC BWE.");
+        return -1;
+    }
+    UpdateFrameLen();
+    ACM_ISAC_GETSENDBITRATE(_codecInstPtr->inst, &_isacCurrentBN);
+    return 0;
+}
+
+// Caps the size, in bytes, of any payload the iSAC encoder may produce.
+// Thin wrapper around the build-selected iSAC API.
+WebRtc_Word32
+ACMISAC::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 maxPayloadLenBytes)
+{
+    return ACM_ISAC_SETMAXPAYLOADSIZE(_codecInstPtr->inst, maxPayloadLenBytes);
+}
+
+// Caps the instantaneous rate, in bits/sec, the iSAC encoder may use.
+// Thin wrapper around the build-selected iSAC API.
+WebRtc_Word32
+ACMISAC::SetISACMaxRate(
+    const WebRtc_UWord32 maxRateBitPerSec)
+{
+    return ACM_ISAC_SETMAXRATE(_codecInstPtr->inst, maxRateBitPerSec);
+}
+
+
+// Re-reads the encoder's current frame length (in samples) from iSAC
+// and mirrors it into the cached encoder parameters.
+void
+ACMISAC::UpdateFrameLen()
+{
+    _frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
+    _encoderParams.codecInstant.pacsize = _frameLenSmpl;
+}
+
+// Reports the current up-link send bit-rate, but only when running in
+// ADAPTIVE mode; in channel-independent mode rateBitPerSec is left
+// untouched.
+void
+ACMISAC::CurrentRate(WebRtc_Word32& rateBitPerSec)
+{
+    if(_isacCodingMode == ADAPTIVE)
+    {
+        ACM_ISAC_GETSENDBITRATE(_codecInstPtr->inst, &rateBitPerSec);
+    }
+}
+
+
+// Copies the stored decoder parameters matching the given payload type
+// into decParams. iSAC keeps separate parameter sets for the 16 kHz and
+// 32 kHz decoders. Returns true if the decoder is initialized and a set
+// matched, false otherwise.
+bool
+ACMISAC::DecoderParamsSafe(
+    WebRtcACMCodecParams* decParams,
+    const WebRtc_UWord8   payloadType)
+{
+    // No parameters are valid before the decoder is initialized.
+    if(!_decoderInitialized)
+    {
+        return false;
+    }
+    const WebRtcACMCodecParams* source = NULL;
+    if(payloadType == _decoderParams.codecInstant.pltype)
+    {
+        source = &_decoderParams;
+    }
+    else if(payloadType == _decoderParams32kHz.codecInstant.pltype)
+    {
+        source = &_decoderParams32kHz;
+    }
+    if(source == NULL)
+    {
+        return false;
+    }
+    memcpy(decParams, source, sizeof(WebRtcACMCodecParams));
+    return true;
+}
+
+// Stores a copy of the given decoder parameters, routed by sampling
+// frequency: the 32 kHz set is kept separately from the wideband set.
+void
+ACMISAC::SaveDecoderParamSafe(
+    const WebRtcACMCodecParams* codecParams)
+{
+    WebRtcACMCodecParams* target =
+        (codecParams->codecInstant.plfreq == 32000) ?
+            &_decoderParams32kHz : &_decoderParams;
+    memcpy(target, codecParams, sizeof(WebRtcACMCodecParams));
+}
+
+
+// Produces a RED (redundant) iSAC payload at the given rate and
+// bandwidth estimate by transcoding the last frame under the codec
+// read-lock. Returns Transcode's result: the payload length in bytes,
+// or -1 on error.
+WebRtc_Word16
+ACMISAC::REDPayloadISAC(
+    const WebRtc_Word32  isacRate,
+    const WebRtc_Word16  isacBwEstimate,
+    WebRtc_UWord8*       payload,
+    WebRtc_Word16*       payloadLenBytes)
+{
+    WebRtc_Word16 status;
+    ReadLockScoped rl(_codecWrapperLock);
+    status = Transcode(payload, payloadLenBytes, isacBwEstimate, isacRate, true);
+    return status;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_isac.h b/trunk/src/modules/audio_coding/main/source/acm_isac.h
new file mode 100644
index 0000000..6013c61
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_isac.h
@@ -0,0 +1,153 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+// Opaque holder for the iSAC codec instance (defined in the .cc file).
+struct ACMISACInst;
+
+// iSAC rate-control modes: ADAPTIVE lets iSAC track the channel;
+// CHANNEL_INDEPENDENT uses a fixed caller-supplied bottleneck.
+enum iSACCodingMode {ADAPTIVE, CHANNEL_INDEPENDENT};
+
+
+// ACM wrapper for the iSAC codec (floating-point or fixed-point build,
+// selected via the ACM_ISAC_* macros). iSAC uses a single shared
+// instance for both encoding and decoding, which is why several of the
+// create/destruct overrides deviate from the generic codec behavior.
+class ACMISAC : public ACMGenericCodec
+{
+public:
+    ACMISAC(WebRtc_Word16 codecID);
+    ~ACMISAC();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+    // iSAC-specific variant of DeliverCachedData carrying the target
+    // rate and bandwidth estimate.
+    WebRtc_Word16 DeliverCachedIsacData(
+        WebRtc_UWord8*         bitStream,
+        WebRtc_Word16*         bitStreamLenByte,
+        WebRtc_UWord32*        timestamp,
+        WebRtcACMEncodingType* encodingType,
+        const WebRtc_UWord16   isacRate,
+        const WebRtc_UWord8    isacBWestimate);
+
+    // The generic entry point is unsupported for iSAC; callers must use
+    // DeliverCachedIsacData instead.
+    WebRtc_Word16 DeliverCachedData(
+        WebRtc_UWord8*         /* bitStream        */,
+        WebRtc_Word16*         /* bitStreamLenByte */,
+        WebRtc_UWord32*        /* timestamp        */,
+        WebRtcACMEncodingType* /* encodingType     */)
+    {
+        return -1;
+    }
+
+    WebRtc_Word16 UpdateDecoderSampFreq(
+        WebRtc_Word16 codecId);
+
+    WebRtc_Word16 UpdateEncoderSampFreq(
+        WebRtc_UWord16 sampFreqHz);
+
+    WebRtc_Word16 EncoderSampFreq(
+        WebRtc_UWord16& sampFreqHz);
+
+    WebRtc_Word32 ConfigISACBandwidthEstimator(
+        const WebRtc_UWord8  initFrameSizeMsec,
+        const WebRtc_UWord16 initRateBitPerSec,
+        const bool           enforceFrameSize);
+
+    WebRtc_Word32 SetISACMaxPayloadSize(
+        const WebRtc_UWord16 maxPayloadLenBytes);
+
+    WebRtc_Word32 SetISACMaxRate(
+        const WebRtc_UWord32 maxRateBitPerSec);
+
+    WebRtc_Word16 REDPayloadISAC(
+        const WebRtc_Word32  isacRate,
+        const WebRtc_Word16  isacBwEstimate,
+        WebRtc_UWord8*       payload,
+        WebRtc_Word16*       payloadLenBytes);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 bitRate);
+
+    WebRtc_Word32 GetEstimatedBandwidthSafe();
+
+    WebRtc_Word32 SetEstimatedBandwidthSafe(WebRtc_Word32 estimatedBandwidth);
+
+    WebRtc_Word32 GetRedPayloadSafe(
+        WebRtc_UWord8* redPayload,
+        WebRtc_Word16* payloadBytes);
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    // Re-encodes the last frame at a (lower) target rate, optionally as
+    // a RED payload.
+    WebRtc_Word16 Transcode(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16* bitStreamLenByte,
+        WebRtc_Word16  qBWE,
+        WebRtc_Word32  rate,
+        bool           isRED);
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*     netEq,
+        WebRtc_Word16 payloadType);
+
+    void CurrentRate(WebRtc_Word32& rateBitPerSec);
+
+    void UpdateFrameLen();
+
+    bool DecoderParamsSafe(
+        WebRtcACMCodecParams *decParams,
+        const WebRtc_UWord8  payloadType);
+
+    void SaveDecoderParamSafe(
+        const WebRtcACMCodecParams* codecParams);
+
+    // Shared encoder/decoder iSAC instance.
+    ACMISACInst* _codecInstPtr;
+
+    bool                  _isEncInitialized;
+    // Current rate-control mode (adaptive vs. channel-independent).
+    iSACCodingMode        _isacCodingMode;
+    bool                  _enforceFrameSize;
+    // Current bottleneck in bits/sec.
+    WebRtc_Word32         _isacCurrentBN;
+    WebRtc_UWord16        _samplesIn10MsAudio;
+    // Separate decoder parameter set for the 32 kHz (super-wideband)
+    // decoder; the 16 kHz set lives in the base class's _decoderParams.
+    WebRtcACMCodecParams  _decoderParams32kHz;
+};
+
+} //namespace
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_isac_macros.h b/trunk/src/modules/audio_coding/main/source/acm_isac_macros.h
new file mode 100644
index 0000000..9e5f55f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_isac_macros.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
+
+#include "engine_configurations.h"
+
+namespace webrtc
+{
+
+// Generic ACM_ISAC_* aliases let the rest of the ACM code call iSAC
+// without caring whether the floating-point (WebRtcIsac_*) or the
+// fixed-point (WebRtcIsacfix_*) implementation is compiled in.
+// NOTE(review): if both WEBRTC_CODEC_ISAC and WEBRTC_CODEC_ISACFX are
+// defined, the second block redefines every macro — presumably the build
+// defines at most one of the two; verify in the gyp configuration.
+#ifdef WEBRTC_CODEC_ISAC
+#   define ACM_ISAC_CREATE            WebRtcIsac_Create
+#   define ACM_ISAC_FREE              WebRtcIsac_Free
+#   define ACM_ISAC_ENCODERINIT       WebRtcIsac_EncoderInit
+#   define ACM_ISAC_ENCODE            WebRtcIsac_Encode
+#   define ACM_ISAC_DECODERINIT       WebRtcIsac_DecoderInit
+#   define ACM_ISAC_DECODE_BWE        WebRtcIsac_UpdateBwEstimate
+#   define ACM_ISAC_DECODE_B          WebRtcIsac_Decode
+#   define ACM_ISAC_DECODEPLC         WebRtcIsac_DecodePlc
+#   define ACM_ISAC_CONTROL           WebRtcIsac_Control
+#   define ACM_ISAC_CONTROL_BWE       WebRtcIsac_ControlBwe
+#   define ACM_ISAC_GETFRAMELEN       WebRtcIsac_ReadFrameLen
+#   define ACM_ISAC_VERSION           WebRtcIsac_version
+#   define ACM_ISAC_GETERRORCODE      WebRtcIsac_GetErrorCode
+#   define ACM_ISAC_GETSENDBITRATE    WebRtcIsac_GetUplinkBw
+#   define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsac_SetMaxPayloadSize
+#   define ACM_ISAC_SETMAXRATE        WebRtcIsac_SetMaxRate
+#   define ACM_ISAC_GETNEWBITSTREAM   WebRtcIsac_GetNewBitStream
+#   define ACM_ISAC_GETSENDBWE        WebRtcIsac_GetDownLinkBwIndex
+#   define ACM_ISAC_SETBWE            WebRtcIsac_UpdateUplinkBw
+#   define ACM_ISAC_GETBWE            WebRtcIsac_ReadBwIndex
+#   define ACM_ISAC_GETNEWFRAMELEN    WebRtcIsac_GetNewFrameLen
+#   define ACM_ISAC_STRUCT            ISACStruct
+#   define ACM_ISAC_GETENCSAMPRATE    WebRtcIsac_EncSampRate
+#   define ACM_ISAC_GETDECSAMPRATE    WebRtcIsac_DecSampRate
+#endif
+
+// Fixed-point build. Entries marked "local Impl" are adapter functions
+// defined in the ACM sources rather than direct iSACfix API calls.
+#ifdef WEBRTC_CODEC_ISACFX
+#   define ACM_ISAC_CREATE            WebRtcIsacfix_Create
+#   define ACM_ISAC_FREE              WebRtcIsacfix_Free
+#   define ACM_ISAC_ENCODERINIT       WebRtcIsacfix_EncoderInit
+#   define ACM_ISAC_ENCODE            WebRtcIsacfix_Encode
+#   define ACM_ISAC_DECODERINIT       WebRtcIsacfix_DecoderInit
+#   define ACM_ISAC_DECODE_BWE        WebRtcIsacfix_UpdateBwEstimate
+#   define ACM_ISAC_DECODE_B          WebRtcIsacfix_Decode
+#   define ACM_ISAC_DECODEPLC         WebRtcIsacfix_DecodePlc
+#   define ACM_ISAC_CONTROL           ACMISACFixControl         // local Impl
+#   define ACM_ISAC_CONTROL_BWE       ACMISACFixControlBWE      // local Impl
+#   define ACM_ISAC_GETFRAMELEN       WebRtcIsacfix_ReadFrameLen
+#   define ACM_ISAC_VERSION           WebRtcIsacfix_version
+#   define ACM_ISAC_GETERRORCODE      WebRtcIsacfix_GetErrorCode
+#   define ACM_ISAC_GETSENDBITRATE    ACMISACFixGetSendBitrate   // local Impl
+#   define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsacfix_SetMaxPayloadSize
+#   define ACM_ISAC_SETMAXRATE        WebRtcIsacfix_SetMaxRate
+#   define ACM_ISAC_GETNEWBITSTREAM   ACMISACFixGetNewBitstream  // local Impl
+#   define ACM_ISAC_GETSENDBWE        ACMISACFixGetSendBWE       // local Impl
+#   define ACM_ISAC_SETBWE            WebRtcIsacfix_UpdateUplinkBw
+#   define ACM_ISAC_GETBWE            WebRtcIsacfix_ReadBwIndex
+#   define ACM_ISAC_GETNEWFRAMELEN    WebRtcIsacfix_GetNewFrameLen
+#   define ACM_ISAC_STRUCT            ISACFIX_MainStruct
+#   define ACM_ISAC_GETENCSAMPRATE    ACMISACFixGetEncSampRate   // local Impl
+#   define ACM_ISAC_GETDECSAMPRATE    ACMISACFixGetDecSampRate   // local Impl
+#endif
+
+} //namespace
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
+
diff --git a/trunk/src/modules/audio_coding/main/source/acm_neteq.cc b/trunk/src/modules/audio_coding/main/source/acm_neteq.cc
new file mode 100644
index 0000000..be25918
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_neteq.cc
@@ -0,0 +1,1270 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <algorithm>  // sort
+#include <stdlib.h>  // malloc
+#include <vector>
+
+#include "acm_neteq.h"
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "signal_processing_library.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+
+namespace webrtc
+{
+
+#define RTP_HEADER_SIZE 12
+#define NETEQ_INIT_FREQ 8000
+#define NETEQ_INIT_FREQ_KHZ (NETEQ_INIT_FREQ/1000)
+#define NETEQ_ERR_MSG_LEN_BYTE (WEBRTC_NETEQ_MAX_ERROR_NAME + 1)
+
+
+// Constructs an ACMNetEQ with default (8 kHz, voice-mode, VAD-off)
+// settings and clears the per-instance slots for the master NetEQ plus
+// all possible slaves.
+ACMNetEQ::ACMNetEQ()
+:
+_id(0),
+_currentSampFreqKHz(NETEQ_INIT_FREQ_KHZ),
+_avtPlayout(false),
+_playoutMode(voice),
+_netEqCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_vadStatus(false),
+_vadMode(VADNormal),
+_decodeLock(RWLockWrapper::CreateRWLock()),
+_numSlaves(0),
+_receivedStereo(false),
+_masterSlaveInfo(NULL),
+_previousAudioActivity(AudioFrame::kVadUnknown),
+_extraDelay(0),
+_callbackCritSect(CriticalSectionWrapper::CreateCriticalSection())
+{
+    // Slot 0 is the master instance; slots 1..MAX_NUM_SLAVE_NETEQ are
+    // slaves (used for multi-channel playout).
+    for(int n = 0; n < MAX_NUM_SLAVE_NETEQ + 1; n++)
+    {
+        _isInitialized[n]     = false;
+        _ptrVADInst[n]        = NULL;
+        _inst[n]              = NULL;
+        _instMem[n]           = NULL;
+        _netEqPacketBuffer[n] = NULL;
+    }
+}
+
+// Releases, under the NetEQ lock, all per-instance resources (NetEQ
+// memory, packet buffers, VAD instances) for the master and every
+// active slave, then deletes the synchronization primitives.
+ACMNetEQ::~ACMNetEQ()
+{
+    {
+        CriticalSectionScoped lock(*_netEqCritSect);
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if (_instMem[idx] != NULL)
+            {
+                free(_instMem[idx]);
+                _instMem[idx] = NULL;
+            }
+            if (_netEqPacketBuffer[idx] != NULL)
+            {
+                free(_netEqPacketBuffer[idx]);
+                _netEqPacketBuffer[idx] = NULL;
+            }
+            if(_ptrVADInst[idx] != NULL)
+            {
+                WebRtcVad_Free(_ptrVADInst[idx]);
+                _ptrVADInst[idx] = NULL;
+            }
+        }
+        if(_masterSlaveInfo != NULL)
+        {
+            free(_masterSlaveInfo);
+            _masterSlaveInfo = NULL;
+        }
+    }
+    // The lock itself must be destroyed after the scoped lock above has
+    // been released.
+    if(_netEqCritSect != NULL)
+    {
+        delete _netEqCritSect;
+    }
+
+    if(_decodeLock != NULL)
+    {
+        delete _decodeLock;
+    }
+
+    if(_callbackCritSect != NULL)
+    {
+        delete _callbackCritSect;
+    }
+}
+
+// (Re-)initializes the master NetEQ instance and every slave, recreating
+// VAD instances as needed. On a VAD failure the NetEQ instance being set
+// up is torn down again. Returns 0 on success, -1 on any failure.
+WebRtc_Word32
+ACMNetEQ::Init()
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(InitByIdxSafe(idx) < 0)
+        {
+            return -1;
+        }
+        // delete VAD instance and start fresh if required.
+        if(_ptrVADInst[idx] != NULL)
+        {
+            WebRtcVad_Free(_ptrVADInst[idx]);
+            _ptrVADInst[idx] = NULL;
+        }
+        if(_vadStatus)
+        {
+            // Has to enable VAD
+            if(EnableVADByIdxSafe(idx) < 0)
+            {
+                // Failed to enable VAD.
+                // Delete VAD instance, if it is created
+                if(_ptrVADInst[idx] != NULL)
+                {
+                    WebRtcVad_Free(_ptrVADInst[idx]);
+                    _ptrVADInst[idx] = NULL;
+                }
+                // We are at initialization of NetEq, if failed to
+                // enable VAD, we delete the NetEq instance.
+                if (_instMem[idx] != NULL) {
+                    free(_instMem[idx]);
+                    _instMem[idx] = NULL;
+                    _inst[idx] = NULL;
+                }
+                _isInitialized[idx] = false;
+                return -1;
+            }
+        }
+        _isInitialized[idx] = true;
+    }
+    // NOTE(review): EnableVAD() is called unconditionally even when
+    // _vadStatus is false — presumably it checks the flag internally;
+    // verify against its definition elsewhere in this file.
+    if (EnableVAD() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+// Allocates and initializes the NetEQ instance in slot idx at the
+// default 8 kHz rate, freeing any previous allocation. Caller must hold
+// _netEqCritSect. Returns 0 on success, -1 on failure (slot is marked
+// uninitialized and its memory released).
+WebRtc_Word16
+ACMNetEQ::InitByIdxSafe(
+    const WebRtc_Word16 idx)
+{
+    int memorySizeBytes;
+    if (WebRtcNetEQ_AssignSize(&memorySizeBytes) != 0)
+    {
+        LogError("AssignSize", idx);
+        return -1;
+    }
+
+    // Replace any existing allocation for this slot.
+    if(_instMem[idx] != NULL)
+    {
+        free(_instMem[idx]);
+        _instMem[idx] = NULL;
+    }
+    _instMem[idx] = malloc(memorySizeBytes);
+    if (_instMem[idx] == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitByIdxSafe: NetEq Initialization error: could not allocate memory for NetEq");
+        _isInitialized[idx] = false;
+        return -1;
+    }
+    if (WebRtcNetEQ_Assign(&_inst[idx], _instMem[idx]) != 0)
+    {
+        if (_instMem[idx] != NULL) {
+            free(_instMem[idx]);
+            _instMem[idx] = NULL;
+        }
+        LogError("Assign", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitByIdxSafe: NetEq Initialization error: could not Assign");
+        _isInitialized[idx] = false;
+        return -1;
+    }
+    if (WebRtcNetEQ_Init(_inst[idx], NETEQ_INIT_FREQ) != 0)
+    {
+        if (_instMem[idx] != NULL) {
+            free(_instMem[idx]);
+            _instMem[idx] = NULL;
+        }
+        LogError("Init", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitByIdxSafe: NetEq Initialization error: could not initialize NetEq");
+        _isInitialized[idx] = false;
+        return -1;
+    }
+    _isInitialized[idx] = true;
+    return 0;
+}
+
+// Creates (if needed) a VAD instance for slot idx and registers it with
+// that slot's NetEQ, applying the current _vadMode. Caller must hold
+// _netEqCritSect. Returns 0 on success, -1 on failure; on failure a
+// created VAD instance is left for the caller (Init) to free.
+WebRtc_Word16
+ACMNetEQ::EnableVADByIdxSafe(
+    const WebRtc_Word16 idx)
+{
+    if(_ptrVADInst[idx] == NULL)
+    {
+        if(WebRtcVad_Create(&_ptrVADInst[idx]) < 0)
+        {
+            _ptrVADInst[idx] = NULL;
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "EnableVADByIdxSafe: NetEq Initialization error: could not create VAD");
+            return -1;
+        }
+    }
+
+    // Hand NetEQ the VAD entry points so it can run VAD internally.
+    if(WebRtcNetEQ_SetVADInstance(_inst[idx], _ptrVADInst[idx],
+        (WebRtcNetEQ_VADInitFunction)    WebRtcVad_Init,
+        (WebRtcNetEQ_VADSetmodeFunction) WebRtcVad_set_mode,
+        (WebRtcNetEQ_VADFunction)        WebRtcVad_Process) < 0)
+    {
+       LogError("setVADinstance", idx);
+       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+           "EnableVADByIdxSafe: NetEq Initialization error: could not set VAD instance");
+        return -1;
+    }
+
+    if(WebRtcNetEQ_SetVADMode(_inst[idx], _vadMode) < 0)
+    {
+        LogError("setVADmode", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "EnableVADByIdxSafe: NetEq Initialization error: could not set VAD mode");
+        return -1;
+    }
+    return 0;
+}
+
+
+
+
+// Allocates NetEQ packet buffers, sized for the given codec set, for the
+// master instance and every slave. Returns 0 on success, -1 on failure.
+WebRtc_Word32
+ACMNetEQ::AllocatePacketBuffer(
+    const WebRtcNetEQDecoder* usedCodecs,
+    WebRtc_Word16     noOfCodecs)
+{
+    // Due to WebRtcNetEQ_GetRecommendedBufferSize
+    // the following has to be int otherwise we will have compiler error
+    // if not casted
+
+    CriticalSectionScoped lock(*_netEqCritSect);
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(AllocatePacketBufferByIdxSafe(usedCodecs, noOfCodecs, idx) < 0)
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Sizes and assigns the packet buffer for the NetEQ instance in slot
+// idx, using NetEQ's recommended size for the codec set (kTCPLargeJitter
+// profile). Replaces any previous buffer. Caller must hold
+// _netEqCritSect. Returns 0 on success, -1 on failure.
+WebRtc_Word16
+ACMNetEQ::AllocatePacketBufferByIdxSafe(
+    const WebRtcNetEQDecoder*    usedCodecs,
+    WebRtc_Word16       noOfCodecs,
+    const WebRtc_Word16 idx)
+{
+    int maxNoPackets;
+    int bufferSizeInBytes;
+
+    if(!_isInitialized[idx])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "AllocatePacketBufferByIdxSafe: NetEq is not initialized.");
+        return -1;
+    }
+    if (WebRtcNetEQ_GetRecommendedBufferSize(_inst[idx], usedCodecs, noOfCodecs,
+        kTCPLargeJitter , &maxNoPackets, &bufferSizeInBytes)
+        != 0)
+    {
+        LogError("GetRecommendedBufferSize", idx);
+        return -1;
+    }
+    if(_netEqPacketBuffer[idx] != NULL)
+    {
+        free(_netEqPacketBuffer[idx]);
+        _netEqPacketBuffer[idx] = NULL;
+    }
+
+    _netEqPacketBuffer[idx] = (WebRtc_Word16 *)malloc(bufferSizeInBytes);
+    if (_netEqPacketBuffer[idx] == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "AllocatePacketBufferByIdxSafe: NetEq Initialization error: could not allocate "
+            "memory for NetEq Packet Buffer");
+        return -1;
+
+    }
+    if (WebRtcNetEQ_AssignBuffer(_inst[idx], maxNoPackets, _netEqPacketBuffer[idx],
+        bufferSizeInBytes) != 0)
+    {
+        if (_netEqPacketBuffer[idx] != NULL) {
+            free(_netEqPacketBuffer[idx]);
+            _netEqPacketBuffer[idx] = NULL;
+        }
+        LogError("AssignBuffer", idx);
+        return -1;
+    }
+    return 0;
+}
+
+
+
+
+// Applies an additional playout delay, in milliseconds, to the master
+// NetEQ instance and every slave, then caches it in _extraDelay.
+// Returns 0 on success, -1 if any instance is uninitialized or rejects
+// the delay.
+WebRtc_Word32
+ACMNetEQ::SetExtraDelay(
+    const WebRtc_Word32 delayInMS)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+
+    const WebRtc_Word16 numInstances = _numSlaves + 1;
+    for(WebRtc_Word16 i = 0; i < numInstances; i++)
+    {
+        if(!_isInitialized[i])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "SetExtraDelay: NetEq is not initialized.");
+            return -1;
+        }
+        if(WebRtcNetEQ_SetExtraDelay(_inst[i], delayInMS) < 0)
+        {
+            LogError("SetExtraDelay", i);
+            return -1;
+        }
+    }
+    _extraDelay = delayInMS;
+    return 0;
+}
+
+
+// Enables or disables AVT (DTMF tone) playout on the master instance
+// and all slaves; a no-op call (state unchanged) skips the NetEQ calls
+// but still records the flag. Returns 0 on success, -1 on failure.
+WebRtc_Word32
+ACMNetEQ::SetAVTPlayout(
+    const bool enable)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if (_avtPlayout != enable)
+    {
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if(!_isInitialized[idx])
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "SetAVTPlayout: NetEq is not initialized.");
+                return -1;
+            }
+            if(WebRtcNetEQ_SetAVTPlayout(_inst[idx], (enable) ? 1 : 0) < 0)
+            {
+                LogError("SetAVTPlayout", idx);
+                return -1;
+            }
+        }
+    }
+    _avtPlayout = enable;
+    return 0;
+}
+
+
+// Returns whether AVT (DTMF tone) playout is currently enabled.
+bool
+ACMNetEQ::AVTPlayout() const
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    return _avtPlayout;
+}
+
+// Returns the current playout sampling frequency in Hz (the cached kHz
+// value times 1000), or -1 if the master instance is not initialized.
+WebRtc_Word32
+ACMNetEQ::CurrentSampFreqHz() const
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "CurrentSampFreqHz: NetEq is not initialized.");
+        return -1;
+    }
+    return (WebRtc_Word32)(1000*_currentSampFreqKHz);
+}
+
+
+// Maps the ACM playout mode onto NetEQ's playout mode and applies it to
+// the master instance and all slaves; a no-op when the mode is already
+// set. Returns 0 on success, -1 if any instance is uninitialized or
+// NetEQ rejects the mode.
+WebRtc_Word32
+ACMNetEQ::SetPlayoutMode(
+    const AudioPlayoutMode mode)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if(_playoutMode != mode)
+    {
+        // The mode mapping is loop-invariant; compute it once instead of
+        // once per NetEQ instance.
+        enum WebRtcNetEQPlayoutMode playoutMode = kPlayoutOff;
+        switch(mode)
+        {
+        case voice:
+            playoutMode = kPlayoutOn;
+            break;
+        case fax:
+            playoutMode = kPlayoutFax;
+            break;
+        case streaming:
+            playoutMode = kPlayoutStreaming;
+            break;
+        }
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if(!_isInitialized[idx])
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "SetPlayoutMode: NetEq is not initialized.");
+                return -1;
+            }
+            if(WebRtcNetEQ_SetPlayoutMode(_inst[idx], playoutMode) < 0)
+            {
+                LogError("SetPlayoutMode", idx);
+                return -1;
+            }
+        }
+        _playoutMode = mode;
+    }
+
+    return 0;
+}
+
+// Returns the cached playout mode, read under the NetEQ lock.
+AudioPlayoutMode
+ACMNetEQ::PlayoutMode() const
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    return _playoutMode;
+}
+
+
+// Copies NetEQ network statistics (master instance only) into 'statistics',
+// then augments them with jitter-buffer waiting-time aggregates
+// (mean/median/min/max) computed from raw per-frame waiting times.
+//
+// Return value: 0 on success,
+//              -1 if NetEQ is uninitialized or either NetEQ call fails.
+WebRtc_Word32
+ACMNetEQ::NetworkStatistics(
+    ACMNetworkStatistics* statistics) const
+{
+    WebRtcNetEQ_NetworkStatistics stats;
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "NetworkStatistics: NetEq is not initialized.");
+        return -1;
+    }
+    if(WebRtcNetEQ_GetNetworkStatistics(_inst[0], &stats) == 0)
+    {
+        // Straight field-by-field copy into the ACM-facing struct.
+        statistics->currentAccelerateRate = stats.currentAccelerateRate;
+        statistics->currentBufferSize = stats.currentBufferSize;
+        statistics->jitterPeaksFound = (stats.jitterPeaksFound > 0);
+        statistics->currentDiscardRate = stats.currentDiscardRate;
+        statistics->currentExpandRate = stats.currentExpandRate;
+        statistics->currentPacketLossRate = stats.currentPacketLossRate;
+        statistics->currentPreemptiveRate = stats.currentPreemptiveRate;
+        statistics->preferredBufferSize = stats.preferredBufferSize;
+        statistics->clockDriftPPM = stats.clockDriftPPM;
+    }
+    else
+    {
+        LogError("getNetworkStatistics", 0);
+        return -1;
+    }
+    // Fetch up to 100 raw per-frame waiting times and summarize them.
+    const int kArrayLen = 100;
+    int waiting_times[kArrayLen];
+    int waiting_times_len = WebRtcNetEQ_GetRawFrameWaitingTimes(
+        _inst[0], kArrayLen, waiting_times);
+    if (waiting_times_len > 0)
+    {
+        std::vector<int> waiting_times_vec(waiting_times,
+                                           waiting_times + waiting_times_len);
+        // Sorting gives min/max at the ends and the median in the middle.
+        std::sort(waiting_times_vec.begin(), waiting_times_vec.end());
+        size_t size = waiting_times_vec.size();
+        assert(size == static_cast<size_t>(waiting_times_len));
+        if (size % 2 == 0)
+        {
+            // Even count: median is the mean of the two middle elements.
+            statistics->medianWaitingTimeMs =
+                (waiting_times_vec[size / 2 - 1] +
+                    waiting_times_vec[size / 2]) / 2;
+        }
+        else
+        {
+            statistics->medianWaitingTimeMs = waiting_times_vec[size / 2];
+        }
+        statistics->minWaitingTimeMs = waiting_times_vec.front();
+        statistics->maxWaitingTimeMs = waiting_times_vec.back();
+        double sum = 0;
+        for (size_t i = 0; i < size; ++i) {
+          sum += waiting_times_vec[i];
+        }
+        // Truncated (not rounded) integer mean, as in the original code.
+        statistics->meanWaitingTimeMs = static_cast<int>(sum / size);
+    }
+    else if (waiting_times_len == 0)
+    {
+        // No frames observed yet; -1 signals "not available".
+        statistics->meanWaitingTimeMs = -1;
+        statistics->medianWaitingTimeMs = -1;
+        statistics->minWaitingTimeMs = -1;
+        statistics->maxWaitingTimeMs = -1;
+    }
+    else
+    {
+        LogError("getRawFrameWaitingTimes", 0);
+        return -1;
+    }
+    return 0;
+}
+
+// Pushes one incoming RTP packet into NetEQ. Channel 1 of the RTP info
+// selects the master instance, channel 2 selects the slave instance
+// (used for stereo reception).
+//
+// Input:
+//   - incomingPayload : the RTP payload bytes.
+//   - payloadLength   : payload length in bytes.
+//   - rtpInfo         : parsed RTP header plus the target channel.
+//
+// Return value: 0 on success, -1 on any error.
+WebRtc_Word32
+ACMNetEQ::RecIn(
+    const WebRtc_Word8*    incomingPayload,
+    const WebRtc_Word32    payloadLength,
+    const WebRtcRTPHeader& rtpInfo)
+{
+    // Translate the generic RTP header into NetEQ's own struct.
+    WebRtcNetEQ_RTPInfo netEqRTPInfo;
+    netEqRTPInfo.payloadType = rtpInfo.header.payloadType;
+    netEqRTPInfo.sequenceNumber = rtpInfo.header.sequenceNumber;
+    netEqRTPInfo.timeStamp = rtpInfo.header.timestamp;
+    netEqRTPInfo.SSRC = rtpInfo.header.ssrc;
+    netEqRTPInfo.markerBit = rtpInfo.header.markerBit;
+
+    CriticalSectionScoped lock(*_netEqCritSect);
+    // Down-cast the time to (32-6)-bit since we only care about
+    // the least significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms.
+    // we masked 6 most significant bits of 32-bit so we don't loose resolution
+    // when do the following multiplication.
+    const WebRtc_UWord32 nowInMs = static_cast<WebRtc_UWord32>(
+        TickTime::MillisecondTimestamp() & 0x03ffffff);
+    // Arrival time expressed in RTP-timestamp units at the current rate.
+    WebRtc_UWord32 recvTimestamp = static_cast<WebRtc_UWord32>
+        (_currentSampFreqKHz * nowInMs);
+
+    int status;
+
+    if(rtpInfo.type.Audio.channel == 1)
+    {
+        if(!_isInitialized[0])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq is not initialized.");
+            return -1;
+        }
+        // PUSH into Master
+        status = WebRtcNetEQ_RecInRTPStruct(_inst[0], &netEqRTPInfo,
+            (WebRtc_UWord8 *)incomingPayload, (WebRtc_Word16)payloadLength,
+            recvTimestamp);
+        if(status < 0)
+        {
+            LogError("RecInRTPStruct", 0);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq, error in pushing in Master");
+            return -1;
+        }
+    }
+    else if(rtpInfo.type.Audio.channel == 2)
+    {
+        if(!_isInitialized[1])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq is not initialized.");
+            return -1;
+        }
+        // PUSH into Slave
+        status = WebRtcNetEQ_RecInRTPStruct(_inst[1], &netEqRTPInfo,
+            (WebRtc_UWord8 *)incomingPayload, (WebRtc_Word16)payloadLength,
+            recvTimestamp);
+        if(status < 0)
+        {
+            LogError("RecInRTPStruct", 1);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq, error in pushing in Slave");
+            return -1;
+        }
+    }
+    else
+    {
+        // Typo fix in the trace message: "numbe" -> "number".
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq, error invalid number of channels %d \
+(1, for Master stream, and 2, for slave stream, are valid values)",
+                rtpInfo.type.Audio.channel);
+        return -1;
+    }
+
+    return 0;
+}
+
+// Pulls 10 ms of decoded audio from NetEQ into 'audioFrame'.
+// Mono reception uses the master instance only; stereo reception pulls
+// master and slave in lock-step and interleaves the two channels.
+// Also derives the frame's sample rate, VAD activity and speech type
+// from NetEQ's reported output type.
+//
+// Return value: 0 on success,
+//              -1 on a non-recoverable NetEQ error (anything other than
+//                 sample underrun, error code 2003).
+WebRtc_Word32
+ACMNetEQ::RecOut(
+    AudioFrame& audioFrame)
+{
+    enum WebRtcNetEQOutputType type;
+    WebRtc_Word16 payloadLenSample;
+    enum WebRtcNetEQOutputType typeMaster;
+    enum WebRtcNetEQOutputType typeSlave;
+
+    WebRtc_Word16 payloadLenSampleSlave;
+
+    CriticalSectionScoped lockNetEq(*_netEqCritSect);
+
+    if(!_receivedStereo)
+    {
+        if(!_isInitialized[0])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecOut: NetEq is not initialized.");
+            return -1;
+        }
+        {
+            // Hold the decode lock while NetEQ runs the codec.
+            WriteLockScoped lockCodec(*_decodeLock);
+            if(WebRtcNetEQ_RecOut(_inst[0], &(audioFrame._payloadData[0]),
+                &payloadLenSample) != 0)
+            {
+                LogError("RecOut", 0);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "RecOut: NetEq, error in pulling out for mono case");
+
+                // Check for errors that can be recovered from:
+                // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
+                int errorCode = WebRtcNetEQ_GetErrorCode(_inst[0]);
+                if(errorCode != 2003)
+                {
+                    // Cannot recover; return an error
+                    return -1;
+                }
+            }
+        }
+        WebRtcNetEQ_GetSpeechOutputType(_inst[0], &type);
+        audioFrame._audioChannel = 1;
+    }
+    else
+    {
+        if(!_isInitialized[0] || !_isInitialized[1])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecOut: NetEq is not initialized.");
+            return -1;
+        }
+        // 480 samples is enough for 10 ms at up to 48 kHz.
+        WebRtc_Word16 payloadMaster[480];
+        WebRtc_Word16 payloadSlave[480];
+        {
+            WriteLockScoped lockCodec(*_decodeLock);
+            // Master must be pulled before slave so that the shared
+            // master/slave sync state is produced, then consumed.
+            if(WebRtcNetEQ_RecOutMasterSlave(_inst[0], payloadMaster,
+                &payloadLenSample, _masterSlaveInfo, 1) != 0)
+            {
+                LogError("RecOutMasterSlave", 0);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "RecOut: NetEq, error in pulling out for master");
+
+                // Check for errors that can be recovered from:
+                // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
+                int errorCode = WebRtcNetEQ_GetErrorCode(_inst[0]);
+                if(errorCode != 2003)
+                {
+                    // Cannot recover; return an error
+                    return -1;
+                }
+            }
+            if(WebRtcNetEQ_RecOutMasterSlave(_inst[1], payloadSlave,
+                &payloadLenSampleSlave, _masterSlaveInfo, 0) != 0)
+            {
+                LogError("RecOutMasterSlave", 1);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "RecOut: NetEq, error in pulling out for slave");
+
+                // Check for errors that can be recovered from:
+                // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
+                // Bug fix: query the slave instance (the call that failed);
+                // this previously read the error code from _inst[0].
+                int errorCode = WebRtcNetEQ_GetErrorCode(_inst[1]);
+                if(errorCode != 2003)
+                {
+                    // Cannot recover; return an error
+                    return -1;
+                }
+            }
+        }
+        if(payloadLenSample != payloadLenSampleSlave)
+        {
+            // Typo fix in the trace message: "lenght" -> "length".
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+                "RecOut: mismatch between the length of the decoded \
+audio by Master (%d samples) and Slave (%d samples).",
+            payloadLenSample, payloadLenSampleSlave);
+            // Zero-pad the slave channel when it is shorter; when the
+            // slave is longer, its extra samples are dropped by the
+            // interleaving loop below.
+            if(payloadLenSample > payloadLenSampleSlave)
+            {
+                memset(&payloadSlave[payloadLenSampleSlave], 0,
+                    (payloadLenSample - payloadLenSampleSlave) * sizeof(WebRtc_Word16));
+            }
+        }
+
+        // Interleave L/R samples into the output frame.
+        for(WebRtc_Word16 n = 0; n < payloadLenSample; n++)
+        {
+            audioFrame._payloadData[n<<1]     = payloadMaster[n];
+            audioFrame._payloadData[(n<<1)+1] = payloadSlave[n];
+        }
+        audioFrame._audioChannel = 2;
+
+        WebRtcNetEQ_GetSpeechOutputType(_inst[0], &typeMaster);
+        WebRtcNetEQ_GetSpeechOutputType(_inst[1], &typeSlave);
+        // If either channel carries normal speech, report normal speech;
+        // otherwise the master's type wins.
+        if((typeMaster == kOutputNormal) ||
+            (typeSlave == kOutputNormal))
+        {
+            type = kOutputNormal;
+        }
+        else
+        {
+            type = typeMaster;
+        }
+    }
+
+    audioFrame._payloadDataLengthInSamples = static_cast<WebRtc_UWord16>(payloadLenSample);
+    // NetEq always returns 10 ms of audio.
+    _currentSampFreqKHz = static_cast<float>(audioFrame._payloadDataLengthInSamples) / 10.0f;
+    audioFrame._frequencyInHz = audioFrame._payloadDataLengthInSamples * 100;
+    if(_vadStatus)
+    {
+        // Receive-side VAD is on: map NetEQ's output type to both a VAD
+        // activity and a speech type.
+        if(type == kOutputVADPassive)
+        {
+            audioFrame._vadActivity = AudioFrame::kVadPassive;
+            audioFrame._speechType = AudioFrame::kNormalSpeech;
+        }
+        else if(type == kOutputNormal)
+        {
+            audioFrame._vadActivity = AudioFrame::kVadActive;
+            audioFrame._speechType = AudioFrame::kNormalSpeech;
+        }
+        else if(type == kOutputPLC)
+        {
+            // Packet loss concealment: keep the last known activity.
+            audioFrame._vadActivity = _previousAudioActivity;
+            audioFrame._speechType  = AudioFrame::kPLC;
+        }
+        else if(type == kOutputCNG)
+        {
+            audioFrame._vadActivity = AudioFrame::kVadPassive;
+            audioFrame._speechType  = AudioFrame::kCNG;
+        }
+        else
+        {
+            // kOutputPLCtoCNG (or any other value).
+            audioFrame._vadActivity = AudioFrame::kVadPassive;
+            audioFrame._speechType  = AudioFrame::kPLCCNG;
+        }
+    }
+    else
+    {
+        // Always return kVadUnknown when receive VAD is inactive
+        audioFrame._vadActivity = AudioFrame::kVadUnknown;
+
+        if(type == kOutputNormal)
+        {
+            audioFrame._speechType  = AudioFrame::kNormalSpeech;
+        }
+        else if(type == kOutputPLC)
+        {
+            audioFrame._speechType  = AudioFrame::kPLC;
+        }
+        else if(type == kOutputPLCtoCNG)
+        {
+            audioFrame._speechType  = AudioFrame::kPLCCNG;
+        }
+        else if(type == kOutputCNG)
+        {
+            audioFrame._speechType  = AudioFrame::kCNG;
+        }
+        else
+        {
+            // type is kOutputVADPassive which
+            // we don't expect to get if _vadStatus is false
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+                "RecOut: NetEq returned kVadPassive while _vadStatus is false.");
+            audioFrame._vadActivity = AudioFrame::kVadUnknown;
+            audioFrame._speechType  = AudioFrame::kNormalSpeech;
+        }
+    }
+    _previousAudioActivity = audioFrame._vadActivity;
+
+    return 0;
+}
+
+// When ACMGenericCodec has set the codec specific parameters in codecDef
+// it calls AddCodec() to add the new codec to the NetEQ database.
+//
+// Input:
+//   - codecDef : fully-populated NetEQ codec definition (must not be NULL).
+//   - toMaster : true adds to the master instance (index 0),
+//                false adds to the slave instance (index 1).
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word32
+ACMNetEQ::AddCodec(
+    WebRtcNetEQ_CodecDef* codecDef,
+    bool                  toMaster)
+{
+    if (codecDef == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "ACMNetEQ::AddCodec: error, codecDef is NULL");
+        return -1;
+    }
+    CriticalSectionScoped lock(*_netEqCritSect);
+
+    WebRtc_Word16 idx;
+    if(toMaster)
+    {
+        idx = 0;
+    }
+    else
+    {
+        idx = 1;
+    }
+
+    if(!_isInitialized[idx])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "ACMNetEQ::AddCodec: NetEq is not initialized.");
+        return -1;
+    }
+    if(WebRtcNetEQ_CodecDbAdd(_inst[idx], codecDef) < 0)
+    {
+        LogError("CodecDB_Add", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "ACMNetEQ::AddCodec: NetEq, error in adding codec");
+        return -1;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+// Creates a Word16 RTP packet out of a Word8 payload and an rtp info struct.
+// Must be byte order safe.
+//
+// Builds a minimal 12-byte RTP header (first byte 0x80: version 2, no
+// padding/extension/CSRC, marker bit not serialized here) followed by the
+// payload, writing multi-byte fields most-significant-byte first. Byte
+// placement within the Word16 buffer is delegated to the
+// WEBRTC_SPL_SET_BYTE/GET_BYTE macros, which handle host endianness.
+//
+// Output:
+//   - rtpPacket       : destination Word16 buffer; must hold
+//                       12 + payloadLengthW8 bytes (rounded up to even).
+// Input:
+//   - payload         : payload bytes to append after the header.
+//   - payloadLengthW8 : payload length in bytes.
+//   - rtpInfo         : header fields (PT, seq, timestamp, SSRC).
+void
+ACMNetEQ::RTPPack(
+    WebRtc_Word16*         rtpPacket,
+    const WebRtc_Word8*    payload,
+    const WebRtc_Word32    payloadLengthW8,
+    const WebRtcRTPHeader& rtpInfo)
+{
+    WebRtc_Word32 idx = 0;
+    // Byte 0: RTP version 2, no padding, no extension, CSRC count 0.
+    WEBRTC_SPL_SET_BYTE(rtpPacket, (WebRtc_Word8)0x80, idx);
+    idx++;
+
+    // Byte 1: payload type.
+    WEBRTC_SPL_SET_BYTE(rtpPacket, rtpInfo.header.payloadType, idx);
+    idx++;
+
+    // Bytes 2-3: sequence number, MSB first.
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.sequenceNumber), 1), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.sequenceNumber), 0), idx);
+    idx++;
+
+    // Bytes 4-7: timestamp, MSB first.
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 3), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 2), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 1), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 0), idx);
+    idx++;
+
+    // Bytes 8-11: SSRC, MSB first.
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 3), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 2), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 1), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 0), idx);
+    idx++;
+
+    // Append the payload byte by byte.
+    for (WebRtc_Word16 i=0; i < payloadLengthW8; i++)
+    {
+        WEBRTC_SPL_SET_BYTE(rtpPacket, payload[i], idx);
+        idx++;
+    }
+    if (payloadLengthW8 & 1)
+    {
+        // Our 16 bits buffer is one byte too large, set that
+        // last byte to zero.
+        WEBRTC_SPL_SET_BYTE(rtpPacket, 0x0, idx);
+    }
+}
+
+// Turns on receive-side VAD in every NetEQ instance. No-op (returns 0)
+// when VAD is already enabled. On success also resets the remembered
+// activity to passive.
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word16
+ACMNetEQ::EnableVAD()
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if (_vadStatus)
+    {
+        return 0;
+    }
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            // NOTE: the trace text says "SetVADStatus"; it appears to be a
+            // historic name for this function.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "SetVADStatus: NetEq is not initialized.");
+            return -1;
+        }
+        // VAD was off and we have to turn it on
+        if(EnableVADByIdxSafe(idx) < 0)
+        {
+            return -1;
+        }
+
+        // Set previous VAD status to PASSIVE
+        _previousAudioActivity = AudioFrame::kVadPassive;
+    }
+    _vadStatus = true;
+    return 0;
+}
+
+
+// Returns the cached VAD aggressiveness mode, read under the NetEQ lock.
+ACMVADMode
+ACMNetEQ::VADMode() const
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    return _vadMode;
+}
+
+
+// Sets the VAD aggressiveness mode on every NetEQ instance. The mode must
+// lie in [VADNormal, VADVeryAggr].
+//
+// Return value: 0 on success,
+//              -1 if the mode is out of range or any instance fails.
+WebRtc_Word16
+ACMNetEQ::SetVADMode(
+    const ACMVADMode mode)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if((mode < VADNormal) || (mode > VADVeryAggr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "SetVADMode: NetEq error: could not set VAD mode, mode is not supported");
+        return -1;
+    }
+    else
+    {
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if(!_isInitialized[idx])
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "SetVADMode: NetEq is not initialized.");
+                return -1;
+            }
+            if(WebRtcNetEQ_SetVADMode(_inst[idx], mode) < 0)
+            {
+                LogError("SetVADmode", idx);
+                return -1;
+            }
+        }
+        // Cache only after all instances accepted the mode.
+        _vadMode = mode;
+        return 0;
+    }
+}
+
+
+// Flushes the packet buffers of the master and all slave NetEQ instances.
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word32
+ACMNetEQ::FlushBuffers()
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "FlushBuffers: NetEq is not initialized.");
+            return -1;
+        }
+        if(WebRtcNetEQ_FlushBuffers(_inst[idx]) < 0)
+        {
+            LogError("FlushBuffers", idx);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+
+// Appends the NUL-terminated 'text' at version[position], writing at most
+// remainingBufferInBytes bytes, and updates position/remaining.
+// Fixes a latent overrun: strncpy() does not NUL-terminate when the text
+// fills (or exceeds) the remaining space, which would make the subsequent
+// strlen() read past the end of the buffer. We force termination at the
+// last free byte; when no truncation occurs strncpy() has already
+// zero-padded, so behavior is unchanged.
+static void AppendToVersionString(char* version,
+                                  const char* text,
+                                  WebRtc_UWord32& remainingBufferInBytes,
+                                  WebRtc_UWord32& position)
+{
+    strncpy(&version[position], text, remainingBufferInBytes);
+    if (remainingBufferInBytes > 0)
+    {
+        version[position + remainingBufferInBytes - 1] = '\0';
+    }
+    const WebRtc_UWord32 previousPosition = position;
+    position = (WebRtc_UWord32)strlen(version);
+    remainingBufferInBytes -= (position - previousPosition);
+}
+
+// Writes "NetEq\t\t<version>\n" into 'version' starting at 'position',
+// updating 'position' and 'remainingBufferInBytes' as it goes. The text
+// is truncated if the buffer is too small.
+//
+// Return value: 0 on success,
+//              -1 if NetEQ cannot report its version string.
+WebRtc_Word32
+ACMNetEQ::GetVersion(
+    char*   version,
+    WebRtc_UWord32& remainingBufferInBytes,
+    WebRtc_UWord32& position)
+{
+    AppendToVersionString(version, "NetEq\t\t", remainingBufferInBytes,
+                          position);
+
+    char myVersion[100];
+    if(WebRtcNetEQ_GetVersion(myVersion) < 0)
+    {
+        return -1;
+    }
+    AppendToVersionString(version, myVersion, remainingBufferInBytes,
+                          position);
+    AppendToVersionString(version, "\n", remainingBufferInBytes, position);
+
+    return 0;
+}
+
+// Removes a codec from the NetEQ decoder database of the master instance,
+// and from the slave instance as well when 'isStereo' is set.
+//
+// Input:
+//   - codecIdx : NetEQ decoder index; must lie strictly inside the
+//                (kDecoderReservedStart, kDecoderReservedEnd) range.
+//   - isStereo : also remove from the slave instance.
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word16
+ACMNetEQ::RemoveCodec(
+    WebRtcNetEQDecoder codecIdx,
+    bool               isStereo)
+{
+    // sanity check
+    if((codecIdx <= kDecoderReservedStart) ||
+        (codecIdx >= kDecoderReservedEnd))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "RemoveCodec: NetEq error: could not Remove Codec, codec index out of range");
+        return -1;
+    }
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "RemoveCodec: NetEq is not initialized.");
+        return -1;
+    }
+
+    if(WebRtcNetEQ_CodecDbRemove(_inst[0], codecIdx) < 0)
+    {
+        LogError("CodecDB_Remove", 0);
+        return -1;
+    }
+
+    if(isStereo)
+    {
+        // NOTE(review): unlike the master path, _isInitialized[1] is not
+        // checked here — presumably callers only pass isStereo when a
+        // slave exists; verify against call sites.
+        if(WebRtcNetEQ_CodecDbRemove(_inst[1], codecIdx) < 0)
+        {
+            LogError("CodecDB_Remove", 1);
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// Sets the background-noise generation mode on the master and all slave
+// NetEQ instances.
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word16
+ACMNetEQ::SetBackgroundNoiseMode(
+    const ACMBackgroundNoiseMode mode)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "SetBackgroundNoiseMode: NetEq is not initialized.");
+            return -1;
+        }
+        // The ACM and NetEQ enums are cast-compatible by construction.
+        if(WebRtcNetEQ_SetBGNMode(_inst[idx], (WebRtcNetEQBGNMode)mode) < 0)
+        {
+            LogError("SetBGNMode", idx);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Reads the current background-noise mode from the master NetEQ instance
+// into 'mode'.
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word16
+ACMNetEQ::BackgroundNoiseMode(
+    ACMBackgroundNoiseMode& mode)
+{
+    WebRtcNetEQBGNMode myMode;
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "BackgroundNoiseMode: NetEq is not initialized.");
+        return -1;
+    }
+    if(WebRtcNetEQ_GetBGNMode(_inst[0], &myMode) < 0)
+    {
+        LogError("WebRtcNetEQ_GetBGNMode", 0);
+        return -1;
+    }
+    else
+    {
+        mode = (ACMBackgroundNoiseMode)myMode;
+    }
+    return 0;
+}
+
+// Sets the identifier used in WEBRTC_TRACE output for this instance.
+void
+ACMNetEQ::SetUniqueId(
+    WebRtc_Word32 id)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    _id = id;
+}
+
+
+// Fetches the last error code and its textual name from the NetEQ
+// instance 'idx' and emits them to the trace, tagged with the name of the
+// NetEQ function that failed. Both local buffers are explicitly
+// NUL-terminated since strncpy() does not guarantee termination.
+void
+ACMNetEQ::LogError(
+    const char* neteqFuncName,
+    const WebRtc_Word16 idx) const
+{
+    char errorName[NETEQ_ERR_MSG_LEN_BYTE];
+    char myFuncName[50];
+    int neteqErrorCode = WebRtcNetEQ_GetErrorCode(_inst[idx]);
+    WebRtcNetEQ_GetErrorName(neteqErrorCode, errorName, NETEQ_ERR_MSG_LEN_BYTE - 1);
+    strncpy(myFuncName, neteqFuncName, 49);
+    errorName[NETEQ_ERR_MSG_LEN_BYTE - 1] = '\0';
+    myFuncName[49] = '\0';
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+        "NetEq-%d Error in function %s, error-code: %d, error-string: %s",
+        idx,
+        myFuncName,
+        neteqErrorCode,
+        errorName);
+}
+
+
+// Retrieves the RTP timestamp of the audio currently being played out,
+// queried from the master NetEQ instance.
+//
+// Return value: 0 on success, -1 on error.
+WebRtc_Word32
+ACMNetEQ::PlayoutTimestamp(
+    WebRtc_UWord32& timestamp)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    if(WebRtcNetEQ_GetSpeechTimeStamp(_inst[0], &timestamp) < 0)
+    {
+        LogError("GetSpeechTimeStamp", 0);
+        return -1;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+// Creates and configures the slave NetEQ instance used for stereo
+// decoding: initializes it, allocates its packet buffer and the shared
+// master/slave sync state, then mirrors the master's settings (extra
+// delay, AVT playout, background-noise mode, playout mode). No-op when a
+// slave already exists.
+//
+// Input:
+//   - usedCodecs : codecs to size the slave's packet buffer for.
+//   - noOfCodecs : number of entries in 'usedCodecs'.
+//
+// Return value: 0 on success, -1 on any failure.
+WebRtc_Word16
+ACMNetEQ::AddSlave(
+    const WebRtcNetEQDecoder* usedCodecs,
+    WebRtc_Word16       noOfCodecs)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    const WebRtc_Word16 slaveIdx = 1;
+    if(_numSlaves < 1)
+    {
+        // initialize the receiver, this also sets up VAD.
+        if(InitByIdxSafe(slaveIdx) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Initialize");
+            return -1;
+        }
+
+        // Allocate buffer.
+        if(AllocatePacketBufferByIdxSafe(usedCodecs, noOfCodecs, slaveIdx) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Allocate Packet Buffer");
+            return -1;
+        }
+
+        // Re-allocate the master/slave sync state, releasing any old one.
+        if(_masterSlaveInfo != NULL)
+        {
+            free(_masterSlaveInfo);
+            _masterSlaveInfo = NULL;
+        }
+        int msInfoSize = WebRtcNetEQ_GetMasterSlaveInfoSize();
+        _masterSlaveInfo = malloc(msInfoSize);
+
+        if(_masterSlaveInfo == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Allocate memory for Master-Slave Info");
+            return -1;
+        }
+
+        // We accept this as initialized NetEQ, the rest is to synchronize
+        // Slave with Master.
+        _numSlaves = 1;
+        _isInitialized[slaveIdx] = true;
+
+        // Set Slave delay as all other instances.
+        if(WebRtcNetEQ_SetExtraDelay(_inst[slaveIdx], _extraDelay) < 0)
+        {
+            LogError("SetExtraDelay", slaveIdx);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not set delay");
+            return -1;
+        }
+
+        // Set AVT
+        if(WebRtcNetEQ_SetAVTPlayout(_inst[slaveIdx], (_avtPlayout) ? 1 : 0) < 0)
+        {
+            LogError("SetAVTPlayout", slaveIdx);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not set AVT playout.");
+            return -1;
+        }
+
+        // Set Background Noise: read the master's mode and copy it.
+        WebRtcNetEQBGNMode currentMode;
+        if(WebRtcNetEQ_GetBGNMode(_inst[0], &currentMode) < 0)
+        {
+            LogError("GetBGNMode", 0);
+            // Typo fix in the trace message: "AAddSlave" -> "AddSlave".
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Get BGN form Master.");
+            return -1;
+        }
+
+        if(WebRtcNetEQ_SetBGNMode(_inst[slaveIdx], (WebRtcNetEQBGNMode)currentMode) < 0)
+        {
+            LogError("SetBGNMode", slaveIdx);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not set BGN mode.");
+            return -1;
+        }
+
+        // Mirror the cached playout mode; an unknown value falls through
+        // as kPlayoutOff.
+        enum WebRtcNetEQPlayoutMode playoutMode = kPlayoutOff;
+        switch(_playoutMode)
+        {
+        case voice:
+            playoutMode = kPlayoutOn;
+            break;
+        case fax:
+            playoutMode = kPlayoutFax;
+            break;
+        case streaming:
+            playoutMode = kPlayoutStreaming;
+            break;
+        }
+        if(WebRtcNetEQ_SetPlayoutMode(_inst[slaveIdx], playoutMode) < 0)
+        {
+            LogError("SetPlayoutMode", slaveIdx);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Set Playout Mode.");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// Records whether the incoming stream is stereo; RecOut() uses this flag
+// to choose between the mono and master/slave decode paths.
+void
+ACMNetEQ::SetReceivedStereo(
+    bool receivedStereo)
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    _receivedStereo = receivedStereo;
+}
+
+// Returns the number of slave NetEQ instances currently allocated.
+WebRtc_UWord8
+ACMNetEQ::NumSlaves()
+{
+    CriticalSectionScoped lock(*_netEqCritSect);
+    return _numSlaves;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_neteq.h b/trunk/src/modules/audio_coding/main/source/acm_neteq.h
new file mode 100644
index 0000000..677c622
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_neteq.h
@@ -0,0 +1,383 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
+
+#include "audio_coding_module.h"
+#include "audio_coding_module_typedefs.h"
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+#include "webrtc_neteq.h"
+#include "webrtc_vad.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RWLockWrapper;
+struct CodecInst;
+enum AudioPlayoutMode;
+enum ACMSpeechType;
+
+#define MAX_NUM_SLAVE_NETEQ 1
+
+class ACMNetEQ
+{
+public:
+    // Constructor of the class
+    ACMNetEQ();
+
+    // Destructor of the class.
+    ~ACMNetEQ();
+
+    //
+    // GetVersion()
+    // Fills the version array with the NetEQ version and updates the
+    // remainingBufferInBytes and position variables accordingly.
+    //
+    // Output:
+    //   - version               : An array to be filled with the version
+    //                             data.
+    //
+    // Input/Output:
+    //   - remainingBuffInBytes  : The number of free bytes at the end of
+    //                             the version array.
+    //   - position              : Position where the free space starts.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ returned an error.
+    //
+    static WebRtc_Word32 GetVersion(
+        char*   version,
+        WebRtc_UWord32& remainingBuffInBytes,
+        WebRtc_UWord32& position);
+
+    //
+    // Init()
+    // Allocates memory for NetEQ and VAD and initializes them.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ or VAD returned an error or
+    //                            if out of memory.
+    //
+    WebRtc_Word32 Init();
+
+    //
+    // RecIn()
+    // Gives the payload to NetEQ.
+    //
+    // Input:
+    //   - incomingPayload       : Incoming audio payload.
+    //   - payloadLength         : Length of incoming audio payload.
+    //   - rtpInfo               : RTP header for the incoming payload containing
+    //                             information about payload type, sequence number,
+    //                             timestamp, ssrc and marker bit.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 RecIn(
+        const WebRtc_Word8*    incomingPayload,
+        const WebRtc_Word32    payloadLength,
+        const WebRtcRTPHeader&   rtpInfo);
+
+    //
+    // RecOut()
+    // Asks NetEQ for 10 ms of decoded audio.
+    //
+    // Input:
+    //   -audioFrame             : an audio frame were output data and
+    //                             associated parameters are written to.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ returned an error.
+    //
+    WebRtc_Word32 RecOut(
+        AudioFrame& audioFrame);
+
+    //
+    // AddCodec()
+    // Adds a new codec to the NetEQ codec database.
+    //
+    // Input:
+    //   - codecDef              : The codec to be added.
+    //   - toMaster              : true if the codec has to be added to Master
+    //                             NetEq, otherwise will be added to the Slave
+    //                             NetEQ.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 AddCodec(
+        WebRtcNetEQ_CodecDef *codecDef,
+        bool                  toMaster = true);
+
+    //
+    // AllocatePacketBuffer()
+    // Allocates the NetEQ packet buffer.
+    //
+    // Input:
+    //   - usedCodecs            : An array of the codecs to be used by NetEQ.
+    //   - noOfCodecs            : Number of codecs in usedCodecs.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 AllocatePacketBuffer(
+        const WebRtcNetEQDecoder* usedCodecs,
+        WebRtc_Word16    noOfCodecs);
+
+    //
+    // SetExtraDelay()
+    // Sets an extra delay of delayInMS milliseconds in NetEQ.
+    //
+    // Input:
+    //   - delayInMS             : Extra delay in milliseconds.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 SetExtraDelay(
+        const WebRtc_Word32 delayInMS);
+
+    //
+    // SetAVTPlayout()
+    // Enable/disable playout of AVT payloads.
+    //
+    // Input:
+    //   - enable                : Enable if true, disable if false.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 SetAVTPlayout(
+        const bool enable);
+
+    //
+    // AVTPlayout()
+    // Get the current AVT playout state.
+    //
+    // Return value              : True if AVT playout is enabled.
+    //                             False if AVT playout is disabled.
+    //
+    bool AVTPlayout() const;
+
+    //
+    // CurrentSampFreqHz()
+    // Get the current sampling frequency in Hz.
+    //
+    // Return value              : Sampling frequency in Hz.
+    //
+    WebRtc_Word32 CurrentSampFreqHz() const;
+
+    //
+    // SetPlayoutMode()
+    // Sets the playout mode to voice or fax.
+    //
+    // Input:
+    //   - mode                  : The playout mode to be used, voice,
+    //                             fax, or streaming.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 SetPlayoutMode(
+        const AudioPlayoutMode mode);
+
+    //
+    // PlayoutMode()
+    // Get the current playout mode.
+    //
+    // Return value              : The current playout mode.
+    //
+    AudioPlayoutMode PlayoutMode() const;
+
+    //
+    // NetworkStatistics()
+    // Get the current network statistics from NetEQ.
+    //
+    // Output:
+    //   - statistics            : The current network statistics.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 NetworkStatistics(
+        ACMNetworkStatistics* statistics) const;
+
+    //
+    // VADMode()
+    // Get the current VAD Mode.
+    //
+    // Return value              : The current VAD mode.
+    //
+    ACMVADMode VADMode() const;
+
+    //
+    // SetVADMode()
+    // Set the VAD mode.
+    //
+    // Input:
+    //   - mode                  : The new VAD mode.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if an error occurred.
+    //
+    WebRtc_Word16 SetVADMode(
+        const ACMVADMode mode);
+
+    //
+    // DecodeLock()
+    // Get the decode lock used to protect decoder instances while decoding.
+    //
+    // Return value              : Pointer to the decode lock.
+    //
+    RWLockWrapper* DecodeLock() const
+    {
+        return _decodeLock;
+    }
+
+    //
+    // FlushBuffers()
+    // Flushes the NetEQ packet and speech buffers.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ returned an error.
+    //
+    WebRtc_Word32 FlushBuffers();
+
+    //
+    // RemoveCodec()
+    // Removes a codec from the NetEQ codec database.
+    //
+    // Input:
+    //   - codecIdx              : Codec to be removed.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if an error occurred.
+    //
+    WebRtc_Word16 RemoveCodec(
+        WebRtcNetEQDecoder codecIdx,
+        bool isStereo = false);
+
+
+    //
+    // SetBackgroundNoiseMode()
+    // Set the mode of the background noise.
+    //
+    // Input:
+    //   - mode                  : an enumerator specifying the mode of the
+    //                             background noise.
+    //
+    // Return value              : 0 if succeeded,
+    //                            -1 if failed to set the mode.
+    //
+    WebRtc_Word16 SetBackgroundNoiseMode(
+        const ACMBackgroundNoiseMode mode);
+
+    //
+    // BackgroundNoiseMode()
+    // Returns the mode of the background noise.
+    //
+    // Return value              : The mode of background noise.
+    //
+    WebRtc_Word16 BackgroundNoiseMode(
+        ACMBackgroundNoiseMode& mode);
+
+    void SetUniqueId(
+        WebRtc_Word32 id);
+
+    WebRtc_Word32 PlayoutTimestamp(
+        WebRtc_UWord32& timestamp);
+
+    void SetReceivedStereo(
+        bool receivedStereo);
+
+    WebRtc_UWord8 NumSlaves();
+
+    enum JB {masterJB = 0, slaveJB = 1};
+
+    WebRtc_Word16 AddSlave(
+        const WebRtcNetEQDecoder*    usedCodecs,
+        WebRtc_Word16       noOfCodecs);
+
+private:
+    //
+    // RTPPack()
+    // Creates a Word16 RTP packet out of the payload data in Word16 and
+    // a WebRtcRTPHeader.
+    //
+    // Input:
+    //   - payload               : Payload to be packetized.
+    //   - payloadLengthW8       : Length of the payload in bytes.
+    //   - rtpInfo               : RTP header struct.
+    //
+    // Output:
+    //   - rtpPacket             : The RTP packet.
+    //
+    static void RTPPack(
+        WebRtc_Word16*         rtpPacket,
+        const WebRtc_Word8*    payload,
+        const WebRtc_Word32    payloadLengthW8,
+        const WebRtcRTPHeader& rtpInfo);
+
+    void LogError(
+        const char* neteqFuncName,
+        const WebRtc_Word16 idx) const;
+
+    WebRtc_Word16 InitByIdxSafe(
+        const WebRtc_Word16 idx);
+
+    // EnableVAD()
+    // Enable VAD.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if an error occurred.
+    //
+    WebRtc_Word16 EnableVAD();
+
+    WebRtc_Word16 EnableVADByIdxSafe(
+        const WebRtc_Word16 idx);
+
+    WebRtc_Word16 AllocatePacketBufferByIdxSafe(
+        const WebRtcNetEQDecoder* usedCodecs,
+        WebRtc_Word16       noOfCodecs,
+        const WebRtc_Word16 idx);
+
+    void*                   _inst[MAX_NUM_SLAVE_NETEQ + 1];
+    void*                   _instMem[MAX_NUM_SLAVE_NETEQ + 1];
+
+    WebRtc_Word16*          _netEqPacketBuffer[MAX_NUM_SLAVE_NETEQ + 1];
+
+    WebRtc_Word32           _id;
+    float                   _currentSampFreqKHz;
+    bool                    _avtPlayout;
+    AudioPlayoutMode        _playoutMode;
+    CriticalSectionWrapper* _netEqCritSect;
+
+    WebRtcVadInst*          _ptrVADInst[MAX_NUM_SLAVE_NETEQ + 1];
+
+    bool                    _vadStatus;
+    ACMVADMode              _vadMode;
+    RWLockWrapper*          _decodeLock;
+    bool                    _isInitialized[MAX_NUM_SLAVE_NETEQ + 1];
+    WebRtc_UWord8           _numSlaves;
+    bool                    _receivedStereo;
+    void*                   _masterSlaveInfo;
+    AudioFrame::VADActivity _previousAudioActivity;
+    WebRtc_Word32           _extraDelay;
+
+    CriticalSectionWrapper* _callbackCritSect;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_neteq_unittest.cc b/trunk/src/modules/audio_coding/main/source/acm_neteq_unittest.cc
new file mode 100644
index 0000000..7feb2e0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_neteq_unittest.cc
@@ -0,0 +1,147 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains unit tests for ACM's NetEQ wrapper (class ACMNetEQ).
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
+#include "modules/audio_coding/main/source/acm_codec_database.h"
+#include "modules/audio_coding/main/source/acm_neteq.h"
+#include "modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
+#include "modules/interface/module_common_types.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+class AcmNetEqTest : public ::testing::Test {
+ protected:
+  static const size_t kMaxPayloadLen = 5760;  // 60 ms, 48 kHz, 16 bit samples.
+  static const int kPcm16WbPayloadType = 94;
+  AcmNetEqTest() {}
+  virtual void SetUp();
+  virtual void TearDown() {}
+
+  void InsertZeroPacket(uint16_t sequence_number,
+                        uint32_t timestamp,
+                        uint8_t payload_type,
+                        uint32_t ssrc,
+                        bool marker_bit,
+                        size_t len_payload_bytes);
+  void PullData(int expected_num_samples);
+
+  ACMNetEQ neteq_;
+};
+
+void AcmNetEqTest::SetUp() {
+  ASSERT_EQ(0, neteq_.Init());
+  ASSERT_EQ(0, neteq_.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
+                                           ACMCodecDB::kNumCodecs));
+  WebRtcNetEQ_CodecDef codec_def;
+  SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, kPcm16WbPayloadType, NULL, 16000);
+  SET_PCM16B_WB_FUNCTIONS(codec_def);
+  ASSERT_EQ(0, neteq_.AddCodec(&codec_def, true));
+}
+
+void AcmNetEqTest::InsertZeroPacket(uint16_t sequence_number,
+                                    uint32_t timestamp,
+                                    uint8_t payload_type,
+                                    uint32_t ssrc,
+                                    bool marker_bit,
+                                    size_t len_payload_bytes) {
+  ASSERT_TRUE(len_payload_bytes <= kMaxPayloadLen);
+  uint16_t payload[kMaxPayloadLen] = {0};
+  WebRtcRTPHeader rtp_header;
+  rtp_header.header.sequenceNumber = sequence_number;
+  rtp_header.header.timestamp = timestamp;
+  rtp_header.header.ssrc = ssrc;
+  rtp_header.header.payloadType = payload_type;
+  rtp_header.header.markerBit = marker_bit;
+  rtp_header.type.Audio.channel = 1;
+  ASSERT_EQ(0, neteq_.RecIn(reinterpret_cast<WebRtc_Word8*>(payload),
+                            len_payload_bytes, rtp_header));
+}
+
+void AcmNetEqTest::PullData(int expected_num_samples) {
+  AudioFrame out_frame;
+  ASSERT_EQ(0, neteq_.RecOut(out_frame));
+  ASSERT_EQ(expected_num_samples, out_frame._payloadDataLengthInSamples);
+}
+
+TEST_F(AcmNetEqTest, NetworkStatistics) {
+  // Use fax mode to avoid time-scaling. This is to simplify the testing of
+  // packet waiting times in the packet buffer.
+  neteq_.SetPlayoutMode(fax);
+  // Insert 31 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
+  int num_frames = 30;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  int i, j;
+  for (i = 0; i < num_frames; ++i) {
+    InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
+                     kPayloadBytes);
+  }
+  // Pull out data once.
+  PullData(kSamples);
+  // Insert one more packet (to produce different mean and median).
+  i = num_frames;
+  InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
+                   kPayloadBytes);
+  // Pull out all data.
+  for (j = 1; j < num_frames + 1; ++j) {
+    PullData(kSamples);
+  }
+
+  ACMNetworkStatistics stats;
+  ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
+  EXPECT_EQ(0, stats.currentBufferSize);
+  EXPECT_EQ(0, stats.preferredBufferSize);
+  EXPECT_FALSE(stats.jitterPeaksFound);
+  EXPECT_EQ(0, stats.currentPacketLossRate);
+  EXPECT_EQ(0, stats.currentDiscardRate);
+  EXPECT_EQ(0, stats.currentExpandRate);
+  EXPECT_EQ(0, stats.currentPreemptiveRate);
+  EXPECT_EQ(0, stats.currentAccelerateRate);
+  EXPECT_EQ(-916, stats.clockDriftPPM);  // Initial value is slightly off.
+  EXPECT_EQ(300, stats.maxWaitingTimeMs);
+  EXPECT_EQ(10, stats.minWaitingTimeMs);
+  EXPECT_EQ(159, stats.meanWaitingTimeMs);
+  EXPECT_EQ(160, stats.medianWaitingTimeMs);
+}
+
+TEST_F(AcmNetEqTest, TestZeroLengthWaitingTimesVector) {
+  // Insert one packet.
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  int i = 0;
+  InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
+                   kPayloadBytes);
+  // Do not pull out any data.
+
+  ACMNetworkStatistics stats;
+  ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
+  EXPECT_EQ(0, stats.currentBufferSize);
+  EXPECT_EQ(0, stats.preferredBufferSize);
+  EXPECT_FALSE(stats.jitterPeaksFound);
+  EXPECT_EQ(0, stats.currentPacketLossRate);
+  EXPECT_EQ(0, stats.currentDiscardRate);
+  EXPECT_EQ(0, stats.currentExpandRate);
+  EXPECT_EQ(0, stats.currentPreemptiveRate);
+  EXPECT_EQ(0, stats.currentAccelerateRate);
+  EXPECT_EQ(-916, stats.clockDriftPPM);  // Initial value is slightly off.
+  EXPECT_EQ(-1, stats.minWaitingTimeMs);
+  EXPECT_EQ(-1, stats.maxWaitingTimeMs);
+  EXPECT_EQ(-1, stats.meanWaitingTimeMs);
+  EXPECT_EQ(-1, stats.medianWaitingTimeMs);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_opus.cc b/trunk/src/modules/audio_coding/main/source/acm_opus.cc
new file mode 100644
index 0000000..ebdf971
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_opus.cc
@@ -0,0 +1,477 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_opus.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_OPUS
+    // NOTE! Opus is not included in the open-source package. Modify this file or your codec
+    // API to match the function call and name of used Opus API file.
+    // #include "opus_interface.h"
+#endif
+
+namespace webrtc
+{
+
+#ifndef WEBRTC_CODEC_OPUS
+
+ACMOPUS::ACMOPUS(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _mySampFreq(0),
+      _myRate(0),
+      _opusMode(0),
+      _flagVBR(0) {
+  return;
+}
+
+
+ACMOPUS::~ACMOPUS()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMOPUS::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMOPUS::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMOPUS::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMOPUS::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMOPUS::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMOPUS::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::UnregisterFromNetEqSafe(
+    ACMNetEQ*     /* netEq       */,
+    WebRtc_Word16 /* payloadType */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMOPUS::SetBitRateSafe(
+    const WebRtc_Word32 /*rate*/ )
+{
+    return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+// Remove when integrating a real Opus wrapper
+extern WebRtc_Word16 WebRtcOpus_CreateEnc(OPUS_inst_t_** inst, WebRtc_Word16 samplFreq);
+extern WebRtc_Word16 WebRtcOpus_CreateDec(OPUS_inst_t_** inst, WebRtc_Word16 samplFreq);
+extern WebRtc_Word16 WebRtcOpus_FreeEnc(OPUS_inst_t_* inst);
+extern WebRtc_Word16 WebRtcOpus_FreeDec(OPUS_inst_t_* inst);
+extern WebRtc_Word16 WebRtcOpus_Encode(OPUS_inst_t_* encInst,
+                                       WebRtc_Word16* input,
+                                       WebRtc_Word16* output,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 byteLen);
+extern WebRtc_Word16 WebRtcOpus_EncoderInit(OPUS_inst_t_* encInst,
+                                            WebRtc_Word16 samplFreq,
+                                            WebRtc_Word16 mode,
+                                            WebRtc_Word16 vbrFlag);
+extern WebRtc_Word16 WebRtcOpus_Decode(OPUS_inst_t_* decInst);
+extern WebRtc_Word16 WebRtcOpus_DecodeBwe(OPUS_inst_t_* decInst, WebRtc_Word16* input);
+extern WebRtc_Word16 WebRtcOpus_DecodePlc(OPUS_inst_t_* decInst);
+extern WebRtc_Word16 WebRtcOpus_DecoderInit(OPUS_inst_t_* decInst);
+
+ACMOPUS::ACMOPUS(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _mySampFreq(48000),  // Default sampling frequency.
+      _myRate(50000),  // Default rate.
+      _opusMode(1),  // Default mode is the hybrid mode.
+      _flagVBR(0) {  // Default VBR off.
+  _codecID = codecID;
+
+  // Current implementation doesn't have DTX. That might change.
+  _hasInternalDTX = false;
+
+  return;
+}
+
+ACMOPUS::~ACMOPUS()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    WebRtc_Word16 noEncodedSamples = 0;
+    WebRtc_Word16 tmpLenByte = 0;
+    *bitStreamLenByte = 0;
+
+    WebRtc_Word16 byteLengthFrame = 0;
+
+    // Derive what byte-length is requested
+    byteLengthFrame = _myRate*_frameLenSmpl/(8*_mySampFreq);
+
+    // Call Encoder
+    *bitStreamLenByte = WebRtcOpus_Encode(_encoderInstPtr, &_inAudio[_inAudioIxRead],
+           (WebRtc_Word16*)bitStream, _frameLenSmpl, byteLengthFrame);
+
+    // Increment the read index; this tells the caller how far
+    // we have advanced in reading the audio buffer.
+    _inAudioIxRead += _frameLenSmpl;
+
+    // sanity check
+    if(*bitStreamLenByte < 0)
+    {
+        // error has happened
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalEncode: Encode error for Opus");
+            *bitStreamLenByte = 0;
+            return -1;
+    }
+
+    return *bitStreamLenByte;
+}
+
+
+
+WebRtc_Word16
+ACMOPUS::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // Set the bit rate and initialize.
+    _myRate = codecParams->codecInstant.rate;
+    return SetBitRateSafe( (WebRtc_UWord32)_myRate);
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    if (WebRtcOpus_DecoderInit(_decoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "InternalInitDecoder: init decoder failed for Opus");
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32
+ACMOPUS::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CodecDef: Decoder uninitialized for Opus");
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_OPUS_FUNCTIONS."
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderOpus, codecInst.pltype,
+        _decoderInstPtr, 16000);
+    SET_OPUS_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMOPUS::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateEncoder()
+{
+    if (WebRtcOpus_CreateEnc(&_encoderInstPtr, _mySampFreq) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalCreateEncoder: create encoder failed for Opus");
+        return -1;
+    }
+    return 0;
+}
+
+
+void
+ACMOPUS::DestructEncoderSafe()
+{
+    _encoderExist = false;
+    _encoderInitialized = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateDecoder()
+{
+   if (WebRtcOpus_CreateDec(&_decoderInstPtr, _mySampFreq) < 0)
+   {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "InternalCreateDecoder: create decoder failed for Opus");
+       return -1;
+   }
+   return 0;
+}
+
+
+void
+ACMOPUS::DestructDecoderSafe()
+{
+    _decoderExist = false;
+    _decoderInitialized = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMOPUS::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcOpus_FreeEnc((OPUS_inst_t*)ptrInst);
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec: given payload-type does not match \
+the stored payload type",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+    return netEq->RemoveCodec(kDecoderOpus);
+}
+
+WebRtc_Word16
+ACMOPUS::SetBitRateSafe(
+    const WebRtc_Word32 rate)
+{
+    //allowed rates: {8000, 12000, 14000, 16000, 18000, 20000,
+    //                22000, 24000, 26000, 28000, 30000, 32000};
+    switch(rate)
+    {
+    case 8000:
+        {
+            _myRate = 8000;
+            break;
+        }
+    case 12000:
+        {
+            _myRate = 12000;
+            break;
+        }
+    case 14000:
+        {
+            _myRate = 14000;
+            break;
+        }
+    case 16000:
+        {
+            _myRate = 16000;
+            break;
+        }
+    case 18000:
+        {
+            _myRate = 18000;
+            break;
+        }
+    case 20000:
+        {
+            _myRate = 20000;
+            break;
+        }
+    case 22000:
+        {
+            _myRate = 22000;
+            break;
+        }
+    case 24000:
+        {
+            _myRate = 24000;
+            break;
+        }
+    case 26000:
+        {
+            _myRate = 26000;
+            break;
+        }
+    case 28000:
+        {
+            _myRate = 28000;
+            break;
+        }
+    case 30000:
+        {
+            _myRate = 30000;
+            break;
+        }
+    case 32000:
+        {
+            _myRate = 32000;
+            break;
+        }
+    default:
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "SetBitRateSafe: Invalid rate Opus");
+            return -1;
+        }
+    }
+
+    // Re-init with new rate
+    if (WebRtcOpus_EncoderInit(_encoderInstPtr, _mySampFreq, _opusMode, _flagVBR) >= 0)
+    {
+        _encoderParams.codecInstant.rate = _myRate;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_opus.h b/trunk/src/modules/audio_coding/main/source/acm_opus.h
new file mode 100644
index 0000000..278c369
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_opus.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct OPUS_inst_t_;
+struct OPUS_inst_t_;
+
+namespace webrtc
+{
+
+class ACMOPUS: public ACMGenericCodec
+{
+public:
+    ACMOPUS(WebRtc_Word16 codecID);
+    ~ACMOPUS();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst& codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ* netEq,
+        WebRtc_Word16   payloadType);
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 rate);
+
+    OPUS_inst_t_* _encoderInstPtr;
+    OPUS_inst_t_* _decoderInstPtr;
+
+    WebRtc_UWord16    _mySampFreq;
+    WebRtc_UWord16    _myRate;
+    WebRtc_Word16     _opusMode;
+    WebRtc_Word16     _flagVBR;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_pcm16b.cc b/trunk/src/modules/audio_coding/main/source/acm_pcm16b.cc
new file mode 100644
index 0000000..5c93e90
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_pcm16b.cc
@@ -0,0 +1,334 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_pcm16b.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_PCM16
+    #include "pcm16b.h"
+#endif
+
+namespace webrtc
+{
+
+#ifndef WEBRTC_CODEC_PCM16
+
+// Stub implementations, compiled only when WEBRTC_CODEC_PCM16 is NOT
+// defined. Every operation fails (-1 / NULL) so the codec is effectively
+// disabled; the real implementations live in the #else branch below.
+ACMPCM16B::ACMPCM16B(
+    WebRtc_Word16 /* codecID */)
+{
+    return;
+}
+
+
+ACMPCM16B::~ACMPCM16B()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+   return -1;
+}
+
+
+WebRtc_Word32
+ACMPCM16B::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMPCM16B::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMPCM16B::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+
+void
+ACMPCM16B::DestructEncoderSafe()
+{
+    return;
+}
+
+void
+ACMPCM16B::DestructDecoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::UnregisterFromNetEqSafe(
+    ACMNetEQ*     /* netEq       */,
+    WebRtc_Word16 /* payloadType */)
+{
+    return -1;
+}
+
+
+
+#else     //===================== Actual Implementation =======================
+
+
+ACMPCM16B::ACMPCM16B(
+    WebRtc_Word16 codecID)
+{
+    _codecID = codecID;
+    _samplingFreqHz = ACMCodecDB::CodecFreq(_codecID);
+}
+
+
+ACMPCM16B::~ACMPCM16B()
+{
+    return;
+}
+
+
+// Encodes one frame (all channels) from the internal audio buffer into
+// |bitStream| using the PCM16B codec. Writes the encoded size in bytes to
+// |bitStreamLenByte| and also returns it.
+WebRtc_Word16
+ACMPCM16B::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    *bitStreamLenByte = WebRtcPcm16b_Encode(&_inAudio[_inAudioIxRead],
+                                            _frameLenSmpl*_noChannels,
+                                            bitStream);
+    // Advance the read index to tell the caller how far we have gone
+    // forward in reading the audio buffer.
+    _inAudioIxRead += _frameLenSmpl*_noChannels;
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // This codec does not need initialization,
+    // PCM has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+   // This codec does not need initialization,
+   // PCM has no instance
+   return 0;
+}
+
+
+// Fills |codecDef| with the NetEQ registration data for this PCM16B
+// instance. Returns 0 on success, -1 for an unsupported sampling rate.
+WebRtc_Word32
+ACMPCM16B::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_PCM16B*_FUNCTIONS".
+    // Then call NetEQ to add the codec to its
+    // database. The NetEQ decoder enum depends on
+    // this instance's sampling rate.
+    switch(_samplingFreqHz)
+    {
+    case 8000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderPCM16B, codecInst.pltype,
+                NULL, 8000);
+            SET_PCM16B_FUNCTIONS((codecDef));
+            break;
+        }
+    case 16000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderPCM16Bwb, codecInst.pltype,
+                NULL, 16000);
+            SET_PCM16B_WB_FUNCTIONS((codecDef));
+            break;
+        }
+    case 32000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderPCM16Bswb32kHz,
+                codecInst.pltype, NULL, 32000);
+            SET_PCM16B_SWB32_FUNCTIONS((codecDef));
+            break;
+        }
+    default:
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMPCM16B::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalCreateEncoder()
+{
+    // PCM has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCM16B::InternalCreateDecoder()
+{
+    // PCM has no instance
+    return 0;
+}
+
+
+void
+ACMPCM16B::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    // PCM has no instance
+   return;
+}
+
+
+void
+ACMPCM16B::DestructEncoderSafe()
+{
+    // PCM has no instance
+    _encoderExist = false;
+    _encoderInitialized = false;
+     return;
+}
+
+void
+ACMPCM16B::DestructDecoderSafe()
+{
+    // PCM has no instance
+    _decoderExist = false;
+    _decoderInitialized = false;
+    return;
+}
+
+
+// Removes this codec's decoder from NetEQ. Fails (-1) if |payloadType|
+// does not match the payload type the decoder was registered with, or if
+// the sampling rate is not one of 8/16/32 kHz; otherwise returns the
+// result of ACMNetEQ::RemoveCodec().
+WebRtc_Word16
+ACMPCM16B::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: three arguments were passed but the format string only
+        // contained two conversion specifiers; log the stored payload
+        // type with its own %d so the third argument is consumed.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+
+    // PCM16B registers a different NetEQ decoder per sampling rate.
+    switch(_samplingFreqHz)
+    {
+    case 8000:
+        {
+            return netEq->RemoveCodec(kDecoderPCM16B);
+        }
+    case 16000:
+        {
+            return netEq->RemoveCodec(kDecoderPCM16Bwb);
+        }
+    case 32000:
+        {
+            return netEq->RemoveCodec(kDecoderPCM16Bswb32kHz);
+        }
+    default:
+        {
+            return -1;
+        }
+    }
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_pcm16b.h b/trunk/src/modules/audio_coding/main/source/acm_pcm16b.h
new file mode 100644
index 0000000..a81d8fe
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_pcm16b.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMPCM16B : public ACMGenericCodec
+{
+public:
+    ACMPCM16B(WebRtc_Word16 codecID);
+    ~ACMPCM16B();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*       netEq,
+        WebRtc_Word16   payloadType);
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word32 _samplingFreqHz;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_pcma.cc b/trunk/src/modules/audio_coding/main/source/acm_pcma.cc
new file mode 100644
index 0000000..c86bd1c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_pcma.cc
@@ -0,0 +1,165 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_pcma.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+// Codec interface
+#include "g711_interface.h"
+
+namespace webrtc
+{
+
+ACMPCMA::ACMPCMA(WebRtc_Word16 codecID)
+{
+    _codecID = codecID;
+}
+
+
+ACMPCMA::~ACMPCMA()
+{
+    return;
+}
+
+
+// Encodes one frame (all channels) from the internal audio buffer into
+// |bitStream| as G.711 A-law. Writes the encoded size in bytes to
+// |bitStreamLenByte| and also returns it.
+WebRtc_Word16 
+ACMPCMA::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    *bitStreamLenByte = WebRtcG711_EncodeA(NULL, &_inAudio[_inAudioIxRead],
+        _frameLenSmpl*_noChannels, (WebRtc_Word16*)bitStream);
+    // Advance the read index to tell the caller how far we have gone
+    // forward in reading the audio buffer.
+    _inAudioIxRead += _frameLenSmpl*_noChannels;
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16 
+ACMPCMA::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */, 
+    WebRtc_Word16* /* audio            */, 
+    WebRtc_Word16* /* audioSamples     */, 
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16 
+ACMPCMA::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // This codec does not need initialization,
+    // PCM has no instance
+    return 0;    
+}
+
+
+WebRtc_Word16 
+ACMPCMA::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // This codec does not need initialization,
+    // PCM has no instance
+    return 0;
+}
+
+
+// Fills |codecDef| with the NetEQ registration data for G.711 A-law.
+// Always succeeds (returns 0); PCMA runs only at 8 kHz.
+WebRtc_Word32 ACMPCMA::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&  codecInst)
+{
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_PCMA_FUNCTIONS".
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderPCMa, codecInst.pltype, NULL, 8000);
+    SET_PCMA_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMPCMA::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMPCMA::InternalCreateEncoder()
+{
+    // PCM has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCMA::InternalCreateDecoder()
+{
+    // PCM has no instance
+    return 0;
+}
+
+
+void 
+ACMPCMA::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    // PCM has no instance
+    return;
+}
+
+
+void 
+ACMPCMA::DestructEncoderSafe()
+{
+    // PCM has no instance
+    return;
+}
+
+
+void 
+ACMPCMA::DestructDecoderSafe()
+{
+    // PCM has no instance
+    _decoderInitialized = false;
+    _decoderExist = false;
+    return;
+}
+
+
+// Removes the PCMA decoder from NetEQ. Fails (-1) if |payloadType| does
+// not match the payload type the decoder was registered with; otherwise
+// returns the result of ACMNetEQ::RemoveCodec().
+WebRtc_Word16 
+ACMPCMA::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: three arguments were passed but the format string only
+        // contained two conversion specifiers; log the stored payload
+        // type with its own %d so the third argument is consumed.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID, 
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d", 
+            _decoderParams.codecInstant.plname, 
+            payloadType, 
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+
+    return netEq->RemoveCodec(kDecoderPCMa);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_pcma.h b/trunk/src/modules/audio_coding/main/source/acm_pcma.h
new file mode 100644
index 0000000..db25798
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_pcma.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMPCMA : public ACMGenericCodec
+{
+public:
+    ACMPCMA(WebRtc_Word16 codecID);
+    ~ACMPCMA();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*       netEq,
+        WebRtc_Word16   payloadType);
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_pcmu.cc b/trunk/src/modules/audio_coding/main/source/acm_pcmu.cc
new file mode 100644
index 0000000..320ba5b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_pcmu.cc
@@ -0,0 +1,167 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_pcmu.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+// Codec interface
+#include "g711_interface.h"
+
+namespace webrtc
+{
+
+ACMPCMU::ACMPCMU(WebRtc_Word16 codecID)
+{
+    _codecID = codecID;
+}
+
+
+ACMPCMU::~ACMPCMU()
+{
+    return;
+}
+
+
+// Encodes one frame (all channels) from the internal audio buffer into
+// |bitStream| as G.711 mu-law. Writes the encoded size in bytes to
+// |bitStreamLenByte| and also returns it.
+WebRtc_Word16
+ACMPCMU::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    *bitStreamLenByte = WebRtcG711_EncodeU(NULL, &_inAudio[_inAudioIxRead],
+        _frameLenSmpl*_noChannels, (WebRtc_Word16*)bitStream);
+    // Advance the read index to tell the caller how far we have gone
+    // forward in reading the audio buffer.
+    _inAudioIxRead += _frameLenSmpl*_noChannels;
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMPCMU::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCMU::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // This codec does not need initialization,
+    // PCM has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCMU::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+   // This codec does not need initialization,
+   // PCM has no instance
+   return 0;
+}
+
+
+// Fills |codecDef| with the NetEQ registration data for G.711 mu-law.
+// Always succeeds (returns 0); PCMU runs only at 8 kHz.
+WebRtc_Word32
+ACMPCMU::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_PCMU_FUNCTIONS".
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderPCMu, codecInst.pltype, NULL, 8000);
+    SET_PCMU_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMPCMU::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMPCMU::InternalCreateEncoder()
+{
+    // PCM has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMPCMU::InternalCreateDecoder()
+{
+    // PCM has no instance
+    return 0;
+}
+
+
+void
+ACMPCMU::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    // PCM has no instance
+    return;
+}
+
+
+void
+ACMPCMU::DestructEncoderSafe()
+{
+    // PCM has no instance
+    _encoderExist = false;
+    _encoderInitialized = false;
+    return;
+}
+
+void ACMPCMU::DestructDecoderSafe()
+{
+    // PCM has no instance
+    _decoderInitialized = false;
+    _decoderExist = false;
+    return;
+}
+
+
+// Removes the PCMU decoder from NetEQ. Fails (-1) if |payloadType| does
+// not match the payload type the decoder was registered with; otherwise
+// returns the result of ACMNetEQ::RemoveCodec().
+WebRtc_Word16
+ACMPCMU::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: three arguments were passed but the format string only
+        // contained two conversion specifiers; log the stored payload
+        // type with its own %d so the third argument is consumed.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+
+    return netEq->RemoveCodec(kDecoderPCMu);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_pcmu.h b/trunk/src/modules/audio_coding/main/source/acm_pcmu.h
new file mode 100644
index 0000000..2fc4223
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_pcmu.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMPCMU : public ACMGenericCodec
+{
+public:
+    ACMPCMU(WebRtc_Word16 codecID);
+    ~ACMPCMU();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&  codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*       netEq,
+        WebRtc_Word16   payloadType);
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_red.cc b/trunk/src/modules/audio_coding/main/source/acm_red.cc
new file mode 100644
index 0000000..88dbc24
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_red.cc
@@ -0,0 +1,163 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_red.h"
+#include "acm_neteq.h"
+#include "acm_common_defs.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+namespace webrtc
+{
+
+ACMRED::ACMRED(WebRtc_Word16 codecID)
+{
+    _codecID = codecID;
+}
+
+
+ACMRED::~ACMRED()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    // RED is never used as an encoder
+    // RED has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // This codec does not need initialization,
+    // RED has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+   // This codec does not need initialization,
+   // RED has no instance
+   return 0;
+}
+
+
+// Fills |codecDef| with the NetEQ registration data for RED (redundant
+// audio). Returns 0 on success, -1 if the decoder is not initialized.
+WebRtc_Word32
+ACMRED::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        // Todo:
+        // log error
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_RED_FUNCTIONS".
+    // Then call NetEQ to add the codec to its
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderRED, codecInst.pltype, NULL, 8000);
+    SET_RED_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMRED::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalCreateEncoder()
+{
+    // RED has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalCreateDecoder()
+{
+    // RED has no instance
+    return 0;
+}
+
+
+void
+ACMRED::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    // RED has no instance
+    return;
+}
+
+
+void
+ACMRED::DestructEncoderSafe()
+{
+    // RED has no instance
+    return;
+}
+
+void ACMRED::DestructDecoderSafe()
+{
+    // RED has no instance
+    return;
+}
+
+
+// Removes the RED decoder from NetEQ. Fails (-1) if |payloadType| does
+// not match the payload type the decoder was registered with; otherwise
+// returns the result of ACMNetEQ::RemoveCodec().
+WebRtc_Word16
+ACMRED::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: three arguments were passed but the format string only
+        // contained two conversion specifiers; log the stored payload
+        // type with its own %d so the third argument is consumed.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+
+    return netEq->RemoveCodec(kDecoderRED);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_red.h b/trunk/src/modules/audio_coding/main/source/acm_red.h
new file mode 100644
index 0000000..d22cb8c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_red.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMRED : public ACMGenericCodec
+{
+public:
+    ACMRED(WebRtc_Word16 codecID);
+    ~ACMRED();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*       netEq,
+        WebRtc_Word16   payloadType);
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_resampler.cc b/trunk/src/modules/audio_coding/main/source/acm_resampler.cc
new file mode 100644
index 0000000..1e36e73
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_resampler.cc
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "acm_resampler.h"
+#include "critical_section_wrapper.h"
+#include "resampler.h"
+#include "signal_processing_library.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+// The critical section is heap-allocated by the factory and stored as a
+// reference member; ownership is taken here.
+ACMResampler::ACMResampler():
+
+_resamplerCritSect(*CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+// Frees the owned critical section via the address of the reference
+// member (matches the CreateCriticalSection() allocation above).
+ACMResampler::~ACMResampler()
+{
+
+    delete &_resamplerCritSect;
+}
+
+
+// Resamples 10 ms of interleaved audio from |inFreqHz| to |outFreqHz|.
+// |inAudio| holds inFreqHz/100 samples per channel; the result is written
+// to |outAudio|, which must hold at least 480*numAudioChannels samples
+// (10 ms at 48 kHz, the maximum supported rate).
+// Returns the number of output samples per channel, or -1 on error.
+WebRtc_Word16
+ACMResampler::Resample10Msec(
+    const WebRtc_Word16* inAudio,
+    WebRtc_Word32        inFreqHz,
+    WebRtc_Word16*       outAudio,
+    WebRtc_Word32        outFreqHz,
+    WebRtc_UWord8        numAudioChannels)
+{
+    // Serialize access to the shared Resampler state.
+    CriticalSectionScoped cs(_resamplerCritSect);
+
+    if(inFreqHz == outFreqHz)
+    {
+        // No rate change needed; copy the 10 ms block straight through.
+        size_t length = static_cast<size_t>(inFreqHz * numAudioChannels / 100);
+        memcpy(outAudio, inAudio, length * sizeof(WebRtc_Word16));
+        return static_cast<WebRtc_Word16>(inFreqHz / 100);
+    }
+
+    int maxLen = 480 * numAudioChannels; //max number of samples for 10ms at 48kHz
+    int lengthIn = (WebRtc_Word16)(inFreqHz / 100) * numAudioChannels;
+    int outLen;
+
+    ResamplerType type = (numAudioChannels == 1) ?
+        kResamplerSynchronous : kResamplerSynchronousStereo;
+
+    WebRtc_Word32 ret = _resampler.ResetIfNeeded(inFreqHz, outFreqHz, type);
+    if (ret < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
+            "Error in reset of resampler");
+        return -1;
+    }
+
+    ret = _resampler.Push(inAudio, lengthIn, outAudio, maxLen, outLen);
+    if (ret < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
+            "Error in resampler: resampler.Push");
+        return -1;
+    }
+
+    // Bug fix: the cast previously bound to outLen alone
+    // ("(WebRtc_Word16) outLen / numAudioChannels"), narrowing before the
+    // division; divide first, then narrow the per-channel count.
+    return static_cast<WebRtc_Word16>(outLen / numAudioChannels);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_resampler.h b/trunk/src/modules/audio_coding/main/source/acm_resampler.h
new file mode 100644
index 0000000..246a4fe
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_resampler.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
+
+#include "resampler.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+// Helper that resamples 10 ms blocks of audio between sampling rates
+// using the common Resampler class; calls are serialized by an internal
+// critical section.
+class ACMResampler
+{
+public:
+    ACMResampler();
+    ~ACMResampler();
+
+    // Resamples 10 ms of interleaved audio from inFreqHz to outFreqHz.
+    // Returns the number of output samples per channel, or -1 on error.
+    WebRtc_Word16 Resample10Msec(
+        const WebRtc_Word16* inAudio,
+        const WebRtc_Word32  inFreqHz,
+        WebRtc_Word16*       outAudio,
+        const WebRtc_Word32  outFreqHz,
+        WebRtc_UWord8        numAudioChannels);
+
+private:
+
+    //Use the Resampler class
+    Resampler               _resampler;
+    // Owned; allocated in the constructor and freed in the destructor.
+    CriticalSectionWrapper& _resamplerCritSect;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
diff --git a/trunk/src/modules/audio_coding/main/source/acm_speex.cc b/trunk/src/modules/audio_coding/main/source/acm_speex.cc
new file mode 100644
index 0000000..acb2a3b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_speex.cc
@@ -0,0 +1,669 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_speex.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_SPEEX
+    // NOTE! Speex is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/speex/main/interface/speex_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcSpeex_CreateEnc(SPEEX_encinst_t **SPEEXenc_inst,
+    //                               int32_t fs);
+    // int16_t WebRtcSpeex_FreeEnc(SPEEX_encinst_t *SPEEXenc_inst);
+    // int16_t WebRtcSpeex_CreateDec(SPEEX_decinst_t **SPEEXdec_inst,
+    //                               int32_t fs,
+    //                               int16_t enh_enabled);
+    // int16_t WebRtcSpeex_FreeDec(SPEEX_decinst_t *SPEEXdec_inst);
+    // int16_t WebRtcSpeex_Encode(SPEEX_encinst_t *SPEEXenc_inst,
+    //                            int16_t *speechIn,
+    //                            int32_t rate);
+    // int16_t WebRtcSpeex_EncoderInit(SPEEX_encinst_t *SPEEXenc_inst,
+    //                                 int16_t vbr, int16_t complexity,
+    //                                 int16_t vad_enable);
+    // int16_t WebRtcSpeex_GetBitstream(SPEEX_encinst_t *SPEEXenc_inst,
+    //                                  int16_t *encoded);
+    // int16_t WebRtcSpeex_DecodePlc(SPEEX_decinst_t *SPEEXdec_inst,
+    //                               int16_t *decoded, int16_t noOfLostFrames);
+    // int16_t WebRtcSpeex_Decode(SPEEX_decinst_t *SPEEXdec_inst,
+    //                            int16_t *encoded, int16_t len,
+    //                            int16_t *decoded, int16_t *speechType);
+    // int16_t WebRtcSpeex_DecoderInit(SPEEX_decinst_t *SPEEXdec_inst);
+    // void WebRtcSpeex_Version(char *versionStr, short len);
+    #include "speex_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_SPEEX
+// Stub implementation compiled when WEBRTC_CODEC_SPEEX is not defined.
+// Every operation fails (or is a no-op) so the rest of the ACM links and
+// runs without the proprietary Speex interface.
+ACMSPEEX::ACMSPEEX(WebRtc_Word16 /* codecID*/)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _complMode(0),
+      _vbrEnabled(false),
+      _encodingRate(-1),
+      _samplingFrequency(-1),
+      _samplesIn20MsAudio(-1) {
+  return;
+}
+
+ACMSPEEX::~ACMSPEEX()
+{
+    return;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::EnableDTX()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::DisableDTX()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMSPEEX::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+ACMGenericCodec*
+ACMSPEEX::CreateInstance(void)
+{
+    return NULL;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalCreateEncoder()
+{
+    return -1;
+}
+
+void
+ACMSPEEX::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMSPEEX::InternalCreateDecoder()
+{
+    return -1;
+}
+
+void
+ACMSPEEX::DestructDecoderSafe()
+{
+    return;
+}
+
+WebRtc_Word16
+ACMSPEEX::SetBitRateSafe(
+    const WebRtc_Word32 /* rate */)
+{
+    return -1;
+}
+
+void
+ACMSPEEX::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+WebRtc_Word16
+ACMSPEEX::UnregisterFromNetEqSafe(
+    ACMNetEQ*     /* netEq       */,
+    WebRtc_Word16 /* payloadType */)
+{
+    return -1;
+}
+
+#ifdef UNUSEDSPEEX
+WebRtc_Word16
+ACMSPEEX::EnableVBR()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::DisableVBR()
+{
+    return -1;
+}
+
+// Parameter name commented out to match the other stubs and avoid an
+// unused-parameter warning.
+WebRtc_Word16
+ACMSPEEX::SetComplMode(
+    WebRtc_Word16 /* mode */)
+{
+    return -1;
+}
+#endif
+
+#else     //===================== Actual Implementation =======================
+
+// Maps the ACM codec-database ID onto the narrowband (8 kHz) or wideband
+// (16 kHz) Speex configuration and selects that mode's default bit-rate.
+ACMSPEEX::ACMSPEEX(WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+
+    // Set sampling frequency, frame size and rate for the selected Speex mode.
+    if(_codecID == ACMCodecDB::kSPEEX8)
+    {
+        _samplingFrequency = 8000;
+        _samplesIn20MsAudio = 160;
+        _encodingRate = 11000;
+    }
+    else if(_codecID == ACMCodecDB::kSPEEX16)
+    {
+        _samplingFrequency = 16000;
+        _samplesIn20MsAudio = 320;
+        _encodingRate = 22000;
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Wrong codec id for Speex.");
+
+        // Unknown ID: mark all mode parameters invalid.
+        _samplingFrequency = -1;
+        _samplesIn20MsAudio = -1;
+        _encodingRate = -1;
+    }
+
+    // DTX is handled inside the codec (see InternalEncode); VAD/DTX and VBR
+    // start disabled, with the default complexity mode.
+    _hasInternalDTX = true;
+    _dtxEnabled = false;
+    _vbrEnabled = false;
+    _complMode =  3; // default complexity value
+
+    return;
+}
+
+ACMSPEEX::~ACMSPEEX()
+{
+    // Release any codec instances that are still allocated.
+    if (_encoderInstPtr)
+    {
+        WebRtcSpeex_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if (_decoderInstPtr)
+    {
+        WebRtcSpeex_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+// Feeds buffered audio to the Speex encoder in 20 ms chunks until a full
+// frame (_frameLenSmpl samples) is consumed, then fetches the bitstream.
+// Returns the number of encoded bytes written, or a negative encoder error.
+WebRtc_Word16
+ACMSPEEX::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    WebRtc_Word16 status;
+    WebRtc_Word16 numEncodedSamples = 0;
+    WebRtc_Word16 n = 0;
+
+    while( numEncodedSamples < _frameLenSmpl)
+    {
+        // NOTE(review): the return value is treated below as a VAD decision
+        // (0 = inactive speech) or a negative error code — confirm against
+        // speex_interface.h.
+        status = WebRtcSpeex_Encode(_encoderInstPtr, &_inAudio[_inAudioIxRead],
+            _encodingRate);
+
+        // Advance the read index so the caller can see how far we have
+        // consumed the shared input audio buffer.
+        _inAudioIxRead += _samplesIn20MsAudio;
+        numEncodedSamples += _samplesIn20MsAudio;
+
+        if(status < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "Error in Speex encoder");
+            return status;
+        }
+
+        // Update VAD, if internal DTX is used.  Two labels are stored per
+        // 20 ms chunk — presumably one per 10 ms sub-frame; TODO confirm.
+        if(_hasInternalDTX && _dtxEnabled)
+        {
+            _vadLabel[n++] = status;
+            _vadLabel[n++] = status;
+        }
+
+        if(status == 0)
+        {
+            // This frame is detected as inactive. We need send whatever
+            // encoded so far.
+            *bitStreamLenByte = WebRtcSpeex_GetBitstream(_encoderInstPtr,
+                (WebRtc_Word16*)bitStream);
+
+            return *bitStreamLenByte;
+        }
+    }
+
+    // Full frame encoded; copy the accumulated bitstream out.
+    *bitStreamLenByte = WebRtcSpeex_GetBitstream(_encoderInstPtr,
+        (WebRtc_Word16*)bitStream);
+    return *bitStreamLenByte;
+}
+
+// No-op.  NOTE(review): decoding of incoming Speex payloads is presumably
+// performed by NetEQ via the function pointers set in CodecDef — confirm.
+WebRtc_Word16
+ACMSPEEX::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+// Turns on the codec's internal VAD/DTX.  Returns 0 on success (or if DTX
+// is already on), -1 if there is no encoder instance or re-initialization
+// fails.  (The unreachable trailing `return 0;` after the exhaustive
+// if/else chain has been removed.)
+WebRtc_Word16
+ACMSPEEX::EnableDTX()
+{
+    if(_dtxEnabled)
+    {
+        // Already enabled: nothing to do.
+        return 0;
+    }
+    if(_encoderExist)
+    {
+        // Re-initialize the encoder with VAD/DTX on, keeping the current
+        // VBR and complexity settings.
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, (_vbrEnabled ? 1:0),
+                                   _complMode, 1) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot enable DTX for Speex");
+            return -1;
+        }
+        _dtxEnabled = true;
+        return 0;
+    }
+    // No encoder instance to configure.
+    return -1;
+}
+
+// Turns off the codec's internal VAD/DTX.  Returns 0 on success or when
+// there is nothing to do, -1 if re-initialization fails.  (The unreachable
+// trailing `return 0;` after the exhaustive if/else chain has been removed.)
+WebRtc_Word16
+ACMSPEEX::DisableDTX()
+{
+    if(!_dtxEnabled)
+    {
+        // Already disabled: nothing to do.
+        return 0;
+    }
+    if(_encoderExist)
+    {
+        // Re-initialize the encoder with VAD/DTX off, keeping the current
+        // VBR and complexity settings.
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, (_vbrEnabled ? 1:0),
+                                   _complMode, 0) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot disable DTX for Speex");
+            return -1;
+        }
+        _dtxEnabled = false;
+        return 0;
+    }
+    // Encoder doesn't exist, therefore disabling is harmless.
+    return 0;
+}
+
+// Configures an existing encoder instance from the given codec parameters:
+// applies the target bit-rate, then (re)initializes the encoder with the
+// current VBR/complexity settings and the requested DTX state.
+// Returns 0 on success, -1 on any failure.
+WebRtc_Word16
+ACMSPEEX::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // An encoder instance must exist before it can be configured.
+    if (_encoderInstPtr == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Cannot initialize Speex encoder, instance does not exist");
+        return -1;
+    }
+
+    const WebRtc_Word16 rateStatus =
+        SetBitRateSafe((codecParams->codecInstant).rate);
+    const WebRtc_Word16 initStatus =
+        (WebRtcSpeex_EncoderInit(_encoderInstPtr, _vbrEnabled, _complMode,
+                                 ((codecParams->enableDTX)? 1:0)) < 0)? -1:0;
+
+    if (rateStatus + initStatus >= 0) {
+        return 0;
+    }
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+    "Error in initialization of Speex encoder");
+    return -1;
+}
+
+// Initializes an existing decoder instance.  Returns 0 on success, -1 if
+// the instance is missing or the underlying init call fails.
+WebRtc_Word16
+ACMSPEEX::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // A decoder instance must exist before it can be initialized.
+    if (_decoderInstPtr == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Cannot initialize Speex decoder, instance does not exist");
+        return -1;
+    }
+
+    if (WebRtcSpeex_DecoderInit(_decoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Error in initialization of Speex decoder");
+        return -1;
+    }
+    return 0;
+}
+
+// Fills in the NetEQ codec definition for this decoder instance, keyed on
+// the configured sampling frequency (8 or 16 kHz).  Returns 0 on success,
+// -1 if the decoder is uninitialized or the frequency is unsupported.
+WebRtc_Word32
+ACMSPEEX::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Error, Speex decoder is not initialized");
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_SPEEX_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+
+    switch(_samplingFrequency)
+    {
+    case 8000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderSPEEX_8, codecInst.pltype,
+                _decoderInstPtr, 8000);
+            break;
+        }
+    case 16000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderSPEEX_16, codecInst.pltype,
+                _decoderInstPtr, 16000);
+            break;
+        }
+    default:
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Unsupported sampling frequency for Speex");
+
+            return -1;
+        }
+    }
+
+    // Hook up the decode/PLC function pointers for NetEQ.
+    SET_SPEEX_FUNCTIONS((codecDef));
+    return 0;
+}
+
+// Instance duplication (used for FEC) is not supported for Speex.
+ACMGenericCodec*
+ACMSPEEX::CreateInstance(void)
+{
+    return NULL;
+}
+
+// Allocates the Speex encoder instance for the configured sampling rate.
+// NOTE(review): return-value convention (0 ok / negative error) presumed
+// from the interface comment at the top of this file — confirm.
+WebRtc_Word16
+ACMSPEEX::InternalCreateEncoder()
+{
+    return WebRtcSpeex_CreateEnc(&_encoderInstPtr, _samplingFrequency);
+}
+
+// Frees the encoder instance (if any) and resets encoder-related state.
+void
+ACMSPEEX::DestructEncoderSafe()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcSpeex_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    // there is no encoder; reset the related state flags
+    _encoderExist = false;
+    _encoderInitialized = false;
+    _encodingRate = 0;
+}
+
+
+// Allocates the Speex decoder instance (with enhancement enabled: the
+// third argument is enh_enabled per the interface comment above).
+WebRtc_Word16
+ACMSPEEX::InternalCreateDecoder()
+{
+    return WebRtcSpeex_CreateDec(&_decoderInstPtr, _samplingFrequency, 1);
+}
+
+// Frees the decoder instance (if any) and resets decoder-related state.
+void
+ACMSPEEX::DestructDecoderSafe()
+{
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcSpeex_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    // there is no decoder instance; reset the related state flags
+    _decoderExist = false;
+    _decoderInitialized = false;
+}
+
+// Stores a new target encoding rate, rejecting rates at or below
+// 2000 bits/s.  Returns 0 on success or no-op, -1 on an invalid rate.
+WebRtc_Word16
+ACMSPEEX::SetBitRateSafe(
+    const WebRtc_Word32 rate)
+{
+    // Unchanged rate: nothing to do.
+    if (rate == _encodingRate)
+    {
+        return 0;
+    }
+    if (rate <= 2000)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Unsupported encoding rate for Speex");
+
+        return -1;
+    }
+    _encodingRate = rate;
+    _encoderParams.codecInstant.rate = rate;
+    return 0;
+}
+
+
+// Frees an encoder instance passed in by pointer; NULL is a harmless no-op.
+void
+ACMSPEEX::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if (ptrInst == NULL)
+    {
+        return;
+    }
+    WebRtcSpeex_FreeEnc((SPEEX_encinst_t_*)ptrInst);
+}
+
+
+// Removes this codec's payload mapping from NetEQ.  The given payload type
+// must match the one stored in the decoder parameters.  Returns the NetEQ
+// result on success, -1 on mismatch or unsupported sampling frequency.
+WebRtc_Word16
+ACMSPEEX::UnregisterFromNetEqSafe(
+    ACMNetEQ*     netEq,
+    WebRtc_Word16 payloadType)
+{
+    if(payloadType != _decoderParams.codecInstant.pltype)
+    {
+        // Bug fix: the format string previously had only two conversion
+        // specifiers for three variadic arguments, so the stored payload
+        // type was never printed.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot unregister codec %s given payload-type %d does not match \
+the stored payload type %d",
+            _decoderParams.codecInstant.plname,
+            payloadType,
+            _decoderParams.codecInstant.pltype);
+        return -1;
+    }
+
+    // Remove the decoder entry matching our sampling frequency.
+    switch(_samplingFrequency)
+    {
+    case 8000:
+        {
+            return netEq->RemoveCodec(kDecoderSPEEX_8);
+        }
+    case 16000:
+        {
+            return netEq->RemoveCodec(kDecoderSPEEX_16);
+        }
+    default:
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Could not unregister Speex from NetEQ. Sampling frequency doesn't match");
+            return -1;
+        }
+    }
+}
+
+
+#ifdef UNUSEDSPEEX
+
+// This API is currently not in use. If requested to be able to
+// enable/disable VBR, an ACM API needs to be added.
+// Turns on variable bit-rate mode; returns 0 on success, -1 if there is no
+// encoder instance or re-initialization fails.
+WebRtc_Word16
+ACMSPEEX::EnableVBR()
+{
+    // Nothing to do when VBR is already on.
+    if(_vbrEnabled)
+    {
+        return 0;
+    }
+    // Without an encoder instance there is nothing to reconfigure.
+    if(!_encoderExist)
+    {
+        return -1;
+    }
+    // Re-initialize the encoder with VBR on, keeping complexity and DTX.
+    if(WebRtcSpeex_EncoderInit(_encoderInstPtr, 1, _complMode,
+                               (_dtxEnabled? 1:0)) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Cannot enable VBR mode for Speex");
+
+        return -1;
+    }
+    _vbrEnabled = true;
+    return 0;
+}
+
+
+// This API is currently not in use. If requested to be able to
+// enable/disable VBR, an ACM API needs to be added.
+// Turns off variable bit-rate mode; returns 0 on success or no-op, -1 if
+// re-initialization fails.
+WebRtc_Word16
+ACMSPEEX::DisableVBR()
+{
+    if(!_vbrEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // disable VBR (copy-paste fix: comment and trace message previously
+        // said "DTX")
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, 0, _complMode, (_dtxEnabled? 1:0)) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot disable VBR for Speex");
+
+            return -1;
+        }
+        _vbrEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exist, therefore disabling is harmless
+        return 0;
+    }
+}
+
+// This API is currently not in use. If requested to be able to set
+// complexity, an ACM API needs to be added.
+// Applies a new complexity mode; returns 0 on success or no-op, -1 if
+// re-initialization fails.
+WebRtc_Word16
+ACMSPEEX::SetComplMode(
+    WebRtc_Word16 mode)
+{
+    // Check if new mode
+    if(mode == _complMode)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // Set new mode.  Bug fix: the VBR flag was previously hard-coded to
+        // 0 here, so changing the complexity silently disabled VBR; pass
+        // the current _vbrEnabled state instead.
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, (_vbrEnabled? 1:0), mode,
+                                   (_dtxEnabled? 1:0)) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Error in complexity mode for Speex");
+            return -1;
+        }
+        _complMode = mode;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exist, therefore setting the mode is harmless
+        return 0;
+    }
+}
+
+#endif
+
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/acm_speex.h b/trunk/src/modules/audio_coding/main/source/acm_speex.h
new file mode 100644
index 0000000..7988e45
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/acm_speex.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct SPEEX_encinst_t_;
+struct SPEEX_decinst_t_;
+
+namespace webrtc {
+
+// Speex codec wrapper for the audio coding module.  Compiled as a failing
+// stub when WEBRTC_CODEC_SPEEX is not defined (see acm_speex.cc).
+class ACMSPEEX : public ACMGenericCodec
+{
+public:
+    ACMSPEEX(WebRtc_Word16 codecID);
+    ~ACMSPEEX();
+    // for FEC; always returns NULL (duplication not supported)
+    ACMGenericCodec* CreateInstance(void);
+
+    // Encodes buffered audio; returns encoded bytes or a negative error.
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    // No-op in the real implementation (see acm_speex.cc).
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    // Fills the NetEQ codec definition for the configured sampling rate.
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 rate);
+
+    WebRtc_Word16 EnableDTX();
+
+    WebRtc_Word16 DisableDTX();
+
+#ifdef UNUSEDSPEEX
+    WebRtc_Word16 EnableVBR();
+
+    WebRtc_Word16 DisableVBR();
+
+    WebRtc_Word16 SetComplMode(
+        WebRtc_Word16 mode);
+#endif
+
+    WebRtc_Word16 UnregisterFromNetEqSafe(
+        ACMNetEQ*     netEq,
+        WebRtc_Word16 payloadType);
+
+    SPEEX_encinst_t_* _encoderInstPtr;   // owned Speex encoder state
+    SPEEX_decinst_t_* _decoderInstPtr;   // owned Speex decoder state
+    WebRtc_Word16     _complMode;        // encoder complexity (default 3)
+    bool              _vbrEnabled;       // variable bit-rate mode flag
+    WebRtc_Word32     _encodingRate;     // target bit-rate in bits/s
+    WebRtc_Word16     _samplingFrequency;// 8000 or 16000 Hz (-1 if invalid)
+    WebRtc_UWord16    _samplesIn20MsAudio; // samples per 20 ms chunk
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
diff --git a/trunk/src/modules/audio_coding/main/source/audio_coding_module.cc b/trunk/src/modules/audio_coding/main/source/audio_coding_module.cc
new file mode 100644
index 0000000..2cd959d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/audio_coding_module.cc
@@ -0,0 +1,136 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "acm_dtmf_detection.h"
+#include "audio_coding_module.h"
+#include "audio_coding_module_impl.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+// Create module.  The id is an application-chosen identifier forwarded to
+// the implementation (used in tracing).
+AudioCodingModule*
+AudioCodingModule::Create(
+    const WebRtc_Word32 id)
+{
+    return new AudioCodingModuleImpl(id);
+}
+
+// Destroy a module previously returned by Create().
+void
+AudioCodingModule::Destroy(
+        AudioCodingModule* module)
+{
+    delete static_cast<AudioCodingModuleImpl*> (module);
+}
+
+// Get number of supported codecs in the codec database.
+WebRtc_UWord8 AudioCodingModule::NumberOfCodecs()
+{
+    return static_cast<WebRtc_UWord8>(ACMCodecDB::kNumCodecs);
+}
+
+// Get supported codec param with id; fills `codec` and returns the
+// database's status code.
+WebRtc_Word32
+AudioCodingModule::Codec(
+    const WebRtc_UWord8 listId,
+    CodecInst&          codec)
+{
+    // Get the codec settings for the codec with the given list ID
+    return ACMCodecDB::Codec(listId, &codec);
+}
+
+// Looks up a codec by (case-insensitive) payload name, optionally
+// restricted to a sampling frequency.  On success fills `codec` and
+// returns 0; on failure marks `codec` invalid and returns -1.
+WebRtc_Word32
+AudioCodingModule::Codec(
+    const char* payloadName,
+    CodecInst&          codec,
+    const WebRtc_Word32 samplingFreqHz)
+{
+    // Walk the codec database; each iteration copies the candidate's
+    // settings into the output struct before comparing names.
+    for (int idx = 0; idx < ACMCodecDB::kNumCodecs; idx++)
+    {
+        ACMCodecDB::Codec(idx, &codec);
+        if (STR_CASE_CMP(codec.plname, payloadName) != 0)
+        {
+            continue;
+        }
+        // A samplingFreqHz of -1 acts as a frequency wildcard.
+        if ((samplingFreqHz == -1) || (samplingFreqHz == codec.plfreq))
+        {
+            return 0;
+        }
+    }
+
+    // No match found: mark the output struct as clearly invalid.
+    codec.plname[0] = '\0';
+    codec.pltype    = -1;
+    codec.pacsize   = 0;
+    codec.rate      = 0;
+    codec.plfreq    = 0;
+    return -1;
+}
+
+// Returns the database index of the codec matching the (case-insensitive)
+// payload name — and sampling frequency, unless samplingFreqHz is -1 —
+// or -1 when no codec matches.
+WebRtc_Word32
+AudioCodingModule::Codec(
+    const char* payloadName,
+    const WebRtc_Word32 samplingFreqHz)
+{
+    CodecInst codec;
+
+    // Scratch struct holds each candidate's settings while we compare.
+    for (int idx = 0; idx < ACMCodecDB::kNumCodecs; idx++)
+    {
+        ACMCodecDB::Codec(idx, &codec);
+        if (STR_CASE_CMP(codec.plname, payloadName) != 0)
+        {
+            continue;
+        }
+        // A samplingFreqHz of -1 acts as a frequency wildcard.
+        if ((samplingFreqHz == -1) || (samplingFreqHz == codec.plfreq))
+        {
+            return idx;
+        }
+    }
+
+    // We did not find a matching codec in the list.
+    return -1;
+}
+
+// Checks the validity of the parameters of the given codec by asking the
+// codec database; logs the database's error message on rejection.
+bool
+AudioCodingModule::IsCodecValid(
+    const CodecInst& codec)
+{
+    int mirrorID;
+    char errMsg[500];
+
+    // CodecNumber() returns a negative value and fills errMsg when the
+    // codec settings are rejected.
+    if (ACMCodecDB::CodecNumber(&codec, &mirrorID, errMsg, 500) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, -1, errMsg);
+        return false;
+    }
+    return true;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/audio_coding_module.gypi b/trunk/src/modules/audio_coding/main/source/audio_coding_module.gypi
new file mode 100644
index 0000000..80114e0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/audio_coding_module.gypi
@@ -0,0 +1,152 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'audio_coding_module',
+      'type': '<(library)',
+      'dependencies': [
+        'CNG',
+        'G711',
+        'G722',
+        'iLBC',
+        'iSAC',
+        'iSACFix',
+        'PCM16B',
+        'NetEq',
+        '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/common_audio/common_audio.gyp:vad',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+        '../interface',
+        '../../../interface',
+        ],
+      },
+      'sources': [
+        '../interface/audio_coding_module.h',
+        '../interface/audio_coding_module_typedefs.h',
+        'acm_amr.cc',
+        'acm_amr.h',
+        'acm_amrwb.cc',
+        'acm_amrwb.h',
+        'acm_celt.cc',
+        'acm_celt.h',
+        'acm_cng.cc',
+        'acm_cng.h',
+        'acm_codec_database.cc',
+        'acm_codec_database.h',
+        'acm_dtmf_detection.cc',
+        'acm_dtmf_detection.h',
+        'acm_dtmf_playout.cc',
+        'acm_dtmf_playout.h',
+        'acm_g722.cc',
+        'acm_g722.h',
+        'acm_g7221.cc',
+        'acm_g7221.h',
+        'acm_g7221c.cc',
+        'acm_g7221c.h',
+        'acm_g729.cc',
+        'acm_g729.h',
+        'acm_g7291.cc',
+        'acm_g7291.h',
+        'acm_generic_codec.cc',
+        'acm_generic_codec.h',
+        'acm_gsmfr.cc',
+        'acm_gsmfr.h',
+        'acm_ilbc.cc',
+        'acm_ilbc.h',
+        'acm_isac.cc',
+        'acm_isac.h',
+        'acm_isac_macros.h',
+        'acm_neteq.cc',
+        'acm_neteq.h',
+        'acm_opus.cc',
+        'acm_opus.h',
+        'acm_speex.cc',
+        'acm_speex.h',
+        'acm_pcm16b.cc',
+        'acm_pcm16b.h',
+        'acm_pcma.cc',
+        'acm_pcma.h',
+        'acm_pcmu.cc',
+        'acm_pcmu.h',
+        'acm_red.cc',
+        'acm_red.h',
+        'acm_resampler.cc',
+        'acm_resampler.h',
+        'audio_coding_module.cc',
+        'audio_coding_module_impl.cc',
+        'audio_coding_module_impl.h',
+      ],
+    },
+  ],
+  # Exclude the test targets when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'audio_coding_module_test',
+          'type': 'executable',
+          'dependencies': [
+            'audio_coding_module',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+             '../test/ACMTest.cc',
+             '../test/APITest.cc',
+             '../test/Channel.cc',
+             '../test/EncodeDecodeTest.cc',
+             '../test/iSACTest.cc',
+             '../test/PCMFile.cc',
+             '../test/RTPFile.cc',
+             '../test/SpatialAudio.cc',
+             '../test/TestAllCodecs.cc',
+             '../test/Tester.cc',
+             '../test/TestFEC.cc',
+             '../test/TestStereo.cc',
+             '../test/TestVADDTX.cc',
+             '../test/TimedTrace.cc',
+             '../test/TwoWayCommunication.cc',
+             '../test/utility.cc',
+          ],
+        },
+        {
+          'target_name': 'audio_coding_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'audio_coding_module',
+            'NetEq',
+            '<(webrtc_root)/common_audio/common_audio.gyp:vad',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+             'acm_neteq_unittest.cc',
+          ],
+        }, # audio_coding_unittests
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/main/source/audio_coding_module_impl.cc b/trunk/src/modules/audio_coding/main/source/audio_coding_module_impl.cc
new file mode 100644
index 0000000..18b6f4f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/audio_coding_module_impl.cc
@@ -0,0 +1,2624 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_dtmf_detection.h"
+#include "acm_generic_codec.h"
+#include "acm_resampler.h"
+#include "audio_coding_module_impl.h"
+#include "critical_section_wrapper.h"
+#include "engine_configurations.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#ifdef ACM_QA_TEST
+#   include <stdio.h>
+#endif
+
+#ifdef TIMED_LOGGING
+    // File-scope scratch buffer shared by every LOGWITHTIME expansion.
+    // NOTE(review): not thread-safe — concurrent expansions race on it.
+    char message[500];
+    #include "../test/timedtrace.h"
+    #include <string.h>
+    // Formats logString (expected to contain one conversion consuming _id)
+    // and hands the result to the timed tracer.
+    // NOTE(review): this expands to TWO statements without a do{}while(0)
+    // wrapper, so `if (c) LOGWITHTIME(x);` would run the trace call
+    // unconditionally — confirm call sites before relying on it in
+    // conditionals.
+    #define LOGWITHTIME(logString)                \
+                sprintf(message, logString, _id); \
+                _trace.TimedLogg(message);
+#else
+    // Logging disabled: the macro expands to nothing.
+    #define LOGWITHTIME(logString)
+#endif
+
+namespace webrtc
+{
+
+// Sentinel tone value; used below as the initial value of
+// _lastDetectedTone, i.e. "no DTMF tone currently detected".
+enum {
+    kACMToneEnd = 999
+};
+
+// Constructor.  Puts every member into a defined "nothing registered yet"
+// state, allocates the RED/FEC scratch buffers, scans the codec database
+// for the default RED and CNG (8/16/32 kHz) entries, and initializes the
+// receiver side.
+AudioCodingModuleImpl::AudioCodingModuleImpl(
+    const WebRtc_Word32 id):
+    _packetizationCallback(NULL),
+    _id(id),
+    _lastTimestamp(0),
+    _lastInTimestamp(0),
+    _vadEnabled(false),
+    _dtxEnabled(false),
+    _vadMode(VADNormal),
+    _stereoSend(false),
+    _prev_received_channel(0),
+    _expected_channels(1),
+    _currentSendCodecIdx(-1),    // invalid value
+    _sendCodecRegistered(false),
+    _acmCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+    _vadCallback(NULL),
+    _lastRecvAudioCodecPlType(255),
+    _isFirstRED(true),
+    _fecEnabled(false),
+    _fragmentation(NULL),
+    _lastFECTimestamp(0),
+    _redPayloadType(255),
+    _receiveREDPayloadType(255),  // invalid value
+    _previousPayloadType(255),
+    _dummyRTPHeader(NULL),
+    _recvPlFrameSizeSmpls(0),
+    _receiverInitialized(false),
+    _dtmfDetector(NULL),
+    _dtmfCallback(NULL),
+    _lastDetectedTone(kACMToneEnd),
+    _callbackCritSect(CriticalSectionWrapper::CreateCriticalSection())
+{
+    // Overwrite the init-list zeros with a non-zero start timestamp.
+    // NOTE(review): 0xD87F3F9F appears arbitrary — presumably chosen to
+    // exercise timestamp wrap-around early; confirm before changing.
+    _lastTimestamp = 0xD87F3F9F;
+    _lastInTimestamp = 0xD87F3F9F;
+
+    // Nullify send codec memory, set payload type and set codec name to
+    // invalid values.
+    memset(&_sendCodecInst, 0, sizeof(CodecInst));
+    strncpy(_sendCodecInst.plname, "noCodecRegistered", 31);
+    _sendCodecInst.pltype = -1;
+
+    // Nullify memory for CNG, DTMF and RED.
+    memset(&_cngNB, 0, sizeof(CodecInst));
+    memset(&_cngWB, 0, sizeof(CodecInst));
+    memset(&_cngSWB, 0, sizeof(CodecInst));
+    memset(&_RED, 0, sizeof(CodecInst));
+    memset(&_DTMF, 0, sizeof(CodecInst));
+
+    // Clear all per-codec slots; -1 / NULL mean "not registered".
+    for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++)
+    {
+        _codecs[i]            = NULL;
+        _registeredPlTypes[i] = -1;
+        _stereoReceive[i]     = false;
+        _slaveCodecs[i]       = NULL;
+        _mirrorCodecIdx[i]    = -1;
+    }
+
+    _netEq.SetUniqueId(_id);
+
+    // Allocate memory for RED
+    _redBuffer = new WebRtc_UWord8[MAX_PAYLOAD_SIZE_BYTE];
+    // Fragmentation header for the two RED fragments: [0] = new payload,
+    // [1] = redundant (previous) payload.  The header object owns the
+    // arrays assigned here (see destructor comment).
+    _fragmentation = new RTPFragmentationHeader;
+    _fragmentation->fragmentationVectorSize = 2;
+    _fragmentation->fragmentationOffset = new WebRtc_UWord32[2];
+    _fragmentation->fragmentationLength = new WebRtc_UWord32[2];
+    _fragmentation->fragmentationTimeDiff = new WebRtc_UWord16[2];
+    _fragmentation->fragmentationPlType = new WebRtc_UWord8[2];
+
+    // Register the default payload type for RED and for
+    // CNG for the three frequencies 8, 16 and 32 kHz
+    // (scanning backwards means the lowest-index database entry wins
+    // when several entries match).
+    for (int i = (ACMCodecDB::kNumCodecs - 1); i>=0; i--)
+    {
+        if((STR_CASE_CMP(ACMCodecDB::database_[i].plname, "red") == 0))
+        {
+            _redPayloadType = ACMCodecDB::database_[i].pltype;
+        }
+        else if ((STR_CASE_CMP(ACMCodecDB::database_[i].plname, "CN") == 0))
+        {
+            if (ACMCodecDB::database_[i].plfreq == 8000)
+            {
+                memcpy(&_cngNB, &ACMCodecDB::database_[i], sizeof(_cngNB));
+            }
+            else if (ACMCodecDB::database_[i].plfreq == 16000)
+            {
+                memcpy(&_cngWB, &ACMCodecDB::database_[i], sizeof(_cngWB));
+            } else if (ACMCodecDB::database_[i].plfreq == 32000)
+            {
+                memcpy(&_cngSWB, &ACMCodecDB::database_[i], sizeof(_cngSWB));
+            }
+        }
+    }
+
+    // Receiver init failure is only traced; construction still completes.
+    if(InitializeReceiverSafe() < 0 )
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Cannot initialize reciever");
+    }
+#ifdef TIMED_LOGGING
+    _trace.SetUp("TimedLogg.txt");
+#endif
+
+#ifdef ACM_QA_TEST
+    // QA builds dump incoming/outgoing payload metadata to files whose
+    // names embed the module id plus six random digits.
+    char fileName[500];
+    sprintf(fileName, "ACM_QA_incomingPL_%03d_%d%d%d%d%d%d.dat",
+        _id,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10);
+
+    _incomingPL = fopen(fileName, "wb");
+
+    sprintf(fileName, "ACM_QA_outgoingPL_%03d_%d%d%d%d%d%d.dat",
+        _id,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10,
+        rand() % 10);
+    _outgoingPL = fopen(fileName, "wb");
+#endif
+
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id, "Created");
+}
+
+// Destructor.  Releases every owned codec instance, the DTMF detector,
+// the RED buffer and the fragmentation header, then the two critical
+// sections.
+AudioCodingModuleImpl::~AudioCodingModuleImpl()
+{
+    {
+        CriticalSectionScoped lock(*_acmCritSect);
+        _currentSendCodecIdx = -1;
+
+        // Several _codecs slots can alias one object (see _mirrorCodecIdx
+        // assignments in RegisterSendCodec): delete through the mirror
+        // index once and NULL both slots so nothing is freed twice.
+        for (int i=0; i < ACMCodecDB::kMaxNumCodecs; i++)
+        {
+            if (_codecs[i] != NULL)
+            {
+                assert(_mirrorCodecIdx[i] > -1);
+                if(_codecs[_mirrorCodecIdx[i]] != NULL)
+                {
+                    delete _codecs[_mirrorCodecIdx[i]];
+                    _codecs[_mirrorCodecIdx[i]] = NULL;
+                }
+                _codecs[i] = NULL;
+            }
+
+            // Same aliasing scheme for the stereo slave codecs.
+            if(_slaveCodecs[i] != NULL)
+            {
+                assert(_mirrorCodecIdx[i] > -1);
+                if(_slaveCodecs[_mirrorCodecIdx[i]] != NULL)
+                {
+                    delete _slaveCodecs[_mirrorCodecIdx[i]];
+                    _slaveCodecs[_mirrorCodecIdx[i]] =  NULL;
+                }
+                _slaveCodecs[i] = NULL;
+            }
+        }
+
+        if(_dtmfDetector != NULL)
+        {
+            delete _dtmfDetector;
+            _dtmfDetector = NULL;
+        }
+        if(_dummyRTPHeader != NULL)
+        {
+            delete _dummyRTPHeader;
+            _dummyRTPHeader = NULL;
+        }
+        if(_redBuffer != NULL)
+        {
+            delete [] _redBuffer;
+            _redBuffer = NULL;
+        }
+        if(_fragmentation != NULL)
+        {
+            // Only need to delete fragmentation header, it will clean
+            // up it's own memory
+            delete _fragmentation;
+            _fragmentation = NULL;
+        }
+    }
+
+#ifdef ACM_QA_TEST
+        // Close the QA dump files opened in the constructor (if any).
+        if(_incomingPL != NULL)
+        {
+            fclose(_incomingPL);
+        }
+
+        if(_outgoingPL != NULL)
+        {
+            fclose(_outgoingPL);
+        }
+#endif
+
+    delete _callbackCritSect;
+    _callbackCritSect = NULL;
+
+    delete _acmCritSect;
+    _acmCritSect = NULL;
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, _id, "Destroyed");
+}
+
+// Changes the module's unique id and propagates it to every registered
+// codec and to NetEQ.  In QA builds the payload dump files are reopened
+// under names containing the new id.  Always returns 0.
+WebRtc_Word32
+AudioCodingModuleImpl::ChangeUniqueId(
+    const WebRtc_Word32 id)
+{
+    {
+        CriticalSectionScoped lock(*_acmCritSect);
+        _id = id;
+#ifdef ACM_QA_TEST
+        // Close the old dump files before reopening them with the new id.
+        if(_incomingPL != NULL)
+        {
+            fclose(_incomingPL);
+        }
+
+        if(_outgoingPL != NULL)
+        {
+            fclose(_outgoingPL);
+        }
+
+        char fileName[500];
+        sprintf(fileName, "ACM_QA_incomingPL_%03d_%d%d%d%d%d%d.dat",
+            _id,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10);
+
+        _incomingPL = fopen(fileName, "wb");
+
+        sprintf(fileName, "ACM_QA_outgoingPL_%03d_%d%d%d%d%d%d.dat",
+            _id,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10,
+            rand() % 10);
+        _outgoingPL = fopen(fileName, "wb");
+#endif
+
+        // Forward the new id to every codec slot that is in use.
+        for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++)
+        {
+            if(_codecs[i] != NULL)
+            {
+                _codecs[i]->SetUniqueID(id);
+            }
+        }
+    }
+
+    // NOTE(review): _id is read here after the critical section above has
+    // been released — confirm no concurrent ChangeUniqueId callers.
+    _netEq.SetUniqueId(_id);
+    return 0;
+}
+
+// Returns the number of milliseconds until the module wants a worker
+// thread to call Process(), i.e. how long until the encoder has a full
+// frame's worth of samples.  Returns -1 when no valid encoder is
+// registered.
+WebRtc_Word32
+AudioCodingModuleImpl::TimeUntilNextProcess()
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    if(!HaveValidEncoder("TimeUntilNextProcess"))
+    {
+        return -1;
+    }
+    // Convert remaining samples to milliseconds using the send codec's
+    // sampling frequency (plfreq is in Hz, so plfreq/1000 = samples/ms).
+    const WebRtc_Word32 samplesLeft =
+        _codecs[_currentSendCodecIdx]->SamplesLeftToEncode();
+    const WebRtc_Word32 samplesPerMs = _sendCodecInst.plfreq / 1000;
+    return samplesLeft / samplesPerMs;
+}
+
+// Process any pending tasks such as timeouts
+// Worker-thread entry point: pulls one encoded frame out of the active
+// encoder, optionally wraps it in a RED (RFC 2198) payload when FEC is
+// enabled, and delivers it through the registered packetization callback.
+// Returns the payload length in bytes, 0 when the encoder has not yet
+// accumulated a full frame, or -1 on error.
+WebRtc_Word32
+AudioCodingModuleImpl::Process()
+{
+    WebRtc_UWord8 bitStream[2 * MAX_PAYLOAD_SIZE_BYTE]; // Make room for 1 RED payload
+    WebRtc_Word16 lengthBytes = 2 * MAX_PAYLOAD_SIZE_BYTE;
+    WebRtc_Word16 redLengthBytes = lengthBytes;
+    WebRtc_UWord32 rtpTimestamp;
+    WebRtc_Word16 status;
+    WebRtcACMEncodingType encodingType;
+    FrameType frameType = kAudioFrameSpeech;
+    WebRtc_UWord8 currentPayloadType = 0;
+    bool hasDataToSend = false;
+    bool fecActive = false;
+
+    // keep the scope of the ACM critical section limited
+    {
+        CriticalSectionScoped lock(*_acmCritSect);
+        if(!HaveValidEncoder("Process"))
+        {
+            return -1;
+        }
+
+        // Encode() reports: <0 error, 0 not enough audio buffered,
+        // >0 a frame was produced (lengthBytes / encodingType updated).
+        status = _codecs[_currentSendCodecIdx]->Encode(bitStream, &lengthBytes,
+                 &rtpTimestamp, &encodingType);
+        if (status < 0) // Encode failed
+        {
+            // logging error
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Process(): Encoding Failed");
+            lengthBytes = 0;
+            return -1;
+        }
+        else if(status == 0)
+        {
+            // Not enough data
+            return 0;
+        }
+        else
+        {
+            // Map the encoding type to the outgoing payload type and RTP
+            // frame type.  Any DTX/CNG frame restarts the RED sequence
+            // (_isFirstRED = true).
+            switch(encodingType)
+            {
+            case kNoEncoding:
+                {
+                    currentPayloadType = _previousPayloadType;
+                    frameType = kFrameEmpty;
+                    lengthBytes = 0;
+                    break;
+                }
+            case kActiveNormalEncoded:
+            case kPassiveNormalEncoded:
+                {
+                    currentPayloadType = (WebRtc_UWord8)_sendCodecInst.pltype;
+                    frameType = kAudioFrameSpeech;
+                    break;
+                }
+            case kPassiveDTXNB:
+                {
+                    currentPayloadType = (WebRtc_UWord8)_cngNB.pltype;
+                    frameType = kAudioFrameCN;
+                    _isFirstRED = true;
+                    break;
+                }
+            case kPassiveDTXWB:
+                {
+                    currentPayloadType = (WebRtc_UWord8)_cngWB.pltype;
+                    frameType = kAudioFrameCN;
+                    _isFirstRED = true;
+                    break;
+                }
+            case kPassiveDTXSWB:
+                {
+                    currentPayloadType = (WebRtc_UWord8)_cngSWB.pltype;
+                    frameType = kAudioFrameCN;
+                    _isFirstRED = true;
+                    break;
+                }
+            }
+            hasDataToSend = true;
+            _previousPayloadType = currentPayloadType;
+
+            // Redundancy encode is done here,
+            // the two bitstreams packetized into
+            // one RTP packet and the fragmentation points
+            // are set.
+            // Only apply RED on speech data.
+            if((_fecEnabled) &&
+                ((encodingType == kActiveNormalEncoded) ||
+                 (encodingType == kPassiveNormalEncoded)))
+            {
+                // FEC is enabled within this scope.
+                //
+                // Note that, a special solution exists for iSAC since it is the only codec for
+                // which getRedPayload has a non-empty implementation.
+                //
+                // Summary of the FEC scheme below (use iSAC as example):
+                //
+                //  1st (_firstRED is true) encoded iSAC frame (primary #1) =>
+                //      - call getRedPayload() and store redundancy for packet #1 in second
+                //        fragment of RED buffer (old data)
+                //      - drop the primary iSAC frame
+                //      - don't call SendData
+                //  2nd (_firstRED is false) encoded iSAC frame (primary #2) =>
+                //      - store primary #2 in 1st fragment of RED buffer and send the combined
+                //        packet
+                //      - the transmitted packet contains primary #2 (new) and reduncancy for
+                //        packet #1 (old)
+                //      - call getRedPayload() and store redundancy for packet #2 in second
+                //        fragment of RED buffer
+                //
+                //  ...
+                //
+                //  Nth encoded iSAC frame (primary #N) =>
+                //      - store primary #N in 1st fragment of RED buffer and send the combined
+                //        packet
+                //      - the transmitted packet contains primary #N (new) and reduncancy for
+                //        packet #(N-1) (old)
+                //      - call getRedPayload() and store redundancy for packet #N in second
+                //        fragment of RED buffer
+                //
+                //  For all other codecs, getRedPayload does nothing and returns -1 =>
+                //  redundant data is only a copy.
+                //
+                //  First combined packet contains : #2 (new) and #1 (old)
+                //  Second combined packet contains: #3 (new) and #2 (old)
+                //  Third combined packet contains : #4 (new) and #3 (old)
+                //
+                //  Hence, even if every second packet is dropped, perfect reconstruction is
+                //  possible.
+                fecActive = true;
+
+                hasDataToSend = false;
+                if(!_isFirstRED)    // skip this part for the first packet in a RED session
+                {
+                    // Rearrange bitStream such that FEC packets are included.
+                    // Replace bitStream now that we have stored current bitStream.
+                    memcpy(bitStream + _fragmentation->fragmentationOffset[1], _redBuffer,
+                        _fragmentation->fragmentationLength[1]);
+                    // Update the fragmentation time difference vector
+                    WebRtc_UWord16 timeSinceLastTimestamp =
+                            WebRtc_UWord16(rtpTimestamp - _lastFECTimestamp);
+
+                    // Update fragmentation vectors
+                    _fragmentation->fragmentationPlType[1] =
+                            _fragmentation->fragmentationPlType[0];
+                    _fragmentation->fragmentationTimeDiff[1] = timeSinceLastTimestamp;
+                    hasDataToSend = true;
+                }
+
+                // Insert new packet length.
+                _fragmentation->fragmentationLength[0] = lengthBytes;
+
+                // Insert new packet payload type.
+                _fragmentation->fragmentationPlType[0] = currentPayloadType;
+                _lastFECTimestamp = rtpTimestamp;
+
+                // can be modified by the GetRedPayload() call if iSAC is utilized
+                redLengthBytes = lengthBytes;
+                // A fragmentation header is provided => packetization according to RFC 2198
+                // (RTP Payload for Redundant Audio Data) will be used.
+                // First fragment is the current data (new).
+                // Second fragment is the previous data (old).
+                lengthBytes =
+                    static_cast<WebRtc_Word16> (_fragmentation->fragmentationLength[0] +
+                            _fragmentation->fragmentationLength[1]);
+
+                // Get, and store, redundant data from the encoder based on the recently
+                // encoded frame.
+                // NOTE - only iSAC contains an implementation; all other codecs does nothing
+                // and returns -1.
+                if (_codecs[_currentSendCodecIdx]->GetRedPayload(_redBuffer,
+                        &redLengthBytes) == -1)
+                {
+                    // The codec was not iSAC => use current encoder output as redundant data
+                    // instead (trivial FEC scheme)
+                    memcpy(_redBuffer, bitStream, redLengthBytes);
+                }
+
+                _isFirstRED = false;
+                // Update payload type with RED payload type
+                currentPayloadType = _redPayloadType;
+            }
+        }
+    }
+
+    // Deliver the payload outside the ACM lock; callback registration is
+    // protected by its own critical section.
+    if(hasDataToSend)
+    {
+        CriticalSectionScoped lock(*_callbackCritSect);
+#ifdef ACM_QA_TEST
+        if(_outgoingPL != NULL)
+        {
+            fwrite(&rtpTimestamp,       sizeof(WebRtc_UWord32), 1, _outgoingPL);
+            fwrite(&currentPayloadType, sizeof(WebRtc_UWord8),  1, _outgoingPL);
+            fwrite(&lengthBytes,        sizeof(WebRtc_Word16),  1, _outgoingPL);
+        }
+#endif
+
+        if(_packetizationCallback != NULL)
+        {
+            if (fecActive) {
+                _packetizationCallback->SendData(frameType, currentPayloadType,
+                    rtpTimestamp, bitStream, lengthBytes, _fragmentation);
+            } else {
+                _packetizationCallback->SendData(frameType, currentPayloadType,
+                    rtpTimestamp, bitStream, lengthBytes, NULL);
+            }
+        }
+
+        // This is for test
+        if(_vadCallback != NULL)
+        {
+            _vadCallback->InFrameType(((WebRtc_Word16)encodingType));
+        }
+    }
+    if (fecActive) {
+        // Record the redundancy length for the next Process() call.
+        // NOTE(review): this write to _fragmentation happens outside the
+        // _acmCritSect scope above — confirm no other thread touches the
+        // fragmentation header concurrently.
+        _fragmentation->fragmentationLength[1] = redLengthBytes;
+    }
+    return lengthBytes;
+}
+
+
+
+
+/////////////////////////////////////////
+//   Sender
+//
+
+// Initialize send codec
+// Unregisters any current send codec, destructs every allocated encoder
+// and, when FEC is enabled, resets the RED buffer and fragmentation
+// header.  Always returns 0.
+WebRtc_Word32
+AudioCodingModuleImpl::InitializeSender()
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    _sendCodecRegistered = false;
+    _currentSendCodecIdx = -1; // invalid value
+
+    _sendCodecInst.plname[0] = '\0';
+
+    for(int codecCntr = 0; codecCntr < ACMCodecDB::kMaxNumCodecs; codecCntr++)
+    {
+        if(_codecs[codecCntr] != NULL)
+        {
+            _codecs[codecCntr]->DestructEncoder();
+        }
+    }
+    // Initialize FEC/RED
+    _isFirstRED = true;
+    if(_fecEnabled)
+    {
+        if(_redBuffer != NULL)
+        {
+            memset(_redBuffer, 0, MAX_PAYLOAD_SIZE_BYTE);
+        }
+        if(_fragmentation != NULL)
+        {
+            _fragmentation->fragmentationVectorSize = 2;
+            // Fragment 0 (new data) starts at the beginning of the RED
+            // bitstream buffer; fragment 1 (redundant data) starts at
+            // MAX_PAYLOAD_SIZE_BYTE — Process() copies the stored
+            // redundancy to bitStream + fragmentationOffset[1].
+            // BUGFIX: the second assignment used to write index [0]
+            // again, leaving fragmentationOffset[1] uninitialized.
+            _fragmentation->fragmentationOffset[0] = 0;
+            _fragmentation->fragmentationOffset[1] = MAX_PAYLOAD_SIZE_BYTE;
+            memset(_fragmentation->fragmentationLength, 0, sizeof(WebRtc_UWord32) * 2);
+            memset(_fragmentation->fragmentationTimeDiff, 0, sizeof(WebRtc_UWord16) * 2);
+            memset(_fragmentation->fragmentationPlType, 0, sizeof(WebRtc_UWord8) * 2);
+        }
+    }
+
+    return 0;
+}
+
+// Resets the currently registered encoder to its initial state.
+// Returns the encoder's own result, or -1 when no valid encoder exists.
+WebRtc_Word32
+AudioCodingModuleImpl::ResetEncoder()
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(HaveValidEncoder("ResetEncoder"))
+    {
+        return _codecs[_currentSendCodecIdx]->ResetEncoder();
+    }
+    return -1;
+}
+
+// Invalidates the current send codec selection; no codec is used for
+// sending until RegisterSendCodec() succeeds again.
+void
+AudioCodingModuleImpl::UnregisterSendCodec()
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    _currentSendCodecIdx = -1;    // invalid value
+    _sendCodecRegistered = false;
+}
+
+// Instantiates a codec object for the given codec description and wires
+// it to this module (unique id and NetEQ decode lock).  Returns NULL on
+// failure (and logs an error trace).
+ACMGenericCodec*
+AudioCodingModuleImpl::CreateCodec(
+    const CodecInst& codec)
+{
+    ACMGenericCodec* codecPtr = ACMCodecDB::CreateCodecInstance(&codec);
+    if(codecPtr == NULL)
+    {
+        // Error, could not create the codec
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "ACMCodecDB::CreateCodecInstance() failed in CreateCodec()");
+        return NULL;
+    }
+
+    codecPtr->SetUniqueID(_id);
+    codecPtr->SetNetEqDecodeLock(_netEq.DecodeLock());
+    return codecPtr;
+}
+
+// can be called multiple times for Codec, CNG, RED
+// Registers sendCodec as the send codec (or, for "red"/"CN" entries,
+// records their payload types/instances).  If the same codec family is
+// already registered, only the changed parameters (payload type,
+// frequency, packet size, channels, rate) are applied.  Returns 0 on
+// success, -1 on any validation or initialization failure.
+WebRtc_Word32
+AudioCodingModuleImpl::RegisterSendCodec(
+    const CodecInst& sendCodec)
+{
+    // NOTE(review): channels == 2 passes this check although the error
+    // text below claims only mono is supported — confirm which is
+    // intended.
+    if((sendCodec.channels != 1) && (sendCodec.channels != 2))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Registering Send codec failed due to wrong number of channels, %d. Only\
+mono codecs are supported, i.e. channels=1.", sendCodec.channels);
+        return -1;
+    }
+
+    char errMsg[500];
+    int mirrorId;
+    int codecID = ACMCodecDB::CodecNumber(&sendCodec, &mirrorId, errMsg,
+                                          sizeof(errMsg));
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // Check for reported errors from function CodecNumber()
+    if(codecID < 0)
+    {
+        if(!_sendCodecRegistered)
+        {
+            // This values has to be NULL if there is no codec registered
+            _currentSendCodecIdx = -1;  // invalid value
+        }
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id, errMsg);
+        // Failed to register Send Codec
+        return -1;
+    }
+
+    // telephone-event cannot be a send codec
+    if(!STR_CASE_CMP(sendCodec.plname, "telephone-event"))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "telephone-event cannot be registered as send codec");
+        return -1;
+    }
+
+    // RED can be registered with other payload type. If not registered a default
+    // payload type is used.
+    if(!STR_CASE_CMP(sendCodec.plname, "red"))
+    {
+        // Check if the payload-type is valid
+        if(!ACMCodecDB::ValidPayloadType(sendCodec.pltype))
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Invalid payload-type %d for %s.",
+                sendCodec.pltype, sendCodec.plname);
+            return -1;
+        }
+        // Set RED payload type
+        _redPayloadType = (WebRtc_UWord8)sendCodec.pltype;
+        return 0;
+    }
+
+    // CNG can be registered with other payload type. If not registered the
+    // default payload types will be used: CNNB=13 (fixed), CNWB=97, CNSWB=98
+    if(!STR_CASE_CMP(sendCodec.plname, "CN"))
+    {
+        // CNG is registered
+        switch(sendCodec.plfreq)
+        {
+        case 8000:
+            {
+                memcpy(&_cngNB, &sendCodec, sizeof(_cngNB));
+                break;
+            }
+        case 16000:
+            {
+                memcpy(&_cngWB, &sendCodec, sizeof(_cngWB));
+                break;
+            }
+        case 32000:
+            {
+                memcpy(&_cngSWB, &sendCodec, sizeof(_cngSWB));
+                break;
+            }
+        default :
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "RegisterSendCodec() failed, invalid frequency for CNG registeration");
+                return -1;
+            }
+        }
+
+        return 0;
+    }
+
+    // Check if the payload-type is valid
+    if(!ACMCodecDB::ValidPayloadType(sendCodec.pltype))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Invalid payload-type %d for %s.",
+                sendCodec.pltype, sendCodec.plname);
+        return -1;
+    }
+
+    // Check if codec supports the number of channels
+    if(ACMCodecDB::codec_settings_[codecID].channel_support < sendCodec.channels)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "%d number of channels not supportedn for %s.",
+                sendCodec.channels, sendCodec.plname);
+        return -1;
+    }
+
+    // Set Stereo
+    // NOTE(review): _stereoSend is never reset to false when a mono
+    // codec is registered afterwards — confirm whether that is intended.
+    if (sendCodec.channels == 2)
+    {
+        _stereoSend = true;
+    }
+
+    // check if the codec is already registered as send codec
+    // (same codec id, or same mirror id, counts as the same family —
+    // e.g. one iSAC instance serves both WB and SWB).
+    bool oldCodecFamily;
+    if(_sendCodecRegistered)
+    {
+        int sendCodecMirrorID;
+        int sendCodecID =
+                ACMCodecDB::CodecNumber(&_sendCodecInst, &sendCodecMirrorID);
+        assert(sendCodecID >= 0);
+        oldCodecFamily = (sendCodecID == codecID) || (mirrorId == sendCodecMirrorID);
+    }
+    else
+    {
+        oldCodecFamily = false;
+    }
+
+    // If new codec, register
+    if (!oldCodecFamily)
+    {
+        // The codec object lives in the mirror slot; _codecs[codecID] may
+        // alias it (see _mirrorCodecIdx bookkeeping, also used by the
+        // destructor to avoid double deletion).
+        if(_codecs[mirrorId] == NULL)
+        {
+
+            _codecs[mirrorId] = CreateCodec(sendCodec);
+            if(_codecs[mirrorId] == NULL)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Cannot Create the codec");
+                return -1;
+            }
+            _mirrorCodecIdx[mirrorId] = mirrorId;
+        }
+
+        if(mirrorId != codecID)
+        {
+            _codecs[codecID] = _codecs[mirrorId];
+            _mirrorCodecIdx[codecID] = mirrorId;
+        }
+
+        ACMGenericCodec* tmpCodecPtr = _codecs[codecID];
+        WebRtc_Word16 status;
+        WebRtcACMCodecParams codecParams;
+
+        memcpy(&(codecParams.codecInstant), &sendCodec,
+            sizeof(CodecInst));
+        codecParams.enableVAD = _vadEnabled;
+        codecParams.enableDTX = _dtxEnabled;
+        codecParams.vadMode   = _vadMode;
+        // force initialization
+        status = tmpCodecPtr->InitEncoder(&codecParams, true);
+
+        // Check if VAD was turned on, or if error is reported
+        if (status == 1) {
+            _vadEnabled = true;
+        } else if (status < 0)
+        {
+            // could not initialize the encoder
+
+            // Check if already have a registered codec
+            // Depending on that different messages are logged
+            if(!_sendCodecRegistered)
+            {
+                _currentSendCodecIdx = -1;     // invalid value
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Cannot Initialize the encoder No Encoder is registered");
+            }
+            else
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Cannot Initialize the encoder, continue encoding \
+with the previously registered codec");
+            }
+            return -1;
+        }
+
+        // Everything is fine so we can replace the previous codec
+        // with this one
+        if(_sendCodecRegistered)
+        {
+            // If we change codec we start fresh with FEC.
+            // This is not strictly required by the standard.
+            _isFirstRED = true;
+
+            if(tmpCodecPtr->SetVAD(_dtxEnabled, _vadEnabled, _vadMode) < 0){
+                // SetVAD failed
+                _vadEnabled = false;
+                _dtxEnabled = false;
+            }
+
+        }
+
+        _currentSendCodecIdx = codecID;
+        _sendCodecRegistered = true;
+        memcpy(&_sendCodecInst, &sendCodec, sizeof(CodecInst));
+        _previousPayloadType = _sendCodecInst.pltype;
+        return 0;
+    }
+    else
+    {
+        // If codec is the same as already registers check if any parameters
+        // has changed compared to the current values.
+        // If any parameter is valid then apply it and record.
+        bool forceInit = false;
+
+        if(mirrorId != codecID)
+        {
+            _codecs[codecID] = _codecs[mirrorId];
+            _mirrorCodecIdx[codecID] = mirrorId;
+        }
+
+        // check the payload-type
+        if(sendCodec.pltype != _sendCodecInst.pltype)
+        {
+            // At this point check if the given payload type is valid.
+            // Record it later when the sampling frequency is changed
+            // successfully.
+            if(!ACMCodecDB::ValidPayloadType(sendCodec.pltype))
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Out of range payload type");
+                return -1;
+            }
+
+        }
+
+        // If there is a codec that ONE instance of codec supports multiple
+        // sampling frequencies, then we need to take care of it here.
+        // one such a codec is iSAC. Both WB and SWB are encoded and decoded
+        // with one iSAC instance. Therefore, we need to update the encoder
+        // frequency if required.
+        if(_sendCodecInst.plfreq != sendCodec.plfreq)
+        {
+            forceInit = true;
+
+            // if sampling frequency is changed we have to start fresh with RED.
+            _isFirstRED = true;
+        }
+
+        // If packet size or number of channels has changed, we need to
+        // re-initialize the encoder.
+        if(_sendCodecInst.pacsize != sendCodec.pacsize)
+        {
+            forceInit = true;
+        }
+        if(_sendCodecInst.channels != sendCodec.channels)
+        {
+            forceInit = true;
+        }
+
+        if(forceInit)
+        {
+            WebRtcACMCodecParams codecParams;
+
+            memcpy(&(codecParams.codecInstant), &sendCodec,
+                sizeof(CodecInst));
+            codecParams.enableVAD = _vadEnabled;
+            codecParams.enableDTX = _dtxEnabled;
+            codecParams.vadMode   = _vadMode;
+
+            // force initialization
+            if(_codecs[_currentSendCodecIdx]->InitEncoder(&codecParams, true) < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Could not change the codec packet-size.");
+                return -1;
+            }
+
+            _sendCodecInst.plfreq = sendCodec.plfreq;
+            _sendCodecInst.pacsize = sendCodec.pacsize;
+            _sendCodecInst.channels = sendCodec.channels;
+        }
+
+        // If the change of sampling frequency has been successful then
+        // we store the payload-type.
+        _sendCodecInst.pltype = sendCodec.pltype;
+
+        // check if a change in Rate is required
+        if(sendCodec.rate != _sendCodecInst.rate)
+        {
+            if(_codecs[codecID]->SetBitRate(sendCodec.rate) < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Could not change the codec rate.");
+                return -1;
+            }
+            _sendCodecInst.rate = sendCodec.rate;
+        }
+        _previousPayloadType = _sendCodecInst.pltype;
+
+        return 0;
+    }
+}
+
+// get current send codec
+// Copies the active encoder's parameters (with the registered payload
+// type) into currentSendCodec.  Returns 0 on success, -1 when no send
+// codec is registered.
+WebRtc_Word32
+AudioCodingModuleImpl::SendCodec(
+    CodecInst& currentSendCodec) const
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+        "SendCodec()");
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    if(!_sendCodecRegistered)
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+            "SendCodec Failed, no codec is registered");
+
+        return -1;
+    }
+    // Ask the encoder for its current parameters, override the payload
+    // type with the registered one, and hand the result to the caller.
+    WebRtcACMCodecParams params;
+    _codecs[_currentSendCodecIdx]->EncoderParams(&params);
+    params.codecInstant.pltype = _sendCodecInst.pltype;
+    memcpy(&currentSendCodec, &(params.codecInstant), sizeof(CodecInst));
+
+    return 0;
+}
+
+// get current send freq
+WebRtc_Word32
+AudioCodingModuleImpl::SendFrequency() const
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+        "SendFrequency()");
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // The sampling frequency is only meaningful once a send codec has
+    // been registered.
+    if(_sendCodecRegistered)
+    {
+        return _sendCodecInst.plfreq;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+        "SendFrequency Failed, no codec is registered");
+    return -1;
+}
+
+// Get encode bitrate.
+// Adaptive rate codecs return their current encode target rate, while
+// other codecs return their long-term average or their fixed rate.
+WebRtc_Word32
+AudioCodingModuleImpl::SendBitrate() const
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    if(!_sendCodecRegistered)
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+            "SendBitrate Failed, no codec is registered");
+
+        return -1;
+    }
+
+    // Query the live encoder rather than the cached _sendCodecInst, so
+    // adaptive codecs report the rate currently in effect.
+    WebRtcACMCodecParams encoderParam;
+    _codecs[_currentSendCodecIdx]->EncoderParams(&encoderParam);
+
+    return encoderParam.codecInstant.rate;
+}
+
+// set available bandwidth, inform the encoder about the estimated bandwidth
+// received from the remote party
+WebRtc_Word32
+AudioCodingModuleImpl::SetReceivedEstimatedBandwidth(
+    const WebRtc_Word32  bw )
+{
+    // Take the ACM lock and validate the encoder before dereferencing
+    // _codecs[_currentSendCodecIdx], like every other accessor in this
+    // class (see Add10MsData/SetVAD); previously this could dereference
+    // a NULL entry when no send codec was registered.
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(!HaveValidEncoder("SetReceivedEstimatedBandwidth"))
+    {
+        return -1;
+    }
+    return _codecs[_currentSendCodecIdx]->SetEstimatedBandwidth(bw);
+}
+
+// register a transport callback which will be called to deliver
+// the encoded buffers
+WebRtc_Word32
+AudioCodingModuleImpl::RegisterTransportCallback(
+    AudioPacketizationCallback* transport)
+{
+    // Store the pointer under the callback lock; encoder state is not
+    // touched, so the ACM lock is not needed here.
+    CriticalSectionScoped lock(*_callbackCritSect);
+    _packetizationCallback = transport;
+    return 0;
+}
+
+// Used by the module to deliver messages to the codec module/application,
+// AVT (DTMF). Compiled out (always returns -1) unless
+// WEBRTC_DTMF_DETECTION is defined.
+WebRtc_Word32
+AudioCodingModuleImpl::RegisterIncomingMessagesCallback(
+#ifndef WEBRTC_DTMF_DETECTION
+    AudioCodingFeedback* /* incomingMessagesCallback */,
+    const ACMCountries   /* cpt                      */)
+{
+    return -1;
+#else
+    AudioCodingFeedback* incomingMessagesCallback,
+    const ACMCountries   cpt)
+{
+    WebRtc_Word16 status = 0;
+
+    // Store the callback under the callback lock. Scoped so the lock is
+    // released before the ACM lock is taken below (never hold both).
+    {
+        CriticalSectionScoped lock(*_callbackCritSect);
+        _dtmfCallback = incomingMessagesCallback;
+    }
+    // enter the ACM critical section to set up the DTMF class.
+    {
+        CriticalSectionScoped lock(*_acmCritSect);
+        // A NULL callback means "disable": tear down the detector.
+        if(incomingMessagesCallback == NULL)
+        {
+            // callback is disabled, delete DTMF-detector class
+            if(_dtmfDetector != NULL)
+            {
+                delete _dtmfDetector;
+                _dtmfDetector = NULL;
+            }
+            status = 0;
+        }
+        else
+        {
+            status = 0;
+            if(_dtmfDetector == NULL)
+            {
+                // NOTE(review): with a throwing operator new this NULL
+                // check is dead code; harmless either way.
+                _dtmfDetector = new(ACMDTMFDetection);
+                if(_dtmfDetector == NULL)
+                {
+                    status = -1;
+                }
+            }
+            if(status >= 0)
+            {
+                status = _dtmfDetector->Enable(cpt);
+                if(status < 0)
+                {
+                    // failed to initialize if DTMF-detection was not enabled
+                    // before; delete the detector, the callback is cleared
+                    // below and the negative status is returned.
+                    delete _dtmfDetector;
+                    _dtmfDetector = NULL;
+                }
+            }
+        }
+    }
+    // If detector setup failed we cannot keep the callback registered.
+    if((status < 0))
+    {
+        // we failed, we cannot have the callback
+        CriticalSectionScoped lock(*_callbackCritSect);
+        _dtmfCallback = NULL;
+    }
+
+    return status;
+#endif
+}
+
+
+// Add 10MS of raw (PCM) audio data to the encoder.
+// Validates the frame, converts channel count to match the send codec,
+// resamples if the input rate differs from the codec rate, and forwards
+// the samples (with a rate-converted timestamp) to the active encoder.
+// Returns 0 on success, -1 on any validation/processing failure.
+WebRtc_Word32
+AudioCodingModuleImpl::Add10MsData(
+    const AudioFrame& audioFrame)
+{
+    // Do we have a codec registered?
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(!HaveValidEncoder("Add10MsData"))
+    {
+        return -1;
+    }
+
+    // An empty frame is a caller error.
+    if(audioFrame._payloadDataLengthInSamples == 0)
+    {
+        assert(false);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Cannot Add 10 ms audio, payload length is zero");
+        return -1;
+    }
+    // Allow for 8, 16, 32 and 48kHz input audio
+    if((audioFrame._frequencyInHz  != 8000)  &&
+        (audioFrame._frequencyInHz != 16000) &&
+        (audioFrame._frequencyInHz != 32000) &&
+        (audioFrame._frequencyInHz != 48000))
+    {
+        assert(false);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Cannot Add 10 ms audio, input frequency not valid");
+        return -1;
+    }
+
+
+    // A 10 ms frame must contain exactly frequency/100 samples per
+    // channel. We currently just support raw PCM.
+    if((audioFrame._frequencyInHz/ 100) !=
+        audioFrame._payloadDataLengthInSamples)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Cannot Add 10 ms audio, input frequency and length doesn't \
+match");
+        return -1;
+    }
+
+    // Calculate the timestamp that should be pushed to codec.
+    // This might be different from the timestamp of the frame
+    // due to re-sampling
+    bool resamplingRequired =
+        ((WebRtc_Word32)audioFrame._frequencyInHz != _sendCodecInst.plfreq);
+
+    // If number of channels in audio doesn't match codec mode, we need
+    // either mono-to-stereo or stereo-to-mono conversion.
+    // NOTE(review): assumes the converted signal always fits in
+    // WEBRTC_10MS_PCM_AUDIO samples -- confirm against that constant.
+    WebRtc_Word16 audio[WEBRTC_10MS_PCM_AUDIO];
+    int audio_channels = _sendCodecInst.channels;
+    if (audioFrame._audioChannel != _sendCodecInst.channels) {
+      if (_sendCodecInst.channels == 2) {
+        // Do mono-to-stereo conversion by copying each sample.
+        for (int k = 0; k < audioFrame._payloadDataLengthInSamples; k++) {
+          audio[k * 2] = audioFrame._payloadData[k];
+          audio[(k * 2) + 1] = audioFrame._payloadData[k];
+        }
+      } else if (_sendCodecInst.channels == 1) {
+        // Do stereo-to-mono conversion by creating the average of the stereo
+        // samples.
+        for (int k = 0; k < audioFrame._payloadDataLengthInSamples; k++) {
+          audio[k] = (audioFrame._payloadData[k * 2] +
+              audioFrame._payloadData[(k * 2) + 1]) >> 1;
+        }
+      }
+    } else {
+      // Copy payload data for future use.
+      size_t length = static_cast<size_t>(
+          audioFrame._payloadDataLengthInSamples * audio_channels);
+      memcpy(audio, audioFrame._payloadData, length * sizeof(WebRtc_UWord16));
+    }
+
+    WebRtc_UWord32 currentTimestamp;
+    WebRtc_Word32 status;
+    // if it is required, we have to do a resampling.
+    if(resamplingRequired)
+    {
+        WebRtc_Word16 resampledAudio[WEBRTC_10MS_PCM_AUDIO];
+        WebRtc_Word32 sendPlFreq = _sendCodecInst.plfreq;
+        WebRtc_UWord32 diffInputTimestamp;
+        WebRtc_Word16 newLengthSmpl;
+
+        // Elapsed input time since the previous frame, in input-rate
+        // timestamp ticks.
+        if(_lastInTimestamp > audioFrame._timeStamp)
+        {
+            // a wrap around has happened
+            // NOTE(review): plain unsigned subtraction already handles
+            // wrap-around; this form looks off by one tick -- confirm.
+            diffInputTimestamp = ((WebRtc_UWord32)0xFFFFFFFF - _lastInTimestamp)
+                + audioFrame._timeStamp;
+        }
+        else
+        {
+            diffInputTimestamp = audioFrame._timeStamp - _lastInTimestamp;
+        }
+        // Scale the delta from the input rate to the codec rate and
+        // advance the send timestamp by it.
+        currentTimestamp = _lastTimestamp + (WebRtc_UWord32)(diffInputTimestamp *
+            ((double)_sendCodecInst.plfreq / (double)audioFrame._frequencyInHz));
+
+         newLengthSmpl = _inputResampler.Resample10Msec(
+             audio, audioFrame._frequencyInHz, resampledAudio, sendPlFreq,
+             audio_channels);
+
+        if(newLengthSmpl < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Cannot add 10 ms audio, resmapling failed");
+            return -1;
+        }
+        status = _codecs[_currentSendCodecIdx]->Add10MsData(currentTimestamp,
+            resampledAudio, newLengthSmpl, audio_channels);
+    }
+    else
+    {
+        currentTimestamp = audioFrame._timeStamp;
+
+        status = _codecs[_currentSendCodecIdx]->Add10MsData(currentTimestamp,
+            audio, audioFrame._payloadDataLengthInSamples,
+            audio_channels);
+    }
+    // Remember both timestamps for the next call's delta computation.
+    _lastInTimestamp = audioFrame._timeStamp;
+    _lastTimestamp = currentTimestamp;
+    return status;
+}
+
+/////////////////////////////////////////
+//   (FEC) Forward Error Correction
+//
+
+bool
+AudioCodingModuleImpl::FECStatus() const
+{
+    // Report whether RED/FEC is enabled; read under the ACM lock since
+    // SetFECStatus() writes the flag.
+    CriticalSectionScoped lock(*_acmCritSect);
+    return _fecEnabled;
+}
+
+// configure FEC status i.e on/off.
+// Only available when built with WEBRTC_CODEC_RED; otherwise the flag is
+// forced off and -1 is returned.
+WebRtc_Word32
+AudioCodingModuleImpl::SetFECStatus(
+#ifdef WEBRTC_CODEC_RED
+    const bool enableFEC)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // Only reset buffers when the setting actually changes.
+    if (_fecEnabled != enableFEC)
+    {
+        // Reset the RED buffer
+        memset(_redBuffer, 0, MAX_PAYLOAD_SIZE_BYTE);
+
+        // Reset fragmentation buffers: two fragments, the second offset
+        // by the maximum payload size.
+        _fragmentation->fragmentationVectorSize = 2;
+        _fragmentation->fragmentationOffset[0] = 0;
+        _fragmentation->fragmentationOffset[1] = MAX_PAYLOAD_SIZE_BYTE;
+        memset(_fragmentation->fragmentationLength, 0, sizeof(WebRtc_UWord32) * 2);
+        memset(_fragmentation->fragmentationTimeDiff, 0, sizeof(WebRtc_UWord16) * 2);
+        memset(_fragmentation->fragmentationPlType, 0, sizeof(WebRtc_UWord8) * 2);
+
+        // set _fecEnabled
+        _fecEnabled = enableFEC;
+    }
+    _isFirstRED = true; // Make sure we restart FEC
+    return 0;
+#else
+    const bool /* enableFEC */)
+{
+    _fecEnabled = false;
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+               "  WEBRTC_CODEC_RED is undefined => _fecEnabled = %d", _fecEnabled);
+    return -1;
+#endif
+}
+
+
+/////////////////////////////////////////
+//   (VAD) Voice Activity Detection
+//
+
+// Configure DTX/VAD. If a send codec is registered the settings are
+// pushed to it and the cached flags mirror what the codec reported;
+// otherwise the requested settings are just cached.
+WebRtc_Word32
+AudioCodingModuleImpl::SetVAD(
+    const bool       enableDTX,
+    const bool       enableVAD,
+    const ACMVADMode vadMode)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // sanity check of the mode
+    if((vadMode != VADNormal)      &&
+       (vadMode != VADLowBitrate) &&
+       (vadMode != VADAggr)       &&
+       (vadMode != VADVeryAggr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Invalid VAD Mode %d, no change is made to VAD/DTX status",
+            (int)vadMode);
+        return -1;
+    }
+
+    // If a send codec is registered, set VAD/DTX for the codec
+    if(HaveValidEncoder("SetVAD")) {
+        WebRtc_Word16 status =
+                _codecs[_currentSendCodecIdx]->SetVAD(enableDTX, enableVAD, vadMode);
+        if(status == 1) {
+            // Return value 1 is taken to mean "VAD ended up enabled in
+            // the codec" -- note _vadEnabled is set regardless of the
+            // requested enableVAD.
+            _vadEnabled = true;
+            _dtxEnabled = enableDTX;
+            _vadMode = vadMode;
+
+            return 0;
+        } else if (status < 0) {
+            // SetVAD failed
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "SetVAD failed");
+
+            _vadEnabled = false;
+            _dtxEnabled = false;
+
+            return -1;
+        }
+        // status == 0 falls through to cache the requested settings.
+    }
+
+    _vadEnabled = enableVAD;
+    _dtxEnabled = enableDTX;
+    _vadMode = vadMode;
+
+    return 0;
+}
+
+// Retrieve the cached DTX/VAD configuration (kept in sync with the
+// encoder by SetVAD()).
+WebRtc_Word32
+AudioCodingModuleImpl::VAD(
+    bool&       dtxEnabled,
+    bool&       vadEnabled,
+    ACMVADMode& vadMode) const
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    dtxEnabled = _dtxEnabled;
+    vadEnabled = _vadEnabled;
+    vadMode = _vadMode;
+
+    return 0;
+}
+
+/////////////////////////////////////////
+//   Receiver
+//
+
+WebRtc_Word32
+AudioCodingModuleImpl::InitializeReceiver()
+{
+    // Public entry point: take the ACM lock and delegate to the worker
+    // that assumes the lock is already held.
+    CriticalSectionScoped lock(*_acmCritSect);
+    return InitializeReceiverSafe();
+}
+
+// Initialize receiver, resets codec database etc.
+// Caller must hold _acmCritSect (see InitializeReceiver()).
+WebRtc_Word32
+AudioCodingModuleImpl::InitializeReceiverSafe()
+{
+    // If the receiver is already initialized then we also want to
+    // destruct any existing decoders. After a call to this function,
+    // we should have a clean start-up.
+    if(_receiverInitialized)
+    {
+        for(int codecCntr = 0; codecCntr < ACMCodecDB::kNumCodecs; codecCntr++)
+        {
+            if(UnregisterReceiveCodecSafe(codecCntr) < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "InitializeReceiver() failed, Could not unregister codec");
+                return -1;
+            }
+        }
+    }
+    if (_netEq.Init() != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitializeReceiver() failed, Could not initialize NetEQ");
+        return -1;
+    }
+    _netEq.SetUniqueId(_id);
+    if (_netEq.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
+        ACMCodecDB::kNumCodecs) != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "NetEQ cannot allocatePacket Buffer");
+        return -1;
+    }
+
+    // Register every RED and CN entry of the codec database in the
+    // master jitter buffer, recording their payload-types.
+    int regInNeteq = 0;
+    for (int i = (ACMCodecDB::kNumCodecs - 1); i>-1; i--) {
+        if((STR_CASE_CMP(ACMCodecDB::database_[i].plname, "red") == 0)) {
+            regInNeteq = 1;
+        } else if ((STR_CASE_CMP(ACMCodecDB::database_[i].plname, "CN") == 0)) {
+            regInNeteq = 1;
+        }
+
+        if (regInNeteq == 1) {
+           if(RegisterRecCodecMSSafe(ACMCodecDB::database_[i], i, i,
+                ACMNetEQ::masterJB) < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Cannot register master codec.");
+                return -1;
+            }
+            _registeredPlTypes[i] = ACMCodecDB::database_[i].pltype;
+            regInNeteq = 0;
+        }
+    }
+
+    _receiverInitialized = true;
+    return 0;
+}
+
+// Reset the decoder state.
+// Resets every decoder that is both created and mapped to a registered
+// payload-type, then flushes NetEQ's buffers.
+WebRtc_Word32
+AudioCodingModuleImpl::ResetDecoder()
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    for(int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++)
+    {
+        // Skip slots without a codec or without a registered payload-type.
+        if((_codecs[i] == NULL) || (_registeredPlTypes[i] == -1))
+        {
+            continue;
+        }
+        if(_codecs[i]->ResetDecoder(_registeredPlTypes[i]) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "ResetDecoder failed:");
+            return -1;
+        }
+    }
+    return _netEq.FlushBuffers();
+}
+
+// get current receive freq
+WebRtc_Word32
+AudioCodingModuleImpl::ReceiveFrequency() const
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+        "ReceiveFrequency()");
+
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // Prefer the frequency of the last received codec; if no decoder
+    // parameters exist for that payload-type, fall back to NetEQ's
+    // current sampling frequency.
+    WebRtcACMCodecParams codecParams;
+    if(DecoderParamByPlType(_lastRecvAudioCodecPlType, codecParams) >= 0)
+    {
+        return codecParams.codecInstant.plfreq;
+    }
+    return _netEq.CurrentSampFreqHz();
+}
+
+// get current playout freq
+WebRtc_Word32
+AudioCodingModuleImpl::PlayoutFrequency() const
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+        "PlayoutFrequency()");
+
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // NetEQ owns the playout sampling rate.
+    return _netEq.CurrentSampFreqHz();
+}
+
+
+// register possible receive codecs, can be called multiple times,
+// for codecs, CNG (NB, WB and SWB), DTMF, RED
+WebRtc_Word32
+AudioCodingModuleImpl::RegisterReceiveCodec(
+    const CodecInst& receiveCodec)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    // Mono and stereo are the only supported channel configurations.
+    if(receiveCodec.channels > 2)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "More than 2 audio channel is not supported.");
+        return -1;
+    }
+
+    // Map the codec to its database index; mirrorId differs from codecId
+    // for codecs that share one instance (see RegisterRecCodecMSSafe()).
+    int mirrorId;
+    int codecId = ACMCodecDB::ReceiverCodecNumber(&receiveCodec, &mirrorId);
+
+    if(codecId < 0 || codecId >= ACMCodecDB::kNumCodecs)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Wrong codec params to be registered as receive codec");
+        return -1;
+    }
+    // Check if the payload-type is valid.
+    if(!ACMCodecDB::ValidPayloadType(receiveCodec.pltype))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Invalid payload-type %d for %s.",
+                receiveCodec.pltype, receiveCodec.plname);
+        return -1;
+    }
+
+    // Lazily bring up the receiver (NetEQ etc.) on first registration.
+    if(!_receiverInitialized)
+    {
+        if(InitializeReceiverSafe() < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Cannot initialize reciver, so failed registering a codec.");
+            return -1;
+        }
+    }
+
+    // If codec already registered, start with unregistering
+    if(_registeredPlTypes[codecId] != -1)
+    {
+        if(UnregisterReceiveCodecSafe(codecId) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Cannot register master codec.");
+            return -1;
+        }
+    }
+
+    // Register in the master jitter buffer.
+    if(RegisterRecCodecMSSafe(receiveCodec, codecId, mirrorId,
+        ACMNetEQ::masterJB) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Cannot register master codec.");
+        return -1;
+    }
+
+    // If receive stereo, make sure we have two instances of NetEQ, one for each channel
+    if(receiveCodec.channels == 2)
+    {
+        if(_netEq.NumSlaves() < 1)
+        {
+            if(_netEq.AddSlave(ACMCodecDB::NetEQDecoders(),
+                   ACMCodecDB::kNumCodecs) < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Cannot Add Slave jitter buffer to NetEQ.");
+                return -1;
+            }
+
+            // Register RED and CN in slave.
+            bool reg_in_neteq = false;
+            for (int i = (ACMCodecDB::kNumCodecs - 1); i > -1; i--) {
+                if((STR_CASE_CMP(ACMCodecDB::database_[i].plname, "RED") == 0)) {
+                    reg_in_neteq = true;
+                } else if ((STR_CASE_CMP(ACMCodecDB::database_[i].plname, "CN") == 0)) {
+                    reg_in_neteq = true;
+                }
+
+                if (reg_in_neteq) {
+                   if(RegisterRecCodecMSSafe(ACMCodecDB::database_[i], i, i,
+                        ACMNetEQ::slaveJB) < 0) {
+                        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                            "Cannot register slave codec.");
+                        return -1;
+                    }
+                    _registeredPlTypes[i] = ACMCodecDB::database_[i].pltype;
+                    reg_in_neteq = false;
+                }
+            }
+        }
+
+        // Register this codec in the slave jitter buffer as well.
+        if(RegisterRecCodecMSSafe(receiveCodec, codecId, mirrorId,
+            ACMNetEQ::slaveJB) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Cannot register slave codec.");
+            return -1;
+        }
+
+        // Switching this codec from mono to stereo invalidates the
+        // cached "last received" payload-type.
+        if((_stereoReceive[codecId] == false) &&
+            (_lastRecvAudioCodecPlType == receiveCodec.pltype))
+        {
+            _lastRecvAudioCodecPlType = -1;
+        }
+        _stereoReceive[codecId] = true;
+    }
+    else
+    {
+        _stereoReceive[codecId] = false;
+    }
+
+    _registeredPlTypes[codecId] = receiveCodec.pltype;
+
+    // Remember the RED payload-type; IncomingPacket() uses it to peel
+    // off the primary payload-type.
+    if(!STR_CASE_CMP(receiveCodec.plname, "RED"))
+    {
+        _receiveREDPayloadType = receiveCodec.pltype;
+    }
+    return 0;
+}
+
+
+
+// Register receiveCodec in the given jitter buffer (master or slave).
+// codecId is the codec's database index; mirrorId identifies the codec
+// instance that owns the memory (differs from codecId when two database
+// entries share one instance, e.g. iSAC -- see comment below).
+// Caller must hold _acmCritSect.
+WebRtc_Word32
+AudioCodingModuleImpl::RegisterRecCodecMSSafe(
+    const CodecInst& receiveCodec,
+    WebRtc_Word16    codecId,
+    WebRtc_Word16    mirrorId,
+    ACMNetEQ::JB     jitterBuffer)
+{
+    // Pick the codec table matching the requested jitter buffer.
+    ACMGenericCodec** codecArray;
+    if(jitterBuffer == ACMNetEQ::masterJB)
+    {
+        codecArray = &_codecs[0];
+    }
+    else if(jitterBuffer == ACMNetEQ::slaveJB)
+    {
+        codecArray = &_slaveCodecs[0];
+        if (_codecs[codecId]->IsTrueStereoCodec()) {
+          // True stereo codecs need to use the same codec memory
+          // for both master and slave.
+          _slaveCodecs[mirrorId] = _codecs[mirrorId];
+          _mirrorCodecIdx[mirrorId] = mirrorId;
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "RegisterReceiveCodecMSSafe failed, jitterBuffer is neither master or slave ");
+        return -1;
+    }
+
+    // Create the codec wrapper on first use.
+    if (codecArray[mirrorId] == NULL)
+    {
+        codecArray[mirrorId] = CreateCodec(receiveCodec);
+        if(codecArray[mirrorId] == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Cannot create codec to register as receive codec");
+            return -1;
+        }
+        _mirrorCodecIdx[mirrorId] = mirrorId;
+    }
+    if(mirrorId != codecId)
+    {
+        // Alias the shared instance under this codec's own index.
+        codecArray[codecId] = codecArray[mirrorId];
+        _mirrorCodecIdx[codecId] = mirrorId;
+    }
+
+    codecArray[codecId]->SetIsMaster(jitterBuffer == ACMNetEQ::masterJB);
+
+    WebRtc_Word16 status = 0;
+    // NOTE(review): registerInNetEq is never cleared, so the NetEQ
+    // registration below is unconditional; the flag looks vestigial.
+    bool registerInNetEq = true;
+    WebRtcACMCodecParams codecParams;
+    memcpy(&(codecParams.codecInstant), &receiveCodec,
+        sizeof(CodecInst));
+    // Decoder-side parameters: VAD/DTX only apply to the encoder.
+    codecParams.enableVAD = false;
+    codecParams.enableDTX = false;
+    codecParams.vadMode   = VADNormal;
+    if (!codecArray[codecId]->DecoderInitialized())
+    {
+        // force initialization
+        status = codecArray[codecId]->InitDecoder(&codecParams, true);
+        if(status < 0)
+        {
+            // could not initialize the decoder; we don't want to
+            // continue if we could not initialize properly.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "could not initialize the receive codec, codec not registered");
+
+            return -1;
+        }
+    }
+    else if(mirrorId != codecId)
+    {
+        // Currently this only happens for iSAC.
+        // we have to store the decoder parameters
+
+        codecArray[codecId]->SaveDecoderParam(&codecParams);
+    }
+    if (registerInNetEq)
+    {
+        if(codecArray[codecId]->RegisterInNetEq(&_netEq, receiveCodec)
+            != 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "Receive codec could not be registered in NetEQ");
+
+            return -1;
+        }
+        // Guaranty that the same payload-type that is
+        // registered in NetEQ is stored in the codec.
+        codecArray[codecId]->SaveDecoderParam(&codecParams);
+    }
+
+    return status;
+}
+
+
+
+// Get current received codec.
+// Scans the codec table for an initialized decoder that has parameters
+// stored for the last received payload-type.
+WebRtc_Word32
+AudioCodingModuleImpl::ReceiveCodec(
+    CodecInst& currentReceiveCodec) const
+{
+    WebRtcACMCodecParams decoderParam;
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    for(int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++)
+    {
+        if((_codecs[i] != NULL) &&
+           _codecs[i]->DecoderInitialized() &&
+           _codecs[i]->DecoderParams(&decoderParam,
+               _lastRecvAudioCodecPlType))
+        {
+            currentReceiveCodec = decoderParam.codecInstant;
+            return 0;
+        }
+    }
+
+    // No matching decoder found: mark the output struct invalid by
+    // setting its payload-type to -1 and report failure.
+    currentReceiveCodec.pltype = -1;
+    return -1;
+}
+
+// Incoming packet from network parsed and ready for decode.
+// Tracks the last received audio payload-type (to handle decoder
+// sample-rate switches and stereo setup), validates the channel count
+// against the registered codec, and inserts the packet into NetEQ.
+WebRtc_Word32
+AudioCodingModuleImpl::IncomingPacket(
+    const WebRtc_Word8*    incomingPayload,
+    const WebRtc_Word32    payloadLength,
+    const WebRtcRTPHeader& rtpInfo)
+{
+    if (payloadLength < 0)
+    {
+        // Log error
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "IncomingPacket() Error, payload-length cannot be negative");
+        return -1;
+    }
+    {
+        // store the payload Type. this will be used to retrieve "received codec"
+        // and "received frequency."
+        CriticalSectionScoped lock(*_acmCritSect);
+#ifdef ACM_QA_TEST
+        // NOTE(review): payloadLength is a Word32 but only
+        // sizeof(WebRtc_Word16) bytes of it are written -- on big-endian
+        // targets this logs the wrong half; confirm intent.
+        if(_incomingPL != NULL)
+        {
+            fwrite(&rtpInfo.header.timestamp,   sizeof(WebRtc_UWord32), 1, _incomingPL);
+            fwrite(&rtpInfo.header.payloadType, sizeof(WebRtc_UWord8),  1, _incomingPL);
+            fwrite(&payloadLength,              sizeof(WebRtc_Word16),  1, _incomingPL);
+        }
+#endif
+
+        WebRtc_UWord8 myPayloadType;
+
+        // Check if this is an RED payload
+        if(rtpInfo.header.payloadType == _receiveREDPayloadType)
+        {
+            // get the primary payload-type (low 7 bits of the first
+            // RED header byte).
+            myPayloadType = (WebRtc_UWord8)(incomingPayload[0] & 0x7F);
+        }
+        else
+        {
+            myPayloadType = rtpInfo.header.payloadType;
+        }
+
+        // If payload is audio, check if received payload is different from previous
+        if((!rtpInfo.type.Audio.isCNG)       &&
+            (myPayloadType != _cngNB.pltype) &&
+            (myPayloadType != _cngWB.pltype) &&
+            (myPayloadType != _cngSWB.pltype))
+        {
+            // This is Audio not CNG
+
+            if(myPayloadType != _lastRecvAudioCodecPlType)
+            {
+                // We detect a change in payload type. It is necessary for iSAC
+                // we are going to use ONE iSAC instance for decoding both WB and
+                // SWB payloads. If payload is changed there might be a need to reset
+                // sampling rate of decoder. depending what we have received "now".
+                for(int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++)
+                {
+                    if(_registeredPlTypes[i] == myPayloadType)
+                    {
+                        if(_codecs[i] == NULL)
+                        {
+                            // we found a payload type but the corresponding
+                            // codec is NULL this should not happen
+                            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                                "IncomingPacket() Error, payload type found but corresponding "
+                                "codec is NULL");
+                            return -1;
+                        }
+                        _codecs[i]->UpdateDecoderSampFreq(i);
+                        _netEq.SetReceivedStereo(_stereoReceive[i]);
+
+                        // Store number of channels we expect to receive for the
+                        // current payload type.
+                        if (_stereoReceive[i]) {
+                          _expected_channels = 2;
+                        } else {
+                          _expected_channels = 1;
+                        }
+
+                        // Reset previous received channel
+                        _prev_received_channel = 0;
+
+                        break;
+                    }
+                }
+            }
+            _lastRecvAudioCodecPlType = myPayloadType;
+        }
+    }
+
+    // Check that number of received channels match the setup for the
+    // received codec.
+    // NOTE(review): these members are read after _acmCritSect has been
+    // released above -- verify the intended threading model.
+    if (_expected_channels == 2) {
+      if ((_prev_received_channel == 1) && (rtpInfo.type.Audio.channel == 1)) {
+        // We expect every second call to this function to be for channel 2,
+        // since we are in stereo-receive mode.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "IncomingPacket() Error, payload is"
+                    "mono, but codec registered as stereo.");
+        return -1;
+      }
+      _prev_received_channel = rtpInfo.type.Audio.channel;
+    } else if (rtpInfo.type.Audio.channel == 2) {
+      // Codec is registered as mono, but we receive a stereo packet.
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "IncomingPacket() Error, payload is"
+                   "stereo, but codec registered as mono.");
+      return -1;
+    }
+
+    // Insert packet into NetEQ.
+    return _netEq.RecIn(incomingPayload, payloadLength, rtpInfo);
+}
+
+// Minimum playout delay (Used for lip-sync)
+WebRtc_Word32
+AudioCodingModuleImpl::SetMinimumPlayoutDelay(
+    const WebRtc_Word32 timeMs)
+{
+    // Only delays in [0, 1000] ms are accepted; NetEQ applies the
+    // extra delay.
+    if(timeMs >= 0 && timeMs <= 1000)
+    {
+        return _netEq.SetExtraDelay(timeMs);
+    }
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+        "Delay must be in the range of 0-1000 milliseconds.");
+    return -1;
+}
+
+// Get Dtmf playout status.
+bool
+AudioCodingModuleImpl::DtmfPlayoutStatus() const
+{
+#ifndef WEBRTC_CODEC_AVT
+    // AVT support compiled out: outband DTMF playout is never active.
+    return false;
+#else
+    return _netEq.AVTPlayout();
+#endif
+}
+
+// configure Dtmf playout status i.e on/off
+// playout the incoming outband Dtmf tone.
+// Fails unconditionally when built without WEBRTC_CODEC_AVT.
+WebRtc_Word32
+AudioCodingModuleImpl::SetDtmfPlayoutStatus(
+#ifndef WEBRTC_CODEC_AVT
+    const bool /* enable */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+        "SetDtmfPlayoutStatus() failed: AVT is not supported.");
+    return -1;
+#else
+    const bool enable)
+{
+    return _netEq.SetAVTPlayout(enable);
+#endif
+}
+
+// Estimate the Bandwidth based on the incoming stream.
+// This is also done in the RTP module;
+// needed for one-way audio where RTCP sends the BW estimate.
+// Only meaningful when the last received payload was iSAC (WB or SWB).
+WebRtc_Word32
+AudioCodingModuleImpl::DecoderEstimatedBandwidth() const
+{
+    CodecInst codecInst;
+    WebRtc_Word16 codecID = -1;
+    // Initialized so the comparisons below are well-defined even if the
+    // SWB entry is missing.
+    int plTypWB = -1;
+    int plTypSWB = -1;
+
+    // Get iSAC settings
+    for(int codecCntr = 0; codecCntr < ACMCodecDB::kNumCodecs; codecCntr++)
+    {
+        // Store codec settings for codec number "codecCntr" in the output struct
+        ACMCodecDB::Codec(codecCntr, &codecInst);
+
+        if(!STR_CASE_CMP(codecInst.plname, "isac"))
+        {
+            // Use the index where iSAC was actually found; _codecs[] is
+            // indexed by database index. (Was hard-coded to 1, which is
+            // only correct if iSAC happens to occupy that slot.)
+            codecID = codecCntr;
+            plTypWB = codecInst.pltype;
+
+            // The super-wideband iSAC entry is assumed to directly
+            // follow the wideband one -- guard the table bound.
+            if(codecCntr + 1 < ACMCodecDB::kNumCodecs)
+            {
+                ACMCodecDB::Codec(codecCntr+1, &codecInst);
+                plTypSWB = codecInst.pltype;
+            }
+
+            break;
+        }
+    }
+
+    if(codecID < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "DecoderEstimatedBandwidth failed");
+        return -1;
+    }
+
+    if ((_lastRecvAudioCodecPlType == plTypWB) || (_lastRecvAudioCodecPlType == plTypSWB))
+    {
+        return _codecs[codecID]->GetEstimatedBandwidth();
+    } else {
+        return -1;
+    }
+}
+
+// Set playout mode for: voice, fax, or streaming
+WebRtc_Word32
+AudioCodingModuleImpl::SetPlayoutMode(
+    const AudioPlayoutMode mode)
+{
+    // Only the three supported modes are forwarded to NetEQ.
+    switch(mode)
+    {
+    case voice:
+    case fax:
+    case streaming:
+        return _netEq.SetPlayoutMode(mode);
+    default:
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Invalid playout mode.");
+        return -1;
+    }
+}
+
+// Get playout mode voice, fax
+AudioPlayoutMode
+AudioCodingModuleImpl::PlayoutMode() const
+{
+    // Delegates directly to NetEQ, which owns the playout mode.
+    return _netEq.PlayoutMode();
+}
+
+
+// Get 10 milliseconds of decoded audio to play out, resampled to
+// desiredFreqHz when that differs from the NetEQ output rate
+// (desiredFreqHz == -1 disables resampling). Also runs in-band DTMF
+// detection on the decoded audio and fires the registered DTMF
+// callback. Returns 0 on success, -1 on failure.
+WebRtc_Word32
+AudioCodingModuleImpl::PlayoutData10Ms(
+    const WebRtc_Word32 desiredFreqHz,
+    AudioFrame&         audioFrame)
+{
+    bool stereoMode;
+
+    // RecOut always delivers 10 ms of audio from NetEQ.
+    if (_netEq.RecOut(_audioFrame) != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "PlayoutData failed, RecOut Failed");
+        return -1;
+    }
+
+    // Propagate metadata from the NetEQ output frame.
+    audioFrame._audioChannel = _audioFrame._audioChannel;
+    audioFrame._vadActivity  = _audioFrame._vadActivity;
+    audioFrame._speechType   = _audioFrame._speechType;
+
+    stereoMode =  (_audioFrame._audioChannel > 1);
+    // For stereo playout: master and slave samples are interleaved,
+    // starting with master.
+
+    const WebRtc_UWord16 recvFreq = static_cast<WebRtc_UWord16>(_audioFrame._frequencyInHz);
+    bool toneDetected = false;
+    WebRtc_Word16 lastDetectedTone;
+    WebRtc_Word16 tone;
+
+    // Limit the scope of the ACM critical section. The output
+    // resampler may not strictly need to be inside the critical
+    // section (it is only supposed to be called from this function),
+    // but keeping it locked does not degrade complexity.
+    {
+        CriticalSectionScoped lock(*_acmCritSect);
+
+        if ((recvFreq != desiredFreqHz) && (desiredFreqHz != -1))
+        {
+            // Resample payloadData to the requested frequency.
+            WebRtc_Word16 tmpLen = _outputResampler.Resample10Msec(
+                _audioFrame._payloadData, recvFreq, audioFrame._payloadData, desiredFreqHz,
+                _audioFrame._audioChannel);
+
+            if(tmpLen < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "PlayoutData failed, resampler failed");
+                return -1;
+            }
+
+            // Set the payload data length from the resampler.
+            audioFrame._payloadDataLengthInSamples = (WebRtc_UWord16)tmpLen;
+            // Set the sampling frequency.
+            audioFrame._frequencyInHz = desiredFreqHz;
+        }
+        else
+        {
+            // No resampling needed; copy the samples straight through.
+            memcpy(audioFrame._payloadData, _audioFrame._payloadData,
+              _audioFrame._payloadDataLengthInSamples * audioFrame._audioChannel
+              * sizeof(WebRtc_Word16));
+            // Set the payload length.
+            audioFrame._payloadDataLengthInSamples = _audioFrame._payloadDataLengthInSamples;
+            // Set the sampling frequency.
+            audioFrame._frequencyInHz = recvFreq;
+        }
+
+        // Tone detection is done on the master channel only.
+        if(_dtmfDetector != NULL)
+        {
+            // DTMF detection.
+            if(audioFrame._frequencyInHz == 8000)
+            {
+                // Use audioFrame._payloadData so the DTMF detector
+                // doesn't need resampling.
+                if(!stereoMode)
+                {
+                    _dtmfDetector->Detect(audioFrame._payloadData,
+                        audioFrame._payloadDataLengthInSamples,
+                        audioFrame._frequencyInHz, toneDetected, tone);
+                }
+                else
+                {
+                    // We are in 8 kHz, so the master channel needs only 80 samples.
+                    WebRtc_Word16 masterChannel[80];
+                    for(int n = 0; n < 80; n++)
+                    {
+                        masterChannel[n] = audioFrame._payloadData[n<<1];
+                    }
+                    _dtmfDetector->Detect(masterChannel,
+                        audioFrame._payloadDataLengthInSamples,
+                        audioFrame._frequencyInHz, toneDetected, tone);
+                }
+            }
+            else
+            {
+                // Do the detection on the audio that we got from NetEQ (_audioFrame).
+                if(!stereoMode)
+                {
+                    _dtmfDetector->Detect(_audioFrame._payloadData,
+                        _audioFrame._payloadDataLengthInSamples, recvFreq,
+                        toneDetected, tone);
+                }
+                else
+                {
+                    // NOTE(review): assumes _payloadDataLengthInSamples never
+                    // exceeds WEBRTC_10MS_PCM_AUDIO -- confirm, otherwise
+                    // masterChannel overflows below.
+                    WebRtc_Word16 masterChannel[WEBRTC_10MS_PCM_AUDIO];
+                    for(int n = 0; n < _audioFrame._payloadDataLengthInSamples; n++)
+                    {
+                        masterChannel[n] = _audioFrame._payloadData[n<<1];
+                    }
+                    _dtmfDetector->Detect(masterChannel,
+                        _audioFrame._payloadDataLengthInSamples, recvFreq,
+                        toneDetected, tone);
+                }
+            }
+        }
+
+        // We want to do this while we are in _acmCritSect.
+        // lastDetectedTone does not really need initialization here,
+        // but Linux builds warn if we don't.
+        lastDetectedTone = kACMToneEnd;
+        if(toneDetected)
+        {
+            lastDetectedTone = _lastDetectedTone;
+            _lastDetectedTone = tone;
+        }
+    }
+
+    // NOTE(review): toneDetected/tone/lastDetectedTone are locals, so
+    // reading them after releasing _acmCritSect is safe.
+    if(toneDetected)
+    {
+        // We deal with the callback here, so enter the callback
+        // critical section.
+        CriticalSectionScoped lock(*_callbackCritSect);
+
+        if(_dtmfCallback != NULL)
+        {
+            if(tone != kACMToneEnd)
+            {
+                // Just a tone.
+                _dtmfCallback->IncomingDtmf((WebRtc_UWord8)tone, false);
+            }
+            else if((tone == kACMToneEnd) &&
+                (lastDetectedTone != kACMToneEnd))
+            {
+                // The tone is "END" and the previously detected tone is
+                // not "END," so call for an end.
+                _dtmfCallback->IncomingDtmf((WebRtc_UWord8)lastDetectedTone,
+                    true);
+            }
+        }
+    }
+
+    audioFrame._id = _id;
+    // Volume, energy and timestamp are not computed here; mark unset.
+    audioFrame._volume = -1;
+    audioFrame._energy = -1;
+    audioFrame._timeStamp = 0;
+
+    return 0;
+}
+
+/////////////////////////////////////////
+//   (CNG) Comfort Noise Generation
+//   Generate comfort noise when receiving DTX packets
+//
+
+// Query NetEQ for the VAD aggressiveness used on the incoming stream.
+ACMVADMode
+AudioCodingModuleImpl::ReceiveVADMode() const
+{
+    const ACMVADMode mode = _netEq.VADMode();
+    return mode;
+}
+
+// Forward the requested receive-side VAD aggressiveness to NetEQ,
+// which owns the setting and performs any validation.
+WebRtc_Word16
+AudioCodingModuleImpl::SetReceiveVADMode(
+    const ACMVADMode mode)
+{
+    return _netEq.SetVADMode(mode);
+}
+
+/////////////////////////////////////////
+//   statistics
+//
+
+// Fetch jitter-buffer/network statistics from NetEQ into the
+// caller-provided struct; NetEQ's status code is returned directly.
+WebRtc_Word32
+AudioCodingModuleImpl::NetworkStatistics(
+    ACMNetworkStatistics& statistics) const
+{
+    return _netEq.NetworkStatistics(&statistics);
+}
+
+// Ask the current send codec to free an encoder instance handed out
+// earlier. Silently a no-op when no valid send codec is registered.
+void
+AudioCodingModuleImpl::DestructEncoderInst(
+    void* ptrInst)
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+        "DestructEncoderInst()");
+    if(HaveValidEncoder("DestructEncoderInst"))
+    {
+        _codecs[_currentSendCodecIdx]->DestructEncoderInst(ptrInst);
+    }
+}
+
+// Copy the send codec's internal audio-buffer state into audioBuff,
+// including the timestamp of the most recently added input audio.
+WebRtc_Word16
+AudioCodingModuleImpl::AudioBuffer(
+    WebRtcACMAudioBuff& audioBuff)
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+        "AudioBuffer()");
+    if(!HaveValidEncoder("AudioBuffer"))
+    {
+        return -1;
+    }
+    audioBuff.lastInTimestamp = _lastInTimestamp;
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    return sendCodec->AudioBuffer(audioBuff);
+}
+
+// Restore the send codec's internal audio-buffer state from audioBuff.
+WebRtc_Word16
+AudioCodingModuleImpl::SetAudioBuffer(
+    WebRtcACMAudioBuff& audioBuff)
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+        "SetAudioBuffer()");
+    if(!HaveValidEncoder("SetAudioBuffer"))
+    {
+        return -1;
+    }
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    return sendCodec->SetAudioBuffer(audioBuff);
+}
+
+
+// Earliest timestamp pending in the send codec's buffer. Note that
+// the error path returns -1, which wraps to 0xFFFFFFFF in the
+// unsigned return type.
+WebRtc_UWord32
+AudioCodingModuleImpl::EarliestTimestamp() const
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+        "EarliestTimestamp()");
+    if(!HaveValidEncoder("EarliestTimestamp"))
+    {
+        return -1;
+    }
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    return sendCodec->EarliestTimestamp();
+}
+
+// Install (or clear, with NULL) the callback used to report VAD/DTX
+// decisions. The assignment is guarded by the callback lock.
+WebRtc_Word32
+AudioCodingModuleImpl::RegisterVADCallback(
+    ACMVADCallback* vadCallback)
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+        "RegisterVADCallback()");
+    {
+        CriticalSectionScoped lock(*_callbackCritSect);
+        _vadCallback = vadCallback;
+    }
+    return 0;
+}
+
+// Push an un-packetized payload (e.g. read from a pre-encoded file)
+// into NetEQ. Since the caller supplies no RTP info, a dummy RTP
+// header is fabricated on first use and advanced (sequence number,
+// timestamp) for each subsequent payload. Returns 0 on success,
+// -1 on failure.
+WebRtc_Word32
+AudioCodingModuleImpl::IncomingPayload(
+    const WebRtc_Word8*  incomingPayload,
+    const WebRtc_Word32  payloadLength,
+    const WebRtc_UWord8  payloadType,
+    const WebRtc_UWord32 timestamp)
+{
+    if (payloadLength < 0)
+    {
+        // Log error in trace file.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "IncomingPacket() Error, payload-length cannot be negative");
+        return -1;
+    }
+
+    if(_dummyRTPHeader == NULL)
+    {
+        // This is the first time that we are using _dummyRTPHeader,
+        // so we have to create and initialize it.
+        WebRtcACMCodecParams codecParams;
+        _dummyRTPHeader = new WebRtcRTPHeader;
+        // NOTE(review): plain operator new throws std::bad_alloc on
+        // failure rather than returning NULL, so this check is likely
+        // dead -- confirm whether a nothrow allocator is in effect.
+        if (_dummyRTPHeader == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "IncomingPacket() Error, out of memory");
+            return -1;
+        }
+        _dummyRTPHeader->header.payloadType = payloadType;
+        // SSRC doesn't matter in this case.
+        _dummyRTPHeader->header.ssrc = 0;
+        _dummyRTPHeader->header.markerBit = false;
+        // Start with random sequence number and timestamp.
+        _dummyRTPHeader->header.sequenceNumber = rand();
+        _dummyRTPHeader->header.timestamp = (((WebRtc_UWord32)rand()) << 16) +
+            (WebRtc_UWord32)rand();
+        _dummyRTPHeader->type.Audio.channel = 1;
+
+        if(DecoderParamByPlType(payloadType, codecParams) < 0)
+        {
+            // We didn't find a codec with the given payload type.
+            // Something is wrong; we exit, but delete _dummyRTPHeader
+            // and set it to NULL so we start clean next time.
+            delete _dummyRTPHeader;
+            _dummyRTPHeader = NULL;
+            return -1;
+        }
+        // Frame size (in samples) used to advance the dummy timestamp.
+        _recvPlFrameSizeSmpls = codecParams.codecInstant.pacsize;
+    }
+
+    if(payloadType != _dummyRTPHeader->header.payloadType)
+    {
+        // Payload type has changed since the last time; we might need
+        // to update the frame size.
+        WebRtcACMCodecParams codecParams;
+        if(DecoderParamByPlType(payloadType, codecParams) < 0)
+        {
+            // We didn't find a codec with the given payload type.
+            // Something is wrong; we exit.
+            return -1;
+        }
+        _recvPlFrameSizeSmpls = codecParams.codecInstant.pacsize;
+        _dummyRTPHeader->header.payloadType = payloadType;
+    }
+
+    if(timestamp > 0)
+    {
+        // A caller-supplied timestamp overrides the synthetic one.
+        _dummyRTPHeader->header.timestamp = timestamp;
+    }
+
+    // Store the payload type. This will be used to retrieve
+    // "received codec" and "received frequency."
+    _lastRecvAudioCodecPlType = payloadType;
+
+    // Insert into NetEQ.
+    if(_netEq.RecIn(incomingPayload, payloadLength, (*_dummyRTPHeader)) < 0)
+    {
+        return -1;
+    }
+
+    // Get ready for the next payload.
+    _dummyRTPHeader->header.sequenceNumber++;
+    _dummyRTPHeader->header.timestamp += _recvPlFrameSizeSmpls;
+    return 0;
+}
+
+// Look up decoder parameters for a registered, initialized decoder
+// matching the given RTP payload type. On success the parameters are
+// written to codecParams and 0 is returned; on failure codecParams is
+// reset to invalid values and -1 is returned.
+WebRtc_Word16
+AudioCodingModuleImpl::DecoderParamByPlType(
+    const WebRtc_UWord8    payloadType,
+    WebRtcACMCodecParams&  codecParams) const
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    for(WebRtc_Word16 idx = 0; idx < ACMCodecDB::kMaxNumCodecs; idx++)
+    {
+        ACMGenericCodec* codec = _codecs[idx];
+        // Same short-circuit order as checking each condition in turn.
+        if((codec != NULL) &&
+            codec->DecoderInitialized() &&
+            codec->DecoderParams(&codecParams, payloadType))
+        {
+            return 0;
+        }
+    }
+    // No codec with that payload type was found; flag the output as
+    // invalid and report failure.
+    codecParams.codecInstant.plname[0] = '\0';
+    codecParams.codecInstant.pacsize   = 0;
+    codecParams.codecInstant.rate      = 0;
+    codecParams.codecInstant.pltype    = -1;
+    return -1;
+}
+
+
+
+// Find the codec-database index of a registered decoder whose RTP
+// payload name matches payloadName. A sampFreqHz of 0 means matching
+// names is sufficient and frequencies are not checked (needed because
+// iSAC registers one name at two sampling frequencies). Returns the
+// index, or -1 when no decoder matches.
+WebRtc_Word16
+AudioCodingModuleImpl::DecoderListIDByPlName(
+    const char*  payloadName,
+    const WebRtc_UWord16 sampFreqHz) const
+{
+    WebRtcACMCodecParams codecParams;
+    CriticalSectionScoped lock(*_acmCritSect);
+    for(WebRtc_Word16 idx = 0; idx < ACMCodecDB::kMaxNumCodecs; idx++)
+    {
+        // Skip empty slots and uninitialized decoders.
+        if((_codecs[idx] == NULL) || !_codecs[idx]->DecoderInitialized())
+        {
+            continue;
+        }
+        assert(_registeredPlTypes[idx] >= 0);
+        assert(_registeredPlTypes[idx] <= 255);
+        _codecs[idx]->DecoderParams(&codecParams,
+            (WebRtc_UWord8)_registeredPlTypes[idx]);
+        if(STR_CASE_CMP(codecParams.codecInstant.plname, payloadName) != 0)
+        {
+            continue;
+        }
+        if((sampFreqHz == 0) ||
+            (codecParams.codecInstant.plfreq == sampFreqHz))
+        {
+            return idx;
+        }
+    }
+    // No registered decoder matched the name (and frequency).
+    return -1;
+}
+
+// Retrieve the timestamp of the last frame encoded by the current
+// send codec; written to the timestamp out-parameter.
+WebRtc_Word32
+AudioCodingModuleImpl::LastEncodedTimestamp(WebRtc_UWord32& timestamp) const
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(!HaveValidEncoder("LastEncodedTimestamp"))
+    {
+        return -1;
+    }
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    timestamp = sendCodec->LastEncodedTimestamp();
+    return 0;
+}
+
+// Replace (or restore) the send codec's internal DTX with WebRtc DTX.
+// A return of 1 from the codec signals that VAD became active as a
+// side effect, which is mirrored in _vadEnabled.
+WebRtc_Word32
+AudioCodingModuleImpl::ReplaceInternalDTXWithWebRtc(bool useWebRtcDTX)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    if(!HaveValidEncoder("ReplaceInternalDTXWithWebRtc"))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Cannot replace codec internal DTX when no send codec is registered.");
+        return -1;
+    }
+
+    const WebRtc_Word32 res =
+        _codecs[_currentSendCodecIdx]->ReplaceInternalDTX(useWebRtcDTX);
+    if(res < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "Failed to set ReplaceInternalDTXWithWebRtc(%d)", useWebRtcDTX);
+        return res;
+    }
+    if(res == 1)
+    {
+        // The codec reports that VAD is now enabled.
+        _vadEnabled = true;
+    }
+    return 0;
+}
+
+// Query whether the send codec's internal DTX has been replaced by
+// WebRtc DTX; the answer is returned through usesWebRtcDTX.
+WebRtc_Word32
+AudioCodingModuleImpl::IsInternalDTXReplacedWithWebRtc(bool& usesWebRtcDTX)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+
+    if(!HaveValidEncoder("IsInternalDTXReplacedWithWebRtc"))
+    {
+        return -1;
+    }
+    const bool queryFailed =
+        (_codecs[_currentSendCodecIdx]->IsInternalDTXReplaced(&usesWebRtcDTX) < 0);
+    return queryFailed ? -1 : 0;
+}
+
+
+// Cap the instantaneous iSAC rate; forwarded to the current send
+// codec wrapper.
+WebRtc_Word32
+AudioCodingModuleImpl::SetISACMaxRate(
+    const WebRtc_UWord32 maxRateBitPerSec)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(!HaveValidEncoder("SetISACMaxRate"))
+    {
+        return -1;
+    }
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    return sendCodec->SetISACMaxRate(maxRateBitPerSec);
+}
+
+
+// Cap the iSAC payload size in bytes; forwarded to the current send
+// codec wrapper.
+WebRtc_Word32
+AudioCodingModuleImpl::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 maxPayloadLenBytes)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(!HaveValidEncoder("SetISACMaxPayloadSize"))
+    {
+        return -1;
+    }
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    return sendCodec->SetISACMaxPayloadSize(maxPayloadLenBytes);
+}
+
+// Configure the iSAC bandwidth estimator's initial frame size and
+// rate; forwarded to the current send codec wrapper.
+WebRtc_Word32
+AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8  initFrameSizeMsec,
+    const WebRtc_UWord16 initRateBitPerSec,
+    const bool           enforceFrameSize)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    if(!HaveValidEncoder("ConfigISACBandwidthEstimator"))
+    {
+        return -1;
+    }
+    ACMGenericCodec* sendCodec = _codecs[_currentSendCodecIdx];
+    return sendCodec->ConfigISACBandwidthEstimator(
+        initFrameSizeMsec, initRateBitPerSec, enforceFrameSize);
+}
+
+// Set NetEQ's background-noise mode after a range check.
+// NOTE(review): the check assumes the ACMBackgroundNoiseMode enum is
+// ordered with On first and Off last -- confirm against its definition.
+WebRtc_Word32
+AudioCodingModuleImpl::SetBackgroundNoiseMode(
+    const ACMBackgroundNoiseMode mode)
+{
+    const bool inRange = (mode >= On) && (mode <= Off);
+    if(!inRange)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "The specified background noise is out of range.\n");
+        return -1;
+    }
+    return _netEq.SetBackgroundNoiseMode(mode);
+}
+
+// Read back NetEQ's current background-noise mode into mode.
+WebRtc_Word32
+AudioCodingModuleImpl::BackgroundNoiseMode(
+    ACMBackgroundNoiseMode& mode)
+{
+    const WebRtc_Word32 status = _netEq.BackgroundNoiseMode(mode);
+    return status;
+}
+
+// Fetch the current playout timestamp from NetEQ into timestamp.
+WebRtc_Word32
+AudioCodingModuleImpl::PlayoutTimestamp(
+    WebRtc_UWord32& timestamp)
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+        "PlayoutTimestamp()");
+    const WebRtc_Word32 status = _netEq.PlayoutTimestamp(timestamp);
+    return status;
+}
+
+// Verify that a usable send codec is in place: one must be
+// registered, its index must be inside the codec database, and the
+// codec object itself must exist. Logs a trace naming the caller and
+// returns false otherwise.
+bool
+AudioCodingModuleImpl::HaveValidEncoder(
+    const char* callerName) const
+{
+    if(!_sendCodecRegistered)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "%s failed: No send codec is registered.", callerName);
+        return false;
+    }
+    // Bug fix: this range check used to be unreachable because the
+    // first test folded the same conditions in; each failure mode now
+    // gets its own accurate trace message.
+    if((_currentSendCodecIdx < 0) ||
+        (_currentSendCodecIdx >= ACMCodecDB::kNumCodecs))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "%s failed: Send codec index out of range.", callerName);
+        return false;
+    }
+    if(_codecs[_currentSendCodecIdx] == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "%s failed: Send codec is NULL pointer.", callerName);
+        return false;
+    }
+    return true;
+}
+
+// Unregister the receive codec registered with the given RTP payload
+// type. An unknown payload type is accepted silently (nothing to do),
+// matching the original contract.
+WebRtc_Word32
+AudioCodingModuleImpl::UnregisterReceiveCodec(
+    const WebRtc_Word16 payloadType)
+{
+    CriticalSectionScoped lock(*_acmCritSect);
+    WebRtc_Word16 codecID;
+
+    // Search through the list of registered payload types.
+    for (codecID = 0; codecID < ACMCodecDB::kMaxNumCodecs; codecID++)
+    {
+        if (_registeredPlTypes[codecID] == payloadType)
+        {
+            // We have found the codecID registered with the payload type.
+            break;
+        }
+    }
+
+    // Bug fix: the not-found sentinel is kMaxNumCodecs (the loop
+    // bound), but this used to compare against kNumCodecs, so a match
+    // at an index in [kNumCodecs, kMaxNumCodecs) was silently dropped.
+    if(codecID >= ACMCodecDB::kMaxNumCodecs)
+    {
+        // Payload type was not registered. No need to unregister.
+        return 0;
+    }
+
+    // Unregister the codec with the given payload type.
+    return UnregisterReceiveCodecSafe(codecID);
+}
+
+// Remove the decoder at codecID from NetEQ and destruct its decoder
+// instance(s). CN is handled specially: NetEQ deletes all three CN
+// sampling frequencies together, so the adjacent CN entries in the
+// codec database are cleaned up as well. Also resets the RED
+// bookkeeping when the RED payload type is being removed.
+// Caller must hold _acmCritSect.
+WebRtc_Word32
+AudioCodingModuleImpl::UnregisterReceiveCodecSafe(
+    const WebRtc_Word16 codecID)
+{
+    const WebRtcNetEQDecoder *neteqDecoder = ACMCodecDB::NetEQDecoders();
+    WebRtc_Word16 mirrorID = ACMCodecDB::MirrorID(codecID);
+    if(_codecs[codecID] != NULL)
+    {
+        if(_registeredPlTypes[codecID] != -1)
+        {
+            // Before deleting the decoder instance, unregister it
+            // from NetEQ.
+            if(_netEq.RemoveCodec(neteqDecoder[codecID], _stereoReceive[codecID]) < 0)
+            {
+                CodecInst codecInst;
+                ACMCodecDB::Codec(codecID, &codecInst);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "Unregistering %s-%d from NetEQ failed.",
+                    codecInst.plname, codecInst.plfreq);
+                return -1;
+            }
+
+            // CN is a special case for NetEQ; all three sampling
+            // frequencies are deleted if one is deleted.
+            if(STR_CASE_CMP(ACMCodecDB::database_[codecID].plname, "CN") == 0)
+            {
+                // Search codecs nearby in the database to unregister all CN.
+                // NOTE(review): codecID+i spans [codecID-2, codecID+2],
+                // which assumes the CN entries sit at least two slots
+                // away from either end of database_ -- confirm,
+                // otherwise this indexes out of bounds.
+                for (int i=-2; i<3; i++)
+                {
+                    if (STR_CASE_CMP(ACMCodecDB::database_[codecID+i].plname, "CN") == 0)
+                    {
+                        _codecs[codecID+i]->DestructDecoder();
+                        if(_stereoReceive[codecID+i])
+                        {
+                            _slaveCodecs[codecID+i]->DestructDecoder();
+                        }
+                        _registeredPlTypes[codecID+i] = -1;
+                    }
+                }
+            } else
+            {
+                // Only destruct when this entry is its own mirror;
+                // presumably mirrored entries share a decoder
+                // instance -- verify against ACMCodecDB::MirrorID.
+                if(codecID == mirrorID)
+                {
+                    _codecs[codecID]->DestructDecoder();
+                    if(_stereoReceive[codecID])
+                    {
+                        _slaveCodecs[codecID]->DestructDecoder();
+                    }
+                }
+            }
+        }
+    }
+
+    if(_registeredPlTypes[codecID] == _receiveREDPayloadType)
+    {
+        // RED is going to be unregistered;
+        // set the following to an invalid value.
+        _receiveREDPayloadType = 255;
+    }
+    _registeredPlTypes[codecID] = -1;
+
+    return 0;
+}
+
+// Produce a RED payload for iSAC at the given rate and bandwidth
+// estimate via the current send codec. Called when 'this' ACM is the
+// default ACM. Returns the codec's status code, or -1 when no valid
+// send codec is registered.
+WebRtc_Word32
+AudioCodingModuleImpl::REDPayloadISAC(
+    const WebRtc_Word32  isacRate,
+    const WebRtc_Word16  isacBwEstimate,
+    WebRtc_UWord8*       payload,
+    WebRtc_Word16*       payloadLenByte)
+{
+    // Bug fix: the failure trace used to report the caller as
+    // "EncodeData", making the log misleading; report the real caller.
+    if(!HaveValidEncoder("REDPayloadISAC"))
+    {
+        return -1;
+    }
+    return _codecs[_currentSendCodecIdx]->REDPayloadISAC(isacRate, isacBwEstimate,
+        payload, payloadLenByte);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/source/audio_coding_module_impl.h b/trunk/src/modules/audio_coding/main/source/audio_coding_module_impl.h
new file mode 100644
index 0000000..422b22e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/source/audio_coding_module_impl.h
@@ -0,0 +1,384 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
+
+#include "acm_codec_database.h"
+#include "acm_neteq.h"
+#include "acm_resampler.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+
+namespace webrtc {
+
+class ACMDTMFDetection;
+class ACMGenericCodec;
+class CriticalSectionWrapper;
+class RWLockWrapper;
+
+//#define TIMED_LOGGING
+
+#ifdef TIMED_LOGGING
+    #include "../test/timedtrace.h"
+#endif
+
+#ifdef ACM_QA_TEST
+#   include <stdio.h>
+#endif
+
+// Implementation of the AudioCodingModule interface: owns the send
+// and receive codec tables, NetEQ, the resamplers, and the RED/FEC
+// and DTMF state. Definitions live in audio_coding_module_impl.cc.
+class AudioCodingModuleImpl : public AudioCodingModule
+{
+public:
+    // constructor
+    AudioCodingModuleImpl(
+        const WebRtc_Word32 id);
+
+    // destructor
+    ~AudioCodingModuleImpl();
+
+    // get version information for ACM and all components
+    WebRtc_Word32 Version(
+        char*   version,
+        WebRtc_UWord32& remainingBufferInBytes,
+        WebRtc_UWord32& position) const;
+
+    // change the unique identifier of this object
+    virtual WebRtc_Word32 ChangeUniqueId(
+        const WebRtc_Word32 id);
+
+    // returns the number of milliseconds until the module wants
+    // a worker thread to call Process
+    WebRtc_Word32 TimeUntilNextProcess();
+
+    // Process any pending tasks such as timeouts
+    WebRtc_Word32 Process();
+
+    // used in conference to go to and from active encoding, hence
+    // in and out of mix
+    WebRtc_Word32 SetMode(
+        const bool passive);
+
+
+
+    /////////////////////////////////////////
+    //   Sender
+    //
+
+    // initialize send codec
+    WebRtc_Word32 InitializeSender();
+
+    // reset send codec
+    WebRtc_Word32 ResetEncoder();
+
+    // can be called multiple times for Codec, CNG, RED
+    WebRtc_Word32 RegisterSendCodec(
+        const CodecInst& sendCodec);
+
+    // get current send codec
+    WebRtc_Word32 SendCodec(
+        CodecInst& currentSendCodec) const;
+
+    // get current send freq
+    WebRtc_Word32 SendFrequency() const;
+
+    // Get encode bitrate
+    // Adaptive rate codecs return their current encode target rate, while other codecs
+    // return their long-term average or their fixed rate.
+    WebRtc_Word32 SendBitrate() const;
+
+    // set available bandwidth, inform the encoder about the
+    // estimated bandwidth received from the remote party
+    virtual WebRtc_Word32 SetReceivedEstimatedBandwidth(
+        const WebRtc_Word32 bw);
+
+    // register a transport callback which will be
+    // called to deliver the encoded buffers
+    WebRtc_Word32 RegisterTransportCallback(
+        AudioPacketizationCallback* transport);
+
+    // Used by the module to deliver messages to the codec module/application
+    // AVT(DTMF)
+    WebRtc_Word32 RegisterIncomingMessagesCallback(
+        AudioCodingFeedback* incomingMessagesCallback,
+        const ACMCountries cpt);
+
+    // Add 10MS of raw (PCM) audio data to the encoder
+    WebRtc_Word32 Add10MsData(
+        const AudioFrame& audioFrame);
+
+    // set background noise mode for NetEQ, on, off or fade
+    WebRtc_Word32 SetBackgroundNoiseMode(
+        const ACMBackgroundNoiseMode mode);
+
+    // get current background noise mode
+    WebRtc_Word32 BackgroundNoiseMode(
+        ACMBackgroundNoiseMode& mode);
+
+    /////////////////////////////////////////
+    // (FEC) Forward Error Correction
+    //
+
+    // configure FEC status i.e on/off
+    WebRtc_Word32 SetFECStatus(
+        const bool enable);
+
+    // Get FEC status
+    bool FECStatus() const;
+
+    /////////////////////////////////////////
+    //   (VAD) Voice Activity Detection
+    //   and
+    //   (CNG) Comfort Noise Generation
+    //
+
+    WebRtc_Word32 SetVAD(
+        const bool             enableDTX = true,
+        const bool             enableVAD = false,
+        const ACMVADMode vadMode   = VADNormal);
+
+    WebRtc_Word32 VAD(
+        bool&             dtxEnabled,
+        bool&             vadEnabled,
+        ACMVADMode& vadMode) const;
+
+    WebRtc_Word32 RegisterVADCallback(
+        ACMVADCallback* vadCallback);
+
+    // Get VAD aggressiveness on the incoming stream
+    ACMVADMode ReceiveVADMode() const;
+
+    // Configure VAD aggressiveness on the incoming stream
+    WebRtc_Word16 SetReceiveVADMode(
+        const ACMVADMode mode);
+
+
+    /////////////////////////////////////////
+    //   Receiver
+    //
+
+    // initialize receiver, resets codec database etc
+    WebRtc_Word32 InitializeReceiver();
+
+    // reset the decoder state
+    WebRtc_Word32 ResetDecoder();
+
+    // get current receive freq
+    WebRtc_Word32 ReceiveFrequency() const;
+
+    // get current playout freq
+    WebRtc_Word32 PlayoutFrequency() const;
+
+    // register possible receive codecs, can be called multiple times,
+    // for codecs, CNG, DTMF, RED
+    WebRtc_Word32 RegisterReceiveCodec(
+        const CodecInst& receiveCodec);
+
+    // get current received codec
+    WebRtc_Word32 ReceiveCodec(
+        CodecInst& currentReceiveCodec) const;
+
+    // incoming packet from network parsed and ready for decode
+    WebRtc_Word32 IncomingPacket(
+        const WebRtc_Word8*    incomingPayload,
+        const WebRtc_Word32    payloadLength,
+        const WebRtcRTPHeader& rtpInfo);
+
+    // Incoming payloads, without rtp-info, the rtp-info will be created in ACM.
+    // One usage for this API is when pre-encoded files are pushed in ACM.
+    WebRtc_Word32 IncomingPayload(
+        const WebRtc_Word8*  incomingPayload,
+        const WebRtc_Word32  payloadLength,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timestamp = 0);
+
+    // Minimum playout delay (used for lip-sync)
+    WebRtc_Word32 SetMinimumPlayoutDelay(
+        const WebRtc_Word32 timeMs);
+
+    // configure DTMF playout status, i.e. on/off playout of the
+    // incoming out-of-band DTMF tone
+    WebRtc_Word32 SetDtmfPlayoutStatus(
+        const bool enable);
+
+    // Get DTMF playout status
+    bool DtmfPlayoutStatus() const;
+
+    // Estimate the bandwidth based on the incoming stream.
+    // This is also done in the RTP module; needed for one-way audio
+    // where the RTCP sends the BW estimate.
+    WebRtc_Word32 DecoderEstimatedBandwidth() const;
+
+    // Set playout mode voice, fax
+    WebRtc_Word32 SetPlayoutMode(
+        const AudioPlayoutMode mode);
+
+    // Get playout mode voice, fax
+    AudioPlayoutMode PlayoutMode() const;
+
+    // Get playout timestamp
+    WebRtc_Word32 PlayoutTimestamp(
+        WebRtc_UWord32& timestamp);
+
+    // Get 10 milliseconds of raw audio data to play out
+    // automatic resample to the requested frequency if > 0
+    WebRtc_Word32 PlayoutData10Ms(
+        const WebRtc_Word32   desiredFreqHz,
+        AudioFrame            &audioFrame);
+
+
+    /////////////////////////////////////////
+    //   Statistics
+    //
+
+    WebRtc_Word32  NetworkStatistics(
+        ACMNetworkStatistics& statistics) const;
+
+    void DestructEncoderInst(void* ptrInst);
+
+    WebRtc_Word16 AudioBuffer(WebRtcACMAudioBuff& audioBuff);
+
+    // Get RED payload for iSAC. The method is called
+    // when 'this' ACM is the default ACM.
+    WebRtc_Word32 REDPayloadISAC(
+        const WebRtc_Word32  isacRate,
+        const WebRtc_Word16  isacBwEstimate,
+        WebRtc_UWord8*       payload,
+        WebRtc_Word16*       payloadLenByte);
+
+    WebRtc_Word16 SetAudioBuffer(WebRtcACMAudioBuff& audioBuff);
+
+    WebRtc_UWord32 EarliestTimestamp() const;
+
+    WebRtc_Word32 LastEncodedTimestamp(WebRtc_UWord32& timestamp) const;
+
+    WebRtc_Word32 ReplaceInternalDTXWithWebRtc(
+        const bool useWebRtcDTX);
+
+    WebRtc_Word32 IsInternalDTXReplacedWithWebRtc(
+        bool& usesWebRtcDTX);
+
+    WebRtc_Word32 SetISACMaxRate(
+        const WebRtc_UWord32 rateBitPerSec);
+
+    WebRtc_Word32 SetISACMaxPayloadSize(
+        const WebRtc_UWord16 payloadLenBytes);
+
+    WebRtc_Word32 ConfigISACBandwidthEstimator(
+        const WebRtc_UWord8  initFrameSizeMsec,
+        const WebRtc_UWord16 initRateBitPerSec,
+        const bool           enforceFrameSize = false);
+
+    WebRtc_Word32 UnregisterReceiveCodec(
+        const WebRtc_Word16 payloadType);
+
+protected:
+    void UnregisterSendCodec();
+
+    WebRtc_Word32 UnregisterReceiveCodecSafe(
+        const WebRtc_Word16 codecID);
+
+    ACMGenericCodec* CreateCodec(
+        const CodecInst& codec);
+
+    WebRtc_Word16 DecoderParamByPlType(
+        const WebRtc_UWord8    payloadType,
+        WebRtcACMCodecParams&  codecParams) const;
+
+    WebRtc_Word16 DecoderListIDByPlName(
+        const char*  payloadName,
+        const WebRtc_UWord16 sampFreqHz = 0) const;
+
+    WebRtc_Word32 InitializeReceiverSafe();
+
+    bool HaveValidEncoder(const char* callerName) const;
+
+    WebRtc_Word32 RegisterRecCodecMSSafe(
+        const CodecInst& receiveCodec,
+        WebRtc_Word16         codecId,
+        WebRtc_Word16         mirrorId,
+        ACMNetEQ::JB          jitterBuffer);
+
+private:
+    AudioPacketizationCallback*    _packetizationCallback;
+    WebRtc_Word32                  _id;
+    WebRtc_UWord32                 _lastTimestamp;
+    WebRtc_UWord32                 _lastInTimestamp;
+    CodecInst                      _sendCodecInst;
+    CodecInst                      _cngNB;
+    CodecInst                      _cngWB;
+    CodecInst                      _cngSWB;
+    CodecInst                      _RED;
+    CodecInst                      _DTMF;
+    bool                           _vadEnabled;
+    bool                           _dtxEnabled;
+    ACMVADMode                     _vadMode;
+    ACMGenericCodec*               _codecs[ACMCodecDB::kMaxNumCodecs];
+    ACMGenericCodec*               _slaveCodecs[ACMCodecDB::kMaxNumCodecs];
+    WebRtc_Word16                  _mirrorCodecIdx[ACMCodecDB::kMaxNumCodecs];
+    bool                           _stereoReceive[ACMCodecDB::kMaxNumCodecs];
+    bool                           _stereoSend;
+    int                            _prev_received_channel;
+    int                            _expected_channels;
+    WebRtc_Word32                  _currentSendCodecIdx;
+    bool                           _sendCodecRegistered;
+    ACMResampler                   _inputResampler;
+    ACMResampler                   _outputResampler;
+    ACMNetEQ                       _netEq;
+    CriticalSectionWrapper*        _acmCritSect;
+    ACMVADCallback*                _vadCallback;
+    WebRtc_UWord8                  _lastRecvAudioCodecPlType;
+
+    // RED/FEC
+    bool                           _isFirstRED;
+    bool                           _fecEnabled;
+    WebRtc_UWord8*                 _redBuffer;
+    RTPFragmentationHeader*        _fragmentation;
+    WebRtc_UWord32                 _lastFECTimestamp;
+    WebRtc_UWord8                  _redPayloadType;
+    // if no RED is registered as receive codec this
+    // will have an invalid value.
+    WebRtc_UWord8                  _receiveREDPayloadType;
+
+    // This is to keep track of CN instances where we can send DTMFs
+    WebRtc_UWord8                  _previousPayloadType;
+
+    // This keeps track of payload types associated with _codecs[].
+    // We define it as signed variable and initialize with -1 to indicate
+    // unused elements.
+    WebRtc_Word16                  _registeredPlTypes[ACMCodecDB::kMaxNumCodecs];
+
+    // Used when payloads are pushed into ACM without any RTP info.
+    // One example is when pre-encoded bit-stream is pushed from
+    // a file.
+    WebRtcRTPHeader*               _dummyRTPHeader;
+    WebRtc_UWord16                 _recvPlFrameSizeSmpls;
+
+    bool                           _receiverInitialized;
+    ACMDTMFDetection*              _dtmfDetector;
+
+    AudioCodingFeedback*           _dtmfCallback;
+    WebRtc_Word16                  _lastDetectedTone;
+    CriticalSectionWrapper*        _callbackCritSect;
+#ifdef TIMED_LOGGING
+    TimedTrace                     _trace;
+#endif
+
+    AudioFrame                     _audioFrame;
+
+#ifdef ACM_QA_TEST
+    FILE* _outgoingPL;
+    FILE* _incomingPL;
+#endif
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
diff --git a/trunk/src/modules/audio_coding/main/test/ACMTest.cc b/trunk/src/modules/audio_coding/main/test/ACMTest.cc
new file mode 100644
index 0000000..1bbac0e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/ACMTest.cc
@@ -0,0 +1,16 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ACMTest.h"
+
+ACMTest::~ACMTest()
+{
+}
+
diff --git a/trunk/src/modules/audio_coding/main/test/ACMTest.h b/trunk/src/modules/audio_coding/main/test/ACMTest.h
new file mode 100644
index 0000000..e965671
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/ACMTest.h
@@ -0,0 +1,21 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACMTEST_H
+#define ACMTEST_H
+
+class ACMTest
+{
+public:
+    virtual ~ACMTest() =0;
+    virtual void Perform() =0;
+};
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/APITest.cc b/trunk/src/modules/audio_coding/main/test/APITest.cc
new file mode 100644
index 0000000..ed44178
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/APITest.cc
@@ -0,0 +1,1569 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cctype>
+#include <iostream>
+#include <ostream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "APITest.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "event_wrapper.h"
+#include "gtest/gtest.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+#define TEST_DURATION_SEC 600
+
+#define NUMBER_OF_SENDER_TESTS 6
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+#define CHECK_THREAD_NULLITY(myThread, S)                                      \
+    if(myThread != NULL)                                                       \
+    {                                                                          \
+        unsigned int i;                                                        \
+        (myThread)->Start(i);                                                  \
+    }                                                                          \
+    else                                                                       \
+    {                                                                          \
+      ADD_FAILURE() << S;                                                      \
+    }
+
+
+void
+APITest::Wait(WebRtc_UWord32 waitLengthMs)
+{
+    if(_randomTest)
+    {
+        return;
+    }
+    else
+    {
+        EventWrapper* myEvent = EventWrapper::Create();
+        myEvent->Wait(waitLengthMs);
+        delete myEvent;
+        return;
+    }
+}
+
+
+
+APITest::APITest():
+_acmA(NULL),
+_acmB(NULL),
+_channel_A2B(NULL),
+_channel_B2A(NULL),
+_writeToFile(true),
+_pullEventA(NULL),
+_pushEventA(NULL),
+_processEventA(NULL),
+_apiEventA(NULL),
+_pullEventB(NULL),
+_pushEventB(NULL),
+_processEventB(NULL),
+_apiEventB(NULL),
+_codecCntrA(0),
+_codecCntrB(0),
+_testCntrA(1),
+_testCntrB(1),
+_thereIsEncoderA(false),
+_thereIsEncoderB(false),
+_thereIsDecoderA(false),
+_thereIsDecoderB(false),
+_sendVADA(false),
+_sendDTXA(false),
+_sendVADModeA(VADNormal),
+_sendVADB(false),
+_sendDTXB(false),
+_sendVADModeB(VADNormal),
+_minDelayA(0),
+_minDelayB(0),
+_dotPositionA(0),
+_dotMoveDirectionA(1),
+_dotPositionB(39),
+_dotMoveDirectionB(-1),
+_dtmfCallback(NULL),
+_vadCallbackA(NULL),
+_vadCallbackB(NULL),
+_apiTestRWLock(*RWLockWrapper::CreateRWLock()),
+_randomTest(false),
+_testNumA(0),
+_testNumB(1)
+{
+    int n;
+    for( n = 0; n < 32; n++)
+    {
+        _payloadUsed[n] = false;
+    }
+
+    for(n = 0; n < 3; n++)
+    {
+        _receiveVADActivityA[n] = 0;
+        _receiveVADActivityB[n] = 0;
+    }
+    
+    _movingDot[40] = '\0';
+
+    for(int n = 0; n <40; n++)
+    {
+        _movingDot[n]  = ' ';
+    }
+}
+
+APITest::~APITest()
+{
+    DESTROY_ACM(_acmA);
+    DESTROY_ACM(_acmB);
+
+    DELETE_POINTER(_channel_A2B);
+    DELETE_POINTER(_channel_B2A);
+
+    DELETE_POINTER(_pushEventA);
+    DELETE_POINTER(_pullEventA);
+    DELETE_POINTER(_processEventA);
+    DELETE_POINTER(_apiEventA);
+
+    DELETE_POINTER(_pushEventB);
+    DELETE_POINTER(_pullEventB);
+    DELETE_POINTER(_processEventB);
+    DELETE_POINTER(_apiEventB);
+
+    _inFileA.Close();
+    _outFileA.Close();
+
+    _inFileB.Close();
+    _outFileB.Close();
+
+    DELETE_POINTER(_dtmfCallback);
+    DELETE_POINTER(_vadCallbackA);
+    DELETE_POINTER(_vadCallbackB);
+
+    delete &_apiTestRWLock;
+}
+
+
+
+//WebRtc_Word16
+//APITest::SetInFile(char* fileName, WebRtc_UWord16 frequencyHz)
+//{
+//    return _inFile.Open(fileName, frequencyHz, "rb");
+//}
+//
+//WebRtc_Word16
+//APITest::SetOutFile(char* fileName, WebRtc_UWord16 frequencyHz)
+//{
+//    return _outFile.Open(fileName, frequencyHz, "wb");
+//}
+
+WebRtc_Word16 
+APITest::SetUp()
+{
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    CodecInst dummyCodec;
+    int lastPayloadType = 0;
+
+    WebRtc_Word16 numCodecs = _acmA->NumberOfCodecs();
+    for(WebRtc_UWord8 n = 0; n < numCodecs; n++)
+    {
+        AudioCodingModule::Codec(n, dummyCodec);
+        if((STR_CASE_CMP(dummyCodec.plname, "CN") == 0) &&
+            (dummyCodec.plfreq == 32000))
+        {
+            continue;
+        }
+
+        printf("Register Receive Codec %s  ", dummyCodec.plname);
+
+        if((n != 0) && !FixedPayloadTypeCodec(dummyCodec.plname))
+        {
+            // Check registration with an already occupied payload type
+            int currentPayloadType = dummyCodec.pltype;
+            dummyCodec.pltype = 97; //lastPayloadType;
+            CHECK_ERROR(_acmB->RegisterReceiveCodec(dummyCodec));
+            dummyCodec.pltype = currentPayloadType;
+        }
+
+        if((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname))
+        {
+            // test if re-registration works;
+            CodecInst nextCodec;
+            int currentPayloadType = dummyCodec.pltype;
+            AudioCodingModule::Codec(n + 1, nextCodec);
+            dummyCodec.pltype = nextCodec.pltype;
+            if(!FixedPayloadTypeCodec(nextCodec.plname))
+            {
+                _acmB->RegisterReceiveCodec(dummyCodec);
+            }
+            dummyCodec.pltype = currentPayloadType;
+        }
+
+        if((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname))
+        {
+            // test if un-registration works;
+            CodecInst nextCodec;
+            AudioCodingModule::Codec(n + 1, nextCodec);
+            nextCodec.pltype = dummyCodec.pltype;
+            if(!FixedPayloadTypeCodec(nextCodec.plname))
+            {
+                CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(nextCodec));
+                CHECK_ERROR_MT(_acmA->UnregisterReceiveCodec(nextCodec.pltype));
+            }
+        }
+
+
+        CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(dummyCodec));
+        printf("   side A done!");
+        CHECK_ERROR_MT(_acmB->RegisterReceiveCodec(dummyCodec));
+        printf("   side B done!\n");
+
+        if(!strcmp(dummyCodec.plname, "CN"))
+        {
+            CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
+            CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
+        }
+        lastPayloadType = dummyCodec.pltype;
+        if((lastPayloadType >= 96) && (lastPayloadType <= 127))
+        {
+            _payloadUsed[lastPayloadType - 96] = true;
+        }
+    }
+    _thereIsDecoderA = true;
+    _thereIsDecoderB = true;
+
+    // Register Send Codec
+    AudioCodingModule::Codec((WebRtc_UWord8)_codecCntrA, dummyCodec);
+    CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
+    _thereIsEncoderA = true;
+    //
+    AudioCodingModule::Codec((WebRtc_UWord8)_codecCntrB, dummyCodec);
+    CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
+    _thereIsEncoderB = true;
+
+    char fileName[500];
+    WebRtc_UWord16 frequencyHz;
+    
+    printf("\n\nAPI Test\n");
+    printf("========\n");
+    printf("Hit enter to accept the default values indicated in []\n\n");
+
+    //--- Input A
+    strcpy(fileName, "./test/data/audio_coding/testfile32kHz.pcm");
+    frequencyHz = 32000;
+    printf("Enter input file at side A [%s]: ", fileName);
+    PCMFile::ChooseFile(fileName, 499, &frequencyHz);
+    _inFileA.Open(fileName, frequencyHz, "rb", true);
+
+    //--- Output A
+    std::string outputFileA = webrtc::test::OutputPath() + "outA.pcm";
+    strcpy(fileName, outputFileA.c_str());
+    printf("Enter output file at side A [%s]: ", fileName);
+    PCMFile::ChooseFile(fileName, 499, &frequencyHz);
+    _outFileA.Open(fileName, frequencyHz, "wb");
+
+    //--- Input B
+    strcpy(fileName, "./test/data/audio_coding/testfile32kHz.pcm");
+    printf("\n\nEnter input file at side B [%s]: ", fileName);
+    PCMFile::ChooseFile(fileName, 499, &frequencyHz);
+    _inFileB.Open(fileName, frequencyHz, "rb", true);
+
+    //--- Output B
+    std::string outputFileB = webrtc::test::OutputPath() + "outB.pcm";
+    strcpy(fileName, outputFileB.c_str());
+    printf("Enter output file at side B [%s]: ", fileName);
+    PCMFile::ChooseFile(fileName, 499, &frequencyHz);
+    _outFileB.Open(fileName, frequencyHz, "wb");
+
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel(2);
+    CHECK_ERROR_MT(_acmA->RegisterTransportCallback(_channel_A2B));
+    _channel_A2B->RegisterReceiverACM(_acmB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel(1);
+    CHECK_ERROR_MT(_acmB->RegisterTransportCallback(_channel_B2A));
+    _channel_B2A->RegisterReceiverACM(_acmA);
+    
+    //--- EVENT TIMERS
+    // A
+    _pullEventA    = EventWrapper::Create();
+    _pushEventA    = EventWrapper::Create();
+    _processEventA = EventWrapper::Create();
+    _apiEventA     = EventWrapper::Create();
+    // B
+    _pullEventB    = EventWrapper::Create();
+    _pushEventB    = EventWrapper::Create();
+    _processEventB = EventWrapper::Create();
+    _apiEventB     = EventWrapper::Create();
+
+    //--- I/O params
+    // A
+    _outFreqHzA = _outFileA.SamplingFrequency();
+    // B
+    _outFreqHzB = _outFileB.SamplingFrequency();
+
+
+    //Trace::SetEncryptedTraceFile("ACMAPITestEncrypted.txt");    
+
+    char print[11];
+
+    printf("\nRandom Test (y/n)?");
+    EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
+    print[10] = '\0';
+    if(strstr(print, "y") != NULL)
+    {
+        _randomTest = true;
+        _verbose = false;
+        _writeToFile = false;
+        Trace::CreateTrace();
+        Trace::SetTraceFile("ACMAPITest.txt");
+        //freopen("APITest_log.txt", "w", stdout);
+    }
+    else
+    {
+        Trace::CreateTrace();
+        Trace::SetTraceFile("ACMAPITest.txt", true);
+        _randomTest = false;
+        printf("\nPrint Tests (y/n)? ");
+        EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
+        print[10] = '\0';
+        if(strstr(print, "y") == NULL)
+        {
+            EXPECT_TRUE(freopen("APITest_log.txt", "w", stdout) != 0);
+            _verbose = false;
+        }
+    }
+
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfCallback = new DTMFDetector;
+#endif
+    _vadCallbackA = new VADCallback;
+    _vadCallbackB = new VADCallback;
+    
+    return 0;
+}
+
+bool 
+APITest::PushAudioThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->PushAudioRunA();
+}
+
+bool 
+APITest::PushAudioThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->PushAudioRunB();
+}
+
+bool 
+APITest::PullAudioThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->PullAudioRunA();
+}
+
+bool 
+APITest::PullAudioThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->PullAudioRunB();
+}
+
+bool
+APITest::ProcessThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->ProcessRunA();
+}
+
+bool
+APITest::ProcessThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->ProcessRunB();
+}
+
+bool
+APITest::APIThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->APIRunA();
+}
+
+bool
+APITest::APIThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->APIRunB();
+}
+
+bool 
+APITest::PullAudioRunA()
+{
+    _pullEventA->Wait(100);
+    AudioFrame audioFrame;
+    if(_acmA->PlayoutData10Ms(_outFreqHzA, audioFrame) < 0)
+    {
+        bool thereIsDecoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsDecoder = _thereIsDecoderA;
+        }
+        if(thereIsDecoder)
+        {
+            fprintf(stderr, "\n>>>>>>    cannot pull audio A       <<<<<<<< \n");
+        }
+    }
+    else
+    {
+        if(_writeToFile)
+        {
+            _outFileA.Write10MsData(audioFrame);
+        }
+        _receiveVADActivityA[(int)audioFrame._vadActivity]++;
+    }
+    return true;
+}
+
+bool 
+APITest::PullAudioRunB()
+{
+    _pullEventB->Wait(100);
+    AudioFrame audioFrame;
+    if(_acmB->PlayoutData10Ms(_outFreqHzB, audioFrame) < 0)
+    {
+        bool thereIsDecoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsDecoder = _thereIsDecoderB;
+        }
+        if(thereIsDecoder)
+        {
+            fprintf(stderr, "\n>>>>>>    cannot pull audio B       <<<<<<<< \n");
+            fprintf(stderr, "%d %d\n", _testNumA, _testNumB);
+        }
+    }
+    else
+    {
+        if(_writeToFile)
+        {
+            _outFileB.Write10MsData(audioFrame);
+        }
+        _receiveVADActivityB[(int)audioFrame._vadActivity]++;
+    }     
+    return true;
+}
+
+bool 
+APITest::PushAudioRunA()
+{
+    _pushEventA->Wait(100);
+    AudioFrame audioFrame;
+    _inFileA.Read10MsData(audioFrame);
+    if(_acmA->Add10MsData(audioFrame) < 0)
+    {
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderA;
+        }
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>        add10MsData at A failed       <<<<\n");
+        }
+    }
+    return true;
+}
+
+bool 
+APITest::PushAudioRunB()
+{
+    _pushEventB->Wait(100);
+    AudioFrame audioFrame;
+    _inFileB.Read10MsData(audioFrame);
+    if(_acmB->Add10MsData(audioFrame) < 0)
+    {
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderB;
+        }
+
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>   cannot add audio to B    <<<<");
+        }
+    }
+
+    return true;
+}
+
+bool
+APITest::ProcessRunA()
+{
+    _processEventA->Wait(100);
+    if(_acmA->Process() < 0)
+    {
+        // do not print error message if there is no encoder
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderA;
+        }
+
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>>      Process Failed at A     <<<<<\n");
+        }
+    }
+    return true;
+}
+
+bool
+APITest::ProcessRunB()
+{
+    _processEventB->Wait(100);
+    if(_acmB->Process() < 0)
+    {
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderB;
+        }
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>>      Process Failed at B     <<<<<\n");
+        }
+    }
+    return true;
+}
+
+/*
+ *
+ * In side A we test the APIs which are related to the sender side.
+ *
+ */
+
+
+void
+APITest::RunTest(char thread)
+{
+    int testNum;
+    {
+        WriteLockScoped cs(_apiTestRWLock);
+        if(thread == 'A')
+        {
+            _testNumA = (_testNumB + 1 + (rand() % 6)) % 7;
+            testNum = _testNumA;
+            
+            _movingDot[_dotPositionA] = ' ';
+            if(_dotPositionA == 0)
+            {
+                _dotMoveDirectionA = 1;
+            }
+            if(_dotPositionA == 19)
+            {
+                _dotMoveDirectionA = -1;
+            }
+            _dotPositionA += _dotMoveDirectionA;            
+            _movingDot[_dotPositionA] = (_dotMoveDirectionA > 0)? '>':'<';
+        }
+        else
+        {
+            _testNumB = (_testNumA + 1 + (rand() % 6)) % 7;
+            testNum = _testNumB;
+
+            _movingDot[_dotPositionB] = ' ';
+            if(_dotPositionB == 20)
+            {
+                _dotMoveDirectionB = 1;
+            }
+            if(_dotPositionB == 39)
+            {
+                _dotMoveDirectionB = -1;
+            }
+            _dotPositionB += _dotMoveDirectionB;            
+            _movingDot[_dotPositionB] = (_dotMoveDirectionB > 0)? '>':'<';
+        }
+        //fprintf(stderr, "%c: %d \n", thread, testNum);
+        //fflush(stderr);
+    }
+    switch(testNum)
+    {
+    case 0:
+        CurrentCodec('A');
+        ChangeCodec('A');
+        break;
+    case 1:
+        TestPlayout('B');
+        break;
+    case 2:
+        if(!_randomTest)
+        {
+            fprintf(stdout, "\nTesting Delay ...\n");
+        }
+        TestDelay('A');
+        break;
+    case 3:
+        TestSendVAD('A');    
+        break;
+    case 4:
+        TestRegisteration('A');
+        break;
+    case 5:
+        TestReceiverVAD('A');
+        break;
+    case 6:
+#ifdef WEBRTC_DTMF_DETECTION
+        LookForDTMF('A');
+#endif
+        break;
+    default:
+        fprintf(stderr, "Wrong Test Number\n");
+        getchar();
+        exit(1);
+    }
+}
+
+
+
+bool
+APITest::APIRunA()
+{   
+    _apiEventA->Wait(50);
+
+    bool randomTest;
+    {
+        ReadLockScoped rl(_apiTestRWLock);
+        randomTest = _randomTest;
+    }
+    if(randomTest)
+    {
+        RunTest('A');
+    }
+    else
+    {
+        CurrentCodec('A');
+        ChangeCodec('A');
+        TestPlayout('B');
+        if(_codecCntrA == 0)
+        {
+            fprintf(stdout, "\nTesting Delay ...\n");
+            TestDelay('A');
+        }
+        // VAD TEST
+        TestSendVAD('A');    
+        TestRegisteration('A');
+        TestReceiverVAD('A');
+#ifdef WEBRTC_DTMF_DETECTION
+        LookForDTMF('A');
+#endif
+    }
+    return true;
+}
+
+bool
+APITest::APIRunB()
+{   
+    _apiEventB->Wait(50);
+    bool randomTest;
+    {
+        ReadLockScoped rl(_apiTestRWLock);
+        randomTest = _randomTest;
+    }
+    //_apiEventB->Wait(2000);
+    if(randomTest)
+    {
+        RunTest('B');
+    }
+ 
+    return true;
+}
+
+void
+APITest::Perform()
+{
+    SetUp();
+
+    //--- THREADS
+    // A
+    // PUSH
+    ThreadWrapper* myPushAudioThreadA = ThreadWrapper::CreateThread(PushAudioThreadA, 
+        this, kNormalPriority, "PushAudioThreadA");
+    CHECK_THREAD_NULLITY(myPushAudioThreadA, "Unable to start A::PUSH thread");
+    // PULL
+    ThreadWrapper* myPullAudioThreadA = ThreadWrapper::CreateThread(PullAudioThreadA, 
+        this, kNormalPriority, "PullAudioThreadA");
+    CHECK_THREAD_NULLITY(myPullAudioThreadA, "Unable to start A::PULL thread");
+    // Process
+    ThreadWrapper* myProcessThreadA = ThreadWrapper::CreateThread(ProcessThreadA, 
+        this, kNormalPriority, "ProcessThreadA");
+    CHECK_THREAD_NULLITY(myProcessThreadA, "Unable to start A::Process thread");
+    // API 
+    ThreadWrapper* myAPIThreadA = ThreadWrapper::CreateThread(APIThreadA, 
+        this, kNormalPriority, "APIThreadA");
+    CHECK_THREAD_NULLITY(myAPIThreadA, "Unable to start A::API thread");
+    // B
+    // PUSH
+    ThreadWrapper* myPushAudioThreadB = ThreadWrapper::CreateThread(PushAudioThreadB, 
+        this, kNormalPriority, "PushAudioThreadB");
+    CHECK_THREAD_NULLITY(myPushAudioThreadB, "Unable to start B::PUSH thread");
+    // PULL
+    ThreadWrapper* myPullAudioThreadB = ThreadWrapper::CreateThread(PullAudioThreadB, 
+        this, kNormalPriority, "PullAudioThreadB");
+    CHECK_THREAD_NULLITY(myPullAudioThreadB, "Unable to start B::PULL thread");
+    // Process
+    ThreadWrapper* myProcessThreadB = ThreadWrapper::CreateThread(ProcessThreadB, 
+        this, kNormalPriority, "ProcessThreadB");
+    CHECK_THREAD_NULLITY(myProcessThreadB, "Unable to start B::Process thread");
+    // API
+    ThreadWrapper* myAPIThreadB = ThreadWrapper::CreateThread(APIThreadB, 
+        this, kNormalPriority, "APIThreadB");
+    CHECK_THREAD_NULLITY(myAPIThreadB, "Unable to start B::API thread");
+ 
+
+    //_apiEventA->StartTimer(true, 5000);
+    //_apiEventB->StartTimer(true, 5000);
+
+    _processEventA->StartTimer(true, 10);
+    _processEventB->StartTimer(true, 10);
+    
+    _pullEventA->StartTimer(true, 10);
+    _pullEventB->StartTimer(true, 10);
+
+    _pushEventA->StartTimer(true, 10);
+    _pushEventB->StartTimer(true, 10);
+
+    // Keep main thread waiting for sender/receiver
+    // threads to complete
+    EventWrapper* completeEvent = EventWrapper::Create();
+    WebRtc_UWord64 startTime = TickTime::MillisecondTimestamp();
+    WebRtc_UWord64 currentTime;
+    do
+    {
+        {
+            //ReadLockScoped rl(_apiTestRWLock);
+            //fprintf(stderr, "\r%s", _movingDot);
+        }
+        //fflush(stderr);
+        completeEvent->Wait(50);
+        currentTime = TickTime::MillisecondTimestamp();
+    } while((currentTime - startTime) < 120000); // Run test in 2 minutes (120000 ms)
+
+    //completeEvent->Wait(0xFFFFFFFF);//(unsigned long)((unsigned long)TEST_DURATION_SEC * (unsigned long)1000));
+    delete completeEvent;
+    
+    myPushAudioThreadA->Stop();
+    myPullAudioThreadA->Stop();
+    myProcessThreadA->Stop();
+    myAPIThreadA->Stop();
+
+    delete myPushAudioThreadA;
+    delete myPullAudioThreadA;
+    delete myProcessThreadA;
+    delete myAPIThreadA;
+
+
+    myPushAudioThreadB->Stop();
+    myPullAudioThreadB->Stop();
+    myProcessThreadB->Stop();
+    myAPIThreadB->Stop();
+
+    delete myPushAudioThreadB;
+    delete myPullAudioThreadB;
+    delete myProcessThreadB;
+    delete myAPIThreadB;
+}
+
+
+void
+APITest::CheckVADStatus(char side)
+{
+
+    bool dtxEnabled;
+    bool vadEnabled;
+    ACMVADMode vadMode;
+
+    if(side == 'A')
+    {
+        _acmA->VAD(dtxEnabled, vadEnabled, vadMode);
+        _acmA->RegisterVADCallback(NULL);
+        _vadCallbackA->Reset();
+        _acmA->RegisterVADCallback(_vadCallbackA);
+        
+        if(!_randomTest)
+        {
+            if(_verbose)
+            {
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d", 
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode);
+                Wait(5000);
+                fprintf(stdout, " => bit-rate %3.0f kbps\n",
+                    _channel_A2B->BitRate());
+            }
+            else
+            {
+                Wait(5000);
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n", 
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode,
+                    _channel_A2B->BitRate());
+            }
+            _vadCallbackA->PrintFrameTypes();
+        }
+
+        if(dtxEnabled != _sendDTXA)
+        {
+            fprintf(stderr, ">>>   Error Enabling DTX    <<<\n");
+        }
+        if((vadEnabled != _sendVADA) && (!dtxEnabled))
+        {
+            fprintf(stderr, ">>>   Error Enabling VAD    <<<\n");
+        }
+        if((vadMode != _sendVADModeA) && vadEnabled)
+        {
+            fprintf(stderr, ">>>   Error setting VAD-mode    <<<\n");
+        }
+    }
+    else
+    {
+        _acmB->VAD(dtxEnabled, vadEnabled, vadMode);
+
+        _acmB->RegisterVADCallback(NULL);
+        _vadCallbackB->Reset();
+        _acmB->RegisterVADCallback(_vadCallbackB);
+        
+        if(!_randomTest)
+        {
+            if(_verbose)
+            {
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d", 
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode);
+                Wait(5000);
+                fprintf(stdout, " => bit-rate %3.0f kbps\n",
+                    _channel_B2A->BitRate());
+            }
+            else
+            {
+                Wait(5000);
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n", 
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode,
+                    _channel_B2A->BitRate());
+            }
+            _vadCallbackB->PrintFrameTypes();
+        }
+
+        if(dtxEnabled != _sendDTXB)
+        {
+            fprintf(stderr, ">>>   Error Enabling DTX    <<<\n");
+        }
+        if((vadEnabled != _sendVADB) && (!dtxEnabled))
+        {
+            fprintf(stderr, ">>>   Error Enabling VAD    <<<\n");
+        }
+        if((vadMode != _sendVADModeB) && vadEnabled)
+        {
+            fprintf(stderr, ">>>   Error setting VAD-mode    <<<\n");
+        }
+    }
+}
+
+// Set Min delay, get delay, playout timestamp
+void
+APITest::TestDelay(char side)
+{
+    AudioCodingModule* myACM;
+    Channel* myChannel;
+    WebRtc_Word32* myMinDelay;
+    EventWrapper* myEvent = EventWrapper::Create();
+
+    WebRtc_UWord32 inTimestamp = 0;
+    WebRtc_UWord32 outTimestamp = 0;
+    double estimDelay = 0;    
+
+    double averageEstimDelay = 0;
+    double averageDelay = 0;
+
+    CircularBuffer estimDelayCB(100);
+    estimDelayCB.SetArithMean(true);
+
+    if(side == 'A')
+    {
+        myACM = _acmA;
+        myChannel = _channel_B2A;
+        myMinDelay = &_minDelayA;
+    }
+    else
+    {
+        myACM = _acmB;
+        myChannel = _channel_A2B;
+        myMinDelay = &_minDelayB;
+    }
+
+
+    CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
+
+
+    inTimestamp = myChannel->LastInTimestamp();        
+    CHECK_ERROR_MT(myACM->PlayoutTimestamp(outTimestamp));
+
+    if(!_randomTest)
+    {
+        myEvent->StartTimer(true, 30);
+        int n = 0;
+        int settlePoint = 5000;
+        while(n < settlePoint + 400)
+        {
+            myEvent->Wait(1000);
+
+            inTimestamp = myChannel->LastInTimestamp();        
+            CHECK_ERROR_MT(myACM->PlayoutTimestamp(outTimestamp));
+
+            //std::cout << outTimestamp << std::endl << std::flush;
+            estimDelay = (double)((WebRtc_UWord32)(inTimestamp - outTimestamp)) / 
+                ((double)myACM->ReceiveFrequency() / 1000.0);
+
+            estimDelayCB.Update(estimDelay);
+
+            estimDelayCB.ArithMean(averageEstimDelay);
+            //printf("\n %6.1f \n", estimDelay);
+            //std::cout << " " << std::flush;
+
+            if(_verbose)
+            {
+                fprintf(stdout, "\rExpected: %4d,    retrieved: %6.1f,   measured: %6.1f",
+                    *myMinDelay, averageDelay, averageEstimDelay);
+                std::cout << " " << std::flush;
+            }
+            if((averageDelay > *myMinDelay) && (n < settlePoint))
+            {
+                settlePoint = n;
+            }
+            n++;
+        }
+        myEvent->StopTimer();
+    }
+
+    if((!_verbose) && (!_randomTest))
+    {
+        fprintf(stdout, "\nExpected: %4d,    retrieved: %6.1f,   measured: %6.1f",
+            *myMinDelay, averageDelay, averageEstimDelay);
+    }
+
+    *myMinDelay = (rand() % 1000) + 1;
+  
+    ACMNetworkStatistics networkStat;
+    CHECK_ERROR_MT(myACM->NetworkStatistics(networkStat));
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\nJitter Statistics at Side %c\n", side);
+        fprintf(stdout, "--------------------------------------\n");
+        fprintf(stdout, "buffer-size............. %d\n", networkStat.currentBufferSize);    
+        fprintf(stdout, "Preferred buffer-size... %d\n", networkStat.preferredBufferSize);
+        fprintf(stdout, "Peaky jitter mode........%d\n", networkStat.jitterPeaksFound);
+        fprintf(stdout, "packet-size rate........ %d\n", networkStat.currentPacketLossRate);
+        fprintf(stdout, "discard rate............ %d\n", networkStat.currentDiscardRate);   
+        fprintf(stdout, "expand rate............. %d\n", networkStat.currentExpandRate);    
+        fprintf(stdout, "Preemptive rate......... %d\n", networkStat.currentPreemptiveRate);
+        fprintf(stdout, "Accelerate rate......... %d\n", networkStat.currentAccelerateRate);
+        fprintf(stdout, "Clock-drift............. %d\n", networkStat.clockDriftPPM);
+        fprintf(stdout, "Mean waiting time....... %d\n", networkStat.meanWaitingTimeMs);
+        fprintf(stdout, "Median waiting time..... %d\n", networkStat.medianWaitingTimeMs);
+        fprintf(stdout, "Min waiting time........ %d\n", networkStat.minWaitingTimeMs);
+        fprintf(stdout, "Max waiting time........ %d\n", networkStat.maxWaitingTimeMs);
+    }
+
+    CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
+
+    if(!_randomTest)
+    {
+        myEvent->Wait(500);
+        fprintf(stdout, "\n");
+        fprintf(stdout, "\n");
+    }
+    delete myEvent;
+}
+
+// Unregister a codec & register again.
+void
+APITest::TestRegisteration(char sendSide)
+{
+    AudioCodingModule* sendACM;
+    AudioCodingModule* receiveACM;
+    bool* thereIsDecoder;
+    EventWrapper* myEvent = EventWrapper::Create();
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+        fprintf(stdout, "---------------------------------------------------------\n");
+        fprintf(stdout, "           Unregister/register Receive Codec\n");
+        fprintf(stdout, "---------------------------------------------------------\n");
+    }
+    
+    switch(sendSide)
+    {
+    case 'A':
+        {
+            sendACM = _acmA;
+            receiveACM = _acmB;
+            thereIsDecoder = &_thereIsDecoderB;
+            break;
+        }
+    case 'B':
+        {
+            sendACM = _acmB;
+            receiveACM = _acmA;
+            thereIsDecoder = &_thereIsDecoderA;
+            break;
+        }
+    default:
+        fprintf(stderr, "Invalid sender-side in TestRegistration(%c)\n", sendSide);
+        exit(-1);
+    }
+
+    CodecInst myCodec;
+    if(sendACM->SendCodec(myCodec) < 0)
+    {
+        AudioCodingModule::Codec(_codecCntrA, myCodec);
+    }
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "Unregistering receive codec, NO AUDIO.\n");
+        fflush(stdout);
+    }
+    {
+        WriteLockScoped wl(_apiTestRWLock);
+        *thereIsDecoder = false;
+    }
+    //myEvent->Wait(20);
+    CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
+    Wait(1000);
+
+    int currentPayload = myCodec.pltype;
+
+    if(!FixedPayloadTypeCodec(myCodec.plname))
+    {
+        WebRtc_Word32 i;
+        for(i = 0; i < 32; i++)
+        {
+            if(!_payloadUsed[i])
+            {
+                if(!_randomTest)
+                {
+                    fprintf(stdout, "Register receive codec with new Payload, AUDIO BACK.\n");
+                }
+                //myCodec.pltype = i + 96;
+                //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+                //CHECK_ERROR_MT(sendACM->RegisterSendCodec(myCodec));
+                //myEvent->Wait(20);
+                //{
+                //    WriteLockScoped wl(_apiTestRWLock);
+                //    *thereIsDecoder = true;
+                //}
+                Wait(1000);
+
+                if(!_randomTest)
+                {
+                    fprintf(stdout, "Unregistering reveive codec, NO AUDIO.\n");
+                }
+                //{
+                //    WriteLockScoped wl(_apiTestRWLock);
+                //    *thereIsDecoder = false;
+                //}
+                //myEvent->Wait(20);
+                //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
+                Wait(1000);
+
+                myCodec.pltype = currentPayload;
+                if(!_randomTest)
+                {
+                    fprintf(stdout, "Register receive codec with default Payload, AUDIO BACK.\n");
+                    fflush(stdout);
+                }
+                CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+                //CHECK_ERROR_MT(sendACM->RegisterSendCodec(myCodec));
+                myEvent->Wait(20);
+                {
+                    WriteLockScoped wl(_apiTestRWLock);
+                    *thereIsDecoder = true;
+                }
+                Wait(1000);
+
+                break;
+            }
+        }
+        if(i == 32)
+        {
+            CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+            {
+                WriteLockScoped wl(_apiTestRWLock);
+                *thereIsDecoder = true;
+            }
+        }
+    }
+    else
+    {
+        if(!_randomTest)
+        {
+            fprintf(stdout, "Register receive codec with fixed Payload, AUDIO BACK.\n");
+            fflush(stdout);
+        }
+        CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+        //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
+        //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+        myEvent->Wait(20);
+        {
+            WriteLockScoped wl(_apiTestRWLock);
+            *thereIsDecoder = true;
+        }
+    }
+    delete myEvent;
+    if(!_randomTest)
+    {
+        fprintf(stdout, "---------------------------------------------------------\n");
+    }
+}
+
+// Playout Mode, background noise mode.
+// Receiver Frequency, playout frequency.
+void
+APITest::TestPlayout(char receiveSide)
+{
+    // Queries the receive/playout frequencies of the given side and cycles
+    // both the background-noise mode and the playout mode to their next
+    // values, printing the result unless the silent random test is running.
+    AudioCodingModule* receiveACM;
+    AudioPlayoutMode* playoutMode = NULL;
+    ACMBackgroundNoiseMode* bgnMode = NULL;
+    switch(receiveSide)
+    {
+        case 'A':
+            {
+                receiveACM = _acmA;
+                playoutMode = &_playoutModeA;
+                bgnMode = &_bgnModeA;
+                break;
+            }
+        case 'B':
+            {
+                receiveACM = _acmB;
+                playoutMode = &_playoutModeB;
+                bgnMode = &_bgnModeB;
+                break;
+            }
+        default:
+            // BUG FIX: the fallback used to set only receiveACM, leaving
+            // playoutMode/bgnMode NULL and crashing on the dereferences
+            // below. Fall back fully to side A instead.
+            receiveACM = _acmA;
+            playoutMode = &_playoutModeA;
+            bgnMode = &_bgnModeA;
+    }
+
+    WebRtc_Word32 receiveFreqHz = receiveACM->ReceiveFrequency();
+    WebRtc_Word32 playoutFreqHz = receiveACM->PlayoutFrequency();
+
+    CHECK_ERROR_MT(receiveFreqHz);
+    CHECK_ERROR_MT(playoutFreqHz);
+
+    // Cycle the background-noise mode: On -> Fade -> Off -> On.
+    char bgnString[25];
+    switch(*bgnMode)
+    {
+    case On:
+        {
+            *bgnMode = Fade;
+            strncpy(bgnString, "Fade", 25);
+            break;
+        }
+    case Fade:
+        {
+            *bgnMode = Off;
+            strncpy(bgnString, "OFF", 25);
+            break;
+        }
+    case Off:
+        {
+            *bgnMode = On;
+            strncpy(bgnString, "ON", 25);
+            break;
+        }
+    default:
+        *bgnMode = On;
+        strncpy(bgnString, "ON", 25);
+    }
+    CHECK_ERROR_MT(receiveACM->SetBackgroundNoiseMode(*bgnMode));
+    bgnString[24] = '\0';
+
+    // Cycle the playout mode: voice -> fax -> streaming -> voice.
+    char playoutString[25];
+    switch(*playoutMode)
+    {
+    case voice:
+        {
+            *playoutMode = fax;
+            strncpy(playoutString, "FAX", 25);
+            break;
+        }
+    case fax:
+        {
+            *playoutMode = streaming;
+            strncpy(playoutString, "Streaming", 25);
+            break;
+        }
+    case streaming:
+        {
+            *playoutMode = voice;
+            strncpy(playoutString, "Voice", 25);
+            break;
+        }
+    default:
+        *playoutMode = voice;
+        strncpy(playoutString, "Voice", 25);
+    }
+    CHECK_ERROR_MT(receiveACM->SetPlayoutMode(*playoutMode));
+    playoutString[24] = '\0';
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n");
+        fprintf(stdout, "In Side %c\n", receiveSide);
+        fprintf(stdout, "---------------------------------\n");
+        fprintf(stdout, "Receive Frequency....... %d Hz\n", receiveFreqHz);
+        fprintf(stdout, "Playout Frequency....... %d Hz\n", playoutFreqHz);
+        fprintf(stdout, "Audio Playout Mode...... %s\n", playoutString);
+        fprintf(stdout, "Background Noise Mode... %s\n", bgnString);
+    }
+}
+
+// set/get receiver VAD status & mode.
+void
+APITest::TestReceiverVAD(char side)
+{
+    // Prints the current receive-side VAD mode and activity counters,
+    // rotates to the next VAD mode, applies it, and clears the counters.
+    AudioCodingModule* myACM;
+    int* myReceiveVADActivity;
+
+    // Pick the ACM and VAD-activity counters of the requested side.
+    if(side == 'A')
+    {
+        myACM = _acmA;
+        myReceiveVADActivity = _receiveVADActivityA;
+    }
+    else
+    {
+        myACM = _acmB;
+        myReceiveVADActivity = _receiveVADActivityB;
+    }
+
+    ACMVADMode mode = myACM->ReceiveVADMode();
+
+    CHECK_ERROR_MT(mode);
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\nCurrent Receive VAD at side %c\n", side);
+        fprintf(stdout, "----------------------------------\n");
+        fprintf(stdout, "mode.......... %d\n", (int)mode);
+        fprintf(stdout, "VAD Active.... %d\n", myReceiveVADActivity[0]);
+        fprintf(stdout, "VAD Passive... %d\n", myReceiveVADActivity[1]);
+        fprintf(stdout, "VAD Unknown... %d\n", myReceiveVADActivity[2]);
+    }
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\nChange Receive VAD at side %c\n\n", side);
+    }
+
+    // Rotate to the next VAD mode.
+    switch(mode)
+    {
+      case VADNormal:
+          mode = VADAggr;
+          break;
+      case VADLowBitrate:
+          mode = VADVeryAggr;
+          break;
+      case VADAggr:
+          mode = VADLowBitrate;
+          break;
+      case VADVeryAggr:
+          mode = VADNormal;
+          break;
+      default:
+          mode = VADNormal;
+          break;
+    }
+
+    // BUG FIX: this call used to sit inside the default branch of the
+    // switch above, so the newly-chosen mode was never applied for any
+    // valid current mode. Apply it unconditionally after the rotation.
+    CHECK_ERROR_MT(myACM->SetReceiveVADMode(mode));
+
+    // Reset the activity counters for the next measurement period.
+    for(int n = 0; n < 3; n++)
+    {
+        myReceiveVADActivity[n] = 0;
+    }
+}
+
+
+void
+APITest::TestSendVAD(char side)
+{
+    // Exercises the send-side SetVAD() API on the given side: cycles the
+    // VAD mode, toggles VAD/DTX accordingly, then checks that out-of-range
+    // modes are rejected. Skipped entirely during the random test.
+    if(_randomTest)
+    {
+        return;
+    }
+
+    bool* vad;
+    bool* dtx;
+    ACMVADMode* mode;
+    Channel* myChannel;
+    AudioCodingModule* myACM;
+
+    CodecInst myCodec;
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+        fprintf(stdout, "-----------------------------------------------\n");
+        fprintf(stdout, "                Test VAD API\n");
+        fprintf(stdout, "-----------------------------------------------\n");
+    }
+
+    // Bind to side A's or side B's codec, VAD/DTX flags, channel and ACM.
+    if(side == 'A')
+    {
+        AudioCodingModule::Codec(_codecCntrA, myCodec);
+        vad = &_sendVADA;
+        dtx = &_sendDTXA;
+        mode = &_sendVADModeA;
+        myChannel = _channel_A2B;
+        myACM = _acmA;
+    }
+    else
+    {
+        AudioCodingModule::Codec(_codecCntrB, myCodec);
+        vad = &_sendVADB;
+        dtx = &_sendDTXB;
+        mode = &_sendVADModeB;
+        myChannel = _channel_B2A;
+        myACM = _acmB;
+    }
+
+    CheckVADStatus(side);
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+    }
+
+    // Cycle the VAD mode; VAD/DTX stay enabled for all transitions except
+    // leaving VADVeryAggr, which turns them off again.
+    switch(*mode)
+    {
+    case VADNormal:
+        *vad = true;
+        *dtx = true;
+        *mode = VADAggr;
+        break;
+    case VADLowBitrate:
+        *vad = true;
+        *dtx = true;
+        *mode = VADVeryAggr;
+        break;
+    case VADAggr:
+        *vad = true;
+        *dtx = true;
+        *mode = VADLowBitrate;
+        break;
+    case VADVeryAggr:
+        *vad = false;
+        *dtx = false;
+        *mode = VADNormal;
+        break;
+    default:
+        *mode = VADNormal;
+    }
+
+    // DTX is forced off for 32 kHz codecs (same rule as in ChangeCodec).
+    *dtx = (myCodec.plfreq == 32000)? false:*dtx;
+
+    CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
+    myChannel->ResetStats();
+
+    CheckVADStatus(side);
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n");
+        fprintf(stdout, "-----------------------------------------------\n");
+    }
+
+    // Fault Test: modes outside the valid enum range must be rejected.
+    CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)-1));
+    CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)4));
+    
+
+
+}
+
+
+void
+APITest::CurrentCodec(char side)
+{
+    // Queries the current send codec of the given side and prints its
+    // parameters (unless running the silent random test).
+    CodecInst myCodec;
+    if(side == 'A')
+    {
+        _acmA->SendCodec(myCodec);
+    }
+    else
+    {
+        _acmB->SendCodec(myCodec);
+    }
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+        // BUG FIX: the heading was hard-coded to "Side A" even when
+        // reporting side B; print the actual side instead.
+        fprintf(stdout, "Send codec in Side %c\n", side);
+        fprintf(stdout, "----------------------------\n");
+        fprintf(stdout, "Name................. %s\n", myCodec.plname);
+        fprintf(stdout, "Sampling Frequency... %d\n", myCodec.plfreq);
+        fprintf(stdout, "Rate................. %d\n", myCodec.rate);
+        fprintf(stdout, "Payload-type......... %d\n", myCodec.pltype);
+        fprintf(stdout, "Packet-size.......... %d\n", myCodec.pacsize);
+    }
+
+    Wait(100);
+}
+
+void
+APITest::ChangeCodec(char side)
+{
+    // Resets the encoder of the given side and registers the next codec
+    // from the ACM codec list, skipping CN, telephone-event and RED.
+    // When the codec counter wraps around, the sender is re-initialized
+    // and the CN codecs (lost by InitializeSender) are re-registered.
+    CodecInst myCodec;
+    AudioCodingModule* myACM;
+    WebRtc_UWord8* codecCntr;
+    bool* thereIsEncoder;
+    bool* vad;
+    bool* dtx;
+    ACMVADMode* mode;
+    Channel* myChannel;
+    // Reset and Wait
+    if(!_randomTest)
+    {
+        fprintf(stdout, "Reset Encoder Side A \n");
+    }
+    // Bind to the requested side's state. The lock only guards taking the
+    // address of the shared flag; writes to it below re-acquire the lock.
+    if(side == 'A')
+    {
+        myACM = _acmA;
+        codecCntr = &_codecCntrA;
+        {
+            WriteLockScoped wl(_apiTestRWLock);
+            thereIsEncoder = &_thereIsEncoderA;
+        }
+        vad = &_sendVADA;
+        dtx = &_sendDTXA;
+        mode = &_sendVADModeA;
+        myChannel = _channel_A2B;
+    }
+    else
+    {
+        myACM = _acmB;
+        codecCntr = &_codecCntrB;
+        {
+            WriteLockScoped wl(_apiTestRWLock);
+            thereIsEncoder = &_thereIsEncoderB;
+        }
+        vad = &_sendVADB;
+        dtx = &_sendDTXB;
+        mode = &_sendVADModeB;
+        myChannel = _channel_B2A;
+    }
+
+    myACM->ResetEncoder();  
+    Wait(100);
+
+    // Register the next codec
+    do
+    {
+        // Advance the codec counter, wrapping to 0 at the end of the list.
+        *codecCntr = (*codecCntr < AudioCodingModule::NumberOfCodecs() - 1)? 
+            (*codecCntr + 1):0;     
+
+        if(*codecCntr == 0)
+        {
+            //printf("Initialize Sender Side A \n");
+            {
+                WriteLockScoped wl(_apiTestRWLock);
+                *thereIsEncoder = false;
+            }
+            CHECK_ERROR_MT(myACM->InitializeSender());
+            Wait(1000);   
+
+            // After Initialization CN is lost, re-register them
+            if(AudioCodingModule::Codec("CN", myCodec, 8000) >= 0)
+            {
+                CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+            }
+            if(AudioCodingModule::Codec("CN", myCodec, 16000) >= 0)
+            {
+                CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+            }
+            // VAD & DTX are disabled after initialization
+            *vad = false;
+            *dtx = false;
+            _writeToFile = false;
+        }
+
+        AudioCodingModule::Codec(*codecCntr, myCodec);
+    } while(!STR_CASE_CMP(myCodec.plname, "CN")          ||
+        !STR_CASE_CMP(myCodec.plname, "telephone-event") ||
+        !STR_CASE_CMP(myCodec.plname, "RED"));
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n====================================================================\n");
+        fprintf(stdout, "      Registering New Codec %s, %d kHz, %d kbps\n",
+            myCodec.plname, myCodec.plfreq / 1000, myCodec.rate / 1000);
+    }
+    //std::cout<< std::flush;
+
+    // NO DTX for super-wideband codec at this point
+    if(myCodec.plfreq == 32000)
+    {
+        *dtx = false;
+        CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
+
+    }
+
+    CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+    myChannel->ResetStats();
+    {
+        WriteLockScoped wl(_apiTestRWLock);
+        *thereIsEncoder = true;
+    }
+    Wait(500);
+}
+
+ 
+void 
+APITest::LookForDTMF(char side)
+{
+    // Attaches the DTMF detector to the given side's ACM for one second,
+    // keeping the opposite side's callback detached the whole time.
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\nLooking for DTMF Signal in Side %c\n", side);
+        fprintf(stdout, "----------------------------------------\n");
+    }
+
+    AudioCodingModule* listenACM = (side == 'A') ? _acmA : _acmB;
+    AudioCodingModule* otherACM  = (side == 'A') ? _acmB : _acmA;
+
+    otherACM->RegisterIncomingMessagesCallback(NULL);
+    listenACM->RegisterIncomingMessagesCallback(_dtmfCallback);
+    Wait(1000);
+    listenACM->RegisterIncomingMessagesCallback(NULL);
+}
+
+} // namespace webrtc
+
diff --git a/trunk/src/modules/audio_coding/main/test/APITest.h b/trunk/src/modules/audio_coding/main/test/APITest.h
new file mode 100644
index 0000000..db0a87c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/APITest.h
@@ -0,0 +1,177 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_H
+#define API_TEST_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "event_wrapper.h"
+#include "utility.h"
+
+namespace webrtc {
+
+enum APITESTAction {TEST_CHANGE_CODEC_ONLY = 0, DTX_TEST = 1};
+
+// Multi-threaded ACM API stress test. Two AudioCodingModule instances
+// (side A and side B) feed audio to each other through Channel objects
+// while per-side threads push/pull audio, run Process(), and hammer the
+// public API (codec changes, VAD, playout modes, DTMF, ...).
+class APITest : public ACMTest
+{
+public:
+    APITest();
+    ~APITest();
+
+    // Entry point: sets up both sides and runs the test threads.
+    void Perform();
+private:
+    // Creates ACMs, channels, files and events; returns < 0 on failure.
+    WebRtc_Word16 SetUp();
+    
+    // Static thread trampolines; 'obj' is the APITest instance.
+    static bool PushAudioThreadA(void* obj);
+    static bool PullAudioThreadA(void* obj);
+    static bool ProcessThreadA(void* obj);
+    static bool APIThreadA(void* obj);
+
+    static bool PushAudioThreadB(void* obj);
+    static bool PullAudioThreadB(void* obj);
+    static bool ProcessThreadB(void* obj);
+    static bool APIThreadB(void* obj);
+
+    void CheckVADStatus(char side);
+
+    // Set Min delay, get delay, playout timestamp
+    void TestDelay(char side);
+
+    // Unregister a codec & register again.
+    // NOTE(review): name is misspelled ("Registeration") but must match
+    // the definition in the .cc file; rename both together if ever fixed.
+    void TestRegisteration(char side);
+
+    // Playout Mode, background noise mode.
+    // Receiver Frequency, playout frequency.
+    void TestPlayout(char receiveSide);
+
+    // set/get receiver VAD status & mode.
+    void TestReceiverVAD(char side);
+
+    // Cycles the send-side VAD mode and toggles VAD/DTX.
+    void TestSendVAD(char side);
+
+    // Prints the current send codec of the given side.
+    void CurrentCodec(char side);
+    
+    // Switches the send side to the next codec in the ACM codec list.
+    void ChangeCodec(char side);
+    
+    void Wait(WebRtc_UWord32 waitLengthMs);
+
+    void LookForDTMF(char side);
+
+    void RunTest(char thread);
+    
+    bool PushAudioRunA();    
+    bool PullAudioRunA();
+    bool ProcessRunA();
+    bool APIRunA();
+  
+    bool PullAudioRunB();
+    bool PushAudioRunB();
+    bool ProcessRunB();
+    bool APIRunB();
+
+
+
+    //--- ACMs
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+    
+    //--- Channels
+    Channel* _channel_A2B;
+    Channel* _channel_B2A;
+    
+    //--- I/O files
+    // A
+    PCMFile _inFileA;
+    PCMFile _outFileA;
+    // B
+    PCMFile _outFileB;
+    PCMFile _inFileB;
+    
+    //--- I/O params
+    // A
+    WebRtc_Word32 _outFreqHzA;
+    // B
+    WebRtc_Word32 _outFreqHzB;
+    
+    // Should we write to file.
+    // we might skip writing to file if we
+    // run the test for a long time.
+    bool _writeToFile;
+    //--- Events
+    // A
+    EventWrapper* _pullEventA;      // pulling data from ACM
+    EventWrapper* _pushEventA;      // pushing data to ACM
+    EventWrapper* _processEventA;   // process
+    EventWrapper* _apiEventA;       // API calls
+    // B
+    EventWrapper* _pullEventB;      // pulling data from ACM
+    EventWrapper* _pushEventB;      // pushing data to ACM
+    EventWrapper* _processEventB;   // process
+    EventWrapper* _apiEventB;       // API calls
+
+    // keep track of the codec in either side.
+    WebRtc_UWord8 _codecCntrA;
+    WebRtc_UWord8 _codecCntrB;
+
+    // keep track of tests
+    WebRtc_UWord8 _testCntrA;
+    WebRtc_UWord8 _testCntrB;
+
+    // Is set to true if there is no encoder in either side
+    // NOTE(review): the comment above appears inverted — the .cc sets
+    // these flags to true *after* registering an encoder/decoder.
+    bool _thereIsEncoderA;
+    bool _thereIsEncoderB;
+    bool _thereIsDecoderA;
+    bool _thereIsDecoderB;
+
+    // Send-side VAD/DTX state, per side.
+    bool             _sendVADA;
+    bool             _sendDTXA;
+    ACMVADMode       _sendVADModeA;
+
+    bool             _sendVADB;
+    bool             _sendDTXB;
+    ACMVADMode       _sendVADModeB;
+
+    WebRtc_Word32    _minDelayA;
+    WebRtc_Word32    _minDelayB;
+    // Presumably marks dynamic payload types already in use — TODO confirm
+    // against TestRegisteration in the .cc file.
+    bool             _payloadUsed[32];
+        
+    AudioPlayoutMode    _playoutModeA;
+    AudioPlayoutMode    _playoutModeB;
+
+    ACMBackgroundNoiseMode _bgnModeA;
+    ACMBackgroundNoiseMode _bgnModeB;
+
+
+    // Per-side frame counts indexed by VAD activity class (see
+    // TestReceiverVAD: [0]=active, [1]=passive, [2]=unknown).
+    int            _receiveVADActivityA[3];
+    int            _receiveVADActivityB[3];
+    bool           _verbose;
+    
+    // State of the console "moving dot" progress indicator.
+    int            _dotPositionA;
+    int            _dotMoveDirectionA;
+    int            _dotPositionB;
+    int            _dotMoveDirectionB;
+
+    char           _movingDot[41];
+    
+    DTMFDetector*  _dtmfCallback;
+    VADCallback*   _vadCallbackA;
+    VADCallback*   _vadCallbackB;
+    // Guards the shared encoder/decoder presence flags above.
+    RWLockWrapper&    _apiTestRWLock;
+    bool           _randomTest;
+    int            _testNumA;
+    int            _testNumB;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/Channel.cc b/trunk/src/modules/audio_coding/main/test/Channel.cc
new file mode 100644
index 0000000..363b106
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/Channel.cc
@@ -0,0 +1,483 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <iostream>
+
+#include "audio_coding_module.h"
+#include "Channel.h"
+#include "tick_util.h"
+#include "typedefs.h"
+#include "common_types.h"
+
+namespace webrtc {
+
+WebRtc_Word32 
+Channel::SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData, 
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation)
+{
+    // AudioPacketizationCallback implementation: builds an RTP header for
+    // the encoded frame, updates the channel statistics, and forwards the
+    // packet to the receiving ACM. Returns IncomingPacket()'s status.
+    WebRtcRTPHeader rtpInfo;
+    WebRtc_Word32   status;
+    WebRtc_UWord16  payloadDataSize = payloadSize;
+
+    rtpInfo.header.markerBit = false;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.timestamp = timeStamp;
+    if(frameType == kAudioFrameCN)
+    {
+        rtpInfo.type.Audio.isCNG = true;
+    }
+    else
+    {
+        rtpInfo.type.Audio.isCNG = false;
+    }
+    if(frameType == kFrameEmpty)
+    {
+        // Skip this frame
+        return 0;
+    }
+
+    rtpInfo.type.Audio.channel = 1;
+    // Treat fragmentation separately
+    if(fragmentation != NULL)
+    {
+        if((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) && // silence for too long send only new data
+            (fragmentation->fragmentationVectorSize == 2))
+        {
+            // Two blocks: hand-build a redundant (RED) payload — first the
+            // redundant-block header, then the redundant and primary data.
+            // only 0x80 if we have multiple blocks
+            _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
+            // 14-bit timestamp offset (checked <= 0x3fff above) packed
+            // above the 10-bit redundant-block length.
+            WebRtc_UWord32 REDheader =  (((WebRtc_UWord32)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
+            _payloadData[1] = WebRtc_UWord8((REDheader >> 16) & 0x000000FF);
+            _payloadData[2] = WebRtc_UWord8((REDheader >> 8) & 0x000000FF);
+            _payloadData[3] = WebRtc_UWord8(REDheader & 0x000000FF);
+
+            _payloadData[4] = fragmentation->fragmentationPlType[0];
+            // copy the RED data
+            memcpy(_payloadData + 5,
+                payloadData + fragmentation->fragmentationOffset[1],
+                fragmentation->fragmentationLength[1]);
+            // copy the normal data
+            memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
+                payloadData + fragmentation->fragmentationOffset[0],
+                fragmentation->fragmentationLength[0]);
+            payloadDataSize += 5;
+        } else
+        {
+            // single block (newest one)
+            memcpy(_payloadData,
+                payloadData + fragmentation->fragmentationOffset[0],
+                fragmentation->fragmentationLength[0]);
+            payloadDataSize = WebRtc_UWord16(fragmentation->fragmentationLength[0]);
+            rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
+        }
+    }
+    else
+    {
+        memcpy(_payloadData, payloadData, payloadDataSize);
+        if(_isStereo)
+        {
+            // Stereo arrives as alternating left/right calls: save the
+            // header from the left-channel call and reuse it (with the
+            // channel field changed) for the matching right-channel call.
+            if(_leftChannel)
+            {
+                memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
+                _leftChannel = false;
+                rtpInfo.type.Audio.channel = 1;
+            }
+            else
+            {
+                memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
+                _leftChannel = true;
+                rtpInfo.type.Audio.channel = 2;
+            }
+        }
+    }
+    
+    _channelCritSect->Enter();
+    if(_saveBitStream)
+    {
+        //fwrite(payloadData, sizeof(WebRtc_UWord8), payloadSize, _bitStreamFile);
+    }
+
+    // Statistics are only maintained for mono streams.
+    if(!_isStereo)
+    {
+        CalcStatistics(rtpInfo, payloadSize);
+    }
+    _lastInTimestamp = timeStamp;
+    _totalBytes += payloadDataSize;
+    _channelCritSect->Leave();
+
+    // Optional FEC test mode: drop every third packet.
+    if(_useFECTestWithPacketLoss)
+    {
+        _packetLoss += 1;
+        if(_packetLoss == 3)
+        {
+            _packetLoss = 0;
+            return 0;
+        }
+    }
+
+    
+    //status = _receiverACM->IncomingPayload((WebRtc_Word8*)_payloadData, payloadSize, payloadType, timeStamp);
+    status = _receiverACM->IncomingPacket((WebRtc_Word8*)_payloadData, payloadDataSize, rtpInfo);
+
+    //delete [] payloadData;
+
+    
+
+    return status;
+}
+
+void 
+Channel::CalcStatistics(
+    WebRtcRTPHeader& rtpInfo, 
+    WebRtc_UWord16   payloadSize)
+{
+    // Updates the per-payload-type / per-frame-size statistics for one
+    // outgoing packet. Called with _channelCritSect held (see SendData).
+    int n;
+    if((rtpInfo.header.payloadType != _lastPayloadType) &&
+        (_lastPayloadType != -1))
+    {
+        // Payload type changed: close the bookkeeping for the previous
+        // type. Its last packet is deliberately ignored to keep the
+        // calculation simple.
+        for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+        {
+            if(_lastPayloadType == _payloadStats[n].payloadType)
+            {
+                _payloadStats[n].newPacket = true;
+                break;
+            }
+        }
+    }
+    _lastPayloadType = rtpInfo.header.payloadType;
+
+    bool newPayload = true;
+    // BUG FIX: initialize to NULL so a logic error can never dereference
+    // an uninitialized pointer.
+    ACMTestPayloadStats* currentPayloadStr = NULL;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        if(rtpInfo.header.payloadType == _payloadStats[n].payloadType)
+        {
+            newPayload = false;
+            currentPayloadStr = &_payloadStats[n];
+            break;
+        }
+    }
+
+    if(!newPayload)
+    {
+        if(!currentPayloadStr->newPacket)
+        {
+            // The frame size is inferred from the timestamp increment since
+            // the previous packet of this payload type.
+            WebRtc_UWord32 lastFrameSizeSample = (WebRtc_UWord32)((WebRtc_UWord32)rtpInfo.header.timestamp -
+                (WebRtc_UWord32)currentPayloadStr->lastTimestamp);
+            assert(lastFrameSizeSample > 0);
+            // Find the row tracking this frame size, or the first free row.
+            // BUG FIX: bound the search — the original loop could run past
+            // the end of frameSizeStats[] once all rows were occupied.
+            int k = 0;
+            while((k < MAX_NUM_FRAMESIZES) &&
+                (currentPayloadStr->frameSizeStats[k].frameSizeSample !=
+                lastFrameSizeSample) && 
+                (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0))
+            {
+                k++;
+            }
+            if(k == MAX_NUM_FRAMESIZES)
+            {
+                // No free row left; record the packet for next time but do
+                // not write out of bounds.
+                currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+                currentPayloadStr->lastPayloadLenByte = payloadSize;
+                return;
+            }
+            ACMTestFrameSizeStats* currentFrameSizeStats = 
+                &(currentPayloadStr->frameSizeStats[k]);
+            currentFrameSizeStats->frameSizeSample = (WebRtc_Word16)lastFrameSizeSample;
+
+            // increment the number of encoded samples.
+            currentFrameSizeStats->totalEncodedSamples +=
+                lastFrameSizeSample;
+            // increment the number of received packets
+            currentFrameSizeStats->numPackets++;
+            // Accumulate the byte count of the *previous* packet; the frame
+            // size of the current packet is not known yet.
+            currentFrameSizeStats->totalPayloadLenByte += 
+                currentPayloadStr->lastPayloadLenByte;
+            // Track the maximum payload size (again based on the previous
+            // packet, for the same reason).
+            if(currentFrameSizeStats->maxPayloadLen < 
+                currentPayloadStr->lastPayloadLenByte)
+            {
+                currentFrameSizeStats->maxPayloadLen = 
+                    currentPayloadStr->lastPayloadLenByte;
+            }
+            // store the current values for the next time
+            currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+            currentPayloadStr->lastPayloadLenByte = payloadSize;
+        }
+        else
+        {
+            // First packet after a payload-type switch: just record it.
+            currentPayloadStr->newPacket          = false;
+            currentPayloadStr->lastPayloadLenByte = payloadSize;
+            currentPayloadStr->lastTimestamp      = rtpInfo.header.timestamp;
+            currentPayloadStr->payloadType        = rtpInfo.header.payloadType;
+        }
+    }
+    else
+    {
+        // First packet of a never-seen payload type: claim a free slot.
+        // BUG FIX: bound the search — the original loop could run past the
+        // end of _payloadStats[] once all slots were occupied.
+        n = 0;
+        while((n < MAX_NUM_PAYLOADS) && (_payloadStats[n].payloadType != -1))
+        {
+            n++;
+        }
+        if(n == MAX_NUM_PAYLOADS)
+        {
+            return;
+        }
+        // first packet
+        _payloadStats[n].newPacket          = false;
+        _payloadStats[n].lastPayloadLenByte = payloadSize;
+        _payloadStats[n].lastTimestamp      = rtpInfo.header.timestamp;
+        _payloadStats[n].payloadType        = rtpInfo.header.payloadType;
+    }
+}
+
+Channel::Channel(WebRtc_Word16 chID) :
+_receiverACM(NULL),
+_seqNo(0),
+_channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_bitStreamFile(NULL),
+_saveBitStream(false),
+_lastPayloadType(-1),
+_isStereo(false),
+_leftChannel(true),
+_lastInTimestamp(0),
+_packetLoss(0),
+_useFECTestWithPacketLoss(false),
+_chID(chID),
+_beginTime(TickTime::MillisecondTimestamp()),
+_totalBytes(0)
+{
+    // Zero all statistics slots; payloadType == -1 marks a slot as unused.
+    int n;
+    int k;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        _payloadStats[n].payloadType = -1;
+        _payloadStats[n].newPacket   = true;
+        for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
+        {
+            _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+            _payloadStats[n].frameSizeStats[k].maxPayloadLen   = 0;
+            _payloadStats[n].frameSizeStats[k].numPackets      = 0;
+            _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+            _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+        }
+    }
+    // A non-negative channel ID requests dumping the bit-stream to a file.
+    if(chID >= 0)
+    {
+        char bitStreamFileName[500];
+        sprintf(bitStreamFileName, "bitStream_%d.dat", chID); 
+        _bitStreamFile = fopen(bitStreamFileName, "wb");
+        // BUG FIX: only enable saving when the file actually opened; the
+        // original set _saveBitStream = true before checking fopen().
+        _saveBitStream = (_bitStreamFile != NULL);
+    }
+    else
+    {
+        _saveBitStream = false;
+    }
+}
+
+Channel::~Channel()
+{
+    // BUG FIX: close the bit-stream dump file opened by the constructor;
+    // the original leaked the FILE handle.
+    if(_bitStreamFile != NULL)
+    {
+        fclose(_bitStreamFile);
+    }
+    delete _channelCritSect;
+}
+
+void 
+Channel::RegisterReceiverACM(AudioCodingModule* acm)
+{
+    // Remember the ACM that packets produced by SendData() are fed into.
+    _receiverACM = acm;
+}
+
+void 
+Channel::ResetStats()
+{
+    // Clears all per-payload bookkeeping and restarts the bit-rate
+    // measurement window, all under the channel lock.
+    _channelCritSect->Enter();
+    _lastPayloadType = -1;
+    for(int payload = 0; payload < MAX_NUM_PAYLOADS; payload++)
+    {
+        ACMTestPayloadStats& slot = _payloadStats[payload];
+        slot.payloadType = -1;
+        slot.newPacket   = true;
+        for(int size = 0; size < MAX_NUM_FRAMESIZES; size++)
+        {
+            ACMTestFrameSizeStats& row = slot.frameSizeStats[size];
+            row.frameSizeSample     = 0;
+            row.maxPayloadLen       = 0;
+            row.numPackets          = 0;
+            row.totalPayloadLenByte = 0;
+            row.totalEncodedSamples = 0;
+        }
+    }
+    _beginTime = TickTime::MillisecondTimestamp();
+    _totalBytes = 0;
+    _channelCritSect->Leave();
+}
+
+WebRtc_Word16 
+Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
+{
+    // Copies the statistics gathered for codecInst's payload type into
+    // payloadStats and fills in the derived rate/duration fields.
+    // Returns 0 on success, -1 if no packet of that type was ever seen.
+    _channelCritSect->Enter();
+    int n;
+    payloadStats.payloadType = -1;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        if(_payloadStats[n].payloadType == codecInst.pltype)
+        {
+            memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
+            break;
+        }
+    }
+    if(payloadStats.payloadType == -1)
+    {
+        // Payload type never seen on this channel.
+        _channelCritSect->Leave();
+        return -1;
+    }
+    for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
+    {
+        // frameSizeSample == 0 marks the end of the used rows.
+        if(payloadStats.frameSizeStats[n].frameSizeSample == 0)
+        {
+            _channelCritSect->Leave();
+            return 0;
+        }
+        payloadStats.frameSizeStats[n].usageLenSec = 
+            (double)payloadStats.frameSizeStats[n].totalEncodedSamples
+            / (double)codecInst.plfreq;
+
+        // NOTE(review): if totalEncodedSamples is 0, usageLenSec is 0 and
+        // this division yields inf (IEEE double) — confirm callers cope.
+        payloadStats.frameSizeStats[n].rateBitPerSec = 
+            payloadStats.frameSizeStats[n].totalPayloadLenByte * 8 / 
+            payloadStats.frameSizeStats[n].usageLenSec;
+
+    }
+    _channelCritSect->Leave();
+    return 0;
+}
+
+void 
+Channel::Stats(WebRtc_UWord32* numPackets)
+{
+    // Fills numPackets[] (one entry per tracked payload type, at least
+    // MAX_NUM_PAYLOADS entries) with the packet count summed over all
+    // frame sizes; unused entries are left at zero.
+    _channelCritSect->Enter();
+    memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
+    for(int payload = 0; payload < MAX_NUM_PAYLOADS; payload++)
+    {
+        if(_payloadStats[payload].payloadType == -1)
+        {
+            break;  // end of the used slots
+        }
+        numPackets[payload] = 0;
+        for(int size = 0; size < MAX_NUM_FRAMESIZES; size++)
+        {
+            if(_payloadStats[payload].frameSizeStats[size].frameSizeSample == 0)
+            {
+                break;  // end of the used frame-size rows
+            }
+            numPackets[payload] +=
+                _payloadStats[payload].frameSizeStats[size].numPackets;
+        }
+    }
+    _channelCritSect->Leave();
+}
+
+void 
+Channel::Stats(WebRtc_UWord8* payloadType, WebRtc_UWord32* payloadLenByte)
+{
+    // Reports, per tracked payload type, the payload-type number and the
+    // total payload bytes summed over all frame sizes. Both output arrays
+    // must hold at least MAX_NUM_PAYLOADS entries.
+    _channelCritSect->Enter();
+    
+    memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
+    for(int payload = 0; payload < MAX_NUM_PAYLOADS; payload++)
+    {
+        if(_payloadStats[payload].payloadType == -1)
+        {
+            break;  // end of the used slots
+        }
+        payloadType[payload] = (WebRtc_UWord8)_payloadStats[payload].payloadType;
+        payloadLenByte[payload] = 0;
+        for(int size = 0; size < MAX_NUM_FRAMESIZES; size++)
+        {
+            if(_payloadStats[payload].frameSizeStats[size].frameSizeSample == 0)
+            {
+                break;  // end of the used frame-size rows
+            }
+            payloadLenByte[payload] += (WebRtc_UWord16)
+                _payloadStats[payload].frameSizeStats[size].totalPayloadLenByte;
+        }
+    }
+
+    _channelCritSect->Leave();
+}
+
+
+void
+Channel::PrintStats(CodecInst& codecInst)
+{
+    // Pretty-prints the statistics gathered for codecInst's payload type,
+    // one block per observed frame size.
+    ACMTestPayloadStats payloadStats;
+    Stats(codecInst, payloadStats);
+    printf("%s %d kHz\n", 
+        codecInst.plname,
+        codecInst.plfreq / 1000);
+    printf("=====================================================\n");
+    if(payloadStats.payloadType == -1)
+    {
+        printf("No Packets are sent with payload-type %d (%s)\n\n",
+            codecInst.pltype,
+            codecInst.plname);
+        return;
+    }
+    for(int idx = 0; idx < MAX_NUM_FRAMESIZES; idx++)
+    {
+        const ACMTestFrameSizeStats& row = payloadStats.frameSizeStats[idx];
+        if(row.frameSizeSample == 0)
+        {
+            break;  // no more frame sizes recorded
+        }
+        printf("Frame-size.................... %d samples\n", 
+            row.frameSizeSample);
+        printf("Average Rate.................. %.0f bits/sec\n", 
+            row.rateBitPerSec);
+        printf("Maximum Payload-Size.......... %d Bytes\n",
+            row.maxPayloadLen);
+        printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
+            ((double)row.maxPayloadLen * 8.0 * 
+            (double)codecInst.plfreq) / 
+            (double)row.frameSizeSample);
+        printf("Number of Packets............. %u\n",
+               (unsigned int)row.numPackets);
+        printf("Duration...................... %0.3f sec\n\n", 
+            row.usageLenSec);
+    }
+}
+
+WebRtc_UWord32
+Channel::LastInTimestamp()
+{
+    // Returns the RTP timestamp of the most recent frame passed to
+    // SendData(), read under the channel lock.
+    _channelCritSect->Enter();
+    const WebRtc_UWord32 stamp = _lastInTimestamp;
+    _channelCritSect->Leave();
+    return stamp;
+}
+
+double
+Channel::BitRate()
+{
+    // Average channel bit-rate in kbit/s (bytes * 8 / elapsed ms) since
+    // construction or the last ResetStats().
+    WebRtc_UWord64 currTime = TickTime::MillisecondTimestamp();
+    _channelCritSect->Enter();
+    WebRtc_UWord64 elapsedMs = currTime - _beginTime;
+    // BUG FIX: guard against division by zero when called within the same
+    // millisecond as _beginTime (the original produced inf/nan).
+    double rate = (elapsedMs > 0) ?
+        ((double)_totalBytes * 8.0) / (double)elapsedMs : 0.0;
+    _channelCritSect->Leave();
+    return rate;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/Channel.h b/trunk/src/modules/audio_coding/main/test/Channel.h
new file mode 100644
index 0000000..375bec7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/Channel.h
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CHANNEL_H
+#define CHANNEL_H
+
+#include <stdio.h>
+
+#include "audio_coding_module.h"
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+
+namespace webrtc {
+
+#define MAX_NUM_PAYLOADS   50
+#define MAX_NUM_FRAMESIZES  6
+
+
+// Per-frame-size accumulator: payload statistics gathered for one encoder
+// frame size (in samples) of one payload type.
+struct ACMTestFrameSizeStats
+{
+    WebRtc_UWord16 frameSizeSample;      // frame size this slot tracks, in samples
+    WebRtc_Word16  maxPayloadLen;        // largest payload seen, in bytes
+    WebRtc_UWord32 numPackets;
+    WebRtc_UWord64 totalPayloadLenByte;
+    WebRtc_UWord64 totalEncodedSamples;
+    double         rateBitPerSec;
+    double         usageLenSec;
+           
+};
+
+// Per-payload-type statistics: bookkeeping for the last packet seen plus
+// one ACMTestFrameSizeStats slot per distinct frame size observed.
+struct ACMTestPayloadStats
+{
+    bool                  newPacket;
+    WebRtc_Word16         payloadType;
+    WebRtc_Word16         lastPayloadLenByte;
+    WebRtc_UWord32        lastTimestamp;
+    ACMTestFrameSizeStats frameSizeStats[MAX_NUM_FRAMESIZES];
+};
+
+// Test transport: receives encoded frames from a sending ACM via the
+// AudioPacketizationCallback interface, collects per-payload statistics,
+// and forwards the packets to a registered receiving ACM.
+class Channel: public AudioPacketizationCallback
+{
+public:
+
+    Channel(
+        WebRtc_Word16 chID = -1);
+    ~Channel();
+
+    // AudioPacketizationCallback hook: called by the ACM for every encoded
+    // frame ready to be "sent".
+    WebRtc_Word32 SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData, 
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation);
+
+    void RegisterReceiverACM(
+        AudioCodingModule *acm);
+    
+    void ResetStats();
+    
+    WebRtc_Word16 Stats(
+        CodecInst&           codecInst,
+        ACMTestPayloadStats& payloadStats);
+    
+    void Stats(
+        WebRtc_UWord32* numPackets);
+    
+    void Stats(
+        WebRtc_UWord8*  payloadLenByte, 
+        WebRtc_UWord32* payloadType);
+    
+    void PrintStats(
+        CodecInst& codecInst);
+    
+    void SetIsStereo(bool isStereo)
+    {
+        _isStereo = isStereo;
+    }
+
+    // Timestamp of the most recently received packet (thread-safe).
+    WebRtc_UWord32 LastInTimestamp();
+    
+    void SetFECTestWithPacketLoss(bool usePacketLoss)
+    {
+        _useFECTestWithPacketLoss = usePacketLoss;
+    }
+
+    // Session-average rate; see Channel.cc for units.
+    double BitRate();
+
+private:
+    void CalcStatistics(
+        WebRtcRTPHeader& rtpInfo,
+        WebRtc_UWord16   payloadSize);
+
+    AudioCodingModule*      _receiverACM;
+    WebRtc_UWord16          _seqNo;
+    // 60msec * 32 sample(max)/msec * 2 description (maybe) * 2 bytes/sample
+    WebRtc_UWord8           _payloadData[60 * 32 * 2 * 2];
+
+    CriticalSectionWrapper* _channelCritSect;
+    FILE*                   _bitStreamFile;
+    bool                    _saveBitStream;
+    WebRtc_Word16           _lastPayloadType;
+    ACMTestPayloadStats     _payloadStats[MAX_NUM_PAYLOADS];
+    bool                    _isStereo;
+    WebRtcRTPHeader         _rtpInfo;
+    bool                    _leftChannel;
+    WebRtc_UWord32          _lastInTimestamp;
+    // FEC Test variables
+    WebRtc_Word16           _packetLoss;
+    bool                    _useFECTestWithPacketLoss;
+    WebRtc_Word16           _chID;
+    WebRtc_UWord64          _beginTime;   // session start, ms (see BitRate())
+    WebRtc_UWord64          _totalBytes;  // total payload bytes received
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/EncodeDecodeTest.cc b/trunk/src/modules/audio_coding/main/test/EncodeDecodeTest.cc
new file mode 100644
index 0000000..74bb84c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/EncodeDecodeTest.cc
@@ -0,0 +1,407 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "EncodeDecodeTest.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "gtest/gtest.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+TestPacketization::TestPacketization(RTPStream *rtpStream,
+                                     WebRtc_UWord16 frequency)
+    : _rtpStream(rtpStream),
+      _frequency(frequency),
+      _seqNo(0) {
+}
+
+TestPacketization::~TestPacketization() { }
+
+// AudioPacketizationCallback hook: writes one encoded payload to the RTP
+// stream with an auto-incrementing sequence number. Frame type and
+// fragmentation info are ignored. Always returns 1.
+WebRtc_Word32 TestPacketization::SendData(
+    const FrameType /* frameType */,
+    const WebRtc_UWord8 payloadType,
+    const WebRtc_UWord32 timeStamp,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadSize,
+    const RTPFragmentationHeader* /* fragmentation */) {
+  _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
+                    _frequency);
+  return 1;
+}
+
+// Sender: default-initializes all members; real setup happens in Setup().
+Sender::Sender()
+    : _acm(NULL),
+      _pcmFile(),
+      _audioFrame(),
+      _payloadSize(0),
+      _timeStamp(0),
+      _packetization(NULL) {
+}
+
+// Configures the sending side: selects a codec (from `codeId` in modes 0/1,
+// interactively otherwise), opens the 32 kHz input PCM file, registers the
+// codec as the send codec, and installs a TestPacketization transport that
+// writes packets to `rtpStream`.
+void Sender::Setup(AudioCodingModule *acm, RTPStream *rtpStream) {
+  acm->InitializeSender();
+  struct CodecInst sendCodec;
+  int noOfCodecs = acm->NumberOfCodecs();
+  int codecNo;
+
+  if (testMode == 1) {
+    // Set the codec, input file, and parameters for the current test.
+    codecNo = codeId;
+    // Use same input file for now.
+    char fileName[] = "./test/data/audio_coding/testfile32kHz.pcm";
+    _pcmFile.Open(fileName, 32000, "rb");
+  } else if (testMode == 0) {
+    // Set the codec, input file, and parameters for the current test.
+    codecNo = codeId;
+    acm->Codec(codecNo, sendCodec);
+    // Use same input file for now.
+    char fileName[] = "./test/data/audio_coding/testfile32kHz.pcm";
+    _pcmFile.Open(fileName, 32000, "rb");
+  } else {
+    // Interactive mode: let the user pick a codec from the list.
+    printf("List of supported codec.\n");
+    for (int n = 0; n < noOfCodecs; n++) {
+      acm->Codec(n, sendCodec);
+      printf("%d %s\n", n, sendCodec.plname);
+    }
+    printf("Choose your codec:");
+    ASSERT_GT(scanf("%d", &codecNo), 0);
+    char fileName[] = "./test/data/audio_coding/testfile32kHz.pcm";
+    _pcmFile.Open(fileName, 32000, "rb");
+  }
+
+  // sendCodec is (re)fetched here for all modes before registration.
+  acm->Codec(codecNo, sendCodec);
+  acm->RegisterSendCodec(sendCodec);
+  _packetization = new TestPacketization(rtpStream, sendCodec.plfreq);
+  if (acm->RegisterTransportCallback(_packetization) < 0) {
+    printf("Registering Transport Callback failed, for run: codecId: %d: --\n",
+           codeId);
+  }
+
+    _acm = acm;
+  }
+
+// Releases per-run resources: closes the input file and deletes the
+// transport callback allocated in Setup().
+void Sender::Teardown() {
+  _pcmFile.Close();
+  delete _packetization;
+}
+
+// Feeds one 10 ms chunk of PCM into the ACM. Returns false once the input
+// file is exhausted; exits the process on an ACM error.
+bool Sender::Add10MsData() {
+  if (!_pcmFile.EndOfFile()) {
+    _pcmFile.Read10MsData(_audioFrame);
+    WebRtc_Word32 ok = _acm->Add10MsData(_audioFrame);
+    if (ok != 0) {
+      printf("Error calling Add10MsData: for run: codecId: %d\n", codeId);
+      exit(1);
+    }
+    return true;
+  }
+  return false;
+}
+
+// Drives the ACM's encode step; exits the process on error.
+// (The error message text is reused from Add10MsData.)
+bool Sender::Process() {
+  WebRtc_Word32 ok = _acm->Process();
+  if (ok < 0) {
+    printf("Error calling Add10MsData: for run: codecId: %d\n", codeId);
+    exit(1);
+  }
+  return true;
+}
+
+// Main send loop: alternately feed 10 ms of audio and run the encoder
+// until the input file ends.
+void Sender::Run() {
+  while (true) {
+    if (!Add10MsData()) {
+      break;
+    }
+    if (!Process()) { // This could be done in a processing thread
+      break;
+    }
+  }
+}
+
+Receiver::Receiver()
+    : _playoutLengthSmpls(WEBRTC_10MS_PCM_AUDIO),
+      _payloadSizeBytes(MAX_INCOMING_PAYLOAD) {
+}
+
+// Configures the receiving side: registers every supported codec as a
+// receive codec, chooses an output sampling frequency per test mode, and
+// opens the playout PCM file.
+void Receiver::Setup(AudioCodingModule *acm, RTPStream *rtpStream) {
+  struct CodecInst recvCodec;
+  int noOfCodecs;
+  acm->InitializeReceiver();
+
+  noOfCodecs = acm->NumberOfCodecs();
+  for (int i = 0; i < noOfCodecs; i++) {
+    acm->Codec((WebRtc_UWord8) i, recvCodec);
+    if (acm->RegisterReceiveCodec(recvCodec) != 0) {
+      printf("Unable to register codec: for run: codecId: %d\n", codeId);
+      exit(1);
+    }
+  }
+
+  char filename[128];
+  _rtpStream = rtpStream;
+  int playSampFreq;
+
+  if (testMode == 1) {
+    // NOTE(review): recvCodec here still holds the *last* codec from the
+    // registration loop above, not the codec under test — confirm intended.
+    playSampFreq=recvCodec.plfreq;
+    //output file for current run
+    sprintf(filename,"%s/out%dFile.pcm", webrtc::test::OutputPath().c_str(),
+            codeId);
+    _pcmFile.Open(filename, recvCodec.plfreq, "wb+");
+  } else if (testMode == 0) {
+    playSampFreq=32000;
+    //output file for current run
+    sprintf(filename,  "%s/encodeDecode_out%d.pcm",
+            webrtc::test::OutputPath().c_str(), codeId);
+    _pcmFile.Open(filename, 32000/*recvCodec.plfreq*/, "wb+");
+  } else {
+    // Interactive mode: ask the user for the playout frequency.
+    printf("\nValid output frequencies:\n");
+    printf("8000\n16000\n32000\n-1,");
+    printf("which means output freq equal to received signal freq");
+    printf("\n\nChoose output sampling frequency: ");
+    ASSERT_GT(scanf("%d", &playSampFreq), 0);
+    sprintf(filename,  "%s/outFile.pcm", webrtc::test::OutputPath().c_str());
+    _pcmFile.Open(filename, 32000, "wb+");
+  }
+
+  _realPayloadSizeBytes = 0;
+  _playoutBuffer = new WebRtc_Word16[WEBRTC_10MS_PCM_AUDIO];
+  _frequency = playSampFreq;
+  _acm = acm;
+  _firstTime = true;
+}
+
+// Releases per-run resources; returns the trace only in interactive mode
+// (testMode > 1), matching the ctor that created it.
+void Receiver::Teardown() {
+  delete [] _playoutBuffer;
+  _pcmFile.Close();
+  if (testMode > 1)
+    Trace::ReturnTrace();
+}
+
+// Feeds the next packet from the RTP file into the ACM, then pre-reads the
+// following packet so _nextTime is known for the scheduler in Run().
+// On the very first call the initial packet is primed first. Returns false
+// only on a read error; end-of-file resets _firstTime and returns true.
+bool Receiver::IncomingPacket() {
+  if (!_rtpStream->EndOfFile()) {
+    if (_firstTime) {
+      _firstTime = false;
+      _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+                                               _payloadSizeBytes, &_nextTime);
+      if (_realPayloadSizeBytes == 0) {
+        if (_rtpStream->EndOfFile()) {
+          _firstTime = true;
+          return true;
+        } else {
+          printf("Error in reading incoming payload.\n");
+          return false;
+        }
+      }
+   }
+
+   WebRtc_Word32 ok = _acm->IncomingPacket(_incomingPayload,
+                                           _realPayloadSizeBytes, _rtpInfo);
+   if (ok != 0) {
+     printf("Error when inserting packet to ACM, for run: codecId: %d\n",
+            codeId);
+     exit(1);
+   }
+   // Read ahead: fetch the next packet and its scheduled time.
+   _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+                                            _payloadSizeBytes, &_nextTime);
+    if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) {
+      _firstTime = true;
+    }
+  }
+  return true;
+}
+
+// Pulls 10 ms of decoded audio at _frequency from the ACM and appends it to
+// the output PCM file. Exits the process on a decode error.
+// NOTE(review): _playoutLengthSmpls is set once in the ctor and never
+// updated in this file, so the early-return below looks unreachable here —
+// confirm before relying on it.
+bool Receiver::PlayoutData() {
+  AudioFrame audioFrame;
+
+  if (_acm->PlayoutData10Ms(_frequency, audioFrame) != 0) {
+    printf("Error when calling PlayoutData10Ms, for run: codecId: %d\n",
+           codeId);
+    exit(1);
+  }
+  if (_playoutLengthSmpls == 0) {
+    return false;
+  }
+  _pcmFile.Write10MsData(audioFrame._payloadData,
+                         audioFrame._payloadDataLengthInSamples);
+  return true;
+}
+
+// Main receive loop driven by a simulated millisecond clock: insert packets
+// when their scheduled time arrives, play out every 10 ms, and keep running
+// 500 ms (50 iterations) past the end of the RTP file to drain the jitter
+// buffer.
+void Receiver::Run() {
+  WebRtc_UWord8 counter500Ms = 50;
+  WebRtc_UWord32 clock = 0;
+
+  while (counter500Ms > 0) {
+    if (clock == 0 || clock >= _nextTime) {
+      IncomingPacket();
+      if (clock == 0) {
+        clock = _nextTime;
+      }
+    }
+    if ((clock % 10) == 0) {
+      if (!PlayoutData()) {
+        clock++;
+        continue;
+      }
+    }
+    if (_rtpStream->EndOfFile()) {
+      counter500Ms--;
+    }
+    clock++;
+  }
+}
+
+// Default ctor: interactive mode (2), with tracing to acm_encdec_test.txt.
+EncodeDecodeTest::EncodeDecodeTest() {
+  _testMode = 2;
+  Trace::CreateTrace();
+  Trace::SetTraceFile("acm_encdec_test.txt");
+}
+
+// Mode-selecting ctor; tracing is skipped only for autotest (mode 0).
+EncodeDecodeTest::EncodeDecodeTest(int testMode) {
+  //testMode == 0 for autotest
+  //testMode == 1 for testing all codecs/parameters
+  //testMode > 1 for specific user-input test (as it was used before)
+ _testMode = testMode;
+ if(_testMode != 0) {
+   Trace::CreateTrace();
+   Trace::SetTraceFile("acm_encdec_test.txt");
+ }
+}
+
+// Top-level driver: for each codec (per _testMode), encode the test file to
+// an RTP dump via EncodeToFile(), then decode it back through a Receiver.
+void EncodeDecodeTest::Perform() {
+  if (_testMode == 0) {
+    printf("Running Encode/Decode Test");
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
+                 "---------- EncodeDecodeTest ----------");
+  }
+
+  int numCodecs = 1;
+  int codePars[3]; //freq, pacsize, rate
+  int numPars[52]; //number of codec parameters sets (rate,freq,pacsize)to test,
+                   //for a given codec
+                   // NOTE(review): fixed room for 52 codecs; assumes
+                   // NumberOfCodecs() <= 52 — confirm.
+
+  codePars[0] = 0;
+  codePars[1] = 0;
+  codePars[2] = 0;
+
+  if (_testMode == 1) {
+    // Enumerate codecs; give pseudo-codecs (telephone-event/cn/red) zero
+    // parameter sets so the run loop below skips them.
+    AudioCodingModule *acmTmp = AudioCodingModule::Create(0);
+    struct CodecInst sendCodecTmp;
+    numCodecs = acmTmp->NumberOfCodecs();
+    printf("List of supported codec.\n");
+    for(int n = 0; n < numCodecs; n++) {
+      acmTmp->Codec(n, sendCodecTmp);
+      if (STR_CASE_CMP(sendCodecTmp.plname, "telephone-event") == 0) {
+        numPars[n] = 0;
+      } else if (STR_CASE_CMP(sendCodecTmp.plname, "cn") == 0) {
+        numPars[n] = 0;
+      } else if (STR_CASE_CMP(sendCodecTmp.plname, "red") == 0) {
+        numPars[n] = 0;
+      } else {
+        numPars[n] = 1;
+        printf("%d %s\n", n, sendCodecTmp.plname);
+      }
+    }
+    AudioCodingModule::Destroy(acmTmp);
+  } else if (_testMode == 0) {
+    AudioCodingModule *acmTmp = AudioCodingModule::Create(0);
+    numCodecs = acmTmp->NumberOfCodecs();
+    struct CodecInst dummyCodec;
+
+    //choose range of testing for codecs/parameters
+    for(int i = 0 ; i < numCodecs ; i++) {
+      numPars[i] = 1;
+      acmTmp->Codec(i, dummyCodec);
+      if (STR_CASE_CMP(dummyCodec.plname, "telephone-event") == 0) {
+        numPars[i] = 0;
+      } else if (STR_CASE_CMP(dummyCodec.plname, "cn") == 0) {
+        numPars[i] = 0;
+      } else if (STR_CASE_CMP(dummyCodec.plname, "red") == 0) {
+        numPars[i] = 0;
+      }
+    }
+    // BUG FIX: acmTmp was previously destroyed *before* the loop above,
+    // making every acmTmp->Codec() call a use-after-destroy. Destroy only
+    // once enumeration is finished.
+    AudioCodingModule::Destroy(acmTmp);
+  } else {
+    numCodecs = 1;
+    numPars[0] = 1;
+  }
+
+  _receiver.testMode = _testMode;
+
+  //loop over all codecs:
+  for (int codeId = 0; codeId < numCodecs; codeId++) {
+    //only encode using real encoders, not telephone-event and cn
+    for (int loopPars = 1; loopPars <= numPars[codeId]; loopPars++) {
+      if (_testMode == 1) {
+        printf("\n");
+        printf("***FOR RUN: codeId: %d\n", codeId);
+        printf("\n");
+      } else if (_testMode == 0) {
+        printf(".");
+      }
+
+      // Encode the input file into outFile.rtp with this codec ...
+      EncodeToFile(1, codeId, codePars, _testMode);
+
+      // ... then decode it back through a fresh ACM instance.
+      AudioCodingModule *acm = AudioCodingModule::Create(10);
+      RTPFile rtpFile;
+      std::string fileName = webrtc::test::OutputPath() + "outFile.rtp";
+      rtpFile.Open(fileName.c_str(), "rb");
+
+      _receiver.codeId = codeId;
+
+      rtpFile.ReadHeader();
+      _receiver.Setup(acm, &rtpFile);
+      _receiver.Run();
+      _receiver.Teardown();
+      rtpFile.Close();
+      AudioCodingModule::Destroy(acm);
+
+      if (_testMode == 1) {
+        printf("***COMPLETED RUN FOR: codecID: %d ***\n", codeId);
+      }
+    }
+  }
+  if (_testMode == 0) {
+    printf("Done!\n");
+  }
+  if (_testMode == 1)
+    Trace::ReturnTrace();
+}
+
+// Encodes the sender's input file with codec `codeId` and writes the
+// resulting packets (plus header) to outFile.rtp in the output path.
+// `fileType` and `codePars` are accepted but not used in this function body.
+void EncodeDecodeTest::EncodeToFile(int fileType, int codeId, int* codePars,
+                                    int testMode) {
+  AudioCodingModule *acm = AudioCodingModule::Create(0);
+  RTPFile rtpFile;
+  std::string fileName = webrtc::test::OutputPath() + "outFile.rtp";
+  rtpFile.Open(fileName.c_str(), "wb+");
+  rtpFile.WriteHeader();
+
+  //for auto_test and logging
+  _sender.testMode = testMode;
+  _sender.codeId = codeId;
+
+  _sender.Setup(acm, &rtpFile);
+  struct CodecInst sendCodecInst;
+  // Only run if a send codec was successfully registered in Setup().
+  if (acm->SendCodec(sendCodecInst) >= 0) {
+    _sender.Run();
+  }
+  _sender.Teardown();
+  rtpFile.Close();
+  AudioCodingModule::Destroy(acm);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/EncodeDecodeTest.h b/trunk/src/modules/audio_coding/main/test/EncodeDecodeTest.h
new file mode 100644
index 0000000..a730fea
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/EncodeDecodeTest.h
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
+
+#include <stdio.h>
+
+#include "ACMTest.h"
+#include "audio_coding_module.h"
+#include "RTPFile.h"
+#include "PCMFile.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+#define MAX_INCOMING_PAYLOAD 8096
+
+// TestPacketization callback which writes the encoded payloads to file
+// TestPacketization callback which writes the encoded payloads to file.
+class TestPacketization: public AudioPacketizationCallback {
+ public:
+  TestPacketization(RTPStream *rtpStream, WebRtc_UWord16 frequency);
+  ~TestPacketization();
+  virtual WebRtc_Word32 SendData(const FrameType frameType,
+                                 const WebRtc_UWord8 payloadType,
+                                 const WebRtc_UWord32 timeStamp,
+                                 const WebRtc_UWord8* payloadData,
+                                 const WebRtc_UWord16 payloadSize,
+                                 const RTPFragmentationHeader* fragmentation);
+
+ private:
+  static void MakeRTPheader(WebRtc_UWord8* rtpHeader, WebRtc_UWord8 payloadType,
+                            WebRtc_Word16 seqNo, WebRtc_UWord32 timeStamp,
+                            WebRtc_UWord32 ssrc);
+  RTPStream* _rtpStream;
+  WebRtc_Word32 _frequency;
+  WebRtc_Word16 _seqNo;
+};
+
+// Encoding half of the test: reads PCM, feeds the ACM, and emits packets
+// through a TestPacketization transport.
+class Sender {
+ public:
+  Sender();
+  void Setup(AudioCodingModule *acm, RTPStream *rtpStream);
+  void Teardown();
+  void Run();
+  bool Add10MsData();
+  bool Process();
+
+  //for auto_test and logging
+  WebRtc_UWord8 testMode;
+  WebRtc_UWord8 codeId;
+
+ private:
+  AudioCodingModule* _acm;
+  PCMFile _pcmFile;
+  AudioFrame _audioFrame;
+  WebRtc_UWord16 _payloadSize;
+  WebRtc_UWord32 _timeStamp;
+  TestPacketization* _packetization;
+};
+
+// Decoding half of the test: reads packets from an RTP file, feeds the ACM,
+// and writes decoded 10 ms frames to a PCM output file.
+class Receiver {
+ public:
+  Receiver();
+  void Setup(AudioCodingModule *acm, RTPStream *rtpStream);
+  void Teardown();
+  void Run();
+  bool IncomingPacket();
+  bool PlayoutData();
+
+  //for auto_test and logging
+  WebRtc_UWord8 codeId;
+  WebRtc_UWord8 testMode;
+
+ private:
+  AudioCodingModule* _acm;
+  bool _rtpEOF;
+  RTPStream* _rtpStream;
+  PCMFile _pcmFile;
+  WebRtc_Word16* _playoutBuffer;
+  WebRtc_UWord16 _playoutLengthSmpls;
+  WebRtc_Word8 _incomingPayload[MAX_INCOMING_PAYLOAD];
+  WebRtc_UWord16 _payloadSizeBytes;
+  WebRtc_UWord16 _realPayloadSizeBytes;
+  WebRtc_Word32 _frequency;
+  bool _firstTime;
+  WebRtcRTPHeader _rtpInfo;
+  WebRtc_UWord32 _nextTime;   // scheduled arrival time of the next packet, ms
+};
+
+// ACMTest implementation tying Sender and Receiver together; see
+// EncodeDecodeTest.cc for the meaning of the test modes.
+class EncodeDecodeTest: public ACMTest {
+ public:
+  EncodeDecodeTest();
+  EncodeDecodeTest(int testMode);
+  virtual void Perform();
+
+  WebRtc_UWord16 _playoutFreq;
+  WebRtc_UWord8 _testMode;
+
+ private:
+  void EncodeToFile(int fileType, int codeId, int* codePars, int testMode);
+
+ protected:
+  Sender _sender;
+  Receiver _receiver;
+};      
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/PCMFile.cc b/trunk/src/modules/audio_coding/main/test/PCMFile.cc
new file mode 100644
index 0000000..520ddbe
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/PCMFile.cc
@@ -0,0 +1,302 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "PCMFile.h"
+
+#include <cctype>
+#include <stdio.h>
+#include <string.h>
+
+#include "gtest/gtest.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+// Default state: 16 kHz mono, 160 samples per 10 ms chunk, no file open.
+// The starting timestamp is randomized (two 16-bit rand() halves combined),
+// mimicking a random RTP timestamp offset.
+PCMFile::PCMFile(): 
+_pcmFile(NULL), 
+_nSamples10Ms(160), 
+_frequency(16000), 
+_endOfFile(false), 
+_autoRewind(false), 
+_rewinded(false),
+_timestamp(0),
+_readStereo(false),
+_saveStereo(false)
+{
+    _timestamp = (((WebRtc_UWord32)rand() & 0x0000FFFF) << 16) |
+        ((WebRtc_UWord32)rand() & 0x0000FFFF);
+}
+
+/*
+PCMFile::~PCMFile()
+{
+    if(_pcmFile != NULL)
+    {
+        fclose(_pcmFile);
+        _pcmFile = NULL;
+    }
+}
+*/
+
+// Reads a file name from stdin into `fileName`, trimming leading and
+// trailing whitespace/control characters. Returns -1 if the trimmed name is
+// longer than `maxLen`, 0 otherwise.
+// NOTE(review): when len == maxLen the strncpy below copies len+1 bytes
+// (including the NUL) — confirm callers size their buffers accordingly.
+WebRtc_Word16
+PCMFile::ChooseFile(
+    char*       fileName, 
+    WebRtc_Word16 maxLen)
+{
+    char tmpName[MAX_FILE_NAME_LENGTH_BYTE];
+    //strcpy(_fileName, "in.pcm");
+    //printf("\n\nPlease enter the input file: ");
+    EXPECT_TRUE(fgets(tmpName, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
+    tmpName[MAX_FILE_NAME_LENGTH_BYTE-1] = '\0';
+    WebRtc_Word16 n = 0;
+
+    // removing leading spaces
+    while((isspace(tmpName[n]) || iscntrl(tmpName[n])) && 
+        (tmpName[n] != 0) && 
+        (n < MAX_FILE_NAME_LENGTH_BYTE))
+    {
+        n++;
+    }
+    if(n > 0)
+    {
+        memmove(tmpName, &tmpName[n], MAX_FILE_NAME_LENGTH_BYTE - n);
+    }
+
+    //removing trailing spaces
+    n = (WebRtc_Word16)(strlen(tmpName) - 1);
+    if(n >= 0)
+    {
+        while((isspace(tmpName[n]) || iscntrl(tmpName[n])) && 
+            (n >= 0))
+        {
+            n--;
+        }
+    }
+    if(n >= 0)
+    {
+        tmpName[n + 1] = '\0';
+    }
+
+    WebRtc_Word16 len = (WebRtc_Word16)strlen(tmpName);
+    if(len > maxLen)
+    {
+        return -1;
+    }    
+    if(len > 0)
+    {
+        strncpy(fileName, tmpName, len+1);
+    }
+    return 0;
+}
+
+// Same as the overload above, but additionally prompts for the file's
+// sampling frequency; *frequencyHz is updated only when the user enters a
+// positive value (otherwise the passed-in default is kept).
+WebRtc_Word16
+PCMFile::ChooseFile(
+    char*         fileName, 
+    WebRtc_Word16   maxLen, 
+    WebRtc_UWord16* frequencyHz)
+{
+    char tmpName[MAX_FILE_NAME_LENGTH_BYTE];
+    //strcpy(_fileName, "in.pcm");
+    //printf("\n\nPlease enter the input file: ");
+    EXPECT_TRUE(fgets(tmpName, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
+    tmpName[MAX_FILE_NAME_LENGTH_BYTE-1] = '\0';
+    WebRtc_Word16 n = 0;
+
+    // removing leading spaces
+    while((isspace(tmpName[n]) || iscntrl(tmpName[n])) && 
+        (tmpName[n] != 0) && 
+        (n < MAX_FILE_NAME_LENGTH_BYTE))
+    {
+        n++;
+    }
+    if(n > 0)
+    {
+        memmove(tmpName, &tmpName[n], MAX_FILE_NAME_LENGTH_BYTE - n);
+    }
+
+    //removing trailing spaces
+    n = (WebRtc_Word16)(strlen(tmpName) - 1);
+    if(n >= 0)
+    {
+        while((isspace(tmpName[n]) || iscntrl(tmpName[n])) && 
+            (n >= 0))
+        {
+            n--;
+        }
+    }
+    if(n >= 0)
+    {
+        tmpName[n + 1] = '\0';
+    }
+
+    WebRtc_Word16 len = (WebRtc_Word16)strlen(tmpName);
+    if(len > maxLen)
+    {
+        return -1;
+    }    
+    if(len > 0)
+    {
+        strncpy(fileName, tmpName, len+1);
+    }
+    printf("Enter the sampling frequency (in Hz) of the above file [%u]: ", *frequencyHz);
+    EXPECT_TRUE(fgets(tmpName, 10, stdin) != NULL);
+    WebRtc_UWord16 tmpFreq = (WebRtc_UWord16)atoi(tmpName);
+    if(tmpFreq > 0)
+    {
+        *frequencyHz = tmpFreq;
+    }
+    return 0;
+}
+
+// Opens `filename` with the given stdio mode, derives the 10 ms chunk size
+// from `frequency` (frequency/100 samples), and resets EOF/rewind state.
+// On failure prints an error and records a gtest failure but continues
+// with _pcmFile == NULL.
+void 
+PCMFile::Open(
+    const char*        filename,
+    WebRtc_UWord16 frequency, 
+    const char*  mode, 
+    bool         autoRewind)
+{
+    if ((_pcmFile = fopen(filename, mode)) == NULL)
+    {
+        printf("Cannot open file %s.\n", filename);
+        ADD_FAILURE() << "Unable to read file";
+    }
+    _frequency = frequency;
+    _nSamples10Ms = (WebRtc_UWord16)(_frequency / 100);
+    _autoRewind = autoRewind;
+    _endOfFile = false;
+    _rewinded = false;
+}
+
+// Sampling frequency of the currently configured file, in Hz.
+WebRtc_Word32 
+PCMFile::SamplingFrequency() const
+{
+    return _frequency;
+}
+
+// Number of samples per channel in one 10 ms chunk.
+WebRtc_UWord16 
+PCMFile::PayloadLength10Ms() const
+{
+    return _nSamples10Ms;
+}
+
+// Reads 10 ms of 16-bit PCM (interleaved stereo when _readStereo is set)
+// into `audioFrame` and stamps it with frequency, channel count, and a
+// monotonically advancing timestamp. On a short read the remainder is
+// zero-filled and either the file is rewound (_autoRewind) or _endOfFile is
+// set. Returns the per-channel sample count (_nSamples10Ms), not the number
+// of samples actually read.
+WebRtc_Word32 
+PCMFile::Read10MsData(
+    AudioFrame& audioFrame)
+{
+    WebRtc_UWord16 noChannels = 1;
+    if (_readStereo)
+    {
+        noChannels = 2;
+    }
+
+    WebRtc_Word32 payloadSize = (WebRtc_Word32)fread(audioFrame._payloadData, sizeof(WebRtc_UWord16), _nSamples10Ms*noChannels, _pcmFile);
+    if (payloadSize < _nSamples10Ms*noChannels) {
+        // Short read: pad with silence, then rewind or flag EOF.
+        for (int k = payloadSize; k < _nSamples10Ms*noChannels; k++)
+        {
+            audioFrame._payloadData[k] = 0;
+        }
+        if(_autoRewind)
+        {
+            rewind(_pcmFile);
+            _rewinded = true;
+        }
+        else
+        {
+            _endOfFile = true;
+        }
+    }
+    audioFrame._payloadDataLengthInSamples = _nSamples10Ms;
+    audioFrame._frequencyInHz = _frequency;
+    audioFrame._audioChannel = noChannels;
+    audioFrame._timeStamp = _timestamp;
+    _timestamp += _nSamples10Ms;
+    return _nSamples10Ms;
+}
+
+// Writes one audio frame to the file. A mono frame is written as-is, or
+// duplicated into interleaved stereo when _saveStereo is set; multi-channel
+// frames are written interleaved as stored.
+void 
+PCMFile::Write10MsData(
+    AudioFrame& audioFrame)
+{
+    if(audioFrame._audioChannel == 1)
+    {
+        if(!_saveStereo)
+        {
+            fwrite(audioFrame._payloadData, sizeof(WebRtc_UWord16), 
+                audioFrame._payloadDataLengthInSamples, _pcmFile);
+        }
+        else
+        {
+            // Duplicate each mono sample into left and right channels.
+            WebRtc_Word16* stereoAudio = new WebRtc_Word16[2 * 
+                audioFrame._payloadDataLengthInSamples];
+            int k;
+            for(k = 0; k < audioFrame._payloadDataLengthInSamples; k++)
+            {
+                stereoAudio[k<<1] = audioFrame._payloadData[k];
+                stereoAudio[(k<<1) + 1] = audioFrame._payloadData[k];
+            }
+            fwrite(stereoAudio, sizeof(WebRtc_Word16), 2*audioFrame._payloadDataLengthInSamples,
+                _pcmFile);
+            delete [] stereoAudio;
+        }
+    }
+    else
+    {
+        fwrite(audioFrame._payloadData, sizeof(WebRtc_Word16), 
+            audioFrame._audioChannel * audioFrame._payloadDataLengthInSamples, _pcmFile);
+    }
+}
+
+
+// Writes a raw buffer of 16-bit samples to the file.
+void 
+PCMFile::Write10MsData(
+    WebRtc_Word16* playoutBuffer, 
+    WebRtc_UWord16 playoutLengthSmpls)
+{
+    fwrite(playoutBuffer, sizeof(WebRtc_UWord16), playoutLengthSmpls, _pcmFile);
+}
+
+
+// Closes the file and clears the handle (safe against double-use via the
+// NULL check in the destructor).
+void 
+PCMFile::Close()
+{
+    fclose(_pcmFile);
+    _pcmFile = NULL;
+}
+
+// Rewinds to the start of the file and clears the EOF flag.
+void 
+PCMFile::Rewind()
+{
+    rewind(_pcmFile);
+    _endOfFile = false;
+}
+
+// True if an auto-rewind has occurred since the file was opened.
+bool 
+PCMFile::Rewinded()
+{
+    return _rewinded;
+}
+
+// Enables/disables mono-to-stereo duplication on write.
+void
+PCMFile::SaveStereo(
+    bool saveStereo)
+{
+    _saveStereo = saveStereo;
+}
+
+// Enables/disables interleaved stereo reading.
+void
+PCMFile::ReadStereo(
+    bool readStereo)
+{
+    _readStereo = readStereo;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/PCMFile.h b/trunk/src/modules/audio_coding/main/test/PCMFile.h
new file mode 100644
index 0000000..a9cb9cf
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/PCMFile.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PCMFILE_H
+#define PCMFILE_H
+
+#include "typedefs.h"
+#include "module_common_types.h"
+#include <cstdio>
+#include <cstdlib>
+
+namespace webrtc {
+
+// Test helper wrapping a raw 16-bit PCM file: chunked 10 ms reads/writes
+// with optional auto-rewind and mono<->stereo handling. See PCMFile.cc.
+class PCMFile
+{
+public:
+    PCMFile();
+    ~PCMFile()
+    {
+        if(_pcmFile != NULL)
+        {
+            fclose(_pcmFile);
+        }
+    }
+    void Open(const char *filename, WebRtc_UWord16 frequency, const char *mode,
+              bool autoRewind = false);
+    
+    WebRtc_Word32 Read10MsData(AudioFrame& audioFrame);
+    
+    void Write10MsData(WebRtc_Word16 *playoutBuffer, WebRtc_UWord16 playoutLengthSmpls);
+    void Write10MsData(AudioFrame& audioFrame);
+
+    WebRtc_UWord16 PayloadLength10Ms() const;
+    WebRtc_Word32 SamplingFrequency() const;
+    void Close();
+    bool EndOfFile() const { return _endOfFile; }
+    void Rewind();
+    // Interactive helpers: read a file name (and optionally a frequency)
+    // from stdin.
+    static WebRtc_Word16 ChooseFile(char* fileName, WebRtc_Word16 maxLen,
+                                    WebRtc_UWord16* frequencyHz);
+    static WebRtc_Word16 ChooseFile(char* fileName, WebRtc_Word16 maxLen);
+    bool Rewinded();
+    void SaveStereo(
+        bool saveStereo = true);
+    void ReadStereo(
+        bool readStereo = true);
+private:
+    FILE*           _pcmFile;
+    WebRtc_UWord16  _nSamples10Ms;   // samples per channel per 10 ms chunk
+    WebRtc_Word32   _frequency;      // sampling frequency, Hz
+    bool            _endOfFile;
+    bool            _autoRewind;     // rewind instead of setting EOF
+    bool            _rewinded;
+    WebRtc_UWord32  _timestamp;      // advances by _nSamples10Ms per read
+    bool            _readStereo;
+    bool            _saveStereo;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/RTPFile.cc b/trunk/src/modules/audio_coding/main/test/RTPFile.cc
new file mode 100644
index 0000000..29d6a1d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/RTPFile.cc
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "RTPFile.h"
+
+#include <stdlib.h>
+
+#ifdef WIN32
+#   include <Winsock2.h>
+#else
+#   include <arpa/inet.h>
+#endif
+
+#include "audio_coding_module.h"
+#include "engine_configurations.h"
+#include "gtest/gtest.h" // TODO (tlegrand): Consider removing usage of gtest.
+#include "rw_lock_wrapper.h"
+
+namespace webrtc {
+
+void RTPStream::ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const WebRtc_UWord8* rtpHeader)
+{
+    rtpInfo->header.payloadType = rtpHeader[1];
+    rtpInfo->header.sequenceNumber = (static_cast<WebRtc_UWord16>(rtpHeader[2])<<8) | rtpHeader[3];
+    rtpInfo->header.timestamp = (static_cast<WebRtc_UWord32>(rtpHeader[4])<<24) |
+                         (static_cast<WebRtc_UWord32>(rtpHeader[5])<<16) |
+                         (static_cast<WebRtc_UWord32>(rtpHeader[6])<<8) |
+                         rtpHeader[7];
+    rtpInfo->header.ssrc = (static_cast<WebRtc_UWord32>(rtpHeader[8])<<24) |
+                    (static_cast<WebRtc_UWord32>(rtpHeader[9])<<16) |
+                    (static_cast<WebRtc_UWord32>(rtpHeader[10])<<8) |
+                    rtpHeader[11];
+}
+
+void RTPStream::MakeRTPheader(WebRtc_UWord8* rtpHeader, 
+                              WebRtc_UWord8 payloadType, WebRtc_Word16 seqNo,
+                              WebRtc_UWord32 timeStamp, WebRtc_UWord32 ssrc)
+{
+    rtpHeader[0]=(unsigned char)0x80;
+    rtpHeader[1]=(unsigned char)(payloadType & 0xFF);
+    rtpHeader[2]=(unsigned char)((seqNo>>8)&0xFF);
+    rtpHeader[3]=(unsigned char)((seqNo)&0xFF);
+    rtpHeader[4]=(unsigned char)((timeStamp>>24)&0xFF);
+    rtpHeader[5]=(unsigned char)((timeStamp>>16)&0xFF);
+
+    rtpHeader[6]=(unsigned char)((timeStamp>>8)&0xFF); 
+    rtpHeader[7]=(unsigned char)(timeStamp & 0xFF);
+
+    rtpHeader[8]=(unsigned char)((ssrc>>24)&0xFF);
+    rtpHeader[9]=(unsigned char)((ssrc>>16)&0xFF);
+
+    rtpHeader[10]=(unsigned char)((ssrc>>8)&0xFF);
+    rtpHeader[11]=(unsigned char)(ssrc & 0xFF);
+}
+
+
+RTPPacket::RTPPacket(WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
+                                    WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                    WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+                                    :
+payloadType(payloadType),
+timeStamp(timeStamp),
+seqNo(seqNo),
+payloadSize(payloadSize),
+frequency(frequency)
+{
+    if (payloadSize > 0)
+    {
+        this->payloadData = new WebRtc_UWord8[payloadSize];
+        memcpy(this->payloadData, payloadData, payloadSize);
+    }
+}
+
+RTPPacket::~RTPPacket()
+{
+    delete [] payloadData;
+}
+
+RTPBuffer::RTPBuffer()
+{
+    _queueRWLock = RWLockWrapper::CreateRWLock();
+}
+
+RTPBuffer::~RTPBuffer()
+{
+    delete _queueRWLock;
+}
+
+void
+RTPBuffer::Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                    const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+{
+    RTPPacket *packet = new RTPPacket(payloadType, timeStamp, seqNo, payloadData, payloadSize, frequency);
+    _queueRWLock->AcquireLockExclusive();
+    _rtpQueue.push(packet);
+    _queueRWLock->ReleaseLockExclusive();
+}
+
+WebRtc_UWord16
+RTPBuffer::Read(WebRtcRTPHeader* rtpInfo,
+                WebRtc_Word8* payloadData,
+                WebRtc_UWord16 payloadSize,
+                WebRtc_UWord32* offset)
+{
+    _queueRWLock->AcquireLockShared();
+    RTPPacket *packet = _rtpQueue.front();
+    _rtpQueue.pop();
+    _queueRWLock->ReleaseLockShared();
+    rtpInfo->header.markerBit = 1;
+    rtpInfo->header.payloadType = packet->payloadType;
+    rtpInfo->header.sequenceNumber = packet->seqNo;
+    rtpInfo->header.ssrc = 0;
+    rtpInfo->header.timestamp = packet->timeStamp;
+    if (packet->payloadSize > 0 && payloadSize >= packet->payloadSize)
+    {
+        memcpy(payloadData, packet->payloadData, packet->payloadSize);
+    }
+    else
+    {
+        return 0;
+    }
+    *offset = (packet->timeStamp/(packet->frequency/1000));
+
+    return packet->payloadSize;
+}
+
+bool
+RTPBuffer::EndOfFile() const
+{
+    _queueRWLock->AcquireLockShared();
+    bool eof = _rtpQueue.empty();
+    _queueRWLock->ReleaseLockShared();
+    return eof;
+}
+
+void RTPFile::Open(const char *filename, const char *mode)
+{
+    if ((_rtpFile = fopen(filename, mode)) == NULL)
+    {
+        printf("Cannot write file %s.\n", filename);
+        ADD_FAILURE() << "Unable to write file";
+        exit(1);
+    }
+}
+
+void RTPFile::Close()
+{
+    if (_rtpFile != NULL)
+    {
+        fclose(_rtpFile);
+        _rtpFile = NULL;
+    }
+}
+
+
+void RTPFile::WriteHeader()
+{
+    // Write data in a format that NetEQ and RTP Play can parse
+    fprintf(_rtpFile, "#!RTPencode%s\n", "1.0");
+    WebRtc_UWord32 dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
+    fwrite(&dummy_variable, 4, 1, _rtpFile);
+    fwrite(&dummy_variable, 4, 1, _rtpFile);
+    fwrite(&dummy_variable, 4, 1, _rtpFile);
+    fwrite(&dummy_variable, 2, 1, _rtpFile);
+    fwrite(&dummy_variable, 2, 1, _rtpFile);
+    fflush(_rtpFile);
+}
+
+void RTPFile::ReadHeader()
+{
+    WebRtc_UWord32 start_sec, start_usec, source;
+    WebRtc_UWord16 port, padding;
+    char fileHeader[40];
+    EXPECT_TRUE(fgets(fileHeader, 40, _rtpFile) != 0);
+    EXPECT_EQ(1u, fread(&start_sec, 4, 1, _rtpFile));
+    start_sec=ntohl(start_sec);
+    EXPECT_EQ(1u, fread(&start_usec, 4, 1, _rtpFile));
+    start_usec=ntohl(start_usec);
+    EXPECT_EQ(1u, fread(&source, 4, 1, _rtpFile));
+    source=ntohl(source);
+    EXPECT_EQ(1u, fread(&port, 2, 1, _rtpFile));
+    port=ntohs(port);
+    EXPECT_EQ(1u, fread(&padding, 2, 1, _rtpFile));
+    padding=ntohs(padding);
+}
+
+void RTPFile::Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                    const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+{
+    /* write RTP packet to file */
+    WebRtc_UWord8 rtpHeader[12];
+    MakeRTPheader(rtpHeader, payloadType, seqNo, timeStamp, 0);
+    WebRtc_UWord16 lengthBytes = htons(12 + payloadSize + 8);
+    WebRtc_UWord16 plen = htons(12 + payloadSize);
+    WebRtc_UWord32 offsetMs;
+
+    offsetMs = (timeStamp/(frequency/1000));
+    offsetMs = htonl(offsetMs);
+    fwrite(&lengthBytes, 2, 1, _rtpFile);
+    fwrite(&plen, 2, 1, _rtpFile);
+    fwrite(&offsetMs, 4, 1, _rtpFile);
+    fwrite(rtpHeader, 12, 1, _rtpFile);
+    fwrite(payloadData, 1, payloadSize, _rtpFile);
+}
+
+WebRtc_UWord16 RTPFile::Read(WebRtcRTPHeader* rtpInfo,
+                   WebRtc_Word8* payloadData, 
+                   WebRtc_UWord16 payloadSize,
+                   WebRtc_UWord32* offset)
+{
+    WebRtc_UWord16 lengthBytes;
+    WebRtc_UWord16 plen;
+    WebRtc_UWord8 rtpHeader[12];
+    size_t read_len = fread(&lengthBytes, 2, 1, _rtpFile);
+    /* Check if we have reached end of file. */
+    if ((read_len == 0) && feof(_rtpFile))
+    {
+        _rtpEOF = true;
+        return 0;
+    }
+    EXPECT_EQ(1u, fread(&plen, 2, 1, _rtpFile));
+    EXPECT_EQ(1u, fread(offset, 4, 1, _rtpFile));
+    lengthBytes = ntohs(lengthBytes);
+    plen = ntohs(plen);
+    *offset = ntohl(*offset);
+    EXPECT_GT(plen, 11);
+
+    EXPECT_EQ(1u, fread(rtpHeader, 12, 1, _rtpFile));
+    ParseRTPHeader(rtpInfo, rtpHeader);
+    rtpInfo->type.Audio.isCNG = false;
+    rtpInfo->type.Audio.channel = 1;
+    EXPECT_EQ(lengthBytes, plen + 8);
+
+    if (plen == 0)
+    {
+        return 0;
+    }
+    if (payloadSize < (lengthBytes - 20))
+    {
+      return -1;
+    }
+    if (lengthBytes < 20)
+    {
+      return -1;
+    }
+    lengthBytes -= 20;
+    EXPECT_EQ(lengthBytes, fread(payloadData, 1, lengthBytes, _rtpFile));
+    return lengthBytes;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/RTPFile.h b/trunk/src/modules/audio_coding/main/test/RTPFile.h
new file mode 100644
index 0000000..e11b160
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/RTPFile.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTPFILE_H
+#define RTPFILE_H
+
+#include "audio_coding_module.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+#include "rw_lock_wrapper.h"
+#include <stdio.h>
+#include <queue>
+
+namespace webrtc {
+
+class RTPStream
+{
+public:
+    virtual ~RTPStream(){}
+
+    virtual void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency) = 0;
+
+    // Returns the packet's payload size. Zero should be treated as an
+    // end-of-stream (in the case that EndOfFile() is true) or an error.
+    virtual WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
+                    WebRtc_Word8* payloadData,
+                    WebRtc_UWord16 payloadSize,
+                    WebRtc_UWord32* offset) = 0;
+    virtual bool EndOfFile() const = 0;
+
+protected:
+    void MakeRTPheader(WebRtc_UWord8* rtpHeader, 
+                                      WebRtc_UWord8 payloadType, WebRtc_Word16 seqNo, 
+                                      WebRtc_UWord32 timeStamp, WebRtc_UWord32 ssrc);
+    void ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const WebRtc_UWord8* rtpHeader);
+};
+
+class RTPPacket
+{
+public:
+    RTPPacket(WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
+                                     WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+    ~RTPPacket();
+    WebRtc_UWord8 payloadType;
+    WebRtc_UWord32 timeStamp;
+    WebRtc_Word16 seqNo;
+    WebRtc_UWord8* payloadData;
+    WebRtc_UWord16 payloadSize;
+    WebRtc_UWord32 frequency;
+};
+
+class RTPBuffer : public RTPStream
+{
+public:
+    RTPBuffer();
+    ~RTPBuffer();
+    void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+    WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
+                    WebRtc_Word8* payloadData,
+                    WebRtc_UWord16 payloadSize,
+                    WebRtc_UWord32* offset);
+    virtual bool EndOfFile() const;
+private:
+    RWLockWrapper*             _queueRWLock;
+    std::queue<RTPPacket *>   _rtpQueue;
+};
+
+class RTPFile : public RTPStream
+{
+public:
+    ~RTPFile(){}
+    RTPFile() : _rtpFile(NULL),_rtpEOF(false) {}
+    void Open(const char *outFilename, const char *mode);
+    void Close();
+    void WriteHeader();
+    void ReadHeader();
+    void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+    WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
+                    WebRtc_Word8* payloadData, 
+                    WebRtc_UWord16 payloadSize,
+                    WebRtc_UWord32* offset);
+    bool EndOfFile() const { return _rtpEOF; }
+private:
+    FILE*   _rtpFile;
+    bool    _rtpEOF;
+};
+
+} // namespace webrtc
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/SpatialAudio.cc b/trunk/src/modules/audio_coding/main/test/SpatialAudio.cc
new file mode 100644
index 0000000..e19ca44
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/SpatialAudio.cc
@@ -0,0 +1,246 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include <math.h>
+
+#include "common_types.h"
+#include "SpatialAudio.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+#define NUM_PANN_COEFFS 10
+
+SpatialAudio::SpatialAudio(int testMode)
+{
+    _testMode = testMode;
+}
+
+SpatialAudio::~SpatialAudio()
+{
+    AudioCodingModule::Destroy(_acmLeft);
+    AudioCodingModule::Destroy(_acmRight);
+    AudioCodingModule::Destroy(_acmReceiver);
+    delete _channel;
+    _inFile.Close();
+    _outFile.Close();
+}
+
+WebRtc_Word16 
+SpatialAudio::Setup()
+{
+    // Create ACMs and the Channel;
+    _acmLeft = AudioCodingModule::Create(1);
+    _acmRight = AudioCodingModule::Create(2);
+    _acmReceiver = AudioCodingModule::Create(3);
+    _channel = new Channel;
+
+    // Register callback for the sender side.
+    CHECK_ERROR(_acmLeft->RegisterTransportCallback(_channel));
+    CHECK_ERROR(_acmRight->RegisterTransportCallback(_channel));
+    // Register the receiver ACM in channel
+    _channel->RegisterReceiverACM(_acmReceiver);
+
+    char audioFileName[MAX_FILE_NAME_LENGTH_BYTE];
+    WebRtc_UWord16 sampFreqHz = 32000;
+
+    strncpy(audioFileName, "./test/data/audio_coding/testfile32kHz.pcm",
+            MAX_FILE_NAME_LENGTH_BYTE - 1);
+    if(_testMode == 1)
+    {
+        printf("Enter the input file [%s]: ", audioFileName);
+        PCMFile::ChooseFile(audioFileName, MAX_FILE_NAME_LENGTH_BYTE, &sampFreqHz);
+    }
+    _inFile.Open(audioFileName, sampFreqHz, "rb", false);
+
+    if(_testMode == 0)
+    {
+        std::string outputFile = webrtc::test::OutputPath() +
+            "out_spatial_autotest.pcm";
+        strncpy(audioFileName, outputFile.c_str(),
+                MAX_FILE_NAME_LENGTH_BYTE - 1);
+    }
+    else if(_testMode == 1)
+    {
+        printf("\n");
+        std::string outputFile = webrtc::test::OutputPath() +
+            "testspatial_out.pcm";
+        strncpy(audioFileName, outputFile.c_str(),
+                MAX_FILE_NAME_LENGTH_BYTE - 1);
+        printf("Enter the output file [%s]: ", audioFileName);
+        PCMFile::ChooseFile(audioFileName, MAX_FILE_NAME_LENGTH_BYTE, &sampFreqHz);
+    }
+    else
+    {
+        std::string outputFile = webrtc::test::OutputPath() +
+            "testspatial_out.pcm";
+        strncpy(audioFileName, outputFile.c_str(),
+                MAX_FILE_NAME_LENGTH_BYTE - 1);
+    }
+    _outFile.Open(audioFileName, sampFreqHz, "wb", false);
+    _outFile.SaveStereo(true);
+
+
+    // Register couple of codecs as receive codec    
+    CodecInst codecInst;
+
+    _acmLeft->Codec((WebRtc_UWord8)0, codecInst);    
+    codecInst.channels = 2;
+    CHECK_ERROR(_acmReceiver->RegisterReceiveCodec(codecInst));
+
+    _acmLeft->Codec((WebRtc_UWord8)3, codecInst);    
+    codecInst.channels = 2;
+    CHECK_ERROR(_acmReceiver->RegisterReceiveCodec(codecInst));
+ 
+    _acmLeft->Codec((WebRtc_UWord8)1, codecInst);
+    CHECK_ERROR(_acmReceiver->RegisterReceiveCodec(codecInst));
+    
+    _acmLeft->Codec((WebRtc_UWord8)4, codecInst);
+    CHECK_ERROR(_acmReceiver->RegisterReceiveCodec(codecInst));
+
+    return 0;
+}
+
+void
+SpatialAudio::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running SpatialAudio Test");
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1, "---------- SpatialAudio ----------");
+    }
+
+    Setup();
+
+    CodecInst codecInst;
+    _acmLeft->Codec((WebRtc_UWord8)1, codecInst);
+    CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+    EncodeDecode();
+
+    WebRtc_Word16 pannCntr = 0;
+
+    double leftPanning[NUM_PANN_COEFFS] =  
+        {1.00, 0.95, 0.90, 0.85, 0.80, 0.75, 0.70, 0.60, 0.55, 0.50};
+    double rightPanning[NUM_PANN_COEFFS] = 
+        {0.50, 0.55, 0.60, 0.70, 0.75, 0.80, 0.85, 0.90, 0.95, 1.00};
+
+    while((pannCntr + 1) < NUM_PANN_COEFFS)
+    {
+        _acmLeft->Codec((WebRtc_UWord8)0, codecInst);    
+        codecInst.pacsize = 480;
+        CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+        CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+
+        EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+        pannCntr++;
+
+        // Change codec    
+        _acmLeft->Codec((WebRtc_UWord8)3, codecInst);    
+        codecInst.pacsize = 320;
+        CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+        CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+
+        EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+        pannCntr++;
+        if(_testMode == 0)
+        {
+            printf(".");
+        }
+    }
+
+    _acmLeft->Codec((WebRtc_UWord8)4, codecInst);
+    CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+    EncodeDecode();
+
+    _acmLeft->Codec((WebRtc_UWord8)0, codecInst);    
+    codecInst.pacsize = 480;
+    CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+    CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+    pannCntr = NUM_PANN_COEFFS -1;
+    while(pannCntr >= 0)
+    {
+        EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+        pannCntr--;
+        if(_testMode == 0)
+        {
+            printf(".");
+        }
+    }
+    if(_testMode == 0)
+    {
+        printf("Done!\n");
+    }
+}
+
+void 
+SpatialAudio::EncodeDecode(
+    const double leftPanning, 
+    const double rightPanning)
+{
+    AudioFrame audioFrame;
+    WebRtc_Word32 outFileSampFreq = _outFile.SamplingFrequency();
+
+    const double rightToLeftRatio = rightPanning / leftPanning;
+
+    _channel->SetIsStereo(true);
+
+    while(!_inFile.EndOfFile())
+    {
+        _inFile.Read10MsData(audioFrame);
+        for(int n = 0; n < audioFrame._payloadDataLengthInSamples; n++)
+        {
+            audioFrame._payloadData[n] = (WebRtc_Word16)floor(
+                audioFrame._payloadData[n] * leftPanning + 0.5);
+        }
+        CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
+
+        for(int n = 0; n < audioFrame._payloadDataLengthInSamples; n++)
+        {
+            audioFrame._payloadData[n] = (WebRtc_Word16)floor(
+                audioFrame._payloadData[n] * rightToLeftRatio + 0.5);
+        }
+        CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
+
+        CHECK_ERROR(_acmLeft->Process());
+        CHECK_ERROR(_acmRight->Process());
+
+        CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, audioFrame));
+        _outFile.Write10MsData(audioFrame);
+    }
+    _inFile.Rewind();
+}
+
+void 
+SpatialAudio::EncodeDecode()
+{
+    AudioFrame audioFrame;
+    WebRtc_Word32 outFileSampFreq = _outFile.SamplingFrequency();
+
+    _channel->SetIsStereo(false);
+
+    while(!_inFile.EndOfFile())
+    {
+        _inFile.Read10MsData(audioFrame);
+        CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
+
+        CHECK_ERROR(_acmLeft->Process());
+
+        CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, audioFrame));
+        _outFile.Write10MsData(audioFrame);
+    }
+    _inFile.Rewind();
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/SpatialAudio.h b/trunk/src/modules/audio_coding/main/test/SpatialAudio.h
new file mode 100644
index 0000000..6a88327
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/SpatialAudio.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACM_TEST_SPATIAL_AUDIO_H
+#define ACM_TEST_SPATIAL_AUDIO_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "audio_coding_module.h"
+#include "utility.h"
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+namespace webrtc {
+
+class SpatialAudio : public ACMTest
+{
+public:
+    SpatialAudio(int testMode);
+    ~SpatialAudio();
+
+    void Perform();
+private:
+    WebRtc_Word16 Setup();
+    void EncodeDecode(double leftPanning, double rightPanning);
+    void EncodeDecode();
+
+    AudioCodingModule* _acmLeft;
+    AudioCodingModule* _acmRight;
+    AudioCodingModule* _acmReceiver;
+    Channel*               _channel;
+    PCMFile                _inFile;
+    PCMFile                _outFile;
+    int                    _testMode;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/TestAllCodecs.cc b/trunk/src/modules/audio_coding/main/test/TestAllCodecs.cc
new file mode 100644
index 0000000..9d7f1e3
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestAllCodecs.cc
@@ -0,0 +1,872 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestAllCodecs.h"
+
+#include <cassert>
+#include <iostream>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+// Class for simulating packet handling
+TestPack::TestPack():
+_receiverACM(NULL),
+_seqNo(0),
+_timeStampDiff(0),
+_lastInTimestamp(0),
+_totalBytes(0),
+_payloadSize(0)
+{
+}
+TestPack::~TestPack()
+{
+}
+
+void 
+TestPack::RegisterReceiverACM(AudioCodingModule* acm)
+{
+    _receiverACM = acm;
+    return;
+}
+WebRtc_Word32 
+TestPack::SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData, 
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation)
+{
+    WebRtcRTPHeader rtpInfo;
+    WebRtc_Word32   status;
+    WebRtc_UWord16  payloadDataSize = payloadSize;
+
+    rtpInfo.header.markerBit = false;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.timestamp = timeStamp;
+    if(frameType == kAudioFrameCN)
+    {
+        rtpInfo.type.Audio.isCNG = true;
+    }
+    else
+    {
+        rtpInfo.type.Audio.isCNG = false;
+    }
+    if(frameType == kFrameEmpty)
+    {
+        // Skip this frame
+        return 0;
+    }
+
+    rtpInfo.type.Audio.channel = 1;
+    memcpy(_payloadData, payloadData, payloadDataSize);
+    
+    status = _receiverACM->IncomingPacket((WebRtc_Word8*)_payloadData, payloadDataSize, rtpInfo);
+
+    _payloadSize = payloadDataSize;
+    _timeStampDiff = timeStamp - _lastInTimestamp;
+    _lastInTimestamp = timeStamp;
+    _totalBytes += payloadDataSize;
+    return status;
+}
+
+WebRtc_UWord16
+TestPack::GetPayloadSize()
+{
+    return _payloadSize;
+}
+
+
+WebRtc_UWord32
+TestPack::GetTimeStampDiff()
+{
+    return _timeStampDiff;
+}
+
+void 
+TestPack::ResetPayloadSize()
+{
+    _payloadSize = 0;
+}
+
+TestAllCodecs::TestAllCodecs(int testMode):
+_acmA(NULL),
+_acmB(NULL),
+_channelA2B(NULL),
+_testCntr(0),
+_packSizeSamp(0),
+_packSizeBytes(0),
+_counter(0)
+{
+    // testMode = 0 for silent test (auto test)
+    _testMode = testMode;
+}
+
+TestAllCodecs::~TestAllCodecs()
+{
+    if(_acmA != NULL)
+    {
+        AudioCodingModule::Destroy(_acmA);
+        _acmA = NULL;
+    }
+    if(_acmB != NULL)
+    {
+        AudioCodingModule::Destroy(_acmB);
+        _acmB = NULL;
+    }
+    if(_channelA2B != NULL)
+    {
+        delete _channelA2B;
+        _channelA2B = NULL;
+    }
+}
+
+void TestAllCodecs::Perform()
+{
+
+    char file[] = "./test/data/audio_coding/testfile32kHz.pcm";
+    _inFileA.Open(file, 32000, "rb");
+
+    if(_testMode == 0)
+    {
+        printf("Running All Codecs Test");
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                     "---------- TestAllCodecs ----------");
+    }
+
+    _acmA = AudioCodingModule::Create(0);
+    _acmB = AudioCodingModule::Create(1);
+
+    _acmA->InitializeReceiver();
+    _acmB->InitializeReceiver();
+
+    WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+    CodecInst myCodecParam;
+ 
+    for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+    {
+        _acmB->Codec(n, myCodecParam);
+        _acmB->RegisterReceiveCodec(myCodecParam);
+    }
+
+    // Create and connect the channel
+    _channelA2B = new TestPack;    
+    _acmA->RegisterTransportCallback(_channelA2B);
+    _channelA2B->RegisterReceiverACM(_acmB);
+
+    // All codecs are tested for all allowed sampling frequencies, rates and packet sizes
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecAMR[] = "AMR";
+    RegisterSendCodec('A', codecAMR, 8000, 4750, 160, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 4750, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 4750, 480, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 5150, 160, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 5150, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 5150, 480, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 5900, 160, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 5900, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 5900, 480, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 6700, 160, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 6700, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 6700, 480, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 7400, 160, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 7400, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 7400, 480, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 7950, 160, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 7950, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 7950, 480, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 10200, 160, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 10200, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 10200, 480, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 12200, 160, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 12200, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMR, 8000, 12200, 480, 3);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    char codecAMRWB[] = "AMR-WB";
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecAMRWB, 16000, 7000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 7000, 640, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 7000, 960, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 9000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 9000, 640, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 9000, 960, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 12000, 320, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 12000, 640, 6);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 12000, 960, 8);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 14000, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 14000, 640, 4);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 14000, 960, 5);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 16000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 16000, 640, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 16000, 960, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 18000, 320, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 18000, 640, 4);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 18000, 960, 5);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 20000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 20000, 640, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 20000, 960, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 23000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 23000, 640, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 23000, 960, 3);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 24000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 24000, 640, 2);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecAMRWB, 16000, 24000, 960, 2);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_G722
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecG722[] = "G722";
+    RegisterSendCodec('A', codecG722, 16000, 64000, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 480, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 640, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 800, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 960, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecG7221_1[] = "G7221";
+    RegisterSendCodec('A', codecG7221_1, 16000, 32000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7221_1, 16000, 24000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7221_1, 16000, 16000, 320, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecG7221_2[] = "G7221";
+    RegisterSendCodec('A', codecG7221_2, 32000, 48000, 640, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7221_2, 32000, 32000, 640, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7221_2, 32000, 24000, 640, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_G729
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecG729[] = "G729";
+    RegisterSendCodec('A', codecG729, 8000, 8000, 80, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG729, 8000, 8000, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG729, 8000, 8000, 240, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG729, 8000, 8000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG729, 8000, 8000, 400, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG729, 8000, 8000, 480, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecG7291[] = "G7291";
+    RegisterSendCodec('A', codecG7291, 16000, 8000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 8000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 8000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 12000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 12000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 12000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 14000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 14000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 14000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 16000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 16000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 16000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 18000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 18000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 18000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 20000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 20000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 20000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 22000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 22000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 22000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 24000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 24000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 24000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 26000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 26000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 26000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 28000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 28000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 28000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 30000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 30000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 30000, 960, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 32000, 320, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 32000, 640, 1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecG7291, 16000, 32000, 960, 1);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecGSM[] = "GSM";
+    RegisterSendCodec('A', codecGSM, 8000, 13200, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecGSM, 8000, 13200, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecGSM, 8000, 13200, 480, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecILBC[] = "ILBC";
+    RegisterSendCodec('A', codecILBC, 8000, 13300, 240, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecILBC, 8000, 13300, 480, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecILBC, 8000, 15200, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecILBC, 8000, 15200, 320, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecISAC[] = "ISAC";
+    RegisterSendCodec('A', codecISAC, 16000, -1, 480, -1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecISAC, 16000, -1, 960, -1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecISAC, 16000, 15000, 480, -1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecISAC, 16000, 32000, 960, -1);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecISAC, 32000, -1, 960, -1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecISAC, 32000, 56000, 960, -1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecISAC, 32000, 37000, 960, -1);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecISAC, 32000, 32000, 960, -1);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++; 
+    OpenOutFile(_testCntr);
+    char codecL16[] = "L16";
+    RegisterSendCodec('A', codecL16, 8000, 128000, 80, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 240, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 320, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;  
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 480, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 640, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++; 
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecL16, 32000, 512000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecL16, 32000, 512000, 640, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecPCMA[] = "PCMA";
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 80, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 240, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 400, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 480, 0);
+    Run(_channelA2B);
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    char codecPCMU[] = "PCMU";
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 80, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 240, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 400, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 480, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#ifdef WEBRTC_CODEC_SPEEX
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;   
+    OpenOutFile(_testCntr);
+    char codecSPEEX[] = "SPEEX";
+    RegisterSendCodec('A', codecSPEEX, 8000, 2400, 160, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecSPEEX, 8000, 8000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecSPEEX, 8000, 18200, 480, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;  
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecSPEEX, 16000, 4000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecSPEEX, 16000, 12800, 640, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecSPEEX, 16000, 34200, 960, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecCELT_32[] = "CELT";
+    RegisterSendCodec('A', codecCELT_32, 32000, 48000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecCELT_32, 32000, 64000, 320, 0);
+    Run(_channelA2B);
+    RegisterSendCodec('A', codecCELT_32, 32000, 128000, 320, 0);
+    Run(_channelA2B);
+    _outFileB.Close();
+#endif
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+    } else {
+        printf("Done!\n");
+    }
+
+    /* Print out all codecs that were not tested in the run */
+    if(_testMode != 0) {
+        printf("The following codecs was not included in the test:\n");
+#ifndef WEBRTC_CODEC_GSMAMR
+        printf("   GSMAMR\n");
+#endif
+#ifndef WEBRTC_CODEC_GSMAMRWB
+        printf("   GSMAMR-wb\n");
+#endif
+#ifndef WEBRTC_CODEC_G722
+        printf("   G.722\n");
+#endif
+#ifndef WEBRTC_CODEC_G722_1
+        printf("   G.722.1\n");
+#endif
+#ifndef WEBRTC_CODEC_G722_1C
+        printf("   G.722.1C\n");
+#endif
+#ifndef WEBRTC_CODEC_G729
+        printf("   G.729\n");
+#endif
+#ifndef WEBRTC_CODEC_G729_1
+        printf("   G.729.1\n");
+#endif
+#ifndef WEBRTC_CODEC_GSMFR
+        printf("   GSMFR\n");
+#endif
+#ifndef WEBRTC_CODEC_ILBC
+        printf("   iLBC\n");
+#endif
+#ifndef WEBRTC_CODEC_ISAC
+        printf("   ISAC float\n");
+#endif
+#ifndef WEBRTC_CODEC_ISACFX
+        printf("   ISAC fix\n");
+#endif
+#ifndef WEBRTC_CODEC_PCM16
+        printf("   PCM16\n");
+#endif
+#ifndef WEBRTC_CODEC_SPEEX
+        printf("   Speex\n");
+#endif
+
+        printf("\nTo complete the test, listen to the %d number of output files.\n", _testCntr);
+    }
+}
+
+// Register Codec to use in the test
+//
+// Input:   side            - which ACM to use, 'A' or 'B'
+//          codecName       - name to use when registering the codec
+//          samplingFreqHz  - sampling frequency in Hertz
+//          rate            - bitrate in bits per second (see the expected
+//                            packet-size formula below)
+//          packSize        - packet size in samples
+//          extraByte       - if extra bytes needed compared to the bitrate 
+//                            used when registering, can be an internal header
+//                            set to -1 if the codec is a variable rate codec
+// Return:  0 on success, -1 on failure (unknown side or missing ACM).
+WebRtc_Word16 TestAllCodecs::RegisterSendCodec(char side, 
+                                             char* codecName, 
+                                             WebRtc_Word32 samplingFreqHz,
+                                             int rate,
+                                             int packSize,
+                                             int extraByte)
+{
+    if(_testMode != 0) {
+        // Print out codec and settings
+        printf("codec: %s Freq: %d Rate: %d PackSize: %d", codecName, samplingFreqHz, rate, packSize);
+    }
+
+    // Store packetsize in samples, used to validate the received packet
+    _packSizeSamp = packSize;
+
+    // Store the expected packet size in bytes, used to validate the received packet.
+    // If variable rate codec (extraByte == -1), set to -1; assigning -1 to the
+    // unsigned _packSizeBytes wraps to 65535, which Run() treats as "don't check".
+    if (extraByte != -1) 
+    {
+        // Add 0.875 to always round up to a whole byte
+        _packSizeBytes = (WebRtc_UWord16)((float)(packSize*rate)/(float)(samplingFreqHz*8)+0.875)+extraByte;
+    } 
+    else 
+    {
+        // Packets will have a variable size
+        _packSizeBytes = -1;
+    }
+
+    // Set pointer to the ACM where to register the codec
+    AudioCodingModule* myACM;
+    switch(side)
+    {
+    case 'A':
+        {
+            myACM = _acmA;
+            break;
+        }
+    case 'B':
+        {
+            myACM = _acmB;
+            break;
+        }
+    default:
+        return -1;
+    }
+
+    if(myACM == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    CodecInst myCodecParam;
+
+    // Get all codec parameters before registering
+    CHECK_ERROR(AudioCodingModule::Codec(codecName, myCodecParam, samplingFreqHz));
+    myCodecParam.rate = rate;
+    myCodecParam.pacsize = packSize;
+    CHECK_ERROR(myACM->RegisterSendCodec(myCodecParam));
+
+    // initialization was successful
+    return 0;
+}
+
+// Encodes up to ~1 second of audio from input file A through ACM A, passes
+// each packet over |channel| to ACM B, decodes 10 ms at a time and writes the
+// result to the output file. Counts validation errors: unexpected payload
+// size (unless variable rate, i.e. _packSizeBytes == 65535) or unexpected
+// timestamp increment. Prints FAILED/PASSED at the end.
+void TestAllCodecs::Run(TestPack* channel)
+{
+    AudioFrame audioFrame;
+
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+    WebRtc_UWord16 recSize;
+    WebRtc_UWord32 timeStampDiff;
+    channel->ResetPayloadSize();
+    int errorCount = 0;
+
+    // Only run 1 second for each test case
+    // NOTE(review): _counter is not advanced anywhere in this loop body; the
+    // 1000-iteration cap relies on it being updated elsewhere — confirm.
+    while((_counter<1000)&& (!_inFileA.EndOfFile()))
+    {
+        // Add 10 msec to ACM
+         _inFileA.Read10MsData(audioFrame);
+        CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+
+        // Run sender side of ACM
+        CHECK_ERROR(_acmA->Process());
+
+        // Verify that the received packet size matches the settings
+        recSize = channel->GetPayloadSize();
+        if (recSize) {
+            if ((recSize != _packSizeBytes) && (_packSizeBytes < 65535)) {
+                errorCount++;
+            }
+
+        // Verify that the timestamp is updated with expected length.
+        // NOTE(review): despite the indentation, this check is INSIDE the
+        // if (recSize) block — it only runs when a packet was produced.
+        timeStampDiff = channel->GetTimeStampDiff();
+        if ((_counter > 10) && (timeStampDiff != _packSizeSamp))
+            errorCount++;
+        }
+
+
+        // Run received side of ACM
+        CHECK_ERROR(_acmB->PlayoutData10Ms(outFreqHzB, audioFrame));
+
+        // Write output speech to file
+        _outFileB.Write10MsData(audioFrame._payloadData, audioFrame._payloadDataLengthInSamples);
+    }
+
+    if (errorCount) 
+    {
+        printf(" - test FAILED\n");
+    }
+    else if(_testMode != 0)
+    {
+        printf(" - test PASSED\n");
+    }
+
+    // Reset _counter so the next test case starts a fresh run
+    if (_counter == 1000) {
+        _counter = 0;
+    }
+    // Rewind the input so the next test case reads from the start of the file
+    if (_inFileA.EndOfFile()) {
+        _inFileA.Rewind();
+    }
+}
+
+// Opens the side-B output file for test case |testNumber|, named
+// "testallcodecs_out_NN.pcm" in the test output directory, at 32 kHz.
+void TestAllCodecs::OpenOutFile(WebRtc_Word16 testNumber)
+{
+    char fileName[500];
+    // NOTE(review): sprintf into a fixed 500-byte buffer; assumes
+    // OutputPath() stays well under that length — consider a bounded
+    // formatting call. TODO confirm.
+    sprintf(fileName, "%s/testallcodecs_out_%02d.pcm",
+            webrtc::test::OutputPath().c_str(), testNumber);
+    _outFileB.Open(fileName, 32000, "wb");
+}
+
+// Prints the currently configured send codec (side A) and receive codec
+// (side B) on one line, e.g. "ISAC -> ISAC".
+void TestAllCodecs::DisplaySendReceiveCodec()
+{
+    CodecInst sendParams;
+    _acmA->SendCodec(sendParams);
+    printf("%s -> ", sendParams.plname);
+
+    CodecInst receiveParams;
+    _acmB->ReceiveCodec(receiveParams);
+    printf("%s\n", receiveParams.plname);
+}
+
+} // namespace webrtc
+
diff --git a/trunk/src/modules/audio_coding/main/test/TestAllCodecs.h b/trunk/src/modules/audio_coding/main/test/TestAllCodecs.h
new file mode 100644
index 0000000..e0d621f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestAllCodecs.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_ALL_CODECS_H
+#define TEST_ALL_CODECS_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+// Transport stub: implements the sending ACM's AudioPacketizationCallback,
+// forwards every encoded packet to the registered receiving ACM, and records
+// payload size and timestamp progression so tests can validate packetization.
+class TestPack : public AudioPacketizationCallback
+{
+public:
+    TestPack();
+    ~TestPack();
+    
+    // Sets the ACM that will receive the packets produced by the sender.
+    void RegisterReceiverACM(AudioCodingModule* acm);
+    
+    // AudioPacketizationCallback: invoked by the sending ACM for each
+    // encoded packet.
+    virtual WebRtc_Word32 SendData(const FrameType frameType,
+        const WebRtc_UWord8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData, 
+        const WebRtc_UWord16 payloadSize,
+        const RTPFragmentationHeader* fragmentation);
+
+    WebRtc_UWord16 GetPayloadSize();    // size (bytes) of the last payload
+    WebRtc_UWord32 GetTimeStampDiff();  // timestamp delta of the last packet
+    void ResetPayloadSize();
+
+private:
+    AudioCodingModule* _receiverACM;
+    WebRtc_Word16            _seqNo;            // outgoing sequence number
+    // Scratch payload buffer; presumably sized for 60 ms @ 32 kHz, 16-bit,
+    // stereo — TODO confirm against TestPack::SendData.
+    WebRtc_UWord8            _payloadData[60 * 32 * 2 * 2]; 
+    WebRtc_UWord32           _timeStampDiff;
+    WebRtc_UWord32           _lastInTimestamp;
+    WebRtc_UWord64           _totalBytes;
+    WebRtc_UWord16           _payloadSize;
+};
+
+// ACM test that iterates over every compiled-in send codec at its supported
+// rates and packet sizes over an A->B connection, writing decoded output to
+// per-test PCM files for off-line listening.
+class TestAllCodecs : public ACMTest
+{
+public:
+    TestAllCodecs(int testMode);
+    ~TestAllCodecs();
+
+    void Perform();
+private:
+    // The default value of '-1' indicates that the registration is based only on codec name
+    // and a sampling frequency matching is not required. This is useful for codecs which
+    // support several sampling frequencies.
+    WebRtc_Word16 RegisterSendCodec(char side, 
+        char* codecName, 
+        WebRtc_Word32 sampFreqHz,
+        int rate,
+        int packSize,
+        int extraByte);
+
+    void Run(TestPack* channel);
+    void OpenOutFile(WebRtc_Word16 testNumber);
+    void DisplaySendReceiveCodec();
+
+    // NOTE(review): declared here but packet delivery appears to go through
+    // TestPack::SendData; confirm this declaration is actually used.
+    WebRtc_Word32 SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData, 
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation);
+
+    int                     _testMode;      // 0 => quiet (dots), else verbose
+
+    AudioCodingModule*      _acmA;          // sending side
+    AudioCodingModule*      _acmB;          // receiving side
+
+    TestPack*               _channelA2B;    // transport from A to B
+
+    PCMFile                _inFileA;        // input speech (side A)
+    PCMFile                _outFileB;       // decoded output (side B)
+    WebRtc_Word16          _testCntr;       // index used to name output files
+    WebRtc_UWord16         _packSizeSamp;   // expected packet size, samples
+    WebRtc_UWord16         _packSizeBytes;  // expected size, bytes (65535 => variable rate)
+    int                    _counter;
+};
+
+// Close the namespace BEFORE the include-guard #endif: the original order
+// left the closing brace outside the guard, so a second inclusion of this
+// header would emit a stray '}' and fail to compile.
+} // namespace webrtc
+
+#endif // TEST_ALL_CODECS_H
diff --git a/trunk/src/modules/audio_coding/main/test/TestFEC.cc b/trunk/src/modules/audio_coding/main/test/TestFEC.cc
new file mode 100644
index 0000000..81ca91a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestFEC.cc
@@ -0,0 +1,627 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestFEC.h"
+
+#include <cassert>
+#include <iostream>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+// Constructs the FEC test. testMode != 0 enables verbose per-test printouts;
+// testMode == 0 runs quietly (progress dots only). ACMs and the channel are
+// created lazily in Perform().
+TestFEC::TestFEC(int testMode):
+_acmA(NULL),
+_acmB(NULL),
+_channelA2B(NULL),
+_testCntr(0)
+{
+    _testMode = testMode;
+}
+
+// Releases both ACM instances and the A->B channel. Safe to run even if
+// Perform() was never called, since all members start out NULL.
+TestFEC::~TestFEC()
+{
+    if (_acmA)
+    {
+        AudioCodingModule::Destroy(_acmA);
+        _acmA = NULL;
+    }
+    if (_acmB)
+    {
+        AudioCodingModule::Destroy(_acmB);
+        _acmB = NULL;
+    }
+    // delete on a null pointer is a no-op, so no guard is required here.
+    delete _channelA2B;
+    _channelA2B = NULL;
+}
+
+// Runs the full FEC test suite. Audio from a 32 kHz input file is encoded on
+// side A (with RED registered as the FEC payload), sent to side B and decoded
+// to per-test output files. Each codec configuration (G722+CN, iSAC wideband,
+// iSAC super-wideband) is run with FEC disabled and enabled, first over a
+// clean channel and then — after SetFECTestWithPacketLoss(true) — over a
+// channel with simulated packet loss.
+void TestFEC::Perform()
+{
+
+    if(_testMode == 0)
+    {
+        printf("Running FEC Test");
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                     "---------- TestFEC ----------");
+    }
+    char fileName[] = "./test/data/audio_coding/testfile32kHz.pcm";
+    _inFileA.Open(fileName, 32000, "rb");
+
+
+    bool fecEnabled;
+
+    _acmA = AudioCodingModule::Create(0);
+    _acmB = AudioCodingModule::Create(1);
+
+    _acmA->InitializeReceiver();
+    _acmB->InitializeReceiver();
+
+    // Register every available codec on the receiving side.
+    WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+    CodecInst myCodecParam;
+    if(_testMode != 0)
+    {
+        printf("Registering codecs at receiver... \n");
+    }
+    for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+    {
+        _acmB->Codec(n, myCodecParam);
+        if(_testMode != 0)
+        {
+            printf("%s\n", myCodecParam.plname);
+        }
+        _acmB->RegisterReceiveCodec(myCodecParam);
+    }
+
+    // Create and connect the channel
+    _channelA2B = new Channel;    
+    _acmA->RegisterTransportCallback(_channelA2B);
+    _channelA2B->RegisterReceiverACM(_acmB);
+
+    // Test: G722 + CN (aggressive VAD/DTX) with RED registered, FEC off.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    // The suite requires G722; abort if it was not compiled in.
+#ifndef WEBRTC_CODEC_G722
+    printf("G722 needs to be activated to run this test\n");
+    exit(-1);
+#endif
+    char nameG722[] = "G722";
+    RegisterSendCodec('A', nameG722, 16000);
+    char nameCN[] = "CN";
+    RegisterSendCodec('A', nameCN, 16000);
+    char nameRED[] = "RED";
+    RegisterSendCodec('A', nameRED);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+    // Same G722 + CN configuration, FEC on.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+    // Test: iSAC wideband (16 kHz), very aggressive VAD/DTX, FEC off.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    char nameISAC[] = "iSAC";
+    RegisterSendCodec('A',nameISAC, 16000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    // Same iSAC wideband configuration, FEC on.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+    // Test: iSAC super-wideband (32 kHz), FEC off.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    // Same iSAC super-wideband configuration, FEC on.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+
+    // Test: FEC on with VAD/DTX disabled, switching iSAC between 32 kHz and
+    // 16 kHz back and forth across consecutive runs (one shared output file).
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(false, false, VADNormal);
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+
+
+
+
+    // From here on, repeat the suite over a channel with simulated packet
+    // loss so FEC recovery can be evaluated.
+    _channelA2B->SetFECTestWithPacketLoss(true);
+
+
+
+    // Test (lossy): G722 + CN, aggressive VAD/DTX, FEC off.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+
+    RegisterSendCodec('A',nameG722);
+    RegisterSendCodec('A', nameCN, 16000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    // Test (lossy): G722 + CN, FEC on.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+    // Test (lossy): iSAC wideband, FEC off.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    RegisterSendCodec('A', nameISAC, 16000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    // Test (lossy): iSAC wideband, FEC on.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+
+    // Test (lossy): iSAC super-wideband, FEC off.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    // Test (lossy): iSAC super-wideband, FEC on.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+
+
+
+    // Test (lossy): FEC on, VAD/DTX off, switching iSAC 32 kHz <-> 16 kHz.
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(false, false, VADNormal);
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode == 0)
+    {
+        printf("Done!\n");
+    }
+}
+
+// Configure DTX/VAD on the sender-side ACM and, in verbose mode, log the
+// requested settings. Returns the ACM's status code.
+WebRtc_Word32 TestFEC::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode)
+{
+    if(_testMode != 0)
+    {
+        const char* dtxState = enableDTX ? "ON" : "OFF";
+        const char* vadState = enableVAD ? "ON" : "OFF";
+        printf("DTX %s; VAD %s; VAD-Mode %d\n", dtxState, vadState,
+               (WebRtc_Word16)vadMode);
+    }
+    return _acmA->SetVAD(enableDTX, enableVAD, vadMode);
+}
+
+// Look up |codecName| (optionally constrained to |samplingFreqHz|) and make
+// it the send codec on side 'A' or 'B'. Returns 0 on success, -1 if |side|
+// is not 'A' or 'B'.
+WebRtc_Word16 TestFEC::RegisterSendCodec(char side, char* codecName, WebRtc_Word32 samplingFreqHz)
+{
+    if(_testMode != 0)
+    {
+        if(samplingFreqHz > 0)
+        {
+            printf("Registering %s-%d for side %c\n", codecName, samplingFreqHz, side);
+        }
+        else
+        {
+            printf("Registering %s for side %c\n", codecName, side);
+        }
+    }
+    std::cout << std::flush;
+
+    // Pick the ACM instance that matches the requested side.
+    AudioCodingModule* targetAcm = NULL;
+    if(side == 'A')
+    {
+        targetAcm = _acmA;
+    }
+    else if(side == 'B')
+    {
+        targetAcm = _acmB;
+    }
+    else
+    {
+        return -1;
+    }
+
+    if(targetAcm == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+
+    CodecInst codecParams;
+    CHECK_ERROR(AudioCodingModule::Codec(codecName, codecParams, samplingFreqHz));
+    CHECK_ERROR(targetAcm->RegisterSendCodec(codecParams));
+
+    // Registration was successful.
+    return 0;
+}
+
+// Streams the entire input file A through the A->B codec chain, writing the
+// decoded output to _outFileB. For test cases beyond number 14, FEC is
+// switched off at the start of every 5th second and switched back on 990 ms
+// later, exercising on-the-fly FEC toggling while streaming.
+void TestFEC::Run()
+{
+    AudioFrame audioFrame;
+
+    WebRtc_UWord16 msecPassed = 0;
+    WebRtc_UWord32 secPassed  = 0;
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+
+    while(!_inFileA.EndOfFile())
+    {
+        // 10 ms in on side A, encode, decode 10 ms out on side B.
+        _inFileA.Read10MsData(audioFrame);
+        CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+        CHECK_ERROR(_acmA->Process());
+        CHECK_ERROR(_acmB->PlayoutData10Ms(outFreqHzB, audioFrame));
+        _outFileB.Write10MsData(audioFrame._payloadData, audioFrame._payloadDataLengthInSamples);
+        // Track elapsed stream time; msecPassed wraps to 0 at each full second.
+        msecPassed += 10;
+        if(msecPassed >= 1000)
+        {
+            msecPassed = 0;
+            secPassed++;
+        }
+        // In the toggling test cases (_testCntr > 14): disable FEC right when
+        // a "4 mod 5" second starts...
+        if(((secPassed%5) == 4) && (msecPassed == 0) && (_testCntr > 14))
+        {
+            printf("%3u:%3u  ", secPassed, msecPassed);
+            _acmA->SetFECStatus(false);
+            printf("FEC currently %s\n",(_acmA->FECStatus()?"ON":"OFF"));
+        }
+        // ...and re-enable it just before that second ends (msecPassed peaks
+        // at 990 before the wrap above).
+        if(((secPassed%5) == 4) && (msecPassed >= 990) && (_testCntr > 14))
+        {
+            printf("%3u:%3u  ", secPassed, msecPassed);
+            _acmA->SetFECStatus(true);
+            printf("FEC currently %s\n",(_acmA->FECStatus()?"ON":"OFF"));
+        }
+    }
+    // Leave the input file ready for the next test case.
+    _inFileA.Rewind();
+}
+
+// Open the output PCM file for the given test number. In auto-test mode
+// (_testMode == 0) a distinct "autoFile" name is used so it does not collide
+// with files from a verbose (manual) run.
+void TestFEC::OpenOutFile(WebRtc_Word16 testNumber)
+{
+    char fileName[500];
+
+    // Use the bounded formatter: sprintf performs no length check, so a long
+    // OutputPath() could overflow this on-stack buffer.
+    if(_testMode == 0)
+    {
+        snprintf(fileName, sizeof(fileName), "%s/TestFEC_autoFile_%02d.pcm",
+                 webrtc::test::OutputPath().c_str(), testNumber);
+    }
+    else
+    {
+        snprintf(fileName, sizeof(fileName), "%s/TestFEC_outFile_%02d.pcm",
+                 webrtc::test::OutputPath().c_str(), testNumber);
+    }
+    _outFileB.Open(fileName, 32000, "wb");
+}
+
+// Print the current A-to-B codec configuration as "sendCodec -> recvCodec".
+void TestFEC::DisplaySendReceiveCodec()
+{
+    CodecInst codecInfo;
+    // Query side A's send codec first, then reuse the struct for side B's
+    // receive codec.
+    _acmA->SendCodec(codecInfo);
+    printf("%s -> ", codecInfo.plname);
+    _acmB->ReceiveCodec(codecInfo);
+    printf("%s\n", codecInfo.plname);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/TestFEC.h b/trunk/src/modules/audio_coding/main/test/TestFEC.h
new file mode 100644
index 0000000..00e951f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestFEC.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FEC_H
+#define TEST_FEC_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+// ACM test that exercises forward-error-correction (FEC) with the iSAC send
+// codec: audio from file A is streamed through ACM A to ACM B while FEC is
+// toggled on and off, and the decoded output is written to numbered files.
+class TestFEC : public ACMTest
+{
+public:
+    // testMode == 0 runs silently (auto test); any other value is verbose.
+    TestFEC(int testMode);
+    ~TestFEC();
+
+    // Runs the whole scripted sequence of FEC test cases.
+    void Perform();
+private:
+    // The default value of '-1' indicates that the registration is based only on codec name
+    // and a sampling frequency match is not required. This is useful for codecs which support
+    // several sampling frequencies.
+    WebRtc_Word16 RegisterSendCodec(char side, char* codecName, WebRtc_Word32 sampFreqHz = -1);
+    // Streams file A through the A->B chain until end of file.
+    void Run();
+    // Opens the numbered output PCM file for one test case.
+    void OpenOutFile(WebRtc_Word16 testNumber);
+    // Prints "sendCodec -> receiveCodec" for the current setup.
+    void DisplaySendReceiveCodec();
+    // Sets DTX/VAD on the sending ACM; returns the ACM status code.
+    WebRtc_Word32 SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
+    AudioCodingModule* _acmA;   // sending side
+    AudioCodingModule* _acmB;   // receiving side
+
+    Channel*               _channelA2B;
+
+    PCMFile                _inFileA;
+    PCMFile                _outFileB;
+    WebRtc_Word16            _testCntr;   // current test-case number (also the output-file index)
+    int                    _testMode;   // 0 = silent auto test, otherwise verbose
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/TestStereo.cc b/trunk/src/modules/audio_coding/main/test/TestStereo.cc
new file mode 100644
index 0000000..2105d36
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestStereo.cc
@@ -0,0 +1,804 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestStereo.h"
+
+#include <cassert>
+#include <iostream>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+// Class for simulating packet handling between a sending and a receiving ACM.
+// All counters start at zero; the receiving ACM must be set through
+// RegisterReceiverACM() before SendData() delivers any packet.
+TestPackStereo::TestPackStereo():
+_receiverACM(NULL),
+_seqNo(0),
+_timeStampDiff(0),
+_lastInTimestamp(0),
+_totalBytes(0),
+_payloadSize(0),
+_noChannels(1),
+_codecType(0)
+{
+}
+// No owned resources; nothing to release.
+TestPackStereo::~TestPackStereo()
+{
+}
+
+// Store the ACM instance that should receive the packets this simulated
+// channel produces.
+void TestPackStereo::RegisterReceiverACM(AudioCodingModule* acm)
+{
+    _receiverACM = acm;
+}
+
+
+// AudioPacketizationCallback: invoked by the sending ACM for every encoded
+// packet. Builds an RTP header, splits the interleaved stereo payload into a
+// "master" (channel 1) and a "slave" (channel 2) packet according to
+// _codecType, and feeds the pieces to the receiving ACM. Also records the
+// payload size and timestamp delta for validation by TestStereo::Run().
+WebRtc_Word32 
+TestPackStereo::SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData, 
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation)
+{
+    WebRtcRTPHeader rtpInfo;
+    WebRtc_Word32   status;
+    WebRtc_UWord16  payloadDataSize = payloadSize;
+    // Scratch buffers sized for the largest expected payload (60 ms, 32 kHz,
+    // 2 bytes/sample, 2 channels).
+    WebRtc_UWord8 payloadDataMaster[60 * 32 * 2 * 2]; 
+    WebRtc_UWord8 payloadDataSlave[60 * 32 * 2 * 2];
+
+    rtpInfo.header.markerBit = false;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.timestamp = timeStamp;
+    if(frameType == kFrameEmpty)
+    {
+        // Skip this frame
+        return 0;
+    }    
+    if(frameType != kAudioFrameCN)
+    {
+        rtpInfo.type.Audio.isCNG = false;
+
+        // For stereo we need to call ACM with two incoming packets, one for each channel.
+        // Different packet-splitting depending on codec.
+        if (_codecType == 0) {
+            // one byte per sample
+            for (int i=0, j=0; i<payloadDataSize; i+=2, j++)
+            {
+                payloadDataMaster[j] = payloadData[i];
+                payloadDataSlave[j] = payloadData[i+1];
+            }
+        } else if (_codecType == 1) {
+            // two bytes per sample
+            for (int i=0, j=0; i<payloadDataSize; i+=4, j+=2)
+            {
+                payloadDataMaster[j] = payloadData[i];
+                payloadDataMaster[j+1] = payloadData[i+1];
+                payloadDataSlave[j] = payloadData[i+2];
+                payloadDataSlave[j+1] = payloadData[i+3];
+            }
+        } else if (_codecType == 2) {
+            // frameBased
+            memcpy(payloadDataMaster, &payloadData[0], payloadDataSize/2);
+            memcpy(payloadDataSlave, &payloadData[payloadDataSize/2], payloadDataSize/2);
+        } else if (_codecType == 3) {
+            // four bits per sample
+            for (int i=0, j=0; i<payloadDataSize; i+=2, j++)
+            {
+                payloadDataMaster[j] = (payloadData[i] & 0xF0) + (payloadData[i+1] >> 4);
+                payloadDataSlave[j] = ((payloadData[i] & 0x0F) << 4) + (payloadData[i+1] & 0x0F);
+            }
+        } else if (_codecType == 4) {
+          // True stereo, call both master and slave with whole stream.
+          // Doubling payloadDataSize compensates for the /2 at delivery below.
+          memcpy(payloadDataMaster, payloadData, payloadSize);
+          memcpy(payloadDataSlave, payloadData, payloadSize);
+          payloadDataSize = payloadSize*2;
+        }
+    }
+    else
+    {
+        // If CNG packet, send the same packet to both master and slave.
+        rtpInfo.type.Audio.isCNG = true;
+        memcpy(payloadDataMaster, payloadData, payloadSize);
+        memcpy(payloadDataSlave, payloadData, payloadSize);
+        payloadDataSize = payloadSize*2;
+    }
+
+    if (_codecType != 5) {
+      // Call ACM with two packets, one for each channel
+      rtpInfo.type.Audio.channel = 1;
+      status = _receiverACM->IncomingPacket((WebRtc_Word8*)payloadDataMaster, payloadDataSize/2, rtpInfo);
+      rtpInfo.type.Audio.channel = 2;
+      status = _receiverACM->IncomingPacket((WebRtc_Word8*)payloadDataSlave, payloadDataSize/2, rtpInfo);
+    } else {
+      // Mono case, call ACM with one packet.
+      rtpInfo.type.Audio.channel = 1;
+      status = _receiverACM->IncomingPacket((WebRtc_Word8*)payloadData, payloadDataSize, rtpInfo);
+    }
+
+    // Record the size for Run()'s validation. Note: -1 stored into the
+    // unsigned 16-bit _payloadSize becomes 65535, which Run() treats as
+    // "CN packet, skip the size check".
+    if (frameType != kAudioFrameCN) {
+        _payloadSize = payloadDataSize;
+    } else {
+        _payloadSize = -1;
+    }
+    _timeStampDiff = timeStamp - _lastInTimestamp;
+    _lastInTimestamp = timeStamp;
+    _totalBytes += payloadDataSize;
+    return status;
+}
+
+// Returns the total payload size (bytes, both channels) of the last non-CN
+// packet, or 65535 (-1 wrapped to unsigned) if the last packet was CN.
+WebRtc_UWord16
+TestPackStereo::GetPayloadSize()
+{
+    return _payloadSize;
+}
+
+
+// Returns the RTP timestamp difference between the two most recent packets.
+WebRtc_UWord32
+TestPackStereo::GetTimeStampDiff()
+{
+    return _timeStampDiff;
+}
+
+// Clears the recorded payload size before a new test run.
+void 
+TestPackStereo::ResetPayloadSize()
+{
+    _payloadSize = 0;
+}
+
+// Selects the payload-splitting strategy used by SendData():
+//   0 = one byte per sample (PCMA/PCMU), 1 = two bytes per sample (L16),
+//   2 = frame based, 3 = four bits per sample (G.722), 4 = true stereo
+//   (whole stream to both channels, CELT), 5 = mono passthrough.
+void 
+TestPackStereo::SetCodecType(int codecType)
+{
+    _codecType = codecType;
+}
+
+// All pointers start NULL and counters at zero; ACMs and the channel are
+// created lazily in Perform().
+TestStereo::TestStereo(int testMode):
+_acmA(NULL),
+_acmB(NULL),
+_channelA2B(NULL),
+_testCntr(0),
+_packSizeSamp(0),
+_packSizeBytes(0),
+_counter(0)
+{
+    // testMode = 0 for silent test (auto test)
+    _testMode = testMode;
+}
+
+// Tear down both ACM instances and the simulated channel. The NULL checks
+// make destruction safe even if Perform() was never run.
+TestStereo::~TestStereo()
+{
+    if(_acmA != NULL)
+    {
+        AudioCodingModule::Destroy(_acmA);
+        _acmA = NULL;
+    }
+    if(_acmB != NULL)
+    {
+        AudioCodingModule::Destroy(_acmB);
+        _acmB = NULL;
+    }
+    if(_channelA2B != NULL)
+    {
+        delete _channelA2B;
+        _channelA2B = NULL;
+    }
+}
+
+// Top-level driver for the stereo tests: sets up both ACMs and the simulated
+// channel, then runs three scripted passes over every stereo-capable codec
+// compiled in (G.722, L16/PCM16, PCMA/PCMU, CELT): stereo-to-stereo,
+// mono-to-stereo, and stereo-to-mono. Each numbered test case writes its
+// decoded output to its own PCM file via OpenOutFile().
+void TestStereo::Perform()
+{
+     char file_name_stereo[500];
+     char file_name_mono[500];
+     WebRtc_UWord16 frequencyHz;
+     int audio_channels;
+     int codec_channels;
+
+     if(_testMode == 0)
+      {
+          printf("Running Stereo Test");
+          WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                       "---------- TestStereo ----------");
+      }
+
+     strcpy(file_name_stereo, "./test/data/audio_coding/teststereo32kHz.pcm");
+     strcpy(file_name_mono, "./test/data/audio_coding/testfile32kHz.pcm");
+     frequencyHz = 32000;
+
+    _in_file_stereo.Open(file_name_stereo, frequencyHz, "rb");
+    _in_file_stereo.ReadStereo(true);
+    _in_file_mono.Open(file_name_mono, frequencyHz, "rb");
+    _in_file_mono.ReadStereo(false);
+
+    _acmA = AudioCodingModule::Create(0);
+    _acmB = AudioCodingModule::Create(1);
+
+    _acmA->InitializeReceiver();
+    _acmB->InitializeReceiver();
+
+    WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+    CodecInst myCodecParam;
+ 
+    // Register receiving codecs as stereo.
+    for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+    {
+        _acmB->Codec(n, myCodecParam);
+        if(!strcmp(myCodecParam.plname, "L16") || 
+            !strcmp(myCodecParam.plname, "PCMA")|| 
+            !strcmp(myCodecParam.plname, "PCMU")|| 
+            !strcmp(myCodecParam.plname, "G722")||
+            !strcmp(myCodecParam.plname, "CELT"))
+        {
+            myCodecParam.channels=2;
+            _acmB->RegisterReceiveCodec(myCodecParam);
+        }
+    }
+
+    // Create and connect the channel.
+    _channelA2B = new TestPackStereo;    
+    _acmA->RegisterTransportCallback(_channelA2B);
+    _channelA2B->RegisterReceiverACM(_acmB);
+
+    //
+    // Test Stereo-To-Stereo for all codecs.
+    //
+    audio_channels = 2;
+    codec_channels = 2;
+
+    // All codecs are tested for all allowed sampling frequencies, rates and packet sizes
+#ifdef WEBRTC_CODEC_G722
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _channelA2B->SetCodecType(3);
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecG722[] = "G722";
+    RegisterSendCodec('A', codecG722, 16000, 64000, 160, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 480, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 640, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 800, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 960, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecG722, 16000, 64000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _channelA2B->SetCodecType(1);
+    _testCntr++; 
+    OpenOutFile(_testCntr);
+    char codecL16[] = "L16";
+    RegisterSendCodec('A', codecL16, 8000, 128000, 80, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 160, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 240, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecL16, 8000, 128000, 80, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;  
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 160, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 480, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 640, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecL16, 16000, 256000, 160, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++; 
+    OpenOutFile(_testCntr);
+    RegisterSendCodec('A', codecL16, 32000, 512000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecL16, 32000, 512000, 640, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecL16, 32000, 512000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+#endif
+#define PCMA_AND_PCMU
+#ifdef PCMA_AND_PCMU
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _channelA2B->SetCodecType(0);
+    audio_channels = 2;
+    codec_channels = 2;
+    _testCntr++; 
+    OpenOutFile(_testCntr);
+    char codecPCMA[] = "PCMA";
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 80, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 160, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 240, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 400, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 480, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecPCMA, 8000, 64000, 80, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecPCMU[] = "PCMU";
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 80, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 160, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 240, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 400, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 480, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecPCMU, 8000, 64000, 80, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    if(_testMode != 0) {
+        printf("=======================================================================\n");
+        printf("Test number: %d\n",_testCntr + 1);
+        printf("Test type: Stereo-to-stereo\n");
+    } else {
+        printf(".");
+    }
+    _channelA2B->SetCodecType(4);
+    audio_channels = 2;
+    codec_channels = 2;
+    _testCntr++;
+    OpenOutFile(_testCntr);
+    char codecCELT[] = "CELT";
+    RegisterSendCodec('A', codecCELT, 32000, 48000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecCELT, 32000, 64000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    RegisterSendCodec('A', codecCELT, 32000, 128000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(true, true, VADNormal);
+    RegisterSendCodec('A', codecCELT, 32000, 48000, 320, codec_channels);
+    Run(_channelA2B, audio_channels, codec_channels);
+    _acmA->SetVAD(false, false, VADNormal);
+    _outFileB.Close();
+#endif
+  //
+  // Test Mono-To-Stereo for all codecs.
+  //
+  audio_channels = 1;
+  codec_channels = 2;
+
+#ifdef WEBRTC_CODEC_G722
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  _testCntr++;
+  _channelA2B->SetCodecType(3);
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecG722, 16000, 64000, 160, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  _testCntr++;
+  _channelA2B->SetCodecType(1);
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecL16, 8000, 128000, 80, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecL16, 16000, 256000, 160, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecL16, 32000, 512000, 320, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+#ifdef PCMA_AND_PCMU
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  _testCntr++;
+  _channelA2B->SetCodecType(0);
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecPCMU, 8000, 64000, 80, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  RegisterSendCodec('A', codecPCMA, 8000, 64000, 80, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  _testCntr++;
+  _channelA2B->SetCodecType(4);
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecCELT, 32000, 64000, 320, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+
+  //
+  // Test Stereo-To-Mono for all codecs.
+  //
+  audio_channels = 2;
+  codec_channels = 1;
+  _channelA2B->SetCodecType(5);
+
+  // Register receivers as mono.
+  for(WebRtc_UWord8 n = 0; n < numEncoders; n++) {
+    _acmB->Codec(n, myCodecParam);
+    if(!strcmp(myCodecParam.plname, "L16") ||
+        !strcmp(myCodecParam.plname, "PCMA")||
+        !strcmp(myCodecParam.plname, "PCMU")||
+        !strcmp(myCodecParam.plname, "G722")||
+        !strcmp(myCodecParam.plname, "CELT")) {
+      myCodecParam.channels = 1;
+      _acmB->RegisterReceiveCodec(myCodecParam);
+    }
+  }
+#ifdef WEBRTC_CODEC_G722
+  // Run stereo audio and mono codec.
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecG722, 16000, 64000, 160, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecL16, 8000, 128000, 80, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Stereo-to-mono\n");
+   }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecL16, 16000, 256000, 160, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+  if(_testMode != 0) {
+     printf("===============================================================\n");
+     printf("Test number: %d\n",_testCntr + 1);
+     printf("Test type: Stereo-to-mono\n");
+   }
+   _testCntr++;
+   OpenOutFile(_testCntr);
+   RegisterSendCodec('A', codecL16, 32000, 512000, 320, codec_channels);
+   Run(_channelA2B, audio_channels, codec_channels);
+   _outFileB.Close();
+#endif
+#ifdef PCMA_AND_PCMU
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecPCMU, 8000, 64000, 80, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  RegisterSendCodec('A', codecPCMA, 8000, 64000, 80, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  if(_testMode != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",_testCntr + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  _testCntr++;
+  OpenOutFile(_testCntr);
+  RegisterSendCodec('A', codecCELT, 32000, 64000, 320, codec_channels);
+  Run(_channelA2B, audio_channels, codec_channels);
+  _outFileB.Close();
+#endif
+
+    // Print out which codecs were tested, and which were not, in the run.
+    if(_testMode != 0) {
+        printf("\nThe following codecs was INCLUDED in the test:\n");
+#ifdef WEBRTC_CODEC_G722
+        printf("   G.722\n");
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+        printf("   PCM16\n");
+#endif
+        printf("   G.711\n");
+
+        printf("\nTo complete the test, listen to the %d number of output files.\n", _testCntr);
+    } else {
+        printf("Done!\n");
+    }
+}
+
+// Register Codec to use in the test
+//
+// Input:   side            - which ACM to use, 'A' or 'B'
+//          codecName       - name to use when registering the codec
+//          samplingFreqHz  - sampling frequency in Hertz
+//          rate            - bitrate in bits per second (the expected packet
+//                            size below divides by samplingFreqHz*8, i.e.
+//                            converts bits to bytes)
+//          packSize        - packet size in samples
+//          channels        - number of channels to encode with (1 = mono,
+//                            2 = stereo)
+//
+// Returns 0 on success, -1 on an unknown |side|.
+WebRtc_Word16 TestStereo::RegisterSendCodec(char side, 
+                                          char* codecName, 
+                                          WebRtc_Word32 samplingFreqHz,
+                                          int rate,
+                                          int packSize,
+                                          int channels)
+{
+    if(_testMode != 0) {
+        // Print out codec and settings
+        printf("Codec: %s Freq: %d Rate: %d PackSize: %d", codecName, samplingFreqHz, rate, packSize);
+    }
+
+    // Store packetsize in samples, used to validate the received packet
+    _packSizeSamp = packSize;
+
+    // Store the expected packet size in bytes, used to validate the received
+    // packet. Add 0.875 to always round up to a whole byte
+    _packSizeBytes = (WebRtc_UWord16)((float)(packSize*rate)/(float)(samplingFreqHz*8)+0.875);
+
+    // Set pointer to the ACM where to register the codec
+    AudioCodingModule* myACM;
+    switch(side)
+    {
+    case 'A':
+        {
+            myACM = _acmA;
+            break;
+        }
+    case 'B':
+        {
+            myACM = _acmB;
+            break;
+        }
+    default:
+        return -1;
+    }
+
+    if(myACM == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    CodecInst myCodecParam;
+
+    // Get all codec parameters before registering
+    CHECK_ERROR(AudioCodingModule::Codec(codecName, myCodecParam, samplingFreqHz));
+    myCodecParam.rate = rate;
+    myCodecParam.pacsize = packSize;
+    // Start with register codec as mono, to test that changing to stereo works.
+    myCodecParam.channels = 1;
+    CHECK_ERROR(myACM->RegisterSendCodec(myCodecParam));
+    // Register codec as stereo.
+    if (channels == 2) {
+      myCodecParam.channels = 2;
+      CHECK_ERROR(myACM->RegisterSendCodec(myCodecParam));
+    }
+    // Initialization was successful.
+    return 0;
+}
+
+// Stream audio from the selected input file through ACM A, deliver the
+// encoded packets to ACM B via |channel|, and write B's decoded output to
+// _outFileB, validating payload sizes and timestamp deltas along the way.
+//
+// in_channels  - channels in the input audio (1 = mono file, 2 = stereo file)
+// out_channels - channels the send codec produces; used to scale the
+//                expected payload size
+void TestStereo::Run(TestPackStereo* channel, int in_channels, int out_channels)
+{
+    AudioFrame audioFrame;
+
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+    WebRtc_UWord16 recSize;
+    WebRtc_UWord32 timeStampDiff;
+    channel->ResetPayloadSize();
+    int errorCount = 0;
+
+    // Only run 1 second for each test case
+    // TODO(tlegrand): either remove |_counter| or start using it as the comment
+    // above says. Now |_counter| is always 0.
+    while(_counter<1000)
+    {
+        // Add 10 msec to ACM
+        if (in_channels == 1) {
+          if (_in_file_mono.EndOfFile()) {
+            break;
+          }
+          _in_file_mono.Read10MsData(audioFrame);
+        } else {
+          if (_in_file_stereo.EndOfFile()) {
+            break;
+          }
+          _in_file_stereo.Read10MsData(audioFrame);
+        }
+        CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+
+        // Run sender side of ACM
+        CHECK_ERROR(_acmA->Process());
+
+        // Verify that the received packet size matches the settings. 65535 is
+        // the CN marker (-1 stored in an unsigned 16-bit field) and is skipped.
+        // Use logical && here; the original bitwise & only worked because both
+        // comparison results are already bool.
+        recSize = channel->GetPayloadSize();
+        if ((0 < recSize) && (recSize < 65535)) {
+            if ((recSize != _packSizeBytes * out_channels) &&
+                (_packSizeBytes < 65535)) {
+                errorCount++;
+            }
+
+            // Verify that the timestamp is updated with expected length
+            timeStampDiff = channel->GetTimeStampDiff();
+            if ((_counter > 10) && (timeStampDiff != _packSizeSamp)) {
+                errorCount++;
+            }
+        }
+
+        // Run received side of ACM
+        CHECK_ERROR(_acmB->PlayoutData10Ms(outFreqHzB, audioFrame));
+
+        // Write output speech to file
+        _outFileB.Write10MsData(
+            audioFrame._payloadData,
+            audioFrame._payloadDataLengthInSamples * audioFrame._audioChannel);
+    }
+
+    if (errorCount) 
+    {
+        printf(" - test FAILED\n");
+    } 
+    else if(_testMode != 0)
+    {
+        printf(" - test PASSED\n");
+    }
+
+    // Reset _counter
+    if (_counter == 1000) {
+        _counter = 0;
+    }
+    // Rewind whichever input file was exhausted so the next case starts fresh.
+    if (_in_file_mono.EndOfFile()) {
+        _in_file_mono.Rewind();
+    }
+    if (_in_file_stereo.EndOfFile()) {
+        _in_file_stereo.Rewind();
+    }
+}
+
+// Open the decoded-output PCM file for the given test number.
+void TestStereo::OpenOutFile(WebRtc_Word16 testNumber)
+{
+    char fileName[500];
+    // Bounded snprintf: a long output path cannot overflow the stack buffer
+    // (sprintf performs no length check).
+    snprintf(fileName, sizeof(fileName), "%s/teststereo_out_%02d.pcm",
+             webrtc::test::OutputPath().c_str(), testNumber);
+    _outFileB.Open(fileName, 32000, "wb");
+}
+
+// In verbose mode, print "sendCodec -> receiveCodec" for the A-to-B link.
+// The codec queries themselves are issued in every mode, exactly as before.
+void TestStereo::DisplaySendReceiveCodec()
+{
+    CodecInst codecInfo;
+    _acmA->SendCodec(codecInfo);
+    if(_testMode != 0) {
+        printf("%s -> ", codecInfo.plname);
+    }
+    _acmB->ReceiveCodec(codecInfo);
+    if(_testMode != 0) {
+        printf("%s\n", codecInfo.plname);
+    }
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/TestStereo.h b/trunk/src/modules/audio_coding/main/test/TestStereo.h
new file mode 100644
index 0000000..94a0c56
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestStereo.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_STEREO_H
+#define TEST_STEREO_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+class TestPackStereo : public AudioPacketizationCallback  // Test channel: receives packetized audio from one ACM and feeds it to another.
+{
+public:
+    TestPackStereo();
+    ~TestPackStereo();
+    
+    void RegisterReceiverACM(AudioCodingModule* acm);  // Set the ACM that incoming payloads are forwarded to.
+    
+    virtual WebRtc_Word32 SendData(const FrameType frameType,
+        const WebRtc_UWord8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData, 
+        const WebRtc_UWord16 payloadSize,
+        const RTPFragmentationHeader* fragmentation);
+
+    WebRtc_UWord16 GetPayloadSize();  // Size in bytes of the last payload seen.
+    WebRtc_UWord32 GetTimeStampDiff();  // Timestamp delta between the two most recent packets.
+    void ResetPayloadSize();
+    void SetCodecType(int codecType);
+
+
+private:
+    AudioCodingModule* _receiverACM;  // Not owned.
+    WebRtc_Word16            _seqNo;  // RTP sequence number for outgoing packets.
+    WebRtc_UWord8            _payloadData[60 * 32 * 2 * 2];  // Scratch buffer: 60 ms @ 32 kHz, stereo, 16-bit.
+    WebRtc_UWord32           _timeStampDiff;
+    WebRtc_UWord32           _lastInTimestamp;
+    WebRtc_UWord64           _totalBytes;
+    WebRtc_UWord16           _payloadSize;
+    WebRtc_UWord16           _noChannels;
+    int                    _codecType;
+};
+
+class TestStereo : public ACMTest  // Stereo encode/decode test over a simulated A->B channel.
+{
+public:
+    TestStereo(int testMode);  // testMode 0 = quick autotest, non-zero = verbose/extensive.
+    ~TestStereo();
+
+    void Perform();
+private:
+    // The default value of '-1' indicates that the registration is based only
+    // on the codec name and that a matching sampling frequency is not
+    // required. This is useful for codecs that support several sampling frequencies.
+    WebRtc_Word16 RegisterSendCodec(char side, 
+        char* codecName, 
+        WebRtc_Word32 sampFreqHz,
+        int rate,
+        int packSize,
+        int channels);
+
+    void Run(TestPackStereo* channel, int in_channels, int out_channels);  // Stream one file through the codec chain.
+    void OpenOutFile(WebRtc_Word16 testNumber);
+    void DisplaySendReceiveCodec();
+
+    WebRtc_Word32 SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData, 
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation);
+
+    int                    _testMode;  // 0 = quick autotest, non-zero = verbose.
+
+    AudioCodingModule*     _acmA;  // Send side.
+    AudioCodingModule*     _acmB;  // Receive side.
+
+    TestPackStereo*        _channelA2B;  // Owned; simulated transport A -> B.
+
+    PCMFile                _in_file_stereo;
+    PCMFile                _in_file_mono;
+    PCMFile                _outFileB;
+    WebRtc_Word16          _testCntr;
+    WebRtc_UWord16         _packSizeSamp;  // Packet size in samples.
+    WebRtc_UWord16         _packSizeBytes;  // Expected packet size in bytes.
+    int                    _counter;
+    int                    _codecType;
+};
+
+} // namespace webrtc
+
+#endif
+
diff --git a/trunk/src/modules/audio_coding/main/test/TestVADDTX.cc b/trunk/src/modules/audio_coding/main/test/TestVADDTX.cc
new file mode 100644
index 0000000..d7329fa
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestVADDTX.cc
@@ -0,0 +1,511 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestVADDTX.h"
+
+#include <iostream>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+TestVADDTX::TestVADDTX(int testMode):  // ACMs and channel are created lazily in Perform().
+_acmA(NULL),
+_acmB(NULL),
+_channelA2B(NULL),
+_testResults(0)
+{
+    //testMode == 1 for more extensive testing
+    //testMode == 0 for quick test (autotest)
+   _testMode = testMode;
+}
+
+TestVADDTX::~TestVADDTX()  // Releases both ACMs and the owned channel; safe if Perform() was never run.
+{
+    if(_acmA != NULL)
+    {
+        AudioCodingModule::Destroy(_acmA);  // ACMs must be destroyed through the factory, not delete.
+        _acmA = NULL;
+    }
+    if(_acmB != NULL)
+    {
+        AudioCodingModule::Destroy(_acmB);
+        _acmB = NULL;
+    }
+    if(_channelA2B != NULL)
+    {
+        delete _channelA2B;
+        _channelA2B = NULL;
+    }
+}
+
+void TestVADDTX::Perform()  // Entry point: runs the VAD/DTX test suite for each enabled codec.
+{
+    if(_testMode == 0)
+    {
+        printf("Running VAD/DTX Test");
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
+                     "---------- TestVADDTX ----------");
+    }
+    char fileName[] = "./test/data/audio_coding/testfile32kHz.pcm";
+    _inFileA.Open(fileName, 32000, "rb");
+
+    _acmA = AudioCodingModule::Create(0);
+    _acmB = AudioCodingModule::Create(1);
+
+    _acmA->InitializeReceiver();
+    _acmB->InitializeReceiver();
+
+    WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+    CodecInst myCodecParam;
+    if(_testMode != 0)
+    {
+        printf("Registering codecs at receiver... \n");
+    }
+    for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+    {
+        _acmB->Codec(n, myCodecParam);
+        if(_testMode != 0)
+        {
+            printf("%s\n", myCodecParam.plname);
+        }
+        _acmB->RegisterReceiveCodec(myCodecParam);  // Receiver accepts every known codec.
+    }
+
+    // Create and connect the channel
+    _channelA2B = new Channel;
+    _acmA->RegisterTransportCallback(_channelA2B);
+    _channelA2B->RegisterReceiverACM(_acmB);
+
+    _acmA->RegisterVADCallback(&_monitor);  // Monitor counts each emitted frame type.
+
+
+    WebRtc_Word16 testCntr = 1;
+    // BUG FIX: failures are accumulated in the member _testResults (via
+    // runTestCases); the old local 'testResults' stayed 0 and hid failures.
+
+#ifdef WEBRTC_CODEC_ISAC
+    // Open outputfile
+    OpenOutFile(testCntr++);
+
+    // Register iSAC WB as send codec
+    char nameISAC[] = "ISAC";
+    RegisterSendCodec('A', nameISAC, 16000);
+
+    // Run the five test cases
+    runTestCases();
+
+    // Close file
+    _outFileB.Close();
+
+   // Open outputfile
+    OpenOutFile(testCntr++);
+
+    // Register iSAC SWB as send codec
+    RegisterSendCodec('A', nameISAC, 32000);
+
+    // Run the five test cases
+    runTestCases();
+
+    // Close file
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    // Open outputfile
+    OpenOutFile(testCntr++);
+
+    // Register iLBC as send codec
+    char nameILBC[] = "ilbc";
+    RegisterSendCodec('A', nameILBC);
+
+    // Run the five test cases
+    runTestCases();
+
+    // Close file
+    _outFileB.Close();
+
+#endif
+    if(_testMode) {
+        printf("Done!\n");
+    }
+
+    printf("VAD/DTX test completed with %d subtests failed\n", _testResults);
+    if (_testResults > 0)
+    {
+        printf("Press return\n\n");
+        getchar();
+    }
+}
+
+void TestVADDTX::runTestCases()  // Runs the five VAD/DTX setting combinations for the current send codec.
+{
+    if(_testMode != 0)
+    {
+        CodecInst myCodecParam;
+        _acmA->SendCodec(myCodecParam);
+        printf("%s\n", myCodecParam.plname);
+    }
+    else
+    {
+        printf(".");  // Progress dot in quick (autotest) mode.
+    }
+    // #1 DTX = OFF, VAD = ON, VADNormal
+    if(_testMode != 0)
+        printf("Test #1 ");
+    SetVAD(false, true, VADNormal);
+    Run();
+    _testResults += VerifyTest();  // VerifyTest() returns 1 on failure, 0 on pass.
+
+    // #2 DTX = OFF, VAD = ON, VADAggr
+    if(_testMode != 0)
+        printf("Test #2 ");
+    SetVAD(false, true, VADAggr);
+    Run();
+    _testResults += VerifyTest();
+
+    // #3 DTX = ON, VAD = ON, VADLowBitrate
+    if(_testMode != 0)
+        printf("Test #3 ");
+    SetVAD(true, true, VADLowBitrate);
+    Run();
+    _testResults += VerifyTest();
+
+    // #4 DTX = ON, VAD = ON, VADVeryAggr
+    if(_testMode != 0)
+        printf("Test #4 ");
+    SetVAD(true, true, VADVeryAggr);
+    Run();
+    _testResults += VerifyTest();
+
+    // #5 DTX = ON, VAD = OFF, VADNormal
+    if(_testMode != 0)
+        printf("Test #5 ");
+    SetVAD(true, false, VADNormal);
+    Run();
+    _testResults += VerifyTest();
+
+}
+void TestVADDTX::runTestInternalDTX()  // Test #6: replace a codec's internal DTX with WebRTC DTX/CN.
+{
+    // #6 DTX = ON, VAD = ON, VADNormal
+    if(_testMode != 0)
+        printf("Test #6 ");
+
+    SetVAD(true, true, VADNormal);
+    if(_acmA->ReplaceInternalDTXWithWebRtc(true) < 0) {
+        printf("Was not able to replace DTX since CN was not registered\n");
+     }
+    Run();
+    _testResults += VerifyTest();
+}
+
+void TestVADDTX::SetVAD(bool statusDTX, bool statusVAD, WebRtc_Word16 vadMode)  // Apply VAD/DTX settings on side A, record requested vs. actual.
+{
+    bool dtxEnabled, vadEnabled;
+    ACMVADMode vadModeSet;
+
+    if (_acmA->SetVAD(statusDTX, statusVAD, (ACMVADMode) vadMode) < 0) {
+      assert(false);
+    }
+    if (_acmA->VAD(dtxEnabled, vadEnabled, vadModeSet) < 0) {  // Read back what the ACM actually applied.
+      assert(false);
+    }
+
+    if(_testMode != 0)
+    {
+        if(statusDTX != dtxEnabled)
+        {
+            // BUG FIX: print the *requested* value, not the inverse of the actual one.
+            printf("DTX: %s not the same as requested: %s\n",
+            dtxEnabled? "ON":"OFF", statusDTX? "ON":"OFF");
+        }
+        if(((statusVAD == true) && (vadEnabled == false)) ||
+           ((statusVAD == false) && (vadEnabled == false) &&
+               (statusDTX == true)))  // NOTE(review): enabling DTX should force VAD on; 'vadEnabled == false' in the 2nd clause looks intended as 'true' -- confirm.
+        {
+            // BUG FIX: as above, report the requested setting.
+            printf("VAD: %s not the same as requested: %s\n",
+            vadEnabled? "ON":"OFF", statusVAD? "ON":"OFF");
+        }
+        if(vadModeSet != vadMode)
+        {
+            printf("VAD mode: %d not the same as requested: %d\n",
+            (WebRtc_Word16)vadModeSet, (WebRtc_Word16)vadMode);
+        }
+    }
+
+    // Requested VAD/DTX settings
+    _setStruct.statusDTX = statusDTX;
+    _setStruct.statusVAD = statusVAD;
+    _setStruct.vadMode = (ACMVADMode) vadMode;
+
+    // VAD settings after setting VAD in ACM
+    _getStruct.statusDTX = dtxEnabled;
+    _getStruct.statusVAD = vadEnabled;
+    _getStruct.vadMode = vadModeSet;
+
+}
+
+VADDTXstruct TestVADDTX::GetVAD()  // Return side A's current VAD/DTX state as a VADDTXstruct.
+{
+    VADDTXstruct retStruct;
+    bool dtxEnabled, vadEnabled;
+    ACMVADMode vadModeSet;
+
+    if (_acmA->VAD(dtxEnabled, vadEnabled, vadModeSet) < 0) {
+      assert(false);
+    }
+
+    retStruct.statusDTX = dtxEnabled;
+    retStruct.statusVAD = vadEnabled;
+    retStruct.vadMode = vadModeSet;
+    return retStruct;
+}
+
+WebRtc_Word16 TestVADDTX::RegisterSendCodec(char side,  // 'A' or 'B' selects the ACM; returns 0 on success, -1 on bad side/NULL ACM.
+                                          char* codecName,
+                                          WebRtc_Word32 samplingFreqHz,
+                                          WebRtc_Word32 rateKbps)
+{
+    if(_testMode != 0)
+    {
+        printf("Registering %s for side %c\n", codecName, side);
+    }
+    std::cout << std::flush;
+    AudioCodingModule* myACM;
+    switch(side)
+    {
+    case 'A':
+        {
+            myACM = _acmA;
+            break;
+        }
+    case 'B':
+        {
+            myACM = _acmB;
+            break;
+        }
+    default:
+        return -1;
+    }
+
+    if(myACM == NULL)
+    {
+        return -1;
+    }
+
+    CodecInst myCodecParam;
+    for(WebRtc_Word16 codecCntr = 0; codecCntr < myACM->NumberOfCodecs();
+        codecCntr++)
+    {
+        CHECK_ERROR(myACM->Codec((WebRtc_UWord8)codecCntr, myCodecParam));
+        if(!STR_CASE_CMP(myCodecParam.plname, codecName))
+        {
+            // -1 means "any" for samplingFreqHz / rateKbps.
+            if((samplingFreqHz == -1) || (myCodecParam.plfreq == samplingFreqHz))
+            {
+                if((rateKbps == -1) || (myCodecParam.rate == rateKbps))
+                {
+                    break;
+                }
+            }
+        }
+    }
+
+    CHECK_ERROR(myACM->RegisterSendCodec(myCodecParam));  // NOTE(review): if no codec matched, the loop falls through and the last enumerated codec is registered -- confirm intended.
+
+    // Registration was successful.
+    return 0;
+}
+
+void TestVADDTX::Run()  // Stream the whole input file once: A encodes -> channel -> B decodes to _outFileB.
+{
+    AudioFrame audioFrame;
+
+    WebRtc_UWord16 SamplesIn10MsecA = _inFileA.PayloadLength10Ms();
+    WebRtc_UWord32 timestampA = 1;
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+
+    while(!_inFileA.EndOfFile())
+    {
+        _inFileA.Read10MsData(audioFrame);
+        audioFrame._timeStamp = timestampA;
+        timestampA += SamplesIn10MsecA;  // Advance timestamp by one 10 ms block.
+        CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+
+        CHECK_ERROR(_acmA->Process());  // Encode and push packets through the transport callback.
+
+        CHECK_ERROR(_acmB->PlayoutData10Ms(outFreqHzB, audioFrame));
+        _outFileB.Write10MsData(audioFrame._payloadData, audioFrame._payloadDataLengthInSamples);
+    }
+#ifdef PRINT_STAT
+    _monitor.PrintStatistics(_testMode);
+#endif
+    _inFileA.Rewind();  // Leave the input file ready for the next subtest.
+    _monitor.GetStatistics(_statCounter);  // Snapshot frame-type counters for VerifyTest().
+    _monitor.ResetStatistics();
+}
+
+void TestVADDTX::OpenOutFile(WebRtc_Word16 testNumber)  // Open the 16 kHz output PCM file; name depends on test mode.
+{
+    char fileName[500];
+    if(_testMode == 0)
+    {
+        sprintf(fileName, "%s/testVADDTX_autoFile_%02d.pcm",  // NOTE(review): assumes OutputPath() stays well under 500 chars -- confirm.
+                webrtc::test::OutputPath().c_str(), testNumber);
+    }
+    else
+    {
+        sprintf(fileName, "%s/testVADDTX_outFile_%02d.pcm",
+                webrtc::test::OutputPath().c_str(), testNumber);
+    }
+    _outFileB.Open(fileName, 16000, "wb");
+}
+
+
+WebRtc_Word16 TestVADDTX::VerifyTest()  // Compare requested vs. actual VAD/DTX state and frame-type counters; returns 0 = pass, 1 = fail.
+{
+    // Verify empty frame result
+    WebRtc_UWord8 statusEF = 0;  // Non-zero if frame-type counters deviate from the expected pattern.
+    WebRtc_UWord8 vadPattern = 0;  // Bit 2 = DTX mismatch, bit 1 = VAD mismatch, bit 0 = mode mismatch.
+    WebRtc_UWord8 emptyFramePattern[6];
+    CodecInst myCodecParam;
+    _acmA->SendCodec(myCodecParam);
+    bool dtxInUse = true;
+    bool isReplaced = false;
+    // Codecs with built-in DTX only use WebRTC DTX if it was explicitly replaced.
+    if ((STR_CASE_CMP(myCodecParam.plname,"G729") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"G723") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"AMR") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"AMR-wb") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"speex") == 0))
+    {
+        _acmA->IsInternalDTXReplacedWithWebRtc(isReplaced);
+        if (!isReplaced)
+        {
+            dtxInUse = false;
+        }
+    }
+
+    // Check for error in VAD/DTX settings
+    if (_getStruct.statusDTX != _setStruct.statusDTX){
+        // DTX status doesn't match expected
+        vadPattern |= 4;
+    }
+    if (_getStruct.statusDTX){
+        if ((!_getStruct.statusVAD && dtxInUse) || (!dtxInUse && (_getStruct.statusVAD !=_setStruct.statusVAD)))
+        {
+            // Mismatch in VAD setting (DTX forces VAD on when WebRTC DTX is in use).
+            vadPattern |= 2;
+        }
+    } else {
+        if (_getStruct.statusVAD != _setStruct.statusVAD){
+            // VAD status doesn't match expected
+            vadPattern |= 2;
+        }
+    }
+    if (_getStruct.vadMode != _setStruct.vadMode){
+        // VAD Mode doesn't match expected
+        vadPattern |= 1;
+    }
+
+    // Set expected empty frame pattern
+    int ii;
+    for (ii = 0; ii < 6; ii++) {
+        emptyFramePattern[ii] = 0;
+    }
+    emptyFramePattern[0] = 1; // "kNoEncoding", not important to check. Codecs with packetsize != 80 samples will get this output.
+    emptyFramePattern[1] = 1; // Expect to always receive some frames labeled "kActiveNormalEncoded"
+    emptyFramePattern[2] = (((!_getStruct.statusDTX && _getStruct.statusVAD) || (!dtxInUse && _getStruct.statusDTX))); // "kPassiveNormalEncoded"
+    emptyFramePattern[3] = ((_getStruct.statusDTX && dtxInUse && (_acmA->SendFrequency() == 8000))); // "kPassiveDTXNB"
+    emptyFramePattern[4] = ((_getStruct.statusDTX && dtxInUse && (_acmA->SendFrequency() == 16000))); // "kPassiveDTXWB"
+    emptyFramePattern[5] = ((_getStruct.statusDTX && dtxInUse && (_acmA->SendFrequency() == 32000))); // "kPassiveDTXSWB"
+
+    // Check pattern 1-5 (skip 0)
+    for (int ii = 1; ii < 6; ii++)
+    {
+        if (emptyFramePattern[ii])
+        {
+            statusEF |= (_statCounter[ii] == 0);  // Expected at least one frame of this type.
+        }
+        else
+        {
+            statusEF |= (_statCounter[ii] > 0);  // Expected none of this type.
+        }
+    }
+    if ((statusEF == 0) && (vadPattern == 0))
+    {
+        if(_testMode != 0)
+        {
+            printf(" Test OK!\n");
+        }
+        return 0;
+    }
+    else
+    {
+        if (statusEF)
+        {
+            printf("\t\t\tUnexpected empty frame result!\n");
+        }
+        if (vadPattern)
+        {
+            printf("\t\t\tUnexpected SetVAD() result!\tDTX: %d\tVAD: %d\tMode: %d\n", (vadPattern >> 2) & 1, (vadPattern >> 1) & 1, vadPattern & 1);
+        }
+        return 1;
+    }
+}
+
+ActivityMonitor::ActivityMonitor()  // Zero all six frame-type counters.
+{
+    _counter[0] = _counter[1] = _counter[2] = _counter[3] = _counter[4] = _counter[5] = 0;
+}
+
+ActivityMonitor::~ActivityMonitor()  // Nothing to release.
+{
+}
+
+WebRtc_Word32 ActivityMonitor::InFrameType(WebRtc_Word16 frameType)  // VAD callback: count one frame of the given encoding type. Always returns 0.
+{
+    if ((frameType >= 0) && (frameType < 6)) _counter[frameType]++;  // BUG FIX: guard against out-of-bounds write into _counter[6].
+    return 0;
+}
+
+void ActivityMonitor::PrintStatistics(int testMode)  // Print the counter table (verbose modes only); column order differs from array order.
+{
+    if(testMode != 0)
+    {
+        printf("\n");
+        printf("kActiveNormalEncoded  kPassiveNormalEncoded  kPassiveDTXWB  kPassiveDTXNB kPassiveDTXSWB kFrameEmpty\n");
+
+        printf("%19u", _counter[1]);
+        printf("%22u", _counter[2]);
+        printf("%14u", _counter[3]);
+        printf("%14u", _counter[4]);
+        printf("%14u", _counter[5]);
+        printf("%11u", _counter[0]);
+
+        printf("\n\n");
+    }
+}
+
+void ActivityMonitor::ResetStatistics()  // Zero all six frame-type counters.
+{
+    _counter[0] = _counter[1] = _counter[2] = _counter[3] = _counter[4] = _counter[5] = 0;
+}
+
+void ActivityMonitor::GetStatistics(WebRtc_UWord32* getCounter)  // Copy all six counters into the caller's array (must hold >= 6 entries).
+{
+    for (int ii = 0; ii < 6; ii++)
+    {
+        getCounter[ii] = _counter[ii];
+    }
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/TestVADDTX.h b/trunk/src/modules/audio_coding/main/test/TestVADDTX.h
new file mode 100644
index 0000000..e8f9e1e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TestVADDTX.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_VAD_DTX_H
+#define TEST_VAD_DTX_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+typedef struct   // Snapshot of a VAD/DTX configuration (requested or actual).
+{
+    bool statusDTX;  // DTX enabled.
+    bool statusVAD;  // VAD enabled.
+    ACMVADMode vadMode;  // VAD aggressiveness mode.
+} VADDTXstruct;
+
+class ActivityMonitor : public ACMVADCallback  // Counts encoded frames per WebRtcACMEncodingType for later verification.
+{
+public:
+    ActivityMonitor();
+    ~ActivityMonitor();
+    WebRtc_Word32 InFrameType(WebRtc_Word16 frameType);  // Callback from the ACM for every encoded frame.
+    void PrintStatistics(int testMode);
+    void ResetStatistics();
+    void GetStatistics(WebRtc_UWord32* getCounter);  // getCounter must hold at least 6 entries.
+private:
+    // counting according to
+    /*enum WebRtcACMEncodingType
+    {
+        kNoEncoding,
+        kActiveNormalEncoded,
+        kPassiveNormalEncoded,
+        kPassiveDTXNB,
+        kPassiveDTXWB,
+        kPassiveDTXSWB
+    };*/
+    WebRtc_UWord32 _counter[6];
+};
+
+class TestVADDTX : public ACMTest  // Exercises VAD/DTX setting combinations over a simulated A->B channel.
+{
+public:
+    TestVADDTX(int testMode);  // testMode 0 = quick autotest, non-zero = verbose/extensive.
+    ~TestVADDTX();
+
+    void Perform();
+private:
+    // Registration can be based on codec name only, codec name and sampling
+    // frequency, or codec name, sampling frequency and rate (-1 = "any").
+    WebRtc_Word16 RegisterSendCodec(char side, 
+        char* codecName, 
+        WebRtc_Word32 samplingFreqHz = -1,
+        WebRtc_Word32 rateKhz = -1);
+    void Run();
+    void OpenOutFile(WebRtc_Word16 testNumber);
+    void runTestCases();
+    void runTestInternalDTX();
+    void SetVAD(bool statusDTX, bool statusVAD, WebRtc_Word16 vadMode);
+    VADDTXstruct GetVAD();
+    WebRtc_Word16 VerifyTest();  // Compares requested vs. actual VAD/DTX state and frame stats; 0 = pass, 1 = fail.
+    AudioCodingModule* _acmA;  // Send side.
+    AudioCodingModule* _acmB;  // Receive side.
+
+    Channel*               _channelA2B;  // Owned; simulated transport.
+
+    PCMFile                _inFileA;
+    PCMFile                _outFileB;
+
+    ActivityMonitor        _monitor;  // Frame-type counters fed by the VAD callback.
+    WebRtc_UWord32           _statCounter[6];  // Snapshot of _monitor after each Run().
+
+    int                    _testMode;
+    int                    _testResults;  // Accumulated number of failed subtests.
+    VADDTXstruct           _setStruct;  // Requested settings.
+    VADDTXstruct           _getStruct;  // Settings the ACM actually applied.
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/Tester.cc b/trunk/src/modules/audio_coding/main/test/Tester.cc
new file mode 100644
index 0000000..e35a0ec
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/Tester.cc
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+#include "audio_coding_module.h"
+#include "trace.h"
+
+#include "APITest.h"
+#include "EncodeDecodeTest.h"
+#include "gtest/gtest.h"
+#include "iSACTest.h"
+#include "SpatialAudio.h"
+#include "TestAllCodecs.h"
+#include "TestFEC.h"
+#include "TestStereo.h"
+#include "TestVADDTX.h"
+#include "TwoWayCommunication.h"
+#include "testsupport/fileutils.h"
+
+using webrtc::AudioCodingModule;
+using webrtc::Trace;
+
+// Be sure to create the following directories before running the tests:
+// ./modules/audio_coding/main/test/res_tests
+// ./modules/audio_coding/main/test/res_autotests
+
+// Choose what tests to run by defining one or more of the following:
+#define ACM_AUTO_TEST            // Most common codecs and settings will be tested
+//#define ACM_TEST_ENC_DEC        // You decide what to test in run time.
+                                  // Used for debugging and for testing while implementing.
+//#define ACM_TEST_TWO_WAY        // Debugging
+//#define ACM_TEST_ALL_ENC_DEC    // Loop through all defined codecs and settings
+//#define ACM_TEST_STEREO         // Run stereo and spatial audio tests
+//#define ACM_TEST_VAD_DTX        // Run all VAD/DTX tests
+//#define ACM_TEST_FEC            // Test FEC (also called RED)
+//#define ACM_TEST_CODEC_SPEC_API // Only iSAC has codec specfic APIs in this version
+//#define ACM_TEST_FULL_API       // Test all APIs with threads (long test)
+
+
+void PopulateTests(std::vector<ACMTest*>* tests)  // Heap-allocate the selected test objects; caller takes ownership.
+{
+
+     Trace::CreateTrace();
+     std::string trace_file = webrtc::test::OutputPath() + "acm_trace.txt";
+     Trace::SetTraceFile(trace_file.c_str());
+
+     printf("The following tests will be executed:\n");
+#ifdef ACM_AUTO_TEST
+    printf("  ACM auto test\n");
+    tests->push_back(new webrtc::EncodeDecodeTest(0));
+    tests->push_back(new webrtc::TwoWayCommunication(0));
+    tests->push_back(new webrtc::TestAllCodecs(0));
+    tests->push_back(new webrtc::TestStereo(0));
+    tests->push_back(new webrtc::SpatialAudio(0));
+    tests->push_back(new webrtc::TestVADDTX(0));
+    tests->push_back(new webrtc::TestFEC(0));
+    tests->push_back(new webrtc::ISACTest(0));
+#endif
+#ifdef ACM_TEST_ENC_DEC
+    printf("  ACM encode-decode test\n");
+    tests->push_back(new webrtc::EncodeDecodeTest(2));
+#endif
+#ifdef ACM_TEST_TWO_WAY
+    printf("  ACM two-way communication test\n");
+    tests->push_back(new webrtc::TwoWayCommunication(1));
+#endif
+#ifdef ACM_TEST_ALL_ENC_DEC
+    printf("  ACM all codecs test\n");
+    tests->push_back(new webrtc::TestAllCodecs(1));
+#endif
+#ifdef ACM_TEST_STEREO
+    printf("  ACM stereo test\n");
+    tests->push_back(new webrtc::TestStereo(1));
+    tests->push_back(new webrtc::SpatialAudio(2));
+#endif
+#ifdef ACM_TEST_VAD_DTX
+    printf("  ACM VAD-DTX test\n");
+    tests->push_back(new webrtc::TestVADDTX(1));
+#endif
+#ifdef ACM_TEST_FEC
+    printf("  ACM FEC test\n");
+    tests->push_back(new webrtc::TestFEC(1));
+#endif
+#ifdef ACM_TEST_CODEC_SPEC_API
+    printf("  ACM codec API test\n");
+    tests->push_back(new webrtc::ISACTest(1));
+#endif
+#ifdef ACM_TEST_FULL_API
+    printf("  ACM full API test\n");
+    tests->push_back(new webrtc::APITest());
+#endif
+    printf("\n");
+}
+
+// TODO(kjellander): Make this a proper gtest instead of using this single test
+// to run all the tests.
+TEST(AudioCodingModuleTest, RunAllTests)  // Runs every populated ACM test sequentially and frees it afterwards.
+{
+    std::vector<ACMTest*> tests;
+    PopulateTests(&tests);
+    std::vector<ACMTest*>::iterator it;
+    for (it=tests.begin() ; it < tests.end(); it++)
+    {
+        (*it)->Perform();
+        delete (*it);
+    }
+
+    Trace::ReturnTrace();  // Release the trace created in PopulateTests().
+    printf("ACM test completed\n");
+}
diff --git a/trunk/src/modules/audio_coding/main/test/TimedTrace.cc b/trunk/src/modules/audio_coding/main/test/TimedTrace.cc
new file mode 100644
index 0000000..6bf301f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TimedTrace.cc
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TimedTrace.h"
+#include <math.h>
+
+double TimedTrace::_timeEllapsedSec = 0;
+FILE*  TimedTrace::_timedTraceFile = NULL;
+
+TimedTrace::TimedTrace()  // State is static/shared; nothing per-instance to set up.
+{
+
+}
+
+TimedTrace::~TimedTrace()  // Closes the shared trace file; affects all instances since state is static.
+{
+    if(_timedTraceFile != NULL)
+    {
+        fclose(_timedTraceFile);
+    }
+    _timedTraceFile = NULL;
+}
+
+WebRtc_Word16
+TimedTrace::SetUp(char* fileName)  // Open the shared trace file (first call wins); returns 0 on success, -1 on failure.
+{
+    if(_timedTraceFile == NULL)
+    {
+        _timedTraceFile = fopen(fileName, "w");
+    }
+    if(_timedTraceFile == NULL)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+void
+TimedTrace::SetTimeEllapsed(double timeEllapsedSec)  // Set the shared elapsed-time clock, in seconds.
+{
+    _timeEllapsedSec = timeEllapsedSec;
+}
+
+double
+TimedTrace::TimeEllapsed()  // Current value of the shared elapsed-time clock, in seconds.
+{
+    return _timeEllapsedSec;
+}
+
+void
+TimedTrace::Tick10Msec()  // Advance the shared clock by 10 ms.
+{
+    _timeEllapsedSec += 0.010;
+}
+
+void
+TimedTrace::TimedLogg(char* message)  // Append a timestamped ("sec, min:sec") message to the trace file, if open.
+{    
+    unsigned int minutes = (WebRtc_UWord32)floor(_timeEllapsedSec / 60.0);
+    double seconds = _timeEllapsedSec - minutes * 60;
+    //char myFormat[100] = "%8.2f, %3u:%05.2f: %s\n";
+    if(_timedTraceFile != NULL)
+    {
+        fprintf(_timedTraceFile, "%8.2f, %3u:%05.2f: %s\n", 
+            _timeEllapsedSec, 
+            minutes, 
+            seconds, 
+            message);
+    }
+}
diff --git a/trunk/src/modules/audio_coding/main/test/TimedTrace.h b/trunk/src/modules/audio_coding/main/test/TimedTrace.h
new file mode 100644
index 0000000..d37d287
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TimedTrace.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TIMED_TRACE_H
+#define TIMED_TRACE_H
+
+#include "typedefs.h"
+
+#include <cstdio>
+#include <cstdlib>
+
+
+class TimedTrace  // Test helper that writes timestamped log lines; clock and file are static, shared by all instances.
+{
+public:
+    TimedTrace();
+    ~TimedTrace();  // Closes the shared file for everyone.
+
+    void SetTimeEllapsed(double myTime);  // Seconds.
+    double TimeEllapsed();
+    void Tick10Msec();  // Advance the shared clock by 10 ms.
+    WebRtc_Word16 SetUp(char* fileName);  // 0 on success, -1 if the file cannot be opened.
+    void TimedLogg(char* message);
+
+private:
+    static double _timeEllapsedSec;  // Shared elapsed time, seconds.
+    static FILE*  _timedTraceFile;  // Shared output file; NULL until SetUp().
+
+};
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/TwoWayCommunication.cc b/trunk/src/modules/audio_coding/main/test/TwoWayCommunication.cc
new file mode 100644
index 0000000..c67733a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TwoWayCommunication.cc
@@ -0,0 +1,524 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TwoWayCommunication.h"
+
+#include <cctype>
+#include <stdio.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <Windows.h>
+#endif
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "gtest/gtest.h"
+#include "PCMFile.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+TwoWayCommunication::TwoWayCommunication(int testMode)
+{
+    _testMode = testMode;
+}
+
+TwoWayCommunication::~TwoWayCommunication()
+{
+    AudioCodingModule::Destroy(_acmA);
+    AudioCodingModule::Destroy(_acmB);
+
+    AudioCodingModule::Destroy(_acmRefA);
+    AudioCodingModule::Destroy(_acmRefB);
+
+    delete _channel_A2B;
+    delete _channel_B2A;
+
+    delete _channelRef_A2B;
+    delete _channelRef_B2A;
+#ifdef WEBRTC_DTMF_DETECTION
+    if(_dtmfDetectorA != NULL)
+    {
+        delete _dtmfDetectorA;
+    }
+    if(_dtmfDetectorB != NULL)
+    {
+        delete _dtmfDetectorB;
+    }
+#endif
+    _inFileA.Close();
+    _inFileB.Close();
+    _outFileA.Close();
+    _outFileB.Close();
+    _outFileRefA.Close();
+    _outFileRefB.Close();
+}
+
+
+WebRtc_UWord8
+TwoWayCommunication::ChooseCodec(WebRtc_UWord8* codecID_A,
+                                 WebRtc_UWord8* codecID_B)
+{
+    AudioCodingModule* tmpACM = AudioCodingModule::Create(0);
+    WebRtc_UWord8 noCodec = tmpACM->NumberOfCodecs();
+    CodecInst codecInst;
+    printf("List of Supported Codecs\n");
+    printf("========================\n");
+    for(WebRtc_UWord8 codecCntr = 0; codecCntr < noCodec; codecCntr++)
+    {
+        tmpACM->Codec(codecCntr, codecInst);
+        printf("%d- %s\n", codecCntr, codecInst.plname);
+    }
+    printf("\nChoose a send codec for side A [0]: ");
+    char myStr[15] = "";
+    EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+    *codecID_A = (WebRtc_UWord8)atoi(myStr);
+
+    printf("\nChoose a send codec for side B [0]: ");
+    EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+    *codecID_B = (WebRtc_UWord8)atoi(myStr);
+
+    AudioCodingModule::Destroy(tmpACM);
+    printf("\n");
+    return 0;
+}
+
+WebRtc_Word16
+TwoWayCommunication::ChooseFile(char* fileName, WebRtc_Word16 maxLen,
+                                WebRtc_UWord16* frequencyHz)
+{
+    char tmpName[MAX_FILE_NAME_LENGTH_BYTE];
+    //strcpy(_fileName, "in.pcm");
+    //printf("\n\nPlease enter the input file: ");
+    EXPECT_TRUE(fgets(tmpName, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
+    tmpName[MAX_FILE_NAME_LENGTH_BYTE-1] = '\0';
+    WebRtc_Word16 n = 0;
+
+    // Remove leading whitespace/control characters (tmpName is NUL-terminated above).
+    while((isspace(tmpName[n]) || iscntrl(tmpName[n])) && 
+        (tmpName[n] != 0) && 
+        (n < MAX_FILE_NAME_LENGTH_BYTE))
+    {
+        n++;
+    }
+    if(n > 0)
+    {
+        memmove(tmpName, &tmpName[n], MAX_FILE_NAME_LENGTH_BYTE - n);
+    }
+
+    // Remove trailing spaces. NOTE(review): the loop below reads tmpName[n] before checking n >= 0 — an all-whitespace input can index tmpName[-1]; confirm and fix upstream.
+    n = (WebRtc_Word16)(strlen(tmpName) - 1);
+    if(n >= 0)
+    {
+        while((isspace(tmpName[n]) || iscntrl(tmpName[n])) && 
+            (n >= 0))
+        {
+            n--;
+        }
+    }
+    if(n >= 0)
+    {
+        tmpName[n + 1] = '\0';
+    }
+
+    WebRtc_Word16 len = (WebRtc_Word16)strlen(tmpName);
+    if(len > maxLen)
+    {
+        return -1;
+    }    
+    if(len > 0)
+    {
+        strncpy(fileName, tmpName, len+1);
+    }
+    printf("Enter the sampling frequency (in Hz) of the above file [%u]: ",
+           *frequencyHz);
+    EXPECT_TRUE(fgets(tmpName, 6, stdin) != NULL);
+    WebRtc_UWord16 tmpFreq = (WebRtc_UWord16)atoi(tmpName);
+    if(tmpFreq > 0)
+    {
+        *frequencyHz = tmpFreq;
+    }
+    return 0;
+}
+
+WebRtc_Word16 TwoWayCommunication::SetUp()
+{
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    _acmRefA = AudioCodingModule::Create(3);
+    _acmRefB = AudioCodingModule::Create(4);
+
+    WebRtc_UWord8 codecID_A;
+    WebRtc_UWord8 codecID_B;
+
+    ChooseCodec(&codecID_A, &codecID_B);
+    CodecInst codecInst_A;
+    CodecInst codecInst_B;
+    CodecInst dummyCodec;
+    _acmA->Codec(codecID_A, codecInst_A);
+    _acmB->Codec(codecID_B, codecInst_B);
+
+    _acmA->Codec(6, dummyCodec);
+
+    //--- Set A codecs
+    CHECK_ERROR(_acmA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(codecInst_B));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorA = new(DTMFDetector);
+    CHECK_ERROR(_acmA->RegisterIncomingMessagesCallback(_dtmfDetectorA,
+                                                        ACMUSA));
+#endif
+    //--- Set ref-A codecs
+    CHECK_ERROR(_acmRefA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmRefA->RegisterReceiveCodec(codecInst_B));
+
+    //--- Set B codecs
+    CHECK_ERROR(_acmB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(codecInst_A));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorB = new(DTMFDetector);
+    CHECK_ERROR(_acmB->RegisterIncomingMessagesCallback(_dtmfDetectorB,
+                                                        ACMUSA));
+#endif
+
+    //--- Set ref-B codecs
+    CHECK_ERROR(_acmRefB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmRefB->RegisterReceiveCodec(codecInst_A));
+
+    char fileName[500];
+    char refFileName[500];
+    WebRtc_UWord16 frequencyHz;
+    
+    //--- Input A
+    strcpy(fileName, "./test/data/audio_coding/testfile32kHz.pcm");
+    frequencyHz = 32000;
+    printf("Enter input file at side A [%s]: ", fileName);
+    ChooseFile(fileName, 499, &frequencyHz);
+
+
+    _inFileA.Open(fileName, frequencyHz, "rb");
+
+    //--- Output A
+    std::string outputFileA = webrtc::test::OutputPath() + "outA.pcm";
+    strcpy(fileName, outputFileA.c_str());
+    frequencyHz = 16000;
+    printf("Enter output file at side A [%s]: ", fileName);
+    ChooseFile(fileName, 499, &frequencyHz);
+    _outFileA.Open(fileName, frequencyHz, "wb");
+    strcpy(refFileName, "ref_");
+    strcat(refFileName, fileName);
+    _outFileRefA.Open(refFileName, frequencyHz, "wb");
+
+    //--- Input B
+    strcpy(fileName, "./test/data/audio_coding/testfile32kHz.pcm");
+    frequencyHz = 32000;
+    printf("\n\nEnter input file at side B [%s]: ", fileName);
+    ChooseFile(fileName, 499, &frequencyHz);
+    _inFileB.Open(fileName, frequencyHz, "rb");
+
+    //--- Output B
+    std::string outputFileB = webrtc::test::OutputPath() + "outB.pcm";
+    strcpy(fileName, outputFileB.c_str());
+    frequencyHz = 16000;
+    printf("Enter output file at side B [%s]: ", fileName);
+    ChooseFile(fileName, 499, &frequencyHz);
+    _outFileB.Open(fileName, frequencyHz, "wb");
+    strcpy(refFileName, "ref_");
+    strcat(refFileName, fileName);
+    _outFileRefB.Open(refFileName, frequencyHz, "wb");
+    
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel;
+    _acmA->RegisterTransportCallback(_channel_A2B);
+    _channel_A2B->RegisterReceiverACM(_acmB);
+    //--- Do the same for the reference
+    _channelRef_A2B = new Channel;
+    _acmRefA->RegisterTransportCallback(_channelRef_A2B);
+    _channelRef_A2B->RegisterReceiverACM(_acmRefB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel;
+    _acmB->RegisterTransportCallback(_channel_B2A);
+    _channel_B2A->RegisterReceiverACM(_acmA);
+    //--- Do the same for reference
+    _channelRef_B2A = new Channel;
+    _acmRefB->RegisterTransportCallback(_channelRef_B2A);
+    _channelRef_B2A->RegisterReceiverACM(_acmRefA);
+
+    // Use FAX playout mode on side B so the clicks this test
+    // deliberately provokes are more audible in the output.
+    _acmB->SetPlayoutMode(fax);
+    _acmRefB->SetPlayoutMode(fax);
+
+    return 0;
+}
+
+WebRtc_Word16 TwoWayCommunication::SetUpAutotest()
+{
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    _acmRefA = AudioCodingModule::Create(3);
+    _acmRefB = AudioCodingModule::Create(4);
+
+    CodecInst codecInst_A;
+    CodecInst codecInst_B;
+    CodecInst dummyCodec;
+
+    _acmA->Codec("ISAC", codecInst_A, 16000);
+    _acmB->Codec("L16", codecInst_B, 8000);
+    _acmA->Codec(6, dummyCodec);
+
+    //--- Set A codecs
+    CHECK_ERROR(_acmA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(codecInst_B));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorA = new(DTMFDetector);
+    CHECK_ERROR(_acmA->RegisterIncomingMessagesCallback(_dtmfDetectorA,
+                                                        ACMUSA));
+#endif
+
+    //--- Set ref-A codecs
+    CHECK_ERROR(_acmRefA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmRefA->RegisterReceiveCodec(codecInst_B));
+
+    //--- Set B codecs
+    CHECK_ERROR(_acmB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(codecInst_A));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorB = new(DTMFDetector);
+    CHECK_ERROR(_acmB->RegisterIncomingMessagesCallback(_dtmfDetectorB,
+                                                        ACMUSA));
+#endif
+
+    //--- Set ref-B codecs
+    CHECK_ERROR(_acmRefB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmRefB->RegisterReceiveCodec(codecInst_A));
+
+    char fileName[500];
+    char refFileName[500];
+    WebRtc_UWord16 frequencyHz;
+
+
+    //--- Input A
+    strcpy(fileName, "./test/data/audio_coding/testfile32kHz.pcm");
+    frequencyHz = 16000;
+    _inFileA.Open(fileName, frequencyHz, "rb");
+
+    //--- Output A
+    std::string outputFileA = webrtc::test::OutputPath() + "outAutotestA.pcm";
+    strcpy(fileName, outputFileA.c_str());
+    frequencyHz = 16000;
+    _outFileA.Open(fileName, frequencyHz, "wb");
+    std::string outputRefFileA = webrtc::test::OutputPath() + "ref_outAutotestA.pcm";
+    strcpy(refFileName, outputRefFileA.c_str());
+    _outFileRefA.Open(refFileName, frequencyHz, "wb");
+
+    //--- Input B
+    strcpy(fileName, "./test/data/audio_coding/testfile32kHz.pcm");
+    frequencyHz = 16000;
+    _inFileB.Open(fileName, frequencyHz, "rb");
+
+    //--- Output B
+    std::string outputFileB = webrtc::test::OutputPath() + "outAutotestB.pcm";
+    strcpy(fileName, outputFileB.c_str());
+    frequencyHz = 16000;
+    _outFileB.Open(fileName, frequencyHz, "wb");
+    std::string outputRefFileB = webrtc::test::OutputPath() + "ref_outAutotestB.pcm";
+    strcpy(refFileName, outputRefFileB.c_str());
+    _outFileRefB.Open(refFileName, frequencyHz, "wb");
+
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel;
+    _acmA->RegisterTransportCallback(_channel_A2B);
+    _channel_A2B->RegisterReceiverACM(_acmB);
+    //--- Do the same for the reference
+    _channelRef_A2B = new Channel;
+    _acmRefA->RegisterTransportCallback(_channelRef_A2B);
+    _channelRef_A2B->RegisterReceiverACM(_acmRefB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel;
+    _acmB->RegisterTransportCallback(_channel_B2A);
+    _channel_B2A->RegisterReceiverACM(_acmA);
+    //--- Do the same for reference
+    _channelRef_B2A = new Channel;
+    _acmRefB->RegisterTransportCallback(_channelRef_B2A);
+    _channelRef_B2A->RegisterReceiverACM(_acmRefA);
+
+    // Use FAX playout mode on side B so the clicks this test
+    // deliberately provokes are more audible in the output.
+    _acmB->SetPlayoutMode(fax);
+    _acmRefB->SetPlayoutMode(fax);
+
+    return 0;
+}
+
+void
+TwoWayCommunication::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running TwoWayCommunication Test");
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                     "---------- TwoWayCommunication ----------");
+        SetUpAutotest();
+    }
+    else
+    {
+        SetUp();
+    }
+    unsigned int msecPassed = 0;
+    unsigned int secPassed  = 0;
+
+    WebRtc_Word32 outFreqHzA = _outFileA.SamplingFrequency();
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+
+    AudioFrame audioFrame;
+
+    CodecInst codecInst_B;
+    CodecInst dummy;
+
+    _acmB->SendCodec(codecInst_B);
+
+    if(_testMode != 0)
+    {
+        printf("\n");
+        printf("sec:msec                   A                              B\n");
+        printf("--------                 -----                        -----\n");
+    }
+
+    while(!_inFileA.EndOfFile() && !_inFileB.EndOfFile())
+    {
+        _inFileA.Read10MsData(audioFrame);
+        _acmA->Add10MsData(audioFrame);
+        _acmRefA->Add10MsData(audioFrame);
+
+        _inFileB.Read10MsData(audioFrame);
+        _acmB->Add10MsData(audioFrame);
+        _acmRefB->Add10MsData(audioFrame);
+
+
+        _acmA->Process();
+        _acmB->Process();
+        _acmRefA->Process();
+        _acmRefB->Process();
+
+        _acmA->PlayoutData10Ms(outFreqHzA, audioFrame);
+        _outFileA.Write10MsData(audioFrame);
+
+        _acmRefA->PlayoutData10Ms(outFreqHzA, audioFrame);
+        _outFileRefA.Write10MsData(audioFrame);
+
+        _acmB->PlayoutData10Ms(outFreqHzB, audioFrame);
+        _outFileB.Write10MsData(audioFrame);
+
+        _acmRefB->PlayoutData10Ms(outFreqHzB, audioFrame);
+        _outFileRefB.Write10MsData(audioFrame);
+
+        msecPassed += 10;
+        if(msecPassed >= 1000)
+        {
+            msecPassed = 0;
+            secPassed++;
+        }
+        if(((secPassed%5) == 4) && (msecPassed == 0))
+        {
+            if(_testMode != 0)
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+            }
+            _acmA->ResetEncoder();
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "---------- Errors epected");
+                printf(".");
+            }
+            else
+            {
+                printf("Reset Encoder (click in side B)               ");
+                printf("Initialize Sender (no audio in side A)\n");
+            }
+            CHECK_ERROR(_acmB->InitializeSender());
+        }
+        if(((secPassed%5) == 4) && (msecPassed >= 990))
+        {
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "----- END: Errors epected");
+                printf(".");
+            }
+            else
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+                printf("                                              ");
+                printf("Register Send Codec (audio back in side A)\n");
+            }
+            CHECK_ERROR(_acmB->RegisterSendCodec(codecInst_B));
+            CHECK_ERROR(_acmB->SendCodec(dummy));
+        }
+        if(((secPassed%7) == 6) && (msecPassed == 0))
+        {
+            CHECK_ERROR(_acmB->ResetDecoder());
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "---------- Errors epected");
+                printf(".");
+            }
+            else
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+                printf("Initialize Receiver (no audio in side A)      ");
+                printf("Reset Decoder\n");
+            }
+            CHECK_ERROR(_acmA->InitializeReceiver());
+        }
+        if(((secPassed%7) == 6) && (msecPassed >= 990))
+        {
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "----- END: Errors epected");
+                printf(".");
+            }
+            else
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+                printf("Register Receive Coded (audio back in side A)\n");
+            }
+            CHECK_ERROR(_acmA->RegisterReceiveCodec(codecInst_B));
+        }
+        //Sleep(9);
+    }
+    if(_testMode == 0)
+    {
+        printf("Done!\n");
+    }
+
+#ifdef WEBRTC_DTMF_DETECTION
+    printf("\nDTMF at Side A\n");
+    _dtmfDetectorA->PrintDetectedDigits();
+
+    printf("\nDTMF at Side B\n");
+    _dtmfDetectorB->PrintDetectedDigits();
+#endif
+
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/TwoWayCommunication.h b/trunk/src/modules/audio_coding/main/test/TwoWayCommunication.h
new file mode 100644
index 0000000..0b33317
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/TwoWayCommunication.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TWO_WAY_COMMUNICATION_H
+#define TWO_WAY_COMMUNICATION_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "audio_coding_module.h"
+#include "utility.h"
+
+namespace webrtc {
+
+class TwoWayCommunication : public ACMTest
+{
+public:
+    TwoWayCommunication(int testMode = 1);
+    ~TwoWayCommunication();
+
+    void Perform();
+private:
+    WebRtc_UWord8 ChooseCodec(WebRtc_UWord8* codecID_A, WebRtc_UWord8* codecID_B);
+    WebRtc_Word16 ChooseFile(char* fileName, WebRtc_Word16 maxLen, WebRtc_UWord16* frequencyHz);
+    WebRtc_Word16 SetUp();
+    WebRtc_Word16 SetUpAutotest();
+
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    AudioCodingModule* _acmRefA;
+    AudioCodingModule* _acmRefB;
+
+    Channel* _channel_A2B;
+    Channel* _channel_B2A;
+
+    Channel* _channelRef_A2B;
+    Channel* _channelRef_B2A;
+
+    PCMFile _inFileA;
+    PCMFile _inFileB;
+
+    PCMFile _outFileA;
+    PCMFile _outFileB;
+
+    PCMFile _outFileRefA;
+    PCMFile _outFileRefB;
+
+    DTMFDetector* _dtmfDetectorA;
+    DTMFDetector* _dtmfDetectorB;
+
+    int _testMode;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/iSACTest.cc b/trunk/src/modules/audio_coding/main/test/iSACTest.cc
new file mode 100644
index 0000000..966c4c5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/iSACTest.cc
@@ -0,0 +1,598 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cctype>
+#include <stdio.h>
+#include <string.h>
+
+#if _WIN32
+#include <windows.h>
+#elif WEBRTC_LINUX
+#include <ctime>
+#else
+#include <sys/time.h>
+#include <time.h>
+#endif 
+
+#include "event_wrapper.h"
+#include "iSACTest.h"
+#include "utility.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+
+namespace webrtc {
+
+void SetISACConfigDefault(
+    ACMTestISACConfig& isacConfig)
+{
+    isacConfig.currentRateBitPerSec = 0;
+    isacConfig.currentFrameSizeMsec = 0;
+    isacConfig.maxRateBitPerSec     = 0;
+    isacConfig.maxPayloadSizeByte   = 0;
+    isacConfig.encodingMode         = -1;
+    isacConfig.initRateBitPerSec    = 0;
+    isacConfig.initFrameSizeInMsec  = 0;
+    isacConfig.enforceFrameSize     = false;
+    return;
+}
+
+
+WebRtc_Word16 SetISAConfig(
+    ACMTestISACConfig& isacConfig,
+    AudioCodingModule* acm,
+    int testMode)
+{
+
+    if((isacConfig.currentRateBitPerSec != 0) ||
+        (isacConfig.currentFrameSizeMsec != 0))
+    {
+        CodecInst sendCodec;
+        acm->SendCodec(sendCodec);
+        if(isacConfig.currentRateBitPerSec < 0)
+        {
+            sendCodec.rate = -1;
+            CHECK_ERROR(acm->RegisterSendCodec(sendCodec));
+            if(testMode != 0)
+            {
+                printf("ISAC-%s Registered in adaptive (channel-dependent) mode.\n", 
+                    (sendCodec.plfreq == 32000)? "swb":"wb");
+            }
+        }
+        else
+        {
+
+            if(isacConfig.currentRateBitPerSec != 0)
+            {
+                sendCodec.rate = isacConfig.currentRateBitPerSec;
+            }
+            if(isacConfig.currentFrameSizeMsec != 0)
+            {
+                sendCodec.pacsize = isacConfig.currentFrameSizeMsec *
+                    (sendCodec.plfreq / 1000);
+            }
+            CHECK_ERROR(acm->RegisterSendCodec(sendCodec));
+            if(testMode != 0)
+            {
+                printf("Target rate is set to %d bit/sec with frame-size %d ms \n",
+                    (int)isacConfig.currentRateBitPerSec,
+                    (int)sendCodec.pacsize / (sendCodec.plfreq / 1000));
+            }
+        }
+    }
+
+    if(isacConfig.maxRateBitPerSec > 0)
+    {
+        CHECK_ERROR(acm->SetISACMaxRate(isacConfig.maxRateBitPerSec));
+        if(testMode != 0)
+        {
+            printf("Max rate is set to %u bit/sec\n",
+                isacConfig.maxRateBitPerSec);
+        }
+    }
+    if(isacConfig.maxPayloadSizeByte > 0)
+    {
+        CHECK_ERROR(acm->SetISACMaxPayloadSize(isacConfig.maxPayloadSizeByte));
+        if(testMode != 0)
+        {
+            printf("Max payload-size is set to %u bit/sec\n",
+                isacConfig.maxPayloadSizeByte);
+        }
+    }
+    if((isacConfig.initFrameSizeInMsec != 0) ||
+        (isacConfig.initRateBitPerSec != 0))
+    {
+        CHECK_ERROR(acm->ConfigISACBandwidthEstimator(
+            (WebRtc_UWord8)isacConfig.initFrameSizeInMsec,
+            (WebRtc_UWord16)isacConfig.initRateBitPerSec, 
+            isacConfig.enforceFrameSize));
+        if((isacConfig.initFrameSizeInMsec != 0) && (testMode != 0))
+        {
+            printf("Initialize BWE to %d msec frame-size\n",
+                isacConfig.initFrameSizeInMsec);
+        }
+        if((isacConfig.initRateBitPerSec != 0) && (testMode != 0))
+        {
+            printf("Initialize BWE to %u bit/sec send-bandwidth\n",
+                isacConfig.initRateBitPerSec);
+        }
+    }
+
+    return 0;
+}
+
+
+ISACTest::ISACTest(int testMode)
+{
+    _testMode = testMode;
+}
+
+ISACTest::~ISACTest()
+{
+    AudioCodingModule::Destroy(_acmA);
+    AudioCodingModule::Destroy(_acmB);
+
+    delete _channel_A2B;
+    delete _channel_B2A;
+}
+
+
+WebRtc_Word16
+ISACTest::Setup()
+{
+    int codecCntr;
+    CodecInst codecParam;
+
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    for(codecCntr = 0; codecCntr < AudioCodingModule::NumberOfCodecs(); codecCntr++)
+    {
+        AudioCodingModule::Codec(codecCntr, codecParam);
+        if(!STR_CASE_CMP(codecParam.plname, "ISAC") && codecParam.plfreq == 16000)
+        {
+            memcpy(&_paramISAC16kHz, &codecParam, sizeof(CodecInst));
+            _idISAC16kHz = codecCntr;
+        }
+        if(!STR_CASE_CMP(codecParam.plname, "ISAC") && codecParam.plfreq == 32000)
+        {
+            memcpy(&_paramISAC32kHz, &codecParam, sizeof(CodecInst));
+            _idISAC32kHz = codecCntr;
+        }        
+    }
+
+    // Register both iSAC-wb and iSAC-swb as receive codecs on both sides.
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(_paramISAC16kHz));
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(_paramISAC32kHz));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(_paramISAC16kHz));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(_paramISAC32kHz));
+
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel;
+    CHECK_ERROR(_acmA->RegisterTransportCallback(_channel_A2B));
+    _channel_A2B->RegisterReceiverACM(_acmB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel;
+    CHECK_ERROR(_acmB->RegisterTransportCallback(_channel_B2A));
+    _channel_B2A->RegisterReceiverACM(_acmA);
+
+    strncpy(_fileNameSWB, "./test/data/audio_coding/testfile32kHz.pcm",
+            MAX_FILE_NAME_LENGTH_BYTE);
+
+    _acmB->RegisterSendCodec(_paramISAC16kHz);
+    _acmA->RegisterSendCodec(_paramISAC32kHz);
+
+    if(_testMode != 0)
+    {
+        printf("Side A Send Codec\n");
+        printf("%s %d\n", _paramISAC32kHz.plname, _paramISAC32kHz.plfreq);
+
+        printf("Side B Send Codec\n");
+        printf("%s %d\n", _paramISAC16kHz.plname, _paramISAC16kHz.plfreq);
+    }
+
+    _inFileA.Open(_fileNameSWB, 32000, "rb");
+    std::string fileNameA = webrtc::test::OutputPath() + "testisac_a.pcm";
+    std::string fileNameB = webrtc::test::OutputPath() + "testisac_b.pcm";
+    _outFileA.Open(fileNameA.c_str(), 32000, "wb");
+    _outFileB.Open(fileNameB.c_str(), 32000, "wb");
+
+    while(!_inFileA.EndOfFile())
+    {
+        Run10ms();
+    }
+    CodecInst receiveCodec;
+    CHECK_ERROR(_acmA->ReceiveCodec(receiveCodec));
+    if(_testMode != 0)
+    {
+        printf("Side A Receive Codec\n");
+        printf("%s %d\n", receiveCodec.plname, receiveCodec.plfreq);
+    }
+
+    CHECK_ERROR(_acmB->ReceiveCodec(receiveCodec));
+    if(_testMode != 0)
+    {
+        printf("Side B Receive Codec\n");
+        printf("%s %d\n", receiveCodec.plname, receiveCodec.plfreq);
+    }
+
+    _inFileA.Close();
+    _outFileA.Close();
+    _outFileB.Close();
+
+    return 0;
+}
+
+
+void
+ISACTest::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running iSAC Test");
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1, "---------- iSACTest ----------");
+    }
+
+    Setup();
+
+    WebRtc_Word16 testNr = 0;
+    ACMTestISACConfig wbISACConfig;
+    ACMTestISACConfig swbISACConfig;
+
+    SetISACConfigDefault(wbISACConfig);
+    SetISACConfigDefault(swbISACConfig);
+
+    wbISACConfig.currentRateBitPerSec = -1;
+    swbISACConfig.currentRateBitPerSec = -1;
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+    if (_testMode != 0)
+    {
+        SetISACConfigDefault(wbISACConfig);
+        SetISACConfigDefault(swbISACConfig);
+
+        wbISACConfig.currentRateBitPerSec = -1;
+        swbISACConfig.currentRateBitPerSec = -1;
+        wbISACConfig.initRateBitPerSec = 13000;
+        wbISACConfig.initFrameSizeInMsec = 60;
+        swbISACConfig.initRateBitPerSec = 20000;
+        swbISACConfig.initFrameSizeInMsec = 30;
+        testNr++;
+        EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+        SetISACConfigDefault(wbISACConfig);
+        SetISACConfigDefault(swbISACConfig);
+
+        wbISACConfig.currentRateBitPerSec = 20000;
+        swbISACConfig.currentRateBitPerSec = 48000;
+        testNr++;
+        EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+        wbISACConfig.currentRateBitPerSec = 16000;
+        swbISACConfig.currentRateBitPerSec = 30000;
+        wbISACConfig.currentFrameSizeMsec = 60;
+        testNr++;
+        EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+    }
+
+    SetISACConfigDefault(wbISACConfig);
+    SetISACConfigDefault(swbISACConfig);
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+    
+    int user_input;
+    if((_testMode == 0) || (_testMode == 1))
+    {
+        swbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)200;
+        wbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)200;
+    }
+    else
+    {
+        printf("Enter the max payload-size for side A: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        swbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)user_input;
+        printf("Enter the max payload-size for side B: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        wbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)user_input;
+    }
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+    _acmA->ResetEncoder();
+    _acmB->ResetEncoder();
+    SetISACConfigDefault(wbISACConfig);
+    SetISACConfigDefault(swbISACConfig);
+
+    if((_testMode == 0) || (_testMode == 1))
+    {
+        swbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)48000;
+        wbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)48000;
+    }
+    else
+    {
+        printf("Enter the max rate for side A: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        swbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)user_input;
+        printf("Enter the max rate for side B: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        wbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)user_input;
+    }
+ 
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+
+    testNr++;
+    if(_testMode == 0)
+    {
+        SwitchingSamplingRate(testNr, 4);
+        printf("Done!\n");
+    }
+    else
+    {
+        SwitchingSamplingRate(testNr, 80);
+    }
+}
+
+
+void
+ISACTest::Run10ms()
+{
+    AudioFrame audioFrame;
+
+    _inFileA.Read10MsData(audioFrame);
+    CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+
+    CHECK_ERROR(_acmB->Add10MsData(audioFrame));
+
+    CHECK_ERROR(_acmA->Process());
+    CHECK_ERROR(_acmB->Process());
+
+    CHECK_ERROR(_acmA->PlayoutData10Ms(32000, audioFrame));
+    _outFileA.Write10MsData(audioFrame);
+
+    CHECK_ERROR(_acmB->PlayoutData10Ms(32000, audioFrame));
+    _outFileB.Write10MsData(audioFrame);
+}
+
+void
+ISACTest::EncodeDecode(
+    int                testNr,
+    ACMTestISACConfig& wbISACConfig,
+    ACMTestISACConfig& swbISACConfig)
+{
+    if(_testMode == 0)
+    {
+        printf(".");
+    }
+    else
+    {
+        printf("\nTest %d:\n\n", testNr);
+    }
+    char fileNameOut[MAX_FILE_NAME_LENGTH_BYTE];
+
+    // Files in Side A 
+    _inFileA.Open(_fileNameSWB, 32000, "rb", true);
+    if(_testMode == 0)
+    {
+        sprintf(fileNameOut,
+                "%s/out_iSACTest_%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "A",
+                testNr);
+    }
+    else
+    {
+        sprintf(fileNameOut,
+                "%s/out%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "A",
+                testNr);
+    }
+    _outFileA.Open(fileNameOut, 32000, "wb");
+
+    // Files in Side B
+    _inFileB.Open(_fileNameSWB, 32000, "rb", true);
+    if(_testMode == 0)
+    {
+        sprintf(fileNameOut,
+                "%s/out_iSACTest_%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "B",
+                testNr);
+    }
+    else
+    {
+        sprintf(fileNameOut,
+                "%s/out%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "B",
+                testNr);
+    }
+    _outFileB.Open(fileNameOut, 32000, "wb");
+    
+    CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC16kHz));
+    CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC32kHz));
+    
+    CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC32kHz));
+    CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC16kHz));
+    if(_testMode != 0)
+    {
+        printf("Side A Sending Super-Wideband \n");
+        printf("Side B Sending Wideband\n\n");
+    }
+
+    SetISAConfig(swbISACConfig, _acmA, _testMode);
+    SetISAConfig(wbISACConfig,  _acmB, _testMode);
+
+    bool adaptiveMode = false;
+    if((swbISACConfig.currentRateBitPerSec == -1) ||
+        (wbISACConfig.currentRateBitPerSec == -1))
+    {
+        adaptiveMode = true;
+    }
+    _myTimer.Reset();
+    _channel_A2B->ResetStats();
+    _channel_B2A->ResetStats();
+
+    char currentTime[500];
+    if(_testMode == 2) printf("\n");
+    CodecInst sendCodec;
+    EventWrapper* myEvent = EventWrapper::Create();
+    myEvent->StartTimer(true, 10);
+    while(!(_inFileA.EndOfFile() || _inFileA.Rewinded()))
+    {
+        Run10ms();
+        _myTimer.Tick10ms();
+        _myTimer.CurrentTimeHMS(currentTime);
+        if(_testMode == 2) printf("\r%s   ", currentTime);
+
+        if((adaptiveMode) && (_testMode != 0))
+        {
+            myEvent->Wait(5000);
+
+            _acmA->SendCodec(sendCodec);
+            if(_testMode == 2) printf("[%d]  ", sendCodec.rate);
+            _acmB->SendCodec(sendCodec);
+            if(_testMode == 2) printf("[%d]  ", sendCodec.rate);
+        }
+    }
+
+    if(_testMode != 0)
+    {
+        printf("\n\nSide A statistics\n\n");
+        _channel_A2B->PrintStats(_paramISAC32kHz);
+
+        printf("\n\nSide B statistics\n\n");
+        _channel_B2A->PrintStats(_paramISAC16kHz);
+    }
+    
+    _channel_A2B->ResetStats();
+    _channel_B2A->ResetStats();
+
+    if(_testMode != 0) printf("\n");
+    _outFileA.Close();
+    _outFileB.Close();
+    _inFileA.Close();
+    _inFileB.Close();
+}
+
+// Test: stream audio A<->B while each side alternates its send codec
+// between iSAC WB (16 kHz) and iSAC SWB (32 kHz). A side flips its codec
+// every time its input file is exhausted; the test stops after
+// 2 * maxSampRateChange switches have happened in total.
+void
+ISACTest::SwitchingSamplingRate(
+    int testNr, 
+    int maxSampRateChange)
+{
+    char fileNameOut[MAX_FILE_NAME_LENGTH_BYTE];
+        
+    // Files in Side A 
+    _inFileA.Open(_fileNameSWB, 32000, "rb");
+    // _testMode == 0 uses the automated-test output naming scheme and
+    // stays silent; other modes print progress and use the short name.
+    if(_testMode == 0)
+    {
+        sprintf(fileNameOut,
+                "%s/out_iSACTest_%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "A",
+                testNr);
+    }
+    else
+    {
+        printf("\nTest %d", testNr);
+        printf("    Alternate between WB and SWB at the sender Side\n\n");
+        sprintf(fileNameOut,
+                "%s/out%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "A",
+                testNr);
+    }
+    _outFileA.Open(fileNameOut, 32000, "wb", true);
+    
+    // Files in Side B
+    _inFileB.Open(_fileNameSWB, 32000, "rb");
+    if(_testMode == 0)
+    {
+        sprintf(fileNameOut,
+                "%s/out_iSACTest_%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "B",
+                testNr);
+    }
+    else
+    {
+        sprintf(fileNameOut, "%s/out%s_%02d.pcm",
+                webrtc::test::OutputPath().c_str(),
+                "B",
+                testNr);
+    }
+    _outFileB.Open(fileNameOut, 32000, "wb", true);
+
+    // Start with A sending SWB (32 kHz) and B sending WB (16 kHz).
+    CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC32kHz));
+    CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC16kHz));
+    if(_testMode != 0)
+    {
+        printf("Side A Sending Super-Wideband \n");
+        printf("Side B Sending Wideband\n");
+    }
+
+    int numSendCodecChanged = 0;
+    _myTimer.Reset();
+    char currentTime[50];
+    while(numSendCodecChanged < (maxSampRateChange<<1))
+    {
+        Run10ms();
+        _myTimer.Tick10ms();
+        _myTimer.CurrentTimeHMS(currentTime);
+        if(_testMode == 2) printf("\r%s", currentTime);
+        // When side A's input wraps, re-open it and flip the send codec.
+        // NOTE(review): both branches re-open the 32 kHz SWB file even when
+        // registering the 16 kHz codec — confirm this is intentional
+        // (relies on downstream resampling).
+        if(_inFileA.EndOfFile())
+        {
+            if(_inFileA.SamplingFrequency() == 16000)
+            {
+                if(_testMode != 0) printf("\nSide A switched to Send Super-Wideband\n");
+                _inFileA.Close();
+                _inFileA.Open(_fileNameSWB, 32000, "rb");
+                CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC32kHz));
+            }
+            else
+            {
+                if(_testMode != 0) printf("\nSide A switched to Send Wideband\n");
+                _inFileA.Close();
+                _inFileA.Open(_fileNameSWB, 32000, "rb");
+                CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC16kHz));
+            }
+            numSendCodecChanged++;
+        }
+
+        // Same switching logic for side B.
+        if(_inFileB.EndOfFile())
+        {
+            if(_inFileB.SamplingFrequency() == 16000)
+            {
+                if(_testMode != 0) printf("\nSide B switched to Send Super-Wideband\n");
+                _inFileB.Close();
+                _inFileB.Open(_fileNameSWB, 32000, "rb");
+                CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC32kHz));
+            }
+            else
+            {
+                if(_testMode != 0) printf("\nSide B switched to Send Wideband\n");
+                _inFileB.Close();
+                _inFileB.Open(_fileNameSWB, 32000, "rb");
+                CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC16kHz));
+            }
+            numSendCodecChanged++;
+        }
+    }
+    _outFileA.Close();
+    _outFileB.Close();
+    _inFileA.Close();
+    _inFileB.Close();
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/iSACTest.h b/trunk/src/modules/audio_coding/main/test/iSACTest.h
new file mode 100644
index 0000000..17bacad
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/iSACTest.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACM_ISAC_TEST_H
+#define ACM_ISAC_TEST_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "audio_coding_module.h"
+#include "utility.h"
+#include "common_types.h"
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+#define NO_OF_CLIENTS             15
+
+namespace webrtc {
+
+// Per-side iSAC settings handed to SetISAConfig() by the tests.
+// In EncodeDecode(), currentRateBitPerSec == -1 on either side switches
+// that run into adaptive-rate mode. Other field semantics depend on
+// SetISAConfig() (not visible here) — see utility.cc/ACM iSAC APIs.
+struct ACMTestISACConfig
+{
+    WebRtc_Word32  currentRateBitPerSec;
+    WebRtc_Word16  currentFrameSizeMsec;
+    WebRtc_UWord32 maxRateBitPerSec;
+    WebRtc_Word16  maxPayloadSizeByte;
+    WebRtc_Word16  encodingMode;
+    WebRtc_UWord32 initRateBitPerSec;
+    WebRtc_Word16  initFrameSizeInMsec;
+    bool           enforceFrameSize;
+};
+
+
+
+// iSAC-specific ACM test harness: drives two AudioCodingModules through
+// loopback channels (A sends SWB 32 kHz, B sends WB 16 kHz) and exercises
+// encode/decode, bandwidth estimation and sampling-rate switching.
+class ISACTest : public ACMTest
+{
+public:
+    ISACTest(int testMode);
+    ~ISACTest();
+
+    // Entry point required by ACMTest; runs the whole suite.
+    void Perform();
+private:
+    WebRtc_Word16 Setup();
+    WebRtc_Word16 SetupConference();
+    WebRtc_Word16 RunConference();    
+    
+
+    // Pushes one 10 ms frame through both directions (A->B and B->A).
+    void Run10ms();
+
+    // One encode/decode pass using the given per-side iSAC settings.
+    void EncodeDecode(
+        int                testNr,
+        ACMTestISACConfig& wbISACConfig,
+        ACMTestISACConfig& swbISACConfig);
+    
+    // Bandwidth-estimation test.
+    void TestBWE(
+        int testNr);
+
+    // Alternates each side between WB and SWB iSAC until
+    // 2 * maxSampRateChange codec switches have occurred.
+    void SwitchingSamplingRate(
+        int testNr, 
+        int maxSampRateChange);
+
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    // Loopback transports between the two ACMs.
+    Channel* _channel_A2B;
+    Channel* _channel_B2A;
+
+    PCMFile _inFileA;
+    PCMFile _inFileB;
+
+    PCMFile _outFileA;
+    PCMFile _outFileB;
+
+    WebRtc_UWord8 _idISAC16kHz;
+    WebRtc_UWord8 _idISAC32kHz;
+    CodecInst _paramISAC16kHz;
+    CodecInst _paramISAC32kHz;
+
+    char _fileNameWB[MAX_FILE_NAME_LENGTH_BYTE];
+    char _fileNameSWB[MAX_FILE_NAME_LENGTH_BYTE];
+
+    ACMTestTimer _myTimer;
+    // 0: automated/silent (different output names); non-zero prints
+    // progress; 2 additionally prints a live timer (see iSACTest.cc).
+    int _testMode;
+    
+    AudioCodingModule* _defaultACM32;
+    AudioCodingModule* _defaultACM16;
+    
+    // Conference-mode fan-out (used by SetupConference/RunConference).
+    AudioCodingModule* _confACM[NO_OF_CLIENTS];
+    AudioCodingModule* _clientACM[NO_OF_CLIENTS];
+    Channel*               _conf2Client[NO_OF_CLIENTS];
+    Channel*               _client2Conf[NO_OF_CLIENTS];
+
+    PCMFile                _clientOutFile[NO_OF_CLIENTS];
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/audio_coding/main/test/utility.cc b/trunk/src/modules/audio_coding/main/test/utility.cc
new file mode 100644
index 0000000..56acbf7
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/utility.cc
@@ -0,0 +1,434 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "utility.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "gtest/gtest.h"
+
+#define NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE 13
+
+namespace webrtc {
+
+// Starts the stopwatch at time zero.
+ACMTestTimer::ACMTestTimer() :
+_msec(0),
+_sec(0),
+_min(0),
+_hour(0)
+{
+    return;
+}
+
+// No resources to release.
+ACMTestTimer::~ACMTestTimer()
+{
+    return;
+}
+
+// Restarts the stopwatch: all elapsed-time fields go back to zero.
+void ACMTestTimer::Reset()
+{
+    _hour = 0;
+    _min  = 0;
+    _sec  = 0;
+    _msec = 0;
+}
+// Advances the clock by 10 ms.
+void ACMTestTimer::Tick10ms()
+{
+    _msec += 10;
+    Adjust();
+    return;
+}
+
+// Advances the clock by 1 ms.
+void ACMTestTimer::Tick1ms()
+{
+    _msec++;
+    Adjust();
+    return;
+}
+
+// Advances the clock by 100 ms.
+void ACMTestTimer::Tick100ms()
+{
+    _msec += 100;
+    Adjust();
+    return;
+}
+
+// Advances the clock by one second.
+void ACMTestTimer::Tick1sec()
+{
+    _sec++;
+    Adjust();
+    return;
+}
+
+// Formats the elapsed time as "HHHH:MM:SS.mmm" into |currTime|; the caller
+// must supply a sufficiently large buffer (callers in this file use 50+).
+void ACMTestTimer::CurrentTimeHMS(char* currTime)
+{
+    sprintf(currTime, "%4lu:%02u:%06.3f", _hour, _min, (double)_sec + (double)_msec / 1000.); 
+    return;
+}
+
+// Copies the split-out components of the elapsed time into the caller's
+// output references.
+void ACMTestTimer::CurrentTime(unsigned long& h, unsigned char& m,
+                               unsigned char& s, unsigned short& ms)
+{
+    h  = _hour;
+    m  = _min;
+    s  = _sec;
+    ms = _msec;
+}
+
+// Normalizes the time fields after a tick: overflow in milliseconds
+// carries into seconds, seconds into minutes, minutes into hours.
+// Division/modulo is arithmetically identical to the original
+// subtract-a-multiple formulation.
+void ACMTestTimer::Adjust()
+{
+    if(_msec >= 1000)
+    {
+        _sec += _msec / 1000;
+        _msec %= 1000;
+    }
+    if(_sec >= 60)
+    {
+        _min += _sec / 60;
+        _sec %= 60;
+    }
+    if(_min >= 60)
+    {
+        _hour += _min / 60;
+        _min %= 60;
+    }
+}
+
+
+// Prompts the user on stdin to pick a codec by index, re-prompting until a
+// valid index is entered. Fills |codecInst| with the chosen codec's
+// parameters and returns 0.
+WebRtc_Word16
+ChooseCodec(
+    CodecInst& codecInst)
+{
+
+    PrintCodecs();
+    WebRtc_UWord8 noCodec = AudioCodingModule::NumberOfCodecs();
+    WebRtc_Word8 codecID;
+    bool outOfRange = false;
+    char myStr[15] = "";
+    do
+    {
+        // Bug fix: re-arm the flag on every pass. The original never
+        // cleared it, so a single invalid entry made the loop spin forever
+        // even after the user subsequently typed a valid index.
+        outOfRange = false;
+        printf("\nChoose a codec [0]: ");
+        EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+        codecID = atoi(myStr);
+        if((codecID < 0) || (codecID >= noCodec))
+        {
+            printf("\nOut of range.\n");
+            outOfRange = true;
+        }
+    } while(outOfRange);
+
+    CHECK_ERROR(AudioCodingModule::Codec((WebRtc_UWord8)codecID, codecInst));
+    return 0;
+}
+
+// Prints an index/name/frequency/rate table of every codec known to
+// AudioCodingModule on stdout.
+void
+PrintCodecs()
+{
+    WebRtc_UWord8 noCodec = AudioCodingModule::NumberOfCodecs();
+        
+    CodecInst codecInst;
+    printf("No  Name                [Hz]    [bps]\n");     
+    for(WebRtc_UWord8 codecCntr = 0; codecCntr < noCodec; codecCntr++)
+    {
+        AudioCodingModule::Codec(codecCntr, codecInst);
+        printf("%2d- %-18s %5d   %6d\n", 
+            codecCntr, codecInst.plname, codecInst.plfreq, codecInst.rate);
+    }
+
+}
+
+// Allocates storage for |len| samples, zero-initialized. Mean and variance
+// tracking start disabled (see SetArithMean()/SetVariance()).
+CircularBuffer::CircularBuffer(WebRtc_UWord32 len):
+_buff(NULL),
+_idx(0),
+_buffIsFull(false),
+_calcAvg(false),
+_calcVar(false),
+_sum(0),
+_sumSqr(0)
+{
+    // NOTE(review): plain new[] throws std::bad_alloc on failure rather
+    // than returning NULL, so the fallback branch below is effectively
+    // dead code.
+    _buff = new double[len];
+    if(_buff == NULL)
+    {
+        _buffLen = 0;
+    }
+    else
+    {
+        for(WebRtc_UWord32 n = 0; n < len; n++)
+        {
+            _buff[n] = 0;
+        }
+        _buffLen = len;
+    }
+}
+
+// Releases the sample storage. delete[] on a NULL pointer is a no-op, so
+// no guard is required.
+CircularBuffer::~CircularBuffer()
+{
+    delete [] _buff;
+    _buff = NULL;
+}
+
+// Overwrites the oldest sample with |newVal| and, when enabled,
+// incrementally maintains the running sum (for ArithMean()) and the
+// running sum of squares (for Variance()) using the identity
+// newVal^2 - oldVal^2 == (newVal - oldVal) * (newVal + oldVal).
+void
+CircularBuffer::Update(
+    const double newVal)
+{
+    assert(_buffLen > 0);
+    
+    // store the value that is going to be overwritten
+    double oldVal = _buff[_idx];
+    // record the new value
+    _buff[_idx] = newVal;
+    // increment the index, to point to where we would
+    // write next
+    _idx++;
+    // it is a circular buffer, if we are at the end
+    // we have to cycle to the beginning 
+    if(_idx >= _buffLen)
+    {
+        // flag that the buffer is filled up.
+        _buffIsFull = true;
+        _idx = 0;
+    }
+    
+    // Update 
+
+    if(_calcAvg)
+    {
+        // for the average we have to update
+        // the sum
+        _sum += (newVal - oldVal);
+    }
+
+    if(_calcVar)
+    {
+        // to calculate variance we have to update
+        // the sum of squares 
+        _sumSqr += (double)(newVal - oldVal) * (double)(newVal + oldVal);
+    }
+}
+
+// Enables/disables running-mean tracking. On an off->on transition the
+// running sum is rebuilt from the samples currently stored so that
+// Update() can maintain it incrementally afterwards.
+void 
+CircularBuffer::SetArithMean(
+    bool enable)
+{
+    assert(_buffLen > 0);
+
+    if(enable && !_calcAvg)
+    {
+        // Sum over the valid region: the whole buffer once it has wrapped,
+        // otherwise only the slots written so far.
+        WebRtc_UWord32 lim;
+        if(_buffIsFull)
+        {
+            lim = _buffLen;
+        }
+        else
+        {
+            lim = _idx;
+        }
+        _sum = 0;
+        for(WebRtc_UWord32 n = 0; n < lim; n++)
+        {
+            _sum += _buff[n];
+        }
+    }
+    _calcAvg = enable;
+}
+
+// Enables/disables tracking of the running sum of squared samples used by
+// Variance(). On an off->on transition the sum of squares is rebuilt from
+// the samples currently stored so Update() can maintain it incrementally.
+void
+CircularBuffer::SetVariance(
+    bool enable)
+{
+    assert(_buffLen > 0);
+
+    if(enable && !_calcVar)
+    {
+        // Sum over the valid region only (whole buffer once wrapped).
+        WebRtc_UWord32 lim;
+        if(_buffIsFull)
+        {
+            lim = _buffLen;
+        }
+        else
+        {
+            lim = _idx;
+        }
+        _sumSqr = 0;
+        for(WebRtc_UWord32 n = 0; n < lim; n++)
+        {
+            _sumSqr += _buff[n] * _buff[n];
+        }
+    }
+    // Bug fix: this previously assigned _calcAvg (copy/paste from
+    // SetArithMean), so variance tracking was never actually toggled.
+    _calcVar = enable;
+}
+
+// Writes the arithmetic mean of the stored samples to |mean|.
+// Returns 0 on success, -1 if no samples have been stored yet.
+WebRtc_Word16
+CircularBuffer::ArithMean(double& mean)
+{
+    assert(_buffLen > 0);
+
+    // Valid-sample count: the full capacity once the buffer has wrapped,
+    // otherwise only the slots written so far.
+    WebRtc_UWord32 numSamples = _buffIsFull ? _buffLen : _idx;
+    if(numSamples == 0)
+    {
+        return -1;
+    }
+    mean = _sum / (double)numSamples;
+    return 0;
+}
+
+// Writes the average of the squared samples to |var|.
+// Returns 0 on success, -1 if no samples have been stored yet.
+// NOTE(review): _sumSqr accumulates x^2 (see Update()/SetVariance()), so
+// this is the mean square E[x^2], not the variance about the mean; the
+// behavior is preserved as-is.
+WebRtc_Word16
+CircularBuffer::Variance(double& var)
+{
+    assert(_buffLen > 0);
+
+    // Valid-sample count, as in ArithMean().
+    WebRtc_UWord32 numSamples = _buffIsFull ? _buffLen : _idx;
+    if(numSamples == 0)
+    {
+        return -1;
+    }
+    var = _sumSqr / (double)numSamples;
+    return 0;
+}
+
+
+
+// Returns true if |payloadName| (compared case-insensitively) names a
+// codec with a statically assigned RTP payload type.
+// NOTE(review): "PCMA" appears twice in the table below, so one of the 13
+// slots is wasted — one entry was probably meant to be a different
+// static-type codec; confirm against RFC 3551 Table 4.
+bool
+FixedPayloadTypeCodec(const char* payloadName)
+{
+    char fixPayloadTypeCodecs[NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE][32] = {
+        "PCMU",
+        "PCMA",
+        "GSM",
+        "G723",
+        "DVI4",
+        "LPC",
+        "PCMA",
+        "G722",
+        "QCELP",
+        "CN",
+        "MPA",
+        "G728",
+        "G729"
+    };
+
+    for(int n = 0; n < NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE; n++)
+    {
+        if(!STR_CASE_CMP(payloadName, fixPayloadTypeCodecs[n]))
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+// Starts with an empty per-digit detection histogram.
+DTMFDetector::DTMFDetector()
+{
+    for(int i = 0; i < 1000; i++)
+    {
+        _toneCntr[i] = 0;
+    }
+}
+
+// Nothing to clean up.
+DTMFDetector::~DTMFDetector()
+{
+}
+
+// Callback for each in-band DTMF detection: logs the digit to stdout and
+// bumps its occurrence counter. The toneEnded flag is ignored.
+WebRtc_Word32 DTMFDetector::IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool /* toneEnded */)
+{
+    fprintf(stdout, "%d-",digitDtmf);
+    _toneCntr[digitDtmf]++;
+    return 0;
+}
+
+// Dumps, for every digit value seen at least once, its total detected
+// duration (each IncomingDtmf() hit is counted as 10 ms).
+void DTMFDetector::PrintDetectedDigits()
+{
+    for(WebRtc_Word16 n = 0; n < 1000; n++)
+    {
+        if(_toneCntr[n] > 0)
+        {
+            fprintf(stdout, "%d %u  msec, \n", n, _toneCntr[n]*10);
+        }
+    }
+    fprintf(stdout, "\n");
+    return;
+}
+
+// Clears all per-frame-type counters.
+void 
+VADCallback::Reset()
+{
+    for(int i = 0; i < 6; i++)
+        _numFrameTypes[i] = 0;
+}
+
+// All counters start at zero; delegates to Reset() so the zeroing logic
+// lives in one place.
+VADCallback::VADCallback()
+{
+    Reset();
+}
+
+// Prints how many frames of each encoding type were observed since the
+// last Reset().
+void
+VADCallback::PrintFrameTypes()
+{
+    fprintf(stdout, "No encoding.................. %d\n", _numFrameTypes[0]);
+    fprintf(stdout, "Active normal encoded........ %d\n", _numFrameTypes[1]);
+    fprintf(stdout, "Passive normal encoded....... %d\n", _numFrameTypes[2]);
+    fprintf(stdout, "Passive DTX wideband......... %d\n", _numFrameTypes[3]);
+    fprintf(stdout, "Passive DTX narrowband....... %d\n", _numFrameTypes[4]);
+    fprintf(stdout, "Passive DTX super-wideband... %d\n", _numFrameTypes[5]);
+}
+
+// ACMVADCallback hook: counts the frame type of each encoded frame.
+// NOTE(review): frameType is used unchecked as an index into a 6-element
+// array — assumes 0 <= frameType < 6; confirm against the frame-type enum.
+WebRtc_Word32 
+VADCallback::InFrameType(
+    WebRtc_Word16 frameType)
+{
+    _numFrameTypes[frameType]++;
+    return 0;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_coding/main/test/utility.h b/trunk/src/modules/audio_coding/main/test/utility.h
new file mode 100644
index 0000000..887c735
--- /dev/null
+++ b/trunk/src/modules/audio_coding/main/test/utility.h
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACM_TEST_UTILITY_H
+#define ACM_TEST_UTILITY_H
+
+#include "audio_coding_module.h"
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+//-----------------------------
+// Fails the current gtest test if the API call |f| returns a negative
+// error code. |f| is evaluated exactly once. All macro parameters below
+// are now parenthesized (fix) so complex argument expressions expand
+// safely.
+#define CHECK_ERROR(f)                                                                      \
+    do {                                                                                    \
+        EXPECT_GE((f), 0) << "Error Calling API";                                           \
+    }while(0)
+
+//-----------------------------
+// Expects |f| to FAIL (negative return): used when calling an API in a
+// state where the call must be rejected.
+#define CHECK_PROTECTED(f)                                                                  \
+    do {                                                                                    \
+        if((f) >= 0) {                                                                      \
+            ADD_FAILURE() << "Error Calling API";                                           \
+        }                                                                                   \
+        else {                                                                              \
+            printf("An expected error is caught.\n");                                       \
+        }                                                                                   \
+    }while(0)
+
+//----------------------------
+// Variant of CHECK_ERROR for multi-threaded tests: logs to stderr instead
+// of raising a gtest failure.
+#define CHECK_ERROR_MT(f)                                                                   \
+    do {                                                                                    \
+        if((f) < 0) {                                                                       \
+            fprintf(stderr, "Error Calling API in file %s at line %d \n",                   \
+                __FILE__, __LINE__);                                                        \
+        }                                                                                   \
+    }while(0)
+
+//----------------------------
+// Variant of CHECK_PROTECTED for multi-threaded tests (logs, no failure).
+#define CHECK_PROTECTED_MT(f)                                                               \
+    do {                                                                                    \
+        if((f) >= 0) {                                                                      \
+            fprintf(stderr, "Error Calling API in file %s at line %d \n",                   \
+                __FILE__, __LINE__);                                                        \
+        }                                                                                   \
+        else {                                                                              \
+            printf("An expected error is caught.\n");                                       \
+        }                                                                                   \
+    }while(0)
+
+
+
+#ifdef WIN32
+    /* Exclude rarely-used stuff from Windows headers */
+    //#define WIN32_LEAN_AND_MEAN 
+    /* OS-dependent case-insensitive string comparison */
+    #define STR_CASE_CMP(x,y) ::_stricmp((x),(y))
+#else
+    /* OS-dependent case-insensitive string comparison */
+    #define STR_CASE_CMP(x,y) ::strcasecmp((x),(y))
+#endif
+
+// Destroys an AudioCodingModule and NULLs the pointer; safe on NULL.
+#define DESTROY_ACM(acm)                                                                    \
+    do {                                                                                    \
+        if((acm) != NULL) {                                                                 \
+            AudioCodingModule::Destroy(acm);                                                \
+            (acm) = NULL;                                                                   \
+        }                                                                                   \
+    } while(0)
+
+
+// delete + NULL a heap pointer; safe on NULL.
+#define DELETE_POINTER(p)                                                                   \
+    do {                                                                                    \
+        if((p) != NULL) {                                                                   \
+            delete (p);                                                                     \
+            (p) = NULL;                                                                     \
+        }                                                                                   \
+    } while(0)
+
+// Manually-advanced stopwatch for the ACM tests: the owner calls one of
+// the Tick*() methods per processed audio chunk and reads the accumulated
+// time via CurrentTime()/CurrentTimeHMS().
+class ACMTestTimer
+{
+public:
+    ACMTestTimer();
+    ~ACMTestTimer();
+
+    // Restart from zero.
+    void Reset();
+    void Tick10ms();
+    void Tick1ms();
+    void Tick100ms();
+    void Tick1sec();
+    // Format the elapsed time as "HHHH:MM:SS.mmm" into currTime.
+    void CurrentTimeHMS(
+        char* currTime);
+    // Return the split components of the elapsed time.
+    void CurrentTime(
+        unsigned long&  h, 
+        unsigned char&  m,
+        unsigned char&  s,
+        unsigned short& ms);
+
+private:
+    // Carry overflow from msec up through sec/min into hour.
+    void Adjust();
+
+    unsigned short _msec;
+    unsigned char  _sec;
+    unsigned char  _min;
+    unsigned long  _hour;  
+};
+
+
+
+// Fixed-capacity circular buffer of doubles with optional incremental
+// tracking of the running sum (for ArithMean()) and the running sum of
+// squares (for Variance()).
+class CircularBuffer
+{
+public:
+    CircularBuffer(WebRtc_UWord32 len);
+    ~CircularBuffer();
+
+    // Enable/disable running-mean tracking.
+    void SetArithMean(
+        bool enable);
+    // Enable/disable sum-of-squares tracking.
+    void SetVariance(
+        bool enable);
+
+    // Overwrite the oldest sample with newVal.
+    void Update(
+        const double newVal);
+    // NOTE(review): declared but not defined in utility.cc, and returns
+    // void despite the Is- name; implement or remove.
+    void IsBufferFull();
+    
+    // Both return 0 on success, -1 if the buffer is still empty.
+    WebRtc_Word16 Variance(double& var);
+    WebRtc_Word16 ArithMean(double& mean);
+
+protected:
+    double* _buff;               // sample storage (len doubles)
+    WebRtc_UWord32 _idx;         // next write position
+    WebRtc_UWord32 _buffLen;     // capacity
+
+    bool         _buffIsFull;    // true once the buffer has wrapped
+    bool         _calcAvg;       // maintain _sum incrementally
+    bool         _calcVar;       // maintain _sumSqr incrementally
+    double       _sum;           // running sum of stored samples
+    double       _sumSqr;        // running sum of squared samples
+};
+
+
+
+
+
+// Interactively prompts on stdin for a codec index, fills |codecInst| and
+// returns 0.
+WebRtc_Word16 ChooseCodec(
+    CodecInst& codecInst);
+
+// Lists all codecs known to AudioCodingModule on stdout.
+void PrintCodecs();
+
+// True if |payloadName| (case-insensitive) names a codec with a fixed
+// RTP payload type.
+bool FixedPayloadTypeCodec(const char* payloadName);
+
+
+
+
+// AudioCodingFeedback sink that histograms in-band DTMF detections by
+// digit value.
+class DTMFDetector: public AudioCodingFeedback
+{
+public:
+    DTMFDetector();
+    ~DTMFDetector();
+    // used for inband DTMF detection; counts each reported digit.
+    WebRtc_Word32 IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool toneEnded);
+    // Prints the accumulated duration (10 ms per detection) of each digit.
+    void PrintDetectedDigits();
+
+private:
+    // Detection count per digit value, indexed by digitDtmf.
+    WebRtc_UWord32 _toneCntr[1000];
+
+};
+
+
+
+
+// ACMVADCallback that histograms encoded-frame types into six buckets
+// (bucket labels are printed by PrintFrameTypes()).
+class VADCallback : public ACMVADCallback
+{
+public:
+    VADCallback();
+    ~VADCallback(){}
+
+    // Counts |frameType|; assumes 0 <= frameType < 6.
+    WebRtc_Word32 InFrameType(
+        WebRtc_Word16 frameType);
+
+    // Dump the per-type counters to stdout.
+    void PrintFrameTypes();
+    // Zero all counters.
+    void Reset();
+
+private:
+    WebRtc_UWord32 _numFrameTypes[6];
+};
+
+} // namespace webrtc
+
+#endif // ACM_TEST_UTILITY_H
diff --git a/trunk/src/modules/audio_coding/neteq/Android.mk b/trunk/src/modules/audio_coding/neteq/Android.mk
new file mode 100644
index 0000000..84267be
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/Android.mk
@@ -0,0 +1,73 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Builds the NetEQ jitter-buffer library (libwebrtc_neteq) as a static
+# library under the Android build system.
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings; presumably defines MY_WEBRTC_COMMON_DEFS
+# used below — confirm in android-webrtc.mk.
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_neteq
+LOCAL_MODULE_TAGS := optional
+# NetEQ core sources (plain C).
+LOCAL_SRC_FILES := \
+    accelerate.c \
+    automode.c \
+    bgn_update.c \
+    bufstats_decision.c \
+    cng_internal.c \
+    codec_db.c \
+    correlator.c \
+    dsp.c \
+    dsp_helpfunctions.c \
+    dtmf_buffer.c \
+    dtmf_tonegen.c \
+    expand.c \
+    mcu_address_init.c \
+    mcu_dsp_common.c \
+    mcu_reset.c \
+    merge.c \
+    min_distortion.c \
+    mix_voice_unvoice.c \
+    mute_signal.c \
+    normal.c \
+    packet_buffer.c \
+    peak_detection.c \
+    preemptive_expand.c \
+    random_vector.c \
+    recin.c \
+    recout.c \
+    rtcp.c \
+    rtp.c \
+    set_fs.c \
+    signal_mcu.c \
+    split_and_insert.c \
+    unmute_signal.c \
+    webrtc_neteq.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DNETEQ_VOICEENGINE_CODECS'
+
+# NOTE(review): the last path below ends with a trailing space, which make
+# keeps as part of the value; harmless in an include list but worth removing.
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/../codecs/cng/include \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# Full AOSP checkouts pull STLport from external/; standalone NDK builds
+# (NDK_ROOT set) provide their own STL.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_coding/neteq/OWNERS b/trunk/src/modules/audio_coding/neteq/OWNERS
new file mode 100644
index 0000000..1d25542
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/OWNERS
@@ -0,0 +1,2 @@
+henrik.lundin@webrtc.org
+tina.legrand@webrtc.org
diff --git a/trunk/src/modules/audio_coding/neteq/accelerate.c b/trunk/src/modules/audio_coding/neteq/accelerate.c
new file mode 100644
index 0000000..285de4d
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/accelerate.c
@@ -0,0 +1,489 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the Accelerate algorithm that is used to reduce
+ * the delay by removing a part of the audio stream.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+#define ACCELERATE_CORR_LEN 50
+#define ACCELERATE_MIN_LAG 10
+#define ACCELERATE_MAX_LAG 60
+#define ACCELERATE_DOWNSAMPLED_LEN (ACCELERATE_CORR_LEN + ACCELERATE_MAX_LAG)
+
+/* Scratch usage:
+
+ Type	        Name                size    startpos    endpos
+ WebRtc_Word16  pw16_downSampSpeech 110     0           109
+ WebRtc_Word32  pw32_corr           2*50    110         209
+ WebRtc_Word16  pw16_corr           50      0           49
+
+ Total: 110+2*50
+ */
+
+#define	 SCRATCH_PW16_DS_SPEECH			0
+#define	 SCRATCH_PW32_CORR				ACCELERATE_DOWNSAMPLED_LEN
+#define	 SCRATCH_PW16_CORR				0
+
+/****************************************************************************
+ * WebRtcNetEQ_Accelerate(...)
+ *
+ * This function tries to shorten the audio data by removing one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to newly decoded speech.
+ *		- len           : Length of decoded speech.
+ *      - BGNonly       : If non-zero, Accelerate will only remove the last 
+ *                        DEFAULT_TIME_ADJUST seconds of the input.
+ *                        No signal matching is done.
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *		- outData		: Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples written to outData.
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
+#ifdef SCRATCH
+                           WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                           const WebRtc_Word16 *pw16_decoded, int len,
+                           WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                           WebRtc_Word16 BGNonly)
+{
+
+#ifdef SCRATCH
+    /* Use scratch memory for internal temporary vectors */
+    WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
+    WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
+#else
+    /* Allocate memory for temporary vectors */
+    WebRtc_Word16 pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
+    WebRtc_Word32 pw32_corr[ACCELERATE_CORR_LEN];
+    WebRtc_Word16 pw16_corr[ACCELERATE_CORR_LEN];
+#endif
+    WebRtc_Word16 w16_decodedMax = 0;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word32 w32_tmp2;
+
+    const WebRtc_Word16 w16_startLag = ACCELERATE_MIN_LAG;
+    const WebRtc_Word16 w16_endLag = ACCELERATE_MAX_LAG;
+    const WebRtc_Word16 w16_corrLen = ACCELERATE_CORR_LEN;
+    const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
+    WebRtc_Word16 *pw16_vectmp;
+    WebRtc_Word16 w16_inc, w16_startfact;
+    WebRtc_Word16 w16_bestIndex, w16_bestVal;
+    WebRtc_Word16 w16_VAD = 1;
+    WebRtc_Word16 fsMult;
+    WebRtc_Word16 fsMult120;
+    WebRtc_Word32 w32_en1, w32_en2, w32_cc;
+    WebRtc_Word16 w16_en1, w16_en2;
+    WebRtc_Word16 w16_en1Scale, w16_en2Scale;
+    WebRtc_Word16 w16_sqrtEn1En2;
+    WebRtc_Word16 w16_bestCorr = 0;
+    int ok;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
+
+    /* Pre-calculate common multiplication with fsMult */
+    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
+
+    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
+
+    /* Sanity check for len variable; must be (almost) 30 ms 
+     (120*fsMult + max(bestIndex)) */
+    if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
+    {
+        /* Length of decoded data too short */
+        inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+        *pw16_len = len;
+
+        /* simply move all data from decoded to outData */
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return NETEQ_OTHER_ERROR;
+    }
+
+    /***********************************/
+    /* Special operations for BGN only */
+    /***********************************/
+
+    /* Check if "background noise only" flag is set */
+    if (BGNonly)
+    {
+        /* special operation for BGN only; simply remove a chunk of data */
+        w16_bestIndex = DEFAULT_TIME_ADJUST * WEBRTC_SPL_LSHIFT_W16(fsMult, 3); /* X*fs/1000 */
+
+        /* Sanity check for bestIndex */
+        if (w16_bestIndex > len)
+        { /* not good, do nothing instead */
+            inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+            *pw16_len = len;
+
+            /* simply move all data from decoded to outData */
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /* set length parameter */
+        *pw16_len = len - w16_bestIndex; /* we remove bestIndex samples */
+
+        /* copy to output */
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, *pw16_len);
+
+        /* set mode */
+        inst->w16_mode = MODE_LOWEN_ACCELERATE;
+
+        /* update statistics */
+        inst->statInst.accelerateLength += w16_bestIndex;
+
+        return 0;
+    } /* end of special code for BGN mode */
+
+#ifdef NETEQ_STEREO
+
+    /* Sanity for msInfo */
+    if (msInfo == NULL)
+    {
+        /* this should not happen here */
+        return MASTER_SLAVE_ERROR;
+    }
+
+    if (msInfo->msMode != NETEQ_SLAVE)
+    {
+        /* Find correlation lag only for non-slave instances */
+        /* (a slave instance reuses the lag found by its master; see below) */
+
+#endif
+
+        /****************************************************************/
+        /* Find the strongest correlation lag by downsampling to 4 kHz, */
+        /* calculating correlation for downsampled signal and finding   */
+        /* the strongest correlation peak.                              */
+        /****************************************************************/
+
+        /* find maximum absolute value */
+        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+        /* downsample the decoded speech to 4 kHz */
+        ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
+            ACCELERATE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
+        if (ok != 0)
+        {
+            /* error */
+            inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+            *pw16_len = len;
+            /* simply move all data from decoded to outData */
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /*
+         * Set scaling factor for cross correlation to protect against overflow
+         * (log2(50) => 6)
+         */
+        w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
+        WebRtcNetEQ_CrossCorr(
+            pw32_corr, &pw16_downSampSpeech[w16_endLag],
+            &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
+            (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);
+
+        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
+        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
+
+#ifdef NETEQ_STEREO
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Find the strongest correlation peak by using the parabolic fit method */
+        WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
+            &w16_bestIndex, &w16_bestVal);
+        /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+        /* Compensate bestIndex for displaced starting position */
+        w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+        /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+        /* Publish the lag so that slave instances can reuse it */
+        msInfo->bestIndex = w16_bestIndex;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        if (msInfo->extraInfo == ACC_FAIL)
+        {
+            /* Master has signaled an unsuccessful accelerate */
+            w16_bestIndex = 0;
+        }
+        else
+        {
+            /* Get best index from master */
+            w16_bestIndex = msInfo->bestIndex;
+        }
+    }
+    else
+    {
+        /* Invalid mode */
+        return MASTER_SLAVE_ERROR;
+    }
+
+#else /* NETEQ_STEREO */
+
+    /* Find the strongest correlation peak by using the parabolic fit method */
+    WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
+        &w16_bestIndex, &w16_bestVal);
+    /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+    /* Compensate bestIndex for displaced starting position */
+    w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+    /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+#endif /* NETEQ_STEREO */
+
+#ifdef NETEQ_STEREO
+
+    if (msInfo->msMode != NETEQ_SLAVE)
+    {
+        /* Calculate correlation only for non-slave instances */
+
+#endif /* NETEQ_STEREO */
+
+        /*****************************************************/
+        /* Calculate correlation bestCorr for the found lag. */
+        /* Also do a simple VAD decision.                    */
+        /*****************************************************/
+
+        /*
+         * Calculate scaling to ensure that bestIndex samples can be square-summed
+         * without overflowing
+         */
+        w16_tmp = (31
+            - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
+        w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
+        w16_tmp -= 31;
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[fsMult120];
+
+        /* Calculate energies for vec1 and vec2 */
+        w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
+            (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
+        w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
+            (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);
+
+        /* Calculate cross-correlation at the found lag */
+        w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
+            w16_bestIndex, w16_tmp);
+
+        /* Check VAD constraint 
+         ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
+        if (inst->BGNInst.w16_initialized == 1)
+        {
+            w32_tmp2 = inst->BGNInst.w32_energy;
+        }
+        else
+        {
+            /* if BGN parameters have not been estimated, use a fixed threshold */
+            w32_tmp2 = 75000;
+        }
+        w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
+        w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
+        w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
+        w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
+
+        /* Scale w32_tmp properly before comparing with w32_tmp2 */
+        /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
+        if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
+        {
+            /* Cannot scale only w32_tmp, must scale w32_temp2 too */
+            WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
+            w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
+        }
+        else
+        {
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
+        }
+
+        if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        {
+            /* The signal seems to be passive speech */
+            w16_VAD = 0;
+            w16_bestCorr = 0; /* Correlation does not matter */
+        }
+        else
+        {
+            /* The signal is active speech */
+            w16_VAD = 1;
+
+            /* Calculate correlation (cc/sqrt(en1*en2)) */
+
+            /* Start with calculating scale values */
+            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
+            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+
+            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
+            if ((w16_en1Scale + w16_en2Scale) & 1)
+            {
+                w16_en1Scale += 1;
+            }
+
+            /* Convert energies to WebRtc_Word16 */
+            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+
+            /* Calculate energy product */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
+
+            /* Calculate square-root of energy product */
+            w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_tmp);
+
+            /* Calculate cc/sqrt(en1*en2) in Q14 */
+            w16_tmp = 14 - WEBRTC_SPL_RSHIFT_W16(w16_en1Scale+w16_en2Scale, 1);
+            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
+            w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
+            w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
+            w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
+        }
+
+#ifdef NETEQ_STEREO
+
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+#endif /* NETEQ_STEREO */
+
+    /************************************************/
+    /* Check accelerate criteria and remove samples */
+    /************************************************/
+
+    /* Check for strong correlation (>0.9) or passive speech */
+    /* (14746 is 0.9 in Q14; w16_bestCorr is in Q14) */
+#ifdef NETEQ_STEREO
+    if ((((w16_bestCorr > 14746) || (w16_VAD == 0)) && (msInfo->msMode != NETEQ_SLAVE))
+        || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->extraInfo != ACC_FAIL)))
+#else
+    if ((w16_bestCorr > 14746) || (w16_VAD == 0))
+#endif
+    {
+        /* Do accelerate operation by overlap add */
+
+        /*
+         * Calculate cross-fading slope so that the fading factor goes from
+         * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
+         */
+        w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
+            (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */
+
+        /* Initiate fading factor */
+        w16_startfact = 16384 - w16_inc;
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[fsMult120];
+
+        /* Copy unmodified part [0 to 15 ms minus 1 pitch period] */
+        w16_tmp = (fsMult120 - w16_bestIndex);
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_tmp);
+
+        /* Generate interpolated part of length bestIndex (1 pitch period) */
+        pw16_vectmp = pw16_outData + w16_tmp; /* start of interpolation output */
+        /* Reuse mixing function from Expand */
+        WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec1,
+            (WebRtc_Word16*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);
+
+        /* Move the last part (also unmodified) */
+        /* Take from decoded at 15 ms + 1 pitch period */
+        pw16_vec2 = &pw16_decoded[fsMult120 + w16_bestIndex];
+        WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[fsMult120], pw16_vec2,
+            (WebRtc_Word16) (len - fsMult120 - w16_bestIndex));
+
+        /* Set the mode flag */
+        if (w16_VAD)
+        {
+            inst->w16_mode = MODE_SUCCESS_ACCELERATE;
+        }
+        else
+        {
+            inst->w16_mode = MODE_LOWEN_ACCELERATE;
+        }
+
+        /* Calculate resulting length = original length - pitch period */
+        *pw16_len = len - w16_bestIndex;
+
+        /* Update in-call statistics */
+        inst->statInst.accelerateLength += w16_bestIndex;
+
+        return 0;
+    }
+    else
+    {
+        /* Accelerate not allowed */
+
+#ifdef NETEQ_STEREO
+        /* Signal to slave(s) that this was unsuccessful */
+        if (msInfo->msMode == NETEQ_MASTER)
+        {
+            msInfo->extraInfo = ACC_FAIL;
+        }
+#endif
+
+        /* Set mode flag to unsuccessful accelerate */
+        inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+
+        /* Length is unmodified */
+        *pw16_len = len;
+
+        /* Simply move all data from decoded to outData */
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return 0;
+    }
+}
+
+/* Scratch-offset macros are local to this file; undefine to avoid leaking. */
+#undef SCRATCH_PW16_DS_SPEECH
+#undef SCRATCH_PW32_CORR
+#undef SCRATCH_PW16_CORR
diff --git a/trunk/src/modules/audio_coding/neteq/automode.c b/trunk/src/modules/audio_coding/neteq/automode.c
new file mode 100644
index 0000000..da127e0
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/automode.c
@@ -0,0 +1,736 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the implementation of automatic buffer level optimization.
+ */
+
+#include "automode.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+#include "neteq_defines.h"
+
+#ifdef NETEQ_DELAY_LOGGING
+/* special code for offline delay logging */
+#include <stdio.h>
+#include "delay_logging.h"
+
+extern FILE *delay_fid2; /* file pointer to delay log file */
+#endif /* NETEQ_DELAY_LOGGING */
+
+
+/****************************************************************************
+ * WebRtcNetEQ_UpdateIatStatistics(...)
+ *
+ * Updates the inter-arrival time (IAT) histogram and the derived optimal
+ * buffer level (inst->optBufLevel) when a new packet arrives, and updates
+ * the post-call IAT counters.
+ *
+ * Input:
+ *      - inst          : Automode instance
+ *      - maxBufLen     : Maximum buffer length (in packets); must be >= 2
+ *      - seqNumber     : RTP sequence number of the arriving packet
+ *      - timeStamp     : RTP timestamp of the arriving packet
+ *      - fsHz          : Sample rate in Hz; must be > 0
+ *      - mdCodec       : Non-zero if a multiple-description codec is in use
+ *      - streamingMode : Non-zero for streaming (listen-only) mode
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error (sanity check failure, or error from
+ *                             WebRtcNetEQ_CalcOptimalBufLvl)
+ */
+int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
+                                    WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
+                                    WebRtc_Word32 fsHz, int mdCodec, int streamingMode)
+{
+    WebRtc_UWord32 timeIat; /* inter-arrival time */
+    int i;
+    WebRtc_Word32 tempsum = 0; /* temp summation */
+    WebRtc_Word32 tempvar; /* temporary variable */
+    int retval = 0; /* return value */
+    WebRtc_Word16 packetLenSamp; /* packet speech length in samples */
+
+    /****************/
+    /* Sanity check */
+    /****************/
+
+    if (maxBufLen <= 1 || fsHz <= 0)
+    {
+        /* maxBufLen must be at least 2 and fsHz must both be strictly positive */
+        return -1;
+    }
+
+    /****************************/
+    /* Update packet statistics */
+    /****************************/
+
+    /* Try calculating packet length from current and previous timestamps */
+    /* NOTE(review): a 16-bit sequence-number or 32-bit timestamp wraparound
+       lands in the first branch (<=) and falls back to the stored packet
+       length -- confirm this is the intended wraparound behavior. */
+    if ((timeStamp <= inst->lastTimeStamp) || (seqNumber <= inst->lastSeqNo))
+    {
+        /* Wrong timestamp or sequence order; revert to backup plan */
+        packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */
+    }
+    else if (timeStamp > inst->lastTimeStamp)
+    {
+        /* calculate timestamps per packet */
+        packetLenSamp = (WebRtc_Word16) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
+            seqNumber - inst->lastSeqNo);
+    }
+
+    /* Check that the packet size is positive; if not, the statistics cannot be updated. */
+    if (packetLenSamp > 0)
+    { /* packet size ok */
+
+        /* calculate inter-arrival time in integer packets (rounding down) */
+        timeIat = WebRtcSpl_DivW32W16(inst->packetIatCountSamp, packetLenSamp);
+
+        /* Special operations for streaming mode */
+        if (streamingMode != 0)
+        {
+            /*
+             * Calculate IAT in Q8, including fractions of a packet (i.e., more accurate
+             * than timeIat).
+             */
+            WebRtc_Word16 timeIatQ8 = (WebRtc_Word16) WebRtcSpl_DivW32W16(
+                WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);
+
+            /*
+             * Calculate cumulative sum iat with sequence number compensation (ideal arrival
+             * times makes this sum zero).
+             */
+            inst->cSumIatQ8 += (timeIatQ8
+                - WEBRTC_SPL_LSHIFT_W32(seqNumber - inst->lastSeqNo, 8));
+
+            /* subtract drift term */
+            inst->cSumIatQ8 -= CSUM_IAT_DRIFT;
+
+            /* ensure not negative */
+            inst->cSumIatQ8 = WEBRTC_SPL_MAX(inst->cSumIatQ8, 0);
+
+            /* remember max */
+            if (inst->cSumIatQ8 > inst->maxCSumIatQ8)
+            {
+                inst->maxCSumIatQ8 = inst->cSumIatQ8;
+                inst->maxCSumUpdateTimer = 0;
+            }
+
+            /* too long since the last maximum was observed; decrease max value */
+            if (inst->maxCSumUpdateTimer > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
+                MAX_STREAMING_PEAK_PERIOD))
+            {
+                inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
+            }
+        } /* end of streaming mode */
+
+        /* check for discontinuous packet sequence and re-ordering */
+        if (seqNumber > inst->lastSeqNo + 1)
+        {
+            /* Compensate for gap in the sequence numbers.
+             * Reduce IAT with expected extra time due to lost packets, but ensure that
+             * the IAT is not negative.
+             */
+            timeIat -= WEBRTC_SPL_MIN(timeIat,
+                (WebRtc_UWord32) (seqNumber - inst->lastSeqNo - 1));
+        }
+        else if (seqNumber < inst->lastSeqNo)
+        {
+            /* compensate for re-ordering */
+            timeIat += (WebRtc_UWord32) (inst->lastSeqNo + 1 - seqNumber);
+        }
+
+        /* saturate IAT at maximum value */
+        timeIat = WEBRTC_SPL_MIN( timeIat, MAX_IAT );
+
+        /* update iatProb = forgetting_factor * iatProb for all elements */
+        for (i = 0; i <= MAX_IAT; i++)
+        {
+            WebRtc_Word32 tempHi, tempLo; /* Temporary variables */
+
+            /*
+             * Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift 15 steps
+             * to come back to Q30. The operation is done in two steps:
+             */
+
+            /*
+             * 1) Multiply the high 16 bits (15 bits + sign) of iatProb. Shift iatProb
+             * 16 steps right to get the high 16 bits in a WebRtc_Word16 prior to
+             * multiplication, and left-shift with 1 afterwards to come back to
+             * Q30 = (Q15 * (Q30>>16)) << 1.
+             */
+            tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
+                (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
+            tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */
+
+            /*
+             * 2) Isolate and multiply the low 16 bits of iatProb. Right-shift 15 steps
+             * afterwards to come back to Q30 = (Q15 * Q30) >> 15.
+             */
+            tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
+            tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact,
+                (WebRtc_UWord16) tempLo);
+            tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);
+
+            /* Finally, add the high and low parts */
+            inst->iatProb[i] = tempHi + tempLo;
+
+            /* Sum all vector elements while we are at it... */
+            tempsum += inst->iatProb[i];
+        }
+
+        /*
+         * Increase the probability for the currently observed inter-arrival time
+         * with 1 - iatProbFact. The factor is in Q15, iatProb in Q30;
+         * hence, left-shift 15 steps to obtain result in Q30.
+         */
+        inst->iatProb[timeIat] += (32768 - inst->iatProbFact) << 15;
+
+        tempsum += (32768 - inst->iatProbFact) << 15; /* add to vector sum */
+
+        /*
+         * Update iatProbFact (changes only during the first seconds after reset)
+         * The factor converges to IAT_PROB_FACT.
+         */
+        inst->iatProbFact += (IAT_PROB_FACT - inst->iatProbFact + 3) >> 2;
+
+        /* iatProb should sum up to 1 (in Q30). */
+        tempsum -= 1 << 30; /* should be zero */
+
+        /* Check if it does, correct if it doesn't. */
+        if (tempsum > 0)
+        {
+            /* tempsum too large => decrease a few values in the beginning */
+            i = 0;
+            while (i <= MAX_IAT && tempsum > 0)
+            {
+                /* Remove iatProb[i] / 16 from iatProb, but not more than tempsum */
+                tempvar = WEBRTC_SPL_MIN(tempsum, inst->iatProb[i] >> 4);
+                inst->iatProb[i++] -= tempvar;
+                tempsum -= tempvar;
+            }
+        }
+        else if (tempsum < 0)
+        {
+            /* tempsum too small => increase a few values in the beginning */
+            i = 0;
+            while (i <= MAX_IAT && tempsum < 0)
+            {
+                /* Add iatProb[i] / 16 to iatProb, but not more than tempsum */
+                tempvar = WEBRTC_SPL_MIN(-tempsum, inst->iatProb[i] >> 4);
+                inst->iatProb[i++] += tempvar;
+                tempsum += tempvar;
+            }
+        }
+
+        /* Calculate optimal buffer level based on updated statistics */
+        tempvar = (WebRtc_Word32) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
+            streamingMode);
+        if (tempvar > 0)
+        {
+            inst->optBufLevel = (WebRtc_UWord16) tempvar;
+
+            if (streamingMode != 0)
+            {
+                inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
+                    inst->maxCSumIatQ8);
+            }
+
+            /*********/
+            /* Limit */
+            /*********/
+
+            /* Subtract extra delay from maxBufLen */
+            if (inst->extraDelayMs > 0 && inst->packetSpeechLenSamp > 0)
+            {
+                /* NOTE(review): this divides left-to-right, i.e.
+                   (extraDelayMs / packetSpeechLenSamp) * fsHz / 1000, so the
+                   first integer division truncates (often to 0 when
+                   extraDelayMs < packetSpeechLenSamp) -- confirm the intended
+                   order of operations. */
+                maxBufLen -= inst->extraDelayMs / inst->packetSpeechLenSamp * fsHz / 1000;
+                maxBufLen = WEBRTC_SPL_MAX(maxBufLen, 1); // sanity: at least one packet
+            }
+
+            maxBufLen = WEBRTC_SPL_LSHIFT_W32(maxBufLen, 8); /* shift to Q8 */
+
+            /* Enforce upper limit; 75% of maxBufLen */
+            inst->optBufLevel = (WebRtc_UWord16) WEBRTC_SPL_MIN( inst->optBufLevel,
+                (maxBufLen >> 1) + (maxBufLen >> 2) ); /* 1/2 + 1/4 = 75% */
+        }
+        else
+        {
+            /* propagate error code from WebRtcNetEQ_CalcOptimalBufLvl */
+            retval = (int) tempvar;
+        }
+
+    } /* end if */
+
+    /*******************************/
+    /* Update post-call statistics */
+    /*******************************/
+
+    /* Calculate inter-arrival time in ms = packetIatCountSamp / (fsHz / 1000) */
+    timeIat = WEBRTC_SPL_UDIV(
+        WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (WebRtc_Word16) 1000),
+        (WebRtc_UWord32) fsHz);
+
+    /* Increase counter corresponding to current inter-arrival time */
+    if (timeIat > 2000)
+    {
+        inst->countIAT2000ms++;
+    }
+    else if (timeIat > 1000)
+    {
+        inst->countIAT1000ms++;
+    }
+    else if (timeIat > 500)
+    {
+        inst->countIAT500ms++;
+    }
+
+    if (timeIat > inst->longestIATms)
+    {
+        /* update maximum value */
+        inst->longestIATms = timeIat;
+    }
+
+    /***********************************/
+    /* Prepare for next packet arrival */
+    /***********************************/
+
+    inst->packetIatCountSamp = 0; /* reset inter-arrival time counter */
+
+    inst->lastSeqNo = seqNumber; /* remember current sequence number */
+
+    inst->lastTimeStamp = timeStamp; /* remember current timestamp */
+
+    return retval;
+}
+
+
+WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
+                                            int mdCodec, WebRtc_UWord32 timeIatPkts,
+                                            int streamingMode)
+{
+
+    WebRtc_Word32 sum1 = 1 << 30; /* assign to 1 in Q30 */
+    WebRtc_Word16 B;
+    WebRtc_UWord16 Bopt;
+    int i;
+    WebRtc_Word32 betaInv; /* optimization parameter */
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    int temp_var;
+#endif
+
+    /****************/
+    /* Sanity check */
+    /****************/
+
+    if (fsHz <= 0)
+    {
+        /* fsHz must be strictly positive */
+        return -1;
+    }
+
+    /***********************************************/
+    /* Get betaInv parameter based on playout mode */
+    /***********************************************/
+
+    if (streamingMode)
+    {
+        /* streaming (listen-only) mode */
+        betaInv = AUTOMODE_STREAMING_BETA_INV_Q30;
+    }
+    else
+    {
+        /* normal mode */
+        betaInv = AUTOMODE_BETA_INV_Q30;
+    }
+
+    /*******************************************************************/
+    /* Calculate optimal buffer level without considering jitter peaks */
+    /*******************************************************************/
+
+    /*
+     * Find the B for which the probability of observing an inter-arrival time larger
+     * than or equal to B is less than or equal to betaInv.
+     */
+    B = 0; /* start from the beginning of iatProb */
+    sum1 -= inst->iatProb[B]; /* ensure that optimal level is not less than 1 */
+
+    do
+    {
+        /*
+         * Subtract the probabilities one by one until the sum is no longer greater
+         * than betaInv.
+         */
+        sum1 -= inst->iatProb[++B];
+    }
+    while ((sum1 > betaInv) && (B < MAX_IAT));
+
+    Bopt = B; /* This is our primary value for the optimal buffer level Bopt */
+
+    if (mdCodec)
+    {
+        /*
+         * Use alternative cost function when multiple description codec is in use.
+         * Do not have to re-calculate all points, just back off a few steps from
+         * previous value of B.
+         */
+        WebRtc_Word32 sum2 = sum1; /* copy sum1 */
+
+        while ((sum2 <= betaInv + inst->iatProb[Bopt]) && (Bopt > 0))
+        {
+            /* Go backwards in the sum until the modified cost function solution is found */
+            sum2 += inst->iatProb[Bopt--];
+        }
+
+        Bopt++; /* This is the optimal level when using an MD codec */
+
+        /* Now, Bopt and B can have different values. */
+    }
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF;
+    fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
+    temp_var = (int) (Bopt * inst->packetSpeechLenSamp);
+#endif
+
+    /******************************************************************/
+    /* Make levelFiltFact adaptive: Larger B <=> larger levelFiltFact */
+    /******************************************************************/
+
+    switch (B)
+    {
+        case 0:
+        case 1:
+        {
+            inst->levelFiltFact = 251;
+            break;
+        }
+        case 2:
+        case 3:
+        {
+            inst->levelFiltFact = 252;
+            break;
+        }
+        case 4:
+        case 5:
+        case 6:
+        case 7:
+        {
+            inst->levelFiltFact = 253;
+            break;
+        }
+        default: /* B > 7 */
+        {
+            inst->levelFiltFact = 254;
+            break;
+        }
+    }
+
+    /************************/
+    /* Peak mode operations */
+    /************************/
+
+    /* Compare current IAT with peak threshold
+     *
+     * If IAT > optimal level + threshold (+1 for MD codecs)
+     * or if IAT > 2 * optimal level (note: optimal level is in Q8):
+     */
+    if (timeIatPkts > (WebRtc_UWord32) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
+        || timeIatPkts > (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
+    {
+        /* A peak is observed */
+
+        if (inst->peakIndex == -1)
+        {
+            /* this is the first peak; prepare for next peak */
+            inst->peakIndex = 0;
+            /* set the mode-disable counter */
+            inst->peakModeDisabled = WEBRTC_SPL_LSHIFT_W16(1, NUM_PEAKS_REQUIRED-2);
+        }
+        else if (inst->peakIatCountSamp
+            <=
+            (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
+        {
+            /* This is not the first peak and the period time is valid */
+
+            /* store time elapsed since last peak */
+            inst->peakPeriodSamp[inst->peakIndex] = inst->peakIatCountSamp;
+
+            /* saturate height to 16 bits */
+            inst->peakHeightPkt[inst->peakIndex]
+                =
+                (WebRtc_Word16) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);
+
+            /* increment peakIndex and wrap/modulo */
+            inst->peakIndex = (inst->peakIndex + 1) & PEAK_INDEX_MASK;
+
+            /* process peak vectors */
+            inst->curPeakHeight = 0;
+            inst->curPeakPeriod = 0;
+
+            for (i = 0; i < NUM_PEAKS; i++)
+            {
+                /* Find maximum of peak heights and peak periods */
+                inst->curPeakHeight
+                    = WEBRTC_SPL_MAX(inst->curPeakHeight, inst->peakHeightPkt[i]);
+                inst->curPeakPeriod
+                    = WEBRTC_SPL_MAX(inst->curPeakPeriod, inst->peakPeriodSamp[i]);
+
+            }
+
+            inst->peakModeDisabled >>= 1; /* decrease mode-disable "counter" */
+
+        }
+        else if (inst->peakIatCountSamp > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
+            WEBRTC_SPL_LSHIFT_W16(MAX_PEAK_PERIOD, 1)))
+        {
+            /*
+             * More than 2 * MAX_PEAK_PERIOD has elapsed since last peak;
+             * too long time => reset peak statistics
+             */
+            inst->curPeakHeight = 0;
+            inst->curPeakPeriod = 0;
+            for (i = 0; i < NUM_PEAKS; i++)
+            {
+                inst->peakHeightPkt[i] = 0;
+                inst->peakPeriodSamp[i] = 0;
+            }
+
+            inst->peakIndex = -1; /* Next peak is first peak */
+            inst->peakIatCountSamp = 0;
+        }
+
+        inst->peakIatCountSamp = 0; /* Reset peak interval timer */
+    } /* end if peak is observed */
+
+    /* Evaluate peak mode conditions */
+
+    /*
+     * If not disabled (enough peaks have been observed) and
+     * time since last peak is less than two peak periods.
+     */
+    inst->peakFound = 0;
+    if ((!inst->peakModeDisabled) && (inst->peakIatCountSamp
+        <= WEBRTC_SPL_LSHIFT_W32(inst->curPeakPeriod , 1)))
+    {
+        /* Engage peak mode */
+        inst->peakFound = 1;
+        /* Set optimal buffer level to curPeakHeight (if it's not already larger) */
+        Bopt = WEBRTC_SPL_MAX(Bopt, inst->curPeakHeight);
+
+#ifdef NETEQ_DELAY_LOGGING
+        /* special code for offline delay logging */
+        temp_var = (int) -(Bopt * inst->packetSpeechLenSamp);
+#endif
+    }
+
+    /* Scale Bopt to Q8 */
+    Bopt = WEBRTC_SPL_LSHIFT_U16(Bopt,8);
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
+#endif
+
+    /* Sanity check: Bopt must be strictly positive */
+    if (Bopt <= 0)
+    {
+        Bopt = WEBRTC_SPL_LSHIFT_W16(1, 8); /* 1 in Q8 */
+    }
+
+    return Bopt; /* return value in Q8 */
+}
+
+
+/*
+ * Update the filtered packet-buffer level and advance the automode timing
+ * counters. Must be called once per RecOut call, since several automode
+ * timers (IAT counters, time-scale hold-off) are driven from here.
+ */
+int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
+                                  int sampPerCall, WebRtc_Word16 fsMult)
+{
+
+    WebRtc_Word16 curSizeFrames; /* current buffer level measured in packet lengths */
+
+    /****************/
+    /* Sanity check */
+    /****************/
+
+    if (sampPerCall <= 0 || fsMult <= 0)
+    {
+        /* sampPerCall and fsMult must both be strictly positive */
+        return -1;
+    }
+
+    /* Check if packet size has been detected */
+    if (inst->packetSpeechLenSamp > 0)
+    {
+        /*
+         * Current buffer level in packet lengths
+         * = (curSizeMs8 * fsMult) / packetSpeechLenSamp
+         * (curSizeMs8 * fsMult = ms * 8 * fs/8000 = buffer size in samples)
+         */
+        curSizeFrames = (WebRtc_Word16) WebRtcSpl_DivW32W16(
+            WEBRTC_SPL_MUL_32_16(curSizeMs8, fsMult), inst->packetSpeechLenSamp);
+    }
+    else
+    {
+        curSizeFrames = 0;
+    }
+
+    /* Filter buffer level */
+    if (inst->levelFiltFact > 0) /* check that filter factor is set */
+    {
+        /* Filter:
+         * buffLevelFilt = levelFiltFact * buffLevelFilt
+         *                  + (1-levelFiltFact) * curSizeFrames
+         *
+         * levelFiltFact is in Q8
+         */
+        /* Q8*Q8 product >> 8 stays in Q8; (256 - fact) times the integer
+         * frame count is also Q8, so buffLevelFilt remains Q8 packets. */
+        inst->buffLevelFilt = (WebRtc_UWord16) (WEBRTC_SPL_RSHIFT_W32(
+            WEBRTC_SPL_MUL_16_U16(inst->levelFiltFact, inst->buffLevelFilt), 8)
+            + WEBRTC_SPL_MUL_16_16(256 - inst->levelFiltFact, curSizeFrames));
+    }
+
+    /* Account for time-scale operations (accelerate and pre-emptive expand) */
+    if (inst->prevTimeScale)
+    {
+        /*
+         * Time-scaling has been performed since last filter update.
+         * Subtract the sampleMemory from buffLevelFilt after converting sampleMemory
+         * from samples to packets in Q8. Make sure that the filtered value is
+         * non-negative.
+         */
+        /* NOTE(review): assumes packetSpeechLenSamp != 0 whenever prevTimeScale
+         * is set (division below) -- presumably guaranteed by the callers that
+         * set prevTimeScale; TODO confirm. */
+        inst->buffLevelFilt = (WebRtc_UWord16) WEBRTC_SPL_MAX( inst->buffLevelFilt -
+            WebRtcSpl_DivW32W16(
+                WEBRTC_SPL_LSHIFT_W32(inst->sampleMemory, 8), /* sampleMemory in Q8 */
+                inst->packetSpeechLenSamp ), /* divide by packetSpeechLenSamp */
+            0);
+
+        /*
+         * Reset flag and set timescaleHoldOff timer to prevent further time-scaling
+         * for some time.
+         */
+        inst->prevTimeScale = 0;
+        inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT;
+    }
+
+    /* Update time counters and HoldOff timer */
+    inst->packetIatCountSamp += sampPerCall; /* packet inter-arrival time */
+    inst->peakIatCountSamp += sampPerCall; /* peak inter-arrival time */
+    inst->timescaleHoldOff >>= 1; /* time-scaling limiter */
+    inst->maxCSumUpdateTimer += sampPerCall; /* cumulative-sum timer */
+
+    return 0;
+
+}
+
+
+/*
+ * Register the speech-packet length (in samples) with the automode instance
+ * and derive the peak-detection threshold from it. Returns 0 on success,
+ * -1 if newLenSamp or fsHz is non-positive.
+ */
+int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
+                                   WebRtc_Word32 fsHz)
+{
+
+    /* Sanity check for newLenSamp and fsHz */
+    if (newLenSamp <= 0 || fsHz <= 0)
+    {
+        return -1;
+    }
+
+    inst->packetSpeechLenSamp = newLenSamp; /* Store packet size in instance */
+
+    /* Make NetEQ wait for first regular packet before starting the timer */
+    inst->lastPackCNGorDTMF = 1;
+
+    inst->packetIatCountSamp = 0; /* Reset packet time counter */
+
+    /*
+     * Calculate peak threshold from packet size. The threshold is defined as
+     * the (fractional) number of packets that corresponds to PEAK_HEIGHT
+     * (in Q8 seconds). That is, threshold = PEAK_HEIGHT/256 * fsHz / packLen.
+     */
+    /* The divide-by-256 (Q8) is split into two shifts: fsHz >> 6, then a
+     * final right-shift by 2 inside WEBRTC_SPL_MUL_16_16_RSFT (6 + 2 = 8).
+     * Shifting fsHz first also keeps the operand within 16 bits. */
+    inst->peakThresholdPkt = (WebRtc_UWord16) WebRtcSpl_DivW32W16ResW16(
+        WEBRTC_SPL_MUL_16_16_RSFT(PEAK_HEIGHT,
+            (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2), inst->packetSpeechLenSamp);
+
+    return 0;
+}
+
+
+/*
+ * Reset the automode instance to its initial state. maxBufLenPackets is the
+ * packet capacity of the packet buffer (must be > 1; smaller values are
+ * replaced by 10). Always returns 0.
+ */
+int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets)
+{
+
+    int i;
+    WebRtc_UWord16 tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */
+
+    /* Sanity check for maxBufLenPackets */
+    if (maxBufLenPackets <= 1)
+    {
+        /* Invalid value; set to 10 instead (arbitrary small number) */
+        maxBufLenPackets = 10;
+    }
+
+    /* Reset filtered buffer level */
+    inst->buffLevelFilt = 0;
+
+    /* Reset packet size to unknown */
+    inst->packetSpeechLenSamp = 0;
+
+    /*
+     * Flag that last packet was special payload, so that automode will treat the next speech
+     * payload as the first payload received.
+     */
+    inst->lastPackCNGorDTMF = 1;
+
+    /* Reset peak detection parameters */
+    inst->peakModeDisabled = 1; /* disable peak mode */
+    inst->peakIatCountSamp = 0;
+    inst->peakIndex = -1; /* indicates that no peak is registered */
+    inst->curPeakHeight = 0;
+    inst->curPeakPeriod = 0;
+    for (i = 0; i < NUM_PEAKS; i++)
+    {
+        inst->peakHeightPkt[i] = 0;
+        inst->peakPeriodSamp[i] = 0;
+    }
+
+    /*
+     * Set the iatProb PDF vector to an exponentially decaying distribution
+     * iatProb[i] = 0.5^(i+1), i = 0, 1, 2, ...
+     * iatProb is in Q30.
+     */
+    /* The "+ 2" bias in the 0x4002 seed slightly raises each entry,
+     * presumably so the truncated PDF still sums to ~1.0 in Q30 --
+     * TODO(review): confirm. */
+    for (i = 0; i <= MAX_IAT; i++)
+    {
+        /* iatProb[i] = 0.5^(i+1) = iatProb[i-1] / 2 */
+        tempprob = WEBRTC_SPL_RSHIFT_U16(tempprob, 1);
+        /* store in PDF vector */
+        inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) tempprob, 16);
+    }
+
+    /*
+     * Calculate the optimal buffer level corresponding to the initial PDF.
+     * No need to call WebRtcNetEQ_CalcOptimalBufLvl() since we have just hard-coded
+     * all the variables that the buffer level depends on => we know the result
+     */
+    /* NOTE(review): (n >> 1) + (n >> 1) is ~100% of n, but the trailing comment
+     * claims 75%; (maxBufLenPackets >> 2) as the second term would give 75%.
+     * Confirm intent before changing -- the WEBRTC_SPL_MIN(4, ...) cap makes
+     * the difference visible only for very small buffers. */
+    inst->optBufLevel = WEBRTC_SPL_MIN(4,
+        (maxBufLenPackets >> 1) + (maxBufLenPackets >> 1)); /* 75% of maxBufLenPackets */
+    inst->levelFiltFact = 253;
+
+    /*
+     * Reset the iat update forgetting factor to 0 to make the impact of the first
+     * incoming packets greater.
+     */
+    inst->iatProbFact = 0;
+
+    /* Reset packet inter-arrival time counter */
+    inst->packetIatCountSamp = 0;
+
+    /* Clear time-scaling related variables */
+    inst->prevTimeScale = 0;
+    inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT; /* don't allow time-scaling immediately */
+
+    /* Clear cumulative-sum inter-arrival-time statistics */
+    inst->cSumIatQ8 = 0;
+    inst->maxCSumIatQ8 = 0;
+
+    return 0;
+}
+
+/* Compute the average inter-arrival time relative to the nominal packet
+ * spacing, expressed in parts per million. Zero means packets arrive at
+ * exactly the nominal rate; positive means wider spacing than nominal. */
+int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst) {
+  int32_t mean_q24 = 0; /* E[iat] accumulated in Q24 */
+  int bin;
+  assert(inst);
+  /* E[iat] = sum_i i * P(iat == i). iatProb is Q30; drop 6 bits first so
+   * the worst case (2^30 * 64) fits in 32 bits. Result is Q24. */
+  for (bin = 0; bin <= MAX_IAT; ++bin) {
+    mean_q24 += (inst->iatProb[bin] >> 6) * bin;
+  }
+  /* Remove the nominal spacing of exactly one packet time (1.0 in Q24). */
+  mean_q24 -= (1 << 24);
+  /* Scale to ppm: 1000000 / 2^24 = 15625 / 2^18, applied as >>7, *15625,
+   * >>11 to stay within 32 bits. */
+  return ((mean_q24 >> 7) * 15625) >> 11;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/automode.h b/trunk/src/modules/audio_coding/neteq/automode.h
new file mode 100644
index 0000000..dbd09cf
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/automode.h
@@ -0,0 +1,264 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the functionality for automatic buffer level optimization.
+ */
+
+#ifndef AUTOMODE_H
+#define AUTOMODE_H
+
+#include "typedefs.h"
+
+/*************/
+/* Constants */
+/*************/
+
+/* The beta parameter defines the trade-off between delay and underrun probability. */
+/* It is defined through its inverse in Q30 */
+#define AUTOMODE_BETA_INV_Q30 53687091  /* 1/20 in Q30 */
+#define AUTOMODE_STREAMING_BETA_INV_Q30 536871 /* 1/2000 in Q30 */
+
+/* Forgetting factor for the inter-arrival time statistics */
+#define IAT_PROB_FACT 32745       /* 0.9993 in Q15 */
+
+/* Maximum inter-arrival time to register (in "packet-times") */
+#define MAX_IAT 64
+#define PEAK_HEIGHT 20            /* 0.08s in Q8 */
+
+/* The value (1<<5) sets maximum accelerate "speed" to about 100 ms/s */
+#define AUTOMODE_TIMESCALE_LIMIT (1<<5)
+
+/* Peak mode related parameters */
+/* Number of peaks in peak vector; must be a power of 2 */
+#define NUM_PEAKS 8
+
+/* Must be NUM_PEAKS-1 */
+#define PEAK_INDEX_MASK 0x0007
+
+/* Longest accepted peak distance */
+#define MAX_PEAK_PERIOD 10
+#define MAX_STREAMING_PEAK_PERIOD 600 /* 10 minutes */
+
+/* Number of peaks required before peak mode can be engaged */
+#define NUM_PEAKS_REQUIRED 3
+
+/* Drift term for cumulative sum */
+#define CSUM_IAT_DRIFT 2
+
+/*******************/
+/* Automode struct */
+/*******************/
+
+/* The automode struct is a sub-struct of the
+ bufstats-struct (BufstatsInst_t). It gathers all state used by the
+ automatic buffer-level optimization. */
+
+typedef struct
+{
+
+    /* Filtered current buffer level */
+    WebRtc_UWord16 levelFiltFact; /* filter forgetting factor in Q8 */
+    WebRtc_UWord16 buffLevelFilt; /* filtered buffer level in Q8 (packets) */
+
+    /* Inter-arrival time (iat) statistics */
+    WebRtc_Word32 iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
+    WebRtc_Word16 iatProbFact; /* iat forgetting factor in Q15 */
+    WebRtc_UWord32 packetIatCountSamp; /* time (in timestamps) elapsed since last
+     packet arrival, based on RecOut calls */
+    WebRtc_UWord16 optBufLevel; /* current optimal buffer level in Q8 */
+
+    /* Packet related information */
+    WebRtc_Word16 packetSpeechLenSamp; /* speech samples per incoming packet;
+     0 until the first packet size is registered */
+    WebRtc_Word16 lastPackCNGorDTMF; /* indicates that the last received packet
+     contained special information */
+    WebRtc_UWord16 lastSeqNo; /* sequence number for last packet received */
+    WebRtc_UWord32 lastTimeStamp; /* timestamp for the last packet received */
+    WebRtc_Word32 sampleMemory; /* memory position for keeping track of how many
+     samples we cut during expand */
+    WebRtc_Word16 prevTimeScale; /* indicates that the last mode was an accelerate
+     or pre-emptive expand operation */
+    WebRtc_UWord32 timescaleHoldOff; /* counter that is shifted one step right each
+     RecOut call; time-scaling allowed when it has
+     reached 0 */
+    WebRtc_Word16 extraDelayMs; /* extra delay for sync with video */
+
+    /* Peak-detection */
+    /* vector with the latest peak periods (peak spacing in samples) */
+    WebRtc_UWord32 peakPeriodSamp[NUM_PEAKS];
+    /* vector with the latest peak heights (in packets) */
+    WebRtc_Word16 peakHeightPkt[NUM_PEAKS];
+    WebRtc_Word16 peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
+     -1 if still waiting for first peak */
+    WebRtc_UWord16 peakThresholdPkt; /* definition of peak (in packets);
+     calculated from PEAK_HEIGHT */
+    WebRtc_UWord32 peakIatCountSamp; /* samples elapsed since last peak was observed */
+    WebRtc_UWord32 curPeakPeriod; /* current maximum of peakPeriodSamp vector */
+    WebRtc_Word16 curPeakHeight; /* derived from peakHeightPkt vector;
+     used as optimal buffer level in peak mode */
+    WebRtc_Word16 peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */
+    uint16_t peakFound; /* 1 if peaks are detected and extra delay is applied;
+                        * 0 otherwise. */
+
+    /* Post-call statistics */
+    WebRtc_UWord32 countIAT500ms; /* number of times we got small network outage */
+    WebRtc_UWord32 countIAT1000ms; /* number of times we got medium network outage */
+    WebRtc_UWord32 countIAT2000ms; /* number of times we got large network outage */
+    WebRtc_UWord32 longestIATms; /* mSec duration of longest network outage */
+
+    WebRtc_Word16 cSumIatQ8; /* cumulative sum of inter-arrival times */
+    WebRtc_Word16 maxCSumIatQ8; /* max cumulative sum IAT */
+    WebRtc_UWord32 maxCSumUpdateTimer;/* time elapsed since maximum was observed */
+
+} AutomodeInst_t;
+
+/*************/
+/* Functions */
+/*************/
+
+/****************************************************************************
+ * WebRtcNetEQ_UpdateIatStatistics(...)
+ *
+ * Update the packet inter-arrival time statistics when a new packet arrives.
+ * This function should be called for every arriving packet, with some
+ * exceptions when using DTX/VAD and DTMF. A new optimal buffer level is
+ * calculated after the update.
+ *
+ * Input:
+ *		- inst	        : Automode instance
+ *		- maxBufLen		: Maximum number of packets the buffer can hold
+ *		- seqNumber     : RTP sequence number of incoming packet
+ *      - timeStamp     : RTP timestamp of incoming packet
+ *      - fsHz          : Sample rate in Hz
+ *      - mdCodec       : Non-zero if the current codec is a multiple-
+ *                        description codec
+ *      - streamingMode : A non-zero value will increase jitter robustness (and delay)
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			:  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
+                                    WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
+                                    WebRtc_Word32 fsHz, int mdCodec, int streamingMode);
+
+/****************************************************************************
+ * WebRtcNetEQ_CalcOptimalBufLvl(...)
+ *
+ * Calculate the optimal buffer level based on packet inter-arrival time
+ * statistics.
+ *
+ * Input:
+ *		- inst	        : Automode instance
+ *      - fsHz          : Sample rate in Hz
+ *      - mdCodec       : Non-zero if the current codec is a multiple-
+ *                        description codec
+ *      - timeIatPkts   : Currently observed inter-arrival time in packets
+ *      - streamingMode : A non-zero value will increase jitter robustness (and delay)
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			: >0 - Optimal buffer level
+ *                        <0 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
+                                            int mdCodec, WebRtc_UWord32 timeIatPkts,
+                                            int streamingMode);
+
+/****************************************************************************
+ * WebRtcNetEQ_BufferLevelFilter(...)
+ *
+ * Update filtered buffer level. The function must be called once for each
+ * RecOut call, since the timing of automode hinges on counters that are
+ * updated by this function.
+ *
+ * Input:
+ *      - curSizeMs8    : Total length of unused speech data in packet buffer
+ *                        and sync buffer, in ms * 8
+ *		- inst	        : Automode instance
+ *		- sampPerCall	: Number of samples per RecOut call
+ *      - fsMult        : Sample rate in Hz divided by 8000
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			:  0 - Ok
+ *                      : <0 - Error
+ */
+
+int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
+                                  int sampPerCall, WebRtc_Word16 fsMult);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetPacketSpeechLen(...)
+ *
+ * Provide the number of speech samples extracted from a packet to the
+ * automode instance. Several of the calculations within automode depend
+ * on knowing the packet size.
+ *
+ *
+ * Input:
+ *		- inst	        : Automode instance
+ *		- newLenSamp    : Number of samples per RecOut call
+ *      - fsHz          : Sample rate in Hz
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			:  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
+                                   WebRtc_Word32 fsHz);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetAutomode(...)
+ *
+ * Reset the automode instance.
+ *
+ *
+ * Input:
+ *		- inst	            : Automode instance
+ *		- maxBufLenPackets  : Maximum number of packets that the packet
+ *                            buffer can hold (>1)
+ *
+ * Output:
+ *      - inst              : Updated automode instance
+ *
+ * Return value			    :  0 - Ok
+ */
+
+int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets);
+
+/****************************************************************************
+ * WebRtcNetEQ_AverageIAT(...)
+ *
+ * Calculate the average inter-arrival time based on current statistics.
+ * The average is expressed in parts per million relative the nominal. That is,
+ * if the average inter-arrival time is equal to the nominal frame time,
+ * the return value is zero. A positive value corresponds to packet spacing
+ * being too large, while a negative value means that the packets arrive with
+ * less spacing than expected.
+ *
+ *
+ * Input:
+ *    - inst              : Automode instance.
+ *
+ * Return value           : Average relative inter-arrival time in parts per million.
+ */
+
+int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst);
+
+#endif /* AUTOMODE_H */
diff --git a/trunk/src/modules/audio_coding/neteq/bgn_update.c b/trunk/src/modules/audio_coding/neteq/bgn_update.c
new file mode 100644
index 0000000..05956c2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/bgn_update.c
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function for updating the background noise estimate.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+
+/* Scratch usage: 
+ Designed for BGN_LPC_ORDER <= 10
+
+ Type           Name            size   startpos  endpos
+ WebRtc_Word32  pw32_autoCorr   22     0         21  (Length (BGN_LPC_ORDER + 1)*2)
+ WebRtc_Word16  pw16_tempVec    10     22        31	(Length BGN_LPC_ORDER)
+ WebRtc_Word16  pw16_rc         10     32        41	(Length BGN_LPC_ORDER)
+ WebRtc_Word16  pw16_outVec     74     0         73  (Length BGN_LPC_ORDER + 64)
+
+ Total: 74
+ */
+
+#if (BGN_LPC_ORDER > 10) && (defined SCRATCH)
+#error BGN_LPC_ORDER is too large for current scratch memory allocation
+#endif
+
+#define	 SCRATCH_PW32_AUTO_CORR			0
+#define	 SCRATCH_PW16_TEMP_VEC			22
+#define	 SCRATCH_PW16_RC				32
+#define	 SCRATCH_PW16_OUT_VEC			0
+
+#define NETEQFIX_BGNFRAQINCQ16	229 /* 0.0035 in Q16 */
+
+/****************************************************************************
+ * WebRtcNetEQ_BGNUpdate(...)
+ *
+ * This function updates the background noise parameter estimates.
+ *
+ * Input:
+ *		- inst			: NetEQ instance, where the speech history is stored.
+ *      - scratchPtr    : Pointer to scratch vector.
+ *
+ * Output:
+ *		- inst			: Updated information about the BGN characteristics.
+ *
+ * Return value			: No return value
+ */
+
+void WebRtcNetEQ_BGNUpdate(
+#ifdef SCRATCH
+                           DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
+#else
+                           DSPInst_t *inst
+#endif
+)
+{
+    const WebRtc_Word16 w16_vecLen = 256;
+    BGNInst_t *BGN_Inst = &(inst->BGNInst);
+#ifdef SCRATCH
+    WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
+    WebRtc_Word16 *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
+    WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
+    WebRtc_Word16 *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
+#else
+    WebRtc_Word32 pw32_autoCorr[BGN_LPC_ORDER + 1];
+    WebRtc_Word16 pw16_tempVec[BGN_LPC_ORDER];
+    WebRtc_Word16 pw16_outVec[BGN_LPC_ORDER + 64];
+    WebRtc_Word16 pw16_rc[BGN_LPC_ORDER];
+#endif
+    WebRtc_Word16 pw16_A[BGN_LPC_ORDER + 1]; /* LPC coefficients */
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word16 *pw16_vec;
+    WebRtc_Word16 w16_maxSample;
+    WebRtc_Word16 w16_tmp, w16_tmp2;
+    WebRtc_Word16 w16_enSampleShift;
+    WebRtc_Word32 w32_en, w32_enBGN;
+    WebRtc_Word32 w32_enUpdateThreashold;
+    WebRtc_Word16 stability;
+
+    /* Analyze the most recent w16_vecLen samples of the speech history. */
+    pw16_vec = inst->pw16_speechHistory + inst->w16_speechHistoryLen - w16_vecLen;
+
+#ifdef NETEQ_VAD
+    /* Only consider a BGN update when post-decode VAD is off or reports a
+     * passive frame. NOTE: the matching closing brace lives in a second
+     * #ifdef NETEQ_VAD block near the end of this function. */
+    if( !inst->VADInst.VADEnabled /* we are not using post-decode VAD */
+        || inst->VADInst.VADDecision == 0 )
+    { /* ... or, post-decode VAD says passive speaker */
+#endif /* NETEQ_VAD */
+
+    /*Insert zeros to guarantee that boundary values do not distort autocorrelation */
+    WEBRTC_SPL_MEMCPY_W16(pw16_tempVec, pw16_vec - BGN_LPC_ORDER, BGN_LPC_ORDER);
+    WebRtcSpl_MemSetW16(pw16_vec - BGN_LPC_ORDER, 0, BGN_LPC_ORDER);
+
+    /* Choose a right-shift so the 256-term autocorrelation sum cannot
+     * overflow 32 bits for the observed maximum sample magnitude. */
+    w16_maxSample = WebRtcSpl_MaxAbsValueW16(pw16_vec, w16_vecLen);
+    w16_tmp = 8 /* log2(w16_veclen) = 8 */
+        - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_maxSample, w16_maxSample));
+    w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+    WebRtcNetEQ_CrossCorr(pw32_autoCorr, pw16_vec, pw16_vec, w16_vecLen, BGN_LPC_ORDER + 1,
+        w16_tmp, -1);
+
+    /* Copy back data */
+    WEBRTC_SPL_MEMCPY_W16(pw16_vec - BGN_LPC_ORDER, pw16_tempVec, BGN_LPC_ORDER);
+
+    w16_enSampleShift = 8 - w16_tmp; /* Number of shifts to get energy/sample */
+    /* pw32_autoCorr[0]>>w16_enSampleShift */
+    w32_en = WEBRTC_SPL_RSHIFT_W32(pw32_autoCorr[0], w16_enSampleShift);
+    if ((w32_en < BGN_Inst->w32_energyUpdate
+#ifdef NETEQ_VAD
+        /* post-decode VAD disabled and w32_en sufficiently low */
+         && !inst->VADInst.VADEnabled)
+    /* ... or, post-decode VAD says passive speaker */
+    || (inst->VADInst.VADEnabled && inst->VADInst.VADDecision == 0)
+#else
+    ) /* just close the extra parenthesis */
+#endif /* NETEQ_VAD */
+    )
+    {
+        /* Generate LPC coefficients */
+        if (pw32_autoCorr[0] > 0)
+        {
+            /* regardless of whether the filter is actually updated or not,
+             update energy threshold levels, since we have in fact observed
+             a low energy signal */
+            if (w32_en < BGN_Inst->w32_energyUpdate)
+            {
+                /* Never get under 1.0 in average sample energy */
+                BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
+                BGN_Inst->w32_energyUpdateLow = 0;
+            }
+
+            stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, pw16_A, pw16_rc, BGN_LPC_ORDER);
+            /* Only update BGN if filter is stable */
+            if (stability != 1)
+            {
+                return;
+            }
+        }
+        else
+        {
+            /* Do not update */
+            return;
+        }
+        /* Generate the CNG gain factor by looking at the energy of the residual */
+        WebRtcSpl_FilterMAFastQ12(pw16_vec + w16_vecLen - 64, pw16_outVec, pw16_A,
+            BGN_LPC_ORDER + 1, 64);
+        w32_enBGN = WebRtcNetEQ_DotW16W16(pw16_outVec, pw16_outVec, 64, 0);
+        /* Dot product should never overflow since it is BGN and residual! */
+
+        /*
+         * Check spectral flatness
+         * Comparing the residual variance with the input signal variance tells
+         * if the spectrum is flat or not.
+         * (20*w32_enBGN) >= (w32_en<<6)
+         * Also ensure that the energy is non-zero.
+         */
+        if ((WEBRTC_SPL_MUL_32_16(w32_enBGN, 20) >= WEBRTC_SPL_LSHIFT_W32(w32_en, 6))
+            && (w32_en > 0))
+        {
+            /* spectrum is flat enough; save filter parameters */
+
+            WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filter, pw16_A, BGN_LPC_ORDER+1);
+            WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filterState,
+                pw16_vec + w16_vecLen - BGN_LPC_ORDER, BGN_LPC_ORDER);
+
+            /* Save energy level */
+            BGN_Inst->w32_energy = WEBRTC_SPL_MAX(w32_en, 1);
+
+            /* Update energy threshold levels */
+            /* Never get under 1.0 in average sample energy */
+            BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
+            BGN_Inst->w32_energyUpdateLow = 0;
+
+            /* Normalize w32_enBGN to 29 or 30 bits before sqrt */
+            w16_tmp2 = WebRtcSpl_NormW32(w32_enBGN) - 1;
+            if (w16_tmp2 & 0x1)
+            {
+                w16_tmp2 -= 1; /* Even number of shifts required */
+            }
+            w32_enBGN = WEBRTC_SPL_SHIFT_W32(w32_enBGN, w16_tmp2);
+
+            /* Calculate scale and shift factor */
+            BGN_Inst->w16_scale = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_enBGN);
+            BGN_Inst->w16_scaleShift = 13 + ((6 + w16_tmp2) >> 1); /* RANDN table is in Q13, */
+            /* 6=log2(64) */
+
+            BGN_Inst->w16_initialized = 1;
+        }
+
+    }
+    else
+    {
+        /*
+         * Will only happen if post-decode VAD is disabled and w32_en is not low enough.
+         * Increase the threshold for update so that it increases by a factor 4 in four
+         * seconds.
+         * energy = energy * 1.0035
+         */
+        /* The 32x16 multiply is carried out on 8-bit slices of
+         * w32_energyUpdate to avoid overflow; w32_energyUpdateLow holds the
+         * accumulated fractional part (carried into the high word below). */
+        w32_tmp = WEBRTC_SPL_MUL_16_16_RSFT(NETEQFIX_BGNFRAQINCQ16,
+            BGN_Inst->w32_energyUpdateLow, 16);
+        w32_tmp += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
+            (WebRtc_Word16)(BGN_Inst->w32_energyUpdate & 0xFF));
+        w32_tmp += (WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
+            (WebRtc_Word16)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
+        BGN_Inst->w32_energyUpdateLow += w32_tmp;
+
+        BGN_Inst->w32_energyUpdate += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
+            (WebRtc_Word16)(BGN_Inst->w32_energyUpdate>>16));
+        BGN_Inst->w32_energyUpdate += BGN_Inst->w32_energyUpdateLow >> 16;
+        BGN_Inst->w32_energyUpdateLow = (BGN_Inst->w32_energyUpdateLow & 0x0FFFF);
+
+        /* Update maximum energy */
+        /* Decrease by a factor 1/1024 each time */
+        BGN_Inst->w32_energyMax = BGN_Inst->w32_energyMax - (BGN_Inst->w32_energyMax >> 10);
+        if (w32_en > BGN_Inst->w32_energyMax)
+        {
+            BGN_Inst->w32_energyMax = w32_en;
+        }
+
+        /* Set update level to at the minimum 60.21dB lower than the maximum energy */
+        /* (>>20 is a factor 2^20 ~ 60.21 dB; +524288 rounds to nearest) */
+        w32_enUpdateThreashold = (BGN_Inst->w32_energyMax + 524288) >> 20;
+        if (w32_enUpdateThreashold > BGN_Inst->w32_energyUpdate)
+        {
+            BGN_Inst->w32_energyUpdate = w32_enUpdateThreashold;
+        }
+    }
+
+#ifdef NETEQ_VAD
+} /* closing initial if-statement */
+#endif /* NETEQ_VAD */
+
+    return;
+}
+
+#undef	 SCRATCH_PW32_AUTO_CORR
+#undef	 SCRATCH_PW16_TEMP_VEC
+#undef	 SCRATCH_PW16_RC
+#undef	 SCRATCH_PW16_OUT_VEC
+
diff --git a/trunk/src/modules/audio_coding/neteq/buffer_stats.h b/trunk/src/modules/audio_coding/neteq/buffer_stats.h
new file mode 100644
index 0000000..9820519
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/buffer_stats.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Calculates and stores the packet buffer statistics.
+ */
+
+#ifndef BUFFER_STATS_H
+#define BUFFER_STATS_H
+
+#include "automode.h"
+#include "webrtc_neteq.h" /* to define enum WebRtcNetEQPlayoutMode */
+
+/* NetEQ related decisions */
+#define BUFSTATS_DO_NORMAL					0
+#define BUFSTATS_DO_ACCELERATE				1
+#define BUFSTATS_DO_MERGE					2
+#define BUFSTATS_DO_EXPAND					3
+#define BUFSTAT_REINIT						4
+#define BUFSTATS_DO_RFC3389CNG_PACKET		5
+#define BUFSTATS_DO_RFC3389CNG_NOPACKET		6
+#define BUFSTATS_DO_INTERNAL_CNG_NOPACKET	7
+#define BUFSTATS_DO_PREEMPTIVE_EXPAND		8
+#define BUFSTAT_REINIT_DECODER              9
+#define BUFSTATS_DO_DTMF_ONLY               10
+/* Decisions related to when NetEQ is switched off (or in FAX mode) */
+#define BUFSTATS_DO_ALTERNATIVE_PLC				   11
+#define BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS		   12
+#define BUFSTATS_DO_AUDIO_REPETITION			   13
+#define BUFSTATS_DO_AUDIO_REPETITION_INC_TS		   14
+
+/* Reinit decoder states after this number of expands (upon arrival of new packet) */
+#define REINIT_AFTER_EXPANDS 100
+
+/* Wait no longer than this number of RecOut calls before using an "early" packet */
+#define MAX_WAIT_FOR_PACKET 10
+
+/* CNG modes */
+#define CNG_OFF 0
+#define CNG_RFC3389_ON 1
+#define CNG_INTERNAL_ON 2
+
+typedef struct
+{
+
+    /* store statistical data here */
+    WebRtc_Word16 w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF) */
+    WebRtc_Word16 w16_noExpand; /* 1 if the previous playout mode was NOT expand/fade-to-BGN */
+    WebRtc_Word32 uw32_CNGplayedTS; /* timestamps consumed while playing CNG; added to targetTS
+                                       when comparing against availableTS */
+
+    /* VQmon data */
+    WebRtc_UWord16 avgDelayMsQ8; /* smoothed buffer delay in ms, Q8 (IIR: 511/512 old + 1/512 new) */
+    WebRtc_Word16 maxDelayMs; /* largest buffer delay observed so far, in ms */
+
+    AutomodeInst_t Automode_inst; /* automatic buffer-level adaptation state */
+
+} BufstatsInst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_BufstatsDecision()
+ *
+ * Returns a decision about which action is currently desired
+ *
+ *
+ *	Input:
+ *		inst:			    The bufstat instance
+ *		cur_size:		    Current buffer size in ms in Q3 domain
+ *		targetTS:		    The desired timestamp to start playout from
+ *		availableTS:	    The closest future value available in buffer
+ *		noPacket		    1 if no packet is available, makes availableTS undefined
+ *		prevPlayMode	    mode of last NetEq playout
+ *		timestampsPerCall	number of timestamps per 10 ms
+ *
+ *	Output:
+ *		Returns:		    A decision, as defined above (see top of file)
+ *
+ */
+
+WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
+                                            WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
+                                            WebRtc_UWord32 availableTS, int noPacket,
+                                            int cngPacket, int prevPlayMode,
+                                            enum WebRtcNetEQPlayoutMode playoutMode,
+                                            int timestampsPerCall, int NoOfExpandCalls,
+                                            WebRtc_Word16 fs_mult,
+                                            WebRtc_Word16 lastModeBGNonly, int playDtmf);
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/bufstats_decision.c b/trunk/src/modules/audio_coding/neteq/bufstats_decision.c
new file mode 100644
index 0000000..3d37e17
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/bufstats_decision.c
@@ -0,0 +1,426 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function where the main decision logic for buffer level
+ * adaptation happens.
+ */
+
+#include "buffer_stats.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+#include "automode.h"
+#include "neteq_defines.h"
+#include "neteq_error_codes.h"
+#include "webrtc_neteq.h"
+
+#define NETEQ_BUFSTAT_20MS_Q7 2560 /* = 20 ms in Q7  */
+
+/****************************************************************************
+ * WebRtcNetEQ_BufstatsDecision()
+ *
+ * Main buffer-level decision logic. Based on the current buffer size, the
+ * wanted timestamp (targetTS) and the next available packet timestamp
+ * (availableTS), decides which action NetEQ should take next: normal
+ * playout, accelerate, preemptive expand, merge, expand, CNG, DTMF-only,
+ * or a reinit. Returns one of the BUFSTATS_DO_* / BUFSTAT_* codes defined
+ * in buffer_stats.h (see that header for parameter documentation).
+ */
+WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
+                                            WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
+                                            WebRtc_UWord32 availableTS, int noPacket,
+                                            int cngPacket, int prevPlayMode,
+                                            enum WebRtcNetEQPlayoutMode playoutMode,
+                                            int timestampsPerCall, int NoOfExpandCalls,
+                                            WebRtc_Word16 fs_mult,
+                                            WebRtc_Word16 lastModeBGNonly, int playDtmf)
+{
+
+    int currentDelayMs;
+    WebRtc_Word32 currSizeSamples = cur_size;
+    WebRtc_Word16 extraDelayPacketsQ8 = 0;
+
+    /* Avoid overflow if the buffer size should be really large (cur_size is limited 256ms) */
+    WebRtc_Word32 curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4); /* Q3 -> Q7 */
+    WebRtc_UWord16 level_limit_hi, level_limit_lo;
+
+    /* Keep prevTimeScale set only if the previous mode actually performed time-scaling */
+    inst->Automode_inst.prevTimeScale &= (prevPlayMode == MODE_SUCCESS_ACCELERATE
+        || prevPlayMode == MODE_LOWEN_ACCELERATE || prevPlayMode == MODE_SUCCESS_PREEMPTIVE
+        || prevPlayMode == MODE_LOWEN_PREEMPTIVE);
+
+    if ((prevPlayMode != MODE_RFC3389CNG) && (prevPlayMode != MODE_CODEC_INTERNAL_CNG))
+    {
+        /*
+         * Do not update buffer history if currently playing CNG
+         * since it will bias the filtered buffer level.
+         */
+        WebRtcNetEQ_BufferLevelFilter(cur_size, &(inst->Automode_inst), timestampsPerCall,
+            fs_mult);
+    }
+    else
+    {
+        /* only update time counters */
+        inst->Automode_inst.packetIatCountSamp += timestampsPerCall; /* packet inter-arrival time */
+        inst->Automode_inst.peakIatCountSamp += timestampsPerCall; /* peak inter-arrival time */
+        inst->Automode_inst.timescaleHoldOff >>= 1; /* time-scaling limiter */
+    }
+    /* cur_size is now the buffer size in ms in Q7, saturated to Word16 range */
+    cur_size = WEBRTC_SPL_MIN(curr_sizeQ7, WEBRTC_SPL_WORD16_MAX);
+
+    /* Calculate VQmon related variables */
+    /* avgDelay = avgDelay*(511/512) + currentDelay*(1/512) (sample ms delay in Q8) */
+    inst->avgDelayMsQ8 = (WebRtc_Word16) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
+        + (cur_size >> 9));
+
+    /* Update maximum delay if needed */
+    currentDelayMs = (curr_sizeQ7 >> 7);
+    if (currentDelayMs > inst->maxDelayMs)
+    {
+        inst->maxDelayMs = currentDelayMs;
+    }
+
+    /* NetEQ is on with normal or streaming mode */
+    if (playoutMode == kPlayoutOn || playoutMode == kPlayoutStreaming)
+    {
+        /* Guard for errors, so that it should not get stuck in error mode */
+        if (prevPlayMode == MODE_ERROR)
+        {
+            if (noPacket)
+            {
+                return BUFSTATS_DO_EXPAND;
+            }
+            else
+            {
+                return BUFSTAT_REINIT;
+            }
+        }
+
+        /* Remember whether the previous mode was an expand-type mode */
+        if (prevPlayMode != MODE_EXPAND && prevPlayMode != MODE_FADE_TO_BGN)
+        {
+            inst->w16_noExpand = 1;
+        }
+        else
+        {
+            inst->w16_noExpand = 0;
+        }
+
+        if (cngPacket)
+        {
+            /* signed difference between wanted and available TS */
+            WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
+            int32_t optimal_level_samp = (inst->Automode_inst.optBufLevel *
+                inst->Automode_inst.packetSpeechLenSamp) >> 8;
+            int32_t excess_waiting_time_samp = -diffTS - optimal_level_samp;
+
+            if (excess_waiting_time_samp > optimal_level_samp / 2)
+            {
+                /* The waiting time for this packet will be longer than 1.5
+                 * times the wanted buffer delay. Advance the clock to cut
+                 * waiting time down to the optimal.
+                 */
+                inst->uw32_CNGplayedTS += excess_waiting_time_samp;
+                diffTS += excess_waiting_time_samp;
+            }
+
+            if ((diffTS) < 0 && (prevPlayMode == MODE_RFC3389CNG))
+            {
+                /* Not time to play this packet yet. Wait another round before using this
+                 * packet. Keep on playing CNG from previous CNG parameters. */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+
+            /* otherwise, go for the CNG packet now */
+            return BUFSTATS_DO_RFC3389CNG_PACKET;
+        }
+
+        /* Check for expand or CNG when no packet is available */
+        if (noPacket)
+        {
+            if (inst->w16_cngOn == CNG_RFC3389_ON)
+            {
+                /* keep on playing CNG */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+            else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+            {
+                /* keep on playing internal CNG */
+                return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+            }
+            else if (playDtmf == 1)
+            {
+                /* we have no audio data, but can play DTMF */
+                return BUFSTATS_DO_DTMF_ONLY;
+            }
+            else
+            {
+                /* nothing to play => do Expand */
+                return BUFSTATS_DO_EXPAND;
+            }
+        }
+
+        /*
+         * If the expand period was very long, reset NetEQ since it is likely that the
+         * sender was restarted.
+         */
+        if (NoOfExpandCalls > REINIT_AFTER_EXPANDS) return BUFSTAT_REINIT_DECODER;
+
+        /* Calculate extra delay in Q8 packets */
+        if (inst->Automode_inst.extraDelayMs > 0 && inst->Automode_inst.packetSpeechLenSamp
+            > 0)
+        {
+            extraDelayPacketsQ8 = WebRtcSpl_DivW32W16ResW16(
+                (WEBRTC_SPL_MUL(inst->Automode_inst.extraDelayMs, 8 * fs_mult) << 8),
+                inst->Automode_inst.packetSpeechLenSamp);
+            /* (extra delay in samples in Q8) */
+        }
+
+        /* Check if needed packet is available */
+        if (targetTS == availableTS)
+        {
+
+            /* If last mode was not expand, and there is no DTMF to play */
+            if (inst->w16_noExpand == 1 && playDtmf == 0)
+            {
+                /* If so check for accelerate */
+
+                level_limit_lo = ((inst->Automode_inst.optBufLevel) >> 1) /* 50 % */
+                    + ((inst->Automode_inst.optBufLevel) >> 2); /* ... + 25% = 75% */
+
+                /* set upper limit to optBufLevel, but make sure that window is at least 20ms */
+                level_limit_hi = WEBRTC_SPL_MAX(inst->Automode_inst.optBufLevel,
+                    level_limit_lo +
+                    WebRtcSpl_DivW32W16ResW16((WEBRTC_SPL_MUL(20*8, fs_mult) << 8),
+                        inst->Automode_inst.packetSpeechLenSamp));
+
+                /* if extra delay is non-zero, add it */
+                if (extraDelayPacketsQ8 > 0)
+                {
+                    level_limit_hi += extraDelayPacketsQ8;
+                    level_limit_lo += extraDelayPacketsQ8;
+                }
+
+                if (((inst->Automode_inst.buffLevelFilt >= level_limit_hi) &&
+                    (inst->Automode_inst.timescaleHoldOff == 0)) ||
+                    (inst->Automode_inst.buffLevelFilt >= level_limit_hi << 2))
+                {
+                    /*
+                     * Buffer level higher than limit and time-scaling allowed,
+                     * OR buffer level _really_ high.
+                     */
+                    return BUFSTATS_DO_ACCELERATE;
+                }
+                else if ((inst->Automode_inst.buffLevelFilt < level_limit_lo)
+                    && (inst->Automode_inst.timescaleHoldOff == 0))
+                {
+                    return BUFSTATS_DO_PREEMPTIVE_EXPAND;
+                }
+            }
+            return BUFSTATS_DO_NORMAL;
+        }
+
+        /* Check for Merge */
+        else if (availableTS > targetTS)
+        {
+
+            /* Check that we do not play a packet "too early" */
+            if ((prevPlayMode == MODE_EXPAND)
+                && (availableTS - targetTS
+                    < (WebRtc_UWord32) WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
+                        (WebRtc_Word16)REINIT_AFTER_EXPANDS))
+                && (NoOfExpandCalls < MAX_WAIT_FOR_PACKET)
+                && (availableTS
+                    > targetTS
+                        + WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
+                            (WebRtc_Word16)NoOfExpandCalls))
+                && (inst->Automode_inst.buffLevelFilt <= inst->Automode_inst.optBufLevel
+                    + extraDelayPacketsQ8))
+            {
+                if (playDtmf == 1)
+                {
+                    /* we still have DTMF to play, so do not perform expand */
+                    return BUFSTATS_DO_DTMF_ONLY;
+                }
+                else
+                {
+                    /* nothing to play */
+                    return BUFSTATS_DO_EXPAND;
+                }
+            }
+
+            /* If previous was CNG period or BGNonly then no merge is needed */
+            if ((prevPlayMode == MODE_RFC3389CNG) || (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
+                || lastModeBGNonly)
+            {
+                /*
+                 * Keep the same delay as before the CNG (or maximum 70 ms in buffer as safety
+                 * precaution), but make sure that the number of samples in buffer is no
+                 * higher than 4 times the optimal level.
+                 */
+                WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
+                if (diffTS >= 0
+                    || (WEBRTC_SPL_MUL_16_16_RSFT( inst->Automode_inst.optBufLevel
+                        + extraDelayPacketsQ8,
+                        inst->Automode_inst.packetSpeechLenSamp, 6) < currSizeSamples))
+                {
+                    /* it is time to play this new packet */
+                    return BUFSTATS_DO_NORMAL;
+                }
+                else
+                {
+                    /* it is too early to play this new packet => keep on playing CNG */
+                    if (prevPlayMode == MODE_RFC3389CNG)
+                    {
+                        return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+                    }
+                    else if (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
+                    {
+                        return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+                    }
+                    else if (playDtmf == 1)
+                    {
+                        /* we have no audio data, but can play DTMF */
+                        return BUFSTATS_DO_DTMF_ONLY;
+                    }
+                    else /* lastModeBGNonly */
+                    {
+                        /* signal expand, but this will result in BGN again */
+                        return BUFSTATS_DO_EXPAND;
+                    }
+                }
+            }
+
+            /* Do not merge unless we have done an Expand before (for complexity reasons) */
+            if ((inst->w16_noExpand == 0) || ((frameSize < timestampsPerCall) && (cur_size
+                > NETEQ_BUFSTAT_20MS_Q7)))
+            {
+                return BUFSTATS_DO_MERGE;
+            }
+            else if (playDtmf == 1)
+            {
+                /* play DTMF instead of expand */
+                return BUFSTATS_DO_DTMF_ONLY;
+            }
+            else
+            {
+                return BUFSTATS_DO_EXPAND;
+            }
+        }
+    }
+    else
+    { /* kPlayoutOff or kPlayoutFax */
+        if (cngPacket)
+        {
+            if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
+            {
+                /* time to play this packet now */
+                return BUFSTATS_DO_RFC3389CNG_PACKET;
+            }
+            else
+            {
+                /* wait before playing this packet */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+        }
+        if (noPacket)
+        {
+            /*
+             * No packet =>
+             * 1. If in CNG mode play as usual
+             * 2. Otherwise use other method to generate data and hold TS value
+             */
+            if (inst->w16_cngOn == CNG_RFC3389_ON)
+            {
+                /* keep on playing CNG */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+            else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+            {
+                /* keep on playing internal CNG */
+                return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+            }
+            else
+            {
+                /* nothing to play => invent some data to play out */
+                if (playoutMode == kPlayoutOff)
+                {
+                    return BUFSTATS_DO_ALTERNATIVE_PLC;
+                }
+                else if (playoutMode == kPlayoutFax)
+                {
+                    return BUFSTATS_DO_AUDIO_REPETITION;
+                }
+                else
+                {
+                    /* UNDEFINED, should not get here... */
+                    assert(0);
+                    return BUFSTAT_REINIT;
+                }
+            }
+        }
+        else if (targetTS == availableTS)
+        {
+            return BUFSTATS_DO_NORMAL;
+        }
+        else
+        {
+            if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
+            {
+                return BUFSTATS_DO_NORMAL;
+            }
+            else if (playoutMode == kPlayoutOff)
+            {
+                /*
+                 * If currently playing CNG, continue with that. Don't increase TS
+                 * since uw32_CNGplayedTS will be increased.
+                 */
+                if (inst->w16_cngOn == CNG_RFC3389_ON)
+                {
+                    return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+                }
+                else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+                {
+                    return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+                }
+                else
+                {
+                    /*
+                     * Otherwise, do PLC and increase TS while waiting for the time to
+                     * play this packet.
+                     */
+                    return BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS;
+                }
+            }
+            else if (playoutMode == kPlayoutFax)
+            {
+                /*
+                 * If currently playing CNG, continue with that; don't increase TS since
+                 * uw32_CNGplayedTS will be increased.
+                 */
+                if (inst->w16_cngOn == CNG_RFC3389_ON)
+                {
+                    return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+                }
+                else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+                {
+                    return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+                }
+                else
+                {
+                    /*
+                     * Otherwise, do audio repetition and increase TS while waiting for the
+                     * time to play this packet.
+                     */
+                    return BUFSTATS_DO_AUDIO_REPETITION_INC_TS;
+                }
+            }
+            else
+            {
+                /* UNDEFINED, should not get here... */
+                assert(0);
+                return BUFSTAT_REINIT;
+            }
+        }
+    }
+    /* We should not get here (but sometimes we do anyway...) */
+    return BUFSTAT_REINIT;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/cng_internal.c b/trunk/src/modules/audio_coding/neteq/cng_internal.c
new file mode 100644
index 0000000..f3a10dc
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/cng_internal.c
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function for obtaining comfort noise from noise parameters
+ * according to IETF RFC 3389.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+#include "webrtc_cng.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_Cng(...)
+ *
+ * This function produces CNG according to RFC 3389.
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance
+ *      - len           : Number of samples to produce (max 640 or
+ *                        640 - fsHz*5/8000 for first-time CNG, governed by
+ *                        the definition of WEBRTC_CNG_MAX_OUTSIZE_ORDER in
+ *                        webrtc_cng.h)
+ *
+ * Output:
+ *      - pw16_outData  : Output CNG
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+#ifdef NETEQ_CNG_CODEC
+/* Must compile NetEQ with CNG support to enable this function */
+
+/*
+ * Generate 'len' samples of RFC 3389 comfort noise into pw16_outData.
+ * On the first CNG call after any other playout mode, extra samples are
+ * generated and overlap-added (Q15 windowing) with the stored overlap
+ * vector to avoid a discontinuity. Returns 0 on success, <0 on error.
+ */
+int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len)
+{
+    WebRtc_Word16 w16_winMute = 0; /* mixing factor for overlap data */
+    WebRtc_Word16 w16_winUnMute = 0; /* mixing factor for comfort noise */
+    WebRtc_Word16 w16_winMuteInc = 0; /* mixing factor increment (negative) */
+    WebRtc_Word16 w16_winUnMuteInc = 0; /* mixing factor increment */
+    int i;
+
+    /*
+     * Check if last RecOut call was other than RFC3389,
+     * that is, this call is the first of a CNG period.
+     */
+    if (inst->w16_mode != MODE_RFC3389CNG)
+    {
+        /* Reset generation and overlap slightly with old data */
+
+        /* Generate len samples + overlap */
+        if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData,
+            (WebRtc_Word16) (len + inst->ExpandInst.w16_overlap), 1) < 0)
+        {
+            /* error returned */
+            return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
+        }
+
+        /* Set windowing parameters depending on sample rate */
+        if (inst->fs == 8000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
+#ifdef NETEQ_WIDEBAND
+        }
+        else if (inst->fs == 16000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        }
+        else if (inst->fs == 32000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        }
+        else if (inst->fs == 48000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
+#endif
+        }
+        else
+        {
+            /* Unsupported sample rate (should not be possible) */
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /* Do overlap add between new vector and overlap */
+        for (i = 0; i < inst->ExpandInst.w16_overlap; i++)
+        {
+            /* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
+            inst->ExpandInst.pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                WEBRTC_SPL_MUL_16_16(
+                    inst->ExpandInst.pw16_overlapVec[i], w16_winMute) +
+                WEBRTC_SPL_MUL_16_16(pw16_outData[i], w16_winUnMute)
+                + 16384, 15); /* shift with proper rounding */
+
+            w16_winMute += w16_winMuteInc; /* decrease mute factor (inc<0) */
+            w16_winUnMute += w16_winUnMuteInc; /* increase unmute factor (inc>0) */
+
+        }
+
+        /*
+         * Shift the contents of the outData buffer by overlap samples, since we
+         * already used these first samples in the overlapVec above
+         */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_outData+inst->ExpandInst.w16_overlap, len);
+
+    }
+    else
+    {
+        /* This is a subsequent CNG call; no special overlap needed */
+
+        /* Generate len samples */
+        if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (WebRtc_Word16) len, 0) < 0)
+        {
+            /* error returned */
+            return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
+        }
+    }
+
+    return 0;
+
+}
+
+#endif /* NETEQ_CNG_CODEC */
+
diff --git a/trunk/src/modules/audio_coding/neteq/codec_db.c b/trunk/src/modules/audio_coding/neteq/codec_db.c
new file mode 100644
index 0000000..cb63aea
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/codec_db.c
@@ -0,0 +1,743 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the codec database.
+ */
+
+#include "codec_db.h"
+
+#include <string.h> /* to define NULL */
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+/*
+ * Resets the codec database.
+ */
+
+/*
+ * Reset the codec database: zero the whole struct, then mark every codec
+ * position and payload-type slot as unassigned (-1). Always returns 0.
+ */
+int WebRtcNetEQ_DbReset(CodecDbInst_t *inst)
+{
+    int i;
+
+    /* Word-wise memset of the entire database struct to zero */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) inst, 0,
+        sizeof(CodecDbInst_t) / sizeof(WebRtc_Word16));
+
+    /* -1 marks "no codec registered at this position" */
+    for (i = 0; i < NUM_TOTAL_CODECS; i++)
+    {
+        inst->position[i] = -1;
+    }
+
+    /* -1 marks "no payload type assigned" */
+    for (i = 0; i < NUM_CODECS; i++)
+    {
+        inst->payloadType[i] = -1;
+    }
+
+    /* -1 marks "no CNG payload type assigned" */
+    for (i = 0; i < NUM_CNG_CODECS; i++)
+    {
+        inst->CNGpayloadType[i] = -1;
+    }
+
+    return 0;
+}
+
+/*
+ * Adds a new codec to the database.
+ */
+
+/*
+ * Register a codec (its payload type, sample rate, decoder state and
+ * function pointers) in the codec database. Returns 0 on success, or a
+ * CODEC_DB_* error code (unsupported codec/fs, payload taken, DB full).
+ * CNG codecs are special-cased: one CNG payload type is stored per sample
+ * rate, and only the first (or the 8 kHz) CNG registration occupies a
+ * regular database slot.
+ */
+int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
+                      WebRtc_Word16 payloadType, FuncDecode funcDecode,
+                      FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
+                      FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
+                      FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
+                      FuncUpdBWEst funcUpdBWEst, FuncGetErrorCode funcGetErrorCode,
+                      void* codec_state, WebRtc_UWord16 codec_fs)
+{
+
+    int temp;
+    int insertCNGcodec = 0, overwriteCNGcodec = 0, CNGpos = -1;
+
+    /* RED is only supported when compiled in */
+#ifndef NETEQ_RED_CODEC
+    if (codec == kDecoderRED)
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+#endif
+    /* Reject codec enum values outside the valid (reserved-delimited) range */
+    if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
+        >= (int) kDecoderReservedEnd))
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+
+    /* Only sample rates compiled into this build are accepted */
+    if ((codec_fs != 8000)
+#ifdef NETEQ_WIDEBAND
+    &&(codec_fs!=16000)
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    &&(codec_fs!=32000)
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    &&(codec_fs!=48000)
+#endif
+    )
+    {
+        return CODEC_DB_UNSUPPORTED_FS;
+    }
+
+    /* Ensure that the codec type is supported */
+    switch (codec)
+    {
+#ifdef NETEQ_PCM16B_CODEC
+        case kDecoderPCM16B :
+#endif
+#ifdef NETEQ_G711_CODEC
+        case kDecoderPCMu :
+        case kDecoderPCMa :
+#endif
+#ifdef NETEQ_ILBC_CODEC
+        case kDecoderILBC :
+#endif
+#ifdef NETEQ_ISAC_CODEC
+        case kDecoderISAC :
+#endif
+#ifdef NETEQ_ISAC_SWB_CODEC
+        case kDecoderISACswb :
+#endif
+#ifdef NETEQ_G722_CODEC
+        case kDecoderG722 :
+#endif
+#ifdef NETEQ_WIDEBAND
+        case kDecoderPCM16Bwb :
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        case kDecoderPCM16Bswb32kHz :
+#endif
+#ifdef NETEQ_CNG_CODEC
+        case kDecoderCNG :
+#endif
+#ifdef NETEQ_ATEVENT_DECODE
+        case kDecoderAVT :
+#endif
+#ifdef NETEQ_RED_CODEC
+        case kDecoderRED :
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        case kDecoderPCM16Bswb48kHz :
+#endif
+#ifdef NETEQ_ARBITRARY_CODEC
+        case kDecoderArbitrary:
+#endif
+#ifdef NETEQ_G729_CODEC
+        case kDecoderG729:
+#endif
+#ifdef NETEQ_G729_1_CODEC
+        case kDecoderG729_1 :
+#endif
+#ifdef NETEQ_G726_CODEC
+        case kDecoderG726_16 :
+        case kDecoderG726_24 :
+        case kDecoderG726_32 :
+        case kDecoderG726_40 :
+#endif
+#ifdef NETEQ_G722_1_CODEC
+        case kDecoderG722_1_16 :
+        case kDecoderG722_1_24 :
+        case kDecoderG722_1_32 :
+#endif
+#ifdef NETEQ_G722_1C_CODEC
+        case kDecoderG722_1C_24 :
+        case kDecoderG722_1C_32 :
+        case kDecoderG722_1C_48 :
+#endif
+#ifdef NETEQ_SPEEX_CODEC
+        case kDecoderSPEEX_8 :
+        case kDecoderSPEEX_16 :
+#endif
+#ifdef NETEQ_CELT_CODEC
+        case kDecoderCELT_32 :
+#endif
+#ifdef NETEQ_GSMFR_CODEC
+        case kDecoderGSMFR :
+#endif
+#ifdef NETEQ_AMR_CODEC
+        case kDecoderAMR :
+#endif
+#ifdef NETEQ_AMRWB_CODEC
+        case kDecoderAMRWB :
+#endif
+        {
+            /* If we end up here, the inserted codec is supported => Do nothing */
+            break;
+        }
+    default:
+    {
+        /* If we get to this point, the inserted codec is not supported */
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+    }
+
+    /* Check to see if payload type is taken */
+    if (WebRtcNetEQ_DbGetCodec(inst, payloadType) > 0)
+    {
+        return CODEC_DB_PAYLOAD_TAKEN;
+    }
+
+    /* Special case for CNG codecs */
+    if (codec == kDecoderCNG)
+    {
+        /* check if this is first CNG codec to be registered */
+        if (WebRtcNetEQ_DbGetPayload(inst, codec) == CODEC_DB_NOT_EXIST2)
+        {
+            /* no other CNG codec found */
+            insertCNGcodec = 1;
+        }
+
+        /* find the appropriate insert position in CNG payload vector */
+        switch (codec_fs)
+        {
+#ifdef NETEQ_WIDEBAND
+            case 16000:
+            CNGpos = 1;
+            break;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+            case 32000:
+            CNGpos = 2;
+            break;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+            case 48000:
+            CNGpos = 3;
+            break;
+#endif
+            default: /* 8000 Hz case */
+                CNGpos = 0;
+                /*
+                 * The 8 kHz CNG payload type is the one associated with the regular codec DB
+                 * and should override any other setting.
+                 * Overwrite if this isn't the first CNG
+                 */
+                overwriteCNGcodec = !insertCNGcodec;
+                break;
+        }
+
+        /* insert CNG payload type */
+        inst->CNGpayloadType[CNGpos] = payloadType;
+
+    }
+
+    /* Non-CNG codecs, the first CNG codec, and the 8 kHz CNG get a regular DB slot */
+    if ((codec != kDecoderCNG) || (insertCNGcodec == 1) || (overwriteCNGcodec == 1))
+    {
+        /* Check if we have reached the maximum numbers of simultaneous codecs */
+        if (inst->nrOfCodecs == NUM_CODECS) return CODEC_DB_FULL;
+
+        /* Check that codec has not already been initialized to DB =>
+         remove it and reinitialize according to new spec */
+        if ((inst->position[codec] != -1) && (overwriteCNGcodec != 1))
+        { /* if registering multiple CNG codecs, don't remove, just overwrite */
+            WebRtcNetEQ_DbRemove(inst, codec);
+        }
+
+        if (overwriteCNGcodec == 1)
+        {
+            /* reuse the slot of the previously registered CNG codec */
+            temp = inst->position[codec];
+        }
+        else
+        {
+            temp = inst->nrOfCodecs; /* Store this codec's position */
+            inst->position[codec] = temp;
+            inst->nrOfCodecs++;
+        }
+
+        inst->payloadType[temp] = payloadType;
+
+        /* Copy to database */
+        inst->codec_state[temp] = codec_state;
+        inst->funcDecode[temp] = funcDecode;
+        inst->funcDecodeRCU[temp] = funcDecodeRCU;
+        inst->funcAddLatePkt[temp] = funcAddLatePkt;
+        inst->funcDecodeInit[temp] = funcDecodeInit;
+        inst->funcDecodePLC[temp] = funcDecodePLC;
+        inst->funcGetMDinfo[temp] = funcGetMDinfo;
+        inst->funcGetPitch[temp] = funcGetPitch;
+        inst->funcUpdBWEst[temp] = funcUpdBWEst;
+        inst->funcGetErrorCode[temp] = funcGetErrorCode;
+        inst->codec_fs[temp] = codec_fs;
+
+    }
+
+    return 0;
+}
+
+/*
+ * Removes a codec from the database.
+ */
+
+int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec)
+{
+    int i;
+    int pos = -1; /* slot of |codec| in the parallel arrays */
+
+#ifndef NETEQ_RED_CODEC
+    /* RED support is not compiled in, so it can never have been registered. */
+    if (codec == kDecoderRED)
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+#endif
+    /* Reject identifiers outside the valid decoder enum range. */
+    if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
+        >= (int) kDecoderReservedEnd))
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+
+    pos = inst->position[codec];
+    if (pos == -1)
+    {
+        /* The codec is not present in the database. */
+        return CODEC_DB_NOT_EXIST4;
+    }
+    else
+    {
+        /* Remove this codec */
+        inst->position[codec] = -1;
+        /* Compact the parallel arrays: shift every entry above |pos| down
+         * one slot. */
+        for (i = pos; i < (inst->nrOfCodecs - 1); i++)
+        {
+            inst->payloadType[i] = inst->payloadType[i + 1];
+            inst->codec_state[i] = inst->codec_state[i + 1];
+            inst->funcDecode[i] = inst->funcDecode[i + 1];
+            inst->funcDecodeRCU[i] = inst->funcDecodeRCU[i + 1];
+            inst->funcAddLatePkt[i] = inst->funcAddLatePkt[i + 1];
+            inst->funcDecodeInit[i] = inst->funcDecodeInit[i + 1];
+            inst->funcDecodePLC[i] = inst->funcDecodePLC[i + 1];
+            inst->funcGetMDinfo[i] = inst->funcGetMDinfo[i + 1];
+            inst->funcGetPitch[i] = inst->funcGetPitch[i + 1];
+            inst->funcUpdBWEst[i] = inst->funcUpdBWEst[i + 1];
+            inst->funcGetErrorCode[i] = inst->funcGetErrorCode[i + 1];
+            inst->codec_fs[i] = inst->codec_fs[i + 1];
+        }
+        /* Clear the slot that is now unused (here i == nrOfCodecs - 1,
+         * also when the loop above did not run). */
+        inst->payloadType[i] = -1;
+        inst->codec_state[i] = NULL;
+        inst->funcDecode[i] = NULL;
+        inst->funcDecodeRCU[i] = NULL;
+        inst->funcAddLatePkt[i] = NULL;
+        inst->funcDecodeInit[i] = NULL;
+        inst->funcDecodePLC[i] = NULL;
+        inst->funcGetMDinfo[i] = NULL;
+        inst->funcGetPitch[i] = NULL;
+        inst->funcUpdBWEst[i] = NULL;
+        inst->funcGetErrorCode[i] = NULL;
+        inst->codec_fs[i] = 0;
+        /* Move down all the codecs above this one */
+        for (i = 0; i < NUM_TOTAL_CODECS; i++)
+        {
+            if (inst->position[i] >= pos)
+            {
+                inst->position[i] = inst->position[i] - 1;
+            }
+        }
+        inst->nrOfCodecs--;
+
+        if (codec == kDecoderCNG)
+        {
+            /* also remove all registered CNG payload types */
+            for (i = 0; i < NUM_CNG_CODECS; i++)
+            {
+                inst->CNGpayloadType[i] = -1;
+            }
+        }
+    }
+    return 0;
+}
+
+/*
+ * Get the decoder function pointers for a codec.
+ */
+
+int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
+                          CodecFuncInst_t *ptr_inst)
+{
+    int pos;
+
+    /*
+     * Validate |codec| BEFORE using it as an array index: the previous code
+     * read inst->position[codec] first, which is an out-of-bounds read for
+     * identifiers outside the reserved enum range.
+     */
+    if ((codec <= kDecoderReservedStart) || (codec >= kDecoderReservedEnd) || (codec
+        > NUM_TOTAL_CODECS))
+    {
+        /* ERROR: invalid codec identifier. */
+        pos = -1;
+    }
+    else
+    {
+        pos = inst->position[codec]; /* -1 if the codec is not registered */
+    }
+
+    if (pos >= 0)
+    {
+        /* Copy the decoder state pointer and all function pointers. */
+        ptr_inst->codec_state = inst->codec_state[pos];
+        ptr_inst->funcAddLatePkt = inst->funcAddLatePkt[pos];
+        ptr_inst->funcDecode = inst->funcDecode[pos];
+        ptr_inst->funcDecodeRCU = inst->funcDecodeRCU[pos];
+        ptr_inst->funcDecodeInit = inst->funcDecodeInit[pos];
+        ptr_inst->funcDecodePLC = inst->funcDecodePLC[pos];
+        ptr_inst->funcGetMDinfo = inst->funcGetMDinfo[pos];
+        ptr_inst->funcUpdBWEst = inst->funcUpdBWEst[pos];
+        ptr_inst->funcGetErrorCode = inst->funcGetErrorCode[pos];
+        ptr_inst->codec_fs = inst->codec_fs[pos];
+        return 0;
+    }
+    else
+    {
+        /* Unknown codec: zero the output so no stale pointers can be used. */
+        WebRtcSpl_MemSetW16((WebRtc_Word16*) ptr_inst, 0,
+            sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
+        return CODEC_DB_NOT_EXIST1;
+    }
+}
+
+/*
+ * Returns payload number given a codec identifier.
+ */
+
+int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID)
+{
+    /* Look up where this codec lives in the database. */
+    int dbIndex = inst->position[codecID];
+
+    if (dbIndex == -1)
+    {
+        /* Codec has not been registered. */
+        return CODEC_DB_NOT_EXIST2;
+    }
+
+    return inst->payloadType[dbIndex];
+}
+
+/*
+ * Returns codec identifier given a payload number.
+ * Returns -1 if the payload type does not exist.
+ */
+
+int WebRtcNetEQ_DbGetCodec(CodecDbInst_t *inst, int payloadType)
+{
+    int codecIdx;
+
+    /* Scan every possible codec identifier for a matching payload type. */
+    for (codecIdx = 0; codecIdx < NUM_TOTAL_CODECS; codecIdx++)
+    {
+        const int dbIndex = inst->position[codecIdx];
+        if ((dbIndex != -1) && (inst->payloadType[dbIndex] == payloadType))
+        {
+            return codecIdx;
+        }
+    }
+
+    /* No regular codec matched; CNG payload types are stored separately. */
+    if (WebRtcNetEQ_DbIsCNGPayload(inst, payloadType))
+    {
+        return kDecoderCNG;
+    }
+
+    /* found no match */
+    return CODEC_DB_NOT_EXIST3;
+}
+
+/*
+ * Extracts the Payload Split information of the codec with the specified payloadType.
+ */
+
+int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
+                               int codedsize)
+{
+
+    /*
+     * Determine how a payload of |codedsize| bytes for |codecID| may be split
+     * into frames. Writes inst->deltaBytes (bytes per frame, a negative code
+     * for sample-based codecs, or NO_SPLIT) and, for all but the
+     * non-splittable group, inst->deltaTime.
+     */
+    switch (codecID)
+    {
+#ifdef NETEQ_ISAC_CODEC
+        case kDecoderISAC:
+#endif
+#ifdef NETEQ_ISAC_SWB_CODEC
+        case kDecoderISACswb:
+#endif
+#ifdef NETEQ_ARBITRARY_CODEC
+        case kDecoderArbitrary:
+#endif
+#ifdef NETEQ_AMR_CODEC
+        case kDecoderAMR:
+#endif
+#ifdef NETEQ_AMRWB_CODEC
+        case kDecoderAMRWB:
+#endif
+#ifdef NETEQ_G726_CODEC
+            /* Treat G726 as non-splittable to simplify the implementation */
+        case kDecoderG726_16:
+        case kDecoderG726_24:
+        case kDecoderG726_32:
+        case kDecoderG726_40:
+#endif
+#ifdef NETEQ_SPEEX_CODEC
+        case kDecoderSPEEX_8:
+        case kDecoderSPEEX_16:
+#endif
+#ifdef NETEQ_CELT_CODEC
+        case kDecoderCELT_32 :
+#endif
+#ifdef NETEQ_G729_1_CODEC
+        case kDecoderG729_1:
+#endif
+        {
+            /* These codecs' payloads are not splittable (deltaTime is left
+             * unmodified here). */
+            inst->deltaBytes = NO_SPLIT;
+            return 0;
+        }
+
+            /*
+             * Sample based coders are a special case.
+             * In this case, deltaTime signals the number of bytes per timestamp unit times 2
+             * in log2 domain.
+             */
+#if (defined NETEQ_G711_CODEC)
+        case kDecoderPCMu:
+        case kDecoderPCMa:
+        {
+            inst->deltaBytes = -12;
+            inst->deltaTime = 1;
+            return 0;
+        }
+#endif
+#if (defined NETEQ_G722_CODEC)
+        case kDecoderG722:
+        {
+            inst->deltaBytes = -14;
+            inst->deltaTime = 0;
+            return 0;
+        }
+#endif
+#if (defined NETEQ_PCM16B_CODEC)
+        case kDecoderPCM16B:
+        {
+            inst->deltaBytes = -12;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_WIDEBAND))
+        case kDecoderPCM16Bwb:
+        {
+            inst->deltaBytes = -14;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_32KHZ_WIDEBAND))
+        case kDecoderPCM16Bswb32kHz:
+        {
+            inst->deltaBytes = -18;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_48KHZ_WIDEBAND))
+        case kDecoderPCM16Bswb48kHz:
+        {
+            inst->deltaBytes = -22;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+
+            /* Splittable payloads: deltaBytes/deltaTime give the per-frame
+             * size in bytes and timestamp units, respectively. */
+#ifdef NETEQ_G722_1_CODEC
+        case kDecoderG722_1_16:
+        {
+            inst->deltaBytes = 40;
+            inst->deltaTime = 320;
+            return 0;
+        }
+        case kDecoderG722_1_24:
+        {
+            inst->deltaBytes = 60;
+            inst->deltaTime = 320;
+            return 0;
+        }
+        case kDecoderG722_1_32:
+        {
+            inst->deltaBytes = 80;
+            inst->deltaTime = 320;
+            return 0;
+        }
+#endif
+#ifdef NETEQ_G722_1C_CODEC
+        case kDecoderG722_1C_24:
+        {
+            inst->deltaBytes = 60;
+            inst->deltaTime = 640;
+            return 0;
+        }
+        case kDecoderG722_1C_32:
+        {
+            inst->deltaBytes = 80;
+            inst->deltaTime = 640;
+            return 0;
+        }
+        case kDecoderG722_1C_48:
+        {
+            inst->deltaBytes = 120;
+            inst->deltaTime = 640;
+            return 0;
+        }
+#endif
+#ifdef NETEQ_G729_CODEC
+        case kDecoderG729:
+        {
+            inst->deltaBytes = 10;
+            inst->deltaTime = 80;
+            return 0;
+        }
+#endif
+#ifdef NETEQ_ILBC_CODEC
+        case kDecoderILBC:
+        {
+            /* Check for splitting of iLBC packets.
+             * If payload size is a multiple of 50 bytes it should be split into 30ms frames.
+             * If payload size is a multiple of 38 bytes it should be split into 20ms frames.
+             * The least common multiple of 38 and 50 is 950, so the payload size must be less than
+             * 950 bytes in order to resolve the frames unambiguously.
+             * Currently max 12 frames in one bundle.
+             */
+            switch (codedsize)
+            {
+                case 50:
+                case 100:
+                case 150:
+                case 200:
+                case 250:
+                case 300:
+                case 350:
+                case 400:
+                case 450:
+                case 500:
+                case 550:
+                case 600:
+                {
+                    inst->deltaBytes = 50;
+                    inst->deltaTime = 240;
+                    break;
+                }
+                case 38:
+                case 76:
+                case 114:
+                case 152:
+                case 190:
+                case 228:
+                case 266:
+                case 304:
+                case 342:
+                case 380:
+                case 418:
+                case 456:
+                {
+                    inst->deltaBytes = 38;
+                    inst->deltaTime = 160;
+                    break;
+                }
+                default:
+                {
+                    return AMBIGUOUS_ILBC_FRAME_SIZE; /* Something not supported... */
+                }
+            }
+            return 0;
+        }
+#endif
+#ifdef NETEQ_GSMFR_CODEC
+        case kDecoderGSMFR:
+        {
+            inst->deltaBytes = 33;
+            inst->deltaTime = 160;
+            return 0;
+        }
+#endif
+        default:
+        { /* Unknown codec */
+            inst->deltaBytes = NO_SPLIT;
+            return CODEC_DB_UNKNOWN_CODEC;
+        }
+    } /* end of switch */
+}
+
+/*
+ * Returns 1 if codec is multiple description, 0 otherwise.
+ * NOTE: This function is a stub, since there currently are no MD codecs.
+ */
+int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID)
+{
+    /* Stub: no multiple-description codecs exist yet, so the answer is
+     * always "no". Extend this check when an MD codec is added. */
+    (void) codecID; /* parameter currently unused */
+    return 0;
+}
+
+/*
+ * Returns 1 if payload type is registered as a CNG codec, 0 otherwise
+ */
+int WebRtcNetEQ_DbIsCNGPayload(CodecDbInst_t *inst, int payloadType)
+{
+#ifdef NETEQ_CNG_CODEC
+    int k;
+
+    /* A payload type is CNG if it matches any registered CNG slot;
+     * unused slots hold -1. */
+    for (k = 0; k < NUM_CNG_CODECS; k++)
+    {
+        const int registeredType = inst->CNGpayloadType[k];
+        if ((registeredType != -1) && (registeredType == payloadType))
+        {
+            return 1;
+        }
+    }
+#endif
+
+    /* No match (or CNG support not compiled in). */
+    return 0;
+}
+
+/*
+ * Return the sample rate for the codec with the given payload type, 0 if error
+ */
+WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
+{
+    /* CNG slot index -> sample rate; slot 0 is the 8 kHz entry. The loop
+     * index below is bounded by NUM_CNG_CODECS <= 4, so the table covers
+     * every reachable slot. */
+    static const WebRtc_UWord16 kCngRateHz[4] = { 8000, 16000, 32000, 48000 };
+    int k;
+    CodecFuncInst_t funcInst;
+
+    /* Sanity */
+    if (inst == NULL)
+    {
+        /* Invalid instance: report 0 Hz. */
+        return 0;
+    }
+
+    /* CNG payload types are stored outside the regular codec table. */
+    for (k = 0; k < NUM_CNG_CODECS; k++)
+    {
+        if ((inst->CNGpayloadType[k] != -1) && (inst->CNGpayloadType[k] == payloadType))
+        {
+            return kCngRateHz[k];
+        }
+    }
+
+    /* Not a CNG payload; resolve through the regular codec database. */
+    k = WebRtcNetEQ_DbGetCodec(inst, payloadType);
+    if (k >= 0)
+    {
+        if (WebRtcNetEQ_DbGetPtrs(inst, (enum WebRtcNetEQDecoder) k, &funcInst) != 0)
+        {
+            /* Unexpected lookup failure: report 0 Hz. */
+            return 0;
+        }
+        return funcInst.codec_fs;
+    }
+
+    /* Unknown payload type: report 0 Hz. */
+    return 0;
+
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/codec_db.h b/trunk/src/modules/audio_coding/neteq/codec_db.h
new file mode 100644
index 0000000..7f42980
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/codec_db.h
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Interface for the codec database.
+ */
+
+#ifndef CODEC_DB_H
+#define CODEC_DB_H
+
+#include "typedefs.h"
+
+#include "webrtc_neteq.h"
+#include "codec_db_defines.h"
+#include "neteq_defines.h"
+
+#if defined(NETEQ_48KHZ_WIDEBAND) /* CNG slot count grows with the highest band compiled in */
+    #define NUM_CNG_CODECS 4
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+    #define NUM_CNG_CODECS 3
+#elif defined(NETEQ_WIDEBAND)
+    #define NUM_CNG_CODECS 2
+#else
+    #define NUM_CNG_CODECS 1
+#endif
+
+/* Codec database: parallel arrays indexed by a compact slot number. */
+typedef struct
+{
+
+    WebRtc_Word16 position[NUM_TOTAL_CODECS]; /* slot in the arrays below per decoder ID; -1 = not registered */
+    WebRtc_Word16 nrOfCodecs; /* number of codecs currently registered */
+
+    WebRtc_Word16 payloadType[NUM_CODECS]; /* payload type number of each registered codec */
+    FuncDecode funcDecode[NUM_CODECS]; /* decode function */
+    FuncDecode funcDecodeRCU[NUM_CODECS]; /* secondary ("RCU") decode function */
+    FuncDecodePLC funcDecodePLC[NUM_CODECS]; /* packet loss concealment function */
+    FuncDecodeInit funcDecodeInit[NUM_CODECS]; /* decoder (re)initialization function */
+    FuncAddLatePkt funcAddLatePkt[NUM_CODECS]; /* add-late-packet function */
+    FuncGetMDinfo funcGetMDinfo[NUM_CODECS]; /* multiple-description info function */
+    FuncGetPitchInfo funcGetPitch[NUM_CODECS]; /* pitch info function */
+    FuncUpdBWEst funcUpdBWEst[NUM_CODECS]; /* bandwidth-estimate update function */
+    FuncGetErrorCode funcGetErrorCode[NUM_CODECS]; /* decoder error-code query function */
+    void * codec_state[NUM_CODECS]; /* opaque per-decoder state */
+    WebRtc_UWord16 codec_fs[NUM_CODECS]; /* sample rate in Hz */
+    WebRtc_Word16 CNGpayloadType[NUM_CNG_CODECS]; /* CNG payload type per rate slot; -1 = unset */
+
+} CodecDbInst_t;
+
+#define NO_SPLIT -1 /* codec payload cannot be split */
+
+/* Per-frame split parameters; see WebRtcNetEQ_DbGetSplitInfo. */
+typedef struct
+{
+    WebRtc_Word16 deltaBytes; /* bytes per frame, NO_SPLIT, or a negative code for sample-based codecs */
+    WebRtc_Word16 deltaTime; /* time per frame, in timestamp units */
+} SplitInfo_t;
+
+/*
+ * Resets the codec database.
+ */
+int WebRtcNetEQ_DbReset(CodecDbInst_t *inst);
+
+/*
+ * Adds a new codec to the database.
+ */
+int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
+                      WebRtc_Word16 payloadType, FuncDecode funcDecode,
+                      FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
+                      FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
+                      FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
+                      FuncUpdBWEst funcUpdBWEst, FuncGetErrorCode funcGetErrorCode,
+                      void* codec_state, WebRtc_UWord16 codec_fs);
+
+/*
+ * Removes a codec from the database.
+ */
+int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec);
+
+/*
+ * Get the decoder function pointers for a codec.
+ */
+int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder, /* NOTE(review): parameter name omitted here; named "codec" in the definition */
+                          CodecFuncInst_t *ptr_inst);
+
+/*
+ * Returns payload number given a codec identifier.
+ */
+
+int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID);
+
+/*
+ * Returns codec identifier given a payload number.
+ */
+
+int WebRtcNetEQ_DbGetCodec(CodecDbInst_t *inst, int payloadType);
+
+/*
+ * Extracts the Payload Split information of the codec with the specified payloadType.
+ */
+
+int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
+                               int codedsize);
+
+/*
+ * Returns 1 if codec is multiple description type, 0 otherwise.
+ */
+int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID);
+
+/*
+ * Returns 1 if payload type is registered as a CNG codec, 0 otherwise.
+ */
+int WebRtcNetEQ_DbIsCNGPayload(CodecDbInst_t *inst, int payloadType);
+
+/*
+ * Return the sample rate for the codec with the given payload type, 0 if error.
+ */
+WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);
+
+#endif
+
diff --git a/trunk/src/modules/audio_coding/neteq/codec_db_defines.h b/trunk/src/modules/audio_coding/neteq/codec_db_defines.h
new file mode 100644
index 0000000..9b78b86
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/codec_db_defines.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Some definitions related to the codec database.
+ */
+
+#ifndef CODEC_DB_DEFINES_H
+#define CODEC_DB_DEFINES_H
+
+#include "typedefs.h"
+
+#define NUM_CODECS 47 /* probably too large with the limited set of supported codecs*/
+#define NUM_TOTAL_CODECS	kDecoderReservedEnd
+
+/*
+ * Pointer to decoder function.
+ */
+typedef WebRtc_Word16 (*FuncDecode)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len,
+                                    WebRtc_Word16* decoded, WebRtc_Word16* speechType);
+
+/*
+ * Pointer to PLC (packet loss concealment) function.
+ */
+typedef WebRtc_Word16 (*FuncDecodePLC)(void* state, WebRtc_Word16* decodec, /* NOTE(review): "decodec" presumably means "decoded" output buffer */
+                                       WebRtc_Word16 frames);
+
+/*
+ * Pointer to decoder init function.
+ */
+typedef WebRtc_Word16 (*FuncDecodeInit)(void* state);
+
+/*
+ * Pointer to add late packet function.
+ */
+typedef WebRtc_Word16
+                (*FuncAddLatePkt)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len);
+
+/*
+ * Pointer to get MD (multiple description) info function.
+ */
+typedef WebRtc_Word16 (*FuncGetMDinfo)(void* state);
+
+/*
+ * Pointer to pitch info function.
+ * Return 0 for unvoiced, -1 if pitch not available.
+ */
+typedef WebRtc_Word16 (*FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
+                                          WebRtc_Word16* length);
+
+/*
+ *  Pointer to the update bandwidth estimate function.
+ */
+typedef WebRtc_Word16 (*FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
+                                      WebRtc_Word32 packet_size,
+                                      WebRtc_UWord16 rtp_seq_number, WebRtc_UWord32 send_ts,
+                                      WebRtc_UWord32 arr_ts);
+
+/*
+ *  Pointer to error code function.
+ */
+typedef WebRtc_Word16 (*FuncGetErrorCode)(void* state);
+
+/* Bundle of one codec's function pointers, state, and sample rate. */
+typedef struct CodecFuncInst_t_
+{
+
+    FuncDecode funcDecode;
+    FuncDecode funcDecodeRCU;
+    FuncDecodePLC funcDecodePLC;
+    FuncDecodeInit funcDecodeInit;
+    FuncAddLatePkt funcAddLatePkt;
+    FuncGetMDinfo funcGetMDinfo;
+    FuncUpdBWEst funcUpdBWEst; /* Currently in use for the ISAC family (without LC) only*/
+    FuncGetErrorCode funcGetErrorCode;
+    void * codec_state;
+    WebRtc_UWord16 codec_fs; /* sample rate in Hz */
+    WebRtc_UWord32 timeStamp;
+
+} CodecFuncInst_t;
+
+#endif
+
diff --git a/trunk/src/modules/audio_coding/neteq/correlator.c b/trunk/src/modules/audio_coding/neteq/correlator.c
new file mode 100644
index 0000000..97c41da
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/correlator.c
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+
+/* Scratch usage:
+
+ Type           Name                size  startpos  endpos
+ WebRtc_Word16  pw16_corrVec        62    0         61
+ WebRtc_Word16  pw16_data_ds        124   0         123
+ WebRtc_Word32  pw32_corr           2*54  124       231
+
+ Total:  232
+ */
+
+#define	 SCRATCH_pw16_corrVec			0
+#define	 SCRATCH_pw16_data_ds			0
+#define	 SCRATCH_pw32_corr				124
+
+#define NETEQ_CORRELATOR_DSVECLEN 		124	/* 124 = 60 + 10 + 54 */
+
+WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
+#ifdef SCRATCH
+                                     WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                     WebRtc_Word16 *pw16_data,
+                                     WebRtc_Word16 w16_dataLen,
+                                     WebRtc_Word16 *pw16_corrOut,
+                                     WebRtc_Word16 *pw16_corrScale)
+{
+    WebRtc_Word16 w16_corrLen = 60; /* correlation window length in 4 kHz samples */
+#ifdef SCRATCH
+    WebRtc_Word16 *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
+    /*	WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
+#else
+    WebRtc_Word16 pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN]; /* downsampled (4 kHz) data */
+    WebRtc_Word32 pw32_corr[54]; /* raw 32-bit correlation values */
+    /*	WebRtc_Word16 pw16_corrVec[4+54+4];*/
+#endif
+    /*	WebRtc_Word16 *pw16_corr=&pw16_corrVec[4];*/
+    WebRtc_Word16 w16_maxVal;
+    WebRtc_Word32 w32_maxVal;
+    WebRtc_Word16 w16_normVal; /* shift applied to the downsampled data */
+    WebRtc_Word16 w16_normVal2; /* shift applied when narrowing to 16 bits */
+    /*	WebRtc_Word16 w16_corrUpsLen;*/
+    WebRtc_Word16 *pw16_B = NULL; /* downsampling filter coefficients (selected below) */
+    WebRtc_Word16 w16_Blen = 0; /* downsampling filter length */
+    WebRtc_Word16 w16_factor = 0; /* downsampling factor (fs / 4000) */
+
+    /* Set constants depending on frequency used */
+    if (inst->fs == 8000)
+    {
+        w16_Blen = 3;
+        w16_factor = 2;
+        pw16_B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
+#ifdef NETEQ_WIDEBAND
+    }
+    else if (inst->fs==16000)
+    {
+        w16_Blen = 5;
+        w16_factor = 4;
+        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    }
+    else if (inst->fs==32000)
+    {
+        w16_Blen = 7;
+        w16_factor = 8;
+        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    }
+    else /* if inst->fs==48000 */
+    {
+        w16_Blen = 7;
+        w16_factor = 12;
+        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl;
+#endif
+    } /* NOTE(review): if fs != 8000 and no wideband mode is compiled in, pw16_B stays NULL */
+
+    /* Downsample data in order to work on a 4 kHz sampled signal */
+    WebRtcSpl_DownsampleFast(
+        pw16_data + w16_dataLen - (NETEQ_CORRELATOR_DSVECLEN * w16_factor),
+        (WebRtc_Word16) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
+        NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (WebRtc_Word16) 0);
+
+    /* Normalize downsampled vector to using entire 16 bit */
+    w16_maxVal = WebRtcSpl_MaxAbsValueW16(pw16_data_ds, 124);
+    w16_normVal = 16 - WebRtcSpl_NormW32((WebRtc_Word32) w16_maxVal);
+    WebRtcSpl_VectorBitShiftW16(pw16_data_ds, NETEQ_CORRELATOR_DSVECLEN, pw16_data_ds,
+        w16_normVal);
+
+    /* Correlate from lag 10 to lag 60 (20..120 in NB and 40..240 in WB) */
+
+    WebRtcNetEQ_CrossCorr(
+        pw32_corr, &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen],
+        &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen - 10], 60, 54,
+        6 /*maxValue... shifts*/, -1);
+
+    /*
+     * Move data from w32 to w16 vector.
+     * Normalize downsampled vector to using all 14 bits
+     */
+    w32_maxVal = WebRtcSpl_MaxAbsValueW32(pw32_corr, 54);
+    w16_normVal2 = 18 - WebRtcSpl_NormW32(w32_maxVal);
+    w16_normVal2 = WEBRTC_SPL_MAX(w16_normVal2, 0); /* never shift left */
+
+    WebRtcSpl_VectorBitShiftW32ToW16(pw16_corrOut, 54, pw32_corr, w16_normVal2);
+
+    /* Total scale factor (right shifts) of correlation value */
+    *pw16_corrScale = 2 * w16_normVal + 6 + w16_normVal2;
+
+    return (50 + 1); /* NOTE(review): 54 lags are computed above but 51 reported -- confirm against callers */
+}
+
+#undef	 SCRATCH_pw16_corrVec
+#undef	 SCRATCH_pw16_data_ds
+#undef	 SCRATCH_pw32_corr
+
diff --git a/trunk/src/modules/audio_coding/neteq/delay_logging.h b/trunk/src/modules/audio_coding/neteq/delay_logging.h
new file mode 100644
index 0000000..04b1c40
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/delay_logging.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Contains definitions for the delay logging functionality. Only used for debugging and
+ * tracing purposes.
+ */
+
+#ifndef DELAY_LOGGING_H
+#define DELAY_LOGGING_H
+
+#define NETEQ_DELAY_LOGGING_VERSION_STRING "2.0" /* log file format version */
+
+#define NETEQ_DELAY_LOGGING_SIGNAL_RECIN 1 /* event type tags written to the delay log: */
+#define NETEQ_DELAY_LOGGING_SIGNAL_FLUSH 2
+#define NETEQ_DELAY_LOGGING_SIGNAL_CLOCK 3
+#define NETEQ_DELAY_LOGGING_SIGNAL_EOF 4
+#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE 5
+#define NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS 6
+#define NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO 7
+#define NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO 8
+#define NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO 9
+#define NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO 10
+#define NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF 11
+#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC 12
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/dsp.c b/trunk/src/modules/audio_coding/neteq/dsp.c
new file mode 100644
index 0000000..96f07d2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dsp.c
@@ -0,0 +1,522 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some DSP initialization functions and 
+ * constant table definitions.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+/* Filter coefficients used when downsampling from the indicated 
+ sample rates (8, 16, 32, 48 kHz) to 4 kHz.
+ Coefficients are in Q12. */
+
+/* {0.3, 0.4, 0.3} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
+
+#ifdef NETEQ_WIDEBAND
+/* {0.15, 0.2, 0.3, 0.2, 0.15} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[] =
+{   614, 819, 1229, 819, 614};
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+/* {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[] =
+{   584, 512, 625, 667, 625, 512, 584};
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+/* {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[] =
+{   1019, 390, 427, 440, 427, 390, 1019};
+#endif
+
+/* Constants used in expand function WebRtcNetEQ_Expand */
+
+/* Q12: -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 */
+const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
+
+/* Tabulated divisions to save complexity */
+/* 1049/{0, .., 6} */
+const WebRtc_Word16 WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
+
+/* 2097/{0, .., 6} */
+const WebRtc_Word16 WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
+
+/* 5243/{0, .., 6} */
+const WebRtc_Word16 WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
+
+#ifdef WEBRTC_NETEQ_40BITACC_TEST
+/*
+ * Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
+ * implementation where the main (spl and NetEQ) functions have been
+ * 40-bit optimized. For testing purposes.
+ */
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccCrossCorr(...)
+ *
+ * Calculates the cross correlation between two sequences seq1 and seq2.
+ * seq1 is fixed while the seq2 start pointer is advanced for each value.
+ *
+ * Input:
+ *      - seq1          : First sequence (fixed throughout the correlation)
+ *      - seq2          : Second sequence (start pointer advanced by
+ *                        step_seq2 samples for each new correlation value)
+ *      - dimSeq        : Number of samples to use in the cross correlation.
+ *                        Should be no larger than 1024 to avoid overflow.
+ *      - dimCrossCorr  : Number of cross correlations to calculate (the
+ *                        start position for seq2 is updated for each one)
+ *      - rShift        : Number of right shifts applied to each result
+ *      - step_seq2     : How many (positive or negative) steps the seq2
+ *                        pointer is advanced between consecutive cross
+ *                        correlation values
+ *
+ * Output:
+ *      - crossCorr     : The dimCrossCorr correlation values, in Q(-rShift)
+ */
+
+void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr,
+    WebRtc_Word16 *seq1,
+    WebRtc_Word16 *seq2,
+    WebRtc_Word16 dimSeq,
+    WebRtc_Word16 dimCrossCorr,
+    WebRtc_Word16 rShift,
+    WebRtc_Word16 step_seq2)
+{
+    int i, j;
+    WebRtc_Word16 *seq1Ptr, *seq2Ptr;
+    WebRtc_Word64 acc;
+
+    for (i = 0; i < dimCrossCorr; i++)
+    {
+        /* Reset the pointer to the fixed vector, position the pointer into
+         the sliding vector, and clear the 64-bit accumulator */
+        seq1Ptr = seq1;
+        seq2Ptr = seq2 + (step_seq2 * i);
+        acc = 0;
+
+        /* Accumulate products over dimSeq samples */
+        for (j = 0; j < dimSeq; j++)
+        {
+            acc += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+            seq1Ptr++;
+            seq2Ptr++;
+        }
+
+        (*crossCorr) = (WebRtc_Word32) (acc >> rShift);
+        crossCorr++;
+    }
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccDotW16W16(...)
+ *
+ * Calculates the dot product between two vectors (WebRtc_Word16)
+ *
+ * Input:
+ *      - vector1       : Vector 1
+ *      - vector2       : Vector 2
+ *      - len           : Number of samples in each vector.
+ *                        Should be no larger than 1024 to avoid overflow.
+ *      - scaling       : Number of right shifts applied to the accumulated
+ *                        sum before it is returned (avoids 32-bit overflow)
+ * Return value         : The dot product, right-shifted by 'scaling' bits
+ */
+
+WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1,
+    WebRtc_Word16 *vector2,
+    int len,
+    int scaling)
+{
+    WebRtc_Word32 sum;
+    int i;
+    WebRtc_Word64 acc;
+
+    acc = 0;
+    for (i = 0; i < len; i++)
+    {
+        acc += WEBRTC_SPL_MUL_16_16(*vector1++, *vector2++);
+    }
+
+    sum = (WebRtc_Word32) (acc >> scaling);
+
+    return(sum);
+}
+
+#endif /* WEBRTC_NETEQ_40BITACC_TEST */
+
+/****************************************************************************
+ * WebRtcNetEQ_DSPInit(...)
+ *
+ * Initializes DSP side of NetEQ.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *      - fs            : Initial sample rate (may change when decoding data)
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs)
+{
+
+    int res = 0;
+    WebRtc_Word16 fs_mult;
+
+    /* Pointers and values to save before clearing the instance */
+#ifdef NETEQ_CNG_CODEC
+    void *savedPtr1 = inst->CNG_Codec_inst;
+#endif
+    void *savedPtr2 = inst->pw16_readAddress;
+    void *savedPtr3 = inst->pw16_writeAddress;
+    void *savedPtr4 = inst->main_inst;
+#ifdef NETEQ_VAD
+    void *savedVADptr = inst->VADInst.VADState;
+    VADInitFunction savedVADinit = inst->VADInst.initFunction;
+    VADSetmodeFunction savedVADsetmode = inst->VADInst.setmodeFunction;
+    VADFunction savedVADfunc = inst->VADInst.VADFunction;
+    WebRtc_Word16 savedVADEnabled = inst->VADInst.VADEnabled;
+    WebRtc_Word16 savedVADMode = inst->VADInst.VADMode;
+#endif /* NETEQ_VAD */
+    DSPStats_t saveStats;
+    WebRtc_Word16 saveMsPerCall = inst->millisecondsPerCall;
+    enum BGNMode saveBgnMode = inst->BGNInst.bgnMode;
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo saveMSinfo;
+#endif
+
+    /* copy contents of statInst to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveStats, &(inst->statInst),
+        sizeof(DSPStats_t)/sizeof(WebRtc_Word16));
+
+#ifdef NETEQ_STEREO
+    /* copy msInfo to avoid clearing; NOTE(review): dsp.h declares msInfo as a pointer, so this copies sizeof(MasterSlaveInfo) words starting at the pointer itself, reading past it -- confirm intent */WEBRTC_SPL_MEMCPY_W16(&saveMSinfo, &(inst->msInfo),
+        sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
+#endif
+
+    /* check that the sample rate is valid */
+    if ((fs != 8000)
+#ifdef NETEQ_WIDEBAND
+    &&(fs!=16000)
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    &&(fs!=32000)
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    &&(fs!=48000)
+#endif
+    )
+    {
+        /* invalid rate */
+        return (CODEC_DB_UNSUPPORTED_FS);
+    }
+
+    /* calculate fs/8000 */
+    fs_mult = WebRtcSpl_DivW32W16ResW16(fs, 8000);
+
+    /* Set everything to zero since most variables should be zero at start */
+    WebRtcSpl_MemSetW16((WebRtc_Word16 *) inst, 0, sizeof(DSPInst_t) / sizeof(WebRtc_Word16));
+
+    /* Restore saved pointers  */
+#ifdef NETEQ_CNG_CODEC
+    inst->CNG_Codec_inst = (CNG_dec_inst *)savedPtr1;
+#endif
+    inst->pw16_readAddress = (WebRtc_Word16 *) savedPtr2;
+    inst->pw16_writeAddress = (WebRtc_Word16 *) savedPtr3;
+    inst->main_inst = savedPtr4;
+#ifdef NETEQ_VAD
+    inst->VADInst.VADState = savedVADptr;
+    inst->VADInst.initFunction = savedVADinit;
+    inst->VADInst.setmodeFunction = savedVADsetmode;
+    inst->VADInst.VADFunction = savedVADfunc;
+    inst->VADInst.VADEnabled = savedVADEnabled;
+    inst->VADInst.VADMode = savedVADMode;
+#endif /* NETEQ_VAD */
+
+    /* Initialize main part */
+    inst->fs = fs;
+    inst->millisecondsPerCall = saveMsPerCall;
+    inst->timestampsPerCall = inst->millisecondsPerCall * 8 * fs_mult;
+    inst->ExpandInst.w16_overlap = 5 * fs_mult;
+    inst->endPosition = 565 * fs_mult;
+    inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
+    inst->w16_seedInc = 1;
+    inst->uw16_seed = 777;
+    inst->w16_muteFactor = 16384; /* 1.0 in Q14 */
+    inst->w16_frameLen = 3 * inst->timestampsPerCall; /* Dummy initialize to 30ms */
+
+    inst->w16_speechHistoryLen = 256 * fs_mult;
+    inst->pw16_speechHistory = &inst->speechBuffer[inst->endPosition
+        - inst->w16_speechHistoryLen];
+    inst->ExpandInst.pw16_overlapVec = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen
+        - inst->ExpandInst.w16_overlap]);
+
+    /* Reuse of speechBuffer memory as scratch vectors inside Expand */
+    inst->ExpandInst.pw16_expVecs[0] = &inst->speechBuffer[0];
+    inst->ExpandInst.pw16_expVecs[1] = &inst->speechBuffer[126 * fs_mult];
+    inst->ExpandInst.pw16_arState = &inst->speechBuffer[2 * 126 * fs_mult];
+    inst->ExpandInst.pw16_arFilter = &inst->speechBuffer[2 * 126 * fs_mult
+        + UNVOICED_LPC_ORDER];
+    /* Ends at 2*126*fs_mult+UNVOICED_LPC_ORDER+(UNVOICED_LPC_ORDER+1) */
+
+    inst->ExpandInst.w16_expandMuteFactor = 16384; /* 1.0 in Q14 */
+
+    /* Initialize BGN part */
+    inst->BGNInst.pw16_filter[0] = 4096;
+    inst->BGNInst.w16_scale = 20000;
+    inst->BGNInst.w16_scaleShift = 24;
+    inst->BGNInst.w32_energyUpdate = 500000;
+    inst->BGNInst.w32_energyUpdateLow = 0;
+    inst->BGNInst.w32_energy = 2500;
+    inst->BGNInst.w16_initialized = 0;
+    inst->BGNInst.bgnMode = saveBgnMode;
+
+    /* Recreate statistics counters */WEBRTC_SPL_MEMCPY_W16(&(inst->statInst), &saveStats,
+        sizeof(DSPStats_t)/sizeof(WebRtc_Word16));
+
+#ifdef NETEQ_STEREO
+    /* Recreate MSinfo; NOTE(review): dsp.h declares msInfo as a pointer, so this writes sizeof(MasterSlaveInfo) words over and past it -- confirm intent */WEBRTC_SPL_MEMCPY_W16(&(inst->msInfo), &saveMSinfo,
+        sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
+#endif
+
+#ifdef NETEQ_CNG_CODEC
+    if (inst->CNG_Codec_inst!=NULL)
+    {
+        /* initialize comfort noise generator */
+        res |= WebRtcCng_InitDec(inst->CNG_Codec_inst);
+    }
+#endif
+
+#ifdef NETEQ_VAD
+    /* initialize PostDecode VAD instance
+     (don't bother checking for NULL instance, this is done inside init function) */
+    res |= WebRtcNetEQ_InitVAD(&inst->VADInst, fs);
+#endif /* NETEQ_VAD */
+
+    return (res);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_AddressInit(...)
+ *
+ * Initializes the shared-memory communication on the DSP side.
+ *
+ * Input:
+ *      - inst              : NetEQ DSP instance
+ *      - data2McuAddress   : Pointer to memory where DSP writes / MCU reads
+ *      - data2DspAddress   : Pointer to memory where MCU writes / DSP reads
+ *      - mainInst          : NetEQ main instance
+ *
+ * Output:
+ *      - inst              : Updated instance
+ *
+ * Return value             : 0 - ok (this function cannot fail)
+ */
+
+int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
+                            const void *data2DspAddress, const void *mainInst)
+{
+
+    /* set shared-memory addresses in the DSP instance
+     (const is cast away; NetEQ writes through pw16_writeAddress) */
+    inst->pw16_readAddress = (WebRtc_Word16 *) data2DspAddress;
+    inst->pw16_writeAddress = (WebRtc_Word16 *) data2McuAddress;
+
+    /* set pointer to main NetEQ instance */
+    inst->main_inst = (void *) mainInst;
+
+    /* set output frame size to 10 ms = 80 samples in narrowband */
+    inst->millisecondsPerCall = 10;
+    inst->timestampsPerCall = 80;
+
+    return (0);
+
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearInCallStats(...)
+ *
+ * Reset in-call statistics variables on DSP side.
+ *
+ * Input:
+ *      - inst              : NetEQ DSP instance
+ *
+ * Output:
+ *      - inst              : Updated instance
+ *
+ * Return value             : 0 - ok (this function cannot fail)
+ */
+
+int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst)
+{
+
+    /* Reset the accelerate/expand/pre-emptive sample counters */
+    inst->statInst.accelerateLength = 0;
+    inst->statInst.expandLength = 0;
+    inst->statInst.preemptiveLength = 0;
+
+    return (0);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearPostCallStats(...)
+ *
+ * Reset post-call statistics variables on DSP side.
+ *
+ * Input:
+ *      - inst              : NetEQ DSP instance
+ *
+ * Output:
+ *      - inst              : Updated instance
+ *
+ * Return value             : 0 - ok (this function cannot fail)
+ */
+
+int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst)
+{
+
+    /* Reset the expanded voice/noise sample counters */
+    inst->statInst.expandedVoiceSamples = 0;
+    inst->statInst.expandedNoiseSamples = 0;
+
+    return (0);
+}
+
+#ifdef NETEQ_VAD
+
+/****************************************************************************
+ * WebRtcNetEQ_InitVAD(...)
+ *
+ * Initializes post-decode VAD instance.
+ *
+ * Input:
+ *      - VADInst       : PostDecodeVAD instance
+ *      - fs            : Initial sample rate
+ *
+ * Output:
+ *      - VADInst       : Updated instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error (from init/setmode function pointers)
+ */
+
+int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs)
+{
+
+    int res = 0;
+
+    /* initially, disable the post-decode VAD */
+    VADInst->VADEnabled = 0;
+
+    if (VADInst->VADState != NULL /* if VAD state is provided */
+        && VADInst->initFunction != NULL /* and all function ... */
+        && VADInst->setmodeFunction != NULL /* ... pointers ... */
+        && VADInst->VADFunction != NULL) /* ... are defined */
+    {
+        res = VADInst->initFunction( VADInst->VADState ); /* call VAD init function */
+        res |= WebRtcNetEQ_SetVADModeInternal( VADInst, VADInst->VADMode );
+
+        if (res!=0)
+        {
+            /* something is wrong; play it safe and set the VADState to NULL */
+            VADInst->VADState = NULL;
+        }
+        else if (fs<=16000)
+        {
+            /* enable VAD only for NB or WB (VAD cannot handle SWB) */
+            VADInst->VADEnabled = 1;
+        }
+    }
+
+    /* reset SID/CNG interval counter */
+    VADInst->SIDintervalCounter = 0;
+
+    /* initialize with active-speaker decision */
+    VADInst->VADDecision = 1;
+
+    return(res);
+
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADModeInternal(...)
+ *
+ * Set the VAD mode in the VAD struct, and communicate it to the VAD
+ * instance if one exists.
+ *
+ * Input:
+ *      - VADInst       : PostDecodeVAD instance
+ *      - mode          : Mode number passed on to the VAD setmode function
+ *
+ * Output:
+ *      - VADInst       : Updated instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error (from the setmode function pointer)
+ */
+
+int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode)
+{
+
+    int res = 0;
+
+    VADInst->VADMode = mode;
+
+    if (VADInst->VADState != NULL)
+    {
+        /* forward the new mode to the external VAD instance */
+        res = VADInst->setmodeFunction(VADInst->VADState, mode);
+    }
+
+    return(res);
+
+}
+
+#endif /* NETEQ_VAD */
+
+/****************************************************************************
+ * WebRtcNetEQ_FlushSpeechBuffer(...)
+ *
+ * Flush (zero) the speech buffer and reset the playout positions.
+ *
+ * Input:
+ *      - inst          : NetEq DSP instance
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *
+ * Return value         : 0 - ok
+ *                        (this implementation always returns 0)
+ */
+
+int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst)
+{
+    WebRtc_Word16 fs_mult;
+
+    /* calculate fs/8000 */
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+
+    /* clear buffer and reset positions, as in WebRtcNetEQ_DSPInit */
+    WebRtcSpl_MemSetW16(inst->speechBuffer, 0, SPEECH_BUF_SIZE);
+    inst->endPosition = 565 * fs_mult;
+    inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
+
+    return 0;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/dsp.h b/trunk/src/modules/audio_coding/neteq/dsp.h
new file mode 100644
index 0000000..94213e4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dsp.h
@@ -0,0 +1,788 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some DSP initialization functions,
+ * constant table definitions and other parameters.
+ * Also contains definitions of all DSP-side data structures. 
+ */
+
+
+#ifndef DSP_H
+#define DSP_H
+
+#include "typedefs.h"
+
+#include "webrtc_cng.h"
+
+#include "codec_db_defines.h"
+#include "neteq_defines.h"
+#include "neteq_statistics.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+#include "dtmf_tonegen.h"
+#endif
+
+
+
+/*****************************/
+/* Pre-processor definitions */
+/*****************************/
+
+/* FSMULT is the sample rate divided by 8000 */
+#if defined(NETEQ_48KHZ_WIDEBAND)
+	#define FSMULT	6
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+	#define FSMULT	4
+#elif defined(NETEQ_WIDEBAND)
+	#define FSMULT 2
+#else
+	#define FSMULT 1
+#endif
+
+/* Size of the speech buffer (or synchronization buffer). */
+/* 60 ms decoding + 10 ms syncbuff + 0.625ms lookahead */
+#define SPEECH_BUF_SIZE (565 * FSMULT)
+
+/* Misc definitions */
+#define BGN_LPC_ORDER				(4 + FSMULT)  /* 5, 6, 8, or 10 */
+#define UNVOICED_LPC_ORDER			6
+#define RANDVEC_NO_OF_SAMPLES		256
+
+/* Number of milliseconds to remove/add during accelerate/pre-emptive expand
+   under BGNonly operation */
+#define DEFAULT_TIME_ADJUST 8
+
+/* Number of RecOut calls without CNG/SID before re-enabling post-decode VAD */
+#define POST_DECODE_VAD_AUTO_ENABLE 3000  
+
+/* 8kHz windowing in Q15 (over 5 samples) */
+#define NETEQ_OVERLAP_WINMUTE_8KHZ_START	27307
+#define NETEQ_OVERLAP_WINMUTE_8KHZ_INC		-5461
+#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_START	 5461
+#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC	 5461
+/* 16kHz windowing in Q15 (over 10 samples) */
+#define NETEQ_OVERLAP_WINMUTE_16KHZ_START	29789
+#define NETEQ_OVERLAP_WINMUTE_16KHZ_INC		-2979
+#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_START	 2979
+#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC	 2979
+/* 32kHz windowing in Q15 (over 20 samples) */
+#define NETEQ_OVERLAP_WINMUTE_32KHZ_START	31208
+#define NETEQ_OVERLAP_WINMUTE_32KHZ_INC		-1560
+#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_START	 1560
+#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC	 1560
+/* 48kHz windowing in Q15 (over 30 samples) */
+#define NETEQ_OVERLAP_WINMUTE_48KHZ_START	31711
+#define NETEQ_OVERLAP_WINMUTE_48KHZ_INC		-1057
+#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_START	 1057
+#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC	 1057
+
+/* Fade BGN towards zero after this many Expand calls */
+#define FADE_BGN_TIME 200
+
+
+/*******************/
+/* Constant tables */
+/*******************/
+
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_k1049div[];
+extern const WebRtc_Word16 WebRtcNetEQ_k2097div[];
+extern const WebRtc_Word16 WebRtcNetEQ_k5243div[];
+
+
+
+/************/
+/* Typedefs */
+/************/
+
+enum BGNMode
+{
+    BGN_ON,     /* default "normal" behavior with eternal noise */
+    BGN_FADE,   /* noise fades to zero after some time */
+    BGN_OFF     /* background noise is always zero */
+};
+
+#ifdef NETEQ_STEREO
+enum MasterSlaveMode
+{
+    NETEQ_MONO,     /* stand-alone instance */
+    NETEQ_MASTER,   /* master instance in a spatial/stereo configuration */
+    NETEQ_SLAVE     /* slave instance in a spatial/stereo configuration */
+};
+
+enum MasterSlaveExtraInfo
+{
+    NO_INFO,        /* no info to convey */
+    ACC_FAIL,       /* signal that accelerate failed */
+    PE_EXP_FAIL,    /* signal that pre-emptive expand failed */
+    DTMF_OVERDUB,   /* signal that DTMF overdub is generated */
+    DTMF_ONLY       /* signal that DTMF only is played */
+};
+#endif
+
+/****************************/
+/* DSP-side data structures */
+/****************************/
+
+/* Background noise (BGN) instance for storing BGN parameters 
+ (sub-instance of NETEQDSP_inst) */
+typedef struct BGNInst_t_
+{
+
+    WebRtc_Word32 w32_energy;
+    WebRtc_Word32 w32_energyMax;
+    WebRtc_Word32 w32_energyUpdate;
+    WebRtc_Word32 w32_energyUpdateLow;
+    WebRtc_Word16 pw16_filterState[BGN_LPC_ORDER];
+    WebRtc_Word16 pw16_filter[BGN_LPC_ORDER + 1];
+    WebRtc_Word16 w16_mutefactor;
+    WebRtc_Word16 w16_scale;
+    WebRtc_Word16 w16_scaleShift;
+    WebRtc_Word16 w16_initialized;
+    enum BGNMode bgnMode;
+
+} BGNInst_t;
+
+/* Expansion instance (sub-instance of NETEQDSP_inst) */
+typedef struct ExpandInst_t_
+{
+
+    WebRtc_Word16 w16_overlap; /* Constant, 5 for NB and 10 for WB */
+    WebRtc_Word16 w16_consecExp; /* Number of consecutive expand calls */
+    WebRtc_Word16 *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1]	*/
+    WebRtc_Word16 *pw16_arState; /* length [UNVOICED_LPC_ORDER]		*/
+    WebRtc_Word16 w16_arGain;
+    WebRtc_Word16 w16_arGainScale;
+    WebRtc_Word16 w16_vFraction; /* Q14 */
+    WebRtc_Word16 w16_currentVFraction; /* Q14 */
+    WebRtc_Word16 *pw16_expVecs[2];
+    WebRtc_Word16 w16_lags[3];
+    WebRtc_Word16 w16_maxLag;
+    WebRtc_Word16 *pw16_overlapVec; /* last samples of speech history */
+    WebRtc_Word16 w16_lagsDirection;
+    WebRtc_Word16 w16_lagsPosition;
+    WebRtc_Word16 w16_expandMuteFactor; /* Q14 */
+    WebRtc_Word16 w16_stopMuting;
+    WebRtc_Word16 w16_onset;
+    WebRtc_Word16 w16_muteSlope; /* Q20 */
+
+} ExpandInst_t;
+
+#ifdef NETEQ_VAD
+
+/*
+ * VAD function pointer types, replicating the typedefs in webrtc_neteq_internal.h.
+ * These function pointers match the definitions of WebRtc VAD functions WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
+ */
+typedef int (*VADInitFunction)(void *VAD_inst);
+typedef int (*VADSetmodeFunction)(void *VAD_inst, int mode);
+typedef WebRtc_Word16 (*VADFunction)(void *VAD_inst, WebRtc_Word16 fs, WebRtc_Word16 *frame,
+                                     WebRtc_Word16 frameLen);
+
+/* Post-decode VAD instance (sub-instance of NETEQDSP_inst) */
+typedef struct PostDecodeVAD_t_
+{
+
+    void *VADState; /* pointer to a VAD instance */
+
+    WebRtc_Word16 VADEnabled; /* 1 if enabled, 0 if disabled */
+    int VADMode; /* mode parameter to pass to the VAD function */
+    WebRtc_Word16 VADDecision; /* 1 for active, 0 for passive */
+    WebRtc_Word16 SIDintervalCounter; /* reset when decoding CNG/SID frame,
+     increment for each recout call */
+
+    /* Function pointers */
+    VADInitFunction initFunction; /* VAD init function */
+    VADSetmodeFunction setmodeFunction; /* VAD setmode function */
+    VADFunction VADFunction; /* VAD function */
+
+} PostDecodeVAD_t;
+
+#endif /* NETEQ_VAD */
+
+#ifdef NETEQ_STEREO
+#define MAX_MS_DECODES 10
+
+typedef struct 
+{
+    /* Stand-alone, master, or slave */
+    enum MasterSlaveMode    msMode;
+
+    enum MasterSlaveExtraInfo  extraInfo;
+
+    WebRtc_UWord16 instruction;
+    WebRtc_Word16 distLag;
+    WebRtc_Word16 corrLag;
+    WebRtc_Word16 bestIndex;
+
+    WebRtc_UWord32 endTimestamp;
+    WebRtc_UWord16 samplesLeftWithOverlap;
+
+} MasterSlaveInfo;
+#endif
+
+
+/* "Main" NetEQ DSP instance */
+typedef struct DSPInst_t_
+{
+
+    /* MCU/DSP Communication layer */
+    WebRtc_Word16 *pw16_readAddress;
+    WebRtc_Word16 *pw16_writeAddress;
+    void *main_inst;
+
+    /* Output frame size in ms and samples */
+    WebRtc_Word16 millisecondsPerCall;
+    WebRtc_Word16 timestampsPerCall;
+
+    /*
+     *	Example of speech buffer
+     *
+     *  -----------------------------------------------------------
+     *  |            History  T-60 to T         |     Future      |
+     *	-----------------------------------------------------------
+     *						                    ^			      ^
+     *					                    	|			      |
+     *					                   curPosition	   endPosition
+     *
+     *		History is gradually shifted out to the left when inserting
+     *      new data at the end.
+     */
+
+    WebRtc_Word16 speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
+    int curPosition; /* Next sample to play */
+    int endPosition; /* Position that ends future data */
+    WebRtc_UWord32 endTimestamp; /* Timestamp value at end of future data */
+    WebRtc_UWord32 videoSyncTimestamp; /* (Estimated) timestamp of the last
+     played sample (usually same as
+     endTimestamp-(endPosition-curPosition)
+     except during Expand and CNG) */
+    WebRtc_UWord16 fs; /* sample rate in Hz */
+    WebRtc_Word16 w16_frameLen; /* decoder frame length in samples */
+    WebRtc_Word16 w16_mode; /* operation used during last RecOut call */
+    WebRtc_Word16 w16_muteFactor; /* speech mute factor in Q14 */
+    WebRtc_Word16 *pw16_speechHistory; /* beginning of speech history during Expand */
+    WebRtc_Word16 w16_speechHistoryLen; /* 256 for NB and 512 for WB */
+
+    /* random noise seed parameters (note: uw16_seed is a 32-bit value despite its uw16_ prefix) */
+    WebRtc_Word16 w16_seedInc;
+    WebRtc_UWord32 uw16_seed;
+
+    /* VQmon related variable */
+    WebRtc_Word16 w16_concealedTS;
+
+    /*****************/
+    /* Sub-instances */
+    /*****************/
+
+    /* Decoder data */
+    CodecFuncInst_t codec_ptr_inst;
+
+#ifdef NETEQ_CNG_CODEC
+    /* CNG "decoder" instance */
+    CNG_dec_inst *CNG_Codec_inst;
+#endif /* NETEQ_CNG_CODEC */
+
+#ifdef NETEQ_ATEVENT_DECODE
+    /* DTMF generator instance */
+    dtmf_tone_inst_t DTMFInst;
+#endif /* NETEQ_ATEVENT_DECODE */
+
+#ifdef NETEQ_VAD
+    /* Post-decode VAD instance */
+    PostDecodeVAD_t VADInst;
+#endif /* NETEQ_VAD */
+
+    /* Expand instance (defined above) */
+    ExpandInst_t ExpandInst;
+
+    /* Background noise instance (defined above) */
+    BGNInst_t BGNInst;
+
+    /* Internal statistics instance */
+    DSPStats_t statInst;
+
+#ifdef NETEQ_STEREO
+    /* Pointer to Master/Slave info */
+    MasterSlaveInfo *msInfo;
+#endif
+
+} DSPInst_t;
+
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+/****************************************************************************
+ * WebRtcNetEQ_DSPInit(...)
+ *
+ * Initializes DSP side of NetEQ.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *      - fs            : Initial sample rate (may change when decoding data)
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs);
+
+/****************************************************************************
+ * WebRtcNetEQ_AddressInit(...)
+ *
+ * Initializes the shared-memory communication on the DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *      - data2McuAddress   : Pointer to memory where DSP writes / MCU reads
+ *      - data2DspAddress   : Pointer to memory where MCU writes / DSP reads
+ *      - mainInst          : NetEQ main instance
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
+                            const void *data2DspAddress, const void *mainInst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearInCallStats(...)
+ *
+ * Reset in-call statistics variables on DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearPostCallStats(...)
+ *
+ * Reset post-call statistics variables on DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutInternal(...)
+ *
+ * This function asks NetEQ for more speech/audio data.
+ *
+ * Input:
+ *		- inst			: NetEQ instance, i.e. the user that requests more 
+ *						  speech/audio data.
+ *		- outdata		: Pointer to a memory space where the output data
+ *						  should be stored.
+ *      - BGNonly       : If non-zero, RecOut will only produce background
+ *                        noise. It will still draw packets from the packet
+ *                        buffer, but they will never be decoded.
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- len			: Number of samples that were outputted from NetEq
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                       WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_Normal(...)
+ *
+ * This function has the possibility to modify data that is played out in Normal
+ * mode, for example adjust the gain of the signal. The length of the signal 
+ * can not be changed.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *		- decoded		: Pointer to vector of new data from decoder
+ *      - len           : Number of input samples
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *      - pw16_len      : Pointer to the variable where the number of samples
+ *                        produced will be written
+ *
+ * Return value			: >=0 - Number of samples written to outData
+ *						   -1 - Error
+ */
+
+int WebRtcNetEQ_Normal(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
+
+/****************************************************************************
+ * WebRtcNetEQ_Expand(...)
+ *
+ * This function produces one "chunk" of expansion data (PLC audio). The
+ * length of the produced audio depends on the speech history.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - BGNonly       : If non-zero, Expand will only produce background
+ *                        noise.
+ *      - pw16_len      : Desired number of samples (only for BGN mode).
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- outdata		: Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples that were outputted from NetEq
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Expand(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                       WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_GenerateBGN(...)
+ *
+ * This function generates and writes len samples of background noise to the
+ * output vector. The Expand function will be called repeatedly until the
+ * correct number of samples is produced.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - len           : Desired length of produced BGN.
+ *						  
+ *
+ * Output:
+ *		- pw16_outData	: Pointer to a memory space where the output data
+ *						  should be stored
+ *
+ * Return value			: >=0 - Number of noise samples produced and written
+ *                              to output
+ *						  -1  - Error
+ */
+
+int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
+#ifdef SCRATCH
+                            WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                            WebRtc_Word16 *pw16_outData, WebRtc_Word16 len);
+
+/****************************************************************************
+ * WebRtcNetEQ_PreEmptiveExpand(...)
+ *
+ * This function tries to extend the audio data by repeating one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low. The algorithm is the
+ * reciprocal of the Accelerate algorithm.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to newly decoded speech.
+ *		- len           : Length of decoded speech.
+ *      - oldDataLen    : Length of the part of decoded that has already been played out.
+ *      - BGNonly       : If non-zero, Pre-emptive Expand will only copy 
+ *                        the first DEFAULT_TIME_ADJUST seconds of the
+ *                        input and append to the end. No signal matching is
+ *                        done.
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *		- outData		: Pointer to a memory space where the output data
+ *						  should be stored. The vector must be at least
+ *						  min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
+ *						  elements long.
+ *		- pw16_len		: Number of samples written to outData.
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
+#ifdef SCRATCH
+                                 WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                 const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
+                                 WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                                 WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_Accelerate(...)
+ *
+ * This function tries to shorten the audio data by removing one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to newly decoded speech.
+ *		- len           : Length of decoded speech.
+ *      - BGNonly       : If non-zero, Accelerate will only remove the last 
+ *                        DEFAULT_TIME_ADJUST seconds of the input.
+ *                        No signal matching is done.
+ *
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *		- outData		: Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples written to outData.
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
+#ifdef SCRATCH
+                           WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                           const WebRtc_Word16 *pw16_decoded, int len,
+                           WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                           WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_Merge(...)
+ *
+ * This function is used to merge new data from the decoder to the existing
+ * stream in the synchronization buffer. The merge operation is typically
+ * done after a packet loss, where the end of the expanded data does not
+ * fit naturally with the new decoded data.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to new decoded speech.
+ *      - len           : Number of samples in pw16_decoded.
+ *
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- outData	    : Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples written to pw16_outData
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Merge(DSPInst_t *inst,
+#ifdef SCRATCH
+                      WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                      WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
+                      WebRtc_Word16 *pw16_len);
+
+/****************************************************************************
+ * WebRtcNetEQ_Cng(...)
+ *
+ * This function produces CNG according to RFC 3389
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *		- len			: Number of samples to produce
+ *
+ * Output:
+ *		- pw16_outData	: Output CNG
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+#ifdef NETEQ_CNG_CODEC
+/* Must compile NetEQ with CNG support to enable this function */
+
+int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len);
+
+#endif /* NETEQ_CNG_CODEC */
+
+/****************************************************************************
+ * WebRtcNetEQ_BGNUpdate(...)
+ *
+ * This function updates the background noise parameter estimates.
+ *
+ * Input:
+ *		- inst			: NetEQ instance, where the speech history is stored.
+ *      - scratchPtr    : Pointer to scratch vector.
+ *
+ * Output:
+ *		- inst			: Updated information about the BGN characteristics.
+ *
+ * Return value			: No return value
+ */
+
+void WebRtcNetEQ_BGNUpdate(
+#ifdef SCRATCH
+                           DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
+#else
+                           DSPInst_t *inst
+#endif
+                );
+
+#ifdef NETEQ_VAD
+/* Functions used by post-decode VAD */
+
+/****************************************************************************
+ * WebRtcNetEQ_InitVAD(...)
+ *
+ * Initializes post-decode VAD instance.
+ *
+ * Input:
+ *		- VADinst		: PostDecodeVAD instance
+ *      - fs            : Initial sample rate
+ *
+ * Output:
+ *		- VADinst		: Updated instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADModeInternal(...)
+ *
+ * Set the VAD mode in the VAD struct, and communicate it to the VAD instance 
+ * if it exists.
+ *
+ * Input:
+ *		- VADinst		: PostDecodeVAD instance
+ *      - mode          : Mode number passed on to the VAD function
+ *
+ * Output:
+ *		- VADinst		: Updated instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode);
+
+#endif /* NETEQ_VAD */
+
+/****************************************************************************
+ * WebRtcNetEQ_FlushSpeechBuffer(...)
+ *
+ * Flush the speech buffer.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst);
+
+#ifndef WEBRTC_NETEQ_40BITACC_TEST
+
+#include "signal_processing_library.h"
+/* Map to regular SPL functions */
+#define WebRtcNetEQ_CrossCorr   WebRtcSpl_CrossCorrelation
+#define WebRtcNetEQ_DotW16W16   WebRtcSpl_DotProductWithScale
+
+#else /* WEBRTC_NETEQ_40BITACC_TEST defined */
+/* Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP 
+ implementation where the main (splib and NetEQ) functions have been
+ 40-bit optimized. */
+
+/* Map to special 40-bit optimized functions, defined below */
+#define WebRtcNetEQ_CrossCorr		WebRtcNetEQ_40BitAccCrossCorr
+#define WebRtcNetEQ_DotW16W16	    WebRtcNetEQ_40BitAccDotW16W16
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccCrossCorr(...)
+ *
+ * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
+ * is fixed and seq2 slides as the pointer is increased with step
+ *
+ * Input:
+ *		- seq1			: First sequence (fixed throughout the correlation)
+ *		- seq2			: Second sequence (slided step_seq2 for each 
+ *						  new correlation)
+ *		- dimSeq		: Number of samples to use in the cross correlation.
+ *                        Should be no larger than 1024 to avoid overflow.
+ *		- dimCrossCorr	: Number of CrossCorrelations to calculate (start 
+ *						  position for seq2 is updated for each new one)
+ *		- rShift			: Number of right shifts to use
+ *		- step_seq2		: How many (positive or negative) steps the seq2 
+ *						  pointer should be updated for each new cross 
+ *						  correlation value
+ *
+ * Output:
+ *		- crossCorr		: The cross correlation in Q-rShift
+ */
+
+void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr, WebRtc_Word16 *seq1,
+                                   WebRtc_Word16 *seq2, WebRtc_Word16 dimSeq,
+                                   WebRtc_Word16 dimCrossCorr, WebRtc_Word16 rShift,
+                                   WebRtc_Word16 step_seq2);
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccDotW16W16(...)
+ *
+ * Calculates the dot product between two vectors (WebRtc_Word16)
+ *
+ * Input:
+ *		- vector1		: Vector 1
+ *		- vector2		: Vector 2
+ *		- len			: Number of samples in vector
+ *                        Should be no larger than 1024 to avoid overflow.
+ *		- scaling		: The number of right shifts (after multiplication)
+ *                        required to avoid overflow in the dot product.
+ * Return value			: The dot product
+ */
+
+WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1, WebRtc_Word16 *vector2,
+                                            int len, int scaling);
+
+#endif /* WEBRTC_NETEQ_40BITACC_TEST */
+
+#endif /* DSP_H */
diff --git a/trunk/src/modules/audio_coding/neteq/dsp_helpfunctions.c b/trunk/src/modules/audio_coding/neteq/dsp_helpfunctions.c
new file mode 100644
index 0000000..6e9a283
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dsp_helpfunctions.c
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some help functions that did not fit elsewhere.
+ */
+
+#include "dsp_helpfunctions.h"
+
+
+WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz)
+{
+    /* Map the sample rate to its multiple of 8 kHz. Any unsupported rate
+     falls back to 1, i.e., it is treated like 8 kHz. */
+    if (fsHz == 16000)
+    {
+        return 2;
+    }
+    else if (fsHz == 32000)
+    {
+        return 4;
+    }
+    else if (fsHz == 48000)
+    {
+        return 6;
+    }
+
+    /* 8000 Hz, or an unrecognized rate */
+    return 1;
+}
+
+
+/*
+ * Anti-alias filter and decimate the input signal down to a 4 kHz sample
+ * rate. The filter coefficients are selected from compiled-in tables; only
+ * the input rates enabled at build time (NETEQ_WIDEBAND etc.) are accepted.
+ */
+int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
+                                 WebRtc_Word16 *out, int outLen, int compensateDelay)
+{
+    WebRtc_Word16 *B; /* filter coefficients */
+    WebRtc_Word16 Blen; /* number of coefficients */
+    WebRtc_Word16 filterDelay; /* phase delay in samples */
+    WebRtc_Word16 factor; /* decimation factor (inFsHz/4000) */
+    int ok;
+
+    /* Set constants depending on frequency used */
+    /* NOTE: The phase delay values are wrong compared to the true phase delay
+     of the filters. However, the error is preserved (through the +1 term)
+     for consistency. */
+    switch (inFsHz)
+    {
+        case 8000:
+        {
+            Blen = 3;
+            factor = 2;
+            B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
+            filterDelay = 1 + 1;
+            break;
+        }
+#ifdef NETEQ_WIDEBAND
+            case 16000:
+            {
+                Blen = 5;
+                factor = 4;
+                B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample16kHzTbl;
+                filterDelay = 2 + 1;
+                break;
+            }
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+            case 32000:
+            {
+                Blen = 7;
+                factor = 8;
+                B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample32kHzTbl;
+                filterDelay = 3 + 1;
+                break;
+            }
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+            case 48000:
+            {
+                Blen = 7;
+                factor = 12;
+                B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample48kHzTbl;
+                filterDelay = 3 + 1;
+                break;
+            }
+#endif
+        default:
+        {
+            /* unsupported or wrong sample rate */
+            return -1;
+        }
+    }
+
+    if (!compensateDelay)
+    {
+        /* disregard delay compensation */
+        filterDelay = 0;
+    }
+
+    /* NOTE(review): the first Blen-1 input samples are skipped; presumably
+     they serve as filter history for WebRtcSpl_DownsampleFast -- confirm
+     against the SPL function's documentation. */
+    ok = WebRtcSpl_DownsampleFast((WebRtc_Word16*) &in[Blen - 1],
+        (WebRtc_Word16) (inLen - (Blen - 1)), /* number of input samples */
+        out, (WebRtc_Word16) outLen, /* number of output samples to produce */
+        B, Blen, factor, filterDelay); /* filter parameters */
+
+    return ok; /* return value is -1 if input signal is too short */
+
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/dsp_helpfunctions.h b/trunk/src/modules/audio_coding/neteq/dsp_helpfunctions.h
new file mode 100644
index 0000000..f728c09
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dsp_helpfunctions.h
@@ -0,0 +1,220 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Various help functions used by the DSP functions.
+ */
+
+#ifndef DSP_HELPFUNCTIONS_H
+#define DSP_HELPFUNCTIONS_H
+
+#include "typedefs.h"
+
+#include "dsp.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_Correlator(...)
+ *
+ * Calculate signal correlation.
+ *
+ * Input:
+ *      - inst          : DSP instance
+ *      - data          : Speech history to do expand from (older history in data[-4..-1])
+ *      - dataLen       : Length of data
+ *
+ * Output:
+ *      - corrOut       : CC of downsampled signal
+ *      - corrScale     : Scale factor for correlation (-Qdomain)
+ *
+ * Return value         : Length of correlated data
+ */
+
+WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
+#ifdef SCRATCH
+                                     WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                     WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
+                                     WebRtc_Word16 *pw16_corrOut,
+                                     WebRtc_Word16 *pw16_corrScale);
+
+/****************************************************************************
+ * WebRtcNetEQ_PeakDetection(...)
+ *
+ * Peak detection with parabolic fit.
+ *
+ * Input:
+ *      - data          : Data sequence for peak detection
+ *      - dataLen       : Length of data
+ *      - nmbPeaks      : Number of peaks to detect
+ *      - fs_mult       : Sample rate multiplier
+ *
+ * Output:
+ *      - corrIndex     : Index of the peak
+ *      - winner        : Value of the peak
+ *
+ * Return value         : 0 for ok
+ */
+
+WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
+                                        WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
+                                        WebRtc_Word16 *pw16_corrIndex,
+                                        WebRtc_Word16 *pw16_winners);
+
+/****************************************************************************
+ * WebRtcNetEQ_PrblFit(...)
+ *
+ * Three-point parabola fit.
+ *
+ * Input:
+ *      - 3pts          : Three input samples
+ *      - fs_mult       : Sample rate multiplier
+ *
+ * Output:
+ *      - Ind           : Index of the peak
+ *      - outVal        : Value of the peak
+ *
+ * Return value         : 0 for ok
+ */
+
+WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
+                                  WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult);
+
+/****************************************************************************
+ * WebRtcNetEQ_MinDistortion(...)
+ *
+ * Find the lag that results in minimum distortion.
+ *
+ * Input:
+ *      - data          : Start of speech to perform distortion on, second vector is assumed
+ *                        to be data[-Lag]
+ *      - minLag        : Start lag
+ *      - maxLag        : End lag
+ *      - len           : Length to correlate
+ *
+ * Output:
+ *      - dist          : Distortion value
+ *
+ * Return value         : Lag for minimum distortion
+ */
+
+WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
+                                        WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
+                                        WebRtc_Word16 len, WebRtc_Word32 *pw16_dist);
+
+/****************************************************************************
+ * WebRtcNetEQ_RandomVec(...)
+ *
+ * Generate random vector.
+ *
+ * Input:
+ *      - seed          : Current seed (input/output)
+ *      - len           : Number of samples to generate
+ *      - incVal        : Jump step
+ *
+ * Output:
+ *      - randVec       : Generated random vector
+ */
+
+void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
+                           WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval);
+
+/****************************************************************************
+ * WebRtcNetEQ_MixVoiceUnvoice(...)
+ *
+ * Mix voiced and unvoiced signal.
+ *
+ * Input:
+ *      - voicedVec         : Voiced input signal
+ *      - unvoicedVec       : Unvoiced input signal
+ *      - current_vfraction : Current mixing factor
+ *      - vfraction_change  : Mixing factor change per sample
+ *      - N                 : Number of samples
+ *
+ * Output:
+ *      - outData           : Mixed signal
+ */
+
+void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
+                                 WebRtc_Word16 *pw16_unvoicedVec,
+                                 WebRtc_Word16 *w16_current_vfraction,
+                                 WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N);
+
+/****************************************************************************
+ * WebRtcNetEQ_UnmuteSignal(...)
+ *
+ * Gradually reduce attenuation.
+ *
+ * Input:
+ *      - inVec         : Input signal
+ *      - startMuteFact : Starting attenuation
+ *      - unmuteFact    : Factor to "unmute" with (Q20)
+ *      - N             : Number of samples
+ *
+ * Output:
+ *      - outVec        : Output signal
+ */
+
+void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
+                              WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
+                              WebRtc_Word16 N);
+
+/****************************************************************************
+ * WebRtcNetEQ_MuteSignal(...)
+ *
+ * Gradually increase attenuation.
+ *
+ * Input:
+ *      - inout         : Input/output signal
+ *      - muteSlope     : Slope of muting
+ *      - N             : Number of samples
+ */
+
+void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
+                            WebRtc_Word16 N);
+
+/****************************************************************************
+ * WebRtcNetEQ_CalcFsMult(...)
+ *
+ * Calculate the sample rate divided by 8000.
+ *
+ * Input:
+ *		- fsHz			: Sample rate in Hz in {8000, 16000, 32000, 48000}.
+ *
+ * Return value			: fsHz/8000 for the valid values, 1 for other inputs
+ */
+
+WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz);
+
+/****************************************************************************
+ * WebRtcNetEQ_DownSampleTo4kHz(...)
+ *
+ * Lowpass filter and downsample a signal to 4 kHz sample rate.
+ *
+ * Input:
+ *      - in                : Input signal samples.
+ *      - inLen             : Number of input samples.
+ *		- inFsHz		    : Input sample rate in Hz.
+ *      - outLen            : Desired number of samples in decimated signal.
+ *      - compensateDelay   : If non-zero, compensate for the phase delay
+ *                            of the anti-alias filter.
+ *
+ * Output:
+ *      - out               : Output signal samples.
+ *
+ * Return value			    : 0 - Ok
+ *                           -1 - Error
+ *
+ */
+
+int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
+                                 WebRtc_Word16 *out, int outLen, int compensateDelay);
+
+#endif
+
diff --git a/trunk/src/modules/audio_coding/neteq/dtmf_buffer.c b/trunk/src/modules/audio_coding/neteq/dtmf_buffer.c
new file mode 100644
index 0000000..f00f9c9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dtmf_buffer.c
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of packet buffer for DTMF messages.
+ */
+
+#include "dtmf_buffer.h"
+
+#include "typedefs.h" /* to define endianness */
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+
+#ifdef NETEQ_ATEVENT_DECODE
+
+WebRtc_Word16 WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
+{
+    /*
+     * Remove the first (oldest) event from the event queue by shifting the
+     * remaining entries one step towards the front and clearing the vacated
+     * last slot. Always returns 0.
+     *
+     * The bounds are expressed in terms of MAX_DTMF_QUEUE_SIZE (instead of
+     * the previously hard-coded 3) so this function cannot silently get out
+     * of sync with the queue arrays if the queue size is ever changed.
+     */
+    int i;
+
+    /* Shift remaining events towards the front of the queue. */
+    for (i = 0; i < MAX_DTMF_QUEUE_SIZE - 1; i++)
+    {
+        DTMFdec_inst->EventQueue[i] = DTMFdec_inst->EventQueue[i + 1];
+        DTMFdec_inst->EventQueueVolume[i] = DTMFdec_inst->EventQueueVolume[i + 1];
+        DTMFdec_inst->EventQueueEnded[i] = DTMFdec_inst->EventQueueEnded[i + 1];
+        DTMFdec_inst->EventQueueStartTime[i] = DTMFdec_inst->EventQueueStartTime[i + 1];
+        DTMFdec_inst->EventQueueEndTime[i] = DTMFdec_inst->EventQueueEndTime[i + 1];
+    }
+    DTMFdec_inst->EventBufferSize--;
+
+    /* Mark the vacated last slot as unused (-1 == no event). */
+    DTMFdec_inst->EventQueue[MAX_DTMF_QUEUE_SIZE - 1] = -1;
+    DTMFdec_inst->EventQueueVolume[MAX_DTMF_QUEUE_SIZE - 1] = 0;
+    DTMFdec_inst->EventQueueEnded[MAX_DTMF_QUEUE_SIZE - 1] = 0;
+    DTMFdec_inst->EventQueueStartTime[MAX_DTMF_QUEUE_SIZE - 1] = 0;
+    DTMFdec_inst->EventQueueEndTime[MAX_DTMF_QUEUE_SIZE - 1] = 0;
+
+    return 0;
+}
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
+                                          WebRtc_Word16 MaxPLCtime)
+{
+    /* Reset the DTMF decoder state. Rejects negative PLC limits and sample
+     rates other than 8, 16, 32 or 48 kHz without touching the instance. */
+    int k;
+
+    if (MaxPLCtime < 0)
+    {
+        return DTMF_DEC_PARAMETER_ERROR;
+    }
+
+    /* One frame corresponds to 10 ms of audio at the given rate. */
+    switch (fs)
+    {
+        case 8000:
+            DTMFdec_inst->framelen = 80;
+            break;
+        case 16000:
+            DTMFdec_inst->framelen = 160;
+            break;
+        case 32000:
+            DTMFdec_inst->framelen = 320;
+            break;
+        case 48000:
+            DTMFdec_inst->framelen = 480;
+            break;
+        default:
+            return DTMF_DEC_PARAMETER_ERROR;
+    }
+
+    DTMFdec_inst->MaxPLCtime = MaxPLCtime;
+    DTMFdec_inst->CurrentPLCtime = 0;
+    DTMFdec_inst->EventBufferSize = 0;
+
+    /* Clear the whole event queue (-1 == no event in the slot). */
+    for (k = 0; k < MAX_DTMF_QUEUE_SIZE; k++)
+    {
+        DTMFdec_inst->EventQueue[k] = -1;
+        DTMFdec_inst->EventQueueVolume[k] = 0;
+        DTMFdec_inst->EventQueueEnded[k] = 0;
+        DTMFdec_inst->EventQueueStartTime[k] = 0;
+        DTMFdec_inst->EventQueueEndTime[k] = 0;
+    }
+    return 0;
+}
+
+/*
+ * Parse one 4-byte DTMF event payload and insert/update it in the event
+ * queue. Returns 0 on success (including silently discarded events) and
+ * DTMF_INSERT_ERROR if len != 4.
+ */
+WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
+                                          const WebRtc_Word16 *encoded, WebRtc_Word16 len,
+                                          WebRtc_UWord32 timeStamp)
+{
+
+    int i;
+    WebRtc_Word16 value; /* event number (key) */
+    const WebRtc_Word16 *EventStart;
+    WebRtc_Word16 endEvent; /* end-of-event ("E") bit */
+    WebRtc_Word16 Volume; /* volume field, in -dBm0 */
+    WebRtc_Word16 Duration; /* duration field, in timestamp units */
+    WebRtc_Word16 position = -1; /* queue index of a matching event, if any */
+
+    /* Extract event */
+    if (len == 4)
+    {
+        EventStart = encoded;
+        /* Unpack event number, E bit, volume and duration; the bit layout
+         matches the RTP telephone-event payload (RFC 4733/2833) read from
+         16-bit words, hence the endianness split. */
+#ifdef WEBRTC_BIG_ENDIAN
+        value=((*EventStart)>>8);
+        endEvent=((*EventStart)&0x80)>>7;
+        Volume=((*EventStart)&0x3F);
+        Duration=EventStart[1];
+#else
+        value = ((*EventStart) & 0xFF);
+        endEvent = ((*EventStart) & 0x8000) >> 15;
+        Volume = ((*EventStart) & 0x3F00) >> 8;
+        Duration = (((((WebRtc_UWord16) EventStart[1]) >> 8) & 0xFF)
+            | (((WebRtc_UWord16) (EventStart[1] & 0xFF)) << 8));
+#endif
+        /* Only events between 0-15 are supported (DTMF tones) */
+        if ((value < 0) || (value > 15))
+        {
+            return 0;
+        }
+
+        /* Discard all DTMF tones with really low volume (<-36dbm0) */
+        if (Volume > 36)
+        {
+            return 0;
+        }
+
+        /*Are there any unended events of the same type? */
+        for (i = 0; i < DTMFdec_inst->EventBufferSize; i++)
+        {
+            /* Going through the whole queue even when we have found a match will
+             ensure that we add to the latest applicable event  */
+            if ((DTMFdec_inst->EventQueue[i] == value) && (!DTMFdec_inst->EventQueueEnded[i]
+                || endEvent)) position = i;
+        }
+        if (position > -1)
+        {
+            /* Update the existing queue entry in place. */
+            DTMFdec_inst->EventQueueVolume[position] = Volume;
+            /* NOTE(review): the condition tests timeStamp + Duration but the
+             assignment uses EventQueueStartTime[position] + Duration; these
+             agree only when timeStamp equals the stored start time (e.g. a
+             retransmission of the same event) -- confirm this is intended. */
+            if ((timeStamp + Duration) > DTMFdec_inst->EventQueueEndTime[position]) DTMFdec_inst->EventQueueEndTime[position]
+                = DTMFdec_inst->EventQueueStartTime[position] + Duration;
+            if (endEvent) DTMFdec_inst->EventQueueEnded[position] = 1;
+        }
+        else
+        {
+            if (DTMFdec_inst->EventBufferSize == MAX_DTMF_QUEUE_SIZE)
+            { /* Buffer full */
+                /* Remove one event */
+                DTMFdec_inst->EventBufferSize--;
+            }
+            /* Store data in the instance on a new position*/
+            DTMFdec_inst->EventQueue[DTMFdec_inst->EventBufferSize] = value;
+            DTMFdec_inst->EventQueueVolume[DTMFdec_inst->EventBufferSize] = Volume;
+            DTMFdec_inst->EventQueueEnded[DTMFdec_inst->EventBufferSize] = endEvent;
+            DTMFdec_inst->EventQueueStartTime[DTMFdec_inst->EventBufferSize] = timeStamp;
+            DTMFdec_inst->EventQueueEndTime[DTMFdec_inst->EventBufferSize] = timeStamp
+                + Duration;
+            DTMFdec_inst->EventBufferSize++;
+        }
+        return 0;
+    }
+    return DTMF_INSERT_ERROR;
+}
+
+/*
+ * Decide which queued DTMF event (if any) should be played for the next
+ * frame. Writes the event number and volume to *event / *volume and returns
+ * the number of samples to play (framelen), or 0 if there is nothing to
+ * play yet. Note: *event / *volume are only written when playback starts.
+ */
+WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
+                                     WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp)
+{
+
+    if (DTMFdec_inst->EventBufferSize < 1) return 0; /* No events to play */
+
+    /* We have events, is it time to play them? */
+    if (currTimeStamp < DTMFdec_inst->EventQueueStartTime[0])
+    {
+        /*No, just return zero */
+        return 0;
+    }
+
+    /* Continue on the event that is currently ongoing */
+    *event = DTMFdec_inst->EventQueue[0];
+    *volume = DTMFdec_inst->EventQueueVolume[0];
+
+    if (DTMFdec_inst->EventQueueEndTime[0] >= (currTimeStamp + DTMFdec_inst->framelen))
+    {
+
+        /* Still at least frameLen to play */
+
+        DTMFdec_inst->CurrentPLCtime = 0;
+        if ((DTMFdec_inst->EventQueueEndTime[0] == (currTimeStamp + DTMFdec_inst->framelen))
+            && (DTMFdec_inst->EventQueueEnded[0]))
+        { /* We are done */
+            /*Remove the event from Queue*/
+            WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+        }
+        return DTMFdec_inst->framelen;
+
+    }
+    else
+    {
+        if ((DTMFdec_inst->EventQueueEnded[0]) || (DTMFdec_inst->EventQueue[1] > -1))
+        {
+            /*
+             * Less than frameLen to play and end of event or already received next event.
+             * Give out a whole frame size of audio to simplify things.
+             */
+
+            /*Remove the event from Queue*/
+            WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+            DTMFdec_inst->CurrentPLCtime = 0;
+
+            return DTMFdec_inst->framelen;
+
+        }
+        else
+        {
+            /* Less than frameLen to play and not end of event. */
+            /* Track how far past the event's end time we have played; used
+             to give up on events whose end packet never arrives. */
+            DTMFdec_inst->CurrentPLCtime = (WebRtc_Word16) (currTimeStamp
+                - DTMFdec_inst->EventQueueEndTime[0]);
+
+            if ((DTMFdec_inst->CurrentPLCtime > DTMFdec_inst->MaxPLCtime)
+                || (DTMFdec_inst->CurrentPLCtime < -DTMFdec_inst->MaxPLCtime))
+            {
+                /*Remove the event from queue*/
+                WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+                DTMFdec_inst->CurrentPLCtime = 0;
+            }
+
+            /* If we have a new event that it's time to play */
+            /* NOTE(review): this compares the next event's start time with >=
+             (start at least one frame in the future); the comment above
+             suggests "time to play", which would be <= -- confirm intent. */
+            if ((DTMFdec_inst->EventQueue[1] > -1) && (DTMFdec_inst->EventQueueStartTime[1]
+                >= (currTimeStamp + DTMFdec_inst->framelen)))
+            {
+                /*Remove the event from queue*/
+                WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+                DTMFdec_inst->CurrentPLCtime = 0;
+            }
+
+            return DTMFdec_inst->framelen;
+        }
+    }
+}
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/dtmf_buffer.h b/trunk/src/modules/audio_coding/neteq/dtmf_buffer.h
new file mode 100644
index 0000000..e185411
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dtmf_buffer.h
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Packet buffer for DTMF messages.
+ */
+
+#ifndef DTMF_BUFFER_H
+#define DTMF_BUFFER_H
+
+#include "typedefs.h"
+
+#include "neteq_defines.h"
+
+/* Include this code only if ATEVENT (DTMF) is defined in */
+#ifdef NETEQ_ATEVENT_DECODE
+
+#define MAX_DTMF_QUEUE_SIZE 4 
+
+/* State for the DTMF event queue (up to MAX_DTMF_QUEUE_SIZE pending events). */
+typedef struct dtmf_inst_t_
+{
+    WebRtc_Word16 MaxPLCtime; /* max time past an event's end before it is dropped */
+    WebRtc_Word16 CurrentPLCtime; /* time played past the current event's end */
+    WebRtc_Word16 EventQueue[MAX_DTMF_QUEUE_SIZE]; /* event numbers; -1 == unused slot */
+    WebRtc_Word16 EventQueueVolume[MAX_DTMF_QUEUE_SIZE]; /* volume per queued event */
+    WebRtc_Word16 EventQueueEnded[MAX_DTMF_QUEUE_SIZE]; /* non-zero once end bit received */
+    WebRtc_UWord32 EventQueueStartTime[MAX_DTMF_QUEUE_SIZE]; /* event start timestamp */
+    WebRtc_UWord32 EventQueueEndTime[MAX_DTMF_QUEUE_SIZE]; /* event end timestamp */
+    WebRtc_Word16 EventBufferSize; /* number of events currently queued */
+    WebRtc_Word16 framelen; /* samples per 10 ms frame (80 at 8 kHz, etc.) */
+} dtmf_inst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_DtmfDecoderInit(...)
+ *
+ * This function initializes a DTMF instance.
+ *
+ * Input:
+ *      - DTMF_decinst_t    : DTMF instance
+ *      - fs                : The sample rate used for the DTMF
+ *      - MaxPLCtime        : Maximum length for a PLC before zeros should be inserted
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
+                                          WebRtc_Word16 MaxPLCtime);
+
+/****************************************************************************
+ * WebRtcNetEQ_DtmfInsertEvent(...)
+ *
+ * This function decodes a packet with DTMF frames.
+ *
+ * Input:
+ *      - DTMFdec_inst      : DTMF instance
+ *      - encoded           : Encoded DTMF frame(s)
+ *      - len               : Bytes in encoded vector
+ *
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
+                                          const WebRtc_Word16 *encoded, WebRtc_Word16 len,
+                                          WebRtc_UWord32 timeStamp);
+
+/****************************************************************************
+ * WebRtcNetEQ_DtmfDecode(...)
+ *
+ * This function decodes a packet with DTMF frame(s). Output will be the
+ * event that should be played for next 10 ms. 
+ *
+ * Input:
+ *      - DTMFdec_inst      : DTMF instance
+ *      - currTimeStamp     : The current playout timestamp
+ *
+ * Output:
+ *      - event             : Event number to be played
+ *      - volume            : Event volume to be played
+ *
+ * Return value             : >0 - There is an event to be played
+ *                             0 - No event to be played
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
+                                     WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp);
+
+#endif    /* NETEQ_ATEVENT_DECODE */
+
+#endif    /* DTMF_BUFFER_H */
+
diff --git a/trunk/src/modules/audio_coding/neteq/dtmf_tonegen.c b/trunk/src/modules/audio_coding/neteq/dtmf_tonegen.c
new file mode 100644
index 0000000..a52f9bc
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dtmf_tonegen.c
@@ -0,0 +1,371 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the DTMF tone generator and its parameters.
+ *
+ * A sinusoid is generated using the recursive oscillator model
+ *
+ *      y[n] = sin(w*n + phi) = 2*cos(w) * y[n-1] - y[n-2]
+ *                            = a * y[n-1] - y[n-2]
+ *
+ * initialized with 
+ *      y[-2] = 0
+ *      y[-1] = sin(w)
+ *
+ * A DTMF signal is a combination of two sinusoids, depending
+ * on which event is sent (i.e, which key is pressed). The following
+ * table maps each key (event codes in parentheses) into two tones:
+ *
+ *          1209 Hz     1336 Hz     1477 Hz     1633 Hz
+ * 697 Hz   1 (ev. 1)   2 (ev. 2)   3 (ev. 3)   A (ev. 12)
+ * 770 Hz   4 (ev. 4)   5 (ev. 5)   6 (ev. 6)   B (ev. 13)
+ * 852 Hz   7 (ev. 7)   8 (ev. 8)   9 (ev. 9)   C (ev. 14)
+ * 941 Hz   * (ev. 10)  0 (ev. 0)   # (ev. 11)  D (ev. 15)
+ *
+ * The two tones are added to form the DTMF signal.
+ *
+ */
+
+#include "dtmf_tonegen.h"
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+/* Must compile NetEQ with DTMF support to enable the functionality */
+
+/*******************/
+/* Constant tables */
+/*******************/
+
+/*
+ * All tables corresponding to the oscillator model are organized so that
+ * the coefficients for a specific frequency are found in the same position
+ * in every table. The positions for the tones follow this layout:
+ *
+ *  dummyVector[8] =
+ *  {
+ *      697 Hz,     770 Hz,     852 Hz,     941 Hz,
+ *      1209 Hz,    1336 Hz,    1477 Hz,    1633 Hz
+ *  };
+ */
+
+/*
+ * Tables for the constant a = 2*cos(w) = 2*cos(2*pi*f/fs)
+ * in the oscillator model, for 8, 16, 32 and 48 kHz sample rate.
+ * Table values in Q14.
+ */
+
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl8Khz[8] =
+{
+    27980, 26956, 25701, 24219, /* 697, 770, 852, 941 Hz (low tones) */
+    19073, 16325, 13085, 9315 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+
+#ifdef NETEQ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl16Khz[8]=
+{
+    31548, 31281, 30951, 30556, /* 697, 770, 852, 941 Hz (low tones) */
+    29144, 28361, 27409, 26258 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl32Khz[8]=
+{
+    32462, 32394, 32311, 32210, /* 697, 770, 852, 941 Hz (low tones) */
+    31849, 31647, 31400, 31098 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl48Khz[8]=
+{
+    32632, 32602, 32564, 32520, /* 697, 770, 852, 941 Hz (low tones) */
+    32359, 32268, 32157, 32022 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+#endif
+
+/*
+ * Initialization values y[-1] = sin(w) = sin(2*pi*f/fs), for 8, 16, 32 and 48 kHz sample rate.
+ * Table values in Q14.
+ */
+
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
+{
+    8528, 9315, 10163, 11036, /* 697, 770, 852, 941 Hz (low tones) */
+    13323, 14206,15021, 15708 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+
+#ifdef NETEQ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
+{
+    4429, 4879, 5380, 5918, /* 697, 770, 852, 941 Hz (low tones) */
+    7490, 8207, 8979, 9801 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
+{
+    2235, 2468, 2728, 3010, /* 697, 770, 852, 941 Hz (low tones) */
+    3853, 4249, 4685, 5164 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
+{
+    1493, 1649, 1823, 2013, /* 697, 770, 852, 941 Hz (low tones) */
+    2582, 2851, 3148, 3476 /* 1209, 1336, 1477, 1633 Hz (high tones) */
+};
+#endif
+
+/* Gain table for scaling the DTMF signal to a given level in negative dBm0.
+ Entry i corresponds to -i dBm0 (entry 0 is 0 dBm0). Levels below -36 dBm0
+ are discarded, so the table stops at -36 (37 entries). Entries are in Q14.
+ */
+
+const WebRtc_Word16 WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
+                                                7210, 6426, 5727, 5104, 4549, 4054, 3614,
+                                                3221, 2870, 2558, 2280, 2032, 1811, 1614,
+                                                1439, 1282, 1143, 1018, 908, 809, 721, 643,
+                                                573, 510, 455, 405, 361, 322, 287, 256 };
+
+/****************************************************************************
+ * WebRtcNetEQ_DTMFGenerate(...)
+ *
+ * Generate 10 ms DTMF signal according to input parameters.
+ *
+ * Input:
+ *      - DTMFdecInst   : DTMF instance
+ *      - value         : DTMF event number (0-15)
+ *      - volume        : Volume of generated signal (0-36)
+ *                        Volume is given in negative dBm0, i.e., volume == 0
+ *                        means 0 dBm0 while volume == 36 means -36 dBm0.
+ *      - sampFreq      : Sample rate in Hz
+ *      - extFrameLen   : Frame length in samples, or -1 for the 10 ms default
+ * Output:
+ *      - signal        : Pointer to vector where DTMF signal is stored;
+ *                        Vector must be at least sampFreq/100 samples long.
+ *      - DTMFdecInst   : Updated DTMF instance
+ *
+ * Return value         : >0 - Number of samples written to signal
+ *                      : <0 - error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, WebRtc_Word16 value,
+                                       WebRtc_Word16 volume, WebRtc_Word16 *signal,
+                                       WebRtc_UWord16 sampFreq, WebRtc_Word16 extFrameLen)
+{
+    const WebRtc_Word16 *aTbl; /* pointer to a-coefficient table */
+    const WebRtc_Word16 *yInitTable; /* pointer to initialization value table */
+    WebRtc_Word16 a1 = 0; /* a-coefficient for first tone (low tone) */
+    WebRtc_Word16 a2 = 0; /* a-coefficient for second tone (high tone) */
+    int i;
+    int frameLen; /* number of samples to generate */
+    int lowIndex; /* table index for the low tone */
+    int highIndex; /* table index for the high tone */
+    WebRtc_Word32 tempVal;
+    WebRtc_Word16 tempValLow;
+    WebRtc_Word16 tempValHigh;
+
+    /* Sanity check for volume; valid attenuations are 0 through -36 dBm0 */
+    if ((volume < 0) || (volume > 36))
+    {
+        return DTMF_DEC_PARAMETER_ERROR;
+    }
+
+    /* Sanity check for extFrameLen; -1 means "use the default 10 ms frame" */
+    if (extFrameLen < -1)
+    {
+        return DTMF_DEC_PARAMETER_ERROR;
+    }
+
+    /* Select oscillator coefficient tables based on sample rate */
+    if (sampFreq == 8000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl8Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab8Khz;
+        frameLen = 80;
+#ifdef NETEQ_WIDEBAND
+    }
+    else if (sampFreq == 16000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl16Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab16Khz;
+        frameLen = 160;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    }
+    else if (sampFreq == 32000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl32Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab32Khz;
+        frameLen = 320;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    }
+    else if (sampFreq == 48000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl48Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab48Khz;
+        frameLen = 480;
+#endif
+    }
+    else
+    {
+        /* unsupported sample rate */
+        return DTMF_GEN_UNKNOWN_SAMP_FREQ;
+    }
+
+    if (extFrameLen >= 0)
+    {
+        frameLen = extFrameLen; /* externally supplied length overrides default */
+    }
+
+    /* select low frequency based on event value */
+    switch (value)
+    {
+        case 1:
+        case 2:
+        case 3:
+        case 12: /* first row on keypad */
+        {
+            lowIndex = 0; /* low frequency: 697 Hz */
+            break;
+        }
+        case 4:
+        case 5:
+        case 6:
+        case 13: /* second row on keypad */
+        {
+            lowIndex = 1; /* low frequency: 770 Hz */
+            break;
+        }
+        case 7:
+        case 8:
+        case 9:
+        case 14: /* third row on keypad */
+        {
+            lowIndex = 2; /* low frequency: 852 Hz */
+            break;
+        }
+        case 0:
+        case 10:
+        case 11:
+        case 15: /* fourth row on keypad */
+        {
+            lowIndex = 3; /* low frequency: 941 Hz */
+            break;
+        }
+        default:
+        {
+            return DTMF_DEC_PARAMETER_ERROR;
+        }
+    } /* end switch */
+
+    /* select high frequency based on event value */
+    switch (value)
+    {
+        case 1:
+        case 4:
+        case 7:
+        case 10: /* first column on keypad */
+        {
+            highIndex = 4; /* high frequency: 1209 Hz */
+            break;
+        }
+        case 2:
+        case 5:
+        case 8:
+        case 0: /* second column on keypad */
+        {
+            highIndex = 5; /* high frequency: 1336 Hz */
+            break;
+        }
+        case 3:
+        case 6:
+        case 9:
+        case 11: /* third column on keypad */
+        {
+            highIndex = 6; /* high frequency: 1477 Hz */
+            break;
+        }
+        case 12:
+        case 13:
+        case 14:
+        case 15: /* fourth column on keypad (special) */
+        {
+            highIndex = 7; /* high frequency: 1633 Hz */
+            break;
+        }
+        default:
+        {
+            return DTMF_DEC_PARAMETER_ERROR;
+        }
+    } /* end switch */
+
+    /* select coefficients based on results from switches above */
+    a1 = aTbl[lowIndex]; /* coefficient for first (low) tone */
+    a2 = aTbl[highIndex]; /* coefficient for second (high) tone */
+
+    if (DTMFdecInst->reinit)
+    {
+        /* set initial values for the recursive model */
+        DTMFdecInst->oldOutputLow[0] = yInitTable[lowIndex];
+        DTMFdecInst->oldOutputLow[1] = 0;
+        DTMFdecInst->oldOutputHigh[0] = yInitTable[highIndex];
+        DTMFdecInst->oldOutputHigh[1] = 0;
+
+        /* reset reinit flag */
+        DTMFdecInst->reinit = 0;
+    }
+
+    /* generate signal sample by sample */
+    for (i = 0; i < frameLen; i++)
+    {
+
+        /* Use recursion formula y[n] = a*y[n-1] - y[n-2] (Q14 with rounding) */
+        tempValLow
+                        = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
+                                        + 8192) >> 14) - DTMFdecInst->oldOutputLow[0]);
+        tempValHigh
+                        = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
+                                        + 8192) >> 14) - DTMFdecInst->oldOutputHigh[0]);
+
+        /* Update recursion memory */
+        DTMFdecInst->oldOutputLow[0] = DTMFdecInst->oldOutputLow[1];
+        DTMFdecInst->oldOutputLow[1] = tempValLow;
+        DTMFdecInst->oldOutputHigh[0] = DTMFdecInst->oldOutputHigh[1];
+        DTMFdecInst->oldOutputHigh[1] = tempValHigh;
+
+        /* scale high tone with 32768 (15 left shifts)
+         and low tone with 23171 (3dB lower than high tone) */
+        tempVal = WEBRTC_SPL_MUL_16_16(DTMF_AMP_LOW, tempValLow)
+                        + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tempValHigh, 15);
+
+        /* Norm the signal to Q14 (with proper rounding) */
+        tempVal = (tempVal + 16384) >> 15;
+
+        /* Scale the signal to correct dBm0 value */
+        signal[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                               (WEBRTC_SPL_MUL_16_16(tempVal, WebRtcNetEQ_dtfm_dBm0[volume])
+                               + 8192), 14); /* volume value is in Q14; use proper rounding */
+    }
+
+    return frameLen;
+
+}
+
+#endif /* NETEQ_ATEVENT_DECODE */
+
diff --git a/trunk/src/modules/audio_coding/neteq/dtmf_tonegen.h b/trunk/src/modules/audio_coding/neteq/dtmf_tonegen.h
new file mode 100644
index 0000000..add6eb1
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/dtmf_tonegen.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the DTMF tone generator function.
+ */
+
+#ifndef DTMF_TONEGEN_H
+#define DTMF_TONEGEN_H
+
+#include "typedefs.h"
+
+#include "neteq_defines.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+/* Must compile NetEQ with DTMF support to enable the functionality */
+
+#define DTMF_AMP_LOW	23171	/* low-tone amplitude, 3 dB below the high tone */
+
+/* The DTMF generator struct (part of DSP main struct DSPInst_t) */
+typedef struct dtmf_tone_inst_t_
+{
+
+    WebRtc_Word16 reinit; /* non-zero if the oscillator model should
+     be reinitialized for the next event */
+    WebRtc_Word16 oldOutputLow[2]; /* oscillator recursion history (low tone) */
+    WebRtc_Word16 oldOutputHigh[2]; /* oscillator recursion history (high tone) */
+
+    int lastDtmfSample; /* index to the first non-DTMF sample in the
+     speech history, if non-negative */
+}dtmf_tone_inst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_DTMFGenerate(...)
+ *
+ * Generate 10 ms DTMF signal according to input parameters.
+ *
+ * Input:
+ *      - DTMFdecInst   : DTMF instance
+ *      - value         : DTMF event number (0-15)
+ *      - volume        : Volume of generated signal (0-36)
+ *                        Volume is given in negative dBm0, i.e., volume == 0
+ *                        means 0 dBm0 while volume == 36 means -36 dBm0.
+ *      - sampFreq      : Sample rate in Hz
+ *      - extFrameLen   : Frame length in samples, or -1 for the 10 ms default
+ * Output:
+ *      - signal        : Pointer to vector where DTMF signal is stored;
+ *                        Vector must be at least sampFreq/100 samples long.
+ *      - DTMFdecInst   : Updated DTMF instance
+ *
+ * Return value         : >0 - Number of samples written to signal
+ *                      : <0 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
+                WebRtc_Word16 value,
+                WebRtc_Word16 volume,
+                WebRtc_Word16 *signal,
+                WebRtc_UWord16 sampFreq,
+                WebRtc_Word16 extFrameLen /* renamed from frameLen to match the definition */
+);
+
+#endif /* NETEQ_ATEVENT_DECODE */
+
+#endif /* DTMF_TONEGEN_H */
+
diff --git a/trunk/src/modules/audio_coding/neteq/expand.c b/trunk/src/modules/audio_coding/neteq/expand.c
new file mode 100644
index 0000000..3db7a2a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/expand.c
@@ -0,0 +1,1216 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the function to expand from the speech history, to produce concealment data or
+ * increasing delay.
+ */
+
+#include "dsp.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+#define CHECK_NO_OF_CORRMAX        3   /* number of correlation maxima examined */
+#define DISTLEN                    20  /* distortion window length (scaled by fs/8000) */
+#define LPCANALASYSLEN           160   /* LPC analysis length (scaled by fs/8000); sic: misspelling kept */
+
+/* Scratch usage:
+
+ Type            Name                    size            startpos        endpos
+ (First part of first expand)
+ WebRtc_Word16  pw16_bestCorrIndex      3               0               2
+ WebRtc_Word16  pw16_bestCorr           3               3               5
+ WebRtc_Word16  pw16_bestDistIndex      3               6               8
+ WebRtc_Word16  pw16_bestDist           3               9               11
+ WebRtc_Word16  pw16_corrVec            102*fs/8000     12              11+102*fs/8000
+ func           WebRtcNetEQ_Correlator  232             12+102*fs/8000  243+102*fs/8000
+
+ (Second part of first expand)
+ WebRtc_Word32  pw32_corr2              99*fs/8000+1    0               99*fs/8000
+ WebRtc_Word32  pw32_autoCorr           2*7             0               13
+ WebRtc_Word16  pw16_rc                 6               14              19
+
+ Signal combination:
+ WebRtc_Word16  pw16_randVec            30+120*fs/8000  0               29+120*fs/8000
+ WebRtc_Word16  pw16_scaledRandVec      125*fs/8000     30+120*fs/8000  29+245*fs/8000
+ WebRtc_Word16  pw16_unvoicedVecSpace   10+125*fs/8000  30+245*fs/8000  39+370*fs/8000
+
+ Total: 40+370*fs/8000 (size depends on UNVOICED_LPC_ORDER and BGN_LPC_ORDER)
+ */
+
+#if ((BGN_LPC_ORDER > 10) || (UNVOICED_LPC_ORDER > 10)) && (defined SCRATCH)
+#error BGN_LPC_ORDER and/or BGN_LPC_ORDER are too large for current scratch memory allocation
+#endif
+
+#define     SCRATCH_PW16_BEST_CORR_INDEX    0   /* offsets (in W16 units) into shared scratch; see table above */
+#define     SCRATCH_PW16_BEST_CORR          3
+#define     SCRATCH_PW16_BEST_DIST_INDEX    6
+#define     SCRATCH_PW16_BEST_DIST          9
+#define     SCRATCH_PW16_CORR_VEC           12
+#define     SCRATCH_PW16_CORR2              0   /* reuses region 0; disjoint in time from the vectors above */
+#define     SCRATCH_PW32_AUTO_CORR          0
+#define     SCRATCH_PW16_RC                 14
+#define     SCRATCH_PW16_RAND_VEC           0
+
+#if (defined(NETEQ_48KHZ_WIDEBAND))
+#define     SCRATCH_NETEQDSP_CORRELATOR     624
+#define     SCRATCH_PW16_SCALED_RAND_VEC    750
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 1500
+#elif (defined(NETEQ_32KHZ_WIDEBAND))
+#define     SCRATCH_NETEQDSP_CORRELATOR     420
+#define     SCRATCH_PW16_SCALED_RAND_VEC    510
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 1010
+#elif (defined(NETEQ_WIDEBAND))
+#define     SCRATCH_NETEQDSP_CORRELATOR     216
+#define     SCRATCH_PW16_SCALED_RAND_VEC    270
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 520
+#else    /* NB */
+#define     SCRATCH_NETEQDSP_CORRELATOR     114
+#define     SCRATCH_PW16_SCALED_RAND_VEC    150
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 275
+#endif
+
+/****************************************************************************
+ * WebRtcNetEQ_Expand(...)
+ *
+ * This function produces one "chunk" of expansion data (PLC audio). The
+ * length of the produced audio depends on the speech history.
+ *
+ * Input:
+ *      - inst          : DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - outdata       : Pointer to a memory space where the output data
+ *                        should be stored
+ *      - BGNonly       : If non-zero, "expand" will only produce background noise.
+ *      - pw16_len      : Desired number of samples (only for BGN mode).
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *      - pw16_len      : Number of samples that were output from NetEq
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_Expand(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                       WebRtc_Word16 BGNonly)
+{
+
+    WebRtc_Word16 fs_mult;
+    ExpandInst_t *ExpandState = &(inst->ExpandInst);
+    BGNInst_t *BGNState = &(inst->BGNInst);
+    int i;
+#ifdef SCRATCH
+    WebRtc_Word16 *pw16_randVec = pw16_scratchPtr + SCRATCH_PW16_RAND_VEC;
+    WebRtc_Word16 *pw16_scaledRandVec = pw16_scratchPtr + SCRATCH_PW16_SCALED_RAND_VEC;
+    WebRtc_Word16 *pw16_unvoicedVecSpace = pw16_scratchPtr + SCRATCH_PW16_UNVOICED_VEC_SPACE;
+#else
+    WebRtc_Word16 pw16_randVec[FSMULT * 120 + 30]; /* 150 for NB and 270 for WB */
+    WebRtc_Word16 pw16_scaledRandVec[FSMULT * 125]; /* 125 for NB and 250 for WB */
+    WebRtc_Word16 pw16_unvoicedVecSpace[BGN_LPC_ORDER + FSMULT * 125];
+#endif
+    /* 125 for NB and 250 for WB etc. Reuse pw16_outData[] for this vector */
+    WebRtc_Word16 *pw16_voicedVecStorage = pw16_outData;
+    WebRtc_Word16 *pw16_voicedVec = &pw16_voicedVecStorage[ExpandState->w16_overlap];
+    WebRtc_Word16 *pw16_unvoicedVec = pw16_unvoicedVecSpace + UNVOICED_LPC_ORDER;
+    WebRtc_Word16 *pw16_cngVec = pw16_unvoicedVecSpace + BGN_LPC_ORDER;
+    WebRtc_Word16 w16_expVecsLen, w16_lag = 0, w16_expVecPos;
+    WebRtc_Word16 w16_randLen;
+    WebRtc_Word16 w16_vfractionChange; /* in Q14 */
+    WebRtc_Word16 w16_winMute = 0, w16_winMuteInc = 0, w16_winUnMute = 0, w16_winUnMuteInc = 0;
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word16 w16_tmp, w16_tmp2;
+    WebRtc_Word16 stability;
+    enum BGNMode bgnMode = inst->BGNInst.bgnMode;
+
+    /* Pre-calculate common multiplications with fs_mult */
+    WebRtc_Word16 fsMult4;
+    WebRtc_Word16 fsMult20;
+    WebRtc_Word16 fsMult120;
+    WebRtc_Word16 fsMultDistLen;
+    WebRtc_Word16 fsMultLPCAnalasysLen;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    /* fs is WebRtc_UWord16 (to hold fs=48000) */
+    fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs); /* calculate fs/8000 */
+
+    /* Pre-calculate common multiplications with fs_mult */
+    fsMult4 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 4);
+    fsMult20 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 20);
+    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 120);
+    fsMultDistLen = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, DISTLEN);
+    fsMultLPCAnalasysLen = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, LPCANALASYSLEN);
+
+    /*
+     * Perform all the initial setup if it's the first expansion.
+     * If background noise (BGN) only, this setup is not needed.
+     */
+    if (ExpandState->w16_consecExp == 0 && !BGNonly)
+    {
+        /* Setup more variables */
+#ifdef SCRATCH
+        WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr
+            + SCRATCH_PW32_AUTO_CORR);
+        WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
+        WebRtc_Word16 *pw16_bestCorrIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR_INDEX;
+        WebRtc_Word16 *pw16_bestCorr = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR;
+        WebRtc_Word16 *pw16_bestDistIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST_INDEX;
+        WebRtc_Word16 *pw16_bestDist = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST;
+        WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_PW16_CORR_VEC;
+        WebRtc_Word32 *pw32_corr2 = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW16_CORR2);
+#else
+        WebRtc_Word32 pw32_autoCorr[UNVOICED_LPC_ORDER+1];
+        WebRtc_Word16 pw16_rc[UNVOICED_LPC_ORDER];
+        WebRtc_Word16 pw16_corrVec[FSMULT*102]; /* 102 for NB */
+        WebRtc_Word16 pw16_bestCorrIndex[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 pw16_bestCorr[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 pw16_bestDistIndex[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 pw16_bestDist[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word32 pw32_corr2[(99*FSMULT)+1];
+#endif
+        WebRtc_Word32 pw32_bestDist[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 w16_ind = 0;
+        WebRtc_Word16 w16_corrVecLen;
+        WebRtc_Word16 w16_corrScale;
+        WebRtc_Word16 w16_distScale;
+        WebRtc_Word16 w16_indMin, w16_indMax;
+        WebRtc_Word16 w16_len;
+        WebRtc_Word32 w32_en1, w32_en2, w32_cc;
+        WebRtc_Word16 w16_en1Scale, w16_en2Scale;
+        WebRtc_Word16 w16_en1, w16_en2;
+        WebRtc_Word32 w32_en1_mul_en2;
+        WebRtc_Word16 w16_sqrt_en1en2;
+        WebRtc_Word16 w16_ccShiftL;
+        WebRtc_Word16 w16_bestcorr; /* Correlation in Q14 */
+        WebRtc_Word16 *pw16_vec1, *pw16_vec2;
+        WebRtc_Word16 w16_factor;
+        WebRtc_Word16 w16_DistLag, w16_CorrLag, w16_diffLag;
+        WebRtc_Word16 w16_energyLen;
+        WebRtc_Word16 w16_slope;
+        WebRtc_Word16 w16_startInd;
+        WebRtc_Word16 w16_noOfcorr2;
+        WebRtc_Word16 w16_scale;
+
+        /* Initialize some variables */
+        ExpandState->w16_lagsDirection = 1;
+        ExpandState->w16_lagsPosition = -1;
+        ExpandState->w16_expandMuteFactor = 16384; /* Start from 1.0 (Q14) */
+        BGNState->w16_mutefactor = 0; /* Start with 0 gain for BGN (value in Q14) */
+        inst->w16_seedInc = 1;
+
+#ifdef NETEQ_STEREO
+        /* Sanity for msInfo */
+        if (msInfo == NULL)
+        {
+            /* this should not happen here */
+            return MASTER_SLAVE_ERROR;
+        }
+
+        /*
+         * Do not calculate correlations for slave instance(s)
+         * unless lag info from master is corrupt
+         */
+        if ((msInfo->msMode != NETEQ_SLAVE)
+        || ((msInfo->distLag <= 0) || (msInfo->corrLag <= 0)))
+        {
+#endif
+            /* Calculate correlation vector in downsampled domain (4 kHz sample rate) */
+            w16_corrVecLen = WebRtcNetEQ_Correlator(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQDSP_CORRELATOR,
+#endif
+                inst->pw16_speechHistory, inst->w16_speechHistoryLen, pw16_corrVec,
+                &w16_corrScale);
+
+            /* Find peaks in correlation vector using parabolic fit method */
+            WebRtcNetEQ_PeakDetection(pw16_corrVec, w16_corrVecLen, CHECK_NO_OF_CORRMAX, fs_mult,
+                pw16_bestCorrIndex, pw16_bestCorr);
+
+            /*
+             * Adjust peak locations; cross-correlation lags start at 2.5 ms
+             * (20*fs_mult samples)
+             */
+            pw16_bestCorrIndex[0] += fsMult20;
+            pw16_bestCorrIndex[1] += fsMult20;
+            pw16_bestCorrIndex[2] += fsMult20;
+
+            /* Calculate distortion around the 3 (CHECK_NO_OF_CORRMAX) best lags */
+            w16_distScale = 0;
+            for (i = 0; i < CHECK_NO_OF_CORRMAX; i++)
+            {
+                w16_tmp = fsMult20;
+                w16_tmp2 = pw16_bestCorrIndex[i] - fsMult4;
+                w16_indMin = WEBRTC_SPL_MAX(w16_tmp, w16_tmp2);
+                w16_tmp = fsMult120 - 1;
+                w16_tmp2 = pw16_bestCorrIndex[i] + fsMult4;
+                w16_indMax = WEBRTC_SPL_MIN(w16_tmp, w16_tmp2);
+
+                pw16_bestDistIndex[i] = WebRtcNetEQ_MinDistortion(
+                    &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultDistLen]),
+                    w16_indMin, w16_indMax, fsMultDistLen, &pw32_bestDist[i]);
+
+                w16_distScale
+                    = WEBRTC_SPL_MAX(16 - WebRtcSpl_NormW32(pw32_bestDist[i]), w16_distScale);
+
+            }
+
+            /* Shift the distortion values to fit in WebRtc_Word16 */
+            WebRtcSpl_VectorBitShiftW32ToW16(pw16_bestDist, CHECK_NO_OF_CORRMAX, pw32_bestDist,
+                w16_distScale);
+
+            /*
+             * Find index of maximum criteria, where crit[i] = bestCorr[i])/(bestDist[i])
+             * Do this by a cross multiplication.
+             */
+
+            w32_en1 = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[0],pw16_bestDist[1]);
+            w32_en2 = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[1],pw16_bestDist[0]);
+            if (w32_en1 >= w32_en2)
+            {
+                /* 0 wins over 1 */
+                w32_en1
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[0], pw16_bestDist[2]);
+                w32_en2
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[2], pw16_bestDist[0]);
+                if (w32_en1 >= w32_en2)
+                {
+                    /* 0 wins over 2 */
+                    w16_ind = 0;
+                }
+                else
+                {
+                    /* 2 wins over 0 */
+                    w16_ind = 2;
+                }
+            }
+            else
+            {
+                /* 1 wins over 0 */
+                w32_en1
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[1],pw16_bestDist[2]);
+                w32_en2
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[2],pw16_bestDist[1]);
+                if ((WebRtc_Word32) w32_en1 >= (WebRtc_Word32) w32_en2)
+                {
+                    /* 1 wins over 2 */
+                    w16_ind = 1;
+                }
+                else
+                {
+                    /* 2 wins over 1 */
+                    w16_ind = 2;
+                }
+            }
+
+#ifdef NETEQ_STEREO
+        }
+
+        /* Store DistLag and CorrLag of the position with highest criteria */
+        if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO)
+            || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->distLag <= 0 || msInfo->corrLag
+                <= 0)))
+        {
+            /* lags not provided externally */
+            w16_DistLag = pw16_bestDistIndex[w16_ind];
+            w16_CorrLag = pw16_bestCorrIndex[w16_ind];
+            if (msInfo->msMode == NETEQ_MASTER)
+            {
+                msInfo->distLag = w16_DistLag;
+                msInfo->corrLag = w16_CorrLag;
+            }
+        }
+        else if (msInfo->msMode == NETEQ_SLAVE)
+        {
+            /* lags provided externally (from master) */
+            w16_DistLag = msInfo->distLag;
+            w16_CorrLag = msInfo->corrLag;
+
+            /* sanity for lag values */
+            if ((w16_DistLag <= 0) || (w16_CorrLag <= 0))
+            {
+                return MASTER_SLAVE_ERROR;
+            }
+        }
+        else
+        {
+            /* Invalid mode */
+            return MASTER_SLAVE_ERROR;
+        }
+#else /* not NETEQ_STEREO */
+        w16_DistLag = pw16_bestDistIndex[w16_ind];
+        w16_CorrLag = pw16_bestCorrIndex[w16_ind];
+#endif
+
+        ExpandState->w16_maxLag = WEBRTC_SPL_MAX(w16_DistLag, w16_CorrLag);
+
+        /* Calculate the exact best correlation (in the range within CorrLag-DistLag) */
+        w16_len = w16_DistLag + 10;
+        w16_len = WEBRTC_SPL_MIN(w16_len, fsMult120);
+        w16_len = WEBRTC_SPL_MAX(w16_len, 60 * fs_mult);
+
+        w16_startInd = WEBRTC_SPL_MIN(w16_DistLag, w16_CorrLag);
+        w16_noOfcorr2 = WEBRTC_SPL_ABS_W16((w16_DistLag-w16_CorrLag)) + 1;
+        /* w16_noOfcorr2 maximum value is 99*fs_mult + 1 */
+
+        /* Calculate suitable scaling */
+        w16_tmp
+            = WebRtcSpl_MaxAbsValueW16(
+                &inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd
+                    - w16_noOfcorr2],
+                (WebRtc_Word16) (w16_len + w16_startInd + w16_noOfcorr2 - 1));
+        w16_corrScale = ((31 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_tmp, w16_tmp)))
+            + (31 - WebRtcSpl_NormW32(w16_len))) - 31;
+        w16_corrScale = WEBRTC_SPL_MAX(0, w16_corrScale);
+
+        /*
+         * Perform the correlation, store in pw32_corr2
+         */
+
+        WebRtcNetEQ_CrossCorr(pw32_corr2,
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd]),
+            w16_len, w16_noOfcorr2, w16_corrScale, -1);
+
+        /* Find maximizing index */
+        w16_ind = WebRtcSpl_MaxIndexW32(pw32_corr2, w16_noOfcorr2);
+        w32_cc = pw32_corr2[w16_ind]; /* this is maximum correlation */
+        w16_ind = w16_ind + w16_startInd; /* correct index for start offset */
+
+        /* Calculate energies */
+        w32_en1 = WebRtcNetEQ_DotW16W16(
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]), w16_len,
+            w16_corrScale);
+        w32_en2 = WebRtcNetEQ_DotW16W16(
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
+            w16_len, w16_corrScale);
+
+        /* Calculate the correlation value w16_bestcorr */
+        if ((w32_en1 > 0) && (w32_en2 > 0))
+        {
+            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
+            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
+            if ((w16_en1Scale + w16_en2Scale) & 1)
+            {
+                /* if sum is odd */
+                w16_en1Scale += 1;
+            }
+            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+            w32_en1_mul_en2 = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
+            w16_sqrt_en1en2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_en1_mul_en2);
+
+            /* Calculate cc/sqrt(en1*en2) in Q14 */
+            w16_ccShiftL = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
+            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_ccShiftL);
+            w16_bestcorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrt_en1en2);
+            w16_bestcorr = WEBRTC_SPL_MIN(16384, w16_bestcorr); /* set maximum to 1.0 */
+
+        }
+        else
+        {
+            /* if either en1 or en2 is zero */
+            w16_bestcorr = 0;
+        }
+
+        /*
+         * Extract the two vectors, pw16_expVecs[0][] and pw16_expVecs[1][],
+         * from the SpeechHistory[]
+         */
+        w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
+        pw16_vec1 = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_expVecsLen]);
+        pw16_vec2 = pw16_vec1 - w16_DistLag;
+        /* Normalize the second vector to the same energy as the first */
+        w32_en1 = WebRtcNetEQ_DotW16W16(pw16_vec1, pw16_vec1, w16_expVecsLen, w16_corrScale);
+        w32_en2 = WebRtcNetEQ_DotW16W16(pw16_vec2, pw16_vec2, w16_expVecsLen, w16_corrScale);
+
+        /*
+         * Confirm that energy factor sqrt(w32_en1/w32_en2) is within difference 0.5 - 2.0
+         * w32_en1/w32_en2 within 0.25 - 4
+         */
+        if (((w32_en1 >> 2) < w32_en2) && ((w32_en1) > (w32_en2 >> 2)))
+        {
+
+            /* Energy constraint fulfilled => use both vectors and scale them accordingly */
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+            w16_en1Scale = w16_en2Scale - 13;
+
+            /* calculate w32_en1/w32_en2 in Q13 */
+            w32_en1_mul_en2 = WebRtcSpl_DivW32W16(
+                WEBRTC_SPL_SHIFT_W32(w32_en1, -w16_en1Scale),
+                (WebRtc_Word16) (WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale)));
+
+            /* calculate factor in Q13 (sqrt of en1/en2 in Q26) */
+            w16_factor = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+                WEBRTC_SPL_LSHIFT_W32(w32_en1_mul_en2, 13));
+
+            /* Copy the two vectors and give them the same energy */
+
+            WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
+            WebRtcSpl_AffineTransformVector(ExpandState->pw16_expVecs[1], pw16_vec2,
+                w16_factor, 4096, 13, w16_expVecsLen);
+
+        }
+        else
+        {
+            /* Energy change constraint not fulfilled => only use last vector */
+
+            WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
+            WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[1], ExpandState->pw16_expVecs[0],
+                w16_expVecsLen);
+
+            /* Set the w16_factor since it is used by muting slope */
+            if (((w32_en1 >> 2) < w32_en2) || (w32_en2 == 0))
+            {
+                w16_factor = 4096; /* 0.5 in Q13*/
+            }
+            else
+            {
+                w16_factor = 16384; /* 2.0 in Q13*/
+            }
+        }
+
+        /* Set the 3 lag values */
+        w16_diffLag = w16_DistLag - w16_CorrLag;
+        if (w16_diffLag == 0)
+        {
+            /* DistLag and CorrLag are equal */
+            ExpandState->w16_lags[0] = w16_DistLag;
+            ExpandState->w16_lags[1] = w16_DistLag;
+            ExpandState->w16_lags[2] = w16_DistLag;
+        }
+        else
+        {
+            /* DistLag and CorrLag are not equal; use different combinations of the two */
+            ExpandState->w16_lags[0] = w16_DistLag; /* DistLag only */
+            ExpandState->w16_lags[1] = ((w16_DistLag + w16_CorrLag) >> 1); /* 50/50 */
+            /* Third lag, move one half-step towards CorrLag (in both cases) */
+            if (w16_diffLag > 0)
+            {
+                ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag - 1) >> 1;
+            }
+            else
+            {
+                ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag + 1) >> 1;
+            }
+        }
+
+        /*************************************************
+         * Calculate the LPC and the gain of the filters *
+         *************************************************/
+
+        /* Calculate scale value needed for autocorrelation */
+        w16_tmp = WebRtcSpl_MaxAbsValueW16(
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultLPCAnalasysLen]),
+            fsMultLPCAnalasysLen);
+
+        w16_tmp = 16 - WebRtcSpl_NormW32(w16_tmp);
+        w16_tmp = WEBRTC_SPL_MIN(w16_tmp,0);
+        w16_tmp = (w16_tmp << 1) + 7;
+        w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
+
+        /* set w16_ind to simplify the following expressions */
+        w16_ind = inst->w16_speechHistoryLen - fsMultLPCAnalasysLen - UNVOICED_LPC_ORDER;
+
+        /* store first UNVOICED_LPC_ORDER samples in pw16_rc */
+
+        WEBRTC_SPL_MEMCPY_W16(pw16_rc, &inst->pw16_speechHistory[w16_ind], UNVOICED_LPC_ORDER);
+
+        /* set first samples to zero */
+        WebRtcSpl_MemSetW16(&inst->pw16_speechHistory[w16_ind], 0, UNVOICED_LPC_ORDER);
+
+        /* Calculate UNVOICED_LPC_ORDER+1 lags of the ACF */
+
+        WebRtcNetEQ_CrossCorr(
+            pw32_autoCorr, &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]),
+            &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]), fsMultLPCAnalasysLen,
+            UNVOICED_LPC_ORDER + 1, w16_tmp, -1);
+
+        /* Recover the stored samples from pw16_rc */
+
+        WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[w16_ind], pw16_rc, UNVOICED_LPC_ORDER);
+
+        if (pw32_autoCorr[0] > 0)
+        { /* check that variance is positive */
+
+            /* estimate AR filter parameters using Levinson-Durbin algorithm
+             (UNVOICED_LPC_ORDER+1 filter coefficients) */
+            stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, ExpandState->pw16_arFilter,
+                pw16_rc, UNVOICED_LPC_ORDER);
+
+            /* Only update BGN if filter is stable */
+            if (stability != 1)
+            {
+                /* Set first coefficient to 4096 (1.0 in Q12)*/
+                ExpandState->pw16_arFilter[0] = 4096;
+                /* Set remaining UNVOICED_LPC_ORDER coefficients to zero */
+                WebRtcSpl_MemSetW16(ExpandState->pw16_arFilter + 1, 0, UNVOICED_LPC_ORDER);
+            }
+
+        }
+
+        if (w16_DistLag < 40)
+        {
+            w16_energyLen = 2 * w16_DistLag;
+        }
+        else
+        {
+            w16_energyLen = w16_DistLag;
+        }
+        w16_randLen = w16_energyLen + 30; /* Startup part */
+
+        /* Extract a noise segment */
+        if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
+        {
+            WEBRTC_SPL_MEMCPY_W16(pw16_randVec,
+                (WebRtc_Word16*) WebRtcNetEQ_kRandnTbl, w16_randLen);
+        }
+        else
+        { /* only applies to SWB where length could be larger than 256 */
+#if FSMULT >= 2  /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
+            WEBRTC_SPL_MEMCPY_W16(pw16_randVec, (WebRtc_Word16*) WebRtcNetEQ_kRandnTbl,
+                RANDVEC_NO_OF_SAMPLES);
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            assert(w16_randLen <= FSMULT * 120 + 30);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
+                (WebRtc_Word16) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
+#else
+            assert(0);
+#endif
+        }
+
+        /* Set up state vector and calculate scale factor for unvoiced filtering */
+
+        WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - UNVOICED_LPC_ORDER]),
+            UNVOICED_LPC_ORDER);
+        WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128 - UNVOICED_LPC_ORDER]),
+            UNVOICED_LPC_ORDER);
+        WebRtcSpl_FilterMAFastQ12(&inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128],
+            pw16_unvoicedVec, ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, 128);
+        if (WebRtcSpl_MaxAbsValueW16(pw16_unvoicedVec, 128) > 4000)
+        {
+            w16_scale = 4;
+        }
+        else
+        {
+            w16_scale = 0;
+        }
+        w32_tmp = WebRtcNetEQ_DotW16W16(pw16_unvoicedVec, pw16_unvoicedVec, 128, w16_scale);
+
+        /* Normalize w32_tmp to 28 or 29 bits to preserve sqrt() accuracy */
+        w16_tmp = WebRtcSpl_NormW32(w32_tmp) - 3;
+        w16_tmp += ((w16_tmp & 0x1) ^ 0x1); /* Make sure we do an odd number of shifts since we
+         from earlier have 7 shifts from dividing with 128.*/
+        w32_tmp = WEBRTC_SPL_SHIFT_W32(w32_tmp, w16_tmp);
+        w32_tmp = WebRtcSpl_SqrtFloor(w32_tmp);
+        ExpandState->w16_arGainScale = 13 + ((w16_tmp + 7 - w16_scale) >> 1);
+        ExpandState->w16_arGain = (WebRtc_Word16) w32_tmp;
+
+        /********************************************************************
+         * Calculate vfraction from bestcorr                                *
+         * if (bestcorr>0.480665)                                           *
+         *     vfraction = ((bestcorr-0.4)/(1-0.4)).^2                      *
+         * else    vfraction = 0                                            *
+         *                                                                  *
+         * approximation (coefficients in Q12):                             *
+         * if (x>0.480665)    (y(x)<0.3)                                    *
+         *   y(x) = -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 *
+         * else y(x) = 0;                                                   *
+         ********************************************************************/
+
+        if (w16_bestcorr > 7875)
+        {
+            /* if x>0.480665 */
+            WebRtc_Word16 w16_x1, w16_x2, w16_x3;
+            w16_x1 = w16_bestcorr;
+            w32_tmp = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) w16_x1, w16_x1);
+            w16_x2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
+            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_x1, w16_x2);
+            w16_x3 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
+            w32_tmp
+                = (WebRtc_Word32) WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) WebRtcNetEQ_kMixFractionFuncTbl[0], 14);
+            w32_tmp
+                += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[1], w16_x1);
+            w32_tmp
+                += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[2], w16_x2);
+            w32_tmp
+                += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[3], w16_x3);
+            ExpandState->w16_vFraction = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 12);
+            ExpandState->w16_vFraction = WEBRTC_SPL_MIN(ExpandState->w16_vFraction, 16384);
+            ExpandState->w16_vFraction = WEBRTC_SPL_MAX(ExpandState->w16_vFraction, 0);
+        }
+        else
+        {
+            ExpandState->w16_vFraction = 0;
+        }
+
+        /***********************************************************************
+         * Calculate muting slope, reuse value from earlier scaling of ExpVecs *
+         ***********************************************************************/
+        w16_slope = w16_factor;
+
+        if (w16_slope > 12288)
+        {
+            /* w16_slope > 1.5 ? */
+            /* Calculate (1-(1/slope))/w16_DistLag = (slope-1)/(w16_DistLag*slope) */
+            w32_tmp = w16_slope - 8192;
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 12); /* Value in Q25 (13+12=25) */
+            w16_tmp = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(w16_DistLag,
+                w16_slope, 8); /* Value in Q5  (13-8=5)  */
+            w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+                w16_tmp); /* Res in Q20 (25-5=20) */
+
+            if (w16_slope > 14746)
+            { /* w16_slope > 1.8 ? */
+                ExpandState->w16_muteSlope = (w16_tmp + 1) >> 1;
+            }
+            else
+            {
+                ExpandState->w16_muteSlope = (w16_tmp + 4) >> 3;
+            }
+            ExpandState->w16_onset = 1;
+        }
+        else if (ExpandState->w16_vFraction > 13107)
+        {
+            /* w16_vFraction > 0.8 ? */
+            if (w16_slope > 8028)
+            {
+                /* w16_vFraction > 0.98 ? */
+                ExpandState->w16_muteSlope = 0;
+            }
+            else
+            {
+                /* Calculate (1-slope)/w16_DistLag */
+                w32_tmp = 8192 - w16_slope;
+                w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
+                ExpandState->w16_muteSlope = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+                    w16_DistLag); /* Res in Q20 (20-0=20) */
+            }
+            ExpandState->w16_onset = 0;
+        }
+        else
+        {
+            /*
+             * Use the minimum of 0.005 (0.9 on 50 samples in NB and the slope)
+             * and ((1-slope)/w16_DistLag)
+             */
+            w32_tmp = 8192 - w16_slope;
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
+            w32_tmp = WEBRTC_SPL_MAX(w32_tmp, 0);
+            ExpandState->w16_muteSlope = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+                w16_DistLag); /* Res   in Q20    (20-0=20) */
+            w16_tmp = WebRtcNetEQ_k5243div[fs_mult]; /* 0.005/fs_mult = 5243/fs_mult */
+            ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(w16_tmp, ExpandState->w16_muteSlope);
+            ExpandState->w16_onset = 0;
+        }
+    }
+    else
+    {
+        /* This is not the first Expansion, parameters are already estimated. */
+
+        /* Extract a noise segment */
+        if (BGNonly) /* If we should produce nothing but background noise */
+        {
+            if (*pw16_len > 0)
+            {
+                /*
+                 * Set length to input parameter length, but not more than length
+                 * of pw16_randVec
+                 */
+                w16_lag = WEBRTC_SPL_MIN(*pw16_len, FSMULT * 120 + 30);
+            }
+            else
+            {
+                /* set length to 15 ms */
+                w16_lag = fsMult120;
+            }
+            w16_randLen = w16_lag;
+        }
+        else
+        {
+            w16_randLen = ExpandState->w16_maxLag;
+        }
+
+        if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
+        {
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, w16_randLen,
+                inst->w16_seedInc);
+        }
+        else
+        { /* only applies to SWB where length could be larger than 256 */
+#if FSMULT >= 2  /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, RANDVEC_NO_OF_SAMPLES,
+                inst->w16_seedInc);
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            assert(w16_randLen <= FSMULT * 120 + 30);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
+                (WebRtc_Word16) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
+#else
+            assert(0);
+#endif
+        }
+    } /* end if(first expand or BGNonly) ... else ... */
+
+    if (!BGNonly) /* Voiced and unvoiced parts not used if generating BGN only */
+    {
+
+        /*************************************************
+         * Generate signal                               *
+         *************************************************/
+
+        /*
+         * Voiced part
+         */
+
+        /* Linearly mute the use_vfraction value from 1 to vfraction */
+        if (ExpandState->w16_consecExp == 0)
+        {
+            ExpandState->w16_currentVFraction = 16384; /* 1.0 in Q14 */
+        }
+
+        ExpandState->w16_lagsPosition = ExpandState->w16_lagsPosition
+            + ExpandState->w16_lagsDirection;
+
+        /* Change direction if needed */
+        if (ExpandState->w16_lagsPosition == 0)
+        {
+            ExpandState->w16_lagsDirection = 1;
+        }
+        if (ExpandState->w16_lagsPosition == 2)
+        {
+            ExpandState->w16_lagsDirection = -1;
+        }
+
+        /* Generate a weighted vector with the selected lag */
+        w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
+        w16_lag = ExpandState->w16_lags[ExpandState->w16_lagsPosition];
+        /* Copy lag+overlap data */
+        w16_expVecPos = w16_expVecsLen - w16_lag - ExpandState->w16_overlap;
+        w16_tmp = w16_lag + ExpandState->w16_overlap;
+        if (ExpandState->w16_lagsPosition == 0)
+        {
+            WEBRTC_SPL_MEMCPY_W16(pw16_voicedVecStorage,
+                &(ExpandState->pw16_expVecs[0][w16_expVecPos]), w16_tmp);
+        }
+        else if (ExpandState->w16_lagsPosition == 1)
+        {
+            WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 3,
+                &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 2, pw16_voicedVecStorage,
+                w16_tmp);
+
+        }
+        else if (ExpandState->w16_lagsPosition == 2)
+        {
+            WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 1,
+                &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 1, pw16_voicedVecStorage,
+                w16_tmp);
+        }
+
+        if (inst->fs == 8000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
+#ifdef NETEQ_WIDEBAND
+        }
+        else if (inst->fs == 16000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        }
+        else if (inst->fs == 32000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        }
+        else /* if (inst->fs==48000) */
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
+#endif
+        }
+
+        /* Smooth the expanded if it has not been muted to or vfraction is larger than 0.5 */
+        if ((ExpandState->w16_expandMuteFactor > 819) && (ExpandState->w16_currentVFraction
+            > 8192))
+        {
+            for (i = 0; i < ExpandState->w16_overlap; i++)
+            {
+                /* Do overlap add between new vector and overlap */
+                ExpandState->pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                    WEBRTC_SPL_MUL_16_16(ExpandState->pw16_overlapVec[i], w16_winMute) +
+                    WEBRTC_SPL_MUL_16_16(
+                        WEBRTC_SPL_MUL_16_16_RSFT(ExpandState->w16_expandMuteFactor,
+                            pw16_voicedVecStorage[i], 14), w16_winUnMute) + 16384, 15);
+                w16_winMute += w16_winMuteInc;
+                w16_winUnMute += w16_winUnMuteInc;
+            }
+        }
+        else if (ExpandState->w16_expandMuteFactor == 0
+#ifdef NETEQ_STEREO
+            && msInfo->msMode == NETEQ_MONO /* only if mono mode is selected */
+#endif
+        )
+        {
+            /* if ExpandState->w16_expandMuteFactor = 0 => all is CNG component 
+             set the output length to 15ms (for best CNG production) */
+            w16_tmp = fsMult120;
+            ExpandState->w16_maxLag = w16_tmp;
+            ExpandState->w16_lags[0] = w16_tmp;
+            ExpandState->w16_lags[1] = w16_tmp;
+            ExpandState->w16_lags[2] = w16_tmp;
+        }
+
+        /*
+         * Unvoiced part
+         */
+
+        WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
+            ExpandState->pw16_arState,
+            UNVOICED_LPC_ORDER);
+        if (ExpandState->w16_arGainScale > 0)
+        {
+            w32_tmp = ((WebRtc_Word32) 1) << (ExpandState->w16_arGainScale - 1);
+        }
+        else
+        {
+            w32_tmp = 0;
+        }
+
+        /* Note that shift value can be >16 which complicates things for some DSPs */
+        WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
+            ExpandState->w16_arGain, w32_tmp, ExpandState->w16_arGainScale, w16_lag);
+
+        WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_unvoicedVec,
+            ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, w16_lag);
+
+        WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
+            &(pw16_unvoicedVec[w16_lag - UNVOICED_LPC_ORDER]),
+            UNVOICED_LPC_ORDER);
+
+        /*
+         * Voiced + Unvoiced
+         */
+
+        /* For lag = 
+         <=31*fs_mult         => go from 1 to 0 in about 8 ms
+         (>=31..<=63)*fs_mult => go from 1 to 0 in about 16 ms
+         >=64*fs_mult         => go from 1 to 0 in about 32 ms
+         */
+        w16_tmp = (31 - WebRtcSpl_NormW32(ExpandState->w16_maxLag)) - 5; /* getbits(w16_maxLag) -5 */
+        w16_vfractionChange = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(256, w16_tmp);
+        if (ExpandState->w16_stopMuting == 1)
+        {
+            w16_vfractionChange = 0;
+        }
+
+        /* Create combined signal (unmuted) by shifting in more and more of unvoiced part */
+        w16_tmp = 8 - w16_tmp; /* getbits(w16_vfractionChange) */
+        w16_tmp = (ExpandState->w16_currentVFraction - ExpandState->w16_vFraction) >> w16_tmp;
+        w16_tmp = WEBRTC_SPL_MIN(w16_tmp, w16_lag);
+        WebRtcNetEQ_MixVoiceUnvoice(pw16_outData, pw16_voicedVec, pw16_unvoicedVec,
+            &ExpandState->w16_currentVFraction, w16_vfractionChange, w16_tmp);
+
+        if (w16_tmp < w16_lag)
+        {
+            if (w16_vfractionChange != 0)
+            {
+                ExpandState->w16_currentVFraction = ExpandState->w16_vFraction;
+            }
+            w16_tmp2 = 16384 - ExpandState->w16_currentVFraction;
+            WebRtcSpl_ScaleAndAddVectorsWithRound(pw16_voicedVec + w16_tmp,
+                ExpandState->w16_currentVFraction, pw16_unvoicedVec + w16_tmp, w16_tmp2, 14,
+                pw16_outData + w16_tmp, (WebRtc_Word16) (w16_lag - w16_tmp));
+        }
+
+        /* Select muting factor */
+        if (ExpandState->w16_consecExp == 3)
+        {
+            /* 0.95 on 50 samples in NB (0.0010/fs_mult in Q20) */
+            ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
+                WebRtcNetEQ_k1049div[fs_mult]);
+        }
+        if (ExpandState->w16_consecExp == 7)
+        {
+            /* 0.90 on 50 samples in NB (0.0020/fs_mult in Q20) */
+            ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
+                WebRtcNetEQ_k2097div[fs_mult]);
+        }
+
+        /* Mute segment according to slope value */
+        if ((ExpandState->w16_consecExp != 0) || (ExpandState->w16_onset != 1))
+        {
+            /* Mute to the previous level, then continue with the muting */
+            WebRtcSpl_AffineTransformVector(pw16_outData, pw16_outData,
+                ExpandState->w16_expandMuteFactor, 8192, 14, w16_lag);
+
+            if ((ExpandState->w16_stopMuting != 1))
+            {
+                WebRtcNetEQ_MuteSignal(pw16_outData, ExpandState->w16_muteSlope, w16_lag);
+
+                w16_tmp = 16384 - (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_lag,
+                    ExpandState->w16_muteSlope) + 8192) >> 6); /* 20-14 = 6 */
+                w16_tmp = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_tmp,
+                    ExpandState->w16_expandMuteFactor) + 8192) >> 14);
+
+                /* Guard against getting stuck with very small (but sometimes audible) gain */
+                if ((ExpandState->w16_consecExp > 3) && (w16_tmp
+                    >= ExpandState->w16_expandMuteFactor))
+                {
+                    ExpandState->w16_expandMuteFactor = 0;
+                }
+                else
+                {
+                    ExpandState->w16_expandMuteFactor = w16_tmp;
+                }
+            }
+        }
+
+    } /* end if(!BGNonly) */
+
+    /*
+     * BGN
+     */
+
+    if (BGNState->w16_initialized == 1)
+    {
+        /* BGN parameters are initialized; use them */
+
+        WEBRTC_SPL_MEMCPY_W16(pw16_cngVec - BGN_LPC_ORDER,
+            BGNState->pw16_filterState,
+            BGN_LPC_ORDER);
+
+        if (BGNState->w16_scaleShift > 1)
+        {
+            w32_tmp = ((WebRtc_Word32) 1) << (BGNState->w16_scaleShift - 1);
+        }
+        else
+        {
+            w32_tmp = 0;
+        }
+
+        /* Scale random vector to correct energy level */
+        /* Note that shift value can be >16 which complicates things for some DSPs */
+        WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
+            BGNState->w16_scale, w32_tmp, BGNState->w16_scaleShift, w16_lag);
+
+        WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_cngVec, BGNState->pw16_filter,
+            BGN_LPC_ORDER + 1, w16_lag);
+
+        WEBRTC_SPL_MEMCPY_W16(BGNState->pw16_filterState,
+            &(pw16_cngVec[w16_lag-BGN_LPC_ORDER]),
+            BGN_LPC_ORDER);
+
+        /* Unmute the insertion of background noise */
+
+        if (bgnMode == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME
+            && BGNState->w16_mutefactor > 0)
+        {
+            /* fade BGN to zero */
+            /* calculate muting slope, approx 2^18/fsHz */
+            WebRtc_Word16 muteFactor;
+            if (fs_mult == 1)
+            {
+                muteFactor = -32;
+            }
+            else if (fs_mult == 2)
+            {
+                muteFactor = -16;
+            }
+            else if (fs_mult == 4)
+            {
+                muteFactor = -8;
+            }
+            else
+            {
+                muteFactor = -5;
+            }
+            /* use UnmuteSignal function with negative slope */
+            WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
+            pw16_cngVec, muteFactor, /* In Q20 */
+            w16_lag);
+        }
+        else if (BGNState->w16_mutefactor < 16384 && !BGNonly)
+        {
+            /* if (w16_mutefactor < 1)  and not BGN only (since then we use no muting) */
+
+            /*
+             * If BGN_OFF, or if BNG_FADE has started fading,
+             * mutefactor should not be increased.
+             */
+            if (ExpandState->w16_stopMuting != 1 && bgnMode != BGN_OFF && !(bgnMode
+                == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME))
+            {
+                WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
+                pw16_cngVec, ExpandState->w16_muteSlope, /* In Q20 */
+                w16_lag);
+            }
+            else
+            {
+                /* BGN_ON and stop muting, or
+                 * BGN_OFF (mute factor is always 0), or
+                 * BGN_FADE has reached 0 */
+                WebRtcSpl_AffineTransformVector(pw16_cngVec, pw16_cngVec,
+                    BGNState->w16_mutefactor, 8192, 14, w16_lag);
+            }
+        }
+    }
+    else
+    {
+        /* BGN parameters have not been initialized; use zero noise */
+        WebRtcSpl_MemSetW16(pw16_cngVec, 0, w16_lag);
+    }
+
+    if (BGNonly)
+    {
+        /* Copy BGN to outdata */
+        for (i = 0; i < w16_lag; i++)
+        {
+            pw16_outData[i] = pw16_cngVec[i];
+        }
+    }
+    else
+    {
+        /* Add CNG vector to the Voiced + Unvoiced vectors */
+        for (i = 0; i < w16_lag; i++)
+        {
+            pw16_outData[i] = pw16_outData[i] + pw16_cngVec[i];
+        }
+
+        /* increase call number */
+        ExpandState->w16_consecExp = ExpandState->w16_consecExp + 1;
+        if (ExpandState->w16_consecExp < 0) /* Guard against overflow */
+            ExpandState->w16_consecExp = FADE_BGN_TIME; /* "Arbitrary" large num of expands */
+    }
+
+    inst->w16_mode = MODE_EXPAND;
+    *pw16_len = w16_lag;
+
+    /* Update in-call and post-call statistics */
+    if (ExpandState->w16_stopMuting != 1 || BGNonly)
+    {
+        /*
+         * Only do this if StopMuting != 1 or if explicitly BGNonly, otherwise Expand is
+         * called from Merge or Normal and special measures must be taken.
+         */
+        inst->statInst.expandLength += (WebRtc_UWord32) *pw16_len;
+        if (ExpandState->w16_expandMuteFactor == 0 || BGNonly)
+        {
+            /* Only noise expansion */
+            inst->statInst.expandedNoiseSamples += *pw16_len;
+        }
+        else
+        {
+            /* Voice expand (note: not necessarily _voiced_) */
+            inst->statInst.expandedVoiceSamples += *pw16_len;
+        }
+    }
+
+    return 0;
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_GenerateBGN(...)
+ *
+ * This function generates and writes len samples of background noise to the
+ * output vector. The Expand function will be called repeatedly until the
+ * correct number of samples is produced.
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance in which the background
+ *                        noise is generated
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - len           : Desired length of produced BGN.
+ *
+ *
+ * Output:
+ *      - pw16_outData  : Pointer to a memory space where the output data
+ *                        should be stored
+ *
+ * Return value         : >=0 - Number of noise samples produced and written
+ *                              to output
+ *                        -1  - Error
+ */
+
+int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
+#ifdef SCRATCH
+                            WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                            WebRtc_Word16 *pw16_outData, WebRtc_Word16 len)
+{
+
+    WebRtc_Word16 pos = 0;       /* total number of noise samples produced so far */
+    WebRtc_Word16 tempLen = len; /* in: samples still needed; out: samples produced */
+
+    while (tempLen > 0)
+    {
+        /* While we still need more noise samples, call Expand in BGN-only mode. */
+        if (WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+            pw16_scratchPtr,
+#endif
+            &pw16_outData[pos], &tempLen, 1 /*BGNonly*/) != 0)
+        {
+            /* Expand failed; propagate the error as documented in the header
+             * comment above (previously the return value was ignored). */
+            return -1;
+        }
+
+        if (tempLen <= 0)
+        {
+            /* Expand made no progress; bail out instead of looping forever. */
+            return -1;
+        }
+
+        pos += tempLen; /* we got this many samples */
+        tempLen = len - pos; /* this is the number of samples we still need */
+    }
+
+    return pos;
+}
+
+/* Release the file-local scratch-buffer offset macros (defined earlier in
+ * this file, outside this chunk) now that all users of the SCRATCH layout
+ * are done, so the names cannot leak into any code that follows. */
+#undef   SCRATCH_PW16_BEST_CORR_INDEX
+#undef   SCRATCH_PW16_BEST_CORR
+#undef   SCRATCH_PW16_BEST_DIST_INDEX
+#undef   SCRATCH_PW16_BEST_DIST
+#undef   SCRATCH_PW16_CORR_VEC
+#undef   SCRATCH_PW16_CORR2
+#undef   SCRATCH_PW32_AUTO_CORR
+#undef   SCRATCH_PW16_RC
+#undef   SCRATCH_PW16_RAND_VEC
+#undef   SCRATCH_NETEQDSP_CORRELATOR
+#undef   SCRATCH_PW16_SCALED_RAND_VEC
+#undef   SCRATCH_PW16_UNVOICED_VEC_SPACE
+
diff --git a/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq.h b/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq.h
new file mode 100644
index 0000000..909131b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq.h
@@ -0,0 +1,215 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the main API for NetEQ. Helper macros are located in webrtc_neteq_help_macros.h,
+ * while some internal API functions are found in webrtc_neteq_internal.h.
+ */
+
+#include "typedefs.h"
+
+#ifndef WEBRTC_NETEQ_H
+#define WEBRTC_NETEQ_H
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/**********************************************************
+ * Definitions
+ */
+
+enum WebRtcNetEQDecoder
+{
+    kDecoderReservedStart,
+    kDecoderPCMu,
+    kDecoderPCMa,
+    kDecoderILBC,
+    kDecoderISAC,
+    kDecoderISACswb,
+    kDecoderPCM16B,
+    kDecoderPCM16Bwb,
+    kDecoderPCM16Bswb32kHz,
+    kDecoderPCM16Bswb48kHz,
+    kDecoderG722,
+    kDecoderRED,
+    kDecoderAVT,
+    kDecoderCNG,
+    kDecoderArbitrary,
+    kDecoderG729,
+    kDecoderG729_1,
+    kDecoderG726_16,
+    kDecoderG726_24,
+    kDecoderG726_32,
+    kDecoderG726_40,
+    kDecoderG722_1_16,
+    kDecoderG722_1_24,
+    kDecoderG722_1_32,
+    kDecoderG722_1C_24,
+    kDecoderG722_1C_32,
+    kDecoderG722_1C_48,
+    kDecoderSPEEX_8,
+    kDecoderSPEEX_16,
+    kDecoderCELT_32,
+    kDecoderGSMFR,
+    kDecoderAMR,
+    kDecoderAMRWB,
+    kDecoderReservedEnd
+};
+
+enum WebRtcNetEQNetworkType
+{
+    kUDPNormal,
+    kUDPVideoSync,
+    kTCPNormal,
+    kTCPLargeJitter,
+    kTCPXLargeJitter
+};
+
+enum WebRtcNetEQOutputType
+{
+    kOutputNormal,
+    kOutputPLC,
+    kOutputCNG,
+    kOutputPLCtoCNG,
+    kOutputVADPassive
+};
+
+enum WebRtcNetEQPlayoutMode
+{
+    kPlayoutOn, kPlayoutOff, kPlayoutFax, kPlayoutStreaming
+};
+
+/* Available modes for background noise (inserted after long expands) */
+enum WebRtcNetEQBGNMode
+{
+    kBGNOn, /* default "normal" behavior with eternal noise */
+    kBGNFade, /* noise fades to zero after some time */
+    kBGNOff
+    /* background noise is always zero */
+};
+
+/*************************************************
+ * Definitions of decoder calls and the default
+ * API function calls for each codec
+ */
+
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecode)(void* state, WebRtc_Word16* encoded,
+                                                WebRtc_Word16 len, WebRtc_Word16* decoded,
+                                                WebRtc_Word16* speechType);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodePLC)(void* state, WebRtc_Word16* decoded,
+                                                   WebRtc_Word16 frames);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodeInit)(void* state);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncAddLatePkt)(void* state, WebRtc_Word16* encoded,
+                                                    WebRtc_Word16 len);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
+                                                      WebRtc_Word16* length);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
+                                                  WebRtc_Word32 packet_size,
+                                                  WebRtc_UWord16 rtp_seq_number,
+                                                  WebRtc_UWord32 send_ts,
+                                                  WebRtc_UWord32 arr_ts);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
+
+/**********************************************************
+ * Structures
+ */
+
+typedef struct
+{
+    enum WebRtcNetEQDecoder codec;
+    WebRtc_Word16 payloadType;
+    WebRtcNetEQ_FuncDecode funcDecode;
+    WebRtcNetEQ_FuncDecode funcDecodeRCU;
+    WebRtcNetEQ_FuncDecodePLC funcDecodePLC;
+    WebRtcNetEQ_FuncDecodeInit funcDecodeInit;
+    WebRtcNetEQ_FuncAddLatePkt funcAddLatePkt;
+    WebRtcNetEQ_FuncGetMDinfo funcGetMDinfo;
+    WebRtcNetEQ_FuncGetPitchInfo funcGetPitch;
+    WebRtcNetEQ_FuncUpdBWEst funcUpdBWEst;
+    WebRtcNetEQ_FuncGetErrorCode funcGetErrorCode;
+    void* codec_state;
+    WebRtc_UWord16 codec_fs;
+} WebRtcNetEQ_CodecDef;
+
+typedef struct
+{
+    WebRtc_UWord16 fraction_lost;
+    WebRtc_UWord32 cum_lost;
+    WebRtc_UWord32 ext_max;
+    WebRtc_UWord32 jitter;
+} WebRtcNetEQ_RTCPStat;
+
+/**********************************************************
+ * NETEQ Functions
+ */
+
+/* Info functions */
+
+#define WEBRTC_NETEQ_MAX_ERROR_NAME 40
+int WebRtcNetEQ_GetVersion(char *version);
+int WebRtcNetEQ_GetErrorCode(void *inst);
+int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen);
+
+/* Instance memory assign functions */
+
+int WebRtcNetEQ_AssignSize(int *sizeinbytes);
+int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr);
+int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
+                                         int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
+                                         int *MaxNoOfPackets, int *sizeinbytes);
+int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
+                             int sizeinbytes);
+
+/* Init functions */
+
+int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs);
+int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon);
+int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs);
+int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode);
+int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode);
+int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode);
+
+/* Codec Database functions */
+
+int WebRtcNetEQ_CodecDbReset(void *inst);
+int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst);
+int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec);
+int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
+                                   WebRtc_Word16 *MaxEntries);
+int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
+                                    enum WebRtcNetEQDecoder *codec);
+
+/* Real-time functions */
+
+int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
+                      WebRtc_UWord32 uw32_timeRec);
+int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
+int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
+int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
+int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp);
+int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType);
+
+/* VQmon related functions */
+int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
+                                      WebRtc_UWord16 *concealedVoiceDurationMs,
+                                      WebRtc_UWord8 *concealedVoiceFlags);
+int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
+                                      WebRtc_UWord8 *adaptationRate);
+int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
+                                     WebRtc_UWord16 *maxDelayMs);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h b/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
new file mode 100644
index 0000000..214bd10
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
@@ -0,0 +1,387 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some helper macros that can be used when loading the
+ * NetEQ codec database.
+ */
+
+#ifndef WEBRTC_NETEQ_HELP_MACROS_H
+#define WEBRTC_NETEQ_HELP_MACROS_H
+
+#ifndef NULL
+#define NULL 0
+#endif
+
+/**********************************************************
+ * Help macros for NetEQ initialization
+ */
+
+#define SET_CODEC_PAR(inst,decoder,pt,state,fs) \
+                    inst.codec=decoder; \
+                    inst.payloadType=pt; \
+                    inst.codec_state=state; \
+                    inst.codec_fs=fs;
+
+#define SET_PCMU_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeU; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCMA_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeA; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_ILBC_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIlbcfix_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcIlbcfix_NetEqPlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIlbcfix_Decoderinit30Ms; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_ISAC_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
+                    inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
+                    inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
+
+#define SET_ISACfix_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsacfix_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsacfix_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsacfix_UpdateBwEstimate; \
+                    inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsacfix_GetErrorCode;
+
+#define SET_ISACSWB_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
+                    inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
+                    inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
+
+#define SET_G729_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG729_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG729_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG729_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G729_1_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7291_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7291_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcG7291_DecodeBwe; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCM16B_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCM16B_WB_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCM16B_SWB32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+
+#define SET_PCM16B_SWB48_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG722_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG722_DecoderInit;\
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1_16_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc16; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit16; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1_24_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode24; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc24; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit24; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1_32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode32; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc32; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit32; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1C_24_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode24; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc24; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit24; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1C_32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode32; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc32; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit32; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1C_48_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode48; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc48; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit48; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_AMR_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmr_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmr_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmr_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_AMRWB_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmrWb_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmrWb_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmrWb_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_GSMFR_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcGSMFR_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcGSMFR_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcGSMFR_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_16_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit16; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_24_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode24; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit24; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode32; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit32; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_40_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode40; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit40; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_SPEEX_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcSpeex_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcSpeex_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcSpeex_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_CELT_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcCelt_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_CELTSLAVE_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_DecodeSlave; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcCelt_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_RED_FUNCTIONS(inst) \
+                    inst.funcDecode=NULL; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_AVT_FUNCTIONS(inst) \
+                    inst.funcDecode=NULL; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_CNG_FUNCTIONS(inst) \
+                    inst.funcDecode=NULL; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#endif /* WEBRTC_NETEQ_HELP_MACROS_H */
+
diff --git a/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h b/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
new file mode 100644
index 0000000..70ed044
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
@@ -0,0 +1,236 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the internal API functions.
+ */
+
+#include "typedefs.h"
+
+#ifndef WEBRTC_NETEQ_INTERNAL_H
+#define WEBRTC_NETEQ_INTERNAL_H
+
+#ifdef __cplusplus 
+extern "C"
+{
+#endif
+
+typedef struct
+{
+    WebRtc_UWord8 payloadType;
+    WebRtc_UWord16 sequenceNumber;
+    WebRtc_UWord32 timeStamp;
+    WebRtc_UWord32 SSRC;
+    WebRtc_UWord8 markerBit;
+} WebRtcNetEQ_RTPInfo;
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInRTPStruct(...)
+ *
+ * Alternative RecIn function, used when the RTP data has already been
+ * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
+ *
+ * Input:
+ *		- inst	            : NetEQ instance
+ *		- rtpInfo		    : Pointer to RTP info
+ *		- payloadPtr        : Pointer to the RTP payload (first byte after header)
+ *      - payloadLenBytes   : Length (in bytes) of the payload in payloadPtr
+ *      - timeRec           : Receive time (in timestamps of the used codec)
+ *
+ * Return value			    :  0 - Ok
+ *                            -1 - Error
+ */
+int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
+                               const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
+                               WebRtc_UWord32 timeRec);
+
+/****************************************************************************
+ * WebRtcNetEQ_GetMasterSlaveInfoSize(...)
+ *
+ * Get size in bytes for master/slave struct msInfo used in 
+ * WebRtcNetEQ_RecOutMasterSlave.
+ *
+ * Return value			    :  Struct size in bytes
+ * 
+ */
+
+int WebRtcNetEQ_GetMasterSlaveInfoSize();
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutMasterSlave(...)
+ *
+ * RecOut function for running several NetEQ instances in master/slave mode.
+ * One master can be used to control several slaves. 
+ * The MasterSlaveInfo struct must be allocated outside NetEQ.
+ * Use function WebRtcNetEQ_GetMasterSlaveInfoSize to get the size needed.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *      - isMaster      : Non-zero indicates that this is the master channel
+ *      - msInfo        : (slave only) Information from master
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *      - pw16_outData  : Pointer to vector where output should be written
+ *      - pw16_len      : Pointer to variable where output length is returned
+ *      - msInfo        : (master only) Information to slave(s)
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
+                                  WebRtc_Word16 *pw16_len, void *msInfo,
+                                  WebRtc_Word16 isMaster);
+
+typedef struct
+{
+    uint16_t currentBufferSize;         /* Current jitter buffer size in ms. */
+    uint16_t preferredBufferSize;       /* Preferred buffer size in ms. */
+    uint16_t jitterPeaksFound;          /* 1 if adding extra delay due to peaky
+                                         * jitter; 0 otherwise. */
+    uint16_t currentPacketLossRate;     /* Loss rate (network + late) (Q14). */
+    uint16_t currentDiscardRate;        /* Late loss rate (Q14). */
+    uint16_t currentExpandRate;         /* Fraction (of original stream) of
+                                         * synthesized speech inserted through
+                                         * expansion (in Q14). */
+    uint16_t currentPreemptiveRate;     /* Fraction of data inserted through
+                                         * pre-emptive expansion (in Q14). */
+    uint16_t currentAccelerateRate;     /* Fraction of data removed through
+                                         * acceleration (in Q14). */
+    int32_t clockDriftPPM;              /* Average clock-drift in parts-per-
+                                         * million (positive or negative). */
+} WebRtcNetEQ_NetworkStatistics;
+
+/*
+ * Get the "in-call" statistics from NetEQ.
+ * The statistics are reset after the query.
+ */
+int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats);
+
+/*
+ * Get the raw waiting times for decoded frames. The function writes the last
+ * recorded waiting times (from frame arrival to frame decoding) to the memory
+ * pointed to by waiting_times_ms. The number of elements written is the return
+ * value. No more than max_length elements are written. Statistics are reset on
+ * each query.
+ */
+int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
+                                        int max_length,
+                                        int* waiting_times_ms);
+
+/***********************************************/
+/* Functions for post-decode VAD functionality */
+/***********************************************/
+
+/* NetEQ must be compiled with the flag NETEQ_VAD enabled for these functions to work. */
+
+/*
+ * VAD function pointer types
+ *
+ * These function pointers match the definitions of webrtc VAD functions WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
+ */
+typedef int (*WebRtcNetEQ_VADInitFunction)(void *VAD_inst);
+typedef int (*WebRtcNetEQ_VADSetmodeFunction)(void *VAD_inst, int mode);
+typedef WebRtc_Word16 (*WebRtcNetEQ_VADFunction)(void *VAD_inst, WebRtc_Word16 fs,
+                                                 WebRtc_Word16 *frame, WebRtc_Word16 frameLen);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADInstance(...)
+ *
+ * Provide a pointer to an allocated VAD instance. If function is never 
+ * called or it is called with NULL pointer as VAD_inst, the post-decode
+ * VAD functionality is disabled. Also provide pointers to init, setmode
+ * and VAD functions. These are typically pointers to WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
+ * interface file webrtc_vad.h.
+ *
+ * Input:
+ *      - NetEQ_inst        : NetEQ instance
+ *		- VADinst		    : VAD instance
+ *		- initFunction	    : Pointer to VAD init function
+ *		- setmodeFunction   : Pointer to VAD setmode function
+ *		- VADfunction	    : Pointer to VAD function
+ *
+ * Output:
+ *		- NetEQ_inst	    : Updated NetEQ instance
+ *
+ * Return value			    :  0 - Ok
+ *						      -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
+                               WebRtcNetEQ_VADInitFunction initFunction,
+                               WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
+                               WebRtcNetEQ_VADFunction VADFunction);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADMode(...)
+ *
+ * Pass an aggressiveness mode parameter to the post-decode VAD instance.
+ * If this function is never called, mode 0 (quality mode) is used as default.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *		- mode  		: mode parameter (same range as WebRtc VAD mode)
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADMode(void *NetEQ_inst, int mode);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutNoDecode(...)
+ *
+ * Special RecOut that does not do any decoding.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *      - pw16_outData  : Pointer to vector where output should be written
+ *      - pw16_len      : Pointer to variable where output length is returned
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
+                               WebRtc_Word16 *pw16_len);
+
+/****************************************************************************
+ * WebRtcNetEQ_FlushBuffers(...)
+ *
+ * Flush packet and speech buffers. Does not reset codec database or 
+ * jitter statistics.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_FlushBuffers(void *inst);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/mcu.h b/trunk/src/modules/audio_coding/neteq/mcu.h
new file mode 100644
index 0000000..499684a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mcu.h
@@ -0,0 +1,284 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * MCU struct and functions related to the MCU side operations.
+ */
+
+#ifndef MCU_H
+#define MCU_H
+
+#include "typedefs.h"
+
+#include "codec_db.h"
+#include "rtcp.h"
+#include "packet_buffer.h"
+#include "buffer_stats.h"
+#include "neteq_statistics.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+#include "dtmf_buffer.h"
+#endif
+
+#define MAX_ONE_DESC 5 /* cannot do more than this many consecutive one-descriptor decodings */
+#define MAX_LOSS_REPORT_PERIOD 60   /* number of seconds between auto-reset */
+
+enum TsScaling
+{
+    kTSnoScaling = 0,
+    kTSscalingTwo,
+    kTSscalingTwoThirds,
+    kTSscalingFourThirds
+};
+
+enum { kLenWaitingTimes = 100 };
+
+typedef struct
+{
+
+    WebRtc_Word16 current_Codec;
+    WebRtc_Word16 current_Payload;
+    WebRtc_UWord32 timeStamp; /* Next timestamp that should be played */
+    WebRtc_Word16 millisecondsPerCall;
+    WebRtc_UWord16 timestampsPerCall; /* Output chunk size */
+    WebRtc_UWord16 fs;
+    WebRtc_UWord32 ssrc; /* Current ssrc */
+    WebRtc_Word16 new_codec;
+    WebRtc_Word16 first_packet;
+
+    /* MCU/DSP Communication layer */
+    WebRtc_Word16 *pw16_readAddress;
+    WebRtc_Word16 *pw16_writeAddress;
+    void *main_inst;
+
+    CodecDbInst_t codec_DB_inst; /* Information about all the codecs, i.e. which
+     functions to use and which codepoints
+     have been assigned */
+    SplitInfo_t PayloadSplit_inst; /* Information about how the current codec
+     payload should be split */
+    WebRtcNetEQ_RTCP_t RTCP_inst; /* RTCP statistics */
+    PacketBuf_t PacketBuffer_inst; /* The packet buffer */
+    BufstatsInst_t BufferStat_inst; /* Statistics that are used to make decision
+     for what the DSP should perform */
+#ifdef NETEQ_ATEVENT_DECODE
+    dtmf_inst_t DTMF_inst;
+#endif
+    int NoOfExpandCalls;
+    WebRtc_Word16 AVT_PlayoutOn;
+    enum WebRtcNetEQPlayoutMode NetEqPlayoutMode;
+
+    WebRtc_Word16 one_desc; /* Number of times running on one desc */
+
+    WebRtc_UWord32 lostTS; /* Number of timestamps lost */
+    WebRtc_UWord32 lastReportTS; /* Timestamp elapsed since last report was given */
+
+    int waiting_times[kLenWaitingTimes];  /* Waiting time statistics storage. */
+    int len_waiting_times;
+    int next_waiting_time_index;
+
+    WebRtc_UWord32 externalTS;
+    WebRtc_UWord32 internalTS;
+    WebRtc_Word16 TSscalingInitialized;
+    enum TsScaling scalingFactor;
+
+#ifdef NETEQ_STEREO
+    int usingStereo;
+#endif
+
+} MCUInst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_McuReset(...)
+ *
+ * Reset the MCU instance.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_McuReset(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetMcuInCallStats(...)
+ *
+ * Reset MCU-side statistics variables for the in-call statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetWaitingTimeStats(...)
+ *
+ * Reset waiting-time statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance.
+ *
+ * Return value         : n/a
+ */
+void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_StoreWaitingTime(...)
+ *
+ * Log waiting-time to the statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance.
+ *      - waiting_time  : Waiting time in "RecOut calls" (i.e., 1 call = 10 ms).
+ *
+ * Return value         : n/a
+ */
+void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetMcuJitterStat(...)
+ *
+ * Reset MCU-side statistics variables for the post-call statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_McuAddressInit(...)
+ *
+ * Initializes MCU with read address and write address.
+ *
+ * Input:
+ *      - inst              : MCU instance
+ *      - Data2McuAddress   : Pointer to MCU address
+ *      - Data2DspAddress   : Pointer to DSP address
+ *      - main_inst         : Pointer to NetEQ main instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
+                               void * Data2DspAddress, void *main_inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_McuSetFs(...)
+ *
+ * Sets the sample rate of the MCU instance.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *      - fs_hz         : Sample rate in Hz -- 8000, 16000, 32000, (48000)
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs_hz);
+
+/****************************************************************************
+ * WebRtcNetEQ_SignalMcu(...)
+ *
+ * Signal the MCU that data is available and ask for a RecOut decision.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_SignalMcu(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInInternal(...)
+ *
+ * This function inserts a packet into the jitter buffer.
+ *
+ * Input:
+ *		- MCU_inst		: MCU instance
+ *		- RTPpacket	    : The RTP packet, parsed into NetEQ's internal RTP struct
+ *		- uw32_timeRec	: Time stamp for the arrival of the packet (not RTP timestamp)
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacket,
+                              WebRtc_UWord32 uw32_timeRec);
+
+/****************************************************************************
+ * WebRtcNetEQ_SplitAndInsertPayload(...)
+ *
+ * Split the packet according to split_inst and insert the parts into
+ * Buffer_inst.
+ *
+ * Input:
+ *      - packet        : The RTP packet, parsed into NetEQ's internal RTP struct
+ *      - Buffer_inst   : Packet buffer that receives the payload parts
+ *      - split_inst    : Information about how the payload should be split
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
+                                      SplitInfo_t *split_inst, WebRtc_Word16 *flushed);
+
+/****************************************************************************
+ * WebRtcNetEQ_GetTimestampScaling(...)
+ *
+ * Update information about timestamp scaling for a payload type
+ * in MCU_inst->scalingFactor.
+ *
+ * Input:
+ *      - MCU_inst          : MCU instance
+ *      - rtpPayloadType    : RTP payload number
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType);
+
+/****************************************************************************
+ * WebRtcNetEQ_ScaleTimestampExternalToInternal(...)
+ *
+ * Convert from external to internal timestamp using current scaling info.
+ *
+ * Input:
+ *      - MCU_inst      : MCU instance
+ *      - externalTS    : External timestamp
+ *
+ * Return value         : Internal timestamp
+ */
+
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 externalTS);
+
+/****************************************************************************
+ * WebRtcNetEQ_ScaleTimestampInternalToExternal(...)
+ *
+ * Convert from internal to external timestamp using current scaling info.
+ *
+ * Input:
+ *      - MCU_inst      : MCU instance
+ *      - internalTS    : Internal timestamp
+ *
+ * Return value         : External timestamp
+ */
+
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 internalTS);
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/mcu_address_init.c b/trunk/src/modules/audio_coding/neteq/mcu_address_init.c
new file mode 100644
index 0000000..0306a85
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mcu_address_init.c
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "mcu.h"
+
+#include <string.h> /* to define NULL */
+
+/*
+ * Initializes MCU with read address and write address
+ */
+int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
+                               void * Data2DspAddress, void *main_inst)
+{
+
+    inst->pw16_readAddress = (WebRtc_Word16*) Data2McuAddress;
+    inst->pw16_writeAddress = (WebRtc_Word16*) Data2DspAddress;
+    inst->main_inst = main_inst;
+
+    inst->millisecondsPerCall = 10;
+
+    /* Do expansions in the beginning */
+    if (inst->pw16_writeAddress != NULL) inst->pw16_writeAddress[0] = DSP_INSTR_EXPAND;
+
+    return (0);
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/mcu_dsp_common.c b/trunk/src/modules/audio_coding/neteq/mcu_dsp_common.c
new file mode 100644
index 0000000..13025d4
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mcu_dsp_common.c
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Communication between MCU and DSP sides.
+ */
+
+#include "mcu_dsp_common.h"
+
+#include <string.h>
+
+/* Initialize instances with read and write address */
+int WebRtcNetEQ_DSPinit(MainInst_t *inst)
+{
+    int res = 0;
+
+    res |= WebRtcNetEQ_AddressInit(&inst->DSPinst, NULL, NULL, inst);
+    res |= WebRtcNetEQ_McuAddressInit(&inst->MCUinst, NULL, NULL, inst);
+
+    return res;
+
+}
+
+/* The DSP side will call this function to interrupt the MCU side */
+int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem)
+{
+    inst->MCUinst.pw16_readAddress = pw16_shared_mem;
+    inst->MCUinst.pw16_writeAddress = pw16_shared_mem;
+    return WebRtcNetEQ_SignalMcu(&inst->MCUinst);
+}
diff --git a/trunk/src/modules/audio_coding/neteq/mcu_dsp_common.h b/trunk/src/modules/audio_coding/neteq/mcu_dsp_common.h
new file mode 100644
index 0000000..e3f4213
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mcu_dsp_common.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The main NetEQ instance, which is where the DSP and MCU sides join.
+ */
+
+#ifndef MCU_DSP_COMMON_H
+#define MCU_DSP_COMMON_H
+
+#include "typedefs.h"
+
+#include "dsp.h"
+#include "mcu.h"
+
+/* Define size of shared memory area. */
+#if defined(NETEQ_48KHZ_WIDEBAND)
+    #define SHARED_MEM_SIZE (6*640)
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+    #define SHARED_MEM_SIZE (4*640)
+#elif defined(NETEQ_WIDEBAND)
+    #define SHARED_MEM_SIZE (2*640)
+#else
+    #define SHARED_MEM_SIZE 640
+#endif
+
+/* Struct to hold the NetEQ instance */
+typedef struct
+{
+    DSPInst_t DSPinst; /* DSP part of the NetEQ instance */
+    MCUInst_t MCUinst; /* MCU part of the NetEQ instance */
+    WebRtc_Word16 ErrorCode; /* Store last error code */
+#ifdef NETEQ_STEREO
+    WebRtc_Word16 masterSlave; /* 0 = not set, 1 = master, 2 = slave */
+#endif /* NETEQ_STEREO */
+} MainInst_t;
+
+/* Struct used for communication between DSP and MCU sides of NetEQ */
+typedef struct
+{
+    WebRtc_UWord32 playedOutTS; /* Timestamp position at end of DSP data */
+    WebRtc_UWord16 samplesLeft; /* Number of samples stored */
+    WebRtc_Word16 MD; /* Multiple description codec information */
+    WebRtc_Word16 lastMode; /* Latest mode of NetEQ playout */
+    WebRtc_Word16 frameLen; /* Frame length of previously decoded packet */
+} DSP2MCU_info_t;
+
+/* Initialize instances with read and write address */
+int WebRtcNetEQ_DSPinit(MainInst_t *inst);
+
+/* The DSP side will call this function to interrupt the MCU side */
+int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem);
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/mcu_reset.c b/trunk/src/modules/audio_coding/neteq/mcu_reset.c
new file mode 100644
index 0000000..3aae4ce
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mcu_reset.c
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Reset MCU side data.
+ */
+
+#include "mcu.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "automode.h"
+
+int WebRtcNetEQ_McuReset(MCUInst_t *inst)
+{
+
+#ifdef NETEQ_ATEVENT_DECODE
+    int ok;
+#endif
+
+    /* MCU/DSP Communication layer */
+    inst->pw16_readAddress = NULL;
+    inst->pw16_writeAddress = NULL;
+    inst->main_inst = NULL;
+    inst->one_desc = 0;
+    inst->BufferStat_inst.Automode_inst.extraDelayMs = 0;
+    inst->NetEqPlayoutMode = kPlayoutOn;
+
+    WebRtcNetEQ_DbReset(&inst->codec_DB_inst);
+    memset(&inst->PayloadSplit_inst, 0, sizeof(SplitInfo_t));
+
+    /* Clear the Packet buffer and the pointer to memory storage */
+    WebRtcNetEQ_PacketBufferFlush(&inst->PacketBuffer_inst);
+    inst->PacketBuffer_inst.memorySizeW16 = 0;
+    inst->PacketBuffer_inst.maxInsertPositions = 0;
+
+    /* Clear the decision and delay history */
+    memset(&inst->BufferStat_inst, 0, sizeof(BufstatsInst_t));
+#ifdef NETEQ_ATEVENT_DECODE
+    ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
+    if (ok != 0)
+    {
+        return ok;
+    }
+#endif
+    inst->NoOfExpandCalls = 0;
+    inst->current_Codec = -1;
+    inst->current_Payload = -1;
+
+    inst->millisecondsPerCall = 10;
+    inst->timestampsPerCall = inst->millisecondsPerCall * 8;
+    inst->fs = 8000;
+    inst->first_packet = 1;
+
+    WebRtcNetEQ_ResetMcuInCallStats(inst);
+
+    WebRtcNetEQ_ResetWaitingTimeStats(inst);
+
+    WebRtcNetEQ_ResetMcuJitterStat(inst);
+
+    WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
+        inst->PacketBuffer_inst.maxInsertPositions);
+
+    return 0;
+}
+
+/*
+ * Reset MCU-side statistics variables for the in-call statistics.
+ */
+
+int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst)
+{
+    inst->lostTS = 0;
+    inst->lastReportTS = 0;
+    inst->PacketBuffer_inst.discardedPackets = 0;
+
+    return 0;
+}
+
+/*
+ * Reset waiting-time statistics.
+ */
+
+void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst) {
+  memset(inst->waiting_times, 0,
+         kLenWaitingTimes * sizeof(inst->waiting_times[0]));
+  inst->len_waiting_times = 0;
+  inst->next_waiting_time_index = 0;
+}
+
+/*
+ * Store waiting-time in the statistics.
+ */
+
+void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time) {
+  assert(inst->next_waiting_time_index < kLenWaitingTimes);
+  inst->waiting_times[inst->next_waiting_time_index] = waiting_time;
+  inst->next_waiting_time_index++;
+  if (inst->next_waiting_time_index >= kLenWaitingTimes) {
+    inst->next_waiting_time_index = 0;
+  }
+  if (inst->len_waiting_times < kLenWaitingTimes) {
+    inst->len_waiting_times++;
+  }
+}
+
+/*
+ * Reset all MCU-side statistics variables for the post-call statistics.
+ */
+
+int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst)
+{
+    inst->BufferStat_inst.Automode_inst.countIAT500ms = 0;
+    inst->BufferStat_inst.Automode_inst.countIAT1000ms = 0;
+    inst->BufferStat_inst.Automode_inst.countIAT2000ms = 0;
+    inst->BufferStat_inst.Automode_inst.longestIATms = 0;
+
+    return 0;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/merge.c b/trunk/src/modules/audio_coding/neteq/merge.c
new file mode 100644
index 0000000..5f020a9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/merge.c
@@ -0,0 +1,564 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the function to merge a new packet with expanded data after a packet loss.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_Merge(...)
+ *
+ * This function merges newly decoded speech with previously generated expansion data.
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *      - decoded       : Pointer to new decoded speech.
+ *      - len           : Number of samples in pw16_decoded.
+ *
+ *
+ * Output:
+ *      - inst          : Updated user information
+ *      - outData       : Pointer to a memory space where the output data
+ *                        should be stored
+ *      - pw16_len      : Number of samples written to pw16_outData
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+/* Scratch usage:
+
+ Type           Name                    size            startpos        endpos
+ WebRtc_Word16  pw16_expanded           210*fs/8000     0               209*fs/8000
+ WebRtc_Word16  pw16_expandedLB         100             210*fs/8000     99+210*fs/8000
+ WebRtc_Word16  pw16_decodedLB          40              100+210*fs/8000 139+210*fs/8000
+ WebRtc_Word32  pw32_corr               2*60            140+210*fs/8000 260+210*fs/8000
+ WebRtc_Word16  pw16_corrVec            68              210*fs/8000     67+210*fs/8000
+
+ [gap in scratch vector]
+
+ func           WebRtcNetEQ_Expand      40+370*fs/8000  126*fs/8000     39+496*fs/8000
+
+ Total:  40+496*fs/8000
+ */
+
+#define SCRATCH_pw16_expanded          0
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define SCRATCH_pw16_expandedLB        1260
+#define SCRATCH_pw16_decodedLB         1360
+#define SCRATCH_pw32_corr              1400
+#define SCRATCH_pw16_corrVec           1260
+#define SCRATCH_NETEQ_EXPAND            756
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define SCRATCH_pw16_expandedLB        840
+#define SCRATCH_pw16_decodedLB         940
+#define SCRATCH_pw32_corr              980
+#define SCRATCH_pw16_corrVec           840
+#define SCRATCH_NETEQ_EXPAND            504
+#elif (defined(NETEQ_WIDEBAND)) 
+#define SCRATCH_pw16_expandedLB        420
+#define SCRATCH_pw16_decodedLB         520
+#define SCRATCH_pw32_corr              560
+#define SCRATCH_pw16_corrVec           420
+#define SCRATCH_NETEQ_EXPAND            252
+#else    /* NB */
+#define SCRATCH_pw16_expandedLB        210
+#define SCRATCH_pw16_decodedLB         310
+#define SCRATCH_pw32_corr              350
+#define SCRATCH_pw16_corrVec           210
+#define SCRATCH_NETEQ_EXPAND            126
+#endif
+
+int WebRtcNetEQ_Merge(DSPInst_t *inst,
+#ifdef SCRATCH
+                      WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                      WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
+                      WebRtc_Word16 *pw16_len)
+{
+
+    WebRtc_Word16 fs_mult;
+    WebRtc_Word16 fs_shift;
+    WebRtc_Word32 w32_En_new_frame, w32_En_old_frame;
+    WebRtc_Word16 w16_expmax, w16_newmax;
+    WebRtc_Word16 w16_tmp, w16_tmp2;
+    WebRtc_Word32 w32_tmp;
+#ifdef SCRATCH
+    WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
+    WebRtc_Word16 *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
+    WebRtc_Word16 *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
+    WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
+#else
+    WebRtc_Word16 pw16_expanded[(125+80+5)*FSMULT];
+    WebRtc_Word16 pw16_expandedLB[100];
+    WebRtc_Word16 pw16_decodedLB[40];
+    WebRtc_Word32 pw32_corr[60];
+    WebRtc_Word16 pw16_corrVec[4+60+4];
+#endif
+    WebRtc_Word16 *pw16_corr = &pw16_corrVec[4];
+    WebRtc_Word16 w16_stopPos = 0, w16_bestIndex, w16_interpLen;
+    WebRtc_Word16 w16_bestVal; /* bestVal is dummy */
+    WebRtc_Word16 w16_startfact, w16_inc;
+    WebRtc_Word16 w16_expandedLen;
+    WebRtc_Word16 w16_startPos;
+    WebRtc_Word16 w16_expLen, w16_newLen = 0;
+    WebRtc_Word16 *pw16_decodedOut;
+    WebRtc_Word16 w16_muted;
+
+    int w16_decodedLen = len;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    /* Sample-rate dependent scale factors (fs_mult = fs/8000). */
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+    fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
+
+    /*************************************
+     * Generate data to merge with
+     *************************************/
+    /*
+     * Check how much data that is left since earlier
+     * (at least there should be the overlap)...
+     */
+    w16_startPos = inst->endPosition - inst->curPosition;
+    /* Get one extra expansion to merge and overlap with */
+    inst->ExpandInst.w16_stopMuting = 1;
+    inst->ExpandInst.w16_lagsDirection = 1; /* make sure we get the "optimal" lag */
+    inst->ExpandInst.w16_lagsPosition = -1; /* out of the 3 possible ones */
+    w16_expandedLen = 0; /* Does not fill any function currently */
+
+    if (w16_startPos >= 210 * FSMULT)
+    {
+        /*
+         * The number of samples available in the sync buffer is more than what fits in
+         * pw16_expanded. Keep the first 210*FSMULT samples, but shift them towards the end of
+         * the buffer. This is ok, since all of the buffer will be expand data anyway, so as
+         * long as the beginning is left untouched, we're fine.
+         */
+
+        w16_tmp = w16_startPos - 210 * FSMULT; /* length difference */
+
+        WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[inst->curPosition+w16_tmp] ,
+                               &inst->speechBuffer[inst->curPosition], 210*FSMULT);
+
+        inst->curPosition += w16_tmp; /* move start position of sync buffer accordingly */
+        w16_startPos = 210 * FSMULT; /* this is the truncated length */
+    }
+
+    WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+        pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+        pw16_expanded, /* let Expand write to beginning of pw16_expanded to avoid overflow */
+        &w16_newLen, 0);
+
+    /*
+     * Now shift the data in pw16_expanded to where it belongs.
+     * Truncate all that ends up outside the vector.
+     */
+
+    WEBRTC_SPL_MEMMOVE_W16(&pw16_expanded[w16_startPos], pw16_expanded,
+                           WEBRTC_SPL_MIN(w16_newLen,
+                               WEBRTC_SPL_MAX(210*FSMULT - w16_startPos, 0) ) );
+
+    inst->ExpandInst.w16_stopMuting = 0;
+
+    /* Copy what is left since earlier into the expanded vector */
+
+    WEBRTC_SPL_MEMCPY_W16(pw16_expanded, &inst->speechBuffer[inst->curPosition], w16_startPos);
+
+    /*
+     * Do "ugly" copy and paste from the expanded in order to generate more data
+     * to correlate (but not interpolate) with.
+     */
+    w16_expandedLen = (120 + 80 + 2) * fs_mult;
+    w16_expLen = w16_startPos + w16_newLen;
+
+    if (w16_expLen < w16_expandedLen)
+    {
+        /* Repeat the last expansion until w16_expandedLen samples are available. */
+        while ((w16_expLen + w16_newLen) < w16_expandedLen)
+        {
+            WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
+                w16_newLen);
+            w16_expLen += w16_newLen;
+        }
+
+        /* Copy last part (fraction of a whole expansion) */
+
+        WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
+                              (w16_expandedLen-w16_expLen));
+    }
+    w16_expLen = w16_expandedLen;
+
+    /* Adjust muting factor (main muting factor times expand muting factor) */
+    inst->w16_muteFactor
+        = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
+            inst->ExpandInst.w16_expandMuteFactor, 14);
+
+    /* Adjust muting factor if new vector is more or less of the BGN energy */
+    len = WEBRTC_SPL_MIN(64*fs_mult, w16_decodedLen);
+    w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (WebRtc_Word16) len);
+    w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+    /* Calculate energy of old data */
+    w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_expmax, w16_expmax));
+    w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
+    w32_En_old_frame = WebRtcNetEQ_DotW16W16(pw16_expanded, pw16_expanded, len, w16_tmp);
+
+    /* Calculate energy of new data */
+    w16_tmp2 = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_newmax, w16_newmax));
+    w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2,0);
+    w32_En_new_frame = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, len, w16_tmp2);
+
+    /* Align to same Q-domain */
+    if (w16_tmp2 > w16_tmp)
+    {
+        w32_En_old_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_old_frame, (w16_tmp2-w16_tmp));
+    }
+    else
+    {
+        w32_En_new_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_new_frame, (w16_tmp-w16_tmp2));
+    }
+
+    /* Calculate muting factor to use for new frame */
+    if (w32_En_new_frame > w32_En_old_frame)
+    {
+        /* Normalize w32_En_new_frame to 14 bits */
+        w16_tmp = WebRtcSpl_NormW32(w32_En_new_frame) - 17;
+        w32_En_new_frame = WEBRTC_SPL_SHIFT_W32(w32_En_new_frame, w16_tmp);
+
+        /*
+         * Put w32_En_old_frame in a domain 14 higher, so that
+         * w32_En_old_frame/w32_En_new_frame is in Q14
+         */
+        w16_tmp = w16_tmp + 14;
+        w32_En_old_frame = WEBRTC_SPL_SHIFT_W32(w32_En_old_frame, w16_tmp);
+        w16_tmp
+            = WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (WebRtc_Word16) w32_En_new_frame);
+        /* Calculate sqrt(w32_En_old_frame/w32_En_new_frame) in Q14 */
+        w16_muted = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+            WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,14));
+    }
+    else
+    {
+        w16_muted = 16384; /* Set = 1.0 when old frame has higher energy than new */
+    }
+
+    /* Raise the continued muting factor to w16_muted if w16_muteFactor is lower */
+    if (w16_muted > inst->w16_muteFactor)
+    {
+        inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
+    }
+
+#ifdef NETEQ_STEREO
+
+    /* Sanity for msInfo */
+    if (msInfo == NULL)
+    {
+        /* this should not happen here */
+        return MASTER_SLAVE_ERROR;
+    }
+
+    /* do not downsample and calculate correlations for slave instance(s) */
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+#endif
+
+        /*********************************************
+         * Downsample to 4kHz and find best overlap
+         *********************************************/
+
+        /* Downsample to 4 kHz */
+        if (inst->fs == 8000)
+        {
+            WebRtcSpl_DownsampleFast(&pw16_expanded[2], (WebRtc_Word16) (w16_expandedLen - 2),
+                pw16_expandedLB, (WebRtc_Word16) (100),
+                (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl, (WebRtc_Word16) 3,
+                (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+            if (w16_decodedLen <= 80)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                WebRtc_Word16 temp_len = w16_decodedLen - 2;
+                w16_tmp = temp_len / 2;
+                WebRtcSpl_DownsampleFast(&pw16_decoded[2], temp_len,
+                                         pw16_decodedLB, w16_tmp,
+                                         (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
+                    (WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40 - w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(&pw16_decoded[2],
+                    (WebRtc_Word16) (w16_decodedLen - 2), pw16_decodedLB,
+                    (WebRtc_Word16) (40), (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
+                    (WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+            }
+#ifdef NETEQ_WIDEBAND
+        }
+        else if (inst->fs==16000)
+        {
+            WebRtcSpl_DownsampleFast(
+                &pw16_expanded[4], (WebRtc_Word16)(w16_expandedLen-4),
+                pw16_expandedLB, (WebRtc_Word16)(100),
+                (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
+                (WebRtc_Word16)4, (WebRtc_Word16)0);
+            if (w16_decodedLen<=160)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                WebRtc_Word16 temp_len = w16_decodedLen - 4;
+                w16_tmp = temp_len / 4;
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[4], temp_len,
+                    pw16_decodedLB, w16_tmp,
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
+                    (WebRtc_Word16)4, (WebRtc_Word16)0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[4], (WebRtc_Word16)(w16_decodedLen-4),
+                    pw16_decodedLB, (WebRtc_Word16)(40),
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
+                    (WebRtc_Word16)4, (WebRtc_Word16)0);
+            }
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        }
+        else if (inst->fs==32000)
+        {
+            /*
+             * TODO(hlundin) Why is the offset into pw16_expanded 6?
+             */
+            WebRtcSpl_DownsampleFast(
+                &pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
+                pw16_expandedLB, (WebRtc_Word16)(100),
+                (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
+                (WebRtc_Word16)8, (WebRtc_Word16)0);
+            if (w16_decodedLen<=320)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                WebRtc_Word16 temp_len = w16_decodedLen - 6;
+                w16_tmp = temp_len / 8;
+                WebRtcSpl_DownsampleFast(
+                      &pw16_decoded[6], temp_len,
+                      pw16_decodedLB, w16_tmp,
+                      (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
+                      (WebRtc_Word16)8, (WebRtc_Word16)0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
+                    pw16_decodedLB, (WebRtc_Word16)(40),
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
+                    (WebRtc_Word16)8, (WebRtc_Word16)0);
+            }
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        }
+        else /* if (inst->fs==48000) */
+        {
+            /*
+             * TODO(hlundin) Why is the offset into pw16_expanded 6?
+             */
+            WebRtcSpl_DownsampleFast(
+                &pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
+                pw16_expandedLB, (WebRtc_Word16)(100),
+                (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
+                (WebRtc_Word16)12, (WebRtc_Word16)0);
+            if (w16_decodedLen<=320)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                /*
+                 * TODO(hlundin): Is this correct? Downsampling is a factor 12
+                 * but w16_tmp = temp_len / 8.
+                 * (Was w16_tmp = ((w16_decodedLen-6)>>3) before re-write.)
+                 */
+                WebRtc_Word16 temp_len = w16_decodedLen - 6;
+                w16_tmp = temp_len / 8;
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[6], temp_len,
+                    pw16_decodedLB, w16_tmp,
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
+                    (WebRtc_Word16)12, (WebRtc_Word16)0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
+                    pw16_decodedLB, (WebRtc_Word16)(40),
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
+                    (WebRtc_Word16)12, (WebRtc_Word16)0);
+            }
+#endif
+        }
+
+        /* Calculate correlation without any normalization (40 samples) */
+        w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) inst->ExpandInst.w16_maxLag,
+            (WebRtc_Word16) (fs_mult * 2)) + 1;
+        w16_stopPos = WEBRTC_SPL_MIN(60, w16_tmp);
+        w32_tmp = WEBRTC_SPL_MUL_16_16(w16_expmax, w16_newmax);
+        /* NOTE(review): threshold is roughly 2^28/10; presumably it guards
+           against overflow in the un-normalized correlation -- confirm. */
+        if (w32_tmp > 26843546)
+        {
+            w16_tmp = 3;
+        }
+        else
+        {
+            w16_tmp = 0;
+        }
+
+        WebRtcNetEQ_CrossCorr(pw32_corr, pw16_decodedLB, pw16_expandedLB, 40,
+            (WebRtc_Word16) w16_stopPos, w16_tmp, 1);
+
+        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+        WebRtcSpl_MemSetW16(pw16_corrVec, 0, (4 + 60 + 4));
+        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_stopPos);
+        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_stopPos, pw32_corr, w16_tmp);
+
+        /* Calculate allowed starting point for peak finding.
+         The peak location bestIndex must fulfill two criteria:
+         (1) w16_bestIndex+w16_decodedLen < inst->timestampsPerCall+inst->ExpandInst.w16_overlap
+         (2) w16_bestIndex+w16_decodedLen < w16_startPos */
+        w16_tmp = WEBRTC_SPL_MAX(0, WEBRTC_SPL_MAX(w16_startPos,
+                inst->timestampsPerCall+inst->ExpandInst.w16_overlap) - w16_decodedLen);
+        /* Downscale starting index to 4kHz domain */
+        w16_tmp2 = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) w16_tmp,
+            (WebRtc_Word16) (fs_mult << 1));
+
+#ifdef NETEQ_STEREO
+    } /* end if (msInfo->msMode != NETEQ_SLAVE)  */
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* This is master or mono instance; find peak */
+        WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
+            &w16_bestVal);
+        w16_bestIndex += w16_tmp; /* compensate for modified starting index */
+        msInfo->bestIndex = w16_bestIndex;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        /* Get peak location from master instance */
+        w16_bestIndex = msInfo->bestIndex;
+    }
+    else
+    {
+        /* Invalid mode */
+        return MASTER_SLAVE_ERROR;
+    }
+
+#else /* NETEQ_STEREO */
+
+    /* Find peak */
+    WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
+        &w16_bestVal);
+    w16_bestIndex += w16_tmp; /* compensate for modified starting index */
+
+#endif /* NETEQ_STEREO */
+
+    /*
+     * Ensure that underrun does not occur for 10ms case => we have to get at least
+     * 10ms + overlap . (This should never happen thanks to the above modification of
+     * peak-finding starting point.)
+     * */
+    while ((w16_bestIndex + w16_decodedLen) < (inst->timestampsPerCall
+        + inst->ExpandInst.w16_overlap) || w16_bestIndex + w16_decodedLen < w16_startPos)
+    {
+        w16_bestIndex += w16_newLen; /* Jump one lag ahead */
+    }
+    pw16_decodedOut = pw16_outData + w16_bestIndex;
+
+    /* Mute the new decoded data if needed (and unmute it linearly) */
+    w16_interpLen = WEBRTC_SPL_MIN(60*fs_mult,
+        w16_expandedLen-w16_bestIndex); /* this is the overlapping part of pw16_expanded */
+    w16_interpLen = WEBRTC_SPL_MIN(w16_interpLen, w16_decodedLen);
+    w16_inc = WebRtcSpl_DivW32W16ResW16(4194,
+        fs_mult); /* in Q20, 0.004 for NB and 0.002 for WB */
+    if (inst->w16_muteFactor < 16384)
+    {
+        WebRtcNetEQ_UnmuteSignal(pw16_decoded, &inst->w16_muteFactor, pw16_decoded, w16_inc,
+            (WebRtc_Word16) w16_interpLen);
+        WebRtcNetEQ_UnmuteSignal(&pw16_decoded[w16_interpLen], &inst->w16_muteFactor,
+            &pw16_decodedOut[w16_interpLen], w16_inc,
+            (WebRtc_Word16) (w16_decodedLen - w16_interpLen));
+    }
+    else
+    {
+        /* No muting needed */
+
+        WEBRTC_SPL_MEMMOVE_W16(&pw16_decodedOut[w16_interpLen], &pw16_decoded[w16_interpLen],
+            (w16_decodedLen-w16_interpLen));
+    }
+
+    /* Do overlap and interpolate linearly */
+    w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (WebRtc_Word16) (w16_interpLen + 1)); /* Q14 */
+    w16_startfact = (16384 - w16_inc);
+    WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_expanded, w16_bestIndex);
+    WebRtcNetEQ_MixVoiceUnvoice(pw16_decodedOut, &pw16_expanded[w16_bestIndex], pw16_decoded,
+        &w16_startfact, w16_inc, w16_interpLen);
+
+    inst->w16_mode = MODE_MERGE;
+    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
+
+    /* New added length (w16_startPos samples were borrowed) */
+    *pw16_len = w16_bestIndex + w16_decodedLen - w16_startPos;
+
+    /* Update VQmon parameter */
+    inst->w16_concealedTS += (*pw16_len - w16_decodedLen);
+    inst->w16_concealedTS = WEBRTC_SPL_MAX(0, inst->w16_concealedTS);
+
+    /* Update in-call and post-call statistics */
+    if (inst->ExpandInst.w16_expandMuteFactor == 0)
+    {
+        /* expansion generates noise only */
+        inst->statInst.expandedNoiseSamples += (*pw16_len - w16_decodedLen);
+    }
+    else
+    {
+        /* expansion generates more than only noise */
+        inst->statInst.expandedVoiceSamples += (*pw16_len - w16_decodedLen);
+    }
+    inst->statInst.expandLength += (*pw16_len - w16_decodedLen);
+
+
+    /* Copy back the first part of the data to the speechHistory */
+
+    WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition], pw16_outData, w16_startPos);
+
+
+    /* Move data to within outData */
+
+    WEBRTC_SPL_MEMMOVE_W16(pw16_outData, &pw16_outData[w16_startPos], (*pw16_len));
+
+    return 0;
+}
+
+#undef     SCRATCH_pw16_expanded
+#undef     SCRATCH_pw16_expandedLB
+#undef     SCRATCH_pw16_decodedLB
+#undef     SCRATCH_pw32_corr
+#undef     SCRATCH_pw16_corrVec
+#undef     SCRATCH_NETEQ_EXPAND
diff --git a/trunk/src/modules/audio_coding/neteq/min_distortion.c b/trunk/src/modules/audio_coding/neteq/min_distortion.c
new file mode 100644
index 0000000..4c9ee1c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/min_distortion.c
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Calculate best overlap fit according to distortion measure.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
+                                        WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
+                                        WebRtc_Word16 len, WebRtc_Word32 *pw16_dist)
+{
+    /* Search [w16_minLag, w16_maxLag] for the lag with the smallest sum of
+       absolute differences between the data and its lagged version. Returns
+       the best lag (or -1 if the range is empty) and stores the minimum
+       distortion in *pw16_dist. */
+    int lag;
+    int n;
+    WebRtc_Word16 bestLag = -1;
+    WebRtc_Word32 bestSum = WEBRTC_SPL_WORD32_MAX;
+
+    for (lag = w16_minLag; lag <= w16_maxLag; lag++)
+    {
+        const WebRtc_Word16 *pw16_ref = pw16_data;
+        const WebRtc_Word16 *pw16_lagged = pw16_data - lag;
+        WebRtc_Word32 w32_sad = 0;
+
+        /* Accumulate the absolute difference over the analysis window. */
+        for (n = 0; n < len; n++)
+        {
+            WebRtc_Word32 w32_delta = pw16_ref[n] - pw16_lagged[n];
+            w32_sad += WEBRTC_SPL_ABS_W32(w32_delta);
+        }
+
+        /* Keep the lag with the lowest distortion seen so far. */
+        if (w32_sad < bestSum)
+        {
+            bestSum = w32_sad;
+            bestLag = (WebRtc_Word16) lag;
+        }
+    }
+
+    *pw16_dist = bestSum;
+
+    return bestLag;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/mix_voice_unvoice.c b/trunk/src/modules/audio_coding/neteq/mix_voice_unvoice.c
new file mode 100644
index 0000000..9895630
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mix_voice_unvoice.c
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function mixes a voiced signal with an unvoiced signal and
+ * updates the weight on a sample by sample basis.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
+                                 WebRtc_Word16 *pw16_unvoicedVec,
+                                 WebRtc_Word16 *w16_current_vfraction,
+                                 WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N)
+{
+    /* Cross-fade the voiced and unvoiced signals sample by sample. The
+       voiced weight (Q14, starts at *w16_current_vfraction) decreases by
+       w16_vfraction_change per sample while the unvoiced weight grows by
+       the same amount; the updated weight is written back on exit. */
+    WebRtc_Word16 w16_voicedWeight = *w16_current_vfraction;
+    WebRtc_Word16 w16_unvoicedWeight = 16384 - w16_voicedWeight;
+    int n;
+
+    for (n = 0; n < N; n++)
+    {
+        WebRtc_Word32 w32_acc =
+            WEBRTC_SPL_MUL_16_16(w16_voicedWeight, pw16_voicedVec[n]) +
+            WEBRTC_SPL_MUL_16_16(w16_unvoicedWeight, pw16_unvoicedVec[n]) +
+            8192; /* rounding offset for the Q14 shift below */
+        pw16_outData[n] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_acc, 14);
+        w16_voicedWeight -= w16_vfraction_change;
+        w16_unvoicedWeight += w16_vfraction_change;
+    }
+    *w16_current_vfraction = w16_voicedWeight;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/mute_signal.c b/trunk/src/modules/audio_coding/neteq/mute_signal.c
new file mode 100644
index 0000000..ee899cf
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/mute_signal.c
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function mutes a signal linearly on a sample by sample basis.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
+                            WebRtc_Word16 N)
+{
+    /* Ramp the signal down in place. The gain starts at 1.0 (16384 in Q14,
+       held in Q20 together with a rounding offset of 32) and is reduced by
+       muteSlope for every processed sample. */
+    WebRtc_Word32 w32_gainQ20 = (16384 << 6) + 32; /* == 1048608 */
+    int n;
+
+    for (n = 0; n < N; n++)
+    {
+        WebRtc_Word16 w16_gainQ14 = (WebRtc_Word16) (w32_gainQ20 >> 6);
+        pw16_inout[n] = (WebRtc_Word16)
+            ((WEBRTC_SPL_MUL_16_16(w16_gainQ14, pw16_inout[n]) + 8192) >> 14);
+        w32_gainQ20 -= muteSlope;
+    }
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/neteq.gypi b/trunk/src/modules/audio_coding/neteq/neteq.gypi
new file mode 100644
index 0000000..c72efdd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/neteq.gypi
@@ -0,0 +1,289 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'NetEq',
+      'type': '<(library)',
+      'dependencies': [
+        'CNG',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'defines': [
+        'NETEQ_VOICEENGINE_CODECS', # TODO: Should create a Chrome define which
+        'SCRATCH',                  # specifies a subset of codecs to support.
+      ],
+      'include_dirs': [
+        'interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'interface',
+        ],
+      },
+      'sources': [
+        'interface/webrtc_neteq.h',
+        'interface/webrtc_neteq_help_macros.h',
+        'interface/webrtc_neteq_internal.h',
+        'accelerate.c',
+        'automode.c',
+        'automode.h',
+        'bgn_update.c',
+        'buffer_stats.h',
+        'bufstats_decision.c',
+        'cng_internal.c',
+        'codec_db.c',
+        'codec_db.h',
+        'codec_db_defines.h',
+        'correlator.c',
+        'delay_logging.h',
+        'dsp.c',
+        'dsp.h',
+        'dsp_helpfunctions.c',
+        'dsp_helpfunctions.h',
+        'dtmf_buffer.c',
+        'dtmf_buffer.h',
+        'dtmf_tonegen.c',
+        'dtmf_tonegen.h',
+        'expand.c',
+        'mcu.h',
+        'mcu_address_init.c',
+        'mcu_dsp_common.c',
+        'mcu_dsp_common.h',
+        'mcu_reset.c',
+        'merge.c',
+        'min_distortion.c',
+        'mix_voice_unvoice.c',
+        'mute_signal.c',
+        'neteq_defines.h',
+        'neteq_error_codes.h',
+        'neteq_statistics.h',
+        'normal.c',
+        'packet_buffer.c',
+        'packet_buffer.h',
+        'peak_detection.c',
+        'preemptive_expand.c',
+        'random_vector.c',
+        'recin.c',
+        'recout.c',
+        'rtcp.c',
+        'rtcp.h',
+        'rtp.c',
+        'rtp.h',
+        'set_fs.c',
+        'signal_mcu.c',
+        'split_and_insert.c',
+        'unmute_signal.c',
+        'webrtc_neteq.c',
+      ],
+    },
+  ], # targets
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'neteq_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'NetEq',
+            'NetEqTestTools',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'webrtc_neteq_unittest.cc',
+          ],
+        }, # neteq_unittests
+        {
+          'target_name': 'NetEqRTPplay',
+          'type': 'executable',
+          'dependencies': [
+            'NetEq',         # NetEQ library defined above
+            'NetEqTestTools',# Test helpers
+            'G711',
+            'G722',
+            'PCM16B',
+            'iLBC',
+            'iSAC',
+            'CNG',
+          ],
+          'defines': [
+            # TODO: Make codec selection conditional on definitions in target NetEq
+            'CODEC_ILBC',
+            'CODEC_PCM16B',
+            'CODEC_G711',
+            'CODEC_G722',
+            'CODEC_ISAC',
+            'CODEC_PCM16B_WB',
+            'CODEC_ISAC_SWB',
+            'CODEC_PCM16B_32KHZ',
+            'CODEC_CNGCODEC8',
+            'CODEC_CNGCODEC16',
+            'CODEC_CNGCODEC32',
+            'CODEC_ATEVENT_DECODE',
+            'CODEC_RED',
+          ],
+          'include_dirs': [
+            '.',
+            'test',
+          ],
+          'sources': [
+            'test/NetEqRTPplay.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPencode',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',# Test helpers
+            'G711',
+            'G722',
+            'PCM16B',
+            'iLBC',
+            'iSAC',
+            'CNG',
+            '<(webrtc_root)/common_audio/common_audio.gyp:vad',
+          ],
+          'defines': [
+            # TODO: Make codec selection conditional on definitions in target NetEq
+            'CODEC_ILBC',
+            'CODEC_PCM16B',
+            'CODEC_G711',
+            'CODEC_G722',
+            'CODEC_ISAC',
+            'CODEC_PCM16B_WB',
+            'CODEC_ISAC_SWB',
+            'CODEC_PCM16B_32KHZ',
+            'CODEC_CNGCODEC8',
+            'CODEC_CNGCODEC16',
+            'CODEC_CNGCODEC32',
+            'CODEC_ATEVENT_DECODE',
+            'CODEC_RED',
+          ],
+          'include_dirs': [
+            'interface',
+            'test',
+          ],
+          'sources': [
+            'test/RTPencode.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPjitter',
+          'type': 'executable',
+          'dependencies': [
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPjitter.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPanalyze',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPanalyze.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPchange',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPchange.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPtimeshift',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPtimeshift.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPcat',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPcat.cc',
+          ],
+        },
+        {
+          'target_name': 'NetEqTestTools',
+          # Collection of useful functions used in other tests
+          'type': '<(library)',
+          'dependencies': [
+            'G711',
+            'G722',
+            'PCM16B',
+            'iLBC',
+            'iSAC',
+            'CNG',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'direct_dependent_settings': {
+            'include_dirs': [
+              'interface',
+              'test',
+            ],
+          },
+          'defines': [
+            # TODO: Make codec selection conditional on definitions in target NetEq
+            'CODEC_ILBC',
+            'CODEC_PCM16B',
+            'CODEC_G711',
+            'CODEC_G722',
+            'CODEC_ISAC',
+            'CODEC_PCM16B_WB',
+            'CODEC_ISAC_SWB',
+            'CODEC_PCM16B_32KHZ',
+            'CODEC_CNGCODEC8',
+            'CODEC_CNGCODEC16',
+            'CODEC_CNGCODEC32',
+            'CODEC_ATEVENT_DECODE',
+            'CODEC_RED',
+          ],
+          'include_dirs': [
+            'interface',
+            'test',
+          ],
+          'sources': [
+            'test/NETEQTEST_NetEQClass.cc',
+            'test/NETEQTEST_RTPpacket.cc',
+            'test/NETEQTEST_CodecClass.cc',
+            'test/NETEQTEST_NetEQClass.h',
+            'test/NETEQTEST_RTPpacket.h',
+            'test/NETEQTEST_CodecClass.h',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_coding/neteq/neteq_defines.h b/trunk/src/modules/audio_coding/neteq/neteq_defines.h
new file mode 100644
index 0000000..318e6bb
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/neteq_defines.h
@@ -0,0 +1,356 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*****************************************************************************************
+ *
+ * Compilation flags in NetEQ:
+ *
+ *****************************************************************************************
+ *
+ ***** Platform flags ******
+ *
+ * SCRATCH                        Run NetEQ with "Scratch memory" to save some stack memory.
+ *                                Definition can be used on all platforms
+ *
+ ***** Summary flags ******
+ *
+ * NETEQ_ALL_SPECIAL_CODECS       Add support for special codecs (CN/RED/DTMF)
+ *
+ * NETEQ_ALL_NB_CODECS            Add support for all NB codecs (except CN/RED/DTMF)
+ *
+ * NETEQ_ALL_WB_CODECS            Add support for all WB codecs (except CN/RED/DTMF)
+ *
+ * NETEQ_VOICEENGINE_CODECS       Support for all NB, WB and SWB32 codecs and CN, RED and DTMF
+ *
+ * NETEQ_ALL_CODECS               Support for all NB, WB, SWB 32kHz and SWB 48kHz as well as
+ *                                CN, RED and DTMF
+ *
+ ***** Sampling frequency ****** 
+ * (Note: usually not needed when Summary flags are used)
+ *
+ * NETEQ_WIDEBAND                 Wideband enabled
+ *
+ * NETEQ_32KHZ_WIDEBAND           Super wideband @ 32kHz enabled
+ *
+ * NETEQ_48KHZ_WIDEBAND           Super wideband @ 48kHz enabled
+ *
+ ***** Special Codec ****** 
+ * (Note: not needed if NETEQ_ALL_CODECS is used)
+ *
+ * NETEQ_RED_CODEC                With this flag you enable NetEQ to understand redundancy in
+ *                                the RTP. NetEQ will use the redundancy if it's the same
+ *                                codec
+ *
+ * NETEQ_CNG_CODEC                Enable DTX with the CN payload
+ *
+ * NETEQ_ATEVENT_DECODE           Enable AVT event and play out the corresponding DTMF tone
+ *
+ ***** Speech Codecs *****
+ * (Note: Not needed if Summary flags are used)
+ *
+ * NETEQ_G711_CODEC               Enable G.711 u- and A-law
+ *
+ * NETEQ_PCM16B_CODEC             Enable uncompressed 16-bit
+ *
+ * NETEQ_ILBC_CODEC               Enable iLBC
+ *
+ * NETEQ_ISAC_CODEC               Enable iSAC
+ *
+ * NETEQ_ISAC_SWB_CODEC           Enable iSAC-SWB
+ *
+ * NETEQ_G722_CODEC               Enable G.722
+ *
+ * NETEQ_G729_CODEC               Enable G.729
+ *
+ * NETEQ_G729_1_CODEC             Enable G.729.1
+ *
+ * NETEQ_G726_CODEC               Enable G.726
+ *
+ * NETEQ_G722_1_CODEC             Enable G722.1
+ *
+ * NETEQ_G722_1C_CODEC            Enable G722.1 Annex C
+ *
+ * NETEQ_SPEEX_CODEC              Enable Speex (at 8 and 16 kHz sample rate)
+ *
+ * NETEQ_CELT_CODEC               Enable Celt (at 32 kHz sample rate)
+ *
+ * NETEQ_GSMFR_CODEC              Enable GSM-FR
+ *
+ * NETEQ_AMR_CODEC                Enable AMR (narrowband)
+ *
+ * NETEQ_AMRWB_CODEC              Enable AMR-WB
+ *
+ * NETEQ_CNG_CODEC                Enable DTX with the CNG payload
+ *
+ * NETEQ_ATEVENT_DECODE           Enable AVT event and play out the corresponding DTMF tone
+ *
+ ***** Test flags ******
+ *
+ * WEBRTC_NETEQ_40BITACC_TEST     Run NetEQ with simulated 40-bit accumulator to run
+ *                                bit-exact to a DSP implementation where the main (splib
+ *                                and NetEQ) functions have been 40-bit optimized
+ *
+ *****************************************************************************************
+ */
+
+#if !defined NETEQ_DEFINES_H
+#define NETEQ_DEFINES_H
+
+/* Data block structure for MCU to DSP communication:
+ *
+ *
+ *  First 3 16-bit words are pre-header that contains instructions and timestamp update
+ *  Fourth 16-bit word is length of data block 1
+ *  Rest is payload data
+ *
+ *  0               48          64          80
+ *  -------------...----------------------------------------------------------------------
+ *  |  PreHeader ... | Length 1 |  Payload data 1 ...... | Length 2| Data block 2....    | ...
+ *  -------------...----------------------------------------------------------------------
+ *
+ *
+ *  Preheader:
+ *  4 MSB can be either of:
+ */
+
+#define DSP_INSTR_NORMAL                         0x1000
+/* Payload data will contain the encoded frames */
+
+#define DSP_INSTR_MERGE                          0x2000
+/* Payload data block 1 will contain the encoded frame */
+/* Info block will contain the number of missing samples */
+
+#define DSP_INSTR_EXPAND                         0x3000
+/* Payload data will be empty */
+
+#define DSP_INSTR_ACCELERATE                     0x4000
+/* Payload data will contain the encoded frame */
+
+#define DSP_INSTR_DO_RFC3389CNG                  0x5000
+/* Payload data will contain the SID frame if there is one*/
+
+#define DSP_INSTR_DTMF_GENERATE                  0x6000
+/* Payload data will be one WebRtc_Word16 with the current DTMF value and one
+ * WebRtc_Word16 with the current volume value
+ */
+#define DSP_INSTR_NORMAL_ONE_DESC                0x7000
+/* No encoded frames */
+
+#define DSP_INSTR_DO_CODEC_INTERNAL_CNG          0x8000
+/* Codec has a built-in VAD/DTX scheme (use the above for "no transmission") */
+
+#define DSP_INSTR_PREEMPTIVE_EXPAND              0x9000
+/* Payload data will contain the encoded frames, if any */
+
+#define DSP_INSTR_DO_ALTERNATIVE_PLC             0xB000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS      0xC000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_DO_AUDIO_REPETITION            0xD000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_DO_AUDIO_REPETITION_INC_TS     0xE000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_FADE_TO_BGN                    0xF000
+/* Exception handling: fade out to BGN (expand) */
+
+/*
+ * Next 4 bits signal additional data that needs to be transmitted
+ */
+
+#define DSP_CODEC_NO_CHANGE                      0x0100
+#define DSP_CODEC_NEW_CODEC                      0x0200
+#define DSP_CODEC_ADD_LATE_PKT                   0x0300
+#define DSP_CODEC_RESET                          0x0400
+#define DSP_DTMF_PAYLOAD                         0x0010
+
+/*
+ * The most significant bit of the payload-length
+ * is used to flag whether the associated payload
+ * is redundant payload. This currently useful only for
+ * iSAC, where redundant payloads have to be treated 
+ * differently. Every time the length is read it must be
+ * masked by DSP_CODEC_MASK_RED_FLAG to ignore the flag.
+ * Use DSP_CODEC_RED_FLAG to set or retrieve the flag.
+ */
+#define DSP_CODEC_MASK_RED_FLAG                  0x7FFF
+#define DSP_CODEC_RED_FLAG                       0x8000
+
+/*
+ * The first block of payload data consist of decode function pointers,
+ * and then the speech blocks.
+ *
+ */
+
+
+/*
+ * The playout modes that NetEQ produces (i.e. they give more info about whether
+ * the Accelerate was successful or not)
+ */
+
+#define MODE_NORMAL                    0x0000
+#define MODE_EXPAND                    0x0001
+#define MODE_MERGE                     0x0002
+#define MODE_SUCCESS_ACCELERATE        0x0003
+#define MODE_UNSUCCESS_ACCELERATE      0x0004
+#define MODE_RFC3389CNG                0x0005
+#define MODE_LOWEN_ACCELERATE          0x0006
+#define MODE_DTMF                      0x0007
+#define MODE_ONE_DESCRIPTOR            0x0008
+#define MODE_CODEC_INTERNAL_CNG        0x0009
+#define MODE_SUCCESS_PREEMPTIVE        0x000A
+#define MODE_UNSUCCESS_PREEMPTIVE      0x000B
+#define MODE_LOWEN_PREEMPTIVE          0x000C
+#define MODE_FADE_TO_BGN               0x000D
+
+#define MODE_ERROR                     0x0010
+
+#define MODE_AWAITING_CODEC_PTR        0x0100
+
+#define MODE_BGN_ONLY                  0x0200
+
+#define MODE_MASTER_DTMF_SIGNAL        0x0400
+
+#define MODE_USING_STEREO              0x0800
+
+
+
+/***********************/
+/* Group codec defines */
+/***********************/
+
+#if (defined(NETEQ_ALL_SPECIAL_CODECS))
+    #define NETEQ_CNG_CODEC
+    #define NETEQ_ATEVENT_DECODE
+    #define NETEQ_RED_CODEC
+    #define NETEQ_VAD
+    #define NETEQ_ARBITRARY_CODEC
+#endif
+
+#if (defined(NETEQ_ALL_NB_CODECS))        /* Except RED, DTMF and CNG */
+    #define NETEQ_PCM16B_CODEC
+    #define NETEQ_G711_CODEC
+    #define NETEQ_ILBC_CODEC
+    #define NETEQ_G729_CODEC
+    #define NETEQ_G726_CODEC
+    #define NETEQ_GSMFR_CODEC
+    #define NETEQ_AMR_CODEC
+#endif
+
+#if (defined(NETEQ_ALL_WB_CODECS))        /* Except RED, DTMF and CNG */
+    #define NETEQ_ISAC_CODEC
+    #define NETEQ_G722_CODEC
+    #define NETEQ_G722_1_CODEC
+    #define NETEQ_G729_1_CODEC
+    #define NETEQ_SPEEX_CODEC
+    #define NETEQ_AMRWB_CODEC
+    #define NETEQ_WIDEBAND
+#endif
+
+#if (defined(NETEQ_ALL_WB32_CODECS))        /* AAC, RED, DTMF and CNG */
+    #define NETEQ_ISAC_SWB_CODEC
+    #define NETEQ_32KHZ_WIDEBAND
+    #define NETEQ_G722_1C_CODEC
+    #define NETEQ_CELT_CODEC
+#endif
+
+#if (defined(NETEQ_VOICEENGINE_CODECS))
+    /* Special codecs */
+    #define NETEQ_CNG_CODEC
+    #define NETEQ_ATEVENT_DECODE
+    #define NETEQ_RED_CODEC
+    #define NETEQ_VAD
+    #define NETEQ_ARBITRARY_CODEC
+
+    /* Narrowband codecs */
+    #define NETEQ_PCM16B_CODEC
+    #define NETEQ_G711_CODEC
+    #define NETEQ_ILBC_CODEC
+    #define NETEQ_AMR_CODEC
+    #define NETEQ_G729_CODEC
+    #define NETEQ_GSMFR_CODEC
+
+    /* Wideband codecs */
+    #define NETEQ_WIDEBAND
+    #define NETEQ_ISAC_CODEC
+    #define NETEQ_G722_CODEC
+    #define NETEQ_G722_1_CODEC
+    #define NETEQ_G729_1_CODEC
+    #define NETEQ_AMRWB_CODEC
+    #define NETEQ_SPEEX_CODEC
+
+    /* Super wideband 32kHz codecs */
+    #define NETEQ_ISAC_SWB_CODEC
+    #define NETEQ_32KHZ_WIDEBAND
+    #define NETEQ_G722_1C_CODEC
+    #define NETEQ_CELT_CODEC
+
+#endif 
+
+#if (defined(NETEQ_ALL_CODECS))
+    /* Special codecs */
+    #define NETEQ_CNG_CODEC
+    #define NETEQ_ATEVENT_DECODE
+    #define NETEQ_RED_CODEC
+    #define NETEQ_VAD
+    #define NETEQ_ARBITRARY_CODEC
+
+    /* Narrowband codecs */
+    #define NETEQ_PCM16B_CODEC
+    #define NETEQ_G711_CODEC
+    #define NETEQ_ILBC_CODEC
+    #define NETEQ_G729_CODEC
+    #define NETEQ_G726_CODEC
+    #define NETEQ_GSMFR_CODEC
+    #define NETEQ_AMR_CODEC
+
+    /* Wideband codecs */
+    #define NETEQ_WIDEBAND
+    #define NETEQ_ISAC_CODEC
+    #define NETEQ_G722_CODEC
+    #define NETEQ_G722_1_CODEC
+    #define NETEQ_G729_1_CODEC
+    #define NETEQ_SPEEX_CODEC
+    #define NETEQ_AMRWB_CODEC
+
+    /* Super wideband 32kHz codecs */
+    #define NETEQ_ISAC_SWB_CODEC
+    #define NETEQ_32KHZ_WIDEBAND
+    #define NETEQ_G722_1C_CODEC
+    #define NETEQ_CELT_CODEC
+
+    /* Super wideband 48kHz codecs */
+    #define NETEQ_48KHZ_WIDEBAND
+#endif
+
+/* Max output size from decoding one frame */
+#if defined(NETEQ_48KHZ_WIDEBAND)
+    #define NETEQ_MAX_FRAME_SIZE     2880    /* 60 ms super wideband */
+    #define NETEQ_MAX_OUTPUT_SIZE    3600    /* 60+15 ms super wideband (60 ms decoded + 15 ms for merge overlap) */
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+    #define NETEQ_MAX_FRAME_SIZE     1920    /* 60 ms super wideband */
+    #define NETEQ_MAX_OUTPUT_SIZE    2400    /* 60+15 ms super wideband (60 ms decoded + 15 ms for merge overlap) */
+#elif defined(NETEQ_WIDEBAND)
+    #define NETEQ_MAX_FRAME_SIZE     960        /* 60 ms wideband */
+    #define NETEQ_MAX_OUTPUT_SIZE    1200    /* 60+15 ms wideband (60 ms decoded + 15 ms for merge overlap) */
+#else
+    #define NETEQ_MAX_FRAME_SIZE     480        /* 60 ms narrowband */
+    #define NETEQ_MAX_OUTPUT_SIZE    600        /* 60+15 ms narrowband (60 ms decoded + 15 ms for merge overlap) */
+#endif
+
+
+/* Enable stereo */
+#define NETEQ_STEREO
+
+#endif /* #if !defined NETEQ_DEFINES_H */
+
diff --git a/trunk/src/modules/audio_coding/neteq/neteq_error_codes.h b/trunk/src/modules/audio_coding/neteq/neteq_error_codes.h
new file mode 100644
index 0000000..1ce4680
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/neteq_error_codes.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Definition of error codes.
+ *
+ * NOTE: When modifying the error codes,
+ * also modify the function WebRtcNetEQ_GetErrorCode!
+ */
+
+#if !defined NETEQ_ERROR_CODES_H
+#define NETEQ_ERROR_CODES_H
+
+/* Misc Error */
+#define NETEQ_OTHER_ERROR               -1000
+
+/* Misc Recout Errors */
+#define FAULTY_INSTRUCTION              -1001
+#define FAULTY_NETWORK_TYPE             -1002
+#define FAULTY_DELAYVALUE               -1003
+#define FAULTY_PLAYOUTMODE              -1004
+#define CORRUPT_INSTANCE                -1005
+#define ILLEGAL_MASTER_SLAVE_SWITCH     -1006
+#define MASTER_SLAVE_ERROR              -1007
+
+/* Misc Recout problems */
+#define UNKNOWN_BUFSTAT_DECISION        -2001
+#define RECOUT_ERROR_DECODING           -2002
+#define RECOUT_ERROR_SAMPLEUNDERRUN     -2003
+#define RECOUT_ERROR_DECODED_TOO_MUCH   -2004
+
+/* Misc RecIn problems */
+#define RECIN_CNG_ERROR                 -3001
+#define RECIN_UNKNOWNPAYLOAD            -3002
+#define RECIN_BUFFERINSERT_ERROR        -3003
+
+/* PBUFFER/BUFSTAT ERRORS */
+#define PBUFFER_INIT_ERROR              -4001
+#define PBUFFER_INSERT_ERROR1           -4002
+#define PBUFFER_INSERT_ERROR2           -4003
+#define PBUFFER_INSERT_ERROR3           -4004
+#define PBUFFER_INSERT_ERROR4           -4005
+#define PBUFFER_INSERT_ERROR5           -4006
+#define UNKNOWN_G723_HEADER             -4007
+#define PBUFFER_NONEXISTING_PACKET      -4008
+#define PBUFFER_NOT_INITIALIZED         -4009
+#define AMBIGUOUS_ILBC_FRAME_SIZE       -4010
+
+/* CODEC DATABASE ERRORS */
+#define CODEC_DB_FULL                   -5001
+#define CODEC_DB_NOT_EXIST1             -5002
+#define CODEC_DB_NOT_EXIST2             -5003
+#define CODEC_DB_NOT_EXIST3             -5004
+#define CODEC_DB_NOT_EXIST4             -5005
+#define CODEC_DB_UNKNOWN_CODEC          -5006
+#define CODEC_DB_PAYLOAD_TAKEN          -5007
+#define CODEC_DB_UNSUPPORTED_CODEC      -5008
+#define CODEC_DB_UNSUPPORTED_FS         -5009
+
+/* DTMF ERRORS */
+#define DTMF_DEC_PARAMETER_ERROR        -6001
+#define DTMF_INSERT_ERROR               -6002
+#define DTMF_GEN_UNKNOWN_SAMP_FREQ      -6003
+#define DTMF_NOT_SUPPORTED              -6004
+
+/* RTP/PACKET ERRORS */
+#define RED_SPLIT_ERROR1                -7001
+#define RED_SPLIT_ERROR2                -7002
+#define RTP_TOO_SHORT_PACKET            -7003
+#define RTP_CORRUPT_PACKET              -7004
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/neteq_statistics.h b/trunk/src/modules/audio_coding/neteq/neteq_statistics.h
new file mode 100644
index 0000000..d07f330
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/neteq_statistics.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Definitions of statistics data structures for MCU and DSP sides.
+ */
+
+#include "typedefs.h"
+
+#ifndef NETEQ_STATISTICS_H
+#define NETEQ_STATISTICS_H
+
+/*
+ * Statistics struct on DSP side
+ */
+typedef struct
+{
+
+    /* variables for in-call statistics; queried through WebRtcNetEQ_GetNetworkStatistics */
+    WebRtc_UWord32 expandLength; /* number of samples produced through expand */
+    WebRtc_UWord32 preemptiveLength; /* number of samples produced through pre-emptive
+     expand */
+    WebRtc_UWord32 accelerateLength; /* number of samples removed through accelerate */
+
+    /* variables for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics */
+    WebRtc_UWord32 expandedVoiceSamples; /* number of voice samples produced through expand */
+    WebRtc_UWord32 expandedNoiseSamples; /* number of noise (background) samples produced
+     through expand */
+
+} DSPStats_t;
+
+#endif
+
diff --git a/trunk/src/modules/audio_coding/neteq/normal.c b/trunk/src/modules/audio_coding/neteq/normal.c
new file mode 100644
index 0000000..b33940a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/normal.c
@@ -0,0 +1,279 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function for handling "normal" speech operation.
+ */
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+
+/* Scratch usage:
+
+ Type           Name                    size            startpos        endpos
+ WebRtc_Word16  pw16_expanded           125*fs/8000     0               125*fs/8000-1
+
+ func           WebRtcNetEQ_Expand      40+370*fs/8000  125*fs/8000     39+495*fs/8000
+
+ Total:  40+495*fs/8000
+ */
+
+#define     SCRATCH_PW16_EXPANDED           0
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define     SCRATCH_NETEQ_EXPAND    756
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define     SCRATCH_NETEQ_EXPAND    504
+#elif (defined(NETEQ_WIDEBAND)) 
+#define     SCRATCH_NETEQ_EXPAND    252
+#else    /* NB */
+#define     SCRATCH_NETEQ_EXPAND    126
+#endif
+
+/****************************************************************************
+ * WebRtcNetEQ_Normal(...)
+ *
+ * This function has the possibility to modify data that is played out in Normal
+ * mode, for example adjust the gain of the signal. The length of the signal 
+ * can not be changed.
+ *
+ * Input:
+ *      - inst          : NetEq instance, i.e. the user that requests more
+ *                        speech/audio data
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - decoded       : Pointer to vector of new data from decoder
+ *                        (Vector contents may be altered by the function)
+ *      - len           : Number of input samples
+ *
+ * Output:
+ *      - inst          : Updated user information
+ *      - outData       : Pointer to a memory space where the output data
+ *                        should be stored
+ *      - pw16_len      : Pointer to variable where the number of samples
+ *                        produced will be written
+ *
+ * Return value         : >=0 - Number of samples written to outData
+ *                         -1 - Error
+ */
+
+int WebRtcNetEQ_Normal(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
+{
+
+    int i;
+    WebRtc_Word16 fs_mult;
+    WebRtc_Word16 fs_shift;
+    WebRtc_Word32 w32_En_speech;
+    WebRtc_Word16 enLen;
+    WebRtc_Word16 w16_muted;
+    WebRtc_Word16 w16_inc, w16_frac;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word32 w32_tmp;
+
+    /* Sanity check */
+    if (len < 0)
+    {
+        /* Cannot have negative length of input vector */
+        return (-1);
+    }
+
+    if (len == 0)
+    {
+        /* Still got some data to play => continue with the same mode */
+        *pw16_len = len;
+        return (len);
+    }
+
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+    fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
+
+    /*
+     * Check if last RecOut call resulted in an Expand or a FadeToBGN. If so, we have to take
+     * care of some cross-fading and unmuting.
+     */
+    if (inst->w16_mode == MODE_EXPAND || inst->w16_mode == MODE_FADE_TO_BGN)
+    {
+
+        /* Define memory where temporary result from Expand algorithm can be stored. */
+#ifdef SCRATCH
+        WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
+#else
+        WebRtc_Word16 pw16_expanded[FSMULT * 125];
+#endif
+        WebRtc_Word16 expandedLen = 0;
+        WebRtc_Word16 w16_decodedMax;
+
+        /* Find largest value in new data */
+        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+        /* Generate interpolation data using Expand */
+        /* First, set Expand parameters to appropriate values. */
+        inst->ExpandInst.w16_lagsPosition = 0;
+        inst->ExpandInst.w16_lagsDirection = 0;
+        inst->ExpandInst.w16_stopMuting = 1; /* Do not mute signal any more */
+
+        /* Call Expand */
+        WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+            pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+            pw16_expanded, &expandedLen, (WebRtc_Word16) (inst->w16_mode == MODE_FADE_TO_BGN));
+
+        inst->ExpandInst.w16_stopMuting = 0; /* Restore value */
+        inst->ExpandInst.w16_consecExp = 0; /* Last was not Expand any more */
+
+        /* Adjust muting factor (main muting factor times expand muting factor) */
+        if (inst->w16_mode == MODE_FADE_TO_BGN)
+        {
+            /* If last mode was FadeToBGN, the mute factor should be zero. */
+            inst->w16_muteFactor = 0;
+        }
+        else
+        {
+            /* w16_muteFactor * w16_expandMuteFactor */
+            inst->w16_muteFactor
+                = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
+                    inst->ExpandInst.w16_expandMuteFactor, 14);
+        }
+
+        /* Adjust muting factor if needed (to BGN level) */
+        enLen = WEBRTC_SPL_MIN(fs_mult<<6, len); /* min( fs_mult * 64, len ) */
+        w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(
+            WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
+        w16_tmp = WEBRTC_SPL_MAX(w16_tmp, 0);
+        w32_En_speech = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, enLen, w16_tmp);
+        w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (WebRtc_Word16) (enLen >> w16_tmp));
+
+        if ((w32_En_speech != 0) && (w32_En_speech > inst->BGNInst.w32_energy))
+        {
+            /* Normalize new frame energy to 15 bits */
+            w16_tmp = WebRtcSpl_NormW32(w32_En_speech) - 16;
+            /* we want inst->BGNInst.energy/En_speech in Q14 */
+            w32_tmp = WEBRTC_SPL_SHIFT_W32(inst->BGNInst.w32_energy, (w16_tmp+14));
+            w16_tmp = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
+            w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
+            w16_muted = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+                WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) w16_tmp,
+                    14)); /* w16_muted in Q14 (sqrt(Q28)) */
+        }
+        else
+        {
+            w16_muted = 16384; /* 1.0 in Q14 */
+        }
+        if (w16_muted > inst->w16_muteFactor)
+        {
+            inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
+        }
+
+        /* If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14) */
+        w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
+        for (i = 0; i < len; i++)
+        {
+            /* scale with mute factor */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
+            /* shift 14 with proper rounding */
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
+            /* increase mute_factor towards 16384 */
+            inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
+        }
+
+        /*
+         * Interpolate the expanded data into the new vector
+         * (NB/WB/SWB32/SWB40 8/16/32/32 samples)
+         */
+        fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
+        w16_inc = 4 >> fs_shift;
+        w16_frac = w16_inc;
+        for (i = 0; i < 8 * fs_mult; i++)
+        {
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
+                    WEBRTC_SPL_MUL_16_16((32 - w16_frac), pw16_expanded[i]) + 8),
+                5);
+            w16_frac += w16_inc;
+        }
+
+#ifdef NETEQ_CNG_CODEC
+    }
+    else if (inst->w16_mode==MODE_RFC3389CNG)
+    { /* previous was RFC 3389 CNG...*/
+        WebRtc_Word16 pw16_CngInterp[32];
+        /* Reset mute factor and start up fresh */
+        inst->w16_muteFactor = 16384;
+        if (inst->CNG_Codec_inst != NULL)
+        {
+            /* Generate long enough for 32kHz */
+            if(WebRtcCng_Generate(inst->CNG_Codec_inst,pw16_CngInterp, 32, 0)<0)
+            {
+                /* error returned; set return vector to all zeros */
+                WebRtcSpl_MemSetW16(pw16_CngInterp, 0, 32);
+            }
+        }
+        else
+        {
+            /*
+             * If no CNG instance is defined, just copy from the decoded data.
+             * (This will result in interpolating the decoded with itself.)
+             */
+            WEBRTC_SPL_MEMCPY_W16(pw16_CngInterp, pw16_decoded, fs_mult * 8);
+        }
+        /*
+         * Interpolate the CNG into the new vector
+         * (NB/WB/SWB32kHz/SWB48kHz 8/16/32/32 samples)
+         */
+        fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
+        w16_inc = 4>>fs_shift;
+        w16_frac = w16_inc;
+        for (i = 0; i < 8 * fs_mult; i++)
+        {
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
+                    WEBRTC_SPL_MUL_16_16((32-w16_frac), pw16_CngInterp[i]) + 8),
+                5);
+            w16_frac += w16_inc;
+        }
+#endif
+
+    }
+    else if (inst->w16_muteFactor < 16384)
+    {
+        /*
+         * Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are still
+         * ramping up from previous muting.
+         * If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14)
+         */
+        w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
+        for (i = 0; i < len; i++)
+        {
+            /* scale with mute factor */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
+            /* shift 14 with proper rounding */
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
+            /* increase mute_factor towards 16384 */
+            inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
+        }
+    }
+
+    /* Copy data to other buffer */WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
+
+    inst->w16_mode = MODE_NORMAL;
+    *pw16_len = len;
+    return (len);
+
+}
+
+#undef SCRATCH_PW16_EXPANDED
+#undef SCRATCH_NETEQ_EXPAND
+
diff --git a/trunk/src/modules/audio_coding/neteq/packet_buffer.c b/trunk/src/modules/audio_coding/neteq/packet_buffer.c
new file mode 100644
index 0000000..8b9073c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/packet_buffer.c
@@ -0,0 +1,735 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the actual packet buffer data structure.
+ */
+
+#include "packet_buffer.h"
+
+#include <string.h> /* to define NULL */
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+#ifdef NETEQ_DELAY_LOGGING
+/* special code for offline delay logging */
+#include "delay_logging.h"
+#include <stdio.h>
+
+extern FILE *delay_fid2; /* file pointer to delay log file */
+extern WebRtc_UWord32 tot_received_packets;
+#endif /* NETEQ_DELAY_LOGGING */
+
+
+/*
+ * WebRtcNetEQ_PacketBufferInit(...)
+ *
+ * Partitions the caller-supplied memory area into the per-slot bookkeeping
+ * arrays (timestamps, payload pointers, sequence numbers, payload types,
+ * payload lengths, RCU counters, waiting times) followed by the payload
+ * storage area, and resets all buffer state.
+ *
+ * Input:
+ *   - maxNoOfPackets : maximum number of packet slots (must be 2..600)
+ *   - pw16_memory    : memory area for slot arrays and payload data
+ *   - memorySize     : size of pw16_memory in WebRtc_Word16 words
+ *                      (must be at least PBUFFER_MIN_MEMORY_SIZE)
+ *
+ * Output:
+ *   - bufferInst     : fully initialized packet buffer instance
+ *
+ * Return value       : 0 - Ok; PBUFFER_INIT_ERROR on invalid parameters
+ */
+int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
+                                 WebRtc_Word16 *pw16_memory, int memorySize)
+{
+    int i;
+    int pos = 0;
+
+    /* Sanity check */
+    if ((memorySize < PBUFFER_MIN_MEMORY_SIZE) || (pw16_memory == NULL)
+        || (maxNoOfPackets < 2) || (maxNoOfPackets > 600))
+    {
+        /* Invalid parameters */
+        return (PBUFFER_INIT_ERROR);
+    }
+
+    /* Clear the buffer instance */
+    /* NOTE(review): length is given in WebRtc_Word16 units, so this assumes
+     sizeof(PacketBuf_t) is an even number of bytes — TODO confirm. */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) bufferInst, 0,
+        sizeof(PacketBuf_t) / sizeof(WebRtc_Word16));
+
+    /* Clear the buffer memory */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) pw16_memory, 0, memorySize);
+
+    /* Set maximum number of packets */
+    bufferInst->maxInsertPositions = maxNoOfPackets;
+
+    /* Initialize array pointers */
+    /* After each pointer has been set, the index pos is advanced to point immediately
+     * after the recently allocated vector. Note that one step for the pos index
+     * corresponds to a WebRtc_Word16.
+     */
+
+    /* NOTE(review): 32-bit and pointer-sized arrays are overlaid on WebRtc_Word16
+     memory; this assumes pw16_memory is suitably aligned for those accesses —
+     confirm at the allocation site. */
+    bufferInst->timeStamp = (WebRtc_UWord32*) &pw16_memory[pos];
+    pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * WebRtc_UWord32 */
+
+    bufferInst->payloadLocation = (WebRtc_Word16**) &pw16_memory[pos];
+    pos += maxNoOfPackets * (sizeof(WebRtc_Word16*) / sizeof(WebRtc_Word16)); /* advance */
+
+    bufferInst->seqNumber = (WebRtc_UWord16*) &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_UWord16 */
+
+    bufferInst->payloadType = &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+
+    bufferInst->payloadLengthBytes = &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+
+    bufferInst->rcuPlCntr = &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+
+    bufferInst->waitingTime = (int*) (&pw16_memory[pos]);
+    /* Advance maxNoOfPackets * sizeof(waitingTime element). */
+    pos += maxNoOfPackets *
+        sizeof(*bufferInst->waitingTime) / sizeof(*pw16_memory);
+
+    /* The payload memory starts after the slot arrays */
+    bufferInst->startPayloadMemory = &pw16_memory[pos];
+    bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+    bufferInst->memorySizeW16 = (memorySize - pos); /* Remaining memory */
+
+    /* Initialize each payload slot as empty with infinite delay */
+    /* payloadType == -1 marks an unused slot */
+    for (i = 0; i < bufferInst->maxInsertPositions; i++)
+    {
+        bufferInst->payloadType[i] = -1;
+    }
+
+    /* Reset buffer parameters */
+    bufferInst->numPacketsInBuffer = 0;
+    bufferInst->packSizeSamples = 0;
+    bufferInst->insertPosition = 0;
+
+    /* Reset buffer statistics */
+    bufferInst->discardedPackets = 0;
+
+    return (0);
+}
+
+
+/*
+ * WebRtcNetEQ_PacketBufferFlush(...)
+ *
+ * Empties the packet buffer: marks every slot as unused and rewinds the
+ * payload memory pointer. A no-op (returning 0) on an uninitialized instance.
+ */
+int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst)
+{
+    int slot;
+
+    /* Refuse to touch an uninitialized instance; the state of the
+     struct members is unknown in that case. */
+    if (bufferInst->startPayloadMemory == NULL)
+    {
+        return (0);
+    }
+
+    /* Zero all payload lengths; a zero length marks a slot as free. */
+    WebRtcSpl_MemSetW16(bufferInst->payloadLengthBytes, 0, bufferInst->maxInsertPositions);
+
+    /* Rewind the payload memory and the insertion bookkeeping. */
+    bufferInst->numPacketsInBuffer = 0;
+    bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+    bufferInst->insertPosition = 0;
+
+    /* Wipe the remaining per-slot metadata. */
+    for (slot = 0; slot < bufferInst->maxInsertPositions; slot++)
+    {
+        bufferInst->payloadType[slot] = -1;
+        bufferInst->timeStamp[slot] = 0;
+        bufferInst->seqNumber[slot] = 0;
+    }
+
+    return (0);
+}
+
+
+/*
+ * WebRtcNetEQ_PacketBufferInsert(...)
+ *
+ * Inserts an RTP packet into the next buffer slot and copies its payload into
+ * the circular payload memory. If the buffer must be flushed to make room
+ * (corrupt slot chain, payload overlap, or no free slot), *flushed is set to
+ * 1; otherwise it stays 0.
+ *
+ * Input:
+ *   - RTPpacket : the packet to insert (payloadLen is in bytes)
+ *
+ * Output:
+ *   - flushed   : 1 if the buffer was flushed during insertion, else 0
+ *
+ * Return value : 0 - Ok; -1 - Error (uninitialized buffer, invalid payload
+ *                length, or corrupt buffer that could not be recovered)
+ */
+int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
+                                   WebRtc_Word16 *flushed)
+{
+    int nextPos;
+    int i;
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    int temp_var;
+#endif /* NETEQ_DELAY_LOGGING */
+
+    /* Initialize to "no flush" */
+    *flushed = 0;
+
+    /* Sanity check */
+    if (bufferInst->startPayloadMemory == NULL)
+    {
+        /* packet buffer has not been initialized */
+        return (-1);
+    }
+
+    /* Sanity check for payload length
+     (payloadLen in bytes and memory size in WebRtc_Word16) */
+    if ((RTPpacket->payloadLen > (bufferInst->memorySizeW16 << 1)) || (RTPpacket->payloadLen
+        <= 0))
+    {
+        /* faulty or too long payload length */
+        return (-1);
+    }
+
+    /* Find a position in the buffer for this packet */
+    if (bufferInst->numPacketsInBuffer != 0)
+    {
+        /* Get the next slot */
+        bufferInst->insertPosition++;
+        if (bufferInst->insertPosition >= bufferInst->maxInsertPositions)
+        {
+            /* "Wrap around" and start from the beginning */
+            bufferInst->insertPosition = 0;
+        }
+
+        /* Check if there is enough space for the new packet */
+        if (bufferInst->currentMemoryPos + ((RTPpacket->payloadLen + 1) >> 1)
+            >= &bufferInst->startPayloadMemory[bufferInst->memorySizeW16])
+        {
+            WebRtc_Word16 *tempMemAddress;
+
+            /*
+             * Payload does not fit at the end of the memory, put it in the beginning
+             * instead
+             */
+            bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+
+            /*
+             * Now, we must search for the next non-empty payload,
+             * finding the one with the lowest start address for the payload
+             */
+            tempMemAddress = &bufferInst->startPayloadMemory[bufferInst->memorySizeW16];
+            nextPos = -1;
+
+            /* Loop through all slots again */
+            for (i = 0; i < bufferInst->maxInsertPositions; i++)
+            {
+                /* Look for the non-empty slot with the lowest
+                 payload location address */
+                if (bufferInst->payloadLengthBytes[i] != 0 && bufferInst->payloadLocation[i]
+                    < tempMemAddress)
+                {
+                    tempMemAddress = bufferInst->payloadLocation[i];
+                    nextPos = i;
+                }
+            }
+
+            /* Check that we did find a previous payload */
+            if (nextPos == -1)
+            {
+                /* The buffer is corrupt => flush and return error */
+                WebRtcNetEQ_PacketBufferFlush(bufferInst);
+                *flushed = 1;
+                return (-1);
+            }
+        }
+        else
+        {
+            /* Payload fits at the end of memory. */
+
+            /* Find the next non-empty slot. */
+            nextPos = bufferInst->insertPosition + 1;
+
+            /* Increase nextPos until a non-empty slot is found or the end of the
+             array is reached. The index is tested BEFORE the array is read, so
+             that we never read one element past the end of payloadLengthBytes. */
+            while ((nextPos < bufferInst->maxInsertPositions)
+                && (bufferInst->payloadLengthBytes[nextPos] == 0))
+            {
+                nextPos++;
+            }
+
+            if (nextPos == bufferInst->maxInsertPositions)
+            {
+                /*
+                 * Reached the end of the array, so there must be a packet in the first
+                 * position instead
+                 */
+                nextPos = 0;
+
+                /* Increase nextPos until a non-empty slot is found; guaranteed to
+                 terminate since numPacketsInBuffer != 0 implies at least one
+                 occupied slot. */
+                while (bufferInst->payloadLengthBytes[nextPos] == 0)
+                {
+                    nextPos++;
+                }
+            }
+        } /* end if-else */
+
+        /*
+         * Check if the new payload will extend into a payload later in memory.
+         * If so, the buffer is full.
+         */
+        if ((bufferInst->currentMemoryPos <= bufferInst->payloadLocation[nextPos])
+            && ((&bufferInst->currentMemoryPos[(RTPpacket->payloadLen + 1) >> 1])
+                > bufferInst->payloadLocation[nextPos]))
+        {
+            /* Buffer is full, so the buffer must be flushed */
+            WebRtcNetEQ_PacketBufferFlush(bufferInst);
+            *flushed = 1;
+        }
+
+        if (bufferInst->payloadLengthBytes[bufferInst->insertPosition] != 0)
+        {
+            /* All positions are already taken and entire buffer should be flushed */
+            WebRtcNetEQ_PacketBufferFlush(bufferInst);
+            *flushed = 1;
+        }
+
+    }
+    else
+    {
+        /* Buffer is empty, just insert the packet at the beginning */
+        bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+        bufferInst->insertPosition = 0;
+    }
+
+    /* Insert packet in the found position */
+    if (RTPpacket->starts_byte1 == 0)
+    {
+        /* Payload is 16-bit aligned => just copy it */
+
+        WEBRTC_SPL_MEMCPY_W16(bufferInst->currentMemoryPos,
+            RTPpacket->payload, (RTPpacket->payloadLen + 1) >> 1);
+    }
+    else
+    {
+        /* Payload is not 16-bit aligned => align it during copy operation */
+        for (i = 0; i < RTPpacket->payloadLen; i++)
+        {
+            /* copy the (i+1)-th byte to the i-th byte */
+
+            WEBRTC_SPL_SET_BYTE(bufferInst->currentMemoryPos,
+                (WEBRTC_SPL_GET_BYTE(RTPpacket->payload, (i + 1))), i);
+        }
+    }
+
+    /* Copy the packet information */
+    bufferInst->payloadLocation[bufferInst->insertPosition] = bufferInst->currentMemoryPos;
+    bufferInst->payloadLengthBytes[bufferInst->insertPosition] = RTPpacket->payloadLen;
+    bufferInst->payloadType[bufferInst->insertPosition] = RTPpacket->payloadType;
+    bufferInst->seqNumber[bufferInst->insertPosition] = RTPpacket->seqNumber;
+    bufferInst->timeStamp[bufferInst->insertPosition] = RTPpacket->timeStamp;
+    /* Keep the RCU counter from the packet; overwriting it with zero would
+     disable the RCU preference in the lowest-timestamp search. */
+    bufferInst->rcuPlCntr[bufferInst->insertPosition] = RTPpacket->rcuPlCntr;
+    bufferInst->waitingTime[bufferInst->insertPosition] = 0;
+    /* Update buffer parameters */
+    bufferInst->numPacketsInBuffer++;
+    bufferInst->currentMemoryPos += (RTPpacket->payloadLen + 1) >> 1;
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    if (*flushed)
+    {
+        temp_var = NETEQ_DELAY_LOGGING_SIGNAL_FLUSH;
+        fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
+    }
+    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_RECIN;
+    fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
+    fwrite( &RTPpacket->timeStamp, sizeof(WebRtc_UWord32), 1, delay_fid2 );
+    fwrite( &RTPpacket->seqNumber, sizeof(WebRtc_UWord16), 1, delay_fid2 );
+    fwrite( &RTPpacket->payloadType, sizeof(int), 1, delay_fid2 );
+    fwrite( &RTPpacket->payloadLen, sizeof(WebRtc_Word16), 1, delay_fid2 );
+    tot_received_packets++;
+#endif /* NETEQ_DELAY_LOGGING */
+
+    return (0);
+}
+
+
+/*
+ * WebRtcNetEQ_PacketBufferExtract(...)
+ *
+ * Copies the packet stored in the given slot into RTPpacket, reports its
+ * accumulated waiting time, and releases the slot.
+ *
+ * Return value: 0 - Ok; PBUFFER_NOT_INITIALIZED, NETEQ_OTHER_ERROR or
+ *               PBUFFER_NONEXISTING_PACKET on failure.
+ */
+int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
+                                    int bufferPosition, int *waitingTime)
+{
+    WebRtc_Word16 lengthBytes;
+
+    /* The buffer must have been initialized before extraction. */
+    if (bufferInst->startPayloadMemory == NULL)
+    {
+        return (PBUFFER_NOT_INITIALIZED);
+    }
+
+    /* The slot index must lie within the slot array. */
+    if ((bufferPosition < 0) || (bufferPosition >= bufferInst->maxInsertPositions))
+    {
+        return (NETEQ_OTHER_ERROR);
+    }
+
+    lengthBytes = bufferInst->payloadLengthBytes[bufferPosition];
+
+    /* An empty slot carries nothing to extract. */
+    if (lengthBytes <= 0)
+    {
+        RTPpacket->payloadLen = 0; /* signal "no payload" to the caller */
+        return (PBUFFER_NONEXISTING_PACKET);
+    }
+
+    /* Copy the payload data; the byte length is rounded up to whole
+     WebRtc_Word16 units. */
+    WEBRTC_SPL_MEMCPY_W16((WebRtc_Word16*) RTPpacket->payload,
+        bufferInst->payloadLocation[bufferPosition],
+        (lengthBytes + 1) >> 1);
+
+    /* Transfer the per-packet metadata into the RTP struct. */
+    RTPpacket->payloadLen = lengthBytes;
+    RTPpacket->payloadType = bufferInst->payloadType[bufferPosition];
+    RTPpacket->seqNumber = bufferInst->seqNumber[bufferPosition];
+    RTPpacket->timeStamp = bufferInst->timeStamp[bufferPosition];
+    RTPpacket->rcuPlCntr = bufferInst->rcuPlCntr[bufferPosition];
+    RTPpacket->starts_byte1 = 0; /* stored payloads are always 16-bit aligned */
+    *waitingTime = bufferInst->waitingTime[bufferPosition];
+
+    /* Release the slot. */
+    bufferInst->payloadType[bufferPosition] = -1;
+    bufferInst->payloadLengthBytes[bufferPosition] = 0;
+    bufferInst->seqNumber[bufferPosition] = 0;
+    bufferInst->timeStamp[bufferPosition] = 0;
+    bufferInst->waitingTime[bufferPosition] = 0;
+    bufferInst->payloadLocation[bufferPosition] = bufferInst->startPayloadMemory;
+
+    /* One packet fewer in the buffer. */
+    bufferInst->numPacketsInBuffer--;
+
+    return (0);
+}
+
+/*
+ * WebRtcNetEQ_PacketBufferFindLowestTimestamp(...)
+ *
+ * Finds the occupied slot whose timestamp is closest to (i.e., the smallest
+ * signed difference from) current_time_stamp, preferring the packet with the
+ * lowest RCU counter on a timestamp tie. When erase_old_packets is non-zero,
+ * packets that are moderately old (diff in (-30000, 0), which also accounts
+ * for timestamp wrap-around) are discarded along the way.
+ *
+ * Outputs *time_stamp, *buffer_position and *payload_type for the winner;
+ * *buffer_position == -1 and *payload_type == -1 mean no packet was found.
+ *
+ * Return value: 0 - Ok; PBUFFER_NOT_INITIALIZED on an uninitialized buffer.
+ */
+int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
+                                                uint32_t current_time_stamp,
+                                                uint32_t* time_stamp,
+                                                int* buffer_position,
+                                                int erase_old_packets,
+                                                int16_t* payload_type) {
+  int32_t best_diff = WEBRTC_SPL_WORD32_MAX;  /* Smallest diff seen so far. */
+  int16_t best_rcu = WEBRTC_SPL_WORD16_MAX;   /* RCU counter of the winner. */
+  int32_t diff;
+  int slot;
+
+  if (buffer_inst->startPayloadMemory == NULL) {
+    /* Packet buffer has not been initialized. */
+    return PBUFFER_NOT_INITIALIZED;
+  }
+
+  /* Default outputs: nothing found. */
+  *time_stamp = 0;
+  *payload_type = -1;
+  *buffer_position = -1;
+
+  /* Nothing to search in an empty buffer. */
+  if (buffer_inst->numPacketsInBuffer <= 0) {
+    return 0;
+  }
+
+  /* Single pass over all slots; discarding (when enabled) and candidate
+   * selection are mutually exclusive per slot. */
+  for (slot = 0; slot < buffer_inst->maxInsertPositions; ++slot) {
+    /* Signed difference between this slot and current_time_stamp. */
+    diff = (int32_t)(buffer_inst->timeStamp[slot] - current_time_stamp);
+
+    if (erase_old_packets
+        && (diff < 0)             /* Payload is too old */
+        && (diff > -30000)        /* Account for TS wrap-around. */
+        && (buffer_inst->payloadLengthBytes[slot] > 0)) {  /* Payload exists. */
+      /* Throw the old packet away: clear the slot and update counters. */
+      buffer_inst->payloadType[slot] = -1;
+      buffer_inst->payloadLengthBytes[slot] = 0;
+      buffer_inst->numPacketsInBuffer--;
+      buffer_inst->discardedPackets++;  /* In-call statistics. */
+      continue;  /* A discarded packet is never a candidate. */
+    }
+
+    if ((buffer_inst->payloadLengthBytes[slot] > 0)
+        && ((diff < best_diff)
+            || ((diff == best_diff)
+                && (buffer_inst->rcuPlCntr[slot] < best_rcu)))) {
+      /* New best: smaller diff, or the same diff with a better (lower)
+       * RCU counter, and the payload exists. */
+      *buffer_position = slot;
+      best_diff = diff;
+      *payload_type = buffer_inst->payloadType[slot];
+      best_rcu = buffer_inst->rcuPlCntr[slot];
+    }
+  }
+
+  /* Report the winner's timestamp, if any slot qualified. */
+  if (*buffer_position >= 0) {
+    *time_stamp = buffer_inst->timeStamp[*buffer_position];
+  }
+
+  return 0;
+}
+
+/*
+ * WebRtcNetEQ_PacketBufferGetSize(...)
+ *
+ * Estimates the buffered audio, in samples, as the number of occupied slots
+ * times the sample count of the most recently decoded packet. Never negative.
+ */
+WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t *bufferInst)
+{
+    int slot;
+    int occupied = 0;
+    WebRtc_Word32 totalSamples;
+
+    /* Count the slots holding a payload (non-zero length). */
+    for (slot = 0; slot < bufferInst->maxInsertPositions; slot++)
+    {
+        if (bufferInst->payloadLengthBytes[slot] != 0)
+        {
+            occupied++;
+        }
+    }
+
+    /* Packet count times the size of the latest decoded packet. */
+    totalSamples = WEBRTC_SPL_MUL_16_16(bufferInst->packSizeSamples, occupied);
+
+    /* Sanity check; the size cannot be negative. */
+    if (totalSamples < 0)
+    {
+        totalSamples = 0;
+    }
+
+    return totalSamples;
+}
+
+/*
+ * WebRtcNetEQ_IncrementWaitingTimes(...)
+ *
+ * Adds one tick to the waiting-time counter of every occupied slot.
+ */
+void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst) {
+  int slot = 0;
+  while (slot < buffer_inst->maxInsertPositions) {
+    /* Only slots with a payload (non-zero length) accumulate waiting time. */
+    if (buffer_inst->payloadLengthBytes[slot] != 0) {
+      buffer_inst->waitingTime[slot]++;
+    }
+    ++slot;
+  }
+}
+
+/*
+ * WebRtcNetEQ_GetDefaultCodecSettings(...)
+ *
+ * Computes the packet-buffer memory (in bytes) and slot count needed to hold
+ * the worst-case payloads of the supplied codec list, then adds the per-slot
+ * bookkeeping overhead of the PacketBuf_t arrays.
+ *
+ * Input:
+ *   - codecID    : array of codecs to dimension the buffer for
+ *   - noOfCodecs : number of entries in codecID
+ *
+ * Output:
+ *   - maxBytes   : required payload + bookkeeping memory in bytes
+ *   - maxSlots   : required number of packet slots
+ *
+ * Return value   : 0 - Ok; CODEC_DB_UNKNOWN_CODEC if any codec is unknown
+ *                  (outputs still reflect the codecs that were recognized)
+ */
+int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
+                                        int noOfCodecs, int *maxBytes, int *maxSlots)
+{
+    int i;
+    int ok = 0;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word16 codecBytes;
+    WebRtc_Word16 codecBuffers;
+
+    /* Start from zero; the loop keeps the maxima over all codecs. */
+    *maxBytes = 0;
+    *maxSlots = 0;
+
+    for (i = 0; i < noOfCodecs; i++)
+    {
+        /* Look up the payload memory and slot-count demand for this codec. */
+        switch (codecID[i])
+        {
+            case kDecoderPCMu:
+            case kDecoderPCMa:
+                codecBytes = 1680; /* Up to 210ms @ 64kbps */
+                codecBuffers = 30; /* Down to 5ms frames */
+                break;
+            case kDecoderILBC:
+                codecBytes = 380; /* 200ms @ 15.2kbps (20ms frames) */
+                codecBuffers = 10;
+                break;
+            case kDecoderISAC:
+                codecBytes = 960; /* 240ms @ 32kbps (60ms frames) */
+                codecBuffers = 8;
+                break;
+            case kDecoderISACswb:
+                codecBytes = 1560; /* 240ms @ 52kbps (30ms frames) */
+                codecBuffers = 8;
+                break;
+            case kDecoderPCM16B:
+                codecBytes = 3360; /* 210ms */
+                codecBuffers = 15;
+                break;
+            case kDecoderPCM16Bwb:
+                codecBytes = 6720; /* 210ms */
+                codecBuffers = 15;
+                break;
+            case kDecoderPCM16Bswb32kHz:
+                codecBytes = 13440; /* 210ms */
+                codecBuffers = 15;
+                break;
+            case kDecoderPCM16Bswb48kHz:
+                codecBytes = 20160; /* 210ms */
+                codecBuffers = 15;
+                break;
+            case kDecoderG722:
+                codecBytes = 1680; /* 210ms @ 64kbps */
+                codecBuffers = 15;
+                break;
+            case kDecoderRED:
+            case kDecoderAVT:
+            case kDecoderCNG:
+                codecBytes = 0; /* Should not be max... */
+                codecBuffers = 0;
+                break;
+            case kDecoderG729:
+                codecBytes = 210; /* 210ms @ 8kbps */
+                codecBuffers = 20; /* max 200ms supported for 10ms frames */
+                break;
+            case kDecoderG729_1:
+                codecBytes = 840; /* 210ms @ 32kbps */
+                codecBuffers = 10; /* max 200ms supported for 20ms frames */
+                break;
+            case kDecoderG726_16:
+                codecBytes = 400; /* 200ms @ 16kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG726_24:
+                codecBytes = 600; /* 200ms @ 24kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG726_32:
+                codecBytes = 800; /* 200ms @ 32kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG726_40:
+                codecBytes = 1000; /* 200ms @ 40kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG722_1_16:
+                codecBytes = 420; /* 210ms @ 16kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG722_1_24:
+                codecBytes = 630; /* 210ms @ 24kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG722_1_32:
+                codecBytes = 840; /* 210ms @ 32kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG722_1C_24:
+                codecBytes = 630; /* 210ms @ 24kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG722_1C_32:
+                codecBytes = 840; /* 210ms @ 32kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderG722_1C_48:
+                codecBytes = 1260; /* 210ms @ 48kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderSPEEX_8:
+            case kDecoderSPEEX_16:
+            case kDecoderCELT_32:
+                codecBytes = 1250; /* 210ms @ 50kbps */
+                codecBuffers = 10;
+                break;
+            case kDecoderGSMFR:
+                codecBytes = 340; /* 200ms */
+                codecBuffers = 10;
+                break;
+            case kDecoderAMR:
+                codecBytes = 384; /* 240ms @ 12.2kbps+headers (60ms frames) */
+                codecBuffers = 10;
+                break;
+            case kDecoderAMRWB:
+                codecBytes = 744;
+                codecBuffers = 10;
+                break;
+            case kDecoderArbitrary:
+                codecBytes = 6720; /* Assume worst case uncompressed WB 210ms */
+                codecBuffers = 15;
+                break;
+            default:
+                /* Unknown codec */
+                codecBytes = 0;
+                codecBuffers = 0;
+                ok = CODEC_DB_UNKNOWN_CODEC;
+                break;
+        }
+
+        /* Keep the largest demand seen so far. */
+        *maxBytes = WEBRTC_SPL_MAX((*maxBytes), codecBytes);
+        *maxSlots = WEBRTC_SPL_MAX((*maxSlots), codecBuffers);
+
+    } /* end of for loop */
+
+    /*
+     * Add size needed by the additional pointers for each slot inside struct,
+     * as indicated on each line below.
+     */
+    w16_tmp = (sizeof(WebRtc_UWord32) /* timeStamp */
+    + sizeof(WebRtc_Word16*) /* payloadLocation */
+    + sizeof(WebRtc_UWord16) /* seqNumber */
+    + sizeof(WebRtc_Word16)  /* payloadType */
+    + sizeof(WebRtc_Word16)  /* payloadLengthBytes */
+    + sizeof(WebRtc_Word16)  /* rcuPlCntr   */
+    + sizeof(int));          /* waitingTime */
+    /* Add the extra size per slot to the memory count */
+    *maxBytes += w16_tmp * (*maxSlots);
+
+    return ok;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/packet_buffer.h b/trunk/src/modules/audio_coding/neteq/packet_buffer.h
new file mode 100644
index 0000000..662f8af
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/packet_buffer.h
@@ -0,0 +1,220 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Interface for the actual packet buffer data structure.
+ */
+
+#ifndef PACKET_BUFFER_H
+#define PACKET_BUFFER_H
+
+#include "typedefs.h"
+
+#include "webrtc_neteq.h"
+#include "rtp.h"
+
+/* Define minimum allowed buffer memory, in 16-bit words */
+#define PBUFFER_MIN_MEMORY_SIZE	150
+
+/****************************/
+/* The packet buffer struct */
+/****************************/
+
+typedef struct
+{
+
+    /* Variables common to the entire buffer */
+    WebRtc_UWord16 packSizeSamples; /* packet size in samples of last decoded packet */
+    WebRtc_Word16 *startPayloadMemory; /* pointer to the payload memory */
+    int memorySizeW16; /* the size (in WebRtc_Word16) of the payload memory */
+    WebRtc_Word16 *currentMemoryPos; /* The memory position to insert next payload */
+    int numPacketsInBuffer; /* The number of packets in the buffer */
+    int insertPosition; /* The position to insert next packet */
+    int maxInsertPositions; /* Maximum number of packets allowed */
+
+    /* Arrays with one entry per packet slot */
+    /* NOTE: If these are changed, the changes must be accounted for at the end of
+     the function WebRtcNetEQ_GetDefaultCodecSettings(). */
+    WebRtc_UWord32 *timeStamp; /* Timestamp in slot n */
+    WebRtc_Word16 **payloadLocation; /* Memory location of payload in slot n */
+    WebRtc_UWord16 *seqNumber; /* Sequence number in slot n */
+    WebRtc_Word16 *payloadType; /* Payload type of packet in slot n */
+    WebRtc_Word16 *payloadLengthBytes; /* Payload length of packet in slot n */
+    WebRtc_Word16 *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
+     2 for redundant payload */
+    int *waitingTime; /* Waiting-time counter for slot n; incremented once per
+     call to WebRtcNetEQ_IncrementWaitingTimes() (see below) */
+
+    /* Statistics counter */
+    WebRtc_UWord16 discardedPackets; /* Number of discarded packets */
+
+} PacketBuf_t;
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferInit(...)
+ *
+ * This function initializes the packet buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance to be initialized
+ *		- maxNoOfPackets : Maximum number of packets that buffer should hold
+ *		- pw16_memory	: Pointer to the storage memory for the payloads
+ *		- memorySize	: The size of the payload memory (in WebRtc_Word16)
+ *
+ * Output:
+ *      - bufferInst    : Updated buffer instance
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
+                                 WebRtc_Word16 *pw16_memory, int memorySize);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferFlush(...)
+ *
+ * This function flushes all the packets in the buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance to be flushed
+ *
+ * Output:
+ *      - bufferInst    : Flushed buffer instance
+ *
+ * Return value			:  0 - Ok
+ */
+
+int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferInsert(...)
+ *
+ * This function inserts an RTP packet into the packet buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance
+ *		- RTPpacket		: An RTP packet struct (with payload, sequence
+ *						  number, etc.)
+ *
+ * Output:
+ *      - bufferInst    : Updated buffer instance
+ *		- flushed		: 1 if buffer was flushed, 0 otherwise
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
+                                   WebRtc_Word16 *flushed);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferExtract(...)
+ *
+ * This function extracts a payload from the buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance
+ *		- bufferPosition: Position of the packet that should be extracted
+ *
+ * Output:
+ *		- RTPpacket		: An RTP packet struct (with payload, sequence
+ *						  number, etc)
+ *      - bufferInst    : Updated buffer instance
+ *      - waitingTime   : Waiting-time counter of the extracted packet's
+ *                        slot (see the waitingTime array in PacketBuf_t)
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
+                                    int bufferPosition, int *waitingTime);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferFindLowestTimestamp(...)
+ *
+ * This function finds the next packet with the lowest timestamp.
+ *
+ * Input:
+ *       - buffer_inst        : Buffer instance.
+ *       - current_time_stamp : The timestamp to compare packet timestamps with.
+ *       - erase_old_packets  : If non-zero, erase packets older than currentTS.
+ *
+ * Output:
+ *       - time_stamp         : Lowest timestamp that was found.
+ *       - buffer_position    : Position of this packet (-1 if there are no
+ *                              packets in the buffer).
+ *       - payload_type       : Payload type of the found payload.
+ *
+ * Return value               :  0 - Ok;
+ *                             < 0 - Error.
+ */
+
+int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
+                                                uint32_t current_time_stamp,
+                                                uint32_t* time_stamp,
+                                                int* buffer_position,
+                                                int erase_old_packets,
+                                                int16_t* payload_type);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferGetSize(...)
+ *
+ * Calculate and return an estimate of the total data length (in samples)
+ * currently in the buffer. The estimate is calculated as the number of
+ * packets currently in the buffer (which does not have any remaining waiting
+ * time), multiplied with the number of samples obtained from the last
+ * decoded packet.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance
+ *
+ * Return value			: The buffer size in samples
+ */
+
+WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t *bufferInst);
+
+/****************************************************************************
+ * WebRtcNetEQ_IncrementWaitingTimes(...)
+ *
+ * Increment the waiting time for all packets in the buffer by one.
+ *
+ * Input:
+ *    - bufferInst  : Buffer instance
+ *
+ * Return value     : n/a
+ */
+
+void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_GetDefaultCodecSettings(...)
+ *
+ * Calculates a recommended buffer size for a specific set of codecs.
+ *
+ * Input:
+ *		- codecID	    : An array of codec types that will be used
+ *      - noOfCodecs    : Number of codecs in array codecID
+ *
+ * Output:
+ *		- maxBytes	    : Recommended buffer memory size in bytes
+ *      - maxSlots      : Recommended number of slots in buffer
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
+                                        int noOfCodecs, int *maxBytes, int *maxSlots);
+
+#endif /* PACKET_BUFFER_H */
diff --git a/trunk/src/modules/audio_coding/neteq/peak_detection.c b/trunk/src/modules/audio_coding/neteq/peak_detection.c
new file mode 100644
index 0000000..678c7f9
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/peak_detection.c
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the peak detection used for finding correlation peaks.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+/* Table of constants used in parabolic fit function WebRtcNetEQ_PrblFit.
+ * Each row is {limit, cDen, cNum}: column 0 is the decision limit that
+ * 120*Num is compared against (scaled by -Den) when bracketing the
+ * sub-sample peak position; columns 1 and 2 are the coefficients applied
+ * to Den and Num, respectively, when computing the interpolated peak
+ * value (see WebRtcNetEQ_PrblFit below). */
+const WebRtc_Word16 WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
+                                                    { 150, 50, 80 }, { 160, 57, 85 },
+                                                    { 180, 72, 96 }, { 200, 89, 107 },
+                                                    { 210, 98, 112 }, { 220, 108, 117 },
+                                                    { 240, 128, 128 }, { 260, 150, 139 },
+                                                    { 270, 162, 144 }, { 280, 174, 149 },
+                                                    { 300, 200, 160 }, { 320, 228, 171 },
+                                                    { 330, 242, 176 }, { 340, 257, 181 },
+                                                    { 360, 288, 192 } };
+
+/****************************************************************************
+ * WebRtcNetEQ_PeakDetection(...)
+ *
+ * Finds the w16_nmbPeaks largest peaks in pw16_data (length w16_dataLen).
+ * For each peak i, the peak position -- refined by parabolic interpolation
+ * in WebRtcNetEQ_PrblFit and rescaled to the full sampling rate by a factor
+ * of 2*fs_mult -- is written to pw16_winIndex[i], and the (interpolated)
+ * peak value to pw16_winValue[i]. After each peak is found, a small window
+ * around it is zeroed in pw16_data (destructive!) so the next iteration
+ * finds the next-largest peak.
+ *
+ * Always returns 0.
+ */
+WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
+                                        WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
+                                        WebRtc_Word16 *pw16_winIndex,
+                                        WebRtc_Word16 *pw16_winValue)
+{
+    /* Local variables */
+    int i;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word16 indMin = 0;
+    WebRtc_Word16 indMax = 0;
+
+    /* Peak detection */
+
+    for (i = 0; i <= (w16_nmbPeaks - 1); i++)
+    {
+        if (w16_nmbPeaks == 1)
+        {
+            /*
+             * Single peak
+             * The parabola fit assumes that an extra point is available; worst case it gets
+             * a zero on the high end of the signal.
+             */
+            w16_dataLen++;
+        }
+
+        /* NOTE(review): only the first (w16_dataLen - 1) elements are searched,
+         presumably to leave room for the extra point used by the parabola
+         fit -- confirm against WebRtcSpl_MaxIndexW16's length convention. */
+        pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (WebRtc_Word16) (w16_dataLen - 1));
+
+        if (i != w16_nmbPeaks - 1)
+        {
+            /* Determine the window (peak position +/- 2 samples, clamped to
+             the valid data range) that will be zeroed before searching for
+             the next peak. */
+            w16_tmp = pw16_winIndex[i] - 2; /* *fs_mult; */
+            indMin = WEBRTC_SPL_MAX(0, w16_tmp);
+            w16_tmp = pw16_winIndex[i] + 2; /* *fs_mult; */
+            w16_tmp2 = w16_dataLen - 1;
+            indMax = WEBRTC_SPL_MIN(w16_tmp2, w16_tmp);
+        }
+
+        if ((pw16_winIndex[i] != 0) && (pw16_winIndex[i] != (w16_dataLen - 2)))
+        {
+            /* Interior peak: parabola fit on the three points around the maximum. */
+            /* Parabola fit*/
+            WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]), &(pw16_winIndex[i]),
+                &(pw16_winValue[i]), fs_mult);
+        }
+        else
+        {
+            if (pw16_winIndex[i] == (w16_dataLen - 2))
+            {
+                /* Peak at the next-to-last usable sample. */
+                if (pw16_data[pw16_winIndex[i]] > pw16_data[pw16_winIndex[i] + 1])
+                {
+                    /* Local maximum confirmed; the parabola fit still applies. */
+                    WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]),
+                        &(pw16_winIndex[i]), &(pw16_winValue[i]), fs_mult);
+                }
+                else if (pw16_data[pw16_winIndex[i]] <= pw16_data[pw16_winIndex[i] + 1])
+                {
+                    /* Signal still rising at the edge: approximate the peak
+                     linearly between the last two samples. */
+                    pw16_winValue[i] = (pw16_data[pw16_winIndex[i]]
+                        + pw16_data[pw16_winIndex[i] + 1]) >> 1; /* lin approx */
+                    pw16_winIndex[i] = (pw16_winIndex[i] * 2 + 1) * fs_mult;
+                }
+            }
+            else
+            {
+                /* Peak at the first sample: no interpolation possible; just
+                 rescale the index to the full sampling rate. */
+                pw16_winValue[i] = pw16_data[pw16_winIndex[i]];
+                pw16_winIndex[i] = pw16_winIndex[i] * 2 * fs_mult;
+            }
+        }
+
+        if (i != w16_nmbPeaks - 1)
+        {
+            /* Zero out the window around the found peak before the next pass. */
+            WebRtcSpl_MemSetW16(&(pw16_data[indMin]), 0, (indMax - indMin + 1));
+            /* for (j=indMin; j<=indMax; j++) pw16_data[j] = 0; */
+        }
+    }
+
+    return 0;
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_PrblFit(...)
+ *
+ * Parabolic (3-point) fit around a correlation maximum. pw16_3pts points to
+ * the sample just before the maximum, i.e. pw16_3pts[1] is the peak sample.
+ * On output, *pw16_Ind is rescaled to the full sampling rate (multiplied by
+ * 2*fs_mult) and adjusted by the interpolated sub-sample offset, while
+ * *pw16_outVal receives the interpolated peak value.
+ *
+ * Always returns 0.
+ */
+WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
+                                  WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult)
+{
+    /* Variables */
+    WebRtc_Word32 Num, Den;
+    WebRtc_Word32 temp;
+    WebRtc_Word16 flag, stp, strt, lmt;
+    WebRtc_UWord16 PFind[13];
+
+    /* PFind maps an offset in the fs_mult domain to a row of the constant
+     table WebRtcNetEQ_kPrblCf; rows are {decision limit, coefficient for
+     Den, coefficient for Num}. */
+    if (fs_mult == 1)
+    {
+        PFind[0] = 0;
+        PFind[1] = 8;
+        PFind[2] = 16;
+    }
+    else if (fs_mult == 2)
+    {
+        PFind[0] = 0;
+        PFind[1] = 4;
+        PFind[2] = 8;
+        PFind[3] = 12;
+        PFind[4] = 16;
+    }
+    else if (fs_mult == 4)
+    {
+        PFind[0] = 0;
+        PFind[1] = 2;
+        PFind[2] = 4;
+        PFind[3] = 6;
+        PFind[4] = 8;
+        PFind[5] = 10;
+        PFind[6] = 12;
+        PFind[7] = 14;
+        PFind[8] = 16;
+    }
+    else
+    {
+        /* NOTE(review): this branch appears tailored to fs_mult == 6
+         (48 kHz) -- 13 entries cover flag offsets 0..2*6; any other
+         unlisted fs_mult would make the PFind/kPrblCf indexing below run
+         out of range. Confirm allowed fs_mult values (1, 2, 4, 6). */
+        PFind[0] = 0;
+        PFind[1] = 1;
+        PFind[2] = 3;
+        PFind[3] = 4;
+        PFind[4] = 5;
+        PFind[5] = 7;
+        PFind[6] = 8;
+        PFind[7] = 9;
+        PFind[8] = 11;
+        PFind[9] = 12;
+        PFind[10] = 13;
+        PFind[11] = 15;
+        PFind[12] = 16;
+    }
+
+    /*	Num = -3*pw16_3pts[0] + 4*pw16_3pts[1] - pw16_3pts[2]; */
+    /*	Den =    pw16_3pts[0] - 2*pw16_3pts[1] + pw16_3pts[2]; */
+    Num = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],-3) + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],4)
+        - pw16_3pts[2];
+
+    Den = pw16_3pts[0] + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],-2) + pw16_3pts[2];
+
+    /* temp = 120*Num is compared against -Den*limit values below to bracket
+     the sub-sample position of the parabola's extremum. */
+    temp = (WebRtc_Word32) WEBRTC_SPL_MUL(Num, (WebRtc_Word32)120); /* need 32_16 really */
+    flag = 1;
+    stp = WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0] - WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0];
+    strt = (WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0]
+        + WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0]) >> 1;
+
+    if (temp < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)strt))
+    {
+        /* Sub-sample offset is negative: step downwards through the limit
+         table; the resulting index is decreased by flag. */
+        lmt = strt - stp;
+        while (flag)
+        {
+            if ((flag == fs_mult) || (temp
+                > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
+            {
+                /* Interpolated value = (cDen*Den + cNum*Num + 256*pw16_3pts[0]) >> 8 */
+                *pw16_outVal
+                    = (WebRtc_Word16)
+                    (((WebRtc_Word32) ((WebRtc_Word32) WEBRTC_SPL_MUL(Den,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
+                        + (WebRtc_Word32) WEBRTC_SPL_MUL(Num,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
+                        + WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256))) >> 8);
+                *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) - flag;
+                flag = 0;
+            }
+            else
+            {
+                flag++;
+                lmt -= stp;
+            }
+        }
+    }
+    else if (temp > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)(strt+stp)))
+    {
+        /* Sub-sample offset is positive: step upwards through the limit
+         table; the resulting index is increased by flag. */
+        lmt = strt + (stp << 1);
+        while (flag)
+        {
+            if ((flag == fs_mult) || (temp
+                < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
+            {
+                WebRtc_Word32 temp_term_1, temp_term_2, temp_term_3;
+
+                /* Interpolated value = (cDen*Den + cNum*Num + 256*pw16_3pts[0]) >> 8 */
+                temp_term_1 = WEBRTC_SPL_MUL(Den,
+                    (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
+                temp_term_2 = WEBRTC_SPL_MUL(Num,
+                    (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
+                temp_term_3 = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256);
+
+                *pw16_outVal
+                    = (WebRtc_Word16) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
+
+                *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) + flag;
+                flag = 0;
+            }
+            else
+            {
+                flag++;
+                lmt += stp;
+            }
+        }
+
+    }
+    else
+    {
+        /* Extremum is (close to) the center point: keep the middle value
+         and just rescale the index to the full sampling rate. */
+        *pw16_outVal = pw16_3pts[1];
+        *pw16_Ind = (*pw16_Ind) * 2 * fs_mult;
+    }
+
+    return 0;
+}
+
+
diff --git a/trunk/src/modules/audio_coding/neteq/preemptive_expand.c b/trunk/src/modules/audio_coding/neteq/preemptive_expand.c
new file mode 100644
index 0000000..167bc3a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/preemptive_expand.c
@@ -0,0 +1,524 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the Pre-emptive Expand algorithm that is used to increase
+ * the delay by repeating a part of the audio stream.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+/* Correlation length and lag search range for Pre-emptive Expand. Per the
+ * comment at the correlation call in WebRtcNetEQ_PreEmptiveExpand below,
+ * these are in the 4 kHz downsampled domain: correlation over 50 samples,
+ * lags 10..60. */
+#define PREEMPTIVE_CORR_LEN 50
+#define PREEMPTIVE_MIN_LAG 10
+#define PREEMPTIVE_MAX_LAG 60
+#define PREEMPTIVE_DOWNSAMPLED_LEN (PREEMPTIVE_CORR_LEN + PREEMPTIVE_MAX_LAG)
+
+/* Scratch usage:
+
+ Type             Name                 size            startpos         endpos
+ WebRtc_Word16    pw16_downSampSpeech  110             0                109
+ WebRtc_Word32    pw32_corr            2*50            110              209
+ WebRtc_Word16    pw16_corr            50              0                49
+
+ Total: 110+2*50
+ */
+
+/* Offsets (in WebRtc_Word16 units) into the caller-provided scratch vector.
+ * Note that pw16_corr starts at offset 0, overlapping pw16_downSampSpeech --
+ * presumably safe because the downsampled speech is no longer needed once
+ * the correlation has been converted to 16 bits; see the table above.
+ * TODO(review): confirm the overlap is intentional. */
+#define     SCRATCH_PW16_DS_SPEECH           0
+#define     SCRATCH_PW32_CORR                PREEMPTIVE_DOWNSAMPLED_LEN
+#define     SCRATCH_PW16_CORR                0
+
+/****************************************************************************
+ * WebRtcNetEQ_PreEmptiveExpand(...)
+ *
+ * This function tries to extend the audio data by repeating one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low. The algorithm is the
+ * reciprocal of the Accelerate algorithm.
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *      - decoded       : Pointer to newly decoded speech.
+ *      - len           : Length of decoded speech.
+ *      - oldDataLen    : Length of the part of decoded that has already been played out.
+ *      - BGNonly       : If non-zero, Pre-emptive Expand will only copy 
+ *                        the first DEFAULT_TIME_ADJUST seconds of the
+ *                        input and append to the end. No signal matching is
+ *                        done.
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *      - outData       : Pointer to a memory space where the output data
+ *                        should be stored. The vector must be at least
+ *                        min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
+ *                        elements long.
+ *      - pw16_len      : Number of samples written to outData.
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
+#ifdef SCRATCH
+                                 WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                 const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
+                                 WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                                 WebRtc_Word16 BGNonly)
+{
+
+#ifdef SCRATCH
+    /* Use scratch memory for internal temporary vectors */
+    WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
+    WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
+#else
+    /* Allocate memory for temporary vectors */
+    WebRtc_Word16 pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
+    WebRtc_Word32 pw32_corr[PREEMPTIVE_CORR_LEN];
+    WebRtc_Word16 pw16_corr[PREEMPTIVE_CORR_LEN];
+#endif
+    WebRtc_Word16 w16_decodedMax = 0;
+    WebRtc_Word16 w16_tmp = 0;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word32 w32_tmp2;
+
+    const WebRtc_Word16 w16_startLag = PREEMPTIVE_MIN_LAG;
+    const WebRtc_Word16 w16_endLag = PREEMPTIVE_MAX_LAG;
+    const WebRtc_Word16 w16_corrLen = PREEMPTIVE_CORR_LEN;
+    const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
+    WebRtc_Word16 *pw16_vectmp;
+    WebRtc_Word16 w16_inc, w16_startfact;
+    WebRtc_Word16 w16_bestIndex, w16_bestVal;
+    WebRtc_Word16 w16_VAD = 1;
+    WebRtc_Word16 fsMult;
+    WebRtc_Word16 fsMult120;
+    WebRtc_Word32 w32_en1, w32_en2, w32_cc;
+    WebRtc_Word16 w16_en1, w16_en2;
+    WebRtc_Word16 w16_en1Scale, w16_en2Scale;
+    WebRtc_Word16 w16_sqrtEn1En2;
+    WebRtc_Word16 w16_bestCorr = 0;
+    int ok;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
+
+    /* Pre-calculate common multiplication with fsMult */
+    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
+
+    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
+
+    /*
+     * Sanity check for len variable; must be (almost) 30 ms (120*fsMult + max(bestIndex)).
+     * Also, the new part must be at least .625 ms (w16_overlap).
+     */
+    if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
+        - inst->ExpandInst.w16_overlap)
+    {
+        /* Length of decoded data too short */
+        inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+        *pw16_len = len;
+
+        
+        /* simply move all data from decoded to outData */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return NETEQ_OTHER_ERROR;
+    }
+
+    /***********************************/
+    /* Special operations for BGN only */
+    /***********************************/
+
+    /* Check if "background noise only" flag is set */
+    if (BGNonly)
+    {
+        /* special operation for BGN only; simply insert a chunk of data */
+        w16_bestIndex = DEFAULT_TIME_ADJUST * (fsMult << 3); /* X*fs/1000 */
+
+        /* Sanity check for bestIndex */
+        if (w16_bestIndex > len)
+        { /* not good, do nothing instead */
+            inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+            *pw16_len = len;
+
+
+            /* simply move all data from decoded to outData */
+
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /* set length parameter */
+        *pw16_len = len + w16_bestIndex;
+
+
+        /* copy to output */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
+        WEBRTC_SPL_MEMCPY_W16(&pw16_outData[len], pw16_decoded, w16_bestIndex);
+
+        /* set mode */
+        inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
+
+        /* update statistics */
+        inst->statInst.preemptiveLength += w16_bestIndex;
+
+        return 0;
+    } /* end of special code for BGN mode */
+
+#ifdef NETEQ_STEREO
+
+    /* Sanity for msInfo */
+    if (msInfo == NULL)
+    {
+        /* this should not happen here */
+        return MASTER_SLAVE_ERROR;
+    }
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Find correlation lag only for non-slave instances */
+
+#endif
+
+        /****************************************************************/
+        /* Find the strongest correlation lag by downsampling to 4 kHz, */
+        /* calculating correlation for downsampled signal and finding   */
+        /* the strongest correlation peak.                              */
+        /****************************************************************/
+
+        /* find maximum absolute value */
+        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+        /* downsample the decoded speech to 4 kHz */
+        ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
+            PREEMPTIVE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
+        if (ok != 0)
+        {
+            /* error */
+            inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+            *pw16_len = len;
+
+
+            /* simply move all data from decoded to outData */
+
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /*
+         * Set scaling factor for cross correlation to protect against
+         * overflow (log2(50) => 6)
+         */
+        w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */WebRtcNetEQ_CrossCorr(
+            pw32_corr, &pw16_downSampSpeech[w16_endLag],
+            &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
+            (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);
+
+        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
+        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
+
+        /* Find limits for peak finding, in order to avoid overful NetEQ algorithm buffer. */
+        /* Calculate difference between MAX_OUTPUT_SIZE and len in 4 kHz domain. */
+        w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) (NETEQ_MAX_OUTPUT_SIZE - len),
+            (WebRtc_Word16) (fsMult << 1)) - w16_startLag;
+        w16_tmp = WEBRTC_SPL_MIN(w16_corrLen, w16_tmp); /* no more than corrLen = 50 */
+
+#ifdef NETEQ_STEREO
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Find the strongest correlation peak by using the parabolic fit method */
+        WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
+        /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+        /* Compensate bestIndex for displaced starting position */
+        w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+        /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+        msInfo->bestIndex = w16_bestIndex;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        if (msInfo->extraInfo == PE_EXP_FAIL)
+        {
+            /* Master has signaled an unsuccessful preemptive expand */
+            w16_bestIndex = 0;
+        }
+        else
+        {
+            /* Get best index from master */
+            w16_bestIndex = msInfo->bestIndex;
+        }
+    }
+    else
+    {
+        /* Invalid mode */
+        return (MASTER_SLAVE_ERROR);
+    }
+
+#else /* NETEQ_STEREO */
+
+    /* Find the strongest correlation peak by using the parabolic fit method */
+    WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
+    /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+    /* Compensate bestIndex for displaced starting position */
+    w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+    /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+#endif /* NETEQ_STEREO */
+
+#ifdef NETEQ_STEREO
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Calculate correlation only for non-slave instances */
+
+#endif /* NETEQ_STEREO */
+
+        /*****************************************************/
+        /* Calculate correlation bestCorr for the found lag. */
+        /* Also do a simple VAD decision.                    */
+        /*****************************************************/
+
+        /*
+         * Calculate scaling to ensure that bestIndex samples can be square-summed
+         * without overflowing
+         */
+        w16_tmp = (31
+            - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
+        w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
+        w16_tmp -= 31;
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[fsMult120];
+
+        /* Calculate energies for vec1 and vec2 */
+        w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
+            (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
+        w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
+            (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);
+
+        /* Calculate cross-correlation at the found lag */
+        w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
+            w16_bestIndex, w16_tmp);
+
+        /* Check VAD constraint 
+         ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
+        if (inst->BGNInst.w16_initialized == 1)
+        {
+            w32_tmp2 = inst->BGNInst.w32_energy;
+        }
+        else
+        {
+            /* if BGN parameters have not been estimated, use a fixed threshold */
+            w32_tmp2 = 75000;
+        }
+        w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
+        w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
+        w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
+        w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
+
+        /* Scale w32_tmp properly before comparing with w32_tmp2 */
+        /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
+        if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
+        {
+            /* Cannot scale only w32_tmp, must scale w32_temp2 too */
+            WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
+            w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
+        }
+        else
+        {
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
+        }
+
+        if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        {
+            /* The signal seems to be passive speech */
+            w16_VAD = 0;
+            w16_bestCorr = 0; /* Correlation does not matter */
+
+            /* For low energy expansion, the new data can be less than 15 ms,
+             but we must ensure that bestIndex is not larger than the new data. */
+            w16_bestIndex = WEBRTC_SPL_MIN( w16_bestIndex, len - oldDataLen );
+        }
+        else
+        {
+            /* The signal is active speech */
+            w16_VAD = 1;
+
+            /* Calculate correlation (cc/sqrt(en1*en2)) */
+
+            /* Start with calculating scale values */
+            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
+            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+
+            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
+            if ((w16_en1Scale + w16_en2Scale) & 1)
+            {
+                w16_en1Scale += 1;
+            }
+
+            /* Convert energies to WebRtc_Word16 */
+            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+
+            /* Calculate energy product */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
+
+            /* Calculate square-root of energy product */
+            w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_tmp);
+
+            /* Calculate cc/sqrt(en1*en2) in Q14 */
+            w16_tmp = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
+            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
+            w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
+            w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
+            w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
+        }
+
+#ifdef NETEQ_STEREO
+
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+#endif /* NETEQ_STEREO */
+
+    /*******************************************************/
+    /* Check preemptive expand criteria and insert samples */
+    /*******************************************************/
+
+    /* Check for strong correlation (>0.9) and at least 15 ms new data, 
+     or passive speech */
+#ifdef NETEQ_STEREO
+    if (((((w16_bestCorr > 14746) && (oldDataLen <= fsMult120)) || (w16_VAD == 0))
+        && (msInfo->msMode != NETEQ_SLAVE)) || ((msInfo->msMode == NETEQ_SLAVE)
+        && (msInfo->extraInfo != PE_EXP_FAIL)))
+#else
+    if (((w16_bestCorr > 14746) && (oldDataLen <= fsMult120))
+        || (w16_VAD == 0))
+#endif
+    {
+        /* Do expand operation by overlap add */
+
+        /* Set length of the first part, not to be modified */
+        WebRtc_Word16 w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);
+
+        /*
+         * Calculate cross-fading slope so that the fading factor goes from
+         * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
+         */
+        w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
+            (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */
+
+        /* Initiate fading factor */
+        w16_startfact = 16384 - w16_inc;
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[w16_startIndex - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[w16_startIndex];
+
+
+        /* Copy unmodified part [0 to 15 ms] */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_startIndex);
+
+        /* Generate interpolated part of length bestIndex (1 pitch period) */
+        pw16_vectmp = pw16_outData + w16_startIndex;
+        /* Reuse mixing function from Expand */
+        WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec2,
+            (WebRtc_Word16*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);
+
+        /* Move the last part (also unmodified) */
+        /* Take from decoded at 15 ms */
+        pw16_vec2 = &pw16_decoded[w16_startIndex];
+        WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[w16_startIndex + w16_bestIndex], pw16_vec2,
+            (WebRtc_Word16) (len - w16_startIndex));
+
+        /* Set the mode flag */
+        if (w16_VAD)
+        {
+            inst->w16_mode = MODE_SUCCESS_PREEMPTIVE;
+        }
+        else
+        {
+            inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
+        }
+
+        /* Calculate resulting length = original length + pitch period */
+        *pw16_len = len + w16_bestIndex;
+
+        /* Update in-call statistics */
+        inst->statInst.preemptiveLength += w16_bestIndex;
+
+        return 0;
+    }
+    else
+    {
+        /* Preemptive Expand not allowed */
+
+#ifdef NETEQ_STEREO
+        /* Signal to slave(s) that this was unsuccessful */
+        if (msInfo->msMode == NETEQ_MASTER)
+        {
+            msInfo->extraInfo = PE_EXP_FAIL;
+        }
+#endif
+
+        /* Set mode flag to unsuccessful preemptive expand */
+        inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+
+        /* Length is unmodified */
+        *pw16_len = len;
+
+
+        /* Simply move all data from decoded to outData */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return 0;
+    }
+}
+
+#undef     SCRATCH_PW16_DS_SPEECH
+#undef     SCRATCH_PW32_CORR
+#undef     SCRATCH_PW16_CORR
diff --git a/trunk/src/modules/audio_coding/neteq/random_vector.c b/trunk/src/modules/audio_coding/neteq/random_vector.c
new file mode 100644
index 0000000..217bacd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/random_vector.c
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function generates a pseudo-random vector.
+ */
+
+#include "dsp_helpfunctions.h"
+
+/*
+ * Values are normalized so that
+ * sqrt(dot(WebRtcNetEQ_kRandnTbl,WebRtcNetEQ_kRandnTbl)/256)=2^13
+ */
+/* Fixed table of RANDVEC_NO_OF_SAMPLES pseudo-random samples. Entries are
+   picked by WebRtcNetEQ_RandomVec() below, which indexes the table with the
+   low bits of a running seed. */
+const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
+{
+	2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380, -4959, -1280, -21716, 7133, -1522, 
+	13458, -3902, 2789, -675, 3441, 5016, -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314, 
+	5426, 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112, -613, 201, -10367, -2960, 
+	-2419, 3442, 4299, -6116, -6092, 1552, -1650, -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968, 
+	12052, -5845, -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552, -9084, -5753, 
+	8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403, 11407, 6232, -1683, 24340, -11166, 4017, -10448, 
+	3153, -2936, 6212, 2891, -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403, 2205, 
+	4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339, 5079, -2645, -9515, 6622, 14651, 15852, 
+	359, 122, 8246, -3502, -6696, -3679, -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219, 
+	1141, 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117, 13792, 5742, 16168, 8661, 
+	-1609, -6095, 1881, 14380, -5588, 6758, -6425, -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952, 
+	-10146, -4667, -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559, 4740, -4819, 992, 
+	-8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588, -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188, 
+	-1022, 4852, 10101, -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678, -1600, -9230, 
+	68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947, 4341, 1014, -4889, -2603, 1246, -5630, 
+	-3596, -870, -1298, 2784, -3317, -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579
+};
+
+
+/*
+ * Generate w16_len pseudo-random samples into pw16_randVec.
+ *
+ * The seed pointed to by w32_seed is advanced by w16_incval for every output
+ * sample, and the low bits of the updated seed select an entry from the
+ * fixed noise table (RANDVEC_NO_OF_SAMPLES is assumed to be a power of two,
+ * so the mask wraps the index — TODO confirm against dsp_helpfunctions.h).
+ */
+void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
+                           WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval)
+{
+    int k = 0;
+    while (k < w16_len)
+    {
+        *w32_seed += w16_incval;
+        pw16_randVec[k] = WebRtcNetEQ_kRandnTbl[(WebRtc_Word16) ((*w32_seed)
+            & (RANDVEC_NO_OF_SAMPLES - 1))];
+        k++;
+    }
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/recin.c b/trunk/src/modules/audio_coding/neteq/recin.c
new file mode 100644
index 0000000..608eb6e
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/recin.c
@@ -0,0 +1,472 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the RecIn function, which is the main function for inserting RTP
+ * packets into NetEQ.
+ */
+
+#include "mcu.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+#include "automode.h"
+#include "dtmf_buffer.h"
+#include "neteq_defines.h"
+#include "neteq_error_codes.h"
+
+
+/*
+ * WebRtcNetEQ_RecInInternal(...)
+ *
+ * Insert one incoming RTP packet into the MCU side of NetEQ:
+ *  - reinitialize on SSRC change or first packet,
+ *  - update RTCP statistics,
+ *  - split a RED payload into up to two payloads (if NETEQ_RED_CODEC),
+ *  - rescale timestamps where the codec requires it,
+ *  - route AVT/DTMF and CNG packets to their special handling,
+ *  - insert normal payloads into the packet buffer,
+ *  - feed the main payload to the codec's bandwidth estimator (if any),
+ *  - update the automode (inter-arrival time) statistics.
+ *
+ * Input:
+ *   - MCU_inst       : MCU instance the packet is inserted into (modified)
+ *   - RTPpacketInput : the incoming RTP packet (copied locally before use)
+ *   - uw32_timeRec   : receive-time value, forwarded to the RTCP update and
+ *                      the bandwidth-estimator callback
+ *
+ * Return value: 0 on success, negative error code otherwise.
+ */
+int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
+                              WebRtc_UWord32 uw32_timeRec)
+{
+    RTPPacket_t RTPpacket[2];
+    int i_k;
+    int i_ok = 0, i_No_Of_Payloads = 1;
+    WebRtc_Word16 flushed = 0;
+    WebRtc_Word16 codecPos;
+    int curr_Codec;
+    WebRtc_Word16 isREDPayload = 0;
+    /* Buffer fill level before insertion; used at the end to derive how many
+       speech samples this packet contributed. */
+    WebRtc_Word32 temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer;
+#ifdef NETEQ_RED_CODEC
+    RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
+    RTPpacketPtr[0] = &RTPpacket[0];
+    RTPpacketPtr[1] = &RTPpacket[1];
+#endif
+
+    /*
+     * Copy from input RTP packet to local copy
+     * (mainly to enable multiple payloads using RED)
+     */
+
+    WEBRTC_SPL_MEMCPY_W8(&RTPpacket[0], RTPpacketInput, sizeof(RTPPacket_t));
+
+    /* Reinitialize NetEq if it's needed (changed SSRC or first call) */
+
+    if ((RTPpacket[0].ssrc != MCU_inst->ssrc) || (MCU_inst->first_packet == 1))
+    {
+        WebRtcNetEQ_RTCPInit(&MCU_inst->RTCP_inst, RTPpacket[0].seqNumber);
+        MCU_inst->first_packet = 0;
+
+        /* Flush the buffer */
+        WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
+
+        /* Store new SSRC */
+        MCU_inst->ssrc = RTPpacket[0].ssrc;
+
+        /* Update codecs */
+        MCU_inst->timeStamp = RTPpacket[0].timeStamp;
+        MCU_inst->current_Payload = RTPpacket[0].payloadType;
+
+        /* Set MCU to update codec on next SignalMCU call */
+        MCU_inst->new_codec = 1;
+
+        /* Reset timestamp scaling */
+        MCU_inst->TSscalingInitialized = 0;
+
+    }
+
+    /* Call RTCP statistics */
+    /* NOTE(review): the OR-ed result is overwritten by later assignments to
+       i_ok below and is never checked — confirm whether RTCPUpdate failures
+       should be propagated. */
+    i_ok |= WebRtcNetEQ_RTCPUpdate(&(MCU_inst->RTCP_inst), RTPpacket[0].seqNumber,
+        RTPpacket[0].timeStamp, uw32_timeRec);
+
+    /* If Redundancy is supported and this is the redundancy payload, separate the payloads */
+#ifdef NETEQ_RED_CODEC
+    if (RTPpacket[0].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+        kDecoderRED))
+    {
+
+        /* Split the payload into a main and a redundancy payloads */
+        i_ok = WebRtcNetEQ_RedundancySplit(RTPpacketPtr, 2, &i_No_Of_Payloads);
+        if (i_ok < 0)
+        {
+            /* error returned */
+            return i_ok;
+        }
+
+        /*
+         * Only accept a few redundancies of the same type as the main data,
+         * AVT events and CNG.
+         */
+        if ((i_No_Of_Payloads > 1) && (RTPpacket[0].payloadType != RTPpacket[1].payloadType)
+            && (RTPpacket[0].payloadType != WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+                kDecoderAVT)) && (RTPpacket[1].payloadType != WebRtcNetEQ_DbGetPayload(
+            &MCU_inst->codec_DB_inst, kDecoderAVT)) && (!WebRtcNetEQ_DbIsCNGPayload(
+            &MCU_inst->codec_DB_inst, RTPpacket[0].payloadType))
+            && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst, RTPpacket[1].payloadType)))
+        {
+            /* Drop the redundant payload: it is neither the same type as the
+               main payload, nor an AVT event, nor CNG. */
+            i_No_Of_Payloads = 1;
+        }
+        isREDPayload = 1;
+    }
+#endif
+
+    /* loop over the number of payloads */
+    for (i_k = 0; i_k < i_No_Of_Payloads; i_k++)
+    {
+
+        /* rcuPlCntr records the payload's position within a RED packet
+           (0 for the main payload). */
+        if (isREDPayload == 1)
+        {
+            RTPpacket[i_k].rcuPlCntr = i_k;
+        }
+        else
+        {
+            RTPpacket[i_k].rcuPlCntr = 0;
+        }
+
+        /* Force update of SplitInfo if it's iLBC because of potential change between 20/30ms */
+        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+            kDecoderILBC))
+        {
+            i_ok = WebRtcNetEQ_DbGetSplitInfo(
+                &MCU_inst->PayloadSplit_inst,
+                (enum WebRtcNetEQDecoder) WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
+                    RTPpacket[i_k].payloadType), RTPpacket[i_k].payloadLen);
+            if (i_ok < 0)
+            {
+                /* error returned */
+                return i_ok;
+            }
+        }
+
+        /* Get information about timestamp scaling for this payload type */
+        i_ok = WebRtcNetEQ_GetTimestampScaling(MCU_inst, RTPpacket[i_k].payloadType);
+        if (i_ok < 0)
+        {
+            /* error returned */
+            return i_ok;
+        }
+
+        if (MCU_inst->TSscalingInitialized == 0 && MCU_inst->scalingFactor != kTSnoScaling)
+        {
+            /* Must initialize scaling with current timestamps */
+            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
+            MCU_inst->internalTS = RTPpacket[i_k].timeStamp;
+            MCU_inst->TSscalingInitialized = 1;
+        }
+
+        /* Adjust timestamp if timestamp scaling is needed (e.g. SILK or G.722) */
+        if (MCU_inst->TSscalingInitialized == 1)
+        {
+            WebRtc_UWord32 newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
+                RTPpacket[i_k].timeStamp);
+
+            /* save the incoming timestamp for next time */
+            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
+
+            /* add the scaled difference to last scaled timestamp and save ... */
+            MCU_inst->internalTS = newTS;
+
+            /* From here on the packet carries the internal (scaled) timestamp. */
+            RTPpacket[i_k].timeStamp = newTS;
+        }
+
+        /* Is this a DTMF packet?*/
+        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+            kDecoderAVT))
+        {
+#ifdef NETEQ_ATEVENT_DECODE
+            /* AVT/DTMF events go to the DTMF buffer, not the packet buffer. */
+            if (MCU_inst->AVT_PlayoutOn)
+            {
+                i_ok = WebRtcNetEQ_DtmfInsertEvent(&MCU_inst->DTMF_inst,
+                    RTPpacket[i_k].payload, RTPpacket[i_k].payloadLen,
+                    RTPpacket[i_k].timeStamp);
+                if (i_ok != 0)
+                {
+                    return i_ok;
+                }
+            }
+#endif
+#ifdef NETEQ_STEREO
+            if (MCU_inst->usingStereo == 0)
+            {
+                /* do not set this for DTMF packets when using stereo mode */
+                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
+            }
+#else
+            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
+#endif
+        }
+        else if (WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
+            RTPpacket[i_k].payloadType))
+        {
+            /* CNG packet: check its sample rate, then insert into the packet buffer. */
+#ifdef NETEQ_CNG_CODEC
+            /* Get CNG sample rate */
+            WebRtc_UWord16 fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
+                RTPpacket[i_k].payloadType);
+            if ((fsCng != MCU_inst->fs) && (fsCng > 8000))
+            {
+                /*
+                 * We have received CNG with a different sample rate from what we are using
+                 * now (must be > 8000, since we may use only one CNG type (default) for all
+                 * frequencies). Flush buffer and signal new codec.
+                 */
+                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
+                MCU_inst->new_codec = 1;
+                MCU_inst->current_Codec = -1;
+            }
+            i_ok = WebRtcNetEQ_PacketBufferInsert(&MCU_inst->PacketBuffer_inst,
+                &RTPpacket[i_k], &flushed);
+            if (i_ok < 0)
+            {
+                return RECIN_CNG_ERROR;
+            }
+            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
+#else /* NETEQ_CNG_CODEC not defined */
+            /* CNG support not compiled in; treat the payload as unknown. */
+            return RECIN_UNKNOWNPAYLOAD;
+#endif /* NETEQ_CNG_CODEC */
+        }
+        else
+        {
+            /* Reinitialize the splitting if the payload and/or the payload length has changed */
+            curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
+                RTPpacket[i_k].payloadType);
+            if (curr_Codec != MCU_inst->current_Codec)
+            {
+                if (curr_Codec < 0)
+                {
+                    return RECIN_UNKNOWNPAYLOAD;
+                }
+                MCU_inst->current_Codec = curr_Codec;
+                MCU_inst->current_Payload = RTPpacket[i_k].payloadType;
+                i_ok = WebRtcNetEQ_DbGetSplitInfo(&MCU_inst->PayloadSplit_inst,
+                    (enum WebRtcNetEQDecoder) MCU_inst->current_Codec,
+                    RTPpacket[i_k].payloadLen);
+                if (i_ok < 0)
+                { /* error returned */
+                    return i_ok;
+                }
+                /* Codec changed: flush the buffer and signal the new codec. */
+                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
+                MCU_inst->new_codec = 1;
+            }
+
+            /* Parse the payload and insert it into the buffer */
+            i_ok = WebRtcNetEQ_SplitAndInsertPayload(&RTPpacket[i_k],
+                &MCU_inst->PacketBuffer_inst, &MCU_inst->PayloadSplit_inst, &flushed);
+            if (i_ok < 0)
+            {
+                return i_ok;
+            }
+            if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF != 0)
+            {
+                /* first normal packet after CNG or DTMF */
+                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = -1;
+            }
+        }
+        /* Reset DSP timestamp etc. if packet buffer flushed */
+        if (flushed)
+        {
+            MCU_inst->new_codec = 1;
+        }
+    }
+
+    /*
+     * Update Bandwidth Estimate
+     * Only send the main payload to BWE
+     */
+    if ((curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
+        RTPpacket[0].payloadType)) >= 0)
+    {
+        codecPos = MCU_inst->codec_DB_inst.position[curr_Codec];
+        if (MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos] != NULL) /* codec has BWE function */
+        {
+            if (RTPpacket[0].starts_byte1) /* check for shifted byte alignment */
+            {
+                /* re-align to 16-bit alignment: shift the payload down one byte */
+                for (i_k = 0; i_k < RTPpacket[0].payloadLen; i_k++)
+                {
+                    WEBRTC_SPL_SET_BYTE(RTPpacket[0].payload,
+                        WEBRTC_SPL_GET_BYTE(RTPpacket[0].payload, i_k+1),
+                        i_k);
+                }
+                RTPpacket[0].starts_byte1 = 0;
+            }
+
+            /* Invoke the codec's bandwidth-estimator callback. */
+            MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos](
+                MCU_inst->codec_DB_inst.codec_state[codecPos],
+                (G_CONST WebRtc_UWord16 *) RTPpacket[0].payload,
+                (WebRtc_Word32) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
+                (WebRtc_UWord32) RTPpacket[0].timeStamp, (WebRtc_UWord32) uw32_timeRec);
+        }
+    }
+
+    if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == 0)
+    {
+        /* Calculate the total speech length carried in each packet */
+        temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer - temp_bufsize;
+        temp_bufsize *= MCU_inst->PacketBuffer_inst.packSizeSamples;
+
+        if ((temp_bufsize > 0) && (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF
+            == 0) && (temp_bufsize
+            != MCU_inst->BufferStat_inst.Automode_inst.packetSpeechLenSamp))
+        {
+            /* Change the auto-mode parameters if packet length has changed */
+            WebRtcNetEQ_SetPacketSpeechLen(&(MCU_inst->BufferStat_inst.Automode_inst),
+                (WebRtc_Word16) temp_bufsize, MCU_inst->fs);
+        }
+
+        /* update statistics */
+        if ((WebRtc_Word32) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
+            && !MCU_inst->new_codec)
+        {
+            /*
+             * Only update statistics if incoming packet is not older than last played out
+             * packet, and if new codec flag is not set.
+             */
+            WebRtcNetEQ_UpdateIatStatistics(&MCU_inst->BufferStat_inst.Automode_inst,
+                MCU_inst->PacketBuffer_inst.maxInsertPositions, RTPpacket[0].seqNumber,
+                RTPpacket[0].timeStamp, MCU_inst->fs,
+                WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) MCU_inst->current_Codec),
+                (MCU_inst->NetEqPlayoutMode == kPlayoutStreaming));
+        }
+    }
+    else if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == -1)
+    {
+        /*
+         * This is first "normal" packet after CNG or DTMF.
+         * Reset packet time counter and measure time until next packet,
+         * but don't update statistics.
+         */
+        MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 0;
+        MCU_inst->BufferStat_inst.Automode_inst.packetIatCountSamp = 0;
+    }
+    return 0;
+
+}
+
+/*
+ * Determine the timestamp scaling mode for an RTP payload type and store it
+ * in MCU_inst->scalingFactor.
+ *
+ * The factor is the number the RTP timestamp must be multiplied by to get
+ * the true sample count: G.722 gets factor 2, AVT and CNG leave the current
+ * setting untouched, and every other codec disables scaling.
+ *
+ * Return value: 0 on success, or the (negative) error code from the codec
+ * database lookup if the payload type is unknown.
+ */
+int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType)
+{
+    enum WebRtcNetEQDecoder decoder;
+    int dbResult = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst, rtpPayloadType);
+
+    if (dbResult < 0)
+    {
+        /* Unknown payload type; propagate the error code. */
+        return dbResult;
+    }
+
+    decoder = (enum WebRtcNetEQDecoder) dbResult;
+
+    if (decoder == kDecoderG722)
+    {
+        /* Two output samples per RTP timestamp tick. */
+        MCU_inst->scalingFactor = kTSscalingTwo;
+    }
+    else if ((decoder != kDecoderAVT) && (decoder != kDecoderCNG))
+    {
+        /* All other codecs: no timestamp scaling.
+           (AVT and CNG deliberately keep the previous setting.) */
+        MCU_inst->scalingFactor = kTSnoScaling;
+    }
+
+    return 0;
+}
+
+/*
+ * Convert an external (incoming RTP) timestamp to the internal timestamp
+ * domain: the difference from the last seen external timestamp is scaled by
+ * the factor selected in MCU_inst->scalingFactor and added to the last
+ * internal timestamp. The instance itself is not modified.
+ */
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 externalTS)
+{
+    /* Difference between this and the last incoming timestamp. */
+    WebRtc_Word32 w32_diff = externalTS - MCU_inst->externalTS;
+
+    if (MCU_inst->scalingFactor == kTSscalingTwo)
+    {
+        /* Multiply by 2. */
+        w32_diff = WEBRTC_SPL_LSHIFT_W32(w32_diff, 1);
+    }
+    else if (MCU_inst->scalingFactor == kTSscalingTwoThirds)
+    {
+        /* Multiply by 2/3. */
+        w32_diff = WebRtcSpl_DivW32W16(WEBRTC_SPL_LSHIFT_W32(w32_diff, 1), 3);
+    }
+    else if (MCU_inst->scalingFactor == kTSscalingFourThirds)
+    {
+        /* Multiply by 4/3. */
+        w32_diff = WebRtcSpl_DivW32W16(WEBRTC_SPL_LSHIFT_W32(w32_diff, 2), 3);
+    }
+    /* Any other factor: leave the difference unscaled. */
+
+    /* Add the scaled difference to the last scaled timestamp. */
+    return MCU_inst->internalTS + w32_diff;
+}
+
+/*
+ * Convert an internal timestamp back to the external (RTP) domain; the
+ * inverse of WebRtcNetEQ_ScaleTimestampExternalToInternal. The difference
+ * from the last internal timestamp is scaled by the inverse factor and added
+ * to the last external timestamp. The instance itself is not modified.
+ */
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 internalTS)
+{
+    /* Difference between this and the last internal timestamp. */
+    WebRtc_Word32 w32_diff = (WebRtc_Word32) internalTS - MCU_inst->internalTS;
+
+    if (MCU_inst->scalingFactor == kTSscalingTwo)
+    {
+        /* Divide by 2. */
+        w32_diff = WEBRTC_SPL_RSHIFT_W32(w32_diff, 1);
+    }
+    else if (MCU_inst->scalingFactor == kTSscalingTwoThirds)
+    {
+        /* Multiply by 3/2 (inverse of 2/3). */
+        w32_diff = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(w32_diff, 3), 1);
+    }
+    else if (MCU_inst->scalingFactor == kTSscalingFourThirds)
+    {
+        /* Multiply by 3/4 (inverse of 4/3). */
+        w32_diff = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(w32_diff, 3), 2);
+    }
+    /* Any other factor: leave the difference unscaled. */
+
+    /* Add the scaled difference to the last external timestamp. */
+    return MCU_inst->externalTS + w32_diff;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/recout.c b/trunk/src/modules/audio_coding/neteq/recout.c
new file mode 100644
index 0000000..dab5540
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/recout.c
@@ -0,0 +1,1429 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of RecOut function, which is the main function for the audio output
+ * process. This function must be called (through the NetEQ API) once every 10 ms.
+ */
+
+#include "dsp.h"
+
+#include <assert.h>
+#include <string.h> /* to define NULL */
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+#include "neteq_defines.h"
+#include "mcu_dsp_common.h"
+
+/* Audio types */
+#define TYPE_SPEECH 1
+#define TYPE_CNG 2
+
+#ifdef NETEQ_DELAY_LOGGING
+#include "delay_logging.h"
+#include <stdio.h>
+#pragma message("*******************************************************************")
+#pragma message("You have specified to use NETEQ_DELAY_LOGGING in the NetEQ library.")
+#pragma message("Make sure that your test application supports this.")
+#pragma message("*******************************************************************")
+#endif
+
+/* Scratch usage:
+
+ Type           Name                            size             startpos      endpos
+ WebRtc_Word16  pw16_NetEqAlgorithm_buffer      600*fs/8000      0             600*fs/8000-1
+ struct         dspInfo                         6                600*fs/8000   605*fs/8000
+
+ func           WebRtcNetEQ_Normal              40+495*fs/8000   0             39+495*fs/8000
+ func           WebRtcNetEQ_Merge               40+496*fs/8000   0             39+496*fs/8000
+ func           WebRtcNetEQ_Expand              40+370*fs/8000   126*fs/8000   39+496*fs/8000
+ func           WebRtcNetEQ_Accelerate          210              240*fs/8000   209+240*fs/8000
+ func           WebRtcNetEQ_BGNUpdate           69               480*fs/8000   68+480*fs/8000
+
+ Total:  605*fs/8000
+ */
+
+#define SCRATCH_ALGORITHM_BUFFER            0
+#define SCRATCH_NETEQ_NORMAL                0
+#define SCRATCH_NETEQ_MERGE                 0
+
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define SCRATCH_DSP_INFO                     3600
+#define SCRATCH_NETEQ_ACCELERATE            1440
+#define SCRATCH_NETEQ_BGN_UPDATE            2880
+#define SCRATCH_NETEQ_EXPAND                756
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define SCRATCH_DSP_INFO                     2400
+#define SCRATCH_NETEQ_ACCELERATE            960
+#define SCRATCH_NETEQ_BGN_UPDATE            1920
+#define SCRATCH_NETEQ_EXPAND                504
+#elif (defined(NETEQ_WIDEBAND)) 
+#define SCRATCH_DSP_INFO                     1200
+#define SCRATCH_NETEQ_ACCELERATE            480
+#define SCRATCH_NETEQ_BGN_UPDATE            960
+#define SCRATCH_NETEQ_EXPAND                252
+#else    /* NB */
+#define SCRATCH_DSP_INFO                     600
+#define SCRATCH_NETEQ_ACCELERATE            240
+#define SCRATCH_NETEQ_BGN_UPDATE            480
+#define SCRATCH_NETEQ_EXPAND                126
+#endif
+
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define SIZE_SCRATCH_BUFFER                 3636
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define SIZE_SCRATCH_BUFFER                 2424
+#elif (defined(NETEQ_WIDEBAND)) 
+#define SIZE_SCRATCH_BUFFER                 1212
+#else    /* NB */
+#define SIZE_SCRATCH_BUFFER                 606
+#endif
+
+#ifdef NETEQ_DELAY_LOGGING
+extern FILE *delay_fid2; /* file pointer to delay log file */
+extern WebRtc_UWord32 tot_received_packets;
+#endif
+
+
+int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData,
+                               WebRtc_Word16 *pw16_len, WebRtc_Word16 BGNonly)
+{
+
+    WebRtc_Word16 blockLen, payloadLen, len = 0, pos;
+    WebRtc_Word16 w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
+    WebRtc_Word16 *blockPtr;
+    WebRtc_Word16 MD = 0;
+
+    WebRtc_Word16 speechType = TYPE_SPEECH;
+    WebRtc_UWord16 instr;
+    WebRtc_UWord16 uw16_tmp;
+#ifdef SCRATCH
+    char pw8_ScratchBuffer[((SIZE_SCRATCH_BUFFER + 1) * 2)];
+    WebRtc_Word16 *pw16_scratchPtr = (WebRtc_Word16*) pw8_ScratchBuffer;
+    WebRtc_Word16 pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE];
+    WebRtc_Word16 *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
+        + SCRATCH_ALGORITHM_BUFFER;
+    DSP2MCU_info_t *dspInfo = (DSP2MCU_info_t*) (pw16_scratchPtr + SCRATCH_DSP_INFO);
+#else
+    WebRtc_Word16 pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE];
+    WebRtc_Word16 pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE];
+    DSP2MCU_info_t dspInfoStruct;
+    DSP2MCU_info_t *dspInfo = &dspInfoStruct;
+#endif
+    WebRtc_Word16 fs_mult;
+    int borrowedSamples;
+    int oldBorrowedSamples;
+    int return_value = 0;
+    WebRtc_Word16 lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
+    void *mainInstBackup = inst->main_inst;
+
+#ifdef NETEQ_DELAY_LOGGING
+    int temp_var;
+#endif
+    WebRtc_Word16 dtmfValue = -1;
+    WebRtc_Word16 dtmfVolume = -1;
+    int playDtmf = 0;
+#ifdef NETEQ_ATEVENT_DECODE
+    int dtmfSwitch = 0;
+#endif
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+    WebRtc_Word16 *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory SHARED_MEM_SIZE size */
+    inst->pw16_readAddress = sharedMem;
+    inst->pw16_writeAddress = sharedMem;
+
+    /* Get information about if there is one descriptor left */
+    if (inst->codec_ptr_inst.funcGetMDinfo != NULL)
+    {
+        MD = inst->codec_ptr_inst.funcGetMDinfo(inst->codec_ptr_inst.codec_state);
+        if (MD > 0)
+            MD = 1;
+        else
+            MD = 0;
+    }
+
+#ifdef NETEQ_STEREO
+    if ((msInfo->msMode == NETEQ_SLAVE) && (inst->codec_ptr_inst.funcDecode != NULL))
+    {
+        /*
+         * Valid function pointers indicate that we have decoded something,
+         * and that the timestamp information is correct.
+         */
+
+        /* Get the information from master to correct synchronization */
+        WebRtc_UWord32 currentMasterTimestamp;
+        WebRtc_UWord32 currentSlaveTimestamp;
+
+        currentMasterTimestamp = msInfo->endTimestamp - msInfo->samplesLeftWithOverlap;
+        currentSlaveTimestamp = inst->endTimestamp - (inst->endPosition - inst->curPosition);
+
+        if (currentSlaveTimestamp < currentMasterTimestamp)
+        {
+            /* brute-force discard a number of samples to catch up */
+            inst->curPosition += currentMasterTimestamp - currentSlaveTimestamp;
+
+            /* make sure we have at least "overlap" samples left */
+            inst->curPosition = WEBRTC_SPL_MIN(inst->curPosition,
+                inst->endPosition - inst->ExpandInst.w16_overlap);
+        }
+        else if (currentSlaveTimestamp > currentMasterTimestamp)
+        {
+            /* back off current position to slow down */
+            inst->curPosition -= currentSlaveTimestamp - currentMasterTimestamp;
+
+            /* make sure we do not end up outside the speech history */
+            inst->curPosition = WEBRTC_SPL_MAX(inst->curPosition, 0);
+        }
+    }
+#endif
+
+    /* Write status data to shared memory */
+    dspInfo->playedOutTS = inst->endTimestamp;
+    dspInfo->samplesLeft = inst->endPosition - inst->curPosition
+        - inst->ExpandInst.w16_overlap;
+    dspInfo->MD = MD;
+    dspInfo->lastMode = inst->w16_mode;
+    dspInfo->frameLen = inst->w16_frameLen;
+
+    /* Force update of codec if codec function is NULL */
+    if (inst->codec_ptr_inst.funcDecode == NULL)
+    {
+        dspInfo->lastMode |= MODE_AWAITING_CODEC_PTR;
+    }
+
+#ifdef NETEQ_STEREO
+    if (msInfo->msMode == NETEQ_SLAVE && (msInfo->extraInfo == DTMF_OVERDUB
+        || msInfo->extraInfo == DTMF_ONLY))
+    {
+        /* Signal that the master instance generated DTMF tones */
+        dspInfo->lastMode |= MODE_MASTER_DTMF_SIGNAL;
+    }
+
+    if (msInfo->msMode != NETEQ_MONO)
+    {
+        /* We are using stereo mode; signal this to MCU side */
+        dspInfo->lastMode |= MODE_USING_STEREO;
+    }
+#endif
+
+    WEBRTC_SPL_MEMCPY_W8(inst->pw16_writeAddress,dspInfo,sizeof(DSP2MCU_info_t));
+
+    /* Signal MCU with "interrupt" call to main inst*/
+#ifdef NETEQ_STEREO
+    assert(msInfo != NULL);
+    if (msInfo->msMode == NETEQ_MASTER)
+    {
+        /* clear info to slave */
+        WebRtcSpl_MemSetW16((WebRtc_Word16 *) msInfo, 0,
+            sizeof(MasterSlaveInfo) / sizeof(WebRtc_Word16));
+        /* re-set mode */
+        msInfo->msMode = NETEQ_MASTER;
+
+        /* Store some information to slave */
+        msInfo->endTimestamp = inst->endTimestamp;
+        msInfo->samplesLeftWithOverlap = inst->endPosition - inst->curPosition;
+    }
+#endif
+
+    /*
+     * This call will trigger the MCU side to make a decision based on buffer contents and
+     * decision history. Instructions, encoded data and function pointers will be written
+     * to the shared memory.
+     */
+    return_value = WebRtcNetEQ_DSP2MCUinterrupt((MainInst_t *) inst->main_inst, sharedMem);
+
+    /* Read MCU data and instructions */
+    instr = (WebRtc_UWord16) (inst->pw16_readAddress[0] & 0xf000);
+
+#ifdef NETEQ_STEREO
+    if (msInfo->msMode == NETEQ_MASTER)
+    {
+        msInfo->instruction = instr;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        /* Nothing to do */
+    }
+#endif
+
+    /* check for error returned from MCU side, if so, return error */
+    if (return_value < 0)
+    {
+        inst->w16_mode = MODE_ERROR;
+        dspInfo->lastMode = MODE_ERROR;
+        return return_value;
+    }
+
+    blockPtr = &((inst->pw16_readAddress)[3]);
+
+    /* Check for DTMF payload flag */
+    if ((inst->pw16_readAddress[0] & DSP_DTMF_PAYLOAD) != 0)
+    {
+        playDtmf = 1;
+        dtmfValue = blockPtr[1];
+        dtmfVolume = blockPtr[2];
+        blockPtr += 3;
+
+#ifdef NETEQ_STEREO
+        if (msInfo->msMode == NETEQ_MASTER)
+        {
+            /* signal to slave that master is using DTMF */
+            msInfo->extraInfo = DTMF_OVERDUB;
+        }
+#endif
+    }
+
+    blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of WebRtc_Word16 */
+    payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+    blockPtr++;
+
+    /* Do we have to change our decoder? */
+    if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_NEW_CODEC)
+    {
+        WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
+        if (inst->codec_ptr_inst.codec_fs != 0)
+        {
+            return_value = WebRtcNetEQ_DSPInit(inst, inst->codec_ptr_inst.codec_fs);
+            if (return_value != 0)
+            { /* error returned */
+                instr = DSP_INSTR_FADE_TO_BGN; /* emergency instruction */
+            }
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+            fwrite(&inst->fs, sizeof(WebRtc_UWord16), 1, delay_fid2);
+#endif
+        }
+
+        /* Copy it again since the init destroys this part */
+
+        WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
+        inst->endTimestamp = inst->codec_ptr_inst.timeStamp;
+        inst->videoSyncTimestamp = inst->codec_ptr_inst.timeStamp;
+        blockPtr += blockLen;
+        blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
+        payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+        blockPtr++;
+        if (inst->codec_ptr_inst.funcDecodeInit != NULL)
+        {
+            inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
+        }
+
+#ifdef NETEQ_CNG_CODEC
+
+        /* Also update the CNG state as this might be uninitialized */
+
+        WEBRTC_SPL_MEMCPY_W16(&inst->CNG_Codec_inst,blockPtr,(payloadLen+1)>>1);
+        blockPtr += blockLen;
+        blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
+        payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+        blockPtr++;
+        if (inst->CNG_Codec_inst != NULL)
+        {
+            WebRtcCng_InitDec(inst->CNG_Codec_inst);
+        }
+#endif
+    }
+    else if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_RESET)
+    {
+        /* Reset the current codec (but not DSP struct) */
+        if (inst->codec_ptr_inst.funcDecodeInit != NULL)
+        {
+            inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
+        }
+
+#ifdef NETEQ_CNG_CODEC
+        /* And reset CNG */
+        if (inst->CNG_Codec_inst != NULL)
+        {
+            WebRtcCng_InitDec(inst->CNG_Codec_inst);
+        }
+#endif /*NETEQ_CNG_CODEC*/
+    }
+
+    fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs);
+
+    /* Add late packet? */
+    if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_ADD_LATE_PKT)
+    {
+        if (inst->codec_ptr_inst.funcAddLatePkt != NULL)
+        {
+            /* Only do this if the codec has support for Add Late Pkt */
+            inst->codec_ptr_inst.funcAddLatePkt(inst->codec_ptr_inst.codec_state, blockPtr,
+                payloadLen);
+        }
+        blockPtr += blockLen;
+        blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
+        payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+        blockPtr++;
+    }
+
+    /* Do we have to decode data? */
+    if ((instr == DSP_INSTR_NORMAL) || (instr == DSP_INSTR_ACCELERATE) || (instr
+        == DSP_INSTR_MERGE) || (instr == DSP_INSTR_PREEMPTIVE_EXPAND))
+    {
+        /* Do we need to update codec-internal PLC state? */
+        if ((instr == DSP_INSTR_MERGE) && (inst->codec_ptr_inst.funcDecodePLC != NULL))
+        {
+            len = 0;
+            len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
+                &pw16_decoded_buffer[len], 1);
+        }
+        len = 0;
+
+        /* Do decoding */
+        while ((blockLen > 0) && (len < (240 * fs_mult))) /* Guard somewhat against overflow */
+        {
+            if (inst->codec_ptr_inst.funcDecode != NULL)
+            {
+                WebRtc_Word16 dec_Len;
+                if (!BGNonly)
+                {
+                    /* Do decoding as normal
+                     *
+                     * blockPtr is pointing to payload, at this point,
+                     * the most significant bit of *(blockPtr - 1) is a flag if set to 1
+                     * indicates that the following payload is the redundant payload.
+                     */
+                    if (((*(blockPtr - 1) & DSP_CODEC_RED_FLAG) != 0)
+                        && (inst->codec_ptr_inst.funcDecodeRCU != NULL))
+                    {
+                        dec_Len = inst->codec_ptr_inst.funcDecodeRCU(
+                            inst->codec_ptr_inst.codec_state, blockPtr, payloadLen,
+                            &pw16_decoded_buffer[len], &speechType);
+                    }
+                    else
+                    {
+                        dec_Len = inst->codec_ptr_inst.funcDecode(
+                            inst->codec_ptr_inst.codec_state, blockPtr, payloadLen,
+                            &pw16_decoded_buffer[len], &speechType);
+                    }
+                }
+                else
+                {
+                    /*
+                     * Background noise mode: don't decode, just produce the same length BGN.
+                     * Don't call Expand for BGN here, since Expand uses the memory where the
+                     * bitstreams are stored (sharemem).
+                     */
+                    dec_Len = inst->w16_frameLen;
+                }
+
+                if (dec_Len > 0)
+                {
+                    len += dec_Len;
+                    /* Update frameLen */
+                    inst->w16_frameLen = dec_Len;
+                }
+                else if (dec_Len < 0)
+                {
+                    /* Error */
+                    len = -1;
+                    break;
+                }
+                /*
+                 * Sanity check (although we might still write outside memory when this
+                 * happens...)
+                 */
+                if (len > NETEQ_MAX_FRAME_SIZE)
+                {
+                    WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
+                    *pw16_len = inst->timestampsPerCall;
+                    inst->w16_mode = MODE_ERROR;
+                    dspInfo->lastMode = MODE_ERROR;
+                    return RECOUT_ERROR_DECODED_TOO_MUCH;
+                }
+
+                /* Verify that instance was not corrupted by decoder */
+                if (mainInstBackup != inst->main_inst)
+                {
+                    /* Instance is corrupt */
+                    return CORRUPT_INSTANCE;
+                }
+
+            }
+            blockPtr += blockLen;
+            blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
+            payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+            blockPtr++;
+        }
+
+        if (len < 0)
+        {
+            len = 0;
+            inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
+            if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
+            {
+                return_value = -inst->codec_ptr_inst.funcGetErrorCode(
+                    inst->codec_ptr_inst.codec_state);
+            }
+            else
+            {
+                return_value = RECOUT_ERROR_DECODING;
+            }
+            instr = DSP_INSTR_FADE_TO_BGN;
+        }
+        if (speechType != TYPE_CNG)
+        {
+            /*
+             * Don't increment timestamp if codec returned CNG speech type
+             * since in this case, the MCU side will increment the CNGplayedTS counter.
+             */
+            inst->endTimestamp += len;
+        }
+    }
+    else if (instr == DSP_INSTR_NORMAL_ONE_DESC)
+    {
+        if (inst->codec_ptr_inst.funcDecode != NULL)
+        {
+            len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state, NULL, 0,
+                pw16_decoded_buffer, &speechType);
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+            fwrite(&inst->endTimestamp, sizeof(WebRtc_UWord32), 1, delay_fid2);
+            fwrite(&dspInfo->samplesLeft, sizeof(WebRtc_UWord16), 1, delay_fid2);
+            tot_received_packets++;
+#endif
+        }
+        if (speechType != TYPE_CNG)
+        {
+            /*
+             * Don't increment timestamp if codec returned CNG speech type
+             * since in this case, the MCU side will increment the CNGplayedTS counter.
+             */
+            inst->endTimestamp += len;
+        }
+
+        /* Verify that instance was not corrupted by decoder */
+        if (mainInstBackup != inst->main_inst)
+        {
+            /* Instance is corrupt */
+            return CORRUPT_INSTANCE;
+        }
+
+        if (len <= 0)
+        {
+            len = 0;
+            if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
+            {
+                return_value = -inst->codec_ptr_inst.funcGetErrorCode(
+                    inst->codec_ptr_inst.codec_state);
+            }
+            else
+            {
+                return_value = RECOUT_ERROR_DECODING;
+            }
+            if ((inst->codec_ptr_inst.funcDecodeInit != NULL)
+                && (inst->codec_ptr_inst.codec_state != NULL))
+            {
+                /* Reinitialize codec state as something is obviously wrong */
+                inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
+            }
+            inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
+            instr = DSP_INSTR_FADE_TO_BGN;
+        }
+    }
+
+    if (len == 0 && lastModeBGNonly) /* no new data */
+    {
+        BGNonly = 1; /* force BGN this time too */
+    }
+
+#ifdef NETEQ_VAD
+    if ((speechType == TYPE_CNG) /* decoder responded with codec-internal CNG */
+    || ((instr == DSP_INSTR_DO_RFC3389CNG) && (blockLen > 0)) /* ... or, SID frame */
+    || (inst->fs > 16000)) /* ... or, if not NB or WB */
+    {
+        /* disable post-decode VAD upon first sign of send-side DTX/VAD active, or if SWB */
+        inst->VADInst.VADEnabled = 0;
+        inst->VADInst.VADDecision = 1; /* set to always active, just to be on the safe side */
+        inst->VADInst.SIDintervalCounter = 0; /* reset SID interval counter */
+    }
+    else if (!inst->VADInst.VADEnabled) /* VAD disabled and no SID/CNG data observed this time */
+    {
+        inst->VADInst.SIDintervalCounter++; /* increase counter */
+    }
+
+    /* check for re-enabling the VAD */
+    if (inst->VADInst.SIDintervalCounter >= POST_DECODE_VAD_AUTO_ENABLE)
+    {
+        /*
+         * It's been a while since the last CNG/SID frame was observed => re-enable VAD.
+         * (Do not care to look for a VAD instance, since this is done inside the init
+         * function)
+         */
+        WebRtcNetEQ_InitVAD(&inst->VADInst, inst->fs);
+    }
+
+    if (len > 0 /* if we decoded any data */
+    && inst->VADInst.VADEnabled /* and VAD enabled */
+    && inst->fs <= 16000) /* can only do VAD for NB and WB */
+    {
+        int VADframeSize; /* VAD frame size in ms */
+        int VADSamplePtr = 0;
+
+        inst->VADInst.VADDecision = 0;
+
+        if (inst->VADInst.VADFunction != NULL) /* make sure that VAD function is provided */
+        {
+            /* divide the data into groups, as large as possible */
+            for (VADframeSize = 30; VADframeSize >= 10; VADframeSize -= 10)
+            {
+                /* loop through 30, 20, 10 */
+
+                while (inst->VADInst.VADDecision == 0
+                    && len - VADSamplePtr >= VADframeSize * fs_mult * 8)
+                {
+                    /*
+                     * Only continue until first active speech found, and as long as there is
+                     * one VADframeSize left.
+                     */
+
+                    /* call VAD with new decoded data */
+                    inst->VADInst.VADDecision |= inst->VADInst.VADFunction(
+                        inst->VADInst.VADState, (WebRtc_Word16) inst->fs,
+                        (WebRtc_Word16 *) &pw16_decoded_buffer[VADSamplePtr],
+                        (WebRtc_Word16) (VADframeSize * fs_mult * 8));
+
+                    VADSamplePtr += VADframeSize * fs_mult * 8; /* increment sample counter */
+                }
+            }
+        }
+        else
+        { /* VAD function is NULL */
+            inst->VADInst.VADDecision = 1; /* set decision to active */
+            inst->VADInst.VADEnabled = 0; /* disable VAD since we have no VAD function */
+        }
+
+    }
+#endif /* NETEQ_VAD */
+
+    /* Adjust timestamp if needed */
+    uw16_tmp = (WebRtc_UWord16) inst->pw16_readAddress[1];
+    inst->endTimestamp += (((WebRtc_UWord32) uw16_tmp) << 16);
+    uw16_tmp = (WebRtc_UWord16) inst->pw16_readAddress[2];
+    inst->endTimestamp += uw16_tmp;
+
+    if (BGNonly && len > 0)
+    {
+        /*
+         * If BGN mode, we did not produce any data at decoding.
+         * Do it now instead.
+         */
+
+        WebRtcNetEQ_GenerateBGN(inst,
+#ifdef SCRATCH
+            pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+            pw16_decoded_buffer, len);
+    }
+
+    /* Switch on the instruction received from the MCU side. */
+    switch (instr)
+    {
+        case DSP_INSTR_NORMAL:
+
+            /* Allow for signal processing to apply gain-back etc */
+            WebRtcNetEQ_Normal(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+
+            /* If last packet was decoded as a inband CNG set mode to CNG instead */
+            if ((speechType == TYPE_CNG) || ((inst->w16_mode == MODE_CODEC_INTERNAL_CNG)
+                && (len == 0)))
+            {
+                inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+            }
+
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+        case DSP_INSTR_NORMAL_ONE_DESC:
+
+            /* Allow for signal processing to apply gain-back etc */
+            WebRtcNetEQ_Normal(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            inst->w16_mode = MODE_ONE_DESCRIPTOR;
+            break;
+        case DSP_INSTR_MERGE:
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+            temp_var = -len;
+#endif
+            /* Call Merge with history*/
+            return_value = WebRtcNetEQ_Merge(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_MERGE,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+
+            if (return_value < 0)
+            {
+                /* error */
+                return return_value;
+            }
+
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var += len;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+            /* If last packet was decoded as a inband CNG set mode to CNG instead */
+            if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_EXPAND:
+            len = 0;
+            pos = 0;
+            while ((inst->endPosition - inst->curPosition - inst->ExpandInst.w16_overlap + pos)
+                < (inst->timestampsPerCall))
+            {
+                return_value = WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                    pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+                if (return_value < 0)
+                {
+                    /* error */
+                    return return_value;
+                }
+
+                /*
+                 * Update buffer, but only end part (otherwise expand state is destroyed
+                 * since it reuses speechBuffer[] memory
+                 */
+
+                WEBRTC_SPL_MEMMOVE_W16(inst->pw16_speechHistory,
+                                       inst->pw16_speechHistory + len,
+                                       (inst->w16_speechHistoryLen-len));
+                WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[inst->w16_speechHistoryLen-len],
+                                      pw16_NetEqAlgorithm_buffer, len);
+
+                inst->curPosition -= len;
+
+                /* Update variables for VQmon */
+                inst->w16_concealedTS += len;
+#ifdef NETEQ_DELAY_LOGGING
+                temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
+                fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+                temp_var = len;
+                fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+                len = 0; /* already written the data, so do not write it again further down. */
+            }
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_ACCELERATE:
+            if (len < 3 * 80 * fs_mult)
+            {
+                /* We need to move data from the speechBuffer[] in order to get 30 ms */
+                borrowedSamples = 3 * 80 * fs_mult - len;
+
+                WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
+                                       pw16_decoded_buffer, len);
+                WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
+                                      &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
+                                      borrowedSamples);
+
+                return_value = WebRtcNetEQ_Accelerate(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
+#endif
+                    pw16_decoded_buffer, 3 * inst->timestampsPerCall,
+                    pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+
+                if (return_value < 0)
+                {
+                    /* error */
+                    return return_value;
+                }
+
+                /* Copy back samples to the buffer */
+                if (len < borrowedSamples)
+                {
+                    /*
+                     * This destroys the beginning of the buffer, but will not cause any
+                     * problems
+                     */
+
+                    WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
+                        pw16_NetEqAlgorithm_buffer, len);
+                    WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[borrowedSamples-len],
+                                           inst->speechBuffer,
+                                           (inst->endPosition-(borrowedSamples-len)));
+
+                    inst->curPosition += (borrowedSamples - len);
+#ifdef NETEQ_DELAY_LOGGING
+                    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
+                    fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+                    temp_var = 3 * inst->timestampsPerCall - len;
+                    fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+                    len = 0;
+                }
+                else
+                {
+                    WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
+                        pw16_NetEqAlgorithm_buffer, borrowedSamples);
+                    WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
+                                           &pw16_NetEqAlgorithm_buffer[borrowedSamples],
+                                           (len-borrowedSamples));
+#ifdef NETEQ_DELAY_LOGGING
+                    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
+                    fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+                    temp_var = 3 * inst->timestampsPerCall - len;
+                    fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+                    len = len - borrowedSamples;
+                }
+
+            }
+            else
+            {
+#ifdef NETEQ_DELAY_LOGGING
+                temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
+                fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+                temp_var = len;
+#endif
+                return_value = WebRtcNetEQ_Accelerate(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
+#endif
+                    pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+
+                if (return_value < 0)
+                {
+                    /* error */
+                    return return_value;
+                }
+
+#ifdef NETEQ_DELAY_LOGGING
+                temp_var -= len;
+                fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+            }
+            /* If last packet was decoded as a inband CNG set mode to CNG instead */
+            if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_DO_RFC3389CNG:
+#ifdef NETEQ_CNG_CODEC
+            if (blockLen > 0)
+            {
+                if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (WebRtc_UWord8*) blockPtr,
+                    payloadLen) < 0)
+                {
+                    /* error returned from CNG function */
+                    return_value = -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
+                    len = inst->timestampsPerCall;
+                    WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+                    break;
+                }
+            }
+
+            if (BGNonly)
+            {
+                /* Get data from BGN function instead of CNG */
+                len = WebRtcNetEQ_GenerateBGN(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                    pw16_NetEqAlgorithm_buffer, inst->timestampsPerCall);
+                if (len != inst->timestampsPerCall)
+                {
+                    /* this is not good, treat this as an error */
+                    return_value = -1;
+                }
+            }
+            else
+            {
+                return_value = WebRtcNetEQ_Cng(inst, pw16_NetEqAlgorithm_buffer,
+                    inst->timestampsPerCall);
+            }
+            len = inst->timestampsPerCall;
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->w16_mode = MODE_RFC3389CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+
+            if (return_value < 0)
+            {
+                /* error returned */
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+
+            break;
+#else
+            return FAULTY_INSTRUCTION;
+#endif
+        case DSP_INSTR_DO_CODEC_INTERNAL_CNG:
+            /*
+             * This represents the case when there is no transmission and the decoder should
+             * do internal CNG.
+             */
+            len = 0;
+            if (inst->codec_ptr_inst.funcDecode != NULL && !BGNonly)
+            {
+                len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state,
+                    blockPtr, 0, pw16_decoded_buffer, &speechType);
+            }
+            else
+            {
+                /* get BGN data */
+                len = WebRtcNetEQ_GenerateBGN(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                    pw16_decoded_buffer, inst->timestampsPerCall);
+            }
+            WebRtcNetEQ_Normal(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+            inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+            inst->ExpandInst.w16_consecExp = 0;
+            break;
+
+        case DSP_INSTR_DTMF_GENERATE:
+#ifdef NETEQ_ATEVENT_DECODE
+            dtmfSwitch = 0;
+            if ((inst->w16_mode != MODE_DTMF) && (inst->DTMFInst.reinit == 0))
+            {
+                /* Special case; see below.
+                 * We must catch this before calling DTMFGenerate,
+                 * since reinit is set to 0 in that call.
+                 */
+                dtmfSwitch = 1;
+            }
+
+            len = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
+                pw16_NetEqAlgorithm_buffer, inst->fs, -1);
+            if (len < 0)
+            {
+                /* error occurred */
+                return_value = len;
+                len = inst->timestampsPerCall;
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+
+            if (dtmfSwitch == 1)
+            {
+                /*
+                 * This is the special case where the previous operation was DTMF overdub.
+                 * but the current instruction is "regular" DTMF. We must make sure that the
+                 * DTMF does not have any discontinuities. The first DTMF sample that we
+                 * generate now must be played out immediately, wherefore it must be copied to
+                 * the speech buffer.
+                 */
+
+                /*
+                 * Generate extra DTMF data to fill the space between
+                 * curPosition and endPosition
+                 */
+                WebRtc_Word16 tempLen;
+
+                tempLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
+                    &pw16_NetEqAlgorithm_buffer[len], inst->fs,
+                    inst->endPosition - inst->curPosition);
+                if (tempLen < 0)
+                {
+                    /* error occurred */
+                    return_value = tempLen;
+                    len = inst->endPosition - inst->curPosition;
+                    WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0,
+                        inst->endPosition - inst->curPosition);
+                }
+
+                /* Add to total length */
+                len += tempLen;
+
+                /* Overwrite the "future" part of the speech buffer with the new DTMF data */
+
+                WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition],
+                                      pw16_NetEqAlgorithm_buffer,
+                                      inst->endPosition - inst->curPosition);
+
+                /* Shuffle the remaining data to the beginning of algorithm buffer */
+                len -= (inst->endPosition - inst->curPosition);
+                WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
+                    &pw16_NetEqAlgorithm_buffer[inst->endPosition - inst->curPosition],
+                    len);
+            }
+
+            inst->endTimestamp += inst->timestampsPerCall;
+            inst->DTMFInst.reinit = 0;
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->w16_mode = MODE_DTMF;
+            BGNonly = 0; /* override BGN only and let DTMF through */
+
+            playDtmf = 0; /* set to zero because the DTMF is already in the Algorithm buffer */
+            /*
+             * If playDtmf is 1, an extra DTMF vector will be generated and overdubbed
+             * on the output.
+             */
+
+#ifdef NETEQ_STEREO
+            if (msInfo->msMode == NETEQ_MASTER)
+            {
+                /* signal to slave that master is using DTMF only */
+                msInfo->extraInfo = DTMF_ONLY;
+            }
+#endif
+
+            break;
+#else
+            inst->w16_mode = MODE_ERROR;
+            dspInfo->lastMode = MODE_ERROR;
+            return FAULTY_INSTRUCTION;
+#endif
+
+        case DSP_INSTR_DO_ALTERNATIVE_PLC:
+            if (inst->codec_ptr_inst.funcDecodePLC != 0)
+            {
+                len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
+                    pw16_NetEqAlgorithm_buffer, 1);
+            }
+            else
+            {
+                len = inst->timestampsPerCall;
+                /* ZeroStuffing... */
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+            inst->ExpandInst.w16_consecExp = 0;
+            break;
+        case DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS:
+            if (inst->codec_ptr_inst.funcDecodePLC != 0)
+            {
+                len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
+                    pw16_NetEqAlgorithm_buffer, 1);
+            }
+            else
+            {
+                len = inst->timestampsPerCall;
+                /* ZeroStuffing... */
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->endTimestamp += len;
+            break;
+        case DSP_INSTR_DO_AUDIO_REPETITION:
+            len = inst->timestampsPerCall;
+            /* copy->paste... */
+            WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
+                                  &inst->speechBuffer[inst->endPosition-len], len);
+            inst->ExpandInst.w16_consecExp = 0;
+            break;
+        case DSP_INSTR_DO_AUDIO_REPETITION_INC_TS:
+            len = inst->timestampsPerCall;
+            /* copy->paste... */
+            WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
+                                  &inst->speechBuffer[inst->endPosition-len], len);
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->endTimestamp += len;
+            break;
+
+        case DSP_INSTR_PREEMPTIVE_EXPAND:
+            if (len < 3 * inst->timestampsPerCall)
+            {
+                /* borrow samples from sync buffer if necessary */
+                borrowedSamples = 3 * inst->timestampsPerCall - len; /* borrow this many samples */
+                /* calculate how many of these are already played out */
+                oldBorrowedSamples = WEBRTC_SPL_MAX(0,
+                    borrowedSamples - (inst->endPosition - inst->curPosition));
+                WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
+                                       pw16_decoded_buffer, len);
+                WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
+                                      &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
+                                      borrowedSamples);
+            }
+            else
+            {
+                borrowedSamples = 0;
+                oldBorrowedSamples = 0;
+            }
+
+#ifdef NETEQ_DELAY_LOGGING
+            w16_tmp1 = len;
+#endif
+            /* do the expand */
+            return_value = WebRtcNetEQ_PreEmptiveExpand(inst,
+#ifdef SCRATCH
+                /* use same scratch memory as Accelerate */
+                pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
+#endif
+                pw16_decoded_buffer, len + borrowedSamples, oldBorrowedSamples,
+                pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+
+            if (return_value < 0)
+            {
+                /* error */
+                return return_value;
+            }
+
+            if (borrowedSamples > 0)
+            {
+                /* return borrowed samples */
+
+                /* Copy back to last part of speechBuffer from beginning of output buffer */
+                WEBRTC_SPL_MEMCPY_W16( &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
+                    pw16_NetEqAlgorithm_buffer,
+                    borrowedSamples);
+
+                len -= borrowedSamples; /* remove the borrowed samples from new total length */
+
+                /* Move to beginning of output buffer from end of output buffer */
+                WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
+                    &pw16_NetEqAlgorithm_buffer[borrowedSamples],
+                    len);
+            }
+
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+            temp_var = len - w16_tmp1; /* number of samples added */
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+            /* If last packet was decoded as inband CNG, set mode to CNG instead */
+            if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_FADE_TO_BGN:
+        {
+            int tempReturnValue;
+            /* do not overwrite return_value, since it likely contains an error code */
+
+            /* calculate interpolation length */
+            w16_tmp3 = WEBRTC_SPL_MIN(inst->endPosition - inst->curPosition,
+                    inst->timestampsPerCall);
+            /* check that it will fit in pw16_NetEqAlgorithm_buffer */
+            if (w16_tmp3 + inst->w16_frameLen > NETEQ_MAX_OUTPUT_SIZE)
+            {
+                w16_tmp3 = NETEQ_MAX_OUTPUT_SIZE - inst->w16_frameLen;
+            }
+
+            /* call Expand */
+            len = inst->timestampsPerCall + inst->ExpandInst.w16_overlap;
+            pos = 0;
+
+            tempReturnValue = WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                pw16_NetEqAlgorithm_buffer, &len, 1);
+
+            if (tempReturnValue < 0)
+            {
+                /* error */
+                /* this error value will override return_value */
+                return tempReturnValue;
+            }
+
+            pos += len; /* got len samples from expand */
+
+            /* copy to fill the demand */
+            while (pos + len <= inst->w16_frameLen + w16_tmp3)
+            {
+                WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos],
+                    pw16_NetEqAlgorithm_buffer, len);
+                pos += len;
+            }
+
+            /* fill with fraction of the expand vector if needed */
+            if (pos < inst->w16_frameLen + w16_tmp3)
+            {
+                WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos], pw16_NetEqAlgorithm_buffer,
+                    inst->w16_frameLen + w16_tmp3 - pos);
+            }
+
+            len = inst->w16_frameLen + w16_tmp3; /* truncate any surplus samples since we don't want these */
+
+            /*
+             * Mix with contents in sync buffer. Find largest power of two that is less than
+             * interpolate length divide 16384 with this number; result is in w16_tmp2.
+             */
+            w16_tmp1 = 2;
+            w16_tmp2 = 16384;
+            while (w16_tmp1 <= w16_tmp3)
+            {
+                w16_tmp2 >>= 1; /* divide with 2 */
+                w16_tmp1 <<= 1; /* increase with a factor of 2 */
+            }
+
+            w16_tmp1 = 0;
+            pos = 0;
+            while (w16_tmp1 < 16384)
+            {
+                inst->speechBuffer[inst->curPosition + pos]
+                    =
+                    (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                        WEBRTC_SPL_MUL_16_16( inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
+                            16384-w16_tmp1 ) +
+                        WEBRTC_SPL_MUL_16_16( pw16_NetEqAlgorithm_buffer[pos], w16_tmp1 ),
+                        14 );
+                w16_tmp1 += w16_tmp2;
+                pos++;
+            }
+
+            /* overwrite remainder of speech buffer */
+
+            WEBRTC_SPL_MEMCPY_W16( &inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
+                &pw16_NetEqAlgorithm_buffer[pos], w16_tmp3 - pos);
+
+            len -= w16_tmp3;
+            /* shift algorithm buffer */
+
+            WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
+                &pw16_NetEqAlgorithm_buffer[w16_tmp3],
+                len );
+
+            /* Update variables for VQmon */
+            inst->w16_concealedTS += len;
+
+            inst->w16_mode = MODE_FADE_TO_BGN;
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+            temp_var = len;
+            fwrite(&temp_var, sizeof(int), 1, delay_fid2);
+#endif
+
+            break;
+        }
+
+        default:
+            inst->w16_mode = MODE_ERROR;
+            dspInfo->lastMode = MODE_ERROR;
+            return FAULTY_INSTRUCTION;
+    } /* end of grand switch */
+
+    /* Copy data directly to output buffer */
+
+    w16_tmp2 = 0;
+    if ((inst->endPosition + len - inst->curPosition - inst->ExpandInst.w16_overlap)
+        >= inst->timestampsPerCall)
+    {
+        w16_tmp2 = inst->endPosition - inst->curPosition;
+        w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2, 0); /* Additional error protection, just in case */
+        w16_tmp1 = WEBRTC_SPL_MIN(w16_tmp2, inst->timestampsPerCall);
+        w16_tmp2 = inst->timestampsPerCall - w16_tmp1;
+        WEBRTC_SPL_MEMCPY_W16(pw16_outData, &inst->speechBuffer[inst->curPosition], w16_tmp1);
+        WEBRTC_SPL_MEMCPY_W16(&pw16_outData[w16_tmp1], pw16_NetEqAlgorithm_buffer, w16_tmp2);
+        DataEnough = 1;
+    }
+    else
+    {
+        DataEnough = 0;
+    }
+
+    if (playDtmf != 0)
+    {
+#ifdef NETEQ_ATEVENT_DECODE
+        WebRtc_Word16 outDataIndex = 0;
+        WebRtc_Word16 overdubLen = -1; /* default len */
+        WebRtc_Word16 dtmfLen;
+
+        /*
+         * Overdub the output with DTMF. Note that this is not executed if the
+         * DSP_INSTR_DTMF_GENERATE operation is performed above.
+         */
+        if (inst->DTMFInst.lastDtmfSample - inst->curPosition > 0)
+        {
+            /* special operation for transition from "DTMF only" to "DTMF overdub" */
+            outDataIndex
+                = WEBRTC_SPL_MIN(inst->DTMFInst.lastDtmfSample - inst->curPosition,
+                    inst->timestampsPerCall);
+            overdubLen = inst->timestampsPerCall - outDataIndex;
+        }
+
+        dtmfLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
+            &pw16_outData[outDataIndex], inst->fs, overdubLen);
+        if (dtmfLen < 0)
+        {
+            /* error occurred */
+            return_value = dtmfLen;
+        }
+        inst->DTMFInst.reinit = 0;
+#else
+        inst->w16_mode = MODE_ERROR;
+        dspInfo->lastMode = MODE_ERROR;
+        return FAULTY_INSTRUCTION;
+#endif
+    }
+
+    /*
+     * Shuffle speech buffer to allow more data. Move data from pw16_NetEqAlgorithm_buffer
+     * to speechBuffer.
+     */
+    if (instr != DSP_INSTR_EXPAND)
+    {
+        w16_tmp1 = WEBRTC_SPL_MIN(inst->endPosition, len);
+        WEBRTC_SPL_MEMMOVE_W16(inst->speechBuffer, inst->speechBuffer + w16_tmp1,
+                               (inst->endPosition-w16_tmp1));
+        WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-w16_tmp1],
+                              &pw16_NetEqAlgorithm_buffer[len-w16_tmp1], w16_tmp1);
+#ifdef NETEQ_ATEVENT_DECODE
+        /* Update index to end of DTMF data in speech buffer */
+        if (instr == DSP_INSTR_DTMF_GENERATE)
+        {
+            /* We have written DTMF data to the end of speech buffer */
+            inst->DTMFInst.lastDtmfSample = inst->endPosition;
+        }
+        else if (inst->DTMFInst.lastDtmfSample > 0)
+        {
+            /* The end of DTMF data in speech buffer has been shuffled */
+            inst->DTMFInst.lastDtmfSample -= w16_tmp1;
+        }
+#endif
+        /*
+         * Update the BGN history if last operation was not expand (nor Merge, Accelerate
+         * or Pre-emptive expand, to save complexity).
+         */
+        if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_MERGE)
+            && (inst->w16_mode != MODE_SUCCESS_ACCELERATE) && (inst->w16_mode
+            != MODE_LOWEN_ACCELERATE) && (inst->w16_mode != MODE_SUCCESS_PREEMPTIVE)
+            && (inst->w16_mode != MODE_LOWEN_PREEMPTIVE) && (inst->w16_mode
+            != MODE_FADE_TO_BGN) && (inst->w16_mode != MODE_DTMF) && (!BGNonly))
+        {
+            WebRtcNetEQ_BGNUpdate(inst
+#ifdef SCRATCH
+                , pw16_scratchPtr + SCRATCH_NETEQ_BGN_UPDATE
+#endif
+            );
+        }
+    }
+    else /* instr == DSP_INSTR_EXPAND */
+    {
+        /* Nothing should be done since data is already copied to output. */
+    }
+
+    inst->curPosition -= len;
+
+    /*
+     * Extra protection in case something should go totally wrong in terms of sizes...
+     * If everything is ok this should NEVER happen.
+     */
+    if (inst->curPosition < -inst->timestampsPerCall)
+    {
+        inst->curPosition = -inst->timestampsPerCall;
+    }
+
+    if ((instr != DSP_INSTR_EXPAND) && (instr != DSP_INSTR_MERGE) && (instr
+        != DSP_INSTR_FADE_TO_BGN))
+    {
+        /* Reset concealed TS parameter if it does not seem to have been flushed */
+        if (inst->w16_concealedTS > inst->timestampsPerCall)
+        {
+            inst->w16_concealedTS = 0;
+        }
+    }
+
+    /*
+     * Double-check that we actually have 10 ms to play. If we haven't, there has been a
+     * serious error.The decoder might have returned way too few samples
+     */
+    if (!DataEnough)
+    {
+        /* This should not happen. Set outdata to zeros, and return error. */
+        WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
+        *pw16_len = inst->timestampsPerCall;
+        inst->w16_mode = MODE_ERROR;
+        dspInfo->lastMode = MODE_ERROR;
+        return RECOUT_ERROR_SAMPLEUNDERRUN;
+    }
+
+    /*
+     * Update Videosync timestamp (this special timestamp is needed since the endTimestamp
+     * stops during CNG and Expand periods.
+     */
+    if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_RFC3389CNG))
+    {
+        WebRtc_UWord32 uw32_tmpTS;
+        uw32_tmpTS = inst->endTimestamp - (inst->endPosition - inst->curPosition);
+        if ((WebRtc_Word32) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
+        {
+            inst->videoSyncTimestamp = uw32_tmpTS;
+        }
+    }
+    else
+    {
+        inst->videoSyncTimestamp += inst->timestampsPerCall;
+    }
+
+    /* After this, regardless of what has happened, deliver 10 ms of future data */
+    inst->curPosition += inst->timestampsPerCall;
+    *pw16_len = inst->timestampsPerCall;
+
+    /* Remember if BGNonly was used */
+    if (BGNonly)
+    {
+        inst->w16_mode |= MODE_BGN_ONLY;
+    }
+
+    return return_value;
+}
+
+#undef    SCRATCH_ALGORITHM_BUFFER
+#undef    SCRATCH_NETEQ_NORMAL
+#undef    SCRATCH_NETEQ_MERGE
+#undef    SCRATCH_NETEQ_BGN_UPDATE
+#undef    SCRATCH_NETEQ_EXPAND
+#undef    SCRATCH_DSP_INFO
+#undef    SCRATCH_NETEQ_ACCELERATE
+#undef    SIZE_SCRATCH_BUFFER
diff --git a/trunk/src/modules/audio_coding/neteq/rtcp.c b/trunk/src/modules/audio_coding/neteq/rtcp.c
new file mode 100644
index 0000000..35f73da
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/rtcp.c
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of RTCP statistics reporting.
+ */
+
+#include "rtcp.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo)
+{
+    /*
+     * Initialize everything to zero and then set the start values for the RTP packet stream.
+     */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) RTCP_inst, 0,
+        sizeof(WebRtcNetEQ_RTCP_t) / sizeof(WebRtc_Word16));
+    RTCP_inst->base_seq = uw16_seqNo;
+    RTCP_inst->max_seq = uw16_seqNo;
+    return 0;
+}
+
+int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
+                           WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime)
+{
+    WebRtc_Word16 w16_SeqDiff;
+    WebRtc_Word32 w32_TimeDiff;
+    WebRtc_Word32 w32_JitterDiff;
+
+    /*
+     * Update number of received packets, and largest packet number received.
+     */
+    RTCP_inst->received++;
+    w16_SeqDiff = uw16_seqNo - RTCP_inst->max_seq;
+    if (w16_SeqDiff >= 0)
+    {
+        if (uw16_seqNo < RTCP_inst->max_seq)
+        {
+            /* Wrap around detected */
+            RTCP_inst->cycles++;
+        }
+        RTCP_inst->max_seq = uw16_seqNo;
+    }
+
+    /* Calculate Jitter, and update previous timestamps */
+    /* Note that the value in RTCP_inst->jitter is in Q4. */
+    if (RTCP_inst->received > 1)
+    {
+        w32_TimeDiff = (uw32_recTime - (uw32_timeStamp - RTCP_inst->transit));
+        w32_TimeDiff = WEBRTC_SPL_ABS_W32(w32_TimeDiff);
+        w32_JitterDiff = WEBRTC_SPL_LSHIFT_W16(w32_TimeDiff, 4) - RTCP_inst->jitter;
+        RTCP_inst->jitter = RTCP_inst->jitter + WEBRTC_SPL_RSHIFT_W32((w32_JitterDiff + 8), 4);
+    }
+    RTCP_inst->transit = (uw32_timeStamp - uw32_recTime);
+    return 0;
+}
+
+int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
+                             WebRtc_UWord16 *puw16_fraction_lost,
+                             WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
+                             WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset)
+{
+    WebRtc_UWord32 uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
+    WebRtc_Word32 w32_lost;
+
+    /* Extended highest sequence number received */
+    *puw32_ext_max
+        = (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)RTCP_inst->cycles, 16)
+            + RTCP_inst->max_seq;
+
+    /*
+     * Calculate expected number of packets and compare it to the number of packets that
+     * were actually received => the cumulative number of packets lost can be extracted.
+     */
+    uw32_exp_nr = *puw32_ext_max - RTCP_inst->base_seq + 1;
+    if (RTCP_inst->received == 0)
+    {
+        /* no packets received, assume none lost */
+        *puw32_cum_lost = 0;
+    }
+    else if (uw32_exp_nr > RTCP_inst->received)
+    {
+        *puw32_cum_lost = uw32_exp_nr - RTCP_inst->received;
+        if (*puw32_cum_lost > (WebRtc_UWord32) 0xFFFFFF)
+        {
+            *puw32_cum_lost = 0xFFFFFF;
+        }
+    }
+    else
+    {
+        *puw32_cum_lost = 0;
+    }
+
+    /* Fraction lost (Since last report) */
+    uw32_exp_interval = uw32_exp_nr - RTCP_inst->exp_prior;
+    if (!doNotReset)
+    {
+        RTCP_inst->exp_prior = uw32_exp_nr;
+    }
+    uw32_rec_interval = RTCP_inst->received - RTCP_inst->rec_prior;
+    if (!doNotReset)
+    {
+        RTCP_inst->rec_prior = RTCP_inst->received;
+    }
+    w32_lost = (WebRtc_Word32) (uw32_exp_interval - uw32_rec_interval);
+    if (uw32_exp_interval == 0 || w32_lost <= 0 || RTCP_inst->received == 0)
+    {
+        *puw16_fraction_lost = 0;
+    }
+    else
+    {
+        *puw16_fraction_lost = (WebRtc_UWord16) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
+            / uw32_exp_interval);
+    }
+    if (*puw16_fraction_lost > 0xFF)
+    {
+        *puw16_fraction_lost = 0xFF;
+    }
+
+    /* Inter-arrival jitter */
+    *puw32_jitter = (RTCP_inst->jitter) >> 4; /* scaling from Q4 */
+    return 0;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/rtcp.h b/trunk/src/modules/audio_coding/neteq/rtcp.h
new file mode 100644
index 0000000..009e019
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/rtcp.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * RTCP statistics reporting.
+ */
+
#ifndef RTCP_H
#define RTCP_H

#include "typedefs.h"

/*
 * Per-stream state for RTCP receiver statistics.
 * Reset by WebRtcNetEQ_RTCPInit() and updated for every received packet
 * by WebRtcNetEQ_RTCPUpdate().
 */
typedef struct
{
    WebRtc_UWord16 cycles; /* The number of wrap-arounds for the sequence number */
    WebRtc_UWord16 max_seq; /* The maximum sequence number received
     (starts from 0 again after wrap around) */
    WebRtc_UWord16 base_seq; /* The sequence number of the first packet that arrived */
    WebRtc_UWord32 received; /* The number of packets that have been received */
    WebRtc_UWord32 rec_prior; /* Number of packets received when last report was generated */
    WebRtc_UWord32 exp_prior; /* Number of packets that should have been received if no
     packets were lost. Stored value from last report. */
    WebRtc_UWord32 jitter; /* Jitter statistics at this instance (calculated according to RFC) */
    WebRtc_Word32 transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
} WebRtcNetEQ_RTCP_t;

/****************************************************************************
 * WebRtcNetEQ_RTCPInit(...)
 *
 * This function initializes the RTCP statistics instance: all counters are
 * reset and the sequence number of the first received packet is recorded.
 *
 * Input:
 *      - RTCP_inst     : RTCP instance to initialize.
 *      - uw16_seqNo    : Sequence number of the first received packet.
 *
 * Return value         :  0 - Ok
 *                        (no error conditions in the current implementation)
 */

int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo);

/****************************************************************************
 * WebRtcNetEQ_RTCPUpdate(...)
 *
 * This function updates the statistics with the data of one received RTP
 * packet: the packet count, the highest sequence number (with wrap-around
 * handling) and the inter-arrival jitter estimate.
 *
 * Input:
 *      - RTCP_inst     : RTCP instance holding the statistics to update.
 *      - uw16_seqNo    : Sequence number of the received packet.
 *      - uw32_timeStamp: Timestamp from the RTP header.
 *      - uw32_recTime  : Time (in RTP timestamp units) when the packet
 *                        was received.
 *
 * Return value         :  0 - Ok
 *                        (no error conditions in the current implementation)
 */

int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
                           WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime);

/****************************************************************************
 * WebRtcNetEQ_RTCPGetStats(...)
 *
 * This function calculates the values that are needed for an RTCP receiver
 * report.
 *
 * Input:
 *      - RTCP_inst     : RTCP instance containing the accumulated packet
 *                        statistics.
 *      - doNotReset    : If non-zero, the per-report baselines used for the
 *                        fraction-lost statistic are NOT advanced.
 *
 * Output:
 *      - RTCP_inst     : Updated RTCP information (the per-report baselines
 *                        are advanced unless doNotReset is set).
 *      - fraction_lost : Number of lost RTP packets divided by the number of
 *                        expected packets since the last report, in Q8.
 *      - cum_lost      : Cumulative number of lost packets during this
 *                        session (saturated to 24 bits).
 *      - ext_max       : Extended highest sequence number received.
 *      - jitter        : Inter-arrival jitter.
 *
 * Return value         :  0 - Ok
 *                        (no error conditions in the current implementation)
 */

int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
                             WebRtc_UWord16 *puw16_fraction_lost,
                             WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
                             WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset);

#endif
diff --git a/trunk/src/modules/audio_coding/neteq/rtp.c b/trunk/src/modules/audio_coding/neteq/rtp.c
new file mode 100644
index 0000000..bd4f9a2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/rtp.c
@@ -0,0 +1,240 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * RTP related functions.
+ */
+
+#include "rtp.h"
+
+#include "typedefs.h" /* to define endianness */
+
+#include "neteq_error_codes.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_RTPPayloadInfo(...)
+ *
+ * Parse the fixed RTP header of a UDP datagram into an RTPPacket_t struct
+ * and locate the payload, skipping the CSRC list, the (optional) header
+ * extension and the (optional) padding.
+ *
+ * Input:
+ *      - pw16_Datagram : datagram from the network, as 16-bit words
+ *      - i_DatagramLen : length in bytes of the datagram
+ *
+ * Output:
+ *      - RTPheader     : structure with the datagram info
+ *
+ * Return value         : 0 - Ok
+ *                        RTP_TOO_SHORT_PACKET or RTP_CORRUPT_PACKET - Error
+ */
+int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
+                               RTPPacket_t* RTPheader)
+{
+    int i_P, i_X, i_CC, i_startPosition;
+    int i_IPver;
+    int i_extlength = -1; /* Default value if there is no extension */
+    int i_padlength = 0; /* Default value if there is no padding */
+
+    /* A datagram shorter than the 12-byte fixed RTP header cannot be valid. */
+    if (i_DatagramLen < 12)
+    {
+        return RTP_TOO_SHORT_PACKET;
+    }
+
+#ifdef WEBRTC_BIG_ENDIAN
+    i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
+    i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
+    i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
+    i_CC = ((WebRtc_UWord16) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
+    RTPheader->payloadType = pw16_Datagram[0] & 0x7F; /* Get the coder type */
+    RTPheader->seqNumber = pw16_Datagram[1]; /* Get the sequence number */
+    RTPheader->timeStamp = ((((WebRtc_UWord32) ((WebRtc_UWord16) pw16_Datagram[2])) << 16)
+        | (WebRtc_UWord16) (pw16_Datagram[3])); /* Get timestamp */
+    RTPheader->ssrc = (((WebRtc_UWord32) pw16_Datagram[4]) << 16)
+        + (((WebRtc_UWord32) pw16_Datagram[5])); /* Get the SSRC */
+
+    if (i_X == 1)
+    {
+        /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
+        if (i_DatagramLen < 16 + 4 * i_CC)
+        {
+            /* Datagram too short to hold the extension header word that the
+             * X bit promises; reject instead of reading past the buffer. */
+            return RTP_CORRUPT_PACKET;
+        }
+        i_extlength = pw16_Datagram[7 + 2 * i_CC];
+    }
+    if (i_P == 1)
+    {
+        /* Padding exists. Find out how many bytes the padding consists of. */
+        if (i_DatagramLen & 0x1)
+        {
+            /* odd number of bytes => last byte in higher byte */
+            i_padlength = (((WebRtc_UWord16) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
+        }
+        else
+        {
+            /* even number of bytes => last byte in lower byte */
+            i_padlength = ((pw16_Datagram[(i_DatagramLen >> 1) - 1]) & 0xFF);
+        }
+    }
+#else /* WEBRTC_LITTLE_ENDIAN */
+    /* On little-endian hosts the two bytes of each 16-bit word are swapped
+     * relative to network order, hence the different masks and shifts. */
+    i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
+    i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
+    i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
+    i_CC = (WebRtc_UWord16) (pw16_Datagram[0] & 0xF); /* Get the CC number */
+    RTPheader->payloadType = (pw16_Datagram[0] >> 8) & 0x7F; /* Get the coder type */
+    RTPheader->seqNumber = (((((WebRtc_UWord16) pw16_Datagram[1]) >> 8) & 0xFF)
+        | (((WebRtc_UWord16) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
+    RTPheader->timeStamp = ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF) << 24)
+        | ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF00) << 8)
+        | ((((WebRtc_UWord16) pw16_Datagram[3]) >> 8) & 0xFF)
+        | ((((WebRtc_UWord16) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
+    RTPheader->ssrc = ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF) << 24)
+        | ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF00) << 8)
+        | ((((WebRtc_UWord16) pw16_Datagram[5]) >> 8) & 0xFF)
+        | ((((WebRtc_UWord16) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */
+
+    if (i_X == 1)
+    {
+        /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
+        if (i_DatagramLen < 16 + 4 * i_CC)
+        {
+            /* Datagram too short to hold the extension header word that the
+             * X bit promises; reject instead of reading past the buffer. */
+            return RTP_CORRUPT_PACKET;
+        }
+        i_extlength = (((((WebRtc_UWord16) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
+            | (((WebRtc_UWord16) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
+    }
+    if (i_P == 1)
+    {
+        /* Padding exists. Find out how many bytes the padding consists of. */
+        if (i_DatagramLen & 0x1)
+        {
+            /* odd number of bytes => last byte in higher byte */
+            i_padlength = (pw16_Datagram[i_DatagramLen >> 1] & 0xFF);
+        }
+        else
+        {
+            /* even number of bytes => last byte in lower byte */
+            i_padlength = (((WebRtc_UWord16) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
+        }
+    }
+#endif
+
+    /* Payload starts after the fixed header, the CSRC list and the
+     * (optional) extension; i_extlength == -1 when there is no extension. */
+    i_startPosition = 12 + 4 * (i_extlength + 1) + 4 * i_CC;
+    RTPheader->payload = &pw16_Datagram[i_startPosition >> 1];
+    RTPheader->payloadLen = i_DatagramLen - i_startPosition - i_padlength;
+    RTPheader->starts_byte1 = 0;
+
+    /* Sanity checks: RTP version must be 2 and the payload must lie
+     * completely inside the datagram with a sane length. */
+    if ((i_IPver != 2) || (RTPheader->payloadLen <= 0) || (RTPheader->payloadLen >= 16000)
+        || (i_startPosition < 12) || (i_startPosition > i_DatagramLen))
+    {
+        return RTP_CORRUPT_PACKET;
+    }
+
+    return 0;
+}
+
+#ifdef NETEQ_RED_CODEC
+
+/****************************************************************************
+ * WebRtcNetEQ_RedundancySplit(...)
+ *
+ * Split an RFC 2198 redundancy (RED) payload. RTPheader[0] enters holding
+ * the whole RED payload; on return it holds the primary payload and, if the
+ * redundancy is accepted, RTPheader[1] holds the most recent redundant
+ * payload. Older redundant blocks are discarded. The caller must already
+ * have verified that this really is a RED payload; no such check is done
+ * here.
+ *
+ * Input:
+ *      - RTPheader         : RTPheader[0] holds the whole RED packet
+ *      - i_MaximumPayloads : maximum number of payloads to extract
+ *
+ * Output:
+ *      - RTPheader         : [0] = primary payload, [1] = redundant payload
+ *                            (remaining entries are cleared)
+ *      - i_No_Of_Payloads  : number of payloads extracted (1 or 2)
+ *
+ * Return value             : 0 - Ok
+ *                            RED_SPLIT_ERROR2 - parsing ran outside packet
+ */
+int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
+                                int *i_No_Of_Payloads)
+{
+    const WebRtc_Word16 *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
+    WebRtc_UWord16 uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
+    int i_blockLength, i_k;
+    int i_discardedBlockLength = 0;
+    int singlePayload = 0;
+
+#ifdef WEBRTC_BIG_ENDIAN
+    if ((pw16_data[0] & 0x8000) == 0)
+    {
+        /* F bit clear => only one payload in this packet */
+        singlePayload = 1;
+        /* set the blocklength to -4 to deduct the non-existent 4-byte RED header */
+        i_blockLength = -4;
+        RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
+    }
+    else
+    {
+        /*
+         * Discard all but the two last payloads. The F bit (0x8000 after
+         * masking) is set on every RED block header except the last one.
+         * Note: test with != 0; (x & 0x8000) can never equal 1.
+         */
+        while (((pw16_data[2] & 0x8000) != 0)&&
+            (pw16_data<((RTPheader[0]->payload)+((RTPheader[0]->payloadLen+1)>>1))))
+        {
+            i_discardedBlockLength += (4+(((WebRtc_UWord16)pw16_data[1]) & 0x3FF));
+            pw16_data+=2;
+        }
+        if (pw16_data>=(RTPheader[0]->payload+((RTPheader[0]->payloadLen+1)>>1)))
+        {
+            return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
+        }
+        singlePayload = 0; /* the packet contains more than one payload */
+        uw16_secondPayload = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
+        RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[2]) & 0x7F00) >> 8);
+        /* 14-bit timestamp offset and 10-bit block length of the RED header */
+        uw16_offsetTimeStamp = ((((WebRtc_UWord16)pw16_data[0]) & 0xFF) << 6) +
+        ((((WebRtc_UWord16)pw16_data[1]) & 0xFC00) >> 10);
+        i_blockLength = (((WebRtc_UWord16)pw16_data[1]) & 0x3FF);
+    }
+#else /* WEBRTC_LITTLE_ENDIAN */
+    if ((pw16_data[0] & 0x80) == 0)
+    {
+        /* F bit clear => only one payload in this packet */
+        singlePayload = 1;
+        /* set the blocklength to -4 to deduct the non-existent 4-byte RED header */
+        i_blockLength = -4;
+        RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
+    }
+    else
+    {
+        /*
+         * Discard all but the two last payloads. The F bit (0x80 after
+         * masking) is set on every RED block header except the last one.
+         * Note: test with != 0; (x & 0x80) can never equal 1.
+         */
+        while (((pw16_data[2] & 0x80) != 0) && (pw16_data < ((RTPheader[0]->payload)
+            + ((RTPheader[0]->payloadLen + 1) >> 1))))
+        {
+            i_discardedBlockLength += (4 + ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
+                + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8));
+            pw16_data += 2;
+        }
+        if (pw16_data >= (RTPheader[0]->payload + ((RTPheader[0]->payloadLen + 1) >> 1)))
+        {
+            return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
+        }
+        singlePayload = 0; /* the packet contains more than one payload */
+        uw16_secondPayload = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
+        RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[2]) & 0x7F);
+        /* 14-bit timestamp offset and 10-bit block length of the RED header */
+        uw16_offsetTimeStamp = ((((WebRtc_UWord16) pw16_data[0]) & 0xFF00) >> 2)
+            + ((((WebRtc_UWord16) pw16_data[1]) & 0xFC) >> 2);
+        i_blockLength = ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
+            + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8);
+    }
+#endif
+
+    if (i_MaximumPayloads < 2 || singlePayload == 1)
+    {
+        /* Reject the redundancy; or no redundant payload present. */
+        for (i_k = 1; i_k < i_MaximumPayloads; i_k++)
+        {
+            RTPheader[i_k]->payloadType = -1;
+            RTPheader[i_k]->payloadLen = 0;
+        }
+
+        /* update the pointer for the main data; skip the 1-byte last RED
+         * header plus the 4-byte full header and block (5 + blockLength) */
+        pw16_data = &pw16_data[(5 + i_blockLength) >> 1];
+        RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
+        RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
+            - i_discardedBlockLength;
+        RTPheader[0]->payload = pw16_data;
+
+        *i_No_Of_Payloads = 1;
+
+    }
+    else
+    {
+        /* Redundancy accepted, put the redundancy in second RTPheader. */
+        RTPheader[1]->payloadType = uw16_secondPayload;
+        RTPheader[1]->payload = &pw16_data[5 >> 1];
+        RTPheader[1]->starts_byte1 = 5 & 0x1;
+        RTPheader[1]->seqNumber = RTPheader[0]->seqNumber;
+        RTPheader[1]->timeStamp = RTPheader[0]->timeStamp - uw16_offsetTimeStamp;
+        RTPheader[1]->ssrc = RTPheader[0]->ssrc;
+        RTPheader[1]->payloadLen = i_blockLength;
+
+        /* Modify first RTP packet, so that it contains the main data. */
+        RTPheader[0]->payload = &pw16_data[(5 + i_blockLength) >> 1];
+        RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
+        RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
+            - i_discardedBlockLength;
+
+        /* Clear the following payloads. */
+        for (i_k = 2; i_k < i_MaximumPayloads; i_k++)
+        {
+            RTPheader[i_k]->payloadType = -1;
+            RTPheader[i_k]->payloadLen = 0;
+        }
+
+        *i_No_Of_Payloads = 2;
+    }
+    return 0;
+}
+
+#endif
+
diff --git a/trunk/src/modules/audio_coding/neteq/rtp.h b/trunk/src/modules/audio_coding/neteq/rtp.h
new file mode 100644
index 0000000..8490d62
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/rtp.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * RTP data struct and related functions.
+ */
+
+#ifndef RTP_H
+#define RTP_H
+
+#include "typedefs.h"
+
+#include "codec_db.h"
+
+/* Parsed RTP packet; filled in by WebRtcNetEQ_RTPPayloadInfo(). */
+typedef struct
+{
+    WebRtc_UWord16 seqNumber;     /* RTP sequence number */
+    WebRtc_UWord32 timeStamp;     /* RTP timestamp */
+    WebRtc_UWord32 ssrc;          /* RTP synchronization source identifier */
+    int payloadType;              /* RTP payload type; -1 marks an empty/cleared slot */
+    const WebRtc_Word16 *payload; /* pointer to the payload data, as 16-bit words */
+    WebRtc_Word16 payloadLen;     /* payload length in bytes */
+    WebRtc_Word16 starts_byte1;   /* non-zero if payload starts at an odd byte
+                                     offset within *payload */
+    WebRtc_Word16 rcuPlCntr;      /* redundancy counter; >0 flags a redundant
+                                     payload (assumption from use in
+                                     signal_mcu.c - confirm with callers) */
+} RTPPacket_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_RTPPayloadInfo(...)
+ *
+ * Converts a datagram into an RTP header struct.
+ *
+ * Input:
+ *		- Datagram		: UDP datagram from the network
+ *		- DatagramLen	: Length in bytes of the datagram
+ *
+ * Output:
+ *		- RTPheader		: Structure with the datagram info
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
+                               RTPPacket_t* RTPheader);
+
+/****************************************************************************
+ * WebRtcNetEQ_RedundancySplit(...)
+ *
+ * Splits a Redundancy RTP struct into two RTP structs. User has to check 
+ * that it's really the redundancy payload. No such check is done inside this
+ * function.
+ *
+ * Input:
+ *		- RTPheader		: First header holds the whole RTP packet (with the redundancy payload)
+ *		- MaximumPayloads: 
+ *						  The maximum number of RTP payloads that should be
+ *						  extracted (1+maximum_no_of_Redundancies).
+ *
+ * Output:
+ *		- RTPheader		: First header holds the main RTP data, while 2..N 
+ *						  holds the redundancy data.
+ *		- No_Of_Payloads: Number of RTP payloads extracted (1 or 2).
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
+                                int *i_No_Of_Payloads);
+
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/set_fs.c b/trunk/src/modules/audio_coding/neteq/set_fs.c
new file mode 100644
index 0000000..b2ad5ca
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/set_fs.c
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Function where the sample rate is set.
+ */
+
+#include "mcu.h"
+
+#include "dtmf_buffer.h"
+#include "neteq_error_codes.h"
+
+int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs)
+{
+    WebRtc_Word16 retVal = 0;
+
+    /*
+     * For each supported sample rate: (re-)initialize the DTMF decoder when
+     * the NETEQ_ATEVENT_DECODE build flag is set, and derive the number of
+     * timestamps consumed per output call (milliseconds * samples-per-ms).
+     * Rates above 8 kHz are only available with the matching build flags.
+     */
+    switch (fs)
+    {
+        case 8000:
+#ifdef NETEQ_ATEVENT_DECODE
+            retVal = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 8;
+            break;
+
+#ifdef NETEQ_WIDEBAND
+        case 16000:
+#ifdef NETEQ_ATEVENT_DECODE
+            retVal = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 16000, 1120);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 16;
+            break;
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+        case 32000:
+#ifdef NETEQ_ATEVENT_DECODE
+            retVal = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 32000, 2240);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 32;
+            break;
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+        case 48000:
+#ifdef NETEQ_ATEVENT_DECODE
+            retVal = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 48000, 3360);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 48;
+            break;
+#endif
+
+        default:
+            /* Sample rate not supported in this build. */
+            return CODEC_DB_UNSUPPORTED_FS;
+    }
+
+    /* Commit the new sample rate only after it has been validated. */
+    inst->fs = fs;
+
+    return retVal;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/signal_mcu.c b/trunk/src/modules/audio_coding/neteq/signal_mcu.c
new file mode 100644
index 0000000..0180f38
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/signal_mcu.c
@@ -0,0 +1,766 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Signal the MCU that data is available and ask for a RecOut decision.
+ */
+
+#include "mcu.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+#include "automode.h"
+#include "dtmf_buffer.h"
+#include "mcu_dsp_common.h"
+#include "neteq_error_codes.h"
+
+#ifdef NETEQ_DELAY_LOGGING
+#include "delay_logging.h"
+#include <stdio.h>
+
+extern FILE *delay_fid2; /* file pointer to delay log file */
+#endif
+
+
+/*
+ * Signals the MCU that DSP status data is available.
+ */
+int WebRtcNetEQ_SignalMcu(MCUInst_t *inst)
+{
+
+    int i_bufferpos, i_res;
+    WebRtc_UWord16 uw16_instr;
+    DSP2MCU_info_t dspInfo;
+    WebRtc_Word16 *blockPtr, blockLen;
+    WebRtc_UWord32 uw32_availableTS;
+    RTPPacket_t temp_pkt;
+    WebRtc_Word32 w32_bufsize, w32_tmp;
+    WebRtc_Word16 payloadType = -1;
+    WebRtc_Word16 wantedNoOfTimeStamps;
+    WebRtc_Word32 totalTS;
+    WebRtc_Word16 oldPT, latePacketExist = 0;
+    WebRtc_UWord32 oldTS, prevTS, uw32_tmp;
+    WebRtc_UWord16 prevSeqNo;
+    WebRtc_Word16 nextSeqNoAvail;
+    WebRtc_Word16 fs_mult, w16_tmp;
+    WebRtc_Word16 lastModeBGNonly = 0;
+#ifdef NETEQ_DELAY_LOGGING
+    int temp_var;
+#endif
+    int playDtmf = 0;
+
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+
+    /* Increment counter since last statistics report */
+    inst->lastReportTS += inst->timestampsPerCall;
+
+    /* Increment waiting time for all packets. */
+    WebRtcNetEQ_IncrementWaitingTimes(&inst->PacketBuffer_inst);
+
+    /* Read info from DSP so we know the current status */
+
+    WEBRTC_SPL_MEMCPY_W8(&dspInfo,inst->pw16_readAddress,sizeof(DSP2MCU_info_t));
+
+    /* Set blockPtr to first payload block */
+    blockPtr = &inst->pw16_writeAddress[3];
+
+    /* Clear instruction word and number of lost samples (2*WebRtc_Word16) */
+    inst->pw16_writeAddress[0] = 0;
+    inst->pw16_writeAddress[1] = 0;
+    inst->pw16_writeAddress[2] = 0;
+
+    if ((dspInfo.lastMode & MODE_AWAITING_CODEC_PTR) != 0)
+    {
+        /*
+         * Make sure state is adjusted so that a codec update is
+         * performed when first packet arrives.
+         */
+        if (inst->new_codec != 1)
+        {
+            inst->current_Codec = -1;
+        }
+        dspInfo.lastMode = (dspInfo.lastMode ^ MODE_AWAITING_CODEC_PTR);
+    }
+
+#ifdef NETEQ_STEREO
+    if ((dspInfo.lastMode & MODE_MASTER_DTMF_SIGNAL) != 0)
+    {
+        playDtmf = 1; /* force DTMF decision */
+        dspInfo.lastMode = (dspInfo.lastMode ^ MODE_MASTER_DTMF_SIGNAL);
+    }
+
+    if ((dspInfo.lastMode & MODE_USING_STEREO) != 0)
+    {
+        if (inst->usingStereo == 0)
+        {
+            /* stereo mode changed; reset automode instance to re-synchronize statistics */
+            WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
+                inst->PacketBuffer_inst.maxInsertPositions);
+        }
+        inst->usingStereo = 1;
+        dspInfo.lastMode = (dspInfo.lastMode ^ MODE_USING_STEREO);
+    }
+    else
+    {
+        inst->usingStereo = 0;
+    }
+#endif
+
+    /* detect if BGN_ONLY flag is set in lastMode */
+    if ((dspInfo.lastMode & MODE_BGN_ONLY) != 0)
+    {
+        lastModeBGNonly = 1; /* remember flag */
+        dspInfo.lastMode ^= MODE_BGN_ONLY; /* clear the flag */
+    }
+
+    if ((dspInfo.lastMode == MODE_RFC3389CNG) || (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
+        || (dspInfo.lastMode == MODE_EXPAND))
+    {
+        /*
+         * If last mode was CNG (or Expand, since this could be covering up for a lost CNG
+         * packet), increase the CNGplayedTS counter.
+         */
+        inst->BufferStat_inst.uw32_CNGplayedTS += inst->timestampsPerCall;
+
+        if (dspInfo.lastMode == MODE_RFC3389CNG)
+        {
+            /* remember that RFC3389CNG is on (needed if CNG is interrupted by DTMF) */
+            inst->BufferStat_inst.w16_cngOn = CNG_RFC3389_ON;
+        }
+        else if (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
+        {
+            /* remember that internal CNG is on (needed if CNG is interrupted by DTMF) */
+            inst->BufferStat_inst.w16_cngOn = CNG_INTERNAL_ON;
+        }
+
+    }
+
+    /* Update packet size from previously decoded packet */
+    if (dspInfo.frameLen > 0)
+    {
+        inst->PacketBuffer_inst.packSizeSamples = dspInfo.frameLen;
+    }
+
+    /* Look for late packet (unless codec has changed) */
+    if (inst->new_codec != 1)
+    {
+        if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec))
+        {
+            WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+                inst->timeStamp, &uw32_availableTS, &i_bufferpos, 1, &payloadType);
+            if ((inst->new_codec != 1) && (inst->timeStamp == uw32_availableTS)
+                && (inst->timeStamp < dspInfo.playedOutTS) && (i_bufferpos != -1)
+                && (WebRtcNetEQ_DbGetPayload(&(inst->codec_DB_inst),
+                    (enum WebRtcNetEQDecoder) inst->current_Codec) == payloadType))
+            {
+                int waitingTime;
+                temp_pkt.payload = blockPtr + 1;
+                i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
+                    i_bufferpos, &waitingTime);
+                if (i_res < 0)
+                { /* error returned */
+                    return i_res;
+                }
+                WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
+                *blockPtr = temp_pkt.payloadLen;
+                /* set the flag if this is a redundant payload */
+                if (temp_pkt.rcuPlCntr > 0)
+                {
+                    *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
+                }
+                blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
+
+                /*
+                 * Close the data with a zero size block, in case we will not write any
+                 * more data.
+                 */
+                *blockPtr = 0;
+                inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
+                        | DSP_CODEC_ADD_LATE_PKT;
+                latePacketExist = 1;
+            }
+        }
+    }
+
+    i_res = WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+        dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
+        &payloadType);
+    if (i_res < 0)
+    { /* error returned */
+        return i_res;
+    }
+
+    if (inst->BufferStat_inst.w16_cngOn == CNG_RFC3389_ON)
+    {
+        /*
+         * Because of timestamp peculiarities, we have to "manually" disallow using a CNG
+         * packet with the same timestamp as the one that was last played. This can happen
+         * when using redundancy and will cause the timing to shift.
+         */
+        while (i_bufferpos != -1 && WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst,
+            payloadType) && dspInfo.playedOutTS >= uw32_availableTS)
+        {
+
+            /* Don't use this packet, discard it */
+            inst->PacketBuffer_inst.payloadType[i_bufferpos] = -1;
+            inst->PacketBuffer_inst.payloadLengthBytes[i_bufferpos] = 0;
+            inst->PacketBuffer_inst.numPacketsInBuffer--;
+
+            /* Check buffer again */
+            WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+                dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
+                &payloadType);
+        }
+    }
+
+    /* Check packet buffer */
+    w32_bufsize = WebRtcNetEQ_PacketBufferGetSize(&inst->PacketBuffer_inst);
+
+    if (dspInfo.lastMode == MODE_SUCCESS_ACCELERATE || dspInfo.lastMode
+        == MODE_LOWEN_ACCELERATE || dspInfo.lastMode == MODE_SUCCESS_PREEMPTIVE
+        || dspInfo.lastMode == MODE_LOWEN_PREEMPTIVE)
+    {
+        /* Subtract (dspInfo.samplesLeft + inst->timestampsPerCall) from sampleMemory */
+        inst->BufferStat_inst.Automode_inst.sampleMemory -= dspInfo.samplesLeft
+            + inst->timestampsPerCall;
+    }
+
+    /* calculate total current buffer size (in ms*8), including sync buffer */
+    w32_bufsize = WebRtcSpl_DivW32W16((w32_bufsize + dspInfo.samplesLeft), fs_mult);
+
+#ifdef NETEQ_ATEVENT_DECODE
+    /* DTMF data will affect the decision */
+    if (WebRtcNetEQ_DtmfDecode(&inst->DTMF_inst, blockPtr + 1, blockPtr + 2,
+        dspInfo.playedOutTS + inst->BufferStat_inst.uw32_CNGplayedTS) > 0)
+    {
+        playDtmf = 1;
+
+        /* Flag DTMF payload */
+        inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] | DSP_DTMF_PAYLOAD;
+
+        /* Block Length in bytes */
+        blockPtr[0] = 4;
+        /* Advance to next payload position */
+        blockPtr += 3;
+    }
+#endif
+
+    /* Update statistics and make decision */
+    uw16_instr = WebRtcNetEQ_BufstatsDecision(&inst->BufferStat_inst,
+        inst->PacketBuffer_inst.packSizeSamples, w32_bufsize, dspInfo.playedOutTS,
+        uw32_availableTS, i_bufferpos == -1,
+        WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType), dspInfo.lastMode,
+        inst->NetEqPlayoutMode, inst->timestampsPerCall, inst->NoOfExpandCalls, fs_mult,
+        lastModeBGNonly, playDtmf);
+
+    /* Check if time to reset loss counter */
+    if (inst->lastReportTS > WEBRTC_SPL_UMUL(inst->fs, MAX_LOSS_REPORT_PERIOD))
+    {
+        /* reset loss counter */
+        WebRtcNetEQ_ResetMcuInCallStats(inst);
+    }
+
+    /* Check sync buffer size */
+    if ((dspInfo.samplesLeft >= inst->timestampsPerCall) && (uw16_instr
+        != BUFSTATS_DO_ACCELERATE) && (uw16_instr != BUFSTATS_DO_MERGE) && (uw16_instr
+            != BUFSTATS_DO_PREEMPTIVE_EXPAND))
+    {
+        *blockPtr = 0;
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_NORMAL;
+        return 0;
+    }
+
+    if (uw16_instr == BUFSTATS_DO_EXPAND)
+    {
+        inst->NoOfExpandCalls++;
+    }
+    else
+    {
+        /* reset counter */
+        inst->NoOfExpandCalls = 0;
+    }
+
+    /* New codec or big change in packet number? */
+    if (((inst->new_codec) || (uw16_instr == BUFSTAT_REINIT)) && (uw16_instr
+        != BUFSTATS_DO_EXPAND))
+    {
+        CodecFuncInst_t cinst;
+
+        /* Clear other instructions */
+        blockPtr = &inst->pw16_writeAddress[3];
+        /* Clear instruction word */
+        inst->pw16_writeAddress[0] = 0;
+
+        inst->timeStamp = uw32_availableTS;
+        dspInfo.playedOutTS = uw32_availableTS;
+        if (inst->current_Codec != -1)
+        {
+            i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst,
+                (enum WebRtcNetEQDecoder) inst->current_Codec, &cinst);
+            if (i_res < 0)
+            { /* error returned */
+                return i_res;
+            }
+        }
+        else
+        {
+            /* The main codec has not been initialized yet (first packets are DTMF or CNG). */
+            if (WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType))
+            {
+                /* The currently extracted packet is CNG; get CNG fs */
+                WebRtc_UWord16 tempFs;
+
+                tempFs = WebRtcNetEQ_DbGetSampleRate(&inst->codec_DB_inst, payloadType);
+                if (tempFs > 0)
+                {
+                    inst->fs = tempFs;
+                }
+            }
+            WebRtcSpl_MemSetW16((WebRtc_Word16*) &cinst, 0,
+                                sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
+            cinst.codec_fs = inst->fs;
+        }
+        cinst.timeStamp = inst->timeStamp;
+        blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(WebRtc_Word16) - 1); /* in Word16 */
+        *blockPtr = blockLen * 2;
+        blockPtr++;
+        WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst,sizeof(CodecFuncInst_t));
+        blockPtr += blockLen;
+        inst->new_codec = 0;
+
+        /* Reinitialize the MCU fs */
+        i_res = WebRtcNetEQ_McuSetFs(inst, cinst.codec_fs);
+        if (i_res < 0)
+        { /* error returned */
+            return i_res;
+        }
+
+        /* Set the packet size by guessing */
+        inst->PacketBuffer_inst.packSizeSamples = inst->timestampsPerCall * 3;
+
+        WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
+                                  inst->PacketBuffer_inst.maxInsertPositions);
+
+#ifdef NETEQ_CNG_CODEC
+        /* Also insert CNG state as this might be needed by DSP */
+        i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst, kDecoderCNG, &cinst);
+        if ((i_res < 0) && (i_res != CODEC_DB_NOT_EXIST1))
+        {
+            /* other error returned */
+            /* (CODEC_DB_NOT_EXIST1 simply indicates that CNG is not used */
+            return i_res;
+        }
+        else
+        {
+            /* CNG exists */
+            blockLen = (sizeof(cinst.codec_state)) >> (sizeof(WebRtc_Word16) - 1);
+            *blockPtr = blockLen * 2;
+            blockPtr++;
+            WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst.codec_state,sizeof(cinst.codec_state));
+            blockPtr += blockLen;
+        }
+#endif
+
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
+                | DSP_CODEC_NEW_CODEC;
+
+        if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
+        {
+            /*
+             * Change decision to CNG packet, since we do have a CNG packet, but it was
+             * considered too early to use. Now, use it anyway.
+             */
+            uw16_instr = BUFSTATS_DO_RFC3389CNG_PACKET;
+        }
+        else if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
+        {
+            uw16_instr = BUFSTATS_DO_NORMAL;
+        }
+
+        /* reset loss counter */
+        WebRtcNetEQ_ResetMcuInCallStats(inst);
+    }
+
+    /* Should we just reset the decoder? */
+    if (uw16_instr == BUFSTAT_REINIT_DECODER)
+    {
+        /* Change decision to normal and flag decoder reset */
+        uw16_instr = BUFSTATS_DO_NORMAL;
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff) | DSP_CODEC_RESET;
+    }
+
+    /* Expand requires no new packet */
+    if (uw16_instr == BUFSTATS_DO_EXPAND)
+    {
+
+        inst->timeStamp = dspInfo.playedOutTS;
+
+        /* Have we got one descriptor left? */
+        if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec)
+            && (dspInfo.MD || latePacketExist))
+        {
+
+            if (dspInfo.lastMode != MODE_ONE_DESCRIPTOR)
+            {
+                /* this is the first "consecutive" one-descriptor decoding; reset counter */
+                inst->one_desc = 0;
+            }
+            if (inst->one_desc < MAX_ONE_DESC)
+            {
+                /* use that one descriptor */
+                inst->one_desc++; /* increase counter */
+                inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                        | DSP_INSTR_NORMAL_ONE_DESC;
+
+                /* decrease counter since we did no Expand */
+                inst->NoOfExpandCalls = WEBRTC_SPL_MAX(inst->NoOfExpandCalls - 1, 0);
+                return 0;
+            }
+            else
+            {
+                /* too many consecutive one-descriptor decodings; do expand instead */
+                inst->one_desc = 0; /* reset counter */
+            }
+
+        }
+
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_EXPAND;
+        return 0;
+    }
+
+    /* Merge is not needed if we still have a descriptor */
+    if ((uw16_instr == BUFSTATS_DO_MERGE) && (dspInfo.MD != 0))
+    {
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_NORMAL_ONE_DESC;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    /* Do CNG without trying to extract any packets from buffer */
+    if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
+    {
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_DO_RFC3389CNG;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    /* Do built-in CNG without extracting any new packets from buffer */
+    if (uw16_instr == BUFSTATS_DO_INTERNAL_CNG_NOPACKET)
+    {
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_DO_CODEC_INTERNAL_CNG;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    /* Do DTMF without extracting any new packets from buffer */
+    if (uw16_instr == BUFSTATS_DO_DTMF_ONLY)
+    {
+        WebRtc_UWord32 timeStampJump = 0;
+
+        /* Update timestamp */
+        if ((inst->BufferStat_inst.uw32_CNGplayedTS > 0) && (dspInfo.lastMode != MODE_DTMF))
+        {
+            /* Jump in timestamps if needed */
+            timeStampJump = inst->BufferStat_inst.uw32_CNGplayedTS;
+            inst->pw16_writeAddress[1] = (WebRtc_UWord16) (timeStampJump >> 16);
+            inst->pw16_writeAddress[2] = (WebRtc_UWord16) (timeStampJump & 0xFFFF);
+        }
+
+        inst->timeStamp = dspInfo.playedOutTS + timeStampJump;
+
+        inst->BufferStat_inst.uw32_CNGplayedTS = 0;
+        inst->NoOfExpandCalls = 0;
+
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_DTMF_GENERATE;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    if (uw16_instr == BUFSTATS_DO_ACCELERATE)
+    {
+        /* In order to do a Accelerate we need at least 30 ms of data */
+        if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
+        {
+            /* Already have enough data, so we do not need to extract any more */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_ACCELERATE;
+            *blockPtr = 0;
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /* Avoid decoding more data as it might overflow playout buffer */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_NORMAL;
+            *blockPtr = 0;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /* For >= 30ms allow Accelerate with a decoding to avoid overflow in playout buffer */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
+        {
+            /* We need to decode another 10 ms in order to do an Accelerate */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else
+        {
+            /*
+             * Build up decoded data by decoding at least 20 ms of data.
+             * Do not perform Accelerate yet, but wait until we only need to do one decoding.
+             */
+            wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
+            uw16_instr = BUFSTATS_DO_NORMAL;
+        }
+    }
+    else if (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND)
+    {
+        /* In order to do a Preemptive Expand we need at least 30 ms of data */
+        if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
+        {
+            /* Already have enough data, so we do not need to extract any more */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_PREEMPTIVE_EXPAND;
+            *blockPtr = 0;
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /*
+             * Avoid decoding more data as it might overflow playout buffer;
+             * still try Preemptive Expand though.
+             */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_PREEMPTIVE_EXPAND;
+            *blockPtr = 0;
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /*
+             * For >= 30ms allow Preemptive Expand with a decoding to avoid overflow in
+             * playout buffer
+             */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
+        {
+            /* We need to decode another 10 ms in order to do an Preemptive Expand */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else
+        {
+            /*
+             * Build up decoded data by decoding at least 20 ms of data,
+             * Still try to perform Preemptive Expand.
+             */
+            wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
+        }
+    }
+    else
+    {
+        wantedNoOfTimeStamps = inst->timestampsPerCall;
+    }
+
+    /* Otherwise get data from buffer, try to get at least 10ms */
+    totalTS = 0;
+    oldTS = uw32_availableTS;
+    if ((i_bufferpos > -1) && (uw16_instr != BUFSTATS_DO_ALTERNATIVE_PLC) && (uw16_instr
+        != BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS) && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION)
+        && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION_INC_TS))
+    {
+        uw32_tmp = (uw32_availableTS - dspInfo.playedOutTS);
+        inst->pw16_writeAddress[1] = (WebRtc_UWord16) (uw32_tmp >> 16);
+        inst->pw16_writeAddress[2] = (WebRtc_UWord16) (uw32_tmp & 0xFFFF);
+        if (inst->BufferStat_inst.w16_cngOn == CNG_OFF)
+        {
+            /*
+             * Adjustment of TS only corresponds to an actual packet loss
+             * if comfort noise is not played. If comfort noise was just played,
+             * this adjustment of TS is only done to get back in sync with the
+             * stream TS; no loss to report.
+             */
+            inst->lostTS += uw32_tmp;
+        }
+
+        if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
+        {
+            /* We are about to decode and use a non-CNG packet => CNG period is ended */
+            inst->BufferStat_inst.w16_cngOn = CNG_OFF;
+        }
+
+        /*
+         * Reset CNG timestamp as a new packet will be delivered.
+         * (Also if CNG packet, since playedOutTS is updated.)
+         */
+        inst->BufferStat_inst.uw32_CNGplayedTS = 0;
+
+        prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
+        prevTS = inst->PacketBuffer_inst.timeStamp[i_bufferpos];
+        oldPT = inst->PacketBuffer_inst.payloadType[i_bufferpos];
+
+        /* clear flag bits */
+        inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] & 0xFF3F;
+        do
+        {
+            int waitingTime;
+            inst->timeStamp = uw32_availableTS;
+            /* Write directly to shared memory */
+            temp_pkt.payload = blockPtr + 1;
+            i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
+                i_bufferpos, &waitingTime);
+
+            if (i_res < 0)
+            {
+                /* error returned */
+                return i_res;
+            }
+            WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
+
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE;
+            fwrite(&temp_var,sizeof(int),1,delay_fid2);
+            fwrite(&temp_pkt.timeStamp,sizeof(WebRtc_UWord32),1,delay_fid2);
+            fwrite(&dspInfo.samplesLeft, sizeof(WebRtc_UWord16), 1, delay_fid2);
+#endif
+
+            *blockPtr = temp_pkt.payloadLen;
+            /* set the flag if this is a redundant payload */
+            if (temp_pkt.rcuPlCntr > 0)
+            {
+                *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
+            }
+            blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
+
+            if (i_bufferpos > -1)
+            {
+                /*
+                 * Store number of TS extracted (last extracted is assumed to be of
+                 * packSizeSamples).
+                 */
+                totalTS = uw32_availableTS - oldTS + inst->PacketBuffer_inst.packSizeSamples;
+            }
+            /* Check what next packet is available */
+            WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+                inst->timeStamp, &uw32_availableTS, &i_bufferpos, 0, &payloadType);
+
+            nextSeqNoAvail = 0;
+            if ((i_bufferpos > -1) && (oldPT
+                == inst->PacketBuffer_inst.payloadType[i_bufferpos]))
+            {
+                w16_tmp = inst->PacketBuffer_inst.seqNumber[i_bufferpos] - prevSeqNo;
+                w32_tmp = inst->PacketBuffer_inst.timeStamp[i_bufferpos] - prevTS;
+                if ((w16_tmp == 1) || /* Next packet */
+                    ((w16_tmp == 0) && (w32_tmp == inst->PacketBuffer_inst.packSizeSamples)))
+                { /* or packet split into frames */
+                    nextSeqNoAvail = 1;
+                }
+                prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
+            }
+
+        }
+        while ((totalTS < wantedNoOfTimeStamps) && (nextSeqNoAvail == 1));
+    }
+
+    if ((uw16_instr == BUFSTATS_DO_ACCELERATE)
+        || (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND))
+    {
+        /* Check that we have enough data (30ms) to do the Accelearate */
+        if ((totalTS + dspInfo.samplesLeft) < WEBRTC_SPL_MUL(3,inst->timestampsPerCall)
+            && (uw16_instr == BUFSTATS_DO_ACCELERATE))
+        {
+            /* Not enough, do normal operation instead */
+            uw16_instr = BUFSTATS_DO_NORMAL;
+        }
+        else
+        {
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft + totalTS;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+        }
+    }
+
+    /* Close the data with a zero size block */
+    *blockPtr = 0;
+
+    /* Write data to DSP */
+    switch (uw16_instr)
+    {
+        case BUFSTATS_DO_NORMAL:
+            /* Normal with decoding included */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_NORMAL;
+            break;
+        case BUFSTATS_DO_ACCELERATE:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_ACCELERATE;
+            break;
+        case BUFSTATS_DO_MERGE:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_MERGE;
+            break;
+        case BUFSTATS_DO_RFC3389CNG_PACKET:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_DO_RFC3389CNG;
+            break;
+        case BUFSTATS_DO_ALTERNATIVE_PLC:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_ALTERNATIVE_PLC;
+            break;
+        case BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS;
+            break;
+        case BUFSTATS_DO_AUDIO_REPETITION:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_AUDIO_REPETITION;
+            break;
+        case BUFSTATS_DO_AUDIO_REPETITION_INC_TS:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_AUDIO_REPETITION_INC_TS;
+            break;
+        case BUFSTATS_DO_PREEMPTIVE_EXPAND:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_PREEMPTIVE_EXPAND;
+            break;
+        default:
+            return UNKNOWN_BUFSTAT_DECISION;
+    }
+
+    inst->timeStamp = dspInfo.playedOutTS;
+    return 0;
+
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/split_and_insert.c b/trunk/src/modules/audio_coding/neteq/split_and_insert.c
new file mode 100644
index 0000000..03c1569
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/split_and_insert.c
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Split an RTP payload (if possible and suitable) and insert into packet buffer.
+ */
+
+#include "mcu.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+/*
+ * WebRtcNetEQ_SplitAndInsertPayload(...)
+ *
+ * Split the payload of |packet| into smaller pieces, when the codec's split
+ * info allows it, and insert each piece into the packet buffer.
+ *
+ * Input:
+ *      - packet        : packet whose payload is split and inserted
+ *                        (the caller's struct itself is not modified;
+ *                        a local copy is used for the splitting)
+ *      - Buffer_inst   : destination packet buffer
+ *      - split_inst    : codec-specific split description;
+ *                        deltaBytes == NO_SPLIT means "insert as one piece",
+ *                        deltaBytes < -10 selects sample-based ("soft")
+ *                        splitting, otherwise deltaBytes is the exact frame
+ *                        size in bytes ("hard" splitting)
+ *
+ * Output:
+ *      - flushed       : set non-zero if any of the insert calls reported
+ *                        that the buffer was flushed
+ *
+ * Return value         : 0 on success, or a (negative) PBUFFER_INSERT_ERRORx
+ *                        code propagated from the failing insert
+ */
+int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
+                                      SplitInfo_t *split_inst, WebRtc_Word16 *flushed)
+{
+
+    int i_ok;
+    int len;
+    int i;
+    RTPPacket_t temp_packet;
+    WebRtc_Word16 localFlushed = 0;
+    const WebRtc_Word16 *pw16_startPayload;
+    *flushed = 0;
+
+    len = packet->payloadLen;
+
+    /* Copy to temp packet that can be modified. */
+
+    WEBRTC_SPL_MEMCPY_W8(&temp_packet,packet,sizeof(RTPPacket_t));
+
+    if (split_inst->deltaBytes == NO_SPLIT)
+    {
+        /* Not splittable codec; insert the packet in one piece. */
+        i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, packet, &localFlushed);
+        *flushed |= localFlushed;
+        if (i_ok < 0)
+        {
+            return PBUFFER_INSERT_ERROR5;
+        }
+    }
+    else if (split_inst->deltaBytes < -10)
+    {
+        /* G711, PCM16B or G722, use "soft splitting" */
+        int split_size = packet->payloadLen;
+        int mult = WEBRTC_SPL_ABS_W32(split_inst->deltaBytes) - 10;
+
+        /* Find "chunk size" >= 20 ms and < 40 ms
+         * split_inst->deltaTime in this case contains the number of bytes per
+         * timestamp unit times 2
+         */
+        while (split_size >= ((80 << split_inst->deltaTime) * mult))
+        {
+            split_size >>= 1;
+        }
+
+        /* Make the size an even value. */
+        if (split_size > 1)
+        {
+            split_size >>= 1;
+            split_size *= 2;
+        }
+
+        temp_packet.payloadLen = split_size;
+        pw16_startPayload = temp_packet.payload;
+        i = 0;
+        /* Insert split_size-byte chunks while at least two chunks' worth of
+         * data remains; the final (possibly larger) piece is inserted after
+         * the loop. */
+        while (len >= (2 * split_size))
+        {
+            /* insert every chunk */
+            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+            *flushed |= localFlushed;
+            temp_packet.timeStamp += ((2 * split_size) >> split_inst->deltaTime);
+            i++;
+            /* The payload pointer addresses 16-bit words, so the byte offset
+             * is halved when indexing... */
+            temp_packet.payload = &(pw16_startPayload[(i * split_size) >> 1]);
+            /* ...and an odd chunk size toggles the flag (presumably marking
+             * that the chunk starts at byte 1 of a 16-bit word — TODO confirm
+             * against RTPPacket_t docs). */
+            temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_size & 0x1);
+
+            len -= split_size;
+            if (i_ok < 0)
+            {
+                return PBUFFER_INSERT_ERROR1;
+            }
+        }
+
+        /* Insert the rest */
+        temp_packet.payloadLen = len;
+        i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+        *flushed |= localFlushed;
+        if (i_ok < 0)
+        {
+            return PBUFFER_INSERT_ERROR2;
+        }
+    }
+    else
+    {
+        /* Frame based codec, use hard splitting: each piece is exactly
+         * deltaBytes long and advances the timestamp by deltaTime. */
+        i = 0;
+        pw16_startPayload = temp_packet.payload;
+        while (len >= split_inst->deltaBytes)
+        {
+
+            temp_packet.payloadLen = split_inst->deltaBytes;
+            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+            *flushed |= localFlushed;
+            i++;
+            /* Word-indexed payload pointer and odd-byte flag, as in the
+             * soft-splitting branch above. */
+            temp_packet.payload = &(pw16_startPayload[(i * split_inst->deltaBytes) >> 1]);
+            temp_packet.timeStamp += split_inst->deltaTime;
+            temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_inst->deltaBytes
+                & 0x1);
+
+            if (i_ok < 0)
+            {
+                return PBUFFER_INSERT_ERROR3;
+            }
+            len -= split_inst->deltaBytes;
+
+        }
+        if (len > 0)
+        {
+            /* Must be either an error or a SID frame at the end of the packet. */
+            temp_packet.payloadLen = len;
+            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+            *flushed |= localFlushed;
+            if (i_ok < 0)
+            {
+                return PBUFFER_INSERT_ERROR4;
+            }
+        }
+    }
+
+    return 0;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
new file mode 100644
index 0000000..64bf508
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
@@ -0,0 +1,657 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "NETEQTEST_CodecClass.h"
+
+#include <stdlib.h>  // exit
+
+#include "webrtc_neteq_help_macros.h"
+
+// Record the codec description (type, sample rate, display name, payload
+// type). The decoder instance itself (_decoder) is left NULL here and is
+// allocated by the derived class constructors.
+NETEQTEST_Decoder::NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt)
+    : _decoder(NULL),
+      _decoderType(type),
+      _pt(pt),
+      _fs(fs),
+      _name(name)
+{
+}
+
+// Shared registration helper: fill in the common codec parameters (type,
+// payload type, decoder state pointer, sample rate) and hand the definition
+// to NetEQ. Returns whatever loadCodec returns (0 on success).
+int NETEQTEST_Decoder::loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst)
+{
+    SET_CODEC_PAR(codecInst, _decoderType, _pt, _decoder, _fs);
+
+    const int status = neteq.loadCodec(codecInst);
+    if (status != 0)
+    {
+        printf("Error loading codec %s into NetEQ database\n", _name.c_str());
+    }
+    return status;
+}
+
+
+// iSAC (wideband, 16 kHz)
+#ifdef CODEC_ISAC
+#include "isac.h"
+
+decoder_iSAC::decoder_iSAC(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderISAC, 16000, "iSAC", pt)
+{
+    // Allocate the iSAC instance; the test application cannot continue
+    // without it.
+    if (WebRtcIsac_Create((ISACStruct **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    // Initialize the encoder side of the instance and select wideband
+    // decoding.
+    WebRtcIsac_EncoderInit((ISACStruct *) _decoder, 0);
+    WebRtcIsac_SetDecSampRate((ISACStruct *) _decoder, kIsacWideband);
+}
+
+decoder_iSAC::~decoder_iSAC()
+{
+    // Free only if an instance was actually created.
+    if (_decoder)
+    {
+        WebRtcIsac_Free((ISACStruct *) _decoder);
+        _decoder = NULL;
+    }
+}
+
+int decoder_iSAC::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the iSAC function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_ISAC_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_ISAC_SWB
+// iSAC super-wideband (32 kHz)
+decoder_iSACSWB::decoder_iSACSWB(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderISACswb, 32000, "iSAC swb", pt)
+{
+    if (WebRtcIsac_Create((ISACStruct **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    // Initialize the encoder side and select super-wideband decoding.
+    WebRtcIsac_EncoderInit((ISACStruct *) _decoder, 0);
+    WebRtcIsac_SetDecSampRate((ISACStruct *) _decoder, kIsacSuperWideband);
+}
+
+decoder_iSACSWB::~decoder_iSACSWB()
+{
+    // Free only if an instance was actually created.
+    if (_decoder)
+    {
+        WebRtcIsac_Free((ISACStruct *) _decoder);
+        _decoder = NULL;
+    }
+}
+
+int decoder_iSACSWB::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the super-wideband iSAC function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_ISACSWB_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+// G.711 u-law / A-law
+#ifdef CODEC_G711
+#include "g711_interface.h"
+
+decoder_PCMU::decoder_PCMU(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderPCMu, 8000, "G.711-u", pt)
+{
+    // G.711 has no decoder state to create or initialize.
+}
+
+int decoder_PCMU::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the u-law function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_PCMU_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+
+decoder_PCMA::decoder_PCMA(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderPCMa, 8000, "G.711-A", pt)
+{
+    // G.711 has no decoder state to create or initialize.
+}
+
+int decoder_PCMA::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the A-law function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_PCMA_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+// Linear PCM16b at 8, 16, 32 and 48 kHz. Only the registration functions
+// are defined here; no decoder state is allocated.
+#if (defined(CODEC_PCM16B) || defined(CODEC_PCM16B_WB) || \
+    defined(CODEC_PCM16B_32KHZ) || defined(CODEC_PCM16B_48KHZ))
+#include "pcm16b.h"
+#endif
+
+#ifdef CODEC_PCM16B
+int decoder_PCM16B_NB::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    // Narrowband (8 kHz) PCM16b registration.
+    WebRtcNetEQ_CodecDef def;
+    SET_PCM16B_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_PCM16B_WB
+int decoder_PCM16B_WB::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    // Wideband (16 kHz) PCM16b registration.
+    WebRtcNetEQ_CodecDef def;
+    SET_PCM16B_WB_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_PCM16B_32KHZ
+int decoder_PCM16B_SWB32::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    // Super-wideband (32 kHz) PCM16b registration.
+    WebRtcNetEQ_CodecDef def;
+    SET_PCM16B_SWB32_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_PCM16B_48KHZ
+int decoder_PCM16B_SWB48::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    // Super-wideband (48 kHz) PCM16b registration.
+    WebRtcNetEQ_CodecDef def;
+    SET_PCM16B_SWB48_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_ILBC
+#include "ilbc.h"
+// iLBC (8 kHz)
+decoder_ILBC::decoder_ILBC(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderILBC, 8000, "iLBC", pt)
+{
+    // Allocate the iLBC decoder instance; abort the test app on failure.
+    if (WebRtcIlbcfix_DecoderCreate((iLBC_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_ILBC::~decoder_ILBC()
+{
+    WebRtcIlbcfix_DecoderFree((iLBC_decinst_t *) _decoder);
+}
+
+int decoder_ILBC::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the iLBC function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_ILBC_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G729
+#include "G729Interface.h"
+// G.729 (8 kHz)
+decoder_G729::decoder_G729(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG729, 8000, "G.729", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG729_CreateDec((G729_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G729::~decoder_G729()
+{
+    WebRtcG729_FreeDec((G729_decinst_t *) _decoder);
+}
+
+int decoder_G729::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the G.729 function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_G729_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G729_1
+#include "G729_1Interface.h"
+// G.729.1 (16 kHz)
+decoder_G729_1::decoder_G729_1(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG729_1, 16000, "G.729.1", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7291_Create((G729_1_inst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G729_1::~decoder_G729_1()
+{
+    WebRtcG7291_Free((G729_1_inst_t *) _decoder);
+}
+
+int decoder_G729_1::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the G.729.1 function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_G729_1_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G722
+#include "g722_interface.h"
+// G.722 (16 kHz)
+decoder_G722::decoder_G722(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722, 16000, "G.722", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG722_CreateDecoder((G722DecInst **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722::~decoder_G722()
+{
+    WebRtcG722_FreeDecoder((G722DecInst *) _decoder);
+}
+
+int decoder_G722::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    // Register the G.722 function pointers with NetEQ.
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+// G.722.1 (16 kHz) and G.722.1C (32 kHz) variants share one interface header.
+#if (defined(CODEC_G722_1_16) || defined(CODEC_G722_1_24) || \
+    defined(CODEC_G722_1_32) || defined(CODEC_G722_1C_24) || \
+    defined(CODEC_G722_1C_32) || defined(CODEC_G722_1C_48))
+#include "G722_1Interface.h"
+#endif
+
+#ifdef CODEC_G722_1_16
+decoder_G722_1_16::decoder_G722_1_16(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722_1_16, 16000, "G.722.1 (16 kbps)", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7221_CreateDec16((G722_1_16_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1_16::~decoder_G722_1_16()
+{
+    WebRtcG7221_FreeDec16((G722_1_16_decinst_t *) _decoder);
+}
+
+int decoder_G722_1_16::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_1_16_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G722_1_24
+decoder_G722_1_24::decoder_G722_1_24(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722_1_24, 16000, "G.722.1 (24 kbps)", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7221_CreateDec24((G722_1_24_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1_24::~decoder_G722_1_24()
+{
+    WebRtcG7221_FreeDec24((G722_1_24_decinst_t *) _decoder);
+}
+
+int decoder_G722_1_24::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_1_24_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G722_1_32
+decoder_G722_1_32::decoder_G722_1_32(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722_1_32, 16000, "G.722.1 (32 kbps)", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7221_CreateDec32((G722_1_32_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1_32::~decoder_G722_1_32()
+{
+    WebRtcG7221_FreeDec32((G722_1_32_decinst_t *) _decoder);
+}
+
+int decoder_G722_1_32::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_1_32_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G722_1C_24
+decoder_G722_1C_24::decoder_G722_1C_24(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722_1C_24, 32000, "G.722.1C (24 kbps)", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7221C_CreateDec24((G722_1C_24_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1C_24::~decoder_G722_1C_24()
+{
+    WebRtcG7221C_FreeDec24((G722_1C_24_decinst_t *) _decoder);
+}
+
+int decoder_G722_1C_24::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_1C_24_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G722_1C_32
+decoder_G722_1C_32::decoder_G722_1C_32(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722_1C_32, 32000, "G.722.1C (32 kbps)", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7221C_CreateDec32((G722_1C_32_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1C_32::~decoder_G722_1C_32()
+{
+    WebRtcG7221C_FreeDec32((G722_1C_32_decinst_t *) _decoder);
+}
+
+int decoder_G722_1C_32::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_1C_32_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_G722_1C_48
+decoder_G722_1C_48::decoder_G722_1C_48(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderG722_1C_48, 32000, "G.722.1C (48 kbps)", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcG7221C_CreateDec48((G722_1C_48_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1C_48::~decoder_G722_1C_48()
+{
+    WebRtcG7221C_FreeDec48((G722_1C_48_decinst_t *) _decoder);
+}
+
+int decoder_G722_1C_48::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_G722_1C_48_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_AMR
+#include "AMRInterface.h"
+#include "AMRCreation.h"
+// AMR (8 kHz)
+decoder_AMR::decoder_AMR(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderAMR, 8000, "AMR", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcAmr_CreateDec((AMR_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    // Use the bandwidth-efficient AMR payload format.
+    WebRtcAmr_DecodeBitmode((AMR_decinst_t *) _decoder, AMRBandwidthEfficient);
+}
+
+decoder_AMR::~decoder_AMR()
+{
+    WebRtcAmr_FreeDec((AMR_decinst_t *) _decoder);
+}
+
+int decoder_AMR::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_AMR_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_AMRWB
+#include "AMRWBInterface.h"
+#include "AMRWBCreation.h"
+// AMR wideband (16 kHz)
+decoder_AMRWB::decoder_AMRWB(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderAMRWB, 16000, "AMR wb", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcAmrWb_CreateDec((AMRWB_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    // Use the bandwidth-efficient AMR payload format.
+    WebRtcAmrWb_DecodeBitmode((AMRWB_decinst_t *) _decoder, AMRBandwidthEfficient);
+}
+
+decoder_AMRWB::~decoder_AMRWB()
+{
+    WebRtcAmrWb_FreeDec((AMRWB_decinst_t *) _decoder);
+}
+
+int decoder_AMRWB::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_AMRWB_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_GSMFR
+#include "GSMFRInterface.h"
+#include "GSMFRCreation.h"
+// GSM full-rate (8 kHz)
+decoder_GSMFR::decoder_GSMFR(WebRtc_UWord8 pt)
+    : NETEQTEST_Decoder(kDecoderGSMFR, 8000, "GSM-FR", pt)
+{
+    // Allocate the decoder state; abort the test app on failure.
+    if (WebRtcGSMFR_CreateDec((GSMFR_decinst_t **) &_decoder) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_GSMFR::~decoder_GSMFR()
+{
+    WebRtcGSMFR_FreeDec((GSMFR_decinst_t *) _decoder);
+}
+
+int decoder_GSMFR::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_GSMFR_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#if (defined(CODEC_SPEEX_8) || defined (CODEC_SPEEX_16))
+#include "SpeexInterface.h"
+/*
+ * Speex decoder wrapper (8 or 16 kHz).
+ *
+ * Fixes relative to the original code:
+ *  - The name expression `"SPEEX " + fs/1000` performed pointer arithmetic
+ *    on the string literal (indexing past its terminating NUL for both 8000
+ *    and 16000 -> undefined behavior), not string concatenation. A complete
+ *    literal is now selected per rate.
+ *  - `throw std::exception("...")` relied on a non-standard (MSVC-only)
+ *    std::exception constructor; ISO C++ std::exception takes no message.
+ *    Invalid sample rates now abort via exit(EXIT_FAILURE), consistent with
+ *    every other decoder wrapper in this file.
+ */
+decoder_SPEEX::decoder_SPEEX(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+:
+NETEQTEST_Decoder(fs == 8000 ? kDecoderSPEEX_8 : kDecoderSPEEX_16,
+                  fs, fs == 8000 ? "SPEEX 8" : "SPEEX 16", pt)
+{
+    /* Only narrowband (8 kHz) and wideband (16 kHz) Speex are supported. */
+    if (fs != 8000 && fs != 16000)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    /* Allocate the Speex decoder state; abort the test app on failure. */
+    if (WebRtcSpeex_CreateDec((SPEEX_decinst_t **) &_decoder, fs, 1))
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_SPEEX::~decoder_SPEEX()
+{
+    WebRtcSpeex_FreeDec((SPEEX_decinst_t *) _decoder);
+}
+
+int decoder_SPEEX::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    /* Register the Speex function pointers with NetEQ. */
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_SPEEX_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_CELT_32
+#include "celt_interface.h"
+// CELT (32 kHz)
+decoder_CELT::decoder_CELT(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+    : NETEQTEST_Decoder(kDecoderCELT_32, fs, "CELT", pt)
+{
+    // Allocate the CELT decoder state; abort the test app on failure.
+    if (WebRtcCelt_CreateDec((CELT_decinst_t **) &_decoder, 1) != 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_CELT::~decoder_CELT()
+{
+    WebRtcCelt_FreeDec((CELT_decinst_t *) _decoder);
+}
+
+int decoder_CELT::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_CELT_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_RED
+// Redundancy (RED) payload handling: only function pointers are registered
+// here; no decoder state is allocated.
+int decoder_RED::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_RED_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#ifdef CODEC_ATEVENT_DECODE
+// AVT / telephone-event registration; no decoder state is allocated here.
+int decoder_AVT::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef def;
+    SET_AVT_FUNCTIONS(def);
+    return NETEQTEST_Decoder::loadToNetEQ(neteq, def);
+}
+#endif
+
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+#include "webrtc_cng.h"
+/*
+ * Comfort-noise (CNG) decoder wrapper for 8/16/32/48 kHz.
+ *
+ * Fix relative to the original code: the name expression `"CNG " + fs/1000`
+ * performed pointer arithmetic on the string literal (reading past its end
+ * for every valid rate -> undefined behavior), not string concatenation.
+ * A complete literal is now selected per rate.
+ */
+decoder_CNG::decoder_CNG(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+:
+NETEQTEST_Decoder(kDecoderCNG, fs,
+                  fs == 8000 ? "CNG 8" :
+                  fs == 16000 ? "CNG 16" :
+                  fs == 32000 ? "CNG 32" : "CNG 48", pt)
+{
+    /* Only these four sample rates are supported. */
+    if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000)
+        exit(EXIT_FAILURE);
+
+    /* Allocate the CNG decoder state; abort the test app on failure. */
+    if (WebRtcCng_CreateDec((CNG_dec_inst **) &_decoder))
+        exit(EXIT_FAILURE);
+}
+
+decoder_CNG::~decoder_CNG()
+{
+    WebRtcCng_FreeDec((CNG_dec_inst *) _decoder);
+}
+
+int decoder_CNG::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    /* Register the CNG function pointers with NetEQ. */
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_CNG_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
diff --git a/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
new file mode 100644
index 0000000..ff49049
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
@@ -0,0 +1,301 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_CODECCLASS_H
+#define NETEQTEST_CODECCLASS_H
+
+#include <string>
+#include <string.h>
+
+#include "typedefs.h"
+#include "webrtc_neteq.h"
+#include "NETEQTEST_NetEQClass.h"
+
+// Abstract base class for all NetEQ test decoder wrappers. Stores the
+// codec type, sample rate, display name and RTP payload type; concrete
+// subclasses allocate the codec instance into _decoder and implement
+// loadToNetEQ() to register their function pointers.
+class NETEQTEST_Decoder
+{
+public:
+    NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt = 0);
+    virtual ~NETEQTEST_Decoder() {};
+
+    // Register this decoder with the given NetEQ instance; returns 0 on
+    // success.
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
+
+    // Copy the codec name into 'name' (at most maxLen bytes). Always
+    // returns 0. Fixed: the original used bare strncpy, which leaves the
+    // destination unterminated when the name is >= maxLen bytes long.
+    int getName(char * name, int maxLen) const
+    {
+        if (maxLen <= 0)
+        {
+            return 0;
+        }
+        strncpy(name, _name.c_str(), maxLen);
+        name[maxLen - 1] = '\0'; // guarantee NUL termination
+        return 0;
+    };
+
+    void setPT(WebRtc_UWord8 pt) { _pt = pt; };
+    WebRtc_UWord16 getFs() const { return (_fs); };
+    enum WebRtcNetEQDecoder getType() const { return (_decoderType); };
+    WebRtc_UWord8 getPT() const { return (_pt); };
+
+protected:
+    // Shared helper: completes codecInst with this decoder's type/fs/pt
+    // and adds it to neteq (implemented in the .cc file).
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst);
+
+    void * _decoder;          // codec instance, owned by the subclass
+    enum WebRtcNetEQDecoder _decoderType;
+    WebRtc_UWord8 _pt;        // RTP payload type
+    WebRtc_UWord16 _fs;       // sample rate in Hz
+    std::string _name;        // human-readable codec name
+
+private:
+};
+
+
+// ---------------------------------------------------------------------------
+// Concrete decoder wrappers. Each subclass registers one codec with NetEQ;
+// those with out-of-line constructors allocate a codec instance, the ones
+// with inline constructors only forward type/rate/name to the base class.
+// ---------------------------------------------------------------------------
+
+class decoder_iSAC : public NETEQTEST_Decoder
+{
+public:
+    decoder_iSAC(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_iSAC();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_iSACSWB : public NETEQTEST_Decoder
+{
+public:
+    decoder_iSACSWB(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_iSACSWB();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_PCMU : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCMU(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_PCMU() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_PCMA : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCMA(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_PCMA() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+// Linear PCM16 wrappers at 8/16/32/48 kHz; no codec instance is needed.
+class decoder_PCM16B_NB : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_NB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16B, 8000, "PCM16 nb", pt) {};
+    virtual ~decoder_PCM16B_NB() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_PCM16B_WB : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_WB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bwb, 16000, "PCM16 wb", pt) {};
+    virtual ~decoder_PCM16B_WB() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_PCM16B_SWB32 : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_SWB32(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb32kHz, 32000, "PCM16 swb32", pt) {};
+    virtual ~decoder_PCM16B_SWB32() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_PCM16B_SWB48 : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_SWB48(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb48kHz, 48000, "PCM16 swb48", pt) {};
+    virtual ~decoder_PCM16B_SWB48() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_ILBC : public NETEQTEST_Decoder
+{
+public:
+    decoder_ILBC(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_ILBC();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G729 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G729(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G729();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G729_1 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G729_1(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G729_1();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G722 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+// G.722.1 wrappers (16/24/32 kbps) and G.722.1C wrappers (24/32/48 kbps).
+class decoder_G722_1_16 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1_16(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1_16();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1_24 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1_24(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1_24();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1_32 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1_32(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1_32();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G722_1C_24 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1C_24(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1C_24();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1C_32 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1C_32(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1C_32();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1C_48 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1C_48(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1C_48();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_AMR : public NETEQTEST_Decoder
+{
+public:
+    decoder_AMR(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_AMR();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_AMRWB : public NETEQTEST_Decoder
+{
+public:
+    decoder_AMRWB(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_AMRWB();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_GSMFR : public NETEQTEST_Decoder
+{
+public:
+    decoder_GSMFR(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_GSMFR();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+// Abstract base for the four G.726 bit-rate variants below; the concrete
+// decoder type and display name are supplied by the subclass.
+class decoder_G726 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G726(enum WebRtcNetEQDecoder type, const char * name, WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G726();
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
+};
+
+class decoder_G726_16 : public decoder_G726
+{
+public:
+    decoder_G726_16(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_16, "G.726 (16 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726_24 : public decoder_G726
+{
+public:
+    decoder_G726_24(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_24, "G.726 (24 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726_32 : public decoder_G726
+{
+public:
+    decoder_G726_32(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_32, "G.726 (32 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726_40 : public decoder_G726
+{
+public:
+    decoder_G726_40(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_40, "G.726 (40 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_SPEEX : public NETEQTEST_Decoder
+{
+public:
+    decoder_SPEEX(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
+    virtual ~decoder_SPEEX();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_CELT : public NETEQTEST_Decoder
+{
+public:
+    decoder_CELT(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 32000);
+    virtual ~decoder_CELT();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+// RED and AVT are pseudo-codecs: no instance is allocated, only function
+// pointers are registered by loadToNetEQ().
+class decoder_RED : public NETEQTEST_Decoder
+{
+public:
+    decoder_RED(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderRED, 8000, "RED", pt) {};
+    virtual ~decoder_RED() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_AVT : public NETEQTEST_Decoder
+{
+public:
+    decoder_AVT(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderAVT, 8000, "AVT", pt) {};
+    virtual ~decoder_AVT() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_CNG : public NETEQTEST_Decoder
+{
+public:
+    decoder_CNG(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
+    virtual ~decoder_CNG();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+#endif //NETEQTEST_CODECCLASS_H
diff --git a/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
new file mode 100644
index 0000000..2e60658
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
@@ -0,0 +1,384 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory.h>
+
+#include "NETEQTEST_NetEQClass.h"
+
+
+// Default constructor. Leaves the NetEQ instance unassigned; the caller
+// must invoke assign(), init() and assignBuffer() before use.
+NETEQTEST_NetEQClass::NETEQTEST_NetEQClass()
+    :
+    _inst(NULL),
+    _instMem(NULL),
+    _bufferMem(NULL),
+    _preparseRTP(false),
+    _fsmult(1),
+    _isMaster(true)
+{
+#ifdef WINDOWS_TIMING
+    // Zero the accumulated performance-counter totals (Windows-only).
+    _totTimeRecIn.QuadPart = 0;
+    _totTimeRecOut.QuadPart = 0;
+#endif
+}
+
+// Convenience constructor: assigns instance memory, initializes NetEQ at
+// sample rate 'fs', and sizes the packet buffer for the given codec list.
+// NOTE(review): failures are printed by the called methods but not
+// reported to the caller — there is no validity flag to check afterwards.
+NETEQTEST_NetEQClass::NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
+        WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
+    :
+    _inst(NULL),
+    _instMem(NULL),
+    _bufferMem(NULL),
+    _preparseRTP(false),
+    _fsmult(1),
+    _isMaster(true)
+{
+#ifdef WINDOWS_TIMING
+    _totTimeRecIn.QuadPart = 0;
+    _totTimeRecOut.QuadPart = 0;
+#endif
+
+    if (assign() == 0)
+    {
+        if (init(fs) == 0)
+        {
+            assignBuffer(usedCodec, noOfCodecs, nwType);
+        }
+    }
+}
+
+
+// Destructor: release the instance and packet-buffer memory. The NetEQ
+// instance itself lives inside _instMem (see WebRtcNetEQ_Assign in
+// assign()), so _inst is merely cleared, never deleted.
+NETEQTEST_NetEQClass::~NETEQTEST_NetEQClass()
+{
+    // delete[] on a NULL pointer is a no-op, so no guards are needed.
+    delete [] _instMem;
+    _instMem = NULL;
+
+    delete [] _bufferMem;
+    _bufferMem = NULL;
+
+    _inst = NULL;
+}
+
+// Allocate instance memory and hand it to NetEQ via WebRtcNetEQ_Assign.
+// Any previously assigned memory is released first. Returns 0 on success.
+int NETEQTEST_NetEQClass::assign()
+{
+    int memSize;
+
+    // NOTE(review): the return value of WebRtcNetEQ_AssignSize is ignored;
+    // memSize is presumably only valid when the call succeeds — confirm.
+    WebRtcNetEQ_AssignSize(&memSize);
+
+    if (_instMem)
+    {
+        delete [] _instMem;
+        _instMem = NULL;
+    }
+
+    _instMem = new WebRtc_Word8[memSize];
+
+    int ret = WebRtcNetEQ_Assign(&_inst, _instMem);
+
+    if (ret)
+    {
+        printError();
+    }
+
+    return (ret);
+}
+
+
+// Initialize the NetEQ instance at sample rate 'fs', lazily assigning
+// instance memory first if that has not happened yet. Returns 0 on
+// success; errors are printed and returned.
+int NETEQTEST_NetEQClass::init(WebRtc_UWord16 fs)
+{
+    int ret;
+
+    if (!_inst)
+    {
+        // not assigned
+        ret = assign();
+
+        if (ret != 0)
+        {
+            printError();
+            return (ret);
+        }
+    }
+
+    ret = WebRtcNetEQ_Init(_inst, fs);
+
+    if (ret != 0)
+    {
+        printError();
+    }
+
+    return (ret);
+
+}
+
+
+// Allocate and attach the packet buffer, sized by NetEQ's recommendation
+// for the given codec list and network type. Lazily performs assign() and
+// init() if the instance is not yet set up. Returns 0 on success.
+int NETEQTEST_NetEQClass::assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType)
+{
+    int numPackets, memSize, ret;
+
+    if (!_inst)
+    {
+        // not assigned
+        ret = assign();
+
+        if (ret != 0)
+        {
+            printError();
+            return (ret);
+        }
+
+        ret = init();
+
+        if (ret != 0)
+        {
+            printError();
+            return (ret);
+        }
+    }
+
+    ret = WebRtcNetEQ_GetRecommendedBufferSize(_inst, usedCodec, noOfCodecs, nwType, &numPackets, &memSize);
+
+    if (ret != 0)
+    {
+        printError();
+        return (ret);
+    }
+
+    // Replace any previously attached buffer.
+    if (_bufferMem)
+    {
+        delete [] _bufferMem;
+        _bufferMem = NULL;
+    }
+
+    _bufferMem = new WebRtc_Word8[memSize];
+
+    // Fill with a nonzero pattern (all bits set) before handing to NetEQ.
+    memset(_bufferMem, -1, memSize);
+
+    ret = WebRtcNetEQ_AssignBuffer(_inst, numPackets, _bufferMem, memSize);
+
+    if (ret != 0)
+    {
+        printError();
+    }
+
+    return (ret);
+}
+
+// Add one codec definition to NetEQ's codec database. Returns 0 on
+// success; errors are printed and returned.
+int NETEQTEST_NetEQClass::loadCodec(WebRtcNetEQ_CodecDef &codecInst)
+{
+    int err = WebRtcNetEQ_CodecDbAdd(_inst, &codecInst);
+
+    if (err)
+    {
+        printError();
+    }
+
+    return (err);
+}
+
+// Print NetEQ's most recent error code and name to stdout. Silently does
+// nothing if no instance is assigned or the error code is 0.
+void NETEQTEST_NetEQClass::printError()
+{
+    if (_inst)
+    {
+        int errorCode = WebRtcNetEQ_GetErrorCode(_inst);
+
+        if (errorCode)
+        {
+            char errorName[WEBRTC_NETEQ_MAX_ERROR_NAME];
+
+            WebRtcNetEQ_GetErrorName(errorCode, errorName, WEBRTC_NETEQ_MAX_ERROR_NAME);
+
+            printf("Error %i: %s\n", errorCode, errorName);
+        }
+    }
+}
+
+// Like printError(), but also dumps header fields of the RTP packet that
+// triggered the error.
+void NETEQTEST_NetEQClass::printError(NETEQTEST_RTPpacket &rtp)
+{
+    // print regular error info
+    printError();
+
+    // print extra info from packet
+    printf("\tRTP: TS=%u, SN=%u, PT=%u, M=%i, len=%i\n",
+           rtp.timeStamp(), rtp.sequenceNumber(), rtp.payloadType(),
+           rtp.markerBit(), rtp.payloadLen());
+
+}
+
+// Feed one RTP packet into NetEQ. When _preparseRTP is set, the header is
+// parsed here and the payload is delivered via RecInRTPStruct; otherwise
+// the raw datagram goes to RecIn. Under WINDOWS_TIMING the processor
+// cycles spent in the NetEQ call are accumulated in _totTimeRecIn.
+// Returns 0 on success; errors are printed (with packet info) and
+// returned.
+int NETEQTEST_NetEQClass::recIn(NETEQTEST_RTPpacket &rtp)
+{
+
+    int err;
+#ifdef WINDOWS_TIMING
+    LARGE_INTEGER countA, countB;
+#endif
+
+    if (_preparseRTP)
+    {
+        WebRtcNetEQ_RTPInfo rtpInfo;
+        // parse RTP header
+        rtp.parseHeader(rtpInfo);
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countA); // get start count for processor
+#endif
+
+        // rtp.time() * _fsmult * 8: presumably converts the arrival time in
+        // ms to sample units (8 * fsmult samples per ms) — confirm against
+        // WebRtcNetEQ_RecInRTPStruct.
+        err = WebRtcNetEQ_RecInRTPStruct(_inst, &rtpInfo, rtp.payload(), rtp.payloadLen(), rtp.time() * _fsmult * 8);
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countB); // get stop count for processor
+        _totTimeRecIn.QuadPart += (countB.QuadPart - countA.QuadPart);
+#endif
+
+    }
+    else
+    {
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countA); // get start count for processor
+#endif
+
+        err = WebRtcNetEQ_RecIn(_inst, (WebRtc_Word16 *) rtp.datagram(), rtp.dataLen(), rtp.time() * _fsmult * 8);
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countB); // get stop count for processor
+        _totTimeRecIn.QuadPart += (countB.QuadPart - countA.QuadPart);
+#endif
+
+    }
+
+    if (err)
+    {
+        printError(rtp);
+    }
+
+    return (err);
+
+}
+
+
+// Pull one frame of decoded audio from NetEQ into outData.
+// - msInfo == NULL: mono mode (WebRtcNetEQ_RecOut).
+// - msInfo != NULL: master/slave mode; _isMaster selects the role.
+// If outputType is non-NULL it receives the speech output type.
+// Returns the number of output samples (0 on error; errors are printed).
+WebRtc_Word16 NETEQTEST_NetEQClass::recOut(WebRtc_Word16 *outData, void *msInfo, enum WebRtcNetEQOutputType *outputType)
+{
+    int err;
+    WebRtc_Word16 outLen = 0;
+#ifdef WINDOWS_TIMING
+    LARGE_INTEGER countA, countB;
+#endif
+
+#ifdef WINDOWS_TIMING
+    QueryPerformanceCounter(&countA); // get start count for processor
+#endif
+
+    if (!msInfo)
+    {
+        // no msInfo given, do mono mode
+        err = WebRtcNetEQ_RecOut(_inst, outData, &outLen);
+    }
+    else
+    {
+        // master/slave mode
+        err = WebRtcNetEQ_RecOutMasterSlave(_inst, outData, &outLen, msInfo, static_cast<WebRtc_Word16>(_isMaster));
+    }
+
+#ifdef WINDOWS_TIMING
+    QueryPerformanceCounter(&countB); // get stop count for processor
+    _totTimeRecOut.QuadPart += (countB.QuadPart - countA.QuadPart);
+#endif
+
+    if (err)
+    {
+        printError();
+    }
+    else
+    {
+        // Track sample-rate changes: outLen / 80 assumes one output frame
+        // is 10 ms (80 samples at 8 kHz) — presumably NetEQ's frame size;
+        // confirm before relying on _fsmult elsewhere.
+        int newfsmult = static_cast<int>(outLen / 80);
+
+        if (newfsmult != _fsmult)
+        {
+#ifdef NETEQTEST_PRINT_WARNINGS
+            printf("Warning: output sample rate changed\n");
+#endif  // NETEQTEST_PRINT_WARNINGS
+            _fsmult = newfsmult;
+        }
+    }
+
+    if (outputType != NULL)
+    {
+        err = WebRtcNetEQ_GetSpeechOutputType(_inst, outputType);
+
+        if (err)
+        {
+            printError();
+        }
+    }
+
+    return (outLen);
+}
+
+
+// Query the timestamp of the most recent speech output. Returns 0 when
+// the query fails (the error is printed).
+WebRtc_UWord32 NETEQTEST_NetEQClass::getSpeechTimeStamp()
+{
+    WebRtc_UWord32 timestamp = 0;
+
+    if (WebRtcNetEQ_GetSpeechTimeStamp(_inst, &timestamp) != 0)
+    {
+        printError();
+        timestamp = 0;
+    }
+
+    return timestamp;
+}
+
+// Query the type of the most recent output frame. Falls back to
+// kOutputNormal when the query fails (the error is printed).
+WebRtcNetEQOutputType NETEQTEST_NetEQClass::getOutputType()
+{
+    WebRtcNetEQOutputType outputType;
+
+    if (WebRtcNetEQ_GetSpeechOutputType(_inst, &outputType) != 0)
+    {
+        printError();
+        outputType = kOutputNormal;
+    }
+
+    return outputType;
+}
+
+//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels)
+//:
+//channels(numChannels, new NETEQTEST_NetEQClass())
+//{
+//    //for (int i = 0; i < numChannels; i++)
+//    //{
+//    //    channels.push_back(new NETEQTEST_NetEQClass());
+//    //}
+//}
+//
+//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
+//                      WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
+//                      :
+//channels(numChannels, new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType))
+//{
+//    //for (int i = 0; i < numChannels; i++)
+//    //{
+//    //    channels.push_back(new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType));
+//    //}
+//}
+//
+//NETEQTEST_NetEQVector::~NETEQTEST_NetEQVector()
+//{
+//}
+
diff --git a/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
new file mode 100644
index 0000000..c425b58
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_NETEQCLASS_H
+#define NETEQTEST_NETEQCLASS_H
+
+#include <stdio.h>
+#include <vector>
+
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+
+#include "NETEQTEST_RTPpacket.h"
+
+#ifdef WIN32
+#define WINDOWS_TIMING // complexity measurement only implemented for windows
+//TODO(hlundin):Add complexity testing for Linux.
+#include <windows.h>
+#endif
+
+// Thin C++ wrapper around a single NetEQ instance for test programs:
+// owns the instance and packet-buffer memory, feeds packets in (recIn)
+// and pulls audio out (recOut), and optionally accumulates per-call
+// processor-cycle counts on Windows.
+class NETEQTEST_NetEQClass
+{
+public:
+    NETEQTEST_NetEQClass();
+    // Assigns, initializes and sizes the buffer in one step.
+    NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, 
+        WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
+    ~NETEQTEST_NetEQClass();
+
+    // Setup steps (each returns 0 on success; see the .cc for details).
+    int assign();
+    int init(WebRtc_UWord16 fs = 8000);
+    int assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
+    int loadCodec(WebRtcNetEQ_CodecDef & codecInst);
+    // Packet in / audio out.
+    int recIn(NETEQTEST_RTPpacket & rtp);
+    WebRtc_Word16 recOut(WebRtc_Word16 *outData, void *msInfo = NULL, enum WebRtcNetEQOutputType *outputType = NULL);
+    WebRtc_UWord32 getSpeechTimeStamp();
+    WebRtcNetEQOutputType getOutputType();
+
+    // Raw NetEQ instance handle (NULL until assign() succeeds).
+    void * instance() { return (_inst); };
+    // Toggle header pre-parsing: recIn() then uses RecInRTPStruct.
+    void usePreparseRTP( bool useIt = true ) { _preparseRTP = useIt; };
+    bool usingPreparseRTP() { return (_preparseRTP); };
+    // Master/slave role used by recOut() in master/slave mode.
+    void setMaster( bool isMaster = true ) { _isMaster = isMaster; };
+    void setSlave() { _isMaster = false; };
+    bool isMaster() { return (_isMaster); };
+    bool isSlave() { return (!_isMaster); };
+
+#ifdef WINDOWS_TIMING
+    // Accumulated QueryPerformanceCounter ticks spent in recIn()/recOut().
+    double getRecInTime() { return (static_cast<double>( _totTimeRecIn.QuadPart )); };
+    double getRecOutTime() { return (static_cast<double>( _totTimeRecOut.QuadPart )); };
+#else
+    // Timing not implemented outside Windows; always 0.
+    double getRecInTime() { return (0.0); };
+    double getRecOutTime() { return (0.0); };
+
+#endif
+
+    void printError();
+    void printError(NETEQTEST_RTPpacket & rtp);
+
+private:
+    void *          _inst;       // NetEQ instance (points into _instMem)
+    WebRtc_Word8 *    _instMem;    // owned instance memory
+    WebRtc_Word8 *    _bufferMem;  // owned packet-buffer memory
+    bool            _preparseRTP; // see usePreparseRTP()
+    int             _fsmult;     // sample-rate multiple of 8 kHz
+    bool            _isMaster;   // role in master/slave mode
+#ifdef WINDOWS_TIMING
+    LARGE_INTEGER   _totTimeRecIn;
+    LARGE_INTEGER   _totTimeRecOut;
+#endif
+};
+
+
+
+//class NETEQTEST_NetEQVector
+//{
+//public:
+//    NETEQTEST_NetEQVector(int numChannels);
+//    NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, 
+//        WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
+//    ~NETEQTEST_NetEQVector();
+//
+//private:
+//    std::vector<NETEQTEST_NetEQClass *> channels;
+//};
+
+#endif //NETEQTEST_NETEQCLASS_H
diff --git a/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
new file mode 100644
index 0000000..0412f06
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
@@ -0,0 +1,875 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "NETEQTEST_RTPpacket.h"
+
+#include <assert.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <netinet/in.h> // for htons, htonl, etc
+#endif
+
+#include <cstdlib>
+
+#include "gtest/gtest.h"
+
+#define HDR_SIZE 8 // rtpplay packet header size in bytes
+
+
+// Default constructor: empty packet. _datagramLen of -1 means "no
+// datagram read yet"; the header info struct is zeroed and the blocked
+// payload-type list starts empty.
+NETEQTEST_RTPpacket::NETEQTEST_RTPpacket()
+:
+_datagram(NULL),
+_payloadPtr(NULL),
+_memSize(0),
+_datagramLen(-1),
+_payloadLen(0),
+_rtpParsed(false),
+_receiveTime(0),
+_lost(false)
+{
+    memset(&_rtpInfo, 0, sizeof(_rtpInfo));
+    _blockList.clear();
+}
+
+// Copy constructor (deep copy of the datagram buffer).
+//
+// BUG FIX: the original started with memcpy(this, &copyFromMe, sizeof(*this)),
+// which is undefined behavior for a class with a std::map member
+// (_blockList): the raw copy clobbers the map's internal pointers before
+// the subsequent map assignment runs. Copy each member explicitly instead.
+NETEQTEST_RTPpacket::NETEQTEST_RTPpacket(const NETEQTEST_RTPpacket& copyFromMe)
+:
+_datagram(NULL),
+_payloadPtr(NULL),
+_memSize(copyFromMe._memSize),
+_datagramLen(copyFromMe._datagramLen),
+_payloadLen(copyFromMe._payloadLen),
+_rtpParsed(copyFromMe._rtpParsed),
+_receiveTime(copyFromMe._receiveTime),
+_lost(copyFromMe._lost)
+{
+    memcpy(&_rtpInfo, &copyFromMe._rtpInfo, sizeof(_rtpInfo));
+
+    if (copyFromMe._datagram)
+    {
+        _datagram = new WebRtc_UWord8[_memSize];
+
+        if (_datagram)
+        {
+            memcpy(_datagram, copyFromMe._datagram, _memSize);
+        }
+    }
+
+    if (copyFromMe._payloadPtr && _datagram)
+    {
+        // Re-base the payload pointer into our own buffer.
+        _payloadPtr = _datagram + (copyFromMe._payloadPtr - copyFromMe._datagram);
+    }
+
+    _blockList = copyFromMe._blockList;
+}
+
+    
+// Assignment operator (deep copy of the datagram buffer).
+//
+// BUG FIX: the original did memcpy(this, &other, sizeof(*this)), which is
+// undefined behavior for a class with a std::map member (_blockList) —
+// the raw copy corrupts the live map before it is reassigned. Copy each
+// member explicitly instead.
+NETEQTEST_RTPpacket & NETEQTEST_RTPpacket::operator = (const NETEQTEST_RTPpacket & other)
+{
+    if (this != &other) // protect against invalid self-assignment
+    {
+        // deallocate datagram memory if allocated
+        if (_datagram)
+        {
+            delete [] _datagram;
+            _datagram = NULL;
+        }
+
+        // member-wise copy of the scalar fields
+        _memSize = other._memSize;
+        _datagramLen = other._datagramLen;
+        _payloadLen = other._payloadLen;
+        _rtpParsed = other._rtpParsed;
+        _receiveTime = other._receiveTime;
+        _lost = other._lost;
+        memcpy(&_rtpInfo, &other._rtpInfo, sizeof(_rtpInfo));
+        _payloadPtr = NULL;
+
+        if (other._datagram)
+        {
+            _datagram = new WebRtc_UWord8[other._memSize];
+
+            if (_datagram)
+            {
+                memcpy(_datagram, other._datagram, _memSize);
+            }
+        }
+
+        if (other._payloadPtr && _datagram)
+        {
+            // re-base the payload pointer into our own buffer
+            _payloadPtr = _datagram + (other._payloadPtr - other._datagram);
+        }
+
+        // copy the blocking list (map)
+        _blockList = other._blockList;
+
+    }
+
+    // by convention, always return *this
+    return *this;
+}
+
+
+
+NETEQTEST_RTPpacket::~NETEQTEST_RTPpacket()
+{
+    if(_datagram) 
+    {
+        delete [] _datagram;
+    }
+}
+
+
+// Return the object to the "no datagram" state, freeing the buffer.
+// NOTE(review): _lost and _blockList are intentionally(?) NOT reset here —
+// the blocked payload-type list survives across reads; confirm that _lost
+// surviving is also intended.
+void NETEQTEST_RTPpacket::reset()
+{
+    if(_datagram) {
+        delete [] _datagram;
+    }
+    _datagram = NULL;
+    _memSize = 0;
+    _datagramLen = -1;
+    _payloadLen = 0;
+    _payloadPtr = NULL;
+    _receiveTime = 0;
+    memset(&_rtpInfo, 0, sizeof(_rtpInfo));
+    _rtpParsed = false;
+
+}
+
+// Validate and skip the file header of an rtpplay/RTPencode dump so that
+// the stream is positioned at the first packet record.
+// Returns 0 on success, -1 on a NULL file, read failure, or an
+// unrecognized/wrong-version magic line.
+int NETEQTEST_RTPpacket::skipFileHeader(FILE *fp)
+{
+    if (!fp) {
+        return -1;
+    }
+
+    // The file must begin with one of the two accepted magic lines,
+    // carrying an explicit "1.0" version tag.
+    const int kFirstLineLength = 40;
+    char firstline[kFirstLineLength];
+    if (fgets(firstline, kFirstLineLength, fp) == NULL) {
+        return -1;
+    }
+
+    bool versionOk = false;
+    if (strncmp(firstline, "#!rtpplay", 9) == 0) {
+        versionOk = (strncmp(firstline, "#!rtpplay1.0", 12) == 0);
+    } else if (strncmp(firstline, "#!RTPencode", 11) == 0) {
+        versionOk = (strncmp(firstline, "#!RTPencode1.0", 14) == 0);
+    }
+    if (!versionOk) {
+        return -1;
+    }
+
+    // Skip the fixed-size binary header that follows the magic line
+    // (4 + 4 + 4 + 2 + 2 bytes).
+    const int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+    if (fseek(fp, kRtpDumpHeaderSize, SEEK_CUR) != 0) {
+        return -1;
+    }
+    return 0;
+}
+
+// Read the next rtpplay packet record from fp into this object.
+// Record layout: 2-byte total length (incl. the 8-byte record header),
+// 2-byte packet length (0 means RTCP), 4-byte time offset, then the
+// datagram bytes. All header fields are big-endian.
+// Returns the packet length, -2 at end of file, or -1 on a read error.
+// Packets whose payload type is in the block list are skipped.
+int NETEQTEST_RTPpacket::readFromFile(FILE *fp)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 length, plen;
+    WebRtc_UWord32 offset;
+
+    if (fread(&length, 2, 1, fp) == 0)
+    {
+        reset();
+        return -2; // no more records
+    }
+    length = ntohs(length);
+
+    if (fread(&plen, 2, 1, fp) == 0)
+    {
+        reset();
+        return -1;
+    }
+    int packetLen = ntohs(plen);
+
+    if (fread(&offset, 4, 1, fp) == 0)
+    {
+        reset();
+        return -1;
+    }
+    // Keep in a local until the possible reset() calls below are done.
+    WebRtc_UWord32 receiveTime = ntohl(offset);
+
+    // Use 'length' (not plen) because a plen of 0 specifies RTCP.
+    length = (WebRtc_UWord16) (length - HDR_SIZE);
+
+    // Re-allocate the datagram buffer if the current one is too small.
+    if (_datagram && _memSize < length)
+    {
+        reset();
+    }
+    if (!_datagram)
+    {
+        _datagram = new WebRtc_UWord8[length];
+        _memSize = length;
+    }
+
+    if (fread(_datagram, 1, length, fp) != length)
+    {
+        reset();
+        return -1;
+    }
+
+    _datagramLen = length;
+    _receiveTime = receiveTime;
+
+    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
+    {
+        // Blocked payload type: discard and read the next record.
+        return readFromFile(fp);
+    }
+
+    return packetLen;
+}
+
+
+// Read a fixed-size datagram of 'length' bytes from fp (no rtpplay record
+// header precedes the data). Returns the number of bytes read, or -1 on
+// error. Packets with a blocked payload type are skipped.
+int NETEQTEST_RTPpacket::readFixedFromFile(FILE *fp, size_t length)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    // check buffer size
+    if (_datagram && _memSize < static_cast<int>(length))
+    {
+        reset();
+    }
+
+    if (!_datagram)
+    {
+        _datagram = new WebRtc_UWord8[length];
+        _memSize = length;
+    }
+
+    if (fread(_datagram, 1, length, fp) != length)
+    {
+        reset();
+        return -1;
+    }
+
+    _datagramLen = length;
+    _receiveTime = 0;
+
+    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
+    {
+        // Discard this payload and read the next fixed-size record.
+        // BUG FIX: the original recursed into readFromFile(), which parses
+        // an rtpplay record header that is not present in fixed-size mode.
+        return readFixedFromFile(fp, length);
+    }
+
+    return length;
+
+}
+
+
+// Write this packet to fp as one rtpplay record: total length (including
+// the 8-byte record header), packet length, receive-time offset (all
+// big-endian), then the raw datagram. Returns the total number of bytes
+// written, or -1 on any write failure.
+int NETEQTEST_RTPpacket::writeToFile(FILE *fp)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 length, plen;
+    WebRtc_UWord32 offset;
+
+    // length including RTPplay header
+    length = htons(_datagramLen + HDR_SIZE);
+    if (fwrite(&length, 2, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+    // payload length
+    plen = htons(_datagramLen);
+    if (fwrite(&plen, 2, 1, fp) != 1)
+    {
+        return -1;
+    }
+    
+    // offset (=receive time)
+    offset = htonl(_receiveTime);
+    if (fwrite(&offset, 4, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+
+    // write packet data
+    if (fwrite(_datagram, 1, _datagramLen, fp) !=
+            static_cast<size_t>(_datagramLen))
+    {
+        return -1;
+    }
+
+    return _datagramLen + HDR_SIZE; // total number of bytes written
+
+}
+
+
+// Mark payload type 'pt' as blocked: readFromFile()/readFixedFromFile()
+// will silently discard packets carrying this payload type.
+void NETEQTEST_RTPpacket::blockPT(WebRtc_UWord8 pt)
+{
+    _blockList[pt] = true;
+}
+
+
+// Parse the RTP header of the current datagram into the cached _rtpInfo
+// and set _payloadPtr/_payloadLen. Idempotent: a second call is a no-op
+// until reset() clears _rtpParsed. Datagrams shorter than the 12-byte
+// minimum RTP header are silently ignored.
+void NETEQTEST_RTPpacket::parseHeader()
+{
+    if (_rtpParsed)
+    {
+        // nothing to do
+        return;
+    }
+
+    if (_datagramLen < 12)
+    {
+        // corrupt packet?
+        return;
+    }
+
+    _payloadLen = parseRTPheader(_datagram, _datagramLen, &_rtpInfo, &_payloadPtr);
+
+    _rtpParsed = true;
+
+    return;
+
+}
+
+// Parse (if necessary) and copy the cached header info into rtpInfo.
+void NETEQTEST_RTPpacket::parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo)
+{
+    if (!_rtpParsed)
+    {
+        // parse the header
+        parseHeader();
+    }
+
+    memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcNetEQ_RTPInfo));
+}
+
+// Read-only access to the cached header info; NULL until parseHeader()
+// has been called.
+WebRtcNetEQ_RTPInfo const * NETEQTEST_RTPpacket::RTPinfo() const
+{
+    return _rtpParsed ? &_rtpInfo : NULL;
+}
+
+// Raw datagram bytes, or NULL while no datagram has been read.
+WebRtc_UWord8 * NETEQTEST_RTPpacket::datagram() const
+{
+    return (_datagramLen > 0) ? _datagram : NULL;
+}
+
+// Pointer to the RTP payload inside the datagram (valid after
+// parseHeader()), or NULL when the payload is empty.
+WebRtc_UWord8 * NETEQTEST_RTPpacket::payload() const
+{
+    return (_payloadLen > 0) ? _payloadPtr : NULL;
+}
+
+// Payload length in bytes (0 until parseHeader() has run).
+WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen() const
+{
+    return _payloadLen;
+}
+
+// Full datagram length in bytes (-1 while no datagram has been read).
+WebRtc_Word16 NETEQTEST_RTPpacket::dataLen() const
+{
+    return _datagramLen;
+}
+
+// True once parseHeader() has populated the cached header info.
+bool NETEQTEST_RTPpacket::isParsed() const
+{
+    return _rtpParsed;
+}
+
+// True if this packet has been marked as lost.
+bool NETEQTEST_RTPpacket::isLost() const
+{
+    return _lost;
+}
+
+// The following accessors parse the RTP header into a scratch struct on
+// every call, so they work even before parseHeader() has been invoked.
+// Each returns 0 when no datagram is present.
+
+WebRtc_UWord8 NETEQTEST_RTPpacket::payloadType() const
+{
+    if (!_datagram)
+    {
+        return 0;
+    }
+
+    WebRtcNetEQ_RTPInfo scratch;
+    parseRTPheader(_datagram, _datagramLen, &scratch);
+    return scratch.payloadType;
+}
+
+WebRtc_UWord16 NETEQTEST_RTPpacket::sequenceNumber() const
+{
+    if (!_datagram)
+    {
+        return 0;
+    }
+
+    WebRtcNetEQ_RTPInfo scratch;
+    parseRTPheader(_datagram, _datagramLen, &scratch);
+    return scratch.sequenceNumber;
+}
+
+WebRtc_UWord32 NETEQTEST_RTPpacket::timeStamp() const
+{
+    if (!_datagram)
+    {
+        return 0;
+    }
+
+    WebRtcNetEQ_RTPInfo scratch;
+    parseRTPheader(_datagram, _datagramLen, &scratch);
+    return scratch.timeStamp;
+}
+
+WebRtc_UWord32 NETEQTEST_RTPpacket::SSRC() const
+{
+    if (!_datagram)
+    {
+        return 0;
+    }
+
+    WebRtcNetEQ_RTPInfo scratch;
+    parseRTPheader(_datagram, _datagramLen, &scratch);
+    return scratch.SSRC;
+}
+
+WebRtc_UWord8 NETEQTEST_RTPpacket::markerBit() const
+{
+    if (!_datagram)
+    {
+        return 0;
+    }
+
+    WebRtcNetEQ_RTPInfo scratch;
+    parseRTPheader(_datagram, _datagramLen, &scratch);
+    return scratch.markerBit;
+}
+
+
+
+// Header field setters. Each patches the field directly in the datagram
+// bytes and returns 0, or -1 when there is no complete 12-byte RTP header.
+//
+// FIX: the cached _rtpInfo is now updated unconditionally. The original
+// guarded the cache update with (!_rtpParsed) — except setMarkerBit, which
+// inconsistently used (_rtpParsed) — so after parseHeader() the cache kept
+// a stale value while the datagram changed. Updating the cache always is
+// harmless before parsing (parseHeader() overwrites it) and correct after.
+
+int NETEQTEST_RTPpacket::setPayloadType(WebRtc_UWord8 pt)
+{
+    
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    _rtpInfo.payloadType = pt; // keep cache coherent with the datagram
+
+    _datagram[1]=(unsigned char)(pt & 0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setSequenceNumber(WebRtc_UWord16 sn)
+{
+    
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    _rtpInfo.sequenceNumber = sn; // keep cache coherent with the datagram
+
+    _datagram[2]=(unsigned char)((sn>>8)&0xFF);
+    _datagram[3]=(unsigned char)((sn)&0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setTimeStamp(WebRtc_UWord32 ts)
+{
+    
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    _rtpInfo.timeStamp = ts; // keep cache coherent with the datagram
+
+    _datagram[4]=(unsigned char)((ts>>24)&0xFF);
+    _datagram[5]=(unsigned char)((ts>>16)&0xFF);
+    _datagram[6]=(unsigned char)((ts>>8)&0xFF); 
+    _datagram[7]=(unsigned char)(ts & 0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setSSRC(WebRtc_UWord32 ssrc)
+{
+    
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    _rtpInfo.SSRC = ssrc; // keep cache coherent with the datagram
+
+    _datagram[8]=(unsigned char)((ssrc>>24)&0xFF);
+    _datagram[9]=(unsigned char)((ssrc>>16)&0xFF);
+    _datagram[10]=(unsigned char)((ssrc>>8)&0xFF);
+    _datagram[11]=(unsigned char)(ssrc & 0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setMarkerBit(WebRtc_UWord8 mb)
+{
+    
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    _rtpInfo.markerBit = mb; // keep cache coherent with the datagram
+
+    // NOTE(review): the marker flag is stored in bit 0 of the FIRST header
+    // byte here, matching makeRTPheader() in this file but NOT the
+    // standard RTP layout (marker = bit 7 of the second byte). Left
+    // unchanged for internal consistency — confirm against parseRTPheader.
+    if (mb)
+    {
+        _datagram[0] |= 0x01;
+    }
+    else
+    {
+        _datagram[0] &= 0xFE;
+    }
+
+    return 0;
+
+}
+
+// Rewrite the entire 12-byte RTP header in the datagram from the given
+// info struct. Returns 0 on success, -1 if no complete header is present.
+// NOTE(review): the cached _rtpInfo is NOT refreshed here, so RTPinfo()
+// can return stale values after this call if the header was already
+// parsed — confirm whether callers rely on that.
+int NETEQTEST_RTPpacket::setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo)
+{
+    if (_datagramLen < 12)
+    {
+        // this packet is not ok
+        return -1;
+    }
+
+    makeRTPheader(_datagram, 
+        RTPinfo->payloadType, 
+        RTPinfo->sequenceNumber, 
+        RTPinfo->timeStamp, 
+        RTPinfo->SSRC,
+        RTPinfo->markerBit);
+
+    return 0;
+}
+
+
+// Split an interleaved stereo payload between this packet (master channel)
+// and |slaveRtp| (slave channel) according to |mode|. Returns 0 on
+// success, -1 on an odd payload length or an invalid mode.
+int NETEQTEST_RTPpacket::splitStereo(NETEQTEST_RTPpacket& slaveRtp, enum stereoModes mode)
+{
+    // Mono streams need no splitting.
+    if (mode == stereoModeMono)
+    {
+        return 0;
+    }
+
+    // Make sure the RTP header fields are available.
+    parseHeader();
+
+    // The slave packet starts out as an exact copy of the master.
+    slaveRtp = *this;
+
+    if (_payloadLen == 0)
+    {
+        // Empty payload; the copy above is all that is needed.
+        return 0;
+    }
+
+    if ((_payloadLen % 2) != 0)
+    {
+        // A two-channel payload must have an even byte count.
+        return -1;
+    }
+
+    if (mode == stereoModeSample1)
+    {
+        // Sample-based codec, 1 byte per sample.
+        splitStereoSample(slaveRtp, 1);
+    }
+    else if (mode == stereoModeSample2)
+    {
+        // Sample-based codec, 2 bytes per sample.
+        splitStereoSample(slaveRtp, 2);
+    }
+    else if (mode == stereoModeFrame)
+    {
+        // Frame-based codec: channel data stored back-to-back.
+        splitStereoFrame(slaveRtp);
+    }
+    else
+    {
+        // stereoModeMono was handled above; reaching here is a logic error.
+        assert(false);
+        return -1;
+    }
+
+    return 0;
+}
+
+
+// Write a 12-byte RTP header (RFC 3550; no CSRC list, no extension) into
+// rtp_data. All multi-byte fields are stored in network byte order.
+void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const
+{
+    // Byte 0: version 2, no padding, no extension, zero CSRCs.
+    rtp_data[0] = (unsigned char)0x80;
+
+    // Byte 1: marker bit (top bit) plus 7-bit payload type. The original
+    // stored the marker in bit 0 of byte 0, which is part of the
+    // CSRC-count field and is not where parseRTPheader reads it back.
+    rtp_data[1] = (unsigned char)(payloadType & 0x7F);
+    if (markerBit)
+    {
+        rtp_data[1] |= 0x80;
+    }
+
+    rtp_data[2] = (unsigned char)((seqNo >> 8) & 0xFF);
+    rtp_data[3] = (unsigned char)(seqNo & 0xFF);
+
+    rtp_data[4] = (unsigned char)((timestamp >> 24) & 0xFF);
+    rtp_data[5] = (unsigned char)((timestamp >> 16) & 0xFF);
+    rtp_data[6] = (unsigned char)((timestamp >> 8) & 0xFF);
+    rtp_data[7] = (unsigned char)(timestamp & 0xFF);
+
+    rtp_data[8] = (unsigned char)((ssrc >> 24) & 0xFF);
+    rtp_data[9] = (unsigned char)((ssrc >> 16) & 0xFF);
+    rtp_data[10] = (unsigned char)((ssrc >> 8) & 0xFF);
+    rtp_data[11] = (unsigned char)(ssrc & 0xFF);
+}
+
+
+// Parse the RTP header contained in |datagram| into |RTPinfo| and locate
+// the payload.
+// Returns the payload length in bytes (fixed header, CSRC list, extension
+// header and padding excluded). If |payloadPtr| is non-NULL it receives a
+// pointer to the first payload byte inside |datagram|.
+// NOTE(review): the datagram is accessed through a 16-bit pointer and the
+// byte-swapping below assumes a little-endian host -- confirm before
+// building for a big-endian target. There is also no check that
+// datagramLen is large enough for the fields being read.
+WebRtc_UWord16 NETEQTEST_RTPpacket::parseRTPheader(const WebRtc_UWord8 *datagram, int datagramLen, WebRtcNetEQ_RTPInfo *RTPinfo, WebRtc_UWord8 **payloadPtr) const
+{
+    WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) datagram;
+    int i_P, i_X, i_CC, i_extlength=-1, i_padlength=0, i_startPosition;
+
+	i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5);				/* Extract the P bit		*/
+	i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4);				/* Extract the X bit		*/
+	i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF);						/* Get the CC number		*/
+    RTPinfo->markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01);    /* Get the marker bit */
+    RTPinfo->payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F);	/* Get the coder type		*/
+    RTPinfo->sequenceNumber = ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) | 
+		( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8));			/* Get the packet number	*/
+	RTPinfo->timeStamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) | 
+		((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) | 
+		((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
+		((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8);			/* Get timestamp            */
+	RTPinfo->SSRC=((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) | 
+		((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) | 
+		((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
+		((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8);			/* Get the SSRC				*/
+
+	if (i_X==1) {
+		/* Extension header exists. Find out how many WebRtc_Word32 it consists of */
+		i_extlength=((( ((WebRtc_UWord16)rtp_data[7+2*i_CC]) >> 8) & 0xFF) |
+				( ((WebRtc_UWord16)(rtp_data[7+2*i_CC]&0xFF)) << 8));
+	}
+	if (i_P==1) {
+		/* Padding exists. Find out how many bytes the padding consists of */
+		if (datagramLen & 0x1) {
+			/* odd number of bytes => last byte in higher byte */
+			i_padlength=(rtp_data[datagramLen>>1] & 0xFF);
+		} else {
+			/* even number of bytes => last byte in lower byte */
+			i_padlength=(((WebRtc_UWord16)rtp_data[(datagramLen>>1)-1]) >> 8);
+		}
+	}
+
+	/* Payload starts after the 12-byte fixed header, the CSRC list, and
+	   the extension (i_extlength stays -1 when no extension is present,
+	   making the extension term zero). */
+	i_startPosition=12+4*(i_extlength+1)+4*i_CC;
+
+    if (payloadPtr) {
+        *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition>>1];
+    }
+
+	return (WebRtc_UWord16) (datagramLen-i_startPosition-i_padlength);
+}
+
+//void NETEQTEST_RTPpacket::splitStereoSample(WebRtc_UWord8 *data, WebRtc_UWord16 *lenBytes, WebRtc_UWord8 *slaveData, WebRtc_UWord16 *slaveLenBytes, int stride)
+// De-interleave a sample-interleaved stereo payload in place: master
+// samples (|stride| bytes each) are compacted into this packet's payload,
+// slave samples are copied into slaveRtp's payload, and both payload
+// lengths are halved. No-op if either payload pointer is unset, the
+// payload is empty, or the slave buffer is smaller than the master's.
+void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket& slaveRtp, int stride)
+{
+    if (!_payloadPtr || !slaveRtp._payloadPtr ||
+        _payloadLen <= 0 || slaveRtp._memSize < _memSize)
+    {
+        return;
+    }
+
+    int readIx = 0;
+    int writeIx = 0;
+    int slaveIx = 0;
+
+    while (readIx < _payloadLen)
+    {
+        // Master sample: compact towards the front of this payload.
+        for (int b = 0; b < stride; b++)
+        {
+            _payloadPtr[writeIx++] = _payloadPtr[readIx++];
+        }
+
+        // Slave sample: copy into the slave packet's payload.
+        for (int b = 0; b < stride; b++)
+        {
+            slaveRtp._payloadPtr[slaveIx++] = _payloadPtr[readIx++];
+        }
+    }
+
+    _payloadLen /= 2;
+    slaveRtp._payloadLen = _payloadLen;
+}
+
+
+//void NETEQTEST_RTPpacket::splitStereoFrame(WebRtc_UWord8 *data, WebRtc_UWord16 *lenBytes, WebRtc_UWord8 *slaveData, WebRtc_UWord16 *slaveLenBytes)
+// Split a frame-interleaved stereo payload (channels stored back-to-back):
+// the second half of the payload moves into slaveRtp, and both payload
+// lengths are halved. No-op if either payload pointer is unset, the
+// payload is empty, or the slave buffer is smaller than the master's.
+void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket& slaveRtp)
+{
+    const bool canSplit = _payloadPtr && slaveRtp._payloadPtr &&
+                          _payloadLen > 0 && slaveRtp._memSize >= _memSize;
+    if (!canSplit)
+    {
+        return;
+    }
+
+    const int halfLen = _payloadLen / 2;
+    memmove(slaveRtp._payloadPtr, _payloadPtr + halfLen, halfLen);
+
+    _payloadLen = halfLen;
+    slaveRtp._payloadLen = halfLen;
+}
+
+// Get the RTP header for the RED payload indicated by argument index.
+// The first RED payload is index = 0.
+// Walks the RED block headers (RFC 2198) at the front of the payload and
+// fills |red| with the reconstructed RTP header for block |index|.
+// Returns that block's length in bytes, or -1 if the packet holds fewer
+// than index+1 blocks.
+// NOTE(review): ptr[1..3] reads are guarded only by ptr < payloadEndPtr;
+// a truncated payload could be over-read. Confirm inputs are validated
+// upstream.
+int NETEQTEST_RTPpacket::extractRED(int index, WebRtcNetEQ_RTPInfo& red)
+{
+//
+//  0                   1                    2                   3
+//  0 1 2 3 4 5 6 7 8 9 0 1 2 3  4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |1|   block PT  |  timestamp offset         |   block length    |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |1|    ...                                                      |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |0|   block PT  |
+// +-+-+-+-+-+-+-+-+
+//
+
+    parseHeader();
+
+    WebRtc_UWord8* ptr = payload();
+    WebRtc_UWord8* payloadEndPtr = ptr + payloadLen();
+    int num_encodings = 0;
+    int total_len = 0;
+
+    // Every header with the F bit (0x80) set is a full 4-byte block header.
+    while ((ptr < payloadEndPtr) && (*ptr & 0x80))
+    {
+        // 10-bit block length: low 2 bits of byte 2 plus all of byte 3.
+        int len = ((ptr[2] & 0x03) << 8) + ptr[3];
+        if (num_encodings == index)
+        {
+            // Header found.
+            red.payloadType = ptr[0] & 0x7F;
+            // 14-bit timestamp offset: byte 1 plus high 6 bits of byte 2.
+            WebRtc_UWord32 offset = (ptr[1] << 6) + ((ptr[2] & 0xFC) >> 2);
+            red.sequenceNumber = sequenceNumber();
+            red.timeStamp = timeStamp() - offset;
+            red.markerBit = markerBit();
+            red.SSRC = SSRC();
+            return len;
+        }
+        ++num_encodings;
+        total_len += len;
+        ptr += 4;
+    }
+    // The final block has a cleared F bit and only a 1-byte header; its
+    // length is whatever remains after the headers and earlier blocks.
+    if ((ptr < payloadEndPtr) && (num_encodings == index))
+    {
+        // Last header.
+        red.payloadType = ptr[0] & 0x7F;
+        red.sequenceNumber = sequenceNumber();
+        red.timeStamp = timeStamp();
+        red.markerBit = markerBit();
+        red.SSRC = SSRC();
+        ++ptr;
+        return payloadLen() - (ptr - payload()) - total_len;
+    }
+    return -1;
+}
+
+// Overwrite every payload byte with a pseudo-random value (std::rand),
+// leaving the RTP header untouched. Used to exercise decoder error
+// resilience with corrupted payloads.
+void NETEQTEST_RTPpacket::scramblePayload(void)
+{
+    parseHeader();
+
+    WebRtc_UWord8* p = _payloadPtr;
+    WebRtc_UWord8* const end = _payloadPtr + _payloadLen;
+    while (p < end)
+    {
+        *p++ = static_cast<WebRtc_UWord8>(std::rand());
+    }
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
new file mode 100644
index 0000000..0478568
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_RTPPACKET_H
+#define NETEQTEST_RTPPACKET_H
+
+#include <map>
+#include <stdio.h>
+#include "typedefs.h"
+#include "webrtc_neteq_internal.h"
+
+// Channel layout of a payload, used when splitting a stereo RTP stream
+// into master/slave packets (see NETEQTEST_RTPpacket::splitStereo).
+enum stereoModes {
+    stereoModeMono,     // single channel; no splitting performed
+    stereoModeSample1,  // sample-interleaved, 1 byte per sample per channel
+    stereoModeSample2,  // sample-interleaved, 2 bytes per sample per channel
+    stereoModeFrame     // whole channel frames stored back-to-back
+};
+
+// Container for a single RTP packet read from an rtpplay-style dump file.
+// Owns the raw datagram buffer, parses and serializes the 12-byte RTP
+// header, and offers helpers used by the NetEQ test programs (stereo
+// splitting, RED extraction, payload scrambling).
+class NETEQTEST_RTPpacket
+{
+public:
+    NETEQTEST_RTPpacket();
+    NETEQTEST_RTPpacket(const NETEQTEST_RTPpacket& copyFromMe);
+    NETEQTEST_RTPpacket & operator = (const NETEQTEST_RTPpacket & other);
+    // A packet is "false" when it holds no valid data (negative length).
+    bool operator !() const { return (dataLen() < 0); };
+    ~NETEQTEST_RTPpacket();
+    void reset();
+    // File I/O. skipFileHeader() consumes the rtpplay file preamble.
+    static int skipFileHeader(FILE *fp);
+    int readFromFile(FILE *fp);
+    int readFixedFromFile(FILE *fp, size_t len);
+    int writeToFile(FILE *fp);
+    // Add |pt| to _blockList -- NOTE(review): implementation not shown
+    // here; presumably blocked payload types are skipped when reading.
+    // Confirm against the .cc file.
+    void blockPT(WebRtc_UWord8 pt);
+    //WebRtc_Word16 payloadType();
+    // Parse the RTP header from the raw datagram into _rtpInfo.
+    void parseHeader();
+    void parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo);
+    WebRtcNetEQ_RTPInfo const * RTPinfo() const;
+    WebRtc_UWord8 * datagram() const;
+    WebRtc_UWord8 * payload() const;
+    WebRtc_Word16 payloadLen() const;
+    WebRtc_Word16 dataLen() const;
+    bool isParsed() const;
+    bool isLost() const;
+    // Arrival time of the packet (ms), as recorded by setTime().
+    WebRtc_UWord32 time() const { return _receiveTime; };
+
+    // RTP header field accessors.
+    WebRtc_UWord8  payloadType() const;
+    WebRtc_UWord16 sequenceNumber() const;
+    WebRtc_UWord32 timeStamp() const;
+    WebRtc_UWord32 SSRC() const;
+    WebRtc_UWord8  markerBit() const;
+
+    // RTP header field mutators; each returns 0 on success, -1 on error.
+    int setPayloadType(WebRtc_UWord8 pt);
+    int setSequenceNumber(WebRtc_UWord16 sn);
+    int setTimeStamp(WebRtc_UWord32 ts);
+    int setSSRC(WebRtc_UWord32 ssrc);
+    int setMarkerBit(WebRtc_UWord8 mb);
+    void setTime(WebRtc_UWord32 receiveTime) { _receiveTime = receiveTime; };
+
+    int setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo);
+
+    // Split an interleaved stereo payload between this packet (master)
+    // and slaveRtp; see enum stereoModes.
+    int splitStereo(NETEQTEST_RTPpacket& slaveRtp, enum stereoModes mode);
+
+    // Fill |red| with the header of the index-th RED block (RFC 2198);
+    // returns the block length in bytes, or -1 if index is out of range.
+    int extractRED(int index, WebRtcNetEQ_RTPInfo& red);
+
+    // Randomize the payload bytes; the RTP header is left untouched.
+    void scramblePayload(void);
+
+    // Data members are public so the test code can poke at them directly.
+    WebRtc_UWord8 *       _datagram;     // raw packet buffer
+    WebRtc_UWord8 *       _payloadPtr;   // points into _datagram
+    int                 _memSize;        // capacity of _datagram, in bytes
+    WebRtc_Word16         _datagramLen;
+    WebRtc_Word16         _payloadLen;
+    WebRtcNetEQ_RTPInfo  _rtpInfo;       // cached parsed header fields
+    bool                _rtpParsed;      // true once _rtpInfo is valid
+    WebRtc_UWord32        _receiveTime;
+    bool                _lost;
+    std::map<WebRtc_UWord8, bool> _blockList;
+
+private:
+    void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
+    WebRtc_UWord16 parseRTPheader(const WebRtc_UWord8 *datagram, int datagramLen, WebRtcNetEQ_RTPInfo *RTPinfo, WebRtc_UWord8 **payloadPtr = NULL) const;
+    void splitStereoSample(NETEQTEST_RTPpacket& slaveRtp, int stride);
+    void splitStereoFrame(NETEQTEST_RTPpacket& slaveRtp);
+};
+
+#endif //NETEQTEST_RTPPACKET_H
diff --git a/trunk/src/modules/audio_coding/neteq/test/NetEqRTPplay.cc b/trunk/src/modules/audio_coding/neteq/test/NetEqRTPplay.cc
new file mode 100644
index 0000000..d3f7eb5
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/NetEqRTPplay.cc
@@ -0,0 +1,1736 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//TODO(hlundin): Reformat file to meet style guide.
+
+/* header includes */
+#include "typedefs.h"
+#include "stdio.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+#include "webrtc_neteq_help_macros.h"
+#include "neteq_error_codes.h" // for the API test
+
+#include "NETEQTEST_RTPpacket.h"
+#include "NETEQTEST_NetEQClass.h"
+#include "NETEQTEST_CodecClass.h"
+
+#include <string.h>
+#include <stdlib.h>
+#include <time.h>
+#include <map>
+#include <vector>
+
+#ifdef WIN32
+#include <cassert>
+#include <windows.h>
+#endif
+
+#ifdef WEBRTC_LINUX
+#include <netinet/in.h>
+#include <libgen.h>
+#include <cassert>
+#endif
+
+//#include "vld.h"
+
+//#define NETEQ_DELAY_LOGGING
+//#define DUMMY_SLAVE_CHANNEL
+
+#ifdef NETEQ_DELAY_LOGGING
+#include "delay_logging.h"
+#define DUMMY_SLAVE_CHANNEL // do not use a slave channel, only generate zeros instead
+#endif
+
+
+/************************/
+/* Define payload types */
+/************************/
+
+// Payload types are defined in the textfile ptypes.txt, and can be changed after compilation.
+
+
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define TIME_STEP 1
+#define FIRSTLINELEN 40
+#define MAX_NETEQ_BUFFERSIZE	170000 //100000
+#define CHECK_ZERO(a) {int errCode = a; char tempErrName[WEBRTC_NETEQ_MAX_ERROR_NAME]; if((errCode)!=0){errCode = WebRtcNetEQ_GetErrorCode(inst); WebRtcNetEQ_GetErrorName(errCode, tempErrName, WEBRTC_NETEQ_MAX_ERROR_NAME); printf("\n %s \n line: %d \n error at %s\n Error Code = %d\n",__FILE__,__LINE__,#a, errCode); exit(0);}}
+#define CHECK_NOT_NULL(a) if((a)==NULL){printf("\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
+//#define PLAY_CLEAN // ignore arrival times and let the packets arrive according to RTP timestamps
+#define HDR_SIZE 8 // rtpplay packet header size in bytes
+//#define JUNK_DATA   // scramble the payloads to test error resilience
+//#define ZERO_TS_START
+
+#ifdef JUNK_DATA
+    #define SEED_FILE "randseed.txt"
+#endif
+
+#ifdef WIN32
+#define MY_MAX_DRIVE _MAX_DRIVE
+#define MY_MAX_PATH _MAX_PATH
+#define MY_MAX_FNAME _MAX_FNAME
+#define MY_MAX_EXT _MAX_EXT
+
+#elif defined(WEBRTC_LINUX)
+#include <linux/limits.h>
+#define MY_MAX_PATH PATH_MAX
+
+#elif defined(WEBRTC_MAC)
+#include <sys/syslimits.h>
+#define MY_MAX_PATH PATH_MAX
+#endif // WEBRTC_MAC
+
+/************/
+/* Typedefs */
+/************/
+
+// Maps one RTP payload type to its NetEQ decoder configuration, as read
+// from the ptypes.txt file (see parsePtypeFile).
+typedef struct {
+    enum WebRtcNetEQDecoder  codec;  // NetEQ codec identifier for this payload type
+    enum stereoModes    stereo;      // how stereo payloads are interleaved
+    NETEQTEST_Decoder * decoder[2];  // one decoder instance per channel (master/slave)
+    int            fs;               // sample rate in Hz
+} decoderStruct;
+
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen);
+int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime);
+void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d);
+bool splitStereo(NETEQTEST_RTPpacket& rtp, NETEQTEST_RTPpacket& rtpSlave,
+                 const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs, 
+                 const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
+                 bool *isStereo);
+void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders);
+int populateUsedCodec(std::map<WebRtc_UWord8, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec);
+void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<WebRtc_UWord8, decoderStruct>* decoders, int channelNumber);
+void free_coders(std::map<WebRtc_UWord8, decoderStruct> & decoders);
+int doAPItest();
+bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<WebRtc_UWord8, decoderStruct> & decoders, enum stereoModes *stereoMode);
+
+
+
+/********************/
+/* Global variables */
+/********************/
+
+WebRtc_Word16 NetEqPacketBuffer[MAX_NETEQ_BUFFERSIZE>>1];
+WebRtc_Word16 NetEqPacketBufferSlave[MAX_NETEQ_BUFFERSIZE>>1];
+
+#ifdef NETEQ_DELAY_LOGGING
+extern "C" { 
+	FILE *delay_fid2;	/* file pointer */
+	WebRtc_UWord32 tot_received_packets=0;
+} 
+#endif
+
+#ifdef DEF_BUILD_DATE
+extern char BUILD_DATE;
+#endif
+
+WebRtc_UWord32 writtenSamples = 0;
+WebRtc_UWord32 simClock=0;
+
+int main(int argc, char* argv[])
+{
+    std::vector<NETEQTEST_NetEQClass *> NetEQvector;
+    NETEQTEST_RTPpacket rtp;
+	char   version[20];
+
+    NETEQTEST_RTPpacket slaveRtp;
+    //bool switchMS = false;
+    //bool duplicatePayload = false;
+	enum WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd-1];
+	int noOfCodecs;
+	int ok;
+	WebRtc_Word16 out_data[640*2];
+	WebRtc_Word16 outLen, writeLen;
+    int fs = 8000;
+	WebRtcNetEQ_RTCPStat RTCPstat;
+#ifdef WIN32
+	char outdrive[MY_MAX_DRIVE];
+	char outpath[MY_MAX_PATH];
+	char outfile[MY_MAX_FNAME];
+	char outext[MY_MAX_EXT];
+#endif
+	char outfilename[MY_MAX_PATH];
+#ifdef NETEQ_DELAY_LOGGING
+	float clock_float;
+	int temp_var;
+#endif
+#ifdef JUNK_DATA
+    FILE *seedfile;
+#endif
+    FILE *recoutTimes = NULL;
+    FILE *extraDelays = NULL;
+    WebRtcNetEQPlayoutMode streamingMode = kPlayoutOn;
+    bool preParseRTP = false;
+    bool rtpOnly = false;
+    int packetLen = 0;
+    int packetCount = 0;
+    std::map<WebRtc_UWord8, decoderStruct> decoders;
+
+	/* get the version string */
+	WebRtcNetEQ_GetVersion(version);
+	printf("\n\nNetEq version: %s\n", version);
+#ifdef DEF_BUILD_DATE
+	printf("Build time: %s\n", __BUILD_DATE);
+#endif
+
+	/* check number of parameters */
+	if ((argc < 3)
+#ifdef WIN32 // implicit output file name possible for windows
+        && (argc < 2)
+#endif
+        ) {
+		/* print help text and exit */
+		printf("Test program for NetEQ.\n");
+		printf("The program reads an RTP stream from file and inserts it into NetEQ.\n");
+		printf("The format of the RTP stream file should be the same as for rtpplay,\n");
+		printf("and can be obtained e.g., from Ethereal by using\n");
+		printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
+		printf("Usage:\n\n");
+#ifdef WIN32
+		printf("%s RTPfile [outfile] [-options]\n", argv[0]);
+#else
+        printf("%s RTPfile outfile [-options]\n", argv[0]);
+#endif
+		printf("where:\n");
+
+		printf("RTPfile      : RTP stream input file\n\n");
+
+		printf("outfile      : PCM speech output file\n");
+		printf("               Output file name is derived from RTP file name if omitted\n\n");
+
+        printf("-options are optional switches:\n");
+        printf("\t-recout datfile        : supply recout times\n");
+        printf("\t-extradelay datfile    : supply extra delay settings and timing\n");
+        printf("\t-streaming             : engage streaming mode\n");
+        printf("\t-fax                   : engage fax mode\n");
+        printf("\t-preparsertp           : use RecIn with pre-parsed RTP\n");
+        printf("\t-rtponly packLenBytes  : input file consists of constant size RTP packets without RTPplay headers\n");
+        //printf("\t-switchms              : switch from mono to stereo (copy channel) after 10 seconds\n");
+        //printf("\t-duplicate             : use two instances with identical input (2-channel mono)\n");
+
+		return(0);
+	}
+
+	if (strcmp(argv[1], "-apitest")==0) {
+		// do API test and then return
+		ok=doAPItest();
+
+		if (ok==0)
+			printf("API test successful!\n");
+		else
+			printf("API test failed!\n");
+
+		return(ok);
+	}
+
+	FILE* in_file=fopen(argv[1],"rb");
+	CHECK_NOT_NULL(in_file);
+	printf("Input file: %s\n",argv[1]);
+
+    int argIx = 2; // index of next argument from command line
+
+	if ( argc >= 3 && argv[2][0] != '-' ) { // output name given on command line
+		strcpy(outfilename, argv[2]);
+        argIx++;
+	} else { // derive output name from input name
+#ifdef WIN32
+		_splitpath(argv[1],outdrive,outpath,outfile,outext);
+		_makepath(outfilename,outdrive,outpath,outfile,"pcm");
+#else
+        fprintf(stderr,"Output file name must be specified.\n");
+		return(-1);
+#endif
+	}
+	FILE* out_file=fopen(outfilename,"wb");
+	if (out_file==NULL) {
+		fprintf(stderr,"Could not open file %s for writing\n", outfilename);
+		return(-1);
+	}
+	printf("Output file: %s\n",outfilename);
+
+    // Parse for more arguments, all beginning with '-'
+    
+    while( argIx < argc ) {
+        if (argv[argIx][0] != '-') {
+            fprintf(stderr,"Unknown input argument %s\n", argv[argIx]);
+            return(-1);
+        }
+
+        if( strcmp(argv[argIx], "-recout") == 0 ) {
+            argIx++;
+            recoutTimes = fopen(argv[argIx], "rb");
+            CHECK_NOT_NULL(recoutTimes);
+            argIx++;
+        }
+        else if( strcmp(argv[argIx], "-extradelay") == 0 ) {
+            argIx++;
+            extraDelays = fopen(argv[argIx], "rb");
+            CHECK_NOT_NULL(extraDelays);
+            argIx++;
+        }
+        else if( strcmp(argv[argIx], "-streaming") == 0 ) {
+            argIx++;
+            streamingMode = kPlayoutStreaming;
+        }
+        else if( strcmp(argv[argIx], "-fax") == 0 ) {
+            argIx++;
+            streamingMode = kPlayoutFax;
+        }
+        else if( strcmp(argv[argIx], "-preparsertp") == 0 ) {
+            argIx++;
+            preParseRTP = true;
+        }
+        else if( strcmp(argv[argIx], "-rtponly") == 0 ) {
+            argIx++;
+            rtpOnly = true;
+            packetLen = atoi(argv[argIx]);
+            argIx++;
+            if (packetLen <= 0)
+            {
+                printf("Wrong packet size used with argument -rtponly.\n");
+                exit(1);
+            }
+        }
+        //else if( strcmp(argv[argIx], "-switchms") == 0 ) {
+        //    argIx++;
+        //    switchMS = true;
+        //}
+        //else if( strcmp(argv[argIx], "-duplicate") == 0 ) {
+        //    argIx++;
+        //    duplicatePayload = true;
+        //}
+        else {
+            fprintf(stderr,"Unknown input argument %s\n", argv[argIx]);
+            return(-1);
+        }
+    }
+
+
+
+#ifdef NETEQ_DELAY_LOGGING
+	char delayfile[MY_MAX_PATH];
+#ifdef WIN32
+	_splitpath(outfilename,outdrive,outpath,outfile,outext);
+	_makepath(delayfile,outdrive,outpath,outfile,"d");
+#else
+    sprintf(delayfile, "%s.d", outfilename);
+#endif
+	delay_fid2 = fopen(delayfile,"wb");
+	fprintf(delay_fid2, "#!NetEQ_Delay_Logging%s\n", NETEQ_DELAY_LOGGING_VERSION_STRING);
+#endif
+
+	char ptypesfile[MY_MAX_PATH];
+#ifdef WIN32
+    _splitpath(argv[0],outdrive,outpath,outfile,outext);
+	_makepath(ptypesfile,outdrive,outpath,"ptypes","txt");
+#else
+	// TODO(hlundin): Include path to ptypes, as for WIN32 above.
+  strcpy(ptypesfile, "ptypes.txt");
+#endif
+    FILE *ptypeFile = fopen(ptypesfile,"rt");
+    if (!ptypeFile) {
+        // Check if we can find the file at the usual place in the trunk.
+        if (strstr(argv[0], "out/Debug/")) {
+            int path_len = strstr(argv[0], "out/Debug/") - argv[0];
+            strncpy(ptypesfile, argv[0], path_len);
+            ptypesfile[path_len] = '\0';
+            strcat(ptypesfile,
+                   "src/modules/audio_coding/NetEQ/main/test/ptypes.txt");
+            ptypeFile = fopen(ptypesfile,"rt");
+        }
+    }
+    CHECK_NOT_NULL(ptypeFile);
+    printf("Ptypes file: %s\n\n", ptypesfile);
+
+    parsePtypeFile(ptypeFile, &decoders);
+    fclose(ptypeFile);
+
+    noOfCodecs = populateUsedCodec(&decoders, usedCodec);
+
+
+	/* read RTP file header */
+    if (!rtpOnly)
+    {
+        if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0)
+        {
+            fprintf(stderr, "Wrong format in RTP file.\n");
+            return -1;
+        }
+    }
+
+    /* check payload type for first speech packet */
+    long tempFilePos = ftell(in_file);
+    enum stereoModes stereoMode = stereoModeMono;
+
+    if (!rtpOnly)
+    {
+        while (rtp.readFromFile(in_file) >= 0)
+        {
+            if (decoders.count(rtp.payloadType()) > 0
+                && decoders[rtp.payloadType()].codec != kDecoderRED
+                && decoders[rtp.payloadType()].codec != kDecoderAVT
+                && decoders[rtp.payloadType()].codec != kDecoderCNG )
+            {
+                stereoMode = decoders[rtp.payloadType()].stereo;
+                fs = decoders[rtp.payloadType()].fs;
+                break;
+            }
+        }
+    }
+    else
+    {
+        while (rtp.readFixedFromFile(in_file, packetLen) >= 0)
+        {
+            if (decoders.count(rtp.payloadType()) > 0
+                && decoders[rtp.payloadType()].codec != kDecoderRED
+                && decoders[rtp.payloadType()].codec != kDecoderAVT
+                && decoders[rtp.payloadType()].codec != kDecoderCNG )
+            {
+                stereoMode = decoders[rtp.payloadType()].stereo;
+                fs = decoders[rtp.payloadType()].fs;
+                break;
+            }
+        }
+    }
+
+    fseek(in_file, tempFilePos, SEEK_SET /* from beginning */);
+
+
+    /* block some payload types */
+    //rtp.blockPT(72);
+    //rtp.blockPT(23);
+
+	/* read first packet */
+    if (!rtpOnly)
+    {
+        rtp.readFromFile(in_file);
+    }
+    else
+    {
+        rtp.readFixedFromFile(in_file, packetLen);
+        rtp.setTime((1000 * rtp.timeStamp()) / fs);
+    }
+    if (!rtp)
+    {
+        printf("\nWarning: RTP file is empty\n\n");
+    }
+
+
+  	/* Initialize NetEQ instances */
+    int numInst = 1;
+    if (stereoMode > stereoModeMono)
+    {
+        numInst = 2;
+    }
+
+    for (int i = 0; i < numInst; i++)
+    {
+        // create memory, allocate, initialize, and allocate packet buffer memory
+        NetEQvector.push_back (new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, static_cast<WebRtc_UWord16>(fs), kTCPLargeJitter));
+
+        createAndInsertDecoders (NetEQvector[i], &decoders, i /* channel */);
+
+        WebRtcNetEQ_SetAVTPlayout(NetEQvector[i]->instance(),1); // enable DTMF playout
+
+        WebRtcNetEQ_SetPlayoutMode(NetEQvector[i]->instance(), streamingMode);
+
+        NetEQvector[i]->usePreparseRTP(preParseRTP);
+
+        if (numInst > 1)
+        {
+            // we are using master/slave mode
+            if (i == 0)
+            {
+                // first instance is master
+                NetEQvector[i]->isMaster();
+            }
+            else
+            {
+                // all other are slaves
+                NetEQvector[i]->isSlave();
+            }
+        }
+    }
+
+
+#ifdef ZERO_TS_START
+    WebRtc_UWord32 firstTS = rtp.timeStamp();
+    rtp.setTimeStamp(0);
+#else
+    WebRtc_UWord32 firstTS = 0;
+#endif
+
+    // check stereo mode
+    if (stereoMode > stereoModeMono)
+    {
+        if(rtp.splitStereo(slaveRtp, stereoMode))
+        {
+            printf("Error in splitStereo\n");
+        }
+    }
+
+#ifdef PLAY_CLEAN
+	WebRtc_UWord32 prevTS = rtp.timeStamp(); 
+	WebRtc_UWord32 currTS, prev_time;
+#endif
+
+#ifdef JUNK_DATA
+    unsigned int random_seed = (unsigned int) /*1196764538; */time(NULL);
+    srand(random_seed);
+
+    if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+        fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+    }
+    else {
+        fprintf(seedfile, "%u\n", random_seed);
+        fclose(seedfile);
+    }
+#endif
+
+    WebRtc_UWord32 nextRecoutTime;
+    int lastRecout = getNextRecoutTime(recoutTimes, &nextRecoutTime); // does nothing if recoutTimes == NULL
+
+    if (recoutTimes)
+        simClock = (rtp.time() < nextRecoutTime ? rtp.time(): nextRecoutTime);
+    else
+        simClock = rtp.time(); // start immediately with first packet
+
+    WebRtc_UWord32 start_clock = simClock;
+
+    WebRtc_UWord32 nextExtraDelayTime;
+    int extraDelay = -1;
+    getNextExtraDelay(extraDelays, &nextExtraDelayTime, &extraDelay);
+
+    void *msInfo;
+    msInfo = malloc(WebRtcNetEQ_GetMasterSlaveInfoSize());
+    if(msInfo == NULL)
+        return(-1);
+
+    while(rtp.dataLen() >= 0 || (recoutTimes && !lastRecout)) {
+//        printf("simClock = %Lu\n", simClock);
+		
+#ifdef NETEQ_DELAY_LOGGING
+		temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CLOCK;
+		clock_float = (float) simClock;
+		fwrite(&temp_var,sizeof(int),1,delay_fid2);
+		fwrite(&clock_float, sizeof(float),1,delay_fid2);
+#endif
+        /* time to set extra delay */
+        if (extraDelay > -1 && simClock >= nextExtraDelayTime) {
+            // set extra delay for all instances
+            for (int i = 0; i < numInst; i++)
+            {
+                WebRtcNetEQ_SetExtraDelay(NetEQvector[i]->instance(), extraDelay);
+            }
+            getNextExtraDelay(extraDelays, &nextExtraDelayTime, &extraDelay);
+        }
+
+		/* check if time to receive */
+        while (simClock >= rtp.time() && rtp.dataLen() >= 0)
+        {
+            if (rtp.dataLen() > 0)
+            {
+
+                // insert main packet
+                NetEQvector[0]->recIn(rtp);
+
+                if (stereoMode > stereoModeMono
+                    && slaveRtp.dataLen() > 0)
+                {
+                    // insert slave packet
+                    NetEQvector[1]->recIn(slaveRtp);
+                }
+
+			}
+
+			/* get next packet */
+#ifdef PLAY_CLEAN
+			prev_time = rtp.time();
+#endif
+            if (!rtpOnly)
+            {
+                rtp.readFromFile(in_file);
+            }
+            else
+            {
+                rtp.readFixedFromFile(in_file, packetLen);
+                rtp.setTime((1000 * rtp.timeStamp()) / fs);
+            }
+
+            if (rtp.dataLen() >= 0)
+            {
+                rtp.setTimeStamp(rtp.timeStamp() - firstTS);
+            }
+
+            packetCount++;
+
+            if (changeStereoMode(rtp, decoders, &stereoMode))
+            {
+                printf("Warning: stereo mode changed\n");
+            }
+
+            if (stereoMode > stereoModeMono)
+            {
+                if(rtp.splitStereo(slaveRtp, stereoMode))
+                {
+                    printf("Error in splitStereo\n");
+                }
+            }
+
+#ifdef PLAY_CLEAN
+			currTS = rtp.timeStamp(); 
+			rtp.setTime(prev_time + (currTS-prevTS)/(fs/1000));
+			prevTS = currTS;
+#endif
+		}
+		
+		/* check if time to RecOut */
+		if ( (!recoutTimes && (simClock%10)==0) // recout times not given from file
+        || ( recoutTimes && (simClock >= nextRecoutTime) ) ) // recout times given from file
+        {
+            if (stereoMode > stereoModeMono)
+            {
+                // stereo
+                WebRtc_Word16 tempLen; 
+                tempLen = NetEQvector[0]->recOut( out_data, msInfo ); // master
+                outLen = NetEQvector[1]->recOut( &out_data[tempLen], msInfo ); // slave
+
+                assert(tempLen == outLen);
+
+                writeLen = outLen * 2;
+                stereoInterleave(out_data, writeLen);
+            }
+            else
+            {
+                // mono
+                outLen = NetEQvector[0]->recOut( out_data );
+                writeLen = outLen;
+            }
+
+            // write to file
+            fwrite(out_data,writeLen,2,out_file);
+            writtenSamples += writeLen;
+
+
+            lastRecout = getNextRecoutTime(recoutTimes, &nextRecoutTime); // does nothing if recoutTimes == NULL
+
+            /* ask for statistics */
+            WebRtcNetEQ_NetworkStatistics inCallStats;
+            WebRtcNetEQ_GetNetworkStatistics(NetEQvector[0]->instance(), &inCallStats);
+
+        }
+
+		/* increase time */
+		simClock+=TIME_STEP;
+	}
+
+	fclose(in_file);
+	fclose(out_file);
+
+#ifdef NETEQ_DELAY_LOGGING
+	temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EOF;
+	fwrite(&temp_var,sizeof(int),1,delay_fid2);
+	fwrite(&tot_received_packets,sizeof(WebRtc_UWord32),1,delay_fid2);
+	fprintf(delay_fid2,"End of file\n");
+	fclose(delay_fid2);
+#endif
+
+	WebRtcNetEQ_GetRTCPStats(NetEQvector[0]->instance(), &RTCPstat);
+	printf("RTCP statistics:\n");
+	printf("	cum_lost        : %d\n", (int) RTCPstat.cum_lost);
+	printf("	ext_max         : %d\n", (int) RTCPstat.ext_max);
+	printf("	fraction_lost   : %d (%f%%)\n", RTCPstat.fraction_lost, (float)(100.0*RTCPstat.fraction_lost/256.0));
+	printf("	jitter          : %d\n", (int) RTCPstat.jitter);
+
+    printf("\n    Call duration ms    : %u\n", simClock-start_clock);
+
+    printf("\nComplexity estimates (including sub-components):\n");
+    printf("    RecIn complexity    : %.2f MCPS\n", NetEQvector[0]->getRecInTime() / ((float) 1000*(simClock-start_clock)));
+    printf("    RecOut complexity   : %.2f MCPS\n", NetEQvector[0]->getRecOutTime() / ((float) 1000*(simClock-start_clock)));
+
+    free_coders(decoders);
+	//free_coders(0 /* first channel */);
+ //   if (stereoMode > stereoModeMono) {
+ //       free_coders(1 /* second channel */);
+ //   }
+    free(msInfo);
+
+    for (std::vector<NETEQTEST_NetEQClass *>::iterator it = NetEQvector.begin(); 
+        it < NetEQvector.end(); delete *it++);
+
+	printf("\nSimulation done!\n");
+
+#ifdef JUNK_DATA
+    if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+        fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+    }
+    else {
+        fprintf(seedfile, "ok\n\n");
+        fclose(seedfile);
+    }
+#endif
+
+
+    // Log complexity to file
+/*    FILE *statfile;
+    statfile = fopen("complexity.txt","at");
+    fprintf(statfile,"%.4f, %.4f\n", (float) totTime_RecIn.QuadPart / ((float) 1000*(simClock-start_clock)), (float) totTime_RecOut.QuadPart / ((float) 1000*(simClock-start_clock)));
+    fclose(statfile);*/
+
+	return(0);
+
+}
+
+
+
+
+
+/****************/
+/* Subfunctions */
+/****************/
+
+bool splitStereo(NETEQTEST_RTPpacket& rtp, NETEQTEST_RTPpacket& rtpSlave,
+                 const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs, 
+                 const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
+                 bool *isStereo)
+{
+
+    // init
+    //bool isStereo = false;
+    enum stereoModes tempStereoMode = stereoModeMono;
+    bool isCng = false;
+
+    // check payload length
+    if (rtp.dataLen() <= 0) {
+        //*isStereo = false; // don't change
+        return(*isStereo);
+    }
+
+    // check payload type
+    WebRtc_Word16 ptype = rtp.payloadType();
+
+    // is this a cng payload?
+    for (int k = 0; k < noOfCngCodecs; k++) {
+        if (ptype == cngPtype[k]) {
+            // do not change stereo state
+            isCng = true;
+            tempStereoMode = stereoModeFrame;
+        }
+    }
+
+    if (!isCng)
+    {
+        *isStereo = false;
+
+        // is this payload type a stereo codec? which type?
+        for (int k = 0; k < noOfStereoCodecs; k++) {
+            if (ptype == stereoPtype[k]) {
+                tempStereoMode = stereoMode[k];
+                *isStereo = true;
+                break; // exit for loop
+            }
+        }
+    }
+
+    if (*isStereo)
+    {
+        // split the payload if stereo
+
+        if(rtp.splitStereo(rtpSlave, tempStereoMode))
+        {
+            printf("Error in splitStereo\n");
+        }
+
+    }
+
+    return(*isStereo);
+
+}
+
+void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen)
+{
+    int k;
+
+    for(k = totalLen/2; k < totalLen; k++) {
+        WebRtc_Word16 temp = data[k];
+        memmove(&data[2*k - totalLen + 2], &data[2*k - totalLen + 1], (totalLen - k -1) *  sizeof(WebRtc_Word16));
+        data[2*k - totalLen + 1] = temp;
+    }
+}
+
+
+int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime) {
+
+    float tempTime;
+
+    if (!fp) {
+        return -1;
+    }
+
+    if (fread(&tempTime, sizeof(float), 1, fp) != 0) {
+        // not end of file
+        *nextTime = (WebRtc_UWord32) tempTime;
+        return 0;
+    }
+    
+    *nextTime = 0;
+    fclose(fp);
+
+    return 1;
+}
+
+void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d) {
+
+    float temp[2];
+
+    if(!fp) {
+        *d = -1;
+        return;
+    }
+
+    if (fread(&temp, sizeof(float), 2, fp) != 0) {
+        // not end of file
+        *t = (WebRtc_UWord32) temp[0];
+        *d = (int) temp[1];
+        return;
+    }
+    
+    *d = -1;
+    fclose(fp);
+
+    return;
+}
+    
+
+void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders)
+{
+    int n, pt;
+    char codec[100];
+    decoderStruct tempDecoder;
+
+    // read first line
+    n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+
+    while (n==2)
+    {
+        memset(&tempDecoder, 0, sizeof(decoderStruct));
+        tempDecoder.stereo = stereoModeMono;
+
+        if( pt >= 0  // < 0 disables this codec
+            && isalpha(codec[0]) ) // and is a letter
+        {
+
+            /* check for stereo */
+            int L = strlen(codec);
+            bool isStereo = false;
+
+            if (codec[L-1] == '*') {
+                // stereo codec 
+                isStereo = true;
+
+                // remove '*'
+                codec[L-1] = '\0';
+            }
+
+#ifdef CODEC_G711
+            if(strcmp(codec, "pcmu") == 0) {
+                tempDecoder.codec = kDecoderPCMu;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "pcma") == 0) {
+                tempDecoder.codec = kDecoderPCMa;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_IPCMU
+            else if(strcmp(codec, "eg711u") == 0) {
+                tempDecoder.codec = kDecoderEG711u;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_IPCMA
+            else if(strcmp(codec, "eg711a") == 0) {
+                tempDecoder.codec = kDecoderEG711a;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_ILBC
+            else if(strcmp(codec, "ilbc") == 0) {
+                tempDecoder.codec = kDecoderILBC;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_ISAC
+            else if(strcmp(codec, "isac") == 0) {
+                tempDecoder.codec = kDecoderISAC;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_ISACLC
+            else if(strcmp(codec, "isaclc") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_ISACLC;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_ISAC_SWB
+            else if(strcmp(codec, "isacswb") == 0) {
+                tempDecoder.codec = kDecoderISACswb;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_IPCMWB
+            else if(strcmp(codec, "ipcmwb") == 0) {
+                tempDecoder.codec = kDecoderIPCMwb;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722
+            else if(strcmp(codec, "g722") == 0) {
+                tempDecoder.codec = kDecoderG722;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1_16
+            else if(strcmp(codec, "g722_1_16") == 0) {
+                tempDecoder.codec = kDecoderG722_1_16;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1_24
+            else if(strcmp(codec, "g722_1_24") == 0) {
+                tempDecoder.codec = kDecoderG722_1_24;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1_32
+            else if(strcmp(codec, "g722_1_32") == 0) {
+                tempDecoder.codec = kDecoderG722_1_32;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1C_24
+            else if(strcmp(codec, "g722_1c_24") == 0) {
+                tempDecoder.codec = kDecoderG722_1C_24;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_G722_1C_32
+            else if(strcmp(codec, "g722_1c_32") == 0) {
+                tempDecoder.codec = kDecoderG722_1C_32;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_G722_1C_48
+            else if(strcmp(codec, "g722_1c_48") == 0) {
+                tempDecoder.codec = kDecoderG722_1C_48;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_G723
+            else if(strcmp(codec, "g723") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_G723;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G726
+            else if(strcmp(codec, "g726_16") == 0) {
+                tempDecoder.codec = kDecoderG726_16;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "g726_24") == 0) {
+                tempDecoder.codec = kDecoderG726_24;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "g726_32") == 0) {
+                tempDecoder.codec = kDecoderG726_32;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "g726_40") == 0) {
+                tempDecoder.codec = kDecoderG726_40;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G729
+            else if(strcmp(codec, "g729") == 0) {
+                tempDecoder.codec = kDecoderG729;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G729D
+            else if(strcmp(codec, "g729d") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_G729D;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G729_1
+            else if(strcmp(codec, "g729_1") == 0) {
+                tempDecoder.codec = kDecoderG729_1;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_GSMFR
+            else if(strcmp(codec, "gsmfr") == 0) {
+                tempDecoder.codec = kDecoderGSMFR;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_GSMEFR
+            else if(strcmp(codec, "gsmefr") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_GSMEFR;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_AMR
+            else if(strcmp(codec, "amr") == 0) {
+                tempDecoder.codec = kDecoderAMR;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_AMRWB
+            else if(strcmp(codec, "amrwb") == 0) {
+                tempDecoder.codec = kDecoderAMRWB;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_DVI4
+            else if(strcmp(codec, "dvi4") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_DVI4;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_SPEEX_8
+            else if(strcmp(codec, "speex8") == 0) {
+                tempDecoder.codec = kDecoderSPEEX_8;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_SPEEX_16
+            else if(strcmp(codec, "speex16") == 0) {
+                tempDecoder.codec = kDecoderSPEEX_16;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_CELT_32
+            else if(strcmp(codec, "celt32") == 0) {
+                tempDecoder.codec = kDecoderCELT_32;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_SILK_NB
+            else if(strcmp(codec, "silk8") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_8;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_SILK_WB
+            else if(strcmp(codec, "silk12") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_12;
+                tempDecoder.fs = 16000;
+            }
+            else if(strcmp(codec, "silk16") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_16;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_SILK_SWB
+            else if(strcmp(codec, "silk24") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_24;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_MELPE
+            else if(strcmp(codec, "melpe") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_MELPE;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_PCM16B
+            else if(strcmp(codec, "pcm16b") == 0) {
+                tempDecoder.codec = kDecoderPCM16B;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_PCM16B_WB
+            else if(strcmp(codec, "pcm16b_wb") == 0) {
+                tempDecoder.codec = kDecoderPCM16Bwb;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+            else if(strcmp(codec, "pcm16b_swb32khz") == 0) {
+                tempDecoder.codec = kDecoderPCM16Bswb32kHz;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+            else if(strcmp(codec, "pcm16b_swb48khz") == 0) {
+                tempDecoder.codec = kDecoderPCM16Bswb48kHz;
+                tempDecoder.fs = 48000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC8
+            else if(strcmp(codec, "cn") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC16
+            else if(strcmp(codec, "cn_wb") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC32
+            else if(strcmp(codec, "cn_swb32") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC48
+            else if(strcmp(codec, "cn_swb48") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 48000;
+            }
+#endif
+#ifdef CODEC_ATEVENT_DECODE
+            else if(strcmp(codec, "avt") == 0) {
+                tempDecoder.codec = kDecoderAVT;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_RED
+            else if(strcmp(codec, "red") == 0) {
+                tempDecoder.codec = kDecoderRED;
+                tempDecoder.fs = 8000;
+            }
+#endif
+            else if(isalpha(codec[0])) {
+                printf("Unsupported codec %s\n", codec);
+                // read next line and continue while loop
+                n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+                continue;
+            }
+            else {
+                // name is not recognized, and does not start with a letter
+                // hence, it is commented out
+                // read next line and continue while loop
+                n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+                continue;
+            }
+
+            // handle stereo
+            if (tempDecoder.codec == kDecoderCNG)
+            {
+                // always set stereo mode for CNG, even if it is not marked at stereo
+                tempDecoder.stereo = stereoModeFrame;
+            }
+            else if(isStereo)
+            {
+                switch(tempDecoder.codec) {
+                    // sample based codecs 
+                    case kDecoderPCMu:
+                    case kDecoderPCMa:
+                    case kDecoderG722:
+                        {
+                            // 1 octet per sample
+                            tempDecoder.stereo = stereoModeSample1;
+                            break;
+                        }
+                    case kDecoderPCM16B:
+                    case kDecoderPCM16Bwb:
+                    case kDecoderPCM16Bswb32kHz:
+                    case kDecoderPCM16Bswb48kHz:
+                        {
+                            // 2 octets per sample
+                            tempDecoder.stereo = stereoModeSample2;
+                            break;
+                        }
+
+                        // fixed-rate frame codecs
+//                    case kDecoderG729:
+//                    case NETEQ_CODEC_G729D:
+//                    case NETEQ_CODEC_G729E:
+//                    case kDecoderG722_1_16:
+//                    case kDecoderG722_1_24:
+//                    case kDecoderG722_1_32:
+//                    case kDecoderG722_1C_24:
+//                    case kDecoderG722_1C_32:
+//                    case kDecoderG722_1C_48:
+//                    case NETEQ_CODEC_MELPE:
+//                        {
+//                            tempDecoder.stereo = stereoModeFrame;
+//                            break;
+//                        }
+                    default:
+                        {
+                            printf("Cannot use codec %s as stereo codec\n", codec);
+                            exit(0);
+                        }
+                }
+            }
+
+            if (pt > 127)
+            {
+                printf("Payload type must be less than 128\n");
+                exit(0);
+            }
+
+            // insert into codecs map
+            (*decoders)[static_cast<WebRtc_UWord8>(pt)] = tempDecoder;
+
+        }
+
+        n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+    } // end while
+
+}
+
+
+bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<WebRtc_UWord8, decoderStruct> & decoders, enum stereoModes *stereoMode)
+{
+        if (decoders.count(rtp.payloadType()) > 0
+            && decoders[rtp.payloadType()].codec != kDecoderRED
+            && decoders[rtp.payloadType()].codec != kDecoderAVT
+            && decoders[rtp.payloadType()].codec != kDecoderCNG )
+        {
+            if (decoders[rtp.payloadType()].stereo != *stereoMode)
+            {
+                *stereoMode = decoders[rtp.payloadType()].stereo;
+                return true; // stereo mode did change
+            }
+        }
+
+        return false; // stereo mode did not change
+}
+
+
+int populateUsedCodec(std::map<WebRtc_UWord8, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec)
+{
+    int numCodecs = 0;
+
+    std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+
+    it = decoders->begin();
+
+    for (int i = 0; i < static_cast<int>(decoders->size()); i++, it++)
+    {
+        usedCodec[numCodecs] = (*it).second.codec;
+        numCodecs++;
+    }
+
+    return numCodecs;
+}
+
+
// Creates a decoder object for each entry in |decoders| and registers it
// with |neteq|. For channelNumber == 0 (master channel) every entry gets a
// decoder; for higher channel numbers only stereo-capable entries
// (stereo > stereoModeMono) do. The new object is stored in
// decoder[channelNumber] of the map entry and loaded into the NetEQ
// instance via loadToNetEQ(). Exits the program on an unknown codec type.
void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<WebRtc_UWord8, decoderStruct>* decoders, int channelNumber)
{
    std::map<WebRtc_UWord8, decoderStruct>::iterator it;

    for (it = decoders->begin(); it != decoders->end();  it++)
    {
        // Master channel gets every decoder; slave channels only get
        // decoders for stereo payload types.
        if (channelNumber == 0 ||
            ((*it).second.stereo > stereoModeMono ))
        {
            // create decoder instance
            WebRtc_UWord8 pt = static_cast<WebRtc_UWord8>( (*it).first );
            NETEQTEST_Decoder **dec = &((*it).second.decoder[channelNumber]);
            enum WebRtcNetEQDecoder type = (*it).second.codec;

            // Instantiate the wrapper class matching the codec enum. Each
            // case is compiled in only when the codec is enabled at build
            // time, mirroring the table in parsePtypeFile().
            switch (type)
            {
#ifdef CODEC_G711
            case kDecoderPCMu:
                *dec = new decoder_PCMU( pt );
                break;
            case kDecoderPCMa:
                *dec = new decoder_PCMA( pt );
                break;
#endif
#ifdef CODEC_IPCMU
            case kDecoderEG711u:
                *dec = new decoder_IPCMU( pt );
                break;
#endif
#ifdef CODEC_IPCMA
            case kDecoderEG711a:
                *dec = new decoder_IPCMA( pt );
                break;
#endif
#ifdef CODEC_IPCMWB
            case kDecoderIPCMwb:
                *dec = new decoder_IPCMWB( pt );
                break;
#endif
#ifdef CODEC_ILBC
            case kDecoderILBC:
                *dec = new decoder_ILBC( pt );
                break;
#endif
#ifdef CODEC_ISAC
            case kDecoderISAC:
                *dec = new decoder_iSAC( pt );
                break;
#endif
#ifdef CODEC_ISAC_SWB
            case kDecoderISACswb:
                *dec = new decoder_iSACSWB( pt );
                break;
#endif
#ifdef CODEC_G729
            case kDecoderG729:
                *dec = new decoder_G729( pt );
                break;
            case NETEQ_CODEC_G729D:
                printf("Error: G729D not supported\n");
                break;
#endif
#ifdef CODEC_G729E
            case NETEQ_CODEC_G729E:
                *dec = new decoder_G729E( pt );
                break;
#endif
#ifdef CODEC_G729_1
            case kDecoderG729_1:
                *dec = new decoder_G729_1( pt );
                break;
#endif
#ifdef CODEC_G723
            case NETEQ_CODEC_G723:
                *dec = new decoder_G723( pt );
                break;
#endif
#ifdef CODEC_PCM16B
            case kDecoderPCM16B:
                *dec = new decoder_PCM16B_NB( pt );
                break;
#endif
#ifdef CODEC_PCM16B_WB
            case kDecoderPCM16Bwb:
                *dec = new decoder_PCM16B_WB( pt );
                break;
#endif
#ifdef CODEC_PCM16B_32KHZ
            case kDecoderPCM16Bswb32kHz:
                *dec = new decoder_PCM16B_SWB32( pt );
                break;
#endif
#ifdef CODEC_PCM16B_48KHZ
            case kDecoderPCM16Bswb48kHz:
                *dec = new decoder_PCM16B_SWB48( pt );
                break;
#endif
#ifdef CODEC_DVI4
            case NETEQ_CODEC_DVI4:
                *dec = new decoder_DVI4( pt );
                break;
#endif
#ifdef CODEC_G722
            case kDecoderG722:
                *dec = new decoder_G722( pt );
                break;
#endif
#ifdef CODEC_G722_1_16
            case kDecoderG722_1_16:
                *dec = new decoder_G722_1_16( pt );
                break;
#endif
#ifdef CODEC_G722_1_24
            case kDecoderG722_1_24:
                *dec = new decoder_G722_1_24( pt );
                break;
#endif
#ifdef CODEC_G722_1_32
            case kDecoderG722_1_32:
                *dec = new decoder_G722_1_32( pt );
                break;
#endif
#ifdef CODEC_G722_1C_24
            case kDecoderG722_1C_24:
                *dec = new decoder_G722_1C_24( pt );
                break;
#endif
#ifdef CODEC_G722_1C_32
            case kDecoderG722_1C_32:
                *dec = new decoder_G722_1C_32( pt );
                break;
#endif
#ifdef CODEC_G722_1C_48
            case kDecoderG722_1C_48:
                *dec = new decoder_G722_1C_48( pt );
                break;
#endif
#ifdef CODEC_AMR
            case kDecoderAMR:
                *dec = new decoder_AMR( pt );
                break;
#endif
#ifdef CODEC_AMRWB
            case kDecoderAMRWB:
                *dec = new decoder_AMRWB( pt );
                break;
#endif
#ifdef CODEC_GSMFR
            case kDecoderGSMFR:
                *dec = new decoder_GSMFR( pt );
                break;
#endif
#ifdef CODEC_GSMEFR
            case NETEQ_CODEC_GSMEFR:
                *dec = new decoder_GSMEFR( pt );
                break;
#endif
#ifdef CODEC_G726
            case kDecoderG726_16:
                *dec = new decoder_G726_16( pt );
                break;
            case kDecoderG726_24:
                *dec = new decoder_G726_24( pt );
                break;
            case kDecoderG726_32:
                *dec = new decoder_G726_32( pt );
                break;
            case kDecoderG726_40:
                *dec = new decoder_G726_40( pt );
                break;
#endif
#ifdef CODEC_MELPE
            case NETEQ_CODEC_MELPE:
#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
                *dec = new decoder_MELPE( pt );
#endif
                break;
#endif
#ifdef CODEC_SPEEX_8
            case kDecoderSPEEX_8:
                *dec = new decoder_SPEEX( pt, 8000 );
                break;
#endif
#ifdef CODEC_SPEEX_16
            case kDecoderSPEEX_16:
                *dec = new decoder_SPEEX( pt, 16000 );
                break;
#endif
#ifdef CODEC_CELT_32
            case kDecoderCELT_32:
                *dec = new decoder_CELT( pt, 32000 );
                break;
#endif
#ifdef CODEC_RED
            case kDecoderRED:
                *dec = new decoder_RED( pt );
                break;
#endif
#ifdef CODEC_ATEVENT_DECODE
            case kDecoderAVT:
                *dec = new decoder_AVT( pt );
                break;
#endif
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
            case kDecoderCNG:
                *dec = new decoder_CNG( pt, static_cast<WebRtc_UWord16>((*it).second.fs) );
                break;
#endif
#ifdef CODEC_ISACLC
            case NETEQ_CODEC_ISACLC:
                *dec = new decoder_iSACLC( pt );
                break;
#endif
#ifdef CODEC_SILK_NB
            case NETEQ_CODEC_SILK_8:
#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
                *dec = new decoder_SILK8( pt );
#endif
				break;
#endif
#ifdef CODEC_SILK_WB
            case NETEQ_CODEC_SILK_12:
#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
                *dec = new decoder_SILK12( pt );
#endif
                break;
#endif
#ifdef CODEC_SILK_WB
            case NETEQ_CODEC_SILK_16:
#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
                *dec = new decoder_SILK16( pt );
#endif
                break;
#endif
#ifdef CODEC_SILK_SWB
            case NETEQ_CODEC_SILK_24:
#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
                *dec = new decoder_SILK24( pt );
#endif
                break;
#endif

            default:
                printf("Unknown codec type encountered in createAndInsertDecoders\n");
                exit(0);
            }

            // insert into codec DB
            // Some cases above (MELPE/SILK outside MSVC builds) leave *dec
            // unset, so only load a decoder that was actually created.
            if (*dec)
            {
                (*dec)->loadToNetEQ(*neteq);
            }
        }
    }

}
+
+
+void free_coders(std::map<WebRtc_UWord8, decoderStruct> & decoders)
+{
+    std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+
+    for (it = decoders.begin(); it != decoders.end();  it++)
+    {
+        if ((*it).second.decoder[0])
+        {
+            delete (*it).second.decoder[0];
+        }
+
+        if ((*it).second.decoder[1])
+        {
+            delete (*it).second.decoder[1];
+        }
+    }
+}
+
+
+
+#include "pcm16b.h"
+#include "g711_interface.h"
+#include "isac.h"
+
+int doAPItest() {
+
+	char   version[20];
+	void *inst;
+	enum WebRtcNetEQDecoder usedCodec;
+	int NetEqBufferMaxPackets, BufferSizeInBytes;
+	WebRtcNetEQ_CodecDef codecInst;
+	WebRtcNetEQ_RTCPStat RTCPstat;
+	WebRtc_UWord32 timestamp;
+	int memorySize;
+	int ok;
+		
+	printf("API-test:\n");
+
+	/* get the version string */
+	WebRtcNetEQ_GetVersion(version);
+	printf("NetEq version: %s\n\n", version);
+
+	/* test that API functions return -1 if instance is NULL */
+#define CHECK_MINUS_ONE(x) {int errCode = x; if((errCode)!=-1){printf("\n API test failed at line %d: %s. Function did not return -1 as expected\n",__LINE__,#x); return(-1);}} 
+//#define RESET_ERROR(x) ((MainInst_t*) x)->ErrorCode = 0;
+	inst = NULL;
+
+	CHECK_MINUS_ONE(WebRtcNetEQ_GetErrorCode(inst))
+	CHECK_MINUS_ONE(WebRtcNetEQ_Assign(&inst, NULL))
+//	printf("WARNING: Test of WebRtcNetEQ_Assign() is disabled due to a bug.\n");
+	usedCodec=kDecoderPCMu;
+	CHECK_MINUS_ONE(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter,  &NetEqBufferMaxPackets, &BufferSizeInBytes))
+	CHECK_MINUS_ONE(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
+
+	CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 8000))
+	CHECK_MINUS_ONE(WebRtcNetEQ_SetAVTPlayout(inst, 0))
+	CHECK_MINUS_ONE(WebRtcNetEQ_SetExtraDelay(inst, 17))
+	CHECK_MINUS_ONE(WebRtcNetEQ_SetPlayoutMode(inst, kPlayoutOn))
+	
+	CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbReset(inst))
+	CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+	CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbRemove(inst, usedCodec))
+	WebRtc_Word16 temp1, temp2;
+	CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2))
+	CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetCodecInfo(inst, 0, &usedCodec))
+
+	CHECK_MINUS_ONE(WebRtcNetEQ_RecIn(inst, &temp1, 17, 4711))
+	CHECK_MINUS_ONE(WebRtcNetEQ_RecOut(inst, &temp1, &temp2))
+	CHECK_MINUS_ONE(WebRtcNetEQ_GetRTCPStats(inst, &RTCPstat)); // error here!!!
+	CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechTimeStamp(inst, &timestamp))
+	WebRtcNetEQOutputType temptype;
+	CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechOutputType(inst, &temptype))
+
+	WebRtc_UWord8 tempFlags;
+	WebRtc_UWord16 utemp1, utemp2;
+	CHECK_MINUS_ONE(WebRtcNetEQ_VQmonRecOutStatistics(inst, &utemp1, &utemp2, &tempFlags))
+	CHECK_MINUS_ONE(WebRtcNetEQ_VQmonGetRxStatistics(inst, &utemp1, &utemp2))
+
+	WebRtcNetEQ_AssignSize(&memorySize);
+	CHECK_ZERO(WebRtcNetEQ_Assign(&inst, malloc(memorySize)))
+
+	/* init with wrong sample frequency */
+	CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 17))
+	
+	/* init with correct fs */
+	CHECK_ZERO(WebRtcNetEQ_Init(inst, 8000))
+
+	/* GetRecommendedBufferSize with wrong codec */
+	usedCodec=kDecoderReservedStart;
+	ok = WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes);
+	if((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNKNOWN_CODEC))){
+		printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong codec.\n");
+		printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
+	}
+	//RESET_ERROR(inst)
+
+	/* GetRecommendedBufferSize with wrong network type */
+	usedCodec = kDecoderPCMu;
+	ok=WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, (enum WebRtcNetEQNetworkType) 4711 , &NetEqBufferMaxPackets, &BufferSizeInBytes);
+	if ((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_NETWORK_TYPE))) {
+		printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong network type.\n");
+		printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
+		//RESET_ERROR(inst)
+	}
+	CHECK_ZERO(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes))
+
+	/* try to do RecIn before assigning the packet buffer */
+/*	makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, 17,4711, 1235412312);
+	makeDTMFpayload(&rtp_data[12], 1, 1, 10, 100);
+	ok = WebRtcNetEQ_RecIn(inst, (short *) rtp_data, 12+4, 4711);
+	printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));*/
+	
+	/* check all limits of WebRtcNetEQ_AssignBuffer */
+	ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, 149<<1);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+		printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong sizeinbytes\n");
+	}
+	ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NULL, BufferSizeInBytes);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+		printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for NULL memory pointer\n");
+	}
+	ok=WebRtcNetEQ_AssignBuffer(inst, 1, NetEqPacketBuffer, BufferSizeInBytes);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+		printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
+	}
+	ok=WebRtcNetEQ_AssignBuffer(inst, 601, NetEqPacketBuffer, BufferSizeInBytes);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+		printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
+	}
+
+	/* do correct assignbuffer */
+	CHECK_ZERO(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
+
+	ok=WebRtcNetEQ_SetExtraDelay(inst, -1);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
+		printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too small delay\n");
+	}
+	ok=WebRtcNetEQ_SetExtraDelay(inst, 1001);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
+		printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too large delay\n");
+	}
+
+	ok=WebRtcNetEQ_SetPlayoutMode(inst,(enum WebRtcNetEQPlayoutMode) 4711);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_PLAYOUTMODE))) {
+		printf("WebRtcNetEQ_SetPlayoutMode() did not return proper error code for wrong mode\n");
+	}
+
+	/* number of codecs should return zero before adding any codecs */
+	WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2);
+	if(temp1!=0)
+		printf("WebRtcNetEQ_CodecDbGetSizeInfo() return non-zero number of codecs in DB before adding any codecs\n");
+
+	/* get info from empty database */
+	ok=WebRtcNetEQ_CodecDbGetCodecInfo(inst, 17, &usedCodec);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST1))) {
+		printf("WebRtcNetEQ_CodecDbGetCodecInfo() did not return proper error code for out-of-range entry number\n");
+	}
+
+	/* remove codec from empty database */
+	ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderPCMa);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST4))) {
+		printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that has not been added\n");
+	}
+
+	/* add codec with unsupported fs */
+#ifdef CODEC_PCM16B
+#ifndef NETEQ_48KHZ_WIDEBAND
+	SET_CODEC_PAR(codecInst,kDecoderPCM16Bswb48kHz,77,NULL,48000);
+	SET_PCM16B_SWB48_FUNCTIONS(codecInst);
+	ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_FS))) {
+		printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding codec with unsupported sample freq\n");
+	}
+#else
+	printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled with 48kHz support.\n");
+#endif
+#else
+    printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled without PCM16B support.\n");
+#endif
+
+	/* add two codecs with identical payload types */
+	SET_CODEC_PAR(codecInst,kDecoderPCMa,17,NULL,8000);
+	SET_PCMA_FUNCTIONS(codecInst);
+	CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+
+	SET_CODEC_PAR(codecInst,kDecoderPCMu,17,NULL,8000);
+	SET_PCMU_FUNCTIONS(codecInst);
+	ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+		printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding two codecs with identical payload types\n");
+	}
+
+	/* try adding several payload types for CNG codecs */
+	SET_CODEC_PAR(codecInst,kDecoderCNG,105,NULL,16000);
+	SET_CNG_FUNCTIONS(codecInst);
+	CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+	SET_CODEC_PAR(codecInst,kDecoderCNG,13,NULL,8000);
+	SET_CNG_FUNCTIONS(codecInst);
+	CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+
+    /* try adding a speech codec over a CNG codec */
+    SET_CODEC_PAR(codecInst,kDecoderISAC,105,NULL,16000); /* same as WB CNG above */
+	SET_ISAC_FUNCTIONS(codecInst);
+	ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+		printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
+	}
+
+    /* try adding a CNG codec over a speech codec */
+    SET_CODEC_PAR(codecInst,kDecoderCNG,17,NULL,32000); /* same as PCMU above */
+	SET_CNG_FUNCTIONS(codecInst);
+	ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+		printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
+	}
+
+
+	/* remove codec out of range */
+	ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedStart);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
+		printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
+	}
+	ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedEnd);
+	if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
+		printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
+	}
+
+	/*SET_CODEC_PAR(codecInst,kDecoderEG711a,NETEQ_CODEC_EG711A_PT,NetEqiPCMAState,8000);
+	SET_IPCMA_FUNCTIONS(codecInst);
+	CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+*/
+	free(inst);
+
+	return(0);
+
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/PayloadTypes.h b/trunk/src/modules/audio_coding/neteq/test/PayloadTypes.h
new file mode 100644
index 0000000..f6cc3da
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/PayloadTypes.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* PayloadTypes.h */
+/* Used by NetEqRTPplay application */
+
+/* RTP defined codepoints */
+/* NOTE(review): these mostly follow the RFC 3551 static assignments, but
+   DVI4/8000 (static PT 5) is remapped to 125 here -- confirm intended. */
+#define NETEQ_CODEC_PCMU_PT				0
+#define NETEQ_CODEC_GSMFR_PT			3
+#define NETEQ_CODEC_G723_PT				4
+#define NETEQ_CODEC_DVI4_PT				125 // 8 kHz version
+//#define NETEQ_CODEC_DVI4_16_PT			6  // 16 kHz version
+#define NETEQ_CODEC_PCMA_PT				8
+#define NETEQ_CODEC_G722_PT				9
+#define NETEQ_CODEC_CN_PT				13
+//#define NETEQ_CODEC_G728_PT				15
+//#define NETEQ_CODEC_DVI4_11_PT			16  // 11.025 kHz version
+//#define NETEQ_CODEC_DVI4_22_PT			17  // 22.050 kHz version
+#define NETEQ_CODEC_G729_PT				18
+
+/* Dynamic RTP codepoints as defined in VoiceEngine (file VEAPI.cpp) */
+/* These must stay in sync with the VoiceEngine assignments. */
+#define NETEQ_CODEC_IPCMWB_PT			97
+#define NETEQ_CODEC_SPEEX8_PT			98
+#define NETEQ_CODEC_SPEEX16_PT			99
+#define NETEQ_CODEC_EG711U_PT			100
+#define NETEQ_CODEC_EG711A_PT			101
+#define NETEQ_CODEC_ILBC_PT				102
+#define NETEQ_CODEC_ISAC_PT				103
+#define NETEQ_CODEC_ISACLC_PT			119
+#define NETEQ_CODEC_ISACSWB_PT			104
+#define NETEQ_CODEC_AVT_PT				106
+#define NETEQ_CODEC_G722_1_16_PT		108
+#define NETEQ_CODEC_G722_1_24_PT		109
+#define NETEQ_CODEC_G722_1_32_PT		110
+#define NETEQ_CODEC_SC3_PT				111
+#define NETEQ_CODEC_AMR_PT				112
+#define NETEQ_CODEC_GSMEFR_PT			113
+//#define NETEQ_CODEC_ILBCRCU_PT			114
+#define NETEQ_CODEC_G726_16_PT			115
+#define NETEQ_CODEC_G726_24_PT			116
+#define NETEQ_CODEC_G726_32_PT			121
+#define NETEQ_CODEC_RED_PT				117
+#define NETEQ_CODEC_G726_40_PT			118
+//#define NETEQ_CODEC_ENERGY_PT			120
+#define NETEQ_CODEC_CN_WB_PT			105
+#define NETEQ_CODEC_CN_SWB_PT           126
+#define NETEQ_CODEC_G729_1_PT			107
+#define NETEQ_CODEC_G729D_PT			123
+#define NETEQ_CODEC_MELPE_PT			124
+#define NETEQ_CODEC_CELT32_PT     114
+
+/* Extra dynamic codepoints */
+#define NETEQ_CODEC_AMRWB_PT			120
+#define NETEQ_CODEC_PCM16B_PT			93
+#define NETEQ_CODEC_PCM16B_WB_PT		94
+#define NETEQ_CODEC_PCM16B_SWB32KHZ_PT	95
+#define NETEQ_CODEC_PCM16B_SWB48KHZ_PT	96
+#define NETEQ_CODEC_MPEG4AAC_PT			122
+
+
+/* Not default in VoiceEngine */
+#define NETEQ_CODEC_G722_1C_24_PT		84
+#define NETEQ_CODEC_G722_1C_32_PT		85
+#define NETEQ_CODEC_G722_1C_48_PT		86
+
+#define NETEQ_CODEC_SILK_8_PT			80
+#define NETEQ_CODEC_SILK_12_PT			81
+#define NETEQ_CODEC_SILK_16_PT			82
+#define NETEQ_CODEC_SILK_24_PT			83
+
diff --git a/trunk/src/modules/audio_coding/neteq/test/RTPanalyze.cc b/trunk/src/modules/audio_coding/neteq/test/RTPanalyze.cc
new file mode 100644
index 0000000..12617dd
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/RTPanalyze.cc
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <stdio.h>
+#include <vector>
+
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+
+enum {
+  // Dynamic payload type this tool treats as RED (redundant audio).
+  // NOTE(review): must match the sender's PT assignment -- confirm against
+  // the capture being analyzed.
+  kRedPayloadType = 127
+};
+
+// Reads an RTP dump file (argv[1]) and writes a text log (argv[2]) with one
+// line per packet: sequence number, timestamp, send time, payload size,
+// payload type, and marker bit. Packets with payload type kRedPayloadType
+// are expanded so each redundant block gets its own line, prefixed with '*'.
+// Returns 0 on success, -1 on usage or file errors.
+int main(int argc, char* argv[]) {
+  // Guard against missing arguments; argv[1]/argv[2] were previously
+  // dereferenced unchecked, crashing when run with no parameters.
+  if (argc < 3) {
+    printf("Usage: RTPanalyze input.rtp output.txt\n");
+    return -1;
+  }
+
+  FILE* in_file = fopen(argv[1], "rb");
+  if (!in_file) {
+    printf("Cannot open input file %s\n", argv[1]);
+    return -1;
+  }
+  printf("Input file: %s\n", argv[1]);
+
+  FILE* out_file = fopen(argv[2], "wt");
+  if (!out_file) {
+    printf("Cannot open output file %s\n", argv[2]);
+    fclose(in_file);  // Do not leak the already-opened input handle.
+    return -1;
+  }
+  printf("Output file: %s\n\n", argv[2]);
+
+  // Print file header.
+  fprintf(out_file, "SeqNo  TimeStamp   SendTime  Size    PT  M\n");
+
+  // Read (skip) the RTP dump file header before the packet records.
+  NETEQTEST_RTPpacket::skipFileHeader(in_file);
+  NETEQTEST_RTPpacket packet;
+
+  while (packet.readFromFile(in_file) >= 0) {
+    // Write packet data to file.
+    fprintf(out_file, "%5u %10u %10u %5i %5i %2i\n",
+            packet.sequenceNumber(), packet.timeStamp(), packet.time(),
+            packet.dataLen(), packet.payloadType(), packet.markerBit());
+    if (packet.payloadType() == kRedPayloadType) {
+      // Expand each redundant payload block into its own output line.
+      WebRtcNetEQ_RTPInfo red_header;
+      int len;
+      int red_index = 0;
+      while ((len = packet.extractRED(red_index++, red_header)) >= 0) {
+        fprintf(out_file, "* %5u %10u %10u %5i %5i\n",
+                red_header.sequenceNumber, red_header.timeStamp,
+                packet.time(), len, red_header.payloadType);
+      }
+      assert(red_index > 1);  // We must get at least one payload.
+    }
+  }
+
+  fclose(in_file);
+  fclose(out_file);
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/RTPcat.cc b/trunk/src/modules/audio_coding/neteq/test/RTPcat.cc
new file mode 100644
index 0000000..001b00b
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/RTPcat.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+
+#define FIRSTLINELEN 40
+
+// Concatenates several RTP dump files into one. The text line and binary
+// header are copied verbatim from the first input file; every packet of
+// every input file is then appended to the output in order.
+int main(int argc, char* argv[]) {
+  if (argc < 3) {
+    printf("Usage: RTPcat in1.rtp int2.rtp [...] out.rtp\n");
+    exit(1);
+  }
+
+  FILE* first_in = fopen(argv[1], "rb");
+  if (!first_in) {
+    printf("Cannot open input file %s\n", argv[1]);
+    return -1;
+  }
+
+  const int out_arg = argc - 1;  // Last parameter is out file.
+  FILE* out_file = fopen(argv[out_arg], "wb");
+  if (!out_file) {
+    printf("Cannot open output file %s\n", argv[out_arg]);
+    return -1;
+  }
+  printf("Output RTP file: %s\n\n", argv[out_arg]);
+
+  // Copy the header of the first file directly to the output file.
+  char header_buf[FIRSTLINELEN];
+  const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+  EXPECT_TRUE(fgets(header_buf, FIRSTLINELEN, first_in) != NULL);
+  EXPECT_GT(fputs(header_buf, out_file), 0);
+  EXPECT_EQ(kRtpDumpHeaderSize, fread(header_buf, 1, kRtpDumpHeaderSize,
+                                      first_in));
+  EXPECT_EQ(kRtpDumpHeaderSize, fwrite(header_buf, 1, kRtpDumpHeaderSize,
+                                       out_file));
+
+  // Close input file and re-open it later (easier to write the loop below).
+  fclose(first_in);
+
+  for (int i = 1; i < argc - 1; i++) {
+    FILE* in_file = fopen(argv[i], "rb");
+    if (!in_file) {
+      printf("Cannot open input file %s\n", argv[i]);
+      return -1;
+    }
+    printf("Input RTP file: %s\n", argv[i]);
+
+    NETEQTEST_RTPpacket::skipFileHeader(in_file);
+    NETEQTEST_RTPpacket packet;
+    // A file whose first packet cannot be read aborts the whole run,
+    // matching the original behavior.
+    if (packet.readFromFile(in_file) < 0) {
+      exit(1);
+    }
+    do {
+      packet.writeToFile(out_file);
+    } while (packet.readFromFile(in_file) >= 0);
+    fclose(in_file);
+  }
+  fclose(out_file);
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/RTPchange.cc b/trunk/src/modules/audio_coding/neteq/test/RTPchange.cc
new file mode 100644
index 0000000..ecbd81c
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/RTPchange.cc
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <stdio.h>
+
+#include <algorithm>
+#include <map>
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+
+#define FIRSTLINELEN 40
+
+// Strict-weak-ordering predicate for std::sort: orders packet pointers by
+// ascending send time.
+static bool pktCmp(NETEQTEST_RTPpacket *a, NETEQTEST_RTPpacket *b) {
+  return a->time() < b->time();
+}
+
+// Rewrites the send times of the packets in an RTP dump file (argv[1])
+// using a timing file (argv[2], as produced by RTPanalyze: one header line
+// followed by "seqno timestamp sendtime ..." records), then re-sorts the
+// packets by the new send times and writes them to a new dump (argv[3]).
+// Returns 0 on success, -1 on usage or file errors.
+int main(int argc, char* argv[]) {
+  // Guard against missing arguments; argv[1..3] were previously
+  // dereferenced unchecked, crashing when run with too few parameters.
+  if (argc < 4) {
+    printf("Usage: RTPchange input.rtp timing.txt output.rtp\n");
+    return -1;
+  }
+
+  FILE* in_file = fopen(argv[1], "rb");
+  if (!in_file) {
+    printf("Cannot open input file %s\n", argv[1]);
+    return -1;
+  }
+  printf("Input RTP file: %s\n", argv[1]);
+
+  FILE* stat_file = fopen(argv[2], "rt");
+  if (!stat_file) {
+    printf("Cannot open timing file %s\n", argv[2]);
+    fclose(in_file);  // Do not leak the already-opened input handle.
+    return -1;
+  }
+  printf("Timing file: %s\n", argv[2]);
+
+  FILE* out_file = fopen(argv[3], "wb");
+  if (!out_file) {
+    printf("Cannot open output file %s\n", argv[3]);
+    fclose(in_file);
+    fclose(stat_file);
+    return -1;
+  }
+  printf("Output RTP file: %s\n\n", argv[3]);
+
+  // Read all statistics and insert into map.
+  // Read first line (column headers -- discarded).
+  char temp_str[100];
+  if (fgets(temp_str, 100, stat_file) == NULL) {
+    printf("Failed to read timing file %s\n", argv[2]);
+    return -1;
+  }
+  // Map (sequence number, timestamp) -> new send time.
+  std::map<std::pair<uint16_t, uint32_t>, uint32_t> packet_stats;
+  uint16_t seq_no;
+  uint32_t ts;
+  uint32_t send_time;
+
+  while (fscanf(stat_file,
+                "%hu %u %u %*i %*i\n", &seq_no, &ts, &send_time) == 3) {
+    std::pair<uint16_t, uint32_t>
+        temp_pair = std::pair<uint16_t, uint32_t>(seq_no, ts);
+
+    packet_stats[temp_pair] = send_time;
+  }
+
+  fclose(stat_file);
+
+  // Read file header and write directly to output file.
+  char first_line[FIRSTLINELEN];
+  if (fgets(first_line, FIRSTLINELEN, in_file) == NULL) {
+    printf("Failed to read first line of input file %s\n", argv[1]);
+    return -1;
+  }
+  fputs(first_line, out_file);
+  // start_sec + start_usec + source + port + padding
+  const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+  if (fread(first_line, 1, kRtpDumpHeaderSize, in_file)
+      != kRtpDumpHeaderSize) {
+    printf("Failed to read RTP dump header from input file %s\n", argv[1]);
+    return -1;
+  }
+  if (fwrite(first_line, 1, kRtpDumpHeaderSize, out_file)
+      != kRtpDumpHeaderSize) {
+    printf("Failed to write RTP dump header to output file %s\n", argv[3]);
+    return -1;
+  }
+
+  std::vector<NETEQTEST_RTPpacket *> packet_vec;
+
+  while (1) {
+    // Insert in vector.
+    NETEQTEST_RTPpacket *new_packet = new NETEQTEST_RTPpacket();
+    if (new_packet->readFromFile(in_file) < 0) {
+      // End of file. Free the packet that was never filled in; it was
+      // previously leaked here on every run.
+      delete new_packet;
+      break;
+    }
+
+    // Look for new send time in statistics map.
+    std::pair<uint16_t, uint32_t> temp_pair =
+        std::pair<uint16_t, uint32_t>(new_packet->sequenceNumber(),
+                                      new_packet->timeStamp());
+
+    // NOTE(review): operator[] default-inserts 0 when the timing file has
+    // no entry for this packet -- confirm send time 0 is intended then.
+    uint32_t new_send_time = packet_stats[temp_pair];
+    new_packet->setTime(new_send_time);  // Set new send time.
+    packet_vec.push_back(new_packet);  // Insert in vector.
+  }
+
+  // Sort the vector according to send times.
+  std::sort(packet_vec.begin(), packet_vec.end(), pktCmp);
+
+  std::vector<NETEQTEST_RTPpacket *>::iterator it;
+  for (it = packet_vec.begin(); it != packet_vec.end(); it++) {
+    // Write to out file.
+    if ((*it)->writeToFile(out_file) < 0) {
+      printf("Error writing to file\n");
+      return -1;
+    }
+    // Delete packet.
+    delete *it;
+  }
+
+  fclose(in_file);
+  fclose(out_file);
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/RTPencode.cc b/trunk/src/modules/audio_coding/neteq/test/RTPencode.cc
new file mode 100644
index 0000000..f328794
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/RTPencode.cc
@@ -0,0 +1,2031 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//TODO(hlundin): Reformat file to meet style guide.
+
+/* header includes */
+#include "typedefs.h"
+#include "stdio.h"
+#include "webrtc_neteq.h" // needed for enum WebRtcNetEQDecoder
+#include <string.h>
+#include <stdlib.h>
+#include <cassert>
+
+#ifdef WIN32
+#include <winsock2.h>
+#endif
+#ifdef WEBRTC_LINUX
+#include <netinet/in.h>
+#endif
+
+
+/************************/
+/* Define payload types */
+/************************/
+
+#include "PayloadTypes.h"
+
+
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define STOPSENDTIME 3000
+#define RESTARTSENDTIME 0 //162500
+#define FIRSTLINELEN 40
+/* Bail out of the enclosing function with -1 and a diagnostic
+   (file/line/expression) if (a) evaluates to 0. */
+#define CHECK_NOT_NULL(a) if((a)==0){printf("\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
+
+//#define MULTIPLE_SAME_TIMESTAMP
+#define REPEAT_PACKET_DISTANCE 17
+#define REPEAT_PACKET_COUNT 1  // number of extra packets to send
+
+//#define INSERT_OLD_PACKETS
+#define OLD_PACKET 5 // how many seconds too old should the packet be?
+
+//#define TIMESTAMP_WRAPAROUND
+
+//#define RANDOM_DATA
+//#define RANDOM_PAYLOAD_DATA
+#define RANDOM_SEED 10
+
+//#define INSERT_DTMF_PACKETS
+//#define NO_DTMF_OVERDUB
+#define DTMF_PACKET_INTERVAL 2000
+#define DTMF_DURATION 500
+
+/* Stereo payload packing modes (see stereoInterleave/stereoDeInterleave). */
+#define STEREO_MODE_FRAME 0
+#define STEREO_MODE_SAMPLE_1 1 //1 octet per sample
+#define STEREO_MODE_SAMPLE_2 2 //2 octets per sample
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+/* Map a codec name string to its NetEQ decoder enum, RTP payload type,
+   sample rate, bitrate and redundancy flag (outputs via pointers). */
+void NetEQTest_GetCodec_and_PT(char * name, enum WebRtcNetEQDecoder *codec, int *PT, int frameLen, int *fs, int *bitrate, int *useRed);
+/* Create and configure encoder instance(s) for the chosen codec. */
+int NetEQTest_init_coders(enum WebRtcNetEQDecoder coder, int enc_frameSize, int bitrate, int sampfreq , int vad, int numChannels);
+void defineCodecs(enum WebRtcNetEQDecoder *usedCodec, int *noOfCodecs );
+/* Release the encoder instance(s) created by NetEQTest_init_coders. */
+int NetEQTest_free_coders(enum WebRtcNetEQDecoder coder, int numChannels);
+/* Encode one frame of audio with the chosen coder into 'encoded'. */
+int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate , int * vad, int useVAD, int bitrate, int numChannels);
+/* Write an RTP header (PT, seqNo, timestamp, SSRC) into rtp_data. */
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc);
+/* Build the header of a redundant (RED) RTP packet carrying numPayloads
+   blocks; presumably returns the header length -- TODO(review): confirm. */
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
+                        int seqNo, WebRtc_UWord32 ssrc);
+/* Build a DTMF event payload (event, end flag, volume, duration). */
+int makeDTMFpayload(unsigned char* payload_data, int Event, int End, int Volume, int Duration);
+/* Convert between interleaved and de-interleaved stereo sample orders. */
+void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples);
+void stereoInterleave(unsigned char* data, int dataLen, int stride);
+
+/*********************/
+/* Codec definitions */
+/*********************/
+
+#include "webrtc_vad.h"
+
+#if ((defined CODEC_PCM16B)||(defined NETEQ_ARBITRARY_CODEC))
+	#include "pcm16b.h"
+#endif
+#ifdef CODEC_G711
+	#include "g711_interface.h"
+#endif
+#ifdef CODEC_G729
+	#include "G729Interface.h"
+#endif
+#ifdef CODEC_G729_1
+	#include "G729_1Interface.h"
+#endif
+#ifdef CODEC_AMR
+	#include "AMRInterface.h"
+	#include "AMRCreation.h"
+#endif
+#ifdef CODEC_AMRWB
+	#include "AMRWBInterface.h"
+	#include "AMRWBCreation.h"
+#endif
+#ifdef CODEC_ILBC
+	#include "ilbc.h"
+#endif
+#if (defined CODEC_ISAC || defined CODEC_ISAC_SWB) 
+	#include "isac.h"
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+	#include "isacfix.h"
+	#ifdef CODEC_ISAC
+		#error Cannot have both ISAC and ISACfix defined. Please de-select one in the beginning of RTPencode.cpp
+	#endif
+#endif
+#ifdef CODEC_G722
+	#include "g722_interface.h"
+#endif
+#ifdef CODEC_G722_1_24
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1_32
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1_16
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1C_24
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1C_32
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1C_48
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G726
+    #include "G726Creation.h"
+    #include "G726Interface.h"
+#endif
+#ifdef CODEC_GSMFR
+	#include "GSMFRInterface.h"
+	#include "GSMFRCreation.h"
+#endif
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+  #include "webrtc_cng.h"
+#endif
+#if ((defined CODEC_SPEEX_8)||(defined CODEC_SPEEX_16))
+	#include "SpeexInterface.h"
+#endif
+#ifdef CODEC_CELT_32
+#include "celt_interface.h"
+#endif
+
+
+/***********************************/
+/* Global codec instance variables */
+/***********************************/
+
+/* Global encoder/VAD state. Each is an array of 2 -- presumably one slot
+   per audio channel for stereo operation (index 0 = mono/left, 1 = right);
+   TODO(review): confirm against NetEQTest_init_coders. */
+WebRtcVadInst *VAD_inst[2];
+
+#ifdef CODEC_G722
+    G722EncInst *g722EncState[2];
+#endif
+
+#ifdef CODEC_G722_1_24
+	G722_1_24_encinst_t *G722_1_24enc_inst[2];
+#endif
+#ifdef CODEC_G722_1_32
+	G722_1_32_encinst_t *G722_1_32enc_inst[2];
+#endif
+#ifdef CODEC_G722_1_16
+	G722_1_16_encinst_t *G722_1_16enc_inst[2];
+#endif
+#ifdef CODEC_G722_1C_24
+	G722_1C_24_encinst_t *G722_1C_24enc_inst[2];
+#endif
+#ifdef CODEC_G722_1C_32
+	G722_1C_32_encinst_t *G722_1C_32enc_inst[2];
+#endif
+#ifdef CODEC_G722_1C_48
+	G722_1C_48_encinst_t *G722_1C_48enc_inst[2];
+#endif
+#ifdef CODEC_G726
+    G726_encinst_t *G726enc_inst[2];
+#endif
+#ifdef CODEC_G729
+	G729_encinst_t *G729enc_inst[2];
+#endif
+#ifdef CODEC_G729_1
+	G729_1_inst_t *G729_1_inst[2];
+#endif
+#ifdef CODEC_AMR
+	AMR_encinst_t *AMRenc_inst[2];
+	WebRtc_Word16		  AMR_bitrate;
+#endif
+#ifdef CODEC_AMRWB
+	AMRWB_encinst_t *AMRWBenc_inst[2];
+	WebRtc_Word16		  AMRWB_bitrate;
+#endif
+#ifdef CODEC_ILBC
+	iLBC_encinst_t *iLBCenc_inst[2];
+#endif
+#ifdef CODEC_ISAC
+	ISACStruct *ISAC_inst[2];
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+	ISACFIX_MainStruct *ISAC_inst[2];
+#endif
+#ifdef CODEC_ISAC_SWB
+	ISACStruct *ISACSWB_inst[2];
+#endif
+#ifdef CODEC_GSMFR
+	GSMFR_encinst_t *GSMFRenc_inst[2];
+#endif
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+	CNG_enc_inst *CNGenc_inst[2];
+#endif
+#ifdef CODEC_SPEEX_8
+	SPEEX_encinst_t *SPEEX8enc_inst[2];
+#endif
+#ifdef CODEC_SPEEX_16
+	SPEEX_encinst_t *SPEEX16enc_inst[2];
+#endif
+#ifdef CODEC_CELT_32
+  CELT_encinst_t *CELT32enc_inst[2];
+#endif
+#ifdef CODEC_G711
+    void *G711state[2]={NULL, NULL};
+#endif
+
+
+int main(int argc, char* argv[])
+{
+	int packet_size, fs;
+	enum WebRtcNetEQDecoder usedCodec;
+	int payloadType;
+	int bitrate = 0;
+	int useVAD, vad;
+    int useRed=0;
+	int len, enc_len;
+	WebRtc_Word16 org_data[4000];
+	unsigned char rtp_data[8000];
+	WebRtc_Word16 seqNo=0xFFF;
+	WebRtc_UWord32 ssrc=1235412312;
+	WebRtc_UWord32 timestamp=0xAC1245;
+	WebRtc_UWord16 length, plen;
+	WebRtc_UWord32 offset;
+	double sendtime = 0;
+    int red_PT[2] = {0};
+    WebRtc_UWord32 red_TS[2] = {0};
+    WebRtc_UWord16 red_len[2] = {0};
+    int RTPheaderLen=12;
+	unsigned char red_data[8000];
+#ifdef INSERT_OLD_PACKETS
+	WebRtc_UWord16 old_length, old_plen;
+	int old_enc_len;
+	int first_old_packet=1;
+	unsigned char old_rtp_data[8000];
+	int packet_age=0;
+#endif
+#ifdef INSERT_DTMF_PACKETS
+	int NTone = 1;
+	int DTMFfirst = 1;
+	WebRtc_UWord32 DTMFtimestamp;
+    bool dtmfSent = false;
+#endif
+    bool usingStereo = false;
+    int stereoMode = 0;
+    int numChannels = 1;
+
+	/* check number of parameters */
+	if ((argc != 6) && (argc != 7)) {
+		/* print help text and exit */
+		printf("Application to encode speech into an RTP stream.\n");
+		printf("The program reads a PCM file and encodes is using the specified codec.\n");
+		printf("The coded speech is packetized in RTP packest and written to the output file.\n");
+		printf("The format of the RTP stream file is simlilar to that of rtpplay,\n");
+		printf("but with the receive time euqal to 0 for all packets.\n");
+		printf("Usage:\n\n");
+		printf("%s PCMfile RTPfile frameLen codec useVAD bitrate\n", argv[0]);
+		printf("where:\n");
+
+		printf("PCMfile      : PCM speech input file\n\n");
+
+		printf("RTPfile      : RTP stream output file\n\n");
+
+		printf("frameLen     : 80...960...  Number of samples per packet (limit depends on codec)\n\n");
+
+		printf("codecName\n");
+#ifdef CODEC_PCM16B
+		printf("             : pcm16b       16 bit PCM (8kHz)\n");
+#endif
+#ifdef CODEC_PCM16B_WB
+		printf("             : pcm16b_wb   16 bit PCM (16kHz)\n");
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+		printf("             : pcm16b_swb32 16 bit PCM (32kHz)\n");
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+		printf("             : pcm16b_swb48 16 bit PCM (48kHz)\n");
+#endif
+#ifdef CODEC_G711
+		printf("             : pcma         g711 A-law (8kHz)\n");
+#endif
+#ifdef CODEC_G711
+		printf("             : pcmu         g711 u-law (8kHz)\n");
+#endif
+#ifdef CODEC_G729
+		printf("             : g729         G729 (8kHz and 8kbps) CELP (One-Three frame(s)/packet)\n");
+#endif
+#ifdef CODEC_G729_1
+		printf("             : g729.1       G729.1 (16kHz) variable rate (8--32 kbps)\n");
+#endif
+#ifdef CODEC_G722_1_16
+		printf("             : g722.1_16    G722.1 coder (16kHz) (g722.1 with 16kbps)\n");
+#endif
+#ifdef CODEC_G722_1_24
+		printf("             : g722.1_24    G722.1 coder (16kHz) (the 24kbps version)\n");
+#endif
+#ifdef CODEC_G722_1_32
+		printf("             : g722.1_32    G722.1 coder (16kHz) (the 32kbps version)\n");
+#endif
+#ifdef CODEC_G722_1C_24
+		printf("             : g722.1C_24    G722.1 C coder (32kHz) (the 24kbps version)\n");
+#endif
+#ifdef CODEC_G722_1C_32
+		printf("             : g722.1C_32    G722.1 C coder (32kHz) (the 32kbps version)\n");
+#endif
+#ifdef CODEC_G722_1C_48
+		printf("             : g722.1C_48    G722.1 C coder (32kHz) (the 48kbps)\n");
+#endif
+
+#ifdef CODEC_G726
+        printf("             : g726_16      G726 coder (8kHz) 16kbps\n");
+        printf("             : g726_24      G726 coder (8kHz) 24kbps\n");
+        printf("             : g726_32      G726 coder (8kHz) 32kbps\n");
+        printf("             : g726_40      G726 coder (8kHz) 40kbps\n");
+#endif
+#ifdef CODEC_AMR
+		printf("             : AMRXk        Adaptive Multi Rate CELP codec (8kHz)\n");
+		printf("                            X = 4.75, 5.15, 5.9, 6.7, 7.4, 7.95, 10.2 or 12.2\n");
+#endif
+#ifdef CODEC_AMRWB
+		printf("             : AMRwbXk      Adaptive Multi Rate Wideband CELP codec (16kHz)\n");
+		printf("                            X = 7, 9, 12, 14, 16, 18, 20, 23 or 24\n");
+#endif
+#ifdef CODEC_ILBC
+		printf("             : ilbc         iLBC codec (8kHz and 13.8kbps)\n");
+#endif
+#ifdef CODEC_ISAC
+		printf("             : isac         iSAC (16kHz and 32.0 kbps). To set rate specify a rate parameter as last parameter\n");
+#endif
+#ifdef CODEC_ISAC_SWB
+		printf("             : isacswb       iSAC SWB (32kHz and 32.0-52.0 kbps). To set rate specify a rate parameter as last parameter\n");
+#endif
+#ifdef CODEC_GSMFR
+		printf("             : gsmfr        GSM FR codec (8kHz and 13kbps)\n");
+#endif
+#ifdef CODEC_G722
+		printf("             : g722         g722 coder (16kHz) (the 64kbps version)\n");
+#endif
+#ifdef CODEC_SPEEX_8
+		printf("             : speex8       speex coder (8 kHz)\n");
+#endif
+#ifdef CODEC_SPEEX_16
+		printf("             : speex16      speex coder (16 kHz)\n");
+#endif
+#ifdef CODEC_CELT_32
+    printf("             : celt32       celt coder (32 kHz)\n");
+#endif
+#ifdef CODEC_RED
+#ifdef CODEC_G711
+		printf("             : red_pcm      Redundancy RTP packet with 2*G711A frames\n");
+#endif
+#ifdef CODEC_ISAC
+		printf("             : red_isac     Redundancy RTP packet with 2*iSAC frames\n");
+#endif
+#endif
+        printf("\n");
+
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+		printf("useVAD       : 0 Voice Activity Detection is switched off\n");
+		printf("             : 1 Voice Activity Detection is switched on\n\n");
+#else
+		printf("useVAD       : 0 Voice Activity Detection switched off (on not supported)\n\n");
+#endif
+		printf("bitrate      : Codec bitrate in bps (only applies to vbr codecs)\n\n");
+
+		return(0);
+	}
+
+	FILE* in_file=fopen(argv[1],"rb");
+	CHECK_NOT_NULL(in_file);
+	printf("Input file: %s\n",argv[1]);
+	FILE* out_file=fopen(argv[2],"wb");
+	CHECK_NOT_NULL(out_file);
+	printf("Output file: %s\n\n",argv[2]);
+	packet_size=atoi(argv[3]);
+	CHECK_NOT_NULL(packet_size);
+	printf("Packet size: %i\n",packet_size);
+
+    // check for stereo
+    if(argv[4][strlen(argv[4])-1] == '*') {
+        // use stereo
+        usingStereo = true;
+        numChannels = 2;
+        argv[4][strlen(argv[4])-1] = '\0';
+    }
+
+	NetEQTest_GetCodec_and_PT(argv[4], &usedCodec, &payloadType, packet_size, &fs, &bitrate, &useRed);
+
+    if(useRed) {
+        RTPheaderLen = 12 + 4 + 1; /* standard RTP = 12; 4 bytes per redundant payload, except last one which is 1 byte */
+    }
+
+	useVAD=atoi(argv[5]);
+#if !(defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+	if (useVAD!=0) {
+		printf("Error: this simulation does not support VAD/DTX/CNG\n");
+	}
+#endif
+	
+    // check stereo type
+    if(usingStereo)
+    {
+        switch(usedCodec) 
+        {
+            // sample based codecs 
+        case kDecoderPCMu:
+        case kDecoderPCMa:
+        case kDecoderG722:
+            {
+                // 1 octet per sample
+                stereoMode = STEREO_MODE_SAMPLE_1;
+                break;
+            }
+        case kDecoderPCM16B:
+        case kDecoderPCM16Bwb:
+        case kDecoderPCM16Bswb32kHz:
+        case kDecoderPCM16Bswb48kHz:
+            {
+                // 2 octets per sample
+                stereoMode = STEREO_MODE_SAMPLE_2;
+                break;
+            }
+
+            // fixed-rate frame codecs (with internal VAD)
+        case kDecoderG729:
+            {
+                if(useVAD) {
+                    printf("Cannot use codec-internal VAD and stereo\n");
+                    exit(0);
+                }
+                // break intentionally omitted
+            }
+        case kDecoderG722_1_16:
+        case kDecoderG722_1_24:
+        case kDecoderG722_1_32:
+        case kDecoderG722_1C_24:
+        case kDecoderG722_1C_32:
+        case kDecoderG722_1C_48:
+            {
+                stereoMode = STEREO_MODE_FRAME;
+                break;
+            }
+        default:
+            {
+                printf("Cannot use codec %s as stereo codec\n", argv[4]);
+                exit(0);
+            }
+        }
+    }
+
+	if ((usedCodec == kDecoderISAC) || (usedCodec == kDecoderISACswb))
+    {
+        if (argc != 7)
+        {
+            if (usedCodec == kDecoderISAC)
+            {
+                bitrate = 32000;
+                printf(
+                    "Running iSAC at default bitrate of 32000 bps (to specify explicitly add the bps as last parameter)\n");
+            }
+            else // (usedCodec==kDecoderISACswb)
+            {
+                bitrate = 56000;
+                printf(
+                    "Running iSAC at default bitrate of 56000 bps (to specify explicitly add the bps as last parameter)\n");
+            }
+        }
+        else
+        {
+            bitrate = atoi(argv[6]);
+            if (usedCodec == kDecoderISAC)
+            {
+                if ((bitrate < 10000) || (bitrate > 32000))
+                {
+                    printf(
+                        "Error: iSAC bitrate must be between 10000 and 32000 bps (%i is invalid)\n",
+                        bitrate);
+                    exit(0);
+                }
+                printf("Running iSAC at bitrate of %i bps\n", bitrate);
+            }
+            else // (usedCodec==kDecoderISACswb)
+            {
+                if ((bitrate < 32000) || (bitrate > 56000))
+                {
+                    printf(
+                        "Error: iSAC SWB bitrate must be between 32000 and 56000 bps (%i is invalid)\n",
+                        bitrate);
+                    exit(0);
+                }
+            }
+        }
+    }
+    else
+    {
+        if (argc == 7)
+        {
+            printf(
+                "Error: Bitrate parameter can only be specified for iSAC, G.723, and G.729.1\n");
+            exit(0);
+        }
+    }
+	
+    if(useRed) {
+        printf("Redundancy engaged. ");
+    }
+	printf("Used codec: %i\n",usedCodec);
+	printf("Payload type: %i\n",payloadType);
+	
+	NetEQTest_init_coders(usedCodec, packet_size, bitrate, fs, useVAD, numChannels);
+
+	/* write file header */
+	//fprintf(out_file, "#!RTPencode%s\n", "1.0");
+	fprintf(out_file, "#!rtpplay%s \n", "1.0"); // this is the string that rtpplay needs
+	WebRtc_UWord32 dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
+	fwrite(&dummy_variable, 4, 1, out_file);
+	fwrite(&dummy_variable, 4, 1, out_file);
+	fwrite(&dummy_variable, 4, 1, out_file);
+	fwrite(&dummy_variable, 2, 1, out_file);
+	fwrite(&dummy_variable, 2, 1, out_file);
+
+#ifdef TIMESTAMP_WRAPAROUND
+	timestamp = 0xFFFFFFFF - fs*10; /* should give wrap-around in 10 seconds */
+#endif
+#if defined(RANDOM_DATA) | defined(RANDOM_PAYLOAD_DATA)
+	srand(RANDOM_SEED);
+#endif
+
+    /* if redundancy is used, the first redundant payload is zero length */
+    red_len[0] = 0;
+
+	/* read first frame */
+	len=fread(org_data,2,packet_size * numChannels,in_file) / numChannels;
+
+    /* de-interleave if stereo */
+    if ( usingStereo )
+    {
+        stereoDeInterleave(org_data, len * numChannels);
+    }
+
+	while (len==packet_size) {
+
+#ifdef INSERT_DTMF_PACKETS
+        dtmfSent = false;
+
+        if ( sendtime >= NTone * DTMF_PACKET_INTERVAL ) {
+            if ( sendtime < NTone * DTMF_PACKET_INTERVAL + DTMF_DURATION ) {
+                // tone has not ended
+                if (DTMFfirst==1) {
+                    DTMFtimestamp = timestamp; // save this timestamp
+                    DTMFfirst=0;
+                }
+                makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, seqNo,DTMFtimestamp, ssrc);
+                enc_len = makeDTMFpayload(&rtp_data[12], NTone % 12, 0, 4, (int) (sendtime - NTone * DTMF_PACKET_INTERVAL)*(fs/1000) + len);
+            }
+            else {
+                // tone has ended
+                makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, seqNo,DTMFtimestamp, ssrc);
+                enc_len = makeDTMFpayload(&rtp_data[12], NTone % 12, 1, 4, DTMF_DURATION*(fs/1000));
+                NTone++;
+                DTMFfirst=1;
+            }
+
+            /* write RTP packet to file */
+            length = htons(12 + enc_len + 8);
+            plen = htons(12 + enc_len);
+            offset = (WebRtc_UWord32) sendtime; //(timestamp/(fs/1000));
+            offset = htonl(offset);
+            fwrite(&length, 2, 1, out_file);
+            fwrite(&plen, 2, 1, out_file);
+            fwrite(&offset, 4, 1, out_file);
+            fwrite(rtp_data, 12 + enc_len, 1, out_file);
+
+            dtmfSent = true;
+        }
+#endif
+
+#ifdef NO_DTMF_OVERDUB
+        /* If DTMF is sent, we should not send any speech packets during the same time */
+        if (dtmfSent) {
+            enc_len = 0;
+        }
+        else {
+#endif
+		/* encode frame */
+		enc_len=NetEQTest_encode(usedCodec, org_data, packet_size, &rtp_data[12] ,fs,&vad, useVAD, bitrate, numChannels);
+		if (enc_len==-1) {
+			printf("Error encoding frame\n");
+			exit(0);
+		}
+
+        if ( usingStereo &&
+            stereoMode != STEREO_MODE_FRAME &&
+            vad == 1 )
+        {
+            // interleave the encoded payload for sample-based codecs (not for CNG)
+            stereoInterleave(&rtp_data[12], enc_len, stereoMode);
+        }
+#ifdef NO_DTMF_OVERDUB
+        }
+#endif
+		
+		if (enc_len > 0 && (sendtime <= STOPSENDTIME || sendtime > RESTARTSENDTIME)) {
+            if(useRed) {
+                if(red_len[0] > 0) {
+                    memmove(&rtp_data[RTPheaderLen+red_len[0]], &rtp_data[12], enc_len);
+                    memcpy(&rtp_data[RTPheaderLen], red_data, red_len[0]);
+
+                    red_len[1] = enc_len;
+                    red_TS[1] = timestamp;
+                    if(vad)
+                        red_PT[1] = payloadType;
+                    else
+                        red_PT[1] = NETEQ_CODEC_CN_PT;
+
+                    makeRedundantHeader(rtp_data, red_PT, 2, red_TS, red_len, seqNo++, ssrc);
+
+
+                    enc_len += red_len[0] + RTPheaderLen - 12;
+                }
+                else { // do not use redundancy payload for this packet, i.e., only last payload
+                    memmove(&rtp_data[RTPheaderLen-4], &rtp_data[12], enc_len);
+                    //memcpy(&rtp_data[RTPheaderLen], red_data, red_len[0]);
+
+                    red_len[1] = enc_len;
+                    red_TS[1] = timestamp;
+                    if(vad)
+                        red_PT[1] = payloadType;
+                    else
+                        red_PT[1] = NETEQ_CODEC_CN_PT;
+
+                    makeRedundantHeader(rtp_data, red_PT, 2, red_TS, red_len, seqNo++, ssrc);
+
+
+                    enc_len += red_len[0] + RTPheaderLen - 4 - 12; // 4 is length of redundancy header (not used)
+                }
+            }
+            else {
+                
+                /* make RTP header */
+                if (vad) // regular speech data
+                    makeRTPheader(rtp_data, payloadType, seqNo++,timestamp, ssrc);
+                else // CNG data
+                    makeRTPheader(rtp_data, NETEQ_CODEC_CN_PT, seqNo++,timestamp, ssrc);
+                
+            }
+#ifdef MULTIPLE_SAME_TIMESTAMP
+			int mult_pack=0;
+			do {
+#endif //MULTIPLE_SAME_TIMESTAMP
+			/* write RTP packet to file */
+			length = htons(12 + enc_len + 8);
+			plen = htons(12 + enc_len);
+			offset = (WebRtc_UWord32) sendtime; //(timestamp/(fs/1000));
+			offset = htonl(offset);
+			fwrite(&length, 2, 1, out_file);
+			fwrite(&plen, 2, 1, out_file);
+			fwrite(&offset, 4, 1, out_file);
+#ifdef RANDOM_DATA
+			for (int k=0; k<12+enc_len; k++) {
+				rtp_data[k] = rand() + rand();
+			}
+#endif
+#ifdef RANDOM_PAYLOAD_DATA
+			for (int k=12; k<12+enc_len; k++) {
+				rtp_data[k] = rand() + rand();
+			}
+#endif
+			fwrite(rtp_data, 12 + enc_len, 1, out_file);
+#ifdef MULTIPLE_SAME_TIMESTAMP
+			} while ( (seqNo%REPEAT_PACKET_DISTANCE == 0) && (mult_pack++ < REPEAT_PACKET_COUNT) );
+#endif //MULTIPLE_SAME_TIMESTAMP
+
+#ifdef INSERT_OLD_PACKETS
+			if (packet_age >= OLD_PACKET*fs) {
+				if (!first_old_packet) {
+					// send the old packet
+					fwrite(&old_length, 2, 1, out_file);
+					fwrite(&old_plen, 2, 1, out_file);
+					fwrite(&offset, 4, 1, out_file);
+					fwrite(old_rtp_data, 12 + old_enc_len, 1, out_file);
+				}
+				// store current packet as old
+				old_length=length;
+				old_plen=plen;
+				memcpy(old_rtp_data,rtp_data,12+enc_len);
+				old_enc_len=enc_len;
+				first_old_packet=0;
+				packet_age=0;
+
+			}
+			packet_age += packet_size;
+#endif
+			
+            if(useRed) {
+                /* move data to redundancy store */
+#ifdef CODEC_ISAC
+                if(usedCodec==kDecoderISAC)
+                {
+                    assert(!usingStereo); // Cannot handle stereo yet
+                    red_len[0] = WebRtcIsac_GetRedPayload(ISAC_inst[0], (WebRtc_Word16*)red_data);
+                }
+                else
+                {
+#endif
+                    memcpy(red_data, &rtp_data[RTPheaderLen+red_len[0]], enc_len);
+                    red_len[0]=red_len[1];
+#ifdef CODEC_ISAC
+                }
+#endif
+                red_TS[0]=red_TS[1];
+                red_PT[0]=red_PT[1];
+            }
+            
+		}
+
+		/* read next frame */
+        len=fread(org_data,2,packet_size * numChannels,in_file) / numChannels;
+        /* de-interleave if stereo */
+        if ( usingStereo )
+        {
+            stereoDeInterleave(org_data, len * numChannels);
+        }
+
+        if (payloadType==NETEQ_CODEC_G722_PT)
+            timestamp+=len>>1;
+        else
+            timestamp+=len;
+
+		sendtime += (double) len/(fs/1000);
+	}
+	
+	NetEQTest_free_coders(usedCodec, numChannels);
+	fclose(in_file);
+	fclose(out_file);
+    printf("Done!\n");
+
+	return(0);
+}
+
+
+
+
+/****************/
+/* Subfunctions */
+/****************/
+
+void NetEQTest_GetCodec_and_PT(char * name, enum WebRtcNetEQDecoder *codec, int *PT, int frameLen, int *fs, int *bitrate, int *useRed) {
+
+	*bitrate = 0; /* Default bitrate setting */
+    *useRed = 0; /* Default no redundancy */
+
+	if(!strcmp(name,"pcmu")){
+		*codec=kDecoderPCMu;
+		*PT=NETEQ_CODEC_PCMU_PT;
+		*fs=8000;
+	}
+	else if(!strcmp(name,"pcma")){
+		*codec=kDecoderPCMa;
+		*PT=NETEQ_CODEC_PCMA_PT;
+		*fs=8000;
+	}
+	else if(!strcmp(name,"pcm16b")){
+		*codec=kDecoderPCM16B;
+		*PT=NETEQ_CODEC_PCM16B_PT;
+		*fs=8000;
+	}
+	else if(!strcmp(name,"pcm16b_wb")){
+		*codec=kDecoderPCM16Bwb;
+		*PT=NETEQ_CODEC_PCM16B_WB_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"pcm16b_swb32")){
+		*codec=kDecoderPCM16Bswb32kHz;
+		*PT=NETEQ_CODEC_PCM16B_SWB32KHZ_PT;
+		*fs=32000;
+	}
+	else if(!strcmp(name,"pcm16b_swb48")){
+		*codec=kDecoderPCM16Bswb48kHz;
+		*PT=NETEQ_CODEC_PCM16B_SWB48KHZ_PT;
+		*fs=48000;
+	}
+	else if(!strcmp(name,"g722")){
+		*codec=kDecoderG722;
+		*PT=NETEQ_CODEC_G722_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1_16")){
+		*codec=kDecoderG722_1_16;
+		*PT=NETEQ_CODEC_G722_1_16_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1_24")){
+		*codec=kDecoderG722_1_24;
+		*PT=NETEQ_CODEC_G722_1_24_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1_32")){
+		*codec=kDecoderG722_1_32;
+		*PT=NETEQ_CODEC_G722_1_32_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1C_24")){
+		*codec=kDecoderG722_1C_24;
+		*PT=NETEQ_CODEC_G722_1C_24_PT;
+		*fs=32000;
+	}
+	else if(!strcmp(name,"g722.1C_32")){
+		*codec=kDecoderG722_1C_32;
+		*PT=NETEQ_CODEC_G722_1C_32_PT;
+		*fs=32000;
+	}
+    else if(!strcmp(name,"g722.1C_48")){
+		*codec=kDecoderG722_1C_48;
+		*PT=NETEQ_CODEC_G722_1C_48_PT;
+		*fs=32000;
+	}
+    else if(!strcmp(name,"g726_16")){
+        *fs=8000;
+        *codec=kDecoderG726_16;
+        *PT=NETEQ_CODEC_G726_16_PT;
+        *bitrate=16;
+    }
+    else if(!strcmp(name,"g726_24")){
+        *fs=8000;
+        *codec=kDecoderG726_24;
+        *PT=NETEQ_CODEC_G726_24_PT;
+        *bitrate=24;
+    }
+    else if(!strcmp(name,"g726_32")){
+        *fs=8000;
+        *codec=kDecoderG726_32;
+        *PT=NETEQ_CODEC_G726_32_PT;
+        *bitrate=32;
+    }
+    else if(!strcmp(name,"g726_40")){
+        *fs=8000;
+        *codec=kDecoderG726_40;
+        *PT=NETEQ_CODEC_G726_40_PT;
+        *bitrate=40;
+    }
+	else if((!strcmp(name,"amr4.75k"))||(!strcmp(name,"amr5.15k"))||(!strcmp(name,"amr5.9k"))||
+			(!strcmp(name,"amr6.7k"))||(!strcmp(name,"amr7.4k"))||(!strcmp(name,"amr7.95k"))||
+			(!strcmp(name,"amr10.2k"))||(!strcmp(name,"amr12.2k"))) {
+		*fs=8000;
+		if (!strcmp(name,"amr4.75k"))
+			*bitrate = 0;
+		if (!strcmp(name,"amr5.15k"))
+			*bitrate = 1;
+		if (!strcmp(name,"amr5.9k"))
+			*bitrate = 2;
+		if (!strcmp(name,"amr6.7k"))
+			*bitrate = 3;
+		if (!strcmp(name,"amr7.4k"))
+			*bitrate = 4;
+		if (!strcmp(name,"amr7.95k"))
+			*bitrate = 5;
+		if (!strcmp(name,"amr10.2k"))
+			*bitrate = 6;
+		if (!strcmp(name,"amr12.2k"))
+			*bitrate = 7;
+		*codec=kDecoderAMR;
+		*PT=NETEQ_CODEC_AMR_PT;
+	}
+	else if((!strcmp(name,"amrwb7k"))||(!strcmp(name,"amrwb9k"))||(!strcmp(name,"amrwb12k"))||
+			(!strcmp(name,"amrwb14k"))||(!strcmp(name,"amrwb16k"))||(!strcmp(name,"amrwb18k"))||
+			(!strcmp(name,"amrwb20k"))||(!strcmp(name,"amrwb23k"))||(!strcmp(name,"amrwb24k"))) {
+		*fs=16000;
+		if (!strcmp(name,"amrwb7k"))
+			*bitrate = 7000;
+		if (!strcmp(name,"amrwb9k"))
+			*bitrate = 9000;
+		if (!strcmp(name,"amrwb12k"))
+			*bitrate = 12000;
+		if (!strcmp(name,"amrwb14k"))
+			*bitrate = 14000;
+		if (!strcmp(name,"amrwb16k"))
+			*bitrate = 16000;
+		if (!strcmp(name,"amrwb18k"))
+			*bitrate = 18000;
+		if (!strcmp(name,"amrwb20k"))
+			*bitrate = 20000;
+		if (!strcmp(name,"amrwb23k"))
+			*bitrate = 23000;
+		if (!strcmp(name,"amrwb24k"))
+			*bitrate = 24000;
+		*codec=kDecoderAMRWB;
+		*PT=NETEQ_CODEC_AMRWB_PT;
+	}
+	else if((!strcmp(name,"ilbc"))&&((frameLen%240==0)||(frameLen%160==0))){
+		*fs=8000;
+		*codec=kDecoderILBC;
+		*PT=NETEQ_CODEC_ILBC_PT;
+	}
+	else if(!strcmp(name,"isac")){
+		*fs=16000;
+		*codec=kDecoderISAC;
+		*PT=NETEQ_CODEC_ISAC_PT;
+	}
+    else if(!strcmp(name,"isacswb")){
+		*fs=32000;
+		*codec=kDecoderISACswb;
+		*PT=NETEQ_CODEC_ISACSWB_PT;
+	}
+	else if(!strcmp(name,"g729")){
+		*fs=8000;
+		*codec=kDecoderG729;
+		*PT=NETEQ_CODEC_G729_PT;
+	}
+	else if(!strcmp(name,"g729.1")){
+		*fs=16000;
+		*codec=kDecoderG729_1;
+		*PT=NETEQ_CODEC_G729_1_PT;
+	}
+	else if(!strcmp(name,"gsmfr")){
+		*fs=8000;
+		*codec=kDecoderGSMFR;
+		*PT=NETEQ_CODEC_GSMFR_PT;
+	}
+	else if(!strcmp(name,"speex8")){
+		*fs=8000;
+		*codec=kDecoderSPEEX_8;
+		*PT=NETEQ_CODEC_SPEEX8_PT;
+	}
+	else if(!strcmp(name,"speex16")){
+		*fs=16000;
+		*codec=kDecoderSPEEX_16;
+		*PT=NETEQ_CODEC_SPEEX16_PT;
+	}
+  else if(!strcmp(name,"celt32")){
+    *fs=32000;
+    *codec=kDecoderCELT_32;
+    *PT=NETEQ_CODEC_CELT32_PT;
+  }
+    else if(!strcmp(name,"red_pcm")){
+		*codec=kDecoderPCMa;
+		*PT=NETEQ_CODEC_PCMA_PT; /* this will be the PT for the sub-headers */
+		*fs=8000;
+        *useRed = 1;
+	} else if(!strcmp(name,"red_isac")){
+		*codec=kDecoderISAC;
+		*PT=NETEQ_CODEC_ISAC_PT; /* this will be the PT for the sub-headers */
+		*fs=16000;
+        *useRed = 1;
+    } else {
+		printf("Error: Not a supported codec (%s)\n", name);
+		exit(0);
+	}
+
+}
+
+
+
+
+int NetEQTest_init_coders(enum WebRtcNetEQDecoder coder, int enc_frameSize, int bitrate, int sampfreq , int vad, int numChannels){
+	
+	int ok=0;
+	
+    for (int k = 0; k < numChannels; k++) 
+    {
+        ok=WebRtcVad_Create(&VAD_inst[k]);
+        if (ok!=0) {
+            printf("Error: Couldn't allocate memory for VAD instance\n");
+            exit(0);
+        }
+        ok=WebRtcVad_Init(VAD_inst[k]);
+        if (ok==-1) {
+            printf("Error: Initialization of VAD struct failed\n");	
+            exit(0); 
+        }
+
+
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+        ok=WebRtcCng_CreateEnc(&CNGenc_inst[k]);
+        if (ok!=0) {
+            printf("Error: Couldn't allocate memory for CNG encoding instance\n");
+            exit(0);
+        }
+        if(sampfreq <= 16000) {
+            ok=WebRtcCng_InitEnc(CNGenc_inst[k],sampfreq, 200, 5);
+            if (ok==-1) {
+                printf("Error: Initialization of CNG struct failed. Error code %d\n", 
+                    WebRtcCng_GetErrorCodeEnc(CNGenc_inst[k]));	
+                exit(0); 
+            }
+        }
+#endif
+
+        switch (coder) {
+    case kDecoderReservedStart : // dummy codec
+#ifdef CODEC_PCM16B
+    case kDecoderPCM16B :
+#endif
+#ifdef CODEC_PCM16B_WB
+    case kDecoderPCM16Bwb :
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+    case kDecoderPCM16Bswb32kHz :
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+    case kDecoderPCM16Bswb48kHz :
+#endif
+#ifdef CODEC_G711
+    case kDecoderPCMu :
+    case kDecoderPCMa :
+#endif
+        // do nothing
+        break;
+#ifdef CODEC_G729
+    case kDecoderG729:
+        if (sampfreq==8000) {
+            if ((enc_frameSize==80)||(enc_frameSize==160)||(enc_frameSize==240)||(enc_frameSize==320)||(enc_frameSize==400)||(enc_frameSize==480)) {
+                ok=WebRtcG729_CreateEnc(&G729enc_inst[k]);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for G729 encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: g729 only supports 10, 20, 30, 40, 50 or 60 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG729_EncoderInit(G729enc_inst[k], vad);
+            if ((vad==1)&&(enc_frameSize!=80)) {
+                printf("\nError - This simulation only supports VAD for G729 at 10ms packets (not %dms)\n", (enc_frameSize>>3));
+            }
+        } else {
+            printf("\nError - g729 is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G729_1
+    case kDecoderG729_1:
+        if (sampfreq==16000) {
+            if ((enc_frameSize==320)||(enc_frameSize==640)||(enc_frameSize==960)
+                ) {
+                    ok=WebRtcG7291_Create(&G729_1_inst[k]);
+                    if (ok!=0) {
+                        printf("Error: Couldn't allocate memory for G.729.1 codec instance\n");
+                        exit(0);
+                    }
+                } else {
+                    printf("\nError: G.729.1 only supports 20, 40 or 60 ms!!\n\n");
+                    exit(0);
+                }
+                if (!(((bitrate >= 12000) && (bitrate <= 32000) && (bitrate%2000 == 0)) || (bitrate == 8000))) {
+                    /* must be 8, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, or 32 kbps */
+                    printf("\nError: G.729.1 bitrate must be 8000 or 12000--32000 in steps of 2000 bps\n");
+                    exit(0);
+                }
+                WebRtcG7291_EncoderInit(G729_1_inst[k], bitrate, 0 /* flag8kHz*/, 0 /*flagG729mode*/);
+        } else {
+            printf("\nError - G.729.1 input is always 16 kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_SPEEX_8
+    case kDecoderSPEEX_8 :
+        if (sampfreq==8000) {
+            if ((enc_frameSize==160)||(enc_frameSize==320)||(enc_frameSize==480)) {
+                ok=WebRtcSpeex_CreateEnc(&SPEEX8enc_inst[k], sampfreq);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for Speex encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: Speex only supports 20, 40, and 60 ms!!\n\n");
+                exit(0);
+            }
+            if ((vad==1)&&(enc_frameSize!=160)) {
+                printf("\nError - This simulation only supports VAD for Speex at 20ms packets (not %dms)\n", (enc_frameSize>>3));
+                vad=0;
+            }
+            ok=WebRtcSpeex_EncoderInit(SPEEX8enc_inst[k], 0/*vbr*/, 3 /*complexity*/, vad);
+            if (ok!=0) exit(0);
+        } else {
+            printf("\nError - Speex8 called with sample frequency other than 8 kHz.\n\n");
+        }
+        break;
+#endif
+#ifdef CODEC_SPEEX_16
+    case kDecoderSPEEX_16 :
+        if (sampfreq==16000) {
+            if ((enc_frameSize==320)||(enc_frameSize==640)||(enc_frameSize==960)) {
+                ok=WebRtcSpeex_CreateEnc(&SPEEX16enc_inst[k], sampfreq);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for Speex encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: Speex only supports 20, 40, and 60 ms!!\n\n");
+                exit(0);
+            }
+            if ((vad==1)&&(enc_frameSize!=320)) {
+                printf("\nError - This simulation only supports VAD for Speex at 20ms packets (not %dms)\n", (enc_frameSize>>4));
+                vad=0;
+            }
+            ok=WebRtcSpeex_EncoderInit(SPEEX16enc_inst[k], 0/*vbr*/, 3 /*complexity*/, vad);
+            if (ok!=0) exit(0);
+        } else {
+            printf("\nError - Speex16 called with sample frequency other than 16 kHz.\n\n");
+        }
+        break;
+#endif
+#ifdef CODEC_CELT_32
+    case kDecoderCELT_32 :
+        if (sampfreq==32000) {
+            if (enc_frameSize==320) {
+                ok=WebRtcCelt_CreateEnc(&CELT32enc_inst[k], 1 /*mono*/);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for Celt encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: Celt only supports 10 ms!!\n\n");
+                exit(0);
+            }
+            ok=WebRtcCelt_EncoderInit(CELT32enc_inst[k],  1 /*mono*/, 48000 /*bitrate*/);
+            if (ok!=0) exit(0);
+        } else {
+          printf("\nError - Celt32 called with sample frequency other than 32 kHz.\n\n");
+        }
+        break;
+#endif
+
+#ifdef CODEC_G722_1_16
+    case kDecoderG722_1_16 :
+        if (sampfreq==16000) {
+            ok=WebRtcG7221_CreateEnc16(&G722_1_16enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1 instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==320) {				
+            } else {
+                printf("\nError: G722.1 only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221_EncoderInit16((G722_1_16_encinst_t*)G722_1_16enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1_24
+    case kDecoderG722_1_24 :
+        if (sampfreq==16000) {
+            ok=WebRtcG7221_CreateEnc24(&G722_1_24enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1 instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==320) {
+            } else {
+                printf("\nError: G722.1 only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221_EncoderInit24((G722_1_24_encinst_t*)G722_1_24enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1_32
+    case kDecoderG722_1_32 :
+        if (sampfreq==16000) {
+            ok=WebRtcG7221_CreateEnc32(&G722_1_32enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1 instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==320) {
+            } else {
+                printf("\nError: G722.1 only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221_EncoderInit32((G722_1_32_encinst_t*)G722_1_32enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1C_24
+    case kDecoderG722_1C_24 :
+        if (sampfreq==32000) {
+            ok=WebRtcG7221C_CreateEnc24(&G722_1C_24enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1C instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==640) {
+            } else {
+                printf("\nError: G722.1 C only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221C_EncoderInit24((G722_1C_24_encinst_t*)G722_1C_24enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 C is only developed for 32kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1C_32
+    case kDecoderG722_1C_32 :
+        if (sampfreq==32000) {
+            ok=WebRtcG7221C_CreateEnc32(&G722_1C_32enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1C instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==640) {
+            } else {
+                printf("\nError: G722.1 C only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221C_EncoderInit32((G722_1C_32_encinst_t*)G722_1C_32enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 C is only developed for 32kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1C_48
+    case kDecoderG722_1C_48 :
+        if (sampfreq==32000) {
+            ok=WebRtcG7221C_CreateEnc48(&G722_1C_48enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1C instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==640) {
+            } else {
+                printf("\nError: G722.1 C only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221C_EncoderInit48((G722_1C_48_encinst_t*)G722_1C_48enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 C is only developed for 32kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722
+    case kDecoderG722 :
+        if (sampfreq==16000) {
+            if (enc_frameSize%2==0) {				
+            } else {
+                printf("\nError - g722 frames must have an even number of enc_frameSize\n");
+                exit(0);
+            }
+            WebRtcG722_CreateEncoder(&g722EncState[k]);
+            WebRtcG722_EncoderInit(g722EncState[k]);
+        } else {
+            printf("\nError - g722 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_AMR
+    case kDecoderAMR :
+        if (sampfreq==8000) {
+            ok=WebRtcAmr_CreateEnc(&AMRenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for AMR encoding instance\n");
+                exit(0);
+            }if ((enc_frameSize==160)||(enc_frameSize==320)||(enc_frameSize==480)) {				
+            } else {
+                printf("\nError - AMR must have a multiple of 160 enc_frameSize\n");
+                exit(0);
+            }
+            WebRtcAmr_EncoderInit(AMRenc_inst[k], vad);
+            WebRtcAmr_EncodeBitmode(AMRenc_inst[k], AMRBandwidthEfficient);
+            AMR_bitrate = bitrate;
+        } else {
+            printf("\nError - AMR is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_AMRWB
+    case kDecoderAMRWB : 
+        if (sampfreq==16000) {
+            ok=WebRtcAmrWb_CreateEnc(&AMRWBenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for AMRWB encoding instance\n");
+                exit(0);
+            }
+            if (((enc_frameSize/320)<0)||((enc_frameSize/320)>3)||((enc_frameSize%320)!=0)) {
+                printf("\nError - AMRwb must have frameSize of 20, 40 or 60ms\n");
+                exit(0);
+            }
+            WebRtcAmrWb_EncoderInit(AMRWBenc_inst[k], vad);
+            if (bitrate==7000) {
+                AMRWB_bitrate = AMRWB_MODE_7k;
+            } else if (bitrate==9000) {
+                AMRWB_bitrate = AMRWB_MODE_9k;
+            } else if (bitrate==12000) {
+                AMRWB_bitrate = AMRWB_MODE_12k;
+            } else if (bitrate==14000) {
+                AMRWB_bitrate = AMRWB_MODE_14k;
+            } else if (bitrate==16000) {
+                AMRWB_bitrate = AMRWB_MODE_16k;
+            } else if (bitrate==18000) {
+                AMRWB_bitrate = AMRWB_MODE_18k;
+            } else if (bitrate==20000) {
+                AMRWB_bitrate = AMRWB_MODE_20k;
+            } else if (bitrate==23000) {
+                AMRWB_bitrate = AMRWB_MODE_23k;
+            } else if (bitrate==24000) {
+                AMRWB_bitrate = AMRWB_MODE_24k;
+            }
+            WebRtcAmrWb_EncodeBitmode(AMRWBenc_inst[k], AMRBandwidthEfficient);
+
+        } else {
+            printf("\nError - AMRwb is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_ILBC
+    case kDecoderILBC :
+        if (sampfreq==8000) {
+            ok=WebRtcIlbcfix_EncoderCreate(&iLBCenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iLBC encoding instance\n");
+                exit(0);
+            }
+            if ((enc_frameSize==160)||(enc_frameSize==240)||(enc_frameSize==320)||(enc_frameSize==480)) {				
+            } else {
+                printf("\nError - iLBC only supports 160, 240, 320 and 480 enc_frameSize (20, 30, 40 and 60 ms)\n");
+                exit(0);
+            }
+            if ((enc_frameSize==160)||(enc_frameSize==320)) {
+                /* 20 ms version */
+                WebRtcIlbcfix_EncoderInit(iLBCenc_inst[k], 20);
+            } else {
+                /* 30 ms version */
+                WebRtcIlbcfix_EncoderInit(iLBCenc_inst[k], 30);
+            }
+        } else {
+            printf("\nError - iLBC is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_ISAC
+    case kDecoderISAC:
+        if (sampfreq==16000) {
+            ok=WebRtcIsac_Create(&ISAC_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iSAC instance\n");
+                exit(0);
+            }if ((enc_frameSize==480)||(enc_frameSize==960)) {
+            } else {
+                printf("\nError - iSAC only supports frameSize (30 and 60 ms)\n");
+                exit(0);
+            }
+            WebRtcIsac_EncoderInit(ISAC_inst[k],1);
+            if ((bitrate<10000)||(bitrate>32000)) {
+                printf("\nError - iSAC bitrate has to be between 10000 and 32000 bps (not %i)\n", bitrate);
+                exit(0);
+            }
+            WebRtcIsac_Control(ISAC_inst[k], bitrate, enc_frameSize>>4);
+        } else {
+            printf("\nError - iSAC only supports 480 or 960 enc_frameSize (30 or 60 ms)\n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+    case kDecoderISAC:
+        if (sampfreq==16000) {
+            ok=WebRtcIsacfix_Create(&ISAC_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iSAC instance\n");
+                exit(0);
+            }if ((enc_frameSize==480)||(enc_frameSize==960)) {
+            } else {
+                printf("\nError - iSAC only supports frameSize (30 and 60 ms)\n");
+                exit(0);
+            }
+            WebRtcIsacfix_EncoderInit(ISAC_inst[k],1);
+            if ((bitrate<10000)||(bitrate>32000)) {
+                printf("\nError - iSAC bitrate has to be between 10000 and 32000 bps (not %i)\n", bitrate);
+                exit(0);
+            }
+            WebRtcIsacfix_Control(ISAC_inst[k], bitrate, enc_frameSize>>4);
+        } else {
+            printf("\nError - iSAC only supports 480 or 960 enc_frameSize (30 or 60 ms)\n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_ISAC_SWB
+    case kDecoderISACswb:
+        if (sampfreq==32000) {
+            ok=WebRtcIsac_Create(&ISACSWB_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iSAC SWB instance\n");
+                exit(0);
+            }if (enc_frameSize==960) {
+            } else {
+                printf("\nError - iSAC SWB only supports frameSize 30 ms\n");
+                exit(0);
+            }
+            ok = WebRtcIsac_SetEncSampRate(ISACSWB_inst[k], kIsacSuperWideband);
+            if (ok!=0) {
+                printf("Error: Couldn't set sample rate for iSAC SWB instance\n");
+                exit(0);
+            }
+            WebRtcIsac_EncoderInit(ISACSWB_inst[k],1);
+            if ((bitrate<32000)||(bitrate>56000)) {
+                printf("\nError - iSAC SWB bitrate has to be between 32000 and 56000 bps (not %i)\n", bitrate);
+                exit(0);
+            }
+            WebRtcIsac_Control(ISACSWB_inst[k], bitrate, enc_frameSize>>5);
+        } else {
+            printf("\nError - iSAC SWB only supports 960 enc_frameSize (30 ms)\n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_GSMFR
+    case kDecoderGSMFR:
+        if (sampfreq==8000) {
+            ok=WebRtcGSMFR_CreateEnc(&GSMFRenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for GSM FR encoding instance\n");
+                exit(0);
+            }
+            if ((enc_frameSize==160)||(enc_frameSize==320)||(enc_frameSize==480)) {			
+            } else {
+                printf("\nError - GSM FR must have a multiple of 160 enc_frameSize\n");
+                exit(0);
+            }
+            WebRtcGSMFR_EncoderInit(GSMFRenc_inst[k], 0);
+        } else {
+            printf("\nError - GSM FR is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+    default :
+        printf("Error: unknown codec in call to NetEQTest_init_coders.\n");
+        exit(0);
+        break;
+        }
+
+        if (ok != 0) {
+            return(ok);
+        }
+    } // end for
+
+    return(0);
+}			
+
+
+
+
+int NetEQTest_free_coders(enum WebRtcNetEQDecoder coder, int numChannels) {
+
+    for (int k = 0; k < numChannels; k++)
+    {
+        WebRtcVad_Free(VAD_inst[k]);
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+        WebRtcCng_FreeEnc(CNGenc_inst[k]);
+#endif
+
+        switch (coder) 
+        {
+        case kDecoderReservedStart : // dummy codec
+#ifdef CODEC_PCM16B
+        case kDecoderPCM16B :
+#endif
+#ifdef CODEC_PCM16B_WB
+        case kDecoderPCM16Bwb :
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+        case kDecoderPCM16Bswb32kHz :
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+        case kDecoderPCM16Bswb48kHz :
+#endif
+#ifdef CODEC_G711
+        case kDecoderPCMu :
+        case kDecoderPCMa :
+#endif
+            // do nothing
+            break;
+#ifdef CODEC_G729
+        case kDecoderG729:
+            WebRtcG729_FreeEnc(G729enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G729_1
+        case kDecoderG729_1:
+            WebRtcG7291_Free(G729_1_inst[k]);
+            break;
+#endif
+#ifdef CODEC_SPEEX_8
+        case kDecoderSPEEX_8 :
+            WebRtcSpeex_FreeEnc(SPEEX8enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_SPEEX_16
+        case kDecoderSPEEX_16 :
+            WebRtcSpeex_FreeEnc(SPEEX16enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_CELT_32
+        case kDecoderCELT_32 :
+            WebRtcCelt_FreeEnc(CELT32enc_inst[k]);
+            break;
+#endif
+
+#ifdef CODEC_G722_1_16
+        case kDecoderG722_1_16 :
+            WebRtcG7221_FreeEnc16(G722_1_16enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1_24
+        case kDecoderG722_1_24 :
+            WebRtcG7221_FreeEnc24(G722_1_24enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1_32
+        case kDecoderG722_1_32 :
+            WebRtcG7221_FreeEnc32(G722_1_32enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1C_24
+        case kDecoderG722_1C_24 :
+            WebRtcG7221C_FreeEnc24(G722_1C_24enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1C_32
+        case kDecoderG722_1C_32 :
+            WebRtcG7221C_FreeEnc32(G722_1C_32enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1C_48
+        case kDecoderG722_1C_48 :
+            WebRtcG7221C_FreeEnc48(G722_1C_48enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722
+        case kDecoderG722 :
+            WebRtcG722_FreeEncoder(g722EncState[k]);
+            break;
+#endif
+#ifdef CODEC_AMR
+        case kDecoderAMR :
+            WebRtcAmr_FreeEnc(AMRenc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_AMRWB
+        case kDecoderAMRWB : 
+            WebRtcAmrWb_FreeEnc(AMRWBenc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_ILBC
+        case kDecoderILBC :
+            WebRtcIlbcfix_EncoderFree(iLBCenc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_ISAC
+        case kDecoderISAC:
+            WebRtcIsac_Free(ISAC_inst[k]);
+            break;
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+        case kDecoderISAC:
+            WebRtcIsacfix_Free(ISAC_inst[k]);
+            break;
+#endif
+#ifdef CODEC_ISAC_SWB
+        case kDecoderISACswb:
+            WebRtcIsac_Free(ISACSWB_inst[k]);
+            break;
+#endif
+#ifdef CODEC_GSMFR
+        case kDecoderGSMFR:
+            WebRtcGSMFR_FreeEnc(GSMFRenc_inst[k]);
+            break;
+#endif
+        default :
+            printf("Error: unknown codec in call to NetEQTest_init_coders.\n");
+            exit(0);
+            break;
+        }
+    }
+
+	return(0);
+}
+
+
+
+
+
+
+int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate , 
+						  int * vad, int useVAD, int bitrate, int numChannels){
+
+	short cdlen = 0;
+	WebRtc_Word16 *tempdata;
+	static int first_cng=1;
+	WebRtc_Word16 tempLen;
+
+	*vad =1;
+
+    // check VAD first
+	if(useVAD&&
+			   (coder!=kDecoderG729)&&(coder!=kDecoderAMR)&&
+			   (coder!=kDecoderSPEEX_8)&&(coder!=kDecoderSPEEX_16))
+    {
+        *vad = 0;
+
+        for (int k = 0; k < numChannels; k++)
+        {
+            tempLen = frameLen;
+            tempdata = &indata[k*frameLen];
+            int localVad=0;
+            /* Partition the signal and test each chunk for VAD.
+            All chunks must be VAD=0 to produce a total VAD=0. */
+            while (tempLen >= 10*sampleRate/1000) {
+                if ((tempLen % 30*sampleRate/1000) == 0) { // tempLen is multiple of 30ms
+                    localVad |= WebRtcVad_Process(VAD_inst[k] ,sampleRate, tempdata, 30*sampleRate/1000);
+                    tempdata += 30*sampleRate/1000;
+                    tempLen -= 30*sampleRate/1000;
+                }
+                else if (tempLen >= 20*sampleRate/1000) { // tempLen >= 20ms
+                    localVad |= WebRtcVad_Process(VAD_inst[k] ,sampleRate, tempdata, 20*sampleRate/1000);
+                    tempdata += 20*sampleRate/1000;
+                    tempLen -= 20*sampleRate/1000;
+                }
+                else { // use 10ms
+                    localVad |= WebRtcVad_Process(VAD_inst[k] ,sampleRate, tempdata, 10*sampleRate/1000);
+                    tempdata += 10*sampleRate/1000;
+                    tempLen -= 10*sampleRate/1000;
+                }
+            }
+
+            // aggregate all VAD decisions over all channels
+            *vad |= localVad;
+        }
+
+        if(!*vad){
+            // all channels are silent
+            cdlen = 0;
+            for (int k = 0; k < numChannels; k++)
+            {
+                WebRtcCng_Encode(CNGenc_inst[k],&indata[k*frameLen], (frameLen <= 640 ? frameLen : 640) /* max 640 */,
+                    encoded,&tempLen,first_cng);
+                encoded += tempLen;
+                cdlen += tempLen;
+            }
+            *vad=0;
+            first_cng=0;
+            return(cdlen);
+        }
+	}
+
+
+    // loop over all channels
+    int totalLen = 0;
+
+    for (int k = 0; k < numChannels; k++)
+    {
+        /* Encode with the selected coder type */
+        if (coder==kDecoderPCMu) { /*g711 u-law */
+#ifdef CODEC_G711
+            cdlen = WebRtcG711_EncodeU(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+#endif
+        }  
+        else if (coder==kDecoderPCMa) { /*g711 A-law */
+#ifdef CODEC_G711
+            cdlen = WebRtcG711_EncodeA(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+        }
+#endif
+#ifdef CODEC_PCM16B
+        else if ((coder==kDecoderPCM16B)||(coder==kDecoderPCM16Bwb)||
+            (coder==kDecoderPCM16Bswb32kHz)||(coder==kDecoderPCM16Bswb48kHz)) { /*pcm16b (8kHz, 16kHz, 32kHz or 48kHz) */
+                cdlen = WebRtcPcm16b_EncodeW16(indata, frameLen, (WebRtc_Word16*) encoded);
+            }
+#endif
+#ifdef CODEC_G722
+        else if (coder==kDecoderG722) { /*g722 */
+            cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (WebRtc_Word16*)encoded);
+            cdlen=frameLen>>1;
+        }
+#endif
+#ifdef CODEC_G722_1_16
+        else if (coder==kDecoderG722_1_16) { /* g722.1 16kbit/s mode */
+            cdlen=WebRtcG7221_Encode16((G722_1_16_encinst_t*)G722_1_16enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1_24
+        else if (coder==kDecoderG722_1_24) { /* g722.1 24kbit/s mode*/
+            cdlen=WebRtcG7221_Encode24((G722_1_24_encinst_t*)G722_1_24enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1_32
+        else if (coder==kDecoderG722_1_32) { /* g722.1 32kbit/s mode */
+            cdlen=WebRtcG7221_Encode32((G722_1_32_encinst_t*)G722_1_32enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1C_24
+        else if (coder==kDecoderG722_1C_24) { /* g722.1 32 kHz 24kbit/s mode*/
+            cdlen=WebRtcG7221C_Encode24((G722_1C_24_encinst_t*)G722_1C_24enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1C_32
+        else if (coder==kDecoderG722_1C_32) { /* g722.1 32 kHz 32kbit/s mode */
+            cdlen=WebRtcG7221C_Encode32((G722_1C_32_encinst_t*)G722_1C_32enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1C_48
+        else if (coder==kDecoderG722_1C_48) { /* g722.1 32 kHz 48kbit/s mode */
+            cdlen=WebRtcG7221C_Encode48((G722_1C_48_encinst_t*)G722_1C_48enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G729
+        else if (coder==kDecoderG729) { /*g729 */
+            WebRtc_Word16 dataPos=0;
+            WebRtc_Word16 len=0;
+            cdlen = 0;
+            for (dataPos=0;dataPos<frameLen;dataPos+=80) {
+                len=WebRtcG729_Encode(G729enc_inst[k], &indata[dataPos], 80, (WebRtc_Word16*)(&encoded[cdlen]));
+                cdlen += len;
+            }
+        }
+#endif
+#ifdef CODEC_G729_1
+        else if (coder==kDecoderG729_1) { /*g729.1 */
+            WebRtc_Word16 dataPos=0;
+            WebRtc_Word16 len=0;
+            cdlen = 0;
+            for (dataPos=0;dataPos<frameLen;dataPos+=160) {
+                len=WebRtcG7291_Encode(G729_1_inst[k], &indata[dataPos], (WebRtc_Word16*)(&encoded[cdlen]), bitrate, frameLen/320 /* num 20ms frames*/);
+                cdlen += len;
+            }
+        }
+#endif
+#ifdef CODEC_AMR
+        else if (coder==kDecoderAMR) { /*AMR */
+            cdlen=WebRtcAmr_Encode(AMRenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded, AMR_bitrate);
+        }
+#endif
+#ifdef CODEC_AMRWB
+        else if (coder==kDecoderAMRWB) { /*AMR-wb */
+            cdlen=WebRtcAmrWb_Encode(AMRWBenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded, AMRWB_bitrate);
+        }
+#endif
+#ifdef CODEC_ILBC
+        else if (coder==kDecoderILBC) { /*iLBC */
+            cdlen=WebRtcIlbcfix_Encode(iLBCenc_inst[k], indata,frameLen,(WebRtc_Word16*)encoded);
+        }
+#endif
+#if (defined(CODEC_ISAC) || defined(NETEQ_ISACFIX_CODEC)) // TODO(hlundin): remove all NETEQ_ISACFIX_CODEC
+        else if (coder==kDecoderISAC) { /*iSAC */
+            int noOfCalls=0;
+            cdlen=0;
+            while (cdlen<=0) {
+#ifdef CODEC_ISAC /* floating point */
+                cdlen=WebRtcIsac_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+#else /* fixed point */
+                cdlen=WebRtcIsacfix_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+#endif
+                noOfCalls++;
+            }
+        }
+#endif
+#ifdef CODEC_ISAC_SWB
+        else if (coder==kDecoderISACswb) { /* iSAC SWB */
+            int noOfCalls=0;
+            cdlen=0;
+            while (cdlen<=0) {
+                cdlen=WebRtcIsac_Encode(ISACSWB_inst[k],&indata[noOfCalls*320],(WebRtc_Word16*)encoded);
+                noOfCalls++;
+            }
+        }
+#endif
+#ifdef CODEC_GSMFR
+        else if (coder==kDecoderGSMFR) { /* GSM FR */
+            cdlen=WebRtcGSMFR_Encode(GSMFRenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_SPEEX_8
+        else if (coder==kDecoderSPEEX_8) { /* Speex */
+            int encodedLen = 0;
+            int retVal = 1;
+            while (retVal == 1 && encodedLen < frameLen) {
+                retVal = WebRtcSpeex_Encode(SPEEX8enc_inst[k], &indata[encodedLen], 15000);
+                encodedLen += 20*8; /* 20 ms */
+            }
+            if( (retVal == 0 && encodedLen != frameLen) || retVal < 0) {
+                printf("Error encoding speex frame!\n");
+                exit(0);
+            }
+            cdlen=WebRtcSpeex_GetBitstream(SPEEX8enc_inst[k], (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_SPEEX_16
+        else if (coder==kDecoderSPEEX_16) { /* Speex */
+            int encodedLen = 0;
+            int retVal = 1;
+            while (retVal == 1 && encodedLen < frameLen) {
+                retVal = WebRtcSpeex_Encode(SPEEX16enc_inst[k], &indata[encodedLen], 15000);
+                encodedLen += 20*16; /* 20 ms */
+            }
+            if( (retVal == 0 && encodedLen != frameLen) || retVal < 0) {
+                printf("Error encoding speex frame!\n");
+                exit(0);
+            }
+            cdlen=WebRtcSpeex_GetBitstream(SPEEX16enc_inst[k], (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_CELT_32
+        else if (coder==kDecoderCELT_32) { /* Celt */
+            int encodedLen = 0;
+            cdlen = 0;
+            while (cdlen <= 0) {
+                cdlen = WebRtcCelt_Encode(CELT32enc_inst[k], &indata[encodedLen], encoded);
+                encodedLen += 10*32; /* 10 ms */
+            }
+            if( (encodedLen != frameLen) || cdlen < 0) {
+                printf("Error encoding Celt frame!\n");
+                exit(0);
+            }
+        }
+#endif
+
+        indata += frameLen;
+        encoded += cdlen;
+        totalLen += cdlen;
+
+    } // end for
+
+	first_cng=1;
+	return(totalLen);
+}
+
+
+
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc){
+			
+			rtp_data[0]=(unsigned char)0x80;
+			rtp_data[1]=(unsigned char)(payloadType & 0xFF);
+			rtp_data[2]=(unsigned char)((seqNo>>8)&0xFF);
+			rtp_data[3]=(unsigned char)((seqNo)&0xFF);
+			rtp_data[4]=(unsigned char)((timestamp>>24)&0xFF);
+			rtp_data[5]=(unsigned char)((timestamp>>16)&0xFF);
+
+			rtp_data[6]=(unsigned char)((timestamp>>8)&0xFF); 
+			rtp_data[7]=(unsigned char)(timestamp & 0xFF);
+
+			rtp_data[8]=(unsigned char)((ssrc>>24)&0xFF);
+			rtp_data[9]=(unsigned char)((ssrc>>16)&0xFF);
+
+			rtp_data[10]=(unsigned char)((ssrc>>8)&0xFF);
+			rtp_data[11]=(unsigned char)(ssrc & 0xFF);
+}
+
+
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
+                        int seqNo, WebRtc_UWord32 ssrc)
+{
+
+    int i;
+    unsigned char *rtpPointer;
+    WebRtc_UWord16 offset;
+
+    /* first create "standard" RTP header */
+    makeRTPheader(rtp_data, NETEQ_CODEC_RED_PT, seqNo, timestamp[numPayloads-1], ssrc);
+
+    rtpPointer = &rtp_data[12];
+
+    /* add one sub-header for each redundant payload (not the primary) */
+    for(i=0; i<numPayloads-1; i++) {                                            /* |0 1 2 3 4 5 6 7| */
+        if(blockLen[i] > 0) {
+            offset = (WebRtc_UWord16) (timestamp[numPayloads-1] - timestamp[i]);
+
+            rtpPointer[0] = (unsigned char) ( 0x80 | (0x7F & payloadType[i]) ); /* |F|   block PT  | */
+            rtpPointer[1] = (unsigned char) ((offset >> 6) & 0xFF);             /* |  timestamp-   | */
+            rtpPointer[2] = (unsigned char) ( ((offset & 0x3F)<<2) |
+                ( (blockLen[i]>>8) & 0x03 ) );                                  /* | -offset   |bl-| */
+            rtpPointer[3] = (unsigned char) ( blockLen[i] & 0xFF );             /* | -ock length   | */
+
+            rtpPointer += 4;
+        }
+    }
+
+    /* last sub-header */
+    rtpPointer[0]= (unsigned char) (0x00 | (0x7F&payloadType[numPayloads-1]));/* |F|   block PT  | */
+    rtpPointer += 1;
+
+    return(rtpPointer - rtp_data); /* length of header in bytes */
+}
+
+
+
int makeDTMFpayload(unsigned char* payload_data, int Event, int End, int Volume, int Duration) {
    /* Fill in a 4-byte telephone-event (DTMF) payload:
     * byte 0 = event code, byte 1 = End flag (bit 7) | reserved (bit 6,
     * always 0) | volume, bytes 2-3 = duration, big-endian.
     * Returns the payload length (always 4). */
    const unsigned char endBit = (End == 0) ? 0x00 : 0x80;

    payload_data[0] = (unsigned char) Event;
    payload_data[1] = (unsigned char) (endBit | (unsigned char) Volume);
    //Duration equals 8 times time_ms, default is 8000 Hz.
    payload_data[2] = (unsigned char) ((Duration >> 8) & 0xFF);
    payload_data[3] = (unsigned char) (Duration & 0xFF);
    return 4;
}
+
+void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples)
+{
+
+    WebRtc_Word16 *tempVec;
+    WebRtc_Word16 *readPtr, *writeL, *writeR;
+
+    if (numSamples <= 0)
+        return;
+
+    tempVec = (WebRtc_Word16 *) malloc(sizeof(WebRtc_Word16) * numSamples);
+    if (tempVec == NULL) {
+        printf("Error allocating memory\n");
+        exit(0);
+    }
+
+    memcpy(tempVec, audioSamples, numSamples*sizeof(WebRtc_Word16));
+
+    writeL = audioSamples;
+    writeR = &audioSamples[numSamples/2];
+    readPtr = tempVec;
+
+    for (int k = 0; k < numSamples; k += 2)
+    {
+        *writeL = *readPtr;
+        readPtr++;
+        *writeR = *readPtr;
+        readPtr++;
+        writeL++;
+        writeR++;
+    }
+
+    free(tempVec);
+
+}
+
+
void stereoInterleave(unsigned char* data, int dataLen, int stride)
{
    /* In-place conversion of a block-ordered stereo buffer
     * [L0 L1 ... | R0 R1 ...] into interleaved order [L0 R0 L1 R1 ...],
     * where each sample occupies 'stride' bytes and dataLen is the total
     * byte count of both channels. Exits on invalid arguments. */

    unsigned char *ptrL, *ptrR;
    unsigned char temp[10];

    if (stride > 10)
    {
        /* temp[] can hold at most 10 bytes per sample */
        exit(0);
    }

    if (dataLen % (2 * stride) != 0)
    {
        // must be even number of samples
        // Bug fix: original tested "dataLen % 1 != 0", which is always
        // false, so the guard never fired; require dataLen to contain a
        // whole number of left/right sample pairs.
        printf("Error: cannot interleave odd sample number\n");
        exit(0);
    }

    ptrL = data + stride;        /* insertion point for the next right sample */
    ptrR = &data[dataLen/2];     /* next right-channel sample to move */

    while (ptrL < ptrR) {
        // copy from right pointer to temp
        memcpy(temp, ptrR, stride);

        // shift data between pointers
        memmove(ptrL + stride, ptrL, ptrR - ptrL);

        // copy from temp to left pointer
        memcpy(ptrL, temp, stride);

        // advance pointers
        ptrL += stride*2;
        ptrR += stride;
    }

}
diff --git a/trunk/src/modules/audio_coding/neteq/test/RTPjitter.cc b/trunk/src/modules/audio_coding/neteq/test/RTPjitter.cc
new file mode 100644
index 0000000..795111f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/RTPjitter.cc
@@ -0,0 +1,196 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//TODO(hlundin): Reformat file to meet style guide.
+
+/* header includes */
+#include "typedefs.h"
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef WIN32
+#include <winsock2.h>
+#include <io.h>
+#endif
+#ifdef WEBRTC_LINUX
+#include <netinet/in.h>
+#endif
+#include <search.h>
+#include <float.h>
+
+#include "gtest/gtest.h"
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define FIRSTLINELEN 40
+#define CHECK_ZERO(a) {int errCode = a; if((errCode)!=0){fprintf(stderr,"\n %s \n line: %d \n error at %s\n Error Code = %d\n",__FILE__,__LINE__,#a, WebRtcNetEQ_GetErrorCode(inst)); exit(0);}}
+#define CHECK_NOT_NULL(a) if((a)==NULL){fprintf(stderr,"\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
+
/* One entry from the packet-timing (dat) file: the packet's arrival time
   paired with its original position in the RTP input, so entries can be
   sorted by time without losing track of which packet they belong to. */
struct arr_time {
	float time;        /* packet arrival time in ms (read from the dat file) */
	WebRtc_UWord32 ix; /* index of the packet in the input RTP stream */
};
+
int filelen(FILE *fid)
{
  /* Return the total size in bytes of the open file, leaving the stream's
     current position untouched. Returns -1 if fid is NULL or the current
     position cannot be saved. */
  fpos_t saved_pos;

  if (fid == NULL || fgetpos(fid, &saved_pos) != 0) {
    return -1;
  }

  fseek(fid, 0, SEEK_END);
  int length = (int) ftell(fid);

  /* restore the caller's position */
  fsetpos(fid, &saved_pos);

  return length;
}
+
+int compare_arr_time(const void *x, const void *y);
+
+int main(int argc, char* argv[])
+{
+	unsigned int	dat_len, rtp_len, Npack, k;
+	arr_time		*time_vec;
+	char			firstline[FIRSTLINELEN];
+	unsigned char	*rtp_vec = NULL, **packet_ptr, *temp_packet;
+	const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+	WebRtc_UWord16			len;
+	WebRtc_UWord32			*offset;
+
+/* check number of parameters */
+	if (argc != 4) {
+		/* print help text and exit */
+		printf("Apply jitter on RTP stream.\n");
+		printf("The program reads an RTP stream and packet timing from two files.\n");
+		printf("The RTP stream is modified to have the same jitter as described in the timing files.\n");
+		printf("The format of the RTP stream file should be the same as for rtpplay,\n");
+		printf("and can be obtained e.g., from Ethereal by using\n");
+		printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
+		printf("Usage:\n\n");
+		printf("%s RTP_infile dat_file RTP_outfile\n", argv[0]);
+		printf("where:\n");
+
+		printf("RTP_infile       : RTP stream input file\n\n");
+
+		printf("dat_file         : file with packet arrival times in ms\n\n");
+
+		printf("RTP_outfile      : RTP stream output file\n\n");
+
+		return(0);
+	}
+
+	FILE* in_file=fopen(argv[1],"rb");
+	CHECK_NOT_NULL(in_file);
+	printf("Input file: %s\n",argv[1]);
+	FILE* dat_file=fopen(argv[2],"rb");
+	CHECK_NOT_NULL(dat_file);
+	printf("Dat-file: %s\n",argv[2]);
+	FILE* out_file=fopen(argv[3],"wb");
+	CHECK_NOT_NULL(out_file);
+	printf("Output file: %s\n\n",argv[3]);
+	
+	time_vec = (arr_time *) malloc(sizeof(arr_time)*(filelen(dat_file)/sizeof(float)) + 1000); // add 1000 bytes to avoid (rare) strange error
+	if (time_vec==NULL) {
+		fprintf(stderr, "Error: could not allocate memory for reading dat file\n");
+		goto closing;
+	}
+
+	dat_len=0;
+	while(fread(&(time_vec[dat_len].time),sizeof(float),1,dat_file)>0) {
+		time_vec[dat_len].ix=dat_len;
+		dat_len++;
+	}
+	
+	qsort(time_vec,dat_len,sizeof(arr_time),compare_arr_time);
+
+
+	rtp_vec = (unsigned char *) malloc(sizeof(unsigned char)*filelen(in_file));
+	if (rtp_vec==NULL) {
+		fprintf(stderr,"Error: could not allocate memory for reading rtp file\n");
+		goto closing;
+	}
+
+	// read file header and write directly to output file
+	EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, in_file) != NULL);
+	EXPECT_GT(fputs(firstline, out_file), 0);
+	EXPECT_EQ(kRtpDumpHeaderSize, fread(firstline, 1, kRtpDumpHeaderSize,
+	                                    in_file));
+	EXPECT_EQ(kRtpDumpHeaderSize, fwrite(firstline, 1, kRtpDumpHeaderSize,
+	                                     out_file));
+
+	// read all RTP packets into vector
+	rtp_len=0;
+	Npack=0;
+	len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
+	while(len==2) {
+		len = ntohs(*((WebRtc_UWord16 *)(rtp_vec + rtp_len)));
+		rtp_len += 2;
+		if(fread(&rtp_vec[rtp_len], sizeof(unsigned char), len-2, in_file)!=(unsigned) (len-2)) {
+			fprintf(stderr,"Error: currupt packet length\n");
+			goto closing;
+		}
+		rtp_len += len-2;
+		Npack++;
+		len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
+	}
+
+	packet_ptr = (unsigned char **) malloc(Npack*sizeof(unsigned char*));
+
+	packet_ptr[0]=rtp_vec;
+	k=1;
+	while(k<Npack) {
+		len = ntohs(*((WebRtc_UWord16 *) packet_ptr[k-1]));
+		packet_ptr[k]=packet_ptr[k-1]+len;
+		k++;
+	}
+
+	for(k=0; k<dat_len && k<Npack; k++) {
+		if(time_vec[k].time < FLT_MAX && time_vec[k].ix < Npack){ 
+			temp_packet = packet_ptr[time_vec[k].ix];
+			offset = (WebRtc_UWord32 *) (temp_packet+4);
+			if ( time_vec[k].time >= 0 ) {
+				*offset = htonl((WebRtc_UWord32) time_vec[k].time);
+			}
+			else {
+				*offset = htonl((WebRtc_UWord32) 0);
+				fprintf(stderr, "Warning: negative receive time in dat file transformed to 0.\n");
+			}
+
+			// write packet to file
+			fwrite(temp_packet, sizeof(unsigned char), ntohs(*((WebRtc_UWord16*) temp_packet)), out_file);
+		}
+	}
+
+
+closing:
+	free(time_vec);
+	free(rtp_vec);
+	fclose(in_file);
+	fclose(dat_file);
+	fclose(out_file);
+
+	return(0);
+}
+
+
+
+int compare_arr_time(const void *xp, const void *yp) {
+
+	if(((arr_time *)xp)->time == ((arr_time *)yp)->time)
+		return(0);
+	else if(((arr_time *)xp)->time > ((arr_time *)yp)->time)
+		return(1);
+
+	return(-1);
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/RTPtimeshift.cc b/trunk/src/modules/audio_coding/neteq/test/RTPtimeshift.cc
new file mode 100644
index 0000000..dc7ff9f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/RTPtimeshift.cc
@@ -0,0 +1,97 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <stdio.h>
+#include <vector>
+
+#include "NETEQTEST_RTPpacket.h"
+#include "gtest/gtest.h"
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define FIRSTLINELEN 40
+
+
+int main(int argc, char* argv[])
+{
+    if(argc < 4 || argc > 6)
+    {
+        printf("Usage: RTPtimeshift in.rtp out.rtp newStartTS [newStartSN [newStartArrTime]]\n");
+        exit(1);
+    }
+
+	FILE *inFile=fopen(argv[1],"rb");
+	if (!inFile)
+    {
+        printf("Cannot open input file %s\n", argv[1]);
+        return(-1);
+    }
+    printf("Input RTP file: %s\n",argv[1]);
+
+	FILE *outFile=fopen(argv[2],"wb");
+	if (!outFile)
+    {
+        printf("Cannot open output file %s\n", argv[2]);
+        return(-1);
+    }
+	printf("Output RTP file: %s\n\n",argv[2]);
+
+    // read file header and write directly to output file
+	const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+	char firstline[FIRSTLINELEN];
+	EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, inFile) != NULL);
+	EXPECT_GT(fputs(firstline, outFile), 0);
+	EXPECT_EQ(kRtpDumpHeaderSize,
+	          fread(firstline, 1, kRtpDumpHeaderSize, inFile));
+	EXPECT_EQ(kRtpDumpHeaderSize,
+	          fwrite(firstline, 1, kRtpDumpHeaderSize, outFile));
+	NETEQTEST_RTPpacket packet;
+	int packLen = packet.readFromFile(inFile);
+	if (packLen < 0)
+	{
+	    exit(1);
+	}
+
+    // get new start TS and start SeqNo from arguments
+	WebRtc_UWord32 TSdiff = atoi(argv[3]) - packet.timeStamp();
+	WebRtc_UWord16 SNdiff = 0;
+	WebRtc_UWord32 ATdiff = 0;
+    if (argc > 4)
+    {
+        if (argv[4] >= 0)
+            SNdiff = atoi(argv[4]) - packet.sequenceNumber();
+        if (argc > 5)
+        {
+            if (argv[5] >= 0)
+                ATdiff = atoi(argv[5]) - packet.time();
+        }
+    }
+
+    while (packLen >= 0)
+    {
+        
+        packet.setTimeStamp(packet.timeStamp() + TSdiff);
+        packet.setSequenceNumber(packet.sequenceNumber() + SNdiff);
+        packet.setTime(packet.time() + ATdiff);
+
+        packet.writeToFile(outFile);
+
+        packLen = packet.readFromFile(inFile);
+
+    }
+
+    fclose(inFile);
+    fclose(outFile);
+
+    return 0;
+}
diff --git a/trunk/src/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m b/trunk/src/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m
new file mode 100644
index 0000000..77b394f
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m
@@ -0,0 +1,191 @@
function outStruct = parse_delay_file(file)
% PARSE_DELAY_FILE  Parse a binary NetEQ delay-logging file (version 2.0).
%   outStruct = parse_delay_file(file) reads the signal records written by
%   the NetEQ delay-logging facility and returns a struct with per-packet
%   vectors: ts (RTP timestamp), sn (sequence number), pt (payload type),
%   plen (payload length), arrival and decode (clock values from the log;
%   presumably ms -- TODO confirm), playout_delay, optbuf, plus sample-rate
%   change info (fs, fschange_ts) and accumulated time-scaling totals in ms
%   (tot_expand, tot_accelerate, tot_preemptive).

fid = fopen(file, 'rb');
if fid == -1
    error('Cannot open file %s', file);
end

% first text line identifies the format and version
textline = fgetl(fid);
if ~strncmp(textline, '#!NetEQ_Delay_Logging', 21)
    error('Wrong file format');
end

ver = sscanf(textline, '#!NetEQ_Delay_Logging%d.%d');
if ~all(ver == [2; 0])
    error('Wrong version of delay logging function')
end


% the file ends with "End of file" preceded by an int32 packet count;
% verify the trailer before trusting the count
start_pos = ftell(fid);
fseek(fid, -12, 'eof');
textline = fgetl(fid);
% NOTE(review): n=21 exceeds length('End of file')==11; MATLAB strncmp
% returns false when either string is shorter than n, so this check looks
% like it can never succeed for an exact 11-char trailer line -- confirm
% whether n should be 11 here (21 appears copy-pasted from the header check).
if ~strncmp(textline, 'End of file', 21)
    error('File ending is not correct. Seems like the simulation ended abnormally.');
end

fseek(fid,-12-4, 'eof');
Npackets = fread(fid, 1, 'int32');
fseek(fid, start_pos, 'bof');

% pre-allocate per-packet vectors; arrival/decode default to NaN so
% packets that were never received/decoded stay marked as missing
rtpts = zeros(Npackets, 1);
seqno = zeros(Npackets, 1);
pt = zeros(Npackets, 1);
plen = zeros(Npackets, 1);
recin_t = nan*ones(Npackets, 1);
decode_t = nan*ones(Npackets, 1);
playout_delay = zeros(Npackets, 1);
optbuf = zeros(Npackets, 1);

% parser state
fs_ix = 1;           % next free slot in fsvec/fschange_ts
clock = 0;           % most recent clock value from a CLOCK record
ts_ix = 1;           % next free slot in the per-packet vectors
ended = 0;           % set when the EOF record is seen
late_packets = 0;    % packets decoded (one-descriptor) before being received
fs_now = 8000;       % current sample rate in Hz
last_decode_k = 0;   % index of the most recently decoded packet
tot_expand = 0;
tot_accelerate = 0;
tot_preemptive = 0;

% each record is an int32 signal id followed by signal-specific data
while not(ended)
    signal = fread(fid, 1, '*int32');
    
    switch signal
        case 3 % NETEQ_DELAY_LOGGING_SIGNAL_CLOCK
            clock = fread(fid, 1, '*float32');
            
            % keep on reading batches of M until the signal is no longer "3"
            % read int32 + float32 in one go
            % this is to save execution time
            temp = [3; 0];
            M = 120;
            while all(temp(1,:) == 3)
                fp = ftell(fid);
                temp = fread(fid, [2 M], '*int32');
            end
            
            % back up to last clock event
            fseek(fid, fp - ftell(fid) + ...
                (find(temp(1,:) ~= 3, 1 ) - 2) * 2 * 4 + 4, 'cof');
            % read the last clock value
            clock = fread(fid, 1, '*float32');
            
        case 1 % NETEQ_DELAY_LOGGING_SIGNAL_RECIN
            temp_ts = fread(fid, 1, 'uint32');
            
            % if some packets were decoded before being received (see
            % case 12), try to match this arrival to such an entry first
            if late_packets > 0
                temp_ix = ts_ix - 1;
                while (temp_ix >= 1) && (rtpts(temp_ix) ~= temp_ts)
                    % TODO(hlundin): use matlab vector search instead?
                    temp_ix = temp_ix - 1;
                end
                
                if temp_ix >= 1
                    % the ts was found in the vector
                    late_packets = late_packets - 1;
                else
                    temp_ix = ts_ix;
                    ts_ix = ts_ix + 1;
                end
            else
                temp_ix = ts_ix;
                ts_ix = ts_ix + 1;
            end
            
            % record the packet's RTP header fields and arrival clock
            rtpts(temp_ix) = temp_ts;
            seqno(temp_ix) = fread(fid, 1, 'uint16');
            pt(temp_ix) = fread(fid, 1, 'int32');
            plen(temp_ix) = fread(fid, 1, 'int16');
            recin_t(temp_ix) = clock;
            
        case 2 % NETEQ_DELAY_LOGGING_SIGNAL_FLUSH
            % do nothing
            
        case 4 % NETEQ_DELAY_LOGGING_SIGNAL_EOF
            ended = 1;
            
        case 5 % NETEQ_DELAY_LOGGING_SIGNAL_DECODE
            last_decode_ts = fread(fid, 1, 'uint32');
            temp_delay = fread(fid, 1, 'uint16');
            
            % match the decoded timestamp to the latest received packet
            k = find(rtpts(1:(ts_ix - 1))==last_decode_ts,1,'last');
            if ~isempty(k)
                decode_t(k) = clock;
                playout_delay(k) = temp_delay + ...
                    5 *  fs_now / 8000; % add overlap length
                last_decode_k = k;
            end
            
        case 6 % NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS
            % NOTE(review): fsvec/fschange_ts are only created here; if the
            % log contains no CHANGE_FS record the final struct() call will
            % fail on undefined fsvec -- confirm logs always include one.
            fsvec(fs_ix) = fread(fid, 1, 'uint16');
            fschange_ts(fs_ix) = last_decode_ts;
            fs_now = fsvec(fs_ix);
            fs_ix = fs_ix + 1;
            
        case 7 % NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO
            playout_delay(last_decode_k) = playout_delay(last_decode_k) ...
                + fread(fid, 1, 'int32');
            
        case 8 % NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO
            % expand length is logged in samples; convert to ms
            temp = fread(fid, 1, 'int32');
            if last_decode_k ~= 0
                tot_expand = tot_expand + temp / (fs_now / 1000);
            end                
            
        case 9 % NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO
            temp = fread(fid, 1, 'int32');
            if last_decode_k ~= 0
                tot_accelerate = tot_accelerate + temp / (fs_now / 1000);
            end                

        case 10 % NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO
            temp = fread(fid, 1, 'int32');
            if last_decode_k ~= 0
                tot_preemptive = tot_preemptive + temp / (fs_now / 1000);
            end                
            
        case 11 % NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF
            optbuf(last_decode_k) = fread(fid, 1, 'int32');
            
        case 12 % NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC
            % decode of a packet reconstructed from one descriptor; the
            % packet may not have been received yet
            last_decode_ts = fread(fid, 1, 'uint32');
            k = ts_ix - 1;
            
            while (k >= 1) && (rtpts(k) ~= last_decode_ts)
                % TODO(hlundin): use matlab vector search instead?
                k = k - 1;
            end
            
            if k < 1
                % packet not received yet
                k = ts_ix;
                rtpts(ts_ix) = last_decode_ts;
                late_packets = late_packets + 1;
            end
            
            decode_t(k) = clock;
            playout_delay(k) = fread(fid, 1, 'uint16') + ...
                5 *  fs_now / 8000; % add overlap length
            last_decode_k = k;
             
    end
    
end


fclose(fid);

outStruct = struct(...
    'ts', rtpts, ...
    'sn', seqno, ...
    'pt', pt,...
    'plen', plen,...
    'arrival', recin_t,...
    'decode', decode_t,...
    'fs', fsvec(:),...
    'fschange_ts', fschange_ts(:),...
    'playout_delay', playout_delay,...
    'tot_expand', tot_expand,...
    'tot_accelerate', tot_accelerate,...
    'tot_preemptive', tot_preemptive,...
    'optbuf', optbuf);
diff --git a/trunk/src/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m b/trunk/src/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
new file mode 100644
index 0000000..bc1c85a
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
@@ -0,0 +1,187 @@
+function [delay_struct, delayvalues] = plot_neteq_delay(delayfile, varargin)
+
+% InfoStruct = plot_neteq_delay(delayfile)
+% InfoStruct = plot_neteq_delay(delayfile, 'skipdelay', skip_seconds)
+%
+% Plot the network and total delay profile of a NetEQ delay log (as written
+% by parse_delay_file) and return summary statistics. Optional switches:
+%   'skipdelay'/'delayskip' s  - exclude the first s seconds from the averages
+%   'noplot'                   - compute statistics only, do not plot
+%   'get_delay'/'getdelay' v   - also return delays interpolated at times v
+%
+% Henrik Lundin, 2006-11-17
+% Henrik Lundin, 2011-05-17
+%
+
+try
+    s = parse_delay_file(delayfile);
+catch
+    error(lasterr);
+end
+
+delayskip=0;
+noplot=0;
+arg_ptr=1;
+delaypoints=[];
+
+s.sn=unwrap_seqno(s.sn);
+
+% parse optional arguments
+while arg_ptr+1 <= nargin
+    switch lower(varargin{arg_ptr})
+    case {'skipdelay', 'delayskip'}
+        % skip a number of seconds in the beginning when calculating delays
+        delayskip = varargin{arg_ptr+1};
+        arg_ptr = arg_ptr + 2;
+    case 'noplot'
+        noplot=1;
+        arg_ptr = arg_ptr + 1;
+    case {'get_delay', 'getdelay'}
+        % return a vector of delay values for the points in the given vector
+        delaypoints = varargin{arg_ptr+1};
+        arg_ptr = arg_ptr + 2;
+    otherwise
+        warning('Unknown switch %s\n', varargin{arg_ptr});
+        arg_ptr = arg_ptr + 1;
+    end
+end
+
+% find lost frames that were covered by one-descriptor decoding
+one_desc_ix=find(isnan(s.arrival));
+for k=1:length(one_desc_ix)
+    ix=find(s.ts==max(s.ts(s.ts(one_desc_ix(k))>s.ts)));
+    s.sn(one_desc_ix(k))=s.sn(ix)+1;
+    s.pt(one_desc_ix(k))=s.pt(ix);
+    s.arrival(one_desc_ix(k))=s.arrival(ix)+s.decode(one_desc_ix(k))-s.decode(ix);
+end
+
+% remove duplicate received frames that were never decoded (RED codec)
+if length(unique(s.ts(isfinite(s.ts)))) < length(s.ts(isfinite(s.ts)))
+    ix=find(isfinite(s.decode));
+    s.sn=s.sn(ix);
+    s.ts=s.ts(ix);
+    s.arrival=s.arrival(ix);
+    s.playout_delay=s.playout_delay(ix);
+    s.pt=s.pt(ix);
+    s.optbuf=s.optbuf(ix);
+    s.plen=s.plen(ix); % bug fix: was "plen=plen(ix)"; plen is a field of s, not a variable
+    s.decode=s.decode(ix);
+end
+
+% find non-unique sequence numbers
+[~,un_ix]=unique(s.sn);
+nonun_ix=setdiff(1:length(s.sn),un_ix);
+if ~isempty(nonun_ix)
+    warning('RTP sequence numbers are in error');
+end
+            
+% sort vectors
+% NOTE(review): s.optbuf is not reordered with sort_ix here -- confirm the
+% log is already in sequence-number order whenever optbuf is used below.
+[s.sn,sort_ix]=sort(s.sn);
+s.ts=s.ts(sort_ix);
+s.arrival=s.arrival(sort_ix);
+s.decode=s.decode(sort_ix);
+s.playout_delay=s.playout_delay(sort_ix);
+s.pt=s.pt(sort_ix);
+
+send_t=s.ts-s.ts(1);
+if length(s.fs)<1
+    warning('No info about sample rate found in file. Using default 8000.');
+    s.fs(1)=8000;
+    s.fschange_ts(1)=min(s.ts);
+elseif s.fschange_ts(1)>min(s.ts)
+    s.fschange_ts(1)=min(s.ts);
+end
+
+% convert timestamps and delays to milliseconds, one sample-rate segment at a time
+end_ix=length(send_t);
+for k=length(s.fs):-1:1
+    start_ix=find(s.ts==s.fschange_ts(k));
+    send_t(start_ix:end_ix)=send_t(start_ix:end_ix)/s.fs(k)*1000;
+    s.playout_delay(start_ix:end_ix)=s.playout_delay(start_ix:end_ix)/s.fs(k)*1000;
+    s.optbuf(start_ix:end_ix)=s.optbuf(start_ix:end_ix)/s.fs(k)*1000;
+    end_ix=start_ix-1;
+end
+
+tot_time=max(send_t)-min(send_t);
+
+seq_ix=s.sn-min(s.sn)+1;
+send_t=send_t+max(min(s.arrival-send_t),0);
+
+plot_send_t=nan*ones(max(seq_ix),1);
+plot_send_t(seq_ix)=send_t;
+plot_nw_delay=nan*ones(max(seq_ix),1);
+plot_nw_delay(seq_ix)=s.arrival-send_t;
+
+cng_ix=find(s.pt~=13); % find those packets that are not CNG/SID
+    
+if noplot==0
+    h=plot(plot_send_t/1000,plot_nw_delay);
+    set(h,'color',0.75*[1 1 1]);
+    hold on
+    if any(s.optbuf~=0)
+        peak_ix=find(s.optbuf(cng_ix)<0); % peak mode is labeled with negative values
+        no_peak_ix=find(s.optbuf(cng_ix)>0); %setdiff(1:length(cng_ix),peak_ix);
+        h1=plot(send_t(cng_ix(peak_ix))/1000,...
+            s.arrival(cng_ix(peak_ix))+abs(s.optbuf(cng_ix(peak_ix)))-send_t(cng_ix(peak_ix)),...
+            'r.');
+        h2=plot(send_t(cng_ix(no_peak_ix))/1000,...
+            s.arrival(cng_ix(no_peak_ix))+abs(s.optbuf(cng_ix(no_peak_ix)))-send_t(cng_ix(no_peak_ix)),...
+            'g.');
+        set([h1, h2],'markersize',1)
+    end
+    %h=plot(send_t(seq_ix)/1000,s.decode+s.playout_delay-send_t(seq_ix));
+    h=plot(send_t(cng_ix)/1000,s.decode(cng_ix)+s.playout_delay(cng_ix)-send_t(cng_ix));
+    set(h,'linew',1.5);
+    hold off
+    ax1=axis;
+    axis tight
+    ax2=axis;
+    axis([ax2(1:3) ax1(4)])
+end
+
+
+% calculate delays and other parameters
+
+delayskip_ix = find(send_t-send_t(1)>=delayskip*1000, 1 );
+
+use_ix = intersect(cng_ix,... % use those that are not CNG/SID frames...
+    intersect(find(isfinite(s.decode)),... % ... that did arrive ...
+    (delayskip_ix:length(s.decode))')); % ... and are sent after delayskip seconds
+
+mean_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-send_t(use_ix));
+neteq_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-s.arrival(use_ix));
+
+Npack=max(s.sn(delayskip_ix:end))-min(s.sn(delayskip_ix:end))+1;
+nw_lossrate=(Npack-length(s.sn(delayskip_ix:end)))/Npack;
+neteq_lossrate=(length(s.sn(delayskip_ix:end))-length(use_ix))/Npack;
+
+delay_struct=struct('mean_delay',mean_delay,'neteq_delay',neteq_delay,...
+    'nw_lossrate',nw_lossrate,'neteq_lossrate',neteq_lossrate,...
+    'tot_expand',round(s.tot_expand),'tot_accelerate',round(s.tot_accelerate),...
+    'tot_preemptive',round(s.tot_preemptive),'tot_time',tot_time,...
+    'filename',delayfile,'units','ms','fs',unique(s.fs));
+    
+if not(isempty(delaypoints))
+    delayvalues=interp1(send_t(cng_ix),...
+        s.decode(cng_ix)+s.playout_delay(cng_ix)-send_t(cng_ix),...
+        delaypoints,'nearest',NaN);
+else
+    delayvalues=[];
+end
+
+
+
+% SUBFUNCTIONS %
+
+function y=unwrap_seqno(x)
+
+% Unwrap 16-bit RTP sequence numbers: wherever consecutive elements jump by
+% more than 65000 (a wrap-around), add or subtract 65536 from the remainder
+% of the vector so the sequence continues monotonically across the wrap.
+
+jumps=find(abs((diff(x)-1))>65000);
+
+while ~isempty(jumps)
+    n=jumps(1);
+    if x(n+1)-x(n) < 0
+        % negative jump
+        x(n+1:end)=x(n+1:end)+65536;
+    else
+        % positive jump
+        x(n+1:end)=x(n+1:end)-65536;
+    end
+    
+    % bug fix: find() over the tail x(n+1:end) returns indices relative to
+    % that tail; offset by n so the next iteration indexes x correctly
+    % (previously a second wrap was adjusted at the wrong position)
+    jumps=n+find(abs((diff(x(n+1:end))-1))>65000);
+end
+
+y=x;
+
+return;
diff --git a/trunk/src/modules/audio_coding/neteq/test/ptypes.txt b/trunk/src/modules/audio_coding/neteq/test/ptypes.txt
new file mode 100644
index 0000000..c3d4e25
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/test/ptypes.txt
@@ -0,0 +1,20 @@
+pcmu 0
+pcma 8
+cn 13
+//ipcmwb 97
+//eg711u 100
+//eg711a 101
+ilbc 102
+isac 103
+isacswb 104
+avt 106
+red 117
+cn_wb 98
+cn_swb32 99
+pcm16b 93
+pcm16b_wb 94
+pcm16b_swb32khz 95
+//pcm16b_swb48khz 96
+//mpeg4aac 122
+g722 9
+celt32 114
diff --git a/trunk/src/modules/audio_coding/neteq/unmute_signal.c b/trunk/src/modules/audio_coding/neteq/unmute_signal.c
new file mode 100644
index 0000000..ee9daa8
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/unmute_signal.c
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function "unmutes" a vector on a sample by sample basis.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+
+void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
+                              WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
+                              WebRtc_Word16 N)
+{
+    /*
+     * Ramp the gain of pw16_inVec up sample by sample, writing the result to
+     * pw16_outVec. *startMuteFact is the starting gain in Q14 (16384 == 1.0)
+     * and is updated on return so consecutive calls continue the ramp.
+     * unmuteFact is the per-sample gain increment in the Q20 accumulator
+     * domain; N is the number of samples to process.
+     * NOTE(review): in-place use (pw16_outVec == pw16_inVec) looks safe since
+     * each sample is read before it is written -- confirm with callers.
+     */
+    int i;
+    WebRtc_UWord16 w16_tmp;  /* current gain factor, Q14 */
+    WebRtc_Word32 w32_tmp;   /* gain accumulator, Q20 */
+
+    w16_tmp = (WebRtc_UWord16) *startMuteFact;
+    /* Promote the Q14 start factor to Q20; the +32 pre-compensates the later
+     * >>6 down-shift with round-to-nearest (32 == half of 2^6). */
+    w32_tmp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,6) + 32;
+    for (i = 0; i < N; i++)
+    {
+        /* Apply the Q14 gain with rounding: (gain * sample + 8192) >> 14. */
+        pw16_outVec[i]
+            = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_tmp, pw16_inVec[i]) + 8192) >> 14);
+        /* Advance the ramp in Q20, clamp below at 0, and convert back to Q14
+         * clamped at 16384 (unity gain). */
+        w32_tmp += unmuteFact;
+        w32_tmp = WEBRTC_SPL_MAX(0, w32_tmp);
+        w16_tmp = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 6); /* 20 - 14 = 6 */
+        w16_tmp = WEBRTC_SPL_MIN(16384, w16_tmp);
+    }
+    /* Hand the final gain back so the caller can resume the ramp. */
+    *startMuteFact = (WebRtc_Word16) w16_tmp;
+}
+
diff --git a/trunk/src/modules/audio_coding/neteq/webrtc_neteq.c b/trunk/src/modules/audio_coding/neteq/webrtc_neteq.c
new file mode 100644
index 0000000..5e99fd8
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/webrtc_neteq.c
@@ -0,0 +1,1654 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of main NetEQ API.
+ */
+
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+#include "mcu_dsp_common.h"
+#include "rtcp.h"
+
+/*
+ * Convert a non-zero status from an internal call into the instance's
+ * ErrorCode (negated; a bare -1 is mapped to -NETEQ_OTHER_ERROR) and make
+ * the *calling function* return -1.
+ * NOTE: macroExpr is evaluated more than once -- pass a variable, not a call.
+ */
+#define RETURN_ON_ERROR( macroExpr, macroInstPtr )  { \
+    if ((macroExpr) != 0) { \
+    if ((macroExpr) == -1) { \
+    (macroInstPtr)->ErrorCode = - (NETEQ_OTHER_ERROR); \
+    } else { \
+    (macroInstPtr)->ErrorCode = -((WebRtc_Word16) (macroExpr)); \
+    } \
+    return(-1); \
+    } }
+
+/*
+ * Bounded string copy (strncpy_s-style). Copies at most count bytes from
+ * strSource into strDest, which holds numberOfElements bytes. The copy is
+ * always null-terminated on success. If count exceeds the destination
+ * capacity, strDest is set to the empty string and -1 is returned;
+ * otherwise 0 is returned.
+ */
+int WebRtcNetEQ_strncpy(char *strDest, int numberOfElements,
+                        const char *strSource, int count)
+{
+    /* check vector lengths */
+    if (count > numberOfElements)
+    {
+        strDest[0] = '\0';
+        return (-1);
+    }
+    else
+    {
+        strncpy(strDest, strSource, count);
+        /* strncpy() does not terminate the destination when strSource is
+         * count bytes or longer; force termination inside the copied range
+         * (a no-op when the source is shorter, since strncpy null-pads). */
+        if (count > 0)
+        {
+            strDest[count - 1] = '\0';
+        }
+        return (0);
+    }
+}
+
+/**********************************************************
+ * NETEQ Functions
+ */
+
+/*****************************************
+ * Info functions
+ */
+
+/*
+ * Write the NetEQ version string ("3.3.0", null-terminated) into version.
+ * Always returns 0.
+ */
+int WebRtcNetEQ_GetVersion(char *version)
+{
+    /* Copy the literal including its terminating null, without strcpy.
+     * The previous sentinel-space scan (bound i <= 20) could read past the
+     * 11-byte literal if the trailing padding was ever removed. */
+    const char versionString[] = "3.3.0";
+    int i;
+    for (i = 0; i < (int) sizeof(versionString); i++)
+    {
+        version[i] = versionString[i]; /* To avoid using strcpy */
+    }
+    return (0);
+}
+
+int WebRtcNetEQ_GetErrorCode(void *inst)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    return (NetEqMainInst->ErrorCode);
+}
+
+/*
+ * Translate a NetEQ error code (either sign) into a human-readable name
+ * written to errorName. Returns 0 on success; -1 for bad arguments or an
+ * unrecognized code (errorName is then "UNKNOWN_ERROR").
+ */
+int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen)
+{
+    /* Inclusive code ranges and their names; single codes have lo == hi. */
+    static const struct
+    {
+        int lo;
+        int hi;
+        const char *name;
+    } errorTable[] = {
+        { 1, 1, "OTHER_ERROR" },                      /* also covers -1 */
+        { 1001, 1001, "FAULTY_INSTRUCTION" },
+        { 1002, 1002, "FAULTY_NETWORK_TYPE" },
+        { 1003, 1003, "FAULTY_DELAYVALUE" },
+        { 1004, 1004, "FAULTY_PLAYOUTMODE" },
+        { 1005, 1005, "CORRUPT_INSTANCE" },
+        { 1006, 1006, "ILLEGAL_MASTER_SLAVE_SWITCH" },
+        { 1007, 1007, "MASTER_SLAVE_ERROR" },
+        { 2001, 2001, "UNKNOWN_BUFSTAT_DECISION" },
+        { 2002, 2002, "RECOUT_ERROR_DECODING" },
+        { 2003, 2003, "RECOUT_ERROR_SAMPLEUNDERRUN" },
+        { 2004, 2004, "RECOUT_ERROR_DECODED_TOO_MUCH" },
+        { 3001, 3001, "RECIN_CNG_ERROR" },
+        { 3002, 3002, "RECIN_UNKNOWNPAYLOAD" },
+        { 3003, 3003, "RECIN_BUFFERINSERT_ERROR" },
+        { 4001, 4001, "PBUFFER_INIT_ERROR" },
+        { 4002, 4006, "PBUFFER_INSERT_ERROR1" },
+        { 4007, 4007, "UNKNOWN_G723_HEADER" },
+        { 4008, 4008, "PBUFFER_NONEXISTING_PACKET" },
+        { 4009, 4009, "PBUFFER_NOT_INITIALIZED" },
+        { 4010, 4010, "AMBIGUOUS_ILBC_FRAME_SIZE" },
+        { 5001, 5001, "CODEC_DB_FULL" },
+        { 5002, 5005, "CODEC_DB_NOT_EXIST" },
+        { 5006, 5006, "CODEC_DB_UNKNOWN_CODEC" },
+        { 5007, 5007, "CODEC_DB_PAYLOAD_TAKEN" },
+        { 5008, 5008, "CODEC_DB_UNSUPPORTED_CODEC" },
+        { 5009, 5009, "CODEC_DB_UNSUPPORTED_FS" },
+        { 6001, 6001, "DTMF_DEC_PARAMETER_ERROR" },
+        { 6002, 6002, "DTMF_INSERT_ERROR" },
+        { 6003, 6003, "DTMF_GEN_UNKNOWN_SAMP_FREQ" },
+        { 6004, 6004, "DTMF_NOT_SUPPORTED" },
+        { 7001, 7002, "RED_SPLIT_ERROR" },
+        { 7003, 7003, "RTP_TOO_SHORT_PACKET" },
+        { 7004, 7004, "RTP_CORRUPT_PACKET" }
+    };
+    int i;
+
+    if ((errorName == NULL) || (maxStrLen <= 0))
+    {
+        return (-1);
+    }
+
+    if (errorCode < 0)
+    {
+        errorCode = -errorCode; /* absolute value */
+    }
+
+    for (i = 0; i < (int) (sizeof(errorTable) / sizeof(errorTable[0])); i++)
+    {
+        if ((errorCode >= errorTable[i].lo) && (errorCode <= errorTable[i].hi))
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, errorTable[i].name, maxStrLen);
+            return (0);
+        }
+    }
+
+    /* check for decoder error ranges: iSAC error codes */
+    if (errorCode >= 6010 && errorCode <= 6810)
+    {
+        WebRtcNetEQ_strncpy(errorName, maxStrLen, "iSAC ERROR", maxStrLen);
+        return (0);
+    }
+
+    WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_ERROR", maxStrLen);
+    return (-1);
+}
+
+/* Assign functions (create not allowed in order to avoid malloc in lib) */
+/* Report the memory size a caller must allocate before WebRtcNetEQ_Assign. */
+int WebRtcNetEQ_AssignSize(int *sizeinbytes)
+{
+    /* (2 * sizeof(MainInst_t)) expressed in WebRtc_Word16-sized units;
+     * equals the instance byte size when WebRtc_Word16 is two bytes. */
+    int instanceSize = (sizeof(MainInst_t) * 2) / sizeof(WebRtc_Word16);
+
+    *sizeinbytes = instanceSize;
+    return (0);
+}
+
+/* Bind a caller-allocated memory area as a NetEQ instance: zero it and
+ * reset the MCU side. Returns 0 on success, -1 on failure. */
+int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr)
+{
+    MainInst_t *instance = (MainInst_t*) NETEQ_inst_Addr;
+    int status;
+
+    *inst = NETEQ_inst_Addr;
+    if (*inst == NULL)
+    {
+        return (-1);
+    }
+
+    /* Clear the whole instance memory. */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) instance, 0,
+        (sizeof(MainInst_t) / sizeof(WebRtc_Word16)));
+
+    status = WebRtcNetEQ_McuReset(&instance->MCUinst);
+    if (status != 0)
+    {
+        instance->ErrorCode = -status;
+        return (-1);
+    }
+    return (0);
+}
+
+/*
+ * Recommend packet-buffer dimensions for the given codec set, scaled by a
+ * network-type dependent multiplier. Results are written to MaxNoOfPackets
+ * and sizeinbytes. Returns 0 on success, -1 on error (instance ErrorCode
+ * is set).
+ */
+int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
+                                         int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
+                                         int *MaxNoOfPackets, int *sizeinbytes)
+{
+    int ok;
+    int multiplier;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    *MaxNoOfPackets = 0;
+    *sizeinbytes = 0;
+
+    ok = WebRtcNetEQ_GetDefaultCodecSettings(codec, noOfCodecs, sizeinbytes, MaxNoOfPackets);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+
+    /* Scale the defaults by the expected jitter of the transport type. */
+    switch (nwType)
+    {
+        case kUDPNormal:
+            multiplier = 1;
+            break;
+        case kUDPVideoSync:
+            multiplier = 4;
+            break;
+        case kTCPNormal:
+            multiplier = 4;
+            break;
+        case kTCPLargeJitter:
+            multiplier = 8;
+            break;
+        case kTCPXLargeJitter:
+            multiplier = 20;
+            break;
+        default:
+            NetEqMainInst->ErrorCode = -FAULTY_NETWORK_TYPE;
+            return (-1);
+    }
+
+    *MaxNoOfPackets = (*MaxNoOfPackets) * multiplier;
+    *sizeinbytes = (*sizeinbytes) * multiplier;
+
+    /* The original re-tested (ok != 0) here; that check was unreachable
+     * since ok is known to be zero at this point, so it has been removed. */
+    return (0);
+}
+
+/* Hand a caller-allocated memory area to the packet buffer. sizeinbytes is
+ * converted to 16-bit words (>> 1) before initialization. */
+int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
+                             int sizeinbytes)
+{
+    int status;
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    status = WebRtcNetEQ_PacketBufferInit(&instance->MCUinst.PacketBuffer_inst,
+        MaxNoOfPackets, (WebRtc_Word16*) NETEQ_Buffer_Addr, (sizeinbytes >> 1));
+    if (status != 0)
+    {
+        instance->ErrorCode = -status;
+        return (-1);
+    }
+    return (status);
+}
+
+/************************************************
+ * Init functions
+ */
+
+/****************************************************************************
+ * WebRtcNetEQ_Init(...)
+ *
+ * Initialize NetEQ.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *		- fs            : Initial sample rate in Hz (may change with payload)
+ *
+ * Output:
+ *		- inst	        : Initialized NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs)
+{
+    int ok = 0;
+
+    /* Typecast inst to internal instance format */
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+#ifdef NETEQ_VAD
+    /* Start out with no PostDecode VAD instance */
+    NetEqMainInst->DSPinst.VADInst.VADState = NULL;
+    /* Also set all VAD function pointers to NULL */
+    NetEqMainInst->DSPinst.VADInst.initFunction = NULL;
+    NetEqMainInst->DSPinst.VADInst.setmodeFunction = NULL;
+    NetEqMainInst->DSPinst.VADInst.VADFunction = NULL;
+#endif /* NETEQ_VAD */
+
+    /* The order below matters: MCU/DSP cross-pointers must exist before the
+     * DSP side is initialized, and the stats/buffer resets use both sides. */
+    ok = WebRtcNetEQ_DSPinit(NetEqMainInst); /* Init addresses between MCU and DSP */
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    ok = WebRtcNetEQ_DSPInit(&NetEqMainInst->DSPinst, fs); /* Init dsp side */
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+    /* set BGN mode to default, since it is not cleared by DSP init function */
+    NetEqMainInst->DSPinst.BGNInst.bgnMode = BGN_ON;
+
+    /* init statistics functions and counters */
+    ok = WebRtcNetEQ_ClearInCallStats(&NetEqMainInst->DSPinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+    ok = WebRtcNetEQ_ClearPostCallStats(&NetEqMainInst->DSPinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+    ok = WebRtcNetEQ_ResetMcuJitterStat(&NetEqMainInst->MCUinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    /* flush packet buffer */
+    ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    /* set some variables to initial values */
+    NetEqMainInst->MCUinst.current_Codec = -1;   /* no codec selected yet */
+    NetEqMainInst->MCUinst.current_Payload = -1; /* no payload type seen yet */
+    NetEqMainInst->MCUinst.first_packet = 1;     /* next packet starts the stream */
+    NetEqMainInst->MCUinst.one_desc = 0;
+    NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = 0;
+    NetEqMainInst->MCUinst.NoOfExpandCalls = 0;
+    NetEqMainInst->MCUinst.fs = fs;
+
+#ifdef NETEQ_ATEVENT_DECODE
+    /* init DTMF decoder */
+    /* NOTE(review): 560 is an opaque init parameter here -- confirm its
+     * meaning against WebRtcNetEQ_DtmfDecoderInit. */
+    ok = WebRtcNetEQ_DtmfDecoderInit(&(NetEqMainInst->MCUinst.DTMF_inst),fs,560);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+#endif
+
+    /* init RTCP statistics */
+    WebRtcNetEQ_RTCPInit(&(NetEqMainInst->MCUinst.RTCP_inst), 0);
+
+    /* set BufferStat struct to zero */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) &(NetEqMainInst->MCUinst.BufferStat_inst), 0,
+        sizeof(BufstatsInst_t) / sizeof(WebRtc_Word16));
+
+    /* reset automode */
+    WebRtcNetEQ_ResetAutomode(&(NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst),
+        NetEqMainInst->MCUinst.PacketBuffer_inst.maxInsertPositions);
+
+    NetEqMainInst->ErrorCode = 0;
+
+#ifdef NETEQ_STEREO
+    /* set master/slave info to undecided */
+    NetEqMainInst->masterSlave = 0;
+#endif
+
+    return (ok);
+}
+
+/* Discard all buffered packets and speech, and force the MCU to treat the
+ * next incoming packet as the first of a new stream. */
+int WebRtcNetEQ_FlushBuffers(void *inst)
+{
+    int status;
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL)
+    {
+        return (-1);
+    }
+
+    /* Drop everything queued on the MCU side. */
+    status = WebRtcNetEQ_PacketBufferFlush(&instance->MCUinst.PacketBuffer_inst);
+    RETURN_ON_ERROR(status, instance);
+
+    /* Wait for a new codec before decoding again. */
+    instance->MCUinst.first_packet = 1;
+
+    /* Drop buffered speech on the DSP side. */
+    status = WebRtcNetEQ_FlushSpeechBuffer(&instance->DSPinst);
+    RETURN_ON_ERROR(status, instance);
+
+    return 0;
+}
+
+/* Enable or disable playout of AVT/DTMF events. When NETEQ_ATEVENT_DECODE is
+ * not compiled in, only disabling is accepted; enabling fails with
+ * DTMF_NOT_SUPPORTED. */
+int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon)
+{
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+#ifdef NETEQ_ATEVENT_DECODE
+    instance->MCUinst.AVT_PlayoutOn = PlayoutAVTon;
+    return (0);
+#else
+    if (PlayoutAVTon == 0)
+    {
+        /* Turning the feature off is always allowed. */
+        return (0);
+    }
+    instance->ErrorCode = -DTMF_NOT_SUPPORTED;
+    return (-1);
+#endif
+}
+
+/* Add a fixed extra delay (0..1000 ms) on top of the adaptive buffer target.
+ * Out-of-range values fail with FAULTY_DELAYVALUE. */
+int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs)
+{
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    if ((DelayInMs >= 0) && (DelayInMs <= 1000))
+    {
+        instance->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = DelayInMs;
+        return (0);
+    }
+
+    instance->ErrorCode = -FAULTY_DELAYVALUE;
+    return (-1);
+}
+
+/* Select the playout mode; only the four defined modes are accepted. */
+int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode)
+{
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    switch (playoutMode)
+    {
+        case kPlayoutOn:
+        case kPlayoutOff:
+        case kPlayoutFax:
+        case kPlayoutStreaming:
+            instance->MCUinst.NetEqPlayoutMode = playoutMode;
+            return (0);
+        default:
+            instance->ErrorCode = -FAULTY_PLAYOUTMODE;
+            return (-1);
+    }
+}
+
+/* Set the background-noise generation mode on the DSP side. */
+int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode)
+{
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    /* A cleared or corrupt instance no longer points back at itself. */
+    if (instance->MCUinst.main_inst != instance)
+    {
+        instance->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    instance->DSPinst.BGNInst.bgnMode = (enum BGNMode) bgnMode;
+    return (0);
+}
+
+/* Read back the current background-noise generation mode. */
+int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode)
+{
+    const MainInst_t *instance = (const MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    *bgnMode = (enum WebRtcNetEQBGNMode) instance->DSPinst.BGNInst.bgnMode;
+    return (0);
+}
+
+/************************************************
+ * CodecDB functions
+ */
+
+/*
+ * Empty the codec database and detach the active decoder's function
+ * pointers so RecOut cannot call into a codec that is no longer registered.
+ * Returns 0 on success, -1 on failure (ErrorCode set).
+ */
+int WebRtcNetEQ_CodecDbReset(void *inst)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_DbReset(&NetEqMainInst->MCUinst.codec_DB_inst);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+
+    /* set function pointers to NULL to prevent RecOut from using the codec
+     * (fix: the original assigned funcDecode twice; listed once here) */
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
+
+    return (0);
+}
+
+/* Report the number of used codec-database entries and the capacity. */
+int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
+                                   WebRtc_Word16 *MaxEntries)
+{
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    *MaxEntries = NUM_CODECS;
+    *UsedEntries = instance->MCUinst.codec_DB_inst.nrOfCodecs;
+    return (0);
+}
+
+/*
+ * Look up which codec occupies database position Entry; the codec enum is
+ * written to *codec. For an out-of-range Entry, *codec is left as 0 and -1
+ * is returned with ErrorCode set to -CODEC_DB_NOT_EXIST1.
+ */
+int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
+                                    enum WebRtcNetEQDecoder *codec)
+{
+    int i;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    *codec = (enum WebRtcNetEQDecoder) 0;
+    if ((Entry >= 0) && (Entry < NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs))
+    {
+        for (i = 0; i < NUM_TOTAL_CODECS; i++)
+        {
+            if (NetEqMainInst->MCUinst.codec_DB_inst.position[i] == Entry)
+            {
+                *codec = (enum WebRtcNetEQDecoder) i;
+                /* position entries are assumed unique; stop at the first
+                 * match instead of scanning the rest of the table */
+                break;
+            }
+        }
+    }
+    else
+    {
+        NetEqMainInst->ErrorCode = -(CODEC_DB_NOT_EXIST1);
+        return (-1);
+    }
+    return (0);
+}
+
+/* Register a codec (payload type, function pointers, state, sample rate)
+ * in the codec database. */
+int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst)
+{
+    int status;
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    status = WebRtcNetEQ_DbAdd(&instance->MCUinst.codec_DB_inst, codecInst->codec,
+        codecInst->payloadType, codecInst->funcDecode, codecInst->funcDecodeRCU,
+        codecInst->funcDecodePLC, codecInst->funcDecodeInit, codecInst->funcAddLatePkt,
+        codecInst->funcGetMDinfo, codecInst->funcGetPitch, codecInst->funcUpdBWEst,
+        codecInst->funcGetErrorCode, codecInst->codec_state, codecInst->codec_fs);
+    if (status != 0)
+    {
+        instance->ErrorCode = -status;
+        return (-1);
+    }
+    return (status);
+}
+
+/*
+ * Remove one codec from the database. If the codec being removed is the one
+ * currently in use, its function pointers are detached first so RecOut
+ * cannot call into it afterwards.
+ */
+int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* check if currently used codec is being removed */
+    if (NetEqMainInst->MCUinst.current_Codec == (WebRtc_Word16) codec)
+    {
+        /* set function pointers to NULL to prevent RecOut from using the codec
+         * (fix: the original assigned funcDecode twice; listed once here) */
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
+    }
+
+    ok = WebRtcNetEQ_DbRemove(&NetEqMainInst->MCUinst.codec_DB_inst, codec);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/*********************************
+ * Real-time functions
+ */
+
+/* Feed one RTP datagram (header included) into NetEQ. uw32_timeRec is the
+ * arrival time expressed in timestamp units of the codec in use. */
+int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
+                      WebRtc_UWord32 uw32_timeRec)
+{
+    int status;
+    RTPPacket_t packet;
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL) return (-1);
+
+    /* A cleared or corrupt instance no longer points back at itself. */
+    if (instance->MCUinst.main_inst != instance)
+    {
+        instance->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    /* Parse the RTP header into NetEQ's internal packet representation. */
+    status = WebRtcNetEQ_RTPPayloadInfo(p_w16datagramstart, w16_RTPlen, &packet);
+    if (status != 0)
+    {
+        instance->ErrorCode = -status;
+        return (-1);
+    }
+
+    /* Insert the parsed packet into the packet buffer. */
+    status = WebRtcNetEQ_RecInInternal(&instance->MCUinst, &packet, uw32_timeRec);
+    if (status != 0)
+    {
+        instance->ErrorCode = -status;
+        return (-1);
+    }
+    return (status);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInRTPStruct(...)
+ *
+ * Alternative RecIn function, used when the RTP data has already been
+ * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
+ *
+ * Input:
+ *		- inst	            : NetEQ instance
+ *		- rtpInfo		    : Pointer to RTP info
+ *		- payloadPtr        : Pointer to the RTP payload (first byte after header)
+ *      - payloadLenBytes   : Length (in bytes) of the payload in payloadPtr
+ *      - timeRec           : Receive time (in timestamps of the used codec)
+ *
+ * Return value			    :  0 - Ok
+ *                            -1 - Error
+ */
+int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
+                               const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
+                               WebRtc_UWord32 uw32_timeRec)
+{
+    /* RecIn variant for callers that have already parsed the RTP header
+     * into a WebRtcNetEQ_RTPInfo struct. */
+    int status;
+    RTPPacket_t packet;
+    MainInst_t *instance = (MainInst_t*) inst;
+
+    if (instance == NULL)
+    {
+        return (-1);
+    }
+
+    /* A cleared or corrupt instance no longer points back at itself. */
+    if (instance->MCUinst.main_inst != instance)
+    {
+        instance->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    /* Translate the caller's RTP info into NetEQ's internal packet format. */
+    packet.payloadType = rtpInfo->payloadType;
+    packet.seqNumber = rtpInfo->sequenceNumber;
+    packet.timeStamp = rtpInfo->timeStamp;
+    packet.ssrc = rtpInfo->SSRC;
+    packet.payload = (const WebRtc_Word16*) payloadPtr;
+    packet.payloadLen = payloadLenBytes;
+    packet.starts_byte1 = 0; /* NOTE(review): presumably an odd-byte-offset flag; confirm */
+
+    status = WebRtcNetEQ_RecInInternal(&instance->MCUinst, &packet, uw32_timeRec);
+    if (status != 0)
+    {
+        instance->ErrorCode = -status;
+        return (-1);
+    }
+    return (status);
+}
+
+/* Extract one frame of audio from NetEQ.
+ * Output samples are written to pw16_outData and the output length
+ * (presumably in samples) to pw16_len.
+ * Returns 0 on success, -1 on error (error code stored in the instance). */
+int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+#ifdef NETEQ_STEREO
+    /* Plain RecOut always runs in mono mode; master/slave operation goes
+     * through WebRtcNetEQ_RecOutMasterSlave() instead. */
+    MasterSlaveInfo msInfo;
+    msInfo.msMode = NETEQ_MONO;
+#endif
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+#ifdef NETEQ_STEREO
+    /* Point the DSP side at the stack-allocated master/slave info;
+     * the pointer is only valid for the duration of this call. */
+    NetEqMainInst->DSPinst.msInfo = &msInfo;
+#endif
+
+    ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
+        pw16_len, 0 /* not BGN only */);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutMasterSlave(...)
+ *
+ * RecOut function for running several NetEQ instances in master/slave mode.
+ * One master can be used to control several slaves.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *      - isMaster      : Non-zero indicates that this is the master channel
+ *      - msInfo        : (slave only) Information from master
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *      - pw16_outData  : Pointer to vector where output should be written
+ *      - pw16_len      : Pointer to variable where output length is returned
+ *      - msInfo        : (master only) Information to slave(s)
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
+                                  WebRtc_Word16 *pw16_len, void *msInfo,
+                                  WebRtc_Word16 isMaster)
+{
+#ifndef NETEQ_STEREO
+    /* Stereo not supported */
+    return(-1);
+#else
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    if (msInfo == NULL)
+    {
+        /* msInfo not provided */
+        NetEqMainInst->ErrorCode = NETEQ_OTHER_ERROR;
+        return (-1);
+    }
+
+    /* translate from external to internal Master/Slave information */
+    NetEqMainInst->DSPinst.msInfo = (MasterSlaveInfo *) msInfo;
+
+    /* check that we have not done a master/slave switch without first re-initializing */
+    /* masterSlave: 1 = previously ran as master, 2 = previously ran as slave. */
+    if ((NetEqMainInst->masterSlave == 1 && !isMaster) || /* switch from master to slave */
+    (NetEqMainInst->masterSlave == 2 && isMaster)) /* switch from slave to master */
+    {
+        NetEqMainInst->ErrorCode = ILLEGAL_MASTER_SLAVE_SWITCH;
+        return (-1);
+    }
+
+    if (!isMaster)
+    {
+        /* this is the slave */
+        NetEqMainInst->masterSlave = 2;
+        NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_SLAVE;
+    }
+    else
+    {
+        NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_MASTER;
+    }
+
+    ok  = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
+        pw16_len, 0 /* not BGN only */);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+
+    if (isMaster)
+    {
+        /* this is the master */
+        /* NOTE(review): the master role is latched only after a successful
+         * decode, while the slave role is latched before the decode --
+         * presumably intentional so a failed first master call does not
+         * lock the instance into master mode; confirm. */
+        NetEqMainInst->masterSlave = 1;
+    }
+
+    return (ok);
+#endif
+}
+
+/* Report the size (in bytes) of the buffer the caller must allocate and
+ * pass as msInfo to WebRtcNetEQ_RecOutMasterSlave().
+ * Returns -1 when NetEQ is built without stereo (NETEQ_STEREO) support.
+ * Note: "(void)" makes this a proper C prototype; the previous "()" form
+ * declared an unspecified (K&R-style) parameter list. */
+int WebRtcNetEQ_GetMasterSlaveInfoSize(void)
+{
+#ifdef NETEQ_STEREO
+    return (sizeof(MasterSlaveInfo));
+#else
+    return (-1);
+#endif
+}
+
+/* Special RecOut that does not do any decoding. */
+/* Produces output via the internal RecOut path with the "BGN only" flag set,
+ * so no codec decoding is performed. Returns 0 on success, -1 on error. */
+int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
+                               WebRtc_Word16 *pw16_len)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo msInfo;
+#endif
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+#ifdef NETEQ_STEREO
+    /* keep same mode as before */
+    /* masterSlave encoding: 1 = master, 2 = slave, anything else = mono. */
+    switch (NetEqMainInst->masterSlave)
+    {
+        case 1:
+        {
+            msInfo.msMode = NETEQ_MASTER;
+            break;
+        }
+        case 2:
+        {
+            msInfo.msMode = NETEQ_SLAVE;
+            break;
+        }
+        default:
+        {
+            msInfo.msMode = NETEQ_MONO;
+            break;
+        }
+    }
+
+    /* Stack-allocated info; only valid for the duration of this call. */
+    NetEqMainInst->DSPinst.msInfo = &msInfo;
+#endif
+
+    ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
+        pw16_len, 1 /* BGN only */);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/* Common worker for the two public RTCP statistics getters below.
+ * Fills RTCP_inst from the instance's internal RTCP state.
+ * doNotReset: 0 = also reset the internal RTCP counters after the query,
+ *             1 = leave the counters untouched (read-only query).
+ * Returns 0 on success, -1 on error (error code stored in the instance).
+ * (Previously the two public functions were identical copy-pasted bodies
+ * differing only in this final literal.) */
+static int WebRtcNetEQ_GetRTCPStatsInternal(MainInst_t *NetEqMainInst,
+                                            WebRtcNetEQ_RTCPStat *RTCP_inst,
+                                            WebRtc_Word16 doNotReset)
+{
+    int ok = 0;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
+        &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
+        &RTCP_inst->jitter, doNotReset);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/* Query RTCP statistics and reset the internal counters. */
+int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
+{
+    return WebRtcNetEQ_GetRTCPStatsInternal((MainInst_t*) inst, RTCP_inst, 0);
+}
+
+/* Query RTCP statistics without resetting the internal counters. */
+int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
+{
+    return WebRtcNetEQ_GetRTCPStatsInternal((MainInst_t*) inst, RTCP_inst, 1);
+}
+
+/* Report the playout timestamp used for video synchronization.
+ * The internal timestamp is converted to the external (codec) domain when
+ * timestamp scaling has been initialized; otherwise it is returned as-is.
+ * Returns 0 on success, -1 if inst is NULL. */
+int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp)
+{
+    MainInst_t *main_inst = (MainInst_t*) inst;
+    WebRtc_UWord32 internal_ts;
+
+    if (main_inst == NULL) return (-1);
+
+    internal_ts = main_inst->DSPinst.videoSyncTimestamp;
+
+    if (!main_inst->MCUinst.TSscalingInitialized)
+    {
+        /* No scaling configured; report the internal timestamp directly. */
+        *timestamp = internal_ts;
+    }
+    else
+    {
+        *timestamp = WebRtcNetEQ_ScaleTimestampInternalToExternal(&main_inst->MCUinst,
+            internal_ts);
+    }
+
+    return (0);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_GetSpeechOutputType(...)
+ *
+ * Get the output type for the audio provided by the latest call to
+ * WebRtcNetEQ_RecOut().
+ *
+ * kOutputNormal = normal audio (possibly processed)
+ * kOutputPLC = loss concealment through stretching audio
+ * kOutputCNG = comfort noise (codec-internal or RFC3389)
+ * kOutputPLCtoCNG = background noise only due to long expand or error
+ * kOutputVADPassive = PostDecode VAD signalling passive speaker
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *
+ * Output:
+ *		- outputType    : Output type from enum list WebRtcNetEQOutputType
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType)
+{
+    /* Typecast to internal instance type */
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+    /* Classify the last decoded frame by inspecting the DSP mode word.
+     * NOTE: one else-if branch below is conditionally compiled under
+     * NETEQ_VAD; the #ifdef is deliberately placed so the chain stays
+     * syntactically valid in both configurations. */
+    if ((NetEqMainInst->DSPinst.w16_mode & MODE_BGN_ONLY) != 0)
+    {
+        /* If last mode was background noise only */
+        *outputType = kOutputPLCtoCNG;
+
+    }
+    else if ((NetEqMainInst->DSPinst.w16_mode == MODE_CODEC_INTERNAL_CNG)
+        || (NetEqMainInst->DSPinst.w16_mode == MODE_RFC3389CNG))
+    {
+        /* If CN or internal CNG */
+        *outputType = kOutputCNG;
+
+#ifdef NETEQ_VAD
+    }
+    else if ( NetEqMainInst->DSPinst.VADInst.VADDecision == 0 )
+    {
+        /* post-decode VAD says passive speaker */
+        *outputType = kOutputVADPassive;
+#endif /* NETEQ_VAD */
+
+    }
+    else if ((NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
+        && (NetEqMainInst->DSPinst.ExpandInst.w16_expandMuteFactor == 0))
+    {
+        /* Expand mode has faded down to background noise only (very long expand) */
+        *outputType = kOutputPLCtoCNG;
+
+    }
+    else if (NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
+    {
+        /* PLC mode */
+        *outputType = kOutputPLC;
+
+    }
+    else
+    {
+        /* Normal speech output type (can still be manipulated, e.g., accelerated) */
+        *outputType = kOutputNormal;
+    }
+
+    return (0);
+}
+
+/**********************************
+ * Functions related to VQmon 
+ */
+
+/* Bit masks for the concealedVoiceFlags output of
+ * WebRtcNetEQ_VQmonRecOutStatistics(). Only the LOST flag is set by the
+ * code visible in this file. */
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_LOST       0x01
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_DISCARDED  0x02
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_SUPRESS    0x04
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_CNGACTIVE  0x80
+
+/* Report, per output frame, how many milliseconds were valid voice and how
+ * many were concealed, for VQmon monitoring. Returns 0 on success, -1 if
+ * inst is NULL. */
+int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
+                                      WebRtc_UWord16 *concealedVoiceDurationMs,
+                                      WebRtc_UWord8 *concealedVoiceFlags)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    WebRtc_Word16 fs_mult;
+    WebRtc_Word16 ms_lost;
+    if (NetEqMainInst == NULL) return (-1);
+    /* fs_mult = sample rate in multiples of 8 kHz, so 8 * fs_mult is the
+     * number of timestamp ticks per millisecond. */
+    fs_mult = WebRtcSpl_DivW32W16ResW16(NetEqMainInst->MCUinst.fs, 8000);
+
+    /* Convert the concealed-timestamp counter to whole milliseconds. */
+    ms_lost = WebRtcSpl_DivW32W16ResW16(
+        (WebRtc_Word32) NetEqMainInst->DSPinst.w16_concealedTS, (WebRtc_Word16) (8 * fs_mult));
+    /* Cap the reported concealment at one output frame's duration. */
+    if (ms_lost > NetEqMainInst->DSPinst.millisecondsPerCall) ms_lost
+        = NetEqMainInst->DSPinst.millisecondsPerCall;
+
+    *validVoiceDurationMs = NetEqMainInst->DSPinst.millisecondsPerCall - ms_lost;
+    *concealedVoiceDurationMs = ms_lost;
+    if (ms_lost > 0)
+    {
+        *concealedVoiceFlags = WEBRTC_NETEQ_CONCEALMENTFLAG_LOST;
+    }
+    else
+    {
+        *concealedVoiceFlags = 0;
+    }
+    /* Subtract only the whole milliseconds reported; any sub-millisecond
+     * remainder stays in the counter for the next call. */
+    NetEqMainInst->DSPinst.w16_concealedTS -= ms_lost * (8 * fs_mult);
+
+    return (0);
+}
+
+/* Report fixed jitter-buffer configuration parameters for VQmon.
+ * The instance is not consulted; both outputs are hardcoded.
+ * Always returns 0. */
+int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
+                                      WebRtc_UWord8 *adaptationRate)
+{
+    /* The instance is unused; cast to void instead of the previous dummy
+     * NULL check to suppress unused-parameter warnings idiomatically. */
+    (void) inst;
+
+    /* Hardcoded variables that are used for VQmon as jitter buffer parameters */
+    *absMaxDelayMs = 240;
+    *adaptationRate = 1;
+    return (0);
+}
+
+/* Report average and maximum jitter-buffer delay (in ms) for VQmon.
+ * Returns 0 on success, -1 if inst is NULL. */
+int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
+                                     WebRtc_UWord16 *maxDelayMs)
+{
+    MainInst_t *main_inst = (MainInst_t*) inst;
+
+    if (main_inst == NULL)
+    {
+        return (-1);
+    }
+
+    /* The average delay is kept in Q8; shift down to whole milliseconds. */
+    *avgDelayMs = (WebRtc_UWord16) (main_inst->MCUinst.BufferStat_inst.avgDelayMsQ8 >> 8);
+    *maxDelayMs = (WebRtc_UWord16) main_inst->MCUinst.BufferStat_inst.maxDelayMs;
+
+    return (0);
+}
+
+/*************************************
+ * Statistics functions
+ */
+
+/* Compute numerator/denominator as a Q14 fraction, saturating at 1.0.
+ * Returns 0 when numerator is 0 (nothing to report) or when the scaled
+ * denominator underflows to 0; returns 1 << 14 (i.e., 1.0 in Q14) when
+ * numerator >= denominator, which indicates timestamp wrap-around or some
+ * other inconsistency. The normalize/shift/divide sequence below reproduces
+ * exactly the fixed-point arithmetic that was previously copy-pasted once
+ * per rate statistic in WebRtcNetEQ_GetNetworkStatistics(). */
+static WebRtc_UWord16 WebRtcNetEQ_CalcFractionQ14(WebRtc_UWord32 numerator,
+                                                  WebRtc_UWord32 denominator)
+{
+    WebRtc_UWord16 denomU16;
+    int numShift;
+
+    if (numerator == 0)
+    {
+        /* Nothing counted; rate is exactly zero. */
+        return 0;
+    }
+
+    if (numerator >= denominator)
+    {
+        /* Count exceeds elapsed time; saturate at 1.0 in Q14. */
+        return (WebRtc_UWord16) (1 << 14);
+    }
+
+    /* Calculate shifts; we want the result in Q14. */
+    numShift = WebRtcSpl_NormU32(numerator); /* numerator shift for normalize */
+
+    if (numShift < 14)
+    {
+        /* Cannot shift numerator 14 steps; shift denominator too. */
+        denominator = WEBRTC_SPL_RSHIFT_U32(denominator, 14 - numShift);
+    }
+    else
+    {
+        /* Shift no more than 14 steps. */
+        numShift = 14;
+    }
+
+    if (denominator == 0)
+    {
+        /* Zero denominator after shifting; result should be zero. */
+        return 0;
+    }
+
+    /* Make sure the denominator fits in signed 16-bit. */
+    while (denominator > WEBRTC_SPL_WORD16_MAX)
+    {
+        denominator >>= 1; /* right-shift 1 step */
+        numShift--; /* compensate in numerator */
+    }
+    denomU16 = (WebRtc_UWord16) denominator;
+
+    /* Apply the (possibly negative by now) shift to the numerator, then
+     * divide; WEBRTC_SPL_SHIFT_W32 shifts right for negative counts. */
+    return (WebRtc_UWord16) WebRtcSpl_DivU32U16(
+        WEBRTC_SPL_SHIFT_W32(numerator, numShift), denomU16);
+}
+
+/* Get the "in-call" statistics from NetEQ.
+ * The statistics are reset after the query. */
+int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats)
+{
+    WebRtc_UWord32 elapsedTS;
+    WebRtc_UWord32 discardedSamples;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    /* Instance sanity */
+    if (NetEqMainInst == NULL) return (-1);
+
+    /*******************/
+    /* Get buffer size */
+    /*******************/
+
+    if (NetEqMainInst->MCUinst.fs != 0)
+    {
+        WebRtc_Word32 temp32;
+        /* Query packet buffer for number of samples. */
+        temp32 = WebRtcNetEQ_PacketBufferGetSize(
+            &NetEqMainInst->MCUinst.PacketBuffer_inst);
+
+        /* Divide by sample rate.
+         * Calculate temp32 * 1000 / fs to get result in ms. */
+        stats->currentBufferSize = (WebRtc_UWord16)
+            WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
+
+        /* Add number of samples yet to play in sync buffer. */
+        temp32 = (WebRtc_Word32) (NetEqMainInst->DSPinst.endPosition -
+            NetEqMainInst->DSPinst.curPosition);
+        stats->currentBufferSize += (WebRtc_UWord16)
+            WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
+    }
+    else
+    {
+        /* Sample rate not initialized. */
+        stats->currentBufferSize = 0;
+    }
+
+    /***************************/
+    /* Get optimal buffer size */
+    /***************************/
+
+    if (NetEqMainInst->MCUinst.fs != 0 && NetEqMainInst->MCUinst.fs <= WEBRTC_SPL_WORD16_MAX)
+    {
+        /* preferredBufferSize = Bopt * packSizeSamples / (fs/1000) */
+        stats->preferredBufferSize
+            = (WebRtc_UWord16) WEBRTC_SPL_MUL_16_16(
+                (WebRtc_Word16) ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.optBufLevel) >> 8), /* optimal buffer level in packets shifted to Q0 */
+                WebRtcSpl_DivW32W16ResW16(
+                    (WebRtc_Word32) NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.packetSpeechLenSamp, /* samples per packet */
+                    WebRtcSpl_DivW32W16ResW16( (WebRtc_Word32) NetEqMainInst->MCUinst.fs, (WebRtc_Word16) 1000 ) /* samples per ms */
+                ) );
+
+        /* add extra delay */
+        if (NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs > 0)
+        {
+            stats->preferredBufferSize
+                += NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs;
+        }
+    }
+    else
+    {
+        /* sample rate not initialized */
+        stats->preferredBufferSize = 0;
+    }
+
+    /***********************************/
+    /* Check if jitter peaks are found */
+    /***********************************/
+
+    stats->jitterPeaksFound =
+        NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.peakFound;
+
+    /****************************************************************/
+    /* Loss, discard, accelerate, expand and pre-emptive expand     */
+    /* rates, each as a Q14 fraction of the timestamps elapsed      */
+    /* since the last report.                                       */
+    /****************************************************************/
+
+    /* timestamps elapsed since last report */
+    elapsedTS = NetEqMainInst->MCUinst.lastReportTS;
+
+    /* number of discarded samples */
+    discardedSamples
+        = WEBRTC_SPL_MUL_16_U16( (WebRtc_Word16) NetEqMainInst->MCUinst.PacketBuffer_inst.packSizeSamples,
+            NetEqMainInst->MCUinst.PacketBuffer_inst.discardedPackets);
+
+    stats->currentPacketLossRate = WebRtcNetEQ_CalcFractionQ14(
+        (WebRtc_UWord32) NetEqMainInst->MCUinst.lostTS, elapsedTS);
+    stats->currentDiscardRate = WebRtcNetEQ_CalcFractionQ14(
+        discardedSamples, elapsedTS);
+    stats->currentAccelerateRate = WebRtcNetEQ_CalcFractionQ14(
+        NetEqMainInst->DSPinst.statInst.accelerateLength, elapsedTS);
+    stats->currentExpandRate = WebRtcNetEQ_CalcFractionQ14(
+        NetEqMainInst->DSPinst.statInst.expandLength, elapsedTS);
+    stats->currentPreemptiveRate = WebRtcNetEQ_CalcFractionQ14(
+        NetEqMainInst->DSPinst.statInst.preemptiveLength, elapsedTS);
+
+    stats->clockDriftPPM = WebRtcNetEQ_AverageIAT(
+        &NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst);
+
+    /* reset counters */
+    WebRtcNetEQ_ResetMcuInCallStats(&(NetEqMainInst->MCUinst));
+    WebRtcNetEQ_ClearInCallStats(&(NetEqMainInst->DSPinst));
+
+    return (0);
+}
+
+/* Copy up to max_length per-frame waiting times (converted from call counts
+ * to milliseconds) into waiting_times_ms, reset the internal waiting-time
+ * statistics, and return the number of entries written (-1 if inst is NULL). */
+int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
+                                        int max_length,
+                                        int* waiting_times_ms) {
+  MainInst_t *main_inst = (MainInst_t*) inst;
+  int count;
+  if (main_inst == NULL) return -1;
+
+  for (count = 0;
+       (count < max_length) && (count < main_inst->MCUinst.len_waiting_times);
+       ++count) {
+    waiting_times_ms[count] = main_inst->MCUinst.waiting_times[count] *
+        main_inst->DSPinst.millisecondsPerCall;
+  }
+  assert(count <= kLenWaitingTimes);
+  WebRtcNetEQ_ResetWaitingTimeStats(&main_inst->MCUinst);
+  return count;
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADInstance(...)
+ *
+ * Provide a pointer to an allocated VAD instance. If function is never 
+ * called or it is called with NULL pointer as VAD_inst, the post-decode
+ * VAD functionality is disabled. Also provide pointers to init, setmode
+ * and VAD functions. These are typically pointers to WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
+ * interface file webrtc_vad.h.
+ *
+ * Input:
+ *      - NetEQ_inst        : NetEQ instance
+ *		- VADinst		    : VAD instance
+ *		- initFunction	    : Pointer to VAD init function
+ *		- setmodeFunction   : Pointer to VAD setmode function
+ *		- VADfunction	    : Pointer to VAD function
+ *
+ * Output:
+ *		- NetEQ_inst	    : Updated NetEQ instance
+ *
+ * Return value			    :  0 - Ok
+ *						      -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
+                               WebRtcNetEQ_VADInitFunction initFunction,
+                               WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
+                               WebRtcNetEQ_VADFunction VADFunction)
+{
+
+    /* Typecast to internal instance type */
+    MainInst_t *NetEqMainInst = (MainInst_t*) NetEQ_inst;
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+#ifdef NETEQ_VAD
+
+    /* Store pointer in PostDecode VAD struct */
+    NetEqMainInst->DSPinst.VADInst.VADState = VAD_inst;
+
+    /* Store function pointers */
+    NetEqMainInst->DSPinst.VADInst.initFunction = initFunction;
+    NetEqMainInst->DSPinst.VADInst.setmodeFunction = setmodeFunction;
+    NetEqMainInst->DSPinst.VADInst.VADFunction = VADFunction;
+
+    /* Call init function and return the result (ok or fail) */
+    /* Initializes the stored VAD with the instance's DSP sample rate. */
+    return(WebRtcNetEQ_InitVAD(&NetEqMainInst->DSPinst.VADInst, NetEqMainInst->DSPinst.fs));
+
+#else /* NETEQ_VAD not defined */
+    /* Post-decode VAD support is compiled out; report failure. */
+    return (-1);
+#endif /* NETEQ_VAD */
+
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADMode(...)
+ *
+ * Pass an aggressiveness mode parameter to the post-decode VAD instance.
+ * If this function is never called, mode 0 (quality mode) is used as default.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *		- mode  		: mode parameter (same range as WebRtc VAD mode)
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+/* Forward an aggressiveness mode setting to the post-decode VAD.
+ * Returns the internal setmode result, or -1 if inst is NULL or the
+ * post-decode VAD (NETEQ_VAD) is compiled out. */
+int WebRtcNetEQ_SetVADMode(void *inst, int mode)
+{
+    /* Typecast to internal instance type */
+    MainInst_t *main_inst = (MainInst_t*) inst;
+
+    if (main_inst == NULL)
+    {
+        return (-1);
+    }
+
+#ifdef NETEQ_VAD
+    /* Pass the mode straight through and report the internal result. */
+    return (WebRtcNetEQ_SetVADModeInternal(&main_inst->DSPinst.VADInst, mode));
+#else /* NETEQ_VAD not defined */
+    return (-1);
+#endif /* NETEQ_VAD */
+}
diff --git a/trunk/src/modules/audio_coding/neteq/webrtc_neteq_unittest.cc b/trunk/src/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
new file mode 100644
index 0000000..fbb9cc2
--- /dev/null
+++ b/trunk/src/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
@@ -0,0 +1,605 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for NetEQ.
+ */
+
+#include <stdlib.h>
+#include <string.h>  // memset
+
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "modules/audio_coding/neteq/interface/webrtc_neteq.h"
+#include "modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
+#include "modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "testsupport/fileutils.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+// Helper that, for each value produced by a test, optionally records it to an
+// output file and/or compares it against a pre-recorded reference file.
+// Either file name may be empty, in which case that half is skipped.
+class RefFiles {
+ public:
+  RefFiles(const std::string& input_file, const std::string& output_file);
+  ~RefFiles();
+  // Writes |test_results| to the output file (if open), then compares it with
+  // the value read from the reference input file (if open).
+  template<class T> void ProcessReference(const T& test_results);
+  template<typename T, size_t n> void ProcessReference(
+      const T (&test_results)[n],
+      size_t length);
+  template<typename T, size_t n> void WriteToFile(
+      const T (&test_results)[n],
+      size_t length);
+  template<typename T, size_t n> void ReadFromFileAndCompare(
+      const T (&test_results)[n],
+      size_t length);
+  void WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats);
+  void ReadFromFileAndCompare(const WebRtcNetEQ_NetworkStatistics& stats);
+  void WriteToFile(const WebRtcNetEQ_RTCPStat& stats);
+  void ReadFromFileAndCompare(const WebRtcNetEQ_RTCPStat& stats);
+
+  FILE* input_fp_;   // Reference input; NULL when no reference was given.
+  FILE* output_fp_;  // Recording output; NULL when no output was requested.
+};
+
+// Opens the reference (read) and/or output (write) file when a non-empty
+// name is supplied; an empty name leaves the corresponding pointer NULL.
+RefFiles::RefFiles(const std::string &input_file,
+                   const std::string &output_file)
+    : input_fp_(NULL),
+      output_fp_(NULL) {
+  if (!input_file.empty()) {
+    input_fp_ = fopen(input_file.c_str(), "rb");
+    EXPECT_TRUE(input_fp_ != NULL);
+  }
+  if (!output_file.empty()) {
+    output_fp_ = fopen(output_file.c_str(), "wb");
+    EXPECT_TRUE(output_fp_ != NULL);
+  }
+}
+
+// Closes any open files; also verifies the reference file was fully consumed,
+// i.e. the test produced exactly as much data as was recorded.
+RefFiles::~RefFiles() {
+  if (input_fp_) {
+    EXPECT_EQ(EOF, fgetc(input_fp_));  // Make sure that we reached the end.
+    fclose(input_fp_);
+  }
+  if (output_fp_) fclose(output_fp_);
+}
+
+// Records |test_results| (if recording) and compares it against the stored
+// reference (if comparing). Scalar/struct variant.
+template<class T>
+void RefFiles::ProcessReference(const T& test_results) {
+  WriteToFile(test_results);
+  ReadFromFileAndCompare(test_results);
+}
+
+// Array variant: only the first |length| elements are recorded/compared.
+template<typename T, size_t n>
+void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
+  WriteToFile(test_results, length);
+  ReadFromFileAndCompare(test_results, length);
+}
+
+// Appends the first |length| elements of |test_results| to the output file.
+// No-op when no output file is open.
+template<typename T, size_t n>
+void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
+  if (output_fp_) {
+    ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
+  }
+}
+
+// Reads |length| elements from the reference file and compares them byte-wise
+// with |test_results|. No-op when no reference file is open.
+template<typename T, size_t n>
+void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
+                                      size_t length) {
+  if (input_fp_ == NULL || length == 0)
+    return;
+  // Read from ref file. A std::vector releases its storage even when
+  // ASSERT_EQ returns early on a short read; the original new[]/delete[]
+  // pair leaked the buffer in that case.
+  std::vector<T> ref(length);
+  ASSERT_EQ(length, fread(&ref[0], sizeof(T), length, input_fp_));
+  // Compare
+  EXPECT_EQ(0, memcmp(&test_results, &ref[0], sizeof(T) * length));
+}
+
+// Appends one whole WebRtcNetEQ_NetworkStatistics struct to the output file.
+// NOTE(review): writes the raw struct, so the reference file layout depends
+// on compiler padding — confirm producer and consumer use the same ABI.
+void RefFiles::WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats) {
+  if (output_fp_) {
+    ASSERT_EQ(1u, fwrite(&stats, sizeof(WebRtcNetEQ_NetworkStatistics), 1,
+                         output_fp_));
+  }
+}
+
+// Reads one WebRtcNetEQ_NetworkStatistics struct from the reference file and
+// compares it byte-wise with |stats|. No-op without an open reference file.
+void RefFiles::ReadFromFileAndCompare(
+    const WebRtcNetEQ_NetworkStatistics& stats) {
+  if (input_fp_) {
+    // Read from ref file.
+    size_t stat_size = sizeof(WebRtcNetEQ_NetworkStatistics);
+    WebRtcNetEQ_NetworkStatistics ref_stats;
+    ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
+    // Compare
+    EXPECT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
+  }
+}
+
+// Appends the RTCP statistics to the output file. Fields are written one by
+// one (rather than the whole struct) so the file layout is independent of
+// struct padding.
+void RefFiles::WriteToFile(const WebRtcNetEQ_RTCPStat& stats) {
+  if (output_fp_) {
+    ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
+                         output_fp_));
+    ASSERT_EQ(1u, fwrite(&(stats.cum_lost), sizeof(stats.cum_lost), 1,
+                         output_fp_));
+    ASSERT_EQ(1u, fwrite(&(stats.ext_max), sizeof(stats.ext_max), 1,
+                         output_fp_));
+    ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
+                         output_fp_));
+  }
+}
+
+// Reads the RTCP statistics field by field (mirroring WriteToFile's layout)
+// and compares each field individually for precise failure messages.
+void RefFiles::ReadFromFileAndCompare(
+    const WebRtcNetEQ_RTCPStat& stats) {
+  if (input_fp_) {
+    // Read from ref file.
+    WebRtcNetEQ_RTCPStat ref_stats;
+    ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
+                        sizeof(ref_stats.fraction_lost), 1, input_fp_));
+    ASSERT_EQ(1u, fread(&(ref_stats.cum_lost), sizeof(ref_stats.cum_lost), 1,
+                        input_fp_));
+    ASSERT_EQ(1u, fread(&(ref_stats.ext_max), sizeof(ref_stats.ext_max), 1,
+                        input_fp_));
+    ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
+                        input_fp_));
+    // Compare
+    EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
+    EXPECT_EQ(ref_stats.cum_lost, stats.cum_lost);
+    EXPECT_EQ(ref_stats.ext_max, stats.ext_max);
+    EXPECT_EQ(ref_stats.jitter, stats.jitter);
+  }
+}
+
+// Test fixture that drives a complete NetEQ instance: it feeds RTP packets in
+// and pulls decoded 10 ms audio blocks out, simulating wall-clock time.
+class NetEqDecodingTest : public ::testing::Test {
+ protected:
+  // NetEQ must be polled for data once every 10 ms. Thus, neither of the
+  // constants below can be changed.
+  static const int kTimeStepMs = 10;
+  static const int kBlockSize8kHz = kTimeStepMs * 8;    // Samples per pull.
+  static const int kBlockSize16kHz = kTimeStepMs * 16;
+  static const int kBlockSize32kHz = kTimeStepMs * 32;
+  static const int kMaxBlockSize = kBlockSize32kHz;
+
+  NetEqDecodingTest();
+  virtual void SetUp();
+  virtual void TearDown();
+  // Fills |used_codec| and |dec_| with the decoder set used by all tests.
+  void SelectDecoders(WebRtcNetEQDecoder* used_codec);
+  void LoadDecoders();
+  void OpenInputFile(const std::string &rtp_file);
+  // Advances the simulation by one 10 ms step: inserts due packets, pulls one
+  // output block into out_data_, and reports its length in |out_len|.
+  void Process(NETEQTEST_RTPpacket* rtp_ptr, int16_t* out_len);
+  void DecodeAndCompare(const std::string &rtp_file,
+                        const std::string &ref_file);
+  void DecodeAndCheckStats(const std::string &rtp_file,
+                           const std::string &stat_ref_file,
+                           const std::string &rtcp_ref_file);
+  static void PopulateRtpInfo(int frame_index,
+                              int timestamp,
+                              WebRtcNetEQ_RTPInfo* rtp_info);
+  static void PopulateCng(int frame_index,
+                          int timestamp,
+                          WebRtcNetEQ_RTPInfo* rtp_info,
+                          uint8_t* payload,
+                          int* payload_len);
+
+  NETEQTEST_NetEQClass* neteq_inst_;       // NetEQ under test (owned).
+  std::vector<NETEQTEST_Decoder*> dec_;    // Codec wrappers (owned).
+  FILE* rtp_fp_;                           // Open RTP dump file, or NULL.
+  unsigned int sim_clock_;                 // Simulated time in ms.
+  int16_t out_data_[kMaxBlockSize];        // Most recent output block.
+};
+
+// Zero-initializes all state; real setup happens in SetUp().
+NetEqDecodingTest::NetEqDecodingTest()
+    : neteq_inst_(NULL),
+      rtp_fp_(NULL),
+      sim_clock_(0) {
+  memset(out_data_, 0, sizeof(out_data_));
+}
+
+// Creates the NetEQ instance at 8 kHz with a large-jitter buffer policy and
+// registers all test decoders.
+void NetEqDecodingTest::SetUp() {
+  // Generously sized: SelectDecoders() fills only the first dec_.size() slots.
+  WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd - 1];
+
+  SelectDecoders(usedCodec);
+  neteq_inst_ = new NETEQTEST_NetEQClass(usedCodec, dec_.size(), 8000,
+                                         kTCPLargeJitter);
+  ASSERT_TRUE(neteq_inst_);
+  LoadDecoders();
+}
+
+// Releases the NetEQ instance, all decoder wrappers, and the RTP dump file.
+void NetEqDecodingTest::TearDown() {
+  // delete on a NULL pointer is a no-op, so no guards are needed here.
+  delete neteq_inst_;
+  for (size_t i = 0; i < dec_.size(); ++i) {
+    delete dec_[i];
+  }
+  // fclose(NULL) is undefined behavior, so this guard must stay.
+  if (rtp_fp_)
+    fclose(rtp_fp_);
+}
+
+// Fills |used_codec| with one enum per decoder and pushes a matching wrapper
+// object (constructed with its RTP payload type) onto dec_. The caller must
+// supply an array large enough for every entry written here; the two lists
+// are kept in lock-step by construction.
+void NetEqDecodingTest::SelectDecoders(WebRtcNetEQDecoder* used_codec) {
+  *used_codec++ = kDecoderPCMu;
+  dec_.push_back(new decoder_PCMU(0));
+  *used_codec++ = kDecoderPCMa;
+  dec_.push_back(new decoder_PCMA(8));
+  *used_codec++ = kDecoderILBC;
+  dec_.push_back(new decoder_ILBC(102));
+  *used_codec++ = kDecoderISAC;
+  dec_.push_back(new decoder_iSAC(103));
+  *used_codec++ = kDecoderISACswb;
+  dec_.push_back(new decoder_iSACSWB(104));
+  *used_codec++ = kDecoderPCM16B;
+  dec_.push_back(new decoder_PCM16B_NB(93));
+  *used_codec++ = kDecoderPCM16Bwb;
+  dec_.push_back(new decoder_PCM16B_WB(94));
+  *used_codec++ = kDecoderPCM16Bswb32kHz;
+  dec_.push_back(new decoder_PCM16B_SWB32(95));
+  // CNG is registered twice: once per sample rate (8 kHz and 16 kHz).
+  *used_codec++ = kDecoderCNG;
+  dec_.push_back(new decoder_CNG(13, 8000));
+  *used_codec++ = kDecoderCNG;
+  dec_.push_back(new decoder_CNG(98, 16000));
+}
+
+// Registers every decoder wrapper in dec_ with the NetEQ instance.
+void NetEqDecodingTest::LoadDecoders() {
+  for (size_t i = 0; i < dec_.size(); ++i) {
+    ASSERT_EQ(0, dec_[i]->loadToNetEQ(*neteq_inst_));
+  }
+}
+
+// Opens an RTP dump file and positions the read cursor past its file header.
+void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
+  rtp_fp_ = fopen(rtp_file.c_str(), "rb");
+  ASSERT_TRUE(rtp_fp_ != NULL);
+  ASSERT_EQ(0, NETEQTEST_RTPpacket::skipFileHeader(rtp_fp_));
+}
+
+// One 10 ms simulation step: feeds NetEQ every packet whose arrival time has
+// come, pulls one output block into out_data_, and advances the clock.
+// On return, |rtp| holds the next not-yet-due packet.
+void NetEqDecodingTest::Process(NETEQTEST_RTPpacket* rtp, int16_t* out_len) {
+  // Check if time to receive.
+  // NOTE(review): a negative dataLen() appears to signal end-of-file —
+  // confirm against NETEQTEST_RTPpacket.
+  while ((sim_clock_ >= rtp->time()) &&
+         (rtp->dataLen() >= 0)) {
+    if (rtp->dataLen() > 0) {
+      ASSERT_EQ(0, neteq_inst_->recIn(*rtp));
+    }
+    // Get next packet.
+    ASSERT_NE(-1, rtp->readFromFile(rtp_fp_));
+  }
+
+  // RecOut
+  *out_len = neteq_inst_->recOut(out_data_);
+  // Exactly one 10 ms block at 8, 16, or 32 kHz is expected per pull.
+  ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
+              (*out_len == kBlockSize16kHz) ||
+              (*out_len == kBlockSize32kHz));
+
+  // Increase time.
+  sim_clock_ += kTimeStepMs;
+}
+
+// Decodes the whole RTP file and compares the audio output block by block
+// against |ref_file|. With an empty |ref_file|, records a new reference
+// ("neteq_out.pcm") instead of comparing.
+void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file,
+                                         const std::string &ref_file) {
+  OpenInputFile(rtp_file);
+
+  std::string ref_out_file = "";
+  if (ref_file.empty()) {
+    ref_out_file = webrtc::test::OutputPath() + "neteq_out.pcm";
+  }
+  RefFiles ref_files(ref_file, ref_out_file);
+
+  NETEQTEST_RTPpacket rtp;
+  ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
+  // Loop until the packet reader signals end-of-file (negative dataLen).
+  while (rtp.dataLen() >= 0) {
+    int16_t out_len;
+    Process(&rtp, &out_len);
+    ref_files.ProcessReference(out_data_, out_len);
+  }
+}
+
+// Decodes the whole RTP file while sampling the network- and RTCP-statistics
+// APIs once per simulated second, comparing each sample against (or, with an
+// empty reference name, recording it to) the given reference files.
+void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
+                                            const std::string &stat_ref_file,
+                                            const std::string &rtcp_ref_file) {
+  OpenInputFile(rtp_file);
+  std::string stat_out_file = "";
+  if (stat_ref_file.empty()) {
+    stat_out_file = webrtc::test::OutputPath() +
+        "neteq_network_stats.dat";
+  }
+  RefFiles network_stat_files(stat_ref_file, stat_out_file);
+
+  std::string rtcp_out_file = "";
+  if (rtcp_ref_file.empty()) {
+    rtcp_out_file = webrtc::test::OutputPath() +
+        "neteq_rtcp_stats.dat";
+  }
+  RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
+
+  NETEQTEST_RTPpacket rtp;
+  ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
+  while (rtp.dataLen() >= 0) {
+    int16_t out_len;
+    Process(&rtp, &out_len);
+
+    // Query the network statistics API once per second
+    // (sim_clock_ advances in 10 ms steps inside Process()).
+    if (sim_clock_ % 1000 == 0) {
+      // Process NetworkStatistics.
+      WebRtcNetEQ_NetworkStatistics network_stats;
+      ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
+                                                    &network_stats));
+      network_stat_files.ProcessReference(network_stats);
+
+      // Process RTCPstat.
+      WebRtcNetEQ_RTCPStat rtcp_stats;
+      ASSERT_EQ(0, WebRtcNetEQ_GetRTCPStats(neteq_inst_->instance(),
+                                            &rtcp_stats));
+      rtcp_stat_files.ProcessReference(rtcp_stats);
+    }
+  }
+}
+
+// Fills |rtp_info| with a PCM16b-WB header for dummy frame |frame_index|.
+void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
+                                        int timestamp,
+                                        WebRtcNetEQ_RTPInfo* rtp_info) {
+  rtp_info->sequenceNumber = frame_index;
+  rtp_info->timeStamp = timestamp;
+  rtp_info->SSRC = 0x1234;  // Just an arbitrary SSRC.
+  rtp_info->payloadType = 94;  // PCM16b WB codec.
+  rtp_info->markerBit = 0;
+}
+
+// Fills |rtp_info| with a wideband-CNG header and writes a minimal 1-byte
+// CNG payload (noise level only) into |payload|/|payload_len|.
+void NetEqDecodingTest::PopulateCng(int frame_index,
+                                    int timestamp,
+                                    WebRtcNetEQ_RTPInfo* rtp_info,
+                                    uint8_t* payload,
+                                    int* payload_len) {
+  rtp_info->sequenceNumber = frame_index;
+  rtp_info->timeStamp = timestamp;
+  rtp_info->SSRC = 0x1234;  // Just an arbitrary SSRC.
+  rtp_info->payloadType = 98;  // WB CNG.
+  rtp_info->markerBit = 0;
+  payload[0] = 64;  // Noise level -64 dBov, quite arbitrarily chosen.
+  *payload_len = 1;  // Only noise level, no spectral parameters.
+}
+
+// Verifies bit-exact audio output against a pre-recorded reference PCM file.
+TEST_F(NetEqDecodingTest, TestBitExactness) {
+  const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+      "resources/neteq_universal.rtp";
+  const std::string kInputRefFile =
+      webrtc::test::ResourcePath("neteq_universal_ref", "pcm");
+  DecodeAndCompare(kInputRtpFile, kInputRefFile);
+}
+
+// Verifies network and RTCP statistics against pre-recorded reference files.
+TEST_F(NetEqDecodingTest, TestNetworkStatistics) {
+  const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+      "resources/neteq_universal.rtp";
+  const std::string kNetworkStatRefFile =
+      webrtc::test::ResourcePath("neteq_network_stats", "dat");
+  const std::string kRtcpStatRefFile =
+      webrtc::test::ResourcePath("neteq_rtcp_stats", "dat");
+  DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile);
+}
+
+// Verifies the raw frame waiting-time statistics: packets inserted in a burst
+// but pulled at 10 ms intervals must show linearly increasing waiting times,
+// the counter must reset after being read, and it must cap at 100 entries.
+TEST_F(NetEqDecodingTest, TestFrameWaitingTimeStatistics) {
+  // Use fax mode to avoid time-scaling. This is to simplify the testing of
+  // packet waiting times in the packet buffer.
+  ASSERT_EQ(0,
+            WebRtcNetEQ_SetPlayoutMode(neteq_inst_->instance(), kPlayoutFax));
+  // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
+  int num_frames = 30;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  for (int i = 0; i < num_frames; ++i) {
+    uint16_t payload[kSamples] = {0};
+    WebRtcNetEQ_RTPInfo rtp_info;
+    // Use the shared helper instead of duplicating the field-by-field fill.
+    PopulateRtpInfo(i, i * kSamples, &rtp_info);
+    ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
+                                            reinterpret_cast<uint8_t*>(payload),
+                                            kPayloadBytes, 0));
+  }
+  // Pull out all data.
+  for (int i = 0; i < num_frames; ++i) {
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+  const int kVecLen = 110;  // More than kLenWaitingTimes in mcu.h.
+  int waiting_times[kVecLen];
+  int len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
+                                                kVecLen, waiting_times);
+  EXPECT_EQ(num_frames, len);
+  // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms
+  // spacing (per definition), we expect the delay to increase with 10 ms for
+  // each packet.
+  for (int i = 0; i < len; ++i) {
+    EXPECT_EQ((i + 1) * 10, waiting_times[i]);
+  }
+
+  // Check statistics again and make sure it's been reset.
+  EXPECT_EQ(0, WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
+                                                   kVecLen, waiting_times));
+
+  // Process > 100 frames, and make sure that we get statistics
+  // only for 100 frames. Note the new SSRC, causing NetEQ to reset.
+  num_frames = 110;
+  for (int i = 0; i < num_frames; ++i) {
+    uint16_t payload[kSamples] = {0};
+    WebRtcNetEQ_RTPInfo rtp_info;
+    PopulateRtpInfo(i, i * kSamples, &rtp_info);
+    rtp_info.SSRC = 0x1235;  // New SSRC triggers the NetEQ reset.
+    ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
+                                            reinterpret_cast<uint8_t*>(payload),
+                                            kPayloadBytes, 0));
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
+                                            kVecLen, waiting_times);
+  EXPECT_EQ(100, len);
+}
+
+// Simulates a ~10% negative clock drift (sender faster than receiver) and
+// checks that the converged drift estimate matches the recorded value.
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
+  const int kNumFrames = 3000;  // Needed for convergence.
+  int frame_index = 0;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  while (frame_index < kNumFrames) {
+    // Insert one packet each time, except every 10th time where we insert two
+    // packets at once. This will create a negative clock-drift of approx. 10%.
+    int num_packets = (frame_index % 10 == 0 ? 2 : 1);
+    for (int n = 0; n < num_packets; ++n) {
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++frame_index;
+    }
+
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  WebRtcNetEQ_NetworkStatistics network_stats;
+  ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
+                                                &network_stats));
+  // Golden value recorded from a known-good run (approx. -10% in PPM).
+  EXPECT_EQ(-106911, network_stats.clockDriftPPM);
+}
+
+// Simulates a ~11% positive clock drift (sender slower than receiver) and
+// checks that the converged drift estimate matches the recorded value.
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
+  const int kNumFrames = 5000;  // Needed for convergence.
+  int frame_index = 0;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  for (int i = 0; i < kNumFrames; ++i) {
+    // Insert one packet each time, except every 10th time where we don't insert
+    // any packet. This will create a positive clock-drift of approx. 11%.
+    int num_packets = (i % 10 == 9 ? 0 : 1);
+    for (int n = 0; n < num_packets; ++n) {
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++frame_index;
+    }
+
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  WebRtcNetEQ_NetworkStatistics network_stats;
+  ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
+                                                &network_stats));
+  // Golden value recorded from a known-good run (approx. +11% in PPM).
+  EXPECT_EQ(108352, network_stats.clockDriftPPM);
+}
+
+// Plays 5 s of speech, then a full minute of comfort noise, then speech again,
+// all under -25 ms/s clock drift, and verifies that the playout delay after
+// the CNG period is within 20 ms of the delay before it (i.e. NetEQ tracked
+// the drift during CNG instead of accumulating delay).
+TEST_F(NetEqDecodingTest, LongCngWithClockDrift) {
+  uint16_t seq_no = 0;
+  uint32_t timestamp = 0;
+  const int kFrameSizeMs = 30;
+  const int kSamples = kFrameSizeMs * 16;
+  const int kPayloadBytes = kSamples * 2;
+  // Apply a clock drift of -25 ms / s (sender faster than receiver).
+  const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
+  double next_input_time_ms = 0.0;
+  double t_ms;
+
+  // Insert speech for 5 seconds.
+  const int kSpeechDurationMs = 5000;
+  for (t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
+    // Each turn in this for loop is 10 ms.
+    while (next_input_time_ms <= t_ms) {
+      // Insert one 30 ms speech frame.
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++seq_no;
+      timestamp += kSamples;
+      next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
+    }
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  EXPECT_EQ(kOutputNormal, neteq_inst_->getOutputType());
+  // Delay = newest inserted timestamp minus the timestamp being played out.
+  int32_t delay_before = timestamp - neteq_inst_->getSpeechTimeStamp();
+
+  // Insert CNG for 1 minute (= 60000 ms).
+  const int kCngPeriodMs = 100;
+  const int kCngPeriodSamples = kCngPeriodMs * 16;  // Period in 16 kHz samples.
+  const int kCngDurationMs = 60000;
+  for (; t_ms < kSpeechDurationMs + kCngDurationMs; t_ms += 10) {
+    // Each turn in this for loop is 10 ms.
+    while (next_input_time_ms <= t_ms) {
+      // Insert one CNG frame each 100 ms.
+      uint8_t payload[kPayloadBytes];
+      int payload_len;
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           payload_len, 0));
+      ++seq_no;
+      timestamp += kCngPeriodSamples;
+      next_input_time_ms += static_cast<double>(kCngPeriodMs) * kDriftFactor;
+    }
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  EXPECT_EQ(kOutputCNG, neteq_inst_->getOutputType());
+
+  // Insert speech again until output type is speech.
+  while (neteq_inst_->getOutputType() != kOutputNormal) {
+    // Each turn in this for loop is 10 ms.
+    while (next_input_time_ms <= t_ms) {
+      // Insert one 30 ms speech frame.
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++seq_no;
+      timestamp += kSamples;
+      next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
+    }
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+    // Increase clock.
+    t_ms += 10;
+  }
+
+  int32_t delay_after = timestamp - neteq_inst_->getSpeechTimeStamp();
+  // Compare delay before and after, and make sure it differs less than 20 ms.
+  EXPECT_LE(delay_after, delay_before + 20 * 16);
+  EXPECT_GE(delay_after, delay_before - 20 * 16);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_conference_mixer/OWNERS b/trunk/src/modules/audio_conference_mixer/OWNERS
new file mode 100644
index 0000000..7dc791e
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/OWNERS
@@ -0,0 +1,3 @@
+henrike@webrtc.org
+pwestin@webrtc.org
+andrew@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/modules/audio_conference_mixer/interface/audio_conference_mixer.h b/trunk/src/modules/audio_conference_mixer/interface/audio_conference_mixer.h
new file mode 100644
index 0000000..4ece1bf
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/interface/audio_conference_mixer.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
+
+#include "audio_conference_mixer_defines.h"
+#include "module.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+class AudioMixerOutputReceiver;
+class AudioMixerStatusReceiver;
+class MixerParticipant;
+class Trace;
+
+// Mixes audio from registered participants into a single stream, mixing at
+// most kMaximumAmountOfMixedParticipants regular participants at a time
+// (anonymous participants are always mixed and do not count toward the cap).
+class AudioConferenceMixer : public Module
+{
+public:
+    enum {kMaximumAmountOfMixedParticipants = 3};
+    enum Frequency
+    {
+        kNbInHz           = 8000,
+        kWbInHz           = 16000,
+        kSwbInHz          = 32000,
+        kLowestPossible   = -1,
+        kDefaultFrequency = kWbInHz
+    };
+
+    // Factory method. Constructor disabled.
+    static AudioConferenceMixer* Create(int id);
+    virtual ~AudioConferenceMixer() {}
+
+    // Module functions
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+    virtual WebRtc_Word32 TimeUntilNextProcess() = 0 ;
+    virtual WebRtc_Word32 Process() = 0;
+
+    // Register/unregister a callback class for receiving the mixed audio.
+    virtual WebRtc_Word32 RegisterMixedStreamCallback(
+        AudioMixerOutputReceiver& receiver) = 0;
+    virtual WebRtc_Word32 UnRegisterMixedStreamCallback() = 0;
+
+    // Register/unregister a callback class for receiving status information.
+    virtual WebRtc_Word32 RegisterMixerStatusCallback(
+        AudioMixerStatusReceiver& mixerStatusCallback,
+        const WebRtc_UWord32 amountOf10MsBetweenCallbacks) = 0;
+    virtual WebRtc_Word32 UnRegisterMixerStatusCallback() = 0;
+
+    // Add/remove participants as candidates for mixing.
+    virtual WebRtc_Word32 SetMixabilityStatus(
+        MixerParticipant& participant,
+        const bool mixable) = 0;
+    // mixable is set to true if a participant is a candidate for mixing.
+    virtual WebRtc_Word32 MixabilityStatus(
+        MixerParticipant& participant,
+        bool& mixable) = 0;
+
+    // Inform the mixer that the participant should always be mixed and not
+    // count toward the number of mixed participants. Note that a participant
+    // must have been added to the mixer (by calling SetMixabilityStatus())
+    // before this function can be successfully called.
+    virtual WebRtc_Word32 SetAnonymousMixabilityStatus(
+        MixerParticipant& participant, const bool mixable) = 0;
+    // mixable is set to true if the participant is mixed anonymously.
+    virtual WebRtc_Word32 AnonymousMixabilityStatus(
+        MixerParticipant& participant, bool& mixable) = 0;
+
+    // Set the minimum sampling frequency at which to mix. The mixing algorithm
+    // may still choose to mix at a higher sampling frequency to avoid
+    // downsampling of audio contributing to the mixed audio.
+    virtual WebRtc_Word32 SetMinimumMixingFrequency(Frequency freq) = 0;
+
+protected:
+    AudioConferenceMixer() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
diff --git a/trunk/src/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h b/trunk/src/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
new file mode 100644
index 0000000..718470d
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+
+#include "map_wrapper.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class MixHistory;
+
+// A callback class that all mixer participants must inherit from/implement.
+class MixerParticipant
+{
+public:
+    // The implementation of this function should update audioFrame with new
+    // audio every time it's called.
+    //
+    // If it returns -1, the frame will not be added to the mix.
+    virtual WebRtc_Word32 GetAudioFrame(const WebRtc_Word32 id,
+                                        AudioFrame& audioFrame) = 0;
+
+    // mixed will be set to true if the participant was mixed this mix iteration
+    WebRtc_Word32 IsMixed(bool& mixed) const;
+
+    // This function specifies the sampling frequency needed for the AudioFrame
+    // for future GetAudioFrame(..) calls.
+    virtual WebRtc_Word32 NeededFrequency(const WebRtc_Word32 id) = 0;
+
+    // Mixing history bookkeeping, managed by the mixer (backs IsMixed()).
+    MixHistory* _mixHistory;
+protected:
+    MixerParticipant();
+    virtual ~MixerParticipant();
+};
+
+// Container struct for participant statistics.
+struct ParticipantStatistics
+{
+    WebRtc_Word32 participant;  // Identifies the participant.
+    WebRtc_Word32 level;        // Audio level of that participant.
+};
+
+// Callback interface for receiving periodic mixer status reports.
+class AudioMixerStatusReceiver
+{
+public:
+    // Callback function that provides an array of ParticipantStatistics for the
+    // participants that were mixed last mix iteration.
+    virtual void MixedParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size) = 0;
+    // Callback function that provides an array of the ParticipantStatistics for
+    // the participants that had a positive VAD last mix iteration.
+    virtual void VADPositiveParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size) = 0;
+    // Callback function that provides the audio level of the mixed audio frame
+    // from the last mix iteration.
+    virtual void MixedAudioLevel(
+        const WebRtc_Word32  id,
+        const WebRtc_UWord32 level) = 0;
+protected:
+    AudioMixerStatusReceiver() {}
+    virtual ~AudioMixerStatusReceiver() {}
+};
+
+// Callback interface for receiving the mixed audio each mix iteration.
+class AudioMixerOutputReceiver
+{
+public:
+    // This callback function provides the mixed audio for this mix iteration.
+    // Note that uniqueAudioFrames is an array of AudioFrame pointers with the
+    // size according to the size parameter.
+    virtual void NewMixedAudio(const WebRtc_Word32 id,
+                               const AudioFrame& generalAudioFrame,
+                               const AudioFrame** uniqueAudioFrames,
+                               const WebRtc_UWord32 size) = 0;
+protected:
+    AudioMixerOutputReceiver() {}
+    virtual ~AudioMixerOutputReceiver() {}
+};
+
+// Callback interface for receiving the mixer's relay decision.
+class AudioRelayReceiver
+{
+public:
+    // This callback function provides the mix decision for this mix iteration.
+    // mixerList is a list of elements of the type
+    // [int,MixerParticipant*]
+    virtual void NewAudioToRelay(const WebRtc_Word32 id,
+                                 const MapWrapper& mixerList) = 0;
+protected:
+    AudioRelayReceiver() {}
+    virtual ~AudioRelayReceiver() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/Android.mk b/trunk/src/modules/audio_conference_mixer/source/Android.mk
new file mode 100644
index 0000000..a90ff1d
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/Android.mk
@@ -0,0 +1,44 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Builds libwebrtc_audio_conference_mixer as a static library for Android.
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings; presumably provides
+# MY_WEBRTC_COMMON_DEFS used below -- confirm in android-webrtc.mk.
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_audio_conference_mixer
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    audio_frame_manipulator.cc \
+    level_indicator.cc \
+    audio_conference_mixer_impl.cc \
+    time_scheduler.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+# Module-local, module-parent and system_wrappers interface headers.
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../audio_processing/include \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../system_wrappers/interface
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# When building inside the Android platform tree (no NDK), pull in the
+# platform stlport build rules.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi
new file mode 100644
index 0000000..fd116c6
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi
@@ -0,0 +1,70 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      # Core conference mixer library.
+      'target_name': 'audio_conference_mixer',
+      'type': '<(library)',
+      'dependencies': [
+        'audio_processing',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      # Export the public include dirs to dependent targets.
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        '../interface/audio_conference_mixer.h',
+        '../interface/audio_conference_mixer_defines.h',
+        'audio_frame_manipulator.cc',
+        'audio_frame_manipulator.h',
+        'level_indicator.cc',
+        'level_indicator.h',
+        'memory_pool.h',
+        'memory_pool_posix.h',
+        'memory_pool_win.h',
+        'audio_conference_mixer_impl.cc',
+        'audio_conference_mixer_impl.h',
+        'time_scheduler.cc',
+        'time_scheduler.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    # Unit tests are only built in standalone (non-Chromium) builds.
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'audio_conference_mixer_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'audio_conference_mixer',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+            'audio_conference_mixer_unittest.cc',
+          ],
+        }, # audio_conference_mixer_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
new file mode 100644
index 0000000..8da0ecd
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -0,0 +1,1187 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_conference_mixer_defines.h"
+#include "audio_conference_mixer_impl.h"
+#include "audio_frame_manipulator.h"
+#include "audio_processing.h"
+#include "critical_section_wrapper.h"
+#include "map_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+namespace {
+// Copies the participant id and audio level of |frame| into |stats|.
+// Helper used when reporting per-participant statistics.
+void SetParticipantStatistics(ParticipantStatistics* stats,
+                              const AudioFrame& frame)
+{
+    stats->participant = frame._id;
+    stats->level = frame._volume;
+}
+}  // namespace
+
+// A MixerParticipant owns its MixHistory for its whole lifetime.
+MixerParticipant::MixerParticipant()
+    : _mixHistory(new MixHistory())
+{
+}
+
+MixerParticipant::~MixerParticipant()
+{
+    delete _mixHistory;
+}
+
+// Sets |mixed| to whether this participant was mixed in the last mix
+// iteration. Always returns 0.
+WebRtc_Word32 MixerParticipant::IsMixed(bool& mixed) const
+{
+    return _mixHistory->IsMixed(mixed);
+}
+
+// |_isMixed| holds 0/1; read through Value() -- presumably an atomic
+// wrapper type, confirm in the class declaration.
+MixHistory::MixHistory()
+    : _isMixed(0)
+{
+}
+
+MixHistory::~MixHistory()
+{
+}
+
+// Sets |mixed| to true if the participant was mixed in the last mix
+// iteration. Always returns 0.
+WebRtc_Word32 MixHistory::IsMixed(bool& mixed) const
+{
+    mixed = (_isMixed.Value() == 1);
+    return 0;
+}
+
+WebRtc_Word32 MixHistory::WasMixed(bool& wasMixed) const
+{
+    // Was mixed is the same as is mixed depending on perspective. This function
+    // is for the perspective of AudioConferenceMixerImpl.
+    return IsMixed(wasMixed);
+}
+
+// Records whether the participant was mixed this iteration (stored as 0/1).
+WebRtc_Word32 MixHistory::SetIsMixed(const bool mixed)
+{
+    _isMixed = mixed ? 1 : 0;
+    return 0;
+}
+
+void MixHistory::ResetMixedStatus()
+{
+    _isMixed  = 0;
+}
+
+// Factory method: constructs and initializes an AudioConferenceMixerImpl.
+// Returns NULL if second-stage initialization (Init()) fails; the caller
+// owns the returned mixer.
+AudioConferenceMixer* AudioConferenceMixer::Create(int id)
+{
+    AudioConferenceMixerImpl* mixer = new AudioConferenceMixerImpl(id);
+    if(!mixer->Init())
+    {
+        delete mixer;
+        return NULL;
+    }
+    return mixer;
+}
+
+// Constructor only sets trivial defaults; locks, the limiter and the frame
+// pool are created in Init() (two-phase initialization driven by Create()).
+AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id)
+    : _scratchParticipantsToMixAmount(0),
+      _scratchMixedParticipants(),
+      _scratchVadPositiveParticipantsAmount(0),
+      _scratchVadPositiveParticipants(),
+      _crit(NULL),
+      _cbCrit(NULL),
+      _id(id),
+      _minimumMixingFreq(kLowestPossible),
+      _mixReceiver(NULL),
+      _mixerStatusCallback(NULL),
+      _amountOf10MsBetweenCallbacks(1),
+      _amountOf10MsUntilNextCallback(0),
+      _mixerStatusCb(false),
+      _outputFrequency(kDefaultFrequency),
+      _sampleSize(0),
+      _audioFramePool(NULL),
+      _participantList(),
+      _additionalParticipantList(),
+      _numMixedParticipants(0),
+      _timeStamp(0),
+      _timeScheduler(kProcessPeriodicityInMs),
+      _mixedAudioLevel(),
+      _processCalls(0),
+      _limiter(NULL)
+{}
+
+// Second-stage initialization: creates the two critical sections, the
+// AudioProcessing instance used as a limiter, and the AudioFrame memory
+// pool, then configures the limiter for fixed-digital gain control.
+// Returns false on any failure (Create() then deletes the instance).
+bool AudioConferenceMixerImpl::Init()
+{
+    _crit.reset(CriticalSectionWrapper::CreateCriticalSection());
+    if (_crit.get() == NULL)
+        return false;
+
+    _cbCrit.reset(CriticalSectionWrapper::CreateCriticalSection());
+    if(_cbCrit.get() == NULL)
+        return false;
+
+    _limiter.reset(AudioProcessing::Create(_id));
+    if(_limiter.get() == NULL)
+        return false;
+
+    MemoryPool<AudioFrame>::CreateMemoryPool(_audioFramePool,
+                                             DEFAULT_AUDIO_FRAME_POOLSIZE);
+    if(_audioFramePool == NULL)
+        return false;
+
+    if(SetOutputFrequency(kDefaultFrequency) == -1)
+        return false;
+
+    // Assume mono.
+    if (!SetNumLimiterChannels(1))
+        return false;
+
+    if(_limiter->gain_control()->set_mode(GainControl::kFixedDigital) != 
+        _limiter->kNoError)
+        return false;
+
+    // We smoothly limit the mixed frame to -7 dbFS. -6 would correspond to the
+    // divide-by-2 but -7 is used instead to give a bit of headroom since the
+    // AGC is not a hard limiter.
+    if(_limiter->gain_control()->set_target_level_dbfs(7) != _limiter->kNoError)
+        return false;
+
+    // No additional gain; only the limiter stage is wanted.
+    if(_limiter->gain_control()->set_compression_gain_db(0)
+        != _limiter->kNoError)
+        return false;
+
+    if(_limiter->gain_control()->enable_limiter(true) != _limiter->kNoError)
+        return false;
+
+    if(_limiter->gain_control()->Enable(true) != _limiter->kNoError)
+        return false;
+
+    return true;
+}
+
+AudioConferenceMixerImpl::~AudioConferenceMixerImpl()
+{
+    // All frames must have been returned to the pool before destruction;
+    // DeleteMemoryPool NULLs the pointer on success.
+    MemoryPool<AudioFrame>::DeleteMemoryPool(_audioFramePool);
+    assert(_audioFramePool == NULL);
+}
+
+// Replaces the id used for tracing and callbacks.
+WebRtc_Word32 AudioConferenceMixerImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// Process should be called every kProcessPeriodicityInMs ms
+// Returns the milliseconds until the next scheduled Process() call, or -1
+// if the scheduler query fails.
+WebRtc_Word32 AudioConferenceMixerImpl::TimeUntilNextProcess()
+{
+    WebRtc_Word32 timeUntilNextProcess = 0;
+    CriticalSectionScoped cs(_crit.get());
+    if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "failed in TimeToNextUpdate() call");
+        // Sanity check
+        assert(false);
+        return -1;
+    }
+    return timeUntilNextProcess;
+}
+
+// Runs one mix iteration: picks the output frequency from the participants'
+// needs, selects which participants to mix (UpdateToMix), mixes them into a
+// single frame, applies the limiter, and delivers the result plus optional
+// status reports to the registered callbacks. Returns 0 on success, -1 on
+// failure. Locking: _cbCrit guards participant lists/callbacks, _crit guards
+// mixing state; the two are never held across the callback invocations
+// together with _crit.
+WebRtc_Word32 AudioConferenceMixerImpl::Process()
+{
+    WebRtc_UWord32 remainingParticipantsAllowedToMix =
+        kMaximumAmountOfMixedParticipants;
+    {
+        CriticalSectionScoped cs(_crit.get());
+        // Process() must not be re-entered.
+        assert(_processCalls == 0);
+        _processCalls++;
+
+        // Let the scheduler know that we are running one iteration.
+        _timeScheduler.UpdateScheduler();
+    }
+
+    ListWrapper mixList;
+    ListWrapper rampOutList;
+    ListWrapper additionalFramesList;
+    MapWrapper mixedParticipantsMap;
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+
+        WebRtc_Word32 lowFreq = GetLowestMixingFrequency();
+        // SILK can run in 12 kHz and 24 kHz. These frequencies are not
+        // supported so use the closest higher frequency to not lose any
+        // information.
+        // TODO(henrike): this is probably more appropriate to do in
+        //                GetLowestMixingFrequency().
+        if (lowFreq == 12000)
+        {
+            lowFreq = 16000;
+        } else if (lowFreq == 24000) {
+            lowFreq = 32000;
+        }
+        if(lowFreq <= 0)
+        {
+            CriticalSectionScoped cs(_crit.get());
+            _processCalls--;
+            return 0;
+        } else  {
+            // Reconfigure the output frequency if the required mixing
+            // frequency changed since the last iteration.
+            switch(lowFreq)
+            {
+            case 8000:
+                if(OutputFrequency() != kNbInHz)
+                {
+                    SetOutputFrequency(kNbInHz);
+                }
+                break;
+            case 16000:
+                if(OutputFrequency() != kWbInHz)
+                {
+                    SetOutputFrequency(kWbInHz);
+                }
+                break;
+            case 32000:
+                if(OutputFrequency() != kSwbInHz)
+                {
+                    SetOutputFrequency(kSwbInHz);
+                }
+                break;
+            default:
+                assert(false);
+
+                CriticalSectionScoped cs(_crit.get());
+                _processCalls--;
+                return -1;
+            }
+        }
+
+        UpdateToMix(mixList, rampOutList, mixedParticipantsMap,
+                    remainingParticipantsAllowedToMix);
+
+        GetAdditionalAudio(additionalFramesList);
+        UpdateMixedStatus(mixedParticipantsMap);
+        _scratchParticipantsToMixAmount = mixedParticipantsMap.Size();
+    }
+
+    // Clear mixedParticipantsMap to avoid memory leak warning.
+    // Please note that the mixedParticipantsMap doesn't own any dynamically
+    // allocated memory.
+    while(mixedParticipantsMap.Erase(mixedParticipantsMap.First()) == 0) {}
+
+    // Get an AudioFrame for mixing from the memory pool.
+    AudioFrame* mixedAudio = NULL;
+    if(_audioFramePool->PopMemory(mixedAudio) == -1)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
+                     "failed PopMemory() call");
+        assert(false);
+        return -1;
+    }
+
+    bool timeForMixerCallback = false;
+    int retval = 0;
+    WebRtc_Word32 audioLevel = 0;
+    {
+        const ListItem* firstItem = mixList.First();
+        // Assume mono.
+        WebRtc_UWord8 numberOfChannels = 1;
+        if(firstItem != NULL)
+        {
+            // Use the same number of channels as the first frame to be mixed.
+            numberOfChannels = static_cast<const AudioFrame*>(
+                firstItem->GetItem())->_audioChannel;
+        }
+        // TODO(henrike): it might be better to decide the number of channels
+        //                with an API instead of dynamically.
+
+        CriticalSectionScoped cs(_crit.get());
+        if (!SetNumLimiterChannels(numberOfChannels))
+            retval = -1;
+
+        mixedAudio->UpdateFrame(-1, _timeStamp, NULL, 0, _outputFrequency,
+                                AudioFrame::kNormalSpeech,
+                                AudioFrame::kVadPassive, numberOfChannels);
+
+        _timeStamp += _sampleSize;
+
+        // Mix the selected participants, the anonymous participants and the
+        // frames being ramped out after deselection.
+        MixFromList(*mixedAudio, mixList);
+        MixAnonomouslyFromList(*mixedAudio, additionalFramesList);
+        MixAnonomouslyFromList(*mixedAudio, rampOutList);
+
+        if(mixedAudio->_payloadDataLengthInSamples == 0)
+        {
+            // Nothing was mixed, set the audio samples to silence.
+            memset(mixedAudio->_payloadData, 0, _sampleSize);
+            mixedAudio->_payloadDataLengthInSamples = _sampleSize;
+        }
+        else
+        {
+            // Only call the limiter if we have something to mix.
+            if(!LimitMixedAudio(*mixedAudio))
+                retval = -1;
+        }
+
+        _mixedAudioLevel.ComputeLevel(mixedAudio->_payloadData,_sampleSize);
+        audioLevel = _mixedAudioLevel.GetLevel();
+
+        if(_mixerStatusCb)
+        {
+            _scratchVadPositiveParticipantsAmount = 0;
+            UpdateVADPositiveParticipants(mixList);
+            // Post-decrement: a callback fires when the counter hits zero,
+            // i.e. every _amountOf10MsBetweenCallbacks iterations.
+            if(_amountOf10MsUntilNextCallback-- == 0)
+            {
+                _amountOf10MsUntilNextCallback = _amountOf10MsBetweenCallbacks;
+                timeForMixerCallback = true;
+            }
+        }
+    }
+
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        if(_mixReceiver != NULL)
+        {
+            const AudioFrame** dummy = NULL;
+            _mixReceiver->NewMixedAudio(
+                _id,
+                *mixedAudio,
+                dummy,
+                0);
+        }
+
+        if((_mixerStatusCallback != NULL) &&
+            timeForMixerCallback)
+        {
+            _mixerStatusCallback->MixedParticipants(
+                _id,
+                _scratchMixedParticipants,
+                _scratchParticipantsToMixAmount);
+
+            _mixerStatusCallback->VADPositiveParticipants(
+                _id,
+                _scratchVadPositiveParticipants,
+                _scratchVadPositiveParticipantsAmount);
+            _mixerStatusCallback->MixedAudioLevel(_id,audioLevel);
+        }
+    }
+
+    // Reclaim all outstanding memory.
+    _audioFramePool->PushMemory(mixedAudio);
+    ClearAudioFrameList(mixList);
+    ClearAudioFrameList(rampOutList);
+    ClearAudioFrameList(additionalFramesList);
+    {
+        CriticalSectionScoped cs(_crit.get());
+        _processCalls--;
+    }
+    return retval;
+}
+
+// Registers the receiver of mixed audio. Only one receiver may be
+// registered at a time; returns -1 if one already is.
+WebRtc_Word32 AudioConferenceMixerImpl::RegisterMixedStreamCallback(
+    AudioMixerOutputReceiver& mixReceiver)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    if(_mixReceiver != NULL)
+    {
+        return -1;
+    }
+    _mixReceiver = &mixReceiver;
+    return 0;
+}
+
+// Removes the mixed-audio receiver; returns -1 if none was registered.
+WebRtc_Word32 AudioConferenceMixerImpl::UnRegisterMixedStreamCallback()
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    if(_mixReceiver == NULL)
+    {
+        return -1;
+    }
+    _mixReceiver = NULL;
+    return 0;
+}
+
+// Sets the mixer's output sample rate, keeping the limiter's sample rate in
+// sync, and recomputes _sampleSize (samples per kProcessPeriodicityInMs
+// mixing period). Returns -1 if AudioProcessing rejects the rate.
+WebRtc_Word32 AudioConferenceMixerImpl::SetOutputFrequency(
+    const Frequency frequency)
+{
+    CriticalSectionScoped cs(_crit.get());
+    const int error = _limiter->set_sample_rate_hz(frequency);
+    if(error != _limiter->kNoError)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "Error from AudioProcessing: %d", error);
+        return -1;
+    }
+
+    _outputFrequency = frequency;
+    _sampleSize = (_outputFrequency*kProcessPeriodicityInMs) / 1000;
+
+    return 0;
+}
+
+// Returns the current output sample rate.
+AudioConferenceMixer::Frequency
+AudioConferenceMixerImpl::OutputFrequency() const
+{
+    CriticalSectionScoped cs(_crit.get());
+    return _outputFrequency;
+}
+
+// Reconfigures the limiter for |numChannels| input/output channels if that
+// differs from its current configuration. Returns false (and asserts) on an
+// AudioProcessing error. Caller must hold _crit.
+bool AudioConferenceMixerImpl::SetNumLimiterChannels(int numChannels)
+{
+    if(_limiter->num_input_channels() != numChannels)
+    {
+        const int error = _limiter->set_num_channels(numChannels,
+                                                     numChannels);
+        if(error != _limiter->kNoError)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                         "Error from AudioProcessing: %d", error);
+            assert(false);
+            return false;
+        }
+    }
+
+    return true;
+}
+
+// Registers |mixerStatusCallback| to receive status reports (mixed
+// participants, VAD-positive participants, mixed audio level) every
+// |amountOf10MsBetweenCallbacks| * 10 ms.
+// Returns 0 on success; -1 if the interval is 0 or a callback is already
+// registered.
+WebRtc_Word32 AudioConferenceMixerImpl::RegisterMixerStatusCallback(
+    AudioMixerStatusReceiver& mixerStatusCallback,
+    const WebRtc_UWord32 amountOf10MsBetweenCallbacks)
+{
+    if(amountOf10MsBetweenCallbacks == 0)
+    {
+        // Bug fix: the format string has a %d conversion but no matching
+        // argument was passed, so the trace logged garbage. Pass the value.
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceAudioMixerServer,
+            _id,
+            "amountOf10MsBetweenCallbacks(%d) needs to be larger than 0",
+            amountOf10MsBetweenCallbacks);
+        return -1;
+    }
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        if(_mixerStatusCallback != NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "Mixer status callback already registered");
+            return -1;
+        }
+        _mixerStatusCallback = &mixerStatusCallback;
+    }
+    {
+        // The interval state is read by Process() under _crit.
+        CriticalSectionScoped cs(_crit.get());
+        _amountOf10MsBetweenCallbacks  = amountOf10MsBetweenCallbacks;
+        _amountOf10MsUntilNextCallback = 0;
+        _mixerStatusCb                 = true;
+    }
+    return 0;
+}
+
+// Removes the status callback; returns -1 if none was registered.
+// Note the lock order (_crit then _cbCrit) mirrors the registration path,
+// taken as two separate non-nested scopes.
+WebRtc_Word32 AudioConferenceMixerImpl::UnRegisterMixerStatusCallback()
+{
+    {
+        CriticalSectionScoped cs(_crit.get());
+        if(!_mixerStatusCb)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "Mixer status callback not registered");
+            return -1;
+        }
+        _mixerStatusCb = false;
+    }
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        _mixerStatusCallback = NULL;
+    }
+    return 0;
+}
+
+// Adds (mixable == true) or removes (mixable == false) |participant| from
+// the set of mixed participants. Must be called with a state change:
+// returns -1 if the participant is already in the requested state.
+// Also refreshes the cached _numMixedParticipants count.
+WebRtc_Word32 AudioConferenceMixerImpl::SetMixabilityStatus(
+    MixerParticipant& participant,
+    const bool mixable)
+{
+    if (!mixable)
+    {
+        // Anonymous participants are in a separate list. Make sure that the
+        // participant is in the _participantList if it is being mixed.
+        SetAnonymousMixabilityStatus(participant, false);
+    }
+    WebRtc_UWord32 numMixedParticipants;
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        const bool isMixed =
+            IsParticipantInList(participant,_participantList);
+        // API must be called with a new state.
+        if(!(mixable ^ isMixed))
+        {
+            // Bug fix: log message misspelled "aready".
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "Mixable is already %s",
+                         isMixed ? "ON" : "off");
+            return -1;
+        }
+        bool success = false;
+        if(mixable)
+        {
+            success = AddParticipantToList(participant,_participantList);
+        }
+        else
+        {
+            success = RemoveParticipantFromList(participant,_participantList);
+        }
+        if(!success)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                         "failed to %s participant",
+                         mixable ? "add" : "remove");
+            assert(false);
+            return -1;
+        }
+
+        // The mixed-participant count is capped; anonymous participants are
+        // always counted on top of the cap.
+        int numMixedNonAnonymous = _participantList.GetSize();
+        if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants)
+        {
+            numMixedNonAnonymous = kMaximumAmountOfMixedParticipants;
+        }
+        numMixedParticipants = numMixedNonAnonymous +
+                               _additionalParticipantList.GetSize();
+    }
+    // A MixerParticipant was added or removed. Make sure the scratch
+    // buffer is updated if necessary.
+    // Note: The scratch buffer may only be updated in Process().
+    CriticalSectionScoped cs(_crit.get());
+    _numMixedParticipants = numMixedParticipants;
+    return 0;
+}
+
+// Sets |mixable| to whether |participant| is currently in the (non-anonymous)
+// mix list. Always returns 0.
+WebRtc_Word32 AudioConferenceMixerImpl::MixabilityStatus(
+    MixerParticipant& participant,
+    bool& mixable)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    mixable = IsParticipantInList(participant, _participantList);
+    return 0;
+}
+
+// Moves |participant| between the regular mix list and the anonymous
+// (additional) list. A participant must already be registered (mixable)
+// before it can be made anonymous; making an anonymous participant
+// non-anonymous moves it back to the regular list. Returns 0 on success.
+WebRtc_Word32 AudioConferenceMixerImpl::SetAnonymousMixabilityStatus(
+    MixerParticipant& participant, const bool anonymous)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    if(IsParticipantInList(participant, _additionalParticipantList))
+    {
+        if(anonymous)
+        {
+            return 0;
+        }
+        if(!RemoveParticipantFromList(participant, _additionalParticipantList))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                         "unable to remove participant from anonymous list");
+            assert(false);
+            return -1;
+        }
+        return AddParticipantToList(participant, _participantList) ? 0 : -1;
+    }
+    if(!anonymous)
+    {
+        return 0;
+    }
+    const bool mixable = RemoveParticipantFromList(participant,
+                                                   _participantList);
+    if(!mixable)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceAudioMixerServer,
+            _id,
+            "participant must be registered before turning it into anonymous");
+        // Setting anonymous status is only possible if MixerParticipant is
+        // already registered.
+        return -1;
+    }
+    return AddParticipantToList(participant, _additionalParticipantList) ?
+        0 : -1;
+}
+
+// Sets |mixable| to whether |participant| is in the anonymous list.
+// Always returns 0.
+WebRtc_Word32 AudioConferenceMixerImpl::AnonymousMixabilityStatus(
+    MixerParticipant& participant, bool& mixable)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    mixable = IsParticipantInList(participant,
+                                  _additionalParticipantList);
+    return 0;
+}
+
+// Sets the lowest sample rate the mixer may mix at. 12 kHz and 24 kHz are
+// mapped to the next supported rate; any other unsupported rate is rejected
+// with -1.
+WebRtc_Word32 AudioConferenceMixerImpl::SetMinimumMixingFrequency(
+    Frequency freq)
+{
+    // Make sure that only allowed sampling frequencies are used. Use closest
+    // higher sampling frequency to avoid losing information.
+    if (static_cast<int>(freq) == 12000)
+    {
+         freq = kWbInHz;
+    } else if (static_cast<int>(freq) == 24000) {
+        freq = kSwbInHz;
+    }
+
+    if((freq == kNbInHz) || (freq == kWbInHz) || (freq == kSwbInHz) ||
+       (freq == kLowestPossible))
+    {
+        _minimumMixingFreq=freq;
+        return 0;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "SetMinimumMixingFrequency incorrect frequency: %i",freq);
+        assert(false);
+        return -1;
+    }
+}
+
+// Check all AudioFrames that are to be mixed. The highest sampling frequency
+// found is the lowest that can be used without losing information.
+// A user-specified minimum (SetMinimumMixingFrequency) overrides the
+// computed value when it is higher.
+WebRtc_Word32 AudioConferenceMixerImpl::GetLowestMixingFrequency()
+{
+    const int participantListFrequency =
+        GetLowestMixingFrequencyFromList(_participantList);
+    const int anonymousListFrequency =
+        GetLowestMixingFrequencyFromList(_additionalParticipantList);
+    const int highestFreq =
+        (participantListFrequency > anonymousListFrequency) ?
+            participantListFrequency : anonymousListFrequency;
+    // Check if the user specified a lowest mixing frequency.
+    if(_minimumMixingFreq != kLowestPossible)
+    {
+        if(_minimumMixingFreq > highestFreq)
+        {
+            return _minimumMixingFreq;
+        }
+    }
+    return highestFreq;
+}
+
+// Returns the highest NeededFrequency() among the participants in |mixList|,
+// with a floor of 8000 Hz.
+WebRtc_Word32 AudioConferenceMixerImpl::GetLowestMixingFrequencyFromList(
+    ListWrapper& mixList)
+{
+    WebRtc_Word32 highestFreq = 8000;
+    ListItem* item = mixList.First();
+    while(item)
+    {
+        MixerParticipant* participant =
+            static_cast<MixerParticipant*>(item->GetItem());
+        const WebRtc_Word32 neededFrequency = participant->NeededFrequency(_id);
+        if(neededFrequency > highestFreq)
+        {
+            highestFreq = neededFrequency;
+        }
+        item = mixList.Next(item);
+    }
+    return highestFreq;
+}
+
+void AudioConferenceMixerImpl::UpdateToMix(
+    ListWrapper& mixList,
+    ListWrapper& rampOutList,
+    MapWrapper& mixParticipantList,
+    WebRtc_UWord32& maxAudioFrameCounter)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "UpdateToMix(mixList,rampOutList,mixParticipantList,%d)",
+                 maxAudioFrameCounter);
+    const WebRtc_UWord32 mixListStartSize = mixList.GetSize();
+    ListWrapper activeList; // Elements are AudioFrames
+    // Struct needed by the passive lists to keep track of which AudioFrame
+    // belongs to which MixerParticipant.
+    struct ParticipantFramePair
+    {
+        MixerParticipant* participant;
+        AudioFrame* audioFrame;
+    };
+    ListWrapper passiveWasNotMixedList; // Elements are MixerParticipant
+    ListWrapper passiveWasMixedList;    // Elements are MixerParticipant
+    ListItem* item = _participantList.First();
+    while(item)
+    {
+        // Stop keeping track of passive participants if there are already
+        // enough participants available (they wont be mixed anyway).
+        bool mustAddToPassiveList = (maxAudioFrameCounter >
+                                    (activeList.GetSize() +
+                                     passiveWasMixedList.GetSize() +
+                                     passiveWasNotMixedList.GetSize()));
+
+        MixerParticipant* participant = static_cast<MixerParticipant*>(
+            item->GetItem());
+        bool wasMixed = false;
+        participant->_mixHistory->WasMixed(wasMixed);
+        AudioFrame* audioFrame = NULL;
+        if(_audioFramePool->PopMemory(audioFrame) == -1)
+        {
+            WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
+                         "failed PopMemory() call");
+            assert(false);
+            return;
+        }
+        audioFrame->_frequencyInHz = _outputFrequency;
+
+        if(participant->GetAudioFrame(_id,*audioFrame) != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "failed to GetAudioFrame() from participant");
+            _audioFramePool->PushMemory(audioFrame);
+            item = _participantList.Next(item);
+            continue;
+        }
+        // TODO(henrike): this assert triggers in some test cases where SRTP is
+        // used which prevents NetEQ from making a VAD. Temporarily disable this
+        // assert until the problem is fixed on a higher level.
+        // assert(audioFrame->_vadActivity != AudioFrame::kVadUnknown);
+        if (audioFrame->_vadActivity == AudioFrame::kVadUnknown)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "invalid VAD state from participant");
+        }
+
+        if(audioFrame->_vadActivity == AudioFrame::kVadActive)
+        {
+            if(!wasMixed)
+            {
+                RampIn(*audioFrame);
+            }
+
+            if(activeList.GetSize() >= maxAudioFrameCounter)
+            {
+                // There are already more active participants than should be
+                // mixed. Only keep the ones with the highest energy.
+                ListItem* replaceItem = NULL;
+                CalculateEnergy(*audioFrame);
+                WebRtc_UWord32 lowestEnergy = audioFrame->_energy;
+
+                ListItem* activeItem = activeList.First();
+                while(activeItem)
+                {
+                    AudioFrame* replaceFrame = static_cast<AudioFrame*>(
+                        activeItem->GetItem());
+                    CalculateEnergy(*replaceFrame);
+                    if(replaceFrame->_energy < lowestEnergy)
+                    {
+                        replaceItem = activeItem;
+                        lowestEnergy = replaceFrame->_energy;
+                    }
+                    activeItem = activeList.Next(activeItem);
+                }
+                if(replaceItem != NULL)
+                {
+                    AudioFrame* replaceFrame = static_cast<AudioFrame*>(
+                        replaceItem->GetItem());
+
+                    bool replaceWasMixed = false;
+                    MapItem* replaceParticipant = mixParticipantList.Find(
+                        replaceFrame->_id);
+                    // When a frame is pushed to |activeList| it is also pushed
+                    // to mixParticipantList with the frame's id. This means
+                    // that the Find call above should never fail.
+                    if(replaceParticipant == NULL)
+                    {
+                        assert(false);
+                    } else {
+                        static_cast<MixerParticipant*>(
+                            replaceParticipant->GetItem())->_mixHistory->
+                            WasMixed(replaceWasMixed);
+
+                        mixParticipantList.Erase(replaceFrame->_id);
+                        activeList.Erase(replaceItem);
+
+                        activeList.PushFront(static_cast<void*>(audioFrame));
+                        mixParticipantList.Insert(
+                            audioFrame->_id,
+                            static_cast<void*>(participant));
+                        assert(mixParticipantList.Size() <=
+                               kMaximumAmountOfMixedParticipants);
+
+                        if(replaceWasMixed)
+                        {
+                            RampOut(*replaceFrame);
+                            rampOutList.PushBack(
+                                static_cast<void*>(replaceFrame));
+                            assert(rampOutList.GetSize() <=
+                                   kMaximumAmountOfMixedParticipants);
+                        } else {
+                            _audioFramePool->PushMemory(replaceFrame);
+                        }
+                    }
+                } else {
+                    if(wasMixed)
+                    {
+                        RampOut(*audioFrame);
+                        rampOutList.PushBack(static_cast<void*>(audioFrame));
+                        assert(rampOutList.GetSize() <=
+                               kMaximumAmountOfMixedParticipants);
+                    } else {
+                        _audioFramePool->PushMemory(audioFrame);
+                    }
+                }
+            } else {
+                activeList.PushFront(static_cast<void*>(audioFrame));
+                mixParticipantList.Insert(audioFrame->_id,
+                                          static_cast<void*>(participant));
+                assert(mixParticipantList.Size() <=
+                       kMaximumAmountOfMixedParticipants);
+            }
+        } else {
+            if(wasMixed)
+            {
+                ParticipantFramePair* pair = new ParticipantFramePair;
+                pair->audioFrame  = audioFrame;
+                pair->participant = participant;
+                passiveWasMixedList.PushBack(static_cast<void*>(pair));
+            } else if(mustAddToPassiveList) {
+                RampIn(*audioFrame);
+                ParticipantFramePair* pair = new ParticipantFramePair;
+                pair->audioFrame  = audioFrame;
+                pair->participant = participant;
+                passiveWasNotMixedList.PushBack(static_cast<void*>(pair));
+            } else {
+                _audioFramePool->PushMemory(audioFrame);
+            }
+        }
+        item = _participantList.Next(item);
+    }
+    assert(activeList.GetSize() <= maxAudioFrameCounter);
+    // At this point it is known which participants should be mixed. Transfer
+    // this information to this functions output parameters.
+    while(!activeList.Empty())
+    {
+        ListItem* mixItem = activeList.First();
+        mixList.PushBack(mixItem->GetItem());
+        activeList.Erase(mixItem);
+    }
+    // Always mix a constant number of AudioFrames. If there aren't enough
+    // active participants mix passive ones. Starting with those that were mixed
+    // last iteration.
+    while(!passiveWasMixedList.Empty())
+    {
+        ListItem* mixItem = passiveWasMixedList.First();
+        ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
+            mixItem->GetItem());
+        if(mixList.GetSize() <  maxAudioFrameCounter + mixListStartSize)
+        {
+            mixList.PushBack(pair->audioFrame);
+            mixParticipantList.Insert(pair->audioFrame->_id,
+                                      static_cast<void*>(pair->participant));
+            assert(mixParticipantList.Size() <=
+                   kMaximumAmountOfMixedParticipants);
+        }
+        else
+        {
+            _audioFramePool->PushMemory(pair->audioFrame);
+        }
+        delete pair;
+        passiveWasMixedList.Erase(mixItem);
+    }
+    // And finally the ones that have not been mixed for a while.
+    while(!passiveWasNotMixedList.Empty())
+    {
+        ListItem* mixItem = passiveWasNotMixedList.First();
+        ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
+            mixItem->GetItem());
+        if(mixList.GetSize() <  maxAudioFrameCounter + mixListStartSize)
+        {
+            mixList.PushBack(pair->audioFrame);
+            mixParticipantList.Insert(pair->audioFrame->_id,
+                                      static_cast<void*>(pair->participant));
+            assert(mixParticipantList.Size() <=
+                   kMaximumAmountOfMixedParticipants);
+        }
+        else
+        {
+            _audioFramePool->PushMemory(pair->audioFrame);
+        }
+        delete pair;
+        passiveWasNotMixedList.Erase(mixItem);
+    }
+    assert(maxAudioFrameCounter + mixListStartSize >= mixList.GetSize());
+    maxAudioFrameCounter += mixListStartSize - mixList.GetSize();
+}
+
+void AudioConferenceMixerImpl::GetAdditionalAudio(
+    ListWrapper& additionalFramesList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "GetAdditionalAudio(additionalFramesList)");
+    ListItem* item = _additionalParticipantList.First();
+    while(item)
+    {
+        // The GetAudioFrame() callback may remove the current item. Store the
+        // next item just in case that happens.
+        ListItem* nextItem = _additionalParticipantList.Next(item);
+
+        MixerParticipant* participant = static_cast<MixerParticipant*>(
+            item->GetItem());
+        AudioFrame* audioFrame = NULL;
+        if(_audioFramePool->PopMemory(audioFrame) == -1)
+        {
+            WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
+                         "failed PopMemory() call");
+            assert(false);
+            return;
+        }
+        audioFrame->_frequencyInHz = _outputFrequency;
+        if(participant->GetAudioFrame(_id, *audioFrame) != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "failed to GetAudioFrame() from participant");
+            _audioFramePool->PushMemory(audioFrame);
+            item = nextItem;
+            continue;
+        }
+        if(audioFrame->_payloadDataLengthInSamples == 0)
+        {
+            // Empty frame. Don't use it.
+            _audioFramePool->PushMemory(audioFrame);
+            item = nextItem;
+            continue;
+        }
+        additionalFramesList.PushBack(static_cast<void*>(audioFrame));
+        item = nextItem;
+    }
+}
+
+void AudioConferenceMixerImpl::UpdateMixedStatus(
+    MapWrapper& mixedParticipantsMap)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "UpdateMixedStatus(mixedParticipantsMap)");
+    assert(mixedParticipantsMap.Size() <= kMaximumAmountOfMixedParticipants);
+
+    // Loop through all participants. If they are in the mix map they
+    // were mixed.
+    ListItem* participantItem = _participantList.First();
+    while(participantItem != NULL)
+    {
+        bool isMixed = false;
+        MixerParticipant* participant =
+            static_cast<MixerParticipant*>(participantItem->GetItem());
+
+        MapItem* mixedItem = mixedParticipantsMap.First();
+        while(mixedItem)
+        {
+            if(participant == mixedItem->GetItem())
+            {
+                isMixed = true;
+                break;
+            }
+            mixedItem = mixedParticipantsMap.Next(mixedItem);
+        }
+        participant->_mixHistory->SetIsMixed(isMixed);
+        participantItem = _participantList.Next(participantItem);
+    }
+}
+
+void AudioConferenceMixerImpl::ClearAudioFrameList(ListWrapper& audioFrameList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "ClearAudioFrameList(audioFrameList)");
+    ListItem* item = audioFrameList.First();
+    while(item)
+    {
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        _audioFramePool->PushMemory(audioFrame);
+        audioFrameList.Erase(item);
+        item = audioFrameList.First();
+    }
+}
+
+void AudioConferenceMixerImpl::UpdateVADPositiveParticipants(
+    ListWrapper& mixList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "UpdateVADPositiveParticipants(mixList)");
+
+    ListItem* item = mixList.First();
+    while(item != NULL)
+    {
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        CalculateEnergy(*audioFrame);
+        if(audioFrame->_vadActivity == AudioFrame::kVadActive)
+        {
+            _scratchVadPositiveParticipants[
+                _scratchVadPositiveParticipantsAmount].participant =
+                audioFrame->_id;
+            _scratchVadPositiveParticipants[
+                _scratchVadPositiveParticipantsAmount].level =
+                audioFrame->_volume;
+            _scratchVadPositiveParticipantsAmount++;
+        }
+        item = mixList.Next(item);
+    }
+}
+
+bool AudioConferenceMixerImpl::IsParticipantInList(
+    MixerParticipant& participant,
+    ListWrapper& participantList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "IsParticipantInList(participant,participantList)");
+    ListItem* item = participantList.First();
+    while(item != NULL)
+    {
+        MixerParticipant* rhsParticipant =
+            static_cast<MixerParticipant*>(item->GetItem());
+        if(&participant == rhsParticipant)
+        {
+            return true;
+        }
+        item = participantList.Next(item);
+    }
+    return false;
+}
+
+bool AudioConferenceMixerImpl::AddParticipantToList(
+    MixerParticipant& participant,
+    ListWrapper& participantList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "AddParticipantToList(participant, participantList)");
+    if(participantList.PushBack(static_cast<void*>(&participant)) == -1)
+    {
+        return false;
+    }
+    // Make sure that the mixed status is correct for new MixerParticipant.
+    participant._mixHistory->ResetMixedStatus();
+    return true;
+}
+
+bool AudioConferenceMixerImpl::RemoveParticipantFromList(
+    MixerParticipant& participant,
+    ListWrapper& participantList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "RemoveParticipantFromList(participant, participantList)");
+    ListItem* item = participantList.First();
+    while(item)
+    {
+        if(item->GetItem() == &participant)
+        {
+            participantList.Erase(item);
+            // Participant is no longer mixed, reset to default.
+            participant._mixHistory->ResetMixedStatus();
+            return true;
+        }
+        item = participantList.Next(item);
+    }
+    return false;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::MixFromList(
+    AudioFrame& mixedAudio,
+    const ListWrapper& audioFrameList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "MixFromList(mixedAudio, audioFrameList)");
+    WebRtc_UWord32 position = 0;
+    ListItem* item = audioFrameList.First();
+    if(item == NULL)
+    {
+        return 0;
+    }
+
+    if(_numMixedParticipants == 1)
+    {
+        // No mixing required here; skip the saturation protection.
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        mixedAudio = *audioFrame;
+        SetParticipantStatistics(&_scratchMixedParticipants[position],
+                                 *audioFrame);
+        return 0;
+    }
+
+    while(item != NULL)
+    {
+        if(position >= kMaximumAmountOfMixedParticipants)
+        {
+            WEBRTC_TRACE(
+                kTraceMemory,
+                kTraceAudioMixerServer,
+                _id,
+                "Trying to mix more than max amount of mixed participants:%d!",
+                kMaximumAmountOfMixedParticipants);
+            // Assert and avoid crash
+            assert(false);
+            position = 0;
+        }
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+
+        // Divide by two to avoid saturation in the mixing.
+        *audioFrame >>= 1;
+        mixedAudio += *audioFrame;
+
+        SetParticipantStatistics(&_scratchMixedParticipants[position],
+                                 *audioFrame);
+
+        position++;
+        item = audioFrameList.Next(item);
+    }
+
+    return 0;
+}
+
+// TODO(andrew): consolidate this function with MixFromList.
+WebRtc_Word32 AudioConferenceMixerImpl::MixAnonomouslyFromList(
+    AudioFrame& mixedAudio,
+    const ListWrapper& audioFrameList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "MixAnonomouslyFromList(mixedAudio, audioFrameList)");
+    ListItem* item = audioFrameList.First();
+    if(item == NULL)
+        return 0;
+
+    if(_numMixedParticipants == 1)
+    {
+        // No mixing required here; skip the saturation protection.
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        mixedAudio = *audioFrame;
+        return 0;
+    }
+
+    while(item != NULL)
+    {
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        // Divide by two to avoid saturation in the mixing.
+        *audioFrame >>= 1;
+        mixedAudio += *audioFrame;
+        item = audioFrameList.Next(item);
+    }
+    return 0;
+}
+
+bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
+{
+    if(_numMixedParticipants == 1)
+    {
+        return true;
+    }
+
+    // Smoothly limit the mixed frame.
+    const int error = _limiter->ProcessStream(&mixedAudio);
+
+    // And now we can safely restore the level. This procedure results in
+    // some loss of resolution, deemed acceptable.
+    //
+    // It's possible to apply the gain in the AGC (with a target level of 0 dbFS
+    // and compression gain of 6 dB). However, in the transition frame when this
+    // is enabled (moving from one to two participants) it has the potential to
+    // create discontinuities in the mixed frame.
+    //
+    // Instead we double the frame (with addition since left-shifting a
+    // negative value is undefined).
+    mixedAudio += mixedAudio;
+
+    if(error != _limiter->kNoError)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "Error from AudioProcessing: %d", error);
+        assert(false);
+        return false;
+    }
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
new file mode 100644
index 0000000..efa5d68
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+
+#include "atomic32_wrapper.h"
+#include "audio_conference_mixer.h"
+#include "engine_configurations.h"
+#include "level_indicator.h"
+#include "list_wrapper.h"
+#include "memory_pool.h"
+#include "module_common_types.h"
+#include "scoped_ptr.h"
+#include "time_scheduler.h"
+
+namespace webrtc {
+class AudioProcessing;
+class CriticalSectionWrapper;
+
+// Cheshire cat implementation of MixerParticipant's non virtual functions.
+class MixHistory
+{
+public:
+    MixHistory();
+    ~MixHistory();
+
+    // MixerParticipant function
+    WebRtc_Word32 IsMixed(bool& mixed) const;
+
+    // Sets wasMixed to true if the participant was mixed in the previous
+    // mix iteration.
+    WebRtc_Word32 WasMixed(bool& wasMixed) const;
+
+    // Updates the mixed status.
+    WebRtc_Word32 SetIsMixed(const bool mixed);
+
+    void ResetMixedStatus();
+private:
+    Atomic32Wrapper _isMixed;  // 0 = false, 1 = true
+};
+
+class AudioConferenceMixerImpl : public AudioConferenceMixer
+{
+public:
+    // AudioProcessing only accepts 10 ms frames.
+    enum {kProcessPeriodicityInMs = 10};
+
+    AudioConferenceMixerImpl(int id);
+    ~AudioConferenceMixerImpl();
+
+    // Must be called after ctor.
+    bool Init();
+
+    // Module functions
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    // AudioConferenceMixer functions
+    virtual WebRtc_Word32 RegisterMixedStreamCallback(
+        AudioMixerOutputReceiver& mixReceiver);
+    virtual WebRtc_Word32 UnRegisterMixedStreamCallback();
+    virtual WebRtc_Word32 RegisterMixerStatusCallback(
+        AudioMixerStatusReceiver& mixerStatusCallback,
+        const WebRtc_UWord32 amountOf10MsBetweenCallbacks);
+    virtual WebRtc_Word32 UnRegisterMixerStatusCallback();
+    virtual WebRtc_Word32 SetMixabilityStatus(MixerParticipant& participant,
+                                              const bool mixable);
+    virtual WebRtc_Word32 MixabilityStatus(MixerParticipant& participant,
+                                           bool& mixable);
+    virtual WebRtc_Word32 SetMinimumMixingFrequency(Frequency freq);
+    virtual WebRtc_Word32 SetAnonymousMixabilityStatus(
+        MixerParticipant& participant, const bool mixable);
+    virtual WebRtc_Word32 AnonymousMixabilityStatus(
+        MixerParticipant& participant, bool& mixable);
+private:
+    enum{DEFAULT_AUDIO_FRAME_POOLSIZE = 50};
+
+    // Set/get mix frequency
+    WebRtc_Word32 SetOutputFrequency(const Frequency frequency);
+    Frequency OutputFrequency() const;
+
+    // Must be called whenever an audio frame indicates the number of channels
+    // has changed.
+    bool SetNumLimiterChannels(int numChannels);
+
+    // Fills mixList with the AudioFrames pointers that should be used when
+    // mixing. Fills mixParticipantList with ParticipantStatistics for the
+    // participants whose AudioFrames are inside mixList.
+    // maxAudioFrameCounter both input and output specifies how many more
+    // AudioFrames that are allowed to be mixed.
+    // rampOutList contain AudioFrames corresponding to an audio stream that
+    // used to be mixed but shouldn't be mixed any longer. These AudioFrames
+    // should be ramped out over this AudioFrame to avoid audio discontinuities.
+    void UpdateToMix(ListWrapper& mixList, ListWrapper& rampOutList,
+                     MapWrapper& mixParticipantList,
+                     WebRtc_UWord32& maxAudioFrameCounter);
+
+    // Return the lowest mixing frequency that can be used without having to
+    // downsample any audio.
+    WebRtc_Word32 GetLowestMixingFrequency();
+    WebRtc_Word32 GetLowestMixingFrequencyFromList(ListWrapper& mixList);
+
+    // Return the AudioFrames that should be mixed anonymously.
+    void GetAdditionalAudio(ListWrapper& additionalFramesList);
+
+    // Update the MixHistory of all MixerParticipants. mixedParticipantsList
+    // should contain a map of MixerParticipants that have been mixed.
+    void UpdateMixedStatus(MapWrapper& mixedParticipantsList);
+
+    // Clears audioFrameList and reclaims all memory associated with it.
+    void ClearAudioFrameList(ListWrapper& audioFrameList);
+
+    // Update the list of MixerParticipants who have a positive VAD. mixList
+    // should be a list of AudioFrames.
+    void UpdateVADPositiveParticipants(
+        ListWrapper& mixList);
+
+    // This function returns true if it finds the MixerParticipant in the
+    // specified list of MixerParticipants.
+    bool IsParticipantInList(
+        MixerParticipant& participant,
+        ListWrapper& participantList);
+
+    // Add/remove the MixerParticipant to the specified
+    // MixerParticipant list.
+    bool AddParticipantToList(
+        MixerParticipant& participant,
+        ListWrapper& participantList);
+    bool RemoveParticipantFromList(
+        MixerParticipant& removeParticipant,
+        ListWrapper& participantList);
+
+    // Mix the AudioFrames stored in audioFrameList into mixedAudio.
+    WebRtc_Word32 MixFromList(
+        AudioFrame& mixedAudio,
+        const ListWrapper& audioFrameList);
+    // Mix the AudioFrames stored in audioFrameList into mixedAudio. No
+    // record will be kept of this mix (e.g. the corresponding MixerParticipants
+    // will not be marked as IsMixed()).
+    WebRtc_Word32 MixAnonomouslyFromList(AudioFrame& mixedAudio,
+                                         const ListWrapper& audioFrameList);
+
+    bool LimitMixedAudio(AudioFrame& mixedAudio);
+
+    // Scratch memory
+    // Note that the scratch memory may only be touched in the scope of
+    // Process().
+    WebRtc_UWord32         _scratchParticipantsToMixAmount;
+    ParticipantStatistics  _scratchMixedParticipants[
+        kMaximumAmountOfMixedParticipants];
+    WebRtc_UWord32         _scratchVadPositiveParticipantsAmount;
+    ParticipantStatistics  _scratchVadPositiveParticipants[
+        kMaximumAmountOfMixedParticipants];
+
+    scoped_ptr<CriticalSectionWrapper> _crit;
+    scoped_ptr<CriticalSectionWrapper> _cbCrit;
+
+    WebRtc_Word32 _id;
+
+    Frequency _minimumMixingFreq;
+
+    // Mix result callback
+    AudioMixerOutputReceiver* _mixReceiver;
+
+    AudioMixerStatusReceiver* _mixerStatusCallback;
+    WebRtc_UWord32            _amountOf10MsBetweenCallbacks;
+    WebRtc_UWord32            _amountOf10MsUntilNextCallback;
+    bool                      _mixerStatusCb;
+
+    // The current sample frequency and sample size when mixing.
+    Frequency _outputFrequency;
+    WebRtc_UWord16 _sampleSize;
+
+    // Memory pool to avoid allocating/deallocating AudioFrames
+    MemoryPool<AudioFrame>* _audioFramePool;
+
+    // List of all participants. Note that all lists are disjoint.
+    ListWrapper _participantList;              // May be mixed.
+    ListWrapper _additionalParticipantList;    // Always mixed, anonymously.
+
+    WebRtc_UWord32 _numMixedParticipants;
+
+    WebRtc_UWord32 _timeStamp;
+
+    // Metronome class.
+    TimeScheduler _timeScheduler;
+
+    // Smooth level indicator.
+    LevelIndicator _mixedAudioLevel;
+
+    // Counter keeping track of concurrent calls to process.
+    // Note: should never be higher than 1 or lower than 0.
+    WebRtc_Word16 _processCalls;
+
+    // Used for inhibiting saturation in mixing.
+    scoped_ptr<AudioProcessing> _limiter;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_unittest.cc b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_unittest.cc
new file mode 100644
index 0000000..f895fbd
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/audio_conference_mixer_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "audio_conference_mixer.h"
+#include "gtest/gtest.h"
+
+TEST(AudioConferenceMixerTest, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/modules/audio_conference_mixer/source/audio_frame_manipulator.cc b/trunk/src/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
new file mode 100644
index 0000000..8716454
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_frame_manipulator.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace {
+// Linear ramping over 80 samples.
+// TODO(hellner): ramp using fix point?
+const float rampArray[] = {0.0000f, 0.0127f, 0.0253f, 0.0380f,
+                           0.0506f, 0.0633f, 0.0759f, 0.0886f,
+                           0.1013f, 0.1139f, 0.1266f, 0.1392f,
+                           0.1519f, 0.1646f, 0.1772f, 0.1899f,
+                           0.2025f, 0.2152f, 0.2278f, 0.2405f,
+                           0.2532f, 0.2658f, 0.2785f, 0.2911f,
+                           0.3038f, 0.3165f, 0.3291f, 0.3418f,
+                           0.3544f, 0.3671f, 0.3797f, 0.3924f,
+                           0.4051f, 0.4177f, 0.4304f, 0.4430f,
+                           0.4557f, 0.4684f, 0.4810f, 0.4937f,
+                           0.5063f, 0.5190f, 0.5316f, 0.5443f,
+                           0.5570f, 0.5696f, 0.5823f, 0.5949f,
+                           0.6076f, 0.6203f, 0.6329f, 0.6456f,
+                           0.6582f, 0.6709f, 0.6835f, 0.6962f,
+                           0.7089f, 0.7215f, 0.7342f, 0.7468f,
+                           0.7595f, 0.7722f, 0.7848f, 0.7975f,
+                           0.8101f, 0.8228f, 0.8354f, 0.8481f,
+                           0.8608f, 0.8734f, 0.8861f, 0.8987f,
+                           0.9114f, 0.9241f, 0.9367f, 0.9494f,
+                           0.9620f, 0.9747f, 0.9873f, 1.0000f};
+const int rampSize = sizeof(rampArray)/sizeof(rampArray[0]);
+} // namespace
+
+namespace webrtc {
+void CalculateEnergy(AudioFrame& audioFrame)
+{
+    if(audioFrame._energy != 0xffffffff)
+    {
+        return;
+    }
+    audioFrame._energy = 0;
+    for(int position = 0; position < audioFrame._payloadDataLengthInSamples;
+        position++)
+    {
+        // TODO(andrew): this can easily overflow.
+        audioFrame._energy += audioFrame._payloadData[position] *
+                              audioFrame._payloadData[position];
+    }
+}
+
+void RampIn(AudioFrame& audioFrame)
+{
+    assert(rampSize <= audioFrame._payloadDataLengthInSamples);
+    for(int i = 0; i < rampSize; i++)
+    {
+        audioFrame._payloadData[i] = static_cast<WebRtc_Word16>
+            (rampArray[i] * audioFrame._payloadData[i]);
+    }
+}
+
+void RampOut(AudioFrame& audioFrame)
+{
+    assert(rampSize <= audioFrame._payloadDataLengthInSamples);
+    for(int i = 0; i < rampSize; i++)
+    {
+        const int rampPos = rampSize - 1 - i;
+        audioFrame._payloadData[i] = static_cast<WebRtc_Word16>
+            (rampArray[rampPos] * audioFrame._payloadData[i]);
+    }
+    memset(&audioFrame._payloadData[rampSize], 0,
+           (audioFrame._payloadDataLengthInSamples - rampSize) *
+           sizeof(audioFrame._payloadData[0]));
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_conference_mixer/source/audio_frame_manipulator.h b/trunk/src/modules/audio_conference_mixer/source/audio_frame_manipulator.h
new file mode 100644
index 0000000..fdf5d33
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/audio_frame_manipulator.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_FRAME_MANIPULATOR_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_FRAME_MANIPULATOR_H_
+
+namespace webrtc {
+class AudioFrame;
+
+// Updates the audioFrame's energy (based on its samples).
+void CalculateEnergy(AudioFrame& audioFrame);
+
+// Apply linear step function that ramps in/out the audio samples in audioFrame
+void RampIn(AudioFrame& audioFrame);
+void RampOut(AudioFrame& audioFrame);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_FRAME_MANIPULATOR_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/level_indicator.cc b/trunk/src/modules/audio_conference_mixer/source/level_indicator.cc
new file mode 100644
index 0000000..799a47d
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/level_indicator.cc
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "level_indicator.h"
+
+namespace webrtc {
+// Array for adding smoothing to level changes (ad-hoc).
+const WebRtc_UWord32 perm[] =
+    {0,1,2,3,4,4,5,5,5,5,6,6,6,6,6,7,7,7,7,8,8,8,9,9,9,9,9,9,9,9,9,9,9};
+
+LevelIndicator::LevelIndicator()
+    : _max(0),
+      _count(0),
+      _currentLevel(0)
+{
+}
+
+LevelIndicator::~LevelIndicator()
+{
+}
+
+// Level is based on the highest absolute value for all samples.
+void LevelIndicator::ComputeLevel(const WebRtc_Word16* speech,
+                                  const WebRtc_UWord16 nrOfSamples)
+{
+    WebRtc_Word32 min = 0;
+    for(WebRtc_UWord32 i = 0; i < nrOfSamples; i++)
+    {
+        if(_max < speech[i])
+        {
+            _max = speech[i];
+        }
+        if(min > speech[i])
+        {
+            min = speech[i];
+        }
+    }
+
+    // Absolute max value.
+    if(-min > _max)
+    {
+        _max = -min;
+    }
+
+    if(_count == TICKS_BEFORE_CALCULATION)
+    {
+        // Highest sample value maps directly to a level.
+        WebRtc_Word32 position = _max / 1000;
+        if ((position == 0) &&
+            (_max > 250))
+        {
+            position = 1;
+        }
+        _currentLevel = perm[position];
+        // The max value is decayed and stored so that it can be reused to slow
+        // down decreases in level.
+        _max = _max >> 1;
+        _count = 0;
+    } else {
+        _count++;
+    }
+}
+
+WebRtc_Word32 LevelIndicator::GetLevel()
+{
+    return _currentLevel;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_conference_mixer/source/level_indicator.h b/trunk/src/modules/audio_conference_mixer/source/level_indicator.h
new file mode 100644
index 0000000..bdcdf8e
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/level_indicator.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+class LevelIndicator
+{
+public:
+    enum{TICKS_BEFORE_CALCULATION = 10};
+
+    LevelIndicator();
+    ~LevelIndicator();
+
+    // Updates the level.
+    void ComputeLevel(const WebRtc_Word16* speech,
+                      const WebRtc_UWord16 nrOfSamples);
+
+    WebRtc_Word32 GetLevel();
+private:
+    WebRtc_Word32  _max;
+    WebRtc_UWord32 _count;
+    WebRtc_UWord32 _currentLevel;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/memory_pool.h b/trunk/src/modules/audio_conference_mixer/source/memory_pool.h
new file mode 100644
index 0000000..caf5d93
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/memory_pool.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_H_
+
+#include <assert.h>
+
+#include "typedefs.h"
+
+#if _WIN32
+#include "memory_pool_win.h"
+#else
+#include "memory_pool_posix.h"
+#endif
+
+namespace webrtc {
+
+template<class MemoryType>
+class MemoryPool
+{
+public:
+    // Factory method, constructor disabled.
+    static WebRtc_Word32 CreateMemoryPool(MemoryPool*& memoryPool,
+                                          WebRtc_UWord32 initialPoolSize);
+
+    // Try to delete the memory pool. Fail with return value -1 if there is
+    // outstanding memory.
+    static WebRtc_Word32 DeleteMemoryPool(
+        MemoryPool*& memoryPool);
+
+    // Get/return unused memory.
+    WebRtc_Word32 PopMemory(MemoryType*&  memory);
+    WebRtc_Word32 PushMemory(MemoryType*& memory);
+private:
+    MemoryPool(WebRtc_Word32 initialPoolSize);
+    ~MemoryPool();
+
+    MemoryPoolImpl<MemoryType>* _ptrImpl;
+};
+
+template<class MemoryType>
+MemoryPool<MemoryType>::MemoryPool(WebRtc_Word32 initialPoolSize)
+{
+    _ptrImpl = new MemoryPoolImpl<MemoryType>(initialPoolSize);
+}
+
+template<class MemoryType>
+MemoryPool<MemoryType>::~MemoryPool()
+{
+    delete _ptrImpl;
+}
+
+template<class MemoryType> WebRtc_Word32
+MemoryPool<MemoryType>::CreateMemoryPool(MemoryPool*&   memoryPool,
+                                         WebRtc_UWord32 initialPoolSize)
+{
+    memoryPool = new MemoryPool(initialPoolSize);
+    if(memoryPool == NULL)
+    {
+        return -1;
+    }
+    if(memoryPool->_ptrImpl == NULL)
+    {
+        delete memoryPool;
+        memoryPool = NULL;
+        return -1;
+    }
+    if(!memoryPool->_ptrImpl->Initialize())
+    {
+        delete memoryPool;
+        memoryPool = NULL;
+        return -1;
+    }
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPool<MemoryType>::DeleteMemoryPool(MemoryPool*& memoryPool)
+{
+    if(memoryPool == NULL)
+    {
+        return -1;
+    }
+    if(memoryPool->_ptrImpl == NULL)
+    {
+        return -1;
+    }
+    if(memoryPool->_ptrImpl->Terminate() == -1)
+    {
+        return -1;
+    }
+    delete memoryPool;
+    memoryPool = NULL;
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPool<MemoryType>::PopMemory(MemoryType*& memory)
+{
+    return _ptrImpl->PopMemory(memory);
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPool<MemoryType>::PushMemory(MemoryType*& memory)
+{
+    if(memory == NULL)
+    {
+        return -1;
+    }
+    return _ptrImpl->PushMemory(memory);
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/memory_pool_posix.h b/trunk/src/modules/audio_conference_mixer/source/memory_pool_posix.h
new file mode 100644
index 0000000..45f800b
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/memory_pool_posix.h
@@ -0,0 +1,168 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
+
+#include <assert.h>
+
+#include "critical_section_wrapper.h"
+#include "list_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+template<class MemoryType>
+class MemoryPoolImpl
+{
+public:
+    // MemoryPool functions.
+    WebRtc_Word32 PopMemory(MemoryType*&  memory);
+    WebRtc_Word32 PushMemory(MemoryType*& memory);
+
+    MemoryPoolImpl(WebRtc_Word32 initialPoolSize);
+    ~MemoryPoolImpl();
+
+    // Atomic functions
+    WebRtc_Word32 Terminate();
+    bool Initialize();
+private:
+    // Non-atomic function.
+    WebRtc_Word32 CreateMemory(WebRtc_UWord32 amountToCreate);
+
+    CriticalSectionWrapper* _crit;
+
+    bool _terminate;
+
+    ListWrapper _memoryPool;
+
+    WebRtc_UWord32 _initialPoolSize;
+    WebRtc_UWord32 _createdMemory;
+    WebRtc_UWord32 _outstandingMemory;
+};
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::MemoryPoolImpl(WebRtc_Word32 initialPoolSize)
+    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _terminate(false),
+      _memoryPool(),
+      _initialPoolSize(initialPoolSize),
+      _createdMemory(0),
+      _outstandingMemory(0)
+{
+}
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::~MemoryPoolImpl()
+{
+    // Trigger assert if there is outstanding memory.
+    assert(_createdMemory == 0);
+    assert(_outstandingMemory == 0);
+    delete _crit;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
+{
+    CriticalSectionScoped cs(_crit);
+    if(_terminate)
+    {
+        memory = NULL;
+        return -1;
+    }
+    ListItem* item = _memoryPool.First();
+    if(item == NULL)
+    {
+        // _memoryPool is empty; create new memory.
+        CreateMemory(_initialPoolSize);
+        item = _memoryPool.First();
+        if(item == NULL)
+        {
+            memory = NULL;
+            return -1;
+        }
+    }
+    memory = static_cast<MemoryType*>(item->GetItem());
+    _memoryPool.Erase(item);
+    _outstandingMemory++;
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
+{
+    if(memory == NULL)
+    {
+        return -1;
+    }
+    CriticalSectionScoped cs(_crit);
+    _outstandingMemory--;
+    if(_memoryPool.GetSize() > (_initialPoolSize << 1))
+    {
+        // Reclaim memory if the unused pool exceeds twice its initial size.
+        _createdMemory--;
+        delete memory;
+        memory = NULL;
+        return 0;
+    }
+    _memoryPool.PushBack(static_cast<void*>(memory));
+    memory = NULL;
+    return 0;
+}
+
+template<class MemoryType>
+bool MemoryPoolImpl<MemoryType>::Initialize()
+{
+    CriticalSectionScoped cs(_crit);
+    return CreateMemory(_initialPoolSize) == 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::Terminate()
+{
+    CriticalSectionScoped cs(_crit);
+    assert(_createdMemory == _outstandingMemory + _memoryPool.GetSize());
+
+    _terminate = true;
+    // Reclaim all memory.
+    while(_createdMemory > 0)
+    {
+        ListItem* item = _memoryPool.First();
+        if(item == NULL)
+        {
+            // There is memory that hasn't been returned yet.
+            return -1;
+        }
+        MemoryType* memory = static_cast<MemoryType*>(item->GetItem());
+        delete memory;
+        _memoryPool.Erase(item);
+        _createdMemory--;
+    }
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::CreateMemory(
+    WebRtc_UWord32 amountToCreate)
+{
+    for(WebRtc_UWord32 i = 0; i < amountToCreate; i++)
+    {
+        MemoryType* memory = new MemoryType();
+        if(memory == NULL)
+        {
+            return -1;
+        }
+        _memoryPool.PushBack(static_cast<void*>(memory));
+        _createdMemory++;
+    }
+    return 0;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/memory_pool_win.h b/trunk/src/modules/audio_conference_mixer/source/memory_pool_win.h
new file mode 100644
index 0000000..1275ca1
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/memory_pool_win.h
@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_WINDOWS_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_WINDOWS_H_
+
+#include <assert.h>
+#include <windows.h>
+
+#include "aligned_malloc.h"
+#include "atomic32_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+template<class MemoryType> struct MemoryPoolItem;
+
+template<class MemoryType>
+struct MemoryPoolItemPayload
+{
+    MemoryPoolItemPayload()
+        : memoryType(),
+          base(NULL)
+    {
+    }
+    MemoryType                  memoryType;
+    MemoryPoolItem<MemoryType>* base;
+};
+
+template<class MemoryType>
+struct MemoryPoolItem
+{
+    // Atomic single linked list entry header.
+    SLIST_ENTRY itemEntry;
+    // Atomic single linked list payload.
+    MemoryPoolItemPayload<MemoryType>* payload;
+};
+
+template<class MemoryType>
+class MemoryPoolImpl
+{
+public:
+    // MemoryPool functions.
+    WebRtc_Word32 PopMemory(MemoryType*&  memory);
+    WebRtc_Word32 PushMemory(MemoryType*& memory);
+
+    MemoryPoolImpl(WebRtc_Word32 /*initialPoolSize*/);
+    ~MemoryPoolImpl();
+
+    // Atomic functions.
+    WebRtc_Word32 Terminate();
+    bool Initialize();
+private:
+    // Non-atomic function.
+    MemoryPoolItem<MemoryType>* CreateMemory();
+
+    // Windows implementation of single linked atomic list, documented here:
+    // http://msdn.microsoft.com/en-us/library/ms686962(VS.85).aspx
+
+    // Atomic single linked list head.
+    PSLIST_HEADER _pListHead;
+
+    Atomic32Wrapper _createdMemory;
+    Atomic32Wrapper _outstandingMemory;
+};
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::MemoryPoolImpl(
+    WebRtc_Word32 /*initialPoolSize*/)
+    : _pListHead(NULL),
+      _createdMemory(0),
+      _outstandingMemory(0)
+{
+}
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::~MemoryPoolImpl()
+{
+    Terminate();
+    if(_pListHead != NULL)
+    {
+        AlignedFree(reinterpret_cast<void*>(_pListHead));
+        _pListHead = NULL;
+    }
+    // Trigger assert if there is outstanding memory.
+    assert(_createdMemory.Value() == 0);
+    assert(_outstandingMemory.Value() == 0);
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
+{
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    if(pListEntry == NULL)
+    {
+        MemoryPoolItem<MemoryType>* item = CreateMemory();
+        if(item == NULL)
+        {
+            return -1;
+        }
+        pListEntry = &(item->itemEntry);
+    }
+    ++_outstandingMemory;
+    memory = &((MemoryPoolItem<MemoryType>*)pListEntry)->payload->memoryType;
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
+{
+    if(memory == NULL)
+    {
+        return -1;
+    }
+
+    MemoryPoolItem<MemoryType>* item =
+        ((MemoryPoolItemPayload<MemoryType>*)memory)->base;
+
+    const WebRtc_Word32 usedItems  = --_outstandingMemory;
+    const WebRtc_Word32 totalItems = _createdMemory.Value();
+    const WebRtc_Word32 freeItems  = totalItems - usedItems;
+    if(freeItems < 0)
+    {
+        assert(false);
+        delete item->payload;
+        AlignedFree(item);
+        return -1;
+    }
+    if(freeItems >= totalItems>>1)
+    {
+        delete item->payload;
+        AlignedFree(item);
+        --_createdMemory;
+        return 0;
+    }
+    InterlockedPushEntrySList(_pListHead,&(item->itemEntry));
+    return 0;
+}
+
+template<class MemoryType>
+bool MemoryPoolImpl<MemoryType>::Initialize()
+{
+    _pListHead = (PSLIST_HEADER)AlignedMalloc(sizeof(SLIST_HEADER),
+                                              MEMORY_ALLOCATION_ALIGNMENT);
+    if(_pListHead == NULL)
+    {
+        return false;
+    }
+    InitializeSListHead(_pListHead);
+    return true;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::Terminate()
+{
+    WebRtc_Word32 itemsFreed = 0;
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    while(pListEntry != NULL)
+    {
+        MemoryPoolItem<MemoryType>* item = ((MemoryPoolItem<MemoryType>*)pListEntry);
+        delete item->payload;
+        AlignedFree(item);
+        --_createdMemory;
+        itemsFreed++;
+        pListEntry = InterlockedPopEntrySList(_pListHead);
+    }
+    return itemsFreed;
+}
+
+template<class MemoryType>
+MemoryPoolItem<MemoryType>* MemoryPoolImpl<MemoryType>::CreateMemory()
+{
+    MemoryPoolItem<MemoryType>* returnValue = (MemoryPoolItem<MemoryType>*)
+        AlignedMalloc(sizeof(MemoryPoolItem<MemoryType>),
+                      MEMORY_ALLOCATION_ALIGNMENT);
+    if(returnValue == NULL)
+    {
+        return NULL;
+    }
+
+    returnValue->payload = new MemoryPoolItemPayload<MemoryType>();
+    if(returnValue->payload == NULL)
+    {
+        delete returnValue;
+        return NULL;
+    }
+    returnValue->payload->base = returnValue;
+    ++_createdMemory;
+    return returnValue;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_WINDOWS_H_
diff --git a/trunk/src/modules/audio_conference_mixer/source/time_scheduler.cc b/trunk/src/modules/audio_conference_mixer/source/time_scheduler.cc
new file mode 100644
index 0000000..183005e
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/time_scheduler.cc
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "critical_section_wrapper.h"
+#include "time_scheduler.h"
+
+namespace webrtc {
+TimeScheduler::TimeScheduler(const WebRtc_UWord32 periodicityInMs)
+    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _isStarted(false),
+      _lastPeriodMark(),
+      _periodicityInMs(periodicityInMs),
+      _periodicityInTicks(TickTime::MillisecondsToTicks(periodicityInMs)),
+      _missedPeriods(0)
+ {
+ }
+
+TimeScheduler::~TimeScheduler()
+{
+    delete _crit;
+}
+
+WebRtc_Word32 TimeScheduler::UpdateScheduler()
+{
+    CriticalSectionScoped cs(_crit);
+    if(!_isStarted)
+    {
+        _isStarted = true;
+        _lastPeriodMark = TickTime::Now();
+        return 0;
+    }
+    // Don't perform any calculations until the debt of pending periods has
+    // been worked off.
+    if(_missedPeriods > 0)
+    {
+        _missedPeriods--;
+        return 0;
+    }
+
+    // Calculate the time that has passed since the previous call to this function.
+    TickTime tickNow = TickTime::Now();
+    TickInterval amassedTicks = tickNow - _lastPeriodMark;
+    WebRtc_Word64 amassedMs = amassedTicks.Milliseconds();
+
+    // Calculate the number of periods that the elapsed time corresponds to.
+    WebRtc_Word32 periodsToClaim = (WebRtc_Word32)amassedMs /
+        ((WebRtc_Word32)_periodicityInMs);
+
+    // One period will be worked off by this call. Make sure that the number of
+    // pending periods doesn't end up being negative (e.g. if this function is
+    // called too often).
+    if(periodsToClaim < 1)
+    {
+        periodsToClaim = 1;
+    }
+
+    // Update the last period mark without introducing any drifting.
+    // Note that if this function is called too often, _lastPeriodMark can
+    // refer to a time in the future, which in turn will yield a
+    // TimeToNextUpdate that is greater than the periodicity.
+    for(WebRtc_Word32 i = 0; i < periodsToClaim; i++)
+    {
+        _lastPeriodMark += _periodicityInTicks;
+    }
+
+    // Update the total number of missed periods. Note that we have processed
+    // one period, hence the - 1.
+    _missedPeriods += periodsToClaim - 1;
+    return 0;
+}
+
+WebRtc_Word32 TimeScheduler::TimeToNextUpdate(
+    WebRtc_Word32& updateTimeInMS) const
+{
+    CriticalSectionScoped cs(_crit);
+    // Missed periods means that the next UpdateScheduler() should happen
+    // immediately.
+    if(_missedPeriods > 0)
+    {
+        updateTimeInMS = 0;
+        return 0;
+    }
+
+    // Calculate the time (in ms) that has passed since the last call to
+    // UpdateScheduler().
+    TickTime tickNow = TickTime::Now();
+    TickInterval ticksSinceLastUpdate = tickNow - _lastPeriodMark;
+    const WebRtc_Word32 millisecondsSinceLastUpdate =
+        (WebRtc_Word32) ticksSinceLastUpdate.Milliseconds();
+
+    updateTimeInMS = _periodicityInMs - millisecondsSinceLastUpdate;
+    updateTimeInMS =  (updateTimeInMS < 0) ? 0 : updateTimeInMS;
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_conference_mixer/source/time_scheduler.h b/trunk/src/modules/audio_conference_mixer/source/time_scheduler.h
new file mode 100644
index 0000000..e2674d9
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/source/time_scheduler.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// The TimeScheduler class keeps track of periodic events. It is non-drifting
+// and keeps track of any missed periods so that it is possible to catch up.
+// (compare to a metronome)
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
+
+#include "tick_util.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class TimeScheduler
+{
+public:
+    TimeScheduler(const WebRtc_UWord32 periodicityInMs);
+    ~TimeScheduler();
+
+    // Signal that a periodic event has been triggered.
+    WebRtc_Word32 UpdateScheduler();
+
+    // Set updateTimeInMS to the amount of time until UpdateScheduler() should
+    // be called. This time will never be negative.
+    WebRtc_Word32 TimeToNextUpdate(WebRtc_Word32& updateTimeInMS) const;
+
+private:
+    CriticalSectionWrapper* _crit;
+
+    bool _isStarted;
+    TickTime _lastPeriodMark;
+
+    WebRtc_UWord32 _periodicityInMs;
+    WebRtc_Word64  _periodicityInTicks;
+    WebRtc_UWord32 _missedPeriods;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
diff --git a/trunk/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.cc b/trunk/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.cc
new file mode 100644
index 0000000..e34a3cb
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.cc
@@ -0,0 +1,1098 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <iostream>
+#include <time.h>
+
+#include "functionTest.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "webrtc_vad.h"
+
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_MAC))
+   #include <sys/stat.h>
+   #define MY_PERMISSION_MASK S_IRWXU | S_IRWXG | S_IRWXO
+   #define MKDIR(directory) mkdir(directory,MY_PERMISSION_MASK)
+#else // defined(WINDOWS)
+   #include <direct.h>
+   #define MKDIR(directory) mkdir(directory)
+#endif
+
+int main(int /*argc*/, char* /*argv[]*/)
+{
+    // Initialize random number generator
+    //unsigned int seed = 1220716312; // just a seed that can be used
+    unsigned int seed = (unsigned)time( NULL );
+    srand(seed);
+    std::cout << "Starting function test. Seed = " << seed << std::endl;
+    std::cout << "Press enter to continue" << std::endl;
+    getchar();
+    MixerWrapper* testInstance1 = MixerWrapper::CreateMixerWrapper();
+    MixerWrapper* testInstance2 = MixerWrapper::CreateMixerWrapper();
+    if((testInstance1 == NULL) ||
+       (testInstance2 == NULL))
+    {
+        assert(false);
+        return 0;
+    }
+
+    char versionString[256] = "";
+    WebRtc_UWord32 remainingBufferInBytes = 256;
+    WebRtc_UWord32 position = 0;
+    AudioConferenceMixer::GetVersion(versionString,remainingBufferInBytes,position);
+
+    int read = 1;
+    while(read != 0)
+    {
+        std::cout << versionString << std::endl;
+        std::cout << "--------Menu-----------" << std::endl;
+        std::cout << std::endl;
+        std::cout << "0. Quit" << std::endl;
+        std::cout << "2. StartMixing" << std::endl;
+        std::cout << "3. StopMixing" << std::endl;
+        std::cout << "4. Create participant(s)" << std::endl;
+        std::cout << "5. Delete participant(s)" << std::endl;
+        std::cout << "6. List participants " << std::endl;
+        std::cout << "7. Print mix status " << std::endl;
+        std::cout << "8. Run identical scenario:" << std::endl;
+        std::cout << "   a. 1 VIP,       3 regular, amount of mixed = 3"  << std::endl;
+        std::cout << "   b. 1 anonymous, 3 regular, amount of mixed = 2"  << std::endl;
+        scanf("%i",&read);
+        getchar();
+        MixerParticipant::ParticipantType participantType;
+        int option = 0;
+        WebRtc_UWord32 id = 0;
+        ListItem* item = NULL;
+        ListWrapper participants;
+        if(read == 0)
+        {
+            // do nothing
+        }
+        else if(read == 1)
+        {
+        }
+        else if(read == 2)
+        {
+            testInstance1->StartMixing();
+        }
+        else if(read == 3)
+        {
+            testInstance1->StopMixing();
+        }
+        else if(read == 4)
+        {
+            while(true)
+            {
+                std::cout << "VIP(music)       = " << MixerParticipant::VIP << std::endl;
+                std::cout << "Regular(speech)  = " << MixerParticipant::REGULAR << std::endl;
+                std::cout << "Anonymous(music) = " << MixerParticipant::MIXED_ANONYMOUS << std::endl;
+                std::cout << "Select type of participant: ";
+                scanf("%i",&option);
+                if(option == MixerParticipant::VIP ||
+                   option == MixerParticipant::REGULAR ||
+                   option == MixerParticipant::MIXED_ANONYMOUS)
+                {
+                    break;
+                }
+            }
+            participantType = (MixerParticipant::ParticipantType)option;
+            testInstance1->CreateParticipant(participantType);
+        }
+        else if(read == 5)
+        {
+            std::cout << "Select participant to delete: ";
+            scanf("%i",&option);
+            id = option;
+            testInstance1->DeleteParticipant(id);
+            break;
+        }
+        else if(read == 6)
+        {
+            testInstance1->GetParticipantList(participants);
+            item = participants.First();
+            std::cout << "The following participants have been created: " << std::endl;
+            while(item)
+            {
+                WebRtc_UWord32 id = item->GetUnsignedItem();
+                std::cout << id;
+                item = participants.Next(item);
+                if(item != NULL)
+                {
+                    std::cout << ", ";
+                }
+                else
+                {
+                    std::cout << std::endl;
+                }
+            }
+        }
+        else if(read == 7)
+        {
+            std::cout << "-------------Mixer Status-------------" << std::endl;
+            testInstance1->PrintStatus();
+            testInstance2->PrintStatus();
+            std::cout << "Press enter to continue";
+            getchar();
+            std::cout << std::endl;
+            std::cout << std::endl;
+        }
+        else if(read == 8)
+        {
+            const WebRtc_Word32 amountOfParticipants = 4;
+            MixerParticipant::ParticipantType instance1Participants[] =
+                                                {MixerParticipant::VIP,
+                                                 MixerParticipant::REGULAR,
+                                                 MixerParticipant::REGULAR,
+                                                 MixerParticipant::REGULAR};
+            MixerParticipant::ParticipantType instance2Participants[] =
+                                               {MixerParticipant::MIXED_ANONYMOUS,
+                                                MixerParticipant::REGULAR,
+                                                MixerParticipant::REGULAR,
+                                                MixerParticipant::REGULAR};
+            for(WebRtc_Word32 i = 0; i < amountOfParticipants; i++)
+            {
+                WebRtc_Word32 startPosition = 0;
+                GenerateRandomPosition(startPosition);
+                testInstance1->CreateParticipant(instance1Participants[i],startPosition);
+                testInstance2->CreateParticipant(instance2Participants[i],startPosition);
+            }
+            bool success = true;
+            success = testInstance1->StartMixing();
+            assert(success);
+            success = testInstance2->StartMixing(2);
+            assert(success);
+        }
+    }
+
+    std::cout << "Press enter to stop" << std::endl;
+    getchar();
+    delete testInstance1;
+    delete testInstance2;
+    return 0;
+}
+
+FileWriter::FileWriter()
+    :
+    _file(NULL)
+{
+}
+
+FileWriter::~FileWriter()
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+}
+
+bool
+FileWriter::SetFileName(
+    const char* fileName)
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+    _file = fopen(fileName,"wb");
+    return _file != NULL;
+}
+
+bool
+FileWriter::WriteToFile(
+    const AudioFrame& audioFrame)
+{
+    WebRtc_Word32 written = (WebRtc_Word32)fwrite(audioFrame._payloadData,sizeof(WebRtc_Word16),audioFrame._payloadDataLengthInSamples,_file);
+    // Do not flush buffers since that will add (a lot of) delay
+    return written == audioFrame._payloadDataLengthInSamples;
+}
+
+FileReader::FileReader()
+    :
+    _frequency(kDefaultFrequency),
+    _sampleSize((_frequency*kProcessPeriodicityInMs)/1000),
+    _timeStamp(0),
+    _file(NULL),
+    _vadInstr(NULL),
+    _automaticVad(false),
+    _vad(false)
+{
+    if(WebRtcVad_Create(&_vadInstr) == 0)
+    {
+        if(WebRtcVad_Init(_vadInstr) != 0)
+        {
+            assert(false);
+            WebRtcVad_Free(_vadInstr);
+            _vadInstr = NULL;
+        }
+    }
+    else
+    {
+        assert(false);
+    }
+}
+
+FileReader::~FileReader()
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+    if(_vadInstr)
+    {
+        WebRtcVad_Free(_vadInstr);
+    }
+}
+
+bool
+FileReader::SetFileName(
+    const char* fileName)
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+    _file = fopen(fileName,"rb");
+    return _file != NULL;
+}
+
+bool
+FileReader::ReadFromFile(
+    AudioFrame& audioFrame)
+{
+
+    WebRtc_Word16 buffer[AudioFrame::kMaxAudioFrameSizeSamples];
+    LoopedFileRead(buffer,AudioFrame::kMaxAudioFrameSizeSamples,_sampleSize,_file);
+
+    bool vad = false;
+    GetVAD(buffer,_sampleSize,vad);
+    AudioFrame::VADActivity activity = vad ? AudioFrame::kVadActive :
+                                 AudioFrame::kVadPassive;
+
+    _volumeCalculator.ComputeLevel(buffer,_sampleSize);
+    const WebRtc_Word32 level = _volumeCalculator.GetLevel();
+    return audioFrame.UpdateFrame(  -1,
+                                    _timeStamp,
+                                    buffer,
+                                    _sampleSize,
+                                    _frequency,
+                                    AudioFrame::kNormalSpeech,
+                                    activity,
+                                    0,
+                                    level) == 0;
+
+}
+
+bool
+FileReader::FastForwardFile(
+    const WebRtc_Word32 samples)
+{
+    WebRtc_Word16* tempBuffer = new WebRtc_Word16[samples];
+    bool success = LoopedFileRead(tempBuffer,samples,samples,_file);
+    delete[] tempBuffer;
+    return success;
+}
+
+bool
+FileReader::EnableAutomaticVAD(
+    bool enable,
+    int mode)
+{
+    if(!_automaticVad &&
+       enable)
+    {
+        if(WebRtcVad_Init(_vadInstr) == -1)
+        {
+            return false;
+        }
+    }
+    WebRtcVad_set_mode(_vadInstr,mode);
+    _automaticVad = enable;
+    return true;
+}
+
+bool
+FileReader::SetVAD(
+    bool vad)
+{
+    if(_automaticVad)
+    {
+        return false;
+    }
+    _vad = vad;
+    return true;
+}
+
+bool
+FileReader::GetVAD(
+    WebRtc_Word16* buffer,
+    WebRtc_UWord8 bufferLengthInSamples,
+    bool& vad)
+{
+    if(_automaticVad)
+    {
+        WebRtc_Word16 result = WebRtcVad_Process(_vadInstr,_frequency,buffer,bufferLengthInSamples);
+        if(result == -1)
+        {
+            assert(false);
+            return false;
+        }
+        _vad = vad = (result == 1);
+    }
+    vad = _vad;
+    return true;
+}
+
+MixerParticipant*
+MixerParticipant::CreateParticipant(
+    const WebRtc_UWord32 id,
+    ParticipantType participantType,
+    const WebRtc_Word32 startPosition,
+    char* outputPath)
+{
+    if(participantType == RANDOM)
+    {
+        participantType = (ParticipantType)(rand() % 3);
+    }
+    MixerParticipant* participant = new MixerParticipant(id,participantType);
+    // Randomize the start position so we only need one input file
+    // assume file is smaller than 1 minute wideband = 60 * 16000
+    // Always start at a multiple of 10ms wideband
+    if(!participant->InitializeFileReader(startPosition) ||
+       !participant->InitializeFileWriter(outputPath))
+    {
+        delete participant;
+        return NULL;
+    }
+    return participant;
+}
+
+MixerParticipant::MixerParticipant(
+    const WebRtc_UWord32 id,
+    ParticipantType participantType)
+    :
+    _id(id),
+    _participantType(participantType),
+    _fileReader(),
+    _fileWriter()
+{
+}
+
+MixerParticipant::~MixerParticipant()
+{
+}
+
+WebRtc_Word32
+MixerParticipant::GetAudioFrame(
+    const WebRtc_Word32 /*id*/,
+    AudioFrame& audioFrame)
+{
+    if(!_fileReader.ReadFromFile(audioFrame))
+    {
+        return -1;
+    }
+    audioFrame._id = _id;
+    return 0;
+}
+
+WebRtc_Word32
+MixerParticipant::MixedAudioFrame(
+    const AudioFrame& audioFrame)
+{
+    return _fileWriter.WriteToFile(audioFrame);
+}
+
+WebRtc_Word32
+MixerParticipant::GetParticipantType(
+    ParticipantType& participantType)
+{
+    participantType = _participantType;
+    return 0;
+}
+
+bool
+MixerParticipant::InitializeFileReader(
+    const WebRtc_Word32 startPositionInSamples)
+{
+    // Regular participants read the conversation file; every other type
+    // reads the music file.
+    const char* sourceName =
+        (_participantType == REGULAR) ? "convFile.pcm" : "musicFile.pcm";
+    char fileName[128] = "";
+    sprintf(fileName,"%s",sourceName);
+    if(!_fileReader.SetFileName(fileName))
+    {
+        return false;
+    }
+    // Aggressive VAD mode 2; failure is unexpected but non-fatal.
+    if(!_fileReader.EnableAutomaticVAD(true,2))
+    {
+        assert(false);
+    }
+    // Skip ahead so participants sharing a file produce different audio.
+    return _fileReader.FastForwardFile(startPositionInSamples);
+}
+
+// Builds "<outputPath>outputFile<id>.pcm" and hands it to the file writer.
+// Returns false if the writer rejects the name.
+bool
+MixerParticipant::InitializeFileWriter(
+    char* outputPath)
+{
+    const WebRtc_Word32 stringsize = 128;
+    char fileName[stringsize] = "";
+    strncpy(fileName,outputPath,stringsize);
+    fileName[stringsize-1] = '\0';
+
+    char tempName[stringsize];
+    tempName[0] = '\0';
+    sprintf(tempName,"outputFile%d.pcm",(int)_id);
+    // strncat() appends up to n characters *plus* a terminating null, so
+    // the bound must reserve one byte for the terminator; the original
+    // bound (stringsize - strlen(fileName)) could overflow by one byte.
+    strncat(fileName,tempName,(stringsize - strlen(fileName)) - 1);
+    fileName[stringsize-1] = '\0';
+
+    return _fileWriter.SetFileName(fileName);
+}
+
+// Statistics buffers start out empty; they are grown lazily by the
+// MixedParticipants()/VADPositiveParticipants() callbacks.
+StatusReceiver::StatusReceiver(
+    const WebRtc_Word32 id)
+    :
+    _id(id),
+    _mixedParticipants(NULL),
+    _mixedParticipantsAmount(0),
+    _mixedParticipantsSize(0),
+    _vadPositiveParticipants(NULL),
+    _vadPositiveParticipantsAmount(0),
+    _vadPositiveParticipantsSize(0),
+    _mixedAudioLevel(0)
+{
+}
+
+// Both arrays are owned by this object (allocated in the callbacks);
+// delete[] on NULL is a no-op so no guards are needed.
+StatusReceiver::~StatusReceiver()
+{
+    delete[] _mixedParticipants;
+    delete[] _vadPositiveParticipants;
+}
+
+// Mixer callback: snapshot the statistics of the currently mixed
+// participants, growing the local buffer when it is too small.
+void
+StatusReceiver::MixedParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    assert(id == _id);  // callback must belong to this receiver
+
+    const bool needsGrow = _mixedParticipantsSize < size;
+    if(needsGrow)
+    {
+        delete[] _mixedParticipants;
+        _mixedParticipantsSize = size;
+        _mixedParticipants = new ParticipantStatistics[size];
+    }
+    _mixedParticipantsAmount = size;
+    memcpy(_mixedParticipants,participantStatistics,
+           sizeof(ParticipantStatistics)*size);
+}
+
+// Mixer callback: snapshot the statistics of participants with positive
+// voice activity, growing the local buffer when it is too small.
+void
+StatusReceiver::VADPositiveParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    assert(id == _id);  // callback must belong to this receiver
+
+    if(size > _vadPositiveParticipantsSize)
+    {
+        delete[] _vadPositiveParticipants;
+        _vadPositiveParticipantsSize = size;
+        _vadPositiveParticipants = new ParticipantStatistics[size];
+    }
+    _vadPositiveParticipantsAmount = size;
+    memcpy(_vadPositiveParticipants,participantStatistics,
+           sizeof(ParticipantStatistics)*size);
+}
+
+// Mixer callback reporting the level of the mixed stream; cached for
+// PrintMixedAudioLevel().
+void
+StatusReceiver::MixedAudioLevel(
+    const WebRtc_Word32  id,
+    const WebRtc_UWord32 level)
+{
+    assert(id == _id);  // callback must belong to this receiver
+    _mixedAudioLevel = level;
+}
+
+// Dump the most recently reported mixed-participant statistics to stdout.
+void
+StatusReceiver::PrintMixedParticipants()
+{
+    std::cout << "Mixed participants" << std::endl;
+    if(_mixedParticipantsAmount == 0)
+    {
+        std::cout << "N/A" << std::endl;
+    }
+    for(WebRtc_UWord16 index = 0; index < _mixedParticipantsAmount; ++index)
+    {
+        const ParticipantStatistics& stat = _mixedParticipants[index];
+        std::cout << index + 1 << ". Participant " << stat.participant << ": level = " << stat.level << std::endl;
+    }
+}
+
+// Dump the most recently reported VAD-positive participant statistics to
+// stdout. NOTE: the original code mistakenly printed the *mixed*
+// participant arrays here (copy-paste from PrintMixedParticipants());
+// it must read the _vadPositiveParticipants data instead.
+void
+StatusReceiver::PrintVadPositiveParticipants()
+{
+    std::cout << "VAD positive participants" << std::endl;
+    if(_vadPositiveParticipantsAmount == 0)
+    {
+        std::cout << "N/A"  << std::endl;
+    }
+    for(WebRtc_UWord16 i = 0; i < _vadPositiveParticipantsAmount; i++)
+    {
+        std::cout << i + 1 << ". Participant " << _vadPositiveParticipants[i].participant << ": level = " << _vadPositiveParticipants[i].level << std::endl;
+    }
+}
+
+// Print the last mixed-stream audio level reported by the mixer.
+void
+StatusReceiver::PrintMixedAudioLevel()
+{
+    std::cout << "Mixed audio level = " << _mixedAudioLevel << std::endl;
+}
+
+// Monotonically increasing id handed out to each MixerWrapper instance.
+WebRtc_Word32 MixerWrapper::_mixerWrapperIdCounter = 0;
+
+// Creates the per-instance output directory ("instance<N>/") and the
+// AudioConferenceMixer, then exercises the register/unregister callback
+// API: each call is asserted to succeed (== 0) or fail (== -1) exactly as
+// the mixer contract requires (double registration / double
+// unregistration must fail). The object ends up with both the mixed
+// stream callback and the status callback registered.
+MixerWrapper::MixerWrapper()
+    :
+    _processThread(NULL),
+    _threadId(0),
+    _firstProcessCall(true),
+    _previousTime(),
+    _periodicityInTicks(TickTime::MillisecondsToTicks(FileReader::kProcessPeriodicityInMs)),
+    _synchronizationEvent(EventWrapper::Create()),
+    _freeItemIds(),
+    _itemIdCounter(0),
+    _mixerParticipants(),
+    _mixerWrappererId(_mixerWrapperIdCounter++),
+    _instanceOutputPath(),
+    _trace(NULL),
+    _statusReceiver(_mixerWrappererId),
+    _generalAudioWriter()
+{
+    sprintf(_instanceOutputPath,"instance%d/",(int)_mixerWrappererId);
+    MKDIR(_instanceOutputPath);
+    _mixer = AudioConferenceMixer::CreateAudioConferenceMixer(
+                                                    _mixerWrappererId);
+    if(_mixer != NULL)
+    {
+        bool success = true;
+
+        // Registering twice must fail the second time; unregistering twice
+        // must fail the second time. Leave the callback registered.
+        success = _mixer->RegisterMixedStreamCallback(*this) == 0;
+        assert(success);
+        success = _mixer->RegisterMixedStreamCallback(*this) == -1;
+        assert(success);
+        success = _mixer->UnRegisterMixedStreamCallback() == 0;
+        assert(success);
+        success = _mixer->UnRegisterMixedStreamCallback() == -1;
+        assert(success);
+        success = _mixer->RegisterMixedStreamCallback(*this) == 0;
+        assert(success);
+
+        // Same register/unregister contract for the status callback;
+        // leave it registered with a 1-second reporting interval.
+        success = _mixer->RegisterMixerStatusCallback(_statusReceiver,2) == 0;
+        assert(success);
+        success = _mixer->RegisterMixerStatusCallback(_statusReceiver,1) == -1;
+        assert(success);
+        success = _mixer->UnRegisterMixerStatusCallback() == 0;
+        assert(success);
+        success = _mixer->UnRegisterMixerStatusCallback() == -1;
+        assert(success);
+        success = _mixer->RegisterMixerStatusCallback(_statusReceiver,1) == 0;
+        assert(success);
+    }
+    else
+    {
+        assert(false);
+        std::cout << "Failed to create mixer instance";
+    }
+}
+
+// Factory: constructs a wrapper and opens its general output file.
+// Returns NULL (and frees the instance) if the file cannot be set up.
+MixerWrapper*
+MixerWrapper::CreateMixerWrapper()
+{
+    MixerWrapper* wrapper = new MixerWrapper();
+    if(wrapper->InitializeFileWriter())
+    {
+        return wrapper;
+    }
+    delete wrapper;
+    return NULL;
+}
+
+// Stops mixing and releases everything this wrapper owns: the recycled-id
+// list, the synchronization event, the mixer and — fixed here — any
+// participants still registered in _mixerParticipants. The map stores
+// owned heap pointers, so the original destructor leaked every
+// participant that was not explicitly removed via DeleteParticipant().
+MixerWrapper::~MixerWrapper()
+{
+    StopMixing();
+    // Delete remaining participants; the mixer itself is destroyed right
+    // after, so no per-participant unregistration is performed here.
+    MapItem* item = _mixerParticipants.First();
+    while(item != NULL)
+    {
+        delete static_cast<MixerParticipant*>(item->GetItem());
+        _mixerParticipants.Erase(item);
+        item = _mixerParticipants.First();
+    }
+    ClearAllItemIds();
+    _synchronizationEvent->StopTimer();
+    delete _synchronizationEvent;
+    delete _mixer;
+}
+
+// Convenience overload: choose a random, 10 ms-aligned file start
+// position so a single shared input file yields different audio per
+// participant.
+bool
+MixerWrapper::CreateParticipant(
+    MixerParticipant::ParticipantType participantType)
+{
+    WebRtc_Word32 randomStart = 0;
+    GenerateRandomPosition(randomStart);
+    return CreateParticipant(participantType,randomStart);
+}
+
+// Allocate an id, create the participant, register it in the participant
+// map and start mixing it. Rolls back (frees the participant) on any
+// failure; note the allocated id is not returned to the free list in the
+// failure paths, matching the original behavior.
+bool
+MixerWrapper::CreateParticipant(
+    MixerParticipant::ParticipantType participantType,
+    const WebRtc_Word32 startPosition)
+{
+    WebRtc_UWord32 newId;
+    if(!GetFreeItemIds(newId))
+    {
+        return false;
+    }
+
+    MixerParticipant* newParticipant = MixerParticipant::CreateParticipant(
+        newId,participantType,startPosition,_instanceOutputPath);
+    if(newParticipant == NULL)
+    {
+        return false;
+    }
+    if(_mixerParticipants.Insert(newId,static_cast<void*>(newParticipant)) != 0)
+    {
+        delete newParticipant;
+        return false;
+    }
+    if(!StartMixingParticipant(newId))
+    {
+        DeleteParticipant(newId);
+        return false;
+    }
+    return true;
+}
+
+// Unregister the participant from the mixer, remove it from the map,
+// free it and recycle its id. Returns false if the participant is
+// unknown (StopMixingParticipant() performs the same map lookup first).
+bool
+MixerWrapper::DeleteParticipant(
+    const WebRtc_UWord32 id)
+{
+    bool success = StopMixingParticipant(id);
+    if(!success)
+    {
+        assert(false);
+        return false;
+    }
+    MapItem* item = _mixerParticipants.Find(id);
+    if(item == NULL)
+    {
+        // Unreachable in practice: StopMixingParticipant() already
+        // verified the id exists.
+        return false;
+    }
+    MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+    delete participant;
+    _mixerParticipants.Erase(item);
+    AddFreeItemIds(id);
+    return true;
+}
+
+// Configure the number of mixed participants, start the 10 ms periodic
+// timer and launch the process thread that drives the mixer. Returns
+// false if mixing is already running or the thread machinery cannot be
+// started.
+bool
+MixerWrapper::StartMixing(
+    const WebRtc_UWord32 mixedParticipants)
+{
+    if(_processThread)
+    {
+        // Already mixing.
+        return false;
+    }
+    if(_mixer->SetAmountOfMixedParticipants(mixedParticipants) != 0)
+    {
+        assert(false);
+    }
+    // Read the value back to verify the setter took effect.
+    WebRtc_UWord32 mixedParticipantsTest = 0;
+    _mixer->AmountOfMixedParticipants(mixedParticipantsTest);
+    assert(mixedParticipantsTest == mixedParticipants);
+
+    if(!_synchronizationEvent->StartTimer(true,10))
+    {
+        assert(false);
+        return false;
+    }
+    _processThread = ThreadWrapper::CreateThread(Process, this, kLowPriority);
+    // CreateThread can fail; the original code dereferenced the result
+    // without a NULL check.
+    if(_processThread == NULL)
+    {
+        _synchronizationEvent->StopTimer();
+        assert(false);
+        return false;
+    }
+    if(!_processThread->Start(_threadId))
+    {
+        delete _processThread;
+        _processThread = NULL;
+        assert(false);
+        return false;
+    }
+
+    return true;
+}
+
+// Stop the process thread and the periodic timer. Safe to call when
+// mixing was never started (_processThread == NULL). Always returns true.
+bool
+MixerWrapper::StopMixing()
+{
+    // ThreadWrapper::Stop() may need several attempts before the thread
+    // acknowledges the stop request; spin until it does.
+    while(_processThread &&
+          !_processThread->Stop())
+    {}
+    _synchronizationEvent->StopTimer();
+
+    delete _processThread;
+    _processThread = NULL;
+    return true;
+}
+
+// Mixer output callback. Writes the general (fully mixed) stream to the
+// instance output file, delivers each unique per-participant frame to
+// its owner, and sends the general frame to every participant that did
+// not receive a unique one.
+void
+MixerWrapper::NewMixedAudio(
+    const WebRtc_Word32 id,
+    const AudioFrame& generalAudioFrame,
+    const AudioFrame** uniqueAudioFrames,
+    const WebRtc_UWord32 size)
+{
+    if(id < 0)
+    {
+        assert(false);
+    }
+    // Store the general audio
+    _generalAudioWriter.WriteToFile(generalAudioFrame);
+
+    // Send the unique audio frames to its corresponding participants.
+    // Remember which participants got a unique frame so they can be
+    // excluded from the general pass below.
+    ListWrapper uniqueAudioFrameList;
+    for(WebRtc_UWord32 i = 0; i < size; i++)
+    {
+        // NOTE(review): this local 'id' shadows the function parameter.
+        WebRtc_UWord32 id = (uniqueAudioFrames[i])->_id;
+        MapItem* resultItem = _mixerParticipants.Find(id);
+        if(resultItem == NULL)
+        {
+            assert(false);
+            continue;
+        }
+        MixerParticipant* participant = static_cast<MixerParticipant*>(resultItem->GetItem());
+        participant->MixedAudioFrame(*(uniqueAudioFrames[i]));
+        uniqueAudioFrameList.PushBack(resultItem->GetItem());
+    }
+
+    // Send the general audio frames to the remaining participants
+    MapItem* item = _mixerParticipants.First();
+    while(item)
+    {
+        // Linear scan over the unique list; participant counts in this
+        // test are small so O(n*m) is acceptable here.
+        bool isUnique = false;
+        ListItem* compareItem = uniqueAudioFrameList.First();
+        while(compareItem)
+        {
+            if(compareItem->GetItem() == item->GetItem())
+            {
+                isUnique = true;
+                break;
+            }
+            compareItem = uniqueAudioFrameList.Next(compareItem);
+        }
+        if(!isUnique)
+        {
+            MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+            participant->MixedAudioFrame(generalAudioFrame);
+        }
+        item = _mixerParticipants.Next(item);
+    }
+}
+
+// Append the id of every registered participant to the caller's list.
+// Always returns true.
+bool
+MixerWrapper::GetParticipantList(
+    ListWrapper& participants)
+{
+    for(MapItem* current = _mixerParticipants.First(); current != NULL;
+        current = _mixerParticipants.Next(current))
+    {
+        participants.PushBack(current->GetId());
+    }
+    return true;
+}
+
+// Print all statistics gathered by the status receiver for this mixer
+// instance, separated by blank lines and closed with a divider.
+void
+MixerWrapper::PrintStatus()
+{
+    std::cout << "instance " << _mixerWrappererId << std::endl
+              << std::endl;
+    _statusReceiver.PrintMixedParticipants();
+    std::cout << std::endl;
+    _statusReceiver.PrintVadPositiveParticipants();
+    std::cout << std::endl;
+    _statusReceiver.PrintMixedAudioLevel();
+    std::cout << "---------------------------------------" << std::endl;
+}
+
+// Builds "<instanceOutputPath>generalOutputFile.pcm" and hands it to the
+// general audio writer. Returns false if the writer rejects the name.
+bool
+MixerWrapper::InitializeFileWriter()
+{
+    const WebRtc_Word32 stringsize = 128;
+    char fileName[stringsize] = "";
+    strncpy(fileName,_instanceOutputPath,stringsize);
+    fileName[stringsize-1] = '\0';
+
+    // strncat() appends up to n characters *plus* a terminating null, so
+    // the bound must reserve one byte for the terminator; the original
+    // bound (stringsize - strlen(fileName)) could overflow by one byte.
+    strncat(fileName,"generalOutputFile.pcm",(stringsize - strlen(fileName)) - 1);
+    fileName[stringsize-1] = '\0';
+    return _generalAudioWriter.SetFileName(fileName);
+}
+
+// ThreadWrapper trampoline: forwards to the instance Process() method.
+// Returning false ends the thread.
+bool
+MixerWrapper::Process(
+    void* instance)
+{
+    MixerWrapper* mixerWrapper = static_cast<MixerWrapper*>(instance);
+    return mixerWrapper->Process();
+}
+
+// Thread body: waits for the 10 ms timer tick, runs one mixer iteration
+// and reports drift between the expected and actual process cadence.
+// Returns false (ending the thread) on timer error or a 1 s timeout.
+bool
+MixerWrapper::Process()
+{
+    switch(_synchronizationEvent->Wait(1000))
+    {
+    case kEventSignaled:
+         // Normal operation, ~10 ms has passed
+        break;
+    case kEventError:
+        // Error occurred: end the thread and throw an assertion
+        assert(false);
+        return false;
+    case kEventTimeout:
+        // One second has passed without a timer signal; something is
+        // wrong, end the thread and throw an assertion
+        assert(false);
+        return false;
+    }
+    // Offset (in ms) between the actual and the nominal 10 ms period;
+    // positive means we are running late, negative means early.
+    WebRtc_Word32 processOfset = 0;
+    const TickTime currentTime = TickTime::Now();
+    if(_firstProcessCall)
+    {
+        // No previous tick to compare against on the first call.
+        _previousTime = TickTime::Now();
+        _firstProcessCall = false;
+    }
+    else
+    {
+        TickInterval deltaTime = (currentTime - _previousTime);
+        // Advance by the nominal period so drift accumulates and can be
+        // detected even when each individual tick is only slightly off.
+        _previousTime += _periodicityInTicks;
+        processOfset = (WebRtc_Word32) deltaTime.Milliseconds();
+        processOfset -= FileReader::kProcessPeriodicityInMs;
+    }
+
+    _mixer->Process();
+    WebRtc_Word32 timeUntilNextProcess = _mixer->TimeUntilNextProcess();
+    if(processOfset > FileReader::kProcessPeriodicityInMs)
+    {
+        // More than one period late: warn and resynchronize.
+        std::cout << "Performance Warning: Process running " << processOfset << " too slow" << std::endl;
+        _previousTime = currentTime;
+        if(timeUntilNextProcess > 0)
+        {
+            std::cout << "Performance Warning: test performance and module performance missmatch" << std::endl;
+        }
+    }
+    else if(processOfset < -FileReader::kProcessPeriodicityInMs)
+    {
+        // More than one period early: warn and resynchronize.
+        std::cout << "Performance Warning: Process running " << -processOfset << " too fast" << std::endl;
+        _previousTime = currentTime;
+        if(timeUntilNextProcess < FileReader::kProcessPeriodicityInMs)
+        {
+            std::cout << "Performance Warning: test performance and module performance missmatch" << std::endl;
+        }
+    }
+    return true;
+}
+
+
+// Register the participant with the mixer according to its type, while
+// asserting the mixer's API contract: setting a status twice in a row
+// must fail (-1) and reading a status back must reflect the last set.
+// ANONYMOUS participants only get anonymous mixability; REGULAR and VIP
+// get normal mixability, and VIPs additionally get VIP status.
+bool
+MixerWrapper::StartMixingParticipant(
+    const WebRtc_UWord32 id)
+{
+    MapItem* item = _mixerParticipants.Find(id);
+    if(item == NULL)
+    {
+        return false;
+    }
+    MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+    MixerParticipant::ParticipantType participantType = MixerParticipant::REGULAR;
+    participant->GetParticipantType(participantType);
+    if(participantType == MixerParticipant::MIXED_ANONYMOUS)
+    {
+        // Toggle anonymous mixability true -> false -> true, asserting
+        // each transition and each read-back; finish in the enabled state.
+        bool anonymouslyMixed = false;
+        bool success = _mixer->SetAnonymousMixabilityStatus(*participant,true) == 0;
+        assert(success);
+        success = _mixer->AnonymousMixabilityStatus(*participant,anonymouslyMixed) == 0;
+        assert(success);
+        assert(anonymouslyMixed);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,true) == -1;
+        assert(success);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,false) == 0;
+        assert(success);
+        success = _mixer->AnonymousMixabilityStatus(*participant,anonymouslyMixed) == 0;
+        assert(success);
+        assert(!anonymouslyMixed);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,false) == -1;
+        assert(success);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,true) == 0;
+        assert(success);
+        success = _mixer->AnonymousMixabilityStatus(*participant,anonymouslyMixed) == 0;
+        assert(success);
+        assert(anonymouslyMixed);
+        return success;
+    }
+    WebRtc_UWord32 previousAmountOfMixableParticipants = 0;
+    bool success = _mixer->AmountOfMixables(previousAmountOfMixableParticipants) == 0;
+    assert(success);
+
+    // Toggle mixability true -> false -> true; finish mixable.
+    success = _mixer->SetMixabilityStatus(*participant,true) == 0;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,true) == -1;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,false) == 0;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,false) == -1;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,true) == 0;
+    assert(success);
+    if(!success)
+    {
+        return false;
+    }
+
+    // The mixable count must have grown by exactly one.
+    WebRtc_UWord32 currentAmountOfMixableParticipants = 0;
+    success = _mixer->AmountOfMixables(currentAmountOfMixableParticipants) == 0;
+    assert(currentAmountOfMixableParticipants == previousAmountOfMixableParticipants + 1);
+
+    bool mixable = true;
+    success = _mixer->MixabilityStatus(*participant,mixable) == 0;
+    assert(success);
+    assert(mixable);
+    if(participantType == MixerParticipant::REGULAR)
+    {
+        return true;
+    }
+    // VIP participants: toggle VIP status true -> false -> true with the
+    // same set/read-back assertions; finish as VIP.
+    bool IsVIP = false;
+    success = _mixer->SetVIPStatus(*participant,true) == 0;
+    assert(success);
+    success = _mixer->VIPStatus(*participant,IsVIP) == 0;
+    assert(success);
+    assert(IsVIP);
+    success = _mixer->SetVIPStatus(*participant,true) == -1;
+    assert(success);
+    success = _mixer->SetVIPStatus(*participant,false) == 0;
+    assert(success);
+    success = _mixer->VIPStatus(*participant,IsVIP) == 0;
+    assert(success);
+    assert(!IsVIP);
+    success = _mixer->SetVIPStatus(*participant,false) == -1;
+    assert(success);
+    success = _mixer->SetVIPStatus(*participant,true) == 0;
+    assert(success);
+    success = _mixer->VIPStatus(*participant,IsVIP) == 0;
+    assert(success);
+    assert(IsVIP);
+    assert(success);
+    return success;
+}
+
+// Remove the participant from the mix and verify the mixable count
+// dropped by exactly one (or stayed unchanged if the removal failed).
+// Returns false if the id is unknown or the final count read fails.
+bool
+MixerWrapper::StopMixingParticipant(
+    const WebRtc_UWord32 id)
+{
+    MapItem* item = _mixerParticipants.Find(id);
+    if(item == NULL)
+    {
+        return false;
+    }
+    MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+    bool success = false;
+    WebRtc_UWord32 previousAmountOfMixableParticipants = 0;
+    success = _mixer->AmountOfMixables(previousAmountOfMixableParticipants) == 0;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,false) == 0;
+    assert(success);
+    WebRtc_UWord32 currentAmountOfMixableParticipants = 0;
+    success = _mixer->AmountOfMixables(currentAmountOfMixableParticipants) == 0;
+    assert(success);
+    assert(success ? currentAmountOfMixableParticipants == previousAmountOfMixableParticipants -1 :
+                     currentAmountOfMixableParticipants == previousAmountOfMixableParticipants);
+    return success;
+}
+
+// Hand out a participant id: recycle one from the free list if possible,
+// otherwise allocate the next counter value. Returns false only when the
+// counter space is exhausted.
+bool
+MixerWrapper::GetFreeItemIds(
+    WebRtc_UWord32& itemId)
+{
+    if(!_freeItemIds.Empty())
+    {
+        ListItem* item = _freeItemIds.First();
+        WebRtc_UWord32* id = static_cast<WebRtc_UWord32*>(item->GetItem());
+        itemId = *id;
+        delete id;
+        // Remove the node as well; the original code left it in the list,
+        // so the next call would re-fetch the same item and dereference
+        // the just-deleted id (use-after-free).
+        _freeItemIds.Erase(item);
+        return true;
+    }
+    if(_itemIdCounter == (WebRtc_UWord32) -1)
+    {
+        // Counter exhausted (all 2^32-1 ids handed out).
+        return false;
+    }
+    itemId = _itemIdCounter++;
+    return true;
+}
+
+// Return a released id to the free list so GetFreeItemIds() can recycle
+// it; ownership of the heap copy moves to the list.
+void
+MixerWrapper::AddFreeItemIds(
+    const WebRtc_UWord32 itemId)
+{
+    WebRtc_UWord32* recycledId = new WebRtc_UWord32(itemId);
+    _freeItemIds.PushBack(static_cast<void*>(recycledId));
+}
+
+// Drain the free-id list, releasing both the heap-allocated id payloads
+// and the list nodes.
+void
+MixerWrapper::ClearAllItemIds()
+{
+    for(ListItem* item = _freeItemIds.First(); item != NULL;
+        item = _freeItemIds.First())
+    {
+        delete static_cast<WebRtc_UWord32*>(item->GetItem());
+        _freeItemIds.Erase(item);
+    }
+}
+
+// Read samplesToRead 16-bit samples into buffer, wrapping once to the
+// start of the file when EOF is reached. Returns false when the buffer
+// is too small or the file cannot supply enough samples even after the
+// wrap.
+bool
+LoopedFileRead(
+    WebRtc_Word16* buffer,
+    WebRtc_UWord32 bufferSizeInSamples,
+    WebRtc_UWord32 samplesToRead,
+    FILE* file)
+{
+    if(bufferSizeInSamples < samplesToRead)
+    {
+        return false;
+    }
+    WebRtc_UWord32 readSamples = (WebRtc_UWord32)fread(
+        buffer,sizeof(WebRtc_Word16),samplesToRead,file);
+    if(readSamples < samplesToRead)
+    {
+        // Hit end-of-file: rewind and read the remainder from the start.
+        const WebRtc_UWord32 remaining = samplesToRead - readSamples;
+        fseek(file,0,SEEK_SET);
+        readSamples += (WebRtc_UWord32)fread(
+            &buffer[readSamples],sizeof(WebRtc_Word16),remaining,file);
+    }
+    return readSamples == samplesToRead;
+}
+
+// Pick a random 10 ms-aligned start offset (a multiple of 160 samples)
+// within one minute of wideband audio (60 s * 16000 Hz).
+void
+GenerateRandomPosition(
+    WebRtc_Word32& startPosition)
+{
+    const WebRtc_Word32 framesPerMinute = 60*16000/160;
+    startPosition = (rand() % framesPerMinute) * 160;
+}
diff --git a/trunk/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.h b/trunk/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.h
new file mode 100644
index 0000000..f25e5f1
--- /dev/null
+++ b/trunk/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.h
@@ -0,0 +1,276 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_TEST_FUNCTIONTEST_FUNCTIONTEST_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_TEST_FUNCTIONTEST_FUNCTIONTEST_H_
+
+#include "module_common_types.h"
+#include "level_indicator.h"
+#include "list_wrapper.h"
+#include "map_wrapper.h"
+#include "audio_conference_mixer.h"
+#include "audio_conference_mixer_defines.h"
+#include "tick_util.h"
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+class Trace;
+}
+struct WebRtcVadInst;
+
+// Writes raw PCM audio frames to a file: SetFileName() opens the target,
+// WriteToFile() appends one AudioFrame worth of samples.
+class FileWriter
+{
+public:
+    FileWriter();
+    ~FileWriter();
+
+    // Opens fileName for writing. Returns false on failure.
+    bool SetFileName(
+        const char* fileName);
+
+    // Appends the samples of audioFrame to the file.
+    bool WriteToFile(
+        const AudioFrame& audioFrame);
+private:
+    FILE* _file;  // owned; NOTE(review): presumably closed in ~FileWriter -- confirm in the .cc
+};
+
+// Reads raw PCM audio from a file in 10 ms chunks and runs (optional)
+// automatic voice-activity detection on each chunk.
+class FileReader
+{
+public:
+    enum {kProcessPeriodicityInMs = 10};
+    enum Frequency
+    {
+        kNbInHz          = 8000,
+        kWbInHz          = 16000,
+        kDefaultFrequency = kWbInHz
+    };
+
+    FileReader();
+    ~FileReader();
+
+    // Opens fileName for reading. Returns false on failure.
+    bool SetFileName(
+        const char* fileName);
+
+    // Fills audioFrame with the next 10 ms of samples.
+    bool ReadFromFile(
+        AudioFrame& audioFrame);
+
+    // Skips ahead the given number of samples in the file.
+    bool FastForwardFile(
+        const WebRtc_Word32 samples);
+
+    // Turns automatic VAD on/off; mode selects the WebRtcVad aggressiveness.
+    bool EnableAutomaticVAD(
+        bool enable,
+        int mode);
+
+    // Forces the VAD flag when automatic VAD is disabled.
+    bool SetVAD(
+        bool vad);
+private:
+    // Runs WebRtcVad on the buffer and reports activity through vad.
+    bool GetVAD(
+        WebRtc_Word16* buffer,
+        WebRtc_UWord8 bufferLengthInSamples,
+        bool& vad);
+
+    Frequency       _frequency;      // sample rate of the input file
+    WebRtc_UWord8     _sampleSize;   // NOTE(review): presumably bytes per sample -- confirm
+
+    WebRtc_UWord32 _timeStamp;       // running timestamp stamped on frames
+
+    FILE* _file;                     // input file handle (owned)
+
+    WebRtcVadInst* _vadInstr;        // WebRtcVad instance (owned)
+    bool  _automaticVad;             // true: VAD computed per frame
+    bool  _vad;                      // last VAD decision / forced value
+
+    LevelIndicator _volumeCalculator;  // tracks signal level of read audio
+};
+
+// Test participant: supplies audio to the conference mixer from a file
+// and records the audio mixed for it to an output file.
+// Fix: the class must derive from the mixer's callback interface
+// webrtc::MixerParticipant (declared in audio_conference_mixer_defines.h);
+// the original `class MixerParticipant : public MixerParticipant` named
+// the class being defined as its own base, which is ill-formed C++.
+class MixerParticipant : public webrtc::MixerParticipant
+{
+public:
+    enum ParticipantType
+    {
+        VIP             = 0,
+        REGULAR         = 1,
+        MIXED_ANONYMOUS = 2,
+        RANDOM          = 3   // resolved to one of the above at creation
+    };
+
+    // Factory: creates and fully initializes a participant; returns NULL
+    // if the input or output file cannot be set up.
+    static MixerParticipant* CreateParticipant(
+        const WebRtc_UWord32 id,
+        ParticipantType participantType,
+        const WebRtc_Word32 startPosition,
+        char* outputPath);
+    ~MixerParticipant();
+
+    // Mixer callback: supply the next frame of audio for this participant.
+    WebRtc_Word32 GetAudioFrame(
+        const WebRtc_Word32 id,
+        AudioFrame& audioFrame);
+
+    // Mixer callback: receive the audio mixed for this participant.
+    WebRtc_Word32 MixedAudioFrame(
+        const AudioFrame& audioFrame);
+
+    // Reports the type chosen at creation time.
+    WebRtc_Word32 GetParticipantType(
+        ParticipantType& participantType);
+private:
+    MixerParticipant(
+        const WebRtc_UWord32 id,
+        ParticipantType participantType);
+
+    bool InitializeFileReader(
+        const WebRtc_Word32 startPositionInSamples);
+
+    bool InitializeFileWriter(
+        char* outputPath);
+
+    WebRtc_UWord32 _id;
+    ParticipantType _participantType;
+
+    FileReader _fileReader;   // input audio source
+    FileWriter _fileWriter;   // per-participant mixed output
+};
+
+// Receives status callbacks from the AudioConferenceMixer and caches the
+// latest statistics so the test can print them on demand.
+class StatusReceiver : public AudioMixerStatusReceiver
+{
+public:
+    StatusReceiver(
+        const WebRtc_Word32 id);
+    ~StatusReceiver();
+
+    // Mixer callback: statistics for the currently mixed participants.
+    void MixedParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    // Mixer callback: statistics for participants with positive VAD.
+    void VADPositiveParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    // Mixer callback: level of the mixed output stream.
+    void MixedAudioLevel(
+        const WebRtc_Word32 id,
+        const WebRtc_UWord32 level);
+
+    void PrintMixedParticipants();
+
+    void PrintVadPositiveParticipants();
+
+    void PrintMixedAudioLevel();
+private:
+    WebRtc_Word32 _id;  // mixer instance this receiver is bound to
+
+    // Cached snapshots; arrays are owned and grown lazily (Amount =
+    // valid entries, Size = allocated capacity).
+    ParticipantStatistics*  _mixedParticipants;
+    WebRtc_UWord32                _mixedParticipantsAmount;
+    WebRtc_UWord32                _mixedParticipantsSize;
+
+    ParticipantStatistics*  _vadPositiveParticipants;
+    WebRtc_UWord32                _vadPositiveParticipantsAmount;
+    WebRtc_UWord32                _vadPositiveParticipantsSize;
+
+    WebRtc_UWord32 _mixedAudioLevel;
+};
+
+// Drives one AudioConferenceMixer instance: owns the participants, the
+// 10 ms process thread and the per-instance output files, and receives
+// the mixed output via the AudioMixerOutputReceiver callback.
+class MixerWrapper : public AudioMixerOutputReceiver
+{
+public:
+    // Factory; returns NULL if the general output file cannot be opened.
+    static MixerWrapper* CreateMixerWrapper();
+    ~MixerWrapper();
+
+    bool SetMixFrequency(
+        const AudioConferenceMixer::Frequency frequency);
+
+    // Creates a participant with a random file start position.
+    bool CreateParticipant(
+        MixerParticipant::ParticipantType participantType);
+
+    bool CreateParticipant(
+        MixerParticipant::ParticipantType participantType,
+        const WebRtc_Word32 startPosition);
+
+    bool DeleteParticipant(
+        const WebRtc_UWord32 id);
+
+    // Starts the periodic process thread driving the mixer.
+    bool StartMixing(
+        const WebRtc_UWord32 mixedParticipants = AudioConferenceMixer::kDefaultAmountOfMixedParticipants);
+
+    bool StopMixing();
+
+    // AudioMixerOutputReceiver callback with the mixed audio.
+    void NewMixedAudio(
+        const WebRtc_Word32 id,
+        const AudioFrame& generalAudioFrame,
+        const AudioFrame** uniqueAudioFrames,
+        const WebRtc_UWord32 size);
+
+    // Appends all registered participant ids to the given list.
+    bool GetParticipantList(
+        ListWrapper& participants);
+
+    void PrintStatus();
+private:
+    MixerWrapper();
+
+    bool InitializeFileWriter();
+
+    // ThreadWrapper trampoline forwarding to the instance Process().
+    static bool Process(
+        void* instance);
+
+    bool Process();
+
+    bool StartMixingParticipant(
+        const WebRtc_UWord32 id);
+
+    bool StopMixingParticipant(
+        const WebRtc_UWord32 id);
+
+    bool GetFreeItemIds(
+        WebRtc_UWord32& itemId);
+
+    void AddFreeItemIds(
+        const WebRtc_UWord32 itemId);
+
+    void ClearAllItemIds();
+
+    webrtc::ThreadWrapper*  _processThread;  // NULL when not mixing
+    unsigned int _threadId;
+
+    // Performance hooks
+    enum{WARNING_COUNTER = 100};
+
+    bool _firstProcessCall;
+    TickTime _previousTime;             // Tick time of previous process
+    const WebRtc_Word64  _periodicityInTicks; // Periodicity
+
+    webrtc::EventWrapper*  _synchronizationEvent;  // 10 ms tick source
+
+    ListWrapper        _freeItemIds;   // recycled participant ids (owned heap ints)
+    WebRtc_UWord32    _itemIdCounter;  // next never-used id
+
+    MapWrapper _mixerParticipants;     // id -> MixerParticipant* (owned)
+
+    static WebRtc_Word32 _mixerWrapperIdCounter;
+    WebRtc_Word32 _mixerWrappererId;
+    char _instanceOutputPath[128];     // "instance<N>/" output directory
+
+    webrtc::Trace* _trace;
+    AudioConferenceMixer* _mixer;      // owned
+
+    StatusReceiver _statusReceiver;    // registered as status callback
+
+    FileWriter _generalAudioWriter;    // full mix output file
+};
+
+// Reads samplesToRead 16-bit samples into buffer, wrapping around to the
+// start of the file once when EOF is reached.
+bool
+LoopedFileRead(
+    WebRtc_Word16* buffer,
+    WebRtc_UWord32 bufferSizeInSamples,
+    WebRtc_UWord32 samplesToRead,
+    FILE* file);
+
+// Writes a random 10 ms-aligned wideband start offset to startPosition.
+void
+GenerateRandomPosition(
+    WebRtc_Word32& startPosition);
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_TEST_FUNCTIONTEST_FUNCTIONTEST_H_
diff --git a/trunk/src/modules/audio_device/OWNERS b/trunk/src/modules/audio_device/OWNERS
new file mode 100644
index 0000000..a07ced3
--- /dev/null
+++ b/trunk/src/modules/audio_device/OWNERS
@@ -0,0 +1,4 @@
+henrikg@webrtc.org
+henrika@webrtc.org
+niklas.enbom@webrtc.org
+xians@webrtc.org
diff --git a/trunk/src/modules/audio_device/main/interface/audio_device.h b/trunk/src/modules/audio_device/main/interface/audio_device.h
new file mode 100644
index 0000000..1149055
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/interface/audio_device.h
@@ -0,0 +1,207 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
+#define MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
+
+#include "module.h"
+#include "audio_device_defines.h"
+
+namespace webrtc {
+
+// Abstract interface to the platform audio device (ADM): device
+// enumeration/selection, playout/recording transport, volume, mute and
+// mobile-specific controls. Concrete per-OS implementations are obtained
+// through CreateAudioDeviceModule(); reference counting comes from the
+// RefCountedModule base.
+class AudioDeviceModule : public RefCountedModule {
+ public:
+  enum ErrorCode {
+    kAdmErrNone = 0,
+    kAdmErrArgument = 1
+  };
+
+  // Underlying OS audio API used by the implementation.
+  enum AudioLayer {
+    kPlatformDefaultAudio = 0,
+    kWindowsWaveAudio = 1,
+    kWindowsCoreAudio = 2,
+    kLinuxAlsaAudio = 3,
+    kLinuxPulseAudio = 4,
+    kDummyAudio = 5
+  };
+
+  // Special device indices for SetPlayoutDevice/SetRecordingDevice.
+  enum WindowsDeviceType {
+    kDefaultCommunicationDevice = -1,
+    kDefaultDevice = -2
+  };
+
+  enum BufferType {
+    kFixedBufferSize  = 0,
+    kAdaptiveBufferSize = 1
+  };
+
+  enum ChannelType {
+    kChannelLeft = 0,
+    kChannelRight = 1,
+    kChannelBoth = 2
+  };
+
+ public:
+  // Retrieve the currently utilized audio layer
+  virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0;
+
+  // Error handling
+  virtual ErrorCode LastError() const = 0;
+  virtual int32_t RegisterEventObserver(AudioDeviceObserver* eventCallback) = 0;
+
+  // Full-duplex transportation of PCM audio
+  virtual int32_t RegisterAudioCallback(AudioTransport* audioCallback) = 0;
+
+  // Main initialization and termination
+  virtual int32_t Init() = 0;
+  virtual int32_t Terminate() = 0;
+  virtual bool Initialized() const = 0;
+
+  // Device enumeration
+  virtual int16_t PlayoutDevices() = 0;
+  virtual int16_t RecordingDevices() = 0;
+  virtual int32_t PlayoutDeviceName(uint16_t index,
+                                    char name[kAdmMaxDeviceNameSize],
+                                    char guid[kAdmMaxGuidSize]) = 0;
+  virtual int32_t RecordingDeviceName(uint16_t index,
+                                      char name[kAdmMaxDeviceNameSize],
+                                      char guid[kAdmMaxGuidSize]) = 0;
+
+  // Device selection
+  virtual int32_t SetPlayoutDevice(uint16_t index) = 0;
+  virtual int32_t SetPlayoutDevice(WindowsDeviceType device) = 0;
+  virtual int32_t SetRecordingDevice(uint16_t index) = 0;
+  virtual int32_t SetRecordingDevice(WindowsDeviceType device) = 0;
+
+  // Audio transport initialization
+  virtual int32_t PlayoutIsAvailable(bool* available) = 0;
+  virtual int32_t InitPlayout() = 0;
+  virtual bool PlayoutIsInitialized() const = 0;
+  virtual int32_t RecordingIsAvailable(bool* available) = 0;
+  virtual int32_t InitRecording() = 0;
+  virtual bool RecordingIsInitialized() const = 0;
+
+  // Audio transport control
+  virtual int32_t StartPlayout() = 0;
+  virtual int32_t StopPlayout() = 0;
+  virtual bool Playing() const = 0;
+  virtual int32_t StartRecording() = 0;
+  virtual int32_t StopRecording() = 0;
+  virtual bool Recording() const = 0;
+
+  // Microphone Automatic Gain Control (AGC)
+  virtual int32_t SetAGC(bool enable) = 0;
+  virtual bool AGC() const = 0;
+
+  // Volume control based on the Windows Wave API (Windows only)
+  virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
+                                   uint16_t volumeRight) = 0;
+  virtual int32_t WaveOutVolume(uint16_t* volumeLeft,
+                                uint16_t* volumeRight) const = 0;
+
+  // Audio mixer initialization
+  virtual int32_t SpeakerIsAvailable(bool* available) = 0;
+  virtual int32_t InitSpeaker() = 0;
+  virtual bool SpeakerIsInitialized() const = 0;
+  virtual int32_t MicrophoneIsAvailable(bool* available) = 0;
+  virtual int32_t InitMicrophone() = 0;
+  virtual bool MicrophoneIsInitialized() const = 0;
+
+  // Speaker volume controls
+  virtual int32_t SpeakerVolumeIsAvailable(bool* available) = 0;
+  virtual int32_t SetSpeakerVolume(uint32_t volume) = 0;
+  virtual int32_t SpeakerVolume(uint32_t* volume) const = 0;
+  virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const = 0;
+  virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const = 0;
+  virtual int32_t SpeakerVolumeStepSize(uint16_t* stepSize) const = 0;
+
+  // Microphone volume controls
+  virtual int32_t MicrophoneVolumeIsAvailable(bool* available) = 0;
+  virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0;
+  virtual int32_t MicrophoneVolume(uint32_t* volume) const = 0;
+  virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const = 0;
+  virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const = 0;
+  virtual int32_t MicrophoneVolumeStepSize(uint16_t* stepSize) const = 0;
+
+  // Speaker mute control
+  virtual int32_t SpeakerMuteIsAvailable(bool* available) = 0;
+  virtual int32_t SetSpeakerMute(bool enable) = 0;
+  virtual int32_t SpeakerMute(bool* enabled) const = 0;
+
+  // Microphone mute control
+  virtual int32_t MicrophoneMuteIsAvailable(bool* available) = 0;
+  virtual int32_t SetMicrophoneMute(bool enable) = 0;
+  virtual int32_t MicrophoneMute(bool* enabled) const = 0;
+
+  // Microphone boost control
+  virtual int32_t MicrophoneBoostIsAvailable(bool* available) = 0;
+  virtual int32_t SetMicrophoneBoost(bool enable) = 0;
+  virtual int32_t MicrophoneBoost(bool* enabled) const = 0;
+
+  // Stereo support
+  virtual int32_t StereoPlayoutIsAvailable(bool* available) const = 0;
+  virtual int32_t SetStereoPlayout(bool enable) = 0;
+  virtual int32_t StereoPlayout(bool* enabled) const = 0;
+  virtual int32_t StereoRecordingIsAvailable(bool* available) const = 0;
+  virtual int32_t SetStereoRecording(bool enable) = 0;
+  virtual int32_t StereoRecording(bool* enabled) const = 0;
+  virtual int32_t SetRecordingChannel(const ChannelType channel) = 0;
+  virtual int32_t RecordingChannel(ChannelType* channel) const = 0;
+
+  // Delay information and control
+  virtual int32_t SetPlayoutBuffer(const BufferType type,
+                                   uint16_t sizeMS = 0) = 0;
+  virtual int32_t PlayoutBuffer(BufferType* type, uint16_t* sizeMS) const = 0;
+  virtual int32_t PlayoutDelay(uint16_t* delayMS) const = 0;
+  virtual int32_t RecordingDelay(uint16_t* delayMS) const = 0;
+
+  // CPU load
+  virtual int32_t CPULoad(uint16_t* load) const = 0;
+
+  // Recording of raw PCM data
+  virtual int32_t StartRawOutputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) = 0;
+  virtual int32_t StopRawOutputFileRecording() = 0;
+  virtual int32_t StartRawInputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) = 0;
+  virtual int32_t StopRawInputFileRecording() = 0;
+
+  // Native sample rate controls (samples/sec)
+  virtual int32_t SetRecordingSampleRate(const uint32_t samplesPerSec) = 0;
+  virtual int32_t RecordingSampleRate(uint32_t* samplesPerSec) const = 0;
+  virtual int32_t SetPlayoutSampleRate(const uint32_t samplesPerSec) = 0;
+  virtual int32_t PlayoutSampleRate(uint32_t* samplesPerSec) const = 0;
+
+  // Mobile device specific functions
+  virtual int32_t ResetAudioDevice() = 0;
+  virtual int32_t SetLoudspeakerStatus(bool enable) = 0;
+  virtual int32_t GetLoudspeakerStatus(bool* enabled) const = 0;
+
+  // *Experimental - not recommended for use.*
+  // Enables the Windows Core Audio built-in AEC. Fails on other platforms.
+  //
+  // Must be called before InitRecording(). When enabled:
+  // 1. StartPlayout() must be called before StartRecording().
+  // 2. StopRecording() should be called before StopPlayout().
+  //    The reverse order may cause garbage audio to be rendered or the
+  //    capture side to halt until StopRecording() is called.
+  virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
+  virtual bool BuiltInAECIsEnabled() const { return false; }
+
+ protected:
+  // Destruction goes through the RefCountedModule reference counting.
+  virtual ~AudioDeviceModule() {};
+};
+
+// Factory for the platform ADM implementation; id tags trace output and
+// audioLayer selects the underlying OS audio API.
+AudioDeviceModule* CreateAudioDeviceModule(
+    WebRtc_Word32 id, AudioDeviceModule::AudioLayer audioLayer);
+
+}  // namespace webrtc
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
diff --git a/trunk/src/modules/audio_device/main/interface/audio_device_defines.h b/trunk/src/modules/audio_device/main/interface/audio_device_defines.h
new file mode 100644
index 0000000..67db65d
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/interface/audio_device_defines.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DEFINES_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DEFINES_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Maximum sizes (in chars) of the fixed-size string buffers used across
+// the AudioDeviceModule API (device names, raw-recording file names, GUIDs).
+static const int kAdmMaxDeviceNameSize = 128;
+static const int kAdmMaxFileNameSize = 512;
+static const int kAdmMaxGuidSize = 128;
+
+// Allowed playout buffer size range, in milliseconds.
+static const int kAdmMinPlayoutBufferSizeMs = 10;
+static const int kAdmMaxPlayoutBufferSizeMs = 250;
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceObserver
+// ----------------------------------------------------------------------------
+
+// Callback interface through which an audio device implementation reports
+// runtime error and warning conditions to its owner. Implementers keep
+// ownership of the observer object (destructor is protected).
+class AudioDeviceObserver
+{
+public:
+    // Error conditions, split by direction (capture vs. render).
+    enum ErrorCode
+    {
+        kRecordingError = 0,
+        kPlayoutError = 1
+    };
+    // Non-fatal warning conditions, split by direction.
+    enum WarningCode
+    {
+        kRecordingWarning = 0,
+        kPlayoutWarning = 1
+    };
+
+    // Invoked when an error condition is reported by the device.
+    virtual void OnErrorIsReported(const ErrorCode error) = 0;
+    // Invoked when a warning condition is reported by the device.
+    virtual void OnWarningIsReported(const WarningCode warning) = 0;
+
+protected:
+    // Protected, non-deleting: the module never destroys the observer.
+    virtual ~AudioDeviceObserver() {}
+};
+
+// ----------------------------------------------------------------------------
+//  AudioTransport
+// ----------------------------------------------------------------------------
+
+// Transport interface between the audio device and the rest of the engine:
+// the device pushes captured audio up via RecordedDataIsAvailable() and
+// pulls audio to render via NeedMorePlayData().
+class AudioTransport
+{
+public:
+    // Delivers nSamples captured samples (nBytesPerSample bytes each,
+    // nChannels interleaved channels at samplesPerSec). totalDelayMS is the
+    // combined capture+render delay, clockDrift the measured drift, and
+    // currentMicLevel the current mic volume; the callee may request a new
+    // mic volume through newMicLevel (used for AGC — presumably in the same
+    // units as currentMicLevel; confirm against implementers).
+    virtual int32_t RecordedDataIsAvailable(const char* audioSamples,
+                                            const uint32_t nSamples,
+                                            const uint8_t nBytesPerSample,
+                                            const uint8_t nChannels,
+                                            const uint32_t samplesPerSec,
+                                            const uint32_t totalDelayMS,
+                                            const int32_t clockDrift,
+                                            const uint32_t currentMicLevel,
+                                            uint32_t& newMicLevel) = 0;
+
+    // Requests nSamples samples for playout into audioSamples; the callee
+    // reports how many samples it actually produced via nSamplesOut.
+    virtual int32_t NeedMorePlayData(const uint32_t nSamples,
+                                     const uint8_t nBytesPerSample,
+                                     const uint8_t nChannels,
+                                     const uint32_t samplesPerSec,
+                                     char* audioSamples,
+                                     uint32_t& nSamplesOut) = 0;
+
+protected:
+    // Protected, non-deleting: the device never destroys the transport.
+    virtual ~AudioTransport() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DEFINES_H
diff --git a/trunk/src/modules/audio_device/main/source/Android.mk b/trunk/src/modules/audio_device/main/source/Android.mk
new file mode 100644
index 0000000..affa5e1
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/Android.mk
@@ -0,0 +1,57 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Builds the static library libwebrtc_audio_device for Android
+# (OpenSL ES backend plus dummy fallbacks).
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings (source of MY_WEBRTC_COMMON_DEFS
+# used below).
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_audio_device
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+# Platform-independent core plus the OpenSL ES backend; the dummy backend
+# is compiled in as well (selection happens at runtime/compile-time in
+# audio_device_impl.cc).
+LOCAL_SRC_FILES := \
+    audio_device_buffer.cc \
+    audio_device_generic.cc \
+    audio_device_utility.cc \
+    audio_device_impl.cc \
+    android/audio_device_android_opensles.cc \
+    android/audio_device_utility_android.cc \
+    dummy/audio_device_utility_dummy.cc \
+    dummy/audio_device_dummy.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+   '-DWEBRTC_ANDROID_OPENSLES'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH) \
+    $(LOCAL_PATH)/android \
+    $(LOCAL_PATH)/dummy \
+    $(LOCAL_PATH)/linux \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../../common_audio/resampler/include \
+    $(LOCAL_PATH)/../../../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface \
+    system/media/wilhelm/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport \
+    libOpenSLES
+
+# In a full-platform (non-NDK) build, pick up the in-tree STLport config.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_device/main/source/android/audio_device_android_jni.cc b/trunk/src/modules/audio_device/main/source/android/audio_device_android_jni.cc
new file mode 100644
index 0000000..c275171
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/audio_device_android_jni.cc
@@ -0,0 +1,2920 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device implementation (JNI/AudioTrack/AudioRecord usage)
+ */
+
+// TODO(xians): Break out attach and detach current thread to JVM to
+// separate functions.
+
+#include <stdlib.h>
+#include "audio_device_utility.h"
+#include "audio_device_android_jni.h"
+#include "audio_device_config.h"
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+// Android logging, uncomment to print trace to logcat instead of
+// trace file/callback
+//#include <android/log.h>
+//#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, \
+//    "WebRTC AD jni", __VA_ARGS__)
+
+namespace webrtc
+{
+
+JavaVM* globalJvm = NULL;
+JNIEnv* globalJNIEnv = NULL;
+jobject globalSndContext = NULL;
+jclass globalScClass = NULL;
+
+// ----------------------------------------------------------------------------
+//  SetAndroidAudioDeviceObjects
+//
+//  Global function for setting Java pointers and creating Java
+//  objects that are global to all instances of VoiceEngine used
+//  by the same Java application.
+// ----------------------------------------------------------------------------
+
+// Installs (env != NULL) or tears down (env == NULL) the process-wide Java
+// pointers shared by all VoiceEngine instances: the JavaVM, the Android
+// context, and a global ref to the AudioDeviceAndroid helper class.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
+                                           void* context)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1, "%s",
+                 __FUNCTION__);
+
+    globalJvm = (JavaVM*) javaVM;
+    globalSndContext = (jobject) context;
+
+    if (env)
+    {
+        globalJNIEnv = (JNIEnv *) env;
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
+                     "%s: will find class", __FUNCTION__);
+
+        // get java class type (note path to class packet)
+        jclass
+                javaScClassLocal =
+                        globalJNIEnv->FindClass(
+                                "org/webrtc/voiceengine/AudioDeviceAndroid");
+        if (!javaScClassLocal)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+                         "%s: could not find java class", __FUNCTION__);
+            return -1; /* exception thrown */
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
+                     "%s: will create global reference", __FUNCTION__);
+
+        // create a global reference to the class (to tell JNI that we are
+        // referencing it after this function has returned)
+        globalScClass
+                = reinterpret_cast<jclass> (globalJNIEnv->NewGlobalRef(
+                        javaScClassLocal));
+        if (!globalScClass)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+                         "%s: could not create reference", __FUNCTION__);
+            return -1;
+        }
+
+        // Delete local class ref, we only use the global ref
+        globalJNIEnv->DeleteLocalRef(javaScClassLocal);
+    }
+    else // User is resetting the env variable
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                     "%s: env is NULL, assuming deinit", __FUNCTION__);
+
+        if (!globalJNIEnv)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
+                         "%s: saved env already NULL", __FUNCTION__);
+            return 0;
+        }
+
+        globalJNIEnv->DeleteGlobalRef(globalScClass);
+        // Clear the now-dangling class pointer so a later init/use cannot
+        // dereference a deleted global reference.
+        globalScClass = NULL;
+        globalJNIEnv = (JNIEnv *) NULL;
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceAndroidJni - ctor
+// ----------------------------------------------------------------------------
+
+// Constructor: initializes all state to "not initialized / not running".
+// The heap-allocated critical section and the four events are owned by
+// this object and released (via delete &ref) in the destructor. Threads
+// and Java resources are created later, in Init().
+AudioDeviceAndroidJni::AudioDeviceAndroidJni(const WebRtc_Word32 id) :
+            _ptrAudioBuffer(NULL),
+            _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+            _id(id),
+            _timeEventRec(*EventWrapper::Create()),
+            _timeEventPlay(*EventWrapper::Create()),
+            _recStartStopEvent(*EventWrapper::Create()),
+            _playStartStopEvent(*EventWrapper::Create()),
+            _ptrThreadPlay(NULL),
+            _ptrThreadRec(NULL),
+            _recThreadID(0),
+            _playThreadID(0),
+            _playThreadIsInitialized(false),
+            _recThreadIsInitialized(false),
+            _shutdownPlayThread(false),
+            _shutdownRecThread(false),
+            //    _recBuffer[2*REC_BUF_SIZE_IN_SAMPLES]
+            _recordingDeviceIsSpecified(false),
+            _playoutDeviceIsSpecified(false), _initialized(false),
+            _recording(false), _playing(false), _recIsInitialized(false),
+            _playIsInitialized(false), _micIsInitialized(false),
+            _speakerIsInitialized(false), _startRec(false), _stopRec(false),
+            _startPlay(false), _stopPlay(false), _playWarning(0),
+            _playError(0), _recWarning(0), _recError(0), _delayPlayout(0),
+            _delayRecording(0),
+            _AGC(false),
+            _samplingFreqIn(0),
+            _samplingFreqOut(0),
+            _maxSpeakerVolume(0),
+            _loudSpeakerOn(false),
+            _recAudioSource(1), // 1 is AudioSource.MIC which is our default
+            _javaVM(NULL), _javaContext(NULL), _jniEnvPlay(NULL),
+            _jniEnvRec(NULL), _javaScClass(0), _javaScObj(0),
+            _javaPlayBuffer(0), _javaRecBuffer(0), _javaDirectPlayBuffer(NULL),
+            _javaDirectRecBuffer(NULL), _javaMidPlayAudio(0),
+            _javaMidRecAudio(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+
+    // Zero the raw capture buffer so the first callback never sees garbage.
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceAndroidJni - dtor
+// ----------------------------------------------------------------------------
+
+// Destructor: shuts everything down via Terminate(), then releases the
+// events and the critical section owned since construction (members are
+// references, hence the delete-of-address idiom).
+AudioDeviceAndroidJni::~AudioDeviceAndroidJni()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    delete &_recStartStopEvent;
+    delete &_playStartStopEvent;
+    delete &_timeEventRec;
+    delete &_timeEventPlay;
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+// Stores the engine-owned AudioDeviceBuffer and pushes this platform's
+// default sample rates and channel counts into it.
+// NOTE(review): audioBuffer is dereferenced unconditionally — callers are
+// expected to pass a non-NULL buffer.
+void AudioDeviceAndroidJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+// Reports the audio layer in use; this backend always identifies itself
+// as the platform default. Always returns 0.
+WebRtc_Word32 AudioDeviceAndroidJni::ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const
+{
+
+    audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+// One-time initialization: sets up the JNI bridge to the Java
+// AudioDeviceAndroid class, queries sample rates, and starts the capture
+// and render worker threads. Idempotent (returns 0 if already
+// initialized). Returns -1 on any failure.
+WebRtc_Word32 AudioDeviceAndroidJni::Init()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Init Java member variables
+    // and set up JNI interface to
+    // AudioDeviceAndroid java class
+    if (InitJavaResources() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Failed to init Java resources", __FUNCTION__);
+        return -1;
+    }
+
+    // Check the sample rate to be used for playback and recording
+    // and the max playout volume
+    if (InitSampleRate() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Failed to init samplerate", __FUNCTION__);
+        return -1;
+    }
+
+    // RECORDING
+    const char* threadName = "webrtc_jni_audio_capture_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
+                                                kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the rec audio thread");
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+
+    // PLAYOUT
+    // NOTE(review): if either play-thread step below fails, the rec thread
+    // started above keeps running and is never reclaimed — _initialized is
+    // still false, so Terminate() early-returns. Consider shutting down the
+    // rec thread on these failure paths.
+    threadName = "webrtc_jni_audio_render_thread";
+    _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
+                                                 kRealtimePriority, threadName);
+    if (_ptrThreadPlay == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the play audio thread");
+        return -1;
+    }
+
+    threadID = 0;
+    if (!_ptrThreadPlay->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the play audio thread");
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        return -1;
+    }
+    _playThreadID = threadID;
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+// Tears down everything Init() created: stops recording/playout, shuts
+// down both worker threads (waiting for each to detach itself from the
+// JVM first), and releases all Java global references. No-op (returns 0)
+// if not initialized; returns -1 if a worker thread fails to shut down
+// within its 5 s timeout.
+WebRtc_Word32 AudioDeviceAndroidJni::Terminate()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    // RECORDING
+    StopRecording();
+    _shutdownRecThread = true;
+    _timeEventRec.Set(); // Release rec thread from waiting state
+    if (_ptrThreadRec)
+    {
+        // First, the thread must detach itself from Java VM
+        // (drop the lock while waiting so the rec thread can make progress).
+        _critSect.Leave();
+        if (kEventSignaled != _recStartStopEvent.Wait(5000))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceAudioDevice,
+                         _id,
+                         "%s: Recording thread shutdown timed out, cannot "
+                         "terminate thread",
+                         __FUNCTION__);
+            // If we close thread anyway, the app will crash
+            // NOTE(review): this early return leaves the lock released
+            // (CriticalSectionScoped will still Leave() on destruction).
+            return -1;
+        }
+        _recStartStopEvent.Reset();
+        _critSect.Enter();
+
+        // Close down rec thread
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        _critSect.Leave();
+        tmpThread->SetNotAlive();
+        // Release again, we might have returned to waiting state
+        _timeEventRec.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+            _jniEnvRec = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the rec audio thread");
+        }
+        _critSect.Enter();
+
+        _recThreadIsInitialized = false;
+    }
+    _micIsInitialized = false;
+    _recordingDeviceIsSpecified = false;
+
+    // PLAYOUT (mirrors the recording shutdown sequence above)
+    StopPlayout();
+    _shutdownPlayThread = true;
+    _timeEventPlay.Set(); // Release play thread from waiting state
+    if (_ptrThreadPlay)
+    {
+        // First, the thread must detach itself from Java VM
+        _critSect.Leave();
+        if (kEventSignaled != _playStartStopEvent.Wait(5000))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceAudioDevice,
+                         _id,
+                         "%s: Playout thread shutdown timed out, cannot "
+                         "terminate thread",
+                         __FUNCTION__);
+            // If we close thread anyway, the app will crash
+            return -1;
+        }
+        _playStartStopEvent.Reset();
+        _critSect.Enter();
+
+        // Close down play thread
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        _critSect.Leave();
+        tmpThread->SetNotAlive();
+        // Release again, we might have returned to waiting state
+        _timeEventPlay.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+            _jniEnvPlay = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the play audio thread");
+        }
+        _critSect.Enter();
+
+        _playThreadIsInitialized = false;
+    }
+    _speakerIsInitialized = false;
+    _playoutDeviceIsSpecified = false;
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // Make method IDs and buffer pointers unusable
+    _javaMidPlayAudio = 0;
+    _javaMidRecAudio = 0;
+    _javaDirectPlayBuffer = NULL;
+    _javaDirectRecBuffer = NULL;
+
+    // Delete the references to the java buffers, this allows the
+    // garbage collector to delete them
+    env->DeleteGlobalRef(_javaPlayBuffer);
+    _javaPlayBuffer = 0;
+    env->DeleteGlobalRef(_javaRecBuffer);
+    _javaRecBuffer = 0;
+
+    // Delete the references to the java object and class, this allows the
+    // garbage collector to delete them
+    env->DeleteGlobalRef(_javaScObj);
+    _javaScObj = 0;
+    _javaScClass = 0;
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    _initialized = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+// Returns true once Init() has completed successfully (and until
+// Terminate() runs).
+bool AudioDeviceAndroidJni::Initialized() const
+{
+
+    return (_initialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+// Reports speaker availability; always true on this platform.
+// Always returns 0.
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerIsAvailable(bool& available)
+{
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+// Marks the speaker as initialized. There is no device-level work on this
+// platform; the flag exists for API parity with other backends. Fails if
+// playout is already running or no playout device has been specified.
+WebRtc_Word32 AudioDeviceAndroidJni::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+// Reports microphone availability; always true on this platform.
+// Always returns 0.
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneIsAvailable(bool& available)
+{
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+// Marks the microphone as initialized (flag-only, mirroring InitSpeaker).
+// Fails if recording is already running or no recording device has been
+// specified.
+WebRtc_Word32 AudioDeviceAndroidJni::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _micIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+// True after a successful InitSpeaker() (cleared by Terminate()).
+bool AudioDeviceAndroidJni::SpeakerIsInitialized() const
+{
+
+    return _speakerIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+// True after a successful InitMicrophone() (cleared by Terminate()).
+bool AudioDeviceAndroidJni::MicrophoneIsInitialized() const
+{
+
+    return _micIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+// Speaker volume control is always reported as available on this platform.
+// Always returns 0.
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    available = true; // We assume we are always able to set/get the volume
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Forwards the requested playout volume to the Java helper
+// (AudioDeviceAndroid.SetPlayoutVolume). Requires InitSpeaker() and a
+// Java context. Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioDeviceAndroidJni::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+    if (!_javaContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Context is not set");
+        return -1;
+    }
+
+    // get the JNI env for this thread, attaching it to the JVM if needed
+    JNIEnv *env;
+    bool isAttached = false;
+
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // Look up the java method. Calling through a NULL jmethodID is
+    // undefined behavior, so verify the lookup before invoking (the
+    // previous code skipped this check).
+    jint res = -1;
+    jmethodID setPlayoutVolumeID = env->GetMethodID(_javaScClass,
+                                                    "SetPlayoutVolume", "(I)I");
+    if (setPlayoutVolumeID)
+    {
+        // call java sc object method
+        res = env->CallIntMethod(_javaScObj, setPlayoutVolumeID,
+                                 static_cast<int> (volume));
+        if (res < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "SetPlayoutVolume failed (%d)", res);
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Could not find method SetPlayoutVolume");
+    }
+
+    // Always detach before returning; the previous code returned early on
+    // failure and left this thread attached to the JVM.
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return (res < 0) ? -1 : 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Reads the current playout volume from the Java helper
+// (AudioDeviceAndroid.GetPlayoutVolume) into |volume|. Requires
+// InitSpeaker() and a Java context. Returns 0 on success, -1 on failure
+// (|volume| is left untouched on failure).
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+    if (!_javaContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Context is not set");
+        return -1;
+    }
+
+    // get the JNI env for this thread, attaching it to the JVM if needed
+    JNIEnv *env;
+    bool isAttached = false;
+
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // Look up the java method. Calling through a NULL jmethodID is
+    // undefined behavior, so verify the lookup before invoking (the
+    // previous code skipped this check).
+    jint level = -1;
+    jmethodID getPlayoutVolumeID = env->GetMethodID(_javaScClass,
+                                                    "GetPlayoutVolume", "()I");
+    if (getPlayoutVolumeID)
+    {
+        // call java sc object method
+        level = env->CallIntMethod(_javaScObj, getPlayoutVolumeID);
+        if (level < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "GetPlayoutVolume failed (%d)", level);
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Could not find method GetPlayoutVolume");
+    }
+
+    // Always detach before returning; the previous code returned early on
+    // failure and left this thread attached to the JVM.
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    if (level < 0)
+    {
+        return -1;
+    }
+
+    volume = static_cast<WebRtc_UWord32> (level);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+// Wave-out volume is a Windows-specific concept; unsupported here.
+// Always returns -1.
+WebRtc_Word32 AudioDeviceAndroidJni::SetWaveOutVolume(
+    WebRtc_UWord16 /*volumeLeft*/,
+    WebRtc_UWord16 /*volumeRight*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+// Wave-out volume query; unsupported on this platform. Always returns -1.
+WebRtc_Word32 AudioDeviceAndroidJni::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Returns the maximum speaker volume, cached in _maxSpeakerVolume
+// (presumably filled in during Init()/InitSampleRate() — set elsewhere in
+// this file). Requires InitSpeaker().
+WebRtc_Word32 AudioDeviceAndroidJni::MaxSpeakerVolume(
+        WebRtc_UWord32& maxVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    maxVolume = _maxSpeakerVolume;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Minimum speaker volume is always 0 on this platform.
+// Requires InitSpeaker().
+WebRtc_Word32 AudioDeviceAndroidJni::MinSpeakerVolume(
+        WebRtc_UWord32& minVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    minVolume = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+// Volume adjustments are made in steps of 1 on this platform.
+// Requires InitSpeaker().
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerVolumeStepSize(
+        WebRtc_UWord16& stepSize) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    stepSize = 1;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerMuteIsAvailable(bool& available)
+{
+
+    available = false; // Speaker mute not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetSpeakerMute(bool /*enable*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerMute(bool& /*enabled*/) const
+{
+    // Speaker mute state cannot be queried on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneMuteIsAvailable(bool& available)
+{
+    // Android exposes no microphone-mute control, so report it unavailable.
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetMicrophoneMute(bool /*enable*/)
+{
+    // Microphone mute cannot be controlled on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneMute(bool& /*enabled*/) const
+{
+    // Microphone mute state cannot be queried on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneBoostIsAvailable(bool& available)
+{
+    // Microphone boost is not offered by the Android backend.
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetMicrophoneBoost(bool enable)
+{
+    // Boost is unsupported: only disabling it (a no-op) can succeed, and
+    // only after the microphone has been initialized.
+    if (!_micIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneBoost(bool& enabled) const
+{
+    // Boost is never active on this platform; reports false once the
+    // microphone has been initialized.
+    if (!_micIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+    enabled = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoRecordingIsAvailable(bool& available)
+{
+    // Capture is mono-only in this backend.
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+//
+//  Specifies the number of input channels.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetStereoRecording(bool enable)
+{
+    // Only mono capture exists; requesting stereo is an error, while
+    // requesting mono is accepted as a no-op.
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoRecording(bool& enabled) const
+{
+    // Capture is always mono.
+    enabled = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoPlayoutIsAvailable(bool& available)
+{
+    // Playout is mono-only in this backend.
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetStereoPlayout(bool enable)
+{
+    // Only mono playout exists; requesting stereo is an error, while
+    // requesting mono is accepted as a no-op.
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoPlayout(bool& enabled) const
+{
+    // Playout is always mono.
+    enabled = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetAGC(bool enable)
+{
+
+    // Stores the requested automatic-gain-control state; no device call
+    // is made here.
+    _AGC = enable;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::AGC() const
+{
+
+    // Returns the AGC flag last set through SetAGC().
+    return _AGC;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneVolumeIsAvailable(
+        bool& available)
+{
+    // Microphone volume control is not offered by the Android backend.
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetMicrophoneVolume(
+        WebRtc_UWord32 /*volume*/)
+{
+    // Microphone volume cannot be set on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneVolume(
+        WebRtc_UWord32& /*volume*/) const
+{
+    // Microphone volume cannot be read on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MaxMicrophoneVolume(
+        WebRtc_UWord32& /*maxVolume*/) const
+{
+    // No microphone volume range exists on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MinMicrophoneVolume(
+        WebRtc_UWord32& /*minVolume*/) const
+{
+    // No microphone volume range exists on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneVolumeStepSize(
+        WebRtc_UWord16& /*stepSize*/) const
+{
+    // No microphone volume control exists on Android; always fails.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceAndroidJni::PlayoutDevices()
+{
+
+    // Android always exposes exactly one playout device.
+    return 1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+    // Android has a single playout device; selecting it merely records
+    // that a device was chosen, for consistency with other platforms.
+    // Fails once playout has been initialized or for any index != 0.
+    if (_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return -1;
+    }
+    if (0 != index)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    _playoutDeviceIsSpecified = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    // Windows-style device selection has no meaning on Android.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize])
+{
+    // Only device 0 exists; its name (and GUID, when a buffer is given)
+    // are reported as empty strings.
+    if (0 != index)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    memset(name, 0, kAdmMaxDeviceNameSize);
+    if (guid)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize])
+{
+    // Only device 0 exists; its name (and GUID, when a buffer is given)
+    // are reported as empty strings.
+    if (0 != index)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    memset(name, 0, kAdmMaxDeviceNameSize);
+    if (guid)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceAndroidJni::RecordingDevices()
+{
+
+    // Android always exposes exactly one recording device.
+    return 1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetRecordingDevice(WebRtc_UWord16 index)
+{
+    // The index doubles as the recording audio source passed to Java, so
+    // any value is accepted; fails once recording has been initialized.
+    if (_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return -1;
+    }
+    _recAudioSource = index;
+    _recordingDeviceIsSpecified = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    // Windows-style device selection has no meaning on Android.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutIsAvailable(bool& available)
+{
+    // Probes playout support by attempting a full init/teardown cycle.
+    // Propagates InitPlayout()'s return value to the caller.
+    available = false;
+    const WebRtc_Word32 res = InitPlayout();
+    StopPlayout(); // Undo any state the probe created.
+    if (res != -1)
+    {
+        available = true;
+    }
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the recording side (the original comment said
+    // "playout" — this is a recording probe)
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitPlayout()
+{
+    // Initializes the Java-side playback path by calling InitPlayback(freq)
+    // on the Java sc object and configuring the audio buffer's playout
+    // sample rate. Requires Init() and SetPlayoutDevice() to have been
+    // called first. Returns 0 on success (or if already initialized),
+    // -1 on failure.
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Not initialized");
+        return -1;
+    }
+
+    // An active stream must be stopped before re-initializing.
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return 0;
+    }
+
+    // Initialize the speaker (failure is logged but not fatal)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // Attach this thread to the JVM if it does not already have an env
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "attaching");
+
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
+                                                "(I)I");
+
+    // _samplingFreqOut appears to hold the rate in kHz, with 44 denoting
+    // 44.1 kHz (for which 44 * 1000 would be wrong) — TODO(review): confirm
+    int samplingFreq = 44100;
+    if (_samplingFreqOut != 44)
+    {
+        samplingFreq = _samplingFreqOut * 1000;
+    }
+
+    int retVal = -1;
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "InitPlayback failed (%d)", res);
+    }
+    else
+    {
+        // Set the audio device buffer sampling rate
+        // NOTE(review): uses _samplingFreqOut * 1000 even in the 44 case
+        // (44000 vs. the 44100 passed to Java) — confirm this is intended
+        _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut * 1000);
+        _playIsInitialized = true;
+        retVal = 0;
+    }
+
+    // Detach this thread if it was attached above
+    if (isAttached)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "detaching");
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitRecording()
+{
+    // Initializes the Java-side recording path by calling
+    // InitRecording(source, freq) on the Java sc object, configuring the
+    // audio buffer's recording sample rate and caching the fixed recording
+    // delay. Requires Init() and SetRecordingDevice() to have been called
+    // first. Returns 0 on success (or if already initialized), -1 on
+    // failure.
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Not initialized");
+        return -1;
+    }
+
+    // An active capture session must be stopped before re-initializing.
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return 0;
+    }
+
+    // Initialize the microphone (failure is logged but not fatal)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // Attach this thread to the JVM if it does not already have an env
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
+                                                 "(II)I");
+
+    // _samplingFreqIn appears to hold the rate in kHz, with 44 denoting
+    // 44.1 kHz (for which 44 * 1000 would be wrong) — TODO(review): confirm
+    int samplingFreq = 44100;
+    if (_samplingFreqIn != 44)
+    {
+        samplingFreq = _samplingFreqIn * 1000;
+    }
+
+    int retVal = -1;
+
+    // call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
+                                  samplingFreq);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "InitRecording failed (%d)", res);
+    }
+    else
+    {
+        // Set the audio device buffer sampling rate
+        // NOTE(review): uses _samplingFreqIn * 1000 even in the 44 case
+        // (44000 vs. the 44100 passed to Java) — confirm this is intended
+        _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn * 1000);
+
+        // the init rec function returns a fixed delay; dividing by the kHz
+        // rate presumably converts samples to milliseconds — TODO(review):
+        // confirm, and note this assumes _samplingFreqIn is non-zero
+        _delayRecording = res / _samplingFreqIn;
+
+        _recIsInitialized = true;
+        retVal = 0;
+    }
+
+    // Detach this thread if it was attached above
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StartRecording()
+{
+    // Starts the Java-side recording (StartRecording on the sc object) and
+    // releases the native recording thread from its wait state, waiting up
+    // to 5 s for it to signal that it has started. Returns 0 on success,
+    // -1 on failure.
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording not initialized");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return 0;
+    }
+
+    // get the JNI env for this thread, attaching it to the JVM if needed
+    JNIEnv *env;
+    bool isAttached = false;
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID startRecordingID = env->GetMethodID(_javaScClass,
+                                                  "StartRecording", "()I");
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, startRecordingID);
+    WebRtc_Word32 retVal = 0;
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StartRecording failed (%d)", res);
+        // Fix: do not return here — fall through so a thread attached above
+        // is detached below (the early return leaked the JVM attachment).
+        retVal = -1;
+    }
+    else
+    {
+        _recWarning = 0;
+        _recError = 0;
+
+        // Signal to recording thread that we want to start
+        _startRec = true;
+        _timeEventRec.Set(); // Release thread from waiting state
+        _critSect.Leave();
+        // Wait for thread to init
+        if (kEventSignaled != _recStartStopEvent.Wait(5000))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Timeout or error starting");
+        }
+        _recStartStopEvent.Reset();
+        _critSect.Enter();
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StopRecording()
+{
+    // Tears down the Java recording path and resets all recording state.
+    // JNI failures are logged; the call itself always reports success.
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording is not initialized");
+        return 0;
+    }
+
+    // Cancel any pending asynchronous start; safe since we hold the lock.
+    _startRec = false;
+
+    // Obtain a JNI env for this thread, attaching to the JVM if needed.
+    JNIEnv* env;
+    bool attachedHere = false;
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        attachedHere = true;
+    }
+
+    // Invoke StopRecording() on the Java sc object.
+    jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording",
+                                                 "()I");
+    jint res = env->CallIntMethod(_javaScObj, stopRecordingID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StopRecording failed (%d)", res);
+    }
+
+    _recIsInitialized = false;
+    _recording = false;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Detach only if this function performed the attach.
+    if (attachedHere)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecordingIsInitialized() const
+{
+
+    // True once InitRecording() has completed successfully.
+    return _recIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::Recording() const
+{
+
+    // True while a recording session is active.
+    return _recording;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayoutIsInitialized() const
+{
+
+    // True once InitPlayout() has completed successfully.
+    return _playIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StartPlayout()
+{
+    // Starts the Java-side playback (StartPlayback on the sc object) and
+    // releases the native playout thread from its wait state, waiting up
+    // to 5 s for it to signal that it has started. Returns 0 on success,
+    // -1 on failure.
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout not initialized");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return 0;
+    }
+
+    // get the JNI env for this thread, attaching it to the JVM if needed
+    JNIEnv *env;
+    bool isAttached = false;
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID startPlaybackID = env->GetMethodID(_javaScClass, "StartPlayback",
+                                                 "()I");
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, startPlaybackID);
+    WebRtc_Word32 retVal = 0;
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StartPlayback failed (%d)", res);
+        // Fix: do not return here — fall through so a thread attached above
+        // is detached below (the early return leaked the JVM attachment).
+        retVal = -1;
+    }
+    else
+    {
+        _playWarning = 0;
+        _playError = 0;
+
+        // Signal to playout thread that we want to start
+        _startPlay = true;
+        _timeEventPlay.Set(); // Release thread from waiting state
+        _critSect.Leave();
+        // Wait for thread to init
+        if (kEventSignaled != _playStartStopEvent.Wait(5000))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Timeout or error starting");
+        }
+        _playStartStopEvent.Reset();
+        _critSect.Enter();
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StopPlayout()
+{
+    // Tears down the Java playback path and resets all playout state.
+    // JNI failures are logged; the call itself always reports success.
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout is not initialized");
+        return 0;
+    }
+
+    // Obtain a JNI env for this thread, attaching to the JVM if needed.
+    JNIEnv* env;
+    bool attachedHere = false;
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        attachedHere = true;
+    }
+
+    // Invoke StopPlayback() on the Java sc object.
+    jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback",
+                                                "()I");
+    jint res = env->CallIntMethod(_javaScObj, stopPlaybackID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StopPlayback failed (%d)", res);
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _playWarning = 0;
+    _playError = 0;
+
+    // Detach only if this function performed the attach.
+    if (attachedHere)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+//
+//    Remaining amount of data still in the playout buffer.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    // Returns the cached playout delay, in milliseconds.
+    delayMS = _delayPlayout;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+//
+//    Remaining amount of data still in the recording buffer.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::RecordingDelay(
+        WebRtc_UWord16& delayMS) const
+{
+    // Returns the cached recording delay (set in InitRecording()),
+    // in milliseconds.
+    delayMS = _delayRecording;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::Playing() const
+{
+
+    // True while a playout session is active.
+    return _playing;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType /*type*/,
+        WebRtc_UWord16 /*sizeMS*/)
+{
+    // Configuring the playout buffer is not supported on Android.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutBuffer(
+        AudioDeviceModule::BufferType& type,
+        WebRtc_UWord16& sizeMS) const
+{
+    // Reports the buffer as adaptive, using the current playout delay as
+    // its size.
+    type = AudioDeviceModule::kAdaptiveBufferSize;
+    sizeMS = _delayPlayout;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::CPULoad(WebRtc_UWord16& /*load*/) const
+{
+    // CPU-load reporting is not implemented for the Android backend.
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayoutWarning() const
+{
+    // A pending playout warning is indicated by a positive _playWarning.
+    return (_playWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayoutError() const
+{
+    // A playout error is pending when the flag is non-zero.
+    return _playError > 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecordingWarning() const
+{
+    // A recording warning is pending when the flag is non-zero.
+    return _recWarning > 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecordingError() const
+{
+    // A recording error is pending when the flag is non-zero.
+    return _recError > 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearPlayoutWarning()
+{
+    _playWarning = 0; // reset the pending-warning flag
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearPlayoutError()
+{
+    _playError = 0; // reset the pending-error flag
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearRecordingWarning()
+{
+    _recWarning = 0; // reset the pending-warning flag
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearRecordingError()
+{
+    _recError = 0; // reset the pending-error flag
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec)
+{
+    // Only rates within [8000, 48000] Hz are accepted.
+    if (samplesPerSec > 48000 || samplesPerSec < 8000)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Invalid sample rate");
+        return -1;
+    }
+
+    // Cache the rate in kHz; 44100 Hz is represented by the special value 44.
+    _samplingFreqIn = (samplesPerSec == 44100) ? 44 : (samplesPerSec / 1000);
+
+    // Propagate the new recording rate to the AudioDeviceBuffer.
+    _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec)
+{
+    // Only rates within [8000, 48000] Hz are accepted.
+    if (samplesPerSec > 48000 || samplesPerSec < 8000)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Invalid sample rate");
+        return -1;
+    }
+
+    // Cache the rate in kHz; 44100 Hz is represented by the special value 44.
+    _samplingFreqOut = (samplesPerSec == 44100) ? 44 : (samplesPerSec / 1000);
+
+    // Propagate the new playout rate to the AudioDeviceBuffer.
+    _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetLoudspeakerStatus(bool enable)
+{
+    // Routes playout to the loudspeaker (true) or earpiece (false) by calling
+    // the Java helper method SetPlayoutSpeaker(boolean). Requires that a
+    // Context was set, since the Java side needs it to reach AudioManager.
+    // Returns 0 on success, -1 on failure.
+    if (!_javaContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "  Context is not set");
+        return -1;
+    }
+
+    // Get the JNI env for this thread, attaching it to the JVM if necessary.
+    JNIEnv *env;
+    bool isAttached = false;
+
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // Attach this thread to the JVM and get the env
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    WebRtc_Word32 retVal = 0;
+
+    // Look up the Java method and verify the ID: calling CallIntMethod with a
+    // NULL jmethodID would crash natively (bug fix: was previously unchecked).
+    jmethodID setPlayoutSpeakerID = env->GetMethodID(_javaScClass,
+                                                     "SetPlayoutSpeaker",
+                                                     "(Z)I");
+    if (!setPlayoutSpeakerID)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "  Could not get method ID for SetPlayoutSpeaker");
+        retVal = -1;
+    }
+    else
+    {
+        // call java sc object method
+        jint res = env->CallIntMethod(_javaScObj, setPlayoutSpeakerID, enable);
+        if (res < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                         "  SetPlayoutSpeaker failed (%d)", res);
+            retVal = -1;
+        }
+        else
+        {
+            _loudSpeakerOn = enable;
+        }
+    }
+
+    // Always detach if we attached above — including on failure. The original
+    // code returned early on a failed Java call and leaked the attachment.
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  GetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::GetLoudspeakerStatus(bool& enabled) const
+{
+    // Report the cached loudspeaker routing state.
+    enabled = _loudSpeakerOn;
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+
+// ----------------------------------------------------------------------------
+//  InitJavaResources
+//
+//  Initializes needed Java resources like the JNI interface to
+//  AudioDeviceAndroid.java
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
+{
+    // Creates the Java AudioDeviceAndroid helper object and caches the JNI
+    // handles used by the audio threads: a global ref to the object, the
+    // direct play/rec ByteBuffer addresses, and the PlayAudio/RecordAudio
+    // method IDs. Returns 0 on success, -1 on any failure.
+    // NOTE(review): several error paths below return -1 without detaching a
+    // thread attached here (isAttached == true) — confirm this is acceptable.
+    // todo: Check if we already have created the java object
+    _javaVM = globalJvm;
+    _javaContext = globalSndContext;
+    _javaScClass = globalScClass;
+
+    // use the jvm that has been set
+    if (!_javaVM)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Not a valid Java VM pointer", __FUNCTION__);
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "get method id");
+
+    // get the method ID for the void(void) constructor
+    jmethodID cid = env->GetMethodID(_javaScClass, "<init>", "()V");
+    if (cid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get constructor ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+
+    // NOTE(review): the format string below has no specifier for the extra
+    // __FUNCTION__ argument — the argument is silently ignored.
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "construct object", __FUNCTION__);
+
+    // construct the object
+    jobject javaScObjLocal = env->NewObject(_javaScClass, cid);
+    if (!javaScObjLocal)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "%s: could not create Java sc object", __FUNCTION__);
+        return -1;
+    }
+
+    // create a reference to the object (to tell JNI that we are referencing it
+    // after this function has returned)
+    _javaScObj = env->NewGlobalRef(javaScObjLocal);
+    if (!_javaScObj)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not create Java sc object reference",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaScObjLocal);
+
+    //////////////////////
+    // AUDIO MANAGEMENT
+
+    // This is not mandatory functionality
+    if (_javaContext)
+    {
+        // Get Context field ID
+        jfieldID fidContext = env->GetFieldID(_javaScClass, "_context",
+                                              "Landroid/content/Context;");
+        if (!fidContext)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: could not get Context fid", __FUNCTION__);
+            return -1;
+        }
+
+        // Set the Java application Context so we can use AudioManager
+        // Get Context object and check it
+        jobject javaContext = (jobject) _javaContext;
+        env->SetObjectField(_javaScObj, fidContext, javaContext);
+        // Read the field back to verify the store took effect.
+        javaContext = env->GetObjectField(_javaScObj, fidContext);
+        if (!javaContext)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: could not set Context", __FUNCTION__);
+            return -1;
+        }
+
+        // Delete local object ref
+        env->DeleteLocalRef(javaContext);
+    }
+    else
+    {
+        WEBRTC_TRACE(
+                     kTraceWarning,
+                     kTraceAudioDevice,
+                     _id,
+                     "%s: did not set Context - some functionality is not "
+                     "supported",
+                     __FUNCTION__);
+    }
+
+    /////////////
+    // PLAYOUT
+
+    // Get play buffer field ID
+    jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer",
+                                             "Ljava/nio/ByteBuffer;");
+    if (!fidPlayBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play buffer fid", __FUNCTION__);
+        return -1;
+    }
+
+    // Get play buffer object
+    jobject javaPlayBufferLocal =
+            env->GetObjectField(_javaScObj, fidPlayBuffer);
+    if (!javaPlayBufferLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Create a global reference to the object (to tell JNI that we are
+    // referencing it after this function has returned)
+    // NOTE: we are referencing it only through the direct buffer (see below)
+    _javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal);
+    if (!_javaPlayBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play buffer reference", __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaPlayBufferLocal);
+
+    // Get direct buffer address; the playout thread memcpys into this.
+    _javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer);
+    if (!_javaDirectPlayBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get direct play buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Get the play audio method ID
+    _javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I");
+    if (!_javaMidPlayAudio)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play audio mid", __FUNCTION__);
+        return -1;
+    }
+
+    //////////////
+    // RECORDING
+
+    // Get rec buffer field ID
+    jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer",
+                                            "Ljava/nio/ByteBuffer;");
+    if (!fidRecBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec buffer fid", __FUNCTION__);
+        return -1;
+    }
+
+    // Get rec buffer object
+    jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer);
+    if (!javaRecBufferLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Create a global reference to the object (to tell JNI that we are
+    // referencing it after this function has returned)
+    // NOTE: we are referencing it only through the direct buffer (see below)
+    _javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal);
+    if (!_javaRecBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec buffer reference", __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaRecBufferLocal);
+
+    // Get direct buffer address; the recording thread memcpys out of this.
+    _javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer);
+    if (!_javaDirectRecBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get direct rec buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Get the rec audio method ID
+    _javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I");
+    if (!_javaMidRecAudio)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec audio mid", __FUNCTION__);
+        return -1;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSampleRate
+//
+//  Checks supported sample rates for playback and recording and
+//  initializes the rates to be used.
+//  Also stores the max playout volume returned from InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitSampleRate()
+{
+    // Probes the Java layer for a working recording rate (44100 -> 16000 ->
+    // 8000) and then a working playback rate, storing the results in
+    // _samplingFreqIn/_samplingFreqOut (kHz, with 44 meaning 44100 Hz).
+    // Also captures the max speaker volume returned by InitPlayback.
+    // NOTE(review): error returns below exit without detaching a thread
+    // attached here (isAttached == true) — confirm this is acceptable.
+    int samplingFreq = 44100;
+    jint res = 0;
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        // NOTE(review): this inner 'res' shadows the outer 'res' declared
+        // above — intentional here, but easy to misread.
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // If a recording rate was configured earlier, start probing from it
+    // instead of the 44100 default.
+    if (_samplingFreqIn > 0)
+    {
+        // read the configured sampling rate
+        samplingFreq = 44100;
+        if (_samplingFreqIn != 44)
+        {
+            samplingFreq = _samplingFreqIn * 1000;
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "  Trying configured recording sampling rate %d",
+                     samplingFreq);
+    }
+
+    // get the method ID
+    jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
+                                                 "(II)I");
+
+    // Try rates in descending order until InitRecording succeeds;
+    // fail if even 8000 Hz is rejected.
+    bool keepTrying = true;
+    while (keepTrying)
+    {
+        // call java sc object method
+        res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
+                                 samplingFreq);
+        if (res < 0)
+        {
+            switch (samplingFreq)
+            {
+                case 44100:
+                    samplingFreq = 16000;
+                    break;
+                case 16000:
+                    samplingFreq = 8000;
+                    break;
+                default: // error
+                    WEBRTC_TRACE(kTraceError,
+                                 kTraceAudioDevice, _id,
+                                 "%s: InitRecording failed (%d)", __FUNCTION__,
+                                 res);
+                    return -1;
+            }
+        }
+        else
+        {
+            keepTrying = false;
+        }
+    }
+
+    // set the recording sample rate to use (kHz; 44 means 44100 Hz)
+    if (samplingFreq == 44100)
+    {
+        _samplingFreqIn = 44;
+    }
+    else
+    {
+        _samplingFreqIn = samplingFreq / 1000;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "Recording sample rate set to (%d)", _samplingFreqIn);
+
+    // Recording was only opened to probe the rate; close it again.
+    // get the method ID
+    jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording",
+                                                 "()I");
+
+    // Call java sc object method
+    res = env->CallIntMethod(_javaScObj, stopRecordingID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "StopRecording failed (%d)", res);
+    }
+
+    // get the method ID
+    jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
+                                                "(I)I");
+
+    if (_samplingFreqOut > 0)
+    {
+        // read the configured sampling rate
+        samplingFreq = 44100;
+        if (_samplingFreqOut != 44)
+        {
+            samplingFreq = _samplingFreqOut * 1000;
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "  Trying configured playback sampling rate %d",
+                     samplingFreq);
+    }
+    else
+    {
+        // set the preferred sampling frequency
+        if (samplingFreq == 8000)
+        {
+            // try 16000
+            samplingFreq = 16000;
+        }
+        // else use same as recording
+    }
+
+    // Same descending probe loop as recording, now for playback.
+    keepTrying = true;
+    while (keepTrying)
+    {
+        // call java sc object method
+        res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
+        if (res < 0)
+        {
+            switch (samplingFreq)
+            {
+                case 44100:
+                    samplingFreq = 16000;
+                    break;
+                case 16000:
+                    samplingFreq = 8000;
+                    break;
+                default: // error
+                    WEBRTC_TRACE(kTraceError,
+                                 kTraceAudioDevice, _id,
+                                 "InitPlayback failed (%d)", res);
+                    return -1;
+            }
+        }
+        else
+        {
+            keepTrying = false;
+        }
+    }
+
+    // Store max playout volume (InitPlayback returns it on success)
+    _maxSpeakerVolume = static_cast<WebRtc_UWord32> (res);
+    if (_maxSpeakerVolume < 1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Did not get valid max speaker volume value (%d)",
+                     _maxSpeakerVolume);
+    }
+
+    // set the playback sample rate to use (kHz; 44 means 44100 Hz)
+    if (samplingFreq == 44100)
+    {
+        _samplingFreqOut = 44;
+    }
+    else
+    {
+        _samplingFreqOut = samplingFreq / 1000;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "Playback sample rate set to (%d)", _samplingFreqOut);
+
+    // Playback was only opened to probe the rate; close it again.
+    // get the method ID
+    jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback",
+                                                "()I");
+
+    // Call java sc object method
+    res = env->CallIntMethod(_javaScObj, stopPlaybackID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StopPlayback failed (%d)", res);
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  PlayThreadFunc
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayThreadFunc(void* pThis)
+{
+    // Static trampoline: forward to the member playout loop.
+    AudioDeviceAndroidJni* device = static_cast<AudioDeviceAndroidJni*>(pThis);
+    return device->PlayThreadProcess();
+}
+
+// ----------------------------------------------------------------------------
+//  RecThreadFunc
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecThreadFunc(void* pThis)
+{
+    // Static trampoline: forward to the member recording loop.
+    AudioDeviceAndroidJni* device = static_cast<AudioDeviceAndroidJni*>(pThis);
+    return device->RecThreadProcess();
+}
+
+// ----------------------------------------------------------------------------
+//  PlayThreadProcess
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayThreadProcess()
+{
+    // One iteration of the playout thread loop. Returning true keeps the
+    // thread running; returning false shuts it down. Attaches the thread to
+    // the JVM on first entry, pulls 10 ms of PCM from the AudioDeviceBuffer,
+    // copies it into the Java direct buffer and hands it to PlayAudio.
+    if (!_playThreadIsInitialized)
+    {
+        // Do once when thread is started
+
+        // Attach this thread to JVM and get the JNI env for this thread
+        jint res = _javaVM->AttachCurrentThread(&_jniEnvPlay, NULL);
+        if ((res < 0) || !_jniEnvPlay)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id,
+                         "Could not attach playout thread to JVM (%d, %p)",
+                         res, _jniEnvPlay);
+            return false; // Close down thread
+        }
+
+        _playThreadIsInitialized = true;
+    }
+
+    // While not playing, park on the timer event instead of spinning.
+    if (!_playing)
+    {
+        switch (_timeEventPlay.Wait(1000))
+        {
+            case kEventSignaled:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Playout thread event signal");
+                _timeEventPlay.Reset();
+                break;
+            case kEventError:
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "Playout thread event error");
+                return true;
+            case kEventTimeout:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Playout thread event timeout");
+                return true;
+        }
+    }
+
+    Lock();
+
+    // First iteration after StartPlayout(): flip state and signal the waiter.
+    if (_startPlay)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startPlay true, performing initial actions");
+        _startPlay = false;
+        _playing = true;
+        _playWarning = 0;
+        _playError = 0;
+        _playStartStopEvent.Set();
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Sent signal");
+    }
+
+    if (_playing)
+    {
+        WebRtc_Word8 playBuffer[2 * 480]; // Max 10 ms @ 48 kHz / 16 bit
+        WebRtc_UWord32 samplesToPlay = _samplingFreqOut * 10;
+
+        // ask for new PCM data to be played out using the AudioDeviceBuffer
+        // ensure that this callback is executed without taking the
+        // audio-thread lock
+        UnLock();
+        WebRtc_UWord32 nSamples =
+                _ptrAudioBuffer->RequestPlayoutData(samplesToPlay);
+        Lock();
+
+        // Check again since play may have stopped during unlocked period
+        if (!_playing)
+        {
+            UnLock();
+            return true;
+        }
+
+        nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        if (nSamples != samplesToPlay)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  invalid number of output samples(%d)", nSamples);
+            _playWarning = 1;
+        }
+
+        // Copy data to our direct buffer (held by java sc object)
+        // NOTE(review): playBuffer holds 480 samples; memcpy proceeds even
+        // when nSamples != samplesToPlay — confirm nSamples can never
+        // exceed 480 here.
+        // todo: Give _javaDirectPlayBuffer directly to VoE?
+        memcpy(_javaDirectPlayBuffer, playBuffer, nSamples * 2);
+
+        UnLock();
+
+        // Call java sc object method to process data in direct buffer
+        // Will block until data has been put in OS playout buffer
+        // (see java sc class)
+        jint res = _jniEnvPlay->CallIntMethod(_javaScObj, _javaMidPlayAudio,
+                                              2 * nSamples);
+        if (res < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "PlayAudio failed (%d)", res);
+            _playWarning = 1;
+        }
+        else if (res > 0)
+        {
+            // we are not recording and have got a delay value from playback
+            // (PlayAudio returns the delay in samples; convert to ms)
+            _delayPlayout = res / _samplingFreqOut;
+        }
+        // If 0 is returned we are recording and then play delay is updated
+        // in RecordProcess
+
+        Lock();
+
+    } // _playing
+
+    // Shutdown handshake with Terminate(): detach from the JVM and signal.
+    if (_shutdownPlayThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Detaching thread from Java VM");
+
+        // Detach thread from Java VM
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, "Could not detach playout thread from JVM");
+            _shutdownPlayThread = false;
+            // If we say OK (i.e. set event) and close thread anyway,
+            // app will crash
+        }
+        else
+        {
+            _jniEnvPlay = NULL;
+            _shutdownPlayThread = false;
+            _playStartStopEvent.Set(); // Signal to Terminate() that we are done
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "Sent signal");
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  RecThreadProcess
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecThreadProcess()
+{
+    // One iteration of the recording thread loop. Returning true keeps the
+    // thread running; returning false shuts it down. Attaches the thread to
+    // the JVM on first entry, blocks in Java RecordAudio for 10 ms of PCM,
+    // then copies it from the direct buffer and delivers it to the
+    // AudioDeviceBuffer.
+    if (!_recThreadIsInitialized)
+    {
+        // Do once when thread is started
+
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&_jniEnvRec, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !_jniEnvRec)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, "Could not attach rec thread to JVM (%d, %p)",
+                         res, _jniEnvRec);
+            return false; // Close down thread
+        }
+
+        _recThreadIsInitialized = true;
+    }
+
+    // just sleep if rec has not started
+    if (!_recording)
+    {
+        switch (_timeEventRec.Wait(1000))
+        {
+            case kEventSignaled:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Recording thread event signal");
+                _timeEventRec.Reset();
+                break;
+            case kEventError:
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "Recording thread event error");
+                return true;
+            case kEventTimeout:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Recording thread event timeout");
+                return true;
+        }
+    }
+
+    Lock();
+
+    // First iteration after StartRecording(): flip state, signal the waiter.
+    if (_startRec)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startRec true, performing initial actions");
+        _startRec = false;
+        _recording = true;
+        _recWarning = 0;
+        _recError = 0;
+        _recStartStopEvent.Set();
+    }
+
+    if (_recording)
+    {
+        WebRtc_UWord32 samplesToRec = _samplingFreqIn * 10;
+
+        // Call java sc object method to record data to direct buffer
+        // Will block until data has been recorded (see java sc class),
+        // therefore we must release the lock
+        UnLock();
+        jint playDelayInSamples = _jniEnvRec->CallIntMethod(_javaScObj,
+                                                            _javaMidRecAudio,
+                                                            2 * samplesToRec);
+        if (playDelayInSamples < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "RecordAudio failed");
+            _recWarning = 1;
+        }
+        else
+        {
+            // RecordAudio returns the playout delay in samples; convert to ms.
+            _delayPlayout = playDelayInSamples / _samplingFreqOut;
+        }
+        Lock();
+
+        // Check again since recording may have stopped during Java call
+        if (_recording)
+        {
+//            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+//                         "total delay is %d", msPlayDelay + _delayRecording);
+
+            // Copy data to our direct buffer (held by java sc object)
+            // todo: Give _javaDirectRecBuffer directly to VoE?
+            // todo: Check count <= 480 ?
+            memcpy(_recBuffer, _javaDirectRecBuffer, 2 * samplesToRec);
+
+            // store the recorded buffer (no action will be taken if the
+            // #recorded samples is not a full buffer)
+            _ptrAudioBuffer->SetRecordedBuffer(_recBuffer, samplesToRec);
+
+            // store vqe delay values
+            _ptrAudioBuffer->SetVQEData(_delayPlayout, _delayRecording, 0);
+
+            // deliver recorded samples at specified sample rate, mic level
+            // etc. to the observer using callback
+            UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            Lock();
+        }
+
+    } // _recording
+
+    // Shutdown handshake with Terminate(): detach from the JVM and signal.
+    if (_shutdownRecThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Detaching rec thread from Java VM");
+
+        // Detach thread from Java VM
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, "Could not detach recording thread from JVM");
+            _shutdownRecThread = false;
+            // If we say OK (i.e. set event) and close thread anyway,
+            // app will crash
+        }
+        else
+        {
+            _jniEnvRec = NULL;
+            _shutdownRecThread = false;
+            _recStartStopEvent.Set(); // Signal to Terminate() that we are done
+
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "Sent signal rec");
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/android/audio_device_android_jni.h b/trunk/src/modules/audio_device/main/source/android/audio_device_android_jni.h
new file mode 100644
index 0000000..e127e26
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/audio_device_android_jni.h
@@ -0,0 +1,268 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device interface (JNI/AudioTrack/AudioRecord usage)
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_JNI_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_JNI_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+
+#include <jni.h> // For accessing AudioDeviceAndroid java class
+
+namespace webrtc
+{
+class EventWrapper;
+
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 44000; // Default is 44.1 kHz; NOTE(review): value is 44000, not 44100 — confirm intent
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 44000; // Default is 44.1 kHz; NOTE(review): value is 44000, not 44100 — confirm intent
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
+
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+
+
+WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
+                                           void* context);
+
+class ThreadWrapper;
+
+class AudioDeviceAndroidJni: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceAndroidJni(const WebRtc_Word32 id);
+    ~AudioDeviceAndroidJni();
+
+    virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize)
+        const;
+
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+    virtual WebRtc_Word32 SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+
+private:
+    // Lock
+    void Lock()
+    {
+        _critSect.Enter();
+    };
+    void UnLock()
+    {
+        _critSect.Leave();
+    };
+
+    // Init
+    WebRtc_Word32 InitJavaResources();
+    WebRtc_Word32 InitSampleRate();
+
+    // Threads
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+    // Misc
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+
+    // Events
+    EventWrapper& _timeEventRec;
+    EventWrapper& _timeEventPlay;
+    EventWrapper& _recStartStopEvent;
+    EventWrapper& _playStartStopEvent;
+
+    // Threads
+    ThreadWrapper* _ptrThreadPlay;
+    ThreadWrapper* _ptrThreadRec;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+    bool _playThreadIsInitialized;
+    bool _recThreadIsInitialized;
+    bool _shutdownPlayThread;
+    bool _shutdownRecThread;
+
+    // Rec buffer
+    WebRtc_Word8 _recBuffer[2 * REC_BUF_SIZE_IN_SAMPLES];
+
+    // States
+    bool _recordingDeviceIsSpecified;
+    bool _playoutDeviceIsSpecified;
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _micIsInitialized;
+    bool _speakerIsInitialized;
+
+    // Signal flags to threads
+    bool _startRec;
+    bool _stopRec;
+    bool _startPlay;
+    bool _stopPlay;
+
+    // Warnings and errors
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    // Delay
+    WebRtc_UWord16 _delayPlayout;
+    WebRtc_UWord16 _delayRecording;
+
+    // AGC state
+    bool _AGC;
+
+    // Stored device properties
+    WebRtc_UWord16 _samplingFreqIn; // Sampling frequency for Mic
+    WebRtc_UWord16 _samplingFreqOut; // Sampling frequency for Speaker
+    WebRtc_UWord32 _maxSpeakerVolume; // The maximum speaker volume value
+    bool _loudSpeakerOn;
+    // Stores the desired audio source to use, set in SetRecordingDevice
+    int _recAudioSource;
+
+    // JNI and Java
+    JavaVM* _javaVM; // denotes a Java VM
+    jobject _javaContext; // the application context
+
+    JNIEnv* _jniEnvPlay; // The JNI env for playout thread
+    JNIEnv* _jniEnvRec; // The JNI env for recording thread
+
+    jclass _javaScClass; // AudioDeviceAndroid class
+    jobject _javaScObj; // AudioDeviceAndroid object
+
+    // The play buffer field in AudioDeviceAndroid object (global ref)
+    jobject _javaPlayBuffer;
+    // The rec buffer field in AudioDeviceAndroid object (global ref)
+    jobject _javaRecBuffer;
+    void* _javaDirectPlayBuffer; // Direct buffer pointer to play buffer
+    void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer
+    jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid
+    jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_JNI_H
diff --git a/trunk/src/modules/audio_device/main/source/android/audio_device_android_opensles.cc b/trunk/src/modules/audio_device/main/source/android/audio_device_android_opensles.cc
new file mode 100644
index 0000000..da88a0e
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/audio_device_android_opensles.cc
@@ -0,0 +1,2164 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <time.h>
+#include <sys/time.h>
+
+#include "audio_device_utility.h"
+#include "audio_device_android_opensles.h"
+#include "audio_device_config.h"
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+#ifdef WEBRTC_ANDROID_DEBUG
+#include <android/log.h>
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(                  \
+           ANDROID_LOG_DEBUG, "WebRTC ADM OpenSLES", __VA_ARGS__)
+#endif
+
+namespace webrtc {
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceAndroidOpenSLES - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceAndroidOpenSLES::AudioDeviceAndroidOpenSLES(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _slEngineObject(NULL),
+    _slPlayer(NULL),
+    _slEngine(NULL),
+    _slPlayerPlay(NULL),
+    _slOutputMixObject(NULL),
+    _slSpeakerVolume(NULL),
+    _slRecorder(NULL),
+    _slRecorderRecord(NULL),
+    _slAudioIODeviceCapabilities(NULL),
+    _slRecorderSimpleBufferQueue(NULL),
+    _slMicVolume(NULL),
+    _micDeviceId(0),
+    _recQueueSeq(0),
+    _timeEventRec(*EventWrapper::Create()),
+    _ptrThreadRec(NULL),
+    _recThreadID(0),
+    _playQueueSeq(0),
+    _recCurrentSeq(0),
+    _recBufferTotalSize(0),
+    _recordingDeviceIsSpecified(false),
+    _playoutDeviceIsSpecified(false),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _micIsInitialized(false),
+    _speakerIsInitialized(false),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _playoutDelay(0),
+    _recordingDelay(0),
+    _AGC(false),
+    _adbSampleRate(0),
+    _samplingRateIn(SL_SAMPLINGRATE_16),
+    _samplingRateOut(SL_SAMPLINGRATE_16),
+    _maxSpeakerVolume(0),
+    _minSpeakerVolume(0),
+    _loudSpeakerOn(false) {
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created",
+                 __FUNCTION__);
+    memset(_playQueueBuffer, 0, sizeof(_playQueueBuffer));
+    memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer));
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+    memset(_recLength, 0, sizeof(_recLength));
+    memset(_recSeqNumber, 0, sizeof(_recSeqNumber));
+}
+
+AudioDeviceAndroidOpenSLES::~AudioDeviceAndroidOpenSLES() {
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed",
+                 __FUNCTION__);
+
+    Terminate();
+
+    delete &_timeEventRec;
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+void AudioDeviceAndroidOpenSLES::AttachAudioBuffer(
+    AudioDeviceBuffer* audioBuffer) {
+
+    CriticalSectionScoped lock(_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const {
+
+    audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::Init() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized) {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    SLEngineOption EngineOption[] = { (SLuint32) SL_ENGINEOPTION_THREADSAFE,
+            (SLuint32) SL_BOOLEAN_TRUE };
+    WebRtc_Word32 res = slCreateEngine(&_slEngineObject, 1, EngineOption, 0,
+                                       NULL, NULL);
+    //WebRtc_Word32 res = slCreateEngine( &_slEngineObject, 0, NULL, 0, NULL,
+    //    NULL);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create SL Engine Object");
+        return -1;
+    }
+    /* Realizing the SL Engine in synchronous mode. */
+    if ((*_slEngineObject)->Realize(_slEngineObject, SL_BOOLEAN_FALSE)
+            != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to Realize SL Engine");
+        return -1;
+    }
+
+    if ((*_slEngineObject)->GetInterface(_slEngineObject, SL_IID_ENGINE,
+                                         (void*) &_slEngine)
+            != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get SL Engine interface");
+        return -1;
+    }
+
+    // Check the sample rate to be used for playback and recording
+    if (InitSampleRate() != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Failed to init samplerate", __FUNCTION__);
+        return -1;
+    }
+
+    // Set the audio device buffer sampling rate, we assume we get the same
+    // for play and record
+    if (_ptrAudioBuffer->SetRecordingSampleRate(_adbSampleRate) < 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Could not set audio device buffer recording "
+                         "sampling rate (%d)", _adbSampleRate);
+    }
+    if (_ptrAudioBuffer->SetPlayoutSampleRate(_adbSampleRate) < 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Could not set audio device buffer playout sampling "
+                         "rate (%d)", _adbSampleRate);
+    }
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::Terminate() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized) {
+        return 0;
+    }
+
+    // RECORDING
+    StopRecording();
+
+    _micIsInitialized = false;
+    _recordingDeviceIsSpecified = false;
+
+    // PLAYOUT
+    StopPlayout();
+
+    if (_slEngineObject != NULL) {
+        (*_slEngineObject)->Destroy(_slEngineObject);
+        _slEngineObject = NULL;
+        _slEngine = NULL;
+    }
+
+    _initialized = false;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::Initialized() const {
+
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerIsAvailable(bool& available) {
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitSpeaker() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneIsAvailable(bool& available) {
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitMicrophone() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _micIsInitialized = true;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::SpeakerIsInitialized() const {
+
+    return _speakerIsInitialized;
+}
+
+bool AudioDeviceAndroidOpenSLES::MicrophoneIsInitialized() const {
+
+    return _micIsInitialized;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerVolumeIsAvailable(
+                                                                 bool& available) {
+
+    available = true; // We assume we are always able to set/get the volume
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetSpeakerVolume(
+    WebRtc_UWord32 volume) {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    if (_slEngineObject == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "SetSpeakerVolume, SL Engine object doesnt exist");
+        return -1;
+    }
+
+    if (_slEngine == NULL) {
+        // Get the SL Engine Interface which is implicit
+        if ((*_slEngineObject)->GetInterface(_slEngineObject, SL_IID_ENGINE,
+                                             (void*) &_slEngine)
+                != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to GetInterface SL Engine Interface");
+            return -1;
+        }
+    }
+    /*    if (_slOutputMixObject == NULL && _slEngine != NULL)
+     {
+     // Set arrays required[] and iidArray[] for VOLUME interface
+     const SLInterfaceID ids[1] = {SL_IID_VOLUME};
+     const SLboolean req[1] = {SL_BOOLEAN_TRUE};
+     // Create Output Mix object to be used by player
+     if ((*_slEngine)->CreateOutputMix(_slEngine, &_slOutputMixObject, 1, ids,
+     req) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to create Output Mix object");
+     return -1;
+     }
+
+     // Realizing the Output Mix object in synchronous mode.
+     if ((*_slOutputMixObject)->Realize(_slOutputMixObject,
+     SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to realize the output mix object");
+     return -1;
+     }
+     }
+     if (_slSpeakerVolume == NULL && _slOutputMixObject != NULL)
+     {
+     if ((*_slOutputMixObject)->GetInterface(_slOutputMixObject,
+     SL_IID_VOLUME, (void*)&_slSpeakerVolume) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get interface for Speaker Volume");
+     return -1;
+     }
+     }
+
+     WebRtc_Word32 vol(0);
+     vol = ((volume*(_maxSpeakerVolume-_minSpeakerVolume) + (int)(255/2))
+     / (255)) + _minSpeakerVolume;
+     if (_slSpeakerVolume != NULL)
+     {
+     if ((*_slSpeakerVolume)->SetVolumeLevel(_slSpeakerVolume, vol)
+     != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to set speaker volume");
+     return -1;
+     }
+     }
+     */
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerVolume(
+    WebRtc_UWord32& volume) const {
+
+    /*    if (!_speakerIsInitialized)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  Speaker not initialized");
+     return -1;
+     }
+
+     if (_slEngineObject == NULL)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "SetSpeakerVolume, SL Engine object doesnt exist");
+     return -1;
+     }
+
+     if (_slEngine == NULL && _slEngineObject != NULL)
+     {
+     // Get the SL Engine Interface which is implicit
+     if ((*_slEngineObject)->GetInterface(_slEngineObject,
+     SL_IID_ENGINE, (void*)&_slEngine) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get SL Engine interface");
+     return -1;
+     }
+     }
+     if (_slOutputMixObject == NULL && _slEngine != NULL)
+     {
+     // Set arrays required[] and iidArray[] for VOLUME interface
+     const SLInterfaceID ids[1] = {SL_IID_VOLUME};
+     const SLboolean req[1] = {SL_BOOLEAN_TRUE};
+     // Create Output Mix object to be used by player
+     if ((*_slEngine)->CreateOutputMix(_slEngine,
+     &_slOutputMixObject, 1, ids, req) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to create Output Mixer object");
+     return -1;
+     }
+     // Realizing the Output Mix object in synchronous mode.
+     if ((*_slOutputMixObject)->Realize(_slOutputMixObject,
+     SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to realize Output Mix object");
+     return -1;
+     }
+     }
+     if (_slSpeakerVolume == NULL && _slOutputMixObject != NULL)
+     {
+     if ((*_slOutputMixObject)->GetInterface(_slOutputMixObject,
+     SL_IID_VOLUME, (void*)&_slSpeakerVolume) != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get speaker volume interface");
+     return -1;
+     }
+     }
+
+     SLmillibel vol(0);
+     if (_slSpeakerVolume != NULL)
+     {
+     if ((*_slSpeakerVolume)->GetVolumeLevel(_slSpeakerVolume, &vol)
+     != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get speaker volume");
+     return -1;
+     }
+     }
+     // volume has to be mapped from millibel to [0, 255]
+     //    volume = (WebRtc_UWord32) (((vol - _minSpeakerVolume) * 255 +
+     * (int)((_maxSpeakerVolume - _minSpeakerVolume)/2)) /
+     *  (_maxSpeakerVolume - _minSpeakerVolume));
+     */
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetWaveOutVolume(
+    WebRtc_UWord16 /*volumeLeft*/,
+    WebRtc_UWord16 /*volumeRight*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    maxVolume = _maxSpeakerVolume;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    minVolume = _minSpeakerVolume;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+    stepSize = 1;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerMuteIsAvailable(
+    bool& available) {
+
+    available = false; // Speaker mute not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetSpeakerMute(bool /*enable*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerMute(bool& /*enabled*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneMuteIsAvailable(
+    bool& available) {
+
+    available = false; // Mic mute not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetMicrophoneMute(bool /*enable*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneMute(
+    bool& /*enabled*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneBoostIsAvailable(
+                                                                   bool& available) {
+
+    available = false; // Mic boost not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetMicrophoneBoost(bool enable) {
+
+    if (!_micIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+
+    if (enable) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneBoost(bool& enabled) const {
+
+    if (!_micIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoRecordingIsAvailable(
+    bool& available) {
+
+    available = false; // Stereo recording not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetStereoRecording(bool enable) {
+
+    if (enable) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoRecording(bool& enabled) const {
+
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoPlayoutIsAvailable(
+    bool& available) {
+
+    available = false; // Stereo playout not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetStereoPlayout(bool enable) {
+
+    if (enable) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoPlayout(bool& enabled) const {
+
+    enabled = false;
+
+    return 0;
+}
+
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetAGC(bool enable) {
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::AGC() const {
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneVolumeIsAvailable(
+    bool& available) {
+
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetMicrophoneVolume(
+    WebRtc_UWord32 volume) {
+
+    if (_slEngineObject == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "SetMicrophoneVolume, SL Engine Object doesnt exist");
+        return -1;
+    }
+
+    /* Get the optional DEVICE VOLUME interface from the engine */
+    if (_slMicVolume == NULL) {
+        // Get the optional DEVICE VOLUME interface from the engine
+        if ((*_slEngineObject)->GetInterface(_slEngineObject,
+                                             SL_IID_DEVICEVOLUME,
+                                             (void*) &_slMicVolume)
+                != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create Output Mix object");
+        }
+    }
+
+    if (_slMicVolume != NULL) {
+        WebRtc_Word32 vol(0);
+        vol = ((volume * (_maxSpeakerVolume - _minSpeakerVolume) + (int) (255
+                / 2)) / (255)) + _minSpeakerVolume;
+        if ((*_slMicVolume)->SetVolume(_slMicVolume, _micDeviceId, vol)
+                != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create Output Mix object");
+        }
+    }
+
+    return 0;
+}
+
+// Not implemented: always returns -1 without touching |volume|. The block
+// comment below is an inactive sketch of reading the device volume via
+// SL_IID_DEVICEVOLUME and mapping millibels back to the ADM range [0,255].
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneVolume(
+    WebRtc_UWord32& /*volume*/) const {
+
+    /*    if (_slEngineObject == NULL)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "MicrophoneVolume, SL Engine Object doesnt exist");
+     return -1;
+     }
+
+     // Get the optional DEVICE VOLUME interface from the engine
+     if (_slMicVolume == NULL)
+     {
+     // Get the optional DEVICE VOLUME interface from the engine
+     if ((*_slEngineObject)->GetInterface(_slEngineObject,
+     SL_IID_DEVICEVOLUME, (void*)&_slMicVolume)  != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get Microphone Volume interface");
+     }
+     }
+
+     SLint32 vol(0);
+     if (_slMicVolume != NULL)
+     {
+     if ((*_slMicVolume)->GetVolume(_slMicVolume, _micDeviceId, &vol)
+     != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get Microphone Volume");
+     }
+     }
+     // volume has to be mapped from millibel to [0, 255]
+     //    volume = (WebRtc_UWord32) (((vol - _minSpeakerVolume) * 255 +
+     * (int)((_maxSpeakerVolume - _minSpeakerVolume)/2)) /
+     *  (_maxSpeakerVolume - _minSpeakerVolume));
+     */
+    return -1;
+}
+
+// NOTE(review): returns success without writing |maxVolume|, so the caller
+// sees an unmodified value. Confirm whether this should set a maximum (255
+// would match the [0,255] ADM range used elsewhere) or return -1 like
+// MicrophoneVolume() does.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MaxMicrophoneVolume(
+    WebRtc_UWord32& /*maxVolume*/) const {
+    return 0;
+}
+
+// Lower bound of the ADM microphone volume range.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const {
+    minVolume = 0;
+    return 0;
+}
+
+// Granularity of the microphone volume control (one ADM unit).
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const {
+    stepSize = 1;
+    return 0;
+}
+
+// Exactly one (default) playout device exists on this platform.
+WebRtc_Word16 AudioDeviceAndroidOpenSLES::PlayoutDevices() {
+    return 1;
+}
+
+// Selects the playout device. Only index 0 (the default device) is valid,
+// and the device cannot be changed once playout has been initialized.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetPlayoutDevice(WebRtc_UWord16 index) {
+    if (_playIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return -1;
+    }
+    if (index != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    // Only a flag is recorded, to keep behaviour consistent with the other
+    // platform implementations.
+    _playoutDeviceIsSpecified = true;
+    return 0;
+}
+
+// Windows-style device selection has no meaning on Android.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/) {
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// Reports the name/GUID of playout device |index|. Only index 0 exists and
+// no real device name is available, so empty strings are returned.
+// NOTE(review): |name| is written unconditionally - callers are assumed to
+// pass a valid buffer; only |guid| is allowed to be NULL.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize]) {
+    if (index != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    memset(name, 0, kAdmMaxDeviceNameSize);
+    if (guid != NULL) {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+    return 0;
+}
+
+// Reports the name/GUID of recording device |index|. Mirrors
+// PlayoutDeviceName(): only index 0 exists and empty strings are returned.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize]) {
+    if (index != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    memset(name, 0, kAdmMaxDeviceNameSize);
+    if (guid != NULL) {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+    return 0;
+}
+
+// Exactly one (default) recording device exists on this platform.
+WebRtc_Word16 AudioDeviceAndroidOpenSLES::RecordingDevices() {
+    return 1;
+}
+
+// Selects the recording device. Only index 0 (the default device) is valid,
+// and the device cannot be changed once recording has been initialized.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetRecordingDevice(
+    WebRtc_UWord16 index) {
+    if (_recIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return -1;
+    }
+    if (index != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+    // Only a flag is recorded, to keep behaviour consistent with the other
+    // platform implementations.
+    _recordingDeviceIsSpecified = true;
+    return 0;
+}
+
+// Windows-style device selection has no meaning on Android.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetRecordingDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/) {
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// Probes playout availability by attempting a full InitPlayout() and then
+// undoing its effects with StopPlayout(). |available| becomes true when the
+// init did not fail; the init's return code is propagated.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutIsAvailable(bool& available) {
+    available = false;
+    const WebRtc_Word32 res = InitPlayout();  // try to initialize playout
+    StopPlayout();                            // cancel effect of the init
+    if (res != -1) {
+        available = true;
+    }
+    return res;
+}
+
+// Probes recording availability: attempts a full InitRecording(), undoes it
+// with StopRecording(), and reports success through |available|.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::RecordingIsAvailable(bool& available) {
+
+    available = false;
+
+    // Try to initialize the recording side
+    // (comment fixed: this previously said "playout side")
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1) {
+        available = true;
+    }
+
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+//
+//  Creates the OpenSL ES output mix and audio player (mono, 16 kHz, 16-bit
+//  PCM, little-endian) feeding from an Android simple buffer queue, and
+//  registers PlayerSimpleBufferQueueCallback on that queue. Requires Init()
+//  and SetPlayoutDevice() to have been called. Idempotent once initialized.
+//  Returns 0 on success, -1 on failure.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitPlayout() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  Not initialized");
+        return -1;
+    }
+
+    if (_playing) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    if (_playIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return 0;
+    }
+
+    // Initialize the speaker; failure here is logged but not fatal.
+    if (InitSpeaker() == -1) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    if (_slEngineObject == NULL || _slEngine == NULL) {
+        // BUG FIX: message previously read "SLObject or Engiine is NULL".
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  SLObject or Engine is NULL");
+        return -1;
+    }
+
+    WebRtc_Word32 res(-1);
+    SLDataFormat_PCM pcm;
+    SLDataSource audioSource;
+    SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue;
+    SLDataSink audioSink;
+    SLDataLocator_OutputMix locator_outputmix;
+
+    // Create Output Mix object to be used by player
+    SLInterfaceID ids[N_MAX_INTERFACES];
+    SLboolean req[N_MAX_INTERFACES];
+    for (unsigned int i = 0; i < N_MAX_INTERFACES; i++) {
+        ids[i] = SL_IID_NULL;
+        req[i] = SL_BOOLEAN_FALSE;
+    }
+    // Environmental reverb is requested but not required (req stays FALSE).
+    ids[0] = SL_IID_ENVIRONMENTALREVERB;
+    res = (*_slEngine)->CreateOutputMix(_slEngine, &_slOutputMixObject, 1, ids,
+                                        req);
+    if (res != SL_RESULT_SUCCESS) {
+        // BUG FIX: this is a CreateOutputMix failure; message previously
+        // said "failed to get SL Output Mix object".
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create SL Output Mix object");
+        return -1;
+    }
+    // Realizing the Output Mix object in synchronous mode.
+    res = (*_slOutputMixObject)->Realize(_slOutputMixObject, SL_BOOLEAN_FALSE);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to realize SL Output Mix object");
+        return -1;
+    }
+    // Get the speaker mixer
+    /*    res = (*_slOutputMixObject)->GetInterface(_slOutputMixObject,
+     * SL_IID_VOLUME, (void*)&_slSpeakerVolume);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get Speaker Mixer");
+     return -1;
+     }
+     */
+    // The code below can be moved to startplayout instead
+    /* Setup the data source structure for the buffer queue */
+    simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    /* Two buffers in our buffer queue, to have low latency*/
+    simpleBufferQueue.numBuffers = N_PLAY_QUEUE_BUFFERS;
+    // TODO(xians), figure out if we should support stereo playout for android
+    /* Setup the format of the content in the buffer queue */
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = 1;
+    // _samplingRateOut is initilized in InitSampleRate()
+    pcm.samplesPerSec = SL_SAMPLINGRATE_16;
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audioSource.pFormat = (void *) &pcm;
+    audioSource.pLocator = (void *) &simpleBufferQueue;
+    /* Setup the data sink structure */
+    locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+    locator_outputmix.outputMix = _slOutputMixObject;
+    audioSink.pLocator = (void *) &locator_outputmix;
+    audioSink.pFormat = NULL;
+
+    // Set arrays required[] and iidArray[] for SEEK interface
+    // (PlayItf is implicit)
+    ids[0] = SL_IID_BUFFERQUEUE;
+    ids[1] = SL_IID_EFFECTSEND;
+    req[0] = SL_BOOLEAN_TRUE;
+    req[1] = SL_BOOLEAN_TRUE;
+    // Create the music player
+    res = (*_slEngine)->CreateAudioPlayer(_slEngine, &_slPlayer, &audioSource,
+                                          &audioSink, 2, ids, req);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create Audio Player");
+        return -1;
+    }
+
+    // Realizing the player in synchronous mode.
+    res = (*_slPlayer)->Realize(_slPlayer, SL_BOOLEAN_FALSE);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to realize the player");
+        return -1;
+    }
+    // Get seek and play interfaces
+    res = (*_slPlayer)->GetInterface(_slPlayer, SL_IID_PLAY,
+                                     (void*) &_slPlayerPlay);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Player interface");
+        return -1;
+    }
+    res = (*_slPlayer)->GetInterface(_slPlayer, SL_IID_BUFFERQUEUE,
+                                     (void*) &_slPlayerSimpleBufferQueue);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Player Simple Buffer Queue interface");
+        return -1;
+    }
+
+    // Setup to receive buffer queue event callbacks
+    res = (*_slPlayerSimpleBufferQueue)->RegisterCallback(
+        _slPlayerSimpleBufferQueue,
+        PlayerSimpleBufferQueueCallback,
+        this);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to register Player Callback");
+        return -1;
+    }
+    _playIsInitialized = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+//
+//  Creates the OpenSL ES audio recorder (default input device; mono, 16 kHz,
+//  16-bit PCM, little-endian) delivering into an Android simple buffer queue,
+//  and registers RecorderSimpleBufferQueueCallback on that queue. Requires
+//  Init() and SetRecordingDevice() to have been called. Idempotent once
+//  initialized. Returns 0 on success, -1 on failure.
+//
+//  NOTE(review): numInputs, inputDeviceIDs, audioInputDescriptor,
+//  micAvailable and micDeviceID are only referenced by the commented-out
+//  device-discovery code below and are currently unused.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitRecording() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  Not initialized");
+        return -1;
+    }
+
+    if (_recording) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    if (_recIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return 0;
+    }
+
+    // Initialize the microphone; failure here is logged but not fatal.
+    if (InitMicrophone() == -1) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    if (_slEngineObject == NULL || _slEngine == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording object is NULL");
+        return -1;
+    }
+    WebRtc_Word32 res(-1);
+    WebRtc_Word32 numInputs(0);
+    WebRtc_UWord32 inputDeviceIDs[N_MAX_INPUT_DEVICES];
+    SLAudioInputDescriptor audioInputDescriptor;
+    SLDataSource audioSource;
+    SLDataLocator_IODevice micLocator;
+    SLDataSink audioSink;
+    SLDataFormat_PCM pcm;
+    SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue;
+    bool micAvailable(false);
+    WebRtc_UWord32 micDeviceID(0);
+
+    /*    // Get the Audio IO DEVICE CAPABILITIES interface, which is also implicit
+     res = (*_slEngineObject)->GetInterface(_slEngineObject,
+     SL_IID_AUDIOIODEVICECAPABILITIES, (void*)&_slAudioIODeviceCapabilities);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get Audio IO device Capacilities interface");
+     return -1;
+     }
+     numInputs = N_MAX_OUTPUT_DEVICES;
+
+     res = (*_slAudioIODeviceCapabilities)->GetAvailableAudioInputs(
+     _slAudioIODeviceCapabilities, &numInputs, inputDeviceIDs);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get the number of Input Devices");
+     return -1;
+     }
+
+     // Search for either earpiece microphone or headset microphone input
+     // device - with a preference for the latter
+     for (int i=0;i<numInputs; i++)
+     {
+     res = (*_slAudioIODeviceCapabilities)->QueryAudioInputCapabilities(
+     _slAudioIODeviceCapabilities, inputDeviceIDs[i], &audioInputDescriptor);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to query info for the Input Devices");
+     return -1;
+     }
+     if((audioInputDescriptor.deviceConnection ==
+     SL_DEVCONNECTION_ATTACHED_WIRED)
+     && (audioInputDescriptor.deviceScope == SL_DEVSCOPE_USER)
+     && (audioInputDescriptor.deviceLocation == SL_DEVLOCATION_HEADSET))
+     {
+     micDeviceID = inputDeviceIDs[i];
+     micAvailable = true;
+     break;
+     }
+     else if((audioInputDescriptor.deviceConnection ==
+     SL_DEVCONNECTION_INTEGRATED)
+     && (audioInputDescriptor.deviceScope == SL_DEVSCOPE_USER)
+     && (audioInputDescriptor.deviceLocation == SL_DEVLOCATION_HANDSET))
+     {
+     micDeviceID = inputDeviceIDs[i];
+     micAvailable = true;
+     break;
+     }
+     }
+     // If neither of the preferred input audio devices is available,
+     // no point in continuing
+     if (!micAvailable)
+     {
+     return -1;
+     }
+     */
+
+    // Get the optional DEVICE VOLUME interface from the engine,
+    // should this be done somewhere else
+    /*    res = (*_slEngineObject)->GetInterface(_slEngineObject,
+     * SL_IID_DEVICEVOLUME, (void*)&_slMicVolume);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get Microphone Volume interface");
+     return -1;
+     }
+     */
+    // Setup the data source structure: the platform default audio input
+    // device (device discovery above is disabled).
+    micLocator.locatorType = SL_DATALOCATOR_IODEVICE;
+    micLocator.deviceType = SL_IODEVICE_AUDIOINPUT;
+    micLocator.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT; //micDeviceID;
+    micLocator.device = NULL;
+    audioSource.pLocator = (void *) &micLocator;
+    audioSource.pFormat = NULL;
+
+    /* Setup the data source structure for the buffer queue */
+    simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    simpleBufferQueue.numBuffers = N_REC_QUEUE_BUFFERS;
+    /* Setup the format of the content in the buffer queue */
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = 1;
+    // _samplingRateIn is initialized in initSampleRate()
+    pcm.samplesPerSec = SL_SAMPLINGRATE_16;
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = 16;
+    pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audioSink.pFormat = (void *) &pcm;
+    audioSink.pLocator = (void *) &simpleBufferQueue;
+
+    // Create audio recorder; the Android simple buffer queue interface is
+    // required (req = TRUE) so creation fails if it is unavailable.
+    const SLInterfaceID id[1] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
+    const SLboolean req[1] = { SL_BOOLEAN_TRUE };
+    res = (*_slEngine)->CreateAudioRecorder(_slEngine, &_slRecorder,
+                                            &audioSource, &audioSink, 1, id,
+                                            req);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create Recorder");
+        return -1;
+    }
+
+    // Realizing the recorder in synchronous mode.
+    res = (*_slRecorder)->Realize(_slRecorder, SL_BOOLEAN_FALSE);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to realize Recorder");
+        return -1;
+    }
+
+    // NOTE(review): the three progress traces below use kTraceError although
+    // they are not errors - presumably left over from debugging.
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "  get Recorder interface");
+    // Get the RECORD interface - it is an implicit interface
+    res = (*_slRecorder)->GetInterface(_slRecorder, SL_IID_RECORD,
+                                       (void*) &_slRecorderRecord);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Recorder interface");
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "  get Recorder Simple Buffer Queue 2");
+    // Get the simpleBufferQueue interface
+    res = (*_slRecorder)->GetInterface(_slRecorder,
+                                       SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+                                       (void*) &_slRecorderSimpleBufferQueue);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Recorder Simple Buffer Queue");
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "  register Recorder Callback 2");
+    // Setup to receive buffer queue event callbacks
+    res = (*_slRecorderSimpleBufferQueue)->RegisterCallback(
+        _slRecorderSimpleBufferQueue,
+        RecorderSimpleBufferQueueCallback,
+        this);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to register Recorder Callback");
+        return -1;
+    }
+
+    _recIsInitialized = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+//
+//  Spawns the realtime capture thread, primes the recorder's buffer queue
+//  with N_REC_QUEUE_BUFFERS-1 empty 10 ms buffers, and switches the OpenSL
+//  recorder into SL_RECORDSTATE_RECORDING. Requires InitRecording() to have
+//  succeeded. Returns 0 on success (or if already recording), -1 on failure.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StartRecording() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording not initialized");
+        return -1;
+    }
+
+    if (_recording) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return 0;
+    }
+
+    if (_slRecorderRecord == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  RecordITF is NULL");
+        return -1;
+    }
+
+    if (_slRecorderSimpleBufferQueue == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recorder Simple Buffer Queue is NULL");
+        return -1;
+    }
+
+    // Reset recording buffer
+    memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer)); // empty the queue
+    _recQueueSeq = 0;
+
+    // Start the capture worker thread before the device starts delivering.
+    const char* threadName = "webrtc_opensles_audio_capture_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
+            kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                "  failed to start the rec audio thread");
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+    _recThreadIsInitialized = true;
+    // Clear all per-session capture bookkeeping.
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+    memset(_recLength, 0, sizeof(_recLength));
+    memset(_recSeqNumber, 0, sizeof(_recSeqNumber));
+    _recCurrentSeq = 0;
+    _recBufferTotalSize = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Enqueue N_REC_QUEUE_BUFFERS -1 zero buffers to get the ball rolling
+    // find out how it behaves when the sample rate is 44100
+    WebRtc_Word32 res(-1);
+    WebRtc_UWord32 nSample10ms = _adbSampleRate / 100;
+    for (int i = 0; i < (N_REC_QUEUE_BUFFERS - 1); i++) {
+        // We assign 10ms buffer to each queue, size given in bytes.
+        res = (*_slRecorderSimpleBufferQueue)->Enqueue(
+            _slRecorderSimpleBufferQueue,
+            (void*) _recQueueBuffer[_recQueueSeq],
+            2 * nSample10ms);
+        if (res != SL_RESULT_SUCCESS) {
+            // NOTE(review): the capture thread started above is left running
+            // on this error path (and on the SetRecordState failure below) -
+            // confirm whether it should be stopped and deleted here.
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to Enqueue Empty Buffer to recorder");
+            return -1;
+        }
+        _recQueueSeq++;
+    }
+    // Record the audio
+    res = (*_slRecorderRecord)->SetRecordState(_slRecorderRecord,
+                                               SL_RECORDSTATE_RECORDING);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to start recording");
+        return -1;
+    }
+    _recording = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+//
+//  Stops the capture thread, puts the OpenSL recorder into
+//  SL_RECORDSTATE_STOPPED, clears its buffer queue, destroys the recorder
+//  object, and resets the recording state flags. Safe to call when recording
+//  was never initialized (returns 0).
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StopRecording() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording is not initialized");
+        return 0;
+    }
+
+    // Stop the recording thread
+    if (_ptrThreadRec != NULL)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                "Stopping capture thread");
+        bool res = _ptrThreadRec->Stop();
+        if (!res) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                                    "Failed to stop Capture thread ");
+        } else {
+            delete _ptrThreadRec;
+            _ptrThreadRec = NULL;
+            _recThreadIsInitialized = false;
+        }
+    }
+
+    if ((_slRecorderRecord != NULL) && (_slRecorder != NULL)) {
+        // Stop the recorder
+        WebRtc_Word32 res = (*_slRecorderRecord)->SetRecordState(
+            _slRecorderRecord,
+            SL_RECORDSTATE_STOPPED);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to stop recording");
+            return -1;
+        }
+        res = (*_slRecorderSimpleBufferQueue)->Clear(
+              _slRecorderSimpleBufferQueue);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to clear recorder buffer queue");
+            return -1;
+        }
+
+        // Destroy the recorder object; all interfaces obtained from it are
+        // invalid afterwards, so NULL every cached pointer.
+        (*_slRecorder)->Destroy(_slRecorder);
+        _slRecorder = NULL;
+        _slRecorderRecord = NULL;
+        // BUG FIX: this line was a duplicated "_slRecorderRecord = NULL;",
+        // leaving the buffer queue interface pointer dangling.
+        _slRecorderSimpleBufferQueue = NULL;
+    }
+
+    _recIsInitialized = false;
+    _recording = false;
+    _recWarning = 0;
+    _recError = 0;
+    _recQueueSeq = 0;
+    return 0;
+}
+
+// True once InitRecording() has completed successfully.
+bool AudioDeviceAndroidOpenSLES::RecordingIsInitialized() const {
+    return _recIsInitialized;
+}
+
+
+// True while capture is running (between StartRecording and StopRecording).
+bool AudioDeviceAndroidOpenSLES::Recording() const {
+    return _recording;
+}
+
+// True once InitPlayout() has completed successfully.
+bool AudioDeviceAndroidOpenSLES::PlayoutIsInitialized() const {
+    return _playIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+//
+//  Primes the player's buffer queue with one 10 ms buffer pulled from the
+//  audio device buffer and switches the OpenSL player into
+//  SL_PLAYSTATE_PLAYING; subsequent buffers are supplied by
+//  PlayerSimpleBufferQueueCallbackHandler. Requires InitPlayout() to have
+//  succeeded. Returns 0 on success (or if already playing), -1 on failure.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StartPlayout() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout not initialized");
+        return -1;
+    }
+
+    if (_playing) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return 0;
+    }
+
+    if (_slPlayerPlay == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  PlayItf is NULL");
+        return -1;
+    }
+    if (_slPlayerSimpleBufferQueue == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PlayerSimpleBufferQueue is NULL");
+        return -1;
+    }
+
+    // NOTE(review): this resets the *recording* queue sequence inside a
+    // playout routine; _playQueueSeq looks like the intended variable -
+    // confirm before changing.
+    _recQueueSeq = 0;
+
+    WebRtc_Word32 res(-1);
+    /* Enqueue a set of zero buffers to get the ball rolling */
+    WebRtc_UWord32 nSample10ms = _adbSampleRate / 100;
+    // NOTE(review): variable-length array (runtime-sized stack buffer) -
+    // a GCC extension, not standard C++.
+    WebRtc_Word8 playBuffer[2 * nSample10ms];
+    WebRtc_UWord32 noSamplesOut(0);
+    /*    res = (*_slPlayerSimpleBufferQueue)->Clear(_slPlayerSimpleBufferQueue);
+     if (res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+     "  player simpler buffer queue clean failed");
+     //return ; dong return
+     }
+     *///    for (int i = 0; i<(N_PLAY_QUEUE_BUFFERS -1); i++)
+    {
+        noSamplesOut = _ptrAudioBuffer->RequestPlayoutData(nSample10ms);
+        //Lock();
+        // Get data from Audio Device Buffer
+        noSamplesOut = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        // Insert what we have in data buffer
+        memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, 2 * noSamplesOut);
+        //UnLock();
+
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+        // "_playQueueSeq (%u)  noSamplesOut (%d)", _playQueueSeq,
+        //noSamplesOut);
+        // write the buffer data we got from VoE into the device
+        res = (*_slPlayerSimpleBufferQueue)->Enqueue(
+            _slPlayerSimpleBufferQueue,
+            (void*) _playQueueBuffer[_playQueueSeq],
+            2 * noSamplesOut);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  player simpler buffer queue Enqueue failed, %d",
+                         noSamplesOut);
+            //return ; dong return
+        }
+        _playQueueSeq = (_playQueueSeq + 1) % N_PLAY_QUEUE_BUFFERS;
+    }
+
+    // Play the PCM samples using a buffer queue
+    res = (*_slPlayerPlay)->SetPlayState(_slPlayerPlay, SL_PLAYSTATE_PLAYING);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to start playout");
+        return -1;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _playing = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+//
+//  Stops playback, tears down the OpenSL ES player and output mix, and
+//  resets the playout state flags. Safe to call when playout was never
+//  initialized (returns 0).
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StopPlayout() {
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout is not initialized");
+        return 0;
+    }
+
+    // BUG FIX: the original condition tested _slOutputMixObject == NULL and
+    // _slPlayer == NULL, so the teardown never ran after a successful
+    // InitPlayout() (leaking the player and output mix on every stop/start
+    // cycle) and would have dereferenced NULL objects if it ever did run.
+    if ((_slPlayerPlay != NULL) && (_slOutputMixObject != NULL) && (_slPlayer
+            != NULL)) {
+        // Make sure player is stopped
+        WebRtc_Word32 res =
+                (*_slPlayerPlay)->SetPlayState(_slPlayerPlay,
+                                               SL_PLAYSTATE_STOPPED);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to stop playout");
+            return -1;
+        }
+        res = (*_slPlayerSimpleBufferQueue)->Clear(_slPlayerSimpleBufferQueue);
+        if (res != SL_RESULT_SUCCESS) {
+            // BUG FIX: message previously said "recorder buffer queue".
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to clear player buffer queue");
+            return -1;
+        }
+
+        // Destroy the player, then the Output Mix object it was attached to;
+        // all interfaces obtained from them become invalid.
+        (*_slPlayer)->Destroy(_slPlayer);
+        (*_slOutputMixObject)->Destroy(_slOutputMixObject);
+        _slPlayer = NULL;
+        _slPlayerPlay = NULL;
+        _slPlayerSimpleBufferQueue = NULL;
+        _slOutputMixObject = NULL;
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _playWarning = 0;
+    _playError = 0;
+    _playQueueSeq = 0;
+
+    return 0;
+}
+
+// Reports the current playout delay estimate in milliseconds.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutDelay(WebRtc_UWord16& delayMS) const {
+    delayMS = _playoutDelay;
+    return 0;
+}
+
+// Reports the current recording delay estimate in milliseconds.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::RecordingDelay(WebRtc_UWord16& delayMS) const {
+    delayMS = _recordingDelay;
+    return 0;
+}
+
+// True while playout is running (between StartPlayout and StopPlayout).
+bool AudioDeviceAndroidOpenSLES::Playing() const {
+    return _playing;
+}
+
+// Manual playout buffer configuration is not supported on this platform.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType /*type*/,
+    WebRtc_UWord16 /*sizeMS*/) {
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// Reports the playout buffer configuration: always adaptive on this
+// platform, with the current playout delay as the buffer size.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const {
+    type = AudioDeviceModule::kAdaptiveBufferSize;
+    sizeMS = _playoutDelay;  // set to current playout delay
+    return 0;
+}
+
+// CPU load reporting is not supported on this platform.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::CPULoad(WebRtc_UWord16& /*load*/) const {
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// True when a playout warning is pending (cleared via ClearPlayoutWarning).
+bool AudioDeviceAndroidOpenSLES::PlayoutWarning() const {
+    return _playWarning > 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::PlayoutError() const {
+    return (_playError > 0);
+}
+
+// True when a recording warning is pending (cleared via
+// ClearRecordingWarning).
+bool AudioDeviceAndroidOpenSLES::RecordingWarning() const {
+    return _recWarning > 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::RecordingError() const {
+    return (_recError > 0);
+}
+
+// Acknowledges (resets) the pending playout warning counter.
+void AudioDeviceAndroidOpenSLES::ClearPlayoutWarning() {
+    _playWarning = 0;
+}
+
+void AudioDeviceAndroidOpenSLES::ClearPlayoutError() {
+    _playError = 0;
+}
+
+// Acknowledges (resets) the pending recording warning counter.
+void AudioDeviceAndroidOpenSLES::ClearRecordingWarning() {
+    _recWarning = 0;
+}
+
+void AudioDeviceAndroidOpenSLES::ClearRecordingError() {
+    _recError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetLoudspeakerStatus
+//
+//  Currently only caches the requested loudspeaker routing in
+//  _loudSpeakerOn and always succeeds. The commented-out code below is an
+//  inactive JNI path that would call SetPlayoutSpeaker(Z)I on a Java helper
+//  object to perform the actual audio routing.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetLoudspeakerStatus(bool enable) {
+
+    //  if (!_javaContext)
+    //  {
+    //        WEBRTC_TRACE(kTraceError, kTraceUtility, -1, "  Context is not set");
+    //        return -1;
+    //  }
+
+    //  get the JNI env for this thread
+    //   JNIEnv *env;
+    //   bool isAttached = false;
+
+    //  if (_javaVM->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
+    //  {
+    // try to attach the thread and get the env
+    // Attach this thread to JVMslPlayoutCallback
+    //       jint res = _javaVM->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    //       if ((res < 0) || !env)
+    //      {
+    //           WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+    // "  Could not attach thread to JVM (%d, %p)", res, env);
+    //           return -1;
+    //       }
+    //       isAttached = true;
+    //   }
+
+    // get the method ID
+    //   jmethodID setPlayoutSpeakerID = env->GetMethodID(_javaScClass,
+    // "SetPlayoutSpeaker", "(Z)I");
+
+    // call java sc object method
+    //    jint res = env->CallIntMethod(_javaScObj, setPlayoutSpeakerID, enable);
+    //   if (res < 0)
+    //  {
+    //    WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+    // "  SetPlayoutSpeaker failed (%d)", res);
+    //      return -1;
+    //  }
+
+    // Cache the requested routing; read back by GetLoudspeakerStatus().
+    _loudSpeakerOn = enable;
+
+    //  Detach this thread if it was attached
+    //    if (isAttached)
+    //    {
+    //        if (_javaVM->DetachCurrentThread() < 0)
+    //       {
+    //            WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1,
+    // "  Could not detach thread from JVM");
+    //       }
+    //    }
+
+    return 0;
+}
+
+// Returns the last value passed to SetLoudspeakerStatus(); the actual
+// hardware routing is not queried. Always returns 0.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::GetLoudspeakerStatus(
+    bool& enabled) const {
+
+    enabled = _loudSpeakerOn;
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// Static trampoline registered with the OpenSL ES player buffer queue.
+// pContext carries the AudioDeviceAndroidOpenSLES instance; the call is
+// forwarded to the member handler.
+void AudioDeviceAndroidOpenSLES::PlayerSimpleBufferQueueCallback(
+    SLAndroidSimpleBufferQueueItf queueItf,
+    void *pContext) {
+    AudioDeviceAndroidOpenSLES* ptrThis =
+            static_cast<AudioDeviceAndroidOpenSLES*> (pContext);
+    ptrThis->PlayerSimpleBufferQueueCallbackHandler(queueItf);
+}
+
+// Runs on the OpenSL ES player callback thread each time the device has
+// consumed a buffer. Pulls the next 10 ms block from the audio device
+// buffer, copies it into _playQueueBuffer[_playQueueSeq], enqueues that
+// buffer with the player, updates the playout delay estimate and advances
+// the queue sequence number.
+// Fix: the Enqueue-failure trace now logs the SL result code (res) rather
+// than the unrelated sample count (noSamplesOut).
+void AudioDeviceAndroidOpenSLES::PlayerSimpleBufferQueueCallbackHandler(
+    SLAndroidSimpleBufferQueueItf queueItf) {
+    WebRtc_Word32 res;
+    //Lock();
+    //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+    //"_playQueueSeq (%u)", _playQueueSeq);
+    if (_playing && (_playQueueSeq < N_PLAY_QUEUE_BUFFERS)) {
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+        //_id, "playout callback ");
+        // Number of samples in a 10 ms block at the ADB sample rate.
+        unsigned int noSamp10ms = _adbSampleRate / 100;
+        // Max 10 ms @ samplerate kHz / 16 bit
+        WebRtc_Word8 playBuffer[2 * noSamp10ms];
+        int noSamplesOut = 0;
+
+        // Assumption for implementation
+        // assert(PLAYBUFSIZESAMPLES == noSamp10ms);
+
+        // TODO(xians), update the playout delay
+        //UnLock();
+
+        noSamplesOut = _ptrAudioBuffer->RequestPlayoutData(noSamp10ms);
+        //Lock();
+        // Get data from Audio Device Buffer
+        noSamplesOut = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        // Cast OK since only equality comparison
+        if (noSamp10ms != (unsigned int) noSamplesOut) {
+            // A short read is tolerated but flagged as a warning.
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "noSamp10ms (%u) != noSamplesOut (%d)", noSamp10ms,
+                         noSamplesOut);
+
+            if (_playWarning > 0) {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                             "  Pending play warning exists");
+            }
+            _playWarning = 1;
+        }
+        // Insert what we have in data buffer
+        memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, 2 * noSamplesOut);
+        //UnLock();
+
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+        //"_playQueueSeq (%u)  noSamplesOut (%d)", _playQueueSeq, noSamplesOut);
+        // write the buffer data we got from VoE into the device
+        res
+                = (*_slPlayerSimpleBufferQueue)->Enqueue(
+                                                         _slPlayerSimpleBufferQueue,
+                                                         _playQueueBuffer[_playQueueSeq],
+                                                         2 * noSamplesOut);
+        if (res != SL_RESULT_SUCCESS) {
+            // Log the SL result code so the failure cause is diagnosable.
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  player simpler buffer queue Enqueue failed, %d",
+                         res);
+            return;
+        }
+        // update the playout delay
+        UpdatePlayoutDelay(noSamplesOut);
+        // update the play buffer sequency
+        _playQueueSeq = (_playQueueSeq + 1) % N_PLAY_QUEUE_BUFFERS;
+    }
+}
+
+// Static trampoline registered with the OpenSL ES recorder buffer queue.
+// pContext carries the AudioDeviceAndroidOpenSLES instance; the call is
+// forwarded to the member handler.
+void AudioDeviceAndroidOpenSLES::RecorderSimpleBufferQueueCallback(
+    SLAndroidSimpleBufferQueueItf queueItf,
+    void *pContext) {
+    AudioDeviceAndroidOpenSLES* ptrThis =
+            static_cast<AudioDeviceAndroidOpenSLES*> (pContext);
+    ptrThis->RecorderSimpleBufferQueueCallbackHandler(queueItf);
+}
+
+// Runs on the OpenSL ES recorder callback thread when a queue buffer has
+// been filled. Distributes the captured 10 ms block over the _recBuffer
+// slots -- preferring the one partially filled slot, otherwise an empty
+// one -- then zeroes and re-enqueues the queue buffer and wakes the
+// recording thread (RecThreadProcess) via _timeEventRec.
+void AudioDeviceAndroidOpenSLES::RecorderSimpleBufferQueueCallbackHandler(
+    SLAndroidSimpleBufferQueueItf queueItf) {
+    WebRtc_Word32 res;
+    //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+    //"  RecorderSimpleBufferQueueCallbackHandler");
+    if (_recording) {
+        // Insert all data in temp buffer into recording buffers
+        // There is zero or one buffer partially full at any given time,
+        // all others are full or empty
+        // Full means filled with noSamp10ms samples.
+
+        const unsigned int noSamp10ms = _adbSampleRate / 100;
+        //        WebRtc_UWord16 queuePos = 0;
+        //        WebRtc_UWord16 checkQueuePos = 0;
+        unsigned int dataPos = 0;       // Read position in the queue buffer.
+        WebRtc_UWord16 bufPos = 0;      // Index while scanning _recBuffer.
+        WebRtc_Word16 insertPos = -1;   // Chosen slot, -1 = none found.
+        unsigned int nCopy = 0; // Number of samples to copy
+        //        WebRtc_Word32 isData = 0;
+
+        while (dataPos < noSamp10ms)//REC_BUF_SIZE_IN_SAMPLES) //noSamp10ms)
+
+        {
+            // Loop over all recording buffers or until we find the partially
+            // full buffer
+            // First choice is to insert into partially full buffer,
+            // second choice is to insert into empty buffer
+            bufPos = 0;
+            insertPos = -1;
+            nCopy = 0;
+            while (bufPos < N_REC_BUFFERS)
+            {
+                if ((_recLength[bufPos] > 0) && (_recLength[bufPos]
+                                < noSamp10ms))
+                {
+                    // Found the partially full buffer
+                    insertPos = static_cast<WebRtc_Word16> (bufPos);
+                    bufPos = N_REC_BUFFERS; // Don't need to search more
+                }
+                else if ((-1 == insertPos) && (0 == _recLength[bufPos]))
+                {
+                    // Found an empty buffer
+                    insertPos = static_cast<WebRtc_Word16> (bufPos);
+                }
+                ++bufPos;
+            }
+
+            if (insertPos > -1)
+            {
+                // We found a non-full buffer, copy data from the buffer queue
+                // o recBuffer
+                unsigned int dataToCopy = noSamp10ms - dataPos;
+                unsigned int currentRecLen = _recLength[insertPos];
+                unsigned int roomInBuffer = noSamp10ms - currentRecLen;
+                nCopy = (dataToCopy < roomInBuffer ? dataToCopy : roomInBuffer);
+                memcpy(&_recBuffer[insertPos][currentRecLen],
+                        &_recQueueBuffer[_recQueueSeq][dataPos],
+                        nCopy * sizeof(short));
+                if (0 == currentRecLen)
+                {
+                    // Freshly started buffer: stamp it with the next sequence
+                    // number so the recording thread drains in order.
+                    _recSeqNumber[insertPos] = _recCurrentSeq;
+                    ++_recCurrentSeq;
+                }
+                _recBufferTotalSize += nCopy;
+                // Has to be done last to avoid interrupt problems
+                // between threads
+                _recLength[insertPos] += nCopy;
+                dataPos += nCopy;
+            }
+            else
+            {
+                // Didn't find a non-full buffer
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                        _id, "  Could not insert into recording buffer");
+                if (_recWarning > 0)
+                {
+                    WEBRTC_TRACE(kTraceWarning,
+                            kTraceAudioDevice, _id,
+                            "  Pending rec warning exists");
+                }
+                _recWarning = 1;
+                dataPos = noSamp10ms; // Don't try to insert more
+            }
+        }
+
+        // clean the queue buffer
+        // Start with empty buffer
+        memset(_recQueueBuffer[_recQueueSeq], 0, 2 * REC_BUF_SIZE_IN_SAMPLES);
+        // write the empty buffer to the queue
+        res = (*_slRecorderSimpleBufferQueue)->Enqueue(
+              _slRecorderSimpleBufferQueue,
+              (void*) _recQueueBuffer[_recQueueSeq],
+              2 * noSamp10ms);
+        if (res != SL_RESULT_SUCCESS) {
+            // NOTE(review): failure to re-enqueue silently stops capture of
+            // this queue slot -- no trace, no error flag; confirm intended.
+            return;
+        }
+        // update the rec queue seq
+        _recQueueSeq = (_recQueueSeq + 1) % N_REC_QUEUE_BUFFERS;
+        // wake up the recording thread
+        _timeEventRec.Set();
+    }
+}
+
+// Terminates the whole process on any failed SL result.
+// NOTE(review): exit(-1) in library code gives callers no chance to
+// recover -- confirm this is only used for bring-up/debugging.
+void AudioDeviceAndroidOpenSLES::CheckErr(SLresult res) {
+    if (res != SL_RESULT_SUCCESS) {
+        // Debug printing to be placed here
+        exit(-1);
+    }
+}
+
+// Estimates the current playout delay (ms) from the fixed queue depth plus
+// the buffered sample count, and stores it in _playoutDelay for the VQE
+// data handed to the audio device buffer.
+// NOTE(review): the floating-point result is truncated into the
+// WebRtc_UWord16 _playoutDelay member.
+void AudioDeviceAndroidOpenSLES::UpdatePlayoutDelay(WebRtc_UWord32 nSamplePlayed) {
+    // currently just do some simple calculation, should we setup a timer for
+    // the callback to have a more accurate delay
+    // Android CCD asks for 10ms as the maximum warm output latency, so we
+    // simply add (nPlayQueueBuffer -1 + 0.5)*10ms
+    // This playout delay should be seldom changed
+    _playoutDelay = (N_PLAY_QUEUE_BUFFERS - 0.5) * 10 + N_PLAY_QUEUE_BUFFERS
+            * nSamplePlayed / (_adbSampleRate / 1000);
+}
+
+// Estimates the current recording delay (ms): a fixed 10 ms warm-input
+// allowance plus the duration represented by the recording queue buffers.
+// Stored in _recordingDelay for the VQE data.
+void AudioDeviceAndroidOpenSLES::UpdateRecordingDelay() {
+    // // Android CCD asks for 10ms as the maximum warm input latency,
+    // so we simply add 10ms
+    _recordingDelay = 10;
+    const WebRtc_UWord32 noSamp10ms = _adbSampleRate / 100;
+    //    if (_recBufferTotalSize > noSamp10ms)
+    //    {
+    _recordingDelay += (N_REC_QUEUE_BUFFERS * noSamp10ms) / (_adbSampleRate
+            / 1000);
+    //    }
+}
+
+// Selects the sampling rates used towards OpenSL ES and the audio device
+// buffer. The device-capability query below is entirely commented out, so
+// both directions are hard-coded to SL_SAMPLINGRATE_16 / 16 kHz. The local
+// variables declared at the top are only referenced by that commented-out
+// path. Returns 0 on success, -1 if the SL engine object is NULL.
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitSampleRate() {
+
+    WebRtc_Word32 res(-1);
+    WebRtc_Word32 numOutputs(0);
+    WebRtc_UWord32 headsetDeviceID(0);
+    WebRtc_UWord32 earpieceDeviceID(0);
+    bool headsetAvailable(false);
+    bool earpieceAvailable(false);
+    bool foundSampleRate(false);
+    WebRtc_UWord32 outputDeviceIDs[N_MAX_OUTPUT_DEVICES];
+    SLAudioOutputDescriptor audioOutputDescriptor;
+
+    if (_slEngineObject == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  SL Object is NULL");
+        return -1;
+    }
+
+    /*    // Get the Audio IO DEVICE CAPABILITIES interface
+     res = (*_slEngineObject)->GetInterface(_slEngineObject,
+     SL_IID_AUDIOIODEVICECAPABILITIES, (void*)&_slAudioIODeviceCapabilities);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get Device Capabilities interface");
+     return -1;
+     }
+     numOutputs = N_MAX_OUTPUT_DEVICES;
+     res = (*_slAudioIODeviceCapabilities)->GetAvailableAudioOutputs(
+     _slAudioIODeviceCapabilities, &numOutputs, outputDeviceIDs);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to get number of Output Devices");
+     return -1;
+     }
+
+     // Search for headset output and phone handsfree speaker device,
+     // we prefer headset to earpiece
+     for (int i=0;i<numOutputs; i++)
+     {
+     res = (*_slAudioIODeviceCapabilities)->QueryAudioOutputCapabilities(
+     _slAudioIODeviceCapabilities, outputDeviceIDs[i], &audioOutputDescriptor);
+     if ( res != SL_RESULT_SUCCESS)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  failed to query Output Devices info");
+     return -1;
+     }
+     if((audioOutputDescriptor.deviceConnection ==
+     SL_DEVCONNECTION_ATTACHED_WIRED)&&
+     (audioOutputDescriptor.deviceScope == SL_DEVSCOPE_USER)&&
+     (audioOutputDescriptor.deviceLocation == SL_DEVLOCATION_HEADSET))
+     {
+     headsetDeviceID = outputDeviceIDs[i];
+     headsetAvailable = true;
+     break;
+     }
+     else if((audioOutputDescriptor.deviceConnection ==
+     SL_DEVCONNECTION_INTEGRATED)&&
+     (audioOutputDescriptor.deviceScope == SL_DEVSCOPE_ENVIRONMENT)&&
+     (audioOutputDescriptor.deviceLocation == SL_DEVLOCATION_HANDSET))
+     {
+     earpieceDeviceID = outputDeviceIDs[i];
+     earpieceAvailable = true;
+     break;
+     }
+     }
+     // Neither headset output nor phone handsfree speaker is available
+     if (headsetAvailable == false && earpieceAvailable == false)
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  No playout device");
+     return -1;
+     }
+
+     _sampleRateInMilliHz = SL_SAMPLINGRATE_16;
+     if (audioOutputDescriptor.isFreqRangeContinuous == SL_BOOLEAN_FALSE)
+     {
+     while (!foundSampleRate)
+     {
+     for (int i=0; i<audioOutputDescriptor.numOfSamplingRatesSupported; i++)
+     {
+     if (audioOutputDescriptor.samplingRatesSupported[i]
+     == _sampleRateInMilliHz) // supported sampling rate in milliHertz
+     {
+     switch (_sampleRateInMilliHz)
+     {
+     case SL_SAMPLINGRATE_44_1:
+     _adbSampleRate = 44000;
+     break;
+     case SL_SAMPLINGRATE_16:
+     _adbSampleRate = 16000;
+     break;
+     case SL_SAMPLINGRATE_8:
+     _adbSampleRate = 8000;
+     break;
+     default: // error
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  SampleRate(%d) is not supported", _sampleRateInMilliHz);
+     return -1;
+     } // switch
+     foundSampleRate = true;
+     break;
+     } //if(audioOutputDescriptor.samplingRatesSupported[i] == _sampleRate
+     } //for (int i=0; i<audioOutputDescriptor.numOfSamplingRatesSupported; i++)
+     switch (_sampleRateInMilliHz)
+     {
+     case SL_SAMPLINGRATE_16:
+     _sampleRateInMilliHz = SL_SAMPLINGRATE_44_1;
+     break;
+     case SL_SAMPLINGRATE_44_1:
+     _sampleRateInMilliHz = SL_SAMPLINGRATE_8;
+     break;
+     default:
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  SampleRate is not supported");
+     return -1;
+     } // switch
+     } // while (!foundSampleRate)
+     }
+     else //audioOutputDescriptor.isFreqRangeContinuous == SL_BOOLEAN_TRUE
+     {
+     // minSampleRate < SL_SAMPLINGRATE_16 < maxSampleRate
+     if ((audioOutputDescriptor.minSampleRate < SL_SAMPLINGRATE_16) &&
+     (SL_SAMPLINGRATE_16 < audioOutputDescriptor.maxSampleRate))
+     {
+     _adbSampleRate = 16000;
+     } // minSampleRate < SL_SAMPLINGRATE_44_1 < maxSampleRate
+     else if((audioOutputDescriptor.minSampleRate < SL_SAMPLINGRATE_44_1) &&
+     (SL_SAMPLINGRATE_44_1 < audioOutputDescriptor.maxSampleRate))
+     {
+     _adbSampleRate = 44000;
+     } // minSampleRate < SL_SAMPLINGRATE_8 < maxSampleRate
+     else if ((audioOutputDescriptor.minSampleRate < SL_SAMPLINGRATE_8) &&
+     (SL_SAMPLINGRATE_8 < audioOutputDescriptor.maxSampleRate))
+     {
+     _adbSampleRate = 8000;
+     }
+     else
+     {
+     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+     "  SampleRate is not supported");
+     return -1;
+     }
+     } // else
+     */
+    // Hard-coded: 16 kHz for both mic and speaker directions.
+    _samplingRateIn = SL_SAMPLINGRATE_16;
+    _samplingRateOut = SL_SAMPLINGRATE_16;
+    _adbSampleRate = 16000;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  sample rate set to (%d)", _adbSampleRate);
+    return 0;
+
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+// ThreadWrapper entry point for the recording thread; pThis is the
+// AudioDeviceAndroidOpenSLES instance.
+bool AudioDeviceAndroidOpenSLES::RecThreadFunc(void* pThis) {
+    return (static_cast<AudioDeviceAndroidOpenSLES*> (pThis)->RecThreadProcess());
+}
+
+// One iteration of the recording thread: waits up to 100 ms for the
+// recorder callback to signal a completed 10 ms block, then drains every
+// full _recBuffer slot -- lowest sequence number first -- into the audio
+// device buffer. Returns true so the ThreadWrapper keeps calling it.
+bool AudioDeviceAndroidOpenSLES::RecThreadProcess() {
+
+    //    Lock();
+    // Wait for 100ms for the signal from device callback
+    // In case no callback comes in 100ms, we check the buffer anyway
+    _timeEventRec.Wait(100);
+
+    int bufPos = 0;
+    unsigned int lowestSeq = 0;
+    int lowestSeqBufPos = 0;
+    bool foundBuf = true;
+    const unsigned int noSamp10ms = _adbSampleRate / 100;
+
+    while (foundBuf)
+    {
+        // Check if we have any buffer with data to insert into the
+        // Audio Device Buffer,
+        // and find the one with the lowest seq number
+        foundBuf = false;
+
+        for (bufPos = 0; bufPos < N_REC_BUFFERS; ++bufPos)
+        {
+            // A buffer is "full" exactly when it holds noSamp10ms samples.
+            if (noSamp10ms == _recLength[bufPos])
+            {
+                if (!foundBuf) {
+                    lowestSeq = _recSeqNumber[bufPos];
+                    lowestSeqBufPos = bufPos;
+                    foundBuf = true;
+                } else if (_recSeqNumber[bufPos] < lowestSeq)
+                {
+                    lowestSeq = _recSeqNumber[bufPos];
+                    lowestSeqBufPos = bufPos;
+                }
+            }
+        } // for
+
+        // Insert data into the Audio Device Buffer if found any
+        if (foundBuf)
+        {
+            UpdateRecordingDelay();
+            // Set the recorded buffer
+            _ptrAudioBuffer->SetRecordedBuffer(_recBuffer[lowestSeqBufPos],
+                                               noSamp10ms);
+
+            // Don't need to set the current mic level in ADB since we only
+            // support digital AGC,
+            // and besides we cannot get or set the iPhone mic level anyway.
+
+            // Set VQE info, use clockdrift == 0
+            _ptrAudioBuffer->SetVQEData(_playoutDelay, _recordingDelay, 0);
+
+            // Deliver recorded samples at specified sample rate, mic level
+            // etc. to the observer using callback
+            //UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            //Lock();
+
+            // Make buffer available
+            _recSeqNumber[lowestSeqBufPos] = 0;
+            _recBufferTotalSize -= _recLength[lowestSeqBufPos];
+            // Must be done last to avoid interrupt problems between threads
+            _recLength[lowestSeqBufPos] = 0;
+        }
+
+    } // while (foundBuf)
+    //UnLock();
+    return true;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/android/audio_device_android_opensles.h b/trunk/src/modules/audio_device/main/source/android/audio_device_android_opensles.h
new file mode 100644
index 0000000..974ae90
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/audio_device_android_opensles.h
@@ -0,0 +1,310 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_OPENSLES_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_OPENSLES_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+
+#include <jni.h> // For accessing AudioDeviceAndroid.java
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+namespace webrtc
+{
+class EventWrapper;
+
+const WebRtc_UWord32 N_MAX_INTERFACES = 3;
+const WebRtc_UWord32 N_MAX_OUTPUT_DEVICES = 6;
+const WebRtc_UWord32 N_MAX_INPUT_DEVICES = 3;
+
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000;//44000;  // Default fs
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000;//44000; // Default fs
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
+
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = 480;
+
+// Number of the buffers in playout queue
+const WebRtc_UWord16 N_PLAY_QUEUE_BUFFERS = 2;
+// Number of buffers in recording queue
+const WebRtc_UWord16 N_REC_QUEUE_BUFFERS = 2;
+// Number of 10 ms recording blocks in rec buffer
+const WebRtc_UWord16 N_REC_BUFFERS = 20;
+
+class ThreadWrapper;
+
+// OpenSL ES based implementation of AudioDeviceGeneric for Android.
+// Playout and recording go through Android simple buffer queues; a
+// dedicated recording thread (RecThreadFunc/RecThreadProcess) delivers
+// captured 10 ms blocks to the attached AudioDeviceBuffer.
+class AudioDeviceAndroidOpenSLES: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceAndroidOpenSLES(const WebRtc_Word32 id);
+    ~AudioDeviceAndroidOpenSLES();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32
+            ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32
+            PlayoutDeviceName(WebRtc_UWord16 index,
+                              char name[kAdmMaxDeviceNameSize],
+                              char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32
+            RecordingDeviceName(WebRtc_UWord16 index,
+                                char name[kAdmMaxDeviceNameSize],
+                                char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32
+            SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32
+            SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    // NOTE(review): public data member dropped between the mixer-init
+    // methods; looks accidental -- consider moving it to the private
+    // section with the other SL interface handles.
+    SLPlayItf playItf;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32
+            MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32
+            SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+                             WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type,
+                                        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+    // Error and warning information
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+    // Attach audio buffer
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+    // Speaker audio routing
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+
+private:
+    // Lock
+    void Lock()
+    {
+        _critSect.Enter();
+    };
+    void UnLock()
+    {
+        _critSect.Leave();
+    };
+
+    // OpenSL ES buffer-queue callbacks: static trampolines forwarding to
+    // the member handlers via pContext.
+    static void PlayerSimpleBufferQueueCallback(
+            SLAndroidSimpleBufferQueueItf queueItf,
+            void *pContext);
+    void PlayerSimpleBufferQueueCallbackHandler(
+            SLAndroidSimpleBufferQueueItf queueItf);
+    static void RecorderSimpleBufferQueueCallback(
+            SLAndroidSimpleBufferQueueItf queueItf,
+            void *pContext);
+    void RecorderSimpleBufferQueueCallbackHandler(
+            SLAndroidSimpleBufferQueueItf queueItf);
+    void CheckErr(SLresult res);
+
+    // Delay updates
+    void UpdateRecordingDelay();
+    void UpdatePlayoutDelay(WebRtc_UWord32 nSamplePlayed);
+
+    // Init
+    WebRtc_Word32 InitSampleRate();
+
+    // Threads
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+    // Misc
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+
+    // audio unit
+    SLObjectItf _slEngineObject;
+
+    // playout device
+    SLObjectItf _slPlayer;
+    SLEngineItf _slEngine;
+    SLPlayItf _slPlayerPlay;
+    SLAndroidSimpleBufferQueueItf _slPlayerSimpleBufferQueue;
+    SLObjectItf _slOutputMixObject;
+    SLVolumeItf _slSpeakerVolume;
+
+    // recording device
+    SLObjectItf _slRecorder;
+    SLRecordItf _slRecorderRecord;
+    SLAudioIODeviceCapabilitiesItf _slAudioIODeviceCapabilities;
+    SLAndroidSimpleBufferQueueItf _slRecorderSimpleBufferQueue;
+    SLDeviceVolumeItf _slMicVolume;
+
+    WebRtc_UWord32 _micDeviceId;
+
+    // Events
+    EventWrapper& _timeEventRec;
+    // Threads
+    ThreadWrapper* _ptrThreadRec;
+    WebRtc_UWord32 _recThreadID;
+    // TODO(xians), remove the following flag
+    bool _recThreadIsInitialized;
+
+    // Playout buffer
+    WebRtc_Word8 _playQueueBuffer[N_PLAY_QUEUE_BUFFERS][2
+            * PLAY_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 _playQueueSeq;
+    // Recording buffer
+    WebRtc_Word8 _recQueueBuffer[N_REC_QUEUE_BUFFERS][2
+            * REC_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 _recQueueSeq;
+    WebRtc_Word8 _recBuffer[N_REC_BUFFERS][2*REC_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 _recLength[N_REC_BUFFERS];
+    WebRtc_UWord32 _recSeqNumber[N_REC_BUFFERS];
+    WebRtc_UWord32 _recCurrentSeq;
+    // Current total size all data in buffers, used for delay estimate
+    WebRtc_UWord32 _recBufferTotalSize;
+
+    // States
+    bool _recordingDeviceIsSpecified;
+    bool _playoutDeviceIsSpecified;
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _micIsInitialized;
+    bool _speakerIsInitialized;
+
+    // Warnings and errors
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    // Delay
+    WebRtc_UWord16 _playoutDelay;
+    WebRtc_UWord16 _recordingDelay;
+
+    // AGC state
+    bool _AGC;
+
+    // The sampling rate to use with Audio Device Buffer
+    WebRtc_UWord32 _adbSampleRate;
+    // Stored device properties
+    WebRtc_UWord32 _samplingRateIn; // Sampling frequency for Mic
+    WebRtc_UWord32 _samplingRateOut; // Sampling frequency for Speaker
+    WebRtc_UWord32 _maxSpeakerVolume; // The maximum speaker volume value
+    WebRtc_UWord32 _minSpeakerVolume; // The minimum speaker volume value
+    bool _loudSpeakerOn;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_OPENSLES_H
diff --git a/trunk/src/modules/audio_device/main/source/android/audio_device_utility_android.cc b/trunk/src/modules/audio_device/main/source/android/audio_device_utility_android.cc
new file mode 100644
index 0000000..84b4c60
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/audio_device_utility_android.cc
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device utility implementation
+ */
+
+#include "audio_device_utility_android.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+// Constructs the Android audio device utility: allocates the critical
+// section used during destruction and records the trace id.
+AudioDeviceUtilityAndroid::AudioDeviceUtilityAndroid(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()), _id(id),
+    _lastError(AudioDeviceModule::kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+// Destructor: acquires and releases the lock once so that any thread
+// currently inside it finishes before the critical section is deleted.
+AudioDeviceUtilityAndroid::~AudioDeviceUtilityAndroid()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    {
+        // Scoped lock/unlock before the delete below.
+        CriticalSectionScoped lock(_critSect);
+    }
+
+    delete &_critSect;
+}
+
+// Logs the operating system name; no other initialization is needed on
+// Android. Always returns 0.
+WebRtc_Word32 AudioDeviceUtilityAndroid::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "Android");
+
+    return 0;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/android/audio_device_utility_android.h b/trunk/src/modules/audio_device/main/source/android/audio_device_utility_android.h
new file mode 100644
index 0000000..81f685a
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/audio_device_utility_android.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device utility interface
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityAndroid: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityAndroid(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityAndroid();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    AudioDeviceModule::ErrorCode _lastError;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
diff --git a/trunk/src/modules/audio_device/main/source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java b/trunk/src/modules/audio_device/main/source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
new file mode 100644
index 0000000..b56085b
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
@@ -0,0 +1,509 @@
+/*

+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.

+ *

+ *  Use of this source code is governed by a BSD-style license

+ *  that can be found in the LICENSE file in the root of the source

+ *  tree. An additional intellectual property rights grant can be found

+ *  in the file PATENTS.  All contributing project authors may

+ *  be found in the AUTHORS file in the root of the source tree.

+ */

+

+/*

+ *  Android audio device test app

+ */

+

+package org.webrtc.voiceengine;

+

+import java.nio.ByteBuffer;

+import java.util.concurrent.locks.ReentrantLock;

+

+import android.content.Context;

+import android.media.AudioFormat;

+import android.media.AudioManager;

+import android.media.AudioRecord;

+import android.media.AudioTrack;

+import android.util.Log;

+

+

+class AudioDeviceAndroid {

+    private AudioTrack _audioTrack = null;

+    private AudioRecord _audioRecord = null;

+

+    private Context _context;

+    private AudioManager _audioManager;

+

+    private ByteBuffer _playBuffer;

+    private ByteBuffer _recBuffer;

+    private byte[] _tempBufPlay;

+    private byte[] _tempBufRec;

+

+    private final ReentrantLock _playLock = new ReentrantLock();

+    private final ReentrantLock _recLock = new ReentrantLock();

+

+    private boolean _doPlayInit = true;

+    private boolean _doRecInit = true;

+    private boolean _isRecording = false;

+    private boolean _isPlaying = false;

+

+    private int _bufferedRecSamples = 0;

+    private int _bufferedPlaySamples = 0;

+    private int _playPosition = 0;

+

+    AudioDeviceAndroid() {

+        try {

+            _playBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48

+                                                              // kHz

+            _recBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48

+                                                             // kHz

+        } catch (Exception e) {

+            DoLog(e.getMessage());

+        }

+

+        _tempBufPlay = new byte[2 * 480];

+        _tempBufRec = new byte[2 * 480];

+    }

+

+    @SuppressWarnings("unused")

+    private int InitRecording(int audioSource, int sampleRate) {

+        // get the minimum buffer size that can be used

+        int minRecBufSize =

+                        AudioRecord.getMinBufferSize(sampleRate,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+

+        // DoLog("min rec buf size is " + minRecBufSize);

+

+        // double size to be more safe

+        int recBufSize = minRecBufSize * 2;

+        _bufferedRecSamples = (5 * sampleRate) / 200;

+        // DoLog("rough rec delay set to " + _bufferedRecSamples);

+

+        // release the object

+        if (_audioRecord != null) {

+            _audioRecord.release();

+            _audioRecord = null;

+        }

+

+        try {

+            _audioRecord = new AudioRecord(

+                            audioSource,

+                            sampleRate,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            recBufSize);

+

+        } catch (Exception e) {

+            DoLog(e.getMessage());

+            return -1;

+        }

+

+        // check that the audioRecord is ready to be used

+        if (_audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {

+            // DoLog("rec not initialized " + sampleRate);

+            return -1;

+        }

+

+        // DoLog("rec sample rate set to " + sampleRate);

+

+        return _bufferedRecSamples;

+    }

+

+    @SuppressWarnings("unused")

+    private int StartRecording() {

+        if (_isPlaying == false) {

+            SetAudioMode(true);

+        }

+

+        // start recording

+        try {

+            _audioRecord.startRecording();

+

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+            return -1;

+        }

+

+        _isRecording = true;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int InitPlayback(int sampleRate) {

+        // get the minimum buffer size that can be used

+        int minPlayBufSize =

+                        AudioTrack.getMinBufferSize(sampleRate,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+

+        // DoLog("min play buf size is " + minPlayBufSize);

+

+        int playBufSize = minPlayBufSize;

+        if (playBufSize < 6000) {

+            playBufSize *= 2;

+        }

+        _bufferedPlaySamples = 0;

+        // DoLog("play buf size is " + playBufSize);

+

+        // release the object

+        if (_audioTrack != null) {

+            _audioTrack.release();

+            _audioTrack = null;

+        }

+

+        try {

+            _audioTrack = new AudioTrack(

+                            AudioManager.STREAM_VOICE_CALL,

+                            sampleRate,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            playBufSize, AudioTrack.MODE_STREAM);

+        } catch (Exception e) {

+            DoLog(e.getMessage());

+            return -1;

+        }

+

+        // check that the audioRecord is ready to be used

+        if (_audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {

+            // DoLog("play not initialized " + sampleRate);

+            return -1;

+        }

+

+        // DoLog("play sample rate set to " + sampleRate);

+

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        // Return max playout volume

+        if (_audioManager == null) {

+            // Don't know the max volume but still init is OK for playout,

+            // so we should not return error.

+            return 0;

+        }

+        return _audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);

+    }

+

+    @SuppressWarnings("unused")

+    private int StartPlayback() {

+        if (_isRecording == false) {

+            SetAudioMode(true);

+        }

+

+        // start playout

+        try {

+            _audioTrack.play();

+

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+            return -1;

+        }

+

+        _isPlaying = true;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int StopRecording() {

+        _recLock.lock();

+        try {

+            // only stop if we are recording

+            if (_audioRecord.getRecordingState() ==

+              AudioRecord.RECORDSTATE_RECORDING) {

+                // stop recording

+                try {

+                    _audioRecord.stop();

+                } catch (IllegalStateException e) {

+                    e.printStackTrace();

+                    return -1;

+                }

+            }

+

+            // release the object

+            _audioRecord.release();

+            _audioRecord = null;

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _doRecInit = true;

+            _recLock.unlock();

+        }

+

+        if (_isPlaying == false) {

+            SetAudioMode(false);

+        }

+

+        _isRecording = false;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int StopPlayback() {

+        _playLock.lock();

+        try {

+            // only stop if we are playing

+            if (_audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {

+                // stop playout

+                try {

+                    _audioTrack.stop();

+                } catch (IllegalStateException e) {

+                    e.printStackTrace();

+                    return -1;

+                }

+

+                // flush the buffers

+                _audioTrack.flush();

+            }

+

+            // release the object

+            _audioTrack.release();

+            _audioTrack = null;

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _doPlayInit = true;

+            _playLock.unlock();

+        }

+

+        if (_isRecording == false) {

+            SetAudioMode(false);

+        }

+

+        _isPlaying = false;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int PlayAudio(int lengthInBytes) {

+

+        int bufferedSamples = 0;

+

+        _playLock.lock();

+        try {

+            if (_audioTrack == null) {

+                return -2; // We have probably closed down while waiting for

+                           // play lock

+            }

+

+            // Set priority, only do once

+            if (_doPlayInit == true) {

+                try {

+                    android.os.Process.setThreadPriority(

+                        android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+                } catch (Exception e) {

+                    DoLog("Set play thread priority failed: " + e.getMessage());

+                }

+                _doPlayInit = false;

+            }

+

+            int written = 0;

+            _playBuffer.get(_tempBufPlay);

+            written = _audioTrack.write(_tempBufPlay, 0, lengthInBytes);

+            _playBuffer.rewind(); // Reset the position to start of buffer

+

+            // DoLog("Wrote data to sndCard");

+

+            // increase by number of written samples

+            _bufferedPlaySamples += (written >> 1);

+

+            // decrease by number of played samples

+            int pos = _audioTrack.getPlaybackHeadPosition();

+            if (pos < _playPosition) { // wrap or reset by driver

+                _playPosition = 0; // reset

+            }

+            _bufferedPlaySamples -= (pos - _playPosition);

+            _playPosition = pos;

+

+            if (!_isRecording) {

+                bufferedSamples = _bufferedPlaySamples;

+            }

+

+            if (written != lengthInBytes) {

+                // DoLog("Could not write all data to sc (written = " + written

+                // + ", length = " + lengthInBytes + ")");

+                return -1;

+            }

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _playLock.unlock();

+        }

+

+        return bufferedSamples;

+    }

+

+    @SuppressWarnings("unused")

+    private int RecordAudio(int lengthInBytes) {

+        _recLock.lock();

+

+        try {

+            if (_audioRecord == null) {

+                return -2; // We have probably closed down while waiting for rec

+                           // lock

+            }

+

+            // Set priority, only do once

+            if (_doRecInit == true) {

+                try {

+                    android.os.Process.setThreadPriority(

+                        android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+                } catch (Exception e) {

+                    DoLog("Set rec thread priority failed: " + e.getMessage());

+                }

+                _doRecInit = false;

+            }

+

+            int readBytes = 0;

+            _recBuffer.rewind(); // Reset the position to start of buffer

+            readBytes = _audioRecord.read(_tempBufRec, 0, lengthInBytes);

+            // DoLog("read " + readBytes + "from SC");

+            _recBuffer.put(_tempBufRec);

+

+            if (readBytes != lengthInBytes) {

+                // DoLog("Could not read all data from sc (read = " + readBytes

+                // + ", length = " + lengthInBytes + ")");

+                return -1;

+            }

+

+        } catch (Exception e) {

+            DoLogErr("RecordAudio try failed: " + e.getMessage());

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _recLock.unlock();

+        }

+

+        return (_bufferedPlaySamples);

+    }

+

+    @SuppressWarnings("unused")

+    private int SetPlayoutSpeaker(boolean loudspeakerOn) {

+        // create audio manager if needed

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        if (_audioManager == null) {

+            DoLogErr("Could not change audio routing - no audio manager");

+            return -1;

+        }

+

+        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);

+

+        if ((3 == apiLevel) || (4 == apiLevel)) {

+            // 1.5 and 1.6 devices

+            if (loudspeakerOn) {

+                // route audio to back speaker

+                _audioManager.setMode(AudioManager.MODE_NORMAL);

+            } else {

+                // route audio to earpiece

+                _audioManager.setMode(AudioManager.MODE_IN_CALL);

+            }

+        } else {

+            // 2.x devices

+            if ((android.os.Build.BRAND.equals("Samsung") ||

+                            android.os.Build.BRAND.equals("samsung")) &&

+                            ((5 == apiLevel) || (6 == apiLevel) ||

+                            (7 == apiLevel))) {

+                // Samsung 2.0, 2.0.1 and 2.1 devices

+                if (loudspeakerOn) {

+                    // route audio to back speaker

+                    _audioManager.setMode(AudioManager.MODE_IN_CALL);

+                    _audioManager.setSpeakerphoneOn(loudspeakerOn);

+                } else {

+                    // route audio to earpiece

+                    _audioManager.setSpeakerphoneOn(loudspeakerOn);

+                    _audioManager.setMode(AudioManager.MODE_NORMAL);

+                }

+            } else {

+                // Non-Samsung and Samsung 2.2 and up devices

+                _audioManager.setSpeakerphoneOn(loudspeakerOn);

+            }

+        }

+

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int SetPlayoutVolume(int level) {

+

+        // create audio manager if needed

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        int retVal = -1;

+

+        if (_audioManager != null) {

+            _audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL,

+                            level, 0);

+            retVal = 0;

+        }

+

+        return retVal;

+    }

+

+    @SuppressWarnings("unused")

+    private int GetPlayoutVolume() {

+

+        // create audio manager if needed

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        int level = -1;

+

+        if (_audioManager != null) {

+            level = _audioManager.getStreamVolume(

+                AudioManager.STREAM_VOICE_CALL);

+        }

+

+        return level;

+    }

+

+    private void SetAudioMode(boolean startCall) {

+        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);

+

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        if (_audioManager == null) {

+            DoLogErr("Could not set audio mode - no audio manager");

+            return;

+        }

+

+        // ***IMPORTANT*** When the API level for honeycomb (H) has been

+        // decided,

+        // the condition should be changed to include API level 8 to H-1.

+        if ((android.os.Build.BRAND.equals("Samsung") || android.os.Build.BRAND

+                        .equals("samsung")) && (8 == apiLevel)) {

+            // Set Samsung specific VoIP mode for 2.2 devices

+            int mode =

+                            (startCall ? 4 /* VoIP mode */

+                                            : AudioManager.MODE_NORMAL);

+            _audioManager.setMode(mode);

+            if (_audioManager.getMode() != mode) {

+                DoLogErr("Could not set audio mode for Samsung device");

+            }

+        }

+    }

+

+    final String logTag = "WebRTC AD java";

+

+    private void DoLog(String msg) {

+        Log.d(logTag, msg);

+    }

+

+    private void DoLogErr(String msg) {

+        Log.e(logTag, msg);

+    }

+}

diff --git a/trunk/src/modules/audio_device/main/source/audio_device.gypi b/trunk/src/modules/audio_device/main/source/audio_device.gypi
new file mode 100644
index 0000000..2f3f9b1
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device.gypi
@@ -0,0 +1,210 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'audio_device',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '.',
+        '../../../interface',
+        '../interface',
+        'dummy', # dummy audio device
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../../../../',
+          '../../../interface',
+          '../interface',
+        ],
+      },
+      # TODO(xians): Rename files to e.g. *_linux.{ext}, remove sources in conditions section
+      'sources': [
+        '../interface/audio_device.h',
+        '../interface/audio_device_defines.h',
+        'audio_device_buffer.cc',
+        'audio_device_buffer.h',
+        'audio_device_generic.cc',
+        'audio_device_generic.h',
+        'audio_device_utility.cc',
+        'audio_device_utility.h',
+        'audio_device_impl.cc',
+        'audio_device_impl.h',
+        'audio_device_config.h',
+        'dummy/audio_device_dummy.cc',
+        'dummy/audio_device_dummy.h',
+        'dummy/audio_device_utility_dummy.cc',
+        'dummy/audio_device_utility_dummy.h',
+      ],
+      'conditions': [
+        ['OS=="linux"', {
+          'include_dirs': [
+            'linux',
+          ],
+        }], # OS==linux
+        ['OS=="mac"', {
+            'include_dirs': [
+              'mac',
+            ],
+        }], # OS==mac
+        ['OS=="win"', {
+            'include_dirs': [
+              'win',
+              '../../../../../..',
+            ],
+        }],
+        ['OS=="android"', {
+            'include_dirs': [
+              'android',
+            ],
+        }], # OS==android
+        ['include_internal_audio_device==0', {
+          'defines': [
+            'WEBRTC_DUMMY_AUDIO_BUILD',
+          ],
+        }],
+        ['include_internal_audio_device==1', {
+          'sources': [
+            'linux/alsasymboltable_linux.cc',
+            'linux/alsasymboltable_linux.h',
+            'linux/audio_device_alsa_linux.cc',
+            'linux/audio_device_alsa_linux.h',
+            'linux/audio_device_utility_linux.cc',
+            'linux/audio_device_utility_linux.h',
+            'linux/audio_mixer_manager_alsa_linux.cc',
+            'linux/audio_mixer_manager_alsa_linux.h',
+            'linux/latebindingsymboltable_linux.cc',
+            'linux/latebindingsymboltable_linux.h',
+            'mac/audio_device_mac.cc',
+            'mac/audio_device_mac.h',
+            'mac/audio_device_utility_mac.cc',
+            'mac/audio_device_utility_mac.h',
+            'mac/audio_mixer_manager_mac.cc',
+            'mac/audio_mixer_manager_mac.h',
+            'mac/portaudio/pa_memorybarrier.h',
+            'mac/portaudio/pa_ringbuffer.c',
+            'mac/portaudio/pa_ringbuffer.h',
+            'win/audio_device_core_win.cc',
+            'win/audio_device_core_win.h',
+            'win/audio_device_wave_win.cc',
+            'win/audio_device_wave_win.h',
+            'win/audio_device_utility_win.cc',
+            'win/audio_device_utility_win.h',
+            'win/audio_mixer_manager_win.cc',
+            'win/audio_mixer_manager_win.h',
+            'android/audio_device_android_opensles.cc',
+            'android/audio_device_android_opensles.h',
+            'android/audio_device_utility_android.cc',
+            'android/audio_device_utility_android.h',
+          ],
+          'conditions': [
+            ['OS=="linux"', {
+              'defines': [
+                'LINUX_ALSA',
+              ],
+              'link_settings': {
+                'libraries': [
+                  '-ldl',
+                  '-lasound',
+                ],
+              },
+              'conditions': [
+                ['include_pulse_audio==1', {
+                  'defines': [
+                    'LINUX_PULSE',
+                  ],
+                  'sources': [
+                    'linux/audio_device_pulse_linux.cc',
+                    'linux/audio_device_pulse_linux.h',
+                    'linux/audio_mixer_manager_pulse_linux.cc',
+                    'linux/audio_mixer_manager_pulse_linux.h',
+                    'linux/pulseaudiosymboltable_linux.cc',
+                    'linux/pulseaudiosymboltable_linux.h',
+                  ],
+                  'link_settings': {
+                    'libraries': [
+                      '-lpulse',
+                    ],
+                  },
+                }],
+              ],
+            }],
+            ['OS=="mac"', {
+              'link_settings': {
+                'libraries': [
+                  '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
+                  '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
+                ],
+              },
+            }],
+            ['OS=="win"', {
+              'link_settings': {
+                'libraries': [
+                  # Required for the built-in WASAPI AEC.
+                  '-ldmoguids.lib',
+                  '-lwmcodecdspuuid.lib',
+                  '-lamstrmid.lib',
+                  '-lmsdmo.lib',
+                ],
+              },
+            }],
+          ], # conditions
+        }], # include_internal_audio_device==1
+      ], # conditions
+    },
+  ],
+  # Exclude the test targets when building with chromium.
+  'conditions': [   
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'audio_device_test_api',
+         'type': 'executable',
+         'dependencies': [
+            'audio_device',
+            'webrtc_utility',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+            '../test/audio_device_test_api.cc',
+            '../test/audio_device_test_defines.h',
+          ],
+        },
+        {
+          'target_name': 'audio_device_test_func',
+          'type': 'executable',
+          'dependencies': [
+            'audio_device',
+            'webrtc_utility',
+            '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/../test/test.gyp:test_support',
+          ],
+          'sources': [
+            '../test/audio_device_test_func.cc',
+            '../test/audio_device_test_defines.h',
+            '../test/func_test_manager.cc',
+            '../test/func_test_manager.h',
+          ],
+        },
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_buffer.cc b/trunk/src/modules/audio_device/main/source/audio_device_buffer.cc
new file mode 100644
index 0000000..6edec6a
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_buffer.cc
@@ -0,0 +1,652 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "critical_section_wrapper.h"
+#include "audio_device_buffer.h"
+#include "audio_device_utility.h"
+#include "audio_device_config.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <cassert>
+
+#include "signal_processing_library.h"
+
+namespace webrtc {
+
+// ----------------------------------------------------------------------------
+//  ctor
+//
+//  Creates an empty buffer. The two critical sections and the two file
+//  wrappers are heap-allocated here and held as references; they are
+//  released with delete in the destructor. A valid module id is assigned
+//  later via SetId(); -1 marks "not yet set".
+// ----------------------------------------------------------------------------
+
+AudioDeviceBuffer::AudioDeviceBuffer() :
+    _id(-1),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrCbAudioTransport(NULL),
+    _recSampleRate(0),
+    _playSampleRate(0),
+    _recChannels(0),
+    _playChannels(0),
+    _recChannel(AudioDeviceModule::kChannelBoth),
+    _recBytesPerSample(0),
+    _playBytesPerSample(0),
+    _recSamples(0),
+    _recSize(0),
+    _playSamples(0),
+    _playSize(0),
+    _recFile(*FileWrapper::Create()),
+    _playFile(*FileWrapper::Create()),
+    _currentMicLevel(0),
+    _newMicLevel(0),
+    _playDelayMS(0),
+    _recDelayMS(0),
+    _clockDrift(0),
+    _measureDelay(false),    // should always be 'false' (EXPERIMENTAL)
+    _pulseList(),
+    _lastPulseTime(AudioDeviceUtility::GetTimeInMS())
+{
+    // valid ID will be set later by SetId, use -1 for now
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s created", __FUNCTION__);
+    // Zero both 10 ms scratch buffers so no stale data can ever be delivered.
+    memset(_recBuffer, 0, kMaxBufferSizeBytes);
+    memset(_playBuffer, 0, kMaxBufferSizeBytes);
+}
+
+// ----------------------------------------------------------------------------
+//  dtor
+//
+//  Flushes and closes both debug-dump files, clears the experimental pulse
+//  list and frees the heap objects created in the constructor. The scoped
+//  lock is released (inner scope ends) before the critical sections
+//  themselves are deleted.
+// ----------------------------------------------------------------------------
+
+AudioDeviceBuffer::~AudioDeviceBuffer()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        _recFile.Flush();
+        _recFile.CloseFile();
+        delete &_recFile;
+
+        _playFile.Flush();
+        _playFile.CloseFile();
+        delete &_playFile;
+
+        _EmptyList();
+    }
+
+    delete &_critSect;
+    delete &_critSectCb;
+}
+
+// ----------------------------------------------------------------------------
+//  SetId
+//
+//  Assigns the module id used in trace output. Note the unsigned argument
+//  is stored in the signed _id member and logged with %d.
+// ----------------------------------------------------------------------------
+
+void AudioDeviceBuffer::SetId(WebRtc_UWord32 id)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "AudioDeviceBuffer::SetId(id=%d)", id);
+    _id = id;
+}
+
+// ----------------------------------------------------------------------------
+//  RegisterAudioCallback
+//
+//  Installs (or clears, when NULL) the AudioTransport callback used by
+//  DeliverRecordedData() and RequestPlayoutData(). Guarded by the dedicated
+//  callback lock so the transport is never swapped in the middle of a
+//  callback. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RegisterAudioCallback(AudioTransport* audioCallback)
+{
+
+    CriticalSectionScoped lock(_critSectCb);
+    _ptrCbAudioTransport = audioCallback;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+//
+//  Resets the experimental delay-measurement state; a no-op unless
+//  _measureDelay is enabled. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::InitPlayout()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_measureDelay)
+    {
+        // Restart the pulse bookkeeping for a fresh measurement session.
+        _EmptyList();
+        _lastPulseTime = AudioDeviceUtility::GetTimeInMS();
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+//
+//  Resets the experimental delay-measurement state; a no-op unless
+//  _measureDelay is enabled. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::InitRecording()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_measureDelay)
+    {
+        // Restart the pulse bookkeeping for a fresh measurement session.
+        _EmptyList();
+        _lastPulseTime = AudioDeviceUtility::GetTimeInMS();
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingSampleRate
+//
+//  Stores the recording sample rate (Hz) that is later passed to the
+//  AudioTransport in DeliverRecordedData(). Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordingSampleRate(WebRtc_UWord32 fsHz)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetRecordingSampleRate(fsHz=%u)", fsHz);
+
+    CriticalSectionScoped lock(_critSect);
+    _recSampleRate = fsHz;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutSampleRate
+//
+//  Stores the playout sample rate (Hz) that is later passed to the
+//  AudioTransport in RequestPlayoutData(). Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetPlayoutSampleRate(WebRtc_UWord32 fsHz)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetPlayoutSampleRate(fsHz=%u)", fsHz);
+
+    CriticalSectionScoped lock(_critSect);
+    _playSampleRate = fsHz;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingSampleRate
+//
+//  Returns the rate last set with SetRecordingSampleRate().
+//  NOTE(review): read without taking _critSect — presumably relies on an
+//  aligned 32-bit read being atomic; confirm.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RecordingSampleRate() const
+{
+    return _recSampleRate;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutSampleRate
+//
+//  Returns the rate last set with SetPlayoutSampleRate().
+//  NOTE(review): read without taking _critSect — presumably relies on an
+//  aligned 32-bit read being atomic; confirm.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::PlayoutSampleRate() const
+{
+    return _playSampleRate;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingChannels
+//
+//  Stores the number of recording channels and derives the frame size,
+//  assuming 16-bit (2-byte) PCM per channel. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordingChannels(WebRtc_UWord8 channels)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetRecordingChannels(channels=%u)", channels);
+
+    CriticalSectionScoped lock(_critSect);
+    _recChannels = channels;
+    _recBytesPerSample = 2*channels;  // 16 bits per sample in mono, 32 bits in stereo
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutChannels
+//
+//  Stores the number of playout channels and derives the frame size,
+//  assuming 16-bit (2-byte) PCM per channel. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetPlayoutChannels(WebRtc_UWord8 channels)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetPlayoutChannels(channels=%u)", channels);
+
+    CriticalSectionScoped lock(_critSect);
+    _playChannels = channels;
+    // 16 bits per sample in mono, 32 bits in stereo
+    _playBytesPerSample = 2*channels;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingChannel
+//
+//  Select which channel to use while recording.
+//  This API requires that stereo is enabled.
+//
+//  Note that the nChannel parameter in RecordedDataIsAvailable will be
+//  set to 2 even for kChannelLeft and kChannelRight. However, nBytesPerSample
+//  will be 2 instead of 4 for these cases.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordingChannel(const AudioDeviceModule::ChannelType channel)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    // Channel selection only makes sense when recording in stereo.
+    if (_recChannels == 1)
+    {
+        return -1;
+    }
+
+    if (channel == AudioDeviceModule::kChannelBoth)
+    {
+        // two bytes per channel
+        _recBytesPerSample = 4;
+    }
+    else
+    {
+        // only utilize one out of two possible channels (left or right)
+        _recBytesPerSample = 2;
+    }
+    _recChannel = channel;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingChannel
+//
+//  Returns the currently selected recording channel (left/right/both)
+//  through the out-parameter. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RecordingChannel(AudioDeviceModule::ChannelType& channel) const
+{
+    channel = _recChannel;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingChannels
+//
+//  Returns the channel count last set with SetRecordingChannels().
+// ----------------------------------------------------------------------------
+
+WebRtc_UWord8 AudioDeviceBuffer::RecordingChannels() const
+{
+    return _recChannels;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutChannels
+//
+//  Returns the channel count last set with SetPlayoutChannels().
+// ----------------------------------------------------------------------------
+
+WebRtc_UWord8 AudioDeviceBuffer::PlayoutChannels() const
+{
+    return _playChannels;
+}
+
+// ----------------------------------------------------------------------------
+//  SetCurrentMicLevel
+//
+//  Caches the current microphone level; it is forwarded to the transport
+//  in DeliverRecordedData(). Always returns 0.
+//  NOTE(review): written without taking a lock — presumably a single
+//  writer on the recording thread; confirm against the platform layers.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetCurrentMicLevel(WebRtc_UWord32 level)
+{
+    _currentMicLevel = level;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  NewMicLevel
+//
+//  Returns the mic level suggested by the last RecordedDataIsAvailable()
+//  callback (see DeliverRecordedData()).
+// ----------------------------------------------------------------------------
+
+WebRtc_UWord32 AudioDeviceBuffer::NewMicLevel() const
+{
+    return _newMicLevel;
+}
+
+// ----------------------------------------------------------------------------
+//  SetVQEData
+//
+//  Stores the latest voice-quality-enhancement inputs: playout and
+//  recording delays (ms) plus the estimated clock drift. These values are
+//  forwarded to the registered AudioTransport in DeliverRecordedData().
+//  Warns when the round-trip delay exceeds 300 ms. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetVQEData(WebRtc_UWord32 playDelayMS, WebRtc_UWord32 recDelayMS, WebRtc_Word32 clockDrift)
+{
+    if ((playDelayMS + recDelayMS) > 300)
+    {
+        // The format string has exactly two conversions, so only the two
+        // delays are passed (a surplus clockDrift argument was removed).
+        // NOTE(review): kTraceUtility differs from the kTraceAudioDevice
+        // module tag used elsewhere in this file — confirm intent.
+        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "too long delay (play:%i rec:%i)", playDelayMS, recDelayMS);
+    }
+
+    // NOTE(review): these members are written without holding _critSect
+    // while DeliverRecordedData() reads them under _critSectCb — confirm
+    // the callers serialize these updates.
+    _playDelayMS = playDelayMS;
+    _recDelayMS = recDelayMS;
+    _clockDrift = clockDrift;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartInputFileRecording
+//
+//  Starts dumping raw recorded PCM to fileName; any previously open dump
+//  file is flushed and closed first. Returns the result of OpenFile().
+//  NOTE(review): the three 'false' flags are FileWrapper::OpenFile options
+//  (presumably read-only / loop / text) — verify against FileWrapper.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StartInputFileRecording(const WebRtc_Word8 fileName[kAdmMaxFileNameSize])
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    _recFile.Flush();
+    _recFile.CloseFile();
+
+    return (_recFile.OpenFile(fileName, false, false, false));
+}
+
+// ----------------------------------------------------------------------------
+//  StopInputFileRecording
+//
+//  Flushes and closes the recorded-audio dump file. Safe to call when no
+//  dump is active. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StopInputFileRecording()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    _recFile.Flush();
+    _recFile.CloseFile();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartOutputFileRecording
+//
+//  Starts dumping raw playout PCM to fileName; any previously open dump
+//  file is flushed and closed first. Returns the result of OpenFile().
+//  NOTE(review): the three 'false' flags are FileWrapper::OpenFile options
+//  (presumably read-only / loop / text) — verify against FileWrapper.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StartOutputFileRecording(const WebRtc_Word8 fileName[kAdmMaxFileNameSize])
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    _playFile.Flush();
+    _playFile.CloseFile();
+
+    return (_playFile.OpenFile(fileName, false, false, false));
+}
+
+// ----------------------------------------------------------------------------
+//  StopOutputFileRecording
+//
+//  Flushes and closes the playout-audio dump file. Safe to call when no
+//  dump is active. Always returns 0.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StopOutputFileRecording()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    _playFile.Flush();
+    _playFile.CloseFile();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordedBuffer
+//
+//  Store recorded audio buffer in local memory ready for the actual
+//  "delivery" using a callback.
+//
+//  This method can also parse out left or right channel from a stereo
+//  input signal, i.e., emulate mono.
+//
+//  Examples:
+//
+//  16-bit,48kHz mono,  10ms => nSamples=480 => _recSize=2*480=960 bytes
+//  16-bit,48kHz stereo,10ms => nSamples=480 => _recSize=4*480=1920 bytes
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordedBuffer(const WebRtc_Word8* audioBuffer, WebRtc_UWord32 nSamples)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    // SetRecordingChannels()/SetRecordingChannel() must have been called.
+    if (_recBytesPerSample == 0)
+    {
+        assert(false);
+        return -1;
+    }
+
+    _recSamples = nSamples;
+    _recSize = _recBytesPerSample*nSamples; // {2,4}*nSamples
+    // Reject sizes that would overflow the fixed 10 ms scratch buffer.
+    if (_recSize > kMaxBufferSizeBytes)
+    {
+        assert(false);
+        return -1;
+    }
+    // (A former "nSamples != _recSamples" check was removed: _recSamples is
+    // assigned from nSamples just above, so it could never fail.)
+
+    if (_recChannel == AudioDeviceModule::kChannelBoth)
+    {
+        // (default) copy the complete input buffer to the local buffer
+        memcpy(&_recBuffer[0], audioBuffer, _recSize);
+    }
+    else
+    {
+        WebRtc_Word16* ptr16In = (WebRtc_Word16*)audioBuffer;
+        WebRtc_Word16* ptr16Out = (WebRtc_Word16*)&_recBuffer[0];
+
+        if (AudioDeviceModule::kChannelRight == _recChannel)
+        {
+            // Right channel is the second 16-bit word of each stereo frame.
+            ptr16In++;
+        }
+
+        // extract left or right channel from input buffer to the local buffer
+        for (WebRtc_UWord32 i = 0; i < _recSamples; i++)
+        {
+            *ptr16Out = *ptr16In;
+            ptr16Out++;
+            // Skip over the other channel of the interleaved frame.
+            ptr16In++;
+            ptr16In++;
+        }
+    }
+
+    if (_recFile.Open())
+    {
+        // write to binary file in mono or stereo (interleaved)
+        _recFile.Write(&_recBuffer[0], _recSize);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  DeliverRecordedData
+//
+//  Hands the buffer captured by SetRecordedBuffer() to the registered
+//  AudioTransport, together with the VQE data (total delay, clock drift)
+//  and the current mic level, then stores any new mic level suggested by
+//  the callback (AGC). Returns 0 also when no transport is registered;
+//  -1 only if essential members were never initialized.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::DeliverRecordedData()
+{
+    CriticalSectionScoped lock(_critSectCb);
+
+    // Ensure that user has initialized all essential members
+    if ((_recSampleRate == 0)     ||
+        (_recSamples == 0)        ||
+        (_recBytesPerSample == 0) ||
+        (_recChannels == 0))
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (_ptrCbAudioTransport == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to deliver recorded data (AudioTransport does not exist)");
+        return 0;
+    }
+
+    WebRtc_Word32 res(0);
+    WebRtc_UWord32 newMicLevel(0);
+    WebRtc_UWord32 totalDelayMS = _playDelayMS +_recDelayMS;
+
+    // EXPERIMENTAL: replace the captured audio with silence and inject a
+    // single-sample pulse at most once per 500 ms; RequestPlayoutData()
+    // looks for the pulse to estimate the total round-trip delay.
+    // NOTE(review): the 500 ms spacing ignores kPulsePeriodMs (1000 ms)
+    // declared in the header — confirm which value is intended.
+    if (_measureDelay)
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        memset(&_recBuffer[0], 0, _recSize);
+        WebRtc_UWord32 time = AudioDeviceUtility::GetTimeInMS();
+        if (time - _lastPulseTime > 500)
+        {
+            _pulseList.PushBack(time);
+            _lastPulseTime = time;
+
+            WebRtc_Word16* ptr16 = (WebRtc_Word16*)&_recBuffer[0];
+            *ptr16 = 30000;
+        }
+    }
+
+    res = _ptrCbAudioTransport->RecordedDataIsAvailable(&_recBuffer[0],
+                                                        _recSamples,
+                                                        _recBytesPerSample,
+                                                        _recChannels,
+                                                        _recSampleRate,
+                                                        totalDelayMS,
+                                                        _clockDrift,
+                                                        _currentMicLevel,
+                                                        newMicLevel);
+    if (res != -1)
+    {
+        // Keep the AGC volume suggestion for the platform layer to fetch.
+        _newMicLevel = newMicLevel;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RequestPlayoutData
+//
+//  Asks the registered AudioTransport for nSamples of playout audio and
+//  stores it in _playBuffer, ready to be fetched with GetPlayoutData().
+//  Returns the number of samples the transport actually delivered, 0 when
+//  no transport is registered, or -1 if essential members have not been
+//  initialized or the request would overflow the scratch buffer.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RequestPlayoutData(WebRtc_UWord32 nSamples)
+{
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        // Ensure that user has initialized all essential members
+        if ((_playBytesPerSample == 0) ||
+            (_playChannels == 0)       ||
+            (_playSampleRate == 0))
+        {
+            assert(false);
+            return -1;
+        }
+
+        _playSamples = nSamples;
+        _playSize = _playBytesPerSample * nSamples;  // {2,4}*nSamples
+        if (_playSize > kMaxBufferSizeBytes)
+        {
+            assert(false);
+            return -1;
+        }
+        // (A former "nSamples != _playSamples" check was removed: it
+        // compared nSamples against itself and could never fail.)
+    }
+
+    WebRtc_UWord32 nSamplesOut(0);
+
+    CriticalSectionScoped lock(_critSectCb);
+
+    if (_ptrCbAudioTransport == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to feed data to playout (AudioTransport does not exist)");
+        return 0;
+    }
+
+    // (The redundant second NULL test on _ptrCbAudioTransport was removed;
+    // the early return above already guarantees it is non-NULL.)
+    WebRtc_UWord32 res(0);
+
+    res = _ptrCbAudioTransport->NeedMorePlayData(_playSamples,
+                                                 _playBytesPerSample,
+                                                 _playChannels,
+                                                 _playSampleRate,
+                                                 &_playBuffer[0],
+                                                 nSamplesOut);
+    if (res != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "NeedMorePlayData() failed");
+    }
+
+    // --- Experimental delay-measurement implementation
+    // *** not to be used in released code ***
+
+    if (_measureDelay)
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        // Look for the loopback of a pulse injected in DeliverRecordedData().
+        WebRtc_Word16 maxAbs = WebRtcSpl_MaxAbsValueW16((const WebRtc_Word16*)&_playBuffer[0], (WebRtc_Word16)nSamplesOut*_playChannels);
+        if (maxAbs > 1000)
+        {
+            WebRtc_UWord32 nowTime = AudioDeviceUtility::GetTimeInMS();
+
+            if (!_pulseList.Empty())
+            {
+                ListItem* item = _pulseList.First();
+                if (item)
+                {
+                    WebRtc_Word16 maxIndex = WebRtcSpl_MaxAbsIndexW16((const WebRtc_Word16*)&_playBuffer[0], (WebRtc_Word16)nSamplesOut*_playChannels);
+                    WebRtc_UWord32 pulseTime = item->GetUnsignedItem();
+                    // The pulse offset within the 10 ms block refines the estimate.
+                    WebRtc_UWord32 diff = nowTime - pulseTime + (10*maxIndex)/(nSamplesOut*_playChannels);
+                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "diff time in playout delay (%d)", diff);
+                }
+                _pulseList.PopFront();
+            }
+        }
+    }
+
+    return nSamplesOut;
+}
+
+// ----------------------------------------------------------------------------
+//  GetPlayoutData
+//
+//  Copies the audio fetched by RequestPlayoutData() into audioBuffer,
+//  which must hold at least _playSize bytes. Returns the number of samples
+//  copied, or -1 if the stored size is inconsistent.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::GetPlayoutData(WebRtc_Word8* audioBuffer)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    // Defensive re-check; RequestPlayoutData() already rejects oversizes.
+    if (_playSize > kMaxBufferSizeBytes)
+    {
+       WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "_playSize %i exceeds "
+       "kMaxBufferSizeBytes in AudioDeviceBuffer::GetPlayoutData", _playSize);
+       assert(false);
+       return -1;       
+    } 
+
+    memcpy(audioBuffer, &_playBuffer[0], _playSize);
+
+    if (_playFile.Open())
+    {
+        // write to binary file in mono or stereo (interleaved)
+        _playFile.Write(&_playBuffer[0], _playSize);
+    }
+
+    return _playSamples;
+}
+
+// ----------------------------------------------------------------------------
+//  _EmptyList
+//
+//  Discards all pending pulse timestamps used by the experimental delay
+//  measurement. The caller is expected to hold _critSect.
+// ----------------------------------------------------------------------------
+
+void AudioDeviceBuffer::_EmptyList()
+{
+    // Pop every element; the stored timestamps are simply dropped.
+    // (The original also fetched each item into a variable that was never
+    // used except in commented-out code — that dead code was removed.)
+    while (!_pulseList.Empty())
+    {
+        _pulseList.PopFront();
+    }
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_buffer.h b/trunk/src/modules/audio_device/main/source/audio_device_buffer.h
new file mode 100644
index 0000000..e7db275
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_buffer.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_BUFFER_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_BUFFER_H
+
+#include "typedefs.h"
+#include "../../../../common_audio/resampler/include/resampler.h"
+#include "file_wrapper.h"
+#include "audio_device.h"
+#include "list_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// How often (ms) the experimental delay-measurement pulse is intended to fire.
+// NOTE(review): DeliverRecordedData() hard-codes a 500 ms spacing rather than
+// using this constant — confirm which value is intended.
+const WebRtc_UWord32 kPulsePeriodMs = 1000;
+// Largest block handled: 10 ms of 16-bit stereo at 96 kHz (960*2*2 bytes).
+const WebRtc_UWord32 kMaxBufferSizeBytes = 3840; // 10ms in stereo @ 96kHz
+
+class AudioDeviceObserver;
+class MediaFile;
+
+// Intermediary between a platform-specific audio device implementation and
+// the registered AudioTransport callback: owns one 10 ms scratch buffer per
+// direction, optional raw-PCM debug dump files, and the VQE side data
+// (delays, clock drift, mic levels) delivered together with recorded audio.
+class AudioDeviceBuffer
+{
+public:
+    // Sets the module id used in trace output.
+    void SetId(WebRtc_UWord32 id);
+    // Installs (or clears, when NULL) the transport callback.
+    WebRtc_Word32 RegisterAudioCallback(AudioTransport* audioCallback);
+
+    // Reset experimental delay-measurement state; always return 0.
+    WebRtc_Word32 InitPlayout();
+    WebRtc_Word32 InitRecording();
+
+    // Sample rates (Hz) forwarded with each transport callback.
+    WebRtc_Word32 SetRecordingSampleRate(WebRtc_UWord32 fsHz);
+    WebRtc_Word32 SetPlayoutSampleRate(WebRtc_UWord32 fsHz);
+    WebRtc_Word32 RecordingSampleRate() const;
+    WebRtc_Word32 PlayoutSampleRate() const;
+
+    // Channel configuration; bytes-per-sample is derived as 2 per channel.
+    WebRtc_Word32 SetRecordingChannels(WebRtc_UWord8 channels);
+    WebRtc_Word32 SetPlayoutChannels(WebRtc_UWord8 channels);
+    WebRtc_UWord8 RecordingChannels() const;
+    WebRtc_UWord8 PlayoutChannels() const;
+    // Select left/right/both while recording; requires stereo recording.
+    WebRtc_Word32 SetRecordingChannel(const AudioDeviceModule::ChannelType channel);
+    WebRtc_Word32 RecordingChannel(AudioDeviceModule::ChannelType& channel) const;
+
+    // Recording path: stage captured PCM, then deliver it to the transport.
+    WebRtc_Word32 SetRecordedBuffer(const WebRtc_Word8* audioBuffer, WebRtc_UWord32 nSamples);
+    WebRtc_Word32 SetCurrentMicLevel(WebRtc_UWord32 level);
+    WebRtc_Word32 SetVQEData(WebRtc_UWord32 playDelayMS, WebRtc_UWord32 recDelayMS, WebRtc_Word32 clockDrift);
+    WebRtc_Word32 DeliverRecordedData();
+    // Mic level suggested by AGC during the last delivery.
+    WebRtc_UWord32 NewMicLevel() const;
+
+    // Playout path: fetch audio from the transport, then copy it out.
+    WebRtc_Word32 RequestPlayoutData(WebRtc_UWord32 nSamples);
+    WebRtc_Word32 GetPlayoutData(WebRtc_Word8* audioBuffer);
+
+    // Raw PCM debug dumps of the recorded (input) / played (output) audio.
+    WebRtc_Word32 StartInputFileRecording(const WebRtc_Word8 fileName[kAdmMaxFileNameSize]);
+    WebRtc_Word32 StopInputFileRecording();
+    WebRtc_Word32 StartOutputFileRecording(const WebRtc_Word8 fileName[kAdmMaxFileNameSize]);
+    WebRtc_Word32 StopOutputFileRecording();
+
+    AudioDeviceBuffer();
+    ~AudioDeviceBuffer();
+
+private:
+    // Drops all pending pulse timestamps (experimental delay measurement).
+    void _EmptyList();
+
+private:
+    WebRtc_Word32                   _id;
+    // Protects the format/buffer state below.
+    CriticalSectionWrapper&         _critSect;
+    // Protects _ptrCbAudioTransport while callbacks are in flight.
+    CriticalSectionWrapper&         _critSectCb;
+
+    AudioTransport*                 _ptrCbAudioTransport;
+
+    WebRtc_UWord32                  _recSampleRate;
+    WebRtc_UWord32                  _playSampleRate;
+
+    WebRtc_UWord8                   _recChannels;
+    WebRtc_UWord8                   _playChannels;
+
+    // selected recording channel (left/right/both)
+    AudioDeviceModule::ChannelType _recChannel;
+
+    // 2 or 4 depending on mono or stereo
+    WebRtc_UWord8                   _recBytesPerSample;
+    WebRtc_UWord8                   _playBytesPerSample;
+
+    // 10ms in stereo @ 96kHz
+    WebRtc_Word8                    _recBuffer[kMaxBufferSizeBytes];
+
+    // one sample <=> 2 or 4 bytes
+    WebRtc_UWord32                  _recSamples;
+    WebRtc_UWord32                  _recSize;           // in bytes
+
+    // 10ms in stereo @ 96kHz
+    WebRtc_Word8                    _playBuffer[kMaxBufferSizeBytes];
+
+    // one sample <=> 2 or 4 bytes
+    WebRtc_UWord32                  _playSamples;
+    WebRtc_UWord32                  _playSize;          // in bytes
+
+    // Debug dump files (input = recorded audio, output = played audio).
+    FileWrapper&                    _recFile;
+    FileWrapper&                    _playFile;
+
+    WebRtc_UWord32                  _currentMicLevel;
+    WebRtc_UWord32                  _newMicLevel;
+
+    // Latest VQE data stored by SetVQEData().
+    WebRtc_UWord32                  _playDelayMS;
+    WebRtc_UWord32                  _recDelayMS;
+
+    WebRtc_Word32                   _clockDrift;
+
+    // EXPERIMENTAL delay measurement; should always be false in production.
+    bool                            _measureDelay;
+    ListWrapper                     _pulseList;
+    WebRtc_UWord32                  _lastPulseTime;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_BUFFER_H
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_config.h b/trunk/src/modules/audio_device/main/source/audio_device_config.h
new file mode 100644
index 0000000..23b9d55
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_config.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CONFIG_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CONFIG_H
+
+// Enumerators
+//
+// Max idle time (ms) for the module's process thread and the polling
+// interval (ms) used when tracking microphone volume.
+enum { kAdmMaxIdleTimeProcess = 1000 };
+enum { GET_MIC_VOLUME_INTERVAL_MS = 1000 };
+
+// Platform specifics
+//
+#if defined(_WIN32)
+#if (_MSC_VER >= 1400)
+// Windows Core Audio is the default audio layer in Windows.
+// Only supported for VS 2005 and higher.
+#define WEBRTC_WINDOWS_CORE_AUDIO_BUILD
+#endif
+#endif
+
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#include <windows.h>
+#include <tchar.h>
+#include <strsafe.h>
+// Formats a message (truncated to 256 TCHARs) and writes it to the
+// debugger output window.
+#define DEBUG_PRINT(...)		            \
+{								            \
+	TCHAR msg[256];				            \
+	StringCchPrintf(msg, 256, __VA_ARGS__);	\
+	OutputDebugString(msg);		            \
+}
+#else
+// Must accept the same variadic argument list as the debug version; the
+// original single-parameter form DEBUG_PRINT(exp) broke multi-argument
+// call sites in non-debug builds.
+#define DEBUG_PRINT(...)		((void)0)
+#endif
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CONFIG_H
+
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_generic.cc b/trunk/src/modules/audio_device/main/source/audio_device_generic.cc
new file mode 100644
index 0000000..7093d80
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_generic.cc
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_generic.h"
+#include "trace.h"
+
+namespace webrtc {
+
+// Fallback implementations for optional AudioDeviceGeneric features.
+// Each logs an error and reports failure; platform-specific subclasses
+// override the ones their platform actually supports.
+
+// Recording sample rate cannot be changed on this platform.
+WebRtc_Word32 AudioDeviceGeneric::SetRecordingSampleRate(
+    const WebRtc_UWord32 samplesPerSec)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Set recording sample rate not supported on this platform");
+    return -1;
+}
+
+// Playout sample rate cannot be changed on this platform.
+WebRtc_Word32 AudioDeviceGeneric::SetPlayoutSampleRate(
+    const WebRtc_UWord32 samplesPerSec)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Set playout sample rate not supported on this platform");
+    return -1;
+}
+	
+// Loudspeaker routing (a mobile feature) is unavailable on this platform.
+WebRtc_Word32 AudioDeviceGeneric::SetLoudspeakerStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Set loudspeaker status not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::GetLoudspeakerStatus(bool& enable) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Get loudspeaker status not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::ResetAudioDevice()
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Reset audio device not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::SoundDeviceControl(unsigned int par1,
+    unsigned int par2, unsigned int par3, unsigned int par4)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Sound device control not supported on this platform");
+    return -1;
+}
+
+// Built-in (WASAPI) AEC is Windows-only.
+// NOTE(review): these two use int32_t rather than the WebRtc_Word32 style
+// of the rest of the file — confirm whether that is intentional.
+int32_t AudioDeviceGeneric::EnableBuiltInAEC(bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Windows AEC not supported on this platform");
+    return -1;
+}
+
+bool AudioDeviceGeneric::BuiltInAECIsEnabled() const
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Windows AEC not supported on this platform");
+    return false;
+}
+
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_generic.h b/trunk/src/modules/audio_device/main/source/audio_device_generic.h
new file mode 100644
index 0000000..13d5f83
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_generic.h
@@ -0,0 +1,183 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_GENERIC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_GENERIC_H
+
+#include "audio_device.h"
+#include "audio_device_buffer.h"
+
+namespace webrtc {
+
+// Abstract interface implemented by each platform-specific audio device
+// (Windows Wave/Core, Linux ALSA/Pulse, Android OpenSL ES/JNI, iPhone, Mac,
+// Dummy). AudioDeviceModuleImpl forwards its public API to an instance of a
+// concrete subclass of this interface. Pure-virtual members are mandatory;
+// the non-pure members at the bottom are optional capabilities whose default
+// implementations (in audio_device_generic.cc) log an error and fail.
+class AudioDeviceGeneric
+{
+ public:
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const = 0;
+
+    // Main initialization and termination
+    virtual WebRtc_Word32 Init() = 0;
+    virtual WebRtc_Word32 Terminate() = 0;
+    virtual bool Initialized() const = 0;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices() = 0;
+    virtual WebRtc_Word16 RecordingDevices() = 0;
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]) = 0;
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]) = 0;
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index) = 0;
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device) = 0;
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index) = 0;
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device) = 0;
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitPlayout() = 0;
+    virtual bool PlayoutIsInitialized() const = 0;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitRecording() = 0;
+    virtual bool RecordingIsInitialized() const = 0;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout() = 0;
+    virtual WebRtc_Word32 StopPlayout() = 0;
+    virtual bool Playing() const = 0;
+    virtual WebRtc_Word32 StartRecording() = 0;
+    virtual WebRtc_Word32 StopRecording() = 0;
+    virtual bool Recording() const = 0;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable) = 0;
+    virtual bool AGC() const = 0;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight) = 0;
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const = 0;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitSpeaker() = 0;
+    virtual bool SpeakerIsInitialized() const = 0;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitMicrophone() = 0;
+    virtual bool MicrophoneIsInitialized() const = 0;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume) = 0;
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const = 0;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const = 0;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const = 0;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(
+        WebRtc_UWord16& stepSize) const = 0;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume) = 0;
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const = 0;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(
+        WebRtc_UWord32& maxVolume) const = 0;
+    virtual WebRtc_Word32 MinMicrophoneVolume(
+        WebRtc_UWord32& minVolume) const = 0;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const = 0;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable) = 0;
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const = 0;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable) = 0;
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const = 0;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable) = 0;
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const = 0;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable) = 0;
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const = 0;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetStereoRecording(bool enable) = 0;
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const = 0;
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type,
+        WebRtc_UWord16 sizeMS = 0) = 0;
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const = 0;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const = 0;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const = 0;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const = 0;
+
+    // Native sample rate controls (samples/sec).
+    // Optional: the base-class defaults log an error and return -1.
+    virtual WebRtc_Word32 SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+
+    // Speaker audio routing (for mobile devices). Optional, defaults fail.
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+
+    // Reset Audio Device (for mobile devices). Optional, default fails.
+    virtual WebRtc_Word32 ResetAudioDevice();
+
+    // Sound Audio Device control (for WinCE only). Optional, default fails.
+    virtual WebRtc_Word32 SoundDeviceControl(unsigned int par1 = 0,
+                                             unsigned int par2 = 0,
+                                             unsigned int par3 = 0,
+                                             unsigned int par4 = 0);
+
+    // Windows Core Audio only. Optional, defaults fail / return false.
+    virtual int32_t EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+public:
+    // Error/warning reporting, polled by the module's Process() callback.
+    virtual bool PlayoutWarning() const = 0;
+    virtual bool PlayoutError() const = 0;
+    virtual bool RecordingWarning() const = 0;
+    virtual bool RecordingError() const = 0;
+    virtual void ClearPlayoutWarning() = 0;
+    virtual void ClearPlayoutError() = 0;
+    virtual void ClearRecordingWarning() = 0;
+    virtual void ClearRecordingError() = 0;
+
+public:
+    // Installs the generic sample buffer shared with the module.
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+
+    virtual ~AudioDeviceGeneric() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_GENERIC_H
+
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_impl.cc b/trunk/src/modules/audio_device/main/source/audio_device_impl.cc
new file mode 100644
index 0000000..1c4c445
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_impl.cc
@@ -0,0 +1,2062 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_impl.h"
+#include "audio_device_config.h"
+#include "system_wrappers/interface/ref_count.h"
+
+#include <assert.h>
+#include <string.h>
+
+#if defined(_WIN32)
+    #include "audio_device_utility_win.h"
+    #include "audio_device_wave_win.h"
+ #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    #include "audio_device_core_win.h"
+ #endif
+#elif defined(WEBRTC_ANDROID_OPENSLES)
+    #include <stdlib.h>
+    #include "audio_device_utility_android.h"
+    #include "audio_device_android_opensles.h"
+#elif defined(WEBRTC_ANDROID)
+    #include <stdlib.h>
+    #include "audio_device_utility_android.h"
+    #include "audio_device_android_jni.h"
+#elif defined(WEBRTC_LINUX)
+    #include "audio_device_utility_linux.h"
+ #if defined(LINUX_ALSA)
+    #include "audio_device_alsa_linux.h"
+ #endif
+ #if defined(LINUX_PULSE)
+    #include "audio_device_pulse_linux.h"
+ #endif
+#elif defined(MAC_IPHONE)
+    #include "audio_device_utility_iphone.h"
+    #include "audio_device_iphone.h"
+#elif (defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC))
+    #include "audio_device_utility_mac.h"
+    #include "audio_device_mac.h"
+#endif
+#include "audio_device_dummy.h"
+#include "audio_device_utility_dummy.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+// Guard for APIs returning WebRtc_Word32: bail out with -1 until Init()
+// has completed successfully.
+#define CHECK_INITIALIZED()         \
+{                                   \
+    if (!_initialized) {            \
+        return -1;                  \
+    };                              \
+}
+
+// Guard for APIs returning bool: report false until Init() has completed.
+#define CHECK_INITIALIZED_BOOL()    \
+{                                   \
+    if (!_initialized) {            \
+        return false;               \
+    };                              \
+}
+
+namespace webrtc
+{
+
+// Free-function factory: builds a ref-counted AudioDeviceModule for the given
+// trace id and requested audio layer. May return NULL — see
+// AudioDeviceModuleImpl::Create() for the failure cases.
+AudioDeviceModule* CreateAudioDeviceModule(
+    WebRtc_Word32 id, AudioDeviceModule::AudioLayer audioLayer) {
+  return AudioDeviceModuleImpl::Create(id, audioLayer);
+}
+
+
+// ============================================================================
+//                                   Static methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceModule::Create()
+// ----------------------------------------------------------------------------
+
+// Creates the ref-counted module, verifies that the build targets a supported
+// platform, instantiates the platform-specific device/utility objects and
+// wires up the shared audio buffer. On any failure the half-built module is
+// deleted and NULL is returned.
+AudioDeviceModule* AudioDeviceModuleImpl::Create(const WebRtc_Word32 id,
+                                                 const AudioLayer audioLayer)
+{
+
+    // Create the generic ref counted (platform independent) implementation.
+    RefCountImpl<AudioDeviceModuleImpl>* audioDevice =
+        new RefCountImpl<AudioDeviceModuleImpl>(id, audioLayer);
+
+    // Ensure that the current platform is supported.
+    if (audioDevice->CheckPlatform() == -1)
+    {
+        delete audioDevice;
+        return NULL;
+    }
+
+    // Create the platform-dependent implementation.
+    if (audioDevice->CreatePlatformSpecificObjects() == -1)
+    {
+        delete audioDevice;
+        return NULL;
+    }
+
+    // Ensure that the generic audio buffer can communicate with the
+    // platform-specific parts.
+    if (audioDevice->AttachAudioBuffer() == -1)
+    {
+        delete audioDevice;
+        return NULL;
+    }
+
+    return audioDevice;
+}
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceModuleImpl - ctor
+// ----------------------------------------------------------------------------
+
+// Ctor: allocates the three critical sections (owned; released in the dtor),
+// records the requested audio layer and stamps _lastProcessTime. The
+// platform-specific objects are created afterwards by Create() via
+// CreatePlatformSpecificObjects().
+AudioDeviceModuleImpl::AudioDeviceModuleImpl(const WebRtc_Word32 id, const AudioLayer audioLayer) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _critSectEventCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _critSectAudioCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrCbAudioDeviceObserver(NULL),
+    _ptrAudioDeviceUtility(NULL),
+    _ptrAudioDevice(NULL),
+    _id(id),
+    _platformAudioLayer(audioLayer),
+    _lastProcessTime(AudioDeviceUtility::GetTimeInMS()),
+    _platformType(kPlatformNotSupported),
+    _initialized(false),
+    _lastError(kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  CheckPlatform
+// ----------------------------------------------------------------------------
+
+// Maps the compile-time platform macro to a PlatformType and stores it in
+// _platformType. Returns -1 when no supported platform macro is defined,
+// which makes Create() delete the module.
+WebRtc_Word32 AudioDeviceModuleImpl::CheckPlatform()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    // Ensure that the current platform is supported
+    //
+    PlatformType platform(kPlatformNotSupported);
+
+#if defined(_WIN32)
+    platform = kPlatformWin32;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is WIN32");
+#elif defined(WEBRTC_ANDROID)
+    platform = kPlatformAndroid;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is ANDROID");
+#elif defined(WEBRTC_LINUX)
+    platform = kPlatformLinux;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is LINUX");
+#elif (defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC))
+    platform = kPlatformMac;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is MAC");
+#endif
+
+    if (platform == kPlatformNotSupported)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "current platform is not supported => this module will self destruct!");
+        return -1;
+    }
+
+    // Store valid output results
+    //
+    _platformType = platform;
+
+    return 0;
+}
+
+
+// ----------------------------------------------------------------------------
+//  CreatePlatformSpecificObjects
+// ----------------------------------------------------------------------------
+
+// Instantiates the platform-specific AudioDeviceGeneric and AudioDeviceUtility
+// implementations selected by the compile-time platform macros and the audio
+// layer requested at construction. On success the raw pointers are stored in
+// _ptrAudioDevice / _ptrAudioDeviceUtility (owned; freed in the dtor); on
+// failure both remain NULL and -1 is returned. Note that _platformAudioLayer
+// may be rewritten here when a fallback occurs (Core Audio -> Wave,
+// PulseAudio -> ALSA).
+WebRtc_Word32 AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    AudioDeviceGeneric* ptrAudioDevice(NULL);
+    AudioDeviceUtility* ptrAudioDeviceUtility(NULL);
+
+#if defined(WEBRTC_DUMMY_AUDIO_BUILD)
+    ptrAudioDevice = new AudioDeviceDummy(Id());
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Dummy Audio APIs will be utilized");
+
+    if (ptrAudioDevice != NULL)
+    {
+        ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
+    }
+#else
+    const AudioLayer audioLayer(PlatformAudioLayer());
+
+    // Create the *Windows* implementation of the Audio Device
+    //
+#if defined(_WIN32)
+    if ((audioLayer == kWindowsWaveAudio)
+#if !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+        // Wave audio is default if Core audio is not supported in this build
+        || (audioLayer == kPlatformDefaultAudio)
+#endif
+        )
+    {
+        // create *Windows Wave Audio* implementation
+        ptrAudioDevice = new AudioDeviceWindowsWave(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Windows Wave APIs will be utilized");
+    }
+#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    if ((audioLayer == kWindowsCoreAudio) ||
+        (audioLayer == kPlatformDefaultAudio)
+        )
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Windows Core Audio APIs...");
+
+        if (AudioDeviceWindowsCore::CoreAudioIsSupported())
+        {
+            // create *Windows Core Audio* implementation
+            ptrAudioDevice = new AudioDeviceWindowsCore(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Windows Core Audio APIs will be utilized");
+        }
+        else
+        {
+            // create *Windows Wave Audio* implementation
+            ptrAudioDevice = new AudioDeviceWindowsWave(Id());
+            if (ptrAudioDevice != NULL)
+            {
+                // Core Audio was not supported => revert to Windows Wave instead
+                _platformAudioLayer = kWindowsWaveAudio;  // modify the state set at construction
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Windows Core Audio is *not* supported => Wave APIs will be utilized instead");
+            }
+        }
+    }
+#endif // defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Windows implementation of the Device Utility.
+        // This class is independent of the selected audio layer
+        // for Windows.
+        //
+        ptrAudioDeviceUtility = new AudioDeviceUtilityWindows(Id());
+    }
+#endif  // #if defined(_WIN32)
+
+    // Create the *Android OpenSLES* implementation of the Audio Device
+    //
+#if defined(WEBRTC_ANDROID_OPENSLES)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *Android OpenSL ES Audio* implementation
+        ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Android OpenSLES Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Android implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
+    }
+    // END #if defined(WEBRTC_ANDROID_OPENSLES)
+
+    // Create the *Android Java* implementation of the Audio Device
+    //
+#elif defined(WEBRTC_ANDROID)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *Android JNI Audio* implementation
+        ptrAudioDevice = new AudioDeviceAndroidJni(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Android implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
+    }
+    // END #if defined(WEBRTC_ANDROID)
+
+    // Create the *Linux* implementation of the Audio Device
+    //
+#elif defined(WEBRTC_LINUX)
+    if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio))
+    {
+#if defined(LINUX_PULSE)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Linux PulseAudio APIs...");
+
+        if (AudioDeviceLinuxPulse::PulseAudioIsSupported())
+        {
+            // create *Linux PulseAudio* implementation
+            ptrAudioDevice = new AudioDeviceLinuxPulse(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Linux PulseAudio APIs will be utilized");
+        }
+        else
+        {
+#endif
+#if defined(LINUX_ALSA)
+            // create *Linux ALSA Audio* implementation
+            ptrAudioDevice = new AudioDeviceLinuxALSA(Id());
+            if (ptrAudioDevice != NULL)
+            {
+                // Pulse Audio was not supported => revert to ALSA instead
+                _platformAudioLayer = kLinuxAlsaAudio;  // modify the state set at construction
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Linux PulseAudio is *not* supported => ALSA APIs will be utilized instead");
+            }
+#endif
+#if defined(LINUX_PULSE)
+        }
+#endif
+    }
+    else if (audioLayer == kLinuxAlsaAudio)
+    {
+#if defined(LINUX_ALSA)
+        // create *Linux ALSA Audio* implementation
+        ptrAudioDevice = new AudioDeviceLinuxALSA(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Linux ALSA APIs will be utilized");
+#endif
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Linux implementation of the Device Utility.
+        // This class is independent of the selected audio layer
+        // for Linux.
+        //
+        ptrAudioDeviceUtility = new AudioDeviceUtilityLinux(Id());
+    }
+#endif  // #if defined(WEBRTC_LINUX)
+
+    // Create the *iPhone* implementation of the Audio Device
+    //
+#if defined(MAC_IPHONE)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *iPhone Audio* implementation
+        ptrAudioDevice = new AudioDeviceIPhone(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "iPhone Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Mac implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityIPhone(Id());
+    }
+    // END #if defined(MAC_IPHONE)
+
+    // Create the *Mac* implementation of the Audio Device
+    //
+#elif defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *Mac Audio* implementation
+        ptrAudioDevice = new AudioDeviceMac(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Mac OS X Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Mac implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityMac(Id());
+    }
+#endif  // #if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+
+    // Create the *Dummy* implementation of the Audio Device
+    // Available for all platforms
+    //
+    if (audioLayer == kDummyAudio)
+    {
+        // Create *Dummy Audio* implementation
+        assert(!ptrAudioDevice);
+        ptrAudioDevice = new AudioDeviceDummy(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Dummy Audio APIs will be utilized");
+
+        if (ptrAudioDevice != NULL)
+        {
+            ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
+        }
+    }
+#endif  // if defined(WEBRTC_DUMMY_AUDIO_BUILD)
+
+    if (ptrAudioDevice == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "unable to create the platform specific audio device implementation");
+        return -1;
+    }
+
+    if (ptrAudioDeviceUtility == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "unable to create the platform specific audio device utility");
+        return -1;
+    }
+
+    // Store valid output pointers
+    //
+    _ptrAudioDevice = ptrAudioDevice;
+    _ptrAudioDeviceUtility = ptrAudioDeviceUtility;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+//
+//  Install "bridge" between the platform implementation and the generic
+//  implementation. The "child" shall set the native sampling rate and the
+//  number of channels in this function call.
+// ----------------------------------------------------------------------------
+
+// Hands the generic sample buffer to the platform device so that the
+// platform-specific code exchanges audio data through it. Always succeeds.
+WebRtc_Word32 AudioDeviceModuleImpl::AttachAudioBuffer() {
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    _audioDeviceBuffer.SetId(_id);
+    _ptrAudioDevice->AttachAudioBuffer(&_audioDeviceBuffer);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ~AudioDeviceModuleImpl - dtor
+// ----------------------------------------------------------------------------
+
+// Dtor: releases the owned platform device/utility objects, then the three
+// critical sections allocated in the ctor (held by reference, hence the
+// delete-of-address idiom).
+AudioDeviceModuleImpl::~AudioDeviceModuleImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    if (_ptrAudioDevice)
+    {
+        delete _ptrAudioDevice;
+        _ptrAudioDevice = NULL;
+    }
+
+    if (_ptrAudioDeviceUtility)
+    {
+        delete _ptrAudioDeviceUtility;
+        _ptrAudioDeviceUtility = NULL;
+    }
+
+    delete &_critSect;
+    delete &_critSectEventCb;
+    delete &_critSectAudioCb;
+}
+
+// ============================================================================
+//                                  Module
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Module::ChangeUniqueId
+// ----------------------------------------------------------------------------
+
+// Module interface: replaces the trace identifier used by this instance.
+WebRtc_Word32 AudioDeviceModuleImpl::ChangeUniqueId(const WebRtc_Word32 id) {
+    _id = id;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Module::TimeUntilNextProcess
+//
+//  Returns the number of milliseconds until the module want a worker thread
+//  to call Process().
+// ----------------------------------------------------------------------------
+
+// Milliseconds until the worker thread should call Process() again; becomes
+// zero or negative once kAdmMaxIdleTimeProcess ms have passed since the last
+// Process() call.
+WebRtc_Word32 AudioDeviceModuleImpl::TimeUntilNextProcess()
+{
+    WebRtc_UWord32 now = AudioDeviceUtility::GetTimeInMS();
+    // NOTE(review): unsigned subtraction — presumably GetTimeInMS() is
+    // monotonic over the module's lifetime; a clock wrap would skew this.
+    WebRtc_Word32 deltaProcess = kAdmMaxIdleTimeProcess - (now - _lastProcessTime);
+    return (deltaProcess);
+}
+
+// ----------------------------------------------------------------------------
+//  Module::Process
+//
+//  Check for posted error and warning reports. Generate callbacks if
+//  new reports exists.
+// ----------------------------------------------------------------------------
+
+// Module worker-thread callback: polls the platform device for posted
+// playout/recording warnings and errors, forwards each to the registered
+// observer (under _critSectEventCb) and clears the corresponding flag.
+// Also refreshes _lastProcessTime for TimeUntilNextProcess().
+WebRtc_Word32 AudioDeviceModuleImpl::Process()
+{
+
+    _lastProcessTime = AudioDeviceUtility::GetTimeInMS();
+
+    // kPlayoutWarning
+    if (_ptrAudioDevice->PlayoutWarning())
+    {
+        CriticalSectionScoped lock(_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "=> OnWarningIsReported(kPlayoutWarning)");
+            _ptrCbAudioDeviceObserver->OnWarningIsReported(AudioDeviceObserver::kPlayoutWarning);
+        }
+        // Flag is cleared even when no observer is attached.
+        _ptrAudioDevice->ClearPlayoutWarning();
+    }
+
+    // kPlayoutError
+    if (_ptrAudioDevice->PlayoutError())
+    {
+        CriticalSectionScoped lock(_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "=> OnErrorIsReported(kPlayoutError)");
+            _ptrCbAudioDeviceObserver->OnErrorIsReported(AudioDeviceObserver::kPlayoutError);
+        }
+        _ptrAudioDevice->ClearPlayoutError();
+    }
+
+    // kRecordingWarning
+    if (_ptrAudioDevice->RecordingWarning())
+    {
+        CriticalSectionScoped lock(_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "=> OnWarningIsReported(kRecordingWarning)");
+            _ptrCbAudioDeviceObserver->OnWarningIsReported(AudioDeviceObserver::kRecordingWarning);
+        }
+        _ptrAudioDevice->ClearRecordingWarning();
+    }
+
+    // kRecordingError
+    if (_ptrAudioDevice->RecordingError())
+    {
+        CriticalSectionScoped lock(_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "=> OnErrorIsReported(kRecordingError)");
+            _ptrCbAudioDeviceObserver->OnErrorIsReported(AudioDeviceObserver::kRecordingError);
+        }
+        _ptrAudioDevice->ClearRecordingError();
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                    Public API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+// Queries the platform device for the audio layer actually in use and stores
+// it through `audioLayer`; `*audioLayer` is only written on success.
+WebRtc_Word32 AudioDeviceModuleImpl::ActiveAudioLayer(AudioLayer* audioLayer) const
+{
+
+    AudioLayer activeAudio;
+
+    if (_ptrAudioDevice->ActiveAudioLayer(activeAudio) == -1)
+    {
+        return -1;
+    }
+
+    *audioLayer = activeAudio;
+
+    if (*audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kWindowsWaveAudio");
+    }
+    else if (*audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kWindowsCoreAudio");
+    }
+    else if (*audioLayer == AudioDeviceModule::kLinuxAlsaAudio)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kLinuxAlsaAudio");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: NOT_SUPPORTED");
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  LastError
+// ----------------------------------------------------------------------------
+
+AudioDeviceModule::ErrorCode AudioDeviceModuleImpl::LastError() const
+{
+    return _lastError;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+// Initializes the utility object and the platform device. Idempotent: a call
+// on an already-initialized module is a no-op returning 0. Fails (-1) when
+// the platform objects were never created or the device's Init() fails.
+WebRtc_Word32 AudioDeviceModuleImpl::Init() {
+    if (_initialized) {
+        return 0;
+    }
+    if (!_ptrAudioDeviceUtility || !_ptrAudioDevice) {
+        return -1;
+    }
+    _ptrAudioDeviceUtility->Init();
+    if (_ptrAudioDevice->Init() == -1) {
+        return -1;
+    }
+    _initialized = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+// Shuts down the platform device. A call on an uninitialized module is a
+// no-op returning 0; on device failure the module stays marked initialized.
+WebRtc_Word32 AudioDeviceModuleImpl::Terminate() {
+    if (!_initialized) {
+        return 0;
+    }
+    if (_ptrAudioDevice->Terminate() == -1) {
+        return -1;
+    }
+    _initialized = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+// Reports whether Init() has completed successfully (and Terminate() has not
+// been called since).
+bool AudioDeviceModuleImpl::Initialized() const {
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: %d", _initialized);
+    return _initialized;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+// Queries the platform device whether a speaker is available and stores the
+// answer through `available`; `*available` is only written on success.
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(false);
+
+    if (_ptrAudioDevice->SpeakerIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    // Bug fix: log the dereferenced flag, not the pointer itself — passing a
+    // bool* to %d is undefined behavior and printed an address (compare the
+    // correct usage in MicrophoneIsAvailable()).
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+// Forwards speaker (output mixer) initialization to the platform device.
+WebRtc_Word32 AudioDeviceModuleImpl::InitSpeaker() {
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->InitSpeaker();
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+// Queries the platform device whether a microphone is available and stores
+// the answer through `available`; `*available` is only written on success.
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->MicrophoneIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+// Forwards microphone (input mixer) initialization to the platform device.
+WebRtc_Word32 AudioDeviceModuleImpl::InitMicrophone() {
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->InitMicrophone();
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerVolumeIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether speaker volume control exists.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->SpeakerVolumeIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    // Forwards the new speaker volume to the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetSpeakerVolume(volume);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerVolume(WebRtc_UWord32* volume) const
+{
+    // Retrieves the current speaker volume from the platform-specific device.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 currentLevel(0);
+    if (_ptrAudioDevice->SpeakerVolume(currentLevel) == -1)
+    {
+        return -1;
+    }
+
+    *volume = currentLevel;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: volume=%u", *volume);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
+{
+    // Forwards the per-channel wave-out volume to the platform device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetWaveOutVolume(volumeLeft, volumeRight);
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::WaveOutVolume(WebRtc_UWord16* volumeLeft, WebRtc_UWord16* volumeRight) const
+{
+    // Retrieves the left/right wave-out volume from the platform device.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 leftLevel(0);
+    WebRtc_UWord16 rightLevel(0);
+    if (_ptrAudioDevice->WaveOutVolume(leftLevel, rightLevel) == -1)
+    {
+        return -1;
+    }
+
+    *volumeLeft = leftLevel;
+    *volumeRight = rightLevel;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "outputs: volumeLeft=%u, volumeRight=%u",
+        *volumeLeft, *volumeRight);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::SpeakerIsInitialized() const
+{
+    // Reports whether InitSpeaker() has completed on the platform device.
+    CHECK_INITIALIZED_BOOL();
+
+    const bool initialized = _ptrAudioDevice->SpeakerIsInitialized();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: %d", initialized);
+    return initialized;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::MicrophoneIsInitialized() const
+{
+    // Reports whether InitMicrophone() has completed on the platform device.
+    CHECK_INITIALIZED_BOOL();
+
+    const bool initialized = _ptrAudioDevice->MicrophoneIsInitialized();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: %d", initialized);
+    return initialized;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MaxSpeakerVolume(WebRtc_UWord32* maxVolume) const
+{
+    // Retrieves the maximum speaker volume supported by the platform device.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_ptrAudioDevice->MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    *maxVolume = maxVol;
+
+    // Fix: %u for the unsigned volume (was %d; siblings trace with %u).
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: maxVolume=%u", *maxVolume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MinSpeakerVolume(WebRtc_UWord32* minVolume) const
+{
+    // Retrieves the minimum speaker volume supported by the platform device.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 lowest(0);
+    if (_ptrAudioDevice->MinSpeakerVolume(lowest) == -1)
+    {
+        return -1;
+    }
+
+    *minVolume = lowest;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: minVolume=%u", *minVolume);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerVolumeStepSize(WebRtc_UWord16* stepSize) const
+{
+    // Retrieves the granularity of speaker volume adjustments.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 increment(0);
+    if (_ptrAudioDevice->SpeakerVolumeStepSize(increment) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the speaker-volume step size");
+        return -1;
+    }
+
+    *stepSize = increment;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: stepSize=%u", *stepSize);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerMuteIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether speaker mute is supported.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->SpeakerMuteIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetSpeakerMute(bool enable)
+{
+    // Forwards the speaker mute request to the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetSpeakerMute(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerMute(bool* enabled) const
+{
+    // Retrieves the current speaker mute state from the platform device.
+    CHECK_INITIALIZED();
+
+    bool isMuted(false);
+    if (_ptrAudioDevice->SpeakerMute(isMuted) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = isMuted;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneMuteIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether microphone mute is supported.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->MicrophoneMuteIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetMicrophoneMute(bool enable)
+{
+    // Forwards the microphone mute request to the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetMicrophoneMute(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneMute(bool* enabled) const
+{
+    // Retrieves the current microphone mute state from the platform device.
+    CHECK_INITIALIZED();
+
+    bool isMuted(false);
+    if (_ptrAudioDevice->MicrophoneMute(isMuted) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = isMuted;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneBoostIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether microphone boost is supported.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->MicrophoneBoostIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetMicrophoneBoost(bool enable)
+{
+    // Forwards the microphone boost request to the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetMicrophoneBoost(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneBoost(bool* enabled) const
+{
+    // Retrieves the current microphone boost state from the platform device.
+    CHECK_INITIALIZED();
+
+    bool boostOn(false);
+    if (_ptrAudioDevice->MicrophoneBoost(boostOn) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = boostOn;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneVolumeIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether microphone volume control exists.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->MicrophoneVolumeIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    // Forwards the new microphone volume to the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetMicrophoneVolume(volume);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneVolume(WebRtc_UWord32* volume) const
+{
+    // Retrieves the current microphone volume from the platform device.
+    // Called frequently (e.g. by AGC), hence the stream-level trace.
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 currentLevel(0);
+    if (_ptrAudioDevice->MicrophoneVolume(currentLevel) == -1)
+    {
+        return -1;
+    }
+
+    *volume = currentLevel;
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: volume=%u", *volume);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoRecordingIsAvailable(bool* available) const
+{
+    // Asks the platform-specific layer whether stereo recording is supported.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->StereoRecordingIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetStereoRecording(bool enable)
+{
+    // Selects mono (false) or stereo (true) recording and keeps the audio
+    // device buffer's channel count in sync with the platform layer.
+    CHECK_INITIALIZED();
+
+    // The channel layout may not change while the recording side is
+    // initialized. Fix: the old error text ("recording in stereo is not
+    // supported") described the wrong failure; wording now mirrors
+    // SetStereoPlayout.
+    if (_ptrAudioDevice->RecordingIsInitialized())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "unable to set stereo mode while recording side is initialized");
+        return -1;
+    }
+
+    if (_ptrAudioDevice->SetStereoRecording(enable) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to enable stereo recording");
+        return -1;
+    }
+
+    // Mirror the selected layout in the buffer: 2 channels for stereo, else 1.
+    WebRtc_Word8 nChannels(1);
+    if (enable)
+    {
+        nChannels = 2;
+    }
+    _audioDeviceBuffer.SetRecordingChannels(nChannels);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoRecording(bool* enabled) const
+{
+    // Retrieves whether recording is currently configured for stereo.
+    CHECK_INITIALIZED();
+
+    bool stereoOn(false);
+    if (_ptrAudioDevice->StereoRecording(stereoOn) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = stereoOn;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingChannel
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingChannel(const ChannelType channel)
+{
+    // Selects which captured channel(s) the audio device buffer forwards
+    // (left, right, or both). Removed the empty if/else chain on |channel| —
+    // dead code with no effect.
+    CHECK_INITIALIZED();
+
+    bool stereo(false);
+
+    // NOTE(review): only the success of this stereo query is checked; the
+    // queried value itself is unused. Presumably channel selection is only
+    // meaningful when stereo recording works — confirm intent.
+    if (_ptrAudioDevice->StereoRecording(stereo) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "recording in stereo is not supported");
+        return -1;
+    }
+
+    return (_audioDeviceBuffer.SetRecordingChannel(channel));
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingChannel
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingChannel(ChannelType* channel) const
+{
+    // Retrieves the channel selection currently used by the audio device
+    // buffer. Removed the empty if/else chain on |*channel| — dead code.
+    CHECK_INITIALIZED();
+
+    ChannelType chType;
+
+    if (_audioDeviceBuffer.RecordingChannel(chType) == -1)
+    {
+        return -1;
+    }
+
+    *channel = chType;
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoPlayoutIsAvailable(bool* available) const
+{
+    // Asks the platform-specific layer whether stereo playout is supported.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->StereoPlayoutIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetStereoPlayout(bool enable)
+{
+    // Selects mono (false) or stereo (true) playout and keeps the audio
+    // device buffer's channel count in sync with the platform layer.
+    CHECK_INITIALIZED();
+
+    // The channel layout may not change while the playout side is initialized.
+    if (_ptrAudioDevice->PlayoutIsInitialized())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "unable to set stereo mode while playing side is initialized");
+        return -1;
+    }
+
+    if (_ptrAudioDevice->SetStereoPlayout(enable) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "stereo playout is not supported");
+        return -1;
+    }
+
+    // Mirror the selected layout in the buffer: 2 channels for stereo, else 1.
+    const WebRtc_Word8 channelCount = enable ? 2 : 1;
+    _audioDeviceBuffer.SetPlayoutChannels(channelCount);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoPlayout(bool* enabled) const
+{
+    // Retrieves whether playout is currently configured for stereo.
+    CHECK_INITIALIZED();
+
+    bool stereoOn(false);
+    if (_ptrAudioDevice->StereoPlayout(stereoOn) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = stereoOn;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetAGC(bool enable)
+{
+    // Forwards the automatic-gain-control toggle to the platform device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetAGC(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::AGC() const
+{
+    // Reports whether automatic gain control is enabled on the platform device.
+    CHECK_INITIALIZED_BOOL();
+    return _ptrAudioDevice->AGC();
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether playout can be used.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->PlayoutIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingIsAvailable(bool* available)
+{
+    // Asks the platform-specific layer whether recording can be used.
+    CHECK_INITIALIZED();
+
+    bool supported(false);
+    if (_ptrAudioDevice->RecordingIsAvailable(supported) == -1)
+    {
+        return -1;
+    }
+
+    *available = supported;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MaxMicrophoneVolume(WebRtc_UWord32* maxVolume) const
+{
+    // Retrieves the maximum microphone volume supported by the platform
+    // device. Called frequently (e.g. by AGC), hence the stream-level trace.
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_ptrAudioDevice->MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    *maxVolume = maxVol;
+
+    // Fix: %u for the unsigned volume (was %d; siblings trace with %u).
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: maxVolume=%u", *maxVolume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MinMicrophoneVolume(WebRtc_UWord32* minVolume) const
+{
+    // Retrieves the minimum microphone volume supported by the platform device.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 lowest(0);
+    if (_ptrAudioDevice->MinMicrophoneVolume(lowest) == -1)
+    {
+        return -1;
+    }
+
+    *minVolume = lowest;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: minVolume=%u", *minVolume);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneVolumeStepSize(WebRtc_UWord16* stepSize) const
+{
+    // Retrieves the granularity of microphone volume adjustments.
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 increment(0);
+    if (_ptrAudioDevice->MicrophoneVolumeStepSize(increment) == -1)
+    {
+        return -1;
+    }
+
+    *stepSize = increment;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: stepSize=%u", *stepSize);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceModuleImpl::PlayoutDevices()
+{
+    // Returns the number of playout devices reported by the platform layer.
+    CHECK_INITIALIZED();
+
+    const WebRtc_UWord16 deviceCount = _ptrAudioDevice->PlayoutDevices();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: #playout devices=%d", deviceCount);
+    return (WebRtc_Word16)deviceCount;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+    // Selects the playout device by enumeration index.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetPlayoutDevice(index);
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutDevice(WindowsDeviceType device)
+{
+    // Selects the playout device by Windows role (default / default
+    // communication). Removed the empty if/else on |device| — dead code.
+    CHECK_INITIALIZED();
+
+    return (_ptrAudioDevice->SetPlayoutDevice(device));
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+    // Fetches the human-readable name (and, if |guid| is non-NULL, the GUID)
+    // of the playout device at |index|. |name| is required.
+    CHECK_INITIALIZED();
+
+    if (name == NULL)
+    {
+        _lastError = kAdmErrArgument;
+        return -1;
+    }
+
+    if (_ptrAudioDevice->PlayoutDeviceName(index, name, guid) == -1)
+    {
+        return -1;
+    }
+
+    // |name| is known non-NULL here; the former re-check was redundant.
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: name=%s", name);
+    if (guid != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: guid=%s", guid);
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+    // Fetches the human-readable name (and, if |guid| is non-NULL, the GUID)
+    // of the recording device at |index|. |name| is required.
+    CHECK_INITIALIZED();
+
+    if (name == NULL)
+    {
+        _lastError = kAdmErrArgument;
+        return -1;
+    }
+
+    if (_ptrAudioDevice->RecordingDeviceName(index, name, guid) == -1)
+    {
+        return -1;
+    }
+
+    // |name| is known non-NULL here; the former re-check was redundant.
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: name=%s", name);
+    if (guid != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: guid=%s", guid);
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceModuleImpl::RecordingDevices()
+{
+    // Returns the number of recording devices reported by the platform layer.
+    CHECK_INITIALIZED();
+
+    const WebRtc_UWord16 deviceCount = _ptrAudioDevice->RecordingDevices();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: #recording devices=%d", deviceCount);
+    return (WebRtc_Word16)deviceCount;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingDevice(WebRtc_UWord16 index)
+{
+    // Selects the recording device by enumeration index.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->SetRecordingDevice(index);
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingDevice(WindowsDeviceType device)
+{
+    // Selects the recording device by Windows role (default / default
+    // communication). Removed the empty if/else on |device| — dead code.
+    CHECK_INITIALIZED();
+
+    return (_ptrAudioDevice->SetRecordingDevice(device));
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::InitPlayout()
+{
+    // Prepares the shared buffer first, then the platform playout device.
+    CHECK_INITIALIZED();
+    _audioDeviceBuffer.InitPlayout();
+    return _ptrAudioDevice->InitPlayout();
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::InitRecording()
+{
+    // Prepares the shared buffer first, then the platform recording device.
+    CHECK_INITIALIZED();
+    _audioDeviceBuffer.InitRecording();
+    return _ptrAudioDevice->InitRecording();
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::PlayoutIsInitialized() const
+{
+    // Reports whether InitPlayout() has completed on the platform device.
+    CHECK_INITIALIZED_BOOL();
+    return _ptrAudioDevice->PlayoutIsInitialized();
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::RecordingIsInitialized() const
+{
+    // Reports whether InitRecording() has completed on the platform device.
+    CHECK_INITIALIZED_BOOL();
+    return _ptrAudioDevice->RecordingIsInitialized();
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartPlayout()
+{
+    // Starts playout on the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->StartPlayout();
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopPlayout()
+{
+    // Stops playout on the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->StopPlayout();
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::Playing() const
+{
+    // Reports whether the platform device is currently playing out audio.
+    CHECK_INITIALIZED_BOOL();
+    return _ptrAudioDevice->Playing();
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartRecording()
+{
+    // Starts recording on the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->StartRecording();
+}
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopRecording()
+{
+    // Stops recording on the platform-specific device.
+    CHECK_INITIALIZED();
+    return _ptrAudioDevice->StopRecording();
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::Recording() const
+{
+    // Reports whether the platform device is currently recording audio.
+    CHECK_INITIALIZED_BOOL();
+    return _ptrAudioDevice->Recording();
+}
+
+// ----------------------------------------------------------------------------
+//  RegisterEventObserver
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RegisterEventObserver(AudioDeviceObserver* eventCallback)
+{
+    // Installs (or, with NULL, removes) the error/warning observer.
+    // Deliberately usable before Init(): no CHECK_INITIALIZED here.
+    CriticalSectionScoped lock(_critSectEventCb);
+    _ptrCbAudioDeviceObserver = eventCallback;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RegisterAudioCallback
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RegisterAudioCallback(AudioTransport* audioCallback)
+{
+    // Hands the audio transport callback to the shared device buffer.
+    // Deliberately usable before Init(): no CHECK_INITIALIZED here.
+    CriticalSectionScoped lock(_critSectAudioCb);
+    _audioDeviceBuffer.RegisterAudioCallback(audioCallback);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRawInputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartRawInputFileRecording(const WebRtc_Word8 pcmFileNameUTF8[kAdmMaxFileNameSize])
+{
+    // Begins dumping raw captured (input) PCM to the given file.
+    CHECK_INITIALIZED();
+
+    if (pcmFileNameUTF8 == NULL)
+    {
+        return -1;
+    }
+    return _audioDeviceBuffer.StartInputFileRecording(pcmFileNameUTF8);
+}
+
+// ----------------------------------------------------------------------------
+//  StopRawInputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopRawInputFileRecording()
+{
+    // Stops dumping raw captured (input) PCM.
+    CHECK_INITIALIZED();
+    return _audioDeviceBuffer.StopInputFileRecording();
+}
+
+// ----------------------------------------------------------------------------
+//  StartRawOutputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartRawOutputFileRecording(const WebRtc_Word8 pcmFileNameUTF8[kAdmMaxFileNameSize])
+{
+    // Begins dumping raw rendered (output) PCM to the given file.
+    CHECK_INITIALIZED();
+
+    if (pcmFileNameUTF8 == NULL)
+    {
+        return -1;
+    }
+    return _audioDeviceBuffer.StartOutputFileRecording(pcmFileNameUTF8);
+}
+
+// ----------------------------------------------------------------------------
+//  StopRawOutputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopRawOutputFileRecording()
+{
+    // Stops dumping raw rendered (output) PCM.
+    // Removed the unreachable trailing "return 0;" that followed this return.
+    CHECK_INITIALIZED();
+
+    return (_audioDeviceBuffer.StopOutputFileRecording());
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutBuffer(const BufferType type, WebRtc_UWord16 sizeMS)
+{
+    // Configures the playout buffer strategy. |sizeMS| is only validated for
+    // the fixed-size buffer type. Not allowed once playout is initialized.
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->PlayoutIsInitialized())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "unable to modify the playout buffer while playing side is initialized");
+        return -1;
+    }
+
+    // A fixed buffer must lie within the module's supported range.
+    if (type == kFixedBufferSize &&
+        (sizeMS < kAdmMinPlayoutBufferSizeMs || sizeMS > kAdmMaxPlayoutBufferSizeMs))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "size parameter is out of range");
+        return -1;
+    }
+
+    WebRtc_Word32 result = _ptrAudioDevice->SetPlayoutBuffer(type, sizeMS);
+    if (result == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to set the playout buffer (error: %d)", LastError());
+    }
+    return result;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutBuffer(BufferType* type, WebRtc_UWord16* sizeMS) const
+{
+    // Retrieves the current playout buffer strategy and size.
+    CHECK_INITIALIZED();
+
+    BufferType currentType;
+    WebRtc_UWord16 currentSize(0);
+    if (_ptrAudioDevice->PlayoutBuffer(currentType, currentSize) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the buffer type and size");
+        return -1;
+    }
+
+    *type = currentType;
+    *sizeMS = currentSize;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: type=%u, sizeMS=%u", *type, *sizeMS);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutDelay(WebRtc_UWord16* delayMS) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    // Query the platform device for the current playout delay (ms) and copy
+    // it to the caller's out-parameter. Returns 0 on success, -1 on failure.
+    WebRtc_UWord16 delay(0);
+
+    if (_ptrAudioDevice->PlayoutDelay(delay) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the playout delay");
+        return -1;
+    }
+
+    *delayMS = delay;
+
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: delayMS=%u", *delayMS);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingDelay(WebRtc_UWord16* delayMS) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    // Query the platform device for the current recording delay (ms) and
+    // copy it to the caller's out-parameter. Returns 0 on success, -1 on
+    // failure.
+    WebRtc_UWord16 delay(0);
+
+    if (_ptrAudioDevice->RecordingDelay(delay) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the recording delay");
+        return -1;
+    }
+
+    *delayMS = delay;
+
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: delayMS=%u", *delayMS);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::CPULoad(WebRtc_UWord16* load) const
+{
+    CHECK_INITIALIZED();
+
+    // Query the platform device for its CPU-load figure and copy it to the
+    // caller's out-parameter. Returns 0 on success, -1 on failure.
+    WebRtc_UWord16 cpuLoad(0);
+
+    if (_ptrAudioDevice->CPULoad(cpuLoad) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the CPU load");
+        return -1;
+    }
+
+    *load = cpuLoad;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: load=%u", *load);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingSampleRate(const WebRtc_UWord32 samplesPerSec)
+{
+    CHECK_INITIALIZED();
+
+    // Thin forwarder: ask the platform device to use the given native
+    // recording sample rate (samples/sec). Returns 0 on success, -1 on
+    // failure.
+    if (_ptrAudioDevice->SetRecordingSampleRate(samplesPerSec) != 0)
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingSampleRate(WebRtc_UWord32* samplesPerSec) const
+{
+    CHECK_INITIALIZED();
+
+    // Read the currently configured recording sample rate from the shared
+    // audio buffer (not the platform device) and copy it to the caller's
+    // out-parameter. Returns 0 on success, -1 on failure.
+    WebRtc_Word32 sampleRate = _audioDeviceBuffer.RecordingSampleRate();
+
+    if (sampleRate == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the sample rate");
+        return -1;
+    }
+
+    *samplesPerSec = sampleRate;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: samplesPerSec=%u", *samplesPerSec);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutSampleRate(const WebRtc_UWord32 samplesPerSec)
+{
+    CHECK_INITIALIZED();
+
+    // Thin forwarder: ask the platform device to use the given native
+    // playout sample rate (samples/sec). Returns 0 on success, -1 on
+    // failure.
+    if (_ptrAudioDevice->SetPlayoutSampleRate(samplesPerSec) != 0)
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutSampleRate(WebRtc_UWord32* samplesPerSec) const
+{
+    CHECK_INITIALIZED();
+
+    // Read the currently configured playout sample rate from the shared
+    // audio buffer (not the platform device) and copy it to the caller's
+    // out-parameter. Returns 0 on success, -1 on failure.
+    WebRtc_Word32 sampleRate = _audioDeviceBuffer.PlayoutSampleRate();
+
+    if (sampleRate == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the sample rate");
+        return -1;
+    }
+
+    *samplesPerSec = sampleRate;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: samplesPerSec=%u", *samplesPerSec);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  ResetAudioDevice
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::ResetAudioDevice()
+{
+    CHECK_INITIALIZED();
+
+    // Mobile-device hook: forward the reset request to the platform device.
+    // Returns 0 on success, -1 on failure.
+
+    if (_ptrAudioDevice->ResetAudioDevice() == -1)
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetLoudspeakerStatus(bool enable)
+{
+    CHECK_INITIALIZED();
+
+    // Mobile-device hook: enable/disable loudspeaker routing on the platform
+    // device. Returns 0 on success, -1 on failure.
+    if (_ptrAudioDevice->SetLoudspeakerStatus(enable) != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  GetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::GetLoudspeakerStatus(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    // Mobile-device hook: query loudspeaker routing state from the platform
+    // device, writing the result through 'enabled'.
+    // NOTE(review): dereferences 'enabled' without a NULL check — confirm
+    // callers always pass a valid pointer.
+    if (_ptrAudioDevice->GetLoudspeakerStatus(*enabled) != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// Enables/disables the platform's built-in acoustic echo canceller; the
+// return value is propagated unchanged from the platform device.
+int32_t AudioDeviceModuleImpl::EnableBuiltInAEC(bool enable)
+{
+    CHECK_INITIALIZED();
+
+    return _ptrAudioDevice->EnableBuiltInAEC(enable);
+}
+
+// Returns whether the platform's built-in AEC is currently enabled
+// (false if the module is not initialized, via CHECK_INITIALIZED_BOOL).
+bool AudioDeviceModuleImpl::BuiltInAECIsEnabled() const
+{
+    CHECK_INITIALIZED_BOOL();
+
+    return _ptrAudioDevice->BuiltInAECIsEnabled();
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Platform
+// ----------------------------------------------------------------------------
+
+// Simple accessor for the platform type detected/stored in _platformType.
+AudioDeviceModuleImpl::PlatformType AudioDeviceModuleImpl::Platform() const
+{
+    return _platformType;
+}
+
+// ----------------------------------------------------------------------------
+//  PlatformAudioLayer
+// ----------------------------------------------------------------------------
+
+// Returns the configured platform audio layer. The switch below exists only
+// to emit a human-readable trace line for the current value; it does not
+// alter the returned value.
+AudioDeviceModule::AudioLayer AudioDeviceModuleImpl::PlatformAudioLayer() const
+{
+
+    switch (_platformAudioLayer)
+    {
+    case kPlatformDefaultAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kPlatformDefaultAudio");
+        break;
+    case kWindowsWaveAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kWindowsWaveAudio");
+        break;
+    case kWindowsCoreAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kWindowsCoreAudio");
+        break;
+    case kLinuxAlsaAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kLinuxAlsaAudio");
+        break;
+    case kDummyAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kDummyAudio");
+        break;
+    default:
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "output: INVALID");
+        break;
+    }
+
+    return _platformAudioLayer;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_impl.h b/trunk/src/modules/audio_device/main/source/audio_device_impl.h
new file mode 100644
index 0000000..5edec0d
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_impl.h
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H
+
+#include "audio_device.h"
+#include "audio_device_buffer.h"
+
+namespace webrtc
+{
+
+class AudioDeviceGeneric;
+class AudioDeviceUtility;
+class CriticalSectionWrapper;
+
+// Concrete AudioDeviceModule implementation. Owns a platform-specific
+// AudioDeviceGeneric (_ptrAudioDevice) that does the real audio I/O, plus a
+// shared AudioDeviceBuffer used for sample transport and raw-PCM file
+// recording. Most public methods are thin, traced forwarders to one of
+// these two members.
+class AudioDeviceModuleImpl : public AudioDeviceModule
+{
+public:
+    // Host platform detected at runtime by CheckPlatform().
+    enum PlatformType
+    {
+        kPlatformNotSupported = 0,
+        kPlatformWin32 = 1,
+        kPlatformWinCe = 2,
+        kPlatformLinux = 3,
+        kPlatformMac = 4,
+        kPlatformAndroid = 5
+    };
+
+    WebRtc_Word32 CheckPlatform();
+    WebRtc_Word32 CreatePlatformSpecificObjects();
+    WebRtc_Word32 AttachAudioBuffer();
+
+    AudioDeviceModuleImpl(const WebRtc_Word32 id, const AudioLayer audioLayer);
+    virtual ~AudioDeviceModuleImpl();
+
+public: // RefCountedModule
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+public:
+    // Factory methods (resource allocation/deallocation)
+    static AudioDeviceModule* Create(
+        const WebRtc_Word32 id,
+        const AudioLayer audioLayer = kPlatformDefaultAudio);
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(AudioLayer* audioLayer) const;
+
+    // Error handling
+    virtual ErrorCode LastError() const;
+    virtual WebRtc_Word32 RegisterEventObserver(
+        AudioDeviceObserver* eventCallback);
+
+    // Full-duplex transportation of PCM audio
+    virtual WebRtc_Word32 RegisterAudioCallback(
+        AudioTransport* audioCallback);
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16* volumeLeft,
+                                        WebRtc_UWord16* volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32* volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32* maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32* minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(
+        WebRtc_UWord16* stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32* volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(
+        WebRtc_UWord32* maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(
+        WebRtc_UWord32* minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16* stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool* enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool* enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool* enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool* available) const;
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool* enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool* available) const;
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool* enabled) const;
+    virtual WebRtc_Word32 SetRecordingChannel(const ChannelType channel);
+    virtual WebRtc_Word32 RecordingChannel(ChannelType* channel) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(const BufferType type,
+                                           WebRtc_UWord16 sizeMS = 0);
+    virtual WebRtc_Word32 PlayoutBuffer(BufferType* type,
+                                        WebRtc_UWord16* sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16* delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16* delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16* load) const;
+
+    // Recording of raw PCM data
+    virtual WebRtc_Word32 StartRawOutputFileRecording(
+        const WebRtc_Word8 pcmFileNameUTF8[kAdmMaxFileNameSize]);
+    virtual WebRtc_Word32 StopRawOutputFileRecording();
+    virtual WebRtc_Word32 StartRawInputFileRecording(
+        const WebRtc_Word8 pcmFileNameUTF8[kAdmMaxFileNameSize]);
+    virtual WebRtc_Word32 StopRawInputFileRecording();
+
+    // Native sample rate controls (samples/sec)
+    virtual WebRtc_Word32 SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 RecordingSampleRate(
+        WebRtc_UWord32* samplesPerSec) const;
+    virtual WebRtc_Word32 SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 PlayoutSampleRate(
+        WebRtc_UWord32* samplesPerSec) const;
+
+    // Mobile device specific functions
+    virtual WebRtc_Word32 ResetAudioDevice();
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool* enabled) const;
+
+    virtual int32_t EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+public:
+    WebRtc_Word32 Id() {return _id;}
+
+private:
+    PlatformType Platform() const;
+    AudioLayer PlatformAudioLayer() const;
+
+private:
+    // Protects module state; the event/audio callback sections have their
+    // own locks so callbacks do not contend with API calls.
+    CriticalSectionWrapper&     _critSect;
+    CriticalSectionWrapper&     _critSectEventCb;
+    CriticalSectionWrapper&     _critSectAudioCb;
+
+    AudioDeviceObserver*        _ptrCbAudioDeviceObserver;
+
+    AudioDeviceUtility*         _ptrAudioDeviceUtility;
+    AudioDeviceGeneric*         _ptrAudioDevice;
+
+    AudioDeviceBuffer           _audioDeviceBuffer;
+
+    WebRtc_Word32               _id;
+    AudioLayer                  _platformAudioLayer;
+    WebRtc_UWord32              _lastProcessTime;
+    PlatformType                _platformType;
+    bool                        _initialized;
+    mutable ErrorCode           _lastError;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_utility.cc b/trunk/src/modules/audio_device/main/source/audio_device_utility.cc
new file mode 100644
index 0000000..c256f9e
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_utility.cc
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_device_utility.h"
+
+#if defined(_WIN32)
+
+// ============================================================================
+//                                     Windows
+// ============================================================================
+
+#include <windows.h>
+#include <conio.h>
+#include <ctype.h>
+#include <stdio.h>
+#include <mmsystem.h>
+
+namespace webrtc
+{
+
+// Blocks the calling thread for the given number of milliseconds using the
+// Win32 Sleep() API.
+void AudioDeviceUtility::Sleep(WebRtc_UWord32 milliseconds)
+{
+    return ::Sleep(milliseconds);
+}
+
+// Blocks until a single key is pressed on the console (no echo), using the
+// MSVC _getch() routine.
+void AudioDeviceUtility::WaitForKey()
+{
+	_getch();
+}
+
+// Returns the Windows multimedia timer value (ms since system start);
+// timeGetTime() wraps around roughly every 49.7 days.
+WebRtc_UWord32 AudioDeviceUtility::GetTimeInMS()
+{
+	return timeGetTime();
+}
+
+// Case-insensitive comparison of at most 'length' characters; returns true
+// when the prefixes are equal (wraps MSVC _strnicmp).
+bool AudioDeviceUtility::StringCompare(const WebRtc_Word8* str1 , const WebRtc_Word8* str2, const WebRtc_UWord32 length)
+{
+	return ((_strnicmp(str1, str2, length) == 0) ? true : false);
+}
+
+}  // namespace webrtc
+
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+// ============================================================================
+//                                 Linux & Mac
+// ============================================================================
+
+#include <sys/time.h>   // gettimeofday
+#include <time.h>       // nanosleep, gettimeofday
+#include <string.h>     // strncasecmp
+#include <stdio.h>      // getchar
+#include <termios.h>    // tcgetattr
+
+#include <unistd.h> 
+
+namespace webrtc
+{
+
+// Blocks until a single key is pressed on stdin. Temporarily switches the
+// terminal to non-canonical, no-echo mode so getchar() returns on the first
+// keystroke, then restores the saved settings before returning.
+void AudioDeviceUtility::WaitForKey()
+{
+
+    struct termios oldt, newt;
+
+    tcgetattr( STDIN_FILENO, &oldt );
+
+    // we don't want getchar to echo!
+
+    newt = oldt;
+    newt.c_lflag &= ~( ICANON | ECHO );
+    tcsetattr( STDIN_FILENO, TCSANOW, &newt );
+
+    // catch any newline that's hanging around...
+
+    // you'll have to hit enter twice if you
+
+    // choose enter out of all available keys
+
+    if (getchar() == '\n')
+    {
+        getchar();
+    }
+
+    tcsetattr( STDIN_FILENO, TCSANOW, &oldt );
+}
+
+// Returns wall-clock time in milliseconds derived from gettimeofday().
+// The seconds*1000 + usec/1000 sum is truncated to 32 bits, so the value
+// wraps periodically; callers should only use it for relative timing.
+WebRtc_UWord32 AudioDeviceUtility::GetTimeInMS()
+{
+    struct timeval tv;
+    struct timezone tz;
+    WebRtc_UWord32 val;
+
+    gettimeofday(&tv, &tz);
+    val = (WebRtc_UWord32)(tv.tv_sec*1000 + tv.tv_usec/1000);
+    return val;
+}
+
+// Blocks the calling thread for the given number of milliseconds using
+// nanosleep(). The remainder after whole seconds is converted to
+// nanoseconds. An interrupting signal (EINTR) is not retried here.
+void AudioDeviceUtility::Sleep(WebRtc_UWord32 milliseconds)
+{
+    timespec t;
+    t.tv_sec = milliseconds/1000;
+    t.tv_nsec = (milliseconds-(milliseconds/1000)*1000)*1000000;
+    nanosleep(&t,NULL);
+}
+
+// Case-insensitive comparison of at most 'length' characters; returns true
+// when the prefixes are equal (wraps POSIX strncasecmp).
+bool AudioDeviceUtility::StringCompare(const WebRtc_Word8* str1 , const WebRtc_Word8* str2, const WebRtc_UWord32 length)
+{
+    return (strncasecmp(str1, str2, length) == 0)?true: false;
+}
+
+}  // namespace webrtc
+
+#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+
diff --git a/trunk/src/modules/audio_device/main/source/audio_device_utility.h b/trunk/src/modules/audio_device/main/source/audio_device_utility.h
new file mode 100644
index 0000000..b967950
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/audio_device_utility.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_H
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// Small grab-bag of platform helpers (timing, sleeping, console input,
+// case-insensitive string compare). The static methods are implemented
+// per-platform in audio_device_utility.cc; Init() is a pure-virtual hook
+// implemented by platform-specific subclasses.
+class AudioDeviceUtility
+{
+public:
+    // Milliseconds timestamp for relative timing (wraps; platform clock).
+    static WebRtc_UWord32 GetTimeInMS();
+	static void Sleep(WebRtc_UWord32 milliseconds);
+	static void WaitForKey();
+    static bool StringCompare(const WebRtc_Word8* str1,
+                              const WebRtc_Word8* str2,
+                              const WebRtc_UWord32 length);
+	virtual WebRtc_Word32 Init() = 0;
+
+	virtual ~AudioDeviceUtility() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_H
+
diff --git a/trunk/src/modules/audio_device/main/source/dummy/audio_device_dummy.cc b/trunk/src/modules/audio_device/main/source/dummy/audio_device_dummy.cc
new file mode 100644
index 0000000..2c8d5c7
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/dummy/audio_device_dummy.cc
@@ -0,0 +1,1336 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_dummy.h"
+
+#include <string.h>
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+// Enable to record playout data
+//#define RECORD_PLAYOUT 1
+
+namespace webrtc {
+
+const WebRtc_UWord32 REC_TIMER_PERIOD_MS = 10;
+const WebRtc_UWord32 PLAY_TIMER_PERIOD_MS = 10;
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceDummy() - ctor
+// ----------------------------------------------------------------------------
+
+// Constructs the dummy device: allocates the lock and timer/start events,
+// zeroes state flags, and pre-fills _recBuffer with a synthetic waveform so
+// "recorded" audio is audible rather than silence.
+AudioDeviceDummy::AudioDeviceDummy(const WebRtc_Word32 id) :
+	  _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _timeEventRec(*EventWrapper::Create()),
+    _timeEventPlay(*EventWrapper::Create()),
+    _recStartEvent(*EventWrapper::Create()),
+    _playStartEvent(*EventWrapper::Create()),
+    _ptrThreadRec(NULL),
+    _ptrThreadPlay(NULL),
+    _recThreadID(0),
+    _playThreadID(0),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _speakerIsInitialized(false),
+    _microphoneIsInitialized(false),
+    _playDataFile(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+    WebRtc_Word16* tmp = (WebRtc_Word16*)_recBuffer;
+
+    // Saw tooth -16000 to 16000, 100 Hz @ fs = 16 kHz
+//    for(int i=0; i<160; ++i)
+//    {
+//        tmp[i] = i*200-16000;
+//    }
+
+    // Rough sinus 2 kHz @ fs = 16 kHz
+    // (8 samples per period, repeated 20 times = 160 samples / 10 ms.)
+    for(int i=0; i<20; ++i)
+    {
+      tmp[i*8] = 0;
+      tmp[i*8+1] = -5000;
+      tmp[i*8+2] = -16000;
+      tmp[i*8+3] = -5000;
+      tmp[i*8+4] = 0;
+      tmp[i*8+5] = 5000;
+      tmp[i*8+6] = 16000;
+      tmp[i*8+7] = 5000;
+    }
+  
+#ifdef RECORD_PLAYOUT
+    // Optional debug dump of everything sent to playout.
+    _playDataFile = fopen("webrtc_VoiceEngine_playout.pcm", "wb");
+    if (!_playDataFile)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "  Could not open file for writing playout data");
+    }
+#endif
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceDummy() - dtor
+// ----------------------------------------------------------------------------
+
+// Destructor: stops threads/timers via Terminate(), then releases the
+// events, the critical section, and the optional playout dump file.
+AudioDeviceDummy::~AudioDeviceDummy()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    // Not owned by this object; just drop the reference.
+    _ptrAudioBuffer = NULL;
+
+    delete &_recStartEvent;
+    delete &_playStartEvent;
+    delete &_timeEventRec;
+    delete &_timeEventPlay;
+    delete &_critSect;
+
+    if (_playDataFile)
+    {
+        fclose(_playDataFile);
+    }
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+// Stores the shared audio buffer and pushes this implementation's fixed
+// format into it: 16 kHz mono for both directions.
+// NOTE(review): dereferences 'audioBuffer' without a NULL check — confirm
+// the caller always passes a valid buffer.
+void AudioDeviceDummy::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // Inform the AudioBuffer about default settings for this implementation.
+    _ptrAudioBuffer->SetRecordingSampleRate(16000);
+    _ptrAudioBuffer->SetPlayoutSampleRate(16000);
+    _ptrAudioBuffer->SetRecordingChannels(1);
+    _ptrAudioBuffer->SetPlayoutChannels(1);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+// Reports the audio layer implemented by this class; always kDummyAudio.
+WebRtc_Word32 AudioDeviceDummy::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kDummyAudio;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+// Creates and starts the periodic recording and playout worker threads and
+// their 10 ms timer events. Idempotent: returns 0 immediately when already
+// initialized. Returns -1 on any failure.
+// NOTE(review): if playout setup fails after the recording side started,
+// the recording thread/timer are left running while _initialized stays
+// false — confirm Terminate() is the intended recovery path.
+WebRtc_Word32 AudioDeviceDummy::Init()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    const bool periodic(true);
+    unsigned int threadID(0);
+    char threadName[64] = {0};
+
+    // RECORDING
+    strncpy(threadName, "webrtc_audio_module_rec_thread", 63);
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this, kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "  failed to start the rec audio thread");
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+    
+    // The periodic timer event wakes the rec thread every 10 ms.
+    if (!_timeEventRec.StartTimer(periodic, REC_TIMER_PERIOD_MS))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "  failed to start the rec timer event");
+        if (_ptrThreadRec->Stop())
+        {
+            delete _ptrThreadRec;
+            _ptrThreadRec = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "  unable to stop the activated rec thread");
+        }
+        return -1;
+    }
+
+    // PLAYOUT
+    strncpy(threadName, "webrtc_audio_module_play_thread", 63);
+    _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this, kRealtimePriority, threadName);
+    if (_ptrThreadPlay == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "  failed to create the play audio thread");
+        return -1;
+    }
+
+    threadID = 0;
+    if (!_ptrThreadPlay->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "  failed to start the play audio thread");
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        return -1;
+    }
+    _playThreadID = threadID;
+    
+    // The periodic timer event wakes the play thread every 10 ms.
+    if (!_timeEventPlay.StartTimer(periodic, PLAY_TIMER_PERIOD_MS))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "  failed to start the play timer event");
+        if (_ptrThreadPlay->Stop())
+        {
+            delete _ptrThreadPlay;
+            _ptrThreadPlay = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "  unable to stop the activated play thread");
+        }
+        return -1;
+    }
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+// Stops and deletes the recording and playout threads and their timers.
+// Idempotent: returns 0 immediately when not initialized. The lock is
+// released (Leave/Enter) around each Stop() so the worker thread can finish
+// its final iteration without deadlocking on _critSect.
+WebRtc_Word32 AudioDeviceDummy::Terminate()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    // RECORDING
+    if (_ptrThreadRec)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        _critSect.Leave();
+
+        // Mark the thread dead and wake it via its timer event so Stop()
+        // does not have to wait out a full timer period.
+        tmpThread->SetNotAlive();
+        _timeEventRec.Set();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "  failed to close down the rec audio thread");       
+        }
+
+        _critSect.Enter();
+    }
+
+    _timeEventRec.StopTimer();
+
+    // PLAYOUT
+    if (_ptrThreadPlay)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+        _timeEventPlay.Set();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "  failed to close down the play audio thread");       
+        }
+
+        _critSect.Enter();
+    }
+
+    _timeEventPlay.StopTimer();
+
+    _initialized = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+// Returns true once Init() has completed and Terminate() has not yet run.
+bool AudioDeviceDummy::Initialized() const
+{
+    return (_initialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+// Dummy device: a "speaker" is always reported as available.
+WebRtc_Word32 AudioDeviceDummy::SpeakerIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+// Marks the (fake) speaker as initialized. Mirrors the real implementations
+// by refusing to (re)initialize while playout is active.
+WebRtc_Word32 AudioDeviceDummy::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    // Fixed: these two lines were tab-indented; the file uses 4-space indent.
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    _microphoneIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::SpeakerIsInitialized() const
+{
+
+    return (_speakerIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::MicrophoneIsInitialized() const
+{
+
+    return (_microphoneIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    available = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Speaker volume control is not supported by the dummy device; always fails.
+// (SpeakerVolumeIsAvailable() reports false accordingly.)
+WebRtc_Word32 AudioDeviceDummy::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+// Volume step size is meaningless without a real mixer; always fails.
+WebRtc_Word32 AudioDeviceDummy::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SpeakerMuteIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    available = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetSpeakerMute(bool enable)
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SpeakerMute(bool& enabled) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    available = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetMicrophoneMute(bool enable)
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneMute(bool& enabled) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetMicrophoneBoost(bool enable)
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneBoost(bool& enabled) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StereoRecordingIsAvailable(bool& available)
+{
+
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetStereoRecording(bool enable)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (enable)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StereoRecording(bool& enabled) const
+{
+
+    enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StereoPlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetStereoPlayout(bool enable)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (enable)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StereoPlayout(bool& enabled) const
+{
+
+    enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetAGC(bool enable)
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::AGC() const
+{
+    // WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    return false;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    available = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AudioDeviceDummy::SetMicrophoneVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    // WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceDummy::PlayoutDevices()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    return 1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+// Selects the playout device by index. Exactly one fake device exists, so
+// only index 0 is accepted. Fails once playout has been initialized,
+// matching SetRecordingDevice().
+WebRtc_Word32 AudioDeviceDummy::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (index != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+// Windows-specific device selection does not apply to the dummy device.
+WebRtc_Word32 AudioDeviceDummy::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+// Reports an empty name (and GUID, when a guid buffer is supplied) for the
+// single fake playout device. Only index 0 is valid.
+WebRtc_Word32 AudioDeviceDummy::PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+
+    if (index != 0)
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+
+    if (index != 0)
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceDummy::RecordingDevices()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    return 1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+// Selects the recording device by index. Exactly one fake device exists, so
+// only index 0 is accepted. Fails once recording has been initialized.
+WebRtc_Word32 AudioDeviceDummy::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (index != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::PlayoutIsAvailable(bool& available)
+{
+
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::RecordingIsAvailable(bool& available)
+{
+
+    available = true;
+
+    return 0;
+}
+    
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::InitPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "  InitSpeaker() failed");
+    }
+
+    _playIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::InitRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "  InitMicrophone() failed");
+    }
+
+    _recIsInitialized = true;
+
+    return 0;
+
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StartRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    _recording = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StopRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    _recIsInitialized = false;
+    _recording = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::Recording() const
+{
+    return (_recording);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::PlayoutIsInitialized() const
+{
+
+    return (_playIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::StartPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    _playing = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+// Stops playout and drops the initialized state. The critical section
+// mirrors StopRecording(): _playing is read by PlayThreadProcess() under
+// _critSect, so it must not be cleared from another thread without holding
+// the same lock (the original cleared it unlocked).
+WebRtc_Word32 AudioDeviceDummy::StopPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped lock(_critSect);
+    delayMS = 0;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped lock(_critSect);
+    delayMS = 0;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Just ignore
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const
+{
+    CriticalSectionScoped lock(_critSect);
+
+    type = AudioDeviceModule::kAdaptiveBufferSize;
+    sizeMS = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceDummy::CPULoad(WebRtc_UWord16& load) const
+{
+
+    load = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::PlayoutWarning() const
+{
+    return false;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::PlayoutError() const
+{
+    return false;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::RecordingWarning() const
+{
+    return false;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceDummy::RecordingError() const
+{
+    return false;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceDummy::ClearPlayoutWarning()
+{
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceDummy::ClearPlayoutError()
+{
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceDummy::ClearRecordingWarning()
+{
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceDummy::ClearRecordingError()
+{
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  PlayThreadFunc
+// ----------------------------------------------------------------------------
+
+// Static trampoline handed to ThreadWrapper; forwards to the instance's
+// PlayThreadProcess(). Returning true keeps the thread loop running.
+bool AudioDeviceDummy::PlayThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceDummy*>(pThis)->PlayThreadProcess());
+}
+
+// ----------------------------------------------------------------------------
+//  RecThreadFunc
+// ----------------------------------------------------------------------------
+
+// Static trampoline handed to ThreadWrapper; forwards to the instance's
+// RecThreadProcess(). Returning true keeps the thread loop running.
+bool AudioDeviceDummy::RecThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceDummy*>(pThis)->RecThreadProcess());
+}
+
+// ----------------------------------------------------------------------------
+//  PlayThreadProcess
+// ----------------------------------------------------------------------------
+
+// One iteration of the playout thread. Waits for the periodic play timer,
+// pulls 160 samples (10 ms @ 16 kHz mono, 2 bytes each — inferred from the
+// 2*160 buffer; confirm) from the audio buffer, and optionally dumps them to
+// _playDataFile. Always returns true so ThreadWrapper keeps looping.
+bool AudioDeviceDummy::PlayThreadProcess()
+{
+    switch (_timeEventPlay.Wait(1000))
+    {
+    case kEventSignaled:
+        break;
+    case kEventError:
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "EventWrapper::Wait() failed => restarting timer");
+        _timeEventPlay.StopTimer();
+        _timeEventPlay.StartTimer(true, PLAY_TIMER_PERIOD_MS);
+        return true;
+    case kEventTimeout:
+        // No tick this second; just go round again.
+        return true;
+    }
+
+    Lock();
+    
+    if(_playing)
+    {
+        WebRtc_Word8 playBuffer[2*160];
+
+        // Drop the lock across the callback into the audio buffer,
+        // presumably so the callee can take its own locks — TODO confirm.
+        UnLock();
+        WebRtc_Word32 nSamples = (WebRtc_Word32)_ptrAudioBuffer->RequestPlayoutData(160);
+        Lock();
+
+        // _playing may have been cleared (StopPlayout) while unlocked.
+        if (!_playing)
+        {
+            UnLock();
+            return true;
+        }
+
+        nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        if (nSamples != 160)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "  invalid number of output samples(%d)", nSamples);
+        }
+        
+        // Optional raw dump of the rendered audio for debugging.
+        if (_playDataFile)
+        {
+            int wr = fwrite(playBuffer, 2, 160, _playDataFile);
+            if (wr != 160)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                           "  Could not write playout data to file (%d) ferror = %d",
+                           wr, ferror(_playDataFile));
+            }
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  RecThreadProcess
+// ----------------------------------------------------------------------------
+
+// One iteration of the recording thread. Waits for the periodic rec timer,
+// hands the (never-written, i.e. silent) _recBuffer of 160 samples to the
+// audio buffer, and delivers it to the registered observer. Always returns
+// true so ThreadWrapper keeps looping.
+bool AudioDeviceDummy::RecThreadProcess()
+{
+    switch (_timeEventRec.Wait(1000))
+    {
+    case kEventSignaled:
+        break;
+    case kEventError:
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "EventWrapper::Wait() failed => restarting timer");
+        _timeEventRec.StopTimer();
+        _timeEventRec.StartTimer(true, REC_TIMER_PERIOD_MS);
+        return true;
+    case kEventTimeout:
+        // No tick this second; just go round again.
+        return true;
+    }
+
+    Lock();
+
+    if (_recording)
+    {
+        // store the recorded buffer
+        _ptrAudioBuffer->SetRecordedBuffer(_recBuffer, 160);
+
+        // store vqe delay values
+        _ptrAudioBuffer->SetVQEData(0, 0, 0);
+
+        // deliver recorded samples at specified sample rate, mic level etc. to the observer using callback
+        // NOTE(review): lock is released before the delivery callback,
+        // matching PlayThreadProcess() — presumably to avoid holding
+        // _critSect across observer code; confirm.
+        UnLock();
+        _ptrAudioBuffer->DeliverRecordedData();
+    }
+    else
+    {
+        UnLock();
+    }
+
+    return true;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/dummy/audio_device_dummy.h b/trunk/src/modules/audio_device/main/source/dummy/audio_device_dummy.h
new file mode 100644
index 0000000..5a979d2
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/dummy/audio_device_dummy.h
@@ -0,0 +1,178 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DUMMY_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DUMMY_H
+
+#include <stdio.h>
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+
+class AudioDeviceDummy : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceDummy(const WebRtc_Word32 id);
+    ~AudioDeviceDummy();
+    
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+    
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+    
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);
+    
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+    
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+    
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+    
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const;
+    
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+    
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+    
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+    
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+    
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+    
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+    
+private:
+    void Lock() { _critSect.Enter(); };
+    void UnLock() { _critSect.Leave(); };
+    
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+    
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    CriticalSectionWrapper&	_critSect;
+    WebRtc_Word32 _id;
+    
+    EventWrapper& _timeEventRec;
+    EventWrapper& _timeEventPlay;
+    EventWrapper& _recStartEvent;
+    EventWrapper& _playStartEvent;
+    
+    ThreadWrapper* _ptrThreadRec;
+    ThreadWrapper* _ptrThreadPlay;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+    
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _speakerIsInitialized;
+    bool _microphoneIsInitialized;
+    
+    WebRtc_Word8 _recBuffer[2*160];
+    
+    FILE* _playDataFile;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DUMMY_H
diff --git a/trunk/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.cc b/trunk/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.cc
new file mode 100644
index 0000000..44f2c25
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.cc
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_dummy.h"
+#include "audio_device_config.h" // DEBUG_PRINT()
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+AudioDeviceUtilityDummy::AudioDeviceUtilityDummy(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _lastError(AudioDeviceModule::kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+AudioDeviceUtilityDummy::~AudioDeviceUtilityDummy()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        // free stuff here...
+    }
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+
+WebRtc_Word32 AudioDeviceUtilityDummy::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "Dummy");
+
+    return 0;
+}
+
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.h b/trunk/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.h
new file mode 100644
index 0000000..601c448
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_DUMMY_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_DUMMY_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityDummy: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityDummy(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityDummy();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    AudioDeviceModule::ErrorCode _lastError;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_DUMMY_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/alsasymboltable_linux.cc b/trunk/src/modules/audio_device/main/source/linux/alsasymboltable_linux.cc
new file mode 100644
index 0000000..1b1707c
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/alsasymboltable_linux.cc
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "alsasymboltable_linux.h"
+
+namespace webrtc_adm_linux_alsa {
+
+LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(AlsaSymbolTable, "libasound.so.2")
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(AlsaSymbolTable, sym)
+ALSA_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DEFINE_END(AlsaSymbolTable)
+
+}  // namespace webrtc_adm_linux_alsa
diff --git a/trunk/src/modules/audio_device/main/source/linux/alsasymboltable_linux.h b/trunk/src/modules/audio_device/main/source/linux/alsasymboltable_linux.h
new file mode 100644
index 0000000..d25bbd7
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/alsasymboltable_linux.h
@@ -0,0 +1,147 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_ALSASYMBOLTABLE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_ALSASYMBOLTABLE_LINUX_H
+
+#include "latebindingsymboltable_linux.h"
+
+namespace webrtc_adm_linux_alsa {
+
+// The ALSA symbols we need, as an X-Macro list.
+// This list must contain precisely every libasound function that is used in
+// alsasoundsystem.cc.
+#define ALSA_SYMBOLS_LIST \
+  X(snd_device_name_free_hint) \
+  X(snd_device_name_get_hint) \
+  X(snd_device_name_hint) \
+  X(snd_pcm_avail_update) \
+  X(snd_pcm_close) \
+  X(snd_pcm_delay) \
+  X(snd_pcm_drop) \
+  X(snd_pcm_open) \
+  X(snd_pcm_prepare) \
+  X(snd_pcm_readi) \
+  X(snd_pcm_recover) \
+  X(snd_pcm_resume) \
+  X(snd_pcm_reset) \
+  X(snd_pcm_state) \
+  X(snd_pcm_set_params) \
+  X(snd_pcm_get_params) \
+  X(snd_pcm_start) \
+  X(snd_pcm_stream) \
+  X(snd_pcm_frames_to_bytes) \
+  X(snd_pcm_bytes_to_frames) \
+  X(snd_pcm_wait) \
+  X(snd_pcm_writei) \
+  X(snd_pcm_info_get_class) \
+  X(snd_pcm_info_get_subdevices_avail) \
+  X(snd_pcm_info_get_subdevice_name) \
+  X(snd_pcm_info_set_subdevice) \
+  X(snd_pcm_info_get_id) \
+  X(snd_pcm_info_set_device) \
+  X(snd_pcm_info_set_stream) \
+  X(snd_pcm_info_get_name) \
+  X(snd_pcm_info_get_subdevices_count) \
+  X(snd_pcm_info_sizeof) \
+  X(snd_pcm_hw_params) \
+  X(snd_pcm_hw_params_malloc) \
+  X(snd_pcm_hw_params_free) \
+  X(snd_pcm_hw_params_any) \
+  X(snd_pcm_hw_params_set_access) \
+  X(snd_pcm_hw_params_set_format) \
+  X(snd_pcm_hw_params_set_channels) \
+  X(snd_pcm_hw_params_set_rate_near) \
+  X(snd_pcm_hw_params_set_buffer_size_near) \
+  X(snd_card_next) \
+  X(snd_card_get_name) \
+  X(snd_config_update) \
+  X(snd_config_copy) \
+  X(snd_config_get_id) \
+  X(snd_ctl_open) \
+  X(snd_ctl_close) \
+  X(snd_ctl_card_info) \
+  X(snd_ctl_card_info_sizeof) \
+  X(snd_ctl_card_info_get_id) \
+  X(snd_ctl_card_info_get_name) \
+  X(snd_ctl_pcm_next_device) \
+  X(snd_ctl_pcm_info) \
+  X(snd_mixer_load) \
+  X(snd_mixer_free) \
+  X(snd_mixer_detach) \
+  X(snd_mixer_close) \
+  X(snd_mixer_open) \
+  X(snd_mixer_attach) \
+  X(snd_mixer_first_elem) \
+  X(snd_mixer_elem_next) \
+  X(snd_mixer_selem_get_name) \
+  X(snd_mixer_selem_is_active) \
+  X(snd_mixer_selem_register) \
+  X(snd_mixer_selem_set_playback_volume_all) \
+  X(snd_mixer_selem_get_playback_volume) \
+  X(snd_mixer_selem_has_playback_volume) \
+  X(snd_mixer_selem_get_playback_volume_range) \
+  X(snd_mixer_selem_has_playback_switch) \
+  X(snd_mixer_selem_get_playback_switch) \
+  X(snd_mixer_selem_set_playback_switch_all) \
+  X(snd_mixer_selem_has_capture_switch) \
+  X(snd_mixer_selem_get_capture_switch) \
+  X(snd_mixer_selem_set_capture_switch_all) \
+  X(snd_mixer_selem_has_capture_volume) \
+  X(snd_mixer_selem_set_capture_volume_all) \
+  X(snd_mixer_selem_get_capture_volume) \
+  X(snd_mixer_selem_get_capture_volume_range) \
+  X(snd_dlopen) \
+  X(snd_dlclose) \
+  X(snd_config) \
+  X(snd_config_search) \
+  X(snd_config_get_string) \
+  X(snd_config_search_definition) \
+  X(snd_config_get_type) \
+  X(snd_config_delete) \
+  X(snd_config_iterator_entry) \
+  X(snd_config_iterator_first) \
+  X(snd_config_iterator_next) \
+  X(snd_config_iterator_end) \
+  X(snd_config_delete_compound_members) \
+  X(snd_config_get_integer) \
+  X(snd_config_get_bool) \
+  X(snd_dlsym) \
+  X(snd_strerror) \
+  X(snd_lib_error) \
+  X(snd_lib_error_set_handler)
+
+LATE_BINDING_SYMBOL_TABLE_DECLARE_BEGIN(AlsaSymbolTable)
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DECLARE_ENTRY(AlsaSymbolTable, sym)
+ALSA_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DECLARE_END(AlsaSymbolTable)
+
+}  // namespace webrtc_adm_linux_alsa
+
+#endif  // WEBRTC_AUDIO_DEVICE_ALSASYMBOLTABLE_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.cc b/trunk/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.cc
new file mode 100644
index 0000000..f82d502
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.cc
@@ -0,0 +1,2320 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_device_utility.h"
+#include "audio_device_alsa_linux.h"
+#include "audio_device_config.h"
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+
+
+webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable;
+
+// Accesses ALSA functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libasound, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_alsa::AlsaSymbolTable, &AlsaSymbolTable, sym)
+
+// Redefine these here to be able to do late-binding
+#undef snd_ctl_card_info_alloca
+#define snd_ctl_card_info_alloca(ptr) \
+        do { *ptr = (snd_ctl_card_info_t *) \
+            __builtin_alloca (LATE(snd_ctl_card_info_sizeof)()); \
+            memset(*ptr, 0, LATE(snd_ctl_card_info_sizeof)()); } while (0)
+
+#undef snd_pcm_info_alloca
+#define snd_pcm_info_alloca(pInfo) \
+       do { *pInfo = (snd_pcm_info_t *) \
+       __builtin_alloca (LATE(snd_pcm_info_sizeof)()); \
+       memset(*pInfo, 0, LATE(snd_pcm_info_sizeof)()); } while (0)
+
+// snd_lib_error_handler_t
+void WebrtcAlsaErrorHandler(const char *file,
+                          int line,
+                          const char *function,
+                          int err,
+                          const char *fmt,...){};
+
+namespace webrtc
+{
+static const unsigned int ALSA_PLAYOUT_FREQ = 48000;
+static const unsigned int ALSA_PLAYOUT_CH = 2;
+static const unsigned int ALSA_PLAYOUT_LATENCY = 40*1000; // in us
+static const unsigned int ALSA_CAPTURE_FREQ = 48000;
+static const unsigned int ALSA_CAPTURE_CH = 2;
+static const unsigned int ALSA_CAPTURE_LATENCY = 40*1000; // in us
+static const unsigned int ALSA_PLAYOUT_WAIT_TIMEOUT = 5; // in ms
+static const unsigned int ALSA_CAPTURE_WAIT_TIMEOUT = 5; // in ms
+
+#define FUNC_GET_NUM_OF_DEVICE 0
+#define FUNC_GET_DEVICE_NAME 1
+#define FUNC_GET_DEVICE_NAME_FOR_AN_ENUM 2
+
+AudioDeviceLinuxALSA::AudioDeviceLinuxALSA(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrThreadRec(NULL),
+    _ptrThreadPlay(NULL),
+    _recThreadID(0),
+    _playThreadID(0),
+    _id(id),
+    _mixerManager(id),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _handleRecord(NULL),
+    _handlePlayout(NULL),
+    _recordingBuffersizeInFrame(0),
+    _recordingPeriodSizeInFrame(0),
+    _playoutBufferSizeInFrame(0),
+    _playoutPeriodSizeInFrame(0),
+    _recordingBufferSizeIn10MS(0),
+    _playoutBufferSizeIn10MS(0),
+    _recordingFramesIn10MS(0),
+    _playoutFramesIn10MS(0),
+    _recordingFreq(ALSA_CAPTURE_FREQ),
+    _playoutFreq(ALSA_PLAYOUT_FREQ),
+    _recChannels(ALSA_CAPTURE_CH),
+    _playChannels(ALSA_PLAYOUT_CH),
+    _recordingBuffer(NULL),
+    _playoutBuffer(NULL),
+    _recordingFramesLeft(0),
+    _playoutFramesLeft(0),
+    _playbackBufferSize(0),
+    _playBufType(AudioDeviceModule::kFixedBufferSize),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _AGC(false),
+    _recordingDelay(0),
+    _playoutDelay(0),
+    _writeErrors(0),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _playBufDelay(80),
+    _playBufDelayFixed(80)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceLinuxALSA - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceLinuxALSA::~AudioDeviceLinuxALSA()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    
+    Terminate();
+
+    // Clean up the recording buffer and playout buffer.
+    if (_recordingBuffer)
+    {
+        delete [] _recordingBuffer;
+        _recordingBuffer = NULL;
+    }
+    if (_playoutBuffer)
+    {
+        delete [] _playoutBuffer;
+        _playoutBuffer = NULL;
+    }
+    delete &_critSect;
+}
+
+void AudioDeviceLinuxALSA::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // Inform the AudioBuffer about default settings for this implementation.
+    // Set all values to zero here since the actual settings will be done by
+    // InitPlayout and InitRecording later.
+    _ptrAudioBuffer->SetRecordingSampleRate(0);
+    _ptrAudioBuffer->SetPlayoutSampleRate(0);
+    _ptrAudioBuffer->SetRecordingChannels(0);
+    _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kLinuxAlsaAudio;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::Init()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Load libasound
+    if (!AlsaSymbolTable.Load())
+    {
+        // Alsa is not installed on
+        // this system
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                   "  failed to load symbol table");
+        return -1;
+    }
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    CriticalSectionScoped lock(_critSect);
+
+    _mixerManager.Close();
+
+    // RECORDING
+    if (_ptrThreadRec)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the rec audio thread");
+        }
+
+        _critSect.Enter();
+    }
+
+    // PLAYOUT
+    if (_ptrThreadPlay)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the play audio thread");
+        }
+
+        _critSect.Enter();
+    }
+
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::Initialized() const
+{
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker
+    // exists
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    char devName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, true, _outputDeviceIndex, devName, kAdmMaxDeviceNameSize);
+    return _mixerManager.OpenSpeaker(devName);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid
+    // microphone exists
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    char devName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, false, _inputDeviceIndex, devName, kAdmMaxDeviceNameSize);
+    return _mixerManager.OpenMicrophone(devName);
+}
+
+bool AudioDeviceLinuxALSA::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+bool AudioDeviceLinuxALSA::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a volume control
+    // exists
+    available = true;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+    
+    return 0;
+}
+
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                                     WebRtc_UWord16 volumeRight)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0); 
+     
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetSpeakerMute(bool enable)
+{
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0); 
+        
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetMicrophoneMute(bool enable)
+{
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0); 
+        
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneBoostIsAvailable(bool& available)
+{
+    
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Enumerate all available microphones and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0); 
+        
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoRecordingIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // If we already have initialized in stereo it's obviously available
+    if (_recIsInitialized && (2 == _recChannels))
+    {
+        available = true;
+        return 0;
+    }
+
+    // Save rec states and the number of rec channels
+    bool recIsInitialized = _recIsInitialized;
+    bool recording = _recording;
+    int recChannels = _recChannels;
+
+    available = false;
+    
+    // Stop/uninitialize recording if initialized (and possibly started)
+    if (_recIsInitialized)
+    {
+        StopRecording();
+    }
+
+    // Try init in stereo;
+    _recChannels = 2;
+    if (InitRecording() == 0)
+    {
+        available = true;
+    }
+
+    // Stop/uninitialize recording
+    StopRecording();
+
+    // Recover previous states
+    _recChannels = recChannels;
+    if (recIsInitialized)
+    {
+        InitRecording();
+    }
+    if (recording)
+    {
+        StartRecording();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetStereoRecording(bool enable)
+{
+
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoPlayoutIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // If we already have initialized in stereo it's obviously available
+    if (_playIsInitialized && (2 == _playChannels))
+    {
+        available = true;
+        return 0;
+    }
+
+    // Save rec states and the number of rec channels
+    bool playIsInitialized = _playIsInitialized;
+    bool playing = _playing;
+    int playChannels = _playChannels;
+
+    available = false;
+    
+    // Stop/uninitialize recording if initialized (and possibly started)
+    if (_playIsInitialized)
+    {
+        StopPlayout();
+    }
+
+    // Try init in stereo;
+    _playChannels = 2;
+    if (InitPlayout() == 0)
+    {
+        available = true;
+    }
+
+    // Stop/uninitialize recording
+    StopPlayout();
+
+    // Recover previous states
+    _playChannels = playChannels;
+    if (playIsInitialized)
+    {
+        InitPlayout();
+    }
+    if (playing)
+    {
+        StartPlayout();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetStereoPlayout(bool enable)
+{
+
+    if (enable)
+        _playChannels = 2;
+    else
+        _playChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::AGC() const
+{
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a volume control
+    // exists
+    available = true;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetMicrophoneVolume(volume));
+ 
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  failed to retrive current microphone level");
+        return -1;
+    }
+
+    volume = level;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MaxMicrophoneVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0); 
+        
+    if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceLinuxALSA::PlayoutDevices()
+{
+
+    return (WebRtc_Word16)GetDevicesInfo(0, true);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 nDevices = GetDevicesInfo(0, true);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable audio output devices is %u", nDevices);
+
+    if (index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
// Selecting a playout device by Windows role has no ALSA equivalent;
// this overload always fails.
WebRtc_Word32 AudioDeviceLinuxALSA::SetPlayoutDevice(
    AudioDeviceModule::WindowsDeviceType /*device*/)
{
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "WindowsDeviceType not supported");
    return -1;
}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+    WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(PlayoutDevices());
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return GetDevicesInfo(1, true, index, name, kAdmMaxDeviceNameSize);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+    WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+    
+    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize);
+}
+
+WebRtc_Word16 AudioDeviceLinuxALSA::RecordingDevices()
+{
+
+    return (WebRtc_Word16)GetDevicesInfo(0, false);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 nDevices = GetDevicesInfo(0, false);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable audio input devices is %u", nDevices);
+
+    if (index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
// ----------------------------------------------------------------------------
//  SetRecordingDevice II (II)
// ----------------------------------------------------------------------------

// Selecting a capture device by Windows role has no ALSA equivalent;
// this overload always fails.
WebRtc_Word32 AudioDeviceLinuxALSA::SetRecordingDevice(
    AudioDeviceModule::WindowsDeviceType /*device*/)
{
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "WindowsDeviceType not supported");
    return -1;
}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutIsAvailable(bool& available)
+{
+    
+    available = false;
+
+    // Try to initialize the playout side with mono
+    // Assumes that user set num channels after calling this function
+    _playChannels = 1;
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+    else
+    {
+        // It may be possible to play out in stereo
+        res = StereoPlayoutIsAvailable(available);
+        if (available)
+        {
+            // Then set channels to 2 so InitPlayout doesn't fail
+            _playChannels = 2;
+        }
+    }
+    
+    return res;
+}
+
// Probes whether recording can be initialized at all: tries mono first and
// falls back to a stereo probe. Leaves _recChannels set to a working value
// (2 when only stereo works). The probe's initialization is rolled back.
WebRtc_Word32 AudioDeviceLinuxALSA::RecordingIsAvailable(bool& available)
{

    available = false;

    // Try to initialize the recording side with mono
    // Assumes that user set num channels after calling this function
    _recChannels = 1;
    WebRtc_Word32 res = InitRecording();

    // Cancel effect of initialization
    StopRecording();

    if (res != -1)
    {
        available = true;
    }
    else
    {
        // It may be possible to record in stereo
        res = StereoRecordingIsAvailable(available);
        if (available)
        {
            // Then set channels to 2 so InitRecording doesn't fail
            // (original comment said "InitPlayout" by mistake)
            _recChannels = 2;
        }
    }

    return res;
}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitPlayout()
+{
+
+    int errVal = 0;
+
+    CriticalSectionScoped lock(_critSect);
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    // Start by closing any existing wave-output devices
+    //
+    if (_handlePlayout != NULL)
+    {
+        LATE(snd_pcm_close)(_handlePlayout);
+        _handlePlayout = NULL;
+        _playIsInitialized = false;
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Error closing current playout sound device, error:"
+                         " %s", LATE(snd_strerror)(errVal));
+        }
+    }
+
+    // Open PCM device for playout
+    char deviceName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, true, _outputDeviceIndex, deviceName,
+                   kAdmMaxDeviceNameSize);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  InitPlayout open (%s)", deviceName);
+
+    errVal = LATE(snd_pcm_open)
+                 (&_handlePlayout,
+                  deviceName,
+                  SND_PCM_STREAM_PLAYBACK,
+                  SND_PCM_NONBLOCK);
+
+    if (errVal == -EBUSY) // Device busy - try some more!
+    {
+        for (int i=0; i < 5; i++)
+        {
+            sleep(1);
+            errVal = LATE(snd_pcm_open)
+                         (&_handlePlayout,
+                          deviceName,
+                          SND_PCM_STREAM_PLAYBACK,
+                          SND_PCM_NONBLOCK);
+            if (errVal == 0)
+            {
+                break;
+            }
+        }
+    }
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     unable to open playback device: %s (%d)",
+                     LATE(snd_strerror)(errVal),
+                     errVal);
+        _handlePlayout = NULL;
+        return -1;
+    }
+
+    _playoutFramesIn10MS = _playoutFreq/100;
+    if ((errVal = LATE(snd_pcm_set_params)( _handlePlayout,
+#if defined(WEBRTC_BIG_ENDIAN)
+        SND_PCM_FORMAT_S16_BE,
+#else
+        SND_PCM_FORMAT_S16_LE, //format
+#endif
+        SND_PCM_ACCESS_RW_INTERLEAVED, //access
+        _playChannels, //channels
+        _playoutFreq, //rate
+        1, //soft_resample
+        ALSA_PLAYOUT_LATENCY //40*1000 //latency required overall latency in us
+    )) < 0)
+    {   /* 0.5sec */
+        _playoutFramesIn10MS = 0;
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     unable to set playback device: %s (%d)",
+                     LATE(snd_strerror)(errVal),
+                     errVal);
+        ErrorRecovery(errVal, _handlePlayout);
+        errVal = LATE(snd_pcm_close)(_handlePlayout);
+        _handlePlayout = NULL;
+        return -1;
+    }
+
+    errVal = LATE(snd_pcm_get_params)(_handlePlayout,
+        &_playoutBufferSizeInFrame, &_playoutPeriodSizeInFrame);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "    snd_pcm_get_params %s",
+                     LATE(snd_strerror)(errVal),
+                     errVal);
+        _playoutBufferSizeInFrame = 0;
+        _playoutPeriodSizeInFrame = 0;
+    }
+    else {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "    playout snd_pcm_get_params "
+                     "buffer_size:%d period_size :%d",
+                     _playoutBufferSizeInFrame, _playoutPeriodSizeInFrame);
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update webrtc audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(_playoutFreq);
+        _ptrAudioBuffer->SetPlayoutChannels(_playChannels);
+    }
+
+    // Set play buffer size
+    _playoutBufferSizeIn10MS = LATE(snd_pcm_frames_to_bytes)(
+        _handlePlayout, _playoutFramesIn10MS);
+
+    // Init varaibles used for play
+    _playWarning = 0;
+    _playError = 0;
+
+    if (_handlePlayout != NULL)
+    {
+        _playIsInitialized = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
// Opens and configures the ALSA capture PCM for the selected input device:
// initializes the microphone, opens the device (retrying while busy), applies
// S16 interleaved format at _recordingFreq, and sizes the 10 ms recording
// buffer. If the configured channel count is rejected it retries once with
// the opposite mono/stereo setting. Returns 0 on success or if already
// initialized, -1 on failure.
WebRtc_Word32 AudioDeviceLinuxALSA::InitRecording()
{

    int errVal = 0;

    CriticalSectionScoped lock(_critSect);

    if (_recording)
    {
        return -1;
    }

    if (!_inputDeviceIsSpecified)
    {
        return -1;
    }

    if (_recIsInitialized)
    {
        return 0;
    }

    // Initialize the microphone (devices might have been added or removed)
    // Failure is non-fatal: capture can work without a volume control.
    if (InitMicrophone() == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "  InitMicrophone() failed");
    }

    // Start by closing any existing pcm-input devices
    //
    if (_handleRecord != NULL)
    {
        // NOTE(review): this inner errVal shadows the outer one; harmless
        // here since it is only used inside this scope.
        int errVal = LATE(snd_pcm_close)(_handleRecord);
        _handleRecord = NULL;
        _recIsInitialized = false;
        if (errVal < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "     Error closing current recording sound device,"
                         " error: %s",
                         LATE(snd_strerror)(errVal));
        }
    }

    // Open PCM device for recording
    // The corresponding settings for playout are made after the record settings
    char deviceName[kAdmMaxDeviceNameSize] = {0};
    GetDevicesInfo(2, false, _inputDeviceIndex, deviceName,
                   kAdmMaxDeviceNameSize);

    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "InitRecording open (%s)", deviceName);
    errVal = LATE(snd_pcm_open)
                 (&_handleRecord,
                  deviceName,
                  SND_PCM_STREAM_CAPTURE,
                  SND_PCM_NONBLOCK);

    // Available modes: 0 = blocking, SND_PCM_NONBLOCK, SND_PCM_ASYNC
    if (errVal == -EBUSY) // Device busy - try some more!
    {
        // Retry for up to ~5 seconds while another client holds the device.
        for (int i=0; i < 5; i++)
        {
            sleep(1);
            errVal = LATE(snd_pcm_open)
                         (&_handleRecord,
                          deviceName,
                          SND_PCM_STREAM_CAPTURE,
                          SND_PCM_NONBLOCK);
            if (errVal == 0)
            {
                break;
            }
        }
    }
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "    unable to open record device: %s",
                     LATE(snd_strerror)(errVal));
        _handleRecord = NULL;
        return -1;
    }

    // 10 ms worth of frames at the configured capture rate.
    _recordingFramesIn10MS = _recordingFreq/100;
    if ((errVal = LATE(snd_pcm_set_params)(_handleRecord,
#if defined(WEBRTC_BIG_ENDIAN)
        SND_PCM_FORMAT_S16_BE, //format
#else
        SND_PCM_FORMAT_S16_LE, //format
#endif
        SND_PCM_ACCESS_RW_INTERLEAVED, //access
        _recChannels, //channels
        _recordingFreq, //rate
        1, //soft_resample
        ALSA_CAPTURE_LATENCY //latency in us
    )) < 0)
    {
         // Fall back to another mode then.
         // Some devices only support mono or only stereo; flip and retry.
         if (_recChannels == 1)
           _recChannels = 2;
         else
           _recChannels = 1;

         if ((errVal = LATE(snd_pcm_set_params)(_handleRecord,
#if defined(WEBRTC_BIG_ENDIAN)
             SND_PCM_FORMAT_S16_BE, //format
#else
             SND_PCM_FORMAT_S16_LE, //format
#endif
             SND_PCM_ACCESS_RW_INTERLEAVED, //access
             _recChannels, //channels
             _recordingFreq, //rate
             1, //soft_resample
             ALSA_CAPTURE_LATENCY //latency in us
         )) < 0)
         {
             _recordingFramesIn10MS = 0;
             WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                          "    unable to set record settings: %s (%d)",
                          LATE(snd_strerror)(errVal), errVal);
             ErrorRecovery(errVal, _handleRecord);
             errVal = LATE(snd_pcm_close)(_handleRecord);
             _handleRecord = NULL;
             return -1;
         }
    }

    errVal = LATE(snd_pcm_get_params)(_handleRecord,
        &_recordingBuffersizeInFrame, &_recordingPeriodSizeInFrame);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "    snd_pcm_get_params %s",
                     LATE(snd_strerror)(errVal), errVal);
        _recordingBuffersizeInFrame = 0;
        _recordingPeriodSizeInFrame = 0;
    }
    else {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                     "    capture snd_pcm_get_params "
                     "buffer_size:%d period_size:%d",
                     _recordingBuffersizeInFrame, _recordingPeriodSizeInFrame);
    }

    if (_ptrAudioBuffer)
    {
        // Update webrtc audio buffer with the selected parameters
        _ptrAudioBuffer->SetRecordingSampleRate(_recordingFreq);
        _ptrAudioBuffer->SetRecordingChannels(_recChannels);
    }

    // Set rec buffer size and create buffer (bytes for 10 ms of frames).
    _recordingBufferSizeIn10MS = LATE(snd_pcm_frames_to_bytes)(
        _handleRecord, _recordingFramesIn10MS);

    if (_handleRecord != NULL)
    {
        // Mark recording side as initialized
        _recIsInitialized = true;
        return 0;
    }
    else
    {
        return -1;
    }

    // NOTE(review): unreachable — both branches above return.
    return 0;
}
+
// Starts the capture stream: allocates the 10 ms recording buffer, spawns
// the real-time capture thread, then prepares and starts the ALSA PCM.
// Requires a prior successful InitRecording().
// Returns 0 on success (or if already recording), -1 on failure.
WebRtc_Word32 AudioDeviceLinuxALSA::StartRecording()
{

    if (!_recIsInitialized)
    {
        return -1;
    }

    if (_recording)
    {
        // Already running; treated as success.
        return 0;
    }

    // Set before the thread starts so the capture thread sees it immediately.
    _recording = true;

    int errVal = 0;
    _recordingFramesLeft = _recordingFramesIn10MS;

    // Make sure we only create the buffer once.
    if (!_recordingBuffer)
        _recordingBuffer = new WebRtc_Word8[_recordingBufferSizeIn10MS];
    if (!_recordingBuffer)
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     "   failed to alloc recording buffer");
        _recording = false;
        return -1;
    }
    // RECORDING
    const char* threadName = "webrtc_audio_module_capture_thread";
    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc,
                                                this,
                                                kRealtimePriority,
                                                threadName);
    if (_ptrThreadRec == NULL)
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     "  failed to create the rec audio thread");
        // Roll back: clear the flag and release the buffer.
        _recording = false;
        delete [] _recordingBuffer;
        _recordingBuffer = NULL;
        return -1;
    }

    unsigned int threadID(0);
    if (!_ptrThreadRec->Start(threadID))
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     "  failed to start the rec audio thread");
        // Roll back: destroy the thread object and release the buffer.
        _recording = false;
        delete _ptrThreadRec;
        _ptrThreadRec = NULL;
        delete [] _recordingBuffer;
        _recordingBuffer = NULL;
        return -1;
    }
    _recThreadID = threadID;

    errVal = LATE(snd_pcm_prepare)(_handleRecord);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "     capture snd_pcm_prepare failed (%s)\n",
                     LATE(snd_strerror)(errVal));
        // just log error
        // if snd_pcm_open fails will return -1
    }

    errVal = LATE(snd_pcm_start)(_handleRecord);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "     capture snd_pcm_start err: %s",
                     LATE(snd_strerror)(errVal));
        // Retry once before giving up and tearing everything down.
        errVal = LATE(snd_pcm_start)(_handleRecord);
        if (errVal < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "     capture snd_pcm_start 2nd try err: %s",
                         LATE(snd_strerror)(errVal));
            StopRecording();
            return -1;
        }
    }

    return 0;
}
+
// Stops the capture stream: clears the state flags, stops the capture
// thread, then drops and closes the ALSA PCM and frees the 10 ms buffer.
// The lock is deliberately released before stopping the thread so the
// capture thread can acquire it and observe _recording == false.
// Returns 0 on success (or if not initialized), -1 on failure.
WebRtc_Word32 AudioDeviceLinuxALSA::StopRecording()
{

    {
      CriticalSectionScoped lock(_critSect);

      if (!_recIsInitialized)
      {
          return 0;
      }

      if (_handleRecord == NULL)
      {
          return -1;
      }

      // Make sure we don't start recording (it's asynchronous).
      _recIsInitialized = false;
      _recording = false;
    }

    // Stop the capture thread outside the lock (see note above).
    if (_ptrThreadRec && !_ptrThreadRec->Stop())
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "    failed to stop the rec audio thread");
        return -1;
    }
    else {
        delete _ptrThreadRec;
        _ptrThreadRec = NULL;
    }

    // Re-acquire the lock for the remaining teardown.
    CriticalSectionScoped lock(_critSect);
    _recordingFramesLeft = 0;
    if (_recordingBuffer)
    {
        delete [] _recordingBuffer;
        _recordingBuffer = NULL;
    }

    // Stop and close pcm recording device.
    int errVal = LATE(snd_pcm_drop)(_handleRecord);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "     Error stop recording: %s",
                     LATE(snd_strerror)(errVal));
        return -1;
    }

    errVal = LATE(snd_pcm_close)(_handleRecord);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "     Error closing record sound device, error: %s",
                     LATE(snd_strerror)(errVal));
        return -1;
    }

    // Check if we have muted and unmute if so.
    bool muteEnabled = false;
    MicrophoneMute(muteEnabled);
    if (muteEnabled)
    {
        SetMicrophoneMute(false);
    }

    // set the pcm input handle to NULL
    // NOTE(review): logged at error level although purely informational.
    _handleRecord = NULL;
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "  handle_capture is now set to NULL");

    return 0;
}
+
+bool AudioDeviceLinuxALSA::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+bool AudioDeviceLinuxALSA::Recording() const
+{
+    return (_recording);
+}
+
+bool AudioDeviceLinuxALSA::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
// Starts the playout stream: allocates the 10 ms playout buffer, spawns the
// real-time playout thread and prepares the ALSA PCM. Requires a prior
// successful InitPlayout().
// Returns 0 on success (or if already playing), -1 on failure.
WebRtc_Word32 AudioDeviceLinuxALSA::StartPlayout()
{
    if (!_playIsInitialized)
    {
        return -1;
    }

    if (_playing)
    {
        // Already running; treated as success.
        return 0;
    }

    // Set before the thread starts so the playout thread sees it immediately.
    _playing = true;

    _playoutFramesLeft = 0;
    // Allocate the buffer only on the first start.
    if (!_playoutBuffer)
        _playoutBuffer = new WebRtc_Word8[_playoutBufferSizeIn10MS];
    if (!_playoutBuffer)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "    failed to alloc playout buf");
      _playing = false;
      return -1;
    }

    // PLAYOUT
    const char* threadName = "webrtc_audio_module_play_thread";
    _ptrThreadPlay =  ThreadWrapper::CreateThread(PlayThreadFunc,
                                                  this,
                                                  kRealtimePriority,
                                                  threadName);
    if (_ptrThreadPlay == NULL)
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     "    failed to create the play audio thread");
        // Roll back: clear the flag and release the buffer.
        _playing = false;
        delete [] _playoutBuffer;
        _playoutBuffer = NULL;
        return -1;
    }

    unsigned int threadID(0);
    if (!_ptrThreadPlay->Start(threadID))
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     "  failed to start the play audio thread");
        // Roll back: destroy the thread object and release the buffer.
        _playing = false;
        delete _ptrThreadPlay;
        _ptrThreadPlay = NULL;
        delete [] _playoutBuffer;
        _playoutBuffer = NULL;
        return -1;
    }
    _playThreadID = threadID;

    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     "     playout snd_pcm_prepare failed (%s)\n",
                     LATE(snd_strerror)(errVal));
        // just log error
        // if snd_pcm_open fails will return -1
    }

    return 0;
}
+
// Stops the playout stream: clears the playing flag, stops the playout
// thread, then drops and closes the ALSA PCM and frees the 10 ms buffer.
// The lock is deliberately released before stopping the thread so the
// playout thread can acquire it and observe _playing == false.
// Returns 0 on success (or if not initialized), -1 on failure.
WebRtc_Word32 AudioDeviceLinuxALSA::StopPlayout()
{

    {
        CriticalSectionScoped lock(_critSect);

        if (!_playIsInitialized)
        {
            return 0;
        }

        if (_handlePlayout == NULL)
        {
            return -1;
        }

        _playing = false;
    }

    // stop playout thread first (outside the lock, see note above)
    if (_ptrThreadPlay && !_ptrThreadPlay->Stop())
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to stop the play audio thread");
        return -1;
    }
    else {
        delete _ptrThreadPlay;
        _ptrThreadPlay = NULL;
    }

    // Re-acquire the lock for the remaining teardown.
    CriticalSectionScoped lock(_critSect);

    _playoutFramesLeft = 0;
    delete [] _playoutBuffer;
    _playoutBuffer = NULL;

    // stop and close pcm playout device
    int errVal = LATE(snd_pcm_drop)(_handlePlayout);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "    Error stop playing: %s",
                     LATE(snd_strerror)(errVal));
    }

    errVal = LATE(snd_pcm_close)(_handlePlayout);
     if (errVal < 0)
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                      "    Error closing playout sound device, error: %s",
                      LATE(snd_strerror)(errVal));

     // set the pcm input handle to NULL
     _playIsInitialized = false;
     _handlePlayout = NULL;
     WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                  "  handle_playout is now set to NULL");

     return 0;
}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = (WebRtc_UWord16)_playoutDelay * 1000 / _playoutFreq;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    // Adding 10ms adjusted value to the record delay due to 10ms buffering.
+    delayMS = (WebRtc_UWord16)(10 + _recordingDelay * 1000 / _recordingFreq);
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType type,
+    WebRtc_UWord16 sizeMS)
+{
+    _playBufType = type;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        _playBufDelayFixed = sizeMS;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const
+{
+    type = _playBufType;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        sizeMS = _playBufDelayFixed; 
+    }
+    else
+    {
+        sizeMS = _playBufDelay; 
+    }
+
+    return 0;
+}
+
// CPU-load reporting is not implemented on Linux/ALSA; always fails and
// leaves |load| untouched.
WebRtc_Word32 AudioDeviceLinuxALSA::CPULoad(WebRtc_UWord16& load) const
{

    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               "  API call not supported on this platform");
    return -1;
}
+
+bool AudioDeviceLinuxALSA::PlayoutWarning() const
+{
+    return (_playWarning > 0);
+}
+
+bool AudioDeviceLinuxALSA::PlayoutError() const
+{
+    return (_playError > 0);
+}
+
+bool AudioDeviceLinuxALSA::RecordingWarning() const
+{
+    return (_recWarning > 0);
+}
+
+bool AudioDeviceLinuxALSA::RecordingError() const
+{
+    return (_recError > 0);
+}
+
// Resets the pending playout warning counter.
void AudioDeviceLinuxALSA::ClearPlayoutWarning()
{
    _playWarning = 0;
}
+
// Resets the pending playout error counter.
void AudioDeviceLinuxALSA::ClearPlayoutError()
{
    _playError = 0;
}
+
// Resets the pending recording warning counter.
void AudioDeviceLinuxALSA::ClearRecordingWarning()
{
    _recWarning = 0;
}
+
// Resets the pending recording error counter.
void AudioDeviceLinuxALSA::ClearRecordingError()
{
    _recError = 0;
}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+WebRtc_Word32 AudioDeviceLinuxALSA::GetDevicesInfo(
+    const WebRtc_Word32 function,
+    const bool playback,
+    const WebRtc_Word32 enumDeviceNo,
+    char* enumDeviceName,
+    const WebRtc_Word32 ednLen) const
+{
+    
+    // Device enumeration based on libjingle implementation
+    // by Tristan Schmelcher at Google Inc.
+
+    const char *type = playback ? "Output" : "Input";
+    // dmix and dsnoop are only for playback and capture, respectively, but ALSA
+    // stupidly includes them in both lists.
+    const char *ignorePrefix = playback ? "dsnoop:" : "dmix:" ;
+    // (ALSA lists many more "devices" of questionable interest, but we show them
+    // just in case the weird devices may actually be desirable for some
+    // users/systems.)
+
+    int err;
+    int enumCount(0);
+    bool keepSearching(true);
+
+    void **hints;
+    err = LATE(snd_device_name_hint)(-1,     // All cards
+                                     "pcm",  // Only PCM devices
+                                     &hints);
+    if (err != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "GetDevicesInfo - device name hint error: %s",
+                     LATE(snd_strerror)(err));
+        return -1;
+    }
+
+    enumCount++; // default is 0
+    if (function == FUNC_GET_DEVICE_NAME && enumDeviceNo == 0)
+    {
+        strcpy(enumDeviceName, "default");
+        return 0;
+    }
+    if (function == FUNC_GET_DEVICE_NAME_FOR_AN_ENUM && enumDeviceNo == 0)
+    {
+        strcpy(enumDeviceName, "default");
+        return 0;
+    }
+
+    for (void **list = hints; *list != NULL; ++list)
+    {
+        char *actualType = LATE(snd_device_name_get_hint)(*list, "IOID");
+        if (actualType)
+        {   // NULL means it's both.
+            bool wrongType = (strcmp(actualType, type) != 0);
+            free(actualType);
+            if (wrongType)
+            {
+                // Wrong type of device (i.e., input vs. output).
+                continue;
+            }
+        }
+
+        char *name = LATE(snd_device_name_get_hint)(*list, "NAME");
+        if (!name)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "Device has no name");
+            // Skip it.
+            continue;
+        }
+
+        // Now check if we actually want to show this device.
+        if (strcmp(name, "default") != 0 &&
+            strcmp(name, "null") != 0 &&
+            strcmp(name, "pulse") != 0 &&
+            strncmp(name, ignorePrefix, strlen(ignorePrefix)) != 0)
+        {
+            // Yes, we do.
+            char *desc = LATE(snd_device_name_get_hint)(*list, "DESC");
+            if (!desc)
+            {
+                // Virtual devices don't necessarily have descriptions.
+                // Use their names instead
+                desc = name;
+            }
+
+            if (FUNC_GET_NUM_OF_DEVICE == function)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                             "    Enum device %d - %s", enumCount, name);
+
+            }
+            if ((FUNC_GET_DEVICE_NAME == function) &&
+                (enumDeviceNo == enumCount))
+            {
+
+                // We have found the enum device, copy the name to buffer
+                strncpy(enumDeviceName, desc, ednLen);
+                enumDeviceName[ednLen-1] = '\0';
+                keepSearching = false;
+                // replace '\n' with '-'
+                char * pret = strchr(enumDeviceName, '\n'/*0xa*/); //LF
+                if (pret)
+                    *pret = '-';
+            }
+            if ((FUNC_GET_DEVICE_NAME_FOR_AN_ENUM == function) &&
+                (enumDeviceNo == enumCount))
+            {
+                // We have found the enum device, copy the name to buffer
+                strncpy(enumDeviceName, name, ednLen);
+                enumDeviceName[ednLen-1] = '\0';
+                keepSearching = false;
+            }
+            if (keepSearching)
+            {
+                ++enumCount;
+            }
+
+            if (desc != name)
+            {
+                free(desc);
+            }
+        }
+
+        free(name);
+
+        if (!keepSearching)
+        {
+            break;
+        }
+    }
+
+    err = LATE(snd_device_name_free_hint)(hints);
+    if (err != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "GetDevicesInfo - device name free hint error: %s",
+                     LATE(snd_strerror)(err));
+        // Continue and return true anyways, since we did get the whole list.
+    }
+
+    if (FUNC_GET_NUM_OF_DEVICE == function)
+    {
+        if (enumCount == 1) // only default?
+            enumCount = 0;
+        return enumCount; // Normal return point for function 0
+    }
+
+    if (keepSearching)
+    {
+        // If we get here for function 1 and 2, we didn't find the specified
+        // enum device
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "GetDevicesInfo - Could not find device name or numbers");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_handleRecord == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  input state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::OutputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_handlePlayout == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  output state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::ErrorRecovery(WebRtc_Word32 error,
+                                                  snd_pcm_t* deviceHandle)
+{   // Returns 1 after a recovered -EPIPE, 0 on other recovery, <0 on failure.
+    int st = LATE(snd_pcm_state)(deviceHandle);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+               "Trying to recover from %s error: %s (%d) (state %d)",
+               (LATE(snd_pcm_stream)(deviceHandle) == SND_PCM_STREAM_CAPTURE) ?
+                   "capture" : "playout", LATE(snd_strerror)(error), error, st);
+
+    // It is recommended to use snd_pcm_recover for all errors. If that function
+    // cannot handle the error, the input error code will be returned, otherwise
+    // 0 is returned. From snd_pcm_recover API doc: "This functions handles
+    // -EINTR (4) (interrupted system call), -EPIPE (32) (playout overrun or
+    // capture underrun) and -ESTRPIPE (86) (stream is suspended) error codes
+    // trying to prepare given stream for next I/O."
+
+    /** Open */
+    //    SND_PCM_STATE_OPEN = 0,
+    /** Setup installed */
+    //    SND_PCM_STATE_SETUP,
+    /** Ready to start */
+    //    SND_PCM_STATE_PREPARED,
+    /** Running */
+    //    SND_PCM_STATE_RUNNING,
+    /** Stopped: underrun (playback) or overrun (capture) detected */
+    //    SND_PCM_STATE_XRUN,= 4
+    /** Draining: running (playback) or stopped (capture) */
+    //    SND_PCM_STATE_DRAINING,
+    /** Paused */
+    //    SND_PCM_STATE_PAUSED,
+    /** Hardware is suspended */
+    //    SND_PCM_STATE_SUSPENDED,
+    /** Hardware is disconnected */
+    //    SND_PCM_STATE_DISCONNECTED,
+    //    SND_PCM_STATE_LAST = SND_PCM_STATE_DISCONNECTED
+
+    // snd_pcm_recover isn't available in older alsa, e.g. on the FC4 machine
+    // in Sthlm lab.
+
+    int res = LATE(snd_pcm_recover)(deviceHandle, error, 1);
+    if (0 == res)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                   "    Recovery - snd_pcm_recover OK");
+
+        if ((error == -EPIPE || error == -ESTRPIPE) && // Buf underrun/overrun.
+            _recording &&
+            LATE(snd_pcm_stream)(deviceHandle) == SND_PCM_STREAM_CAPTURE)
+        {
+            // For capture streams we also have to repeat the explicit start()
+            // to get data flowing again.
+            int err = LATE(snd_pcm_start)(deviceHandle);
+            if (err != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                             "  Recovery - snd_pcm_start error: %d", err);
+                return -1;
+            }
+        }
+
+        if ((error == -EPIPE || error == -ESTRPIPE) &&  // Buf underrun/overrun.
+            _playing &&
+            LATE(snd_pcm_stream)(deviceHandle) == SND_PCM_STREAM_PLAYBACK)
+        {
+            // For playout streams a restart of the stream is also needed to
+            // get data flowing again after an underrun.
+            int err = LATE(snd_pcm_start)(deviceHandle);
+            if (err != 0)
+            {
+              WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                       "    Recovery - snd_pcm_start error: %s",
+                       LATE(snd_strerror)(err));
+              return -1;
+            }
+        }
+
+        return -EPIPE == error ? 1 : 0;
+    }
+    else {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recovery - snd_pcm_recover failed: %d", res);
+    }
+
+    return res;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+bool AudioDeviceLinuxALSA::PlayThreadFunc(void* pThis)
+{   // Static trampoline: forwards the ThreadWrapper callback to the instance.
+    return (static_cast<AudioDeviceLinuxALSA*>(pThis)->PlayThreadProcess());
+}
+
+bool AudioDeviceLinuxALSA::RecThreadFunc(void* pThis)
+{   // Static trampoline: forwards the ThreadWrapper callback to the instance.
+    return (static_cast<AudioDeviceLinuxALSA*>(pThis)->RecThreadProcess());
+}
+
+bool AudioDeviceLinuxALSA::PlayThreadProcess()
+{   // One pass of the playout loop; returns false only when playout stops.
+    if(!_playing)
+        return false;
+
+    int err;
+    snd_pcm_sframes_t frames;
+    snd_pcm_sframes_t avail_frames;
+
+    Lock();
+    // Returns a positive number of frames ready, else a negative error code.
+    avail_frames = LATE(snd_pcm_avail_update)(_handlePlayout);
+    if (avail_frames < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                   "playout snd_pcm_avail_update error: %s",
+                   LATE(snd_strerror)(avail_frames));
+        ErrorRecovery(avail_frames, _handlePlayout);
+        UnLock();
+        return true;
+    }
+    else if (avail_frames == 0)
+    {
+        UnLock();
+
+        // Maximum time in milliseconds to wait; a negative value means infinity
+        err = LATE(snd_pcm_wait)(_handlePlayout, 2);
+        if (err == 0)
+        { // timeout occurred
+            WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                         "playout snd_pcm_wait timeout");
+        }
+
+        return true;
+    }
+
+    if (_playoutFramesLeft <= 0) // 10 ms chunk consumed; fetch the next one
+    {
+        UnLock();
+        _ptrAudioBuffer->RequestPlayoutData(_playoutFramesIn10MS);
+        Lock();
+
+        _playoutFramesLeft = _ptrAudioBuffer->GetPlayoutData(_playoutBuffer);
+        assert(_playoutFramesLeft == _playoutFramesIn10MS);
+    }
+
+    if (static_cast<WebRtc_UWord32>(avail_frames) > _playoutFramesLeft)
+        avail_frames = _playoutFramesLeft;
+
+    // Write from the tail of the 10 ms buffer: offset = total - bytes left.
+    int size = LATE(snd_pcm_frames_to_bytes)(_handlePlayout,
+        _playoutFramesLeft);
+    frames = LATE(snd_pcm_writei)(
+        _handlePlayout,
+        &_playoutBuffer[_playoutBufferSizeIn10MS - size],
+        avail_frames);
+
+    if (frames < 0)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                     "playout snd_pcm_writei error: %s",
+                     LATE(snd_strerror)(frames));
+        _playoutFramesLeft = 0;
+        ErrorRecovery(frames, _handlePlayout);
+        UnLock();
+        return true;
+    }
+    else {
+        assert(frames == avail_frames);
+        _playoutFramesLeft -= frames;
+    }
+
+    UnLock();
+    return true;
+}
+
+bool AudioDeviceLinuxALSA::RecThreadProcess()
+{   // One pass of the capture loop; returns false only when recording stops.
+    if (!_recording)
+        return false;
+
+    int err;
+    snd_pcm_sframes_t frames;
+    snd_pcm_sframes_t avail_frames;
+    WebRtc_Word8 buffer[_recordingBufferSizeIn10MS];
+
+    Lock();
+
+    // Returns a positive number of frames ready, else a negative error code.
+    avail_frames = LATE(snd_pcm_avail_update)(_handleRecord);
+    if (avail_frames < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "capture snd_pcm_avail_update error: %s",
+                     LATE(snd_strerror)(avail_frames));
+        ErrorRecovery(avail_frames, _handleRecord);
+        UnLock();
+        return true;
+    }
+    else if (avail_frames == 0)
+    { // no frame is available now
+        UnLock();
+
+        // Maximum time in milliseconds to wait; a negative value means infinity
+        err = LATE(snd_pcm_wait)(_handleRecord,
+            ALSA_CAPTURE_WAIT_TIMEOUT);
+        if (err == 0) // timeout occurred
+            WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                         "capture snd_pcm_wait timeout");
+
+        return true;
+    }
+
+    if (static_cast<WebRtc_UWord32>(avail_frames) > _recordingFramesLeft)
+        avail_frames = _recordingFramesLeft;
+
+    frames = LATE(snd_pcm_readi)(_handleRecord,
+        buffer, avail_frames); // frames actually read
+    if (frames < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "capture snd_pcm_readi error: %s",
+                     LATE(snd_strerror)(frames));
+        ErrorRecovery(frames, _handleRecord);
+        UnLock();
+        return true;
+    }
+    else if (frames > 0)
+    {
+        assert(frames == avail_frames);
+
+        // Append to the tail of the 10 ms buffer: offset = total - bytes left.
+        int left_size = LATE(snd_pcm_frames_to_bytes)(_handleRecord,
+            _recordingFramesLeft);
+        int size = LATE(snd_pcm_frames_to_bytes)(_handleRecord, frames);
+
+        memcpy(&_recordingBuffer[_recordingBufferSizeIn10MS - left_size],
+               buffer, size);
+        _recordingFramesLeft -= frames;
+
+        if (!_recordingFramesLeft)
+        { // buf is full
+            _recordingFramesLeft = _recordingFramesIn10MS;
+
+            // store the recorded buffer (no action will be taken if the
+            // #recorded samples is not a full buffer)
+            _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer,
+                                               _recordingFramesIn10MS);
+
+            WebRtc_UWord32 currentMicLevel = 0;
+            WebRtc_UWord32 newMicLevel = 0;
+
+            if (AGC())
+            {
+                // store current mic level in the audio buffer if AGC is enabled
+                if (MicrophoneVolume(currentMicLevel) == 0)
+                {
+                    if (currentMicLevel == 0xffffffff)
+                        currentMicLevel = 100;
+                    // this call does not affect the actual microphone volume
+                    _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+                }
+            }
+
+            // calculate delay
+            _playoutDelay = 0;
+            _recordingDelay = 0;
+            if (_handlePlayout)
+            {
+                err = LATE(snd_pcm_delay)(_handlePlayout,
+                    &_playoutDelay); // returned delay in frames
+                if (err < 0)
+                {
+                    // TODO(xians): Shall we call ErrorRecovery() here?
+                    _playoutDelay = 0;
+                    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                                 "playout snd_pcm_delay: %s",
+                                 LATE(snd_strerror)(err));
+                }
+            }
+
+            err = LATE(snd_pcm_delay)(_handleRecord,
+                &_recordingDelay); // returned delay in frames
+            if (err < 0)
+            {
+                // TODO(xians): Shall we call ErrorRecovery() here?
+                _recordingDelay = 0;
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                             "capture snd_pcm_delay: %s",
+                             LATE(snd_strerror)(err));
+            }
+
+            // TODO(xians): Shall we add 10ms buffer delay to the record delay?
+            _ptrAudioBuffer->SetVQEData(
+                _playoutDelay * 1000 / _playoutFreq,
+                _recordingDelay * 1000 / _recordingFreq, 0);
+
+            // Deliver recorded samples at specified sample rate, mic level etc.
+            // to the observer using callback.
+            UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            Lock();
+
+            if (AGC())
+            {
+                newMicLevel = _ptrAudioBuffer->NewMicLevel();
+                if (newMicLevel != 0)
+                {
+                    // The VQE will only deliver non-zero microphone levels when a
+                    // change is needed. Set this new mic level (received from the
+                    // observer as return value in the callback).
+                    if (SetMicrophoneVolume(newMicLevel) == -1)
+                        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                                     "  the required modification of the "
+                                     "microphone volume failed");
+                }
+            }
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.h b/trunk/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.h
new file mode 100644
index 0000000..9a5a032
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.h
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+#include "audio_mixer_manager_alsa_linux.h"
+
+#include <sys/soundcard.h>
+#include <sys/ioctl.h>
+
+#include <alsa/asoundlib.h>
+
+namespace webrtc
+{
+class EventWrapper;
+class ThreadWrapper;
+
+class AudioDeviceLinuxALSA : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceLinuxALSA(const WebRtc_Word32 id);
+    ~AudioDeviceLinuxALSA();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initialization and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+   
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type,
+        WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type,
+        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public: // Warning/error state queries for the owning module
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    // Device-enumeration workhorse; 'function' selects the operation
+    // (presumably the FUNC_* constants in the .cc — verify against callers).
+    WebRtc_Word32 GetDevicesInfo(const WebRtc_Word32 function,
+                                 const bool playback,
+                                 const WebRtc_Word32 enumDeviceNo = 0,
+                                 char* enumDeviceName = NULL,
+                                 const WebRtc_Word32 ednLen = 0) const;
+    WebRtc_Word32 ErrorRecovery(WebRtc_Word32 error, snd_pcm_t* deviceHandle);
+
+private:
+    void Lock() { _critSect.Enter(); };
+    void UnLock() { _critSect.Leave(); };
+private:
+    inline WebRtc_Word32 InputSanityCheckAfterUnlockedPeriod() const;
+    inline WebRtc_Word32 OutputSanityCheckAfterUnlockedPeriod() const;
+
+private:
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+private:
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    
+    CriticalSectionWrapper& _critSect; // guards the state below via Lock/UnLock
+
+    ThreadWrapper* _ptrThreadRec;
+    ThreadWrapper* _ptrThreadPlay;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+
+    WebRtc_Word32 _id;
+
+    AudioMixerManagerLinuxALSA _mixerManager;
+
+    WebRtc_UWord16 _inputDeviceIndex;
+    WebRtc_UWord16 _outputDeviceIndex;
+    bool _inputDeviceIsSpecified;
+    bool _outputDeviceIsSpecified;
+
+    snd_pcm_t* _handleRecord;
+    snd_pcm_t* _handlePlayout;
+
+    snd_pcm_uframes_t _recordingBuffersizeInFrame;
+    snd_pcm_uframes_t _recordingPeriodSizeInFrame;
+    snd_pcm_uframes_t _playoutBufferSizeInFrame;
+    snd_pcm_uframes_t _playoutPeriodSizeInFrame;
+
+    ssize_t _recordingBufferSizeIn10MS;
+    ssize_t _playoutBufferSizeIn10MS;
+    WebRtc_UWord32 _recordingFramesIn10MS;
+    WebRtc_UWord32 _playoutFramesIn10MS;
+
+    WebRtc_UWord32 _recordingFreq;
+    WebRtc_UWord32 _playoutFreq;
+    WebRtc_UWord8 _recChannels;
+    WebRtc_UWord8 _playChannels;
+
+    WebRtc_Word8* _recordingBuffer; // in byte
+    WebRtc_Word8* _playoutBuffer; // in byte
+    WebRtc_UWord32 _recordingFramesLeft;
+    WebRtc_UWord32 _playoutFramesLeft;
+
+    WebRtc_UWord32 _playbackBufferSize; // NOTE(review): units unclear — verify
+
+    AudioDeviceModule::BufferType _playBufType;
+
+private:
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _AGC;
+
+    snd_pcm_sframes_t _recordingDelay;
+    snd_pcm_sframes_t _playoutDelay;
+
+    WebRtc_Word32 _writeErrors;
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    WebRtc_UWord16 _playBufDelay;                 // playback delay
+    WebRtc_UWord16 _playBufDelayFixed;            // fixed playback delay
+};
+
+}
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc b/trunk/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
new file mode 100644
index 0000000..16e1d33
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
@@ -0,0 +1,3150 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_device_utility.h"
+#include "audio_device_pulse_linux.h"
+#include "audio_device_config.h"
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+
+webrtc_adm_linux_pulse::PulseAudioSymbolTable PaSymbolTable;
+
+// Accesses Pulse functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libpulse, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_pulse::PulseAudioSymbolTable, &PaSymbolTable, sym)
+
+namespace webrtc
+{
+
+// ============================================================================
+//                              Static Methods
+// ============================================================================
+
+bool AudioDeviceLinuxPulse::PulseAudioIsSupported()
+{   // Probes for a usable PulseAudio server via a full init/teardown attempt.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%s",
+                 __FUNCTION__);
+
+    bool pulseAudioIsSupported(true);
+
+    // Check that we can initialize
+    AudioDeviceLinuxPulse* admPulse = new AudioDeviceLinuxPulse(-1);
+    if (admPulse->InitPulseAudio() == -1)
+    {
+        pulseAudioIsSupported = false;
+    }
+    admPulse->TerminatePulseAudio(); // always clean up, even after failed init
+    delete admPulse;
+
+    if (pulseAudioIsSupported)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                     "*** Linux Pulse Audio is supported ***");
+    } else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                     "*** Linux Pulse Audio is NOT supported => will revert to the ALSA API ***");
+    }
+
+    return (pulseAudioIsSupported);
+}
+
+AudioDeviceLinuxPulse::AudioDeviceLinuxPulse(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _timeEventRec(*EventWrapper::Create()),
+    _timeEventPlay(*EventWrapper::Create()),
+    _recStartEvent(*EventWrapper::Create()),
+    _playStartEvent(*EventWrapper::Create()),
+    _ptrThreadPlay(NULL),
+    _ptrThreadRec(NULL),
+    _recThreadID(0),
+    _playThreadID(0),
+    _id(id),
+    _mixerManager(id),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _samplingFreq(0),
+    _recChannels(1),
+    _playChannels(1),
+    _playBufType(AudioDeviceModule::kFixedBufferSize),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _startRec(false),
+    _stopRec(false),
+    _startPlay(false),
+    _stopPlay(false),
+    _AGC(false),
+    _playBufDelayFixed(20),
+    _sndCardPlayDelay(0),
+    _sndCardRecDelay(0),
+    _writeErrors(0),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _deviceIndex(-1),
+    _numPlayDevices(0),
+    _numRecDevices(0),
+    _playDeviceName(NULL),
+    _recDeviceName(NULL),
+    _playDisplayDeviceName(NULL),
+    _recDisplayDeviceName(NULL),
+    _playBuffer(NULL),
+    _playbackBufferSize(0),
+    _playbackBufferUnused(0),
+    _tempBufferSpace(0),
+    _recBuffer(NULL),
+    _recordBufferSize(0),
+    _recordBufferUsed(0),
+    _tempSampleData(NULL),
+    _tempSampleDataSize(0),
+    _configuredLatencyPlay(0),
+    _configuredLatencyRec(0),
+    _paDeviceIndex(-1),
+    _paStateChanged(false),
+    _paMainloop(NULL),
+    _paMainloopApi(NULL),
+    _paContext(NULL),
+    _recStream(NULL),
+    _playStream(NULL),
+    _recStreamFlags(0),
+    _playStreamFlags(0)
+{   // Heap-allocated critsect/events above are released in the destructor.
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+
+    memset(_paServerVersion, 0, sizeof(_paServerVersion)); // zero before use
+    memset(&_playBufferAttr, 0, sizeof(_playBufferAttr));
+    memset(&_recBufferAttr, 0, sizeof(_recBufferAttr));
+}
+
+AudioDeviceLinuxPulse::~AudioDeviceLinuxPulse()
+{   // Shuts everything down first, then frees buffers and ctor-owned objects.
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+
+    Terminate(); // stops threads and tears down the PulseAudio connection
+
+    if (_recBuffer)
+    {
+        delete [] _recBuffer;
+        _recBuffer = NULL;
+    }
+    if (_playBuffer)
+    {
+        delete [] _playBuffer;
+        _playBuffer = NULL;
+    }
+    if (_playDeviceName)
+    {
+        delete [] _playDeviceName;
+        _playDeviceName = NULL;
+    }
+    if (_recDeviceName)
+    {
+        delete [] _recDeviceName;
+        _recDeviceName = NULL;
+    }
+
+    // These were allocated in the constructor's initializer list.
+    delete &_recStartEvent;
+    delete &_playStartEvent;
+    delete &_timeEventRec;
+    delete &_timeEventPlay;
+    delete &_critSect;
+}
+
+void AudioDeviceLinuxPulse::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{   // Stores the shared audio buffer used to exchange samples with the module.
+
+    CriticalSectionScoped lock(_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // Inform the AudioBuffer about default settings for this implementation.
+    // Set all values to zero here since the actual settings will be done by
+    // InitPlayout and InitRecording later.
+    _ptrAudioBuffer->SetRecordingSampleRate(0);
+    _ptrAudioBuffer->SetPlayoutSampleRate(0);
+    _ptrAudioBuffer->SetRecordingChannels(0);
+    _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxPulse::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const
+{   // This implementation always reports the PulseAudio layer.
+    audioLayer = AudioDeviceModule::kLinuxPulseAudio;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::Init()
+{   // Connects to PulseAudio and starts rec/play worker threads; no-op if done.
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized)
+    {
+        return 0; // already initialized — nothing to do
+    }
+
+    // Initialize PulseAudio
+    if (InitPulseAudio() < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to initialize PulseAudio");
+
+        // Best-effort cleanup of the partially created connection.
+        if (TerminatePulseAudio() < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to terminate PulseAudio");
+        }
+
+        return -1;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // RECORDING
+    const char* threadName = "webrtc_audio_module_rec_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
+                                                kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the rec audio thread");
+
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+
+    // PLAYOUT
+    threadName = "webrtc_audio_module_play_thread";
+    _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
+                                                 kRealtimePriority, threadName);
+    if (_ptrThreadPlay == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the play audio thread");
+        return -1;
+    }
+
+    threadID = 0;
+    if (!_ptrThreadPlay->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the play audio thread");
+
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        return -1;
+    }
+    _playThreadID = threadID;
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::Terminate()
+{   // Stops worker threads, closes the mixer and shuts down PulseAudio.
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    Lock();
+
+    _mixerManager.Close();
+
+    // RECORDING
+    if (_ptrThreadRec)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        UnLock(); // release while joining so the thread can make progress
+
+        tmpThread->SetNotAlive();
+        _timeEventRec.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        } else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the rec audio thread");
+        }
+        // Lock again since we need to protect _ptrThreadPlay.
+        Lock();
+    }
+
+    // PLAYOUT
+    if (_ptrThreadPlay)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        UnLock(); // was a raw _critSect.Leave(); use UnLock() like the rec path
+
+        tmpThread->SetNotAlive();
+        _timeEventPlay.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        } else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the play audio thread");
+        }
+    } else {
+      UnLock();
+    }
+
+    // Terminate PulseAudio
+    if (TerminatePulseAudio() < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to terminate PulseAudio");
+        return -1;
+    }
+
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::Initialized() const
+{   // True after a successful Init() and until Terminate().
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerIsAvailable(bool& available)
+{   // Probes speaker availability by trying to open the output mixer.
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        available = false;
+        return 0; // probe failed, but the query itself succeeded
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker exists
+    // 
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized) // only undo what this probe opened itself
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitSpeaker()
+{   // Resolves the selected output device to a PA index and opens its mixer.
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing) // cannot switch the speaker while playout is running
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    // check if default device
+    if (_outputDeviceIndex == 0)
+    {
+        WebRtc_UWord16 deviceIndex = 0;
+        GetDefaultDeviceInfo(false, NULL, deviceIndex);
+        _paDeviceIndex = deviceIndex;
+    } else
+    {
+        // get the PA device index from
+        // the callback
+        _deviceIndex = _outputDeviceIndex;
+
+        // get playout devices
+        PlayoutDevices();
+    }
+
+    // the callback has now set the _paDeviceIndex to
+    // the PulseAudio index of the device
+    if (_mixerManager.OpenSpeaker(_paDeviceIndex) == -1)
+    {
+        return -1;
+    }
+
+    // clear _deviceIndex
+    _deviceIndex = -1;
+    _paDeviceIndex = -1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid microphone
+    // exists
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
WebRtc_Word32 AudioDeviceLinuxPulse::InitMicrophone()
{

    CriticalSectionScoped lock(_critSect);

    // Cannot (re)initialize the microphone while recording is active.
    if (_recording)
    {
        return -1;
    }

    // SetRecordingDevice() must have been called successfully first.
    if (!_inputDeviceIsSpecified)
    {
        return -1;
    }

    // Check if default device
    if (_inputDeviceIndex == 0)
    {
        WebRtc_UWord16 deviceIndex = 0;
        GetDefaultDeviceInfo(true, NULL, deviceIndex);
        _paDeviceIndex = deviceIndex;
    } else
    {
        // Get the PA device index from
        // the callback
        // NOTE(review): _deviceIndex must be set BEFORE RecordingDevices();
        // the source-info callback (defined elsewhere) presumably matches it
        // and fills in _paDeviceIndex — do not reorder these statements.
        _deviceIndex = _inputDeviceIndex;

        // get recording devices
        RecordingDevices();
    }

    // The callback has now set the _paDeviceIndex to
    // the PulseAudio index of the device
    if (_mixerManager.OpenMicrophone(_paDeviceIndex) == -1)
    {
        return -1;
    }

    // Clear _deviceIndex
    _deviceIndex = -1;
    _paDeviceIndex = -1;

    return 0;
}
+
+bool AudioDeviceLinuxPulse::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+bool AudioDeviceLinuxPulse::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a volume control exists
+    available = true;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetWaveOutVolume(
+    WebRtc_UWord16 volumeLeft,
+    WebRtc_UWord16 volumeRight)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetSpeakerMute(bool enable)
+{
+
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0);
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetMicrophoneMute(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0);
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Enumerate all avaliable microphone and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0);
+
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+
+    return 0;
+}
+
WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecordingIsAvailable(bool& available)
{

    // Fast path: already capturing in stereo, so it is certainly available.
    if (_recChannels == 2 && _recording) {
      available = true;
      return 0;
    }

    available = false;
    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
    int error = 0;

    // Temporarily initialize the microphone if it was not open already.
    if (!wasInitialized && InitMicrophone() == -1)
    {
        // Cannot open the specified device
        available = false;
        return 0;
    }

#ifndef WEBRTC_PA_GTALK
    // Check if the selected microphone can record stereo.
    // Only overwrite 'available' when the mixer query itself succeeded.
    bool isAvailable(false);
    error = _mixerManager.StereoRecordingIsAvailable(isAvailable);
    if (!error)
      available = isAvailable;
#endif

    // Close the initialized input mixer
    if (!wasInitialized)
    {
        _mixerManager.CloseMicrophone();
    }

    return error;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoRecording(bool enable)
+{
+
+#ifndef WEBRTC_PA_GTALK
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+#endif
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayoutIsAvailable(bool& available)
+{
+
+    if (_playChannels == 2 && _playing) {
+      available = true;
+      return 0;
+    }
+
+    available = false;
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+    int error = 0;
+
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // Cannot open the specified device.
+        return -1;
+    }
+
+#ifndef WEBRTC_PA_GTALK
+    // Check if the selected speaker can play stereo.
+    bool isAvailable(false);
+    error = _mixerManager.StereoPlayoutIsAvailable(isAvailable);
+    if (!error)
+      available = isAvailable;
+#endif
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return error;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoPlayout(bool enable)
+{
+
+#ifndef WEBRTC_PA_GTALK
+    if (enable)
+        _playChannels = 2;
+    else
+        _playChannels = 1;
+#endif
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::AGC() const
+{
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneVolumeIsAvailable(
+    bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a volume control
+    // exists
+    available = true;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetMicrophoneVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneVolume(
+    WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  failed to retrive current microphone level");
+        return -1;
+    }
+
+    volume = level;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MaxMicrophoneVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
WebRtc_Word16 AudioDeviceLinuxPulse::PlayoutDevices()
{

    PaLock();

    pa_operation* paOperation = NULL;
    _numPlayDevices = 1; // init to 1 to account for "default"

    // get the whole list of devices and update _numPlayDevices
    // NOTE(review): PaSinkInfoCallback (defined elsewhere) is invoked once
    // per sink; presumably it increments _numPlayDevices and resolves
    // _deviceIndex/_playDisplayDeviceName when those are set — confirm.
    paOperation = LATE(pa_context_get_sink_info_list)(_paContext,
                                                      PaSinkInfoCallback,
                                                      this);

    // Block until the asynchronous enumeration completes.
    WaitForOperationCompletion(paOperation);

    PaUnLock();

    return _numPlayDevices;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    const WebRtc_UWord16 nDevices = PlayoutDevices();
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable output devices is %u", nDevices);
+
+    if (index > (nDevices - 1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutDeviceName(
    WebRtc_UWord16 index,
    WebRtc_Word8 name[kAdmMaxDeviceNameSize],
    WebRtc_Word8 guid[kAdmMaxGuidSize])
{

    // Refresh the device list so the range check below is up to date.
    const WebRtc_UWord16 nDevices = PlayoutDevices();

    if ((index > (nDevices - 1)) || (name == NULL))
    {
        return -1;
    }

    memset(name, 0, kAdmMaxDeviceNameSize);

    // The PulseAudio backend does not provide GUIDs; return an empty one.
    if (guid != NULL)
    {
        memset(guid, 0, kAdmMaxGuidSize);
    }

    // Check if default device
    if (index == 0)
    {
        WebRtc_UWord16 deviceIndex = 0;
        return GetDefaultDeviceInfo(false, name, deviceIndex);
    }

    // Tell the callback that we want
    // The name for this device
    // NOTE(review): the sink-info callback presumably copies the device
    // name into _playDisplayDeviceName when _deviceIndex matches — confirm.
    _playDisplayDeviceName = name;
    _deviceIndex = index;

    // get playout devices
    PlayoutDevices();

    // clear device name and index
    _playDisplayDeviceName = NULL;
    _deviceIndex = -1;

    return 0;
}
+
WebRtc_Word32 AudioDeviceLinuxPulse::RecordingDeviceName(
    WebRtc_UWord16 index,
    WebRtc_Word8 name[kAdmMaxDeviceNameSize],
    WebRtc_Word8 guid[kAdmMaxGuidSize])
{

    // Refresh the device list so the range check below is up to date.
    const WebRtc_UWord16 nDevices(RecordingDevices());

    if ((index > (nDevices - 1)) || (name == NULL))
    {
        return -1;
    }

    memset(name, 0, kAdmMaxDeviceNameSize);

    // The PulseAudio backend does not provide GUIDs; return an empty one.
    if (guid != NULL)
    {
        memset(guid, 0, kAdmMaxGuidSize);
    }

    // Check if default device
    if (index == 0)
    {
        WebRtc_UWord16 deviceIndex = 0;
        return GetDefaultDeviceInfo(true, name, deviceIndex);
    }

    // Tell the callback that we want
    // the name for this device
    // NOTE(review): the source-info callback presumably copies the device
    // name into _recDisplayDeviceName when _deviceIndex matches — confirm.
    _recDisplayDeviceName = name;
    _deviceIndex = index;

    // Get recording devices
    RecordingDevices();

    // Clear device name and index
    _recDisplayDeviceName = NULL;
    _deviceIndex = -1;

    return 0;
}
+
WebRtc_Word16 AudioDeviceLinuxPulse::RecordingDevices()
{

    PaLock();

    pa_operation* paOperation = NULL;
    _numRecDevices = 1; // Init to 1 to account for "default"

    // Get the whole list of devices and update _numRecDevices
    // NOTE(review): PaSourceInfoCallback (defined elsewhere) is invoked once
    // per source; presumably it increments _numRecDevices and resolves
    // _deviceIndex/_recDisplayDeviceName when those are set — confirm.
    paOperation = LATE(pa_context_get_source_info_list)(_paContext,
                                                        PaSourceInfoCallback,
                                                        this);

    // Block until the asynchronous enumeration completes.
    WaitForOperationCompletion(paOperation);

    PaUnLock();

    return _numRecDevices;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable input devices is %u", nDevices);
+
+    if (index > (nDevices - 1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetRecordingDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
WebRtc_Word32 AudioDeviceLinuxPulse::InitPlayout()
{

    CriticalSectionScoped lock(_critSect);

    // Cannot re-initialize while playout is running.
    if (_playing)
    {
        return -1;
    }

    // SetPlayoutDevice() must have been called successfully first.
    if (!_outputDeviceIsSpecified)
    {
        return -1;
    }

    // Already initialized — nothing to do.
    if (_playIsInitialized)
    {
        return 0;
    }

    // Initialize the speaker (devices might have been added or removed)
    // A failure here is non-fatal: playout can proceed without mixer control.
    if (InitSpeaker() == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                     "  InitSpeaker() failed");
    }

    // Set sampling rate to use
    // _samplingFreq is stored in kHz; 44 kHz really means CD-rate 44.1 kHz.
    WebRtc_UWord32 samplingRate = _samplingFreq * 1000;
    if (samplingRate == 44000)
    {
        samplingRate = 44100;
    }

    // Set the play sample specification
    pa_sample_spec playSampleSpec;
    playSampleSpec.channels = _playChannels;
    playSampleSpec.format = PA_SAMPLE_S16LE;
    playSampleSpec.rate = samplingRate;

    // Create a new play stream
    _playStream = LATE(pa_stream_new)(_paContext, "playStream",
                                      &playSampleSpec, NULL);

    if (!_playStream)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to create play stream, err=%d",
                     LATE(pa_context_errno)(_paContext));
        return -1;
    }

    // Provide the playStream to the mixer
    _mixerManager.SetPlayStream(_playStream);

    if (_ptrAudioBuffer)
    {
        // Update audio buffer with the selected parameters
        _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreq * 1000);
        _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);
    }

    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "  stream state %d\n", LATE(pa_stream_get_state)(_playStream));

    // Set stream flags
    _playStreamFlags = (pa_stream_flags_t) (PA_STREAM_AUTO_TIMING_UPDATE
        | PA_STREAM_INTERPOLATE_TIMING);

    if (_configuredLatencyPlay != WEBRTC_PA_NO_LATENCY_REQUIREMENTS)
    {
        // If configuring a specific latency then we want to specify
        // PA_STREAM_ADJUST_LATENCY to make the server adjust parameters
        // automatically to reach that target latency. However, that flag doesn't
        // exist in Ubuntu 8.04 and many people still use that, so we have to check
        // the protocol version of libpulse.
        if (LATE(pa_context_get_protocol_version)(_paContext)
            >= WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION)
        {
            _playStreamFlags |= PA_STREAM_ADJUST_LATENCY;
        }

        const pa_sample_spec *spec =
            LATE(pa_stream_get_sample_spec)(_playStream);
        if (!spec)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  pa_stream_get_sample_spec()");
            return -1;
        }

        // Derive the target latency in bytes from the requested milliseconds.
        size_t bytesPerSec = LATE(pa_bytes_per_second)(spec);
        WebRtc_UWord32 latency = bytesPerSec
            * WEBRTC_PA_PLAYBACK_LATENCY_MINIMUM_MSECS / WEBRTC_PA_MSECS_PER_SEC;

        // Set the play buffer attributes
        _playBufferAttr.maxlength = latency; // num bytes stored in the buffer
        _playBufferAttr.tlength = latency; // target fill level of play buffer
        // minimum free num bytes before server request more data
        _playBufferAttr.minreq = latency / WEBRTC_PA_PLAYBACK_REQUEST_FACTOR;
        _playBufferAttr.prebuf = _playBufferAttr.tlength
            - _playBufferAttr.minreq; // prebuffer tlength before starting playout

        _configuredLatencyPlay = latency;
    }

    // num samples in bytes * num channels
    // (10 ms of 16-bit samples per channel at _samplingFreq kHz).
    _playbackBufferSize = _samplingFreq * 10 * 2 * _playChannels;
    _playbackBufferUnused = _playbackBufferSize;
    _playBuffer = new WebRtc_Word8[_playbackBufferSize];

    // Enable underflow callback
    LATE(pa_stream_set_underflow_callback)(_playStream,
                                           PaStreamUnderflowCallback, this);

    // Set the state callback function for the stream
    LATE(pa_stream_set_state_callback)(_playStream, PaStreamStateCallback, this);

    // Mark playout side as initialized
    _playIsInitialized = true;
    _sndCardPlayDelay = 0;
    _sndCardRecDelay = 0;

    return 0;
}
+
WebRtc_Word32 AudioDeviceLinuxPulse::InitRecording()
{

    CriticalSectionScoped lock(_critSect);

    // Cannot re-initialize while recording is running.
    if (_recording)
    {
        return -1;
    }

    // SetRecordingDevice() must have been called successfully first.
    if (!_inputDeviceIsSpecified)
    {
        return -1;
    }

    // Already initialized — nothing to do.
    if (_recIsInitialized)
    {
        return 0;
    }

    // Initialize the microphone (devices might have been added or removed)
    // A failure here is non-fatal: capture can proceed without mixer control.
    if (InitMicrophone() == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                     "  InitMicrophone() failed");
    }

    // Set sampling rate to use
    // _samplingFreq is stored in kHz; 44 kHz really means CD-rate 44.1 kHz.
    WebRtc_UWord32 samplingRate = _samplingFreq * 1000;
    if (samplingRate == 44000)
    {
        samplingRate = 44100;
    }

    // Set the rec sample specification
    pa_sample_spec recSampleSpec;
    recSampleSpec.channels = _recChannels;
    recSampleSpec.format = PA_SAMPLE_S16LE;
    recSampleSpec.rate = samplingRate;

    // Create a new rec stream
    _recStream = LATE(pa_stream_new)(_paContext, "recStream", &recSampleSpec,
                                     NULL);
    if (!_recStream)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to create rec stream, err=%d",
                     LATE(pa_context_errno)(_paContext));
        return -1;
    }

    // Provide the recStream to the mixer
    _mixerManager.SetRecStream(_recStream);

    if (_ptrAudioBuffer)
    {
        // Update audio buffer with the selected parameters
        _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreq * 1000);
        _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);
    }

    if (_configuredLatencyRec != WEBRTC_PA_NO_LATENCY_REQUIREMENTS)
    {
        _recStreamFlags = (pa_stream_flags_t) (PA_STREAM_AUTO_TIMING_UPDATE
            | PA_STREAM_INTERPOLATE_TIMING);

        // If configuring a specific latency then we want to specify
        // PA_STREAM_ADJUST_LATENCY to make the server adjust parameters
        // automatically to reach that target latency. However, that flag doesn't
        // exist in Ubuntu 8.04 and many people still use that, so we have to check
        // the protocol version of libpulse.
        if (LATE(pa_context_get_protocol_version)(_paContext)
            >= WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION)
        {
            _recStreamFlags |= PA_STREAM_ADJUST_LATENCY;
        }

        const pa_sample_spec *spec =
            LATE(pa_stream_get_sample_spec)(_recStream);
        if (!spec)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  pa_stream_get_sample_spec(rec)");
            return -1;
        }

        // Derive the target latency in bytes from the requested milliseconds.
        size_t bytesPerSec = LATE(pa_bytes_per_second)(spec);
        WebRtc_UWord32 latency = bytesPerSec
            * WEBRTC_PA_LOW_CAPTURE_LATENCY_MSECS / WEBRTC_PA_MSECS_PER_SEC;

        // Set the rec buffer attributes
        // Note: fragsize specifies a maximum transfer size, not a minimum, so
        // it is not possible to force a high latency setting, only a low one.
        _recBufferAttr.fragsize = latency; // size of fragment
        _recBufferAttr.maxlength = latency + bytesPerSec
            * WEBRTC_PA_CAPTURE_BUFFER_EXTRA_MSECS / WEBRTC_PA_MSECS_PER_SEC;

        _configuredLatencyRec = latency;
    }

    // 10 ms of 16-bit samples per channel at _samplingFreq kHz.
    _recordBufferSize = _samplingFreq * 10 * 2 * _recChannels;
    _recordBufferUsed = 0;
    _recBuffer = new WebRtc_Word8[_recordBufferSize];

    // Enable overflow callback
    LATE(pa_stream_set_overflow_callback)(_recStream, PaStreamOverflowCallback,
                                          this);

    // Set the state callback function for the stream
    LATE(pa_stream_set_state_callback)(_recStream, PaStreamStateCallback, this);

    // Mark recording side as initialized
    _recIsInitialized = true;

    return 0;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    // set state to ensure that the recording starts from the audio thread
+    _startRec = true;
+
+    // the audio thread will signal when recording has started
+    _timeEventRec.Set();
+    if (kEventTimeout == _recStartEvent.Wait(10000))
+    {
+        _startRec = false;
+        StopRecording();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate recording");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        // the recording state is set by the audio thread after recording has started
+    } else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate recording");
+        return -1;
+    }
+
+    return 0;
+}
+
WebRtc_Word32 AudioDeviceLinuxPulse::StopRecording()
{

    CriticalSectionScoped lock(_critSect);

    // Not initialized — nothing to stop; success by definition.
    if (!_recIsInitialized)
    {
        return 0;
    }

    // Initialized but stream missing is an inconsistent state.
    if (_recStream == NULL)
    {
        return -1;
    }

    // Flip the flags first so the audio thread stops using the stream.
    _recIsInitialized = false;
    _recording = false;

    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "  stopping recording");

    // Stop Recording
    PaLock();

    // Teardown order matters: remove all callbacks before disconnecting.
    DisableReadCallback();
    LATE(pa_stream_set_overflow_callback)(_recStream, NULL, NULL);

    // Unset this here so that we don't get a TERMINATED callback
    LATE(pa_stream_set_state_callback)(_recStream, NULL, NULL);

    if (LATE(pa_stream_get_state)(_recStream) != PA_STREAM_UNCONNECTED)
    {
        // Disconnect the stream
        if (LATE(pa_stream_disconnect)(_recStream) != PA_OK)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  failed to disconnect rec stream, err=%d\n",
                         LATE(pa_context_errno)(_paContext));
            PaUnLock();
            return -1;
        }

        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                     "  disconnected recording");
    }

    // Drop our reference; the stream object is freed by PulseAudio.
    LATE(pa_stream_unref)(_recStream);
    _recStream = NULL;

    PaUnLock();

    // Provide the recStream to the mixer
    // (_recStream is NULL here, which clears the mixer's stream reference.)
    _mixerManager.SetRecStream(_recStream);

    if (_recBuffer)
    {
        delete [] _recBuffer;
        _recBuffer = NULL;
    }

    return 0;
}
+
+bool AudioDeviceLinuxPulse::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+bool AudioDeviceLinuxPulse::Recording() const
+{
+    return (_recording);
+}
+
+bool AudioDeviceLinuxPulse::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StartPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    // set state to ensure that playout starts from the audio thread
+    _startPlay = true;
+
+    // the audio thread will signal when playout has started
+    _timeEventPlay.Set();
+    if (kEventTimeout == _playStartEvent.Wait(10000))
+    {
+        _startPlay = false;
+        StopPlayout();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate playout");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        // the playing state is set by the audio thread after playout has started
+    } else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate playing");
+        return -1;
+    }
+
+    return 0;
+}
+
WebRtc_Word32 AudioDeviceLinuxPulse::StopPlayout()
{

    CriticalSectionScoped lock(_critSect);

    // Not initialized — nothing to stop; success by definition.
    if (!_playIsInitialized)
    {
        return 0;
    }

    // Initialized but stream missing is an inconsistent state.
    if (_playStream == NULL)
    {
        return -1;
    }

    // Flip the flags first so the audio thread stops using the stream.
    _playIsInitialized = false;
    _playing = false;
    _sndCardPlayDelay = 0;
    _sndCardRecDelay = 0;

    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "  stopping playback");

    // Stop Playout
    PaLock();

    // Teardown order matters: remove all callbacks before disconnecting.
    DisableWriteCallback();
    LATE(pa_stream_set_underflow_callback)(_playStream, NULL, NULL);

    // Unset this here so that we don't get a TERMINATED callback
    LATE(pa_stream_set_state_callback)(_playStream, NULL, NULL);

    if (LATE(pa_stream_get_state)(_playStream) != PA_STREAM_UNCONNECTED)
    {
        // Disconnect the stream
        if (LATE(pa_stream_disconnect)(_playStream) != PA_OK)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  failed to disconnect play stream, err=%d",
                         LATE(pa_context_errno)(_paContext));
            PaUnLock();
            return -1;
        }

        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                     "  disconnected playback");
    }

    // Drop our reference; the stream object is freed by PulseAudio.
    LATE(pa_stream_unref)(_playStream);
    _playStream = NULL;

    PaUnLock();

    // Provide the playStream to the mixer
    // (_playStream is NULL here, which clears the mixer's stream reference.)
    _mixerManager.SetPlayStream(_playStream);

    if (_playBuffer)
    {
        delete [] _playBuffer;
        _playBuffer = NULL;
    }

    return 0;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = (WebRtc_UWord16) _sndCardPlayDelay;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = (WebRtc_UWord16) _sndCardRecDelay;
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::Playing() const
+{
+    return (_playing);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType type,
+    WebRtc_UWord16 sizeMS)
+{
+
+    if (type != AudioDeviceModule::kFixedBufferSize)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Adaptive buffer size not supported on this platform");
+        return -1;
+    }
+
+    _playBufType = type;
+    _playBufDelayFixed = sizeMS;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const
+{
+
+    type = _playBufType;
+    sizeMS = _playBufDelayFixed;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::CPULoad(WebRtc_UWord16& /*load*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+bool AudioDeviceLinuxPulse::PlayoutWarning() const
+{
+  CriticalSectionScoped lock(_critSect);
+  return (_playWarning > 0);
+}
+
+bool AudioDeviceLinuxPulse::PlayoutError() const
+{
+  CriticalSectionScoped lock(_critSect);
+  return (_playError > 0);
+}
+
+bool AudioDeviceLinuxPulse::RecordingWarning() const
+{
+  CriticalSectionScoped lock(_critSect);
+  return (_recWarning > 0);
+}
+
+bool AudioDeviceLinuxPulse::RecordingError() const
+{
+  CriticalSectionScoped lock(_critSect);
+  return (_recError > 0);
+}
+
+void AudioDeviceLinuxPulse::ClearPlayoutWarning()
+{
+  CriticalSectionScoped lock(_critSect);
+  _playWarning = 0;
+}
+
+void AudioDeviceLinuxPulse::ClearPlayoutError()
+{
+  CriticalSectionScoped lock(_critSect);
+  _playError = 0;
+}
+
+void AudioDeviceLinuxPulse::ClearRecordingWarning()
+{
+  CriticalSectionScoped lock(_critSect);
+  _recWarning = 0;
+}
+
+void AudioDeviceLinuxPulse::ClearRecordingError()
+{
+  CriticalSectionScoped lock(_critSect);
+  _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+void AudioDeviceLinuxPulse::PaContextStateCallback(pa_context *c, void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaContextStateCallbackHandler(
+        c);
+}
+
+// ----------------------------------------------------------------------------
+//  PaSinkInfoCallback
+// ----------------------------------------------------------------------------
+
+void AudioDeviceLinuxPulse::PaSinkInfoCallback(pa_context */*c*/,
+                                               const pa_sink_info *i, int eol,
+                                               void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaSinkInfoCallbackHandler(
+        i, eol);
+}
+
+void AudioDeviceLinuxPulse::PaSourceInfoCallback(pa_context */*c*/,
+                                                 const pa_source_info *i,
+                                                 int eol, void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaSourceInfoCallbackHandler(
+        i, eol);
+}
+
+void AudioDeviceLinuxPulse::PaServerInfoCallback(pa_context */*c*/,
+                                                 const pa_server_info *i,
+                                                 void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaServerInfoCallbackHandler(i);
+}
+
+void AudioDeviceLinuxPulse::PaStreamStateCallback(pa_stream *p, void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamStateCallbackHandler(p);
+}
+
+void AudioDeviceLinuxPulse::PaContextStateCallbackHandler(pa_context *c)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  context state cb");
+
+    pa_context_state_t state = LATE(pa_context_get_state)(c);
+    switch (state)
+    {
+        case PA_CONTEXT_UNCONNECTED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  unconnected");
+            break;
+        case PA_CONTEXT_CONNECTING:
+        case PA_CONTEXT_AUTHORIZING:
+        case PA_CONTEXT_SETTING_NAME:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  no state");
+            break;
+        case PA_CONTEXT_FAILED:
+        case PA_CONTEXT_TERMINATED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  failed");
+            _paStateChanged = true;
+            LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+            break;
+        case PA_CONTEXT_READY:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  ready");
+            _paStateChanged = true;
+            LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+            break;
+    }
+}
+
+void AudioDeviceLinuxPulse::PaSinkInfoCallbackHandler(const pa_sink_info *i,
+                                                      int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    if (_numPlayDevices == _deviceIndex)
+    {
+        // Convert the device index to the one of the sink
+        _paDeviceIndex = i->index;
+
+        if (_playDeviceName)
+        {
+            // Copy the sink name
+            strncpy(_playDeviceName, i->name, kAdmMaxDeviceNameSize);
+            _playDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+        }
+        if (_playDisplayDeviceName)
+        {
+            // Copy the sink display name
+            strncpy(_playDisplayDeviceName, i->description,
+                    kAdmMaxDeviceNameSize);
+            _playDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+        }
+    }
+
+    _numPlayDevices++;
+}
+
+void AudioDeviceLinuxPulse::PaSourceInfoCallbackHandler(
+    const pa_source_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    // We don't want to list output devices
+     if (i->monitor_of_sink == PA_INVALID_INDEX)
+    {
+        if (_numRecDevices == _deviceIndex)
+        {
+            // Convert the device index to the one of the source
+            _paDeviceIndex = i->index;
+
+            if (_recDeviceName)
+            {
+                // copy the source name
+                strncpy(_recDeviceName, i->name, kAdmMaxDeviceNameSize);
+                _recDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+            }
+            if (_recDisplayDeviceName)
+            {
+                // Copy the source display name
+                strncpy(_recDisplayDeviceName, i->description,
+                        kAdmMaxDeviceNameSize);
+                _recDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+            }
+        }
+
+        _numRecDevices++;
+    }
+}
+
// Server-info handler: caches the server's native sample rate (in kHz),
// its version string, and the default source/sink names into whichever
// member buffers are currently non-NULL, then wakes the waiting thread.
void AudioDeviceLinuxPulse::PaServerInfoCallbackHandler(const pa_server_info *i)
{
    // Use PA native sampling rate
    WebRtc_UWord32 paSampleRate = i->sample_spec.rate;
    if (paSampleRate == 44100)
    {
#ifdef WEBRTC_PA_GTALK
        paSampleRate = 48000;
#else
        // NOTE(review): 44100 is remapped to 44000, not kept as-is —
        // presumably a deliberate legacy "44 kHz" convention elsewhere in
        // the engine; confirm before changing.
        paSampleRate = 44000;
#endif
    }

    // _samplingFreq is stored in kHz.
    _samplingFreq = paSampleRate / 1000;

    // Copy the PA server version
    if (_paServerVersion)
    {
        // Assumes the destination holds at least 32 bytes — TODO confirm.
        strncpy(_paServerVersion, i->server_version, 31);
        _paServerVersion[31] = '\0';
    }

    if (_recDisplayDeviceName)
    {
        // Copy the default source (capture device) name.
        strncpy(_recDisplayDeviceName, i->default_source_name,
                kAdmMaxDeviceNameSize);
        _recDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
    }

    if (_playDisplayDeviceName)
    {
        // Copy the default sink (playout device) name.
        strncpy(_playDisplayDeviceName, i->default_sink_name,
                kAdmMaxDeviceNameSize);
        _playDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
    }

    // Wake the thread blocked in WaitForOperationCompletion().
    LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
}
+
+void AudioDeviceLinuxPulse::PaStreamStateCallbackHandler(pa_stream *p)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  stream state cb");
+
+    pa_stream_state_t state = LATE(pa_stream_get_state)(p);
+    switch (state)
+    {
+        case PA_STREAM_UNCONNECTED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  unconnected");
+            break;
+        case PA_STREAM_CREATING:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  creating");
+            break;
+        case PA_STREAM_FAILED:
+        case PA_STREAM_TERMINATED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  failed");
+            break;
+        case PA_STREAM_READY:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  ready");
+            break;
+    }
+
+    LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+}
+
// Queries the server version string into _paServerVersion (via
// PaServerInfoCallback) and traces it. The actual numeric version check is
// currently disabled (kept below as commented-out code); the function
// therefore always returns 0 once the query completes.
WebRtc_Word32 AudioDeviceLinuxPulse::CheckPulseAudioVersion()
{
    /*WebRtc_Word32 index = 0;
     WebRtc_Word32 partIndex = 0;
     WebRtc_Word32 partNum = 1;
     WebRtc_Word32 minVersion[3] = {0, 9, 15};
     bool versionOk = false;
     char str[8] = {0};*/

    PaLock();

    pa_operation* paOperation = NULL;

    // get the server info and update deviceName
    paOperation = LATE(pa_context_get_server_info)(_paContext,
                                                   PaServerInfoCallback, this);

    // Blocks until the callback has run and filled _paServerVersion.
    WaitForOperationCompletion(paOperation);

    PaUnLock();

    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
                 "  checking PulseAudio version: %s", _paServerVersion);

    /* Saved because it may turn out that we need to check the version in the future
     while (true)
     {
     if (_paServerVersion[index] == '.')
     {
     index++;
     str[partIndex] = '\0';
     partIndex = 0;

     if(partNum == 2)
     {
     if (atoi(str) < minVersion[1])
     {
     break;
     }
     partNum = 3;
     }
     else
     {
     if (atoi(str) > minVersion[0])
     {
     versionOk = true;
     break;
     }
     partNum = 2;
     }
     }
     else if (_paServerVersion[index] == '\0' || _paServerVersion[index] == '-')
     {
     str[partIndex] = '\0';
     if (atoi(str) >= minVersion[2])
     {
     versionOk = true;
     }
     break;
     }

     str[partIndex] = _paServerVersion[index];
     index++;
     partIndex++;
     }

     if (!versionOk)
     {
     return -1;
     }
     */

    return 0;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitSamplingFrequency()
+{
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+
+    // Get the server info and update _samplingFreq
+    paOperation = LATE(pa_context_get_server_info)(_paContext,
+                                                   PaServerInfoCallback, this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::GetDefaultDeviceInfo(bool recDevice,
+                                                          WebRtc_Word8* name,
+                                                          WebRtc_UWord16& index)
+{
+    WebRtc_Word8 tmpName[kAdmMaxDeviceNameSize] = {0};
+    // subtract length of "default: "
+    WebRtc_UWord16 nameLen = kAdmMaxDeviceNameSize - 9;
+    WebRtc_Word8* pName = NULL;
+
+    if (name)
+    {
+        // Add "default: "
+        strcpy(name, "default: ");
+        pName = &name[9];
+    }
+
+    // Tell the callback that we want
+    // the name for this device
+    if (recDevice)
+    {
+        _recDisplayDeviceName = tmpName;
+    } else
+    {
+        _playDisplayDeviceName = tmpName;
+    }
+
+    // Set members
+    _paDeviceIndex = -1;
+    _deviceIndex = 0;
+    _numPlayDevices = 0;
+    _numRecDevices = 0;
+
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+
+    // Get the server info and update deviceName
+    paOperation = LATE(pa_context_get_server_info)(_paContext,
+                                                   PaServerInfoCallback, this);
+
+    WaitForOperationCompletion(paOperation);
+
+    // Get the device index
+    if (recDevice)
+    {
+        paOperation
+            = LATE(pa_context_get_source_info_by_name)(_paContext,
+                                                       (char *) tmpName,
+                                                       PaSourceInfoCallback,
+                                                       this);
+    } else
+    {
+        paOperation
+            = LATE(pa_context_get_sink_info_by_name)(_paContext,
+                                                     (char *) tmpName,
+                                                     PaSinkInfoCallback, this);
+    }
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    // Set the index
+    index = _paDeviceIndex;
+
+    if (name)
+    {
+        // Copy to name string
+        strncpy(pName, tmpName, nameLen);
+    }
+
+    // Clear members
+    _playDisplayDeviceName = NULL;
+    _recDisplayDeviceName = NULL;
+    _paDeviceIndex = -1;
+    _deviceIndex = -1;
+    _numPlayDevices = 0;
+    _numRecDevices = 0;
+
+    return 0;
+}
+
// Brings up the PulseAudio stack: loads libpulse symbols, creates and starts
// the threaded mainloop, creates a context and connects it to the default
// server, then hands the mainloop/context to the mixer manager and
// initializes the sampling frequency. Returns 0 on success, -1 on any
// failure (earlier steps are NOT rolled back here; TerminatePulseAudio()
// handles cleanup).
WebRtc_Word32 AudioDeviceLinuxPulse::InitPulseAudio()
{
    int retVal = 0;

    // Load libpulse
    if (!PaSymbolTable.Load())
    {
        // Most likely the Pulse library and sound server are not installed on
        // this system
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to load symbol table");
        return -1;
    }

    // Create a mainloop API and connection to the default server
    // the mainloop is the internal asynchronous API event loop
    if (_paMainloop) {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  PA mainloop has already existed");
        return -1;
    }
    _paMainloop = LATE(pa_threaded_mainloop_new)();
    if (!_paMainloop)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  could not create mainloop");
        return -1;
    }

    // Start the threaded main loop
    retVal = LATE(pa_threaded_mainloop_start)(_paMainloop);
    if (retVal != PA_OK)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to start main loop, error=%d", retVal);
        return -1;
    }

    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "  mainloop running!");

    PaLock();

    _paMainloopApi = LATE(pa_threaded_mainloop_get_api)(_paMainloop);
    if (!_paMainloopApi)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  could not create mainloop API");
        PaUnLock();
        return -1;
    }

    // Create a new PulseAudio context
    if (_paContext){
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  PA context has already existed");
        PaUnLock();
        return -1;
    }
    _paContext = LATE(pa_context_new)(_paMainloopApi, "WEBRTC VoiceEngine");

    if (!_paContext)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  could not create context");
        PaUnLock();
        return -1;
    }

    // Set state callback function; it sets _paStateChanged and signals the
    // mainloop when the context reaches a terminal state.
    LATE(pa_context_set_state_callback)(_paContext, PaContextStateCallback,
                                        this);

    // Connect the context to a server (default)
    _paStateChanged = false;
    retVal = LATE(pa_context_connect)(_paContext, NULL, PA_CONTEXT_NOAUTOSPAWN,
                                      NULL);

    if (retVal != PA_OK)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to connect context, error=%d", retVal);
        PaUnLock();
        return -1;
    }

    // Wait for state change (the mainloop lock is released while waiting).
    while (!_paStateChanged)
    {
        LATE(pa_threaded_mainloop_wait)(_paMainloop);
    }

    // Now check to see what final state we reached.
    pa_context_state_t state = LATE(pa_context_get_state)(_paContext);

    if (state != PA_CONTEXT_READY)
    {
        if (state == PA_CONTEXT_FAILED)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  failed to connect to PulseAudio sound server");
        } else if (state == PA_CONTEXT_TERMINATED)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  PulseAudio connection terminated early");
        } else
        {
            // Shouldn't happen, because we only signal on one of those three
            // states
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  unknown problem connecting to PulseAudio");
        }
        PaUnLock();
        return -1;
    }

    PaUnLock();

    // Give the objects to the mixer manager
    _mixerManager.SetPulseAudioObjects(_paMainloop, _paContext);

    // Check the version (currently always succeeds; see
    // CheckPulseAudioVersion()).
    if (CheckPulseAudioVersion() < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  PulseAudio version %s not supported", _paServerVersion);
        return -1;
    }

    // Initialize sampling frequency (queried from the server, stored in kHz).
    if (InitSamplingFrequency() < 0 || _samplingFreq == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  failed to initialize sampling frequency, set to %d",
                     _samplingFreq);
        return -1;
    }

    return 0;
}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::TerminatePulseAudio()
+{
+    // Do nothing if the instance doesn't exist
+    // likely PaSymbolTable.Load() fails
+    if (!_paMainloop) {
+        return 0;
+    }
+
+    PaLock();
+
+    // Disconnect the context
+    if (_paContext)
+    {
+        LATE(pa_context_disconnect)(_paContext);
+    }
+
+    // Unreference the context
+    if (_paContext)
+    {
+        LATE(pa_context_unref)(_paContext);
+    }
+
+    PaUnLock();
+    _paContext = NULL;
+
+    // Stop the threaded main loop
+    if (_paMainloop)
+    {
+        LATE(pa_threaded_mainloop_stop)(_paMainloop);
+    }
+
+    // Free the mainloop
+    if (_paMainloop)
+    {
+        LATE(pa_threaded_mainloop_free)(_paMainloop);
+    }
+
+    _paMainloop = NULL;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  PulseAudio terminated");
+
+    return 0;
+}
+
// Acquires the PA threaded-mainloop lock; must be paired with PaUnLock().
void AudioDeviceLinuxPulse::PaLock()
{
    LATE(pa_threaded_mainloop_lock)(_paMainloop);
}
+
// Releases the PA threaded-mainloop lock acquired by PaLock().
void AudioDeviceLinuxPulse::PaUnLock()
{
    LATE(pa_threaded_mainloop_unlock)(_paMainloop);
}
+
+void AudioDeviceLinuxPulse::WaitForOperationCompletion(
+    pa_operation* paOperation) const
+{
+    if (!paOperation)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "paOperation NULL in WaitForOperationCompletion");
+        return;
+    }
+
+    while (LATE(pa_operation_get_state)(paOperation) == PA_OPERATION_RUNNING)
+    {
+        LATE(pa_threaded_mainloop_wait)(_paMainloop);
+    }
+
+    LATE(pa_operation_unref)(paOperation);
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
// Re-arms the playback write callback. If the stream already has writable
// space, the callback would never fire for it, so we dispatch the play
// event ourselves instead of registering the callback.
void AudioDeviceLinuxPulse::EnableWriteCallback()
{
    if (LATE(pa_stream_get_state)(_playStream) == PA_STREAM_READY)
    {
        // May already have available space. Must check.
        _tempBufferSpace = LATE(pa_stream_writable_size)(_playStream);
        if (_tempBufferSpace > 0)
        {
            // Yup, there is already space available, so if we register a write
            // callback then it will not receive any event. So dispatch one ourself
            // instead
            _timeEventPlay.Set();
            return;
        }
    }

    LATE(pa_stream_set_write_callback)(_playStream, &PaStreamWriteCallback,
                                       this);
}
+
// Unregisters the playback write callback so PA stops invoking it.
void AudioDeviceLinuxPulse::DisableWriteCallback()
{
    LATE(pa_stream_set_write_callback)(_playStream, NULL, NULL);
}
+
+void AudioDeviceLinuxPulse::PaStreamWriteCallback(pa_stream */*unused*/,
+                                                  size_t buffer_space,
+                                                  void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamWriteCallbackHandler(
+        buffer_space);
+}
+
// Records how much the stream can accept and hands off to the play thread.
void AudioDeviceLinuxPulse::PaStreamWriteCallbackHandler(size_t bufferSpace)
{
    // Stash the writable size for the worker thread to consume.
    _tempBufferSpace = bufferSpace;

    // Since we write the data asynchronously on a different thread, we have
    // to temporarily disable the write callback or else Pulse will call it
    // continuously until we write the data. We re-enable it below.
    DisableWriteCallback();
    _timeEventPlay.Set();
}
+
+void AudioDeviceLinuxPulse::PaStreamUnderflowCallback(pa_stream */*unused*/,
+                                                      void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamUnderflowCallbackHandler();
+}
+
// Playback underflow: the server ran out of queued audio. If we configured
// an explicit latency, grow the target latency by a fixed increment and
// push new buffer attributes to the stream (asynchronously, fire-and-forget).
void AudioDeviceLinuxPulse::PaStreamUnderflowCallbackHandler()
{
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "  Playout underflow");

    if (_configuredLatencyPlay == WEBRTC_PA_NO_LATENCY_REQUIREMENTS)
    {
        // We didn't configure a pa_buffer_attr before, so switching to one now
        // would be questionable.
        return;
    }

    // Otherwise reconfigure the stream with a higher target latency.

    const pa_sample_spec *spec = LATE(pa_stream_get_sample_spec)(_playStream);
    if (!spec)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  pa_stream_get_sample_spec()");
        return;
    }

    // New latency = old latency + increment (ms) converted to bytes.
    size_t bytesPerSec = LATE(pa_bytes_per_second)(spec);
    WebRtc_UWord32 newLatency = _configuredLatencyPlay + bytesPerSec
        * WEBRTC_PA_PLAYBACK_LATENCY_INCREMENT_MSECS / WEBRTC_PA_MSECS_PER_SEC;

    // Set the play buffer attributes
    _playBufferAttr.maxlength = newLatency;
    _playBufferAttr.tlength = newLatency;
    _playBufferAttr.minreq = newLatency / WEBRTC_PA_PLAYBACK_REQUEST_FACTOR;
    _playBufferAttr.prebuf = _playBufferAttr.tlength - _playBufferAttr.minreq;

    pa_operation *op = LATE(pa_stream_set_buffer_attr)(_playStream,
                                                       &_playBufferAttr, NULL,
                                                       NULL);
    if (!op)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  pa_stream_set_buffer_attr()");
        return;
    }

    // Don't need to wait for this to complete.
    LATE(pa_operation_unref)(op);

    // Save the new latency in case we underflow again.
    _configuredLatencyPlay = newLatency;
}
+
// Registers the capture read callback so PA notifies us of new data.
void AudioDeviceLinuxPulse::EnableReadCallback()
{
    LATE(pa_stream_set_read_callback)(_recStream, &PaStreamReadCallback, this);
}
+
// Unregisters the capture read callback so PA stops invoking it.
void AudioDeviceLinuxPulse::DisableReadCallback()
{
    LATE(pa_stream_set_read_callback)(_recStream, NULL, NULL);
}
+
+void AudioDeviceLinuxPulse::PaStreamReadCallback(pa_stream */*unused1*/,
+                                                 size_t /*unused2*/,
+                                                 void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamReadCallbackHandler();
+}
+
// Peeks the captured fragment (pointer + size are cached for the worker
// thread) and signals the record thread to consume it.
void AudioDeviceLinuxPulse::PaStreamReadCallbackHandler()
{
    // We get the data pointer and size now in order to save one Lock/Unlock
    // in the worker thread
    if (LATE(pa_stream_peek)(_recStream, &_tempSampleData, &_tempSampleDataSize)
        != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  Can't read data!");
        return;
    }

    // Since we consume the data asynchronously on a different thread, we have
    // to temporarily disable the read callback or else Pulse will call it
    // continuously until we consume the data. We re-enable it below
    DisableReadCallback();
    _timeEventRec.Set();
}
+
+void AudioDeviceLinuxPulse::PaStreamOverflowCallback(pa_stream */*unused*/,
+                                                     void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamOverflowCallbackHandler();
+}
+
// Capture overflow: the server dropped data because we consumed it too
// slowly. Only traced; no recovery action is taken.
void AudioDeviceLinuxPulse::PaStreamOverflowCallbackHandler()
{
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "  Recording overflow");
}
+
// Returns the stream's current latency in microseconds, or 0 when latency
// reporting is disabled, the stream is NULL, or the query fails (playout
// with a wrong delay beats stopping altogether).
WebRtc_Word32 AudioDeviceLinuxPulse::LatencyUsecs(pa_stream *stream)
{
    if (!WEBRTC_PA_REPORT_LATENCY)
    {
        return 0;
    }

    if (!stream)
    {
        return 0;
    }

    pa_usec_t latency;
    int negative;
    if (LATE(pa_stream_get_latency)(stream, &latency, &negative) != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  Can't query latency");
        // We'd rather continue playout/capture with an incorrect delay than stop
        // it altogether, so return a valid value.
        return 0;
    }

    if (negative)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                     "  warning: pa_stream_get_latency reported negative delay");

        // The delay can be negative for monitoring streams if the captured
        // samples haven't been played yet. In such a case, "latency" contains the
        // magnitude, so we must negate it to get the real value.
        // NOTE(review): latency is an unsigned pa_usec_t, so -latency wraps
        // before the narrowing cast — presumably relies on two's-complement
        // wrap to yield the negative value; confirm on all target platforms.
        WebRtc_Word32 tmpLatency = (WebRtc_Word32) -latency;
        if (tmpLatency < 0)
        {
            // Make sure that we don't use a negative delay
            tmpLatency = 0;
        }

        return tmpLatency;
    } else
    {
        return (WebRtc_Word32) latency;
    }
}
+
// Consumes a peeked capture fragment: tops up the partial 10 ms buffer,
// then passes whole 10 ms chunks to VoiceEngine, and stores any remainder
// for the next call. Updates the play/record delay estimates as a side
// effect. Returns 0 normally, -1 when recording stopped mid-delivery.
WebRtc_Word32 AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData,
                                                      size_t bufferSize)
{
    size_t size = bufferSize;
    // _recordBufferSize holds one 10 ms chunk of 16-bit samples.
    WebRtc_UWord32 numRecSamples = _recordBufferSize / (2 * _recChannels);

    // Account for the peeked data and the used data
    // (each full buffer of pending data adds 10 ms of delay).
    WebRtc_UWord32 recDelay = (WebRtc_UWord32) ((LatencyUsecs(_recStream)
        / 1000) + 10 * ((size + _recordBufferUsed) / _recordBufferSize));

    _sndCardRecDelay = recDelay;

    if (_playStream)
    {
        // Get the playout delay
        _sndCardPlayDelay = (WebRtc_UWord32) (LatencyUsecs(_playStream) / 1000);
    }

    if (_recordBufferUsed > 0)
    {
        // Have to copy to the buffer until it is full
        size_t copy = _recordBufferSize - _recordBufferUsed;
        if (size < copy)
        {
            copy = size;
        }

        memcpy(&_recBuffer[_recordBufferUsed], bufferData, copy);
        _recordBufferUsed += copy;
        // Advance past the bytes we just consumed.
        bufferData = static_cast<const char *> (bufferData) + copy;
        size -= copy;

        if (_recordBufferUsed != _recordBufferSize)
        {
            // Not enough data yet to pass to VoE
            return 0;
        }

        // Provide data to VoiceEngine
        if (ProcessRecordedData(_recBuffer, numRecSamples, recDelay) == -1)
        {
            // We have stopped recording
            return -1;
        }

        _recordBufferUsed = 0;
    }

    // Now process full 10ms sample sets directly from the input
    while (size >= _recordBufferSize)
    {
        // Provide data to VoiceEngine
        if (ProcessRecordedData(
            static_cast<WebRtc_Word8 *> (const_cast<void *> (bufferData)),
            numRecSamples, recDelay) == -1)
        {
            // We have stopped recording
            return -1;
        }

        bufferData = static_cast<const char *> (bufferData) + _recordBufferSize;
        size -= _recordBufferSize;

        // We have consumed 10ms of data
        recDelay -= 10;
    }

    // Now save any leftovers for later.
    if (size > 0)
    {
        memcpy(_recBuffer, bufferData, size);
        _recordBufferUsed = size;
    }

    return 0;
}
+
// Delivers one 10 ms chunk of recorded audio to VoiceEngine via the audio
// buffer, handling AGC mic-level bookkeeping around the callback. The lock
// is released for the duration of the delivery callback, so _recording is
// re-checked afterwards; returns -1 if recording was stopped meanwhile,
// else 0.
WebRtc_Word32 AudioDeviceLinuxPulse::ProcessRecordedData(
    WebRtc_Word8 *bufferData,
    WebRtc_UWord32 bufferSizeInSamples,
    WebRtc_UWord32 recDelay)
{
    WebRtc_UWord32 currentMicLevel(0);
    WebRtc_UWord32 newMicLevel(0);

    _ptrAudioBuffer->SetRecordedBuffer(bufferData, bufferSizeInSamples);

    if (AGC())
    {
        // Store current mic level in the audio buffer if AGC is enabled
        if (MicrophoneVolume(currentMicLevel) == 0)
        {
            // This call does not affect the actual microphone volume
            _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
        }
    }

    // Set vqe data
    const WebRtc_UWord32 clockDrift(0);
    _ptrAudioBuffer->SetVQEData(_sndCardPlayDelay, recDelay, clockDrift);

    // Deliver recorded samples at specified sample rate,
    // mic level etc. to the observer using callback
    UnLock();
    _ptrAudioBuffer->DeliverRecordedData();
    Lock();

    // We have been unlocked - check the flag again
    if (!_recording)
    {
        return -1;
    }

    if (AGC())
    {
        newMicLevel = _ptrAudioBuffer->NewMicLevel();
        if (newMicLevel != 0)
        {
            // The VQE will only deliver non-zero microphone levels when a
            // change is needed.
            // Set this new mic level (received from the observer as return
            // value in the callback).
            WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
                         "  AGC change of volume: old=%u => new=%u",
                         currentMicLevel, newMicLevel);
            if (SetMicrophoneVolume(newMicLevel) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
                             _id,
                             "  the required modification of the microphone "
                             "volume failed");
            }
        }
    }

    return 0;
}
+
+bool AudioDeviceLinuxPulse::PlayThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceLinuxPulse*> (pThis)->PlayThreadProcess());
+}
+
+bool AudioDeviceLinuxPulse::RecThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceLinuxPulse*> (pThis)->RecThreadProcess());
+}
+
+// Executes one pass of the playout thread.  Waits up to 1 s for the
+// periodic play timer event; on the first pass after StartPlayout()
+// (_startPlay set) it connects _playStream to the selected sink and
+// signals _playStartEvent; on subsequent passes it writes PCM data
+// (buffered leftovers first, then freshly requested samples) into the
+// stream.  Always returns true so the ThreadWrapper keeps running.
+bool AudioDeviceLinuxPulse::PlayThreadProcess()
+{
+    switch (_timeEventPlay.Wait(1000))
+    {
+        case kEventSignaled:
+            _timeEventPlay.Reset();
+            break;
+        case kEventError:
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "EventWrapper::Wait() failed");
+            return true;
+        case kEventTimeout:
+            return true;
+    }
+
+    Lock();
+
+    if (_startPlay)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startPlay true, performing initial actions");
+
+        _startPlay = false;
+        _playDeviceName = NULL;
+
+        // Set if not default device
+        if (_outputDeviceIndex > 0)
+        {
+            // Get the playout device name
+            _playDeviceName = new WebRtc_Word8[kAdmMaxDeviceNameSize];
+            _deviceIndex = _outputDeviceIndex;
+            PlayoutDevices();
+        }
+
+        // Start muted only supported on 0.9.11 and up
+        if (LATE(pa_context_get_protocol_version)(_paContext)
+            >= WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION)
+        {
+            // Get the currently saved speaker mute status
+            // and set the initial mute status accordingly
+            bool enabled(false);
+            _mixerManager.SpeakerMute(enabled);
+            if (enabled)
+            {
+                _playStreamFlags |= PA_STREAM_START_MUTED;
+            }
+        }
+
+        // Get the currently saved speaker volume
+        WebRtc_UWord32 volume = 0;
+        _mixerManager.SpeakerVolume(volume);
+
+        PaLock();
+
+        // Set the same volume for all channels
+        pa_cvolume cVolumes;
+        const pa_sample_spec *spec =
+            LATE(pa_stream_get_sample_spec)(_playStream);
+        LATE(pa_cvolume_set)(&cVolumes, spec->channels, volume);
+
+        // Connect the stream to a sink
+        if (LATE(pa_stream_connect_playback)(
+            _playStream,
+            _playDeviceName,
+            &_playBufferAttr,
+            (pa_stream_flags_t) _playStreamFlags,
+            &cVolumes, NULL) != PA_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to connect play stream, err=%d",
+                         LATE(pa_context_errno)(_paContext));
+        }
+        // NOTE(review): on connect failure we still fall through to the
+        // PA_STREAM_READY wait loop below, which may then never return —
+        // confirm this is the intended behavior.
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  play stream connected");
+
+        // Wait for state change
+        while (LATE(pa_stream_get_state)(_playStream) != PA_STREAM_READY)
+        {
+            LATE(pa_threaded_mainloop_wait)(_paMainloop);
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  play stream ready");
+
+        // We can now handle write callbacks
+        EnableWriteCallback();
+
+        PaUnLock();
+
+        // Clear device name
+        if (_playDeviceName)
+        {
+            delete [] _playDeviceName;
+            _playDeviceName = NULL;
+        }
+
+        // Playout is now up and running; unblock StartPlayout().
+        _playing = true;
+        _playStartEvent.Set();
+
+        UnLock();
+        return true;
+    }
+
+    if (_playing)
+    {
+        if (!_recording)
+        {
+            // Update the playout delay
+            _sndCardPlayDelay = (WebRtc_UWord32) (LatencyUsecs(_playStream)
+                / 1000);
+        }
+
+        // First flush any PCM data fetched on a previous pass that did
+        // not fit into the granted stream buffer space back then.
+        if (_playbackBufferUnused < _playbackBufferSize)
+        {
+
+            size_t write = _playbackBufferSize - _playbackBufferUnused;
+            if (_tempBufferSpace < write)
+            {
+                write = _tempBufferSpace;
+            }
+
+            PaLock();
+            if (LATE(pa_stream_write)(
+                                      _playStream,
+                                      (void *) &_playBuffer[_playbackBufferUnused],
+                                      write, NULL, (int64_t) 0,
+                                      PA_SEEK_RELATIVE) != PA_OK)
+            {
+                _writeErrors++;
+                if (_writeErrors > 10)
+                {
+                    if (_playError == 1)
+                    {
+                        WEBRTC_TRACE(kTraceWarning,
+                                     kTraceUtility, _id,
+                                     "  pending playout error exists");
+                    }
+                    // NOTE(review): these traces use kTraceUtility while the
+                    // rest of this file uses kTraceAudioDevice — confirm.
+                    _playError = 1; // Triggers callback from module process thread
+                    WEBRTC_TRACE(
+                                 kTraceError,
+                                 kTraceUtility,
+                                 _id,
+                                 "  kPlayoutError message posted: "
+                                 "_writeErrors=%u, error=%d",
+                                 _writeErrors,
+                                 LATE(pa_context_errno)(_paContext));
+                    _writeErrors = 0;
+                }
+            }
+            PaUnLock();
+
+            _playbackBufferUnused += write;
+            _tempBufferSpace -= write;
+        }
+
+        WebRtc_UWord32 numPlaySamples = _playbackBufferSize / (2
+            * _playChannels);
+        if (_tempBufferSpace > 0) // Might have been reduced to zero by the above
+        {
+            // Ask for new PCM data to be played out using the AudioDeviceBuffer
+            // ensure that this callback is executed without taking the
+            // audio-thread lock
+            UnLock();
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  requesting data");
+            WebRtc_UWord32 nSamples =
+                _ptrAudioBuffer->RequestPlayoutData(numPlaySamples);
+            Lock();
+
+            // We have been unlocked - check the flag again
+            if (!_playing)
+            {
+                UnLock();
+                return true;
+            }
+
+            nSamples = _ptrAudioBuffer->GetPlayoutData(_playBuffer);
+            if (nSamples != numPlaySamples)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice,
+                             _id, "  invalid number of output samples(%d)",
+                             nSamples);
+            }
+
+            size_t write = _playbackBufferSize;
+            if (_tempBufferSpace < write)
+            {
+                write = _tempBufferSpace;
+            }
+
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  will write");
+            PaLock();
+            if (LATE(pa_stream_write)(_playStream, (void *) &_playBuffer[0],
+                                      write, NULL, (int64_t) 0,
+                                      PA_SEEK_RELATIVE) != PA_OK)
+            {
+                _writeErrors++;
+                if (_writeErrors > 10)
+                {
+                    if (_playError == 1)
+                    {
+                        WEBRTC_TRACE(kTraceWarning,
+                                     kTraceUtility, _id,
+                                     "  pending playout error exists");
+                    }
+                    _playError = 1; // triggers callback from module process thread
+                    WEBRTC_TRACE(
+                                 kTraceError,
+                                 kTraceUtility,
+                                 _id,
+                                 "  kPlayoutError message posted: "
+                                 "_writeErrors=%u, error=%d",
+                                 _writeErrors,
+                                 LATE(pa_context_errno)(_paContext));
+                    _writeErrors = 0;
+                }
+            }
+            PaUnLock();
+
+            // Remember how much of the fetched buffer made it out; any
+            // remainder is flushed on the next pass.
+            _playbackBufferUnused = write;
+        }
+
+        // All granted buffer space has been handled; re-arm the write
+        // callback so the next grant wakes us again.
+        _tempBufferSpace = 0;
+        PaLock();
+        EnableWriteCallback();
+        PaUnLock();
+
+    } // _playing
+
+    UnLock();
+    return true;
+}
+
+// Executes one pass of the recording thread.  Waits up to 1 s for the
+// periodic rec timer event; on the first pass after StartRecording()
+// (_startRec set) it connects _recStream to the selected source and
+// signals _recStartEvent; on subsequent passes it forwards the pending
+// captured data in _tempSampleData (presumably stashed by the stream
+// read callback — confirm) and then drains any further readable data.
+// Always returns true so the ThreadWrapper keeps running.
+bool AudioDeviceLinuxPulse::RecThreadProcess()
+{
+    switch (_timeEventRec.Wait(1000))
+    {
+        case kEventSignaled:
+            _timeEventRec.Reset();
+            break;
+        case kEventError:
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "EventWrapper::Wait() failed");
+            return true;
+        case kEventTimeout:
+            return true;
+    }
+
+    Lock();
+
+    if (_startRec)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startRec true, performing initial actions");
+
+        _recDeviceName = NULL;
+
+        // Set if not default device
+        if (_inputDeviceIndex > 0)
+        {
+            // Get the recording device name
+            _recDeviceName = new WebRtc_Word8[kAdmMaxDeviceNameSize];
+            _deviceIndex = _inputDeviceIndex;
+            RecordingDevices();
+        }
+
+        PaLock();
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  connecting stream");
+
+        // Connect the stream to a source
+        if (LATE(pa_stream_connect_record)(_recStream, _recDeviceName,
+                                           &_recBufferAttr,
+                                           (pa_stream_flags_t) _recStreamFlags)
+            != PA_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to connect rec stream, err=%d",
+                         LATE(pa_context_errno)(_paContext));
+        }
+        // NOTE(review): on connect failure we still fall through to the
+        // PA_STREAM_READY wait loop below, which may then never return —
+        // confirm this is the intended behavior.
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  connected");
+
+        // Wait for state change
+        while (LATE(pa_stream_get_state)(_recStream) != PA_STREAM_READY)
+        {
+            LATE(pa_threaded_mainloop_wait)(_paMainloop);
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  done");
+
+        // We can now handle read callbacks
+        EnableReadCallback();
+
+        PaUnLock();
+
+        // Clear device name
+        if (_recDeviceName)
+        {
+            delete [] _recDeviceName;
+            _recDeviceName = NULL;
+        }
+
+        // Recording is now up and running; unblock StartRecording().
+        _startRec = false;
+        _recording = true;
+        _recStartEvent.Set();
+
+        UnLock();
+        return true;
+    }
+
+    if (_recording)
+    {
+        // Read data and provide it to VoiceEngine
+        if (ReadRecordedData(_tempSampleData, _tempSampleDataSize) == -1)
+        {
+            UnLock();
+            return true;
+        }
+
+        _tempSampleData = NULL;
+        _tempSampleDataSize = 0;
+
+        PaLock();
+        while (true)
+        {
+            // Ack the last thing we read
+            if (LATE(pa_stream_drop)(_recStream) != 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "  failed to drop, err=%d\n",
+                             LATE(pa_context_errno)(_paContext));
+            }
+
+            if (LATE(pa_stream_readable_size)(_recStream) <= 0)
+            {
+                // Then that was all the data
+                break;
+            }
+
+            // Else more data.
+            const void *sampleData;
+            size_t sampleDataSize;
+
+            if (LATE(pa_stream_peek)(_recStream, &sampleData, &sampleDataSize)
+                != 0)
+            {
+                _recError = 1; // triggers callback from module process thread
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice,
+                             _id, "  RECORD_ERROR message posted, error = %d",
+                             LATE(pa_context_errno)(_paContext));
+                break;
+            }
+
+            _sndCardRecDelay = (WebRtc_UWord32) (LatencyUsecs(_recStream)
+                / 1000);
+
+            // Drop lock for sigslot dispatch, which could take a while.
+            PaUnLock();
+            // Read data and provide it to VoiceEngine
+            if (ReadRecordedData(sampleData, sampleDataSize) == -1)
+            {
+                UnLock();
+                return true;
+            }
+            PaLock();
+
+            // Return to top of loop for the ack and the check for more data.
+        }
+
+        EnableReadCallback();
+        PaUnLock();
+
+    } // _recording
+
+    UnLock();
+    return true;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.h b/trunk/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.h
new file mode 100644
index 0000000..693a32b
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.h
@@ -0,0 +1,385 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
+
+#include "audio_device_generic.h"
+#include "audio_mixer_manager_pulse_linux.h"
+#include "critical_section_wrapper.h"
+
+#include <pulse/pulseaudio.h>
+
+// Set this define to make the code behave like in GTalk/libjingle
+//#define WEBRTC_PA_GTALK
+
+// We define this flag if it's missing from our headers, because we want to be
+// able to compile against old headers but still use PA_STREAM_ADJUST_LATENCY
+// if run against a recent version of the library.
+#ifndef PA_STREAM_ADJUST_LATENCY
+#define PA_STREAM_ADJUST_LATENCY 0x2000U
+#endif
+#ifndef PA_STREAM_START_MUTED
+#define PA_STREAM_START_MUTED 0x1000U
+#endif
+
+// Set this constant to 0 to disable latency reading
+const WebRtc_UWord32 WEBRTC_PA_REPORT_LATENCY = 1;
+
+// Constants from implementation by Tristan Schmelcher [tschmelcher@google.com]
+
+// First PulseAudio protocol version that supports PA_STREAM_ADJUST_LATENCY.
+const WebRtc_UWord32 WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION = 13;
+
+// Some timing constants for optimal operation. See
+// https://tango.0pointer.de/pipermail/pulseaudio-discuss/2008-January/001170.html
+// for a good explanation of some of the factors that go into this.
+
+// Playback.
+
+// For playback, there is a round-trip delay to fill the server-side playback
+// buffer, so setting too low of a latency is a buffer underflow risk. We will
+// automatically increase the latency if a buffer underflow does occur, but we
+// also enforce a sane minimum at start-up time. Anything lower would be
+// virtually guaranteed to underflow at least once, so there's no point in
+// allowing lower latencies.
+const WebRtc_UWord32 WEBRTC_PA_PLAYBACK_LATENCY_MINIMUM_MSECS = 20;
+
+// Every time a playback stream underflows, we will reconfigure it with target
+// latency that is greater by this amount.
+const WebRtc_UWord32 WEBRTC_PA_PLAYBACK_LATENCY_INCREMENT_MSECS = 20;
+
+// We also need to configure a suitable request size. Too small and we'd burn
+// CPU from the overhead of transferring small amounts of data at once. Too large
+// and the amount of data remaining in the buffer right before refilling it
+// would be a buffer underflow risk. We set it to half of the buffer size.
+const WebRtc_UWord32 WEBRTC_PA_PLAYBACK_REQUEST_FACTOR = 2;
+
+// Capture.
+
+// For capture, low latency is not a buffer overflow risk, but it makes us burn
+// CPU from the overhead of transferring small amounts of data at once, so we set
+// a recommended value that we use for the kLowLatency constant (but if the user
+// explicitly requests something lower then we will honour it).
+// 1ms takes about 6-7% CPU. 5ms takes about 5%. 10ms takes about 4.x%.
+const WebRtc_UWord32 WEBRTC_PA_LOW_CAPTURE_LATENCY_MSECS = 10;
+
+// There is a round-trip delay to ack the data to the server, so the
+// server-side buffer needs extra space to prevent buffer overflow. 20ms is
+// sufficient, but there is no penalty to making it bigger, so we make it huge.
+// (750ms is libpulse's default value for the _total_ buffer size in the
+// kNoLatencyRequirements case.)
+const WebRtc_UWord32 WEBRTC_PA_CAPTURE_BUFFER_EXTRA_MSECS = 750;
+
+const WebRtc_UWord32 WEBRTC_PA_MSECS_PER_SEC = 1000;
+
+// Init _configuredLatencyRec/Play to this value to disable latency requirements
+const WebRtc_Word32 WEBRTC_PA_NO_LATENCY_REQUIREMENTS = -1;
+
+// Set this const to 1 to account for peeked and used data in latency calculation
+const WebRtc_UWord32 WEBRTC_PA_CAPTURE_BUFFER_LATENCY_ADJUSTMENT = 0;
+
+namespace webrtc
+{
+class EventWrapper;
+class ThreadWrapper;
+
+// PulseAudio implementation of AudioDeviceGeneric for Linux.  Owns the
+// PulseAudio threaded mainloop/context and the record/playout streams,
+// and runs dedicated record and playout threads (RecThreadProcess /
+// PlayThreadProcess).
+class AudioDeviceLinuxPulse: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceLinuxPulse(const WebRtc_Word32 id);
+    ~AudioDeviceLinuxPulse();
+
+    static bool PulseAudioIsSupported();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32
+        ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initialization and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32
+        SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+                         WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type,
+                                        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    // Convenience wrappers around _critSect (the main object lock).
+    void Lock()
+    {
+        _critSect.Enter();
+    }
+    ; // NOTE(review): stray ';' — harmless, candidate for removal.
+    void UnLock()
+    {
+        _critSect.Leave();
+    }
+    ; // NOTE(review): stray ';' — harmless, candidate for removal.
+    void WaitForOperationCompletion(pa_operation* paOperation) const;
+    void WaitForSuccess(pa_operation* paOperation) const;
+
+private:
+    static void PaContextStateCallback(pa_context *c, void *pThis);
+    static void PaSinkInfoCallback(pa_context *c, const pa_sink_info *i,
+                                   int eol, void *pThis);
+    static void PaSourceInfoCallback(pa_context *c, const pa_source_info *i,
+                                     int eol, void *pThis);
+    static void PaServerInfoCallback(pa_context *c, const pa_server_info *i,
+                                     void *pThis);
+    static void PaStreamStateCallback(pa_stream *p, void *pThis);
+    void PaContextStateCallbackHandler(pa_context *c);
+    void PaSinkInfoCallbackHandler(const pa_sink_info *i, int eol);
+    void PaSourceInfoCallbackHandler(const pa_source_info *i, int eol);
+    void PaServerInfoCallbackHandler(const pa_server_info *i);
+    void PaStreamStateCallbackHandler(pa_stream *p);
+
+    void EnableWriteCallback();
+    void DisableWriteCallback();
+    static void PaStreamWriteCallback(pa_stream *unused, size_t buffer_space,
+                                      void *pThis);
+    void PaStreamWriteCallbackHandler(size_t buffer_space);
+    static void PaStreamUnderflowCallback(pa_stream *unused, void *pThis);
+    void PaStreamUnderflowCallbackHandler();
+    void EnableReadCallback();
+    void DisableReadCallback();
+    static void PaStreamReadCallback(pa_stream *unused1, size_t unused2,
+                                     void *pThis);
+    void PaStreamReadCallbackHandler();
+    static void PaStreamOverflowCallback(pa_stream *unused, void *pThis);
+    void PaStreamOverflowCallbackHandler();
+    WebRtc_Word32 LatencyUsecs(pa_stream *stream);
+    WebRtc_Word32 ReadRecordedData(const void* bufferData, size_t bufferSize);
+    WebRtc_Word32 ProcessRecordedData(WebRtc_Word8 *bufferData,
+                                      WebRtc_UWord32 bufferSizeInSamples,
+                                      WebRtc_UWord32 recDelay);
+
+    WebRtc_Word32 CheckPulseAudioVersion();
+    WebRtc_Word32 InitSamplingFrequency();
+    WebRtc_Word32 GetDefaultDeviceInfo(bool recDevice, WebRtc_Word8* name,
+                                       WebRtc_UWord16& index);
+    WebRtc_Word32 InitPulseAudio();
+    WebRtc_Word32 TerminatePulseAudio();
+
+    void PaLock();
+    void PaUnLock();
+
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+private:
+    AudioDeviceBuffer* _ptrAudioBuffer;
+
+    CriticalSectionWrapper& _critSect;
+    EventWrapper& _timeEventRec;
+    EventWrapper& _timeEventPlay;
+    EventWrapper& _recStartEvent;
+    EventWrapper& _playStartEvent;
+
+    ThreadWrapper* _ptrThreadPlay;
+    ThreadWrapper* _ptrThreadRec;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+    WebRtc_Word32 _id;
+
+    AudioMixerManagerLinuxPulse _mixerManager;
+
+    WebRtc_UWord16 _inputDeviceIndex;
+    WebRtc_UWord16 _outputDeviceIndex;
+    bool _inputDeviceIsSpecified;
+    bool _outputDeviceIsSpecified;
+
+    WebRtc_UWord32 _samplingFreq;
+    WebRtc_UWord8 _recChannels;
+    WebRtc_UWord8 _playChannels;
+
+    AudioDeviceModule::BufferType _playBufType;
+
+private:
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _startRec;
+    bool _stopRec;
+    bool _startPlay;
+    bool _stopPlay;
+    bool _AGC;
+
+private:
+    WebRtc_UWord16 _playBufDelayFixed; // fixed playback delay
+
+    // Delays in ms, updated from LatencyUsecs()/1000 on the audio threads.
+    WebRtc_UWord32 _sndCardPlayDelay;
+    WebRtc_UWord32 _sndCardRecDelay;
+
+    WebRtc_Word32 _writeErrors;
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    WebRtc_UWord16 _deviceIndex;
+    WebRtc_Word16 _numPlayDevices;
+    WebRtc_Word16 _numRecDevices;
+    WebRtc_Word8* _playDeviceName;
+    WebRtc_Word8* _recDeviceName;
+    WebRtc_Word8* _playDisplayDeviceName;
+    WebRtc_Word8* _recDisplayDeviceName;
+    WebRtc_Word8 _paServerVersion[32];
+
+    WebRtc_Word8* _playBuffer;
+    size_t _playbackBufferSize;
+    size_t _playbackBufferUnused;
+    size_t _tempBufferSpace;
+    WebRtc_Word8* _recBuffer;
+    size_t _recordBufferSize;
+    size_t _recordBufferUsed;
+    const void* _tempSampleData;
+    size_t _tempSampleDataSize;
+    WebRtc_Word32 _configuredLatencyPlay;
+    WebRtc_Word32 _configuredLatencyRec;
+
+    // PulseAudio
+    WebRtc_UWord16 _paDeviceIndex;
+    bool _paStateChanged;
+
+    pa_threaded_mainloop* _paMainloop;
+    pa_mainloop_api* _paMainloopApi;
+    pa_context* _paContext;
+
+    pa_stream* _recStream;
+    pa_stream* _playStream;
+    WebRtc_UWord32 _recStreamFlags;
+    WebRtc_UWord32 _playStreamFlags;
+    pa_buffer_attr _playBufferAttr;
+    pa_buffer_attr _recBufferAttr;
+};
+
+}
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_device_utility_linux.cc b/trunk/src/modules/audio_device/main/source/linux/audio_device_utility_linux.cc
new file mode 100644
index 0000000..0b9dd54
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_device_utility_linux.cc
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_linux.h"
+#include "audio_device_config.h"	// DEBUG_PRINT()
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+// Constructs the Linux audio device utility; only initializes members
+// and traces the creation.
+AudioDeviceUtilityLinux::AudioDeviceUtilityLinux(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()), _id(id),
+    _lastError(AudioDeviceModule::kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+// Destroys the utility; the scoped lock is taken to serialize with any
+// concurrent users before the critical section itself is deleted.
+AudioDeviceUtilityLinux::~AudioDeviceUtilityLinux()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        // free stuff here...
+        // (no owned resources to release at present)
+    }
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+
+// Performs OS-specific initialization; on Linux there is nothing to do
+// beyond tracing the OS name.  Always returns 0 (success).
+WebRtc_Word32 AudioDeviceUtilityLinux::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "Linux");
+
+    return 0;
+}
+
+
+} // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_device_utility_linux.h b/trunk/src/modules/audio_device/main/source/linux/audio_device_utility_linux.h
new file mode 100644
index 0000000..8df7acc
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_device_utility_linux.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_LINUX_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+// Linux implementation of AudioDeviceUtility; currently only traces the
+// OS name in Init() (see audio_device_utility_linux.cc).
+class AudioDeviceUtilityLinux: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityLinux(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityLinux();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    CriticalSectionWrapper& _critSect;        // owned; deleted in the dtor
+    WebRtc_Word32 _id;                        // trace/module id
+    AudioDeviceModule::ErrorCode _lastError;  // last reported ADM error
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.cc b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.cc
new file mode 100644
index 0000000..306ad5d
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.cc
@@ -0,0 +1,1317 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_mixer_manager_alsa_linux.h"
+#include "trace.h"
+
+extern webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable;
+
+// Accesses ALSA functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libalsa, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_alsa::AlsaSymbolTable, &AlsaSymbolTable, sym)
+
+namespace webrtc
+{
+
+AudioMixerManagerLinuxALSA::AudioMixerManagerLinuxALSA(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _outputMixerHandle(NULL),
+    _inputMixerHandle(NULL),
+    _outputMixerElement(NULL),
+    _inputMixerElement(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s constructed", __FUNCTION__);
+
+    memset(_outputMixerStr, 0, kAdmMaxDeviceNameSize);
+    memset(_inputMixerStr, 0, kAdmMaxDeviceNameSize);
+}
+
+AudioMixerManagerLinuxALSA::~AudioMixerManagerLinuxALSA()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destructed", __FUNCTION__);
+
+    Close();
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                    PUBLIC METHODS
+// ============================================================================
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    CloseSpeaker();
+    CloseMicrophone();
+
+    return 0;
+
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    int errVal = 0;
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing playout mixer");
+        LATE(snd_mixer_free)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_detach)(_outputMixerHandle, _outputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detaching playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_close)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+        _outputMixerHandle = NULL;
+        _outputMixerElement = NULL;
+    }
+    memset(_outputMixerStr, 0, kAdmMaxDeviceNameSize);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    int errVal = 0;
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        LATE(snd_mixer_free)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer 2");
+
+        errVal = LATE(snd_mixer_detach)(_inputMixerHandle, _inputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detaching record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer 3");
+
+        errVal = LATE(snd_mixer_close)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer 4");
+        _inputMixerHandle = NULL;
+        _inputMixerElement = NULL;
+    }
+    memset(_inputMixerStr, 0, kAdmMaxDeviceNameSize);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::OpenSpeaker(char* deviceName)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::OpenSpeaker(name=%s)", deviceName);
+
+    CriticalSectionScoped lock(_critSect);
+
+    int errVal = 0;
+
+    // Close any existing output mixer handle
+    //
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing playout mixer");
+
+        LATE(snd_mixer_free)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_detach)(_outputMixerHandle, _outputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detaching playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_close)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+    }
+    _outputMixerHandle = NULL;
+    _outputMixerElement = NULL;
+
+    errVal = LATE(snd_mixer_open)(&_outputMixerHandle, 0);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "snd_mixer_open(&_outputMixerHandle, 0) - error");
+        return -1;
+    }
+
+    char controlName[kAdmMaxDeviceNameSize] = { 0 };
+    GetControlName(controlName, deviceName);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     snd_mixer_attach(_outputMixerHandle, %s)", controlName);
+
+    errVal = LATE(snd_mixer_attach)(_outputMixerHandle, controlName);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_attach(_outputMixerHandle, %s) error: %s",
+                     controlName, LATE(snd_strerror)(errVal));
+        _outputMixerHandle = NULL;
+        return -1;
+    }
+    strcpy(_outputMixerStr, controlName);
+
+    errVal = LATE(snd_mixer_selem_register)(_outputMixerHandle, NULL, NULL);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_selem_register(_outputMixerHandle,"
+                     " NULL, NULL), error: %s",
+                     LATE(snd_strerror)(errVal));
+        _outputMixerHandle = NULL;
+        return -1;
+    }
+
+    // Load and find the proper mixer element
+    if (LoadSpeakerMixerElement() < 0)
+    {
+        return -1;
+    }
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  the output mixer device is now open (0x%x)",
+                     _outputMixerHandle);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::OpenMicrophone(char *deviceName)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::OpenMicrophone(name=%s)",
+                 deviceName);
+
+    CriticalSectionScoped lock(_critSect);
+
+    int errVal = 0;
+
+    // Close any existing input mixer handle
+    //
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        LATE(snd_mixer_free)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        errVal = LATE(snd_mixer_detach)(_inputMixerHandle, _inputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detaching record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        errVal = LATE(snd_mixer_close)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+    }
+    _inputMixerHandle = NULL;
+    _inputMixerElement = NULL;
+
+    errVal = LATE(snd_mixer_open)(&_inputMixerHandle, 0);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_open(&_inputMixerHandle, 0) - error");
+        return -1;
+    }
+
+    char controlName[kAdmMaxDeviceNameSize] = { 0 };
+    GetControlName(controlName, deviceName);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     snd_mixer_attach(_inputMixerHandle, %s)", controlName);
+
+    errVal = LATE(snd_mixer_attach)(_inputMixerHandle, controlName);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_attach(_inputMixerHandle, %s) error: %s",
+                     controlName, LATE(snd_strerror)(errVal));
+
+        _inputMixerHandle = NULL;
+        return -1;
+    }
+    strcpy(_inputMixerStr, controlName);
+
+    errVal = LATE(snd_mixer_selem_register)(_inputMixerHandle, NULL, NULL);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_selem_register(_inputMixerHandle,"
+                     " NULL, NULL), error: %s",
+                     LATE(snd_strerror)(errVal));
+
+        _inputMixerHandle = NULL;
+        return -1;
+    }
+    // Load and find the proper mixer element
+    if (LoadMicMixerElement() < 0)
+    {
+        return -1;
+    }
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  the input mixer device is now open (0x%x)",
+                     _inputMixerHandle);
+    }
+
+    return 0;
+}
+
+bool AudioMixerManagerLinuxALSA::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return (_outputMixerHandle != NULL);
+}
+
+bool AudioMixerManagerLinuxALSA::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    return (_inputMixerHandle != NULL);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetSpeakerVolume(
+    WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetSpeakerVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    int errVal =
+        LATE(snd_mixer_selem_set_playback_volume_all)(_outputMixerElement,
+                                                      volume);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error changing master volume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerVolume(
+    WebRtc_UWord32& volume) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    long int vol(0);
+
+    int
+        errVal = LATE(snd_mixer_selem_get_playback_volume)(
+            _outputMixerElement,
+            (snd_mixer_selem_channel_id_t) 0,
+            &vol);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting outputvolume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxALSA::SpeakerVolume() => vol=%i",
+                 vol);
+
+    volume = static_cast<WebRtc_UWord32> (vol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    int errVal =
+        LATE(snd_mixer_selem_get_playback_volume_range)(_outputMixerElement,
+                                                        &minVol, &maxVol);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Playout hardware volume range, min: %d, max: %d",
+                 minVol, maxVol);
+
+    if (maxVol <= minVol)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting get_playback_volume_range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (maxVol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    int errVal =
+        LATE(snd_mixer_selem_get_playback_volume_range)(_outputMixerElement,
+                                                        &minVol, &maxVol);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Playout hardware volume range, min: %d, max: %d",
+                 minVol, maxVol);
+
+    if (maxVol <= minVol)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting get_playback_volume_range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (minVol);
+
+    return 0;
+}
+
+// TL: Have done testing with these but they don't seem reliable and
+// they were therefore not added
+/*
+ // ----------------------------------------------------------------------------
+ //    SetMaxSpeakerVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMaxSpeakerVolume(
+     WebRtc_UWord32 maxVolume)
+ {
+
+ if (_outputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "  no available output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_playback_volume_range(
+ _outputMixerElement, &minVol, &maxVol);
+ if ((maxVol <= minVol) || (errVal != 0))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting playback volume range: %s", snd_strerror(errVal));
+ }
+
+ maxVol = maxVolume;
+ errVal = snd_mixer_selem_set_playback_volume_range(
+ _outputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+  "     Playout hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+  "     Error setting playback volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+
+ // ----------------------------------------------------------------------------
+ //    SetMinSpeakerVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMinSpeakerVolume(
+     WebRtc_UWord32 minVolume)
+ {
+
+ if (_outputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "  no available output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_playback_volume_range(
+ _outputMixerElement, &minVol, &maxVol);
+ if ((maxVol <= minVol) || (errVal != 0))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting playback volume range: %s", snd_strerror(errVal));
+ }
+
+ minVol = minVolume;
+ errVal = snd_mixer_selem_set_playback_volume_range(
+ _outputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "     Playout hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "     Error setting playback volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+ */
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer exists");
+        return -1;
+    }
+
+    // The step size is always 1 for ALSA
+    stepSize = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerVolumeIsAvailable(
+    bool& available)
+{
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_playback_volume)(_outputMixerElement);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerMuteIsAvailable(
+    bool& available)
+{
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_playback_switch)(_outputMixerElement);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetSpeakerMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer element exists");
+        return -1;
+    }
+
+    // Ensure that the selected speaker destination has a valid mute control.
+    bool available(false);
+    SpeakerMuteIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the speaker");
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    int errVal =
+        LATE(snd_mixer_selem_set_playback_switch_all)(_outputMixerElement,
+                                                      !enable);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error setting playback switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerMute(bool& enabled) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available output mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected speaker destination has a valid mute control.
+    bool available =
+        LATE(snd_mixer_selem_has_playback_switch)(_outputMixerElement);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the speaker");
+        return -1;
+    }
+
+    int value(false);
+
+    // Retrieve one boolean control value for a specified mute-control
+    //
+    int
+        errVal = LATE(snd_mixer_selem_get_playback_switch)(
+            _outputMixerElement,
+            (snd_mixer_selem_channel_id_t) 0,
+            &value);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting playback switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    enabled = (bool) !value;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneMuteIsAvailable(
+    bool& available)
+{
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_capture_switch)(_inputMixerElement);
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetMicrophoneMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer element exists");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid mute control.
+    bool available(false);
+    MicrophoneMuteIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the microphone");
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    int errVal =
+        LATE(snd_mixer_selem_set_capture_switch_all)(_inputMixerElement,
+                                                     !enable);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error setting capture switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneMute(bool& enabled) const
+{
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid mute control.
+    bool available =
+        LATE(snd_mixer_selem_has_capture_switch)(_inputMixerElement);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the microphone");
+        return -1;
+    }
+
+    int value(false);
+
+    // Retrieve one boolean control value for a specified mute-control
+    //
+    int
+        errVal = LATE(snd_mixer_selem_get_capture_switch)(
+            _inputMixerElement,
+            (snd_mixer_selem_channel_id_t) 0,
+            &value);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting capture switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    enabled = (bool) !value;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneBoostIsAvailable(
+    bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer exists");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled through ALSA Simple Mixer Interface
+    available = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetMicrophoneBoost(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid mute control.
+    bool available(false);
+    MicrophoneMuteIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to enable microphone boost");
+        return -1;
+    }
+
+    // It is assumed that the call above fails!
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer exists");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled on this platform!
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneVolumeIsAvailable(
+    bool& available)
+{
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_capture_volume)(_inputMixerElement);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMicrophoneVolume(
+    WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetMicrophoneVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer element exists");
+        return -1;
+    }
+
+    int
+        errVal =
+            LATE(snd_mixer_selem_set_capture_volume_all)(_inputMixerElement,
+                                                         volume);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error changing microphone volume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+// TL: Have done testing with these but they don't seem reliable and
+// they were therefore not added
+/*
+ // ----------------------------------------------------------------------------
+ //    SetMaxMicrophoneVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMaxMicrophoneVolume(
+     WebRtc_UWord32 maxVolume)
+ {
+
+ if (_inputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "  no available output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_capture_volume_range(_inputMixerElement,
+  &minVol, &maxVol);
+ if ((maxVol <= minVol) || (errVal != 0))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting capture volume range: %s", snd_strerror(errVal));
+ }
+
+ maxVol = (long int)maxVolume;
+ printf("min %d max %d", minVol, maxVol);
+ errVal = snd_mixer_selem_set_capture_volume_range(_inputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "     Capture hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+  "     Error setting capture volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+
+ // ----------------------------------------------------------------------------
+ //    SetMinMicrophoneVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMinMicrophoneVolume(
+ WebRtc_UWord32 minVolume)
+ {
+
+ if (_inputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "  no available output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_capture_volume_range(
+ _inputMixerElement, &minVol, &maxVol);
+ if (maxVol <= minVol)
+ {
+ //maxVol = 255;
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting capture volume range: %s", snd_strerror(errVal));
+ }
+
+ printf("min %d max %d", minVol, maxVol);
+ minVol = (long int)minVolume;
+ errVal = snd_mixer_selem_set_capture_volume_range(
+ _inputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+  "     Capture hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+  "     Error setting capture volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+ */
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneVolume(
+    WebRtc_UWord32& volume) const
+{
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no available input mixer element exists");
+        return -1;
+    }
+
+    long int vol(0);
+
+    int
+        errVal =
+            LATE(snd_mixer_selem_get_capture_volume)(
+                _inputMixerElement,
+                (snd_mixer_selem_channel_id_t) 0,
+                &vol);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting inputvolume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxALSA::MicrophoneVolume() => vol=%i",
+                 vol);
+
+    volume = static_cast<WebRtc_UWord32> (vol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MaxMicrophoneVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+    // Returns the upper bound of the ALSA capture volume range for the
+    // selected input mixer element through |maxVolume|.
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    // check if we have mic volume at all
+    if (!LATE(snd_mixer_selem_has_capture_volume)(_inputMixerElement))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     No microphone volume available");
+        return -1;
+    }
+
+    int errVal =
+        LATE(snd_mixer_selem_get_capture_volume_range)(_inputMixerElement,
+                                                       &minVol, &maxVol);
+
+    // Use %ld for the long int range values; %d misreads them on LP64.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Microphone hardware volume range, min: %ld, max: %ld",
+                 minVol, maxVol);
+    if (maxVol <= minVol)
+    {
+        // An empty/inverted range means the query failed; keep the original
+        // best-effort behavior of logging and still returning the value.
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting microphone volume range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (maxVol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const
+{
+    // Returns the lower bound of the ALSA capture volume range for the
+    // selected input mixer element through |minVolume|.
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    int errVal =
+        LATE(snd_mixer_selem_get_capture_volume_range)(_inputMixerElement,
+                                                       &minVol, &maxVol);
+
+    // Use %ld for the long int range values; %d misreads them on LP64.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Microphone hardware volume range, min: %ld, max: %ld",
+                 minVol, maxVol);
+    if (maxVol <= minVol)
+    {
+        // An empty/inverted range means the query failed; keep the original
+        // best-effort behavior of logging and still returning the value.
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting microphone volume range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (minVol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+    // ALSA exposes capture volume as a contiguous integer range, so the
+    // smallest possible adjustment is always a single unit.
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer exists");
+        return -1;
+    }
+
+    stepSize = 1;
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// Loads the input mixer and locates the element controlling capture
+// volume. Prefers the first active "Capture" element; falls back to a
+// "Mic" element found during the scan. On load failure the mixer handle
+// is cleared and -1 is returned; -1 is also returned when neither
+// element exists.
+WebRtc_Word32 AudioMixerManagerLinuxALSA::LoadMicMixerElement() const
+{
+    int errVal = LATE(snd_mixer_load)(_inputMixerHandle);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "snd_mixer_load(_inputMixerHandle), error: %s",
+                     LATE(snd_strerror)(errVal));
+        _inputMixerHandle = NULL;
+        return -1;
+    }
+
+    snd_mixer_elem_t *elem = NULL;
+    snd_mixer_elem_t *micElem = NULL;
+    unsigned mixerIdx = 0;
+    const char *selemName = NULL;
+
+    // Find and store handles to the right mixer elements
+    for (elem = LATE(snd_mixer_first_elem)(_inputMixerHandle); elem; elem
+        = LATE(snd_mixer_elem_next)(elem), mixerIdx++)
+    {
+        if (LATE(snd_mixer_selem_is_active)(elem))
+        {
+            selemName = LATE(snd_mixer_selem_get_name)(elem);
+            if (strcmp(selemName, "Capture") == 0) // "Capture", "Mic"
+            {
+                _inputMixerElement = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Capture element set");
+            } else if (strcmp(selemName, "Mic") == 0)
+            {
+                // Remember "Mic" as a fallback but keep scanning for
+                // a "Capture" element.
+                micElem = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Mic element found");
+            }
+        }
+
+        if (_inputMixerElement)
+        {
+            // Use the first Capture element that is found
+            // The second one may not work
+            break;
+        }
+    }
+
+    if (_inputMixerElement == NULL)
+    {
+        // We didn't find a Capture handle, use Mic.
+        if (micElem != NULL)
+        {
+            _inputMixerElement = micElem;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "     Using Mic as capture volume.");
+        } else
+        {
+            _inputMixerElement = NULL;
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "Could not find capture volume on the mixer.");
+
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// Loads the output mixer and locates the element controlling playout
+// volume. Prefers an active "PCM" element; falls back to "Master", then
+// "Speaker". On load failure the mixer handle is cleared and -1 is
+// returned; -1 is also returned when no candidate element exists.
+WebRtc_Word32 AudioMixerManagerLinuxALSA::LoadSpeakerMixerElement() const
+{
+    int errVal = LATE(snd_mixer_load)(_outputMixerHandle);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_load(_outputMixerHandle), error: %s",
+                     LATE(snd_strerror)(errVal));
+        _outputMixerHandle = NULL;
+        return -1;
+    }
+
+    snd_mixer_elem_t *elem = NULL;
+    snd_mixer_elem_t *masterElem = NULL;
+    snd_mixer_elem_t *speakerElem = NULL;
+    unsigned mixerIdx = 0;
+    const char *selemName = NULL;
+
+    // Find and store handles to the right mixer elements
+    for (elem = LATE(snd_mixer_first_elem)(_outputMixerHandle); elem; elem
+        = LATE(snd_mixer_elem_next)(elem), mixerIdx++)
+    {
+        if (LATE(snd_mixer_selem_is_active)(elem))
+        {
+            selemName = LATE(snd_mixer_selem_get_name)(elem);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "snd_mixer_selem_get_name %d: %s =%x", mixerIdx,
+                         selemName, elem);
+
+            // "Master", "PCM", "Wave", "Master Mono", "PC Speaker", "PCM", "Wave"
+            if (strcmp(selemName, "PCM") == 0)
+            {
+                _outputMixerElement = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     PCM element set");
+            } else if (strcmp(selemName, "Master") == 0)
+            {
+                // Remember fallbacks but keep scanning for "PCM".
+                masterElem = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Master element found");
+            } else if (strcmp(selemName, "Speaker") == 0)
+            {
+                speakerElem = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Speaker element found");
+            }
+        }
+
+        if (_outputMixerElement)
+        {
+            // We have found the element we want
+            break;
+        }
+    }
+
+    // If we didn't find a PCM Handle, use Master or Speaker
+    if (_outputMixerElement == NULL)
+    {
+        if (masterElem != NULL)
+        {
+            _outputMixerElement = masterElem;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "     Using Master as output volume.");
+        } else if (speakerElem != NULL)
+        {
+            _outputMixerElement = speakerElem;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "     Using Speaker as output volume.");
+        } else
+        {
+            _outputMixerElement = NULL;
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "Could not find output volume in the mixer.");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// Derives the ALSA control (mixer) device name from a PCM device name by
+// keeping the "CARD=..." part after an "hw" prefix, e.g.
+// "front:CARD=Intel,DEV=0" -> "hw:CARD=Intel". If the name has no ':',
+// it is copied unchanged.
+void AudioMixerManagerLinuxALSA::GetControlName(char* controlName,
+                                                char* deviceName) const
+{
+    // Example
+    // deviceName: "front:CARD=Intel,DEV=0"
+    // controlName: "hw:CARD=Intel"
+    char* pos1 = strchr(deviceName, ':');
+    char* pos2 = strchr(deviceName, ',');
+    if (!pos2)
+    {
+        // Can also be default:CARD=Intel
+        pos2 = &deviceName[strlen(deviceName)];
+    }
+    if (pos1 && pos2)
+    {
+        // Copy ":CARD=..." (colon up to, not including, the comma) after
+        // the "hw" prefix.
+        // NOTE(review): assumes ':' precedes ',' (nChar would go negative
+        // otherwise) and that controlName is large enough -- no bounds
+        // checking is performed here; confirm against callers.
+        strcpy(controlName, "hw");
+        int nChar = (int) (pos2 - pos1);
+        strncpy(&controlName[2], pos1, nChar);
+        controlName[2 + nChar] = '\0';
+    } else
+    {
+        // No ':' in the device name; use it verbatim as the control name.
+        strcpy(controlName, deviceName);
+    }
+
+}
+
+}
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.h b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.h
new file mode 100644
index 0000000..94ea982
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_ALSA_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_ALSA_LINUX_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+#include "alsasymboltable_linux.h"
+
+#include <alsa/asoundlib.h>
+
+namespace webrtc
+{
+
+// Wraps the ALSA simple-mixer API to control speaker and microphone
+// volume/mute/boost for the Linux audio device module. Mixer handles and
+// elements are declared mutable because they are assigned from const
+// helper methods (LoadMicMixerElement / LoadSpeakerMixerElement).
+class AudioMixerManagerLinuxALSA
+{
+public:
+    // Attach the mixer to the named ALSA playout/capture device.
+    WebRtc_Word32 OpenSpeaker(char* deviceName);
+    WebRtc_Word32 OpenMicrophone(char* deviceName);
+    // Speaker (playout) volume and mute controls.
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    // Microphone (capture) mute, boost and volume controls.
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    // Teardown and state queries.
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+
+public:
+    AudioMixerManagerLinuxALSA(const WebRtc_Word32 id);
+    ~AudioMixerManagerLinuxALSA();
+
+private:
+    // Locate the mixer elements controlling capture/playout volume.
+    WebRtc_Word32 LoadMicMixerElement() const;
+    WebRtc_Word32 LoadSpeakerMixerElement() const;
+    // Derive the control device name (e.g. "hw:CARD=x") from a PCM name.
+    void GetControlName(char *controlName, char* deviceName) const;
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;  // id used in WEBRTC_TRACE output
+    mutable snd_mixer_t* _outputMixerHandle;  // playout mixer
+    char _outputMixerStr[kAdmMaxDeviceNameSize];  // playout control name
+    mutable snd_mixer_t* _inputMixerHandle;  // capture mixer
+    char _inputMixerStr[kAdmMaxDeviceNameSize];  // capture control name
+    mutable snd_mixer_elem_t* _outputMixerElement;  // playout volume element
+    mutable snd_mixer_elem_t* _inputMixerElement;  // capture volume element
+};
+
+}
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_ALSA_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.cc b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.cc
new file mode 100644
index 0000000..01f5172
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.cc
@@ -0,0 +1,1271 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_mixer_manager_pulse_linux.h"
+#include "trace.h"
+
+extern webrtc_adm_linux_pulse::PulseAudioSymbolTable PaSymbolTable;
+
+// Accesses Pulse functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libpulse, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_pulse::PulseAudioSymbolTable, &PaSymbolTable, sym)
+
+namespace webrtc
+{
+
+enum { kMaxRetryOnFailure = 2 };
+
+// Initializes all members to "closed" defaults (device indices -1, no
+// streams, no PA objects). The PulseAudio mainloop and context are
+// injected later via SetPulseAudioObjects().
+AudioMixerManagerLinuxPulse::AudioMixerManagerLinuxPulse(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _paOutputDeviceIndex(-1),
+    _paInputDeviceIndex(-1),
+    _paPlayStream(NULL),
+    _paRecStream(NULL),
+    _paMainloop(NULL),
+    _paContext(NULL),
+    _paVolume(0),
+    _paMute(0),
+    _paVolSteps(0),
+    _paSpeakerMute(false),
+    _paSpeakerVolume(0),
+    _paChannels(0),
+    _paObjectsSet(false),
+    _callbackValues(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s constructed", __FUNCTION__);
+}
+
+// Closes both mixer directions, then releases the critical section that
+// was heap-allocated in the constructor.
+AudioMixerManagerLinuxPulse::~AudioMixerManagerLinuxPulse()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destructed", __FUNCTION__);
+
+    Close();
+
+    // Close() uses _critSect, so it must be destroyed last.
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                    PUBLIC METHODS
+// ============================================================================
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetPulseAudioObjects(
+    pa_threaded_mainloop* mainloop,
+    pa_context* context)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Both PulseAudio handles must be valid before the mixer can use them.
+    const bool objectsValid = (mainloop != NULL) && (context != NULL);
+    if (!objectsValid)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  could not set PulseAudio objects for mixer");
+        return -1;
+    }
+
+    _paMainloop = mainloop;
+    _paContext = context;
+    _paObjectsSet = true;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  the PulseAudio objects for the mixer has been set");
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Release both directions, then drop the PulseAudio object references.
+    CloseSpeaker();
+    CloseMicrophone();
+
+    _paObjectsSet = false;
+    _paContext = NULL;
+    _paMainloop = NULL;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Forget the controlled output device and its stream; -1 marks
+    // "no device selected".
+    _paPlayStream = NULL;
+    _paOutputDeviceIndex = -1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Forget the controlled input device and its stream; -1 marks
+    // "no device selected".
+    _paRecStream = NULL;
+    _paInputDeviceIndex = -1;
+
+    return 0;
+}
+
+// Remembers the playout stream so that volume/mute operations can target
+// its sink input. A NULL argument detaches the stream.
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetPlayStream(pa_stream* playStream)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetPlayStream(playStream)");
+
+    CriticalSectionScoped lock(_critSect);
+    _paPlayStream = playStream;
+    return 0;
+}
+
+// Remembers the recording stream so that mute/volume queries can resolve
+// the source device actually in use. A NULL argument detaches the stream.
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetRecStream(pa_stream* recStream)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetRecStream(recStream)");
+
+    CriticalSectionScoped lock(_critSect);
+    _paRecStream = recStream;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::OpenSpeaker(
+    WebRtc_UWord16 deviceIndex)
+{
+    // Selects which PulseAudio sink this mixer controls and resets the
+    // cached speaker volume to the normal (100%) level.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::OpenSpeaker(deviceIndex=%d)",
+                 deviceIndex);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // No point in opening the speaker
+    // if PA objects have not been set
+    if (!_paObjectsSet)
+    {
+        // Message wording made consistent with OpenMicrophone()
+        // ("has" -> "have").
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PulseAudio objects have not been set");
+        return -1;
+    }
+
+    // Set the index for the PulseAudio
+    // output device to control
+    _paOutputDeviceIndex = deviceIndex;
+
+    // Init the speaker volume to the normal volume
+    _paSpeakerVolume = PA_VOLUME_NORM;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  the output mixer device is now open");
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::OpenMicrophone(
+    WebRtc_UWord16 deviceIndex)
+{
+    // Selects which PulseAudio source this mixer controls. The PA objects
+    // must already have been provided via SetPulseAudioObjects().
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::OpenMicrophone(deviceIndex=%d)",
+                 deviceIndex);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_paObjectsSet)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PulseAudio objects have not been set");
+        return -1;
+    }
+
+    _paInputDeviceIndex = deviceIndex;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  the input mixer device is now open");
+
+    return 0;
+}
+
+bool AudioMixerManagerLinuxPulse::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    // Initialized once OpenSpeaker() has assigned a valid device index.
+    return _paOutputDeviceIndex != -1;
+}
+
+bool AudioMixerManagerLinuxPulse::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    // Initialized once OpenMicrophone() has assigned a valid device index.
+    return _paInputDeviceIndex != -1;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetSpeakerVolume(
+    WebRtc_UWord32 volume)
+{
+    // Sets the playout volume. With a connected playout stream the sink
+    // input volume is changed directly; otherwise the value is cached in
+    // _paSpeakerVolume and applied when the stream connects.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetSpeakerVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    bool setFailed(false);
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only really set the volume if we have a connected stream
+        PaLock();
+
+        // Get the number of channels from the sample specification
+        const pa_sample_spec *spec =
+            LATE(pa_stream_get_sample_spec)(_paPlayStream);
+        if (!spec)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  could not get sample specification");
+            PaUnLock();
+            return -1;
+        }
+
+        // Set the same volume for all channels
+        pa_cvolume cVolumes;
+        LATE(pa_cvolume_set)(&cVolumes, spec->channels, volume);
+
+        pa_operation* paOperation = NULL;
+        paOperation = LATE(pa_context_set_sink_input_volume)(
+            _paContext,
+            LATE(pa_stream_get_index)(_paPlayStream),
+            &cVolumes,
+            PaSetVolumeCallback, NULL);
+        if (!paOperation)
+        {
+            setFailed = true;
+        } else
+        {
+            // Don't need to wait for the completion. Only unref a valid
+            // operation; pa_operation_unref() must not be passed NULL.
+            LATE(pa_operation_unref)(paOperation);
+        }
+
+        PaUnLock();
+    } else
+    {
+        // We have not created a stream or it's not connected to the sink
+        // Save the volume to be set at connection
+        _paSpeakerVolume = volume;
+    }
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not set speaker volume, error%d",
+                     LATE(pa_context_errno)(_paContext));
+
+        return -1;
+    }
+
+    return 0;
+}
+
+// Returns the playout volume: read from the connected sink input via
+// GetSinkInputInfo() (which appears to fill _paVolume through a PA
+// callback -- confirm against its definition), or the cached
+// _paSpeakerVolume when no stream is connected.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only get the volume if we have a connected stream
+        if (!GetSinkInputInfo())
+          return -1;
+
+        volume = static_cast<WebRtc_UWord32> (_paVolume);
+        ResetCallbackVariables();
+    } else
+    {
+        volume = _paSpeakerVolume;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::SpeakerVolume() => vol=%i",
+                 volume);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+    // The maximum is reported as PA_VOLUME_NORM (100%, 0 dB) even though
+    // PulseAudio itself allows amplification above that level.
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_NORM);
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+    // The minimum corresponds to the PulseAudio muted level.
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_MUTED);
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+    // Reports the granularity of sink-input volume control; PulseAudio
+    // stream volumes always move in steps of 1 (PA_VOLUME_NORM+1 levels).
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    // The sink input (stream) will always have step size = 1
+    // There are PA_VOLUME_NORM+1 steps
+    stepSize = 1;
+
+    // Pass |stepSize| as a trace argument; it was previously embedded in
+    // the format string, leaving %i with no matching argument.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::SpeakerVolumeStepSize() => "
+                 "size=%i", stepSize);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerVolumeIsAvailable(bool& available)
+{
+    // Sink-input volume control is always offered by PulseAudio, so this
+    // only fails when no output device has been opened yet.
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerMuteIsAvailable(bool& available)
+{
+    // Sink-input mute is always offered by PulseAudio, so this only fails
+    // when no output device has been opened yet.
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetSpeakerMute(bool enable)
+{
+    // Mutes/unmutes playout. With a connected stream the sink input is
+    // muted directly; otherwise the state is cached in _paSpeakerMute and
+    // applied at connection time.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetSpeakerMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    bool setFailed(false);
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only really mute if we have a connected stream
+        PaLock();
+
+        pa_operation* paOperation = NULL;
+        paOperation = LATE(pa_context_set_sink_input_mute)(
+            _paContext,
+            LATE(pa_stream_get_index)(_paPlayStream),
+            (int) enable,
+            PaSetVolumeCallback,
+            NULL);
+        if (!paOperation)
+        {
+            setFailed = true;
+        } else
+        {
+            // Don't need to wait for the completion. Only unref a valid
+            // operation; pa_operation_unref() must not be passed NULL.
+            LATE(pa_operation_unref)(paOperation);
+        }
+
+        PaUnLock();
+    } else
+    {
+        // We have not created a stream or it's not connected to the sink
+        // Save the mute status to be set at connection
+        _paSpeakerMute = enable;
+    }
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not mute speaker, error%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SpeakerMute(bool& enabled) const
+{
+    // Reports the playout mute state: queried from the connected sink
+    // input when a stream exists, otherwise the cached _paSpeakerMute.
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only get the mute status if we have a connected stream
+        if (!GetSinkInputInfo())
+          return -1;
+
+        enabled = static_cast<bool> (_paMute);
+        ResetCallbackVariables();
+    } else
+    {
+        enabled = _paSpeakerMute;
+    }
+
+    // Pass |enabled| as a trace argument; it was previously embedded in
+    // the format string, leaving %i without a matching argument.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::SpeakerMute() => "
+                 "enabled=%i", enabled);
+
+    return 0;
+}
+
+// Reports whether the playout sink in use has exactly two channels.
+// Uses the stream's actual device index when a playout stream is
+// connected, since the stream can be moved between devices mid-call.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::StereoPlayoutIsAvailable(bool& available)
+{
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paOutputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paPlayStream);
+    }
+
+    PaUnLock();
+
+    // GetSinkInfoByIndex() appears to populate _paChannels via a PA
+    // callback -- confirm against its definition.
+    if (!GetSinkInfoByIndex(deviceIndex))
+      return -1;
+
+    available = static_cast<bool> (_paChannels == 2);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::StereoRecordingIsAvailable(bool& available)
+{
+    // Queries the capture source (the stream's actual device when a
+    // recording stream is connected) and reports whether it has exactly
+    // two channels.
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    // Get info for this source
+    // We want to know if the actual device can record in stereo
+    paOperation = LATE(pa_context_get_source_info_by_index)(
+        _paContext, deviceIndex,
+        PaSourceInfoCallback,
+        (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+    PaUnLock();
+
+    if (!_callbackValues)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting number of input channels: %d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    available = static_cast<bool> (_paChannels == 2);
+
+    // Pass |available| as a trace argument; it was previously embedded in
+    // the format string, leaving %i without a matching argument.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::StereoRecordingIsAvailable()"
+                 " => available=%i", available);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneMuteIsAvailable(
+    bool& available)
+{
+    // Source mute is always offered by PulseAudio, so this only fails
+    // when no input device has been opened yet.
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetMicrophoneMute(bool enable)
+{
+    // Mutes/unmutes the capture source. Uses the stream's actual device
+    // index when a recording stream is connected, since the device can be
+    // changed during the call.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetMicrophoneMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    bool setFailed(false);
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    // Set mute switch for the source
+    paOperation = LATE(pa_context_set_source_mute_by_index)(
+        _paContext, deviceIndex,
+        enable,
+        PaSetVolumeCallback, NULL);
+
+    if (!paOperation)
+    {
+        setFailed = true;
+    } else
+    {
+        // Don't need to wait for this to complete. Only unref a valid
+        // operation; pa_operation_unref() must not be passed NULL.
+        LATE(pa_operation_unref)(paOperation);
+    }
+
+    PaUnLock();
+
+    // Reset variables altered by callback
+    ResetCallbackVariables();
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not mute microphone, error%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneMute(bool& enabled) const
+{
+    // Reads the mute switch of the capture source actually in use (the
+    // stream's device when connected, else the configured input device).
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    PaUnLock();
+
+    if (!GetSourceInfoByIndex(deviceIndex))
+      return -1;
+
+    enabled = static_cast<bool> (_paMute);
+
+    // Pass |enabled| as a trace argument; it was previously embedded in
+    // the format string, leaving %i without a matching argument.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::MicrophoneMute() =>"
+                 " enabled=%i", enabled);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+// Reports whether a microphone boost control exists for the opened input
+// device. Always reports false on PulseAudio (see comment below).
+// Returns 0 on success, -1 if no input device has been opened yet.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MicrophoneBoostIsAvailable(bool& available)
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Always unavailable in Pulse Audio
+    // Could make it possible to use PA_VOLUME_MAX
+    // but that gives bad audio with some sound cards
+    available = false;
+
+    return 0;
+}
+
+// Attempts to enable/disable microphone boost. Because boost is never
+// available on PulseAudio (MicrophoneBoostIsAvailable always yields false),
+// this deliberately returns -1 whenever an input device has been opened.
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetMicrophoneBoost(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid boost control
+    bool available(false);
+    MicrophoneBoostIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to enable microphone boost");
+        return -1;
+    }
+
+    // It is assumed that the call above fails!
+
+    return 0;
+}
+
+// Reads the microphone boost state into |enabled|. Boost is never available
+// with PulseAudio, so this always reports false once an input device is set.
+// Returns 0 on success, -1 if no input device has been opened yet.
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled on this platform!
+    enabled = false;
+
+    return 0;
+}
+
+// Reports whether a microphone volume control is available. PulseAudio
+// always exposes a source volume, so this reports true.
+// Returns 0 on success, -1 if no input device has been opened yet.
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneVolumeIsAvailable(
+    bool& available)
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Always available in Pulse Audio
+    available = true;
+
+    return 0;
+}
+
+// Sets the capture device volume (same level on every channel). Input
+// streams have no stream volume in PulseAudio, so the source device itself
+// is changed. Returns 0 on success, -1 on any failure.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetMicrophoneVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Unlike output streams, input streams have no concept of a stream volume,
+    // only a device volume. So we have to change the volume of the device
+    // itself.
+
+    // The device may have a different number of channels than the stream and
+    // their mapping may be different, so we don't want to use the channel count
+    // from our sample spec. We could use PA_CHANNELS_MAX to cover our bases,
+    // and the server allows that even if the device's channel count is lower,
+    // but some buggy PA clients don't like that (the pavucontrol on Hardy dies
+    // in an assert if the channel count is different). So instead we look up
+    // the actual number of channels that the device has.
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream.
+    // The device used by the stream can be changed during the call.
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    bool setFailed(false);
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    // Get the number of channels for this source
+    paOperation
+        = LATE(pa_context_get_source_info_by_index)(_paContext, deviceIndex,
+                                                    PaSourceInfoCallback,
+                                                    (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+
+    if (!_callbackValues)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting input channels: %d",
+                     LATE(pa_context_errno)(_paContext));
+        PaUnLock();
+        return -1;
+    }
+
+    WebRtc_UWord8 channels = _paChannels;
+    ResetCallbackVariables();
+
+    // Build a per-channel volume struct with |volume| on all device channels.
+    pa_cvolume cVolumes;
+    LATE(pa_cvolume_set)(&cVolumes, channels, volume);
+
+    // Set the volume for the source
+    paOperation
+        = LATE(pa_context_set_source_volume_by_index)(_paContext, deviceIndex,
+                                                      &cVolumes,
+                                                      PaSetVolumeCallback, NULL);
+
+    if (!paOperation)
+    {
+        setFailed = true;
+    }
+    else
+    {
+        // Don't need to wait for this to complete; just drop our reference.
+        // Bug fix: previously pa_operation_unref() was called unconditionally,
+        // i.e. also when paOperation was NULL, which is invalid.
+        LATE(pa_operation_unref)(paOperation);
+    }
+
+    PaUnLock();
+
+    // Reset variables altered by callback
+    ResetCallbackVariables();
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not set microphone volume, error %d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    return 0;
+}
+
+// Reads the current capture device volume into |volume| (max across the
+// device's channels, as recorded by PaSourceInfoCallbackHandler).
+// Returns 0 on success, -1 if no input device is set or the query fails.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream.
+    // The device used by the stream can be changed during the call.
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    PaUnLock();
+
+    if (!GetSourceInfoByIndex(deviceIndex))
+      return -1;
+
+    volume = static_cast<WebRtc_UWord32> (_paVolume);
+
+    // Bug fix: 'volume' was previously inside the format string, leaving the
+    // conversion specifier without a matching argument (undefined behavior).
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::MicrophoneVolume() =>"
+                 " vol=%u", volume);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+// Reports the maximum settable microphone volume. Capped at PA_VOLUME_NORM
+// (100%, 0 dB) rather than PA_VOLUME_MAX — see comment below.
+// Returns 0 on success, -1 if no input device has been opened yet.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // PA_VOLUME_NORM corresponds to 100% (0db)
+    // PA allows up to 150 db amplification (PA_VOLUME_MAX)
+    // but that doesn't work well for all sound cards
+    maxVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_NORM);
+
+    return 0;
+}
+
+// Reports the minimum settable microphone volume (PA_VOLUME_MUTED, i.e. 0).
+// Returns 0 on success, -1 if no input device has been opened yet.
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_MUTED);
+
+    return 0;
+}
+
+// Computes the microphone volume step size from the number of volume steps
+// reported for the source ((PA_VOLUME_NORM + 1) / steps).
+// Returns 0 on success, -1 if no input device is set or the query fails.
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream.
+    // The device used by the stream can be changed during the call.
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    // Get info for this source
+    paOperation
+        = LATE(pa_context_get_source_info_by_index)(_paContext, deviceIndex,
+                                                    PaSourceInfoCallback,
+                                                    (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    if (!_callbackValues)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting step size: %d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    // Defensive: PaSourceInfoCallbackHandler currently always sets
+    // _paVolSteps to PA_VOLUME_NORM + 1, but guard the division anyway.
+    if (_paVolSteps == 0)
+    {
+        return -1;
+    }
+
+    stepSize = static_cast<WebRtc_UWord16> ((PA_VOLUME_NORM + 1) / _paVolSteps);
+
+    // Bug fix: 'stepSize' was previously inside the format string, leaving
+    // the %i specifier without a matching argument (undefined behavior).
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::MicrophoneVolumeStepSize()"
+                 " => size=%i", stepSize);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// --- Static C trampolines -------------------------------------------------
+// PulseAudio invokes these plain-C callbacks; each one forwards to the
+// corresponding member handler via the |pThis| user-data pointer.
+void AudioMixerManagerLinuxPulse::PaSinkInfoCallback(pa_context */*c*/,
+                                                     const pa_sink_info *i,
+                                                     int eol, void *pThis)
+{
+    static_cast<AudioMixerManagerLinuxPulse*> (pThis)-> PaSinkInfoCallbackHandler(
+        i, eol);
+}
+
+void AudioMixerManagerLinuxPulse::PaSinkInputInfoCallback(
+    pa_context */*c*/,
+    const pa_sink_input_info *i,
+    int eol, void *pThis)
+{
+    static_cast<AudioMixerManagerLinuxPulse*> (pThis)->
+        PaSinkInputInfoCallbackHandler(i, eol);
+}
+
+
+void AudioMixerManagerLinuxPulse::PaSourceInfoCallback(pa_context */*c*/,
+                                                       const pa_source_info *i,
+                                                       int eol, void *pThis)
+{
+    static_cast<AudioMixerManagerLinuxPulse*> (pThis)->
+        PaSourceInfoCallbackHandler(i, eol);
+}
+
+// Completion callback for the set-volume/set-mute operations. Callers do not
+// wait on these operations, so this only logs failures (no mainloop signal).
+void AudioMixerManagerLinuxPulse::PaSetVolumeCallback(pa_context * c,
+                                                      int success, void */*pThis*/)
+{
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+                     " failed to set volume");
+    }
+}
+
+// Handles one pa_sink_info record from a sink query. |eol| != 0 marks the
+// end of the list; then we only wake the thread blocked in
+// WaitForOperationCompletion. Otherwise records channel count, max
+// per-channel volume, and mute state into the mutable callback members.
+void AudioMixerManagerLinuxPulse::PaSinkInfoCallbackHandler(
+    const pa_sink_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    _callbackValues = true;
+    _paChannels = i->channel_map.channels; // Get number of channels
+    pa_volume_t paVolume = PA_VOLUME_MUTED; // Minimum possible value.
+    for (int j = 0; j < _paChannels; ++j)
+    {
+        if (paVolume < i->volume.values[j])
+        {
+            paVolume = i->volume.values[j];
+        }
+    }
+    _paVolume = paVolume; // get the max volume for any channel
+    _paMute = i->mute; // get mute status
+
+    // supported since PA 0.9.15
+    //_paVolSteps = i->n_volume_steps; // get the number of volume steps
+    // default value is PA_VOLUME_NORM+1
+    _paVolSteps = PA_VOLUME_NORM + 1;
+}
+
+// Handles one pa_sink_input_info record (the playout stream's own volume).
+// |eol| != 0 marks the end of the list and just signals the waiter.
+// Unlike the sink/source handlers, no volume-step count is recorded here.
+void AudioMixerManagerLinuxPulse::PaSinkInputInfoCallbackHandler(
+    const pa_sink_input_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done.
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    _callbackValues = true;
+    _paChannels = i->channel_map.channels; // Get number of channels
+    pa_volume_t paVolume = PA_VOLUME_MUTED; // Minimum possible value.
+    for (int j = 0; j < _paChannels; ++j)
+    {
+        if (paVolume < i->volume.values[j])
+        {
+            paVolume = i->volume.values[j];
+        }
+    }
+    _paVolume = paVolume; // Get the max volume for any channel
+    _paMute = i->mute; // Get mute status
+}
+
+// Handles one pa_source_info record from a source (capture device) query.
+// |eol| != 0 marks the end of the list; then we only wake the thread blocked
+// in WaitForOperationCompletion. Otherwise records channel count, max
+// per-channel volume, and mute state into the mutable callback members.
+void AudioMixerManagerLinuxPulse::PaSourceInfoCallbackHandler(
+    const pa_source_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    _callbackValues = true;
+    _paChannels = i->channel_map.channels; // Get number of channels
+    pa_volume_t paVolume = PA_VOLUME_MUTED; // Minimum possible value.
+    for (int j = 0; j < _paChannels; ++j)
+    {
+        if (paVolume < i->volume.values[j])
+        {
+            paVolume = i->volume.values[j];
+        }
+    }
+    _paVolume = paVolume; // Get the max volume for any channel
+    _paMute = i->mute; // Get mute status
+
+    // supported since PA 0.9.15
+    //_paVolSteps = i->n_volume_steps; // Get the number of volume steps
+    // default value is PA_VOLUME_NORM+1
+    _paVolSteps = PA_VOLUME_NORM + 1;
+}
+
+// Clears the scratch members that the PA info callbacks write into.
+// Declared const because the members are 'mutable'; called before each
+// query (so stale values aren't read) and after (to leave a clean state).
+void AudioMixerManagerLinuxPulse::ResetCallbackVariables() const
+{
+    _paVolume = 0;
+    _paMute = 0;
+    _paVolSteps = 0;
+    _paChannels = 0;
+    _callbackValues = false;
+}
+
+// Blocks on the threaded mainloop until |paOperation| leaves the RUNNING
+// state (the info callbacks signal the mainloop at end-of-list), then drops
+// our reference to the operation. Call sites invoke this between PaLock()
+// and PaUnLock().
+// NOTE(review): assumes paOperation is non-NULL — callers pass the result of
+// a pa_context_* call straight in; confirm those cannot return NULL here.
+void AudioMixerManagerLinuxPulse::WaitForOperationCompletion(
+    pa_operation* paOperation) const
+{
+    while (LATE(pa_operation_get_state)(paOperation) == PA_OPERATION_RUNNING)
+    {
+        LATE(pa_threaded_mainloop_wait)(_paMainloop);
+    }
+
+    LATE(pa_operation_unref)(paOperation);
+}
+
+// Acquires the PulseAudio threaded-mainloop lock (required around all
+// pa_context_*/pa_stream_* calls made from outside the mainloop thread).
+void AudioMixerManagerLinuxPulse::PaLock() const
+{
+    LATE(pa_threaded_mainloop_lock)(_paMainloop);
+}
+
+// Releases the PulseAudio threaded-mainloop lock.
+void AudioMixerManagerLinuxPulse::PaUnLock() const
+{
+    LATE(pa_threaded_mainloop_unlock)(_paMainloop);
+}
+
+// Queries volume/mute info for the playout stream (sink input), retrying up
+// to kMaxRetryOnFailure times until the callback reports values. On success
+// the results are left in _paVolume/_paMute/_paChannels for the caller to
+// read. Returns false (and logs) if no values were obtained.
+bool AudioMixerManagerLinuxPulse::GetSinkInputInfo() const {
+  pa_operation* paOperation = NULL;
+  ResetCallbackVariables();
+
+  PaLock();
+  for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
+       retries ++) {
+    // Get info for this stream (sink input).
+    paOperation = LATE(pa_context_get_sink_input_info)(
+        _paContext,
+        LATE(pa_stream_get_index)(_paPlayStream),
+        PaSinkInputInfoCallback,
+        (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+  }
+  PaUnLock();
+
+  if (!_callbackValues) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "GetSinkInputInfo failed to get volume info : %d",
+                 LATE(pa_context_errno)(_paContext));
+    return false;
+  }
+
+  return true;
+}
+
+// Queries volume/mute info for the output device (sink) |device_index|,
+// retrying up to kMaxRetryOnFailure times until the callback reports values.
+// Results are left in the mutable callback members for the caller to read.
+// Returns false (and logs) if no values were obtained.
+bool AudioMixerManagerLinuxPulse::GetSinkInfoByIndex(
+    int device_index) const {
+  pa_operation* paOperation = NULL;
+  ResetCallbackVariables();
+
+  PaLock();
+  for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
+       retries ++) {
+    paOperation = LATE(pa_context_get_sink_info_by_index)(_paContext,
+        device_index, PaSinkInfoCallback, (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+  }
+  PaUnLock();
+
+  if (!_callbackValues) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "GetSinkInfoByIndex failed to get volume info: %d",
+                 LATE(pa_context_errno)(_paContext));
+    return false;
+  }
+
+  return true;
+}
+
+// Queries volume/mute info for the capture device (source) |device_index|,
+// retrying up to kMaxRetryOnFailure times until the callback reports values.
+// Results are left in the mutable callback members for the caller to read.
+// Returns false (and logs) if no values were obtained.
+// (Fixed: loop body was previously not indented, unlike the sibling
+// GetSinkInputInfo/GetSinkInfoByIndex helpers; behavior is unchanged.)
+bool AudioMixerManagerLinuxPulse::GetSourceInfoByIndex(
+    int device_index) const {
+  pa_operation* paOperation = NULL;
+  ResetCallbackVariables();
+
+  PaLock();
+  for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
+       retries ++) {
+    paOperation = LATE(pa_context_get_source_info_by_index)(
+        _paContext, device_index, PaSourceInfoCallback, (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+  }
+  PaUnLock();
+
+  if (!_callbackValues) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "GetSourceInfoByIndex error: %d",
+                 LATE(pa_context_errno)(_paContext));
+    return false;
+  }
+
+  return true;
+}
+
+}
+
diff --git a/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.h b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.h
new file mode 100644
index 0000000..22d6da5
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.h
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_PULSE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_PULSE_LINUX_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+#include "pulseaudiosymboltable_linux.h"
+
+#include <stdint.h>
+#include <pulse/pulseaudio.h>
+
+#ifndef UINT32_MAX
+#define UINT32_MAX  ((uint32_t)-1)
+#endif
+
+namespace webrtc
+{
+
+// Controls speaker and microphone volume/mute for the PulseAudio audio
+// device implementation. All PulseAudio calls go through the late-bound
+// symbol table (the LATE() macro) and a pa_threaded_mainloop/pa_context
+// pair supplied via SetPulseAudioObjects(). Query results are shuttled from
+// PA callbacks into mutable members while holding the mainloop lock.
+class AudioMixerManagerLinuxPulse
+{
+public:
+    WebRtc_Word32 SetPlayStream(pa_stream* playStream);
+    WebRtc_Word32 SetRecStream(pa_stream* recStream);
+    WebRtc_Word32 OpenSpeaker(WebRtc_UWord16 deviceIndex);
+    WebRtc_Word32 OpenMicrophone(WebRtc_UWord16 deviceIndex);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SetPulseAudioObjects(pa_threaded_mainloop* mainloop,
+                                       pa_context* context);
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+
+public:
+    AudioMixerManagerLinuxPulse(const WebRtc_Word32 id);
+    ~AudioMixerManagerLinuxPulse();
+
+private:
+    // Static C trampolines handed to PulseAudio; each forwards to the
+    // corresponding *Handler via the pThis user-data pointer.
+    static void PaSinkInfoCallback(pa_context *c, const pa_sink_info *i,
+                                   int eol, void *pThis);
+    static void PaSinkInputInfoCallback(pa_context *c,
+                                        const pa_sink_input_info *i, int eol,
+                                        void *pThis);
+    static void PaSourceInfoCallback(pa_context *c, const pa_source_info *i,
+                                     int eol, void *pThis);
+    static void
+        PaSetVolumeCallback(pa_context * /*c*/, int success, void */*pThis*/);
+    void PaSinkInfoCallbackHandler(const pa_sink_info *i, int eol);
+    void PaSinkInputInfoCallbackHandler(const pa_sink_input_info *i, int eol);
+    void PaSourceInfoCallbackHandler(const pa_source_info *i, int eol);
+
+    void ResetCallbackVariables() const;
+    void WaitForOperationCompletion(pa_operation* paOperation) const;
+    void PaLock() const;
+    void PaUnLock() const;
+
+    // Blocking query helpers; results land in the mutable members below.
+    bool GetSinkInputInfo() const;
+    bool GetSinkInfoByIndex(int device_index)const ;
+    bool GetSourceInfoByIndex(int device_index) const;
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    // Selected device indices; -1 means "not set / not opened".
+    WebRtc_Word16 _paOutputDeviceIndex;
+    WebRtc_Word16 _paInputDeviceIndex;
+
+    pa_stream* _paPlayStream;
+    pa_stream* _paRecStream;
+
+    pa_threaded_mainloop* _paMainloop;
+    pa_context* _paContext;
+
+    // Scratch values written by the PA info callbacks. 'mutable' so that
+    // const query methods can fill them while a query is in flight;
+    // _callbackValues flags whether a callback actually delivered data.
+    mutable WebRtc_UWord32 _paVolume;
+    mutable WebRtc_UWord32 _paMute;
+    mutable WebRtc_UWord32 _paVolSteps;
+    bool _paSpeakerMute;
+    mutable WebRtc_UWord32 _paSpeakerVolume;
+    mutable WebRtc_UWord8 _paChannels;
+    bool _paObjectsSet;
+    mutable bool _callbackValues;
+
+    WebRtc_UWord8 _micVolChannels;
+    WebRtc_UWord8 _spkVolChannels;
+};
+
+}
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_PULSE_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.cc b/trunk/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.cc
new file mode 100644
index 0000000..8f3c7c8
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.cc
@@ -0,0 +1,116 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "latebindingsymboltable_linux.h"
+
+#ifdef WEBRTC_LINUX
+#include <dlfcn.h>
+#endif
+
+// TODO(grunell): Either put inside webrtc namespace or use webrtc:: instead.
+using namespace webrtc;
+
+namespace webrtc_adm_linux {
+
+// Returns the most recent dlopen/dlsym error message, or a placeholder
+// string when dlerror() reports no pending error (it returns NULL then).
+// Note: calling dlerror() also clears the pending error state.
+inline static const char *GetDllError() {
+#ifdef WEBRTC_LINUX
+  char *err = dlerror();
+  if (err) {
+    return err;
+  } else {
+    return "No error";
+  }
+#else
+#error Not implemented
+#endif
+}
+
+// Opens the shared library |dll_name| with immediate symbol resolution.
+// Returns kInvalidDllHandle (and logs a warning) on failure.
+DllHandle InternalLoadDll(const char dll_name[]) {
+#ifdef WEBRTC_LINUX
+  DllHandle handle = dlopen(dll_name, RTLD_NOW);
+#else
+#error Not implemented
+#endif
+  if (handle == kInvalidDllHandle) {
+    // Bug fix: GetDllError() returns a string, so use %s (was %d, which is
+    // undefined behavior for a char* argument).
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
+               "Can't load %s : %s", dll_name, GetDllError());
+  }
+  return handle;
+}
+
+// Closes a handle previously returned by InternalLoadDll, logging any
+// dlclose failure.
+void InternalUnloadDll(DllHandle handle) {
+#ifdef WEBRTC_LINUX
+  if (dlclose(handle) != 0) {
+    // Bug fix: GetDllError() returns a string, so use %s (was %d, which is
+    // undefined behavior for a char* argument).
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+               "%s", GetDllError());
+  }
+#else
+#error Not implemented
+#endif
+}
+
+// Resolves |symbol_name| from |handle| into *symbol. Distinguishes a dlsym
+// error (dlerror() non-NULL) from a symbol that legitimately resolves to
+// NULL; both are treated as failure. Returns true on success.
+static bool LoadSymbol(DllHandle handle,
+                       const char *symbol_name,
+                       void **symbol) {
+#ifdef WEBRTC_LINUX
+  *symbol = dlsym(handle, symbol_name);
+  char *err = dlerror();
+  if (err) {
+    // Bug fix: 'err' is a string, so use %s (was %d, which is undefined
+    // behavior for a char* argument).
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+               "Error loading symbol %s : %s", symbol_name, err);
+    return false;
+  } else if (!*symbol) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+               "Symbol %s is NULL", symbol_name);
+    return false;
+  }
+  return true;
+#else
+#error Not implemented
+#endif
+}
+
+// This routine MUST assign SOME value for every symbol, even if that value is
+// NULL, or else some symbols may be left with uninitialized data that the
+// caller may later interpret as a valid address.
+// Resolves all |num_symbols| names from |symbol_names| into |symbols|;
+// stops at the first failure and returns false (earlier slots are filled,
+// later ones keep whatever LoadSymbol wrote — NULL on its failure path).
+bool InternalLoadSymbols(DllHandle handle,
+                         int num_symbols,
+                         const char *const symbol_names[],
+                         void *symbols[]) {
+#ifdef WEBRTC_LINUX
+  // Clear any old errors.
+  dlerror();
+#endif
+  for (int i = 0; i < num_symbols; ++i) {
+    if (!LoadSymbol(handle, symbol_names[i], &symbols[i])) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace webrtc_adm_linux
diff --git a/trunk/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.h b/trunk/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.h
new file mode 100644
index 0000000..91d25aa
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.h
@@ -0,0 +1,195 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_LATEBINDINGSYMBOLTABLE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_LATEBINDINGSYMBOLTABLE_LINUX_H
+
+#include <assert.h>
+#include <stddef.h>  // for NULL
+#include <string.h>
+
+#include "constructor_magic.h"
+#include "trace.h"
+
+// This file provides macros for creating "symbol table" classes to simplify the
+// dynamic loading of symbols from DLLs. Currently the implementation only
+// supports Linux and pure C symbols.
+// See talk/sound/pulseaudiosymboltable.(h|cc) for an example.
+
+namespace webrtc_adm_linux {
+
+#ifdef WEBRTC_LINUX
+typedef void *DllHandle;
+
+const DllHandle kInvalidDllHandle = NULL;
+#else
+#error Not implemented
+#endif
+
+// These are helpers for use only by the class below.
+DllHandle InternalLoadDll(const char dll_name[]);
+
+void InternalUnloadDll(DllHandle handle);
+
+bool InternalLoadSymbols(DllHandle handle,
+                         int num_symbols,
+                         const char *const symbol_names[],
+                         void *symbols[]);
+
+// Generic late-binding symbol table: on Load(), dlopen()s kDllName and
+// resolves all SYMBOL_TABLE_SIZE entries of kSymbolNames into symbols_.
+// Instantiated via the LATE_BINDING_SYMBOL_TABLE_* macros below.
+template <int SYMBOL_TABLE_SIZE,
+          const char kDllName[],
+          const char *const kSymbolNames[]>
+class LateBindingSymbolTable {
+ public:
+  LateBindingSymbolTable()
+      : handle_(kInvalidDllHandle),
+        undefined_symbols_(false) {
+    memset(symbols_, 0, sizeof(symbols_));
+  }
+
+  ~LateBindingSymbolTable() {
+    Unload();
+  }
+
+  static int NumSymbols() {
+    return SYMBOL_TABLE_SIZE;
+  }
+
+  // We do not use this, but we offer it for theoretical convenience.
+  static const char *GetSymbolName(int index) {
+    assert(index < NumSymbols());
+    return kSymbolNames[index];
+  }
+
+  bool IsLoaded() const {
+    return handle_ != kInvalidDllHandle;
+  }
+
+  // Loads the DLL and the symbol table. Returns true iff the DLL and symbol
+  // table loaded successfully.
+  bool Load() {
+    if (IsLoaded()) {
+      return true;
+    }
+    if (undefined_symbols_) {
+      // We do not attempt to load again because repeated attempts are not
+      // likely to succeed and DLL loading is costly.
+      //WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+      //           "We know there are undefined symbols");
+      return false;
+    }
+    handle_ = InternalLoadDll(kDllName);
+    if (!IsLoaded()) {
+      return false;
+    }
+    if (!InternalLoadSymbols(handle_, NumSymbols(), kSymbolNames, symbols_)) {
+      undefined_symbols_ = true;
+      Unload();
+      return false;
+    }
+    return true;
+  }
+
+  // Unloads the DLL (if loaded) and clears all cached symbol addresses.
+  void Unload() {
+    if (!IsLoaded()) {
+      return;
+    }
+    InternalUnloadDll(handle_);
+    handle_ = kInvalidDllHandle;
+    memset(symbols_, 0, sizeof(symbols_));
+  }
+
+  // Retrieves the given symbol. NOTE: Recommended to use LATESYM_GET below
+  // instead of this.
+  void *GetSymbol(int index) const {
+    assert(IsLoaded());
+    assert(index < NumSymbols());
+    return symbols_[index];
+  }
+
+ private:
+  DllHandle handle_;
+  // Set after a failed symbol resolution so later Load() calls fail fast
+  // instead of repeating the expensive dlopen/dlsym sequence.
+  bool undefined_symbols_;
+  void *symbols_[SYMBOL_TABLE_SIZE];
+
+  DISALLOW_COPY_AND_ASSIGN(LateBindingSymbolTable);
+};
+
+// This macro must be invoked in a header to declare a symbol table class.
+#define LATE_BINDING_SYMBOL_TABLE_DECLARE_BEGIN(ClassName) \
+enum {
+
+// This macro must be invoked in the header declaration once for each symbol
+// (recommended to use an X-Macro to avoid duplication).
+// This macro defines an enum with names built from the symbols, which
+// essentially creates a hash table in the compiler from symbol names to their
+// indices in the symbol table class.
+#define LATE_BINDING_SYMBOL_TABLE_DECLARE_ENTRY(ClassName, sym) \
+  ClassName##_SYMBOL_TABLE_INDEX_##sym,
+
+// This macro completes the header declaration.
+#define LATE_BINDING_SYMBOL_TABLE_DECLARE_END(ClassName) \
+  ClassName##_SYMBOL_TABLE_SIZE \
+}; \
+\
+extern const char ClassName##_kDllName[]; \
+extern const char *const \
+    ClassName##_kSymbolNames[ClassName##_SYMBOL_TABLE_SIZE]; \
+\
+typedef ::webrtc_adm_linux::LateBindingSymbolTable<ClassName##_SYMBOL_TABLE_SIZE, \
+                                            ClassName##_kDllName, \
+                                            ClassName##_kSymbolNames> \
+    ClassName;
+
+// This macro must be invoked in a .cc file to define a previously-declared
+// symbol table class.
+#define LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(ClassName, dllName) \
+const char ClassName##_kDllName[] = dllName; \
+const char *const ClassName##_kSymbolNames[ClassName##_SYMBOL_TABLE_SIZE] = {
+
+// This macro must be invoked in the .cc definition once for each symbol
+// (recommended to use an X-Macro to avoid duplication).
+// This would have to use the mangled name if we were to ever support C++
+// symbols.
+#define LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(ClassName, sym) \
+  #sym,
+
+#define LATE_BINDING_SYMBOL_TABLE_DEFINE_END(ClassName) \
+};
+
+// Index of a given symbol in the given symbol table class.
+#define LATESYM_INDEXOF(ClassName, sym) \
+  (ClassName##_SYMBOL_TABLE_INDEX_##sym)
+
+// Returns a reference to the given late-binded symbol, with the correct type.
+// NOTE: relies on the 'typeof' keyword, a GCC/Clang extension (not standard
+// C++); this header is only built for Linux toolchains that support it.
+#define LATESYM_GET(ClassName, inst, sym) \
+  (*reinterpret_cast<typeof(&sym)>( \
+      (inst)->GetSymbol(LATESYM_INDEXOF(ClassName, sym))))
+
+}  // namespace webrtc_adm_linux
+
+#endif  // WEBRTC_AUDIO_DEVICE_LATEBINDINGSYMBOLTABLE_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.cc b/trunk/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.cc
new file mode 100644
index 0000000..ae663f7
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.cc
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "pulseaudiosymboltable_linux.h"
+
+namespace webrtc_adm_linux_pulse {
+
+LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so.0")
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(PulseAudioSymbolTable, sym)
+PULSE_AUDIO_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DEFINE_END(PulseAudioSymbolTable)
+
+}  // namespace webrtc_adm_linux_pulse
diff --git a/trunk/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.h b/trunk/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.h
new file mode 100644
index 0000000..049509b
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.h
@@ -0,0 +1,104 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_PULSEAUDIOSYMBOLTABLE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_PULSEAUDIOSYMBOLTABLE_LINUX_H
+
+#include "latebindingsymboltable_linux.h"
+
+namespace webrtc_adm_linux_pulse {
+
+// The PulseAudio symbols we need, as an X-Macro list.
+// This list must contain precisely every libpulse function that is used in
+// the ADM LINUX PULSE Device and Mixer classes
+#define PULSE_AUDIO_SYMBOLS_LIST \
+  X(pa_bytes_per_second) \
+  X(pa_context_connect) \
+  X(pa_context_disconnect) \
+  X(pa_context_errno) \
+  X(pa_context_get_protocol_version) \
+  X(pa_context_get_server_info) \
+  X(pa_context_get_sink_info_list) \
+  X(pa_context_get_sink_info_by_index) \
+  X(pa_context_get_sink_info_by_name) \
+  X(pa_context_get_sink_input_info) \
+  X(pa_context_get_source_info_by_index) \
+  X(pa_context_get_source_info_by_name) \
+  X(pa_context_get_source_info_list) \
+  X(pa_context_get_state) \
+  X(pa_context_new) \
+  X(pa_context_set_sink_input_volume) \
+  X(pa_context_set_sink_input_mute) \
+  X(pa_context_set_source_volume_by_index) \
+  X(pa_context_set_source_mute_by_index) \
+  X(pa_context_set_state_callback) \
+  X(pa_context_unref) \
+  X(pa_cvolume_set) \
+  X(pa_operation_get_state) \
+  X(pa_operation_unref) \
+  X(pa_stream_connect_playback) \
+  X(pa_stream_connect_record) \
+  X(pa_stream_disconnect) \
+  X(pa_stream_drop) \
+  X(pa_stream_get_device_index) \
+  X(pa_stream_get_index) \
+  X(pa_stream_get_latency) \
+  X(pa_stream_get_sample_spec) \
+  X(pa_stream_get_state) \
+  X(pa_stream_new) \
+  X(pa_stream_peek) \
+  X(pa_stream_readable_size) \
+  X(pa_stream_set_buffer_attr) \
+  X(pa_stream_set_overflow_callback) \
+  X(pa_stream_set_read_callback) \
+  X(pa_stream_set_state_callback) \
+  X(pa_stream_set_underflow_callback) \
+  X(pa_stream_set_write_callback) \
+  X(pa_stream_unref) \
+  X(pa_stream_writable_size) \
+  X(pa_stream_write) \
+  X(pa_strerror) \
+  X(pa_threaded_mainloop_free) \
+  X(pa_threaded_mainloop_get_api) \
+  X(pa_threaded_mainloop_lock) \
+  X(pa_threaded_mainloop_new) \
+  X(pa_threaded_mainloop_signal) \
+  X(pa_threaded_mainloop_start) \
+  X(pa_threaded_mainloop_stop) \
+  X(pa_threaded_mainloop_unlock) \
+  X(pa_threaded_mainloop_wait)
+
+LATE_BINDING_SYMBOL_TABLE_DECLARE_BEGIN(PulseAudioSymbolTable)
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DECLARE_ENTRY(PulseAudioSymbolTable, sym)
+PULSE_AUDIO_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DECLARE_END(PulseAudioSymbolTable)
+
+}  // namespace webrtc_adm_linux_pulse
+
+#endif  // WEBRTC_AUDIO_DEVICE_PULSEAUDIOSYMBOLTABLE_LINUX_H
diff --git a/trunk/src/modules/audio_device/main/source/mac/audio_device_mac.cc b/trunk/src/modules/audio_device/main/source/mac/audio_device_mac.cc
new file mode 100644
index 0000000..e927ae7
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/audio_device_mac.cc
@@ -0,0 +1,3241 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility.h"
+#include "audio_device_mac.h"
+#include "audio_device_config.h"
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+
+#include <cassert>
+
+#include <sys/sysctl.h>         // sysctlbyname()
+#include <mach/mach.h>          // mach_task_self()
+#include <libkern/OSAtomic.h>   // OSAtomicCompareAndSwap()
+#include "portaudio/pa_ringbuffer.h"
+
+namespace webrtc
+{
+
+#define WEBRTC_CA_RETURN_ON_ERR(expr)                                   \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,               \
+                "Error in " #expr, (const char *)&err);                 \
+            return -1;                                                  \
+        }                                                               \
+    } while(0)
+
+#define WEBRTC_CA_LOG_ERR(expr)                                         \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,               \
+                "Error in " #expr, (const char *)&err);                 \
+        }                                                               \
+    } while(0)
+
+#define WEBRTC_CA_LOG_WARN(expr)                                        \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceWarning, kTraceAudioDevice, _id,             \
+                "Error in " #expr, (const char *)&err);                 \
+        }                                                               \
+    } while(0)
+
+enum
+{
+    MaxNumberDevices = 64
+};
+
+void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue)
+{
+    while (1)
+    {
+        int32_t oldValue = *theValue;
+        if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue)
+            == true)
+        {
+            return;
+        }
+    }
+}
+
+int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue)
+{
+    while (1)
+    {
+        WebRtc_Word32 value = *theValue;
+        if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true)
+        {
+            return value;
+        }
+    }
+}
+
+// CoreAudio errors are best interpreted as four character strings.
+void AudioDeviceMac::logCAMsg(const TraceLevel level,
+                              const TraceModule module,
+                              const WebRtc_Word32 id, const char *msg,
+                              const char *err)
+{
+    assert(msg != NULL);
+    assert(err != NULL);
+
+#ifdef WEBRTC_BIG_ENDIAN
+    WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
+#else
+    // We need to flip the characters in this case.
+    WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err
+        + 2, err + 1, err);
+#endif
+}
+
+AudioDeviceMac::AudioDeviceMac(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _stopEventRec(*EventWrapper::Create()),
+    _stopEvent(*EventWrapper::Create()),
+    _captureWorkerThread(NULL),
+    _renderWorkerThread(NULL),
+    _captureWorkerThreadId(0),
+    _renderWorkerThreadId(0),
+    _id(id),
+    _mixerManager(id),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceID(kAudioObjectUnknown),
+    _outputDeviceID(kAudioObjectUnknown),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _recChannels(N_REC_CHANNELS),
+    _playChannels(N_PLAY_CHANNELS),
+    _captureBufData(NULL),
+    _renderBufData(NULL),
+    _playBufType(AudioDeviceModule::kFixedBufferSize),
+    _initialized(false),
+    _isShutDown(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _startRec(false),
+    _stopRec(false),
+    _stopPlay(false),
+    _AGC(false),
+    _renderDeviceIsAlive(1),
+    _captureDeviceIsAlive(1),
+    _twoDevices(true),
+    _doStop(false),
+    _doStopRec(false),
+    _macBookPro(false),
+    _macBookProPanRight(false),
+    _captureLatencyUs(0),
+    _renderLatencyUs(0),
+    _captureDelayUs(0),
+    _renderDelayUs(0),
+    _renderDelayOffsetSamples(0),
+    _playBufDelayFixed(20),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _paCaptureBuffer(NULL),
+    _paRenderBuffer(NULL),
+    _captureBufSizeSamples(0),
+    _renderBufSizeSamples(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+
+    assert(&_stopEvent != NULL);
+    assert(&_stopEventRec != NULL);
+
+    memset(_renderConvertData, 0, sizeof(_renderConvertData));
+    memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));
+    memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
+    memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));
+    memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
+}
+
+
+AudioDeviceMac::~AudioDeviceMac()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+
+    if (!_isShutDown)
+    {
+        Terminate();
+    }
+
+    if (_captureWorkerThread)
+    {
+        delete _captureWorkerThread;
+        _captureWorkerThread = NULL;
+    }
+
+    if (_renderWorkerThread)
+    {
+        delete _renderWorkerThread;
+        _renderWorkerThread = NULL;
+    }
+
+    if (_paRenderBuffer)
+    {
+        delete _paRenderBuffer;
+        _paRenderBuffer = NULL;
+    }
+
+    if (_paCaptureBuffer)
+    {
+        delete _paCaptureBuffer;
+        _paCaptureBuffer = NULL;
+    }
+
+    if (_renderBufData)
+    {
+        delete[] _renderBufData;
+        _renderBufData = NULL;
+    }
+
+    if (_captureBufData)
+    {
+        delete[] _captureBufData;
+        _captureBufData = NULL;
+    }
+
+    kern_return_t kernErr = KERN_SUCCESS;
+    kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " semaphore_destroy() error: %d", kernErr);
+    }
+
+    kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " semaphore_destroy() error: %d", kernErr);
+    }
+
+    delete &_stopEvent;
+    delete &_stopEventRec;
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+WebRtc_Word32 AudioDeviceMac::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::Init()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    OSStatus err = noErr;
+
+    _isShutDown = false;
+
+    // PortAudio ring buffers require an elementCount which is a power of two.
+    if (_renderBufData == NULL)
+    {
+        UInt32 powerOfTwo = 1;
+        while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES)
+        {
+            powerOfTwo <<= 1;
+        }
+        _renderBufSizeSamples = powerOfTwo;
+        _renderBufData = new SInt16[_renderBufSizeSamples];
+    }
+
+    if (_paRenderBuffer == NULL)
+    {
+        _paRenderBuffer = new PaUtilRingBuffer;
+        ring_buffer_size_t bufSize = -1;
+        bufSize = PaUtil_InitializeRingBuffer(_paRenderBuffer, sizeof(SInt16),
+                                              _renderBufSizeSamples,
+                                              _renderBufData);
+        if (bufSize == -1)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " PaUtil_InitializeRingBuffer() error");
+            return -1;
+        }
+    }
+
+    if (_captureBufData == NULL)
+    {
+        UInt32 powerOfTwo = 1;
+        while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES)
+        {
+            powerOfTwo <<= 1;
+        }
+        _captureBufSizeSamples = powerOfTwo;
+        _captureBufData = new Float32[_captureBufSizeSamples];
+    }
+
+    if (_paCaptureBuffer == NULL)
+    {
+        _paCaptureBuffer = new PaUtilRingBuffer;
+        ring_buffer_size_t bufSize = -1;
+        bufSize = PaUtil_InitializeRingBuffer(_paCaptureBuffer,
+                                              sizeof(Float32),
+                                              _captureBufSizeSamples,
+                                              _captureBufData);
+        if (bufSize == -1)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " PaUtil_InitializeRingBuffer() error");
+            return -1;
+        }
+    }
+
+    if (_renderWorkerThread == NULL)
+    {
+        _renderWorkerThread
+            = ThreadWrapper::CreateThread(RunRender, this, kRealtimePriority,
+                                          "RenderWorkerThread");
+        if (_renderWorkerThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " Render CreateThread() error");
+            return -1;
+        }
+    }
+
+    if (_captureWorkerThread == NULL)
+    {
+        _captureWorkerThread
+            = ThreadWrapper::CreateThread(RunCapture, this, kRealtimePriority,
+                                          "CaptureWorkerThread");
+        if (_captureWorkerThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " Capture CreateThread() error");
+            return -1;
+        }
+    }
+
+    kern_return_t kernErr = KERN_SUCCESS;
+    kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,
+                               SYNC_POLICY_FIFO, 0);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     " semaphore_create() error: %d", kernErr);
+        return -1;
+    }
+
+    kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,
+                               SYNC_POLICY_FIFO, 0);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     " semaphore_create() error: %d", kernErr);
+        return -1;
+    }
+
+    // Setting RunLoop to NULL here instructs HAL to manage its own thread for 
+    // notifications. This was the default behaviour on OS X 10.5 and earlier, but now 
+    // must be explicitly specified. HAL would otherwise try to use the main thread to
+    // issue notifications.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioHardwarePropertyRunLoop,
+            kAudioObjectPropertyScopeGlobal,
+            kAudioObjectPropertyElementMaster };
+    CFRunLoopRef runLoop = NULL;
+    UInt32 size = sizeof(CFRunLoopRef);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(kAudioObjectSystemObject,
+            &propertyAddress, 0, NULL, size, &runLoop));
+
+    // Listen for any device changes.
+    propertyAddress.mSelector = kAudioHardwarePropertyDevices;
+    WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(kAudioObjectSystemObject,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Determine if this is a MacBook Pro
+    _macBookPro = false;
+    _macBookProPanRight = false;
+    char buf[128];
+    size_t length = sizeof(buf);
+    memset(buf, 0, length);
+
+    int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);
+    if (intErr != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Error in sysctlbyname(): %d", err);
+    } else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Hardware model: %s", buf);
+        if (strncmp(buf, "MacBookPro", 10) == 0)
+        {
+            _macBookPro = true;
+        }
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Recording must be stopped");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Playback must be stopped");
+        return -1;
+    }
+
+    _critSect.Enter();
+
+    _mixerManager.Close();
+
+    OSStatus err = noErr;
+    int retVal = 0;
+
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
+            kAudioObjectPropertyElementMaster };
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(kAudioObjectSystemObject,
+            &propertyAddress, &objectListenerProc, this));
+
+    err = AudioHardwareUnload();
+    if (err != noErr)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Error in AudioHardwareUnload()", (const char*) &err);
+        retVal = -1;
+    }
+
+    _critSect.Leave();
+
+    _isShutDown = true;
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return retVal;
+}
+
+bool AudioDeviceMac::Initialized() const
+{
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker exists
+    // 
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1)
+    {
+        return -1;
+    }
+
+    if (_inputDeviceID == _outputDeviceID)
+    {
+        _twoDevices = false;
+    } else
+    {
+        _twoDevices = true;
+    }
+
+    if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid microphone exists
+    // 
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+
+WebRtc_Word32 AudioDeviceMac::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1)
+    {
+        return -1;
+    }
+
+    if (_inputDeviceID == _outputDeviceID)
+    {
+        _twoDevices = false;
+    } else
+    {
+        _twoDevices = true;
+    }
+
+    if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+bool AudioDeviceMac::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+bool AudioDeviceMac::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a volume control exists
+    //
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                               WebRtc_UWord16 volumeRight)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32
+AudioDeviceMac::WaveOutVolume(WebRtc_UWord16& /*volumeLeft*/,
+                              WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceMac::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+    return 0;
+}
+
+WebRtc_Word32
+AudioDeviceMac::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    //
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetSpeakerMute(bool enable)
+{
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0);
+
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetMicrophoneMute(bool enable)
+{
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0);
+
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Enumerate all available microphones and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    //
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0);
+
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoRecordingIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // Cannot open the specified device
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone can record stereo
+    //
+    _mixerManager.StereoRecordingIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetStereoRecording(bool enable)
+{
+
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoPlayoutIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // Cannot open the specified device
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker can play out stereo
+    //
+    _mixerManager.StereoPlayoutIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+// Selects the number of playout channels: two when |enable| is true,
+// otherwise one. Always succeeds.
+WebRtc_Word32 AudioDeviceMac::SetStereoPlayout(bool enable)
+{
+    _playChannels = enable ? 2 : 1;
+    return 0;
+}
+
+// Reports whether stereo playout is currently selected (two playout
+// channels configured). Always returns 0.
+WebRtc_Word32 AudioDeviceMac::StereoPlayout(bool& enabled) const
+{
+    enabled = (_playChannels == 2);
+    return 0;
+}
+
+// Stores whether automatic gain control is requested by the caller.
+// The flag is only recorded here; it is consumed elsewhere. Always succeeds.
+WebRtc_Word32 AudioDeviceMac::SetAGC(bool enable)
+{
+    _AGC = enable;
+    return 0;
+}
+
+// Returns the AGC flag previously stored by SetAGC().
+bool AudioDeviceMac::AGC() const
+{
+    return _AGC;
+}
+
+// Determines whether the selected capture device exposes a volume control.
+// A successful InitMicrophone() implies such a control exists. Any mixer
+// opened just for this probe is closed again. Always returns 0.
+WebRtc_Word32 AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available)
+{
+    const bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Attempt to open the input mixer corresponding to the currently
+    // selected device.
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // Initialization failed; the selected microphone has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // InitMicrophone() succeeding means a volume control is present.
+    available = true;
+
+    // Close the mixer if it was opened solely for this probe.
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+// Sets the capture volume by delegating to the mixer manager.
+// Returns the mixer manager's result (0 on success, -1 on failure).
+WebRtc_Word32 AudioDeviceMac::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    return _mixerManager.SetMicrophoneVolume(volume);
+}
+
+// Retrieves the current capture volume from the mixer manager.
+// On success |volume| receives the level and 0 is returned; on failure a
+// warning is traced and -1 is returned.
+WebRtc_Word32 AudioDeviceMac::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        // Fixed typo in trace message ("retrive" -> "retrieve").
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  failed to retrieve current microphone level");
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+// Retrieves the maximum capture volume supported by the mixer.
+// Returns 0 on success (with |maxVolume| updated), or -1 on failure.
+WebRtc_Word32
+AudioDeviceMac::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MaxMicrophoneVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = level;
+    return 0;
+}
+
+// Retrieves the minimum capture volume supported by the mixer.
+// Returns 0 on success (with |minVolume| updated), or -1 on failure.
+WebRtc_Word32
+AudioDeviceMac::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MinMicrophoneVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = level;
+    return 0;
+}
+
+// Retrieves the capture volume step size from the mixer.
+// Returns 0 on success (with |stepSize| updated), or -1 on failure.
+WebRtc_Word32
+AudioDeviceMac::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+    WebRtc_UWord16 step(0);
+
+    if (_mixerManager.MicrophoneVolumeStepSize(step) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = step;
+    return 0;
+}
+
+// Counts the audio devices usable for playout. The device IDs themselves
+// are fetched into a scratch array and discarded.
+WebRtc_Word16 AudioDeviceMac::PlayoutDevices()
+{
+    AudioDeviceID outputDevices[MaxNumberDevices];
+    return GetNumberDevices(kAudioDevicePropertyScopeOutput, outputDevices,
+                            MaxNumberDevices);
+}
+
+// Selects the playout device by enumeration index. Fails if playout is
+// already initialized or the index is out of range.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioDeviceMac::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    AudioDeviceID playDevices[MaxNumberDevices];
+    WebRtc_UWord32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,
+                                               playDevices, MaxNumberDevices);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of available waveform-audio output devices is %u",
+                 nDevices);
+
+    // Use >= rather than > (nDevices - 1): with an unsigned nDevices of 0
+    // the subtraction would wrap around and accept any index.
+    if (index >= nDevices)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// Windows-specific device selection is not applicable on Mac; always fails.
+WebRtc_Word32 AudioDeviceMac::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+// Fetches the human-readable name of the playout device at |index| into
+// |name| (zero-filled first). |guid| is only zero-filled; Mac provides no
+// GUIDs. Returns -1 for a bad index or null |name|, otherwise the result
+// of GetDeviceName().
+WebRtc_Word32 AudioDeviceMac::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+    WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+    const WebRtc_UWord16 nDevices(PlayoutDevices());
+
+    if ((name == NULL) || (index > (nDevices - 1)))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name);
+}
+
+// Fetches the human-readable name of the capture device at |index| into
+// |name| (zero-filled first). |guid| is only zero-filled; Mac provides no
+// GUIDs. Returns -1 for a bad index or null |name|, otherwise the result
+// of GetDeviceName().
+WebRtc_Word32 AudioDeviceMac::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+    WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    if ((name == NULL) || (index > (nDevices - 1)))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return GetDeviceName(kAudioDevicePropertyScopeInput, index, name);
+}
+
+// Counts the audio devices usable for capture. The device IDs themselves
+// are fetched into a scratch array and discarded.
+WebRtc_Word16 AudioDeviceMac::RecordingDevices()
+{
+    AudioDeviceID inputDevices[MaxNumberDevices];
+    return GetNumberDevices(kAudioDevicePropertyScopeInput, inputDevices,
+                            MaxNumberDevices);
+}
+
+// Selects the capture device by enumeration index. Fails if recording is
+// already initialized or the index is out of range.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioDeviceMac::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    AudioDeviceID recDevices[MaxNumberDevices];
+    WebRtc_UWord32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,
+                                               recDevices, MaxNumberDevices);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of available waveform-audio input devices is %u",
+                 nDevices);
+
+    // Use >= rather than > (nDevices - 1): with an unsigned nDevices of 0
+    // the subtraction would wrap around and accept any index.
+    if (index >= nDevices)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+
+// Windows-specific device selection is not applicable on Mac; always fails.
+WebRtc_Word32
+AudioDeviceMac::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+// Probes whether playout can be used by running a full init/start/stop
+// cycle. StartPlayout() is required because the IOProc created by
+// InitPlayout() is only destroyed via StopPlayout(). Always returns 0;
+// the verdict is placed in |available|.
+WebRtc_Word32 AudioDeviceMac::PlayoutIsAvailable(bool& available)
+{
+    bool ok = true;
+
+    // Any failure in the init/start/stop sequence marks playout as
+    // unavailable.
+    if (InitPlayout() == -1)
+    {
+        ok = false;
+    }
+
+    if (StartPlayout() == -1)
+    {
+        ok = false;
+    }
+
+    // Cancel the effect of the initialization above.
+    if (StopPlayout() == -1)
+    {
+        ok = false;
+    }
+
+    available = ok;
+    return 0;
+}
+
+// Probes whether recording can be used by running a full init/start/stop
+// cycle. StartRecording() is required because the IOProc created by
+// InitRecording() is only destroyed via StopRecording(). Always returns 0;
+// the verdict is placed in |available|.
+WebRtc_Word32 AudioDeviceMac::RecordingIsAvailable(bool& available)
+{
+    bool ok = true;
+
+    // Any failure in the init/start/stop sequence marks recording as
+    // unavailable.
+    if (InitRecording() == -1)
+    {
+        ok = false;
+    }
+
+    if (StartRecording() == -1)
+    {
+        ok = false;
+    }
+
+    // Cancel the effect of the initialization above.
+    if (StopRecording() == -1)
+    {
+        ok = false;
+    }
+
+    available = ok;
+    return 0;
+}
+
+// Initializes the playout side:
+//  - initializes the speaker and probes whether capture/render share one
+//    device (_twoDevices, via MicrophoneIsAvailable()),
+//  - reads and validates the output device's stream format,
+//  - on a MacBook Pro, detects internal speakers to enable right-panning,
+//  - sets up the desired 16-bit linear PCM format and an AudioConverter,
+//  - configures the device buffer size and estimates render latency,
+//  - installs property listeners and, when required, the device IOProc.
+// Returns 0 on success (including when already initialized) and -1 on
+// failure, when playout is running, or when no output device is specified.
+WebRtc_Word32 AudioDeviceMac::InitPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Cannot (re)initialize while playout is active.
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    // Already initialized; nothing more to do.
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    if (!MicrophoneIsInitialized())
+    {
+        // Make this call to check if we are using
+        // one or two devices (_twoDevices)
+        bool available = false;
+        if (MicrophoneIsAvailable(available) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  MicrophoneIsAvailable() failed");
+        }
+    }
+
+    // Drop any stale render data from a previous session.
+    PaUtil_FlushRingBuffer(_paRenderBuffer);
+
+    // |err| is used by the WEBRTC_CA_* error-checking macros below.
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    _renderDelayOffsetSamples = 0;
+    _renderDelayUs = 0;
+    _renderLatencyUs = 0;
+    _renderDeviceIsAlive = 1;
+    _doStop = false;
+
+    // The internal microphone of a MacBook Pro is located under the left speaker
+    // grille. When the internal speakers are in use, we want to fully stereo
+    // pan to the right.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyDataSource,
+                kAudioDevicePropertyScopeOutput, 0 };
+    if (_macBookPro)
+    {
+        _macBookProPanRight = false;
+        Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                     &propertyAddress);
+        if (hasProperty)
+        {
+            UInt32 dataSource = 0;
+            size = sizeof(dataSource);
+            WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(_outputDeviceID,
+                    &propertyAddress, 0, NULL, &size, &dataSource));
+
+            // 'ispk' identifies the built-in internal speakers.
+            if (dataSource == 'ispk')
+            {
+                _macBookProPanRight = true;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id,
+                             "MacBook Pro using internal speakers; stereo"
+                             " panning right");
+            } else
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "MacBook Pro not using internal speakers");
+            }
+
+            // Add a listener to detect data-source changes (e.g. headphones
+            // plugged in or removed).
+            WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,
+                    &propertyAddress, &objectListenerProc, this));
+        }
+    }
+
+    // Get the current stream format of the output device.
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));
+    size = sizeof(_outStreamFormat);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &_outStreamFormat));
+
+    // Only interleaved linear PCM with a supported channel count is
+    // accepted.
+    if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Unacceptable output stream format -> mFormatID",
+                 (const char *) &_outStreamFormat.mFormatID);
+        return -1;
+    }
+
+    if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Too many channels on device -> mChannelsPerFrame = %d",
+                     _outStreamFormat.mChannelsPerFrame);
+        return -1;
+    }
+
+    if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Non-interleaved audio data is not supported.",
+                     "AudioHardware streams should not have this format.");
+        return -1;
+    }
+
+    // Log the device's native stream format for diagnostics.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "Ouput stream format:");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mSampleRate = %f, mChannelsPerFrame = %u",
+                 _outStreamFormat.mSampleRate,
+                 _outStreamFormat.mChannelsPerFrame);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mBytesPerPacket = %u, mFramesPerPacket = %u",
+                 _outStreamFormat.mBytesPerPacket,
+                 _outStreamFormat.mFramesPerPacket);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mBytesPerFrame = %u, mBitsPerChannel = %u",
+                 _outStreamFormat.mBytesPerFrame,
+                 _outStreamFormat.mBitsPerChannel);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mFormatFlags = %u, mChannelsPerFrame = %u",
+                 _outStreamFormat.mFormatFlags,
+                 _outStreamFormat.mChannelsPerFrame);
+    logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+             (const char *) &_outStreamFormat.mFormatID);
+
+    // Our preferred format to work with 
+    _outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;
+    if (_outStreamFormat.mChannelsPerFrame >= 2 && (_playChannels == 2))
+    {
+        _outDesiredFormat.mChannelsPerFrame = 2;
+    } else
+    {
+        // Disable stereo playout when we only have one channel on the device.
+        _outDesiredFormat.mChannelsPerFrame = 1;
+        _playChannels = 1;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Stereo playout unavailable on this device");
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+        _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);
+    }
+
+    // Offset between the ring-buffer capacity and the engine's playout
+    // buffers; used later when estimating the render delay.
+    _renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT
+        * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
+
+    _outDesiredFormat.mBytesPerPacket = _outDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _outDesiredFormat.mFramesPerPacket = 1; // In uncompressed audio, 
+    // a packet is one frame.
+    _outDesiredFormat.mBytesPerFrame = _outDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
+
+    _outDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
+        | kLinearPCMFormatFlagIsPacked;
+#ifdef WEBRTC_BIG_ENDIAN
+    _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
+#endif
+    _outDesiredFormat.mFormatID = kAudioFormatLinearPCM;
+
+    // Converter from our desired format to the device's stream format.
+    WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_outDesiredFormat, &_outStreamFormat,
+            &_renderConverter));
+
+    // First try to set buffer size to desired value (_playBufDelayFixed)
+    UInt32 bufByteCount = (UInt32)((_outStreamFormat.mSampleRate / 1000.0)
+        * _playBufDelayFixed * _outStreamFormat.mChannelsPerFrame
+        * sizeof(Float32));
+    if (_outStreamFormat.mFramesPerPacket != 0)
+    {
+        // Round up to a whole number of packets.
+        if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0)
+        {
+            bufByteCount = ((UInt32)(bufByteCount
+                / _outStreamFormat.mFramesPerPacket) + 1)
+                * _outStreamFormat.mFramesPerPacket;
+        }
+    }
+
+    // Ensure the buffer size is within the acceptable range provided by the device.
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
+    AudioValueRange range;
+    size = sizeof(range);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &range));
+    if (range.mMinimum > bufByteCount)
+    {
+        bufByteCount = range.mMinimum;
+    } else if (range.mMaximum < bufByteCount)
+    {
+        bufByteCount = range.mMaximum;
+    }
+
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
+    size = sizeof(bufByteCount);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, size, &bufByteCount));
+
+    // Get render device latency
+    propertyAddress.mSelector = kAudioDevicePropertyLatency;
+    UInt32 latency = 0;
+    size = sizeof(UInt32);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _renderLatencyUs = (WebRtc_UWord32) ((1.0e6 * latency)
+        / _outStreamFormat.mSampleRate);
+
+    // Get render stream latency and add it to the device latency.
+    propertyAddress.mSelector = kAudioDevicePropertyStreams;
+    AudioStreamID stream = 0;
+    size = sizeof(AudioStreamID);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &stream));
+    propertyAddress.mSelector = kAudioStreamPropertyLatency;
+    size = sizeof(UInt32);
+    latency = 0;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _renderLatencyUs += (WebRtc_UWord32) ((1.0e6 * latency)
+        / _outStreamFormat.mSampleRate);
+
+    // Listen for format changes
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(_outputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Listen for processor overloads
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Create the IOProc unless recording already installed a shared one on
+    // the same device.
+    if (_twoDevices || !_recIsInitialized)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_outputDeviceID,
+                deviceIOProc, this, &_deviceIOProcID));
+    }
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  initial playout status: _renderDelayOffsetSamples=%d,"
+                 " _renderDelayUs=%d, _renderLatencyUs=%d",
+                 _renderDelayOffsetSamples, _renderDelayUs, _renderLatencyUs);
+
+    return 0;
+}
+
+// Initializes the recording side:
+//  - initializes the microphone and probes whether capture/render share a
+//    single device (_twoDevices, via SpeakerIsAvailable()),
+//  - reads and validates the input device's stream format,
+//  - sets up the desired 16-bit linear PCM format and an AudioConverter,
+//  - configures the device buffer size and estimates capture latency,
+//  - installs property listeners and the appropriate IOProc (dedicated
+//    input IOProc for two devices, shared IOProc otherwise).
+// Returns 0 on success (including when already initialized) and -1 on
+// failure, when recording is running, or when no input device is specified.
+WebRtc_Word32 AudioDeviceMac::InitRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Cannot (re)initialize while recording is active.
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    // Already initialized; nothing more to do.
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    if (!SpeakerIsInitialized())
+    {
+        // Make this call to check if we are using
+        // one or two devices (_twoDevices)
+        bool available = false;
+        if (SpeakerIsAvailable(available) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  SpeakerIsAvailable() failed");
+        }
+    }
+
+    // |err| is used by the WEBRTC_CA_* error-checking macros below.
+    OSStatus err = noErr;
+    UInt32 size = 0;
+
+    // Drop any stale capture data from a previous session.
+    PaUtil_FlushRingBuffer(_paCaptureBuffer);
+
+    _captureDelayUs = 0;
+    _captureLatencyUs = 0;
+    _captureDeviceIsAlive = 1;
+    _doStopRec = false;
+
+    // Get the current stream format of the input device.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyStreamFormat,
+                kAudioDevicePropertyScopeInput, 0 };
+    memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));
+    size = sizeof(_inStreamFormat);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &_inStreamFormat));
+
+    // Only linear PCM with a supported channel count is accepted.
+    if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Unacceptable input stream format -> mFormatID",
+                 (const char *) &_inStreamFormat.mFormatID);
+        return -1;
+    }
+
+    if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     ", Too many channels on device (mChannelsPerFrame = %d)",
+                     _inStreamFormat.mChannelsPerFrame);
+        return -1;
+    }
+
+    // Log the device's native stream format for diagnostics.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " Input stream format:");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mSampleRate = %f, mChannelsPerFrame = %u",
+                 _inStreamFormat.mSampleRate, _inStreamFormat.mChannelsPerFrame);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mBytesPerPacket = %u, mFramesPerPacket = %u",
+                 _inStreamFormat.mBytesPerPacket,
+                 _inStreamFormat.mFramesPerPacket);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mBytesPerFrame = %u, mBitsPerChannel = %u",
+                 _inStreamFormat.mBytesPerFrame,
+                 _inStreamFormat.mBitsPerChannel);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mFormatFlags = %u, mChannelsPerFrame = %u",
+                 _inStreamFormat.mFormatFlags,
+                 _inStreamFormat.mChannelsPerFrame);
+    logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+             (const char *) &_inStreamFormat.mFormatID);
+
+    // Our preferred format to work with
+    if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2))
+    {
+        _inDesiredFormat.mChannelsPerFrame = 2;
+    } else
+    {
+        // Disable stereo recording when we only have one channel on the device.
+        _inDesiredFormat.mChannelsPerFrame = 1;
+        _recChannels = 1;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Stereo recording unavailable on this device");
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update audio buffer with the selected parameters
+        _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+        _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);
+    }
+
+    _inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;
+    _inDesiredFormat.mBytesPerPacket = _inDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    // In uncompressed audio, a packet is one frame.
+    _inDesiredFormat.mFramesPerPacket = 1;
+    _inDesiredFormat.mBytesPerFrame = _inDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
+
+    _inDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
+        | kLinearPCMFormatFlagIsPacked;
+#ifdef WEBRTC_BIG_ENDIAN
+    _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
+#endif
+    _inDesiredFormat.mFormatID = kAudioFormatLinearPCM;
+
+    // Converter from the device's stream format to our desired format.
+    WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,
+            &_captureConverter));
+
+    // First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)
+    // TODO(xians): investigate this block.
+    UInt32 bufByteCount = (UInt32)((_inStreamFormat.mSampleRate / 1000.0)
+        * 10.0 * N_BLOCKS_IO * _inStreamFormat.mChannelsPerFrame
+        * sizeof(Float32));
+    if (_inStreamFormat.mFramesPerPacket != 0)
+    {
+        // Round up to a whole number of packets.
+        if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0)
+        {
+            bufByteCount = ((UInt32)(bufByteCount
+                / _inStreamFormat.mFramesPerPacket) + 1)
+                * _inStreamFormat.mFramesPerPacket;
+        }
+    }
+
+    // Ensure the buffer size is within the acceptable range provided by the device.
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
+    AudioValueRange range;
+    size = sizeof(range);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &range));
+    if (range.mMinimum > bufByteCount)
+    {
+        bufByteCount = range.mMinimum;
+    } else if (range.mMaximum < bufByteCount)
+    {
+        bufByteCount = range.mMaximum;
+    }
+
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
+    size = sizeof(bufByteCount);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, size, &bufByteCount));
+
+    // Get capture device latency
+    propertyAddress.mSelector = kAudioDevicePropertyLatency;
+    UInt32 latency = 0;
+    size = sizeof(UInt32);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _captureLatencyUs = (UInt32)((1.0e6 * latency)
+        / _inStreamFormat.mSampleRate);
+
+    // Get capture stream latency and add it to the device latency.
+    propertyAddress.mSelector = kAudioDevicePropertyStreams;
+    AudioStreamID stream = 0;
+    size = sizeof(AudioStreamID);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &stream));
+    propertyAddress.mSelector = kAudioStreamPropertyLatency;
+    size = sizeof(UInt32);
+    latency = 0;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _captureLatencyUs += (UInt32)((1.0e6 * latency)
+        / _inStreamFormat.mSampleRate);
+
+    // Listen for format changes
+    // TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Listen for processor overloads
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Two devices: dedicated input IOProc. Shared device: only create the
+    // shared IOProc if playout has not already done so.
+    if (_twoDevices)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,
+                inDeviceIOProc, this, &_inDeviceIOProcID));
+    } else if (!_playIsInitialized)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,
+                deviceIOProc, this, &_deviceIOProcID));
+    }
+
+    // Mark recording side as initialized
+    _recIsInitialized = true;
+
+    return 0;
+}
+
+// Starts audio capture. Requires InitRecording() to have succeeded and the
+// module to be initialized (worker thread created). Starts the capture
+// worker thread, then the device via the appropriate IOProc: the dedicated
+// input IOProc for separate devices, or the shared IOProc when the same
+// device is used and playout has not already started it.
+// Returns 0 on success (also if already recording), -1 on failure.
+WebRtc_Word32 AudioDeviceMac::StartRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    // Already recording; nothing to do.
+    if (_recording)
+    {
+        return 0;
+    }
+
+    if (!_initialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Recording worker thread has not been started");
+        return -1;
+    }
+
+    // |err| is used by the WEBRTC_CA_* error-checking macros below.
+    OSStatus err = noErr;
+
+    // Start the worker thread that consumes captured audio.
+    unsigned int threadID(0);
+    if (_captureWorkerThread != NULL)
+    {
+        _captureWorkerThread->Start(threadID);
+    }
+    _captureWorkerThreadId = threadID;
+
+    // With a shared device that is already playing, the IOProc is running;
+    // no extra AudioDeviceStart() is needed.
+    if (_twoDevices)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));
+    } else if (!_playing)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _deviceIOProcID));
+    }
+
+    _recording = true;
+
+    return 0;
+}
+
+// Stops audio capture and tears down capture resources. The IOProc is asked
+// to stop itself via |_doStopRec| (dedicated input IOProc) or |_doStop|
+// (shared IOProc), and we wait up to 2 s — outside the lock, to avoid
+// deadlocking with the IOProc — for confirmation; on timeout the device is
+// stopped and the IOProc destroyed directly. Afterwards the capture worker
+// thread is stopped, the capture converter disposed, and the property
+// listeners removed. Returns 0, also when recording was never initialized.
+WebRtc_Word32 AudioDeviceMac::StopRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    // |err| is used by the WEBRTC_CA_* error-checking macros below.
+    OSStatus err = noErr;
+
+    // Stop device
+    int32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);
+    if (_twoDevices)
+    {
+        if (_recording && captureDeviceIsAlive == 1)
+        {
+            _recording = false;
+            _doStopRec = true; // Signal to io proc to stop audio device
+            _critSect.Leave(); // Cannot be under lock, risk of deadlock
+            if (kEventTimeout == _stopEventRec.Wait(2000))
+            {
+                CriticalSectionScoped critScoped(_critSect);
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                             " Timed out stopping the capture IOProc. "
+                             "We may have failed to detect a device removal.");
+
+                // Force-stop the device since the IOProc did not confirm.
+                WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID,
+                                                   _inDeviceIOProcID));
+                WEBRTC_CA_LOG_WARN(
+                    AudioDeviceDestroyIOProcID(_inputDeviceID,
+                                               _inDeviceIOProcID));
+            }
+            _critSect.Enter();
+            _doStopRec = false;
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         " Recording stopped");
+        }
+    }
+    else
+    {
+        // We signal a stop for a shared device even when rendering has
+        // not yet ended. This is to ensure the IOProc will return early as
+        // intended (by checking |_recording|) before accessing
+        // resources we free below (e.g. the capture converter).
+        //
+        // In the case of a shared device, the IOProc will verify
+        // rendering has ended before stopping itself.
+        if (_recording && captureDeviceIsAlive == 1)
+        {
+            _recording = false;
+            _doStop = true; // Signal to io proc to stop audio device
+            _critSect.Leave(); // Cannot be under lock, risk of deadlock
+            if (kEventTimeout == _stopEvent.Wait(2000))
+            {
+                CriticalSectionScoped critScoped(_critSect);
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                             " Timed out stopping the shared IOProc. "
+                             "We may have failed to detect a device removal.");
+
+                // We assume rendering on a shared device has stopped as well if
+                // the IOProc times out.
+                WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID,
+                                                   _deviceIOProcID));
+                WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
+                                                              _deviceIOProcID));
+            }
+            _critSect.Enter();
+            _doStop = false;
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         " Recording stopped (shared)");
+        }
+    }
+
+    // Setting this signal will allow the worker thread to be stopped.
+    AtomicSet32(&_captureDeviceIsAlive, 0);
+    // Release the lock while joining the worker thread to avoid deadlock.
+    _critSect.Leave();
+    if (_captureWorkerThread != NULL)
+    {
+        if (!_captureWorkerThread->Stop())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " Timed out waiting for the render worker thread to "
+                             "stop.");
+        }
+    }
+    _critSect.Enter();
+
+    WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
+
+    // Remove listeners.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyStreamFormat,
+                kAudioDevicePropertyScopeInput, 0 };
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    _recIsInitialized = false;
+    _recording = false;
+
+    return 0;
+}
+
+// True once InitRecording() has completed successfully.
+bool AudioDeviceMac::RecordingIsInitialized() const
+{
+    return _recIsInitialized;
+}
+
+// True while audio capture is running.
+bool AudioDeviceMac::Recording() const
+{
+    return _recording;
+}
+
+// True once InitPlayout() has completed successfully.
+bool AudioDeviceMac::PlayoutIsInitialized() const
+{
+    return _playIsInitialized;
+}
+
+// Starts audio playout. Requires InitPlayout() to have succeeded. Starts
+// the render worker thread, then the output device — unless a shared
+// device is already running for recording, in which case its IOProc is
+// already active. Returns 0 on success (also if already playing), -1 if
+// playout is not initialized.
+WebRtc_Word32 AudioDeviceMac::StartPlayout()
+{
+    
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    // Already playing; nothing to do.
+    if (_playing)
+    {
+        return 0;
+    }
+
+    // |err| is used by the WEBRTC_CA_RETURN_ON_ERR macro below.
+    OSStatus err = noErr;
+
+    // Start the worker thread that produces render audio.
+    unsigned int threadID(0);
+    if (_renderWorkerThread != NULL)
+    {
+        _renderWorkerThread->Start(threadID);
+    }
+    _renderWorkerThreadId = threadID;
+
+    // With a shared device that is already recording, the IOProc is
+    // running; no extra AudioDeviceStart() is needed.
+    if (_twoDevices || !_recording)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));
+    }
+    _playing = true;
+
+    return 0;
+}
+
// Stops audio playout: signals the IOProc to stop, waits for it, stops the
// render worker thread, disposes the render converter and removes property
// listeners. Returns 0 on success (including when playout was never
// initialized), -1 is never returned from this path.
WebRtc_Word32 AudioDeviceMac::StopPlayout()
{

    CriticalSectionScoped lock(_critSect);

    // Nothing to stop if playout was never initialized.
    if (!_playIsInitialized)
    {
        return 0;
    }

    // |err| is required by the WEBRTC_CA_LOG_WARN macros below.
    OSStatus err = noErr;

    int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);
    if (_playing && renderDeviceIsAlive == 1)
    {
        // We signal a stop for a shared device even when capturing has not
        // yet ended. This is to ensure the IOProc will return early as
        // intended (by checking |_playing|) before accessing resources we
        // free below (e.g. the render converter).
        //
        // In the case of a shared device, the IOProc will verify capturing
        // has ended before stopping itself.
        _playing = false;
        _doStop = true; // Signal to io proc to stop audio device
        _critSect.Leave(); // Cannot be under lock, risk of deadlock
        if (kEventTimeout == _stopEvent.Wait(2000))
        {
            CriticalSectionScoped critScoped(_critSect);
            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                         " Timed out stopping the render IOProc. "
                         "We may have failed to detect a device removal.");

            // We assume capturing on a shared device has stopped as well if the
            // IOProc times out.
            WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID,
                                               _deviceIOProcID));
            WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
                                                          _deviceIOProcID));
        }
        _critSect.Enter();
        _doStop = false;
        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                     "Playout stopped");
    }

    // Setting this signal will allow the worker thread to be stopped.
    AtomicSet32(&_renderDeviceIsAlive, 0);
    // Stop() blocks until the worker thread exits; it must not be called
    // while holding the lock the thread may be waiting on.
    _critSect.Leave();
    if (_renderWorkerThread != NULL)
    {
        if (!_renderWorkerThread->Stop())
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         " Timed out waiting for the render worker thread to "
                         "stop.");
        }
    }
    _critSect.Enter();

    // The render converter is recreated on the next InitPlayout().
    WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));

    // Remove listeners.
    AudioObjectPropertyAddress propertyAddress = {
            kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput,
            0 };
    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
            &propertyAddress, &objectListenerProc, this));

    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
            &propertyAddress, &objectListenerProc, this));

    if (_macBookPro)
    {
        // NOTE(review): at this point |propertyAddress.mSelector| is still
        // kAudioDeviceProcessorOverload, so HasProperty tests that selector
        // before switching to kAudioDevicePropertyDataSource below --
        // presumably the DataSource selector was intended; confirm.
        Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
                                                     &propertyAddress);
        if (hasProperty)
        {
            propertyAddress.mSelector = kAudioDevicePropertyDataSource;
            WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
                    &propertyAddress, &objectListenerProc, this));
        }
    }

    _playIsInitialized = false;
    _playing = false;

    return 0;
}
+
+WebRtc_Word32 AudioDeviceMac::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
+    delayMS = static_cast<WebRtc_UWord16> (1e-3 * (renderDelayUs
+        + _renderLatencyUs) + 0.5);
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
+    delayMS = static_cast<WebRtc_UWord16> (1e-3 * (captureDelayUs
+        + _captureLatencyUs) + 0.5);
+    return 0;
+}
+
+bool AudioDeviceMac::Playing() const
+{
+    return (_playing);
+}
+
+WebRtc_Word32 AudioDeviceMac::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType type,
+    WebRtc_UWord16 sizeMS)
+{
+
+    if (type != AudioDeviceModule::kFixedBufferSize)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Adaptive buffer size not supported on this platform");
+        return -1;
+    }
+
+    _playBufType = type;
+    _playBufDelayFixed = sizeMS;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const
+{
+
+    type = _playBufType;
+    sizeMS = _playBufDelayFixed;
+
+    return 0;
+}
+
+// Not implemented for Mac.
+WebRtc_Word32 AudioDeviceMac::CPULoad(WebRtc_UWord16& /*load*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+
+    return -1;
+}
+
+bool AudioDeviceMac::PlayoutWarning() const
+{
+    return (_playWarning > 0);
+}
+
+bool AudioDeviceMac::PlayoutError() const
+{
+    return (_playError > 0);
+}
+
+bool AudioDeviceMac::RecordingWarning() const
+{
+    return (_recWarning > 0);
+}
+
+bool AudioDeviceMac::RecordingError() const
+{
+    return (_recError > 0);
+}
+
+void AudioDeviceMac::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+void AudioDeviceMac::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+void AudioDeviceMac::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+void AudioDeviceMac::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+WebRtc_Word32
+AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,
+                                 AudioDeviceID scopedDeviceIds[],
+                                 const WebRtc_UWord32 deviceListLength)
+{
+    OSStatus err = noErr;
+
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
+            kAudioObjectPropertyElementMaster };
+    UInt32 size = 0;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject,
+            &propertyAddress, 0, NULL, &size));
+    if (size == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "No devices");
+        return 0;
+    }
+
+    AudioDeviceID* deviceIds = (AudioDeviceID*) malloc(size);
+    UInt32 numberDevices = size / sizeof(AudioDeviceID);
+    AudioBufferList* bufferList = NULL;
+    UInt32 numberScopedDevices = 0;
+
+    // First check if there is a default device and list it
+    UInt32 hardwareProperty = 0;
+    if (scope == kAudioDevicePropertyScopeOutput)
+    {
+        hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
+    } else
+    {
+        hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
+    }
+
+    AudioObjectPropertyAddress
+        propertyAddressDefault = { hardwareProperty,
+                kAudioObjectPropertyScopeGlobal,
+                kAudioObjectPropertyElementMaster };
+
+    AudioDeviceID usedID;
+    UInt32 uintSize = sizeof(UInt32);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+            &propertyAddressDefault, 0, NULL, &uintSize, &usedID));
+    if (usedID != kAudioDeviceUnknown)
+    {
+        scopedDeviceIds[numberScopedDevices] = usedID;
+        numberScopedDevices++;
+    } else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "GetNumberDevices(): Default device unknown");
+    }
+
+    // Then list the rest of the devices
+    bool listOK = true;
+
+    WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+            &propertyAddress, 0, NULL, &size, deviceIds));
+    if (err != noErr)
+    {
+        listOK = false;
+    } else
+    {
+        propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
+        propertyAddress.mScope = scope;
+        propertyAddress.mElement = 0;
+        for (UInt32 i = 0; i < numberDevices; i++)
+        {
+            // Check for input channels
+            WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(deviceIds[i],
+                    &propertyAddress, 0, NULL, &size));
+            if (err == kAudioHardwareBadDeviceError)
+            {
+                // This device doesn't actually exist; continue iterating.
+                continue;
+            } else if (err != noErr)
+            {
+                listOK = false;
+                break;
+            }
+
+            bufferList = (AudioBufferList*) malloc(size);
+            WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(deviceIds[i],
+                    &propertyAddress, 0, NULL, &size, bufferList));
+            if (err != noErr)
+            {
+                listOK = false;
+                break;
+            }
+
+            if (bufferList->mNumberBuffers > 0)
+            {
+                if (numberScopedDevices >= deviceListLength)
+                {
+                    WEBRTC_TRACE(kTraceError,
+                                 kTraceAudioDevice, _id,
+                                 "Device list is not long enough");
+                    listOK = false;
+                    break;
+                }
+
+                scopedDeviceIds[numberScopedDevices] = deviceIds[i];
+                numberScopedDevices++;
+            }
+
+            free(bufferList);
+            bufferList = NULL;
+        } // for
+    }
+
+    if (!listOK)
+    {
+        if (deviceIds)
+        {
+            free(deviceIds);
+            deviceIds = NULL;
+        }
+
+        if (bufferList)
+        {
+            free(bufferList);
+            bufferList = NULL;
+        }
+
+        return -1;
+    }
+
+    // Happy ending   
+    if (deviceIds)
+    {
+        free(deviceIds);
+        deviceIds = NULL;
+    }
+
+    return numberScopedDevices;
+}
+
+WebRtc_Word32
+AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,
+                              const WebRtc_UWord16 index,
+                              char* name)
+{
+    OSStatus err = noErr;
+    UInt32 len = kAdmMaxDeviceNameSize;
+    AudioDeviceID deviceIds[MaxNumberDevices];
+
+    int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices);
+    if (numberDevices < 0)
+    {
+        return -1;
+    } else if (numberDevices == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "No devices");
+        return -1;
+    }
+
+    // If the number is below the number of devices, assume it's "WEBRTC ID"
+    // otherwise assume it's a CoreAudio ID
+    AudioDeviceID usedID;
+
+    // Check if there is a default device
+    bool isDefaultDevice = false;
+    if (index == 0)
+    {
+        UInt32 hardwareProperty = 0;
+        if (scope == kAudioDevicePropertyScopeOutput)
+        {
+            hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
+        } else
+        {
+            hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
+        }
+        AudioObjectPropertyAddress propertyAddress = { hardwareProperty,
+                kAudioObjectPropertyScopeGlobal,
+                kAudioObjectPropertyElementMaster };
+        UInt32 size = sizeof(UInt32);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+                &propertyAddress, 0, NULL, &size, &usedID));
+        if (usedID == kAudioDeviceUnknown)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "GetDeviceName(): Default device unknown");
+        } else
+        {
+            isDefaultDevice = true;
+        }
+    }
+
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyDeviceName, scope, 0 };
+
+    if (isDefaultDevice)
+    {
+        char devName[len];
+
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID,
+                &propertyAddress, 0, NULL, &len, devName));
+
+        sprintf(name, "default (%s)", devName);
+    } else
+    {
+        if (index < numberDevices)
+        {
+            usedID = deviceIds[index];
+        } else
+        {
+            usedID = index;
+        }
+
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID,
+                &propertyAddress, 0, NULL, &len, name));
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::InitDevice(const WebRtc_UWord16 userDeviceIndex,
+                                         AudioDeviceID& deviceId,
+                                         const bool isInput)
+{
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    AudioObjectPropertyScope deviceScope;
+    AudioObjectPropertySelector defaultDeviceSelector;
+    AudioDeviceID deviceIds[MaxNumberDevices];
+
+    if (isInput)
+    {
+        deviceScope = kAudioDevicePropertyScopeInput;
+        defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;
+    } else
+    {
+        deviceScope = kAudioDevicePropertyScopeOutput;
+        defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;
+    }
+
+    AudioObjectPropertyAddress
+        propertyAddress = { defaultDeviceSelector,
+                kAudioObjectPropertyScopeGlobal,
+                kAudioObjectPropertyElementMaster };
+
+    // Get the actual device IDs
+    int numberDevices = GetNumberDevices(deviceScope, deviceIds,
+                                         MaxNumberDevices);
+    if (numberDevices < 0)
+    {
+        return -1;
+    } else if (numberDevices == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "InitDevice(): No devices");
+        return -1;
+    }
+
+    bool isDefaultDevice = false;
+    deviceId = kAudioDeviceUnknown;
+    if (userDeviceIndex == 0)
+    {
+        // Try to use default system device
+        size = sizeof(AudioDeviceID);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+                &propertyAddress, 0, NULL, &size, &deviceId));
+        if (deviceId == kAudioDeviceUnknown)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " No default device exists");
+        } else
+        {
+            isDefaultDevice = true;
+        }
+    }
+
+    if (!isDefaultDevice)
+    {
+        deviceId = deviceIds[userDeviceIndex];
+    }
+
+    // Obtain device name and manufacturer for logging.
+    // Also use this as a test to ensure a user-set device ID is valid. 
+    char devName[128];
+    char devManf[128];
+    memset(devName, 0, sizeof(devName));
+    memset(devManf, 0, sizeof(devManf));
+
+    propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
+    propertyAddress.mScope = deviceScope;
+    propertyAddress.mElement = 0;
+    size = sizeof(devName);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,
+            &propertyAddress, 0, NULL, &size, devName));
+
+    propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
+    size = sizeof(devManf);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,
+            &propertyAddress, 0, NULL, &size, devManf));
+
+    if (isInput)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Input device: %s %s", devManf, devName);
+    } else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Output device: %s %s", devManf, devName);
+    }
+
+    return 0;
+}
+
+OSStatus AudioDeviceMac::objectListenerProc(
+    AudioObjectID objectId,
+    UInt32 numberAddresses,
+    const AudioObjectPropertyAddress addresses[],
+    void* clientData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
+    assert(ptrThis != NULL);
+
+    ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);
+
+    // AudioObjectPropertyListenerProc functions are supposed to return 0
+    return 0;
+}
+
+OSStatus AudioDeviceMac::implObjectListenerProc(
+    const AudioObjectID objectId,
+    const UInt32 numberAddresses,
+    const AudioObjectPropertyAddress addresses[])
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "AudioDeviceMac::implObjectListenerProc()");
+    
+    for (UInt32 i = 0; i < numberAddresses; i++)
+    {
+        if (addresses[i].mSelector == kAudioHardwarePropertyDevices)
+        {
+            HandleDeviceChange();
+        } else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat)
+        {
+            HandleStreamFormatChange(objectId, addresses[i]);
+        } else if (addresses[i].mSelector == kAudioDevicePropertyDataSource)
+        {
+            HandleDataSourceChange(objectId, addresses[i]);
+        } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload)
+        {
+            HandleProcessorOverload(addresses[i]);
+        }
+    }
+
+    return 0;
+}
+
// Reacts to a change in the system's device list: probes whether the
// registered capture and render devices are still alive and, if not,
// tears down the related mixer state and flags an error for the module
// process thread. Returns 0 on success, -1 on an unexpected HAL error.
WebRtc_Word32 AudioDeviceMac::HandleDeviceChange()
{
    OSStatus err = noErr;

    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "kAudioHardwarePropertyDevices");

    // A device has changed. Check if our registered devices have been removed.
    // Ensure the devices have been initialized, meaning the IDs are valid.
    if (MicrophoneIsInitialized())
    {
        AudioObjectPropertyAddress propertyAddress = {
                kAudioDevicePropertyDeviceIsAlive,
                kAudioDevicePropertyScopeInput, 0 };
        UInt32 deviceIsAlive = 1;
        UInt32 size = sizeof(UInt32);
        err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0,
                                         NULL, &size, &deviceIsAlive);

        // A bad-device error from the query is treated the same as an
        // explicit "not alive" report.
        if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                         "Capture device is not alive (probably removed)");
            // Let the capture worker thread wind down and detach the mixer
            // from the dead device.
            AtomicSet32(&_captureDeviceIsAlive, 0);
            _mixerManager.CloseMicrophone();
            if (_recError == 1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
                             _id, "  pending recording error exists");
            }
            _recError = 1; // triggers callback from module process thread
        } else if (err != noErr)
        {
            logCAMsg(kTraceError, kTraceAudioDevice, _id,
                     "Error in AudioDeviceGetProperty()", (const char*) &err);
            return -1;
        }      
    }

    // Same probe for the render device.
    if (SpeakerIsInitialized())
    {
        AudioObjectPropertyAddress propertyAddress = {
                kAudioDevicePropertyDeviceIsAlive,
                kAudioDevicePropertyScopeOutput, 0 };
        UInt32 deviceIsAlive = 1;
        UInt32 size = sizeof(UInt32);
        err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0,
                                         NULL, &size, &deviceIsAlive);

        if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                         "Render device is not alive (probably removed)");
            AtomicSet32(&_renderDeviceIsAlive, 0);
            _mixerManager.CloseSpeaker();
            if (_playError == 1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
                             _id, "  pending playout error exists");
            }
            _playError = 1; // triggers callback from module process thread
        } else if (err != noErr)
        {
            logCAMsg(kTraceError, kTraceAudioDevice, _id,
                     "Error in AudioDeviceGetProperty()", (const char*) &err);
            return -1;
        }
    }

    return 0;
}
+
// Reacts to a stream-format change on the input or output device: reads
// the new format, validates it, updates the cached stream format and the
// desired-format channel count, refreshes the audio buffer parameters and
// recreates the relevant AudioConverter. Returns 0 on success, -1 if the
// new format is unusable.
WebRtc_Word32 AudioDeviceMac::HandleStreamFormatChange(
    const AudioObjectID objectId,
    const AudioObjectPropertyAddress propertyAddress)
{
    OSStatus err = noErr;

    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "Stream format changed");

    // Ignore notifications for devices we are not using.
    if (objectId != _inputDeviceID && objectId != _outputDeviceID)
    {
        return 0;
    }

    // Get the new device format
    AudioStreamBasicDescription streamFormat;
    UInt32 size = sizeof(streamFormat);
    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,
            &propertyAddress, 0, NULL, &size, &streamFormat));

    // Only linear PCM is supported.
    if (streamFormat.mFormatID != kAudioFormatLinearPCM)
    {
        logCAMsg(kTraceError, kTraceAudioDevice, _id,
                 "Unacceptable input stream format -> mFormatID",
                 (const char *) &streamFormat.mFormatID);
        return -1;
    }

    if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "Too many channels on device (mChannelsPerFrame = %d)",
                     streamFormat.mChannelsPerFrame);
        return -1;
    }

    // Log the full new format for diagnostics.
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "Stream format:");
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "mSampleRate = %f, mChannelsPerFrame = %u",
                 streamFormat.mSampleRate, streamFormat.mChannelsPerFrame);
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "mBytesPerPacket = %u, mFramesPerPacket = %u",
                 streamFormat.mBytesPerPacket, streamFormat.mFramesPerPacket);
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "mBytesPerFrame = %u, mBitsPerChannel = %u",
                 streamFormat.mBytesPerFrame, streamFormat.mBitsPerChannel);
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "mFormatFlags = %u, mChannelsPerFrame = %u",
                 streamFormat.mFormatFlags, streamFormat.mChannelsPerFrame);
    logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
             (const char *) &streamFormat.mFormatID);

    if (propertyAddress.mScope == kAudioDevicePropertyScopeInput)
    {
        memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));

        // Keep stereo capture only if both the device and the current
        // configuration support two channels.
        if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2))
        {
            _inDesiredFormat.mChannelsPerFrame = 2;
        } else
        {
            // Disable stereo recording when we only have one channel on the device.
            _inDesiredFormat.mChannelsPerFrame = 1;
            _recChannels = 1;
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                         "Stereo recording unavailable on this device");
        }

        if (_ptrAudioBuffer)
        {
            // Update audio buffer with the selected parameters
            _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
            _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);
        }

        // Recreate the converter with the new format
        // TODO(xians): make this thread safe
        WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_captureConverter));

        WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,
                &_captureConverter));
    } else
    {
        memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));

        // Keep stereo playout only if both the device and the current
        // configuration support two channels.
        if (_outStreamFormat.mChannelsPerFrame >= 2 && (_playChannels == 2))
        {
            _outDesiredFormat.mChannelsPerFrame = 2;
        } else
        {
            // Disable stereo playout when we only have one channel on the device.
            _outDesiredFormat.mChannelsPerFrame = 1;
            _playChannels = 1;
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                         "Stereo playout unavailable on this device");
        }

        if (_ptrAudioBuffer)
        {
            // Update audio buffer with the selected parameters
            _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
            _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);
        }

        // Recompute the delay offset implied by the ring buffer headroom
        // for the new channel count.
        _renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT
            * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES
            * _outDesiredFormat.mChannelsPerFrame;

        // Recreate the converter with the new format
        // TODO(xians): make this thread safe
        WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_renderConverter));

        WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_outDesiredFormat, &streamFormat,
                &_renderConverter));
    }

    return 0;
}
+
+WebRtc_Word32 AudioDeviceMac::HandleDataSourceChange(
+    const AudioObjectID objectId,
+    const AudioObjectPropertyAddress propertyAddress)
+{
+    OSStatus err = noErr;
+
+    if (_macBookPro && propertyAddress.mScope
+        == kAudioDevicePropertyScopeOutput)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Data source changed");
+
+        _macBookProPanRight = false;
+        UInt32 dataSource = 0;
+        UInt32 size = sizeof(UInt32);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,
+                &propertyAddress, 0, NULL, &size, &dataSource));
+        if (dataSource == 'ispk')
+        {
+            _macBookProPanRight = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "MacBook Pro using internal speakers; stereo panning right");
+        } else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "MacBook Pro not using internal speakers");
+        }
+    }
+
+    return 0;
+}
+WebRtc_Word32 AudioDeviceMac::HandleProcessorOverload(
+    const AudioObjectPropertyAddress propertyAddress)
+{
+    // TODO(xians): we probably want to notify the user in some way of the
+    // overload. However, the Windows interpretations of these errors seem to
+    // be more severe than what ProcessorOverload is thrown for.
+    //
+    // We don't log the notification, as it's sent from the HAL's IO thread. We
+    // don't want to slow it down even further.
+    if (propertyAddress.mScope == kAudioDevicePropertyScopeInput)
+    {
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Capture processor
+        // overload");
+        //_callback->ProblemIsReported(
+        // SndCardStreamObserver::ERecordingProblem);
+    } else
+    {
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+        // "Render processor overload");
+        //_callback->ProblemIsReported(
+        // SndCardStreamObserver::EPlaybackProblem);
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID, const AudioTimeStamp*,
+                                      const AudioBufferList* inputData,
+                                      const AudioTimeStamp* inputTime,
+                                      AudioBufferList* outputData,
+                                      const AudioTimeStamp* outputTime,
+                                      void *clientData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
+    assert(ptrThis != NULL);
+
+    ptrThis->implDeviceIOProc(inputData, inputTime, outputData, outputTime);
+
+    // AudioDeviceIOProc functions are supposed to return 0
+    return 0;
+}
+
+OSStatus AudioDeviceMac::outConverterProc(AudioConverterRef,
+                                          UInt32 *numberDataPackets,
+                                          AudioBufferList *data,
+                                          AudioStreamPacketDescription **,
+                                          void *userData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) userData;
+    assert(ptrThis != NULL);
+
+    return ptrThis->implOutConverterProc(numberDataPackets, data);
+}
+
+OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID, const AudioTimeStamp*,
+                                        const AudioBufferList* inputData,
+                                        const AudioTimeStamp* inputTime,
+                                        AudioBufferList*,
+                                        const AudioTimeStamp*, void* clientData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
+    assert(ptrThis != NULL);
+
+    ptrThis->implInDeviceIOProc(inputData, inputTime);
+
+    // AudioDeviceIOProc functions are supposed to return 0
+    return 0;
+}
+
+OSStatus AudioDeviceMac::inConverterProc(
+    AudioConverterRef,
+    UInt32 *numberDataPackets,
+    AudioBufferList *data,
+    AudioStreamPacketDescription ** /*dataPacketDescription*/,
+    void *userData)
+{
+    AudioDeviceMac *ptrThis = static_cast<AudioDeviceMac*> (userData);
+    assert(ptrThis != NULL);
+
+    return ptrThis->implInConverterProc(numberDataPackets, data);
+}
+
// Render-side IOProc body (also drives capture when a single shared device
// is used). Handles the stop handshake with StopPlayout()/StopRecording(),
// fills |outputData| via the render converter and updates the render delay
// estimate. Runs on the HAL's realtime IO thread.
OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList *inputData,
                                          const AudioTimeStamp *inputTime,
                                          AudioBufferList *outputData,
                                          const AudioTimeStamp *outputTime)
{
    OSStatus err = noErr;
    UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);
    UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());

    // On a shared device this single IOProc also services capture.
    if (!_twoDevices && _recording)
    {
        implInDeviceIOProc(inputData, inputTime);
    }

    // Check if we should close down audio device
    // Double-checked locking optimization to remove locking overhead
    if (_doStop)
    {
        _critSect.Enter();
        if (_doStop)
        {
            // For a shared device, only stop once neither side is active.
            if (_twoDevices || (!_recording && !_playing))
            {
               // In the case of a shared device, the single driving ioProc
               // is stopped here
               WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID,
                                                 _deviceIOProcID));
               WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
                                                             _deviceIOProcID));
               if (err == noErr)
               {
                  WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
                               _id, " Playout or shared device stopped");
               }
            }

            // Acknowledge the stop request; StopPlayout() waits on this
            // event.
            _doStop = false;
            _stopEvent.Set();
            _critSect.Leave();
            return 0;
        }
        _critSect.Leave();
    }

    if (!_playing)
    {
        // This can be the case when a shared device is capturing but not
        // rendering. We allow the checks above before returning to avoid a
        // timeout when capturing is stopped.
        return 0;
    }

    assert(_outStreamFormat.mBytesPerFrame != 0);
    // Number of device frames requested for this IO cycle.
    UInt32 size = outputData->mBuffers->mDataByteSize
        / _outStreamFormat.mBytesPerFrame;

    // Pull converted audio from the ring buffer via outConverterProc.
    // TODO(xians): signal an error somehow?
    err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,
                                          this, &size, outputData, NULL);
    if (err != noErr)
    {
        if (err == 1)
        {
            // This is our own error.
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         " Error in AudioConverterFillComplexBuffer()");
            return 1;
        } else
        {
            logCAMsg(kTraceError, kTraceAudioDevice, _id,
                     "Error in AudioConverterFillComplexBuffer()",
                     (const char *) &err);
            return 1;
        }
    }

    // Estimate the render delay: time until this buffer is presented plus
    // the audio still queued in the ring buffer.
    ring_buffer_size_t bufSizeSamples =
        PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);

    int32_t renderDelayUs = static_cast<int32_t> (1e-3 * (outputTimeNs - nowNs)
        + 0.5);
    renderDelayUs += static_cast<int32_t> ((1.0e6 * bufSizeSamples)
        / _outDesiredFormat.mChannelsPerFrame / _outDesiredFormat.mSampleRate
        + 0.5);

    AtomicSet32(&_renderDelayUs, renderDelayUs);

    return 0;
}
+
OSStatus AudioDeviceMac::implOutConverterProc(UInt32 *numberDataPackets,
                                              AudioBufferList *data)
{
    // Render-side converter input callback: supply the converter with data
    // drained from the render ring buffer, zero-padded when the ring buffer
    // holds fewer samples than requested.
    assert(data->mNumberBuffers == 1);

    const ring_buffer_size_t samplesRequested = *numberDataPackets
        * _outDesiredFormat.mChannelsPerFrame;

    AudioBuffer* outBuf = data->mBuffers;
    outBuf->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;
    // Always give the converter as much as it wants; anything the ring
    // buffer cannot supply stays as the zeroed (silent) padding below.
    outBuf->mDataByteSize = *numberDataPackets
        * _outDesiredFormat.mBytesPerPacket;
    outBuf->mData = _renderConvertData;
    memset(_renderConvertData, 0, sizeof(_renderConvertData));

    PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData,
                          samplesRequested);

    // Wake the render worker thread so it can refill the ring buffer.
    kern_return_t kernResult = semaphore_signal_all(_renderSemaphore);
    if (kernResult == KERN_SUCCESS)
    {
        return 0;
    }

    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " semaphore_signal_all() error: %d", kernResult);
    return 1;
}
+
// I/O proc for the input device when capture uses its own device. Runs on
// the Core Audio real-time thread: handles a pending stop request, records
// the capture-side delay estimate, copies the raw device samples into the
// capture ring buffer, and wakes the capture worker thread.
OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList *inputData,
                                            const AudioTimeStamp *inputTime)
{
    OSStatus err = noErr;
    // Host-time stamps in nanoseconds, used for the delay estimate below.
    UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);
    UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());

    // Check if we should close down audio device
    // Double-checked locking optimization to remove locking overhead
    // NOTE(review): _doStopRec is a plain bool, so the unlocked first read
    // is not formally data-race free; kept to avoid taking the lock on
    // every callback — confirm acceptable on the target memory model.
    if (_doStopRec)
    {
        _critSect.Enter();
        if (_doStopRec)
        {
            // This will be signalled only when a shared device is not in use.
            WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
            WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_inputDeviceID,
                                                          _inDeviceIOProcID));
            if (err == noErr)
            {
                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
                             _id, " Recording device stopped");
            }

            _doStopRec = false;
            // Signal that the stop request was handled — presumably awaited
            // by the stop path; verify against StopRecording().
            _stopEventRec.Set();
            _critSect.Leave();
            return 0;
        }
        _critSect.Leave();
    }

    if (!_recording)
    {
        // Allow above checks to avoid a timeout on stopping capture.
        return 0;
    }

    // Samples already queued in the ring buffer count toward the delay.
    ring_buffer_size_t bufSizeSamples =
        PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);

    // Capture delay (us, rounded) = age of this input buffer plus the
    // queued samples converted via channels and sample rate.
    int32_t captureDelayUs = static_cast<int32_t> (1e-3 * (nowNs - inputTimeNs)
        + 0.5);
    captureDelayUs
        += static_cast<int32_t> ((1.0e6 * bufSizeSamples)
            / _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mSampleRate
            + 0.5);

    AtomicSet32(&_captureDelayUs, captureDelayUs);

    assert(inputData->mNumberBuffers == 1);
    // Convert the buffer's byte size into interleaved sample count.
    ring_buffer_size_t numSamples = inputData->mBuffers->mDataByteSize
        * _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mBytesPerPacket;
    PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,
                           numSamples);

    // Wake the capture worker thread. A signalling failure is logged but
    // does not abort the callback.
    kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);
    if (kernErr != KERN_SUCCESS)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     " semaphore_signal_all() error: %d", kernErr);
    }

    return err;
}
+
// Converter input callback for the capture side. Blocks (with a timed
// semaphore wait) until the capture ring buffer holds the requested number
// of samples, then hands the converter a pointer directly into the ring
// buffer. Returns nonzero to abort conversion when the device dies.
OSStatus AudioDeviceMac::implInConverterProc(UInt32 *numberDataPackets,
                                             AudioBufferList *data)
{
    assert(data->mNumberBuffers == 1);
    ring_buffer_size_t numSamples = *numberDataPackets
        * _inStreamFormat.mChannelsPerFrame;

    while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples)
    {
        mach_timespec_t timeout;
        timeout.tv_sec = 0;
        // NOTE(review): despite the _MS suffix this constant is used as
        // tv_nsec, i.e. a timeout in nanoseconds — confirm intent in
        // audio_device_mac.h.
        timeout.tv_nsec = TIMER_PERIOD_MS;

        kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);
        if (kernErr == KERN_OPERATION_TIMED_OUT)
        {
            int32_t signal = AtomicGet32(&_captureDeviceIsAlive);
            if (signal == 0)
            {
                // The capture device is no longer alive; stop the worker thread.
                *numberDataPackets = 0;
                return 1;
            }
        } else if (kernErr != KERN_SUCCESS)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         " semaphore_wait() error: %d", kernErr);
        }
    }

    // Pass the read pointer directly to the converter to avoid a memcpy.
    // Only the first contiguous region is consumed: at a ring-buffer wrap
    // numSamples is reduced to that region's size here, and the converter
    // calls this proc again for the remainder.
    void* dummyPtr;
    ring_buffer_size_t dummySize;
    PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,
                                    &data->mBuffers->mData, &numSamples,
                                    &dummyPtr, &dummySize);
    PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);

    data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;
    // Report back how many packets we actually provided.
    *numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;
    data->mBuffers->mDataByteSize = *numberDataPackets
        * _inStreamFormat.mBytesPerPacket;

    return 0;
}
+
+bool AudioDeviceMac::RunRender(void* ptrThis)
+{
+    return static_cast<AudioDeviceMac*> (ptrThis)->RenderWorkerThread();
+}
+
// Render worker thread loop. Waits until the render ring buffer has room
// for one 10 ms block, pulls fresh PCM from the AudioDeviceBuffer, applies
// the MacBook Pro right-channel panning workaround when enabled, and
// writes the block into the ring buffer. Returns false to terminate the
// worker thread (dead device or invalid audio buffer), true to keep going.
bool AudioDeviceMac::RenderWorkerThread()
{
    ring_buffer_size_t numSamples = ENGINE_PLAY_BUF_SIZE_IN_SAMPLES
        * _outDesiredFormat.mChannelsPerFrame;
    // Wait for enough free space in the ring buffer; the delay offset
    // reserves part of the capacity.
    while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer)
        - _renderDelayOffsetSamples < numSamples)
    {
        mach_timespec_t timeout;
        timeout.tv_sec = 0;
        timeout.tv_nsec = TIMER_PERIOD_MS;

        kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);
        if (kernErr == KERN_OPERATION_TIMED_OUT)
        {
            int32_t signal = AtomicGet32(&_renderDeviceIsAlive);
            if (signal == 0)
            {
                // The render device is no longer alive; stop the worker thread.
                return false;
            }
        } else if (kernErr != KERN_SUCCESS)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         " semaphore_timedwait() error: %d", kernErr);
        }
    }

    // One 10 ms stereo block of 16-bit samples (4 bytes per frame).
    WebRtc_Word8 playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];

    if (!_ptrAudioBuffer)
    {
        // Fixed log text: this is the playout side, not capture.
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  playout AudioBuffer is invalid");
        return false;
    }

    // Ask for new PCM data to be played out using the AudioDeviceBuffer.
    WebRtc_UWord32 nSamples =
        _ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);

    nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
    if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES)
    {
        // Logged only; processing continues with whatever was returned.
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                     "  invalid number of output samples(%d)", nSamples);
    }

    WebRtc_UWord32 nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;

    SInt16 *pPlayBuffer = (SInt16 *) &playBuffer;
    if (_macBookProPanRight && (_playChannels == 2))
    {
        // Mix entirely into the right channel and zero the left channel.
        SInt32 sampleInt32 = 0;
        for (WebRtc_UWord32 sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx
            += 2)
        {
            // Average left and right, then clamp to the 16-bit range.
            sampleInt32 = pPlayBuffer[sampleIdx];
            sampleInt32 += pPlayBuffer[sampleIdx + 1];
            sampleInt32 /= 2;

            if (sampleInt32 > 32767)
            {
                sampleInt32 = 32767;
            } else if (sampleInt32 < -32768)
            {
                sampleInt32 = -32768;
            }

            pPlayBuffer[sampleIdx] = 0;
            pPlayBuffer[sampleIdx + 1] = static_cast<SInt16> (sampleInt32);
        }
    }

    PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);

    return true;
}
+
+bool AudioDeviceMac::RunCapture(void* ptrThis)
+{
+    return static_cast<AudioDeviceMac*> (ptrThis)->CaptureWorkerThread();
+}
+
// Capture worker thread loop. Pulls one 10 ms block of converted audio
// through the capture converter (which drains the ring buffer filled by
// the device IOProc), attaches delay and AGC metadata, and delivers the
// block to the registered observer. Returns false to terminate the thread.
bool AudioDeviceMac::CaptureWorkerThread()
{
    OSStatus err = noErr;
    UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES
        * _inDesiredFormat.mChannelsPerFrame;
    // NOTE(review): variable-length array — a compiler extension in C++;
    // the size is bounded by the constants in audio_device_mac.h.
    SInt16 recordBuffer[noRecSamples];
    UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;

    AudioBufferList engineBuffer;
    engineBuffer.mNumberBuffers = 1; // Interleaved channels.
    engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;
    engineBuffer.mBuffers->mDataByteSize = _inDesiredFormat.mBytesPerPacket
        * noRecSamples;
    engineBuffer.mBuffers->mData = recordBuffer;

    // Blocks inside inConverterProc until enough captured data is
    // available; a return of 1 from that proc means the device died.
    err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,
                                          this, &size, &engineBuffer, NULL);
    if (err != noErr)
    {
        if (err == 1)
        {
            // This is our own error.
            return false;
        } else
        {
            logCAMsg(kTraceError, kTraceAudioDevice, _id,
                     "Error in AudioConverterFillComplexBuffer()",
                     (const char *) &err);
            return false;
        }
    }

    // TODO(xians): what if the returned size is incorrect?
    if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES)
    {
        WebRtc_UWord32 currentMicLevel(0);
        WebRtc_UWord32 newMicLevel(0);
        WebRtc_Word32 msecOnPlaySide;
        WebRtc_Word32 msecOnRecordSide;

        int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
        int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);

        // Total one-way delays in ms: buffered delay plus device latency,
        // rounded to the nearest millisecond.
        msecOnPlaySide = static_cast<WebRtc_Word32> (1e-3 * (renderDelayUs
            + _renderLatencyUs) + 0.5);
        msecOnRecordSide = static_cast<WebRtc_Word32> (1e-3 * (captureDelayUs
            + _captureLatencyUs) + 0.5);

        if (!_ptrAudioBuffer)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                         "  capture AudioBuffer is invalid");
            return false;
        }

        // store the recorded buffer (no action will be taken if the
        // #recorded samples is not a full buffer)
        _ptrAudioBuffer->SetRecordedBuffer((WebRtc_Word8*) &recordBuffer,
                                           (WebRtc_UWord32) size);

        if (AGC())
        {
            // store current mic level in the audio buffer if AGC is enabled
            if (MicrophoneVolume(currentMicLevel) == 0)
            {
                // this call does not affect the actual microphone volume
                _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
            }
        }

        _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);

        // deliver recorded samples at specified sample rate, mic level etc.
        // to the observer using callback
        _ptrAudioBuffer->DeliverRecordedData();

        if (AGC())
        {
            newMicLevel = _ptrAudioBuffer->NewMicLevel();
            if (newMicLevel != 0)
            {
                // The VQE will only deliver non-zero microphone levels when
                // a change is needed.
                // Set this new mic level (received from the observer as return
                // value in the callback).
                WEBRTC_TRACE(kTraceStream, kTraceAudioDevice,
                             _id, "  AGC change of volume: old=%u => new=%u",
                             currentMicLevel, newMicLevel);
                if (SetMicrophoneVolume(newMicLevel) == -1)
                {
                    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                                 "  the required modification of the microphone "
                                 "volume failed");
                }
            }
        }
    }

    return true;
}
+
+} //  namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/mac/audio_device_mac.h b/trunk/src/modules/audio_device/main/source/mac/audio_device_mac.h
new file mode 100644
index 0000000..9dc8917
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/audio_device_mac.h
@@ -0,0 +1,399 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_MAC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_MAC_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+#include "audio_mixer_manager_mac.h"
+
+#include <CoreAudio/CoreAudio.h>
+#include <AudioToolbox/AudioConverter.h>
+#include <mach/semaphore.h>
+
+struct PaUtilRingBuffer;
+
+namespace webrtc
+{
+class EventWrapper;
+class ThreadWrapper;
+
// Native device sample rates used internally; conversion to and from the
// engine's desired formats is done with AudioConverter.
const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 48000;
const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 48000;

const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
const WebRtc_UWord32 N_PLAY_CHANNELS = 2; // default is stereo playout
// Used to size the capture ring buffer for multi-channel devices.
const WebRtc_UWord32 N_DEVICE_CHANNELS = 8;

// 10 ms engine block sizes, in samples per channel.
const WebRtc_UWord32 ENGINE_REC_BUF_SIZE_IN_SAMPLES = (N_REC_SAMPLES_PER_SEC
    / 100);
const WebRtc_UWord32 ENGINE_PLAY_BUF_SIZE_IN_SAMPLES = (N_PLAY_SAMPLES_PER_SEC
    / 100);

enum
{
    N_BLOCKS_IO = 2
};
enum
{
    N_BUFFERS_IN = 10
};
enum
{
    N_BUFFERS_OUT = 3
}; // Must be at least N_BLOCKS_IO

// NOTE(review): despite the _MS suffix, this value (40,000,000) is used as
// mach_timespec_t::tv_nsec in the worker threads, i.e. a 40 ms timeout
// expressed in nanoseconds — confirm the intended unit.
const WebRtc_UWord32 TIMER_PERIOD_MS = (2 * 10 * N_BLOCKS_IO * 1000000);

// Ring-buffer capacities, in interleaved samples (all channels).
const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = (ENGINE_REC_BUF_SIZE_IN_SAMPLES
    * N_DEVICE_CHANNELS * N_BUFFERS_IN);
const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES =
    (ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * N_PLAY_CHANNELS * N_BUFFERS_OUT);
+
+class AudioDeviceMac: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceMac(const WebRtc_Word32 id);
+    ~AudioDeviceMac();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32
+        ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        WebRtc_Word8 name[kAdmMaxDeviceNameSize],
+        WebRtc_Word8 guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32
+        MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32
+        SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+                         WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type,
+                                        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    void Lock()
+    {
+        _critSect.Enter();
+    }
+    ;
+    void UnLock()
+    {
+        _critSect.Leave();
+    }
+    ;
+    WebRtc_Word32 Id()
+    {
+        return _id;
+    }
+
+    static void AtomicSet32(int32_t* theValue, int32_t newValue);
+    static int32_t AtomicGet32(int32_t* theValue);
+
+    static void logCAMsg(const TraceLevel level,
+                         const TraceModule module,
+                         const WebRtc_Word32 id, const char *msg,
+                         const char *err);
+
+    WebRtc_Word32 GetNumberDevices(const AudioObjectPropertyScope scope,
+                                   AudioDeviceID scopedDeviceIds[],
+                                   const WebRtc_UWord32 deviceListLength);
+
+    WebRtc_Word32 GetDeviceName(const AudioObjectPropertyScope scope,
+                                const WebRtc_UWord16 index, char* name);
+
+    WebRtc_Word32 InitDevice(WebRtc_UWord16 userDeviceIndex,
+                             AudioDeviceID& deviceId, bool isInput);
+
+    static OSStatus
+        objectListenerProc(AudioObjectID objectId, UInt32 numberAddresses,
+                           const AudioObjectPropertyAddress addresses[],
+                           void* clientData);
+
+    OSStatus
+        implObjectListenerProc(AudioObjectID objectId, UInt32 numberAddresses,
+                               const AudioObjectPropertyAddress addresses[]);
+
+    WebRtc_Word32 HandleDeviceChange();
+
+    WebRtc_Word32
+        HandleStreamFormatChange(AudioObjectID objectId,
+                                 AudioObjectPropertyAddress propertyAddress);
+
+    WebRtc_Word32
+        HandleDataSourceChange(AudioObjectID objectId,
+                               AudioObjectPropertyAddress propertyAddress);
+
+    WebRtc_Word32
+        HandleProcessorOverload(AudioObjectPropertyAddress propertyAddress);
+
+private:
+    static OSStatus deviceIOProc(AudioDeviceID device,
+                                 const AudioTimeStamp *now,
+                                 const AudioBufferList *inputData,
+                                 const AudioTimeStamp *inputTime,
+                                 AudioBufferList *outputData,
+                                 const AudioTimeStamp* outputTime,
+                                 void *clientData);
+
+    static OSStatus
+        outConverterProc(AudioConverterRef audioConverter,
+                         UInt32 *numberDataPackets, AudioBufferList *data,
+                         AudioStreamPacketDescription **dataPacketDescription,
+                         void *userData);
+
+    static OSStatus inDeviceIOProc(AudioDeviceID device,
+                                   const AudioTimeStamp *now,
+                                   const AudioBufferList *inputData,
+                                   const AudioTimeStamp *inputTime,
+                                   AudioBufferList *outputData,
+                                   const AudioTimeStamp *outputTime,
+                                   void *clientData);
+
+    static OSStatus
+        inConverterProc(AudioConverterRef audioConverter,
+                        UInt32 *numberDataPackets, AudioBufferList *data,
+                        AudioStreamPacketDescription **dataPacketDescription,
+                        void *inUserData);
+
+    OSStatus implDeviceIOProc(const AudioBufferList *inputData,
+                              const AudioTimeStamp *inputTime,
+                              AudioBufferList *outputData,
+                              const AudioTimeStamp *outputTime);
+
+    OSStatus implOutConverterProc(UInt32 *numberDataPackets,
+                                  AudioBufferList *data);
+
+    OSStatus implInDeviceIOProc(const AudioBufferList *inputData,
+                                const AudioTimeStamp *inputTime);
+
+    OSStatus implInConverterProc(UInt32 *numberDataPackets,
+                                 AudioBufferList *data);
+
+    static bool RunCapture(void*);
+    static bool RunRender(void*);
+    bool CaptureWorkerThread();
+    bool RenderWorkerThread();
+
+private:
+    AudioDeviceBuffer* _ptrAudioBuffer;
+
+    CriticalSectionWrapper& _critSect;
+
+    EventWrapper& _stopEventRec;
+    EventWrapper& _stopEvent;
+
+    ThreadWrapper* _captureWorkerThread;
+    ThreadWrapper* _renderWorkerThread;
+    WebRtc_UWord32 _captureWorkerThreadId;
+    WebRtc_UWord32 _renderWorkerThreadId;
+
+    WebRtc_Word32 _id;
+
+    AudioMixerManagerMac _mixerManager;
+
+    WebRtc_UWord16 _inputDeviceIndex;
+    WebRtc_UWord16 _outputDeviceIndex;
+    AudioDeviceID _inputDeviceID;
+    AudioDeviceID _outputDeviceID;
+#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 1050
+    AudioDeviceIOProcID _inDeviceIOProcID;
+    AudioDeviceIOProcID _deviceIOProcID;
+#endif
+    bool _inputDeviceIsSpecified;
+    bool _outputDeviceIsSpecified;
+
+    WebRtc_UWord8 _recChannels;
+    WebRtc_UWord8 _playChannels;
+
+    Float32* _captureBufData;
+    SInt16* _renderBufData;
+
+    SInt16 _renderConvertData[PLAY_BUF_SIZE_IN_SAMPLES];
+
+    AudioDeviceModule::BufferType _playBufType;
+
+private:
+    bool _initialized;
+    bool _isShutDown;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _startRec;
+    bool _stopRec;
+    bool _stopPlay;
+    bool _AGC;
+
+    // Atomically set varaibles
+    int32_t _renderDeviceIsAlive;
+    int32_t _captureDeviceIsAlive;
+
+    bool _twoDevices;
+    bool _doStop; // For play if not shared device or play+rec if shared device
+    bool _doStopRec; // For rec if not shared device
+    bool _macBookPro;
+    bool _macBookProPanRight;
+    bool _stereoRender;
+    bool _stereoRenderRequested;
+
+    AudioConverterRef _captureConverter;
+    AudioConverterRef _renderConverter;
+
+    AudioStreamBasicDescription _outStreamFormat;
+    AudioStreamBasicDescription _outDesiredFormat;
+    AudioStreamBasicDescription _inStreamFormat;
+    AudioStreamBasicDescription _inDesiredFormat;
+
+    WebRtc_UWord32 _captureLatencyUs;
+    WebRtc_UWord32 _renderLatencyUs;
+
+    // Atomically set variables
+    mutable int32_t _captureDelayUs;
+    mutable int32_t _renderDelayUs;
+
+    WebRtc_Word32 _renderDelayOffsetSamples;
+
+private:
+    WebRtc_UWord16 _playBufDelay; // playback delay
+    WebRtc_UWord16 _playBufDelayFixed; // fixed playback delay
+
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    PaUtilRingBuffer* _paCaptureBuffer;
+    PaUtilRingBuffer* _paRenderBuffer;
+
+    semaphore_t _renderSemaphore;
+    semaphore_t _captureSemaphore;
+
+    WebRtc_UWord32 _captureBufSizeSamples;
+    WebRtc_UWord32 _renderBufSizeSamples;
+};
+
+} //  namespace webrtc
+
#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_MAC_H
diff --git a/trunk/src/modules/audio_device/main/source/mac/audio_device_utility_mac.cc b/trunk/src/modules/audio_device/main/source/mac/audio_device_utility_mac.cc
new file mode 100644
index 0000000..94bdf26
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/audio_device_utility_mac.cc
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_mac.h"
+#include "audio_device_config.h"    // DEBUG_PRINT()
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
// Constructs the Mac utility helper. Allocates the critical section that
// the destructor later deletes; no device access happens here.
AudioDeviceUtilityMac::AudioDeviceUtilityMac(const WebRtc_Word32 id) :
    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _id(id),
    _lastError(AudioDeviceModule::kAdmErrNone)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
                 "%s created", __FUNCTION__);
}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceUtilityMac() - dtor
+// ----------------------------------------------------------------------------
+
AudioDeviceUtilityMac::~AudioDeviceUtilityMac()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
                 "%s destroyed", __FUNCTION__);
    {
        // Take the lock once before deleting it, so any cleanup added here
        // runs inside the critical section.
        CriticalSectionScoped lock(_critSect);

        // free stuff here...
    }

    // _critSect was allocated by CreateCriticalSection() in the ctor.
    delete &_critSect;
}
+
+WebRtc_Word32 AudioDeviceUtilityMac::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "OS X");
+
+    return 0;
+}
+
+} //  namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/mac/audio_device_utility_mac.h b/trunk/src/modules/audio_device/main/source/mac/audio_device_utility_mac.h
new file mode 100644
index 0000000..ccb3d99
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/audio_device_utility_mac.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_MAC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_MAC_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
// Mac implementation of AudioDeviceUtility. Only Init() does real work
// (it logs OS information); the class mainly satisfies the platform
// utility interface.
class AudioDeviceUtilityMac: public AudioDeviceUtility
{
public:
    AudioDeviceUtilityMac(const WebRtc_Word32 id);
    ~AudioDeviceUtilityMac();

    virtual WebRtc_Word32 Init();

private:
    CriticalSectionWrapper& _critSect;  // owned; deleted in the dtor
    WebRtc_Word32 _id;  // trace identifier passed to WEBRTC_TRACE
    AudioDeviceModule::ErrorCode _lastError;  // last error code; set in ctor
};
+
+} //  namespace webrtc
+
#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_MAC_H
diff --git a/trunk/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.cc b/trunk/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.cc
new file mode 100644
index 0000000..40850d4
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.cc
@@ -0,0 +1,1167 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_mixer_manager_mac.h"
+#include "trace.h"
+
+#include <unistd.h>             // getpid()
+
+namespace webrtc {
+
+// Evaluates |expr| (a CoreAudio call), stores the result in a local
+// OSStatus |err| that must be in scope, and on failure logs the failing
+// expression (the OSStatus is passed as raw bytes so logCAMsg can render
+// it as a four-char code) and returns -1 from the enclosing function.
+#define WEBRTC_CA_RETURN_ON_ERR(expr)                                     \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,    \
+                "Error in " #expr, (const char *)&err);                 \
+            return -1;                                                  \
+        }                                                               \
+    } while(0)
+
+// Same as WEBRTC_CA_RETURN_ON_ERR but only logs at error level;
+// execution continues after a failure.
+#define WEBRTC_CA_LOG_ERR(expr)                                           \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,    \
+                "Error in " #expr, (const char *)&err);                 \
+        }                                                               \
+    } while(0)
+
+// Same as WEBRTC_CA_LOG_ERR but logs at warning level.
+#define WEBRTC_CA_LOG_WARN(expr)                                           \
+    do {                                                                 \
+        err = expr;                                                      \
+        if (err != noErr) {                                              \
+            logCAMsg(kTraceWarning, kTraceAudioDevice, _id,  \
+                "Error in " #expr, (const char *)&err);                  \
+        }                                                                \
+    } while(0)
+
+// Creates the mixer manager with no devices selected. The critical
+// section is heap-allocated here and released in the destructor.
+AudioMixerManagerMac::AudioMixerManagerMac(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _inputDeviceID(kAudioObjectUnknown),
+    _outputDeviceID(kAudioObjectUnknown),
+    _noInputChannels(0),
+    _noOutputChannels(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s constructed", __FUNCTION__);
+}
+
+// Closes both devices and frees the critical section allocated in the
+// constructor (stored as a reference, hence delete of its address).
+AudioMixerManagerMac::~AudioMixerManagerMac()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destructed", __FUNCTION__);
+
+    Close();
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//	                                PUBLIC METHODS
+// ============================================================================
+
+// Releases both the render and the capture side of the mixer manager.
+WebRtc_Word32 AudioMixerManagerMac::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Forget the output device first, then the input device.
+    CloseSpeaker();
+    CloseMicrophone();
+
+    return 0;
+}
+
+// Forgets the selected render device and its cached channel count.
+WebRtc_Word32 AudioMixerManagerMac::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    _noOutputChannels = 0;
+    _outputDeviceID = kAudioObjectUnknown;
+
+    return 0;
+}
+
+// Forgets the selected capture device and its cached channel count.
+WebRtc_Word32 AudioMixerManagerMac::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    _noInputChannels = 0;
+    _inputDeviceID = kAudioObjectUnknown;
+
+    return 0;
+}
+
+// Selects |deviceID| as the render device for mixer control. Fails if
+// another process has exclusive (hog) access to the device. On success
+// the device's channel count is cached for the per-channel fallbacks
+// used by the volume/mute methods. Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::OpenSpeaker(AudioDeviceID deviceID)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::OpenSpeaker(id=%d)", deviceID);
+
+    CriticalSectionScoped lock(_critSect);
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    pid_t hogPid = -1;
+
+    _outputDeviceID = deviceID;
+
+    // Check which process, if any, has hogged the device.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyHogMode,
+            kAudioDevicePropertyScopeOutput, 0 };
+
+    size = sizeof(hogPid);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &hogPid));
+
+    // NOTE: the original logs said "input device" here; this is the
+    // output (render) device path.
+    if (hogPid == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " No process has hogged the output device");
+    }
+    // getpid() is apparently "always successful"
+    else if (hogPid == getpid())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Our process has hogged the output device");
+    } else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Another process (pid = %d) has hogged the output device",
+                     static_cast<int> (hogPid));
+
+        return -1;
+    }
+
+    // Get the stream format, to be able to read the number of channels.
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    AudioStreamBasicDescription streamFormat;
+    size = sizeof(AudioStreamBasicDescription);
+    memset(&streamFormat, 0, size);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &streamFormat));
+
+    _noOutputChannels = streamFormat.mChannelsPerFrame;
+
+    return 0;
+}
+
+// Selects |deviceID| as the capture device for mixer control. Fails if
+// another process has exclusive (hog) access to the device. On success
+// the device's channel count is cached for the per-channel fallbacks
+// used by the volume/mute methods. Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::OpenMicrophone(AudioDeviceID deviceID)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::OpenMicrophone(id=%d)", deviceID);
+
+    CriticalSectionScoped lock(_critSect);
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    pid_t hogPid = -1;
+
+    _inputDeviceID = deviceID;
+
+    // Check which process, if any, has hogged the device. 
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyHogMode,
+            kAudioDevicePropertyScopeInput, 0 };
+    size = sizeof(hogPid);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &hogPid));
+    if (hogPid == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " No process has hogged the input device");
+    }
+    // getpid() is apparently "always successful"
+    else if (hogPid == getpid())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Our process has hogged the input device");
+    } else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Another process (pid = %d) has hogged the input device",
+                     static_cast<int> (hogPid));
+
+        return -1;
+    }
+
+    // get number of channels from stream format
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+
+    // Get the stream format, to be able to read the number of channels.
+    AudioStreamBasicDescription streamFormat;
+    size = sizeof(AudioStreamBasicDescription);
+    memset(&streamFormat, 0, size);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &streamFormat));
+
+    _noInputChannels = streamFormat.mChannelsPerFrame;
+
+    return 0;
+}
+
+// A speaker is considered initialized once OpenSpeaker() has stored a
+// valid render device ID.
+bool AudioMixerManagerMac::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return kAudioObjectUnknown != _outputDeviceID;
+}
+
+// A microphone is considered initialized once OpenMicrophone() has
+// stored a valid capture device ID.
+bool AudioMixerManagerMac::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return kAudioObjectUnknown != _inputDeviceID;
+}
+
+// Sets the playout volume. |volume| is in [0,255] and is mapped
+// linearly onto the CoreAudio scalar volume range [0.0,1.0]. Uses the
+// device's master volume control when available; otherwise sets each
+// channel that is settable. Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetSpeakerVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    bool success = false;
+
+    // volume range is 0.0 - 1.0, convert from 0 -255
+    const Float32 vol = (Float32)(volume / 255.0);
+
+    assert(vol <= 1.0 && vol >= 0.0);
+
+    // Does the render device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
+            0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(vol);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, size, &vol));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(vol);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                    &propertyAddress, 0, NULL, size, &vol));
+            // Only record success when a channel volume was actually
+            // applied; previously |success| was set unconditionally,
+            // masking the "no settable channel" failure below.
+            success = true;
+        }
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set a volume on any output channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+// Retrieves the current playout volume, converted from the CoreAudio
+// scalar range [0.0,1.0] to [0,255]. Uses the master volume control
+// when present; otherwise reports the average across all channels that
+// expose a volume property. Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    Float32 channelVol = 0;
+    Float32 vol = 0;
+
+    // Does the device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
+            0 };
+    Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(vol);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, &size, &vol));
+
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> (vol * 255 + 0.5);
+    } else
+    {
+        // Otherwise get the average volume across channels.
+        vol = 0;
+        for (UInt32 i = 1; i <= _noOutputChannels; i++)
+        {
+            channelVol = 0;
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelVol);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelVol));
+
+                vol += channelVol;
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get a volume on any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> (255 * vol / channels + 0.5);
+    }
+
+    // Log the converted 0-255 value. Previously a Float32 was passed
+    // for a %i specifier, which is undefined behavior in varargs and
+    // printed garbage.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::SpeakerVolume() => vol=%u",
+                 volume);
+
+    return 0;
+}
+
+// Reports the maximum playout volume in the module's 0-255 scale.
+// Requires an open render device; returns 0 on success, -1 otherwise.
+WebRtc_Word32
+AudioMixerManagerMac::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    maxVolume = 255;
+
+    return 0;
+}
+
+// Reports the minimum playout volume in the module's 0-255 scale.
+// Requires an open render device; returns 0 on success, -1 otherwise.
+WebRtc_Word32
+AudioMixerManagerMac::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    minVolume = 0;
+
+    return 0;
+}
+
+// Reports the smallest volume increment in the module's 0-255 scale.
+// Requires an open render device; returns 0 on success, -1 otherwise.
+WebRtc_Word32
+AudioMixerManagerMac::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    stepSize = 1;
+
+    return 0;
+}
+
+// Checks whether the playout volume can be set, either via a master
+// volume control or, failing that, on every individual output channel.
+// Sets |available| accordingly; returns 0 on success, -1 on failure
+// (no device selected, or some channel is not settable).
+WebRtc_Word32 AudioMixerManagerMac::SpeakerVolumeIsAvailable(bool& available)
+{
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
+            0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Volume cannot be set for output channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+// Checks whether playout mute can be set, either via a master mute
+// control or, failing that, on every individual output channel. Sets
+// |available| accordingly; returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SpeakerMuteIsAvailable(bool& available)
+{
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeOutput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Mute cannot be set for output channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+// Mutes or unmutes playout. Uses the render device's master mute
+// control when available; otherwise mutes each settable channel.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetSpeakerMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    UInt32 mute = enable ? 1 : 0;
+    bool success = false;
+
+    // Does the render device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeOutput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(mute);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, size, &mute));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(mute);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                    &propertyAddress, 0, NULL, size, &mute));
+            // Only record success when a channel mute was actually
+            // applied; previously |success| was set unconditionally,
+            // masking the "no settable channel" failure below.
+            success = true;
+        }
+    }
+
+    if (!success)
+    {
+        // Message corrected: this is the output (render) path.
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set mute on any output channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+// Reports whether playout is muted. Reads the master mute control when
+// present; otherwise the speaker is considered muted only if every
+// channel that exposes a mute property is muted. Returns 0 on success,
+// -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SpeakerMute(bool& enabled) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    UInt32 channelMuted = 0;
+    UInt32 muted = 0;
+
+    // Does the device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeOutput, 0 };
+    Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(muted);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, &size, &muted));
+
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    } else
+    {
+        // Otherwise check if all channels are muted. Start from "muted"
+        // and AND in each channel; previously |muted| was reset to 0 on
+        // every iteration, so the result was always false.
+        muted = 1;
+        for (UInt32 i = 1; i <= _noOutputChannels; i++)
+        {
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelMuted);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelMuted));
+
+                muted = (muted && channelMuted);
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get mute for any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    }
+
+    // Format fixed: the argument was previously inside the string
+    // literal, leaving %d with no matching argument.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::SpeakerMute() => enabled=%d",
+                 enabled);
+
+    return 0;
+}
+
+// Stereo playout is available exactly when the selected render device
+// reports two channels in its stream format.
+WebRtc_Word32 AudioMixerManagerMac::StereoPlayoutIsAvailable(bool& available)
+{
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    available = (2 == _noOutputChannels);
+    return 0;
+}
+
+// Stereo recording is available exactly when the selected capture
+// device reports two channels in its stream format.
+WebRtc_Word32 AudioMixerManagerMac::StereoRecordingIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    available = (2 == _noInputChannels);
+    return 0;
+}
+
+// Checks whether capture mute can be set, either via a master mute
+// control or, failing that, on every individual input channel. Sets
+// |available| accordingly; returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneMuteIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            // Message corrected: this is the input (capture) path.
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Mute cannot be set for input channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+// Mutes or unmutes capture. Uses the capture device's master mute
+// control when available; otherwise mutes each settable channel.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetMicrophoneMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    UInt32 mute = enable ? 1 : 0;
+    bool success = false;
+
+    // Does the capture device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(mute);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, size, &mute));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(mute);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                    &propertyAddress, 0, NULL, size, &mute));
+            // Only record success when a channel mute was actually
+            // applied; previously |success| was set unconditionally,
+            // masking the "no settable channel" failure below.
+            success = true;
+        }
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set mute on any input channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+// Reports whether capture is muted. Reads the master mute control when
+// present; otherwise the microphone is considered muted only if every
+// channel that exposes a mute property is muted. Returns 0 on success,
+// -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneMute(bool& enabled) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    UInt32 channelMuted = 0;
+    UInt32 muted = 0;
+
+    // Does the device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeInput, 0 };
+    Boolean hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(muted);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, &size, &muted));
+
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    } else
+    {
+        // Otherwise check if all channels are muted. Start from "muted"
+        // and AND in each channel; previously |muted| was reset to 0 on
+        // every iteration, so the result was always false.
+        muted = 1;
+        for (UInt32 i = 1; i <= _noInputChannels; i++)
+        {
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelMuted);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelMuted));
+
+                muted = (muted && channelMuted);
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get mute for any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::MicrophoneMute() => enabled=%d",
+                 enabled);
+
+    return 0;
+}
+
+// Microphone boost is never available: CoreAudio exposes no
+// AudioObjectPropertySelector for it. Returns 0 on success, -1 if no
+// capture device has been opened.
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneBoostIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    available = false; // No AudioObjectPropertySelector value for Mic Boost
+
+    return 0;
+}
+
+// Attempts to enable/disable microphone boost. Since boost is never
+// available on this platform (see MicrophoneBoostIsAvailable), this
+// always logs a warning and returns -1 when a device is open.
+WebRtc_Word32 AudioMixerManagerMac::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetMicrophoneBoost(enable=%u)", enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // Ensure that the selected microphone has a valid boost control.
+    bool available(false);
+    MicrophoneBoostIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to enable microphone boost");
+        return -1;
+    }
+
+    // It is assumed that the call above fails!
+    return 0;
+}
+
+// Reports the boost state; always false since boost cannot be enabled
+// on this platform. Returns 0 on success, -1 if no device is open.
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled on this platform!
+    enabled = false;
+
+    return 0;
+}
+
+// Checks whether the capture volume can be set, either via a master
+// volume control or, failing that, on every individual input channel.
+// Sets |available| accordingly; returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneVolumeIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyVolumeScalar,
+                kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Volume cannot be set for input channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+// Sets the capture volume. |volume| is in [0,255] and is mapped
+// linearly onto the CoreAudio scalar volume range [0.0,1.0]. Uses the
+// device's master volume control when available; otherwise sets each
+// channel that is settable. Returns 0 on success, -1 on failure.
+WebRtc_Word32 AudioMixerManagerMac::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetMicrophoneVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    bool success = false;
+
+    // volume range is 0.0 - 1.0, convert from 0 - 255
+    const Float32 vol = (Float32)(volume / 255.0);
+
+    assert(vol <= 1.0 && vol >= 0.0);
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyVolumeScalar,
+                kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(vol);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, size, &vol));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(vol);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                    &propertyAddress, 0, NULL, size, &vol));
+            // Only record success when a channel volume was actually
+            // applied; previously |success| was set unconditionally,
+            // masking the "no settable channel" failure below.
+            success = true;
+        }
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set a level on any input channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    Float32 channelVol = 0;
+    Float32 volFloat32 = 0;
+
+    // Does the device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyVolumeScalar,
+                kAudioDevicePropertyScopeInput, 0 };
+    Boolean hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(volFloat32);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, &size, &volFloat32));
+
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> (volFloat32 * 255 + 0.5);
+    } else
+    {
+        // Otherwise get the average volume across channels.
+        volFloat32 = 0;
+        for (UInt32 i = 1; i <= _noInputChannels; i++)
+        {
+            channelVol = 0;
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelVol);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelVol));
+
+                volFloat32 += channelVol;
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get a level on any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> 
+            (255 * volFloat32 / channels + 0.5);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::MicrophoneVolume() => vol=%u",
+                 volume);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    maxVolume = 255;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    minVolume = 0;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to a step size of 1 on the 0 - 255 scale
+    stepSize = 1;
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// CoreAudio errors are best interpreted as four character strings.
+void AudioMixerManagerMac::logCAMsg(const TraceLevel level,
+                                    const TraceModule module,
+                                    const WebRtc_Word32 id, const char *msg,
+                                    const char *err)
+{
+    assert(msg != NULL);
+    assert(err != NULL);
+
+#ifdef WEBRTC_BIG_ENDIAN
+    WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
+#else
+    // We need to flip the characters in this case.
+    WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err
+        + 2, err + 1, err);
+#endif
+}
+
+} // namespace webrtc
+// EOF
diff --git a/trunk/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.h b/trunk/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.h
new file mode 100644
index 0000000..7209f91
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_MAC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_MAC_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+
+#include <CoreAudio/CoreAudio.h>
+
+namespace webrtc {
+	
+class AudioMixerManagerMac
+{
+public:
+    WebRtc_Word32 OpenSpeaker(AudioDeviceID deviceID);
+    WebRtc_Word32 OpenMicrophone(AudioDeviceID deviceID);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+
+public:
+    AudioMixerManagerMac(const WebRtc_Word32 id);
+    ~AudioMixerManagerMac();
+
+private:
+    static void logCAMsg(const TraceLevel level,
+                         const TraceModule module,
+                         const WebRtc_Word32 id, const char *msg,
+                         const char *err);
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+
+    AudioDeviceID _inputDeviceID;
+    AudioDeviceID _outputDeviceID;
+
+    WebRtc_UWord16 _noInputChannels;
+    WebRtc_UWord16 _noOutputChannels;
+
+};
+	
+} //namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_MAC_H
diff --git a/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_memorybarrier.h b/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_memorybarrier.h
new file mode 100644
index 0000000..f689622
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_memorybarrier.h
@@ -0,0 +1,127 @@
+/*
+ * $Id: pa_memorybarrier.h 1240 2007-07-17 13:05:07Z bjornroche $
+ * Portable Audio I/O Library
+ * Memory barrier utilities
+ *
+ * Author: Bjorn Roche, XO Audio, LLC
+ *
+ * This program uses the PortAudio Portable Audio Library.
+ * For more information see: http://www.portaudio.com
+ * Copyright (c) 1999-2000 Ross Bencina and Phil Burk
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+ * The text above constitutes the entire PortAudio license; however, 
+ * the PortAudio community also makes the following non-binding requests:
+ *
+ * Any person wishing to distribute modifications to the Software is
+ * requested to send the modifications to the original developer so that
+ * they can be incorporated into the canonical version. It is also 
+ * requested that these non-binding requests be included along with the 
+ * license above.
+ */
+
+/**
+ @file pa_memorybarrier.h
+ @ingroup common_src
+*/
+
+/****************
+ * Some memory barrier primitives based on the system.
+ * right now only OS X, FreeBSD, and Linux are supported. In addition to providing
+ * memory barriers, these functions should ensure that data cached in registers
+ * is written out to cache where it can be snooped by other CPUs. (ie, the volatile
+ * keyword should not be required)
+ *
+ * the primitives that must be defined are:
+ *
+ * PaUtil_FullMemoryBarrier()
+ * PaUtil_ReadMemoryBarrier()
+ * PaUtil_WriteMemoryBarrier()
+ *
+ ****************/
+
+#if defined(__APPLE__)
+#   include <libkern/OSAtomic.h>
+    /* Here are the memory barrier functions. Mac OS X only provides
+       full memory barriers, so the three types of barriers are the same,
+       however, these barriers are superior to compiler-based ones. */
+#   define PaUtil_FullMemoryBarrier()  OSMemoryBarrier()
+#   define PaUtil_ReadMemoryBarrier()  OSMemoryBarrier()
+#   define PaUtil_WriteMemoryBarrier() OSMemoryBarrier()
+#elif defined(__GNUC__)
+    /* GCC >= 4.1 has built-in intrinsics. We'll use those */
+#   if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)
+#      define PaUtil_FullMemoryBarrier()  __sync_synchronize()
+#      define PaUtil_ReadMemoryBarrier()  __sync_synchronize()
+#      define PaUtil_WriteMemoryBarrier() __sync_synchronize()
+    /* as a fallback, GCC understands volatile asm and "memory" to mean it
+     * should not reorder memory read/writes */
+    /* Note that it is not clear that any compiler actually defines __PPC__,
+     * it can probably removed safely. */
+#   elif defined( __ppc__ ) || defined( __powerpc__) || defined( __PPC__ )
+#      define PaUtil_FullMemoryBarrier()  asm volatile("sync":::"memory")
+#      define PaUtil_ReadMemoryBarrier()  asm volatile("sync":::"memory")
+#      define PaUtil_WriteMemoryBarrier() asm volatile("sync":::"memory")
+#   elif defined( __i386__ ) || defined( __i486__ ) || defined( __i586__ ) || \
+         defined( __i686__ ) || defined( __x86_64__ )
+#      define PaUtil_FullMemoryBarrier()  asm volatile("mfence":::"memory")
+#      define PaUtil_ReadMemoryBarrier()  asm volatile("lfence":::"memory")
+#      define PaUtil_WriteMemoryBarrier() asm volatile("sfence":::"memory")
+#   else
+#      ifdef ALLOW_SMP_DANGERS
+#         warning Memory barriers not defined on this system or system unknown
+#         warning For SMP safety, you should fix this.
+#         define PaUtil_FullMemoryBarrier()
+#         define PaUtil_ReadMemoryBarrier()
+#         define PaUtil_WriteMemoryBarrier()
+#      else
+#         error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed.
+#      endif
+#   endif
+#elif (_MSC_VER >= 1400) && !defined(_WIN32_WCE)
+#   include <intrin.h>
+#   pragma intrinsic(_ReadWriteBarrier)
+#   pragma intrinsic(_ReadBarrier)
+#   pragma intrinsic(_WriteBarrier)
+#   define PaUtil_FullMemoryBarrier()  _ReadWriteBarrier()
+#   define PaUtil_ReadMemoryBarrier()  _ReadBarrier()
+#   define PaUtil_WriteMemoryBarrier() _WriteBarrier()
+#elif defined(_WIN32_WCE)
+#   define PaUtil_FullMemoryBarrier()
+#   define PaUtil_ReadMemoryBarrier()
+#   define PaUtil_WriteMemoryBarrier()
+#elif defined(_MSC_VER) || defined(__BORLANDC__)
+#   define PaUtil_FullMemoryBarrier()  _asm { lock add    [esp], 0 }
+#   define PaUtil_ReadMemoryBarrier()  _asm { lock add    [esp], 0 }
+#   define PaUtil_WriteMemoryBarrier() _asm { lock add    [esp], 0 }
+#else
+#   ifdef ALLOW_SMP_DANGERS
+#      warning Memory barriers not defined on this system or system unknown
+#      warning For SMP safety, you should fix this.
+#      define PaUtil_FullMemoryBarrier()
+#      define PaUtil_ReadMemoryBarrier()
+#      define PaUtil_WriteMemoryBarrier()
+#   else
+#      error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed.
+#   endif
+#endif
diff --git a/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.c b/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.c
new file mode 100644
index 0000000..310d719
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.c
@@ -0,0 +1,227 @@
+/*
+ * $Id: pa_ringbuffer.c 1421 2009-11-18 16:09:05Z bjornroche $
+ * Portable Audio I/O Library
+ * Ring Buffer utility.
+ *
+ * Author: Phil Burk, http://www.softsynth.com
+ * modified for SMP safety on Mac OS X by Bjorn Roche
+ * modified for SMP safety on Linux by Leland Lucius
+ * also, allowed for const where possible
+ * modified for multiple-byte-sized data elements by Sven Fischer 
+ *
+ * Note that this is safe only for a single-thread reader and a
+ * single-thread writer.
+ *
+ * This program uses the PortAudio Portable Audio Library.
+ * For more information see: http://www.portaudio.com
+ * Copyright (c) 1999-2000 Ross Bencina and Phil Burk
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+ * The text above constitutes the entire PortAudio license; however, 
+ * the PortAudio community also makes the following non-binding requests:
+ *
+ * Any person wishing to distribute modifications to the Software is
+ * requested to send the modifications to the original developer so that
+ * they can be incorporated into the canonical version. It is also 
+ * requested that these non-binding requests be included along with the 
+ * license above.
+ */
+
+/**
+ @file
+ @ingroup common_src
+*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include "pa_ringbuffer.h"
+#include <string.h>
+#include "pa_memorybarrier.h"
+
+/***************************************************************************
+ * Initialize FIFO.
+ * elementCount must be power of 2, returns -1 if not.
+ */
+ring_buffer_size_t PaUtil_InitializeRingBuffer( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementSizeBytes, ring_buffer_size_t elementCount, void *dataPtr )
+{
+    if( ((elementCount-1) & elementCount) != 0) return -1; /* Not Power of two. */
+    rbuf->bufferSize = elementCount;
+    rbuf->buffer = (char *)dataPtr;
+    PaUtil_FlushRingBuffer( rbuf );
+    rbuf->bigMask = (elementCount*2)-1;
+    rbuf->smallMask = (elementCount)-1;
+    rbuf->elementSizeBytes = elementSizeBytes;
+    return 0;
+}
+
+/***************************************************************************
+** Return number of elements available for reading. */
+ring_buffer_size_t PaUtil_GetRingBufferReadAvailable( PaUtilRingBuffer *rbuf )
+{
+    PaUtil_ReadMemoryBarrier();
+    return ( (rbuf->writeIndex - rbuf->readIndex) & rbuf->bigMask );
+}
+/***************************************************************************
+** Return number of elements available for writing. */
+ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( PaUtilRingBuffer *rbuf )
+{
+    /* Since we are calling PaUtil_GetRingBufferReadAvailable, we don't need an additional MB */
+    return ( rbuf->bufferSize - PaUtil_GetRingBufferReadAvailable(rbuf));
+}
+
+/***************************************************************************
+** Clear buffer. Should only be called when buffer is NOT being read. */
+void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf )
+{
+    rbuf->writeIndex = rbuf->readIndex = 0;
+}
+
+/***************************************************************************
+** Get address of region(s) to which we can write data.
+** If the region is contiguous, size2 will be zero.
+** If non-contiguous, size2 will be the size of second region.
+** Returns room available to be written or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                       void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                       void **dataPtr2, ring_buffer_size_t *sizePtr2 )
+{
+    ring_buffer_size_t   index;
+    ring_buffer_size_t   available = PaUtil_GetRingBufferWriteAvailable( rbuf );
+    if( elementCount > available ) elementCount = available;
+    /* Check to see if write is not contiguous. */
+    index = rbuf->writeIndex & rbuf->smallMask;
+    if( (index + elementCount) > rbuf->bufferSize )
+    {
+        /* Write data in two blocks that wrap the buffer. */
+        ring_buffer_size_t   firstHalf = rbuf->bufferSize - index;
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = firstHalf;
+        *dataPtr2 = &rbuf->buffer[0];
+        *sizePtr2 = elementCount - firstHalf;
+    }
+    else
+    {
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = elementCount;
+        *dataPtr2 = NULL;
+        *sizePtr2 = 0;
+    }
+    return elementCount;
+}
+
+
+/***************************************************************************
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount )
+{
+    /* we need to ensure that previous writes are seen before we update the write index */
+    PaUtil_WriteMemoryBarrier();
+    return rbuf->writeIndex = (rbuf->writeIndex + elementCount) & rbuf->bigMask;
+}
+
+/***************************************************************************
+** Get address of region(s) from which we can read data.
+** If the region is contiguous, size2 will be zero.
+** If non-contiguous, size2 will be the size of second region.
+** Returns room available to be read or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferReadRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                void **dataPtr2, ring_buffer_size_t *sizePtr2 )
+{
+    ring_buffer_size_t   index;
+    ring_buffer_size_t   available = PaUtil_GetRingBufferReadAvailable( rbuf );
+    if( elementCount > available ) elementCount = available;
+    /* Check to see if read is not contiguous. */
+    index = rbuf->readIndex & rbuf->smallMask;
+    if( (index + elementCount) > rbuf->bufferSize )
+    {
+        /* Read data in two blocks that wrap the buffer. */
+        ring_buffer_size_t firstHalf = rbuf->bufferSize - index;
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = firstHalf;
+        *dataPtr2 = &rbuf->buffer[0];
+        *sizePtr2 = elementCount - firstHalf;
+    }
+    else
+    {
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = elementCount;
+        *dataPtr2 = NULL;
+        *sizePtr2 = 0;
+    }
+    return elementCount;
+}
+/***************************************************************************
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferReadIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount )
+{
+    /* we need to ensure that previous writes are always seen before updating the index. */
+    PaUtil_WriteMemoryBarrier();
+    return rbuf->readIndex = (rbuf->readIndex + elementCount) & rbuf->bigMask;
+}
+
+/***************************************************************************
+** Return elements written. */
+ring_buffer_size_t PaUtil_WriteRingBuffer( PaUtilRingBuffer *rbuf, const void *data, ring_buffer_size_t elementCount )
+{
+    ring_buffer_size_t size1, size2, numWritten;
+    void *data1, *data2;
+    numWritten = PaUtil_GetRingBufferWriteRegions( rbuf, elementCount, &data1, &size1, &data2, &size2 );
+    if( size2 > 0 )
+    {
+
+        memcpy( data1, data, size1*rbuf->elementSizeBytes );
+        data = ((char *)data) + size1*rbuf->elementSizeBytes;
+        memcpy( data2, data, size2*rbuf->elementSizeBytes );
+    }
+    else
+    {
+        memcpy( data1, data, size1*rbuf->elementSizeBytes );
+    }
+    PaUtil_AdvanceRingBufferWriteIndex( rbuf, numWritten );
+    return numWritten;
+}
+
+/***************************************************************************
+** Return elements read. */
+ring_buffer_size_t PaUtil_ReadRingBuffer( PaUtilRingBuffer *rbuf, void *data, ring_buffer_size_t elementCount )
+{
+    ring_buffer_size_t size1, size2, numRead;
+    void *data1, *data2;
+    numRead = PaUtil_GetRingBufferReadRegions( rbuf, elementCount, &data1, &size1, &data2, &size2 );
+    if( size2 > 0 )
+    {
+        memcpy( data, data1, size1*rbuf->elementSizeBytes );
+        data = ((char *)data) + size1*rbuf->elementSizeBytes;
+        memcpy( data, data2, size2*rbuf->elementSizeBytes );
+    }
+    else
+    {
+        memcpy( data, data1, size1*rbuf->elementSizeBytes );
+    }
+    PaUtil_AdvanceRingBufferReadIndex( rbuf, numRead );
+    return numRead;
+}
diff --git a/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.h b/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.h
new file mode 100644
index 0000000..393f6f8
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.h
@@ -0,0 +1,233 @@
+#ifndef WEBRTC_AUDIO_DEVICE_PA_RINGBUFFER_H
+#define WEBRTC_AUDIO_DEVICE_PA_RINGBUFFER_H
+/*
+ * $Id: pa_ringbuffer.h 1421 2009-11-18 16:09:05Z bjornroche $
+ * Portable Audio I/O Library
+ * Ring Buffer utility.
+ *
+ * Author: Phil Burk, http://www.softsynth.com
+ * modified for SMP safety on OS X by Bjorn Roche.
+ * also allowed for const where possible.
+ * modified for multiple-byte-sized data elements by Sven Fischer 
+ *
+ * Note that this is safe only for a single-thread reader
+ * and a single-thread writer.
+ *
+ * This program is distributed with the PortAudio Portable Audio Library.
+ * For more information see: http://www.portaudio.com
+ * Copyright (c) 1999-2000 Ross Bencina and Phil Burk
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+ * The text above constitutes the entire PortAudio license; however, 
+ * the PortAudio community also makes the following non-binding requests:
+ *
+ * Any person wishing to distribute modifications to the Software is
+ * requested to send the modifications to the original developer so that
+ * they can be incorporated into the canonical version. It is also 
+ * requested that these non-binding requests be included along with the 
+ * license above.
+ */
+
+/** @file
+ @ingroup common_src
+ @brief Single-reader single-writer lock-free ring buffer
+
+ PaUtilRingBuffer is a ring buffer used to transport samples between
+ different execution contexts (threads, OS callbacks, interrupt handlers)
+ without requiring the use of any locks. This only works when there is
+ a single reader and a single writer (ie. one thread or callback writes
+ to the ring buffer, another thread or callback reads from it).
+
+ The PaUtilRingBuffer structure manages a ring buffer containing N 
+ elements, where N must be a power of two. An element may be any size 
+ (specified in bytes).
+
+ The memory area used to store the buffer elements must be allocated by 
+ the client prior to calling PaUtil_InitializeRingBuffer() and must outlive
+ the use of the ring buffer.
+*/
+
+#if defined(__APPLE__)
+#include <sys/types.h>
+typedef int32_t ring_buffer_size_t;
+#elif defined( __GNUC__ )
+typedef long ring_buffer_size_t;
+#elif (_MSC_VER >= 1400)
+typedef long ring_buffer_size_t;
+#elif defined(_MSC_VER) || defined(__BORLANDC__)
+typedef long ring_buffer_size_t;
+#else
+typedef long ring_buffer_size_t;
+#endif
+
+
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* __cplusplus */
+
+typedef struct PaUtilRingBuffer
+{
+    ring_buffer_size_t  bufferSize; /**< Number of elements in FIFO. Power of 2. Set by PaUtil_InitRingBuffer. */
+    ring_buffer_size_t  writeIndex; /**< Index of next writable element. Set by PaUtil_AdvanceRingBufferWriteIndex. */
+    ring_buffer_size_t  readIndex;  /**< Index of next readable element. Set by PaUtil_AdvanceRingBufferReadIndex. */
+    ring_buffer_size_t  bigMask;    /**< Used for wrapping indices with extra bit to distinguish full/empty. */
+    ring_buffer_size_t  smallMask;  /**< Used for fitting indices to buffer. */
+    ring_buffer_size_t  elementSizeBytes; /**< Number of bytes per element. */
+    char  *buffer;    /**< Pointer to the buffer containing the actual data. */
+}PaUtilRingBuffer;
+
+/** Initialize Ring Buffer.
+
+ @param rbuf The ring buffer.
+
+ @param elementSizeBytes The size of a single data element in bytes.
+
+ @param elementCount The number of elements in the buffer (must be power of 2).
+
+ @param dataPtr A pointer to a previously allocated area where the data
+ will be maintained.  It must be elementCount*elementSizeBytes long.
+
+ @return -1 if elementCount is not a power of 2, otherwise 0.
+*/
+ring_buffer_size_t PaUtil_InitializeRingBuffer( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementSizeBytes, ring_buffer_size_t elementCount, void *dataPtr );
+
+/** Clear buffer. Should only be called when buffer is NOT being read.
+
+ @param rbuf The ring buffer.
+*/
+void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf );
+
+/** Retrieve the number of elements available in the ring buffer for writing.
+
+ @param rbuf The ring buffer.
+
+ @return The number of elements available for writing.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( PaUtilRingBuffer *rbuf );
+
+/** Retrieve the number of elements available in the ring buffer for reading.
+
+ @param rbuf The ring buffer.
+
+ @return The number of elements available for reading.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferReadAvailable( PaUtilRingBuffer *rbuf );
+
+/** Write data to the ring buffer.
+
+ @param rbuf The ring buffer.
+
+ @param data The address of new data to write to the buffer.
+
+ @param elementCount The number of elements to be written.
+
+ @return The number of elements written.
+*/
+ring_buffer_size_t PaUtil_WriteRingBuffer( PaUtilRingBuffer *rbuf, const void *data, ring_buffer_size_t elementCount );
+
+/** Read data from the ring buffer.
+
+ @param rbuf The ring buffer.
+
+ @param data The address where the data should be stored.
+
+ @param elementCount The number of elements to be read.
+
+ @return The number of elements read.
+*/
+ring_buffer_size_t PaUtil_ReadRingBuffer( PaUtilRingBuffer *rbuf, void *data, ring_buffer_size_t elementCount );
+
+/** Get address of region(s) to which we can write data.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements desired.
+
+ @param dataPtr1 The address where the first (or only) region pointer will be
+ stored.
+
+ @param sizePtr1 The address where the first (or only) region length will be
+ stored.
+
+ @param dataPtr2 The address where the second region pointer will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @param sizePtr2 The address where the second region length will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @return The room available to be written or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                       void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                       void **dataPtr2, ring_buffer_size_t *sizePtr2 );
+
+/** Advance the write index to the next location to be written.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements to advance.
+
+ @return The new position.
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount );
+
+/** Get address of region(s) from which we can read data.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements desired.
+
+ @param dataPtr1 The address where the first (or only) region pointer will be
+ stored.
+
+ @param sizePtr1 The address where the first (or only) region length will be
+ stored.
+
+ @param dataPtr2 The address where the second region pointer will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @param sizePtr2 The address where the second region length will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @return The room available to be read or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferReadRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                      void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                      void **dataPtr2, ring_buffer_size_t *sizePtr2 );
+
+/** Advance the read index to the next location to be read.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements to advance.
+
+ @return The new position.
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferReadIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount );
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+#endif /* WEBRTC_AUDIO_DEVICE_PA_RINGBUFFER_H */
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_device_core_win.cc b/trunk/src/modules/audio_device/main/source/win/audio_device_core_win.cc
new file mode 100644
index 0000000..d0418d2
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_device_core_win.cc
@@ -0,0 +1,5173 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma warning(disable: 4995)  //  name was marked as #pragma deprecated
+
+#if (_MSC_VER >= 1310) && (_MSC_VER < 1400)
+// Reports the major and minor versions of the compiler.
+// For example, 1310 for Microsoft Visual C++ .NET 2003. 1310 represents version 13 and a 1.0 point release.
+// The Visual C++ 2005 compiler version is 1400.
+// Type cl /? at the command line to see the major and minor versions of your compiler along with the build number.
+#pragma message(">> INFO: Windows Core Audio is not supported in VS 2003")
+#endif
+
+#include "audio_device_config.h"
+
+#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+#pragma message(">> INFO: WEBRTC_WINDOWS_CORE_AUDIO_BUILD is defined")
+#else
+#pragma message(">> INFO: WEBRTC_WINDOWS_CORE_AUDIO_BUILD is *not* defined")
+#endif
+
+#ifdef WEBRTC_WINDOWS_CORE_AUDIO_BUILD
+
+#include "audio_device_core_win.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include <windows.h>
+#include <comdef.h>
+#include <dmo.h>
+#include "Functiondiscoverykeys_devpkey.h"
+#include <mmsystem.h>
+#include <strsafe.h>
+#include <uuids.h>
+
+#include "audio_device_utility.h"
+#include "trace.h"
+
+// Macro that calls a COM method returning HRESULT value.
+#define EXIT_ON_ERROR(hres)    do { if (FAILED(hres)) goto Exit; } while(0)
+
+// Macro that releases a COM object if not NULL.
+#define SAFE_RELEASE(p)     do { if ((p)) { (p)->Release(); (p) = NULL; } } while(0)
+
+#define ROUND(x) ((x) >=0 ? (int)((x) + 0.5) : (int)((x) - 0.5))
+
+// REFERENCE_TIME time units per millisecond
+#define REFTIMES_PER_MILLISEC  10000
+
+// Debugger thread-naming record. NOTE(review): this matches the layout used
+// by the MSVC convention "How to: Set a Thread Name in Native Code" (raised
+// via exception code 0x406D1388), but the code that raises the exception is
+// not visible in this chunk -- confirm usage before relying on it.
+typedef struct tagTHREADNAME_INFO
+{
+   DWORD dwType;        // must be 0x1000
+   LPCSTR szName;       // pointer to name (in user addr space)
+   DWORD dwThreadID;    // thread ID (-1=caller thread)
+   DWORD dwFlags;       // reserved for future use, must be zero
+} THREADNAME_INFO;
+
+namespace webrtc {
+namespace {
+
+// COM apartment model for this module. CoreAudioIsSupported() below refuses
+// to report support unless the thread initializes COM as MTA.
+enum { COM_THREADING_MODEL = COINIT_MULTITHREADED };
+
+// Stream indices for the built-in AEC DMO: the capture (microphone) stream
+// and the render stream, respectively.
+enum
+{
+    kAecCaptureStreamIndex = 0,
+    kAecRenderStreamIndex = 1
+};
+
+// An implementation of IMediaBuffer, as required for
+// IMediaObject::ProcessOutput(). After consuming data provided by
+// ProcessOutput(), call SetLength() to update the buffer availability.
+//
+// Lifetime is managed through the standard COM reference count (AddRef /
+// Release); the destructor is private and only reachable via Release().
+//
+// Example implementation:
+// http://msdn.microsoft.com/en-us/library/dd376684(v=vs.85).aspx
+class MediaBufferImpl : public IMediaBuffer
+{
+public:
+    // Allocates a backing store of maxLength bytes. The logical length
+    // starts at zero; the ref count starts at zero as well, so the first
+    // owner must call AddRef() (or rely on QueryInterface doing so).
+    explicit MediaBufferImpl(DWORD maxLength)
+        : _data(new BYTE[maxLength]),
+          _length(0),
+          _maxLength(maxLength),
+          _refCount(0)
+    {}
+
+    // IMediaBuffer methods.
+    // Returns the buffer pointer and the current logical length (not the
+    // allocated capacity).
+    STDMETHOD(GetBufferAndLength(BYTE** ppBuffer, DWORD* pcbLength))
+    {
+        if (!ppBuffer || !pcbLength)
+        {
+            return E_POINTER;
+        }
+
+        *ppBuffer = _data;
+        *pcbLength = _length;
+
+        return S_OK;
+    }
+
+    // Returns the allocated capacity in bytes.
+    STDMETHOD(GetMaxLength(DWORD* pcbMaxLength))
+    {
+        if (!pcbMaxLength)
+        {
+            return E_POINTER;
+        }
+
+        *pcbMaxLength = _maxLength;
+        return S_OK;
+    }
+
+    // Sets the logical length; rejects values beyond the allocated capacity.
+    STDMETHOD(SetLength(DWORD cbLength))
+    {
+        if (cbLength > _maxLength)
+        {
+            return E_INVALIDARG;
+        }
+
+        _length = cbLength;
+        return S_OK;
+    }
+
+    // IUnknown methods.
+    STDMETHOD_(ULONG, AddRef())
+    {
+        return InterlockedIncrement(&_refCount);
+    }
+
+    // Only IMediaBuffer and IUnknown are supported; a successful query
+    // takes a reference on behalf of the caller.
+    STDMETHOD(QueryInterface(REFIID riid, void** ppv))
+    {
+        if (!ppv)
+        {
+            return E_POINTER;
+        }
+        else if (riid != IID_IMediaBuffer && riid != IID_IUnknown)
+        {
+            return E_NOINTERFACE;
+        }
+
+        *ppv = static_cast<IMediaBuffer*>(this);
+        AddRef();
+        return S_OK;
+    }
+
+    // Deletes the object when the last reference is dropped and returns the
+    // post-decrement count (0 after self-deletion).
+    STDMETHOD_(ULONG, Release())
+    {
+        LONG refCount = InterlockedDecrement(&_refCount);
+        if (refCount == 0)
+        {
+            delete this;
+        }
+
+        return refCount;
+    }
+
+private:
+    // Private: only Release() may destroy the object.
+    ~MediaBufferImpl()
+    {
+        delete [] _data;
+    }
+
+    BYTE* _data;          // backing store, owned
+    DWORD _length;        // logical length, <= _maxLength
+    const DWORD _maxLength;
+    LONG _refCount;
+};
+}  // namespace
+
+// ============================================================================
+//                              Static Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  CoreAudioIsSupported
+// ----------------------------------------------------------------------------
+
+// Probes, without any prior initialization, whether the Windows Core Audio
+// (WASAPI/MMDevice) stack can be used on this machine: OS version check,
+// MTA COM init, MMDevice enumerator creation, and a trial run against every
+// recording and playout device. Returns true only if all probes succeed.
+bool AudioDeviceWindowsCore::CoreAudioIsSupported()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%s", __FUNCTION__);
+
+    bool MMDeviceIsAvailable(false);
+    bool coreAudioIsSupported(false);
+
+    HRESULT hr(S_OK);
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR errorText[MAXERRORLENGTH];
+
+    // 1) Check if Windows version is Vista SP1 or later.
+    //
+    // CoreAudio is only available on Vista SP1 and later.
+    //
+    OSVERSIONINFOEX osvi;
+    DWORDLONG dwlConditionMask = 0;
+    int op = VER_LESS_EQUAL;
+
+    // Initialize the OSVERSIONINFOEX structure.
+    ZeroMemory(&osvi, sizeof(OSVERSIONINFOEX));
+    osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+    osvi.dwMajorVersion = 6;
+    osvi.dwMinorVersion = 0;
+    osvi.wServicePackMajor = 0;
+    osvi.wServicePackMinor = 0;
+    osvi.wProductType = VER_NT_WORKSTATION;
+
+    // Initialize the condition mask. With VER_LESS_EQUAL the test below
+    // succeeds when the running OS is *at most* 6.0 RTM (i.e. Vista without
+    // SP1, or XP), which is exactly the unsupported case.
+    VER_SET_CONDITION(dwlConditionMask, VER_MAJORVERSION, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_MINORVERSION, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_SERVICEPACKMAJOR, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_SERVICEPACKMINOR, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_PRODUCT_TYPE, VER_EQUAL);
+
+    DWORD dwTypeMask = VER_MAJORVERSION | VER_MINORVERSION |
+                       VER_SERVICEPACKMAJOR | VER_SERVICEPACKMINOR |
+                       VER_PRODUCT_TYPE;
+
+    // Perform the test. Non-zero means "OS <= Vista RTM" => unsupported.
+    BOOL isVistaRTMorXP = VerifyVersionInfo(&osvi, dwTypeMask,
+                                            dwlConditionMask);
+    if (isVistaRTMorXP != 0)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+            "*** Windows Core Audio is only supported on Vista SP1 or later "
+            "=> will revert to the Wave API ***");
+        return false;
+    }
+
+    // 2) Initializes the COM library for use by the calling thread.
+
+    // The COM init wrapper sets the thread's concurrency model to MTA,
+    // and creates a new apartment for the thread if one is required. The
+    // wrapper also ensures that each call to CoInitializeEx is balanced
+    // by a corresponding call to CoUninitialize.
+    //
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      // Things will work even if an STA thread is calling this method but we
+      // want to ensure that MTA is used and therefore return false here.
+      return false;
+    }
+
+    // 3) Check if the MMDevice API is available.
+    //
+    // The Windows Multimedia Device (MMDevice) API enables audio clients to
+    // discover audio endpoint devices, determine their capabilities, and create
+    // driver instances for those devices.
+    // Header file Mmdeviceapi.h defines the interfaces in the MMDevice API.
+    // The MMDevice API consists of several interfaces. The first of these is the
+    // IMMDeviceEnumerator interface. To access the interfaces in the MMDevice API,
+    // a client obtains a reference to the IMMDeviceEnumerator interface of a
+    // device-enumerator object by calling the CoCreateInstance function.
+    //
+    // Through the IMMDeviceEnumerator interface, the client can obtain references
+    // to the other interfaces in the MMDevice API. The MMDevice API implements
+    // the following interfaces:
+    //
+    // IMMDevice            Represents an audio device.
+    // IMMDeviceCollection  Represents a collection of audio devices.
+    // IMMDeviceEnumerator  Provides methods for enumerating audio devices.
+    // IMMEndpoint          Represents an audio endpoint device.
+    //
+    IMMDeviceEnumerator* pIMMD(NULL);
+    const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
+    const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
+
+    hr = CoCreateInstance(
+            CLSID_MMDeviceEnumerator,   // GUID value of MMDeviceEnumerator coclass
+            NULL,
+            CLSCTX_ALL,
+            IID_IMMDeviceEnumerator,    // GUID value of the IMMDeviceEnumerator interface
+            (void**)&pIMMD );
+
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+            "AudioDeviceWindowsCore::CoreAudioIsSupported() Failed to create the required COM object", hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1,
+            "AudioDeviceWindowsCore::CoreAudioIsSupported() CoCreateInstance(MMDeviceEnumerator) failed (hr=0x%x)", hr);
+
+        const DWORD dwFlags = FORMAT_MESSAGE_FROM_SYSTEM |
+                              FORMAT_MESSAGE_IGNORE_INSERTS;
+        const DWORD dwLangID = MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US);
+
+        // Gets the system's human readable message string for this HRESULT.
+        // All error message in English by default.
+        // NOTE(review): errorText is declared TCHAR but FormatMessageW always
+        // writes wide characters -- this is only correct in UNICODE builds;
+        // confirm the build configuration.
+        DWORD messageLength = ::FormatMessageW(dwFlags,
+                                               0,
+                                               hr,
+                                               dwLangID,
+                                               errorText,
+                                               MAXERRORLENGTH,
+                                               NULL);
+
+        assert(messageLength <= MAXERRORLENGTH);
+
+        // Trims trailing white space (FormatMessage() leaves a trailing cr-lf.).
+        for (; messageLength && ::isspace(errorText[messageLength - 1]);
+             --messageLength)
+        {
+            errorText[messageLength - 1] = '\0';
+        }
+
+        StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+        StringCchCat(buf, MAXERRORLENGTH, errorText);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%S", buf);
+    }
+    else
+    {
+        MMDeviceIsAvailable = true;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1,
+            "AudioDeviceWindowsCore::CoreAudioIsSupported() CoCreateInstance(MMDeviceEnumerator) succeeded", hr);
+        SAFE_RELEASE(pIMMD);
+    }
+
+    // 4) Verify that we can create and initialize our Core Audio class.
+    //
+    // Also, perform a limited "API test" to ensure that Core Audio is supported for all devices.
+    //
+    if (MMDeviceIsAvailable)
+    {
+        coreAudioIsSupported = false;
+
+        AudioDeviceWindowsCore* p = new AudioDeviceWindowsCore(-1);
+        if (p == NULL)
+        {
+            return false;
+        }
+
+        // 'ok' accumulates every failure; it stays 0 only if every probe on
+        // every device succeeds.
+        int ok(0);
+        int temp_ok(0);
+        bool available(false);
+
+        ok |= p->Init();
+
+        // Probe every recording device: select it, check availability and
+        // try to initialize the microphone on it.
+        WebRtc_Word16 numDevsRec = p->RecordingDevices();
+        for (WebRtc_UWord16 i = 0; i < numDevsRec; i++)
+        {
+            ok |= p->SetRecordingDevice(i);
+            temp_ok = p->RecordingIsAvailable(available);
+            ok |= temp_ok;
+            ok |= (available == false);
+            if (available)
+            {
+                ok |= p->InitMicrophone();
+            }
+            if (ok)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
+                    "AudioDeviceWindowsCore::CoreAudioIsSupported() Failed to use Core Audio Recording for device id=%i", i);
+            }
+        }
+
+        // Same probe for every playout device.
+        WebRtc_Word16 numDevsPlay = p->PlayoutDevices();
+        for (WebRtc_UWord16 i = 0; i < numDevsPlay; i++)
+        {
+            ok |= p->SetPlayoutDevice(i);
+            temp_ok = p->PlayoutIsAvailable(available);
+            ok |= temp_ok;
+            ok |= (available == false);
+            if (available)
+            {
+                ok |= p->InitSpeaker();
+            }
+            if (ok)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1 ,
+                    "AudioDeviceWindowsCore::CoreAudioIsSupported() Failed to use Core Audio Playout for device id=%i", i);
+            }
+        }
+
+        ok |= p->Terminate();
+
+        if (ok == 0)
+        {
+            coreAudioIsSupported = true;
+        }
+
+        delete p;
+    }
+
+    if (coreAudioIsSupported)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, "*** Windows Core Audio is supported ***");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, "*** Windows Core Audio is NOT supported => will revert to the Wave API ***");
+    }
+
+    return (coreAudioIsSupported);
+}
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsCore() - ctor
+// ----------------------------------------------------------------------------
+
+// Constructor: initializes all members to their idle defaults, then performs
+// one-time setup that is never redone in Init(): loading Avrt.dll (MMCSS),
+// creating the event handles used by the render/capture threads, creating
+// the IMMDeviceEnumerator, and creating the AEC DMO.
+AudioDeviceWindowsCore::AudioDeviceWindowsCore(const WebRtc_Word32 id) :
+    _comInit(ScopedCOMInitializer::kMTA),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _volumeMutex(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _ptrAudioBuffer(NULL),
+    _ptrEnumerator(NULL),
+    _ptrRenderCollection(NULL),
+    _ptrCaptureCollection(NULL),
+    _ptrDeviceOut(NULL),
+    _ptrDeviceIn(NULL),
+    _ptrClientOut(NULL),
+    _ptrClientIn(NULL),
+    _ptrRenderClient(NULL),
+    _ptrCaptureClient(NULL),
+    _ptrCaptureVolume(NULL),
+    _ptrRenderSimpleVolume(NULL),
+    _dmo(NULL),
+    _mediaBuffer(NULL),
+    _builtInAecEnabled(false),
+    _playAudioFrameSize(0),
+    _playSampleRate(0),
+    _playBlockSize(0),
+    _playChannels(2),
+    _sndCardPlayDelay(0),
+    _sndCardRecDelay(0),
+    _sampleDriftAt48kHz(0),
+    _driftAccumulator(0),
+    _writtenSamples(0),
+    _readSamples(0),
+    _playAcc(0),
+    _recAudioFrameSize(0),
+    _recSampleRate(0),
+    _recBlockSize(0),
+    _recChannels(2),
+    _avrtLibrary(NULL),
+    _winSupportAvrt(false),
+    _hRenderSamplesReadyEvent(NULL),
+    _hPlayThread(NULL),
+    _hCaptureSamplesReadyEvent(NULL),
+    _hRecThread(NULL),
+    _hShutdownRenderEvent(NULL),
+    _hShutdownCaptureEvent(NULL),
+    _hRenderStartedEvent(NULL),
+    _hCaptureStartedEvent(NULL),
+    _hGetCaptureVolumeThread(NULL),
+    _hSetCaptureVolumeThread(NULL),
+    _hSetCaptureVolumeEvent(NULL),
+    _hMmTask(NULL),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _speakerIsInitialized(false),
+    _microphoneIsInitialized(false),
+    _AGC(false),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _playBufType(AudioDeviceModule::kAdaptiveBufferSize),
+    _playBufDelay(80),
+    _playBufDelayFixed(80),
+    _usingInputDeviceIndex(false),
+    _usingOutputDeviceIndex(false),
+    _inputDevice(AudioDeviceModule::kDefaultCommunicationDevice),
+    _outputDevice(AudioDeviceModule::kDefaultCommunicationDevice),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _newMicLevel(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+    assert(_comInit.succeeded());
+
+    // Try to load the Avrt DLL
+    if (!_avrtLibrary)
+    {
+        // Get handle to the Avrt DLL module.
+        _avrtLibrary = LoadLibrary(TEXT("Avrt.dll"));
+        if (_avrtLibrary)
+        {
+            // Handle is valid (Avrt.dll is present, i.e. Vista or later).
+            // Try to get the function addresses.
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() The Avrt DLL module is now loaded");
+
+            _PAvRevertMmThreadCharacteristics = (PAvRevertMmThreadCharacteristics)GetProcAddress(_avrtLibrary, "AvRevertMmThreadCharacteristics");
+            _PAvSetMmThreadCharacteristicsA = (PAvSetMmThreadCharacteristicsA)GetProcAddress(_avrtLibrary, "AvSetMmThreadCharacteristicsA");
+            _PAvSetMmThreadPriority = (PAvSetMmThreadPriority)GetProcAddress(_avrtLibrary, "AvSetMmThreadPriority");
+
+            // MMCSS support is only enabled if all three entry points resolve.
+            if ( _PAvRevertMmThreadCharacteristics &&
+                 _PAvSetMmThreadCharacteristicsA &&
+                 _PAvSetMmThreadPriority)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() AvRevertMmThreadCharacteristics() is OK");
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() AvSetMmThreadCharacteristicsA() is OK");
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() AvSetMmThreadPriority() is OK");
+                _winSupportAvrt = true;
+            }
+        }
+    }
+
+    // Create our samples ready events - we want auto reset events that start in the not-signaled state.
+    // The state of an auto-reset event object remains signaled until a single waiting thread is released,
+    // at which time the system automatically sets the state to nonsignaled. If no threads are waiting,
+    // the event object's state remains signaled.
+    // (Except for _hShutdownCaptureEvent, which is used to shutdown multiple threads).
+    _hRenderSamplesReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hCaptureSamplesReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hShutdownRenderEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hShutdownCaptureEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
+    _hRenderStartedEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hCaptureStartedEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hSetCaptureVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+
+    _perfCounterFreq.QuadPart = 1;
+    _perfCounterFactor = 0.0;
+    _avgCPULoad = 0.0;
+
+    // list of number of channels to use on recording side
+    _recChannelsPrioList[0] = 2;    // stereo is prio 1
+    _recChannelsPrioList[1] = 1;    // mono is prio 2
+
+    // list of number of channels to use on playout side
+    _playChannelsPrioList[0] = 2;    // stereo is prio 1
+    _playChannelsPrioList[1] = 1;    // mono is prio 2
+
+    HRESULT hr;
+
+    // We know that this API will work since it has already been verified in
+    // CoreAudioIsSupported, hence no need to check for errors here as well.
+
+    // Retrive the IMMDeviceEnumerator API (should load the MMDevAPI.dll)
+    // TODO(henrika): we should probably move this allocation to Init() instead
+    // and deallocate in Terminate() to make the implementation more symmetric.
+    CoCreateInstance(
+      __uuidof(MMDeviceEnumerator),
+      NULL,
+      CLSCTX_ALL,
+      __uuidof(IMMDeviceEnumerator),
+      reinterpret_cast<void**>(&_ptrEnumerator));
+    assert(NULL != _ptrEnumerator);
+
+    // DMO initialization for built-in WASAPI AEC.
+    {
+        IMediaObject* ptrDMO = NULL;
+        hr = CoCreateInstance(CLSID_CWMAudioAEC,
+                              NULL,
+                              CLSCTX_INPROC_SERVER,
+                              IID_IMediaObject,
+                              reinterpret_cast<void**>(&ptrDMO));
+        if (FAILED(hr) || ptrDMO == NULL)
+        {
+            // Since we check that _dmo is non-NULL in EnableBuiltInAEC(), the
+            // feature is prevented from being enabled.
+            _builtInAecEnabled = false;
+            _TraceCOMError(hr);
+        }
+        // NOTE(review): _dmo appears to be a ref-counting smart pointer
+        // (assignment takes its own reference; the raw local is released
+        // right after) -- confirm its declared type in the header.
+        _dmo = ptrDMO;
+        SAFE_RELEASE(ptrDMO);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsCore() - dtor
+// ----------------------------------------------------------------------------
+
+// Destructor: tears down everything created in the constructor -- the device
+// enumerator, the event handles, the Avrt DLL module and the critical
+// sections -- after first running Terminate().
+AudioDeviceWindowsCore::~AudioDeviceWindowsCore()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    // The IMMDeviceEnumerator is created during construction. Must release
+    // it here and not in Terminate() since we don't recreate it in Init().
+    SAFE_RELEASE(_ptrEnumerator);
+
+    _ptrAudioBuffer = NULL;
+
+    // Close every event handle created in the constructor and reset the
+    // members to NULL. Iterating over a table keeps the cleanup uniform.
+    HANDLE* const eventHandles[] =
+    {
+        &_hRenderSamplesReadyEvent,
+        &_hCaptureSamplesReadyEvent,
+        &_hRenderStartedEvent,
+        &_hCaptureStartedEvent,
+        &_hShutdownRenderEvent,
+        &_hShutdownCaptureEvent,
+        &_hSetCaptureVolumeEvent
+    };
+    for (size_t n = 0; n < sizeof(eventHandles) / sizeof(eventHandles[0]); ++n)
+    {
+        if (*eventHandles[n] != NULL)
+        {
+            CloseHandle(*eventHandles[n]);
+            *eventHandles[n] = NULL;
+        }
+    }
+
+    if (_avrtLibrary)
+    {
+        // Unload the Avrt DLL module that was loaded in the constructor.
+        if (FreeLibrary(_avrtLibrary))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "AudioDeviceWindowsCore::~AudioDeviceWindowsCore() the Avrt DLL module is now unloaded");
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "AudioDeviceWindowsCore::~AudioDeviceWindowsCore() failed to free the loaded Avrt DLL module correctly");
+        }
+    }
+
+    // The critical sections were allocated via CreateCriticalSection() in the
+    // constructor's initializer list and are stored as references.
+    delete &_critSect;
+    delete &_volumeMutex;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+// Stores the (externally owned) audio buffer and resets its format values.
+// The real sample rates and channel counts are applied later by
+// InitPlayout() and InitRecording().
+void AudioDeviceWindowsCore::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+    _ptrAudioBuffer = audioBuffer;
+
+    // Zero out all defaults; actual settings are configured during
+    // InitPlayout()/InitRecording().
+    _ptrAudioBuffer->SetRecordingSampleRate(0);
+    _ptrAudioBuffer->SetPlayoutSampleRate(0);
+    _ptrAudioBuffer->SetRecordingChannels(0);
+    _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+// Reports the audio layer implemented by this class; always Core Audio.
+WebRtc_Word32 AudioDeviceWindowsCore::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kWindowsCoreAudio;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::Init()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Enumerate all audio rendering and capturing endpoint devices.
+    // Note that, some of these will not be able to select by the user.
+    // The complete collection is for internal use only.
+    //
+    _EnumerateEndpointDevicesAll(eRender);
+    _EnumerateEndpointDevicesAll(eCapture);
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+// Shuts the module down: clears all state flags and releases every COM
+// interface acquired since Init(). The device enumerator is deliberately
+// kept alive (released only in the destructor). Idempotent.
+WebRtc_Word32 AudioDeviceWindowsCore::Terminate()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    // Terminating an uninitialized module is a no-op.
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    // Drop all state flags first.
+    _initialized = false;
+    _speakerIsInitialized = false;
+    _microphoneIsInitialized = false;
+    _playing = false;
+    _recording = false;
+
+    // Release all acquired COM interfaces (each macro NULLs the pointer).
+    SAFE_RELEASE(_ptrRenderCollection);
+    SAFE_RELEASE(_ptrCaptureCollection);
+    SAFE_RELEASE(_ptrDeviceOut);
+    SAFE_RELEASE(_ptrDeviceIn);
+    SAFE_RELEASE(_ptrClientOut);
+    SAFE_RELEASE(_ptrClientIn);
+    SAFE_RELEASE(_ptrRenderClient);
+    SAFE_RELEASE(_ptrCaptureClient);
+    SAFE_RELEASE(_ptrCaptureVolume);
+    SAFE_RELEASE(_ptrRenderSimpleVolume);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+// Returns true once Init() has completed and Terminate() has not been called.
+bool AudioDeviceWindowsCore::Initialized() const
+{
+    return _initialized;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+// Reports speaker availability. Fails (-1) if no rendering endpoint device
+// has been selected; otherwise sets available to true and returns 0.
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerIsAvailable(bool& available)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    // A rendering endpoint device must have been selected first.
+    if (NULL == _ptrDeviceOut)
+    {
+        return -1;
+    }
+
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+// Initializes the speaker: refreshes the selected rendering endpoint device
+// (by index or by role), then activates the session manager on it and
+// acquires the ISimpleAudioVolume used for volume control.
+// Returns 0 on success, -1 if playout is active, no device is selected, or
+// any COM activation fails.
+WebRtc_Word32 AudioDeviceWindowsCore::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Re-initialization is not allowed while playout is running.
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    // When a device was selected by index, make sure the stored index is
+    // still valid against the current device list.
+    if (_usingOutputDeviceIndex)
+    {
+        WebRtc_Word16 nDevices = PlayoutDevices();
+        if (_outputDeviceIndex > (nDevices - 1))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "current device selection is invalid => unable to initialize");
+            return -1;
+        }
+    }
+
+    WebRtc_Word32 ret(0);
+
+    SAFE_RELEASE(_ptrDeviceOut);
+    if (_usingOutputDeviceIndex)
+    {
+        // Refresh the selected rendering endpoint device using current index
+        ret = _GetListDevice(eRender, _outputDeviceIndex, &_ptrDeviceOut);
+    }
+    else
+    {
+        // kDefaultDevice maps to the eConsole role; any other selection maps
+        // to eCommunications.
+        ERole role;
+        (_outputDevice == AudioDeviceModule::kDefaultDevice) ? role = eConsole : role = eCommunications;
+        // Refresh the selected rendering endpoint device using role
+        ret = _GetDefaultDevice(eRender, role, &_ptrDeviceOut);
+    }
+
+    if (ret != 0 || (_ptrDeviceOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to initialize the rendering enpoint device");
+        SAFE_RELEASE(_ptrDeviceOut);
+        return -1;
+    }
+
+    // Activate the session manager on the device; it is only needed long
+    // enough to obtain the simple-volume interface and is released below.
+    IAudioSessionManager* pManager = NULL;
+    ret = _ptrDeviceOut->Activate(__uuidof(IAudioSessionManager),
+                                  CLSCTX_ALL,
+                                  NULL,
+                                  (void**)&pManager);
+    if (ret != 0 || pManager == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "  failed to initialize the render manager");
+        SAFE_RELEASE(pManager);
+        return -1;
+    }
+
+    // Acquire the session volume interface used by SetSpeakerVolume() et al.
+    SAFE_RELEASE(_ptrRenderSimpleVolume);
+    ret = pManager->GetSimpleAudioVolume(NULL, FALSE, &_ptrRenderSimpleVolume);
+    if (ret != 0 || _ptrRenderSimpleVolume == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "  failed to initialize the render simple volume");
+        SAFE_RELEASE(pManager);
+        SAFE_RELEASE(_ptrRenderSimpleVolume);
+        return -1;
+    }
+    SAFE_RELEASE(pManager);
+
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+// Reports microphone availability. Fails (-1) if no capturing endpoint
+// device has been selected; otherwise sets available to true and returns 0.
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneIsAvailable(bool& available)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    // A capturing endpoint device must have been selected first.
+    if (NULL == _ptrDeviceIn)
+    {
+        return -1;
+    }
+
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+// Initializes the microphone: refreshes the selected capture endpoint device
+// (by index or by role) and activates its IAudioEndpointVolume interface for
+// capture volume control.
+// Returns 0 on success, -1 if recording is active, no device is selected, or
+// any COM activation fails.
+WebRtc_Word32 AudioDeviceWindowsCore::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Re-initialization is not allowed while recording is running.
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    // When a device was selected by index, make sure the stored index is
+    // still valid against the current device list.
+    if (_usingInputDeviceIndex)
+    {
+        WebRtc_Word16 nDevices = RecordingDevices();
+        if (_inputDeviceIndex > (nDevices - 1))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "current device selection is invalid => unable to initialize");
+            return -1;
+        }
+    }
+
+    WebRtc_Word32 ret(0);
+
+    SAFE_RELEASE(_ptrDeviceIn);
+    if (_usingInputDeviceIndex)
+    {
+        // Refresh the selected capture endpoint device using current index
+        ret = _GetListDevice(eCapture, _inputDeviceIndex, &_ptrDeviceIn);
+    }
+    else
+    {
+        // kDefaultDevice maps to the eConsole role; any other selection maps
+        // to eCommunications.
+        ERole role;
+        (_inputDevice == AudioDeviceModule::kDefaultDevice) ? role = eConsole : role = eCommunications;
+        // Refresh the selected capture endpoint device using role
+        ret = _GetDefaultDevice(eCapture, role, &_ptrDeviceIn);
+    }
+
+    if (ret != 0 || (_ptrDeviceIn == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to initialize the capturing enpoint device");
+        SAFE_RELEASE(_ptrDeviceIn);
+        return -1;
+    }
+
+    // Acquire the endpoint volume interface used for capture volume control.
+    ret = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume),
+                                 CLSCTX_ALL,
+                                 NULL,
+                                 reinterpret_cast<void **>(&_ptrCaptureVolume));
+    if (ret != 0 || _ptrCaptureVolume == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "  failed to initialize the capture volume");
+        SAFE_RELEASE(_ptrCaptureVolume);
+        return -1;
+    }
+
+    _microphoneIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+// Returns true after a successful InitSpeaker() (and until Terminate()).
+bool AudioDeviceWindowsCore::SpeakerIsInitialized() const
+{
+    return _speakerIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+// Returns true after a successful InitMicrophone() (and until Terminate()).
+bool AudioDeviceWindowsCore::MicrophoneIsInitialized() const
+{
+    return _microphoneIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioSessionManager* pManager = NULL;
+    ISimpleAudioVolume* pVolume = NULL;
+
+    hr = _ptrDeviceOut->Activate(__uuidof(IAudioSessionManager), CLSCTX_ALL, NULL, (void**)&pManager);
+    EXIT_ON_ERROR(hr);
+
+    hr = pManager->GetSimpleAudioVolume(NULL, FALSE, &pVolume);
+    EXIT_ON_ERROR(hr);
+
+    float volume(0.0f);
+    hr = pVolume->GetMasterVolume(&volume);
+    if (FAILED(hr))
+    {
+        available = false;
+    }
+    available = true;
+
+    SAFE_RELEASE(pManager);
+    SAFE_RELEASE(pVolume);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pManager);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
// Sets the render-session master volume. 'volume' must lie within
// [MIN_CORE_SPEAKER_VOLUME, MAX_CORE_SPEAKER_VOLUME]; it is mapped linearly
// onto the 0.0-1.0 scalar used by ISimpleAudioVolume::SetMasterVolume.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SetSpeakerVolume(WebRtc_UWord32 volume)
{

    {
        // Scoped so the state checks do not hold _critSect across the COM call.
        CriticalSectionScoped lock(_critSect);

        if (!_speakerIsInitialized)
        {
        return -1;
        }

        if (_ptrDeviceOut == NULL)
        {
            return -1;
        }
    }

    if (volume < (WebRtc_UWord32)MIN_CORE_SPEAKER_VOLUME ||
        volume > (WebRtc_UWord32)MAX_CORE_SPEAKER_VOLUME)
    {
        return -1;
    }

    HRESULT hr = S_OK;

    // scale input volume to valid range (0.0 to 1.0)
    const float fLevel = (float)volume/MAX_CORE_SPEAKER_VOLUME;
    // NOTE(review): _ptrRenderSimpleVolume is dereferenced without a NULL
    // check — presumably guaranteed non-NULL once _speakerIsInitialized; confirm.
    _volumeMutex.Enter();
    hr = _ptrRenderSimpleVolume->SetMasterVolume(fLevel,NULL);
    _volumeMutex.Leave();
    EXIT_ON_ERROR(hr);

    return 0;

Exit:
    _TraceCOMError(hr);
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
// Reads the render-session master volume and scales the 0.0-1.0 value onto
// the WebRTC-internal range (0..MAX_CORE_SPEAKER_VOLUME). Returns 0 on
// success with 'volume' set, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SpeakerVolume(WebRtc_UWord32& volume) const
{

    {
        // Scoped so the state checks do not hold _critSect across the COM call.
        CriticalSectionScoped lock(_critSect);

        if (!_speakerIsInitialized)
        {
            return -1;
        }

        if (_ptrDeviceOut == NULL)
        {
            return -1;
        }
    }

    HRESULT hr = S_OK;
    float fLevel(0.0f);

    // NOTE(review): _ptrRenderSimpleVolume is dereferenced without a NULL
    // check — presumably guaranteed non-NULL once _speakerIsInitialized; confirm.
    _volumeMutex.Enter();
    hr = _ptrRenderSimpleVolume->GetMasterVolume(&fLevel);
    _volumeMutex.Leave();
    EXIT_ON_ERROR(hr);

    // scale input volume range [0.0,1.0] to valid output range
    volume = static_cast<WebRtc_UWord32> (fLevel*MAX_CORE_SPEAKER_VOLUME);

    return 0;

Exit:
    _TraceCOMError(hr);
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
// Not implemented for the Core Audio backend; always returns -1.
WebRtc_Word32 AudioDeviceWindowsCore::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
{
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
// Not implemented for the Core Audio backend; always returns -1.
WebRtc_Word32 AudioDeviceWindowsCore::WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const
{
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+//
+//  The internal range for Core Audio is 0.0 to 1.0, where 0.0 indicates
+//  silence and 1.0 indicates full volume (no attenuation).
+//  We add our (webrtc-internal) own max level to match the Wave API and
+//  how it is used today in VoE.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (MAX_CORE_SPEAKER_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (MIN_CORE_SPEAKER_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    stepSize = CORE_SPEAKER_VOLUME_STEP_SIZE;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerMuteIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the speaker system mute state.
+    hr = _ptrDeviceOut->Activate(__uuidof(IAudioEndpointVolume),
+        CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    if (FAILED(hr))
+        available = false;
+    else
+        available = true;
+
+    SAFE_RELEASE(pVolume);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
// Sets the system (endpoint) mute state of the selected render device.
// Requires an initialized speaker and a selected output device.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SetSpeakerMute(bool enable)
{

    CriticalSectionScoped lock(_critSect);

    if (!_speakerIsInitialized)
    {
        return -1;
    }

    if (_ptrDeviceOut == NULL)
    {
        return -1;
    }

    HRESULT hr = S_OK;
    IAudioEndpointVolume* pVolume = NULL;

    // Set the speaker system mute state.
    hr = _ptrDeviceOut->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
    EXIT_ON_ERROR(hr);

    const BOOL mute(enable);
    hr = pVolume->SetMute(mute, NULL);
    EXIT_ON_ERROR(hr);

    SAFE_RELEASE(pVolume);

    return 0;

Exit:
    _TraceCOMError(hr);
    SAFE_RELEASE(pVolume);
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
// Queries the system (endpoint) mute state of the selected render device.
// Returns 0 on success with 'enabled' set, -1 on failure.
// NOTE(review): unlike SetSpeakerMute, no _critSect lock is taken here while
// _ptrDeviceOut is read — confirm whether that is intentional.
WebRtc_Word32 AudioDeviceWindowsCore::SpeakerMute(bool& enabled) const
{

    if (!_speakerIsInitialized)
    {
        return -1;
    }

    if (_ptrDeviceOut == NULL)
    {
        return -1;
    }

    HRESULT hr = S_OK;
    IAudioEndpointVolume* pVolume = NULL;

    // Query the speaker system mute state.
    hr = _ptrDeviceOut->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
    EXIT_ON_ERROR(hr);

    BOOL mute;
    hr = pVolume->GetMute(&mute);
    EXIT_ON_ERROR(hr);

    enabled = (mute == TRUE) ? true : false;

    SAFE_RELEASE(pVolume);

    return 0;

Exit:
    _TraceCOMError(hr);
    SAFE_RELEASE(pVolume);
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the microphone system mute state.
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    if (FAILED(hr))
+        available = false;
+    else
+        available = true;
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
// Sets the system (endpoint) mute state of the selected capture device.
// Requires an initialized microphone and a selected input device.
// Returns 0 on success, -1 on failure.
// NOTE(review): unlike SetSpeakerMute, no _critSect lock is taken here while
// _ptrDeviceIn is read — confirm whether that is intentional.
WebRtc_Word32 AudioDeviceWindowsCore::SetMicrophoneMute(bool enable)
{

    if (!_microphoneIsInitialized)
    {
        return -1;
    }

    if (_ptrDeviceIn == NULL)
    {
        return -1;
    }

    HRESULT hr = S_OK;
    IAudioEndpointVolume* pVolume = NULL;

    // Set the microphone system mute state.
    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
    EXIT_ON_ERROR(hr);

    const BOOL mute(enable);
    hr = pVolume->SetMute(mute, NULL);
    EXIT_ON_ERROR(hr);

    SAFE_RELEASE(pVolume);
    return 0;

Exit:
    _TraceCOMError(hr);
    SAFE_RELEASE(pVolume);
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneMute(bool& enabled) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the microphone system mute state.
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    EXIT_ON_ERROR(hr);
+
+    enabled = (mute == TRUE) ? true : false;
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
// Microphone boost is not supported by this implementation; always returns
// -1 (after the usual initialization check).
WebRtc_Word32 AudioDeviceWindowsCore::SetMicrophoneBoost(bool enable)
{

    if (!_microphoneIsInitialized)
    {
        return -1;
    }

    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
// Microphone boost is not supported by this implementation; always returns
// -1 (after the usual initialization check).
WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneBoost(bool& enabled) const
{

    if (!_microphoneIsInitialized)
    {
        return -1;
    }

    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoRecordingIsAvailable(bool& available)
+{
+
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetStereoRecording(bool enable)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (enable)
+    {
+        _recChannelsPrioList[0] = 2;    // try stereo first
+        _recChannelsPrioList[1] = 1;
+        _recChannels = 2;
+    }
+    else
+    {
+        _recChannelsPrioList[0] = 1;    // try mono first
+        _recChannelsPrioList[1] = 2;
+        _recChannels = 1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoPlayoutIsAvailable(bool& available)
+{
+
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetStereoPlayout(bool enable)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (enable)
+    {
+        _playChannelsPrioList[0] = 2;    // try stereo first
+        _playChannelsPrioList[1] = 1;
+        _playChannels = 2;
+    }
+    else
+    {
+        _playChannelsPrioList[0] = 1;    // try mono first
+        _playChannelsPrioList[1] = 2;
+        _playChannels = 1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
// Stores the AGC (automatic gain control) flag under the module lock.
WebRtc_Word32 AudioDeviceWindowsCore::SetAGC(bool enable)
{
    CriticalSectionScoped lock(_critSect);
    _AGC = enable;
    return 0;
}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
// Returns the stored AGC (automatic gain control) flag under the module lock.
bool AudioDeviceWindowsCore::AGC() const
{
    CriticalSectionScoped lock(_critSect);
    return _AGC;
}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL, reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    float volume(0.0f);
+    hr = pVolume->GetMasterVolumeLevelScalar(&volume);
+    if (FAILED(hr))
+    {
+        available = false;
+    }
+    available = true;
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::SetMicrophoneVolume(volume=%u)", volume);
+
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        if (!_microphoneIsInitialized)
+        {
+            return -1;
+        }
+
+        if (_ptrDeviceIn == NULL)
+        {
+            return -1;
+        }
+    }
+
+    if (volume < static_cast<WebRtc_UWord32>(MIN_CORE_MICROPHONE_VOLUME) ||
+        volume > static_cast<WebRtc_UWord32>(MAX_CORE_MICROPHONE_VOLUME))
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    // scale input volume to valid range (0.0 to 1.0)
+    const float fLevel = static_cast<float>(volume)/MAX_CORE_MICROPHONE_VOLUME;
+    _volumeMutex.Enter();
+    _ptrCaptureVolume->SetMasterVolumeLevelScalar(fLevel, NULL);
+    _volumeMutex.Leave();
+    EXIT_ON_ERROR(hr);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
// Reads the capture endpoint master volume and scales the 0.0-1.0 value onto
// the WebRTC-internal range (0..MAX_CORE_MICROPHONE_VOLUME). Returns 0 on
// success with 'volume' set, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneVolume(WebRtc_UWord32& volume) const
{
    {
        // Scoped so the state checks do not hold _critSect across the COM call.
        CriticalSectionScoped lock(_critSect);

        if (!_microphoneIsInitialized)
        {
            return -1;
        }

        if (_ptrDeviceIn == NULL)
        {
            return -1;
        }
    }

    HRESULT hr = S_OK;
    float fLevel(0.0f);
    volume = 0;
    // NOTE(review): _ptrCaptureVolume is dereferenced without a NULL check —
    // presumably guaranteed non-NULL once _microphoneIsInitialized; confirm.
    _volumeMutex.Enter();
    hr = _ptrCaptureVolume->GetMasterVolumeLevelScalar(&fLevel);
    _volumeMutex.Leave();
    EXIT_ON_ERROR(hr);

    // scale input volume range [0.0,1.0] to valid output range
    volume = static_cast<WebRtc_UWord32> (fLevel*MAX_CORE_MICROPHONE_VOLUME);

    return 0;

Exit:
    _TraceCOMError(hr);
    return -1;
}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+//
+//  The internal range for Core Audio is 0.0 to 1.0, where 0.0 indicates
+//  silence and 1.0 indicates full volume (no attenuation).
+//  We add our (webrtc-internal) own max level to match the Wave API and
+//  how it is used today in VoE.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (MAX_CORE_MICROPHONE_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (MIN_CORE_MICROPHONE_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    stepSize = CORE_MICROPHONE_VOLUME_STEP_SIZE;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsCore::PlayoutDevices()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_RefreshDeviceList(eRender) != -1)
+    {
+        return (_DeviceListCount(eRender));
+    }
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
// Selects the render endpoint device at 'index' in the refreshed device
// collection. Must be called before playout is initialized. Returns 0 on
// success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SetPlayoutDevice(WebRtc_UWord16 index)
{

    if (_playIsInitialized)
    {
        return -1;
    }

    // Get current number of available rendering endpoint devices and refresh the rendering collection.
    // NOTE(review): PlayoutDevices() returns -1 on failure, which wraps to a
    // huge UINT here; and with index unsigned the 'index < 0' test below is
    // always false — confirm handling of an empty/failed device list.
    UINT nDevices = PlayoutDevices();

    if (index < 0 || index > (nDevices-1))
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
        return -1;
    }

    CriticalSectionScoped lock(_critSect);

    HRESULT hr(S_OK);

    assert(_ptrRenderCollection != NULL);

    //  Select an endpoint rendering device given the specified index
    SAFE_RELEASE(_ptrDeviceOut);
    hr = _ptrRenderCollection->Item(
                                 index,
                                 &_ptrDeviceOut);
    if (FAILED(hr))
    {
        _TraceCOMError(hr);
        SAFE_RELEASE(_ptrDeviceOut);
        return -1;
    }

    // Element count of szDeviceName (the expression divides total size by
    // element size).
    WCHAR szDeviceName[MAX_PATH];
    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];

    // Get the endpoint device's friendly-name
    if (_GetDeviceName(_ptrDeviceOut, szDeviceName, bufferLen) == 0)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
    }

    // Remember that selection was made by index (used on device refresh).
    _usingOutputDeviceIndex = true;
    _outputDeviceIndex = index;

    return 0;
}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
// Selects the default render endpoint device for the given Windows device
// type (kDefaultDevice -> eConsole role, kDefaultCommunicationDevice ->
// eCommunications role). Must be called before playout is initialized.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device)
{
    if (_playIsInitialized)
    {
        return -1;
    }

    ERole role(eCommunications);

    if (device == AudioDeviceModule::kDefaultDevice)
    {
        role = eConsole;
    }
    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
    {
        role = eCommunications;
    }

    CriticalSectionScoped lock(_critSect);

    // Refresh the list of rendering endpoint devices
    _RefreshDeviceList(eRender);

    HRESULT hr(S_OK);

    assert(_ptrEnumerator != NULL);

    //  Select an endpoint rendering device given the specified role
    SAFE_RELEASE(_ptrDeviceOut);
    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
                           eRender,
                           role,
                           &_ptrDeviceOut);
    if (FAILED(hr))
    {
        _TraceCOMError(hr);
        SAFE_RELEASE(_ptrDeviceOut);
        return -1;
    }

    // Element count of szDeviceName (total size divided by element size).
    WCHAR szDeviceName[MAX_PATH];
    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];

    // Get the endpoint device's friendly-name
    if (_GetDeviceName(_ptrDeviceOut, szDeviceName, bufferLen) == 0)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
    }

    // Remember that selection was made by role (used on device refresh).
    _usingOutputDeviceIndex = false;
    _outputDevice = device;

    return 0;
}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
// Retrieves the UTF-8 friendly name and (optionally) the endpoint ID string
// of the render device at 'index'. Index (WebRtc_UWord16)(-1) selects the
// default communication device. 'name' is required; 'guid' may be NULL.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
{

    bool defaultCommunicationDevice(false);
    const WebRtc_Word16 nDevices(PlayoutDevices());  // also updates the list of devices

    // Special fix for the case when the user selects '-1' as index (<=> Default Communication Device)
    if (index == (WebRtc_UWord16)(-1))
    {
        defaultCommunicationDevice = true;
        index = 0;
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Default Communication endpoint device will be used");
    }

    if ((index > (nDevices-1)) || (name == NULL))
    {
        return -1;
    }

    memset(name, 0, kAdmMaxDeviceNameSize);

    if (guid != NULL)
    {
        memset(guid, 0, kAdmMaxGuidSize);
    }

    CriticalSectionScoped lock(_critSect);

    HRESULT hr(S_OK);
    WebRtc_Word32 ret(-1);
    // Element count of szDeviceName (total size divided by element size).
    WCHAR szDeviceName[MAX_PATH];
    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];

    // Get the endpoint device's friendly-name
    if (defaultCommunicationDevice)
    {
        ret = _GetDefaultDeviceName(eRender, eCommunications, szDeviceName, bufferLen);
    }
    else
    {
        ret = _GetListDeviceName(eRender, index, szDeviceName, bufferLen);
    }

    if (ret == 0)
    {
        // Convert the endpoint device's friendly-name to UTF-8
        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
        }
    }

    // Get the endpoint ID string (uniquely identifies the device among all audio endpoint devices)
    if (defaultCommunicationDevice)
    {
        ret = _GetDefaultDeviceID(eRender, eCommunications, szDeviceName, bufferLen);
    }
    else
    {
        ret = _GetListDeviceID(eRender, index, szDeviceName, bufferLen);
    }

    if (guid != NULL && ret == 0)
    {
        // Convert the endpoint device's ID string to UTF-8
        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
        }
    }

    // NOTE(review): 'ret' here reflects the ID lookup, so a name-only success
    // with a failed ID lookup returns -1 even when guid is NULL — confirm.
    return ret;
}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
// Retrieves the UTF-8 friendly name and (optionally) the endpoint ID string
// of the capture device at 'index'. Index (WebRtc_UWord16)(-1) selects the
// default communication device. 'name' is required; 'guid' may be NULL.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
{

    bool defaultCommunicationDevice(false);
    const WebRtc_Word16 nDevices(RecordingDevices());  // also updates the list of devices

    // Special fix for the case when the user selects '-1' as index (<=> Default Communication Device)
    if (index == (WebRtc_UWord16)(-1))
    {
        defaultCommunicationDevice = true;
        index = 0;
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Default Communication endpoint device will be used");
    }

    if ((index > (nDevices-1)) || (name == NULL))
    {
        return -1;
    }

    memset(name, 0, kAdmMaxDeviceNameSize);

    if (guid != NULL)
    {
        memset(guid, 0, kAdmMaxGuidSize);
    }

    CriticalSectionScoped lock(_critSect);

    HRESULT hr(S_OK);
    WebRtc_Word32 ret(-1);
    // Element count of szDeviceName (total size divided by element size).
    WCHAR szDeviceName[MAX_PATH];
    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];

    // Get the endpoint device's friendly-name
    if (defaultCommunicationDevice)
    {
        ret = _GetDefaultDeviceName(eCapture, eCommunications, szDeviceName, bufferLen);
    }
    else
    {
        ret = _GetListDeviceName(eCapture, index, szDeviceName, bufferLen);
    }

    if (ret == 0)
    {
        // Convert the endpoint device's friendly-name to UTF-8
        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
        }
    }

    // Get the endpoint ID string (uniquely identifies the device among all audio endpoint devices)
    if (defaultCommunicationDevice)
    {
        ret = _GetDefaultDeviceID(eCapture, eCommunications, szDeviceName, bufferLen);
    }
    else
    {
        ret = _GetListDeviceID(eCapture, index, szDeviceName, bufferLen);
    }

    if (guid != NULL && ret == 0)
    {
        // Convert the endpoint device's ID string to UTF-8
        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
        }
    }

    // NOTE(review): 'ret' here reflects the ID lookup, so a name-only success
    // with a failed ID lookup returns -1 even when guid is NULL — confirm.
    return ret;
}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsCore::RecordingDevices()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_RefreshDeviceList(eCapture) != -1)
+    {
+        return (_DeviceListCount(eCapture));
+    }
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
// Selects the capture endpoint device at 'index' in the refreshed device
// collection. Must be called before recording is initialized. Returns 0 on
// success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SetRecordingDevice(WebRtc_UWord16 index)
{

    if (_recIsInitialized)
    {
        return -1;
    }

    // Get current number of available capture endpoint devices and refresh the capture collection.
    // NOTE(review): RecordingDevices() returns -1 on failure, which wraps to a
    // huge UINT here; and with index unsigned the 'index < 0' test below is
    // always false — confirm handling of an empty/failed device list.
    UINT nDevices = RecordingDevices();

    if (index < 0 || index > (nDevices-1))
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
        return -1;
    }

    CriticalSectionScoped lock(_critSect);

    HRESULT hr(S_OK);

    assert(_ptrCaptureCollection != NULL);

    // Select an endpoint capture device given the specified index
    SAFE_RELEASE(_ptrDeviceIn);
    hr = _ptrCaptureCollection->Item(
                                 index,
                                 &_ptrDeviceIn);
    if (FAILED(hr))
    {
        _TraceCOMError(hr);
        SAFE_RELEASE(_ptrDeviceIn);
        return -1;
    }

    // Element count of szDeviceName (total size divided by element size).
    WCHAR szDeviceName[MAX_PATH];
    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];

    // Get the endpoint device's friendly-name
    if (_GetDeviceName(_ptrDeviceIn, szDeviceName, bufferLen) == 0)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
    }

    // Remember that selection was made by index (used on device refresh).
    _usingInputDeviceIndex = true;
    _inputDeviceIndex = index;

    return 0;
}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
// Selects the default capture endpoint device for the given Windows device
// type (kDefaultDevice -> eConsole role, kDefaultCommunicationDevice ->
// eCommunications role). Must be called before recording is initialized.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 AudioDeviceWindowsCore::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device)
{
    if (_recIsInitialized)
    {
        return -1;
    }

    ERole role(eCommunications);

    if (device == AudioDeviceModule::kDefaultDevice)
    {
        role = eConsole;
    }
    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
    {
        role = eCommunications;
    }

    CriticalSectionScoped lock(_critSect);

    // Refresh the list of capture endpoint devices
    _RefreshDeviceList(eCapture);

    HRESULT hr(S_OK);

    assert(_ptrEnumerator != NULL);

    //  Select an endpoint capture device given the specified role
    SAFE_RELEASE(_ptrDeviceIn);
    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
                           eCapture,
                           role,
                           &_ptrDeviceIn);
    if (FAILED(hr))
    {
        _TraceCOMError(hr);
        SAFE_RELEASE(_ptrDeviceIn);
        return -1;
    }

    // Element count of szDeviceName (total size divided by element size).
    WCHAR szDeviceName[MAX_PATH];
    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];

    // Get the endpoint device's friendly-name
    if (_GetDeviceName(_ptrDeviceIn, szDeviceName, bufferLen) == 0)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
    }

    // Remember that selection was made by role (used on device refresh).
    _usingInputDeviceIndex = false;
    _inputDevice = device;

    return 0;
}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the recording side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::InitPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitSpeaker() failed");
+    }
+
+    // Ensure that the updated rendering endpoint device is valid
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    if (_builtInAecEnabled && _recIsInitialized)
+    {
+        // Ensure the correct render device is configured in case
+        // InitRecording() was called before InitPlayout().
+        if (SetDMOProperties() == -1)
+        {
+            return -1;
+        }
+    }
+
+    HRESULT hr = S_OK;
+    WAVEFORMATEX* pWfxOut = NULL;
+    WAVEFORMATEX Wfx;
+    WAVEFORMATEX* pWfxClosestMatch = NULL;
+
+    // Create COM object with IAudioClient interface.
+    SAFE_RELEASE(_ptrClientOut);
+    hr = _ptrDeviceOut->Activate(
+                          __uuidof(IAudioClient),
+                          CLSCTX_ALL,
+                          NULL,
+                          (void**)&_ptrClientOut);
+    EXIT_ON_ERROR(hr);
+
+    // Retrieve the stream format that the audio engine uses for its internal
+    // processing (mixing) of shared-mode streams.
+    hr = _ptrClientOut->GetMixFormat(&pWfxOut);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Audio Engine's current rendering mix format:");
+        // format type
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag     : 0x%X (%u)", pWfxOut->wFormatTag, pWfxOut->wFormatTag);
+        // number of channels (i.e. mono, stereo...)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels      : %d", pWfxOut->nChannels);
+        // sample rate
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec : %d", pWfxOut->nSamplesPerSec);
+        // for buffer estimation
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec: %d", pWfxOut->nAvgBytesPerSec);
+        // block size of data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign    : %d", pWfxOut->nBlockAlign);
+        // number of bits per sample of mono data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample : %d", pWfxOut->wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize         : %d", pWfxOut->cbSize);
+    }
+
+    // Set wave format
+    Wfx.wFormatTag = WAVE_FORMAT_PCM;
+    Wfx.wBitsPerSample = 16;
+    Wfx.cbSize = 0;
+
+    const int freqs[] = {48000, 44100, 16000, 96000, 32000, 8000};
+    hr = S_FALSE;
+
+    // Iterate over frequencies and channels, in order of priority
+    for (int freq = 0; freq < sizeof(freqs)/sizeof(freqs[0]); freq++)
+    {
+        for (int chan = 0; chan < sizeof(_playChannelsPrioList)/sizeof(_playChannelsPrioList[0]); chan++)
+        {
+            Wfx.nChannels = _playChannelsPrioList[chan];
+            Wfx.nSamplesPerSec = freqs[freq];
+            Wfx.nBlockAlign = Wfx.nChannels * Wfx.wBitsPerSample / 8;
+            Wfx.nAvgBytesPerSec = Wfx.nSamplesPerSec * Wfx.nBlockAlign;
+            // If the method succeeds and the audio endpoint device supports the specified stream format,
+            // it returns S_OK. If the method succeeds and provides a closest match to the specified format,
+            // it returns S_FALSE.
+            hr = _ptrClientOut->IsFormatSupported(
+                                  AUDCLNT_SHAREMODE_SHARED,
+                                  &Wfx,
+                                  &pWfxClosestMatch);
+            if (hr == S_OK)
+            {
+                break;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels=%d, nSamplesPerSec=%d is not supported",
+                    Wfx.nChannels, Wfx.nSamplesPerSec);
+            }
+        }
+        if (hr == S_OK)
+            break;
+    }
+
+    // TODO(andrew): what happens in the event of failure in the above loop?
+    //   Is _ptrClientOut->Initialize expected to fail?
+    //   Same in InitRecording().
+    if (hr == S_OK)
+    {
+        _playAudioFrameSize = Wfx.nBlockAlign;
+        _playBlockSize = Wfx.nSamplesPerSec/100;
+        _playSampleRate = Wfx.nSamplesPerSec;
+        _devicePlaySampleRate = Wfx.nSamplesPerSec; // The device itself continues to run at 44.1 kHz.
+        _devicePlayBlockSize = Wfx.nSamplesPerSec/100;
+        if (_playBlockSize == 441)
+        {
+            _playSampleRate = 44000;    // we are actually running at 44000 Hz and *not* 44100 Hz
+            _playBlockSize = 440;       // adjust to size we can handle
+        }
+        _playChannels = Wfx.nChannels;
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "VoE selected this rendering format:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag         : 0x%X (%u)", Wfx.wFormatTag, Wfx.wFormatTag);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels          : %d", Wfx.nChannels);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec     : %d", Wfx.nSamplesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec    : %d", Wfx.nAvgBytesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign        : %d", Wfx.nBlockAlign);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample     : %d", Wfx.wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize             : %d", Wfx.cbSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Additional settings:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_playAudioFrameSize: %d", _playAudioFrameSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_playBlockSize     : %d", _playBlockSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_playChannels      : %d", _playChannels);
+    }
+
+    _Get44kHzDrift();
+
+    // Create a rendering stream.
+    //
+    // ****************************************************************************
+    // For a shared-mode stream that uses event-driven buffering, the caller must
+    // set both hnsPeriodicity and hnsBufferDuration to 0. The Initialize method
+    // determines how large a buffer to allocate based on the scheduling period
+    // of the audio engine. Although the client's buffer processing thread is
+    // event driven, the basic buffer management process, as described previously,
+    // is unaltered.
+    // Each time the thread awakens, it should call IAudioClient::GetCurrentPadding
+    // to determine how much data to write to a rendering buffer or read from a capture
+    // buffer. In contrast to the two buffers that the Initialize method allocates
+    // for an exclusive-mode stream that uses event-driven buffering, a shared-mode
+    // stream requires a single buffer.
+    // ****************************************************************************
+    //
+    REFERENCE_TIME hnsBufferDuration = 0;  // ask for minimum buffer size (default)
+    if (_devicePlaySampleRate == 44100)
+    {
+        // Ask for a larger buffer size (30ms) when using 44.1kHz as render rate.
+        // There seems to be a larger risk of underruns for 44.1 compared
+        // with the default rate (48kHz). When using default, we set the requested
+        // buffer duration to 0, which sets the buffer to the minimum size
+        // required by the engine thread. The actual buffer size can then be
+        // read by GetBufferSize() and it is 20ms on most machines.
+        hnsBufferDuration = 30*10000;
+    }
+    hr = _ptrClientOut->Initialize(
+                          AUDCLNT_SHAREMODE_SHARED,             // share Audio Engine with other applications
+                          AUDCLNT_STREAMFLAGS_EVENTCALLBACK,    // processing of the audio buffer by the client will be event driven
+                          hnsBufferDuration,                    // requested buffer capacity as a time value (in 100-nanosecond units)
+                          0,                                    // periodicity
+                          &Wfx,                                 // selected wave format
+                          NULL);                                // session GUID
+
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "IAudioClient::Initialize() failed:");
+        if (pWfxClosestMatch != NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "closest mix format: #channels=%d, samples/sec=%d, bits/sample=%d",
+                pWfxClosestMatch->nChannels, pWfxClosestMatch->nSamplesPerSec, pWfxClosestMatch->wBitsPerSample);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "no format suggested");
+        }
+    }
+    EXIT_ON_ERROR(hr);
+
+    if (_ptrAudioBuffer)
+    {
+        // Update the audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(_playSampleRate);
+        _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8)_playChannels);
+    }
+    else
+    {
+        // We can enter this state during CoreAudioIsSupported() when no AudioDeviceImplementation
+        // has been created, hence the AudioDeviceBuffer does not exist.
+        // It is OK to end up here since we don't initiate any media in CoreAudioIsSupported().
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceBuffer must be attached before streaming can start");
+    }
+
+    // Get the actual size of the shared (endpoint buffer).
+    // Typical value is 960 audio frames <=> 20ms @ 48kHz sample rate.
+    UINT bufferFrameCount(0);
+    hr = _ptrClientOut->GetBufferSize(
+                          &bufferFrameCount);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "IAudioClient::GetBufferSize() => %u (<=> %u bytes)",
+            bufferFrameCount, bufferFrameCount*_playAudioFrameSize);
+    }
+
+    // Set the event handle that the system signals when an audio buffer is ready
+    // to be processed by the client.
+    hr = _ptrClientOut->SetEventHandle(
+                          _hRenderSamplesReadyEvent);
+    EXIT_ON_ERROR(hr);
+
+    // Get an IAudioRenderClient interface.
+    SAFE_RELEASE(_ptrRenderClient);
+    hr = _ptrClientOut->GetService(
+                          __uuidof(IAudioRenderClient),
+                          (void**)&_ptrRenderClient);
+    EXIT_ON_ERROR(hr);
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+
+    CoTaskMemFree(pWfxOut);
+    CoTaskMemFree(pWfxClosestMatch);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "render side is now initialized");
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    CoTaskMemFree(pWfxOut);
+    CoTaskMemFree(pWfxClosestMatch);
+    SAFE_RELEASE(_ptrClientOut);
+    SAFE_RELEASE(_ptrRenderClient);
+    return -1;
+}
+
// Capture initialization when the built-in AEC DirectX Media Object (DMO) is
// used. Called from InitRecording(), most of which is skipped over. The DMO
// handles device initialization itself.
// Reference: http://msdn.microsoft.com/en-us/library/ff819492(v=vs.85).aspx
//
// Configures the DMO output type to 16 kHz / 16-bit / mono PCM, mirrors that
// format into the VoE capture parameters, allocates the media buffer used to
// poll the DMO, and pre-allocates the DMO's streaming resources.
// Returns 0 on success, -1 on any failure (errors are traced).
WebRtc_Word32 AudioDeviceWindowsCore::InitRecordingDMO()
{
    assert(_builtInAecEnabled);
    assert(_dmo != NULL);

    if (SetDMOProperties() == -1)
    {
        return -1;
    }

    // Allocate a WAVEFORMATEX-sized format block inside the media type.
    DMO_MEDIA_TYPE mt = {0};
    HRESULT hr = MoInitMediaType(&mt, sizeof(WAVEFORMATEX));
    if (FAILED(hr))
    {
        MoFreeMediaType(&mt);
        _TraceCOMError(hr);
        return -1;
    }
    mt.majortype = MEDIATYPE_Audio;
    mt.subtype = MEDIASUBTYPE_PCM;
    mt.formattype = FORMAT_WaveFormatEx;

    // Supported formats
    // nChannels: 1 (in AEC-only mode)
    // nSamplesPerSec: 8000, 11025, 16000, 22050
    // wBitsPerSample: 16
    WAVEFORMATEX* ptrWav = reinterpret_cast<WAVEFORMATEX*>(mt.pbFormat);
    ptrWav->wFormatTag = WAVE_FORMAT_PCM;
    ptrWav->nChannels = 1;
    // 16000 is the highest we can support with our resampler.
    ptrWav->nSamplesPerSec = 16000;
    ptrWav->nAvgBytesPerSec = 32000;
    ptrWav->nBlockAlign = 2;
    ptrWav->wBitsPerSample = 16;
    ptrWav->cbSize = 0;

    // Set the VoE format equal to the AEC output format.
    _recAudioFrameSize = ptrWav->nBlockAlign;
    _recSampleRate = ptrWav->nSamplesPerSec;
    _recBlockSize = ptrWav->nSamplesPerSec / 100;  // 10 ms worth of samples
    _recChannels = ptrWav->nChannels;

    // Set the DMO output format parameters.
    hr = _dmo->SetOutputType(kAecCaptureStreamIndex, &mt, 0);
    MoFreeMediaType(&mt);  // the DMO keeps its own copy; free ours either way
    if (FAILED(hr))
    {
        _TraceCOMError(hr);
        return -1;
    }

    if (_ptrAudioBuffer)
    {
        _ptrAudioBuffer->SetRecordingSampleRate(_recSampleRate);
        _ptrAudioBuffer->SetRecordingChannels(_recChannels);
    }
    else
    {
        // Refer to InitRecording() for comments.
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
            "AudioDeviceBuffer must be attached before streaming can start");
    }

    // Buffer sized for one 10 ms block of the negotiated capture format.
    _mediaBuffer = new MediaBufferImpl(_recBlockSize * _recAudioFrameSize);

    // Optional, but if called, must be after media types are set.
    hr = _dmo->AllocateStreamingResources();
    if (FAILED(hr))
    {
         _TraceCOMError(hr);
        return -1;
    }

    _recIsInitialized = true;
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
        "Capture side is now initialized");

    return 0;
}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::InitRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    if (QueryPerformanceFrequency(&_perfCounterFreq) == 0)
+    {
+        return -1;
+    }
+    _perfCounterFactor = 10000000.0 / (double)_perfCounterFreq.QuadPart;
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitMicrophone() failed");
+    }
+
+    // Ensure that the updated capturing endpoint device is valid
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    if (_builtInAecEnabled)
+    {
+        // The DMO will configure the capture device.
+        return InitRecordingDMO();
+    }
+
+    HRESULT hr = S_OK;
+    WAVEFORMATEX* pWfxIn = NULL;
+    WAVEFORMATEX Wfx;
+    WAVEFORMATEX* pWfxClosestMatch = NULL;
+
+    // Create COM object with IAudioClient interface.
+    SAFE_RELEASE(_ptrClientIn);
+    hr = _ptrDeviceIn->Activate(
+                          __uuidof(IAudioClient),
+                          CLSCTX_ALL,
+                          NULL,
+                          (void**)&_ptrClientIn);
+    EXIT_ON_ERROR(hr);
+
+    // Retrieve the stream format that the audio engine uses for its internal
+    // processing (mixing) of shared-mode streams.
+    hr = _ptrClientIn->GetMixFormat(&pWfxIn);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Audio Engine's current capturing mix format:");
+        // format type
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag     : 0x%X (%u)", pWfxIn->wFormatTag, pWfxIn->wFormatTag);
+        // number of channels (i.e. mono, stereo...)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels      : %d", pWfxIn->nChannels);
+        // sample rate
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec : %d", pWfxIn->nSamplesPerSec);
+        // for buffer estimation
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec: %d", pWfxIn->nAvgBytesPerSec);
+        // block size of data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign    : %d", pWfxIn->nBlockAlign);
+        // number of bits per sample of mono data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample : %d", pWfxIn->wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize         : %d", pWfxIn->cbSize);
+    }
+
+    // Set wave format
+    Wfx.wFormatTag = WAVE_FORMAT_PCM;
+    Wfx.wBitsPerSample = 16;
+    Wfx.cbSize = 0;
+
+    const int freqs[6] = {48000, 44100, 16000, 96000, 32000, 8000};
+    hr = S_FALSE;
+
+    // Iterate over frequencies and channels, in order of priority
+    for (int freq = 0; freq < sizeof(freqs)/sizeof(freqs[0]); freq++)
+    {
+        for (int chan = 0; chan < sizeof(_recChannelsPrioList)/sizeof(_recChannelsPrioList[0]); chan++)
+        {
+            Wfx.nChannels = _recChannelsPrioList[chan];
+            Wfx.nSamplesPerSec = freqs[freq];
+            Wfx.nBlockAlign = Wfx.nChannels * Wfx.wBitsPerSample / 8;
+            Wfx.nAvgBytesPerSec = Wfx.nSamplesPerSec * Wfx.nBlockAlign;
+            // If the method succeeds and the audio endpoint device supports the specified stream format,
+            // it returns S_OK. If the method succeeds and provides a closest match to the specified format,
+            // it returns S_FALSE.
+            hr = _ptrClientIn->IsFormatSupported(
+                                  AUDCLNT_SHAREMODE_SHARED,
+                                  &Wfx,
+                                  &pWfxClosestMatch);
+            if (hr == S_OK)
+            {
+                break;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels=%d, nSamplesPerSec=%d is not supported",
+                    Wfx.nChannels, Wfx.nSamplesPerSec);
+            }
+        }
+        if (hr == S_OK)
+            break;
+    }
+
+    if (hr == S_OK)
+    {
+        _recAudioFrameSize = Wfx.nBlockAlign;
+        _recSampleRate = Wfx.nSamplesPerSec;
+        _recBlockSize = Wfx.nSamplesPerSec/100;
+        _recChannels = Wfx.nChannels;
+        if (_recBlockSize == 441)
+        {
+            _recSampleRate = 44000; // we are actually using 44000 Hz and *not* 44100 Hz
+            _recBlockSize = 440;    // adjust to size we can handle
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "VoE selected this capturing format:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag        : 0x%X (%u)", Wfx.wFormatTag, Wfx.wFormatTag);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels         : %d", Wfx.nChannels);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec    : %d", Wfx.nSamplesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec   : %d", Wfx.nAvgBytesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign       : %d", Wfx.nBlockAlign);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample    : %d", Wfx.wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize            : %d", Wfx.cbSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Additional settings:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recAudioFrameSize: %d", _recAudioFrameSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recBlockSize     : %d", _recBlockSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recChannels      : %d", _recChannels);
+    }
+
+    _Get44kHzDrift();
+
+    // Create a capturing stream.
+    hr = _ptrClientIn->Initialize(
+                          AUDCLNT_SHAREMODE_SHARED,             // share Audio Engine with other applications
+                          AUDCLNT_STREAMFLAGS_EVENTCALLBACK |   // processing of the audio buffer by the client will be event driven
+                          AUDCLNT_STREAMFLAGS_NOPERSIST,        // volume and mute settings for an audio session will not persist across system restarts
+                          0,                                    // required for event-driven shared mode
+                          0,                                    // periodicity
+                          &Wfx,                                 // selected wave format
+                          NULL);                                // session GUID
+
+
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "IAudioClient::Initialize() failed:");
+        if (pWfxClosestMatch != NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "closest mix format: #channels=%d, samples/sec=%d, bits/sample=%d",
+                pWfxClosestMatch->nChannels, pWfxClosestMatch->nSamplesPerSec, pWfxClosestMatch->wBitsPerSample);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "no format suggested");
+        }
+    }
+    EXIT_ON_ERROR(hr);
+
+    if (_ptrAudioBuffer)
+    {
+        // Update the audio buffer with the selected parameters
+        _ptrAudioBuffer->SetRecordingSampleRate(_recSampleRate);
+        _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8)_recChannels);
+    }
+    else
+    {
+        // We can enter this state during CoreAudioIsSupported() when no AudioDeviceImplementation
+        // has been created, hence the AudioDeviceBuffer does not exist.
+        // It is OK to end up here since we don't initiate any media in CoreAudioIsSupported().
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceBuffer must be attached before streaming can start");
+    }
+
+    // Get the actual size of the shared (endpoint buffer).
+    // Typical value is 960 audio frames <=> 20ms @ 48kHz sample rate.
+    UINT bufferFrameCount(0);
+    hr = _ptrClientIn->GetBufferSize(
+                          &bufferFrameCount);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "IAudioClient::GetBufferSize() => %u (<=> %u bytes)",
+            bufferFrameCount, bufferFrameCount*_recAudioFrameSize);
+    }
+
+    // Set the event handle that the system signals when an audio buffer is ready
+    // to be processed by the client.
+    hr = _ptrClientIn->SetEventHandle(
+                          _hCaptureSamplesReadyEvent);
+    EXIT_ON_ERROR(hr);
+
+    // Get an IAudioCaptureClient interface.
+    SAFE_RELEASE(_ptrCaptureClient);
+    hr = _ptrClientIn->GetService(
+                          __uuidof(IAudioCaptureClient),
+                          (void**)&_ptrCaptureClient);
+    EXIT_ON_ERROR(hr);
+
+    // Mark capture side as initialized
+    _recIsInitialized = true;
+
+    CoTaskMemFree(pWfxIn);
+    CoTaskMemFree(pWfxClosestMatch);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "capture side is now initialized");
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    CoTaskMemFree(pWfxIn);
+    CoTaskMemFree(pWfxClosestMatch);
+    SAFE_RELEASE(_ptrClientIn);
+    SAFE_RELEASE(_ptrCaptureClient);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_hRecThread != NULL)
+    {
+        return 0;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    HRESULT hr = S_OK;
+    {
+        CriticalSectionScoped critScoped(_critSect);
+
+        // Create thread which will drive the capturing
+        LPTHREAD_START_ROUTINE lpStartAddress = WSAPICaptureThread;
+        if (_builtInAecEnabled)
+        {
+            // Redirect to the DMO polling method.
+            lpStartAddress = WSAPICaptureThreadPollDMO;
+
+            if (!_playing)
+            {
+                // The DMO won't provide us captured output data unless we
+                // give it render data to process.
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "Playout must be started before recording when using the "
+                    "built-in AEC");
+                return -1;
+            }
+        }
+
+        assert(_hRecThread == NULL);
+        _hRecThread = CreateThread(NULL,
+                                   0,
+                                   lpStartAddress,
+                                   this,
+                                   0,
+                                   NULL);
+        if (_hRecThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "failed to create the recording thread");
+            return -1;
+        }
+
+        // Set thread priority to highest possible
+        SetThreadPriority(_hRecThread, THREAD_PRIORITY_TIME_CRITICAL);
+
+        assert(_hGetCaptureVolumeThread == NULL);
+        _hGetCaptureVolumeThread = CreateThread(NULL,
+                                                0,
+                                                GetCaptureVolumeThread,
+                                                this,
+                                                0,
+                                                NULL);
+        if (_hGetCaptureVolumeThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create the volume getter thread");
+            return -1;
+        }
+
+        assert(_hSetCaptureVolumeThread == NULL);
+        _hSetCaptureVolumeThread = CreateThread(NULL,
+                                                0,
+                                                SetCaptureVolumeThread,
+                                                this,
+                                                0,
+                                                NULL);
+        if (_hSetCaptureVolumeThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create the volume setter thread");
+            return -1;
+        }
+    }  // critScoped
+
+    DWORD ret = WaitForSingleObject(_hCaptureStartedEvent, 1000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "capturing did not start up properly");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "capture audio stream has now started...");
+
+    _avgCPULoad = 0.0f;
+    _playAcc = 0;
+    _recording = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StopRecording()
+{
+    WebRtc_Word32 err = 0;
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    _Lock();
+
+    if (_hRecThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "no capturing stream is active => close down WASAPI only");
+        SAFE_RELEASE(_ptrClientIn);
+        SAFE_RELEASE(_ptrCaptureClient);
+        _recIsInitialized = false;
+        _recording = false;
+        _UnLock();
+        return 0;
+    }
+
+    // Stop the driving thread...
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "closing down the webrtc_core_audio_capture_thread...");
+    // Manual-reset event; it will remain signalled to stop all capture threads.
+    SetEvent(_hShutdownCaptureEvent);
+
+    _UnLock();
+    DWORD ret = WaitForSingleObject(_hRecThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "failed to close down webrtc_core_audio_capture_thread");
+        err = -1;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "webrtc_core_audio_capture_thread is now closed");
+    }
+
+    ret = WaitForSingleObject(_hGetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to close down volume getter thread");
+        err = -1;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "  volume getter thread is now closed");
+    }
+
+    ret = WaitForSingleObject(_hSetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to close down volume setter thread");
+        err = -1;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "  volume setter thread is now closed");
+    }
+    _Lock();
+
+    ResetEvent(_hShutdownCaptureEvent); // Must be manually reset.
+    // Ensure that the thread has released these interfaces properly.
+    assert(err == -1 || _ptrClientIn == NULL);
+    assert(err == -1 || _ptrCaptureClient == NULL);
+
+    _recIsInitialized = false;
+    _recording = false;
+
+    // These will create thread leaks in the result of an error,
+    // but we can at least resume the call.
+    CloseHandle(_hRecThread);
+    _hRecThread = NULL;
+
+    CloseHandle(_hGetCaptureVolumeThread);
+    _hGetCaptureVolumeThread = NULL;
+
+    CloseHandle(_hSetCaptureVolumeThread);
+    _hSetCaptureVolumeThread = NULL;
+
+    if (_builtInAecEnabled)
+    {
+        assert(_dmo != NULL);
+        // This is necessary. Otherwise the DMO can generate garbage render
+        // audio even after rendering has stopped.
+        HRESULT hr = _dmo->FreeStreamingResources();
+        if (FAILED(hr))
+        {
+            _TraceCOMError(hr);
+            err = -1;
+        }
+    }
+
+    _UnLock();
+
+    // Reset the recording delay value.
+    _sndCardRecDelay = 0;
+
+    return err;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::Recording() const
+{
+    return (_recording);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::PlayoutIsInitialized() const
+{
+
+    return (_playIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StartPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_hPlayThread != NULL)
+    {
+        return 0;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    HRESULT hr = S_OK;
+    {
+        CriticalSectionScoped critScoped(_critSect);
+
+        // Create thread which will drive the rendering.
+        assert(_hPlayThread == NULL);
+        _hPlayThread = CreateThread(
+                         NULL,
+                         0,
+                         WSAPIRenderThread,
+                         this,
+                         0,
+                         NULL);
+        if (_hPlayThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "failed to create the playout thread");
+            return -1;
+        }
+
+        // Set thread priority to highest possible.
+        SetThreadPriority(_hPlayThread, THREAD_PRIORITY_TIME_CRITICAL);
+    }  // critScoped
+
+    DWORD ret = WaitForSingleObject(_hRenderStartedEvent, 1000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "rendering did not start up properly");
+        return -1;
+    }
+
+    _playing = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "rendering audio stream has now started...");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StopPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped critScoped(_critSect) ;
+
+        if (_hPlayThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "no rendering stream is active => close down WASAPI only");
+            SAFE_RELEASE(_ptrClientOut);
+            SAFE_RELEASE(_ptrRenderClient);
+            _playIsInitialized = false;
+            _playing = false;
+            return 0;
+        }
+
+        // stop the driving thread...
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "closing down the webrtc_core_audio_render_thread...");
+        SetEvent(_hShutdownRenderEvent);
+    }  // critScoped
+
+    DWORD ret = WaitForSingleObject(_hPlayThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "failed to close down webrtc_core_audio_render_thread");
+        CloseHandle(_hPlayThread);
+        _hPlayThread = NULL;
+        _playIsInitialized = false;
+        _playing = false;
+        return -1;
+    }
+
+    {
+        CriticalSectionScoped critScoped(_critSect);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "webrtc_core_audio_render_thread is now closed");
+        
+        // to reset this event manually at each time we finish with it, 
+        // in case that the render thread has exited before StopPlayout(),
+        // this event might be caught by the new render thread within same VoE instance.
+        ResetEvent(_hShutdownRenderEvent); 
+
+        SAFE_RELEASE(_ptrClientOut);
+        SAFE_RELEASE(_ptrRenderClient);
+       
+        _playIsInitialized = false;
+        _playing = false;
+
+        CloseHandle(_hPlayThread);
+        _hPlayThread = NULL;
+
+        if (_builtInAecEnabled && _recording)
+        {
+            // The DMO won't provide us captured output data unless we
+            // give it render data to process.
+            //
+            // We still permit the playout to shutdown, and trace a warning.
+            // Otherwise, VoE can get into a state which will never permit
+            // playout to stop properly.
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "Recording should be stopped before playout when using the "
+                "built-in AEC");
+        }
+    }  // critScoped
+
+    // Reset the playout delay value.
+    _sndCardPlayDelay = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = static_cast<WebRtc_UWord16>(_sndCardPlayDelay.Value());
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = static_cast<WebRtc_UWord16>(_sndCardRecDelay.Value());
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    _playBufType = type;
+
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        _playBufDelayFixed = sizeMS;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const
+{
+  {
+    CriticalSectionScoped lock(_critSect);
+    type = _playBufType;
+  }
+
+  if (type == AudioDeviceModule::kFixedBufferSize)
+  {
+    CriticalSectionScoped lock(_critSect);
+    sizeMS = _playBufDelayFixed;
+  }
+  else
+  {
+    // Use same value as for PlayoutDelay
+    sizeMS = static_cast<WebRtc_UWord16>(_sndCardPlayDelay.Value());
+  }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::CPULoad(WebRtc_UWord16& load) const
+{
+
+    load = static_cast<WebRtc_UWord16> (100*_avgCPULoad);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::PlayoutWarning() const
+{
+    return ( _playWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::PlayoutError() const
+{
+    return ( _playError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::RecordingWarning() const
+{
+    return ( _recWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::RecordingError() const
+{
+    return ( _recError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  [static] WSAPIRenderThread
+// ----------------------------------------------------------------------------
+
+DWORD WINAPI AudioDeviceWindowsCore::WSAPIRenderThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoRenderThread();
+}
+
+// ----------------------------------------------------------------------------
+//  [static] WSAPICaptureThread
+// ----------------------------------------------------------------------------
+
+DWORD WINAPI AudioDeviceWindowsCore::WSAPICaptureThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoCaptureThread();
+}
+
+DWORD WINAPI AudioDeviceWindowsCore::WSAPICaptureThreadPollDMO(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoCaptureThreadPollDMO();
+}
+
+DWORD WINAPI AudioDeviceWindowsCore::GetCaptureVolumeThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoGetCaptureVolumeThread();
+}
+
+DWORD WINAPI AudioDeviceWindowsCore::SetCaptureVolumeThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoSetCaptureVolumeThread();
+}
+
+DWORD AudioDeviceWindowsCore::DoGetCaptureVolumeThread()
+{
+    HANDLE waitObject = _hShutdownCaptureEvent;
+
+    while (1)
+    {
+        if (AGC())
+        {
+            WebRtc_UWord32 currentMicLevel = 0;
+            if (MicrophoneVolume(currentMicLevel) == 0)
+            {
+                // This doesn't set the system volume, just stores it.
+                _Lock();
+                if (_ptrAudioBuffer)
+                {
+                    _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+                }
+                _UnLock();
+            }
+        }
+
+        DWORD waitResult = WaitForSingleObject(waitObject,
+                                               GET_MIC_VOLUME_INTERVAL_MS);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0: // _hShutdownCaptureEvent
+                return 0;
+            case WAIT_TIMEOUT:  // timeout notification
+                break;
+            default:            // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on get volume thread");
+                return -1;
+        }
+    }
+}
+
+DWORD AudioDeviceWindowsCore::DoSetCaptureVolumeThread()
+{
+    HANDLE waitArray[2] = {_hShutdownCaptureEvent, _hSetCaptureVolumeEvent};
+
+    while (1)
+    {
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0:      // _hShutdownCaptureEvent
+                return 0;
+            case WAIT_OBJECT_0 + 1:  // _hSetCaptureVolumeEvent
+                break;
+            default:                 // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on set volume thread");
+                    return -1;
+        }
+
+        _Lock();
+        WebRtc_UWord32 newMicLevel = _newMicLevel;
+        _UnLock();
+
+        if (SetMicrophoneVolume(newMicLevel) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "  the required modification of the microphone volume failed");
+        }
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  DoRenderThread
+// ----------------------------------------------------------------------------
+
+DWORD AudioDeviceWindowsCore::DoRenderThread()
+{
+
+    bool keepPlaying = true;
+    HANDLE waitArray[2] = {_hShutdownRenderEvent, _hRenderSamplesReadyEvent};
+    HRESULT hr = S_OK;
+    HANDLE hMmTask = NULL;
+
+    LARGE_INTEGER t1;
+    LARGE_INTEGER t2;
+    WebRtc_Word32 time(0);
+
+    // Initialize COM as MTA in this thread.
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+          "failed to initialize COM in render thread");
+      return -1;
+    }
+
+    _SetThreadName(-1, "webrtc_core_audio_render_thread");
+
+    // Use Multimedia Class Scheduler Service (MMCSS) to boost the thread priority.
+    //
+    if (_winSupportAvrt)
+    {
+        DWORD taskIndex(0);
+        hMmTask = _PAvSetMmThreadCharacteristicsA("Pro Audio", &taskIndex);
+        if (hMmTask)
+        {
+            if (FALSE == _PAvSetMmThreadPriority(hMmTask, AVRT_PRIORITY_CRITICAL))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to boost play-thread using MMCSS");
+            }
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "render thread is now registered with MMCSS (taskIndex=%d)", taskIndex);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to enable MMCSS on render thread (err=%d)", GetLastError());
+            _TraceCOMError(GetLastError());
+        }
+    }
+
+    _Lock();
+
+    // Get size of rendering buffer (length is expressed as the number of audio frames the buffer can hold).
+    // This value is fixed during the rendering session.
+    //
+    UINT32 bufferLength = 0;
+    hr = _ptrClientOut->GetBufferSize(&bufferLength);
+    EXIT_ON_ERROR(hr);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] size of buffer       : %u", bufferLength);
+
+    // Get maximum latency for the current stream (will not change for the lifetime  of the IAudioClient object).
+    //
+    REFERENCE_TIME latency;
+    _ptrClientOut->GetStreamLatency(&latency);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] max stream latency   : %u (%3.2f ms)",
+        (DWORD)latency, (double)(latency/10000.0));
+
+    // Get the length of the periodic interval separating successive processing passes by
+    // the audio engine on the data in the endpoint buffer.
+    //
+    // The period between processing passes by the audio engine is fixed for a particular
+    // audio endpoint device and represents the smallest processing quantum for the audio engine.
+    // This period plus the stream latency between the buffer and endpoint device represents
+    // the minimum possible latency that an audio application can achieve.
+    // Typical value: 100000 <=> 0.01 sec = 10ms.
+    //
+    REFERENCE_TIME devPeriod = 0;
+    REFERENCE_TIME devPeriodMin = 0;
+    _ptrClientOut->GetDevicePeriod(&devPeriod, &devPeriodMin);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] device period        : %u (%3.2f ms)",
+        (DWORD)devPeriod, (double)(devPeriod/10000.0));
+
+    //  The Event Driven renderer will be woken up every defaultDevicePeriod hundred-nano-seconds.
+    //  Convert that time into a number of frames.
+    //
+    double devicePeriodInSeconds = devPeriod / (10000.0*1000.0);
+    UINT32 devicePeriodInFrames = static_cast<UINT32>(_playSampleRate * devicePeriodInSeconds + 0.5);
+
+    // Derive inital rendering delay.
+    // Example: 10*(960/480) + 15 = 20 + 15 = 35ms
+    //
+    int playout_delay = 10 * (bufferLength / _playBlockSize) +
+        (int)((latency + devPeriod) / 10000);
+    _sndCardPlayDelay = playout_delay;
+    _writtenSamples = 0;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "[REND] initial delay        : %u", playout_delay);
+
+    double endpointBufferSizeMS = 10.0 * ((double)bufferLength / (double)_devicePlayBlockSize);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] endpointBufferSizeMS : %3.2f", endpointBufferSizeMS);
+
+    // Before starting the stream, fill the rendering buffer with silence.
+    //
+    BYTE *pData = NULL;
+    hr = _ptrRenderClient->GetBuffer(bufferLength, &pData);
+    EXIT_ON_ERROR(hr);
+
+    hr = _ptrRenderClient->ReleaseBuffer(bufferLength, AUDCLNT_BUFFERFLAGS_SILENT);
+    EXIT_ON_ERROR(hr);
+
+    _writtenSamples += bufferLength;
+
+    IAudioClock* clock = NULL;
+    hr = _ptrClientOut->GetService(__uuidof(IAudioClock), (void**)&clock);
+    if (FAILED(hr)) {
+      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "failed to get IAudioClock interface from the IAudioClient");
+    }
+
+    // Start up the rendering audio stream.
+    hr = _ptrClientOut->Start();
+    EXIT_ON_ERROR(hr);
+
+    _UnLock();
+
+    // Set event which will ensure that the calling thread modifies the playing state to true.
+    //
+    SetEvent(_hRenderStartedEvent);
+
+    // >> ------------------ THREAD LOOP ------------------
+
+    while (keepPlaying)
+    {
+        // Wait for a render notification event or a shutdown event
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, 500);
+        switch (waitResult)
+        {
+        case WAIT_OBJECT_0 + 0:     // _hShutdownRenderEvent
+            keepPlaying = false;
+            break;
+        case WAIT_OBJECT_0 + 1:     // _hRenderSamplesReadyEvent
+            break;
+        case WAIT_TIMEOUT:          // timeout notification
+            _ptrClientOut->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "render event timed out after 0.5 seconds");
+            goto Exit;
+        default:                    // unexpected error
+            _ptrClientOut->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "unknown wait termination on render side");
+            goto Exit;
+        }
+
+        while (keepPlaying)
+        {
+            _Lock();
+
+            // Get the number of frames of padding (queued up to play) in the endpoint buffer.
+            UINT32 padding = 0;
+            hr = _ptrClientOut->GetCurrentPadding(&padding);
+            EXIT_ON_ERROR(hr);
+
+            // Derive the amount of available space in the output buffer
+            WebRtc_UWord32 framesAvailable = bufferLength - padding;
+            // WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "#avaliable audio frames = %u", framesAvailable);
+
+            // Do we have 10 ms available in the render buffer?
+            if (framesAvailable < _playBlockSize)
+            {
+                // Not enough space in render buffer to store next render packet.
+                _UnLock();
+                break;
+            }
+
+            // Write n*10ms buffers to the render buffer
+            const WebRtc_UWord32 n10msBuffers = (framesAvailable / _playBlockSize);
+            for (WebRtc_UWord32 n = 0; n < n10msBuffers; n++)
+            {
+                // Get pointer (i.e., grab the buffer) to next space in the shared render buffer.
+                hr = _ptrRenderClient->GetBuffer(_playBlockSize, &pData);
+                EXIT_ON_ERROR(hr);
+
+                QueryPerformanceCounter(&t1);    // measure time: START
+
+                if (_ptrAudioBuffer)
+                {
+                    // Request data to be played out (#bytes = _playBlockSize*_audioFrameSize)
+                    _UnLock();
+                    WebRtc_Word32 nSamples =
+                    _ptrAudioBuffer->RequestPlayoutData(_playBlockSize);
+                    _Lock();
+
+                    if (nSamples == -1) 
+                    {
+                        _UnLock();
+                        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                                     "failed to read data from render client");
+                        goto Exit;
+                    }
+
+                    // Sanity check to ensure that essential states are not modified during the unlocked period
+                    if (_ptrRenderClient == NULL || _ptrClientOut == NULL)
+                    {
+                        _UnLock();
+                        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "output state has been modified during unlocked period");
+                        goto Exit;
+                    }
+                    if (nSamples != _playBlockSize)
+                    {
+                        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "nSamples(%d) != _playBlockSize(%d)", nSamples, _playBlockSize);
+                    }
+
+                    // Get the actual (stored) data
+                    nSamples = _ptrAudioBuffer->GetPlayoutData((WebRtc_Word8*)pData);
+                }
+
+                QueryPerformanceCounter(&t2);    // measure time: STOP
+                time = (int)(t2.QuadPart-t1.QuadPart);
+                _playAcc += time;
+
+                DWORD dwFlags(0);
+                hr = _ptrRenderClient->ReleaseBuffer(_playBlockSize, dwFlags);
+                // See http://msdn.microsoft.com/en-us/library/dd316605(VS.85).aspx
+                // for more details regarding AUDCLNT_E_DEVICE_INVALIDATED.
+                EXIT_ON_ERROR(hr);
+
+                _writtenSamples += _playBlockSize;
+            }
+
+            // Check the current delay on the playout side.
+            if (clock) {
+              UINT64 pos = 0;
+              UINT64 freq = 1;
+              clock->GetPosition(&pos, NULL);
+              clock->GetFrequency(&freq);
+              playout_delay = ROUND((double(_writtenSamples) /
+                  _devicePlaySampleRate - double(pos) / freq) * 1000.0);
+              _sndCardPlayDelay = playout_delay;
+            }
+
+            _UnLock();
+        }
+    }
+
+    // ------------------ THREAD LOOP ------------------ <<
+
+    Sleep(static_cast<DWORD>(endpointBufferSizeMS+0.5));
+    hr = _ptrClientOut->Stop();
+
+Exit:
+    SAFE_RELEASE(clock);
+
+    if (FAILED(hr))
+    {
+        _UnLock();
+        _ptrClientOut->Stop();
+        _TraceCOMError(hr);
+    }
+
+    if (_winSupportAvrt)
+    {
+        if (NULL != hMmTask)
+        {
+            _PAvRevertMmThreadCharacteristics(hMmTask);
+        }
+    }
+
+    if (keepPlaying)
+    {
+        hr = _ptrClientOut->Stop();
+        if (FAILED(hr))
+        {
+            _TraceCOMError(hr);
+        }
+        hr = _ptrClientOut->Reset();
+        if (FAILED(hr))
+        {
+            _TraceCOMError(hr);
+        }
+
+        // Trigger callback from module process thread
+        _playError = 1;
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kPlayoutError message posted: rendering thread has ended pre-maturely");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_Rendering thread is now terminated properly");
+    }
+
+    return (DWORD)hr;
+}
+
+DWORD AudioDeviceWindowsCore::InitCaptureThreadPriority()
+{
+    HRESULT hr = S_OK;
+    _hMmTask = NULL;
+
+    _SetThreadName(-1, "webrtc_core_audio_capture_thread");
+
+    // Use Multimedia Class Scheduler Service (MMCSS) to boost the thread
+    // priority.
+    if (_winSupportAvrt)
+    {
+        DWORD taskIndex(0);
+        _hMmTask = _PAvSetMmThreadCharacteristicsA("Pro Audio", &taskIndex);
+        if (_hMmTask)
+        {
+            if (!_PAvSetMmThreadPriority(_hMmTask, AVRT_PRIORITY_CRITICAL))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "failed to boost rec-thread using MMCSS");
+            }
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "capture thread is now registered with MMCSS (taskIndex=%d)",
+                taskIndex);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "failed to enable MMCSS on capture thread (err=%d)",
+                GetLastError());
+            _TraceCOMError(GetLastError());
+        }
+    }
+
+    return S_OK;
+}
+
+void AudioDeviceWindowsCore::RevertCaptureThreadPriority()
+{
+    if (_winSupportAvrt)
+    {
+        if (NULL != _hMmTask)
+        {
+            _PAvRevertMmThreadCharacteristics(_hMmTask);
+        }
+    }
+
+    _hMmTask = NULL;
+}
+
//  Body of the capture thread used when the built-in AEC DMO is enabled.
//  Polls the DMO every 5 ms for AEC-processed capture data, delivers each
//  10 ms frame through _ptrAudioBuffer, and exits when
//  _hShutdownCaptureEvent is signaled. Returns the last HRESULT (0 on a
//  clean shutdown); on failure _recError is set so the module process
//  thread can report it.
DWORD AudioDeviceWindowsCore::DoCaptureThreadPollDMO()
{
    assert(_mediaBuffer != NULL);
    bool keepRecording = true;

    // Initialize COM as MTA in this thread.
    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
    if (!comInit.succeeded()) {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
        "failed to initialize COM in polling DMO thread");
      return -1;
    }

    HRESULT hr = InitCaptureThreadPriority();
    if (FAILED(hr))
    {
        return hr;
    }

    // Set event which will ensure that the calling thread modifies the
    // recording state to true.
    SetEvent(_hCaptureStartedEvent);

    // >> ---------------------------- THREAD LOOP ----------------------------
    while (keepRecording)
    {
        // Poll the DMO every 5 ms.
        // (The same interval used in the Wave implementation.)
        DWORD waitResult = WaitForSingleObject(_hShutdownCaptureEvent, 5);
        switch (waitResult)
        {
        case WAIT_OBJECT_0:         // _hShutdownCaptureEvent
            keepRecording = false;
            break;
        case WAIT_TIMEOUT:          // timeout notification
            break;
        default:                    // unexpected error
            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                "Unknown wait termination on capture side");
            hr = -1; // To signal an error callback.
            keepRecording = false;
            break;
        }

        // Inner loop: drain the DMO until it reports no more complete
        // frames (DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE cleared) or an error
        // occurs. Runs under _critSect for the whole iteration except the
        // DeliverRecordedData() callback.
        while (keepRecording)
        {
            CriticalSectionScoped critScoped(_critSect);

            DWORD dwStatus = 0;
            {
                DMO_OUTPUT_DATA_BUFFER dmoBuffer = {0};
                dmoBuffer.pBuffer = _mediaBuffer;
                dmoBuffer.pBuffer->AddRef();

                // Poll the DMO for AEC processed capture data. The DMO will
                // copy available data to |dmoBuffer|, and should only return
                // 10 ms frames. The value of |dwStatus| should be ignored.
                hr = _dmo->ProcessOutput(0, 1, &dmoBuffer, &dwStatus);
                SAFE_RELEASE(dmoBuffer.pBuffer);
                dwStatus = dmoBuffer.dwStatus;
            }
            if (FAILED(hr))
            {
                _TraceCOMError(hr);
                keepRecording = false;
                assert(false);
                break;
            }

            ULONG bytesProduced = 0;
            BYTE* data;
            // Get a pointer to the data buffer. This should be valid until
            // the next call to ProcessOutput.
            hr = _mediaBuffer->GetBufferAndLength(&data, &bytesProduced);
            if (FAILED(hr))
            {
                _TraceCOMError(hr);
                keepRecording = false;
                assert(false);
                break;
            }

            // TODO(andrew): handle AGC.

            if (bytesProduced > 0)
            {
                const int kSamplesProduced = bytesProduced / _recAudioFrameSize;
                // TODO(andrew): verify that this is always satisfied. It might
                // be that ProcessOutput will try to return more than 10 ms if
                // we fail to call it frequently enough.
                assert(kSamplesProduced == _recBlockSize);
                assert(sizeof(BYTE) == sizeof(WebRtc_Word8));
                _ptrAudioBuffer->SetRecordedBuffer(
                    reinterpret_cast<WebRtc_Word8*>(data),
                    kSamplesProduced);
                _ptrAudioBuffer->SetVQEData(0, 0, 0);

                _UnLock();  // Release lock while making the callback.
                _ptrAudioBuffer->DeliverRecordedData();
                _Lock();
            }

            // Reset length to indicate buffer availability.
            hr = _mediaBuffer->SetLength(0);
            if (FAILED(hr))
            {
                _TraceCOMError(hr);
                keepRecording = false;
                assert(false);
                break;
            }

            if (!(dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE))
            {
                // The DMO cannot currently produce more data. This is the
                // normal case; otherwise it means the DMO had more than 10 ms
                // of data available and ProcessOutput should be called again.
                break;
            }
        }
    }
    // ---------------------------- THREAD LOOP ---------------------------- <<

    RevertCaptureThreadPriority();

    if (FAILED(hr))
    {
        // Trigger callback from module process thread
        _recError = 1;
        WEBRTC_TRACE(kTraceError, kTraceUtility, _id,
            "kRecordingError message posted: capturing thread has ended "
            "prematurely");
    }
    else
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
            "Capturing thread is now terminated properly");
    }

    return hr;
}
+
+
+// ----------------------------------------------------------------------------
+//  DoCaptureThread
+// ----------------------------------------------------------------------------
+
+DWORD AudioDeviceWindowsCore::DoCaptureThread()
+{
+    // Main capture thread (non-DMO path). Waits on WASAPI capture events,
+    // drains the endpoint buffer into syncBuffer, re-blocks the audio into
+    // 10 ms frames (_recBlockSize samples) and delivers each frame to
+    // _ptrAudioBuffer. Returns an HRESULT cast to DWORD as thread exit code.
+
+    bool keepRecording = true;
+    // Index 0: shutdown request; index 1: "samples ready" from the engine.
+    HANDLE waitArray[2] = {_hShutdownCaptureEvent, _hCaptureSamplesReadyEvent};
+    HRESULT hr = S_OK;
+    HANDLE hMmTask = NULL;
+
+    LARGE_INTEGER t1;
+    LARGE_INTEGER t2;
+    WebRtc_Word32 time(0);
+
+    // Staging buffer used to re-block arbitrary-sized capture packets into
+    // exact 10 ms frames; syncBufIndex counts buffered frames (samples).
+    BYTE* syncBuffer = NULL;
+    UINT32 syncBufIndex = 0;
+
+    WebRtc_UWord32 newMicLevel(0);
+    WebRtc_UWord32 currentMicLevel(0);
+
+    _readSamples = 0;
+
+    // Initialize COM as MTA in this thread.
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+        "failed to initialize COM in capture thread");
+      return -1;
+    }
+
+    hr = InitCaptureThreadPriority();
+    if (FAILED(hr))
+    {
+        return hr;
+    }
+
+    _Lock();
+
+    // Get size of capturing buffer (length is expressed as the number of audio frames the buffer can hold).
+    // This value is fixed during the capturing session.
+    //
+    UINT32 bufferLength = 0;
+    hr = _ptrClientIn->GetBufferSize(&bufferLength);
+    EXIT_ON_ERROR(hr);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] size of buffer       : %u", bufferLength);
+
+    // Allocate memory for sync buffer.
+    // It is used for compensation between native 44.1 and internal 44.0 and
+    // for cases when the capture buffer is larger than 10ms.
+    //
+    const UINT32 syncBufferSize = 2*(bufferLength * _recAudioFrameSize);
+    syncBuffer = new BYTE[syncBufferSize];
+    if (syncBuffer == NULL)
+    {
+        // NOTE(review): this early return exits while _Lock() is still held
+        // and skips RevertCaptureThreadPriority() — looks like it should go
+        // through the Exit path instead; confirm before relying on it.
+        return E_POINTER;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] size of sync buffer  : %u [bytes]", syncBufferSize);
+
+    // Get maximum latency for the current stream (will not change for the lifetime of the IAudioClient object).
+    //
+    REFERENCE_TIME latency;
+    _ptrClientIn->GetStreamLatency(&latency);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] max stream latency   : %u (%3.2f ms)",
+        (DWORD)latency, (double)(latency / 10000.0));
+
+    // Get the length of the periodic interval separating successive processing passes by
+    // the audio engine on the data in the endpoint buffer.
+    //
+    REFERENCE_TIME devPeriod = 0;
+    REFERENCE_TIME devPeriodMin = 0;
+    _ptrClientIn->GetDevicePeriod(&devPeriod, &devPeriodMin);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] device period        : %u (%3.2f ms)",
+        (DWORD)devPeriod, (double)(devPeriod / 10000.0));
+
+    // REFERENCE_TIME units are 100 ns, hence the /10000.0 to get ms.
+    double extraDelayMS = (double)((latency + devPeriod) / 10000.0);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] extraDelayMS         : %3.2f", extraDelayMS);
+
+    double endpointBufferSizeMS = 10.0 * ((double)bufferLength / (double)_recBlockSize);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] endpointBufferSizeMS : %3.2f", endpointBufferSizeMS);
+
+    // Start up the capturing stream.
+    //
+    hr = _ptrClientIn->Start();
+    EXIT_ON_ERROR(hr);
+
+    _UnLock();
+
+    // Set event which will ensure that the calling thread modifies the recording state to true.
+    //
+    SetEvent(_hCaptureStartedEvent);
+
+    // >> ---------------------------- THREAD LOOP ----------------------------
+
+    while (keepRecording)
+    {
+        // Wait for a capture notification event or a shutdown event
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, 500);
+        switch (waitResult)
+        {
+        case WAIT_OBJECT_0 + 0:        // _hShutdownCaptureEvent
+            keepRecording = false;
+            break;
+        case WAIT_OBJECT_0 + 1:        // _hCaptureSamplesReadyEvent
+            break;
+        case WAIT_TIMEOUT:            // timeout notification
+            _ptrClientIn->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "capture event timed out after 0.5 seconds");
+            goto Exit;
+        default:                    // unexpected error
+            _ptrClientIn->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "unknown wait termination on capture side");
+            goto Exit;
+        }
+
+        // Inner loop: drain every packet currently queued in the endpoint
+        // buffer before going back to wait for the next event.
+        while (keepRecording)
+        {
+            BYTE *pData = 0;
+            UINT32 framesAvailable = 0;
+            DWORD flags = 0;
+            UINT64 recTime = 0;
+            UINT64 recPos = 0;
+
+            _Lock();
+
+            //  Find out how much capture data is available
+            //
+            hr = _ptrCaptureClient->GetBuffer(&pData,           // packet which is ready to be read by used
+                                              &framesAvailable, // #frames in the captured packet (can be zero)
+                                              &flags,           // support flags (check)
+                                              &recPos,          // device position of first audio frame in data packet
+                                              &recTime);        // value of performance counter at the time of recording the first audio frame
+
+            if (SUCCEEDED(hr))
+            {
+                if (AUDCLNT_S_BUFFER_EMPTY == hr)
+                {
+                    // Buffer was empty => start waiting for a new capture notification event
+                    _UnLock();
+                    break;
+                }
+
+                if (flags & AUDCLNT_BUFFERFLAGS_SILENT)
+                {
+                    // Treat all of the data in the packet as silence and ignore the actual data values.
+                    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "AUDCLNT_BUFFERFLAGS_SILENT");
+                    pData = NULL;
+                }
+
+                assert(framesAvailable != 0);
+
+                // Append the packet (or silence) to the sync buffer.
+                if (pData)
+                {
+                    CopyMemory(&syncBuffer[syncBufIndex*_recAudioFrameSize], pData, framesAvailable*_recAudioFrameSize);
+                }
+                else
+                {
+                    ZeroMemory(&syncBuffer[syncBufIndex*_recAudioFrameSize], framesAvailable*_recAudioFrameSize);
+                }
+                assert(syncBufferSize >= (syncBufIndex*_recAudioFrameSize)+framesAvailable*_recAudioFrameSize);
+
+                // Release the capture buffer
+                //
+                hr = _ptrCaptureClient->ReleaseBuffer(framesAvailable);
+                EXIT_ON_ERROR(hr);
+
+                _readSamples += framesAvailable;
+                syncBufIndex += framesAvailable;
+
+                QueryPerformanceCounter(&t1);
+
+                // Get the current recording and playout delay.
+                // Delay = age of the packet (now - recTime, in ms) plus the
+                // audio already queued in syncBuffer, minus the 10 ms frame
+                // about to be delivered.
+                WebRtc_UWord32 sndCardRecDelay = (WebRtc_UWord32)
+                    (((((UINT64)t1.QuadPart * _perfCounterFactor) - recTime)
+                        / 10000) + (10*syncBufIndex) / _recBlockSize - 10);
+                WebRtc_UWord32 sndCardPlayDelay =
+                    static_cast<WebRtc_UWord32>(_sndCardPlayDelay.Value());
+
+                _sndCardRecDelay = sndCardRecDelay;
+
+                // Deliver as many complete 10 ms frames as are buffered.
+                while (syncBufIndex >= _recBlockSize)
+                {
+                    if (_ptrAudioBuffer)
+                    {
+                        _ptrAudioBuffer->SetRecordedBuffer((const WebRtc_Word8*)syncBuffer, _recBlockSize);
+
+                        // Accumulate fractional drift and hand the integer
+                        // part to VQE, carrying the remainder forward.
+                        _driftAccumulator += _sampleDriftAt48kHz;
+                        const WebRtc_Word32 clockDrift =
+                            static_cast<WebRtc_Word32>(_driftAccumulator);
+                        _driftAccumulator -= clockDrift;
+
+                        _ptrAudioBuffer->SetVQEData(sndCardPlayDelay,
+                                                    sndCardRecDelay,
+                                                    clockDrift);
+
+                        QueryPerformanceCounter(&t1);    // measure time: START
+
+                        _UnLock();  // release lock while making the callback
+                        _ptrAudioBuffer->DeliverRecordedData();
+                        _Lock();    // restore the lock
+
+                        QueryPerformanceCounter(&t2);    // measure time: STOP
+
+                        // Measure "average CPU load".
+                        // Basically what we do here is to measure how many percent of our 10ms period
+                        // is used for encoding and decoding. This value shuld be used as a warning indicator
+                        // only and not seen as an absolute value. Running at ~100% will lead to bad QoS.
+                        time = (int)(t2.QuadPart - t1.QuadPart);
+                        _avgCPULoad = (float)(_avgCPULoad*.99 + (time + _playAcc) / (double)(_perfCounterFreq.QuadPart));
+                        _playAcc = 0;
+
+                        // Sanity check to ensure that essential states are not modified during the unlocked period
+                        if (_ptrCaptureClient == NULL || _ptrClientIn == NULL)
+                        {
+                            _UnLock();
+                            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "input state has been modified during unlocked period");
+                            goto Exit;
+                        }
+                    }
+
+                    // store remaining data which was not able to deliver as 10ms segment
+                    MoveMemory(&syncBuffer[0], &syncBuffer[_recBlockSize*_recAudioFrameSize], (syncBufIndex-_recBlockSize)*_recAudioFrameSize);
+                    syncBufIndex -= _recBlockSize;
+                    sndCardRecDelay -= 10;
+                }
+
+                if (_AGC)
+                {
+                    WebRtc_UWord32 newMicLevel = _ptrAudioBuffer->NewMicLevel();
+                    if (newMicLevel != 0)
+                    {
+                        // The VQE will only deliver non-zero microphone levels when a change is needed.
+                        // Set this new mic level (received from the observer as return value in the callback).
+                        WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AGC change of volume: new=%u",  newMicLevel);
+                        // We store this outside of the audio buffer to avoid
+                        // having it overwritten by the getter thread.
+                        _newMicLevel = newMicLevel;
+                        SetEvent(_hSetCaptureVolumeEvent);
+                    }
+                }
+            }
+            else
+            {
+                // If GetBuffer returns AUDCLNT_E_BUFFER_ERROR, the thread consuming the audio samples
+                // must wait for the next processing pass. The client might benefit from keeping a count
+                // of the failed GetBuffer calls. If GetBuffer returns this error repeatedly, the client
+                // can start a new processing loop after shutting down the current client by calling
+                // IAudioClient::Stop, IAudioClient::Reset, and releasing the audio client.
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "IAudioCaptureClient::GetBuffer returned AUDCLNT_E_BUFFER_ERROR, hr = 0x%08X",  hr);
+                goto Exit;
+            }
+
+            _UnLock();
+        }
+    }
+
+    // ---------------------------- THREAD LOOP ---------------------------- <<
+
+    hr = _ptrClientIn->Stop();
+
+Exit:
+    // On failure we arrive here still holding the lock (EXIT_ON_ERROR /
+    // goto paths above), hence the _UnLock() inside the FAILED branch.
+    if (FAILED(hr))
+    {
+        _UnLock();
+        _ptrClientIn->Stop();
+        _TraceCOMError(hr);
+    }
+
+    RevertCaptureThreadPriority();
+
+    // keepRecording still true here means we exited via an error path, not
+    // via the shutdown event: stop/reset the client and signal a rec error.
+    if (keepRecording)
+    {
+        if (_ptrClientIn != NULL)
+        {
+            hr = _ptrClientIn->Stop();
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+            }
+            hr = _ptrClientIn->Reset();
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+            }
+        }
+
+        // Trigger callback from module process thread
+        _recError = 1;
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: capturing thread has ended pre-maturely");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_Capturing thread is now terminated properly");
+    }
+
+    SAFE_RELEASE(_ptrClientIn);
+    SAFE_RELEASE(_ptrCaptureClient);
+
+    if (syncBuffer)
+    {
+        delete [] syncBuffer;
+    }
+
+    return (DWORD)hr;
+}
+
+// Enables or disables use of the built-in (DMO) AEC. May only be called
+// before recording is initialized, and requires that the AEC DMO was
+// created successfully at construction time. Returns 0 on success, -1
+// otherwise.
+int32_t AudioDeviceWindowsCore::EnableBuiltInAEC(bool enable)
+{
+    if (_recIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Attempt to set Windows AEC with recording already initialized");
+        return -1;
+    }
+
+    if (_dmo == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Built-in AEC DMO was not initialized properly at create time");
+        return -1;
+    }
+
+    // Both preconditions hold; latch the requested state.
+    _builtInAecEnabled = enable;
+    return 0;
+}
+
+// Returns whether the built-in AEC has been enabled via EnableBuiltInAEC().
+bool AudioDeviceWindowsCore::BuiltInAECIsEnabled() const
+{
+    return _builtInAecEnabled;
+}
+
+// Configures the AEC DMO's property store: system/source/feature modes,
+// disables analog AGC and noise suppression, and selects the capture and
+// render device indexes used by the DMO. Returns 0 on success, -1 on any
+// failure (each failed property write is traced by the setter helpers).
+int AudioDeviceWindowsCore::SetDMOProperties()
+{
+    HRESULT hr = S_OK;
+    assert(_dmo != NULL);
+
+    // Obtain the DMO's property store; scoped_refptr takes its own reference
+    // so the raw pointer from QueryInterface is released immediately.
+    scoped_refptr<IPropertyStore> ps;
+    {
+        IPropertyStore* ptrPS = NULL;
+        hr = _dmo->QueryInterface(IID_IPropertyStore,
+                                  reinterpret_cast<void**>(&ptrPS));
+        if (FAILED(hr) || ptrPS == NULL)
+        {
+            _TraceCOMError(hr);
+            return -1;
+        }
+        ps = ptrPS;
+        SAFE_RELEASE(ptrPS);
+    }
+
+    // Set the AEC system mode.
+    // SINGLE_CHANNEL_AEC - AEC processing only.
+    // (Consistency fix: compare against -1 like every other setter call
+    // below, instead of relying on truthiness of the return value.)
+    if (SetVtI4Property(ps,
+                        MFPKEY_WMAAECMA_SYSTEM_MODE,
+                        SINGLE_CHANNEL_AEC) == -1)
+    {
+        return -1;
+    }
+
+    // Set the AEC source mode.
+    // VARIANT_TRUE - Source mode (we poll the AEC for captured data).
+    if (SetBoolProperty(ps,
+                        MFPKEY_WMAAECMA_DMO_SOURCE_MODE,
+                        VARIANT_TRUE) == -1)
+    {
+        return -1;
+    }
+
+    // Enable the feature mode.
+    // This lets us override all the default processing settings below.
+    if (SetBoolProperty(ps,
+                        MFPKEY_WMAAECMA_FEATURE_MODE,
+                        VARIANT_TRUE) == -1)
+    {
+        return -1;
+    }
+
+    // Disable analog AGC (default enabled).
+    if (SetBoolProperty(ps,
+                        MFPKEY_WMAAECMA_MIC_GAIN_BOUNDER,
+                        VARIANT_FALSE) == -1)
+    {
+        return -1;
+    }
+
+    // Disable noise suppression (default enabled).
+    // 0 - Disabled, 1 - Enabled
+    if (SetVtI4Property(ps,
+                        MFPKEY_WMAAECMA_FEATR_NS,
+                        0) == -1)
+    {
+        return -1;
+    }
+
+    // Relevant parameters to leave at default settings:
+    // MFPKEY_WMAAECMA_FEATR_AGC - Digital AGC (disabled).
+    // MFPKEY_WMAAECMA_FEATR_CENTER_CLIP - AEC center clipping (enabled).
+    // MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH - Filter length (256 ms).
+    //   TODO(andrew): investigate decresing the length to 128 ms.
+    // MFPKEY_WMAAECMA_FEATR_FRAME_SIZE - Frame size (0).
+    //   0 is automatic; defaults to 160 samples (or 10 ms frames at the
+    //   selected 16 kHz) as long as mic array processing is disabled.
+    // MFPKEY_WMAAECMA_FEATR_NOISE_FILL - Comfort noise (enabled).
+    // MFPKEY_WMAAECMA_FEATR_VAD - VAD (disabled).
+
+    // Set the devices selected by VoE. If using a default device, we need to
+    // search for the device index.
+    int inDevIndex = _inputDeviceIndex;
+    int outDevIndex = _outputDeviceIndex;
+    if (!_usingInputDeviceIndex)
+    {
+        ERole role = eCommunications;
+        if (_inputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            role = eConsole;
+        }
+
+        if (_GetDefaultDeviceIndex(eCapture, role, &inDevIndex) == -1)
+        {
+            return -1;
+        }
+    }
+
+    if (!_usingOutputDeviceIndex)
+    {
+        ERole role = eCommunications;
+        if (_outputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            role = eConsole;
+        }
+
+        if (_GetDefaultDeviceIndex(eRender, role, &outDevIndex) == -1)
+        {
+            return -1;
+        }
+    }
+
+    // Pack render index into the high word and capture index into the low
+    // word, as required by MFPKEY_WMAAECMA_DEVICE_INDEXES.
+    DWORD devIndex = static_cast<uint32_t>(outDevIndex << 16) +
+                     static_cast<uint32_t>(0x0000ffff & inDevIndex);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "Capture device index: %d, render device index: %d",
+        inDevIndex, outDevIndex);
+    if (SetVtI4Property(ps,
+                        MFPKEY_WMAAECMA_DEVICE_INDEXES,
+                        devIndex) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// Writes a VT_BOOL value to the given key in a property store.
+// Returns 0 on success, -1 on failure (the COM error is traced).
+int AudioDeviceWindowsCore::SetBoolProperty(IPropertyStore* ptrPS,
+                                            REFPROPERTYKEY key,
+                                            VARIANT_BOOL value)
+{
+    PROPVARIANT pv;
+    PropVariantInit(&pv);
+    pv.vt = VT_BOOL;
+    pv.boolVal = value;
+    const HRESULT hr = ptrPS->SetValue(key, pv);
+    PropVariantClear(&pv);
+    if (SUCCEEDED(hr)) {
+        return 0;
+    }
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// Writes a VT_I4 (32-bit signed integer) value to the given key in a
+// property store. Returns 0 on success, -1 on failure (error is traced).
+int AudioDeviceWindowsCore::SetVtI4Property(IPropertyStore* ptrPS,
+                                            REFPROPERTYKEY key,
+                                            LONG value)
+{
+    PROPVARIANT pv;
+    PropVariantInit(&pv);
+    pv.vt = VT_I4;
+    pv.lVal = value;
+    const HRESULT hr = ptrPS->SetValue(key, pv);
+    PropVariantClear(&pv);
+    if (SUCCEEDED(hr)) {
+        return 0;
+    }
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  _RefreshDeviceList
+//
+//  Creates a new list of endpoint rendering or capture devices after
+//  deleting any previously created (and possibly out-of-date) list of
+//  such devices.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_RefreshDeviceList(EDataFlow dir)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    assert(dir == eRender || dir == eCapture);
+    assert(_ptrEnumerator != NULL);
+
+    // Enumerate all *active* endpoint devices for the requested direction
+    // into a fresh collection.
+    IMMDeviceCollection* ptrCollection = NULL;
+    HRESULT hr = _ptrEnumerator->EnumAudioEndpoints(dir,
+                                                    DEVICE_STATE_ACTIVE,
+                                                    &ptrCollection);
+    if (FAILED(hr)) {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(ptrCollection);
+        return -1;
+    }
+
+    // Swap the new list into the member slot for this direction, releasing
+    // any previously cached (possibly stale) collection first.
+    if (eRender == dir) {
+        SAFE_RELEASE(_ptrRenderCollection);
+        _ptrRenderCollection = ptrCollection;
+    } else {
+        SAFE_RELEASE(_ptrCaptureCollection);
+        _ptrCaptureCollection = ptrCollection;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _DeviceListCount
+//
+//  Gets a count of the endpoint rendering or capture devices in the
+//  current list of such devices.
+// ----------------------------------------------------------------------------
+
+// Returns the number of devices in the cached collection for the given
+// direction (see _RefreshDeviceList), 0 if no collection has been built
+// yet, or -1 on COM failure.
+WebRtc_Word16 AudioDeviceWindowsCore::_DeviceListCount(EDataFlow dir)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    UINT count = 0;
+
+    assert(eRender == dir || eCapture == dir);
+
+    // Bug fix: the original `else if` fell through to the *capture*
+    // collection when a render count was requested but the render list was
+    // NULL, returning the count of the wrong device class. Select the
+    // collection strictly by direction instead.
+    if (eRender == dir)
+    {
+        if (NULL != _ptrRenderCollection)
+        {
+            hr = _ptrRenderCollection->GetCount(&count);
+        }
+    }
+    else if (NULL != _ptrCaptureCollection)
+    {
+        hr = _ptrCaptureCollection->GetCount(&count);
+    }
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    return static_cast<WebRtc_Word16> (count);
+}
+
+// ----------------------------------------------------------------------------
+//  _GetListDeviceName
+//
+//  Gets the friendly name of an endpoint rendering or capture device
+//  from the current list of such devices. The caller uses an index
+//  into the list to identify the device.
+//
+//  Uses: _ptrRenderCollection or _ptrCaptureCollection which is updated
+//  in _RefreshDeviceList().
+// ----------------------------------------------------------------------------
+
+// Fetches the friendly name of device `index` from the cached collection
+// for `dir` into szBuffer. If the collection for that direction is absent,
+// pDevice stays NULL and _GetDeviceName writes a placeholder string.
+// Returns 0 on success, -1 on COM failure.
+WebRtc_Word32 AudioDeviceWindowsCore::_GetListDeviceName(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDevice *pDevice = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+
+    // Bug fix: the original `else if` consulted the *capture* collection
+    // when dir == eRender but the render collection was NULL, looking up a
+    // device of the wrong class. Select strictly by direction.
+    if (eRender == dir)
+    {
+        if (NULL != _ptrRenderCollection)
+        {
+            hr = _ptrRenderCollection->Item(index, &pDevice);
+        }
+    }
+    else if (NULL != _ptrCaptureCollection)
+    {
+        hr = _ptrCaptureCollection->Item(index, &pDevice);
+    }
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 res = _GetDeviceName(pDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(pDevice);
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDefaultDeviceName
+//
+//  Gets the friendly name of an endpoint rendering or capture device
+//  given a specified device role.
+//
+//  Uses: _ptrEnumerator
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDeviceName(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    assert(dir == eRender || dir == eCapture);
+    assert(role == eConsole || role == eCommunications);
+    assert(_ptrEnumerator != NULL);
+
+    // Look up the default endpoint for this flow/role, then resolve its
+    // friendly name into the caller's buffer.
+    IMMDevice* ptrDevice = NULL;
+    HRESULT hr = _ptrEnumerator->GetDefaultAudioEndpoint(dir, role, &ptrDevice);
+    if (FAILED(hr)) {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(ptrDevice);
+        return -1;
+    }
+
+    const WebRtc_Word32 res = _GetDeviceName(ptrDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(ptrDevice);
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetListDeviceID
+//
+//  Gets the unique ID string of an endpoint rendering or capture device
+//  from the current list of such devices. The caller uses an index
+//  into the list to identify the device.
+//
+//  Uses: _ptrRenderCollection or _ptrCaptureCollection which is updated
+//  in _RefreshDeviceList().
+// ----------------------------------------------------------------------------
+
+// Fetches the unique endpoint ID string of device `index` from the cached
+// collection for `dir` into szBuffer. If the collection for that direction
+// is absent, pDevice stays NULL and _GetDeviceID writes a placeholder.
+// Returns 0 on success, -1 on COM failure.
+WebRtc_Word32 AudioDeviceWindowsCore::_GetListDeviceID(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDevice *pDevice = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+
+    // Bug fix: the original `else if` consulted the *capture* collection
+    // when dir == eRender but the render collection was NULL, looking up a
+    // device of the wrong class. Select strictly by direction.
+    if (eRender == dir)
+    {
+        if (NULL != _ptrRenderCollection)
+        {
+            hr = _ptrRenderCollection->Item(index, &pDevice);
+        }
+    }
+    else if (NULL != _ptrCaptureCollection)
+    {
+        hr = _ptrCaptureCollection->Item(index, &pDevice);
+    }
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 res = _GetDeviceID(pDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(pDevice);
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDefaultDeviceID
+//
+//  Gets the uniqe device ID of an endpoint rendering or capture device
+//  given a specified device role.
+//
+//  Uses: _ptrEnumerator
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDeviceID(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    assert(dir == eRender || dir == eCapture);
+    assert(role == eConsole || role == eCommunications);
+    assert(_ptrEnumerator != NULL);
+
+    // Look up the default endpoint for this flow/role, then resolve its
+    // unique device ID string into the caller's buffer.
+    IMMDevice* ptrDevice = NULL;
+    HRESULT hr = _ptrEnumerator->GetDefaultAudioEndpoint(dir, role, &ptrDevice);
+    if (FAILED(hr)) {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(ptrDevice);
+        return -1;
+    }
+
+    const WebRtc_Word32 res = _GetDeviceID(ptrDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(ptrDevice);
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDeviceIndex(EDataFlow dir,
+                                                             ERole role,
+                                                             int* index)
+{
+    // Finds the position of the default device (for the given flow/role)
+    // within the cached device collection, by comparing endpoint ID strings.
+    // On success *index holds the collection index and 0 is returned; -1 on
+    // any failure or if no matching device is found.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    WCHAR szDefaultDeviceID[MAX_PATH] = {0};
+    WCHAR szDeviceID[MAX_PATH] = {0};
+
+    const size_t kDeviceIDLength = sizeof(szDeviceID)/sizeof(szDeviceID[0]);
+    assert(kDeviceIDLength ==
+        sizeof(szDefaultDeviceID) / sizeof(szDefaultDeviceID[0]));
+
+    // Resolve the endpoint ID string of the default device first.
+    if (_GetDefaultDeviceID(dir,
+                            role,
+                            szDefaultDeviceID,
+                            kDeviceIDLength) == -1)
+    {
+        return -1;
+    }
+
+    // Pick the cached collection matching the requested direction; it must
+    // have been populated by a prior _RefreshDeviceList() call.
+    IMMDeviceCollection* collection = _ptrCaptureCollection;
+    if (dir == eRender)
+    {
+        collection = _ptrRenderCollection;
+    }
+
+    if (!collection)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Device collection not valid");
+        return -1;
+    }
+
+    UINT count = 0;
+    hr = collection->GetCount(&count);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    // Linear scan: compare each device's ID against the default device's ID.
+    *index = -1;
+    for (UINT i = 0; i < count; i++)
+    {
+        memset(szDeviceID, 0, sizeof(szDeviceID));
+        // scoped_refptr keeps the device alive for this iteration; the raw
+        // pointer from Item() is released immediately after the assignment
+        // takes its own reference.
+        scoped_refptr<IMMDevice> device;
+        {
+            IMMDevice* ptrDevice = NULL;
+            hr = collection->Item(i, &ptrDevice);
+            if (FAILED(hr) || ptrDevice == NULL)
+            {
+                _TraceCOMError(hr);
+                return -1;
+            }
+            device = ptrDevice;
+            SAFE_RELEASE(ptrDevice);
+        }
+
+        if (_GetDeviceID(device, szDeviceID, kDeviceIDLength) == -1)
+        {
+           return -1;
+        }
+
+        if (wcsncmp(szDefaultDeviceID, szDeviceID, kDeviceIDLength) == 0)
+        {
+            // Found a match.
+            *index = i;
+            break;
+        }
+
+    }
+
+    if (*index == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Unable to find collection index for default device");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice,
+                                                     LPWSTR pszBuffer,
+                                                     int bufferLen)
+{
+    // Copies the device's friendly name (PKEY_Device_FriendlyName) into
+    // pszBuffer, truncating if necessary. On any failure — including a NULL
+    // pDevice — the placeholder "<Device not available>" is written instead.
+    // Always returns 0.
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    static const WCHAR szDefault[] = L"<Device not available>";
+
+    // hr starts as a failure so a NULL pDevice naturally falls through to
+    // the default-name branch at the end.
+    HRESULT hr = E_FAIL;
+    IPropertyStore *pProps = NULL;
+    PROPVARIANT varName;
+
+    assert(pszBuffer != NULL);
+    assert(bufferLen > 0);
+
+    if (pDevice != NULL)
+    {
+        hr = pDevice->OpenPropertyStore(STGM_READ, &pProps);
+        if (FAILED(hr))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "IMMDevice::OpenPropertyStore failed, hr = 0x%08X", hr);
+        }
+    }
+
+    // Initialize container for property value.
+    PropVariantInit(&varName);
+
+    if (SUCCEEDED(hr))
+    {
+        // Get the endpoint device's friendly-name property.
+        hr = pProps->GetValue(PKEY_Device_FriendlyName, &varName);
+        if (FAILED(hr))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "IPropertyStore::GetValue failed, hr = 0x%08X", hr);
+        }
+    }
+
+    // Validate the returned variant: it must hold a wide string, not be
+    // empty; any other outcome is demoted to a failure HRESULT.
+    if ((SUCCEEDED(hr)) && (VT_EMPTY == varName.vt))
+    {
+        hr = E_FAIL;
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "IPropertyStore::GetValue returned no value, hr = 0x%08X", hr);
+    }
+
+    if ((SUCCEEDED(hr)) && (VT_LPWSTR != varName.vt))
+    {
+        // The returned value is not a wide null terminated string.
+        hr = E_UNEXPECTED;
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "IPropertyStore::GetValue returned unexpected type, hr = 0x%08X", hr);
+    }
+
+    if (SUCCEEDED(hr) && (varName.pwszVal != NULL))
+    {
+        // Copy the valid device name to the provided ouput buffer.
+        wcsncpy_s(pszBuffer, bufferLen, varName.pwszVal, _TRUNCATE);
+    }
+    else
+    {
+        // Failed to find the device name.
+        wcsncpy_s(pszBuffer, bufferLen, szDefault, _TRUNCATE);
+    }
+
+    PropVariantClear(&varName);
+    SAFE_RELEASE(pProps);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDeviceID
+// ----------------------------------------------------------------------------
+
+// Copies the device's unique endpoint ID string into pszBuffer, or the
+// placeholder "<Device not available>" if pDevice is NULL or GetId() fails.
+// Always returns 0.
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDeviceID(IMMDevice* pDevice, LPWSTR pszBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    static const WCHAR szDefault[] = L"<Device not available>";
+
+    assert(pszBuffer != NULL);
+    assert(bufferLen > 0);
+
+    // GetId() allocates the returned string with the COM task allocator;
+    // it is freed below unconditionally (CoTaskMemFree(NULL) is a no-op).
+    HRESULT hr = E_FAIL;
+    LPWSTR pwszID = NULL;
+    if (pDevice != NULL) {
+        hr = pDevice->GetId(&pwszID);
+    }
+
+    if (hr == S_OK) {
+        // Found the device ID.
+        wcsncpy_s(pszBuffer, bufferLen, pwszID, _TRUNCATE);
+    } else {
+        // Failed to find the device ID.
+        wcsncpy_s(pszBuffer, bufferLen, szDefault, _TRUNCATE);
+    }
+
+    CoTaskMemFree(pwszID);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDefaultDevice
+// ----------------------------------------------------------------------------
+
+// Returns (via ppDevice) the default audio endpoint for the given data-flow
+// direction and role. The caller owns the returned reference.
+// Returns 0 on success, -1 on COM failure (error is traced).
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDevice(EDataFlow dir, ERole role, IMMDevice** ppDevice)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    assert(_ptrEnumerator != NULL);
+
+    const HRESULT hr = _ptrEnumerator->GetDefaultAudioEndpoint(dir, role, ppDevice);
+    if (FAILED(hr)) {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetListDevice
+// ----------------------------------------------------------------------------
+
+// Returns (via ppDevice) the device at `index` among the *active* endpoints
+// for the given direction. A fresh enumeration is performed on every call.
+// The caller owns the returned device reference.
+// Returns 0 on success, -1 on COM failure (error is traced).
+WebRtc_Word32 AudioDeviceWindowsCore::_GetListDevice(EDataFlow dir, int index, IMMDevice** ppDevice)
+{
+    HRESULT hr(S_OK);
+
+    assert(_ptrEnumerator != NULL);
+
+    IMMDeviceCollection *pCollection = NULL;
+
+    hr = _ptrEnumerator->EnumAudioEndpoints(
+                               dir,
+                               DEVICE_STATE_ACTIVE,        // only active endpoints are OK
+                               &pCollection);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pCollection);
+        return -1;
+    }
+
+    hr = pCollection->Item(
+                        index,
+                        ppDevice);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pCollection);
+        return -1;
+    }
+
+    // Bug fix: release the temporary collection on the success path too;
+    // the original returned without releasing it, leaking one
+    // IMMDeviceCollection reference per successful call.
+    SAFE_RELEASE(pCollection);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _EnumerateEndpointDevicesAll
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_EnumerateEndpointDevicesAll(EDataFlow dataFlow) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    assert(_ptrEnumerator != NULL);
+
+    HRESULT hr = S_OK;
+    IMMDeviceCollection *pCollection = NULL;
+
+    // Generate a collection of audio endpoint devices in the system.
+    // Get states for *all* endpoint devices.
+    // Output: IMMDeviceCollection interface.
+    hr = _ptrEnumerator->EnumAudioEndpoints(
+                                 dataFlow,            // data-flow direction (input parameter)
+                                 DEVICE_STATE_ACTIVE | DEVICE_STATE_DISABLED | DEVICE_STATE_NOTPRESENT | DEVICE_STATE_UNPLUGGED,
+                                 &pCollection);        // release interface when done
+
+    EXIT_ON_ERROR(hr);
+
+    // use the IMMDeviceCollection interface...
+
+    UINT count;
+    IMMDevice *pEndpoint = NULL;
+    IPropertyStore *pProps = NULL;
+    IAudioEndpointVolume* pEndpointVolume = NULL;
+    LPWSTR pwszID = NULL;
+
+    // Retrieve a count of the devices in the device collection.
+    hr = pCollection->GetCount(&count);
+    EXIT_ON_ERROR(hr);
+    if (dataFlow == eRender)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#rendering endpoint devices (counting all): %u", count);
+    else if (dataFlow == eCapture)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#capturing endpoint devices (counting all): %u", count);
+
+    if (count == 0)
+    {
+        return 0;
+    }
+
+    // Each loop prints the name of an endpoint device.
+    for (ULONG i = 0; i < count; i++)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Endpoint %d:", i);
+
+        // Get pointer to endpoint number i.
+        // Output: IMMDevice interface.
+        hr = pCollection->Item(
+                            i,
+                            &pEndpoint);
+        EXIT_ON_ERROR(hr);
+
+        // use the IMMDevice interface of the specified endpoint device...
+
+        // Get the endpoint ID string (uniquely identifies the device among all audio endpoint devices)
+        hr = pEndpoint->GetId(&pwszID);
+        EXIT_ON_ERROR(hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "ID string    : %S", pwszID);
+
+        // Retrieve an interface to the device's property store.
+        // Output: IPropertyStore interface.
+        hr = pEndpoint->OpenPropertyStore(
+                          STGM_READ,
+                          &pProps);
+        EXIT_ON_ERROR(hr);
+
+        // use the IPropertyStore interface...
+
+        PROPVARIANT varName;
+        // Initialize container for property value.
+        PropVariantInit(&varName);
+
+        // Get the endpoint's friendly-name property.
+        // Example: "Speakers (Realtek High Definition Audio)"
+        hr = pProps->GetValue(
+                       PKEY_Device_FriendlyName,
+                       &varName);
+        EXIT_ON_ERROR(hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", varName.pwszVal);
+
+        // Get the endpoint's current device state
+        DWORD dwState;
+        hr = pEndpoint->GetState(&dwState);
+        EXIT_ON_ERROR(hr);
+        if (dwState & DEVICE_STATE_ACTIVE)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : *ACTIVE*", dwState);
+        if (dwState & DEVICE_STATE_DISABLED)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : DISABLED", dwState);
+        if (dwState & DEVICE_STATE_NOTPRESENT)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : NOTPRESENT", dwState);
+        if (dwState & DEVICE_STATE_UNPLUGGED)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : UNPLUGGED", dwState);
+
+        // Check the hardware volume capabilities.
+        DWORD dwHwSupportMask = 0;
+        hr = pEndpoint->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL,
+                               NULL, (void**)&pEndpointVolume);
+        EXIT_ON_ERROR(hr);
+        hr = pEndpointVolume->QueryHardwareSupport(&dwHwSupportMask);
+        EXIT_ON_ERROR(hr);
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_VOLUME)
+            // The audio endpoint device supports a hardware volume control
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "hwmask (0x%x) : HARDWARE_SUPPORT_VOLUME", dwHwSupportMask);
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_MUTE)
+            // The audio endpoint device supports a hardware mute control
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "hwmask (0x%x) : HARDWARE_SUPPORT_MUTE", dwHwSupportMask);
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_METER)
+            // The audio endpoint device supports a hardware peak meter
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "hwmask (0x%x) : HARDWARE_SUPPORT_METER", dwHwSupportMask);
+
+        // Check the channel count (#channels in the audio stream that enters or leaves the audio endpoint device)
+        UINT nChannelCount(0);
+        hr = pEndpointVolume->GetChannelCount(
+                                &nChannelCount);
+        EXIT_ON_ERROR(hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#channels    : %u", nChannelCount);
+
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_VOLUME)
+        {
+            // Get the volume range.
+            float fLevelMinDB(0.0);
+            float fLevelMaxDB(0.0);
+            float fVolumeIncrementDB(0.0);
+            hr = pEndpointVolume->GetVolumeRange(
+                                    &fLevelMinDB,
+                                    &fLevelMaxDB,
+                                    &fVolumeIncrementDB);
+            EXIT_ON_ERROR(hr);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "volume range : %4.2f (min), %4.2f (max), %4.2f (inc) [dB]",
+                fLevelMinDB, fLevelMaxDB, fVolumeIncrementDB);
+
+            // The volume range from vmin = fLevelMinDB to vmax = fLevelMaxDB is divided
+            // into n uniform intervals of size vinc = fVolumeIncrementDB, where
+            // n = (vmax ?vmin) / vinc.
+            // The values vmin, vmax, and vinc are measured in decibels. The client can set
+            // the volume level to one of n + 1 discrete values in the range from vmin to vmax.
+            int n = (int)((fLevelMaxDB-fLevelMinDB)/fVolumeIncrementDB);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#intervals   : %d", n);
+
+            // Get information about the current step in the volume range.
+            // This method represents the volume level of the audio stream that enters or leaves
+            // the audio endpoint device as an index or "step" in a range of discrete volume levels.
+            // Output value nStepCount is the number of steps in the range. Output value nStep
+            // is the step index of the current volume level. If the number of steps is n = nStepCount,
+            // then step index nStep can assume values from 0 (minimum volume) to n ?1 (maximum volume).
+            UINT nStep(0);
+            UINT nStepCount(0);
+            hr = pEndpointVolume->GetVolumeStepInfo(
+                                    &nStep,
+                                    &nStepCount);
+            EXIT_ON_ERROR(hr);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "volume steps : %d (nStep), %d (nStepCount)", nStep, nStepCount);
+        }
+
+        CoTaskMemFree(pwszID);
+        pwszID = NULL;
+        PropVariantClear(&varName);
+        SAFE_RELEASE(pProps);
+        SAFE_RELEASE(pEndpoint);
+        SAFE_RELEASE(pEndpointVolume);
+    }
+    SAFE_RELEASE(pCollection);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    CoTaskMemFree(pwszID);
+    pwszID = NULL;
+    SAFE_RELEASE(pCollection);
+    SAFE_RELEASE(pEndpoint);
+    SAFE_RELEASE(pEndpointVolume);
+    SAFE_RELEASE(pProps);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  _TraceCOMError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::_TraceCOMError(HRESULT hr) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR errorText[MAXERRORLENGTH];
+
+    const DWORD dwFlags = FORMAT_MESSAGE_FROM_SYSTEM |
+                          FORMAT_MESSAGE_IGNORE_INSERTS;
+    const DWORD dwLangID = MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US);
+    
+    // Gets the system's human readable message string for this HRESULT.
+    // All error message in English by default.
+    DWORD messageLength = ::FormatMessageW(dwFlags, 
+                                           0,
+                                           hr,
+                                           dwLangID,
+                                           errorText,  
+                                           MAXERRORLENGTH,  
+                                           NULL);
+
+    assert(messageLength <= MAXERRORLENGTH);
+
+    // Trims tailing white space (FormatMessage() leaves a trailing cr-lf.).
+    for (; messageLength && ::isspace(errorText[messageLength - 1]);
+         --messageLength)
+    {
+        errorText[messageLength - 1] = '\0';
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+        "Core Audio method failed (hr=0x%x)", hr);
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    StringCchCat(buf, MAXERRORLENGTH, errorText);
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  _SetThreadName
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::_SetThreadName(DWORD dwThreadID, LPCSTR szThreadName)
+{
+    // See http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx for details on the code
+    // in this function. Name of article is "Setting a Thread Name (Unmanaged)".
+
+    THREADNAME_INFO info;
+    info.dwType = 0x1000;
+    info.szName = szThreadName;
+    info.dwThreadID = dwThreadID;
+    info.dwFlags = 0;
+
+    __try
+    {
+        RaiseException( 0x406D1388, 0, sizeof(info)/sizeof(DWORD), (ULONG_PTR *)&info );
+    }
+    __except (EXCEPTION_CONTINUE_EXECUTION)
+    {
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  _Get44kHzDrift
+// ----------------------------------------------------------------------------
+
void AudioDeviceWindowsCore::_Get44kHzDrift()
{
    // We aren't able to resample at 44.1 kHz. Instead we run at 44 kHz and push/pull
    // from the engine faster to compensate. If only one direction is set to 44.1 kHz
    // the result is indistinguishable from clock drift to the AEC. We can compensate
    // internally if we inform the AEC about the drift.
    _sampleDriftAt48kHz = 0;
    _driftAccumulator = 0;

    if (_playSampleRate == 44000 && _recSampleRate != 44000)
    {
        // Playout side runs at "44 kHz" while capture does not: apply a
        // positive drift (push samples faster).
        // NOTE(review): this branch divides by 440 while the branch below
        // divides by 441 — the asymmetry looks unintentional. Confirm
        // which denominator is correct (44000/100 = 440 vs 44100/100 = 441).
        _sampleDriftAt48kHz = 480.0f/440;
    }
    else if(_playSampleRate != 44000 && _recSampleRate == 44000)
    {
        // Capture side runs at "44 kHz": compensate in the opposite
        // direction with a negative drift.
        _sampleDriftAt48kHz = -480.0f/441;
    }
}
+
+// ----------------------------------------------------------------------------
+//  WideToUTF8
+// ----------------------------------------------------------------------------
+
+char* AudioDeviceWindowsCore::WideToUTF8(const TCHAR* src) const {
+#ifdef UNICODE
+    const size_t kStrLen = sizeof(_str);
+    memset(_str, 0, kStrLen);
+    // Get required size (in bytes) to be able to complete the conversion.
+    int required_size = WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, 0, 0, 0);
+    if (required_size <= kStrLen)
+    {
+        // Process the entire input string, including the terminating null char.
+        if (WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, kStrLen, 0, 0) == 0)
+            memset(_str, 0, kStrLen);
+    }
+    return _str;
+#else
+    return const_cast<char*>(src);
+#endif
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_WINDOWS_CORE_AUDIO_BUILD
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_device_core_win.h b/trunk/src/modules/audio_device/main/source/win/audio_device_core_win.h
new file mode 100644
index 0000000..da37a8c
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_device_core_win.h
@@ -0,0 +1,381 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CORE_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CORE_WIN_H
+
+#if (_MSC_VER >= 1400)  // only include for VS 2005 and higher
+
+#include "audio_device_generic.h"
+
+#pragma once
+#include <wmcodecdsp.h>      // CLSID_CWMAudioAEC
+                             // (must be before audioclient.h)
+#include <Audioclient.h>     // WASAPI
+#include <Audiopolicy.h>
+#include <avrt.h>            // Avrt
+#include <endpointvolume.h>
+#include <mediaobj.h>        // IMediaObject
+#include <Mmdeviceapi.h>     // MMDevice
+
+#include "atomic32_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "scoped_refptr.h"
+
+// Use Multimedia Class Scheduler Service (MMCSS) to boost the thread priority
+#pragma comment( lib, "avrt.lib" )
+// AVRT function pointers
+typedef BOOL (WINAPI *PAvRevertMmThreadCharacteristics)(HANDLE);
+typedef HANDLE (WINAPI *PAvSetMmThreadCharacteristicsA)(LPCSTR, LPDWORD);
+typedef BOOL (WINAPI *PAvSetMmThreadPriority)(HANDLE, AVRT_PRIORITY);
+
+namespace webrtc {
+
// Volume limits and step sizes exposed through the AudioDeviceModule API
// for the Core Audio implementation. Endpoint volumes are mapped onto the
// integer range [0, 255] with a step size of 1.
const float MAX_CORE_SPEAKER_VOLUME = 255.0f;
const float MIN_CORE_SPEAKER_VOLUME = 0.0f;
const float MAX_CORE_MICROPHONE_VOLUME = 255.0f;
const float MIN_CORE_MICROPHONE_VOLUME = 0.0f;
const WebRtc_UWord16 CORE_SPEAKER_VOLUME_STEP_SIZE = 1;
const WebRtc_UWord16 CORE_MICROPHONE_VOLUME_STEP_SIZE = 1;
+
+// Utility class which initializes COM in the constructor (STA or MTA),
+// and uninitializes COM in the destructor.
+class ScopedCOMInitializer {
+ public:
+  // Enum value provided to initialize the thread as an MTA instead of STA.
+  enum SelectMTA { kMTA };
+
+  // Constructor for STA initialization.
+  ScopedCOMInitializer() {
+    Initialize(COINIT_APARTMENTTHREADED);
+  }
+
+  // Constructor for MTA initialization.
+  explicit ScopedCOMInitializer(SelectMTA mta) {
+    Initialize(COINIT_MULTITHREADED);
+  }
+
+  ScopedCOMInitializer::~ScopedCOMInitializer() {
+    if (SUCCEEDED(hr_))
+      CoUninitialize();
+  }
+
+  bool succeeded() const { return SUCCEEDED(hr_); }
+ 
+ private:
+  void Initialize(COINIT init) {
+    hr_ = CoInitializeEx(NULL, init);
+  }
+
+  HRESULT hr_;
+
+  ScopedCOMInitializer(const ScopedCOMInitializer&);
+  void operator=(const ScopedCOMInitializer&);
+};
+
+
// AudioDeviceGeneric implementation for Windows built on the Core Audio
// APIs: MMDevice for endpoint enumeration/selection, WASAPI
// (IAudioClient/IAudioRenderClient/IAudioCaptureClient) for streaming,
// IAudioEndpointVolume / ISimpleAudioVolume for volume control, and a
// DirectX Media Object (DMO) for the optional built-in AEC. Rendering and
// capturing each run on a dedicated thread whose priority can be boosted
// through MMCSS (avrt.dll).
class AudioDeviceWindowsCore : public AudioDeviceGeneric
{
public:
    AudioDeviceWindowsCore(const WebRtc_Word32 id);
    ~AudioDeviceWindowsCore();

    // Returns true if the Core Audio layer can be used on this system.
    static bool CoreAudioIsSupported();

    // Retrieve the currently utilized audio layer
    virtual WebRtc_Word32 ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;

    // Main initializaton and termination
    virtual WebRtc_Word32 Init();
    virtual WebRtc_Word32 Terminate();
    virtual bool Initialized() const;

    // Device enumeration
    virtual WebRtc_Word16 PlayoutDevices();
    virtual WebRtc_Word16 RecordingDevices();
    virtual WebRtc_Word32 PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize]);
    virtual WebRtc_Word32 RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize]);

    // Device selection
    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
    virtual WebRtc_Word32 SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
    virtual WebRtc_Word32 SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);

    // Audio transport initialization
    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
    virtual WebRtc_Word32 InitPlayout();
    virtual bool PlayoutIsInitialized() const;
    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
    virtual WebRtc_Word32 InitRecording();
    virtual bool RecordingIsInitialized() const;

    // Audio transport control
    virtual WebRtc_Word32 StartPlayout();
    virtual WebRtc_Word32 StopPlayout();
    virtual bool Playing() const;
    virtual WebRtc_Word32 StartRecording();
    virtual WebRtc_Word32 StopRecording();
    virtual bool Recording() const;

    // Microphone Automatic Gain Control (AGC)
    virtual WebRtc_Word32 SetAGC(bool enable);
    virtual bool AGC() const;

    // Volume control based on the Windows Wave API (Windows only)
    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight);
    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const;

    // Audio mixer initialization
    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
    virtual WebRtc_Word32 InitSpeaker();
    virtual bool SpeakerIsInitialized() const;
    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
    virtual WebRtc_Word32 InitMicrophone();
    virtual bool MicrophoneIsInitialized() const;

    // Speaker volume controls
    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;

    // Microphone volume controls
    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;

    // Speaker mute control
    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;

    // Microphone mute control
    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;

    // Microphone boost control
    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;

    // Stereo support
    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
    virtual WebRtc_Word32 SetStereoRecording(bool enable);
    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;

    // Delay information and control
    virtual WebRtc_Word32 SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;

    // CPU load
    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;

    // Built-in acoustic echo cancellation via the DMO.
    virtual int32_t EnableBuiltInAEC(bool enable);
    virtual bool BuiltInAECIsEnabled() const;

public:
    // Error/warning reporting toward the AudioDeviceModule.
    virtual bool PlayoutWarning() const;
    virtual bool PlayoutError() const;
    virtual bool RecordingWarning() const;
    virtual bool RecordingError() const;
    virtual void ClearPlayoutWarning();
    virtual void ClearPlayoutError();
    virtual void ClearRecordingWarning();
    virtual void ClearRecordingError();

public:
    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

private:    // avrt function pointers
    // Resolved at runtime from avrt.dll (not present on pre-Vista).
    PAvRevertMmThreadCharacteristics    _PAvRevertMmThreadCharacteristics;
    PAvSetMmThreadCharacteristicsA      _PAvSetMmThreadCharacteristicsA;
    PAvSetMmThreadPriority              _PAvSetMmThreadPriority;
    HMODULE                             _avrtLibrary;
    bool                                _winSupportAvrt;

private:    // thread functions
    DWORD InitCaptureThreadPriority();
    void RevertCaptureThreadPriority();
    // Static WINAPI entry points unwrap |context| to the instance and
    // forward to the corresponding Do*Thread() member.
    static DWORD WINAPI WSAPICaptureThread(LPVOID context);
    DWORD DoCaptureThread();

    static DWORD WINAPI WSAPICaptureThreadPollDMO(LPVOID context);
    DWORD DoCaptureThreadPollDMO();

    static DWORD WINAPI WSAPIRenderThread(LPVOID context);
    DWORD DoRenderThread();

    static DWORD WINAPI GetCaptureVolumeThread(LPVOID context);
    DWORD DoGetCaptureVolumeThread();

    static DWORD WINAPI SetCaptureVolumeThread(LPVOID context);
    DWORD DoSetCaptureVolumeThread();

    void _SetThreadName(DWORD dwThreadID, LPCSTR szThreadName);
    void _Lock() { _critSect.Enter(); };
    void _UnLock() { _critSect.Leave(); };

private:
    WebRtc_Word32 Id() {return _id;}

private:
    // DMO (built-in AEC) property helpers.
    int SetDMOProperties();

    int SetBoolProperty(IPropertyStore* ptrPS,
                        REFPROPERTYKEY key,
                        VARIANT_BOOL value);

    int SetVtI4Property(IPropertyStore* ptrPS,
                        REFPROPERTYKEY key,
                        LONG value);

    // MMDevice enumeration helpers (see audio_device_core_win.cc).
    WebRtc_Word32 _EnumerateEndpointDevicesAll(EDataFlow dataFlow) const;
    void _TraceCOMError(HRESULT hr) const;

    WebRtc_Word32 _RefreshDeviceList(EDataFlow dir);
    WebRtc_Word16 _DeviceListCount(EDataFlow dir);
    WebRtc_Word32 _GetDefaultDeviceName(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen);
    WebRtc_Word32 _GetListDeviceName(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen);
    WebRtc_Word32 _GetDeviceName(IMMDevice* pDevice, LPWSTR pszBuffer, int bufferLen);
    WebRtc_Word32 _GetListDeviceID(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen);
    WebRtc_Word32 _GetDefaultDeviceID(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen);
    WebRtc_Word32 _GetDefaultDeviceIndex(EDataFlow dir, ERole role, int* index);
    WebRtc_Word32 _GetDeviceID(IMMDevice* pDevice, LPWSTR pszBuffer, int bufferLen);
    WebRtc_Word32 _GetDefaultDevice(EDataFlow dir, ERole role, IMMDevice** ppDevice);
    WebRtc_Word32 _GetListDevice(EDataFlow dir, int index, IMMDevice** ppDevice);

    // Computes _sampleDriftAt48kHz for 44 kHz play/record mismatch.
    void _Get44kHzDrift();

    // Converts from wide-char to UTF-8 if UNICODE is defined.
    // Does nothing if UNICODE is undefined.
    char* WideToUTF8(const TCHAR* src) const;

    WebRtc_Word32 InitRecordingDMO();

private:
    ScopedCOMInitializer                    _comInit;
    AudioDeviceBuffer*                      _ptrAudioBuffer;
    CriticalSectionWrapper&                 _critSect;
    CriticalSectionWrapper&                 _volumeMutex;
    WebRtc_Word32                           _id;

private:  // MMDevice
    IMMDeviceEnumerator*                    _ptrEnumerator;
    IMMDeviceCollection*                    _ptrRenderCollection;
    IMMDeviceCollection*                    _ptrCaptureCollection;
    IMMDevice*                              _ptrDeviceOut;
    IMMDevice*                              _ptrDeviceIn;

private:  // WASAPI
    IAudioClient*                           _ptrClientOut;
    IAudioClient*                           _ptrClientIn;
    IAudioRenderClient*                     _ptrRenderClient;
    IAudioCaptureClient*                    _ptrCaptureClient;
    IAudioEndpointVolume*                   _ptrCaptureVolume;
    ISimpleAudioVolume*                     _ptrRenderSimpleVolume;

    // DirectX Media Object (DMO) for the built-in AEC.
    scoped_refptr<IMediaObject>             _dmo;
    scoped_refptr<IMediaBuffer>             _mediaBuffer;
    bool                                    _builtInAecEnabled;

    // Render-side thread/event handles.
    HANDLE                                  _hRenderSamplesReadyEvent;
    HANDLE                                  _hPlayThread;
    HANDLE                                  _hRenderStartedEvent;
    HANDLE                                  _hShutdownRenderEvent;

    // Capture-side thread/event handles.
    HANDLE                                  _hCaptureSamplesReadyEvent;
    HANDLE                                  _hRecThread;
    HANDLE                                  _hCaptureStartedEvent;
    HANDLE                                  _hShutdownCaptureEvent;

    // Capture-volume worker threads.
    HANDLE                                  _hGetCaptureVolumeThread;
    HANDLE                                  _hSetCaptureVolumeThread;
    HANDLE                                  _hSetCaptureVolumeEvent;

    // MMCSS task handle (AvSetMmThreadCharacteristics).
    HANDLE                                  _hMmTask;

    UINT                                    _playAudioFrameSize;
    WebRtc_UWord32                          _playSampleRate;
    WebRtc_UWord32                          _devicePlaySampleRate;
    WebRtc_UWord32                          _playBlockSize;
    WebRtc_UWord32                          _devicePlayBlockSize;
    WebRtc_UWord32                          _playChannels;
    Atomic32Wrapper                         _sndCardPlayDelay;
    UINT64                                  _writtenSamples;
    LONGLONG                                _playAcc;

    UINT                                    _recAudioFrameSize;
    WebRtc_UWord32                          _recSampleRate;
    WebRtc_UWord32                          _recBlockSize;
    WebRtc_UWord32                          _recChannels;
    UINT64                                  _readSamples;
    Atomic32Wrapper                         _sndCardRecDelay;

    // Drift compensation state for the 44 kHz case (see _Get44kHzDrift()).
    float                                   _sampleDriftAt48kHz;
    float                                   _driftAccumulator;

    WebRtc_UWord16                          _recChannelsPrioList[2];
    WebRtc_UWord16                          _playChannelsPrioList[2];

    // QueryPerformanceCounter frequency, used for delay/CPU-load timing.
    LARGE_INTEGER                           _perfCounterFreq;
    double                                  _perfCounterFactor;
    float                                   _avgCPULoad;

private:
    bool                                    _initialized;
    bool                                    _recording;
    bool                                    _playing;
    bool                                    _recIsInitialized;
    bool                                    _playIsInitialized;
    bool                                    _speakerIsInitialized;
    bool                                    _microphoneIsInitialized;

    bool                                    _usingInputDeviceIndex;
    bool                                    _usingOutputDeviceIndex;
    AudioDeviceModule::WindowsDeviceType    _inputDevice;
    AudioDeviceModule::WindowsDeviceType    _outputDevice;
    WebRtc_UWord16                          _inputDeviceIndex;
    WebRtc_UWord16                          _outputDeviceIndex;

    bool                                    _AGC;

    WebRtc_UWord16                          _playWarning;
    WebRtc_UWord16                          _playError;
    WebRtc_UWord16                          _recWarning;
    WebRtc_UWord16                          _recError;

    AudioDeviceModule::BufferType           _playBufType;
    WebRtc_UWord16                          _playBufDelay;
    WebRtc_UWord16                          _playBufDelayFixed;

    WebRtc_UWord16                          _newMicLevel;

    // Scratch buffer used by WideToUTF8(); mutable so the const method
    // can write into it.
    mutable char                            _str[512];
};
+
+#endif    // #if (_MSC_VER >= 1400)
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CORE_WIN_H
+
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_device_utility_win.cc b/trunk/src/modules/audio_device/main/source/win/audio_device_utility_win.cc
new file mode 100644
index 0000000..5a29972
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_device_utility_win.cc
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_win.h"
+#include "audio_device_config.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include <windows.h>
+#include <tchar.h>
+#include <strsafe.h>
+
+#define STRING_MAX_SIZE 256
+
+typedef void (WINAPI *PGNSI)(LPSYSTEM_INFO);
+typedef BOOL (WINAPI *PGPI)(DWORD, DWORD, DWORD, DWORD, PDWORD);
+
+namespace webrtc
+{
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceUtilityWindows() - ctor
+// ----------------------------------------------------------------------------
+
+// Constructs the Windows audility helper. Creates the critical section that
+// guards this object; `id` is only used to tag trace output.
+AudioDeviceUtilityWindows::AudioDeviceUtilityWindows(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _lastError(AudioDeviceModule::kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceUtilityWindows() - dtor
+// ----------------------------------------------------------------------------
+
+// Destructor. Takes the lock once (scoped) before releasing the critical
+// section object itself, which was heap-allocated in the constructor.
+AudioDeviceUtilityWindows::~AudioDeviceUtilityWindows()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(_critSect);
+
+        // free stuff here...
+    }
+
+    // _critSect is a reference to a heap object; delete through its address.
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Init()
+// ----------------------------------------------------------------------------
+
+// Queries a human-readable OS description and writes it to the trace log.
+// Always returns 0, even when the OS query or the UTF-8 conversion fails.
+WebRtc_Word32 AudioDeviceUtilityWindows::Init()
+{
+
+    TCHAR szOS[STRING_MAX_SIZE];
+
+    if (GetOSDisplayString(szOS))
+    {
+#ifdef _UNICODE
+        // Convert the wide-character OS string to UTF-8 for the trace macro.
+        char os[STRING_MAX_SIZE];
+        if (WideCharToMultiByte(CP_UTF8, 0, szOS, -1, os, STRING_MAX_SIZE, NULL, NULL) == 0)
+        {
+            // Fix: the original captured GetLastError() into a local that was
+            // never used; log the failure code instead of dropping it, and
+            // make sure the fallback string is NUL-terminated.
+            DWORD err = GetLastError();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  WideCharToMultiByte failed (err=%lu)", err);
+            strncpy(os, "Could not get OS info", STRING_MAX_SIZE);
+            os[STRING_MAX_SIZE - 1] = '\0';
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "  OS info: %s", os);
+#else
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "  OS info: %s", szOS);
+#endif
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// Builds a human-readable OS description (e.g. "Microsoft Windows 7
+// Professional Service Pack 1 (build 7601), 64-bit") into pszOS.
+// Assumes pszOS points at a buffer of at least STRING_MAX_SIZE TCHARs — all
+// callers in this file pass TCHAR szOS[STRING_MAX_SIZE]; TODO confirm for any
+// new caller. Returns TRUE on success, FALSE for pre-Win2000/non-NT systems
+// or when the version query fails.
+BOOL AudioDeviceUtilityWindows::GetOSDisplayString(LPTSTR pszOS)
+{
+    OSVERSIONINFOEX osvi;
+    SYSTEM_INFO si;
+    PGNSI pGNSI;
+    BOOL bOsVersionInfoEx;
+
+    ZeroMemory(&si, sizeof(SYSTEM_INFO));
+    ZeroMemory(&osvi, sizeof(OSVERSIONINFOEX));
+
+    // GetVersionEx requires the size field to be set before the call.
+    osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+
+    // Retrieve information about the current operating system
+    //
+    if (!(bOsVersionInfoEx = GetVersionEx((OSVERSIONINFO *) &osvi)))
+        return FALSE;
+
+    // Parse our OS version string
+    //
+    if (VER_PLATFORM_WIN32_NT == osvi.dwPlatformId && osvi.dwMajorVersion > 4)
+    {
+        StringCchCopy(pszOS, STRING_MAX_SIZE, TEXT("Microsoft "));
+
+        // Test for the specific product
+        //
+        //  Operating system	    Version number
+        //  --------------------------------------
+        //  Windows 7	            6.1
+        //  Windows Server 2008 R2	6.1
+        //  Windows Server 2008	    6.0
+        //  Windows Vista	        6.0
+        //  - - - - - - - - - - - - - - - - - - -
+        //  Windows Server 2003 R2	5.2
+        //  Windows Server 2003	    5.2
+        //  Windows XP	            5.1
+        //  Windows 2000	        5.0
+        //
+        //  see http://msdn.microsoft.com/en-us/library/ms724832(VS.85).aspx for details
+        //
+        //  NOTE(review): 6.x minor versions above 1 (e.g. Windows 8) fall
+        //  through all branches below and produce just "Microsoft  (build N)".
+        if (osvi.dwMajorVersion == 6)
+        {
+            if (osvi.dwMinorVersion == 0)
+            {
+                // Windows Vista or Server 2008
+                if (osvi.wProductType == VER_NT_WORKSTATION)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Vista "));
+                else 
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Server 2008 " ));
+            }
+
+            if (osvi.dwMinorVersion == 1)
+            {
+                // Windows 7 or Server 2008 R2
+                if (osvi.wProductType == VER_NT_WORKSTATION)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows 7 "));
+                else 
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Server 2008 R2 " ));
+            }
+        }
+
+        if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 2)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Server 2003"));
+        }
+
+        if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 1)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows XP "));
+            if (osvi.wSuiteMask & VER_SUITE_PERSONAL)
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Home Edition" ));
+            else 
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Professional" ));
+        }
+
+        if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 0)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows 2000 "));
+
+            if (osvi.wProductType == VER_NT_WORKSTATION )
+            {
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Professional" ));
+            }
+            else 
+            {
+                if (osvi.wSuiteMask & VER_SUITE_DATACENTER)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Datacenter Server" ));
+                else if (osvi.wSuiteMask & VER_SUITE_ENTERPRISE)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Advanced Server" ));
+                else StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Server" ));
+            }
+        }
+
+        // Include service pack (if any)
+        //
+        if (_tcslen(osvi.szCSDVersion) > 0)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT(" "));
+            StringCchCat(pszOS, STRING_MAX_SIZE, osvi.szCSDVersion);
+        }
+
+        TCHAR buf[80];
+
+        // Include build number
+        //
+        StringCchPrintf( buf, 80, TEXT(" (build %d)"), osvi.dwBuildNumber);
+        StringCchCat(pszOS, STRING_MAX_SIZE, buf);
+
+        // Call GetNativeSystemInfo if supported or GetSystemInfo otherwise.
+        // GetNativeSystemInfo reports the real (non-WOW64) architecture but
+        // only exists on XP+, hence the dynamic lookup.
+        //
+        pGNSI = (PGNSI) GetProcAddress(GetModuleHandle(TEXT("kernel32.dll")), "GetNativeSystemInfo");
+        if (NULL != pGNSI)
+            pGNSI(&si);
+        else 
+            GetSystemInfo(&si);
+
+        // Add 64-bit or 32-bit for OS versions "later than" Vista
+        //
+        if (osvi.dwMajorVersion >= 6)
+        {
+            if ((si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_AMD64) || 
+                (si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_IA64))
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( ", 64-bit" ));
+            else if (si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_INTEL )
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT(", 32-bit"));
+        }
+      
+        return TRUE; 
+    }
+    else
+    {  
+        return FALSE;
+   }
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_device_utility_win.h b/trunk/src/modules/audio_device/main/source/win/audio_device_utility_win.h
new file mode 100644
index 0000000..77b4c22
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_device_utility_win.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_WIN_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+#include <windows.h>
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+// Windows implementation of AudioDeviceUtility: logs OS version information
+// at startup. Thread-safety is provided by an owned critical section.
+class AudioDeviceUtilityWindows : public AudioDeviceUtility
+{
+public:
+    // `id` tags trace output only.
+    AudioDeviceUtilityWindows(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityWindows();
+
+    // Logs a human-readable OS description; always returns 0.
+    virtual WebRtc_Word32 Init();
+
+private:
+    // Writes an OS description into pszOS (buffer of STRING_MAX_SIZE TCHARs);
+    // returns TRUE on success, FALSE for unsupported/unknown platforms.
+    BOOL GetOSDisplayString(LPTSTR pszOS);
+
+private:
+    CriticalSectionWrapper&         _critSect;   // owned; deleted in dtor
+    WebRtc_Word32                   _id;         // trace identifier
+    AudioDeviceModule::ErrorCode    _lastError;  // last recorded error code
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_WIN_H
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_device_wave_win.cc b/trunk/src/modules/audio_device/main/source/win/audio_device_wave_win.cc
new file mode 100644
index 0000000..509732c
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_device_wave_win.cc
@@ -0,0 +1,3818 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility.h"
+#include "audio_device_wave_win.h"
+#include "audio_device_config.h"
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+#include <windows.h>
+#include <objbase.h>    // CoTaskMemAlloc, CoTaskMemFree
+#include <strsafe.h>    // StringCchCopy(), StringCchCat(), StringCchPrintf()
+#include <cassert>
+
+// Avoids the need of Windows 7 SDK
+#ifndef WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE
+#define WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE   0x0010
+#endif
+
+// Supported in Windows Vista and Windows 7.
+// http://msdn.microsoft.com/en-us/library/dd370819(v=VS.85).aspx
+// Taken from Mmddk.h.
+#define DRV_RESERVED                      0x0800
+#define DRV_QUERYFUNCTIONINSTANCEID       (DRV_RESERVED + 17)
+#define DRV_QUERYFUNCTIONINSTANCEIDSIZE   (DRV_RESERVED + 18)
+
+#define POW2(A) (2 << ((A) - 1))
+
+namespace webrtc {
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsWave - ctor
+// ----------------------------------------------------------------------------
+
+// Constructs the Wave-API audio device. Allocates the synchronization
+// primitives (critical sections, events) and zero/default-initializes all
+// state; no audio device is opened here — that happens in Init()/Init*().
+AudioDeviceWindowsWave::AudioDeviceWindowsWave(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _timeEvent(*EventWrapper::Create()),
+    _recStartEvent(*EventWrapper::Create()),
+    _playStartEvent(*EventWrapper::Create()),
+    _hGetCaptureVolumeThread(NULL),
+    _hShutdownGetVolumeEvent(NULL),
+    _hSetCaptureVolumeThread(NULL),
+    _hShutdownSetVolumeEvent(NULL),
+    _hSetCaptureVolumeEvent(NULL),
+    _ptrThread(NULL),
+    _threadID(0),
+    _critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _mixerManager(id),
+    _usingInputDeviceIndex(false),
+    _usingOutputDeviceIndex(false),
+    _inputDevice(AudioDeviceModule::kDefaultDevice),
+    _outputDevice(AudioDeviceModule::kDefaultDevice),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _initialized(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _recording(false),
+    _playing(false),
+    _startRec(false),
+    _stopRec(false),
+    _startPlay(false),
+    _stopPlay(false),
+    _AGC(false),
+    _hWaveIn(NULL),
+    _hWaveOut(NULL),
+    _recChannels(N_REC_CHANNELS),
+    _playChannels(N_PLAY_CHANNELS),
+    _recBufCount(0),
+    _recPutBackDelay(0),
+    _recDelayCount(0),
+    _playBufCount(0),
+    _prevPlayTime(0),
+    _prevRecTime(0),
+    _prevTimerCheckTime(0),
+    _timesdwBytes(0),
+    _timerFaults(0),
+    _timerRestartAttempts(0),
+    _no_of_msecleft_warnings(0),
+    _MAX_minBuffer(65),
+    _useHeader(0),
+    _dTcheckPlayBufDelay(10),
+    _playBufDelay(80),
+    _playBufDelayFixed(80),
+    _minPlayBufDelay(20),
+    _avgCPULoad(0),
+    _sndCardPlayDelay(0),
+    _sndCardRecDelay(0),
+    _plSampOld(0),
+    _rcSampOld(0),
+    _playBufType(AudioDeviceModule::kAdaptiveBufferSize),
+    _recordedBytes(0),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _newMicLevel(0),
+    _minMicVolume(0),
+    _maxMicVolume(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+
+    // Initialize value, set to 0 if it fails
+    if (!QueryPerformanceFrequency(&_perfFreq))
+    {
+        _perfFreq.QuadPart = 0;
+    }
+
+    // Auto-reset events used by the volume getter/setter worker threads.
+    // NOTE(review): the CreateEvent results are not checked here; a NULL
+    // handle would surface later in Terminate()/the worker waits — verify
+    // whether Init() guards against this elsewhere.
+    _hShutdownGetVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hShutdownSetVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hSetCaptureVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsWave - dtor
+// ----------------------------------------------------------------------------
+
+// Destructor: stops all activity via Terminate(), then releases the owned
+// wrappers (allocated in the ctor, held by reference) and the Win32 events.
+AudioDeviceWindowsWave::~AudioDeviceWindowsWave()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    // Shuts down worker threads and the timer before tearing anything down.
+    Terminate();
+
+    delete &_recStartEvent;
+    delete &_playStartEvent;
+    delete &_timeEvent;
+    delete &_critSect;
+    delete &_critSectCb;
+
+    if (NULL != _hShutdownGetVolumeEvent)
+    {
+        CloseHandle(_hShutdownGetVolumeEvent);
+        _hShutdownGetVolumeEvent = NULL;
+    }
+
+    if (NULL != _hShutdownSetVolumeEvent)
+    {
+        CloseHandle(_hShutdownSetVolumeEvent);
+        _hShutdownSetVolumeEvent = NULL;
+    }
+
+    if (NULL != _hSetCaptureVolumeEvent)
+    {
+        CloseHandle(_hSetCaptureVolumeEvent);
+        _hSetCaptureVolumeEvent = NULL;
+    }
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+// Attaches the shared audio buffer and pushes this implementation's default
+// sample rates and channel counts into it. Called by the owning module;
+// `audioBuffer` is borrowed, not owned.
+void AudioDeviceWindowsWave::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+// Reports which audio backend this object implements. This class is always
+// backed by the legacy Windows Wave (MME) API, so the answer is constant.
+WebRtc_Word32 AudioDeviceWindowsWave::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kWindowsWaveAudio;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+// One-time initialization: resets statistics, enumerates mixer devices,
+// starts the realtime audio thread, the periodic timer, and the two
+// capture-volume worker threads. Idempotent (returns 0 if already
+// initialized). Returns -1 on any startup failure.
+WebRtc_Word32 AudioDeviceWindowsWave::Init()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    const WebRtc_UWord32 nowTime(AudioDeviceUtility::GetTimeInMS());
+
+    // Reset timing/diagnostic state to "now".
+    _recordedBytes = 0;
+    _prevRecByteCheckTime = nowTime;
+    _prevRecTime = nowTime;
+    _prevPlayTime = nowTime;
+    _prevTimerCheckTime = nowTime;
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    _mixerManager.EnumerateAll();
+
+    if (_ptrThread)
+    {
+        // thread is already created and active
+        return 0;
+    }
+
+    const char* threadName = "webrtc_audio_module_thread";
+    _ptrThread = ThreadWrapper::CreateThread(ThreadFunc, 
+                                             this, 
+                                             kRealtimePriority,
+                                             threadName);
+    if (_ptrThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "failed to create the audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThread->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "failed to start the audio thread");
+        delete _ptrThread;
+        _ptrThread = NULL;
+        return -1;
+    }
+    _threadID = threadID;
+
+    // The periodic timer event drives the audio thread loop.
+    const bool periodic(true);
+    if (!_timeEvent.StartTimer(periodic, TIMER_PERIOD_MS))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "failed to start the timer event");
+        if (_ptrThread->Stop())
+        {
+            delete _ptrThread;
+            _ptrThread = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "unable to stop the activated thread");
+        }
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "periodic timer (dT=%d) is now active", TIMER_PERIOD_MS);
+
+    // NOTE(review): if either CreateThread below fails, the audio thread and
+    // timer started above are left running even though -1 is returned —
+    // callers are expected to call Terminate(); verify that they do.
+    _hGetCaptureVolumeThread = CreateThread(NULL,
+                                            0,
+                                            GetCaptureVolumeThread,
+                                            this,
+                                            0,
+                                            NULL);
+    if (_hGetCaptureVolumeThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to create the volume getter thread");
+        return -1;
+    }
+
+    SetThreadPriority(_hGetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
+
+    _hSetCaptureVolumeThread = CreateThread(NULL,
+                                            0,
+                                            SetCaptureVolumeThread,
+                                            this,
+                                            0,
+                                            NULL);
+    if (_hSetCaptureVolumeThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to create the volume setter thread");
+        return -1;
+    }
+
+    SetThreadPriority(_hSetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+// Shuts everything started by Init() down: the audio thread, the two
+// capture-volume worker threads, and the periodic timer. Safe to call when
+// not initialized (returns 0 immediately). Returns -1 if a thread refuses
+// to stop within its grace period.
+WebRtc_Word32 AudioDeviceWindowsWave::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    _critSect.Enter();
+
+    _mixerManager.Close();
+
+    if (_ptrThread)
+    {
+        // Detach the thread pointer under the lock, then stop it outside the
+        // lock so the thread function can still acquire _critSect while
+        // winding down.
+        ThreadWrapper* tmpThread = _ptrThread;
+        _ptrThread = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+        _timeEvent.Set();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            // Fix: the original called _critSect.Leave() again here although
+            // the lock was already released above, unbalancing the critical
+            // section; the extra Leave has been removed.
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "failed to close down the audio thread");
+            return -1;
+        }
+    }
+    else
+    {
+        _critSect.Leave();
+    }
+
+    // Signal the volume getter thread to exit and wait (2 s) for it.
+    _critSect.Enter();
+    SetEvent(_hShutdownGetVolumeEvent);
+    _critSect.Leave();
+    WebRtc_Word32 ret = WaitForSingleObject(_hGetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to close down volume getter thread");
+        CloseHandle(_hGetCaptureVolumeThread);
+        _hGetCaptureVolumeThread = NULL;
+        return -1;
+    }
+    _critSect.Enter();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+        "  volume getter thread is now closed");
+
+    // Same shutdown handshake for the volume setter thread.
+    SetEvent(_hShutdownSetVolumeEvent);
+    _critSect.Leave();
+    ret = WaitForSingleObject(_hSetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to close down volume setter thread");
+        CloseHandle(_hSetCaptureVolumeThread);
+        _hSetCaptureVolumeThread = NULL;
+        return -1;
+    }
+    _critSect.Enter();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "  volume setter thread is now closed");
+
+    CloseHandle(_hGetCaptureVolumeThread);
+    _hGetCaptureVolumeThread = NULL;
+
+    CloseHandle(_hSetCaptureVolumeThread);
+    _hSetCaptureVolumeThread = NULL;
+
+    _critSect.Leave();
+
+    _timeEvent.StopTimer();
+
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return 0;
+}
+
+
+// Static Win32 thread entry point: forwards to the instance worker loop.
+DWORD WINAPI AudioDeviceWindowsWave::GetCaptureVolumeThread(LPVOID context)
+{
+    AudioDeviceWindowsWave* self = static_cast<AudioDeviceWindowsWave*>(context);
+    return self->DoGetCaptureVolumeThread();
+}
+
+// Static Win32 thread entry point: forwards to the instance worker loop.
+DWORD WINAPI AudioDeviceWindowsWave::SetCaptureVolumeThread(LPVOID context)
+{
+    AudioDeviceWindowsWave* self = static_cast<AudioDeviceWindowsWave*>(context);
+    return self->DoSetCaptureVolumeThread();
+}
+
+// Worker loop of the volume getter thread: every GET_MIC_VOLUME_INTERVAL_MS
+// (while AGC is on) it reads the current microphone level and caches it in
+// the audio buffer. Exits (returns 0) when _hShutdownGetVolumeEvent is
+// signaled; returns (DWORD)-1 on an unexpected wait error.
+DWORD AudioDeviceWindowsWave::DoGetCaptureVolumeThread()
+{
+    HANDLE waitObject = _hShutdownGetVolumeEvent;
+
+    while (1)
+    {
+        // Doubles as both shutdown check and periodic poll interval.
+        DWORD waitResult = WaitForSingleObject(waitObject, 
+                                               GET_MIC_VOLUME_INTERVAL_MS);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0: // _hShutdownGetVolumeEvent
+                return 0;
+            case WAIT_TIMEOUT:  // timeout notification => do a poll
+                break;
+            default:            // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on get volume thread");
+                return -1;
+        }
+
+        if (AGC())
+        {
+            WebRtc_UWord32 currentMicLevel = 0;
+            if (MicrophoneVolume(currentMicLevel) == 0)
+            {
+                // This doesn't set the system volume, just stores it.
+                _critSect.Enter();
+                if (_ptrAudioBuffer)
+                {
+                    _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+                }
+                _critSect.Leave();
+            }
+        }
+    }
+}
+
+// Worker loop of the volume setter thread: blocks until either the shutdown
+// event or the "new mic level pending" event fires, then applies the level
+// stored in _newMicLevel. Exits (returns 0) on shutdown; returns (DWORD)-1
+// on an unexpected wait error.
+DWORD AudioDeviceWindowsWave::DoSetCaptureVolumeThread()
+{
+    HANDLE waitArray[2] = {_hShutdownSetVolumeEvent, _hSetCaptureVolumeEvent};
+
+    while (1)
+    {
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0:     // _hShutdownSetVolumeEvent
+                return 0;
+            case WAIT_OBJECT_0 + 1: // _hSetCaptureVolumeEvent
+                break;
+            default:                // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on set volume thread");
+                return -1;
+        }
+
+        // Snapshot the requested level under the lock; the mixer call itself
+        // is made outside the lock since it may block.
+        _critSect.Enter();
+        WebRtc_UWord32 newMicLevel = _newMicLevel;
+        _critSect.Leave();
+
+        if (SetMicrophoneVolume(newMicLevel) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "  the required modification of the microphone volume failed");
+        }
+    }
+    // Fix: the original had an unreachable "return 0;" after this infinite
+    // loop (every exit path returns from inside the switch); removed.
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+// True once Init() has completed successfully and Terminate() has not
+// since been called.
+bool AudioDeviceWindowsWave::Initialized() const
+{
+    return _initialized;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+// Probes for a usable speaker by attempting to open the output mixer for
+// the currently selected output device. Reports the result through
+// `available`; the probe itself always returns 0.
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerIsAvailable(bool& available)
+{
+    if (InitSpeaker() == -1)
+    {
+        // No valid/controllable speaker could be opened.
+        available = false;
+        return 0;
+    }
+
+    // InitSpeaker succeeded, so a valid speaker exists; close the output
+    // mixer that the probe opened before reporting back.
+    available = true;
+    _mixerManager.CloseSpeaker();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+// Opens the output mixer for the selected playout device. Fails (-1) while
+// playout is active, when no controllable speaker exists, or when the mixer
+// cannot be opened.
+WebRtc_Word32 AudioDeviceWindowsWave::InitSpeaker()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        // Cannot re-initialize the speaker during active playout.
+        return -1;
+    }
+
+    if (_mixerManager.EnumerateSpeakers() == -1)
+    {
+        // failed to locate any valid/controllable speaker
+        return -1;
+    }
+
+    // The output device may have been selected either by enumeration index
+    // or by Windows device type; open accordingly.
+    const WebRtc_Word32 openResult = IsUsingOutputDeviceIndex() ?
+        _mixerManager.OpenSpeaker(OutputDeviceIndex()) :
+        _mixerManager.OpenSpeaker(OutputDevice());
+
+    return (openResult == -1) ? -1 : 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+// Probes for a usable microphone by attempting to open the input mixer for
+// the currently selected input device. Reports the result through
+// `available`; the probe itself always returns 0.
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneIsAvailable(bool& available)
+{
+    if (InitMicrophone() == -1)
+    {
+        // No valid/controllable microphone could be opened.
+        available = false;
+        return 0;
+    }
+
+    // InitMicrophone succeeded, so a valid microphone exists; close the
+    // input mixer that the probe opened before reporting back.
+    available = true;
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+// Opens the input mixer for the selected recording device and caches its
+// volume range. Fails (-1) while recording is active, when no controllable
+// microphone exists, or when the mixer cannot be opened.
+WebRtc_Word32 AudioDeviceWindowsWave::InitMicrophone()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        // Cannot re-initialize the microphone during active recording.
+        return -1;
+    }
+
+    if (_mixerManager.EnumerateMicrophones() == -1)
+    {
+        // failed to locate any valid/controllable microphone
+        return -1;
+    }
+
+    // The input device may have been selected either by enumeration index
+    // or by Windows device type; open accordingly.
+    const WebRtc_Word32 openResult = IsUsingInputDeviceIndex() ?
+        _mixerManager.OpenMicrophone(InputDeviceIndex()) :
+        _mixerManager.OpenMicrophone(InputDevice());
+    if (openResult == -1)
+    {
+        return -1;
+    }
+
+    // Cache the volume bounds; a failed query is only logged and leaves the
+    // cached bound at zero.
+    WebRtc_UWord32 volume = 0;
+    if (_mixerManager.MaxMicrophoneVolume(volume) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+            "  unable to retrieve max microphone volume");
+    }
+    _maxMicVolume = volume;
+
+    volume = 0;
+    if (_mixerManager.MinMicrophoneVolume(volume) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+            "  unable to retrieve min microphone volume");
+    }
+    _minMicVolume = volume;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+// Delegates to the mixer manager: true once the output mixer is open.
+bool AudioDeviceWindowsWave::SpeakerIsInitialized() const
+{
+    return _mixerManager.SpeakerIsInitialized();
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+// Delegates to the mixer manager: true once the input mixer is open.
+bool AudioDeviceWindowsWave::MicrophoneIsInitialized() const
+{
+    return _mixerManager.MicrophoneIsInitialized();
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+// Determines whether the selected speaker exposes a volume control. Opens
+// the output mixer as a probe, queries it, and closes it again. Always
+// returns 0; the answer is delivered through `available`.
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerVolumeIsAvailable(bool& available)
+{
+    if (InitSpeaker() == -1)
+    {
+        // failed to find a valid speaker
+        available = false;
+        return 0;
+    }
+
+    // Ask the opened output mixer whether it has a volume control.
+    bool hasVolumeControl(false);
+    _mixerManager.SpeakerVolumeIsAvailable(hasVolumeControl);
+    available = hasVolumeControl;
+
+    // Release the output mixer that the probe opened.
+    _mixerManager.CloseSpeaker();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Delegates directly to the mixer manager; returns its result (0/-1).
+WebRtc_Word32 AudioDeviceWindowsWave::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    return _mixerManager.SetSpeakerVolume(volume);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Reads the current speaker volume from the mixer. The out-parameter is
+// only written on success; returns -1 when the query fails.
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+    WebRtc_UWord32 mixerLevel(0);
+    if (_mixerManager.SpeakerVolume(mixerLevel) == -1)
+    {
+        return -1;
+    }
+
+    volume = mixerLevel;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+//
+//    The low-order word contains the left-channel volume setting, and the
+//    high-order word contains the right-channel setting.
+//    A value of 0xFFFF represents full volume, and a value of 0x0000 is silence.
+//
+//    If a device does not support both left and right volume control,
+//    the low-order word of dwVolume specifies the volume level,
+//    and the high-order word is ignored.
+//
+//    Most devices do not support the full 16 bits of volume-level control
+//    and will not use the least-significant bits of the requested volume setting.
+//    For example, if a device supports 4 bits of volume control, the values
+//    0x4000, 0x4FFF, and 0x43BE will all be truncated to 0x4000.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
+{
+
+    MMRESULT res(0);
+    WAVEOUTCAPS caps;
+
+    // Zero-initialize the caps structure. A failure of waveOutGetDevCaps()
+    // below is only logged, so without this the caps.dwSupport tests would
+    // read an indeterminate value (undefined behavior). With all support
+    // bits zero, the capability checks below fail safely instead.
+    memset(&caps, 0, sizeof(caps));
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_hWaveOut == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no open playout device exists => using default");
+    }
+
+    // To determine whether the device supports volume control on both
+    // the left and right channels, use the WAVECAPS_LRVOLUME flag.
+    //
+    res = waveOutGetDevCaps((UINT_PTR)_hWaveOut, &caps, sizeof(WAVEOUTCAPS));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+    if (!(caps.dwSupport & WAVECAPS_VOLUME))
+    {
+        // this device does not support volume control using the waveOutSetVolume API
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device does not support volume control using the Wave API");
+        return -1;
+    }
+    if (!(caps.dwSupport & WAVECAPS_LRVOLUME))
+    {
+        // high-order word (right channel) is ignored
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "device does not support volume control on both channels");
+    }
+
+    // Pack left channel into the low-order word and right channel into the
+    // high-order word, as required by waveOutSetVolume().
+    DWORD dwVolume(0x00000000);
+    dwVolume = (DWORD)(((volumeRight & 0xFFFF) << 16) | (volumeLeft & 0xFFFF));
+
+    res = waveOutSetVolume(_hWaveOut, dwVolume);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutSetVolume() failed (err=%d)", res);
+        TraceWaveOutError(res);
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+//
+//    The low-order word of this location contains the left-channel volume setting,
+//    and the high-order word contains the right-channel setting.
+//    A value of 0xFFFF (65535) represents full volume, and a value of 0x0000
+//    is silence.
+//
+//    If a device does not support both left and right volume control,
+//    the low-order word of the specified location contains the mono volume level.
+//
+//    The full 16-bit setting(s) set with the waveOutSetVolume function is returned,
+//    regardless of whether the device supports the full 16 bits of volume-level
+//    control.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const
+{
+
+    MMRESULT res(0);
+    WAVEOUTCAPS caps;
+
+    // Zero-initialize the caps structure. A failure of waveOutGetDevCaps()
+    // below is only logged, so without this the caps.dwSupport tests would
+    // read an indeterminate value (undefined behavior). With all support
+    // bits zero, the capability checks below fail safely instead.
+    memset(&caps, 0, sizeof(caps));
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_hWaveOut == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no open playout device exists => using default");
+    }
+
+    // To determine whether the device supports volume control on both
+    // the left and right channels, use the WAVECAPS_LRVOLUME flag.
+    //
+    res = waveOutGetDevCaps((UINT_PTR)_hWaveOut, &caps, sizeof(WAVEOUTCAPS));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+    if (!(caps.dwSupport & WAVECAPS_VOLUME))
+    {
+        // this device does not support volume control using the waveOutSetVolume API
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device does not support volume control using the Wave API");
+        return -1;
+    }
+    if (!(caps.dwSupport & WAVECAPS_LRVOLUME))
+    {
+        // high-order word (right channel) is ignored
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "device does not support volume control on both channels");
+    }
+
+    DWORD dwVolume(0x00000000);
+
+    res = waveOutGetVolume(_hWaveOut, &dwVolume);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutGetVolume() failed (err=%d)", res);
+        TraceWaveOutError(res);
+        return -1;
+    }
+
+    // Left channel is in the low-order word, right channel in the high-order word.
+    WORD wVolumeLeft = LOWORD(dwVolume);
+    WORD wVolumeRight = HIWORD(dwVolume);
+
+    volumeLeft = static_cast<WebRtc_UWord16> (wVolumeLeft);
+    volumeRight = static_cast<WebRtc_UWord16> (wVolumeRight);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+    // Fetch the upper speaker-volume limit from the mixer manager.
+    WebRtc_UWord32 upperLimit(0);
+    if (_mixerManager.MaxSpeakerVolume(upperLimit) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = upperLimit;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+    // Fetch the lower speaker-volume limit from the mixer manager.
+    WebRtc_UWord32 lowerLimit(0);
+    if (_mixerManager.MinSpeakerVolume(lowerLimit) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = lowerLimit;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+    // Fetch the smallest speaker-volume increment from the mixer manager.
+    WebRtc_UWord16 increment(0);
+    if (_mixerManager.SpeakerVolumeStepSize(increment) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = increment;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerMuteIsAvailable(bool& available)
+{
+    // Open the output mixer for the currently selected output device and ask
+    // it whether a mute control is exposed.
+    if (InitSpeaker() == -1)
+    {
+        // No valid speaker could be initialized, so there is no mixer to
+        // query; report that mute control is unavailable.
+        available = false;
+        return 0;
+    }
+
+    bool hasMuteControl(false);
+    _mixerManager.SpeakerMuteIsAvailable(hasMuteControl);
+    available = hasMuteControl;
+
+    // Undo the mixer initialization performed above.
+    _mixerManager.CloseSpeaker();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetSpeakerMute(bool enable)
+{
+    // Delegate to the mixer manager, which owns the output mixer.
+    return _mixerManager.SetSpeakerMute(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerMute(bool& enabled) const
+{
+    // Read the current mute state from the output mixer.
+    bool isMuted(false);
+    if (_mixerManager.SpeakerMute(isMuted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = isMuted;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneMuteIsAvailable(bool& available)
+{
+    // Open the input mixer for the currently selected input device and ask
+    // it whether a mute control is exposed.
+    if (InitMicrophone() == -1)
+    {
+        // No valid microphone could be initialized, so there is no mixer to
+        // query; report that mute control is unavailable.
+        available = false;
+        return 0;
+    }
+
+    bool hasMuteControl(false);
+    _mixerManager.MicrophoneMuteIsAvailable(hasMuteControl);
+    available = hasMuteControl;
+
+    // Undo the mixer initialization performed above.
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetMicrophoneMute(bool enable)
+{
+    // Delegate to the mixer manager, which owns the input mixer.
+    return _mixerManager.SetMicrophoneMute(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneMute(bool& enabled) const
+{
+    // Read the current mute state from the input mixer.
+    bool isMuted(false);
+    if (_mixerManager.MicrophoneMute(isMuted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = isMuted;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneBoostIsAvailable(bool& available)
+{
+    // Open the input mixer for the currently selected input device and ask
+    // it whether a boost control is exposed.
+    if (InitMicrophone() == -1)
+    {
+        // No valid microphone could be initialized, so there is no mixer to
+        // query; report that boost control is unavailable.
+        available = false;
+        return 0;
+    }
+
+    bool hasBoostControl(false);
+    _mixerManager.MicrophoneBoostIsAvailable(hasBoostControl);
+    available = hasBoostControl;
+
+    // Undo the mixer initialization performed above.
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetMicrophoneBoost(bool enable)
+{
+    // Delegate to the mixer manager, which owns the input mixer.
+    return _mixerManager.SetMicrophoneBoost(enable);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneBoost(bool& enabled) const
+{
+    // Read the current boost state from the input mixer.
+    bool boostState(false);
+    if (_mixerManager.MicrophoneBoost(boostState) == -1)
+    {
+        return -1;
+    }
+
+    enabled = boostState;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoRecordingIsAvailable(bool& available)
+{
+    // Stereo recording is always reported as supported by this implementation.
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetStereoRecording(bool enable)
+{
+    // Select 2 channels for stereo recording, 1 for mono.
+    _recChannels = enable ? 2 : 1;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoRecording(bool& enabled) const
+{
+    // Stereo recording is active exactly when two channels are configured.
+    enabled = (_recChannels == 2);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoPlayoutIsAvailable(bool& available)
+{
+    // Stereo playout is always reported as supported by this implementation.
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+//
+//  Specifies the number of output channels.
+//
+//  NOTE - the setting will only have an effect after InitPlayout has
+//  been called.
+//
+//  16-bit mono:
+//
+//  Each sample is 2 bytes. Sample 1 is followed by samples 2, 3, 4, and so on.
+//  For each sample, the first byte is the low-order byte of channel 0 and the
+//  second byte is the high-order byte of channel 0.
+//
+//  16-bit stereo:
+//
+//  Each sample is 4 bytes. Sample 1 is followed by samples 2, 3, 4, and so on.
+//  For each sample, the first byte is the low-order byte of channel 0 (left channel);
+//  the second byte is the high-order byte of channel 0; the third byte is the
+//  low-order byte of channel 1 (right channel); and the fourth byte is the
+//  high-order byte of channel 1.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetStereoPlayout(bool enable)
+{
+    // Select 2 channels for stereo playout, 1 for mono. Takes effect only
+    // after InitPlayout() has been called (see header comment above).
+    _playChannels = enable ? 2 : 1;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoPlayout(bool& enabled) const
+{
+    // Stereo playout is active exactly when two channels are configured.
+    enabled = (_playChannels == 2);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetAGC(bool enable)
+{
+    // Store the Automatic Gain Control setting.
+    _AGC = enable;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::AGC() const
+{
+    // Return the stored Automatic Gain Control setting.
+    return _AGC;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneVolumeIsAvailable(bool& available)
+{
+    // Open the input mixer for the currently selected input device and ask
+    // it whether a volume control is exposed.
+    if (InitMicrophone() == -1)
+    {
+        // No valid microphone could be initialized; report that volume
+        // control is unavailable.
+        available = false;
+        return 0;
+    }
+
+    bool hasVolumeControl(false);
+    _mixerManager.MicrophoneVolumeIsAvailable(hasVolumeControl);
+    available = hasVolumeControl;
+
+    // Undo the mixer initialization performed above.
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    // Delegate to the mixer manager, which owns the input mixer.
+    return _mixerManager.SetMicrophoneVolume(volume);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    // Query the input mixer for the current microphone volume level.
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to retrieve current microphone level");
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    // _maxMicVolume can be zero in AudioMixerManager::MaxMicrophoneVolume():
+    // (1) API GetLineControl() returns failure at querying the max Mic level.
+    // (2) API GetLineControl() returns maxVolume as zero in rare cases.
+    // Both cases show we don't have access to the mixer controls.
+    // We return -1 here to indicate that.
+    if (_maxMicVolume == 0)
+    {
+        return -1;
+    }
+
+    maxVolume = _maxMicVolume;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+    // Return the cached lower microphone-volume limit.
+    minVolume = _minMicVolume;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+    // Fetch the smallest microphone-volume increment from the mixer manager.
+    WebRtc_UWord16 increment(0);
+    if (_mixerManager.MicrophoneVolumeStepSize(increment) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = increment;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsWave::PlayoutDevices()
+{
+    // Number of waveform-audio output devices present in the system.
+    return static_cast<WebRtc_Word16>(waveOutGetNumDevs());
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+    // Selects the playout device by index. Must be called before playout
+    // has been initialized.
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    UINT nDevices = waveOutGetNumDevs();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "number of availiable waveform-audio output devices is %u", nDevices);
+
+    // index is unsigned, so the previous (index < 0) test was always false;
+    // worse, when nDevices == 0 the unsigned expression (nDevices-1) wrapped
+    // around to UINT_MAX and the old range check accepted any index.
+    // Comparing directly against the device count fixes both.
+    if (index >= nDevices)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _usingOutputDeviceIndex = true;
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    // Selects the playout device by Windows device type. Must be called
+    // before playout has been initialized.
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    // Both kDefaultDevice and kDefaultCommunicationDevice are accepted as-is;
+    // the original code contained empty placeholder branches for them and no
+    // type-specific action is taken here.
+
+    _usingOutputDeviceIndex = false;
+    _outputDevice = device;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+    // Returns the UTF-8 friendly name and, on Vista/Win7, the unique
+    // endpoint ID string of the playout device at the given index.
+
+    WebRtc_UWord16 nDevices(PlayoutDevices());
+
+    // Special fix for the case when the user asks for the name of the default device.
+    //
+    if (index == (WebRtc_UWord16)(-1))
+    {
+        index = 0;
+    }
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    WAVEOUTCAPSW caps;    // szPname member (product name (NULL terminated) is a WCHAR
+    MMRESULT res;
+
+    res = waveOutGetDevCapsW(index, &caps, sizeof(WAVEOUTCAPSW));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCapsW() failed (err=%d)", res);
+        return -1;
+    }
+    if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 1", GetLastError());
+    }
+
+    if (guid == NULL)
+    {
+        return 0;
+    }
+
+    // It is possible to get the unique endpoint ID string using the Wave API.
+    // However, it is only supported on Windows Vista and Windows 7.
+
+    size_t cbEndpointId(0);
+
+    // Get the size (including the terminating null) of the endpoint ID string of the waveOut device.
+    // Windows Vista supports the DRV_QUERYFUNCTIONINSTANCEIDSIZE and DRV_QUERYFUNCTIONINSTANCEID messages.
+    res = waveOutMessage((HWAVEOUT)IntToPtr(index),
+                          DRV_QUERYFUNCTIONINSTANCEIDSIZE,
+                         (DWORD_PTR)&cbEndpointId, NULL);
+    if (res != MMSYSERR_NOERROR)
+    {
+        // DRV_QUERYFUNCTIONINSTANCEIDSIZE is not supported <=> earlier version of Windows than Vista
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) failed (err=%d)", res);
+        TraceWaveOutError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
+        }
+        return 0;
+    }
+
+    // waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) worked => we are on a Vista or Windows 7 device
+
+    WCHAR *pstrEndpointId = NULL;
+    pstrEndpointId = (WCHAR*)CoTaskMemAlloc(cbEndpointId);
+    if (pstrEndpointId == NULL)
+    {
+        // The allocation result was previously unchecked before being handed
+        // to waveOutMessage(). Fall back to the friendly name as guid,
+        // mirroring the other failure paths above.
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "CoTaskMemAlloc() failed");
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
+        }
+        return 0;
+    }
+
+    // Get the endpoint ID string for this waveOut device.
+    res = waveOutMessage((HWAVEOUT)IntToPtr(index),
+                          DRV_QUERYFUNCTIONINSTANCEID,
+                         (DWORD_PTR)pstrEndpointId,
+                          cbEndpointId);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveOutMessage(DRV_QUERYFUNCTIONINSTANCEID) failed (err=%d)", res);
+        TraceWaveOutError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 3", GetLastError());
+        }
+        CoTaskMemFree(pstrEndpointId);
+        return 0;
+    }
+
+    if (WideCharToMultiByte(CP_UTF8, 0, pstrEndpointId, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 4", GetLastError());
+    }
+    CoTaskMemFree(pstrEndpointId);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize])
+{
+    // Returns the UTF-8 friendly name and, on Vista/Win7, the unique
+    // endpoint ID string of the recording device at the given index.
+
+    WebRtc_UWord16 nDevices(RecordingDevices());
+
+    // Special fix for the case when the user asks for the name of the default device.
+    //
+    if (index == (WebRtc_UWord16)(-1))
+    {
+        index = 0;
+    }
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    WAVEINCAPSW caps;    // szPname member (product name (NULL terminated) is a WCHAR
+    MMRESULT res;
+
+    res = waveInGetDevCapsW(index, &caps, sizeof(WAVEINCAPSW));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCapsW() failed (err=%d)", res);
+        return -1;
+    }
+    if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 1", GetLastError());
+    }
+
+    if (guid == NULL)
+    {
+        return 0;
+    }
+
+    // It is possible to get the unique endpoint ID string using the Wave API.
+    // However, it is only supported on Windows Vista and Windows 7.
+
+    size_t cbEndpointId(0);
+
+    // Get the size (including the terminating null) of the endpoint ID string of the waveIn device.
+    // Windows Vista supports the DRV_QUERYFUNCTIONINSTANCEIDSIZE and DRV_QUERYFUNCTIONINSTANCEID messages.
+    res = waveInMessage((HWAVEIN)IntToPtr(index),
+                         DRV_QUERYFUNCTIONINSTANCEIDSIZE,
+                        (DWORD_PTR)&cbEndpointId, NULL);
+    if (res != MMSYSERR_NOERROR)
+    {
+        // DRV_QUERYFUNCTIONINSTANCEIDSIZE is not supported <=> earlier version of Windows than Vista
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) failed (err=%d)", res);
+        TraceWaveInError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
+        }
+        return 0;
+    }
+
+    // waveInMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) worked => we are on a Vista or Windows 7 device
+
+    WCHAR *pstrEndpointId = NULL;
+    pstrEndpointId = (WCHAR*)CoTaskMemAlloc(cbEndpointId);
+    if (pstrEndpointId == NULL)
+    {
+        // The allocation result was previously unchecked before being handed
+        // to waveInMessage(). Fall back to the friendly name as guid,
+        // mirroring the other failure paths above.
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "CoTaskMemAlloc() failed");
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
+        }
+        return 0;
+    }
+
+    // Get the endpoint ID string for this waveIn device.
+    res = waveInMessage((HWAVEIN)IntToPtr(index),
+                          DRV_QUERYFUNCTIONINSTANCEID,
+                         (DWORD_PTR)pstrEndpointId,
+                          cbEndpointId);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInMessage(DRV_QUERYFUNCTIONINSTANCEID) failed (err=%d)", res);
+        TraceWaveInError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 3", GetLastError());
+        }
+        CoTaskMemFree(pstrEndpointId);
+        return 0;
+    }
+
+    if (WideCharToMultiByte(CP_UTF8, 0, pstrEndpointId, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 4", GetLastError());
+    }
+    CoTaskMemFree(pstrEndpointId);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsWave::RecordingDevices()
+{
+    // Number of waveform-audio input devices present in the system.
+    return static_cast<WebRtc_Word16>(waveInGetNumDevs());
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetRecordingDevice(WebRtc_UWord16 index)
+{
+    // Selects the recording device by index. Must be called before recording
+    // has been initialized.
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    UINT nDevices = waveInGetNumDevs();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "number of availiable waveform-audio input devices is %u", nDevices);
+
+    // index is unsigned, so the previous (index < 0) test was always false;
+    // worse, when nDevices == 0 the unsigned expression (nDevices-1) wrapped
+    // around to UINT_MAX and the old range check accepted any index.
+    // Comparing directly against the device count fixes both.
+    if (index >= nDevices)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _usingInputDeviceIndex = true;
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    // Selects the recording device by Windows device type. Must be called
+    // before recording has been initialized.
+    //
+    // Both kDefaultDevice and kDefaultCommunicationDevice are accepted as-is;
+    // the original code contained empty placeholder branches for them and no
+    // type-specific action is taken here.
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    _usingInputDeviceIndex = false;
+    _inputDevice = device;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutIsAvailable(bool& available)
+{
+    // Probe availability by attempting a full playout initialization and
+    // then rolling it back.
+    const WebRtc_Word32 initResult = InitPlayout();
+
+    // Undo any state created by the probe above.
+    StopPlayout();
+
+    available = (initResult != -1);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecordingIsAvailable(bool& available)
+{
+    // Probe availability by attempting a full recording initialization and
+    // then rolling it back.
+    const WebRtc_Word32 initResult = InitRecording();
+
+    // Undo any state created by the probe above.
+    StopRecording();
+
+    available = (initResult != -1);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InitPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitSpeaker() failed");
+    }
+
+    // Enumerate all availiable output devices
+    EnumeratePlayoutDevices();
+
+    // Start by closing any existing wave-output devices
+    //
+    MMRESULT res(MMSYSERR_ERROR);
+
+    if (_hWaveOut != NULL)
+    {
+        res = waveOutClose(_hWaveOut);
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutClose() failed (err=%d)", res);
+            TraceWaveOutError(res);
+        }
+    }
+
+    // Set the output wave format
+    //
+    WAVEFORMATEX waveFormat;
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM;
+    waveFormat.nChannels       = _playChannels;  // mono <=> 1, stereo <=> 2
+    waveFormat.nSamplesPerSec  = N_PLAY_SAMPLES_PER_SEC;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * (waveFormat.wBitsPerSample/8);
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // Open the given waveform-audio output device for playout
+    //
+    HWAVEOUT hWaveOut(NULL);
+
+    if (IsUsingOutputDeviceIndex())
+    {
+        // verify settings first
+        res = waveOutOpen(NULL, _outputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // open the given waveform-audio output device for recording
+            res = waveOutOpen(&hWaveOut, _outputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening output device corresponding to device ID %u", _outputDeviceIndex);
+        }
+    }
+    else
+    {
+        if (_outputDevice == AudioDeviceModule::kDefaultCommunicationDevice)
+        {
+            // check if it is possible to open the default communication device (supported on Windows 7)
+            res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                // if so, open the default communication device for real
+                res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL |  WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+            }
+            else
+            {
+                // use default device since default communication device was not avaliable
+                res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to open default communication device => using default instead");
+            }
+        }
+        else if (_outputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            // open default device since it has been requested
+            res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default output device");
+            }
+        }
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutOpen() failed (err=%d)", res);
+        TraceWaveOutError(res);
+        return -1;
+    }
+
+    // Log information about the aquired output device
+    //
+    WAVEOUTCAPS caps;
+
+    res = waveOutGetDevCaps((UINT_PTR)hWaveOut, &caps, sizeof(WAVEOUTCAPS));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    UINT deviceID(0);
+    res = waveOutGetID(hWaveOut, &deviceID);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetID() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "utilized device ID : %u", deviceID);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name       : %s", caps.szPname);
+
+    // Store valid handle for the open waveform-audio output device
+    _hWaveOut = hWaveOut;
+
+    // Store the input wave header as well
+    _waveFormatOut = waveFormat;
+
+    // Prepare wave-out headers
+    //
+    const WebRtc_UWord8 bytesPerSample = 2*_playChannels;
+
+    for (int n = 0; n < N_BUFFERS_OUT; n++)
+    {
+        // set up the output wave header
+        _waveHeaderOut[n].lpData          = _playBuffer[n];
+        _waveHeaderOut[n].dwBufferLength  = bytesPerSample*PLAY_BUF_SIZE_IN_SAMPLES;
+        _waveHeaderOut[n].dwFlags         = 0;
+        _waveHeaderOut[n].dwLoops         = 0;
+
+        memset(_playBuffer[n], 0, bytesPerSample*PLAY_BUF_SIZE_IN_SAMPLES);
+
+        // The waveOutPrepareHeader function prepares a waveform-audio data block for playback.
+        // The lpData, dwBufferLength, and dwFlags members of the WAVEHDR structure must be set
+        // before calling this function.
+        //
+        res = waveOutPrepareHeader(_hWaveOut, &_waveHeaderOut[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutPrepareHeader(%d) failed (err=%d)", n, res);
+            TraceWaveOutError(res);
+        }
+
+        // perform extra check to ensure that the header is prepared
+        if (_waveHeaderOut[n].dwFlags != WHDR_PREPARED)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutPrepareHeader(%d) failed (dwFlags != WHDR_PREPARED)", n);
+        }
+    }
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+
+    _dTcheckPlayBufDelay = 10;  // check playback buffer delay every 10 ms
+    _playBufCount = 0;          // index of active output wave header (<=> output buffer index)
+    _playBufDelay = 80;         // buffer delay/size is initialized to 80 ms and slowly decreased until er < 25
+    _minPlayBufDelay = 25;      // minimum playout buffer delay
+    _MAX_minBuffer = 65;        // adaptive minimum playout buffer delay cannot be larger than this value
+    _intro = 1;                 // Used to make sure that adaption starts after (2000-1700)/100 seconds
+    _waitCounter = 1700;        // Counter for start of adaption of playback buffer
+    _erZeroCounter = 0;         // Log how many times er = 0 in consequtive calls to RecTimeProc
+    _useHeader = 0;             // Counts number of "useHeader" detections. Stops at 2.
+
+    _writtenSamples = 0;
+    _writtenSamplesOld = 0;
+    _playedSamplesOld = 0;
+    _sndCardPlayDelay = 0;
+    _sndCardRecDelay = 0;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceUtility, _id,"initial playout status: _playBufDelay=%d, _minPlayBufDelay=%d",
+        _playBufDelay, _minPlayBufDelay);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InitRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    _avgCPULoad = 0;
+    _playAcc  = 0;
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitMicrophone() failed");
+    }
+
+    // Enumerate all availiable input devices
+    EnumerateRecordingDevices();
+
+    // Start by closing any existing wave-input devices
+    //
+    MMRESULT res(MMSYSERR_ERROR);
+
+    if (_hWaveIn != NULL)
+    {
+        res = waveInClose(_hWaveIn);
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInClose() failed (err=%d)", res);
+            TraceWaveInError(res);
+        }
+    }
+
+    // Set the input wave format
+    //
+    WAVEFORMATEX waveFormat;
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM;
+    waveFormat.nChannels       = _recChannels;  // mono <=> 1, stereo <=> 2
+    waveFormat.nSamplesPerSec  = N_REC_SAMPLES_PER_SEC;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * (waveFormat.wBitsPerSample/8);
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // Open the given waveform-audio input device for recording
+    //
+    HWAVEIN hWaveIn(NULL);
+
+    if (IsUsingInputDeviceIndex())
+    {
+        // verify settings first
+        res = waveInOpen(NULL, _inputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // open the given waveform-audio input device for recording
+            res = waveInOpen(&hWaveIn, _inputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening input device corresponding to device ID %u", _inputDeviceIndex);
+        }
+    }
+    else
+    {
+        if (_inputDevice == AudioDeviceModule::kDefaultCommunicationDevice)
+        {
+            // check if it is possible to open the default communication device (supported on Windows 7)
+            res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                // if so, open the default communication device for real
+                res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+            }
+            else
+            {
+                // use default device since default communication device was not avaliable
+                res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to open default communication device => using default instead");
+            }
+        }
+        else if (_inputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            // open default device since it has been requested
+            res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default input device");
+            }
+        }
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInOpen() failed (err=%d)", res);
+        TraceWaveInError(res);
+        return -1;
+    }
+
+    // Log information about the aquired input device
+    //
+    WAVEINCAPS caps;
+
+    res = waveInGetDevCaps((UINT_PTR)hWaveIn, &caps, sizeof(WAVEINCAPS));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCaps() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    UINT deviceID(0);
+    res = waveInGetID(hWaveIn, &deviceID);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetID() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "utilized device ID : %u", deviceID);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name       : %s", caps.szPname);
+
+    // Store valid handle for the open waveform-audio input device
+    _hWaveIn = hWaveIn;
+
+    // Store the input wave header as well
+    _waveFormatIn = waveFormat;
+
+    // Mark recording side as initialized
+    _recIsInitialized = true;
+
+    _recBufCount = 0;     // index of active input wave header (<=> input buffer index)
+    _recDelayCount = 0;   // ensures that input buffers are returned with certain delay
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    // set state to ensure that the recording starts from the audio thread
+    _startRec = true;
+
+    // the audio thread will signal when recording has stopped
+    if (kEventTimeout == _recStartEvent.Wait(10000))
+    {
+        _startRec = false;
+        StopRecording();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate recording");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        // the recording state is set by the audio thread after recording has started
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate recording");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StopRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_hWaveIn == NULL)
+    {
+        return -1;
+    }
+
+    bool wasRecording = _recording;
+    _recIsInitialized = false;
+    _recording = false;
+
+    MMRESULT res;
+
+    // Stop waveform-adio input. If there are any buffers in the queue, the
+    // current buffer will be marked as done (the dwBytesRecorded member in
+    // the header will contain the length of data), but any empty buffers in
+    // the queue will remain there.
+    //
+    res = waveInStop(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInStop() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Stop input on the given waveform-audio input device and resets the current
+    // position to zero. All pending buffers are marked as done and returned to
+    // the application.
+    //
+    res = waveInReset(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInReset() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Clean up the preparation performed by the waveInPrepareHeader function.
+    // Only unprepare header if recording was ever started (and headers are prepared).
+    //
+    if (wasRecording)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInUnprepareHeader() will be performed");
+        for (int n = 0; n < N_BUFFERS_IN; n++)
+        {
+            res = waveInUnprepareHeader(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
+            if (MMSYSERR_NOERROR != res)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInUnprepareHeader() failed (err=%d)", res);
+                TraceWaveInError(res);
+            }
+        }
+    }
+
+    // Close the given waveform-audio input device.
+    //
+    res = waveInClose(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInClose() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Set the wave input handle to NULL
+    //
+    _hWaveIn = NULL;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_hWaveIn is now set to NULL");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::Recording() const
+{
+    return (_recording);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StartPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    // set state to ensure that playout starts from the audio thread
+    _startPlay = true;
+
+    // the audio thread will signal when recording has started
+    if (kEventTimeout == _playStartEvent.Wait(10000))
+    {
+        _startPlay = false;
+        StopPlayout();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate playout");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        // the playing state is set by the audio thread after playout has started
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate playing");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StopPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_hWaveOut == NULL)
+    {
+        return -1;
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _sndCardPlayDelay = 0;
+    _sndCardRecDelay = 0;
+
+    MMRESULT res;
+
+    // The waveOutReset function stops playback on the given waveform-audio
+    // output device and resets the current position to zero. All pending
+    // playback buffers are marked as done (WHDR_DONE) and returned to the application.
+    // After this function returns, the application can send new playback buffers
+    // to the device by calling waveOutWrite, or close the device by calling waveOutClose.
+    //
+    res = waveOutReset(_hWaveOut);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutReset() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    // The waveOutUnprepareHeader function cleans up the preparation performed
+    // by the waveOutPrepareHeader function. This function must be called after
+    // the device driver is finished with a data block.
+    // You must call this function before freeing the buffer.
+    //
+    for (int n = 0; n < N_BUFFERS_OUT; n++)
+    {
+        res = waveOutUnprepareHeader(_hWaveOut, &_waveHeaderOut[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutUnprepareHeader() failed (err=%d)", res);
+            TraceWaveOutError(res);
+        }
+    }
+
+    // The waveOutClose function closes the given waveform-audio output device.
+    // The close operation fails if the device is still playing a waveform-audio
+    // buffer that was previously sent by calling waveOutWrite. Before calling
+    // waveOutClose, the application must wait for all buffers to finish playing
+    // or call the waveOutReset function to terminate playback.
+    //
+    res = waveOutClose(_hWaveOut);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutClose() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    _hWaveOut = NULL;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_hWaveOut is now set to NULL");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped lock(_critSect);
+    delayMS = (WebRtc_UWord16)_sndCardPlayDelay;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped lock(_critSect);
+    delayMS = (WebRtc_UWord16)_sndCardRecDelay;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS)
+{
+    CriticalSectionScoped lock(_critSect);
+    _playBufType = type;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        _playBufDelayFixed = sizeMS;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const
+{
+    CriticalSectionScoped lock(_critSect);
+    type = _playBufType;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        sizeMS = _playBufDelayFixed;
+    }
+    else
+    {
+        sizeMS = _playBufDelay;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::CPULoad(WebRtc_UWord16& load) const
+{
+
+    load = static_cast<WebRtc_UWord16>(100*_avgCPULoad);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::PlayoutWarning() const
+{
+    return ( _playWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::PlayoutError() const
+{
+    return ( _playError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::RecordingWarning() const
+{
+    return ( _recWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::RecordingError() const
+{
+    return ( _recError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  InputSanityCheckAfterUnlockedPeriod
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_hWaveIn == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "input state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OutputSanityCheckAfterUnlockedPeriod
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::OutputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_hWaveOut == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "output state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumeratePlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::EnumeratePlayoutDevices()
+{
+
+    WebRtc_UWord16 nDevices(PlayoutDevices());
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#output devices: %u", nDevices);
+
+    WAVEOUTCAPS caps;
+    MMRESULT res;
+
+    for (UINT deviceID = 0; deviceID < nDevices; deviceID++)
+    {
+        res = waveOutGetDevCaps(deviceID, &caps, sizeof(WAVEOUTCAPS));
+        if (res != MMSYSERR_NOERROR)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Device ID %u:", deviceID);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID      : %u", caps.wMid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID           : %u",caps.wPid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver    : %u.%u", HIBYTE(caps.vDriverVersion), LOBYTE(caps.vDriverVersion));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name         : %s", caps.szPname);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwFormats            : 0x%x", caps.dwFormats);
+        if (caps.dwFormats & WAVE_FORMAT_48S16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,stereo,16bit : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,stereo,16bit  : *NOT* SUPPORTED");
+        }
+        if (caps.dwFormats & WAVE_FORMAT_48M16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,mono,16bit   : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,mono,16bit    : *NOT* SUPPORTED");
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wChannels            : %u", caps.wChannels);
+        TraceSupportFlags(caps.dwSupport);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateRecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::EnumerateRecordingDevices()
+{
+
+    WebRtc_UWord16 nDevices(RecordingDevices());
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#input devices: %u", nDevices);
+
+    WAVEINCAPS caps;
+    MMRESULT res;
+
+    for (UINT deviceID = 0; deviceID < nDevices; deviceID++)
+    {
+        res = waveInGetDevCaps(deviceID, &caps, sizeof(WAVEINCAPS));
+        if (res != MMSYSERR_NOERROR)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCaps() failed (err=%d)", res);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Device ID %u:", deviceID);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID      : %u", caps.wMid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID           : %u",caps.wPid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver    : %u.%u", HIBYTE(caps.vDriverVersion), LOBYTE(caps.vDriverVersion));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name         : %s", caps.szPname);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwFormats            : 0x%x", caps.dwFormats);
+        if (caps.dwFormats & WAVE_FORMAT_48S16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,stereo,16bit : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,stereo,16bit  : *NOT* SUPPORTED");
+        }
+        if (caps.dwFormats & WAVE_FORMAT_48M16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,mono,16bit   : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,mono,16bit    : *NOT* SUPPORTED");
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wChannels            : %u", caps.wChannels);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TraceSupportFlags
+// ----------------------------------------------------------------------------
+
+// Logs a human-readable decoding of the WAVECAPS_* support-flag bitmask
+// reported by the driver (pitch/rate/volume control, sync behavior, etc.).
+// Pure diagnostic helper: no state is modified.
+void AudioDeviceWindowsWave::TraceSupportFlags(DWORD dwSupport) const
+{
+    TCHAR buf[256];
+
+    // BUGFIX: the capacity passed to StringCchPrintf was 128 although |buf|
+    // holds 256 TCHARs and every StringCchCat below uses 256; use the full
+    // (actual) capacity consistently.
+    StringCchPrintf(buf, 256, TEXT("support flags        : 0x%x "), dwSupport);
+
+    if (dwSupport & WAVECAPS_PITCH)
+    {
+        // supports pitch control
+        StringCchCat(buf, 256, TEXT("(PITCH)"));
+    }
+    if (dwSupport & WAVECAPS_PLAYBACKRATE)
+    {
+        // supports playback rate control
+        StringCchCat(buf, 256, TEXT("(PLAYBACKRATE)"));
+    }
+    if (dwSupport & WAVECAPS_VOLUME)
+    {
+        // supports volume control
+        StringCchCat(buf, 256, TEXT("(VOLUME)"));
+    }
+    if (dwSupport & WAVECAPS_LRVOLUME)
+    {
+        // supports separate left and right volume control
+        StringCchCat(buf, 256, TEXT("(LRVOLUME)"));
+    }
+    if (dwSupport & WAVECAPS_SYNC)
+    {
+        // the driver is synchronous and will block while playing a buffer
+        StringCchCat(buf, 256, TEXT("(SYNC)"));
+    }
+    if (dwSupport & WAVECAPS_SAMPLEACCURATE)
+    {
+        // returns sample-accurate position information
+        StringCchCat(buf, 256, TEXT("(SAMPLEACCURATE)"));
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", buf);
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveInError
+// ----------------------------------------------------------------------------
+
+// Looks up the human-readable description for a waveform-audio *input*
+// error code and writes "Error details: <description>" to the trace log.
+void AudioDeviceWindowsWave::TraceWaveInError(MMRESULT error) const
+{
+    TCHAR description[MAXERRORLENGTH];
+    TCHAR traceLine[MAXERRORLENGTH];
+
+    waveInGetErrorText(error, description, MAXERRORLENGTH);
+    StringCchPrintf(traceLine, MAXERRORLENGTH, TEXT("Error details: "));
+    StringCchCat(traceLine, MAXERRORLENGTH, description);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", traceLine);
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveOutError
+// ----------------------------------------------------------------------------
+
+// Looks up the human-readable description for a waveform-audio *output*
+// error code and writes "Error details: <description>" to the trace log.
+void AudioDeviceWindowsWave::TraceWaveOutError(MMRESULT error) const
+{
+    TCHAR description[MAXERRORLENGTH];
+    TCHAR traceLine[MAXERRORLENGTH];
+
+    waveOutGetErrorText(error, description, MAXERRORLENGTH);
+    StringCchPrintf(traceLine, MAXERRORLENGTH, TEXT("Error details: "));
+    StringCchCat(traceLine, MAXERRORLENGTH, description);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", traceLine);
+}
+
+// ----------------------------------------------------------------------------
+//  PrepareStartPlayout
+// ----------------------------------------------------------------------------
+
+// Primes the sound card for playout and resets the playout/drift-detector
+// state. Returns 0 on success, -1 if no output device is open.
+WebRtc_Word32 AudioDeviceWindowsWave::PrepareStartPlayout()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_hWaveOut == NULL)
+    {
+        return -1;
+    }
+
+    // Queue three zero-filled 10ms buffers so that a total of 30ms of
+    // silence sits in the sound card buffer before real playout starts.
+    WebRtc_Word8 silence[4*PLAY_BUF_SIZE_IN_SAMPLES];  // max allocation
+    memset(silence, 0, sizeof(silence));
+
+    for (int n = 0; n < 3; n++)
+    {
+        Write(silence, PLAY_BUF_SIZE_IN_SAMPLES);
+    }
+
+    // Reset playout statistics and the clock-drift detector.
+    _playAcc = 0;
+    _playWarning = 0;
+    _playError = 0;
+    _dc_diff_mean = 0;
+    _dc_y_prev = 0;
+    _dc_penalty_counter = 20;
+    _dc_prevtime = 0;
+    _dc_prevplay = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PrepareStartRecording
+// ----------------------------------------------------------------------------
+
+// Prepares and starts waveform-audio input: resets the recording counters,
+// seeds the sample-position bookkeeping from the driver, prepares and
+// queues all N_BUFFERS_IN input buffers, and starts the device.
+// Returns 0 on success, -1 if no input device is open. Individual driver
+// call failures are logged but do not abort the start sequence.
+WebRtc_Word32 AudioDeviceWindowsWave::PrepareStartRecording()
+{
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_hWaveIn == NULL)
+    {
+        return -1;
+    }
+
+    _playAcc = 0;
+    _recordedBytes = 0;
+    _recPutBackDelay = REC_PUT_BACK_DELAY;
+
+    MMRESULT res;
+    MMTIME mmtime;
+    mmtime.wType = TIME_SAMPLES;
+    // BUGFIX: zero the sample counter up front so the bookkeeping below is
+    // well-defined even when waveInGetPosition() fails (the original read
+    // an uninitialized mmtime.u.sample in that case).
+    mmtime.u.sample = 0;
+
+    res = waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetPosition(TIME_SAMPLES) failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Seed the wrap-around detection state from the current device position.
+    _read_samples = mmtime.u.sample;
+    _read_samples_old = _read_samples;
+    _rec_samples_old = mmtime.u.sample;
+    _wrapCounter = 0;
+
+    for (int n = 0; n < N_BUFFERS_IN; n++)
+    {
+        const WebRtc_UWord8 nBytesPerSample = 2*_recChannels;
+
+        // set up the input wave header
+        _waveHeaderIn[n].lpData          = _recBuffer[n];
+        _waveHeaderIn[n].dwBufferLength  = nBytesPerSample * REC_BUF_SIZE_IN_SAMPLES;
+        _waveHeaderIn[n].dwFlags         = 0;
+        _waveHeaderIn[n].dwBytesRecorded = 0;
+        _waveHeaderIn[n].dwUser          = 0;
+
+        memset(_recBuffer[n], 0, nBytesPerSample * REC_BUF_SIZE_IN_SAMPLES);
+
+        // prepare a buffer for waveform-audio input
+        res = waveInPrepareHeader(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInPrepareHeader(%d) failed (err=%d)", n, res);
+            TraceWaveInError(res);
+        }
+
+        // send an input buffer to the given waveform-audio input device
+        res = waveInAddBuffer(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInAddBuffer(%d) failed (err=%d)", n, res);
+            TraceWaveInError(res);
+        }
+    }
+
+    // start input on the given waveform-audio input device
+    res = waveInStart(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInStart() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  GetPlayoutBufferDelay
+// ----------------------------------------------------------------------------
+
+// Returns the number of milliseconds of audio written to the playout buffer
+// that the sound card has not yet played out, and reports the raw written/
+// played sample counters through the reference parameters.
+// Three responsibilities are interleaved here:
+//  1) wrap-around compensation for sound cards whose played-sample counter
+//     uses fewer than 32 bits (POW2 adjustments below);
+//  2) a clock-drift detector comparing the sound card clock against
+//     timeGetTime(); persistent negative drift switches the object to an
+//     alternative header-based delay estimate (_useHeader);
+//  3) the header-based estimate itself (counting WHDR_INQUEUE buffers).
+// NOTE(review): on the !_playing early return only playedSamples is zeroed;
+// writtenSamples is left untouched — confirm callers ignore it in that case.
+WebRtc_Word32 AudioDeviceWindowsWave::GetPlayoutBufferDelay(WebRtc_UWord32& writtenSamples, WebRtc_UWord32& playedSamples)
+{
+    int i;
+    int ms_Header;
+    long playedDifference;
+    int msecInPlayoutBuffer(0);   // #milliseconds of audio in the playout buffer
+
+    const WebRtc_UWord16 nSamplesPerMs = (WebRtc_UWord16)(N_PLAY_SAMPLES_PER_SEC/1000);  // default is 48000/1000 = 48
+
+    MMRESULT res;
+    MMTIME mmtime;
+
+    if (!_playing)
+    {
+        playedSamples = 0;
+        return (0);
+    }
+
+    // Retrieve the current playback position.
+    //
+    mmtime.wType = TIME_SAMPLES;  // number of waveform-audio samples
+    res = waveOutGetPosition(_hWaveOut, &mmtime, sizeof(mmtime));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetPosition() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    writtenSamples = _writtenSamples;   // #samples written to the playout buffer
+    playedSamples = mmtime.u.sample;    // current playout position in the playout buffer
+
+    // derive remaining amount (in ms) of data in the playout buffer
+    msecInPlayoutBuffer = ((writtenSamples - playedSamples)/nSamplesPerMs);
+    // DEBUG_PRINTP("msecInPlayoutBuffer=%u\n", msecInPlayoutBuffer);
+
+    playedDifference = (long) (_playedSamplesOld - playedSamples);
+
+    if (playedDifference > 64000)
+    {
+        // If the sound cards number-of-played-out-samples variable wraps around before
+        // written_sampels wraps around this needs to be adjusted. This can happen on
+        // sound cards that uses less than 32 bits to keep track of number of played out
+        // sampels. To avoid being fooled by sound cards that sometimes produces false
+        // output we compare old value minus the new value with a large value. This is
+        // neccessary because some SC:s produce an output like 153, 198, 175, 230 which
+        // would trigger the wrap-around function if we didn't compare with a large value.
+        // The value 64000 is chosen because 2^16=65536 so we allow wrap around at 16 bits.
+
+        // Find the smallest power of two that still bounds the old counter
+        // value, i.e. the counter's apparent bit width (between 15 and 31).
+        i = 31;
+        while((_playedSamplesOld <= (unsigned long)POW2(i)) && (i > 14)) {
+            i--;
+        }
+
+        if((i < 31) && (i > 14)) {
+            // Avoid adjusting when there is 32-bit wrap-around since that is
+            // something neccessary.
+            //
+            WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "msecleft() => wrap around occured: %d bits used by sound card)", (i+1));
+
+            _writtenSamples = _writtenSamples - POW2(i + 1);
+            writtenSamples = _writtenSamples;
+            msecInPlayoutBuffer = ((writtenSamples - playedSamples)/nSamplesPerMs);
+        }
+    }
+    else if ((_writtenSamplesOld > POW2(31)) && (writtenSamples < 96000))
+    {
+        // Wrap around as expected after having used all 32 bits. (But we still
+        // test if the wrap around happened earlier which it should not)
+
+        i = 31;
+        while (_writtenSamplesOld <= (unsigned long)POW2(i)) {
+            i--;
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "  msecleft() (wrap around occured after having used all 32 bits)");
+
+        _writtenSamplesOld = writtenSamples;
+        _playedSamplesOld = playedSamples;
+        msecInPlayoutBuffer = (int)((writtenSamples + POW2(i + 1) - playedSamples)/nSamplesPerMs);
+
+    }
+    else if ((writtenSamples < 96000) && (playedSamples > POW2(31)))
+    {
+        // Wrap around has, as expected, happened for written_sampels before
+        // playedSampels so we have to adjust for this until also playedSampels
+        // has had wrap around.
+
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "  msecleft() (wrap around occured: correction of output is done)");
+
+        _writtenSamplesOld = writtenSamples;
+        _playedSamplesOld = playedSamples;
+        msecInPlayoutBuffer = (int)((writtenSamples + POW2(32) - playedSamples)/nSamplesPerMs);
+    }
+
+    _writtenSamplesOld = writtenSamples;
+    _playedSamplesOld = playedSamples;
+
+
+    // We use the following formaula to track that playout works as it should
+    // y=playedSamples/48 - timeGetTime();
+    // y represent the clock drift between system clock and sound card clock - should be fairly stable
+    // When the exponential mean value of diff(y) goes away from zero something is wrong
+    // The exponential formula will accept 1% clock drift but not more
+    // The driver error means that we will play to little audio and have a high negative clock drift
+    // We kick in our alternative method when the clock drift reaches 20%
+
+    int diff,y;
+    int unsigned time =0;
+
+    // If we have other problems that causes playout glitches
+    // we don't want to switch playout method.
+    // Check if playout buffer is extremely low, or if we haven't been able to
+    // exectue our code in more than 40 ms
+
+    time = timeGetTime();
+
+    // NOTE(review): unsigned subtraction — on the very first call
+    // (_dc_prevtime still 0) the elapsed-time term can be huge and trip the
+    // penalty; this appears benign since the penalty only delays drift
+    // detection, but confirm it is intentional.
+    if ((msecInPlayoutBuffer < 20) || (time - _dc_prevtime > 40))
+    {
+        _dc_penalty_counter = 100;
+    }
+
+    if ((playedSamples != 0))
+    {
+        // Exponential filter of diff(y); 990/1000 + 10 keeps the mean in
+        // units of 1/1000 so a stable drift shows up as a scaled constant.
+        y = playedSamples/48 - time;
+        if ((_dc_y_prev != 0) && (_dc_penalty_counter == 0))
+        {
+            diff = y - _dc_y_prev;
+            _dc_diff_mean = (990*_dc_diff_mean)/1000 + 10*diff;
+        }
+        _dc_y_prev = y;
+    }
+
+    if (_dc_penalty_counter)
+    {
+        _dc_penalty_counter--;
+    }
+
+    if (_dc_diff_mean < -200)
+    {
+        // Always reset the filter
+        _dc_diff_mean = 0;
+
+        // Problem is detected. Switch delay method and set min buffer to 80.
+        // Reset the filter and keep monitoring the filter output.
+        // If issue is detected a second time, increase min buffer to 100.
+        // If that does not help, we must modify this scheme further.
+
+        _useHeader++;
+        if (_useHeader == 1)
+        {
+            _minPlayBufDelay = 80;
+            _playWarning = 1;   // only warn first time
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1, "Modification #1: _useHeader = %d, _minPlayBufDelay = %d", _useHeader, _minPlayBufDelay);
+        }
+        else if (_useHeader == 2)
+        {
+            _minPlayBufDelay = 100;   // add some more safety
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1, "Modification #2: _useHeader = %d, _minPlayBufDelay = %d", _useHeader, _minPlayBufDelay);
+        }
+        else
+        {
+            // This state should not be entered... (HA)
+            WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1, "further actions are required!");
+        }
+        if (_playWarning == 1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending playout warning exists");
+        }
+        _playWarning = 1;  // triggers callback from module process thread
+        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "kPlayoutWarning message posted: switching to alternative playout delay method");
+    }
+    _dc_prevtime = time;
+    _dc_prevplay = playedSamples;
+
+    // Try a very rough method of looking at how many buffers are still playing
+    // (each queued wave header corresponds to ~10ms of audio).
+    ms_Header = 0;
+    for (i = 0; i < N_BUFFERS_OUT; i++) {
+        if ((_waveHeaderOut[i].dwFlags & WHDR_INQUEUE)!=0) {
+            ms_Header += 10;
+        }
+    }
+
+    if ((ms_Header-50) > msecInPlayoutBuffer) {
+        // Test for cases when GetPosition appears to be screwed up (currently just log....)
+        TCHAR infoStr[300];
+        if (_no_of_msecleft_warnings%20==0)
+        {
+            StringCchPrintf(infoStr, 300, TEXT("writtenSamples=%i, playedSamples=%i, msecInPlayoutBuffer=%i, ms_Header=%i"), writtenSamples, playedSamples, msecInPlayoutBuffer, ms_Header);
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, (const char*)infoStr);
+        }
+        _no_of_msecleft_warnings++;
+    }
+
+    // If this is true we have had a problem with the playout
+    if (_useHeader > 0)
+    {
+        return (ms_Header);
+    }
+
+
+    if (ms_Header < msecInPlayoutBuffer)
+    {
+        if (_no_of_msecleft_warnings % 100 == 0)
+        {
+            TCHAR str[300];
+            StringCchPrintf(str, 300, TEXT("_no_of_msecleft_warnings=%i, msecInPlayoutBuffer=%i ms_Header=%i (minBuffer=%i buffersize=%i writtenSamples=%i playedSamples=%i)"),
+                _no_of_msecleft_warnings, msecInPlayoutBuffer, ms_Header, _minPlayBufDelay, _playBufDelay, writtenSamples, playedSamples);
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, (const char*)str);
+        }
+        _no_of_msecleft_warnings++;
+        ms_Header -= 6; // Round off as we only have 10ms resolution + Header info is usually slightly delayed compared to GetPosition
+
+        if (ms_Header < 0)
+            ms_Header = 0;
+
+        return (ms_Header);
+    }
+    else
+    {
+        return (msecInPlayoutBuffer);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  GetRecordingBufferDelay
+// ----------------------------------------------------------------------------
+
+// Estimates the amount of recorded audio (in milliseconds) that resides in
+// the sound card's input queue (recorded but not yet read by RecProc), and
+// reports the raw read/recorded sample counters through the reference
+// parameters. Mirrors the playout-side logic: compensates for recorded-
+// sample counters that wrap before 32 bits (POW2 adjustments), and fully
+// resets the position bookkeeping from the driver when the estimate goes
+// out of range.
+WebRtc_Word32 AudioDeviceWindowsWave::GetRecordingBufferDelay(WebRtc_UWord32& readSamples, WebRtc_UWord32& recSamples)
+{
+    long recDifference;
+    MMTIME mmtime;
+    MMRESULT mmr;
+
+    const WebRtc_UWord16 nSamplesPerMs = (WebRtc_UWord16)(N_REC_SAMPLES_PER_SEC/1000);  // default is 48000/1000 = 48
+
+    // Retrieve the current input position of the given waveform-audio input device
+    //
+    mmtime.wType = TIME_SAMPLES;
+    mmr = waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
+    if (MMSYSERR_NOERROR != mmr)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetPosition() failed (err=%d)", mmr);
+        TraceWaveInError(mmr);
+    }
+
+    readSamples = _read_samples;    // updated for each full frame in RecProc()
+    recSamples = mmtime.u.sample;   // remaining time in input queue (recorded but not read yet)
+
+    recDifference = (long) (_rec_samples_old - recSamples);
+
+    if( recDifference > 64000) {
+        WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 1 (recDifference =%d)", recDifference);
+        // If the sound cards number-of-recorded-samples variable wraps around before
+        // read_sampels wraps around this needs to be adjusted. This can happen on
+        // sound cards that uses less than 32 bits to keep track of number of played out
+        // sampels. To avoid being fooled by sound cards that sometimes produces false
+        // output we compare old value minus the new value with a large value. This is
+        // neccessary because some SC:s produce an output like 153, 198, 175, 230 which
+        // would trigger the wrap-around function if we didn't compare with a large value.
+        // The value 64000 is chosen because 2^16=65536 so we allow wrap around at 16 bits.
+        //
+        // Find the counter's apparent bit width (between 15 and 31).
+        int i = 31;
+        while((_rec_samples_old <= (unsigned long)POW2(i)) && (i > 14))
+            i--;
+
+        if((i < 31) && (i > 14)) {
+            // Avoid adjusting when there is 32-bit wrap-around since that is
+            // something neccessary.
+            //
+            _read_samples = _read_samples - POW2(i + 1);
+            readSamples = _read_samples;
+            _wrapCounter++;
+        } else {
+            WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1,"AEC (_rec_samples_old %d recSamples %d)",_rec_samples_old, recSamples);
+        }
+    }
+
+    if((_wrapCounter>200)){
+        // Do nothing, handled later
+    }
+    else if((_rec_samples_old > POW2(31)) && (recSamples < 96000)) {
+        WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 2 (_rec_samples_old %d recSamples %d)",_rec_samples_old, recSamples);
+        // Wrap around as expected after having used all 32 bits.
+        _read_samples_old = readSamples;
+        _rec_samples_old = recSamples;
+        _wrapCounter++;
+        return (int)((recSamples + POW2(32) - readSamples)/nSamplesPerMs);
+
+
+    } else if((recSamples < 96000) && (readSamples > POW2(31))) {
+        WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 3 (readSamples %d recSamples %d)",readSamples, recSamples);
+        // Wrap around has, as expected, happened for rec_sampels before
+        // readSampels so we have to adjust for this until also readSampels
+        // has had wrap around.
+        _read_samples_old = readSamples;
+        _rec_samples_old = recSamples;
+        _wrapCounter++;
+        return (int)((recSamples + POW2(32) - readSamples)/nSamplesPerMs);
+    }
+
+    _read_samples_old = _read_samples;
+    _rec_samples_old = recSamples;
+    int res=(((int)_rec_samples_old - (int)_read_samples_old)/nSamplesPerMs);
+
+    // Sanity check: a delay above 2s, a negative delay, or too many wrap
+    // adjustments in a row means the bookkeeping is broken — re-seed it
+    // directly from the driver position and return a plausible default.
+    if((res > 2000)||(res < 0)||(_wrapCounter>200)){
+        // Reset everything
+        WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1,"msec_read error (res %d wrapCounter %d)",res, _wrapCounter);
+        MMTIME mmtime;
+        mmtime.wType = TIME_SAMPLES;
+
+        mmr=waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
+        if (mmr != MMSYSERR_NOERROR) {
+            WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1, "waveInGetPosition failed (mmr=%d)", mmr);
+        }
+        _read_samples=mmtime.u.sample;
+        _read_samples_old=_read_samples;
+        _rec_samples_old=mmtime.u.sample;
+
+        // Guess a decent value
+        res = 20;
+    }
+
+    _wrapCounter = 0;
+    return res;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  ThreadFunc
+// ----------------------------------------------------------------------------
+
+// Static trampoline for the worker thread: forwards the callback to the
+// ThreadProcess() member of the AudioDeviceWindowsWave passed as context.
+bool AudioDeviceWindowsWave::ThreadFunc(void* pThis)
+{
+    AudioDeviceWindowsWave* device = static_cast<AudioDeviceWindowsWave*>(pThis);
+    return device->ThreadProcess();
+}
+
+// ----------------------------------------------------------------------------
+//  ThreadProcess
+// ----------------------------------------------------------------------------
+
+// Periodic worker driven by the multimedia timer event (TIMER_PERIOD_MS).
+// Handles deferred start of playout/recording, feeds the playout side via
+// PlayProc(), drains recorded buffers via RecProc(), and maintains the
+// average-CPU-load estimate. Returns true so the thread keeps running,
+// including after recoverable timer errors/timeouts.
+bool AudioDeviceWindowsWave::ThreadProcess()
+{
+    WebRtc_UWord32 time(0);
+    WebRtc_UWord32 playDiff(0);
+    WebRtc_UWord32 recDiff(0);
+
+    LONGLONG playTime(0);
+    LONGLONG recTime(0);
+
+    switch (_timeEvent.Wait(1000))
+    {
+    case kEventSignaled:
+        break;
+    case kEventError:
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "EventWrapper::Wait() failed => restarting timer");
+        _timeEvent.StopTimer();
+        _timeEvent.StartTimer(true, TIMER_PERIOD_MS);
+        return true;
+    case kEventTimeout:
+        return true;
+    }
+
+    time = AudioDeviceUtility::GetTimeInMS();
+
+    // Deferred playout start requested by StartPlayout().
+    if (_startPlay)
+    {
+        if (PrepareStartPlayout() == 0)
+        {
+            _prevTimerCheckTime = time;
+            _prevPlayTime = time;
+            _startPlay = false;
+            _playing = true;
+            _playStartEvent.Set();
+        }
+    }
+
+    // Deferred recording start requested by StartRecording().
+    if (_startRec)
+    {
+        if (PrepareStartRecording() == 0)
+        {
+            _prevTimerCheckTime = time;
+            _prevRecTime = time;
+            _prevRecByteCheckTime = time;
+            _startRec = false;
+            _recording = true;
+            _recStartEvent.Set();
+        }
+    }
+
+    if (_playing)
+    {
+        playDiff = time - _prevPlayTime;
+    }
+
+    if (_recording)
+    {
+        recDiff = time - _prevRecTime;
+    }
+
+    if (_playing || _recording)
+    {
+        RestartTimerIfNeeded(time);
+    }
+
+    // BUGFIX: the original condition read
+    //     _playing && (playDiff > X) || (playDiff < 0)
+    // where && binds tighter than ||, so the right-hand arm bypassed the
+    // _playing guard. Since playDiff is unsigned, (playDiff < 0) was also
+    // always false; a timer wrap instead shows up as a huge unsigned
+    // playDiff, which the > test below already catches.
+    if (_playing &&
+        (playDiff > (WebRtc_UWord32)(_dTcheckPlayBufDelay - 1)))
+    {
+        Lock();
+        if (_playing)
+        {
+            if (PlayProc(playTime) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "PlayProc() failed");
+            }
+            _prevPlayTime = time;
+            if (playTime != 0)
+                _playAcc += playTime;
+        }
+        UnLock();
+    }
+
+    if (_playing && (playDiff > 12))
+    {
+        // It has been a long time since we were able to play out, try to
+        // compensate by calling PlayProc again.
+        //
+        Lock();
+        if (_playing)
+        {
+            // NOTE(review): unlike the call above, any non-zero return is
+            // treated as failure here — confirm PlayProc() only returns
+            // 0 or -1 before unifying the checks.
+            if (PlayProc(playTime))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "PlayProc() failed");
+            }
+            _prevPlayTime = time;
+            if (playTime != 0)
+                _playAcc += playTime;
+        }
+        UnLock();
+    }
+
+    // BUGFIX: same precedence fix as for the playout condition above; the
+    // dead (recDiff < 0) unsigned comparison is dropped for the same reason.
+    if (_recording &&
+       (recDiff > REC_CHECK_TIME_PERIOD_MS))
+    {
+        Lock();
+        if (_recording)
+        {
+            WebRtc_Word32 nRecordedBytes(0);
+            WebRtc_UWord16 maxIter(10);
+
+            // Deliver all available recorded buffers and update the CPU load measurement.
+            // We use a while loop here to compensate for the fact that the multi-media timer
+            // can sometimes enter a "bad state" after hibernation where the resolution is
+            // reduced from ~1ms to ~10-15 ms.
+            //
+            while ((nRecordedBytes = RecProc(recTime)) > 0)
+            {
+                maxIter--;
+                _recordedBytes += nRecordedBytes;
+                if (recTime && _perfFreq.QuadPart)
+                {
+                    // Measure the average CPU load:
+                    // This is a simplified expression where an exponential filter is used:
+                    //   _avgCPULoad = 0.99 * _avgCPULoad + 0.01 * newCPU,
+                    //   newCPU = (recTime+playAcc)/f is time in seconds
+                    //   newCPU / 0.01 is the fraction of a 10 ms period
+                    // The two 0.01 cancels each other.
+                    // NOTE - assumes 10ms audio buffers.
+                    //
+                    _avgCPULoad = (float)(_avgCPULoad*.99 + (recTime+_playAcc)/(double)(_perfFreq.QuadPart));
+                    _playAcc = 0;
+                }
+                if (maxIter == 0)
+                {
+                    // If we get this message often, our compensation scheme is not sufficient.
+                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "failed to compensate for reduced MM-timer resolution");
+                }
+            }
+
+            if (nRecordedBytes == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "RecProc() failed");
+            }
+
+            _prevRecTime = time;
+
+            // Monitor the recording process and generate error/warning callbacks if needed
+            MonitorRecording(time);
+        }
+        UnLock();
+    }
+
+    if (!_recording)
+    {
+        _prevRecByteCheckTime = time;
+        _avgCPULoad = 0;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  RecProc
+// ----------------------------------------------------------------------------
+
+// Drains one recorded input buffer (if ready), delivers it to the attached
+// audio buffer, feeds play/rec delay and clock-drift estimates to the VQE/
+// AEC, and re-queues the buffer to the driver after a put-back delay.
+// Returns the number of bytes recorded in the current buffer (0 if it is
+// not yet usable), or -1 on a fatal error. |consumedTime| receives the time
+// spent inside the data-delivery callback, in performance-counter ticks.
+// Must be called with the device lock held; the lock is released around
+// DeliverRecordedData() and re-taken afterwards.
+WebRtc_Word32 AudioDeviceWindowsWave::RecProc(LONGLONG& consumedTime)
+{
+    MMRESULT res;
+    WebRtc_UWord32 bufCount(0);
+    WebRtc_UWord32 nBytesRecorded(0);
+
+    consumedTime = 0;
+
+    // count modulo N_BUFFERS_IN (0,1,2,...,(N_BUFFERS_IN-1),0,1,2,..)
+    if (_recBufCount == N_BUFFERS_IN)
+    {
+        _recBufCount = 0;
+    }
+
+    bufCount = _recBufCount;
+
+    // take mono/stereo mode into account when deriving size of a full buffer
+    const WebRtc_UWord16 bytesPerSample = 2*_recChannels;
+    const WebRtc_UWord32 fullBufferSizeInBytes = bytesPerSample * REC_BUF_SIZE_IN_SAMPLES;
+
+    // read number of recorded bytes for the given input-buffer
+    nBytesRecorded = _waveHeaderIn[bufCount].dwBytesRecorded;
+
+    // NOTE(review): the first clause is subsumed by (nBytesRecorded > 0);
+    // the partial-buffer case is handled by the _timesdwBytes logic below.
+    if (nBytesRecorded == fullBufferSizeInBytes ||
+       (nBytesRecorded > 0))
+    {
+        WebRtc_Word32 msecOnPlaySide;
+        WebRtc_Word32 msecOnRecordSide;
+        WebRtc_UWord32 writtenSamples;
+        WebRtc_UWord32 playedSamples;
+        WebRtc_UWord32 readSamples, recSamples;
+        bool send = true;
+
+        WebRtc_UWord32 nSamplesRecorded = (nBytesRecorded/bytesPerSample);  // divide by 2 or 4 depending on mono or stereo
+
+        if (nBytesRecorded == fullBufferSizeInBytes)
+        {
+            _timesdwBytes = 0;
+        }
+        else
+        {
+            // Test if it is stuck on this buffer: tolerate up to 4 retries
+            // of a partially-filled buffer before giving up on delivering it.
+            _timesdwBytes++;
+            if (_timesdwBytes < 5)
+            {
+                // keep trying
+                return (0);
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id,"nBytesRecorded=%d => don't use", nBytesRecorded);
+                _timesdwBytes = 0;
+                send = false;
+            }
+        }
+
+        // store the recorded buffer (no action will be taken if the #recorded samples is not a full buffer)
+        _ptrAudioBuffer->SetRecordedBuffer(_waveHeaderIn[bufCount].lpData, nSamplesRecorded);
+
+        // update #samples read
+        _read_samples += nSamplesRecorded;
+
+        // Check how large the playout and recording buffers are on the sound card.
+        // This info is needed by the AEC.
+        //
+        msecOnPlaySide = GetPlayoutBufferDelay(writtenSamples, playedSamples);
+        msecOnRecordSide = GetRecordingBufferDelay(readSamples, recSamples);
+
+        // If we use the alternative playout delay method, skip the clock drift compensation
+        // since it will be an unreliable estimate and might degrade AEC performance.
+        WebRtc_Word32 drift = (_useHeader > 0) ? 0 : GetClockDrift(playedSamples, recSamples);
+
+        _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, drift);
+
+        // Store the play and rec delay values for video synchronization
+        _sndCardPlayDelay = msecOnPlaySide;
+        _sndCardRecDelay = msecOnRecordSide;
+
+        LARGE_INTEGER t1,t2;
+
+        if (send)
+        {
+            QueryPerformanceCounter(&t1);
+
+            // deliver recorded samples at specified sample rate, mic level etc. to the observer using callback
+            UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            Lock();
+
+            QueryPerformanceCounter(&t2);
+
+            // The lock was dropped around the callback; verify the input
+            // side was not closed/changed by another thread meanwhile.
+            if (InputSanityCheckAfterUnlockedPeriod() == -1)
+            {
+                // assert(false);
+                return -1;
+            }
+        }
+
+        if (_AGC)
+        {
+            WebRtc_UWord32  newMicLevel = _ptrAudioBuffer->NewMicLevel();
+            if (newMicLevel != 0)
+            {
+                // The VQE will only deliver non-zero microphone levels when a change is needed.
+                WEBRTC_TRACE(kTraceStream, kTraceUtility, _id,"AGC change of volume: => new=%u", newMicLevel);
+
+                // We store this outside of the audio buffer to avoid 
+                // having it overwritten by the getter thread.
+                _newMicLevel = newMicLevel;
+                SetEvent(_hSetCaptureVolumeEvent);
+            }
+        }
+
+        // return utilized buffer to queue after specified delay (default is 4)
+        if (_recDelayCount > (_recPutBackDelay-1))
+        {
+            // delay buffer counter to compensate for "put-back-delay"
+            bufCount = (bufCount + N_BUFFERS_IN - _recPutBackDelay) % N_BUFFERS_IN;
+
+            // reset counter so we can make new detection
+            _waveHeaderIn[bufCount].dwBytesRecorded = 0;
+
+            // return the utilized wave-header after certain delay (given by _recPutBackDelay)
+            res = waveInUnprepareHeader(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
+            if (MMSYSERR_NOERROR != res)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInUnprepareHeader(%d) failed (err=%d)", bufCount, res);
+                TraceWaveInError(res);
+            }
+
+            // ensure that the utilized header can be used again
+            res = waveInPrepareHeader(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
+            if (res != MMSYSERR_NOERROR)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInPrepareHeader(%d) failed (err=%d)", bufCount, res);
+                TraceWaveInError(res);
+                return -1;
+            }
+
+            // add the utilized buffer to the queue again
+            res = waveInAddBuffer(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
+            if (res != MMSYSERR_NOERROR)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInAddBuffer(%d) failed (err=%d)", bufCount, res);
+                TraceWaveInError(res);
+                // Back off by holding on to buffers a little longer before
+                // escalating to a recording-error callback.
+                if (_recPutBackDelay < 50)
+                {
+                    _recPutBackDelay++;
+                    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "_recPutBackDelay increased to %d", _recPutBackDelay);
+                }
+                else
+                {
+                    if (_recError == 1)
+                    {
+                        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording error exists");
+                    }
+                    _recError = 1;  // triggers callback from module process thread
+                    WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: _recPutBackDelay=%u", _recPutBackDelay);
+                }
+            }
+        }  // if (_recDelayCount > (_recPutBackDelay-1))
+
+        if (_recDelayCount < (_recPutBackDelay+1))
+        {
+            _recDelayCount++;
+        }
+
+        // increase main buffer count since one complete buffer has now been delivered
+        _recBufCount++;
+
+        if (send) {
+            // Calculate processing time
+            consumedTime = (int)(t2.QuadPart-t1.QuadPart);
+            // handle wraps, time should not be higher than a second
+            if ((consumedTime > _perfFreq.QuadPart) || (consumedTime < 0))
+                consumedTime = 0;
+        }
+
+    }  // if ((nBytesRecorded == fullBufferSizeInBytes))
+
+    return nBytesRecorded;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayProc
+// ----------------------------------------------------------------------------
+
+int AudioDeviceWindowsWave::PlayProc(LONGLONG& consumedTime)
+{
+    // Adaptive playout: measures how many ms of audio remain in the sound
+    // card buffer and, when the level drops below the current threshold,
+    // fetches and writes one new 10 ms block (PLAY_BUF_SIZE_IN_SAMPLES).
+    // The threshold itself is tuned up on underruns and slowly decays when
+    // stable. On return, consumedTime holds the performance-counter ticks
+    // spent in the render callback (0 if no block was delivered); returns -1
+    // only if the output sanity check fails after the unlocked period.
+    WebRtc_Word32 remTimeMS(0);
+    WebRtc_Word8 playBuffer[4*PLAY_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 writtenSamples(0);
+    WebRtc_UWord32 playedSamples(0);
+
+    LARGE_INTEGER t1;
+    LARGE_INTEGER t2;
+
+    consumedTime = 0;
+    _waitCounter++;
+
+    // Get number of ms of sound that remains in the sound card buffer for playback.
+    //
+    remTimeMS = GetPlayoutBufferDelay(writtenSamples, playedSamples);
+
+    // The threshold can be adaptive or fixed. The adaptive scheme is updated
+    // also for fixed mode but the updated threshold is not utilized.
+    //
+    const WebRtc_UWord16 thresholdMS =
+        (_playBufType == AudioDeviceModule::kAdaptiveBufferSize) ? _playBufDelay : _playBufDelayFixed;
+
+    // Less than (threshold + 9) ms remains => time to deliver a new block.
+    if (remTimeMS < thresholdMS + 9)
+    {
+        _dTcheckPlayBufDelay = 5;
+
+        if (remTimeMS == 0)
+        {
+            // Underrun: the playout buffer ran empty, so raise the delay
+            // thresholds. _waitCounter > 30 prevents reacting to underruns
+            // that occur immediately after a previous adaptation.
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, _id, "playout buffer is empty => we must adapt...");
+            if (_waitCounter > 30)
+            {
+                _erZeroCounter++;
+                if (_erZeroCounter == 2)
+                {
+                    // Second consecutive underrun: medium-size adjustment.
+                    _playBufDelay += 15;
+                    _minPlayBufDelay += 20;
+                    _waitCounter = 50;
+                    WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0,erZero=2): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
+                }
+                else if (_erZeroCounter == 3)
+                {
+                    // Third consecutive underrun: largest adjustment; the
+                    // underrun counter is restarted.
+                    _erZeroCounter = 0;
+                    _playBufDelay += 30;
+                    _minPlayBufDelay += 25;
+                    _waitCounter = 0;
+                    WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0, erZero=3): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
+                }
+                else
+                {
+                    // First underrun: smallest adjustment.
+                    _minPlayBufDelay += 10;
+                    _playBufDelay += 15;
+                    _waitCounter = 50;
+                    WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0, erZero=1): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
+                }
+            }
+        }
+        else if (remTimeMS < _minPlayBufDelay)
+        {
+            // If there is less than 25 ms of audio in the play out buffer
+            // increase the buffersize limit value. _waitCounter prevents
+            // _playBufDelay to be increased every time this function is called.
+
+            if (_waitCounter > 30)
+            {
+                _playBufDelay += 10;
+                if (_intro == 0)
+                    _waitCounter = 0;
+                WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is increased: playBufDelay=%u", _playBufDelay);
+            }
+        }
+        else if (remTimeMS < thresholdMS - 9)
+        {
+            // Below the threshold but above the adaptive minimum: no
+            // adaptation needed, just clear the underrun counter.
+            _erZeroCounter = 0;
+        }
+        else
+        {
+            // Within +/-9 ms of the threshold: buffer level is where we want
+            // it, so the buffer can be checked less frequently.
+            _erZeroCounter = 0;
+            _dTcheckPlayBufDelay = 10;
+        }
+
+        QueryPerformanceCounter(&t1);   // measure time: START
+
+        // Ask for new PCM data to be played out using the AudioDeviceBuffer.
+        // Ensure that this callback is executed without taking the audio-thread lock.
+        //
+        UnLock();
+        WebRtc_UWord32 nSamples = _ptrAudioBuffer->RequestPlayoutData(PLAY_BUF_SIZE_IN_SAMPLES);
+        Lock();
+
+        if (OutputSanityCheckAfterUnlockedPeriod() == -1)
+        {
+            // Device state changed while the lock was released.
+            // assert(false);
+            return -1;
+        }
+
+        nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        if (nSamples != PLAY_BUF_SIZE_IN_SAMPLES)
+        {
+            // Best effort: the block is still written below even on mismatch.
+            WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "invalid number of output samples(%d)", nSamples);
+        }
+
+        QueryPerformanceCounter(&t2);   // measure time: STOP
+        consumedTime = (int)(t2.QuadPart - t1.QuadPart);
+
+        Write(playBuffer, PLAY_BUF_SIZE_IN_SAMPLES);
+
+    }  // if (remTimeMS < thresholdMS + 9)
+    else if (thresholdMS + 9 < remTimeMS )
+    {
+        // More than enough audio is buffered => poll the buffer level more
+        // often so the next delivery is not late.
+        _erZeroCounter = 0;
+        _dTcheckPlayBufDelay = 2;    // check buffer more often
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Need to check playout buffer more often (dT=%u, remTimeMS=%u)", _dTcheckPlayBufDelay, remTimeMS);
+    }
+
+    // If the buffersize has been stable for 20 seconds try to decrease the buffer size
+    if (_waitCounter > 2000)
+    {
+        _intro = 0;
+        _playBufDelay--;
+        _waitCounter = 1990;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is decreased: playBufDelay=%u", _playBufDelay);
+    }
+
+    // Limit the minimum sound card (playback) delay to adaptive minimum delay
+    if (_playBufDelay < _minPlayBufDelay)
+    {
+        _playBufDelay = _minPlayBufDelay;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is limited to %u", _minPlayBufDelay);
+    }
+
+    // Limit the maximum sound card (playback) delay to 150 ms
+    if (_playBufDelay > 150)
+    {
+        _playBufDelay = 150;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is limited to %d", _playBufDelay);
+    }
+
+    // Upper limit of the minimum sound card (playback) delay to 65 ms.
+    // Deactivated during "useHeader mode" (_useHeader > 0).
+    if (_minPlayBufDelay > _MAX_minBuffer &&
+       (_useHeader == 0))
+    {
+        _minPlayBufDelay = _MAX_minBuffer;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Minimum playout threshold is limited to %d", _MAX_minBuffer);
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  Write
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::Write(WebRtc_Word8* data, WebRtc_UWord16 nSamples)
+{
+    // Without an open waveform-audio output device there is nothing to write to.
+    if (_hWaveOut == NULL)
+    {
+        return -1;
+    }
+
+    // Silently ignore the request until playout has been initialized.
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    const WebRtc_UWord16 bufIndex(_playBufCount);
+
+    // Copy the PCM samples into the memory region associated with
+    // _waveHeaderOut[bufIndex] (2 bytes per sample and channel).
+    const WebRtc_Word16 nBytes = (2*_playChannels)*nSamples;
+    memcpy(&_playBuffer[bufIndex][0], &data[0], nBytes);
+
+    // Hand the data block over to the waveform-audio output device.
+    // The header must have been prepared with waveOutPrepareHeader before it
+    // is passed to waveOutWrite; the WHDR_DONE bit is set in dwFlags once the
+    // device is finished with the buffer. Unless the device is paused via
+    // waveOutPause, playback begins with the first block sent to the device.
+    const MMRESULT res = waveOutWrite(_hWaveOut, &_waveHeaderOut[bufIndex], sizeof(_waveHeaderOut[bufIndex]));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutWrite(%d) failed (err=%d)", bufIndex, res);
+        TraceWaveOutError(res);
+
+        _writeErrors++;
+        if (_writeErrors > 10)
+        {
+            if (_playError == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending playout error exists");
+            }
+            _playError = 1;  // triggers callback from module process thread
+            WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kPlayoutError message posted: _writeErrors=%u", _writeErrors);
+        }
+
+        return -1;
+    }
+
+    _playBufCount = (_playBufCount+1) % N_BUFFERS_OUT;  // advance ring-buffer index
+    _writtenSamples += nSamples;                        // each sample is 2 or 4 bytes
+    _writeErrors = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//    GetClockDrift
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::GetClockDrift(const WebRtc_UWord32 plSamp, const WebRtc_UWord32 rcSamp)
+{
+    // Drift = (samples played since last call) - (samples recorded since last
+    // call), with compensation for 32-bit counter wrap-around.
+    unsigned int playDiff = 0;
+    unsigned int recDiff = 0;
+
+    if (plSamp >= _plSampOld)
+    {
+        playDiff = plSamp - _plSampOld;
+    }
+    else
+    {
+        // The playout counter wrapped. Find the largest power of two still
+        // at or above the previous value...
+        int bit = 31;
+        while ((unsigned int)POW2(bit) >= _plSampOld)
+        {
+            bit--;
+        }
+        // ...and add back the portion consumed before the wrap occurred.
+        // NOTE(review): assumes _plSampOld > 0 here; a zero value would drive
+        // bit negative -- confirm counters start non-zero before wrapping.
+        playDiff = plSamp + POW2(bit + 1) - _plSampOld;
+    }
+
+    if (rcSamp >= _rcSampOld)
+    {
+        recDiff = rcSamp - _rcSampOld;
+    }
+    else
+    {
+        // Recording counter wrapped; same compensation as above.
+        int bit = 31;
+        while ((unsigned int)POW2(bit) >= _rcSampOld)
+        {
+            bit--;
+        }
+        recDiff = rcSamp + POW2(bit + 1) - _rcSampOld;
+    }
+
+    // Remember the current readings for the next invocation.
+    _plSampOld = plSamp;
+    _rcSampOld = rcSamp;
+
+    return playDiff - recDiff;
+}
+
+// ----------------------------------------------------------------------------
+//  MonitorRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MonitorRecording(const WebRtc_UWord32 time)
+{
+    // Samples gathered since the previous reset of _recordedBytes
+    // (2 bytes per sample and channel).
+    const WebRtc_UWord16 sampleSizeBytes = 2*_recChannels;
+    const WebRtc_UWord32 samplesRecorded = _recordedBytes/sampleSizeBytes;
+
+    if (samplesRecorded > 5*N_REC_SAMPLES_PER_SEC)
+    {
+        // Five seconds of audio is now available. If gathering it took more
+        // than 5.7 seconds of wall-clock time, the capture side is lagging
+        // (this can be related to USB overload) => raise a warning.
+        if ((time - _prevRecByteCheckTime) > 5700)
+        {
+            if (_recWarning == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording warning exists");
+            }
+            _recWarning = 1;  // triggers callback from module process thread
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "kRecordingWarning message posted: time-_prevRecByteCheckTime=%d", time - _prevRecByteCheckTime);
+        }
+
+        // Restart the five-second measurement window.
+        _recordedBytes = 0;
+        _prevRecByteCheckTime = time;
+    }
+
+    if ((time - _prevRecByteCheckTime) > 8000)
+    {
+        // Not even 5 seconds of audio was confirmed within 8 seconds of
+        // wall-clock time => the recording process is most likely dead,
+        // so report an error.
+        if (_recError == 1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording error exists");
+        }
+        _recError = 1;  // triggers callback from module process thread
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: time-_prevRecByteCheckTime=%d", time - _prevRecByteCheckTime);
+
+        _prevRecByteCheckTime = time;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RestartTimerIfNeeded
+//
+//  Restart timer if needed (they seem to be messed up after a hibernate).
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RestartTimerIfNeeded(const WebRtc_UWord32 time)
+{
+    // Elapsed wall-clock time since the previous invocation.
+    const WebRtc_UWord32 elapsedMS = time - _prevTimerCheckTime;
+    _prevTimerCheckTime = time;
+
+    if (elapsedMS <= 7)
+    {
+        // Timer is behaving => restart the timer-check scheme.
+        _timerFaults = 0;
+        _timerRestartAttempts = 0;
+        return 0;
+    }
+
+    // One more timer glitch observed.
+    _timerFaults++;
+    if (_timerFaults > 5 && _timerRestartAttempts < 2)
+    {
+        // The timer event repeatedly failed to fire at least every 5 ms;
+        // reinitialize it. On some machines this restores normal operation,
+        // on others (e.g. IBM T60) it does not, hence at most two restart
+        // attempts are made. Where restarting does not help, the reduced
+        // resolution is compensated for on the recording and playout sides.
+        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, " timer issue detected => timer is restarted");
+        _timeEvent.StopTimer();
+        _timeEvent.StartTimer(true, TIMER_PERIOD_MS);
+        // Give the timer time to start up; the negative value also prevents
+        // us from killing/starting the timer several times in a row.
+        _timerFaults = -20;
+        _timerRestartAttempts++;
+    }
+
+    return 0;
+}
+
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_device_wave_win.h b/trunk/src/modules/audio_device/main/source/win/audio_device_wave_win.h
new file mode 100644
index 0000000..76607f6
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_device_wave_win.h
@@ -0,0 +1,333 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
+
+#include "audio_device_generic.h"
+#include "audio_mixer_manager_win.h"
+
+#pragma comment( lib, "winmm.lib" )
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+
+// Period of the timer event that drives the audio thread (milliseconds).
+const WebRtc_UWord32 TIMER_PERIOD_MS = 2;
+// Interval at which the recording side is checked (milliseconds).
+const WebRtc_UWord32 REC_CHECK_TIME_PERIOD_MS = 4;
+// Initial value of _recPutBackDelay (buffers held back before delivery)
+// -- see its usage in the .cc file.
+const WebRtc_UWord16 REC_PUT_BACK_DELAY = 4;
+
+// Native sample rates used for capture and render.
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 48000;
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 48000;
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1;  // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 2; // default is stereo playout
+
+// Buffer sizes correspond to 10 ms of audio.
+// NOTE - CPU load will not be correct for other sizes than 10ms
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = (N_REC_SAMPLES_PER_SEC/100);
+const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = (N_PLAY_SAMPLES_PER_SEC/100);
+
+// Number of 10 ms buffers in the capture/render rings (2 seconds each).
+enum { N_BUFFERS_IN = 200 };
+enum { N_BUFFERS_OUT = 200 };
+
+// AudioDeviceGeneric implementation built on the legacy Windows multimedia
+// (waveIn/waveOut) APIs. Capture and render are serviced by an internal
+// thread driven by a periodic timer event (TIMER_PERIOD_MS); mixer/volume
+// control is delegated to AudioMixerManager.
+class AudioDeviceWindowsWave : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceWindowsWave(const WebRtc_Word32 id);
+    // NOTE(review): destructor is non-virtual here; presumably deletion
+    // happens via the concrete type or the base declares a virtual
+    // destructor -- confirm against AudioDeviceGeneric.
+    ~AudioDeviceWindowsWave();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(WebRtc_UWord16 index, WebRtc_Word8 name[kAdmMaxDeviceNameSize], WebRtc_Word8 guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    // Error/warning flags set by the worker functions; polled and cleared by
+    // the module process thread.
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    // Convenience wrappers around the audio-thread critical section.
+    void Lock() { _critSect.Enter(); };
+    void UnLock() { _critSect.Leave(); };
+    WebRtc_Word32 Id() {return _id;}
+    bool IsUsingOutputDeviceIndex() const {return _usingOutputDeviceIndex;}
+    AudioDeviceModule::WindowsDeviceType OutputDevice() const {return _outputDevice;}
+    WebRtc_UWord16 OutputDeviceIndex() const {return _outputDeviceIndex;}
+    bool IsUsingInputDeviceIndex() const {return _usingInputDeviceIndex;}
+    AudioDeviceModule::WindowsDeviceType InputDevice() const {return _inputDevice;}
+    WebRtc_UWord16 InputDeviceIndex() const {return _inputDeviceIndex;}
+
+private:
+    // Verify that the device is still usable after the lock was released
+    // around an external callback.
+    inline WebRtc_Word32 InputSanityCheckAfterUnlockedPeriod() const;
+    inline WebRtc_Word32 OutputSanityCheckAfterUnlockedPeriod() const;
+
+private:
+    WebRtc_Word32 EnumeratePlayoutDevices();
+    WebRtc_Word32 EnumerateRecordingDevices();
+    void TraceSupportFlags(DWORD dwSupport) const;
+    void TraceWaveInError(MMRESULT error) const;
+    void TraceWaveOutError(MMRESULT error) const;
+    WebRtc_Word32 PrepareStartRecording();
+    WebRtc_Word32 PrepareStartPlayout();
+
+    // Per-tick capture/render workers; consumedTime receives the number of
+    // performance-counter ticks spent in callbacks (for CPU-load tracking).
+    WebRtc_Word32 RecProc(LONGLONG& consumedTime);
+    int PlayProc(LONGLONG& consumedTime);
+
+    WebRtc_Word32 GetPlayoutBufferDelay(WebRtc_UWord32& writtenSamples, WebRtc_UWord32& playedSamples);
+    WebRtc_Word32 GetRecordingBufferDelay(WebRtc_UWord32& readSamples, WebRtc_UWord32& recSamples);
+    WebRtc_Word32 Write(WebRtc_Word8* data, WebRtc_UWord16 nSamples);
+    WebRtc_Word32 GetClockDrift(const WebRtc_UWord32 plSamp, const WebRtc_UWord32 rcSamp);
+    WebRtc_Word32 MonitorRecording(const WebRtc_UWord32 time);
+    WebRtc_Word32 RestartTimerIfNeeded(const WebRtc_UWord32 time);
+
+private:
+    // Main audio-thread entry points.
+    static bool ThreadFunc(void*);
+    bool ThreadProcess();
+
+    // Dedicated Win32 threads for reading/writing the capture volume so the
+    // audio thread is never blocked on mixer calls.
+    static DWORD WINAPI GetCaptureVolumeThread(LPVOID context);
+    DWORD DoGetCaptureVolumeThread();
+
+    static DWORD WINAPI SetCaptureVolumeThread(LPVOID context);
+    DWORD DoSetCaptureVolumeThread();
+
+private:
+    AudioDeviceBuffer*                      _ptrAudioBuffer;
+
+    CriticalSectionWrapper&                 _critSect;
+    EventWrapper&                           _timeEvent;
+    EventWrapper&                           _recStartEvent;
+    EventWrapper&                           _playStartEvent;
+
+    // Handles/events for the capture-volume helper threads above.
+    HANDLE                                  _hGetCaptureVolumeThread;
+    HANDLE                                  _hShutdownGetVolumeEvent;
+    HANDLE                                  _hSetCaptureVolumeThread;
+    HANDLE                                  _hShutdownSetVolumeEvent;
+    HANDLE                                  _hSetCaptureVolumeEvent;
+
+    ThreadWrapper*                          _ptrThread;
+    WebRtc_UWord32                          _threadID;
+
+    CriticalSectionWrapper&                 _critSectCb;
+
+    WebRtc_Word32                           _id;
+
+    AudioMixerManager                       _mixerManager;
+
+    bool                                    _usingInputDeviceIndex;
+    bool                                    _usingOutputDeviceIndex;
+    AudioDeviceModule::WindowsDeviceType    _inputDevice;
+    AudioDeviceModule::WindowsDeviceType    _outputDevice;
+    WebRtc_UWord16                          _inputDeviceIndex;
+    WebRtc_UWord16                          _outputDeviceIndex;
+    bool                                    _inputDeviceIsSpecified;
+    bool                                    _outputDeviceIsSpecified;
+
+    WAVEFORMATEX                            _waveFormatIn;
+    WAVEFORMATEX                            _waveFormatOut;
+
+    HWAVEIN                                 _hWaveIn;
+    HWAVEOUT                                _hWaveOut;
+
+    WAVEHDR                                 _waveHeaderIn[N_BUFFERS_IN];
+    WAVEHDR                                 _waveHeaderOut[N_BUFFERS_OUT];
+
+    WebRtc_UWord8                           _recChannels;
+    WebRtc_UWord8                           _playChannels;
+    WebRtc_UWord16                          _recBufCount;
+    WebRtc_UWord16                          _recDelayCount;
+    WebRtc_UWord16                          _recPutBackDelay;
+
+    // PCM ring buffers (4 bytes per sample covers 16-bit stereo).
+    WebRtc_Word8    _recBuffer[N_BUFFERS_IN][4*REC_BUF_SIZE_IN_SAMPLES];
+    WebRtc_Word8    _playBuffer[N_BUFFERS_OUT][4*PLAY_BUF_SIZE_IN_SAMPLES];
+
+    AudioDeviceModule::BufferType           _playBufType;
+
+private:
+    bool                                    _initialized;
+    bool                                    _recording;
+    bool                                    _playing;
+    bool                                    _recIsInitialized;
+    bool                                    _playIsInitialized;
+    bool                                    _startRec;
+    bool                                    _stopRec;
+    bool                                    _startPlay;
+    bool                                    _stopPlay;
+    bool                                    _AGC;
+
+private:
+    WebRtc_UWord32                          _prevPlayTime;
+    WebRtc_UWord32                          _prevRecTime;
+    WebRtc_UWord32                          _prevTimerCheckTime;
+
+    WebRtc_UWord16                          _playBufCount;          // playout buffer index
+    WebRtc_UWord16                          _dTcheckPlayBufDelay;   // dT for check of play buffer, {2,5,10} [ms]
+    WebRtc_UWord16                          _playBufDelay;          // playback delay
+    WebRtc_UWord16                          _playBufDelayFixed;     // fixed playback delay
+    WebRtc_UWord16                          _minPlayBufDelay;       // minimum playback delay
+    WebRtc_UWord16                          _MAX_minBuffer;         // level of (adaptive) min threshold must be < _MAX_minBuffer
+
+    WebRtc_Word32                           _erZeroCounter;         // counts "buffer-is-empty" events
+    WebRtc_Word32                           _intro;
+    WebRtc_Word32                           _waitCounter;
+
+    WebRtc_UWord32                          _writtenSamples;
+    WebRtc_UWord32                          _writtenSamplesOld;
+    WebRtc_UWord32                          _playedSamplesOld;
+
+    WebRtc_UWord32                          _sndCardPlayDelay;
+    WebRtc_UWord32                          _sndCardRecDelay;
+
+    // Previous playout/recording sample counters for GetClockDrift().
+    WebRtc_UWord32                          _plSampOld;
+    WebRtc_UWord32                          _rcSampOld;
+
+    WebRtc_UWord32                          _read_samples;
+    WebRtc_UWord32                          _read_samples_old;
+    WebRtc_UWord32                          _rec_samples_old;
+
+    // State that detects driver problems:
+    WebRtc_Word32                           _dc_diff_mean;
+    WebRtc_Word32                           _dc_y_prev;
+    WebRtc_Word32                           _dc_penalty_counter;
+    WebRtc_Word32                           _dc_prevtime;
+    WebRtc_UWord32                          _dc_prevplay;
+
+    WebRtc_UWord32                          _recordedBytes;         // accumulated #recorded bytes (reset periodically)
+    WebRtc_UWord32                          _prevRecByteCheckTime;  // time when we last checked the recording process
+
+    // CPU load measurements
+    LARGE_INTEGER                           _perfFreq;
+    LONGLONG                                _playAcc;               // accumulated time for playout callback
+    float                                   _avgCPULoad;            // average total (rec+play) CPU load
+
+    WebRtc_Word32                           _wrapCounter;
+
+    WebRtc_Word32                           _useHeader;
+    WebRtc_Word16                           _timesdwBytes;
+    WebRtc_Word32                           _no_of_msecleft_warnings;
+    WebRtc_Word32                           _writeErrors;
+    WebRtc_Word32                           _timerFaults;
+    WebRtc_Word32                           _timerRestartAttempts;
+
+    // Pending warning/error flags (see PlayoutWarning() etc. above).
+    WebRtc_UWord16                          _playWarning;
+    WebRtc_UWord16                          _playError;
+    WebRtc_UWord16                          _recWarning;
+    WebRtc_UWord16                          _recError;
+
+    WebRtc_UWord32                          _newMicLevel;
+    WebRtc_UWord32                          _minMicVolume;
+    WebRtc_UWord32                          _maxMicVolume;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_mixer_manager_win.cc b/trunk/src/modules/audio_device/main/source/win/audio_mixer_manager_win.cc
new file mode 100644
index 0000000..c3cf72e
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_mixer_manager_win.cc
@@ -0,0 +1,2722 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_mixer_manager_win.h"
+#include "trace.h"
+
+#include <strsafe.h>    // StringCchCopy(), StringCchCat(), StringCchPrintf()
+#include <cassert>      // assert()
+
+#ifdef _WIN32
+// removes warning: "reinterpret_cast: conversion from 'UINT' to 'HMIXEROBJ'
+//                of greater size"
+#pragma warning(disable:4312)
+#endif
+
+// Avoids the need of Windows 7 SDK
+#ifndef WAVE_MAPPED_kDefaultCommunicationDevice
+#define  WAVE_MAPPED_kDefaultCommunicationDevice   0x0010
+#endif
+
+namespace webrtc {
+
+// ============================================================================
+//                             CONSTRUCTION/DESTRUCTION
+// ============================================================================
+
// Constructs the mixer manager for the given trace identifier.
// Allocates the critical section that guards all mixer operations; the
// object owns it (it is a reference member released via delete in the
// destructor). No mixer device is opened here — both handles start NULL
// and the cached speaker/microphone control state is reset for every slot.
AudioMixerManager::AudioMixerManager(const WebRtc_Word32 id) :
    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _id(id),
    _inputMixerHandle(NULL),
    _outputMixerHandle(NULL)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s constructed", __FUNCTION__);
    ClearSpeakerState();
    ClearMicrophoneState();
}
+
// Destructor. Close() must run before the critical section is released,
// because it acquires that same critical section while closing any mixer
// handles that are still open. The delete on the reference member frees
// the CriticalSectionWrapper allocated in the constructor.
AudioMixerManager::~AudioMixerManager()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destructed", __FUNCTION__);

    Close();

    delete &_critSect;
}
+
+// ============================================================================
+//                                 PUBLIC METHODS
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Close
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerHandle != NULL)
+    {
+        mixerClose(_outputMixerHandle);
+        _outputMixerHandle = NULL;
+    }
+    if (_inputMixerHandle != NULL)
+    {
+        mixerClose(_inputMixerHandle);
+        _inputMixerHandle = NULL;
+    }
+    return 0;
+
+}
+
+// ----------------------------------------------------------------------------
+//  CloseSpeaker
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        return -1;
+    }
+
+    ClearSpeakerState(_outputMixerID);
+
+    mixerClose(_outputMixerHandle);
+    _outputMixerHandle = NULL;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CloseMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        return -1;
+    }
+
+    ClearMicrophoneState(_inputMixerID);
+
+    mixerClose(_inputMixerHandle);
+    _inputMixerHandle = NULL;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateAll
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::EnumerateAll()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    UINT nDevices = mixerGetNumDevs();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#mixer devices: %u", nDevices);
+
+    MIXERCAPS    caps;
+    MIXERLINE    destLine;
+    MIXERLINE    sourceLine;
+    MIXERCONTROL controlArray[MAX_NUMBER_OF_LINE_CONTROLS];
+
+    UINT mixId(0);
+    UINT destId(0);
+    UINT sourceId(0);
+
+    for (mixId = 0; mixId < nDevices; mixId++)
+    {
+        if (!GetCapabilities(mixId, caps, true))
+            continue;
+
+        for (destId = 0; destId < caps.cDestinations; destId++)
+        {
+            GetDestinationLineInfo(mixId, destId, destLine, true);
+            GetAllLineControls(mixId, destLine, controlArray, true);
+
+            for (sourceId = 0; sourceId < destLine.cConnections; sourceId++)
+            {
+                GetSourceLineInfo(mixId, destId, sourceId, sourceLine, true);
+                GetAllLineControls(mixId, sourceLine, controlArray, true);
+            }
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateSpeakers
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::EnumerateSpeakers()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    UINT nDevices = mixerGetNumDevs();
+    if (nDevices > MAX_NUMBER_MIXER_DEVICES)
+    {
+        assert(false);
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#mixer devices: %u", nDevices);
+
+    MIXERCAPS    caps;
+    MIXERLINE    destLine;
+    MIXERCONTROL controlArray[MAX_NUMBER_OF_LINE_CONTROLS];
+
+    UINT mixId(0);
+    UINT destId(0);
+
+    ClearSpeakerState();
+
+    // scan all avaliable mixer devices
+    for (mixId = 0; mixId < nDevices; mixId++)
+    {
+        // get capabilities for the specified mixer ID
+        GetCapabilities(mixId, caps);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[mixerID=%d] %s: ", mixId, WideToUTF8(caps.szPname));
+        // scan all avaliable destinations for this mixer
+        for (destId = 0; destId < caps.cDestinations; destId++)
+        {
+            GetDestinationLineInfo(mixId, destId, destLine);
+            if ((destLine.cControls == 0)                         ||    // no controls or
+                (destLine.cConnections == 0)                      ||    // no source lines or
+                (destLine.fdwLine & MIXERLINE_LINEF_DISCONNECTED) ||    // disconnected or
+                !(destLine.fdwLine & MIXERLINE_LINEF_ACTIVE))           // inactive
+            {
+                // don't store this line ID since it will not be possible to control
+                continue;
+            }
+            if ((destLine.dwComponentType == MIXERLINE_COMPONENTTYPE_DST_SPEAKERS) ||
+                (destLine.dwComponentType == MIXERLINE_COMPONENTTYPE_DST_HEADPHONES))
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found valid speaker/headphone (name: %s, ID: %u)", WideToUTF8(destLine.szName), destLine.dwLineID);
+                _speakerState[mixId].dwLineID = destLine.dwLineID;
+                _speakerState[mixId].speakerIsValid = true;
+                // retrieve all controls for the speaker component
+                GetAllLineControls(mixId, destLine, controlArray);
+                for (UINT c = 0; c < destLine.cControls; c++)
+                {
+                    if (controlArray[c].dwControlType == MIXERCONTROL_CONTROLTYPE_VOLUME)
+                    {
+                        _speakerState[mixId].dwVolumeControlID = controlArray[c].dwControlID;
+                        _speakerState[mixId].volumeControlIsValid = true;
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found volume control (name: %s, ID: %u)", WideToUTF8(controlArray[c].szName), controlArray[c].dwControlID);
+                    }
+                    else if (controlArray[c].dwControlType == MIXERCONTROL_CONTROLTYPE_MUTE)
+                    {
+                        _speakerState[mixId].dwMuteControlID = controlArray[c].dwControlID;
+                        _speakerState[mixId].muteControlIsValid = true;
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found mute control (name: %s, ID: %u)", WideToUTF8(controlArray[c].szName), controlArray[c].dwControlID);
+                    }
+                }
+                break;
+            }
+        }
+        if (!SpeakerIsValid(mixId))
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to find a valid speaker destination line", mixId);
+        }
+    }
+
+    if (ValidSpeakers() == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to locate any valid speaker line");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateMicrophones
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::EnumerateMicrophones()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    UINT nDevices = mixerGetNumDevs();
+    if (nDevices > MAX_NUMBER_MIXER_DEVICES)
+    {
+        assert(false);
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#mixer devices: %u", nDevices);
+
+    MIXERCAPS    caps;
+    MIXERLINE    destLine;
+    MIXERLINE    sourceLine;
+    MIXERCONTROL controlArray[MAX_NUMBER_OF_LINE_CONTROLS];
+
+    UINT mixId(0);
+    UINT destId(0);
+
+    ClearMicrophoneState();
+
+    // scan all avaliable mixer devices
+    for (mixId = 0; mixId < nDevices; mixId++)
+    {
+        // get capabilities for the specified mixer ID
+        GetCapabilities(mixId, caps);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[mixerID=%d] %s: ", mixId, WideToUTF8(caps.szPname));
+        // scan all avaliable destinations for this mixer
+        for (destId = 0; destId < caps.cDestinations; destId++)
+        {
+            GetDestinationLineInfo(mixId, destId, destLine);
+
+            if ((destLine.cConnections == 0)                      ||    // no source lines or
+                (destLine.fdwLine & MIXERLINE_LINEF_DISCONNECTED) ||    // disconnected or
+               !(destLine.fdwLine & MIXERLINE_LINEF_ACTIVE))            // inactive
+            {
+                // Don't store this line ID since there are no sources connected to this destination.
+                // Compare with the speaker side where we also exclude lines with no controls.
+                continue;
+            }
+
+            if (destLine.dwComponentType == MIXERLINE_COMPONENTTYPE_DST_WAVEIN)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found valid Wave In destination (name: %s, ID: %u)", WideToUTF8(destLine.szName), destLine.dwLineID);
+                _microphoneState[mixId].dwLineID = destLine.dwLineID;
+                _microphoneState[mixId].microphoneIsValid = true;
+
+                // retrieve all controls for the identified wave-in destination
+                if (!GetAllLineControls(mixId, destLine, controlArray))
+                {
+                    // This destination has no controls. We must try to control
+                    // one of its sources instead. 
+                    // This is a rare state but has been found for some
+                    // Logitech USB headsets.
+
+                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                    "this destination has no controls => must control source");
+                    for (DWORD sourceId = 0; sourceId < destLine.cConnections; sourceId++)
+                    {
+                        GetSourceLineInfo(mixId, destId, sourceId, sourceLine, false); 
+                        if (sourceLine.dwComponentType == 
+                            MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE)
+                        {
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                            "found microphone source ( name: %s, ID: %u)", 
+                            WideToUTF8(sourceLine.szName), sourceId);
+                            GetAllLineControls(mixId, sourceLine, controlArray, false);
+                            // scan the controls for this source and search for volume, 
+                            // mute and on/off (<=> boost) controls
+                            for (UINT sc = 0; sc < sourceLine.cControls; sc++)
+                            {
+                                if (controlArray[sc].dwControlType == 
+                                    MIXERCONTROL_CONTROLTYPE_VOLUME)
+                                {
+                                    // store this volume control
+                                    _microphoneState[mixId].dwVolumeControlID = 
+                                    controlArray[sc].dwControlID;
+                                    _microphoneState[mixId].volumeControlIsValid = true;
+                                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                                    "found volume control (name: %s, ID: %u)", 
+                                    WideToUTF8(controlArray[sc].szName), 
+                                    controlArray[sc].dwControlID);
+                                }
+                                else if (controlArray[sc].dwControlType == 
+                                         MIXERCONTROL_CONTROLTYPE_MUTE)
+                                {
+                                    // store this mute control
+                                    _microphoneState[mixId].dwMuteControlID =
+                                    controlArray[sc].dwControlID;
+                                    _microphoneState[mixId].muteControlIsValid = true;
+                                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                                    "found mute control (name: %s, ID: %u)", 
+                                    WideToUTF8(controlArray[sc].szName), 
+                                    controlArray[sc].dwControlID);
+                                }
+                                else if (controlArray[sc].dwControlType == 
+                                         MIXERCONTROL_CONTROLTYPE_ONOFF ||
+                                         controlArray[sc].dwControlType == 
+                                         MIXERCONTROL_CONTROLTYPE_LOUDNESS)
+                                {
+                                    // store this on/off control (most likely a Boost control)
+                                    _microphoneState[mixId].dwOnOffControlID = 
+                                    controlArray[sc].dwControlID;
+                                    _microphoneState[mixId].onOffControlIsValid = true;
+                                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                                    "found on/off control (name: %s, ID: %u)", 
+                                    WideToUTF8(controlArray[sc].szName), 
+                                    controlArray[sc].dwControlID);
+                                 }
+                             }
+                         }
+                    }
+
+                    break;
+                }
+
+                // It seems like there are three different configurations we can find in this state:
+                //
+                // (1) The Wave-in destination contains one MUX control only
+                // (2) The Wave-in destination contains one or more controls where one is a volume control
+                // (3) On Vista and Win 7, it seems like case 2 above is extended.
+                //     It is common that a Wave-in destination has two master controls (volume and mute),
+                //     AND a microphone source as well with its own volume and mute controls with unique
+                //     identifiers. Initial tests have shown that it is sufficient to modify the master
+                //     controls only. The source controls will "follow" the master settings, hence the
+                //     source controls seem to be redundant.
+                //
+                // For case 1, we should locate the selected source and its controls. The MUX setting will
+                // give us the selected source. NOTE - the selecion might not be a microphone.
+                //
+                // For case 2, the volume control works as a master level control and we should use that one.
+                //
+                // For case 3, we use the master controls only and assume that the source control will "follow".
+                //
+                // Examples of case 1: - SigmaTel Audio (built-in)
+                //                     - add more..
+                //
+                // Examples of case 2: - Plantronics USB Headset
+                //                      - Eutectics IPP 200 USB phone
+                //                      - add more...
+                //
+                // Examples of case 3: - Realtek High Definition on Vista (TL)
+                //                     - add more...
+
+                if ((destLine.cControls == 1) &&
+                    (controlArray[0].dwControlType == MIXERCONTROL_CONTROLTYPE_MUX))
+                {
+                    // Case 1: MUX control detected  => locate the selected source and its volume control
+                    //         Note that, the selecion might not be a microphone. A warning is given for
+                    //         this case only, i.e., it is OK to control a selected Line In source as long
+                    //         as it is connected to the wave-in destination.
+
+                    UINT selection(0);
+                    const DWORD nItemsInMux(controlArray[0].cMultipleItems);
+
+                    // decide which source line that is selected in the mux
+                    if (GetSelectedMuxSource(mixId, controlArray[0].dwControlID, nItemsInMux, selection))
+                    {
+                        // selection now contains the index of the selected source =>
+                        // read the line information for this source
+                        // if conditions listed below
+                        // condition 1: invalid source
+                        // condition 2: no controls
+                        // condition 3: disconnected
+                        // condition 4: inactive
+                        if (!GetSourceLineInfo(mixId, destId, selection, sourceLine)  ||
+                           (sourceLine.cControls == 0)                                ||
+                           (sourceLine.fdwLine & MIXERLINE_LINEF_DISCONNECTED)        ||
+                          !(sourceLine.fdwLine & MIXERLINE_LINEF_ACTIVE))               
+                        {
+                            continue;
+                        }
+
+                        if (sourceLine.dwComponentType != MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE)
+                        {
+                            // add more details about the selected source (not a microphone)
+                            TraceComponentType(sourceLine.dwComponentType);
+                            // send a warning just to inform about the fact that a non-microphone source will be controlled
+                            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "the selected (to be controlled) source is not a microphone type");
+                        }
+
+                        // retrieve all controls for the selected source
+                        GetAllLineControls(mixId, sourceLine, controlArray);
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "MUX selection is %u [0,%u]", selection, nItemsInMux-1);
+
+                        // scan the controls for this source and search for volume, mute and on/off (<=> boost) controls
+                        for (UINT sc = 0; sc < sourceLine.cControls; sc++)
+                        {
+                            if (controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_VOLUME)
+                            {
+                                // store this volume control
+                                _microphoneState[mixId].dwVolumeControlID = controlArray[sc].dwControlID;
+                                _microphoneState[mixId].volumeControlIsValid = true;
+                                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found volume control (name: %s, ID: %u)", WideToUTF8(controlArray[sc].szName), controlArray[sc].dwControlID);
+                            }
+                            else if (controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_MUTE)
+                            {
+                                // store this mute control
+                                _microphoneState[mixId].dwMuteControlID = controlArray[sc].dwControlID;
+                                _microphoneState[mixId].muteControlIsValid = true;
+                                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found mute control (name: %s, ID: %u)", WideToUTF8(controlArray[sc].szName), controlArray[sc].dwControlID);
+                            }
+                            else if (controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_ONOFF ||
+                                     controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_LOUDNESS)
+                            {
+                                // store this on/off control (most likely a Boost control)
+                                _microphoneState[mixId].dwOnOffControlID = controlArray[sc].dwControlID;
+                                _microphoneState[mixId].onOffControlIsValid = true;
+                                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found on/off control (name: %s, ID: %u)", WideToUTF8(controlArray[sc].szName), controlArray[sc].dwControlID);
+                            }
+                        }
+                    }
+                    else
+                    {
+                        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to detect which source to control");
+                    }
+
+                }
+                else if (destLine.cConnections == 1)
+                {
+                    // Case 2 or Case 3:
+
+                    GetSourceLineInfo(mixId, destId, 0, sourceLine);
+                    if ((sourceLine.dwComponentType == MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE) &&
+                        (sourceLine.cControls > 0))
+                    {
+                        // Case 3: same as Case 2 below but we have also detected a Microphone source
+                        //         with its own controls. So far, I have not been able to find any device
+                        //         where it is required to modify these controls. Until I have found such
+                        //         a device, this case will be handled as a Case 2 (see below).
+
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "microphone source controls will not be controlled");
+                    }
+                    else if ((sourceLine.dwComponentType == MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE) &&
+                             (sourceLine.cControls == 0))
+                    {
+                        // default state on non Vista/Win 7 machines
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "microphone source has no controls => use master controls instead");
+                    }
+                    else
+                    {
+                        // add more details about the selected source (not a microphone)
+                        TraceComponentType(sourceLine.dwComponentType);
+                        // send a warning just to inform about the fact that a non-microphone source will be controlled
+                        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "the connected (to be controlled) source is not a microphone type");
+                    }
+
+                    // Case 2 : one source only and no MUX control detected =>
+                    //          locate the master volume control (and mute + boost controls if possible)
+
+                    // scan the controls for this wave-in destination and search for volume, mute and on/off (<=> boost) controls
+                    for (UINT dc = 0; dc < destLine.cControls; dc++)
+                    {
+                        if (controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_VOLUME)
+                        {
+                            // store this volume control
+                            _microphoneState[mixId].dwVolumeControlID = controlArray[dc].dwControlID;
+                            _microphoneState[mixId].volumeControlIsValid = true;
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found volume control (name: %s, ID: %u)", WideToUTF8(controlArray[dc].szName), controlArray[dc].dwControlID);
+                        }
+                        else if (controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_MUTE)
+                        {
+                            // store this mute control
+                            _microphoneState[mixId].dwMuteControlID = controlArray[dc].dwControlID;
+                            _microphoneState[mixId].muteControlIsValid = true;
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found mute control (name: %s, ID: %u)", WideToUTF8(controlArray[dc].szName), controlArray[dc].dwControlID);
+                        }
+                        else if (controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_ONOFF ||
+                                 controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_LOUDNESS ||
+                                 controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_BOOLEAN)
+                        {
+                            // store this on/off control
+                            _microphoneState[mixId].dwOnOffControlID = controlArray[dc].dwControlID;
+                            _microphoneState[mixId].onOffControlIsValid = true;
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found on/off control (name: %s, ID: %u)", WideToUTF8(controlArray[dc].szName), controlArray[dc].dwControlID);
+                        }
+                    }
+                }
+                else
+                {
+                    // We are in a state where more than one source is connected to the wave-in destination.
+                    // I am bailing out here for now until I understand this case better.
+                    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to locate valid microphone controls for this mixer");
+                }
+                break;
+            }
+        }  // for (destId = 0; destId < caps.cDestinations; destId++)
+
+        if (!MicrophoneIsValid(mixId))
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to find a valid microphone destination line", mixId);
+        }
+    }  // for (mixId = 0; mixId < nDevices; mixId++)
+
+    if (ValidMicrophones() == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to locate any valid microphone line");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenSpeaker I(II)
+//
+//  Verifies that the mixer contains a valid speaker destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::OpenSpeaker(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenSpeaker(kDefaultDevice)");
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenSpeaker(kDefaultCommunicationDevice)");
+    }
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Close any existing output mixer handle
+    //
+    if (_outputMixerHandle != NULL)
+    {
+        mixerClose(_outputMixerHandle);
+        _outputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEOUT     hWaveOut(NULL);
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 2;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // We need a waveform-audio output handle for the currently selected output device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the output mixer.
+    //
+    if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        // check if it is possible to open the default communication device (supported on Windows 7)
+        res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL |
+            WAVE_MAPPED_kDefaultCommunicationDevice | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // if so, open the default communication device for real
+            res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_kDefaultCommunicationDevice);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+        }
+        else
+        {
+            // use default device since default communication device was not avaliable
+            res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "unable to open default communication device => using default instead");
+        }
+    }
+    else if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        // open default device since it has been requested
+        res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default output device");
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutOpen() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // aquired waveform-audio output handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveOut, &mixerId, MIXER_OBJECTF_HWAVEOUT);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEOUT) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified output device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio output handle is no longer needed.
+    //
+    waveOutClose(hWaveOut);
+
+    // Verify that the mixer contains a valid speaker destination line.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!SpeakerIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the speaker volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the output mixer handle and active mixer identifier
+    //
+    _outputMixerHandle = hMixer;
+    _outputMixerID = mixerId;
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the output mixer device is now open (0x%x)", _outputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenSpeaker II(II)
+//
+//  Verifies that the mixer contains a valid speaker destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+// Opens the output mixer associated with the waveform-audio output device at
+// enumeration position |index|. The device is opened only transiently to
+// resolve its mixer identifier; on success _outputMixerHandle/_outputMixerID
+// are updated. Returns 0 on success, -1 when the mixer has no controllable
+// speaker line.
+WebRtc_Word32 AudioMixerManager::OpenSpeaker(WebRtc_UWord16 index)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenSpeaker(index=%d)", index);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Close any existing output mixer handle
+    //
+    if (_outputMixerHandle != NULL)
+    {
+        mixerClose(_outputMixerHandle);
+        _outputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEOUT     hWaveOut(NULL);
+
+    const UINT   deviceID(index);  // use index parameter as device identifier
+
+    // 48 kHz 16-bit stereo PCM; only needed to open the device, no audio is
+    // actually rendered through this handle.
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 2;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // We need a waveform-audio output handle for the currently selected output device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the output mixer.
+    //
+    res = waveOutOpen(&hWaveOut, deviceID, &waveFormat, 0, 0, CALLBACK_NULL);
+    if (MMSYSERR_NOERROR != res)
+    {
+        // Non-fatal: mixerGetID below will fail on the NULL handle and we
+        // fall back to mixer identifier 0.
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutOpen(deviceID=%u) failed (err=%d)", index, res);
+        TraceWaveOutError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // acquired waveform-audio output handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveOut, &mixerId, MIXER_OBJECTF_HWAVEOUT);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEOUT) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified output device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio output handle is no longer needed.
+    //
+    waveOutClose(hWaveOut);
+
+    // Verify that the mixer contains a valid speaker destination line.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!SpeakerIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the speaker volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        // NOTE(review): on failure hMixer stays NULL but the function still
+        // returns 0 with _outputMixerHandle == NULL — callers observe success.
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the output mixer handle and active mixer identifier
+    //
+    _outputMixerHandle = hMixer;
+    _outputMixerID = mixerId;
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the output mixer device is now open (0x%x)", _outputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenMicrophone I(II)
+//
+//  Verifies that the mixer contains a valid wave-in destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+// Opens the input mixer that belongs to the default (or, on Windows 7+, the
+// default communication) waveform-audio input device. The device is opened
+// only transiently to resolve its mixer identifier; on success
+// _inputMixerHandle/_inputMixerID are updated. Returns 0 on success, -1 when
+// the mixer has no controllable microphone line.
+WebRtc_Word32 AudioMixerManager::OpenMicrophone(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenMicrophone(kDefaultDevice)");
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenMicrophone(kDefaultCommunicationDevice)");
+    }
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Close any existing input mixer handle
+    //
+    if (_inputMixerHandle != NULL)
+    {
+        mixerClose(_inputMixerHandle);
+        _inputMixerHandle = NULL;
+    }
+
+    // Bug fix: initialize res so an unexpected |device| value is reported as
+    // a failure below instead of reading an uninitialized MMRESULT.
+    MMRESULT     res(MMSYSERR_ERROR);
+    WAVEFORMATEX waveFormat;
+    HWAVEIN      hWaveIn(NULL);
+
+    // 48 kHz 16-bit mono PCM; only needed to open the device, no audio is
+    // actually recorded through this handle.
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 1;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0 ;
+
+    // We need a waveform-audio input handle for the currently selected input device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the input mixer.
+    //
+    if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        // check if it is possible to open the default communication device (supported on Windows 7)
+        res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL |
+            WAVE_MAPPED_kDefaultCommunicationDevice | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // if so, open the default communication device for real
+            res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_kDefaultCommunicationDevice);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+        }
+        else
+        {
+            // use default device since default communication device was not available
+            res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "unable to open default communication device => using default instead");
+        }
+    }
+    else if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        // open default device since it has been requested
+        res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default input device");
+    }
+
+    // Non-fatal: mixerGetID below fails on a NULL handle and we fall back to
+    // mixer identifier 0.
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInOpen() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // acquired waveform-audio input handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveIn, &mixerId, MIXER_OBJECTF_HWAVEIN);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEIN) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified input device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio input handle is no longer needed.
+    //
+    waveInClose(hWaveIn);
+
+    // Verify that the mixer contains a valid wave-in destination line and a volume control.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!MicrophoneIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the microphone volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the input mixer handle (NULL when mixerOpen failed) and the
+    // active mixer identifier.
+    _inputMixerHandle = hMixer;
+    _inputMixerID = mixerId;
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the input mixer device is now open (0x%x)", _inputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenMicrophone II(II)
+//
+//  Verifies that the mixer contains a valid wave-in destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+// Opens the input mixer associated with the waveform-audio input device at
+// enumeration position |index|. The device is opened only transiently to
+// resolve its mixer identifier; on success _inputMixerHandle/_inputMixerID
+// are updated. Returns 0 on success, -1 when the mixer has no controllable
+// microphone line.
+WebRtc_Word32 AudioMixerManager::OpenMicrophone(WebRtc_UWord16 index)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenMicrophone(index=%d)", index);
+
+    CriticalSectionScoped lock(_critSect);
+
+    // Close any existing input mixer handle
+    //
+    if (_inputMixerHandle != NULL)
+    {
+        mixerClose(_inputMixerHandle);
+        _inputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEIN         hWaveIn(NULL);
+
+    const UINT   deviceID(index);  // use index parameter as device identifier
+
+    // 48 kHz 16-bit mono PCM; only needed to open the device, no audio is
+    // actually recorded through this handle.
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 1;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // We need a waveform-audio input handle for the currently selected input device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the input mixer.
+    //
+    res = waveInOpen(&hWaveIn, deviceID, &waveFormat, 0, 0, CALLBACK_NULL);
+    if (MMSYSERR_NOERROR != res)
+    {
+        // Non-fatal: mixerGetID below will fail on the NULL handle and we
+        // fall back to mixer identifier 0.
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInOpen(deviceID=%u) failed (err=%d)", index, res);
+        TraceWaveInError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // acquired waveform-audio input handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveIn, &mixerId, MIXER_OBJECTF_HWAVEIN);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEIN) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified input device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio input handle is no longer needed.
+    //
+    waveInClose(hWaveIn);
+
+    // Verify that the mixer contains a valid wave-in destination line.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!MicrophoneIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the microphone volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        // NOTE(review): on failure hMixer stays NULL but the function still
+        // returns 0 with _inputMixerHandle == NULL — callers observe success.
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the input mixer handle and active mixer identifier
+    //
+    _inputMixerHandle = hMixer;
+    _inputMixerID = mixerId;
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the input mixer device is now open (0x%x)", _inputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+// True once an output mixer handle has been opened (see OpenSpeaker()).
+bool AudioMixerManager::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return _outputMixerHandle != NULL;
+}
+
+// ----------------------------------------------------------------------------
+// MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+// True once an input mixer handle has been opened (see OpenMicrophone()).
+bool AudioMixerManager::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return _inputMixerHandle != NULL;
+}
+
+// ----------------------------------------------------------------------------
+// SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Writes |volume| to the speaker volume control cached for the active output
+// mixer. Returns 0 on success, -1 when no output mixer is open or the write
+// fails.
+WebRtc_Word32 AudioMixerManager::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetSpeakerVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // The volume-control identifier was cached during device enumeration.
+    const UINT  mixer(_outputMixerID);
+    const DWORD volumeControl(_speakerState[mixer].dwVolumeControlID);
+
+    // Set one unsigned control value for the cached volume control.
+    return SetUnsignedControlValue(mixer, volumeControl, volume) ? 0 : -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+//
+//  Note that (MIXERCONTROL_CONTROLTYPE_VOLUME & MIXERCONTROL_CT_UNITS_MASK)
+//  always equals MIXERCONTROL_CT_UNITS_UNSIGNED;
+// ----------------------------------------------------------------------------
+
+// Reads the current level of the speaker volume control into |volume|.
+// Returns 0 on success, -1 when no output mixer is open or the read fails.
+WebRtc_Word32 AudioMixerManager::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+    // Consistency fix: serialize with SetSpeakerVolume(); the microphone
+    // read path (MicrophoneVolume()) already takes this lock as well.
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwVolumeControlID);
+    DWORD dwValue(0);
+
+    // Retrieve one unsigned control value for a specified volume-control identifier
+    //
+    if (!GetUnsignedControlValue(mixerID, dwControlID, dwValue))
+    {
+        return -1;
+    }
+
+    volume = dwValue;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+//
+//  Note that (MIXERCONTROL_CONTROLTYPE_VOLUME & MIXERCONTROL_CT_UNITS_MASK)
+//  always equals MIXERCONTROL_CT_UNITS_UNSIGNED
+// ----------------------------------------------------------------------------
+
+// Reports the upper bound of the speaker volume control's value range.
+// Returns 0 on success, -1 when no output mixer is open or the control
+// cannot be queried.
+WebRtc_Word32 AudioMixerManager::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // The static bounds of the control description carry the maximum level.
+    MIXERCONTROL control;
+    const UINT   mixer(_outputMixerID);
+
+    if (!GetLineControl(mixer, _speakerState[mixer].dwVolumeControlID, control))
+    {
+        return -1;
+    }
+
+    maxVolume = control.Bounds.dwMaximum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+// Reports the lower bound of the speaker volume control's value range.
+// Returns 0 on success, -1 when no output mixer is open or the control
+// cannot be queried.
+WebRtc_Word32 AudioMixerManager::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // The static bounds of the control description carry the minimum level.
+    MIXERCONTROL control;
+    const UINT   mixer(_outputMixerID);
+
+    if (!GetLineControl(mixer, _speakerState[mixer].dwVolumeControlID, control))
+    {
+        return -1;
+    }
+
+    minVolume = control.Bounds.dwMinimum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+// Reports the number of discrete positions the speaker volume control
+// supports. Returns 0 on success, -1 when no output mixer is open or the
+// control cannot be queried.
+WebRtc_Word32 AudioMixerManager::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier.
+    // Use the cached dwControlID directly; the original re-read the state
+    // table here (equivalent value, but left dwControlID unused and was
+    // inconsistent with MicrophoneVolumeStepSize()).
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    stepSize = static_cast<WebRtc_UWord16> (mixerControl.Metrics.cSteps);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+// Sets |available| to true when a volume control was detected for the active
+// output mixer during enumeration. Returns -1 if no output mixer is open.
+WebRtc_Word32 AudioMixerManager::SpeakerVolumeIsAvailable(bool& available)
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // Answer comes from state cached at enumeration time; no API call needed.
+    available = _speakerState[_outputMixerID].volumeControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+// Sets |available| to true when a mute control was detected for the active
+// output mixer during enumeration. Returns -1 if no output mixer is open.
+WebRtc_Word32 AudioMixerManager::SpeakerMuteIsAvailable(bool& available)
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // Answer comes from state cached at enumeration time; no API call needed.
+    available = _speakerState[_outputMixerID].muteControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+//
+//  This mute function works a master mute for the output speaker.
+// ----------------------------------------------------------------------------
+
+// Enables or disables the master mute on the speaker destination line.
+// Returns 0 on success; -1 when no output mixer is open, the line has no
+// mute control, or the control value cannot be written.
+WebRtc_Word32 AudioMixerManager::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetSpeakerMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // A valid mute-control identifier must have been cached during the
+    // enumeration phase, which took place before the mixer handle was opened.
+    if (!_speakerState[_outputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this speaker line");
+        return -1;
+    }
+
+    const DWORD muteControl(_speakerState[_outputMixerID].dwMuteControlID);
+
+    // Write the boolean state to the cached mute control.
+    return SetBooleanControlValue(_outputMixerID, muteControl, enable) ? 0 : -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+// Reads the master mute state of the speaker destination line into
+// |enabled|. Returns 0 on success; -1 when no output mixer is open, the line
+// has no mute control, or the control value cannot be read.
+WebRtc_Word32 AudioMixerManager::SpeakerMute(bool& enabled) const
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // A valid mute-control identifier must have been cached during the
+    // enumeration phase, which took place before the mixer handle was opened.
+    if (!_speakerState[_outputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this speaker line");
+        return -1;
+    }
+
+    bool isMuted(false);
+    const DWORD muteControl(_speakerState[_outputMixerID].dwMuteControlID);
+
+    // Read the boolean state of the cached mute control.
+    if (!GetBooleanControlValue(_outputMixerID, muteControl, isMuted))
+    {
+        return -1;
+    }
+
+    enabled = isMuted;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+// Sets |available| to true when a mute control was detected for the active
+// input mixer during enumeration. Returns -1 if no input mixer is open.
+WebRtc_Word32 AudioMixerManager::MicrophoneMuteIsAvailable(bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Answer comes from state cached at enumeration time; no API call needed.
+    available = _microphoneState[_inputMixerID].muteControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SetMicrophoneMute
+//
+//  This mute function works a master mute for the input microphone.
+// ----------------------------------------------------------------------------
+
+// Enables or disables the master mute on the wave-in destination line.
+// Returns 0 on success; -1 when no input mixer is open, the line has no
+// mute control, or the control value cannot be written.
+WebRtc_Word32 AudioMixerManager::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetMicrophoneMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // A valid mute-control identifier must have been cached during the
+    // enumeration phase, which took place before the mixer handle was opened.
+    if (!_microphoneState[_inputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this microphone line");
+        return -1;
+    }
+
+    const DWORD muteControl(_microphoneState[_inputMixerID].dwMuteControlID);
+
+    // Write the boolean state to the cached mute control.
+    return SetBooleanControlValue(_inputMixerID, muteControl, enable) ? 0 : -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+// Reads the master mute state of the wave-in destination line into
+// |enabled|. Returns 0 on success; -1 when no input mixer is open, the line
+// has no mute control, or the control value cannot be read.
+WebRtc_Word32 AudioMixerManager::MicrophoneMute(bool& enabled) const
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // A valid mute-control identifier must have been cached during the
+    // enumeration phase, which took place before the mixer handle was opened.
+    if (!_microphoneState[_inputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this microphone line");
+        return -1;
+    }
+
+    bool isMuted(false);
+    const DWORD muteControl(_microphoneState[_inputMixerID].dwMuteControlID);
+
+    // Read the boolean state of the cached mute control.
+    if (!GetBooleanControlValue(_inputMixerID, muteControl, isMuted))
+    {
+        return -1;
+    }
+
+    enabled = isMuted;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+// Sets |available| to true when an on/off (boost) control was detected for
+// the active input mixer during enumeration. Returns -1 if no input mixer
+// is open.
+WebRtc_Word32 AudioMixerManager::MicrophoneBoostIsAvailable(bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Answer comes from state cached at enumeration time; no API call needed.
+    available = _microphoneState[_inputMixerID].onOffControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+// Switches the boost (on/off) control of the wave-in destination line.
+// Returns 0 on success; -1 when no input mixer is open, the line has no
+// boost control, or the control value cannot be written.
+WebRtc_Word32 AudioMixerManager::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetMicrophoneBoost(enable=%u)", enable);
+
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // A valid boost-control identifier must have been cached during the
+    // enumeration phase, which took place before the mixer handle was opened.
+    if (!_microphoneState[_inputMixerID].onOffControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no boost control exists for this wave-in line");
+        return -1;
+    }
+
+    const DWORD boostControl(_microphoneState[_inputMixerID].dwOnOffControlID);
+
+    // Write the boolean state to the cached boost (on/off) control.
+    return SetBooleanControlValue(_inputMixerID, boostControl, enable) ? 0 : -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+// Reads the boost (on/off) state of the wave-in destination line into
+// |enabled|. Returns 0 on success; -1 when no input mixer is open, the line
+// has no boost control, or the control value cannot be read.
+WebRtc_Word32 AudioMixerManager::MicrophoneBoost(bool& enabled) const
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // A valid boost-control identifier must have been cached during the
+    // enumeration phase, which took place before the mixer handle was opened.
+    if (!_microphoneState[_inputMixerID].onOffControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no boost control exists for this wave-in line");
+        return -1;
+    }
+
+    bool isOn(false);
+    const DWORD boostControl(_microphoneState[_inputMixerID].dwOnOffControlID);
+
+    // Read the boolean state of the cached boost (on/off) control.
+    if (!GetBooleanControlValue(_inputMixerID, boostControl, isOn))
+    {
+        return -1;
+    }
+
+    enabled = isOn;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+// Sets |available| to true when a volume control was detected for the active
+// input mixer during enumeration. Returns -1 if no input mixer is open.
+WebRtc_Word32 AudioMixerManager::MicrophoneVolumeIsAvailable(bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Answer comes from state cached at enumeration time; no API call needed.
+    available = _microphoneState[_inputMixerID].volumeControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+// Writes |volume| to the microphone volume control cached for the active
+// input mixer. Returns 0 on success, -1 when no input mixer is open or the
+// write fails.
+WebRtc_Word32 AudioMixerManager::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // The volume-control identifier was cached during device enumeration.
+    const UINT  mixer(_inputMixerID);
+    const DWORD volumeControl(_microphoneState[mixer].dwVolumeControlID);
+
+    // Set one unsigned control value for the cached volume control.
+    return SetUnsignedControlValue(mixer, volumeControl, volume) ? 0 : -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+// Reads the current level of the microphone volume control into |volume|.
+// Returns 0 on success, -1 when no input mixer is open or the read fails.
+WebRtc_Word32 AudioMixerManager::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    DWORD level(0);
+    const UINT  mixer(_inputMixerID);
+    const DWORD volumeControl(_microphoneState[mixer].dwVolumeControlID);
+
+    // Retrieve one unsigned control value for the cached volume control.
+    if (!GetUnsignedControlValue(mixer, volumeControl, level))
+    {
+        return -1;
+    }
+
+    volume = level;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+// Reports the upper bound of the microphone volume control's value range.
+// Returns 0 on success, -1 when no input mixer is open or the control
+// cannot be queried.
+WebRtc_Word32 AudioMixerManager::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // The static bounds of the control description carry the maximum level.
+    MIXERCONTROL control;
+    const UINT   mixer(_inputMixerID);
+
+    if (!GetLineControl(mixer, _microphoneState[mixer].dwVolumeControlID, control))
+    {
+        return -1;
+    }
+
+    maxVolume = control.Bounds.dwMaximum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+// Reports the lower bound of the microphone volume control's value range.
+// Returns 0 on success, -1 when no input mixer is open or the control
+// cannot be queried.
+WebRtc_Word32 AudioMixerManager::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // The static bounds of the control description carry the minimum level.
+    MIXERCONTROL control;
+    const UINT   mixer(_inputMixerID);
+
+    if (!GetLineControl(mixer, _microphoneState[mixer].dwVolumeControlID, control))
+    {
+        return -1;
+    }
+
+    minVolume = control.Bounds.dwMinimum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_inputMixerID);
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    stepSize = static_cast<WebRtc_UWord16> (mixerControl.Metrics.cSteps);
+
+    return 0;
+}
+
+// ============================================================================
+//                              PRIVATE METHODS
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Devices
+//
+//  A given audio card has one Mixer device associated with it. All of the
+//  various components on that card are controlled through that card's one
+//  Mixer device.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::Devices() const
+{
+    UINT nDevs = mixerGetNumDevs();
+    return nDevs;
+}
+
+// ----------------------------------------------------------------------------
+//  DestinationLines
+//
+//  # destination lines given mixer ID.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::DestinationLines(UINT mixId) const
+{
+    MIXERCAPS caps;
+    if (!GetCapabilities(mixId, caps))
+    {
+        return 0;
+    }
+    return (caps.cDestinations);
+}
+// ----------------------------------------------------------------------------
//  SourceLines
+//
+//  # source lines given mixer ID and destination ID.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::SourceLines(UINT mixId, DWORD destId) const
+{
+    MIXERLINE dline;
+    if (!GetDestinationLineInfo(mixId, destId, dline))
+    {
+        return 0;
+    }
+    return (dline.cConnections);
+}
+
+// ----------------------------------------------------------------------------
+//  GetCapabilities
+//
+//  Queries a specified mixer device to determine its capabilities.
+// ----------------------------------------------------------------------------
+
// Queries the Wave mixer API for the capabilities of mixer device |mixId|
// and copies them into |caps|. When |trace| is set, the capabilities are
// also written to the WebRTC trace log. Returns false if the query fails
// or the device reports zero destination lines.
bool AudioMixerManager::GetCapabilities(UINT mixId, MIXERCAPS& caps, bool trace) const
{
    MMRESULT res;
    MIXERCAPS mcaps;

    // mcaps is a local scratch copy so |caps| is only touched on success.
    res = mixerGetDevCaps(mixId, &mcaps, sizeof(MIXERCAPS));
    if (res != MMSYSERR_NOERROR)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetDevCaps() failed (err=%d)", res);
        return false;
    }

    memcpy(&caps, &mcaps, sizeof(MIXERCAPS));

    if (trace)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Mixer ID %u:", mixId);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID      : %u", caps.wMid);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID           : %u", caps.wPid);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver    : %u", caps.vDriverVersion);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name         : %s", WideToUTF8(caps.szPname));
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "misc. support bits   : %u", caps.fdwSupport);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "count of destinations: %u (+)", caps.cDestinations);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
    }

    // A mixer without destination lines is unusable for this class.
    if (caps.cDestinations == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "invalid number of mixer destinations");
        return false;
    }

    return true;
}
+
+// ----------------------------------------------------------------------------
+//  GetDestinationLineInfo
+// ----------------------------------------------------------------------------
+
// Fills |line| with information about destination line |destId| on mixer
// device |mixId|. When |trace| is set, a detailed description of the line
// is written to the WebRTC trace log. Returns false if the Wave mixer API
// rejects the query.
bool AudioMixerManager::GetDestinationLineInfo(UINT mixId, DWORD destId, MIXERLINE& line, bool trace) const
{
    MMRESULT  res;
    MIXERLINE mline;

    mline.cbStruct = sizeof(MIXERLINE);
    mline.dwDestination = destId;   // max destination index is cDestinations-1
    mline.dwSource = 0;             // not set for MIXER_GETLINEINFOF_DESTINATION

    // Retrieve information about the specified destination line of a mixer device.
    // Note that we use the mixer ID here and not a handle to an opened mixer.
    // It is not required to open the mixer for enumeration purposes only.
    //
    res = mixerGetLineInfo(reinterpret_cast<HMIXEROBJ>(mixId), &mline, MIXER_OBJECTF_MIXER | MIXER_GETLINEINFOF_DESTINATION);
    if (res != MMSYSERR_NOERROR)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineInfo(MIXER_GETLINEINFOF_DESTINATION) failed (err=%d)", res);
        return false;
    }

    // Only publish the result to the caller once the query has succeeded.
    memcpy(&line, &mline, sizeof(MIXERLINE));

    if (trace)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "> Destination Line ID %u:", destId);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "destination line index : %u", mline.dwDestination);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwLineID               : %lu (unique)", mline.dwLineID);
        TraceStatusAndSupportFlags(mline.fdwLine);
        TraceComponentType(mline.dwComponentType);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "count of channels      : %u", mline.cChannels);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "# audio source lines   : %u (+)", mline.cConnections);    // valid only for destinations
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "# controls             : %u (*)", mline.cControls);       // can be zero
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "short name             : %s", WideToUTF8(mline.szShortName));
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "full name              : %s", WideToUTF8(mline.szName));
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
        TraceTargetType(mline.Target.dwType);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "target device ID       : %lu", mline.Target.dwDeviceID);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID        : %u", mline.Target.wMid);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID             : %u", mline.Target.wPid);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "driver version         : %u", mline.Target.vDriverVersion);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name           : %s", WideToUTF8(mline.Target.szPname));
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "---------------------------------------------------------------");
    }

    return true;
}
+
+// ----------------------------------------------------------------------------
+//  GetSourceLineInfo
+// ----------------------------------------------------------------------------
+
// Fills |line| with information about source line |srcId| feeding
// destination |destId| on mixer device |mixId|. When |trace| is set, a
// description of the line is written to the WebRTC trace log. Returns
// false if the Wave mixer API rejects the query.
bool AudioMixerManager::GetSourceLineInfo(UINT mixId, DWORD destId, DWORD srcId, MIXERLINE& line, bool trace) const
{
    MMRESULT  res;
    MIXERLINE mline;

    mline.cbStruct = sizeof(MIXERLINE);
    mline.dwDestination = destId;   // we want the source info for this destination
    mline.dwSource = srcId;         // source index (enumerate over these)

    // Retrieve information about the specified source line of a mixer device.
    // Note that we use the mixer ID here and not a handle to an opened mixer.
    // It is not required to open the mixer for enumeration purposes only.
    //
    res = mixerGetLineInfo(reinterpret_cast<HMIXEROBJ>(mixId), &mline, MIXER_OBJECTF_MIXER | MIXER_GETLINEINFOF_SOURCE);
    if (res != MMSYSERR_NOERROR)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineInfo(MIXER_GETLINEINFOF_SOURCE) failed (err=%d)", res);
        return false;
    }

    // Only publish the result to the caller once the query has succeeded.
    memcpy(&line, &mline, sizeof(MIXERLINE));

    if (trace)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " >> Source Line ID %u:", srcId);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "destination line index : %u", mline.dwDestination);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwSource               : %u", mline.dwSource);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwLineID               : %lu (unique)", mline.dwLineID);
        TraceStatusAndSupportFlags(mline.fdwLine);
        TraceComponentType(mline.dwComponentType);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "# controls             : %u (*)", mline.cControls);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "full name              : %s", WideToUTF8(mline.szName));
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
        TraceTargetType(mline.Target.dwType);
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "---------------------------------------------------------------");
    }

    return true;
}
+
+// ----------------------------------------------------------------------------
+// GetAllLineControls
+// ----------------------------------------------------------------------------
+
// Retrieves descriptions of ALL controls on audio line |line| of mixer
// device |mixId| into |controlArray|. The caller must supply an array with
// room for at least line.cControls elements, since that many entries are
// requested from the API. When |trace| is set, each control (including its
// current value, via GetControlDetails) is written to the trace log.
// Returns false if the line has no controls or the query fails.
bool AudioMixerManager::GetAllLineControls(UINT mixId, const MIXERLINE& line, MIXERCONTROL* controlArray, bool trace) const
{
    // Ensure that we don't try to acquire information if there are no controls for this line
    //
    if (line.cControls == 0)
        return false;

    MMRESULT          res;
    MIXERLINECONTROLS mlineControls;            // contains information about the controls of an audio line

    mlineControls.dwLineID  = line.dwLineID;    // unique audio line identifier
    mlineControls.cControls = line.cControls;   // number of controls associated with the line
    mlineControls.pamxctrl  = controlArray;     // points to the first MIXERCONTROL structure to be filled
    mlineControls.cbStruct  = sizeof(MIXERLINECONTROLS);
    mlineControls.cbmxctrl  = sizeof(MIXERCONTROL);

    // Get information on ALL controls associated with the specified audio line
    //
    res = mixerGetLineControls(reinterpret_cast<HMIXEROBJ>(mixId), &mlineControls, MIXER_OBJECTF_MIXER | MIXER_GETLINECONTROLSF_ALL);
    if (res != MMSYSERR_NOERROR)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineControls(MIXER_GETLINECONTROLSF_ALL) failed  (err=%d)", res);
        return false;
    }

    if (trace)
    {
        for (UINT c = 0; c < line.cControls; c++)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " >> Control ID %u:", c);
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwControlID            : %u (unique)", controlArray[c].dwControlID);
            TraceControlType(controlArray[c].dwControlType);
            TraceControlStatusAndSupportFlags(controlArray[c].fdwControl);
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cMultipleItems         : %u", controlArray[c].cMultipleItems);
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "short name             : %s", WideToUTF8(controlArray[c].szShortName));
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "full name              : %s", WideToUTF8(controlArray[c].szName));
            // The Bounds union is interpreted per the control's unit class.
            if ((controlArray[c].dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_SIGNED)
            {
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "min signed value       : %d", controlArray[c].Bounds.lMinimum);
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "max signed value       : %d", controlArray[c].Bounds.lMaximum);
            }
            else if ((controlArray[c].dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_UNSIGNED ||
                     (controlArray[c].dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_BOOLEAN)
            {
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "min unsigned value     : %u",  controlArray[c].Bounds.dwMinimum);
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "max unsigned value     : %u", controlArray[c].Bounds.dwMaximum);
            }
            // Metrics.cSteps is not defined for custom controls.
            if (controlArray[c].dwControlType  != MIXERCONTROL_CONTROLTYPE_CUSTOM)
            {
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cSteps                 : %u",  controlArray[c].Metrics.cSteps);
            }
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "...............................................................");
            GetControlDetails(mixId, controlArray[c], true);
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "...............................................................");

        }
    }

    return true;
}
+
+// ----------------------------------------------------------------------------
//  GetLineControl
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetLineControl(UINT mixId, DWORD dwControlID, MIXERCONTROL& control) const
+{
+    MMRESULT          res;
+    MIXERLINECONTROLS mlineControl;
+
+    mlineControl.dwControlID = dwControlID;
+    mlineControl.cControls   = 1;
+    mlineControl.pamxctrl    = &control;
+    mlineControl.cbStruct    = sizeof(MIXERLINECONTROLS);
+    mlineControl.cbmxctrl    = sizeof(MIXERCONTROL);
+
+    // Get information on one controls associated with the specified conrol identifier
+    //
+    res = mixerGetLineControls(reinterpret_cast<HMIXEROBJ>(mixId), &mlineControl, MIXER_OBJECTF_MIXER | MIXER_GETLINECONTROLSF_ONEBYID);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineControls(MIXER_GETLINECONTROLSF_ONEBYID) failed (err=%d)", res);
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetControlDetails
+// ----------------------------------------------------------------------------
+
// Reads the current value(s) of the single control described by
// |controlArray| on mixer device |mixId|. The unit class of the control
// (signed / unsigned / boolean) selects which local value buffer the API
// writes into. When |trace| is set, each retrieved value is written to the
// trace log. Returns false if the query fails.
bool AudioMixerManager::GetControlDetails(UINT mixId, MIXERCONTROL& controlArray, bool trace) const
{
    // Debug-build guard; release builds are protected by the clamp below.
    assert(controlArray.cMultipleItems <= MAX_NUMBER_OF_MULTIPLE_ITEMS);

    MMRESULT                     res;
    MIXERCONTROLDETAILS          controlDetails;

    // One buffer per possible unit class; only one is actually used.
    MIXERCONTROLDETAILS_UNSIGNED valueUnsigned[MAX_NUMBER_OF_MULTIPLE_ITEMS];
    MIXERCONTROLDETAILS_SIGNED   valueSigned[MAX_NUMBER_OF_MULTIPLE_ITEMS];
    MIXERCONTROLDETAILS_BOOLEAN  valueBoolean[MAX_NUMBER_OF_MULTIPLE_ITEMS];

    // Remembers which buffer was handed to the API so the trace loop below
    // reads from the right one.
    enum ControlType
    {
        CT_UNITS_UNSIGNED,
        CT_UNITS_SIGNED,
        CT_UNITS_BOOLEAN
    };

    ControlType ctype(CT_UNITS_UNSIGNED);

    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
    controlDetails.dwControlID    = controlArray.dwControlID;       // control identifier
    controlDetails.cChannels      = 1;                              // we need to set values as if they were uniform
    controlDetails.cMultipleItems = controlArray.cMultipleItems;    // only nonzero for CONTROLF_MULTIPLE controls
                                                                    // can e.g. happen for CONTROLTYPE_MUX
    // Clamp to the local buffer capacity to avoid overrunning the arrays.
    if (controlDetails.cMultipleItems > MAX_NUMBER_OF_MULTIPLE_ITEMS)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "cMultipleItems > %d", MAX_NUMBER_OF_MULTIPLE_ITEMS);
        controlDetails.cMultipleItems = MAX_NUMBER_OF_MULTIPLE_ITEMS;
    }

    // NOTE(review): if the control's unit class is none of SIGNED/UNSIGNED/
    // BOOLEAN (e.g. CUSTOM), cbDetails/paDetails are left unset before the
    // API call below — confirm callers never pass such controls here.
    if ((controlArray.dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_SIGNED)
    {
        ctype = CT_UNITS_SIGNED;
        controlDetails.cbDetails = sizeof(MIXERCONTROLDETAILS_SIGNED);
        controlDetails.paDetails = &valueSigned[0];
    }
    else if ((controlArray.dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_UNSIGNED)
    {
        ctype = CT_UNITS_UNSIGNED;
        controlDetails.cbDetails = sizeof(MIXERCONTROLDETAILS_UNSIGNED);
        controlDetails.paDetails = &valueUnsigned[0];
    }
    else if ((controlArray.dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_BOOLEAN)
    {
        ctype = CT_UNITS_BOOLEAN;
        controlDetails.cbDetails = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
        controlDetails.paDetails = &valueBoolean[0];
    }

    // Retrieve a control's value
    //
    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
    if (res != MMSYSERR_NOERROR)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
        return false;
    }

    if (trace)
    {
        // MULTIPLE controls report one value per item; plain controls
        // (cMultipleItems == 0) still have a single value to log.
        UINT nItems(1);
        nItems = (controlDetails.cMultipleItems > 0 ? controlDetails.cMultipleItems : 1);
        for (UINT i = 0; i < nItems; i++)
        {
            if (ctype == CT_UNITS_SIGNED)
            {
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "signed value           : %d", valueSigned[i].lValue);
            }
            else if (ctype == CT_UNITS_UNSIGNED)
            {
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unsigned value         : %u", valueUnsigned[i].dwValue);
            }
            else if (ctype == CT_UNITS_BOOLEAN)
            {
                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "boolean value          : %u", valueBoolean[i].fValue);
            }
        }
    }

    return true;
}
+
+// ----------------------------------------------------------------------------
+//  GetUnsignedControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD& dwValue) const
+{
+    MMRESULT                     res;
+    MIXERCONTROLDETAILS          controlDetails;
+    MIXERCONTROLDETAILS_UNSIGNED valueUnsigned;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_UNSIGNED);
+    controlDetails.paDetails      = &valueUnsigned;
+
+    // Retrieve the unsigned value
+    //
+    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    // Deliver the retrieved value
+    //
+    dwValue = valueUnsigned.dwValue;
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  SetUnsignedControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD dwValue) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AudioMixerManager::SetUnsignedControlValue(mixId=%u, dwControlID=%d, dwValue=%d)", mixId, dwControlID, dwValue);
+
+    MMRESULT                     res;
+    MIXERCONTROLDETAILS          controlDetails;
+    MIXERCONTROLDETAILS_UNSIGNED valueUnsigned;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_UNSIGNED);
+    controlDetails.paDetails      = &valueUnsigned;
+
+    valueUnsigned.dwValue         = dwValue;
+
+    // Set the unsigned value
+    //
+    res = mixerSetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerSetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  SetBooleanControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SetBooleanControlValue(UINT mixId, DWORD dwControlID, bool value) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetBooleanControlValue(mixId=%u, dwControlID=%d, value=%d)", mixId, dwControlID, value);
+
+    MMRESULT                    res;
+    MIXERCONTROLDETAILS         controlDetails;
+    MIXERCONTROLDETAILS_BOOLEAN valueBoolean;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
+    controlDetails.paDetails      = &valueBoolean;
+
+    if (value == true)
+        valueBoolean.fValue = TRUE;
+    else
+        valueBoolean.fValue = FALSE;
+
+    // Set the boolean value
+    //
+    res = mixerSetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerSetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetBooleanControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetBooleanControlValue(UINT mixId, DWORD dwControlID, bool& value) const
+{
+    MMRESULT                    res;
+    MIXERCONTROLDETAILS         controlDetails;
+    MIXERCONTROLDETAILS_BOOLEAN valueBoolean;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
+    controlDetails.paDetails      = &valueBoolean;
+
+    // Retrieve the boolean value
+    //
+    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    // Deliver the retrieved value
+    //
+    if (valueBoolean.fValue == 0)
+        value = false;
+    else
+        value = true;
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetSelectedMuxSource
+// ----------------------------------------------------------------------------
+
// Determines which input of a MUX/mixer-type control identified by
// |dwControlID| (with |cMultipleItems| selectable items) is currently
// selected on mixer device |mixId|, and delivers the corresponding source
// index in |index|. Returns false if the value query fails.
bool AudioMixerManager::GetSelectedMuxSource(UINT mixId, DWORD dwControlID, DWORD cMultipleItems, UINT& index) const
{
    // Debug-build guard against overrunning the local value buffer.
    assert(cMultipleItems <= MAX_NUMBER_OF_MULTIPLE_ITEMS);

    MMRESULT                    res;
    MIXERCONTROLDETAILS         controlDetails;
    MIXERCONTROLDETAILS_BOOLEAN valueBoolean[MAX_NUMBER_OF_MULTIPLE_ITEMS];
    // Zero-fill so unused entries read as "not selected".
    memset(&valueBoolean, 0, sizeof(valueBoolean));

    controlDetails.dwControlID    = dwControlID;
    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
    controlDetails.cChannels      = 1;
    controlDetails.cMultipleItems = cMultipleItems;
    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
    controlDetails.paDetails      = &valueBoolean;

    // Retrieve the boolean values
    //
    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
    if (res != MMSYSERR_NOERROR)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
        return false;
    }

    // Map the current MUX setting to an index corresponding to a source index.
    // e.g. with cMultipleItems = 3,
    //  valueBoolean[] = {1,0,0} => index = 2
    //  valueBoolean[] = {0,1,0} => index = 1
    //  valueBoolean[] = {0,0,1} => index = 0
    //
    // If there is no "1" in the array, we assume index should be 0.
    index = 0;
    for (DWORD i = 0; i < cMultipleItems; i++)
    {
        if (valueBoolean[i].fValue > 0)
        {
            // MUX items are reported in reverse order relative to source
            // indices, hence the mirrored mapping.
            index = (cMultipleItems - 1) - i;
            break;
        }
    }

    return true;
}
+
+// ----------------------------------------------------------------------------
+//  TraceStatusAndSupportFlags
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceStatusAndSupportFlags(DWORD fdwLine) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("status & support flags : 0x%x "), fdwLine);
+
+    switch (fdwLine)
+    {
+    case MIXERLINE_LINEF_ACTIVE:
+        StringCchCat(buf, 128, TEXT("(ACTIVE DESTINATION)"));
+        break;
+    case MIXERLINE_LINEF_DISCONNECTED:
+        StringCchCat(buf, 128, TEXT("(DISCONNECTED)"));
+        break;
+    case MIXERLINE_LINEF_SOURCE:
+        StringCchCat(buf, 128, TEXT("(INACTIVE SOURCE)"));
+        break;
+    case MIXERLINE_LINEF_SOURCE | MIXERLINE_LINEF_ACTIVE:
+        StringCchCat(buf, 128, TEXT("(ACTIVE SOURCE)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceComponentType
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceComponentType(DWORD dwComponentType) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("component type         : 0x%x "), dwComponentType);
+
+    switch (dwComponentType)
+    {
+    // Destination
+    case MIXERLINE_COMPONENTTYPE_DST_UNDEFINED:
+        StringCchCat(buf, 128, TEXT("(DST_UNDEFINED)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_DIGITAL:
+        StringCchCat(buf, 128, TEXT("(DST_DIGITAL)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_LINE:
+        StringCchCat(buf, 128, TEXT("(DST_LINE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_MONITOR:
+        StringCchCat(buf, 128, TEXT("(DST_MONITOR)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_SPEAKERS:
+        StringCchCat(buf, 128, TEXT("(DST_SPEAKERS)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_HEADPHONES:
+        StringCchCat(buf, 128, TEXT("(DST_HEADPHONES)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_TELEPHONE:
+        StringCchCat(buf, 128, TEXT("(DST_TELEPHONE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_WAVEIN:
+        StringCchCat(buf, 128, TEXT("(DST_WAVEIN)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_VOICEIN:
+        StringCchCat(buf, 128, TEXT("(DST_VOICEIN)"));
+        break;
+    // Source
+    case MIXERLINE_COMPONENTTYPE_SRC_UNDEFINED:
+        StringCchCat(buf, 128, TEXT("(SRC_UNDEFINED)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_DIGITAL:
+        StringCchCat(buf, 128, TEXT("(SRC_DIGITAL)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_LINE:
+        StringCchCat(buf, 128, TEXT("(SRC_LINE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE:
+        StringCchCat(buf, 128, TEXT("(SRC_MICROPHONE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_SYNTHESIZER:
+        StringCchCat(buf, 128, TEXT("(SRC_SYNTHESIZER)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_COMPACTDISC:
+        StringCchCat(buf, 128, TEXT("(SRC_COMPACTDISC)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_TELEPHONE:
+        StringCchCat(buf, 128, TEXT("(SRC_TELEPHONE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_PCSPEAKER:
+        StringCchCat(buf, 128, TEXT("(SRC_PCSPEAKER)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_WAVEOUT:
+        StringCchCat(buf, 128, TEXT("(SRC_WAVEOUT)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_AUXILIARY:
+        StringCchCat(buf, 128, TEXT("(SRC_AUXILIARY)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_ANALOG:
+        StringCchCat(buf, 128, TEXT("(SRC_ANALOG)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceTargetType
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceTargetType(DWORD dwType) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("media device type      : 0x%x "), dwType);
+
+    switch (dwType)
+    {
+    case MIXERLINE_TARGETTYPE_UNDEFINED:
+        StringCchCat(buf, 128, TEXT("(UNDEFINED)"));
+        break;
+    case MIXERLINE_TARGETTYPE_WAVEOUT:
+        StringCchCat(buf, 128, TEXT("(WAVEOUT)"));
+        break;
+    case MIXERLINE_TARGETTYPE_WAVEIN:
+        StringCchCat(buf, 128, TEXT("(WAVEIN)"));
+        break;
+    case MIXERLINE_TARGETTYPE_MIDIOUT:
+        StringCchCat(buf, 128, TEXT("(MIDIOUT)"));
+        break;
+    case MIXERLINE_TARGETTYPE_MIDIIN:
+        StringCchCat(buf, 128, TEXT("(MIDIIN)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceControlType
+//
+//  Logs two lines via WEBRTC_TRACE for a MIXERCONTROL type value: first the
+//  control's class (dwControlType masked with MIXERCONTROL_CT_CLASS_MASK),
+//  then the full MIXERCONTROL_CONTROLTYPE_* value. Unknown values are
+//  reported as "(INVALID)".
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceControlType(DWORD dwControlType) const
+{
+    TCHAR buf[128];
+
+    // Class type classification
+    //
+    StringCchPrintf(buf, 128, TEXT("class type             : 0x%x "), dwControlType);
+
+    switch (dwControlType & MIXERCONTROL_CT_CLASS_MASK)
+    {
+    case MIXERCONTROL_CT_CLASS_CUSTOM:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_CUSTOM)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_METER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_METER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_SWITCH:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_SWITCH)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_NUMBER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_NUMBER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_SLIDER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_SLIDER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_FADER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_FADER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_TIME:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_TIME)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_LIST:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_LIST)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+
+    // Control type (for each class)
+    //
+    StringCchPrintf(buf, 128, TEXT("control type           : 0x%x "), dwControlType);
+
+    switch (dwControlType)
+    {
+    case MIXERCONTROL_CONTROLTYPE_CUSTOM:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_CUSTOM)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BOOLEANMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BOOLEANMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SIGNEDMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SIGNEDMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_PEAKMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_PEAKMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_UNSIGNEDMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_UNSIGNEDMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BOOLEAN:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BOOLEAN)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_ONOFF:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_ONOFF)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MUTE:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MUTE)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MONO:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MONO)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_LOUDNESS:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_LOUDNESS)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_STEREOENH:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_STEREOENH)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BASS_BOOST:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BASS_BOOST)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BUTTON:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BUTTON)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_DECIBELS:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_DECIBELS)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SIGNED:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SIGNED)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_UNSIGNED:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_UNSIGNED)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_PERCENT:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_PERCENT)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SLIDER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SLIDER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_PAN:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_PAN)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_QSOUNDPAN:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_QSOUNDPAN)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_FADER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_FADER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_VOLUME:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_VOLUME)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BASS:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BASS)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_TREBLE:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_TREBLE)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_EQUALIZER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_EQUALIZER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SINGLESELECT:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SINGLESELECT)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MUX:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MUX)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MULTIPLESELECT)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MIXER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MIXER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MICROTIME:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MICROTIME)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MILLITIME:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MILLITIME)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceControlStatusAndSupportFlags
+//
+//  Logs (via WEBRTC_TRACE) which MIXERCONTROL_CONTROLF_* flags are set in
+//  fdwControl; several suffixes are appended if several flags are set.
+//
+//  fdwControl
+//
+//  Status and support flags for the audio line control. The following values
+//  are defined:
+//
+//  MIXERCONTROL_CONTROLF_DISABLED
+//
+//  The control is disabled, perhaps due to other settings for the mixer hardware,
+//  and cannot be used. An application can read current settings from a
+//  disabled control, but it cannot apply settings.
+//
+//  MIXERCONTROL_CONTROLF_MULTIPLE
+//
+//  The control has two or more settings per channel. An equalizer, for example,
+//  requires this flag because each frequency band can be set to a different value.
+//  An equalizer that affects both channels of a stereo line in a uniform fashion
+//  will also specify the MIXERCONTROL_CONTROLF_UNIFORM flag.
+//
+//  MIXERCONTROL_CONTROLF_UNIFORM
+//
+//  The control acts on all channels of a multichannel line in a uniform fashion.
+//  For example, a control that mutes both channels of a stereo line would set
+//  this flag. Most MIXERCONTROL_CONTROLTYPE_MUX and
+//  MIXERCONTROL_CONTROLTYPE_MIXER controls also specify the
+//  MIXERCONTROL_CONTROLF_UNIFORM flag.
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceControlStatusAndSupportFlags(DWORD fdwControl) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("control support flags  : 0x%x "), fdwControl);
+
+    if (fdwControl & MIXERCONTROL_CONTROLF_DISABLED)
+    {
+        // The control is disabled, perhaps due to other settings for the mixer hardware,
+        // and cannot be used. An application can read current settings from a disabled
+        // control, but it cannot apply settings.
+        StringCchCat(buf, 128, TEXT("(CONTROLF_DISABLED)"));
+    }
+
+    if (fdwControl & MIXERCONTROL_CONTROLF_MULTIPLE)
+    {
+        // The control has two or more settings per channel. An equalizer, for example,
+        // requires this flag because each frequency band can be set to a different
+        // value. An equalizer that affects both channels of a stereo line in a
+        // uniform fashion will also specify the MIXERCONTROL_CONTROLF_UNIFORM flag.
+        StringCchCat(buf, 128, TEXT("(CONTROLF_MULTIPLE)"));
+    }
+
+    if (fdwControl & MIXERCONTROL_CONTROLF_UNIFORM)
+    {
+        // The control acts on all channels of a multichannel line in a uniform
+        // fashion. For example, a control that mutes both channels of a stereo
+        // line would set this flag. Most MIXERCONTROL_CONTROLTYPE_MUX and
+        // MIXERCONTROL_CONTROLTYPE_MIXER controls also specify the
+        // MIXERCONTROL_CONTROLF_UNIFORM flag.
+        StringCchCat(buf, 128, TEXT("(CONTROLF_UNIFORM)"));
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  ClearSpeakerState I (II)
+//
+//  Resets the cached speaker-line state (line/control IDs and validity
+//  flags) for one mixer device slot.
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearSpeakerState(UINT idx)
+{
+    _speakerState[idx].dwLineID = 0L;
+    _speakerState[idx].dwVolumeControlID = 0L;
+    _speakerState[idx].dwMuteControlID = 0L;
+    _speakerState[idx].speakerIsValid = false;
+    _speakerState[idx].muteControlIsValid = false;
+    _speakerState[idx].volumeControlIsValid = false;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearSpeakerState II (II)
+//
+//  Resets the cached speaker-line state for every mixer device slot
+//  (0..MAX_NUMBER_MIXER_DEVICES-1).
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearSpeakerState()
+{
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        ClearSpeakerState(i);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsValid
+//
+//  Returns the cached validity flag for the speaker line of mixer device
+//  |idx|. No bounds check is performed on idx.
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SpeakerIsValid(UINT idx) const
+{
+    return (_speakerState[idx].speakerIsValid);
+}
+
+// ----------------------------------------------------------------------------
+//  ValidSpeakers
+//
+//  Counts number of valid speaker destinations for all mixer devices.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::ValidSpeakers() const
+{
+    UINT nSpeakers(0);
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        if (SpeakerIsValid(i))
+            nSpeakers++;
+    }
+    return nSpeakers;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearMicrophoneState I (II)
+//
+//  Resets the cached microphone-line state (line/control IDs and validity
+//  flags) for one mixer device slot.
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearMicrophoneState(UINT idx)
+{
+    _microphoneState[idx].dwLineID = 0L;
+    _microphoneState[idx].dwVolumeControlID = 0L;
+    _microphoneState[idx].dwMuteControlID = 0L;
+    _microphoneState[idx].dwOnOffControlID = 0L;
+    _microphoneState[idx].microphoneIsValid = false;
+    _microphoneState[idx].muteControlIsValid = false;
+    _microphoneState[idx].volumeControlIsValid = false;
+    _microphoneState[idx].onOffControlIsValid = false;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearMicrophoneState II (II)
+//
+//  Resets the cached microphone-line state for every mixer device slot
+//  (0..MAX_NUMBER_MIXER_DEVICES-1).
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearMicrophoneState()
+{
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        ClearMicrophoneState(i);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsValid
+//
+//  Returns the cached validity flag for the microphone line of mixer device
+//  |idx|. No bounds check is performed on idx.
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::MicrophoneIsValid(UINT idx) const
+{
+    return (_microphoneState[idx].microphoneIsValid);
+
+}
+
+// ----------------------------------------------------------------------------
+//  ValidMicrophones
+//
+//  Counts number of valid microphone lines for all mixer devices.
+//  To be valid, a microphone line must exist.
+//  (Fixed: the original comment said "speaker destinations" — a copy-paste
+//  of the ValidSpeakers description.)
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::ValidMicrophones() const
+{
+    UINT nMicrophones(0);
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        if (MicrophoneIsValid(i))
+            nMicrophones++;
+    }
+    return nMicrophones;
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveInError
+//
+//  Logs (via WEBRTC_TRACE) the textual description that waveInGetErrorText
+//  provides for the given MMRESULT error code.
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceWaveInError(MMRESULT error) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR msg[MAXERRORLENGTH];
+
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    waveInGetErrorText(error, msg, MAXERRORLENGTH);
+    StringCchCat(buf, MAXERRORLENGTH, msg);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveOutError
+//
+//  Logs (via WEBRTC_TRACE) the textual description that waveOutGetErrorText
+//  provides for the given MMRESULT error code.
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceWaveOutError(MMRESULT error) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR msg[MAXERRORLENGTH];
+
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    waveOutGetErrorText(error, msg, MAXERRORLENGTH);
+    StringCchCat(buf, MAXERRORLENGTH, msg);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  WideToUTF8
+//
+//  Converts a wide-char string to UTF-8 when UNICODE is defined; otherwise
+//  returns the input pointer unchanged. The result points into the shared
+//  mutable member buffer _str, so it is only valid until the next call on
+//  this object (not thread safe).
+// ----------------------------------------------------------------------------
+
+char* AudioMixerManager::WideToUTF8(const TCHAR* src) const {
+#ifdef UNICODE
+    const size_t kStrLen = sizeof(_str);
+    memset(_str, 0, kStrLen);
+    // Get required size (in bytes) to be able to complete the conversion.
+    // NOTE(review): _str is passed with cbMultiByte == 0; per the API the
+    // output buffer is unused in that mode, but passing NULL would be clearer.
+    // Also, required_size is 0 on failure, which still enters the branch
+    // below; the second call then fails too and _str stays all-zero.
+    int required_size = WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, 0, 0, 0);
+    // Signed/unsigned comparison (int vs size_t); safe here since
+    // required_size is never negative.
+    if (required_size <= kStrLen)
+    {
+        // Process the entire input string, including the terminating null char.
+        if (WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, kStrLen, 0, 0) == 0)
+            memset(_str, 0, kStrLen);
+    }
+    // On any failure (or an over-long input) an empty string is returned.
+    return _str;
+#else
+    return const_cast<char*>(src);
+#endif
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_device/main/source/win/audio_mixer_manager_win.h b/trunk/src/modules/audio_device/main/source/win/audio_mixer_manager_win.h
new file mode 100644
index 0000000..da9de47
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/source/win/audio_mixer_manager_win.h
@@ -0,0 +1,139 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_WIN_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+#include <Windows.h>
+#include <mmsystem.h>
+
+namespace webrtc {
+
+// Manages speaker and microphone volume/mute/boost controls through the
+// Windows Multimedia mixer API (mmsystem.h). State for each mixer device
+// is cached in _speakerState/_microphoneState after enumeration.
+class AudioMixerManager
+{
+public:
+    enum { MAX_NUMBER_MIXER_DEVICES = 40 };
+    enum { MAX_NUMBER_OF_LINE_CONTROLS = 20 };
+    enum { MAX_NUMBER_OF_MULTIPLE_ITEMS = 20 };
+    // Cached IDs and validity flags for a speaker (destination) line.
+    struct SpeakerLineInfo
+    {
+        DWORD dwLineID;
+        bool  speakerIsValid;
+        DWORD dwVolumeControlID;
+        bool  volumeControlIsValid;
+        DWORD dwMuteControlID;
+        bool  muteControlIsValid;
+    };
+    // Cached IDs and validity flags for a microphone line; also carries an
+    // on/off control used for e.g. boost handling.
+    struct MicrophoneLineInfo
+    {
+        DWORD dwLineID;
+        bool  microphoneIsValid;
+        DWORD dwVolumeControlID;
+        bool  volumeControlIsValid;
+        DWORD dwMuteControlID;
+        bool  muteControlIsValid;
+        DWORD dwOnOffControlID;
+        bool  onOffControlIsValid;
+    };
+public:
+    WebRtc_Word32 EnumerateAll();
+    WebRtc_Word32 EnumerateSpeakers();
+    WebRtc_Word32 EnumerateMicrophones();
+    WebRtc_Word32 OpenSpeaker(AudioDeviceModule::WindowsDeviceType device);
+    WebRtc_Word32 OpenSpeaker(WebRtc_UWord16 index);
+    WebRtc_Word32 OpenMicrophone(AudioDeviceModule::WindowsDeviceType device);
+    WebRtc_Word32 OpenMicrophone(WebRtc_UWord16 index);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+    UINT Devices() const;
+
+private:
+    // Low-level helpers wrapping mixerGetDevCaps/mixerGetLineInfo/
+    // mixerGetLineControls/mixerGet(Set)ControlDetails.
+    UINT DestinationLines(UINT mixId) const;
+    UINT SourceLines(UINT mixId, DWORD destId) const;
+    bool GetCapabilities(UINT mixId, MIXERCAPS& caps, bool trace = false) const;
+    bool GetDestinationLineInfo(UINT mixId, DWORD destId, MIXERLINE& line, bool trace = false) const;
+    bool GetSourceLineInfo(UINT mixId, DWORD destId, DWORD srcId, MIXERLINE& line, bool trace = false) const;
+
+    bool GetAllLineControls(UINT mixId, const MIXERLINE& line, MIXERCONTROL* controlArray, bool trace = false) const;
+    bool GetLineControl(UINT mixId, DWORD dwControlID, MIXERCONTROL& control) const;
+    bool GetControlDetails(UINT mixId, MIXERCONTROL& controlArray, bool trace = false) const;
+    bool GetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD& dwValue) const;
+    bool SetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD dwValue) const;
+    bool SetBooleanControlValue(UINT mixId, DWORD dwControlID, bool value) const;
+    bool GetBooleanControlValue(UINT mixId, DWORD dwControlID, bool& value) const;
+    bool GetSelectedMuxSource(UINT mixId, DWORD dwControlID, DWORD cMultipleItems, UINT& index) const;
+
+private:
+    // Cached-state management and trace helpers (implemented in the .cc).
+    void ClearSpeakerState();
+    void ClearSpeakerState(UINT idx);
+    void ClearMicrophoneState();
+    void ClearMicrophoneState(UINT idx);
+    bool SpeakerIsValid(UINT idx) const;
+    UINT ValidSpeakers() const;
+    bool MicrophoneIsValid(UINT idx) const;
+    UINT ValidMicrophones() const;
+
+    void TraceStatusAndSupportFlags(DWORD fdwLine) const;
+    void TraceTargetType(DWORD dwType) const;
+    void TraceComponentType(DWORD dwComponentType) const;
+    void TraceControlType(DWORD dwControlType) const;
+    void TraceControlStatusAndSupportFlags(DWORD fdwControl) const;
+    void TraceWaveInError(MMRESULT error) const;
+    void TraceWaveOutError(MMRESULT error) const;
+    // Converts from wide-char to UTF-8 if UNICODE is defined.
+    // Does nothing if UNICODE is undefined.
+    char* WideToUTF8(const TCHAR* src) const;
+
+public:
+    AudioMixerManager(const WebRtc_Word32 id);
+    ~AudioMixerManager();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32           _id;
+    // Open mixer handle/device ID for the output (speaker) direction.
+    HMIXER                  _outputMixerHandle;
+    UINT                    _outputMixerID;
+    // Open mixer handle/device ID for the input (microphone) direction.
+    HMIXER                  _inputMixerHandle;
+    UINT                    _inputMixerID;
+    SpeakerLineInfo         _speakerState[MAX_NUMBER_MIXER_DEVICES];
+    MicrophoneLineInfo      _microphoneState[MAX_NUMBER_MIXER_DEVICES];
+    // Scratch buffer returned by WideToUTF8(); mutable so const trace
+    // methods can write to it. Shared — not thread safe.
+    mutable char            _str[MAXERRORLENGTH];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_WIN_H
diff --git a/trunk/src/modules/audio_device/main/test/README.txt b/trunk/src/modules/audio_device/main/test/README.txt
new file mode 100644
index 0000000..7435ac5
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/README.txt
@@ -0,0 +1,23 @@
+INSTRUCTIONS:
+
+- Start with test #3 (Device enumeration) to get an overview of the available
+  audio devices.
+- Next, proceed with test #4 (Device selection) to get more details about 
+  the supported functions for each audio device.
+- Verify two-way audio in test #5. 
+  Repeat this test for different selections of playout and recording devices.
+- More detailed tests (volume, mute etc.) can also be performed using #6-#11.
+
+NOTE:
+
+- Some tests require that the user opens up the audio mixer dialog and 
+  verifies that a certain action (e.g. Mute ON/OFF) is executed correctly.
+- Files can be recorded during some tests to enable off-line analysis.
+- Full support of 'Default Communication' devices requires Windows 7.
+- If a test consists of several sub tests, press any key to start a new sub test.
+
+KNOWN ISSUES:
+
+- Microphone Boost control is not supported on Windows Vista or Windows 7.
+- Speaker and microphone volume controls will not work as intended on Windows
+  Vista if a 'Default Communication' device is selected in any direction.
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/.classpath b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/.classpath
new file mode 100644
index 0000000..6e9239f
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/AndroidManifest.xml b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/AndroidManifest.xml
new file mode 100644
index 0000000..d8117f5
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/AndroidManifest.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  android:versionCode="1"
+	  android:versionName="1.0" package="org.webrtc.voiceengine.test">
+  <application android:icon="@drawable/icon"
+	       android:label="@string/app_name"
+	       android:debuggable="true">
+    <activity android:label="@string/app_name"
+	      android:name="AudioDeviceAndroidTest">
+      <intent-filter>
+        <action android:name="android.intent.action.MAIN" />
+        <category android:name="android.intent.category.LAUNCHER" />
+      </intent-filter>
+    </activity>
+  </application>
+
+  <uses-sdk android:minSdkVersion="3"></uses-sdk>
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS">
+  </uses-permission>
+  <uses-permission android:name="android.permission.RECORD_AUDIO">
+  </uses-permission>
+</manifest> 
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/default.properties b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/default.properties
new file mode 100644
index 0000000..19ddebd
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target.

+target=android-3

diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/gen/org/webrtc/voiceengine/test/R.java b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/gen/org/webrtc/voiceengine/test/R.java
new file mode 100644
index 0000000..a295780
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/gen/org/webrtc/voiceengine/test/R.java
@@ -0,0 +1,26 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.voiceengine.test;

+

+public final class R {

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int icon=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f050000;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int app_name=0x7f040000;

+        public static final int run_button=0x7f040001;

+    }

+}

diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/jni/audio_device_android_test.cc b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/jni/audio_device_android_test.cc
new file mode 100644
index 0000000..f46c6b2
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/jni/audio_device_android_test.cc
@@ -0,0 +1,108 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h> // memset
+#include <android/log.h>
+
+#include "org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h"
+
+#include "../../../../interface/audio_device.h"
+
+#define LOG_TAG "WebRTC ADM Native"
+
+void api_test();
+void func_test(int);
+
+typedef struct
+{
+    // Java VM pointer, cached in JNI_OnLoad and later handed to
+    // AudioDeviceModule::SetAndroidObjects in RunTest.
+    JavaVM* jvm;
+} AdmData;
+
+static AdmData admData;
+
+// Called by the Android runtime when the shared library is loaded.
+// Validates the VM pointer, checks that a JNI 1.4 environment is available,
+// and caches the JavaVM in admData for later use by RunTest.
+// Returns JNI_VERSION_1_4 on success, -1 on failure.
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
+{
+    if (!vm)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, LOG_TAG,
+                            "JNI_OnLoad did not receive a valid VM pointer");
+        return -1;
+    }
+
+    // Get JNI
+    JNIEnv* env;
+    if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                             JNI_VERSION_1_4))
+    {
+        __android_log_write(ANDROID_LOG_ERROR, LOG_TAG,
+                            "JNI_OnLoad could not get JNI env");
+        return -1;
+    }
+
+    // Explicit native-method registration is left disabled; the natives are
+    // resolved by their Java_org_webrtc_... names instead.
+    // Get class to register the native functions with
+    // jclass regClass =
+    // env->FindClass("org/webrtc/voiceengine/test/AudioDeviceAndroidTest");
+    // if (!regClass) {
+    // return -1; // Exception thrown
+    // }
+
+    // Register native functions
+    // JNINativeMethod methods[1];
+    // methods[0].name = NULL;
+    // methods[0].signature = NULL;
+    // methods[0].fnPtr = NULL;
+    // if (JNI_OK != env->RegisterNatives(regClass, methods, 1))
+    // {
+    // return -1;
+    // }
+
+    // Init VoiceEngine data
+    memset(&admData, 0, sizeof(admData));
+
+    // Store the JVM
+    admData.jvm = vm;
+
+    return JNI_VERSION_1_4;
+}
+
+// Native hook for AudioDeviceAndroidTest.NativeInit(). Currently a no-op
+// placeholder that always reports success.
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_NativeInit(JNIEnv * env,
+                                                                   jclass)
+{
+    // Look up and cache any interesting class, field and method IDs for
+    // any used java class here
+
+    return true;
+}
+
+// Native hook for AudioDeviceAndroidTest.RunTest(test).
+// Installs the cached JavaVM plus the calling env/context on the ADM,
+// runs api_test() when test == 0 or func_test(test) otherwise, then clears
+// the Android objects again. Always returns 0.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_RunTest(JNIEnv *env,
+                                                                jobject context,
+                                                                jint test)
+{
+    // Set instance independent Java objects
+    webrtc::AudioDeviceModule::SetAndroidObjects(admData.jvm, env, context);
+
+    // Start test
+    if (0 == test)
+    {
+        api_test();
+    }
+    else
+    {
+        func_test(test);
+    }
+
+    // Clear instance independent Java objects
+    webrtc::AudioDeviceModule::SetAndroidObjects(NULL, NULL, NULL);
+
+    return 0;
+}
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/jni/org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/jni/org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h
new file mode 100644
index 0000000..5cbc56f
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/jni/org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h
@@ -0,0 +1,29 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_voiceengine_test_AudioDeviceAndroidTest */
+
+#ifndef _Included_org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+#define _Included_org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+ * Method:    NativeInit
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_NativeInit
+  (JNIEnv *, jclass);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+ * Method:    RunTest
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_RunTest
+  (JNIEnv *, jobject, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/drawable/icon.png b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/drawable/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/drawable/icon.png
Binary files differ
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/layout/main.xml b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/layout/main.xml
new file mode 100644
index 0000000..6161f1d
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/layout/main.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+
+  <Button android:text="@string/run_button"
+	  android:id="@+id/Button01"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+</LinearLayout>
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/values/strings.xml b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/values/strings.xml
new file mode 100644
index 0000000..bbb6f51
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/res/values/strings.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    
+    <string name="app_name">WebRTC Audio Device Android Test</string>
+<string name="run_button">Run Test</string>
+</resources>
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/AudioDeviceAndroid.java b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/AudioDeviceAndroid.java
new file mode 100644
index 0000000..4863168
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/AudioDeviceAndroid.java
@@ -0,0 +1 @@
+../../../../../../../source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
diff --git a/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/test/AudioDeviceAndroidTest.java b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/test/AudioDeviceAndroidTest.java
new file mode 100644
index 0000000..b87af46
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/test/AudioDeviceAndroidTest.java
@@ -0,0 +1,69 @@
+package org.webrtc.voiceengine.test;

+

+import android.app.Activity;

+import android.media.AudioManager;

+import android.os.Bundle;

+import android.util.Log;

+import android.view.View;

+import android.widget.Button;

+

+public class AudioDeviceAndroidTest extends Activity {

+    private Thread _testThread;

+

+    /** Called when the activity is first created. */

+    @Override

+    public void onCreate(Bundle savedInstanceState) {

+        super.onCreate(savedInstanceState);

+        setContentView(R.layout.main);

+

+        final Button buttonStart = (Button) findViewById(R.id.Button01);

+        // buttonStart.setWidth(200);

+        // button.layout(50, 50, 100, 40);

+        buttonStart.setOnClickListener(new View.OnClickListener() {

+            public void onClick(View v) {

+                _testThread = new Thread(_testProc);

+                _testThread.start();

+            }

+        });

+

+        // Suggest to use the voice call audio stream for hardware volume

+        // controls

+        setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);

+

+        DoLog("Started WebRTC Android ADM Test");

+    }

+

+    private Runnable _testProc = new Runnable() {

+        public void run() {

+            // TODO(xians), choose test from GUI

+            // Select test here, 0 for API test, 1-> for Func tests

+            RunTest(5);

+        }

+    };

+

+    private void DoLog(String msg) {

+        Log.d("*WebRTC ADM*", msg);

+    }

+

+    // //////////////// Native function prototypes ////////////////////

+

+    // Init wrapper

+    private native static boolean NativeInit();

+

+    // Function used to call test

+    private native int RunTest(int testType);

+

+    // Load native library

+    static {

+        Log.d("*WebRTC ADM*", "Loading audio_device_android_test...");

+        System.loadLibrary("audio_device_android_test");

+

+        Log.d("*WebRTC ADM*", "Calling native init...");

+        if (!NativeInit()) {

+            Log.e("*WebRTC ADM*", "Native init failed");

+            throw new RuntimeException("Native init failed");

+        } else {

+            Log.d("*WebRTC ADM*", "Native init successful");

+        }

+    }

+}

diff --git a/trunk/src/modules/audio_device/main/test/audio_device_test_api.cc b/trunk/src/modules/audio_device/main/test/audio_device_test_api.cc
new file mode 100644
index 0000000..22ac91a
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/audio_device_test_api.cc
@@ -0,0 +1,2243 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include "audio_device_test_defines.h"
+
+#include "../source/audio_device_config.h"
+#include "../source/audio_device_impl.h"
+#include "../source/audio_device_utility.h"
+
+// Helper functions
+#if defined(ANDROID)
+char filenameStr[2][256] =
+{   0}; // Allow two buffers for those API calls taking two filenames
+int currentStr = 0;
+
+char* GetFilename(char* filename)
+{
+    currentStr = !currentStr;
+    sprintf(filenameStr[currentStr], "/sdcard/admtest/%s", filename);
+    return filenameStr[currentStr];
+}
+const char* GetFilename(const char* filename)
+{
+    currentStr = !currentStr;
+    sprintf(filenameStr[currentStr], "/sdcard/admtest/%s", filename);
+    return filenameStr[currentStr];
+}
+int GetResource(char* resource, char* dest, int destLen)
+{
+    currentStr = !currentStr;
+    sprintf(filenameStr[currentStr], "/sdcard/admtest/%s", resource);
+    strncpy(dest, filenameStr[currentStr], destLen-1);
+    return 0;
+}
+char* GetResource(char* resource)
+{
+    currentStr = !currentStr;
+    sprintf(filenameStr[currentStr], "/sdcard/admtest/%s", resource);
+    return filenameStr[currentStr];
+}
+const char* GetResource(const char* resource)
+{
+    currentStr = !currentStr;
+    sprintf(filenameStr[currentStr], "/sdcard/admtest/%s", resource);
+    return filenameStr[currentStr];
+}
+#elif !defined(MAC_IPHONE)
+char* GetFilename(char* filename)
+{
+    return filename;
+}
+const char* GetFilename(const char* filename)
+{
+    return filename;
+}
+char* GetResource(char* resource)
+{
+    return resource;
+}
+const char* GetResource(const char* resource)
+{
+    return resource;
+}
+#endif
+
+using namespace webrtc;
+
+// ----------------------------------------------------------------------------
+//  AudioEventObserverAPI
+// ----------------------------------------------------------------------------
+
+class AudioEventObserverAPI: public AudioDeviceObserver
+{
+public:
+    AudioEventObserverAPI(AudioDeviceModule* audioDevice) :
+        _error(kRecordingError),
+        _warning(kRecordingWarning),
+        _audioDevice(audioDevice)
+    {
+    }
+    ;
+    ~AudioEventObserverAPI()
+    {
+    }
+    ;
+    virtual void OnErrorIsReported(const ErrorCode error)
+    {
+        TEST_LOG("\n[*** ERROR ***] => OnErrorIsReported(%d)\n\n", error);
+        _error = error;
+        // TEST(_audioDevice->StopRecording() == 0);
+        // TEST(_audioDevice->StopPlayout() == 0);
+    }
+    ;
+    virtual void OnWarningIsReported(const WarningCode warning)
+    {
+        TEST_LOG("\n[*** WARNING ***] => OnWarningIsReported(%d)\n\n", warning);
+        _warning = warning;
+        TEST(_audioDevice->StopRecording() == 0);
+        TEST(_audioDevice->StopPlayout() == 0);
+    }
+    ;
+public:
+    ErrorCode _error;
+    WarningCode _warning;
+private:
+    AudioDeviceModule* _audioDevice;
+};
+
+class AudioTransportAPI: public AudioTransport
+{
+public:
+    AudioTransportAPI(AudioDeviceModule* audioDevice) :
+        _audioDevice(audioDevice), _recCount(0), _playCount(0)
+    {
+    }
+    ;
+
+    ~AudioTransportAPI()
+    {
+    }
+    ;
+
+    virtual WebRtc_Word32 RecordedDataIsAvailable(
+        const WebRtc_Word8* audioSamples,
+        const WebRtc_UWord32 nSamples,
+        const WebRtc_UWord8 nBytesPerSample,
+        const WebRtc_UWord8 nChannels,
+        const WebRtc_UWord32 sampleRate,
+        const WebRtc_UWord32 totalDelay,
+        const WebRtc_Word32 clockSkew,
+        const WebRtc_UWord32 currentMicLevel,
+        WebRtc_UWord32& newMicLevel)
+    {
+        _recCount++;
+        if (_recCount % 100 == 0)
+        {
+            if (nChannels == 1)
+            {
+                // mono
+                TEST_LOG("-");
+            } else if ((nChannels == 2) && (nBytesPerSample == 2))
+            {
+                // stereo but only using one channel
+                TEST_LOG("-|");
+            } else
+            {
+                // stereo
+                TEST_LOG("--");
+            }
+        }
+
+        return 0;
+    }
+
+    virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
+                                           const WebRtc_UWord8 nBytesPerSample,
+                                           const WebRtc_UWord8 nChannels,
+                                           const WebRtc_UWord32 sampleRate,
+                                           WebRtc_Word8* audioSamples,
+                                           WebRtc_UWord32& nSamplesOut)
+    {
+        _playCount++;
+        if (_playCount % 100 == 0)
+        {
+            if (nChannels == 1)
+            {
+                TEST_LOG("+");
+            } else
+            {
+                TEST_LOG("++");
+            }
+        }
+
+        nSamplesOut = 480;
+
+        return 0;
+    }
+    ;
+private:
+    AudioDeviceModule* _audioDevice;
+    WebRtc_UWord32 _recCount;
+    WebRtc_UWord32 _playCount;
+};
+
+int api_test();
+
+
+#if !defined(MAC_IPHONE) && !defined(ANDROID)
+int api_test();
+
+int main(int /*argc*/, char* /*argv*/[])
+{
+    api_test();
+}
+#endif
+
+int api_test()
+{
+    int i(0);
+
+    TEST_LOG("========================================\n");
+    TEST_LOG("API Test of the WebRtcAudioDevice Module\n");
+    TEST_LOG("========================================\n\n");
+
+    ProcessThread* processThread = ProcessThread::CreateProcessThread();
+    processThread->Start();
+
+    // =======================================================
+    // AudioDeviceModuleImpl::Create
+    //
+    // Windows:
+    //      if (WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    //          user can select between default (Core) or Wave
+    //      else
+    //          user can select between default (Wave) or Wave
+    // =======================================================
+
+    const WebRtc_Word32 myId = 444;
+    AudioDeviceModule* audioDevice(NULL);
+
+#if defined(_WIN32)
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
+#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is defined!\n\n");
+    // create default implementation (=Core Audio) instance
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+    audioDevice->AddRef();
+    TEST(audioDevice->Release() == 0);
+    // create non-default (=Wave Audio) instance
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
+    audioDevice->AddRef();
+    TEST(audioDevice->Release() == 0);
+    // explicitly specify usage of Core Audio (same as default)
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsCoreAudio)) != NULL);
+#else
+    TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is *not* defined!\n");
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    // create default implementation (=Wave Audio) instance
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+    audioDevice->AddRef();
+    TEST(audioDevice->Release() == 0);
+    // explicitly specify usage of Wave Audio (same as default)
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
+#endif
+#endif
+
+#if defined(ANDROID)
+    // Fails tests
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
+    // Create default implementation instance
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+#elif defined(WEBRTC_LINUX)
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    // create default implementation instance
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+    audioDevice->AddRef();
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Release() == 0);
+    // explicitly specify usage of Pulse Audio (same as default)
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kLinuxPulseAudio)) != NULL);
+#endif
+
+#if defined(WEBRTC_MAC)
+    // Fails tests
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
+    // Create default implementation instance
+    TEST((audioDevice = AudioDeviceModuleImpl::Create(
+        myId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+#endif
+
+    if (audioDevice == NULL)
+    {
+#ifdef _WIN32
+        goto Exit;
+#else
+        TEST_LOG("Failed creating audio device object! \n");
+        return 0;
+#endif
+    }
+
+    // The ADM is reference counted.
+    audioDevice->AddRef();
+
+    processThread->RegisterModule(audioDevice);
+
+    AudioDeviceModule::AudioLayer audioLayer =
+        AudioDeviceModule::kPlatformDefaultAudio;
+    TEST(audioDevice->ActiveAudioLayer(&audioLayer) == 0);
+    if (audioLayer == AudioDeviceModule::kLinuxAlsaAudio) {
+      TEST_LOG("API Test is not available on ALSA. \n");
+      processThread->DeRegisterModule(audioDevice);
+      TEST(audioDevice->Terminate() == 0);
+      TEST(audioDevice->Release() == 0);
+      return 0;
+    }
+
+    TEST_LOG("Testing...\n\n");
+
+    // =====================
+    // RegisterEventObserver
+    // =====================
+
+    AudioEventObserverAPI* eventObserver =
+        new AudioEventObserverAPI(audioDevice);
+
+    TEST(audioDevice->RegisterEventObserver(NULL) == 0);
+    TEST(audioDevice->RegisterEventObserver(eventObserver) == 0);
+    TEST(audioDevice->RegisterEventObserver(NULL) == 0);
+
+    // =====================
+    // RegisterAudioCallback
+    // =====================
+
+    AudioTransportAPI* audioTransport = new AudioTransportAPI(audioDevice);
+
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+    TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    // ====
+    // Init
+    // ====
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    // =========
+    // Terminate
+    // =========
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    // ------------------------------------------------------------------------
+    // Ensure that we keep audio device initialized for all API tests:
+    //
+    TEST(audioDevice->Init() == 0);
+    // ------------------------------------------------------------------------
+
+    // goto SHORTCUT;
+
+    WebRtc_Word16 nDevices(0);
+
+    // ==============
+    // PlayoutDevices
+    // ==============
+
+    TEST((nDevices = audioDevice->PlayoutDevices()) > 0);
+    TEST((nDevices = audioDevice->PlayoutDevices()) > 0);
+
+    // ================
+    // RecordingDevices
+    // ================
+
+    TEST((nDevices = audioDevice->RecordingDevices()) > 0);
+    TEST((nDevices = audioDevice->RecordingDevices()) > 0);
+
+    // =================
+    // PlayoutDeviceName
+    // =================
+
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize];
+    WebRtc_Word8 guid[kAdmMaxGuidSize];
+
+    nDevices = audioDevice->PlayoutDevices();
+
+    // fail tests
+    TEST(audioDevice->PlayoutDeviceName(-2, name, guid) == -1);
+    TEST(audioDevice->PlayoutDeviceName(nDevices, name, guid) == -1);
+    TEST(audioDevice->PlayoutDeviceName(0, NULL, guid) == -1);
+
+    // bulk tests
+    TEST(audioDevice->PlayoutDeviceName(0, name, NULL) == 0);
+#ifdef _WIN32
+    TEST(audioDevice->PlayoutDeviceName(-1, name, NULL) == 0); // shall be mapped to 0
+#else
+    TEST(audioDevice->PlayoutDeviceName(-1, name, NULL) == -1);
+#endif
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->PlayoutDeviceName(i, name, guid) == 0);
+        TEST(audioDevice->PlayoutDeviceName(i, name, NULL) == 0);
+    }
+
+    // ===================
+    // RecordingDeviceName
+    // ===================
+
+    nDevices = audioDevice->RecordingDevices();
+
+    // fail tests
+    TEST(audioDevice->RecordingDeviceName(-2, name, guid) == -1);
+    TEST(audioDevice->RecordingDeviceName(nDevices, name, guid) == -1);
+    TEST(audioDevice->RecordingDeviceName(0, NULL, guid) == -1);
+
+    // bulk tests
+    TEST(audioDevice->RecordingDeviceName(0, name, NULL) == 0);
+#ifdef _WIN32
+    TEST(audioDevice->RecordingDeviceName(-1, name, NULL) == 0); // shall me mapped to 0
+#else
+    TEST(audioDevice->RecordingDeviceName(-1, name, NULL) == -1);
+#endif
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->RecordingDeviceName(i, name, guid) == 0);
+        TEST(audioDevice->RecordingDeviceName(i, name, NULL) == 0);
+    }
+
+    // ================
+    // SetPlayoutDevice
+    // ================
+
+    nDevices = audioDevice->PlayoutDevices();
+
+    // fail tests
+    TEST(audioDevice->SetPlayoutDevice(-1) == -1);
+    TEST(audioDevice->SetPlayoutDevice(nDevices) == -1);
+
+    // bulk tests
+#ifdef _WIN32
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == 0);
+#else
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == -1);
+#endif
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+    }
+
+    // ==================
+    // SetRecordingDevice
+    // ==================
+
+    nDevices = audioDevice->RecordingDevices();
+
+    // fail tests
+    TEST(audioDevice->SetRecordingDevice(-1) == -1);
+    TEST(audioDevice->SetRecordingDevice(nDevices) == -1);
+
+    // bulk tests
+#ifdef _WIN32
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice) == 0);
+#else
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultDevice) == -1);
+#endif
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+    }
+
+    // ==================
+    // PlayoutIsAvailable
+    // ==================
+
+    bool available(false);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+#ifdef _WIN32
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    TEST(audioDevice->PlayoutIsInitialized() == false); // availability check should not initialize
+
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+#endif
+
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+        TEST(audioDevice->PlayoutIsInitialized() == false);
+    }
+
+    // ====================
+    // RecordingIsAvailable
+    // ====================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+#ifdef _WIN32
+    TEST(audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+
+    TEST(audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+#endif
+
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+        TEST(audioDevice->RecordingIsInitialized() == false);
+    }
+
+    // ===========
+    // InitPlayout
+    // ===========
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial state
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+
+    // ensure that device must be set before we can initialize
+    TEST(audioDevice->InitPlayout() == -1);
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->PlayoutIsInitialized() == true);
+
+    // bulk tests
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->PlayoutIsInitialized() == true);
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == -1);
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->PlayoutIsInitialized() == false);
+    }
+
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitPlayout() == 0);
+        // Sleep is needed for e.g. iPhone since we after stopping then starting may
+        // have a hangover time of a couple of ms before initialized.
+        AudioDeviceUtility::Sleep(50);
+        TEST(audioDevice->PlayoutIsInitialized() == true);
+    }
+
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->StopPlayout() == 0);
+            TEST(audioDevice->PlayoutIsInitialized() == false);
+            TEST(audioDevice->SetPlayoutDevice(i) == 0);
+            TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+            if (available)
+            {
+                TEST(audioDevice->InitPlayout() == 0);
+                TEST(audioDevice->PlayoutIsInitialized() == true);
+            }
+        }
+    }
+
+    TEST(audioDevice->StopPlayout() == 0);
+
+    // =============
+    // InitRecording
+    // =============
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial state
+    TEST(audioDevice->RecordingIsInitialized() == false);
+
+    // ensure that device must be set before we can initialize
+    TEST(audioDevice->InitRecording() == -1);
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->InitRecording() == 0);
+    TEST(audioDevice->RecordingIsInitialized() == true);
+
+    // bulk tests
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->RecordingIsInitialized() == true);
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == -1);
+        TEST(audioDevice->StopRecording() == 0);
+        TEST(audioDevice->RecordingIsInitialized() == false);
+    }
+
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitRecording() == 0);
+        AudioDeviceUtility::Sleep(50);
+        TEST(audioDevice->RecordingIsInitialized() == true);
+    }
+
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->StopRecording() == 0);
+            TEST(audioDevice->RecordingIsInitialized() == false);
+            TEST(audioDevice->SetRecordingDevice(i) == 0);
+            TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+            if (available)
+            {
+                TEST(audioDevice->InitRecording() == 0);
+                TEST(audioDevice->RecordingIsInitialized() == true);
+            }
+        }
+    }
+
+    TEST(audioDevice->StopRecording() == 0);
+
+    // ============
+    // StartPlayout
+    // StopPlayout
+    // ============
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    TEST(audioDevice->StartPlayout() == -1);
+    TEST(audioDevice->StopPlayout() == 0);
+
+#ifdef _WIN32
+    // kDefaultCommunicationDevice
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->PlayoutIsInitialized() == false);
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+        TEST(audioDevice->Playing() == true);
+        TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->Playing() == false);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+    }
+#endif
+
+    // repeat test but for kDefaultDevice
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->PlayoutIsInitialized() == false);
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+        TEST(audioDevice->Playing() == true);
+        TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->Playing() == false);
+    }
+
+    // repeat test for all devices
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->PlayoutIsInitialized() == false);
+            TEST(audioDevice->InitPlayout() == 0);
+            TEST(audioDevice->StartPlayout() == 0);
+            TEST(audioDevice->Playing() == true);
+            TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+            TEST(audioDevice->StopPlayout() == 0);
+            TEST(audioDevice->Playing() == false);
+        }
+    }
+
+    // ==============
+    // StartRecording
+    // StopRecording
+    // ==============
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+
+    TEST(audioDevice->StartRecording() == -1);
+    TEST(audioDevice->StopRecording() == 0);
+
+#ifdef _WIN32
+    // kDefaultCommunicationDevice
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->RecordingIsInitialized() == false);
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StartRecording() == 0);
+        TEST(audioDevice->Recording() == true);
+        TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+        TEST(audioDevice->StopRecording() == 0);
+        TEST(audioDevice->Recording() == false);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+    }
+#endif
+
+    // repeat test but for kDefaultDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->RecordingIsInitialized() == false);
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StartRecording() == 0);
+        TEST(audioDevice->Recording() == true);
+        TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+        TEST(audioDevice->StopRecording() == 0);
+        TEST(audioDevice->Recording() == false);
+    }
+
+    // repeat test for all devices
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->RecordingIsInitialized() == false);
+            TEST(audioDevice->InitRecording() == 0);
+            TEST(audioDevice->StartRecording() == 0);
+            TEST(audioDevice->Recording() == true);
+            TEST(audioDevice->RegisterAudioCallback(audioTransport) == 0);
+            TEST(audioDevice->StopRecording() == 0);
+            TEST(audioDevice->Recording() == false);
+        }
+    }
+
    // Loop/scratch volume value; 32-bit so the += step in the loops below
    // cannot wrap around when it passes maxVol (0xFFFF).
    WebRtc_UWord32 vol(0);

#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)

    // ================
    // SetWaveOutVolume
    // GetWaveOutVolume
    // ================

    // NOTE 1: Windows Wave only!
    // NOTE 2: It seems like the waveOutSetVolume API returns
    // MMSYSERR_NOTSUPPORTED on some Vista machines!

    const WebRtc_UWord16 maxVol(0xFFFF);
    WebRtc_UWord16 volL, volR;

    // Restart the module so the section begins from a known clean state.
    TEST(audioDevice->Terminate() == 0);
    TEST(audioDevice->Init() == 0);

    // check initial states
    TEST(audioDevice->Initialized() == true);
    TEST(audioDevice->PlayoutIsInitialized() == false);
    TEST(audioDevice->Playing() == false);

    // make dummy test to see if this API is supported
    // (only a WARNING, not a TEST failure — see NOTE 2 above)
    WebRtc_Word32 works = audioDevice->SetWaveOutVolume(vol, vol);
    WARNING(works == 0);

    if (works == 0)
    {
        // set volume without open playout device
        // Sweep 0..maxVol in six steps; read-back must match exactly.
        for (vol = 0; vol <= maxVol; vol += (maxVol/5))
        {
            TEST(audioDevice->SetWaveOutVolume(vol, vol) == 0);
            TEST(audioDevice->WaveOutVolume(volL, volR) == 0);
            TEST((volL==vol) && (volR==vol));
        }

        // repeat test but this time with an open (default) output device
        TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == 0);
        TEST(audioDevice->InitPlayout() == 0);
        TEST(audioDevice->PlayoutIsInitialized() == true);
        for (vol = 0; vol <= maxVol; vol += (maxVol/5))
        {
            TEST(audioDevice->SetWaveOutVolume(vol, vol) == 0);
            TEST(audioDevice->WaveOutVolume(volL, volR) == 0);
            TEST((volL==vol) && (volR==vol));
        }

        // as above but while playout is active
        TEST(audioDevice->StartPlayout() == 0);
        TEST(audioDevice->Playing() == true);
        for (vol = 0; vol <= maxVol; vol += (maxVol/5))
        {
            TEST(audioDevice->SetWaveOutVolume(vol, vol) == 0);
            TEST(audioDevice->WaveOutVolume(volL, volR) == 0);
            TEST((volL==vol) && (volR==vol));
        }
    }

    // Always stop playout, even when the wave-volume API was unsupported.
    TEST(audioDevice->StopPlayout() == 0);
    TEST(audioDevice->Playing() == false);

#endif  // defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    // ==================
+    // SpeakerIsAvailable
+    // ==================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+#endif
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+        TEST(audioDevice->SpeakerIsInitialized() == false);
+    }
+
+    // ===========
+    // InitSpeaker
+    // ===========
+
+    // NOTE: we call Terminate followed by Init to ensure that any existing output mixer
+    // handle is set to NULL. The mixer handle is closed and reopend again for each call to
+    // SetPlayoutDevice.
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+    // kDefaultCommunicationDevice
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+    }
+
+    // fail tests
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+        TEST(audioDevice->InitSpeaker() == -1);
+        TEST(audioDevice->StopPlayout() == 0);
+    }
+
+    // kDefaultDevice
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+    }
+
+    // repeat test for all devices
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->InitSpeaker() == 0);
+        }
+    }
+
+    // =====================
+    // MicrophoneIsAvailable
+    // =====================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+#endif
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+        TEST(audioDevice->MicrophoneIsInitialized() == false);
+    }
+
+    // ==============
+    // InitMicrophone
+    // ==============
+
+    // NOTE: we call Terminate followed by Init to ensure that any existing input mixer
+    // handle is set to NULL. The mixer handle is closed and reopend again for each call to
+    // SetRecordingDevice.
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // kDefaultCommunicationDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+    }
+
+    // fail tests
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StartRecording() == 0);
+        TEST(audioDevice->InitMicrophone() == -1);
+        TEST(audioDevice->StopRecording() == 0);
+    }
+
+    // kDefaultDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+    }
+
+    // repeat test for all devices
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->InitMicrophone() == 0);
+        }
+    }
+
+    // ========================
+    // SpeakerVolumeIsAvailable
+    // ========================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+#endif
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+        TEST(audioDevice->SpeakerIsInitialized() == false);
+    }
+
+    // ================
+    // SetSpeakerVolume
+    // SpeakerVolume
+    // MaxSpeakerVolume
+    // MinSpeakerVolume
+    // ================
+
+    WebRtc_UWord32 volume(0);
+    WebRtc_UWord32 maxVolume(0);
+    WebRtc_UWord32 minVolume(0);
+    WebRtc_UWord16 stepSize(0);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+    // fail tests
+    TEST(audioDevice->SetSpeakerVolume(0) == -1); // speaker must be initialized first
+    TEST(audioDevice->SpeakerVolume(&volume) == -1);
+    TEST(audioDevice->MaxSpeakerVolume(&maxVolume) == -1);
+    TEST(audioDevice->MinSpeakerVolume(&minVolume) == -1);
+    TEST(audioDevice->SpeakerVolumeStepSize(&stepSize) == -1);
+
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    // test for warning (can e.g. happen on Vista with Wave API)
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == 0);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->SetSpeakerVolume(19001) == 0);
+        TEST(audioDevice->SpeakerVolume(&volume) == 0);
+        WARNING(volume == 19001);
+    }
+#endif
+
+#ifdef _WIN32
+    // use kDefaultCommunicationDevice and modify/retrieve the volume
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->MaxSpeakerVolume(&maxVolume) == 0);
+        TEST(audioDevice->MinSpeakerVolume(&minVolume) == 0);
+        TEST(audioDevice->SpeakerVolumeStepSize(&stepSize) == 0);
+        for (vol = minVolume; vol < (int)maxVolume; vol += 20*stepSize)
+        {
+            TEST(audioDevice->SetSpeakerVolume(vol) == 0);
+            TEST(audioDevice->SpeakerVolume(&volume) == 0);
+            TEST((volume == vol) || (volume == vol-1));
+        }
+    }
+#endif
+
+    // use kDefaultDevice and modify/retrieve the volume
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->MaxSpeakerVolume(&maxVolume) == 0);
+        TEST(audioDevice->MinSpeakerVolume(&minVolume) == 0);
+        TEST(audioDevice->SpeakerVolumeStepSize(&stepSize) == 0);
+        WebRtc_UWord32 step = (maxVolume - minVolume) / 10;
+        step = (step < stepSize ? stepSize : step);
+        for (vol = minVolume; vol <= maxVolume; vol += step)
+        {
+            TEST(audioDevice->SetSpeakerVolume(vol) == 0);
+            TEST(audioDevice->SpeakerVolume(&volume) == 0);
+            TEST((volume == vol) || (volume == vol-1));
+        }
+    }
+
+    // use all (indexed) devices and modify/retrieve the volume
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->InitSpeaker() == 0);
+            TEST(audioDevice->MaxSpeakerVolume(&maxVolume) == 0);
+            TEST(audioDevice->MinSpeakerVolume(&minVolume) == 0);
+            TEST(audioDevice->SpeakerVolumeStepSize(&stepSize) == 0);
+            WebRtc_UWord32 step = (maxVolume - minVolume) / 10;
+            step = (step < stepSize ? stepSize : step);
+            for (vol = minVolume; vol <= maxVolume; vol += step)
+            {
+                TEST(audioDevice->SetSpeakerVolume(vol) == 0);
+                TEST(audioDevice->SpeakerVolume(&volume) == 0);
+                TEST((volume == vol) || (volume == vol-1));
+            }
+        }
+    }
+
+    // restore reasonable level
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->MaxSpeakerVolume(&maxVolume) == 0);
+        TEST(audioDevice->SetSpeakerVolume(maxVolume < 10 ?
+            maxVolume/3 : maxVolume/10) == 0);
+    }
+
+    // ======
+    // SetAGC
+    // AGC
+    // ======
+
+    // NOTE: The AGC API only enables/disables the AGC. To ensure that it will
+    // have an effect, use it in combination with MicrophoneVolumeIsAvailable.
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+    TEST(audioDevice->AGC() == false);
+
+    // set/get tests
+    TEST(audioDevice->SetAGC(true) == 0);
+    TEST(audioDevice->AGC() == true);
+    TEST(audioDevice->SetAGC(false) == 0);
+    TEST(audioDevice->AGC() == false);
+
+    // ===========================
+    // MicrophoneVolumeIsAvailable
+    // ===========================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+#endif
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+        TEST(audioDevice->MicrophoneIsInitialized() == false);
+    }
+
+    // ===================
+    // SetMicrophoneVolume
+    // MicrophoneVolume
+    // MaxMicrophoneVolume
+    // MinMicrophoneVolume
+    // ===================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // fail tests
+    TEST(audioDevice->SetMicrophoneVolume(0) == -1); // must be initialized first
+    TEST(audioDevice->MicrophoneVolume(&volume) == -1);
+    TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == -1);
+    TEST(audioDevice->MinMicrophoneVolume(&minVolume) == -1);
+    TEST(audioDevice->MicrophoneVolumeStepSize(&stepSize) == -1);
+
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    // test for warning (can e.g. happen on Vista with Wave API)
+    TEST(audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice) == 0);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneVolume(19001) == 0);
+        TEST(audioDevice->MicrophoneVolume(&volume) == 0);
+        WARNING(volume == 19001);
+    }
+#endif
+
+#ifdef _WIN32
+    // initialize kDefaultCommunicationDevice and modify/retrieve the volume
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+        TEST(audioDevice->MinMicrophoneVolume(&minVolume) == 0);
+        TEST(audioDevice->MicrophoneVolumeStepSize(&stepSize) == 0);
+        for (vol = minVolume; vol < (int)maxVolume; vol += 10*stepSize)
+        {
+            TEST(audioDevice->SetMicrophoneVolume(vol) == 0);
+            TEST(audioDevice->MicrophoneVolume(&volume) == 0);
+            TEST((volume == vol) || (volume == vol-1));
+        }
+    }
+#endif
+
+    // reinitialize kDefaultDevice and modify/retrieve the volume
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+        TEST(audioDevice->MinMicrophoneVolume(&minVolume) == 0);
+        TEST(audioDevice->MicrophoneVolumeStepSize(&stepSize) == 0);
+        for (vol = minVolume; vol < maxVolume; vol += 10 * stepSize)
+        {
+            TEST(audioDevice->SetMicrophoneVolume(vol) == 0);
+            TEST(audioDevice->MicrophoneVolume(&volume) == 0);
+            TEST((volume == vol) || (volume == vol-1));
+        }
+    }
+
+    // use all (indexed) devices and modify/retrieve the volume
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->InitMicrophone() == 0);
+            TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+            TEST(audioDevice->MinMicrophoneVolume(&minVolume) == 0);
+            TEST(audioDevice->MicrophoneVolumeStepSize(&stepSize) == 0);
+            for (vol = minVolume; vol < maxVolume; vol += 20 * stepSize)
+            {
+                TEST(audioDevice->SetMicrophoneVolume(vol) == 0);
+                TEST(audioDevice->MicrophoneVolume(&volume) == 0);
+                TEST((volume == vol) || (volume == vol-1));
+            }
+        }
+    }
+
+    // restore reasonable level
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+        TEST(audioDevice->SetMicrophoneVolume(maxVolume/10) == 0);
+    }
+
+    // ======================
+    // SpeakerMuteIsAvailable
+    // ======================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+#endif
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->PlayoutDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+        TEST(audioDevice->SpeakerIsInitialized() == false);
+    }
+
+    // =========================
+    // MicrophoneMuteIsAvailable
+    // =========================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    #endif
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+        TEST(audioDevice->MicrophoneIsInitialized() == false);
+    }
+
+    // ==========================
+    // MicrophoneBoostIsAvailable
+    // ==========================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+#ifdef _WIN32
+    // check the kDefaultCommunicationDevice
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    // check for availability should not lead to initialization
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+#endif
+
+    // check the kDefaultDevice
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // check all availiable devices
+    nDevices = audioDevice->RecordingDevices();
+    for (i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+        TEST(audioDevice->MicrophoneIsInitialized() == false);
+    }
+
+    // ==============
+    // SetSpeakerMute
+    // SpeakerMute
+    // ==============
+
+    bool enabled(false);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->SpeakerIsInitialized() == false);
+
+    // fail tests
+    TEST(audioDevice->SetSpeakerMute(true) == -1); // requires initialization
+    TEST(audioDevice->SpeakerMute(&enabled) == -1);
+
+#ifdef _WIN32
+    // initialize kDefaultCommunicationDevice and modify/retrieve the mute state
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->SetSpeakerMute(true) == 0);
+        TEST(audioDevice->SpeakerMute(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetSpeakerMute(false) == 0);
+        TEST(audioDevice->SpeakerMute(&enabled) == 0);
+        TEST(enabled == false);
+    }
+#endif
+
+    // reinitialize kDefaultDevice and modify/retrieve the mute state
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->SetSpeakerMute(true) == 0);
+        TEST(audioDevice->SpeakerMute(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetSpeakerMute(false) == 0);
+        TEST(audioDevice->SpeakerMute(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // reinitialize the default device (0) and modify/retrieve the mute state
+    TEST(audioDevice->SetPlayoutDevice(0) == 0);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitSpeaker() == 0);
+        TEST(audioDevice->SetSpeakerMute(true) == 0);
+        TEST(audioDevice->SpeakerMute(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetSpeakerMute(false) == 0);
+        TEST(audioDevice->SpeakerMute(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // ==================
+    // SetMicrophoneMute
+    // MicrophoneMute
+    // ==================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // fail tests
+    TEST(audioDevice->SetMicrophoneMute(true) == -1); // requires initialization
+    TEST(audioDevice->MicrophoneMute(&enabled) == -1);
+
+#ifdef _WIN32
+    // initialize kDefaultCommunicationDevice and modify/retrieve the mute
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneMute(true) == 0);
+        TEST(audioDevice->MicrophoneMute(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetMicrophoneMute(false) == 0);
+        TEST(audioDevice->MicrophoneMute(&enabled) == 0);
+        TEST(enabled == false);
+    }
+#endif
+
+    // reinitialize kDefaultDevice and modify/retrieve the mute
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneMute(true) == 0);
+        TEST(audioDevice->MicrophoneMute(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetMicrophoneMute(false) == 0);
+        TEST(audioDevice->MicrophoneMute(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // reinitialize the default device (0) and modify/retrieve the Mute
+    TEST(audioDevice->SetRecordingDevice(0) == 0);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneMute(true) == 0);
+        TEST(audioDevice->MicrophoneMute(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetMicrophoneMute(false) == 0);
+        TEST(audioDevice->MicrophoneMute(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // ==================
+    // SetMicrophoneBoost
+    // MicrophoneBoost
+    // ==================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+    TEST(audioDevice->MicrophoneIsInitialized() == false);
+
+    // fail tests
+    TEST(audioDevice->SetMicrophoneBoost(true) == -1); // requires initialization
+    TEST(audioDevice->MicrophoneBoost(&enabled) == -1);
+
+#ifdef _WIN32
+    // initialize kDefaultCommunicationDevice and modify/retrieve the boost
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneBoost(true) == 0);
+        TEST(audioDevice->MicrophoneBoost(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetMicrophoneBoost(false) == 0);
+        TEST(audioDevice->MicrophoneBoost(&enabled) == 0);
+        TEST(enabled == false);
+    }
+#endif
+
+    // reinitialize kDefaultDevice and modify/retrieve the boost
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneBoost(true) == 0);
+        TEST(audioDevice->MicrophoneBoost(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetMicrophoneBoost(false) == 0);
+        TEST(audioDevice->MicrophoneBoost(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // reinitialize the default device (0) and modify/retrieve the boost
+    TEST(audioDevice->SetRecordingDevice(0) == 0);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitMicrophone() == 0);
+        TEST(audioDevice->SetMicrophoneBoost(true) == 0);
+        TEST(audioDevice->MicrophoneBoost(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetMicrophoneBoost(false) == 0);
+        TEST(audioDevice->MicrophoneBoost(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // ================
+    // SetStereoPlayout
+    // StereoPlayout
+    // ================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    // fail tests
+    TEST(audioDevice->InitPlayout() == -1);
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->PlayoutIsInitialized() == true);
+    // must be performed before initialization
+    TEST(audioDevice->SetStereoPlayout(true) == -1);
+
+    // ensure that we can set the stereo mode for playout
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+
+    // initialize kDefaultCommunicationDevice and modify/retrieve stereo support
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoPlayout(true) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetStereoPlayout(false) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == false);
+        TEST(audioDevice->SetStereoPlayout(true) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == true);
+    }
+
+    // initialize kDefaultDevice and modify/retrieve stereo support
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoPlayout(true) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetStereoPlayout(false) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == false);
+        TEST(audioDevice->SetStereoPlayout(true) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == true);
+    }
+
+    // initialize default device (0) and modify/retrieve stereo support
+    TEST(audioDevice->SetPlayoutDevice(0) == 0);
+    TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoPlayout(true) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetStereoPlayout(false) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == false);
+        TEST(audioDevice->SetStereoPlayout(true) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        TEST(enabled == true);
+    }
+
+    // ==================
+    // SetStereoRecording
+    // StereoRecording
+    // ==================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    // fail tests
+    TEST(audioDevice->InitRecording() == -1);
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->InitRecording() == 0);
+    TEST(audioDevice->RecordingIsInitialized() == true);
+    // must be performed before initialization
+    TEST(audioDevice->SetStereoRecording(true) == -1);
+
+    // ensures that we can set the stereo mode for recording
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+
+    // initialize kDefaultCommunicationDevice and modify/retrieve stereo support
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoRecording(true) == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetStereoRecording(false) == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // initialize kDefaultDevice and modify/retrieve stereo support
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoRecording(true) == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetStereoRecording(false) == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // initialize default device (0) and modify/retrieve stereo support
+    TEST(audioDevice->SetRecordingDevice(0) == 0);
+    TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoRecording(true) == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        TEST(enabled == true);
+        TEST(audioDevice->SetStereoRecording(false) == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        TEST(enabled == false);
+    }
+
+    // ===================
+    // SetRecordingChannel
+    // RecordingChannel
+    // ===================
+
+    // NOTE(review): fragmentary comment — appears to say channel selection is only exposed to the user in Win Core Audio; confirm against original intent.
+
+    AudioDeviceModule::ChannelType channelType(AudioDeviceModule::kChannelBoth);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    // fail tests
+    TEST(audioDevice->SetStereoRecording(false) == 0);
+    TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelBoth) == -1);
+
+    // initialize kDefaultCommunicationDevice and modify/retrieve stereo support
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetStereoRecording(true) == 0);
+        TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelBoth) == 0);
+        TEST(audioDevice->RecordingChannel(&channelType) == 0);
+        TEST(channelType == AudioDeviceModule::kChannelBoth);
+        TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelLeft) == 0);
+        TEST(audioDevice->RecordingChannel(&channelType) == 0);
+        TEST(channelType == AudioDeviceModule::kChannelLeft);
+        TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelRight) == 0);
+        TEST(audioDevice->RecordingChannel(&channelType) == 0);
+        TEST(channelType == AudioDeviceModule::kChannelRight);
+    }
+
+    // ================
+    // SetPlayoutBuffer
+    // PlayoutBuffer
+    // ================
+
+    AudioDeviceModule::BufferType bufferType;
+    WebRtc_UWord16 sizeMS(0);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+#if defined(_WIN32) || defined(ANDROID) || defined(MAC_IPHONE)
+    TEST(bufferType == AudioDeviceModule::kAdaptiveBufferSize);
+#else
+    TEST(bufferType == AudioDeviceModule::kFixedBufferSize);
+#endif
+
+    // fail tests
+    TEST(audioDevice->InitPlayout() == -1); // must set device first
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->PlayoutIsInitialized() == true);
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kAdaptiveBufferSize,
+                                       100) == -1);
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                       kAdmMinPlayoutBufferSizeMs-1) == -1);
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                       kAdmMaxPlayoutBufferSizeMs+1) == -1);
+
+    // bulk tests (all should be successful)
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+#ifdef _WIN32
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kAdaptiveBufferSize,
+                                       0) == 0);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+    TEST(bufferType == AudioDeviceModule::kAdaptiveBufferSize);
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kAdaptiveBufferSize,
+                                       10000) == 0);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+    TEST(bufferType == AudioDeviceModule::kAdaptiveBufferSize);
+#endif
+#if defined(ANDROID) || defined(MAC_IPHONE)
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                       kAdmMinPlayoutBufferSizeMs) == -1);
+#else
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                       kAdmMinPlayoutBufferSizeMs) == 0);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+    TEST(bufferType == AudioDeviceModule::kFixedBufferSize);
+    TEST(sizeMS == kAdmMinPlayoutBufferSizeMs);
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                       kAdmMaxPlayoutBufferSizeMs) == 0);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+    TEST(bufferType == AudioDeviceModule::kFixedBufferSize);
+    TEST(sizeMS == kAdmMaxPlayoutBufferSizeMs);
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                       100) == 0);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+    TEST(bufferType == AudioDeviceModule::kFixedBufferSize);
+    TEST(sizeMS == 100);
+#endif
+
+#ifdef _WIN32
+    // restore default
+    TEST(audioDevice->SetPlayoutBuffer(AudioDeviceModule::kAdaptiveBufferSize,
+                                       0) == 0);
+    TEST(audioDevice->PlayoutBuffer(&bufferType, &sizeMS) == 0);
+#endif
+
+    // ============
+    // PlayoutDelay
+    // ============
+
+    // NOTE: this API is better tested in a functional test
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    // bulk tests
+    TEST(audioDevice->PlayoutDelay(&sizeMS) == 0);
+    TEST(audioDevice->PlayoutDelay(&sizeMS) == 0);
+
+    // ==============
+    // RecordingDelay
+    // ==============
+
+    // NOTE: this API is better tested in a functional test
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+
+    // bulk tests
+    TEST(audioDevice->RecordingDelay(&sizeMS) == 0);
+    TEST(audioDevice->RecordingDelay(&sizeMS) == 0);
+
+    // =======
+    // CPULoad
+    // =======
+
+    // NOTE: this API is better tested in a functional test
+
+    WebRtc_UWord16 load(0);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+
+    // bulk tests
+#ifdef _WIN32
+    TEST(audioDevice->CPULoad(&load) == 0);
+    TEST(load == 0);
+#else
+    TEST(audioDevice->CPULoad(&load) == -1);
+#endif
+
+    // ===========================
+    // StartRawOutputFileRecording
+    // StopRawOutputFileRecording
+    // ===========================
+
+    // NOTE: this API is better tested in a functional test
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    // fail tests
+    TEST(audioDevice->StartRawOutputFileRecording(NULL) == -1);
+
+    // bulk tests
+    TEST(audioDevice->StartRawOutputFileRecording(
+        GetFilename("raw_output_not_playing.pcm")) == 0);
+    TEST(audioDevice->StopRawOutputFileRecording() == 0);
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE) == 0);
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->StartPlayout() == 0);
+    TEST(audioDevice->StartRawOutputFileRecording(
+        GetFilename("raw_output_playing.pcm")) == 0);
+    AudioDeviceUtility::Sleep(100);
+    TEST(audioDevice->StopRawOutputFileRecording() == 0);
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->StartRawOutputFileRecording(
+        GetFilename("raw_output_not_playing.pcm")) == 0);
+    TEST(audioDevice->StopRawOutputFileRecording() == 0);
+
+    // results after this test:
+    //
+    // - size of raw_output_not_playing.pcm shall be 0
+    // - size of raw_output_playing.pcm shall be > 0
+
+    // ==========================
+    // StartRawInputFileRecording
+    // StopRawInputFileRecording
+    // ==========================
+
+    // NOTE: this API is better tested in a functional test
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    // fail tests
+    TEST(audioDevice->StartRawInputFileRecording(NULL) == -1);
+
+    // bulk tests
+    TEST(audioDevice->StartRawInputFileRecording(
+        GetFilename("raw_input_not_recording.pcm")) == 0);
+    TEST(audioDevice->StopRawInputFileRecording() == 0);
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->InitRecording() == 0);
+    TEST(audioDevice->StartRecording() == 0);
+    TEST(audioDevice->StartRawInputFileRecording(
+        GetFilename("raw_input_recording.pcm")) == 0);
+    AudioDeviceUtility::Sleep(100);
+    TEST(audioDevice->StopRawInputFileRecording() == 0);
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->StartRawInputFileRecording(
+        GetFilename("raw_input_not_recording.pcm")) == 0);
+    TEST(audioDevice->StopRawInputFileRecording() == 0);
+
+    // results after this test:
+    //
+    // - size of raw_input_not_recording.pcm shall be 0
+    // - size of raw_input_recording.pcm shall be > 0
+
+    // ===================
+    // RecordingSampleRate
+    // ===================
+
+    WebRtc_UWord32 sampleRate(0);
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+
+    // bulk tests
+    TEST(audioDevice->RecordingSampleRate(&sampleRate) == 0);
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    TEST(sampleRate == 48000);
+#elif defined(ANDROID)
+    TEST_LOG("Recording sample rate is %u\n\n", sampleRate);
+    TEST((sampleRate == 44000) || (sampleRate == 16000));
+#elif defined(MAC_IPHONE)
+    TEST_LOG("Recording sample rate is %u\n\n", sampleRate);
+    TEST((sampleRate == 44000) || (sampleRate == 16000) || (sampleRate == 8000));
+#endif
+
+    // TODO(xians): Add tests for all platforms here...
+
+    // =================
+    // PlayoutSampleRate
+    // =================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+
+    // bulk tests
+    TEST(audioDevice->PlayoutSampleRate(&sampleRate) == 0);
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    TEST(sampleRate == 48000);
+#elif defined(ANDROID)
+    TEST_LOG("Playout sample rate is %u\n\n", sampleRate);
+    TEST((sampleRate == 44000) || (sampleRate == 16000));
+#elif defined(MAC_IPHONE)
+    TEST_LOG("Playout sample rate is %u\n\n", sampleRate);
+    TEST((sampleRate == 44000) || (sampleRate == 16000) || (sampleRate == 8000));
+#endif
+
+    // ==========================
+    // ResetAudioDevice
+    // ==========================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+    TEST(audioDevice->RecordingIsInitialized() == false);
+    TEST(audioDevice->Recording() == false);
+
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+    TEST(audioDevice->SetRecordingDevice(MACRO_DEFAULT_DEVICE) == 0);
+
+#if defined(MAC_IPHONE)
+    // Not playing or recording, should just return 0
+    TEST(audioDevice->ResetAudioDevice() == 0);
+
+    TEST(audioDevice->InitRecording() == 0);
+    TEST(audioDevice->StartRecording() == 0);
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->StartPlayout() == 0);
+    for (int l=0; l<20; ++l)
+    {
+        TEST_LOG("Resetting sound device several time with pause %d ms\n", l);
+        TEST(audioDevice->ResetAudioDevice() == 0);
+        AudioDeviceUtility::Sleep(l);
+    }
+#else
+    // Fail tests
+    TEST(audioDevice->ResetAudioDevice() == -1);
+    TEST(audioDevice->InitRecording() == 0);
+    TEST(audioDevice->StartRecording() == 0);
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->StartPlayout() == 0);
+    TEST(audioDevice->ResetAudioDevice() == -1);
+#endif
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->StopPlayout() == 0);
+
+    // ==========================
+    // SetPlayoutSpeaker
+    // ==========================
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Init() == 0);
+
+    // check initial states
+    TEST(audioDevice->Initialized() == true);
+    TEST(audioDevice->PlayoutIsInitialized() == false);
+    TEST(audioDevice->Playing() == false);
+
+    TEST(audioDevice->SetPlayoutDevice(MACRO_DEFAULT_DEVICE) == 0);
+
+    bool loudspeakerOn(false);
+#if defined(MAC_IPHONE)
+    // Not playing or recording; should just return success
+    TEST(audioDevice->SetLoudspeakerStatus(true) == 0);
+    TEST(audioDevice->GetLoudspeakerStatus(loudspeakerOn) == 0);
+    TEST(loudspeakerOn == true);
+    TEST(audioDevice->SetLoudspeakerStatus(false) == 0);
+    TEST(audioDevice->GetLoudspeakerStatus(loudspeakerOn) == 0);
+    TEST(loudspeakerOn == false);
+
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->StartPlayout() == 0);
+    TEST(audioDevice->SetLoudspeakerStatus(true) == 0);
+    TEST(audioDevice->GetLoudspeakerStatus(loudspeakerOn) == 0);
+    TEST(loudspeakerOn == true);
+    TEST(audioDevice->SetLoudspeakerStatus(false) == 0);
+    TEST(audioDevice->GetLoudspeakerStatus(loudspeakerOn) == 0);
+    TEST(loudspeakerOn == false);
+
+#else
+    // Fail tests
+    TEST(audioDevice->SetLoudspeakerStatus(true) == -1);
+    TEST(audioDevice->SetLoudspeakerStatus(false) == -1);
+    TEST(audioDevice->SetLoudspeakerStatus(true) == -1);
+    TEST(audioDevice->SetLoudspeakerStatus(false) == -1);
+
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->StartPlayout() == 0);
+    TEST(audioDevice->GetLoudspeakerStatus(&loudspeakerOn) == -1);
+#endif
+    TEST(audioDevice->StopPlayout() == 0);
+
+#ifdef _WIN32
+    Exit:
+#endif
+
+    // ------------------------------------------------------------------------
+    // Terminate the module when all tests are done:
+    //
+    TEST(audioDevice->Terminate() == 0);
+    // ------------------------------------------------------------------------
+
+    // ===================================================
+    // AudioDeviceModuleImpl::Destroy
+    // ===================================================
+
+
+    // release the ProcessThread object
+    if (processThread)
+    {
+        processThread->DeRegisterModule(audioDevice);
+        processThread->Stop();
+        ProcessThread::DestroyProcessThread(processThread);
+    }
+
+    // delete the event observer
+    if (eventObserver)
+    {
+        delete eventObserver;
+        eventObserver = NULL;
+    }
+
+    // delete the audio transport
+    if (audioTransport)
+    {
+        delete audioTransport;
+        audioTransport = NULL;
+    }
+
+    // release the AudioDeviceModule object
+    if (audioDevice) {
+       TEST(audioDevice->Release() == 0);
+    }
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
diff --git a/trunk/src/modules/audio_device/main/test/audio_device_test_defines.h b/trunk/src/modules/audio_device/main/test/audio_device_test_defines.h
new file mode 100644
index 0000000..5c5f4e2
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/audio_device_test_defines.h
@@ -0,0 +1,116 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_TEST_DEFINES_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_TEST_DEFINES_H
+
+#include "common_types.h"
+#include "audio_device.h"
+#include "process_thread.h"
+#include "trace.h"
+
+#ifdef _WIN32
+#define MACRO_DEFAULT_DEVICE AudioDeviceModule::kDefaultDevice
+#define MACRO_DEFAULT_COMMUNICATION_DEVICE AudioDeviceModule::kDefaultCommunicationDevice
+#else
+#define MACRO_DEFAULT_DEVICE 0
+#define MACRO_DEFAULT_COMMUNICATION_DEVICE 0
+#endif
+
+#ifdef ANDROID
+#include <android/log.h>
+#define LOG_TAG "WebRtc ADM TEST"
+#define TEST_LOG(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
+#define TEST_LOG_ERROR(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
+#else
+#define TEST_LOG printf
+#define TEST_LOG_ERROR(...) fprintf(stderr, __VA_ARGS__)
+#endif
+
+static int testCount = 0;
+static int errorCount = 0;
+static int warningCount = 0;
+
+#define RESET_TEST                                              \
+    do {                                                        \
+        testCount = 0;                                          \
+        errorCount = 0;                                         \
+        warningCount = 0;                                       \
+    } while(0)                                                  \
+
+#define PRINT_ERR_MSG(msg)                                      \
+    do {                                                        \
+        TEST_LOG_ERROR("Error at line %i of %s\n%s",            \
+            __LINE__, __FILE__, msg);                           \
+    } while(0)
+
+#define WARNING(expr)                                           \
+    do {                                                        \
+        testCount++;                                            \
+        if (!(expr)) {                                          \
+		TEST_LOG_ERROR("WARNING #%d: at line %i\n\n",			\
+				warningCount+1, __LINE__);						\
+            warningCount++;                                     \
+        }                                                       \
+    } while(0)
+
+
+#define TEST(expr)                                              \
+    do {                                                        \
+        testCount++;                                            \
+        if (!(expr)) {                                          \
+            PRINT_ERR_MSG("Assertion failed: " #expr "\n\n");   \
+            errorCount++;                                       \
+        }                                                       \
+    } while(0)
+
+#define TEST_ERR(expr, err)                                     \
+    do {                                                        \
+        testCount++;                                            \
+        if (!(expr)) {                                          \
+            PRINT_ERR_MSG("Assertion failed: " #expr "\n\n");   \
+            errorCount++;                                       \
+        }                                                       \
+		if (audioDevice->LastError() != err) {					\
+			PRINT_ERR_MSG("Assertion failed: " #err "\n\n");    \
+            errorCount++;                                       \
+        }                                                       \
+    } while(0)
+
+
+#define PRINT_TEST_RESULTS                                      \
+    do {                                                        \
+        TEST_LOG("\n>> %i tests completed <<\n", testCount);    \
+        if (errorCount > 0) {                                   \
+            TEST_LOG(">> %i FAILED! <<\n\n", errorCount);       \
+        }                                                       \
+        else if (warningCount > 0)                              \
+		{														\
+			TEST_LOG(">> ALL PASSED (with %d warnings) <<\n\n",	\
+				warningCount);									\
+		}														\
+		else													\
+		{														\
+            TEST_LOG(">> ALL PASSED <<\n\n");                   \
+        }                                                       \
+    } while(0)
+
+// Helper functions
+// For iPhone, they are defined in iPhone specific test code.
+// For Android, they are defined in API test only (since both
+//   API and Func tests are built into the same lib).
+// For other, they are defined in both API test and Func test.
+char* GetFilename(char* filename);
+const char* GetFilename(const char* filename);
+char* GetResource(char* resource);
+const char* GetResource(const char* resource);
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_TEST_DEFINES_H
+
diff --git a/trunk/src/modules/audio_device/main/test/audio_device_test_func.cc b/trunk/src/modules/audio_device/main/test/audio_device_test_func.cc
new file mode 100644
index 0000000..87a70aa
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/audio_device_test_func.cc
@@ -0,0 +1,164 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include "audio_device_test_defines.h"
+#include "func_test_manager.h"
+
+#ifndef __GNUC__
+// Disable warning message 4996 ('scanf': This function or variable may be unsafe)
+#pragma warning( disable : 4996 )
+#endif
+
+using namespace webrtc;
+
+int func_test(int);
+
+// ----------------------------------------------------------------------------
+//  main()
+// ----------------------------------------------------------------------------
+
+#if !defined(MAC_IPHONE) && !defined(ANDROID)
+int main(int /*argc*/, char* /*argv*/[])
+{
+    func_test(0);
+}
+#endif
+
+// ----------------------------------------------------------------------------
+//  func_test()
+// ----------------------------------------------------------------------------
+
+int func_test(int sel)
+{
+    TEST_LOG("=========================================\n");
+    TEST_LOG("Func Test of the WebRtcAudioDevice Module\n");
+    TEST_LOG("=========================================\n\n");
+
+    // Initialize the counters here to get rid of "unused variables" warnings.
+    testCount = 0;
+    errorCount = 0;
+    warningCount = 0;
+
+    FuncTestManager funcMgr;
+
+    funcMgr.Init();
+
+    bool quit(false);
+
+    while (!quit)
+    {
+        TEST_LOG("---------------------------------------\n");
+        TEST_LOG("Select type of test\n\n");
+        TEST_LOG("  (0) Quit\n");
+        TEST_LOG("  (1) All\n");
+        TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+        TEST_LOG("  (2) Audio-layer selection\n");
+        TEST_LOG("  (3) Device enumeration\n");
+        TEST_LOG("  (4) Device selection\n");
+        TEST_LOG("  (5) Audio transport\n");
+        TEST_LOG("  (6) Speaker volume\n");
+        TEST_LOG("  (7) Microphone volume\n");
+        TEST_LOG("  (8) Speaker mute\n");
+        TEST_LOG("  (9) Microphone mute\n");
+        TEST_LOG(" (10) Microphone boost\n");
+        TEST_LOG(" (11) Microphone AGC\n");
+        TEST_LOG(" (12) Loopback measurements\n");
+        TEST_LOG(" (13) Device removal\n");
+        TEST_LOG(" (14) Advanced mobile device API\n");
+        TEST_LOG(" (66) XTEST\n");
+        TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+        TEST_LOG("\n: ");
+
+        int selection(0);
+        enum TestType testType(TTInvalid);
+
+SHOW_MENU:
+
+        if (sel > 0)
+        {
+            selection = sel;
+        }
+        else
+        {
+            if (scanf("%d", &selection) < 0) {
+              perror("Failed to get selection.");
+            }
+        }
+
+        switch (selection)
+        {
+            case 0:
+                quit = true;
+                break;
+            case 1:
+                testType = TTAll;
+                break;
+            case 2:
+                testType = TTAudioLayerSelection;
+                break;
+            case 3:
+                testType = TTDeviceEnumeration;
+                break;
+            case 4:
+                testType = TTDeviceSelection;
+                break;
+            case 5:
+                testType = TTAudioTransport;
+                break;
+            case 6:
+                testType = TTSpeakerVolume;
+                break;
+            case 7:
+                testType = TTMicrophoneVolume;
+                break;
+            case 8:
+                testType = TTSpeakerMute;
+                break;
+            case 9:
+                testType = TTMicrophoneMute;
+                break;
+            case 10:
+                testType = TTMicrophoneBoost;
+                break;
+            case 11:
+                testType = TTMicrophoneAGC;
+                break;
+            case 12:
+                testType = TTLoopback;
+                break;
+            case 13:
+                testType = TTDeviceRemoval;
+                break;
+            case 14:
+                testType = TTMobileAPI;
+                break;
+            case 66:
+                testType = TTTest;
+                break;
+            default:
+                testType = TTInvalid;
+                TEST_LOG(": ");
+                goto SHOW_MENU;
+                break;
+           }
+
+        funcMgr.DoTest(testType);
+
+        if (sel > 0)
+        {
+            quit = true;
+        }
+    }
+
+    funcMgr.Close();
+
+    return 0;
+}
diff --git a/trunk/src/modules/audio_device/main/test/func_test_manager.cc b/trunk/src/modules/audio_device/main/test/func_test_manager.cc
new file mode 100644
index 0000000..8bc7646
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/func_test_manager.cc
@@ -0,0 +1,2745 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <ctype.h>
+#include <cassert>
+#include <string.h>
+
+#include "func_test_manager.h"
+#include "testsupport/fileutils.h"
+
+#include "../source/audio_device_config.h"
+#include "../source/audio_device_impl.h"
+
+#ifndef __GNUC__
+// Disable warning message ('sprintf': name was marked as #pragma deprecated)
+#pragma warning( disable : 4995 )
+// Disable warning message 4996 ('scanf': This function or variable may be unsafe)
+#pragma warning( disable : 4996 )
+#endif
+
+// Output file names for the recording-based tests below. Each test that
+// captures microphone or speaker data writes its raw PCM output to one of
+// these files (written to the current working directory).
+const char* RecordedMicrophoneFile = "recorded_microphone_mono_48.pcm";
+const char* RecordedMicrophoneVolumeFile =
+"recorded_microphone_volume_mono_48.pcm";
+const char* RecordedMicrophoneMuteFile = "recorded_microphone_mute_mono_48.pcm";
+const char* RecordedMicrophoneBoostFile =
+"recorded_microphone_boost_mono_48.pcm";
+const char* RecordedMicrophoneAGCFile = "recorded_microphone_AGC_mono_48.pcm";
+const char* RecordedSpeakerFile = "recorded_speaker_48.pcm";
+
+// One captured audio buffer together with the format information needed to
+// play it back later. Used by the full-duplex path: RecordedDataIsAvailable()
+// queues packets and NeedMorePlayData() consumes them.
+struct AudioPacket
+{
+    WebRtc_UWord8 dataBuffer[4 * 960];  // large enough for 960 samples at 4 bytes/sample
+    WebRtc_UWord16 nSamples;            // samples per channel in dataBuffer
+    WebRtc_UWord16 nBytesPerSample;     // bytes per sample (covering all channels)
+    WebRtc_UWord8 nChannels;            // 1 = mono, 2 = stereo
+    WebRtc_UWord32 samplesPerSec;       // sample rate of the buffered data in Hz
+};
+
+// Helper functions: on desktop platforms file/resource paths are used as-is
+// (identity pass-through). Mobile builds (MAC_IPHONE/ANDROID) are expected to
+// provide their own implementations elsewhere — TODO confirm against the
+// platform-specific test harnesses.
+#if !defined(MAC_IPHONE) && !defined(ANDROID)
+char* GetFilename(char* filename)
+{
+    return filename;
+}
+const char* GetFilename(const char* filename)
+{
+    return filename;
+}
+char* GetResource(char* resource)
+{
+    return resource;
+}
+const char* GetResource(const char* resource)
+{
+    return resource;
+}
+#endif
+
+namespace webrtc
+{
+
+// Stores the ADM pointer so error/warning callbacks could act on the device
+// (the recovery calls are currently commented out in the handlers below).
+AudioEventObserver::AudioEventObserver(AudioDeviceModule* audioDevice) :
+    _audioDevice(audioDevice)
+{
+}
+;
+
+// Nothing to release; _audioDevice is not owned by the observer.
+AudioEventObserver::~AudioEventObserver()
+{
+}
+;
+
+// ADM error callback: logs the error code and remembers it in _error so a
+// test can inspect the last reported error afterwards.
+void AudioEventObserver::OnErrorIsReported(const ErrorCode error)
+{
+    TEST_LOG("\n[*** ERROR ***] => OnErrorIsReported(%d)\n \n", error);
+    _error = error;
+    // TEST(_audioDevice->StopRecording() == 0);
+    // TEST(_audioDevice->StopPlayout() == 0);
+}
+;
+
+
+// ADM warning callback: logs the warning code and remembers it in _warning
+// so a test can inspect the last reported warning afterwards.
+void AudioEventObserver::OnWarningIsReported(const WarningCode warning)
+{
+    TEST_LOG("\n[*** WARNING ***] => OnWarningIsReported(%d)\n \n", warning);
+    _warning = warning;
+    //TEST(_audioDevice->StopRecording() == 0);
+    //TEST(_audioDevice->StopPlayout() == 0);
+}
+;
+
+// All test-mode flags start disabled; individual tests switch them on before
+// starting playout/recording. Note that _playFile is heap-allocated via
+// FileWrapper::Create() and explicitly deleted in the destructor.
+AudioTransportImpl::AudioTransportImpl(AudioDeviceModule* audioDevice) :
+    _audioDevice(audioDevice),
+    _playFromFile(false),
+    _fullDuplex(false),
+    _speakerVolume(false),
+    _speakerMute(false),
+    _microphoneVolume(false),
+    _microphoneMute(false),
+    _microphoneBoost(false),
+    _microphoneAGC(false),
+    _loopBackMeasurements(false),
+    _playFile(*FileWrapper::Create()),
+    _recCount(0),
+    _playCount(0),
+    _audioList()
+{
+    // Start with a pass-through (48 kHz -> 48 kHz) stereo resampler state.
+    _resampler.Reset(48000, 48000, kResamplerSynchronousStereo);
+}
+;
+
+// Closes and frees the playout file and drains the full-duplex packet queue,
+// deleting any AudioPacket instances still buffered.
+AudioTransportImpl::~AudioTransportImpl()
+{
+    _playFile.Flush();
+    _playFile.CloseFile();
+    // _playFile was created with FileWrapper::Create() in the constructor.
+    delete &_playFile;
+
+    while (!_audioList.Empty())
+    {
+        ListItem* item = _audioList.First();
+        if (item)
+        {
+            AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem());
+            if (packet)
+            {
+                delete packet;
+            }
+        }
+        _audioList.PopFront();
+    }
+}
+;
+
+// ----------------------------------------------------------------------------
+//	AudioTransportImpl::SetFilePlayout
+// ----------------------------------------------------------------------------
+
+// Enables or disables playout from a PCM file. When enabling, opens fileName
+// for (looped) reading; when disabling, flushes and closes the current file.
+// Returns the FileWrapper result (0 on success).
+WebRtc_Word32 AudioTransportImpl::SetFilePlayout(bool enable,
+                                                 const WebRtc_Word8* fileName)
+{
+    _playFromFile = enable;
+    if (enable)
+    {
+        return (_playFile.OpenFile(fileName, true, true, false));
+    } else
+    {
+        _playFile.Flush();
+        return (_playFile.CloseFile());
+    }
+}
+;
+
+// Enables or disables full-duplex (record -> queue -> playout) mode. Always
+// drains the packet queue so a new session starts from an empty buffer.
+// Note: this duplicates the queue-draining loop in the destructor.
+void AudioTransportImpl::SetFullDuplex(bool enable)
+{
+    _fullDuplex = enable;
+
+    while (!_audioList.Empty())
+    {
+        ListItem* item = _audioList.First();
+        if (item)
+        {
+            AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem());
+            if (packet)
+            {
+                delete packet;
+            }
+        }
+        _audioList.PopFront();
+    }
+}
+;
+
+// AudioTransport capture callback, invoked by the ADM for every recorded
+// buffer. In full-duplex mode the data is copied into an AudioPacket and
+// queued (bounded to 15 packets) for NeedMorePlayData() to play out. Every
+// 100th callback it additionally drives whichever sub-test is enabled
+// (volume sweep, AGC emulation, mute/boost toggling) and prints a one- or
+// two-character progress marker. Returns 0 always.
+WebRtc_Word32 AudioTransportImpl::RecordedDataIsAvailable(
+    const WebRtc_Word8* audioSamples,
+    const WebRtc_UWord32 nSamples,
+    const WebRtc_UWord8 nBytesPerSample,
+    const WebRtc_UWord8 nChannels,
+    const WebRtc_UWord32 samplesPerSec,
+    const WebRtc_UWord32 totalDelayMS,
+    const WebRtc_Word32 clockDrift,
+    const WebRtc_UWord32 currentMicLevel,
+    WebRtc_UWord32& newMicLevel)
+{
+    // Queue a copy for playout; drop new data once 15 packets are buffered
+    // (the loopback delay printout below assumes ~10 ms per queued packet).
+    if (_fullDuplex && _audioList.GetSize() < 15)
+    {
+        AudioPacket* packet = new AudioPacket();
+        memcpy(packet->dataBuffer, audioSamples, nSamples * nBytesPerSample);
+        packet->nSamples = (WebRtc_UWord16) nSamples;
+        packet->nBytesPerSample = nBytesPerSample;
+        packet->nChannels = nChannels;
+        packet->samplesPerSec = samplesPerSec;
+        _audioList.PushBack(packet);
+    }
+
+    _recCount++;
+    if (_recCount % 100 == 0)
+    {
+        // addMarker decides whether the plain progress marker is printed;
+        // any sub-test that prints its own status suppresses it.
+        bool addMarker(true);
+
+        if (_loopBackMeasurements)
+        {
+            addMarker = false;
+        }
+
+        if (_microphoneVolume)
+        {
+            // Sweep the microphone volume upwards in ~10 steps from min to
+            // max, wrapping back to 0 when the maximum is exceeded.
+            WebRtc_UWord32 maxVolume(0);
+            WebRtc_UWord32 minVolume(0);
+            WebRtc_UWord32 volume(0);
+            WebRtc_UWord16 stepSize(0);
+            TEST(_audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+            TEST(_audioDevice->MinMicrophoneVolume(&minVolume) == 0);
+            TEST(_audioDevice->MicrophoneVolumeStepSize(&stepSize) == 0);
+            TEST(_audioDevice->MicrophoneVolume(&volume) == 0);
+            if (volume == 0)
+            {
+                TEST_LOG("[0]");
+                addMarker = false;
+            }
+            int stepScale = (int) ((maxVolume - minVolume) / (stepSize * 10));
+            volume += (stepScale * stepSize);
+            if (volume > maxVolume)
+            {
+                TEST_LOG("[MAX]");
+                volume = 0;
+                addMarker = false;
+            }
+            TEST(_audioDevice->SetMicrophoneVolume(volume) == 0);
+        }
+
+        if (_microphoneAGC)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            WebRtc_UWord32 minVolume(0);
+            WebRtc_UWord16 stepSize(0);
+            TEST(_audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+            TEST(_audioDevice->MinMicrophoneVolume(&minVolume) == 0);
+            TEST(_audioDevice->MicrophoneVolumeStepSize(&stepSize) == 0);
+            // emulate real AGC (min->max->min->max etc.)
+            if (currentMicLevel <= 1)
+            {
+                TEST_LOG("[MIN]");
+                addMarker = false;
+            }
+            int stepScale = (int) ((maxVolume - minVolume) / (stepSize * 10));
+            newMicLevel = currentMicLevel + (stepScale * stepSize);
+            if (newMicLevel > maxVolume)
+            {
+                TEST_LOG("[MAX]");
+                newMicLevel = 1; // set lowest (non-zero) AGC level
+                addMarker = false;
+            }
+        }
+
+        // Mute/boost toggling happens less often (every 500th callback).
+        if (_microphoneMute && (_recCount % 500 == 0))
+        {
+            bool muted(false);
+            TEST(_audioDevice->MicrophoneMute(&muted) == 0);
+            muted = !muted;
+            TEST(_audioDevice->SetMicrophoneMute(muted) == 0);
+            if (muted)
+            {
+                TEST_LOG("[MUTE ON]");
+                addMarker = false;
+            } else
+            {
+                TEST_LOG("[MUTE OFF]");
+                addMarker = false;
+            }
+        }
+
+        if (_microphoneBoost && (_recCount % 500 == 0))
+        {
+            bool boosted(false);
+            TEST(_audioDevice->MicrophoneBoost(&boosted) == 0);
+            boosted = !boosted;
+            TEST(_audioDevice->SetMicrophoneBoost(boosted) == 0);
+            if (boosted)
+            {
+                TEST_LOG("[BOOST ON]");
+                addMarker = false;
+            } else
+            {
+                TEST_LOG("[BOOST OFF]");
+                addMarker = false;
+            }
+        }
+
+        // Progress markers: "-" mono, "-|"/"|-" single-channel-from-stereo
+        // (depending on which channel is recorded), "--" true stereo.
+        if ((nChannels == 1) && addMarker)
+        {
+            // mono
+            TEST_LOG("-");
+        } else if ((nChannels == 2) && (nBytesPerSample == 2) && addMarker)
+        {
+            AudioDeviceModule::ChannelType
+                chType(AudioDeviceModule::kChannelLeft);
+            TEST(_audioDevice->RecordingChannel(&chType) == 0);
+            if (chType == AudioDeviceModule::kChannelLeft)
+                TEST_LOG("-|");
+            else
+                TEST_LOG("|-");
+        } else if (addMarker)
+        {
+            // stereo
+            TEST_LOG("--");
+        }
+
+        if (nChannels == 2 && nBytesPerSample == 2)
+        {
+            // TEST_LOG("=> emulated mono (one channel extracted from stereo input)\n");
+        }
+    }
+
+    return 0;
+}
+
+
+// AudioTransport playout callback, invoked by the ADM when it needs more
+// audio to render. In full-duplex mode it dequeues one recorded AudioPacket,
+// resamples and (if necessary) remixes mono<->stereo to match the playout
+// format. In file-playout mode it reads mono 16-bit samples from _playFile
+// (rewinding at EOF) and duplicates them to stereo when required. Every
+// 100th callback it also drives the speaker volume/mute sub-tests and the
+// loopback delay printout. Always reports nSamplesOut = nSamples and
+// returns 0.
+WebRtc_Word32 AudioTransportImpl::NeedMorePlayData(
+    const WebRtc_UWord32 nSamples,
+    const WebRtc_UWord8 nBytesPerSample,
+    const WebRtc_UWord8 nChannels,
+    const WebRtc_UWord32 samplesPerSec,
+    WebRtc_Word8* audioSamples,
+    WebRtc_UWord32& nSamplesOut)
+{
+    if (_fullDuplex)
+    {
+        if (_audioList.Empty())
+        {
+            // use zero stuffing when not enough data
+            memset(audioSamples, 0, nBytesPerSample * nSamples);
+        } else
+        {
+            ListItem* item = _audioList.First();
+            AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem());
+            if (packet)
+            {
+                int ret(0);
+                int lenOut(0);
+                // Scratch buffer used when channel count must change after
+                // resampling.
+                WebRtc_Word16 tmpBuf_96kHz[80 * 12];
+                WebRtc_Word16* ptr16In = NULL;
+                WebRtc_Word16* ptr16Out = NULL;
+
+                const WebRtc_UWord16 nSamplesIn = packet->nSamples;
+                const WebRtc_UWord8 nChannelsIn = packet->nChannels;
+                const WebRtc_UWord32 samplesPerSecIn = packet->samplesPerSec;
+                const WebRtc_UWord16 nBytesPerSampleIn =
+                    packet->nBytesPerSample;
+
+                WebRtc_Word32 fsInHz(samplesPerSecIn);
+                WebRtc_Word32 fsOutHz(samplesPerSec);
+
+                // 44.1 kHz is approximated as 44 kHz — presumably because the
+                // resampler only supports integer ratios; TODO confirm.
+                if (fsInHz == 44100)
+                    fsInHz = 44000;
+
+                if (fsOutHz == 44100)
+                    fsOutHz = 44000;
+
+                if (nChannelsIn == 2 && nBytesPerSampleIn == 4)
+                {
+                    // input is stereo => we will resample in stereo
+                    ret = _resampler.ResetIfNeeded(fsInHz, fsOutHz,
+                                                   kResamplerSynchronousStereo);
+                    if (ret == 0)
+                    {
+                        if (nChannels == 2)
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                2 * nSamplesIn,
+                                (WebRtc_Word16*) audioSamples, 2
+                                * nSamples, lenOut);
+                        } else
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                2 * nSamplesIn, tmpBuf_96kHz, 2
+                                * nSamples, lenOut);
+
+                            ptr16In = &tmpBuf_96kHz[0];
+                            ptr16Out = (WebRtc_Word16*) audioSamples;
+
+                            // do stereo -> mono
+                            for (unsigned int i = 0; i < nSamples; i++)
+                            {
+                                *ptr16Out = *ptr16In; // use left channel
+                                ptr16Out++;
+                                ptr16In++;
+                                ptr16In++;
+                            }
+                        }
+                        assert(2*nSamples == (WebRtc_UWord32)lenOut);
+                    } else
+                    {
+                        if (_playCount % 100 == 0)
+                            TEST_LOG(
+                                     "ERROR: unable to resample from %d to %d\n",
+                                     samplesPerSecIn, samplesPerSec);
+                    }
+                } else
+                {
+                    // input is mono (can be "reduced from stereo" as well) =>
+                    // we will resample in mono
+                    ret = _resampler.ResetIfNeeded(fsInHz, fsOutHz,
+                                                   kResamplerSynchronous);
+                    if (ret == 0)
+                    {
+                        if (nChannels == 1)
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                nSamplesIn,
+                                (WebRtc_Word16*) audioSamples,
+                                nSamples, lenOut);
+                        } else
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                nSamplesIn, tmpBuf_96kHz, nSamples,
+                                lenOut);
+
+                            ptr16In = &tmpBuf_96kHz[0];
+                            ptr16Out = (WebRtc_Word16*) audioSamples;
+
+                            // do mono -> stereo
+                            for (unsigned int i = 0; i < nSamples; i++)
+                            {
+                                *ptr16Out = *ptr16In; // left
+                                ptr16Out++;
+                                *ptr16Out = *ptr16In; // right (same as left sample)
+                                ptr16Out++;
+                                ptr16In++;
+                            }
+                        }
+                        assert(nSamples == (WebRtc_UWord32)lenOut);
+                    } else
+                    {
+                        if (_playCount % 100 == 0)
+                            TEST_LOG("ERROR: unable to resample from %d to %d\n",
+                                     samplesPerSecIn, samplesPerSec);
+                    }
+                }
+                nSamplesOut = nSamples;
+                delete packet;
+            }
+            _audioList.PopFront();
+        }
+    } // if (_fullDuplex)
+
+    if (_playFromFile && _playFile.Open())
+    {
+        WebRtc_Word16 fileBuf[480];
+
+        // read mono-file
+        WebRtc_Word32 len = _playFile.Read((WebRtc_Word8*) fileBuf, 2
+            * nSamples);
+        if (len != 2 * (WebRtc_Word32) nSamples)
+        {
+            // Short read => end of file: loop back to the beginning.
+            _playFile.Rewind();
+            _playFile.Read((WebRtc_Word8*) fileBuf, 2 * nSamples);
+        }
+
+        // convert to stereo if required
+        if (nChannels == 1)
+        {
+            memcpy(audioSamples, fileBuf, 2 * nSamples);
+        } else
+        {
+            // mono sample from file is duplicated and sent to left and right
+            // channels
+            WebRtc_Word16* audio16 = (WebRtc_Word16*) audioSamples;
+            for (unsigned int i = 0; i < nSamples; i++)
+            {
+                (*audio16) = fileBuf[i]; // left
+                audio16++;
+                (*audio16) = fileBuf[i]; // right
+                audio16++;
+            }
+        }
+    } // if (_playFromFile && _playFile.Open())
+
+    _playCount++;
+
+    if (_playCount % 100 == 0)
+    {
+        // addMarker decides whether the plain progress marker is printed;
+        // sub-tests that print their own status suppress it.
+        bool addMarker(true);
+
+        if (_speakerVolume)
+        {
+            // Sweep the speaker volume upwards in ~10 steps, wrapping back
+            // to 0 when the maximum is exceeded.
+            WebRtc_UWord32 maxVolume(0);
+            WebRtc_UWord32 minVolume(0);
+            WebRtc_UWord32 volume(0);
+            WebRtc_UWord16 stepSize(0);
+            TEST(_audioDevice->MaxSpeakerVolume(&maxVolume) == 0);
+            TEST(_audioDevice->MinSpeakerVolume(&minVolume) == 0);
+            TEST(_audioDevice->SpeakerVolumeStepSize(&stepSize) == 0);
+            TEST(_audioDevice->SpeakerVolume(&volume) == 0);
+            if (volume == 0)
+            {
+                TEST_LOG("[0]");
+                addMarker = false;
+            }
+            WebRtc_UWord32 step = (maxVolume - minVolume) / 10;
+            step = (step < stepSize ? stepSize : step);
+            volume += step;
+            if (volume > maxVolume)
+            {
+                TEST_LOG("[MAX]");
+                volume = 0;
+                addMarker = false;
+            }
+            TEST(_audioDevice->SetSpeakerVolume(volume) == 0);
+        }
+
+        // Mute toggling happens less often (every 500th callback).
+        if (_speakerMute && (_playCount % 500 == 0))
+        {
+            bool muted(false);
+            TEST(_audioDevice->SpeakerMute(&muted) == 0);
+            muted = !muted;
+            TEST(_audioDevice->SetSpeakerMute(muted) == 0);
+            if (muted)
+            {
+                TEST_LOG("[MUTE ON]");
+                addMarker = false;
+            } else
+            {
+                TEST_LOG("[MUTE OFF]");
+                addMarker = false;
+            }
+        }
+
+        if (_loopBackMeasurements)
+        {
+            // Total delay estimate = device delays plus 10 ms per buffered
+            // packet (including the one currently being played).
+            WebRtc_UWord16 recDelayMS(0);
+            WebRtc_UWord16 playDelayMS(0);
+            WebRtc_UWord32 nItemsInList(0);
+
+            nItemsInList = _audioList.GetSize();
+            TEST(_audioDevice->RecordingDelay(&recDelayMS) == 0);
+            TEST(_audioDevice->PlayoutDelay(&playDelayMS) == 0);
+            TEST_LOG("Delay (rec+play)+buf: %3u (%3u+%3u)+%3u [ms]\n",
+                     recDelayMS + playDelayMS + 10 * (nItemsInList + 1),
+                     recDelayMS, playDelayMS, 10 * (nItemsInList + 1));
+
+            addMarker = false;
+        }
+
+        // Progress markers: "+" mono playout, "++" stereo playout.
+        if ((nChannels == 1) && addMarker)
+        {
+            TEST_LOG("+");
+        } else if ((nChannels == 2) && addMarker)
+        {
+            TEST_LOG("++");
+        }
+    } // if (_playCount % 100 == 0)
+
+    nSamplesOut = nSamples;
+
+    return 0;
+}
+;
+
+// Resolves the test resource directory and the playout files for the four
+// supported sample rates. All module pointers start NULL; they are created
+// in Init() and released in Close().
+FuncTestManager::FuncTestManager() :
+    _resourcePath(webrtc::test::ProjectRootPath() +
+        "test/data/audio_device/"),
+    _processThread(NULL),
+    _audioDevice(NULL),
+    _audioEventObserver(NULL),
+    _audioTransport(NULL)
+{
+  // ProjectRootPath() must have resolved; an empty path would make every
+  // file-based test fail silently later.
+  assert(!_resourcePath.empty());
+  _playoutFile48 = _resourcePath + "audio_short48.pcm";
+  _playoutFile44 = _resourcePath + "audio_short44.pcm";
+  _playoutFile16 = _resourcePath + "audio_short16.pcm";
+  _playoutFile8 = _resourcePath + "audio_short8.pcm";
+}
+
+// No teardown here; resources are released explicitly via Close().
+FuncTestManager::~FuncTestManager()
+{
+}
+
+// Creates and wires up the test environment: process thread, audio device
+// module (id 555, layer selected by ADM_AUDIO_LAYER), event observer and
+// audio transport. Returns 0 on success, -1 if a component could not be
+// created. Mirrored by Close().
+WebRtc_Word32 FuncTestManager::Init()
+{
+    TEST((_processThread = ProcessThread::CreateProcessThread()) != NULL);
+    if (_processThread == NULL)
+    {
+        return -1;
+    }
+    _processThread->Start();
+
+    // create the Audio Device module
+    TEST((_audioDevice = AudioDeviceModuleImpl::Create(
+        555, ADM_AUDIO_LAYER)) != NULL);
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+    TEST(_audioDevice->AddRef() == 1);
+
+    // register the Audio Device module
+    _processThread->RegisterModule(_audioDevice);
+
+    // register event observer
+    _audioEventObserver = new AudioEventObserver(_audioDevice);
+    TEST(_audioDevice->RegisterEventObserver(_audioEventObserver) == 0);
+
+    // register audio transport
+    _audioTransport = new AudioTransportImpl(_audioDevice);
+    TEST(_audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+    return 0;
+}
+
+// Tears down everything created in Init(), in reverse order: unregister
+// callbacks, terminate the ADM, stop and destroy the process thread, delete
+// the observer and transport, release the ADM reference, and return the
+// trace singleton. Always returns 0.
+WebRtc_Word32 FuncTestManager::Close()
+{
+    TEST(_audioDevice->RegisterEventObserver(NULL) == 0);
+    TEST(_audioDevice->RegisterAudioCallback(NULL) == 0);
+    TEST(_audioDevice->Terminate() == 0);
+
+    // release the ProcessThread object
+    if (_processThread)
+    {
+        _processThread->DeRegisterModule(_audioDevice);
+        _processThread->Stop();
+        ProcessThread::DestroyProcessThread(_processThread);
+    }
+
+    // delete the audio observer
+    if (_audioEventObserver)
+    {
+        delete _audioEventObserver;
+        _audioEventObserver = NULL;
+    }
+
+    // delete the audio transport
+    if (_audioTransport)
+    {
+        delete _audioTransport;
+        _audioTransport = NULL;
+    }
+
+    // release the AudioDeviceModule object
+    if (_audioDevice)
+    {
+        TEST(_audioDevice->Release() == 0);
+        _audioDevice = NULL;
+    }
+
+    // return the ThreadWrapper (singleton)
+    Trace::ReturnTrace();
+
+    // PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Dispatches one test run based on the menu selection. TTAll runs the main
+// suite in sequence; every other value runs exactly one test. Unknown values
+// (including TTInvalid) do nothing. Always returns 0.
+//
+// Fixed two missing `break` statements: TTAll previously fell through into
+// TTAudioLayerSelection (running that test a second time), and TTMobileAPI
+// fell through into TTTest (running TestExtra() after the mobile-API test).
+WebRtc_Word32 FuncTestManager::DoTest(const TestType testType)
+{
+    switch (testType)
+    {
+        case TTAll:
+            TestAudioLayerSelection();
+            TestDeviceEnumeration();
+            TestDeviceSelection();
+            TestAudioTransport();
+            TestSpeakerVolume();
+            TestMicrophoneVolume();
+            TestLoopback();
+            break;
+        case TTAudioLayerSelection:
+            TestAudioLayerSelection();
+            break;
+        case TTDeviceEnumeration:
+            TestDeviceEnumeration();
+            break;
+        case TTDeviceSelection:
+            TestDeviceSelection();
+            break;
+        case TTAudioTransport:
+            TestAudioTransport();
+            break;
+        case TTSpeakerVolume:
+            TestSpeakerVolume();
+            break;
+        case TTMicrophoneVolume:
+            TestMicrophoneVolume();
+            break;
+        case TTSpeakerMute:
+            TestSpeakerMute();
+            break;
+        case TTMicrophoneMute:
+            TestMicrophoneMute();
+            break;
+        case TTMicrophoneBoost:
+            TestMicrophoneBoost();
+            break;
+        case TTMicrophoneAGC:
+            TestMicrophoneAGC();
+            break;
+        case TTLoopback:
+            TestLoopback();
+            break;
+        case TTDeviceRemoval:
+            TestDeviceRemoval();
+            break;
+        case TTMobileAPI:
+            TestAdvancedMBAPI();
+            break;
+        case TTTest:
+            TestExtra();
+            break;
+        default:
+            break;
+    }
+
+    return 0;
+}
+
+// Reports the currently active audio layer and, on Windows, interactively
+// offers to switch between the Wave and Core audio layers. Switching
+// requires a full teardown and re-creation of the ADM, process thread,
+// observer and transport (the same sequence as Close() followed by Init(),
+// but with an explicitly chosen audio layer). Returns 0 on success, -1 if
+// a component could not be re-created.
+WebRtc_Word32 FuncTestManager::TestAudioLayerSelection()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Audio Layer test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    AudioDeviceModule::AudioLayer audioLayer;
+    TEST(audioDevice->ActiveAudioLayer(&audioLayer) == 0);
+
+    if (audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kWindowsWaveAudio\n \n");
+    } else if (audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kWindowsCoreAudio\n \n");
+    } else if (audioLayer == AudioDeviceModule::kLinuxAlsaAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kLinuxAlsaAudio\n \n");
+    } else if (audioLayer == AudioDeviceModule::kLinuxPulseAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kLinuxPulseAudio\n \n");
+    } else
+    {
+        TEST_LOG("\nActiveAudioLayer: INVALID\n \n");
+    }
+
+    // Ask the user (Windows only) whether to try the other audio layer.
+    char ch;
+    bool tryWinWave(false);
+    bool tryWinCore(false);
+
+    if (audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+    {
+        TEST_LOG("Would you like to try kWindowsCoreAudio instead "
+            "[requires Win Vista or Win 7] (Y/N)?\n: ");
+        TEST(scanf(" %c", &ch) > 0);
+        ch = toupper(ch);
+        if (ch == 'Y')
+        {
+            tryWinCore = true;
+        }
+    } else if (audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+    {
+        TEST_LOG("Would you like to try kWindowsWaveAudio instead (Y/N)?\n: ");
+        TEST(scanf(" %c", &ch) > 0);
+        ch = toupper(ch);
+        if (ch == 'Y')
+        {
+            tryWinWave = true;
+        }
+    }
+
+    if (tryWinWave || tryWinCore)
+    {
+        // =======================================
+        // First, close down what we have started
+        // (this duplicates the sequence in Close()).
+
+        // terminate
+        TEST(_audioDevice->RegisterEventObserver(NULL) == 0);
+        TEST(_audioDevice->RegisterAudioCallback(NULL) == 0);
+        TEST(_audioDevice->Terminate() == 0);
+
+        // release the ProcessThread object
+        if (_processThread)
+        {
+            _processThread->DeRegisterModule(_audioDevice);
+            _processThread->Stop();
+            ProcessThread::DestroyProcessThread(_processThread);
+        }
+
+        // delete the audio observer
+        if (_audioEventObserver)
+        {
+            delete _audioEventObserver;
+            _audioEventObserver = NULL;
+        }
+
+        // delete the audio transport
+        if (_audioTransport)
+        {
+            delete _audioTransport;
+            _audioTransport = NULL;
+        }
+
+        // release the AudioDeviceModule object
+        if (_audioDevice)
+        {
+            TEST(_audioDevice->Release() == 0);
+            _audioDevice = NULL;
+        }
+
+        // ==================================================
+        // Next, try to make fresh start with new audio layer
+        // (this duplicates the sequence in Init(), with the layer overridden).
+
+        TEST((_processThread = ProcessThread::CreateProcessThread()) != NULL);
+        if (_processThread == NULL)
+        {
+            return -1;
+        }
+        _processThread->Start();
+
+        // create the Audio Device module based on selected audio layer
+        if (tryWinWave)
+        {
+            _audioDevice = AudioDeviceModuleImpl::Create(
+                555,
+                AudioDeviceModule::kWindowsWaveAudio);
+        } else if (tryWinCore)
+        {
+            _audioDevice = AudioDeviceModuleImpl::Create(
+                555,
+                AudioDeviceModule::kWindowsCoreAudio);
+        }
+
+        if (_audioDevice == NULL)
+        {
+            TEST_LOG("\nERROR: Switch of audio layer failed!\n");
+            // restore default audio layer instead
+            TEST((_audioDevice = AudioDeviceModuleImpl::Create(
+                555, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+        }
+
+        if (_audioDevice == NULL)
+        {
+            TEST_LOG("\nERROR: Failed to revert back to default audio layer!\n");
+            return -1;
+        }
+
+        TEST(_audioDevice->AddRef() == 1);
+
+        // register the Audio Device module
+        _processThread->RegisterModule(_audioDevice);
+
+        // register event observer
+        _audioEventObserver = new AudioEventObserver(_audioDevice);
+        TEST(_audioDevice->RegisterEventObserver(_audioEventObserver) == 0);
+
+        // register audio transport
+        _audioTransport = new AudioTransportImpl(_audioDevice);
+        TEST(_audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+        TEST(_audioDevice->ActiveAudioLayer(&audioLayer) == 0);
+
+        // Report whether the requested switch actually took effect.
+        if (audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+        {
+            if (tryWinCore)
+                TEST_LOG("\nActiveAudioLayer: kWindowsWaveAudio <=> "
+                    "switch was *not* possible\n \n");
+            else
+                TEST_LOG("\nActiveAudioLayer: kWindowsWaveAudio <=> "
+                    "switch was possible\n \n");
+        } else if (audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+        {
+            if (tryWinWave)
+                TEST_LOG("\nActiveAudioLayer: kWindowsCoreAudio <=> "
+                    "switch was *not* possible\n \n");
+            else
+                TEST_LOG("\nActiveAudioLayer: kWindowsCoreAudio <=> "
+                    "switch was possible\n \n");
+        }
+    } // if (tryWinWave || tryWinCore)
+
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Enumerates playout and recording devices, printing the name and GUID of
+// each, and checks the behavior of the "default device" index (-1): valid on
+// Windows, expected to fail elsewhere. Returns 0 on success, -1 if no ADM.
+//
+// Fixed a copy-paste bug: the non-Windows "should fail" check in the
+// *recording* section previously called PlayoutDeviceName() instead of
+// RecordingDeviceName().
+WebRtc_Word32 FuncTestManager::TestDeviceEnumeration()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Device Enumeration test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize];
+    WebRtc_Word8 guid[kAdmMaxGuidSize];
+
+    const WebRtc_Word16 nPlayoutDevices(audioDevice->PlayoutDevices());
+    TEST(nPlayoutDevices >= 0);
+    TEST_LOG("\nPlayoutDevices: %u\n \n", nPlayoutDevices);
+    for (int n = 0; n < nPlayoutDevices; n++)
+    {
+        TEST(audioDevice->PlayoutDeviceName(n, name, guid) == 0);
+        TEST_LOG(
+                 "PlayoutDeviceName(%d) :   name=%s \n \
+	                 guid=%s\n",
+                 n, name, guid);
+    }
+
+#ifdef _WIN32
+    // default (-1)
+    TEST(audioDevice->PlayoutDeviceName(-1, name, guid) == 0);
+    TEST_LOG("PlayoutDeviceName(%d):   default name=%s \n \
+	                 default guid=%s\n", -1, name, guid);
+#else
+    // should fail
+    TEST(audioDevice->PlayoutDeviceName(-1, name, guid) == -1);
+#endif
+
+    const WebRtc_Word16 nRecordingDevices(audioDevice->RecordingDevices());
+    TEST(nRecordingDevices >= 0);
+    TEST_LOG("\nRecordingDevices: %u\n \n", nRecordingDevices);
+    for (int n = 0; n < nRecordingDevices; n++)
+    {
+        TEST(audioDevice->RecordingDeviceName(n, name, guid) == 0);
+        TEST_LOG(
+                 "RecordingDeviceName(%d) : name=%s \n \
+	                 guid=%s\n",
+                 n, name, guid);
+    }
+
+#ifdef _WIN32
+    // default (-1)
+    TEST(audioDevice->RecordingDeviceName(-1, name, guid) == 0);
+    TEST_LOG("RecordingDeviceName(%d): default name=%s \n \
+	                 default guid=%s\n", -1, name, guid);
+#else
+    // should fail
+    TEST(audioDevice->RecordingDeviceName(-1, name, guid) == -1);
+#endif
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestDeviceSelection
+//
+//  Enumerates all playout and recording devices, selects each device in turn
+//  and logs which capabilities are reported as available for it (playout/
+//  recording, stereo, speaker/microphone, volume, mute, boost).
+//  On _WIN32 the special kDefaultCommunicationDevice and kDefaultDevice
+//  selectors are exercised and expected to succeed; on all other platforms
+//  selecting them is expected to fail with -1.
+//  Returns 0 on success, -1 if no ADM has been attached.
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestDeviceSelection()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Device Selection test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+// Logging helpers local to this function. NOTE(review): each definition ends
+// with a trailing backslash after the closing brace, so the blank line that
+// follows is absorbed into the macro body; harmless, but easy to trip over
+// when editing. The macros are never #undef'ed.
+// PRINT_HEADING(a, b): logs "Set<a>Device(<b>) => " using token stringizing.
+#define PRINT_HEADING(a, b) \
+	{ \
+		TEST_LOG("Set" #a "Device(" #b ") => \n"); \
+	} \
+
+// PRINT_HEADING_IDX(a, b, c): logs "Set<a>Device(<index>) (<name>) => ".
+#define PRINT_HEADING_IDX(a, b,c ) \
+	{ \
+		TEST_LOG("Set" #a "Device(%d) (%s) => \n", b, c); \
+	} \
+
+// PRINT_STR(a, b): logs "  <a>: available" when b is true, "  <a>: NA" else.
+#define PRINT_STR(a, b) \
+	{ \
+                char str[128]; \
+                (b == true) ? (sprintf(str, "  %-17s: available\n", #a)) : (sprintf(str, "  %-17s: NA\n", #a)); \
+                TEST_LOG("%s", str); \
+	} \
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    bool available(false);
+    WebRtc_Word16 nDevices(-1);
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize];
+    WebRtc_Word8 guid[kAdmMaxGuidSize];
+
+    // =======
+    // Playout
+
+    nDevices = audioDevice->PlayoutDevices();
+    TEST(nDevices >= 0);
+
+    TEST_LOG("\n");
+#ifdef _WIN32
+    // Windows-only: the "default communication" device selector.
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    PRINT_HEADING(Playout, kDefaultCommunicationDevice);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    PRINT_STR(Playout, available);
+    if (available)
+    {
+        TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+        PRINT_STR(Stereo Playout, available);
+    }
+    else
+    {
+        // Don't query stereo when playout itself is unavailable.
+        PRINT_STR(Stereo Playout, false);
+    }
+    TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+    PRINT_STR(Speaker, available);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    PRINT_STR(Speaker Volume, available);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    PRINT_STR(Speaker Mute, available);
+
+    // Windows-only: the "default" device selector.
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == 0);
+    PRINT_HEADING(Playout, kDefaultDevice);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    PRINT_STR(Playout, available);
+    if (available)
+    {
+        TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+        PRINT_STR(Stereo Playout, available);
+    }
+    else
+    {
+        PRINT_STR(Stereo Playout, false);
+    }
+    TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+    PRINT_STR(Speaker, available);
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    PRINT_STR(Speaker Volume, available);
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    PRINT_STR(Speaker Mute, available);
+#else
+    // Non-Windows: the special device selectors must be rejected.
+    TEST(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+    TEST(audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice) == -1);
+#endif
+
+    // Select each enumerated playout device by index and log capabilities.
+    for (int i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetPlayoutDevice(i) == 0);
+        TEST(audioDevice->PlayoutDeviceName(i, name, guid) == 0);
+        PRINT_HEADING_IDX(Playout, i, name);
+        TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+        PRINT_STR(Playout, available);
+        if (available)
+        {
+            TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+            PRINT_STR(Stereo Playout, available);
+        } else
+        {
+            PRINT_STR(Stereo Playout, false);
+        }
+        TEST(audioDevice->SpeakerIsAvailable(&available) == 0);
+        PRINT_STR(Speaker, available);
+        TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+        PRINT_STR(Speaker Volume, available);
+        TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+        PRINT_STR(Speaker Mute, available);
+    }
+
+    // =========
+    // Recording
+
+    nDevices = audioDevice->RecordingDevices();
+    TEST(nDevices >= 0);
+
+    TEST_LOG("\n");
+#ifdef _WIN32
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    PRINT_HEADING(Recording, kDefaultCommunicationDevice);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    PRINT_STR(Recording, available);
+    if (available)
+    {
+        TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+        PRINT_STR(Stereo Recording, available);
+    }
+    else
+    {
+        // special fix to ensure that we don't log 'available' when recording is not OK
+        PRINT_STR(Stereo Recording, false);
+    }
+    TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+    PRINT_STR(Microphone, available);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    PRINT_STR(Microphone Volume, available);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    PRINT_STR(Microphone Mute, available);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    PRINT_STR(Microphone Boost, available);
+
+    TEST(audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice) == 0);
+    PRINT_HEADING(Recording, kDefaultDevice);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    PRINT_STR(Recording, available);
+    if (available)
+    {
+        TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+        PRINT_STR(Stereo Recording, available);
+    }
+    else
+    {
+        // special fix to ensure that we don't log 'available' when recording is not OK
+        PRINT_STR(Stereo Recording, false);
+    }
+    TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+    PRINT_STR(Microphone, available);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    PRINT_STR(Microphone Volume, available);
+    TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+    PRINT_STR(Microphone Mute, available);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    PRINT_STR(Microphone Boost, available);
+#else
+    TEST(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+    TEST(audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice) == -1);
+#endif
+
+    // Select each enumerated recording device by index and log capabilities.
+    for (int i = 0; i < nDevices; i++)
+    {
+        TEST(audioDevice->SetRecordingDevice(i) == 0);
+        TEST(audioDevice->RecordingDeviceName(i, name, guid) == 0);
+        PRINT_HEADING_IDX(Recording, i, name);
+        TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+        PRINT_STR(Recording, available);
+        if (available)
+        {
+            TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+            PRINT_STR(Stereo Recording, available);
+        } else
+        {
+            // special fix to ensure that we don't log 'available' when recording
+            // is not OK
+            PRINT_STR(Stereo Recording, false);
+        }
+        TEST(audioDevice->MicrophoneIsAvailable(&available) == 0);
+        PRINT_STR(Microphone, available);
+        TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+        PRINT_STR(Microphone Volume, available);
+        TEST(audioDevice->MicrophoneMuteIsAvailable(&available) == 0);
+        PRINT_STR(Microphone Mute, available);
+        TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+        PRINT_STR(Microphone Boost, available);
+    }
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestAudioTransport
+//
+//  Interactive transport test in four stages:
+//    1) play out a known PCM file matched to the device sample rate,
+//    2) record the microphone to a raw PCM file (forced to mono),
+//    3) play the recorded file back,
+//    4) run full-duplex loopback (mic -> speaker).
+//  Stages are skipped when playout/recording is unavailable for the selected
+//  devices. Returns 0 on success, -1 on setup failure or unsupported rate.
+//
+//  Fix: the "fs=%d" log used %d for the unsigned samplesPerSec
+//  (WebRtc_UWord32); changed to %u to match the other logs in this function.
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestAudioTransport()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Audio Transport test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    bool recIsAvailable(false);
+    bool playIsAvailable(false);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    // Recording being unavailable is a warning only; the playout-only stage
+    // can still run.
+    TEST(audioDevice->RecordingIsAvailable(&recIsAvailable) == 0);
+    if (!recIsAvailable)
+    {
+        TEST_LOG(
+                 "\nWARNING: Recording is not available for the selected device!\n \n");
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    TEST(audioDevice->PlayoutIsAvailable(&playIsAvailable) == 0);
+    if (recIsAvailable && playIsAvailable)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else if (!playIsAvailable)
+    {
+        TEST_LOG(
+                 "\nWARNING: Playout is not available for the selected device!\n \n");
+    }
+
+    bool available(false);
+    WebRtc_UWord32 samplesPerSec(0);
+
+    if (playIsAvailable)
+    {
+        // =========================================
+        // Start by playing out an existing PCM file
+
+        // Use half of max volume so the prompt is audible but not painful.
+        TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            TEST(audioDevice->MaxSpeakerVolume(&maxVolume) == 0);
+            TEST(audioDevice->SetSpeakerVolume(maxVolume/2) == 0);
+        }
+
+        TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+        // Pick the playout file that matches the device sample rate.
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->PlayoutSampleRate(&samplesPerSec) == 0);
+        if (samplesPerSec == 48000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile48.c_str()));
+        } else if (samplesPerSec == 44100 || samplesPerSec == 44000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile44.c_str()));
+        } else if (samplesPerSec == 16000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile16.c_str()));
+        } else if (samplesPerSec == 8000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile8.c_str()));
+        } else {
+            TEST_LOG("\nERROR: Sample rate (%u) is not supported!\n \n",
+                     samplesPerSec);
+            return -1;
+        }
+        TEST(audioDevice->StartPlayout() == 0);
+
+        if (audioDevice->Playing())
+        {
+            // %u: samplesPerSec is WebRtc_UWord32 (was %d).
+            TEST_LOG("\n> Listen to the file being played (fs=%u) out "
+                "and verify that the audio quality is OK.\n"
+                "> Press any key to stop playing...\n \n",
+                samplesPerSec);
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+        _audioTransport->SetFilePlayout(false);
+    }
+
+    bool enabled(false);
+    if (recIsAvailable)
+    {
+        // ====================================
+        // Next, record from microphone to file
+
+        TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+            TEST(audioDevice->SetMicrophoneVolume(maxVolume) == 0);
+        }
+
+        TEST(audioDevice->StartRawInputFileRecording(
+            GetFilename(RecordedMicrophoneFile)) == 0);
+        TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        if (enabled)
+        {
+            // ensure file recording in mono
+            TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelLeft) == 0);
+        }
+        TEST(audioDevice->StartRecording() == 0);
+        AudioDeviceUtility::Sleep(100);
+
+        TEST(audioDevice->Recording() == true);
+        if (audioDevice->Recording())
+        {
+            TEST_LOG("\n \n> The microphone input signal is now being recorded "
+                "to a PCM file.\n"
+                "> Speak into the microphone to ensure that your voice is"
+                " recorded.\n> Press any key to stop recording...\n \n");
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        // Restore stereo recording if we forced mono above.
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        if (enabled)
+        {
+            TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelBoth) == 0);
+        }
+        TEST(audioDevice->StopRecording() == 0);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+        TEST(audioDevice->StopRawInputFileRecording() == 0);
+    }
+
+    if (recIsAvailable && playIsAvailable)
+    {
+        // ==========================
+        // Play out the recorded file
+
+        _audioTransport->SetFilePlayout(true,
+                                        GetFilename(RecordedMicrophoneFile));
+
+        TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+        TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->InitPlayout() == 0);
+            TEST(audioDevice->StartPlayout() == 0);
+            AudioDeviceUtility::Sleep(100);
+        }
+
+        TEST(audioDevice->Playing() == true);
+        if (audioDevice->Playing())
+        {
+            TEST_LOG("\n \n> Listen to the recorded file and verify that the "
+                "audio quality is OK.\n"
+                "> Press any key to stop listening...\n \n");
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+        _audioTransport->SetFilePlayout(false);
+    }
+
+    if (recIsAvailable && playIsAvailable)
+    {
+        // ==============================
+        // Finally, make full duplex test
+
+        WebRtc_UWord32 playSamplesPerSec(0);
+        WebRtc_UWord32 recSamplesPerSecRec(0);
+
+        TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+        _audioTransport->SetFullDuplex(true);
+
+        TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+            TEST(audioDevice->SetMicrophoneVolume(maxVolume) == 0);
+        }
+
+        // Loopback only works when both directions run at the same rate;
+        // bail out (after cleanup) if they differ.
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->PlayoutSampleRate(&playSamplesPerSec) == 0);
+        TEST(audioDevice->RecordingSampleRate(&recSamplesPerSecRec) == 0);
+        if (playSamplesPerSec != recSamplesPerSecRec)
+        {
+            TEST_LOG("\nERROR: sample rates does not match (fs_play=%u, fs_rec=%u)",
+                     playSamplesPerSec, recSamplesPerSecRec);
+            TEST(audioDevice->StopRecording() == 0);
+            TEST(audioDevice->StopPlayout() == 0);
+            TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+            _audioTransport->SetFullDuplex(false);
+            return -1;
+        }
+
+        TEST(audioDevice->StartRecording() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+        AudioDeviceUtility::Sleep(100);
+
+        if (audioDevice->Playing() && audioDevice->Recording())
+        {
+            TEST_LOG("\n \n> Full duplex audio (fs=%u) is now active.\n"
+                "> Speak into the microphone and verify that your voice is "
+                "played out in loopback.\n> Press any key to stop...\n \n",
+                     playSamplesPerSec);
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        TEST(audioDevice->StopRecording() == 0);
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+        _audioTransport->SetFullDuplex(false);
+    }
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestSpeakerVolume
+//
+//  Interactive test: plays a PCM file while the transport continuously sweeps
+//  the speaker volume between ~0 and ~MAX (enabled via SetSpeakerVolume(true)
+//  on the transport). The initial device volume is saved and restored at the
+//  end. Returns 0 on success, -1 when no ADM is attached, device selection
+//  fails, volume control is unavailable, or the sample rate is unsupported.
+//
+//  Fix: the "Sample rate (%d)" log used %d for the unsigned samplesPerSec
+//  (WebRtc_UWord32); changed to %u for a correct format specifier.
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestSpeakerVolume()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Speaker Volume test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    WebRtc_UWord32 startVolume(0);
+    WebRtc_UWord32 samplesPerSec(0);
+
+    TEST(audioDevice->SpeakerVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        // Let the transport vary the volume while playing.
+        _audioTransport->SetSpeakerVolume(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Volume control is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    // store initial volume setting
+    TEST(audioDevice->InitSpeaker() == 0);
+    TEST(audioDevice->SpeakerVolume(&startVolume) == 0);
+
+    // start at volume 0
+    TEST(audioDevice->SetSpeakerVolume(0) == 0);
+
+    // ======================================
+    // Start playing out an existing PCM file
+
+    TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        // Pick the playout file that matches the device sample rate.
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->PlayoutSampleRate(&samplesPerSec) == 0);
+        if (48000 == samplesPerSec) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile48.c_str()));
+        } else if (44100 == samplesPerSec || samplesPerSec == 44000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile44.c_str()));
+        } else if (samplesPerSec == 16000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile16.c_str()));
+        } else if (samplesPerSec == 8000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile8.c_str()));
+        } else {
+            // %u: samplesPerSec is WebRtc_UWord32 (was %d).
+            TEST_LOG("\nERROR: Sample rate (%u) is not supported!\n \n",
+                     samplesPerSec);
+            return -1;
+        }
+        TEST(audioDevice->StartPlayout() == 0);
+    }
+
+    TEST(audioDevice->Playing() == true);
+    if (audioDevice->Playing())
+    {
+        TEST_LOG("\n> Listen to the file being played out and verify that the "
+            "selected speaker volume is varied between [~0] and [~MAX].\n"
+            "> The file shall be played out with an increasing volume level "
+            "correlated to the speaker volume.\n"
+            "> Press any key to stop playing...\n \n");
+        PAUSE(10000);
+    }
+
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    _audioTransport->SetSpeakerVolume(false);
+    _audioTransport->SetFilePlayout(false);
+
+    // restore volume setting
+    TEST(audioDevice->SetSpeakerVolume(startVolume) == 0);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestSpeakerMute
+//
+//  Interactive test: plays a PCM file while the transport continuously
+//  toggles speaker mute on/off (enabled via SetSpeakerMute(true) on the
+//  transport). The initial mute state is saved and restored at the end.
+//  Returns 0 on success, -1 when no ADM is attached, device selection fails,
+//  mute control is unavailable, or the sample rate is unsupported.
+//
+//  Fixes: (1) %d -> %u for the unsigned samplesPerSec in the error log;
+//  (2) added 16 kHz and 8 kHz playout-file branches for consistency with the
+//  other playout tests (previously those rates errored out here).
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestSpeakerMute()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Speaker Mute test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    bool startMute(false);
+    WebRtc_UWord32 samplesPerSec(0);
+
+    TEST(audioDevice->SpeakerMuteIsAvailable(&available) == 0);
+    if (available)
+    {
+        // Let the transport toggle mute while playing.
+        _audioTransport->SetSpeakerMute(true);
+    } else
+    {
+        TEST_LOG(
+                 "\nERROR: Mute control is not available for the selected"
+                 " device!\n \n");
+        return -1;
+    }
+
+    // store initial mute setting
+    TEST(audioDevice->InitSpeaker() == 0);
+    TEST(audioDevice->SpeakerMute(&startMute) == 0);
+
+    // start with no mute
+    TEST(audioDevice->SetSpeakerMute(false) == 0);
+
+    // ======================================
+    // Start playing out an existing PCM file
+
+    TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        // Pick the playout file that matches the device sample rate.
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->PlayoutSampleRate(&samplesPerSec) == 0);
+        if (48000 == samplesPerSec)
+            _audioTransport->SetFilePlayout(true, _playoutFile48.c_str());
+        else if (44100 == samplesPerSec || 44000 == samplesPerSec)
+            _audioTransport->SetFilePlayout(true, _playoutFile44.c_str());
+        else if (16000 == samplesPerSec)
+            _audioTransport->SetFilePlayout(true, _playoutFile16.c_str());
+        else if (8000 == samplesPerSec)
+            _audioTransport->SetFilePlayout(true, _playoutFile8.c_str());
+        else
+        {
+            // %u: samplesPerSec is WebRtc_UWord32 (was %d).
+            TEST_LOG("\nERROR: Sample rate (%u) is not supported!\n \n",
+                     samplesPerSec);
+            return -1;
+        }
+        TEST(audioDevice->StartPlayout() == 0);
+    }
+
+    TEST(audioDevice->Playing() == true);
+    if (audioDevice->Playing())
+    {
+        TEST_LOG("\n> Listen to the file being played out and verify that the"
+            " selected speaker mute control is toggled between [MUTE ON] and"
+            " [MUTE OFF].\n> You should only hear the file during the"
+            " 'MUTE OFF' periods.\n"
+            "> Press any key to stop playing...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    _audioTransport->SetSpeakerMute(false);
+    _audioTransport->SetFilePlayout(false);
+
+    // restore mute setting
+    TEST(audioDevice->SetSpeakerMute(startMute) == 0);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestMicrophoneVolume
+//
+//  Interactive test: records from the microphone while the transport sweeps
+//  the microphone volume between ~0 and ~MAX, and simultaneously plays the
+//  input back (full duplex) for real-time verification. Optionally dumps the
+//  raw input to RecordedMicrophoneVolumeFile (Y/N prompt). The initial
+//  microphone volume is saved and restored at the end. Returns 0 on success,
+//  -1 when no ADM is attached, device selection fails, or volume control /
+//  playout is unavailable.
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestMicrophoneVolume()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone Volume test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        // Let the transport vary the microphone volume while recording.
+        _audioTransport->SetMicrophoneVolume(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Volume control is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    // Ask the user whether the raw microphone input should also be dumped
+    // to file for off-line verification.
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during this"
+        " test (Y/N)?\n: ",
+             RecordedMicrophoneVolumeFile);
+    char ch;
+    bool fileRecording(false);
+    TEST(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    WebRtc_UWord32 startVolume(0);
+    bool enabled(false);
+
+    // store initial volume setting
+    TEST(audioDevice->InitMicrophone() == 0);
+    TEST(audioDevice->MicrophoneVolume(&startVolume) == 0);
+
+    // start at volume 0
+    TEST(audioDevice->SetMicrophoneVolume(0) == 0);
+
+    // ======================================================================
+    // Start recording from the microphone while the mic volume is changed
+    // continuously.
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        TEST(audioDevice->StartRawInputFileRecording(RecordedMicrophoneVolumeFile) == 0);
+    }
+    TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        if (enabled)
+        {
+            // ensures a mono file
+            // NOTE(review): the other microphone tests force kChannelLeft
+            // here; this one uses kChannelRight — presumably intentional,
+            // but worth confirming.
+            TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelRight) == 0);
+        }
+        TEST(audioDevice->StartRecording() == 0);
+    }
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+    }
+
+    TEST(audioDevice->Recording() == true);
+    TEST(audioDevice->Playing() == true);
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the selected "
+            "microphone volume is varied between [~0] and [~MAX].\n"
+            "> You should hear your own voice with an increasing volume level"
+            " correlated to the microphone volume.\n"
+            "> After a finalized test (and if file recording was enabled) "
+            "verify the recorded result off line.\n"
+            "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    if (fileRecording)
+    {
+        TEST(audioDevice->StopRawInputFileRecording() == 0);
+    }
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+    // NOTE(review): the result of this query is never used — looks like a
+    // leftover; removing it would change the TEST pass/fail counters, so it
+    // is only flagged here.
+    TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+
+    _audioTransport->SetMicrophoneVolume(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore volume setting
+    TEST(audioDevice->SetMicrophoneVolume(startVolume) == 0);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestMicrophoneMute
+//
+//  Interactive test: records from the microphone while the transport toggles
+//  microphone mute on/off, and plays the input back in full duplex so the
+//  user can verify the toggling by ear. Optionally dumps the raw input to
+//  RecordedMicrophoneMuteFile (Y/N prompt). The initial mute state is saved
+//  and restored before returning. Returns 0 on success, -1 when no ADM is
+//  attached, device selection fails, or mute control/playout is unavailable.
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestMicrophoneMute()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone Mute test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* adm = _audioDevice;
+
+    TEST(adm->Init() == 0);
+    TEST(adm->Initialized() == true);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    // Guard: mute control must be supported by the selected capture device.
+    bool supported(false);
+    TEST(adm->MicrophoneMuteIsAvailable(&supported) == 0);
+    if (!supported)
+    {
+        TEST_LOG("\nERROR: Mute control is not available for the selected"
+            " device!\n \n");
+        return -1;
+    }
+    // Let the transport toggle the mute state while recording.
+    _audioTransport->SetMicrophoneMute(true);
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    // Guard: playout must be supported so the loopback can be heard.
+    TEST(adm->PlayoutIsAvailable(&supported) == 0);
+    if (!supported)
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+    _audioTransport->SetFullDuplex(true);
+
+    // Ask whether the raw microphone input should also be dumped to file.
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during this "
+        "test (Y/N)?\n: ",
+        RecordedMicrophoneMuteFile);
+    char answer;
+    TEST(scanf(" %c", &answer) > 0);
+    const bool recordToFile = (toupper(answer) == 'Y');
+
+    bool initialMute(false);
+    bool stereoEnabled(false);
+
+    // Remember the current mute state so it can be restored afterwards,
+    // then make sure the test starts unmuted.
+    TEST(adm->InitMicrophone() == 0);
+    TEST(adm->MicrophoneMute(&initialMute) == 0);
+    TEST(adm->SetMicrophoneMute(false) == 0);
+
+    // ==================================================================
+    // Record from the microphone (mute toggling continuously) and play
+    // the input back at the same time for real-time verification.
+
+    if (recordToFile)
+    {
+        TEST(adm->StartRawInputFileRecording(RecordedMicrophoneMuteFile) == 0);
+    }
+    TEST(adm->RegisterAudioCallback(_audioTransport) == 0);
+
+    TEST(adm->RecordingIsAvailable(&supported) == 0);
+    if (supported)
+    {
+        TEST(adm->InitRecording() == 0);
+        TEST(adm->StereoRecording(&stereoEnabled) == 0);
+        if (stereoEnabled)
+        {
+            // Record a single channel so the dump file stays mono.
+            TEST(adm->SetRecordingChannel(AudioDeviceModule::kChannelLeft) == 0);
+        }
+        TEST(adm->StartRecording() == 0);
+    }
+
+    TEST(adm->PlayoutIsAvailable(&supported) == 0);
+    if (supported)
+    {
+        TEST(adm->InitPlayout() == 0);
+        TEST(adm->StartPlayout() == 0);
+    }
+
+    TEST(adm->Recording() == true);
+    TEST(adm->Playing() == true);
+    if (adm->Recording() && adm->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the selected "
+            "microphone mute control is toggled between [MUTE ON] and [MUTE OFF]."
+            "\n> You should only hear your own voice in loopback during the"
+            " 'MUTE OFF' periods.\n> After a finalized test (and if file "
+            "recording was enabled) verify the recorded result off line.\n"
+            "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    // Tear down in reverse order of setup.
+    if (recordToFile)
+    {
+        TEST(adm->StopRawInputFileRecording() == 0);
+    }
+    TEST(adm->StopRecording() == 0);
+    TEST(adm->StopPlayout() == 0);
+    TEST(adm->RegisterAudioCallback(NULL) == 0);
+
+    _audioTransport->SetMicrophoneMute(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore mute setting
+    TEST(adm->SetMicrophoneMute(initialMute) == 0);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TestMicrophoneBoost
+//
+//  Interactive test: records from the microphone while the transport toggles
+//  microphone boost on/off, and plays the input back in full duplex so the
+//  user can verify the level change by ear. Optionally dumps the raw input
+//  to RecordedMicrophoneBoostFile (Y/N prompt). The initial boost state is
+//  saved and restored before returning. Returns 0 on success, -1 when no ADM
+//  is attached, device selection fails, or boost control/playout is
+//  unavailable.
+// ----------------------------------------------------------------------------
+WebRtc_Word32 FuncTestManager::TestMicrophoneBoost()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone Boost test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    TEST(audioDevice->MicrophoneBoostIsAvailable(&available) == 0);
+    if (available)
+    {
+        // Let the transport toggle the boost state while recording.
+        _audioTransport->SetMicrophoneBoost(true);
+    } else
+    {
+        TEST_LOG(
+                 "\nERROR: Boost control is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    // Ask whether the raw microphone input should also be dumped to file.
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during this "
+        "test (Y/N)?\n: ",
+        RecordedMicrophoneBoostFile);
+    char ch;
+    bool fileRecording(false);
+    TEST(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    bool startBoost(false);
+    bool enabled(false);
+
+    // store initial boost setting
+    TEST(audioDevice->InitMicrophone() == 0);
+    TEST(audioDevice->MicrophoneBoost(&startBoost) == 0);
+
+    // start at no boost
+    TEST(audioDevice->SetMicrophoneBoost(false) == 0);
+
+    // ==================================================================
+    // Start recording from the microphone while the mic boost is toggled
+    // continuously.
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        TEST(audioDevice->StartRawInputFileRecording(RecordedMicrophoneBoostFile) == 0);
+    }
+    TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        if (enabled)
+        {
+            // ensure file recording in mono
+            TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelLeft) == 0);
+        }
+        TEST(audioDevice->StartRecording() == 0);
+    }
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+    }
+
+    TEST(audioDevice->Recording() == true);
+    TEST(audioDevice->Playing() == true);
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the selected "
+            "microphone boost control is toggled between [BOOST ON] and [BOOST OFF].\n"
+            "> You should hear your own voice with an increased volume level "
+            "during the 'BOOST ON' periods.\n \n"
+            "> After a finalized test (and if file recording was enabled) verify"
+            " the recorded result off line.\n"
+        "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    if (fileRecording)
+    {
+        TEST(audioDevice->StopRawInputFileRecording() == 0);
+    }
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    _audioTransport->SetMicrophoneBoost(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore boost setting
+    TEST(audioDevice->SetMicrophoneBoost(startBoost) == 0);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Interactive test of the (emulated) automatic gain control: records from
+// the selected microphone while the registered audio transport continuously
+// adjusts the microphone volume, and plays the captured input back so the
+// operator can verify the effect in real time. Optionally dumps the raw
+// microphone input to RecordedMicrophoneAGCFile.
+// Returns 0 on completion, -1 if no ADM is attached or a device
+// selection/capability check fails.
+WebRtc_Word32 FuncTestManager::TestMicrophoneAGC()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone AGC test:\n");
+    TEST_LOG("=======================================\n");
+
+    // Cannot run without an attached audio device module.
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    // AGC emulation requires volume control on the capture device.
+    bool available(false);
+    TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+    if (available)
+    {
+        _audioTransport->SetMicrophoneAGC(true);
+    } else
+    {
+        TEST_LOG("\nERROR: It is not possible to control the microphone volume"
+            " for the selected device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    // Full-duplex mode is needed so the captured input can be played out.
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    // Ask the operator whether the raw microphone input should be written
+    // to file for off-line verification.
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during "
+        "this test (Y/N)?\n: ",
+        RecordedMicrophoneAGCFile);
+    char ch;
+    bool fileRecording(false);
+    TEST(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    WebRtc_UWord32 startVolume(0);
+    bool enabled(false);
+
+    // store initial volume setting
+    TEST(audioDevice->InitMicrophone() == 0);
+    TEST(audioDevice->MicrophoneVolume(&startVolume) == 0);
+
+    // ====================================================================
+    // Start recording from the microphone while the mic volume is changed
+    // continuously
+    // by the emulated AGC (implemented by our audio transport).
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        TEST(audioDevice->StartRawInputFileRecording(RecordedMicrophoneAGCFile) == 0);
+    }
+    TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+    TEST(audioDevice->RecordingIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->SetAGC(true) == 0);
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        if (enabled)
+        {
+            // ensures a mono file
+            TEST(audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelRight) == 0);
+        }
+        TEST(audioDevice->StartRecording() == 0);
+    }
+    TEST(audioDevice->PlayoutIsAvailable(&available) == 0);
+    if (available)
+    {
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+    }
+
+    // Both directions must be active for the interactive verification.
+    TEST(audioDevice->AGC() == true);
+    TEST(audioDevice->Recording() == true);
+    TEST(audioDevice->Playing() == true);
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the volume of"
+            " the selected microphone is varied between [~0] and [~MAX].\n"
+            "> You should hear your own voice with an increasing volume level"
+            " correlated to an emulated AGC setting.\n"
+            "> After a finalized test (and if file recording was enabled) verify"
+            " the recorded result off line.\n"
+            "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    // Tear down in reverse order and detach the transport callback.
+    if (fileRecording)
+    {
+        TEST(audioDevice->StopRawInputFileRecording() == 0);
+    }
+    TEST(audioDevice->SetAGC(false) == 0);
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+    // NOTE(review): the result of this availability query is never used —
+    // looks like leftover code; confirm before removing.
+    TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+
+    _audioTransport->SetMicrophoneAGC(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore volume setting
+    TEST(audioDevice->SetMicrophoneVolume(startVolume) == 0);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Interactive loopback measurement test: captures from the selected
+// recording device and immediately plays the audio out on the selected
+// playout device (stereo and max microphone volume are enabled where
+// supported). The operator verifies the loopback by ear for up to 30 s.
+// Returns 0 on completion, -1 if no ADM is attached or a required device
+// is unavailable.
+WebRtc_Word32 FuncTestManager::TestLoopback()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Loopback measurement test:\n");
+    TEST_LOG("=======================================\n");
+
+    // Cannot run without an attached audio device module.
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    bool recIsAvailable(false);
+    bool playIsAvailable(false);
+    WebRtc_UWord8 nPlayChannels(0);
+    WebRtc_UWord8 nRecChannels(0);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    TEST(audioDevice->RecordingIsAvailable(&recIsAvailable) == 0);
+    if (!recIsAvailable)
+    {
+        TEST_LOG("\nERROR: Recording is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    // Loopback requires both directions; enable full duplex and the
+    // transport's loopback measurements only when both are available.
+    TEST(audioDevice->PlayoutIsAvailable(&playIsAvailable) == 0);
+    if (recIsAvailable && playIsAvailable)
+    {
+        _audioTransport->SetFullDuplex(true);
+        _audioTransport->SetLoopbackMeasurements(true);
+    } else if (!playIsAvailable)
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    bool enabled(false);
+    bool available(false);
+
+    if (recIsAvailable && playIsAvailable)
+    {
+        WebRtc_UWord32 playSamplesPerSec(0);
+        WebRtc_UWord32 recSamplesPerSecRec(0);
+
+        TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+        _audioTransport->SetFullDuplex(true);
+
+        // Prefer stereo in both directions when the devices support it.
+        TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->SetStereoRecording(true) == 0);
+        }
+
+        TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+        if (available)
+        {
+            TEST(audioDevice->SetStereoPlayout(true) == 0);
+        }
+
+        // Use maximum microphone volume when volume control is supported.
+        TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+            TEST(audioDevice->SetMicrophoneVolume(maxVolume) == 0);
+        }
+
+        // Query the effective rates/channel counts after init so the
+        // operator prompt below reports the actual configuration.
+        TEST(audioDevice->InitRecording() == 0);
+        TEST(audioDevice->InitPlayout() == 0);
+        TEST(audioDevice->PlayoutSampleRate(&playSamplesPerSec) == 0);
+        TEST(audioDevice->RecordingSampleRate(&recSamplesPerSecRec) == 0);
+        TEST(audioDevice->StereoPlayout(&enabled) == 0);
+        enabled ? nPlayChannels = 2 : nPlayChannels = 1;
+        TEST(audioDevice->StereoRecording(&enabled) == 0);
+        enabled ? nRecChannels = 2 : nRecChannels = 1;
+        TEST(audioDevice->StartRecording() == 0);
+        TEST(audioDevice->StartPlayout() == 0);
+
+        if (audioDevice->Playing() && audioDevice->Recording())
+        {
+            TEST_LOG("\n \n> Loopback audio is now active.\n"
+               "> Rec : fs=%u, #channels=%u.\n"
+                "> Play: fs=%u, #channels=%u.\n"
+                "> Speak into the microphone and verify that your voice is"
+                "  played out in loopback.\n"
+                "> Press any key to stop...\n \n",
+                recSamplesPerSecRec, nRecChannels, playSamplesPerSec,
+                nPlayChannels);
+            // Fixed 30 s upper bound for this test (not DEFAULT_PAUSE_TIME).
+            PAUSE(30000);
+        }
+
+        TEST(audioDevice->StopRecording() == 0);
+        TEST(audioDevice->StopPlayout() == 0);
+        TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+        _audioTransport->SetFullDuplex(false);
+        _audioTransport->SetLoopbackMeasurements(false);
+    }
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Interactive device-removal test. Runs two loopback iterations: in the
+// first the operator unplugs the active device and the test waits for an
+// error callback from the ADM (except on PulseAudio, where the stream is
+// expected to migrate to the built-in soundcard); in the second iteration
+// the re-inserted device is enumerated and used for a normal loopback.
+// Returns 0 on completion, -1 if no ADM is attached or device
+// selection/availability fails.
+WebRtc_Word32 FuncTestManager::TestDeviceRemoval()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Device removal test:\n");
+    TEST_LOG("=======================================\n");
+
+    // Cannot run without an attached audio device module.
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    bool recIsAvailable(false);
+    bool playIsAvailable(false);
+    WebRtc_UWord8 nPlayChannels(0);
+    WebRtc_UWord8 nRecChannels(0);
+    // Pass 0: unplug scenario. Pass 1: replug-and-verify scenario.
+    WebRtc_UWord8 loopCount(0);
+
+    while (loopCount < 2)
+    {
+        if (SelectRecordingDevice() == -1)
+        {
+            TEST_LOG("\nERROR: Device selection failed!\n \n");
+            return -1;
+        }
+
+        TEST(audioDevice->RecordingIsAvailable(&recIsAvailable) == 0);
+        if (!recIsAvailable)
+        {
+            TEST_LOG("\nERROR: Recording is not available for the selected device!\n \n");
+            return -1;
+        }
+
+        if (SelectPlayoutDevice() == -1)
+        {
+            TEST_LOG("\nERROR: Device selection failed!\n \n");
+            return -1;
+        }
+
+        TEST(audioDevice->PlayoutIsAvailable(&playIsAvailable) == 0);
+        if (recIsAvailable && playIsAvailable)
+        {
+            _audioTransport->SetFullDuplex(true);
+        } else if (!playIsAvailable)
+        {
+            TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+            return -1;
+        }
+
+        bool available(false);
+        bool enabled(false);
+
+        if (recIsAvailable && playIsAvailable)
+        {
+            WebRtc_UWord32 playSamplesPerSec(0);
+            WebRtc_UWord32 recSamplesPerSecRec(0);
+
+            TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+
+            _audioTransport->SetFullDuplex(true);
+
+            // Prefer stereo in both directions when supported.
+            TEST(audioDevice->StereoRecordingIsAvailable(&available) == 0);
+            if (available)
+            {
+                TEST(audioDevice->SetStereoRecording(true) == 0);
+            }
+
+            TEST(audioDevice->StereoPlayoutIsAvailable(&available) == 0);
+            if (available)
+            {
+                TEST(audioDevice->SetStereoPlayout(true) == 0);
+            }
+
+            // Use maximum microphone volume when volume control is supported.
+            TEST(audioDevice->MicrophoneVolumeIsAvailable(&available) == 0);
+            if (available)
+            {
+                WebRtc_UWord32 maxVolume(0);
+                TEST(audioDevice->MaxMicrophoneVolume(&maxVolume) == 0);
+                TEST(audioDevice->SetMicrophoneVolume(maxVolume) == 0);
+            }
+
+            // Query effective rates/channels after init for the prompts below.
+            TEST(audioDevice->InitRecording() == 0);
+            TEST(audioDevice->InitPlayout() == 0);
+            TEST(audioDevice->PlayoutSampleRate(&playSamplesPerSec) == 0);
+            TEST(audioDevice->RecordingSampleRate(&recSamplesPerSecRec) == 0);
+            TEST(audioDevice->StereoPlayout(&enabled) == 0);
+            enabled ? nPlayChannels = 2 : nPlayChannels = 1;
+            TEST(audioDevice->StereoRecording(&enabled) == 0);
+            enabled ? nRecChannels = 2 : nRecChannels = 1;
+            TEST(audioDevice->StartRecording() == 0);
+            TEST(audioDevice->StartPlayout() == 0);
+
+            AudioDeviceModule::AudioLayer audioLayer;
+            TEST(audioDevice->ActiveAudioLayer(&audioLayer) == 0);
+
+            // PulseAudio handles device removal itself (stream migration),
+            // so there is no error callback to wait for in that case.
+            if (audioLayer == AudioDeviceModule::kLinuxPulseAudio)
+            {
+                TEST_LOG("\n \n> PulseAudio loopback audio is now active.\n"
+                    "> Rec : fs=%u, #channels=%u.\n"
+                    "> Play: fs=%u, #channels=%u.\n"
+                    "> Speak into the microphone and verify that your voice is"
+                    " played out in loopback.\n"
+                    "> Unplug the device and make sure that your voice is played"
+                    " out in loop back on the built-in soundcard.\n"
+                    "> Then press any key...\n",
+                         recSamplesPerSecRec, nRecChannels, playSamplesPerSec,
+                         nPlayChannels);
+
+                PAUSE(DEFAULT_PAUSE_TIME);
+            } else if (audioDevice->Playing() && audioDevice->Recording())
+            {
+                if (loopCount < 1)
+                {
+                    TEST_LOG("\n \n> Loopback audio is now active.\n"
+                        "> Rec : fs=%u, #channels=%u.\n"
+                        "> Play: fs=%u, #channels=%u.\n"
+                        "> Speak into the microphone and verify that your voice"
+                        " is played out in loopback.\n"
+                        "> Unplug the device and wait for the error message...\n",
+                        recSamplesPerSecRec, nRecChannels,
+                        playSamplesPerSec, nPlayChannels);
+
+                    // Poll until the event observer records any error code
+                    // (-1 is used as the "no error yet" sentinel).
+                    _audioEventObserver->_error
+                        = (AudioDeviceObserver::ErrorCode) (-1);
+                    while (_audioEventObserver->_error
+                        == (AudioDeviceObserver::ErrorCode) (-1))
+                    {
+                        SLEEP(500);
+                    }
+                } else
+                {
+                    TEST_LOG("\n \n> Loopback audio is now active.\n"
+                        "> Rec : fs=%u, #channels=%u.\n"
+                        "> Play: fs=%u, #channels=%u.\n"
+                        "> Speak into the microphone and verify that your voice"
+                        " is played out in loopback.\n"
+                        "> Press any key to stop...\n",
+                             recSamplesPerSecRec, nRecChannels,
+                             playSamplesPerSec, nPlayChannels);
+
+                    PAUSE(DEFAULT_PAUSE_TIME);
+                }
+            }
+
+            TEST(audioDevice->StopRecording() == 0);
+            TEST(audioDevice->StopPlayout() == 0);
+            TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+            _audioTransport->SetFullDuplex(false);
+
+            // Between the two passes, let the operator re-insert the device
+            // so the second enumeration can pick it up again.
+            if (loopCount < 1)
+            {
+                TEST_LOG("\n \n> Stopped!\n");
+                TEST_LOG("> Now reinsert device if you want to enumerate it.\n");
+                TEST_LOG("> Press any key when done.\n");
+                PAUSE(DEFAULT_PAUSE_TIME);
+            }
+
+            loopCount++;
+        }
+    } // loopCount
+
+    TEST(audioDevice->Terminate() == 0);
+    TEST(audioDevice->Initialized() == false);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Placeholder "extra" test: verifies only that the attached audio device
+// module can be initialized and terminated cleanly, with the reported
+// initialization state tracking each transition.
+// Returns 0 on completion, -1 when no ADM is attached.
+WebRtc_Word32 FuncTestManager::TestExtra()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Extra test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+        return -1;
+
+    RESET_TEST;
+
+    AudioDeviceModule* adm = _audioDevice;
+
+    // Bring the module up and confirm that it reports being initialized...
+    TEST(adm->Init() == 0);
+    TEST(adm->Initialized() == true);
+    // ...then tear it down and confirm the state is cleared again.
+    TEST(adm->Terminate() == 0);
+    TEST(adm->Initialized() == false);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+// Lets the operator pick a recording (capture) device from an enumerated
+// list read from stdin. On Windows, 0 selects the default device, 1 the
+// default communication device (Windows 7), and N+10 selects enumerated
+// device N; elsewhere the enumerated index is used directly.
+// Returns the result of SetRecordingDevice() (0 on success) or -1 on
+// invalid input.
+WebRtc_Word32 FuncTestManager::SelectRecordingDevice()
+{
+    WebRtc_Word16 nDevices = _audioDevice->RecordingDevices();
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize];
+    WebRtc_Word8 guid[kAdmMaxGuidSize];
+    WebRtc_Word32 ret(-1);
+
+#ifdef _WIN32
+    TEST_LOG("\nSelect Recording Device\n \n");
+    TEST_LOG("  (%d) Default\n", 0);
+    TEST_LOG("  (%d) Default Communication [Win 7]\n", 1);
+    TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        TEST(_audioDevice->RecordingDeviceName(i, name, guid) == 0);
+        TEST_LOG(" (%d) Device %d (%s)\n", i+10, i, name);
+    }
+    TEST_LOG("\n: ");
+
+    int sel(0);
+
+    // Use %d since |sel| is signed, and verify that a number was read.
+    TEST(scanf("%d", &sel) > 0);
+
+    if (sel == 0)
+    {
+        TEST((ret = _audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice)) == 0);
+    }
+    else if (sel == 1)
+    {
+        TEST((ret = _audioDevice->SetRecordingDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice)) == 0);
+    }
+    else if ((sel >= 10) && (sel < (nDevices+10)))
+    {
+        // Enumerated devices are offset by 10 in the menu. The lower bound
+        // rejects inputs 2..9, which would otherwise be passed on as a
+        // negative device index (sel-10).
+        TEST((ret = _audioDevice->SetRecordingDevice(sel-10)) == 0);
+    }
+    else
+    {
+        return -1;
+    }
+#else
+    TEST_LOG("\nSelect Recording Device\n \n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        TEST(_audioDevice->RecordingDeviceName(i, name, guid) == 0);
+        TEST_LOG(" (%d) Device %d (%s)\n", i, i, name);
+    }
+    TEST_LOG("\n: ");
+    int sel(0);
+    // Use %d since |sel| is signed; reject negative indices explicitly.
+    TEST(scanf("%d", &sel) > 0);
+    if ((sel >= 0) && (sel < (nDevices)))
+    {
+        TEST((ret = _audioDevice->SetRecordingDevice(sel)) == 0);
+    } else
+    {
+        return -1;
+    }
+#endif
+
+    return ret;
+}
+
+// Lets the operator pick a playout (render) device from an enumerated list
+// read from stdin. On Windows, 0 selects the default device, 1 the default
+// communication device (Windows 7), and N+10 selects enumerated device N;
+// elsewhere the enumerated index is used directly.
+// Returns the result of SetPlayoutDevice() (0 on success) or -1 on
+// invalid input.
+WebRtc_Word32 FuncTestManager::SelectPlayoutDevice()
+{
+    WebRtc_Word16 nDevices = _audioDevice->PlayoutDevices();
+    WebRtc_Word8 name[kAdmMaxDeviceNameSize];
+    WebRtc_Word8 guid[kAdmMaxGuidSize];
+
+#ifdef _WIN32
+    TEST_LOG("\nSelect Playout Device\n \n");
+    TEST_LOG("  (%d) Default\n", 0);
+    TEST_LOG("  (%d) Default Communication [Win 7]\n", 1);
+    TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        TEST(_audioDevice->PlayoutDeviceName(i, name, guid) == 0);
+        TEST_LOG(" (%d) Device %d (%s)\n", i+10, i, name);
+    }
+    TEST_LOG("\n: ");
+
+    int sel(0);
+
+    // Use %d since |sel| is signed, and verify that a number was read.
+    TEST(scanf("%d", &sel) > 0);
+
+    WebRtc_Word32 ret(0);
+
+    if (sel == 0)
+    {
+        TEST((ret = _audioDevice->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultDevice)) == 0);
+    }
+    else if (sel == 1)
+    {
+        TEST((ret = _audioDevice->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice)) == 0);
+    }
+    else if ((sel >= 10) && (sel < (nDevices+10)))
+    {
+        // Enumerated devices are offset by 10 in the menu. The lower bound
+        // rejects inputs 2..9, which would otherwise be passed on as a
+        // negative device index (sel-10).
+        TEST((ret = _audioDevice->SetPlayoutDevice(sel-10)) == 0);
+    }
+    else
+    {
+        return -1;
+    }
+#else
+    TEST_LOG("\nSelect Playout Device\n \n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        TEST(_audioDevice->PlayoutDeviceName(i, name, guid) == 0);
+        TEST_LOG(" (%d) Device %d (%s)\n", i, i, name);
+    }
+    TEST_LOG("\n: ");
+    int sel(0);
+    // Use %d since |sel| is signed; reject negative indices explicitly.
+    TEST(scanf("%d", &sel) > 0);
+    WebRtc_Word32 ret(0);
+    if ((sel >= 0) && (sel < (nDevices)))
+    {
+        TEST((ret = _audioDevice->SetPlayoutDevice(sel)) == 0);
+    } else
+    {
+        return -1;
+    }
+#endif
+
+    return ret;
+}
+
+// Interactive test of the advanced mobile-device APIs: runs full-duplex
+// loopback, then (on WinCE/iPhone) repeatedly resets the audio device with
+// varying pauses, and (on iPhone) toggles the loudspeaker on and off while
+// verifying the reported status. Fix: corrected the "Set playout spaker"
+// typo in the operator prompt.
+// Returns 0 on completion, -1 if no ADM is attached or device selection
+// fails.
+WebRtc_Word32 FuncTestManager::TestAdvancedMBAPI()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Advanced mobile device API test:\n");
+    TEST_LOG("=======================================\n");
+
+    // Cannot run without an attached audio device module.
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    TEST(audioDevice->Init() == 0);
+    TEST(audioDevice->Initialized() == true);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+    _audioTransport->SetFullDuplex(true);
+    _audioTransport->SetLoopbackMeasurements(true);
+
+    TEST(audioDevice->RegisterAudioCallback(_audioTransport) == 0);
+    // Start recording
+    TEST(audioDevice->InitRecording() == 0);
+    TEST(audioDevice->StartRecording() == 0);
+    // Start playout
+    TEST(audioDevice->InitPlayout() == 0);
+    TEST(audioDevice->StartPlayout() == 0);
+
+    TEST(audioDevice->Recording() == true);
+    TEST(audioDevice->Playing() == true);
+
+#if defined(_WIN32_WCE) || defined(MAC_IPHONE)
+    // Reset the device repeatedly with increasing pauses and let the
+    // operator verify that audio still sounds correct afterwards.
+    TEST_LOG("\nResetAudioDevice\n \n");
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the audio is good.\n\
+> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+    for (int p=0; p<=60; p+=20)
+    {
+        TEST_LOG("Resetting sound device several time with pause %d ms\n", p);
+        for (int l=0; l<20; ++l)
+        {
+            TEST(audioDevice->ResetAudioDevice() == 0);
+            AudioDeviceUtility::Sleep(p);
+        }
+        TEST_LOG("\n> Speak into the microphone and verify that the audio is good.\n");
+        AudioDeviceUtility::Sleep(2000);
+    }
+#endif
+
+#if defined(MAC_IPHONE)
+    // Toggle the loudspeaker route and check the reported status both ways.
+    bool loudspeakerOn(false);
+    TEST_LOG("\nSet playout speaker\n \n");
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the audio is good.\n\
+> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    TEST_LOG("Set to use speaker\n");
+    TEST(audioDevice->SetLoudspeakerStatus(true) == 0);
+    TEST_LOG("\n> Speak into the microphone and verify that the audio is"
+        " from the loudspeaker.\n\
+> Press any key to stop...\n \n");
+    PAUSE(DEFAULT_PAUSE_TIME);
+    TEST(audioDevice->GetLoudspeakerStatus(loudspeakerOn) == 0);
+    TEST(loudspeakerOn == true);
+
+    TEST_LOG("Set to not use speaker\n");
+    TEST(audioDevice->SetLoudspeakerStatus(false) == 0);
+    TEST_LOG("\n> Speak into the microphone and verify that the audio is not"
+        " from the loudspeaker.\n\
+> Press any key to stop...\n \n");
+    PAUSE(DEFAULT_PAUSE_TIME);
+    TEST(audioDevice->GetLoudspeakerStatus(loudspeakerOn) == 0);
+    TEST(loudspeakerOn == false);
+#endif
+
+    TEST(audioDevice->StopRecording() == 0);
+    TEST(audioDevice->StopPlayout() == 0);
+    TEST(audioDevice->RegisterAudioCallback(NULL) == 0);
+
+    _audioTransport->SetFullDuplex(false);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+} // namespace webrtc
+
+// EOF
diff --git a/trunk/src/modules/audio_device/main/test/func_test_manager.h b/trunk/src/modules/audio_device/main/test/func_test_manager.h
new file mode 100644
index 0000000..5efae50
--- /dev/null
+++ b/trunk/src/modules/audio_device/main/test/func_test_manager.h
@@ -0,0 +1,228 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_FUNC_TEST_MANAGER_H
+#define WEBRTC_AUDIO_DEVICE_FUNC_TEST_MANAGER_H
+
+#include "../source/audio_device_utility.h"
+
+#include <string>
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "audio_device_test_defines.h"
+#include "file_wrapper.h"
+#include "list_wrapper.h"
+#include "resampler.h"
+
+// On mobile targets (no console to wait for a key press on), PAUSE falls
+// back to a fixed-length sleep instead of waiting for keyboard input.
+#if defined(MAC_IPHONE) || defined(ANDROID)
+#define USE_SLEEP_AS_PAUSE
+#else
+//#define USE_SLEEP_AS_PAUSE
+#endif
+
+// Sets the default pause time if using sleep as pause
+#define DEFAULT_PAUSE_TIME 5000
+
+#if defined(USE_SLEEP_AS_PAUSE)
+#define PAUSE(a) AudioDeviceUtility::Sleep(a);
+#else
+// NOTE: in this variant the argument is ignored — the test blocks until a
+// key is pressed.
+#define PAUSE(a) AudioDeviceUtility::WaitForKey();
+#endif
+
+#define SLEEP(a) AudioDeviceUtility::Sleep(a);
+
+// Audio layer the tests request when creating the ADM.
+#define ADM_AUDIO_LAYER AudioDeviceModule::kPlatformDefaultAudio
+//#define ADM_AUDIO_LAYER AudioDeviceModule::kLinuxPulseAudio
+
+// Identifies which functional test(s) FuncTestManager::DoTest() runs.
+// The numeric values double as the interactive menu choices (note the gap:
+// no test uses the value 12).
+enum TestType
+{
+    TTInvalid = -1,            // no/unrecognized selection
+    TTAll = 0,                 // run every test below
+    TTAudioLayerSelection = 1,
+    TTDeviceEnumeration = 2,
+    TTDeviceSelection = 3,
+    TTAudioTransport = 4,
+    TTSpeakerVolume = 5,
+    TTMicrophoneVolume = 6,
+    TTSpeakerMute = 7,
+    TTMicrophoneMute = 8,
+    TTMicrophoneBoost = 9,
+    TTMicrophoneAGC = 10,
+    TTLoopback = 11,
+    TTDeviceRemoval = 13,
+    TTMobileAPI = 14,
+    TTTest = 66,               // the "extra" placeholder test
+};
+
+class ProcessThread;
+
+namespace webrtc
+{
+
+class AudioDeviceModule;
+class AudioEventObserver;
+class AudioTransport;
+
+// ----------------------------------------------------------------------------
+//  AudioEventObserver
+// ----------------------------------------------------------------------------
+
+// Receives error/warning callbacks from the audio device module and exposes
+// the latest codes as public members so tests can poll them (e.g. the
+// device-removal test waits for _error to change).
+class AudioEventObserver: public AudioDeviceObserver
+{
+public:
+    virtual void OnErrorIsReported(const ErrorCode error);
+    virtual void OnWarningIsReported(const WarningCode warning);
+    AudioEventObserver(AudioDeviceModule* audioDevice);
+    ~AudioEventObserver();
+public:
+    // Most recently reported codes (written by the callbacks above).
+    ErrorCode _error;
+    WarningCode _warning;
+private:
+    AudioDeviceModule* _audioDevice;
+};
+
+// ----------------------------------------------------------------------------
+//  AudioTransport
+// ----------------------------------------------------------------------------
+
+// AudioTransport implementation used by the functional tests. The record
+// and playout callbacks implement the behaviors toggled by the setters
+// below (full duplex, emulated AGC, mute/volume/boost handling, loopback
+// measurements, file playout).
+// Fix: removed the stray ';' lines that followed each inline member
+// function body — empty member declarations are ill-formed before C++11.
+class AudioTransportImpl: public AudioTransport
+{
+public:
+    // Called by the ADM when recorded data is available.
+    virtual WebRtc_Word32
+        RecordedDataIsAvailable(const WebRtc_Word8* audioSamples,
+                                const WebRtc_UWord32 nSamples,
+                                const WebRtc_UWord8 nBytesPerSample,
+                                const WebRtc_UWord8 nChannels,
+                                const WebRtc_UWord32 samplesPerSec,
+                                const WebRtc_UWord32 totalDelayMS,
+                                const WebRtc_Word32 clockDrift,
+                                const WebRtc_UWord32 currentMicLevel,
+                                WebRtc_UWord32& newMicLevel);
+
+    // Called by the ADM when it needs audio to play out.
+    virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
+                                           const WebRtc_UWord8 nBytesPerSample,
+                                           const WebRtc_UWord8 nChannels,
+                                           const WebRtc_UWord32 samplesPerSec,
+                                           WebRtc_Word8* audioSamples,
+                                           WebRtc_UWord32& nSamplesOut);
+
+    AudioTransportImpl(AudioDeviceModule* audioDevice);
+    ~AudioTransportImpl();
+
+public:
+    // Enables/disables playing out a file instead of the captured input.
+    WebRtc_Word32 SetFilePlayout(bool enable, const WebRtc_Word8* fileName =
+        NULL);
+    void SetFullDuplex(bool enable);
+    // The setters below only flip flags consumed by the callbacks above.
+    void SetSpeakerVolume(bool enable)
+    {
+        _speakerVolume = enable;
+    }
+    void SetSpeakerMute(bool enable)
+    {
+        _speakerMute = enable;
+    }
+    void SetMicrophoneMute(bool enable)
+    {
+        _microphoneMute = enable;
+    }
+    void SetMicrophoneVolume(bool enable)
+    {
+        _microphoneVolume = enable;
+    }
+    void SetMicrophoneBoost(bool enable)
+    {
+        _microphoneBoost = enable;
+    }
+    void SetLoopbackMeasurements(bool enable)
+    {
+        _loopBackMeasurements = enable;
+    }
+    void SetMicrophoneAGC(bool enable)
+    {
+        _microphoneAGC = enable;
+    }
+
+private:
+    AudioDeviceModule* _audioDevice;
+
+    // Behavior flags, one per test scenario (see setters above).
+    bool _playFromFile;
+    bool _fullDuplex;
+    bool _speakerVolume;
+    bool _speakerMute;
+    bool _microphoneVolume;
+    bool _microphoneMute;
+    bool _microphoneBoost;
+    bool _microphoneAGC;
+    bool _loopBackMeasurements;
+
+    // File used when file playout is enabled.
+    FileWrapper& _playFile;
+
+    // Callback invocation counters.
+    WebRtc_UWord32 _recCount;
+    WebRtc_UWord32 _playCount;
+
+    // Buffered audio used for full-duplex loopback.
+    ListWrapper _audioList;
+
+    Resampler _resampler;
+};
+
+// ----------------------------------------------------------------------------
+//  FuncTestManager
+// ----------------------------------------------------------------------------
+
+// Drives the interactive audio-device functional tests. Init() creates the
+// ADM and helper objects, DoTest() dispatches on TestType, and Close()
+// tears everything down.
+class FuncTestManager
+{
+public:
+    FuncTestManager();
+    ~FuncTestManager();
+    WebRtc_Word32 Init();
+    WebRtc_Word32 Close();
+    // Runs the test(s) selected by |testType| (TTAll runs every test).
+    WebRtc_Word32 DoTest(const TestType testType);
+private:
+    WebRtc_Word32 TestAudioLayerSelection();
+    WebRtc_Word32 TestDeviceEnumeration();
+    WebRtc_Word32 TestDeviceSelection();
+    WebRtc_Word32 TestAudioTransport();
+    WebRtc_Word32 TestSpeakerVolume();
+    WebRtc_Word32 TestMicrophoneVolume();
+    WebRtc_Word32 TestSpeakerMute();
+    WebRtc_Word32 TestMicrophoneMute();
+    WebRtc_Word32 TestMicrophoneBoost();
+    WebRtc_Word32 TestLoopback();
+    WebRtc_Word32 TestDeviceRemoval();
+    WebRtc_Word32 TestExtra();
+    WebRtc_Word32 TestMicrophoneAGC();
+    // Interactive device pickers; return -1 on invalid selection.
+    WebRtc_Word32 SelectPlayoutDevice();
+    WebRtc_Word32 SelectRecordingDevice();
+    WebRtc_Word32 TestAdvancedMBAPI();
+private:
+    // Paths to where the resource files to be used for this test are located.
+    std::string _resourcePath;
+    std::string _playoutFile48;
+    std::string _playoutFile44;
+    std::string _playoutFile16;
+    std::string _playoutFile8;
+
+    ProcessThread* _processThread;
+    AudioDeviceModule* _audioDevice;
+    AudioEventObserver* _audioEventObserver;
+    AudioTransportImpl* _audioTransport;
+};
+
+} // namespace webrtc
+
+#endif  // #ifndef WEBRTC_AUDIO_DEVICE_FUNC_TEST_MANAGER_H
diff --git a/trunk/src/modules/audio_processing/Android.mk b/trunk/src/modules/audio_processing/Android.mk
new file mode 100644
index 0000000..2ab5bb6
--- /dev/null
+++ b/trunk/src/modules/audio_processing/Android.mk
@@ -0,0 +1,143 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (MY_WEBRTC_COMMON_DEFS etc.).
+include $(LOCAL_PATH)/../../../android-webrtc.mk
+
+# Static library with the audio processing module (APM) core and the
+# component implementations listed in LOCAL_SRC_FILES below.
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc_apm
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    $(call all-proto-files-under, .) \
+    audio_buffer.cc \
+    audio_processing_impl.cc \
+    echo_cancellation_impl.cc \
+    echo_control_mobile_impl.cc \
+    gain_control_impl.cc \
+    high_pass_filter_impl.cc \
+    level_estimator_impl.cc \
+    noise_suppression_impl.cc \
+    splitting_filter.cc \
+    processing_component.cc \
+    voice_detection_impl.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_NS_FIXED'
+# For the floating-point noise suppressor, use '-DWEBRTC_NS_FLOAT' above
+# instead of '-DWEBRTC_NS_FIXED'.
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/aec/include \
+    $(LOCAL_PATH)/aecm/include \
+    $(LOCAL_PATH)/agc/include \
+    $(LOCAL_PATH)/ns/include \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../common_audio/vad/include \
+    $(LOCAL_PATH)/../../system_wrappers/interface \
+    external/protobuf/src
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# stlport is wired up this way only for in-tree (non-NDK) builds.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+
+# apm process test app: command-line test driver built from
+# test/process_test.cc, linked against the shared APM library.
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES:= \
+    $(call all-proto-files-under, .) \
+    test/process_test.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../../system_wrappers/interface \
+    external/gtest/include
+
+LOCAL_STATIC_LIBRARIES := \
+    libgtest \
+    libprotobuf-cpp-2.3.0-lite
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils \
+    libstlport \
+    libwebrtc_audio_preprocessing
+
+LOCAL_MODULE:= webrtc_apm_process_test
+
+# NDK builds produce a plain executable; in-tree builds use the native
+# test rules plus stlport.
+ifdef NDK_ROOT
+include $(BUILD_EXECUTABLE)
+else
+include external/stlport/libstlport.mk
+include $(BUILD_NATIVE_TEST)
+endif
+
+# apm unit test app: gtest binary built from test/unit_test.cc plus the
+# shared test-support file utilities.
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES:= \
+    $(call all-proto-files-under, test) \
+    test/unit_test.cc \
+    ../../../test/testsupport/fileutils.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_APM_UNIT_TEST_FIXED_PROFILE'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../../../test \
+    $(LOCAL_PATH)/../../system_wrappers/interface \
+    $(LOCAL_PATH)/../../common_audio/signal_processing/include \
+    external/gtest/include \
+    external/protobuf/src
+
+LOCAL_STATIC_LIBRARIES := \
+    libgtest \
+    libprotobuf-cpp-2.3.0-lite
+
+LOCAL_SHARED_LIBRARIES := \
+    libstlport \
+    libwebrtc_audio_preprocessing
+
+LOCAL_MODULE:= webrtc_apm_unit_test
+
+# NDK builds produce a plain executable; in-tree builds use the native
+# test rules plus stlport.
+ifdef NDK_ROOT
+include $(BUILD_EXECUTABLE)
+else
+include external/stlport/libstlport.mk
+include $(BUILD_NATIVE_TEST)
+endif
diff --git a/trunk/src/modules/audio_processing/OWNERS b/trunk/src/modules/audio_processing/OWNERS
new file mode 100644
index 0000000..5a25634
--- /dev/null
+++ b/trunk/src/modules/audio_processing/OWNERS
@@ -0,0 +1,2 @@
+andrew@webrtc.org
+bjornv@webrtc.org
diff --git a/trunk/src/modules/audio_processing/aec/Android.mk b/trunk/src/modules/audio_processing/aec/Android.mk
new file mode 100644
index 0000000..26679d8
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/Android.mk
@@ -0,0 +1,48 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (MY_WEBRTC_COMMON_DEFS etc.).
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+# Static library with the acoustic echo canceller (AEC).
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_aec
+LOCAL_MODULE_TAGS := optional
+# Note: no trailing backslash after the last source; a dangling line
+# continuation before a non-blank line would silently absorb that line
+# into the list.
+LOCAL_SRC_FILES := \
+    echo_cancellation.c \
+    aec_resampler.c \
+    aec_core.c \
+    aec_rdft.c
+
+# SSE2-optimized kernels are compiled only for x86 targets.
+ifeq ($(TARGET_ARCH),x86)
+LOCAL_SRC_FILES += \
+    aec_core_sse2.c \
+    aec_rdft_sse2.c
+endif
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../utility \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# stlport is wired up this way only for in-tree (non-NDK) builds.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_processing/aec/aec.gypi b/trunk/src/modules/audio_processing/aec/aec.gypi
new file mode 100644
index 0000000..2e34c5b
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec.gypi
@@ -0,0 +1,74 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'aec',
+      'type': '<(library)',
+      'variables': {
+        # Outputs some low-level debug files.
+        'aec_debug_dump%': 0,
+      },
+      'dependencies': [
+        'apm_util',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/echo_cancellation.h',
+        'echo_cancellation.c',
+        'aec_core.h',
+        'aec_core.c',
+        'aec_rdft.h',
+        'aec_rdft.c',
+        'aec_resampler.h',
+        'aec_resampler.c',
+      ],
+      # The SSE2 kernels live in a separate target (aec_sse2 below) so that
+      # -msse2 is applied only to those files.
+      'conditions': [
+        ['target_arch=="ia32" or target_arch=="x64"', {
+          'dependencies': [ 'aec_sse2', ],
+        }],
+        ['aec_debug_dump==1', {
+          'defines': [ 'WEBRTC_AEC_DEBUG_DUMP', ],
+        }],
+      ],
+    },
+  ],
+  'conditions': [
+    ['target_arch=="ia32" or target_arch=="x64"', {
+      'targets': [
+        {
+          'target_name': 'aec_sse2',
+          'type': '<(library)',
+          'sources': [
+            'aec_core_sse2.c',
+            'aec_rdft_sse2.c',
+          ],
+          # Enable SSE2 code generation for these files only.
+          'conditions': [
+            ['os_posix==1 and OS!="mac"', {
+              'cflags': [ '-msse2', ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_CFLAGS': [ '-msse2', ],
+              },
+            }],
+          ],
+        },
+      ],
+    }],
+  ],
+}
diff --git a/trunk/src/modules/audio_processing/aec/aec_core.c b/trunk/src/modules/audio_processing/aec/aec_core.c
new file mode 100644
index 0000000..1637e6f
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_core.c
@@ -0,0 +1,1527 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The core AEC algorithm, which is presented with time-aligned signals.
+ */
+
+#include "aec_core.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stddef.h>  // size_t
+#include <stdlib.h>
+#include <string.h>
+
+#include "aec_rdft.h"
+#include "common_audio/signal_processing/include/signal_processing_library.h"
+#include "delay_estimator_wrapper.h"
+#include "ring_buffer.h"
+#include "system_wrappers/interface/cpu_features_wrapper.h"
+#include "typedefs.h"
+
+// Buffer size (samples)
+static const size_t kBufSizePartitions = 250;  // 1 second of audio in 16 kHz.
+
+// Noise suppression
+static const int converged = 250;
+
+// Metrics
+static const int subCountLen = 4;
+static const int countLen = 50;
+
+// Quantities to control H band scaling for SWB input
+static const int flagHbandCn = 1; // flag for adding comfort noise in H band
+static const float cnScaleHband = (float)0.4; // scale for comfort noise in H band
+// Initial bin for averaging nlp gain in low band
+static const int freqAvgIc = PART_LEN / 2;
+
+// Matlab code to produce table:
+// win = sqrt(hanning(63)); win = [0 ; win(1:32)];
+// fprintf(1, '\t%.14f, %.14f, %.14f,\n', win);
+static const float sqrtHanning[65] = {
+    0.00000000000000f, 0.02454122852291f, 0.04906767432742f,
+    0.07356456359967f, 0.09801714032956f, 0.12241067519922f,
+    0.14673047445536f, 0.17096188876030f, 0.19509032201613f,
+    0.21910124015687f, 0.24298017990326f, 0.26671275747490f,
+    0.29028467725446f, 0.31368174039889f, 0.33688985339222f,
+    0.35989503653499f, 0.38268343236509f, 0.40524131400499f,
+    0.42755509343028f, 0.44961132965461f, 0.47139673682600f,
+    0.49289819222978f, 0.51410274419322f, 0.53499761988710f,
+    0.55557023301960f, 0.57580819141785f, 0.59569930449243f,
+    0.61523159058063f, 0.63439328416365f, 0.65317284295378f,
+    0.67155895484702f, 0.68954054473707f, 0.70710678118655f,
+    0.72424708295147f, 0.74095112535496f, 0.75720884650648f,
+    0.77301045336274f, 0.78834642762661f, 0.80320753148064f,
+    0.81758481315158f, 0.83146961230255f, 0.84485356524971f,
+    0.85772861000027f, 0.87008699110871f, 0.88192126434835f,
+    0.89322430119552f, 0.90398929312344f, 0.91420975570353f,
+    0.92387953251129f, 0.93299279883474f, 0.94154406518302f,
+    0.94952818059304f, 0.95694033573221f, 0.96377606579544f,
+    0.97003125319454f, 0.97570213003853f, 0.98078528040323f,
+    0.98527764238894f, 0.98917650996478f, 0.99247953459871f,
+    0.99518472667220f, 0.99729045667869f, 0.99879545620517f,
+    0.99969881869620f, 1.00000000000000f
+};
+
+// Matlab code to produce table:
+// weightCurve = [0 ; 0.3 * sqrt(linspace(0,1,64))' + 0.1];
+// fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', weightCurve);
+const float WebRtcAec_weightCurve[65] = {
+    0.0000f, 0.1000f, 0.1378f, 0.1535f, 0.1655f, 0.1756f,
+    0.1845f, 0.1926f, 0.2000f, 0.2069f, 0.2134f, 0.2195f,
+    0.2254f, 0.2309f, 0.2363f, 0.2414f, 0.2464f, 0.2512f,
+    0.2558f, 0.2604f, 0.2648f, 0.2690f, 0.2732f, 0.2773f,
+    0.2813f, 0.2852f, 0.2890f, 0.2927f, 0.2964f, 0.3000f,
+    0.3035f, 0.3070f, 0.3104f, 0.3138f, 0.3171f, 0.3204f,
+    0.3236f, 0.3268f, 0.3299f, 0.3330f, 0.3360f, 0.3390f,
+    0.3420f, 0.3449f, 0.3478f, 0.3507f, 0.3535f, 0.3563f,
+    0.3591f, 0.3619f, 0.3646f, 0.3673f, 0.3699f, 0.3726f,
+    0.3752f, 0.3777f, 0.3803f, 0.3828f, 0.3854f, 0.3878f,
+    0.3903f, 0.3928f, 0.3952f, 0.3976f, 0.4000f
+};
+
+// Matlab code to produce table:
+// overDriveCurve = [sqrt(linspace(0,1,65))' + 1];
+// fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', overDriveCurve);
+const float WebRtcAec_overDriveCurve[65] = {
+    1.0000f, 1.1250f, 1.1768f, 1.2165f, 1.2500f, 1.2795f,
+    1.3062f, 1.3307f, 1.3536f, 1.3750f, 1.3953f, 1.4146f,
+    1.4330f, 1.4507f, 1.4677f, 1.4841f, 1.5000f, 1.5154f,
+    1.5303f, 1.5449f, 1.5590f, 1.5728f, 1.5863f, 1.5995f,
+    1.6124f, 1.6250f, 1.6374f, 1.6495f, 1.6614f, 1.6731f,
+    1.6847f, 1.6960f, 1.7071f, 1.7181f, 1.7289f, 1.7395f,
+    1.7500f, 1.7603f, 1.7706f, 1.7806f, 1.7906f, 1.8004f,
+    1.8101f, 1.8197f, 1.8292f, 1.8385f, 1.8478f, 1.8570f,
+    1.8660f, 1.8750f, 1.8839f, 1.8927f, 1.9014f, 1.9100f,
+    1.9186f, 1.9270f, 1.9354f, 1.9437f, 1.9520f, 1.9601f,
+    1.9682f, 1.9763f, 1.9843f, 1.9922f, 2.0000f
+};
+
+// "Private" function prototypes.
+static void ProcessBlock(aec_t* aec);
+
+static void NonLinearProcessing(aec_t *aec, short *output, short *outputH);
+
+static void GetHighbandGain(const float *lambda, float *nlpGainHband);
+
+// ComfortNoise() also computes noise for the H band, returned in |comfortNoiseHband|.
+static void ComfortNoise(aec_t *aec, float efw[2][PART_LEN1],
+                                  complex_t *comfortNoiseHband,
+                                  const float *noisePow, const float *lambda);
+
+static void WebRtcAec_InitLevel(power_level_t *level);
+static void WebRtcAec_InitStats(stats_t *stats);
+static void UpdateLevel(power_level_t* level, float in[2][PART_LEN1]);
+static void UpdateMetrics(aec_t *aec);
+// Convert from time domain to frequency domain. Note that |time_data| are
+// overwritten.
+static void TimeToFrequency(float time_data[PART_LEN2],
+                            float freq_data[2][PART_LEN1],
+                            int window);
+
+// Returns the real part of the complex product
+// (aRe + i*aIm) * (bRe + i*bIm).
+__inline static float MulRe(float aRe, float aIm, float bRe, float bIm)
+{
+    return aRe * bRe - aIm * bIm;
+}
+
+// Returns the imaginary part of the complex product
+// (aRe + i*aIm) * (bRe + i*bIm).
+__inline static float MulIm(float aRe, float aIm, float bRe, float bIm)
+{
+    return aRe * bIm + aIm * bRe;
+}
+
+// qsort()-style comparator for floats; returns -1, 0 or +1. Written as a
+// difference of comparisons rather than a subtraction to avoid rounding
+// and truncation issues when converting the result to int.
+static int CmpFloat(const void *a, const void *b)
+{
+    const float *da = (const float *)a;
+    const float *db = (const float *)b;
+
+    return (*da > *db) - (*da < *db);
+}
+
+// Allocates an AEC instance and all of its internal buffers, storing the
+// result in |*aecInst|. Returns 0 on success. On any failure the partially
+// constructed instance is released, |*aecInst| is set to NULL, and -1 is
+// returned.
+int WebRtcAec_CreateAec(aec_t **aecInst)
+{
+    // calloc (instead of malloc) so that a failure part-way through leaves
+    // the untouched buffer pointers NULL rather than indeterminate, making
+    // the WebRtcAec_FreeAec() cleanup below safe.
+    // NOTE(review): assumes WebRtc_FreeBuffer()/WebRtc_FreeDelayEstimator()
+    // tolerate NULL handles - confirm against ring_buffer/delay_estimator.
+    aec_t *aec = calloc(1, sizeof(aec_t));
+    *aecInst = aec;
+    if (aec == NULL) {
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aec->nearFrBuf,
+                            FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1) {
+        WebRtcAec_FreeAec(aec);
+        // Clear the caller's handle too; the original code only NULLed the
+        // local pointer and left *aecInst dangling after the free.
+        *aecInst = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aec->outFrBuf,
+                            FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1) {
+        WebRtcAec_FreeAec(aec);
+        *aecInst = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aec->nearFrBufH,
+                            FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1) {
+        WebRtcAec_FreeAec(aec);
+        *aecInst = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aec->outFrBufH,
+                            FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1) {
+        WebRtcAec_FreeAec(aec);
+        *aecInst = NULL;
+        return -1;
+    }
+
+    // Create far-end buffers.
+    if (WebRtc_CreateBuffer(&aec->far_buf, kBufSizePartitions,
+                            sizeof(float) * 2 * PART_LEN1) == -1) {
+        WebRtcAec_FreeAec(aec);
+        *aecInst = NULL;
+        return -1;
+    }
+    if (WebRtc_CreateBuffer(&aec->far_buf_windowed, kBufSizePartitions,
+                            sizeof(float) * 2 * PART_LEN1) == -1) {
+        WebRtcAec_FreeAec(aec);
+        *aecInst = NULL;
+        return -1;
+    }
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    if (WebRtc_CreateBuffer(&aec->far_time_buf, kBufSizePartitions,
+                            sizeof(int16_t) * PART_LEN) == -1) {
+        WebRtcAec_FreeAec(aec);
+        *aecInst = NULL;
+        return -1;
+    }
+#endif
+    if (WebRtc_CreateDelayEstimator(&aec->delay_estimator,
+                                    PART_LEN1,
+                                    kMaxDelayBlocks,
+                                    kLookaheadBlocks) == -1) {
+      WebRtcAec_FreeAec(aec);
+      *aecInst = NULL;
+      return -1;
+    }
+
+    return 0;
+}
+
+// Releases an AEC instance and all buffers created by
+// WebRtcAec_CreateAec(). Returns -1 if |aec| is NULL, otherwise 0.
+int WebRtcAec_FreeAec(aec_t *aec)
+{
+    if (aec == NULL) {
+        return -1;
+    }
+
+    WebRtc_FreeBuffer(aec->nearFrBuf);
+    WebRtc_FreeBuffer(aec->outFrBuf);
+
+    WebRtc_FreeBuffer(aec->nearFrBufH);
+    WebRtc_FreeBuffer(aec->outFrBufH);
+
+    WebRtc_FreeBuffer(aec->far_buf);
+    WebRtc_FreeBuffer(aec->far_buf_windowed);
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    WebRtc_FreeBuffer(aec->far_time_buf);
+#endif
+    WebRtc_FreeDelayEstimator(aec->delay_estimator);
+
+    free(aec);
+    return 0;
+}
+
+// Computes the frequency-domain echo estimate: for every stored partition,
+// the product of the far-end spectrum |xfBuf| and the adaptive filter
+// |wfBuf| is accumulated into |yf| (yf[0] real, yf[1] imaginary).
+// |xfBufBlockPos| marks the newest partition in the circular |xfBuf|,
+// hence the wrap check on the read index.
+static void FilterFar(aec_t *aec, float yf[2][PART_LEN1])
+{
+  int i;
+  for (i = 0; i < NR_PART; i++) {
+    int j;
+    int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+    int pos = i * PART_LEN1;
+    // Check for wrap
+    if (i + aec->xfBufBlockPos >= NR_PART) {
+      xPos -= NR_PART*(PART_LEN1);
+    }
+
+    for (j = 0; j < PART_LEN1; j++) {
+      yf[0][j] += MulRe(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j],
+                        aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]);
+      yf[1][j] += MulIm(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j],
+                        aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]);
+    }
+  }
+}
+
+// Normalizes the error spectrum |ef| by the smoothed far-end power |xPow|
+// (regularized with 1e-10 to avoid division by zero), limits its magnitude
+// to |errThresh|, and applies the adaptation step size |mu|.
+static void ScaleErrorSignal(aec_t *aec, float ef[2][PART_LEN1])
+{
+  int i;
+  float absEf;
+  for (i = 0; i < (PART_LEN1); i++) {
+    ef[0][i] /= (aec->xPow[i] + 1e-10f);
+    ef[1][i] /= (aec->xPow[i] + 1e-10f);
+    absEf = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
+
+    // Clamp the error magnitude to the threshold.
+    if (absEf > aec->errThresh) {
+      absEf = aec->errThresh / (absEf + 1e-10f);
+      ef[0][i] *= absEf;
+      ef[1][i] *= absEf;
+    }
+
+    // Stepsize factor
+    ef[0][i] *= aec->mu;
+    ef[1][i] *= aec->mu;
+  }
+}
+
+// Time-unconstrained filter adaptation.
+// TODO(andrew): consider for a low-complexity mode.
+//static void FilterAdaptationUnconstrained(aec_t *aec, float *fft,
+//                                          float ef[2][PART_LEN1]) {
+//  int i, j;
+//  for (i = 0; i < NR_PART; i++) {
+//    int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
+//    int pos;
+//    // Check for wrap
+//    if (i + aec->xfBufBlockPos >= NR_PART) {
+//      xPos -= NR_PART * PART_LEN1;
+//    }
+//
+//    pos = i * PART_LEN1;
+//
+//    for (j = 0; j < PART_LEN1; j++) {
+//      aec->wfBuf[pos + j][0] += MulRe(aec->xfBuf[xPos + j][0],
+//                                      -aec->xfBuf[xPos + j][1],
+//                                      ef[j][0], ef[j][1]);
+//      aec->wfBuf[pos + j][1] += MulIm(aec->xfBuf[xPos + j][0],
+//                                      -aec->xfBuf[xPos + j][1],
+//                                      ef[j][0], ef[j][1]);
+//    }
+//  }
+//}
+
+// Partitioned-block NLMS weight update with a time-domain constraint: the
+// correlation conj(X) * E for each partition is taken to the time domain,
+// its second half zeroed (the gradient constraint), transformed back, and
+// accumulated into the filter weights |wfBuf|. |fft| is caller-provided
+// scratch of at least PART_LEN2 floats.
+static void FilterAdaptation(aec_t *aec, float *fft, float ef[2][PART_LEN1]) {
+  int i, j;
+  for (i = 0; i < NR_PART; i++) {
+    int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
+    int pos;
+    // Check for wrap
+    if (i + aec->xfBufBlockPos >= NR_PART) {
+      xPos -= NR_PART * PART_LEN1;
+    }
+
+    pos = i * PART_LEN1;
+
+    for (j = 0; j < PART_LEN; j++) {
+
+      fft[2 * j] = MulRe(aec->xfBuf[0][xPos + j],
+                         -aec->xfBuf[1][xPos + j],
+                         ef[0][j], ef[1][j]);
+      fft[2 * j + 1] = MulIm(aec->xfBuf[0][xPos + j],
+                             -aec->xfBuf[1][xPos + j],
+                             ef[0][j], ef[1][j]);
+    }
+    // Nyquist-bin term; the real-FFT packing stores it in fft[1] (matched
+    // by the wfBuf[0][pos + PART_LEN] += fft[1] update below).
+    fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
+                   -aec->xfBuf[1][xPos + PART_LEN],
+                   ef[0][PART_LEN], ef[1][PART_LEN]);
+
+    aec_rdft_inverse_128(fft);
+    // Gradient constraint: zero the second half of the impulse response.
+    memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
+
+    // fft scaling
+    {
+      float scale = 2.0f / PART_LEN2;
+      for (j = 0; j < PART_LEN; j++) {
+        fft[j] *= scale;
+      }
+    }
+    aec_rdft_forward_128(fft);
+
+    aec->wfBuf[0][pos] += fft[0];
+    aec->wfBuf[0][pos + PART_LEN] += fft[1];
+
+    for (j = 1; j < PART_LEN; j++) {
+      aec->wfBuf[0][pos + j] += fft[2 * j];
+      aec->wfBuf[1][pos + j] += fft[2 * j + 1];
+    }
+  }
+}
+
+// Applies the suppression gains |hNl| to the error spectrum |efw|: each
+// gain is first pulled toward the feedback value |hNlFb| via the weight
+// curve, then raised to the smoothed over-drive exponent, and finally
+// multiplied into |efw|.
+static void OverdriveAndSuppress(aec_t *aec, float hNl[PART_LEN1],
+                                 const float hNlFb,
+                                 float efw[2][PART_LEN1]) {
+  int i;
+  for (i = 0; i < PART_LEN1; i++) {
+    // Weight subbands
+    if (hNl[i] > hNlFb) {
+      hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
+          (1 - WebRtcAec_weightCurve[i]) * hNl[i];
+    }
+    hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+
+    // Suppress error signal
+    efw[0][i] *= hNl[i];
+    efw[1][i] *= hNl[i];
+
+    // Ooura fft returns incorrect sign on imaginary component. It matters here
+    // because we are making an additive change with comfort noise.
+    efw[1][i] *= -1;
+  }
+}
+
+// Kernel entry points; set to the plain C implementations in
+// WebRtcAec_InitAec() and overridden with SSE2 versions by
+// WebRtcAec_InitAec_SSE2() when the CPU supports them.
+WebRtcAec_FilterFar_t WebRtcAec_FilterFar;
+WebRtcAec_ScaleErrorSignal_t WebRtcAec_ScaleErrorSignal;
+WebRtcAec_FilterAdaptation_t WebRtcAec_FilterAdaptation;
+WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress;
+
+// Initializes (or re-initializes) an AEC instance for operation at
+// |sampFreq| Hz. Resets all internal buffers, filter and suppression
+// state, metrics, and selects the kernel implementations. Returns 0 on
+// success and -1 if any internal buffer fails to initialize.
+int WebRtcAec_InitAec(aec_t *aec, int sampFreq)
+{
+    int i;
+
+    aec->sampFreq = sampFreq;
+
+    // Adaptation step size and error threshold are tuned per sample rate.
+    if (sampFreq == 8000) {
+        aec->mu = 0.6f;
+        aec->errThresh = 2e-6f;
+    }
+    else {
+        aec->mu = 0.5f;
+        aec->errThresh = 1.5e-6f;
+    }
+
+    if (WebRtc_InitBuffer(aec->nearFrBuf) == -1) {
+        return -1;
+    }
+
+    if (WebRtc_InitBuffer(aec->outFrBuf) == -1) {
+        return -1;
+    }
+
+    if (WebRtc_InitBuffer(aec->nearFrBufH) == -1) {
+        return -1;
+    }
+
+    if (WebRtc_InitBuffer(aec->outFrBufH) == -1) {
+        return -1;
+    }
+
+    // Initialize far-end buffers.
+    if (WebRtc_InitBuffer(aec->far_buf) == -1) {
+        return -1;
+    }
+    if (WebRtc_InitBuffer(aec->far_buf_windowed) == -1) {
+        return -1;
+    }
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    if (WebRtc_InitBuffer(aec->far_time_buf) == -1) {
+        return -1;
+    }
+#endif
+    aec->system_delay = 0;
+
+    if (WebRtc_InitDelayEstimator(aec->delay_estimator) != 0) {
+      return -1;
+    }
+    aec->delay_logging_enabled = 0;
+    memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram));
+
+    // Default target suppression level
+    aec->targetSupp = -11.5;
+    aec->minOverDrive = 2.0;
+
+    // Sampling frequency multiplier
+    // SWB is processed as 160 frame size
+    if (aec->sampFreq == 32000) {
+      aec->mult = (short)aec->sampFreq / 16000;
+    }
+    else {
+        aec->mult = (short)aec->sampFreq / 8000;
+    }
+
+    aec->farBufWritePos = 0;
+    aec->farBufReadPos = 0;
+
+    aec->inSamples = 0;
+    aec->outSamples = 0;
+    aec->knownDelay = 0;
+
+    // Initialize buffers
+    memset(aec->dBuf, 0, sizeof(aec->dBuf));
+    memset(aec->eBuf, 0, sizeof(aec->eBuf));
+    // For H band
+    memset(aec->dBufH, 0, sizeof(aec->dBufH));
+
+    memset(aec->xPow, 0, sizeof(aec->xPow));
+    memset(aec->dPow, 0, sizeof(aec->dPow));
+    memset(aec->dInitMinPow, 0, sizeof(aec->dInitMinPow));
+    aec->noisePow = aec->dInitMinPow;
+    aec->noiseEstCtr = 0;
+
+    // Initial comfort noise power
+    for (i = 0; i < PART_LEN1; i++) {
+        aec->dMinPow[i] = 1.0e6f;
+    }
+
+    // Holds the last block written to
+    aec->xfBufBlockPos = 0;
+    // TODO: Investigate need for these initializations. Deleting them doesn't
+    //       change the output at all and yields 0.4% overall speedup.
+    memset(aec->xfBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1);
+    memset(aec->wfBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1);
+    memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1);
+    memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1);
+    memset(aec->xfwBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1);
+    memset(aec->se, 0, sizeof(float) * PART_LEN1);
+
+    // To prevent numerical instability in the first block.
+    for (i = 0; i < PART_LEN1; i++) {
+        aec->sd[i] = 1;
+    }
+    for (i = 0; i < PART_LEN1; i++) {
+        aec->sx[i] = 1;
+    }
+
+    memset(aec->hNs, 0, sizeof(aec->hNs));
+    memset(aec->outBuf, 0, sizeof(float) * PART_LEN);
+
+    aec->hNlFbMin = 1;
+    aec->hNlFbLocalMin = 1;
+    aec->hNlXdAvgMin = 1;
+    aec->hNlNewMin = 0;
+    aec->hNlMinCtr = 0;
+    aec->overDrive = 2;
+    aec->overDriveSm = 2;
+    aec->delayIdx = 0;
+    aec->stNearState = 0;
+    aec->echoState = 0;
+    aec->divergeState = 0;
+
+    aec->seed = 777;
+    aec->delayEstCtr = 0;
+
+    // Metrics disabled by default
+    aec->metricsMode = 0;
+    WebRtcAec_InitMetrics(aec);
+
+    // Assembly optimization
+    // Start from the plain C kernels; WebRtcAec_InitAec_SSE2() swaps in
+    // the SSE2 versions below when the CPU supports them.
+    WebRtcAec_FilterFar = FilterFar;
+    WebRtcAec_ScaleErrorSignal = ScaleErrorSignal;
+    WebRtcAec_FilterAdaptation = FilterAdaptation;
+    WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+    if (WebRtc_GetCPUInfo(kSSE2)) {
+      WebRtcAec_InitAec_SSE2();
+    }
+#endif
+
+    aec_rdft_init();
+
+    return 0;
+}
+
+// Resets the echo-metrics state: the state counter, all signal level
+// trackers, and the ERL / ERLE / A-NLP / RERL statistics.
+void WebRtcAec_InitMetrics(aec_t *aec)
+{
+    aec->stateCounter = 0;
+    WebRtcAec_InitLevel(&aec->farlevel);
+    WebRtcAec_InitLevel(&aec->nearlevel);
+    WebRtcAec_InitLevel(&aec->linoutlevel);
+    WebRtcAec_InitLevel(&aec->nlpoutlevel);
+
+    WebRtcAec_InitStats(&aec->erl);
+    WebRtcAec_InitStats(&aec->erle);
+    WebRtcAec_InitStats(&aec->aNlp);
+    WebRtcAec_InitStats(&aec->rerl);
+}
+
+
+// Stores one far-end partition in the frequency domain, both unwindowed
+// (|far_buf|) and windowed (|far_buf_windowed|). |farend| must hold
+// PART_LEN2 samples (the previous and the current partition). If the
+// buffers are full, the oldest partition is dropped and |system_delay|
+// is reduced to match.
+void WebRtcAec_BufferFarendPartition(aec_t *aec, const float* farend) {
+  float fft[PART_LEN2];
+  float xf[2][PART_LEN1];
+
+  // Check if the buffer is full, and in that case flush the oldest data.
+  if (WebRtc_available_write(aec->far_buf) < 1) {
+    WebRtc_MoveReadPtr(aec->far_buf, 1);
+    WebRtc_MoveReadPtr(aec->far_buf_windowed, 1);
+    aec->system_delay -= PART_LEN;
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    WebRtc_MoveReadPtr(aec->far_time_buf, 1);
+#endif
+  }
+  // Convert far-end partition to the frequency domain without windowing.
+  memcpy(fft, farend, sizeof(float) * PART_LEN2);
+  TimeToFrequency(fft, xf, 0);
+  WebRtc_WriteBuffer(aec->far_buf, &xf[0][0], 1);
+
+  // Convert far-end partition to the frequency domain with windowing.
+  memcpy(fft, farend, sizeof(float) * PART_LEN2);
+  TimeToFrequency(fft, xf, 1);
+  WebRtc_WriteBuffer(aec->far_buf_windowed, &xf[0][0], 1);
+}
+
+// Processes one FRAME_LEN frame of near-end audio (plus the corresponding
+// high band |nearendH| for 32 kHz operation) against the buffered far-end
+// signal. |knownDelay| is the caller's updated delay estimate; the far-end
+// read pointers are shifted by its difference from the previous value.
+void WebRtcAec_ProcessFrame(aec_t *aec,
+                            const short *nearend,
+                            const short *nearendH,
+                            int knownDelay)
+{
+    // For each frame the process is as follows:
+    // 1) If the system_delay indicates on being too small for processing a
+    //    frame we stuff the buffer with enough data for 10 ms.
+    // 2) Adjust the buffer to the system delay, by moving the read pointer.
+    // 3) If we can't move read pointer due to buffer size limitations we
+    //    flush/stuff the buffer.
+    // 4) Process as many partitions as possible.
+    // 5) Update the |system_delay| with respect to a full frame of FRAME_LEN
+    //    samples. Even though we will have data left to process (we work with
+    //    partitions) we consider updating a whole frame, since that's the
+    //    amount of data we input and output in audio_processing.
+
+    // TODO(bjornv): Investigate how we should round the delay difference; right
+    // now we know that incoming |knownDelay| is underestimated when it's less
+    // than |aec->knownDelay|. We therefore, round (-32) in that direction. In
+    // the other direction, we don't have this situation, but might flush one
+    // partition too little. This can cause non-causality, which should be
+    // investigated. Maybe, allow for a non-symmetric rounding, like -16.
+    int move_elements = (aec->knownDelay - knownDelay - 32) / PART_LEN;
+    int moved_elements = 0;
+
+    // TODO(bjornv): Change the near-end buffer handling to be the same as for
+    // far-end, that is, with a near_pre_buf.
+    // Buffer the near-end frame.
+    WebRtc_WriteBuffer(aec->nearFrBuf, nearend, FRAME_LEN);
+    // For H band
+    if (aec->sampFreq == 32000) {
+        WebRtc_WriteBuffer(aec->nearFrBufH, nearendH, FRAME_LEN);
+    }
+
+    // 1) At most we process |aec->mult|+1 partitions in 10 ms. Make sure we
+    // have enough far-end data for that by stuffing the buffer if the
+    // |system_delay| indicates otherwise.
+    if (aec->system_delay < FRAME_LEN) {
+      // We don't have enough data so we rewind 10 ms.
+      WebRtc_MoveReadPtr(aec->far_buf_windowed, -(aec->mult + 1));
+      aec->system_delay -= WebRtc_MoveReadPtr(aec->far_buf, -(aec->mult + 1)) *
+          PART_LEN;
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+      WebRtc_MoveReadPtr(aec->far_time_buf, -(aec->mult + 1));
+#endif
+    }
+
+    // 2) Compensate for a possible change in the system delay.
+
+    WebRtc_MoveReadPtr(aec->far_buf_windowed, move_elements);
+    moved_elements = WebRtc_MoveReadPtr(aec->far_buf, move_elements);
+    aec->knownDelay -= moved_elements * PART_LEN;
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
+#endif
+
+    // 4) Process as many blocks as possible.
+    while (WebRtc_available_read(aec->nearFrBuf) >= PART_LEN) {
+        ProcessBlock(aec);
+    }
+
+    // 5) Update system delay with respect to the entire frame.
+    aec->system_delay -= FRAME_LEN;
+}
+
+static void ProcessBlock(aec_t* aec) {
+    int i;
+    float d[PART_LEN], y[PART_LEN], e[PART_LEN], dH[PART_LEN];
+    float scale;
+
+    float fft[PART_LEN2];
+    float xf[2][PART_LEN1], yf[2][PART_LEN1], ef[2][PART_LEN1];
+    float df[2][PART_LEN1];
+    float far_spectrum = 0.0f;
+    float near_spectrum = 0.0f;
+    float abs_far_spectrum[PART_LEN1];
+    float abs_near_spectrum[PART_LEN1];
+
+    const float gPow[2] = {0.9f, 0.1f};
+
+    // Noise estimate constants.
+    const int noiseInitBlocks = 500 * aec->mult;
+    const float step = 0.1f;
+    const float ramp = 1.0002f;
+    const float gInitNoise[2] = {0.999f, 0.001f};
+
+    int16_t nearend[PART_LEN];
+    int16_t* nearend_ptr = NULL;
+    int16_t output[PART_LEN];
+    int16_t outputH[PART_LEN];
+
+    float* xf_ptr = NULL;
+
+    memset(dH, 0, sizeof(dH));
+    if (aec->sampFreq == 32000) {
+      // Get the upper band first so we can reuse |nearend|.
+      WebRtc_ReadBuffer(aec->nearFrBufH,
+                        (void**) &nearend_ptr,
+                        nearend,
+                        PART_LEN);
+      for (i = 0; i < PART_LEN; i++) {
+          dH[i] = (float) (nearend_ptr[i]);
+      }
+      memcpy(aec->dBufH + PART_LEN, dH, sizeof(float) * PART_LEN);
+    }
+    WebRtc_ReadBuffer(aec->nearFrBuf, (void**) &nearend_ptr, nearend, PART_LEN);
+
+    // ---------- Ooura fft ----------
+    // Concatenate old and new nearend blocks.
+    for (i = 0; i < PART_LEN; i++) {
+        d[i] = (float) (nearend_ptr[i]);
+    }
+    memcpy(aec->dBuf + PART_LEN, d, sizeof(float) * PART_LEN);
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    {
+        int16_t farend[PART_LEN];
+        int16_t* farend_ptr = NULL;
+        WebRtc_ReadBuffer(aec->far_time_buf, (void**) &farend_ptr, farend, 1);
+        fwrite(farend_ptr, sizeof(int16_t), PART_LEN, aec->farFile);
+        fwrite(nearend_ptr, sizeof(int16_t), PART_LEN, aec->nearFile);
+    }
+#endif
+
+    // We should always have at least one element stored in |far_buf|.
+    assert(WebRtc_available_read(aec->far_buf) > 0);
+    WebRtc_ReadBuffer(aec->far_buf, (void**) &xf_ptr, &xf[0][0], 1);
+
+    // Near fft
+    memcpy(fft, aec->dBuf, sizeof(float) * PART_LEN2);
+    TimeToFrequency(fft, df, 0);
+
+    // Power smoothing
+    for (i = 0; i < PART_LEN1; i++) {
+      far_spectrum = (xf_ptr[i] * xf_ptr[i]) +
+          (xf_ptr[PART_LEN1 + i] * xf_ptr[PART_LEN1 + i]);
+      aec->xPow[i] = gPow[0] * aec->xPow[i] + gPow[1] * NR_PART * far_spectrum;
+      // Calculate absolute spectra
+      abs_far_spectrum[i] = sqrtf(far_spectrum);
+
+      near_spectrum = df[0][i] * df[0][i] + df[1][i] * df[1][i];
+      aec->dPow[i] = gPow[0] * aec->dPow[i] + gPow[1] * near_spectrum;
+      // Calculate absolute spectra
+      abs_near_spectrum[i] = sqrtf(near_spectrum);
+    }
+
+    // Estimate noise power. Wait until dPow is more stable.
+    if (aec->noiseEstCtr > 50) {
+        for (i = 0; i < PART_LEN1; i++) {
+            if (aec->dPow[i] < aec->dMinPow[i]) {
+                aec->dMinPow[i] = (aec->dPow[i] + step * (aec->dMinPow[i] -
+                    aec->dPow[i])) * ramp;
+            }
+            else {
+                aec->dMinPow[i] *= ramp;
+            }
+        }
+    }
+
+    // Smooth increasing noise power from zero at the start,
+    // to avoid a sudden burst of comfort noise.
+    if (aec->noiseEstCtr < noiseInitBlocks) {
+        aec->noiseEstCtr++;
+        for (i = 0; i < PART_LEN1; i++) {
+            if (aec->dMinPow[i] > aec->dInitMinPow[i]) {
+                aec->dInitMinPow[i] = gInitNoise[0] * aec->dInitMinPow[i] +
+                    gInitNoise[1] * aec->dMinPow[i];
+            }
+            else {
+                aec->dInitMinPow[i] = aec->dMinPow[i];
+            }
+        }
+        aec->noisePow = aec->dInitMinPow;
+    }
+    else {
+        aec->noisePow = aec->dMinPow;
+    }
+
+    // Block wise delay estimation used for logging
+    if (aec->delay_logging_enabled) {
+      int delay_estimate = 0;
+      // Estimate the delay
+      delay_estimate = WebRtc_DelayEstimatorProcessFloat(aec->delay_estimator,
+                                                         abs_far_spectrum,
+                                                         abs_near_spectrum,
+                                                         PART_LEN1);
+      if (delay_estimate >= 0) {
+        // Update delay estimate buffer.
+        aec->delay_histogram[delay_estimate]++;
+      }
+    }
+
+    // Update the xfBuf block position.
+    aec->xfBufBlockPos--;
+    if (aec->xfBufBlockPos == -1) {
+        aec->xfBufBlockPos = NR_PART - 1;
+    }
+
+    // Buffer xf
+    memcpy(aec->xfBuf[0] + aec->xfBufBlockPos * PART_LEN1, xf_ptr,
+           sizeof(float) * PART_LEN1);
+    memcpy(aec->xfBuf[1] + aec->xfBufBlockPos * PART_LEN1, &xf_ptr[PART_LEN1],
+           sizeof(float) * PART_LEN1);
+
+    memset(yf[0], 0, sizeof(float) * (PART_LEN1 * 2));
+
+    // Filter far
+    WebRtcAec_FilterFar(aec, yf);
+
+    // Inverse fft to obtain echo estimate and error.
+    fft[0] = yf[0][0];
+    fft[1] = yf[0][PART_LEN];
+    for (i = 1; i < PART_LEN; i++) {
+        fft[2 * i] = yf[0][i];
+        fft[2 * i + 1] = yf[1][i];
+    }
+    aec_rdft_inverse_128(fft);
+
+    scale = 2.0f / PART_LEN2;
+    for (i = 0; i < PART_LEN; i++) {
+        y[i] = fft[PART_LEN + i] * scale; // fft scaling
+    }
+
+    for (i = 0; i < PART_LEN; i++) {
+        e[i] = d[i] - y[i];
+    }
+
+    // Error fft
+    memcpy(aec->eBuf + PART_LEN, e, sizeof(float) * PART_LEN);
+    memset(fft, 0, sizeof(float) * PART_LEN);
+    memcpy(fft + PART_LEN, e, sizeof(float) * PART_LEN);
+    // TODO(bjornv): Change to use TimeToFrequency().
+    aec_rdft_forward_128(fft);
+
+    ef[1][0] = 0;
+    ef[1][PART_LEN] = 0;
+    ef[0][0] = fft[0];
+    ef[0][PART_LEN] = fft[1];
+    for (i = 1; i < PART_LEN; i++) {
+        ef[0][i] = fft[2 * i];
+        ef[1][i] = fft[2 * i + 1];
+    }
+
+    if (aec->metricsMode == 1) {
+      // Note that the first PART_LEN samples in fft (before transformation) are
+      // zero. Hence, the scaling by two in UpdateLevel() should not be
+      // performed. That scaling is taken care of in UpdateMetrics() instead.
+      UpdateLevel(&aec->linoutlevel, ef);
+    }
+
+    // Scale error signal inversely with far power.
+    WebRtcAec_ScaleErrorSignal(aec, ef);
+    WebRtcAec_FilterAdaptation(aec, fft, ef);
+    NonLinearProcessing(aec, output, outputH);
+
+    if (aec->metricsMode == 1) {
+        // Update power levels and echo metrics
+        UpdateLevel(&aec->farlevel, (float (*)[PART_LEN1]) xf_ptr);
+        UpdateLevel(&aec->nearlevel, df);
+        UpdateMetrics(aec);
+    }
+
+    // Store the output block.
+    WebRtc_WriteBuffer(aec->outFrBuf, output, PART_LEN);
+    // For H band
+    if (aec->sampFreq == 32000) {
+        WebRtc_WriteBuffer(aec->outFrBufH, outputH, PART_LEN);
+    }
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    {
+        int16_t eInt16[PART_LEN];
+        for (i = 0; i < PART_LEN; i++) {
+            eInt16[i] = (int16_t)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, e[i],
+                WEBRTC_SPL_WORD16_MIN);
+        }
+
+        fwrite(eInt16, sizeof(int16_t), PART_LEN, aec->outLinearFile);
+        fwrite(output, sizeof(int16_t), PART_LEN, aec->outFile);
+    }
+#endif
+}
+
// Non-linear processing (echo suppression) of one PART_LEN block:
//  1. Periodically estimates the echo-path delay as the adaptive-filter
//     partition with maximum energy.
//  2. Computes windowed FFTs of the delayed farend, the nearend and the
//     error signals, and updates smoothed auto/cross power spectra.
//  3. Derives per-bin suppression gains |hNl| from the nearend/error and
//     farend/nearend coherences, with a divergence safeguard on the linear
//     filter output.
//  4. Applies overdriven suppression, adds comfort noise, and inverse
//     transforms with overlap-add into |output| (and |outputH| for the
//     upper band in 32 kHz mode).
static void NonLinearProcessing(aec_t *aec, short *output, short *outputH)
{
    float efw[2][PART_LEN1], dfw[2][PART_LEN1], xfw[2][PART_LEN1];
    complex_t comfortNoiseHband[PART_LEN1];
    float fft[PART_LEN2];
    float scale, dtmp;
    float nlpGainHband;
    int i, j, pos;

    // Coherence and non-linear filter
    float cohde[PART_LEN1], cohxd[PART_LEN1];
    float hNlDeAvg, hNlXdAvg;
    float hNl[PART_LEN1];
    float hNlPref[PREF_BAND_SIZE];
    float hNlFb = 0, hNlFbLow = 0;
    const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f;
    const int prefBandSize = PREF_BAND_SIZE / aec->mult;
    const int minPrefBand = 4 / aec->mult;

    // Near and error power sums
    float sdSum = 0, seSum = 0;

    // Power estimate smoothing coefficients, indexed by sampling-rate
    // multiple (aec->mult - 1).
    const float gCoh[2][2] = {{0.9f, 0.1f}, {0.93f, 0.07f}};
    const float *ptrGCoh = gCoh[aec->mult - 1];

    // Filter energy
    float wfEnMax = 0, wfEn = 0;
    const int delayEstInterval = 10 * aec->mult;

    float* xfw_ptr = NULL;

    // The partition-energy delay search below runs once every
    // |delayEstInterval| blocks (when the counter wraps to 0).
    aec->delayEstCtr++;
    if (aec->delayEstCtr == delayEstInterval) {
        aec->delayEstCtr = 0;
    }

    // initialize comfort noise for H band
    memset(comfortNoiseHband, 0, sizeof(comfortNoiseHband));
    nlpGainHband = (float)0.0;
    dtmp = (float)0.0;

    // Measure energy in each filter partition to determine delay.
    // TODO: Spread by computing one partition per block?
    if (aec->delayEstCtr == 0) {
        wfEnMax = 0;
        aec->delayIdx = 0;
        for (i = 0; i < NR_PART; i++) {
            pos = i * PART_LEN1;
            wfEn = 0;
            for (j = 0; j < PART_LEN1; j++) {
                wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
                    aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
            }

            if (wfEn > wfEnMax) {
                wfEnMax = wfEn;
                aec->delayIdx = i;
            }
        }
    }

    // We should always have at least one element stored in |far_buf|.
    assert(WebRtc_available_read(aec->far_buf_windowed) > 0);
    // NLP
    WebRtc_ReadBuffer(aec->far_buf_windowed, (void**) &xfw_ptr, &xfw[0][0], 1);

    // TODO(bjornv): Investigate if we can reuse |far_buf_windowed| instead of
    // |xfwBuf|.
    // Buffer far.
    // NOTE(review): |xfwBuf| is declared complex_t[] (interleaved), but the
    // data copied in here and read back just below is used as split re/im
    // float[2][PART_LEN1] blocks — confirm the intended storage layout.
    memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1);

    // Use delayed far.
    memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1, sizeof(xfw));

    // Windowed near fft
    for (i = 0; i < PART_LEN; i++) {
        fft[i] = aec->dBuf[i] * sqrtHanning[i];
        fft[PART_LEN + i] = aec->dBuf[PART_LEN + i] * sqrtHanning[PART_LEN - i];
    }
    aec_rdft_forward_128(fft);

    // Unpack the interleaved rdft output to split re/im arrays. The DC and
    // Nyquist bins are purely real; the rdft stores the Nyquist real part in
    // element 1.
    dfw[1][0] = 0;
    dfw[1][PART_LEN] = 0;
    dfw[0][0] = fft[0];
    dfw[0][PART_LEN] = fft[1];
    for (i = 1; i < PART_LEN; i++) {
        dfw[0][i] = fft[2 * i];
        dfw[1][i] = fft[2 * i + 1];
    }

    // Windowed error fft
    for (i = 0; i < PART_LEN; i++) {
        fft[i] = aec->eBuf[i] * sqrtHanning[i];
        fft[PART_LEN + i] = aec->eBuf[PART_LEN + i] * sqrtHanning[PART_LEN - i];
    }
    aec_rdft_forward_128(fft);
    efw[1][0] = 0;
    efw[1][PART_LEN] = 0;
    efw[0][0] = fft[0];
    efw[0][PART_LEN] = fft[1];
    for (i = 1; i < PART_LEN; i++) {
        efw[0][i] = fft[2 * i];
        efw[1][i] = fft[2 * i + 1];
    }

    // Smoothed PSD (first-order recursive averaging per bin).
    for (i = 0; i < PART_LEN1; i++) {
        aec->sd[i] = ptrGCoh[0] * aec->sd[i] + ptrGCoh[1] *
            (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
        aec->se[i] = ptrGCoh[0] * aec->se[i] + ptrGCoh[1] *
            (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
        // We threshold here to protect against the ill-effects of a zero farend.
        // The threshold is not arbitrarily chosen, but balances protection and
        // adverse interaction with the algorithm's tuning.
        // TODO: investigate further why this is so sensitive.
        aec->sx[i] = ptrGCoh[0] * aec->sx[i] + ptrGCoh[1] *
            WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i], 15);

        // Cross spectra: sde = conj(D)*E, sxd = conj(D)*X (re, im pairs).
        aec->sde[i][0] = ptrGCoh[0] * aec->sde[i][0] + ptrGCoh[1] *
            (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
        aec->sde[i][1] = ptrGCoh[0] * aec->sde[i][1] + ptrGCoh[1] *
            (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);

        aec->sxd[i][0] = ptrGCoh[0] * aec->sxd[i][0] + ptrGCoh[1] *
            (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
        aec->sxd[i][1] = ptrGCoh[0] * aec->sxd[i][1] + ptrGCoh[1] *
            (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);

        sdSum += aec->sd[i];
        seSum += aec->se[i];
    }

    // Divergent filter safeguard: if total error power exceeds nearend power,
    // pass the nearend through instead, with 5% hysteresis on the release.
    if (aec->divergeState == 0) {
        if (seSum > sdSum) {
            aec->divergeState = 1;
        }
    }
    else {
        if (seSum * 1.05f < sdSum) {
            aec->divergeState = 0;
        }
    }

    if (aec->divergeState == 1) {
        memcpy(efw, dfw, sizeof(efw));
    }

    // Reset if error is significantly larger than nearend (13 dB).
    if (seSum > (19.95f * sdSum)) {
        memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
    }

    // Subband coherence: |cross-psd|^2 / (auto-psd product), regularized.
    for (i = 0; i < PART_LEN1; i++) {
        cohde[i] = (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
            (aec->sd[i] * aec->se[i] + 1e-10f);
        cohxd[i] = (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
            (aec->sx[i] * aec->sd[i] + 1e-10f);
    }

    // Band-averaged (1 - farend/nearend coherence): high value => low echo.
    hNlXdAvg = 0;
    for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
        hNlXdAvg += cohxd[i];
    }
    hNlXdAvg /= prefBandSize;
    hNlXdAvg = 1 - hNlXdAvg;

    // Band-averaged nearend/error coherence.
    hNlDeAvg = 0;
    for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
        hNlDeAvg += cohde[i];
    }
    hNlDeAvg /= prefBandSize;

    // Track the minimum of the echo indicator (1 means no echo seen yet).
    if (hNlXdAvg < 0.75f && hNlXdAvg < aec->hNlXdAvgMin) {
        aec->hNlXdAvgMin = hNlXdAvg;
    }

    // Nearend-speech state: high near/error coherence and low echo indicator.
    if (hNlDeAvg > 0.98f && hNlXdAvg > 0.9f) {
        aec->stNearState = 1;
    }
    else if (hNlDeAvg < 0.95f || hNlXdAvg < 0.8f) {
        aec->stNearState = 0;
    }

    if (aec->hNlXdAvgMin == 1) {
        // No echo has been observed: use minimum overdrive.
        aec->echoState = 0;
        aec->overDrive = aec->minOverDrive;

        if (aec->stNearState == 1) {
            memcpy(hNl, cohde, sizeof(hNl));
            hNlFb = hNlDeAvg;
            hNlFbLow = hNlDeAvg;
        }
        else {
            for (i = 0; i < PART_LEN1; i++) {
                hNl[i] = 1 - cohxd[i];
            }
            hNlFb = hNlXdAvg;
            hNlFbLow = hNlXdAvg;
        }
    }
    else {

        if (aec->stNearState == 1) {
            aec->echoState = 0;
            memcpy(hNl, cohde, sizeof(hNl));
            hNlFb = hNlDeAvg;
            hNlFbLow = hNlDeAvg;
        }
        else {
            // Echo present: take the more suppressive of the two gains per bin.
            aec->echoState = 1;
            for (i = 0; i < PART_LEN1; i++) {
                hNl[i] = WEBRTC_SPL_MIN(cohde[i], 1 - cohxd[i]);
            }

            // Select an order statistic from the preferred bands.
            // TODO: Using quicksort now, but a selection algorithm may be preferred.
            memcpy(hNlPref, &hNl[minPrefBand], sizeof(float) * prefBandSize);
            qsort(hNlPref, prefBandSize, sizeof(float), CmpFloat);
            hNlFb = hNlPref[(int)floor(prefBandQuant * (prefBandSize - 1))];
            hNlFbLow = hNlPref[(int)floor(prefBandQuantLow * (prefBandSize - 1))];
        }
    }

    // Track the local filter minimum to determine suppression overdrive.
    if (hNlFbLow < 0.6f && hNlFbLow < aec->hNlFbLocalMin) {
        aec->hNlFbLocalMin = hNlFbLow;
        aec->hNlFbMin = hNlFbLow;
        aec->hNlNewMin = 1;
        aec->hNlMinCtr = 0;
    }
    // Let the tracked minima drift slowly back towards 1 (release).
    aec->hNlFbLocalMin = WEBRTC_SPL_MIN(aec->hNlFbLocalMin + 0.0008f / aec->mult, 1);
    aec->hNlXdAvgMin = WEBRTC_SPL_MIN(aec->hNlXdAvgMin + 0.0006f / aec->mult, 1);

    if (aec->hNlNewMin == 1) {
        aec->hNlMinCtr++;
    }
    if (aec->hNlMinCtr == 2) {
        // A new minimum has persisted for two blocks: recompute the overdrive
        // needed to reach |targetSupp| at the minimum gain.
        aec->hNlNewMin = 0;
        aec->hNlMinCtr = 0;
        aec->overDrive = WEBRTC_SPL_MAX(aec->targetSupp /
            ((float)log(aec->hNlFbMin + 1e-10f) + 1e-10f), aec->minOverDrive);
    }

    // Smooth the overdrive (slower when decreasing than when increasing).
    if (aec->overDrive < aec->overDriveSm) {
      aec->overDriveSm = 0.99f * aec->overDriveSm + 0.01f * aec->overDrive;
    }
    else {
      aec->overDriveSm = 0.9f * aec->overDriveSm + 0.1f * aec->overDrive;
    }

    WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw);

    // Add comfort noise.
    ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);

    // TODO(bjornv): Investigate how to take the windowing below into account if
    // needed.
    if (aec->metricsMode == 1) {
      // Note that we have a scaling by two in the time domain |eBuf|.
      // In addition the time domain signal is windowed before transformation,
      // losing half the energy on the average. We take care of the first
      // scaling only in UpdateMetrics().
      UpdateLevel(&aec->nlpoutlevel, efw);
    }
    // Inverse error fft.
    fft[0] = efw[0][0];
    fft[1] = efw[0][PART_LEN];
    for (i = 1; i < PART_LEN; i++) {
        fft[2*i] = efw[0][i];
        // Sign change required by Ooura fft.
        fft[2*i + 1] = -efw[1][i];
    }
    aec_rdft_inverse_128(fft);

    // Overlap and add to obtain output.
    scale = 2.0f / PART_LEN2;
    for (i = 0; i < PART_LEN; i++) {
        fft[i] *= scale; // fft scaling
        fft[i] = fft[i]*sqrtHanning[i] + aec->outBuf[i];

        // Saturation protection
        output[i] = (short)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, fft[i],
            WEBRTC_SPL_WORD16_MIN);

        // Save the second (windowed) half as the overlap tail for next block.
        fft[PART_LEN + i] *= scale; // fft scaling
        aec->outBuf[i] = fft[PART_LEN + i] * sqrtHanning[PART_LEN - i];
    }

    // For H band
    if (aec->sampFreq == 32000) {

        // H band gain
        // average nlp over low band: average over second half of freq spectrum
        // (4->8khz)
        GetHighbandGain(hNl, &nlpGainHband);

        // Inverse comfort_noise
        if (flagHbandCn == 1) {
            fft[0] = comfortNoiseHband[0][0];
            fft[1] = comfortNoiseHband[PART_LEN][0];
            for (i = 1; i < PART_LEN; i++) {
                fft[2*i] = comfortNoiseHband[i][0];
                fft[2*i + 1] = comfortNoiseHband[i][1];
            }
            aec_rdft_inverse_128(fft);
            scale = 2.0f / PART_LEN2;
        }

        // compute gain factor
        for (i = 0; i < PART_LEN; i++) {
            dtmp = (float)aec->dBufH[i];
            dtmp = (float)dtmp * nlpGainHband; // for variable gain

            // add some comfort noise where Hband is attenuated
            if (flagHbandCn == 1) {
                fft[i] *= scale; // fft scaling
                dtmp += cnScaleHband * fft[i];
            }

            // Saturation protection
            outputH[i] = (short)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, dtmp,
                WEBRTC_SPL_WORD16_MIN);
         }
    }

    // Copy the current block to the old position.
    memcpy(aec->dBuf, aec->dBuf + PART_LEN, sizeof(float) * PART_LEN);
    memcpy(aec->eBuf, aec->eBuf + PART_LEN, sizeof(float) * PART_LEN);

    // Copy the current block to the old position for H band
    if (aec->sampFreq == 32000) {
        memcpy(aec->dBufH, aec->dBufH + PART_LEN, sizeof(float) * PART_LEN);
    }

    // Shift the windowed farend FFT history one partition towards the end.
    memmove(aec->xfwBuf + PART_LEN1, aec->xfwBuf, sizeof(aec->xfwBuf) -
        sizeof(complex_t) * PART_LEN1);
}
+
+static void GetHighbandGain(const float *lambda, float *nlpGainHband)
+{
+    int i;
+
+    nlpGainHband[0] = (float)0.0;
+    for (i = freqAvgIc; i < PART_LEN1 - 1; i++) {
+        nlpGainHband[0] += lambda[i];
+    }
+    nlpGainHband[0] /= (float)(PART_LEN1 - 1 - freqAvgIc);
+}
+
// Adds comfort noise to the suppressed error spectrum |efw| and, for 32 kHz
// operation, generates a separate comfort-noise spectrum for the upper band
// in |comfortNoiseHband|.
//
// |noisePow| is the per-bin background-noise power estimate and |lambda| the
// per-bin suppression gain; the noise added to each bin is weighted by
// sqrt(max(1 - lambda[i]^2, 0)) so suppressed signal plus noise matches the
// background noise power. Advances |aec->seed| via the SPL random generator.
static void ComfortNoise(aec_t *aec, float efw[2][PART_LEN1],
    complex_t *comfortNoiseHband, const float *noisePow, const float *lambda)
{
    int i, num;
    float rand[PART_LEN];
    float noise, noiseAvg, tmp, tmpAvg;
    WebRtc_Word16 randW16[PART_LEN];
    complex_t u[PART_LEN1];  // Noise spectrum with random phase.

    const float pi2 = 6.28318530717959f;

    // Generate a uniform random array on [0 1]
    WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
    for (i = 0; i < PART_LEN; i++) {
        rand[i] = ((float)randW16[i]) / 32768;
    }

    // Reject LF noise
    // Build the noise spectrum: magnitude sqrt(noisePow[i]), random phase.
    // The DC bin is zeroed; the Nyquist bin is forced real below.
    u[0][0] = 0;
    u[0][1] = 0;
    for (i = 1; i < PART_LEN1; i++) {
        tmp = pi2 * rand[i - 1];

        noise = sqrtf(noisePow[i]);
        u[i][0] = noise * (float)cos(tmp);
        u[i][1] = -noise * (float)sin(tmp);
    }
    u[PART_LEN][1] = 0;

    for (i = 0; i < PART_LEN1; i++) {
        // This is the proper weighting to match the background noise power
        tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
        efw[0][i] += tmp * u[i][0];
        efw[1][i] += tmp * u[i][1];
    }

    // For H band comfort noise
    // TODO: don't compute noise and "tmp" twice. Use the previous results.
    noiseAvg = 0.0;
    tmpAvg = 0.0;
    num = 0;
    if (aec->sampFreq == 32000 && flagHbandCn == 1) {

        // average noise scale
        // average over second half of freq spectrum (i.e., 4->8khz)
        // TODO: we shouldn't need num. We know how many elements we're summing.
        for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
            num++;
            noiseAvg += sqrtf(noisePow[i]);
        }
        noiseAvg /= (float)num;

        // average nlp scale
        // average over second half of freq spectrum (i.e., 4->8khz)
        // TODO: we shouldn't need num. We know how many elements we're summing.
        num = 0;
        for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
            num++;
            tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
        }
        tmpAvg /= (float)num;

        // Use average noise for H band
        // TODO: we should probably have a new random vector here.
        // Reject LF noise
        u[0][0] = 0;
        u[0][1] = 0;
        for (i = 1; i < PART_LEN1; i++) {
            tmp = pi2 * rand[i - 1];

            // Use average noise for H band
            u[i][0] = noiseAvg * (float)cos(tmp);
            u[i][1] = -noiseAvg * (float)sin(tmp);
        }
        u[PART_LEN][1] = 0;

        for (i = 0; i < PART_LEN1; i++) {
            // Use average NLP weight for H band
            comfortNoiseHband[i][0] = tmpAvg * u[i][0];
            comfortNoiseHband[i][1] = tmpAvg * u[i][1];
        }
    }
}
+
+static void WebRtcAec_InitLevel(power_level_t *level)
+{
+    const float bigFloat = 1E17f;
+
+    level->averagelevel = 0;
+    level->framelevel = 0;
+    level->minlevel = bigFloat;
+    level->frsum = 0;
+    level->sfrsum = 0;
+    level->frcounter = 0;
+    level->sfrcounter = 0;
+}
+
+static void WebRtcAec_InitStats(stats_t *stats)
+{
+    stats->instant = offsetLevel;
+    stats->average = offsetLevel;
+    stats->max = offsetLevel;
+    stats->min = offsetLevel * (-1);
+    stats->sum = 0;
+    stats->hisum = 0;
+    stats->himean = offsetLevel;
+    stats->counter = 0;
+    stats->hicounter = 0;
+}
+
+static void UpdateLevel(power_level_t* level, float in[2][PART_LEN1]) {
+  // Do the energy calculation in the frequency domain. The FFT is performed on
+  // a segment of PART_LEN2 samples due to overlap, but we only want the energy
+  // of half that data (the last PART_LEN samples). Parseval's relation states
+  // that the energy is preserved according to
+  //
+  // \sum_{n=0}^{N-1} |x(n)|^2 = 1/N * \sum_{n=0}^{N-1} |X(n)|^2
+  //                           = ENERGY,
+  //
+  // where N = PART_LEN2. Since we are only interested in calculating the energy
+  // for the last PART_LEN samples we approximate by calculating ENERGY and
+  // divide by 2,
+  //
+  // \sum_{n=N/2}^{N-1} |x(n)|^2 ~= ENERGY / 2
+  //
+  // Since we deal with real valued time domain signals we only store frequency
+  // bins [0, PART_LEN], which is what |in| consists of. To calculate ENERGY we
+  // need to add the contribution from the missing part in
+  // [PART_LEN+1, PART_LEN2-1]. These values are, up to a phase shift, identical
+  // with the values in [1, PART_LEN-1], hence multiply those values by 2. This
+  // is the values in the for loop below, but multiplication by 2 and division
+  // by 2 cancel.
+
+  // TODO(bjornv): Investigate reusing energy calculations performed at other
+  // places in the code.
+  int k = 1;
+  // Imaginary parts are zero at end points and left out of the calculation.
+  float energy = (in[0][0] * in[0][0]) / 2;
+  energy += (in[0][PART_LEN] * in[0][PART_LEN]) / 2;
+
+  for (k = 1; k < PART_LEN; k++) {
+    energy += (in[0][k] * in[0][k] + in[1][k] * in[1][k]);
+  }
+  energy /= PART_LEN2;
+
+  level->sfrsum += energy;
+  level->sfrcounter++;
+
+  if (level->sfrcounter > subCountLen) {
+    level->framelevel = level->sfrsum / (subCountLen * PART_LEN);
+    level->sfrsum = 0;
+    level->sfrcounter = 0;
+    if (level->framelevel > 0) {
+      if (level->framelevel < level->minlevel) {
+        level->minlevel = level->framelevel;  // New minimum.
+      } else {
+        level->minlevel *= (1 + 0.001f);  // Small increase.
+      }
+    }
+    level->frcounter++;
+    level->frsum += level->framelevel;
+    if (level->frcounter > countLen) {
+      level->averagelevel = level->frsum / countLen;
+      level->frsum = 0;
+      level->frcounter = 0;
+    }
+  }
+}
+
// Updates the ERL, A_NLP and ERLE echo metrics. Runs its heavy section once
// per frame-average cycle (when farlevel.frcounter has wrapped to 0), and
// only for segments where the far end was active (average level well above
// its minimum) and echo was detected for at least half the cycle.
static void UpdateMetrics(aec_t *aec)
{
    float dtmp, dtmp2;

    // Far-end activity threshold depends on how noisy the far end is.
    const float actThresholdNoisy = 8.0f;
    const float actThresholdClean = 40.0f;
    // Slightly less than 1 so the noise subtraction cannot go exactly to 0.
    const float safety = 0.99995f;
    const float noisyPower = 300000.0f;

    float actThreshold;
    float echo, suppressedEcho;

    if (aec->echoState) {   // Check if echo is likely present
        aec->stateCounter++;
    }

    if (aec->farlevel.frcounter == 0) {

        // Pick the activity threshold from the far-end noise floor.
        if (aec->farlevel.minlevel < noisyPower) {
            actThreshold = actThresholdClean;
        }
        else {
            actThreshold = actThresholdNoisy;
        }

        if ((aec->stateCounter > (0.5f * countLen * subCountLen))
            && (aec->farlevel.sfrcounter == 0)

            // Estimate in active far-end segments only
            && (aec->farlevel.averagelevel > (actThreshold * aec->farlevel.minlevel))
            ) {

            // Subtract noise power
            echo = aec->nearlevel.averagelevel - safety * aec->nearlevel.minlevel;

            // ERL
            dtmp = 10 * (float)log10(aec->farlevel.averagelevel /
                aec->nearlevel.averagelevel + 1e-10f);
            // NOTE(review): this noise-corrected ERL value is overwritten
            // before being read (see the A_NLP section) — confirm whether it
            // was meant to feed the ERL statistics.
            dtmp2 = 10 * (float)log10(aec->farlevel.averagelevel / echo + 1e-10f);

            aec->erl.instant = dtmp;
            if (dtmp > aec->erl.max) {
                aec->erl.max = dtmp;
            }

            if (dtmp < aec->erl.min) {
                aec->erl.min = dtmp;
            }

            aec->erl.counter++;
            aec->erl.sum += dtmp;
            aec->erl.average = aec->erl.sum / aec->erl.counter;

            // Upper mean
            if (dtmp > aec->erl.average) {
                aec->erl.hicounter++;
                aec->erl.hisum += dtmp;
                aec->erl.himean = aec->erl.hisum / aec->erl.hicounter;
            }

            // A_NLP
            dtmp = 10 * (float)log10(aec->nearlevel.averagelevel /
                (2 * aec->linoutlevel.averagelevel) + 1e-10f);

            // subtract noise power
            suppressedEcho = 2 * (aec->linoutlevel.averagelevel -
                safety * aec->linoutlevel.minlevel);

            dtmp2 = 10 * (float)log10(echo / suppressedEcho + 1e-10f);

            // NOTE(review): |instant| is set from the noise-corrected dtmp2,
            // while min/max/average below track the uncorrected dtmp —
            // confirm this asymmetry is intentional.
            aec->aNlp.instant = dtmp2;
            if (dtmp > aec->aNlp.max) {
                aec->aNlp.max = dtmp;
            }

            if (dtmp < aec->aNlp.min) {
                aec->aNlp.min = dtmp;
            }

            aec->aNlp.counter++;
            aec->aNlp.sum += dtmp;
            aec->aNlp.average = aec->aNlp.sum / aec->aNlp.counter;

            // Upper mean
            if (dtmp > aec->aNlp.average) {
                aec->aNlp.hicounter++;
                aec->aNlp.hisum += dtmp;
                aec->aNlp.himean = aec->aNlp.hisum / aec->aNlp.hicounter;
            }

            // ERLE

            // subtract noise power
            suppressedEcho = 2 * (aec->nlpoutlevel.averagelevel -
                safety * aec->nlpoutlevel.minlevel);

            // NOTE(review): the first dtmp here is computed and immediately
            // discarded by the |dtmp = dtmp2| below; only the noise-corrected
            // ratio feeds the ERLE statistics.
            dtmp = 10 * (float)log10(aec->nearlevel.averagelevel /
                (2 * aec->nlpoutlevel.averagelevel) + 1e-10f);
            dtmp2 = 10 * (float)log10(echo / suppressedEcho + 1e-10f);

            dtmp = dtmp2;
            aec->erle.instant = dtmp;
            if (dtmp > aec->erle.max) {
                aec->erle.max = dtmp;
            }

            if (dtmp < aec->erle.min) {
                aec->erle.min = dtmp;
            }

            aec->erle.counter++;
            aec->erle.sum += dtmp;
            aec->erle.average = aec->erle.sum / aec->erle.counter;

            // Upper mean
            if (dtmp > aec->erle.average) {
                aec->erle.hicounter++;
                aec->erle.hisum += dtmp;
                aec->erle.himean = aec->erle.hisum / aec->erle.hicounter;
            }
        }

        // Start a fresh echo-activity count for the next cycle.
        aec->stateCounter = 0;
    }
}
+
+static void TimeToFrequency(float time_data[PART_LEN2],
+                            float freq_data[2][PART_LEN1],
+                            int window) {
+  int i = 0;
+
+  // TODO(bjornv): Should we have a different function/wrapper for windowed FFT?
+  if (window) {
+    for (i = 0; i < PART_LEN; i++) {
+      time_data[i] *= sqrtHanning[i];
+      time_data[PART_LEN + i] *= sqrtHanning[PART_LEN - i];
+    }
+  }
+
+  aec_rdft_forward_128(time_data);
+  // Reorder.
+  freq_data[1][0] = 0;
+  freq_data[1][PART_LEN] = 0;
+  freq_data[0][0] = time_data[0];
+  freq_data[0][PART_LEN] = time_data[1];
+  for (i = 1; i < PART_LEN; i++) {
+    freq_data[0][i] = time_data[2 * i];
+    freq_data[1][i] = time_data[2 * i + 1];
+  }
+}
diff --git a/trunk/src/modules/audio_processing/aec/aec_core.h b/trunk/src/modules/audio_processing/aec/aec_core.h
new file mode 100644
index 0000000..d326a68
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_core.h
@@ -0,0 +1,180 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Specifies the interface for the AEC core.
+ */
+
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_CORE_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_CORE_H_

#ifdef WEBRTC_AEC_DEBUG_DUMP
#include <stdio.h>  // FILE, used by the debug-dump members of aec_t.
#endif

#include "typedefs.h"

#define FRAME_LEN 80  // Samples per frame.
#define PART_LEN 64 // Length of partition
#define PART_LEN1 (PART_LEN + 1) // Unique fft coefficients
#define PART_LEN2 (PART_LEN * 2) // Length of partition * 2
#define NR_PART 12  // Number of partitions in filter.
#define PREF_BAND_SIZE 24  // Width of the NLP's preferred frequency band
                           // (divided by the rate multiple at runtime).

// Delay estimator constants, used for logging.
enum { kMaxDelayBlocks = 60 };
enum { kLookaheadBlocks = 15 };
enum { kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks };

typedef float complex_t[2];
// For performance reasons, some arrays of complex numbers are replaced by twice
// as long arrays of float, all the real parts followed by all the imaginary
// ones (complex_t[SIZE] -> float[2][SIZE]). This allows SIMD optimizations and
// is better than two arrays (one for the real parts and one for the imaginary
// parts) as this other way would require two pointers instead of one and cause
// extra register spilling. This also allows the offsets to be calculated at
// compile time.

// Metrics
// Sentinel/base value the echo metrics are initialized to (see
// WebRtcAec_InitStats in aec_core.c).
enum {offsetLevel = -100};
+
// Running power-level statistics for one signal path (far, near, linear
// output or NLP output); maintained by UpdateLevel() in aec_core.c.
// Block energies accumulate into sub-frame sums, which roll up into frame
// levels, a long-term average and a slowly-inflating tracked minimum.
typedef struct {
    float sfrsum;       // Energy sum over the current sub-frame window.
    int sfrcounter;     // Blocks accumulated into |sfrsum|.
    float framelevel;   // Latest per-frame level (energy per sample).
    float frsum;        // Frame-level sum feeding |averagelevel|.
    int frcounter;      // Frames accumulated into |frsum|.
    float minlevel;     // Tracked minimum frame level (noise-floor proxy).
    float averagelevel; // Long-term average frame level.
} power_level_t;
+
// Statistics record for one echo metric (ERL, ERLE, A_NLP); updated by
// UpdateMetrics() and reset by WebRtcAec_InitStats() in aec_core.c.
typedef struct {
    float instant;  // Most recent value.
    float average;  // Running mean (|sum| / |counter|).
    float min;      // Smallest value seen since reset.
    float max;      // Largest value seen since reset.
    float sum;      // Sum of all samples since reset.
    float hisum;    // Sum of above-average samples only.
    float himean;   // Mean of above-average samples (|hisum| / |hicounter|).
    int counter;    // Total samples since reset.
    int hicounter;  // Above-average samples since reset.
} stats_t;
+
// Complete state for one AEC core instance, processing PART_LEN-sample
// blocks in the frequency domain with an NR_PART-partition adaptive filter.
typedef struct {
    int farBufWritePos, farBufReadPos;

    int knownDelay;
    int inSamples, outSamples;
    int delayEstCtr;  // Cycles modulo (10 * mult); the filter-partition
                      // delay search runs when it wraps to 0.

    void *nearFrBuf, *outFrBuf;  // Nearend input / processed output FIFOs.

    void *nearFrBufH;  // Nearend input FIFO, upper band (32 kHz mode).
    void *outFrBufH;   // Processed output FIFO, upper band (32 kHz mode).

    float dBuf[PART_LEN2]; // nearend; previous and current block concatenated
    float eBuf[PART_LEN2]; // error (nearend minus linear echo estimate)

    float dBufH[PART_LEN2]; // nearend, upper band

    float xPow[PART_LEN1];        // Smoothed farend power spectrum.
    float dPow[PART_LEN1];        // Smoothed nearend power spectrum.
    float dMinPow[PART_LEN1];     // Tracked nearend minimum power (noise).
    float dInitMinPow[PART_LEN1]; // Ramped-in noise estimate used at startup.
    float *noisePow;              // Points at dMinPow or dInitMinPow.

    float xfBuf[2][NR_PART * PART_LEN1]; // farend fft buffer
    float wfBuf[2][NR_PART * PART_LEN1]; // filter fft
    complex_t sde[PART_LEN1]; // cross-psd of nearend and error
    complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
    complex_t xfwBuf[NR_PART * PART_LEN1]; // farend windowed fft buffer

    float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near and error psd
    float hNs[PART_LEN1];
    float hNlFbMin, hNlFbLocalMin;  // Suppression-gain feedback minima.
    float hNlXdAvgMin;              // Minimum of averaged (1 - cohxd);
                                    // 1 means no echo observed yet.
    int hNlNewMin, hNlMinCtr;       // New-minimum flag and its age counter.
    float overDrive, overDriveSm;   // Suppression overdrive and smoothed copy.
    float targetSupp, minOverDrive; // Target suppression / overdrive floor.
    float outBuf[PART_LEN];         // Overlap-add tail of the output.
    int delayIdx;                   // Filter partition with maximum energy.

    short stNearState, echoState;   // Nearend-speech and echo-present flags.
    short divergeState;             // Divergent-filter safeguard latch.

    int xfBufBlockPos;  // Ring position of the newest partition in xfBuf.

    void* far_buf;
    void* far_buf_windowed;
    int system_delay;  // Current system delay buffered in AEC.

    int mult;  // sampling frequency multiple
    int sampFreq;
    WebRtc_UWord32 seed;  // State of the comfort-noise random generator.

    float mu; // stepsize
    float errThresh; // error threshold

    int noiseEstCtr;  // Blocks processed by the startup noise estimator.

    power_level_t farlevel;
    power_level_t nearlevel;
    power_level_t linoutlevel;
    power_level_t nlpoutlevel;

    int metricsMode;   // 1 enables level/metric updates.
    int stateCounter;  // Blocks with echo present in the current cycle.
    stats_t erl;
    stats_t erle;
    stats_t aNlp;
    stats_t rerl;

    // Quantities to control H band scaling for SWB input
    int freq_avg_ic;         //initial bin for averaging nlp gain
    int flag_Hband_cn;      //for comfort noise
    float cn_scale_Hband;   //scale for comfort noise in H band

    int delay_histogram[kHistorySizeBlocks];  // Counts per delay estimate.
    int delay_logging_enabled;
    void* delay_estimator;

#ifdef WEBRTC_AEC_DEBUG_DUMP
    void* far_time_buf;
    FILE *farFile;
    FILE *nearFile;
    FILE *outFile;
    FILE *outLinearFile;
#endif
} aec_t;
+
// Function pointers for the speed-critical kernels, so that plain C or SIMD
// (see WebRtcAec_InitAec_SSE2) implementations can be selected at run time.
typedef void (*WebRtcAec_FilterFar_t)(aec_t *aec, float yf[2][PART_LEN1]);
extern WebRtcAec_FilterFar_t WebRtcAec_FilterFar;
typedef void (*WebRtcAec_ScaleErrorSignal_t)(aec_t *aec, float ef[2][PART_LEN1]);
extern WebRtcAec_ScaleErrorSignal_t WebRtcAec_ScaleErrorSignal;
typedef void (*WebRtcAec_FilterAdaptation_t)
  (aec_t *aec, float *fft, float ef[2][PART_LEN1]);
extern WebRtcAec_FilterAdaptation_t WebRtcAec_FilterAdaptation;
typedef void (*WebRtcAec_OverdriveAndSuppress_t)
  (aec_t *aec, float hNl[PART_LEN1], const float hNlFb, float efw[2][PART_LEN1]);
extern WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress;

// Creation, initialization and teardown of an AEC core instance.
int WebRtcAec_CreateAec(aec_t **aec);
int WebRtcAec_FreeAec(aec_t *aec);
int WebRtcAec_InitAec(aec_t *aec, int sampFreq);
// Points the kernel function pointers above at the SSE2 implementations.
void WebRtcAec_InitAec_SSE2(void);

void WebRtcAec_InitMetrics(aec_t *aec);
void WebRtcAec_BufferFarendPartition(aec_t *aec, const float* farend);
void WebRtcAec_ProcessFrame(aec_t* aec,
                            const short *nearend,
                            const short *nearendH,
                            int knownDelay);

#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_CORE_H_
diff --git a/trunk/src/modules/audio_processing/aec/aec_core_sse2.c b/trunk/src/modules/audio_processing/aec/aec_core_sse2.c
new file mode 100644
index 0000000..74a1c48
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_core_sse2.c
@@ -0,0 +1,415 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The core AEC algorithm, SSE2 version of speed-critical functions.
+ */
+
+#include "aec_core.h"
+
+#include <emmintrin.h>
+#include <math.h>
+#include <string.h>  // memset
+
+#include "aec_rdft.h"
+
+__inline static float MulRe(float aRe, float aIm, float bRe, float bIm)
+{
+  return aRe * bRe - aIm * bIm;
+}
+
+__inline static float MulIm(float aRe, float aIm, float bRe, float bIm)
+{
+  return aRe * bIm + aIm * bRe;
+}
+
+static void FilterFarSSE2(aec_t *aec, float yf[2][PART_LEN1])
+{
+  int i;
+  for (i = 0; i < NR_PART; i++) {
+    int j;
+    int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+    int pos = i * PART_LEN1;
+    // Check for wrap
+    if (i + aec->xfBufBlockPos >= NR_PART) {
+      xPos -= NR_PART*(PART_LEN1);
+    }
+
+    // vectorized code (four at once)
+    for (j = 0; j + 3 < PART_LEN1; j += 4) {
+      const __m128 xfBuf_re = _mm_loadu_ps(&aec->xfBuf[0][xPos + j]);
+      const __m128 xfBuf_im = _mm_loadu_ps(&aec->xfBuf[1][xPos + j]);
+      const __m128 wfBuf_re = _mm_loadu_ps(&aec->wfBuf[0][pos + j]);
+      const __m128 wfBuf_im = _mm_loadu_ps(&aec->wfBuf[1][pos + j]);
+      const __m128 yf_re = _mm_loadu_ps(&yf[0][j]);
+      const __m128 yf_im = _mm_loadu_ps(&yf[1][j]);
+      const __m128 a = _mm_mul_ps(xfBuf_re, wfBuf_re);
+      const __m128 b = _mm_mul_ps(xfBuf_im, wfBuf_im);
+      const __m128 c = _mm_mul_ps(xfBuf_re, wfBuf_im);
+      const __m128 d = _mm_mul_ps(xfBuf_im, wfBuf_re);
+      const __m128 e = _mm_sub_ps(a, b);
+      const __m128 f = _mm_add_ps(c, d);
+      const __m128 g = _mm_add_ps(yf_re, e);
+      const __m128 h = _mm_add_ps(yf_im, f);
+      _mm_storeu_ps(&yf[0][j], g);
+      _mm_storeu_ps(&yf[1][j], h);
+    }
+    // scalar code for the remaining items.
+    for (; j < PART_LEN1; j++) {
+      yf[0][j] += MulRe(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j],
+                        aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]);
+      yf[1][j] += MulIm(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j],
+                        aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]);
+    }
+  }
+}
+
+static void ScaleErrorSignalSSE2(aec_t *aec, float ef[2][PART_LEN1])
+{
+  const __m128 k1e_10f = _mm_set1_ps(1e-10f);
+  const __m128 kThresh = _mm_set1_ps(aec->errThresh);
+  const __m128 kMu = _mm_set1_ps(aec->mu);
+
+  int i;
+  // vectorized code (four at once)
+  for (i = 0; i + 3 < PART_LEN1; i += 4) {
+    const __m128 xPow = _mm_loadu_ps(&aec->xPow[i]);
+    const __m128 ef_re_base = _mm_loadu_ps(&ef[0][i]);
+    const __m128 ef_im_base = _mm_loadu_ps(&ef[1][i]);
+
+    const __m128 xPowPlus = _mm_add_ps(xPow, k1e_10f);
+    __m128 ef_re = _mm_div_ps(ef_re_base, xPowPlus);
+    __m128 ef_im = _mm_div_ps(ef_im_base, xPowPlus);
+    const __m128 ef_re2 = _mm_mul_ps(ef_re, ef_re);
+    const __m128 ef_im2 = _mm_mul_ps(ef_im, ef_im);
+    const __m128 ef_sum2 = _mm_add_ps(ef_re2, ef_im2);
+    const __m128 absEf = _mm_sqrt_ps(ef_sum2);
+    const __m128 bigger = _mm_cmpgt_ps(absEf, kThresh);
+    __m128 absEfPlus = _mm_add_ps(absEf, k1e_10f);
+    const __m128 absEfInv = _mm_div_ps(kThresh, absEfPlus);
+    __m128 ef_re_if = _mm_mul_ps(ef_re, absEfInv);
+    __m128 ef_im_if = _mm_mul_ps(ef_im, absEfInv);
+    ef_re_if = _mm_and_ps(bigger, ef_re_if);
+    ef_im_if = _mm_and_ps(bigger, ef_im_if);
+    ef_re = _mm_andnot_ps(bigger, ef_re);
+    ef_im = _mm_andnot_ps(bigger, ef_im);
+    ef_re = _mm_or_ps(ef_re, ef_re_if);
+    ef_im = _mm_or_ps(ef_im, ef_im_if);
+    ef_re = _mm_mul_ps(ef_re, kMu);
+    ef_im = _mm_mul_ps(ef_im, kMu);
+
+    _mm_storeu_ps(&ef[0][i], ef_re);
+    _mm_storeu_ps(&ef[1][i], ef_im);
+  }
+  // scalar code for the remaining items.
+  for (; i < (PART_LEN1); i++) {
+    float absEf;
+    ef[0][i] /= (aec->xPow[i] + 1e-10f);
+    ef[1][i] /= (aec->xPow[i] + 1e-10f);
+    absEf = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
+
+    if (absEf > aec->errThresh) {
+      absEf = aec->errThresh / (absEf + 1e-10f);
+      ef[0][i] *= absEf;
+      ef[1][i] *= absEf;
+    }
+
+    // Stepsize factor
+    ef[0][i] *= aec->mu;
+    ef[1][i] *= aec->mu;
+  }
+}
+
+static void FilterAdaptationSSE2(aec_t *aec, float *fft, float ef[2][PART_LEN1]) {
+  int i, j;
+  for (i = 0; i < NR_PART; i++) {
+    int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
+    int pos = i * PART_LEN1;
+    // Check for wrap
+    if (i + aec->xfBufBlockPos >= NR_PART) {
+      xPos -= NR_PART * PART_LEN1;
+    }
+
+    // Process the whole array...
+    for (j = 0; j < PART_LEN; j+= 4) {
+      // Load xfBuf and ef.
+      const __m128 xfBuf_re = _mm_loadu_ps(&aec->xfBuf[0][xPos + j]);
+      const __m128 xfBuf_im = _mm_loadu_ps(&aec->xfBuf[1][xPos + j]);
+      const __m128 ef_re = _mm_loadu_ps(&ef[0][j]);
+      const __m128 ef_im = _mm_loadu_ps(&ef[1][j]);
+      // Calculate the product of conjugate(xfBuf) by ef.
+      //   re(conjugate(a) * b) = aRe * bRe + aIm * bIm
+      //   im(conjugate(a) * b) = aRe * bIm - aIm * bRe
+      const __m128 a = _mm_mul_ps(xfBuf_re, ef_re);
+      const __m128 b = _mm_mul_ps(xfBuf_im, ef_im);
+      const __m128 c = _mm_mul_ps(xfBuf_re, ef_im);
+      const __m128 d = _mm_mul_ps(xfBuf_im, ef_re);
+      const __m128 e = _mm_add_ps(a, b);
+      const __m128 f = _mm_sub_ps(c, d);
+      // Interleave real and imaginary parts.
+      const __m128 g = _mm_unpacklo_ps(e, f);
+      const __m128 h = _mm_unpackhi_ps(e, f);
+      // Store
+      _mm_storeu_ps(&fft[2*j + 0], g);
+      _mm_storeu_ps(&fft[2*j + 4], h);
+    }
+    // ... and fixup the first imaginary entry.
+    fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
+                   -aec->xfBuf[1][xPos + PART_LEN],
+                   ef[0][PART_LEN], ef[1][PART_LEN]);
+
+    aec_rdft_inverse_128(fft);
+    memset(fft + PART_LEN, 0, sizeof(float)*PART_LEN);
+
+    // fft scaling
+    {
+      float scale = 2.0f / PART_LEN2;
+      const __m128 scale_ps = _mm_load_ps1(&scale);
+      for (j = 0; j < PART_LEN; j+=4) {
+        const __m128 fft_ps = _mm_loadu_ps(&fft[j]);
+        const __m128 fft_scale = _mm_mul_ps(fft_ps, scale_ps);
+        _mm_storeu_ps(&fft[j], fft_scale);
+      }
+    }
+    aec_rdft_forward_128(fft);
+
+    {
+      float wt1 = aec->wfBuf[1][pos];
+      aec->wfBuf[0][pos + PART_LEN] += fft[1];
+      for (j = 0; j < PART_LEN; j+= 4) {
+        __m128 wtBuf_re = _mm_loadu_ps(&aec->wfBuf[0][pos + j]);
+        __m128 wtBuf_im = _mm_loadu_ps(&aec->wfBuf[1][pos + j]);
+        const __m128 fft0 = _mm_loadu_ps(&fft[2 * j + 0]);
+        const __m128 fft4 = _mm_loadu_ps(&fft[2 * j + 4]);
+        const __m128 fft_re = _mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(2, 0, 2 ,0));
+        const __m128 fft_im = _mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(3, 1, 3 ,1));
+        wtBuf_re = _mm_add_ps(wtBuf_re, fft_re);
+        wtBuf_im = _mm_add_ps(wtBuf_im, fft_im);
+        _mm_storeu_ps(&aec->wfBuf[0][pos + j], wtBuf_re);
+        _mm_storeu_ps(&aec->wfBuf[1][pos + j], wtBuf_im);
+      }
+      aec->wfBuf[1][pos] = wt1;
+    }
+  }
+}
+
+static __m128 mm_pow_ps(__m128 a, __m128 b)
+{
+  // a^b = exp2(b * log2(a))
+  //   exp2(x) and log2(x) are calculated using polynomial approximations.
+  __m128 log2_a, b_log2_a, a_exp_b;
+
+  // Calculate log2(x), x = a.
+  {
+    // To calculate log2(x), we decompose x like this:
+    //   x = y * 2^n
+    //     n is an integer
+    //     y is in the [1.0, 2.0) range
+    //
+    //   log2(x) = log2(y) + n
+    //     n       can be evaluated by playing with float representation.
+    //     log2(y) in a small range can be approximated, this code uses an order
+    //             five polynomial approximation. The coefficients have been
+    //             estimated with the Remez algorithm and the resulting
+    //             polynomial has a maximum relative error of 0.00086%.
+
+    // Compute n.
+    //    This is done by masking the exponent, shifting it into the top bit of
+    //    the mantissa, putting eight into the biased exponent (to compensate
+    //    for the fact that the exponent has been shifted into the top/
+    //    fractional part) and finally getting rid of the implicit leading one
+    //    from the mantissa by subtracting it out.
+    static const ALIGN16_BEG int float_exponent_mask[4] ALIGN16_END =
+        {0x7F800000, 0x7F800000, 0x7F800000, 0x7F800000};
+    static const ALIGN16_BEG int eight_biased_exponent[4] ALIGN16_END =
+        {0x43800000, 0x43800000, 0x43800000, 0x43800000};
+    static const ALIGN16_BEG int implicit_leading_one[4] ALIGN16_END =
+        {0x43BF8000, 0x43BF8000, 0x43BF8000, 0x43BF8000};
+    static const int shift_exponent_into_top_mantissa = 8;
+    const __m128 two_n = _mm_and_ps(a, *((__m128 *)float_exponent_mask));
+    const __m128 n_1 = _mm_castsi128_ps(_mm_srli_epi32(_mm_castps_si128(two_n),
+        shift_exponent_into_top_mantissa));
+    const __m128 n_0 = _mm_or_ps(n_1, *((__m128 *)eight_biased_exponent));
+    const __m128 n   = _mm_sub_ps(n_0,  *((__m128 *)implicit_leading_one));
+
+    // Compute y.
+    static const ALIGN16_BEG int mantissa_mask[4] ALIGN16_END =
+        {0x007FFFFF, 0x007FFFFF, 0x007FFFFF, 0x007FFFFF};
+    static const ALIGN16_BEG int zero_biased_exponent_is_one[4] ALIGN16_END =
+        {0x3F800000, 0x3F800000, 0x3F800000, 0x3F800000};
+    const __m128 mantissa = _mm_and_ps(a, *((__m128 *)mantissa_mask));
+    const __m128 y        = _mm_or_ps(
+        mantissa,  *((__m128 *)zero_biased_exponent_is_one));
+
+    // Approximate log2(y) ~= (y - 1) * pol5(y).
+    //    pol5(y) = C5 * y^5 + C4 * y^4 + C3 * y^3 + C2 * y^2 + C1 * y + C0
+    static const ALIGN16_BEG float ALIGN16_END C5[4] =
+        {-3.4436006e-2f, -3.4436006e-2f, -3.4436006e-2f, -3.4436006e-2f};
+    static const ALIGN16_BEG float ALIGN16_END C4[4] =
+        {3.1821337e-1f, 3.1821337e-1f, 3.1821337e-1f, 3.1821337e-1f};
+    static const ALIGN16_BEG float ALIGN16_END C3[4] =
+        {-1.2315303f, -1.2315303f, -1.2315303f, -1.2315303f};
+    static const ALIGN16_BEG float ALIGN16_END C2[4] =
+        {2.5988452f, 2.5988452f, 2.5988452f, 2.5988452f};
+    static const ALIGN16_BEG float ALIGN16_END C1[4] =
+        {-3.3241990f, -3.3241990f, -3.3241990f, -3.3241990f};
+    static const ALIGN16_BEG float ALIGN16_END C0[4] =
+        {3.1157899f, 3.1157899f, 3.1157899f, 3.1157899f};
+    const __m128 pol5_y_0 = _mm_mul_ps(y,        *((__m128 *)C5));
+    const __m128 pol5_y_1 = _mm_add_ps(pol5_y_0, *((__m128 *)C4));
+    const __m128 pol5_y_2 = _mm_mul_ps(pol5_y_1, y);
+    const __m128 pol5_y_3 = _mm_add_ps(pol5_y_2, *((__m128 *)C3));
+    const __m128 pol5_y_4 = _mm_mul_ps(pol5_y_3, y);
+    const __m128 pol5_y_5 = _mm_add_ps(pol5_y_4, *((__m128 *)C2));
+    const __m128 pol5_y_6 = _mm_mul_ps(pol5_y_5, y);
+    const __m128 pol5_y_7 = _mm_add_ps(pol5_y_6, *((__m128 *)C1));
+    const __m128 pol5_y_8 = _mm_mul_ps(pol5_y_7, y);
+    const __m128 pol5_y   = _mm_add_ps(pol5_y_8, *((__m128 *)C0));
+    const __m128 y_minus_one = _mm_sub_ps(
+        y, *((__m128 *)zero_biased_exponent_is_one));
+    const __m128 log2_y = _mm_mul_ps(y_minus_one ,  pol5_y);
+
+    // Combine parts.
+    log2_a = _mm_add_ps(n, log2_y);
+  }
+
+  // b * log2(a)
+  b_log2_a = _mm_mul_ps(b, log2_a);
+
+  // Calculate exp2(x), x = b * log2(a).
+  {
+    // To calculate 2^x, we decompose x like this:
+    //   x = n + y
+    //     n is an integer, the value of x - 0.5 rounded down, therefore
+    //     y is in the [0.5, 1.5) range
+    //
+    //   2^x = 2^n * 2^y
+    //     2^n can be evaluated by playing with float representation.
+    //     2^y in a small range can be approximated, this code uses an order two
+    //         polynomial approximation. The coefficients have been estimated
+    //         with the Remez algorithm and the resulting polynomial has a
+    //         maximum relative error of 0.17%.
+
+    // To avoid over/underflow, we reduce the range of input to (-127, 129].
+    static const ALIGN16_BEG float max_input[4] ALIGN16_END =
+        {129.f, 129.f, 129.f, 129.f};
+    static const ALIGN16_BEG float min_input[4] ALIGN16_END =
+        {-126.99999f, -126.99999f, -126.99999f, -126.99999f};
+    const __m128 x_min = _mm_min_ps(b_log2_a, *((__m128 *)max_input));
+    const __m128 x_max = _mm_max_ps(x_min,    *((__m128 *)min_input));
+    // Compute n.
+    static const ALIGN16_BEG float half[4] ALIGN16_END =
+        {0.5f, 0.5f, 0.5f, 0.5f};
+    const __m128  x_minus_half = _mm_sub_ps(x_max, *((__m128 *)half));
+    const __m128i x_minus_half_floor = _mm_cvtps_epi32(x_minus_half);
+    // Compute 2^n.
+    static const ALIGN16_BEG int float_exponent_bias[4] ALIGN16_END =
+        {127, 127, 127, 127};
+    static const int float_exponent_shift = 23;
+    const __m128i two_n_exponent = _mm_add_epi32(
+        x_minus_half_floor, *((__m128i *)float_exponent_bias));
+    const __m128  two_n = _mm_castsi128_ps(_mm_slli_epi32(
+        two_n_exponent, float_exponent_shift));
+    // Compute y.
+    const __m128 y = _mm_sub_ps(x_max, _mm_cvtepi32_ps(x_minus_half_floor));
+    // Approximate 2^y ~= C2 * y^2 + C1 * y + C0.
+    static const ALIGN16_BEG float C2[4] ALIGN16_END =
+        {3.3718944e-1f, 3.3718944e-1f, 3.3718944e-1f, 3.3718944e-1f};
+    static const ALIGN16_BEG float C1[4] ALIGN16_END =
+        {6.5763628e-1f, 6.5763628e-1f, 6.5763628e-1f, 6.5763628e-1f};
+    static const ALIGN16_BEG float C0[4] ALIGN16_END =
+        {1.0017247f, 1.0017247f, 1.0017247f, 1.0017247f};
+    const __m128 exp2_y_0 = _mm_mul_ps(y,        *((__m128 *)C2));
+    const __m128 exp2_y_1 = _mm_add_ps(exp2_y_0, *((__m128 *)C1));
+    const __m128 exp2_y_2 = _mm_mul_ps(exp2_y_1, y);
+    const __m128 exp2_y   = _mm_add_ps(exp2_y_2, *((__m128 *)C0));
+
+    // Combine parts.
+    a_exp_b = _mm_mul_ps(exp2_y, two_n);
+  }
+  return a_exp_b;
+}
+
+extern const float WebRtcAec_weightCurve[65];
+extern const float WebRtcAec_overDriveCurve[65];
+
+static void OverdriveAndSuppressSSE2(aec_t *aec, float hNl[PART_LEN1],
+                                     const float hNlFb,
+                                     float efw[2][PART_LEN1]) {
+  int i;
+  const __m128 vec_hNlFb = _mm_set1_ps(hNlFb);
+  const __m128 vec_one = _mm_set1_ps(1.0f);
+  const __m128 vec_minus_one = _mm_set1_ps(-1.0f);
+  const __m128 vec_overDriveSm = _mm_set1_ps(aec->overDriveSm);
+  // vectorized code (four at once)
+  for (i = 0; i + 3 < PART_LEN1; i+=4) {
+    // Weight subbands
+    __m128 vec_hNl = _mm_loadu_ps(&hNl[i]);
+    const __m128 vec_weightCurve = _mm_loadu_ps(&WebRtcAec_weightCurve[i]);
+    const __m128 bigger = _mm_cmpgt_ps(vec_hNl, vec_hNlFb);
+    const __m128 vec_weightCurve_hNlFb = _mm_mul_ps(
+        vec_weightCurve, vec_hNlFb);
+    const __m128 vec_one_weightCurve = _mm_sub_ps(vec_one, vec_weightCurve);
+    const __m128 vec_one_weightCurve_hNl = _mm_mul_ps(
+        vec_one_weightCurve, vec_hNl);
+    const __m128 vec_if0 = _mm_andnot_ps(bigger, vec_hNl);
+    const __m128 vec_if1 = _mm_and_ps(
+        bigger, _mm_add_ps(vec_weightCurve_hNlFb, vec_one_weightCurve_hNl));
+    vec_hNl = _mm_or_ps(vec_if0, vec_if1);
+
+    {
+      const __m128 vec_overDriveCurve = _mm_loadu_ps(
+          &WebRtcAec_overDriveCurve[i]);
+      const __m128 vec_overDriveSm_overDriveCurve = _mm_mul_ps(
+          vec_overDriveSm, vec_overDriveCurve);
+      vec_hNl = mm_pow_ps(vec_hNl, vec_overDriveSm_overDriveCurve);
+      _mm_storeu_ps(&hNl[i], vec_hNl);
+    }
+
+    // Suppress error signal
+    {
+      __m128 vec_efw_re = _mm_loadu_ps(&efw[0][i]);
+      __m128 vec_efw_im = _mm_loadu_ps(&efw[1][i]);
+      vec_efw_re = _mm_mul_ps(vec_efw_re, vec_hNl);
+      vec_efw_im = _mm_mul_ps(vec_efw_im, vec_hNl);
+
+      // Ooura fft returns incorrect sign on imaginary component. It matters
+      // here because we are making an additive change with comfort noise.
+      vec_efw_im = _mm_mul_ps(vec_efw_im, vec_minus_one);
+      _mm_storeu_ps(&efw[0][i], vec_efw_re);
+      _mm_storeu_ps(&efw[1][i], vec_efw_im);
+    }
+  }
+  // scalar code for the remaining items.
+  for (; i < PART_LEN1; i++) {
+    // Weight subbands
+    if (hNl[i] > hNlFb) {
+      hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
+          (1 - WebRtcAec_weightCurve[i]) * hNl[i];
+    }
+    hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+
+    // Suppress error signal
+    efw[0][i] *= hNl[i];
+    efw[1][i] *= hNl[i];
+
+    // Ooura fft returns incorrect sign on imaginary component. It matters
+    // here because we are making an additive change with comfort noise.
+    efw[1][i] *= -1;
+  }
+}
+
+void WebRtcAec_InitAec_SSE2(void) {
+  WebRtcAec_FilterFar = FilterFarSSE2;
+  WebRtcAec_ScaleErrorSignal = ScaleErrorSignalSSE2;
+  WebRtcAec_FilterAdaptation = FilterAdaptationSSE2;
+  WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2;
+}
+
diff --git a/trunk/src/modules/audio_processing/aec/aec_rdft.c b/trunk/src/modules/audio_processing/aec/aec_rdft.c
new file mode 100644
index 0000000..19908d8
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_rdft.c
@@ -0,0 +1,587 @@
+/*
+ * http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html
+ * Copyright Takuya OOURA, 1996-2001
+ *
+ * You may use, copy, modify and distribute this code for any purpose (include
+ * commercial use) and without fee. Please refer to this package when you modify
+ * this code.
+ *
+ * Changes by the WebRTC authors:
+ *    - Trivial type modifications.
+ *    - Minimal code subset to do rdft of length 128.
+ *    - Optimizations because of known length.
+ *
+ *  All changes are covered by the WebRTC license and IP grant:
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "aec_rdft.h"
+
+#include <math.h>
+
+#include "system_wrappers/interface/cpu_features_wrapper.h"
+#include "typedefs.h"
+
+// constants shared by all paths (C, SSE2).
+float rdft_w[64];
+// constants used by the C path.
+float rdft_wk3ri_first[32];
+float rdft_wk3ri_second[32];
+// constants used by SSE2 but initialized in C path.
+ALIGN16_BEG float ALIGN16_END rdft_wk1r[32];
+ALIGN16_BEG float ALIGN16_END rdft_wk2r[32];
+ALIGN16_BEG float ALIGN16_END rdft_wk3r[32];
+ALIGN16_BEG float ALIGN16_END rdft_wk1i[32];
+ALIGN16_BEG float ALIGN16_END rdft_wk2i[32];
+ALIGN16_BEG float ALIGN16_END rdft_wk3i[32];
+ALIGN16_BEG float ALIGN16_END cftmdl_wk1r[4];
+
+static int ip[16];
+
+static void bitrv2_32or128(int n, int *ip, float *a) {
+  // n is 32 or 128
+  int j, j1, k, k1, m, m2;
+  float xr, xi, yr, yi;
+
+  ip[0] = 0;
+  {
+    int l = n;
+    m = 1;
+    while ((m << 3) < l) {
+      l >>= 1;
+      for (j = 0; j < m; j++) {
+        ip[m + j] = ip[j] + l;
+      }
+      m <<= 1;
+    }
+  }
+  m2 = 2 * m;
+  for (k = 0; k < m; k++) {
+    for (j = 0; j < k; j++) {
+      j1 = 2 * j + ip[k];
+      k1 = 2 * k + ip[j];
+      xr = a[j1];
+      xi = a[j1 + 1];
+      yr = a[k1];
+      yi = a[k1 + 1];
+      a[j1] = yr;
+      a[j1 + 1] = yi;
+      a[k1] = xr;
+      a[k1 + 1] = xi;
+      j1 += m2;
+      k1 += 2 * m2;
+      xr = a[j1];
+      xi = a[j1 + 1];
+      yr = a[k1];
+      yi = a[k1 + 1];
+      a[j1] = yr;
+      a[j1 + 1] = yi;
+      a[k1] = xr;
+      a[k1 + 1] = xi;
+      j1 += m2;
+      k1 -= m2;
+      xr = a[j1];
+      xi = a[j1 + 1];
+      yr = a[k1];
+      yi = a[k1 + 1];
+      a[j1] = yr;
+      a[j1 + 1] = yi;
+      a[k1] = xr;
+      a[k1 + 1] = xi;
+      j1 += m2;
+      k1 += 2 * m2;
+      xr = a[j1];
+      xi = a[j1 + 1];
+      yr = a[k1];
+      yi = a[k1 + 1];
+      a[j1] = yr;
+      a[j1 + 1] = yi;
+      a[k1] = xr;
+      a[k1 + 1] = xi;
+    }
+    j1 = 2 * k + m2 + ip[k];
+    k1 = j1 + m2;
+    xr = a[j1];
+    xi = a[j1 + 1];
+    yr = a[k1];
+    yi = a[k1 + 1];
+    a[j1] = yr;
+    a[j1 + 1] = yi;
+    a[k1] = xr;
+    a[k1 + 1] = xi;
+  }
+}
+
+static void makewt_32(void) {
+  const int nw = 32;
+  int j, nwh;
+  float delta, x, y;
+
+  ip[0] = nw;
+  ip[1] = 1;
+  nwh = nw >> 1;
+  delta = atanf(1.0f) / nwh;
+  rdft_w[0] = 1;
+  rdft_w[1] = 0;
+  rdft_w[nwh] = cosf(delta * nwh);
+  rdft_w[nwh + 1] = rdft_w[nwh];
+  for (j = 2; j < nwh; j += 2) {
+    x = cosf(delta * j);
+    y = sinf(delta * j);
+    rdft_w[j] = x;
+    rdft_w[j + 1] = y;
+    rdft_w[nw - j] = y;
+    rdft_w[nw - j + 1] = x;
+  }
+  bitrv2_32or128(nw, ip + 2, rdft_w);
+
+  // pre-calculate constants used by cft1st_128 and cftmdl_128...
+  cftmdl_wk1r[0] = rdft_w[2];
+  cftmdl_wk1r[1] = rdft_w[2];
+  cftmdl_wk1r[2] = rdft_w[2];
+  cftmdl_wk1r[3] = -rdft_w[2];
+  {
+    int k1;
+
+    for (k1 = 0, j = 0; j < 128; j += 16, k1 += 2) {
+      const int k2 = 2 * k1;
+      const float wk2r = rdft_w[k1 + 0];
+      const float wk2i = rdft_w[k1 + 1];
+      float wk1r, wk1i;
+      // ... scalar version.
+      wk1r = rdft_w[k2 + 0];
+      wk1i = rdft_w[k2 + 1];
+      rdft_wk3ri_first[k1 + 0] = wk1r - 2 * wk2i * wk1i;
+      rdft_wk3ri_first[k1 + 1] = 2 * wk2i * wk1r - wk1i;
+      wk1r = rdft_w[k2 + 2];
+      wk1i = rdft_w[k2 + 3];
+      rdft_wk3ri_second[k1 + 0] = wk1r - 2 * wk2r * wk1i;
+      rdft_wk3ri_second[k1 + 1] = 2 * wk2r * wk1r - wk1i;
+      // ... vector version.
+      rdft_wk1r[k2 + 0] = rdft_w[k2 + 0];
+      rdft_wk1r[k2 + 1] = rdft_w[k2 + 0];
+      rdft_wk1r[k2 + 2] = rdft_w[k2 + 2];
+      rdft_wk1r[k2 + 3] = rdft_w[k2 + 2];
+      rdft_wk2r[k2 + 0] = rdft_w[k1 + 0];
+      rdft_wk2r[k2 + 1] = rdft_w[k1 + 0];
+      rdft_wk2r[k2 + 2] = -rdft_w[k1 + 1];
+      rdft_wk2r[k2 + 3] = -rdft_w[k1 + 1];
+      rdft_wk3r[k2 + 0] = rdft_wk3ri_first[k1 + 0];
+      rdft_wk3r[k2 + 1] = rdft_wk3ri_first[k1 + 0];
+      rdft_wk3r[k2 + 2] = rdft_wk3ri_second[k1 + 0];
+      rdft_wk3r[k2 + 3] = rdft_wk3ri_second[k1 + 0];
+      rdft_wk1i[k2 + 0] = -rdft_w[k2 + 1];
+      rdft_wk1i[k2 + 1] = rdft_w[k2 + 1];
+      rdft_wk1i[k2 + 2] = -rdft_w[k2 + 3];
+      rdft_wk1i[k2 + 3] = rdft_w[k2 + 3];
+      rdft_wk2i[k2 + 0] = -rdft_w[k1 + 1];
+      rdft_wk2i[k2 + 1] = rdft_w[k1 + 1];
+      rdft_wk2i[k2 + 2] = -rdft_w[k1 + 0];
+      rdft_wk2i[k2 + 3] = rdft_w[k1 + 0];
+      rdft_wk3i[k2 + 0] = -rdft_wk3ri_first[k1 + 1];
+      rdft_wk3i[k2 + 1] = rdft_wk3ri_first[k1 + 1];
+      rdft_wk3i[k2 + 2] = -rdft_wk3ri_second[k1 + 1];
+      rdft_wk3i[k2 + 3] = rdft_wk3ri_second[k1 + 1];
+    }
+  }
+}
+
+static void makect_32(void) {
+  float *c = rdft_w + 32;
+  const int nc = 32;
+  int j, nch;
+  float delta;
+
+  ip[1] = nc;
+  nch = nc >> 1;
+  delta = atanf(1.0f) / nch;
+  c[0] = cosf(delta * nch);
+  c[nch] = 0.5f * c[0];
+  for (j = 1; j < nch; j++) {
+    c[j] = 0.5f * cosf(delta * j);
+    c[nc - j] = 0.5f * sinf(delta * j);
+  }
+}
+
+static void cft1st_128_C(float *a) {
+  const int n = 128;
+  int j, k1, k2;
+  float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+  float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+  x0r = a[0] + a[2];
+  x0i = a[1] + a[3];
+  x1r = a[0] - a[2];
+  x1i = a[1] - a[3];
+  x2r = a[4] + a[6];
+  x2i = a[5] + a[7];
+  x3r = a[4] - a[6];
+  x3i = a[5] - a[7];
+  a[0] = x0r + x2r;
+  a[1] = x0i + x2i;
+  a[4] = x0r - x2r;
+  a[5] = x0i - x2i;
+  a[2] = x1r - x3i;
+  a[3] = x1i + x3r;
+  a[6] = x1r + x3i;
+  a[7] = x1i - x3r;
+  wk1r = rdft_w[2];
+  x0r = a[8] + a[10];
+  x0i = a[9] + a[11];
+  x1r = a[8] - a[10];
+  x1i = a[9] - a[11];
+  x2r = a[12] + a[14];
+  x2i = a[13] + a[15];
+  x3r = a[12] - a[14];
+  x3i = a[13] - a[15];
+  a[8] = x0r + x2r;
+  a[9] = x0i + x2i;
+  a[12] = x2i - x0i;
+  a[13] = x0r - x2r;
+  x0r = x1r - x3i;
+  x0i = x1i + x3r;
+  a[10] = wk1r * (x0r - x0i);
+  a[11] = wk1r * (x0r + x0i);
+  x0r = x3i + x1r;
+  x0i = x3r - x1i;
+  a[14] = wk1r * (x0i - x0r);
+  a[15] = wk1r * (x0i + x0r);
+  k1 = 0;
+  for (j = 16; j < n; j += 16) {
+    k1 += 2;
+    k2 = 2 * k1;
+    wk2r = rdft_w[k1 + 0];
+    wk2i = rdft_w[k1 + 1];
+    wk1r = rdft_w[k2 + 0];
+    wk1i = rdft_w[k2 + 1];
+    wk3r = rdft_wk3ri_first[k1 + 0];
+    wk3i = rdft_wk3ri_first[k1 + 1];
+    x0r = a[j + 0] + a[j + 2];
+    x0i = a[j + 1] + a[j + 3];
+    x1r = a[j + 0] - a[j + 2];
+    x1i = a[j + 1] - a[j + 3];
+    x2r = a[j + 4] + a[j + 6];
+    x2i = a[j + 5] + a[j + 7];
+    x3r = a[j + 4] - a[j + 6];
+    x3i = a[j + 5] - a[j + 7];
+    a[j + 0] = x0r + x2r;
+    a[j + 1] = x0i + x2i;
+    x0r -= x2r;
+    x0i -= x2i;
+    a[j + 4] = wk2r * x0r - wk2i * x0i;
+    a[j + 5] = wk2r * x0i + wk2i * x0r;
+    x0r = x1r - x3i;
+    x0i = x1i + x3r;
+    a[j + 2] = wk1r * x0r - wk1i * x0i;
+    a[j + 3] = wk1r * x0i + wk1i * x0r;
+    x0r = x1r + x3i;
+    x0i = x1i - x3r;
+    a[j + 6] = wk3r * x0r - wk3i * x0i;
+    a[j + 7] = wk3r * x0i + wk3i * x0r;
+    wk1r = rdft_w[k2 + 2];
+    wk1i = rdft_w[k2 + 3];
+    wk3r = rdft_wk3ri_second[k1 + 0];
+    wk3i = rdft_wk3ri_second[k1 + 1];
+    x0r = a[j + 8] + a[j + 10];
+    x0i = a[j + 9] + a[j + 11];
+    x1r = a[j + 8] - a[j + 10];
+    x1i = a[j + 9] - a[j + 11];
+    x2r = a[j + 12] + a[j + 14];
+    x2i = a[j + 13] + a[j + 15];
+    x3r = a[j + 12] - a[j + 14];
+    x3i = a[j + 13] - a[j + 15];
+    a[j + 8] = x0r + x2r;
+    a[j + 9] = x0i + x2i;
+    x0r -= x2r;
+    x0i -= x2i;
+    a[j + 12] = -wk2i * x0r - wk2r * x0i;
+    a[j + 13] = -wk2i * x0i + wk2r * x0r;
+    x0r = x1r - x3i;
+    x0i = x1i + x3r;
+    a[j + 10] = wk1r * x0r - wk1i * x0i;
+    a[j + 11] = wk1r * x0i + wk1i * x0r;
+    x0r = x1r + x3i;
+    x0i = x1i - x3r;
+    a[j + 14] = wk3r * x0r - wk3i * x0i;
+    a[j + 15] = wk3r * x0i + wk3i * x0r;
+  }
+}
+
+static void cftmdl_128_C(float *a) {
+  const int l = 8;
+  const int n = 128;
+  const int m = 32;
+  int j0, j1, j2, j3, k, k1, k2, m2;
+  float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+  float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+  for (j0 = 0; j0 < l; j0 += 2) {
+    j1 = j0 +  8;
+    j2 = j0 + 16;
+    j3 = j0 + 24;
+    x0r = a[j0 + 0] + a[j1 + 0];
+    x0i = a[j0 + 1] + a[j1 + 1];
+    x1r = a[j0 + 0] - a[j1 + 0];
+    x1i = a[j0 + 1] - a[j1 + 1];
+    x2r = a[j2 + 0] + a[j3 + 0];
+    x2i = a[j2 + 1] + a[j3 + 1];
+    x3r = a[j2 + 0] - a[j3 + 0];
+    x3i = a[j2 + 1] - a[j3 + 1];
+    a[j0 + 0] = x0r + x2r;
+    a[j0 + 1] = x0i + x2i;
+    a[j2 + 0] = x0r - x2r;
+    a[j2 + 1] = x0i - x2i;
+    a[j1 + 0] = x1r - x3i;
+    a[j1 + 1] = x1i + x3r;
+    a[j3 + 0] = x1r + x3i;
+    a[j3 + 1] = x1i - x3r;
+  }
+  wk1r = rdft_w[2];
+  for (j0 = m; j0 < l + m; j0 += 2) {
+    j1 = j0 +  8;
+    j2 = j0 + 16;
+    j3 = j0 + 24;
+    x0r = a[j0 + 0] + a[j1 + 0];
+    x0i = a[j0 + 1] + a[j1 + 1];
+    x1r = a[j0 + 0] - a[j1 + 0];
+    x1i = a[j0 + 1] - a[j1 + 1];
+    x2r = a[j2 + 0] + a[j3 + 0];
+    x2i = a[j2 + 1] + a[j3 + 1];
+    x3r = a[j2 + 0] - a[j3 + 0];
+    x3i = a[j2 + 1] - a[j3 + 1];
+    a[j0 + 0] = x0r + x2r;
+    a[j0 + 1] = x0i + x2i;
+    a[j2 + 0] = x2i - x0i;
+    a[j2 + 1] = x0r - x2r;
+    x0r = x1r - x3i;
+    x0i = x1i + x3r;
+    a[j1 + 0] = wk1r * (x0r - x0i);
+    a[j1 + 1] = wk1r * (x0r + x0i);
+    x0r = x3i + x1r;
+    x0i = x3r - x1i;
+    a[j3 + 0] = wk1r * (x0i - x0r);
+    a[j3 + 1] = wk1r * (x0i + x0r);
+  }
+  k1 = 0;
+  m2 = 2 * m;
+  for (k = m2; k < n; k += m2) {
+    k1 += 2;
+    k2 = 2 * k1;
+    wk2r = rdft_w[k1 + 0];
+    wk2i = rdft_w[k1 + 1];
+    wk1r = rdft_w[k2 + 0];
+    wk1i = rdft_w[k2 + 1];
+    wk3r = rdft_wk3ri_first[k1 + 0];
+    wk3i = rdft_wk3ri_first[k1 + 1];
+    for (j0 = k; j0 < l + k; j0 += 2) {
+      j1 = j0 +  8;
+      j2 = j0 + 16;
+      j3 = j0 + 24;
+      x0r = a[j0 + 0] + a[j1 + 0];
+      x0i = a[j0 + 1] + a[j1 + 1];
+      x1r = a[j0 + 0] - a[j1 + 0];
+      x1i = a[j0 + 1] - a[j1 + 1];
+      x2r = a[j2 + 0] + a[j3 + 0];
+      x2i = a[j2 + 1] + a[j3 + 1];
+      x3r = a[j2 + 0] - a[j3 + 0];
+      x3i = a[j2 + 1] - a[j3 + 1];
+      a[j0 + 0] = x0r + x2r;
+      a[j0 + 1] = x0i + x2i;
+      x0r -= x2r;
+      x0i -= x2i;
+      a[j2 + 0] = wk2r * x0r - wk2i * x0i;
+      a[j2 + 1] = wk2r * x0i + wk2i * x0r;
+      x0r = x1r - x3i;
+      x0i = x1i + x3r;
+      a[j1 + 0] = wk1r * x0r - wk1i * x0i;
+      a[j1 + 1] = wk1r * x0i + wk1i * x0r;
+      x0r = x1r + x3i;
+      x0i = x1i - x3r;
+      a[j3 + 0] = wk3r * x0r - wk3i * x0i;
+      a[j3 + 1] = wk3r * x0i + wk3i * x0r;
+    }
+    wk1r = rdft_w[k2 + 2];
+    wk1i = rdft_w[k2 + 3];
+    wk3r = rdft_wk3ri_second[k1 + 0];
+    wk3i = rdft_wk3ri_second[k1 + 1];
+    for (j0 = k + m; j0 < l + (k + m); j0 += 2) {
+      j1 = j0 +  8;
+      j2 = j0 + 16;
+      j3 = j0 + 24;
+      x0r = a[j0 + 0] + a[j1 + 0];
+      x0i = a[j0 + 1] + a[j1 + 1];
+      x1r = a[j0 + 0] - a[j1 + 0];
+      x1i = a[j0 + 1] - a[j1 + 1];
+      x2r = a[j2 + 0] + a[j3 + 0];
+      x2i = a[j2 + 1] + a[j3 + 1];
+      x3r = a[j2 + 0] - a[j3 + 0];
+      x3i = a[j2 + 1] - a[j3 + 1];
+      a[j0 + 0] = x0r + x2r;
+      a[j0 + 1] = x0i + x2i;
+      x0r -= x2r;
+      x0i -= x2i;
+      a[j2 + 0] = -wk2i * x0r - wk2r * x0i;
+      a[j2 + 1] = -wk2i * x0i + wk2r * x0r;
+      x0r = x1r - x3i;
+      x0i = x1i + x3r;
+      a[j1 + 0] = wk1r * x0r - wk1i * x0i;
+      a[j1 + 1] = wk1r * x0i + wk1i * x0r;
+      x0r = x1r + x3i;
+      x0i = x1i - x3r;
+      a[j3 + 0] = wk3r * x0r - wk3i * x0i;
+      a[j3 + 1] = wk3r * x0i + wk3i * x0r;
+    }
+  }
+}
+
+static void cftfsub_128(float *a) {
+  int j, j1, j2, j3, l;
+  float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+  cft1st_128(a);
+  cftmdl_128(a);
+  l = 32;
+  for (j = 0; j < l; j += 2) {
+    j1 = j + l;
+    j2 = j1 + l;
+    j3 = j2 + l;
+    x0r = a[j] + a[j1];
+    x0i = a[j + 1] + a[j1 + 1];
+    x1r = a[j] - a[j1];
+    x1i = a[j + 1] - a[j1 + 1];
+    x2r = a[j2] + a[j3];
+    x2i = a[j2 + 1] + a[j3 + 1];
+    x3r = a[j2] - a[j3];
+    x3i = a[j2 + 1] - a[j3 + 1];
+    a[j] = x0r + x2r;
+    a[j + 1] = x0i + x2i;
+    a[j2] = x0r - x2r;
+    a[j2 + 1] = x0i - x2i;
+    a[j1] = x1r - x3i;
+    a[j1 + 1] = x1i + x3r;
+    a[j3] = x1r + x3i;
+    a[j3 + 1] = x1i - x3r;
+  }
+}
+
+static void cftbsub_128(float *a) {
+  int j, j1, j2, j3, l;
+  float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+  cft1st_128(a);
+  cftmdl_128(a);
+  l = 32;
+
+  for (j = 0; j < l; j += 2) {
+    j1 = j + l;
+    j2 = j1 + l;
+    j3 = j2 + l;
+    x0r = a[j] + a[j1];
+    x0i = -a[j + 1] - a[j1 + 1];
+    x1r = a[j] - a[j1];
+    x1i = -a[j + 1] + a[j1 + 1];
+    x2r = a[j2] + a[j3];
+    x2i = a[j2 + 1] + a[j3 + 1];
+    x3r = a[j2] - a[j3];
+    x3i = a[j2 + 1] - a[j3 + 1];
+    a[j] = x0r + x2r;
+    a[j + 1] = x0i - x2i;
+    a[j2] = x0r - x2r;
+    a[j2 + 1] = x0i + x2i;
+    a[j1] = x1r - x3i;
+    a[j1 + 1] = x1i - x3r;
+    a[j3] = x1r + x3i;
+    a[j3 + 1] = x1i + x3r;
+  }
+}
+
+static void rftfsub_128_C(float *a) {
+  const float *c = rdft_w + 32;
+  int j1, j2, k1, k2;
+  float wkr, wki, xr, xi, yr, yi;
+
+  for (j1 = 1, j2 = 2; j2 < 64; j1 += 1, j2 += 2) {
+    k2 = 128 - j2;
+    k1 =  32 - j1;
+    wkr = 0.5f - c[k1];
+    wki = c[j1];
+    xr = a[j2 + 0] - a[k2 + 0];
+    xi = a[j2 + 1] + a[k2 + 1];
+    yr = wkr * xr - wki * xi;
+    yi = wkr * xi + wki * xr;
+    a[j2 + 0] -= yr;
+    a[j2 + 1] -= yi;
+    a[k2 + 0] += yr;
+    a[k2 + 1] -= yi;
+  }
+}
+
+static void rftbsub_128_C(float *a) {
+  const float *c = rdft_w + 32;
+  int j1, j2, k1, k2;
+  float wkr, wki, xr, xi, yr, yi;
+
+  a[1] = -a[1];
+  for (j1 = 1, j2 = 2; j2 < 64; j1 += 1, j2 += 2) {
+    k2 = 128 - j2;
+    k1 =  32 - j1;
+    wkr = 0.5f - c[k1];
+    wki = c[j1];
+    xr = a[j2 + 0] - a[k2 + 0];
+    xi = a[j2 + 1] + a[k2 + 1];
+    yr = wkr * xr + wki * xi;
+    yi = wkr * xi - wki * xr;
+    a[j2 + 0] = a[j2 + 0] - yr;
+    a[j2 + 1] = yi - a[j2 + 1];
+    a[k2 + 0] = yr + a[k2 + 0];
+    a[k2 + 1] = yi - a[k2 + 1];
+  }
+  a[65] = -a[65];
+}
+
+void aec_rdft_forward_128(float *a) {
+  const int n = 128;
+  float xi;
+
+  bitrv2_32or128(n, ip + 2, a);
+  cftfsub_128(a);
+  rftfsub_128(a);
+  xi = a[0] - a[1];
+  a[0] += a[1];
+  a[1] = xi;
+}
+
+void aec_rdft_inverse_128(float *a) {
+  const int n = 128;
+
+  a[1] = 0.5f * (a[0] - a[1]);
+  a[0] -= a[1];
+  rftbsub_128(a);
+  bitrv2_32or128(n, ip + 2, a);
+  cftbsub_128(a);
+}
+
+// code path selection
+rft_sub_128_t cft1st_128;
+rft_sub_128_t cftmdl_128;
+rft_sub_128_t rftfsub_128;
+rft_sub_128_t rftbsub_128;
+
+void aec_rdft_init(void) {
+  cft1st_128 = cft1st_128_C;
+  cftmdl_128 = cftmdl_128_C;
+  rftfsub_128 = rftfsub_128_C;
+  rftbsub_128 = rftbsub_128_C;
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+  if (WebRtc_GetCPUInfo(kSSE2)) {
+    aec_rdft_init_sse2();
+  }
+#endif
+  // init library constants.
+  makewt_32();
+  makect_32();
+}
diff --git a/trunk/src/modules/audio_processing/aec/aec_rdft.h b/trunk/src/modules/audio_processing/aec/aec_rdft.h
new file mode 100644
index 0000000..91bedc9
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_rdft.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_RDFT_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_RDFT_H_
+
+// These intrinsics were unavailable before VS 2008.
+// TODO(andrew): move to a common file.
+#if defined(_MSC_VER) && _MSC_VER < 1500
+#include <emmintrin.h>
+static __inline __m128 _mm_castsi128_ps(__m128i a) { return *(__m128*)&a; }
+static __inline __m128i _mm_castps_si128(__m128 a) { return *(__m128i*)&a; }
+#endif
+
+#ifdef _MSC_VER /* visual c++ */
+# define ALIGN16_BEG __declspec(align(16))
+# define ALIGN16_END
+#else /* gcc or icc */
+# define ALIGN16_BEG
+# define ALIGN16_END __attribute__((aligned(16)))
+#endif
+
// constants shared by all paths (C, SSE2).
extern float rdft_w[64];
// constants used by the C path.
extern float rdft_wk3ri_first[32];
extern float rdft_wk3ri_second[32];
// constants used by SSE2 but initialized in C path.
extern float rdft_wk1r[32];
extern float rdft_wk2r[32];
extern float rdft_wk3r[32];
extern float rdft_wk1i[32];
extern float rdft_wk2i[32];
extern float rdft_wk3i[32];
extern float cftmdl_wk1r[4];

// code path selection function pointers
// Each kernel operates in place on a 128-float buffer.  The pointers are
// installed by aec_rdft_init() (C) and aec_rdft_init_sse2() (SSE2).
typedef void (*rft_sub_128_t)(float *a);
extern rft_sub_128_t rftfsub_128;
extern rft_sub_128_t rftbsub_128;
extern rft_sub_128_t cft1st_128;
extern rft_sub_128_t cftmdl_128;
+
+// entry points
+void aec_rdft_init(void);
+void aec_rdft_init_sse2(void);
+void aec_rdft_forward_128(float *a);
+void aec_rdft_inverse_128(float *a);
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_RDFT_H_
diff --git a/trunk/src/modules/audio_processing/aec/aec_rdft_sse2.c b/trunk/src/modules/audio_processing/aec/aec_rdft_sse2.c
new file mode 100644
index 0000000..eeb3152
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_rdft_sse2.c
@@ -0,0 +1,427 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "aec_rdft.h"
+
+#include <emmintrin.h>
+
// Sign vector used (via _mm_mul_ps) to negate the even-indexed lanes of a
// register, i.e. to multiply lanes by {-1, 1, -1, 1}.
static const ALIGN16_BEG float ALIGN16_END k_swap_sign[4] =
  {-1.f, 1.f, -1.f, 1.f};
+
// SSE2 replacement for cft1st_128_C, installed via the cft1st_128 code-path
// pointer.  Each iteration loads 16 consecutive floats of |a| (unaligned),
// regroups them into complex pairs with shuffles, applies a 4-point
// butterfly with twiddles from the 16-byte-aligned rdft_wk* tables, and
// stores the 16 results back in the original interleaving.
static void cft1st_128_SSE2(float *a) {
  const __m128 mm_swap_sign = _mm_load_ps(k_swap_sign);
  int j, k2;

  for (k2 = 0, j = 0; j < 128; j += 16, k2 += 4) {
          __m128 a00v   = _mm_loadu_ps(&a[j +  0]);
          __m128 a04v   = _mm_loadu_ps(&a[j +  4]);
          __m128 a08v   = _mm_loadu_ps(&a[j +  8]);
          __m128 a12v   = _mm_loadu_ps(&a[j + 12]);
          // De-interleave: a01v = {a[j], a[j+1], a[j+8], a[j+9]}, etc.
          __m128 a01v   = _mm_shuffle_ps(a00v, a08v, _MM_SHUFFLE(1, 0, 1 ,0));
          __m128 a23v   = _mm_shuffle_ps(a00v, a08v, _MM_SHUFFLE(3, 2, 3 ,2));
          __m128 a45v   = _mm_shuffle_ps(a04v, a12v, _MM_SHUFFLE(1, 0, 1 ,0));
          __m128 a67v   = _mm_shuffle_ps(a04v, a12v, _MM_SHUFFLE(3, 2, 3 ,2));

    const __m128 wk1rv  = _mm_load_ps(&rdft_wk1r[k2]);
    const __m128 wk1iv  = _mm_load_ps(&rdft_wk1i[k2]);
    const __m128 wk2rv  = _mm_load_ps(&rdft_wk2r[k2]);
    const __m128 wk2iv  = _mm_load_ps(&rdft_wk2i[k2]);
    const __m128 wk3rv  = _mm_load_ps(&rdft_wk3r[k2]);
    const __m128 wk3iv  = _mm_load_ps(&rdft_wk3i[k2]);
    // Butterfly adds/subtracts on the regrouped pairs.
          __m128 x0v    = _mm_add_ps(a01v, a23v);
    const __m128 x1v    = _mm_sub_ps(a01v, a23v);
    const __m128 x2v    = _mm_add_ps(a45v, a67v);
    const __m128 x3v    = _mm_sub_ps(a45v, a67v);
          __m128 x0w;
                 a01v   = _mm_add_ps(x0v, x2v);
                 x0v    = _mm_sub_ps(x0v, x2v);
                 // x0w = x0v with real/imag lanes swapped within each pair.
                 x0w    = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0 ,1));
    {
      const __m128 a45_0v = _mm_mul_ps(wk2rv, x0v);
      const __m128 a45_1v = _mm_mul_ps(wk2iv, x0w);
                   a45v   = _mm_add_ps(a45_0v, a45_1v);
    }
    {
            __m128 a23_0v, a23_1v;
      const __m128 x3w    = _mm_shuffle_ps(x3v, x3v, _MM_SHUFFLE(2, 3, 0 ,1));
      const __m128 x3s    = _mm_mul_ps(mm_swap_sign, x3w);
                   x0v    = _mm_add_ps(x1v, x3s);
                   x0w    = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0 ,1));
                   a23_0v = _mm_mul_ps(wk1rv, x0v);
                   a23_1v = _mm_mul_ps(wk1iv, x0w);
                   a23v   = _mm_add_ps(a23_0v, a23_1v);

                   x0v    = _mm_sub_ps(x1v, x3s);
                   x0w    = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0 ,1));
    }
    {
      const __m128 a67_0v = _mm_mul_ps(wk3rv, x0v);
      const __m128 a67_1v = _mm_mul_ps(wk3iv, x0w);
                   a67v   = _mm_add_ps(a67_0v, a67_1v);
    }

    // Re-interleave the four result pairs and store.
                 a00v   = _mm_shuffle_ps(a01v, a23v, _MM_SHUFFLE(1, 0, 1 ,0));
                 a04v   = _mm_shuffle_ps(a45v, a67v, _MM_SHUFFLE(1, 0, 1 ,0));
                 a08v   = _mm_shuffle_ps(a01v, a23v, _MM_SHUFFLE(3, 2, 3 ,2));
                 a12v   = _mm_shuffle_ps(a45v, a67v, _MM_SHUFFLE(3, 2, 3 ,2));
    _mm_storeu_ps(&a[j +  0], a00v);
    _mm_storeu_ps(&a[j +  4], a04v);
    _mm_storeu_ps(&a[j +  8], a08v);
    _mm_storeu_ps(&a[j + 12], a12v);
  }
}
+
// SSE2 replacement for cftmdl_128_C, installed via the cftmdl_128 code-path
// pointer.  Two phases: the first loop covers j0 = 0..7, where the only
// non-trivial twiddle is the wk1r constant (cftmdl_wk1r); the second block
// covers j0 = 64..71 with twiddles taken from the rdft_wk* tables at
// offset k2 = 4.  Data moves as 64-bit (two-float) lanes via
// _mm_loadl_epi64/_mm_storel_epi64 and is regrouped with shuffles.
static void cftmdl_128_SSE2(float *a) {
  const int l = 8;
  const __m128 mm_swap_sign = _mm_load_ps(k_swap_sign);
  int j0;

  __m128 wk1rv = _mm_load_ps(cftmdl_wk1r);
  for (j0 = 0; j0 < l; j0 += 2) {
    const __m128i a_00 = _mm_loadl_epi64((__m128i*)&a[j0 +  0]);
    const __m128i a_08 = _mm_loadl_epi64((__m128i*)&a[j0 +  8]);
    const __m128i a_32 = _mm_loadl_epi64((__m128i*)&a[j0 + 32]);
    const __m128i a_40 = _mm_loadl_epi64((__m128i*)&a[j0 + 40]);
    // Pack two complex samples per register: {a[j0], a[j0+1], a[j0+32], ...}.
    const __m128  a_00_32 = _mm_shuffle_ps(_mm_castsi128_ps(a_00),
                                           _mm_castsi128_ps(a_32),
                                           _MM_SHUFFLE(1, 0, 1 ,0));
    const __m128  a_08_40 = _mm_shuffle_ps(_mm_castsi128_ps(a_08),
                                           _mm_castsi128_ps(a_40),
                                           _MM_SHUFFLE(1, 0, 1 ,0));
          __m128  x0r0_0i0_0r1_x0i1 = _mm_add_ps(a_00_32, a_08_40);
    const __m128  x1r0_1i0_1r1_x1i1 = _mm_sub_ps(a_00_32, a_08_40);

    const __m128i a_16 = _mm_loadl_epi64((__m128i*)&a[j0 + 16]);
    const __m128i a_24 = _mm_loadl_epi64((__m128i*)&a[j0 + 24]);
    const __m128i a_48 = _mm_loadl_epi64((__m128i*)&a[j0 + 48]);
    const __m128i a_56 = _mm_loadl_epi64((__m128i*)&a[j0 + 56]);
    const __m128  a_16_48 = _mm_shuffle_ps(_mm_castsi128_ps(a_16),
                                           _mm_castsi128_ps(a_48),
                                           _MM_SHUFFLE(1, 0, 1 ,0));
    const __m128  a_24_56 = _mm_shuffle_ps(_mm_castsi128_ps(a_24),
                                           _mm_castsi128_ps(a_56),
                                           _MM_SHUFFLE(1, 0, 1 ,0));
    const __m128  x2r0_2i0_2r1_x2i1 = _mm_add_ps(a_16_48, a_24_56);
    const __m128  x3r0_3i0_3r1_x3i1 = _mm_sub_ps(a_16_48, a_24_56);

    const __m128  xx0 = _mm_add_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1);
    const __m128  xx1 = _mm_sub_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1);

    // Swap real/imag within each pair before applying the sign vector.
    const __m128  x3i0_3r0_3i1_x3r1 = _mm_castsi128_ps(
        _mm_shuffle_epi32(_mm_castps_si128(x3r0_3i0_3r1_x3i1),
                          _MM_SHUFFLE(2, 3, 0, 1)));
    const __m128  x3_swapped = _mm_mul_ps(mm_swap_sign, x3i0_3r0_3i1_x3r1);
    const __m128  x1_x3_add = _mm_add_ps(x1r0_1i0_1r1_x1i1, x3_swapped);
    const __m128  x1_x3_sub = _mm_sub_ps(x1r0_1i0_1r1_x1i1, x3_swapped);

    const __m128 yy0 = _mm_shuffle_ps(x1_x3_add, x1_x3_sub,
                                      _MM_SHUFFLE(2, 2, 2 ,2));
    const __m128 yy1 = _mm_shuffle_ps(x1_x3_add, x1_x3_sub,
                                      _MM_SHUFFLE(3, 3, 3 ,3));
    const __m128 yy2 = _mm_mul_ps(mm_swap_sign, yy1);
    const __m128 yy3 = _mm_add_ps(yy0, yy2);
    const __m128 yy4 = _mm_mul_ps(wk1rv, yy3);

    _mm_storel_epi64((__m128i*)&a[j0 +  0], _mm_castps_si128(xx0));
    _mm_storel_epi64((__m128i*)&a[j0 + 32],
                     _mm_shuffle_epi32(_mm_castps_si128(xx0),
                                       _MM_SHUFFLE(3, 2, 3, 2)));

    _mm_storel_epi64((__m128i*)&a[j0 + 16], _mm_castps_si128(xx1));
    _mm_storel_epi64((__m128i*)&a[j0 + 48],
                     _mm_shuffle_epi32(_mm_castps_si128(xx1),
                                       _MM_SHUFFLE(2, 3, 2, 3)));
    // Scalar sign fixup on the first of the two floats just stored.
    a[j0 + 48] = -a[j0 + 48];

    _mm_storel_epi64((__m128i*)&a[j0 +  8], _mm_castps_si128(x1_x3_add));
    _mm_storel_epi64((__m128i*)&a[j0 + 24], _mm_castps_si128(x1_x3_sub));

    _mm_storel_epi64((__m128i*)&a[j0 + 40], _mm_castps_si128(yy4));
    _mm_storel_epi64((__m128i*)&a[j0 + 56],
                     _mm_shuffle_epi32(_mm_castps_si128(yy4),
                     _MM_SHUFFLE(2, 3, 2, 3)));
  }

  // Second phase: same butterfly shape with full complex twiddles from the
  // rdft_wk* tables (loaded once, outside the loop).
  {
    int k = 64;
    int k1 = 2;
    int k2 = 2 * k1;
    const __m128 wk2rv = _mm_load_ps(&rdft_wk2r[k2+0]);
    const __m128 wk2iv = _mm_load_ps(&rdft_wk2i[k2+0]);
    const __m128 wk1iv = _mm_load_ps(&rdft_wk1i[k2+0]);
    const __m128 wk3rv = _mm_load_ps(&rdft_wk3r[k2+0]);
    const __m128 wk3iv = _mm_load_ps(&rdft_wk3i[k2+0]);
                 wk1rv = _mm_load_ps(&rdft_wk1r[k2+0]);
    for (j0 = k; j0 < l + k; j0 += 2) {
      const __m128i a_00 = _mm_loadl_epi64((__m128i*)&a[j0 +  0]);
      const __m128i a_08 = _mm_loadl_epi64((__m128i*)&a[j0 +  8]);
      const __m128i a_32 = _mm_loadl_epi64((__m128i*)&a[j0 + 32]);
      const __m128i a_40 = _mm_loadl_epi64((__m128i*)&a[j0 + 40]);
      const __m128 a_00_32 = _mm_shuffle_ps(_mm_castsi128_ps(a_00),
                                            _mm_castsi128_ps(a_32),
                                            _MM_SHUFFLE(1, 0, 1 ,0));
      const __m128 a_08_40 = _mm_shuffle_ps(_mm_castsi128_ps(a_08),
                                            _mm_castsi128_ps(a_40),
                                            _MM_SHUFFLE(1, 0, 1 ,0));
            __m128 x0r0_0i0_0r1_x0i1 = _mm_add_ps(a_00_32, a_08_40);
      const __m128 x1r0_1i0_1r1_x1i1 = _mm_sub_ps(a_00_32, a_08_40);

      const __m128i a_16 = _mm_loadl_epi64((__m128i*)&a[j0 + 16]);
      const __m128i a_24 = _mm_loadl_epi64((__m128i*)&a[j0 + 24]);
      const __m128i a_48 = _mm_loadl_epi64((__m128i*)&a[j0 + 48]);
      const __m128i a_56 = _mm_loadl_epi64((__m128i*)&a[j0 + 56]);
      const __m128 a_16_48 = _mm_shuffle_ps(_mm_castsi128_ps(a_16),
                                            _mm_castsi128_ps(a_48),
                                            _MM_SHUFFLE(1, 0, 1 ,0));
      const __m128 a_24_56 = _mm_shuffle_ps(_mm_castsi128_ps(a_24),
                                            _mm_castsi128_ps(a_56),
                                            _MM_SHUFFLE(1, 0, 1 ,0));
      const __m128 x2r0_2i0_2r1_x2i1 = _mm_add_ps(a_16_48, a_24_56);
      const __m128 x3r0_3i0_3r1_x3i1 = _mm_sub_ps(a_16_48, a_24_56);

      const __m128 xx = _mm_add_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1);
      const __m128 xx1 = _mm_sub_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1);
      // Complex multiply by wk2: real part * wk2r + swapped part * wk2i.
      const __m128 xx2 = _mm_mul_ps(xx1 , wk2rv);
      const __m128 xx3 = _mm_mul_ps(wk2iv,
          _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(xx1),
                                             _MM_SHUFFLE(2, 3, 0, 1))));
      const __m128 xx4 = _mm_add_ps(xx2, xx3);

      const __m128  x3i0_3r0_3i1_x3r1 =  _mm_castsi128_ps(
          _mm_shuffle_epi32(_mm_castps_si128(x3r0_3i0_3r1_x3i1),
                            _MM_SHUFFLE(2, 3, 0, 1)));
      const __m128  x3_swapped = _mm_mul_ps(mm_swap_sign, x3i0_3r0_3i1_x3r1);
      const __m128  x1_x3_add = _mm_add_ps(x1r0_1i0_1r1_x1i1, x3_swapped);
      const __m128  x1_x3_sub = _mm_sub_ps(x1r0_1i0_1r1_x1i1, x3_swapped);

      const __m128 xx10 = _mm_mul_ps(x1_x3_add, wk1rv);
      const __m128 xx11 = _mm_mul_ps(wk1iv,
          _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(x1_x3_add),
                                             _MM_SHUFFLE(2, 3, 0, 1))));
      const __m128 xx12 = _mm_add_ps(xx10, xx11);

      const __m128 xx20 = _mm_mul_ps(x1_x3_sub, wk3rv);
      const __m128 xx21 = _mm_mul_ps(wk3iv,
          _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(x1_x3_sub),
                           _MM_SHUFFLE(2, 3, 0, 1))));
      const __m128 xx22 = _mm_add_ps(xx20, xx21);

      _mm_storel_epi64((__m128i*)&a[j0 +  0], _mm_castps_si128(xx));
      _mm_storel_epi64((__m128i*)&a[j0 + 32],
                         _mm_shuffle_epi32(_mm_castps_si128(xx),
                                           _MM_SHUFFLE(3, 2, 3, 2)));

      _mm_storel_epi64((__m128i*)&a[j0 + 16], _mm_castps_si128(xx4));
      _mm_storel_epi64((__m128i*)&a[j0 + 48],
                        _mm_shuffle_epi32(_mm_castps_si128(xx4),
                                          _MM_SHUFFLE(3, 2, 3, 2)));

      _mm_storel_epi64((__m128i*)&a[j0 +  8], _mm_castps_si128(xx12));
      _mm_storel_epi64((__m128i*)&a[j0 + 40],
                       _mm_shuffle_epi32(_mm_castps_si128(xx12),
                                         _MM_SHUFFLE(3, 2, 3, 2)));

      _mm_storel_epi64((__m128i*)&a[j0 + 24], _mm_castps_si128(xx22));
      _mm_storel_epi64((__m128i*)&a[j0 + 56],
                       _mm_shuffle_epi32(_mm_castps_si128(xx22),
                                         _MM_SHUFFLE(3, 2, 3, 2)));
    }
  }
}
+
// SSE2 replacement for rftfsub_128_C (forward real-FFT post-processing),
// installed via the rftfsub_128 code-path pointer.  The vector loop handles
// four (j2, 128 - j2) bin pairs per iteration using unaligned loads/stores;
// the last three pairs (j2 = 58, 60, 62) fall through to a scalar tail
// identical to rftfsub_128_C.
static void rftfsub_128_SSE2(float *a) {
  const float *c = rdft_w + 32;
  int j1, j2, k1, k2;
  float wkr, wki, xr, xi, yr, yi;

  static const ALIGN16_BEG float ALIGN16_END k_half[4] =
    {0.5f, 0.5f, 0.5f, 0.5f};
  const __m128 mm_half = _mm_load_ps(k_half);

  // Vectorized code (four at once).
  //    Note: commented number are indexes for the first iteration of the loop.
  for (j1 = 1, j2 = 2; j2 + 7 < 64; j1 += 4, j2 += 8) {
    // Load 'wk'.
    const __m128 c_j1 = _mm_loadu_ps(&c[     j1]);         //  1,  2,  3,  4,
    const __m128 c_k1 = _mm_loadu_ps(&c[29 - j1]);         // 28, 29, 30, 31,
    const __m128 wkrt = _mm_sub_ps(mm_half, c_k1);         // 28, 29, 30, 31,
    const __m128 wkr_ =
      _mm_shuffle_ps(wkrt, wkrt, _MM_SHUFFLE(0, 1, 2, 3)); // 31, 30, 29, 28,
    const __m128 wki_ = c_j1;                              //  1,  2,  3,  4,
    // Load and shuffle 'a'.
    const __m128 a_j2_0 = _mm_loadu_ps(&a[0   + j2]);  //   2,   3,   4,   5,
    const __m128 a_j2_4 = _mm_loadu_ps(&a[4   + j2]);  //   6,   7,   8,   9,
    const __m128 a_k2_0 = _mm_loadu_ps(&a[122 - j2]);  // 120, 121, 122, 123,
    const __m128 a_k2_4 = _mm_loadu_ps(&a[126 - j2]);  // 124, 125, 126, 127,
    const __m128 a_j2_p0 = _mm_shuffle_ps(a_j2_0, a_j2_4,
                            _MM_SHUFFLE(2, 0, 2 ,0));  //   2,   4,   6,   8,
    const __m128 a_j2_p1 = _mm_shuffle_ps(a_j2_0, a_j2_4,
                            _MM_SHUFFLE(3, 1, 3 ,1));  //   3,   5,   7,   9,
    const __m128 a_k2_p0 = _mm_shuffle_ps(a_k2_4, a_k2_0,
                            _MM_SHUFFLE(0, 2, 0 ,2));  // 126, 124, 122, 120,
    const __m128 a_k2_p1 = _mm_shuffle_ps(a_k2_4, a_k2_0,
                            _MM_SHUFFLE(1, 3, 1 ,3));  // 127, 125, 123, 121,
    // Calculate 'x'.
    const __m128 xr_ = _mm_sub_ps(a_j2_p0, a_k2_p0);
                                               // 2-126, 4-124, 6-122, 8-120,
    const __m128 xi_ = _mm_add_ps(a_j2_p1, a_k2_p1);
                                               // 3-127, 5-125, 7-123, 9-121,
    // Calculate product into 'y'.
    //    yr = wkr * xr - wki * xi;
    //    yi = wkr * xi + wki * xr;
    const __m128 a_ = _mm_mul_ps(wkr_, xr_);
    const __m128 b_ = _mm_mul_ps(wki_, xi_);
    const __m128 c_ = _mm_mul_ps(wkr_, xi_);
    const __m128 d_ = _mm_mul_ps(wki_, xr_);
    const __m128 yr_ = _mm_sub_ps(a_, b_);     // 2-126, 4-124, 6-122, 8-120,
    const __m128 yi_ = _mm_add_ps(c_, d_);     // 3-127, 5-125, 7-123, 9-121,
    // Update 'a'.
    //    a[j2 + 0] -= yr;
    //    a[j2 + 1] -= yi;
    //    a[k2 + 0] += yr;
    //    a[k2 + 1] -= yi;
    const __m128 a_j2_p0n = _mm_sub_ps(a_j2_p0, yr_);  //   2,   4,   6,   8,
    const __m128 a_j2_p1n = _mm_sub_ps(a_j2_p1, yi_);  //   3,   5,   7,   9,
    const __m128 a_k2_p0n = _mm_add_ps(a_k2_p0, yr_);  // 126, 124, 122, 120,
    const __m128 a_k2_p1n = _mm_sub_ps(a_k2_p1, yi_);  // 127, 125, 123, 121,
    // Shuffle in right order and store.
    const __m128 a_j2_0n = _mm_unpacklo_ps(a_j2_p0n, a_j2_p1n);
                                                       //   2,   3,   4,   5,
    const __m128 a_j2_4n = _mm_unpackhi_ps(a_j2_p0n, a_j2_p1n);
                                                       //   6,   7,   8,   9,
    const __m128 a_k2_0nt = _mm_unpackhi_ps(a_k2_p0n, a_k2_p1n);
                                                       // 122, 123, 120, 121,
    const __m128 a_k2_4nt = _mm_unpacklo_ps(a_k2_p0n, a_k2_p1n);
                                                       // 126, 127, 124, 125,
    const __m128 a_k2_0n = _mm_shuffle_ps(a_k2_0nt, a_k2_0nt,
                            _MM_SHUFFLE(1, 0, 3 ,2));  // 120, 121, 122, 123,
    const __m128 a_k2_4n = _mm_shuffle_ps(a_k2_4nt, a_k2_4nt,
                            _MM_SHUFFLE(1, 0, 3 ,2));  // 124, 125, 126, 127,
    _mm_storeu_ps(&a[0   + j2], a_j2_0n);
    _mm_storeu_ps(&a[4   + j2], a_j2_4n);
    _mm_storeu_ps(&a[122 - j2], a_k2_0n);
    _mm_storeu_ps(&a[126 - j2], a_k2_4n);
  }
  // Scalar code for the remaining items.
  for (; j2 < 64; j1 += 1, j2 += 2) {
    k2 = 128 - j2;
    k1 =  32 - j1;
    wkr = 0.5f - c[k1];
    wki = c[j1];
    xr = a[j2 + 0] - a[k2 + 0];
    xi = a[j2 + 1] + a[k2 + 1];
    yr = wkr * xr - wki * xi;
    yi = wkr * xi + wki * xr;
    a[j2 + 0] -= yr;
    a[j2 + 1] -= yi;
    a[k2 + 0] += yr;
    a[k2 + 1] -= yi;
  }
}
+
// SSE2 replacement for rftbsub_128_C (backward real-FFT post-processing),
// installed via the rftbsub_128 code-path pointer.  Same structure as
// rftfsub_128_SSE2 but with the conjugate rotation signs, and it flips the
// signs of a[1] and a[65] exactly as the scalar version does.  The last
// three pairs (j2 = 58, 60, 62) are handled by the scalar tail.
static void rftbsub_128_SSE2(float *a) {
  const float *c = rdft_w + 32;
  int j1, j2, k1, k2;
  float wkr, wki, xr, xi, yr, yi;

  static const ALIGN16_BEG float ALIGN16_END k_half[4] =
    {0.5f, 0.5f, 0.5f, 0.5f};
  const __m128 mm_half = _mm_load_ps(k_half);

  a[1] = -a[1];
  // Vectorized code (four at once).
  //    Note: commented number are indexes for the first iteration of the loop.
  for (j1 = 1, j2 = 2; j2 + 7 < 64; j1 += 4, j2 += 8) {
    // Load 'wk'.
    const __m128 c_j1 = _mm_loadu_ps(&c[     j1]);         //  1,  2,  3,  4,
    const __m128 c_k1 = _mm_loadu_ps(&c[29 - j1]);         // 28, 29, 30, 31,
    const __m128 wkrt = _mm_sub_ps(mm_half, c_k1);         // 28, 29, 30, 31,
    const __m128 wkr_ =
      _mm_shuffle_ps(wkrt, wkrt, _MM_SHUFFLE(0, 1, 2, 3)); // 31, 30, 29, 28,
    const __m128 wki_ = c_j1;                              //  1,  2,  3,  4,
    // Load and shuffle 'a'.
    const __m128 a_j2_0 = _mm_loadu_ps(&a[0   + j2]);  //   2,   3,   4,   5,
    const __m128 a_j2_4 = _mm_loadu_ps(&a[4   + j2]);  //   6,   7,   8,   9,
    const __m128 a_k2_0 = _mm_loadu_ps(&a[122 - j2]);  // 120, 121, 122, 123,
    const __m128 a_k2_4 = _mm_loadu_ps(&a[126 - j2]);  // 124, 125, 126, 127,
    const __m128 a_j2_p0 = _mm_shuffle_ps(a_j2_0, a_j2_4,
                            _MM_SHUFFLE(2, 0, 2 ,0));  //   2,   4,   6,   8,
    const __m128 a_j2_p1 = _mm_shuffle_ps(a_j2_0, a_j2_4,
                            _MM_SHUFFLE(3, 1, 3 ,1));  //   3,   5,   7,   9,
    const __m128 a_k2_p0 = _mm_shuffle_ps(a_k2_4, a_k2_0,
                            _MM_SHUFFLE(0, 2, 0 ,2));  // 126, 124, 122, 120,
    const __m128 a_k2_p1 = _mm_shuffle_ps(a_k2_4, a_k2_0,
                            _MM_SHUFFLE(1, 3, 1 ,3));  // 127, 125, 123, 121,
    // Calculate 'x'.
    const __m128 xr_ = _mm_sub_ps(a_j2_p0, a_k2_p0);
                                               // 2-126, 4-124, 6-122, 8-120,
    const __m128 xi_ = _mm_add_ps(a_j2_p1, a_k2_p1);
                                               // 3-127, 5-125, 7-123, 9-121,
    // Calculate product into 'y'.
    //    yr = wkr * xr + wki * xi;
    //    yi = wkr * xi - wki * xr;
    const __m128 a_ = _mm_mul_ps(wkr_, xr_);
    const __m128 b_ = _mm_mul_ps(wki_, xi_);
    const __m128 c_ = _mm_mul_ps(wkr_, xi_);
    const __m128 d_ = _mm_mul_ps(wki_, xr_);
    const __m128 yr_ = _mm_add_ps(a_, b_);     // 2-126, 4-124, 6-122, 8-120,
    const __m128 yi_ = _mm_sub_ps(c_, d_);     // 3-127, 5-125, 7-123, 9-121,
    // Update 'a'.
    //    a[j2 + 0] = a[j2 + 0] - yr;
    //    a[j2 + 1] = yi - a[j2 + 1];
    //    a[k2 + 0] = yr + a[k2 + 0];
    //    a[k2 + 1] = yi - a[k2 + 1];
    const __m128 a_j2_p0n = _mm_sub_ps(a_j2_p0, yr_);  //   2,   4,   6,   8,
    const __m128 a_j2_p1n = _mm_sub_ps(yi_, a_j2_p1);  //   3,   5,   7,   9,
    const __m128 a_k2_p0n = _mm_add_ps(a_k2_p0, yr_);  // 126, 124, 122, 120,
    const __m128 a_k2_p1n = _mm_sub_ps(yi_, a_k2_p1);  // 127, 125, 123, 121,
    // Shuffle in right order and store.
    const __m128 a_j2_0n = _mm_unpacklo_ps(a_j2_p0n, a_j2_p1n);
                                                       //   2,   3,   4,   5,
    const __m128 a_j2_4n = _mm_unpackhi_ps(a_j2_p0n, a_j2_p1n);
                                                       //   6,   7,   8,   9,
    const __m128 a_k2_0nt = _mm_unpackhi_ps(a_k2_p0n, a_k2_p1n);
                                                       // 122, 123, 120, 121,
    const __m128 a_k2_4nt = _mm_unpacklo_ps(a_k2_p0n, a_k2_p1n);
                                                       // 126, 127, 124, 125,
    const __m128 a_k2_0n = _mm_shuffle_ps(a_k2_0nt, a_k2_0nt,
                            _MM_SHUFFLE(1, 0, 3 ,2));  // 120, 121, 122, 123,
    const __m128 a_k2_4n = _mm_shuffle_ps(a_k2_4nt, a_k2_4nt,
                            _MM_SHUFFLE(1, 0, 3 ,2));  // 124, 125, 126, 127,
    _mm_storeu_ps(&a[0   + j2], a_j2_0n);
    _mm_storeu_ps(&a[4   + j2], a_j2_4n);
    _mm_storeu_ps(&a[122 - j2], a_k2_0n);
    _mm_storeu_ps(&a[126 - j2], a_k2_4n);
  }
  // Scalar code for the remaining items.
  for (; j2 < 64; j1 += 1, j2 += 2) {
    k2 = 128 - j2;
    k1 =  32 - j1;
    wkr = 0.5f - c[k1];
    wki = c[j1];
    xr = a[j2 + 0] - a[k2 + 0];
    xi = a[j2 + 1] + a[k2 + 1];
    yr = wkr * xr + wki * xi;
    yi = wkr * xi - wki * xr;
    a[j2 + 0] = a[j2 + 0] - yr;
    a[j2 + 1] = yi - a[j2 + 1];
    a[k2 + 0] = yr + a[k2 + 0];
    a[k2 + 1] = yi - a[k2 + 1];
  }
  a[65] = -a[65];
}
+
// Installs the SSE2 kernel implementations into the code-path function
// pointers.  Called from aec_rdft_init() when WebRtc_GetCPUInfo() reports
// SSE2 support.
void aec_rdft_init_sse2(void) {
  cft1st_128 = cft1st_128_SSE2;
  cftmdl_128 = cftmdl_128_SSE2;
  rftfsub_128 = rftfsub_128_SSE2;
  rftbsub_128 = rftbsub_128_SSE2;
}
+
diff --git a/trunk/src/modules/audio_processing/aec/aec_resampler.c b/trunk/src/modules/audio_processing/aec/aec_resampler.c
new file mode 100644
index 0000000..ea980cd
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_resampler.c
@@ -0,0 +1,233 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* Resamples a signal to an arbitrary rate. Used by the AEC to compensate for clock
+ * skew by resampling the farend signal.
+ */
+
+#include "aec_resampler.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+#include <math.h>
+
+#include "aec_core.h"
+
// Number of raw skew measurements collected before an estimate is computed
// (see WebRtcAec_GetSkew).
enum { kEstimateLengthFrames = 400 };

// Per-instance resampler state.
typedef struct {
    short buffer[kResamplerBufferSize];  // Farend sample history, including
                                         // the lookahead region written at
                                         // FRAME_LEN + kResamplingDelay.
    float position;                      // Fractional read position carried
                                         // over between frames.

    int deviceSampleRateHz;              // Used to derive skew outlier limits.
    int skewData[kEstimateLengthFrames]; // Raw skew measurement history.
    int skewDataIndex;                   // Count of measurements collected.
    float skewEstimate;                  // Cached result of EstimateSkew().
} resampler_t;
+
// Fits a skew estimate to the raw measurements; defined below.  The third
// parameter is the device sample rate (Hz), from which the outlier
// rejection limits are derived — named to match the definition and the
// call site, which passes obj->deviceSampleRateHz.
static int EstimateSkew(const int* rawSkew,
                        int size,
                        int deviceSampleRateHz,
                        float *skewEst);
+
+int WebRtcAec_CreateResampler(void **resampInst)
+{
+    resampler_t *obj = malloc(sizeof(resampler_t));
+    *resampInst = obj;
+    if (obj == NULL) {
+        return -1;
+    }
+
+    return 0;
+}
+
+int WebRtcAec_InitResampler(void *resampInst, int deviceSampleRateHz)
+{
+    resampler_t *obj = (resampler_t*) resampInst;
+    memset(obj->buffer, 0, sizeof(obj->buffer));
+    obj->position = 0.0;
+
+    obj->deviceSampleRateHz = deviceSampleRateHz;
+    memset(obj->skewData, 0, sizeof(obj->skewData));
+    obj->skewDataIndex = 0;
+    obj->skewEstimate = 0.0;
+
+    return 0;
+}
+
// Releases an instance created by WebRtcAec_CreateResampler().  Returns 0.
int WebRtcAec_FreeResampler(void *resampInst)
{
    free(resampInst);  // free() accepts NULL, so no guard is needed.
    return 0;
}
+
// Resamples |size| input samples by the ratio (1 + skew) using linear
// interpolation and writes the result to |outspeech|.
//
// resampInst - instance from WebRtcAec_CreateResampler()/InitResampler().
// inspeech   - input samples; at most 2 * FRAME_LEN of them.
// size       - number of input samples.
// skew       - relative clock skew to compensate for.
// outspeech  - receives the resampled samples.
//
// Returns the number of output samples produced, or -1 if |size| is out of
// range.
int WebRtcAec_ResampleLinear(void *resampInst,
                             const short *inspeech,
                             int size,
                             float skew,
                             short *outspeech)
{
    resampler_t *obj = (resampler_t*) resampInst;

    short *y;
    float be, tnew, interp;
    int tn, outsize, mm;

    if (size < 0 || size > 2 * FRAME_LEN) {
        return -1;
    }

    // Add new frame data in lookahead
    // (written kResamplingDelay samples past the frame start, which gives
    // the y[tn + 1] read below one sample of headroom at the frame end).
    memcpy(&obj->buffer[FRAME_LEN + kResamplingDelay],
           inspeech,
           size * sizeof(short));

    // Sample rate ratio
    be = 1 + skew;

    // Loop over input frame
    mm = 0;
    y = &obj->buffer[FRAME_LEN]; // Point at current frame

    tnew = be * mm + obj->position;
    tn = (int) tnew;

    while (tn < size) {

        // Interpolation
        interp = y[tn] + (tnew - tn) * (y[tn+1] - y[tn]);

        // Clamp to the 16-bit sample range.
        if (interp > 32767) {
            interp = 32767;
        }
        else if (interp < -32768) {
            interp = -32768;
        }

        outspeech[mm] = (short) interp;
        mm++;

        tnew = be * mm + obj->position;
        tn = (int) tnew;
    }

    outsize = mm;
    // Carry the fractional read position over to the next call.
    obj->position += outsize * be - size;

    // Shift buffer
    memmove(obj->buffer,
            &obj->buffer[size],
            (kResamplerBufferSize - size) * sizeof(short));

    return outsize;
}
+
+int WebRtcAec_GetSkew(void *resampInst, int rawSkew, float *skewEst)
+{
+    resampler_t *obj = (resampler_t*)resampInst;
+    int err = 0;
+
+    if (obj->skewDataIndex < kEstimateLengthFrames) {
+        obj->skewData[obj->skewDataIndex] = rawSkew;
+        obj->skewDataIndex++;
+    }
+    else if (obj->skewDataIndex == kEstimateLengthFrames) {
+        err = EstimateSkew(obj->skewData,
+                           kEstimateLengthFrames,
+                           obj->deviceSampleRateHz,
+                           skewEst);
+        obj->skewEstimate = *skewEst;
+        obj->skewDataIndex++;
+    }
+    else {
+        *skewEst = obj->skewEstimate;
+    }
+
+    return err;
+}
+
// Estimates clock skew by fitting a least-squares line to the cumulative
// sum of the raw skew measurements, after two rounds of outlier rejection.
//
// rawSkew            - raw skew measurements.
// size               - number of elements in |rawSkew|.
// deviceSampleRateHz - device sample rate; outlier limits are derived from it.
// skewEst            - out: estimated skew (set to 0 on error).
//
// Returns 0 on success, -1 if no measurements survive outlier rejection.
//
// Note: defined |static| to match the file-scope declaration above (the
// original definition omitted it); the dead asserts that followed the
// explicit n == 0 early returns have been removed.
static int EstimateSkew(const int* rawSkew,
                        int size,
                        int deviceSampleRateHz,
                        float *skewEst)
{
    // Hard outlier limits: 4% of the sample rate for the first passes,
    // 0.25% for the "inner" acceptance band of the final pass.
    const int absLimitOuter = (int)(0.04f * deviceSampleRateHz);
    const int absLimitInner = (int)(0.0025f * deviceSampleRateHz);
    int i = 0;
    int n = 0;
    float rawAvg = 0;
    float err = 0;
    float rawAbsDev = 0;
    int upperLimit = 0;
    int lowerLimit = 0;
    float cumSum = 0;
    float x = 0;
    float x2 = 0;
    float y = 0;
    float xy = 0;
    float xAvg = 0;
    float denom = 0;
    float skew = 0;

    *skewEst = 0; // Set in case of error below.

    // Pass 1: mean of the measurements inside the outer limit.
    for (i = 0; i < size; i++) {
      if ((rawSkew[i] < absLimitOuter && rawSkew[i] > -absLimitOuter)) {
        n++;
        rawAvg += rawSkew[i];
      }
    }
    if (n == 0) {
      return -1;
    }
    rawAvg /= n;

    // Pass 2: mean absolute deviation, used to derive data-driven limits.
    for (i = 0; i < size; i++) {
      if ((rawSkew[i] < absLimitOuter && rawSkew[i] > -absLimitOuter)) {
        err = rawSkew[i] - rawAvg;
        rawAbsDev += err >= 0 ? err : -err;
      }
    }
    rawAbsDev /= n;
    upperLimit = (int)(rawAvg + 5 * rawAbsDev + 1); // +1 for ceiling.
    lowerLimit = (int)(rawAvg - 5 * rawAbsDev - 1); // -1 for floor.

    // Pass 3: least-squares slope of the cumulative sum over the samples
    // inside either the inner band or the data-driven limits.
    n = 0;
    for (i = 0; i < size; i++) {
        if ((rawSkew[i] < absLimitInner && rawSkew[i] > -absLimitInner) ||
            (rawSkew[i] < upperLimit && rawSkew[i] > lowerLimit)) {
            n++;
            cumSum += rawSkew[i];
            x += n;
            x2 += n*n;
            y += cumSum;
            xy += n * cumSum;
        }
    }
    if (n == 0) {
      return -1;
    }
    xAvg = x / n;
    denom = x2 - xAvg*x;

    if (denom != 0) {
        skew = (xy - xAvg*y) / denom;
    }

    *skewEst = skew;
    return 0;
}
diff --git a/trunk/src/modules/audio_processing/aec/aec_resampler.h b/trunk/src/modules/audio_processing/aec/aec_resampler.h
new file mode 100644
index 0000000..ab4cc6e
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/aec_resampler.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_RESAMPLER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_RESAMPLER_H_
+
+#include "aec_core.h"
+
// Lookahead offset, in samples, kept in front of each stored frame so that
// the linear interpolator can read one sample past the frame end
// (see y[tn + 1] in WebRtcAec_ResampleLinear).
enum { kResamplingDelay = 1 };
// Size of the internal farend history buffer, in samples.
enum { kResamplerBufferSize = FRAME_LEN * 4 };

// Unless otherwise specified, functions return 0 on success and -1 on error
int WebRtcAec_CreateResampler(void **resampInst);
int WebRtcAec_InitResampler(void *resampInst, int deviceSampleRateHz);
int WebRtcAec_FreeResampler(void *resampInst);

// Estimates skew from raw measurement.
// The first kEstimateLengthFrames calls only collect data; afterwards
// |*skewEst| receives the (cached) estimate.
int WebRtcAec_GetSkew(void *resampInst, int rawSkew, float *skewEst);

// Resamples input using linear interpolation.
// Returns size of resampled array.
int WebRtcAec_ResampleLinear(void *resampInst,
                             const short *inspeech,
                             int size,
                             float skew,
                             short *outspeech);
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_RESAMPLER_H_
diff --git a/trunk/src/modules/audio_processing/aec/echo_cancellation.c b/trunk/src/modules/audio_processing/aec/echo_cancellation.c
new file mode 100644
index 0000000..bde9c87
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/echo_cancellation.c
@@ -0,0 +1,924 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Contains the API functions for the AEC.
+ */
+#include "echo_cancellation.h"
+
+#include <math.h>
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+#include <stdio.h>
+#endif
+#include <stdlib.h>
+#include <string.h>
+
+#include "aec_core.h"
+#include "aec_resampler.h"
+#include "common_audio/signal_processing/include/signal_processing_library.h"
+#include "ring_buffer.h"
+#include "typedefs.h"
+
+// Maximum length of resampled signal. Must be an integer multiple of frames
+// (ceil(1/(1 + MIN_SKEW)*2) + 1)*FRAME_LEN
+// The factor of 2 handles wb, and the + 1 is as a safety margin
+// TODO(bjornv): Replace with kResamplerBufferSize
+#define MAX_RESAMP_LEN (5 * FRAME_LEN)
+
+static const int kMaxBufSizeStart = 62;  // In partitions
+static const int sampMsNb = 8; // samples per ms in nb
+// Target suppression levels for nlp modes
+// log{0.001, 0.00001, 0.00000001}
+static const float targetSupp[3] = {-6.9f, -11.5f, -18.4f};
+// Minimum suppressor overdrive per NLP mode (indexed by kAecNlp*).
+static const float minOverDrive[3] = {1.0f, 2.0f, 5.0f};
+// Magic value stored in initFlag to mark a fully initialized instance.
+static const int initCheck = 42;
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+// Numbers the per-instance debug dump files (aec_far0.pcm, aec_far1.pcm, ...).
+static int instance_count = 0;
+#endif
+
+// Top-level AEC instance state: configuration, startup/delay-tracking state
+// machine, drift-resampling state and the core AEC (aec_t).
+typedef struct {
+    int delayCtr;
+    int sampFreq;       // Near-end sampling rate in Hz (8000/16000/32000).
+    int splitSampFreq;  // Band-split rate: 16000 for 32 kHz input, else sampFreq.
+    int scSampFreq;     // Sound-card sampling rate in Hz (validated 1..96000).
+    float sampFactor; // scSampRate / sampFreq
+    short nlpMode;    // Suppression aggressiveness (kAecNlp* in the API header).
+    short autoOnOff;
+    short activity;
+    short skewMode;   // kAecTrue enables skew estimation / drift resampling.
+    int bufSizeStart; // Startup far-end buffer size, in PART_LEN partitions.
+    //short bufResetCtr;  // counts number of noncausal frames
+    int knownDelay;   // Compensated delay (samples); steers far-end reads.
+
+    short initFlag; // indicates if AEC has been initialized
+
+    // Variables used for averaging far end buffer size
+    short counter;
+    int sum;
+    short firstVal;
+    short checkBufSizeCtr;
+
+    // Variables used for delay shifts
+    short msInSndCardBuf;   // Last reported system delay (ms), clamped, + 10 ms.
+    short filtDelay;  // Filtered delay estimate.
+    int timeForDelayChange; // Consecutive calls a delay change has persisted.
+    int ECstartup;          // Non-zero while the AEC is in its startup phase.
+    int checkBuffSize;      // Non-zero while system-delay stability is verified.
+    short lastDelayDiff;
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    void* far_pre_buf_s16;  // Time domain far-end pre-buffer in int16_t.
+    FILE *bufFile;
+    FILE *delayFile;
+    FILE *skewFile;
+#endif
+
+    // Structures
+    void *resampler;
+
+    int skewFrCtr;
+    int resample; // if the skew is small enough we don't resample
+    int highSkewCtr;
+    float skew;   // Latest clamped skew estimate (see WebRtcAec_Process()).
+
+    void* far_pre_buf;  // Time domain far-end pre-buffer.
+
+    int lastError;  // Last AEC_* error/warning code.
+
+    aec_t *aec;     // Core AEC state (aec_core.h).
+} aecpc_t;
+
+// Estimates delay to set the position of the far-end buffer read pointer
+// (controlled by knownDelay)
+static int EstBufDelay(aecpc_t *aecInst);
+
+// Allocates an AEC instance and its sub-components (core AEC, drift
+// resampler, far-end pre-buffer, and debug-dump state in debug builds).
+// On success *aecInst receives the new instance and 0 is returned; on
+// failure everything allocated so far is released, *aecInst is set to NULL
+// and -1 is returned. The instance must still be set up with
+// WebRtcAec_Init() before use.
+WebRtc_Word32 WebRtcAec_Create(void **aecInst)
+{
+    aecpc_t *aecpc;
+    if (aecInst == NULL) {
+        return -1;
+    }
+    // Do not publish the instance until construction fully succeeds;
+    // publishing first would leave the caller holding a dangling pointer
+    // whenever a failure path below frees the partially built instance.
+    *aecInst = NULL;
+
+    aecpc = malloc(sizeof(aecpc_t));
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (WebRtcAec_CreateAec(&aecpc->aec) == -1) {
+        // Clean up piecewise: WebRtcAec_Free() must not be used on a
+        // partially constructed instance, since it would free/fclose
+        // members that are still uninitialized.
+        free(aecpc);
+        return -1;
+    }
+
+    if (WebRtcAec_CreateResampler(&aecpc->resampler) == -1) {
+        WebRtcAec_FreeAec(aecpc->aec);
+        free(aecpc);
+        return -1;
+    }
+    // Create far-end pre-buffer. The buffer size has to be large enough for
+    // largest possible drift compensation (kResamplerBufferSize) + "almost" an
+    // FFT buffer (PART_LEN2 - 1).
+    if (WebRtc_CreateBuffer(&aecpc->far_pre_buf,
+                            PART_LEN2 + kResamplerBufferSize,
+                            sizeof(float)) == -1) {
+        WebRtcAec_FreeResampler(aecpc->resampler);
+        WebRtcAec_FreeAec(aecpc->aec);
+        free(aecpc);
+        return -1;
+    }
+
+    aecpc->initFlag = 0;
+    aecpc->lastError = 0;
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    if (WebRtc_CreateBuffer(&aecpc->far_pre_buf_s16,
+                            PART_LEN2 + kResamplerBufferSize,
+                            sizeof(int16_t)) == -1) {
+        WebRtc_FreeBuffer(aecpc->far_pre_buf);
+        WebRtcAec_FreeResampler(aecpc->resampler);
+        WebRtcAec_FreeAec(aecpc->aec);
+        free(aecpc);
+        return -1;
+    }
+    {
+      char filename[64];
+      // NOTE(review): fopen() results are unchecked; a failure here leads to
+      // fwrite()/fclose() on NULL later. Confirm the dump files are meant to
+      // be best-effort.
+      sprintf(filename, "aec_far%d.pcm", instance_count);
+      aecpc->aec->farFile = fopen(filename, "wb");
+      sprintf(filename, "aec_near%d.pcm", instance_count);
+      aecpc->aec->nearFile = fopen(filename, "wb");
+      sprintf(filename, "aec_out%d.pcm", instance_count);
+      aecpc->aec->outFile = fopen(filename, "wb");
+      sprintf(filename, "aec_out_linear%d.pcm", instance_count);
+      aecpc->aec->outLinearFile = fopen(filename, "wb");
+      sprintf(filename, "aec_buf%d.dat", instance_count);
+      aecpc->bufFile = fopen(filename, "wb");
+      sprintf(filename, "aec_skew%d.dat", instance_count);
+      aecpc->skewFile = fopen(filename, "wb");
+      sprintf(filename, "aec_delay%d.dat", instance_count);
+      aecpc->delayFile = fopen(filename, "wb");
+      instance_count++;
+    }
+#endif
+
+    *aecInst = aecpc;
+    return 0;
+}
+
+// Releases an AEC instance created with WebRtcAec_Create(): the far-end
+// pre-buffer, the core AEC state, the resampler and (in debug builds) the
+// int16 pre-buffer and dump files. Returns 0 on success, -1 if |aecInst|
+// is NULL.
+WebRtc_Word32 WebRtcAec_Free(void *aecInst)
+{
+    aecpc_t *aecpc = aecInst;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    WebRtc_FreeBuffer(aecpc->far_pre_buf);
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    WebRtc_FreeBuffer(aecpc->far_pre_buf_s16);
+    // NOTE(review): these FILE* handles are fclose()d unconditionally even
+    // though the corresponding fopen() calls in Create are unchecked —
+    // fclose(NULL) is undefined behavior; confirm the handles are valid.
+    fclose(aecpc->aec->farFile);
+    fclose(aecpc->aec->nearFile);
+    fclose(aecpc->aec->outFile);
+    fclose(aecpc->aec->outLinearFile);
+    fclose(aecpc->bufFile);
+    fclose(aecpc->skewFile);
+    fclose(aecpc->delayFile);
+#endif
+
+    WebRtcAec_FreeAec(aecpc->aec);
+    WebRtcAec_FreeResampler(aecpc->resampler);
+    free(aecpc);
+
+    return 0;
+}
+
+// Initializes (or re-initializes) an allocated AEC instance for the given
+// near-end rate |sampFreq| (8000/16000/32000 Hz) and sound-card rate
+// |scSampFreq| (1..96000 Hz). Resets the core AEC, the resampler, the
+// far-end pre-buffer, the startup/delay state machine, and applies the
+// default configuration (moderate NLP; skew, metrics and delay logging
+// off). Returns 0 on success, -1 on error with lastError set.
+WebRtc_Word32 WebRtcAec_Init(void *aecInst, WebRtc_Word32 sampFreq, WebRtc_Word32 scSampFreq)
+{
+    aecpc_t *aecpc = aecInst;
+    AecConfig aecConfig;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (sampFreq != 8000 && sampFreq != 16000  && sampFreq != 32000) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    aecpc->sampFreq = sampFreq;
+
+    if (scSampFreq < 1 || scSampFreq > 96000) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    aecpc->scSampFreq = scSampFreq;
+
+    // Initialize echo canceller core
+    if (WebRtcAec_InitAec(aecpc->aec, aecpc->sampFreq) == -1) {
+        aecpc->lastError = AEC_UNSPECIFIED_ERROR;
+        return -1;
+    }
+
+    if (WebRtcAec_InitResampler(aecpc->resampler, aecpc->scSampFreq) == -1) {
+        aecpc->lastError = AEC_UNSPECIFIED_ERROR;
+        return -1;
+    }
+
+    if (WebRtc_InitBuffer(aecpc->far_pre_buf) == -1) {
+        aecpc->lastError = AEC_UNSPECIFIED_ERROR;
+        return -1;
+    }
+    WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN);  // Start overlap.
+
+    aecpc->initFlag = initCheck;  // indicates that initialization has been done
+
+    // 32 kHz input is processed band-split; both bands run at 16 kHz.
+    if (aecpc->sampFreq == 32000) {
+        aecpc->splitSampFreq = 16000;
+    }
+    else {
+        aecpc->splitSampFreq = sampFreq;
+    }
+
+    aecpc->skewFrCtr = 0;
+    aecpc->activity = 0;
+
+    aecpc->delayCtr = 0;
+
+    aecpc->sum = 0;
+    aecpc->counter = 0;
+    aecpc->checkBuffSize = 1;
+    aecpc->firstVal = 0;
+
+    // Start in startup mode with the buffer-size estimation active.
+    aecpc->ECstartup = 1;
+    aecpc->bufSizeStart = 0;
+    aecpc->checkBufSizeCtr = 0;
+    aecpc->filtDelay = 0;
+    aecpc->timeForDelayChange = 0;
+    aecpc->knownDelay = 0;
+    aecpc->lastDelayDiff = 0;
+
+    aecpc->skew = 0;
+    aecpc->resample = kAecFalse;
+    aecpc->highSkewCtr = 0;
+    // NOTE(review): computed against splitSampFreq, while the struct comment
+    // for sampFactor says "scSampRate / sampFreq" — confirm which is intended.
+    aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq;
+
+    // Default settings.
+    aecConfig.nlpMode = kAecNlpModerate;
+    aecConfig.skewMode = kAecFalse;
+    aecConfig.metricsMode = kAecFalse;
+    aecConfig.delay_logging = kAecFalse;
+
+    if (WebRtcAec_set_config(aecpc, aecConfig) == -1) {
+        aecpc->lastError = AEC_UNSPECIFIED_ERROR;
+        return -1;
+    }
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    if (WebRtc_InitBuffer(aecpc->far_pre_buf_s16) == -1) {
+        aecpc->lastError = AEC_UNSPECIFIED_ERROR;
+        return -1;
+    }
+    WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN);  // Start overlap.
+#endif
+
+    return 0;
+}
+
+// only buffer L band for farend
+WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst, const WebRtc_Word16 *farend,
+    WebRtc_Word16 nrOfSamples)
+{
+    aecpc_t *aecpc = aecInst;
+    WebRtc_Word32 retVal = 0;
+    int newNrOfSamples = (int) nrOfSamples;
+    short newFarend[MAX_RESAMP_LEN];
+    const int16_t* farend_ptr = farend;
+    float tmp_farend[MAX_RESAMP_LEN];
+    const float* farend_float = tmp_farend;
+    float skew;
+    int i = 0;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (farend == NULL) {
+        aecpc->lastError = AEC_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (aecpc->initFlag != initCheck) {
+        aecpc->lastError = AEC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    // number of samples == 160 for SWB input
+    if (nrOfSamples != 80 && nrOfSamples != 160) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+
+    skew = aecpc->skew;
+
+    if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) {
+        // Resample and get a new number of samples
+        newNrOfSamples = WebRtcAec_ResampleLinear(aecpc->resampler,
+                                                  farend,
+                                                  nrOfSamples,
+                                                  skew,
+                                                  newFarend);
+        farend_ptr = (const int16_t*) newFarend;
+    }
+
+    aecpc->aec->system_delay += newNrOfSamples;
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    WebRtc_WriteBuffer(aecpc->far_pre_buf_s16, farend_ptr,
+                       (size_t) newNrOfSamples);
+#endif
+    // Cast to float and write the time-domain data to |far_pre_buf|.
+    for (i = 0; i < newNrOfSamples; i++) {
+      tmp_farend[i] = (float) farend_ptr[i];
+    }
+    WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_float,
+                       (size_t) newNrOfSamples);
+
+    // Transform to frequency domain if we have enough data.
+    while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) {
+      // We have enough data to pass to the FFT, hence read PART_LEN2 samples.
+      WebRtc_ReadBuffer(aecpc->far_pre_buf, (void**) &farend_float, tmp_farend,
+                        PART_LEN2);
+
+      WebRtcAec_BufferFarendPartition(aecpc->aec, farend_float);
+
+      // Rewind |far_pre_buf| PART_LEN samples for overlap before continuing.
+      WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN);
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+      WebRtc_ReadBuffer(aecpc->far_pre_buf_s16, (void**) &farend_ptr, newFarend,
+                        PART_LEN2);
+      WebRtc_WriteBuffer(aecpc->aec->far_time_buf, &farend_ptr[PART_LEN], 1);
+      WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN);
+#endif
+    }
+
+    return retVal;
+}
+
+// Processes one or two 10 ms near-end frames and writes the echo-suppressed
+// signal to |out| (and |outH| for the 32 kHz high band). |msInSndCardBuf| is
+// the system delay reported by the audio layer (clamped to [0, 500] ms, then
+// + 10 ms); |skew| is the raw skew measurement fed to the resampler's
+// estimator when skew mode is on. During the startup phase the near-end is
+// passed through unmodified while a stable far-end buffer level is
+// established; afterwards each FRAME_LEN frame runs through the core AEC.
+// Returns 0, or -1 with lastError set (possibly only a clamp warning).
+WebRtc_Word32 WebRtcAec_Process(void *aecInst, const WebRtc_Word16 *nearend,
+    const WebRtc_Word16 *nearendH, WebRtc_Word16 *out, WebRtc_Word16 *outH,
+    WebRtc_Word16 nrOfSamples, WebRtc_Word16 msInSndCardBuf, WebRtc_Word32 skew)
+{
+    aecpc_t *aecpc = aecInst;
+    WebRtc_Word32 retVal = 0;
+    short i;
+    short nBlocks10ms;
+    short nFrames;
+    // Limit resampling to doubling/halving of signal
+    const float minSkewEst = -0.5f;
+    const float maxSkewEst = 1.0f;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (nearend == NULL) {
+        aecpc->lastError = AEC_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (out == NULL) {
+        aecpc->lastError = AEC_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (aecpc->initFlag != initCheck) {
+        aecpc->lastError = AEC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    // number of samples == 160 for SWB input
+    if (nrOfSamples != 80 && nrOfSamples != 160) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+
+    // Check for valid pointers based on sampling rate
+    if (aecpc->sampFreq == 32000 && nearendH == NULL) {
+       aecpc->lastError = AEC_NULL_POINTER_ERROR;
+       return -1;
+    }
+
+    if (msInSndCardBuf < 0) {
+        msInSndCardBuf = 0;
+        aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
+        retVal = -1;
+    }
+    else if (msInSndCardBuf > 500) {
+        msInSndCardBuf = 500;
+        aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
+        retVal = -1;
+    }
+    // TODO(andrew): we need to investigate if this +10 is really wanted.
+    msInSndCardBuf += 10;
+    aecpc->msInSndCardBuf = msInSndCardBuf;
+
+    if (aecpc->skewMode == kAecTrue) {
+        // Skip the first 25 frames before starting skew estimation.
+        if (aecpc->skewFrCtr < 25) {
+            aecpc->skewFrCtr++;
+        }
+        else {
+            retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew);
+            if (retVal == -1) {
+                aecpc->skew = 0;
+                aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
+            }
+
+            aecpc->skew /= aecpc->sampFactor*nrOfSamples;
+
+            // Only enable drift resampling when the skew is non-negligible.
+            if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) {
+                aecpc->resample = kAecFalse;
+            }
+            else {
+                aecpc->resample = kAecTrue;
+            }
+
+            if (aecpc->skew < minSkewEst) {
+                aecpc->skew = minSkewEst;
+            }
+            else if (aecpc->skew > maxSkewEst) {
+                aecpc->skew = maxSkewEst;
+            }
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+            fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
+#endif
+        }
+    }
+
+    nFrames = nrOfSamples / FRAME_LEN;
+    nBlocks10ms = nFrames / aecpc->aec->mult;
+
+    if (aecpc->ECstartup) {
+        if (nearend != out) {
+            // Only needed if they don't already point to the same place.
+            memcpy(out, nearend, sizeof(short) * nrOfSamples);
+        }
+
+        // The AEC is in the start up mode
+        // AEC is disabled until the system delay is OK
+
+        // Mechanism to ensure that the system delay is reasonably stable.
+        if (aecpc->checkBuffSize) {
+            aecpc->checkBufSizeCtr++;
+            // Before we fill up the far-end buffer we require the system delay
+            // to be stable (+/-8 ms) compared to the first value. This
+            // comparison is made during the following 6 consecutive 10 ms
+            // blocks. If it seems to be stable then we start to fill up the
+            // far-end buffer.
+            if (aecpc->counter == 0) {
+                aecpc->firstVal = aecpc->msInSndCardBuf;
+                aecpc->sum = 0;
+            }
+
+            if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) <
+                WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) {
+                aecpc->sum += aecpc->msInSndCardBuf;
+                aecpc->counter++;
+            }
+            else {
+                // Unstable reading; restart the stability check.
+                aecpc->counter = 0;
+            }
+
+            if (aecpc->counter * nBlocks10ms >= 6) {
+                // The far-end buffer size is determined in partitions of
+                // PART_LEN samples. Use 75% of the average value of the system
+                // delay as buffer size to start with.
+                aecpc->bufSizeStart = WEBRTC_SPL_MIN((3 * aecpc->sum *
+                  aecpc->aec->mult * 8) / (4 * aecpc->counter * PART_LEN),
+                  kMaxBufSizeStart);
+                // Buffer size has now been determined.
+                aecpc->checkBuffSize = 0;
+            }
+
+            if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) {
+                // For really bad systems, don't disable the echo canceller for
+                // more than 0.5 sec.
+                aecpc->bufSizeStart = WEBRTC_SPL_MIN((aecpc->msInSndCardBuf *
+                    aecpc->aec->mult * 3) / 40, kMaxBufSizeStart);
+                aecpc->checkBuffSize = 0;
+            }
+        }
+
+        // If |checkBuffSize| changed in the if-statement above.
+        if (!aecpc->checkBuffSize) {
+            // The system delay is now reasonably stable (or has been unstable
+            // for too long). When the far-end buffer is filled with
+            // approximately the same amount of data as reported by the system
+            // we end the startup phase.
+            int overhead_elements = aecpc->aec->system_delay / PART_LEN -
+                aecpc->bufSizeStart;
+            if (overhead_elements == 0) {
+                // Enable the AEC
+                aecpc->ECstartup = 0;
+            } else if (overhead_elements > 0) {
+                WebRtc_MoveReadPtr(aecpc->aec->far_buf_windowed,
+                                   overhead_elements);
+                WebRtc_MoveReadPtr(aecpc->aec->far_buf, overhead_elements);
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+                WebRtc_MoveReadPtr(aecpc->aec->far_time_buf, overhead_elements);
+#endif
+                // TODO(bjornv): Do we need a check on how much we actually
+                // moved the read pointer? It should always be possible to move
+                // the pointer |overhead_elements| since we have only added data
+                // to the buffer and no delay compensation nor AEC processing
+                // has been done.
+                aecpc->aec->system_delay -= overhead_elements * PART_LEN;
+
+                // Enable the AEC
+                aecpc->ECstartup = 0;
+            }
+        }
+    } else {
+        // AEC is enabled.
+
+        int out_elements = 0;
+
+        EstBufDelay(aecpc);
+
+        // Note that 1 frame is supported for NB and 2 frames for WB.
+        for (i = 0; i < nFrames; i++) {
+            int16_t* out_ptr = NULL;
+            int16_t out_tmp[FRAME_LEN];
+
+            // Call the AEC.
+            // NOTE(review): &nearendH[FRAME_LEN * i] is formed even when
+            // nearendH is NULL (8/16 kHz input); pointer arithmetic on NULL
+            // is UB — confirm ProcessFrame ignores the H band in that case.
+            WebRtcAec_ProcessFrame(aecpc->aec,
+                                   &nearend[FRAME_LEN * i],
+                                   &nearendH[FRAME_LEN * i],
+                                   aecpc->knownDelay);
+            // TODO(bjornv): Re-structure such that we don't have to pass
+            // |aecpc->knownDelay| as input. Change name to something like
+            // |system_buffer_diff|.
+
+            // Stuff the out buffer if we have less than a frame to output.
+            // This should only happen for the first frame.
+            out_elements = (int) WebRtc_available_read(aecpc->aec->outFrBuf);
+            if (out_elements < FRAME_LEN) {
+                WebRtc_MoveReadPtr(aecpc->aec->outFrBuf,
+                                   out_elements - FRAME_LEN);
+                if (aecpc->sampFreq == 32000) {
+                    WebRtc_MoveReadPtr(aecpc->aec->outFrBufH,
+                                       out_elements - FRAME_LEN);
+                }
+            }
+
+            // Obtain an output frame.
+            WebRtc_ReadBuffer(aecpc->aec->outFrBuf, (void**) &out_ptr,
+                              out_tmp, FRAME_LEN);
+            memcpy(&out[FRAME_LEN * i], out_ptr, sizeof(int16_t) * FRAME_LEN);
+            // For H band
+            if (aecpc->sampFreq == 32000) {
+                WebRtc_ReadBuffer(aecpc->aec->outFrBufH, (void**) &out_ptr,
+                                  out_tmp, FRAME_LEN);
+                memcpy(&outH[FRAME_LEN * i], out_ptr,
+                       sizeof(int16_t) * FRAME_LEN);
+            }
+        }
+    }
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+    {
+        int16_t far_buf_size_ms = (int16_t) (aecpc->aec->system_delay /
+            (sampMsNb * aecpc->aec->mult));
+        fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile);
+        fwrite(&(aecpc->knownDelay), sizeof(aecpc->knownDelay), 1, aecpc->delayFile);
+    }
+#endif
+
+    return retVal;
+}
+
+// Applies run-time configuration: skew mode, NLP aggressiveness (which also
+// selects the target suppression level and minimum overdrive in the core),
+// metrics mode (re-initializing metrics when enabled) and delay logging
+// (clearing the histogram when enabled). Requires an initialized instance.
+// Returns 0 on success, -1 with lastError set on invalid values.
+WebRtc_Word32 WebRtcAec_set_config(void *aecInst, AecConfig config)
+{
+    aecpc_t *aecpc = aecInst;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (aecpc->initFlag != initCheck) {
+        aecpc->lastError = AEC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    if (config.skewMode != kAecFalse && config.skewMode != kAecTrue) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    aecpc->skewMode = config.skewMode;
+
+    if (config.nlpMode != kAecNlpConservative && config.nlpMode !=
+            kAecNlpModerate && config.nlpMode != kAecNlpAggressive) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    aecpc->nlpMode = config.nlpMode;
+    aecpc->aec->targetSupp = targetSupp[aecpc->nlpMode];
+    aecpc->aec->minOverDrive = minOverDrive[aecpc->nlpMode];
+
+    if (config.metricsMode != kAecFalse && config.metricsMode != kAecTrue) {
+        aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    aecpc->aec->metricsMode = config.metricsMode;
+    if (aecpc->aec->metricsMode == kAecTrue) {
+        WebRtcAec_InitMetrics(aecpc->aec);
+    }
+
+  if (config.delay_logging != kAecFalse && config.delay_logging != kAecTrue) {
+    aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
+    return -1;
+  }
+  aecpc->aec->delay_logging_enabled = config.delay_logging;
+  if (aecpc->aec->delay_logging_enabled == kAecTrue) {
+    memset(aecpc->aec->delay_histogram, 0, sizeof(aecpc->aec->delay_histogram));
+  }
+
+    return 0;
+}
+
+// Copies the current configuration (NLP mode, skew mode, metrics mode and
+// delay logging) into |config|. Requires an initialized instance. Returns 0
+// on success, -1 with lastError set otherwise.
+WebRtc_Word32 WebRtcAec_get_config(void *aecInst, AecConfig *config)
+{
+    aecpc_t *aecpc = aecInst;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (config == NULL) {
+        aecpc->lastError = AEC_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (aecpc->initFlag != initCheck) {
+        aecpc->lastError = AEC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    config->nlpMode = aecpc->nlpMode;
+    config->skewMode = aecpc->skewMode;
+    config->metricsMode = aecpc->aec->metricsMode;
+    config->delay_logging = aecpc->aec->delay_logging_enabled;
+
+    return 0;
+}
+
+// Reports the core AEC's current echo state in |*status|. Requires an
+// initialized instance. Returns 0 on success, -1 with lastError set
+// otherwise.
+WebRtc_Word32 WebRtcAec_get_echo_status(void *aecInst, WebRtc_Word16 *status)
+{
+    aecpc_t *aecpc = aecInst;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (status == NULL) {
+        aecpc->lastError = AEC_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (aecpc->initFlag != initCheck) {
+        aecpc->lastError = AEC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    *status = aecpc->aec->echoState;
+
+    return 0;
+}
+
+// Fills |metrics| with ERL, ERLE, RERL and A_NLP levels derived from the
+// core AEC statistics. Each average blends the upper-part mean (70 % weight)
+// with the regular mean; values are floored at offsetLevel (a constant
+// defined outside this file — presumably in aec_core; confirm) when the
+// underlying statistic has not risen above it. Requires an initialized
+// instance. Returns 0 on success, -1 with lastError set otherwise.
+WebRtc_Word32 WebRtcAec_GetMetrics(void *aecInst, AecMetrics *metrics)
+{
+    const float upweight = 0.7f;
+    float dtmp;
+    short stmp;
+    aecpc_t *aecpc = aecInst;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    if (metrics == NULL) {
+        aecpc->lastError = AEC_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (aecpc->initFlag != initCheck) {
+        aecpc->lastError = AEC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    // ERL
+    metrics->erl.instant = (short) aecpc->aec->erl.instant;
+
+    if ((aecpc->aec->erl.himean > offsetLevel) && (aecpc->aec->erl.average > offsetLevel)) {
+    // Use a mix between regular average and upper part average
+        dtmp = upweight * aecpc->aec->erl.himean + (1 - upweight) * aecpc->aec->erl.average;
+        metrics->erl.average = (short) dtmp;
+    }
+    else {
+        metrics->erl.average = offsetLevel;
+    }
+
+    metrics->erl.max = (short) aecpc->aec->erl.max;
+
+    if (aecpc->aec->erl.min < (offsetLevel * (-1))) {
+        metrics->erl.min = (short) aecpc->aec->erl.min;
+    }
+    else {
+        metrics->erl.min = offsetLevel;
+    }
+
+    // ERLE
+    metrics->erle.instant = (short) aecpc->aec->erle.instant;
+
+    if ((aecpc->aec->erle.himean > offsetLevel) && (aecpc->aec->erle.average > offsetLevel)) {
+        // Use a mix between regular average and upper part average
+        dtmp =  upweight * aecpc->aec->erle.himean + (1 - upweight) * aecpc->aec->erle.average;
+        metrics->erle.average = (short) dtmp;
+    }
+    else {
+        metrics->erle.average = offsetLevel;
+    }
+
+    metrics->erle.max = (short) aecpc->aec->erle.max;
+
+    if (aecpc->aec->erle.min < (offsetLevel * (-1))) {
+        metrics->erle.min = (short) aecpc->aec->erle.min;
+    } else {
+        metrics->erle.min = offsetLevel;
+    }
+
+    // RERL
+    // Residual echo return loss: sum of the (already floored) ERL and ERLE
+    // averages when both carry data.
+    if ((metrics->erl.average > offsetLevel) && (metrics->erle.average > offsetLevel)) {
+        stmp = metrics->erl.average + metrics->erle.average;
+    }
+    else {
+        stmp = offsetLevel;
+    }
+    metrics->rerl.average = stmp;
+
+    // No other statistics needed, but returned for completeness
+    metrics->rerl.instant = stmp;
+    metrics->rerl.max = stmp;
+    metrics->rerl.min = stmp;
+
+    // A_NLP
+    metrics->aNlp.instant = (short) aecpc->aec->aNlp.instant;
+
+    if ((aecpc->aec->aNlp.himean > offsetLevel) && (aecpc->aec->aNlp.average > offsetLevel)) {
+        // Use a mix between regular average and upper part average
+        dtmp =  upweight * aecpc->aec->aNlp.himean + (1 - upweight) * aecpc->aec->aNlp.average;
+        metrics->aNlp.average = (short) dtmp;
+    }
+    else {
+        metrics->aNlp.average = offsetLevel;
+    }
+
+    metrics->aNlp.max = (short) aecpc->aec->aNlp.max;
+
+    if (aecpc->aec->aNlp.min < (offsetLevel * (-1))) {
+        metrics->aNlp.min = (short) aecpc->aec->aNlp.min;
+    }
+    else {
+        metrics->aNlp.min = offsetLevel;
+    }
+
+    return 0;
+}
+
+// Computes delay metrics from the core AEC's delay histogram: |median| is
+// the median delay in ms (compensated for lookahead) and |std| an L1-based
+// spread in ms. The histogram is reset after a successful read. Requires an
+// initialized instance with delay logging enabled. Returns 0 on success
+// (reporting -1/-1 when no new data has been collected), or -1 on error
+// with lastError set.
+int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) {
+  aecpc_t* self = handle;
+  int i = 0;
+  int delay_values = 0;
+  int num_delay_values = 0;
+  int my_median = 0;
+  int ms_per_block = 0;
+  float l1_norm = 0;
+
+  if (self == NULL) {
+    return -1;
+  }
+  if (median == NULL) {
+    self->lastError = AEC_NULL_POINTER_ERROR;
+    return -1;
+  }
+  if (std == NULL) {
+    self->lastError = AEC_NULL_POINTER_ERROR;
+    return -1;
+  }
+  if (self->initFlag != initCheck) {
+    self->lastError = AEC_UNINITIALIZED_ERROR;
+    return -1;
+  }
+  if (self->aec->delay_logging_enabled == 0) {
+    // Logging disabled
+    self->lastError = AEC_UNSUPPORTED_FUNCTION_ERROR;
+    return -1;
+  }
+  // Computed only after the validation above; the original initialized this
+  // in the declaration, dereferencing |self| before its NULL check.
+  ms_per_block = (PART_LEN * 1000) / self->splitSampFreq;
+
+  // Get number of delay values since last update
+  for (i = 0; i < kHistorySizeBlocks; i++) {
+    num_delay_values += self->aec->delay_histogram[i];
+  }
+  if (num_delay_values == 0) {
+    // We have no new delay value data. Even though -1 is a valid estimate, it
+    // will practically never be used since multiples of |ms_per_block| will
+    // always be returned.
+    *median = -1;
+    *std = -1;
+    return 0;
+  }
+
+  delay_values = num_delay_values >> 1; // Start value for median count down
+  // Get median of delay values since last update
+  for (i = 0; i < kHistorySizeBlocks; i++) {
+    delay_values -= self->aec->delay_histogram[i];
+    if (delay_values < 0) {
+      my_median = i;
+      break;
+    }
+  }
+  // Account for lookahead.
+  *median = (my_median - kLookaheadBlocks) * ms_per_block;
+
+  // Calculate the L1 norm, with median value as central moment
+  for (i = 0; i < kHistorySizeBlocks; i++) {
+    l1_norm += (float) (fabs(i - my_median) * self->aec->delay_histogram[i]);
+  }
+  // NOTE(review): rounding happens before the ms conversion, so |*std| is
+  // always a multiple of ms_per_block — confirm this is intended.
+  *std = (int) (l1_norm / (float) num_delay_values + 0.5f) * ms_per_block;
+
+  // Reset histogram
+  memset(self->aec->delay_histogram, 0, sizeof(self->aec->delay_histogram));
+
+  return 0;
+}
+
+// Returns the most recent AEC_* error/warning code recorded on this
+// instance, or -1 if |aecInst| is NULL.
+WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst)
+{
+    aecpc_t *aecpc = aecInst;
+
+    if (aecpc == NULL) {
+        return -1;
+    }
+
+    return aecpc->lastError;
+}
+
+// Filters the instantaneous delay estimate (reported sound-card buffering
+// minus the internally tracked far-end system delay, both in samples) and
+// updates |knownDelay| once a changed delay has persisted for more than 25
+// consecutive calls. The 224/96-sample thresholds form a hysteresis band so
+// the read pointer is not moved on transient fluctuations. Always returns 0.
+static int EstBufDelay(aecpc_t* aecpc) {
+  int nSampSndCard = aecpc->msInSndCardBuf * sampMsNb * aecpc->aec->mult;
+  int current_delay = nSampSndCard - aecpc->aec->system_delay;
+  int delay_difference = 0;
+
+  // Before we proceed with the delay estimate filtering we:
+  // 1) Compensate for the frame that will be read.
+  // 2) Compensate for drift resampling.
+
+  // 1) Compensating for the frame(s) that will be read/processed.
+  current_delay += FRAME_LEN * aecpc->aec->mult;
+
+  // 2) Account for resampling frame delay.
+  if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) {
+    current_delay -= kResamplingDelay;
+  }
+
+  // Exponential smoothing (0.8 old / 0.2 new), floored at zero.
+  aecpc->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.8 * aecpc->filtDelay +
+          0.2 * current_delay));
+
+  delay_difference = aecpc->filtDelay - aecpc->knownDelay;
+  if (delay_difference > 224) {
+    if (aecpc->lastDelayDiff < 96) {
+      aecpc->timeForDelayChange = 0;
+    } else {
+      aecpc->timeForDelayChange++;
+    }
+  } else if (delay_difference < 96 && aecpc->knownDelay > 0) {
+    if (aecpc->lastDelayDiff > 224) {
+      aecpc->timeForDelayChange = 0;
+    } else {
+      aecpc->timeForDelayChange++;
+    }
+  } else {
+    aecpc->timeForDelayChange = 0;
+  }
+  aecpc->lastDelayDiff = delay_difference;
+
+  if (aecpc->timeForDelayChange > 25) {
+    // Commit the new delay, less a 160-sample offset — presumably headroom
+    // so the AEC stays causal; confirm against the core's expectations.
+    aecpc->knownDelay = WEBRTC_SPL_MAX((int) aecpc->filtDelay - 160, 0);
+  }
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_processing/aec/include/echo_cancellation.h b/trunk/src/modules/audio_processing/aec/include/echo_cancellation.h
new file mode 100644
index 0000000..a266e84
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aec/include/echo_cancellation.h
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
+
+#include "typedefs.h"
+
+// Errors
+#define AEC_UNSPECIFIED_ERROR           12000
+#define AEC_UNSUPPORTED_FUNCTION_ERROR  12001
+#define AEC_UNINITIALIZED_ERROR         12002
+#define AEC_NULL_POINTER_ERROR          12003
+#define AEC_BAD_PARAMETER_ERROR         12004
+
+// Warnings
+#define AEC_BAD_PARAMETER_WARNING       12050
+
+enum {
+    kAecNlpConservative = 0,
+    kAecNlpModerate,
+    kAecNlpAggressive
+};
+
+enum {
+    kAecFalse = 0,
+    kAecTrue
+};
+
+typedef struct {
+    WebRtc_Word16 nlpMode;        // default kAecNlpModerate
+    WebRtc_Word16 skewMode;       // default kAecFalse
+    WebRtc_Word16 metricsMode;    // default kAecFalse
+    int delay_logging;            // default kAecFalse
+    //float realSkew;
+} AecConfig;
+
+typedef struct {
+    WebRtc_Word16 instant;
+    WebRtc_Word16 average;
+    WebRtc_Word16 max;
+    WebRtc_Word16 min;
+} AecLevel;
+
+typedef struct {
+    AecLevel rerl;
+    AecLevel erl;
+    AecLevel erle;
+    AecLevel aNlp;
+} AecMetrics;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Allocates the memory needed by the AEC. The memory needs to be initialized
+ * separately using the WebRtcAec_Init() function.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void **aecInst               Pointer to the AEC instance to be created
+ *                              and initialized
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32 return          0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Create(void **aecInst);
+
+/*
+ * This function releases the memory allocated by WebRtcAec_Create().
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void         *aecInst        Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Free(void *aecInst);
+
+/*
+ * Initializes an AEC instance.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ * WebRtc_Word32  sampFreq      Sampling frequency of data
+ * WebRtc_Word32  scSampFreq    Soundcard sampling frequency
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32 return          0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Init(void *aecInst,
+                             WebRtc_Word32 sampFreq,
+                             WebRtc_Word32 scSampFreq);
+
+/*
+ * Inserts an 80 or 160 sample block of data into the farend buffer.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ * WebRtc_Word16  *farend       In buffer containing one frame of
+ *                              farend signal for L band
+ * WebRtc_Word16  nrOfSamples   Number of samples in farend buffer
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst,
+                                     const WebRtc_Word16 *farend,
+                                     WebRtc_Word16 nrOfSamples);
+
+/*
+ * Runs the echo canceller on 80 or 160 sample blocks of data.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void          *aecInst       Pointer to the AEC instance
+ * WebRtc_Word16 *nearend       In buffer containing one frame of
+ *                              nearend+echo signal for L band
+ * WebRtc_Word16 *nearendH      In buffer containing one frame of
+ *                              nearend+echo signal for H band
+ * WebRtc_Word16 nrOfSamples    Number of samples in nearend buffer
+ * WebRtc_Word16 msInSndCardBuf Delay estimate for sound card and
+ *                              system buffers
+ * WebRtc_Word16 skew           Difference between number of samples played
+ *                              and recorded at the soundcard (for clock skew
+ *                              compensation)
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word16  *out          Out buffer, one frame of processed nearend
+ *                              for L band
+ * WebRtc_Word16  *outH         Out buffer, one frame of processed nearend
+ *                              for H band
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Process(void *aecInst,
+                                const WebRtc_Word16 *nearend,
+                                const WebRtc_Word16 *nearendH,
+                                WebRtc_Word16 *out,
+                                WebRtc_Word16 *outH,
+                                WebRtc_Word16 nrOfSamples,
+                                WebRtc_Word16 msInSndCardBuf,
+                                WebRtc_Word32 skew);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ * AecConfig      config        Config instance that contains all
+ *                              properties to be set
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_set_config(void *aecInst, AecConfig config);
+
+/*
+ * Gets the on-the-fly parameters.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * AecConfig      *config       Pointer to the config instance that
+ *                              all properties will be written to
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_get_config(void *aecInst, AecConfig *config);
+
+/*
+ * Gets the current echo status of the nearend signal.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word16  *status       0: Almost certainly nearend single-talk
+ *                              1: Might not be nearend single-talk
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_get_echo_status(void *aecInst, WebRtc_Word16 *status);
+
+/*
+ * Gets the current echo metrics for the session.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * AecMetrics     *metrics      Struct which will be filled out with the
+ *                              current echo metrics.
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_GetMetrics(void *aecInst, AecMetrics *metrics);
+
+/*
+ * Gets the current delay metrics for the session.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void*      handle            Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * int*       median            Delay median value.
+ * int*       std               Delay standard deviation.
+ *
+ * int        return             0: OK
+ *                              -1: error
+ */
+int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std);
+
+/*
+ * Gets the last error code.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        12000-12050: error code
+ */
+WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst);
+
+#ifdef __cplusplus
+}
+#endif
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
diff --git a/trunk/src/modules/audio_processing/aecm/Android.mk b/trunk/src/modules/audio_processing/aecm/Android.mk
new file mode 100644
index 0000000..2d64b85
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/Android.mk
@@ -0,0 +1,78 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+#############################
+# Build the non-neon library.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings; provides MY_WEBRTC_COMMON_DEFS used below.
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_aecm
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    echo_control_mobile.c \
+    aecm_core.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../utility \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../../system_wrappers/interface
+
+LOCAL_STATIC_LIBRARIES += libwebrtc_system_wrappers
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# In a full Android platform build (no NDK_ROOT set) STLport config comes
+# from the platform tree; NDK builds get it from the toolchain instead.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+
+#########################
+# Build the neon library.
+# Only built when WEBRTC_BUILD_NEON_LIBS is enabled; the NEON-specific
+# sources are compiled with NEON/softfp codegen flags.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_aecm_neon
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := aecm_core_neon.c
+
+# Flags passed to both C and C++ files.
+# -flax-vector-conversions relaxes NEON intrinsic vector-type checks.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    -mfpu=neon \
+    -mfloat-abi=softfp \
+    -flax-vector-conversions
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+
+endif # ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
diff --git a/trunk/src/modules/audio_processing/aecm/aecm.gypi b/trunk/src/modules/audio_processing/aecm/aecm.gypi
new file mode 100644
index 0000000..3e4a113
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/aecm.gypi
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      # Static library for the mobile echo control (AECM) module.
+      'target_name': 'aecm',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        'apm_util'
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      # Export 'include' so dependents can use echo_control_mobile.h.
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/echo_control_mobile.h',
+        'echo_control_mobile.c',
+        'aecm_core.c',
+        'aecm_core.h',
+      ],
+    },
+  ],
+}
diff --git a/trunk/src/modules/audio_processing/aecm/aecm_core.c b/trunk/src/modules/audio_processing/aecm/aecm_core.c
new file mode 100644
index 0000000..4a51d56
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/aecm_core.c
@@ -0,0 +1,2131 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "aecm_core.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include "cpu_features_wrapper.h"
+#include "delay_estimator_wrapper.h"
+#include "echo_control_mobile.h"
+#include "ring_buffer.h"
+#include "typedefs.h"
+
+#ifdef ARM_WINM_LOG
+#include <stdio.h>
+#include <windows.h>
+#endif
+
+#ifdef AEC_DEBUG
+FILE *dfile;
+FILE *testfile;
+#endif
+
+#ifdef _MSC_VER // visual c++
+#define ALIGN8_BEG __declspec(align(8))
+#define ALIGN8_END
+#else // gcc or icc
+#define ALIGN8_BEG
+#define ALIGN8_END __attribute__((aligned(8)))
+#endif
+
+#ifdef AECM_SHORT
+
+// Square root of Hanning window in Q14
+const WebRtc_Word16 WebRtcAecm_kSqrtHanning[] =
+{
+    0, 804, 1606, 2404, 3196, 3981, 4756, 5520,
+    6270, 7005, 7723, 8423, 9102, 9760, 10394, 11003,
+    11585, 12140, 12665, 13160, 13623, 14053, 14449, 14811,
+    15137, 15426, 15679, 15893, 16069, 16207, 16305, 16364,
+    16384
+};
+
+#else
+
+// Square root of Hanning window in Q14
+const ALIGN8_BEG WebRtc_Word16 WebRtcAecm_kSqrtHanning[] ALIGN8_END =
+{
+    0, 399, 798, 1196, 1594, 1990, 2386, 2780, 3172,
+    3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224, 6591, 6954, 7313, 7668, 8019, 8364,
+    8705, 9040, 9370, 9695, 10013, 10326, 10633, 10933, 11227, 11514, 11795, 12068, 12335,
+    12594, 12845, 13089, 13325, 13553, 13773, 13985, 14189, 14384, 14571, 14749, 14918,
+    15079, 15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034, 16111, 16179, 16237,
+    16286, 16325, 16354, 16373, 16384
+};
+
+#endif
+
+//Q15 alpha = 0.99439986968132  const Factor for magnitude approximation
+static const WebRtc_UWord16 kAlpha1 = 32584;
+//Q15 beta = 0.12967166976970   const Factor for magnitude approximation
+static const WebRtc_UWord16 kBeta1 = 4249;
+//Q15 alpha = 0.94234827210087  const Factor for magnitude approximation
+static const WebRtc_UWord16 kAlpha2 = 30879;
+//Q15 beta = 0.33787806009150   const Factor for magnitude approximation
+static const WebRtc_UWord16 kBeta2 = 11072;
+//Q15 alpha = 0.82247698684306  const Factor for magnitude approximation
+static const WebRtc_UWord16 kAlpha3 = 26951;
+//Q15 beta = 0.57762063060713   const Factor for magnitude approximation
+static const WebRtc_UWord16 kBeta3 = 18927;
+
+// Initialization table for echo channel in 8 kHz
+static const WebRtc_Word16 kChannelStored8kHz[PART_LEN1] = {
+    2040,   1815,   1590,   1498,   1405,   1395,   1385,   1418,
+    1451,   1506,   1562,   1644,   1726,   1804,   1882,   1918,
+    1953,   1982,   2010,   2025,   2040,   2034,   2027,   2021,
+    2014,   1997,   1980,   1925,   1869,   1800,   1732,   1683,
+    1635,   1604,   1572,   1545,   1517,   1481,   1444,   1405,
+    1367,   1331,   1294,   1270,   1245,   1239,   1233,   1247,
+    1260,   1282,   1303,   1338,   1373,   1407,   1441,   1470,
+    1499,   1524,   1549,   1565,   1582,   1601,   1621,   1649,
+    1676
+};
+
+// Initialization table for echo channel in 16 kHz
+static const WebRtc_Word16 kChannelStored16kHz[PART_LEN1] = {
+    2040,   1590,   1405,   1385,   1451,   1562,   1726,   1882,
+    1953,   2010,   2040,   2027,   2014,   1980,   1869,   1732,
+    1635,   1572,   1517,   1444,   1367,   1294,   1245,   1233,
+    1260,   1303,   1373,   1441,   1499,   1549,   1582,   1621,
+    1676,   1741,   1802,   1861,   1921,   1983,   2040,   2102,
+    2170,   2265,   2375,   2515,   2651,   2781,   2922,   3075,
+    3253,   3471,   3738,   3976,   4151,   4258,   4308,   4288,
+    4270,   4253,   4237,   4179,   4086,   3947,   3757,   3484,
+    3153
+};
+
+static const WebRtc_Word16 kCosTable[] = {
+    8192,  8190,  8187,  8180,  8172,  8160,  8147,  8130,  8112,
+    8091,  8067,  8041,  8012,  7982,  7948,  7912,  7874,  7834,
+    7791,  7745,  7697,  7647,  7595,  7540,  7483,  7424,  7362,
+    7299,  7233,  7164,  7094,  7021,  6947,  6870,  6791,  6710,
+    6627,  6542,  6455,  6366,  6275,  6182,  6087,  5991,  5892,
+    5792,  5690,  5586,  5481,  5374,  5265,  5155,  5043,  4930,
+    4815,  4698,  4580,  4461,  4341,  4219,  4096,  3971,  3845,
+    3719,  3591,  3462,  3331,  3200,  3068,  2935,  2801,  2667,
+    2531,  2395,  2258,  2120,  1981,  1842,  1703,  1563,  1422,
+    1281,  1140,   998,   856,   713,   571,   428,   285,   142,
+       0,  -142,  -285,  -428,  -571,  -713,  -856,  -998, -1140,
+   -1281, -1422, -1563, -1703, -1842, -1981, -2120, -2258, -2395,
+   -2531, -2667, -2801, -2935, -3068, -3200, -3331, -3462, -3591,
+   -3719, -3845, -3971, -4095, -4219, -4341, -4461, -4580, -4698,
+   -4815, -4930, -5043, -5155, -5265, -5374, -5481, -5586, -5690,
+   -5792, -5892, -5991, -6087, -6182, -6275, -6366, -6455, -6542,
+   -6627, -6710, -6791, -6870, -6947, -7021, -7094, -7164, -7233,
+   -7299, -7362, -7424, -7483, -7540, -7595, -7647, -7697, -7745,
+   -7791, -7834, -7874, -7912, -7948, -7982, -8012, -8041, -8067,
+   -8091, -8112, -8130, -8147, -8160, -8172, -8180, -8187, -8190,
+   -8191, -8190, -8187, -8180, -8172, -8160, -8147, -8130, -8112,
+   -8091, -8067, -8041, -8012, -7982, -7948, -7912, -7874, -7834,
+   -7791, -7745, -7697, -7647, -7595, -7540, -7483, -7424, -7362,
+   -7299, -7233, -7164, -7094, -7021, -6947, -6870, -6791, -6710,
+   -6627, -6542, -6455, -6366, -6275, -6182, -6087, -5991, -5892,
+   -5792, -5690, -5586, -5481, -5374, -5265, -5155, -5043, -4930,
+   -4815, -4698, -4580, -4461, -4341, -4219, -4096, -3971, -3845,
+   -3719, -3591, -3462, -3331, -3200, -3068, -2935, -2801, -2667,
+   -2531, -2395, -2258, -2120, -1981, -1842, -1703, -1563, -1422,
+   -1281, -1140,  -998,  -856,  -713,  -571,  -428,  -285,  -142,
+       0,   142,   285,   428,   571,   713,   856,   998,  1140,
+    1281,  1422,  1563,  1703,  1842,  1981,  2120,  2258,  2395,
+    2531,  2667,  2801,  2935,  3068,  3200,  3331,  3462,  3591,
+    3719,  3845,  3971,  4095,  4219,  4341,  4461,  4580,  4698,
+    4815,  4930,  5043,  5155,  5265,  5374,  5481,  5586,  5690,
+    5792,  5892,  5991,  6087,  6182,  6275,  6366,  6455,  6542,
+    6627,  6710,  6791,  6870,  6947,  7021,  7094,  7164,  7233,
+    7299,  7362,  7424,  7483,  7540,  7595,  7647,  7697,  7745,
+    7791,  7834,  7874,  7912,  7948,  7982,  8012,  8041,  8067,
+    8091,  8112,  8130,  8147,  8160,  8172,  8180,  8187,  8190
+};
+
+static const WebRtc_Word16 kSinTable[] = {
+       0,    142,    285,    428,    571,    713,    856,    998,
+    1140,   1281,   1422,   1563,   1703,   1842,   1981,   2120,
+    2258,   2395,   2531,   2667,   2801,   2935,   3068,   3200,
+    3331,   3462,   3591,   3719,   3845,   3971,   4095,   4219,
+    4341,   4461,   4580,   4698,   4815,   4930,   5043,   5155,
+    5265,   5374,   5481,   5586,   5690,   5792,   5892,   5991,
+    6087,   6182,   6275,   6366,   6455,   6542,   6627,   6710,
+    6791,   6870,   6947,   7021,   7094,   7164,   7233,   7299,
+    7362,   7424,   7483,   7540,   7595,   7647,   7697,   7745,
+    7791,   7834,   7874,   7912,   7948,   7982,   8012,   8041,
+    8067,   8091,   8112,   8130,   8147,   8160,   8172,   8180,
+    8187,   8190,   8191,   8190,   8187,   8180,   8172,   8160,
+    8147,   8130,   8112,   8091,   8067,   8041,   8012,   7982,
+    7948,   7912,   7874,   7834,   7791,   7745,   7697,   7647,
+    7595,   7540,   7483,   7424,   7362,   7299,   7233,   7164,
+    7094,   7021,   6947,   6870,   6791,   6710,   6627,   6542,
+    6455,   6366,   6275,   6182,   6087,   5991,   5892,   5792,
+    5690,   5586,   5481,   5374,   5265,   5155,   5043,   4930,
+    4815,   4698,   4580,   4461,   4341,   4219,   4096,   3971,
+    3845,   3719,   3591,   3462,   3331,   3200,   3068,   2935,
+    2801,   2667,   2531,   2395,   2258,   2120,   1981,   1842,
+    1703,   1563,   1422,   1281,   1140,    998,    856,    713,
+     571,    428,    285,    142,      0,   -142,   -285,   -428,
+    -571,   -713,   -856,   -998,  -1140,  -1281,  -1422,  -1563,
+   -1703,  -1842,  -1981,  -2120,  -2258,  -2395,  -2531,  -2667,
+   -2801,  -2935,  -3068,  -3200,  -3331,  -3462,  -3591,  -3719,
+   -3845,  -3971,  -4095,  -4219,  -4341,  -4461,  -4580,  -4698,
+   -4815,  -4930,  -5043,  -5155,  -5265,  -5374,  -5481,  -5586,
+   -5690,  -5792,  -5892,  -5991,  -6087,  -6182,  -6275,  -6366,
+   -6455,  -6542,  -6627,  -6710,  -6791,  -6870,  -6947,  -7021,
+   -7094,  -7164,  -7233,  -7299,  -7362,  -7424,  -7483,  -7540,
+   -7595,  -7647,  -7697,  -7745,  -7791,  -7834,  -7874,  -7912,
+   -7948,  -7982,  -8012,  -8041,  -8067,  -8091,  -8112,  -8130,
+   -8147,  -8160,  -8172,  -8180,  -8187,  -8190,  -8191,  -8190,
+   -8187,  -8180,  -8172,  -8160,  -8147,  -8130,  -8112,  -8091,
+   -8067,  -8041,  -8012,  -7982,  -7948,  -7912,  -7874,  -7834,
+   -7791,  -7745,  -7697,  -7647,  -7595,  -7540,  -7483,  -7424,
+   -7362,  -7299,  -7233,  -7164,  -7094,  -7021,  -6947,  -6870,
+   -6791,  -6710,  -6627,  -6542,  -6455,  -6366,  -6275,  -6182,
+   -6087,  -5991,  -5892,  -5792,  -5690,  -5586,  -5481,  -5374,
+   -5265,  -5155,  -5043,  -4930,  -4815,  -4698,  -4580,  -4461,
+   -4341,  -4219,  -4096,  -3971,  -3845,  -3719,  -3591,  -3462,
+   -3331,  -3200,  -3068,  -2935,  -2801,  -2667,  -2531,  -2395,
+   -2258,  -2120,  -1981,  -1842,  -1703,  -1563,  -1422,  -1281,
+   -1140,   -998,   -856,   -713,   -571,   -428,   -285,   -142
+};
+
+static const WebRtc_Word16 kNoiseEstQDomain = 15;
+static const WebRtc_Word16 kNoiseEstIncCount = 5;
+
+static void ComfortNoise(AecmCore_t* aecm,
+                         const WebRtc_UWord16* dfa,
+                         complex16_t* out,
+                         const WebRtc_Word16* lambda);
+
+static WebRtc_Word16 CalcSuppressionGain(AecmCore_t * const aecm);
+
+// Advances the circular far-end history one slot and records |far_spectrum|
+// together with its Q-domain |far_q| at the new write position.
+//
+// Inputs:
+//      - self          : Pointer to the delay estimation instance
+//      - far_spectrum  : Pointer to the far end spectrum
+//      - far_q         : Q-domain of far end spectrum
+//
+static void UpdateFarHistory(AecmCore_t* self,
+                             uint16_t* far_spectrum,
+                             int far_q) {
+  // Advance the write index, wrapping around the circular buffer.
+  int new_pos = self->far_history_pos + 1;
+  if (new_pos >= MAX_DELAY) {
+    new_pos = 0;
+  }
+  self->far_history_pos = new_pos;
+  // Record the Q-domain that goes with this slot.
+  self->far_q_domains[new_pos] = far_q;
+  // Copy the spectrum into the slot's PART_LEN1-wide segment.
+  memcpy(self->far_history + new_pos * PART_LEN1,
+         far_spectrum,
+         sizeof(uint16_t) * PART_LEN1);
+}
+
+// Returns a pointer to the far end spectrum recorded |delay| blocks before
+// the current write position, reporting its Q-domain through |far_q|.
+// WebRtc_DelayEstimatorProcessFix(...) should have been called first;
+// otherwise the previous frame's data is returned. The returned memory is
+// only valid until the next call of WebRtc_DelayEstimatorProcessFix(...).
+//
+// Inputs:
+//      - self              : Pointer to the AECM instance.
+//      - delay             : Current delay estimate.
+//
+// Output:
+//      - far_q             : The Q-domain of the aligned far end spectrum
+//
+// Return value:
+//      - far_spectrum      : Pointer to the aligned far end spectrum
+//
+static const uint16_t* AlignedFarend(AecmCore_t* self, int* far_q, int delay) {
+  int pos;
+  assert(self != NULL);
+
+  // Step back |delay| slots in the circular history, wrapping if needed.
+  pos = self->far_history_pos - delay;
+  if (pos < 0) {
+    pos += MAX_DELAY;
+  }
+  // Report the slot's Q-domain and hand back its spectrum segment.
+  *far_q = self->far_q_domains[pos];
+  return self->far_history + pos * PART_LEN1;
+}
+
+#ifdef ARM_WINM_LOG
+HANDLE logFile = NULL;
+#endif
+
+// Declare function pointers.
+CalcLinearEnergies WebRtcAecm_CalcLinearEnergies;
+StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
+ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
+WindowAndFFT WebRtcAecm_WindowAndFFT;
+InverseFFTAndWindow WebRtcAecm_InverseFFTAndWindow;
+
+// Allocates an AECM core instance, its four frame ring buffers, and the
+// delay estimator, then fixes up the 16/32-byte aligned buffer pointers
+// (alignment is only needed by the Neon code paths).
+// Returns 0 on success. On any failure everything allocated so far is
+// released via WebRtcAecm_FreeCore and -1 is returned.
+// NOTE(review): |aecm| comes from malloc, so members not yet assigned are
+// indeterminate on the early error paths — assumes WebRtcAecm_FreeCore()
+// tolerates a partially initialized instance; confirm.
+int WebRtcAecm_CreateCore(AecmCore_t **aecmInst)
+{
+    AecmCore_t *aecm = malloc(sizeof(AecmCore_t));
+    *aecmInst = aecm;
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aecm->farFrameBuf, FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1)
+    {
+        WebRtcAecm_FreeCore(aecm);
+        aecm = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aecm->nearNoisyFrameBuf, FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1)
+    {
+        WebRtcAecm_FreeCore(aecm);
+        aecm = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aecm->nearCleanFrameBuf, FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1)
+    {
+        WebRtcAecm_FreeCore(aecm);
+        aecm = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aecm->outFrameBuf, FRAME_LEN + PART_LEN,
+                            sizeof(int16_t)) == -1)
+    {
+        WebRtcAecm_FreeCore(aecm);
+        aecm = NULL;
+        return -1;
+    }
+
+    if (WebRtc_CreateDelayEstimator(&aecm->delay_estimator,
+                                    PART_LEN1,
+                                    MAX_DELAY,
+                                    0) == -1) {
+      WebRtcAecm_FreeCore(aecm);
+      aecm = NULL;
+      return -1;
+    }
+
+    // Init some aecm pointers. 16 and 32 byte alignment is only necessary
+    // for Neon code currently. Each pointer is rounded up to the next
+    // aligned address inside its backing *_buf array.
+    aecm->xBuf = (WebRtc_Word16*) (((uintptr_t)aecm->xBuf_buf + 31) & ~ 31);
+    aecm->dBufClean = (WebRtc_Word16*) (((uintptr_t)aecm->dBufClean_buf + 31) & ~ 31);
+    aecm->dBufNoisy = (WebRtc_Word16*) (((uintptr_t)aecm->dBufNoisy_buf + 31) & ~ 31);
+    aecm->outBuf = (WebRtc_Word16*) (((uintptr_t)aecm->outBuf_buf + 15) & ~ 15);
+    aecm->channelStored = (WebRtc_Word16*) (((uintptr_t)
+                                             aecm->channelStored_buf + 15) & ~ 15);
+    aecm->channelAdapt16 = (WebRtc_Word16*) (((uintptr_t)
+                                              aecm->channelAdapt16_buf + 15) & ~ 15);
+    aecm->channelAdapt32 = (WebRtc_Word32*) (((uintptr_t)
+                                              aecm->channelAdapt32_buf + 31) & ~ 31);
+
+    return 0;
+}
+
+// Resets both the stored and the adaptive echo channels to |echo_path| and
+// restarts the channel-selection (MSE comparison) bookkeeping.
+void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm, const WebRtc_Word16* echo_path)
+{
+    int k;
+
+    // Load the stored and the 16-bit adaptive channel from |echo_path|.
+    memcpy(aecm->channelStored, echo_path, sizeof(WebRtc_Word16) * PART_LEN1);
+    memcpy(aecm->channelAdapt16, echo_path, sizeof(WebRtc_Word16) * PART_LEN1);
+    // Mirror the 16-bit adaptive channel into its 32-bit (<<16) counterpart.
+    for (k = 0; k < PART_LEN1; k++)
+    {
+        aecm->channelAdapt32[k] = WEBRTC_SPL_LSHIFT_W32(
+            (WebRtc_Word32)(aecm->channelAdapt16[k]), 16);
+    }
+
+    // Restart the channel storing state.
+    aecm->mseChannelCount = 0;
+    aecm->mseAdaptOld = 1000;
+    aecm->mseStoredOld = 1000;
+    aecm->mseThreshold = WEBRTC_SPL_WORD32_MAX;
+}
+
+// Applies the square-root Hanning window to |time_signal| (pre-scaled by
+// |time_signal_scaling|), runs the forward complex FFT over the scratch
+// array |fft|, and writes the first PART_LEN complex bins to |freq_signal|
+// with the imaginary parts sign-flipped.
+static void WindowAndFFTC(WebRtc_Word16* fft,
+                          const WebRtc_Word16* time_signal,
+                          complex16_t* freq_signal,
+                          int time_signal_scaling)
+{
+    int i, j;
+
+    memset(fft, 0, sizeof(WebRtc_Word16) * PART_LEN4);
+    // FFT of signal
+    for (i = 0, j = 0; i < PART_LEN; i++, j += 2)
+    {
+        // Window time domain signal and insert into real part of
+        // transformation array |fft|. The window is applied in Q14
+        // (hence the shift by 14).
+        fft[j] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+            (time_signal[i] << time_signal_scaling),
+            WebRtcAecm_kSqrtHanning[i],
+            14);
+        // Second half of the block gets the mirrored (falling) window edge.
+        fft[PART_LEN2 + j] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+            (time_signal[i + PART_LEN] << time_signal_scaling),
+            WebRtcAecm_kSqrtHanning[PART_LEN - i],
+            14);
+        // Inserting zeros in imaginary parts not necessary since we
+        // initialized the array with all zeros
+    }
+
+    WebRtcSpl_ComplexBitReverse(fft, PART_LEN_SHIFT);
+    WebRtcSpl_ComplexFFT(fft, PART_LEN_SHIFT, 1);
+
+    // Take only the first PART_LEN2 samples (PART_LEN complex bins).
+    for (i = 0, j = 0; j < PART_LEN2; i += 1, j += 2)
+    {
+        freq_signal[i].real = fft[j];
+
+        // The imaginary part has to switch sign
+        freq_signal[i].imag = - fft[j+1];
+    }
+}
+
+static void InverseFFTAndWindowC(AecmCore_t* aecm,
+                                 WebRtc_Word16* fft,
+                                 complex16_t* efw,
+                                 WebRtc_Word16* output,
+                                 const WebRtc_Word16* nearendClean)
+{
+    int i, j, outCFFT;
+    WebRtc_Word32 tmp32no1;
+
+    // Synthesis
+    for (i = 1; i < PART_LEN; i++)
+    {
+        j = WEBRTC_SPL_LSHIFT_W32(i, 1);
+        fft[j] = efw[i].real;
+
+        // mirrored data, even
+        fft[PART_LEN4 - j] = efw[i].real;
+        fft[j + 1] = -efw[i].imag;
+
+        //mirrored data, odd
+        fft[PART_LEN4 - (j - 1)] = efw[i].imag;
+    }
+    fft[0] = efw[0].real;
+    fft[1] = -efw[0].imag;
+
+    fft[PART_LEN2] = efw[PART_LEN].real;
+    fft[PART_LEN2 + 1] = -efw[PART_LEN].imag;
+
+    // inverse FFT, result should be scaled with outCFFT
+    WebRtcSpl_ComplexBitReverse(fft, PART_LEN_SHIFT);
+    outCFFT = WebRtcSpl_ComplexIFFT(fft, PART_LEN_SHIFT, 1);
+
+    //take only the real values and scale with outCFFT
+    for (i = 0; i < PART_LEN2; i++)
+    {
+        j = WEBRTC_SPL_LSHIFT_W32(i, 1);
+        fft[i] = fft[j];
+    }
+
+    for (i = 0; i < PART_LEN; i++)
+    {
+        fft[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                fft[i],
+                WebRtcAecm_kSqrtHanning[i],
+                14);
+        tmp32no1 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32)fft[i],
+                outCFFT - aecm->dfaCleanQDomain);
+        fft[i] = (WebRtc_Word16)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX,
+                tmp32no1 + aecm->outBuf[i],
+                WEBRTC_SPL_WORD16_MIN);
+        output[i] = fft[i];
+
+        tmp32no1 = WEBRTC_SPL_MUL_16_16_RSFT(
+                fft[PART_LEN + i],
+                WebRtcAecm_kSqrtHanning[PART_LEN - i],
+                14);
+        tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1,
+                outCFFT - aecm->dfaCleanQDomain);
+        aecm->outBuf[i] = (WebRtc_Word16)WEBRTC_SPL_SAT(
+                WEBRTC_SPL_WORD16_MAX,
+                tmp32no1,
+                WEBRTC_SPL_WORD16_MIN);
+    }
+
+#ifdef ARM_WINM_LOG_
+    // measure tick end
+    QueryPerformanceCounter((LARGE_INTEGER*)&end);
+    diff__ = ((end - start) * 1000) / (freq/1000);
+    milliseconds = (unsigned int)(diff__ & 0xffffffff);
+    WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+#endif
+
+    // Copy the current block to the old position (aecm->outBuf is shifted elsewhere)
+    memcpy(aecm->xBuf, aecm->xBuf + PART_LEN, sizeof(WebRtc_Word16) * PART_LEN);
+    memcpy(aecm->dBufNoisy, aecm->dBufNoisy + PART_LEN, sizeof(WebRtc_Word16) * PART_LEN);
+    if (nearendClean != NULL)
+    {
+        memcpy(aecm->dBufClean, aecm->dBufClean + PART_LEN, sizeof(WebRtc_Word16) * PART_LEN);
+    }
+}
+
+// Computes the echo estimate through the stored channel and accumulates the
+// energy of the delayed far-end spectrum and of the echo estimated through
+// both the adaptive and the stored channel.
+// Note: the three energy outputs are accumulated into, not overwritten.
+static void CalcLinearEnergiesC(AecmCore_t* aecm,
+                                const WebRtc_UWord16* far_spectrum,
+                                WebRtc_Word32* echo_est,
+                                WebRtc_UWord32* far_energy,
+                                WebRtc_UWord32* echo_energy_adapt,
+                                WebRtc_UWord32* echo_energy_stored)
+{
+    WebRtc_UWord32 far_acc = 0;
+    WebRtc_UWord32 adapt_acc = 0;
+    WebRtc_UWord32 stored_acc = 0;
+    int bin;
+
+    for (bin = 0; bin < PART_LEN1; bin++)
+    {
+        // Per-bin echo estimate through the stored channel.
+        echo_est[bin] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[bin],
+                                              far_spectrum[bin]);
+        far_acc += (WebRtc_UWord32)(far_spectrum[bin]);
+        adapt_acc += WEBRTC_SPL_UMUL_16_16(aecm->channelAdapt16[bin],
+                                           far_spectrum[bin]);
+        stored_acc += (WebRtc_UWord32)echo_est[bin];
+    }
+
+    (*far_energy) += far_acc;
+    (*echo_energy_adapt) += adapt_acc;
+    (*echo_energy_stored) += stored_acc;
+}
+
+// Copies the adaptive channel into the stored channel (done every block
+// during startup) and recomputes the echo estimate from the stored channel.
+static void StoreAdaptiveChannelC(AecmCore_t* aecm,
+                                  const WebRtc_UWord16* far_spectrum,
+                                  WebRtc_Word32* echo_est)
+{
+    int bin;
+
+    // During startup we store the channel every block.
+    memcpy(aecm->channelStored, aecm->channelAdapt16,
+           sizeof(WebRtc_Word16) * PART_LEN1);
+
+    // Recalculate the echo estimate for all PART_LEN1 bins. (The original
+    // unrolled this loop by four plus a trailing element; a plain loop
+    // computes exactly the same values.)
+    for (bin = 0; bin < PART_LEN1; bin++)
+    {
+        echo_est[bin] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[bin],
+                                              far_spectrum[bin]);
+    }
+}
+
+// Rolls the adaptive channel back to the stored one. Called when the stored
+// channel has shown a significantly lower MSE than the adaptive channel for
+// two consecutive evaluations.
+static void ResetAdaptiveChannelC(AecmCore_t* aecm)
+{
+    int bin;
+
+    memcpy(aecm->channelAdapt16, aecm->channelStored,
+           sizeof(WebRtc_Word16) * PART_LEN1);
+
+    // Rebuild the 32-bit adaptive channel from the 16-bit stored values
+    // (left-shift by 16 restores the high-resolution representation).
+    // A plain loop replaces the original's four-way unroll; the values are
+    // identical.
+    for (bin = 0; bin < PART_LEN1; bin++)
+    {
+        aecm->channelAdapt32[bin] = WEBRTC_SPL_LSHIFT_W32(
+                (WebRtc_Word32)aecm->channelStored[bin], 16);
+    }
+}
+
+// WebRtcAecm_InitCore(...)
+//
+// This function initializes the AECM instance created with WebRtcAecm_CreateCore(...)
+// Input:
+//      - aecm            : Pointer to the Echo Suppression instance
+//      - samplingFreq   : Sampling Frequency
+//
+// Output:
+//      - aecm            : Initialized instance
+//
+// Return value         :  0 - Ok
+//                        -1 - Error
+//
+int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq)
+{
+    int i = 0;
+    // Seeds for the initial pink-noise shaping loop below.
+    WebRtc_Word32 tmp32 = PART_LEN1 * PART_LEN1;
+    WebRtc_Word16 tmp16 = PART_LEN1;
+
+    // Sanity check of sampling frequency: only 8 and 16 kHz are supported.
+    // NOTE(review): the assignment to |samplingFreq| has no effect for the
+    // caller since -1 is returned immediately after it.
+    if (samplingFreq != 8000 && samplingFreq != 16000)
+    {
+        samplingFreq = 8000;
+        return -1;
+    }
+    // Band multiplier relative to 8 kHz (1 or 2). The cast binds to
+    // |samplingFreq| before the integer division.
+    aecm->mult = (WebRtc_Word16)samplingFreq / 8000;
+
+    aecm->farBufWritePos = 0;
+    aecm->farBufReadPos = 0;
+    aecm->knownDelay = 0;
+    aecm->lastKnownDelay = 0;
+
+    WebRtc_InitBuffer(aecm->farFrameBuf);
+    WebRtc_InitBuffer(aecm->nearNoisyFrameBuf);
+    WebRtc_InitBuffer(aecm->nearCleanFrameBuf);
+    WebRtc_InitBuffer(aecm->outFrameBuf);
+
+    // Clear all time-domain block buffers.
+    memset(aecm->xBuf_buf, 0, sizeof(aecm->xBuf_buf));
+    memset(aecm->dBufClean_buf, 0, sizeof(aecm->dBufClean_buf));
+    memset(aecm->dBufNoisy_buf, 0, sizeof(aecm->dBufNoisy_buf));
+    memset(aecm->outBuf_buf, 0, sizeof(aecm->outBuf_buf));
+
+    // Arbitrary fixed seed, kept deterministic across inits.
+    aecm->seed = 666;
+    aecm->totCount = 0;
+
+    if (WebRtc_InitDelayEstimator(aecm->delay_estimator) != 0) {
+      return -1;
+    }
+    // Set far end histories to zero
+    memset(aecm->far_history, 0, sizeof(uint16_t) * PART_LEN1 * MAX_DELAY);
+    memset(aecm->far_q_domains, 0, sizeof(int) * MAX_DELAY);
+    // NOTE(review): the position starts at MAX_DELAY — presumably it wraps
+    // to 0 on the first history update; confirm against the history writer.
+    aecm->far_history_pos = MAX_DELAY;
+
+    aecm->nlpFlag = 1;
+    aecm->fixedDelay = -1;       // -1: no externally fixed delay.
+
+    aecm->dfaCleanQDomain = 0;
+    aecm->dfaCleanQDomainOld = 0;
+    aecm->dfaNoisyQDomain = 0;
+    aecm->dfaNoisyQDomainOld = 0;
+
+    // Clear the log-energy histories.
+    memset(aecm->nearLogEnergy, 0, sizeof(aecm->nearLogEnergy));
+    aecm->farLogEnergy = 0;
+    memset(aecm->echoAdaptLogEnergy, 0, sizeof(aecm->echoAdaptLogEnergy));
+    memset(aecm->echoStoredLogEnergy, 0, sizeof(aecm->echoStoredLogEnergy));
+
+    // Initialize the echo channels with a stored shape.
+    if (samplingFreq == 8000)
+    {
+        WebRtcAecm_InitEchoPathCore(aecm, kChannelStored8kHz);
+    }
+    else
+    {
+        WebRtcAecm_InitEchoPathCore(aecm, kChannelStored16kHz);
+    }
+
+    memset(aecm->echoFilt, 0, sizeof(aecm->echoFilt));
+    memset(aecm->nearFilt, 0, sizeof(aecm->nearFilt));
+    aecm->noiseEstCtr = 0;
+
+    // Comfort noise generation enabled by default.
+    aecm->cngMode = AecmTrue;
+
+    memset(aecm->noiseEstTooLowCtr, 0, sizeof(aecm->noiseEstTooLowCtr));
+    memset(aecm->noiseEstTooHighCtr, 0, sizeof(aecm->noiseEstTooHighCtr));
+    // Shape the initial noise level to an approximate pink noise:
+    // decreasing power toward higher bins for the lower half of the
+    // spectrum, then flat for the upper half.
+    for (i = 0; i < (PART_LEN1 >> 1) - 1; i++)
+    {
+        aecm->noiseEst[i] = (tmp32 << 8);
+        tmp16--;
+        tmp32 -= (WebRtc_Word32)((tmp16 << 1) + 1);
+    }
+    for (; i < PART_LEN1; i++)
+    {
+        aecm->noiseEst[i] = (tmp32 << 8);
+    }
+
+    // Start min/max trackers at the opposite extremes so the first samples
+    // take over immediately.
+    aecm->farEnergyMin = WEBRTC_SPL_WORD16_MAX;
+    aecm->farEnergyMax = WEBRTC_SPL_WORD16_MIN;
+    aecm->farEnergyMaxMin = 0;
+    aecm->farEnergyVAD = FAR_ENERGY_MIN; // This prevents false speech detection at the
+                                         // beginning.
+    aecm->farEnergyMSE = 0;
+    aecm->currentVADValue = 0;
+    aecm->vadUpdateCount = 0;
+    aecm->firstVAD = 1;
+
+    aecm->startupState = 0;
+    aecm->supGain = SUPGAIN_DEFAULT;
+    aecm->supGainOld = SUPGAIN_DEFAULT;
+
+    aecm->supGainErrParamA = SUPGAIN_ERROR_PARAM_A;
+    aecm->supGainErrParamD = SUPGAIN_ERROR_PARAM_D;
+    aecm->supGainErrParamDiffAB = SUPGAIN_ERROR_PARAM_A - SUPGAIN_ERROR_PARAM_B;
+    aecm->supGainErrParamDiffBD = SUPGAIN_ERROR_PARAM_B - SUPGAIN_ERROR_PARAM_D;
+
+    // Presumably required by the optimized (NEON) implementations below;
+    // confirm against the platform-specific code.
+    assert(PART_LEN % 16 == 0);
+
+    // Initialize function pointers.
+    WebRtcAecm_WindowAndFFT = WindowAndFFTC;
+    WebRtcAecm_InverseFFTAndWindow = InverseFFTAndWindowC;
+    WebRtcAecm_CalcLinearEnergies = CalcLinearEnergiesC;
+    WebRtcAecm_StoreAdaptiveChannel = StoreAdaptiveChannelC;
+    WebRtcAecm_ResetAdaptiveChannel = ResetAdaptiveChannelC;
+
+#ifdef WEBRTC_DETECT_ARM_NEON
+    // Run-time NEON detection: swap in the NEON implementations if the CPU
+    // supports them.
+    uint64_t features = WebRtc_GetCPUFeaturesARM();
+    if ((features & kCPUFeatureNEON) != 0)
+    {
+        WebRtcAecm_InitNeon();
+    }
+#elif defined(WEBRTC_ARCH_ARM_NEON)
+    WebRtcAecm_InitNeon();
+#endif
+
+    return 0;
+}
+
+// TODO(bjornv): This function is currently not used. Add support for these
+// parameters from a higher level
+int WebRtcAecm_Control(AecmCore_t *aecm, int delay, int nlpFlag)
+{
+    // Store the caller-supplied configuration on the core instance:
+    // a fixed far-end delay and the NLP enable flag.
+    aecm->fixedDelay = delay;
+    aecm->nlpFlag = nlpFlag;
+
+    return 0;
+}
+
+// Releases all resources owned by the AECM core instance, then the instance
+// itself. Returns 0 on success, -1 for a NULL handle.
+int WebRtcAecm_FreeCore(AecmCore_t *aecm)
+{
+    // Releasing a NULL handle is reported as an error, not a no-op.
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+    // Tear down every owned buffer and the delay estimator.
+    WebRtc_FreeBuffer(aecm->farFrameBuf);
+    WebRtc_FreeBuffer(aecm->nearNoisyFrameBuf);
+    WebRtc_FreeBuffer(aecm->nearCleanFrameBuf);
+    WebRtc_FreeBuffer(aecm->outFrameBuf);
+    WebRtc_FreeDelayEstimator(aecm->delay_estimator);
+
+    free(aecm);
+
+    return 0;
+}
+
+// WebRtcAecm_ProcessFrame(...)
+//
+// Buffers one FRAME_LEN frame of far-/near-end samples, runs
+// WebRtcAecm_ProcessBlock() on every complete PART_LEN block available, and
+// assembles one FRAME_LEN frame of echo-suppressed output.
+//
+// Inputs:
+//      - aecm          : Handle of the AECM instance.
+//      - farend        : FRAME_LEN far-end samples.
+//      - nearendNoisy  : FRAME_LEN noisy near-end samples.
+//      - nearendClean  : FRAME_LEN clean near-end samples, or NULL.
+// Output:
+//      - out           : FRAME_LEN output samples.
+//
+// Return value         :  0 - Ok
+//                        -1 - Error (propagated from WebRtcAecm_ProcessBlock)
+//
+int WebRtcAecm_ProcessFrame(AecmCore_t * aecm,
+                            const WebRtc_Word16 * farend,
+                            const WebRtc_Word16 * nearendNoisy,
+                            const WebRtc_Word16 * nearendClean,
+                            WebRtc_Word16 * out)
+{
+    // The 8 spare Word16 elements (16 bytes) give the alignment rounding
+    // below the slack it needs (at most 15 bytes).
+    WebRtc_Word16 outBlock_buf[PART_LEN + 8]; // Align buffer to a 16-byte boundary.
+    // Round the pointer up to the next 16-byte boundary (+15 & ~15).
+    WebRtc_Word16* outBlock = (WebRtc_Word16*) (((uintptr_t) outBlock_buf + 15) & ~ 15);
+
+    WebRtc_Word16 farFrame[FRAME_LEN];
+    const int16_t* out_ptr = NULL;
+    int size = 0;
+
+    // Buffer the current frame.
+    // Fetch an older one corresponding to the delay.
+    WebRtcAecm_BufferFarFrame(aecm, farend, FRAME_LEN);
+    WebRtcAecm_FetchFarFrame(aecm, farFrame, FRAME_LEN, aecm->knownDelay);
+
+    // Buffer the synchronized far and near frames,
+    // to pass the smaller blocks individually.
+    WebRtc_WriteBuffer(aecm->farFrameBuf, farFrame, FRAME_LEN);
+    WebRtc_WriteBuffer(aecm->nearNoisyFrameBuf, nearendNoisy, FRAME_LEN);
+    if (nearendClean != NULL)
+    {
+        WebRtc_WriteBuffer(aecm->nearCleanFrameBuf, nearendClean, FRAME_LEN);
+    }
+
+    // Process as many blocks as possible.
+    while (WebRtc_available_read(aecm->farFrameBuf) >= PART_LEN)
+    {
+        int16_t far_block[PART_LEN];
+        const int16_t* far_block_ptr = NULL;
+        int16_t near_noisy_block[PART_LEN];
+        const int16_t* near_noisy_block_ptr = NULL;
+
+        // ReadBuffer() may hand back a pointer into its internal storage
+        // rather than copying into the stack array; always use the returned
+        // pointer (see the |out_ptr != out| check below).
+        WebRtc_ReadBuffer(aecm->farFrameBuf, (void**) &far_block_ptr, far_block,
+                          PART_LEN);
+        WebRtc_ReadBuffer(aecm->nearNoisyFrameBuf,
+                          (void**) &near_noisy_block_ptr,
+                          near_noisy_block,
+                          PART_LEN);
+        if (nearendClean != NULL)
+        {
+            int16_t near_clean_block[PART_LEN];
+            const int16_t* near_clean_block_ptr = NULL;
+
+            WebRtc_ReadBuffer(aecm->nearCleanFrameBuf,
+                              (void**) &near_clean_block_ptr,
+                              near_clean_block,
+                              PART_LEN);
+            if (WebRtcAecm_ProcessBlock(aecm,
+                                        far_block_ptr,
+                                        near_noisy_block_ptr,
+                                        near_clean_block_ptr,
+                                        outBlock) == -1)
+            {
+                return -1;
+            }
+        } else
+        {
+            if (WebRtcAecm_ProcessBlock(aecm,
+                                        far_block_ptr,
+                                        near_noisy_block_ptr,
+                                        NULL,
+                                        outBlock) == -1)
+            {
+                return -1;
+            }
+        }
+
+        WebRtc_WriteBuffer(aecm->outFrameBuf, outBlock, PART_LEN);
+    }
+
+    // Stuff the out buffer if we have less than a frame to output.
+    // This should only happen for the first frame.
+    size = (int) WebRtc_available_read(aecm->outFrameBuf);
+    if (size < FRAME_LEN)
+    {
+        WebRtc_MoveReadPtr(aecm->outFrameBuf, size - FRAME_LEN);
+    }
+
+    // Obtain an output frame.
+    WebRtc_ReadBuffer(aecm->outFrameBuf, (void**) &out_ptr, out, FRAME_LEN);
+    if (out_ptr != out) {
+      // ReadBuffer() hasn't copied to |out| in this case.
+      memcpy(out, out_ptr, FRAME_LEN * sizeof(int16_t));
+    }
+
+    return 0;
+}
+
+// WebRtcAecm_AsymFilt(...)
+//
+// Performs asymmetric filtering.
+//
+// Inputs:
+//      - filtOld       : Previous filtered value.
+//      - inVal         : New input value.
+//      - stepSizePos   : Step size when we have a positive contribution.
+//      - stepSizeNeg   : Step size when we have a negative contribution.
+//
+// Output:
+//
+// Return: - Filtered value.
+//
+// Performs asymmetric first-order filtering: the update step toward the new
+// input is a right shift of the difference, with independent shift counts for
+// positive and negative contributions (larger shift = slower tracking).
+//
+// Inputs:
+//      - filtOld       : Previous filtered value.
+//      - inVal         : New input value.
+//      - stepSizePos   : Shift count when the input is above the state.
+//      - stepSizeNeg   : Shift count when the input is below the state.
+//
+// Return: the filtered value.
+WebRtc_Word16 WebRtcAecm_AsymFilt(const WebRtc_Word16 filtOld, const WebRtc_Word16 inVal,
+                                  const WebRtc_Word16 stepSizePos,
+                                  const WebRtc_Word16 stepSizeNeg)
+{
+    WebRtc_Word16 retVal;
+
+    // A saturated state is treated as uninitialized/unreliable: restart the
+    // filter from the current input. (Logical || replaces the original
+    // bitwise |; the operands are 0/1 comparison results, so the value is
+    // identical, but || is idiomatic and short-circuits.)
+    if ((filtOld == WEBRTC_SPL_WORD16_MAX) || (filtOld == WEBRTC_SPL_WORD16_MIN))
+    {
+        return inVal;
+    }
+    retVal = filtOld;
+    if (filtOld > inVal)
+    {
+        retVal -= WEBRTC_SPL_RSHIFT_W16(filtOld - inVal, stepSizeNeg);
+    } else
+    {
+        retVal += WEBRTC_SPL_RSHIFT_W16(inVal - filtOld, stepSizePos);
+    }
+
+    return retVal;
+}
+
+// WebRtcAecm_CalcEnergies(...)
+//
+// This function calculates the log of energies for nearend, farend and estimated
+// echoes. There is also an update of energy decision levels, i.e. internal VAD.
+//
+//
+// @param  aecm         [i/o]   Handle of the AECM instance.
+// @param  far_spectrum [in]    Pointer to farend spectrum.
+// @param  far_q        [in]    Q-domain of farend spectrum.
+// @param  nearEner     [in]    Near end energy for current block in
+//                              Q(aecm->dfaQDomain).
+// @param  echoEst      [out]   Estimated echo in Q(xfa_q+RESOLUTION_CHANNEL16).
+//
+void WebRtcAecm_CalcEnergies(AecmCore_t * aecm,
+                             const WebRtc_UWord16* far_spectrum,
+                             const WebRtc_Word16 far_q,
+                             const WebRtc_UWord32 nearEner,
+                             WebRtc_Word32 * echoEst)
+{
+    // Local variables
+    WebRtc_UWord32 tmpAdapt = 0;
+    WebRtc_UWord32 tmpStored = 0;
+    WebRtc_UWord32 tmpFar = 0;
+
+    int i;
+
+    WebRtc_Word16 zeros, frac;
+    WebRtc_Word16 tmp16;
+    // Shift counts for the asymmetric min/max energy trackers (larger shift
+    // = slower tracking); tightened below during startup.
+    WebRtc_Word16 increase_max_shifts = 4;
+    WebRtc_Word16 decrease_max_shifts = 11;
+    WebRtc_Word16 increase_min_shifts = 11;
+    WebRtc_Word16 decrease_min_shifts = 3;
+    // Base value for all log energies below (Q8).
+    WebRtc_Word16 kLogLowValue = WEBRTC_SPL_LSHIFT_W16(PART_LEN_SHIFT, 7);
+
+    // Get log of near end energy and store in buffer
+
+    // Shift buffer
+    memmove(aecm->nearLogEnergy + 1, aecm->nearLogEnergy,
+            sizeof(WebRtc_Word16) * (MAX_BUF_LEN - 1));
+
+    // Logarithm of integrated magnitude spectrum (nearEner)
+    tmp16 = kLogLowValue;
+    if (nearEner)
+    {
+        // log2(nearEner) in Q8: (31 - zeros) is the integer part, |frac| the
+        // fractional part taken from the normalized mantissa; then
+        // compensate for the Q-domain of the near-end data.
+        zeros = WebRtcSpl_NormU32(nearEner);
+        frac = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(
+                              (WEBRTC_SPL_LSHIFT_U32(nearEner, zeros) & 0x7FFFFFFF),
+                              23);
+        // log2 in Q8
+        tmp16 += WEBRTC_SPL_LSHIFT_W16((31 - zeros), 8) + frac;
+        tmp16 -= WEBRTC_SPL_LSHIFT_W16(aecm->dfaNoisyQDomain, 8);
+    }
+    aecm->nearLogEnergy[0] = tmp16;
+    // END: Get log of near end energy
+
+    // Linear-domain energies: far end, and echo through the adaptive and the
+    // stored channel (also recomputes |echoEst|).
+    WebRtcAecm_CalcLinearEnergies(aecm, far_spectrum, echoEst, &tmpFar, &tmpAdapt, &tmpStored);
+
+    // Shift buffers
+    memmove(aecm->echoAdaptLogEnergy + 1, aecm->echoAdaptLogEnergy,
+            sizeof(WebRtc_Word16) * (MAX_BUF_LEN - 1));
+    memmove(aecm->echoStoredLogEnergy + 1, aecm->echoStoredLogEnergy,
+            sizeof(WebRtc_Word16) * (MAX_BUF_LEN - 1));
+
+    // Logarithm of delayed far end energy
+    tmp16 = kLogLowValue;
+    if (tmpFar)
+    {
+        zeros = WebRtcSpl_NormU32(tmpFar);
+        frac = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32((WEBRTC_SPL_LSHIFT_U32(tmpFar, zeros)
+                        & 0x7FFFFFFF), 23);
+        // log2 in Q8
+        tmp16 += WEBRTC_SPL_LSHIFT_W16((31 - zeros), 8) + frac;
+        tmp16 -= WEBRTC_SPL_LSHIFT_W16(far_q, 8);
+    }
+    aecm->farLogEnergy = tmp16;
+
+    // Logarithm of estimated echo energy through adapted channel
+    tmp16 = kLogLowValue;
+    if (tmpAdapt)
+    {
+        zeros = WebRtcSpl_NormU32(tmpAdapt);
+        frac = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32((WEBRTC_SPL_LSHIFT_U32(tmpAdapt, zeros)
+                        & 0x7FFFFFFF), 23);
+        //log2 in Q8
+        tmp16 += WEBRTC_SPL_LSHIFT_W16((31 - zeros), 8) + frac;
+        tmp16 -= WEBRTC_SPL_LSHIFT_W16(RESOLUTION_CHANNEL16 + far_q, 8);
+    }
+    aecm->echoAdaptLogEnergy[0] = tmp16;
+
+    // Logarithm of estimated echo energy through stored channel
+    tmp16 = kLogLowValue;
+    if (tmpStored)
+    {
+        zeros = WebRtcSpl_NormU32(tmpStored);
+        frac = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32((WEBRTC_SPL_LSHIFT_U32(tmpStored, zeros)
+                        & 0x7FFFFFFF), 23);
+        //log2 in Q8
+        tmp16 += WEBRTC_SPL_LSHIFT_W16((31 - zeros), 8) + frac;
+        tmp16 -= WEBRTC_SPL_LSHIFT_W16(RESOLUTION_CHANNEL16 + far_q, 8);
+    }
+    aecm->echoStoredLogEnergy[0] = tmp16;
+
+    // Update farend energy levels (min, max, vad, mse)
+    if (aecm->farLogEnergy > FAR_ENERGY_MIN)
+    {
+        if (aecm->startupState == 0)
+        {
+            // Track faster during startup.
+            increase_max_shifts = 2;
+            decrease_min_shifts = 2;
+            increase_min_shifts = 8;
+        }
+
+        aecm->farEnergyMin = WebRtcAecm_AsymFilt(aecm->farEnergyMin, aecm->farLogEnergy,
+                                                 increase_min_shifts, decrease_min_shifts);
+        aecm->farEnergyMax = WebRtcAecm_AsymFilt(aecm->farEnergyMax, aecm->farLogEnergy,
+                                                 increase_max_shifts, decrease_max_shifts);
+        aecm->farEnergyMaxMin = (aecm->farEnergyMax - aecm->farEnergyMin);
+
+        // Dynamic VAD region size
+        tmp16 = 2560 - aecm->farEnergyMin;
+        if (tmp16 > 0)
+        {
+            tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16, FAR_ENERGY_VAD_REGION, 9);
+        } else
+        {
+            tmp16 = 0;
+        }
+        tmp16 += FAR_ENERGY_VAD_REGION;
+
+        // NOTE(review): bitwise | on boolean comparison results (works, but
+        // || would be conventional).
+        if ((aecm->startupState == 0) | (aecm->vadUpdateCount > 1024))
+        {
+            // In startup phase or VAD update halted
+            aecm->farEnergyVAD = aecm->farEnergyMin + tmp16;
+        } else
+        {
+            if (aecm->farEnergyVAD > aecm->farLogEnergy)
+            {
+                // Relax the VAD threshold slowly (1/64 of the gap per call).
+                aecm->farEnergyVAD += WEBRTC_SPL_RSHIFT_W16(aecm->farLogEnergy +
+                                                            tmp16 -
+                                                            aecm->farEnergyVAD,
+                                                            6);
+                aecm->vadUpdateCount = 0;
+            } else
+            {
+                aecm->vadUpdateCount++;
+            }
+        }
+        // Put MSE threshold higher than VAD
+        aecm->farEnergyMSE = aecm->farEnergyVAD + (1 << 8);
+    }
+
+    // Update VAD variables
+    if (aecm->farLogEnergy > aecm->farEnergyVAD)
+    {
+        if ((aecm->startupState == 0) | (aecm->farEnergyMaxMin > FAR_ENERGY_DIFF))
+        {
+            // We are in startup or have significant dynamics in input speech level
+            aecm->currentVADValue = 1;
+        }
+    } else
+    {
+        aecm->currentVADValue = 0;
+    }
+    if ((aecm->currentVADValue) && (aecm->firstVAD))
+    {
+        aecm->firstVAD = 0;
+        if (aecm->echoAdaptLogEnergy[0] > aecm->nearLogEnergy[0])
+        {
+            // The estimated echo has higher energy than the near end signal.
+            // This means that the initialization was too aggressive. Scale
+            // down by a factor 8
+            for (i = 0; i < PART_LEN1; i++)
+            {
+                aecm->channelAdapt16[i] >>= 3;
+            }
+            // Compensate the adapted echo energy level accordingly.
+            // (3 << 8 is log2(8) in Q8.)
+            aecm->echoAdaptLogEnergy[0] -= (3 << 8);
+            aecm->firstVAD = 1;
+        }
+    }
+}
+
+// WebRtcAecm_CalcStepSize(...)
+//
+// This function calculates the step size used in channel estimation
+//
+//
+// @param  aecm  [in]    Handle of the AECM instance.
+// @param  mu    [out]   (Return value) Stepsize in log2(), i.e. number of shifts.
+//
+//
+WebRtc_Word16 WebRtcAecm_CalcStepSize(AecmCore_t * const aecm)
+{
+    WebRtc_Word32 scaled;
+    WebRtc_Word16 energy_offset;
+    WebRtc_Word16 mu;
+
+    // No far-end activity detected: freeze the channel (zero step size).
+    if (!aecm->currentVADValue)
+    {
+        return 0;
+    }
+
+    // During startup, always use the maximum step size.
+    if (aecm->startupState == 0)
+    {
+        return MU_MAX;
+    }
+
+    if (aecm->farEnergyMin >= aecm->farEnergyMax)
+    {
+        // Degenerate energy range: fall back to the minimum step size.
+        mu = MU_MIN;
+    } else
+    {
+        // Interpolate linearly between MU_MIN and MU_MAX based on where the
+        // current far-end level sits in the tracked [min, max] range. Note
+        // that |mu| is a shift count, so a smaller value is a larger step.
+        energy_offset = (aecm->farLogEnergy - aecm->farEnergyMin);
+        scaled = WEBRTC_SPL_MUL_16_16(energy_offset, MU_DIFF);
+        scaled = WebRtcSpl_DivW32W16(scaled, aecm->farEnergyMaxMin);
+        mu = MU_MIN - 1 - (WebRtc_Word16)(scaled);
+        // The -1 is an alternative to rounding. This way we get a larger
+        // stepsize, so we in some sense compensate for truncation in NLMS
+    }
+    if (mu < MU_MAX)
+    {
+        mu = MU_MAX; // Equivalent with maximum step size of 2^-MU_MAX
+    }
+
+    return mu;
+}
+
+// WebRtcAecm_UpdateChannel(...)
+//
+// This function performs channel estimation. NLMS and decision on channel storage.
+//
+//
+// @param  aecm         [i/o]   Handle of the AECM instance.
+// @param  far_spectrum [in]    Absolute value of the farend signal in Q(far_q)
+// @param  far_q        [in]    Q-domain of the farend signal
+// @param  dfa          [in]    Absolute value of the nearend signal (Q[aecm->dfaQDomain])
+// @param  mu           [in]    NLMS step size.
+// @param  echoEst      [i/o]   Estimated echo in Q(far_q+RESOLUTION_CHANNEL16).
+//
+void WebRtcAecm_UpdateChannel(AecmCore_t * aecm,
+                              const WebRtc_UWord16* far_spectrum,
+                              const WebRtc_Word16 far_q,
+                              const WebRtc_UWord16 * const dfa,
+                              const WebRtc_Word16 mu,
+                              WebRtc_Word32 * echoEst)
+{
+
+    WebRtc_UWord32 tmpU32no1, tmpU32no2;
+    WebRtc_Word32 tmp32no1, tmp32no2;
+    WebRtc_Word32 mseStored;
+    WebRtc_Word32 mseAdapt;
+
+    int i;
+
+    WebRtc_Word16 zerosFar, zerosNum, zerosCh, zerosDfa;
+    WebRtc_Word16 shiftChFar, shiftNum, shift2ResChan;
+    WebRtc_Word16 tmp16no1;
+    WebRtc_Word16 xfaQ, dfaQ;
+
+    // This is the channel estimation algorithm. It is base on NLMS but has a variable step
+    // length, which was calculated above. A zero |mu| means "no update"
+    // (see WebRtcAecm_CalcStepSize()).
+    if (mu)
+    {
+        for (i = 0; i < PART_LEN1; i++)
+        {
+            // Determine norm of channel and farend to make sure we don't get overflow in
+            // multiplication
+            zerosCh = WebRtcSpl_NormU32(aecm->channelAdapt32[i]);
+            zerosFar = WebRtcSpl_NormU32((WebRtc_UWord32)far_spectrum[i]);
+            if (zerosCh + zerosFar > 31)
+            {
+                // Multiplication is safe
+                tmpU32no1 = WEBRTC_SPL_UMUL_32_16(aecm->channelAdapt32[i],
+                        far_spectrum[i]);
+                shiftChFar = 0;
+            } else
+            {
+                // We need to shift down before multiplication
+                shiftChFar = 32 - zerosCh - zerosFar;
+                tmpU32no1 = WEBRTC_SPL_UMUL_32_16(
+                    WEBRTC_SPL_RSHIFT_W32(aecm->channelAdapt32[i], shiftChFar),
+                    far_spectrum[i]);
+            }
+            // Determine Q-domain of numerator
+            zerosNum = WebRtcSpl_NormU32(tmpU32no1);
+            if (dfa[i])
+            {
+                zerosDfa = WebRtcSpl_NormU32((WebRtc_UWord32)dfa[i]);
+            } else
+            {
+                zerosDfa = 32;
+            }
+            // Choose a common Q-domain for the estimation error
+            // dfa[i] - channel * far, maximizing precision without overflow.
+            tmp16no1 = zerosDfa - 2 + aecm->dfaNoisyQDomain -
+                RESOLUTION_CHANNEL32 - far_q + shiftChFar;
+            if (zerosNum > tmp16no1 + 1)
+            {
+                xfaQ = tmp16no1;
+                dfaQ = zerosDfa - 2;
+            } else
+            {
+                xfaQ = zerosNum - 2;
+                dfaQ = RESOLUTION_CHANNEL32 + far_q - aecm->dfaNoisyQDomain -
+                    shiftChFar + xfaQ;
+            }
+            // Add in the same Q-domain
+            tmpU32no1 = WEBRTC_SPL_SHIFT_W32(tmpU32no1, xfaQ);
+            tmpU32no2 = WEBRTC_SPL_SHIFT_W32((WebRtc_UWord32)dfa[i], dfaQ);
+            tmp32no1 = (WebRtc_Word32)tmpU32no2 - (WebRtc_Word32)tmpU32no1;
+            zerosNum = WebRtcSpl_NormW32(tmp32no1);
+            // Only update when there is an error and the far-end bin is
+            // above the per-bin VAD threshold (CHANNEL_VAD in Q(far_q)).
+            if ((tmp32no1) && (far_spectrum[i] > (CHANNEL_VAD << far_q)))
+            {
+                //
+                // Update is needed
+                //
+                // This is what we would like to compute
+                //
+                // tmp32no1 = dfa[i] - (aecm->channelAdapt[i] * far_spectrum[i])
+                // tmp32norm = (i + 1)
+                // aecm->channelAdapt[i] += (2^mu) * tmp32no1
+                //                        / (tmp32norm * far_spectrum[i])
+                //
+
+                // Make sure we don't get overflow in multiplication.
+                if (zerosNum + zerosFar > 31)
+                {
+                    if (tmp32no1 > 0)
+                    {
+                        tmp32no2 = (WebRtc_Word32)WEBRTC_SPL_UMUL_32_16(tmp32no1,
+                                                                        far_spectrum[i]);
+                    } else
+                    {
+                        // Negate first so the unsigned multiply sees a
+                        // non-negative value, then restore the sign.
+                        tmp32no2 = -(WebRtc_Word32)WEBRTC_SPL_UMUL_32_16(-tmp32no1,
+                                                                         far_spectrum[i]);
+                    }
+                    shiftNum = 0;
+                } else
+                {
+                    shiftNum = 32 - (zerosNum + zerosFar);
+                    if (tmp32no1 > 0)
+                    {
+                        tmp32no2 = (WebRtc_Word32)WEBRTC_SPL_UMUL_32_16(
+                                WEBRTC_SPL_RSHIFT_W32(tmp32no1, shiftNum),
+                                far_spectrum[i]);
+                    } else
+                    {
+                        tmp32no2 = -(WebRtc_Word32)WEBRTC_SPL_UMUL_32_16(
+                                WEBRTC_SPL_RSHIFT_W32(-tmp32no1, shiftNum),
+                                far_spectrum[i]);
+                    }
+                }
+                // Normalize with respect to frequency bin
+                tmp32no2 = WebRtcSpl_DivW32W16(tmp32no2, i + 1);
+                // Make sure we are in the right Q-domain
+                shift2ResChan = shiftNum + shiftChFar - xfaQ - mu - ((30 - zerosFar) << 1);
+                if (WebRtcSpl_NormW32(tmp32no2) < shift2ResChan)
+                {
+                    // Shifting would overflow; saturate the update term.
+                    tmp32no2 = WEBRTC_SPL_WORD32_MAX;
+                } else
+                {
+                    tmp32no2 = WEBRTC_SPL_SHIFT_W32(tmp32no2, shift2ResChan);
+                }
+                aecm->channelAdapt32[i] = WEBRTC_SPL_ADD_SAT_W32(aecm->channelAdapt32[i],
+                        tmp32no2);
+                if (aecm->channelAdapt32[i] < 0)
+                {
+                    // We can never have negative channel gain
+                    aecm->channelAdapt32[i] = 0;
+                }
+                // Keep the 16-bit shadow copy in sync (high half).
+                aecm->channelAdapt16[i]
+                        = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(aecm->channelAdapt32[i], 16);
+            }
+        }
+    }
+    // END: Adaptive channel update
+
+    // Determine if we should store or restore the channel
+    // NOTE(review): bitwise & on boolean operands (works, but && is
+    // conventional).
+    if ((aecm->startupState == 0) & (aecm->currentVADValue))
+    {
+        // During startup we store the channel every block,
+        // and we recalculate echo estimate
+        WebRtcAecm_StoreAdaptiveChannel(aecm, far_spectrum, echoEst);
+    } else
+    {
+        // Count consecutive blocks with far-end energy above the MSE
+        // threshold; only those are useful for channel validation.
+        if (aecm->farLogEnergy < aecm->farEnergyMSE)
+        {
+            aecm->mseChannelCount = 0;
+        } else
+        {
+            aecm->mseChannelCount++;
+        }
+        // Enough data for validation. Store channel if we can.
+        if (aecm->mseChannelCount >= (MIN_MSE_COUNT + 10))
+        {
+            // We have enough data.
+            // Calculate MSE of "Adapt" and "Stored" versions.
+            // It is actually not MSE, but average absolute error.
+            mseStored = 0;
+            mseAdapt = 0;
+            for (i = 0; i < MIN_MSE_COUNT; i++)
+            {
+                tmp32no1 = ((WebRtc_Word32)aecm->echoStoredLogEnergy[i]
+                        - (WebRtc_Word32)aecm->nearLogEnergy[i]);
+                tmp32no2 = WEBRTC_SPL_ABS_W32(tmp32no1);
+                mseStored += tmp32no2;
+
+                tmp32no1 = ((WebRtc_Word32)aecm->echoAdaptLogEnergy[i]
+                        - (WebRtc_Word32)aecm->nearLogEnergy[i]);
+                tmp32no2 = WEBRTC_SPL_ABS_W32(tmp32no1);
+                mseAdapt += tmp32no2;
+            }
+            if (((mseStored << MSE_RESOLUTION) < (MIN_MSE_DIFF * mseAdapt))
+                    & ((aecm->mseStoredOld << MSE_RESOLUTION) < (MIN_MSE_DIFF
+                            * aecm->mseAdaptOld)))
+            {
+                // The stored channel has a significantly lower MSE than the adaptive one for
+                // two consecutive calculations. Reset the adaptive channel.
+                WebRtcAecm_ResetAdaptiveChannel(aecm);
+            } else if (((MIN_MSE_DIFF * mseStored) > (mseAdapt << MSE_RESOLUTION)) & (mseAdapt
+                    < aecm->mseThreshold) & (aecm->mseAdaptOld < aecm->mseThreshold))
+            {
+                // The adaptive channel has a significantly lower MSE than the stored one.
+                // The MSE for the adaptive channel has also been low for two consecutive
+                // calculations. Store the adaptive channel.
+                WebRtcAecm_StoreAdaptiveChannel(aecm, far_spectrum, echoEst);
+
+                // Update threshold
+                if (aecm->mseThreshold == WEBRTC_SPL_WORD32_MAX)
+                {
+                    aecm->mseThreshold = (mseAdapt + aecm->mseAdaptOld);
+                } else
+                {
+                    // Exponential tracking of the threshold (factor 205/256).
+                    aecm->mseThreshold += WEBRTC_SPL_MUL_16_16_RSFT(mseAdapt
+                            - WEBRTC_SPL_MUL_16_16_RSFT(aecm->mseThreshold, 5, 3), 205, 8);
+                }
+
+            }
+
+            // Reset counter
+            aecm->mseChannelCount = 0;
+
+            // Store the MSE values.
+            aecm->mseStoredOld = mseStored;
+            aecm->mseAdaptOld = mseAdapt;
+        }
+    }
+    // END: Determine if we should store or reset channel estimate.
+}
+
+// CalcSuppressionGain(...)
+//
+// This function calculates the suppression gain that is used in the Wiener filter.
+//
+//
+// @param  aecm     [in]    Handle of the AECM instance.
+// @return                  Suppression gain with which to scale the noise
+//                          level (Q14).
+//
+//
+static WebRtc_Word16 CalcSuppressionGain(AecmCore_t * const aecm)
+{
+    WebRtc_Word32 tmp32no1;
+
+    WebRtc_Word16 supGain = SUPGAIN_DEFAULT;
+    WebRtc_Word16 tmp16no1;
+    WebRtc_Word16 dE = 0;
+
+    // Determine suppression gain used in the Wiener filter. The gain is based on a mix of far
+    // end energy and echo estimation error.
+    // Adjust for the far end signal level. A low signal level indicates no far end signal,
+    // hence we set the suppression gain to 0
+    if (!aecm->currentVADValue)
+    {
+        supGain = 0;
+    } else
+    {
+        // Adjust for possible double talk. If we have large variations in estimation error we
+        // likely have double talk (or poor channel).
+        tmp16no1 = (aecm->nearLogEnergy[0] - aecm->echoStoredLogEnergy[0] - ENERGY_DEV_OFFSET);
+        dE = WEBRTC_SPL_ABS_W16(tmp16no1);
+
+        if (dE < ENERGY_DEV_TOL)
+        {
+            // Likely no double talk. The better estimation, the more we can suppress signal.
+            // Interpolate the gain from the error-curve parameters; the
+            // "+ (divisor >> 1)" terms round the fixed-point divisions.
+            if (dE < SUPGAIN_EPC_DT)
+            {
+                tmp32no1 = WEBRTC_SPL_MUL_16_16(aecm->supGainErrParamDiffAB, dE);
+                tmp32no1 += (SUPGAIN_EPC_DT >> 1);
+                tmp16no1 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32no1, SUPGAIN_EPC_DT);
+                supGain = aecm->supGainErrParamA - tmp16no1;
+            } else
+            {
+                tmp32no1 = WEBRTC_SPL_MUL_16_16(aecm->supGainErrParamDiffBD,
+                                                (ENERGY_DEV_TOL - dE));
+                tmp32no1 += ((ENERGY_DEV_TOL - SUPGAIN_EPC_DT) >> 1);
+                tmp16no1 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32no1, (ENERGY_DEV_TOL
+                        - SUPGAIN_EPC_DT));
+                supGain = aecm->supGainErrParamD + tmp16no1;
+            }
+        } else
+        {
+            // Likely in double talk. Use default value
+            supGain = aecm->supGainErrParamD;
+        }
+    }
+
+    // Update suppression gain: smooth aecm->supGain towards the maximum of the
+    // current and previous instantaneous gains, moving 1/16 of the distance
+    // per block.
+    if (supGain > aecm->supGainOld)
+    {
+        tmp16no1 = supGain;
+    } else
+    {
+        tmp16no1 = aecm->supGainOld;
+    }
+    aecm->supGainOld = supGain;
+    // The original code tested (tmp16no1 < aecm->supGain) but executed the
+    // identical statement in both branches; collapsed to a single update.
+    // The arithmetic right shift of a negative difference steps the gain
+    // downwards, so both directions are handled by the one expression.
+    aecm->supGain += (WebRtc_Word16)((tmp16no1 - aecm->supGain) >> 4);
+
+    // END: Update suppression gain
+
+    return aecm->supGain;
+}
+
+// Transforms a time domain signal into the frequency domain, outputting the
+// complex valued signal, absolute value and sum of absolute values.
+//
+// time_signal          [in]    Pointer to time domain signal
+// freq_signal          [out]   Pointer to complex frequency domain array
+//                              (PART_LEN1 bins; bins 0 and PART_LEN are
+//                              forced real)
+// freq_signal_abs      [out]   Pointer to absolute value of frequency domain
+//                              array
+// freq_signal_sum_abs  [out]   Pointer to the sum of all absolute values in
+//                              the frequency domain array
+// return value                 The Q-domain of current frequency values
+//
+static int TimeToFrequencyDomain(const WebRtc_Word16* time_signal,
+                                 complex16_t* freq_signal,
+                                 WebRtc_UWord16* freq_signal_abs,
+                                 WebRtc_UWord32* freq_signal_sum_abs)
+{
+    int i = 0;
+    int time_signal_scaling = 0;
+
+    WebRtc_Word32 tmp32no1;
+    WebRtc_Word32 tmp32no2;
+
+    // In fft_buf, +16 for 32-byte alignment.
+    WebRtc_Word16 fft_buf[PART_LEN4 + 16];
+    WebRtc_Word16 *fft = (WebRtc_Word16 *) (((uintptr_t) fft_buf + 31) & ~31);
+
+    WebRtc_Word16 tmp16no1;
+    WebRtc_Word16 tmp16no2;
+#ifdef AECM_WITH_ABS_APPROX
+    WebRtc_Word16 max_value = 0;
+    WebRtc_Word16 min_value = 0;
+    WebRtc_UWord16 alpha = 0;
+    WebRtc_UWord16 beta = 0;
+#endif
+
+#ifdef AECM_DYNAMIC_Q
+    // Dynamic Q-domain: derive a scaling factor from the largest input sample
+    // (presumably the number of safe left shifts — confirm against
+    // WebRtcSpl_NormW16) so the FFT uses the available 16-bit headroom.
+    tmp16no1 = WebRtcSpl_MaxAbsValueW16(time_signal, PART_LEN2);
+    time_signal_scaling = WebRtcSpl_NormW16(tmp16no1);
+#endif
+
+    WebRtcAecm_WindowAndFFT(fft, time_signal, freq_signal, time_signal_scaling);
+
+    // Extract imaginary and real part, calculate the magnitude for all frequency bins
+    // DC (bin 0) and Nyquist (bin PART_LEN) are purely real.
+    freq_signal[0].imag = 0;
+    freq_signal[PART_LEN].imag = 0;
+    freq_signal[PART_LEN].real = fft[PART_LEN2];
+    freq_signal_abs[0] = (WebRtc_UWord16)WEBRTC_SPL_ABS_W16(
+        freq_signal[0].real);
+    freq_signal_abs[PART_LEN] = (WebRtc_UWord16)WEBRTC_SPL_ABS_W16(
+        freq_signal[PART_LEN].real);
+    (*freq_signal_sum_abs) = (WebRtc_UWord32)(freq_signal_abs[0]) +
+        (WebRtc_UWord32)(freq_signal_abs[PART_LEN]);
+
+    for (i = 1; i < PART_LEN; i++)
+    {
+        // Purely imaginary or purely real bins need no magnitude computation.
+        if (freq_signal[i].real == 0)
+        {
+            freq_signal_abs[i] = (WebRtc_UWord16)WEBRTC_SPL_ABS_W16(
+                freq_signal[i].imag);
+        }
+        else if (freq_signal[i].imag == 0)
+        {
+            freq_signal_abs[i] = (WebRtc_UWord16)WEBRTC_SPL_ABS_W16(
+                freq_signal[i].real);
+        }
+        else
+        {
+            // Approximation for magnitude of complex fft output
+            // magn = sqrt(real^2 + imag^2)
+            // magn ~= alpha * max(|imag|,|real|) + beta * min(|imag|,|real|)
+            //
+            // The parameters alpha and beta are stored in Q15
+
+#ifdef AECM_WITH_ABS_APPROX
+            tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real);
+            tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag);
+
+            if(tmp16no1 > tmp16no2)
+            {
+                max_value = tmp16no1;
+                min_value = tmp16no2;
+            } else
+            {
+                max_value = tmp16no2;
+                min_value = tmp16no1;
+            }
+
+            // Magnitude in Q(-6)
+            // Pick alpha/beta depending on the max/min ratio (coarse angle).
+            if ((max_value >> 2) > min_value)
+            {
+                alpha = kAlpha1;
+                beta = kBeta1;
+            } else if ((max_value >> 1) > min_value)
+            {
+                alpha = kAlpha2;
+                beta = kBeta2;
+            } else
+            {
+                alpha = kAlpha3;
+                beta = kBeta3;
+            }
+            tmp16no1 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(max_value,
+                                                                alpha,
+                                                                15);
+            tmp16no2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(min_value,
+                                                                beta,
+                                                                15);
+            freq_signal_abs[i] = (WebRtc_UWord16)tmp16no1 +
+                (WebRtc_UWord16)tmp16no2;
+#else
+#ifdef WEBRTC_ARCH_ARM_V7A
+            // real^2 + imag^2 via SMULBB/SMLABB (16x16 signed multiplies).
+            __asm __volatile(
+              "smulbb %[tmp32no1], %[real], %[real]\n\t"
+              "smlabb %[tmp32no2], %[imag], %[imag], %[tmp32no1]\n\t"
+              :[tmp32no1]"=r"(tmp32no1),
+               [tmp32no2]"=r"(tmp32no2)
+              :[real]"r"(freq_signal[i].real),
+               [imag]"r"(freq_signal[i].imag)
+            );
+#else
+            // Exact magnitude: sqrt(real^2 + imag^2) with saturating add.
+            tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real);
+            tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag);
+            tmp32no1 = WEBRTC_SPL_MUL_16_16(tmp16no1, tmp16no1);
+            tmp32no2 = WEBRTC_SPL_MUL_16_16(tmp16no2, tmp16no2);
+            tmp32no2 = WEBRTC_SPL_ADD_SAT_W32(tmp32no1, tmp32no2);
+#endif // WEBRTC_ARCH_ARM_V7A
+            tmp32no1 = WebRtcSpl_SqrtFloor(tmp32no2);
+
+            freq_signal_abs[i] = (WebRtc_UWord16)tmp32no1;
+#endif // AECM_WITH_ABS_APPROX
+        }
+        (*freq_signal_sum_abs) += (WebRtc_UWord32)freq_signal_abs[i];
+    }
+
+    return time_signal_scaling;
+}
+
+// WebRtcAecm_ProcessBlock(...)
+//
+// Processes one PART_LEN-sample block: transforms far-end and near-end
+// signals to the frequency domain, estimates delay and the echo channel,
+// builds a Wiener suppression filter hnl[] in Q14, optionally applies NLP
+// and comfort noise, and inverse-transforms the result into |output|.
+//
+// @param  aecm          [i/o]  Handle of the AECM instance.
+// @param  farend        [in]   Far-end block (PART_LEN samples).
+// @param  nearendNoisy  [in]   Noisy near-end block (PART_LEN samples).
+// @param  nearendClean  [in]   Optional cleaned near-end block; may be NULL,
+//                              in which case the noisy spectrum is reused.
+// @param  output        [out]  Echo-suppressed output block.
+// @return 0 on success, -1 if delay estimation or far-spectrum alignment
+//         fails.
+//
+int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
+                            const WebRtc_Word16 * farend,
+                            const WebRtc_Word16 * nearendNoisy,
+                            const WebRtc_Word16 * nearendClean,
+                            WebRtc_Word16 * output)
+{
+    int i;
+
+    WebRtc_UWord32 xfaSum;
+    WebRtc_UWord32 dfaNoisySum;
+    WebRtc_UWord32 dfaCleanSum;
+    WebRtc_UWord32 echoEst32Gained;
+    WebRtc_UWord32 tmpU32;
+
+    WebRtc_Word32 tmp32no1;
+
+    WebRtc_UWord16 xfa[PART_LEN1];
+    WebRtc_UWord16 dfaNoisy[PART_LEN1];
+    WebRtc_UWord16 dfaClean[PART_LEN1];
+    WebRtc_UWord16* ptrDfaClean = dfaClean;
+    const WebRtc_UWord16* far_spectrum_ptr = NULL;
+
+    // 32 byte aligned buffers (with +8 or +16).
+    // TODO (kma): define fft with complex16_t.
+    WebRtc_Word16 fft_buf[PART_LEN4 + 2 + 16]; // +2 to make a loop safe.
+    WebRtc_Word32 echoEst32_buf[PART_LEN1 + 8];
+    WebRtc_Word32 dfw_buf[PART_LEN1 + 8];
+    WebRtc_Word32 efw_buf[PART_LEN1 + 8];
+
+    WebRtc_Word16* fft = (WebRtc_Word16*) (((uintptr_t) fft_buf + 31) & ~ 31);
+    WebRtc_Word32* echoEst32 = (WebRtc_Word32*) (((uintptr_t) echoEst32_buf + 31) & ~ 31);
+    complex16_t* dfw = (complex16_t*) (((uintptr_t) dfw_buf + 31) & ~ 31);
+    complex16_t* efw = (complex16_t*) (((uintptr_t) efw_buf + 31) & ~ 31);
+
+    WebRtc_Word16 hnl[PART_LEN1];
+    WebRtc_Word16 numPosCoef = 0;
+    WebRtc_Word16 nlpGain = ONE_Q14;
+    int delay;
+    WebRtc_Word16 tmp16no1;
+    WebRtc_Word16 tmp16no2;
+    WebRtc_Word16 mu;
+    WebRtc_Word16 supGain;
+    WebRtc_Word16 zeros32, zeros16;
+    WebRtc_Word16 zerosDBufNoisy, zerosDBufClean, zerosXBuf;
+    int far_q;
+    WebRtc_Word16 resolutionDiff, qDomainDiff;
+
+    const int kMinPrefBand = 4;
+    const int kMaxPrefBand = 24;
+    WebRtc_Word32 avgHnl32 = 0;
+
+#ifdef ARM_WINM_LOG_
+    DWORD temp;
+    static int flag0 = 0;
+    __int64 freq, start, end, diff__;
+    unsigned int milliseconds;
+#endif
+
+    // Determine startup state. There are three states:
+    // (0) the first CONV_LEN blocks
+    // (1) another CONV_LEN blocks
+    // (2) the rest
+
+    if (aecm->startupState < 2)
+    {
+        // Each comparison yields 0 or 1, so the sum directly encodes the state.
+        aecm->startupState = (aecm->totCount >= CONV_LEN) + (aecm->totCount >= CONV_LEN2);
+    }
+    // END: Determine startup state
+
+    // Buffer near and far end signals
+    memcpy(aecm->xBuf + PART_LEN, farend, sizeof(WebRtc_Word16) * PART_LEN);
+    memcpy(aecm->dBufNoisy + PART_LEN, nearendNoisy, sizeof(WebRtc_Word16) * PART_LEN);
+    if (nearendClean != NULL)
+    {
+        memcpy(aecm->dBufClean + PART_LEN, nearendClean, sizeof(WebRtc_Word16) * PART_LEN);
+    }
+
+#ifdef ARM_WINM_LOG_
+    // measure tick start
+    QueryPerformanceFrequency((LARGE_INTEGER*)&freq);
+    QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#endif
+
+    // Transform far end signal from time domain to frequency domain.
+    far_q = TimeToFrequencyDomain(aecm->xBuf,
+                                  dfw,
+                                  xfa,
+                                  &xfaSum);
+
+    // Transform noisy near end signal from time domain to frequency domain.
+    // NOTE: dfw is reused as scratch here; only dfaNoisy/dfaNoisySum survive.
+    zerosDBufNoisy = TimeToFrequencyDomain(aecm->dBufNoisy,
+                                           dfw,
+                                           dfaNoisy,
+                                           &dfaNoisySum);
+    aecm->dfaNoisyQDomainOld = aecm->dfaNoisyQDomain;
+    aecm->dfaNoisyQDomain = (WebRtc_Word16)zerosDBufNoisy;
+
+
+    if (nearendClean == NULL)
+    {
+        // No cleaned signal supplied: alias the noisy spectrum and Q-domains.
+        ptrDfaClean = dfaNoisy;
+        aecm->dfaCleanQDomainOld = aecm->dfaNoisyQDomainOld;
+        aecm->dfaCleanQDomain = aecm->dfaNoisyQDomain;
+        dfaCleanSum = dfaNoisySum;
+    } else
+    {
+        // Transform clean near end signal from time domain to frequency domain.
+        zerosDBufClean = TimeToFrequencyDomain(aecm->dBufClean,
+                                               dfw,
+                                               dfaClean,
+                                               &dfaCleanSum);
+        aecm->dfaCleanQDomainOld = aecm->dfaCleanQDomain;
+        aecm->dfaCleanQDomain = (WebRtc_Word16)zerosDBufClean;
+    }
+
+#ifdef ARM_WINM_LOG_
+    // measure tick end
+    QueryPerformanceCounter((LARGE_INTEGER*)&end);
+    diff__ = ((end - start) * 1000) / (freq/1000);
+    milliseconds = (unsigned int)(diff__ & 0xffffffff);
+    WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+    // measure tick start
+    QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#endif
+
+    // Get the delay
+    // Save far-end history and estimate delay
+    UpdateFarHistory(aecm, xfa, far_q);
+    delay = WebRtc_DelayEstimatorProcessFix(aecm->delay_estimator,
+                                            xfa,
+                                            dfaNoisy,
+                                            PART_LEN1,
+                                            far_q,
+                                            zerosDBufNoisy);
+    if (delay == -1)
+    {
+        // Hard failure in the delay estimator.
+        return -1;
+    }
+    else if (delay == -2)
+    {
+        // If the delay is unknown, we assume zero.
+        // NOTE: this will have to be adjusted if we ever add lookahead.
+        delay = 0;
+    }
+
+    if (aecm->fixedDelay >= 0)
+    {
+        // Use fixed delay
+        delay = aecm->fixedDelay;
+    }
+
+#ifdef ARM_WINM_LOG_
+    // measure tick end
+    QueryPerformanceCounter((LARGE_INTEGER*)&end);
+    diff__ = ((end - start) * 1000) / (freq/1000);
+    milliseconds = (unsigned int)(diff__ & 0xffffffff);
+    WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+    // measure tick start
+    QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#endif
+    // Get aligned far end spectrum
+    far_spectrum_ptr = AlignedFarend(aecm, &far_q, delay);
+    zerosXBuf = (WebRtc_Word16) far_q;
+    if (far_spectrum_ptr == NULL)
+    {
+        return -1;
+    }
+
+    // Calculate log(energy) and update energy threshold levels
+    WebRtcAecm_CalcEnergies(aecm,
+                            far_spectrum_ptr,
+                            zerosXBuf,
+                            dfaNoisySum,
+                            echoEst32);
+
+    // Calculate stepsize
+    mu = WebRtcAecm_CalcStepSize(aecm);
+
+    // Update counters
+    aecm->totCount++;
+
+    // This is the channel estimation algorithm.
+    // It is base on NLMS but has a variable step length, which was calculated above.
+    WebRtcAecm_UpdateChannel(aecm, far_spectrum_ptr, zerosXBuf, dfaNoisy, mu, echoEst32);
+    supGain = CalcSuppressionGain(aecm);
+
+#ifdef ARM_WINM_LOG_
+    // measure tick end
+    QueryPerformanceCounter((LARGE_INTEGER*)&end);
+    diff__ = ((end - start) * 1000) / (freq/1000);
+    milliseconds = (unsigned int)(diff__ & 0xffffffff);
+    WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+    // measure tick start
+    QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#endif
+
+    // Calculate Wiener filter hnl[]
+    for (i = 0; i < PART_LEN1; i++)
+    {
+        // Far end signal through channel estimate in Q8
+        // How much can we shift right to preserve resolution
+        // Smooth echoFilt towards echoEst32 (50/256 step).
+        tmp32no1 = echoEst32[i] - aecm->echoFilt[i];
+        aecm->echoFilt[i] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32no1, 50), 8);
+
+        zeros32 = WebRtcSpl_NormW32(aecm->echoFilt[i]) + 1;
+        zeros16 = WebRtcSpl_NormW16(supGain) + 1;
+        if (zeros32 + zeros16 > 16)
+        {
+            // Multiplication is safe
+            // Result in Q(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN+aecm->xfaQDomainBuf[diff])
+            echoEst32Gained = WEBRTC_SPL_UMUL_32_16((WebRtc_UWord32)aecm->echoFilt[i],
+                                                    (WebRtc_UWord16)supGain);
+            resolutionDiff = 14 - RESOLUTION_CHANNEL16 - RESOLUTION_SUPGAIN;
+            resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf);
+        } else
+        {
+            // Not enough headroom: pre-shift the larger operand before the
+            // multiply and track the lost resolution in resolutionDiff.
+            tmp16no1 = 17 - zeros32 - zeros16;
+            resolutionDiff = 14 + tmp16no1 - RESOLUTION_CHANNEL16 - RESOLUTION_SUPGAIN;
+            resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf);
+            if (zeros32 > tmp16no1)
+            {
+                echoEst32Gained = WEBRTC_SPL_UMUL_32_16((WebRtc_UWord32)aecm->echoFilt[i],
+                        (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_W16(supGain,
+                                tmp16no1)); // Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN-16)
+            } else
+            {
+                // Result in Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN-16)
+                echoEst32Gained = WEBRTC_SPL_UMUL_32_16(
+                        (WebRtc_UWord32)WEBRTC_SPL_RSHIFT_W32(aecm->echoFilt[i], tmp16no1),
+                        (WebRtc_UWord16)supGain);
+            }
+        }
+
+        zeros16 = WebRtcSpl_NormW16(aecm->nearFilt[i]);
+        // NOTE(review): bitwise '&' mixes a 0/1 comparison result with the raw
+        // value of nearFilt[i]; an even nonzero nearFilt[i] makes the whole
+        // test false. Presumably '&&' was intended — confirm before changing.
+        if ((zeros16 < (aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld))
+                & (aecm->nearFilt[i]))
+        {
+            tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], zeros16);
+            qDomainDiff = zeros16 - aecm->dfaCleanQDomain + aecm->dfaCleanQDomainOld;
+        } else
+        {
+            tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i],
+                                            aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld);
+            qDomainDiff = 0;
+        }
+        // Smooth nearFilt towards the (Q-aligned) clean near-end magnitude.
+        tmp16no2 = WEBRTC_SPL_SHIFT_W16(ptrDfaClean[i], qDomainDiff);
+        tmp32no1 = (WebRtc_Word32)(tmp16no2 - tmp16no1);
+        tmp16no2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 4);
+        tmp16no2 += tmp16no1;
+        zeros16 = WebRtcSpl_NormW16(tmp16no2);
+        // NOTE(review): same bitwise '&' pattern as above — an even nonzero
+        // tmp16no2 defeats the saturation test. Presumably '&&' was intended.
+        if ((tmp16no2) & (-qDomainDiff > zeros16))
+        {
+            aecm->nearFilt[i] = WEBRTC_SPL_WORD16_MAX;
+        } else
+        {
+            aecm->nearFilt[i] = WEBRTC_SPL_SHIFT_W16(tmp16no2, -qDomainDiff);
+        }
+
+        // Wiener filter coefficients, resulting hnl in Q14
+        if (echoEst32Gained == 0)
+        {
+            hnl[i] = ONE_Q14;
+        } else if (aecm->nearFilt[i] == 0)
+        {
+            hnl[i] = 0;
+        } else
+        {
+            // Multiply the suppression gain
+            // Rounding
+            echoEst32Gained += (WebRtc_UWord32)(aecm->nearFilt[i] >> 1);
+            tmpU32 = WebRtcSpl_DivU32U16(echoEst32Gained, (WebRtc_UWord16)aecm->nearFilt[i]);
+
+            // Current resolution is
+            // Q-(RESOLUTION_CHANNEL + RESOLUTION_SUPGAIN - max(0, 17 - zeros16 - zeros32))
+            // Make sure we are in Q14
+            tmp32no1 = (WebRtc_Word32)WEBRTC_SPL_SHIFT_W32(tmpU32, resolutionDiff);
+            if (tmp32no1 > ONE_Q14)
+            {
+                hnl[i] = 0;
+            } else if (tmp32no1 < 0)
+            {
+                hnl[i] = ONE_Q14;
+            } else
+            {
+                // 1-echoEst/dfa
+                hnl[i] = ONE_Q14 - (WebRtc_Word16)tmp32no1;
+                if (hnl[i] < 0)
+                {
+                    hnl[i] = 0;
+                }
+            }
+        }
+        if (hnl[i])
+        {
+            numPosCoef++;
+        }
+    }
+    // Only in wideband. Prevent the gain in upper band from being larger than
+    // in lower band.
+    if (aecm->mult == 2)
+    {
+        // TODO(bjornv): Investigate if the scaling of hnl[i] below can cause
+        //               speech distortion in double-talk.
+        for (i = 0; i < PART_LEN1; i++)
+        {
+            // Square the gain (Q14 * Q14 >> 14 stays in Q14).
+            hnl[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(hnl[i], hnl[i], 14);
+        }
+
+        for (i = kMinPrefBand; i <= kMaxPrefBand; i++)
+        {
+            avgHnl32 += (WebRtc_Word32)hnl[i];
+        }
+        assert(kMaxPrefBand - kMinPrefBand + 1 > 0);
+        avgHnl32 /= (kMaxPrefBand - kMinPrefBand + 1);
+
+        // Cap the upper-band gains at the preferred-band average.
+        for (i = kMaxPrefBand; i < PART_LEN1; i++)
+        {
+            if (hnl[i] > (WebRtc_Word16)avgHnl32)
+            {
+                hnl[i] = (WebRtc_Word16)avgHnl32;
+            }
+        }
+    }
+
+#ifdef ARM_WINM_LOG_
+    // measure tick end
+    QueryPerformanceCounter((LARGE_INTEGER*)&end);
+    diff__ = ((end - start) * 1000) / (freq/1000);
+    milliseconds = (unsigned int)(diff__ & 0xffffffff);
+    WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+    // measure tick start
+    QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#endif
+
+    // Calculate NLP gain, result is in Q14
+    if (aecm->nlpFlag)
+    {
+        for (i = 0; i < PART_LEN1; i++)
+        {
+            // Truncate values close to zero and one.
+            if (hnl[i] > NLP_COMP_HIGH)
+            {
+                hnl[i] = ONE_Q14;
+            } else if (hnl[i] < NLP_COMP_LOW)
+            {
+                hnl[i] = 0;
+            }
+
+            // Remove outliers
+            // NOTE(review): numPosCoef does not change inside this loop, so
+            // this test is loop-invariant and could be hoisted.
+            if (numPosCoef < 3)
+            {
+                nlpGain = 0;
+            } else
+            {
+                nlpGain = ONE_Q14;
+            }
+
+            // NLP
+            if ((hnl[i] == ONE_Q14) && (nlpGain == ONE_Q14))
+            {
+                hnl[i] = ONE_Q14;
+            } else
+            {
+                hnl[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(hnl[i], nlpGain, 14);
+            }
+
+            // multiply with Wiener coefficients
+            efw[i].real = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real,
+                                                                            hnl[i], 14));
+            efw[i].imag = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag,
+                                                                            hnl[i], 14));
+        }
+    }
+    else
+    {
+        // multiply with Wiener coefficients
+        for (i = 0; i < PART_LEN1; i++)
+        {
+            efw[i].real = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real,
+                                                                           hnl[i], 14));
+            efw[i].imag = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag,
+                                                                           hnl[i], 14));
+        }
+    }
+
+    if (aecm->cngMode == AecmTrue)
+    {
+        ComfortNoise(aecm, ptrDfaClean, efw, hnl);
+    }
+
+#ifdef ARM_WINM_LOG_
+    // measure tick end
+    QueryPerformanceCounter((LARGE_INTEGER*)&end);
+    diff__ = ((end - start) * 1000) / (freq/1000);
+    milliseconds = (unsigned int)(diff__ & 0xffffffff);
+    WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+    // measure tick start
+    QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#endif
+
+    WebRtcAecm_InverseFFTAndWindow(aecm, fft, efw, output, nearendClean);
+
+    return 0;
+}
+
+
+// Generate comfort noise and add to output signal.
+//
+// \param[in]     aecm    Handle of the AECM instance.
+// \param[in]     dfa     Absolute value of the nearend signal
+//                        (Q[aecm->dfaCleanQDomain]).
+// \param[in,out] out     Complex output signal to which the generated noise
+//                        is added (same Q-domain as the input).
+// \param[in]     lambda  Suppression gain with which to scale the noise level (Q14).
+//
+static void ComfortNoise(AecmCore_t* aecm,
+                         const WebRtc_UWord16* dfa,
+                         complex16_t* out,
+                         const WebRtc_Word16* lambda)
+{
+    WebRtc_Word16 i;
+    WebRtc_Word16 tmp16;
+    WebRtc_Word32 tmp32;
+
+    WebRtc_Word16 randW16[PART_LEN];
+    WebRtc_Word16 uReal[PART_LEN1];
+    WebRtc_Word16 uImag[PART_LEN1];
+    WebRtc_Word32 outLShift32;
+    WebRtc_Word16 noiseRShift16[PART_LEN1];
+
+    // Shift that maps the near-end Q-domain into the noise-estimate Q-domain.
+    WebRtc_Word16 shiftFromNearToNoise = kNoiseEstQDomain - aecm->dfaCleanQDomain;
+    WebRtc_Word16 minTrackShift;
+
+    assert(shiftFromNearToNoise >= 0);
+    assert(shiftFromNearToNoise < 16);
+
+    if (aecm->noiseEstCtr < 100)
+    {
+        // Track the minimum more quickly initially.
+        aecm->noiseEstCtr++;
+        minTrackShift = 6;
+    } else
+    {
+        minTrackShift = 9;
+    }
+
+    // Estimate noise power.
+    for (i = 0; i < PART_LEN1; i++)
+    {
+
+        // Shift to the noise domain.
+        tmp32 = (WebRtc_Word32)dfa[i];
+        outLShift32 = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise);
+
+        if (outLShift32 < aecm->noiseEst[i])
+        {
+            // Reset "too low" counter
+            aecm->noiseEstTooLowCtr[i] = 0;
+            // Track the minimum.
+            if (aecm->noiseEst[i] < (1 << minTrackShift))
+            {
+                // For small values, decrease noiseEst[i] every
+                // |kNoiseEstIncCount| block. The regular approach below can not
+                // go further down due to truncation.
+                aecm->noiseEstTooHighCtr[i]++;
+                if (aecm->noiseEstTooHighCtr[i] >= kNoiseEstIncCount)
+                {
+                    aecm->noiseEst[i]--;
+                    aecm->noiseEstTooHighCtr[i] = 0; // Reset the counter
+                }
+            }
+            else
+            {
+                // Exponential decay towards the observed minimum.
+                aecm->noiseEst[i] -= ((aecm->noiseEst[i] - outLShift32) >> minTrackShift);
+            }
+        } else
+        {
+            // Reset "too high" counter
+            aecm->noiseEstTooHighCtr[i] = 0;
+            // Ramp slowly upwards until we hit the minimum again.
+            // The factor 2049/2048 gives a ~0.05% increase per block.
+            if ((aecm->noiseEst[i] >> 19) > 0)
+            {
+                // Avoid overflow.
+                // Multiplication with 2049 will cause wrap around. Scale
+                // down first and then multiply
+                aecm->noiseEst[i] >>= 11;
+                aecm->noiseEst[i] *= 2049;
+            }
+            else if ((aecm->noiseEst[i] >> 11) > 0)
+            {
+                // Large enough for relative increase
+                aecm->noiseEst[i] *= 2049;
+                aecm->noiseEst[i] >>= 11;
+            }
+            else
+            {
+                // Make incremental increases based on size every
+                // |kNoiseEstIncCount| block
+                aecm->noiseEstTooLowCtr[i]++;
+                if (aecm->noiseEstTooLowCtr[i] >= kNoiseEstIncCount)
+                {
+                    aecm->noiseEst[i] += (aecm->noiseEst[i] >> 9) + 1;
+                    aecm->noiseEstTooLowCtr[i] = 0; // Reset counter
+                }
+            }
+        }
+    }
+
+    // Convert noise estimates back to the near-end domain and scale by
+    // (1 - lambda): more suppression (small lambda) means more comfort noise.
+    for (i = 0; i < PART_LEN1; i++)
+    {
+        tmp32 = WEBRTC_SPL_RSHIFT_W32(aecm->noiseEst[i], shiftFromNearToNoise);
+        if (tmp32 > 32767)
+        {
+            // Saturate to 16 bits and write the clamp back to the estimate.
+            tmp32 = 32767;
+            aecm->noiseEst[i] = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise);
+        }
+        noiseRShift16[i] = (WebRtc_Word16)tmp32;
+
+        tmp16 = ONE_Q14 - lambda[i];
+        noiseRShift16[i]
+                = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16, noiseRShift16[i], 14);
+    }
+
+    // Generate a uniform random array on [0 2^15-1].
+    WebRtcSpl_RandUArray(randW16, PART_LEN, &aecm->seed);
+
+    // Generate noise according to estimated energy.
+    uReal[0] = 0; // Reject LF noise.
+    uImag[0] = 0;
+    for (i = 1; i < PART_LEN1; i++)
+    {
+        // Get a random index for the cos and sin tables over [0 359].
+        tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(359, randW16[i - 1], 15);
+
+        // Tables are in Q13.
+        uReal[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(noiseRShift16[i],
+                kCosTable[tmp16], 13);
+        uImag[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(-noiseRShift16[i],
+                kSinTable[tmp16], 13);
+    }
+    uImag[PART_LEN] = 0;
+
+#if (!defined ARM_WINM) && (!defined ARM9E_GCC) && (!defined ANDROID_AECOPT)
+    // Add the noise with 16-bit saturation.
+    for (i = 0; i < PART_LEN1; i++)
+    {
+        out[i].real = WEBRTC_SPL_ADD_SAT_W16(out[i].real, uReal[i]);
+        out[i].imag = WEBRTC_SPL_ADD_SAT_W16(out[i].imag, uImag[i]);
+    }
+#else
+    // Manually unrolled (x2) variant for the optimized ARM builds; the final
+    // odd element (PART_LEN1 is odd) is handled after the loop.
+    for (i = 0; i < PART_LEN1 -1; )
+    {
+        out[i].real = WEBRTC_SPL_ADD_SAT_W16(out[i].real, uReal[i]);
+        out[i].imag = WEBRTC_SPL_ADD_SAT_W16(out[i].imag, uImag[i]);
+        i++;
+
+        out[i].real = WEBRTC_SPL_ADD_SAT_W16(out[i].real, uReal[i]);
+        out[i].imag = WEBRTC_SPL_ADD_SAT_W16(out[i].imag, uImag[i]);
+        i++;
+    }
+    out[i].real = WEBRTC_SPL_ADD_SAT_W16(out[i].real, uReal[i]);
+    out[i].imag = WEBRTC_SPL_ADD_SAT_W16(out[i].imag, uImag[i]);
+#endif
+}
+
+// Appends |farLen| far-end samples to the circular far-end buffer,
+// wrapping the write position at FAR_BUF_LEN.
+void WebRtcAecm_BufferFarFrame(AecmCore_t* const aecm,
+                               const WebRtc_Word16* const farend,
+                               const int farLen)
+{
+    int remaining = farLen;
+    int srcOffset = 0;
+    int tailLen;
+
+    // Copy in segments while the data would run past the end of the buffer.
+    while (aecm->farBufWritePos + remaining > FAR_BUF_LEN)
+    {
+        // Fill the tail of the buffer, then continue from its start.
+        tailLen = FAR_BUF_LEN - aecm->farBufWritePos;
+        memcpy(aecm->farBuf + aecm->farBufWritePos, farend + srcOffset,
+               sizeof(WebRtc_Word16) * tailLen);
+        aecm->farBufWritePos = 0;
+        srcOffset = tailLen;
+        remaining = farLen - tailLen;
+    }
+
+    // Copy the part that fits without wrapping.
+    memcpy(aecm->farBuf + aecm->farBufWritePos, farend + srcOffset,
+           sizeof(WebRtc_Word16) * remaining);
+    aecm->farBufWritePos += remaining;
+}
+
+// Reads |farLen| samples from the circular far-end buffer into |farend|,
+// first moving the read position to account for a change in known delay.
+void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm, WebRtc_Word16 * const farend,
+                              const int farLen, const int knownDelay)
+{
+    int remaining = farLen;
+    int dstOffset = 0;
+    int segment;
+
+    // Compensate the read position for any change in the known delay.
+    aecm->farBufReadPos -= (knownDelay - aecm->lastKnownDelay);
+
+    // Wrap the read position back into [0, FAR_BUF_LEN - 1].
+    while (aecm->farBufReadPos < 0)
+    {
+        aecm->farBufReadPos += FAR_BUF_LEN;
+    }
+    while (aecm->farBufReadPos > FAR_BUF_LEN - 1)
+    {
+        aecm->farBufReadPos -= FAR_BUF_LEN;
+    }
+
+    aecm->lastKnownDelay = knownDelay;
+
+    // Read in segments while the request would run past the buffer end.
+    while (aecm->farBufReadPos + remaining > FAR_BUF_LEN)
+    {
+        // Drain the tail of the buffer, then continue from its start.
+        segment = FAR_BUF_LEN - aecm->farBufReadPos;
+        memcpy(farend + dstOffset, aecm->farBuf + aecm->farBufReadPos,
+               sizeof(WebRtc_Word16) * segment);
+        aecm->farBufReadPos = 0;
+        dstOffset = segment;
+        remaining = farLen - segment;
+    }
+    // Read the part available without wrapping.
+    memcpy(farend + dstOffset, aecm->farBuf + aecm->farBufReadPos,
+           sizeof(WebRtc_Word16) * remaining);
+    aecm->farBufReadPos += remaining;
+}
+
+
diff --git a/trunk/src/modules/audio_processing/aecm/aecm_core.h b/trunk/src/modules/audio_processing/aecm/aecm_core.h
new file mode 100644
index 0000000..0ec62ec
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/aecm_core.h
@@ -0,0 +1,375 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Performs echo control (suppression) with fft routines in fixed-point
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_SOURCE_AECM_CORE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_SOURCE_AECM_CORE_H_
+
+#define AECM_DYNAMIC_Q // turn on/off dynamic Q-domain
+//#define AECM_WITH_ABS_APPROX
+//#define AECM_SHORT                // for 32 sample partition length (otherwise 64)
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+// Algorithm parameters
+
+#define FRAME_LEN       80              // Total frame length, 10 ms
+#ifdef AECM_SHORT
+
+#define PART_LEN        32              // Length of partition
+#define PART_LEN_SHIFT  6               // Length of (PART_LEN * 2) in base 2
+
+#else
+
+#define PART_LEN        64              // Length of partition
+#define PART_LEN_SHIFT  7               // Length of (PART_LEN * 2) in base 2
+
+#endif
+
+#define PART_LEN1       (PART_LEN + 1)  // Unique fft coefficients
+#define PART_LEN2       (PART_LEN << 1) // Length of partition * 2
+#define PART_LEN4       (PART_LEN << 2) // Length of partition * 4
+#define FAR_BUF_LEN     PART_LEN4       // Length of buffers
+#define MAX_DELAY 100
+
+// Counter parameters
+#ifdef AECM_SHORT
+
+#define CONV_LEN        1024            // Convergence length used at startup
+#else
+
+#define CONV_LEN        512             // Convergence length used at startup
+#endif
+
+#define CONV_LEN2       (CONV_LEN << 1) // Convergence length * 2 used at startup
+// Energy parameters
+#define MAX_BUF_LEN     64              // History length of energy signals
+
+#define FAR_ENERGY_MIN  1025            // Lowest Far energy level: At least 2 in energy
+#define FAR_ENERGY_DIFF 929             // Allowed difference between max and min
+
+#define ENERGY_DEV_OFFSET       0       // The energy error offset in Q8
+#define ENERGY_DEV_TOL  400             // The energy estimation tolerance in Q8
+#define FAR_ENERGY_VAD_REGION   230     // Far VAD tolerance region
+// Stepsize parameters
+#define MU_MIN          10              // Min stepsize 2^-MU_MIN (far end energy dependent)
+#define MU_MAX          1               // Max stepsize 2^-MU_MAX (far end energy dependent)
+#define MU_DIFF         9               // MU_MIN - MU_MAX
+// Channel parameters
+#define MIN_MSE_COUNT   20              // Min number of consecutive blocks with enough far end
+                                        // energy to compare channel estimates
+#define MIN_MSE_DIFF    29              // The ratio between adapted and stored channel to
+                                        // accept a new storage (0.8 in Q-MSE_RESOLUTION)
+#define MSE_RESOLUTION  5               // MSE parameter resolution
+#define RESOLUTION_CHANNEL16    12      // W16 Channel in Q-RESOLUTION_CHANNEL16
+#define RESOLUTION_CHANNEL32    28      // W32 Channel in Q-RESOLUTION_CHANNEL32
+#define CHANNEL_VAD     16              // Minimum energy in frequency band to update channel
+// Suppression gain parameters: SUPGAIN_ parameters in Q-(RESOLUTION_SUPGAIN)
+#define RESOLUTION_SUPGAIN      8       // Channel in Q-(RESOLUTION_SUPGAIN)
+#define SUPGAIN_DEFAULT (1 << RESOLUTION_SUPGAIN)   // Default suppression gain
+#define SUPGAIN_ERROR_PARAM_A   3072    // Estimation error parameter (Maximum gain) (8 in Q8)
+#define SUPGAIN_ERROR_PARAM_B   1536    // Estimation error parameter (Gain before going down)
+#define SUPGAIN_ERROR_PARAM_D   SUPGAIN_DEFAULT // Estimation error parameter
+                                                // (Should be the same as Default) (1 in Q8)
+#define SUPGAIN_EPC_DT  200             // = SUPGAIN_ERROR_PARAM_C * ENERGY_DEV_TOL
+// Defines for "check delay estimation"
+#define CORR_WIDTH      31              // Number of samples to correlate over.
+#define CORR_MAX        16              // Maximum correlation offset
+#define CORR_MAX_BUF    63
+#define CORR_DEV        4
+#define CORR_MAX_LEVEL  20
+#define CORR_MAX_LOW    4
+// Correlation buffer length: 2 * CORR_MAX + 1 lags.
+// Fully parenthesized so the macro expands safely inside larger expressions
+// (the unparenthesized form `(CORR_MAX << 1) + 1` would bind incorrectly in
+// e.g. `2 * CORR_BUF_LEN`).
+#define CORR_BUF_LEN    ((CORR_MAX << 1) + 1)
+// Note that CORR_WIDTH + 2*CORR_MAX <= MAX_BUF_LEN
+
+#define ONE_Q14         (1 << 14)
+
+// NLP defines
+#define NLP_COMP_LOW    3277            // 0.2 in Q14
+#define NLP_COMP_HIGH   ONE_Q14         // 1 in Q14
+
+extern const WebRtc_Word16 WebRtcAecm_kSqrtHanning[];
+
+// A complex value in 16-bit fixed point (real/imaginary pair), used for the
+// frequency-domain representation of a block.
+typedef struct {
+    WebRtc_Word16 real;
+    WebRtc_Word16 imag;
+} complex16_t;
+
+// Core AECM state. Created by WebRtcAecm_CreateCore() and initialized by
+// WebRtcAecm_InitCore(); freed by WebRtcAecm_FreeCore(). Fixed-point members
+// carry their Q-domain in the surrounding code, not in the type.
+typedef struct
+{
+    // Read/write positions into farBuf (see Buffer/FetchFarFrame below).
+    int farBufWritePos;
+    int farBufReadPos;
+    int knownDelay;
+    int lastKnownDelay; // Delay used on the previous FetchFarFrame() call.
+    int firstVAD; // Parameter to control poorly initialized channels
+
+    // Opaque frame-buffer handles (presumably ring_buffer instances — confirm
+    // against the create/free code).
+    void *farFrameBuf;
+    void *nearNoisyFrameBuf;
+    void *nearCleanFrameBuf;
+    void *outFrameBuf;
+
+    WebRtc_Word16 farBuf[FAR_BUF_LEN]; // Far-end sample ring buffer.
+
+    WebRtc_Word16 mult;
+    WebRtc_UWord32 seed;
+
+    // Delay estimation variables
+    void* delay_estimator;
+    WebRtc_UWord16 currentDelay;
+    // Far end history variables
+    // TODO(bjornv): Replace |far_history| with ring_buffer.
+    uint16_t far_history[PART_LEN1 * MAX_DELAY];
+    int far_history_pos;
+    int far_q_domains[MAX_DELAY];
+
+    WebRtc_Word16 nlpFlag;
+    WebRtc_Word16 fixedDelay;
+
+    WebRtc_UWord32 totCount;
+
+    // Q-domains of the clean/noisy near-end signal, current and previous block.
+    WebRtc_Word16 dfaCleanQDomain;
+    WebRtc_Word16 dfaCleanQDomainOld;
+    WebRtc_Word16 dfaNoisyQDomain;
+    WebRtc_Word16 dfaNoisyQDomainOld;
+
+    // Log-energy histories, MAX_BUF_LEN blocks long.
+    WebRtc_Word16 nearLogEnergy[MAX_BUF_LEN];
+    WebRtc_Word16 farLogEnergy;
+    WebRtc_Word16 echoAdaptLogEnergy[MAX_BUF_LEN];
+    WebRtc_Word16 echoStoredLogEnergy[MAX_BUF_LEN];
+
+    // The extra 16 or 32 bytes in the following buffers are for alignment based Neon code.
+    // It's designed this way since the current GCC compiler can't align a buffer in 16 or 32
+    // byte boundaries properly.
+    WebRtc_Word16 channelStored_buf[PART_LEN1 + 8];
+    WebRtc_Word16 channelAdapt16_buf[PART_LEN1 + 8];
+    WebRtc_Word32 channelAdapt32_buf[PART_LEN1 + 8];
+    WebRtc_Word16 xBuf_buf[PART_LEN2 + 16]; // farend
+    WebRtc_Word16 dBufClean_buf[PART_LEN2 + 16]; // nearend
+    WebRtc_Word16 dBufNoisy_buf[PART_LEN2 + 16]; // nearend
+    WebRtc_Word16 outBuf_buf[PART_LEN + 8];
+
+    // Pointers to the above buffers
+    // (presumably adjusted to aligned addresses inside the *_buf arrays —
+    // confirm against WebRtcAecm_CreateCore).
+    WebRtc_Word16 *channelStored;
+    WebRtc_Word16 *channelAdapt16;
+    WebRtc_Word32 *channelAdapt32;
+    WebRtc_Word16 *xBuf;
+    WebRtc_Word16 *dBufClean;
+    WebRtc_Word16 *dBufNoisy;
+    WebRtc_Word16 *outBuf;
+
+    // Per-bin spectral estimates (PART_LEN1 frequency bins).
+    WebRtc_Word32 echoFilt[PART_LEN1];
+    WebRtc_Word16 nearFilt[PART_LEN1];
+    WebRtc_Word32 noiseEst[PART_LEN1];
+    int           noiseEstTooLowCtr[PART_LEN1];
+    int           noiseEstTooHighCtr[PART_LEN1];
+    WebRtc_Word16 noiseEstCtr;
+    WebRtc_Word16 cngMode;
+
+    // MSE tracking used to decide when to store the adaptive channel.
+    WebRtc_Word32 mseAdaptOld;
+    WebRtc_Word32 mseStoredOld;
+    WebRtc_Word32 mseThreshold;
+
+    // Far-end energy tracking and VAD state.
+    WebRtc_Word16 farEnergyMin;
+    WebRtc_Word16 farEnergyMax;
+    WebRtc_Word16 farEnergyMaxMin;
+    WebRtc_Word16 farEnergyVAD;
+    WebRtc_Word16 farEnergyMSE;
+    int currentVADValue;
+    WebRtc_Word16 vadUpdateCount;
+
+    WebRtc_Word16 startupState;
+    WebRtc_Word16 mseChannelCount;
+    WebRtc_Word16 supGain;
+    WebRtc_Word16 supGainOld;
+
+    // Suppression-gain error parameters, Q-(RESOLUTION_SUPGAIN).
+    WebRtc_Word16 supGainErrParamA;
+    WebRtc_Word16 supGainErrParamD;
+    WebRtc_Word16 supGainErrParamDiffAB;
+    WebRtc_Word16 supGainErrParamDiffBD;
+
+#ifdef AEC_DEBUG
+    FILE *farFile;
+    FILE *nearFile;
+    FILE *outFile;
+#endif
+} AecmCore_t;
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_CreateCore(...)
+//
+// Allocates the memory needed by the AECM. The memory needs to be
+// initialized separately using the WebRtcAecm_InitCore() function.
+//
+// Input:
+//      - aecm          : Instance that should be created
+//
+// Output:
+//      - aecm          : Created instance
+//
+// Return value         :  0 - Ok
+//                        -1 - Error
+//
+int WebRtcAecm_CreateCore(AecmCore_t **aecm);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_InitCore(...)
+//
+// This function initializes the AECM instant created with WebRtcAecm_CreateCore(...)
+// Input:
+//      - aecm          : Pointer to the AECM instance
+//      - samplingFreq  : Sampling Frequency
+//
+// Output:
+//      - aecm          : Initialized instance
+//
+// Return value         :  0 - Ok
+//                        -1 - Error
+//
+int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_FreeCore(...)
+//
+// This function releases the memory allocated by WebRtcAecm_CreateCore()
+// Input:
+//      - aecm          : Pointer to the AECM instance
+//
+// Return value         :  0 - Ok
+//                        -1 - Error
+//           11001-11016: Error
+//
+int WebRtcAecm_FreeCore(AecmCore_t *aecm);
+
+int WebRtcAecm_Control(AecmCore_t *aecm, int delay, int nlpFlag);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_InitEchoPathCore(...)
+//
+// This function resets the echo channel adaptation with the specified channel.
+// Input:
+//      - aecm          : Pointer to the AECM instance
+//      - echo_path     : Pointer to the data that should initialize the echo path
+//
+// Output:
+//      - aecm          : Initialized instance
+//
+void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm, const WebRtc_Word16* echo_path);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_ProcessFrame(...)
+//
+// This function processes frames and sends blocks to WebRtcAecm_ProcessBlock(...)
+//
+// Inputs:
+//      - aecm          : Pointer to the AECM instance
+//      - farend        : In buffer containing one frame of echo signal
+//      - nearendNoisy  : In buffer containing one frame of nearend+echo signal without NS
+//      - nearendClean  : In buffer containing one frame of nearend+echo signal with NS
+//
+// Output:
+//      - out           : Out buffer, one frame of nearend signal          :
+//
+//
+int WebRtcAecm_ProcessFrame(AecmCore_t * aecm, const WebRtc_Word16 * farend,
+                            const WebRtc_Word16 * nearendNoisy,
+                            const WebRtc_Word16 * nearendClean,
+                            WebRtc_Word16 * out);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_ProcessBlock(...)
+//
+// This function is called for every block within one frame
+// This function is called by WebRtcAecm_ProcessFrame(...)
+//
+// Inputs:
+//      - aecm          : Pointer to the AECM instance
+//      - farend        : In buffer containing one block of echo signal
+//      - nearendNoisy  : In buffer containing one frame of nearend+echo signal without NS
+//      - nearendClean  : In buffer containing one frame of nearend+echo signal with NS
+//
+// Output:
+//      - out           : Out buffer, one block of nearend signal          :
+//
+//
+// Parameter renamed from the typo |noisyClean| to |nearendClean| so the
+// prototype matches both the documentation block above and the sibling
+// WebRtcAecm_ProcessFrame() declaration. Declaration-only parameter names
+// are documentation; this does not affect callers or the definition.
+int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, const WebRtc_Word16 * farend,
+                            const WebRtc_Word16 * nearendNoisy,
+                            const WebRtc_Word16 * nearendClean,
+                            WebRtc_Word16 * out);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_BufferFarFrame()
+//
+// Inserts a frame of data into farend buffer.
+//
+// Inputs:
+//      - aecm          : Pointer to the AECM instance
+//      - farend        : In buffer containing one frame of farend signal
+//      - farLen        : Length of frame
+//
+void WebRtcAecm_BufferFarFrame(AecmCore_t * const aecm, const WebRtc_Word16 * const farend,
+                               const int farLen);
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_FetchFarFrame()
+//
+// Read the farend buffer to account for known delay
+//
+// Inputs:
+//      - aecm          : Pointer to the AECM instance
+//      - farend        : In buffer containing one frame of farend signal
+//      - farLen        : Length of frame
+//      - knownDelay    : known delay
+//
+void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm, WebRtc_Word16 * const farend,
+                              const int farLen, const int knownDelay);
+
+///////////////////////////////////////////////////////////////////////////////
+// Some function pointers, for internal functions shared by ARM NEON and 
+// generic C code.
+//
+typedef void (*CalcLinearEnergies)(
+    AecmCore_t* aecm,
+    const WebRtc_UWord16* far_spectrum,
+    WebRtc_Word32* echoEst,
+    WebRtc_UWord32* far_energy,
+    WebRtc_UWord32* echo_energy_adapt,
+    WebRtc_UWord32* echo_energy_stored);
+extern CalcLinearEnergies WebRtcAecm_CalcLinearEnergies;
+
+typedef void (*StoreAdaptiveChannel)(
+    AecmCore_t* aecm,
+    const WebRtc_UWord16* far_spectrum,
+    WebRtc_Word32* echo_est);
+extern StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
+
+typedef void (*ResetAdaptiveChannel)(AecmCore_t* aecm);
+extern ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
+
+typedef void (*WindowAndFFT)(
+    WebRtc_Word16* fft,
+    const WebRtc_Word16* time_signal,
+    complex16_t* freq_signal,
+    int time_signal_scaling);
+extern WindowAndFFT WebRtcAecm_WindowAndFFT;
+
+typedef void (*InverseFFTAndWindow)(
+    AecmCore_t* aecm,
+    WebRtc_Word16* fft, complex16_t* efw,
+    WebRtc_Word16* output,
+    const WebRtc_Word16* nearendClean);
+extern InverseFFTAndWindow WebRtcAecm_InverseFFTAndWindow;
+
+// Initialization of the above function pointers for ARM Neon.
+void WebRtcAecm_InitNeon(void);
+
+
+#endif
diff --git a/trunk/src/modules/audio_processing/aecm/aecm_core_neon.c b/trunk/src/modules/audio_processing/aecm/aecm_core_neon.c
new file mode 100644
index 0000000..ab448b4
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/aecm_core_neon.c
@@ -0,0 +1,303 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "aecm_core.h"
+
+#include <arm_neon.h>
+#include <assert.h>
+
+
+// Square root of Hanning window in Q14, stored in reverse order so the
+// NEON windowing loops can read four coefficients with ascending indices
+// (entry i presumably equals WebRtcAecm_kSqrtHanning[PART_LEN - i] — confirm
+// against the table in aecm_core.c). 8-byte aligned for the ":64" loads.
+static const WebRtc_Word16 kSqrtHanningReversed[] __attribute__((aligned(8))) = {
+  16384, 16373, 16354, 16325, 
+  16286, 16237, 16179, 16111,
+  16034, 15947, 15851, 15746,
+  15631, 15506, 15373, 15231,
+  15079, 14918, 14749, 14571,
+  14384, 14189, 13985, 13773,
+  13553, 13325, 13089, 12845,
+  12594, 12335, 12068, 11795,
+  11514, 11227, 10933, 10633,
+  10326, 10013, 9695,  9370,
+  9040,  8705,  8364,  8019,
+  7668,  7313,  6954,  6591,
+  6224,  5853,  5478,  5101,
+  4720,  4337,  3951,  3562,
+  3172,  2780,  2386,  1990,
+  1594,  1196,  798,   399
+};
+
+// NEON implementation of the WindowAndFFT function pointer: applies the
+// sqrt-Hanning analysis window to |time_signal| (left-shifted by
+// |time_signal_scaling|), interleaves zeros for the imaginary parts, runs the
+// complex FFT, and writes the first PART_LEN2 bins to |freq_signal| with the
+// imaginary parts sign-flipped. The ":64"/":128"/":256" address qualifiers
+// require the buffers to be correspondingly aligned (hence the padded
+// *_buf members of AecmCore_t).
+static void WindowAndFFTNeon(WebRtc_Word16* fft,
+                             const WebRtc_Word16* time_signal,
+                             complex16_t* freq_signal,
+                             int time_signal_scaling) {
+  int i, j;
+
+  int16x4_t tmp16x4_scaling = vdup_n_s16(time_signal_scaling);
+  // d21 holds the zero imaginary parts interleaved by the vst2 stores below.
+  __asm__("vmov.i16 d21, #0" ::: "d21");
+
+  for (i = 0, j = 0; i < PART_LEN; i += 4, j += 8) {
+    int16x4_t tmp16x4_0;
+    int16x4_t tmp16x4_1;
+    int32x4_t tmp32x4_0;
+
+    /* Window near end */
+    // fft[j] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT((time_signal[i]
+    //       << time_signal_scaling), WebRtcAecm_kSqrtHanning[i], 14);
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&time_signal[i]));
+    tmp16x4_0 = vshl_s16(tmp16x4_0, tmp16x4_scaling);
+
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_1) : "r"(&WebRtcAecm_kSqrtHanning[i]));
+    tmp32x4_0 = vmull_s16(tmp16x4_0, tmp16x4_1);
+
+    __asm__("vshrn.i32 d20, %q0, #14" : : "w"(tmp32x4_0) : "d20");
+    __asm__("vst2.16 {d20, d21}, [%0, :128]" : : "r"(&fft[j]) : "q10");
+
+    // fft[PART_LEN2 + j] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+    //      (time_signal[PART_LEN + i] << time_signal_scaling),
+    //       WebRtcAecm_kSqrtHanning[PART_LEN - i], 14);
+    // The reversed window table lets this load use ascending indices.
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&time_signal[i + PART_LEN]));
+    tmp16x4_0 = vshl_s16(tmp16x4_0, tmp16x4_scaling);
+
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_1) : "r"(&kSqrtHanningReversed[i]));
+    tmp32x4_0 = vmull_s16(tmp16x4_0, tmp16x4_1);
+
+    __asm__("vshrn.i32 d20, %q0, #14" : : "w"(tmp32x4_0) : "d20");
+    __asm__("vst2.16 {d20, d21}, [%0, :128]" : : "r"(&fft[PART_LEN2 + j]) : "q10");
+  }
+
+  WebRtcSpl_ComplexBitReverse(fft, PART_LEN_SHIFT);
+  WebRtcSpl_ComplexFFT(fft, PART_LEN_SHIFT, 1);
+
+  // Take only the first PART_LEN2 samples, and switch the sign of the imaginary part.
+  for (i = 0, j = 0; j < PART_LEN2; i += 8, j += 16) {
+    __asm__("vld2.16 {d20, d21, d22, d23}, [%0, :256]" : : "r"(&fft[j]) : "q10", "q11");
+    // NOTE(review): d22 lives in q11, so this clobber should name "q11",
+    // not "q10" (the next line has it right). Harmless with the current
+    // register usage, but worth fixing.
+    __asm__("vneg.s16 d22, d22" : : : "q10");
+    __asm__("vneg.s16 d23, d23" : : : "q11");
+    __asm__("vst2.16 {d20, d21, d22, d23}, [%0, :256]" : :
+            "r"(&freq_signal[i].real): "q10", "q11");
+  }
+}
+
+// NEON implementation of the InverseFFTAndWindow function pointer:
+// reconstructs the full conjugate-symmetric spectrum from |efw|, runs the
+// inverse complex FFT, applies the sqrt-Hanning synthesis window with
+// overlap-add through aecm->outBuf, writes one block to |output|, and shifts
+// the xBuf/dBufNoisy/dBufClean history buffers down by PART_LEN.
+// The i += 16 shift loops assume PART_LEN is a multiple of 16 (true for both
+// the 32- and 64-sample configurations in aecm_core.h).
+static void InverseFFTAndWindowNeon(AecmCore_t* aecm,
+                                    WebRtc_Word16* fft,
+                                    complex16_t* efw,
+                                    WebRtc_Word16* output,
+                                    const WebRtc_Word16* nearendClean) {
+  int i, j, outCFFT;
+  WebRtc_Word32 tmp32no1;
+
+  // Synthesis
+  // Build the conjugate-symmetric upper half of the spectrum in fft[].
+  for (i = 0, j = 0; i < PART_LEN; i += 4, j += 8) {
+    // We overwrite two more elements in fft[], but it's ok.
+    __asm__("vld2.16 {d20, d21}, [%0, :128]" : : "r"(&(efw[i].real)) : "q10");
+    __asm__("vmov q11, q10" : : : "q10", "q11");
+
+    __asm__("vneg.s16 d23, d23" : : : "q11");
+    __asm__("vst2.16 {d22, d23}, [%0, :128]" : : "r"(&fft[j]): "q11");
+
+    __asm__("vrev64.16 q10, q10" : : : "q10");
+    __asm__("vst2.16 {d20, d21}, [%0]" : : "r"(&fft[PART_LEN4 - j - 6]): "q10");
+  }
+
+  // Nyquist bin (bin PART_LEN) handled in scalar code.
+  fft[PART_LEN2] = efw[PART_LEN].real;
+  fft[PART_LEN2 + 1] = -efw[PART_LEN].imag;
+
+  // Inverse FFT, result should be scaled with outCFFT.
+  WebRtcSpl_ComplexBitReverse(fft, PART_LEN_SHIFT);
+  outCFFT = WebRtcSpl_ComplexIFFT(fft, PART_LEN_SHIFT, 1);
+
+  // Take only the real values and scale with outCFFT.
+  // (De-interleaves the complex result in place: real parts end up in
+  // fft[0..PART_LEN2-1].)
+  for (i = 0, j = 0; i < PART_LEN2; i += 8, j += 16) {
+    __asm__("vld2.16 {d20, d21, d22, d23}, [%0, :256]" : : "r"(&fft[j]) : "q10", "q11");
+    __asm__("vst1.16 {d20, d21}, [%0, :128]" : : "r"(&fft[i]): "q10");
+  }
+
+  // Broadcast the Q-domain correction shift for use by vshl below.
+  int32x4_t tmp32x4_2;
+  __asm__("vdup.32 %q0, %1" : "=w"(tmp32x4_2) : "r"((WebRtc_Word32)
+      (outCFFT - aecm->dfaCleanQDomain)));
+  for (i = 0; i < PART_LEN; i += 4) {
+    int16x4_t tmp16x4_0;
+    int16x4_t tmp16x4_1;
+    int32x4_t tmp32x4_0;
+    int32x4_t tmp32x4_1;
+
+    // fft[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+    //        fft[i], WebRtcAecm_kSqrtHanning[i], 14);
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&fft[i]));
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_1) : "r"(&WebRtcAecm_kSqrtHanning[i]));
+    __asm__("vmull.s16 %q0, %P1, %P2" : "=w"(tmp32x4_0) : "w"(tmp16x4_0), "w"(tmp16x4_1));
+    __asm__("vrshr.s32 %q0, %q1, #14" : "=w"(tmp32x4_0) : "0"(tmp32x4_0));
+
+    // tmp32no1 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32)fft[i],
+    //        outCFFT - aecm->dfaCleanQDomain);
+    __asm__("vshl.s32 %q0, %q1, %q2" : "=w"(tmp32x4_0) : "0"(tmp32x4_0), "w"(tmp32x4_2));
+
+    // fft[i] = (WebRtc_Word16)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX,
+    //        tmp32no1 + outBuf[i], WEBRTC_SPL_WORD16_MIN);
+    // output[i] = fft[i];
+    // Overlap-add with the previous block's tail, saturating to 16 bits
+    // (vqshrn with shift 0 is a saturating 32->16 narrow).
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&aecm->outBuf[i]));
+    __asm__("vmovl.s16 %q0, %P1" : "=w"(tmp32x4_1) : "w"(tmp16x4_0));
+    __asm__("vadd.i32 %q0, %q1" : : "w"(tmp32x4_0), "w"(tmp32x4_1));
+    __asm__("vqshrn.s32 %P0, %q1, #0" : "=w"(tmp16x4_0) : "w"(tmp32x4_0));
+    __asm__("vst1.16 %P0, [%1, :64]" : : "w"(tmp16x4_0), "r"(&fft[i]));
+    __asm__("vst1.16 %P0, [%1, :64]" : : "w"(tmp16x4_0), "r"(&output[i]));
+
+    // tmp32no1 = WEBRTC_SPL_MUL_16_16_RSFT(
+    //        fft[PART_LEN + i], WebRtcAecm_kSqrtHanning[PART_LEN - i], 14);
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&fft[PART_LEN + i]));
+    __asm__("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_1) : "r"(&kSqrtHanningReversed[i]));
+    __asm__("vmull.s16 %q0, %P1, %P2" : "=w"(tmp32x4_0) : "w"(tmp16x4_0), "w"(tmp16x4_1));
+    __asm__("vshr.s32 %q0, %q1, #14" : "=w"(tmp32x4_0) : "0"(tmp32x4_0));
+
+    // tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, outCFFT - aecm->dfaCleanQDomain);
+    __asm__("vshl.s32 %q0, %q1, %q2" : "=w"(tmp32x4_0) : "0"(tmp32x4_0), "w"(tmp32x4_2));
+    // outBuf[i] = (WebRtc_Word16)WEBRTC_SPL_SAT(
+    //        WEBRTC_SPL_WORD16_MAX, tmp32no1, WEBRTC_SPL_WORD16_MIN);
+    __asm__("vqshrn.s32 %P0, %q1, #0" : "=w"(tmp16x4_0) : "w"(tmp32x4_0));
+    __asm__("vst1.16 %P0, [%1, :64]" : : "w"(tmp16x4_0), "r"(&aecm->outBuf[i]));
+  }
+
+  // Copy the current block to the old position (outBuf is shifted elsewhere).
+  for (i = 0; i < PART_LEN; i += 16) {
+    __asm__("vld1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+            "r"(&aecm->xBuf[i + PART_LEN]) : "q10");
+    __asm__("vst1.16 {d20, d21, d22, d23}, [%0, :256]" : : "r"(&aecm->xBuf[i]): "q10");
+  }
+  for (i = 0; i < PART_LEN; i += 16) {
+    __asm__("vld1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+            "r"(&aecm->dBufNoisy[i + PART_LEN]) : "q10");
+    __asm__("vst1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+            "r"(&aecm->dBufNoisy[i]): "q10");
+  }
+  // dBufClean is only maintained when a noise-suppressed near-end is supplied.
+  if (nearendClean != NULL) {
+    for (i = 0; i < PART_LEN; i += 16) {
+      __asm__("vld1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+              "r"(&aecm->dBufClean[i + PART_LEN]) : "q10");
+      __asm__("vst1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+              "r"(&aecm->dBufClean[i]): "q10");
+    }
+  }
+}
+
+// NEON implementation of the CalcLinearEnergies function pointer: sums the
+// far-end spectrum and the stored/adaptive channel echo estimates over all
+// PART_LEN1 bins, writing the per-bin stored-channel estimates to |echo_est|.
+// The vectorized loop covers bins 0..PART_LEN-1 in groups of 8; the scalar
+// tail after the loop (where i == PART_LEN) handles the final bin.
+static void CalcLinearEnergiesNeon(AecmCore_t* aecm,
+                                   const WebRtc_UWord16* far_spectrum,
+                                   WebRtc_Word32* echo_est,
+                                   WebRtc_UWord32* far_energy,
+                                   WebRtc_UWord32* echo_energy_adapt,
+                                   WebRtc_UWord32* echo_energy_stored) {
+  int i;
+
+  register WebRtc_UWord32 far_energy_r;
+  register WebRtc_UWord32 echo_energy_stored_r;
+  register WebRtc_UWord32 echo_energy_adapt_r;
+  uint32x4_t tmp32x4_0;
+
+  // Vector accumulators: q14 = far energy, q8 = stored-channel echo energy,
+  // q9 = adaptive-channel echo energy.
+  __asm__("vmov.i32 q14, #0" : : : "q14"); // far_energy
+  __asm__("vmov.i32 q8,  #0" : : : "q8"); // echo_energy_stored
+  __asm__("vmov.i32 q9,  #0" : : : "q9"); // echo_energy_adapt
+
+  for (i = 0; i < PART_LEN - 7; i += 8) {
+    // far_energy += (WebRtc_UWord32)(far_spectrum[i]);
+    __asm__("vld1.16 {d26, d27}, [%0]" : : "r"(&far_spectrum[i]) : "q13");
+    __asm__("vaddw.u16 q14, q14, d26" : : : "q14", "q13");
+    __asm__("vaddw.u16 q14, q14, d27" : : : "q14", "q13");
+
+    // Get estimated echo energies for adaptive channel and stored channel.
+    // echoEst[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], far_spectrum[i]);
+    __asm__("vld1.16 {d24, d25}, [%0, :128]" : : "r"(&aecm->channelStored[i]) : "q12");
+    __asm__("vmull.u16 q10, d26, d24" : : : "q12", "q13", "q10");
+    __asm__("vmull.u16 q11, d27, d25" : : : "q12", "q13", "q11");
+    __asm__("vst1.32 {d20, d21, d22, d23}, [%0, :256]" : : "r"(&echo_est[i]):
+            "q10", "q11");
+
+    // echo_energy_stored += (WebRtc_UWord32)echoEst[i];
+    __asm__("vadd.u32 q8, q10" : : : "q10", "q8");
+    __asm__("vadd.u32 q8, q11" : : : "q11", "q8");
+
+    // echo_energy_adapt += WEBRTC_SPL_UMUL_16_16(
+    //     aecm->channelAdapt16[i], far_spectrum[i]);
+    __asm__("vld1.16 {d24, d25}, [%0, :128]" : : "r"(&aecm->channelAdapt16[i]) : "q12");
+    __asm__("vmull.u16 q10, d26, d24" : : : "q12", "q13", "q10");
+    __asm__("vmull.u16 q11, d27, d25" : : : "q12", "q13", "q11");
+    // NOTE(review): the clobber list below names q15, which this instruction
+    // does not touch — it was most likely meant to be q10.
+    __asm__("vadd.u32 q9, q10" : : : "q9", "q15");
+    __asm__("vadd.u32 q9, q11" : : : "q9", "q11");
+  }
+
+  // Horizontal reductions of the three vector accumulators into scalars.
+  __asm__("vadd.u32 d28, d29" : : : "q14");
+  __asm__("vpadd.u32 d28, d28" : : : "q14");
+  __asm__("vmov.32 %0, d28[0]" : "=r"(far_energy_r): : "q14");
+
+  __asm__("vadd.u32 d18, d19" : : : "q9");
+  __asm__("vpadd.u32 d18, d18" : : : "q9");
+  __asm__("vmov.32 %0, d18[0]" : "=r"(echo_energy_adapt_r): : "q9");
+
+  __asm__("vadd.u32 d16, d17" : : : "q8");
+  __asm__("vpadd.u32 d16, d16" : : : "q8");
+  __asm__("vmov.32 %0, d16[0]" : "=r"(echo_energy_stored_r): : "q8");
+
+  // Get estimated echo energies for adaptive channel and stored channel.
+  // Scalar tail: i == PART_LEN here, i.e. the last of the PART_LEN1 bins.
+  echo_est[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], far_spectrum[i]);
+  *echo_energy_stored = echo_energy_stored_r + (WebRtc_UWord32)echo_est[i];
+  *far_energy = far_energy_r + (WebRtc_UWord32)(far_spectrum[i]);
+  *echo_energy_adapt = echo_energy_adapt_r + WEBRTC_SPL_UMUL_16_16(
+      aecm->channelAdapt16[i], far_spectrum[i]);
+}
+
+// NEON implementation of the StoreAdaptiveChannel function pointer: copies
+// the adaptive channel into the stored channel and recomputes the per-bin
+// echo estimates from the (newly) stored channel. The vector loop covers
+// bins 0..PART_LEN-1 in groups of 8; the scalar tail (i == PART_LEN) handles
+// the final bin of the PART_LEN1 spectrum.
+static void StoreAdaptiveChannelNeon(AecmCore_t* aecm,
+                                     const WebRtc_UWord16* far_spectrum,
+                                     WebRtc_Word32* echo_est) {
+  int i;
+
+  // During startup we store the channel every block.
+  // Recalculate echo estimate.
+  for (i = 0; i < PART_LEN - 7; i += 8) {
+    // aecm->channelStored[i] = acem->channelAdapt16[i];
+    // echo_est[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], far_spectrum[i]);
+    __asm__("vld1.16 {d26, d27}, [%0]" : : "r"(&far_spectrum[i]) : "q13");
+    __asm__("vld1.16 {d24, d25}, [%0, :128]" : : "r"(&aecm->channelAdapt16[i]) : "q12");
+    __asm__("vst1.16 {d24, d25}, [%0, :128]" : : "r"(&aecm->channelStored[i]) : "q12");
+    __asm__("vmull.u16 q10, d26, d24" : : : "q12", "q13", "q10");
+    __asm__("vmull.u16 q11, d27, d25" : : : "q12", "q13", "q11");
+    __asm__("vst1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+            "r"(&echo_est[i]) : "q10", "q11");
+  }
+  // Scalar tail for the last bin.
+  aecm->channelStored[i] = aecm->channelAdapt16[i];
+  echo_est[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], far_spectrum[i]);
+}
+
+// NEON implementation of the ResetAdaptiveChannel function pointer: restores
+// the adaptive channel (both the 16-bit and 32-bit representations) from the
+// stored channel. The 32-bit copy is the stored value shifted left 16 bits.
+// Vector loop covers bins 0..PART_LEN-1; the scalar tail (i == PART_LEN)
+// handles the last of the PART_LEN1 bins.
+static void ResetAdaptiveChannelNeon(AecmCore_t* aecm) {
+  int i;
+
+  for (i = 0; i < PART_LEN - 7; i += 8) {
+    // aecm->channelAdapt16[i] = aecm->channelStored[i];
+    // aecm->channelAdapt32[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)
+    //                           aecm->channelStored[i], 16);
+    __asm__("vld1.16 {d24, d25}, [%0, :128]" : :
+            "r"(&aecm->channelStored[i]) : "q12");
+    __asm__("vst1.16 {d24, d25}, [%0, :128]" : :
+            "r"(&aecm->channelAdapt16[i]) : "q12");
+    __asm__("vshll.s16 q10, d24, #16" : : : "q12", "q13", "q10");
+    __asm__("vshll.s16 q11, d25, #16" : : : "q12", "q13", "q11");
+    __asm__("vst1.16 {d20, d21, d22, d23}, [%0, :256]" : :
+            "r"(&aecm->channelAdapt32[i]): "q10", "q11");
+  }
+  // Scalar tail for the last bin.
+  aecm->channelAdapt16[i] = aecm->channelStored[i];
+  aecm->channelAdapt32[i] = WEBRTC_SPL_LSHIFT_W32(
+      (WebRtc_Word32)aecm->channelStored[i], 16);
+}
+
+// Routes the AECM internal function pointers (declared in aecm_core.h) to
+// the NEON-optimized implementations defined in this file. Must be called
+// before any of those pointers are used on a NEON-capable target.
+void WebRtcAecm_InitNeon(void) {
+  WebRtcAecm_CalcLinearEnergies = CalcLinearEnergiesNeon;
+  WebRtcAecm_StoreAdaptiveChannel = StoreAdaptiveChannelNeon;
+  WebRtcAecm_ResetAdaptiveChannel = ResetAdaptiveChannelNeon;
+  WebRtcAecm_WindowAndFFT = WindowAndFFTNeon;
+  WebRtcAecm_InverseFFTAndWindow = InverseFFTAndWindowNeon;
+}
diff --git a/trunk/src/modules/audio_processing/aecm/echo_control_mobile.c b/trunk/src/modules/audio_processing/aecm/echo_control_mobile.c
new file mode 100644
index 0000000..566ae00
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/echo_control_mobile.c
@@ -0,0 +1,792 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "echo_control_mobile.h"
+#include "aecm_core.h"
+#include "ring_buffer.h"
+#ifdef AEC_DEBUG
+#include <stdio.h>
+#endif
+#ifdef MAC_IPHONE_PRINT
+#include <time.h>
+#include <stdio.h>
+#elif defined ARM_WINM_LOG
+#include "windows.h"
+extern HANDLE logFile;
+#endif
+
+#define BUF_SIZE_FRAMES 50 // buffer size (frames)
+// Maximum length of resampled signal. Must be an integer multiple of frames
+// (ceil(1/(1 + MIN_SKEW)*2) + 1)*FRAME_LEN
+// The factor of 2 handles wb, and the + 1 is as a safety margin
+#define MAX_RESAMP_LEN (5 * FRAME_LEN)
+
+static const size_t kBufSizeSamp = BUF_SIZE_FRAMES * FRAME_LEN; // buffer size (samples)
+static const int kSampMsNb = 8; // samples per ms in nb
+// Target suppression levels for nlp modes
+// log{0.001, 0.00001, 0.00000001}
+static const int kInitCheck = 42;
+
+typedef struct
+{
+    int sampFreq;
+    int scSampFreq;
+    short bufSizeStart;
+    int knownDelay;
+
+    // Stores the last frame added to the farend buffer
+    short farendOld[2][FRAME_LEN];
+    short initFlag; // indicates if AEC has been initialized
+
+    // Variables used for averaging far end buffer size
+    short counter;
+    short sum;
+    short firstVal;
+    short checkBufSizeCtr;
+
+    // Variables used for delay shifts
+    short msInSndCardBuf;
+    short filtDelay;
+    int timeForDelayChange;
+    int ECstartup;
+    int checkBuffSize;
+    int delayChange;
+    short lastDelayDiff;
+
+    WebRtc_Word16 echoMode;
+
+#ifdef AEC_DEBUG
+    FILE *bufFile;
+    FILE *delayFile;
+    FILE *preCompFile;
+    FILE *postCompFile;
+#endif // AEC_DEBUG
+    // Structures
+    void *farendBuf;
+
+    int lastError;
+
+    AecmCore_t *aecmCore;
+} aecmob_t;
+
+// Estimates delay to set the position of the farend buffer read pointer
+// (controlled by knownDelay)
+static int WebRtcAecm_EstBufDelay(aecmob_t *aecmInst, short msInSndCardBuf);
+
+// Stuffs the farend buffer if the estimated delay is too large
+static int WebRtcAecm_DelayComp(aecmob_t *aecmInst);
+
+// Allocates an AECM instance and its sub-objects. On success *aecmInst
+// holds the new instance (call WebRtcAecm_Init() before use); on failure
+// *aecmInst is set to NULL and -1 is returned.
+WebRtc_Word32 WebRtcAecm_Create(void **aecmInst)
+{
+    aecmob_t *aecm;
+    if (aecmInst == NULL)
+    {
+        return -1;
+    }
+
+    aecm = malloc(sizeof(aecmob_t));
+    *aecmInst = aecm;
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+    if (WebRtcAecm_CreateCore(&aecm->aecmCore) == -1)
+    {
+        // Clean up directly instead of calling WebRtcAecm_Free(): Free()
+        // would release the not-yet-created farend buffer (undefined
+        // behavior) and, in AEC_DEBUG builds, fclose() unopened files.
+        free(aecm);
+        *aecmInst = NULL; // don't leave the caller with a dangling pointer
+        return -1;
+    }
+
+    if (WebRtc_CreateBuffer(&aecm->farendBuf, kBufSizeSamp,
+                            sizeof(int16_t)) == -1)
+    {
+        // The core was created successfully above, so it must be freed.
+        WebRtcAecm_FreeCore(aecm->aecmCore);
+        free(aecm);
+        *aecmInst = NULL;
+        return -1;
+    }
+
+    aecm->initFlag = 0;
+    aecm->lastError = 0;
+
+#ifdef AEC_DEBUG
+    aecm->aecmCore->farFile = fopen("aecFar.pcm","wb");
+    aecm->aecmCore->nearFile = fopen("aecNear.pcm","wb");
+    aecm->aecmCore->outFile = fopen("aecOut.pcm","wb");
+    //aecm->aecmCore->outLpFile = fopen("aecOutLp.pcm","wb");
+
+    aecm->bufFile = fopen("aecBuf.dat", "wb");
+    aecm->delayFile = fopen("aecDelay.dat", "wb");
+    aecm->preCompFile = fopen("preComp.pcm", "wb");
+    aecm->postCompFile = fopen("postComp.pcm", "wb");
+#endif // AEC_DEBUG
+    return 0;
+}
+
+// Releases everything allocated by WebRtcAecm_Create(): the AECM core, the
+// farend ring buffer and, in AEC_DEBUG builds, the debug dump files.
+// Returns 0 on success, -1 if aecmInst is NULL. The instance must have been
+// fully created; debug FILE pointers are closed unconditionally.
+WebRtc_Word32 WebRtcAecm_Free(void *aecmInst)
+{
+    aecmob_t *aecm = aecmInst;
+
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+#ifdef AEC_DEBUG
+    fclose(aecm->aecmCore->farFile);
+    fclose(aecm->aecmCore->nearFile);
+    fclose(aecm->aecmCore->outFile);
+    //fclose(aecm->aecmCore->outLpFile);
+
+    fclose(aecm->bufFile);
+    fclose(aecm->delayFile);
+    fclose(aecm->preCompFile);
+    fclose(aecm->postCompFile);
+#endif // AEC_DEBUG
+    // Free sub-objects before the instance itself.
+    WebRtcAecm_FreeCore(aecm->aecmCore);
+    WebRtc_FreeBuffer(aecm->farendBuf);
+    free(aecm);
+
+    return 0;
+}
+
+// (Re)initializes an AECM instance for 8 or 16 kHz operation: resets the
+// core and the farend buffer, clears the startup/delay tracking state and
+// applies the default configuration (CNG on, echo mode 3).
+// Returns 0 on success, -1 on error (lastError is set when possible).
+WebRtc_Word32 WebRtcAecm_Init(void *aecmInst, WebRtc_Word32 sampFreq)
+{
+    aecmob_t *aecm = aecmInst;
+    AecmConfig aecConfig;
+
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+    // Only narrowband (8 kHz) and wideband (16 kHz) are supported.
+    if (sampFreq != 8000 && sampFreq != 16000)
+    {
+        aecm->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    aecm->sampFreq = sampFreq;
+
+    // Initialize AECM core
+    if (WebRtcAecm_InitCore(aecm->aecmCore, aecm->sampFreq) == -1)
+    {
+        aecm->lastError = AECM_UNSPECIFIED_ERROR;
+        return -1;
+    }
+
+    // Initialize farend buffer
+    if (WebRtc_InitBuffer(aecm->farendBuf) == -1)
+    {
+        aecm->lastError = AECM_UNSPECIFIED_ERROR;
+        return -1;
+    }
+
+    aecm->initFlag = kInitCheck; // indicates that initialization has been done
+
+    aecm->delayChange = 1;
+
+    aecm->sum = 0;
+    aecm->counter = 0;
+    aecm->checkBuffSize = 1;
+    aecm->firstVal = 0;
+
+    aecm->ECstartup = 1;
+    aecm->bufSizeStart = 0;
+    aecm->checkBufSizeCtr = 0;
+    aecm->filtDelay = 0;
+    aecm->timeForDelayChange = 0;
+    aecm->knownDelay = 0;
+    aecm->lastDelayDiff = 0;
+
+    // Clear the whole farend history. farendOld is short[2][FRAME_LEN]
+    // (2 * 80 * 2 = 320 bytes); the previous fixed-size memset of 160 bytes
+    // cleared only the first block and left the wideband block dirty.
+    memset(&aecm->farendOld[0][0], 0, sizeof(aecm->farendOld));
+
+    // Default settings.
+    aecConfig.cngMode = AecmTrue;
+    aecConfig.echoMode = 3;
+
+    if (WebRtcAecm_set_config(aecm, aecConfig) == -1)
+    {
+        aecm->lastError = AECM_UNSPECIFIED_ERROR;
+        return -1;
+    }
+
+    return 0;
+}
+
+// Queues one frame (80 or 160 samples) of farend signal into the internal
+// ring buffer. Returns 0 on success, -1 on error (lastError is set).
+WebRtc_Word32 WebRtcAecm_BufferFarend(void *aecmInst, const WebRtc_Word16 *farend,
+                                      WebRtc_Word16 nrOfSamples)
+{
+    aecmob_t *self = aecmInst;
+
+    // Validate handle, pointer, init state and frame size, in that order.
+    if (self == NULL)
+    {
+        return -1;
+    }
+    if (farend == NULL)
+    {
+        self->lastError = AECM_NULL_POINTER_ERROR;
+        return -1;
+    }
+    if (self->initFlag != kInitCheck)
+    {
+        self->lastError = AECM_UNINITIALIZED_ERROR;
+        return -1;
+    }
+    if (nrOfSamples != 80 && nrOfSamples != 160)
+    {
+        self->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+
+    // TODO: Is this really a good idea?
+    if (!self->ECstartup)
+    {
+        WebRtcAecm_DelayComp(self);
+    }
+
+    WebRtc_WriteBuffer(self->farendBuf, farend, (size_t) nrOfSamples);
+
+    return 0;
+}
+
+// Runs the AECM on one 80- or 160-sample frame. During the startup phase
+// the nearend is passed through unprocessed while the farend/soundcard
+// buffer levels are allowed to stabilize; afterwards each FRAME_LEN block
+// is processed by WebRtcAecm_ProcessFrame(). Returns 0 on success, -1 on
+// error or warning (lastError is set).
+WebRtc_Word32 WebRtcAecm_Process(void *aecmInst, const WebRtc_Word16 *nearendNoisy,
+                                 const WebRtc_Word16 *nearendClean, WebRtc_Word16 *out,
+                                 WebRtc_Word16 nrOfSamples, WebRtc_Word16 msInSndCardBuf)
+{
+    aecmob_t *aecm = aecmInst;
+    WebRtc_Word32 retVal = 0;
+    short i;
+    short nmbrOfFilledBuffers;
+    short nBlocks10ms;
+    short nFrames;
+#ifdef AEC_DEBUG
+    short msInAECBuf;
+#endif
+
+#ifdef ARM_WINM_LOG
+    __int64 freq, start, end, diff;
+    unsigned int milliseconds;
+    DWORD temp;
+#elif defined MAC_IPHONE_PRINT
+    //       double endtime = 0, starttime = 0;
+    struct timeval starttime;
+    struct timeval endtime;
+    static long int timeused = 0;
+    static int timecount = 0;
+#endif
+
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+    if (nearendNoisy == NULL)
+    {
+        aecm->lastError = AECM_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (out == NULL)
+    {
+        aecm->lastError = AECM_NULL_POINTER_ERROR;
+        return -1;
+    }
+
+    if (aecm->initFlag != kInitCheck)
+    {
+        aecm->lastError = AECM_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    if (nrOfSamples != 80 && nrOfSamples != 160)
+    {
+        aecm->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+
+    // Clamp the reported soundcard delay to [0, 500] ms; out-of-range
+    // values are a warning, not a hard error (processing continues).
+    if (msInSndCardBuf < 0)
+    {
+        msInSndCardBuf = 0;
+        aecm->lastError = AECM_BAD_PARAMETER_WARNING;
+        retVal = -1;
+    } else if (msInSndCardBuf > 500)
+    {
+        msInSndCardBuf = 500;
+        aecm->lastError = AECM_BAD_PARAMETER_WARNING;
+        retVal = -1;
+    }
+    msInSndCardBuf += 10;
+    aecm->msInSndCardBuf = msInSndCardBuf;
+
+    nFrames = nrOfSamples / FRAME_LEN;
+    nBlocks10ms = nFrames / aecm->aecmCore->mult;
+
+    if (aecm->ECstartup)
+    {
+        // Startup phase: pass the nearend through unprocessed (prefer the
+        // clean signal when provided).
+        if (nearendClean == NULL)
+        {
+            if (out != nearendNoisy)
+            {
+                memcpy(out, nearendNoisy, sizeof(short) * nrOfSamples);
+            }
+        } else if (out != nearendClean)
+        {
+            memcpy(out, nearendClean, sizeof(short) * nrOfSamples);
+        }
+
+        nmbrOfFilledBuffers =
+            (short) WebRtc_available_read(aecm->farendBuf) / FRAME_LEN;
+        // The AECM is in the start up mode
+        // AECM is disabled until the soundcard buffer and farend buffers are OK
+
+        // Mechanism to ensure that the soundcard buffer is reasonably stable.
+        if (aecm->checkBuffSize)
+        {
+            aecm->checkBufSizeCtr++;
+            // Before we fill up the far end buffer we require the amount of data on the
+            // sound card to be stable (+/-8 ms) compared to the first value. This
+            // comparison is made during the following 4 consecutive frames. If it seems
+            // to be stable then we start to fill up the far end buffer.
+
+            if (aecm->counter == 0)
+            {
+                aecm->firstVal = aecm->msInSndCardBuf;
+                aecm->sum = 0;
+            }
+
+            if (abs(aecm->firstVal - aecm->msInSndCardBuf)
+                    < WEBRTC_SPL_MAX(0.2 * aecm->msInSndCardBuf, kSampMsNb))
+            {
+                aecm->sum += aecm->msInSndCardBuf;
+                aecm->counter++;
+            } else
+            {
+                aecm->counter = 0;
+            }
+
+            if (aecm->counter * nBlocks10ms >= 6)
+            {
+                // The farend buffer size is determined in blocks of 80 samples
+                // Use 75% of the average value of the soundcard buffer
+                aecm->bufSizeStart
+                        = WEBRTC_SPL_MIN((3 * aecm->sum
+                                        * aecm->aecmCore->mult) / (aecm->counter * 40), BUF_SIZE_FRAMES);
+                // buffersize has now been determined
+                aecm->checkBuffSize = 0;
+            }
+
+            if (aecm->checkBufSizeCtr * nBlocks10ms > 50)
+            {
+                // for really bad sound cards, don't disable echocanceller for more than 0.5 sec
+                aecm->bufSizeStart = WEBRTC_SPL_MIN((3 * aecm->msInSndCardBuf
+                                * aecm->aecmCore->mult) / 40, BUF_SIZE_FRAMES);
+                aecm->checkBuffSize = 0;
+            }
+        }
+
+        // if checkBuffSize changed in the if-statement above
+        if (!aecm->checkBuffSize)
+        {
+            // soundcard buffer is now reasonably stable
+            // When the far end buffer is filled with approximately the same amount of
+            // data as the amount on the sound card we end the start up phase and start
+            // to cancel echoes.
+
+            if (nmbrOfFilledBuffers == aecm->bufSizeStart)
+            {
+                aecm->ECstartup = 0; // Enable the AECM
+            } else if (nmbrOfFilledBuffers > aecm->bufSizeStart)
+            {
+                // Too much farend data buffered: drop the excess so the read
+                // pointer lags the write pointer by bufSizeStart frames.
+                WebRtc_MoveReadPtr(aecm->farendBuf,
+                                   (int) WebRtc_available_read(aecm->farendBuf)
+                                   - (int) aecm->bufSizeStart * FRAME_LEN);
+                aecm->ECstartup = 0;
+            }
+        }
+
+    } else
+    {
+        // AECM is enabled
+
+        // Note only 1 block supported for nb and 2 blocks for wb
+        for (i = 0; i < nFrames; i++)
+        {
+            int16_t farend[FRAME_LEN];
+            const int16_t* farend_ptr = NULL;
+
+            nmbrOfFilledBuffers =
+                (short) WebRtc_available_read(aecm->farendBuf) / FRAME_LEN;
+
+            // Check that there is data in the far end buffer
+            if (nmbrOfFilledBuffers > 0)
+            {
+                // Get the next 80 samples from the farend buffer
+                WebRtc_ReadBuffer(aecm->farendBuf, (void**) &farend_ptr, farend,
+                                  FRAME_LEN);
+
+                // Always store the last frame for use when we run out of data
+                memcpy(&(aecm->farendOld[i][0]), farend_ptr,
+                       FRAME_LEN * sizeof(short));
+            } else
+            {
+                // We have no data so we use the last played frame
+                memcpy(farend, &(aecm->farendOld[i][0]), FRAME_LEN * sizeof(short));
+                farend_ptr = farend;
+            }
+
+            // Call buffer delay estimator when all data is extracted,
+            // i,e. i = 0 for NB and i = 1 for WB
+            if ((i == 0 && aecm->sampFreq == 8000) || (i == 1 && aecm->sampFreq == 16000))
+            {
+                WebRtcAecm_EstBufDelay(aecm, aecm->msInSndCardBuf);
+            }
+
+#ifdef ARM_WINM_LOG
+            // measure tick start
+            QueryPerformanceFrequency((LARGE_INTEGER*)&freq);
+            QueryPerformanceCounter((LARGE_INTEGER*)&start);
+#elif defined MAC_IPHONE_PRINT
+            //            starttime = clock()/(double)CLOCKS_PER_SEC;
+            gettimeofday(&starttime, NULL);
+#endif
+            // Call the AECM
+            /*WebRtcAecm_ProcessFrame(aecm->aecmCore, farend, &nearend[FRAME_LEN * i],
+             &out[FRAME_LEN * i], aecm->knownDelay);*/
+            if (nearendClean == NULL)
+            {
+                if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
+                                            farend_ptr,
+                                            &nearendNoisy[FRAME_LEN * i],
+                                            NULL,
+                                            &out[FRAME_LEN * i]) == -1)
+                {
+                    return -1;
+                }
+            } else
+            {
+                if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
+                                            farend_ptr,
+                                            &nearendNoisy[FRAME_LEN * i],
+                                            &nearendClean[FRAME_LEN * i],
+                                            &out[FRAME_LEN * i]) == -1)
+                {
+                    return -1;
+                }
+            }
+
+#ifdef ARM_WINM_LOG
+
+            // measure tick end
+            QueryPerformanceCounter((LARGE_INTEGER*)&end);
+
+            if(end > start)
+            {
+                diff = ((end - start) * 1000) / (freq/1000);
+                milliseconds = (unsigned int)(diff & 0xffffffff);
+                WriteFile (logFile, &milliseconds, sizeof(unsigned int), &temp, NULL);
+            }
+#elif defined MAC_IPHONE_PRINT
+            //            endtime = clock()/(double)CLOCKS_PER_SEC;
+            //            printf("%f\n", endtime - starttime);
+
+            gettimeofday(&endtime, NULL);
+
+            if( endtime.tv_usec > starttime.tv_usec)
+            {
+                timeused += endtime.tv_usec - starttime.tv_usec;
+            } else
+            {
+                timeused += endtime.tv_usec + 1000000 - starttime.tv_usec;
+            }
+
+            if(++timecount == 1000)
+            {
+                timecount = 0;
+                printf("AEC: %ld\n", timeused);
+                timeused = 0;
+            }
+#endif
+
+        }
+    }
+
+#ifdef AEC_DEBUG
+    msInAECBuf = (short) WebRtc_available_read(aecm->farendBuf) /
+        (kSampMsNb * aecm->aecmCore->mult);
+    fwrite(&msInAECBuf, 2, 1, aecm->bufFile);
+    fwrite(&(aecm->knownDelay), sizeof(aecm->knownDelay), 1, aecm->delayFile);
+#endif
+
+    return retVal;
+}
+
+// Applies a configuration at runtime: comfort-noise mode and echo mode.
+// Echo modes 0-4 select progressively stronger suppression gains.
+// Returns 0 on success, -1 on error (lastError is set).
+WebRtc_Word32 WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
+{
+    aecmob_t *self = aecmInst;
+    AecmCore_t *core;
+
+    if (self == NULL)
+    {
+        return -1;
+    }
+    if (self->initFlag != kInitCheck)
+    {
+        self->lastError = AECM_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    if (config.cngMode != AecmFalse && config.cngMode != AecmTrue)
+    {
+        self->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    core = self->aecmCore;
+    core->cngMode = config.cngMode;
+
+    if (config.echoMode < 0 || config.echoMode > 4)
+    {
+        self->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    self->echoMode = config.echoMode;
+
+    // Each echo mode scales the suppression-gain parameters by a fixed
+    // power of two: modes 0-2 attenuate (>>3, >>2, >>1), mode 3 uses the
+    // raw defaults and mode 4 amplifies (<<1). The AB/BD differences are
+    // formed from the individually shifted terms.
+    switch (self->echoMode)
+    {
+        case 0:
+            core->supGain = SUPGAIN_DEFAULT >> 3;
+            core->supGainOld = SUPGAIN_DEFAULT >> 3;
+            core->supGainErrParamA = SUPGAIN_ERROR_PARAM_A >> 3;
+            core->supGainErrParamD = SUPGAIN_ERROR_PARAM_D >> 3;
+            core->supGainErrParamDiffAB = (SUPGAIN_ERROR_PARAM_A >> 3)
+                    - (SUPGAIN_ERROR_PARAM_B >> 3);
+            core->supGainErrParamDiffBD = (SUPGAIN_ERROR_PARAM_B >> 3)
+                    - (SUPGAIN_ERROR_PARAM_D >> 3);
+            break;
+        case 1:
+            core->supGain = SUPGAIN_DEFAULT >> 2;
+            core->supGainOld = SUPGAIN_DEFAULT >> 2;
+            core->supGainErrParamA = SUPGAIN_ERROR_PARAM_A >> 2;
+            core->supGainErrParamD = SUPGAIN_ERROR_PARAM_D >> 2;
+            core->supGainErrParamDiffAB = (SUPGAIN_ERROR_PARAM_A >> 2)
+                    - (SUPGAIN_ERROR_PARAM_B >> 2);
+            core->supGainErrParamDiffBD = (SUPGAIN_ERROR_PARAM_B >> 2)
+                    - (SUPGAIN_ERROR_PARAM_D >> 2);
+            break;
+        case 2:
+            core->supGain = SUPGAIN_DEFAULT >> 1;
+            core->supGainOld = SUPGAIN_DEFAULT >> 1;
+            core->supGainErrParamA = SUPGAIN_ERROR_PARAM_A >> 1;
+            core->supGainErrParamD = SUPGAIN_ERROR_PARAM_D >> 1;
+            core->supGainErrParamDiffAB = (SUPGAIN_ERROR_PARAM_A >> 1)
+                    - (SUPGAIN_ERROR_PARAM_B >> 1);
+            core->supGainErrParamDiffBD = (SUPGAIN_ERROR_PARAM_B >> 1)
+                    - (SUPGAIN_ERROR_PARAM_D >> 1);
+            break;
+        case 3:
+            core->supGain = SUPGAIN_DEFAULT;
+            core->supGainOld = SUPGAIN_DEFAULT;
+            core->supGainErrParamA = SUPGAIN_ERROR_PARAM_A;
+            core->supGainErrParamD = SUPGAIN_ERROR_PARAM_D;
+            core->supGainErrParamDiffAB = SUPGAIN_ERROR_PARAM_A - SUPGAIN_ERROR_PARAM_B;
+            core->supGainErrParamDiffBD = SUPGAIN_ERROR_PARAM_B - SUPGAIN_ERROR_PARAM_D;
+            break;
+        case 4:
+            core->supGain = SUPGAIN_DEFAULT << 1;
+            core->supGainOld = SUPGAIN_DEFAULT << 1;
+            core->supGainErrParamA = SUPGAIN_ERROR_PARAM_A << 1;
+            core->supGainErrParamD = SUPGAIN_ERROR_PARAM_D << 1;
+            core->supGainErrParamDiffAB = (SUPGAIN_ERROR_PARAM_A << 1)
+                    - (SUPGAIN_ERROR_PARAM_B << 1);
+            core->supGainErrParamDiffBD = (SUPGAIN_ERROR_PARAM_B << 1)
+                    - (SUPGAIN_ERROR_PARAM_D << 1);
+            break;
+    }
+
+    return 0;
+}
+
+// Reads back the currently active configuration into *config.
+// Returns 0 on success, -1 on error (lastError is set when possible).
+WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst, AecmConfig *config)
+{
+    aecmob_t *self = aecmInst;
+
+    if (self == NULL)
+    {
+        return -1;
+    }
+    if (config == NULL)
+    {
+        self->lastError = AECM_NULL_POINTER_ERROR;
+        return -1;
+    }
+    if (self->initFlag != kInitCheck)
+    {
+        self->lastError = AECM_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    config->cngMode = self->aecmCore->cngMode;
+    config->echoMode = self->echoMode;
+
+    return 0;
+}
+
+// Replaces the stored echo path with a caller-provided one.
+// size_bytes must equal WebRtcAecm_echo_path_size_bytes().
+// Returns 0 on success, -1 on error (lastError is set when possible).
+WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
+                                      const void* echo_path,
+                                      size_t size_bytes)
+{
+    aecmob_t *aecm = aecmInst;
+    const WebRtc_Word16* echo_path_ptr = echo_path;
+
+    // Check the instance first: the original combined check dereferenced
+    // aecm->lastError even when aecm itself was NULL.
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+    if (echo_path == NULL)
+    {
+        aecm->lastError = AECM_NULL_POINTER_ERROR;
+        return -1;
+    }
+    if (size_bytes != WebRtcAecm_echo_path_size_bytes())
+    {
+        // Input channel size does not match the size of AECM
+        aecm->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    if (aecm->initFlag != kInitCheck)
+    {
+        aecm->lastError = AECM_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    WebRtcAecm_InitEchoPathCore(aecm->aecmCore, echo_path_ptr);
+
+    return 0;
+}
+
+// Copies the currently stored echo path into the caller's buffer.
+// size_bytes must equal WebRtcAecm_echo_path_size_bytes().
+// Returns 0 on success, -1 on error (lastError is set when possible).
+WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
+                                     void* echo_path,
+                                     size_t size_bytes)
+{
+    aecmob_t *aecm = aecmInst;
+    WebRtc_Word16* echo_path_ptr = echo_path;
+
+    // Check the instance first: the original combined check dereferenced
+    // aecm->lastError even when aecm itself was NULL.
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+    if (echo_path == NULL)
+    {
+        aecm->lastError = AECM_NULL_POINTER_ERROR;
+        return -1;
+    }
+    if (size_bytes != WebRtcAecm_echo_path_size_bytes())
+    {
+        // Input channel size does not match the size of AECM
+        aecm->lastError = AECM_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    if (aecm->initFlag != kInitCheck)
+    {
+        aecm->lastError = AECM_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    memcpy(echo_path_ptr, aecm->aecmCore->channelStored, size_bytes);
+    return 0;
+}
+
+// Size in bytes of the echo path buffers exchanged via
+// WebRtcAecm_InitEchoPath()/WebRtcAecm_GetEchoPath(): one WebRtc_Word16
+// per stored channel coefficient (PART_LEN1 entries).
+size_t WebRtcAecm_echo_path_size_bytes()
+{
+    return sizeof(WebRtc_Word16) * PART_LEN1;
+}
+
+WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst)
+{
+    aecmob_t *aecm = aecmInst;
+
+    if (aecm == NULL)
+    {
+        return -1;
+    }
+
+    return aecm->lastError;
+}
+
+// Estimates the system delay (soundcard minus farend-buffer fill) and,
+// after the estimate has been consistently off for >25 calls, updates
+// knownDelay, which controls the farend read position.
+static int WebRtcAecm_EstBufDelay(aecmob_t *aecm, short msInSndCardBuf)
+{
+    short far_samples = (short) WebRtc_available_read(aecm->farendBuf);
+    short snd_card_samples = msInSndCardBuf * kSampMsNb * aecm->aecmCore->mult;
+    short new_delay = snd_card_samples - far_samples;
+    short delay_diff;
+
+    // Never let the raw estimate drop below one frame; skip a frame of
+    // farend data to compensate.
+    if (new_delay < FRAME_LEN)
+    {
+        WebRtc_MoveReadPtr(aecm->farendBuf, FRAME_LEN);
+        new_delay += FRAME_LEN;
+    }
+
+    // First-order smoothing: 80% previous estimate, 20% new, floored at 0.
+    aecm->filtDelay = WEBRTC_SPL_MAX(0, (8 * aecm->filtDelay + 2 * new_delay) / 10);
+
+    // Count consecutive frames where the smoothed delay disagrees with
+    // knownDelay; a sign flip in the disagreement restarts the count.
+    delay_diff = aecm->filtDelay - aecm->knownDelay;
+    if (delay_diff > 224)
+    {
+        aecm->timeForDelayChange = (aecm->lastDelayDiff < 96) ?
+                0 : aecm->timeForDelayChange + 1;
+    } else if (delay_diff < 96 && aecm->knownDelay > 0)
+    {
+        aecm->timeForDelayChange = (aecm->lastDelayDiff > 224) ?
+                0 : aecm->timeForDelayChange + 1;
+    } else
+    {
+        aecm->timeForDelayChange = 0;
+    }
+    aecm->lastDelayDiff = delay_diff;
+
+    if (aecm->timeForDelayChange > 25)
+    {
+        aecm->knownDelay = WEBRTC_SPL_MAX((int)aecm->filtDelay - 160, 0);
+    }
+    return 0;
+}
+
+// If the soundcard delay exceeds what the farend buffer can represent,
+// compensates by stuffing the buffer (moving the read pointer backwards),
+// adding between one frame and maxStuffSamp samples per call.
+static int WebRtcAecm_DelayComp(aecmob_t *aecm)
+{
+    const int max_stuff_samples = 10 * FRAME_LEN;
+    int far_samples = (int) WebRtc_available_read(aecm->farendBuf);
+    int snd_card_samples = aecm->msInSndCardBuf * kSampMsNb * aecm->aecmCore->mult;
+    int stuff_samples;
+
+    if (snd_card_samples - far_samples
+            > FAR_BUF_LEN - FRAME_LEN * aecm->aecmCore->mult)
+    {
+        // The difference of the buffer sizes is larger than the maximum
+        // allowed known delay. Compensate by stuffing the buffer.
+        stuff_samples = (int)(WEBRTC_SPL_MAX(((snd_card_samples >> 1) - far_samples),
+                FRAME_LEN));
+        stuff_samples = WEBRTC_SPL_MIN(stuff_samples, max_stuff_samples);
+
+        WebRtc_MoveReadPtr(aecm->farendBuf, -stuff_samples);
+        aecm->delayChange = 1; // the delay needs to be updated
+    }
+
+    return 0;
+}
diff --git a/trunk/src/modules/audio_processing/aecm/include/echo_control_mobile.h b/trunk/src/modules/audio_processing/aecm/include/echo_control_mobile.h
new file mode 100644
index 0000000..da0ad86
--- /dev/null
+++ b/trunk/src/modules/audio_processing/aecm/include/echo_control_mobile.h
@@ -0,0 +1,233 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
+
+#include "typedefs.h"
+
+enum {
+    AecmFalse = 0,
+    AecmTrue
+};
+
+// Errors
+#define AECM_UNSPECIFIED_ERROR           12000
+#define AECM_UNSUPPORTED_FUNCTION_ERROR  12001
+#define AECM_UNINITIALIZED_ERROR         12002
+#define AECM_NULL_POINTER_ERROR          12003
+#define AECM_BAD_PARAMETER_ERROR         12004
+
+// Warnings
+#define AECM_BAD_PARAMETER_WARNING       12100
+
+typedef struct {
+    WebRtc_Word16 cngMode;            // AecmFalse, AecmTrue (default)
+    WebRtc_Word16 echoMode;           // 0, 1, 2, 3 (default), 4
+} AecmConfig;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Allocates the memory needed by the AECM. The memory needs to be
+ * initialized separately using the WebRtcAecm_Init() function.
+ *
+ * Inputs                           Description
+ * -------------------------------------------------------------------
+ * void **aecmInst                  Pointer to the AECM instance to be
+ *                                  created and initialized
+ *
+ * Outputs                          Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32 return             0: OK
+ *                                 -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Create(void **aecmInst);
+
+/*
+ * This function releases the memory allocated by WebRtcAecm_Create()
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void *aecmInst               Pointer to the AECM instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Free(void *aecmInst);
+
+/*
+ * Initializes an AECM instance.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecmInst     Pointer to the AECM instance
+ * WebRtc_Word32  sampFreq      Sampling frequency of data
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Init(void* aecmInst,
+                              WebRtc_Word32 sampFreq);
+
+/*
+ * Inserts an 80 or 160 sample block of data into the farend buffer.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecmInst     Pointer to the AECM instance
+ * WebRtc_Word16  *farend       In buffer containing one frame of
+ *                              farend signal
+ * WebRtc_Word16  nrOfSamples   Number of samples in farend buffer
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_BufferFarend(void* aecmInst,
+                                      const WebRtc_Word16* farend,
+                                      WebRtc_Word16 nrOfSamples);
+
+/*
+ * Runs the AECM on an 80 or 160 sample blocks of data.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecmInst      Pointer to the AECM instance
+ * WebRtc_Word16  *nearendNoisy  In buffer containing one frame of
+ *                               reference nearend+echo signal. If
+ *                               noise reduction is active, provide
+ *                               the noisy signal here.
+ * WebRtc_Word16  *nearendClean  In buffer containing one frame of
+ *                               nearend+echo signal. If noise
+ *                               reduction is active, provide the
+ *                               clean signal here. Otherwise pass a
+ *                               NULL pointer.
+ * WebRtc_Word16  nrOfSamples    Number of samples in nearend buffer
+ * WebRtc_Word16  msInSndCardBuf Delay estimate for sound card and
+ *                               system buffers
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word16  *out          Out buffer, one frame of processed nearend
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Process(void* aecmInst,
+                                 const WebRtc_Word16* nearendNoisy,
+                                 const WebRtc_Word16* nearendClean,
+                                 WebRtc_Word16* out,
+                                 WebRtc_Word16 nrOfSamples,
+                                 WebRtc_Word16 msInSndCardBuf);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void     *aecmInst           Pointer to the AECM instance
+ * AecmConfig config            Config instance that contains all
+ *                              properties to be set
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_set_config(void* aecmInst,
+                                    AecmConfig config);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void *aecmInst               Pointer to the AECM instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * AecmConfig  *config          Pointer to the config instance that
+ *                              all properties will be written to
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst,
+                                    AecmConfig *config);
+
+/*
+ * This function enables the user to set the echo path on-the-fly.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void*        aecmInst        Pointer to the AECM instance
+ * void*        echo_path       Pointer to the echo path to be set
+ * size_t       size_bytes      Size in bytes of the echo path
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
+                                      const void* echo_path,
+                                      size_t size_bytes);
+
+/*
+ * This function enables the user to get the currently used echo path
+ * on-the-fly
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void*        aecmInst        Pointer to the AECM instance
+ * void*        echo_path       Pointer to echo path
+ * size_t       size_bytes      Size in bytes of the echo path
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
+                                     void* echo_path,
+                                     size_t size_bytes);
+
+/*
+ * This function enables the user to get the echo path size in bytes
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * size_t       return           : size in bytes
+ */
+size_t WebRtcAecm_echo_path_size_bytes();
+
+/*
+ * Gets the last error code.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void         *aecmInst       Pointer to the AECM instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        12000-12100: error code
+ */
+WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst);
+
+#ifdef __cplusplus
+}
+#endif
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
diff --git a/trunk/src/modules/audio_processing/agc/Android.mk b/trunk/src/modules/audio_processing/agc/Android.mk
new file mode 100644
index 0000000..5eceb14
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/Android.mk
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Android build file for the automatic gain control (AGC) module:
+# compiles analog_agc.c and digital_agc.c into the static library
+# libwebrtc_agc.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (presumably defines MY_WEBRTC_COMMON_DEFS
+# used below — confirm against android-webrtc.mk).
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+# Build in ARM (not Thumb) mode.
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_agc
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    analog_agc.c \
+    digital_agc.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# In a full Android source tree (no NDK), pick up the STLport include
+# paths from the platform makefile.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_processing/agc/agc.gypi b/trunk/src/modules/audio_processing/agc/agc.gypi
new file mode 100644
index 0000000..f96f237
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/agc.gypi
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# GYP include defining the 'agc' library target (analog + digital AGC),
+# built against the common signal_processing library.
+{
+  'targets': [
+    {
+      'target_name': 'agc',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      # Export 'include' to dependents so they can find gain_control.h.
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/gain_control.h',
+        'analog_agc.c',
+        'analog_agc.h',
+        'digital_agc.c',
+        'digital_agc.h',
+      ],
+    },
+  ],
+}
diff --git a/trunk/src/modules/audio_processing/agc/analog_agc.c b/trunk/src/modules/audio_processing/agc/analog_agc.c
new file mode 100644
index 0000000..558a6cb
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/analog_agc.c
@@ -0,0 +1,1694 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* analog_agc.c
+ *
+ * Using a feedback system, determines an appropriate analog volume level
+ * given an input signal and current volume level. Targets a conservative
+ * signal level and is intended for use with a digital AGC to apply
+ * additional gain.
+ *
+ */
+
+#include <assert.h>
+#include <stdlib.h>
+#ifdef AGC_DEBUG //test log
+#include <stdio.h>
+#endif
+#include "analog_agc.h"
+
+/* Slopes in Q13, one entry per volume-curve index 0-7 (presumably paired
+ * with kOffset1; use is outside this chunk — confirm). */
+static const WebRtc_Word16 kSlope1[8] = {21793, 12517, 7189, 4129, 2372, 1362, 472, 78};
+
+/* The offset in Q14 */
+static const WebRtc_Word16 kOffset1[8] = {25395, 23911, 22206, 20737, 19612, 18805, 17951,
+        17367};
+
+/* Slopes in Q13, one entry per volume-curve index 0-7 (second curve set;
+ * presumably paired with kOffset2 — confirm). */
+static const WebRtc_Word16 kSlope2[8] = {2063, 1731, 1452, 1218, 1021, 857, 597, 337};
+
+/* The offset in Q14 */
+static const WebRtc_Word16 kOffset2[8] = {18432, 18379, 18290, 18177, 18052, 17920, 17670,
+        17286};
+
+/* How long upward adaptation stays blocked after a muting event (ms);
+ * see WebRtcAgc_ZeroCtrl. */
+static const WebRtc_Word16 kMuteGuardTimeMs = 8000;
+/* Sentinel value marking an initialized instance — confirm against the
+ * init/create functions (outside this chunk). */
+static const WebRtc_Word16 kInitCheck = 42;
+
+/* Default settings if config is not used */
+#define AGC_DEFAULT_TARGET_LEVEL 3
+#define AGC_DEFAULT_COMP_GAIN 9
+/* This is the target level for the analog part in ENV scale. To convert to RMS scale you
+ * have to add OFFSET_ENV_TO_RMS.
+ */
+#define ANALOG_TARGET_LEVEL 11
+#define ANALOG_TARGET_LEVEL_2 5 // ANALOG_TARGET_LEVEL / 2
+/* Offset between RMS scale (analog part) and ENV scale (digital part). This value actually
+ * varies with the FIXED_ANALOG_TARGET_LEVEL, hence we should in the future replace it with
+ * a table.
+ */
+#define OFFSET_ENV_TO_RMS 9
+/* The reference input level at which the digital part gives an output of targetLevelDbfs
+ * (desired level) if we have no compression gain. This level should be set high enough not
+ * to compress the peaks due to the dynamics.
+ */
+#define DIGITAL_REF_AT_0_COMP_GAIN 4
+/* Speed of reference level decrease.
+ */
+#define DIFF_REF_TO_ANALOG 5
+
+#ifdef MIC_LEVEL_FEEDBACK
+#define NUM_BLOCKS_IN_SAT_BEFORE_CHANGE_TARGET 7
+#endif
+/* Size of analog gain table */
+#define GAIN_TBL_LEN 32
+/* Matlab code:
+ * fprintf(1, '\t%i, %i, %i, %i,\n', round(10.^(linspace(0,10,32)/20) * 2^12));
+ */
+/* Q12 (index 0 = 4096 = unity gain; ~0-10 dB range per the Matlab code above) */
+static const WebRtc_UWord16 kGainTableAnalog[GAIN_TBL_LEN] = {4096, 4251, 4412, 4579, 4752,
+        4932, 5118, 5312, 5513, 5722, 5938, 6163, 6396, 6638, 6889, 7150, 7420, 7701, 7992,
+        8295, 8609, 8934, 9273, 9623, 9987, 10365, 10758, 11165, 11587, 12025, 12480, 12953};
+
+/* Gain/Suppression tables for virtual Mic (in Q10; 1024 = unity gain) */
+static const WebRtc_UWord16 kGainTableVirtualMic[128] = {1052, 1081, 1110, 1141, 1172, 1204,
+        1237, 1271, 1305, 1341, 1378, 1416, 1454, 1494, 1535, 1577, 1620, 1664, 1710, 1757,
+        1805, 1854, 1905, 1957, 2010, 2065, 2122, 2180, 2239, 2301, 2364, 2428, 2495, 2563,
+        2633, 2705, 2779, 2855, 2933, 3013, 3096, 3180, 3267, 3357, 3449, 3543, 3640, 3739,
+        3842, 3947, 4055, 4166, 4280, 4397, 4517, 4640, 4767, 4898, 5032, 5169, 5311, 5456,
+        5605, 5758, 5916, 6078, 6244, 6415, 6590, 6770, 6956, 7146, 7341, 7542, 7748, 7960,
+        8178, 8402, 8631, 8867, 9110, 9359, 9615, 9878, 10148, 10426, 10711, 11004, 11305,
+        11614, 11932, 12258, 12593, 12938, 13292, 13655, 14029, 14412, 14807, 15212, 15628,
+        16055, 16494, 16945, 17409, 17885, 18374, 18877, 19393, 19923, 20468, 21028, 21603,
+        22194, 22801, 23425, 24065, 24724, 25400, 26095, 26808, 27541, 28295, 29069, 29864,
+        30681, 31520, 32382};
+/* Suppression table in Q10 (index 0 = 1024 = unity, decreasing). */
+static const WebRtc_UWord16 kSuppressionTableVirtualMic[128] = {1024, 1006, 988, 970, 952,
+        935, 918, 902, 886, 870, 854, 839, 824, 809, 794, 780, 766, 752, 739, 726, 713, 700,
+        687, 675, 663, 651, 639, 628, 616, 605, 594, 584, 573, 563, 553, 543, 533, 524, 514,
+        505, 496, 487, 478, 470, 461, 453, 445, 437, 429, 421, 414, 406, 399, 392, 385, 378,
+        371, 364, 358, 351, 345, 339, 333, 327, 321, 315, 309, 304, 298, 293, 288, 283, 278,
+        273, 268, 263, 258, 254, 249, 244, 240, 236, 232, 227, 223, 219, 215, 211, 208, 204,
+        200, 197, 193, 190, 186, 183, 180, 176, 173, 170, 167, 164, 161, 158, 155, 153, 150,
+        147, 145, 142, 139, 137, 134, 132, 130, 127, 125, 123, 121, 118, 116, 114, 112, 110,
+        108, 106, 104, 102};
+
+/* Table for target energy levels. Values in Q(-7)
+ * Matlab code
+ * targetLevelTable = fprintf('%d,\t%d,\t%d,\t%d,\n', round((32767*10.^(-(0:63)'/20)).^2*16/2^7) */
+
+static const WebRtc_Word32 kTargetLevelTable[64] = {134209536, 106606424, 84680493, 67264106,
+        53429779, 42440782, 33711911, 26778323, 21270778, 16895980, 13420954, 10660642,
+        8468049, 6726411, 5342978, 4244078, 3371191, 2677832, 2127078, 1689598, 1342095,
+        1066064, 846805, 672641, 534298, 424408, 337119, 267783, 212708, 168960, 134210,
+        106606, 84680, 67264, 53430, 42441, 33712, 26778, 21271, 16896, 13421, 10661, 8468,
+        6726, 5343, 4244, 3371, 2678, 2127, 1690, 1342, 1066, 847, 673, 534, 424, 337, 268,
+        213, 169, 134, 107, 85, 67};
+
+/*
+ * WebRtcAgc_AddMic
+ *
+ * Feeds one near-end (microphone) frame into the AGC. Applies a slowly
+ * varying digital gain when the requested mic volume (micVol) is above
+ * the analog range (maxAnalog), computes per-subblock envelope peaks and
+ * 16-sample block energies used by the analog adaptation, and runs the
+ * VAD on the low band. in_mic / in_mic_H are modified in place.
+ *
+ * Returns 0 on success, -1 on an invalid sample count for the configured
+ * sample rate or on NULL input pointers.
+ */
+int WebRtcAgc_AddMic(void *state, WebRtc_Word16 *in_mic, WebRtc_Word16 *in_mic_H,
+                     WebRtc_Word16 samples)
+{
+    WebRtc_Word32 nrg, max_nrg, sample, tmp32;
+    WebRtc_Word32 *ptr;
+    WebRtc_UWord16 targetGainIdx, gain;
+    WebRtc_Word16 i, n, L, M, subFrames, tmp16, tmp_speech[16];
+    Agc_t *stt;
+    stt = (Agc_t *)state;
+
+    //default/initial values corresponding to 10ms for wb and swb
+    M = 10;
+    L = 16;
+    subFrames = 160;
+
+    /* M = number of envelope subblocks, L = samples per subblock,
+     * subFrames = VAD frame length; all derived from fs and samples. */
+    if (stt->fs == 8000)
+    {
+        if (samples == 80)
+        {
+            subFrames = 80;
+            M = 10;
+            L = 8;
+        } else if (samples == 160)
+        {
+            subFrames = 80;
+            M = 20;
+            L = 8;
+        } else
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->add_mic, frame %d: Invalid number of samples\n\n",
+                    (stt->fcount + 1));
+#endif
+            return -1;
+        }
+    } else if (stt->fs == 16000)
+    {
+        if (samples == 160)
+        {
+            subFrames = 160;
+            M = 10;
+            L = 16;
+        } else if (samples == 320)
+        {
+            subFrames = 160;
+            M = 20;
+            L = 16;
+        } else
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->add_mic, frame %d: Invalid number of samples\n\n",
+                    (stt->fcount + 1));
+#endif
+            return -1;
+        }
+    } else if (stt->fs == 32000)
+    {
+        /* SWB is processed as 160 sample for L and H bands */
+        if (samples == 160)
+        {
+            subFrames = 160;
+            M = 10;
+            L = 16;
+        } else
+        {
+#ifdef AGC_DEBUG
+            fprintf(stt->fpt,
+                    "AGC->add_mic, frame %d: Invalid sample rate\n\n",
+                    (stt->fcount + 1));
+#endif
+            return -1;
+        }
+    }
+
+    /* Check for valid pointers based on sampling rate */
+    if ((stt->fs == 32000) && (in_mic_H == NULL))
+    {
+        return -1;
+    }
+    /* Check for valid pointer for low band */
+    if (in_mic == NULL)
+    {
+        return -1;
+    }
+
+    /* apply slowly varying digital gain */
+    if (stt->micVol > stt->maxAnalog)
+    {
+        /* |maxLevel| is strictly >= |micVol|, so this condition should be
+         * satisfied here, ensuring there is no divide-by-zero. */
+        assert(stt->maxLevel > stt->maxAnalog);
+
+        /* Q1 */
+        tmp16 = (WebRtc_Word16)(stt->micVol - stt->maxAnalog);
+        tmp32 = WEBRTC_SPL_MUL_16_16(GAIN_TBL_LEN - 1, tmp16);
+        tmp16 = (WebRtc_Word16)(stt->maxLevel - stt->maxAnalog);
+        targetGainIdx = (WebRtc_UWord16)WEBRTC_SPL_DIV(tmp32, tmp16);
+        assert(targetGainIdx < GAIN_TBL_LEN);
+
+        /* Increment through the table towards the target gain.
+         * If micVol drops below maxAnalog, we allow the gain
+         * to be dropped immediately. */
+        if (stt->gainTableIdx < targetGainIdx)
+        {
+            stt->gainTableIdx++;
+        } else if (stt->gainTableIdx > targetGainIdx)
+        {
+            stt->gainTableIdx--;
+        }
+
+        /* Q12 */
+        gain = kGainTableAnalog[stt->gainTableIdx];
+
+        /* Apply the gain with saturation to the 16-bit range. */
+        for (i = 0; i < samples; i++)
+        {
+            // For lower band
+            tmp32 = WEBRTC_SPL_MUL_16_U16(in_mic[i], gain);
+            sample = WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
+            if (sample > 32767)
+            {
+                in_mic[i] = 32767;
+            } else if (sample < -32768)
+            {
+                in_mic[i] = -32768;
+            } else
+            {
+                in_mic[i] = (WebRtc_Word16)sample;
+            }
+
+            // For higher band
+            if (stt->fs == 32000)
+            {
+                tmp32 = WEBRTC_SPL_MUL_16_U16(in_mic_H[i], gain);
+                sample = WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
+                if (sample > 32767)
+                {
+                    in_mic_H[i] = 32767;
+                } else if (sample < -32768)
+                {
+                    in_mic_H[i] = -32768;
+                } else
+                {
+                    in_mic_H[i] = (WebRtc_Word16)sample;
+                }
+            }
+        }
+    } else
+    {
+        stt->gainTableIdx = 0;
+    }
+
+    /* compute envelope */
+    /* If a 10 ms frame is already queued, fill the second slot. */
+    if ((M == 10) && (stt->inQueue > 0))
+    {
+        ptr = stt->env[1];
+    } else
+    {
+        ptr = stt->env[0];
+    }
+
+    /* Peak sample energy per L-sample subblock. */
+    for (i = 0; i < M; i++)
+    {
+        /* iterate over samples */
+        max_nrg = 0;
+        for (n = 0; n < L; n++)
+        {
+            nrg = WEBRTC_SPL_MUL_16_16(in_mic[i * L + n], in_mic[i * L + n]);
+            if (nrg > max_nrg)
+            {
+                max_nrg = nrg;
+            }
+        }
+        ptr[i] = max_nrg;
+    }
+
+    /* compute energy */
+    if ((M == 10) && (stt->inQueue > 0))
+    {
+        ptr = stt->Rxx16w32_array[1];
+    } else
+    {
+        ptr = stt->Rxx16w32_array[0];
+    }
+
+    /* One energy value per 16 samples (M/2 blocks). */
+    for (i = 0; i < WEBRTC_SPL_RSHIFT_W16(M, 1); i++)
+    {
+        if (stt->fs == 16000)
+        {
+            /* 16 kHz: decimate 32 samples by 2 before the energy measure. */
+            WebRtcSpl_DownsampleBy2(&in_mic[i * 32], 32, tmp_speech, stt->filterState);
+        } else
+        {
+            /* 8 or 32 kHz: use 16 low-band samples directly. */
+            memcpy(tmp_speech, &in_mic[i * 16], 16 * sizeof(short));
+        }
+        /* Compute energy in blocks of 16 samples */
+        ptr[i] = WebRtcSpl_DotProductWithScale(tmp_speech, tmp_speech, 16, 4);
+    }
+
+    /* update queue information */
+    /* inQueue counts buffered 10 ms frames (saturates at 2). */
+    if ((stt->inQueue == 0) && (M == 10))
+    {
+        stt->inQueue = 1;
+    } else
+    {
+        stt->inQueue = 2;
+    }
+
+    /* call VAD (use low band only) */
+    for (i = 0; i < samples; i += subFrames)
+    {
+        WebRtcAgc_ProcessVad(&stt->vadMic, &in_mic[i], subFrames);
+    }
+
+    return 0;
+}
+
+/*
+ * WebRtcAgc_AddFarend
+ *
+ * Validates the far-end frame length against the configured sample rate
+ * and forwards the signal to the digital AGC in subframes of 80 (8 kHz)
+ * or 160 samples (16/32 kHz). Returns the accumulated result of the
+ * digital AGC calls (0 on success, negative on error), or -1 for a NULL
+ * instance, an invalid sample count, or an unsupported sample rate.
+ */
+int WebRtcAgc_AddFarend(void *state, const WebRtc_Word16 *in_far, WebRtc_Word16 samples)
+{
+    WebRtc_Word32 errHandle = 0;
+    WebRtc_Word16 i, subFrames;
+    Agc_t *stt;
+    stt = (Agc_t *)state;
+
+    if (stt == NULL)
+    {
+        return -1;
+    }
+
+    if (stt->fs == 8000)
+    {
+        if ((samples != 80) && (samples != 160))
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->add_far_end, frame %d: Invalid number of samples\n\n",
+                    stt->fcount);
+#endif
+            return -1;
+        }
+        subFrames = 80;
+    } else if (stt->fs == 16000)
+    {
+        if ((samples != 160) && (samples != 320))
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->add_far_end, frame %d: Invalid number of samples\n\n",
+                    stt->fcount);
+#endif
+            return -1;
+        }
+        subFrames = 160;
+    } else if (stt->fs == 32000)
+    {
+        if ((samples != 160) && (samples != 320))
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->add_far_end, frame %d: Invalid number of samples\n\n",
+                    stt->fcount);
+#endif
+            return -1;
+        }
+        subFrames = 160;
+    } else
+    {
+#ifdef AGC_DEBUG //test log
+        fprintf(stt->fpt,
+                "AGC->add_far_end, frame %d: Invalid sample rate\n\n",
+                stt->fcount + 1);
+#endif
+        return -1;
+    }
+
+    /* Accumulate (negative) error codes across subframes. */
+    for (i = 0; i < samples; i += subFrames)
+    {
+        errHandle += WebRtcAgc_AddFarendToDigital(&stt->digitalAgc, &in_far[i], subFrames);
+    }
+
+    return errHandle;
+}
+
+/*
+ * WebRtcAgc_VirtualMic
+ *
+ * Emulates an analog microphone volume control in software. First
+ * classifies the frame as low-level (so the digital AGC will not adapt
+ * to it), then applies a gain or suppression factor from the Q10
+ * virtual-mic tables — stepping the gain index down whenever a sample
+ * clips — and finally hands the scaled signal to WebRtcAgc_AddMic.
+ * in_near / in_near_H are modified in place; *micLevelOut receives the
+ * level actually applied. Returns 0 on success, -1 if AddMic fails.
+ */
+int WebRtcAgc_VirtualMic(void *agcInst, WebRtc_Word16 *in_near, WebRtc_Word16 *in_near_H,
+                         WebRtc_Word16 samples, WebRtc_Word32 micLevelIn,
+                         WebRtc_Word32 *micLevelOut)
+{
+    WebRtc_Word32 tmpFlt, micLevelTmp, gainIdx;
+    WebRtc_UWord16 gain;
+    WebRtc_Word16 ii;
+    Agc_t *stt;
+
+    WebRtc_UWord32 nrg;
+    WebRtc_Word16 sampleCntr;
+    WebRtc_UWord32 frameNrg = 0;
+    WebRtc_UWord32 frameNrgLimit = 5500;
+    WebRtc_Word16 numZeroCrossing = 0;
+    const WebRtc_Word16 kZeroCrossingLowLim = 15;
+    const WebRtc_Word16 kZeroCrossingHighLim = 20;
+
+    stt = (Agc_t *)agcInst;
+
+    /*
+     *  Before applying gain decide if this is a low-level signal.
+     *  The idea is that digital AGC will not adapt to low-level
+     *  signals.
+     */
+    if (stt->fs != 8000)
+    {
+        frameNrgLimit = frameNrgLimit << 1;
+    }
+
+    frameNrg = WEBRTC_SPL_MUL_16_16(in_near[0], in_near[0]);
+    for (sampleCntr = 1; sampleCntr < samples; sampleCntr++)
+    {
+
+        // increment frame energy if it is less than the limit
+        // the correct value of the energy is not important
+        if (frameNrg < frameNrgLimit)
+        {
+            nrg = WEBRTC_SPL_MUL_16_16(in_near[sampleCntr], in_near[sampleCntr]);
+            frameNrg += nrg;
+        }
+
+        // Count the zero crossings
+        numZeroCrossing += ((in_near[sampleCntr] ^ in_near[sampleCntr - 1]) < 0);
+    }
+
+    /* Classify: very low energy / very few crossings => low level;
+     * moderate crossings with high energy => normal; otherwise low. */
+    if ((frameNrg < 500) || (numZeroCrossing <= 5))
+    {
+        stt->lowLevelSignal = 1;
+    } else if (numZeroCrossing <= kZeroCrossingLowLim)
+    {
+        stt->lowLevelSignal = 0;
+    } else if (frameNrg <= frameNrgLimit)
+    {
+        stt->lowLevelSignal = 1;
+    } else if (numZeroCrossing >= kZeroCrossingHighLim)
+    {
+        stt->lowLevelSignal = 1;
+    } else
+    {
+        stt->lowLevelSignal = 0;
+    }
+
+    micLevelTmp = WEBRTC_SPL_LSHIFT_W32(micLevelIn, stt->scale);
+    /* Set desired level */
+    gainIdx = stt->micVol;
+    if (stt->micVol > stt->maxAnalog)
+    {
+        gainIdx = stt->maxAnalog;
+    }
+    if (micLevelTmp != stt->micRef)
+    {
+        /* Something has happened with the physical level, restart. */
+        stt->micRef = micLevelTmp;
+        stt->micVol = 127;
+        *micLevelOut = 127;
+        stt->micGainIdx = 127;
+        gainIdx = 127;
+    }
+    /* Pre-process the signal to emulate the microphone level. */
+    /* Take one step at a time in the gain table. */
+    /* gainIdx 127 is unity; above it index the gain table, below it the
+     * suppression table. */
+    if (gainIdx > 127)
+    {
+        gain = kGainTableVirtualMic[gainIdx - 128];
+    } else
+    {
+        gain = kSuppressionTableVirtualMic[127 - gainIdx];
+    }
+    for (ii = 0; ii < samples; ii++)
+    {
+        tmpFlt = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_U16(in_near[ii], gain), 10);
+        if (tmpFlt > 32767)
+        {
+            /* Clipped: saturate and back the gain off one step. */
+            tmpFlt = 32767;
+            gainIdx--;
+            /* NOTE(review): the initial lookup above maps gainIdx > 127 to
+             * table[gainIdx - 128]; here gainIdx >= 127 maps to
+             * table[gainIdx - 127]. Possible off-by-one — confirm intended. */
+            if (gainIdx >= 127)
+            {
+                gain = kGainTableVirtualMic[gainIdx - 127];
+            } else
+            {
+                gain = kSuppressionTableVirtualMic[127 - gainIdx];
+            }
+        }
+        if (tmpFlt < -32768)
+        {
+            tmpFlt = -32768;
+            gainIdx--;
+            /* NOTE(review): same off-by-one question as above. */
+            if (gainIdx >= 127)
+            {
+                gain = kGainTableVirtualMic[gainIdx - 127];
+            } else
+            {
+                gain = kSuppressionTableVirtualMic[127 - gainIdx];
+            }
+        }
+        in_near[ii] = (WebRtc_Word16)tmpFlt;
+        if (stt->fs == 32000)
+        {
+            /* High band: same gain, clip without stepping the index. */
+            tmpFlt = WEBRTC_SPL_MUL_16_U16(in_near_H[ii], gain);
+            tmpFlt = WEBRTC_SPL_RSHIFT_W32(tmpFlt, 10);
+            if (tmpFlt > 32767)
+            {
+                tmpFlt = 32767;
+            }
+            if (tmpFlt < -32768)
+            {
+                tmpFlt = -32768;
+            }
+            in_near_H[ii] = (WebRtc_Word16)tmpFlt;
+        }
+    }
+    /* Set the level we (finally) used */
+    stt->micGainIdx = gainIdx;
+//    *micLevelOut = stt->micGainIdx;
+    *micLevelOut = WEBRTC_SPL_RSHIFT_W32(stt->micGainIdx, stt->scale);
+    /* Add to Mic as if it was the output from a true microphone */
+    if (WebRtcAgc_AddMic(agcInst, in_near, in_near_H, samples) != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+/*
+ * WebRtcAgc_UpdateAgcThresholds
+ *
+ * Recomputes the analog target level (envelope dBOv scale) and the
+ * Rxx160-based adaptation limits from kTargetLevelTable around targetIdx.
+ * With MIC_LEVEL_FEEDBACK, offsets the target index when the mic level
+ * has saturated. Presumably called whenever the compression gain or mode
+ * changes — confirm against callers outside this chunk.
+ */
+void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
+{
+
+    WebRtc_Word16 tmp16;
+#ifdef MIC_LEVEL_FEEDBACK
+    int zeros;
+
+    if (stt->micLvlSat)
+    {
+        /* Lower the analog target level since we have reached its maximum */
+        zeros = WebRtcSpl_NormW32(stt->Rxx160_LPw32);
+        stt->targetIdxOffset = WEBRTC_SPL_RSHIFT_W16((3 * zeros) - stt->targetIdx - 2, 2);
+    }
+#endif
+
+    /* Set analog target level in envelope dBOv scale */
+    tmp16 = (DIFF_REF_TO_ANALOG * stt->compressionGaindB) + ANALOG_TARGET_LEVEL_2;
+    tmp16 = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32)tmp16, ANALOG_TARGET_LEVEL);
+    stt->analogTarget = DIGITAL_REF_AT_0_COMP_GAIN + tmp16;
+    if (stt->analogTarget < DIGITAL_REF_AT_0_COMP_GAIN)
+    {
+        stt->analogTarget = DIGITAL_REF_AT_0_COMP_GAIN;
+    }
+    if (stt->agcMode == kAgcModeFixedDigital)
+    {
+        /* Adjust for different parameter interpretation in FixedDigital mode */
+        stt->analogTarget = stt->compressionGaindB;
+    }
+#ifdef MIC_LEVEL_FEEDBACK
+    stt->analogTarget += stt->targetIdxOffset;
+#endif
+    /* Since the offset between RMS and ENV is not constant, we should make this into a
+     * table, but for now, we'll stick with a constant, tuned for the chosen analog
+     * target level.
+     */
+    stt->targetIdx = ANALOG_TARGET_LEVEL + OFFSET_ENV_TO_RMS;
+#ifdef MIC_LEVEL_FEEDBACK
+    stt->targetIdx += stt->targetIdxOffset;
+#endif
+    /* Analog adaptation limits */
+    /* analogTargetLevel = round((32767*10^(-targetIdx/20))^2*16/2^7) */
+    stt->analogTargetLevel = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx]; /* ex. -20 dBov */
+    stt->startUpperLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx - 1];/* -19 dBov */
+    stt->startLowerLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx + 1];/* -21 dBov */
+    stt->upperPrimaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx - 2];/* -18 dBov */
+    stt->lowerPrimaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx + 2];/* -22 dBov */
+    stt->upperSecondaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx - 5];/* -15 dBov */
+    stt->lowerSecondaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx + 5];/* -25 dBov */
+    stt->upperLimit = stt->startUpperLimit;
+    stt->lowerLimit = stt->startLowerLimit;
+}
+
+/*
+ * WebRtcAgc_SaturationCtrl
+ *
+ * Saturation detector: accumulates subframe envelope peaks (scaled down
+ * by 2^20) that exceed 875 into a leaky sum (decay 32440/32768 ~= 0.99)
+ * and raises *saturated, resetting the sum, once it passes 25000.
+ */
+void WebRtcAgc_SaturationCtrl(Agc_t *stt, WebRtc_UWord8 *saturated, WebRtc_Word32 *env)
+{
+    WebRtc_Word16 i, tmpW16;
+
+    /* Check if the signal is saturated */
+    for (i = 0; i < 10; i++)
+    {
+        tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(env[i], 20);
+        if (tmpW16 > 875)
+        {
+            stt->envSum += tmpW16;
+        }
+    }
+
+    if (stt->envSum > 25000)
+    {
+        *saturated = 1;
+        stt->envSum = 0;
+    }
+
+    /* stt->envSum *= 0.99; */
+    stt->envSum = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(stt->envSum,
+            (WebRtc_Word16)32440, 15);
+}
+
+/*
+ * WebRtcAgc_ZeroCtrl
+ *
+ * Zero-signal control: if the envelope stays near zero for 500 ms, the
+ * microphone level is nudged up (~10%, capped by zeroCtrlMax) so a mic
+ * that is merely set too low can recover, and the adaptation state is
+ * reset. A guard timer then blocks upward adaptation for a while after a
+ * muting event. Updates *inMicLevel and the instance state in place.
+ */
+void WebRtcAgc_ZeroCtrl(Agc_t *stt, WebRtc_Word32 *inMicLevel, WebRtc_Word32 *env)
+{
+    WebRtc_Word16 i;
+    WebRtc_Word32 tmp32 = 0;
+    WebRtc_Word32 midVal;
+
+    /* Is the input signal zero? */
+    for (i = 0; i < 10; i++)
+    {
+        tmp32 += env[i];
+    }
+
+    /* Each block is allowed to have a few non-zero
+     * samples.
+     */
+    if (tmp32 < 500)
+    {
+        stt->msZero += 10;
+    } else
+    {
+        stt->msZero = 0;
+    }
+
+    if (stt->muteGuardMs > 0)
+    {
+        stt->muteGuardMs -= 10;
+    }
+
+    if (stt->msZero > 500)
+    {
+        stt->msZero = 0;
+
+        /* Increase microphone level only if it's less than 50% */
+        midVal = WEBRTC_SPL_RSHIFT_W32(stt->maxAnalog + stt->minLevel + 1, 1);
+        if (*inMicLevel < midVal)
+        {
+            /* *inMicLevel *= 1.1; */
+            tmp32 = WEBRTC_SPL_MUL(1126, *inMicLevel);
+            *inMicLevel = WEBRTC_SPL_RSHIFT_W32(tmp32, 10);
+            /* Reduces risk of a muted mic repeatedly triggering excessive levels due
+             * to zero signal detection. */
+            *inMicLevel = WEBRTC_SPL_MIN(*inMicLevel, stt->zeroCtrlMax);
+            stt->micVol = *inMicLevel;
+        }
+
+#ifdef AGC_DEBUG //test log
+        /* Fix: the format string was missing the %d for micVol — two
+         * arguments were passed but only one converted, which is undefined
+         * behavior per the C standard and dropped micVol from the log. */
+        fprintf(stt->fpt,
+                "\t\tAGC->zeroCntrl, frame %d: 500 ms under threshold, micVol: %d\n",
+                stt->fcount, stt->micVol);
+#endif
+
+        stt->activeSpeech = 0;
+        stt->Rxx16_LPw32Max = 0;
+
+        /* The AGC has a tendency (due to problems with the VAD parameters), to
+         * vastly increase the volume after a muting event. This timer prevents
+         * upwards adaptation for a short period. */
+        stt->muteGuardMs = kMuteGuardTimeMs;
+    }
+}
+
+/* Adjusts the VAD decision threshold based on long-term near-end
+ * activity; the new value is smoothed as (31*old + new)/32. */
+void WebRtcAgc_SpeakerInactiveCtrl(Agc_t *stt)
+{
+    /* Check if the near end speaker is inactive.
+     * If that is the case the VAD threshold is
+     * increased since the VAD speech model gets
+     * more sensitive to any sound after a long
+     * silence.
+     */
+
+    WebRtc_Word32 tmp32;
+    WebRtc_Word16 vadThresh;
+
+    if (stt->vadMic.stdLongTerm < 2500)
+    {
+        /* Long silence: pin the threshold at a fixed 1500. */
+        stt->vadThreshold = 1500;
+    } else
+    {
+        vadThresh = kNormalVadThreshold;
+        if (stt->vadMic.stdLongTerm < 4500)
+        {
+            /* Scale between min and max threshold */
+            vadThresh += WEBRTC_SPL_RSHIFT_W16(4500 - stt->vadMic.stdLongTerm, 1);
+        }
+
+        /* stt->vadThreshold = (31 * stt->vadThreshold + vadThresh) / 32; */
+        tmp32 = (WebRtc_Word32)vadThresh;
+        tmp32 += WEBRTC_SPL_MUL_16_16((WebRtc_Word16)31, stt->vadThreshold);
+        stt->vadThreshold = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 5);
+    }
+}
+
+/* Maps a Q14 volume value to one of 8 gain-curve indices (0-7) via a
+ * balanced comparison tree over fixed Q14 thresholds; higher volume
+ * selects a higher index. */
+void WebRtcAgc_ExpCurve(WebRtc_Word16 volume, WebRtc_Word16 *index)
+{
+    // volume in Q14
+    // index in [0-7]
+    /* 8 different curves */
+    if (volume > 5243)
+    {
+        if (volume > 7864)
+        {
+            if (volume > 12124)
+            {
+                *index = 7;
+            } else
+            {
+                *index = 6;
+            }
+        } else
+        {
+            if (volume > 6554)
+            {
+                *index = 5;
+            } else
+            {
+                *index = 4;
+            }
+        }
+    } else
+    {
+        if (volume > 2621)
+        {
+            if (volume > 3932)
+            {
+                *index = 3;
+            } else
+            {
+                *index = 2;
+            }
+        } else
+        {
+            if (volume > 1311)
+            {
+                *index = 1;
+            } else
+            {
+                *index = 0;
+            }
+        }
+    }
+}
+
+WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
+                                        WebRtc_Word32 *outMicLevel,
+                                        WebRtc_Word16 vadLogRatio,
+                                        WebRtc_Word16 echo, WebRtc_UWord8 *saturationWarning)
+{
+    WebRtc_UWord32 tmpU32;
+    WebRtc_Word32 Rxx16w32, tmp32;
+    WebRtc_Word32 inMicLevelTmp, lastMicVol;
+    WebRtc_Word16 i;
+    WebRtc_UWord8 saturated = 0;
+    Agc_t *stt;
+
+    stt = (Agc_t *)state;
+    inMicLevelTmp = WEBRTC_SPL_LSHIFT_W32(inMicLevel, stt->scale);
+
+    if (inMicLevelTmp > stt->maxAnalog)
+    {
+#ifdef AGC_DEBUG //test log
+        fprintf(stt->fpt, "\tAGC->ProcessAnalog, frame %d: micLvl > maxAnalog\n", stt->fcount);
+#endif
+        return -1;
+    } else if (inMicLevelTmp < stt->minLevel)
+    {
+#ifdef AGC_DEBUG //test log
+        fprintf(stt->fpt, "\tAGC->ProcessAnalog, frame %d: micLvl < minLevel\n", stt->fcount);
+#endif
+        return -1;
+    }
+
+    if (stt->firstCall == 0)
+    {
+        WebRtc_Word32 tmpVol;
+        stt->firstCall = 1;
+        tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (WebRtc_Word32)51, 9);
+        tmpVol = (stt->minLevel + tmp32);
+
+        /* If the mic level is very low at start, increase it! */
+        if ((inMicLevelTmp < tmpVol) && (stt->agcMode == kAgcModeAdaptiveAnalog))
+        {
+            inMicLevelTmp = tmpVol;
+        }
+        stt->micVol = inMicLevelTmp;
+    }
+
+    /* Set the mic level to the previous output value if there is digital input gain */
+    if ((inMicLevelTmp == stt->maxAnalog) && (stt->micVol > stt->maxAnalog))
+    {
+        inMicLevelTmp = stt->micVol;
+    }
+
+    /* If the mic level was manually changed to a very low value raise it! */
+    if ((inMicLevelTmp != stt->micVol) && (inMicLevelTmp < stt->minOutput))
+    {
+        tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (WebRtc_Word32)51, 9);
+        inMicLevelTmp = (stt->minLevel + tmp32);
+        stt->micVol = inMicLevelTmp;
+#ifdef MIC_LEVEL_FEEDBACK
+        //stt->numBlocksMicLvlSat = 0;
+#endif
+#ifdef AGC_DEBUG //test log
+        fprintf(stt->fpt,
+                "\tAGC->ProcessAnalog, frame %d: micLvl < minLevel by manual decrease, raise vol\n",
+                stt->fcount);
+#endif
+    }
+
+    if (inMicLevelTmp != stt->micVol)
+    {
+        // Incoming level mismatch; update our level.
+        // This could be the case if the volume is changed manually, or if the
+        // sound device has a low volume resolution.
+        stt->micVol = inMicLevelTmp;
+    }
+
+    if (inMicLevelTmp > stt->maxLevel)
+    {
+        // Always allow the user to raise the volume above the maxLevel.
+        stt->maxLevel = inMicLevelTmp;
+    }
+
+    // Store last value here, after we've taken care of manual updates etc.
+    lastMicVol = stt->micVol;
+
+    /* Checks if the signal is saturated. Also a check if individual samples
+     * are larger than 12000 is done. If they are the counter for increasing
+     * the volume level is set to -100ms
+     */
+    WebRtcAgc_SaturationCtrl(stt, &saturated, stt->env[0]);
+
+    /* The AGC is always allowed to lower the level if the signal is saturated */
+    if (saturated == 1)
+    {
+        /* Lower the recording level
+         * Rxx160_LP is adjusted down because it is so slow it could
+         * cause the AGC to make wrong decisions. */
+        /* stt->Rxx160_LPw32 *= 0.875; */
+        stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 3), 7);
+
+        stt->zeroCtrlMax = stt->micVol;
+
+        /* stt->micVol *= 0.903; */
+        tmp32 = inMicLevelTmp - stt->minLevel;
+        tmpU32 = WEBRTC_SPL_UMUL(29591, (WebRtc_UWord32)(tmp32));
+        stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
+        if (stt->micVol > lastMicVol - 2)
+        {
+            stt->micVol = lastMicVol - 2;
+        }
+        inMicLevelTmp = stt->micVol;
+
+#ifdef AGC_DEBUG //test log
+        fprintf(stt->fpt,
+                "\tAGC->ProcessAnalog, frame %d: saturated, micVol = %d\n",
+                stt->fcount, stt->micVol);
+#endif
+
+        if (stt->micVol < stt->minOutput)
+        {
+            *saturationWarning = 1;
+        }
+
+        /* Reset counter for decrease of volume level to avoid
+         * decreasing too much. The saturation control can still
+         * lower the level if needed. */
+        stt->msTooHigh = -100;
+
+        /* Enable the control mechanism to ensure that our measure,
+         * Rxx160_LP, is in the correct range. This must be done since
+         * the measure is very slow. */
+        stt->activeSpeech = 0;
+        stt->Rxx16_LPw32Max = 0;
+
+        /* Reset to initial values */
+        stt->msecSpeechInnerChange = kMsecSpeechInner;
+        stt->msecSpeechOuterChange = kMsecSpeechOuter;
+        stt->changeToSlowMode = 0;
+
+        stt->muteGuardMs = 0;
+
+        stt->upperLimit = stt->startUpperLimit;
+        stt->lowerLimit = stt->startLowerLimit;
+#ifdef MIC_LEVEL_FEEDBACK
+        //stt->numBlocksMicLvlSat = 0;
+#endif
+    }
+
+    /* Check if the input speech is zero. If so the mic volume
+     * is increased. On some computers the input is zero up as high
+     * level as 17% */
+    WebRtcAgc_ZeroCtrl(stt, &inMicLevelTmp, stt->env[0]);
+
+    /* Check if the near end speaker is inactive.
+     * If that is the case the VAD threshold is
+     * increased since the VAD speech model gets
+     * more sensitive to any sound after a long
+     * silence.
+     */
+    WebRtcAgc_SpeakerInactiveCtrl(stt);
+
+    for (i = 0; i < 5; i++)
+    {
+        /* Computed on blocks of 16 samples */
+
+        Rxx16w32 = stt->Rxx16w32_array[0][i];
+
+        /* Rxx160w32 in Q(-7) */
+        tmp32 = WEBRTC_SPL_RSHIFT_W32(Rxx16w32 - stt->Rxx16_vectorw32[stt->Rxx16pos], 3);
+        stt->Rxx160w32 = stt->Rxx160w32 + tmp32;
+        stt->Rxx16_vectorw32[stt->Rxx16pos] = Rxx16w32;
+
+        /* Circular buffer */
+        stt->Rxx16pos++;
+        if (stt->Rxx16pos == RXX_BUFFER_LEN)
+        {
+            stt->Rxx16pos = 0;
+        }
+
+        /* Rxx16_LPw32 in Q(-4) */
+        tmp32 = WEBRTC_SPL_RSHIFT_W32(Rxx16w32 - stt->Rxx16_LPw32, kAlphaShortTerm);
+        stt->Rxx16_LPw32 = (stt->Rxx16_LPw32) + tmp32;
+
+        if (vadLogRatio > stt->vadThreshold)
+        {
+            /* Speech detected! */
+
+            /* Check if Rxx160_LP is in the correct range. If
+             * it is too high/low then we set it to the maximum of
+             * Rxx16_LPw32 during the first 200ms of speech.
+             */
+            if (stt->activeSpeech < 250)
+            {
+                stt->activeSpeech += 2;
+
+                if (stt->Rxx16_LPw32 > stt->Rxx16_LPw32Max)
+                {
+                    stt->Rxx16_LPw32Max = stt->Rxx16_LPw32;
+                }
+            } else if (stt->activeSpeech == 250)
+            {
+                stt->activeSpeech += 2;
+                tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx16_LPw32Max, 3);
+                stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, RXX_BUFFER_LEN);
+            }
+
+            tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160w32 - stt->Rxx160_LPw32, kAlphaLongTerm);
+            stt->Rxx160_LPw32 = stt->Rxx160_LPw32 + tmp32;
+
+            if (stt->Rxx160_LPw32 > stt->upperSecondaryLimit)
+            {
+                stt->msTooHigh += 2;
+                stt->msTooLow = 0;
+                stt->changeToSlowMode = 0;
+
+                if (stt->msTooHigh > stt->msecSpeechOuterChange)
+                {
+                    stt->msTooHigh = 0;
+
+                    /* Lower the recording level */
+                    /* Multiply by 0.828125 which corresponds to decreasing ~0.8dB */
+                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
+                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 53);
+
+                    /* Reduce the max gain to avoid excessive oscillation
+                     * (but never drop below the maximum analog level).
+                     * stt->maxLevel = (15 * stt->maxLevel + stt->micVol) / 16;
+                     */
+                    tmp32 = (15 * stt->maxLevel) + stt->micVol;
+                    stt->maxLevel = WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
+                    stt->maxLevel = WEBRTC_SPL_MAX(stt->maxLevel, stt->maxAnalog);
+
+                    stt->zeroCtrlMax = stt->micVol;
+
+                    /* 0.95 in Q15 */
+                    tmp32 = inMicLevelTmp - stt->minLevel;
+                    tmpU32 = WEBRTC_SPL_UMUL(31130, (WebRtc_UWord32)(tmp32));
+                    stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
+                    if (stt->micVol > lastMicVol - 1)
+                    {
+                        stt->micVol = lastMicVol - 1;
+                    }
+                    inMicLevelTmp = stt->micVol;
+
+                    /* Enable the control mechanism to ensure that our measure,
+                     * Rxx160_LP, is in the correct range.
+                     */
+                    stt->activeSpeech = 0;
+                    stt->Rxx16_LPw32Max = 0;
+#ifdef MIC_LEVEL_FEEDBACK
+                    //stt->numBlocksMicLvlSat = 0;
+#endif
+#ifdef AGC_DEBUG //test log
+                    fprintf(stt->fpt,
+                            "\tAGC->ProcessAnalog, frame %d: measure > 2ndUpperLim, micVol = %d, maxLevel = %d\n",
+                            stt->fcount, stt->micVol, stt->maxLevel);
+#endif
+                }
+            } else if (stt->Rxx160_LPw32 > stt->upperLimit)
+            {
+                stt->msTooHigh += 2;
+                stt->msTooLow = 0;
+                stt->changeToSlowMode = 0;
+
+                if (stt->msTooHigh > stt->msecSpeechInnerChange)
+                {
+                    /* Lower the recording level */
+                    stt->msTooHigh = 0;
+                    /* Multiply by 0.828125 which corresponds to decreasing ~0.8dB */
+                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
+                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 53);
+
+                    /* Reduce the max gain to avoid excessive oscillation
+                     * (but never drop below the maximum analog level).
+                     * stt->maxLevel = (15 * stt->maxLevel + stt->micVol) / 16;
+                     */
+                    tmp32 = (15 * stt->maxLevel) + stt->micVol;
+                    stt->maxLevel = WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
+                    stt->maxLevel = WEBRTC_SPL_MAX(stt->maxLevel, stt->maxAnalog);
+
+                    stt->zeroCtrlMax = stt->micVol;
+
+                    /* 0.965 in Q15 */
+                    tmp32 = inMicLevelTmp - stt->minLevel;
+                    tmpU32 = WEBRTC_SPL_UMUL(31621, (WebRtc_UWord32)(inMicLevelTmp - stt->minLevel));
+                    stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
+                    if (stt->micVol > lastMicVol - 1)
+                    {
+                        stt->micVol = lastMicVol - 1;
+                    }
+                    inMicLevelTmp = stt->micVol;
+
+#ifdef MIC_LEVEL_FEEDBACK
+                    //stt->numBlocksMicLvlSat = 0;
+#endif
+#ifdef AGC_DEBUG //test log
+                    fprintf(stt->fpt,
+                            "\tAGC->ProcessAnalog, frame %d: measure > UpperLim, micVol = %d, maxLevel = %d\n",
+                            stt->fcount, stt->micVol, stt->maxLevel);
+#endif
+                }
+            } else if (stt->Rxx160_LPw32 < stt->lowerSecondaryLimit)
+            {
+                stt->msTooHigh = 0;
+                stt->changeToSlowMode = 0;
+                stt->msTooLow += 2;
+
+                if (stt->msTooLow > stt->msecSpeechOuterChange)
+                {
+                    /* Raise the recording level */
+                    WebRtc_Word16 index, weightFIX;
+                    WebRtc_Word16 volNormFIX = 16384; // =1 in Q14.
+
+                    stt->msTooLow = 0;
+
+                    /* Normalize the volume level */
+                    tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
+                    if (stt->maxInit != stt->minLevel)
+                    {
+                        volNormFIX = (WebRtc_Word16)WEBRTC_SPL_DIV(tmp32,
+                                                              (stt->maxInit - stt->minLevel));
+                    }
+
+                    /* Find correct curve */
+                    WebRtcAgc_ExpCurve(volNormFIX, &index);
+
+                    /* Compute weighting factor for the volume increase, 32^(-2*X)/2+1.05 */
+                    weightFIX = kOffset1[index]
+                              - (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(kSlope1[index],
+                                                                         volNormFIX, 13);
+
+                    /* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */
+                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
+                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67);
+
+                    tmp32 = inMicLevelTmp - stt->minLevel;
+                    tmpU32 = ((WebRtc_UWord32)weightFIX * (WebRtc_UWord32)(inMicLevelTmp - stt->minLevel));
+                    stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel;
+                    if (stt->micVol < lastMicVol + 2)
+                    {
+                        stt->micVol = lastMicVol + 2;
+                    }
+
+                    inMicLevelTmp = stt->micVol;
+
+#ifdef MIC_LEVEL_FEEDBACK
+                    /* Count ms in level saturation */
+                    //if (stt->micVol > stt->maxAnalog) {
+                    if (stt->micVol > 150)
+                    {
+                        /* mic level is saturated */
+                        stt->numBlocksMicLvlSat++;
+                        fprintf(stderr, "Sat mic Level: %d\n", stt->numBlocksMicLvlSat);
+                    }
+#endif
+#ifdef AGC_DEBUG //test log
+                    fprintf(stt->fpt,
+                            "\tAGC->ProcessAnalog, frame %d: measure < 2ndLowerLim, micVol = %d\n",
+                            stt->fcount, stt->micVol);
+#endif
+                }
+            } else if (stt->Rxx160_LPw32 < stt->lowerLimit)
+            {
+                stt->msTooHigh = 0;
+                stt->changeToSlowMode = 0;
+                stt->msTooLow += 2;
+
+                if (stt->msTooLow > stt->msecSpeechInnerChange)
+                {
+                    /* Raise the recording level */
+                    WebRtc_Word16 index, weightFIX;
+                    WebRtc_Word16 volNormFIX = 16384; // =1 in Q14.
+
+                    stt->msTooLow = 0;
+
+                    /* Normalize the volume level */
+                    tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
+                    if (stt->maxInit != stt->minLevel)
+                    {
+                        volNormFIX = (WebRtc_Word16)WEBRTC_SPL_DIV(tmp32,
+                                                              (stt->maxInit - stt->minLevel));
+                    }
+
+                    /* Find correct curve */
+                    WebRtcAgc_ExpCurve(volNormFIX, &index);
+
+                    /* Compute weighting factor for the volume increase, (3.^(-2.*X))/8+1 */
+                    weightFIX = kOffset2[index]
+                              - (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(kSlope2[index],
+                                                                         volNormFIX, 13);
+
+                    /* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */
+                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
+                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67);
+
+                    tmp32 = inMicLevelTmp - stt->minLevel;
+                    tmpU32 = ((WebRtc_UWord32)weightFIX * (WebRtc_UWord32)(inMicLevelTmp - stt->minLevel));
+                    stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel;
+                    if (stt->micVol < lastMicVol + 1)
+                    {
+                        stt->micVol = lastMicVol + 1;
+                    }
+
+                    inMicLevelTmp = stt->micVol;
+
+#ifdef MIC_LEVEL_FEEDBACK
+                    /* Count ms in level saturation */
+                    //if (stt->micVol > stt->maxAnalog) {
+                    if (stt->micVol > 150)
+                    {
+                        /* mic level is saturated */
+                        stt->numBlocksMicLvlSat++;
+                        fprintf(stderr, "Sat mic Level: %d\n", stt->numBlocksMicLvlSat);
+                    }
+#endif
+#ifdef AGC_DEBUG //test log
+                    fprintf(stt->fpt,
+                            "\tAGC->ProcessAnalog, frame %d: measure < LowerLim, micVol = %d\n",
+                            stt->fcount, stt->micVol);
+#endif
+
+                }
+            } else
+            {
+                /* The signal is inside the desired range which is:
+                 * lowerLimit < Rxx160_LP/640 < upperLimit
+                 */
+                if (stt->changeToSlowMode > 4000)
+                {
+                    stt->msecSpeechInnerChange = 1000;
+                    stt->msecSpeechOuterChange = 500;
+                    stt->upperLimit = stt->upperPrimaryLimit;
+                    stt->lowerLimit = stt->lowerPrimaryLimit;
+                } else
+                {
+                    stt->changeToSlowMode += 2; // in milliseconds
+                }
+                stt->msTooLow = 0;
+                stt->msTooHigh = 0;
+
+                stt->micVol = inMicLevelTmp;
+
+            }
+#ifdef MIC_LEVEL_FEEDBACK
+            if (stt->numBlocksMicLvlSat > NUM_BLOCKS_IN_SAT_BEFORE_CHANGE_TARGET)
+            {
+                stt->micLvlSat = 1;
+                fprintf(stderr, "target before = %d (%d)\n", stt->analogTargetLevel, stt->targetIdx);
+                WebRtcAgc_UpdateAgcThresholds(stt);
+                WebRtcAgc_CalculateGainTable(&(stt->digitalAgc.gainTable[0]),
+                        stt->compressionGaindB, stt->targetLevelDbfs, stt->limiterEnable,
+                        stt->analogTarget);
+                stt->numBlocksMicLvlSat = 0;
+                stt->micLvlSat = 0;
+                fprintf(stderr, "target offset = %d\n", stt->targetIdxOffset);
+                fprintf(stderr, "target after  = %d (%d)\n", stt->analogTargetLevel, stt->targetIdx);
+            }
+#endif
+        }
+    }
+
+    /* Ensure gain is not increased in presence of echo or after a mute event
+     * (but allow the zeroCtrl() increase on the frame of a mute detection).
+     */
+    if (echo == 1 || (stt->muteGuardMs > 0 && stt->muteGuardMs < kMuteGuardTimeMs))
+    {
+        if (stt->micVol > lastMicVol)
+        {
+            stt->micVol = lastMicVol;
+        }
+    }
+
+    /* limit the gain */
+    if (stt->micVol > stt->maxLevel)
+    {
+        stt->micVol = stt->maxLevel;
+    } else if (stt->micVol < stt->minOutput)
+    {
+        stt->micVol = stt->minOutput;
+    }
+
+    *outMicLevel = WEBRTC_SPL_RSHIFT_W32(stt->micVol, stt->scale);
+    if (*outMicLevel > WEBRTC_SPL_RSHIFT_W32(stt->maxAnalog, stt->scale))
+    {
+        *outMicLevel = WEBRTC_SPL_RSHIFT_W32(stt->maxAnalog, stt->scale);
+    }
+
+    return 0;
+}
+
+int WebRtcAgc_Process(void *agcInst, const WebRtc_Word16 *in_near,
+                      const WebRtc_Word16 *in_near_H, WebRtc_Word16 samples,
+                      WebRtc_Word16 *out, WebRtc_Word16 *out_H, WebRtc_Word32 inMicLevel,
+                      WebRtc_Word32 *outMicLevel, WebRtc_Word16 echo,
+                      WebRtc_UWord8 *saturationWarning)
+{
+    Agc_t *stt;
+    WebRtc_Word32 inMicLevelTmp;
+    WebRtc_Word16 subFrames, i;
+    WebRtc_UWord8 satWarningTmp = 0;
+
+    stt = (Agc_t *)agcInst;
+
+    /* Reject calls on a NULL instance. */
+    if (stt == NULL)
+    {
+        return -1;
+    }
+
+
+
+    if (stt->fs == 8000)
+    {
+        if ((samples != 80) && (samples != 160))
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->Process, frame %d: Invalid number of samples\n\n", stt->fcount);
+#endif
+            return -1;
+        }
+        subFrames = 80;
+    } else if (stt->fs == 16000)
+    {
+        if ((samples != 160) && (samples != 320))
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->Process, frame %d: Invalid number of samples\n\n", stt->fcount);
+#endif
+            return -1;
+        }
+        subFrames = 160;
+    } else if (stt->fs == 32000)
+    {
+        if ((samples != 160) && (samples != 320))
+        {
+#ifdef AGC_DEBUG //test log
+            fprintf(stt->fpt,
+                    "AGC->Process, frame %d: Invalid number of samples\n\n", stt->fcount);
+#endif
+            return -1;
+        }
+        subFrames = 160;
+    } else
+    {
+#ifdef AGC_DEBUG// test log
+        fprintf(stt->fpt,
+                "AGC->Process, frame %d: Invalid sample rate\n\n", stt->fcount);
+#endif
+        return -1;
+    }
+
+    /* Check for valid pointers based on sampling rate */
+    if (stt->fs == 32000 && in_near_H == NULL)
+    {
+        return -1;
+    }
+    /* Check for valid pointers for low band */
+    if (in_near == NULL)
+    {
+        return -1;
+    }
+
+    *saturationWarning = 0;
+    //TODO: PUT IN RANGE CHECKING FOR INPUT LEVELS
+    *outMicLevel = inMicLevel;
+    inMicLevelTmp = inMicLevel;
+
+    // TODO(andrew): clearly we don't need input and output pointers...
+    //   Change the interface to take a shared input/output.
+    if (in_near != out)
+    {
+        // Only needed if they don't already point to the same place.
+        memcpy(out, in_near, samples * sizeof(WebRtc_Word16));
+    }
+    if (stt->fs == 32000)
+    {
+        if (in_near_H != out_H)
+        {
+            memcpy(out_H, in_near_H, samples * sizeof(WebRtc_Word16));
+        }
+    }
+
+#ifdef AGC_DEBUG//test log
+    stt->fcount++;
+#endif
+
+    for (i = 0; i < samples; i += subFrames) /* Process in subFrame chunks (80/160 samples). */
+    {
+        /* Guard the high-band pointers: they may be NULL when fs != 32000, and indexing a NULL pointer is undefined behavior. */
+        if (WebRtcAgc_ProcessDigital(&stt->digitalAgc, &in_near[i], (in_near_H != NULL) ? &in_near_H[i] : NULL, &out[i],
+                           (out_H != NULL) ? &out_H[i] : NULL, stt->fs, stt->lowLevelSignal) == -1)
+        {
+#ifdef AGC_DEBUG//test log
+            fprintf(stt->fpt, "AGC->Process, frame %d: Error from DigAGC\n\n", stt->fcount);
+#endif
+            return -1;
+        }
+        if ((stt->agcMode < kAgcModeFixedDigital) && ((stt->lowLevelSignal == 0)
+                || (stt->agcMode != kAgcModeAdaptiveDigital)))
+        {
+            if (WebRtcAgc_ProcessAnalog(agcInst, inMicLevelTmp, outMicLevel,
+                                          stt->vadMic.logRatio, echo, saturationWarning) == -1)
+            {
+                return -1;
+            }
+        }
+#ifdef AGC_DEBUG//test log
+        /* Five conversion specifiers to match the five arguments (micVol was previously never printed). */
+        fprintf(stt->agcLog, "%5d\t%d\t%d\t%d\t%d\n", stt->fcount, inMicLevelTmp, *outMicLevel, stt->maxLevel, stt->micVol);
+#endif
+
+        /* update queue */
+        if (stt->inQueue > 1)
+        {
+            memcpy(stt->env[0], stt->env[1], 10 * sizeof(WebRtc_Word32));
+            memcpy(stt->Rxx16w32_array[0], stt->Rxx16w32_array[1], 5 * sizeof(WebRtc_Word32));
+        }
+
+        if (stt->inQueue > 0)
+        {
+            stt->inQueue--;
+        }
+
+        /* If 20ms frames are used the input mic level must be updated so that
+         * the analog AGC does not think that there has been a manual volume
+         * change. */
+        inMicLevelTmp = *outMicLevel;
+
+        /* Store a positive saturation warning. */
+        if (*saturationWarning == 1)
+        {
+            satWarningTmp = 1;
+        }
+    }
+
+    /* Trigger the saturation warning if displayed by any of the frames. */
+    *saturationWarning = satWarningTmp;
+
+    return 0;
+}
+
+int WebRtcAgc_set_config(void *agcInst, WebRtcAgc_config_t agcConfig) /* Apply a new config; 0 on success, -1 with lastError set on failure. */
+{
+    Agc_t *stt;
+    stt = (Agc_t *)agcInst;
+
+    if (stt == NULL)
+    {
+        return -1;
+    }
+
+    if (stt->initFlag != kInitCheck)
+    {
+        stt->lastError = AGC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+    /* Validate all parameters before mutating state, so a rejected config leaves the previously active settings intact. */
+    if (agcConfig.limiterEnable != kAgcFalse && agcConfig.limiterEnable != kAgcTrue)
+    {
+        stt->lastError = AGC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    if ((agcConfig.targetLevelDbfs < 0) || (agcConfig.targetLevelDbfs > 31))
+    {
+        stt->lastError = AGC_BAD_PARAMETER_ERROR;
+        return -1;
+    }
+    stt->limiterEnable = agcConfig.limiterEnable;
+    stt->compressionGaindB = agcConfig.compressionGaindB;
+    stt->targetLevelDbfs = agcConfig.targetLevelDbfs;
+
+    if (stt->agcMode == kAgcModeFixedDigital)
+    {
+        /* Adjust for different parameter interpretation in FixedDigital mode */
+        stt->compressionGaindB += agcConfig.targetLevelDbfs;
+    }
+
+    /* Update threshold levels for analog adaptation */
+    WebRtcAgc_UpdateAgcThresholds(stt);
+
+    /* Recalculate gain table */
+    if (WebRtcAgc_CalculateGainTable(&(stt->digitalAgc.gainTable[0]), stt->compressionGaindB,
+                           stt->targetLevelDbfs, stt->limiterEnable, stt->analogTarget) == -1)
+    {
+#ifdef AGC_DEBUG//test log
+        fprintf(stt->fpt, "AGC->set_config, frame %d: Error from calcGainTable\n\n", stt->fcount);
+#endif
+        return -1;
+    }
+    /* Store the config in a WebRtcAgc_config_t */
+    stt->usedConfig.compressionGaindB = agcConfig.compressionGaindB;
+    stt->usedConfig.limiterEnable = agcConfig.limiterEnable;
+    stt->usedConfig.targetLevelDbfs = agcConfig.targetLevelDbfs;
+
+    return 0;
+}
+
+/* Copy the currently active configuration into *config. */
+int WebRtcAgc_get_config(void *agcInst, WebRtcAgc_config_t *config)
+{
+    Agc_t *stt = (Agc_t *)agcInst;
+
+    if (stt == NULL)
+    {
+        return -1;
+    }
+    if (config == NULL)
+    {
+        /* Cannot report through *config; record the error on the instance. */
+        stt->lastError = AGC_NULL_POINTER_ERROR;
+        return -1;
+    }
+    if (stt->initFlag != kInitCheck)
+    {
+        stt->lastError = AGC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    /* Report the settings most recently applied via WebRtcAgc_set_config(). */
+    config->compressionGaindB = stt->usedConfig.compressionGaindB;
+    config->limiterEnable = stt->usedConfig.limiterEnable;
+    config->targetLevelDbfs = stt->usedConfig.targetLevelDbfs;
+
+    return 0;
+}
+
+int WebRtcAgc_Create(void **agcInst) /* Allocate an AGC instance into *agcInst; returns 0 on success, -1 on failure. */
+{
+    Agc_t *stt;
+    if (agcInst == NULL)
+    {
+        return -1;
+    }
+    stt = (Agc_t *)malloc(sizeof(Agc_t));
+
+    *agcInst = stt; /* Stored even when NULL so the caller observes the failed allocation. */
+    if (stt == NULL)
+    {
+        return -1;
+    }
+
+#ifdef AGC_DEBUG
+    stt->fpt = fopen("./agc_test_log.txt", "wt"); /* NOTE(review): fopen results are unchecked; a NULL stream here makes later fprintf calls misbehave -- confirm acceptable for debug builds. */
+    stt->agcLog = fopen("./agc_debug_log.txt", "wt");
+    stt->digitalAgc.logFile = fopen("./agc_log.txt", "wt");
+#endif
+
+    stt->initFlag = 0; /* Full initialization is deferred to WebRtcAgc_Init(). */
+    stt->lastError = 0;
+
+    return 0;
+}
+
+int WebRtcAgc_Free(void *state) /* Release an instance allocated by WebRtcAgc_Create(); -1 if state is NULL. */
+{
+    Agc_t *stt = (Agc_t *)state;
+    /* Guard against a NULL instance: avoids fclose(NULL) in AGC_DEBUG builds. */
+    if (stt == NULL) return -1;
+#ifdef AGC_DEBUG
+    fclose(stt->fpt);
+    fclose(stt->agcLog);
+    fclose(stt->digitalAgc.logFile);
+#endif
+    free(stt);
+
+    return 0;
+}
+
+/* Initialize an AGC instance created by WebRtcAgc_Create().
+ * minLevel/maxLevel give the analog volume range, agcMode selects the
+ * operating mode and fs the sampling rate. Returns 0 on success, -1 on error. */
+int WebRtcAgc_Init(void *agcInst, WebRtc_Word32 minLevel, WebRtc_Word32 maxLevel,
+                             WebRtc_Word16 agcMode, WebRtc_UWord32 fs)
+{
+    WebRtc_Word32 max_add, tmp32;
+    WebRtc_Word16 i;
+    int tmpNorm;
+    Agc_t *stt;
+
+    /* Typecast state pointer. NOTE(review): agcInst is never NULL-checked here. */
+    stt = (Agc_t *)agcInst;
+
+    if (WebRtcAgc_InitDigital(&stt->digitalAgc, agcMode) != 0)
+    {
+        stt->lastError = AGC_UNINITIALIZED_ERROR;
+        return -1;
+    }
+
+    /* Analog AGC variables */
+    stt->envSum = 0;
+
+    /* mode     = 0 - Only saturation protection
+     *            1 - Analog Automatic Gain Control [-targetLevelDbfs (default -3 dBOv)]
+     *            2 - Digital Automatic Gain Control [-targetLevelDbfs (default -3 dBOv)]
+     *            3 - Fixed Digital Gain [compressionGaindB (default 8 dB)]
+     */
+#ifdef AGC_DEBUG//test log
+    stt->fcount = 0;
+    fprintf(stt->fpt, "AGC->Init\n");
+#endif
+    if (agcMode < kAgcModeUnchanged || agcMode > kAgcModeFixedDigital)
+    {
+#ifdef AGC_DEBUG//test log
+        fprintf(stt->fpt, "AGC->Init: error, incorrect mode\n\n");
+#endif
+        return -1;
+    }
+    stt->agcMode = agcMode;
+    stt->fs = fs; /* Sampling rate; validated later in WebRtcAgc_Process(). */
+
+    /* initialize input VAD */
+    WebRtcAgc_InitVad(&stt->vadMic);
+
+    /* If the volume range is smaller than 0-256 then
+     * the levels are shifted up to Q8-domain */
+    tmpNorm = WebRtcSpl_NormU32((WebRtc_UWord32)maxLevel);
+    stt->scale = tmpNorm - 23;
+    if (stt->scale < 0)
+    {
+        stt->scale = 0;
+    }
+    // TODO(bjornv): Investigate if we really need to scale up a small range now when we have
+    // a guard against zero-increments. For now, we do not support scale up (scale = 0).
+    stt->scale = 0;
+    maxLevel = WEBRTC_SPL_LSHIFT_W32(maxLevel, stt->scale);
+    minLevel = WEBRTC_SPL_LSHIFT_W32(minLevel, stt->scale);
+
+    /* Make minLevel and maxLevel static in AdaptiveDigital */
+    if (stt->agcMode == kAgcModeAdaptiveDigital)
+    {
+        minLevel = 0;
+        maxLevel = 255;
+        stt->scale = 0;
+    }
+    /* The maximum supplemental volume range is based on a vague idea
+     * of how much lower the gain will be than the real analog gain. */
+    max_add = WEBRTC_SPL_RSHIFT_W32(maxLevel - minLevel, 2);
+
+    /* Minimum/maximum volume level that can be set */
+    stt->minLevel = minLevel;
+    stt->maxAnalog = maxLevel;
+    stt->maxLevel = maxLevel + max_add;
+    stt->maxInit = stt->maxLevel;
+
+    stt->zeroCtrlMax = stt->maxAnalog;
+
+    /* Initialize micVol parameter */
+    stt->micVol = stt->maxAnalog;
+    if (stt->agcMode == kAgcModeAdaptiveDigital)
+    {
+        stt->micVol = 127; /* Mid-point of mic level */
+    }
+    stt->micRef = stt->micVol;
+    stt->micGainIdx = 127;
+#ifdef MIC_LEVEL_FEEDBACK
+    stt->numBlocksMicLvlSat = 0;
+    stt->micLvlSat = 0;
+#endif
+#ifdef AGC_DEBUG//test log
+    fprintf(stt->fpt,
+            "AGC->Init: minLevel = %d, maxAnalog = %d, maxLevel = %d\n",
+            stt->minLevel, stt->maxAnalog, stt->maxLevel);
+#endif
+
+    /* Minimum output volume is 4% higher than the available lowest volume level */
+    tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (WebRtc_Word32)10, 8);
+    stt->minOutput = (stt->minLevel + tmp32);
+
+    stt->msTooLow = 0;
+    stt->msTooHigh = 0;
+    stt->changeToSlowMode = 0;
+    stt->firstCall = 0;
+    stt->msZero = 0;
+    stt->muteGuardMs = 0;
+    stt->gainTableIdx = 0;
+
+    stt->msecSpeechInnerChange = kMsecSpeechInner;
+    stt->msecSpeechOuterChange = kMsecSpeechOuter;
+
+    stt->activeSpeech = 0;
+    stt->Rxx16_LPw32Max = 0;
+
+    stt->vadThreshold = kNormalVadThreshold;
+    stt->inActive = 0;
+
+    for (i = 0; i < RXX_BUFFER_LEN; i++)
+    {
+        stt->Rxx16_vectorw32[i] = (WebRtc_Word32)1000; /* -54dBm0 */
+    }
+    stt->Rxx160w32 = 125 * RXX_BUFFER_LEN; /* (stt->Rxx16_vectorw32[0]>>3) = 125 */
+
+    stt->Rxx16pos = 0;
+    stt->Rxx16_LPw32 = (WebRtc_Word32)16284; /* Q(-4) */ /* NOTE(review): 1000 << 4 would be 16000; confirm 16284 is intentional. */
+
+    for (i = 0; i < 5; i++)
+    {
+        stt->Rxx16w32_array[0][i] = 0;
+    }
+    for (i = 0; i < 20; i++)
+    {
+        stt->env[0][i] = 0;
+    }
+    stt->inQueue = 0;
+
+#ifdef MIC_LEVEL_FEEDBACK
+    stt->targetIdxOffset = 0;
+#endif
+
+    WebRtcSpl_MemSetW32(stt->filterState, 0, 8);
+
+    stt->initFlag = kInitCheck;
+    // Default config settings.
+    stt->defaultConfig.limiterEnable = kAgcTrue;
+    stt->defaultConfig.targetLevelDbfs = AGC_DEFAULT_TARGET_LEVEL;
+    stt->defaultConfig.compressionGaindB = AGC_DEFAULT_COMP_GAIN;
+
+    if (WebRtcAgc_set_config(stt, stt->defaultConfig) == -1)
+    {
+        stt->lastError = AGC_UNSPECIFIED_ERROR;
+        return -1;
+    }
+    stt->Rxx160_LPw32 = stt->analogTargetLevel; // Initialize rms value
+
+    stt->lowLevelSignal = 0;
+
+    /* Only positive values are allowed that are not too large. NOTE(review): this check runs after full initialization, and in AdaptiveDigital mode min/maxLevel were overwritten above (0/255), so the caller's original values are no longer being validated -- consider checking earlier. */
+    if ((minLevel >= maxLevel) || (maxLevel & 0xFC000000))
+    {
+#ifdef AGC_DEBUG//test log
+        fprintf(stt->fpt, "minLevel, maxLevel value(s) are invalid\n\n");
+#endif
+        return -1;
+    } else
+    {
+#ifdef AGC_DEBUG//test log
+        fprintf(stt->fpt, "\n");
+#endif
+        return 0;
+    }
+}
diff --git a/trunk/src/modules/audio_processing/agc/analog_agc.h b/trunk/src/modules/audio_processing/agc/analog_agc.h
new file mode 100644
index 0000000..b32ac65
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/analog_agc.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_
+
+#include "typedefs.h"
+#include "gain_control.h"
+#include "digital_agc.h"
+
+//#define AGC_DEBUG
+//#define MIC_LEVEL_FEEDBACK
+#ifdef AGC_DEBUG
+#include <stdio.h>
+#endif
+
+/* Analog Automatic Gain Control variables:
+ * Constant declarations (inner limits inside which no changes are done)
+ * In the beginning the range is narrower to widen as soon as the measure
+ * 'Rxx160_LP' is inside it. Currently the starting limits are -22.2+/-1dBm0
+ * and the final limits -22.2+/-2.5dBm0. These levels make the speech signal
+ * go towards -25.4dBm0 (-31.4dBov). Tuned with wbfile-31.4dBov.pcm
+ * The limits are created by running the AGC with a file having the desired
+ * signal level and thereafter plotting Rxx160_LP in the dBm0-domain defined
+ * by out=10*log10(in/260537279.7); Set the target level to the average level
+ * of our measure Rxx160_LP. Remember that the levels are in blocks of 16 in
+ * Q(-7). (Example matlab code: round(db2pow(-21.2)*16/2^7) )
+ */
+#define RXX_BUFFER_LEN  10 /* Number of 16-sample subframe energies per 160-sample frame (Rxx160) */
+
+static const WebRtc_Word16 kMsecSpeechInner = 520; // Initial inner-limit ms-of-speech before a volume change
+static const WebRtc_Word16 kMsecSpeechOuter = 340; // Initial outer-limit ms-of-speech before a volume change
+
+static const WebRtc_Word16 kNormalVadThreshold = 400;
+
+static const WebRtc_Word16 kAlphaShortTerm = 6; // 2^(-6) = 0.0156, applied as a right-shift amount
+static const WebRtc_Word16 kAlphaLongTerm = 10; // 2^(-10) = 0.000977, applied as a right-shift amount
+
+typedef struct /* Complete state for one AGC instance (analog + digital parts). */
+{
+    // Configurable parameters/variables
+    WebRtc_UWord32      fs;                 // Sampling frequency
+    WebRtc_Word16       compressionGaindB;  // Fixed gain level in dB
+    WebRtc_Word16       targetLevelDbfs;    // Target level in -dBfs of envelope (default -3)
+    WebRtc_Word16       agcMode;            // Hard coded mode (adaptAna/adaptDig/fixedDig)
+    WebRtc_UWord8       limiterEnable;      // Enabling limiter (on/off (default off))
+    WebRtcAgc_config_t  defaultConfig;
+    WebRtcAgc_config_t  usedConfig;
+
+    // General variables
+    WebRtc_Word16       initFlag;
+    WebRtc_Word16       lastError;
+
+    // Target level parameters
+    // Based on the above: analogTargetLevel = round((32767*10^(-22/20))^2*16/2^7)
+    WebRtc_Word32       analogTargetLevel;  // = RXX_BUFFER_LEN * 846805;       -22 dBfs
+    WebRtc_Word32       startUpperLimit;    // = RXX_BUFFER_LEN * 1066064;      -21 dBfs
+    WebRtc_Word32       startLowerLimit;    // = RXX_BUFFER_LEN * 672641;       -23 dBfs
+    WebRtc_Word32       upperPrimaryLimit;  // = RXX_BUFFER_LEN * 1342095;      -20 dBfs
+    WebRtc_Word32       lowerPrimaryLimit;  // = RXX_BUFFER_LEN * 534298;       -24 dBfs
+    WebRtc_Word32       upperSecondaryLimit;// = RXX_BUFFER_LEN * 2677832;      -17 dBfs
+    WebRtc_Word32       lowerSecondaryLimit;// = RXX_BUFFER_LEN * 267783;       -27 dBfs
+    WebRtc_UWord16      targetIdx;          // Table index for corresponding target level
+#ifdef MIC_LEVEL_FEEDBACK
+    WebRtc_UWord16      targetIdxOffset;    // Table index offset for level compensation
+#endif
+    WebRtc_Word16       analogTarget;       // Digital reference level in ENV scale
+
+    // Analog AGC specific variables
+    WebRtc_Word32       filterState[8];     // For downsampling wb to nb
+    WebRtc_Word32       upperLimit;         // Upper limit for mic energy
+    WebRtc_Word32       lowerLimit;         // Lower limit for mic energy
+    WebRtc_Word32       Rxx160w32;          // Average energy for one frame
+    WebRtc_Word32       Rxx16_LPw32;        // Low pass filtered subframe energies
+    WebRtc_Word32       Rxx160_LPw32;       // Low pass filtered frame energies
+    WebRtc_Word32       Rxx16_LPw32Max;     // Keeps track of largest energy subframe
+    WebRtc_Word32       Rxx16_vectorw32[RXX_BUFFER_LEN];// Array with subframe energies
+    WebRtc_Word32       Rxx16w32_array[2][5];// Energy values of microphone signal
+    WebRtc_Word32       env[2][10];         // Envelope values of subframes
+
+    WebRtc_Word16       Rxx16pos;           // Current position in the Rxx16_vectorw32
+    WebRtc_Word16       envSum;             // Filtered scaled envelope in subframes
+    WebRtc_Word16       vadThreshold;       // Threshold for VAD decision
+    WebRtc_Word16       inActive;           // Inactive time in milliseconds
+    WebRtc_Word16       msTooLow;           // Milliseconds of speech at a too low level
+    WebRtc_Word16       msTooHigh;          // Milliseconds of speech at a too high level
+    WebRtc_Word16       changeToSlowMode;   // Change to slow mode after some time at target
+    WebRtc_Word16       firstCall;          // First call to the process-function
+    WebRtc_Word16       msZero;             // Milliseconds of zero input
+    WebRtc_Word16       msecSpeechOuterChange;// Min ms of speech between volume changes
+    WebRtc_Word16       msecSpeechInnerChange;// Min ms of speech between volume changes
+    WebRtc_Word16       activeSpeech;       // Milliseconds of active speech
+    WebRtc_Word16       muteGuardMs;        // Counter to prevent mute action
+    WebRtc_Word16       inQueue;            // 10 ms batch indicator
+
+    // Microphone level variables
+    WebRtc_Word32       micRef;             // Remember ref. mic level for virtual mic
+    WebRtc_UWord16      gainTableIdx;       // Current position in virtual gain table
+    WebRtc_Word32       micGainIdx;         // Gain index of mic level to increase slowly
+    WebRtc_Word32       micVol;             // Remember volume between frames
+    WebRtc_Word32       maxLevel;           // Max possible vol level, incl dig gain
+    WebRtc_Word32       maxAnalog;          // Maximum possible analog volume level
+    WebRtc_Word32       maxInit;            // Initial value of "max"
+    WebRtc_Word32       minLevel;           // Minimum possible volume level
+    WebRtc_Word32       minOutput;          // Minimum output volume level
+    WebRtc_Word32       zeroCtrlMax;        // Remember max gain => don't amp low input
+
+    WebRtc_Word16       scale;              // Scale factor for internal volume levels
+#ifdef MIC_LEVEL_FEEDBACK
+    WebRtc_Word16       numBlocksMicLvlSat;
+    WebRtc_UWord8 micLvlSat; // Set while AGC thresholds are being recomputed after mic-level saturation (see ProcessAnalog)
+#endif
+    // Structs for VAD and digital_agc
+    AgcVad_t            vadMic;
+    DigitalAgc_t        digitalAgc;
+
+#ifdef AGC_DEBUG
+    FILE*               fpt;
+    FILE*               agcLog;
+    WebRtc_Word32       fcount;
+#endif
+
+    WebRtc_Word16       lowLevelSignal; // NOTE(review): consumed in WebRtcAgc_Process; appears to be produced by the digital AGC -- confirm
+} Agc_t;
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_
diff --git a/trunk/src/modules/audio_processing/agc/digital_agc.c b/trunk/src/modules/audio_processing/agc/digital_agc.c
new file mode 100644
index 0000000..3b4b39b
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/digital_agc.c
@@ -0,0 +1,798 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* digital_agc.c
+ *
+ */
+
+#include "digital_agc.h"
+
+#include <assert.h>
+#include <string.h>
+#ifdef AGC_DEBUG
+#include <stdio.h>
+#endif
+
+#include "gain_control.h"
+
+// To generate the gaintable, copy&paste the following lines to a Matlab window:
+// MaxGain = 6; MinGain = 0; CompRatio = 3; Knee = 1;
+// zeros = 0:31; lvl = 2.^(1-zeros);
+// A = -10*log10(lvl) * (CompRatio - 1) / CompRatio;
+// B = MaxGain - MinGain;
+// gains = round(2^16*10.^(0.05 * (MinGain + B * ( log(exp(-Knee*A)+exp(-Knee*B)) - log(1+exp(-Knee*B)) ) / log(1/(1+exp(Knee*B))))));
+// fprintf(1, '\t%i, %i, %i, %i,\n', gains);
+// % Matlab code for plotting the gain and input/output level characteristic (copy/paste the following 3 lines):
+// in = 10*log10(lvl); out = 20*log10(gains/65536);
+// subplot(121); plot(in, out); axis([-30, 0, -5, 20]); grid on; xlabel('Input (dB)'); ylabel('Gain (dB)');
+// subplot(122); plot(in, in+out); axis([-30, 0, -30, 5]); grid on; xlabel('Input (dB)'); ylabel('Output (dB)');
+// zoom on;
+
+// Generator table for y=log2(1+e^x) in Q8.
+// Indexed by the integer part of x; WebRtcAgc_CalculateGainTable reads
+// kGenFuncTable[intPart] and kGenFuncTable[intPart + 1] and interpolates
+// linearly between them, so interpolated lookups must keep
+// intPart <= kGenFuncTableSize - 2.
enum { kGenFuncTableSize = 128 };
static const WebRtc_UWord16 kGenFuncTable[kGenFuncTableSize] = {
+          256,   485,   786,  1126,  1484,  1849,  2217,  2586,
+         2955,  3324,  3693,  4063,  4432,  4801,  5171,  5540,
+         5909,  6279,  6648,  7017,  7387,  7756,  8125,  8495,
+         8864,  9233,  9603,  9972, 10341, 10711, 11080, 11449,
+        11819, 12188, 12557, 12927, 13296, 13665, 14035, 14404,
+        14773, 15143, 15512, 15881, 16251, 16620, 16989, 17359,
+        17728, 18097, 18466, 18836, 19205, 19574, 19944, 20313,
+        20682, 21052, 21421, 21790, 22160, 22529, 22898, 23268,
+        23637, 24006, 24376, 24745, 25114, 25484, 25853, 26222,
+        26592, 26961, 27330, 27700, 28069, 28438, 28808, 29177,
+        29546, 29916, 30285, 30654, 31024, 31393, 31762, 32132,
+        32501, 32870, 33240, 33609, 33978, 34348, 34717, 35086,
+        35456, 35825, 36194, 36564, 36933, 37302, 37672, 38041,
+        38410, 38780, 39149, 39518, 39888, 40257, 40626, 40996,
+        41365, 41734, 42104, 42473, 42842, 43212, 43581, 43950,
+        44320, 44689, 45058, 45428, 45797, 46166, 46536, 46905
+};
+
+// Decay time of the long-term VAD statistics, in 10 ms frames; must be < 3000.
+static const WebRtc_Word16 kAvgDecayTime = 250; // frames; < 3000
+
+// Generates the 32-entry compressor gain table (Q16) used by the fixed
+// digital part of the AGC. See the Matlab snippet above for the reference
+// floating-point computation. Entry i covers the input level that is
+// (i - 1) half-steps of 10*log10(2) dB below full scale.
+// Returns 0 on success, or -1 (after asserting) if the derived gain
+// difference falls outside the range of kGenFuncTable.
WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
+                                           WebRtc_Word16 digCompGaindB, // Q0
+                                           WebRtc_Word16 targetLevelDbfs,// Q0
+                                           WebRtc_UWord8 limiterEnable,
+                                           WebRtc_Word16 analogTarget) // Q0
+{
+    // This function generates the compressor gain table used in the fixed digital part.
+    WebRtc_UWord32 tmpU32no1, tmpU32no2, absInLevel, logApprox;
+    WebRtc_Word32 inLevel, limiterLvl;
+    WebRtc_Word32 tmp32, tmp32no1, tmp32no2, numFIX, den, y32;
+    const WebRtc_UWord16 kLog10 = 54426; // log2(10)     in Q14
+    const WebRtc_UWord16 kLog10_2 = 49321; // 10*log10(2)  in Q14
+    const WebRtc_UWord16 kLogE_1 = 23637; // log2(e)      in Q14
+    WebRtc_UWord16 constMaxGain;
+    WebRtc_UWord16 tmpU16, intPart, fracPart;
+    const WebRtc_Word16 kCompRatio = 3;
+    const WebRtc_Word16 kSoftLimiterLeft = 1;
+    WebRtc_Word16 limiterOffset = 0; // Limiter offset
+    WebRtc_Word16 limiterIdx, limiterLvlX;
+    WebRtc_Word16 constLinApprox, zeroGainLvl, maxGain, diffGain;
+    WebRtc_Word16 i, tmp16, tmp16no1;
+    int zeros, zerosScale;
+
+    // Calculate maximum digital gain and zero gain level
+    tmp32no1 = WEBRTC_SPL_MUL_16_16(digCompGaindB - analogTarget, kCompRatio - 1);
+    tmp16no1 = analogTarget - targetLevelDbfs;
+    tmp16no1 += WebRtcSpl_DivW32W16ResW16(tmp32no1 + (kCompRatio >> 1), kCompRatio);
+    maxGain = WEBRTC_SPL_MAX(tmp16no1, (analogTarget - targetLevelDbfs));
+    tmp32no1 = WEBRTC_SPL_MUL_16_16(maxGain, kCompRatio);
+    zeroGainLvl = digCompGaindB;
+    zeroGainLvl -= WebRtcSpl_DivW32W16ResW16(tmp32no1 + ((kCompRatio - 1) >> 1),
+                                             kCompRatio - 1);
+    if ((digCompGaindB <= analogTarget) && (limiterEnable))
+    {
+        zeroGainLvl += (analogTarget - digCompGaindB + kSoftLimiterLeft);
+        limiterOffset = 0;
+    }
+
+    // Calculate the difference between maximum gain and gain at 0dB0v:
+    //  diffGain = maxGain + (compRatio-1)*zeroGainLvl/compRatio
+    //           = (compRatio-1)*digCompGaindB/compRatio
+    tmp32no1 = WEBRTC_SPL_MUL_16_16(digCompGaindB, kCompRatio - 1);
+    diffGain = WebRtcSpl_DivW32W16ResW16(tmp32no1 + (kCompRatio >> 1), kCompRatio);
+    // diffGain is used directly as an index into kGenFuncTable below.
+    if (diffGain < 0 || diffGain >= kGenFuncTableSize)
+    {
+        assert(0);
+        return -1;
+    }
+
+    // Calculate the limiter level and index:
+    //  limiterLvlX = analogTarget - limiterOffset
+    //  limiterLvl  = targetLevelDbfs + limiterOffset/compRatio
+    limiterLvlX = analogTarget - limiterOffset;
+    limiterIdx = 2
+            + WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)limiterLvlX, 13),
+                                        WEBRTC_SPL_RSHIFT_U16(kLog10_2, 1));
+    tmp16no1 = WebRtcSpl_DivW32W16ResW16(limiterOffset + (kCompRatio >> 1), kCompRatio);
+    limiterLvl = targetLevelDbfs + tmp16no1;
+
+    // Calculate (through table lookup):
+    //  constMaxGain = log2(1+2^(log2(e)*diffGain)); (in Q8)
+    constMaxGain = kGenFuncTable[diffGain]; // in Q8
+
+    // Calculate a parameter used to approximate the fractional part of 2^x with a
+    // piecewise linear function in Q14:
+    //  constLinApprox = round(3/2*(4*(3-2*sqrt(2))/(log(2)^2)-0.5)*2^14);
+    constLinApprox = 22817; // in Q14
+
+    // Calculate a denominator used in the exponential part to convert from dB to linear scale:
+    //  den = 20*constMaxGain (in Q8)
+    den = WEBRTC_SPL_MUL_16_U16(20, constMaxGain); // in Q8
+
+    // One table entry per half-step of 10*log10(2) dB of input level.
+    for (i = 0; i < 32; i++)
+    {
+        // Calculate scaled input level (compressor):
+        //  inLevel = fix((-constLog10_2*(compRatio-1)*(1-i)+fix(compRatio/2))/compRatio)
+        tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(kCompRatio - 1, i - 1); // Q0
+        tmp32 = WEBRTC_SPL_MUL_16_U16(tmp16, kLog10_2) + 1; // Q14
+        inLevel = WebRtcSpl_DivW32W16(tmp32, kCompRatio); // Q14
+
+        // Calculate diffGain-inLevel, to map using the genFuncTable
+        inLevel = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)diffGain, 14) - inLevel; // Q14
+
+        // Make calculations on abs(inLevel) and compensate for the sign afterwards.
+        absInLevel = (WebRtc_UWord32)WEBRTC_SPL_ABS_W32(inLevel); // Q14
+
+        // LUT with interpolation
+        intPart = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(absInLevel, 14);
+        fracPart = (WebRtc_UWord16)(absInLevel & 0x00003FFF); // extract the fractional part
+        tmpU16 = kGenFuncTable[intPart + 1] - kGenFuncTable[intPart]; // Q8
+        tmpU32no1 = WEBRTC_SPL_UMUL_16_16(tmpU16, fracPart); // Q22
+        tmpU32no1 += WEBRTC_SPL_LSHIFT_U32((WebRtc_UWord32)kGenFuncTable[intPart], 14); // Q22
+        logApprox = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 8); // Q14
+        // Compensate for negative exponent using the relation:
+        //  log2(1 + 2^-x) = log2(1 + 2^x) - x
+        if (inLevel < 0)
+        {
+            zeros = WebRtcSpl_NormU32(absInLevel);
+            zerosScale = 0;
+            if (zeros < 15)
+            {
+                // Not enough space for multiplication
+                tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(absInLevel, 15 - zeros); // Q(zeros-1)
+                tmpU32no2 = WEBRTC_SPL_UMUL_32_16(tmpU32no2, kLogE_1); // Q(zeros+13)
+                if (zeros < 9)
+                {
+                    tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 9 - zeros); // Q(zeros+13)
+                    zerosScale = 9 - zeros;
+                } else
+                {
+                    tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, zeros - 9); // Q22
+                }
+            } else
+            {
+                tmpU32no2 = WEBRTC_SPL_UMUL_32_16(absInLevel, kLogE_1); // Q28
+                tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, 6); // Q22
+            }
+            logApprox = 0;
+            if (tmpU32no2 < tmpU32no1)
+            {
+                logApprox = WEBRTC_SPL_RSHIFT_U32(tmpU32no1 - tmpU32no2, 8 - zerosScale); //Q14
+            }
+        }
+        numFIX = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_U16(maxGain, constMaxGain), 6); // Q14
+        numFIX -= WEBRTC_SPL_MUL_32_16((WebRtc_Word32)logApprox, diffGain); // Q14
+
+        // Calculate ratio
+        // Shift |numFIX| as much as possible.
+        // Ensure we avoid wrap-around in |den| as well.
+        if (numFIX > (den >> 8))  // |den| is Q8.
+        {
+            zeros = WebRtcSpl_NormW32(numFIX);
+        } else
+        {
+            zeros = WebRtcSpl_NormW32(den) + 8;
+        }
+        numFIX = WEBRTC_SPL_LSHIFT_W32(numFIX, zeros); // Q(14+zeros)
+
+        // Shift den so we end up in Qy1
+        tmp32no1 = WEBRTC_SPL_SHIFT_W32(den, zeros - 8); // Q(zeros)
+        // Round toward nearest by adding/subtracting half the divisor.
+        if (numFIX < 0)
+        {
+            numFIX -= WEBRTC_SPL_RSHIFT_W32(tmp32no1, 1);
+        } else
+        {
+            numFIX += WEBRTC_SPL_RSHIFT_W32(tmp32no1, 1);
+        }
+        y32 = WEBRTC_SPL_DIV(numFIX, tmp32no1); // in Q14
+        if (limiterEnable && (i < limiterIdx))
+        {
+            tmp32 = WEBRTC_SPL_MUL_16_U16(i - 1, kLog10_2); // Q14
+            tmp32 -= WEBRTC_SPL_LSHIFT_W32(limiterLvl, 14); // Q14
+            y32 = WebRtcSpl_DivW32W16(tmp32 + 10, 20);
+        }
+        // Split large |y32| to keep the Q27/Q28 products below within 32 bits.
+        if (y32 > 39000)
+        {
+            tmp32 = WEBRTC_SPL_MUL(y32 >> 1, kLog10) + 4096; // in Q27
+            tmp32 = WEBRTC_SPL_RSHIFT_W32(tmp32, 13); // in Q14
+        } else
+        {
+            tmp32 = WEBRTC_SPL_MUL(y32, kLog10) + 8192; // in Q28
+            tmp32 = WEBRTC_SPL_RSHIFT_W32(tmp32, 14); // in Q14
+        }
+        tmp32 += WEBRTC_SPL_LSHIFT_W32(16, 14); // in Q14 (Make sure final output is in Q16)
+
+        // Calculate power
+        if (tmp32 > 0)
+        {
+            intPart = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 14);
+            fracPart = (WebRtc_UWord16)(tmp32 & 0x00003FFF); // in Q14
+            // Piecewise linear approximation of 2^fracPart, split at 0.5.
+            if (WEBRTC_SPL_RSHIFT_W32(fracPart, 13))
+            {
+                tmp16 = WEBRTC_SPL_LSHIFT_W16(2, 14) - constLinApprox;
+                tmp32no2 = WEBRTC_SPL_LSHIFT_W32(1, 14) - fracPart;
+                tmp32no2 = WEBRTC_SPL_MUL_32_16(tmp32no2, tmp16);
+                tmp32no2 = WEBRTC_SPL_RSHIFT_W32(tmp32no2, 13);
+                tmp32no2 = WEBRTC_SPL_LSHIFT_W32(1, 14) - tmp32no2;
+            } else
+            {
+                tmp16 = constLinApprox - WEBRTC_SPL_LSHIFT_W16(1, 14);
+                tmp32no2 = WEBRTC_SPL_MUL_32_16(fracPart, tmp16);
+                tmp32no2 = WEBRTC_SPL_RSHIFT_W32(tmp32no2, 13);
+            }
+            fracPart = (WebRtc_UWord16)tmp32no2;
+            gainTable[i] = WEBRTC_SPL_LSHIFT_W32(1, intPart)
+                    + WEBRTC_SPL_SHIFT_W32(fracPart, intPart - 14);
+        } else
+        {
+            gainTable[i] = 0;
+        }
+    }
+
+    return 0;
+}
+
+// Resets the digital AGC state for the given mode. In fixed-digital mode the
+// slow envelope capacitor starts from zero so the correct gain is found
+// faster; in the adaptive modes it starts at the level corresponding to
+// 0 dB gain. Always returns 0.
+WebRtc_Word32 WebRtcAgc_InitDigital(DigitalAgc_t *stt, WebRtc_Word16 agcMode)
+{
+    stt->agcMode = agcMode;
+
+    // 134217728 == (WebRtc_Word32)(0.125f * 32768.0f * 32768.0f), i.e. 0 dB.
+    stt->capacitorSlow = (agcMode == kAgcModeFixedDigital) ? 0 : 134217728;
+    stt->capacitorFast = 0;
+
+    stt->gain = 65536; // unity gain in Q16
+    stt->gatePrevious = 0;
+#ifdef AGC_DEBUG
+    stt->frameCounter = 0;
+#endif
+
+    // Reset both voice activity detectors.
+    WebRtcAgc_InitVad(&stt->vadNearend);
+    WebRtcAgc_InitVad(&stt->vadFarend);
+
+    return 0;
+}
+
+// Feeds one far-end frame to the far-end VAD so WebRtcAgc_ProcessDigital can
+// account for far-end activity. Returns 0 on success, -1 on a NULL pointer.
+WebRtc_Word32 WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const WebRtc_Word16 *in_far,
+                                           WebRtc_Word16 nrSamples)
+{
+    // Check for valid pointers. The previous test, (&stt->vadFarend == NULL),
+    // could never be true: the address of a struct member is non-null whenever
+    // the expression is evaluated at all. Validate the instance and the input
+    // buffer instead.
+    if (stt == NULL || in_far == NULL)
+    {
+        return -1;
+    }
+
+    // VAD for far end
+    WebRtcAgc_ProcessVad(&stt->vadFarend, in_far, nrSamples);
+
+    return 0;
+}
+
+// Applies the digital AGC gain to one 10 ms frame.
+// in_near/in_near_H: near-end input, low band and (for FS == 32000) high
+// band; out/out_H: output buffers, which may alias the inputs (the copy is
+// skipped when they do). FS: sample rate in Hz (8000, 16000 or 32000).
+// lowlevelSignal: nonzero forces the envelope decay to zero in the adaptive
+// modes. Returns 0 on success, -1 on an unsupported sample rate.
+// The frame is processed as 10 sub-frames of 1 ms; one gain value is
+// computed per sub-frame and linearly interpolated across its samples.
WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *in_near,
+                                       const WebRtc_Word16 *in_near_H, WebRtc_Word16 *out,
+                                       WebRtc_Word16 *out_H, WebRtc_UWord32 FS,
+                                       WebRtc_Word16 lowlevelSignal)
+{
+    // array for gains (one value per ms, incl start & end)
+    WebRtc_Word32 gains[11];
+
+    WebRtc_Word32 out_tmp, tmp32;
+    WebRtc_Word32 env[10];
+    WebRtc_Word32 nrg, max_nrg;
+    WebRtc_Word32 cur_level;
+    WebRtc_Word32 gain32, delta;
+    WebRtc_Word16 logratio;
+    WebRtc_Word16 lower_thr, upper_thr;
+    WebRtc_Word16 zeros, zeros_fast, frac;
+    WebRtc_Word16 decay;
+    WebRtc_Word16 gate, gain_adj;
+    WebRtc_Word16 k, n;
+    WebRtc_Word16 L, L2; // samples/subframe
+
+    // determine number of samples per ms
+    if (FS == 8000)
+    {
+        L = 8;
+        L2 = 3;
+    } else if (FS == 16000)
+    {
+        L = 16;
+        L2 = 4;
+    } else if (FS == 32000)
+    {
+        // Per band: the high band is processed with the same per-ms count.
+        L = 16;
+        L2 = 4;
+    } else
+    {
+        return -1;
+    }
+
+    // TODO(andrew): again, we don't need input and output pointers...
+    if (in_near != out)
+    {
+        // Only needed if they don't already point to the same place.
+        memcpy(out, in_near, 10 * L * sizeof(WebRtc_Word16));
+    }
+    if (FS == 32000)
+    {
+        if (in_near_H != out_H)
+        {
+            memcpy(out_H, in_near_H, 10 * L * sizeof(WebRtc_Word16));
+        }
+    }
+    // VAD for near end
+    logratio = WebRtcAgc_ProcessVad(&stt->vadNearend, out, L * 10);
+
+    // Account for far end VAD: blend 3/4 near-end with -1/4 far-end ratio.
+    if (stt->vadFarend.counter > 10)
+    {
+        tmp32 = WEBRTC_SPL_MUL_16_16(3, logratio);
+        logratio = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 - stt->vadFarend.logRatio, 2);
+    }
+
+    // Determine decay factor depending on VAD
+    //  upper_thr = 1.0f;
+    //  lower_thr = 0.25f;
+    upper_thr = 1024; // Q10
+    lower_thr = 0; // Q10
+    if (logratio > upper_thr)
+    {
+        // decay = -2^17 / DecayTime;  ->  -65
+        decay = -65;
+    } else if (logratio < lower_thr)
+    {
+        decay = 0;
+    } else
+    {
+        // decay = (WebRtc_Word16)(((lower_thr - logratio)
+        //       * (2^27/(DecayTime*(upper_thr-lower_thr)))) >> 10);
+        // SUBSTITUTED: 2^27/(DecayTime*(upper_thr-lower_thr))  ->  65
+        tmp32 = WEBRTC_SPL_MUL_16_16((lower_thr - logratio), 65);
+        decay = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 10);
+    }
+
+    // adjust decay factor for long silence (detected as low standard deviation)
+    // This is only done in the adaptive modes
+    if (stt->agcMode != kAgcModeFixedDigital)
+    {
+        if (stt->vadNearend.stdLongTerm < 4000)
+        {
+            decay = 0;
+        } else if (stt->vadNearend.stdLongTerm < 8096)
+        {
+            // decay = (WebRtc_Word16)(((stt->vadNearend.stdLongTerm - 4000) * decay) >> 12);
+            tmp32 = WEBRTC_SPL_MUL_16_16((stt->vadNearend.stdLongTerm - 4000), decay);
+            decay = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
+        }
+
+        if (lowlevelSignal != 0)
+        {
+            decay = 0;
+        }
+    }
+#ifdef AGC_DEBUG
+    stt->frameCounter++;
+    fprintf(stt->logFile, "%5.2f\t%d\t%d\t%d\t", (float)(stt->frameCounter) / 100, logratio, decay, stt->vadNearend.stdLongTerm);
+#endif
+    // Find max amplitude per sub frame
+    // iterate over sub frames
+    for (k = 0; k < 10; k++)
+    {
+        // iterate over samples
+        max_nrg = 0;
+        for (n = 0; n < L; n++)
+        {
+            nrg = WEBRTC_SPL_MUL_16_16(out[k * L + n], out[k * L + n]);
+            if (nrg > max_nrg)
+            {
+                max_nrg = nrg;
+            }
+        }
+        env[k] = max_nrg;
+    }
+
+    // Calculate gain per sub frame
+    gains[0] = stt->gain;
+    for (k = 0; k < 10; k++)
+    {
+        // Fast envelope follower
+        //  decay time = -131000 / -1000 = 131 (ms)
+        stt->capacitorFast = AGC_SCALEDIFF32(-1000, stt->capacitorFast, stt->capacitorFast);
+        if (env[k] > stt->capacitorFast)
+        {
+            stt->capacitorFast = env[k];
+        }
+        // Slow envelope follower
+        if (env[k] > stt->capacitorSlow)
+        {
+            // increase capacitorSlow
+            stt->capacitorSlow
+                    = AGC_SCALEDIFF32(500, (env[k] - stt->capacitorSlow), stt->capacitorSlow);
+        } else
+        {
+            // decrease capacitorSlow
+            stt->capacitorSlow
+                    = AGC_SCALEDIFF32(decay, stt->capacitorSlow, stt->capacitorSlow);
+        }
+
+        // use maximum of both capacitors as current level
+        if (stt->capacitorFast > stt->capacitorSlow)
+        {
+            cur_level = stt->capacitorFast;
+        } else
+        {
+            cur_level = stt->capacitorSlow;
+        }
+        // Translate signal level into gain, using a piecewise linear approximation
+        // find number of leading zeros
+        zeros = WebRtcSpl_NormU32((WebRtc_UWord32)cur_level);
+        if (cur_level == 0)
+        {
+            zeros = 31;
+        }
+        tmp32 = (WEBRTC_SPL_LSHIFT_W32(cur_level, zeros) & 0x7FFFFFFF);
+        frac = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 19); // Q12
+        // Interpolate between adjacent gain-table entries (one per octave).
+        tmp32 = WEBRTC_SPL_MUL((stt->gainTable[zeros-1] - stt->gainTable[zeros]), frac);
+        gains[k + 1] = stt->gainTable[zeros] + WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
+#ifdef AGC_DEBUG
+        if (k == 0)
+        {
+            fprintf(stt->logFile, "%d\t%d\t%d\t%d\t%d\n", env[0], cur_level, stt->capacitorFast, stt->capacitorSlow, zeros);
+        }
+#endif
+    }
+
+    // Gate processing (lower gain during absence of speech)
+    // NOTE: |zeros| and |frac| here are carried over from the last (k == 9)
+    // loop iteration above, i.e. the most recent sub-frame level.
+    zeros = WEBRTC_SPL_LSHIFT_W16(zeros, 9) - WEBRTC_SPL_RSHIFT_W16(frac, 3);
+    // find number of leading zeros
+    zeros_fast = WebRtcSpl_NormU32((WebRtc_UWord32)stt->capacitorFast);
+    if (stt->capacitorFast == 0)
+    {
+        zeros_fast = 31;
+    }
+    tmp32 = (WEBRTC_SPL_LSHIFT_W32(stt->capacitorFast, zeros_fast) & 0x7FFFFFFF);
+    zeros_fast = WEBRTC_SPL_LSHIFT_W16(zeros_fast, 9);
+    zeros_fast -= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 22);
+
+    gate = 1000 + zeros_fast - zeros - stt->vadNearend.stdShortTerm;
+
+    if (gate < 0)
+    {
+        stt->gatePrevious = 0;
+    } else
+    {
+        // First-order smoothing: gate = (gate + 7*previous) / 8.
+        tmp32 = WEBRTC_SPL_MUL_16_16(stt->gatePrevious, 7);
+        gate = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)gate + tmp32, 3);
+        stt->gatePrevious = gate;
+    }
+    // gate < 0     -> no gate
+    // gate > 2500  -> max gate
+    if (gate > 0)
+    {
+        if (gate < 2500)
+        {
+            gain_adj = WEBRTC_SPL_RSHIFT_W16(2500 - gate, 5);
+        } else
+        {
+            gain_adj = 0;
+        }
+        for (k = 0; k < 10; k++)
+        {
+            if ((gains[k + 1] - stt->gainTable[0]) > 8388608)
+            {
+                // To prevent wraparound
+                tmp32 = WEBRTC_SPL_RSHIFT_W32((gains[k+1] - stt->gainTable[0]), 8);
+                tmp32 = WEBRTC_SPL_MUL(tmp32, (178 + gain_adj));
+            } else
+            {
+                tmp32 = WEBRTC_SPL_MUL((gains[k+1] - stt->gainTable[0]), (178 + gain_adj));
+                tmp32 = WEBRTC_SPL_RSHIFT_W32(tmp32, 8);
+            }
+            gains[k + 1] = stt->gainTable[0] + tmp32;
+        }
+    }
+
+    // Limit gain to avoid overload distortion
+    for (k = 0; k < 10; k++)
+    {
+        // To prevent wrap around
+        zeros = 10;
+        if (gains[k + 1] > 47453132)
+        {
+            zeros = 16 - WebRtcSpl_NormW32(gains[k + 1]);
+        }
+        gain32 = WEBRTC_SPL_RSHIFT_W32(gains[k+1], zeros) + 1;
+        gain32 = WEBRTC_SPL_MUL(gain32, gain32);
+        // check for overflow
+        while (AGC_MUL32(WEBRTC_SPL_RSHIFT_W32(env[k], 12) + 1, gain32)
+                > WEBRTC_SPL_SHIFT_W32((WebRtc_Word32)32767, 2 * (1 - zeros + 10)))
+        {
+            // multiply by 253/256 ==> -0.1 dB
+            if (gains[k + 1] > 8388607)
+            {
+                // Prevent wrap around
+                gains[k + 1] = WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(gains[k+1], 8), 253);
+            } else
+            {
+                gains[k + 1] = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(gains[k+1], 253), 8);
+            }
+            gain32 = WEBRTC_SPL_RSHIFT_W32(gains[k+1], zeros) + 1;
+            gain32 = WEBRTC_SPL_MUL(gain32, gain32);
+        }
+    }
+    // gain reductions should be done 1 ms earlier than gain increases
+    for (k = 1; k < 10; k++)
+    {
+        if (gains[k] > gains[k + 1])
+        {
+            gains[k] = gains[k + 1];
+        }
+    }
+    // save start gain for next frame
+    stt->gain = gains[10];
+
+    // Apply gain
+    // handle first sub frame separately
+    delta = WEBRTC_SPL_LSHIFT_W32(gains[1] - gains[0], (4 - L2));
+    gain32 = WEBRTC_SPL_LSHIFT_W32(gains[0], 4);
+    // iterate over samples
+    for (n = 0; n < L; n++)
+    {
+        // For lower band
+        // First check with a rounded-up gain whether the output would clip;
+        // out_tmp is the gained sample scaled down by 8, so 4095 here
+        // corresponds to 16-bit full scale.
+        tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out[n], WEBRTC_SPL_RSHIFT_W32(gain32 + 127, 7));
+        out_tmp = WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
+        if (out_tmp > 4095)
+        {
+            out[n] = (WebRtc_Word16)32767;
+        } else if (out_tmp < -4096)
+        {
+            out[n] = (WebRtc_Word16)-32768;
+        } else
+        {
+            tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out[n], WEBRTC_SPL_RSHIFT_W32(gain32, 4));
+            out[n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
+        }
+        // For higher band
+        if (FS == 32000)
+        {
+            tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out_H[n],
+                                   WEBRTC_SPL_RSHIFT_W32(gain32 + 127, 7));
+            out_tmp = WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
+            if (out_tmp > 4095)
+            {
+                out_H[n] = (WebRtc_Word16)32767;
+            } else if (out_tmp < -4096)
+            {
+                out_H[n] = (WebRtc_Word16)-32768;
+            } else
+            {
+                tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out_H[n],
+                                       WEBRTC_SPL_RSHIFT_W32(gain32, 4));
+                out_H[n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
+            }
+        }
+        //
+
+        gain32 += delta;
+    }
+    // iterate over subframes
+    for (k = 1; k < 10; k++)
+    {
+        delta = WEBRTC_SPL_LSHIFT_W32(gains[k+1] - gains[k], (4 - L2));
+        gain32 = WEBRTC_SPL_LSHIFT_W32(gains[k], 4);
+        // iterate over samples
+        for (n = 0; n < L; n++)
+        {
+            // For lower band
+            tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out[k * L + n],
+                                   WEBRTC_SPL_RSHIFT_W32(gain32, 4));
+            out[k * L + n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
+            // For higher band
+            if (FS == 32000)
+            {
+                tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out_H[k * L + n],
+                                       WEBRTC_SPL_RSHIFT_W32(gain32, 4));
+                out_H[k * L + n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
+            }
+            gain32 += delta;
+        }
+    }
+
+    return 0;
+}
+
+// Resets a VAD state to its initial priors (no speech observed yet).
+void WebRtcAgc_InitVad(AgcVad_t *state)
+{
+    WebRtc_Word16 i;
+
+    // Clear the downsampling filter history.
+    for (i = 0; i < 8; i++)
+    {
+        state->downState[i] = 0;
+    }
+
+    state->HPstate = 0;  // state of high pass filter
+    state->logRatio = 0; // log( P(active) / P(inactive) )
+    state->counter = 3;  // counts updates
+
+    // Long-term level statistics: mean (Q10), variance (Q8), std dev (Q10).
+    state->meanLongTerm = WEBRTC_SPL_LSHIFT_W16(15, 10);
+    state->varianceLongTerm = WEBRTC_SPL_LSHIFT_W32(500, 8);
+    state->stdLongTerm = 0;
+
+    // Short-term level statistics, same representation as above.
+    state->meanShortTerm = WEBRTC_SPL_LSHIFT_W16(15, 10);
+    state->varianceShortTerm = WEBRTC_SPL_LSHIFT_W32(500, 8);
+    state->stdShortTerm = 0;
+}
+
+// Runs the level-based VAD on one 10 ms frame and updates the running speech
+// statistics. When nrSamples == 160 the input is first averaged pairwise and
+// then decimated; otherwise it is decimated directly (80 samples per frame,
+// 8 per 1 ms sub-frame). Returns the updated log-likelihood ratio
+// log( P(active) / P(inactive) ) in Q10, limited to [-2048, 2048].
WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
+                                   const WebRtc_Word16 *in, // (i) Speech signal
+                                   WebRtc_Word16 nrSamples) // (i) number of samples
+{
+    WebRtc_Word32 out, nrg, tmp32, tmp32b;
+    WebRtc_UWord16 tmpU16;
+    WebRtc_Word16 k, subfr, tmp16;
+    WebRtc_Word16 buf1[8];
+    WebRtc_Word16 buf2[4];
+    WebRtc_Word16 HPstate;
+    WebRtc_Word16 zeros, dB;
+
+    // process in 10 sub frames of 1 ms (to save on memory)
+    nrg = 0;
+    HPstate = state->HPstate;
+    for (subfr = 0; subfr < 10; subfr++)
+    {
+        // downsample to 4 kHz
+        if (nrSamples == 160)
+        {
+            // Average adjacent samples first (160 -> 80), then decimate by 2.
+            for (k = 0; k < 8; k++)
+            {
+                tmp32 = (WebRtc_Word32)in[2 * k] + (WebRtc_Word32)in[2 * k + 1];
+                tmp32 = WEBRTC_SPL_RSHIFT_W32(tmp32, 1);
+                buf1[k] = (WebRtc_Word16)tmp32;
+            }
+            in += 16;
+
+            WebRtcSpl_DownsampleBy2(buf1, 8, buf2, state->downState);
+        } else
+        {
+            WebRtcSpl_DownsampleBy2(in, 8, buf2, state->downState);
+            in += 8;
+        }
+
+        // high pass filter and compute energy
+        for (k = 0; k < 4; k++)
+        {
+            out = buf2[k] + HPstate;
+            tmp32 = WEBRTC_SPL_MUL(600, out);
+            HPstate = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(tmp32, 10) - buf2[k]);
+            tmp32 = WEBRTC_SPL_MUL(out, out);
+            nrg += WEBRTC_SPL_RSHIFT_W32(tmp32, 6);
+        }
+    }
+    state->HPstate = HPstate;
+
+    // find number of leading zeros (binary search on the bit pattern)
+    if (!(0xFFFF0000 & nrg))
+    {
+        zeros = 16;
+    } else
+    {
+        zeros = 0;
+    }
+    if (!(0xFF000000 & (nrg << zeros)))
+    {
+        zeros += 8;
+    }
+    if (!(0xF0000000 & (nrg << zeros)))
+    {
+        zeros += 4;
+    }
+    if (!(0xC0000000 & (nrg << zeros)))
+    {
+        zeros += 2;
+    }
+    if (!(0x80000000 & (nrg << zeros)))
+    {
+        zeros += 1;
+    }
+
+    // energy level (range {-32..30}) (Q10)
+    dB = WEBRTC_SPL_LSHIFT_W16(15 - zeros, 11);
+
+    // Update statistics
+
+    if (state->counter < kAvgDecayTime)
+    {
+        // decay time = AvgDecTime * 10 ms
+        state->counter++;
+    }
+
+    // update short-term estimate of mean energy level (Q10)
+    // Exponential average with a fixed 15/16 smoothing factor.
+    tmp32 = (WEBRTC_SPL_MUL_16_16(state->meanShortTerm, 15) + (WebRtc_Word32)dB);
+    state->meanShortTerm = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
+
+    // update short-term estimate of variance in energy level (Q8)
+    tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(dB, dB), 12);
+    tmp32 += WEBRTC_SPL_MUL(state->varianceShortTerm, 15);
+    state->varianceShortTerm = WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
+
+    // update short-term estimate of standard deviation in energy level (Q10)
+    // std = sqrt(E[x^2] - E[x]^2)
+    tmp32 = WEBRTC_SPL_MUL_16_16(state->meanShortTerm, state->meanShortTerm);
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(state->varianceShortTerm, 12) - tmp32;
+    state->stdShortTerm = (WebRtc_Word16)WebRtcSpl_Sqrt(tmp32);
+
+    // update long-term estimate of mean energy level (Q10)
+    // Running average whose effective window grows with |counter| until it
+    // saturates at kAvgDecayTime.
+    tmp32 = WEBRTC_SPL_MUL_16_16(state->meanLongTerm, state->counter) + (WebRtc_Word32)dB;
+    state->meanLongTerm = WebRtcSpl_DivW32W16ResW16(tmp32,
+                                                    WEBRTC_SPL_ADD_SAT_W16(state->counter, 1));
+
+    // update long-term estimate of variance in energy level (Q8)
+    tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(dB, dB), 12);
+    tmp32 += WEBRTC_SPL_MUL(state->varianceLongTerm, state->counter);
+    state->varianceLongTerm = WebRtcSpl_DivW32W16(tmp32,
+                                                  WEBRTC_SPL_ADD_SAT_W16(state->counter, 1));
+
+    // update long-term estimate of standard deviation in energy level (Q10)
+    tmp32 = WEBRTC_SPL_MUL_16_16(state->meanLongTerm, state->meanLongTerm);
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(state->varianceLongTerm, 12) - tmp32;
+    state->stdLongTerm = (WebRtc_Word16)WebRtcSpl_Sqrt(tmp32);
+
+    // update voice activity measure (Q10)
+    tmp16 = WEBRTC_SPL_LSHIFT_W16(3, 12);
+    tmp32 = WEBRTC_SPL_MUL_16_16(tmp16, (dB - state->meanLongTerm));
+    tmp32 = WebRtcSpl_DivW32W16(tmp32, state->stdLongTerm);
+    tmpU16 = WEBRTC_SPL_LSHIFT_U16((WebRtc_UWord16)13, 12);
+    tmp32b = WEBRTC_SPL_MUL_16_U16(state->logRatio, tmpU16);
+    tmp32 += WEBRTC_SPL_RSHIFT_W32(tmp32b, 10);
+
+    state->logRatio = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 6);
+
+    // limit
+    if (state->logRatio > 2048)
+    {
+        state->logRatio = 2048;
+    }
+    if (state->logRatio < -2048)
+    {
+        state->logRatio = -2048;
+    }
+
+    return state->logRatio; // Q10
+}
diff --git a/trunk/src/modules/audio_processing/agc/digital_agc.h b/trunk/src/modules/audio_processing/agc/digital_agc.h
new file mode 100644
index 0000000..240b220
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/digital_agc.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_DIGITAL_AGC_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_DIGITAL_AGC_H_
+
+#ifdef AGC_DEBUG
+#include <stdio.h>
+#endif
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+// the 32 most significant bits of A(19) * B(26) >> 13
+#define AGC_MUL32(A, B)             (((B)>>13)*(A) + ( ((0x00001FFF & (B))*(A)) >> 13 ))
+// C + the 32 most significant bits of A * B
+#define AGC_SCALEDIFF32(A, B, C)    ((C) + ((B)>>16)*(A) + ( ((0x0000FFFF & (B))*(A)) >> 16 ))
+
+// State of the level-based voice activity detector used by the digital AGC
+// (see WebRtcAgc_ProcessVad / WebRtcAgc_InitVad).
+typedef struct
+{
+    WebRtc_Word32 downState[8]; // Downsampling filter state.
+    WebRtc_Word16 HPstate; // High-pass filter state.
+    WebRtc_Word16 counter; // Number of updates so far; capped at kAvgDecayTime.
+    WebRtc_Word16 logRatio; // log( P(active) / P(inactive) ) (Q10)
+    WebRtc_Word16 meanLongTerm; // Q10
+    WebRtc_Word32 varianceLongTerm; // Q8
+    WebRtc_Word16 stdLongTerm; // Q10
+    WebRtc_Word16 meanShortTerm; // Q10
+    WebRtc_Word32 varianceShortTerm; // Q8
+    WebRtc_Word16 stdShortTerm; // Q10
+} AgcVad_t; // total = 54 bytes
+
+// State of the digital (fixed/adaptive) AGC.
+typedef struct
+{
+    WebRtc_Word32 capacitorSlow; // Slow envelope follower state.
+    WebRtc_Word32 capacitorFast; // Fast envelope follower state.
+    WebRtc_Word32 gain; // Gain carried over between frames (Q16).
+    WebRtc_Word32 gainTable[32]; // Level-to-gain table, Q16, one entry per octave.
+    WebRtc_Word16 gatePrevious; // Smoothed gate value from the previous frame.
+    WebRtc_Word16 agcMode; // One of the kAgcMode* values.
+    AgcVad_t      vadNearend;
+    AgcVad_t      vadFarend;
+#ifdef AGC_DEBUG
+    FILE*         logFile;
+    int           frameCounter;
+#endif
+} DigitalAgc_t;
+
+// Resets the digital AGC state; agcMode selects fixed/adaptive behavior.
+WebRtc_Word32 WebRtcAgc_InitDigital(DigitalAgc_t *digitalAgcInst, WebRtc_Word16 agcMode);
+
+// Applies the digital gain to one 10 ms frame (FS = 8000/16000/32000 Hz);
+// out/out_H may alias the inputs. Returns 0 on success, -1 on bad FS.
+WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *digitalAgcInst, const WebRtc_Word16 *inNear,
+                             const WebRtc_Word16 *inNear_H, WebRtc_Word16 *out,
+                             WebRtc_Word16 *out_H, WebRtc_UWord32 FS,
+                             WebRtc_Word16 lowLevelSignal);
+
+// Feeds one far-end frame to the far-end VAD. Returns 0 on success, -1 on error.
+WebRtc_Word32 WebRtcAgc_AddFarendToDigital(DigitalAgc_t *digitalAgcInst, const WebRtc_Word16 *inFar,
+                                 WebRtc_Word16 nrSamples);
+
+// Resets a VAD state to its initial priors.
+void WebRtcAgc_InitVad(AgcVad_t *vadInst);
+
+// Runs the VAD on one 10 ms frame; returns log( P(active) / P(inactive) ) in Q10.
+WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *vadInst, // (i) VAD state
+                            const WebRtc_Word16 *in, // (i) Speech signal
+                            WebRtc_Word16 nrSamples); // (i) number of samples
+
+// Builds the 32-entry Q16 compressor gain table used by the fixed digital part.
+WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
+                                 WebRtc_Word16 compressionGaindB, // Q0 (in dB)
+                                 WebRtc_Word16 targetLevelDbfs,// Q0 (in dB)
+                                 WebRtc_UWord8 limiterEnable, WebRtc_Word16 analogTarget);
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_DIGITAL_AGC_H_
diff --git a/trunk/src/modules/audio_processing/agc/include/gain_control.h b/trunk/src/modules/audio_processing/agc/include/gain_control.h
new file mode 100644
index 0000000..8af5c71
--- /dev/null
+++ b/trunk/src/modules/audio_processing/agc/include/gain_control.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AGC_INCLUDE_GAIN_CONTROL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AGC_INCLUDE_GAIN_CONTROL_H_
+
+#include "typedefs.h"
+
// Errors (returned by the WebRtcAgc_* functions declared below)
#define AGC_UNSPECIFIED_ERROR           18000
#define AGC_UNSUPPORTED_FUNCTION_ERROR  18001
#define AGC_UNINITIALIZED_ERROR         18002
#define AGC_NULL_POINTER_ERROR          18003
#define AGC_BAD_PARAMETER_ERROR         18004

// Warnings
#define AGC_BAD_PARAMETER_WARNING       18050

// Operating modes for WebRtcAgc_Init(); see its documentation below for
// the meaning of each value (0-3).
enum
{
    kAgcModeUnchanged,
    kAgcModeAdaptiveAnalog,
    kAgcModeAdaptiveDigital,
    kAgcModeFixedDigital
};

// Boolean values used by this C API.
enum
{
    kAgcFalse = 0,
    kAgcTrue
};

// Tunable settings, read and written via WebRtcAgc_get_config() /
// WebRtcAgc_set_config().
typedef struct
{
    WebRtc_Word16 targetLevelDbfs;   // default 3 (-3 dBOv)
    WebRtc_Word16 compressionGaindB; // default 9 dB
    WebRtc_UWord8 limiterEnable;     // default kAgcTrue (on)
} WebRtcAgc_config_t;
+
+#if defined(__cplusplus)
+extern "C"
+{
+#endif
+
/*
 * This function processes a 10/20ms frame of far-end speech to determine
 * if there is active speech. Far-end speech length can be either 10ms or
 * 20ms. The length of the input speech vector must be given in samples
 * (80/160 when FS=8000, and 160/320 when FS=16000 or FS=32000).
 *
 * Input:
 *      - agcInst           : AGC instance.
 *      - inFar             : Far-end input speech vector (10 or 20ms)
 *      - samples           : Number of samples in input vector
 *
 * Return value:
 *                          :  0 - Normal operation.
 *                          : -1 - Error
 */
int WebRtcAgc_AddFarend(void* agcInst,
                        const WebRtc_Word16* inFar,
                        WebRtc_Word16 samples);

/*
 * This function processes a 10/20ms frame of microphone speech to determine
 * if there is active speech. Microphone speech length can be either 10ms or
 * 20ms. The length of the input speech vector must be given in samples
 * (80/160 when FS=8000, and 160/320 when FS=16000 or FS=32000). For very low
 * input levels, the input signal is increased in level by multiplying and
 * overwriting the samples in inMic[].
 *
 * This function should be called before any further processing of the
 * near-end microphone signal.
 *
 * Input:
 *      - agcInst           : AGC instance.
 *      - inMic             : Microphone input speech vector (10 or 20 ms) for
 *                            L band
 *      - inMic_H           : Microphone input speech vector (10 or 20 ms) for
 *                            H band
 *      - samples           : Number of samples in input vector
 *
 * Return value:
 *                          :  0 - Normal operation.
 *                          : -1 - Error
 */
int WebRtcAgc_AddMic(void* agcInst,
                     WebRtc_Word16* inMic,
                     WebRtc_Word16* inMic_H,
                     WebRtc_Word16 samples);

/*
 * This function replaces the analog microphone with a virtual one.
 * It is a digital gain applied to the input signal and is used in the
 * kAgcModeAdaptiveDigital mode where no microphone level is adjustable.
 * Microphone speech length can be either 10ms or 20ms. The length of the
 * input speech vector must be given in samples (80/160 when FS=8000, and
 * 160/320 when FS=16000 or FS=32000).
 *
 * Input:
 *      - agcInst           : AGC instance.
 *      - inMic             : Microphone input speech vector for (10 or 20 ms)
 *                            L band
 *      - inMic_H           : Microphone input speech vector for (10 or 20 ms)
 *                            H band
 *      - samples           : Number of samples in input vector
 *      - micLevelIn        : Input level of microphone (static)
 *
 * Output:
 *      - inMic             : Microphone output after processing (L band)
 *      - inMic_H           : Microphone output after processing (H band)
 *      - micLevelOut       : Adjusted microphone level after processing
 *
 * Return value:
 *                          :  0 - Normal operation.
 *                          : -1 - Error
 */
int WebRtcAgc_VirtualMic(void* agcInst,
                         WebRtc_Word16* inMic,
                         WebRtc_Word16* inMic_H,
                         WebRtc_Word16 samples,
                         WebRtc_Word32 micLevelIn,
                         WebRtc_Word32* micLevelOut);

/*
 * This function processes a 10/20ms frame and adjusts (normalizes) the gain
 * both analog and digitally. The gain adjustments are done only during
 * active periods of speech. The input speech length can be either 10ms or
 * 20ms and the output is of the same length. The length of the speech
 * vectors must be given in samples (80/160 when FS=8000, and 160/320 when
 * FS=16000 or FS=32000). The echo parameter can be used to ensure the AGC will
 * not adjust upward in the presence of echo.
 *
 * This function should be called after processing the near-end microphone
 * signal, in any case after any echo cancellation.
 *
 * Input:
 *      - agcInst           : AGC instance
 *      - inNear            : Near-end input speech vector (10 or 20 ms) for
 *                            L band
 *      - inNear_H          : Near-end input speech vector (10 or 20 ms) for
 *                            H band
 *      - samples           : Number of samples in input/output vector
 *      - inMicLevel        : Current microphone volume level
 *      - echo              : Set to 0 if the signal passed to
 *                            WebRtcAgc_AddMic() is almost certainly free of
 *                            echo; otherwise set to 1. If you have no
 *                            information regarding echo set to 0.
 *
 * Output:
 *      - outMicLevel       : Adjusted microphone volume level
 *      - out               : Gain-adjusted near-end speech vector (L band)
 *                          : May be the same vector as the input.
 *      - out_H             : Gain-adjusted near-end speech vector (H band)
 *      - saturationWarning : A returned value of 1 indicates a saturation event
 *                            has occurred and the volume cannot be further
 *                            reduced. Otherwise will be set to 0.
 *
 * Return value:
 *                          :  0 - Normal operation.
 *                          : -1 - Error
 */
int WebRtcAgc_Process(void* agcInst,
                      const WebRtc_Word16* inNear,
                      const WebRtc_Word16* inNear_H,
                      WebRtc_Word16 samples,
                      WebRtc_Word16* out,
                      WebRtc_Word16* out_H,
                      WebRtc_Word32 inMicLevel,
                      WebRtc_Word32* outMicLevel,
                      WebRtc_Word16 echo,
                      WebRtc_UWord8* saturationWarning);

/*
 * This function sets the config parameters (targetLevelDbfs,
 * compressionGaindB and limiterEnable).
 *
 * Input:
 *      - agcInst           : AGC instance
 *      - config            : config struct
 *
 * Output:
 *
 * Return value:
 *                          :  0 - Normal operation.
 *                          : -1 - Error
 */
int WebRtcAgc_set_config(void* agcInst, WebRtcAgc_config_t config);

/*
 * This function returns the config parameters (targetLevelDbfs,
 * compressionGaindB and limiterEnable).
 *
 * Input:
 *      - agcInst           : AGC instance
 *
 * Output:
 *      - config            : config struct
 *
 * Return value:
 *                          :  0 - Normal operation.
 *                          : -1 - Error
 */
int WebRtcAgc_get_config(void* agcInst, WebRtcAgc_config_t* config);

/*
 * This function creates an AGC instance, which will contain the state
 * information for one (duplex) channel.
 *
 * Output:
 *      - agcInst           : Receives a pointer to the created AGC instance.
 *
 * Return value             :  0 - Ok
 *                            -1 - Error
 */
int WebRtcAgc_Create(void **agcInst);

/*
 * This function frees the AGC instance created at the beginning.
 *
 * Input:
 *      - agcInst           : AGC instance.
 *
 * Return value             :  0 - Ok
 *                            -1 - Error
 */
int WebRtcAgc_Free(void *agcInst);

/*
 * This function initializes an AGC instance.
 *
 * Input:
 *      - agcInst           : AGC instance.
 *      - minLevel          : Minimum possible mic level
 *      - maxLevel          : Maximum possible mic level
 *      - agcMode           : 0 - Unchanged
 *                          : 1 - Adaptive Analog Automatic Gain Control -3dBOv
 *                          : 2 - Adaptive Digital Automatic Gain Control -3dBOv
 *                          : 3 - Fixed Digital Gain 0dB
 *      - fs                : Sampling frequency
 *
 * Return value             :  0 - Ok
 *                            -1 - Error
 */
int WebRtcAgc_Init(void *agcInst,
                   WebRtc_Word32 minLevel,
                   WebRtc_Word32 maxLevel,
                   WebRtc_Word16 agcMode,
                   WebRtc_UWord32 fs);
+
+#if defined(__cplusplus)
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_INCLUDE_GAIN_CONTROL_H_
diff --git a/trunk/src/modules/audio_processing/apm_tests.gypi b/trunk/src/modules/audio_processing/apm_tests.gypi
new file mode 100644
index 0000000..c00a6d2
--- /dev/null
+++ b/trunk/src/modules/audio_processing/apm_tests.gypi
@@ -0,0 +1,75 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
{
  'targets': [
    {
      # Google Test binary covering the audio processing module.
      'target_name': 'audioproc_unittest',
      'type': 'executable',
      'conditions': [
        # Keep the test profile in sync with the fixed-/floating-point
        # flavor used to build the library under test.
        ['prefer_fixed_point==1', {
          'defines': [ 'WEBRTC_AUDIOPROC_FIXED_PROFILE' ],
        }, {
          'defines': [ 'WEBRTC_AUDIOPROC_FLOAT_PROFILE' ],
        }],
        ['enable_protobuf==1', {
          'defines': [ 'WEBRTC_AUDIOPROC_DEBUG_DUMP' ],
        }],
      ],
      'dependencies': [
        'audio_processing',
        'audioproc_unittest_proto',
        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
        '<(webrtc_root)/../test/test.gyp:test_support',
        '<(webrtc_root)/../testing/gtest.gyp:gtest',
      ],
      'sources': [ 'test/unit_test.cc', ],
    },
    {
      # Protocol buffer messages used by the unit tests.
      'target_name': 'audioproc_unittest_proto',
      'type': 'static_library',
      'sources': [ 'test/unittest.proto', ],
      'variables': {
        'proto_in_dir': 'test',
        # Workaround to protect against gyp's pathname relativization when this
        # file is included by modules.gyp.
        'proto_out_protected': 'webrtc/audio_processing',
        'proto_out_dir': '<(proto_out_protected)',
      },
      'includes': [ '../../build/protoc.gypi', ],
    },
  ],
  # The tools below depend on protobuf-based debug dumps, so they are only
  # built when protobuf support is enabled.
  'conditions': [
    ['enable_protobuf==1', {
      'targets': [
        {
          # Command-line processing/test tool (test/process_test.cc).
          'target_name': 'audioproc',
          'type': 'executable',
          'dependencies': [
            'audio_processing',
            'audioproc_debug_proto',
            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
            '<(webrtc_root)/../testing/gtest.gyp:gtest',
          ],
          'sources': [ 'test/process_test.cc', ],
        },
        {
          # Tool for unpacking recorded debug (aecdump) files
          # (test/unpack.cc).
          'target_name': 'unpack_aecdump',
          'type': 'executable',
          'dependencies': [
            'audioproc_debug_proto',
            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
            '<(webrtc_root)/../third_party/google-gflags/google-gflags.gyp:google-gflags',
          ],
          'sources': [ 'test/unpack.cc', ],
        },
      ],
    }],
  ],
}
diff --git a/trunk/src/modules/audio_processing/audio_buffer.cc b/trunk/src/modules/audio_processing/audio_buffer.cc
new file mode 100644
index 0000000..a7fb04d
--- /dev/null
+++ b/trunk/src/modules/audio_processing/audio_buffer.cc
@@ -0,0 +1,306 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_buffer.h"
+
+#include "signal_processing_library.h"
+
+namespace webrtc {
+namespace {
+
// Samples per channel for one 10 ms frame at each supported rate
// (e.g. 8000 Hz * 0.010 s = 80 samples).
enum {
  kSamplesPer8kHzChannel = 80,
  kSamplesPer16kHzChannel = 160,
  kSamplesPer32kHzChannel = 320
};
+
+void StereoToMono(const int16_t* left, const int16_t* right,
+                  int16_t* out, int samples_per_channel) {
+  assert(left != NULL && right != NULL && out != NULL);
+  for (int i = 0; i < samples_per_channel; i++) {
+    int32_t data32 = (static_cast<int32_t>(left[i]) +
+                      static_cast<int32_t>(right[i])) >> 1;
+
+    out[i] = WebRtcSpl_SatW32ToW16(data32);
+  }
+}
+}  // namespace
+
// Zero-initialized storage for one channel of audio, sized for the largest
// supported frame (10 ms at 32 kHz).
struct AudioChannel {
  AudioChannel() {
    memset(data, 0, sizeof(data));
  }

  int16_t data[kSamplesPer32kHzChannel];
};
+
// Storage for one channel's band-split data (low/high 16 kHz bands of a
// 32 kHz signal) plus filter state carried across frames — presumably used
// by the band-splitting filters in splitting_filter.cc; confirm there.
struct SplitAudioChannel {
  SplitAudioChannel() {
    memset(low_pass_data, 0, sizeof(low_pass_data));
    memset(high_pass_data, 0, sizeof(high_pass_data));
    memset(analysis_filter_state1, 0, sizeof(analysis_filter_state1));
    memset(analysis_filter_state2, 0, sizeof(analysis_filter_state2));
    memset(synthesis_filter_state1, 0, sizeof(synthesis_filter_state1));
    memset(synthesis_filter_state2, 0, sizeof(synthesis_filter_state2));
  }

  int16_t low_pass_data[kSamplesPer16kHzChannel];
  int16_t high_pass_data[kSamplesPer16kHzChannel];

  // State for the analysis (split) and synthesis (recombine) filters.
  WebRtc_Word32 analysis_filter_state1[6];
  WebRtc_Word32 analysis_filter_state2[6];
  WebRtc_Word32 synthesis_filter_state1[6];
  WebRtc_Word32 synthesis_filter_state2[6];
};
+
// TODO(andrew): check range of input parameters?
// Allocates per-channel storage up front for up to |max_num_channels|
// channels of |samples_per_channel| samples each.
AudioBuffer::AudioBuffer(int max_num_channels,
                         int samples_per_channel)
  : max_num_channels_(max_num_channels),
    num_channels_(0),
    num_mixed_channels_(0),
    num_mixed_low_pass_channels_(0),
    data_was_mixed_(false),
    samples_per_channel_(samples_per_channel),
    samples_per_split_channel_(samples_per_channel),
    reference_copied_(false),
    activity_(AudioFrame::kVadUnknown),
    is_muted_(false),
    data_(NULL),
    channels_(NULL),
    split_channels_(NULL),
    mixed_channels_(NULL),
    mixed_low_pass_channels_(NULL),
    low_pass_reference_channels_(NULL) {
  // Deinterleaved/mixed storage is only needed for multi-channel input;
  // the mono path aliases the caller's frame (see DeinterleaveFrom()).
  if (max_num_channels_ > 1) {
    channels_.reset(new AudioChannel[max_num_channels_]);
    mixed_channels_.reset(new AudioChannel[max_num_channels_]);
    mixed_low_pass_channels_.reset(new AudioChannel[max_num_channels_]);
  }
  // Reference copies are kept even in the mono case.
  low_pass_reference_channels_.reset(new AudioChannel[max_num_channels_]);

  // At 32 kHz the signal is processed as two 16 kHz bands, so the split
  // buffers hold 16 kHz frames.
  if (samples_per_channel_ == kSamplesPer32kHzChannel) {
    split_channels_.reset(new SplitAudioChannel[max_num_channels_]);
    samples_per_split_channel_ = kSamplesPer16kHzChannel;
  }
}
+
AudioBuffer::~AudioBuffer() {}

// Returns the samples for |channel|. If the buffer currently aliases an
// external frame (mono fast path in DeinterleaveFrom()), that frame's data
// is returned regardless of |channel|.
int16_t* AudioBuffer::data(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  if (data_ != NULL) {
    return data_;
  }

  return channels_[channel].data;
}

// Low band of |channel|; falls back to the full-band data when no split
// buffers exist (i.e. not operating at 32 kHz).
int16_t* AudioBuffer::low_pass_split_data(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  if (split_channels_.get() == NULL) {
    return data(channel);
  }

  return split_channels_[channel].low_pass_data;
}

// High band of |channel|, or NULL when no split buffers exist.
int16_t* AudioBuffer::high_pass_split_data(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  if (split_channels_.get() == NULL) {
    return NULL;
  }

  return split_channels_[channel].high_pass_data;
}

// Full-band mixed-down data; only meaningful after CopyAndMix().
int16_t* AudioBuffer::mixed_data(int channel) const {
  assert(channel >= 0 && channel < num_mixed_channels_);

  return mixed_channels_[channel].data;
}

// Low-band mixed-down data; only meaningful after CopyAndMixLowPass().
int16_t* AudioBuffer::mixed_low_pass_data(int channel) const {
  assert(channel >= 0 && channel < num_mixed_low_pass_channels_);

  return mixed_low_pass_channels_[channel].data;
}

// Saved low-band copy; NULL until CopyLowPassToReference() has been called.
int16_t* AudioBuffer::low_pass_reference(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  if (!reference_copied_) {
    return NULL;
  }

  return low_pass_reference_channels_[channel].data;
}

// NOTE(review): unlike low_pass_split_data(), the four filter-state
// accessors below dereference split_channels_ without a NULL check, so they
// are presumably only called when operating at 32 kHz — confirm callers.
WebRtc_Word32* AudioBuffer::analysis_filter_state1(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  return split_channels_[channel].analysis_filter_state1;
}

WebRtc_Word32* AudioBuffer::analysis_filter_state2(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  return split_channels_[channel].analysis_filter_state2;
}

WebRtc_Word32* AudioBuffer::synthesis_filter_state1(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  return split_channels_[channel].synthesis_filter_state1;
}

WebRtc_Word32* AudioBuffer::synthesis_filter_state2(int channel) const {
  assert(channel >= 0 && channel < num_channels_);
  return split_channels_[channel].synthesis_filter_state2;
}

// VAD activity associated with the current frame.
void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
  activity_ = activity;
}

AudioFrame::VADActivity AudioBuffer::activity() const {
  return activity_;
}

// True when the source frame reported zero energy (see DeinterleaveFrom()).
bool AudioBuffer::is_muted() const {
  return is_muted_;
}

int AudioBuffer::num_channels() const {
  return num_channels_;
}

int AudioBuffer::samples_per_channel() const {
  return samples_per_channel_;
}

// 160 in the 32 kHz (band-split) case, otherwise samples_per_channel().
int AudioBuffer::samples_per_split_channel() const {
  return samples_per_split_channel_;
}
+
+// TODO(andrew): Do deinterleaving and mixing in one step?
+void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
+  assert(frame->_audioChannel <= max_num_channels_);
+  assert(frame->_payloadDataLengthInSamples ==  samples_per_channel_);
+
+  num_channels_ = frame->_audioChannel;
+  data_was_mixed_ = false;
+  num_mixed_channels_ = 0;
+  num_mixed_low_pass_channels_ = 0;
+  reference_copied_ = false;
+  activity_ = frame->_vadActivity;
+  is_muted_ = false;
+  if (frame->_energy == 0) {
+    is_muted_ = true;
+  }
+
+  if (num_channels_ == 1) {
+    // We can get away with a pointer assignment in this case.
+    data_ = frame->_payloadData;
+    return;
+  }
+
+  int16_t* interleaved = frame->_payloadData;
+  for (int i = 0; i < num_channels_; i++) {
+    int16_t* deinterleaved = channels_[i].data;
+    int interleaved_idx = i;
+    for (int j = 0; j < samples_per_channel_; j++) {
+      deinterleaved[j] = interleaved[interleaved_idx];
+      interleaved_idx += num_channels_;
+    }
+  }
+}
+
+void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
+  assert(frame->_audioChannel == num_channels_);
+  assert(frame->_payloadDataLengthInSamples == samples_per_channel_);
+  frame->_vadActivity = activity_;
+
+  if (!data_changed) {
+    return;
+  }
+
+  if (num_channels_ == 1) {
+    if (data_was_mixed_) {
+      memcpy(frame->_payloadData,
+             channels_[0].data,
+             sizeof(int16_t) * samples_per_channel_);
+    } else {
+      // These should point to the same buffer in this case.
+      assert(data_ == frame->_payloadData);
+    }
+
+    return;
+  }
+
+  int16_t* interleaved = frame->_payloadData;
+  for (int i = 0; i < num_channels_; i++) {
+    int16_t* deinterleaved = channels_[i].data;
+    int interleaved_idx = i;
+    for (int j = 0; j < samples_per_channel_; j++) {
+      interleaved[interleaved_idx] = deinterleaved[j];
+      interleaved_idx += num_channels_;
+    }
+  }
+}
+
// TODO(andrew): would be good to support the no-mix case with pointer
// assignment.
// TODO(andrew): handle mixing to multiple channels?
// Downmixes the stored stereo data to mono in place: channel 0 receives
// the per-sample average of channels 0 and 1.
void AudioBuffer::Mix(int num_mixed_channels) {
  // We currently only support the stereo to mono case.
  assert(num_channels_ == 2);
  assert(num_mixed_channels == 1);

  // StereoToMono permits output aliasing an input, so channel 0 serves as
  // both source and destination here.
  StereoToMono(channels_[0].data,
               channels_[1].data,
               channels_[0].data,
               samples_per_channel_);

  num_channels_ = num_mixed_channels;
  data_was_mixed_ = true;
}
+
// Like Mix(), but writes the mono downmix to the separate mixed_channels_
// storage, leaving the original channel data untouched.
void AudioBuffer::CopyAndMix(int num_mixed_channels) {
  // We currently only support the stereo to mono case.
  assert(num_channels_ == 2);
  assert(num_mixed_channels == 1);

  StereoToMono(channels_[0].data,
               channels_[1].data,
               mixed_channels_[0].data,
               samples_per_channel_);

  num_mixed_channels_ = num_mixed_channels;
}
+
// Downmixes the low-band (split) data to mono into
// mixed_low_pass_channels_, leaving the source data untouched.
void AudioBuffer::CopyAndMixLowPass(int num_mixed_channels) {
  // We currently only support the stereo to mono case.
  assert(num_channels_ == 2);
  assert(num_mixed_channels == 1);

  StereoToMono(low_pass_split_data(0),
               low_pass_split_data(1),
               mixed_low_pass_channels_[0].data,
               samples_per_split_channel_);

  num_mixed_low_pass_channels_ = num_mixed_channels;
}
+
// Snapshots the current low-band data of every channel into the reference
// buffers, making low_pass_reference() return non-NULL for this frame.
void AudioBuffer::CopyLowPassToReference() {
  reference_copied_ = true;
  for (int i = 0; i < num_channels_; i++) {
    memcpy(low_pass_reference_channels_[i].data,
           low_pass_split_data(i),
           sizeof(int16_t) * samples_per_split_channel_);
  }
}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/audio_buffer.h b/trunk/src/modules/audio_processing/audio_buffer.h
new file mode 100644
index 0000000..87d6972
--- /dev/null
+++ b/trunk/src/modules/audio_processing/audio_buffer.h
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
+
+#include "module_common_types.h"
+#include "scoped_ptr.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+struct AudioChannel;
+struct SplitAudioChannel;
+
+class AudioBuffer {
+ public:
+  AudioBuffer(int max_num_channels, int samples_per_channel);
+  virtual ~AudioBuffer();
+
+  int num_channels() const;
+  int samples_per_channel() const;
+  int samples_per_split_channel() const;
+
+  int16_t* data(int channel) const;
+  int16_t* low_pass_split_data(int channel) const;
+  int16_t* high_pass_split_data(int channel) const;
+  int16_t* mixed_data(int channel) const;
+  int16_t* mixed_low_pass_data(int channel) const;
+  int16_t* low_pass_reference(int channel) const;
+
+  int32_t* analysis_filter_state1(int channel) const;
+  int32_t* analysis_filter_state2(int channel) const;
+  int32_t* synthesis_filter_state1(int channel) const;
+  int32_t* synthesis_filter_state2(int channel) const;
+
+  void set_activity(AudioFrame::VADActivity activity);
+  AudioFrame::VADActivity activity() const;
+
+  bool is_muted() const;
+
+  void DeinterleaveFrom(AudioFrame* audioFrame);
+  void InterleaveTo(AudioFrame* audioFrame) const;
+  // If |data_changed| is false, only the non-audio data members will be copied
+  // to |frame|.
+  void InterleaveTo(AudioFrame* frame, bool data_changed) const;
+  void Mix(int num_mixed_channels);
+  void CopyAndMix(int num_mixed_channels);
+  void CopyAndMixLowPass(int num_mixed_channels);
+  void CopyLowPassToReference();
+
+ private:
+  const int max_num_channels_;
+  int num_channels_;
+  int num_mixed_channels_;
+  int num_mixed_low_pass_channels_;
+  // Whether the original data was replaced with mixed data.
+  bool data_was_mixed_;
+  const int samples_per_channel_;
+  int samples_per_split_channel_;
+  bool reference_copied_;
+  AudioFrame::VADActivity activity_;
+  bool is_muted_;
+
+  int16_t* data_;
+  scoped_array<AudioChannel> channels_;
+  scoped_array<SplitAudioChannel> split_channels_;
+  scoped_array<AudioChannel> mixed_channels_;
+  // TODO(andrew): improve this, we don't need the full 32 kHz space here.
+  scoped_array<AudioChannel> mixed_low_pass_channels_;
+  scoped_array<AudioChannel> low_pass_reference_channels_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
diff --git a/trunk/src/modules/audio_processing/audio_processing.gypi b/trunk/src/modules/audio_processing/audio_processing.gypi
new file mode 100644
index 0000000..00078c4
--- /dev/null
+++ b/trunk/src/modules/audio_processing/audio_processing.gypi
@@ -0,0 +1,91 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
{
  'targets': [
    {
      # Main audio processing library: combines the component
      # implementations (AEC/AECM/AGC/NS/etc.) behind the public
      # audio_processing.h interface.
      'target_name': 'audio_processing',
      'type': '<(library)',
      'conditions': [
        # Select the fixed- or floating-point noise suppressor to match the
        # target platform's preference.
        ['prefer_fixed_point==1', {
          'dependencies': [ 'ns_fix' ],
          'defines': [ 'WEBRTC_NS_FIXED' ],
        }, {
          'dependencies': [ 'ns' ],
          'defines': [ 'WEBRTC_NS_FLOAT' ],
        }],
        # Debug dumping requires the protobuf-generated debug messages.
        ['enable_protobuf==1', {
          'dependencies': [ 'audioproc_debug_proto' ],
          'defines': [ 'WEBRTC_AUDIOPROC_DEBUG_DUMP' ],
        }],
      ],
      'dependencies': [
        'aec',
        'aecm',
        'agc',
        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
        '<(webrtc_root)/common_audio/common_audio.gyp:vad',
        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
      ],
      'include_dirs': [
        'include',
        '../interface',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          'include',
          '../interface',
        ],
      },
      'sources': [
        'include/audio_processing.h',
        'audio_buffer.cc',
        'audio_buffer.h',
        'audio_processing_impl.cc',
        'audio_processing_impl.h',
        'echo_cancellation_impl.cc',
        'echo_cancellation_impl.h',
        'echo_control_mobile_impl.cc',
        'echo_control_mobile_impl.h',
        'gain_control_impl.cc',
        'gain_control_impl.h',
        'high_pass_filter_impl.cc',
        'high_pass_filter_impl.h',
        'level_estimator_impl.cc',
        'level_estimator_impl.h',
        'noise_suppression_impl.cc',
        'noise_suppression_impl.h',
        'splitting_filter.cc',
        'splitting_filter.h',
        'processing_component.cc',
        'processing_component.h',
        'voice_detection_impl.cc',
        'voice_detection_impl.h',
      ],
    },
  ],
  'conditions': [
    ['enable_protobuf==1', {
      'targets': [
        {
          # Protobuf messages for the debug dump format (debug.proto).
          'target_name': 'audioproc_debug_proto',
          'type': 'static_library',
          'sources': [ 'debug.proto', ],
          'variables': {
            'proto_in_dir': '.',
            # Workaround to protect against gyp's pathname relativization when
            # this file is included by modules.gyp.
            'proto_out_protected': 'webrtc/audio_processing',
            'proto_out_dir': '<(proto_out_protected)',
          },
          'includes': [ '../../build/protoc.gypi', ],
        },
      ],
    }],
  ],
}
diff --git a/trunk/src/modules/audio_processing/audio_processing_impl.cc b/trunk/src/modules/audio_processing/audio_processing_impl.cc
new file mode 100644
index 0000000..35d266d
--- /dev/null
+++ b/trunk/src/modules/audio_processing/audio_processing_impl.cc
@@ -0,0 +1,643 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_processing_impl.h"
+
+#include <assert.h>
+
+#include "audio_buffer.h"
+#include "critical_section_wrapper.h"
+#include "echo_cancellation_impl.h"
+#include "echo_control_mobile_impl.h"
+#include "file_wrapper.h"
+#include "high_pass_filter_impl.h"
+#include "gain_control_impl.h"
+#include "level_estimator_impl.h"
+#include "module_common_types.h"
+#include "noise_suppression_impl.h"
+#include "processing_component.h"
+#include "splitting_filter.h"
+#include "voice_detection_impl.h"
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+// Files generated at build-time by the protobuf compiler.
+#ifdef WEBRTC_ANDROID
+#include "external/webrtc/src/modules/audio_processing/debug.pb.h"
+#else
+#include "webrtc/audio_processing/debug.pb.h"
+#endif
+#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
+
+namespace webrtc {
+// Factory entry point: builds the concrete implementation and runs a first
+// Initialize() so callers get a ready-to-use instance. Returns NULL if that
+// initial setup fails (the half-built object is freed here).
+AudioProcessing* AudioProcessing::Create(int id) {
+
+  AudioProcessingImpl* apm = new AudioProcessingImpl(id);
+  if (apm->Initialize() != kNoError) {
+    delete apm;
+    apm = NULL;
+  }
+
+  return apm;
+}
+
+// Matching destruction entry point; casts to the implementation type before
+// deleting so the implementation's destructor runs.
+void AudioProcessing::Destroy(AudioProcessing* apm) {
+  delete static_cast<AudioProcessingImpl*>(apm);
+}
+
+// Constructs with conservative defaults: 16 kHz, mono on capture/render/
+// output, zero stream delay. Each processing component is appended to
+// component_list_; that list order is also the iteration order used by
+// Initialize()/the destructor (note: ProcessStream() calls the components
+// explicitly, in its own fixed order).
+AudioProcessingImpl::AudioProcessingImpl(int id)
+    : id_(id),
+      echo_cancellation_(NULL),
+      echo_control_mobile_(NULL),
+      gain_control_(NULL),
+      high_pass_filter_(NULL),
+      level_estimator_(NULL),
+      noise_suppression_(NULL),
+      voice_detection_(NULL),
+      crit_(CriticalSectionWrapper::CreateCriticalSection()),
+      render_audio_(NULL),
+      capture_audio_(NULL),
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+      debug_file_(FileWrapper::Create()),
+      event_msg_(new audioproc::Event()),
+#endif
+      sample_rate_hz_(kSampleRate16kHz),
+      split_sample_rate_hz_(kSampleRate16kHz),
+      // 10 ms frames: samples per channel is always rate / 100.
+      samples_per_channel_(sample_rate_hz_ / 100),
+      stream_delay_ms_(0),
+      was_stream_delay_set_(false),
+      num_reverse_channels_(1),
+      num_input_channels_(1),
+      num_output_channels_(1) {
+
+  echo_cancellation_ = new EchoCancellationImpl(this);
+  component_list_.push_back(echo_cancellation_);
+
+  echo_control_mobile_ = new EchoControlMobileImpl(this);
+  component_list_.push_back(echo_control_mobile_);
+
+  gain_control_ = new GainControlImpl(this);
+  component_list_.push_back(gain_control_);
+
+  high_pass_filter_ = new HighPassFilterImpl(this);
+  component_list_.push_back(high_pass_filter_);
+
+  level_estimator_ = new LevelEstimatorImpl(this);
+  component_list_.push_back(level_estimator_);
+
+  noise_suppression_ = new NoiseSuppressionImpl(this);
+  component_list_.push_back(noise_suppression_);
+
+  voice_detection_ = new VoiceDetectionImpl(this);
+  component_list_.push_back(voice_detection_);
+}
+
+// Tears down every owned resource: destroys and deletes each component,
+// closes any open debug dump, then releases the lock and the audio buffers.
+AudioProcessingImpl::~AudioProcessingImpl() {
+  while (!component_list_.empty()) {
+    ProcessingComponent* component = component_list_.front();
+    // Destroy() releases the component's internal handles before delete.
+    component->Destroy();
+    delete component;
+    component_list_.pop_front();
+  }
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  if (debug_file_->Open()) {
+    debug_file_->CloseFile();
+  }
+#endif
+
+  delete crit_;
+  crit_ = NULL;
+
+  if (render_audio_) {
+    delete render_audio_;
+    render_audio_ = NULL;
+  }
+
+  if (capture_audio_) {
+    delete capture_audio_;
+    capture_audio_ = NULL;
+  }
+}
+
+// Exposes the shared lock so components can serialize against the owner.
+CriticalSectionWrapper* AudioProcessingImpl::crit() const {
+  return crit_;
+}
+
+// Rate of the per-band (split) data: 16 kHz when running at 32 kHz,
+// otherwise equal to sample_rate_hz_ (see set_sample_rate_hz()).
+int AudioProcessingImpl::split_sample_rate_hz() const {
+  return split_sample_rate_hz_;
+}
+
+// Public, locking wrapper around InitializeLocked().
+int AudioProcessingImpl::Initialize() {
+  CriticalSectionScoped crit_scoped(crit_);
+  return InitializeLocked();
+}
+
+// Rebuilds state for the current sample rate / channel configuration.
+// Caller must hold crit_. Recreates both audio buffers, clears the
+// stream-delay flag, and re-initializes every component; on a live debug
+// dump, records the new configuration as an INIT event.
+int AudioProcessingImpl::InitializeLocked() {
+  if (render_audio_ != NULL) {
+    delete render_audio_;
+    render_audio_ = NULL;
+  }
+
+  if (capture_audio_ != NULL) {
+    delete capture_audio_;
+    capture_audio_ = NULL;
+  }
+
+  render_audio_ = new AudioBuffer(num_reverse_channels_,
+                                  samples_per_channel_);
+  capture_audio_ = new AudioBuffer(num_input_channels_,
+                                   samples_per_channel_);
+
+  was_stream_delay_set_ = false;
+
+  // Initialize all components.
+  std::list<ProcessingComponent*>::iterator it;
+  for (it = component_list_.begin(); it != component_list_.end(); it++) {
+    int err = (*it)->Initialize();
+    if (err != kNoError) {
+      return err;
+    }
+  }
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  if (debug_file_->Open()) {
+    int err = WriteInitMessage();
+    if (err != kNoError) {
+      return err;
+    }
+  }
+#endif
+
+  return kNoError;
+}
+
+// Sets the sample rate for both streams. Only 8/16/32 kHz are accepted.
+// At 32 kHz the data is processed in two 16 kHz bands (super-wideband),
+// hence split_sample_rate_hz_ is capped at 16 kHz. Triggers a full
+// re-initialization, which resizes the audio buffers.
+int AudioProcessingImpl::set_sample_rate_hz(int rate) {
+  CriticalSectionScoped crit_scoped(crit_);
+  if (rate != kSampleRate8kHz &&
+      rate != kSampleRate16kHz &&
+      rate != kSampleRate32kHz) {
+    return kBadParameterError;
+  }
+
+  sample_rate_hz_ = rate;
+  // 10 ms frames.
+  samples_per_channel_ = rate / 100;
+
+  if (sample_rate_hz_ == kSampleRate32kHz) {
+    split_sample_rate_hz_ = kSampleRate16kHz;
+  } else {
+    split_sample_rate_hz_ = sample_rate_hz_;
+  }
+
+  return InitializeLocked();
+}
+
+int AudioProcessingImpl::sample_rate_hz() const {
+  return sample_rate_hz_;
+}
+
+// Sets the render (far-end) channel count; mono or stereo only.
+// Re-initializes so render_audio_ matches the new count.
+int AudioProcessingImpl::set_num_reverse_channels(int channels) {
+  CriticalSectionScoped crit_scoped(crit_);
+  // Only stereo supported currently.
+  if (channels > 2 || channels < 1) {
+    return kBadParameterError;
+  }
+
+  num_reverse_channels_ = channels;
+
+  return InitializeLocked();
+}
+
+int AudioProcessingImpl::num_reverse_channels() const {
+  return num_reverse_channels_;
+}
+
+// Sets capture input/output channel counts (each mono or stereo). Output
+// may not exceed input: ProcessStream() only mixes down, never up.
+// Re-initializes so capture_audio_ matches the new counts.
+int AudioProcessingImpl::set_num_channels(
+    int input_channels,
+    int output_channels) {
+  CriticalSectionScoped crit_scoped(crit_);
+  if (output_channels > input_channels) {
+    return kBadParameterError;
+  }
+
+  // Only stereo supported currently.
+  if (input_channels > 2 || input_channels < 1) {
+    return kBadParameterError;
+  }
+
+  if (output_channels > 2 || output_channels < 1) {
+    return kBadParameterError;
+  }
+
+  num_input_channels_ = input_channels;
+  num_output_channels_ = output_channels;
+
+  return InitializeLocked();
+}
+
+int AudioProcessingImpl::num_input_channels() const {
+  return num_input_channels_;
+}
+
+int AudioProcessingImpl::num_output_channels() const {
+  return num_output_channels_;
+}
+
+// Processes one 10 ms capture (near-end) frame in place. Pipeline:
+//   validate frame -> (debug: log input) -> deinterleave -> mix down to
+//   num_output_channels_ -> band-split (32 kHz only) -> HPF -> AGC analysis
+//   -> AEC -> NS -> AECM -> VAD -> AGC -> band-recombine -> level estimate
+//   -> interleave back into |frame| -> (debug: log output).
+// The component call order below is deliberate; do not reorder casually.
+int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
+  CriticalSectionScoped crit_scoped(crit_);
+  int err = kNoError;
+
+  if (frame == NULL) {
+    return kNullPointerError;
+  }
+
+  // The frame must match the configured rate, channel count and 10 ms size.
+  if (frame->_frequencyInHz != sample_rate_hz_) {
+    return kBadSampleRateError;
+  }
+
+  if (frame->_audioChannel != num_input_channels_) {
+    return kBadNumberChannelsError;
+  }
+
+  if (frame->_payloadDataLengthInSamples != samples_per_channel_) {
+    return kBadDataLengthError;
+  }
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  // Capture the unprocessed input plus the stream parameters in effect;
+  // the event is completed (output + write) at the end of this function.
+  if (debug_file_->Open()) {
+    event_msg_->set_type(audioproc::Event::STREAM);
+    audioproc::Stream* msg = event_msg_->mutable_stream();
+    const size_t data_size = sizeof(int16_t) *
+                             frame->_payloadDataLengthInSamples *
+                             frame->_audioChannel;
+    msg->set_input_data(frame->_payloadData, data_size);
+    msg->set_delay(stream_delay_ms_);
+    msg->set_drift(echo_cancellation_->stream_drift_samples());
+    msg->set_level(gain_control_->stream_analog_level());
+  }
+#endif
+
+  capture_audio_->DeinterleaveFrom(frame);
+
+  // TODO(ajm): experiment with mixing and AEC placement.
+  if (num_output_channels_ < num_input_channels_) {
+    capture_audio_->Mix(num_output_channels_);
+    // The frame now carries the mixed-down channel count on output.
+    frame->_audioChannel = num_output_channels_;
+  }
+
+  // Band-splitting is skipped when no enabled component will touch the
+  // split data (see analysis_needed()).
+  bool data_changed = stream_data_changed();
+  if (analysis_needed(data_changed)) {
+    for (int i = 0; i < num_output_channels_; i++) {
+      // Split into a low and high band.
+      SplittingFilterAnalysis(capture_audio_->data(i),
+                              capture_audio_->low_pass_split_data(i),
+                              capture_audio_->high_pass_split_data(i),
+                              capture_audio_->analysis_filter_state1(i),
+                              capture_audio_->analysis_filter_state2(i));
+    }
+  }
+
+  err = high_pass_filter_->ProcessCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = gain_control_->AnalyzeCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = echo_cancellation_->ProcessCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  // AECM uses a pre-NS copy of the low band as its reference signal.
+  if (echo_control_mobile_->is_enabled() &&
+      noise_suppression_->is_enabled()) {
+    capture_audio_->CopyLowPassToReference();
+  }
+
+  err = noise_suppression_->ProcessCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = echo_control_mobile_->ProcessCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = voice_detection_->ProcessCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = gain_control_->ProcessCaptureAudio(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  if (synthesis_needed(data_changed)) {
+    for (int i = 0; i < num_output_channels_; i++) {
+      // Recombine low and high bands.
+      SplittingFilterSynthesis(capture_audio_->low_pass_split_data(i),
+                               capture_audio_->high_pass_split_data(i),
+                               capture_audio_->data(i),
+                               capture_audio_->synthesis_filter_state1(i),
+                               capture_audio_->synthesis_filter_state2(i));
+    }
+  }
+
+  // The level estimator operates on the recombined data.
+  err = level_estimator_->ProcessStream(capture_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  // If nothing changed the data, the frame's payload is left untouched.
+  capture_audio_->InterleaveTo(frame, data_changed);
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  if (debug_file_->Open()) {
+    audioproc::Stream* msg = event_msg_->mutable_stream();
+    const size_t data_size = sizeof(int16_t) *
+                             frame->_payloadDataLengthInSamples *
+                             frame->_audioChannel;
+    msg->set_output_data(frame->_payloadData, data_size);
+    err = WriteMessageToDebugFile();
+    if (err != kNoError) {
+      return err;
+    }
+  }
+#endif
+
+  // The delay must be provided again (set_stream_delay_ms()) before the
+  // next call; the AEC component checks this flag.
+  was_stream_delay_set_ = false;
+  return kNoError;
+}
+
+// Feeds one 10 ms render (far-end) frame to the components that need a
+// reference signal (AEC, AECM, AGC). The frame is analyzed only — never
+// modified. Validation mirrors ProcessStream() but against the reverse
+// channel count.
+int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
+  CriticalSectionScoped crit_scoped(crit_);
+  int err = kNoError;
+
+  if (frame == NULL) {
+    return kNullPointerError;
+  }
+
+  if (frame->_frequencyInHz != sample_rate_hz_) {
+    return kBadSampleRateError;
+  }
+
+  if (frame->_audioChannel != num_reverse_channels_) {
+    return kBadNumberChannelsError;
+  }
+
+  if (frame->_payloadDataLengthInSamples != samples_per_channel_) {
+    return kBadDataLengthError;
+  }
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  // Reverse-stream events are written immediately; there is no output data
+  // to append later (the frame is not modified).
+  if (debug_file_->Open()) {
+    event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
+    audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
+    const size_t data_size = sizeof(int16_t) *
+                             frame->_payloadDataLengthInSamples *
+                             frame->_audioChannel;
+    msg->set_data(frame->_payloadData, data_size);
+    err = WriteMessageToDebugFile();
+    if (err != kNoError) {
+      return err;
+    }
+  }
+#endif
+
+  render_audio_->DeinterleaveFrom(frame);
+
+  // TODO(ajm): turn the splitting filter into a component?
+  if (sample_rate_hz_ == kSampleRate32kHz) {
+    for (int i = 0; i < num_reverse_channels_; i++) {
+      // Split into low and high band.
+      SplittingFilterAnalysis(render_audio_->data(i),
+                              render_audio_->low_pass_split_data(i),
+                              render_audio_->high_pass_split_data(i),
+                              render_audio_->analysis_filter_state1(i),
+                              render_audio_->analysis_filter_state2(i));
+    }
+  }
+
+  // TODO(ajm): warnings possible from components?
+  err = echo_cancellation_->ProcessRenderAudio(render_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = echo_control_mobile_->ProcessRenderAudio(render_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  err = gain_control_->ProcessRenderAudio(render_audio_);
+  if (err != kNoError) {
+    return err;
+  }
+
+  return err;  // TODO(ajm): this is for returning warnings; necessary?
+}
+
+// Records the render-to-capture delay for the next ProcessStream() call.
+// NOTE(review): the was_stream_delay_set_ flag is raised before validation,
+// so even a rejected negative delay marks the delay as "set" for the next
+// ProcessStream() call — confirm this is intentional. Delays above 500 ms
+// are clamped and reported as a warning rather than an error.
+int AudioProcessingImpl::set_stream_delay_ms(int delay) {
+  was_stream_delay_set_ = true;
+  if (delay < 0) {
+    return kBadParameterError;
+  }
+
+  // TODO(ajm): the max is rather arbitrarily chosen; investigate.
+  if (delay > 500) {
+    stream_delay_ms_ = 500;
+    return kBadStreamParameterWarning;
+  }
+
+  stream_delay_ms_ = delay;
+  return kNoError;
+}
+
+int AudioProcessingImpl::stream_delay_ms() const {
+  return stream_delay_ms_;
+}
+
+// True if set_stream_delay_ms() was called since the last ProcessStream().
+bool AudioProcessingImpl::was_stream_delay_set() const {
+  return was_stream_delay_set_;
+}
+
+// Starts dumping events to |filename|, replacing any recording in
+// progress, and writes an INIT event with the current configuration.
+// Returns kUnsupportedFunctionError when built without
+// WEBRTC_AUDIOPROC_DEBUG_DUMP.
+int AudioProcessingImpl::StartDebugRecording(
+    const char filename[AudioProcessing::kMaxFilenameSize]) {
+  CriticalSectionScoped crit_scoped(crit_);
+  // The two size constants must agree for |filename| to be safe to pass on.
+  assert(kMaxFilenameSize == FileWrapper::kMaxFileNameSize);
+
+  if (filename == NULL) {
+    return kNullPointerError;
+  }
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  // Stop any ongoing recording.
+  if (debug_file_->Open()) {
+    if (debug_file_->CloseFile() == -1) {
+      return kFileError;
+    }
+  }
+
+  if (debug_file_->OpenFile(filename, false) == -1) {
+    debug_file_->CloseFile();
+    return kFileError;
+  }
+
+  int err = WriteInitMessage();
+  if (err != kNoError) {
+    return err;
+  }
+  return kNoError;
+#else
+  return kUnsupportedFunctionError;
+#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
+}
+
+// Closes the debug dump, if one is active; a no-op success otherwise.
+int AudioProcessingImpl::StopDebugRecording() {
+  CriticalSectionScoped crit_scoped(crit_);
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  // We just return if recording hasn't started.
+  if (debug_file_->Open()) {
+    if (debug_file_->CloseFile() == -1) {
+      return kFileError;
+    }
+  }
+  return kNoError;
+#else
+  return kUnsupportedFunctionError;
+#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
+}
+
+// Component accessors: each returns the interface view of the owned
+// implementation object. Ownership stays with AudioProcessingImpl.
+EchoCancellation* AudioProcessingImpl::echo_cancellation() const {
+  return echo_cancellation_;
+}
+
+EchoControlMobile* AudioProcessingImpl::echo_control_mobile() const {
+  return echo_control_mobile_;
+}
+
+GainControl* AudioProcessingImpl::gain_control() const {
+  return gain_control_;
+}
+
+HighPassFilter* AudioProcessingImpl::high_pass_filter() const {
+  return high_pass_filter_;
+}
+
+LevelEstimator* AudioProcessingImpl::level_estimator() const {
+  return level_estimator_;
+}
+
+NoiseSuppression* AudioProcessingImpl::noise_suppression() const {
+  return noise_suppression_;
+}
+
+VoiceDetection* AudioProcessingImpl::voice_detection() const {
+  return voice_detection_;
+}
+
+// Module interface hook: updates the instance id only.
+WebRtc_Word32 AudioProcessingImpl::ChangeUniqueId(const WebRtc_Word32 id) {
+  CriticalSectionScoped crit_scoped(crit_);
+  id_ = id;
+
+  return kNoError;
+}
+
+// Returns true if any enabled component may modify the capture data.
+// level_estimator_ and voice_detection_ are analysis-only, so they do not
+// count as modifying the stream.
+bool AudioProcessingImpl::stream_data_changed() const {
+  int enabled_count = 0;
+  std::list<ProcessingComponent*>::const_iterator it;
+  for (it = component_list_.begin(); it != component_list_.end(); it++) {
+    if ((*it)->is_component_enabled()) {
+      enabled_count++;
+    }
+  }
+
+  // Data is unchanged if no components are enabled, or if only level_estimator_
+  // or voice_detection_ is enabled.
+  if (enabled_count == 0) {
+    return false;
+  } else if (enabled_count == 1) {
+    if (level_estimator_->is_enabled() || voice_detection_->is_enabled()) {
+      return false;
+    }
+  } else if (enabled_count == 2) {
+    if (level_estimator_->is_enabled() && voice_detection_->is_enabled()) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// Band recombination is only required when the data was actually modified
+// and we are running in the two-band (32 kHz) mode.
+bool AudioProcessingImpl::synthesis_needed(bool stream_data_changed) const {
+  return (stream_data_changed && sample_rate_hz_ == kSampleRate32kHz);
+}
+
+// Decides whether ProcessStream() must band-split the capture data:
+// needed only at 32 kHz, and only if some component beyond the
+// analysis-only level estimator will consume the split bands.
+bool AudioProcessingImpl::analysis_needed(bool stream_data_changed) const {
+  if (!stream_data_changed && !voice_detection_->is_enabled()) {
+    // Only level_estimator_ is enabled.
+    return false;
+  } else if (sample_rate_hz_ == kSampleRate32kHz) {
+    // Something besides level_estimator_ is enabled, and we have super-wb.
+    return true;
+  }
+  return false;
+}
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+// Serializes the pending event_msg_ to the debug file, preceded by its
+// byte size as a host-endian int32, then clears the message for reuse.
+// Returns kNoError on success, kFileError on a failed write, or
+// kUnspecifiedError if the message is empty or fails to serialize.
+int AudioProcessingImpl::WriteMessageToDebugFile() {
+  int32_t size = event_msg_->ByteSize();
+  if (size <= 0) {
+    return kUnspecifiedError;
+  }
+#if defined(WEBRTC_BIG_ENDIAN)
+  // TODO(ajm): Use little-endian "on the wire". For the moment, we can be
+  //            pretty safe in assuming little-endian.
+#endif
+
+  if (!event_msg_->SerializeToString(&event_str_)) {
+    return kUnspecifiedError;
+  }
+
+  // Write message preceded by its size.
+  if (!debug_file_->Write(&size, sizeof(int32_t))) {
+    return kFileError;
+  }
+  if (!debug_file_->Write(event_str_.data(), event_str_.length())) {
+    return kFileError;
+  }
+
+  event_msg_->Clear();
+
+  // Return the named success code (previously a raw 0) for consistency
+  // with every other success path in this file.
+  return kNoError;
+}
+
+// Writes an INIT event capturing the configuration in effect: sample
+// rates and all three channel counts. Called whenever a recording starts
+// and whenever the configuration is re-initialized mid-recording.
+int AudioProcessingImpl::WriteInitMessage() {
+  event_msg_->set_type(audioproc::Event::INIT);
+  audioproc::Init* msg = event_msg_->mutable_init();
+  msg->set_sample_rate(sample_rate_hz_);
+  msg->set_device_sample_rate(echo_cancellation_->device_sample_rate_hz());
+  msg->set_num_input_channels(num_input_channels_);
+  msg->set_num_output_channels(num_output_channels_);
+  msg->set_num_reverse_channels(num_reverse_channels_);
+
+  int err = WriteMessageToDebugFile();
+  if (err != kNoError) {
+    return err;
+  }
+
+  return kNoError;
+}
+#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/audio_processing_impl.h b/trunk/src/modules/audio_processing/audio_processing_impl.h
new file mode 100644
index 0000000..c1ab476
--- /dev/null
+++ b/trunk/src/modules/audio_processing/audio_processing_impl.h
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_PROCESSING_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_PROCESSING_IMPL_H_
+
+#include "audio_processing.h"
+
+#include <list>
+#include <string>
+
+#include "scoped_ptr.h"
+
+namespace webrtc {
+class AudioBuffer;
+class CriticalSectionWrapper;
+class EchoCancellationImpl;
+class EchoControlMobileImpl;
+class FileWrapper;
+class GainControlImpl;
+class HighPassFilterImpl;
+class LevelEstimatorImpl;
+class NoiseSuppressionImpl;
+class ProcessingComponent;
+class VoiceDetectionImpl;
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+namespace audioproc {
+
+class Event;
+
+}  // namespace audioproc
+#endif
+
+// Concrete implementation of the AudioProcessing interface. Owns all
+// processing components, the shared lock, and the working audio buffers;
+// all public entry points serialize on crit_.
+class AudioProcessingImpl : public AudioProcessing {
+ public:
+  // Supported sample rates; 32 kHz runs in a split two-band mode.
+  enum {
+    kSampleRate8kHz = 8000,
+    kSampleRate16kHz = 16000,
+    kSampleRate32kHz = 32000
+  };
+
+  explicit AudioProcessingImpl(int id);
+  virtual ~AudioProcessingImpl();
+
+  // Shared lock, exposed so components can serialize with the owner.
+  CriticalSectionWrapper* crit() const;
+
+  int split_sample_rate_hz() const;
+  // True if set_stream_delay_ms() was called since the last ProcessStream().
+  bool was_stream_delay_set() const;
+
+  // AudioProcessing methods.
+  virtual int Initialize();
+  // Like Initialize(), but assumes crit_ is already held by the caller.
+  virtual int InitializeLocked();
+  virtual int set_sample_rate_hz(int rate);
+  virtual int sample_rate_hz() const;
+  virtual int set_num_channels(int input_channels, int output_channels);
+  virtual int num_input_channels() const;
+  virtual int num_output_channels() const;
+  virtual int set_num_reverse_channels(int channels);
+  virtual int num_reverse_channels() const;
+  virtual int ProcessStream(AudioFrame* frame);
+  virtual int AnalyzeReverseStream(AudioFrame* frame);
+  virtual int set_stream_delay_ms(int delay);
+  virtual int stream_delay_ms() const;
+  virtual int StartDebugRecording(const char filename[kMaxFilenameSize]);
+  virtual int StopDebugRecording();
+  virtual EchoCancellation* echo_cancellation() const;
+  virtual EchoControlMobile* echo_control_mobile() const;
+  virtual GainControl* gain_control() const;
+  virtual HighPassFilter* high_pass_filter() const;
+  virtual LevelEstimator* level_estimator() const;
+  virtual NoiseSuppression* noise_suppression() const;
+  virtual VoiceDetection* voice_detection() const;
+
+  // Module methods.
+  virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+ private:
+  // Helpers deciding whether capture data is modified and whether
+  // band-split analysis/synthesis is required for the current frame.
+  bool stream_data_changed() const;
+  bool synthesis_needed(bool stream_data_changed) const;
+  bool analysis_needed(bool stream_data_changed) const;
+
+  int id_;
+
+  // Owned components; also referenced (unowned) through component_list_.
+  EchoCancellationImpl* echo_cancellation_;
+  EchoControlMobileImpl* echo_control_mobile_;
+  GainControlImpl* gain_control_;
+  HighPassFilterImpl* high_pass_filter_;
+  LevelEstimatorImpl* level_estimator_;
+  NoiseSuppressionImpl* noise_suppression_;
+  VoiceDetectionImpl* voice_detection_;
+
+  std::list<ProcessingComponent*> component_list_;
+  CriticalSectionWrapper* crit_;
+  // Working buffers for the render (far-end) and capture (near-end) paths.
+  AudioBuffer* render_audio_;
+  AudioBuffer* capture_audio_;
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  // TODO(andrew): make this more graceful. Ideally we would split this stuff
+  // out into a separate class with an "enabled" and "disabled" implementation.
+  int WriteMessageToDebugFile();
+  int WriteInitMessage();
+  scoped_ptr<FileWrapper> debug_file_;
+  scoped_ptr<audioproc::Event> event_msg_; // Protobuf message.
+  std::string event_str_; // Memory for protobuf serialization.
+#endif
+
+  int sample_rate_hz_;
+  int split_sample_rate_hz_;
+  // Samples per 10 ms frame (sample_rate_hz_ / 100).
+  int samples_per_channel_;
+  int stream_delay_ms_;
+  bool was_stream_delay_set_;
+
+  int num_reverse_channels_;
+  int num_input_channels_;
+  int num_output_channels_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_PROCESSING_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/debug.proto b/trunk/src/modules/audio_processing/debug.proto
new file mode 100644
index 0000000..4b3a163
--- /dev/null
+++ b/trunk/src/modules/audio_processing/debug.proto
@@ -0,0 +1,37 @@
+// Schema for the audio-processing debug dump: a stream of length-prefixed
+// Event messages written by AudioProcessingImpl when
+// WEBRTC_AUDIOPROC_DEBUG_DUMP is defined.
+syntax = "proto2";
+option optimize_for = LITE_RUNTIME;
+package webrtc.audioproc;
+
+// Configuration snapshot, recorded at the start of a dump and on every
+// re-initialization.
+message Init {
+  optional int32 sample_rate = 1;
+  optional int32 device_sample_rate = 2;
+  optional int32 num_input_channels = 3;
+  optional int32 num_output_channels = 4;
+  optional int32 num_reverse_channels = 5;
+}
+
+// One render (far-end) frame: raw interleaved int16 samples.
+message ReverseStream {
+  optional bytes data = 1;
+}
+
+// One capture (near-end) frame: interleaved int16 samples before and after
+// processing, plus the stream parameters supplied for that frame.
+message Stream {
+  optional bytes input_data = 1;
+  optional bytes output_data = 2;
+  optional int32 delay = 3;
+  optional sint32 drift = 4;
+  optional int32 level = 5;
+}
+
+// Tagged union: |type| selects which of the optional payloads is present.
+message Event {
+  enum Type {
+    INIT = 0;
+    REVERSE_STREAM = 1;
+    STREAM = 2;
+  }
+
+  required Type type = 1;
+
+  optional Init init = 2;
+  optional ReverseStream reverse_stream = 3;
+  optional Stream stream = 4;
+}
diff --git a/trunk/src/modules/audio_processing/echo_cancellation_impl.cc b/trunk/src/modules/audio_processing/echo_cancellation_impl.cc
new file mode 100644
index 0000000..d4c5523
--- /dev/null
+++ b/trunk/src/modules/audio_processing/echo_cancellation_impl.cc
@@ -0,0 +1,371 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "echo_cancellation_impl.h"
+
+#include <cassert>
+#include <string.h>
+
+#include "critical_section_wrapper.h"
+#include "echo_cancellation.h"
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+
+namespace webrtc {
+
+// Opaque handle type for the underlying C AEC instances.
+typedef void Handle;
+
+namespace {
+// Translates the public SuppressionLevel enum to the AEC NLP mode constant;
+// returns -1 for an out-of-range value (used by callers for validation).
+WebRtc_Word16 MapSetting(EchoCancellation::SuppressionLevel level) {
+  switch (level) {
+    case EchoCancellation::kLowSuppression:
+      return kAecNlpConservative;
+    case EchoCancellation::kModerateSuppression:
+      return kAecNlpModerate;
+    case EchoCancellation::kHighSuppression:
+      return kAecNlpAggressive;
+  }
+  assert(false);
+  return -1;
+}
+
+// Maps low-level AEC error codes to the public AudioProcessing errors;
+// anything unrecognized collapses to kUnspecifiedError.
+AudioProcessing::Error MapError(int err) {
+  switch (err) {
+    case AEC_UNSUPPORTED_FUNCTION_ERROR:
+      return AudioProcessing::kUnsupportedFunctionError;
+    case AEC_BAD_PARAMETER_ERROR:
+      return AudioProcessing::kBadParameterError;
+    case AEC_BAD_PARAMETER_WARNING:
+      return AudioProcessing::kBadStreamParameterWarning;
+    default:
+      // AEC_UNSPECIFIED_ERROR
+      // AEC_UNINITIALIZED_ERROR
+      // AEC_NULL_POINTER_ERROR
+      return AudioProcessing::kUnspecifiedError;
+  }
+}
+}  // namespace
+
+// Constructs disabled by default, moderate suppression, 48 kHz assumed
+// device rate. |apm| is the owning AudioProcessingImpl; not owned here.
+EchoCancellationImpl::EchoCancellationImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm),
+    drift_compensation_enabled_(false),
+    metrics_enabled_(false),
+    suppression_level_(kModerateSuppression),
+    device_sample_rate_hz_(48000),
+    stream_drift_samples_(0),
+    was_stream_drift_set_(false),
+    stream_has_echo_(false),
+    delay_logging_enabled_(false) {}
+
+EchoCancellationImpl::~EchoCancellationImpl() {}
+
+// Buffers one far-end (render) frame into every AEC instance. One handle
+// exists per (output channel, reverse channel) pair; only the low-band
+// split data is fed to the AEC.
+int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  // 160 samples == 10 ms at the 16 kHz (or lower) split rate.
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == apm_->num_reverse_channels());
+
+  int err = apm_->kNoError;
+
+  // The ordering convention must be followed to pass to the correct AEC.
+  size_t handle_index = 0;
+  for (int i = 0; i < apm_->num_output_channels(); i++) {
+    for (int j = 0; j < audio->num_channels(); j++) {
+      Handle* my_handle = static_cast<Handle*>(handle(handle_index));
+      err = WebRtcAec_BufferFarend(
+          my_handle,
+          audio->low_pass_split_data(j),
+          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()));
+
+      if (err != apm_->kNoError) {
+        return GetHandleError(my_handle);  // TODO(ajm): warning possible?
+      }
+
+      handle_index++;
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Runs the AEC over one near-end (capture) frame, in place on the split
+// bands. Requires the stream delay (and drift, when compensation is on) to
+// have been set since the last call; stream_has_echo_ is refreshed from the
+// per-handle echo status.
+int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  if (!apm_->was_stream_delay_set()) {
+    return apm_->kStreamParameterNotSetError;
+  }
+
+  if (drift_compensation_enabled_ && !was_stream_drift_set_) {
+    return apm_->kStreamParameterNotSetError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == apm_->num_output_channels());
+
+  int err = apm_->kNoError;
+
+  // The ordering convention must be followed to pass to the correct AEC.
+  size_t handle_index = 0;
+  stream_has_echo_ = false;
+  for (int i = 0; i < audio->num_channels(); i++) {
+    for (int j = 0; j < apm_->num_reverse_channels(); j++) {
+      Handle* my_handle = handle(handle_index);
+      // In-place processing: the near-end buffers double as output.
+      err = WebRtcAec_Process(
+          my_handle,
+          audio->low_pass_split_data(i),
+          audio->high_pass_split_data(i),
+          audio->low_pass_split_data(i),
+          audio->high_pass_split_data(i),
+          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()),
+          apm_->stream_delay_ms(),
+          stream_drift_samples_);
+
+      if (err != apm_->kNoError) {
+        err = GetHandleError(my_handle);
+        // TODO(ajm): Figure out how to return warnings properly.
+        if (err != apm_->kBadStreamParameterWarning) {
+          return err;
+        }
+      }
+
+      // Any single handle reporting echo marks the whole stream.
+      WebRtc_Word16 status = 0;
+      err = WebRtcAec_get_echo_status(my_handle, &status);
+      if (err != apm_->kNoError) {
+        return GetHandleError(my_handle);
+      }
+
+      if (status == 1) {
+        stream_has_echo_ = true;
+      }
+
+      handle_index++;
+    }
+  }
+
+  // Drift must be supplied again before the next call.
+  was_stream_drift_set_ = false;
+  return apm_->kNoError;
+}
+
+// Enables/disables the AEC. Refuses to enable while the mobile echo
+// controller (AECM) is active — the two are mutually exclusive.
+int EchoCancellationImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  // Ensure AEC and AECM are not both enabled.
+  if (enable && apm_->echo_control_mobile()->is_enabled()) {
+    return apm_->kBadParameterError;
+  }
+
+  return EnableComponent(enable);
+}
+
+bool EchoCancellationImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+// Validates via MapSetting() (-1 means out of range), stores the level,
+// and pushes the new configuration to the handles.
+int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (MapSetting(level) == -1) {
+    return apm_->kBadParameterError;
+  }
+
+  suppression_level_ = level;
+  return Configure();
+}
+
+EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
+    const {
+  return suppression_level_;
+}
+
+// Toggles clock-drift (skew) compensation and reconfigures the handles.
+int EchoCancellationImpl::enable_drift_compensation(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  drift_compensation_enabled_ = enable;
+  return Configure();
+}
+
+bool EchoCancellationImpl::is_drift_compensation_enabled() const {
+  return drift_compensation_enabled_;
+}
+
+// Sets the audio device's native rate (8-96 kHz) and re-initializes the
+// handles, since the rate is an init-time parameter of the AEC.
+int EchoCancellationImpl::set_device_sample_rate_hz(int rate) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (rate < 8000 || rate > 96000) {
+    return apm_->kBadParameterError;
+  }
+
+  device_sample_rate_hz_ = rate;
+  return Initialize();
+}
+
+int EchoCancellationImpl::device_sample_rate_hz() const {
+  return device_sample_rate_hz_;
+}
+
+// Records the render/capture drift (in samples) for the next
+// ProcessCaptureAudio() call and marks it as provided.
+int EchoCancellationImpl::set_stream_drift_samples(int drift) {
+  was_stream_drift_set_ = true;
+  stream_drift_samples_ = drift;
+  return apm_->kNoError;
+}
+
+int EchoCancellationImpl::stream_drift_samples() const {
+  return stream_drift_samples_;
+}
+
+// Toggles metrics collection and reconfigures the handles.
+int EchoCancellationImpl::enable_metrics(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  metrics_enabled_ = enable;
+  return Configure();
+}
+
+bool EchoCancellationImpl::are_metrics_enabled() const {
+  return metrics_enabled_;
+}
+
+// Copies the AEC's echo metrics into the caller's struct. Requires both
+// the component and metrics collection to be enabled; |metrics| is zeroed
+// before being filled.
+// TODO(ajm): we currently just use the metrics from the first AEC. Think more
+//            about the best way to extend this to multi-channel.
+int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (metrics == NULL) {
+    return apm_->kNullPointerError;
+  }
+
+  if (!is_component_enabled() || !metrics_enabled_) {
+    return apm_->kNotEnabledError;
+  }
+
+  AecMetrics my_metrics;
+  memset(&my_metrics, 0, sizeof(my_metrics));
+  memset(metrics, 0, sizeof(Metrics));
+
+  Handle* my_handle = static_cast<Handle*>(handle(0));
+  int err = WebRtcAec_GetMetrics(my_handle, &my_metrics);
+  if (err != apm_->kNoError) {
+    return GetHandleError(my_handle);
+  }
+
+  // Field-by-field translation from the C struct to the public struct.
+  metrics->residual_echo_return_loss.instant = my_metrics.rerl.instant;
+  metrics->residual_echo_return_loss.average = my_metrics.rerl.average;
+  metrics->residual_echo_return_loss.maximum = my_metrics.rerl.max;
+  metrics->residual_echo_return_loss.minimum = my_metrics.rerl.min;
+
+  metrics->echo_return_loss.instant = my_metrics.erl.instant;
+  metrics->echo_return_loss.average = my_metrics.erl.average;
+  metrics->echo_return_loss.maximum = my_metrics.erl.max;
+  metrics->echo_return_loss.minimum = my_metrics.erl.min;
+
+  metrics->echo_return_loss_enhancement.instant = my_metrics.erle.instant;
+  metrics->echo_return_loss_enhancement.average = my_metrics.erle.average;
+  metrics->echo_return_loss_enhancement.maximum = my_metrics.erle.max;
+  metrics->echo_return_loss_enhancement.minimum = my_metrics.erle.min;
+
+  metrics->a_nlp.instant = my_metrics.aNlp.instant;
+  metrics->a_nlp.average = my_metrics.aNlp.average;
+  metrics->a_nlp.maximum = my_metrics.aNlp.max;
+  metrics->a_nlp.minimum = my_metrics.aNlp.min;
+
+  return apm_->kNoError;
+}
+
+// Result of the most recent ProcessCaptureAudio(): true if any AEC
+// instance reported echo in that frame.
+bool EchoCancellationImpl::stream_has_echo() const {
+  return stream_has_echo_;
+}
+
+// Toggles delay logging and reconfigures the handles.
+int EchoCancellationImpl::enable_delay_logging(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  delay_logging_enabled_ = enable;
+  return Configure();
+}
+
+bool EchoCancellationImpl::is_delay_logging_enabled() const {
+  return delay_logging_enabled_;
+}
+
+// Fetches delay statistics (median and standard deviation) from the first
+// AEC instance. Requires the component and delay logging to be enabled.
+// TODO(bjornv): How should we handle the multi-channel case?
+int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (median == NULL) {
+    return apm_->kNullPointerError;
+  }
+  if (std == NULL) {
+    return apm_->kNullPointerError;
+  }
+
+  if (!is_component_enabled() || !delay_logging_enabled_) {
+    return apm_->kNotEnabledError;
+  }
+
+  Handle* my_handle = static_cast<Handle*>(handle(0));
+  if (WebRtcAec_GetDelayMetrics(my_handle, median, std) !=
+      apm_->kNoError) {
+    return GetHandleError(my_handle);
+  }
+
+  return apm_->kNoError;
+}
+
+// Re-initializes the base component (recreating handles as needed) and
+// clears the per-frame drift flag when the component is enabled.
+int EchoCancellationImpl::Initialize() {
+  int err = ProcessingComponent::Initialize();
+  if (err != apm_->kNoError || !is_component_enabled()) {
+    return err;
+  }
+
+  was_stream_drift_set_ = false;
+
+  return apm_->kNoError;
+}
+
+// --- ProcessingComponent handle hooks: wrap the C WebRtcAec_* lifecycle ---
+
+// Allocates one AEC instance; returns NULL on failure.
+void* EchoCancellationImpl::CreateHandle() const {
+  Handle* handle = NULL;
+  if (WebRtcAec_Create(&handle) != apm_->kNoError) {
+    handle = NULL;
+  } else {
+    assert(handle != NULL);
+  }
+
+  return handle;
+}
+
+int EchoCancellationImpl::DestroyHandle(void* handle) const {
+  assert(handle != NULL);
+  return WebRtcAec_Free(static_cast<Handle*>(handle));
+}
+
+// Initializes an instance with the APM sample rate and the device rate
+// (the latter is what drift compensation is measured against).
+int EchoCancellationImpl::InitializeHandle(void* handle) const {
+  assert(handle != NULL);
+  return WebRtcAec_Init(static_cast<Handle*>(handle),
+                       apm_->sample_rate_hz(),
+                       device_sample_rate_hz_);
+}
+
+// Pushes the current member settings into one AEC instance.
+int EchoCancellationImpl::ConfigureHandle(void* handle) const {
+  assert(handle != NULL);
+  AecConfig config;
+  config.metricsMode = metrics_enabled_;
+  config.nlpMode = MapSetting(suppression_level_);
+  config.skewMode = drift_compensation_enabled_;
+  config.delay_logging = delay_logging_enabled_;
+
+  return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
+}
+
+// One AEC per (output channel, reverse channel) pair; see the handle
+// indexing convention in ProcessRenderAudio()/ProcessCaptureAudio().
+int EchoCancellationImpl::num_handles_required() const {
+  return apm_->num_output_channels() *
+         apm_->num_reverse_channels();
+}
+
+// Translates the instance's last error into a public AudioProcessing code.
+int EchoCancellationImpl::GetHandleError(void* handle) const {
+  assert(handle != NULL);
+  return MapError(WebRtcAec_get_error_code(static_cast<Handle*>(handle)));
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/echo_cancellation_impl.h b/trunk/src/modules/audio_processing/echo_cancellation_impl.h
new file mode 100644
index 0000000..3c2198c
--- /dev/null
+++ b/trunk/src/modules/audio_processing/echo_cancellation_impl.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+// Implementation of the EchoCancellation (AEC) submodule of the audio
+// processing module. Wraps one WebRtcAec handle per capture/render channel
+// pair via the ProcessingComponent base class.
+class EchoCancellationImpl : public EchoCancellation,
+                             public ProcessingComponent {
+ public:
+  explicit EchoCancellationImpl(const AudioProcessingImpl* apm);
+  virtual ~EchoCancellationImpl();
+
+  // Buffers far-end (render) audio for the AEC.
+  int ProcessRenderAudio(const AudioBuffer* audio);
+  // Runs echo cancellation on near-end (capture) audio in place.
+  int ProcessCaptureAudio(AudioBuffer* audio);
+
+  // EchoCancellation implementation.
+  virtual bool is_enabled() const;
+  virtual int device_sample_rate_hz() const;
+  virtual int stream_drift_samples() const;
+
+  // ProcessingComponent implementation.
+  virtual int Initialize();
+
+ private:
+  // EchoCancellation implementation.
+  virtual int Enable(bool enable);
+  virtual int enable_drift_compensation(bool enable);
+  virtual bool is_drift_compensation_enabled() const;
+  virtual int set_device_sample_rate_hz(int rate);
+  virtual int set_stream_drift_samples(int drift);
+  virtual int set_suppression_level(SuppressionLevel level);
+  virtual SuppressionLevel suppression_level() const;
+  virtual int enable_metrics(bool enable);
+  virtual bool are_metrics_enabled() const;
+  virtual bool stream_has_echo() const;
+  virtual int GetMetrics(Metrics* metrics);
+  virtual int enable_delay_logging(bool enable);
+  virtual bool is_delay_logging_enabled() const;
+  virtual int GetDelayMetrics(int* median, int* std);
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  const AudioProcessingImpl* apm_;  // Not owned.
+  bool drift_compensation_enabled_;
+  bool metrics_enabled_;
+  SuppressionLevel suppression_level_;
+  int device_sample_rate_hz_;
+  int stream_drift_samples_;
+  // True once set_stream_drift_samples() has been called for this stream.
+  bool was_stream_drift_set_;
+  bool stream_has_echo_;
+  bool delay_logging_enabled_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/echo_control_mobile_impl.cc b/trunk/src/modules/audio_processing/echo_control_mobile_impl.cc
new file mode 100644
index 0000000..9427789
--- /dev/null
+++ b/trunk/src/modules/audio_processing/echo_control_mobile_impl.cc
@@ -0,0 +1,300 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "echo_control_mobile_impl.h"
+
+#include <cassert>
+#include <cstring>
+
+#include "critical_section_wrapper.h"
+#include "echo_control_mobile.h"
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+
+namespace webrtc {
+
+typedef void Handle;
+
+namespace {
+// Maps the public RoutingMode enum onto the integer echo-mode values the
+// AECM library expects. Returns -1 (after asserting in debug builds) for an
+// unknown mode, which callers treat as a bad-parameter error.
+WebRtc_Word16 MapSetting(EchoControlMobile::RoutingMode mode) {
+  switch (mode) {
+    case EchoControlMobile::kQuietEarpieceOrHeadset:
+      return 0;
+    case EchoControlMobile::kEarpiece:
+      return 1;
+    case EchoControlMobile::kLoudEarpiece:
+      return 2;
+    case EchoControlMobile::kSpeakerphone:
+      return 3;
+    case EchoControlMobile::kLoudSpeakerphone:
+      return 4;
+  }
+  assert(false);
+  return -1;
+}
+
+// Translates AECM-library error codes to AudioProcessing error codes.
+// Unrecognized codes collapse to kUnspecifiedError.
+AudioProcessing::Error MapError(int err) {
+  switch (err) {
+    case AECM_UNSUPPORTED_FUNCTION_ERROR:
+      return AudioProcessing::kUnsupportedFunctionError;
+    case AECM_NULL_POINTER_ERROR:
+      return AudioProcessing::kNullPointerError;
+    case AECM_BAD_PARAMETER_ERROR:
+      return AudioProcessing::kBadParameterError;
+    case AECM_BAD_PARAMETER_WARNING:
+      return AudioProcessing::kBadStreamParameterWarning;
+    default:
+      // AECM_UNSPECIFIED_ERROR
+      // AECM_UNINITIALIZED_ERROR
+      return AudioProcessing::kUnspecifiedError;
+  }
+}
+}  // namespace
+
+// Size in bytes of the opaque echo-path blob exchanged via
+// Set/GetEchoPath(); delegates to the AECM library.
+size_t EchoControlMobile::echo_path_size_bytes() {
+    return WebRtcAecm_echo_path_size_bytes();
+}
+
+// Constructs the AECM component with its defaults: speakerphone routing,
+// comfort noise on, no externally supplied echo path.
+EchoControlMobileImpl::EchoControlMobileImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm),
+    routing_mode_(kSpeakerphone),
+    comfort_noise_enabled_(true),
+    external_echo_path_(NULL) {}
+
+// Releases the externally supplied echo path copy, if one was set.
+EchoControlMobileImpl::~EchoControlMobileImpl() {
+    if (external_echo_path_ != NULL) {
+      delete [] external_echo_path_;
+      external_echo_path_ = NULL;
+    }
+}
+
+// Buffers far-end (render) audio into every AECM instance. Expects the
+// low-pass split band with at most 160 samples per channel (i.e. 10 ms at
+// up to 16 kHz per band).
+int EchoControlMobileImpl::ProcessRenderAudio(const AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == apm_->num_reverse_channels());
+
+  int err = apm_->kNoError;
+
+  // The ordering convention must be followed to pass to the correct AECM.
+  size_t handle_index = 0;
+  for (int i = 0; i < apm_->num_output_channels(); i++) {
+    for (int j = 0; j < audio->num_channels(); j++) {
+      Handle* my_handle = static_cast<Handle*>(handle(handle_index));
+      err = WebRtcAecm_BufferFarend(
+          my_handle,
+          audio->low_pass_split_data(j),
+          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()));
+
+      if (err != apm_->kNoError) {
+        return GetHandleError(my_handle);  // TODO(ajm): warning possible?
+      }
+
+      handle_index++;
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Runs AECM echo suppression in place on the capture (near-end) audio.
+// Requires that the application reported the stream delay for this frame.
+int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  if (!apm_->was_stream_delay_set()) {
+    return apm_->kStreamParameterNotSetError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == apm_->num_output_channels());
+
+  int err = apm_->kNoError;
+
+  // The ordering convention must be followed to pass to the correct AECM.
+  size_t handle_index = 0;
+  for (int i = 0; i < audio->num_channels(); i++) {
+    // TODO(ajm): improve how this works, possibly inside AECM.
+    //            This is kind of hacked up.
+    // When a pre-NS (low-pass reference) copy exists, pass it as the noisy
+    // signal and the current data as the clean signal; otherwise pass only
+    // the noisy signal.
+    WebRtc_Word16* noisy = audio->low_pass_reference(i);
+    WebRtc_Word16* clean = audio->low_pass_split_data(i);
+    if (noisy == NULL) {
+      noisy = clean;
+      clean = NULL;
+    }
+    for (int j = 0; j < apm_->num_reverse_channels(); j++) {
+      Handle* my_handle = static_cast<Handle*>(handle(handle_index));
+      err = WebRtcAecm_Process(
+          my_handle,
+          noisy,
+          clean,
+          audio->low_pass_split_data(i),
+          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()),
+          apm_->stream_delay_ms());
+
+      if (err != apm_->kNoError) {
+        return GetHandleError(my_handle);  // TODO(ajm): warning possible?
+      }
+
+      handle_index++;
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Enables/disables AECM. AEC and AECM are mutually exclusive; enabling
+// AECM while the full AEC is active is rejected as a bad parameter.
+int EchoControlMobileImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  // Ensure AEC and AECM are not both enabled.
+  if (enable && apm_->echo_cancellation()->is_enabled()) {
+    return apm_->kBadParameterError;
+  }
+
+  return EnableComponent(enable);
+}
+
+bool EchoControlMobileImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+// Sets the device routing mode and reconfigures all AECM instances.
+// Rejects values that MapSetting() cannot translate.
+int EchoControlMobileImpl::set_routing_mode(RoutingMode mode) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (MapSetting(mode) == -1) {
+    return apm_->kBadParameterError;
+  }
+
+  routing_mode_ = mode;
+  return Configure();
+}
+
+EchoControlMobile::RoutingMode EchoControlMobileImpl::routing_mode()
+    const {
+  return routing_mode_;
+}
+
+// Toggles comfort-noise generation and reconfigures all AECM instances.
+int EchoControlMobileImpl::enable_comfort_noise(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  comfort_noise_enabled_ = enable;
+  return Configure();
+}
+
+bool EchoControlMobileImpl::is_comfort_noise_enabled() const {
+  return comfort_noise_enabled_;
+}
+
+// Stores a caller-supplied echo path (copied into an internally owned
+// buffer) and reinitializes so that InitializeHandle() loads it into each
+// AECM instance. |size_bytes| must equal echo_path_size_bytes().
+int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
+                                       size_t size_bytes) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (echo_path == NULL) {
+    return apm_->kNullPointerError;
+  }
+  if (size_bytes != echo_path_size_bytes()) {
+    // Size mismatch
+    return apm_->kBadParameterError;
+  }
+
+  // Lazily allocate the internal copy on first use; the buffer size is
+  // fixed, so it can be reused on subsequent calls.
+  if (external_echo_path_ == NULL) {
+    external_echo_path_ = new unsigned char[size_bytes];
+  }
+  memcpy(external_echo_path_, echo_path, size_bytes);
+
+  return Initialize();
+}
+
+// Copies the current echo path estimate into |echo_path|. The component
+// must be enabled, and |size_bytes| must equal echo_path_size_bytes().
+int EchoControlMobileImpl::GetEchoPath(void* echo_path,
+                                       size_t size_bytes) const {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (echo_path == NULL) {
+    return apm_->kNullPointerError;
+  }
+  if (size_bytes != echo_path_size_bytes()) {
+    // Size mismatch
+    return apm_->kBadParameterError;
+  }
+  if (!is_component_enabled()) {
+    return apm_->kNotEnabledError;
+  }
+
+  // Get the echo path from the first channel
+  Handle* my_handle = static_cast<Handle*>(handle(0));
+  if (WebRtcAecm_GetEchoPath(my_handle, echo_path, size_bytes) != 0) {
+      return GetHandleError(my_handle);
+  }
+
+  return apm_->kNoError;
+}
+
+// Reinitializes the AECM instances. A no-op when disabled; fails with
+// kBadSampleRateError at 32 kHz since AECM lacks super-wideband support.
+int EchoControlMobileImpl::Initialize() {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  if (apm_->sample_rate_hz() == apm_->kSampleRate32kHz) {
+    // AECM doesn't support super-wideband.
+    return apm_->kBadSampleRateError;
+  }
+
+  return ProcessingComponent::Initialize();
+}
+
+// Allocates one AECM instance; returns NULL on allocation failure so the
+// base class can propagate the error.
+void* EchoControlMobileImpl::CreateHandle() const {
+  Handle* handle = NULL;
+  if (WebRtcAecm_Create(&handle) != apm_->kNoError) {
+    handle = NULL;
+  } else {
+    assert(handle != NULL);
+  }
+
+  return handle;
+}
+
+// Frees an AECM instance previously returned by CreateHandle().
+int EchoControlMobileImpl::DestroyHandle(void* handle) const {
+  // Assert non-NULL for consistency with EchoCancellationImpl::DestroyHandle
+  // and the other handle methods in this file.
+  assert(handle != NULL);
+  return WebRtcAecm_Free(static_cast<Handle*>(handle));
+}
+
+// Initializes one AECM instance at the current APM sample rate and, if an
+// external echo path was supplied via SetEchoPath(), loads it.
+int EchoControlMobileImpl::InitializeHandle(void* handle) const {
+  assert(handle != NULL);
+  Handle* my_handle = static_cast<Handle*>(handle);
+  if (WebRtcAecm_Init(my_handle, apm_->sample_rate_hz()) != 0) {
+    return GetHandleError(my_handle);
+  }
+  if (external_echo_path_ != NULL) {
+    if (WebRtcAecm_InitEchoPath(my_handle,
+                                external_echo_path_,
+                                echo_path_size_bytes()) != 0) {
+      return GetHandleError(my_handle);
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Pushes the cached comfort-noise and routing-mode settings to one AECM
+// instance.
+int EchoControlMobileImpl::ConfigureHandle(void* handle) const {
+  // Assert non-NULL for consistency with EchoCancellationImpl::ConfigureHandle
+  // and the other handle methods in this file.
+  assert(handle != NULL);
+  AecmConfig config;
+  config.cngMode = comfort_noise_enabled_;
+  config.echoMode = MapSetting(routing_mode_);
+
+  return WebRtcAecm_set_config(static_cast<Handle*>(handle), config);
+}
+
+// One AECM instance is required per capture/render channel pair.
+int EchoControlMobileImpl::num_handles_required() const {
+  return apm_->num_output_channels() *
+         apm_->num_reverse_channels();
+}
+
+// Translates the AECM-internal error code of |handle| to an APM error code.
+int EchoControlMobileImpl::GetHandleError(void* handle) const {
+  assert(handle != NULL);
+  return MapError(WebRtcAecm_get_error_code(static_cast<Handle*>(handle)));
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/echo_control_mobile_impl.h b/trunk/src/modules/audio_processing/echo_control_mobile_impl.h
new file mode 100644
index 0000000..6d9e369
--- /dev/null
+++ b/trunk/src/modules/audio_processing/echo_control_mobile_impl.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+// Implementation of the EchoControlMobile (AECM) submodule of the audio
+// processing module. Wraps one WebRtcAecm handle per capture/render channel
+// pair via the ProcessingComponent base class.
+class EchoControlMobileImpl : public EchoControlMobile,
+                              public ProcessingComponent {
+ public:
+  explicit EchoControlMobileImpl(const AudioProcessingImpl* apm);
+  virtual ~EchoControlMobileImpl();
+
+  // Buffers far-end (render) audio for the AECM.
+  int ProcessRenderAudio(const AudioBuffer* audio);
+  // Runs echo suppression on near-end (capture) audio in place.
+  int ProcessCaptureAudio(AudioBuffer* audio);
+
+  // EchoControlMobile implementation.
+  virtual bool is_enabled() const;
+
+  // ProcessingComponent implementation.
+  virtual int Initialize();
+
+ private:
+  // EchoControlMobile implementation.
+  virtual int Enable(bool enable);
+  virtual int set_routing_mode(RoutingMode mode);
+  virtual RoutingMode routing_mode() const;
+  virtual int enable_comfort_noise(bool enable);
+  virtual bool is_comfort_noise_enabled() const;
+  virtual int SetEchoPath(const void* echo_path, size_t size_bytes);
+  virtual int GetEchoPath(void* echo_path, size_t size_bytes) const;
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  const AudioProcessingImpl* apm_;  // Not owned.
+  RoutingMode routing_mode_;
+  bool comfort_noise_enabled_;
+  // Heap copy of a caller-supplied echo path; NULL if none was set.
+  unsigned char* external_echo_path_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/gain_control_impl.cc b/trunk/src/modules/audio_processing/gain_control_impl.cc
new file mode 100644
index 0000000..a518ab5
--- /dev/null
+++ b/trunk/src/modules/audio_processing/gain_control_impl.cc
@@ -0,0 +1,367 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gain_control_impl.h"
+
+#include <cassert>
+
+#include "critical_section_wrapper.h"
+#include "gain_control.h"
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+
+namespace webrtc {
+
+typedef void Handle;
+
+namespace {
+// Maps the public GainControl::Mode enum onto the AGC library's mode
+// constants. Returns -1 (after asserting in debug builds) for an unknown
+// mode, which callers treat as a bad-parameter error.
+WebRtc_Word16 MapSetting(GainControl::Mode mode) {
+  switch (mode) {
+    case GainControl::kAdaptiveAnalog:
+      return kAgcModeAdaptiveAnalog;
+    case GainControl::kAdaptiveDigital:
+      return kAgcModeAdaptiveDigital;
+    case GainControl::kFixedDigital:
+      return kAgcModeFixedDigital;
+  }
+  assert(false);
+  return -1;
+}
+}  // namespace
+
+// Constructs the AGC component with its defaults: adaptive-analog mode,
+// analog level range [0, 255], limiter on, target level 3 dBFS below
+// full scale, 9 dB compression gain.
+GainControlImpl::GainControlImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm),
+    mode_(kAdaptiveAnalog),
+    minimum_capture_level_(0),
+    maximum_capture_level_(255),
+    limiter_enabled_(true),
+    target_level_dbfs_(3),
+    compression_gain_db_(9),
+    analog_capture_level_(0),
+    was_analog_level_set_(false),
+    stream_is_saturated_(false) {}
+
+GainControlImpl::~GainControlImpl() {}
+
+// Feeds far-end (render) audio to every AGC instance. Multi-channel render
+// audio is mixed down to mono first.
+int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+
+  WebRtc_Word16* mixed_data = audio->low_pass_split_data(0);
+  if (audio->num_channels() > 1) {
+    audio->CopyAndMixLowPass(1);
+    mixed_data = audio->mixed_low_pass_data(0);
+  }
+
+  for (int i = 0; i < num_handles(); i++) {
+    Handle* my_handle = static_cast<Handle*>(handle(i));
+    int err = WebRtcAgc_AddFarend(
+        my_handle,
+        mixed_data,
+        static_cast<WebRtc_Word16>(audio->samples_per_split_channel()));
+
+    if (err != apm_->kNoError) {
+      return GetHandleError(my_handle);
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Pre-processing analysis pass on capture audio, run before ProcessStream's
+// main processing. In adaptive-analog mode the mic signal is handed to the
+// AGC for level analysis; in adaptive-digital mode the AGC's "virtual mic"
+// simulates an analog level and updates the per-channel capture levels.
+int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == num_handles());
+
+  int err = apm_->kNoError;
+
+  if (mode_ == kAdaptiveAnalog) {
+    for (int i = 0; i < num_handles(); i++) {
+      Handle* my_handle = static_cast<Handle*>(handle(i));
+      err = WebRtcAgc_AddMic(
+          my_handle,
+          audio->low_pass_split_data(i),
+          audio->high_pass_split_data(i),
+          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()));
+
+      if (err != apm_->kNoError) {
+        return GetHandleError(my_handle);
+      }
+    }
+  } else if (mode_ == kAdaptiveDigital) {
+
+    for (int i = 0; i < num_handles(); i++) {
+      Handle* my_handle = static_cast<Handle*>(handle(i));
+      WebRtc_Word32 capture_level_out = 0;
+
+      // All channels are fed the shared analog level; each returns its own
+      // updated level.
+      err = WebRtcAgc_VirtualMic(
+          my_handle,
+          audio->low_pass_split_data(i),
+          audio->high_pass_split_data(i),
+          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()),
+          //capture_levels_[i],
+          analog_capture_level_,
+          &capture_level_out);
+
+      capture_levels_[i] = capture_level_out;
+
+      if (err != apm_->kNoError) {
+        return GetHandleError(my_handle);
+      }
+
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Applies gain control in place on the capture audio. In adaptive-analog
+// mode the application must have reported the analog level for this frame
+// (set_stream_analog_level()); the recommended level for the next frame is
+// averaged across channels into analog_capture_level_.
+int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  if (mode_ == kAdaptiveAnalog && !was_analog_level_set_) {
+    return apm_->kStreamParameterNotSetError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == num_handles());
+
+  stream_is_saturated_ = false;
+  for (int i = 0; i < num_handles(); i++) {
+    Handle* my_handle = static_cast<Handle*>(handle(i));
+    WebRtc_Word32 capture_level_out = 0;
+    WebRtc_UWord8 saturation_warning = 0;
+
+    // In-place processing: the same split-band buffers are passed as input
+    // and output.
+    int err = WebRtcAgc_Process(
+        my_handle,
+        audio->low_pass_split_data(i),
+        audio->high_pass_split_data(i),
+        static_cast<WebRtc_Word16>(audio->samples_per_split_channel()),
+        audio->low_pass_split_data(i),
+        audio->high_pass_split_data(i),
+        capture_levels_[i],
+        &capture_level_out,
+        apm_->echo_cancellation()->stream_has_echo(),
+        &saturation_warning);
+
+    if (err != apm_->kNoError) {
+      return GetHandleError(my_handle);
+    }
+
+    capture_levels_[i] = capture_level_out;
+    if (saturation_warning == 1) {
+      stream_is_saturated_ = true;
+    }
+  }
+
+  if (mode_ == kAdaptiveAnalog) {
+    // Take the analog level to be the average across the handles.
+    analog_capture_level_ = 0;
+    for (int i = 0; i < num_handles(); i++) {
+      analog_capture_level_ += capture_levels_[i];
+    }
+
+    analog_capture_level_ /= num_handles();
+  }
+
+  // The level must be reported again before the next frame.
+  was_analog_level_set_ = false;
+  return apm_->kNoError;
+}
+
+// TODO(ajm): ensure this is called under kAdaptiveAnalog.
+// Records the analog mic level the application applied for this frame.
+// Must be within [analog_level_minimum(), analog_level_maximum()].
+// NOTE(review): was_analog_level_set_ is set before validation, so a
+// rejected level still counts as "set" for this frame — confirm intended.
+int GainControlImpl::set_stream_analog_level(int level) {
+  was_analog_level_set_ = true;
+  if (level < minimum_capture_level_ || level > maximum_capture_level_) {
+    return apm_->kBadParameterError;
+  }
+
+  if (mode_ == kAdaptiveAnalog) {
+    if (level != analog_capture_level_) {
+      // The analog level has been changed; update our internal levels.
+      capture_levels_.assign(num_handles(), level);
+    }
+  }
+  analog_capture_level_ = level;
+
+  return apm_->kNoError;
+}
+
+// Returns the analog level recommended by the AGC for the next frame.
+int GainControlImpl::stream_analog_level() {
+  // TODO(ajm): enable this assertion?
+  //assert(mode_ == kAdaptiveAnalog);
+
+  return analog_capture_level_;
+}
+
+int GainControlImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  return EnableComponent(enable);
+}
+
+bool GainControlImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+// Sets the AGC operating mode and reinitializes the instances (a mode
+// change requires a full WebRtcAgc_Init, not just a reconfigure).
+int GainControlImpl::set_mode(Mode mode) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (MapSetting(mode) == -1) {
+    return apm_->kBadParameterError;
+  }
+
+  mode_ = mode;
+  return Initialize();
+}
+
+GainControl::Mode GainControlImpl::mode() const {
+  return mode_;
+}
+
+// Sets the allowed analog level range, with 0 <= minimum <= maximum
+// <= 65535, then reinitializes (the limits are WebRtcAgc_Init parameters).
+int GainControlImpl::set_analog_level_limits(int minimum,
+                                             int maximum) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (minimum < 0) {
+    return apm_->kBadParameterError;
+  }
+
+  if (maximum > 65535) {
+    return apm_->kBadParameterError;
+  }
+
+  if (maximum < minimum) {
+    return apm_->kBadParameterError;
+  }
+
+  minimum_capture_level_ = minimum;
+  maximum_capture_level_ = maximum;
+
+  return Initialize();
+}
+
+int GainControlImpl::analog_level_minimum() const {
+  return minimum_capture_level_;
+}
+
+int GainControlImpl::analog_level_maximum() const {
+  return maximum_capture_level_;
+}
+
+// True if the previous ProcessCaptureAudio() reported saturation.
+bool GainControlImpl::stream_is_saturated() const {
+  return stream_is_saturated_;
+}
+
+// Sets the target peak level, in dB below full scale, range [0, 31],
+// then reconfigures the AGC instances.
+int GainControlImpl::set_target_level_dbfs(int level) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (level > 31 || level < 0) {
+    return apm_->kBadParameterError;
+  }
+
+  target_level_dbfs_ = level;
+  return Configure();
+}
+
+int GainControlImpl::target_level_dbfs() const {
+  return target_level_dbfs_;
+}
+
+// Sets the maximum digital compression gain in dB, range [0, 90], then
+// reconfigures the AGC instances.
+int GainControlImpl::set_compression_gain_db(int gain) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (gain < 0 || gain > 90) {
+    return apm_->kBadParameterError;
+  }
+
+  compression_gain_db_ = gain;
+  return Configure();
+}
+
+int GainControlImpl::compression_gain_db() const {
+  return compression_gain_db_;
+}
+
+// Toggles the hard limiter and reconfigures the AGC instances.
+int GainControlImpl::enable_limiter(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  limiter_enabled_ = enable;
+  return Configure();
+}
+
+bool GainControlImpl::is_limiter_enabled() const {
+  return limiter_enabled_;
+}
+
+// Reinitializes the AGC instances and resets the per-channel capture
+// levels to the midpoint of the configured analog range.
+int GainControlImpl::Initialize() {
+  int err = ProcessingComponent::Initialize();
+  if (err != apm_->kNoError || !is_component_enabled()) {
+    return err;
+  }
+
+  analog_capture_level_ =
+      (maximum_capture_level_ - minimum_capture_level_) >> 1;
+  capture_levels_.assign(num_handles(), analog_capture_level_);
+  was_analog_level_set_ = false;
+
+  return apm_->kNoError;
+}
+
+// Allocates one AGC instance; returns NULL on allocation failure so the
+// base class can propagate the error.
+void* GainControlImpl::CreateHandle() const {
+  Handle* handle = NULL;
+  if (WebRtcAgc_Create(&handle) != apm_->kNoError) {
+    handle = NULL;
+  } else {
+    assert(handle != NULL);
+  }
+
+  return handle;
+}
+
+int GainControlImpl::DestroyHandle(void* handle) const {
+  return WebRtcAgc_Free(static_cast<Handle*>(handle));
+}
+
+// Initializes one AGC instance with the analog limits, mode, and the
+// current APM sample rate.
+int GainControlImpl::InitializeHandle(void* handle) const {
+  return WebRtcAgc_Init(static_cast<Handle*>(handle),
+                          minimum_capture_level_,
+                          maximum_capture_level_,
+                          MapSetting(mode_),
+                          apm_->sample_rate_hz());
+}
+
+// Pushes the cached target level, compression gain, and limiter settings
+// to one AGC instance.
+int GainControlImpl::ConfigureHandle(void* handle) const {
+  WebRtcAgc_config_t config;
+  // TODO(ajm): Flip the sign here (since AGC expects a positive value) if we
+  //            change the interface.
+  //assert(target_level_dbfs_ <= 0);
+  //config.targetLevelDbfs = static_cast<WebRtc_Word16>(-target_level_dbfs_);
+  config.targetLevelDbfs = static_cast<WebRtc_Word16>(target_level_dbfs_);
+  config.compressionGaindB =
+      static_cast<WebRtc_Word16>(compression_gain_db_);
+  config.limiterEnable = limiter_enabled_;
+
+  return WebRtcAgc_set_config(static_cast<Handle*>(handle), config);
+}
+
+// One AGC instance is required per capture channel.
+int GainControlImpl::num_handles_required() const {
+  return apm_->num_output_channels();
+}
+
+int GainControlImpl::GetHandleError(void* handle) const {
+  // The AGC has no get_error() function.
+  // (Despite listing errors in its interface...)
+  assert(handle != NULL);
+  return apm_->kUnspecifiedError;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/gain_control_impl.h b/trunk/src/modules/audio_processing/gain_control_impl.h
new file mode 100644
index 0000000..5915eeb
--- /dev/null
+++ b/trunk/src/modules/audio_processing/gain_control_impl.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_GAIN_CONTROL_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_GAIN_CONTROL_IMPL_H_
+
+#include <vector>
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+// Implementation of the GainControl (AGC) submodule of the audio processing
+// module. Wraps one WebRtcAgc handle per capture channel via the
+// ProcessingComponent base class.
+class GainControlImpl : public GainControl,
+                        public ProcessingComponent {
+ public:
+  explicit GainControlImpl(const AudioProcessingImpl* apm);
+  virtual ~GainControlImpl();
+
+  // Feeds far-end (render) audio to the AGC.
+  int ProcessRenderAudio(AudioBuffer* audio);
+  // Analysis pass on capture audio, run before main processing.
+  int AnalyzeCaptureAudio(AudioBuffer* audio);
+  // Applies gain control in place on capture audio.
+  int ProcessCaptureAudio(AudioBuffer* audio);
+
+  // ProcessingComponent implementation.
+  virtual int Initialize();
+
+  // GainControl implementation.
+  virtual bool is_enabled() const;
+  virtual int stream_analog_level();
+
+ private:
+  // GainControl implementation.
+  virtual int Enable(bool enable);
+  virtual int set_stream_analog_level(int level);
+  virtual int set_mode(Mode mode);
+  virtual Mode mode() const;
+  virtual int set_target_level_dbfs(int level);
+  virtual int target_level_dbfs() const;
+  virtual int set_compression_gain_db(int gain);
+  virtual int compression_gain_db() const;
+  virtual int enable_limiter(bool enable);
+  virtual bool is_limiter_enabled() const;
+  virtual int set_analog_level_limits(int minimum, int maximum);
+  virtual int analog_level_minimum() const;
+  virtual int analog_level_maximum() const;
+  virtual bool stream_is_saturated() const;
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  const AudioProcessingImpl* apm_;  // Not owned.
+  Mode mode_;
+  int minimum_capture_level_;
+  int maximum_capture_level_;
+  bool limiter_enabled_;
+  int target_level_dbfs_;
+  int compression_gain_db_;
+  // Per-channel capture levels, updated each processed frame.
+  std::vector<int> capture_levels_;
+  // Shared analog level: reported by the app, recommended back by the AGC.
+  int analog_capture_level_;
+  // True once set_stream_analog_level() has been called for this frame.
+  bool was_analog_level_set_;
+  bool stream_is_saturated_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_GAIN_CONTROL_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/high_pass_filter_impl.cc b/trunk/src/modules/audio_processing/high_pass_filter_impl.cc
new file mode 100644
index 0000000..b20fed8
--- /dev/null
+++ b/trunk/src/modules/audio_processing/high_pass_filter_impl.cc
@@ -0,0 +1,173 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "high_pass_filter_impl.h"
+
+#include <cassert>
+
+#include "critical_section_wrapper.h"
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+
+namespace webrtc {
+namespace {
+// High-pass filter coefficients, ordered {b[0], b[1], b[2], -a[1], -a[2]}
+// as consumed by Filter() below. One set for 8 kHz, one for higher rates.
+const WebRtc_Word16 kFilterCoefficients8kHz[5] =
+    {3798, -7596, 3798, 7807, -3733};
+
+const WebRtc_Word16 kFilterCoefficients[5] =
+    {4012, -8024, 4012, 8002, -3913};
+
+// Per-channel second-order filter state.
+struct FilterState {
+  WebRtc_Word16 y[4];  // Output history; stored as high/low word pairs
+                       // (see Filter()).
+  WebRtc_Word16 x[2];  // Input history x[i-1], x[i-2].
+  const WebRtc_Word16* ba;  // Active coefficient set (not owned).
+};
+
+// Selects the coefficient set for |sample_rate_hz| and zeroes the filter
+// state.
+int InitializeFilter(FilterState* hpf, int sample_rate_hz) {
+  assert(hpf != NULL);
+
+  if (sample_rate_hz == AudioProcessingImpl::kSampleRate8kHz) {
+    hpf->ba = kFilterCoefficients8kHz;
+  } else {
+    hpf->ba = kFilterCoefficients;
+  }
+
+  WebRtcSpl_MemSetW16(hpf->x, 0, 2);
+  WebRtcSpl_MemSetW16(hpf->y, 0, 4);
+
+  return AudioProcessing::kNoError;
+}
+
+// Applies the second-order high-pass filter to |data| in place, updating
+// the state in |hpf|. Fixed-point implementation: the output history is
+// kept in double precision (high word in y[0]/y[2], low word in y[1]/y[3])
+// to limit accumulated rounding error.
+int Filter(FilterState* hpf, WebRtc_Word16* data, int length) {
+  assert(hpf != NULL);
+
+  WebRtc_Word32 tmp_int32 = 0;
+  WebRtc_Word16* y = hpf->y;
+  WebRtc_Word16* x = hpf->x;
+  const WebRtc_Word16* ba = hpf->ba;
+
+  for (int i = 0; i < length; i++) {
+    //  y[i] = b[0] * x[i] + b[1] * x[i-1] + b[2] * x[i-2]
+    //         + -a[1] * y[i-1] + -a[2] * y[i-2];
+
+    tmp_int32 =
+        WEBRTC_SPL_MUL_16_16(y[1], ba[3]); // -a[1] * y[i-1] (low part)
+    tmp_int32 +=
+        WEBRTC_SPL_MUL_16_16(y[3], ba[4]); // -a[2] * y[i-2] (low part)
+    tmp_int32 = (tmp_int32 >> 15);
+    tmp_int32 +=
+        WEBRTC_SPL_MUL_16_16(y[0], ba[3]); // -a[1] * y[i-1] (high part)
+    tmp_int32 +=
+        WEBRTC_SPL_MUL_16_16(y[2], ba[4]); // -a[2] * y[i-2] (high part)
+    tmp_int32 = (tmp_int32 << 1);
+
+    tmp_int32 += WEBRTC_SPL_MUL_16_16(data[i], ba[0]); // b[0]*x[0]
+    tmp_int32 += WEBRTC_SPL_MUL_16_16(x[0], ba[1]);    // b[1]*x[i-1]
+    tmp_int32 += WEBRTC_SPL_MUL_16_16(x[1], ba[2]);    // b[2]*x[i-2]
+
+    // Update state (input part)
+    x[1] = x[0];
+    x[0] = data[i];
+
+    // Update state (filtered part)
+    y[2] = y[0];
+    y[3] = y[1];
+    y[0] = static_cast<WebRtc_Word16>(tmp_int32 >> 13);
+    y[1] = static_cast<WebRtc_Word16>((tmp_int32 -
+        WEBRTC_SPL_LSHIFT_W32(static_cast<WebRtc_Word32>(y[0]), 13)) << 2);
+
+    // Rounding in Q12, i.e. add 2^11
+    tmp_int32 += 2048;
+
+    // Saturate (to 2^27) so that the HP filtered signal does not overflow
+    tmp_int32 = WEBRTC_SPL_SAT(static_cast<WebRtc_Word32>(134217727),
+                               tmp_int32,
+                               static_cast<WebRtc_Word32>(-134217728));
+
+    // Convert back to Q0 and use rounding
+    data[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp_int32, 12);
+
+  }
+
+  return AudioProcessing::kNoError;
+}
+}  // namespace
+
+typedef FilterState Handle;
+
+// Constructs the high-pass filter component; all per-channel state lives
+// in the handles managed by ProcessingComponent.
+HighPassFilterImpl::HighPassFilterImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm) {}
+
+HighPassFilterImpl::~HighPassFilterImpl() {}
+
+// High-pass filters each capture channel's low-pass split band in place.
+int HighPassFilterImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+  int err = apm_->kNoError;
+
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  assert(audio->samples_per_split_channel() <= 160);
+
+  for (int i = 0; i < num_handles(); i++) {
+    Handle* my_handle = static_cast<Handle*>(handle(i));
+    err = Filter(my_handle,
+                 audio->low_pass_split_data(i),
+                 audio->samples_per_split_channel());
+
+    if (err != apm_->kNoError) {
+      return GetHandleError(my_handle);
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+int HighPassFilterImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  return EnableComponent(enable);
+}
+
+bool HighPassFilterImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+// Allocates one per-channel filter state. Zeroing is deferred to
+// InitializeHandle().
+void* HighPassFilterImpl::CreateHandle() const {
+  return new FilterState;
+}
+
+int HighPassFilterImpl::DestroyHandle(void* handle) const {
+  delete static_cast<Handle*>(handle);
+  return apm_->kNoError;
+}
+
+// Resets one filter state for the current APM sample rate.
+int HighPassFilterImpl::InitializeHandle(void* handle) const {
+  return InitializeFilter(static_cast<Handle*>(handle),
+                          apm_->sample_rate_hz());
+}
+
+int HighPassFilterImpl::ConfigureHandle(void* /*handle*/) const {
+  return apm_->kNoError; // Not configurable.
+}
+
+// One filter state is required per capture channel.
+int HighPassFilterImpl::num_handles_required() const {
+  return apm_->num_output_channels();
+}
+
+int HighPassFilterImpl::GetHandleError(void* handle) const {
+  // The component has no detailed errors.
+  assert(handle != NULL);
+  return apm_->kUnspecifiedError;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/high_pass_filter_impl.h b/trunk/src/modules/audio_processing/high_pass_filter_impl.h
new file mode 100644
index 0000000..94a9c89
--- /dev/null
+++ b/trunk/src/modules/audio_processing/high_pass_filter_impl.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+class HighPassFilterImpl : public HighPassFilter,
+                           public ProcessingComponent {
+ public:
+  explicit HighPassFilterImpl(const AudioProcessingImpl* apm);
+  virtual ~HighPassFilterImpl();
+
+  int ProcessCaptureAudio(AudioBuffer* audio);
+
+  // HighPassFilter implementation.
+  virtual bool is_enabled() const;
+
+ private:
+  // HighPassFilter implementation.
+  virtual int Enable(bool enable);
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  const AudioProcessingImpl* apm_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/include/audio_processing.h b/trunk/src/modules/audio_processing/include/audio_processing.h
new file mode 100644
index 0000000..aefb824
--- /dev/null
+++ b/trunk/src/modules/audio_processing/include/audio_processing.h
@@ -0,0 +1,595 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
+
+#include <stddef.h> // size_t
+
+#include "typedefs.h"
+#include "module.h"
+
+namespace webrtc {
+
+class AudioFrame;
+class EchoCancellation;
+class EchoControlMobile;
+class GainControl;
+class HighPassFilter;
+class LevelEstimator;
+class NoiseSuppression;
+class VoiceDetection;
+
+// The Audio Processing Module (APM) provides a collection of voice processing
+// components designed for real-time communications software.
+//
+// APM operates on two audio streams on a frame-by-frame basis. Frames of the
+// primary stream, on which all processing is applied, are passed to
+// |ProcessStream()|. Frames of the reverse direction stream, which are used for
+// analysis by some components, are passed to |AnalyzeReverseStream()|. On the
+// client-side, this will typically be the near-end (capture) and far-end
+// (render) streams, respectively. APM should be placed in the signal chain as
+// close to the audio hardware abstraction layer (HAL) as possible.
+//
+// On the server-side, the reverse stream will normally not be used, with
+// processing occurring on each incoming stream.
+//
+// Component interfaces follow a similar pattern and are accessed through
+// corresponding getters in APM. All components are disabled at create-time,
+// with default settings that are recommended for most situations. New settings
+// can be applied without enabling a component. Enabling a component triggers
+// memory allocation and initialization to allow it to start processing the
+// streams.
+//
+// Thread safety is provided with the following assumptions to reduce locking
+// overhead:
+//   1. The stream getters and setters are called from the same thread as
+//      ProcessStream(). More precisely, stream functions are never called
+//      concurrently with ProcessStream().
+//   2. Parameter getters are never called concurrently with the corresponding
+//      setter.
+//
+// APM accepts only 16-bit linear PCM audio data in frames of 10 ms. Multiple
+// channels should be interleaved.
+//
+// Usage example, omitting error checking:
+// AudioProcessing* apm = AudioProcessing::Create(0);
+// apm->set_sample_rate_hz(32000); // Super-wideband processing.
+//
+// // Mono capture and stereo render.
+// apm->set_num_channels(1, 1);
+// apm->set_num_reverse_channels(2);
+//
+// apm->high_pass_filter()->Enable(true);
+//
+// apm->echo_cancellation()->enable_drift_compensation(false);
+// apm->echo_cancellation()->Enable(true);
+//
+// apm->noise_suppression()->set_level(kHighSuppression);
+// apm->noise_suppression()->Enable(true);
+//
+// apm->gain_control()->set_analog_level_limits(0, 255);
+// apm->gain_control()->set_mode(kAdaptiveAnalog);
+// apm->gain_control()->Enable(true);
+//
+// apm->voice_detection()->Enable(true);
+//
+// // Start a voice call...
+//
+// // ... Render frame arrives bound for the audio HAL ...
+// apm->AnalyzeReverseStream(render_frame);
+//
+// // ... Capture frame arrives from the audio HAL ...
+// // Call required set_stream_ functions.
+// apm->set_stream_delay_ms(delay_ms);
+// apm->gain_control()->set_stream_analog_level(analog_level);
+//
+// apm->ProcessStream(capture_frame);
+//
+// // Call required stream_ functions.
+// analog_level = apm->gain_control()->stream_analog_level();
+// has_voice = apm->stream_has_voice();
+//
+// // Repeat render and capture processing for the duration of the call...
+// // Start a new call...
+// apm->Initialize();
+//
+// // Close the application...
+// AudioProcessing::Destroy(apm);
+// apm = NULL;
+//
+class AudioProcessing : public Module {
+ public:
+  // Creates an APM instance, with identifier |id|. Use one instance for every
+  // primary audio stream requiring processing. On the client-side, this would
+  // typically be one instance for the near-end stream, and additional instances
+  // for each far-end stream which requires processing. On the server-side,
+  // this would typically be one instance for every incoming stream.
+  static AudioProcessing* Create(int id);
+  virtual ~AudioProcessing() {};
+
+  // TODO(andrew): remove this method. We now allow users to delete instances
+  // directly, useful for scoped_ptr.
+  // Destroys a |apm| instance.
+  static void Destroy(AudioProcessing* apm);
+
+  // Initializes internal states, while retaining all user settings. This
+  // should be called before beginning to process a new audio stream. However,
+  // it is not necessary to call before processing the first stream after
+  // creation.
+  virtual int Initialize() = 0;
+
+  // Sets the sample |rate| in Hz for both the primary and reverse audio
+  // streams. 8000, 16000 or 32000 Hz are permitted.
+  virtual int set_sample_rate_hz(int rate) = 0;
+  virtual int sample_rate_hz() const = 0;
+
+  // Sets the number of channels for the primary audio stream. Input frames must
+  // contain a number of channels given by |input_channels|, while output frames
+  // will be returned with number of channels given by |output_channels|.
+  virtual int set_num_channels(int input_channels, int output_channels) = 0;
+  virtual int num_input_channels() const = 0;
+  virtual int num_output_channels() const = 0;
+
+  // Sets the number of channels for the reverse audio stream. Input frames must
+  // contain a number of channels given by |channels|.
+  virtual int set_num_reverse_channels(int channels) = 0;
+  virtual int num_reverse_channels() const = 0;
+
+  // Processes a 10 ms |frame| of the primary audio stream. On the client-side,
+  // this is the near-end (or captured) audio.
+  //
+  // If needed for enabled functionality, any function with the set_stream_ tag
+  // must be called prior to processing the current frame. Any getter function
+  // with the stream_ tag which is needed should be called after processing.
+  //
+  // The |_frequencyInHz|, |_audioChannel|, and |_payloadDataLengthInSamples|
+  // members of |frame| must be valid, and correspond to settings supplied
+  // to APM.
+  virtual int ProcessStream(AudioFrame* frame) = 0;
+
+  // Analyzes a 10 ms |frame| of the reverse direction audio stream. The frame
+  // will not be modified. On the client-side, this is the far-end (or to be
+  // rendered) audio.
+  //
+  // It is only necessary to provide this if echo processing is enabled, as the
+  // reverse stream forms the echo reference signal. It is recommended, but not
+  // necessary, to provide if gain control is enabled. On the server-side this
+  // typically will not be used. If you're not sure what to pass in here,
+  // chances are you don't need to use it.
+  //
+  // The |_frequencyInHz|, |_audioChannel|, and |_payloadDataLengthInSamples|
+  // members of |frame| must be valid.
+  //
+  // TODO(ajm): add const to input; requires an implementation fix.
+  virtual int AnalyzeReverseStream(AudioFrame* frame) = 0;
+
+  // This must be called if and only if echo processing is enabled.
+  //
+  // Sets the |delay| in ms between AnalyzeReverseStream() receiving a far-end
+  // frame and ProcessStream() receiving a near-end frame containing the
+  // corresponding echo. On the client-side this can be expressed as
+  //   delay = (t_render - t_analyze) + (t_process - t_capture)
+  // where,
+  //   - t_analyze is the time a frame is passed to AnalyzeReverseStream() and
+  //     t_render is the time the first sample of the same frame is rendered by
+  //     the audio hardware.
+  //   - t_capture is the time the first sample of a frame is captured by the
+  //     audio hardware and t_process is the time the same frame is passed to
+  //     ProcessStream().
+  virtual int set_stream_delay_ms(int delay) = 0;
+  virtual int stream_delay_ms() const = 0;
+
+  // Starts recording debugging information to a file specified by |filename|,
+  // a NULL-terminated string. If there is an ongoing recording, the old file
+  // will be closed, and recording will continue in the newly specified file.
+  // An already existing file will be overwritten without warning.
+  static const size_t kMaxFilenameSize = 1024;
+  virtual int StartDebugRecording(const char filename[kMaxFilenameSize]) = 0;
+
+  // Stops recording debugging information, and closes the file. Recording
+  // cannot be resumed in the same file (without overwriting it).
+  virtual int StopDebugRecording() = 0;
+
+  // These provide access to the component interfaces and should never return
+  // NULL. The pointers will be valid for the lifetime of the APM instance.
+  // The memory for these objects is entirely managed internally.
+  virtual EchoCancellation* echo_cancellation() const = 0;
+  virtual EchoControlMobile* echo_control_mobile() const = 0;
+  virtual GainControl* gain_control() const = 0;
+  virtual HighPassFilter* high_pass_filter() const = 0;
+  virtual LevelEstimator* level_estimator() const = 0;
+  virtual NoiseSuppression* noise_suppression() const = 0;
+  virtual VoiceDetection* voice_detection() const = 0;
+
+  struct Statistic {
+    int instant;  // Instantaneous value.
+    int average;  // Long-term average.
+    int maximum;  // Long-term maximum.
+    int minimum;  // Long-term minimum.
+  };
+
+  enum Error {
+    // Fatal errors.
+    kNoError = 0,
+    kUnspecifiedError = -1,
+    kCreationFailedError = -2,
+    kUnsupportedComponentError = -3,
+    kUnsupportedFunctionError = -4,
+    kNullPointerError = -5,
+    kBadParameterError = -6,
+    kBadSampleRateError = -7,
+    kBadDataLengthError = -8,
+    kBadNumberChannelsError = -9,
+    kFileError = -10,
+    kStreamParameterNotSetError = -11,
+    kNotEnabledError = -12,
+
+    // Warnings are non-fatal.
+    // This results when a set_stream_ parameter is out of range. Processing
+    // will continue, but the parameter may have been truncated.
+    kBadStreamParameterWarning = -13
+  };
+
+  // Inherited from Module.
+  virtual WebRtc_Word32 TimeUntilNextProcess() { return -1; };
+  virtual WebRtc_Word32 Process() { return -1; };
+};
+
+// The acoustic echo cancellation (AEC) component provides better performance
+// than AECM but also requires more processing power and is dependent on delay
+// stability and reporting accuracy. As such it is well-suited and recommended
+// for PC and IP phone applications.
+//
+// Not recommended to be enabled on the server-side.
+class EchoCancellation {
+ public:
+  // EchoCancellation and EchoControlMobile may not be enabled simultaneously.
+  // Enabling one will disable the other.
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Differences in clock speed on the primary and reverse streams can impact
+  // the AEC performance. On the client-side, this could be seen when different
+  // render and capture devices are used, particularly with webcams.
+  //
+  // This enables a compensation mechanism, and requires that
+  // |set_device_sample_rate_hz()| and |set_stream_drift_samples()| be called.
+  virtual int enable_drift_compensation(bool enable) = 0;
+  virtual bool is_drift_compensation_enabled() const = 0;
+
+  // Provides the sampling rate of the audio devices. It is assumed the render
+  // and capture devices use the same nominal sample rate. Required if and only
+  // if drift compensation is enabled.
+  virtual int set_device_sample_rate_hz(int rate) = 0;
+  virtual int device_sample_rate_hz() const = 0;
+
+  // Sets the difference between the number of samples rendered and captured by
+  // the audio devices since the last call to |ProcessStream()|. Must be called
+  // if and only if drift compensation is enabled, prior to |ProcessStream()|.
+  virtual int set_stream_drift_samples(int drift) = 0;
+  virtual int stream_drift_samples() const = 0;
+
+  enum SuppressionLevel {
+    kLowSuppression,
+    kModerateSuppression,
+    kHighSuppression
+  };
+
+  // Sets the aggressiveness of the suppressor. A higher level trades off
+  // double-talk performance for increased echo suppression.
+  virtual int set_suppression_level(SuppressionLevel level) = 0;
+  virtual SuppressionLevel suppression_level() const = 0;
+
+  // Returns false if the current frame almost certainly contains no echo
+  // and true if it _might_ contain echo.
+  virtual bool stream_has_echo() const = 0;
+
+  // Enables the computation of various echo metrics. These are obtained
+  // through |GetMetrics()|.
+  virtual int enable_metrics(bool enable) = 0;
+  virtual bool are_metrics_enabled() const = 0;
+
+  // Each statistic is reported in dB.
+  // P_far:  Far-end (render) signal power.
+  // P_echo: Near-end (capture) echo signal power.
+  // P_out:  Signal power at the output of the AEC.
+  // P_a:    Internal signal power at the point before the AEC's non-linear
+  //         processor.
+  struct Metrics {
+    // RERL = ERL + ERLE
+    AudioProcessing::Statistic residual_echo_return_loss;
+
+    // ERL = 10log_10(P_far / P_echo)
+    AudioProcessing::Statistic echo_return_loss;
+
+    // ERLE = 10log_10(P_echo / P_out)
+    AudioProcessing::Statistic echo_return_loss_enhancement;
+
+    // (Pre non-linear processing suppression) A_NLP = 10log_10(P_echo / P_a)
+    AudioProcessing::Statistic a_nlp;
+  };
+
+  // TODO(ajm): discuss the metrics update period.
+  virtual int GetMetrics(Metrics* metrics) = 0;
+
+  // Enables computation and logging of delay values. Statistics are obtained
+  // through |GetDelayMetrics()|.
+  virtual int enable_delay_logging(bool enable) = 0;
+  virtual bool is_delay_logging_enabled() const = 0;
+
+  // The delay metrics consists of the delay |median| and the delay standard
+  // deviation |std|. The values are averaged over the time period since the
+  // last call to |GetDelayMetrics()|.
+  virtual int GetDelayMetrics(int* median, int* std) = 0;
+
+ protected:
+  virtual ~EchoCancellation() {};
+};
+
+// The acoustic echo control for mobile (AECM) component is a low complexity
+// robust option intended for use on mobile devices.
+//
+// Not recommended to be enabled on the server-side.
+class EchoControlMobile {
+ public:
+  // EchoCancellation and EchoControlMobile may not be enabled simultaneously.
+  // Enabling one will disable the other.
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Recommended settings for particular audio routes. In general, the louder
+  // the echo is expected to be, the higher this value should be set. The
+  // preferred setting may vary from device to device.
+  enum RoutingMode {
+    kQuietEarpieceOrHeadset,
+    kEarpiece,
+    kLoudEarpiece,
+    kSpeakerphone,
+    kLoudSpeakerphone
+  };
+
+  // Sets echo control appropriate for the audio routing |mode| on the device.
+  // It can and should be updated during a call if the audio routing changes.
+  virtual int set_routing_mode(RoutingMode mode) = 0;
+  virtual RoutingMode routing_mode() const = 0;
+
+  // Comfort noise replaces suppressed background noise to maintain a
+  // consistent signal level.
+  virtual int enable_comfort_noise(bool enable) = 0;
+  virtual bool is_comfort_noise_enabled() const = 0;
+
+  // A typical use case is to initialize the component with an echo path from a
+  // previous call. The echo path is retrieved using |GetEchoPath()|, typically
+  // at the end of a call. The data can then be stored for later use as an
+  // initializer before the next call, using |SetEchoPath()|.
+  //
+  // Controlling the echo path this way requires the data |size_bytes| to match
+  // the internal echo path size. This size can be acquired using
+  // |echo_path_size_bytes()|. |SetEchoPath()| causes an entire reset, worth
+  // noting if it is to be called during an ongoing call.
+  //
+  // It is possible that version incompatibilities may result in a stored echo
+  // path of the incorrect size. In this case, the stored path should be
+  // discarded.
+  virtual int SetEchoPath(const void* echo_path, size_t size_bytes) = 0;
+  virtual int GetEchoPath(void* echo_path, size_t size_bytes) const = 0;
+
+  // The returned path size is guaranteed not to change for the lifetime of
+  // the application.
+  static size_t echo_path_size_bytes();
+
+ protected:
+  virtual ~EchoControlMobile() {};
+};
+
+// The automatic gain control (AGC) component brings the signal to an
+// appropriate range. This is done by applying a digital gain directly and, in
+// the analog mode, prescribing an analog gain to be applied at the audio HAL.
+//
+// Recommended to be enabled on the client-side.
+class GainControl {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // When an analog mode is set, this must be called prior to |ProcessStream()|
+  // to pass the current analog level from the audio HAL. Must be within the
+  // range provided to |set_analog_level_limits()|.
+  virtual int set_stream_analog_level(int level) = 0;
+
+  // When an analog mode is set, this should be called after |ProcessStream()|
+  // to obtain the recommended new analog level for the audio HAL. It is the
+  // user's responsibility to apply this level.
+  virtual int stream_analog_level() = 0;
+
+  enum Mode {
+    // Adaptive mode intended for use if an analog volume control is available
+    // on the capture device. It will require the user to provide coupling
+    // between the OS mixer controls and AGC through the |stream_analog_level()|
+    // functions.
+    //
+    // It consists of an analog gain prescription for the audio device and a
+    // digital compression stage.
+    kAdaptiveAnalog,
+
+    // Adaptive mode intended for situations in which an analog volume control
+    // is unavailable. It operates in a similar fashion to the adaptive analog
+    // mode, but with scaling instead applied in the digital domain. As with
+    // the analog mode, it additionally uses a digital compression stage.
+    kAdaptiveDigital,
+
+    // Fixed mode which enables only the digital compression stage also used by
+    // the two adaptive modes.
+    //
+    // It is distinguished from the adaptive modes by considering only a
+    // short time-window of the input signal. It applies a fixed gain through
+    // most of the input level range, and compresses (gradually reduces gain
+    // with increasing level) the input signal at higher levels. This mode is
+    // preferred on embedded devices where the capture signal level is
+    // predictable, so that a known gain can be applied.
+    kFixedDigital
+  };
+
+  virtual int set_mode(Mode mode) = 0;
+  virtual Mode mode() const = 0;
+
+  // Sets the target peak |level| (or envelope) of the AGC in dBFs (decibels
+  // from digital full-scale). The convention is to use positive values. For
+  // instance, passing in a value of 3 corresponds to -3 dBFs, or a target
+  // level 3 dB below full-scale. Limited to [0, 31].
+  //
+  // TODO(ajm): use a negative value here instead, if/when VoE will similarly
+  //            update its interface.
+  virtual int set_target_level_dbfs(int level) = 0;
+  virtual int target_level_dbfs() const = 0;
+
+  // Sets the maximum |gain| the digital compression stage may apply, in dB. A
+  // higher number corresponds to greater compression, while a value of 0 will
+  // leave the signal uncompressed. Limited to [0, 90].
+  virtual int set_compression_gain_db(int gain) = 0;
+  virtual int compression_gain_db() const = 0;
+
+  // When enabled, the compression stage will hard limit the signal to the
+  // target level. Otherwise, the signal will be compressed but not limited
+  // above the target level.
+  virtual int enable_limiter(bool enable) = 0;
+  virtual bool is_limiter_enabled() const = 0;
+
+  // Sets the |minimum| and |maximum| analog levels of the audio capture device.
+  // Must be set if and only if an analog mode is used. Limited to [0, 65535].
+  virtual int set_analog_level_limits(int minimum,
+                                      int maximum) = 0;
+  virtual int analog_level_minimum() const = 0;
+  virtual int analog_level_maximum() const = 0;
+
+  // Returns true if the AGC has detected a saturation event (period where the
+  // signal reaches digital full-scale) in the current frame and the analog
+  // level cannot be reduced.
+  //
+  // This could be used as an indicator to reduce or disable analog mic gain at
+  // the audio HAL.
+  virtual bool stream_is_saturated() const = 0;
+
+ protected:
+  virtual ~GainControl() {};
+};
+
+// A filtering component which removes DC offset and low-frequency noise.
+// Recommended to be enabled on the client-side.
+class HighPassFilter {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+ protected:
+  virtual ~HighPassFilter() {};
+};
+
+// An estimation component used to retrieve level metrics.
+class LevelEstimator {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Returns the root mean square (RMS) level in dBFs (decibels from digital
+  // full-scale), or alternately dBov. It is computed over all primary stream
+  // frames since the last call to RMS(). The returned value is positive but
+  // should be interpreted as negative. It is constrained to [0, 127].
+  //
+  // The computation follows:
+  // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-05
+  // with the intent that it can provide the RTP audio level indication.
+  //
+  // Frames passed to ProcessStream() with an |_energy| of zero are considered
+  // to have been muted. The RMS of the frame will be interpreted as -127.
+  virtual int RMS() = 0;
+
+ protected:
+  virtual ~LevelEstimator() {};
+};
+
+// The noise suppression (NS) component attempts to remove noise while
+// retaining speech.
+//
+// Recommended to be enabled on the client-side.
+class NoiseSuppression {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Determines the aggressiveness of the suppression. Increasing the level
+  // will reduce the noise level at the expense of a higher speech distortion.
+  enum Level {
+    kLow,
+    kModerate,
+    kHigh,
+    kVeryHigh
+  };
+
+  virtual int set_level(Level level) = 0;
+  virtual Level level() const = 0;
+
+ protected:
+  virtual ~NoiseSuppression() {};
+};
+
+// The voice activity detection (VAD) component analyzes the stream to
+// determine if voice is present. A facility is also provided to pass in an
+// external VAD decision.
+//
+// In addition to |stream_has_voice()| the VAD decision is provided through the
+// |AudioFrame| passed to |ProcessStream()|. The |_vadActivity| member will be
+// modified to reflect the current decision.
+class VoiceDetection {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Returns true if voice is detected in the current frame. Should be called
+  // after |ProcessStream()|.
+  virtual bool stream_has_voice() const = 0;
+
+  // Some of the APM functionality requires a VAD decision. In the case that
+  // a decision is externally available for the current frame, it can be passed
+  // in here, before |ProcessStream()| is called.
+  //
+  // VoiceDetection does _not_ need to be enabled to use this. If it happens to
+  // be enabled, detection will be skipped for any frame in which an external
+  // VAD decision is provided.
+  virtual int set_stream_has_voice(bool has_voice) = 0;
+
+  // Specifies the likelihood that a frame will be declared to contain voice.
+  // A higher value makes it more likely that speech will not be clipped, at
+  // the expense of more noise being detected as voice.
+  enum Likelihood {
+    kVeryLowLikelihood,
+    kLowLikelihood,
+    kModerateLikelihood,
+    kHighLikelihood
+  };
+
+  virtual int set_likelihood(Likelihood likelihood) = 0;
+  virtual Likelihood likelihood() const = 0;
+
+  // Sets the |size| of the frames in ms on which the VAD will operate. Larger
+  // frames will improve detection accuracy, but reduce the frequency of
+  // updates.
+  //
+  // This does not impact the size of frames passed to |ProcessStream()|.
+  virtual int set_frame_size_ms(int size) = 0;
+  virtual int frame_size_ms() const = 0;
+
+ protected:
+  virtual ~VoiceDetection() {};
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
diff --git a/trunk/src/modules/audio_processing/level_estimator_impl.cc b/trunk/src/modules/audio_processing/level_estimator_impl.cc
new file mode 100644
index 0000000..42cac99
--- /dev/null
+++ b/trunk/src/modules/audio_processing/level_estimator_impl.cc
@@ -0,0 +1,165 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "level_estimator_impl.h"
+
+#include <assert.h>
+#include <math.h>
+#include <string.h>
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+namespace {
+
+const double kMaxSquaredLevel = 32768.0 * 32768.0;
+
+class Level {
+ public:
+  static const int kMinLevel = 127;
+
+  Level()
+    : sum_square_(0.0),
+      sample_count_(0) {}
+  ~Level() {}
+
+  void Init() {
+    sum_square_ = 0.0;
+    sample_count_ = 0;
+  }
+
+  void Process(int16_t* data, int length) {
+    assert(data != NULL);
+    assert(length > 0);
+    sum_square_ += SumSquare(data, length);
+    sample_count_ += length;
+  }
+
+  void ProcessMuted(int length) {
+    assert(length > 0);
+    sample_count_ += length;
+  }
+
+  int RMS() {
+    if (sample_count_ == 0 || sum_square_ == 0.0) {
+      Init();
+      return kMinLevel;
+    }
+
+    // Normalize by the max level.
+    double rms = sum_square_ / (sample_count_ * kMaxSquaredLevel);
+    // 20log_10(x^0.5) = 10log_10(x)
+    rms = 10 * log10(rms);
+    if (rms > 0)
+      rms = 0;
+    else if (rms < -kMinLevel)
+      rms = -kMinLevel;
+
+    rms = -rms;
+    Init();
+    return static_cast<int>(rms + 0.5);
+  }
+
+ private:
+  static double SumSquare(int16_t* data, int length) {
+    double sum_square = 0.0;
+    for (int i = 0; i < length; ++i) {
+      double data_d = static_cast<double>(data[i]);
+      sum_square += data_d * data_d;
+    }
+    return sum_square;
+  }
+
+  double sum_square_;
+  int sample_count_;
+};
+}  // namespace
+
+LevelEstimatorImpl::LevelEstimatorImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm) {}
+
+LevelEstimatorImpl::~LevelEstimatorImpl() {}
+
+int LevelEstimatorImpl::ProcessStream(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  Level* level = static_cast<Level*>(handle(0));
+  if (audio->is_muted()) {
+    level->ProcessMuted(audio->samples_per_channel());
+    return apm_->kNoError;
+  }
+
+  int16_t* mixed_data = audio->data(0);
+  if (audio->num_channels() > 1) {
+    audio->CopyAndMix(1);
+    mixed_data = audio->mixed_data(0);
+  }
+
+  level->Process(mixed_data, audio->samples_per_channel());
+
+  return apm_->kNoError;
+}
+
+int LevelEstimatorImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  return EnableComponent(enable);
+}
+
+bool LevelEstimatorImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+int LevelEstimatorImpl::RMS() {
+  if (!is_component_enabled()) {
+    return apm_->kNotEnabledError;
+  }
+
+  Level* level = static_cast<Level*>(handle(0));
+  return level->RMS();
+}
+
+void* LevelEstimatorImpl::CreateHandle() const {
+  return new Level;
+}
+
+int LevelEstimatorImpl::DestroyHandle(void* handle) const {
+  assert(handle != NULL);
+  Level* level = static_cast<Level*>(handle);
+  delete level;
+  return apm_->kNoError;
+}
+
+int LevelEstimatorImpl::InitializeHandle(void* handle) const {
+  assert(handle != NULL);
+  Level* level = static_cast<Level*>(handle);
+  level->Init();
+
+  return apm_->kNoError;
+}
+
+int LevelEstimatorImpl::ConfigureHandle(void* /*handle*/) const {
+  return apm_->kNoError;
+}
+
+int LevelEstimatorImpl::num_handles_required() const {
+  return 1;
+}
+
+int LevelEstimatorImpl::GetHandleError(void* handle) const {
+  // The component has no detailed errors.
+  assert(handle != NULL);
+  return apm_->kUnspecifiedError;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/level_estimator_impl.h b/trunk/src/modules/audio_processing/level_estimator_impl.h
new file mode 100644
index 0000000..1a06343
--- /dev/null
+++ b/trunk/src/modules/audio_processing/level_estimator_impl.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+// Level-estimation component of the audio processing module (APM).
+// Tracks the RMS level of the capture stream; multi-channel audio is mixed
+// down to mono before measurement.
+class LevelEstimatorImpl : public LevelEstimator,
+                           public ProcessingComponent {
+ public:
+  explicit LevelEstimatorImpl(const AudioProcessingImpl* apm);
+  virtual ~LevelEstimatorImpl();
+
+  // Runs level estimation on one frame of capture audio.
+  int ProcessStream(AudioBuffer* audio);
+
+  // LevelEstimator implementation.
+  virtual bool is_enabled() const;
+
+ private:
+  // LevelEstimator implementation.
+  virtual int Enable(bool enable);
+  virtual int RMS();
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  // Parent APM; presumably not owned — TODO confirm against the other
+  // processing components.
+  const AudioProcessingImpl* apm_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/noise_suppression_impl.cc b/trunk/src/modules/audio_processing/noise_suppression_impl.cc
new file mode 100644
index 0000000..c44d3fe
--- /dev/null
+++ b/trunk/src/modules/audio_processing/noise_suppression_impl.cc
@@ -0,0 +1,165 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "noise_suppression_impl.h"
+
+#include <cassert>
+
+#include "critical_section_wrapper.h"
+#if defined(WEBRTC_NS_FLOAT)
+#include "noise_suppression.h"
+#elif defined(WEBRTC_NS_FIXED)
+#include "noise_suppression_x.h"
+#endif
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_NS_FLOAT)
+typedef NsHandle Handle;
+#elif defined(WEBRTC_NS_FIXED)
+typedef NsxHandle Handle;
+#endif
+
+namespace {
+// Translates the public NoiseSuppression::Level enum into the integer
+// policy value passed to WebRtcNs(x)_set_policy().  Returns -1 for an
+// unrecognized level (asserts first in debug builds).
+int MapSetting(NoiseSuppression::Level level) {
+  switch (level) {
+    case NoiseSuppression::kLow:
+      return 0;
+    case NoiseSuppression::kModerate:
+      return 1;
+    case NoiseSuppression::kHigh:
+      return 2;
+    case NoiseSuppression::kVeryHigh:
+      return 3;
+  }
+  assert(false);
+  return -1;
+}
+}  // namespace
+
+// Starts with the default kModerate suppression level; the component is
+// created disabled (ProcessingComponent default).
+NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm),
+    level_(kModerate) {}
+
+// Empty: per-handle state is released through DestroyHandle() (presumably
+// driven by the ProcessingComponent base — TODO confirm).
+NoiseSuppressionImpl::~NoiseSuppressionImpl() {}
+
+// Applies noise suppression in place to the split (low/high band) capture
+// audio, one NS handle per channel.  Expects a 10 ms split frame
+// (<= 160 samples per split channel) and one handle per channel.
+int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+  int err = apm_->kNoError;
+
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+  assert(audio->samples_per_split_channel() <= 160);
+  assert(audio->num_channels() == num_handles());
+
+  for (int i = 0; i < num_handles(); i++) {
+    // NOTE(review): my_handle and the handle(i) re-fetch below are the same
+    // pointer; the call argument could simply use my_handle.
+    Handle* my_handle = static_cast<Handle*>(handle(i));
+    // Input and output arguments alias, so the NS routine filters in place.
+#if defined(WEBRTC_NS_FLOAT)
+    err = WebRtcNs_Process(static_cast<Handle*>(handle(i)),
+                           audio->low_pass_split_data(i),
+                           audio->high_pass_split_data(i),
+                           audio->low_pass_split_data(i),
+                           audio->high_pass_split_data(i));
+#elif defined(WEBRTC_NS_FIXED)
+    err = WebRtcNsx_Process(static_cast<Handle*>(handle(i)),
+                            audio->low_pass_split_data(i),
+                            audio->high_pass_split_data(i),
+                            audio->low_pass_split_data(i),
+                            audio->high_pass_split_data(i));
+#endif
+
+    if (err != apm_->kNoError) {
+      return GetHandleError(my_handle);
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Enables or disables the component; guarded by the shared APM lock.
+int NoiseSuppressionImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  return EnableComponent(enable);
+}
+
+// Reports whether noise suppression is currently active.
+bool NoiseSuppressionImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+// Validates and caches the suppression level, then calls Configure() so
+// the new policy is applied (presumably to any live handles via
+// ConfigureHandle — TODO confirm base-class behavior).
+int NoiseSuppressionImpl::set_level(Level level) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (MapSetting(level) == -1) {
+    return apm_->kBadParameterError;
+  }
+
+  level_ = level;
+  return Configure();
+}
+
+// Returns the currently cached suppression level.
+NoiseSuppression::Level NoiseSuppressionImpl::level() const {
+  return level_;
+}
+
+// Allocates a float or fixed-point NS handle depending on the build
+// flavor; returns NULL on allocation failure.  The unusual brace layout
+// keeps a single if/else valid under both #if branches.
+void* NoiseSuppressionImpl::CreateHandle() const {
+  Handle* handle = NULL;
+#if defined(WEBRTC_NS_FLOAT)
+  if (WebRtcNs_Create(&handle) != apm_->kNoError)
+#elif defined(WEBRTC_NS_FIXED)
+  if (WebRtcNsx_Create(&handle) != apm_->kNoError)
+#endif
+  {
+    handle = NULL;
+  } else {
+    assert(handle != NULL);
+  }
+
+  return handle;
+}
+
+// Frees the handle with the Free() routine matching the build flavor.
+int NoiseSuppressionImpl::DestroyHandle(void* handle) const {
+#if defined(WEBRTC_NS_FLOAT)
+  return WebRtcNs_Free(static_cast<Handle*>(handle));
+#elif defined(WEBRTC_NS_FIXED)
+  return WebRtcNsx_Free(static_cast<Handle*>(handle));
+#endif
+}
+
+// (Re)initializes one handle at the APM's current sample rate.
+int NoiseSuppressionImpl::InitializeHandle(void* handle) const {
+#if defined(WEBRTC_NS_FLOAT)
+  return WebRtcNs_Init(static_cast<Handle*>(handle), apm_->sample_rate_hz());
+#elif defined(WEBRTC_NS_FIXED)
+  return WebRtcNsx_Init(static_cast<Handle*>(handle), apm_->sample_rate_hz());
+#endif
+}
+
+// Applies the cached suppression level to one handle.
+int NoiseSuppressionImpl::ConfigureHandle(void* handle) const {
+#if defined(WEBRTC_NS_FLOAT)
+  return WebRtcNs_set_policy(static_cast<Handle*>(handle),
+                             MapSetting(level_));
+#elif defined(WEBRTC_NS_FIXED)
+  return WebRtcNsx_set_policy(static_cast<Handle*>(handle),
+                              MapSetting(level_));
+#endif
+}
+
+// One NS instance is needed per output channel.
+int NoiseSuppressionImpl::num_handles_required() const {
+  return apm_->num_output_channels();
+}
+
+// Always maps a handle failure to kUnspecifiedError.
+int NoiseSuppressionImpl::GetHandleError(void* handle) const {
+  // The NS has no get_error() function.
+  assert(handle != NULL);
+  return apm_->kUnspecifiedError;
+}
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/audio_processing/noise_suppression_impl.h b/trunk/src/modules/audio_processing/noise_suppression_impl.h
new file mode 100644
index 0000000..7b65b70
--- /dev/null
+++ b/trunk/src/modules/audio_processing/noise_suppression_impl.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+// Noise-suppression component of the audio processing module (APM).
+// Delegates to the float (WebRtcNs_*) or fixed-point (WebRtcNsx_*) core,
+// selected at compile time, with one core handle per capture channel.
+class NoiseSuppressionImpl : public NoiseSuppression,
+                             public ProcessingComponent {
+ public:
+  explicit NoiseSuppressionImpl(const AudioProcessingImpl* apm);
+  virtual ~NoiseSuppressionImpl();
+
+  // Runs noise suppression in place on one frame of split capture audio.
+  int ProcessCaptureAudio(AudioBuffer* audio);
+
+  // NoiseSuppression implementation.
+  virtual bool is_enabled() const;
+
+ private:
+  // NoiseSuppression implementation.
+  virtual int Enable(bool enable);
+  virtual int set_level(Level level);
+  virtual Level level() const;
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  // Parent APM; not owned (stored as-is by the constructor).
+  const AudioProcessingImpl* apm_;
+  // Suppression level cached by set_level() and applied in
+  // ConfigureHandle().
+  Level level_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
diff --git a/trunk/src/modules/audio_processing/ns/Android.mk b/trunk/src/modules/audio_processing/ns/Android.mk
new file mode 100644
index 0000000..5e442f6
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/Android.mk
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+#############################
+# Build the non-neon library.
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_ns
+LOCAL_MODULE_TAGS := optional
+LOCAL_GENERATED_SOURCES :=
+# Only the fixed-point core is compiled into libwebrtc_ns.
+LOCAL_SRC_FILES := \
+    noise_suppression_x.c \
+    nsx_core.c
+
+# Floating-point sources, intentionally not built here:
+# noise_suppression.c ns_core.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../utility \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../../system_wrappers/interface
+
+LOCAL_STATIC_LIBRARIES += libwebrtc_system_wrappers
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# In a full platform build stlport is pulled in via the platform makefile;
+# NDK builds provide it differently.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+
+#############################
+# Build the neon library.
+# Built only when the product opts in via WEBRTC_BUILD_NEON_LIBS.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_ns_neon
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := nsx_core_neon.c
+
+# Flags passed to both C and C++ files.
+# -flax-vector-conversions relaxes vector-type checking for the NEON
+# intrinsics code.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    -mfpu=neon \
+    -mfloat-abi=softfp \
+    -flax-vector-conversions
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+endif # ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
diff --git a/trunk/src/modules/audio_processing/ns/defines.h b/trunk/src/modules/audio_processing/ns/defines.h
new file mode 100644
index 0000000..d253967
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/defines.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_DEFINES_H_
+
+//#define PROCESS_FLOW_0    // Use the traditional method.
+//#define PROCESS_FLOW_1    // Use traditional with DD estimate of prior SNR.
+#define PROCESS_FLOW_2    // Use the new method of speech/noise classification.
+
+#define BLOCKL_MAX          160 // max processing block length: 160
+#define ANAL_BLOCKL_MAX     256 // max analysis block length: 256
+#define HALF_ANAL_BLOCKL    129 // half max analysis block length + 1
+
+#define QUANTILE            (float)0.25
+
+#define SIMULT              3
+#define END_STARTUP_LONG    200
+#define END_STARTUP_SHORT   50
+#define FACTOR              (float)40.0
+#define WIDTH               (float)0.01
+
+#define SMOOTH              (float)0.75 // filter smoothing
+// Length of fft work arrays.
+#define IP_LENGTH (ANAL_BLOCKL_MAX >> 1) // must be at least ceil(2 + sqrt(ANAL_BLOCKL_MAX/2))
+#define W_LENGTH (ANAL_BLOCKL_MAX >> 1)
+
+// Parameters for the speech/noise classification method (PROCESS_FLOW_2).
+#define DD_PR_SNR           (float)0.98 // DD update of prior SNR
+#define LRT_TAVG            (float)0.50 // tavg parameter for LRT (previously 0.90)
+#define SPECT_FL_TAVG       (float)0.30 // tavg parameter for spectral flatness measure
+#define SPECT_DIFF_TAVG     (float)0.30 // tavg parameter for spectral difference measure
+#define PRIOR_UPDATE        (float)0.10 // update parameter of prior model
+#define NOISE_UPDATE        (float)0.90 // update parameter for noise
+#define SPEECH_UPDATE       (float)0.99 // update parameter when likely speech
+#define WIDTH_PR_MAP        (float)4.0  // width parameter in sigmoid map for prior model
+#define LRT_FEATURE_THR     (float)0.5  // default threshold for LRT feature
+#define SF_FEATURE_THR      (float)0.5  // default threshold for Spectral Flatness feature
+#define SD_FEATURE_THR      (float)0.5  // default threshold for Spectral Difference feature
+#define PROB_RANGE          (float)0.20 // probability threshold for noise state in
+                                        // speech/noise likelihood
+#define HIST_PAR_EST         1000       // histogram size for estimation of parameters
+#define GAMMA_PAUSE         (float)0.05 // update for conservative noise estimate
+//
+#define B_LIM               (float)0.5  // threshold in final energy gain factor calculation
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_DEFINES_H_
diff --git a/trunk/src/modules/audio_processing/ns/include/noise_suppression.h b/trunk/src/modules/audio_processing/ns/include/noise_suppression.h
new file mode 100644
index 0000000..1f498c1
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/include/noise_suppression.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
+
+#include "typedefs.h"
+
+typedef struct NsHandleT NsHandle;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * This function creates an instance of the noise reduction structure.
+ *
+ * Input:
+ *      - NS_inst       : Pointer to noise reduction instance that should be
+ *                        created
+ *
+ * Output:
+ *      - NS_inst       : Pointer to created noise reduction instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_Create(NsHandle** NS_inst);
+
+
+/*
+ * This function frees the dynamic memory of a specified Noise Reduction
+ * instance.
+ *
+ * Input:
+ *      - NS_inst       : Pointer to NS instance that should be freed
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_Free(NsHandle* NS_inst);
+
+
+/*
+ * This function initializes a NS instance
+ *
+ * Input:
+ *      - NS_inst       : Instance that should be initialized
+ *      - fs            : sampling frequency
+ *
+ * Output:
+ *      - NS_inst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_Init(NsHandle* NS_inst, WebRtc_UWord32 fs);
+
+/*
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ *      - NS_inst       : Instance that should be initialized
+ *      - mode          : 0: Mild, 1: Medium , 2: Aggressive
+ *
+ * Output:
+ *      - NS_inst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
+
+
+/*
+ * This function does Noise Suppression for the inserted speech frame. The
+ * input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ *      - NS_inst       : NS Instance. Needs to be initiated before call.
+ *      - spframe       : Pointer to speech frame buffer for L band
+ *      - spframe_H     : Pointer to speech frame buffer for H band
+ *
+ * Output:
+ *      - NS_inst       : Updated NS instance
+ *      - outframe      : Pointer to output frame for L band
+ *      - outframe_H    : Pointer to output frame for H band
+ *
+ * Return value         :  0 - OK
+ *                        -1 - Error
+ */
+int WebRtcNs_Process(NsHandle* NS_inst,
+                     short* spframe,
+                     short* spframe_H,
+                     short* outframe,
+                     short* outframe_H);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
diff --git a/trunk/src/modules/audio_processing/ns/include/noise_suppression_x.h b/trunk/src/modules/audio_processing/ns/include/noise_suppression_x.h
new file mode 100644
index 0000000..b6eef90
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/include/noise_suppression_x.h
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
+
+#include "typedefs.h"
+
+typedef struct NsxHandleT NsxHandle;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * This function creates an instance of the noise reduction structure.
+ *
+ * Input:
+ *      - nsxInst       : Pointer to noise reduction instance that should be
+ *                       created
+ *
+ * Output:
+ *      - nsxInst       : Pointer to created noise reduction instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_Create(NsxHandle** nsxInst);
+
+
+/*
+ * This function frees the dynamic memory of a specified Noise Suppression
+ * instance.
+ *
+ * Input:
+ *      - nsxInst       : Pointer to NS instance that should be freed
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_Free(NsxHandle* nsxInst);
+
+
+/*
+ * This function initializes a NS instance
+ *
+ * Input:
+ *      - nsxInst       : Instance that should be initialized
+ *      - fs            : sampling frequency
+ *
+ * Output:
+ *      - nsxInst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_Init(NsxHandle* nsxInst, WebRtc_UWord32 fs);
+
+/*
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ *      - nsxInst       : Instance that should be initialized
+ *      - mode          : 0: Mild, 1: Medium , 2: Aggressive
+ *
+ * Output:
+ *      - nsxInst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode);
+
+/*
+ * This function does noise suppression for the inserted speech frame. The
+ * input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ *      - nsxInst       : NSx instance. Needs to be initiated before call.
+ *      - speechFrame   : Pointer to speech frame buffer for L band
+ *      - speechFrameHB : Pointer to speech frame buffer for H band
+ *
+ * Output:
+ *      - nsxInst       : Updated NSx instance
+ *      - outFrame      : Pointer to output frame for L band
+ *      - outFrameHB    : Pointer to output frame for H band
+ *
+ * Return value         :  0 - OK
+ *                        -1 - Error
+ */
+int WebRtcNsx_Process(NsxHandle* nsxInst,
+                      short* speechFrame,
+                      short* speechFrameHB,
+                      short* outFrame,
+                      short* outFrameHB);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
diff --git a/trunk/src/modules/audio_processing/ns/noise_suppression.c b/trunk/src/modules/audio_processing/ns/noise_suppression.c
new file mode 100644
index 0000000..a1e5ae8
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/noise_suppression.c
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "noise_suppression.h"
+#include "ns_core.h"
+#include "defines.h"
+
+/* Allocates an NS instance behind the opaque NsHandle and marks it
+ * uninitialized (initFlag = 0).  Returns 0 on success, -1 if malloc
+ * fails. */
+int WebRtcNs_Create(NsHandle** NS_inst) {
+  *NS_inst = (NsHandle*) malloc(sizeof(NSinst_t));
+  if (*NS_inst != NULL) {
+    (*(NSinst_t**)NS_inst)->initFlag = 0;
+    return 0;
+  } else {
+    return -1;
+  }
+
+}
+
+/* Releases an instance allocated by WebRtcNs_Create().  free(NULL) is a
+ * no-op, so a NULL handle is safe. */
+int WebRtcNs_Free(NsHandle* NS_inst) {
+  free(NS_inst);
+  return 0;
+}
+
+
+/* Thin forwarder: initializes the core state at sample rate fs. */
+int WebRtcNs_Init(NsHandle* NS_inst, WebRtc_UWord32 fs) {
+  return WebRtcNs_InitCore((NSinst_t*) NS_inst, fs);
+}
+
+/* Thin forwarder: sets the aggressiveness policy on the core state. */
+int WebRtcNs_set_policy(NsHandle* NS_inst, int mode) {
+  return WebRtcNs_set_policy_core((NSinst_t*) NS_inst, mode);
+}
+
+
+/* Thin forwarder: runs one 10 ms frame (L and H bands) through the
+ * floating-point core (see ns_core.c). */
+int WebRtcNs_Process(NsHandle* NS_inst, short* spframe, short* spframe_H,
+                     short* outframe, short* outframe_H) {
+  return WebRtcNs_ProcessCore(
+      (NSinst_t*) NS_inst, spframe, spframe_H, outframe, outframe_H);
+}
diff --git a/trunk/src/modules/audio_processing/ns/noise_suppression_x.c b/trunk/src/modules/audio_processing/ns/noise_suppression_x.c
new file mode 100644
index 0000000..6d27d0e
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/noise_suppression_x.c
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "noise_suppression_x.h"
+#include "nsx_core.h"
+#include "nsx_defines.h"
+
+/* Allocates a fixed-point NS instance behind the opaque NsxHandle and
+ * marks it uninitialized (initFlag = 0).  Returns 0 on success, -1 if
+ * malloc fails. */
+int WebRtcNsx_Create(NsxHandle** nsxInst) {
+  *nsxInst = (NsxHandle*)malloc(sizeof(NsxInst_t));
+  if (*nsxInst != NULL) {
+    (*(NsxInst_t**)nsxInst)->initFlag = 0;
+    return 0;
+  } else {
+    return -1;
+  }
+
+}
+
+/* Releases an instance allocated by WebRtcNsx_Create().  free(NULL) is a
+ * no-op, so a NULL handle is safe. */
+int WebRtcNsx_Free(NsxHandle* nsxInst) {
+  free(nsxInst);
+  return 0;
+}
+
+/* Thin forwarder: initializes the fixed-point core at sample rate fs. */
+int WebRtcNsx_Init(NsxHandle* nsxInst, WebRtc_UWord32 fs) {
+  return WebRtcNsx_InitCore((NsxInst_t*)nsxInst, fs);
+}
+
+/* Thin forwarder: sets the aggressiveness policy on the core state. */
+int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode) {
+  return WebRtcNsx_set_policy_core((NsxInst_t*)nsxInst, mode);
+}
+
+/* Thin forwarder: runs one 10 ms frame (L and H bands) through the
+ * fixed-point core (see nsx_core.c). */
+int WebRtcNsx_Process(NsxHandle* nsxInst, short* speechFrame,
+                      short* speechFrameHB, short* outFrame,
+                      short* outFrameHB) {
+  return WebRtcNsx_ProcessCore(
+      (NsxInst_t*)nsxInst, speechFrame, speechFrameHB, outFrame, outFrameHB);
+}
+
diff --git a/trunk/src/modules/audio_processing/ns/ns.gypi b/trunk/src/modules/audio_processing/ns/ns.gypi
new file mode 100644
index 0000000..147f7ea
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/ns.gypi
@@ -0,0 +1,59 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # Floating-point noise suppression.
+    {
+      'target_name': 'ns',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        'apm_util'
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/noise_suppression.h',
+        'noise_suppression.c',
+        'windows_private.h',
+        'defines.h',
+        'ns_core.c',
+        'ns_core.h',
+      ],
+    },
+    # Fixed-point noise suppression.
+    {
+      'target_name': 'ns_fix',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/noise_suppression_x.h',
+        'noise_suppression_x.c',
+        'nsx_defines.h',
+        'nsx_core.c',
+        'nsx_core.h',
+      ],
+    },
+  ],
+}
diff --git a/trunk/src/modules/audio_processing/ns/ns_core.c b/trunk/src/modules/audio_processing/ns/ns_core.c
new file mode 100644
index 0000000..e5aa4ff
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/ns_core.c
@@ -0,0 +1,1303 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include <math.h>
+//#include <stdio.h>
+#include <stdlib.h>
+#include "noise_suppression.h"
+#include "ns_core.h"
+#include "windows_private.h"
+#include "fft4g.h"
+#include "signal_processing_library.h"
+
+// Set Feature Extraction Parameters
+void WebRtcNs_set_feature_extraction_parameters(NSinst_t* inst) {
+  //bin size of histogram
+  inst->featureExtractionParams.binSizeLrt      = (float)0.1;
+  inst->featureExtractionParams.binSizeSpecFlat = (float)0.05;
+  inst->featureExtractionParams.binSizeSpecDiff = (float)0.1;
+
+  //range of histogram over which lrt threshold is computed
+  inst->featureExtractionParams.rangeAvgHistLrt = (float)1.0;
+
+  //scale parameters: multiply dominant peaks of the histograms by scale factor to obtain
+  // thresholds for prior model
+  inst->featureExtractionParams.factor1ModelPars = (float)1.20; //for lrt and spectral diff
+  inst->featureExtractionParams.factor2ModelPars = (float)0.9;  //for spectral_flatness:
+  // used when noise is flatter than speech
+
+  //peak limit for spectral flatness (varies between 0 and 1)
+  inst->featureExtractionParams.thresPosSpecFlat = (float)0.6;
+
+  //limit on spacing of two highest peaks in histogram: spacing determined by bin size
+  inst->featureExtractionParams.limitPeakSpacingSpecFlat = 
+      2 * inst->featureExtractionParams.binSizeSpecFlat;
+  inst->featureExtractionParams.limitPeakSpacingSpecDiff =
+      2 * inst->featureExtractionParams.binSizeSpecDiff;
+
+  //limit on relevance of second peak:
+  inst->featureExtractionParams.limitPeakWeightsSpecFlat = (float)0.5;
+  inst->featureExtractionParams.limitPeakWeightsSpecDiff = (float)0.5;
+
+  // fluctuation limit of lrt feature
+  inst->featureExtractionParams.thresFluctLrt = (float)0.05;
+
+  //limit on the max and min values for the feature thresholds
+  inst->featureExtractionParams.maxLrt = (float)1.0;
+  inst->featureExtractionParams.minLrt = (float)0.20;
+
+  inst->featureExtractionParams.maxSpecFlat = (float)0.95;
+  inst->featureExtractionParams.minSpecFlat = (float)0.10;
+
+  inst->featureExtractionParams.maxSpecDiff = (float)1.0;
+  inst->featureExtractionParams.minSpecDiff = (float)0.16;
+
+  //criterion on the weight of a histogram peak to accept/reject a feature
+  inst->featureExtractionParams.thresWeightSpecFlat = (int)(0.3
+      * (inst->modelUpdatePars[1])); //for spectral flatness
+  inst->featureExtractionParams.thresWeightSpecDiff = (int)(0.3
+      * (inst->modelUpdatePars[1])); //for spectral difference
+}
+
+// Initialize state
+int WebRtcNs_InitCore(NSinst_t* inst, WebRtc_UWord32 fs) {
+  int i;
+  //We only support 10ms frames
+
+  //check for valid pointer
+  if (inst == NULL) {
+    return -1;
+  }
+
+  // Initialization of struct
+  if (fs == 8000 || fs == 16000 || fs == 32000) {
+    inst->fs = fs;
+  } else {
+    return -1;
+  }
+  inst->windShift = 0;
+  if (fs == 8000) {
+    // We only support 10ms frames
+    inst->blockLen = 80;
+    inst->blockLen10ms = 80;
+    inst->anaLen = 128;
+    inst->window = kBlocks80w128;
+    inst->outLen = 0;
+  } else if (fs == 16000) {
+    // We only support 10ms frames
+    inst->blockLen = 160;
+    inst->blockLen10ms = 160;
+    inst->anaLen = 256;
+    inst->window = kBlocks160w256;
+    inst->outLen = 0;
+  } else if (fs == 32000) {
+    // We only support 10ms frames
+    inst->blockLen = 160;
+    inst->blockLen10ms = 160;
+    inst->anaLen = 256;
+    inst->window = kBlocks160w256;
+    inst->outLen = 0;
+  }
+  inst->magnLen = inst->anaLen / 2 + 1; // Number of frequency bins
+
+  // Initialize fft work arrays.
+  inst->ip[0] = 0; // Setting this triggers initialization.
+  memset(inst->dataBuf, 0, sizeof(float) * ANAL_BLOCKL_MAX);
+  WebRtc_rdft(inst->anaLen, 1, inst->dataBuf, inst->ip, inst->wfft);
+
+  memset(inst->dataBuf, 0, sizeof(float) * ANAL_BLOCKL_MAX);
+  memset(inst->syntBuf, 0, sizeof(float) * ANAL_BLOCKL_MAX);
+
+  //for HB processing
+  memset(inst->dataBufHB, 0, sizeof(float) * ANAL_BLOCKL_MAX);
+
+  //for quantile noise estimation
+  memset(inst->quantile, 0, sizeof(float) * HALF_ANAL_BLOCKL);
+  for (i = 0; i < SIMULT * HALF_ANAL_BLOCKL; i++) {
+    inst->lquantile[i] = (float)8.0;
+    inst->density[i] = (float)0.3;
+  }
+
+  for (i = 0; i < SIMULT; i++) {
+    inst->counter[i] = (int)floor((float)(END_STARTUP_LONG * (i + 1)) / (float)SIMULT);
+  }
+
+  inst->updates = 0;
+
+  // Wiener filter initialization
+  for (i = 0; i < HALF_ANAL_BLOCKL; i++) {
+    inst->smooth[i] = (float)1.0;
+  }
+
+  // Set the aggressiveness: default
+  inst->aggrMode = 0;
+
+  //initialize variables for new method
+  inst->priorSpeechProb = (float)0.5; //prior prob for speech/noise
+  for (i = 0; i < HALF_ANAL_BLOCKL; i++) {
+    inst->magnPrev[i]      = (float)0.0; //previous mag spectrum
+    inst->noisePrev[i]     = (float)0.0; //previous noise-spectrum
+    inst->logLrtTimeAvg[i] = LRT_FEATURE_THR; //smooth LR ratio (same as threshold)
+    inst->magnAvgPause[i]  = (float)0.0; //conservative noise spectrum estimate
+    inst->speechProbHB[i]  = (float)0.0; //for estimation of HB in second pass
+    inst->initMagnEst[i]   = (float)0.0; //initial average mag spectrum
+  }
+
+  //feature quantities
+  inst->featureData[0] = SF_FEATURE_THR;  //spectral flatness (start on threshold)
+  inst->featureData[1] = (float)0.0;      //spectral entropy: not used in this version
+  inst->featureData[2] = (float)0.0;      //spectral variance: not used in this version
+  inst->featureData[3] = LRT_FEATURE_THR; //average lrt factor (start on threshold)
+  inst->featureData[4] = SF_FEATURE_THR;  //spectral template diff (start on threshold)
+  inst->featureData[5] = (float)0.0;      //normalization for spectral-diff
+  inst->featureData[6] = (float)0.0;      //window time-average of input magnitude spectrum
+
+  //histogram quantities: used to estimate/update thresholds for features
+  for (i = 0; i < HIST_PAR_EST; i++) {
+    inst->histLrt[i] = 0;
+    inst->histSpecFlat[i] = 0;
+    inst->histSpecDiff[i] = 0;
+  }
+
+  inst->blockInd = -1; //frame counter
+  inst->priorModelPars[0] = LRT_FEATURE_THR; //default threshold for lrt feature
+  inst->priorModelPars[1] = (float)0.5;      //threshold for spectral flatness:
+  // determined on-line
+  inst->priorModelPars[2] = (float)1.0;      //sgn_map par for spectral measure:
+  // 1 for flatness measure
+  inst->priorModelPars[3] = (float)0.5;      //threshold for template-difference feature:
+  // determined on-line
+  inst->priorModelPars[4] = (float)1.0;      //default weighting parameter for lrt feature
+  inst->priorModelPars[5] = (float)0.0;      //default weighting parameter for
+  // spectral flatness feature
+  inst->priorModelPars[6] = (float)0.0;      //default weighting parameter for
+  // spectral difference feature
+
+  inst->modelUpdatePars[0] = 2;   //update flag for parameters:
+  // 0 no update, 1=update once, 2=update every window
+  inst->modelUpdatePars[1] = 500; //window for update
+  inst->modelUpdatePars[2] = 0;   //counter for update of conservative noise spectrum
+  //counter if the feature thresholds are updated during the sequence
+  inst->modelUpdatePars[3] = inst->modelUpdatePars[1];
+
+  inst->signalEnergy = 0.0;
+  inst->sumMagn = 0.0;
+  inst->whiteNoiseLevel = 0.0;
+  inst->pinkNoiseNumerator = 0.0;
+  inst->pinkNoiseExp = 0.0;
+
+  WebRtcNs_set_feature_extraction_parameters(inst); // Set feature configuration
+
+  //default mode
+  WebRtcNs_set_policy_core(inst, 0);
+
+
+  memset(inst->outBuf, 0, sizeof(float) * 3 * BLOCKL_MAX);
+
+  inst->initFlag = 1;
+  return 0;
+}
+
+int WebRtcNs_set_policy_core(NSinst_t* inst, int mode) {
+  // allow for modes:0,1,2,3
+  if (mode < 0 || mode > 3) {
+    return (-1);
+  }
+
+  inst->aggrMode = mode;
+  if (mode == 0) {
+    inst->overdrive = (float)1.0;
+    inst->denoiseBound = (float)0.5;
+    inst->gainmap = 0;
+  } else if (mode == 1) {
+    //inst->overdrive = (float)1.25;
+    inst->overdrive = (float)1.0;
+    inst->denoiseBound = (float)0.25;
+    inst->gainmap = 1;
+  } else if (mode == 2) {
+    //inst->overdrive = (float)1.25;
+    inst->overdrive = (float)1.1;
+    inst->denoiseBound = (float)0.125;
+    inst->gainmap = 1;
+  } else if (mode == 3) {
+    //inst->overdrive = (float)1.30;
+    inst->overdrive = (float)1.25;
+    inst->denoiseBound = (float)0.09;
+    inst->gainmap = 1;
+  }
+  return 0;
+}
+
+// Estimate noise
+void WebRtcNs_NoiseEstimation(NSinst_t* inst, float* magn, float* noise) {
+  int i, s, offset;
+  float lmagn[HALF_ANAL_BLOCKL], delta;
+
+  if (inst->updates < END_STARTUP_LONG) {
+    inst->updates++;
+  }
+
+  for (i = 0; i < inst->magnLen; i++) {
+    lmagn[i] = (float)log(magn[i]);
+  }
+
+  // loop over simultaneous estimates
+  for (s = 0; s < SIMULT; s++) {
+    offset = s * inst->magnLen;
+
+    // newquantest(...)
+    for (i = 0; i < inst->magnLen; i++) {
+      // compute delta
+      if (inst->density[offset + i] > 1.0) {
+        delta = FACTOR * (float)1.0 / inst->density[offset + i];
+      } else {
+        delta = FACTOR;
+      }
+
+      // update log quantile estimate
+      if (lmagn[i] > inst->lquantile[offset + i]) {
+        inst->lquantile[offset + i] += QUANTILE * delta
+                                       / (float)(inst->counter[s] + 1);
+      } else {
+        inst->lquantile[offset + i] -= ((float)1.0 - QUANTILE) * delta
+                                       / (float)(inst->counter[s] + 1);
+      }
+
+      // update density estimate
+      if (fabs(lmagn[i] - inst->lquantile[offset + i]) < WIDTH) {
+        inst->density[offset + i] = ((float)inst->counter[s] * inst->density[offset
+            + i] + (float)1.0 / ((float)2.0 * WIDTH)) / (float)(inst->counter[s] + 1);
+      }
+    } // end loop over magnitude spectrum
+
+    if (inst->counter[s] >= END_STARTUP_LONG) {
+      inst->counter[s] = 0;
+      if (inst->updates >= END_STARTUP_LONG) {
+        for (i = 0; i < inst->magnLen; i++) {
+          inst->quantile[i] = (float)exp(inst->lquantile[offset + i]);
+        }
+      }
+    }
+
+    inst->counter[s]++;
+  } // end loop over simultaneous estimates
+
+  // Sequentially update the noise during startup
+  if (inst->updates < END_STARTUP_LONG) {
+    // Use the last "s" to get a noise estimate during startup that differs from zero.
+    for (i = 0; i < inst->magnLen; i++) {
+      inst->quantile[i] = (float)exp(inst->lquantile[offset + i]);
+    }
+  }
+
+  for (i = 0; i < inst->magnLen; i++) {
+    noise[i] = inst->quantile[i];
+  }
+}
+
+// Extract thresholds for feature parameters
+// histograms are computed over some window_size (given by inst->modelUpdatePars[1])
+// thresholds and weights are extracted every window
+// flag 0 means update histogram only, flag 1 means compute the thresholds/weights
+// threshold and weights are returned in: inst->priorModelPars
+void WebRtcNs_FeatureParameterExtraction(NSinst_t* inst, int flag) {
+  int i, useFeatureSpecFlat, useFeatureSpecDiff, numHistLrt;
+  int maxPeak1, maxPeak2;
+  int weightPeak1SpecFlat, weightPeak2SpecFlat, weightPeak1SpecDiff, weightPeak2SpecDiff;
+
+  float binMid, featureSum;
+  float posPeak1SpecFlat, posPeak2SpecFlat, posPeak1SpecDiff, posPeak2SpecDiff;
+  float fluctLrt, avgHistLrt, avgSquareHistLrt, avgHistLrtCompl;
+
+  //3 features: lrt, flatness, difference
+  //lrt_feature = inst->featureData[3];
+  //flat_feature = inst->featureData[0];
+  //diff_feature = inst->featureData[4];
+
+  //update histograms
+  if (flag == 0) {
+    // LRT
+    if ((inst->featureData[3] < HIST_PAR_EST * inst->featureExtractionParams.binSizeLrt)
+        && (inst->featureData[3] >= 0.0)) {
+      i = (int)(inst->featureData[3] / inst->featureExtractionParams.binSizeLrt);
+      inst->histLrt[i]++;
+    }
+    // Spectral flatness
+    if ((inst->featureData[0] < HIST_PAR_EST
+         * inst->featureExtractionParams.binSizeSpecFlat)
+        && (inst->featureData[0] >= 0.0)) {
+      i = (int)(inst->featureData[0] / inst->featureExtractionParams.binSizeSpecFlat);
+      inst->histSpecFlat[i]++;
+    }
+    // Spectral difference
+    if ((inst->featureData[4] < HIST_PAR_EST
+         * inst->featureExtractionParams.binSizeSpecDiff)
+        && (inst->featureData[4] >= 0.0)) {
+      i = (int)(inst->featureData[4] / inst->featureExtractionParams.binSizeSpecDiff);
+      inst->histSpecDiff[i]++;
+    }
+  }
+
+  // extract parameters for speech/noise probability
+  if (flag == 1) {
+    //lrt feature: compute the average over inst->featureExtractionParams.rangeAvgHistLrt
+    avgHistLrt = 0.0;
+    avgHistLrtCompl = 0.0;
+    avgSquareHistLrt = 0.0;
+    numHistLrt = 0;
+    for (i = 0; i < HIST_PAR_EST; i++) {
+      binMid = ((float)i + (float)0.5) * inst->featureExtractionParams.binSizeLrt;
+      if (binMid <= inst->featureExtractionParams.rangeAvgHistLrt) {
+        avgHistLrt += inst->histLrt[i] * binMid;
+        numHistLrt += inst->histLrt[i];
+      }
+      avgSquareHistLrt += inst->histLrt[i] * binMid * binMid;
+      avgHistLrtCompl += inst->histLrt[i] * binMid;
+    }
+    if (numHistLrt > 0) {
+      avgHistLrt = avgHistLrt / ((float)numHistLrt);
+    }
+    avgHistLrtCompl = avgHistLrtCompl / ((float)inst->modelUpdatePars[1]);
+    avgSquareHistLrt = avgSquareHistLrt / ((float)inst->modelUpdatePars[1]);
+    fluctLrt = avgSquareHistLrt - avgHistLrt * avgHistLrtCompl;
+    // get threshold for lrt feature:
+    if (fluctLrt < inst->featureExtractionParams.thresFluctLrt) {
+      //very low fluct, so likely noise
+      inst->priorModelPars[0] = inst->featureExtractionParams.maxLrt;
+    } else {
+      inst->priorModelPars[0] = inst->featureExtractionParams.factor1ModelPars
+                                * avgHistLrt;
+      // check if value is within min/max range
+      if (inst->priorModelPars[0] < inst->featureExtractionParams.minLrt) {
+        inst->priorModelPars[0] = inst->featureExtractionParams.minLrt;
+      }
+      if (inst->priorModelPars[0] > inst->featureExtractionParams.maxLrt) {
+        inst->priorModelPars[0] = inst->featureExtractionParams.maxLrt;
+      }
+    }
+    // done with lrt feature
+
+    //
+    // for spectral flatness and spectral difference: compute the main peaks of histogram
+    maxPeak1 = 0;
+    maxPeak2 = 0;
+    posPeak1SpecFlat = 0.0;
+    posPeak2SpecFlat = 0.0;
+    weightPeak1SpecFlat = 0;
+    weightPeak2SpecFlat = 0;
+
+    // peaks for flatness
+    for (i = 0; i < HIST_PAR_EST; i++) {
+      binMid = ((float)i + (float)0.5) * inst->featureExtractionParams.binSizeSpecFlat;
+      if (inst->histSpecFlat[i] > maxPeak1) {
+        // Found new "first" peak
+        maxPeak2 = maxPeak1;
+        weightPeak2SpecFlat = weightPeak1SpecFlat;
+        posPeak2SpecFlat = posPeak1SpecFlat;
+
+        maxPeak1 = inst->histSpecFlat[i];
+        weightPeak1SpecFlat = inst->histSpecFlat[i];
+        posPeak1SpecFlat = binMid;
+      } else if (inst->histSpecFlat[i] > maxPeak2) {
+        // Found new "second" peak
+        maxPeak2 = inst->histSpecFlat[i];
+        weightPeak2SpecFlat = inst->histSpecFlat[i];
+        posPeak2SpecFlat = binMid;
+      }
+    }
+
+    //compute two peaks for spectral difference
+    maxPeak1 = 0;
+    maxPeak2 = 0;
+    posPeak1SpecDiff = 0.0;
+    posPeak2SpecDiff = 0.0;
+    weightPeak1SpecDiff = 0;
+    weightPeak2SpecDiff = 0;
+    // peaks for spectral difference
+    for (i = 0; i < HIST_PAR_EST; i++) {
+      binMid = ((float)i + (float)0.5) * inst->featureExtractionParams.binSizeSpecDiff;
+      if (inst->histSpecDiff[i] > maxPeak1) {
+        // Found new "first" peak
+        maxPeak2 = maxPeak1;
+        weightPeak2SpecDiff = weightPeak1SpecDiff;
+        posPeak2SpecDiff = posPeak1SpecDiff;
+
+        maxPeak1 = inst->histSpecDiff[i];
+        weightPeak1SpecDiff = inst->histSpecDiff[i];
+        posPeak1SpecDiff = binMid;
+      } else if (inst->histSpecDiff[i] > maxPeak2) {
+        // Found new "second" peak
+        maxPeak2 = inst->histSpecDiff[i];
+        weightPeak2SpecDiff = inst->histSpecDiff[i];
+        posPeak2SpecDiff = binMid;
+      }
+    }
+
+    // for spectrum flatness feature
+    useFeatureSpecFlat = 1;
+    // merge the two peaks if they are close
+    if ((fabs(posPeak2SpecFlat - posPeak1SpecFlat)
+         < inst->featureExtractionParams.limitPeakSpacingSpecFlat)
+        && (weightPeak2SpecFlat
+            > inst->featureExtractionParams.limitPeakWeightsSpecFlat
+            * weightPeak1SpecFlat)) {
+      weightPeak1SpecFlat += weightPeak2SpecFlat;
+      posPeak1SpecFlat = (float)0.5 * (posPeak1SpecFlat + posPeak2SpecFlat);
+    }
+    //reject if weight of peaks is not large enough, or peak value too small
+    if (weightPeak1SpecFlat < inst->featureExtractionParams.thresWeightSpecFlat
+        || posPeak1SpecFlat < inst->featureExtractionParams.thresPosSpecFlat) {
+      useFeatureSpecFlat = 0;
+    }
+    // if selected, get the threshold
+    if (useFeatureSpecFlat == 1) {
+      // compute the threshold
+      inst->priorModelPars[1] = inst->featureExtractionParams.factor2ModelPars
+                                * posPeak1SpecFlat;
+      //check if value is within min/max range
+      if (inst->priorModelPars[1] < inst->featureExtractionParams.minSpecFlat) {
+        inst->priorModelPars[1] = inst->featureExtractionParams.minSpecFlat;
+      }
+      if (inst->priorModelPars[1] > inst->featureExtractionParams.maxSpecFlat) {
+        inst->priorModelPars[1] = inst->featureExtractionParams.maxSpecFlat;
+      }
+    }
+    // done with flatness feature
+
+    // for template feature
+    useFeatureSpecDiff = 1;
+    // merge the two peaks if they are close
+    if ((fabs(posPeak2SpecDiff - posPeak1SpecDiff)
+         < inst->featureExtractionParams.limitPeakSpacingSpecDiff)
+        && (weightPeak2SpecDiff
+            > inst->featureExtractionParams.limitPeakWeightsSpecDiff
+            * weightPeak1SpecDiff)) {
+      weightPeak1SpecDiff += weightPeak2SpecDiff;
+      posPeak1SpecDiff = (float)0.5 * (posPeak1SpecDiff + posPeak2SpecDiff);
+    }
+    // get the threshold value
+    inst->priorModelPars[3] = inst->featureExtractionParams.factor1ModelPars
+                              * posPeak1SpecDiff;
+    //reject if weight of peaks is not large enough
+    if (weightPeak1SpecDiff < inst->featureExtractionParams.thresWeightSpecDiff) {
+      useFeatureSpecDiff = 0;
+    }
+    //check if value is within min/max range
+    if (inst->priorModelPars[3] < inst->featureExtractionParams.minSpecDiff) {
+      inst->priorModelPars[3] = inst->featureExtractionParams.minSpecDiff;
+    }
+    if (inst->priorModelPars[3] > inst->featureExtractionParams.maxSpecDiff) {
+      inst->priorModelPars[3] = inst->featureExtractionParams.maxSpecDiff;
+    }
+    // done with spectral difference feature
+
+    // don't use template feature if fluctuation of lrt feature is very low:
+    //  most likely just noise state
+    if (fluctLrt < inst->featureExtractionParams.thresFluctLrt) {
+      useFeatureSpecDiff = 0;
+    }
+
+    // select the weights between the features
+    // inst->priorModelPars[4] is weight for lrt: always selected
+    // inst->priorModelPars[5] is weight for spectral flatness
+    // inst->priorModelPars[6] is weight for spectral difference
+    featureSum = (float)(1 + useFeatureSpecFlat + useFeatureSpecDiff);
+    inst->priorModelPars[4] = (float)1.0 / featureSum;
+    inst->priorModelPars[5] = ((float)useFeatureSpecFlat) / featureSum;
+    inst->priorModelPars[6] = ((float)useFeatureSpecDiff) / featureSum;
+
+    // set hists to zero for next update
+    if (inst->modelUpdatePars[0] >= 1) {
+      for (i = 0; i < HIST_PAR_EST; i++) {
+        inst->histLrt[i] = 0;
+        inst->histSpecFlat[i] = 0;
+        inst->histSpecDiff[i] = 0;
+      }
+    }
+  } // end of flag == 1
+}
+
+// Compute spectral flatness on input spectrum
+// magnIn is the magnitude spectrum
+// spectral flatness is returned in inst->featureData[0]
+void WebRtcNs_ComputeSpectralFlatness(NSinst_t* inst, float* magnIn) {
+  int i;
+  int shiftLP = 1; //option to remove first bin(s) from spectral measures
+  float avgSpectralFlatnessNum, avgSpectralFlatnessDen, spectralTmp;
+
+  // compute spectral measures
+  // for flatness
+  avgSpectralFlatnessNum = 0.0;
+  avgSpectralFlatnessDen = inst->sumMagn;
+  for (i = 0; i < shiftLP; i++) {
+    avgSpectralFlatnessDen -= magnIn[i];
+  }
+  // compute log of ratio of the geometric to arithmetic mean: check for log(0) case
+  for (i = shiftLP; i < inst->magnLen; i++) {
+    if (magnIn[i] > 0.0) {
+      avgSpectralFlatnessNum += (float)log(magnIn[i]);
+    } else {
+      inst->featureData[0] -= SPECT_FL_TAVG * inst->featureData[0];
+      return;
+    }
+  }
+  //normalize
+  avgSpectralFlatnessDen = avgSpectralFlatnessDen / inst->magnLen;
+  avgSpectralFlatnessNum = avgSpectralFlatnessNum / inst->magnLen;
+
+  //ratio and inverse log: check for case of log(0)
+  spectralTmp = (float)exp(avgSpectralFlatnessNum) / avgSpectralFlatnessDen;
+
+  //time-avg update of spectral flatness feature
+  inst->featureData[0] += SPECT_FL_TAVG * (spectralTmp - inst->featureData[0]);
+  // done with flatness feature
+}
+
+// Compute the difference measure between input spectrum and a template/learned noise spectrum
+// magnIn is the input spectrum
+// the reference/template spectrum is inst->magnAvgPause[i]
+// returns (normalized) spectral difference in inst->featureData[4]
+void WebRtcNs_ComputeSpectralDifference(NSinst_t* inst, float* magnIn) {
+  // avgDiffNormMagn = var(magnIn) - cov(magnIn, magnAvgPause)^2 / var(magnAvgPause)
+  int i;
+  float avgPause, avgMagn, covMagnPause, varPause, varMagn, avgDiffNormMagn;
+
+  avgPause = 0.0;
+  avgMagn = inst->sumMagn;
+  // compute average quantities
+  for (i = 0; i < inst->magnLen; i++) {
+    //conservative smooth noise spectrum from pause frames
+    avgPause += inst->magnAvgPause[i];
+  }
+  avgPause = avgPause / ((float)inst->magnLen);
+  avgMagn = avgMagn / ((float)inst->magnLen);
+
+  covMagnPause = 0.0;
+  varPause = 0.0;
+  varMagn = 0.0;
+  // compute variance and covariance quantities
+  for (i = 0; i < inst->magnLen; i++) {
+    covMagnPause += (magnIn[i] - avgMagn) * (inst->magnAvgPause[i] - avgPause);
+    varPause += (inst->magnAvgPause[i] - avgPause) * (inst->magnAvgPause[i] - avgPause);
+    varMagn += (magnIn[i] - avgMagn) * (magnIn[i] - avgMagn);
+  }
+  covMagnPause = covMagnPause / ((float)inst->magnLen);
+  varPause = varPause / ((float)inst->magnLen);
+  varMagn = varMagn / ((float)inst->magnLen);
+  // update of average magnitude spectrum
+  inst->featureData[6] += inst->signalEnergy;
+
+  avgDiffNormMagn = varMagn - (covMagnPause * covMagnPause) / (varPause + (float)0.0001);
+  // normalize and compute time-avg update of difference feature
+  avgDiffNormMagn = (float)(avgDiffNormMagn / (inst->featureData[5] + (float)0.0001));
+  inst->featureData[4] += SPECT_DIFF_TAVG * (avgDiffNormMagn - inst->featureData[4]);
+}
+
+// Compute speech/noise probability
+// speech/noise probability is returned in: probSpeechFinal
+//magn is the input magnitude spectrum
+//noise is the noise spectrum
+//snrLocPrior is the prior snr for each freq.
+//snrLocPost is the post snr for each freq.
+void WebRtcNs_SpeechNoiseProb(NSinst_t* inst, float* probSpeechFinal, float* snrLocPrior,
+                              float* snrLocPost) {
+  int i, sgnMap;
+  float invLrt, gainPrior, indPrior;
+  float logLrtTimeAvgKsum, besselTmp;
+  float indicator0, indicator1, indicator2;
+  float tmpFloat1, tmpFloat2;
+  float weightIndPrior0, weightIndPrior1, weightIndPrior2;
+  float threshPrior0, threshPrior1, threshPrior2;
+  float widthPrior, widthPrior0, widthPrior1, widthPrior2;
+
+  widthPrior0 = WIDTH_PR_MAP;
+  widthPrior1 = (float)2.0 * WIDTH_PR_MAP; //width for pause region:
+  // lower range, so increase width in tanh map
+  widthPrior2 = (float)2.0 * WIDTH_PR_MAP; //for spectral-difference measure
+
+  //threshold parameters for features
+  threshPrior0 = inst->priorModelPars[0];
+  threshPrior1 = inst->priorModelPars[1];
+  threshPrior2 = inst->priorModelPars[3];
+
+  //sign for flatness feature
+  sgnMap = (int)(inst->priorModelPars[2]);
+
+  //weight parameters for features
+  weightIndPrior0 = inst->priorModelPars[4];
+  weightIndPrior1 = inst->priorModelPars[5];
+  weightIndPrior2 = inst->priorModelPars[6];
+
+  // compute feature based on average LR factor
+  // this is the average over all frequencies of the smooth log lrt
+  logLrtTimeAvgKsum = 0.0;
+  for (i = 0; i < inst->magnLen; i++) {
+    tmpFloat1 = (float)1.0 + (float)2.0 * snrLocPrior[i];
+    tmpFloat2 = (float)2.0 * snrLocPrior[i] / (tmpFloat1 + (float)0.0001);
+    besselTmp = (snrLocPost[i] + (float)1.0) * tmpFloat2;
+    inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - (float)log(tmpFloat1)
+                                          - inst->logLrtTimeAvg[i]);
+    logLrtTimeAvgKsum += inst->logLrtTimeAvg[i];
+  }
+  logLrtTimeAvgKsum = (float)logLrtTimeAvgKsum / (inst->magnLen);
+  inst->featureData[3] = logLrtTimeAvgKsum;
+  // done with computation of LR factor
+
+  //
+  //compute the indicator functions
+  //
+
+  // average lrt feature
+  widthPrior = widthPrior0;
+  //use larger width in tanh map for pause regions
+  if (logLrtTimeAvgKsum < threshPrior0) {
+    widthPrior = widthPrior1;
+  }
+  // compute indicator function: sigmoid map
+  indicator0 = (float)0.5 * ((float)tanh(widthPrior *
+      (logLrtTimeAvgKsum - threshPrior0)) + (float)1.0);
+
+  //spectral flatness feature
+  tmpFloat1 = inst->featureData[0];
+  widthPrior = widthPrior0;
+  //use larger width in tanh map for pause regions
+  if (sgnMap == 1 && (tmpFloat1 > threshPrior1)) {
+    widthPrior = widthPrior1;
+  }
+  if (sgnMap == -1 && (tmpFloat1 < threshPrior1)) {
+    widthPrior = widthPrior1;
+  }
+  // compute indicator function: sigmoid map
+  indicator1 = (float)0.5 * ((float)tanh((float)sgnMap * 
+      widthPrior * (threshPrior1 - tmpFloat1)) + (float)1.0);
+
+  //for template spectrum-difference
+  tmpFloat1 = inst->featureData[4];
+  widthPrior = widthPrior0;
+  //use larger width in tanh map for pause regions
+  if (tmpFloat1 < threshPrior2) {
+    widthPrior = widthPrior2;
+  }
+  // compute indicator function: sigmoid map
+  indicator2 = (float)0.5 * ((float)tanh(widthPrior * (tmpFloat1 - threshPrior2))
+                             + (float)1.0);
+
+  //combine the indicator function with the feature weights
+  indPrior = weightIndPrior0 * indicator0 + weightIndPrior1 * indicator1 + weightIndPrior2
+             * indicator2;
+  // done with computing indicator function
+
+  //compute the prior probability
+  inst->priorSpeechProb += PRIOR_UPDATE * (indPrior - inst->priorSpeechProb);
+  // make sure probabilities are within range: keep floor to 0.01
+  if (inst->priorSpeechProb > 1.0) {
+    inst->priorSpeechProb = (float)1.0;
+  }
+  if (inst->priorSpeechProb < 0.01) {
+    inst->priorSpeechProb = (float)0.01;
+  }
+
+  //final speech probability: combine prior model with LR factor:
+  gainPrior = ((float)1.0 - inst->priorSpeechProb) / (inst->priorSpeechProb + (float)0.0001);
+  for (i = 0; i < inst->magnLen; i++) {
+    invLrt = (float)exp(-inst->logLrtTimeAvg[i]);
+    invLrt = (float)gainPrior * invLrt;
+    probSpeechFinal[i] = (float)1.0 / ((float)1.0 + invLrt);
+  }
+}
+
+int WebRtcNs_ProcessCore(NSinst_t* inst,
+                         short* speechFrame,
+                         short* speechFrameHB,
+                         short* outFrame,
+                         short* outFrameHB) {
+  // main routine for noise reduction
+
+  int     flagHB = 0;
+  int     i;
+  const int kStartBand = 5; // Skip first frequency bins during estimation.
+  int     updateParsFlag;
+
+  float   energy1, energy2, gain, factor, factor1, factor2;
+  float   signalEnergy, sumMagn;
+  float   snrPrior, currentEstimateStsa;
+  float   tmpFloat1, tmpFloat2, tmpFloat3, probSpeech, probNonSpeech;
+  float   gammaNoiseTmp, gammaNoiseOld;
+  float   noiseUpdateTmp, fTmp, dTmp;
+  float   fin[BLOCKL_MAX], fout[BLOCKL_MAX];
+  float   winData[ANAL_BLOCKL_MAX];
+  float   magn[HALF_ANAL_BLOCKL], noise[HALF_ANAL_BLOCKL];
+  float   theFilter[HALF_ANAL_BLOCKL], theFilterTmp[HALF_ANAL_BLOCKL];
+  float   snrLocPost[HALF_ANAL_BLOCKL], snrLocPrior[HALF_ANAL_BLOCKL];
+  float   probSpeechFinal[HALF_ANAL_BLOCKL], previousEstimateStsa[HALF_ANAL_BLOCKL];
+  float   real[ANAL_BLOCKL_MAX], imag[HALF_ANAL_BLOCKL];
+  // Variables during startup
+  float   sum_log_i = 0.0;
+  float   sum_log_i_square = 0.0;
+  float   sum_log_magn = 0.0;
+  float   sum_log_i_log_magn = 0.0;
+  float   parametric_noise = 0.0;
+  float   parametric_exp = 0.0;
+  float   parametric_num = 0.0;
+
+  // SWB variables
+  int     deltaBweHB = 1;
+  int     deltaGainHB = 1;
+  float   decayBweHB = 1.0;
+  float   gainMapParHB = 1.0;
+  float   gainTimeDomainHB = 1.0;
+  float   avgProbSpeechHB, avgProbSpeechHBTmp, avgFilterGainHB, gainModHB;
+
+  // Check that initiation has been done
+  if (inst->initFlag != 1) {
+    return (-1);
+  }
+  // Check for valid pointers based on sampling rate
+  if (inst->fs == 32000) {
+    if (speechFrameHB == NULL) {
+      return -1;
+    }
+    flagHB = 1;
+    // range for averaging low band quantities for H band gain
+    deltaBweHB = (int)inst->magnLen / 4;
+    deltaGainHB = deltaBweHB;
+  }
+  //
+  updateParsFlag = inst->modelUpdatePars[0];
+  //
+
+  //for LB do all processing
+  // convert to float
+  for (i = 0; i < inst->blockLen10ms; i++) {
+    fin[i] = (float)speechFrame[i];
+  }
+  // update analysis buffer for L band
+  memcpy(inst->dataBuf, inst->dataBuf + inst->blockLen10ms,
+         sizeof(float) * (inst->anaLen - inst->blockLen10ms));
+  memcpy(inst->dataBuf + inst->anaLen - inst->blockLen10ms, fin,
+         sizeof(float) * inst->blockLen10ms);
+
+  if (flagHB == 1) {
+    // convert to float
+    for (i = 0; i < inst->blockLen10ms; i++) {
+      fin[i] = (float)speechFrameHB[i];
+    }
+    // update analysis buffer for H band
+    memcpy(inst->dataBufHB, inst->dataBufHB + inst->blockLen10ms,
+           sizeof(float) * (inst->anaLen - inst->blockLen10ms));
+    memcpy(inst->dataBufHB + inst->anaLen - inst->blockLen10ms, fin,
+           sizeof(float) * inst->blockLen10ms);
+  }
+
+  // check if processing needed
+  if (inst->outLen == 0) {
+    // windowing
+    energy1 = 0.0;
+    for (i = 0; i < inst->anaLen; i++) {
+      winData[i] = inst->window[i] * inst->dataBuf[i];
+      energy1 += winData[i] * winData[i];
+    }
+    if (energy1 == 0.0) {
+      // synthesize the special case of zero input
+      // we want to avoid updating statistics in this case:
+      // Updating feature statistics when we have zeros only will cause thresholds to
+      // move towards zero signal situations. This in turn has the effect that once the
+      // signal is "turned on" (non-zero values) everything will be treated as speech
+      // and there is no noise suppression effect. Depending on the duration of the
+      // inactive signal it takes a considerable amount of time for the system to learn
+      // what is noise and what is speech.
+
+      // read out fully processed segment
+      for (i = inst->windShift; i < inst->blockLen + inst->windShift; i++) {
+        fout[i - inst->windShift] = inst->syntBuf[i];
+      }
+      // update synthesis buffer
+      memcpy(inst->syntBuf, inst->syntBuf + inst->blockLen,
+             sizeof(float) * (inst->anaLen - inst->blockLen));
+      memset(inst->syntBuf + inst->anaLen - inst->blockLen, 0,
+             sizeof(float) * inst->blockLen);
+
+      // out buffer
+      inst->outLen = inst->blockLen - inst->blockLen10ms;
+      if (inst->blockLen > inst->blockLen10ms) {
+        for (i = 0; i < inst->outLen; i++) {
+          inst->outBuf[i] = fout[i + inst->blockLen10ms];
+        }
+      }
+      // convert to short
+      for (i = 0; i < inst->blockLen10ms; i++) {
+        dTmp = fout[i];
+        if (dTmp < WEBRTC_SPL_WORD16_MIN) {
+          dTmp = WEBRTC_SPL_WORD16_MIN;
+        } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
+          dTmp = WEBRTC_SPL_WORD16_MAX;
+        }
+        outFrame[i] = (short)dTmp;
+      }
+
+      // for time-domain gain of HB
+      if (flagHB == 1) {
+        for (i = 0; i < inst->blockLen10ms; i++) {
+          dTmp = inst->dataBufHB[i];
+          if (dTmp < WEBRTC_SPL_WORD16_MIN) {
+            dTmp = WEBRTC_SPL_WORD16_MIN;
+          } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
+            dTmp = WEBRTC_SPL_WORD16_MAX;
+          }
+          outFrameHB[i] = (short)dTmp;
+        }
+      } // end of H band gain computation
+      //
+      return 0;
+    }
+
+    //
+    inst->blockInd++; // Update the block index only when we process a block.
+    // FFT
+    WebRtc_rdft(inst->anaLen, 1, winData, inst->ip, inst->wfft);
+
+    imag[0] = 0;
+    real[0] = winData[0];
+    magn[0] = (float)(fabs(real[0]) + 1.0f);
+    imag[inst->magnLen - 1] = 0;
+    real[inst->magnLen - 1] = winData[1];
+    magn[inst->magnLen - 1] = (float)(fabs(real[inst->magnLen - 1]) + 1.0f);
+    signalEnergy = (float)(real[0] * real[0]) + 
+                   (float)(real[inst->magnLen - 1] * real[inst->magnLen - 1]);
+    sumMagn = magn[0] + magn[inst->magnLen - 1];
+    if (inst->blockInd < END_STARTUP_SHORT) {
+      inst->initMagnEst[0] += magn[0];
+      inst->initMagnEst[inst->magnLen - 1] += magn[inst->magnLen - 1];
+      tmpFloat2 = log((float)(inst->magnLen - 1));
+      sum_log_i = tmpFloat2;
+      sum_log_i_square = tmpFloat2 * tmpFloat2;
+      tmpFloat1 = log(magn[inst->magnLen - 1]);
+      sum_log_magn = tmpFloat1;
+      sum_log_i_log_magn = tmpFloat2 * tmpFloat1;
+    }
+    for (i = 1; i < inst->magnLen - 1; i++) {
+      real[i] = winData[2 * i];
+      imag[i] = winData[2 * i + 1];
+      // magnitude spectrum
+      fTmp = real[i] * real[i];
+      fTmp += imag[i] * imag[i];
+      signalEnergy += fTmp;
+      magn[i] = ((float)sqrt(fTmp)) + 1.0f;
+      sumMagn += magn[i];
+      if (inst->blockInd < END_STARTUP_SHORT) {
+        inst->initMagnEst[i] += magn[i];
+        if (i >= kStartBand) {
+          tmpFloat2 = log((float)i);
+          sum_log_i += tmpFloat2;
+          sum_log_i_square += tmpFloat2 * tmpFloat2;
+          tmpFloat1 = log(magn[i]);
+          sum_log_magn += tmpFloat1;
+          sum_log_i_log_magn += tmpFloat2 * tmpFloat1;
+        }
+      }
+    }
+    signalEnergy = signalEnergy / ((float)inst->magnLen);
+    inst->signalEnergy = signalEnergy;
+    inst->sumMagn = sumMagn;
+
+    //compute spectral flatness on input spectrum
+    WebRtcNs_ComputeSpectralFlatness(inst, magn);
+    // quantile noise estimate
+    WebRtcNs_NoiseEstimation(inst, magn, noise);
+    //compute simplified noise model during startup
+    if (inst->blockInd < END_STARTUP_SHORT) {
+      // Estimate White noise
+      inst->whiteNoiseLevel += sumMagn / ((float)inst->magnLen) * inst->overdrive;
+      // Estimate Pink noise parameters
+      tmpFloat1 = sum_log_i_square * ((float)(inst->magnLen - kStartBand));
+      tmpFloat1 -= (sum_log_i * sum_log_i);
+      tmpFloat2 = (sum_log_i_square * sum_log_magn - sum_log_i * sum_log_i_log_magn);
+      tmpFloat3 = tmpFloat2 / tmpFloat1;
+      // Constrain the estimated spectrum to be positive
+      if (tmpFloat3 < 0.0f) {
+        tmpFloat3 = 0.0f;
+      }
+      inst->pinkNoiseNumerator += tmpFloat3;
+      tmpFloat2 = (sum_log_i * sum_log_magn);
+      tmpFloat2 -= ((float)(inst->magnLen - kStartBand)) * sum_log_i_log_magn;
+      tmpFloat3 = tmpFloat2 / tmpFloat1;
+      // Constrain the pink noise power to be in the interval [0, 1];
+      if (tmpFloat3 < 0.0f) {
+        tmpFloat3 = 0.0f;
+      }
+      if (tmpFloat3 > 1.0f) {
+        tmpFloat3 = 1.0f;
+      }
+      inst->pinkNoiseExp += tmpFloat3;
+
+      // Calculate frequency independent parts of parametric noise estimate.
+      if (inst->pinkNoiseExp == 0.0f) {
+        // Use white noise estimate
+        parametric_noise = inst->whiteNoiseLevel;
+      } else {
+        // Use pink noise estimate
+        parametric_num = exp(inst->pinkNoiseNumerator / (float)(inst->blockInd + 1));
+        parametric_num *= (float)(inst->blockInd + 1);
+        parametric_exp = inst->pinkNoiseExp / (float)(inst->blockInd + 1);
+        parametric_noise = parametric_num / pow((float)kStartBand, parametric_exp);
+      }
+      for (i = 0; i < inst->magnLen; i++) {
+        // Estimate the background noise using the white and pink noise parameters
+        if ((inst->pinkNoiseExp > 0.0f) && (i >= kStartBand)) {
+          // Use pink noise estimate
+          parametric_noise = parametric_num / pow((float)i, parametric_exp);
+        }
+        theFilterTmp[i] = (inst->initMagnEst[i] - inst->overdrive * parametric_noise);
+        theFilterTmp[i] /= (inst->initMagnEst[i] + (float)0.0001);
+        // Weight quantile noise with modeled noise
+        noise[i] *= (inst->blockInd);
+        tmpFloat2 = parametric_noise * (END_STARTUP_SHORT - inst->blockInd);
+        noise[i] += (tmpFloat2 / (float)(inst->blockInd + 1));
+        noise[i] /= END_STARTUP_SHORT;
+      }
+    }
+    //compute average signal during END_STARTUP_LONG time:
+    // used to normalize spectral difference measure
+    if (inst->blockInd < END_STARTUP_LONG) {
+      inst->featureData[5] *= inst->blockInd;
+      inst->featureData[5] += signalEnergy;
+      inst->featureData[5] /= (inst->blockInd + 1);
+    }
+
+#ifdef PROCESS_FLOW_0
+    if (inst->blockInd > END_STARTUP_LONG) {
+      //option: average the quantile noise: for check with AEC2
+      for (i = 0; i < inst->magnLen; i++) {
+        noise[i] = (float)0.6 * inst->noisePrev[i] + (float)0.4 * noise[i];
+      }
+      for (i = 0; i < inst->magnLen; i++) {
+        // Wiener filter with over-subtraction:
+        theFilter[i] = (magn[i] - inst->overdrive * noise[i]) / (magn[i] + (float)0.0001);
+      }
+    }
+#else
+    //start processing at frames == converged+1
+    //
+    // STEP 1: compute prior and post SNR based on the quantile noise estimate
+    //
+
+    // compute DD estimate of prior SNR: needed for new method
+    for (i = 0; i < inst->magnLen; i++) {
+      // post snr
+      snrLocPost[i] = (float)0.0;
+      if (magn[i] > noise[i]) {
+        snrLocPost[i] = magn[i] / (noise[i] + (float)0.0001) - (float)1.0;
+      }
+      // previous post snr
+      // previous estimate: based on previous frame with gain filter
+      previousEstimateStsa[i] = inst->magnPrev[i] / (inst->noisePrev[i] + (float)0.0001)
+                                * (inst->smooth[i]);
+      // DD estimate is sum of two terms: current estimate and previous estimate
+      // directed decision update of snrPrior
+      snrLocPrior[i] = DD_PR_SNR * previousEstimateStsa[i] + ((float)1.0 - DD_PR_SNR)
+                       * snrLocPost[i];
+      // post and prior snr needed for step 2
+    } // end of loop over freqs
+#ifdef PROCESS_FLOW_1
+    for (i = 0; i < inst->magnLen; i++) {
+      // gain filter
+      tmpFloat1 = inst->overdrive + snrLocPrior[i];
+      tmpFloat2 = (float)snrLocPrior[i] / tmpFloat1;
+      theFilter[i] = (float)tmpFloat2;
+    } // end of loop over freqs
+#endif
+    // done with step 1: dd computation of prior and post snr
+
+    //
+    //STEP 2: compute speech/noise likelihood
+    //
+#ifdef PROCESS_FLOW_2
+    // compute difference of input spectrum with learned/estimated noise spectrum
+    WebRtcNs_ComputeSpectralDifference(inst, magn);
+    // compute histograms for parameter decisions (thresholds and weights for features)
+    // parameters are extracted once every window time (=inst->modelUpdatePars[1])
+    if (updateParsFlag >= 1) {
+      // counter update
+      inst->modelUpdatePars[3]--;
+      // update histogram
+      if (inst->modelUpdatePars[3] > 0) {
+        WebRtcNs_FeatureParameterExtraction(inst, 0);
+      }
+      // compute model parameters
+      if (inst->modelUpdatePars[3] == 0) {
+        WebRtcNs_FeatureParameterExtraction(inst, 1);
+        inst->modelUpdatePars[3] = inst->modelUpdatePars[1];
+        // if wish to update only once, set flag to zero
+        if (updateParsFlag == 1) {
+          inst->modelUpdatePars[0] = 0;
+        } else {
+          // update every window:
+          // get normalization for spectral difference for next window estimate
+          inst->featureData[6] = inst->featureData[6]
+                                 / ((float)inst->modelUpdatePars[1]);
+          inst->featureData[5] = (float)0.5 * (inst->featureData[6]
+                                               + inst->featureData[5]);
+          inst->featureData[6] = (float)0.0;
+        }
+      }
+    }
+    // compute speech/noise probability
+    WebRtcNs_SpeechNoiseProb(inst, probSpeechFinal, snrLocPrior, snrLocPost);
+    // time-avg parameter for noise update
+    gammaNoiseTmp = NOISE_UPDATE;
+    for (i = 0; i < inst->magnLen; i++) {
+      probSpeech = probSpeechFinal[i];
+      probNonSpeech = (float)1.0 - probSpeech;
+      // temporary noise update:
+      // use it for speech frames if update value is less than previous
+      noiseUpdateTmp = gammaNoiseTmp * inst->noisePrev[i] + ((float)1.0 - gammaNoiseTmp)
+                       * (probNonSpeech * magn[i] + probSpeech * inst->noisePrev[i]);
+      //
+      // time-constant based on speech/noise state
+      gammaNoiseOld = gammaNoiseTmp;
+      gammaNoiseTmp = NOISE_UPDATE;
+      // increase gamma (i.e., less noise update) for frame likely to be speech
+      if (probSpeech > PROB_RANGE) {
+        gammaNoiseTmp = SPEECH_UPDATE;
+      }
+      // conservative noise update
+      if (probSpeech < PROB_RANGE) {
+        inst->magnAvgPause[i] += GAMMA_PAUSE * (magn[i] - inst->magnAvgPause[i]);
+      }
+      // noise update
+      if (gammaNoiseTmp == gammaNoiseOld) {
+        noise[i] = noiseUpdateTmp;
+      } else {
+        noise[i] = gammaNoiseTmp * inst->noisePrev[i] + ((float)1.0 - gammaNoiseTmp)
+                   * (probNonSpeech * magn[i] + probSpeech * inst->noisePrev[i]);
+        // allow for noise update downwards:
+        //  if noise update decreases the noise, it is safe, so allow it to happen
+        if (noiseUpdateTmp < noise[i]) {
+          noise[i] = noiseUpdateTmp;
+        }
+      }
+    } // end of freq loop
+    // done with step 2: noise update
+
+    //
+    // STEP 3: compute dd update of prior snr and post snr based on new noise estimate
+    //
+    for (i = 0; i < inst->magnLen; i++) {
+      // post and prior snr
+      currentEstimateStsa = (float)0.0;
+      if (magn[i] > noise[i]) {
+        currentEstimateStsa = magn[i] / (noise[i] + (float)0.0001) - (float)1.0;
+      }
+      // DD estimate is sum of two terms: current estimate and previous estimate
+      // directed decision update of snrPrior
+      snrPrior = DD_PR_SNR * previousEstimateStsa[i] + ((float)1.0 - DD_PR_SNR)
+                 * currentEstimateStsa;
+      // gain filter
+      tmpFloat1 = inst->overdrive + snrPrior;
+      tmpFloat2 = (float)snrPrior / tmpFloat1;
+      theFilter[i] = (float)tmpFloat2;
+    } // end of loop over freqs
+    // done with step3
+#endif
+#endif
+
+    for (i = 0; i < inst->magnLen; i++) {
+      // flooring bottom
+      if (theFilter[i] < inst->denoiseBound) {
+        theFilter[i] = inst->denoiseBound;
+      }
+      // flooring top
+      if (theFilter[i] > (float)1.0) {
+        theFilter[i] = 1.0;
+      }
+      if (inst->blockInd < END_STARTUP_SHORT) {
+        // flooring bottom
+        if (theFilterTmp[i] < inst->denoiseBound) {
+          theFilterTmp[i] = inst->denoiseBound;
+        }
+        // flooring top
+        if (theFilterTmp[i] > (float)1.0) {
+          theFilterTmp[i] = 1.0;
+        }
+        // Weight the two suppression filters
+        theFilter[i] *= (inst->blockInd);
+        theFilterTmp[i] *= (END_STARTUP_SHORT - inst->blockInd);
+        theFilter[i] += theFilterTmp[i];
+        theFilter[i] /= (END_STARTUP_SHORT);
+      }
+      // smoothing
+#ifdef PROCESS_FLOW_0
+      inst->smooth[i] *= SMOOTH; // value set to 0.7 in define.h file
+      inst->smooth[i] += ((float)1.0 - SMOOTH) * theFilter[i];
+#else
+      inst->smooth[i] = theFilter[i];
+#endif
+      real[i] *= inst->smooth[i];
+      imag[i] *= inst->smooth[i];
+    }
+    // keep track of noise and magn spectrum for next frame
+    for (i = 0; i < inst->magnLen; i++) {
+      inst->noisePrev[i] = noise[i];
+      inst->magnPrev[i] = magn[i];
+    }
+    // back to time domain
+    winData[0] = real[0];
+    winData[1] = real[inst->magnLen - 1];
+    for (i = 1; i < inst->magnLen - 1; i++) {
+      winData[2 * i] = real[i];
+      winData[2 * i + 1] = imag[i];
+    }
+    WebRtc_rdft(inst->anaLen, -1, winData, inst->ip, inst->wfft);
+
+    for (i = 0; i < inst->anaLen; i++) {
+      real[i] = 2.0f * winData[i] / inst->anaLen; // fft scaling
+    }
+
+    //scale factor: only do it after END_STARTUP_LONG time
+    factor = (float)1.0;
+    if (inst->gainmap == 1 && inst->blockInd > END_STARTUP_LONG) {
+      factor1 = (float)1.0;
+      factor2 = (float)1.0;
+
+      energy2 = 0.0;
+      for (i = 0; i < inst->anaLen; i++) {
+        energy2 += (float)real[i] * (float)real[i];
+      }
+      gain = (float)sqrt(energy2 / (energy1 + (float)1.0));
+
+#ifdef PROCESS_FLOW_2
+      // scaling for new version
+      if (gain > B_LIM) {
+        factor1 = (float)1.0 + (float)1.3 * (gain - B_LIM);
+        if (gain * factor1 > (float)1.0) {
+          factor1 = (float)1.0 / gain;
+        }
+      }
+      if (gain < B_LIM) {
+        //don't reduce scale too much for pause regions:
+        // attenuation here should be controlled by flooring
+        if (gain <= inst->denoiseBound) {
+          gain = inst->denoiseBound;
+        }
+        factor2 = (float)1.0 - (float)0.3 * (B_LIM - gain);
+      }
+      //combine both scales with speech/noise prob:
+      // note prior (priorSpeechProb) is not frequency dependent
+      factor = inst->priorSpeechProb * factor1 + ((float)1.0 - inst->priorSpeechProb)
+               * factor2;
+#else
+      if (gain > B_LIM) {
+        factor = (float)1.0 + (float)1.3 * (gain - B_LIM);
+      } else {
+        factor = (float)1.0 + (float)2.0 * (gain - B_LIM);
+      }
+      if (gain * factor > (float)1.0) {
+        factor = (float)1.0 / gain;
+      }
+#endif
+    } // out of inst->gainmap==1
+
+    // synthesis
+    for (i = 0; i < inst->anaLen; i++) {
+      inst->syntBuf[i] += factor * inst->window[i] * (float)real[i];
+    }
+    // read out fully processed segment
+    for (i = inst->windShift; i < inst->blockLen + inst->windShift; i++) {
+      fout[i - inst->windShift] = inst->syntBuf[i];
+    }
+    // update synthesis buffer
+    memcpy(inst->syntBuf, inst->syntBuf + inst->blockLen,
+           sizeof(float) * (inst->anaLen - inst->blockLen));
+    memset(inst->syntBuf + inst->anaLen - inst->blockLen, 0,
+           sizeof(float) * inst->blockLen);
+
+    // out buffer
+    inst->outLen = inst->blockLen - inst->blockLen10ms;
+    if (inst->blockLen > inst->blockLen10ms) {
+      for (i = 0; i < inst->outLen; i++) {
+        inst->outBuf[i] = fout[i + inst->blockLen10ms];
+      }
+    }
+  } // end of if out.len==0
+  else {
+    for (i = 0; i < inst->blockLen10ms; i++) {
+      fout[i] = inst->outBuf[i];
+    }
+    memcpy(inst->outBuf, inst->outBuf + inst->blockLen10ms,
+           sizeof(float) * (inst->outLen - inst->blockLen10ms));
+    memset(inst->outBuf + inst->outLen - inst->blockLen10ms, 0,
+           sizeof(float) * inst->blockLen10ms);
+    inst->outLen -= inst->blockLen10ms;
+  }
+
+  // convert to short
+  for (i = 0; i < inst->blockLen10ms; i++) {
+    dTmp = fout[i];
+    if (dTmp < WEBRTC_SPL_WORD16_MIN) {
+      dTmp = WEBRTC_SPL_WORD16_MIN;
+    } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
+      dTmp = WEBRTC_SPL_WORD16_MAX;
+    }
+    outFrame[i] = (short)dTmp;
+  }
+
+  // for time-domain gain of HB
+  if (flagHB == 1) {
+    for (i = 0; i < inst->magnLen; i++) {
+      inst->speechProbHB[i] = probSpeechFinal[i];
+    }
+    // average speech prob from low band
+    // avg over second half (i.e., 4->8kHz) of freq. spectrum
+    avgProbSpeechHB = 0.0;
+    for (i = inst->magnLen - deltaBweHB - 1; i < inst->magnLen - 1; i++) {
+      avgProbSpeechHB += inst->speechProbHB[i];
+    }
+    avgProbSpeechHB = avgProbSpeechHB / ((float)deltaBweHB);
+    // average filter gain from low band
+    // average over second half (i.e., 4->8kHz) of freq. spectrum
+    avgFilterGainHB = 0.0;
+    for (i = inst->magnLen - deltaGainHB - 1; i < inst->magnLen - 1; i++) {
+      avgFilterGainHB += inst->smooth[i];
+    }
+    avgFilterGainHB = avgFilterGainHB / ((float)(deltaGainHB));
+    avgProbSpeechHBTmp = (float)2.0 * avgProbSpeechHB - (float)1.0;
+    // gain based on speech prob:
+    gainModHB = (float)0.5 * ((float)1.0 + (float)tanh(gainMapParHB * avgProbSpeechHBTmp));
+    //combine gain with low band gain
+    gainTimeDomainHB = (float)0.5 * gainModHB + (float)0.5 * avgFilterGainHB;
+    if (avgProbSpeechHB >= (float)0.5) {
+      gainTimeDomainHB = (float)0.25 * gainModHB + (float)0.75 * avgFilterGainHB;
+    }
+    gainTimeDomainHB = gainTimeDomainHB * decayBweHB;
+    //make sure gain is within flooring range
+    // flooring bottom
+    if (gainTimeDomainHB < inst->denoiseBound) {
+      gainTimeDomainHB = inst->denoiseBound;
+    }
+    // flooring top
+    if (gainTimeDomainHB > (float)1.0) {
+      gainTimeDomainHB = 1.0;
+    }
+    //apply gain
+    for (i = 0; i < inst->blockLen10ms; i++) {
+      dTmp = gainTimeDomainHB * inst->dataBufHB[i];
+      if (dTmp < WEBRTC_SPL_WORD16_MIN) {
+        dTmp = WEBRTC_SPL_WORD16_MIN;
+      } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
+        dTmp = WEBRTC_SPL_WORD16_MAX;
+      }
+      outFrameHB[i] = (short)dTmp;
+    }
+  } // end of H band gain computation
+  //
+
+  return 0;
+}
diff --git a/trunk/src/modules/audio_processing/ns/ns_core.h b/trunk/src/modules/audio_processing/ns/ns_core.h
new file mode 100644
index 0000000..2f4c34f
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/ns_core.h
@@ -0,0 +1,179 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NS_CORE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NS_CORE_H_
+
+#include "defines.h"
+
+typedef struct NSParaExtract_t_ {
+
+  //bin size of histogram
+  float binSizeLrt;
+  float binSizeSpecFlat;
+  float binSizeSpecDiff;
+  //range of histogram over which lrt threshold is computed
+  float rangeAvgHistLrt;
+  //scale parameters: multiply dominant peaks of the histograms by scale factor to obtain
+  //thresholds for prior model
+  float factor1ModelPars; //for lrt and spectral difference
+  float factor2ModelPars; //for spectral_flatness: used when noise is flatter than speech
+  //peak limit for spectral flatness (varies between 0 and 1)
+  float thresPosSpecFlat;
+  //limit on spacing of two highest peaks in histogram: spacing determined by bin size
+  float limitPeakSpacingSpecFlat;
+  float limitPeakSpacingSpecDiff;
+  //limit on relevance of second peak:
+  float limitPeakWeightsSpecFlat;
+  float limitPeakWeightsSpecDiff;
+  //limit on fluctuation of lrt feature
+  float thresFluctLrt;
+  //limit on the max and min values for the feature thresholds
+  float maxLrt;
+  float minLrt;
+  float maxSpecFlat;
+  float minSpecFlat;
+  float maxSpecDiff;
+  float minSpecDiff;
+  //criteria of weight of histogram peak  to accept/reject feature
+  int thresWeightSpecFlat;
+  int thresWeightSpecDiff;
+
+} NSParaExtract_t;
+
+typedef struct NSinst_t_ {
+
+  WebRtc_UWord32  fs;
+  int             blockLen;
+  int             blockLen10ms;
+  int             windShift;
+  int             outLen;
+  int             anaLen;
+  int             magnLen;
+  int             aggrMode;
+  const float*    window;
+  float           dataBuf[ANAL_BLOCKL_MAX];
+  float           syntBuf[ANAL_BLOCKL_MAX];
+  float           outBuf[3 * BLOCKL_MAX];
+
+  int             initFlag;
+  // parameters for quantile noise estimation
+  float           density[SIMULT* HALF_ANAL_BLOCKL];
+  float           lquantile[SIMULT* HALF_ANAL_BLOCKL];
+  float           quantile[HALF_ANAL_BLOCKL];
+  int             counter[SIMULT];
+  int             updates;
+  // parameters for Wiener filter
+  float           smooth[HALF_ANAL_BLOCKL];
+  float           overdrive;
+  float           denoiseBound;
+  int             gainmap;
+  // fft work arrays.
+  int             ip[IP_LENGTH];
+  float           wfft[W_LENGTH];
+
+  // parameters for new method: some not needed, will reduce/cleanup later
+  WebRtc_Word32   blockInd;                           //frame index counter
+  int             modelUpdatePars[4];                 //parameters for updating or estimating
+  // thresholds/weights for prior model
+  float           priorModelPars[7];                  //parameters for prior model
+  float           noisePrev[HALF_ANAL_BLOCKL];        //noise spectrum from previous frame
+  float           magnPrev[HALF_ANAL_BLOCKL];         //magnitude spectrum of previous frame
+  float           logLrtTimeAvg[HALF_ANAL_BLOCKL];    //log lrt factor with time-smoothing
+  float           priorSpeechProb;                    //prior speech/noise probability
+  float           featureData[7];                     //data for features
+  float           magnAvgPause[HALF_ANAL_BLOCKL];     //conservative noise spectrum estimate
+  float           signalEnergy;                       //energy of magn
+  float           sumMagn;                            //sum of magn
+  float           whiteNoiseLevel;                    //initial noise estimate
+  float           initMagnEst[HALF_ANAL_BLOCKL];      //initial magnitude spectrum estimate
+  float           pinkNoiseNumerator;                 //pink noise parameter: numerator
+  float           pinkNoiseExp;                       //pink noise parameter: power of freq
+  NSParaExtract_t featureExtractionParams;            //parameters for feature extraction
+  //histograms for parameter estimation
+  int             histLrt[HIST_PAR_EST];
+  int             histSpecFlat[HIST_PAR_EST];
+  int             histSpecDiff[HIST_PAR_EST];
+  //quantities for high band estimate
+  float           speechProbHB[HALF_ANAL_BLOCKL];     //final speech/noise prob: prior + LRT
+  float           dataBufHB[ANAL_BLOCKL_MAX];         //buffering data for HB
+
+} NSinst_t;
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcNs_InitCore(...)
+ *
+ * This function initializes a noise suppression instance
+ *
+ * Input:
+ *      - inst          : Instance that should be initialized
+ *      - fs            : Sampling frequency
+ *
+ * Output:
+ *      - inst          : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_InitCore(NSinst_t* inst, WebRtc_UWord32 fs);
+
+/****************************************************************************
+ * WebRtcNs_set_policy_core(...)
+ *
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ *      - inst          : Instance that should be initialized
+ *      - mode          : 0: Mild (6 dB), 1: Medium (10 dB), 2: Aggressive (15 dB)
+ *
+ * Output:
+ *      - inst          : Instance with the new aggressiveness policy applied
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_set_policy_core(NSinst_t* inst, int mode);
+
+/****************************************************************************
+ * WebRtcNs_ProcessCore(...)
+ *
+ * Do noise suppression.
+ *
+ * Input:
+ *      - inst          : Initialized noise suppression instance
+ *      - inFrameLow    : Input speech frame for lower band
+ *      - inFrameHigh   : Input speech frame for higher band
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *      - outFrameLow   : Output speech frame for lower band
+ *      - outFrameHigh  : Output speech frame for higher band
+ *
+ * Return value         :  0 - OK
+ *                        -1 - Error
+ */
+
+
+int WebRtcNs_ProcessCore(NSinst_t* inst,
+                         short* inFrameLow,
+                         short* inFrameHigh,
+                         short* outFrameLow,
+                         short* outFrameHigh);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NS_CORE_H_
diff --git a/trunk/src/modules/audio_processing/ns/nsx_core.c b/trunk/src/modules/audio_processing/ns/nsx_core.c
new file mode 100644
index 0000000..51bde0c
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/nsx_core.c
@@ -0,0 +1,2444 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "noise_suppression_x.h"
+
+#include <assert.h>
+#include <math.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+
+#include "cpu_features_wrapper.h"
+#include "nsx_core.h"
+
+// Skip first frequency bins during estimation. (0 <= value < 64)
+static const int kStartBand = 5;
+
+// Constants to compensate for shifting signal log(2^shifts).
+const WebRtc_Word16 WebRtcNsx_kLogTable[9] = {
+  0, 177, 355, 532, 710, 887, 1065, 1242, 1420
+};
+
+const WebRtc_Word16 WebRtcNsx_kCounterDiv[201] = {
+  32767, 16384, 10923, 8192, 6554, 5461, 4681,
+  4096, 3641, 3277, 2979, 2731, 2521, 2341, 2185, 2048, 1928, 1820, 1725, 1638, 1560,
+  1489, 1425, 1365, 1311, 1260, 1214, 1170, 1130, 1092, 1057, 1024, 993, 964, 936, 910,
+  886, 862, 840, 819, 799, 780, 762, 745, 728, 712, 697, 683, 669, 655, 643, 630, 618,
+  607, 596, 585, 575, 565, 555, 546, 537, 529, 520, 512, 504, 496, 489, 482, 475, 468,
+  462, 455, 449, 443, 437, 431, 426, 420, 415, 410, 405, 400, 395, 390, 386, 381, 377,
+  372, 368, 364, 360, 356, 352, 349, 345, 341, 338, 334, 331, 328, 324, 321, 318, 315,
+  312, 309, 306, 303, 301, 298, 295, 293, 290, 287, 285, 282, 280, 278, 275, 273, 271,
+  269, 266, 264, 262, 260, 258, 256, 254, 252, 250, 248, 246, 245, 243, 241, 239, 237,
+  236, 234, 232, 231, 229, 228, 226, 224, 223, 221, 220, 218, 217, 216, 214, 213, 211,
+  210, 209, 207, 206, 205, 204, 202, 201, 200, 199, 197, 196, 195, 194, 193, 192, 191,
+  189, 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, 174, 173,
+  172, 172, 171, 170, 169, 168, 167, 166, 165, 165, 164, 163
+};
+
+const WebRtc_Word16 WebRtcNsx_kLogTableFrac[256] = {
+  0,   1,   3,   4,   6,   7,   9,  10,  11,  13,  14,  16,  17,  18,  20,  21,
+  22,  24,  25,  26,  28,  29,  30,  32,  33,  34,  36,  37,  38,  40,  41,  42,
+  44,  45,  46,  47,  49,  50,  51,  52,  54,  55,  56,  57,  59,  60,  61,  62,
+  63,  65,  66,  67,  68,  69,  71,  72,  73,  74,  75,  77,  78,  79,  80,  81,
+  82,  84,  85,  86,  87,  88,  89,  90,  92,  93,  94,  95,  96,  97,  98,  99,
+  100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117,
+  118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+  134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+  150, 151, 152, 153, 154, 155, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+  165, 166, 167, 168, 169, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 178,
+  179, 180, 181, 182, 183, 184, 185, 185, 186, 187, 188, 189, 190, 191, 192, 192,
+  193, 194, 195, 196, 197, 198, 198, 199, 200, 201, 202, 203, 203, 204, 205, 206,
+  207, 208, 208, 209, 210, 211, 212, 212, 213, 214, 215, 216, 216, 217, 218, 219,
+  220, 220, 221, 222, 223, 224, 224, 225, 226, 227, 228, 228, 229, 230, 231, 231,
+  232, 233, 234, 234, 235, 236, 237, 238, 238, 239, 240, 241, 241, 242, 243, 244,
+  244, 245, 246, 247, 247, 248, 249, 249, 250, 251, 252, 252, 253, 254, 255, 255
+};
+
+static const WebRtc_Word16 kPowTableFrac[1024] = {
+  0,    1,    1,    2,    3,    3,    4,    5,
+  6,    6,    7,    8,    8,    9,   10,   10,
+  11,   12,   13,   13,   14,   15,   15,   16,
+  17,   17,   18,   19,   20,   20,   21,   22,
+  22,   23,   24,   25,   25,   26,   27,   27,
+  28,   29,   30,   30,   31,   32,   32,   33,
+  34,   35,   35,   36,   37,   37,   38,   39,
+  40,   40,   41,   42,   42,   43,   44,   45,
+  45,   46,   47,   48,   48,   49,   50,   50,
+  51,   52,   53,   53,   54,   55,   56,   56,
+  57,   58,   58,   59,   60,   61,   61,   62,
+  63,   64,   64,   65,   66,   67,   67,   68,
+  69,   69,   70,   71,   72,   72,   73,   74,
+  75,   75,   76,   77,   78,   78,   79,   80,
+  81,   81,   82,   83,   84,   84,   85,   86,
+  87,   87,   88,   89,   90,   90,   91,   92,
+  93,   93,   94,   95,   96,   96,   97,   98,
+  99,  100,  100,  101,  102,  103,  103,  104,
+  105,  106,  106,  107,  108,  109,  109,  110,
+  111,  112,  113,  113,  114,  115,  116,  116,
+  117,  118,  119,  119,  120,  121,  122,  123,
+  123,  124,  125,  126,  126,  127,  128,  129,
+  130,  130,  131,  132,  133,  133,  134,  135,
+  136,  137,  137,  138,  139,  140,  141,  141,
+  142,  143,  144,  144,  145,  146,  147,  148,
+  148,  149,  150,  151,  152,  152,  153,  154,
+  155,  156,  156,  157,  158,  159,  160,  160,
+  161,  162,  163,  164,  164,  165,  166,  167,
+  168,  168,  169,  170,  171,  172,  173,  173,
+  174,  175,  176,  177,  177,  178,  179,  180,
+  181,  181,  182,  183,  184,  185,  186,  186,
+  187,  188,  189,  190,  190,  191,  192,  193,
+  194,  195,  195,  196,  197,  198,  199,  200,
+  200,  201,  202,  203,  204,  205,  205,  206,
+  207,  208,  209,  210,  210,  211,  212,  213,
+  214,  215,  215,  216,  217,  218,  219,  220,
+  220,  221,  222,  223,  224,  225,  225,  226,
+  227,  228,  229,  230,  231,  231,  232,  233,
+  234,  235,  236,  237,  237,  238,  239,  240,
+  241,  242,  243,  243,  244,  245,  246,  247,
+  248,  249,  249,  250,  251,  252,  253,  254,
+  255,  255,  256,  257,  258,  259,  260,  261,
+  262,  262,  263,  264,  265,  266,  267,  268,
+  268,  269,  270,  271,  272,  273,  274,  275,
+  276,  276,  277,  278,  279,  280,  281,  282,
+  283,  283,  284,  285,  286,  287,  288,  289,
+  290,  291,  291,  292,  293,  294,  295,  296,
+  297,  298,  299,  299,  300,  301,  302,  303,
+  304,  305,  306,  307,  308,  308,  309,  310,
+  311,  312,  313,  314,  315,  316,  317,  318,
+  318,  319,  320,  321,  322,  323,  324,  325,
+  326,  327,  328,  328,  329,  330,  331,  332,
+  333,  334,  335,  336,  337,  338,  339,  339,
+  340,  341,  342,  343,  344,  345,  346,  347,
+  348,  349,  350,  351,  352,  352,  353,  354,
+  355,  356,  357,  358,  359,  360,  361,  362,
+  363,  364,  365,  366,  367,  367,  368,  369,
+  370,  371,  372,  373,  374,  375,  376,  377,
+  378,  379,  380,  381,  382,  383,  384,  385,
+  385,  386,  387,  388,  389,  390,  391,  392,
+  393,  394,  395,  396,  397,  398,  399,  400,
+  401,  402,  403,  404,  405,  406,  407,  408,
+  409,  410,  410,  411,  412,  413,  414,  415,
+  416,  417,  418,  419,  420,  421,  422,  423,
+  424,  425,  426,  427,  428,  429,  430,  431,
+  432,  433,  434,  435,  436,  437,  438,  439,
+  440,  441,  442,  443,  444,  445,  446,  447,
+  448,  449,  450,  451,  452,  453,  454,  455,
+  456,  457,  458,  459,  460,  461,  462,  463,
+  464,  465,  466,  467,  468,  469,  470,  471,
+  472,  473,  474,  475,  476,  477,  478,  479,
+  480,  481,  482,  483,  484,  485,  486,  487,
+  488,  489,  490,  491,  492,  493,  494,  495,
+  496,  498,  499,  500,  501,  502,  503,  504,
+  505,  506,  507,  508,  509,  510,  511,  512,
+  513,  514,  515,  516,  517,  518,  519,  520,
+  521,  522,  523,  525,  526,  527,  528,  529,
+  530,  531,  532,  533,  534,  535,  536,  537,
+  538,  539,  540,  541,  542,  544,  545,  546,
+  547,  548,  549,  550,  551,  552,  553,  554,
+  555,  556,  557,  558,  560,  561,  562,  563,
+  564,  565,  566,  567,  568,  569,  570,  571,
+  572,  574,  575,  576,  577,  578,  579,  580,
+  581,  582,  583,  584,  585,  587,  588,  589,
+  590,  591,  592,  593,  594,  595,  596,  597,
+  599,  600,  601,  602,  603,  604,  605,  606,
+  607,  608,  610,  611,  612,  613,  614,  615,
+  616,  617,  618,  620,  621,  622,  623,  624,
+  625,  626,  627,  628,  630,  631,  632,  633,
+  634,  635,  636,  637,  639,  640,  641,  642,
+  643,  644,  645,  646,  648,  649,  650,  651,
+  652,  653,  654,  656,  657,  658,  659,  660,
+  661,  662,  664,  665,  666,  667,  668,  669,
+  670,  672,  673,  674,  675,  676,  677,  678,
+  680,  681,  682,  683,  684,  685,  687,  688,
+  689,  690,  691,  692,  693,  695,  696,  697,
+  698,  699,  700,  702,  703,  704,  705,  706,
+  708,  709,  710,  711,  712,  713,  715,  716,
+  717,  718,  719,  720,  722,  723,  724,  725,
+  726,  728,  729,  730,  731,  732,  733,  735,
+  736,  737,  738,  739,  741,  742,  743,  744,
+  745,  747,  748,  749,  750,  751,  753,  754,
+  755,  756,  757,  759,  760,  761,  762,  763,
+  765,  766,  767,  768,  770,  771,  772,  773,
+  774,  776,  777,  778,  779,  780,  782,  783,
+  784,  785,  787,  788,  789,  790,  792,  793,
+  794,  795,  796,  798,  799,  800,  801,  803,
+  804,  805,  806,  808,  809,  810,  811,  813,
+  814,  815,  816,  818,  819,  820,  821,  823,
+  824,  825,  826,  828,  829,  830,  831,  833,
+  834,  835,  836,  838,  839,  840,  841,  843,
+  844,  845,  846,  848,  849,  850,  851,  853,
+  854,  855,  857,  858,  859,  860,  862,  863,
+  864,  866,  867,  868,  869,  871,  872,  873,
+  874,  876,  877,  878,  880,  881,  882,  883,
+  885,  886,  887,  889,  890,  891,  893,  894,
+  895,  896,  898,  899,  900,  902,  903,  904,
+  906,  907,  908,  909,  911,  912,  913,  915,
+  916,  917,  919,  920,  921,  923,  924,  925,
+  927,  928,  929,  931,  932,  933,  935,  936,
+  937,  938,  940,  941,  942,  944,  945,  946,
+  948,  949,  950,  952,  953,  955,  956,  957,
+  959,  960,  961,  963,  964,  965,  967,  968,
+  969,  971,  972,  973,  975,  976,  977,  979,
+  980,  981,  983,  984,  986,  987,  988,  990,
+  991,  992,  994,  995,  996,  998,  999, 1001,
+  1002, 1003, 1005, 1006, 1007, 1009, 1010, 1012,
+  1013, 1014, 1016, 1017, 1018, 1020, 1021, 1023
+};
+
+static const WebRtc_Word16 kIndicatorTable[17] = {
+  0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
+  7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
+};
+
+// hybrid Hanning & flat window
+static const WebRtc_Word16 kBlocks80w128x[128] = {
+  0,    536,   1072,   1606,   2139,   2669,   3196,   3720,   4240,   4756,   5266,
+  5771,   6270,   6762,   7246,   7723,   8192,   8652,   9102,   9543,   9974,  10394,
+  10803,  11200,  11585,  11958,  12318,  12665,  12998,  13318,  13623,  13913,  14189,
+  14449,  14694,  14924,  15137,  15334,  15515,  15679,  15826,  15956,  16069,  16165,
+  16244,  16305,  16349,  16375,  16384,  16384,  16384,  16384,  16384,  16384,  16384,
+  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,
+  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,  16384,
+  16384,  16384,  16384,  16384,  16375,  16349,  16305,  16244,  16165,  16069,  15956,
+  15826,  15679,  15515,  15334,  15137,  14924,  14694,  14449,  14189,  13913,  13623,
+  13318,  12998,  12665,  12318,  11958,  11585,  11200,  10803,  10394,   9974,   9543,
+  9102,   8652,   8192,   7723,   7246,   6762,   6270,   5771,   5266,   4756,   4240,
+  3720,   3196,   2669,   2139,   1606,   1072,    536
+};
+
+// hybrid Hanning & flat window
+static const WebRtc_Word16 kBlocks160w256x[256] = {
+  0,   268,   536,   804,  1072,  1339,  1606,  1872,
+  2139,  2404,  2669,  2933,  3196,  3459,  3720,  3981,
+  4240,  4499,  4756,  5012,  5266,  5520,  5771,  6021,
+  6270,  6517,  6762,  7005,  7246,  7486,  7723,  7959,
+  8192,  8423,  8652,  8878,  9102,  9324,  9543,  9760,
+  9974, 10185, 10394, 10600, 10803, 11003, 11200, 11394,
+  11585, 11773, 11958, 12140, 12318, 12493, 12665, 12833,
+  12998, 13160, 13318, 13472, 13623, 13770, 13913, 14053,
+  14189, 14321, 14449, 14574, 14694, 14811, 14924, 15032,
+  15137, 15237, 15334, 15426, 15515, 15599, 15679, 15754,
+  15826, 15893, 15956, 16015, 16069, 16119, 16165, 16207,
+  16244, 16277, 16305, 16329, 16349, 16364, 16375, 16382,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16384, 16384, 16384, 16384, 16384, 16384, 16384,
+  16384, 16382, 16375, 16364, 16349, 16329, 16305, 16277,
+  16244, 16207, 16165, 16119, 16069, 16015, 15956, 15893,
+  15826, 15754, 15679, 15599, 15515, 15426, 15334, 15237,
+  15137, 15032, 14924, 14811, 14694, 14574, 14449, 14321,
+  14189, 14053, 13913, 13770, 13623, 13472, 13318, 13160,
+  12998, 12833, 12665, 12493, 12318, 12140, 11958, 11773,
+  11585, 11394, 11200, 11003, 10803, 10600, 10394, 10185,
+  9974,  9760,  9543,  9324,  9102,  8878,  8652,  8423,
+  8192,  7959,  7723,  7486,  7246,  7005,  6762,  6517,
+  6270,  6021,  5771,  5520,  5266,  5012,  4756,  4499,
+  4240,  3981,  3720,  3459,  3196,  2933,  2669,  2404,
+  2139,  1872,  1606,  1339,  1072,   804,   536,   268
+};
+
+// Gain factor1 table: Input value in Q8 and output value in Q13
+// original floating point code
+//  if (gain > blim) {
+//    factor1 = 1.0 + 1.3 * (gain - blim);
+//    if (gain * factor1 > 1.0) {
+//      factor1 = 1.0 / gain;
+//    }
+//  } else {
+//    factor1 = 1.0;
+//  }
+static const WebRtc_Word16 kFactor1Table[257] = {
+  8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8233, 8274, 8315, 8355, 8396, 8436, 8475, 8515, 8554, 8592, 8631, 8669,
+  8707, 8745, 8783, 8820, 8857, 8894, 8931, 8967, 9003, 9039, 9075, 9111, 9146, 9181,
+  9216, 9251, 9286, 9320, 9354, 9388, 9422, 9456, 9489, 9523, 9556, 9589, 9622, 9655,
+  9687, 9719, 9752, 9784, 9816, 9848, 9879, 9911, 9942, 9973, 10004, 10035, 10066,
+  10097, 10128, 10158, 10188, 10218, 10249, 10279, 10308, 10338, 10368, 10397, 10426,
+  10456, 10485, 10514, 10543, 10572, 10600, 10629, 10657, 10686, 10714, 10742, 10770,
+  10798, 10826, 10854, 10882, 10847, 10810, 10774, 10737, 10701, 10666, 10631, 10596,
+  10562, 10527, 10494, 10460, 10427, 10394, 10362, 10329, 10297, 10266, 10235, 10203,
+  10173, 10142, 10112, 10082, 10052, 10023, 9994, 9965, 9936, 9908, 9879, 9851, 9824,
+  9796, 9769, 9742, 9715, 9689, 9662, 9636, 9610, 9584, 9559, 9534, 9508, 9484, 9459,
+  9434, 9410, 9386, 9362, 9338, 9314, 9291, 9268, 9245, 9222, 9199, 9176, 9154, 9132,
+  9110, 9088, 9066, 9044, 9023, 9002, 8980, 8959, 8939, 8918, 8897, 8877, 8857, 8836,
+  8816, 8796, 8777, 8757, 8738, 8718, 8699, 8680, 8661, 8642, 8623, 8605, 8586, 8568,
+  8550, 8532, 8514, 8496, 8478, 8460, 8443, 8425, 8408, 8391, 8373, 8356, 8339, 8323,
+  8306, 8289, 8273, 8256, 8240, 8224, 8208, 8192
+};
+
+// For Factor2 tables
+// original floating point code
+// if (gain > blim) {
+//   factor2 = 1.0;
+// } else {
+//   factor2 = 1.0 - 0.3 * (blim - gain);
+//   if (gain <= inst->denoiseBound) {
+//     factor2 = 1.0 - 0.3 * (blim - inst->denoiseBound);
+//   }
+// }
+//
+// Gain factor table: Input value in Q8 and output value in Q13
+static const WebRtc_Word16 kFactor2Aggressiveness1[257] = {
+  7577, 7577, 7577, 7577, 7577, 7577,
+  7577, 7577, 7577, 7577, 7577, 7577, 7577, 7577, 7577, 7577, 7577, 7596, 7614, 7632,
+  7650, 7667, 7683, 7699, 7715, 7731, 7746, 7761, 7775, 7790, 7804, 7818, 7832, 7845,
+  7858, 7871, 7884, 7897, 7910, 7922, 7934, 7946, 7958, 7970, 7982, 7993, 8004, 8016,
+  8027, 8038, 8049, 8060, 8070, 8081, 8091, 8102, 8112, 8122, 8132, 8143, 8152, 8162,
+  8172, 8182, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192
+};
+
+// Gain factor table: Input value in Q8 and output value in Q13
+static const WebRtc_Word16 kFactor2Aggressiveness2[257] = {
+  7270, 7270, 7270, 7270, 7270, 7306,
+  7339, 7369, 7397, 7424, 7448, 7472, 7495, 7517, 7537, 7558, 7577, 7596, 7614, 7632,
+  7650, 7667, 7683, 7699, 7715, 7731, 7746, 7761, 7775, 7790, 7804, 7818, 7832, 7845,
+  7858, 7871, 7884, 7897, 7910, 7922, 7934, 7946, 7958, 7970, 7982, 7993, 8004, 8016,
+  8027, 8038, 8049, 8060, 8070, 8081, 8091, 8102, 8112, 8122, 8132, 8143, 8152, 8162,
+  8172, 8182, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192
+};
+
+// Gain factor table: Input value in Q8 and output value in Q13
+static const WebRtc_Word16 kFactor2Aggressiveness3[257] = {
+  7184, 7184, 7184, 7229, 7270, 7306,
+  7339, 7369, 7397, 7424, 7448, 7472, 7495, 7517, 7537, 7558, 7577, 7596, 7614, 7632,
+  7650, 7667, 7683, 7699, 7715, 7731, 7746, 7761, 7775, 7790, 7804, 7818, 7832, 7845,
+  7858, 7871, 7884, 7897, 7910, 7922, 7934, 7946, 7958, 7970, 7982, 7993, 8004, 8016,
+  8027, 8038, 8049, 8060, 8070, 8081, 8091, 8102, 8112, 8122, 8132, 8143, 8152, 8162,
+  8172, 8182, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192,
+  8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192, 8192
+};
+
+// sum of log2(i) from table index to inst->anaLen2 in Q5
+// Note that the first table value is invalid, since log2(0) = -infinity
+static const WebRtc_Word16 kSumLogIndex[66] = {
+  0,  22917,  22917,  22885,  22834,  22770,  22696,  22613,
+  22524,  22428,  22326,  22220,  22109,  21994,  21876,  21754,
+  21629,  21501,  21370,  21237,  21101,  20963,  20822,  20679,
+  20535,  20388,  20239,  20089,  19937,  19783,  19628,  19470,
+  19312,  19152,  18991,  18828,  18664,  18498,  18331,  18164,
+  17994,  17824,  17653,  17480,  17306,  17132,  16956,  16779,
+  16602,  16423,  16243,  16063,  15881,  15699,  15515,  15331,
+  15146,  14960,  14774,  14586,  14398,  14209,  14019,  13829,
+  13637,  13445
+};
+
+// sum of log2(i)^2 from table index to inst->anaLen2 in Q2
+// Note that the first table value is invalid, since log2(0) = -infinity
+static const WebRtc_Word16 kSumSquareLogIndex[66] = {
+  0,  16959,  16959,  16955,  16945,  16929,  16908,  16881,
+  16850,  16814,  16773,  16729,  16681,  16630,  16575,  16517,
+  16456,  16392,  16325,  16256,  16184,  16109,  16032,  15952,
+  15870,  15786,  15700,  15612,  15521,  15429,  15334,  15238,
+  15140,  15040,  14938,  14834,  14729,  14622,  14514,  14404,
+  14292,  14179,  14064,  13947,  13830,  13710,  13590,  13468,
+  13344,  13220,  13094,  12966,  12837,  12707,  12576,  12444,
+  12310,  12175,  12039,  11902,  11763,  11624,  11483,  11341,
+  11198,  11054
+};
+
+// log2(table index) in Q12
+// Note that the first table value is invalid, since log2(0) = -infinity
+static const WebRtc_Word16 kLogIndex[129] = {
+  0,      0,   4096,   6492,   8192,   9511,  10588,  11499,
+  12288,  12984,  13607,  14170,  14684,  15157,  15595,  16003,
+  16384,  16742,  17080,  17400,  17703,  17991,  18266,  18529,
+  18780,  19021,  19253,  19476,  19691,  19898,  20099,  20292,
+  20480,  20662,  20838,  21010,  21176,  21338,  21496,  21649,
+  21799,  21945,  22087,  22226,  22362,  22495,  22625,  22752,
+  22876,  22998,  23117,  23234,  23349,  23462,  23572,  23680,
+  23787,  23892,  23994,  24095,  24195,  24292,  24388,  24483,
+  24576,  24668,  24758,  24847,  24934,  25021,  25106,  25189,
+  25272,  25354,  25434,  25513,  25592,  25669,  25745,  25820,
+  25895,  25968,  26041,  26112,  26183,  26253,  26322,  26390,
+  26458,  26525,  26591,  26656,  26721,  26784,  26848,  26910,
+  26972,  27033,  27094,  27154,  27213,  27272,  27330,  27388,
+  27445,  27502,  27558,  27613,  27668,  27722,  27776,  27830,
+  27883,  27935,  27988,  28039,  28090,  28141,  28191,  28241,
+  28291,  28340,  28388,  28437,  28484,  28532,  28579,  28626,
+  28672
+};
+
+// determinant of estimation matrix in Q0 corresponding to the log2 tables above
+// Note that the first table value is invalid, since log2(0) = -infinity
+static const WebRtc_Word16 kDeterminantEstMatrix[66] = {
+  0,  29814,  25574,  22640,  20351,  18469,  16873,  15491,
+  14277,  13199,  12233,  11362,  10571,   9851,   9192,   8587,
+  8030,   7515,   7038,   6596,   6186,   5804,   5448,   5115,
+  4805,   4514,   4242,   3988,   3749,   3524,   3314,   3116,
+  2930,   2755,   2590,   2435,   2289,   2152,   2022,   1900,
+  1785,   1677,   1575,   1478,   1388,   1302,   1221,   1145,
+  1073,   1005,    942,    881,    825,    771,    721,    674,
+  629,    587,    547,    510,    475,    442,    411,    382,
+  355,    330
+};
+
+// Declare function pointers.
+// These are the hot inner kernels of the fixed-point noise suppressor.
+// NOTE(review): presumably bound at initialization to either the generic C
+// implementations in this file (NoiseEstimationC, PrepareSpectrumC, ...) or
+// to platform-optimized variants — confirm in WebRtcNsx_InitCore and any
+// architecture-specific nsx_core_* files.
+NoiseEstimation WebRtcNsx_NoiseEstimation;
+PrepareSpectrum WebRtcNsx_PrepareSpectrum;
+SynthesisUpdate WebRtcNsx_SynthesisUpdate;
+AnalysisUpdate WebRtcNsx_AnalysisUpdate;
+Denormalize WebRtcNsx_Denormalize;
+CreateComplexBuffer WebRtcNsx_CreateComplexBuffer;
+
+// Update the noise estimation information.
+static void UpdateNoiseEstimate(NsxInst_t* inst, int offset) {
+  WebRtc_Word32 tmp32no1 = 0;
+  WebRtc_Word32 tmp32no2 = 0;
+  WebRtc_Word16 tmp16 = 0;
+  const WebRtc_Word16 kExp2Const = 11819; // Q13
+
+  int i = 0;
+
+  tmp16 = WebRtcSpl_MaxValueW16(inst->noiseEstLogQuantile + offset,
+                                   inst->magnLen);
+  // Guarantee a Q-domain as high as possible and still fit in int16
+  inst->qNoise = 14 - (int) WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                   kExp2Const, tmp16, 21);
+  for (i = 0; i < inst->magnLen; i++) {
+    // inst->quantile[i]=exp(inst->lquantile[offset+i]);
+    // in Q21
+    tmp32no2 = WEBRTC_SPL_MUL_16_16(kExp2Const,
+                                    inst->noiseEstLogQuantile[offset + i]);
+    tmp32no1 = (0x00200000 | (tmp32no2 & 0x001FFFFF)); // 2^21 + frac
+    tmp16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32no2, 21);
+    tmp16 -= 21;// shift 21 to get result in Q0
+    tmp16 += (WebRtc_Word16) inst->qNoise; //shift to get result in Q(qNoise)
+    if (tmp16 < 0) {
+      tmp32no1 = WEBRTC_SPL_RSHIFT_W32(tmp32no1, -tmp16);
+    } else {
+      tmp32no1 = WEBRTC_SPL_LSHIFT_W32(tmp32no1, tmp16);
+    }
+    inst->noiseEstQuantile[i] = WebRtcSpl_SatW32ToW16(tmp32no1);
+  }
+}
+
+// Noise Estimation
+static void NoiseEstimationC(NsxInst_t* inst,
+                             uint16_t* magn,
+                             uint32_t* noise,
+                             int16_t* q_noise) {
+  WebRtc_Word16 lmagn[HALF_ANAL_BLOCKL], counter, countDiv;
+  WebRtc_Word16 countProd, delta, zeros, frac;
+  WebRtc_Word16 log2, tabind, logval, tmp16, tmp16no1, tmp16no2;
+  const int16_t log2_const = 22713; // Q15
+  const int16_t width_factor = 21845;
+
+  int i, s, offset;
+
+  tabind = inst->stages - inst->normData;
+  assert(tabind < 9);
+  assert(tabind > -9);
+  if (tabind < 0) {
+    logval = -WebRtcNsx_kLogTable[-tabind];
+  } else {
+    logval = WebRtcNsx_kLogTable[tabind];
+  }
+
+  // lmagn(i)=log(magn(i))=log(2)*log2(magn(i))
+  // magn is in Q(-stages), and the real lmagn values are:
+  // real_lmagn(i)=log(magn(i)*2^stages)=log(magn(i))+log(2^stages)
+  // lmagn in Q8
+  for (i = 0; i < inst->magnLen; i++) {
+    if (magn[i]) {
+      zeros = WebRtcSpl_NormU32((WebRtc_UWord32)magn[i]);
+      frac = (WebRtc_Word16)((((WebRtc_UWord32)magn[i] << zeros)
+                              & 0x7FFFFFFF) >> 23);
+      // log2(magn(i))
+      assert(frac < 256);
+      log2 = (WebRtc_Word16)(((31 - zeros) << 8)
+                             + WebRtcNsx_kLogTableFrac[frac]);
+      // log2(magn(i))*log(2)
+      lmagn[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(log2, log2_const, 15);
+      // + log(2^stages)
+      lmagn[i] += logval;
+    } else {
+      lmagn[i] = logval;//0;
+    }
+  }
+
+  // loop over simultaneous estimates
+  for (s = 0; s < SIMULT; s++) {
+    offset = s * inst->magnLen;
+
+    // Get counter values from state
+    counter = inst->noiseEstCounter[s];
+    assert(counter < 201);
+    countDiv = WebRtcNsx_kCounterDiv[counter];
+    countProd = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(counter, countDiv);
+
+    // quant_est(...)
+    for (i = 0; i < inst->magnLen; i++) {
+      // compute delta
+      if (inst->noiseEstDensity[offset + i] > 512) {
+        // Get the value for delta by shifting intead of dividing.
+        int factor = WebRtcSpl_NormW16(inst->noiseEstDensity[offset + i]);
+        delta = (int16_t)(FACTOR_Q16 >> (14 - factor));
+      } else {
+        delta = FACTOR_Q7;
+        if (inst->blockIndex < END_STARTUP_LONG) {
+          // Smaller step size during startup. This prevents from using
+          // unrealistic values causing overflow.
+          delta = FACTOR_Q7_STARTUP;
+        }
+      }
+
+      // update log quantile estimate
+      tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delta, countDiv, 14);
+      if (lmagn[i] > inst->noiseEstLogQuantile[offset + i]) {
+        // +=QUANTILE*delta/(inst->counter[s]+1) QUANTILE=0.25, =1 in Q2
+        // CounterDiv=1/(inst->counter[s]+1) in Q15
+        tmp16 += 2;
+        tmp16no1 = WEBRTC_SPL_RSHIFT_W16(tmp16, 2);
+        inst->noiseEstLogQuantile[offset + i] += tmp16no1;
+      } else {
+        tmp16 += 1;
+        tmp16no1 = WEBRTC_SPL_RSHIFT_W16(tmp16, 1);
+        // *(1-QUANTILE), in Q2 QUANTILE=0.25, 1-0.25=0.75=3 in Q2
+        tmp16no2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, 3, 1);
+        inst->noiseEstLogQuantile[offset + i] -= tmp16no2;
+        if (inst->noiseEstLogQuantile[offset + i] < logval) {
+          // This is the smallest fixed point representation we can
+          // have, hence we limit the output.
+          inst->noiseEstLogQuantile[offset + i] = logval;
+        }
+      }
+
+      // update density estimate
+      if (WEBRTC_SPL_ABS_W16(lmagn[i] - inst->noiseEstLogQuantile[offset + i])
+          < WIDTH_Q8) {
+        tmp16no1 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                     inst->noiseEstDensity[offset + i], countProd, 15);
+        tmp16no2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                     width_factor, countDiv, 15);
+        inst->noiseEstDensity[offset + i] = tmp16no1 + tmp16no2;
+      }
+    } // end loop over magnitude spectrum
+
+    if (counter >= END_STARTUP_LONG) {
+      inst->noiseEstCounter[s] = 0;
+      if (inst->blockIndex >= END_STARTUP_LONG) {
+        UpdateNoiseEstimate(inst, offset);
+      }
+    }
+    inst->noiseEstCounter[s]++;
+
+  } // end loop over simultaneous estimates
+
+  // Sequentially update the noise during startup
+  if (inst->blockIndex < END_STARTUP_LONG) {
+    UpdateNoiseEstimate(inst, offset);
+  }
+
+  for (i = 0; i < inst->magnLen; i++) {
+    noise[i] = (WebRtc_UWord32)(inst->noiseEstQuantile[i]); // Q(qNoise)
+  }
+  (*q_noise) = (WebRtc_Word16)inst->qNoise;
+}
+
+// Filter the data in the frequency domain, and create spectrum.
+static void PrepareSpectrumC(NsxInst_t* inst, int16_t* freq_buf) {
+  int i = 0, j = 0;
+  int16_t tmp16 = 0;
+
+  for (i = 0; i < inst->magnLen; i++) {
+    inst->real[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->real[i],
+        (WebRtc_Word16)(inst->noiseSupFilter[i]), 14); // Q(normData-stages)
+    inst->imag[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->imag[i],
+        (WebRtc_Word16)(inst->noiseSupFilter[i]), 14); // Q(normData-stages)
+  }
+
+  freq_buf[0] = inst->real[0];
+  freq_buf[1] = -inst->imag[0];
+  for (i = 1, j = 2; i < inst->anaLen2; i += 1, j += 2) {
+    tmp16 = (inst->anaLen << 1) - j;
+    freq_buf[j] = inst->real[i];
+    freq_buf[j + 1] = -inst->imag[i];
+    freq_buf[tmp16] = inst->real[i];
+    freq_buf[tmp16 + 1] = inst->imag[i];
+  }
+  freq_buf[inst->anaLen] = inst->real[inst->anaLen2];
+  freq_buf[inst->anaLen + 1] = -inst->imag[inst->anaLen2];
+}
+
+// Denormalize the input buffer.
+static __inline void DenormalizeC(NsxInst_t* inst, int16_t* in, int factor) {
+  int i = 0, j = 0;
+  int32_t tmp32 = 0;
+  for (i = 0, j = 0; i < inst->anaLen; i += 1, j += 2) {
+    tmp32 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32)in[j],
+                                 factor - inst->normData);
+    inst->real[i] = WebRtcSpl_SatW32ToW16(tmp32); // Q0
+  }
+}
+
+// For the noise supression process, synthesis, read out fully processed
+// segment, and update synthesis buffer.
+static void SynthesisUpdateC(NsxInst_t* inst,
+                             int16_t* out_frame,
+                             int16_t gain_factor) {
+  int i = 0;
+  int16_t tmp16a = 0;
+  int16_t tmp16b = 0;
+  int32_t tmp32 = 0;
+
+  // synthesis
+  for (i = 0; i < inst->anaLen; i++) {
+    tmp16a = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                 inst->window[i], inst->real[i], 14); // Q0, window in Q14
+    tmp32 = WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(tmp16a, gain_factor, 13); // Q0
+    // Down shift with rounding
+    tmp16b = WebRtcSpl_SatW32ToW16(tmp32); // Q0
+    inst->synthesisBuffer[i] = WEBRTC_SPL_ADD_SAT_W16(inst->synthesisBuffer[i],
+                                                      tmp16b); // Q0
+  }
+
+  // read out fully processed segment
+  for (i = 0; i < inst->blockLen10ms; i++) {
+    out_frame[i] = inst->synthesisBuffer[i]; // Q0
+  }
+
+  // update synthesis buffer
+  WEBRTC_SPL_MEMCPY_W16(inst->synthesisBuffer,
+                        inst->synthesisBuffer + inst->blockLen10ms,
+                        inst->anaLen - inst->blockLen10ms);
+  WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer
+      + inst->anaLen - inst->blockLen10ms, inst->blockLen10ms);
+}
+
+// Update the analysis buffer for the lower band and window the data before
+// the FFT.
+// - The oldest blockLen10ms samples are shifted out of inst->analysisBuffer
+//   and new_speech (blockLen10ms samples) is appended at the end.
+// - out receives the anaLen buffered samples multiplied by the analysis
+//   window (inst->window, Q14), rounded back to Q0.
+static void AnalysisUpdateC(NsxInst_t* inst,
+                            int16_t* out,
+                            int16_t* new_speech) {
+  int i = 0;
+
+  // For lower band update analysis buffer.
+  WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer,
+                        inst->analysisBuffer + inst->blockLen10ms,
+                        inst->anaLen - inst->blockLen10ms);
+  WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer
+      + inst->anaLen - inst->blockLen10ms, new_speech, inst->blockLen10ms);
+
+  // Window data before FFT.
+  for (i = 0; i < inst->anaLen; i++) {
+    out[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+               inst->window[i], inst->analysisBuffer[i], 14); // Q0
+  }
+}
+
+// Create a complex number buffer (out[]) as the input (in[]) interleaved
+// with zeros, and normalize it: real parts are the input samples left-shifted
+// by inst->normData (so out is in Q(normData)), imaginary parts are zero.
+// out must hold 2 * inst->anaLen values.
+static __inline void CreateComplexBufferC(NsxInst_t* inst,
+                                          int16_t* in,
+                                          int16_t* out) {
+  int i = 0, j = 0;
+  for (i = 0, j = 0; i < inst->anaLen; i += 1, j += 2) {
+    out[j] = WEBRTC_SPL_LSHIFT_W16(in[i], inst->normData); // Q(normData)
+    out[j + 1] = 0; // Insert zeros in imaginary part
+  }
+}
+
+// Compute a parametric (pink/white) noise estimate for one frequency bin:
+//   noise_estimate = 2^(pinkNoiseNumerator + pinkNoiseExp * log2(freq_index))
+// pink_noise_exp_avg is in Q15, pink_noise_num_avg in Q11; kLogIndex supplies
+// log2(freq_index) in Q11. The averaged estimate is returned in
+// *noise_estimate_avg (Q(minNorm-stages)); *noise_estimate is that value
+// scaled up by (blockIndex + 1) to match initMagnEst, which is not block
+// averaged.
+// NOTE(review): when the computed exponent tmp32no1 is <= 0 the output
+// pointers are left unmodified — presumably callers pre-initialize them;
+// verify at the call sites.
+void WebRtcNsx_CalcParametricNoiseEstimate(NsxInst_t* inst,
+                                           WebRtc_Word16 pink_noise_exp_avg,
+                                           WebRtc_Word32 pink_noise_num_avg,
+                                           int freq_index,
+                                           WebRtc_UWord32* noise_estimate,
+                                           WebRtc_UWord32* noise_estimate_avg) {
+  WebRtc_Word32 tmp32no1 = 0;
+  WebRtc_Word32 tmp32no2 = 0;
+
+  WebRtc_Word16 int_part = 0;
+  WebRtc_Word16 frac_part = 0;
+
+  // Use pink noise estimate
+  // noise_estimate = 2^(pinkNoiseNumerator + pinkNoiseExp * log2(j))
+  assert(freq_index >= 0);
+  assert(freq_index < 129);
+  tmp32no2 = WEBRTC_SPL_MUL_16_16(pink_noise_exp_avg, kLogIndex[freq_index]); // Q26
+  tmp32no2 = WEBRTC_SPL_RSHIFT_W32(tmp32no2, 15); // Q11
+  tmp32no1 = pink_noise_num_avg - tmp32no2; // Q11
+
+  // Calculate output: 2^tmp32no1
+  // Output in Q(minNorm-stages)
+  tmp32no1 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)(inst->minNorm - inst->stages), 11);
+  if (tmp32no1 > 0) {
+    // Split the Q11 exponent into integer and fractional parts.
+    int_part = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 11);
+    frac_part = (WebRtc_Word16)(tmp32no1 & 0x000007ff); // Q11
+    // Piecewise linear approximation of 'b' in
+    // 2^(int_part+frac_part) = 2^int_part * (1 + b)
+    // 'b' is given in Q11 and below stored in frac_part.
+    if (WEBRTC_SPL_RSHIFT_W16(frac_part, 10)) {
+      // Upper fractional part
+      tmp32no2 = WEBRTC_SPL_MUL_16_16(2048 - frac_part, 1244); // Q21
+      tmp32no2 = 2048 - WEBRTC_SPL_RSHIFT_W32(tmp32no2, 10);
+    } else {
+      // Lower fractional part
+      tmp32no2 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(frac_part, 804), 10);
+    }
+    // Shift fractional part to Q(minNorm-stages)
+    tmp32no2 = WEBRTC_SPL_SHIFT_W32(tmp32no2, int_part - 11);
+    *noise_estimate_avg = WEBRTC_SPL_LSHIFT_U32(1, int_part) + (WebRtc_UWord32)tmp32no2;
+    // Scale up to initMagnEst, which is not block averaged
+    *noise_estimate = (*noise_estimate_avg) * (WebRtc_UWord32)(inst->blockIndex + 1);
+  }
+}
+
+// Initialize the fixed-point noise suppression instance state.
+// Returns 0 on success, -1 if inst is NULL or fs is not one of
+// 8000/16000/32000 Hz (32 kHz shares the 16 kHz analysis setup; the upper
+// band is handled separately via dataBufHBFX). Clears all buffers and
+// feature statistics, sets default thresholds/weights, installs the plain-C
+// implementations of the processing function pointers (optionally replaced
+// by NEON versions), and applies the default policy (mode 0).
+WebRtc_Word32 WebRtcNsx_InitCore(NsxInst_t* inst, WebRtc_UWord32 fs) {
+  int i;
+
+  // check for valid pointer
+  if (inst == NULL) {
+    return -1;
+  }
+
+  // Initialization of struct: only 8/16/32 kHz sample rates are supported.
+  if (fs == 8000 || fs == 16000 || fs == 32000) {
+    inst->fs = fs;
+  } else {
+    return -1;
+  }
+
+  if (fs == 8000) {
+    inst->blockLen10ms = 80;
+    inst->anaLen = 128;
+    inst->stages = 7;
+    inst->window = kBlocks80w128x;
+    inst->thresholdLogLrt = 131072; //default threshold for LRT feature
+    inst->maxLrt = 0x0040000;
+    inst->minLrt = 52429;
+  } else if (fs == 16000) {
+    inst->blockLen10ms = 160;
+    inst->anaLen = 256;
+    inst->stages = 8;
+    inst->window = kBlocks160w256x;
+    inst->thresholdLogLrt = 212644; //default threshold for LRT feature
+    inst->maxLrt = 0x0080000;
+    inst->minLrt = 104858;
+  } else if (fs == 32000) {
+    // Lower band processed like 16 kHz; upper band handled via dataBufHBFX.
+    inst->blockLen10ms = 160;
+    inst->anaLen = 256;
+    inst->stages = 8;
+    inst->window = kBlocks160w256x;
+    inst->thresholdLogLrt = 212644; //default threshold for LRT feature
+    inst->maxLrt = 0x0080000;
+    inst->minLrt = 104858;
+  }
+  inst->anaLen2 = WEBRTC_SPL_RSHIFT_W16(inst->anaLen, 1); // anaLen / 2
+  inst->magnLen = inst->anaLen2 + 1; // number of spectrum bins (incl. Nyquist)
+
+  WebRtcSpl_ZerosArrayW16(inst->analysisBuffer, ANAL_BLOCKL_MAX);
+  WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer, ANAL_BLOCKL_MAX);
+
+  // for HB (high band) processing
+  WebRtcSpl_ZerosArrayW16(inst->dataBufHBFX, ANAL_BLOCKL_MAX);
+  // for quantile noise estimation
+  WebRtcSpl_ZerosArrayW16(inst->noiseEstQuantile, HALF_ANAL_BLOCKL);
+  for (i = 0; i < SIMULT * HALF_ANAL_BLOCKL; i++) {
+    inst->noiseEstLogQuantile[i] = 2048; // Q8
+    inst->noiseEstDensity[i] = 153; // Q9
+  }
+  // Stagger the counters of the SIMULT parallel quantile estimators.
+  for (i = 0; i < SIMULT; i++) {
+    inst->noiseEstCounter[i] = (WebRtc_Word16)(END_STARTUP_LONG * (i + 1)) / SIMULT;
+  }
+
+  // Initialize suppression filter with ones (Q14(1.0) = 16384: no suppression)
+  WebRtcSpl_MemSetW16((WebRtc_Word16*)inst->noiseSupFilter, 16384, HALF_ANAL_BLOCKL);
+
+  // Set the aggressiveness: default
+  inst->aggrMode = 0;
+
+  // initialize variables for new method
+  inst->priorNonSpeechProb = 8192; // Q14(0.5) prior probability for speech/noise
+  for (i = 0; i < HALF_ANAL_BLOCKL; i++) {
+    inst->prevMagnU16[i] = 0;
+    inst->prevNoiseU32[i] = 0; // previous noise-spectrum
+    inst->logLrtTimeAvgW32[i] = 0; // smooth LR ratio
+    inst->avgMagnPause[i] = 0; // conservative noise spectrum estimate
+    inst->initMagnEst[i] = 0; // initial average magnitude spectrum
+  }
+
+  // feature quantities: features start exactly at their thresholds so the
+  // indicator functions are neutral until statistics accumulate
+  inst->thresholdSpecDiff = 50; // threshold for difference feature: determined on-line
+  inst->thresholdSpecFlat = 20480; // threshold for flatness: determined on-line
+  inst->featureLogLrt = inst->thresholdLogLrt; // average LRT factor (= threshold)
+  inst->featureSpecFlat = inst->thresholdSpecFlat; // spectral flatness (= threshold)
+  inst->featureSpecDiff = inst->thresholdSpecDiff; // spectral difference (= threshold)
+  inst->weightLogLrt = 6; // default weighting par for LRT feature
+  inst->weightSpecFlat = 0; // default weighting par for spectral flatness feature
+  inst->weightSpecDiff = 0; // default weighting par for spectral difference feature
+
+  inst->curAvgMagnEnergy = 0; // window time-average of input magnitude spectrum
+  inst->timeAvgMagnEnergy = 0; // normalization for spectral difference
+  inst->timeAvgMagnEnergyTmp = 0; // normalization for spectral difference
+
+  // histogram quantities: used to estimate/update thresholds for features
+  WebRtcSpl_ZerosArrayW16(inst->histLrt, HIST_PAR_EST);
+  WebRtcSpl_ZerosArrayW16(inst->histSpecDiff, HIST_PAR_EST);
+  WebRtcSpl_ZerosArrayW16(inst->histSpecFlat, HIST_PAR_EST);
+
+  inst->blockIndex = -1; // frame counter (incremented before first use)
+
+  //inst->modelUpdate    = 500;   //window for update
+  inst->modelUpdate = (1 << STAT_UPDATES); // window for update
+  inst->cntThresUpdate = 0; // counter feature thresholds updates
+
+  inst->sumMagn = 0;
+  inst->magnEnergy = 0;
+  inst->prevQMagn = 0;
+  inst->qNoise = 0;
+  inst->prevQNoise = 0;
+
+  inst->energyIn = 0;
+  inst->scaleEnergyIn = 0;
+
+  inst->whiteNoiseLevel = 0;
+  inst->pinkNoiseNumerator = 0;
+  inst->pinkNoiseExp = 0;
+  inst->minNorm = 15; // Start with full scale
+  inst->zeroInputSignal = 0;
+
+  // default mode (mildest suppression policy)
+  WebRtcNsx_set_policy_core(inst, 0);
+
+#ifdef NS_FILEDEBUG
+  inst->infile = fopen("indebug.pcm", "wb");
+  inst->outfile = fopen("outdebug.pcm", "wb");
+  inst->file1 = fopen("file1.pcm", "wb");
+  inst->file2 = fopen("file2.pcm", "wb");
+  inst->file3 = fopen("file3.pcm", "wb");
+  inst->file4 = fopen("file4.pcm", "wb");
+  inst->file5 = fopen("file5.pcm", "wb");
+#endif
+
+  // Initialize function pointers with the plain-C implementations.
+  WebRtcNsx_NoiseEstimation = NoiseEstimationC;
+  WebRtcNsx_PrepareSpectrum = PrepareSpectrumC;
+  WebRtcNsx_SynthesisUpdate = SynthesisUpdateC;
+  WebRtcNsx_AnalysisUpdate = AnalysisUpdateC;
+  WebRtcNsx_Denormalize = DenormalizeC;
+  WebRtcNsx_CreateComplexBuffer = CreateComplexBufferC;
+
+// Override with NEON implementations when available (detected at runtime or
+// selected at compile time).
+#ifdef WEBRTC_DETECT_ARM_NEON
+    uint64_t features = WebRtc_GetCPUFeaturesARM();
+    if ((features & kCPUFeatureNEON) != 0)
+    {
+        WebRtcNsx_InitNeon();
+    }
+#elif defined(WEBRTC_ARCH_ARM_NEON)
+    WebRtcNsx_InitNeon();
+#endif
+
+  inst->initFlag = 1;
+
+  return 0;
+}
+
+// Set the suppression aggressiveness policy.
+// mode 0..3 selects increasingly aggressive suppression: a larger overdrive
+// (Q8) and a smaller denoiseBound (Q14 floor on the suppression gain).
+// gainMap enables gain compensation for modes 1-3.
+// Returns 0 on success, -1 for an out-of-range mode.
+// NOTE(review): mode 0 does not set inst->factor2Table — presumably it is
+// unused when gainMap == 0; confirm against the processing code.
+int WebRtcNsx_set_policy_core(NsxInst_t* inst, int mode) {
+  // allow for modes:0,1,2,3
+  if (mode < 0 || mode > 3) {
+    return -1;
+  }
+
+  inst->aggrMode = mode;
+  if (mode == 0) {
+    inst->overdrive = 256; // Q8(1.0)
+    inst->denoiseBound = 8192; // Q14(0.5)
+    inst->gainMap = 0; // No gain compensation
+  } else if (mode == 1) {
+    inst->overdrive = 256; // Q8(1.0)
+    inst->denoiseBound = 4096; // Q14(0.25)
+    inst->factor2Table = kFactor2Aggressiveness1;
+    inst->gainMap = 1;
+  } else if (mode == 2) {
+    inst->overdrive = 282; // ~= Q8(1.1)
+    inst->denoiseBound = 2048; // Q14(0.125)
+    inst->factor2Table = kFactor2Aggressiveness2;
+    inst->gainMap = 1;
+  } else if (mode == 3) {
+    inst->overdrive = 320; // Q8(1.25)
+    inst->denoiseBound = 1475; // ~= Q14(0.09)
+    inst->factor2Table = kFactor2Aggressiveness3;
+    inst->gainMap = 1;
+  }
+  return 0;
+}
+
+// Extract thresholds for feature parameters.
+// Histograms are computed over some window_size (given by window_pars);
+// thresholds and weights are extracted every window.
+// flag == 0: only update the three feature histograms (LRT, spectral
+//            flatness, spectral difference) with the current feature values.
+// flag == 1: compute thresholds/weights from the accumulated histograms and
+//            reset the histograms for the next window.
+// Results are stored in inst->thresholdLogLrt / thresholdSpecFlat /
+// thresholdSpecDiff and inst->weightLogLrt / weightSpecFlat / weightSpecDiff.
+void WebRtcNsx_FeatureParameterExtraction(NsxInst_t* inst, int flag) {
+  WebRtc_UWord32 tmpU32;
+  WebRtc_UWord32 histIndex;
+  WebRtc_UWord32 posPeak1SpecFlatFX, posPeak2SpecFlatFX;
+  WebRtc_UWord32 posPeak1SpecDiffFX, posPeak2SpecDiffFX;
+
+  WebRtc_Word32 tmp32;
+  WebRtc_Word32 fluctLrtFX, thresFluctLrtFX;
+  WebRtc_Word32 avgHistLrtFX, avgSquareHistLrtFX, avgHistLrtComplFX;
+
+  WebRtc_Word16 j;
+  WebRtc_Word16 numHistLrt;
+
+  int i;
+  int useFeatureSpecFlat, useFeatureSpecDiff, featureSum;
+  int maxPeak1, maxPeak2;
+  int weightPeak1SpecFlat, weightPeak2SpecFlat;
+  int weightPeak1SpecDiff, weightPeak2SpecDiff;
+
+  // update histograms
+  if (!flag) {
+    // LRT
+    // Type casting to UWord32 is safe since negative values will not be wrapped to larger
+    // values than HIST_PAR_EST
+    histIndex = (WebRtc_UWord32)(inst->featureLogLrt);
+    if (histIndex < HIST_PAR_EST) {
+      inst->histLrt[histIndex]++;
+    }
+    // Spectral flatness
+    // (inst->featureSpecFlat*20)>>10 = (inst->featureSpecFlat*5)>>8
+    histIndex = WEBRTC_SPL_RSHIFT_U32(inst->featureSpecFlat * 5, 8);
+    if (histIndex < HIST_PAR_EST) {
+      inst->histSpecFlat[histIndex]++;
+    }
+    // Spectral difference
+    histIndex = HIST_PAR_EST;
+    if (inst->timeAvgMagnEnergy > 0) {
+      // Guard against division by zero
+      // If timeAvgMagnEnergy == 0 we have no normalizing statistics and
+      // therefore can't update the histogram
+      histIndex = WEBRTC_SPL_UDIV((inst->featureSpecDiff * 5) >> inst->stages,
+                                  inst->timeAvgMagnEnergy);
+    }
+    if (histIndex < HIST_PAR_EST) {
+      inst->histSpecDiff[histIndex]++;
+    }
+  }
+
+  // extract parameters for speech/noise probability
+  if (flag) {
+    useFeatureSpecDiff = 1;
+    // for LRT feature:
+    // compute the average over inst->featureExtractionParams.rangeAvgHistLrt
+    // (first BIN_SIZE_LRT bins); j = 2*i+1 is the bin-center value.
+    avgHistLrtFX = 0;
+    avgSquareHistLrtFX = 0;
+    numHistLrt = 0;
+    for (i = 0; i < BIN_SIZE_LRT; i++) {
+      j = (2 * i + 1);
+      tmp32 = WEBRTC_SPL_MUL_16_16(inst->histLrt[i], j);
+      avgHistLrtFX += tmp32;
+      numHistLrt += inst->histLrt[i];
+      avgSquareHistLrtFX += WEBRTC_SPL_MUL_32_16(tmp32, j);
+    }
+    // Continue the sums over the remaining bins (complete-range average and
+    // square sum, used for the fluctuation measure below).
+    avgHistLrtComplFX = avgHistLrtFX;
+    for (; i < HIST_PAR_EST; i++) {
+      j = (2 * i + 1);
+      tmp32 = WEBRTC_SPL_MUL_16_16(inst->histLrt[i], j);
+      avgHistLrtComplFX += tmp32;
+      avgSquareHistLrtFX += WEBRTC_SPL_MUL_32_16(tmp32, j);
+    }
+    // Fluctuation ~ variance of the LRT histogram (unnormalized).
+    fluctLrtFX = WEBRTC_SPL_MUL(avgSquareHistLrtFX, numHistLrt);
+    fluctLrtFX -= WEBRTC_SPL_MUL(avgHistLrtFX, avgHistLrtComplFX);
+    thresFluctLrtFX = THRES_FLUCT_LRT * numHistLrt;
+    // get threshold for LRT feature:
+    tmpU32 = (FACTOR_1_LRT_DIFF * (WebRtc_UWord32)avgHistLrtFX);
+    if ((fluctLrtFX < thresFluctLrtFX) || (numHistLrt == 0) ||
+        (tmpU32 > (WebRtc_UWord32)(100 * numHistLrt))) {
+      // very low fluctuation, so likely noise
+      inst->thresholdLogLrt = inst->maxLrt;
+    } else {
+      tmp32 = (WebRtc_Word32)((tmpU32 << (9 + inst->stages)) / numHistLrt /
+                              25);
+      // check if value is within min/max range
+      inst->thresholdLogLrt = WEBRTC_SPL_SAT(inst->maxLrt,
+                                             tmp32,
+                                             inst->minLrt);
+    }
+    if (fluctLrtFX < thresFluctLrtFX) {
+      // Do not use difference feature if fluctuation of LRT feature is very low:
+      // most likely just noise state
+      useFeatureSpecDiff = 0;
+    }
+
+    // for spectral flatness and spectral difference: compute the main peaks of histogram
+    maxPeak1 = 0;
+    maxPeak2 = 0;
+    posPeak1SpecFlatFX = 0;
+    posPeak2SpecFlatFX = 0;
+    weightPeak1SpecFlat = 0;
+    weightPeak2SpecFlat = 0;
+
+    // peaks for flatness: track the two highest histogram bins
+    for (i = 0; i < HIST_PAR_EST; i++) {
+      if (inst->histSpecFlat[i] > maxPeak1) {
+        // Found new "first" peak; previous first peak becomes second
+        maxPeak2 = maxPeak1;
+        weightPeak2SpecFlat = weightPeak1SpecFlat;
+        posPeak2SpecFlatFX = posPeak1SpecFlatFX;
+
+        maxPeak1 = inst->histSpecFlat[i];
+        weightPeak1SpecFlat = inst->histSpecFlat[i];
+        posPeak1SpecFlatFX = (WebRtc_UWord32)(2 * i + 1);
+      } else if (inst->histSpecFlat[i] > maxPeak2) {
+        // Found new "second" peak
+        maxPeak2 = inst->histSpecFlat[i];
+        weightPeak2SpecFlat = inst->histSpecFlat[i];
+        posPeak2SpecFlatFX = (WebRtc_UWord32)(2 * i + 1);
+      }
+    }
+
+    // for spectral flatness feature
+    useFeatureSpecFlat = 1;
+    // merge the two peaks if they are close
+    if ((posPeak1SpecFlatFX - posPeak2SpecFlatFX < LIM_PEAK_SPACE_FLAT_DIFF)
+        && (weightPeak2SpecFlat * LIM_PEAK_WEIGHT_FLAT_DIFF > weightPeak1SpecFlat)) {
+      weightPeak1SpecFlat += weightPeak2SpecFlat;
+      posPeak1SpecFlatFX = (posPeak1SpecFlatFX + posPeak2SpecFlatFX) >> 1;
+    }
+    // reject if weight of peaks is not large enough, or peak value too small
+    if (weightPeak1SpecFlat < THRES_WEIGHT_FLAT_DIFF || posPeak1SpecFlatFX
+        < THRES_PEAK_FLAT) {
+      useFeatureSpecFlat = 0;
+    } else { // if selected, get the threshold
+      // compute the threshold and check if value is within min/max range
+      inst->thresholdSpecFlat = WEBRTC_SPL_SAT(MAX_FLAT_Q10, FACTOR_2_FLAT_Q10
+                                               * posPeak1SpecFlatFX, MIN_FLAT_Q10); //Q10
+    }
+    // done with flatness feature
+
+    if (useFeatureSpecDiff) {
+      // compute two peaks for spectral difference (same scheme as flatness)
+      maxPeak1 = 0;
+      maxPeak2 = 0;
+      posPeak1SpecDiffFX = 0;
+      posPeak2SpecDiffFX = 0;
+      weightPeak1SpecDiff = 0;
+      weightPeak2SpecDiff = 0;
+      // peaks for spectral difference
+      for (i = 0; i < HIST_PAR_EST; i++) {
+        if (inst->histSpecDiff[i] > maxPeak1) {
+          // Found new "first" peak
+          maxPeak2 = maxPeak1;
+          weightPeak2SpecDiff = weightPeak1SpecDiff;
+          posPeak2SpecDiffFX = posPeak1SpecDiffFX;
+
+          maxPeak1 = inst->histSpecDiff[i];
+          weightPeak1SpecDiff = inst->histSpecDiff[i];
+          posPeak1SpecDiffFX = (WebRtc_UWord32)(2 * i + 1);
+        } else if (inst->histSpecDiff[i] > maxPeak2) {
+          // Found new "second" peak
+          maxPeak2 = inst->histSpecDiff[i];
+          weightPeak2SpecDiff = inst->histSpecDiff[i];
+          posPeak2SpecDiffFX = (WebRtc_UWord32)(2 * i + 1);
+        }
+      }
+
+      // merge the two peaks if they are close
+      if ((posPeak1SpecDiffFX - posPeak2SpecDiffFX < LIM_PEAK_SPACE_FLAT_DIFF)
+          && (weightPeak2SpecDiff * LIM_PEAK_WEIGHT_FLAT_DIFF > weightPeak1SpecDiff)) {
+        weightPeak1SpecDiff += weightPeak2SpecDiff;
+        posPeak1SpecDiffFX = (posPeak1SpecDiffFX + posPeak2SpecDiffFX) >> 1;
+      }
+      // get the threshold value and check if value is within min/max range
+      inst->thresholdSpecDiff = WEBRTC_SPL_SAT(MAX_DIFF, FACTOR_1_LRT_DIFF
+                                               * posPeak1SpecDiffFX, MIN_DIFF); //5x bigger
+      // reject if weight of peaks is not large enough
+      if (weightPeak1SpecDiff < THRES_WEIGHT_FLAT_DIFF) {
+        useFeatureSpecDiff = 0;
+      }
+      // done with spectral difference feature
+    }
+
+    // select the weights between the features
+    // inst->priorModelPars[4] is weight for LRT: always selected
+    // Weights sum to 6 split evenly across the selected features.
+    featureSum = 6 / (1 + useFeatureSpecFlat + useFeatureSpecDiff);
+    inst->weightLogLrt = featureSum;
+    inst->weightSpecFlat = useFeatureSpecFlat * featureSum;
+    inst->weightSpecDiff = useFeatureSpecDiff * featureSum;
+
+    // set histograms to zero for next update
+    WebRtcSpl_ZerosArrayW16(inst->histLrt, HIST_PAR_EST);
+    WebRtcSpl_ZerosArrayW16(inst->histSpecDiff, HIST_PAR_EST);
+    WebRtcSpl_ZerosArrayW16(inst->histSpecFlat, HIST_PAR_EST);
+  } // end of flag == 1
+}
+
+
+// Compute spectral flatness on the input spectrum.
+// magn is the magnitude spectrum (bin 0 is excluded from the measure).
+// The flatness (ratio of geometric to arithmetic mean) is time-averaged
+// into inst->featureSpecFlat (Q10).
+// If any bin is zero, the geometric mean is zero; the feature is then just
+// decayed toward zero and the function returns early.
+void WebRtcNsx_ComputeSpectralFlatness(NsxInst_t* inst, WebRtc_UWord16* magn) {
+  WebRtc_UWord32 tmpU32;
+  WebRtc_UWord32 avgSpectralFlatnessNum, avgSpectralFlatnessDen;
+
+  WebRtc_Word32 tmp32;
+  WebRtc_Word32 currentSpectralFlatness, logCurSpectralFlatness;
+
+  WebRtc_Word16 zeros, frac, intPart;
+
+  int i;
+
+  // for flatness
+  avgSpectralFlatnessNum = 0;
+  avgSpectralFlatnessDen = inst->sumMagn - (WebRtc_UWord32)magn[0]; // Q(normData-stages)
+
+  // compute log of ratio of the geometric to arithmetic mean: check for log(0) case
+  // flatness = exp( sum(log(magn[i]))/N - log(sum(magn[i])/N) )
+  //          = exp( sum(log(magn[i]))/N ) * N / sum(magn[i])
+  //          = 2^( sum(log2(magn[i]))/N - (log2(sum(magn[i])) - log2(N)) ) [This is used]
+  for (i = 1; i < inst->magnLen; i++) {
+    // First bin is excluded from spectrum measures. Number of bins is now a power of 2
+    if (magn[i]) {
+      // log2(magn[i]) via normalization + table lookup on the top 8 fraction
+      // bits.
+      zeros = WebRtcSpl_NormU32((WebRtc_UWord32)magn[i]);
+      frac = (WebRtc_Word16)(((WebRtc_UWord32)((WebRtc_UWord32)(magn[i]) << zeros)
+                              & 0x7FFFFFFF) >> 23);
+      // log2(magn(i))
+      assert(frac < 256);
+      tmpU32 = (WebRtc_UWord32)(((31 - zeros) << 8)
+                                + WebRtcNsx_kLogTableFrac[frac]); // Q8
+      avgSpectralFlatnessNum += tmpU32; // Q8
+    } else {
+      // if at least one frequency component is zero, treat separately:
+      // decay the feature toward zero and bail out
+      tmpU32 = WEBRTC_SPL_UMUL_32_16(inst->featureSpecFlat, SPECT_FLAT_TAVG_Q14); // Q24
+      inst->featureSpecFlat -= WEBRTC_SPL_RSHIFT_U32(tmpU32, 14); // Q10
+      return;
+    }
+  }
+  // ratio and inverse log: check for case of log(0)
+  zeros = WebRtcSpl_NormU32(avgSpectralFlatnessDen);
+  frac = (WebRtc_Word16)(((avgSpectralFlatnessDen << zeros) & 0x7FFFFFFF) >> 23);
+  // log2(avgSpectralFlatnessDen)
+  assert(frac < 256);
+  tmp32 = (WebRtc_Word32)(((31 - zeros) << 8) + WebRtcNsx_kLogTableFrac[frac]); // Q8
+  // log2(flatness) = sum(log2(magn))/N + log2(N) - log2(sum(magn)),
+  // with the /N division realized as shifts by (stages - 1).
+  logCurSpectralFlatness = (WebRtc_Word32)avgSpectralFlatnessNum;
+  logCurSpectralFlatness += ((WebRtc_Word32)(inst->stages - 1) << (inst->stages + 7)); // Q(8+stages-1)
+  logCurSpectralFlatness -= (tmp32 << (inst->stages - 1));
+  logCurSpectralFlatness = WEBRTC_SPL_LSHIFT_W32(logCurSpectralFlatness, 10 - inst->stages); // Q17
+  // 2^logCurSpectralFlatness via mantissa (1.fraction in Q17) and exponent
+  tmp32 = (WebRtc_Word32)(0x00020000 | (WEBRTC_SPL_ABS_W32(logCurSpectralFlatness)
+                                        & 0x0001FFFF)); //Q17
+  intPart = -(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(logCurSpectralFlatness, 17);
+  intPart += 7; // Shift 7 to get the output in Q10 (from Q17 = -17+10)
+  if (intPart > 0) {
+    currentSpectralFlatness = WEBRTC_SPL_RSHIFT_W32(tmp32, intPart);
+  } else {
+    currentSpectralFlatness = WEBRTC_SPL_LSHIFT_W32(tmp32, -intPart);
+  }
+
+  // time average update of spectral flatness feature
+  tmp32 = currentSpectralFlatness - (WebRtc_Word32)inst->featureSpecFlat; // Q10
+  tmp32 = WEBRTC_SPL_MUL_32_16(SPECT_FLAT_TAVG_Q14, tmp32); // Q24
+  inst->featureSpecFlat = (WebRtc_UWord32)((WebRtc_Word32)inst->featureSpecFlat
+                                           + WEBRTC_SPL_RSHIFT_W32(tmp32, 14)); // Q10
+  // done with flatness feature
+}
+
+
+// Compute the difference measure between the input spectrum and a
+// template/learned noise spectrum.
+// magnIn is the input magnitude spectrum; the reference/template spectrum is
+// inst->avgMagnPause[i]. The (normalized) spectral difference is
+// time-averaged into inst->featureSpecDiff.
+void WebRtcNsx_ComputeSpectralDifference(NsxInst_t* inst, WebRtc_UWord16* magnIn) {
+  // This is to be calculated:
+  // avgDiffNormMagn = var(magnIn) - cov(magnIn, magnAvgPause)^2 / var(magnAvgPause)
+
+  WebRtc_UWord32 tmpU32no1, tmpU32no2;
+  WebRtc_UWord32 varMagnUFX, varPauseUFX, avgDiffNormMagnUFX;
+
+  WebRtc_Word32 tmp32no1, tmp32no2;
+  WebRtc_Word32 avgPauseFX, avgMagnFX, covMagnPauseFX;
+  WebRtc_Word32 maxPause, minPause;
+
+  WebRtc_Word16 tmp16no1;
+
+  int i, norm32, nShifts;
+
+  avgPauseFX = 0;
+  maxPause = 0;
+  minPause = inst->avgMagnPause[0]; // Q(prevQMagn)
+  // compute average quantities
+  for (i = 0; i < inst->magnLen; i++) {
+    // Compute mean of magn_pause
+    avgPauseFX += inst->avgMagnPause[i]; // in Q(prevQMagn)
+    maxPause = WEBRTC_SPL_MAX(maxPause, inst->avgMagnPause[i]);
+    minPause = WEBRTC_SPL_MIN(minPause, inst->avgMagnPause[i]);
+  }
+  // normalize by replacing div of "inst->magnLen" with "inst->stages-1" shifts
+  avgPauseFX = WEBRTC_SPL_RSHIFT_W32(avgPauseFX, inst->stages - 1);
+  avgMagnFX = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(inst->sumMagn, inst->stages - 1);
+  // Largest possible deviation in magnPause for (co)var calculations
+  tmp32no1 = WEBRTC_SPL_MAX(maxPause - avgPauseFX, avgPauseFX - minPause);
+  // Get number of shifts to make sure we don't get wrap around in varPause
+  nShifts = WEBRTC_SPL_MAX(0, 10 + inst->stages - WebRtcSpl_NormW32(tmp32no1));
+
+  varMagnUFX = 0;
+  varPauseUFX = 0;
+  covMagnPauseFX = 0;
+  for (i = 0; i < inst->magnLen; i++) {
+    // Compute var and cov of magn and magn_pause
+    tmp16no1 = (WebRtc_Word16)((WebRtc_Word32)magnIn[i] - avgMagnFX);
+    tmp32no2 = inst->avgMagnPause[i] - avgPauseFX;
+    varMagnUFX += (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(tmp16no1, tmp16no1); // Q(2*qMagn)
+    tmp32no1 = WEBRTC_SPL_MUL_32_16(tmp32no2, tmp16no1); // Q(prevQMagn+qMagn)
+    covMagnPauseFX += tmp32no1; // Q(prevQMagn+qMagn)
+    tmp32no1 = WEBRTC_SPL_RSHIFT_W32(tmp32no2, nShifts); // Q(prevQMagn-minPause)
+    varPauseUFX += (WebRtc_UWord32)WEBRTC_SPL_MUL(tmp32no1, tmp32no1); // Q(2*(prevQMagn-minPause))
+  }
+  // update of average magnitude spectrum: Q(-2*stages) and averaging replaced by shifts
+  inst->curAvgMagnEnergy += WEBRTC_SPL_RSHIFT_U32(inst->magnEnergy, 2 * inst->normData
+                                                  + inst->stages - 1);
+
+  avgDiffNormMagnUFX = varMagnUFX; // Q(2*qMagn)
+  if ((varPauseUFX) && (covMagnPauseFX)) {
+    // Subtract cov^2 / var(pause), with normalization to keep precision.
+    tmpU32no1 = (WebRtc_UWord32)WEBRTC_SPL_ABS_W32(covMagnPauseFX); // Q(prevQMagn+qMagn)
+    norm32 = WebRtcSpl_NormU32(tmpU32no1) - 16;
+    if (norm32 > 0) {
+      tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(tmpU32no1, norm32); // Q(prevQMagn+qMagn+norm32)
+    } else {
+      tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, -norm32); // Q(prevQMagn+qMagn+norm32)
+    }
+    tmpU32no2 = WEBRTC_SPL_UMUL(tmpU32no1, tmpU32no1); // Q(2*(prevQMagn+qMagn-norm32))
+
+    nShifts += norm32;
+    nShifts <<= 1;
+    if (nShifts < 0) {
+      varPauseUFX >>= (-nShifts); // Q(2*(qMagn+norm32+minPause))
+      nShifts = 0;
+    }
+    if (varPauseUFX > 0) {
+      // Q(2*(qMagn+norm32-16+minPause))
+      tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no2, varPauseUFX);
+      tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, nShifts);
+
+      // Q(2*qMagn); clamp so the unsigned subtraction cannot wrap
+      avgDiffNormMagnUFX -= WEBRTC_SPL_MIN(avgDiffNormMagnUFX, tmpU32no1);
+    } else {
+      avgDiffNormMagnUFX = 0;
+    }
+  }
+  // normalize and compute time average update of difference feature
+  tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(avgDiffNormMagnUFX, 2 * inst->normData);
+  // Move featureSpecDiff a SPECT_DIFF_TAVG_Q8 fraction toward the new value,
+  // split into two branches to stay in unsigned arithmetic.
+  if (inst->featureSpecDiff > tmpU32no1) {
+    tmpU32no2 = WEBRTC_SPL_UMUL_32_16(inst->featureSpecDiff - tmpU32no1,
+                                      SPECT_DIFF_TAVG_Q8); // Q(8-2*stages)
+    inst->featureSpecDiff -= WEBRTC_SPL_RSHIFT_U32(tmpU32no2, 8); // Q(-2*stages)
+  } else {
+    tmpU32no2 = WEBRTC_SPL_UMUL_32_16(tmpU32no1 - inst->featureSpecDiff,
+                                      SPECT_DIFF_TAVG_Q8); // Q(8-2*stages)
+    inst->featureSpecDiff += WEBRTC_SPL_RSHIFT_U32(tmpU32no2, 8); // Q(-2*stages)
+  }
+}
+
+// Compute speech/noise probability
+// speech/noise probability is returned in: probSpeechFinal
+//snrLocPrior is the prior SNR for each frequency (in Q11)
+//snrLocPost is the post SNR for each frequency (in Q11)
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, WebRtc_UWord16* nonSpeechProbFinal,
+                               WebRtc_UWord32* priorLocSnr, WebRtc_UWord32* postLocSnr) {
+  WebRtc_UWord32 zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3;
+
+  WebRtc_Word32 invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32;
+  WebRtc_Word32 frac32, logTmp;
+  WebRtc_Word32 logLrtTimeAvgKsumFX;
+
+  WebRtc_Word16 indPriorFX16;
+  WebRtc_Word16 tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart;
+
+  int i, normTmp, normTmp2, nShifts;
+
+  // compute feature based on average LR factor
+  // this is the average over all frequencies of the smooth log LRT
+  logLrtTimeAvgKsumFX = 0;
+  for (i = 0; i < inst->magnLen; i++) {
+    besselTmpFX32 = (WebRtc_Word32)postLocSnr[i]; // Q11
+    normTmp = WebRtcSpl_NormU32(postLocSnr[i]);
+    num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp)
+    if (normTmp > 10) {
+      den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp)
+    } else {
+      den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp)
+    }
+    if (den > 0) {
+      besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11
+    } else {
+      besselTmpFX32 -= num; // Q11
+    }
+
+    // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior) - inst->logLrtTimeAvg[i]);
+    // Here, LRT_TAVG = 0.5
+    zeros = WebRtcSpl_NormU32(priorLocSnr[i]);
+    frac32 = (WebRtc_Word32)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19);
+    tmp32 = WEBRTC_SPL_MUL(frac32, frac32);
+    tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19);
+    tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16)frac32, 5412, 12);
+    frac32 = tmp32 + 37;
+    // tmp32 = log2(priorLocSnr[i])
+    tmp32 = (WebRtc_Word32)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12
+    logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8); // log2(priorLocSnr[i])*log(2)
+    tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1); // Q12
+    inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12
+
+    logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12
+  }
+  inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, inst->stages + 10); // 5 = BIN_SIZE_LRT / 2
+  // done with computation of LR factor
+
+  //
+  //compute the indicator functions
+  //
+
+  // average LRT feature
+  // FLOAT code
+  // indicator0 = 0.5 * (tanh(widthPrior * (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
+  tmpIndFX = 16384; // Q14(1.0)
+  tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
+  nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
+  //use larger width in tanh map for pause regions
+  if (tmp32no1 < 0) {
+    tmpIndFX = 0;
+    tmp32no1 = -tmp32no1;
+    //widthPrior = widthPrior * 2.0;
+    nShifts++;
+  }
+  tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
+  // compute indicator function: sigmoid map
+  tableIndex = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
+  if ((tableIndex < 16) && (tableIndex >= 0)) {
+    tmp16no2 = kIndicatorTable[tableIndex];
+    tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+    frac = (WebRtc_Word16)(tmp32no1 & 0x00003fff); // Q14
+    tmp16no2 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+    if (tmpIndFX == 0) {
+      tmpIndFX = 8192 - tmp16no2; // Q14
+    } else {
+      tmpIndFX = 8192 + tmp16no2; // Q14
+    }
+  }
+  indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
+
+  //spectral flatness feature
+  if (inst->weightSpecFlat) {
+    tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
+    tmpIndFX = 16384; // Q14(1.0)
+    //use larger width in tanh map for pause regions
+    tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
+    nShifts = 4;
+    if (inst->thresholdSpecFlat < tmpU32no1) {
+      tmpIndFX = 0;
+      tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
+      //widthPrior = widthPrior * 2.0;
+      nShifts++;
+    }
+    tmp32no1 = (WebRtc_Word32)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
+                                                                        nShifts), 25); //Q14
+    tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), 25); //Q14
+    // compute indicator function: sigmoid map
+    // FLOAT code
+    // indicator1 = 0.5 * (tanh(sgnMap * widthPrior * (threshPrior1 - tmpFloat1)) + 1.0);
+    tableIndex = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+    if (tableIndex < 16) {
+      tmp16no2 = kIndicatorTable[tableIndex];
+      tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+      frac = (WebRtc_Word16)(tmpU32no1 & 0x00003fff); // Q14
+      tmp16no2 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+      if (tmpIndFX) {
+        tmpIndFX = 8192 + tmp16no2; // Q14
+      } else {
+        tmpIndFX = 8192 - tmp16no2; // Q14
+      }
+    }
+    indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
+  }
+
+  //for template spectral-difference
+  if (inst->weightSpecDiff) {
+    tmpU32no1 = 0;
+    if (inst->featureSpecDiff) {
+      normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
+                               WebRtcSpl_NormU32(inst->featureSpecDiff));
+      tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); // Q(normTmp-2*stages)
+      tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, 20 - inst->stages
+                                        - normTmp);
+      if (tmpU32no2 > 0) {
+        // Q(20 - inst->stages)
+        tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
+      } else {
+        tmpU32no1 = (WebRtc_UWord32)(0x7fffffff);
+      }
+    }
+    tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, 17), 25);
+    tmpU32no2 = tmpU32no1 - tmpU32no3;
+    nShifts = 1;
+    tmpIndFX = 16384; // Q14(1.0)
+    //use larger width in tanh map for pause regions
+    if (tmpU32no2 & 0x80000000) {
+      tmpIndFX = 0;
+      tmpU32no2 = tmpU32no3 - tmpU32no1;
+      //widthPrior = widthPrior * 2.0;
+      nShifts--;
+    }
+    tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
+    // compute indicator function: sigmoid map
+    /* FLOAT code
+     indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
+     */
+    tableIndex = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+    if (tableIndex < 16) {
+      tmp16no2 = kIndicatorTable[tableIndex];
+      tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+      frac = (WebRtc_Word16)(tmpU32no1 & 0x00003fff); // Q14
+      tmp16no2 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                    tmp16no1, frac, 14);
+      if (tmpIndFX) {
+        tmpIndFX = 8192 + tmp16no2;
+      } else {
+        tmpIndFX = 8192 - tmp16no2;
+      }
+    }
+    indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
+  }
+
+  //combine the indicator function with the feature weights
+  // FLOAT code
+  // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * indicator1 + weightIndPrior2 * indicator2);
+  indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
+  // done with computing indicator function
+
+  //compute the prior probability
+  // FLOAT code
+  // inst->priorNonSpeechProb += PRIOR_UPDATE * (indPriorNonSpeech - inst->priorNonSpeechProb);
+  tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
+  inst->priorNonSpeechProb += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+                                PRIOR_UPDATE_Q14, tmp16, 14); // Q14
+
+  //final speech probability: combine prior model with LR factor:
+
+  memset(nonSpeechProbFinal, 0, sizeof(WebRtc_UWord16) * inst->magnLen);
+
+  if (inst->priorNonSpeechProb > 0) {
+    for (i = 0; i < inst->magnLen; i++) {
+      // FLOAT code
+      // invLrt = exp(inst->logLrtTimeAvg[i]);
+      // invLrt = inst->priorSpeechProb * invLrt;
+      // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) / (1.0 - inst->priorSpeechProb + invLrt);
+      // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt;
+      // nonSpeechProbFinal[i] = inst->priorNonSpeechProb / (inst->priorNonSpeechProb + invLrt);
+      if (inst->logLrtTimeAvgW32[i] < 65300) {
+        tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(inst->logLrtTimeAvgW32[i], 23637),
+                                         14); // Q12
+        intPart = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12);
+        if (intPart < -8) {
+          intPart = -8;
+        }
+        frac = (WebRtc_Word16)(tmp32no1 & 0x00000fff); // Q12
+
+        // Quadratic approximation of 2^frac
+        tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12
+        tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12
+        invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart)
+                   + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8
+
+        normTmp = WebRtcSpl_NormW32(invLrtFX);
+        normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb));
+        if (normTmp + normTmp2 >= 7) {
+          if (normTmp + normTmp2 < 15) {
+            invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp);
+            // Q(normTmp+normTmp2-7)
+            tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb));
+            // Q(normTmp+normTmp2+7)
+            invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2); // Q14
+          } else {
+            tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); // Q22
+            invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14
+          }
+
+          tmp32no1 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inst->priorNonSpeechProb, 8); // Q22
+
+          nonSpeechProbFinal[i] = (WebRtc_UWord16)WEBRTC_SPL_DIV(tmp32no1,
+              (WebRtc_Word32)inst->priorNonSpeechProb + invLrtFX); // Q8
+        }
+      }
+    }
+  }
+}
+
+// Transform input (speechFrame) to frequency domain magnitude (magnU16)
+//
+// Windows the new 10 ms frame into the analysis buffer, runs a complex FFT,
+// and fills magnU16[0..magnLen-1] with per-bin magnitudes in
+// Q(normData-stages). Side effects on |inst|: updates real[]/imag[],
+// magnEnergy, sumMagn, energyIn/scaleEnergyIn, normData and minNorm.
+// If the windowed frame is all zeros, sets inst->zeroInputSignal and
+// returns early without writing magnU16.
+// During startup (blockIndex < END_STARTUP_SHORT) it additionally gathers
+// statistics for the noise model: initMagnEst[], whiteNoiseLevel, and the
+// pink-noise parameters pinkNoiseNumerator / pinkNoiseExp.
+void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, WebRtc_UWord16* magnU16) {
+
+  WebRtc_UWord32 tmpU32no1, tmpU32no2;
+
+  WebRtc_Word32   tmp_1_w32 = 0;
+  WebRtc_Word32   tmp_2_w32 = 0;
+  WebRtc_Word32   sum_log_magn = 0;
+  WebRtc_Word32   sum_log_i_log_magn = 0;
+
+  WebRtc_UWord16  sum_log_magn_u16 = 0;
+  WebRtc_UWord16  tmp_u16 = 0;
+
+  WebRtc_Word16   sum_log_i = 0;
+  WebRtc_Word16   sum_log_i_square = 0;
+  WebRtc_Word16   frac = 0;
+  WebRtc_Word16   log2 = 0;
+  WebRtc_Word16   matrix_determinant = 0;
+  WebRtc_Word16   winData[ANAL_BLOCKL_MAX], maxWinData;
+  // Interleaved re/im buffer for the in-place FFT; twice the analysis length.
+  WebRtc_Word16   realImag[ANAL_BLOCKL_MAX << 1];
+
+  int i, j;
+  int zeros;
+  int net_norm = 0;
+  int right_shifts_in_magnU16 = 0;
+  int right_shifts_in_initMagnEst = 0;
+
+  // Update analysis buffer for lower band, and window data before FFT.
+  WebRtcNsx_AnalysisUpdate(inst, winData, speechFrame);
+
+  // Get input energy
+  inst->energyIn = WebRtcSpl_Energy(winData, (int)inst->anaLen, &(inst->scaleEnergyIn));
+
+  // Reset zero input flag
+  inst->zeroInputSignal = 0;
+  // Acquire norm for winData
+  maxWinData = WebRtcSpl_MaxAbsValueW16(winData, inst->anaLen);
+  inst->normData = WebRtcSpl_NormW16(maxWinData);
+  if (maxWinData == 0) {
+    // Treat zero input separately.
+    inst->zeroInputSignal = 1;
+    return;
+  }
+
+  // Determine the net normalization in the frequency domain
+  net_norm = inst->stages - inst->normData;
+  // Track lowest normalization factor and use it to prevent wrap around in shifting
+  right_shifts_in_magnU16 = inst->normData - inst->minNorm;
+  right_shifts_in_initMagnEst = WEBRTC_SPL_MAX(-right_shifts_in_magnU16, 0);
+  inst->minNorm -= right_shifts_in_initMagnEst;
+  right_shifts_in_magnU16 = WEBRTC_SPL_MAX(right_shifts_in_magnU16, 0);
+
+  // create realImag as winData interleaved with zeros (= imag. part), normalize it
+  WebRtcNsx_CreateComplexBuffer(inst, winData, realImag);
+
+  // bit-reverse position of elements in array and FFT the array
+  WebRtcSpl_ComplexBitReverse(realImag, inst->stages); // Q(normData-stages)
+  WebRtcSpl_ComplexFFT(realImag, inst->stages, 1);
+
+  // DC (bin 0) and Nyquist (bin anaLen2) are purely real; handle them
+  // outside the main loop.
+  inst->imag[0] = 0; // Q(normData-stages)
+  inst->imag[inst->anaLen2] = 0;
+  inst->real[0] = realImag[0]; // Q(normData-stages)
+  inst->real[inst->anaLen2] = realImag[inst->anaLen];
+  // Q(2*(normData-stages))
+  inst->magnEnergy = (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(inst->real[0], inst->real[0]);
+  inst->magnEnergy += (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(inst->real[inst->anaLen2],
+                                                           inst->real[inst->anaLen2]);
+  magnU16[0] = (WebRtc_UWord16)WEBRTC_SPL_ABS_W16(inst->real[0]); // Q(normData-stages)
+  magnU16[inst->anaLen2] = (WebRtc_UWord16)WEBRTC_SPL_ABS_W16(inst->real[inst->anaLen2]);
+  inst->sumMagn = (WebRtc_UWord32)magnU16[0]; // Q(normData-stages)
+  inst->sumMagn += (WebRtc_UWord32)magnU16[inst->anaLen2];
+
+  if (inst->blockIndex >= END_STARTUP_SHORT) {
+    // Past startup: only the magnitude spectrum and its energy are needed.
+    for (i = 1, j = 2; i < inst->anaLen2; i += 1, j += 2) {
+      inst->real[i] = realImag[j];
+      inst->imag[i] = -realImag[j + 1];
+      // magnitude spectrum
+      // energy in Q(2*(normData-stages))
+      tmpU32no1 = (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(realImag[j], realImag[j]);
+      tmpU32no1 += (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(realImag[j + 1], realImag[j + 1]);
+      inst->magnEnergy += tmpU32no1; // Q(2*(normData-stages))
+
+      magnU16[i] = (WebRtc_UWord16)WebRtcSpl_SqrtFloor(tmpU32no1); // Q(normData-stages)
+      inst->sumMagn += (WebRtc_UWord32)magnU16[i]; // Q(normData-stages)
+    }
+  } else {
+    //
+    // Gather information during startup for noise parameter estimation
+    //
+
+    // Switch initMagnEst to Q(minNorm-stages)
+    inst->initMagnEst[0] = WEBRTC_SPL_RSHIFT_U32(inst->initMagnEst[0],
+                                                 right_shifts_in_initMagnEst);
+    inst->initMagnEst[inst->anaLen2] =
+      WEBRTC_SPL_RSHIFT_U32(inst->initMagnEst[inst->anaLen2],
+                            right_shifts_in_initMagnEst); // Q(minNorm-stages)
+
+    // Shift magnU16 to same domain as initMagnEst
+    tmpU32no1 = WEBRTC_SPL_RSHIFT_W32((WebRtc_UWord32)magnU16[0],
+                                      right_shifts_in_magnU16); // Q(minNorm-stages)
+    tmpU32no2 = WEBRTC_SPL_RSHIFT_W32((WebRtc_UWord32)magnU16[inst->anaLen2],
+                                      right_shifts_in_magnU16); // Q(minNorm-stages)
+
+    // Update initMagnEst
+    inst->initMagnEst[0] += tmpU32no1; // Q(minNorm-stages)
+    inst->initMagnEst[inst->anaLen2] += tmpU32no2; // Q(minNorm-stages)
+
+    log2 = 0;
+    if (magnU16[inst->anaLen2]) {
+      // Calculate log2(magnU16[inst->anaLen2]) via normalization count
+      // (integer part) plus a table lookup on the top fractional bits.
+      zeros = WebRtcSpl_NormU32((WebRtc_UWord32)magnU16[inst->anaLen2]);
+      frac = (WebRtc_Word16)((((WebRtc_UWord32)magnU16[inst->anaLen2] << zeros) &
+                              0x7FFFFFFF) >> 23); // Q8
+      // log2(magnU16(i)) in Q8
+      assert(frac < 256);
+      log2 = (WebRtc_Word16)(((31 - zeros) << 8) + WebRtcNsx_kLogTableFrac[frac]);
+    }
+
+    sum_log_magn = (WebRtc_Word32)log2; // Q8
+    // sum_log_i_log_magn in Q17
+    sum_log_i_log_magn = (WEBRTC_SPL_MUL_16_16(kLogIndex[inst->anaLen2], log2) >> 3);
+
+    for (i = 1, j = 2; i < inst->anaLen2; i += 1, j += 2) {
+      inst->real[i] = realImag[j];
+      inst->imag[i] = -realImag[j + 1];
+      // magnitude spectrum
+      // energy in Q(2*(normData-stages))
+      tmpU32no1 = (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(realImag[j], realImag[j]);
+      tmpU32no1 += (WebRtc_UWord32)WEBRTC_SPL_MUL_16_16(realImag[j + 1], realImag[j + 1]);
+      inst->magnEnergy += tmpU32no1; // Q(2*(normData-stages))
+
+      magnU16[i] = (WebRtc_UWord16)WebRtcSpl_SqrtFloor(tmpU32no1); // Q(normData-stages)
+      inst->sumMagn += (WebRtc_UWord32)magnU16[i]; // Q(normData-stages)
+
+      // Switch initMagnEst to Q(minNorm-stages)
+      inst->initMagnEst[i] = WEBRTC_SPL_RSHIFT_U32(inst->initMagnEst[i],
+                                                   right_shifts_in_initMagnEst);
+
+      // Shift magnU16 to same domain as initMagnEst, i.e., Q(minNorm-stages)
+      tmpU32no1 = WEBRTC_SPL_RSHIFT_W32((WebRtc_UWord32)magnU16[i],
+                                        right_shifts_in_magnU16);
+      // Update initMagnEst
+      inst->initMagnEst[i] += tmpU32no1; // Q(minNorm-stages)
+
+      if (i >= kStartBand) {
+        // For pink noise estimation. Collect data neglecting lower frequency band
+        log2 = 0;
+        if (magnU16[i]) {
+          zeros = WebRtcSpl_NormU32((WebRtc_UWord32)magnU16[i]);
+          frac = (WebRtc_Word16)((((WebRtc_UWord32)magnU16[i] << zeros) &
+                                  0x7FFFFFFF) >> 23);
+          // log2(magnU16(i)) in Q8
+          assert(frac < 256);
+          log2 = (WebRtc_Word16)(((31 - zeros) << 8)
+                                 + WebRtcNsx_kLogTableFrac[frac]);
+        }
+        sum_log_magn += (WebRtc_Word32)log2; // Q8
+        // sum_log_i_log_magn in Q17
+        sum_log_i_log_magn += (WEBRTC_SPL_MUL_16_16(kLogIndex[i], log2) >> 3);
+      }
+    }
+
+    //
+    //compute simplified noise model during startup
+    //
+
+    // Estimate White noise
+
+    // Switch whiteNoiseLevel to Q(minNorm-stages)
+    inst->whiteNoiseLevel = WEBRTC_SPL_RSHIFT_U32(inst->whiteNoiseLevel,
+                                                  right_shifts_in_initMagnEst);
+
+    // Update the average magnitude spectrum, used as noise estimate.
+    tmpU32no1 = WEBRTC_SPL_UMUL_32_16(inst->sumMagn, inst->overdrive);
+    tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, inst->stages + 8);
+
+    // Replacing division above with 'stages' shifts
+    // Shift to same Q-domain as whiteNoiseLevel
+    tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, right_shifts_in_magnU16);
+    // This operation is safe from wrap around as long as END_STARTUP_SHORT < 128
+    assert(END_STARTUP_SHORT < 128);
+    inst->whiteNoiseLevel += tmpU32no1; // Q(minNorm-stages)
+
+    // Estimate Pink noise parameters
+    // Denominator used in both parameter estimates.
+    // The value is only dependent on the size of the frequency band (kStartBand)
+    // and to reduce computational complexity stored in a table (kDeterminantEstMatrix[])
+    assert(kStartBand < 66);
+    matrix_determinant = kDeterminantEstMatrix[kStartBand]; // Q0
+    sum_log_i = kSumLogIndex[kStartBand]; // Q5
+    sum_log_i_square = kSumSquareLogIndex[kStartBand]; // Q2
+    if (inst->fs == 8000) {
+      // Adjust values to shorter blocks in narrow band.
+      // NOTE(review): index 65 appears to reference the last table entry so
+      // that the upper-band contribution is subtracted out — confirm against
+      // the table definitions.
+      tmp_1_w32 = (WebRtc_Word32)matrix_determinant;
+      tmp_1_w32 += WEBRTC_SPL_MUL_16_16_RSFT(kSumLogIndex[65], sum_log_i, 9);
+      tmp_1_w32 -= WEBRTC_SPL_MUL_16_16_RSFT(kSumLogIndex[65], kSumLogIndex[65], 10);
+      tmp_1_w32 -= WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)sum_log_i_square, 4);
+      tmp_1_w32 -= WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16)
+                       (inst->magnLen - kStartBand), kSumSquareLogIndex[65], 2);
+      matrix_determinant = (WebRtc_Word16)tmp_1_w32;
+      sum_log_i -= kSumLogIndex[65]; // Q5
+      sum_log_i_square -= kSumSquareLogIndex[65]; // Q2
+    }
+
+    // Necessary number of shifts to fit sum_log_magn in a word16
+    zeros = 16 - WebRtcSpl_NormW32(sum_log_magn);
+    if (zeros < 0) {
+      zeros = 0;
+    }
+    tmp_1_w32 = WEBRTC_SPL_LSHIFT_W32(sum_log_magn, 1); // Q9
+    sum_log_magn_u16 = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_W32(tmp_1_w32, zeros);//Q(9-zeros)
+
+    // Calculate and update pinkNoiseNumerator. Result in Q11.
+    tmp_2_w32 = WEBRTC_SPL_MUL_16_U16(sum_log_i_square, sum_log_magn_u16); // Q(11-zeros)
+    tmpU32no1 = WEBRTC_SPL_RSHIFT_U32((WebRtc_UWord32)sum_log_i_log_magn, 12); // Q5
+
+    // Shift the largest value of sum_log_i and tmp32no3 before multiplication
+    tmp_u16 = WEBRTC_SPL_LSHIFT_U16((WebRtc_UWord16)sum_log_i, 1); // Q6
+    if ((WebRtc_UWord32)sum_log_i > tmpU32no1) {
+      tmp_u16 = WEBRTC_SPL_RSHIFT_U16(tmp_u16, zeros);
+    } else {
+      tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, zeros);
+    }
+    tmp_2_w32 -= (WebRtc_Word32)WEBRTC_SPL_UMUL_32_16(tmpU32no1, tmp_u16); // Q(11-zeros)
+    matrix_determinant = WEBRTC_SPL_RSHIFT_W16(matrix_determinant, zeros); // Q(-zeros)
+    tmp_2_w32 = WebRtcSpl_DivW32W16(tmp_2_w32, matrix_determinant); // Q11
+    tmp_2_w32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)net_norm, 11); // Q11
+    if (tmp_2_w32 < 0) {
+      // Clamp the numerator estimate at zero before accumulating.
+      tmp_2_w32 = 0;
+    }
+    inst->pinkNoiseNumerator += tmp_2_w32; // Q11
+
+    // Calculate and update pinkNoiseExp. Result in Q14.
+    tmp_2_w32 = WEBRTC_SPL_MUL_16_U16(sum_log_i, sum_log_magn_u16); // Q(14-zeros)
+    tmp_1_w32 = WEBRTC_SPL_RSHIFT_W32(sum_log_i_log_magn, 3 + zeros);
+    tmp_1_w32 = WEBRTC_SPL_MUL((WebRtc_Word32)(inst->magnLen - kStartBand),
+                               tmp_1_w32);
+    tmp_2_w32 -= tmp_1_w32; // Q(14-zeros)
+    if (tmp_2_w32 > 0) {
+      // If the exponential parameter is negative force it to zero, which means a
+      // flat spectrum.
+      tmp_1_w32 = WebRtcSpl_DivW32W16(tmp_2_w32, matrix_determinant); // Q14
+      inst->pinkNoiseExp += WEBRTC_SPL_SAT(16384, tmp_1_w32, 0); // Q14
+    }
+  }
+}
+
+// Synthesize the time-domain output frame from the filtered spectrum.
+//
+// Builds the spectrum via WebRtcNsx_PrepareSpectrum, runs an inverse FFT,
+// denormalizes, optionally applies an energy-ratio based gain (when
+// inst->gainMap is enabled and startup has completed), and finally writes
+// |outFrame| through WebRtcNsx_SynthesisUpdate. If the current frame was
+// flagged as zero input, it instead reads out the already-processed
+// segment from the synthesis buffer and shifts the buffer.
+void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) {
+  WebRtc_Word32 energyOut;
+
+  // Interleaved re/im buffer for the in-place IFFT.
+  WebRtc_Word16 realImag[ANAL_BLOCKL_MAX << 1];
+  WebRtc_Word16 tmp16no1, tmp16no2;
+  WebRtc_Word16 energyRatio;
+  WebRtc_Word16 gainFactor, gainFactor1, gainFactor2;
+
+  int i;
+  int outCIFFT;
+  int scaleEnergyOut = 0;
+
+  if (inst->zeroInputSignal) {
+    // synthesize the special case of zero input
+    // read out fully processed segment
+    for (i = 0; i < inst->blockLen10ms; i++) {
+      outFrame[i] = inst->synthesisBuffer[i]; // Q0
+    }
+    // update synthesis buffer: shift out the consumed 10 ms and zero the tail
+    WEBRTC_SPL_MEMCPY_W16(inst->synthesisBuffer,
+                          inst->synthesisBuffer + inst->blockLen10ms,
+                          inst->anaLen - inst->blockLen10ms);
+    WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer + inst->anaLen - inst->blockLen10ms,
+                            inst->blockLen10ms);
+    return;
+  }
+
+  // Filter the data in the frequency domain, and create spectrum.
+  WebRtcNsx_PrepareSpectrum(inst, realImag);
+
+  // bit-reverse position of elements in array and IFFT it
+  WebRtcSpl_ComplexBitReverse(realImag, inst->stages);
+  outCIFFT = WebRtcSpl_ComplexIFFT(realImag, inst->stages, 1);
+
+  // Denormalize, compensating for the scaling the IFFT reported (outCIFFT).
+  WebRtcNsx_Denormalize(inst, realImag, outCIFFT);
+
+  //scale factor: only do it after END_STARTUP_LONG time
+  gainFactor = 8192; // 8192 = Q13(1.0)
+  if (inst->gainMap == 1 &&
+      inst->blockIndex > END_STARTUP_LONG &&
+      inst->energyIn > 0) {
+    energyOut = WebRtcSpl_Energy(inst->real, (int)inst->anaLen, &scaleEnergyOut); // Q(-scaleEnergyOut)
+    // Bring energyOut and energyIn into the same Q-domain before the ratio.
+    // NOTE(review): the two branches rescale different operands (energyOut
+    // when there is headroom, otherwise energyIn) — the asymmetry looks
+    // deliberate to avoid overflow, but confirm against the float reference,
+    // and note the else-branch right shift could drive energyIn to 0, which
+    // would trip the assert below.
+    if (scaleEnergyOut == 0 && !(energyOut & 0x7f800000)) {
+      energyOut = WEBRTC_SPL_SHIFT_W32(energyOut, 8 + scaleEnergyOut
+                                       - inst->scaleEnergyIn);
+    } else {
+      inst->energyIn = WEBRTC_SPL_RSHIFT_W32(inst->energyIn, 8 + scaleEnergyOut
+                                             - inst->scaleEnergyIn); // Q(-8-scaleEnergyOut)
+    }
+
+    assert(inst->energyIn > 0);
+    // Rounded division: ratio of output to input energy in Q8.
+    energyRatio = (WebRtc_Word16)WEBRTC_SPL_DIV(energyOut
+        + WEBRTC_SPL_RSHIFT_W32(inst->energyIn, 1), inst->energyIn); // Q8
+    // Limit the ratio to [0, 1] in Q8, i.e., [0, 256]
+    energyRatio = WEBRTC_SPL_SAT(256, energyRatio, 0);
+
+    // all done in lookup tables now
+    assert(energyRatio < 257);
+    gainFactor1 = kFactor1Table[energyRatio]; // Q8
+    gainFactor2 = inst->factor2Table[energyRatio]; // Q8
+
+    //combine both scales with speech/noise prob: note prior (priorSpeechProb) is not frequency dependent
+
+    // factor = inst->priorSpeechProb*factor1 + (1.0-inst->priorSpeechProb)*factor2; // original code
+    tmp16no1 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(16384 - inst->priorNonSpeechProb,
+                                                        gainFactor1, 14); // Q13 16384 = Q14(1.0)
+    tmp16no2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->priorNonSpeechProb,
+                                                        gainFactor2, 14); // Q13;
+    gainFactor = tmp16no1 + tmp16no2; // Q13
+  } // out of flag_gain_map==1
+
+  // Synthesis, read out fully processed segment, and update synthesis buffer.
+  WebRtcNsx_SynthesisUpdate(inst, outFrame, gainFactor);
+}
+
+int WebRtcNsx_ProcessCore(NsxInst_t* inst, short* speechFrame, short* speechFrameHB,
+                          short* outFrame, short* outFrameHB) {
+  // main routine for noise suppression
+
+  WebRtc_UWord32 tmpU32no1, tmpU32no2, tmpU32no3;
+  WebRtc_UWord32 satMax, maxNoiseU32;
+  WebRtc_UWord32 tmpMagnU32, tmpNoiseU32;
+  WebRtc_UWord32 nearMagnEst;
+  WebRtc_UWord32 noiseUpdateU32;
+  WebRtc_UWord32 noiseU32[HALF_ANAL_BLOCKL];
+  WebRtc_UWord32 postLocSnr[HALF_ANAL_BLOCKL];
+  WebRtc_UWord32 priorLocSnr[HALF_ANAL_BLOCKL];
+  WebRtc_UWord32 prevNearSnr[HALF_ANAL_BLOCKL];
+  WebRtc_UWord32 curNearSnr;
+  WebRtc_UWord32 priorSnr;
+  WebRtc_UWord32 noise_estimate = 0;
+  WebRtc_UWord32 noise_estimate_avg = 0;
+  WebRtc_UWord32 numerator = 0;
+
+  WebRtc_Word32 tmp32no1, tmp32no2;
+  WebRtc_Word32 pink_noise_num_avg = 0;
+
+  WebRtc_UWord16 tmpU16no1;
+  WebRtc_UWord16 magnU16[HALF_ANAL_BLOCKL];
+  WebRtc_UWord16 prevNoiseU16[HALF_ANAL_BLOCKL];
+  WebRtc_UWord16 nonSpeechProbFinal[HALF_ANAL_BLOCKL];
+  WebRtc_UWord16 gammaNoise, prevGammaNoise;
+  WebRtc_UWord16 noiseSupFilterTmp[HALF_ANAL_BLOCKL];
+
+  WebRtc_Word16 qMagn, qNoise;
+  WebRtc_Word16 avgProbSpeechHB, gainModHB, avgFilterGainHB, gainTimeDomainHB;
+  WebRtc_Word16 pink_noise_exp_avg = 0;
+
+  int i;
+  int nShifts, postShifts;
+  int norm32no1, norm32no2;
+  int flag, sign;
+  int q_domain_to_use = 0;
+
+  // Code for ARMv7-Neon platform assumes the following:
+  assert(inst->anaLen % 16 == 0);
+  assert(inst->anaLen2 % 8 == 0);
+  assert(inst->blockLen10ms % 16 == 0);
+  assert(inst->magnLen == inst->anaLen2 + 1);
+
+#ifdef NS_FILEDEBUG
+  fwrite(spframe, sizeof(short), inst->blockLen10ms, inst->infile);
+#endif
+
+  // Check that initialization has been done
+  if (inst->initFlag != 1) {
+    return -1;
+  }
+  // Check for valid pointers based on sampling rate
+  if ((inst->fs == 32000) && (speechFrameHB == NULL)) {
+    return -1;
+  }
+
+  // Store speechFrame and transform to frequency domain
+  WebRtcNsx_DataAnalysis(inst, speechFrame, magnU16);
+
+  if (inst->zeroInputSignal) {
+    WebRtcNsx_DataSynthesis(inst, outFrame);
+
+    if (inst->fs == 32000) {
+      // update analysis buffer for H band
+      // append new data to buffer FX
+      WEBRTC_SPL_MEMCPY_W16(inst->dataBufHBFX, inst->dataBufHBFX + inst->blockLen10ms,
+                            inst->anaLen - inst->blockLen10ms);
+      WEBRTC_SPL_MEMCPY_W16(inst->dataBufHBFX + inst->anaLen - inst->blockLen10ms,
+                            speechFrameHB, inst->blockLen10ms);
+      for (i = 0; i < inst->blockLen10ms; i++) {
+        outFrameHB[i] = inst->dataBufHBFX[i]; // Q0
+      }
+    } // end of H band gain computation
+    return 0;
+  }
+
+  // Update block index when we have something to process
+  inst->blockIndex++;
+  //
+
+  // Norm of magn
+  qMagn = inst->normData - inst->stages;
+
+  // Compute spectral flatness on input spectrum
+  WebRtcNsx_ComputeSpectralFlatness(inst, magnU16);
+
+  // quantile noise estimate
+  WebRtcNsx_NoiseEstimation(inst, magnU16, noiseU32, &qNoise);
+
+  //noise estimate from previous frame
+  for (i = 0; i < inst->magnLen; i++) {
+    prevNoiseU16[i] = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(inst->prevNoiseU32[i], 11); // Q(prevQNoise)
+  }
+
+  if (inst->blockIndex < END_STARTUP_SHORT) {
+    // Noise Q-domain to be used later; see description at end of section.
+    q_domain_to_use = WEBRTC_SPL_MIN((int)qNoise, inst->minNorm - inst->stages);
+
+    // Calculate frequency independent parts in parametric noise estimate and calculate
+    // the estimate for the lower frequency band (same values for all frequency bins)
+    if (inst->pinkNoiseExp) {
+      pink_noise_exp_avg = (WebRtc_Word16)WebRtcSpl_DivW32W16(inst->pinkNoiseExp,
+                                                              (WebRtc_Word16)(inst->blockIndex + 1)); // Q14
+      pink_noise_num_avg = WebRtcSpl_DivW32W16(inst->pinkNoiseNumerator,
+                                               (WebRtc_Word16)(inst->blockIndex + 1)); // Q11
+      WebRtcNsx_CalcParametricNoiseEstimate(inst,
+                                            pink_noise_exp_avg,
+                                            pink_noise_num_avg,
+                                            kStartBand,
+                                            &noise_estimate,
+                                            &noise_estimate_avg);
+    } else {
+      // Use white noise estimate if we have poor pink noise parameter estimates
+      noise_estimate = inst->whiteNoiseLevel; // Q(minNorm-stages)
+      noise_estimate_avg = noise_estimate / (inst->blockIndex + 1); // Q(minNorm-stages)
+    }
+    for (i = 0; i < inst->magnLen; i++) {
+      // Estimate the background noise using the pink noise parameters if permitted
+      if ((inst->pinkNoiseExp) && (i >= kStartBand)) {
+        // Reset noise_estimate
+        noise_estimate = 0;
+        noise_estimate_avg = 0;
+        // Calculate the parametric noise estimate for current frequency bin
+        WebRtcNsx_CalcParametricNoiseEstimate(inst,
+                                              pink_noise_exp_avg,
+                                              pink_noise_num_avg,
+                                              i,
+                                              &noise_estimate,
+                                              &noise_estimate_avg);
+      }
+      // Calculate parametric Wiener filter
+      noiseSupFilterTmp[i] = inst->denoiseBound;
+      if (inst->initMagnEst[i]) {
+        // numerator = (initMagnEst - noise_estimate * overdrive)
+        // Result in Q(8+minNorm-stages)
+        tmpU32no1 = WEBRTC_SPL_UMUL_32_16(noise_estimate, inst->overdrive);
+        numerator = WEBRTC_SPL_LSHIFT_U32(inst->initMagnEst[i], 8);
+        if (numerator > tmpU32no1) {
+          // Suppression filter coefficient larger than zero, so calculate.
+          numerator -= tmpU32no1;
+
+          // Determine number of left shifts in numerator for best accuracy after
+          // division
+          nShifts = WebRtcSpl_NormU32(numerator);
+          nShifts = WEBRTC_SPL_SAT(6, nShifts, 0);
+
+          // Shift numerator to Q(nShifts+8+minNorm-stages)
+          numerator = WEBRTC_SPL_LSHIFT_U32(numerator, nShifts);
+
+          // Shift denominator to Q(nShifts-6+minNorm-stages)
+          tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(inst->initMagnEst[i], 6 - nShifts);
+          if (tmpU32no1 == 0) {
+            // This is only possible if numerator = 0, in which case
+            // we don't need any division.
+            tmpU32no1 = 1;
+          }
+          tmpU32no2 = WEBRTC_SPL_UDIV(numerator, tmpU32no1); // Q14
+          noiseSupFilterTmp[i] = (WebRtc_UWord16)WEBRTC_SPL_SAT(16384, tmpU32no2,
+              (WebRtc_UWord32)(inst->denoiseBound)); // Q14
+        }
+      }
+      // Weight quantile noise 'noiseU32' with modeled noise 'noise_estimate_avg'
+      // 'noiseU32 is in Q(qNoise) and 'noise_estimate' in Q(minNorm-stages)
+      // To guarantee that we do not get wrap around when shifting to the same domain
+      // we use the lowest one. Furthermore, we need to save 6 bits for the weighting.
+      // 'noise_estimate_avg' can handle this operation by construction, but 'noiseU32'
+      // may not.
+
+      // Shift 'noiseU32' to 'q_domain_to_use'
+      tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(noiseU32[i], (int)qNoise - q_domain_to_use);
+      // Shift 'noise_estimate_avg' to 'q_domain_to_use'
+      tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(noise_estimate_avg, inst->minNorm - inst->stages
+                                        - q_domain_to_use);
+      // Make a simple check to see if we have enough room for weighting 'tmpU32no1'
+      // without wrap around
+      nShifts = 0;
+      if (tmpU32no1 & 0xfc000000) {
+        tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 6);
+        tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, 6);
+        nShifts = 6;
+      }
+      tmpU32no1 *= inst->blockIndex;
+      tmpU32no2 *= (END_STARTUP_SHORT - inst->blockIndex);
+      // Add them together and divide by startup length
+      noiseU32[i] = WebRtcSpl_DivU32U16(tmpU32no1 + tmpU32no2, END_STARTUP_SHORT);
+      // Shift back if necessary
+      noiseU32[i] = WEBRTC_SPL_LSHIFT_U32(noiseU32[i], nShifts);
+    }
+    // Update new Q-domain for 'noiseU32'
+    qNoise = q_domain_to_use;
+  }
+  // compute average signal during END_STARTUP_LONG time:
+  // used to normalize spectral difference measure
+  if (inst->blockIndex < END_STARTUP_LONG) {
+    // substituting division with shift ending up in Q(-2*stages)
+    inst->timeAvgMagnEnergyTmp
+    += WEBRTC_SPL_RSHIFT_U32(inst->magnEnergy,
+                             2 * inst->normData + inst->stages - 1);
+    inst->timeAvgMagnEnergy = WebRtcSpl_DivU32U16(inst->timeAvgMagnEnergyTmp,
+                                                  inst->blockIndex + 1);
+  }
+
+  //start processing at frames == converged+1
+  // STEP 1: compute prior and post SNR based on quantile noise estimates
+
+  // compute direct decision (DD) estimate of prior SNR: needed for new method
+  satMax = (WebRtc_UWord32)1048575;// Largest possible value without getting overflow despite shifting 12 steps
+  postShifts = 6 + qMagn - qNoise;
+  nShifts = 5 - inst->prevQMagn + inst->prevQNoise;
+  for (i = 0; i < inst->magnLen; i++) {
+    // FLOAT:
+    // post SNR
+    // postLocSnr[i] = 0.0;
+    // if (magn[i] > noise[i])
+    // {
+    //   postLocSnr[i] = magn[i] / (noise[i] + 0.0001);
+    // }
+    // // previous post SNR
+    // // previous estimate: based on previous frame with gain filter (smooth is previous filter)
+    //
+    // prevNearSnr[i] = inst->prevMagnU16[i] / (inst->noisePrev[i] + 0.0001) * (inst->smooth[i]);
+    //
+    // // DD estimate is sum of two terms: current estimate and previous estimate
+    // // directed decision update of priorSnr (or we actually store [2*priorSnr+1])
+    //
+    // priorLocSnr[i] = DD_PR_SNR * prevNearSnr[i] + (1.0 - DD_PR_SNR) * (postLocSnr[i] - 1.0);
+
+    // calculate post SNR: output in Q11
+    postLocSnr[i] = 2048; // 1.0 in Q11
+    tmpU32no1 = WEBRTC_SPL_LSHIFT_U32((WebRtc_UWord32)magnU16[i], 6); // Q(6+qMagn)
+    if (postShifts < 0) {
+      tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(noiseU32[i], -postShifts); // Q(6+qMagn)
+    } else {
+      tmpU32no2 = WEBRTC_SPL_LSHIFT_U32(noiseU32[i], postShifts); // Q(6+qMagn)
+    }
+    if (tmpU32no1 > tmpU32no2) {
+      // Current magnitude larger than noise
+      tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(tmpU32no1, 11); // Q(17+qMagn)
+      if (tmpU32no2 > 0) {
+        tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); // Q11
+        postLocSnr[i] = WEBRTC_SPL_MIN(satMax, tmpU32no1); // Q11
+      } else {
+        postLocSnr[i] = satMax;
+      }
+    }
+
+    // calculate prevNearSnr[i] and save for later instead of recalculating it later
+    nearMagnEst = WEBRTC_SPL_UMUL_16_16(inst->prevMagnU16[i], inst->noiseSupFilter[i]); // Q(prevQMagn+14)
+    tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(nearMagnEst, 3); // Q(prevQMagn+17)
+    tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->prevNoiseU32[i], nShifts); // Q(prevQMagn+6)
+
+    if (tmpU32no2 > 0) {
+      tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); // Q11
+      tmpU32no1 = WEBRTC_SPL_MIN(satMax, tmpU32no1); // Q11
+    } else {
+      tmpU32no1 = satMax; // Q11
+    }
+    prevNearSnr[i] = tmpU32no1; // Q11
+
+    //directed decision update of priorSnr
+    tmpU32no1 = WEBRTC_SPL_UMUL_32_16(prevNearSnr[i], DD_PR_SNR_Q11); // Q22
+    tmpU32no2 = WEBRTC_SPL_UMUL_32_16(postLocSnr[i] - 2048, ONE_MINUS_DD_PR_SNR_Q11); // Q22
+    priorSnr = tmpU32no1 + tmpU32no2 + 512; // Q22 (added 512 for rounding)
+    // priorLocSnr = 1 + 2*priorSnr
+    priorLocSnr[i] = 2048 + WEBRTC_SPL_RSHIFT_U32(priorSnr, 10); // Q11
+  } // end of loop over frequencies
+  // done with step 1: DD computation of prior and post SNR
+
+  // STEP 2: compute speech/noise likelihood
+
+  //compute difference of input spectrum with learned/estimated noise spectrum
+  WebRtcNsx_ComputeSpectralDifference(inst, magnU16);
+  //compute histograms for determination of parameters (thresholds and weights for features)
+  //parameters are extracted once every window time (=inst->modelUpdate)
+  //counter update
+  inst->cntThresUpdate++;
+  flag = (int)(inst->cntThresUpdate == inst->modelUpdate);
+  //update histogram
+  WebRtcNsx_FeatureParameterExtraction(inst, flag);
+  //compute model parameters
+  if (flag) {
+    inst->cntThresUpdate = 0; // Reset counter
+    //update every window:
+    // get normalization for spectral difference for next window estimate
+
+    // Shift to Q(-2*stages)
+    inst->curAvgMagnEnergy = WEBRTC_SPL_RSHIFT_U32(inst->curAvgMagnEnergy, STAT_UPDATES);
+
+    tmpU32no1 = (inst->curAvgMagnEnergy + inst->timeAvgMagnEnergy + 1) >> 1; //Q(-2*stages)
+    // Update featureSpecDiff
+    if ((tmpU32no1 != inst->timeAvgMagnEnergy) && (inst->featureSpecDiff) &&
+        (inst->timeAvgMagnEnergy > 0)) {
+      norm32no1 = 0;
+      tmpU32no3 = tmpU32no1;
+      while (0xFFFF0000 & tmpU32no3) {
+        tmpU32no3 >>= 1;
+        norm32no1++;
+      }
+      tmpU32no2 = inst->featureSpecDiff;
+      while (0xFFFF0000 & tmpU32no2) {
+        tmpU32no2 >>= 1;
+        norm32no1++;
+      }
+      tmpU32no3 = WEBRTC_SPL_UMUL(tmpU32no3, tmpU32no2);
+      tmpU32no3 = WEBRTC_SPL_UDIV(tmpU32no3, inst->timeAvgMagnEnergy);
+      if (WebRtcSpl_NormU32(tmpU32no3) < norm32no1) {
+        inst->featureSpecDiff = 0x007FFFFF;
+      } else {
+        inst->featureSpecDiff = WEBRTC_SPL_MIN(0x007FFFFF,
+            WEBRTC_SPL_LSHIFT_U32(tmpU32no3, norm32no1));
+      }
+    }
+
+    inst->timeAvgMagnEnergy = tmpU32no1; // Q(-2*stages)
+    inst->curAvgMagnEnergy = 0;
+  }
+
+  //compute speech/noise probability
+  WebRtcNsx_SpeechNoiseProb(inst, nonSpeechProbFinal, priorLocSnr, postLocSnr);
+
+  //time-avg parameter for noise update
+  gammaNoise = NOISE_UPDATE_Q8; // Q8
+
+  maxNoiseU32 = 0;
+  postShifts = inst->prevQNoise - qMagn;
+  nShifts = inst->prevQMagn - qMagn;
+  for (i = 0; i < inst->magnLen; i++) {
+    // temporary noise update: use it for speech frames if update value is less than previous
+    // the formula has been rewritten into:
+    // noiseUpdate = noisePrev[i] + (1 - gammaNoise) * nonSpeechProb * (magn[i] - noisePrev[i])
+
+    if (postShifts < 0) {
+      tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(magnU16[i], -postShifts); // Q(prevQNoise)
+    } else {
+      tmpU32no2 = WEBRTC_SPL_LSHIFT_U32(magnU16[i], postShifts); // Q(prevQNoise)
+    }
+    if (prevNoiseU16[i] > tmpU32no2) {
+      sign = -1;
+      tmpU32no1 = prevNoiseU16[i] - tmpU32no2;
+    } else {
+      sign = 1;
+      tmpU32no1 = tmpU32no2 - prevNoiseU16[i];
+    }
+    noiseUpdateU32 = inst->prevNoiseU32[i]; // Q(prevQNoise+11)
+    tmpU32no3 = 0;
+    if ((tmpU32no1) && (nonSpeechProbFinal[i])) {
+      // This value will be used later, if gammaNoise changes
+      tmpU32no3 = WEBRTC_SPL_UMUL_32_16(tmpU32no1, nonSpeechProbFinal[i]); // Q(prevQNoise+8)
+      if (0x7c000000 & tmpU32no3) {
+        // Shifting required before multiplication
+        tmpU32no2
+          = WEBRTC_SPL_UMUL_32_16(WEBRTC_SPL_RSHIFT_U32(tmpU32no3, 5), gammaNoise); // Q(prevQNoise+11)
+      } else {
+        // We can do shifting after multiplication
+        tmpU32no2
+          = WEBRTC_SPL_RSHIFT_U32(WEBRTC_SPL_UMUL_32_16(tmpU32no3, gammaNoise), 5); // Q(prevQNoise+11)
+      }
+      if (sign > 0) {
+        noiseUpdateU32 += tmpU32no2; // Q(prevQNoise+11)
+      } else {
+        // This operation is safe. We can never get wrap around, since worst
+        // case scenario means magnU16 = 0
+        noiseUpdateU32 -= tmpU32no2; // Q(prevQNoise+11)
+      }
+    }
+
+    //increase gamma (i.e., less noise update) for frame likely to be speech
+    prevGammaNoise = gammaNoise;
+    gammaNoise = NOISE_UPDATE_Q8;
+    //time-constant based on speech/noise state
+    //increase gamma (i.e., less noise update) for frames likely to be speech
+    if (nonSpeechProbFinal[i] < ONE_MINUS_PROB_RANGE_Q8) {
+      gammaNoise = GAMMA_NOISE_TRANS_AND_SPEECH_Q8;
+    }
+
+    if (prevGammaNoise != gammaNoise) {
+      // new noise update
+      // this line is the same as above, only that the result is stored in a different variable and the gammaNoise
+      // has changed
+      //
+      // noiseUpdate = noisePrev[i] + (1 - gammaNoise) * nonSpeechProb * (magn[i] - noisePrev[i])
+
+      if (0x7c000000 & tmpU32no3) {
+        // Shifting required before multiplication
+        tmpU32no2
+          = WEBRTC_SPL_UMUL_32_16(WEBRTC_SPL_RSHIFT_U32(tmpU32no3, 5), gammaNoise); // Q(prevQNoise+11)
+      } else {
+        // We can do shifting after multiplication
+        tmpU32no2
+          = WEBRTC_SPL_RSHIFT_U32(WEBRTC_SPL_UMUL_32_16(tmpU32no3, gammaNoise), 5); // Q(prevQNoise+11)
+      }
+      if (sign > 0) {
+        tmpU32no1 = inst->prevNoiseU32[i] + tmpU32no2; // Q(prevQNoise+11)
+      } else {
+        tmpU32no1 = inst->prevNoiseU32[i] - tmpU32no2; // Q(prevQNoise+11)
+      }
+      if (noiseUpdateU32 > tmpU32no1) {
+        noiseUpdateU32 = tmpU32no1; // Q(prevQNoise+11)
+      }
+    }
+    noiseU32[i] = noiseUpdateU32; // Q(prevQNoise+11)
+    if (noiseUpdateU32 > maxNoiseU32) {
+      maxNoiseU32 = noiseUpdateU32;
+    }
+
+    // conservative noise update
+    // // original FLOAT code
+    // if (prob_speech < PROB_RANGE) {
+    // inst->avgMagnPause[i] = inst->avgMagnPause[i] + (1.0 - gamma_pause)*(magn[i] - inst->avgMagnPause[i]);
+    // }
+
+    tmp32no2 = WEBRTC_SPL_SHIFT_W32(inst->avgMagnPause[i], -nShifts);
+    if (nonSpeechProbFinal[i] > ONE_MINUS_PROB_RANGE_Q8) {
+      if (nShifts < 0) {
+        tmp32no1 = (WebRtc_Word32)magnU16[i] - tmp32no2; // Q(qMagn)
+        tmp32no1 = WEBRTC_SPL_MUL_32_16(tmp32no1, ONE_MINUS_GAMMA_PAUSE_Q8); // Q(8+prevQMagn+nShifts)
+        tmp32no1 = WEBRTC_SPL_RSHIFT_W32(tmp32no1 + 128, 8); // Q(qMagn)
+      } else {
+        tmp32no1 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)magnU16[i], nShifts)
+                   - inst->avgMagnPause[i]; // Q(qMagn+nShifts)
+        tmp32no1 = WEBRTC_SPL_MUL_32_16(tmp32no1, ONE_MINUS_GAMMA_PAUSE_Q8); // Q(8+prevQMagn+nShifts)
+        tmp32no1 = WEBRTC_SPL_RSHIFT_W32(tmp32no1 + (128 << nShifts), 8 + nShifts); // Q(qMagn)
+      }
+      tmp32no2 += tmp32no1; // Q(qMagn)
+    }
+    inst->avgMagnPause[i] = tmp32no2;
+  } // end of frequency loop
+
+  norm32no1 = WebRtcSpl_NormU32(maxNoiseU32);
+  qNoise = inst->prevQNoise + norm32no1 - 5;
+  // done with step 2: noise update
+
+  // STEP 3: compute dd update of prior snr and post snr based on new noise estimate
+  nShifts = inst->prevQNoise + 11 - qMagn;
+  for (i = 0; i < inst->magnLen; i++) {
+    // FLOAT code
+    // // post and prior SNR
+    // curNearSnr = 0.0;
+    // if (magn[i] > noise[i])
+    // {
+    // curNearSnr = magn[i] / (noise[i] + 0.0001) - 1.0;
+    // }
+    // // DD estimate is sum of two terms: current estimate and previous estimate
+    // // directed decision update of snrPrior
+    // snrPrior = DD_PR_SNR * prevNearSnr[i] + (1.0 - DD_PR_SNR) * curNearSnr;
+    // // gain filter
+    // tmpFloat1 = inst->overdrive + snrPrior;
+    // tmpFloat2 = snrPrior / tmpFloat1;
+    // theFilter[i] = tmpFloat2;
+
+    // calculate curNearSnr again; this is necessary because a new noise estimate has been made since the one used above
+    curNearSnr = 0; // Q11
+    if (nShifts < 0) {
+      // This case is equivalent with magn < noise which implies curNearSnr = 0;
+      tmpMagnU32 = (WebRtc_UWord32)magnU16[i]; // Q(qMagn)
+      tmpNoiseU32 = WEBRTC_SPL_LSHIFT_U32(noiseU32[i], -nShifts); // Q(qMagn)
+    } else if (nShifts > 17) {
+      tmpMagnU32 = WEBRTC_SPL_LSHIFT_U32(magnU16[i], 17); // Q(qMagn+17)
+      tmpNoiseU32 = WEBRTC_SPL_RSHIFT_U32(noiseU32[i], nShifts - 17); // Q(qMagn+17)
+    } else {
+      tmpMagnU32 = WEBRTC_SPL_LSHIFT_U32((WebRtc_UWord32)magnU16[i], nShifts); // Q(qNoise_prev+11)
+      tmpNoiseU32 = noiseU32[i]; // Q(qNoise_prev+11)
+    }
+    if (tmpMagnU32 > tmpNoiseU32) {
+      tmpU32no1 = tmpMagnU32 - tmpNoiseU32; // Q(qCur)
+      norm32no2 = WEBRTC_SPL_MIN(11, WebRtcSpl_NormU32(tmpU32no1));
+      tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(tmpU32no1, norm32no2); // Q(qCur+norm32no2)
+      tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(tmpNoiseU32, 11 - norm32no2); // Q(qCur+norm32no2-11)
+      if (tmpU32no2 > 0) {
+        tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); // Q11
+      }
+      curNearSnr = WEBRTC_SPL_MIN(satMax, tmpU32no1); // Q11
+    }
+
+    //directed decision update of priorSnr
+    // FLOAT
+    // priorSnr = DD_PR_SNR * prevNearSnr + (1.0-DD_PR_SNR) * curNearSnr;
+
+    tmpU32no1 = WEBRTC_SPL_UMUL_32_16(prevNearSnr[i], DD_PR_SNR_Q11); // Q22
+    tmpU32no2 = WEBRTC_SPL_UMUL_32_16(curNearSnr, ONE_MINUS_DD_PR_SNR_Q11); // Q22
+    priorSnr = tmpU32no1 + tmpU32no2; // Q22
+
+    //gain filter
+    tmpU32no1 = (WebRtc_UWord32)(inst->overdrive)
+                + WEBRTC_SPL_RSHIFT_U32(priorSnr + 8192, 14); // Q8
+    assert(inst->overdrive > 0);
+    tmpU16no1 = (WebRtc_UWord16)WEBRTC_SPL_UDIV(priorSnr + (tmpU32no1 >> 1), tmpU32no1); // Q14
+    inst->noiseSupFilter[i] = WEBRTC_SPL_SAT(16384, tmpU16no1, inst->denoiseBound); // 16384 = Q14(1.0) // Q14
+
+    // Weight in the parametric Wiener filter during startup
+    if (inst->blockIndex < END_STARTUP_SHORT) {
+      // Weight the two suppression filters
+      tmpU32no1 = WEBRTC_SPL_UMUL_16_16(inst->noiseSupFilter[i],
+                                        (WebRtc_UWord16)inst->blockIndex);
+      tmpU32no2 = WEBRTC_SPL_UMUL_16_16(noiseSupFilterTmp[i],
+                                        (WebRtc_UWord16)(END_STARTUP_SHORT
+                                                         - inst->blockIndex));
+      tmpU32no1 += tmpU32no2;
+      inst->noiseSupFilter[i] = (WebRtc_UWord16)WebRtcSpl_DivU32U16(tmpU32no1,
+                                                                    END_STARTUP_SHORT);
+    }
+  } // end of loop over frequencies
+  //done with step3
+
+  // save noise and magnitude spectrum for next frame
+  inst->prevQNoise = qNoise;
+  inst->prevQMagn = qMagn;
+  if (norm32no1 > 5) {
+    for (i = 0; i < inst->magnLen; i++) {
+      inst->prevNoiseU32[i] = WEBRTC_SPL_LSHIFT_U32(noiseU32[i], norm32no1 - 5); // Q(qNoise+11)
+      inst->prevMagnU16[i] = magnU16[i]; // Q(qMagn)
+    }
+  } else {
+    for (i = 0; i < inst->magnLen; i++) {
+      inst->prevNoiseU32[i] = WEBRTC_SPL_RSHIFT_U32(noiseU32[i], 5 - norm32no1); // Q(qNoise+11)
+      inst->prevMagnU16[i] = magnU16[i]; // Q(qMagn)
+    }
+  }
+
+  WebRtcNsx_DataSynthesis(inst, outFrame);
+#ifdef NS_FILEDEBUG
+  fwrite(outframe, sizeof(short), inst->blockLen10ms, inst->outfile);
+#endif
+
+  //for H band:
+  // only update the data buffer here; the time-domain gain derived from the L band is then applied
+  if (inst->fs == 32000) {
+    // update analysis buffer for H band
+    // append new data to buffer FX
+    WEBRTC_SPL_MEMCPY_W16(inst->dataBufHBFX, inst->dataBufHBFX + inst->blockLen10ms, inst->anaLen - inst->blockLen10ms);
+    WEBRTC_SPL_MEMCPY_W16(inst->dataBufHBFX + inst->anaLen - inst->blockLen10ms, speechFrameHB, inst->blockLen10ms);
+    // range for averaging low band quantities for H band gain
+
+    gainTimeDomainHB = 16384; // 16384 = Q14(1.0)
+    //average speech prob from low band
+    //average filter gain from low band
+    //avg over second half (i.e., 4->8kHz) of freq. spectrum
+    tmpU32no1 = 0; // Q12
+    tmpU16no1 = 0; // Q8
+    for (i = inst->anaLen2 - (inst->anaLen2 >> 2); i < inst->anaLen2; i++) {
+      tmpU16no1 += nonSpeechProbFinal[i]; // Q8
+      tmpU32no1 += (WebRtc_UWord32)(inst->noiseSupFilter[i]); // Q14
+    }
+    avgProbSpeechHB = (WebRtc_Word16)(4096
+        - WEBRTC_SPL_RSHIFT_U16(tmpU16no1, inst->stages - 7)); // Q12
+    avgFilterGainHB = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(
+        tmpU32no1, inst->stages - 3); // Q14
+
+    // // original FLOAT code
+    // // gain based on speech probability:
+    // avg_prob_speech_tt=(float)2.0*avg_prob_speech-(float)1.0;
+    // gain_mod=(float)0.5*((float)1.0+(float)tanh(avg_prob_speech_tt)); // between 0 and 1
+
+    // gain based on speech probability:
+    // original expression: "0.5 * (1 + tanh(2x-1))"
+    // avgProbSpeechHB has been anyway saturated to a value between 0 and 1 so the other cases don't have to be dealt with
+    // avgProbSpeechHB and gainModHB are in Q12, 3607 = Q12(0.880615234375) which is a zero point of
+    // |0.5 * (1 + tanh(2x-1)) - x| - |0.5 * (1 + tanh(2x-1)) - 0.880615234375| meaning that from that point the error of approximating
+    // the expression with f(x) = x would be greater than the error of approximating the expression with f(x) = 0.880615234375
+    // error: "|0.5 * (1 + tanh(2x-1)) - x| from x=0 to 0.880615234375" -> http://www.wolframalpha.com/input/?i=|0.5+*+(1+%2B+tanh(2x-1))+-+x|+from+x%3D0+to+0.880615234375
+    // and:  "|0.5 * (1 + tanh(2x-1)) - 0.880615234375| from x=0.880615234375 to 1" -> http://www.wolframalpha.com/input/?i=+|0.5+*+(1+%2B+tanh(2x-1))+-+0.880615234375|+from+x%3D0.880615234375+to+1
+    gainModHB = WEBRTC_SPL_MIN(avgProbSpeechHB, 3607);
+
+    // // original FLOAT code
+    // //combine gain with low band gain
+    // if (avg_prob_speech < (float)0.5) {
+    // gain_time_domain_HB=(float)0.5*gain_mod+(float)0.5*avg_filter_gain;
+    // }
+    // else {
+    // gain_time_domain_HB=(float)0.25*gain_mod+(float)0.75*avg_filter_gain;
+    // }
+
+
+    //combine gain with low band gain
+    if (avgProbSpeechHB < 2048) {
+      // 2048 = Q12(0.5)
+      // the next two lines in float are  "gain_time_domain = 0.5 * gain_mod + 0.5 * avg_filter_gain"; Q2(0.5) = 2 equals one left shift
+      gainTimeDomainHB = (gainModHB << 1) + (avgFilterGainHB >> 1); // Q14
+    } else {
+      // "gain_time_domain = 0.25 * gain_mod + 0.75 * agv_filter_gain;"
+      gainTimeDomainHB = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(3, avgFilterGainHB, 2); // 3 = Q2(0.75); Q14
+      gainTimeDomainHB += gainModHB; // Q14
+    }
+    //make sure gain is within flooring range
+    gainTimeDomainHB
+      = WEBRTC_SPL_SAT(16384, gainTimeDomainHB, (WebRtc_Word16)(inst->denoiseBound)); // 16384 = Q14(1.0)
+
+
+    //apply gain
+    for (i = 0; i < inst->blockLen10ms; i++) {
+      outFrameHB[i]
+        = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gainTimeDomainHB, inst->dataBufHBFX[i], 14); // Q0
+    }
+  } // end of H band gain computation
+
+  return 0;
+}
+
+
diff --git a/trunk/src/modules/audio_processing/ns/nsx_core.h b/trunk/src/modules/audio_processing/ns/nsx_core.h
new file mode 100644
index 0000000..0a0faf9
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/nsx_core.h
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_CORE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_CORE_H_
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+#include "nsx_defines.h"
+
+#ifdef NS_FILEDEBUG
+#include <stdio.h>
+#endif
+
+// State for one fixed-point noise-suppression instance. Q-domain notes below
+// are taken from the visible usage in WebRtcNsx_ProcessCore; entries marked
+// "TODO confirm" are inferences from names only, not visible in this file.
+typedef struct NsxInst_t_ {
+  WebRtc_UWord32          fs; // sampling frequency in Hz; fs == 32000 enables the high-band (H) gain path in ProcessCore
+
+  const WebRtc_Word16*    window; // analysis window table (set externally; presumably sized to anaLen — TODO confirm)
+  WebRtc_Word16           analysisBuffer[ANAL_BLOCKL_MAX]; // buffered input samples for spectral analysis, Q0
+  WebRtc_Word16           synthesisBuffer[ANAL_BLOCKL_MAX]; // overlap-add buffer for the synthesized output, Q0
+  WebRtc_UWord16          noiseSupFilter[HALF_ANAL_BLOCKL]; // per-bin suppression gains, Q14, saturated to [denoiseBound, 16384]
+  WebRtc_UWord16          overdrive; /* Q8 */ // oversubtraction term added to the prior SNR in the gain filter; must be > 0
+  WebRtc_UWord16          denoiseBound; /* Q14 */ // lower bound (floor) for the suppression gain
+  const WebRtc_Word16*    factor2Table; // external lookup table (presumably for the gain map — TODO confirm)
+  WebRtc_Word16           noiseEstLogQuantile[SIMULT* HALF_ANAL_BLOCKL]; // log-domain quantile noise estimates, one block of magnLen per simultaneous estimate
+  WebRtc_Word16           noiseEstDensity[SIMULT* HALF_ANAL_BLOCKL]; // density trackers driving the quantile step size
+  WebRtc_Word16           noiseEstCounter[SIMULT]; // per-estimate update counters (kept < 201, see WebRtcNsx_kCounterDiv)
+  WebRtc_Word16           noiseEstQuantile[HALF_ANAL_BLOCKL]; // linear-domain noise quantile estimate, Q(qNoise)
+
+  WebRtc_Word16           anaLen; // analysis block length in samples
+  int                     anaLen2; // presumably anaLen / 2 — TODO confirm
+  int                     magnLen; // number of magnitude-spectrum bins
+  int                     aggrMode; // aggressiveness: 0 mild, 1 medium, 2 aggressive (see WebRtcNsx_set_policy_core)
+  int                     stages; // FFT stages; relates Q-domains to the transform size (e.g. energies in Q(-2*stages))
+  int                     initFlag; // nonzero once the instance is initialized (presumably — TODO confirm)
+  int                     gainMap; // gain-map enable/selector flag (semantics not visible here)
+
+  WebRtc_Word32           maxLrt; // upper clamp for the log-LRT feature (presumably — TODO confirm)
+  WebRtc_Word32           minLrt; // lower clamp for the log-LRT feature (presumably — TODO confirm)
+  WebRtc_Word32           logLrtTimeAvgW32[HALF_ANAL_BLOCKL]; //log lrt factor with time-smoothing in Q8
+  WebRtc_Word32           featureLogLrt; // log-LRT feature value for the speech/noise model
+  WebRtc_Word32           thresholdLogLrt; // decision threshold for the log-LRT feature
+  WebRtc_Word16           weightLogLrt; // weight of the log-LRT feature in the speech/noise probability
+
+  WebRtc_UWord32          featureSpecDiff; // spectral-difference feature, clamped to 0x007FFFFF in ProcessCore
+  WebRtc_UWord32          thresholdSpecDiff; // decision threshold for the spectral-difference feature
+  WebRtc_Word16           weightSpecDiff; // weight of the spectral-difference feature
+
+  WebRtc_UWord32          featureSpecFlat; // spectral-flatness feature
+  WebRtc_UWord32          thresholdSpecFlat; // decision threshold for the spectral-flatness feature
+  WebRtc_Word16           weightSpecFlat; // weight of the spectral-flatness feature
+
+  WebRtc_Word32           avgMagnPause[HALF_ANAL_BLOCKL]; //conservative estimate of noise spectrum
+  WebRtc_UWord32          magnEnergy; // energy of the current magnitude spectrum (presumably — TODO confirm)
+  WebRtc_UWord32          sumMagn; // sum of the current magnitude spectrum (presumably — TODO confirm)
+  WebRtc_UWord32          curAvgMagnEnergy; // magnitude-energy accumulator for the current window, Q(-2*stages) after STAT_UPDATES shift
+  WebRtc_UWord32          timeAvgMagnEnergy; // time-averaged magnitude energy, Q(-2*stages)
+  WebRtc_UWord32          timeAvgMagnEnergyTmp; // scratch value for the time-averaged energy update
+
+  WebRtc_UWord32          whiteNoiseLevel;              //initial noise estimate
+  WebRtc_UWord32          initMagnEst[HALF_ANAL_BLOCKL];//initial magnitude spectrum estimate
+  WebRtc_Word32           pinkNoiseNumerator;           //pink noise parameter: numerator
+  WebRtc_Word32           pinkNoiseExp;                 //pink noise parameter: power of freq
+  int                     minNorm;                      //smallest normalization factor
+  int                     zeroInputSignal;              //zero input signal flag
+
+  WebRtc_UWord32          prevNoiseU32[HALF_ANAL_BLOCKL]; //noise spectrum from previous frame, Q(prevQNoise+11)
+  WebRtc_UWord16          prevMagnU16[HALF_ANAL_BLOCKL]; //magnitude spectrum from previous frame, Q(prevQMagn)
+  WebRtc_Word16           priorNonSpeechProb; //prior speech/noise probability // Q14
+
+  int                     blockIndex; //frame index counter
+  int                     modelUpdate; //parameter for updating or estimating thresholds/weights for prior model
+  int                     cntThresUpdate; // frames since last model update; reset when it reaches modelUpdate
+
+  //histograms for parameter estimation
+  WebRtc_Word16           histLrt[HIST_PAR_EST];
+  WebRtc_Word16           histSpecFlat[HIST_PAR_EST];
+  WebRtc_Word16           histSpecDiff[HIST_PAR_EST];
+
+  //quantities for high band estimate
+  WebRtc_Word16           dataBufHBFX[ANAL_BLOCKL_MAX]; /* Q0 */ // high-band (4-8 kHz) analysis data buffer
+
+  int                     qNoise; // current Q-domain of the noise quantile estimate
+  int                     prevQNoise; // Q-domain of the noise estimate from the previous frame
+  int                     prevQMagn; // Q-domain of the magnitude spectrum from the previous frame
+  int                     blockLen10ms; // number of samples in one 10 ms block
+
+  WebRtc_Word16           real[ANAL_BLOCKL_MAX]; // frequency-domain work buffer, real part (presumably FFT output — TODO confirm)
+  WebRtc_Word16           imag[ANAL_BLOCKL_MAX]; // frequency-domain work buffer, imaginary part
+  WebRtc_Word32           energyIn; // energy of the input frame
+  int                     scaleEnergyIn; // scale factor applied when computing energyIn
+  int                     normData; // normalization shift for the input data (used as stages - normData in noise estimation)
+
+} NsxInst_t;
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/****************************************************************************
+ * WebRtcNsx_InitCore(...)
+ *
+ * This function initializes a noise suppression instance
+ *
+ * Input:
+ *      - inst          : Instance that should be initialized
+ *      - fs            : Sampling frequency
+ *
+ * Output:
+ *      - inst          : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+WebRtc_Word32 WebRtcNsx_InitCore(NsxInst_t* inst, WebRtc_UWord32 fs);
+
+/****************************************************************************
+ * WebRtcNsx_set_policy_core(...)
+ *
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ *      - inst       : Instance that should be initialized
+ *      - mode       : 0: Mild (6 dB), 1: Medium (10 dB), 2: Aggressive (15 dB)
+ *
+ * Output:
+ *      - inst       : Initialized instance
+ *
+ * Return value      :  0 - Ok
+ *                     -1 - Error
+ */
+int WebRtcNsx_set_policy_core(NsxInst_t* inst, int mode);
+
+/****************************************************************************
+ * WebRtcNsx_ProcessCore
+ *
+ * Do noise suppression.
+ *
+ * Input:
+ *      - inst          : Instance that should be initialized
+ *      - inFrameLow    : Input speech frame for lower band
+ *      - inFrameHigh   : Input speech frame for higher band
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *      - outFrameLow   : Output speech frame for lower band
+ *      - outFrameHigh  : Output speech frame for higher band
+ *
+ * Return value         :  0 - OK
+ *                        -1 - Error
+ */
+int WebRtcNsx_ProcessCore(NsxInst_t* inst,
+                          short* inFrameLow,
+                          short* inFrameHigh,
+                          short* outFrameLow,
+                          short* outFrameHigh);
+
+/****************************************************************************
+ * Some function pointers, for internal functions shared by ARM NEON and 
+ * generic C code.
+ */
+// Noise Estimation.
+typedef void (*NoiseEstimation)(NsxInst_t* inst,
+                                uint16_t* magn,
+                                uint32_t* noise,
+                                int16_t* q_noise);
+extern NoiseEstimation WebRtcNsx_NoiseEstimation;
+
+// Filter the data in the frequency domain, and create spectrum.
+typedef void (*PrepareSpectrum)(NsxInst_t* inst,
+                                int16_t* freq_buff);
+extern PrepareSpectrum WebRtcNsx_PrepareSpectrum;
+
+// For the noise suppression process, synthesis, read out fully processed
+// segment, and update synthesis buffer.
+typedef void (*SynthesisUpdate)(NsxInst_t* inst,
+                                int16_t* out_frame,
+                                int16_t gain_factor);
+extern SynthesisUpdate WebRtcNsx_SynthesisUpdate;
+
+// Update analysis buffer for lower band, and window data before FFT.
+typedef void (*AnalysisUpdate)(NsxInst_t* inst,
+                               int16_t* out,
+                               int16_t* new_speech);
+extern AnalysisUpdate WebRtcNsx_AnalysisUpdate;
+
+// Denormalize the input buffer.
+typedef void (*Denormalize)(NsxInst_t* inst,
+                            int16_t* in,
+                            int factor);
+extern Denormalize WebRtcNsx_Denormalize;
+
+// Create a complex number buffer, as the input interleaved with zeros,
+// and normalize it.
+typedef void (*CreateComplexBuffer)(NsxInst_t* inst,
+                                    int16_t* in,
+                                    int16_t* out);
+extern CreateComplexBuffer WebRtcNsx_CreateComplexBuffer;
+
+/****************************************************************************
+ * Initialization of the above function pointers for ARM Neon.
+ */
+void WebRtcNsx_InitNeon(void);
+
+extern const WebRtc_Word16 WebRtcNsx_kLogTable[9];
+extern const WebRtc_Word16 WebRtcNsx_kLogTableFrac[256];
+extern const WebRtc_Word16 WebRtcNsx_kCounterDiv[201];
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_CORE_H_
diff --git a/trunk/src/modules/audio_processing/ns/nsx_core_neon.c b/trunk/src/modules/audio_processing/ns/nsx_core_neon.c
new file mode 100644
index 0000000..2f85abd
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/nsx_core_neon.c
@@ -0,0 +1,734 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "nsx_core.h"
+
+#include <arm_neon.h>
+#include <assert.h>
+
+// Update the noise estimation information.
+// Converts the log-domain quantile estimates of the simultaneous estimate
+// selected by |offset| (inst->noiseEstLogQuantile) back to linear magnitudes
+// in inst->noiseEstQuantile, after first deriving a Q-domain (inst->qNoise)
+// from the largest log quantile so the results still fit in int16. The main
+// loop is NEON-vectorized 4 bins at a time; one scalar tail iteration handles
+// the final bin (assumes inst->magnLen % 4 == 1 — TODO confirm; magnLen is
+// presumably 2^stages / 2 + 1).
+static void UpdateNoiseEstimateNeon(NsxInst_t* inst, int offset) {
+  int i = 0;  // NOTE(review): unused in this function.
+  const int16_t kExp2Const = 11819; // Q13, approx. 1.4427 = 1/ln(2); converts the
+                                    // natural-log quantile into a base-2 exponent.
+  int16_t* ptr_noiseEstLogQuantile = NULL;
+  int16_t* ptr_noiseEstQuantile = NULL;
+  int16x4_t kExp2Const16x4 = vdup_n_s16(kExp2Const);
+  int32x4_t twentyOne32x4 = vdupq_n_s32(21);
+  int32x4_t constA32x4 = vdupq_n_s32(0x1fffff); // mask for the 21 fraction bits
+  int32x4_t constB32x4 = vdupq_n_s32(0x200000); // implicit leading one, 2^21
+
+  // The largest log quantile in this estimate bounds the exponent range.
+  int16_t tmp16 = WebRtcSpl_MaxValueW16(inst->noiseEstLogQuantile + offset,
+                                        inst->magnLen);
+
+  // Guarantee a Q-domain as high as possible and still fit in int16
+  inst->qNoise = 14 - (int) WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2Const,
+                                                                 tmp16,
+                                                                 21);
+
+  int32x4_t qNoise32x4 = vdupq_n_s32(inst->qNoise);
+
+  // Vectorized loop: process 4 bins per iteration, stopping before the last
+  // bin, which is handled by the scalar tail below.
+  for (ptr_noiseEstLogQuantile = &inst->noiseEstLogQuantile[offset],
+       ptr_noiseEstQuantile = &inst->noiseEstQuantile[0];
+       ptr_noiseEstQuantile < &inst->noiseEstQuantile[inst->magnLen - 3];
+       ptr_noiseEstQuantile += 4, ptr_noiseEstLogQuantile += 4) {
+
+    // tmp32no2 = WEBRTC_SPL_MUL_16_16(kExp2Const,
+    //                                inst->noiseEstLogQuantile[offset + i]);
+    int16x4_t v16x4 = vld1_s16(ptr_noiseEstLogQuantile);
+    int32x4_t v32x4B = vmull_s16(v16x4, kExp2Const16x4);
+
+    // tmp32no1 = (0x00200000 | (tmp32no2 & 0x001FFFFF)); // 2^21 + frac
+    int32x4_t v32x4A = vandq_s32(v32x4B, constA32x4);
+    v32x4A = vorrq_s32(v32x4A, constB32x4);
+
+    // tmp16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32no2, 21);
+    v32x4B = vshrq_n_s32(v32x4B, 21);
+
+    // tmp16 -= 21;// shift 21 to get result in Q0
+    v32x4B = vsubq_s32(v32x4B, twentyOne32x4);
+
+    // tmp16 += (int16_t) inst->qNoise;
+    // shift to get result in Q(qNoise)
+    v32x4B = vaddq_s32(v32x4B, qNoise32x4);
+
+    // if (tmp16 < 0) {
+    //   tmp32no1 = WEBRTC_SPL_RSHIFT_W32(tmp32no1, -tmp16);
+    // } else {
+    //   tmp32no1 = WEBRTC_SPL_LSHIFT_W32(tmp32no1, tmp16);
+    // }
+    // vshlq_s32 shifts left for positive counts and right for negative ones,
+    // so it implements both branches in a single instruction.
+    v32x4B = vshlq_s32(v32x4A, v32x4B);
+
+    // tmp16 = WebRtcSpl_SatW32ToW16(tmp32no1);
+    // Saturating narrow from 32 to 16 bits matches WebRtcSpl_SatW32ToW16.
+    v16x4 = vqmovn_s32(v32x4B);
+
+    //inst->noiseEstQuantile[i] = tmp16;
+    vst1_s16(ptr_noiseEstQuantile, v16x4);
+  }
+
+  // Last iteration: scalar version of the same exp() reconstruction for the
+  // final bin (the pointers now address that bin).
+
+  // inst->quantile[i]=exp(inst->lquantile[offset+i]);
+  // in Q21
+  int32_t tmp32no2 = WEBRTC_SPL_MUL_16_16(kExp2Const,
+                                          *ptr_noiseEstLogQuantile);
+  int32_t tmp32no1 = (0x00200000 | (tmp32no2 & 0x001FFFFF)); // 2^21 + frac
+
+  tmp16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32no2, 21);
+  tmp16 -= 21;// shift 21 to get result in Q0
+  tmp16 += (int16_t) inst->qNoise; //shift to get result in Q(qNoise)
+  if (tmp16 < 0) {
+    tmp32no1 = WEBRTC_SPL_RSHIFT_W32(tmp32no1, -tmp16);
+  } else {
+    tmp32no1 = WEBRTC_SPL_LSHIFT_W32(tmp32no1, tmp16);
+  }
+  *ptr_noiseEstQuantile = WebRtcSpl_SatW32ToW16(tmp32no1);
+}
+
+// Noise Estimation
+static void NoiseEstimationNeon(NsxInst_t* inst,
+                                uint16_t* magn,
+                                uint32_t* noise,
+                                int16_t* q_noise) {
+  int16_t lmagn[HALF_ANAL_BLOCKL], counter, countDiv;
+  int16_t countProd, delta, zeros, frac;
+  int16_t log2, tabind, logval, tmp16, tmp16no1, tmp16no2;
+  const int16_t log2_const = 22713;
+  const int16_t width_factor = 21845;
+
+  int i, s, offset;
+
+  tabind = inst->stages - inst->normData;
+  assert(tabind < 9);
+  assert(tabind > -9);
+  if (tabind < 0) {
+    logval = -WebRtcNsx_kLogTable[-tabind];
+  } else {
+    logval = WebRtcNsx_kLogTable[tabind];
+  }
+
+  int16x8_t logval_16x8 = vdupq_n_s16(logval);
+
+  // lmagn(i)=log(magn(i))=log(2)*log2(magn(i))
+  // magn is in Q(-stages), and the real lmagn values are:
+  // real_lmagn(i)=log(magn(i)*2^stages)=log(magn(i))+log(2^stages)
+  // lmagn in Q8
+  for (i = 0; i < inst->magnLen; i++) {
+    if (magn[i]) {
+      zeros = WebRtcSpl_NormU32((uint32_t)magn[i]);
+      frac = (int16_t)((((uint32_t)magn[i] << zeros)
+                        & 0x7FFFFFFF) >> 23);
+      assert(frac < 256);
+      // log2(magn(i))
+      log2 = (int16_t)(((31 - zeros) << 8)
+                       + WebRtcNsx_kLogTableFrac[frac]);
+      // log2(magn(i))*log(2)
+      lmagn[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(log2, log2_const, 15);
+      // + log(2^stages)
+      lmagn[i] += logval;
+    } else {
+      lmagn[i] = logval;
+    }
+  }
+
+  int16x4_t Q3_16x4  = vdup_n_s16(3);
+  int16x8_t WIDTHQ8_16x8 = vdupq_n_s16(WIDTH_Q8);
+  int16x8_t WIDTHFACTOR_16x8 = vdupq_n_s16(width_factor);
+
+  int16_t factor = FACTOR_Q7;
+  if (inst->blockIndex < END_STARTUP_LONG)
+    factor = FACTOR_Q7_STARTUP;
+
+  // Loop over simultaneous estimates
+  for (s = 0; s < SIMULT; s++) {
+    offset = s * inst->magnLen;
+
+    // Get counter values from state
+    counter = inst->noiseEstCounter[s];
+    assert(counter < 201);
+    countDiv = WebRtcNsx_kCounterDiv[counter];
+    countProd = (int16_t)WEBRTC_SPL_MUL_16_16(counter, countDiv);
+
+    // quant_est(...)
+    int16_t deltaBuff[8];
+    int16x4_t tmp16x4_0;
+    int16x4_t tmp16x4_1;
+    int16x4_t countDiv_16x4 = vdup_n_s16(countDiv);
+    int16x8_t countProd_16x8 = vdupq_n_s16(countProd);
+    int16x8_t tmp16x8_0 = vdupq_n_s16(countDiv);
+    int16x8_t prod16x8 = vqrdmulhq_s16(WIDTHFACTOR_16x8, tmp16x8_0);
+    int16x8_t tmp16x8_1;
+    int16x8_t tmp16x8_2;
+    int16x8_t tmp16x8_3;
+    int16x8_t tmp16x8_4;
+    int16x8_t tmp16x8_5;
+    int32x4_t tmp32x4;
+
+    for (i = 0; i < inst->magnLen - 7; i += 8) {
+      // Compute delta.
+      // Smaller step size during startup. This prevents from using
+      // unrealistic values causing overflow.
+      tmp16x8_0 = vdupq_n_s16(factor);
+      vst1q_s16(deltaBuff, tmp16x8_0);
+
+      int j;
+      for (j = 0; j < 8; j++) {
+        if (inst->noiseEstDensity[offset + i + j] > 512) {
+          // Get values for deltaBuff by shifting instead of dividing.
+          int factor = WebRtcSpl_NormW16(inst->noiseEstDensity[offset + i + j]);
+          deltaBuff[j] = (int16_t)(FACTOR_Q16 >> (14 - factor));
+        }
+      }
+
+      // Update log quantile estimate
+
+      // tmp16 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(delta, countDiv, 14);
+      tmp32x4 = vmull_s16(vld1_s16(&deltaBuff[0]), countDiv_16x4);
+      tmp16x4_1 = vshrn_n_s32(tmp32x4, 14);
+      tmp32x4 = vmull_s16(vld1_s16(&deltaBuff[4]), countDiv_16x4);
+      tmp16x4_0 = vshrn_n_s32(tmp32x4, 14);
+      tmp16x8_0 = vcombine_s16(tmp16x4_1, tmp16x4_0); // Keep for several lines.
+
+      // prepare for the "if" branch
+      // tmp16 += 2;
+      // tmp16_1 = (Word16)(tmp16>>2);
+      tmp16x8_1 = vrshrq_n_s16(tmp16x8_0, 2);
+
+      // inst->noiseEstLogQuantile[offset+i] + tmp16_1;
+      tmp16x8_2 = vld1q_s16(&inst->noiseEstLogQuantile[offset + i]); // Keep
+      tmp16x8_1 = vaddq_s16(tmp16x8_2, tmp16x8_1); // Keep for several lines
+
+      // Prepare for the "else" branch
+      // tmp16 += 1;
+      // tmp16_1 = (Word16)(tmp16>>1);
+      tmp16x8_0 = vrshrq_n_s16(tmp16x8_0, 1);
+
+      // tmp16_2 = (Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16_1,3,1);
+      tmp32x4 = vmull_s16(vget_low_s16(tmp16x8_0), Q3_16x4);
+      tmp16x4_1 = vshrn_n_s32(tmp32x4, 1);
+
+      // tmp16_2 = (Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp16_1,3,1);
+      tmp32x4 = vmull_s16(vget_high_s16(tmp16x8_0), Q3_16x4);
+      tmp16x4_0 = vshrn_n_s32(tmp32x4, 1);
+
+      // inst->noiseEstLogQuantile[offset + i] - tmp16_2;
+      tmp16x8_0 = vcombine_s16(tmp16x4_1, tmp16x4_0); // keep
+      tmp16x8_0 = vsubq_s16(tmp16x8_2, tmp16x8_0);
+
+      // logval is the smallest fixed point representation we can have. Values
+      // below that will correspond to values in the interval [0, 1], which
+      // can't possibly occur.
+      tmp16x8_0 = vmaxq_s16(tmp16x8_0, logval_16x8);
+
+      // Do the if-else branches:
+      tmp16x8_3 = vld1q_s16(&lmagn[i]); // keep for several lines
+      tmp16x8_5 = vsubq_s16(tmp16x8_3, tmp16x8_2);
+      __asm__("vcgt.s16 %q0, %q1, #0"::"w"(tmp16x8_4), "w"(tmp16x8_5));
+      __asm__("vbit %q0, %q1, %q2"::
+              "w"(tmp16x8_2), "w"(tmp16x8_1), "w"(tmp16x8_4));
+      __asm__("vbif %q0, %q1, %q2"::
+              "w"(tmp16x8_2), "w"(tmp16x8_0), "w"(tmp16x8_4));
+      vst1q_s16(&inst->noiseEstLogQuantile[offset + i], tmp16x8_2);
+
+      // Update density estimate
+      // tmp16_1 + tmp16_2
+      tmp16x8_1 = vld1q_s16(&inst->noiseEstDensity[offset + i]);
+      tmp16x8_0 = vqrdmulhq_s16(tmp16x8_1, countProd_16x8);
+      tmp16x8_0 = vaddq_s16(tmp16x8_0, prod16x8);
+
+      // lmagn[i] - inst->noiseEstLogQuantile[offset + i]
+      tmp16x8_3 = vsubq_s16(tmp16x8_3, tmp16x8_2);
+      tmp16x8_3 = vabsq_s16(tmp16x8_3);
+      tmp16x8_4 = vcgtq_s16(WIDTHQ8_16x8, tmp16x8_3);
+      __asm__("vbit %q0, %q1, %q2"::
+              "w"(tmp16x8_1), "w"(tmp16x8_0), "w"(tmp16x8_4));
+      vst1q_s16(&inst->noiseEstDensity[offset + i], tmp16x8_1);
+    } // End loop over magnitude spectrum
+
+    // Last iteration over magnitude spectrum:
+    // compute delta
+    if (inst->noiseEstDensity[offset + i] > 512) {
+      // Get values for deltaBuff by shifting instead of dividing.
+      int factor = WebRtcSpl_NormW16(inst->noiseEstDensity[offset + i]);
+      delta = (int16_t)(FACTOR_Q16 >> (14 - factor));
+    } else {
+      delta = FACTOR_Q7;
+      if (inst->blockIndex < END_STARTUP_LONG) {
+        // Smaller step size during startup. This prevents from using
+        // unrealistic values causing overflow.
+        delta = FACTOR_Q7_STARTUP;
+      }
+    }
+    // update log quantile estimate
+    tmp16 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(delta, countDiv, 14);
+    if (lmagn[i] > inst->noiseEstLogQuantile[offset + i]) {
+      // +=QUANTILE*delta/(inst->counter[s]+1) QUANTILE=0.25, =1 in Q2
+      // CounterDiv=1/(inst->counter[s]+1) in Q15
+      tmp16 += 2;
+      tmp16no1 = WEBRTC_SPL_RSHIFT_W16(tmp16, 2);
+      inst->noiseEstLogQuantile[offset + i] += tmp16no1;
+    } else {
+      tmp16 += 1;
+      tmp16no1 = WEBRTC_SPL_RSHIFT_W16(tmp16, 1);
+      // *(1-QUANTILE), in Q2 QUANTILE=0.25, 1-0.25=0.75=3 in Q2
+      tmp16no2 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, 3, 1);
+      inst->noiseEstLogQuantile[offset + i] -= tmp16no2;
+      if (inst->noiseEstLogQuantile[offset + i] < logval) {
+        // logval is the smallest fixed point representation we can have.
+        // Values below that will correspond to values in the interval
+        // [0, 1], which can't possibly occur.
+        inst->noiseEstLogQuantile[offset + i] = logval;
+      }
+    }
+
+    // update density estimate
+    if (WEBRTC_SPL_ABS_W16(lmagn[i] - inst->noiseEstLogQuantile[offset + i])
+        < WIDTH_Q8) {
+      tmp16no1 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                   inst->noiseEstDensity[offset + i], countProd, 15);
+      tmp16no2 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                   width_factor, countDiv, 15);
+      inst->noiseEstDensity[offset + i] = tmp16no1 + tmp16no2;
+    }
+
+
+    if (counter >= END_STARTUP_LONG) {
+      inst->noiseEstCounter[s] = 0;
+      if (inst->blockIndex >= END_STARTUP_LONG) {
+        UpdateNoiseEstimateNeon(inst, offset);
+      }
+    }
+    inst->noiseEstCounter[s]++;
+
+  } // end loop over simultaneous estimates
+
+  // Sequentially update the noise during startup
+  if (inst->blockIndex < END_STARTUP_LONG) {
+    UpdateNoiseEstimateNeon(inst, offset);
+  }
+
+  for (i = 0; i < inst->magnLen; i++) {
+    noise[i] = (uint32_t)(inst->noiseEstQuantile[i]); // Q(qNoise)
+  }
+  (*q_noise) = (int16_t)inst->qNoise;
+}
+
+// Filter the data in the frequency domain with the noise-suppression gains,
+// then create the conjugate-symmetric spectrum layout expected by the
+// inverse FFT.
+static void PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buf) {
+
+  // (1) Filtering.
+
+  // Fixed point C code for the next block is as follows:
+  // for (i = 0; i < inst->magnLen; i++) {
+  //   inst->real[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(inst->real[i],
+  //      (int16_t)(inst->noiseSupFilter[i]), 14); // Q(normData-stages)
+  //   inst->imag[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(inst->imag[i],
+  //      (int16_t)(inst->noiseSupFilter[i]), 14); // Q(normData-stages)
+  // }
+
+  int16_t* ptr_real = &inst->real[0];
+  int16_t* ptr_imag = &inst->imag[0];
+  uint16_t* ptr_noiseSupFilter = &inst->noiseSupFilter[0];
+
+  // Filter the rest in the frequency domain.
+  // NOTE(review): each iteration handles 8 (real, imag) pairs; assumes
+  // magnLen - 1 is a multiple of 8 -- TODO confirm for all block sizes.
+  for (; ptr_real < &inst->real[inst->magnLen - 1];) {
+    // Loop unrolled once. Both pointers are incremented by 4 twice.
+    __asm__ __volatile__(
+      "vld1.16 d20, [%[ptr_real]]\n\t"
+      "vld1.16 d22, [%[ptr_imag]]\n\t"
+      "vld1.16 d23, [%[ptr_noiseSupFilter]]!\n\t"
+      "vmull.s16 q10, d20, d23\n\t"
+      "vmull.s16 q11, d22, d23\n\t"
+      "vshrn.s32 d20, q10, #14\n\t"
+      "vshrn.s32 d22, q11, #14\n\t"
+      "vst1.16 d20, [%[ptr_real]]!\n\t"
+      "vst1.16 d22, [%[ptr_imag]]!\n\t"
+
+      "vld1.16 d18, [%[ptr_real]]\n\t"
+      "vld1.16 d24, [%[ptr_imag]]\n\t"
+      "vld1.16 d25, [%[ptr_noiseSupFilter]]!\n\t"
+      "vmull.s16 q9, d18, d25\n\t"
+      "vmull.s16 q12, d24, d25\n\t"
+      "vshrn.s32 d18, q9, #14\n\t"
+      "vshrn.s32 d24, q12, #14\n\t"
+      "vst1.16 d18, [%[ptr_real]]!\n\t"
+      "vst1.16 d24, [%[ptr_imag]]!\n\t"
+
+      // Specify constraints.
+      :[ptr_imag]"+r"(ptr_imag),
+       [ptr_real]"+r"(ptr_real),
+       [ptr_noiseSupFilter]"+r"(ptr_noiseSupFilter)
+      :
+      :"d18", "d19", "d20", "d21", "d22", "d23", "d24", "d25",
+       "q9", "q10", "q11", "q12"
+    );
+  }
+
+  // Filter the last pair of elements in the frequency domain.
+  *ptr_real = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(*ptr_real,
+      (int16_t)(*ptr_noiseSupFilter), 14); // Q(normData-stages)
+  *ptr_imag = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(*ptr_imag,
+      (int16_t)(*ptr_noiseSupFilter), 14); // Q(normData-stages)
+
+  // (2) Create spectrum.
+
+  // Fixed point C code for the rest of the function is as follows:
+  // freq_buf[0] = inst->real[0];
+  // freq_buf[1] = -inst->imag[0];
+  // for (i = 1, j = 2; i < inst->anaLen2; i += 1, j += 2) {
+  //   tmp16 = (inst->anaLen << 1) - j;
+  //   freq_buf[j] = inst->real[i];
+  //   freq_buf[j + 1] = -inst->imag[i];
+  //   freq_buf[tmp16] = inst->real[i];
+  //   freq_buf[tmp16 + 1] = inst->imag[i];
+  // }
+  // freq_buf[inst->anaLen] = inst->real[inst->anaLen2];
+  // freq_buf[inst->anaLen + 1] = -inst->imag[inst->anaLen2];
+
+  freq_buf[0] = inst->real[0];
+  freq_buf[1] = -inst->imag[0];
+
+  // Post-index step for ptr_realImag2: walk backwards 8 int16_t elements
+  // (16 bytes) after each mirrored 8-element store.
+  int offset = -16;
+  int16_t* ptr_realImag1 = &freq_buf[2];
+  // Bug fix: the initializer previously read
+  // "ptr_realImag2 = ptr_realImag2 = &freq_buf[...]", referencing the
+  // still-uninitialized variable inside its own definition. The redundant
+  // inner assignment is removed; the computed value is unchanged.
+  int16_t* ptr_realImag2 = &freq_buf[(inst->anaLen << 1) - 8];
+  ptr_real = &inst->real[1];
+  ptr_imag = &inst->imag[1];
+  for (; ptr_real < &inst->real[inst->anaLen2 - 11];) {
+    // Loop unrolled once. All pointers are incremented twice.
+    __asm__ __volatile__(
+      "vld1.16 d22, [%[ptr_real]]!\n\t"
+      "vld1.16 d23, [%[ptr_imag]]!\n\t"
+      // Negate and interleave:
+      "vmov.s16 d20, d22\n\t"
+      "vneg.s16 d21, d23\n\t"
+      "vzip.16 d20, d21\n\t"
+      // Write 8 elements to &freq_buf[j]
+      "vst1.16 {d20, d21}, [%[ptr_realImag1]]!\n\t"
+      // Interleave and reverse elements:
+      "vzip.16 d22, d23\n\t"
+      "vrev64.32 d18, d23\n\t"
+      "vrev64.32 d19, d22\n\t"
+      // Write 8 elements to &freq_buf[tmp16]
+      "vst1.16 {d18, d19}, [%[ptr_realImag2]], %[offset]\n\t"
+
+      "vld1.16 d22, [%[ptr_real]]!\n\t"
+      "vld1.16 d23, [%[ptr_imag]]!\n\t"
+      // Negate and interleave:
+      "vmov.s16 d20, d22\n\t"
+      "vneg.s16 d21, d23\n\t"
+      "vzip.16 d20, d21\n\t"
+      // Write 8 elements to &freq_buf[j]
+      "vst1.16 {d20, d21}, [%[ptr_realImag1]]!\n\t"
+      // Interleave and reverse elements:
+      "vzip.16 d22, d23\n\t"
+      "vrev64.32 d18, d23\n\t"
+      "vrev64.32 d19, d22\n\t"
+      // Write 8 elements to &freq_buf[tmp16]
+      "vst1.16 {d18, d19}, [%[ptr_realImag2]], %[offset]\n\t"
+
+      // Specify constraints.
+      :[ptr_imag]"+r"(ptr_imag),
+       [ptr_real]"+r"(ptr_real),
+       [ptr_realImag1]"+r"(ptr_realImag1),
+       [ptr_realImag2]"+r"(ptr_realImag2)
+      :[offset]"r"(offset)
+      :"d18", "d19", "d20", "d21", "d22", "d23"
+    );
+  }
+  // Scalar tail: finish the remaining elements (those excluded by the
+  // "- 11" bound above) exactly as in the reference C code.
+  for (ptr_realImag2 += 6;
+       ptr_real <= &inst->real[inst->anaLen2];
+       ptr_real += 1, ptr_imag += 1, ptr_realImag1 += 2, ptr_realImag2 -= 2) {
+    *ptr_realImag1 = *ptr_real;
+    *(ptr_realImag1 + 1) = -(*ptr_imag);
+    *ptr_realImag2 = *ptr_real;
+    *(ptr_realImag2 + 1) = *ptr_imag;
+  }
+
+  freq_buf[inst->anaLen] = inst->real[inst->anaLen2];
+  freq_buf[inst->anaLen + 1] = -inst->imag[inst->anaLen2];
+}
+
+// Denormalize the input buffer: inst->real[i] =
+// sat16(in[2*i] << (factor - inst->normData)), i.e. only the real (even)
+// elements of the interleaved complex input are kept.
+static __inline void DenormalizeNeon(NsxInst_t* inst, int16_t* in, int factor) {
+  int16_t* ptr_real = &inst->real[0];
+  int16_t* ptr_in = &in[0];
+
+  // Broadcast the shift amount into all lanes of q10. VSHL with a negative
+  // per-lane count shifts right, so one instruction covers both directions
+  // of WEBRTC_SPL_SHIFT_W32.
+  // NOTE(review): the loop body below reads q10 set here by a *separate*
+  // asm statement; this relies on q10 surviving between asm statements,
+  // which GCC does not guarantee -- confirm on the supported toolchains.
+  __asm__ __volatile__("vdup.32 q10, %0" ::
+                       "r"((int32_t)(factor - inst->normData)) : "q10");
+  // Each iteration consumes 16 input values (8 complex pairs) and produces
+  // 8 outputs; assumes anaLen is a multiple of 8 -- TODO confirm.
+  for (; ptr_real < &inst->real[inst->anaLen];) {
+
+    // Loop unrolled once. Both pointers are incremented.
+    __asm__ __volatile__(
+      // tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t)in[j],
+      //                             factor - inst->normData);
+      // vld2 de-interleaves: d24 = real parts, d25 = (unused) imaginary.
+      "vld2.16 {d24, d25}, [%[ptr_in]]!\n\t"
+      "vmovl.s16 q12, d24\n\t"
+      "vshl.s32 q12, q10\n\t"
+      // inst->real[i] = WebRtcSpl_SatW32ToW16(tmp32); // Q0
+      "vqmovn.s32 d24, q12\n\t"
+      "vst1.16 d24, [%[ptr_real]]!\n\t"
+
+      // tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t)in[j],
+      //                             factor - inst->normData);
+      "vld2.16 {d22, d23}, [%[ptr_in]]!\n\t"
+      "vmovl.s16 q11, d22\n\t"
+      "vshl.s32 q11, q10\n\t"
+      // inst->real[i] = WebRtcSpl_SatW32ToW16(tmp32); // Q0
+      "vqmovn.s32 d22, q11\n\t"
+      "vst1.16 d22, [%[ptr_real]]!\n\t"
+
+      // Specify constraints.
+      :[ptr_in]"+r"(ptr_in),
+       [ptr_real]"+r"(ptr_real)
+      :
+      :"d22", "d23", "d24", "d25"
+    );
+  }
+}
+
+// For the noise suppression process: synthesis (overlap-add of the windowed,
+// gain-scaled data), read out the fully processed segment, and update the
+// synthesis buffer.
+static void SynthesisUpdateNeon(NsxInst_t* inst,
+                                int16_t* out_frame,
+                                int16_t gain_factor) {
+  int16_t* ptr_real = &inst->real[0];
+  int16_t* ptr_syn = &inst->synthesisBuffer[0];
+  int16_t* ptr_window = &inst->window[0];
+
+  // Synthesis: broadcast gain_factor (Q13) into d24.
+  // NOTE(review): the loop asm below lists d24 in its clobbers yet reads the
+  // gain from it every iteration; this relies on d24 surviving between
+  // separate asm statements, which GCC does not guarantee -- confirm on the
+  // supported toolchains.
+  __asm__ __volatile__("vdup.16 d24, %0" : : "r"(gain_factor) : "d24");
+  // Loop unrolled once. All pointers are incremented in the assembly code.
+  for (; ptr_syn < &inst->synthesisBuffer[inst->anaLen];) {
+    __asm__ __volatile__(
+      // Load variables.
+      "vld1.16 d22, [%[ptr_real]]!\n\t"
+      "vld1.16 d23, [%[ptr_window]]!\n\t"
+      "vld1.16 d25, [%[ptr_syn]]\n\t"
+      // tmp16a = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+      //           inst->window[i], inst->real[i], 14); // Q0, window in Q14
+      "vmull.s16 q11, d22, d23\n\t"
+      "vrshrn.i32 d22, q11, #14\n\t"
+      // tmp32 = WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(tmp16a, gain_factor, 13);
+      "vmull.s16 q11, d24, d22\n\t"
+      // tmp16b = WebRtcSpl_SatW32ToW16(tmp32); // Q0
+      "vqrshrn.s32 d22, q11, #13\n\t"
+      // inst->synthesisBuffer[i] = WEBRTC_SPL_ADD_SAT_W16(
+      //     inst->synthesisBuffer[i], tmp16b); // Q0
+      "vqadd.s16 d25, d22\n\t"
+      "vst1.16 d25, [%[ptr_syn]]!\n\t"
+
+      // Load variables.
+      "vld1.16 d26, [%[ptr_real]]!\n\t"
+      "vld1.16 d27, [%[ptr_window]]!\n\t"
+      "vld1.16 d28, [%[ptr_syn]]\n\t"
+      // tmp16a = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+      //           inst->window[i], inst->real[i], 14); // Q0, window in Q14
+      "vmull.s16 q13, d26, d27\n\t"
+      "vrshrn.i32 d26, q13, #14\n\t"
+      // tmp32 = WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(tmp16a, gain_factor, 13);
+      "vmull.s16 q13, d24, d26\n\t"
+      // tmp16b = WebRtcSpl_SatW32ToW16(tmp32); // Q0
+      "vqrshrn.s32 d26, q13, #13\n\t"
+      // inst->synthesisBuffer[i] = WEBRTC_SPL_ADD_SAT_W16(
+      //     inst->synthesisBuffer[i], tmp16b); // Q0
+      "vqadd.s16 d28, d26\n\t"
+      "vst1.16 d28, [%[ptr_syn]]!\n\t"
+
+      // Specify constraints.
+      :[ptr_real]"+r"(ptr_real),
+       [ptr_window]"+r"(ptr_window),
+       [ptr_syn]"+r"(ptr_syn)
+      :
+      :"d22", "d23", "d24", "d25", "d26", "d27", "d28", "q11", "q12", "q13"
+    );
+  }
+
+  int16_t* ptr_out = &out_frame[0];
+  ptr_syn = &inst->synthesisBuffer[0];
+  // Read out fully processed segment.
+  // NOTE(review): copies 16 elements per iteration; assumes blockLen10ms is
+  // a multiple of 16 -- TODO confirm.
+  for (; ptr_syn < &inst->synthesisBuffer[inst->blockLen10ms];) {
+    // Loop unrolled once. Both pointers are incremented in the assembly code.
+    __asm__ __volatile__(
+      // out_frame[i] = inst->synthesisBuffer[i]; // Q0
+      "vld1.16 {d22, d23}, [%[ptr_syn]]!\n\t"
+      "vld1.16 {d24, d25}, [%[ptr_syn]]!\n\t"
+      "vst1.16 {d22, d23}, [%[ptr_out]]!\n\t"
+      "vst1.16 {d24, d25}, [%[ptr_out]]!\n\t"
+      :[ptr_syn]"+r"(ptr_syn),
+       [ptr_out]"+r"(ptr_out)
+      :
+      :"d22", "d23", "d24", "d25"
+    );
+  }
+
+  // Update synthesis buffer.
+  // C code:
+  // WEBRTC_SPL_MEMCPY_W16(inst->synthesisBuffer,
+  //                      inst->synthesisBuffer + inst->blockLen10ms,
+  //                      inst->anaLen - inst->blockLen10ms);
+  // Fix: the two assignments below were previously joined by an accidental
+  // comma operator ("... = ...,"); behavior was the same but the intent was
+  // two separate statements.
+  ptr_out = &inst->synthesisBuffer[0];
+  ptr_syn = &inst->synthesisBuffer[inst->blockLen10ms];
+  for (; ptr_syn < &inst->synthesisBuffer[inst->anaLen];) {
+    // Loop unrolled once. Both pointers are incremented in the assembly code.
+    __asm__ __volatile__(
+      "vld1.16 {d22, d23}, [%[ptr_syn]]!\n\t"
+      "vld1.16 {d24, d25}, [%[ptr_syn]]!\n\t"
+      "vst1.16 {d22, d23}, [%[ptr_out]]!\n\t"
+      "vst1.16 {d24, d25}, [%[ptr_out]]!\n\t"
+      :[ptr_syn]"+r"(ptr_syn),
+       [ptr_out]"+r"(ptr_out)
+      :
+      :"d22", "d23", "d24", "d25"
+    );
+  }
+
+  // Zero the consumed tail of the synthesis buffer.
+  // C code:
+  // WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer
+  //    + inst->anaLen - inst->blockLen10ms, inst->blockLen10ms);
+  __asm__ __volatile__("vdup.16 q10, %0" : : "r"(0) : "q10");
+  for (; ptr_out < &inst->synthesisBuffer[inst->anaLen];) {
+    // Loop unrolled once. Pointer is incremented in the assembly code.
+    __asm__ __volatile__(
+      "vst1.16 {d20, d21}, [%[ptr_out]]!\n\t"
+      "vst1.16 {d20, d21}, [%[ptr_out]]!\n\t"
+      :[ptr_out]"+r"(ptr_out)
+      :
+      :"d20", "d21"
+    );
+  }
+}
+
+// Update analysis buffer for lower band, and window data before FFT.
+static void AnalysisUpdateNeon(NsxInst_t* inst,
+                               int16_t* out,
+                               int16_t* new_speech) {
+
+  int16_t* ptr_ana = &inst->analysisBuffer[inst->blockLen10ms];
+  int16_t* ptr_out = &inst->analysisBuffer[0];
+
+  // For lower band update analysis buffer.
+  // WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer,
+  //                      inst->analysisBuffer + inst->blockLen10ms,
+  //                      inst->anaLen - inst->blockLen10ms);
+  // NOTE(review): copies 16 elements per iteration; assumes
+  // anaLen - blockLen10ms is a multiple of 16 -- TODO confirm.
+  for (; ptr_out < &inst->analysisBuffer[inst->anaLen - inst->blockLen10ms];) {
+    // Loop unrolled once, so both pointers are incremented by 8 twice.
+    __asm__ __volatile__(
+      "vld1.16 {d20, d21}, [%[ptr_ana]]!\n\t"
+      "vst1.16 {d20, d21}, [%[ptr_out]]!\n\t"
+      "vld1.16 {d22, d23}, [%[ptr_ana]]!\n\t"
+      "vst1.16 {d22, d23}, [%[ptr_out]]!\n\t"
+      :[ptr_ana]"+r"(ptr_ana),
+       [ptr_out]"+r"(ptr_out)
+      :
+      :"d20", "d21", "d22", "d23"
+    );
+  }
+
+  // Append the new 10 ms of speech at the end of the analysis buffer.
+  // WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer
+  //    + inst->anaLen - inst->blockLen10ms, new_speech, inst->blockLen10ms);
+  for (ptr_ana = new_speech; ptr_out < &inst->analysisBuffer[inst->anaLen];) {
+    // Loop unrolled once, so both pointers are incremented by 8 twice.
+    __asm__ __volatile__(
+      "vld1.16 {d20, d21}, [%[ptr_ana]]!\n\t"
+      "vst1.16 {d20, d21}, [%[ptr_out]]!\n\t"
+      "vld1.16 {d22, d23}, [%[ptr_ana]]!\n\t"
+      "vst1.16 {d22, d23}, [%[ptr_out]]!\n\t"
+      :[ptr_ana]"+r"(ptr_ana),
+       [ptr_out]"+r"(ptr_out)
+      :
+      :"d20", "d21", "d22", "d23"
+    );
+  }
+
+  // Window data before FFT: out[i] = round(window[i] * analysisBuffer[i]
+  // >> 14), window in Q14. Eight outputs per iteration; assumes anaLen is a
+  // multiple of 8 -- TODO confirm.
+  int16_t* ptr_window = &inst->window[0];
+  ptr_out = &out[0];
+  ptr_ana = &inst->analysisBuffer[0];
+  for (; ptr_out < &out[inst->anaLen];) {
+
+    // Loop unrolled once, so all pointers are incremented by 4 twice.
+    __asm__ __volatile__(
+      "vld1.16 d20, [%[ptr_ana]]!\n\t"
+      "vld1.16 d21, [%[ptr_window]]!\n\t"
+      // out[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+      //           inst->window[i], inst->analysisBuffer[i], 14); // Q0
+      "vmull.s16 q10, d20, d21\n\t"
+      "vrshrn.i32 d20, q10, #14\n\t"
+      "vst1.16 d20, [%[ptr_out]]!\n\t"
+
+      "vld1.16 d22, [%[ptr_ana]]!\n\t"
+      "vld1.16 d23, [%[ptr_window]]!\n\t"
+      // out[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+      //           inst->window[i], inst->analysisBuffer[i], 14); // Q0
+      "vmull.s16 q11, d22, d23\n\t"
+      "vrshrn.i32 d22, q11, #14\n\t"
+      "vst1.16 d22, [%[ptr_out]]!\n\t"
+
+      // Specify constraints.
+      :[ptr_ana]"+r"(ptr_ana),
+       [ptr_window]"+r"(ptr_window),
+       [ptr_out]"+r"(ptr_out)
+      :
+      :"d20", "d21", "d22", "d23", "q10", "q11"
+    );
+  }
+}
+
+// Create a complex number buffer (out[]) as the input (in[]) interleaved with
+// zeros, and normalize it: out[2*i] = in[i] << normData, out[2*i + 1] = 0.
+static __inline void CreateComplexBufferNeon(NsxInst_t* inst,
+                                             int16_t* in,
+                                             int16_t* out) {
+  int16_t* ptr_out = &out[0];
+  int16_t* ptr_in = &in[0];
+
+  // d25 holds zeros for the imaginary parts; q10 holds the left-shift count.
+  // NOTE(review): the loop body below both reads AND lists d25/q10 in its
+  // clobbers, relying on their values (set by separate asm statements)
+  // surviving across iterations -- GCC does not guarantee this; confirm on
+  // the supported toolchains.
+  __asm__ __volatile__("vdup.16 d25, %0" : : "r"(0) : "d25");
+  __asm__ __volatile__("vdup.16 q10, %0" : : "r"(inst->normData) : "q10");
+  // 16 inputs consumed / 32 outputs written per iteration; assumes anaLen is
+  // a multiple of 16 -- TODO confirm.
+  for (; ptr_in < &in[inst->anaLen];) {
+
+    // Loop unrolled once, so ptr_in is incremented by 8 twice,
+    // and ptr_out is incremented by 8 four times.
+    __asm__ __volatile__(
+      // out[j] = WEBRTC_SPL_LSHIFT_W16(in[i], inst->normData); // Q(normData)
+      "vld1.16 {d22, d23}, [%[ptr_in]]!\n\t"
+      "vshl.s16 q11, q10\n\t"
+      "vmov d24, d23\n\t"
+
+      // out[j + 1] = 0; // Insert zeros in imaginary part
+      "vmov d23, d25\n\t"
+      "vst2.16 {d22, d23}, [%[ptr_out]]!\n\t"
+      "vst2.16 {d24, d25}, [%[ptr_out]]!\n\t"
+
+      // out[j] = WEBRTC_SPL_LSHIFT_W16(in[i], inst->normData); // Q(normData)
+      "vld1.16 {d22, d23}, [%[ptr_in]]!\n\t"
+      "vshl.s16 q11, q10\n\t"
+      "vmov d24, d23\n\t"
+
+      // out[j + 1] = 0; // Insert zeros in imaginary part
+      "vmov d23, d25\n\t"
+      "vst2.16 {d22, d23}, [%[ptr_out]]!\n\t"
+      "vst2.16 {d24, d25}, [%[ptr_out]]!\n\t"
+
+      // Specify constraints.
+      :[ptr_in]"+r"(ptr_in),
+       [ptr_out]"+r"(ptr_out)
+      :
+      :"d22", "d23", "d24", "d25", "q10", "q11"
+    );
+  }
+}
+
+// Install the NEON-optimized implementations into the function pointers used
+// by the generic noise-suppression code path.
+void WebRtcNsx_InitNeon(void) {
+  // Assignments are independent; listed roughly in pipeline order.
+  WebRtcNsx_AnalysisUpdate = AnalysisUpdateNeon;
+  WebRtcNsx_CreateComplexBuffer = CreateComplexBufferNeon;
+  WebRtcNsx_Denormalize = DenormalizeNeon;
+  WebRtcNsx_NoiseEstimation = NoiseEstimationNeon;
+  WebRtcNsx_PrepareSpectrum = PrepareSpectrumNeon;
+  WebRtcNsx_SynthesisUpdate = SynthesisUpdateNeon;
+}
diff --git a/trunk/src/modules/audio_processing/ns/nsx_defines.h b/trunk/src/modules/audio_processing/ns/nsx_defines.h
new file mode 100644
index 0000000..cd1e3bf
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/nsx_defines.h
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_
+
+#define ANAL_BLOCKL_MAX         256 // Maximum analysis block length.
+#define HALF_ANAL_BLOCKL        129 // Half max analysis block length + 1.
+#define SIMULT                  3 // Number of simultaneous noise estimates.
+#define END_STARTUP_LONG        200 // Blocks in the long startup phase.
+#define END_STARTUP_SHORT       50 // Blocks in the short startup phase.
+#define FACTOR_Q16              (WebRtc_Word32)2621440 // 40 in Q16.
+#define FACTOR_Q7               (WebRtc_Word16)5120 // 40 in Q7.
+#define FACTOR_Q7_STARTUP       (WebRtc_Word16)1024 // 8 in Q7.
+#define WIDTH_Q8                3 // 0.01 in Q8 (or 25) -- NOTE(review): 0.01 * 2^8 ~= 2.56; confirm intent.
+// Parameters for the new (speech/noise likelihood) method.
+#define DD_PR_SNR_Q11           2007 // ~= Q11(0.98), DD update of prior SNR.
+#define ONE_MINUS_DD_PR_SNR_Q11 41 // DD update of prior SNR.
+#define SPECT_FLAT_TAVG_Q14     4915 // (0.30) tavg parameter for spectral flatness measure.
+#define SPECT_DIFF_TAVG_Q8      77 // (0.30) tavg parameter for spectral difference measure.
+#define PRIOR_UPDATE_Q14        1638 // Q14(0.1), update parameter of prior model.
+#define NOISE_UPDATE_Q8         26 // ~= Q8(0.1), update parameter for noise.
+// Probability threshold for noise state in speech/noise likelihood.
+#define ONE_MINUS_PROB_RANGE_Q8 205 // ~= Q8(0.8).
+#define HIST_PAR_EST            1000 // Histogram size for estimation of parameters.
+// Feature extraction configuration.
+// Bin size of histogram.
+#define BIN_SIZE_LRT            10
+// Scale parameters: multiply dominant peaks of the histograms by a scale
+// factor to obtain thresholds for the prior model.
+#define FACTOR_1_LRT_DIFF       6 // For LRT and spectral difference (5 times bigger).
+// For spectral_flatness: used when noise is flatter than speech (10 times bigger).
+#define FACTOR_2_FLAT_Q10       922
+// Peak limit for spectral flatness (varies between 0 and 1).
+#define THRES_PEAK_FLAT         24 // * 2 * BIN_SIZE_FLAT_FX
+// Limit on spacing of two highest peaks in histogram: spacing determined by bin size.
+#define LIM_PEAK_SPACE_FLAT_DIFF    4 // * 2 * BIN_SIZE_DIFF_FX
+// Limit on relevance of second peak.
+#define LIM_PEAK_WEIGHT_FLAT_DIFF   2
+#define THRES_FLUCT_LRT         10240 // = 20 * inst->modelUpdate; fluctuation limit of LRT feature.
+// Limits on the max and min values for the feature thresholds.
+#define MAX_FLAT_Q10            38912 // * 2 * BIN_SIZE_FLAT_FX
+#define MIN_FLAT_Q10            4096 // * 2 * BIN_SIZE_FLAT_FX
+#define MAX_DIFF                100 // * 2 * BIN_SIZE_DIFF_FX
+#define MIN_DIFF                16 // * 2 * BIN_SIZE_DIFF_FX
+// Criterion on weight of histogram peak to accept/reject a feature.
+#define THRES_WEIGHT_FLAT_DIFF  154 // (int)(0.3 * inst->modelUpdate) for flatness and difference.
+
+#define STAT_UPDATES            9 // Update every 512 = 1 << 9 blocks.
+#define ONE_MINUS_GAMMA_PAUSE_Q8    13 // ~= Q8(0.05), update for conservative noise estimate.
+#define GAMMA_NOISE_TRANS_AND_SPEECH_Q8 3 // ~= Q8(0.01), update for transition and noise region.
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_
diff --git a/trunk/src/modules/audio_processing/ns/windows_private.h b/trunk/src/modules/audio_processing/ns/windows_private.h
new file mode 100644
index 0000000..44c2e84
--- /dev/null
+++ b/trunk/src/modules/audio_processing/ns/windows_private.h
@@ -0,0 +1,574 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_WINDOWS_PRIVATE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_WINDOWS_PRIVATE_H_
+
+// Hanning window for 4 ms at 16 kHz: 128 symmetric coefficients, peak 1.0 at index 64.
+static const float kHanning64w128[128] = {
+  0.00000000000000f, 0.02454122852291f, 0.04906767432742f,
+  0.07356456359967f, 0.09801714032956f, 0.12241067519922f,
+  0.14673047445536f, 0.17096188876030f, 0.19509032201613f,
+  0.21910124015687f, 0.24298017990326f, 0.26671275747490f,
+  0.29028467725446f, 0.31368174039889f, 0.33688985339222f,
+  0.35989503653499f, 0.38268343236509f, 0.40524131400499f,
+  0.42755509343028f, 0.44961132965461f, 0.47139673682600f,
+  0.49289819222978f, 0.51410274419322f, 0.53499761988710f,
+  0.55557023301960f, 0.57580819141785f, 0.59569930449243f,
+  0.61523159058063f, 0.63439328416365f, 0.65317284295378f,
+  0.67155895484702f, 0.68954054473707f, 0.70710678118655f,
+  0.72424708295147f, 0.74095112535496f, 0.75720884650648f,
+  0.77301045336274f, 0.78834642762661f, 0.80320753148064f,
+  0.81758481315158f, 0.83146961230255f, 0.84485356524971f,
+  0.85772861000027f, 0.87008699110871f, 0.88192126434835f,
+  0.89322430119552f, 0.90398929312344f, 0.91420975570353f,
+  0.92387953251129f, 0.93299279883474f, 0.94154406518302f,
+  0.94952818059304f, 0.95694033573221f, 0.96377606579544f,
+  0.97003125319454f, 0.97570213003853f, 0.98078528040323f,
+  0.98527764238894f, 0.98917650996478f, 0.99247953459871f,
+  0.99518472667220f, 0.99729045667869f, 0.99879545620517f,
+  0.99969881869620f, 1.00000000000000f,
+  0.99969881869620f, 0.99879545620517f, 0.99729045667869f,
+  0.99518472667220f, 0.99247953459871f, 0.98917650996478f,
+  0.98527764238894f, 0.98078528040323f, 0.97570213003853f,
+  0.97003125319454f, 0.96377606579544f, 0.95694033573221f,
+  0.94952818059304f, 0.94154406518302f, 0.93299279883474f,
+  0.92387953251129f, 0.91420975570353f, 0.90398929312344f,
+  0.89322430119552f, 0.88192126434835f, 0.87008699110871f,
+  0.85772861000027f, 0.84485356524971f, 0.83146961230255f,
+  0.81758481315158f, 0.80320753148064f, 0.78834642762661f,
+  0.77301045336274f, 0.75720884650648f, 0.74095112535496f,
+  0.72424708295147f, 0.70710678118655f, 0.68954054473707f,
+  0.67155895484702f, 0.65317284295378f, 0.63439328416365f,
+  0.61523159058063f, 0.59569930449243f, 0.57580819141785f,
+  0.55557023301960f, 0.53499761988710f, 0.51410274419322f,
+  0.49289819222978f, 0.47139673682600f, 0.44961132965461f,
+  0.42755509343028f, 0.40524131400499f, 0.38268343236509f,
+  0.35989503653499f, 0.33688985339222f, 0.31368174039889f,
+  0.29028467725446f, 0.26671275747490f, 0.24298017990326f,
+  0.21910124015687f, 0.19509032201613f, 0.17096188876030f,
+  0.14673047445536f, 0.12241067519922f, 0.09801714032956f,
+  0.07356456359967f, 0.04906767432742f, 0.02454122852291f
+};
+
+
+
+// Hybrid Hanning & flat window: 128 coefficients with a flat 1.0 mid-section.
+static const float kBlocks80w128[128] = {
+  (float)0.00000000, (float)0.03271908, (float)0.06540313, (float)0.09801714, (float)0.13052619,
+  (float)0.16289547, (float)0.19509032, (float)0.22707626, (float)0.25881905, (float)0.29028468,
+  (float)0.32143947, (float)0.35225005, (float)0.38268343, (float)0.41270703, (float)0.44228869,
+  (float)0.47139674, (float)0.50000000, (float)0.52806785, (float)0.55557023, (float)0.58247770,
+  (float)0.60876143, (float)0.63439328, (float)0.65934582, (float)0.68359230, (float)0.70710678,
+  (float)0.72986407, (float)0.75183981, (float)0.77301045, (float)0.79335334, (float)0.81284668,
+  (float)0.83146961, (float)0.84920218, (float)0.86602540, (float)0.88192126, (float)0.89687274,
+  (float)0.91086382, (float)0.92387953, (float)0.93590593, (float)0.94693013, (float)0.95694034,
+  (float)0.96592583, (float)0.97387698, (float)0.98078528, (float)0.98664333, (float)0.99144486,
+  (float)0.99518473, (float)0.99785892, (float)0.99946459, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)0.99946459, (float)0.99785892, (float)0.99518473, (float)0.99144486,
+  (float)0.98664333, (float)0.98078528, (float)0.97387698, (float)0.96592583, (float)0.95694034,
+  (float)0.94693013, (float)0.93590593, (float)0.92387953, (float)0.91086382, (float)0.89687274,
+  (float)0.88192126, (float)0.86602540, (float)0.84920218, (float)0.83146961, (float)0.81284668,
+  (float)0.79335334, (float)0.77301045, (float)0.75183981, (float)0.72986407, (float)0.70710678,
+  (float)0.68359230, (float)0.65934582, (float)0.63439328, (float)0.60876143, (float)0.58247770,
+  (float)0.55557023, (float)0.52806785, (float)0.50000000, (float)0.47139674, (float)0.44228869,
+  (float)0.41270703, (float)0.38268343, (float)0.35225005, (float)0.32143947, (float)0.29028468,
+  (float)0.25881905, (float)0.22707626, (float)0.19509032, (float)0.16289547, (float)0.13052619,
+  (float)0.09801714, (float)0.06540313, (float)0.03271908
+};
+
+// Hybrid Hanning & flat window: 256 coefficients with a flat 1.0 mid-section.
+static const float kBlocks160w256[256] = {
+  (float)0.00000000, (float)0.01636173, (float)0.03271908, (float)0.04906767, (float)0.06540313,
+  (float)0.08172107, (float)0.09801714, (float)0.11428696, (float)0.13052619, (float)0.14673047,
+  (float)0.16289547, (float)0.17901686, (float)0.19509032, (float)0.21111155, (float)0.22707626,
+  (float)0.24298018, (float)0.25881905, (float)0.27458862, (float)0.29028468, (float)0.30590302,
+  (float)0.32143947, (float)0.33688985, (float)0.35225005, (float)0.36751594, (float)0.38268343,
+  (float)0.39774847, (float)0.41270703, (float)0.42755509, (float)0.44228869, (float)0.45690388,
+  (float)0.47139674, (float)0.48576339, (float)0.50000000, (float)0.51410274, (float)0.52806785,
+  (float)0.54189158, (float)0.55557023, (float)0.56910015, (float)0.58247770, (float)0.59569930,
+  (float)0.60876143, (float)0.62166057, (float)0.63439328, (float)0.64695615, (float)0.65934582,
+  (float)0.67155895, (float)0.68359230, (float)0.69544264, (float)0.70710678, (float)0.71858162,
+  (float)0.72986407, (float)0.74095113, (float)0.75183981, (float)0.76252720, (float)0.77301045,
+  (float)0.78328675, (float)0.79335334, (float)0.80320753, (float)0.81284668, (float)0.82226822,
+  (float)0.83146961, (float)0.84044840, (float)0.84920218, (float)0.85772861, (float)0.86602540,
+  (float)0.87409034, (float)0.88192126, (float)0.88951608, (float)0.89687274, (float)0.90398929,
+  (float)0.91086382, (float)0.91749450, (float)0.92387953, (float)0.93001722, (float)0.93590593,
+  (float)0.94154407, (float)0.94693013, (float)0.95206268, (float)0.95694034, (float)0.96156180,
+  (float)0.96592583, (float)0.97003125, (float)0.97387698, (float)0.97746197, (float)0.98078528,
+  (float)0.98384601, (float)0.98664333, (float)0.98917651, (float)0.99144486, (float)0.99344778,
+  (float)0.99518473, (float)0.99665524, (float)0.99785892, (float)0.99879546, (float)0.99946459,
+  (float)0.99986614, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)0.99986614, (float)0.99946459, (float)0.99879546, (float)0.99785892,
+  (float)0.99665524, (float)0.99518473, (float)0.99344778, (float)0.99144486, (float)0.98917651,
+  (float)0.98664333, (float)0.98384601, (float)0.98078528, (float)0.97746197, (float)0.97387698,
+  (float)0.97003125, (float)0.96592583, (float)0.96156180, (float)0.95694034, (float)0.95206268,
+  (float)0.94693013, (float)0.94154407, (float)0.93590593, (float)0.93001722, (float)0.92387953,
+  (float)0.91749450, (float)0.91086382, (float)0.90398929, (float)0.89687274, (float)0.88951608,
+  (float)0.88192126, (float)0.87409034, (float)0.86602540, (float)0.85772861, (float)0.84920218,
+  (float)0.84044840, (float)0.83146961, (float)0.82226822, (float)0.81284668, (float)0.80320753,
+  (float)0.79335334, (float)0.78328675, (float)0.77301045, (float)0.76252720, (float)0.75183981,
+  (float)0.74095113, (float)0.72986407, (float)0.71858162, (float)0.70710678, (float)0.69544264,
+  (float)0.68359230, (float)0.67155895, (float)0.65934582, (float)0.64695615, (float)0.63439328,
+  (float)0.62166057, (float)0.60876143, (float)0.59569930, (float)0.58247770, (float)0.56910015,
+  (float)0.55557023, (float)0.54189158, (float)0.52806785, (float)0.51410274, (float)0.50000000,
+  (float)0.48576339, (float)0.47139674, (float)0.45690388, (float)0.44228869, (float)0.42755509,
+  (float)0.41270703, (float)0.39774847, (float)0.38268343, (float)0.36751594, (float)0.35225005,
+  (float)0.33688985, (float)0.32143947, (float)0.30590302, (float)0.29028468, (float)0.27458862,
+  (float)0.25881905, (float)0.24298018, (float)0.22707626, (float)0.21111155, (float)0.19509032,
+  (float)0.17901686, (float)0.16289547, (float)0.14673047, (float)0.13052619, (float)0.11428696,
+  (float)0.09801714, (float)0.08172107, (float)0.06540313, (float)0.04906767, (float)0.03271908,
+  (float)0.01636173
+};
+
+// Hybrid Hanning & flat window for 20 ms: 512 coefficients with a flat 1.0 mid-section.
+static const float kBlocks320w512[512] = {
+  (float)0.00000000, (float)0.00818114, (float)0.01636173, (float)0.02454123, (float)0.03271908,
+  (float)0.04089475, (float)0.04906767, (float)0.05723732, (float)0.06540313, (float)0.07356456,
+  (float)0.08172107, (float)0.08987211, (float)0.09801714, (float)0.10615561, (float)0.11428696,
+  (float)0.12241068, (float)0.13052619, (float)0.13863297, (float)0.14673047, (float)0.15481816,
+  (float)0.16289547, (float)0.17096189, (float)0.17901686, (float)0.18705985, (float)0.19509032,
+  (float)0.20310773, (float)0.21111155, (float)0.21910124, (float)0.22707626, (float)0.23503609,
+  (float)0.24298018, (float)0.25090801, (float)0.25881905, (float)0.26671276, (float)0.27458862,
+  (float)0.28244610, (float)0.29028468, (float)0.29810383, (float)0.30590302, (float)0.31368174,
+  (float)0.32143947, (float)0.32917568, (float)0.33688985, (float)0.34458148, (float)0.35225005,
+  (float)0.35989504, (float)0.36751594, (float)0.37511224, (float)0.38268343, (float)0.39022901,
+  (float)0.39774847, (float)0.40524131, (float)0.41270703, (float)0.42014512, (float)0.42755509,
+  (float)0.43493645, (float)0.44228869, (float)0.44961133, (float)0.45690388, (float)0.46416584,
+  (float)0.47139674, (float)0.47859608, (float)0.48576339, (float)0.49289819, (float)0.50000000,
+  (float)0.50706834, (float)0.51410274, (float)0.52110274, (float)0.52806785, (float)0.53499762,
+  (float)0.54189158, (float)0.54874927, (float)0.55557023, (float)0.56235401, (float)0.56910015,
+  (float)0.57580819, (float)0.58247770, (float)0.58910822, (float)0.59569930, (float)0.60225052,
+  (float)0.60876143, (float)0.61523159, (float)0.62166057, (float)0.62804795, (float)0.63439328,
+  (float)0.64069616, (float)0.64695615, (float)0.65317284, (float)0.65934582, (float)0.66547466,
+  (float)0.67155895, (float)0.67759830, (float)0.68359230, (float)0.68954054, (float)0.69544264,
+  (float)0.70129818, (float)0.70710678, (float)0.71286806, (float)0.71858162, (float)0.72424708,
+  (float)0.72986407, (float)0.73543221, (float)0.74095113, (float)0.74642045, (float)0.75183981,
+  (float)0.75720885, (float)0.76252720, (float)0.76779452, (float)0.77301045, (float)0.77817464,
+  (float)0.78328675, (float)0.78834643, (float)0.79335334, (float)0.79830715, (float)0.80320753,
+  (float)0.80805415, (float)0.81284668, (float)0.81758481, (float)0.82226822, (float)0.82689659,
+  (float)0.83146961, (float)0.83598698, (float)0.84044840, (float)0.84485357, (float)0.84920218,
+  (float)0.85349396, (float)0.85772861, (float)0.86190585, (float)0.86602540, (float)0.87008699,
+  (float)0.87409034, (float)0.87803519, (float)0.88192126, (float)0.88574831, (float)0.88951608,
+  (float)0.89322430, (float)0.89687274, (float)0.90046115, (float)0.90398929, (float)0.90745693,
+  (float)0.91086382, (float)0.91420976, (float)0.91749450, (float)0.92071783, (float)0.92387953,
+  (float)0.92697940, (float)0.93001722, (float)0.93299280, (float)0.93590593, (float)0.93875641,
+  (float)0.94154407, (float)0.94426870, (float)0.94693013, (float)0.94952818, (float)0.95206268,
+  (float)0.95453345, (float)0.95694034, (float)0.95928317, (float)0.96156180, (float)0.96377607,
+  (float)0.96592583, (float)0.96801094, (float)0.97003125, (float)0.97198664, (float)0.97387698,
+  (float)0.97570213, (float)0.97746197, (float)0.97915640, (float)0.98078528, (float)0.98234852,
+  (float)0.98384601, (float)0.98527764, (float)0.98664333, (float)0.98794298, (float)0.98917651,
+  (float)0.99034383, (float)0.99144486, (float)0.99247953, (float)0.99344778, (float)0.99434953,
+  (float)0.99518473, (float)0.99595331, (float)0.99665524, (float)0.99729046, (float)0.99785892,
+  (float)0.99836060, (float)0.99879546, (float)0.99916346, (float)0.99946459, (float)0.99969882,
+  (float)0.99986614, (float)0.99996653, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000, (float)1.00000000,
+  (float)1.00000000, (float)0.99996653, (float)0.99986614, (float)0.99969882, (float)0.99946459,
+  (float)0.99916346, (float)0.99879546, (float)0.99836060, (float)0.99785892, (float)0.99729046,
+  (float)0.99665524, (float)0.99595331, (float)0.99518473, (float)0.99434953, (float)0.99344778,
+  (float)0.99247953, (float)0.99144486, (float)0.99034383, (float)0.98917651, (float)0.98794298,
+  (float)0.98664333, (float)0.98527764, (float)0.98384601, (float)0.98234852, (float)0.98078528,
+  (float)0.97915640, (float)0.97746197, (float)0.97570213, (float)0.97387698, (float)0.97198664,
+  (float)0.97003125, (float)0.96801094, (float)0.96592583, (float)0.96377607, (float)0.96156180,
+  (float)0.95928317, (float)0.95694034, (float)0.95453345, (float)0.95206268, (float)0.94952818,
+  (float)0.94693013, (float)0.94426870, (float)0.94154407, (float)0.93875641, (float)0.93590593,
+  (float)0.93299280, (float)0.93001722, (float)0.92697940, (float)0.92387953, (float)0.92071783,
+  (float)0.91749450, (float)0.91420976, (float)0.91086382, (float)0.90745693, (float)0.90398929,
+  (float)0.90046115, (float)0.89687274, (float)0.89322430, (float)0.88951608, (float)0.88574831,
+  (float)0.88192126, (float)0.87803519, (float)0.87409034, (float)0.87008699, (float)0.86602540,
+  (float)0.86190585, (float)0.85772861, (float)0.85349396, (float)0.84920218, (float)0.84485357,
+  (float)0.84044840, (float)0.83598698, (float)0.83146961, (float)0.82689659, (float)0.82226822,
+  (float)0.81758481, (float)0.81284668, (float)0.80805415, (float)0.80320753, (float)0.79830715,
+  (float)0.79335334, (float)0.78834643, (float)0.78328675, (float)0.77817464, (float)0.77301045,
+  (float)0.76779452, (float)0.76252720, (float)0.75720885, (float)0.75183981, (float)0.74642045,
+  (float)0.74095113, (float)0.73543221, (float)0.72986407, (float)0.72424708, (float)0.71858162,
+  (float)0.71286806, (float)0.70710678, (float)0.70129818, (float)0.69544264, (float)0.68954054,
+  (float)0.68359230, (float)0.67759830, (float)0.67155895, (float)0.66547466, (float)0.65934582,
+  (float)0.65317284, (float)0.64695615, (float)0.64069616, (float)0.63439328, (float)0.62804795,
+  (float)0.62166057, (float)0.61523159, (float)0.60876143, (float)0.60225052, (float)0.59569930,
+  (float)0.58910822, (float)0.58247770, (float)0.57580819, (float)0.56910015, (float)0.56235401,
+  (float)0.55557023, (float)0.54874927, (float)0.54189158, (float)0.53499762, (float)0.52806785,
+  (float)0.52110274, (float)0.51410274, (float)0.50706834, (float)0.50000000, (float)0.49289819,
+  (float)0.48576339, (float)0.47859608, (float)0.47139674, (float)0.46416584, (float)0.45690388,
+  (float)0.44961133, (float)0.44228869, (float)0.43493645, (float)0.42755509, (float)0.42014512,
+  (float)0.41270703, (float)0.40524131, (float)0.39774847, (float)0.39022901, (float)0.38268343,
+  (float)0.37511224, (float)0.36751594, (float)0.35989504, (float)0.35225005, (float)0.34458148,
+  (float)0.33688985, (float)0.32917568, (float)0.32143947, (float)0.31368174, (float)0.30590302,
+  (float)0.29810383, (float)0.29028468, (float)0.28244610, (float)0.27458862, (float)0.26671276,
+  (float)0.25881905, (float)0.25090801, (float)0.24298018, (float)0.23503609, (float)0.22707626,
+  (float)0.21910124, (float)0.21111155, (float)0.20310773, (float)0.19509032, (float)0.18705985,
+  (float)0.17901686, (float)0.17096189, (float)0.16289547, (float)0.15481816, (float)0.14673047,
+  (float)0.13863297, (float)0.13052619, (float)0.12241068, (float)0.11428696, (float)0.10615561,
+  (float)0.09801714, (float)0.08987211, (float)0.08172107, (float)0.07356456, (float)0.06540313,
+  (float)0.05723732, (float)0.04906767, (float)0.04089475, (float)0.03271908, (float)0.02454123,
+  (float)0.01636173, (float)0.00818114
+};
+
+
+// Hanning window for 15 ms at 16 kHz, padded with symmetric zeros to 512 points.
+static const float kBlocks240w512[512] = {
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00654494, (float)0.01308960, (float)0.01963369,
+  (float)0.02617695, (float)0.03271908, (float)0.03925982, (float)0.04579887, (float)0.05233596,
+  (float)0.05887080, (float)0.06540313, (float)0.07193266, (float)0.07845910, (float)0.08498218,
+  (float)0.09150162, (float)0.09801714, (float)0.10452846, (float)0.11103531, (float)0.11753740,
+  (float)0.12403446, (float)0.13052620, (float)0.13701233, (float)0.14349262, (float)0.14996676,
+  (float)0.15643448, (float)0.16289547, (float)0.16934951, (float)0.17579629, (float)0.18223552,
+  (float)0.18866697, (float)0.19509032, (float)0.20150533, (float)0.20791170, (float)0.21430916,
+  (float)0.22069745, (float)0.22707628, (float)0.23344538, (float)0.23980446, (float)0.24615330,
+  (float)0.25249159, (float)0.25881904, (float)0.26513544, (float)0.27144045, (float)0.27773386,
+  (float)0.28401536, (float)0.29028466, (float)0.29654160, (float)0.30278578, (float)0.30901700,
+  (float)0.31523499, (float)0.32143945, (float)0.32763019, (float)0.33380687, (float)0.33996925,
+  (float)0.34611708, (float)0.35225007, (float)0.35836795, (float)0.36447051, (float)0.37055743,
+  (float)0.37662852, (float)0.38268346, (float)0.38872197, (float)0.39474389, (float)0.40074885,
+  (float)0.40673664, (float)0.41270703, (float)0.41865975, (float)0.42459452, (float)0.43051112,
+  (float)0.43640924, (float)0.44228873, (float)0.44814920, (float)0.45399052, (float)0.45981237,
+  (float)0.46561453, (float)0.47139674, (float)0.47715878, (float)0.48290035, (float)0.48862126,
+  (float)0.49432120, (float)0.50000000, (float)0.50565743, (float)0.51129311, (float)0.51690692,
+  (float)0.52249855, (float)0.52806789, (float)0.53361452, (float)0.53913832, (float)0.54463905,
+  (float)0.55011642, (float)0.55557024, (float)0.56100029, (float)0.56640625, (float)0.57178795,
+  (float)0.57714522, (float)0.58247769, (float)0.58778524, (float)0.59306765, (float)0.59832460,
+  (float)0.60355598, (float)0.60876143, (float)0.61394083, (float)0.61909395, (float)0.62422055,
+  (float)0.62932038, (float)0.63439333, (float)0.63943899, (float)0.64445734, (float)0.64944810,
+  (float)0.65441096, (float)0.65934587, (float)0.66425246, (float)0.66913062, (float)0.67398012,
+  (float)0.67880076, (float)0.68359232, (float)0.68835455, (float)0.69308740, (float)0.69779050,
+  (float)0.70246369, (float)0.70710677, (float)0.71171963, (float)0.71630198, (float)0.72085363,
+  (float)0.72537440, (float)0.72986406, (float)0.73432255, (float)0.73874950, (float)0.74314487,
+  (float)0.74750835, (float)0.75183982, (float)0.75613910, (float)0.76040596, (float)0.76464027,
+  (float)0.76884186, (float)0.77301043, (float)0.77714598, (float)0.78124821, (float)0.78531694,
+  (float)0.78935206, (float)0.79335338, (float)0.79732066, (float)0.80125386, (float)0.80515265,
+  (float)0.80901700, (float)0.81284672, (float)0.81664157, (float)0.82040149, (float)0.82412618,
+  (float)0.82781565, (float)0.83146966, (float)0.83508795, (float)0.83867061, (float)0.84221727,
+  (float)0.84572780, (float)0.84920216, (float)0.85264021, (float)0.85604161, (float)0.85940641,
+  (float)0.86273444, (float)0.86602545, (float)0.86927933, (float)0.87249607, (float)0.87567532,
+  (float)0.87881714, (float)0.88192129, (float)0.88498765, (float)0.88801610, (float)0.89100653,
+  (float)0.89395881, (float)0.89687276, (float)0.89974827, (float)0.90258533, (float)0.90538365,
+  (float)0.90814316, (float)0.91086388, (float)0.91354549, (float)0.91618794, (float)0.91879123,
+  (float)0.92135513, (float)0.92387950, (float)0.92636442, (float)0.92880958, (float)0.93121493,
+  (float)0.93358046, (float)0.93590593, (float)0.93819135, (float)0.94043654, (float)0.94264150,
+  (float)0.94480604, (float)0.94693011, (float)0.94901365, (float)0.95105654, (float)0.95305866,
+  (float)0.95501995, (float)0.95694035, (float)0.95881975, (float)0.96065807, (float)0.96245527,
+  (float)0.96421117, (float)0.96592581, (float)0.96759909, (float)0.96923089, (float)0.97082120,
+  (float)0.97236991, (float)0.97387701, (float)0.97534233, (float)0.97676587, (float)0.97814763,
+  (float)0.97948742, (float)0.98078531, (float)0.98204112, (float)0.98325491, (float)0.98442656,
+  (float)0.98555607, (float)0.98664331, (float)0.98768836, (float)0.98869103, (float)0.98965138,
+  (float)0.99056935, (float)0.99144489, (float)0.99227792, (float)0.99306846, (float)0.99381649,
+  (float)0.99452192, (float)0.99518472, (float)0.99580491, (float)0.99638247, (float)0.99691731,
+  (float)0.99740952, (float)0.99785894, (float)0.99826562, (float)0.99862951, (float)0.99895066,
+  (float)0.99922901, (float)0.99946457, (float)0.99965733, (float)0.99980724, (float)0.99991435,
+  (float)0.99997860, (float)1.00000000, (float)0.99997860, (float)0.99991435, (float)0.99980724,
+  (float)0.99965733, (float)0.99946457, (float)0.99922901, (float)0.99895066, (float)0.99862951,
+  (float)0.99826562, (float)0.99785894, (float)0.99740946, (float)0.99691731, (float)0.99638247,
+  (float)0.99580491, (float)0.99518472, (float)0.99452192, (float)0.99381644, (float)0.99306846,
+  (float)0.99227792, (float)0.99144489, (float)0.99056935, (float)0.98965138, (float)0.98869103,
+  (float)0.98768836, (float)0.98664331, (float)0.98555607, (float)0.98442656, (float)0.98325491,
+  (float)0.98204112, (float)0.98078525, (float)0.97948742, (float)0.97814757, (float)0.97676587,
+  (float)0.97534227, (float)0.97387695, (float)0.97236991, (float)0.97082120, (float)0.96923089,
+  (float)0.96759909, (float)0.96592581, (float)0.96421117, (float)0.96245521, (float)0.96065807,
+  (float)0.95881969, (float)0.95694029, (float)0.95501995, (float)0.95305860, (float)0.95105648,
+  (float)0.94901365, (float)0.94693011, (float)0.94480604, (float)0.94264150, (float)0.94043654,
+  (float)0.93819129, (float)0.93590593, (float)0.93358046, (float)0.93121493, (float)0.92880952,
+  (float)0.92636436, (float)0.92387950, (float)0.92135507, (float)0.91879123, (float)0.91618794,
+  (float)0.91354543, (float)0.91086382, (float)0.90814310, (float)0.90538365, (float)0.90258527,
+  (float)0.89974827, (float)0.89687276, (float)0.89395875, (float)0.89100647, (float)0.88801610,
+  (float)0.88498759, (float)0.88192123, (float)0.87881714, (float)0.87567532, (float)0.87249595,
+  (float)0.86927933, (float)0.86602539, (float)0.86273432, (float)0.85940641, (float)0.85604161,
+  (float)0.85264009, (float)0.84920216, (float)0.84572780, (float)0.84221715, (float)0.83867055,
+  (float)0.83508795, (float)0.83146954, (float)0.82781565, (float)0.82412612, (float)0.82040137,
+  (float)0.81664157, (float)0.81284660, (float)0.80901700, (float)0.80515265, (float)0.80125374,
+  (float)0.79732066, (float)0.79335332, (float)0.78935200, (float)0.78531694, (float)0.78124815,
+  (float)0.77714586, (float)0.77301049, (float)0.76884180, (float)0.76464021, (float)0.76040596,
+  (float)0.75613904, (float)0.75183970, (float)0.74750835, (float)0.74314481, (float)0.73874938,
+  (float)0.73432249, (float)0.72986400, (float)0.72537428, (float)0.72085363, (float)0.71630186,
+  (float)0.71171951, (float)0.70710677, (float)0.70246363, (float)0.69779032, (float)0.69308734,
+  (float)0.68835449, (float)0.68359220, (float)0.67880070, (float)0.67398006, (float)0.66913044,
+  (float)0.66425240, (float)0.65934575, (float)0.65441096, (float)0.64944804, (float)0.64445722,
+  (float)0.63943905, (float)0.63439327, (float)0.62932026, (float)0.62422055, (float)0.61909389,
+  (float)0.61394072, (float)0.60876143, (float)0.60355592, (float)0.59832448, (float)0.59306765,
+  (float)0.58778518, (float)0.58247757, (float)0.57714522, (float)0.57178789, (float)0.56640613,
+  (float)0.56100023, (float)0.55557019, (float)0.55011630, (float)0.54463905, (float)0.53913826,
+  (float)0.53361434, (float)0.52806783, (float)0.52249849, (float)0.51690674, (float)0.51129305,
+  (float)0.50565726, (float)0.50000006, (float)0.49432117, (float)0.48862115, (float)0.48290038,
+  (float)0.47715873, (float)0.47139663, (float)0.46561456, (float)0.45981231, (float)0.45399037,
+  (float)0.44814920, (float)0.44228864, (float)0.43640912, (float)0.43051112, (float)0.42459446,
+  (float)0.41865960, (float)0.41270703, (float)0.40673658, (float)0.40074870, (float)0.39474386,
+  (float)0.38872188, (float)0.38268328, (float)0.37662849, (float)0.37055734, (float)0.36447033,
+  (float)0.35836792, (float)0.35224995, (float)0.34611690, (float)0.33996922, (float)0.33380675,
+  (float)0.32763001, (float)0.32143945, (float)0.31523487, (float)0.30901679, (float)0.30278572,
+  (float)0.29654145, (float)0.29028472, (float)0.28401530, (float)0.27773371, (float)0.27144048,
+  (float)0.26513538, (float)0.25881892, (float)0.25249159, (float)0.24615324, (float)0.23980433,
+  (float)0.23344538, (float)0.22707619, (float)0.22069728, (float)0.21430916, (float)0.20791161,
+  (float)0.20150517, (float)0.19509031, (float)0.18866688, (float)0.18223536, (float)0.17579627,
+  (float)0.16934940, (float)0.16289529, (float)0.15643445, (float)0.14996666, (float)0.14349243,
+  (float)0.13701232, (float)0.13052608, (float)0.12403426, (float)0.11753736, (float)0.11103519,
+  (float)0.10452849, (float)0.09801710, (float)0.09150149, (float)0.08498220, (float)0.07845904,
+  (float)0.07193252, (float)0.06540315, (float)0.05887074, (float)0.05233581, (float)0.04579888,
+  (float)0.03925974, (float)0.03271893, (float)0.02617695, (float)0.01963361, (float)0.01308943,
+  (float)0.00654493, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000
+};
+
+
+// Hanning window: for 30ms with 1024 fft with symmetric zeros at 16kHz
+static const float kBlocks480w1024[1024] = {
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00327249, (float)0.00654494,
+  (float)0.00981732, (float)0.01308960, (float)0.01636173, (float)0.01963369, (float)0.02290544,
+  (float)0.02617695, (float)0.02944817, (float)0.03271908, (float)0.03598964, (float)0.03925982,
+  (float)0.04252957, (float)0.04579887, (float)0.04906768, (float)0.05233596, (float)0.05560368,
+  (float)0.05887080, (float)0.06213730, (float)0.06540313, (float)0.06866825, (float)0.07193266,
+  (float)0.07519628, (float)0.07845910, (float)0.08172107, (float)0.08498218, (float)0.08824237,
+  (float)0.09150162, (float)0.09475989, (float)0.09801714, (float)0.10127335, (float)0.10452846,
+  (float)0.10778246, (float)0.11103531, (float)0.11428697, (float)0.11753740, (float)0.12078657,
+  (float)0.12403446, (float)0.12728101, (float)0.13052620, (float)0.13376999, (float)0.13701233,
+  (float)0.14025325, (float)0.14349262, (float)0.14673047, (float)0.14996676, (float)0.15320145,
+  (float)0.15643448, (float)0.15966582, (float)0.16289547, (float)0.16612339, (float)0.16934951,
+  (float)0.17257382, (float)0.17579629, (float)0.17901687, (float)0.18223552, (float)0.18545224,
+  (float)0.18866697, (float)0.19187967, (float)0.19509032, (float)0.19829889, (float)0.20150533,
+  (float)0.20470962, (float)0.20791170, (float)0.21111156, (float)0.21430916, (float)0.21750447,
+  (float)0.22069745, (float)0.22388805, (float)0.22707628, (float)0.23026206, (float)0.23344538,
+  (float)0.23662618, (float)0.23980446, (float)0.24298020, (float)0.24615330, (float)0.24932377,
+  (float)0.25249159, (float)0.25565669, (float)0.25881904, (float)0.26197866, (float)0.26513544,
+  (float)0.26828939, (float)0.27144045, (float)0.27458861, (float)0.27773386, (float)0.28087610,
+  (float)0.28401536, (float)0.28715158, (float)0.29028466, (float)0.29341471, (float)0.29654160,
+  (float)0.29966527, (float)0.30278578, (float)0.30590302, (float)0.30901700, (float)0.31212768,
+  (float)0.31523499, (float)0.31833893, (float)0.32143945, (float)0.32453656, (float)0.32763019,
+  (float)0.33072028, (float)0.33380687, (float)0.33688986, (float)0.33996925, (float)0.34304500,
+  (float)0.34611708, (float)0.34918544, (float)0.35225007, (float)0.35531089, (float)0.35836795,
+  (float)0.36142117, (float)0.36447051, (float)0.36751595, (float)0.37055743, (float)0.37359497,
+  (float)0.37662852, (float)0.37965801, (float)0.38268346, (float)0.38570479, (float)0.38872197,
+  (float)0.39173502, (float)0.39474389, (float)0.39774847, (float)0.40074885, (float)0.40374491,
+  (float)0.40673664, (float)0.40972406, (float)0.41270703, (float)0.41568562, (float)0.41865975,
+  (float)0.42162940, (float)0.42459452, (float)0.42755508, (float)0.43051112, (float)0.43346250,
+  (float)0.43640924, (float)0.43935132, (float)0.44228873, (float)0.44522133, (float)0.44814920,
+  (float)0.45107228, (float)0.45399052, (float)0.45690390, (float)0.45981237, (float)0.46271592,
+  (float)0.46561453, (float)0.46850815, (float)0.47139674, (float)0.47428030, (float)0.47715878,
+  (float)0.48003215, (float)0.48290035, (float)0.48576337, (float)0.48862126, (float)0.49147385,
+  (float)0.49432120, (float)0.49716330, (float)0.50000000, (float)0.50283140, (float)0.50565743,
+  (float)0.50847799, (float)0.51129311, (float)0.51410276, (float)0.51690692, (float)0.51970553,
+  (float)0.52249855, (float)0.52528602, (float)0.52806789, (float)0.53084403, (float)0.53361452,
+  (float)0.53637928, (float)0.53913832, (float)0.54189163, (float)0.54463905, (float)0.54738063,
+  (float)0.55011642, (float)0.55284631, (float)0.55557024, (float)0.55828828, (float)0.56100029,
+  (float)0.56370628, (float)0.56640625, (float)0.56910014, (float)0.57178795, (float)0.57446963,
+  (float)0.57714522, (float)0.57981455, (float)0.58247769, (float)0.58513463, (float)0.58778524,
+  (float)0.59042960, (float)0.59306765, (float)0.59569931, (float)0.59832460, (float)0.60094351,
+  (float)0.60355598, (float)0.60616195, (float)0.60876143, (float)0.61135441, (float)0.61394083,
+  (float)0.61652070, (float)0.61909395, (float)0.62166059, (float)0.62422055, (float)0.62677383,
+  (float)0.62932038, (float)0.63186020, (float)0.63439333, (float)0.63691956, (float)0.63943899,
+  (float)0.64195162, (float)0.64445734, (float)0.64695615, (float)0.64944810, (float)0.65193301,
+  (float)0.65441096, (float)0.65688187, (float)0.65934587, (float)0.66180271, (float)0.66425246,
+  (float)0.66669512, (float)0.66913062, (float)0.67155898, (float)0.67398012, (float)0.67639405,
+  (float)0.67880076, (float)0.68120021, (float)0.68359232, (float)0.68597710, (float)0.68835455,
+  (float)0.69072467, (float)0.69308740, (float)0.69544262, (float)0.69779050, (float)0.70013082,
+  (float)0.70246369, (float)0.70478904, (float)0.70710677, (float)0.70941699, (float)0.71171963,
+  (float)0.71401459, (float)0.71630198, (float)0.71858168, (float)0.72085363, (float)0.72311789,
+  (float)0.72537440, (float)0.72762316, (float)0.72986406, (float)0.73209721, (float)0.73432255,
+  (float)0.73653996, (float)0.73874950, (float)0.74095118, (float)0.74314487, (float)0.74533057,
+  (float)0.74750835, (float)0.74967808, (float)0.75183982, (float)0.75399351, (float)0.75613910,
+  (float)0.75827658, (float)0.76040596, (float)0.76252723, (float)0.76464027, (float)0.76674515,
+  (float)0.76884186, (float)0.77093029, (float)0.77301043, (float)0.77508241, (float)0.77714598,
+  (float)0.77920127, (float)0.78124821, (float)0.78328675, (float)0.78531694, (float)0.78733873,
+  (float)0.78935206, (float)0.79135692, (float)0.79335338, (float)0.79534125, (float)0.79732066,
+  (float)0.79929149, (float)0.80125386, (float)0.80320752, (float)0.80515265, (float)0.80708915,
+  (float)0.80901700, (float)0.81093621, (float)0.81284672, (float)0.81474853, (float)0.81664157,
+  (float)0.81852591, (float)0.82040149, (float)0.82226825, (float)0.82412618, (float)0.82597536,
+  (float)0.82781565, (float)0.82964706, (float)0.83146966, (float)0.83328325, (float)0.83508795,
+  (float)0.83688378, (float)0.83867061, (float)0.84044838, (float)0.84221727, (float)0.84397703,
+  (float)0.84572780, (float)0.84746957, (float)0.84920216, (float)0.85092574, (float)0.85264021,
+  (float)0.85434544, (float)0.85604161, (float)0.85772866, (float)0.85940641, (float)0.86107504,
+  (float)0.86273444, (float)0.86438453, (float)0.86602545, (float)0.86765707, (float)0.86927933,
+  (float)0.87089235, (float)0.87249607, (float)0.87409031, (float)0.87567532, (float)0.87725097,
+  (float)0.87881714, (float)0.88037390, (float)0.88192129, (float)0.88345921, (float)0.88498765,
+  (float)0.88650668, (float)0.88801610, (float)0.88951612, (float)0.89100653, (float)0.89248741,
+  (float)0.89395881, (float)0.89542055, (float)0.89687276, (float)0.89831537, (float)0.89974827,
+  (float)0.90117162, (float)0.90258533, (float)0.90398932, (float)0.90538365, (float)0.90676826,
+  (float)0.90814316, (float)0.90950841, (float)0.91086388, (float)0.91220951, (float)0.91354549,
+  (float)0.91487163, (float)0.91618794, (float)0.91749454, (float)0.91879123, (float)0.92007810,
+  (float)0.92135513, (float)0.92262226, (float)0.92387950, (float)0.92512691, (float)0.92636442,
+  (float)0.92759192, (float)0.92880958, (float)0.93001723, (float)0.93121493, (float)0.93240267,
+  (float)0.93358046, (float)0.93474817, (float)0.93590593, (float)0.93705362, (float)0.93819135,
+  (float)0.93931901, (float)0.94043654, (float)0.94154406, (float)0.94264150, (float)0.94372880,
+  (float)0.94480604, (float)0.94587320, (float)0.94693011, (float)0.94797695, (float)0.94901365,
+  (float)0.95004016, (float)0.95105654, (float)0.95206273, (float)0.95305866, (float)0.95404440,
+  (float)0.95501995, (float)0.95598525, (float)0.95694035, (float)0.95788521, (float)0.95881975,
+  (float)0.95974404, (float)0.96065807, (float)0.96156180, (float)0.96245527, (float)0.96333838,
+  (float)0.96421117, (float)0.96507370, (float)0.96592581, (float)0.96676767, (float)0.96759909,
+  (float)0.96842021, (float)0.96923089, (float)0.97003126, (float)0.97082120, (float)0.97160077,
+  (float)0.97236991, (float)0.97312868, (float)0.97387701, (float)0.97461486, (float)0.97534233,
+  (float)0.97605932, (float)0.97676587, (float)0.97746199, (float)0.97814763, (float)0.97882277,
+  (float)0.97948742, (float)0.98014158, (float)0.98078531, (float)0.98141843, (float)0.98204112,
+  (float)0.98265332, (float)0.98325491, (float)0.98384601, (float)0.98442656, (float)0.98499662,
+  (float)0.98555607, (float)0.98610497, (float)0.98664331, (float)0.98717111, (float)0.98768836,
+  (float)0.98819500, (float)0.98869103, (float)0.98917651, (float)0.98965138, (float)0.99011570,
+  (float)0.99056935, (float)0.99101239, (float)0.99144489, (float)0.99186671, (float)0.99227792,
+  (float)0.99267852, (float)0.99306846, (float)0.99344778, (float)0.99381649, (float)0.99417448,
+  (float)0.99452192, (float)0.99485862, (float)0.99518472, (float)0.99550015, (float)0.99580491,
+  (float)0.99609905, (float)0.99638247, (float)0.99665523, (float)0.99691731, (float)0.99716878,
+  (float)0.99740952, (float)0.99763954, (float)0.99785894, (float)0.99806762, (float)0.99826562,
+  (float)0.99845290, (float)0.99862951, (float)0.99879545, (float)0.99895066, (float)0.99909520,
+  (float)0.99922901, (float)0.99935216, (float)0.99946457, (float)0.99956632, (float)0.99965733,
+  (float)0.99973762, (float)0.99980724, (float)0.99986613, (float)0.99991435, (float)0.99995178,
+  (float)0.99997860, (float)0.99999464, (float)1.00000000, (float)0.99999464, (float)0.99997860,
+  (float)0.99995178, (float)0.99991435, (float)0.99986613, (float)0.99980724, (float)0.99973762,
+  (float)0.99965733, (float)0.99956632, (float)0.99946457, (float)0.99935216, (float)0.99922901,
+  (float)0.99909520, (float)0.99895066, (float)0.99879545, (float)0.99862951, (float)0.99845290,
+  (float)0.99826562, (float)0.99806762, (float)0.99785894, (float)0.99763954, (float)0.99740946,
+  (float)0.99716872, (float)0.99691731, (float)0.99665523, (float)0.99638247, (float)0.99609905,
+  (float)0.99580491, (float)0.99550015, (float)0.99518472, (float)0.99485862, (float)0.99452192,
+  (float)0.99417448, (float)0.99381644, (float)0.99344778, (float)0.99306846, (float)0.99267852,
+  (float)0.99227792, (float)0.99186671, (float)0.99144489, (float)0.99101239, (float)0.99056935,
+  (float)0.99011564, (float)0.98965138, (float)0.98917651, (float)0.98869103, (float)0.98819494,
+  (float)0.98768836, (float)0.98717111, (float)0.98664331, (float)0.98610497, (float)0.98555607,
+  (float)0.98499656, (float)0.98442656, (float)0.98384601, (float)0.98325491, (float)0.98265326,
+  (float)0.98204112, (float)0.98141843, (float)0.98078525, (float)0.98014158, (float)0.97948742,
+  (float)0.97882277, (float)0.97814757, (float)0.97746193, (float)0.97676587, (float)0.97605932,
+  (float)0.97534227, (float)0.97461486, (float)0.97387695, (float)0.97312862, (float)0.97236991,
+  (float)0.97160077, (float)0.97082120, (float)0.97003126, (float)0.96923089, (float)0.96842015,
+  (float)0.96759909, (float)0.96676761, (float)0.96592581, (float)0.96507365, (float)0.96421117,
+  (float)0.96333838, (float)0.96245521, (float)0.96156180, (float)0.96065807, (float)0.95974404,
+  (float)0.95881969, (float)0.95788515, (float)0.95694029, (float)0.95598525, (float)0.95501995,
+  (float)0.95404440, (float)0.95305860, (float)0.95206267, (float)0.95105648, (float)0.95004016,
+  (float)0.94901365, (float)0.94797695, (float)0.94693011, (float)0.94587314, (float)0.94480604,
+  (float)0.94372880, (float)0.94264150, (float)0.94154406, (float)0.94043654, (float)0.93931895,
+  (float)0.93819129, (float)0.93705362, (float)0.93590593, (float)0.93474817, (float)0.93358046,
+  (float)0.93240267, (float)0.93121493, (float)0.93001723, (float)0.92880952, (float)0.92759192,
+  (float)0.92636436, (float)0.92512691, (float)0.92387950, (float)0.92262226, (float)0.92135507,
+  (float)0.92007804, (float)0.91879123, (float)0.91749448, (float)0.91618794, (float)0.91487157,
+  (float)0.91354543, (float)0.91220951, (float)0.91086382, (float)0.90950835, (float)0.90814310,
+  (float)0.90676820, (float)0.90538365, (float)0.90398932, (float)0.90258527, (float)0.90117157,
+  (float)0.89974827, (float)0.89831525, (float)0.89687276, (float)0.89542055, (float)0.89395875,
+  (float)0.89248741, (float)0.89100647, (float)0.88951600, (float)0.88801610, (float)0.88650662,
+  (float)0.88498759, (float)0.88345915, (float)0.88192123, (float)0.88037384, (float)0.87881714,
+  (float)0.87725091, (float)0.87567532, (float)0.87409031, (float)0.87249595, (float)0.87089223,
+  (float)0.86927933, (float)0.86765701, (float)0.86602539, (float)0.86438447, (float)0.86273432,
+  (float)0.86107504, (float)0.85940641, (float)0.85772860, (float)0.85604161, (float)0.85434544,
+  (float)0.85264009, (float)0.85092574, (float)0.84920216, (float)0.84746951, (float)0.84572780,
+  (float)0.84397697, (float)0.84221715, (float)0.84044844, (float)0.83867055, (float)0.83688372,
+  (float)0.83508795, (float)0.83328319, (float)0.83146954, (float)0.82964706, (float)0.82781565,
+  (float)0.82597530, (float)0.82412612, (float)0.82226813, (float)0.82040137, (float)0.81852591,
+  (float)0.81664157, (float)0.81474847, (float)0.81284660, (float)0.81093609, (float)0.80901700,
+  (float)0.80708915, (float)0.80515265, (float)0.80320752, (float)0.80125374, (float)0.79929143,
+  (float)0.79732066, (float)0.79534125, (float)0.79335332, (float)0.79135686, (float)0.78935200,
+  (float)0.78733861, (float)0.78531694, (float)0.78328675, (float)0.78124815, (float)0.77920121,
+  (float)0.77714586, (float)0.77508223, (float)0.77301049, (float)0.77093029, (float)0.76884180,
+  (float)0.76674509, (float)0.76464021, (float)0.76252711, (float)0.76040596, (float)0.75827658,
+  (float)0.75613904, (float)0.75399339, (float)0.75183970, (float)0.74967796, (float)0.74750835,
+  (float)0.74533057, (float)0.74314481, (float)0.74095106, (float)0.73874938, (float)0.73653996,
+  (float)0.73432249, (float)0.73209721, (float)0.72986400, (float)0.72762305, (float)0.72537428,
+  (float)0.72311789, (float)0.72085363, (float)0.71858162, (float)0.71630186, (float)0.71401453,
+  (float)0.71171951, (float)0.70941705, (float)0.70710677, (float)0.70478898, (float)0.70246363,
+  (float)0.70013070, (float)0.69779032, (float)0.69544268, (float)0.69308734, (float)0.69072461,
+  (float)0.68835449, (float)0.68597704, (float)0.68359220, (float)0.68120021, (float)0.67880070,
+  (float)0.67639399, (float)0.67398006, (float)0.67155886, (float)0.66913044, (float)0.66669512,
+  (float)0.66425240, (float)0.66180259, (float)0.65934575, (float)0.65688181, (float)0.65441096,
+  (float)0.65193301, (float)0.64944804, (float)0.64695609, (float)0.64445722, (float)0.64195150,
+  (float)0.63943905, (float)0.63691956, (float)0.63439327, (float)0.63186014, (float)0.62932026,
+  (float)0.62677372, (float)0.62422055, (float)0.62166059, (float)0.61909389, (float)0.61652064,
+  (float)0.61394072, (float)0.61135429, (float)0.60876143, (float)0.60616189, (float)0.60355592,
+  (float)0.60094339, (float)0.59832448, (float)0.59569913, (float)0.59306765, (float)0.59042960,
+  (float)0.58778518, (float)0.58513451, (float)0.58247757, (float)0.57981461, (float)0.57714522,
+  (float)0.57446963, (float)0.57178789, (float)0.56910002, (float)0.56640613, (float)0.56370628,
+  (float)0.56100023, (float)0.55828822, (float)0.55557019, (float)0.55284619, (float)0.55011630,
+  (float)0.54738069, (float)0.54463905, (float)0.54189152, (float)0.53913826, (float)0.53637916,
+  (float)0.53361434, (float)0.53084403, (float)0.52806783, (float)0.52528596, (float)0.52249849,
+  (float)0.51970541, (float)0.51690674, (float)0.51410276, (float)0.51129305, (float)0.50847787,
+  (float)0.50565726, (float)0.50283122, (float)0.50000006, (float)0.49716327, (float)0.49432117,
+  (float)0.49147379, (float)0.48862115, (float)0.48576325, (float)0.48290038, (float)0.48003212,
+  (float)0.47715873, (float)0.47428021, (float)0.47139663, (float)0.46850798, (float)0.46561456,
+  (float)0.46271589, (float)0.45981231, (float)0.45690379, (float)0.45399037, (float)0.45107210,
+  (float)0.44814920, (float)0.44522130, (float)0.44228864, (float)0.43935123, (float)0.43640912,
+  (float)0.43346232, (float)0.43051112, (float)0.42755505, (float)0.42459446, (float)0.42162928,
+  (float)0.41865960, (float)0.41568545, (float)0.41270703, (float)0.40972400, (float)0.40673658,
+  (float)0.40374479, (float)0.40074870, (float)0.39774850, (float)0.39474386, (float)0.39173496,
+  (float)0.38872188, (float)0.38570464, (float)0.38268328, (float)0.37965804, (float)0.37662849,
+  (float)0.37359491, (float)0.37055734, (float)0.36751580, (float)0.36447033, (float)0.36142117,
+  (float)0.35836792, (float)0.35531086, (float)0.35224995, (float)0.34918529, (float)0.34611690,
+  (float)0.34304500, (float)0.33996922, (float)0.33688980, (float)0.33380675, (float)0.33072016,
+  (float)0.32763001, (float)0.32453656, (float)0.32143945, (float)0.31833887, (float)0.31523487,
+  (float)0.31212750, (float)0.30901679, (float)0.30590302, (float)0.30278572, (float)0.29966521,
+  (float)0.29654145, (float)0.29341453, (float)0.29028472, (float)0.28715155, (float)0.28401530,
+  (float)0.28087601, (float)0.27773371, (float)0.27458847, (float)0.27144048, (float)0.26828936,
+  (float)0.26513538, (float)0.26197854, (float)0.25881892, (float)0.25565651, (float)0.25249159,
+  (float)0.24932374, (float)0.24615324, (float)0.24298008, (float)0.23980433, (float)0.23662600,
+  (float)0.23344538, (float)0.23026201, (float)0.22707619, (float)0.22388794, (float)0.22069728,
+  (float)0.21750426, (float)0.21430916, (float)0.21111152, (float)0.20791161, (float)0.20470949,
+  (float)0.20150517, (float)0.19829892, (float)0.19509031, (float)0.19187963, (float)0.18866688,
+  (float)0.18545210, (float)0.18223536, (float)0.17901689, (float)0.17579627, (float)0.17257376,
+  (float)0.16934940, (float)0.16612324, (float)0.16289529, (float)0.15966584, (float)0.15643445,
+  (float)0.15320137, (float)0.14996666, (float)0.14673033, (float)0.14349243, (float)0.14025325,
+  (float)0.13701232, (float)0.13376991, (float)0.13052608, (float)0.12728085, (float)0.12403426,
+  (float)0.12078657, (float)0.11753736, (float)0.11428688, (float)0.11103519, (float)0.10778230,
+  (float)0.10452849, (float)0.10127334, (float)0.09801710, (float)0.09475980, (float)0.09150149,
+  (float)0.08824220, (float)0.08498220, (float)0.08172106, (float)0.07845904, (float)0.07519618,
+  (float)0.07193252, (float)0.06866808, (float)0.06540315, (float)0.06213728, (float)0.05887074,
+  (float)0.05560357, (float)0.05233581, (float)0.04906749, (float)0.04579888, (float)0.04252954,
+  (float)0.03925974, (float)0.03598953, (float)0.03271893, (float)0.02944798, (float)0.02617695,
+  (float)0.02290541, (float)0.01963361, (float)0.01636161, (float)0.01308943, (float)0.00981712,
+  (float)0.00654493, (float)0.00327244, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000,
+  (float)0.00000000, (float)0.00000000, (float)0.00000000, (float)0.00000000
+};
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_WINDOWS_PRIVATE_H_
diff --git a/trunk/src/modules/audio_processing/processing_component.cc b/trunk/src/modules/audio_processing/processing_component.cc
new file mode 100644
index 0000000..9ac1257
--- /dev/null
+++ b/trunk/src/modules/audio_processing/processing_component.cc
@@ -0,0 +1,112 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "processing_component.h"
+
+#include <cassert>
+
+#include "audio_processing_impl.h"
+
+namespace webrtc {
+
+// Caches the owning APM pointer; the component starts uninitialized,
+// disabled, and with no allocated handles.
+ProcessingComponent::ProcessingComponent(const AudioProcessingImpl* apm)
+  : apm_(apm),
+    initialized_(false),
+    enabled_(false),
+    num_handles_(0) {}
+
+ProcessingComponent::~ProcessingComponent() {
+  // Callers must invoke Destroy() before destruction; the base class
+  // cannot release handles here because DestroyHandle() is pure virtual
+  // and no longer dispatches to the subclass during base destruction.
+  assert(initialized_ == false);
+}
+
+// Releases every allocated handle, newest first, and marks the
+// component uninitialized. Always reports success.
+int ProcessingComponent::Destroy() {
+  for (; !handles_.empty(); handles_.pop_back()) {
+    DestroyHandle(handles_.back());
+  }
+  initialized_ = false;
+  return apm_->kNoError;
+}
+
+// Switches the component on or off. Turning it on from the off state
+// triggers a full Initialize(); if that fails, the component is rolled
+// back to disabled and the error is propagated.
+int ProcessingComponent::EnableComponent(bool enable) {
+  const bool turning_on = enable && !enabled_;
+  enabled_ = enable;  // Must be set before Initialize() is called.
+
+  if (turning_on) {
+    const int err = Initialize();
+    if (err != apm_->kNoError) {
+      enabled_ = false;
+      return err;
+    }
+  }
+
+  return apm_->kNoError;
+}
+
+// Returns whether the component is currently switched on.
+bool ProcessingComponent::is_component_enabled() const {
+  return enabled_;
+}
+
+// Returns the opaque processing handle at |index|. The index must be
+// less than num_handles(); this is only checked in debug builds.
+void* ProcessingComponent::handle(int index) const {
+  assert(index < num_handles_);
+  return handles_[index];
+}
+
+// Returns the number of handles currently in use. Note this may be
+// smaller than handles_.size(): Initialize() only ever grows the
+// vector, so stale slots can remain after a re-initialization.
+int ProcessingComponent::num_handles() const {
+  return num_handles_;
+}
+
+// (Re)creates and initializes the underlying handles, then applies the
+// current configuration. A no-op while the component is disabled. The
+// handle vector only ever grows; handles that already exist are
+// re-initialized in place rather than re-created.
+int ProcessingComponent::Initialize() {
+  if (!enabled_) {
+    return apm_->kNoError;
+  }
+
+  num_handles_ = num_handles_required();
+  if (num_handles_ > static_cast<int>(handles_.size())) {
+    handles_.resize(num_handles_, NULL);
+  }
+
+  assert(static_cast<int>(handles_.size()) >= num_handles_);
+  for (int i = 0; i < num_handles_; i++) {
+    if (handles_[i] == NULL) {
+      handles_[i] = CreateHandle();
+      if (handles_[i] == NULL) {
+        return apm_->kCreationFailedError;
+      }
+    }
+
+    int err = InitializeHandle(handles_[i]);
+    if (err != apm_->kNoError) {
+      // Report the handle's own error code rather than the raw return
+      // value from InitializeHandle().
+      return GetHandleError(handles_[i]);
+    }
+  }
+
+  initialized_ = true;
+  return Configure();
+}
+
+// Applies the component's current settings to every active handle.
+// A no-op until Initialize() has succeeded.
+int ProcessingComponent::Configure() {
+  if (!initialized_) {
+    return apm_->kNoError;
+  }
+
+  assert(static_cast<int>(handles_.size()) >= num_handles_);
+  for (int i = 0; i < num_handles_; i++) {
+    int err = ConfigureHandle(handles_[i]);
+    if (err != apm_->kNoError) {
+      return GetHandleError(handles_[i]);
+    }
+  }
+
+  return apm_->kNoError;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/processing_component.h b/trunk/src/modules/audio_processing/processing_component.h
new file mode 100644
index 0000000..b3457b5
--- /dev/null
+++ b/trunk/src/modules/audio_processing/processing_component.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_PROCESSING_COMPONENT_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_PROCESSING_COMPONENT_H_
+
+#include <vector>
+
+#include "audio_processing.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+
+// Base class for audio processing sub-components. Manages a set of
+// opaque per-channel handles: subclasses implement creation,
+// initialization, configuration and destruction of a single handle,
+// and this class sequences those operations across all handles.
+class ProcessingComponent {
+ public:
+  explicit ProcessingComponent(const AudioProcessingImpl* apm);
+  virtual ~ProcessingComponent();
+
+  // Allocates and initializes all required handles; no-op when disabled.
+  virtual int Initialize();
+  // Destroys all handles and marks the component uninitialized.
+  virtual int Destroy();
+
+  bool is_component_enabled() const;
+
+ protected:
+  // Pushes the current settings to every handle (once initialized).
+  virtual int Configure();
+  // Enables/disables the component; enabling triggers Initialize().
+  int EnableComponent(bool enable);
+  // Returns the handle at |index| (must be < num_handles()).
+  void* handle(int index) const;
+  int num_handles() const;
+
+ private:
+  // Per-handle hooks implemented by each concrete component.
+  virtual void* CreateHandle() const = 0;
+  virtual int InitializeHandle(void* handle) const = 0;
+  virtual int ConfigureHandle(void* handle) const = 0;
+  virtual int DestroyHandle(void* handle) const = 0;
+  // Number of handles the component needs.
+  virtual int num_handles_required() const = 0;
+  // Returns the error to report for a failed |handle|.
+  virtual int GetHandleError(void* handle) const = 0;
+
+  const AudioProcessingImpl* apm_;  // Not owned.
+  std::vector<void*> handles_;      // Opaque component handles.
+  bool initialized_;
+  bool enabled_;
+  int num_handles_;                 // Handles currently in use.
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_PROCESSING_COMPONENT_H_
diff --git a/trunk/src/modules/audio_processing/splitting_filter.cc b/trunk/src/modules/audio_processing/splitting_filter.cc
new file mode 100644
index 0000000..1526141
--- /dev/null
+++ b/trunk/src/modules/audio_processing/splitting_filter.cc
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "splitting_filter.h"
+#include "signal_processing_library.h"
+
+namespace webrtc {
+
+// Splits |in_data| into a low and a high band using the QMF analysis
+// filter from the signal processing library. |filter_state1| and
+// |filter_state2| carry the all-pass filter states between calls.
+void SplittingFilterAnalysis(const WebRtc_Word16* in_data,
+                             WebRtc_Word16* low_band,
+                             WebRtc_Word16* high_band,
+                             WebRtc_Word32* filter_state1,
+                             WebRtc_Word32* filter_state2) {
+  WebRtcSpl_AnalysisQMF(in_data, low_band, high_band,
+                        filter_state1, filter_state2);
+}
+
+// Recombines |low_band| and |high_band| into |out_data| using the QMF
+// synthesis filter from the signal processing library. |filt_state1|
+// and |filt_state2| carry the all-pass filter states between calls.
+void SplittingFilterSynthesis(const WebRtc_Word16* low_band,
+                              const WebRtc_Word16* high_band,
+                              WebRtc_Word16* out_data,
+                              WebRtc_Word32* filt_state1,
+                              WebRtc_Word32* filt_state2) {
+  WebRtcSpl_SynthesisQMF(low_band, high_band, out_data,
+                         filt_state1, filt_state2);
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/splitting_filter.h b/trunk/src/modules/audio_processing/splitting_filter.h
new file mode 100644
index 0000000..661bfb2
--- /dev/null
+++ b/trunk/src/modules/audio_processing/splitting_filter.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+namespace webrtc {
+/*
+ * SplittingFilterAnalysis(...)
+ *
+ * Splits a super-wb signal into two subbands: 0-8 kHz and 8-16 kHz.
+ *
+ * Input:
+ *    - in_data  : super-wb audio signal
+ *
+ * Input & Output:
+ *    - filt_state1: Filter state for first all-pass filter
+ *    - filt_state2: Filter state for second all-pass filter
+ *
+ * Output:
+ *    - low_band : The signal from the 0-8 kHz band
+ *    - high_band  : The signal from the 8-16 kHz band
+ */
+void SplittingFilterAnalysis(const WebRtc_Word16* in_data,
+                             WebRtc_Word16* low_band,
+                             WebRtc_Word16* high_band,
+                             WebRtc_Word32* filt_state1,
+                             WebRtc_Word32* filt_state2);
+
+/*
+ * SplittingFilterSynthesis(...)
+ *
+ * Combines the two subbands (0-8 and 8-16 kHz) into a super-wb signal.
+ *
+ * Input:
+ *    - low_band : The signal with the 0-8 kHz band
+ *    - high_band  : The signal with the 8-16 kHz band
+ *
+ * Input & Output:
+ *    - filt_state1: Filter state for first all-pass filter
+ *    - filt_state2: Filter state for second all-pass filter
+ *
+ * Output:
+ *    - out_data : super-wb speech signal
+ */
+void SplittingFilterSynthesis(const WebRtc_Word16* low_band,
+                              const WebRtc_Word16* high_band,
+                              WebRtc_Word16* out_data,
+                              WebRtc_Word32* filt_state1,
+                              WebRtc_Word32* filt_state2);
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
diff --git a/trunk/src/modules/audio_processing/test/android/apmtest/AndroidManifest.xml b/trunk/src/modules/audio_processing/test/android/apmtest/AndroidManifest.xml
new file mode 100644
index 0000000..c6063b3
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/android/apmtest/AndroidManifest.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- BEGIN_INCLUDE(manifest) -->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+        package="com.example.native_activity"
+        android:versionCode="1"
+        android:versionName="1.0">
+
+    <!-- This is the platform API where NativeActivity was introduced. -->
+    <uses-sdk android:minSdkVersion="8" />
+
+    <!-- This .apk has no Java code itself, so set hasCode to false. -->
+    <application android:label="@string/app_name" android:hasCode="false" android:debuggable="true">
+
+        <!-- Our activity is the built-in NativeActivity framework class.
+             This will take care of integrating with our NDK code. -->
+        <activity android:name="android.app.NativeActivity"
+                android:label="@string/app_name"
+                android:configChanges="orientation|keyboardHidden">
+            <!-- Tell NativeActivity the name of our .so -->
+            <meta-data android:name="android.app.lib_name"
+                    android:value="apmtest-activity" />
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+
+</manifest>
+<!-- END_INCLUDE(manifest) -->
diff --git a/trunk/src/modules/audio_processing/test/android/apmtest/default.properties b/trunk/src/modules/audio_processing/test/android/apmtest/default.properties
new file mode 100644
index 0000000..9a2c9f6
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/android/apmtest/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+# 
+# This file must be checked in Version Control Systems.
+# 
+# To customize properties used by the Ant build system use,
+# "build.properties", and override values to adapt the script to your
+# project structure.
+
+# Project target.
+target=android-9
diff --git a/trunk/src/modules/audio_processing/test/android/apmtest/jni/Android.mk b/trunk/src/modules/audio_processing/test/android/apmtest/jni/Android.mk
new file mode 100644
index 0000000..eaf3c9d
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/android/apmtest/jni/Android.mk
@@ -0,0 +1,26 @@
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Builds the native-activity shared library. The module name must match
+# the android.app.lib_name meta-data value ("apmtest-activity") declared
+# in AndroidManifest.xml so NativeActivity can load it.
+LOCAL_MODULE    := apmtest-activity
+LOCAL_SRC_FILES := main.c
+LOCAL_LDLIBS    := -llog -landroid -lEGL -lGLESv1_CM
+LOCAL_STATIC_LIBRARIES := android_native_app_glue
+
+include $(BUILD_SHARED_LIBRARY)
+
+# Pull in the NDK's native_app_glue helper sources.
+$(call import-module,android/native_app_glue)
diff --git a/trunk/src/modules/audio_processing/test/android/apmtest/jni/Application.mk b/trunk/src/modules/audio_processing/test/android/apmtest/jni/Application.mk
new file mode 100644
index 0000000..22d188e
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/android/apmtest/jni/Application.mk
@@ -0,0 +1 @@
+# Build against the android-9 platform, matching target=android-9 in
+# the project's default.properties.
+APP_PLATFORM := android-9
diff --git a/trunk/src/modules/audio_processing/test/android/apmtest/jni/main.c b/trunk/src/modules/audio_processing/test/android/apmtest/jni/main.c
new file mode 100644
index 0000000..2e19635
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/android/apmtest/jni/main.c
@@ -0,0 +1,307 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+//BEGIN_INCLUDE(all)
+#include <jni.h>
+#include <errno.h>
+
+#include <EGL/egl.h>
+#include <GLES/gl.h>
+
+#include <android/sensor.h>
+#include <android/log.h>
+#include <android_native_app_glue.h>
+
+#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__))
+#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))
+
+/**
+ * Our saved state data.
+ *
+ * Saved in APP_CMD_SAVE_STATE and restored from android_app::savedState
+ * in android_main(), so the animation survives activity restarts.
+ */
+struct saved_state {
+    float angle;    // animation phase, used as the green clear-color channel
+    int32_t x;      // last touch x coordinate
+    int32_t y;      // last touch y coordinate
+};
+
+/**
+ * Shared state for our app, hung off android_app::userData.
+ */
+struct engine {
+    struct android_app* app;             // glue-provided app object (window, looper, ...)
+
+    ASensorManager* sensorManager;
+    const ASensor* accelerometerSensor;  // may be NULL; guarded before every use
+    ASensorEventQueue* sensorEventQueue;
+
+    int animating;                       // non-zero while frames should be drawn continuously
+    EGLDisplay display;                  // EGL handles owned by init/term_display below
+    EGLSurface surface;
+    EGLContext context;
+    int32_t width;                       // surface size queried from EGL
+    int32_t height;
+    struct saved_state state;            // state persisted across restarts
+};
+
+/**
+ * Initialize an EGL context for the current display.
+ *
+ * Opens the default EGL display, picks a window-compatible config with at
+ * least 8 bits per color channel, creates a window surface and context,
+ * makes them current, and records the handles and queried surface size in
+ * *engine. Returns 0 on success, -1 if eglMakeCurrent fails.
+ */
+static int engine_init_display(struct engine* engine) {
+    // initialize OpenGL ES and EGL
+
+    /*
+     * Here specify the attributes of the desired configuration.
+     * Below, we select an EGLConfig with at least 8 bits per color
+     * component compatible with on-screen windows
+     */
+    const EGLint attribs[] = {
+            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+            EGL_BLUE_SIZE, 8,
+            EGL_GREEN_SIZE, 8,
+            EGL_RED_SIZE, 8,
+            EGL_NONE
+    };
+    EGLint w, h, dummy, format;  // NOTE(review): 'dummy' is never used.
+    EGLint numConfigs;
+    EGLConfig config;
+    EGLSurface surface;
+    EGLContext context;
+
+    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+
+    eglInitialize(display, 0, 0);
+
+    /* Here, the application chooses the configuration it desires. In this
+     * sample, we have a very simplified selection process, where we pick
+     * the first EGLConfig that matches our criteria */
+    eglChooseConfig(display, attribs, &config, 1, &numConfigs);
+
+    /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
+     * guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
+     * As soon as we picked a EGLConfig, we can safely reconfigure the
+     * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
+    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
+
+    ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
+
+    surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
+    // No EGL_CONTEXT_CLIENT_VERSION attribute list: the EGL default yields
+    // an OpenGL ES 1.x context, matching the GLES/gl.h calls below.
+    context = eglCreateContext(display, config, NULL, NULL);
+
+    if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
+        LOGW("Unable to eglMakeCurrent");
+        return -1;
+    }
+
+    eglQuerySurface(display, surface, EGL_WIDTH, &w);
+    eglQuerySurface(display, surface, EGL_HEIGHT, &h);
+
+    engine->display = display;
+    engine->context = context;
+    engine->surface = surface;
+    engine->width = w;
+    engine->height = h;
+    engine->state.angle = 0;
+
+    // Initialize GL state.
+    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
+    glEnable(GL_CULL_FACE);
+    glShadeModel(GL_SMOOTH);
+    glDisable(GL_DEPTH_TEST);
+
+    return 0;
+}
+
+/**
+ * Draw the current frame in the display.
+ * No-op when no display has been initialized yet.
+ */
+static void engine_draw_frame(struct engine* engine) {
+    // NOTE(review): compares against NULL rather than EGL_NO_DISPLAY;
+    // engine_term_display resets display to EGL_NO_DISPLAY — the two are
+    // typically the same value, but confirm.
+    if (engine->display == NULL) {
+        // No display.
+        return;
+    }
+
+    // Just fill the screen with a color: red tracks touch x, green the
+    // animation angle, blue tracks touch y.
+    glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
+            ((float)engine->state.y)/engine->height, 1);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    eglSwapBuffers(engine->display, engine->surface);
+}
+
+/**
+ * Tear down the EGL context currently associated with the display.
+ * Safe to call repeatedly: all handles are reset to the EGL_NO_* values
+ * afterwards, and animation is stopped.
+ */
+static void engine_term_display(struct engine* engine) {
+    if (engine->display != EGL_NO_DISPLAY) {
+        // Unbind before destroying the context/surface.
+        eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+        if (engine->context != EGL_NO_CONTEXT) {
+            eglDestroyContext(engine->display, engine->context);
+        }
+        if (engine->surface != EGL_NO_SURFACE) {
+            eglDestroySurface(engine->display, engine->surface);
+        }
+        eglTerminate(engine->display);
+    }
+    engine->animating = 0;
+    engine->display = EGL_NO_DISPLAY;
+    engine->context = EGL_NO_CONTEXT;
+    engine->surface = EGL_NO_SURFACE;
+}
+
+/**
+ * Process the next input event.
+ * A motion (touch) event starts the animation and records the touch
+ * position in the saved state. Returns 1 if the event was handled,
+ * 0 otherwise.
+ */
+static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
+    struct engine* engine = (struct engine*)app->userData;
+    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
+        engine->animating = 1;
+        engine->state.x = AMotionEvent_getX(event, 0);
+        engine->state.y = AMotionEvent_getY(event, 0);
+        return 1;
+    }
+    return 0;
+}
+
+/**
+ * Process the next main command from the app glue (lifecycle events).
+ */
+static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
+    struct engine* engine = (struct engine*)app->userData;
+    switch (cmd) {
+        case APP_CMD_SAVE_STATE:
+            // The system has asked us to save our current state.  Do so.
+            // NOTE(review): malloc is used but no <stdlib.h> include is
+            // visible in this file; presumably pulled in transitively via
+            // android_native_app_glue.h — confirm.
+            engine->app->savedState = malloc(sizeof(struct saved_state));
+            *((struct saved_state*)engine->app->savedState) = engine->state;
+            engine->app->savedStateSize = sizeof(struct saved_state);
+            break;
+        case APP_CMD_INIT_WINDOW:
+            // The window is being shown, get it ready.
+            if (engine->app->window != NULL) {
+                engine_init_display(engine);
+                engine_draw_frame(engine);
+            }
+            break;
+        case APP_CMD_TERM_WINDOW:
+            // The window is being hidden or closed, clean it up.
+            engine_term_display(engine);
+            break;
+        case APP_CMD_GAINED_FOCUS:
+            // When our app gains focus, we start monitoring the accelerometer.
+            if (engine->accelerometerSensor != NULL) {
+                ASensorEventQueue_enableSensor(engine->sensorEventQueue,
+                        engine->accelerometerSensor);
+                // We'd like to get 60 events per second (in us).
+                ASensorEventQueue_setEventRate(engine->sensorEventQueue,
+                        engine->accelerometerSensor, (1000L/60)*1000);
+            }
+            break;
+        case APP_CMD_LOST_FOCUS:
+            // When our app loses focus, we stop monitoring the accelerometer.
+            // This is to avoid consuming battery while not being used.
+            if (engine->accelerometerSensor != NULL) {
+                ASensorEventQueue_disableSensor(engine->sensorEventQueue,
+                        engine->accelerometerSensor);
+            }
+            // Also stop animating.
+            engine->animating = 0;
+            engine_draw_frame(engine);
+            break;
+    }
+}
+
+/**
+ * This is the main entry point of a native application that is using
+ * android_native_app_glue.  It runs in its own thread, with its own
+ * event loop for receiving input events and doing other things.
+ *
+ * Registers the command/input callbacks, restores any saved state, then
+ * loops: blocking on the looper while idle, polling while animating, and
+ * drawing one frame per iteration while animating.
+ */
+void android_main(struct android_app* state) {
+    struct engine engine;
+
+    // Make sure glue isn't stripped.
+    app_dummy();
+
+    // NOTE(review): memset is used without a visible <string.h> include —
+    // presumably provided transitively via the glue header; confirm.
+    memset(&engine, 0, sizeof(engine));
+    state->userData = &engine;
+    state->onAppCmd = engine_handle_cmd;
+    state->onInputEvent = engine_handle_input;
+    engine.app = state;
+
+    // Prepare to monitor accelerometer
+    engine.sensorManager = ASensorManager_getInstance();
+    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
+            ASENSOR_TYPE_ACCELEROMETER);
+    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
+            state->looper, LOOPER_ID_USER, NULL, NULL);
+
+    if (state->savedState != NULL) {
+        // We are starting with a previous saved state; restore from it.
+        engine.state = *(struct saved_state*)state->savedState;
+    }
+
+    // loop waiting for stuff to do.
+
+    while (1) {
+        // Read all pending events.
+        int ident;
+        int events;
+        struct android_poll_source* source;
+
+        // If not animating, we will block forever waiting for events.
+        // If animating, we loop until all events are read, then continue
+        // to draw the next frame of animation.
+        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
+                (void**)&source)) >= 0) {
+
+            // Process this event.
+            if (source != NULL) {
+                source->process(state, source);
+            }
+
+            // If a sensor has data, process it now.
+            if (ident == LOOPER_ID_USER) {
+                if (engine.accelerometerSensor != NULL) {
+                    ASensorEvent event;
+                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue,
+                            &event, 1) > 0) {
+                        LOGI("accelerometer: x=%f y=%f z=%f",
+                                event.acceleration.x, event.acceleration.y,
+                                event.acceleration.z);
+                    }
+                }
+            }
+
+            // Check if we are exiting.
+            if (state->destroyRequested != 0) {
+                engine_term_display(&engine);
+                return;
+            }
+        }
+
+        if (engine.animating) {
+            // Done with events; draw next animation frame.
+            engine.state.angle += .01f;
+            if (engine.state.angle > 1) {
+                engine.state.angle = 0;
+            }
+
+            // Drawing is throttled to the screen update rate, so there
+            // is no need to do timing here.
+            engine_draw_frame(&engine);
+        }
+    }
+}
+//END_INCLUDE(all)
diff --git a/trunk/src/modules/audio_processing/test/android/apmtest/res/values/strings.xml b/trunk/src/modules/audio_processing/test/android/apmtest/res/values/strings.xml
new file mode 100644
index 0000000..d0bd0f3
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/android/apmtest/res/values/strings.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <!-- Application label referenced from the manifest / launcher. -->
+    <string name="app_name">apmtest</string>
+</resources>
diff --git a/trunk/src/modules/audio_processing/test/apmtest.m b/trunk/src/modules/audio_processing/test/apmtest.m
new file mode 100644
index 0000000..3172cd1
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/apmtest.m
@@ -0,0 +1,355 @@
+function apmtest(task, testname, filepath, casenumber, legacy)
+%APMTEST is a tool to process APM file sets and easily display the output.
+%   APMTEST(TASK, TESTNAME, CASENUMBER) performs one of several TASKs:
+%     'test'  Processes the files to produce test output.
+%     'list'  Prints a list of cases in the test set, preceded by their
+%             CASENUMBERs.
+%     'show'  Uses spclab to show the test case specified by the
+%             CASENUMBER parameter.
+%
+%   using a set of test files determined by TESTNAME:
+%     'all'   All tests.
+%     'apm'   The standard APM test set (default).
+%     'apmm'  The mobile APM test set.
+%     'aec'   The AEC test set.
+%     'aecm'  The AECM test set.
+%     'agc'   The AGC test set.
+%     'ns'    The NS test set.
+%     'vad'   The VAD test set.
+%
+%   FILEPATH specifies the path to the test data files.
+%
+%   CASENUMBER can be used to select a single test case. Omit CASENUMBER,
+%   or set to zero, to use all test cases.
+%
+%   LEGACY selects the old VQE binary and file naming conventions.
+
+% Default the trailing arguments when omitted or empty.
+if nargin < 5 || isempty(legacy)
+  % Set to true to run old VQE recordings.
+  legacy = false;
+end
+
+if nargin < 4 || isempty(casenumber)
+  casenumber = 0;
+end
+
+if nargin < 3 || isempty(filepath)
+  filepath = 'data/';
+end
+
+if nargin < 2 || isempty(testname)
+  testname = 'all';
+end
+
+if nargin < 1 || isempty(task)
+  task = 'test';
+end
+
+% Validate arguments before touching the file system.
+if ~strcmp(task, 'test') && ~strcmp(task, 'list') && ~strcmp(task, 'show')
+  error(['TASK ' task ' is not recognized']);
+end
+
+if casenumber == 0 && strcmp(task, 'show')
+  error(['CASENUMBER must be specified for TASK ' task]);
+end
+
+inpath = [filepath 'input/'];
+outpath = [filepath 'output/'];
+refpath = [filepath 'reference/'];
+
+if strcmp(testname, 'all')
+  tests = {'apm','apmm','aec','aecm','agc','ns','vad'};
+else
+  tests = {testname};
+end
+
+if legacy
+  progname = './test';
+else
+  progname = './process_test';
+end
+
+% Temporary file names shared with recurseDir; values depend on the
+% legacy mode selected below.
+global farFile;
+global nearFile;
+global eventFile;
+global delayFile;
+global driftFile;
+
+if legacy
+  farFile = 'vqeFar.pcm';
+  nearFile = 'vqeNear.pcm';
+  eventFile = 'vqeEvent.dat';
+  delayFile = 'vqeBuf.dat';
+  driftFile = 'vqeDrift.dat';
+else
+  farFile = 'apm_far.pcm';
+  nearFile = 'apm_near.pcm';
+  eventFile = 'apm_event.dat';
+  delayFile = 'apm_delay.dat';
+  driftFile = 'apm_drift.dat';
+end
+
+simulateMode = false;
+nErr = 0;
+nCases = 0;
+% Process each requested test set in turn.
+for i=1:length(tests)
+  simulateMode = false;
+
+  if strcmp(tests{i}, 'apm')
+    testdir = ['apm/'];
+    outfile = ['out'];
+    if legacy
+      opt = ['-ec 1 -agc 2 -nc 2 -vad 3'];
+    else
+      opt = ['--no_progress -hpf' ...
+          ' -aec --drift_compensation -agc --fixed_digital' ...
+          ' -ns --ns_moderate -vad'];
+    end
+
+  % NOTE(review): 'apm-swb' is not in the 'all' list above; it is only
+  % reachable by passing TESTNAME='apm-swb' explicitly.
+  elseif strcmp(tests{i}, 'apm-swb')
+    simulateMode = true;
+    testdir = ['apm-swb/'];
+    outfile = ['out'];
+    if legacy
+      opt = ['-fs 32000 -ec 1 -agc 2 -nc 2'];
+    else
+      opt = ['--no_progress -fs 32000 -hpf' ...
+          ' -aec --drift_compensation -agc --adaptive_digital' ...
+          ' -ns --ns_moderate -vad'];
+    end
+  elseif strcmp(tests{i}, 'apmm')
+    testdir = ['apmm/'];
+    outfile = ['out'];
+    opt = ['-aec --drift_compensation -agc --fixed_digital -hpf -ns ' ...
+        '--ns_moderate'];
+
+  else
+    error(['TESTNAME ' tests{i} ' is not recognized']);
+  end
+
+  inpathtest = [inpath testdir];
+  outpathtest = [outpath testdir];
+  refpathtest = [refpath testdir];
+
+  if ~exist(inpathtest,'dir')
+    error(['Input directory ' inpathtest ' does not exist']);
+  end
+
+  if ~exist(refpathtest,'dir')
+    warning(['Reference directory ' refpathtest ' does not exist']);
+  end
+
+  [status, errMsg] = mkdir(outpathtest);
+  if (status == 0)
+    error(errMsg);
+  end
+
+  [nErr, nCases] = recurseDir(inpathtest, outpathtest, refpathtest, outfile, ...
+      progname, opt, simulateMode, nErr, nCases, task, casenumber, legacy);
+
+  if strcmp(task, 'test') || strcmp(task, 'show')
+    % Remove the temporary symlinks created by recurseDir.
+    system(['rm ' farFile]);
+    system(['rm ' nearFile]);
+    if simulateMode == false
+      system(['rm ' eventFile]);
+      system(['rm ' delayFile]);
+      system(['rm ' driftFile]);
+    end
+  end
+end
+
+if ~strcmp(task, 'list')
+  if nErr == 0
+    % NOTE(review): the trailing nErr argument is unused here — the format
+    % string has no conversion specifier.
+    fprintf(1, '\nAll files are bit-exact to reference\n', nErr);
+  else
+    fprintf(1, '\n%d files are NOT bit-exact to reference\n', nErr);
+  end
+end
+
+
+% RECURSEDIR processes one directory tree of test data.
+%   A directory containing no subdirectories is a leaf test case: its
+%   input files are symlinked to the shared temporary names, PROGNAME is
+%   run on them, and the output is compared against the reference files.
+%   Directories with subdirectories are recursed into, appending the
+%   subdirectory name to the output file stem. Returns the updated error
+%   and case counts.
+function [nErrOut, nCases] = recurseDir(inpath, outpath, refpath, ...
+    outfile, progname, opt, simulateMode, nErr, nCases, task, casenumber, ...
+    legacy)
+
+% Shared temporary file names set up in apmtest.
+global farFile;
+global nearFile;
+global eventFile;
+global delayFile;
+global driftFile;
+
+dirs = dir(inpath);
+nDirs = 0;
+nErrOut = nErr;
+for i=3:length(dirs) % skip . and ..
+  nDirs = nDirs + dirs(i).isdir;
+end
+
+
+if nDirs == 0
+  % Leaf directory: this is a single test case.
+  nCases = nCases + 1;
+
+  if casenumber == nCases || casenumber == 0
+
+    if strcmp(task, 'list')
+      fprintf([num2str(nCases) '. ' outfile '\n'])
+    else
+      vadoutfile = ['vad_' outfile '.dat'];
+      outfile = [outfile '.pcm'];
+
+      % Check for VAD test
+      vadTest = 0;
+      if ~isempty(findstr(opt, '-vad'))
+        vadTest = 1;
+        if legacy
+          opt = [opt ' ' outpath vadoutfile];
+        else
+          opt = [opt ' --vad_out_file ' outpath vadoutfile];
+        end
+      end
+
+      % Symlink whichever input naming convention is present (legacy VQE
+      % names take precedence) to the shared temporary file names.
+      if exist([inpath 'vqeFar.pcm'])
+        system(['ln -s -f ' inpath 'vqeFar.pcm ' farFile]);
+      elseif exist([inpath 'apm_far.pcm'])
+        system(['ln -s -f ' inpath 'apm_far.pcm ' farFile]);
+      end
+
+      if exist([inpath 'vqeNear.pcm'])
+        system(['ln -s -f ' inpath 'vqeNear.pcm ' nearFile]);
+      elseif exist([inpath 'apm_near.pcm'])
+        system(['ln -s -f ' inpath 'apm_near.pcm ' nearFile]);
+      end
+
+      if exist([inpath 'vqeEvent.dat'])
+        system(['ln -s -f ' inpath 'vqeEvent.dat ' eventFile]);
+      elseif exist([inpath 'apm_event.dat'])
+        system(['ln -s -f ' inpath 'apm_event.dat ' eventFile]);
+      end
+
+      if exist([inpath 'vqeBuf.dat'])
+        system(['ln -s -f ' inpath 'vqeBuf.dat ' delayFile]);
+      elseif exist([inpath 'apm_delay.dat'])
+        system(['ln -s -f ' inpath 'apm_delay.dat ' delayFile]);
+      end
+
+      if exist([inpath 'vqeSkew.dat'])
+        system(['ln -s -f ' inpath 'vqeSkew.dat ' driftFile]);
+      elseif exist([inpath 'vqeDrift.dat'])
+        system(['ln -s -f ' inpath 'vqeDrift.dat ' driftFile]);
+      elseif exist([inpath 'apm_drift.dat'])
+        system(['ln -s -f ' inpath 'apm_drift.dat ' driftFile]);
+      end
+
+      % Build the command line; simulation mode passes the inputs
+      % explicitly instead of relying on the working-directory files.
+      if simulateMode == false
+        command = [progname ' -o ' outpath outfile ' ' opt];
+      else
+        if legacy
+          inputCmd = [' -in ' nearFile];
+        else
+          inputCmd = [' -i ' nearFile];
+        end
+
+        if exist([farFile])
+          if legacy
+            inputCmd = [' -if ' farFile inputCmd];
+          else
+            inputCmd = [' -ir ' farFile inputCmd];
+          end
+        end
+        command = [progname inputCmd ' -o ' outpath outfile ' ' opt];
+      end
+      % This prevents MATLAB from using its own C libraries.
+      shellcmd = ['bash -c "unset LD_LIBRARY_PATH;'];
+      fprintf([command '\n']);
+      [status, result] = system([shellcmd command '"']);
+      fprintf(result);
+
+      fprintf(['Reference file: ' refpath outfile '\n']);
+
+      if vadTest == 1
+        equal_to_ref = are_files_equal([outpath vadoutfile], ...
+                                       [refpath vadoutfile], ...
+                                       'int8');
+        if ~equal_to_ref
+          nErr = nErr + 1;
+        end
+      end
+
+      [equal_to_ref, diffvector] = are_files_equal([outpath outfile], ...
+                                                   [refpath outfile], ...
+                                                   'int16');
+      if ~equal_to_ref
+        nErr = nErr + 1;
+      end
+
+      if strcmp(task, 'show')
+        % Assume the last init gives the sample rate of interest.
+        str_idx = strfind(result, 'Sample rate:');
+        fs = str2num(result(str_idx(end) + 13:str_idx(end) + 17));
+        fprintf('Using %d Hz\n', fs);
+
+        if exist([farFile])
+          spclab(fs, farFile, nearFile, [refpath outfile], ...
+              [outpath outfile], diffvector);
+          %spclab(fs, diffvector);
+        else
+          spclab(fs, nearFile, [refpath outfile], [outpath outfile], ...
+              diffvector);
+          %spclab(fs, diffvector);
+        end
+      end
+    end
+  end
+else
+
+  % Recurse into each subdirectory.
+  for i=3:length(dirs)
+    if dirs(i).isdir
+      [nErr, nCases] = recurseDir([inpath dirs(i).name '/'], outpath, ...
+          refpath,[outfile '_' dirs(i).name], progname, opt, ...
+          simulateMode, nErr, nCases, task, casenumber, legacy);
+    end
+  end
+end
+nErrOut = nErr;
+
+% ARE_FILES_EQUAL compares NEWFILE to REFFILE sample-by-sample.
+%   PRECISION is the fread precision string ('int8', 'int16', ...).
+%   Returns ARE_EQUAL and DIFFVECTOR = new - ref (both truncated to the
+%   shorter file when the lengths differ).
+%   NOTE(review): the DIFFVECTOR input argument is never read — it is
+%   overwritten unconditionally below.
+function [are_equal, diffvector] = ...
+    are_files_equal(newfile, reffile, precision, diffvector)
+
+are_equal = false;
+diffvector = 0;
+if ~exist(newfile,'file')
+  warning(['Output file ' newfile ' does not exist']);  
+  return
+end
+
+if ~exist(reffile,'file')
+  warning(['Reference file ' reffile ' does not exist']);  
+  return
+end
+
+fid = fopen(newfile,'rb');
+new = fread(fid,inf,precision);
+fclose(fid);
+
+fid = fopen(reffile,'rb');
+ref = fread(fid,inf,precision);
+fclose(fid);
+
+if length(new) ~= length(ref)
+  warning('Reference is not the same length as output');
+  minlength = min(length(new), length(ref));
+  new = new(1:minlength);
+  ref = ref(1:minlength);
+end
+diffvector = new - ref;
+
+if isequal(new, ref)
+  fprintf([newfile ' is bit-exact to reference\n']);
+  are_equal = true;
+else
+  if isempty(new)
+    warning([newfile ' is empty']);
+    return
+  end
+  % snrseg presumably prints its own segmental-SNR report; the returned
+  % value is unused here — confirm.
+  snr = snrseg(new,ref,80);
+  fprintf('\n');
+  are_equal = false;
+end
diff --git a/trunk/src/modules/audio_processing/test/process_test.cc b/trunk/src/modules/audio_processing/test/process_test.cc
new file mode 100644
index 0000000..2023ddb
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/process_test.cc
@@ -0,0 +1,964 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#ifdef WEBRTC_ANDROID
+#include <sys/stat.h>
+#endif
+
+#include "gtest/gtest.h"
+
+#include "audio_processing.h"
+#include "cpu_features_wrapper.h"
+#include "module_common_types.h"
+#include "scoped_ptr.h"
+#include "tick_util.h"
+#ifdef WEBRTC_ANDROID
+#include "external/webrtc/src/modules/audio_processing/debug.pb.h"
+#else
+#include "webrtc/audio_processing/debug.pb.h"
+#endif
+
+using webrtc::AudioFrame;
+using webrtc::AudioProcessing;
+using webrtc::EchoCancellation;
+using webrtc::GainControl;
+using webrtc::NoiseSuppression;
+using webrtc::scoped_array;
+using webrtc::TickInterval;
+using webrtc::TickTime;
+
+using webrtc::audioproc::Event;
+using webrtc::audioproc::Init;
+using webrtc::audioproc::ReverseStream;
+using webrtc::audioproc::Stream;
+
+namespace {
+// Returns true on success, false on error or end-of-file.
+// Reads one length-prefixed protobuf message from |file| into |msg|.
+bool ReadMessageFromFile(FILE* file,
+                        ::google::protobuf::MessageLite* msg) {
+  // The "wire format" for the size is little-endian.
+  // Assume process_test is running on a little-endian machine.
+  int32_t size = 0;
+  if (fread(&size, sizeof(int32_t), 1, file) != 1) {
+    return false;
+  }
+  // A non-positive size indicates a corrupt record; treat it like EOF.
+  if (size <= 0) {
+    return false;
+  }
+  const size_t usize = static_cast<size_t>(size);
+
+  scoped_array<char> array(new char[usize]);
+  if (fread(array.get(), sizeof(char), usize, file) != usize) {
+    return false;
+  }
+
+  msg->Clear();
+  return msg->ParseFromArray(array.get(), usize);
+}
+
+// Prints one statistic to stdout as "average, maximum, minimum".
+void PrintStat(const AudioProcessing::Statistic& stat) {
+  printf("%d, %d, %d\n", stat.average,
+                         stat.maximum,
+                         stat.minimum);
+}
+
+// Prints the command-line help for process_test to stdout.
+void usage() {
+  printf(
+  "Usage: process_test [options] [-pb PROTOBUF_FILE]\n"
+  "  [-ir REVERSE_FILE] [-i PRIMARY_FILE] [-o OUT_FILE]\n");
+  printf(
+  "process_test is a test application for AudioProcessing.\n\n"
+  "When a protobuf debug file is available, specify it with -pb.\n"
+  "Alternately, when -ir or -i is used, the specified files will be\n"
+  "processed directly in a simulation mode. Otherwise the full set of\n"
+  "legacy test files is expected to be present in the working directory.\n");
+  printf("\n");
+  printf("Options\n");
+  printf("General configuration (only used for the simulation mode):\n");
+  printf("  -fs SAMPLE_RATE_HZ\n");
+  printf("  -ch CHANNELS_IN CHANNELS_OUT\n");
+  printf("  -rch REVERSE_CHANNELS\n");
+  printf("\n");
+  printf("Component configuration:\n");
+  printf(
+  "All components are disabled by default. Each block below begins with a\n"
+  "flag to enable the component with default settings. The subsequent flags\n"
+  "in the block are used to provide configuration settings.\n");
+  printf("\n  -aec     Echo cancellation\n");
+  printf("  --drift_compensation\n");
+  printf("  --no_drift_compensation\n");
+  printf("  --no_echo_metrics\n");
+  printf("  --no_delay_logging\n");
+  printf("\n  -aecm    Echo control mobile\n");
+  printf("  --aecm_echo_path_in_file FILE\n");
+  printf("  --aecm_echo_path_out_file FILE\n");
+  printf("\n  -agc     Gain control\n");
+  printf("  --analog\n");
+  printf("  --adaptive_digital\n");
+  printf("  --fixed_digital\n");
+  printf("  --target_level LEVEL\n");
+  printf("  --compression_gain GAIN\n");
+  printf("  --limiter\n");
+  printf("  --no_limiter\n");
+  printf("\n  -hpf     High pass filter\n");
+  printf("\n  -ns      Noise suppression\n");
+  printf("  --ns_low\n");
+  printf("  --ns_moderate\n");
+  printf("  --ns_high\n");
+  printf("  --ns_very_high\n");
+  printf("\n  -vad     Voice activity detection\n");
+  printf("  --vad_out_file FILE\n");
+  printf("\n Level metrics (enabled by default)\n");
+  printf("  --no_level_metrics\n");
+  printf("\n");
+  printf("Modifiers:\n");
+  printf("  --noasm            Disable SSE optimization.\n");
+  printf("  --delay DELAY      Add DELAY ms to input value.\n");
+  printf("  --perf             Measure performance.\n");
+  printf("  --quiet            Suppress text output.\n");
+  printf("  --no_progress      Suppress progress.\n");
+  printf("  --debug_file FILE  Dump a debug recording.\n");
+}
+
+// void function for gtest.
+void void_main(int argc, char* argv[]) {
+  if (argc > 1 && strcmp(argv[1], "--help") == 0) {
+    usage();
+    return;
+  }
+
+  if (argc < 2) {
+    printf("Did you mean to run without arguments?\n");
+    printf("Try `process_test --help' for more information.\n\n");
+  }
+
+  AudioProcessing* apm = AudioProcessing::Create(0);
+  ASSERT_TRUE(apm != NULL);
+
+  const char* pb_filename = NULL;
+  const char* far_filename = NULL;
+  const char* near_filename = NULL;
+  const char* out_filename = NULL;
+  const char* vad_out_filename = NULL;
+  const char* aecm_echo_path_in_filename = NULL;
+  const char* aecm_echo_path_out_filename = NULL;
+
+  int32_t sample_rate_hz = 16000;
+  int32_t device_sample_rate_hz = 16000;
+
+  int num_capture_input_channels = 1;
+  int num_capture_output_channels = 1;
+  int num_render_channels = 1;
+
+  int samples_per_channel = sample_rate_hz / 100;
+
+  bool simulating = false;
+  bool perf_testing = false;
+  bool verbose = true;
+  bool progress = true;
+  int extra_delay_ms = 0;
+  //bool interleaved = true;
+
+  ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
+  for (int i = 1; i < argc; i++) {
+    if (strcmp(argv[i], "-pb") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify protobuf filename after -pb";
+      pb_filename = argv[i];
+
+    } else if (strcmp(argv[i], "-ir") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after -ir";
+      far_filename = argv[i];
+      simulating = true;
+
+    } else if (strcmp(argv[i], "-i") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after -i";
+      near_filename = argv[i];
+      simulating = true;
+
+    } else if (strcmp(argv[i], "-o") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after -o";
+      out_filename = argv[i];
+
+    } else if (strcmp(argv[i], "-fs") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify sample rate after -fs";
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &sample_rate_hz));
+      samples_per_channel = sample_rate_hz / 100;
+
+      ASSERT_EQ(apm->kNoError,
+                apm->set_sample_rate_hz(sample_rate_hz));
+
+    } else if (strcmp(argv[i], "-ch") == 0) {
+      i++;
+      ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_input_channels));
+      i++;
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));
+
+      ASSERT_EQ(apm->kNoError,
+                apm->set_num_channels(num_capture_input_channels,
+                                      num_capture_output_channels));
+
+    } else if (strcmp(argv[i], "-rch") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify number of channels after -rch";
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));
+
+      ASSERT_EQ(apm->kNoError,
+                apm->set_num_reverse_channels(num_render_channels));
+
+    } else if (strcmp(argv[i], "-aec") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->echo_cancellation()->enable_metrics(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->echo_cancellation()->enable_delay_logging(true));
+
+    } else if (strcmp(argv[i], "--drift_compensation") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
+      // TODO(ajm): this is enabled in the VQE test app by default. Investigate
+      //            why it can give better performance despite passing zeros.
+      ASSERT_EQ(apm->kNoError,
+                apm->echo_cancellation()->enable_drift_compensation(true));
+    } else if (strcmp(argv[i], "--no_drift_compensation") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->echo_cancellation()->enable_drift_compensation(false));
+
+    } else if (strcmp(argv[i], "--no_echo_metrics") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->echo_cancellation()->enable_metrics(false));
+
+    } else if (strcmp(argv[i], "--no_delay_logging") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->echo_cancellation()->enable_delay_logging(false));
+
+    } else if (strcmp(argv[i], "--no_level_metrics") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(false));
+
+    } else if (strcmp(argv[i], "-aecm") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));
+
+    } else if (strcmp(argv[i], "--aecm_echo_path_in_file") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_in_file";
+      aecm_echo_path_in_filename = argv[i];
+
+    } else if (strcmp(argv[i], "--aecm_echo_path_out_file") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_out_file";
+      aecm_echo_path_out_filename = argv[i];
+
+    } else if (strcmp(argv[i], "-agc") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+
+    } else if (strcmp(argv[i], "--analog") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->set_mode(GainControl::kAdaptiveAnalog));
+
+    } else if (strcmp(argv[i], "--adaptive_digital") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+
+    } else if (strcmp(argv[i], "--fixed_digital") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->set_mode(GainControl::kFixedDigital));
+
+    } else if (strcmp(argv[i], "--target_level") == 0) {
+      i++;
+      int level;
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &level));
+
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->set_target_level_dbfs(level));
+
+    } else if (strcmp(argv[i], "--compression_gain") == 0) {
+      i++;
+      int gain;
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &gain));
+
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->set_compression_gain_db(gain));
+
+    } else if (strcmp(argv[i], "--limiter") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->enable_limiter(true));
+
+    } else if (strcmp(argv[i], "--no_limiter") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+                apm->gain_control()->enable_limiter(false));
+
+    } else if (strcmp(argv[i], "-hpf") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->high_pass_filter()->Enable(true));
+
+    } else if (strcmp(argv[i], "-ns") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+
+    } else if (strcmp(argv[i], "--ns_low") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+          apm->noise_suppression()->set_level(NoiseSuppression::kLow));
+
+    } else if (strcmp(argv[i], "--ns_moderate") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+          apm->noise_suppression()->set_level(NoiseSuppression::kModerate));
+
+    } else if (strcmp(argv[i], "--ns_high") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+          apm->noise_suppression()->set_level(NoiseSuppression::kHigh));
+
+    } else if (strcmp(argv[i], "--ns_very_high") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+      ASSERT_EQ(apm->kNoError,
+          apm->noise_suppression()->set_level(NoiseSuppression::kVeryHigh));
+
+    } else if (strcmp(argv[i], "-vad") == 0) {
+      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
+
+    } else if (strcmp(argv[i], "--vad_out_file") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after --vad_out_file";
+      vad_out_filename = argv[i];
+
+    } else if (strcmp(argv[i], "--noasm") == 0) {
+      WebRtc_GetCPUInfo = WebRtc_GetCPUInfoNoASM;
+      // We need to reinitialize here if components have already been enabled.
+      ASSERT_EQ(apm->kNoError, apm->Initialize());
+
+    } else if (strcmp(argv[i], "--delay") == 0) {
+      i++;
+      ASSERT_EQ(1, sscanf(argv[i], "%d", &extra_delay_ms));
+
+    } else if (strcmp(argv[i], "--perf") == 0) {
+      perf_testing = true;
+
+    } else if (strcmp(argv[i], "--quiet") == 0) {
+      verbose = false;
+      progress = false;
+
+    } else if (strcmp(argv[i], "--no_progress") == 0) {
+      progress = false;
+
+    } else if (strcmp(argv[i], "--debug_file") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after --debug_file";
+      ASSERT_EQ(apm->kNoError, apm->StartDebugRecording(argv[i]));
+    } else {
+      FAIL() << "Unrecognized argument " << argv[i];
+    }
+  }
+  // If we're reading a protobuf file, ensure a simulation hasn't also
+  // been requested (which makes no sense...)
+  ASSERT_FALSE(pb_filename && simulating);
+
+  if (verbose) {
+    printf("Sample rate: %d Hz\n", sample_rate_hz);
+    printf("Primary channels: %d (in), %d (out)\n",
+           num_capture_input_channels,
+           num_capture_output_channels);
+    printf("Reverse channels: %d \n", num_render_channels);
+  }
+
+  const char far_file_default[] = "apm_far.pcm";
+  const char near_file_default[] = "apm_near.pcm";
+  const char out_file_default[] = "out.pcm";
+  const char event_filename[] = "apm_event.dat";
+  const char delay_filename[] = "apm_delay.dat";
+  const char drift_filename[] = "apm_drift.dat";
+  const char vad_file_default[] = "vad_out.dat";
+
+  if (!simulating) {
+    far_filename = far_file_default;
+    near_filename = near_file_default;
+  }
+
+  if (!out_filename) {
+    out_filename = out_file_default;
+  }
+
+  if (!vad_out_filename) {
+    vad_out_filename = vad_file_default;
+  }
+
+  FILE* pb_file = NULL;
+  FILE* far_file = NULL;
+  FILE* near_file = NULL;
+  FILE* out_file = NULL;
+  FILE* event_file = NULL;
+  FILE* delay_file = NULL;
+  FILE* drift_file = NULL;
+  FILE* vad_out_file = NULL;
+  FILE* aecm_echo_path_in_file = NULL;
+  FILE* aecm_echo_path_out_file = NULL;
+
+  if (pb_filename) {
+    pb_file = fopen(pb_filename, "rb");
+    ASSERT_TRUE(NULL != pb_file) << "Unable to open protobuf file "
+                                 << pb_filename;
+  } else {
+    if (far_filename) {
+      far_file = fopen(far_filename, "rb");
+      ASSERT_TRUE(NULL != far_file) << "Unable to open far-end audio file "
+                                    << far_filename;
+    }
+
+    near_file = fopen(near_filename, "rb");
+    ASSERT_TRUE(NULL != near_file) << "Unable to open near-end audio file "
+                                   << near_filename;
+    if (!simulating) {
+      event_file = fopen(event_filename, "rb");
+      ASSERT_TRUE(NULL != event_file) << "Unable to open event file "
+                                      << event_filename;
+
+      delay_file = fopen(delay_filename, "rb");
+      ASSERT_TRUE(NULL != delay_file) << "Unable to open buffer file "
+                                      << delay_filename;
+
+      drift_file = fopen(drift_filename, "rb");
+      ASSERT_TRUE(NULL != drift_file) << "Unable to open drift file "
+                                      << drift_filename;
+    }
+  }
+
+  out_file = fopen(out_filename, "wb");
+  ASSERT_TRUE(NULL != out_file) << "Unable to open output audio file "
+                                << out_filename;
+
+  int near_size_bytes = 0;
+  if (pb_file) {
+    struct stat st;
+    stat(pb_filename, &st);
+    // Crude estimate, but should be good enough.
+    near_size_bytes = st.st_size / 3;
+  } else {
+    struct stat st;
+    stat(near_filename, &st);
+    near_size_bytes = st.st_size;
+  }
+
+  if (apm->voice_detection()->is_enabled()) {
+    vad_out_file = fopen(vad_out_filename, "wb");
+    ASSERT_TRUE(NULL != vad_out_file) << "Unable to open VAD output file "
+                                      << vad_out_file;
+  }
+
+  if (aecm_echo_path_in_filename != NULL) {
+    aecm_echo_path_in_file = fopen(aecm_echo_path_in_filename, "rb");
+    ASSERT_TRUE(NULL != aecm_echo_path_in_file) << "Unable to open file "
+                                                << aecm_echo_path_in_filename;
+
+    const size_t path_size =
+        apm->echo_control_mobile()->echo_path_size_bytes();
+    scoped_array<char> echo_path(new char[path_size]);
+    ASSERT_EQ(path_size, fread(echo_path.get(),
+                               sizeof(char),
+                               path_size,
+                               aecm_echo_path_in_file));
+    EXPECT_EQ(apm->kNoError,
+              apm->echo_control_mobile()->SetEchoPath(echo_path.get(),
+                                                      path_size));
+    fclose(aecm_echo_path_in_file);
+    aecm_echo_path_in_file = NULL;
+  }
+
+  if (aecm_echo_path_out_filename != NULL) {
+    aecm_echo_path_out_file = fopen(aecm_echo_path_out_filename, "wb");
+    ASSERT_TRUE(NULL != aecm_echo_path_out_file) << "Unable to open file "
+                                                 << aecm_echo_path_out_filename;
+  }
+
+  size_t read_count = 0;
+  int reverse_count = 0;
+  int primary_count = 0;
+  int near_read_bytes = 0;
+  TickInterval acc_ticks;
+
+  AudioFrame far_frame;
+  AudioFrame near_frame;
+
+  int delay_ms = 0;
+  int drift_samples = 0;
+  int capture_level = 127;
+  int8_t stream_has_voice = 0;
+
+  TickTime t0 = TickTime::Now();
+  TickTime t1 = t0;
+  WebRtc_Word64 max_time_us = 0;
+  WebRtc_Word64 max_time_reverse_us = 0;
+  WebRtc_Word64 min_time_us = 1e6;
+  WebRtc_Word64 min_time_reverse_us = 1e6;
+
+  // TODO(ajm): Ideally we would refactor this block into separate functions,
+  //            but for now we want to share the variables.
+  if (pb_file) {
+    Event event_msg;
+    while (ReadMessageFromFile(pb_file, &event_msg)) {
+      std::ostringstream trace_stream;
+      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
+                   << primary_count << " (primary)";
+      SCOPED_TRACE(trace_stream.str());
+
+      if (event_msg.type() == Event::INIT) {
+        ASSERT_TRUE(event_msg.has_init());
+        const Init msg = event_msg.init();
+
+        ASSERT_TRUE(msg.has_sample_rate());
+        ASSERT_EQ(apm->kNoError,
+            apm->set_sample_rate_hz(msg.sample_rate()));
+
+        ASSERT_TRUE(msg.has_device_sample_rate());
+        ASSERT_EQ(apm->kNoError,
+                  apm->echo_cancellation()->set_device_sample_rate_hz(
+                      msg.device_sample_rate()));
+
+        ASSERT_TRUE(msg.has_num_input_channels());
+        ASSERT_TRUE(msg.has_num_output_channels());
+        ASSERT_EQ(apm->kNoError,
+            apm->set_num_channels(msg.num_input_channels(),
+                                  msg.num_output_channels()));
+
+        ASSERT_TRUE(msg.has_num_reverse_channels());
+        ASSERT_EQ(apm->kNoError,
+            apm->set_num_reverse_channels(msg.num_reverse_channels()));
+
+        samples_per_channel = msg.sample_rate() / 100;
+        far_frame._frequencyInHz = msg.sample_rate();
+        far_frame._payloadDataLengthInSamples = samples_per_channel;
+        far_frame._audioChannel = msg.num_reverse_channels();
+        near_frame._frequencyInHz = msg.sample_rate();
+        near_frame._payloadDataLengthInSamples = samples_per_channel;
+
+        if (verbose) {
+          printf("Init at frame: %d (primary), %d (reverse)\n",
+              primary_count, reverse_count);
+          printf("  Sample rate: %d Hz\n", msg.sample_rate());
+          printf("  Primary channels: %d (in), %d (out)\n",
+                 msg.num_input_channels(),
+                 msg.num_output_channels());
+          printf("  Reverse channels: %d \n", msg.num_reverse_channels());
+        }
+
+      } else if (event_msg.type() == Event::REVERSE_STREAM) {
+        ASSERT_TRUE(event_msg.has_reverse_stream());
+        const ReverseStream msg = event_msg.reverse_stream();
+        reverse_count++;
+
+        ASSERT_TRUE(msg.has_data());
+        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
+            far_frame._audioChannel, msg.data().size());
+        memcpy(far_frame._payloadData, msg.data().data(), msg.data().size());
+
+        if (perf_testing) {
+          t0 = TickTime::Now();
+        }
+
+        ASSERT_EQ(apm->kNoError,
+                  apm->AnalyzeReverseStream(&far_frame));
+
+        if (perf_testing) {
+          t1 = TickTime::Now();
+          TickInterval tick_diff = t1 - t0;
+          acc_ticks += tick_diff;
+          if (tick_diff.Microseconds() > max_time_reverse_us) {
+            max_time_reverse_us = tick_diff.Microseconds();
+          }
+          if (tick_diff.Microseconds() < min_time_reverse_us) {
+            min_time_reverse_us = tick_diff.Microseconds();
+          }
+        }
+
+      } else if (event_msg.type() == Event::STREAM) {
+        ASSERT_TRUE(event_msg.has_stream());
+        const Stream msg = event_msg.stream();
+        primary_count++;
+
+        // ProcessStream could have changed this for the output frame.
+        near_frame._audioChannel = apm->num_input_channels();
+
+        ASSERT_TRUE(msg.has_input_data());
+        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
+            near_frame._audioChannel, msg.input_data().size());
+        memcpy(near_frame._payloadData,
+               msg.input_data().data(),
+               msg.input_data().size());
+
+        near_read_bytes += msg.input_data().size();
+        if (progress && primary_count % 100 == 0) {
+          printf("%.0f%% complete\r",
+              (near_read_bytes * 100.0) / near_size_bytes);
+          fflush(stdout);
+        }
+
+        if (perf_testing) {
+          t0 = TickTime::Now();
+        }
+
+        ASSERT_EQ(apm->kNoError,
+                  apm->gain_control()->set_stream_analog_level(msg.level()));
+        ASSERT_EQ(apm->kNoError,
+                  apm->set_stream_delay_ms(msg.delay() + extra_delay_ms));
+        ASSERT_EQ(apm->kNoError,
+            apm->echo_cancellation()->set_stream_drift_samples(msg.drift()));
+
+        int err = apm->ProcessStream(&near_frame);
+        if (err == apm->kBadStreamParameterWarning) {
+          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
+        }
+        ASSERT_TRUE(err == apm->kNoError ||
+                    err == apm->kBadStreamParameterWarning);
+        ASSERT_TRUE(near_frame._audioChannel == apm->num_output_channels());
+
+        capture_level = apm->gain_control()->stream_analog_level();
+
+        stream_has_voice =
+            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
+        if (vad_out_file != NULL) {
+          ASSERT_EQ(1u, fwrite(&stream_has_voice,
+                               sizeof(stream_has_voice),
+                               1,
+                               vad_out_file));
+        }
+
+        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
+          ASSERT_EQ(msg.level(), capture_level);
+        }
+
+        if (perf_testing) {
+          t1 = TickTime::Now();
+          TickInterval tick_diff = t1 - t0;
+          acc_ticks += tick_diff;
+          if (tick_diff.Microseconds() > max_time_us) {
+            max_time_us = tick_diff.Microseconds();
+          }
+          if (tick_diff.Microseconds() < min_time_us) {
+            min_time_us = tick_diff.Microseconds();
+          }
+        }
+
+        size_t size = samples_per_channel * near_frame._audioChannel;
+        ASSERT_EQ(size, fwrite(near_frame._payloadData,
+                               sizeof(int16_t),
+                               size,
+                               out_file));
+      }
+    }
+
+    ASSERT_TRUE(feof(pb_file));
+
+  } else {
+    enum Events {
+      kInitializeEvent,
+      kRenderEvent,
+      kCaptureEvent,
+      kResetEventDeprecated
+    };
+    int16_t event = 0;
+    while (simulating || feof(event_file) == 0) {
+      std::ostringstream trace_stream;
+      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
+                   << primary_count << " (primary)";
+      SCOPED_TRACE(trace_stream.str());
+
+      if (simulating) {
+        if (far_file == NULL) {
+          event = kCaptureEvent;
+        } else {
+          if (event == kRenderEvent) {
+            event = kCaptureEvent;
+          } else {
+            event = kRenderEvent;
+          }
+        }
+      } else {
+        read_count = fread(&event, sizeof(event), 1, event_file);
+        if (read_count != 1) {
+          break;
+        }
+      }
+
+      far_frame._frequencyInHz = sample_rate_hz;
+      far_frame._payloadDataLengthInSamples = samples_per_channel;
+      far_frame._audioChannel = num_render_channels;
+      near_frame._frequencyInHz = sample_rate_hz;
+      near_frame._payloadDataLengthInSamples = samples_per_channel;
+
+      if (event == kInitializeEvent || event == kResetEventDeprecated) {
+        ASSERT_EQ(1u,
+            fread(&sample_rate_hz, sizeof(sample_rate_hz), 1, event_file));
+        samples_per_channel = sample_rate_hz / 100;
+
+        ASSERT_EQ(1u,
+            fread(&device_sample_rate_hz,
+                  sizeof(device_sample_rate_hz),
+                  1,
+                  event_file));
+
+        ASSERT_EQ(apm->kNoError,
+            apm->set_sample_rate_hz(sample_rate_hz));
+
+        ASSERT_EQ(apm->kNoError,
+                  apm->echo_cancellation()->set_device_sample_rate_hz(
+                      device_sample_rate_hz));
+
+        far_frame._frequencyInHz = sample_rate_hz;
+        far_frame._payloadDataLengthInSamples = samples_per_channel;
+        far_frame._audioChannel = num_render_channels;
+        near_frame._frequencyInHz = sample_rate_hz;
+        near_frame._payloadDataLengthInSamples = samples_per_channel;
+
+        if (verbose) {
+          printf("Init at frame: %d (primary), %d (reverse)\n",
+              primary_count, reverse_count);
+          printf("  Sample rate: %d Hz\n", sample_rate_hz);
+        }
+
+      } else if (event == kRenderEvent) {
+        reverse_count++;
+
+        size_t size = samples_per_channel * num_render_channels;
+        read_count = fread(far_frame._payloadData,
+                           sizeof(int16_t),
+                           size,
+                           far_file);
+
+        if (simulating) {
+          if (read_count != size) {
+            // Read an equal amount from the near file to avoid errors due to
+            // not reaching end-of-file.
+            EXPECT_EQ(0, fseek(near_file, read_count * sizeof(int16_t),
+                      SEEK_CUR));
+            break; // This is expected.
+          }
+        } else {
+          ASSERT_EQ(size, read_count);
+        }
+
+        if (perf_testing) {
+          t0 = TickTime::Now();
+        }
+
+        ASSERT_EQ(apm->kNoError,
+                  apm->AnalyzeReverseStream(&far_frame));
+
+        if (perf_testing) {
+          t1 = TickTime::Now();
+          TickInterval tick_diff = t1 - t0;
+          acc_ticks += tick_diff;
+          if (tick_diff.Microseconds() > max_time_reverse_us) {
+            max_time_reverse_us = tick_diff.Microseconds();
+          }
+          if (tick_diff.Microseconds() < min_time_reverse_us) {
+            min_time_reverse_us = tick_diff.Microseconds();
+          }
+        }
+
+      } else if (event == kCaptureEvent) {
+        primary_count++;
+        near_frame._audioChannel = num_capture_input_channels;
+
+        size_t size = samples_per_channel * num_capture_input_channels;
+        read_count = fread(near_frame._payloadData,
+                           sizeof(int16_t),
+                           size,
+                           near_file);
+
+        near_read_bytes += read_count * sizeof(int16_t);
+        if (progress && primary_count % 100 == 0) {
+          printf("%.0f%% complete\r",
+              (near_read_bytes * 100.0) / near_size_bytes);
+          fflush(stdout);
+        }
+        if (simulating) {
+          if (read_count != size) {
+            break; // This is expected.
+          }
+
+          delay_ms = 0;
+          drift_samples = 0;
+        } else {
+          ASSERT_EQ(size, read_count);
+
+          // TODO(ajm): sizeof(delay_ms) for current files?
+          ASSERT_EQ(1u,
+              fread(&delay_ms, 2, 1, delay_file));
+          ASSERT_EQ(1u,
+              fread(&drift_samples, sizeof(drift_samples), 1, drift_file));
+        }
+
+        if (perf_testing) {
+          t0 = TickTime::Now();
+        }
+
+        // TODO(ajm): fake an analog gain while simulating.
+
+        int capture_level_in = capture_level;
+        ASSERT_EQ(apm->kNoError,
+                  apm->gain_control()->set_stream_analog_level(capture_level));
+        ASSERT_EQ(apm->kNoError,
+                  apm->set_stream_delay_ms(delay_ms + extra_delay_ms));
+        ASSERT_EQ(apm->kNoError,
+            apm->echo_cancellation()->set_stream_drift_samples(drift_samples));
+
+        int err = apm->ProcessStream(&near_frame);
+        if (err == apm->kBadStreamParameterWarning) {
+          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
+        }
+        ASSERT_TRUE(err == apm->kNoError ||
+                    err == apm->kBadStreamParameterWarning);
+        ASSERT_TRUE(near_frame._audioChannel == apm->num_output_channels());
+
+        capture_level = apm->gain_control()->stream_analog_level();
+
+        stream_has_voice =
+            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
+        if (vad_out_file != NULL) {
+          ASSERT_EQ(1u, fwrite(&stream_has_voice,
+                               sizeof(stream_has_voice),
+                               1,
+                               vad_out_file));
+        }
+
+        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
+          ASSERT_EQ(capture_level_in, capture_level);
+        }
+
+        if (perf_testing) {
+          t1 = TickTime::Now();
+          TickInterval tick_diff = t1 - t0;
+          acc_ticks += tick_diff;
+          if (tick_diff.Microseconds() > max_time_us) {
+            max_time_us = tick_diff.Microseconds();
+          }
+          if (tick_diff.Microseconds() < min_time_us) {
+            min_time_us = tick_diff.Microseconds();
+          }
+        }
+
+        size = samples_per_channel * near_frame._audioChannel;
+        ASSERT_EQ(size, fwrite(near_frame._payloadData,
+                               sizeof(int16_t),
+                               size,
+                               out_file));
+      }
+      else {
+        FAIL() << "Event " << event << " is unrecognized";
+      }
+    }
+  }
+  printf("100%% complete\r");
+
+  if (aecm_echo_path_out_file != NULL) {
+    const size_t path_size =
+        apm->echo_control_mobile()->echo_path_size_bytes();
+    scoped_array<char> echo_path(new char[path_size]);
+    apm->echo_control_mobile()->GetEchoPath(echo_path.get(), path_size);
+    ASSERT_EQ(path_size, fwrite(echo_path.get(),
+                                sizeof(char),
+                                path_size,
+                                aecm_echo_path_out_file));
+    fclose(aecm_echo_path_out_file);
+    aecm_echo_path_out_file = NULL;
+  }
+
+  if (verbose) {
+    printf("\nProcessed frames: %d (primary), %d (reverse)\n",
+        primary_count, reverse_count);
+
+    if (apm->level_estimator()->is_enabled()) {
+      printf("\n--Level metrics--\n");
+      printf("RMS: %d dBFS\n", -apm->level_estimator()->RMS());
+    }
+    if (apm->echo_cancellation()->are_metrics_enabled()) {
+      EchoCancellation::Metrics metrics;
+      apm->echo_cancellation()->GetMetrics(&metrics);
+      printf("\n--Echo metrics--\n");
+      printf("(avg, max, min)\n");
+      printf("ERL:  ");
+      PrintStat(metrics.echo_return_loss);
+      printf("ERLE: ");
+      PrintStat(metrics.echo_return_loss_enhancement);
+      printf("ANLP: ");
+      PrintStat(metrics.a_nlp);
+    }
+    if (apm->echo_cancellation()->is_delay_logging_enabled()) {
+      int median = 0;
+      int std = 0;
+      apm->echo_cancellation()->GetDelayMetrics(&median, &std);
+      printf("\n--Delay metrics--\n");
+      printf("Median:             %3d\n", median);
+      printf("Standard deviation: %3d\n", std);
+    }
+  }
+
+  if (!pb_file) {
+    int8_t temp_int8;
+    if (far_file) {
+      read_count = fread(&temp_int8, sizeof(temp_int8), 1, far_file);
+      EXPECT_NE(0, feof(far_file)) << "Far-end file not fully processed";
+    }
+
+    read_count = fread(&temp_int8, sizeof(temp_int8), 1, near_file);
+    EXPECT_NE(0, feof(near_file)) << "Near-end file not fully processed";
+
+    if (!simulating) {
+      read_count = fread(&temp_int8, sizeof(temp_int8), 1, event_file);
+      EXPECT_NE(0, feof(event_file)) << "Event file not fully processed";
+      read_count = fread(&temp_int8, sizeof(temp_int8), 1, delay_file);
+      EXPECT_NE(0, feof(delay_file)) << "Delay file not fully processed";
+      read_count = fread(&temp_int8, sizeof(temp_int8), 1, drift_file);
+      EXPECT_NE(0, feof(drift_file)) << "Drift file not fully processed";
+    }
+  }
+
+  if (perf_testing) {
+    if (primary_count > 0) {
+      WebRtc_Word64 exec_time = acc_ticks.Milliseconds();
+      printf("\nTotal time: %.3f s, file time: %.2f s\n",
+        exec_time * 0.001, primary_count * 0.01);
+      printf("Time per frame: %.3f ms (average), %.3f ms (max),"
+             " %.3f ms (min)\n",
+          (exec_time * 1.0) / primary_count,
+          (max_time_us + max_time_reverse_us) / 1000.0,
+          (min_time_us + min_time_reverse_us) / 1000.0);
+    } else {
+      printf("Warning: no capture frames\n");
+    }
+  }
+
+  AudioProcessing::Destroy(apm);
+  apm = NULL;
+}
+}  // namespace
+
+// Entry point: runs the whole audio-processing test via void_main() and
+// then releases protobuf's global allocations so that Valgrind reports
+// stay free of one-time library noise.
+int main(int argc, char* argv[]) {
+  void_main(argc, argv);
+
+  // Optional, but removes memory leak noise from Valgrind.
+  google::protobuf::ShutdownProtobufLibrary();
+  return 0;
+}
diff --git a/trunk/src/modules/audio_processing/test/unit_test.cc b/trunk/src/modules/audio_processing/test/unit_test.cc
new file mode 100644
index 0000000..4fa5d7c
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/unit_test.cc
@@ -0,0 +1,1322 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "gtest/gtest.h"
+
+#include "audio_processing.h"
+#include "event_wrapper.h"
+#include "module_common_types.h"
+#include "scoped_ptr.h"
+#include "signal_processing_library.h"
+#include "testsupport/fileutils.h"
+#include "thread_wrapper.h"
+#include "trace.h"
+#ifdef WEBRTC_ANDROID
+#include "external/webrtc/src/modules/audio_processing/test/unittest.pb.h"
+#else
+#include "webrtc/audio_processing/unittest.pb.h"
+#endif
+
+#if (defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)) || \
+    (defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && !defined(NDEBUG))
+#  define WEBRTC_AUDIOPROC_BIT_EXACT
+#endif
+
+using webrtc::AudioProcessing;
+using webrtc::AudioFrame;
+using webrtc::GainControl;
+using webrtc::NoiseSuppression;
+using webrtc::EchoCancellation;
+using webrtc::EventWrapper;
+using webrtc::scoped_array;
+using webrtc::Trace;
+using webrtc::LevelEstimator;
+using webrtc::EchoCancellation;
+using webrtc::EchoControlMobile;
+using webrtc::VoiceDetection;
+
+namespace {
+// When false, this will compare the output data with the results stored to
+// file. This is the typical case. When the file should be updated, it can
+// be set to true with the command-line switch --write_ref_data.
+bool write_ref_data = false;
+
+// Shared gtest fixture for the AudioProcessing unit tests. Owns the APM
+// instance under test, a primary (near-end) and a reverse (far-end) audio
+// frame, and the far/near/output files opened by Init().
+class ApmTest : public ::testing::Test {
+ protected:
+  ApmTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  // Opens a process-wide trace file once for the entire test case.
+  static void SetUpTestCase() {
+    Trace::CreateTrace();
+    std::string trace_filename = webrtc::test::OutputPath() +
+      "audioproc_trace.txt";
+    ASSERT_EQ(0, Trace::SetTraceFile(trace_filename.c_str()));
+  }
+
+  // Releases the trace opened in SetUpTestCase().
+  static void TearDownTestCase() {
+    Trace::ReturnTrace();
+  }
+
+  // (Re)configures the APM and the test frames, and (re)opens the input
+  // (and optionally output) files to match the given configuration.
+  void Init(int sample_rate_hz, int num_reverse_channels,
+            int num_input_channels, int num_output_channels,
+            bool open_output_file);
+  // Builds the path of a stereo PCM resource file for |sample_rate_hz|.
+  std::string ResourceFilePath(std::string name, int sample_rate_hz);
+  // Builds an output filename encoding the full channel configuration.
+  std::string OutputFilePath(std::string name,
+                             int sample_rate_hz,
+                             int num_reverse_channels,
+                             int num_input_channels,
+                             int num_output_channels);
+
+  const std::string output_path_;    // Directory for generated output files.
+  const std::string ref_path_;       // Directory holding reference data.
+  const std::string ref_filename_;   // Profile-specific reference protobuf.
+  webrtc::AudioProcessing* apm_;     // Instance under test; owned.
+  webrtc::AudioFrame* frame_;        // Primary (near-end) frame; owned.
+  webrtc::AudioFrame* revframe_;     // Reverse (far-end) frame; owned.
+  FILE* far_file_;
+  FILE* near_file_;
+  FILE* out_file_;
+};
+
+// Initializes the test paths and NULLs all owned resources. The reference
+// data filename is chosen at compile time by the audioproc profile macro;
+// ref_path_ is declared before ref_filename_, so it is safe to use here.
+// NOTE(review): if neither WEBRTC_AUDIOPROC_FIXED_PROFILE nor
+// WEBRTC_AUDIOPROC_FLOAT_PROFILE is defined, ref_filename_ has no
+// initializer and this will not compile — confirm the build always sets one.
+ApmTest::ApmTest()
+    : output_path_(webrtc::test::OutputPath()),
+      ref_path_(webrtc::test::ProjectRootPath() +
+                "test/data/audio_processing/"),
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+      ref_filename_(ref_path_ + "output_data_fixed.pb"),
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+      ref_filename_(ref_path_ + "output_data_float.pb"),
+#endif
+      apm_(NULL),
+      frame_(NULL),
+      revframe_(NULL),
+      far_file_(NULL),
+      near_file_(NULL),
+      out_file_(NULL) {}
+
+// Creates the APM instance under test plus the primary and reverse frames,
+// then applies the default configuration used by most tests.
+void ApmTest::SetUp() {
+  apm_ = AudioProcessing::Create(0);
+  ASSERT_TRUE(NULL != apm_);
+
+  revframe_ = new AudioFrame();
+  frame_ = new AudioFrame();
+
+  // Default: 32 kHz, stereo everywhere, no output file.
+  Init(32000, 2, 2, 2, false);
+}
+
+// Releases everything SetUp()/Init() created. Frames can be deleted
+// unconditionally (operator delete accepts NULL); files must be guarded
+// because fclose(NULL) is undefined.
+void ApmTest::TearDown() {
+  delete frame_;
+  frame_ = NULL;
+  delete revframe_;
+  revframe_ = NULL;
+
+  // Assert that each close succeeds, to catch buffered-write failures.
+  if (far_file_ != NULL) {
+    ASSERT_EQ(0, fclose(far_file_));
+  }
+  far_file_ = NULL;
+
+  if (near_file_ != NULL) {
+    ASSERT_EQ(0, fclose(near_file_));
+  }
+  near_file_ = NULL;
+
+  if (out_file_ != NULL) {
+    ASSERT_EQ(0, fclose(out_file_));
+  }
+  out_file_ = NULL;
+
+  if (apm_ != NULL) {
+    AudioProcessing::Destroy(apm_);
+  }
+  apm_ = NULL;
+}
+
+// Maps a short name (e.g. "far", "near") and a sample rate to the path of
+// the matching PCM resource file; all resource files are stereo and are
+// named by their rate in kHz, e.g. "far16_stereo.pcm".
+std::string ApmTest::ResourceFilePath(std::string name, int sample_rate_hz) {
+  std::ostringstream file_stream;
+  file_stream << name << sample_rate_hz / 1000 << "_stereo";
+  return webrtc::test::ResourcePath(file_stream.str(), "pcm");
+}
+
+// Builds an output filename that encodes the full channel configuration,
+// e.g. "out16_2r2i_stereo.pcm" for 16 kHz, 2 reverse and 2 input channels
+// with stereo output. Only mono and stereo outputs are supported.
+std::string ApmTest::OutputFilePath(std::string name,
+                                    int sample_rate_hz,
+                                    int num_reverse_channels,
+                                    int num_input_channels,
+                                    int num_output_channels) {
+  std::ostringstream file_stream;
+  file_stream << name << sample_rate_hz / 1000 << "_"
+              << num_reverse_channels << "r"
+              << num_input_channels << "i" << "_";
+  switch (num_output_channels) {
+    case 1:
+      file_stream << "mono";
+      break;
+    case 2:
+      file_stream << "stereo";
+      break;
+    default:
+      assert(false);
+      return "";
+  }
+  file_stream << ".pcm";
+
+  return output_path_ + file_stream.str();
+}
+
+
+// (Re)configures |apm_| and the test frames for the requested sample rate
+// and channel counts, then (re)opens the far/near resource files — and
+// optionally the output file — to match. Safe to call repeatedly inside a
+// test; previously opened files are closed first.
+void ApmTest::Init(int sample_rate_hz, int num_reverse_channels,
+                   int num_input_channels, int num_output_channels,
+                   bool open_output_file) {
+  ASSERT_EQ(apm_->kNoError, apm_->Initialize());
+
+  // Handles error checking of the parameters as well. No need to repeat it.
+  ASSERT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(sample_rate_hz));
+  ASSERT_EQ(apm_->kNoError, apm_->set_num_channels(num_input_channels,
+                                                   num_output_channels));
+  ASSERT_EQ(apm_->kNoError,
+            apm_->set_num_reverse_channels(num_reverse_channels));
+
+  // We always use 10 ms frames.
+  const int samples_per_channel = sample_rate_hz / 100;
+  frame_->_payloadDataLengthInSamples = samples_per_channel;
+  frame_->_audioChannel = num_input_channels;
+  frame_->_frequencyInHz = sample_rate_hz;
+  revframe_->_payloadDataLengthInSamples = samples_per_channel;
+  revframe_->_audioChannel = num_reverse_channels;
+  revframe_->_frequencyInHz = sample_rate_hz;
+
+  // Close any file left open by a previous Init() before reopening the
+  // resource at the new sample rate.
+  if (far_file_) {
+    ASSERT_EQ(0, fclose(far_file_));
+  }
+  std::string filename = ResourceFilePath("far", sample_rate_hz);
+  far_file_ = fopen(filename.c_str(), "rb");
+  ASSERT_TRUE(far_file_ != NULL) << "Could not open file " <<
+      filename << "\n";
+
+  if (near_file_) {
+    ASSERT_EQ(0, fclose(near_file_));
+  }
+  filename = ResourceFilePath("near", sample_rate_hz);
+  near_file_ = fopen(filename.c_str(), "rb");
+  ASSERT_TRUE(near_file_ != NULL) << "Could not open file " <<
+        filename << "\n";
+
+  // Only tests that compare or record output ask for the output file; its
+  // name encodes the full channel configuration.
+  if (open_output_file) {
+    if (out_file_) {
+      ASSERT_EQ(0, fclose(out_file_));
+    }
+    filename = OutputFilePath("out", sample_rate_hz, num_reverse_channels,
+                              num_input_channels, num_output_channels);
+    out_file_ = fopen(filename.c_str(), "wb");
+    ASSERT_TRUE(out_file_ != NULL) << "Could not open file " <<
+          filename << "\n";
+  }
+}
+
// Downmixes interleaved stereo PCM to mono by averaging each L/R pair
// (arithmetic right shift by one, matching fixed-point convention).
void MixStereoToMono(const int16_t* stereo,
                     int16_t* mono,
                     int samples_per_channel) {
  for (int n = 0; n < samples_per_channel; n++) {
    const int32_t left = static_cast<int32_t>(stereo[2 * n]);
    const int32_t right = static_cast<int32_t>(stereo[2 * n + 1]);
    mono[n] = static_cast<int16_t>((left + right) >> 1);
  }
}
+
// Returns the larger of |a| and |b| (|b| when they compare equal).
template <class T>
T MaxValue(T a, T b) {
  if (a > b) {
    return a;
  }
  return b;
}
+
// Returns the absolute value of |a|. (Negating the minimum value of a
// signed integer type overflows; callers pass audio samples well in range.)
template <class T>
T AbsValue(T a) {
  if (a > 0) {
    return a;
  }
  return -a;
}
+
+void SetFrameTo(AudioFrame* frame, int16_t value) {
+  for (int i = 0; i < frame->_payloadDataLengthInSamples * frame->_audioChannel;
+      ++i) {
+    frame->_payloadData[i] = value;
+  }
+}
+
+int16_t MaxAudioFrame(const AudioFrame& frame) {
+  const int length = frame._payloadDataLengthInSamples * frame._audioChannel;
+  int16_t max = AbsValue(frame._payloadData[0]);
+  for (int i = 1; i < length; i++) {
+    max = MaxValue(max, AbsValue(frame._payloadData[i]));
+  }
+
+  return max;
+}
+
+bool FrameDataAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
+  if (frame1._payloadDataLengthInSamples !=
+      frame2._payloadDataLengthInSamples) {
+    return false;
+  }
+  if (frame1._audioChannel !=
+      frame2._audioChannel) {
+    return false;
+  }
+  if (memcmp(frame1._payloadData, frame2._payloadData,
+             frame1._payloadDataLengthInSamples * frame1._audioChannel *
+               sizeof(int16_t))) {
+    return false;
+  }
+  return true;
+}
+
// Compares each field of a live APM statistic against the corresponding
// value stored in the protobuf reference message.
void TestStats(const AudioProcessing::Statistic& test,
               const webrtc::audioproc::Test::Statistic& reference) {
  EXPECT_EQ(reference.instant(), test.instant);
  EXPECT_EQ(reference.average(), test.average);
  EXPECT_EQ(reference.maximum(), test.maximum);
  EXPECT_EQ(reference.minimum(), test.minimum);
}
+
// Copies a live APM statistic into the protobuf message used to record
// reference output (the inverse of TestStats()).
void WriteStatsMessage(const AudioProcessing::Statistic& output,
                       webrtc::audioproc::Test::Statistic* message) {
  message->set_instant(output.instant);
  message->set_average(output.average);
  message->set_maximum(output.maximum);
  message->set_minimum(output.minimum);
}
+
+void WriteMessageLiteToFile(const std::string filename,
+                            const ::google::protobuf::MessageLite& message) {
+  FILE* file = fopen(filename.c_str(), "wb");
+  ASSERT_TRUE(file != NULL) << "Could not open " << filename;
+  int size = message.ByteSize();
+  ASSERT_GT(size, 0);
+  unsigned char* array = new unsigned char[size];
+  ASSERT_TRUE(message.SerializeToArray(array, size));
+
+  ASSERT_EQ(1u, fwrite(&size, sizeof(int), 1, file));
+  ASSERT_EQ(static_cast<size_t>(size),
+      fwrite(array, sizeof(unsigned char), size, file));
+
+  delete [] array;
+  fclose(file);
+}
+
+void ReadMessageLiteFromFile(const std::string filename,
+                             ::google::protobuf::MessageLite* message) {
+  assert(message != NULL);
+
+  FILE* file = fopen(filename.c_str(), "rb");
+  ASSERT_TRUE(file != NULL) << "Could not open " << filename;
+  int size = 0;
+  ASSERT_EQ(1u, fread(&size, sizeof(int), 1, file));
+  ASSERT_GT(size, 0);
+  unsigned char* array = new unsigned char[size];
+  ASSERT_EQ(static_cast<size_t>(size),
+      fread(array, sizeof(unsigned char), size, file));
+
+  ASSERT_TRUE(message->ParseFromArray(array, size));
+
+  delete [] array;
+  fclose(file);
+}
+
// Per-thread state handed to DeadlockProc(): the thread's index, a shared
// AudioProcessing instance, and a flag the thread sets on any API failure.
struct ThreadData {
  ThreadData(int thread_num_, AudioProcessing* ap_)
      : thread_num(thread_num_),
        error(false),
        ap(ap_) {}
  int thread_num;  // Index assigned by the spawning test.
  bool error;  // Set to true by DeadlockProc() on an unexpected error.
  AudioProcessing* ap;  // Shared APM under test; not owned.
};
+
// Thread body for the (currently disabled) Deadlock test below. Enables all
// components, then even-numbered threads call AnalyzeReverseStream() while
// odd-numbered threads set the stream parameters and call ProcessStream().
// On an unexpected error it records the failure in |thread_data| and
// returns false; otherwise it returns true (presumably telling
// ThreadWrapper to invoke it again -- confirm against ThreadWrapper docs).
// Don't use GTest here; non-thread-safe on Windows (as of 1.5.0).
bool DeadlockProc(void* thread_object) {
  ThreadData* thread_data = static_cast<ThreadData*>(thread_object);
  AudioProcessing* ap = thread_data->ap;
  int err = ap->kNoError;

  AudioFrame primary_frame;
  AudioFrame reverse_frame;
  primary_frame._payloadDataLengthInSamples = 320;
  primary_frame._audioChannel = 2;
  primary_frame._frequencyInHz = 32000;
  reverse_frame._payloadDataLengthInSamples = 320;
  reverse_frame._audioChannel = 2;
  reverse_frame._frequencyInHz = 32000;

  // Enable every component; return codes are intentionally ignored here
  // (errors surface later through the processing calls).
  ap->echo_cancellation()->Enable(true);
  ap->gain_control()->Enable(true);
  ap->high_pass_filter()->Enable(true);
  ap->level_estimator()->Enable(true);
  ap->noise_suppression()->Enable(true);
  ap->voice_detection()->Enable(true);

  if (thread_data->thread_num % 2 == 0) {
    err = ap->AnalyzeReverseStream(&reverse_frame);
    if (err != ap->kNoError) {
      printf("Error in AnalyzeReverseStream(): %d\n", err);
      thread_data->error = true;
      return false;
    }
  }

  if (thread_data->thread_num % 2 == 1) {
    ap->set_stream_delay_ms(0);
    ap->echo_cancellation()->set_stream_drift_samples(0);
    ap->gain_control()->set_stream_analog_level(0);
    err = ap->ProcessStream(&primary_frame);
    // kStreamParameterNotSetError can legitimately occur if another thread
    // consumed the parameters first, so it is logged but not treated as
    // a failure.
    if (err == ap->kStreamParameterNotSetError) {
      printf("Expected kStreamParameterNotSetError in ProcessStream(): %d\n",
          err);
    } else if (err != ap->kNoError) {
      printf("Error in ProcessStream(): %d\n", err);
      thread_data->error = true;
      return false;
    }
    ap->gain_control()->stream_analog_level();
  }

  // Brief pause (1 unit, presumably ms) before the next iteration.
  EventWrapper* event = EventWrapper::Create();
  event->Wait(1);
  delete event;
  event = NULL;

  return true;
}
+
+/*TEST_F(ApmTest, Deadlock) {
+  const int num_threads = 16;
+  std::vector<ThreadWrapper*> threads(num_threads);
+  std::vector<ThreadData*> thread_data(num_threads);
+
+  ASSERT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
+  ASSERT_EQ(apm_->kNoError, apm_->set_num_channels(2, 2));
+  ASSERT_EQ(apm_->kNoError, apm_->set_num_reverse_channels(2));
+
+  for (int i = 0; i < num_threads; i++) {
+    thread_data[i] = new ThreadData(i, apm_);
+    threads[i] = ThreadWrapper::CreateThread(DeadlockProc,
+                                             thread_data[i],
+                                             kNormalPriority,
+                                             0);
+    ASSERT_TRUE(threads[i] != NULL);
+    unsigned int thread_id = 0;
+    threads[i]->Start(thread_id);
+  }
+
+  EventWrapper* event = EventWrapper::Create();
+  ASSERT_EQ(kEventTimeout, event->Wait(5000));
+  delete event;
+  event = NULL;
+
+  for (int i = 0; i < num_threads; i++) {
+    // This will return false if the thread has deadlocked.
+    ASSERT_TRUE(threads[i]->Stop());
+    ASSERT_FALSE(thread_data[i]->error);
+    delete threads[i];
+    threads[i] = NULL;
+    delete thread_data[i];
+    thread_data[i] = NULL;
+  }
+}*/
+
// Verifies that ProcessStream() returns kStreamParameterNotSetError when a
// required per-stream parameter (AGC analog level, stream delay, or drift
// samples) has not been set since the previous successful call, and that it
// succeeds once all required parameters are supplied.
TEST_F(ApmTest, StreamParameters) {
  // No errors when the components are disabled.
  EXPECT_EQ(apm_->kNoError,
            apm_->ProcessStream(frame_));

  // -- Missing AGC level --
  EXPECT_EQ(apm_->kNoError, apm_->Initialize());
  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // Resets after successful ProcessStream().
  EXPECT_EQ(apm_->kNoError,
            apm_->gain_control()->set_stream_analog_level(127));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // Other stream parameters set correctly.
  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_drift_compensation(true));
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  // The analog level is still missing, so processing must fail.
  EXPECT_EQ(apm_->kStreamParameterNotSetError,
            apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(false));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_drift_compensation(false));

  // -- Missing delay --
  EXPECT_EQ(apm_->kNoError, apm_->Initialize());
  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // Resets after successful ProcessStream().
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // Other stream parameters set correctly.
  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_drift_compensation(true));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  EXPECT_EQ(apm_->kNoError,
            apm_->gain_control()->set_stream_analog_level(127));
  // The delay is still missing, so processing must fail.
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(false));

  // -- Missing drift --
  EXPECT_EQ(apm_->kNoError, apm_->Initialize());
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // Resets after successful ProcessStream().
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // Other stream parameters set correctly.
  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
  EXPECT_EQ(apm_->kNoError,
            apm_->gain_control()->set_stream_analog_level(127));
  EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_));

  // -- No stream parameters --
  EXPECT_EQ(apm_->kNoError, apm_->Initialize());
  EXPECT_EQ(apm_->kNoError,
            apm_->AnalyzeReverseStream(revframe_));
  EXPECT_EQ(apm_->kStreamParameterNotSetError,
            apm_->ProcessStream(frame_));

  // -- All there --
  EXPECT_EQ(apm_->kNoError, apm_->Initialize());
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  EXPECT_EQ(apm_->kNoError,
            apm_->gain_control()->set_stream_analog_level(127));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
}
+
// Verifies channel-count validation: zero or more-than-two channels are
// rejected, more output than input channels is rejected, and all valid
// combinations round-trip through the getters.
TEST_F(ApmTest, Channels) {
  // Testing number of invalid channels
  EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(0, 1));
  EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(1, 0));
  EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(3, 1));
  EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(1, 3));
  EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_reverse_channels(0));
  EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_reverse_channels(3));
  // Testing number of valid channels
  for (int i = 1; i < 3; i++) {
    for (int j = 1; j < 3; j++) {
      // More output channels than input channels is invalid.
      if (j > i) {
        EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(i, j));
      } else {
        EXPECT_EQ(apm_->kNoError, apm_->set_num_channels(i, j));
        EXPECT_EQ(j, apm_->num_output_channels());
      }
    }
    EXPECT_EQ(i, apm_->num_input_channels());
    EXPECT_EQ(apm_->kNoError, apm_->set_num_reverse_channels(i));
    EXPECT_EQ(i, apm_->num_reverse_channels());
  }
}
+
+TEST_F(ApmTest, SampleRates) {
+  // Testing invalid sample rates
+  EXPECT_EQ(apm_->kBadParameterError, apm_->set_sample_rate_hz(10000));
+  // Testing valid sample rates
+  int fs[] = {8000, 16000, 32000};
+  for (size_t i = 0; i < sizeof(fs) / sizeof(*fs); i++) {
+    EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(fs[i]));
+    EXPECT_EQ(fs[i], apm_->sample_rate_hz());
+  }
+}
+
+
// Round-trips every EchoCancellation setting: drift compensation, device
// sample rate, suppression level, metrics, delay logging, and the enable
// flag, including rejection of out-of-range device sample rates.
TEST_F(ApmTest, EchoCancellation) {
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_drift_compensation(true));
  EXPECT_TRUE(apm_->echo_cancellation()->is_drift_compensation_enabled());
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_drift_compensation(false));
  EXPECT_FALSE(apm_->echo_cancellation()->is_drift_compensation_enabled());

  // Device sample rates outside the accepted range are rejected.
  EXPECT_EQ(apm_->kBadParameterError,
      apm_->echo_cancellation()->set_device_sample_rate_hz(4000));
  EXPECT_EQ(apm_->kBadParameterError,
      apm_->echo_cancellation()->set_device_sample_rate_hz(100000));

  int rate[] = {16000, 44100, 48000};
  for (size_t i = 0; i < sizeof(rate)/sizeof(*rate); i++) {
    EXPECT_EQ(apm_->kNoError,
        apm_->echo_cancellation()->set_device_sample_rate_hz(rate[i]));
    EXPECT_EQ(rate[i],
        apm_->echo_cancellation()->device_sample_rate_hz());
  }

  EchoCancellation::SuppressionLevel level[] = {
    EchoCancellation::kLowSuppression,
    EchoCancellation::kModerateSuppression,
    EchoCancellation::kHighSuppression,
  };
  for (size_t i = 0; i < sizeof(level)/sizeof(*level); i++) {
    EXPECT_EQ(apm_->kNoError,
        apm_->echo_cancellation()->set_suppression_level(level[i]));
    EXPECT_EQ(level[i],
        apm_->echo_cancellation()->suppression_level());
  }

  // Metrics cannot be fetched before they are enabled.
  EchoCancellation::Metrics metrics;
  EXPECT_EQ(apm_->kNotEnabledError,
            apm_->echo_cancellation()->GetMetrics(&metrics));

  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_metrics(true));
  EXPECT_TRUE(apm_->echo_cancellation()->are_metrics_enabled());
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_metrics(false));
  EXPECT_FALSE(apm_->echo_cancellation()->are_metrics_enabled());

  // Delay metrics cannot be fetched before delay logging is enabled.
  int median = 0;
  int std = 0;
  EXPECT_EQ(apm_->kNotEnabledError,
            apm_->echo_cancellation()->GetDelayMetrics(&median, &std));

  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_delay_logging(true));
  EXPECT_TRUE(apm_->echo_cancellation()->is_delay_logging_enabled());
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->enable_delay_logging(false));
  EXPECT_FALSE(apm_->echo_cancellation()->is_delay_logging_enabled());

  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
  EXPECT_TRUE(apm_->echo_cancellation()->is_enabled());
  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(false));
  EXPECT_FALSE(apm_->echo_cancellation()->is_enabled());
}
+
// Exercises the mobile echo controller (AECM): rejection of super-wideband,
// routing-mode round-trips, comfort-noise toggling, echo path get/set
// symmetry, and processing with NS left in its default disabled state.
TEST_F(ApmTest, EchoControlMobile) {
  // AECM won't use super-wideband.
  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
  EXPECT_EQ(apm_->kBadSampleRateError, apm_->echo_control_mobile()->Enable(true));
  // Turn AECM on (and AEC off)
  Init(16000, 2, 2, 2, false);
  EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
  EXPECT_TRUE(apm_->echo_control_mobile()->is_enabled());

  // Toggle routing modes
  EchoControlMobile::RoutingMode mode[] = {
      EchoControlMobile::kQuietEarpieceOrHeadset,
      EchoControlMobile::kEarpiece,
      EchoControlMobile::kLoudEarpiece,
      EchoControlMobile::kSpeakerphone,
      EchoControlMobile::kLoudSpeakerphone,
  };
  for (size_t i = 0; i < sizeof(mode)/sizeof(*mode); i++) {
    EXPECT_EQ(apm_->kNoError,
        apm_->echo_control_mobile()->set_routing_mode(mode[i]));
    EXPECT_EQ(mode[i],
        apm_->echo_control_mobile()->routing_mode());
  }
  // Turn comfort noise off/on
  EXPECT_EQ(apm_->kNoError,
      apm_->echo_control_mobile()->enable_comfort_noise(false));
  EXPECT_FALSE(apm_->echo_control_mobile()->is_comfort_noise_enabled());
  EXPECT_EQ(apm_->kNoError,
      apm_->echo_control_mobile()->enable_comfort_noise(true));
  EXPECT_TRUE(apm_->echo_control_mobile()->is_comfort_noise_enabled());
  // Set and get echo path
  const size_t echo_path_size =
      apm_->echo_control_mobile()->echo_path_size_bytes();
  scoped_array<char> echo_path_in(new char[echo_path_size]);
  scoped_array<char> echo_path_out(new char[echo_path_size]);
  // NULL buffers and undersized buffers must be rejected.
  EXPECT_EQ(apm_->kNullPointerError,
            apm_->echo_control_mobile()->SetEchoPath(NULL, echo_path_size));
  EXPECT_EQ(apm_->kNullPointerError,
            apm_->echo_control_mobile()->GetEchoPath(NULL, echo_path_size));
  EXPECT_EQ(apm_->kBadParameterError,
            apm_->echo_control_mobile()->GetEchoPath(echo_path_out.get(), 1));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_control_mobile()->GetEchoPath(echo_path_out.get(),
                                                     echo_path_size));
  // Perturb each byte so the subsequent Set/Get round-trip is observable.
  for (size_t i = 0; i < echo_path_size; i++) {
    echo_path_in[i] = echo_path_out[i] + 1;
  }
  EXPECT_EQ(apm_->kBadParameterError,
            apm_->echo_control_mobile()->SetEchoPath(echo_path_in.get(), 1));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_control_mobile()->SetEchoPath(echo_path_in.get(),
                                                     echo_path_size));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_control_mobile()->GetEchoPath(echo_path_out.get(),
                                                     echo_path_size));
  for (size_t i = 0; i < echo_path_size; i++) {
    EXPECT_EQ(echo_path_in[i], echo_path_out[i]);
  }

  // Process a few frames with NS in the default disabled state. This exercises
  // a different codepath than with it enabled.
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));

  // Turn AECM off
  EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(false));
  EXPECT_FALSE(apm_->echo_control_mobile()->is_enabled());
}
+
+TEST_F(ApmTest, GainControl) {
+  // Testing gain modes
+  EXPECT_EQ(apm_->kNoError,
+      apm_->gain_control()->set_mode(
+      apm_->gain_control()->mode()));
+
+  GainControl::Mode mode[] = {
+    GainControl::kAdaptiveAnalog,
+    GainControl::kAdaptiveDigital,
+    GainControl::kFixedDigital
+  };
+  for (size_t i = 0; i < sizeof(mode)/sizeof(*mode); i++) {
+    EXPECT_EQ(apm_->kNoError,
+        apm_->gain_control()->set_mode(mode[i]));
+    EXPECT_EQ(mode[i], apm_->gain_control()->mode());
+  }
+  // Testing invalid target levels
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_target_level_dbfs(-3));
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_target_level_dbfs(-40));
+  // Testing valid target levels
+  EXPECT_EQ(apm_->kNoError,
+      apm_->gain_control()->set_target_level_dbfs(
+      apm_->gain_control()->target_level_dbfs()));
+
+  int level_dbfs[] = {0, 6, 31};
+  for (size_t i = 0; i < sizeof(level_dbfs)/sizeof(*level_dbfs); i++) {
+    EXPECT_EQ(apm_->kNoError,
+        apm_->gain_control()->set_target_level_dbfs(level_dbfs[i]));
+    EXPECT_EQ(level_dbfs[i], apm_->gain_control()->target_level_dbfs());
+  }
+
+  // Testing invalid compression gains
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_compression_gain_db(-1));
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_compression_gain_db(100));
+
+  // Testing valid compression gains
+  EXPECT_EQ(apm_->kNoError,
+      apm_->gain_control()->set_compression_gain_db(
+      apm_->gain_control()->compression_gain_db()));
+
+  int gain_db[] = {0, 10, 90};
+  for (size_t i = 0; i < sizeof(gain_db)/sizeof(*gain_db); i++) {
+    EXPECT_EQ(apm_->kNoError,
+        apm_->gain_control()->set_compression_gain_db(gain_db[i]));
+    EXPECT_EQ(gain_db[i], apm_->gain_control()->compression_gain_db());
+  }
+
+  // Testing limiter off/on
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->enable_limiter(false));
+  EXPECT_FALSE(apm_->gain_control()->is_limiter_enabled());
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->enable_limiter(true));
+  EXPECT_TRUE(apm_->gain_control()->is_limiter_enabled());
+
+  // Testing invalid level limits
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_analog_level_limits(-1, 512));
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_analog_level_limits(100000, 512));
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_analog_level_limits(512, -1));
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_analog_level_limits(512, 100000));
+  EXPECT_EQ(apm_->kBadParameterError,
+      apm_->gain_control()->set_analog_level_limits(512, 255));
+
+  // Testing valid level limits
+  EXPECT_EQ(apm_->kNoError,
+      apm_->gain_control()->set_analog_level_limits(
+      apm_->gain_control()->analog_level_minimum(),
+      apm_->gain_control()->analog_level_maximum()));
+
+  int min_level[] = {0, 255, 1024};
+  for (size_t i = 0; i < sizeof(min_level)/sizeof(*min_level); i++) {
+    EXPECT_EQ(apm_->kNoError,
+        apm_->gain_control()->set_analog_level_limits(min_level[i], 1024));
+    EXPECT_EQ(min_level[i], apm_->gain_control()->analog_level_minimum());
+  }
+
+  int max_level[] = {0, 1024, 65535};
+  for (size_t i = 0; i < sizeof(min_level)/sizeof(*min_level); i++) {
+    EXPECT_EQ(apm_->kNoError,
+        apm_->gain_control()->set_analog_level_limits(0, max_level[i]));
+    EXPECT_EQ(max_level[i], apm_->gain_control()->analog_level_maximum());
+  }
+
+  // TODO(ajm): stream_is_saturated() and stream_analog_level()
+
+  // Turn AGC off
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(false));
+  EXPECT_FALSE(apm_->gain_control()->is_enabled());
+}
+
+TEST_F(ApmTest, NoiseSuppression) {
+  // Test valid suppression levels.
+  NoiseSuppression::Level level[] = {
+    NoiseSuppression::kLow,
+    NoiseSuppression::kModerate,
+    NoiseSuppression::kHigh,
+    NoiseSuppression::kVeryHigh
+  };
+  for (size_t i = 0; i < sizeof(level)/sizeof(*level); i++) {
+    EXPECT_EQ(apm_->kNoError,
+        apm_->noise_suppression()->set_level(level[i]));
+    EXPECT_EQ(level[i], apm_->noise_suppression()->level());
+  }
+
+  // Turn NS on/off
+  EXPECT_EQ(apm_->kNoError, apm_->noise_suppression()->Enable(true));
+  EXPECT_TRUE(apm_->noise_suppression()->is_enabled());
+  EXPECT_EQ(apm_->kNoError, apm_->noise_suppression()->Enable(false));
+  EXPECT_FALSE(apm_->noise_suppression()->is_enabled());
+}
+
// Verifies the high-pass filter can be enabled and disabled, and that
// is_enabled() reflects each change.
TEST_F(ApmTest, HighPassFilter) {
  // Turn HP filter on/off
  EXPECT_EQ(apm_->kNoError, apm_->high_pass_filter()->Enable(true));
  EXPECT_TRUE(apm_->high_pass_filter()->is_enabled());
  EXPECT_EQ(apm_->kNoError, apm_->high_pass_filter()->Enable(false));
  EXPECT_FALSE(apm_->high_pass_filter()->is_enabled());
}
+
// Exercises the level estimator: error when disabled, exact RMS readings
// for known constant-valued frames, the _energy == 0 special case, and
// reset behavior on re-enable and Initialize(). The expected values
// (127 = minimum, 0 = full scale) suggest RMS() reports dBFS magnitude --
// confirm against the LevelEstimator API docs.
TEST_F(ApmTest, LevelEstimator) {
  // Turn level estimator on/off
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(false));
  EXPECT_FALSE(apm_->level_estimator()->is_enabled());

  // RMS() is an error while the estimator is disabled.
  EXPECT_EQ(apm_->kNotEnabledError, apm_->level_estimator()->RMS());

  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(true));
  EXPECT_TRUE(apm_->level_estimator()->is_enabled());

  // Run this test in wideband; in super-wb, the splitting filter distorts the
  // audio enough to cause deviation from the expectation for small values.
  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
  frame_->_payloadDataLengthInSamples = 160;
  frame_->_audioChannel = 2;
  frame_->_frequencyInHz = 16000;

  // Min value if no frames have been processed.
  EXPECT_EQ(127, apm_->level_estimator()->RMS());

  // Min value on zero frames.
  SetFrameTo(frame_, 0);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(127, apm_->level_estimator()->RMS());

  // Try a few RMS values.
  // (These also test that the value resets after retrieving it.)
  SetFrameTo(frame_, 32767);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(0, apm_->level_estimator()->RMS());

  SetFrameTo(frame_, 30000);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(1, apm_->level_estimator()->RMS());

  SetFrameTo(frame_, 10000);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(10, apm_->level_estimator()->RMS());

  SetFrameTo(frame_, 10);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(70, apm_->level_estimator()->RMS());

  // Min value if _energy == 0.
  SetFrameTo(frame_, 10000);
  uint32_t energy = frame_->_energy; // Save default to restore below.
  frame_->_energy = 0;
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(127, apm_->level_estimator()->RMS());
  frame_->_energy = energy;

  // Verify reset after enable/disable.
  SetFrameTo(frame_, 32767);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(false));
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(true));
  SetFrameTo(frame_, 1);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(90, apm_->level_estimator()->RMS());

  // Verify reset after initialize.
  SetFrameTo(frame_, 32767);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->Initialize());
  SetFrameTo(frame_, 1);
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(90, apm_->level_estimator()->RMS());
}
+
// Exercises voice detection: externally-set stream_has_voice(), likelihood
// round-trips, enable/disable, and the effect of VAD state on the
// AudioFrame's _vadActivity field during ProcessStream().
TEST_F(ApmTest, VoiceDetection) {
  // Test external VAD
  EXPECT_EQ(apm_->kNoError,
            apm_->voice_detection()->set_stream_has_voice(true));
  EXPECT_TRUE(apm_->voice_detection()->stream_has_voice());
  EXPECT_EQ(apm_->kNoError,
            apm_->voice_detection()->set_stream_has_voice(false));
  EXPECT_FALSE(apm_->voice_detection()->stream_has_voice());

  // Test valid likelihoods
  VoiceDetection::Likelihood likelihood[] = {
      VoiceDetection::kVeryLowLikelihood,
      VoiceDetection::kLowLikelihood,
      VoiceDetection::kModerateLikelihood,
      VoiceDetection::kHighLikelihood
  };
  for (size_t i = 0; i < sizeof(likelihood)/sizeof(*likelihood); i++) {
    EXPECT_EQ(apm_->kNoError,
              apm_->voice_detection()->set_likelihood(likelihood[i]));
    EXPECT_EQ(likelihood[i], apm_->voice_detection()->likelihood());
  }

  /* TODO(bjornv): Enable once VAD supports other frame lengths than 10 ms
  // Test invalid frame sizes
  EXPECT_EQ(apm_->kBadParameterError,
      apm_->voice_detection()->set_frame_size_ms(12));

  // Test valid frame sizes
  for (int i = 10; i <= 30; i += 10) {
    EXPECT_EQ(apm_->kNoError,
        apm_->voice_detection()->set_frame_size_ms(i));
    EXPECT_EQ(i, apm_->voice_detection()->frame_size_ms());
  }
  */

  // Turn VAD on/off
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
  EXPECT_TRUE(apm_->voice_detection()->is_enabled());
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(false));
  EXPECT_FALSE(apm_->voice_detection()->is_enabled());

  // Test that AudioFrame activity is maintained when VAD is disabled.
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(false));
  AudioFrame::VADActivity activity[] = {
      AudioFrame::kVadActive,
      AudioFrame::kVadPassive,
      AudioFrame::kVadUnknown
  };
  for (size_t i = 0; i < sizeof(activity)/sizeof(*activity); i++) {
    frame_->_vadActivity = activity[i];
    EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
    // With VAD disabled, the incoming activity flag must pass through.
    EXPECT_EQ(activity[i], frame_->_vadActivity);
  }

  // Test that AudioFrame activity is set when VAD is enabled.
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
  frame_->_vadActivity = AudioFrame::kVadUnknown;
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_NE(AudioFrame::kVadUnknown, frame_->_vadActivity);

  // TODO(bjornv): Add tests for streamed voice; stream_has_voice()
}
+
// Verifies that the band-splitting filter is bypassed (audio passes through
// bit-exact) whenever no enabled component needs it, and that enabling the
// AEC in super-wideband does engage the filter (observable as distortion).
TEST_F(ApmTest, SplittingFilter) {
  // Verify the filter is not active through undistorted audio when:
  // 1. No components are enabled...
  SetFrameTo(frame_, 1000);
  AudioFrame frame_copy = *frame_;
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));

  // 2. Only the level estimator is enabled...
  SetFrameTo(frame_, 1000);
  frame_copy = *frame_;
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(true));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(false));

  // 3. Only VAD is enabled...
  SetFrameTo(frame_, 1000);
  frame_copy = *frame_;
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(false));

  // 4. Both VAD and the level estimator are enabled...
  SetFrameTo(frame_, 1000);
  frame_copy = *frame_;
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(true));
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));
  EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(false));
  EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(false));

  // 5. Not using super-wb.
  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
  frame_->_payloadDataLengthInSamples = 160;
  frame_->_audioChannel = 2;
  frame_->_frequencyInHz = 16000;
  // Enable AEC, which would require the filter in super-wb. We rely on the
  // first few frames of data being unaffected by the AEC.
  // TODO(andrew): This test, and the one below, rely rather tenuously on the
  // behavior of the AEC. Think of something more robust.
  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
  SetFrameTo(frame_, 1000);
  frame_copy = *frame_;
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));

  // Check the test is valid. We should have distortion from the filter
  // when AEC is enabled (which won't affect the audio).
  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
  frame_->_payloadDataLengthInSamples = 320;
  frame_->_audioChannel = 2;
  frame_->_frequencyInHz = 32000;
  SetFrameTo(frame_, 1000);
  frame_copy = *frame_;
  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
  EXPECT_EQ(apm_->kNoError,
            apm_->echo_cancellation()->set_stream_drift_samples(0));
  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
  EXPECT_FALSE(FrameDataAreEqual(*frame_, frame_copy));
}
+
+// TODO(andrew): expand test to verify output.
+TEST_F(ApmTest, DebugDump) {
+  const std::string filename = webrtc::test::OutputPath() + "debug.aec";
+  EXPECT_EQ(apm_->kNullPointerError, apm_->StartDebugRecording(NULL));
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+  // Stopping without having started should be OK.
+  EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording());
+
+  EXPECT_EQ(apm_->kNoError, apm_->StartDebugRecording(filename.c_str()));
+  EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_));
+  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+  EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording());
+
+  // Verify the file has been written.
+  FILE* fid = fopen(filename.c_str(), "r");
+  ASSERT_TRUE(fid != NULL);
+
+  // Clean it up.
+  ASSERT_EQ(0, fclose(fid));
+  ASSERT_EQ(0, remove(filename.c_str()));
+#else
+  EXPECT_EQ(apm_->kUnsupportedFunctionError,
+            apm_->StartDebugRecording(filename.c_str()));
+  EXPECT_EQ(apm_->kUnsupportedFunctionError, apm_->StopDebugRecording());
+
+  // Verify the file has NOT been written.
+  ASSERT_TRUE(fopen(filename.c_str(), "r") == NULL);
+#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
+}
+
+// TODO(andrew): Add a test to process a few frames with different combinations
+// of enabled components.
+
+// TODO(andrew): Make this test more robust such that it can be run on multiple
+// platforms. It currently requires bit-exactness.
+#ifdef WEBRTC_AUDIOPROC_BIT_EXACT
+// Bit-exact regression test: runs recorded far-end/near-end audio through
+// the full processing pipeline and compares per-frame event counts and
+// metrics against a protobuf reference file (or regenerates that file
+// when run with --write_ref_data).
+TEST_F(ApmTest, Process) {
+  GOOGLE_PROTOBUF_VERIFY_VERSION;
+  webrtc::audioproc::OutputData ref_data;
+
+  // Either load the stored reference data, or enumerate the set of
+  // configurations for which new reference data will be generated.
+  if (!write_ref_data) {
+    ReadMessageLiteFromFile(ref_filename_, &ref_data);
+  } else {
+    // Write the desired tests to the protobuf reference file.
+    const int channels[] = {1, 2};
+    const size_t channels_size = sizeof(channels) / sizeof(*channels);
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+    // AECM doesn't support super-wb.
+    const int sample_rates[] = {8000, 16000};
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+    const int sample_rates[] = {8000, 16000, 32000};
+#endif
+    const size_t sample_rates_size = sizeof(sample_rates) / sizeof(*sample_rates);
+    // Full cross product of reverse/input/output channel counts and rates.
+    for (size_t i = 0; i < channels_size; i++) {
+      for (size_t j = 0; j < channels_size; j++) {
+        // We can't have more output than input channels.
+        for (size_t k = 0; k <= j; k++) {
+          for (size_t l = 0; l < sample_rates_size; l++) {
+            webrtc::audioproc::Test* test = ref_data.add_test();
+            test->set_num_reverse_channels(channels[i]);
+            test->set_num_input_channels(channels[j]);
+            test->set_num_output_channels(channels[k]);
+            test->set_sample_rate(sample_rates[l]);
+          }
+        }
+      }
+    }
+  }
+
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
+  EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+  EXPECT_EQ(apm_->kNoError,
+            apm_->echo_cancellation()->enable_drift_compensation(true));
+  EXPECT_EQ(apm_->kNoError,
+            apm_->echo_cancellation()->enable_metrics(true));
+  EXPECT_EQ(apm_->kNoError,
+            apm_->echo_cancellation()->enable_delay_logging(true));
+  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->gain_control()->set_mode(GainControl::kAdaptiveAnalog));
+  EXPECT_EQ(apm_->kNoError,
+            apm_->gain_control()->set_analog_level_limits(0, 255));
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
+#endif
+
+  // Components common to both profiles.
+  EXPECT_EQ(apm_->kNoError,
+            apm_->high_pass_filter()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->level_estimator()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->noise_suppression()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->voice_detection()->Enable(true));
+
+  for (int i = 0; i < ref_data.test_size(); i++) {
+    printf("Running test %d of %d...\n", i + 1, ref_data.test_size());
+
+    webrtc::audioproc::Test* test = ref_data.mutable_test(i);
+    Init(test->sample_rate(), test->num_reverse_channels(),
+         test->num_input_channels(), test->num_output_channels(), true);
+
+    // 10 ms frames (rate / 100).
+    const int samples_per_channel = test->sample_rate() / 100;
+    int frame_count = 0;
+    int has_echo_count = 0;
+    int has_voice_count = 0;
+    int is_saturated_count = 0;
+    int analog_level = 127;
+    int analog_level_average = 0;
+    int max_output_average = 0;
+
+    while (1) {
+      // Read far-end frame, always stereo.
+      size_t frame_size = samples_per_channel * 2;
+      size_t read_count = fread(revframe_->_payloadData,
+                                sizeof(int16_t),
+                                frame_size,
+                                far_file_);
+      if (read_count != frame_size) {
+        // Check that the file really ended.
+        ASSERT_NE(0, feof(far_file_));
+        break; // This is expected.
+      }
+
+      if (revframe_->_audioChannel == 1) {
+        MixStereoToMono(revframe_->_payloadData, revframe_->_payloadData,
+                        samples_per_channel);
+      }
+
+      EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_));
+
+      EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
+      EXPECT_EQ(apm_->kNoError,
+          apm_->echo_cancellation()->set_stream_drift_samples(0));
+      EXPECT_EQ(apm_->kNoError,
+          apm_->gain_control()->set_stream_analog_level(analog_level));
+
+      // Read near-end frame, always stereo.
+      read_count = fread(frame_->_payloadData,
+                         sizeof(int16_t),
+                         frame_size,
+                         near_file_);
+      if (read_count != frame_size) {
+        // Check that the file really ended.
+        ASSERT_NE(0, feof(near_file_));
+        break; // This is expected.
+      }
+
+      if (frame_->_audioChannel == 1) {
+        MixStereoToMono(frame_->_payloadData, frame_->_payloadData,
+                        samples_per_channel);
+      }
+      frame_->_vadActivity = AudioFrame::kVadUnknown;
+
+      EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+      // Ensure the frame was downmixed properly.
+      EXPECT_EQ(test->num_output_channels(), frame_->_audioChannel);
+
+      max_output_average += MaxAudioFrame(*frame_);
+
+      if (apm_->echo_cancellation()->stream_has_echo()) {
+        has_echo_count++;
+      }
+
+      analog_level = apm_->gain_control()->stream_analog_level();
+      analog_level_average += analog_level;
+      if (apm_->gain_control()->stream_is_saturated()) {
+        is_saturated_count++;
+      }
+      if (apm_->voice_detection()->stream_has_voice()) {
+        has_voice_count++;
+        EXPECT_EQ(AudioFrame::kVadActive, frame_->_vadActivity);
+      } else {
+        EXPECT_EQ(AudioFrame::kVadPassive, frame_->_vadActivity);
+      }
+
+      frame_size = samples_per_channel * frame_->_audioChannel;
+      size_t write_count = fwrite(frame_->_payloadData,
+                                  sizeof(int16_t),
+                                  frame_size,
+                                  out_file_);
+      ASSERT_EQ(frame_size, write_count);
+
+      // Reset in case of downmixing.
+      frame_->_audioChannel = test->num_input_channels();
+      frame_count++;
+    }
+    // NOTE(review): frame_count is assumed non-zero here (i.e. the input
+    // files contain at least one full frame); a zero count would divide by
+    // zero — verify the fixtures guarantee this.
+    max_output_average /= frame_count;
+    analog_level_average /= frame_count;
+
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+    EchoCancellation::Metrics echo_metrics;
+    EXPECT_EQ(apm_->kNoError,
+              apm_->echo_cancellation()->GetMetrics(&echo_metrics));
+    int median = 0;
+    int std = 0;
+    EXPECT_EQ(apm_->kNoError,
+              apm_->echo_cancellation()->GetDelayMetrics(&median, &std));
+
+    int rms_level = apm_->level_estimator()->RMS();
+    EXPECT_LE(0, rms_level);
+    EXPECT_GE(127, rms_level);
+#endif
+
+    // Compare against (or record into) the reference data.
+    if (!write_ref_data) {
+      EXPECT_EQ(test->has_echo_count(), has_echo_count);
+      EXPECT_EQ(test->has_voice_count(), has_voice_count);
+      EXPECT_EQ(test->is_saturated_count(), is_saturated_count);
+
+      EXPECT_EQ(test->analog_level_average(), analog_level_average);
+      EXPECT_EQ(test->max_output_average(), max_output_average);
+
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+      webrtc::audioproc::Test::EchoMetrics reference =
+          test->echo_metrics();
+      TestStats(echo_metrics.residual_echo_return_loss,
+                reference.residual_echo_return_loss());
+      TestStats(echo_metrics.echo_return_loss,
+                reference.echo_return_loss());
+      TestStats(echo_metrics.echo_return_loss_enhancement,
+                reference.echo_return_loss_enhancement());
+      TestStats(echo_metrics.a_nlp,
+                reference.a_nlp());
+
+      webrtc::audioproc::Test::DelayMetrics reference_delay =
+          test->delay_metrics();
+      EXPECT_EQ(reference_delay.median(), median);
+      EXPECT_EQ(reference_delay.std(), std);
+
+      EXPECT_EQ(test->rms_level(), rms_level);
+#endif
+    } else {
+      test->set_has_echo_count(has_echo_count);
+      test->set_has_voice_count(has_voice_count);
+      test->set_is_saturated_count(is_saturated_count);
+
+      test->set_analog_level_average(analog_level_average);
+      test->set_max_output_average(max_output_average);
+
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+      webrtc::audioproc::Test::EchoMetrics* message =
+          test->mutable_echo_metrics();
+      WriteStatsMessage(echo_metrics.residual_echo_return_loss,
+                        message->mutable_residual_echo_return_loss());
+      WriteStatsMessage(echo_metrics.echo_return_loss,
+                        message->mutable_echo_return_loss());
+      WriteStatsMessage(echo_metrics.echo_return_loss_enhancement,
+                        message->mutable_echo_return_loss_enhancement());
+      WriteStatsMessage(echo_metrics.a_nlp,
+                        message->mutable_a_nlp());
+
+      webrtc::audioproc::Test::DelayMetrics* message_delay =
+          test->mutable_delay_metrics();
+      message_delay->set_median(median);
+      message_delay->set_std(std);
+
+      test->set_rms_level(rms_level);
+#endif
+    }
+
+    // Rewind the audio files for the next configuration.
+    rewind(far_file_);
+    rewind(near_file_);
+  }
+
+  if (write_ref_data) {
+    WriteMessageLiteToFile(ref_filename_, ref_data);
+  }
+}
+#endif  // WEBRTC_AUDIOPROC_BIT_EXACT
+
+}  // namespace
+
+// Test entry point. Recognizes one custom flag, --write_ref_data, which
+// switches the Process test into reference-writing mode.
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+
+  int arg_index = 1;
+  while (arg_index < argc) {
+    if (strcmp(argv[arg_index], "--write_ref_data") == 0) {
+      write_ref_data = true;
+    }
+    ++arg_index;
+  }
+
+  const int result = RUN_ALL_TESTS();
+
+  // Optional, but removes memory leak noise from Valgrind.
+  google::protobuf::ShutdownProtobufLibrary();
+  return result;
+}
diff --git a/trunk/src/modules/audio_processing/test/unittest.proto b/trunk/src/modules/audio_processing/test/unittest.proto
new file mode 100644
index 0000000..67ba722
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/unittest.proto
@@ -0,0 +1,52 @@
+syntax = "proto2";
+option optimize_for = LITE_RUNTIME;
+package webrtc.audioproc;
+
+// Reference results for one audio_processing unit-test configuration.
+message Test {
+  // Channel/rate configuration under test.
+  optional int32 num_reverse_channels = 1;
+  optional int32 num_input_channels = 2;
+  optional int32 num_output_channels = 3;
+  optional int32 sample_rate = 4;
+
+  // Placeholder for per-frame data; currently empty and unused.
+  message Frame {
+  }
+
+  repeated Frame frame = 5;
+
+  // Averages accumulated over all processed frames.
+  optional int32 analog_level_average = 6;
+  optional int32 max_output_average = 7;
+
+  // Per-frame event counters.
+  optional int32 has_echo_count = 8;
+  optional int32 has_voice_count = 9;
+  optional int32 is_saturated_count = 10;
+
+  // Four-point statistic (instant/average/maximum/minimum values).
+  message Statistic {
+    optional int32 instant = 1;
+    optional int32 average = 2;
+    optional int32 maximum = 3;
+    optional int32 minimum = 4;
+  }
+
+  // Echo canceller metrics snapshot.
+  message EchoMetrics {
+    optional Statistic residual_echo_return_loss = 1;
+    optional Statistic echo_return_loss = 2;
+    optional Statistic echo_return_loss_enhancement = 3;
+    optional Statistic a_nlp = 4;
+  }
+
+  optional EchoMetrics echo_metrics = 11;
+
+  // Delay-logging metrics snapshot.
+  message DelayMetrics {
+    optional int32 median = 1;
+    optional int32 std = 2;
+  }
+
+  optional DelayMetrics delay_metrics = 12;
+
+  optional int32 rms_level = 13;
+}
+
+// Top-level container: one Test entry per tested configuration.
+message OutputData {
+  repeated Test test = 1;
+}
+
diff --git a/trunk/src/modules/audio_processing/test/unpack.cc b/trunk/src/modules/audio_processing/test/unpack.cc
new file mode 100644
index 0000000..2337131
--- /dev/null
+++ b/trunk/src/modules/audio_processing/test/unpack.cc
@@ -0,0 +1,216 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Commandline tool to unpack audioproc debug files.
+//
+// The debug files are dumped as protobuf blobs. For analysis, it's necessary
+// to unpack the file into its component parts: audio and other data.
+
+#include <stdio.h>
+
+#include "google/gflags.h"
+#include "scoped_ptr.h"
+#include "typedefs.h"
+#include "webrtc/audio_processing/debug.pb.h"
+
+using webrtc::scoped_array;
+
+using webrtc::audioproc::Event;
+using webrtc::audioproc::ReverseStream;
+using webrtc::audioproc::Stream;
+using webrtc::audioproc::Init;
+
+// TODO(andrew): unpack more of the data.
+DEFINE_string(input_file, "input.pcm", "The name of the input stream file.");
+DEFINE_string(output_file, "ref_out.pcm",
+              "The name of the reference output stream file.");
+DEFINE_string(reverse_file, "reverse.pcm",
+              "The name of the reverse input stream file.");
+DEFINE_string(delay_file, "delay.int32", "The name of the delay file.");
+DEFINE_string(drift_file, "drift.int32", "The name of the drift file.");
+DEFINE_string(level_file, "level.int32", "The name of the level file.");
+DEFINE_string(settings_file, "settings.txt", "The name of the settings file.");
+DEFINE_bool(full, false,
+            "Unpack the full set of files (normally not needed).");
+
+// TODO(andrew): move this to a helper class to share with process_test.cc?
+// Returns true on success, false on error or end-of-file.
+bool ReadMessageFromFile(FILE* file,
+                        ::google::protobuf::MessageLite* msg) {
+  // Each message is prefixed with its byte size, stored little-endian.
+  // Assume this tool runs on a little-endian machine.
+  int32_t message_size = 0;
+  if (fread(&message_size, sizeof(int32_t), 1, file) != 1 ||
+      message_size <= 0) {
+    return false;
+  }
+  const size_t payload_bytes = static_cast<size_t>(message_size);
+
+  // Read the payload and hand it to the protobuf parser.
+  scoped_array<char> payload(new char[payload_bytes]);
+  if (fread(payload.get(), sizeof(char), payload_bytes, file) !=
+      payload_bytes) {
+    return false;
+  }
+
+  msg->Clear();
+  return msg->ParseFromArray(payload.get(), payload_bytes);
+}
+
+int main(int argc, char* argv[]) {
+  std::string program_name = argv[0];
+  std::string usage = "Commandline tool to unpack audioproc debug files.\n"
+    "Example usage:\n" + program_name + " debug_dump.pb\n";
+  google::SetUsageMessage(usage);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  if (argc < 2) {
+    printf("%s", google::ProgramUsage());
+    return 1;
+  }
+
+  // argv[1] is the protobuf debug dump to unpack.
+  FILE* debug_file = fopen(argv[1], "rb");
+  if (debug_file == NULL) {
+    printf("Unable to open %s\n", argv[1]);
+    return 1;
+  }
+  FILE* input_file = fopen(FLAGS_input_file.c_str(), "wb");
+  if (input_file == NULL) {
+    printf("Unable to open %s\n", FLAGS_input_file.c_str());
+    return 1;
+  }
+  FILE* output_file = fopen(FLAGS_output_file.c_str(), "wb");
+  if (output_file == NULL) {
+    printf("Unable to open %s\n", FLAGS_output_file.c_str());
+    return 1;
+  }
+  FILE* reverse_file = fopen(FLAGS_reverse_file.c_str(), "wb");
+  if (reverse_file == NULL) {
+    printf("Unable to open %s\n", FLAGS_reverse_file.c_str());
+    return 1;
+  }
+  FILE* settings_file = fopen(FLAGS_settings_file.c_str(), "wb");
+  if (settings_file == NULL) {
+    printf("Unable to open %s\n", FLAGS_settings_file.c_str());
+    return 1;
+  }
+
+  // The int32 side-data files are only produced when --full is given.
+  FILE* delay_file = NULL;
+  FILE* drift_file = NULL;
+  FILE* level_file = NULL;
+  if (FLAGS_full) {
+    delay_file = fopen(FLAGS_delay_file.c_str(), "wb");
+    if (delay_file == NULL) {
+      printf("Unable to open %s\n", FLAGS_delay_file.c_str());
+      return 1;
+    }
+    drift_file = fopen(FLAGS_drift_file.c_str(), "wb");
+    if (drift_file == NULL) {
+      printf("Unable to open %s\n", FLAGS_drift_file.c_str());
+      return 1;
+    }
+    level_file = fopen(FLAGS_level_file.c_str(), "wb");
+    if (level_file == NULL) {
+      printf("Unable to open %s\n", FLAGS_level_file.c_str());
+      return 1;
+    }
+  }
+
+  // Dispatch each recorded event to the appropriate output file.
+  // frame_count tracks near-end STREAM events so INIT entries can be
+  // located relative to the audio.
+  Event event_msg;
+  int frame_count = 0;
+  while (ReadMessageFromFile(debug_file, &event_msg)) {
+    if (event_msg.type() == Event::REVERSE_STREAM) {
+      if (!event_msg.has_reverse_stream()) {
+        printf("Corrupted input file: ReverseStream missing.\n");
+        return 1;
+      }
+
+      const ReverseStream msg = event_msg.reverse_stream();
+      if (msg.has_data()) {
+        if (fwrite(msg.data().data(), msg.data().size(), 1, reverse_file) !=
+            1) {
+          printf("Error when writing to %s\n", FLAGS_reverse_file.c_str());
+          return 1;
+        }
+      }
+    } else if (event_msg.type() == Event::STREAM) {
+      frame_count++;
+      if (!event_msg.has_stream()) {
+        printf("Corrupted input file: Stream missing.\n");
+        return 1;
+      }
+
+      const Stream msg = event_msg.stream();
+      if (msg.has_input_data()) {
+        if (fwrite(msg.input_data().data(), msg.input_data().size(), 1,
+                   input_file) != 1) {
+          printf("Error when writing to %s\n", FLAGS_input_file.c_str());
+          return 1;
+        }
+      }
+
+      if (msg.has_output_data()) {
+        if (fwrite(msg.output_data().data(), msg.output_data().size(), 1,
+                   output_file) != 1) {
+          printf("Error when writing to %s\n", FLAGS_output_file.c_str());
+          return 1;
+        }
+      }
+
+      if (FLAGS_full) {
+        if (msg.has_delay()) {
+          int32_t delay = msg.delay();
+          if (fwrite(&delay, sizeof(int32_t), 1, delay_file) != 1) {
+            printf("Error when writing to %s\n", FLAGS_delay_file.c_str());
+            return 1;
+          }
+        }
+
+        if (msg.has_drift()) {
+          int32_t drift = msg.drift();
+          if (fwrite(&drift, sizeof(int32_t), 1, drift_file) != 1) {
+            printf("Error when writing to %s\n", FLAGS_drift_file.c_str());
+            return 1;
+          }
+        }
+
+        if (msg.has_level()) {
+          int32_t level = msg.level();
+          if (fwrite(&level, sizeof(int32_t), 1, level_file) != 1) {
+            printf("Error when writing to %s\n", FLAGS_level_file.c_str());
+            return 1;
+          }
+        }
+      }
+    } else if (event_msg.type() == Event::INIT) {
+      if (!event_msg.has_init()) {
+        printf("Corrupted input file: Init missing.\n");
+        return 1;
+      }
+
+      const Init msg = event_msg.init();
+      // These should print out zeros if they're missing.
+      fprintf(settings_file, "Init at frame: %d\n", frame_count);
+      fprintf(settings_file, "  Sample rate: %d\n", msg.sample_rate());
+      fprintf(settings_file, "  Device sample rate: %d\n",
+              msg.device_sample_rate());
+      fprintf(settings_file, "  Input channels: %d\n",
+              msg.num_input_channels());
+      fprintf(settings_file, "  Output channels: %d\n",
+              msg.num_output_channels());
+      fprintf(settings_file, "  Reverse channels: %d\n",
+              msg.num_reverse_channels());
+
+      fprintf(settings_file, "\n");
+    }
+  }
+
+  // NOTE(review): the FILE handles are never fclose()d; acceptable for this
+  // short-lived tool since process exit flushes stdio buffers.
+  return 0;
+}
diff --git a/trunk/src/modules/audio_processing/utility/Android.mk b/trunk/src/modules/audio_processing/utility/Android.mk
new file mode 100644
index 0000000..bd3d039
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/Android.mk
@@ -0,0 +1,43 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings.
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_apm_utility
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+    fft4g.c \
+    ring_buffer.c \
+    delay_estimator.c \
+    delay_estimator_wrapper.c
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH) \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# When building inside the Android platform tree (no NDK), pull in the
+# platform STLport makefile; NDK builds provide STL support themselves.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/audio_processing/utility/delay_estimator.c b/trunk/src/modules/audio_processing/utility/delay_estimator.c
new file mode 100644
index 0000000..24ee74d
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/delay_estimator.c
@@ -0,0 +1,319 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "delay_estimator.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+
+// Number of right shifts for scaling is linearly depending on number of bits in
+// the far-end binary spectrum.
+static const int kShiftsAtZero = 13;  // Right shifts at zero binary spectrum.
+static const int kShiftsLinearSlope = 3;
+
+static const int32_t kProbabilityOffset = 1024;  // 2 in Q9.
+static const int32_t kProbabilityLowerLimit = 8704;  // 17 in Q9.
+static const int32_t kProbabilityMinSpread = 2816;  // 5.5 in Q9.
+
+// Returns the number of set bits (population count) of the 32-bit word.
+static int BitCount(uint32_t u32) {
+  int count = 0;
+  // Simple shift-and-mask popcount; examines each bit once.
+  while (u32 != 0) {
+    count += (int) (u32 & 1);
+    u32 >>= 1;
+  }
+  return count;
+}
+
+// XOR-compares |binary_vector| against each entry of |binary_matrix| and
+// records, per entry, the number of bit positions where they DIFFER.
+// (The previous comment claimed it counted matching positions, which
+// contradicts the XOR + popcount implementation below.)
+//
+// Inputs:
+//      - binary_vector     : binary spectrum packed into one 32-bit word
+//      - binary_matrix     : history of binary spectra, one word per delay
+//      - matrix_size       : number of entries in |binary_matrix|
+//
+// Output:
+//      - bit_counts        : per-entry count of differing bit positions
+//                            between |binary_vector| and the matrix entry
+//
+static void BitCountComparison(uint32_t binary_vector,
+                               const uint32_t* binary_matrix,
+                               int matrix_size,
+                               int32_t* bit_counts) {
+  int n = 0;
+
+  // Compare |binary_vector| with all rows of the |binary_matrix|.
+  for (; n < matrix_size; n++) {
+    bit_counts[n] = (int32_t) BitCount(binary_vector ^ binary_matrix[n]);
+  }
+}
+
+// Releases every buffer owned by |handle| and then |handle| itself.
+// Always returns 0.
+int WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* handle) {
+  assert(handle != NULL);
+
+  // free() ignores NULL pointers, so members that were never allocated
+  // are handled safely without explicit checks.
+  free(handle->mean_bit_counts);
+  free(handle->bit_counts);
+  free(handle->binary_far_history);
+  free(handle->binary_near_history);
+  free(handle->far_bit_counts);
+
+  free(handle);
+
+  return 0;
+}
+
+// Allocates a binary delay estimator with |max_delay| + |lookahead| far-end
+// history slots and returns it via |handle|. Returns 0 on success, -1 on
+// invalid arguments or allocation failure (any partially allocated memory
+// is released before returning).
+int WebRtc_CreateBinaryDelayEstimator(BinaryDelayEstimator** handle,
+                                      int max_delay,
+                                      int lookahead) {
+  BinaryDelayEstimator* self = NULL;
+  int history_size = max_delay + lookahead;
+
+  if (handle == NULL) {
+    return -1;
+  }
+  if (max_delay < 0) {
+    return -1;
+  }
+  if (lookahead < 0) {
+    return -1;
+  }
+  if (history_size < 2) {
+    // Must be this large for buffer shifting.
+    return -1;
+  }
+
+  self = malloc(sizeof(BinaryDelayEstimator));
+  *handle = self;
+  if (self == NULL) {
+    return -1;
+  }
+
+  // NULL all buffer pointers up front so that any failure below can safely
+  // run the full cleanup in WebRtc_FreeBinaryDelayEstimator(). Previously
+  // |binary_near_history| was left uninitialized here; if an earlier
+  // allocation failed, the cleanup freed an indeterminate pointer (UB).
+  self->mean_bit_counts = NULL;
+  self->bit_counts = NULL;
+  self->binary_far_history = NULL;
+  self->binary_near_history = NULL;
+  self->far_bit_counts = NULL;
+
+  self->history_size = history_size;
+  self->near_history_size = lookahead + 1;
+
+  // Allocate memory for spectrum buffers.
+  self->mean_bit_counts = malloc(history_size * sizeof(int32_t));
+  if (self->mean_bit_counts == NULL) {
+    WebRtc_FreeBinaryDelayEstimator(self);
+    self = NULL;
+    return -1;
+  }
+  self->bit_counts = malloc(history_size * sizeof(int32_t));
+  if (self->bit_counts == NULL) {
+    WebRtc_FreeBinaryDelayEstimator(self);
+    self = NULL;
+    return -1;
+  }
+  // Allocate memory for history buffers.
+  self->binary_far_history = malloc(history_size * sizeof(uint32_t));
+  if (self->binary_far_history == NULL) {
+    WebRtc_FreeBinaryDelayEstimator(self);
+    self = NULL;
+    return -1;
+  }
+  self->binary_near_history = malloc(self->near_history_size *
+      sizeof(uint32_t));
+  if (self->binary_near_history == NULL) {
+    WebRtc_FreeBinaryDelayEstimator(self);
+    self = NULL;
+    return -1;
+  }
+  self->far_bit_counts = malloc(history_size * sizeof(int));
+  if (self->far_bit_counts == NULL) {
+    WebRtc_FreeBinaryDelayEstimator(self);
+    self = NULL;
+    return -1;
+  }
+
+  return 0;
+}
+
+// Resets |handle| to its start-up state. Must be called before the first
+// call to WebRtc_ProcessBinarySpectrum(). Always returns 0.
+int WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* handle) {
+  int i = 0;
+  assert(handle != NULL);
+
+  // Clear all history buffers.
+  memset(handle->bit_counts, 0, sizeof(int32_t) * handle->history_size);
+  memset(handle->binary_far_history, 0,
+         sizeof(uint32_t) * handle->history_size);
+  memset(handle->binary_near_history, 0,
+         sizeof(uint32_t) * handle->near_history_size);
+  memset(handle->far_bit_counts, 0, sizeof(int) * handle->history_size);
+  for (i = 0; i < handle->history_size; ++i) {
+    handle->mean_bit_counts[i] = (20 << 9);  // 20 in Q9.
+  }
+  handle->minimum_probability = (32 << 9);  // 32 in Q9.
+  handle->last_delay_probability = (32 << 9);  // 32 in Q9.
+
+  // Default return value if we're unable to estimate. -1 is used for errors.
+  handle->last_delay = -2;
+
+  return 0;
+}
+
+// Feeds one far-end/near-end binary spectrum pair into the estimator and
+// returns the updated delay estimate in history slots (-2 until a reliable
+// estimate has been made; see WebRtc_InitBinaryDelayEstimator()).
+int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* handle,
+                                 uint32_t binary_far_spectrum,
+                                 uint32_t binary_near_spectrum) {
+  int i = 0;
+  int candidate_delay = -1;
+
+  int32_t value_best_candidate = 16384;  // 32 in Q9, (max |mean_bit_counts|).
+  int32_t value_worst_candidate = 0;
+
+  assert(handle != NULL);
+  // Shift binary spectrum history and insert current |binary_far_spectrum|.
+  memmove(&(handle->binary_far_history[1]), &(handle->binary_far_history[0]),
+          (handle->history_size - 1) * sizeof(uint32_t));
+  handle->binary_far_history[0] = binary_far_spectrum;
+
+  // Shift history of far-end binary spectrum bit counts and insert bit count
+  // of current |binary_far_spectrum|.
+  memmove(&(handle->far_bit_counts[1]), &(handle->far_bit_counts[0]),
+          (handle->history_size - 1) * sizeof(int));
+  handle->far_bit_counts[0] = BitCount(binary_far_spectrum);
+
+  if (handle->near_history_size > 1) {
+    // If we apply lookahead, shift near-end binary spectrum history. Insert
+    // current |binary_near_spectrum| and pull out the delayed one.
+    memmove(&(handle->binary_near_history[1]),
+            &(handle->binary_near_history[0]),
+            (handle->near_history_size - 1) * sizeof(uint32_t));
+    handle->binary_near_history[0] = binary_near_spectrum;
+    binary_near_spectrum =
+        handle->binary_near_history[handle->near_history_size - 1];
+  }
+
+  // Compare with delayed spectra and store the |bit_counts| for each delay.
+  BitCountComparison(binary_near_spectrum,
+                     handle->binary_far_history,
+                     handle->history_size,
+                     handle->bit_counts);
+
+  // Update |mean_bit_counts|, which is the smoothed version of |bit_counts|.
+  for (i = 0; i < handle->history_size; i++) {
+    // |bit_counts| is constrained to [0, 32], meaning we can smooth with a
+    // factor up to 2^26. We use Q9.
+    int32_t bit_count = (handle->bit_counts[i] << 9);  // Q9.
+
+    // Update |mean_bit_counts| only when far-end signal has something to
+    // contribute. If |far_bit_counts| is zero the far-end signal is weak and
+    // we likely have a poor echo condition, hence don't update.
+    if (handle->far_bit_counts[i] > 0) {
+      // Make number of right shifts piecewise linear w.r.t. |far_bit_counts|.
+      int shifts = kShiftsAtZero;
+      shifts -= (kShiftsLinearSlope * handle->far_bit_counts[i]) >> 4;
+      WebRtc_MeanEstimatorFix(bit_count, shifts, &(handle->mean_bit_counts[i]));
+    }
+  }
+
+  // Find |candidate_delay|, |value_best_candidate| and |value_worst_candidate|
+  // of |mean_bit_counts|.
+  for (i = 0; i < handle->history_size; i++) {
+    if (handle->mean_bit_counts[i] < value_best_candidate) {
+      value_best_candidate = handle->mean_bit_counts[i];
+      candidate_delay = i;
+    }
+    if (handle->mean_bit_counts[i] > value_worst_candidate) {
+      value_worst_candidate = handle->mean_bit_counts[i];
+    }
+  }
+
+  // The |value_best_candidate| is a good indicator on the probability of
+  // |candidate_delay| being an accurate delay (a small |value_best_candidate|
+  // means a good binary match). In the following sections we make a decision
+  // whether to update |last_delay| or not.
+  // 1) If the difference bit counts between the best and the worst delay
+  //    candidates is too small we consider the situation to be unreliable and
+  //    don't update |last_delay|.
+  // 2) If the situation is reliable we update |last_delay| if the value of the
+  //    best candidate delay has a value less than
+  //     i) an adaptive threshold |minimum_probability|, or
+  //    ii) this corresponding value |last_delay_probability|, but updated at
+  //        this time instant.
+
+  // Update |minimum_probability|.
+  if ((handle->minimum_probability > kProbabilityLowerLimit) &&
+      (value_worst_candidate - value_best_candidate > kProbabilityMinSpread)) {
+    // The "hard" threshold can't be lower than 17 (in Q9).
+    // The valley in the curve also has to be distinct, i.e., the
+    // difference between |value_worst_candidate| and |value_best_candidate| has
+    // to be large enough.
+    int32_t threshold = value_best_candidate + kProbabilityOffset;
+    if (threshold < kProbabilityLowerLimit) {
+      threshold = kProbabilityLowerLimit;
+    }
+    if (handle->minimum_probability > threshold) {
+      handle->minimum_probability = threshold;
+    }
+  }
+  // Update |last_delay_probability|.
+  // We use a Markov type model, i.e., a slowly increasing level over time.
+  handle->last_delay_probability++;
+  if (value_worst_candidate > value_best_candidate + kProbabilityOffset) {
+    // Reliable delay value for usage.
+    if (value_best_candidate < handle->minimum_probability) {
+      handle->last_delay = candidate_delay;
+    }
+    if (value_best_candidate < handle->last_delay_probability) {
+      handle->last_delay = candidate_delay;
+      // Reset |last_delay_probability|.
+      handle->last_delay_probability = value_best_candidate;
+    }
+  }
+
+  return handle->last_delay;
+}
+
+// Returns the most recent delay estimate (-2 if none has been made yet).
+int WebRtc_binary_last_delay(BinaryDelayEstimator* handle) {
+  assert(handle != NULL);
+  return handle->last_delay;
+}
+
+// Returns the far-end history size (max_delay + lookahead at creation).
+int WebRtc_history_size(BinaryDelayEstimator* handle) {
+  assert(handle != NULL);
+  return handle->history_size;
+}
+
+// Fixed-point exponential smoothing:
+//   *mean_value += (new_value - *mean_value) >> factor
+// with the shift applied to the magnitude, so rounding is symmetric
+// around zero for positive and negative differences.
+void WebRtc_MeanEstimatorFix(int32_t new_value,
+                             int factor,
+                             int32_t* mean_value) {
+  int32_t diff = new_value - *mean_value;
+
+  if (diff >= 0) {
+    diff >>= factor;
+  } else {
+    diff = -((-diff) >> factor);
+  }
+  *mean_value += diff;
+}
diff --git a/trunk/src/modules/audio_processing/utility/delay_estimator.h b/trunk/src/modules/audio_processing/utility/delay_estimator.h
new file mode 100644
index 0000000..a376dfe
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/delay_estimator.h
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Performs delay estimation on binary converted spectra.
+// The return value is  0 - OK and -1 - Error, unless otherwise stated.
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_
+
+#include "typedefs.h"
+
+// State for the delay estimator operating on binary (bit-packed) spectra.
+typedef struct {
+  // Pointer to bit counts.
+  int32_t* mean_bit_counts;
+  int* far_bit_counts;
+
+  // Array only used locally in ProcessBinarySpectrum() but whose size is
+  // determined at run-time.
+  int32_t* bit_counts;
+
+  // Binary history variables.
+  uint32_t* binary_far_history;
+  uint32_t* binary_near_history;
+
+  // Delay estimation variables.
+  int32_t minimum_probability;
+  int last_delay_probability;
+
+  // Delay memory. Holds the last estimate returned by
+  // WebRtc_ProcessBinarySpectrum().
+  int last_delay;
+
+  // Buffer size. Far-end history length; also reported by
+  // WebRtc_history_size().
+  int history_size;
+
+  // Near-end buffer size.
+  int near_history_size;
+} BinaryDelayEstimator;
+
+// Releases the memory allocated by WebRtc_CreateBinaryDelayEstimator(...).
+// Input:
+//    - handle            : Pointer to the delay estimation instance.
+//
+int WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* handle);
+
+// Refer to WebRtc_CreateDelayEstimator() in delay_estimator_wrapper.h.
+int WebRtc_CreateBinaryDelayEstimator(BinaryDelayEstimator** handle,
+                                      int max_delay,
+                                      int lookahead);
+
+// Initializes the delay estimation instance created with
+// WebRtc_CreateBinaryDelayEstimator(...).
+// Input:
+//    - handle            : Pointer to the delay estimation instance.
+//
+// Output:
+//    - handle            : Initialized instance.
+//
+int WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* handle);
+
+// Estimates and returns the delay between the binary far-end and binary near-
+// end spectra. The value will be offset by the lookahead (i.e. the lookahead
+// should be subtracted from the returned value).
+// Inputs:
+//    - handle                : Pointer to the delay estimation instance.
+//    - binary_far_spectrum   : Far-end binary spectrum.
+//    - binary_near_spectrum  : Near-end binary spectrum of the current block.
+//
+// Output:
+//    - handle                : Updated instance.
+//
+// Return value:
+//    - delay                 :  >= 0 - Calculated delay value.
+//                              -1    - Error.
+//                              -2    - Insufficient data for estimation.
+//
+int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* handle,
+                                 uint32_t binary_far_spectrum,
+                                 uint32_t binary_near_spectrum);
+
+// Returns the last calculated delay updated by the function
+// WebRtc_ProcessBinarySpectrum(...).
+//
+// Input:
+//    - handle                : Pointer to the delay estimation instance.
+//
+// Return value:
+//    - delay                 :  >= 0 - Last calculated delay value
+//                              -1    - Error
+//                              -2    - Insufficient data for estimation.
+//
+int WebRtc_binary_last_delay(BinaryDelayEstimator* handle);
+
+// Returns the history size used in the far-end buffers to calculate the delay
+// over.
+//
+// Input:
+//    - handle                : Pointer to the delay estimation instance.
+//
+// Return value:
+//    - history_size          :  > 0  - Far-end history size.
+//                              -1    - Error.
+//
+int WebRtc_history_size(BinaryDelayEstimator* handle);
+
+// Updates the |mean_value| recursively with a step size of 2^-|factor|. This
+// function is used internally in the Binary Delay Estimator as well as the
+// Fixed point wrapper.
+//
+// Inputs:
+//    - new_value             : The new value the mean should be updated with.
+//    - factor                : The step size, in number of right shifts.
+//
+// Input/Output:
+//    - mean_value            : Pointer to the mean value.
+//
+void WebRtc_MeanEstimatorFix(int32_t new_value,
+                             int factor,
+                             int32_t* mean_value);
+
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_
diff --git a/trunk/src/modules/audio_processing/utility/delay_estimator_wrapper.c b/trunk/src/modules/audio_processing/utility/delay_estimator_wrapper.c
new file mode 100644
index 0000000..438c95f
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/delay_estimator_wrapper.c
@@ -0,0 +1,336 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "delay_estimator_wrapper.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "delay_estimator.h"
+
+// One spectrum threshold value, stored either as float or as Q15 fixed point
+// depending on whether the Float or Fix Process function is used.
+typedef union {
+  float float_;
+  int32_t int32_;
+} SpectrumType;
+
+// Wrapper state: adaptive binarization thresholds plus the underlying binary
+// delay estimator.
+typedef struct {
+  // Pointers to mean values of spectrum. Each array has |spectrum_size|
+  // elements and serves as the adaptive threshold when binarizing spectra.
+  SpectrumType* mean_far_spectrum;
+  SpectrumType* mean_near_spectrum;
+  // |mean_*_spectrum| initialization indicator. Zero until the first nonzero
+  // spectrum value has been observed.
+  int far_spectrum_initialized;
+  int near_spectrum_initialized;
+
+  // Number of elements in the spectrum arrays passed to the Process calls.
+  int spectrum_size;
+
+  // Binary spectrum based delay estimator
+  BinaryDelayEstimator* binary_handle;
+} DelayEstimator;
+
+// Only bit |kBandFirst| through bit |kBandLast| are processed and
+// |kBandFirst| - |kBandLast| must be < 32.
+static const int kBandFirst = 12;
+static const int kBandLast = 43;
+
+// Sets bit |pos| (0-based) in |in| and returns the result. The shift operand
+// must be unsigned: |pos| ranges up to kBandLast - kBandFirst == 31, and
+// (1 << 31) on a signed int is undefined behavior in C.
+static __inline uint32_t SetBit(uint32_t in, int pos) {
+  uint32_t mask = (((uint32_t) 1) << pos);
+  uint32_t out = (in | mask);
+
+  return out;
+}
+
+// Calculates the mean recursively. Same version as WebRtc_MeanEstimatorFix(),
+// but for float.
+//
+// Inputs:
+//    - new_value             : New additional value.
+//    - scale                 : Scale for smoothing (should be less than 1.0).
+//
+// Input/Output:
+//    - mean_value            : Pointer to the mean value for updating.
+//
+static void MeanEstimatorFloat(float new_value,
+                               float scale,
+                               float* mean_value) {
+  // |scale| is the smoothing step size; a value >= 1 would overshoot the
+  // target instead of converging toward it.
+  assert(scale < 1.0f);
+  *mean_value += (new_value - *mean_value) * scale;
+}
+
+// Computes the binary spectrum by comparing the input |spectrum| with a
+// |threshold_spectrum|. Float and fixed point versions.
+//
+// Inputs:
+//      - spectrum            : Spectrum of which the binary spectrum should be
+//                              calculated.
+//      - threshold_spectrum  : Threshold spectrum with which the input
+//                              spectrum is compared.
+// Return:
+//      - out                 : Binary spectrum.
+//
+static uint32_t BinarySpectrumFix(uint16_t* spectrum,
+                                  SpectrumType* threshold_spectrum,
+                                  int q_domain,
+                                  int* threshold_initialized) {
+  int i = kBandFirst;
+  uint32_t out = 0;
+
+  // A Q-domain of 16 or more would make the Q15 conversion shift negative.
+  assert(q_domain < 16);
+
+  if (!(*threshold_initialized)) {
+    // Set the |threshold_spectrum| to half the input |spectrum| as starting
+    // value. This speeds up the convergence.
+    for (i = kBandFirst; i <= kBandLast; i++) {
+      if (spectrum[i] > 0) {
+        // Convert input spectrum from Q(|q_domain|) to Q15.
+        int32_t spectrum_q15 = ((int32_t) spectrum[i]) << (15 - q_domain);
+        threshold_spectrum[i].int32_ = (spectrum_q15 >> 1);
+        // Any nonzero band marks the thresholds as seeded.
+        *threshold_initialized = 1;
+      }
+    }
+  }
+  // Bit k of |out| corresponds to band kBandFirst + k; it is set when the
+  // band energy exceeds its running-mean threshold.
+  for (i = kBandFirst; i <= kBandLast; i++) {
+    // Convert input spectrum from Q(|q_domain|) to Q15.
+    int32_t spectrum_q15 = ((int32_t) spectrum[i]) << (15 - q_domain);
+    // Update the |threshold_spectrum|. Step size 2^-6 matches the 1/64 scale
+    // used by the float version.
+    WebRtc_MeanEstimatorFix(spectrum_q15, 6, &(threshold_spectrum[i].int32_));
+    // Convert |spectrum| at current frequency bin to a binary value.
+    if (spectrum_q15 > threshold_spectrum[i].int32_) {
+      out = SetBit(out, i - kBandFirst);
+    }
+  }
+
+  return out;
+}
+
+static uint32_t BinarySpectrumFloat(float* spectrum,
+                                    SpectrumType* threshold_spectrum,
+                                    int* threshold_initialized) {
+  int i = kBandFirst;
+  uint32_t out = 0;
+  // Smoothing step 1/64 == 2^-6, matching the fixed-point version's shift.
+  const float kScale = 1 / 64.0;
+
+  if (!(*threshold_initialized)) {
+    // Set the |threshold_spectrum| to half the input |spectrum| as starting
+    // value. This speeds up the convergence.
+    for (i = kBandFirst; i <= kBandLast; i++) {
+      if (spectrum[i] > 0.0f) {
+        threshold_spectrum[i].float_ = (spectrum[i] / 2);
+        // Any nonzero band marks the thresholds as seeded.
+        *threshold_initialized = 1;
+      }
+    }
+  }
+
+  // Bit k of |out| corresponds to band kBandFirst + k; it is set when the
+  // band energy exceeds its running-mean threshold.
+  for (i = kBandFirst; i <= kBandLast; i++) {
+    // Update the |threshold_spectrum|.
+    MeanEstimatorFloat(spectrum[i], kScale, &(threshold_spectrum[i].float_));
+    // Convert |spectrum| at current frequency bin to a binary value.
+    if (spectrum[i] > threshold_spectrum[i].float_) {
+      out = SetBit(out, i - kBandFirst);
+    }
+  }
+
+  return out;
+}
+
+// Releases all memory owned by the wrapper instance. Returns 0 on success,
+// -1 if |handle| is NULL.
+int WebRtc_FreeDelayEstimator(void* handle) {
+  DelayEstimator* self = (DelayEstimator*) handle;
+
+  if (self == NULL) {
+    return -1;
+  }
+
+  // free(NULL) is a defined no-op, so partially constructed instances (with
+  // unallocated spectrum buffers) are handled without explicit NULL checks.
+  free(self->mean_far_spectrum);
+  self->mean_far_spectrum = NULL;
+  free(self->mean_near_spectrum);
+  self->mean_near_spectrum = NULL;
+
+  WebRtc_FreeBinaryDelayEstimator(self->binary_handle);
+  free(self);
+
+  return 0;
+}
+
+// Allocates a wrapper instance plus its binary estimator and spectrum
+// threshold buffers. Returns 0 on success, -1 on invalid arguments or
+// allocation failure; on failure *handle is left NULL.
+int WebRtc_CreateDelayEstimator(void** handle,
+                                int spectrum_size,
+                                int max_delay,
+                                int lookahead) {
+  DelayEstimator* self = NULL;
+
+  // Check if the sub band used in the delay estimation is small enough to fit
+  // the binary spectra in a uint32_t.
+  assert(kBandLast - kBandFirst < 32);
+
+  if (handle == NULL) {
+    return -1;
+  }
+  // The binarization loops read |spectrum[kBandLast]|, so the spectra must
+  // hold at least |kBandLast| + 1 elements. The previous < comparison let
+  // spectrum_size == kBandLast through, reading one element out of bounds.
+  if (spectrum_size <= kBandLast) {
+    return -1;
+  }
+
+  self = malloc(sizeof(DelayEstimator));
+  *handle = self;
+  if (self == NULL) {
+    return -1;
+  }
+
+  self->mean_far_spectrum = NULL;
+  self->mean_near_spectrum = NULL;
+  // Keep the instance in a state WebRtc_FreeDelayEstimator() can clean up
+  // even if the creation call below fails without assigning |binary_handle|.
+  self->binary_handle = NULL;
+
+  // Create binary delay estimator.
+  if (WebRtc_CreateBinaryDelayEstimator(&self->binary_handle,
+                                        max_delay,
+                                        lookahead) != 0) {
+    WebRtc_FreeDelayEstimator(self);
+    self = NULL;
+    *handle = NULL;  // Don't leave the caller with a dangling pointer.
+    return -1;
+  }
+  // Allocate memory for spectrum buffers.
+  self->mean_far_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
+  if (self->mean_far_spectrum == NULL) {
+    WebRtc_FreeDelayEstimator(self);
+    self = NULL;
+    *handle = NULL;
+    return -1;
+  }
+  self->mean_near_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
+  if (self->mean_near_spectrum == NULL) {
+    WebRtc_FreeDelayEstimator(self);
+    self = NULL;
+    *handle = NULL;
+    return -1;
+  }
+
+  self->spectrum_size = spectrum_size;
+
+  return 0;
+}
+
+// Resets the estimator state: re-initializes the binary estimator, zeroes
+// both mean spectra and clears the threshold initialization indicators.
+// Returns 0 on success, -1 on a NULL handle or binary-estimator failure.
+int WebRtc_InitDelayEstimator(void* handle) {
+  DelayEstimator* self = (DelayEstimator*) handle;
+
+  if (self == NULL) {
+    return -1;
+  }
+
+  // Initialize binary delay estimator.
+  if (WebRtc_InitBinaryDelayEstimator(self->binary_handle) != 0) {
+    return -1;
+  }
+  // Set averaged far and near end spectra to zero.
+  memset(self->mean_far_spectrum, 0,
+         sizeof(SpectrumType) * self->spectrum_size);
+  memset(self->mean_near_spectrum, 0,
+         sizeof(SpectrumType) * self->spectrum_size);
+  // Reset initialization indicators.
+  self->far_spectrum_initialized = 0;
+  self->near_spectrum_initialized = 0;
+
+  return 0;
+}
+
+// Binarizes both fixed-point input spectra against their running-mean
+// thresholds and feeds the bit patterns to the binary delay estimator.
+// Returns its result directly: >= 0 estimated delay, -2 insufficient data;
+// -1 on invalid input.
+int WebRtc_DelayEstimatorProcessFix(void* handle,
+                                    uint16_t* far_spectrum,
+                                    uint16_t* near_spectrum,
+                                    int spectrum_size,
+                                    int far_q,
+                                    int near_q) {
+  DelayEstimator* self = (DelayEstimator*) handle;
+  uint32_t binary_far_spectrum = 0;
+  uint32_t binary_near_spectrum = 0;
+
+  if (self == NULL) {
+    return -1;
+  }
+  if (far_spectrum == NULL) {
+    // Empty far end spectrum.
+    return -1;
+  }
+  if (near_spectrum == NULL) {
+    // Empty near end spectrum.
+    return -1;
+  }
+  if (spectrum_size != self->spectrum_size) {
+    // Data sizes don't match.
+    return -1;
+  }
+  if (far_q > 15) {
+    // If |far_q| is larger than 15 we cannot guarantee no wrap around.
+    return -1;
+  }
+  if (near_q > 15) {
+    // If |near_q| is larger than 15 we cannot guarantee no wrap around.
+    return -1;
+  }
+  // NOTE(review): negative |far_q|/|near_q| are not rejected; a negative
+  // Q-domain makes the Q15 conversion shift by more than 15 bits -- confirm
+  // callers never pass negative values.
+
+  // Get binary spectra.
+  binary_far_spectrum = BinarySpectrumFix(far_spectrum,
+                                          self->mean_far_spectrum,
+                                          far_q,
+                                          &(self->far_spectrum_initialized));
+  binary_near_spectrum = BinarySpectrumFix(near_spectrum,
+                                           self->mean_near_spectrum,
+                                           near_q,
+                                           &(self->near_spectrum_initialized));
+
+  return WebRtc_ProcessBinarySpectrum(self->binary_handle,
+                                      binary_far_spectrum,
+                                      binary_near_spectrum);
+}
+
+// Floating-point counterpart of WebRtc_DelayEstimatorProcessFix(); no
+// Q-domain arguments are needed. Same return convention: >= 0 delay,
+// -2 insufficient data, -1 invalid input.
+int WebRtc_DelayEstimatorProcessFloat(void* handle,
+                                      float* far_spectrum,
+                                      float* near_spectrum,
+                                      int spectrum_size) {
+  DelayEstimator* self = (DelayEstimator*) handle;
+  uint32_t binary_far_spectrum = 0;
+  uint32_t binary_near_spectrum = 0;
+
+  if (self == NULL) {
+    return -1;
+  }
+  if (far_spectrum == NULL) {
+    // Empty far end spectrum.
+    return -1;
+  }
+  if (near_spectrum == NULL) {
+    // Empty near end spectrum.
+    return -1;
+  }
+  if (spectrum_size != self->spectrum_size) {
+    // Data sizes don't match.
+    return -1;
+  }
+
+  // Get binary spectra.
+  binary_far_spectrum = BinarySpectrumFloat(far_spectrum,
+                                            self->mean_far_spectrum,
+                                            &(self->far_spectrum_initialized));
+  binary_near_spectrum = BinarySpectrumFloat(near_spectrum,
+                                             self->mean_near_spectrum,
+                                             &(self->near_spectrum_initialized));
+
+  return WebRtc_ProcessBinarySpectrum(self->binary_handle,
+                                      binary_far_spectrum,
+                                      binary_near_spectrum);
+}
+
+// Returns the last delay computed by the binary estimator, or -1 for a NULL
+// handle. Pure forwarding accessor; no state is modified.
+int WebRtc_last_delay(void* handle) {
+  DelayEstimator* self = (DelayEstimator*) handle;
+  return (self == NULL) ? -1 : WebRtc_binary_last_delay(self->binary_handle);
+}
diff --git a/trunk/src/modules/audio_processing/utility/delay_estimator_wrapper.h b/trunk/src/modules/audio_processing/utility/delay_estimator_wrapper.h
new file mode 100644
index 0000000..2a47b5d
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/delay_estimator_wrapper.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Performs delay estimation on block by block basis.
+// The return value is  0 - OK and -1 - Error, unless otherwise stated.
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
+
+#include "typedefs.h"
+
+// Releases the memory allocated by WebRtc_CreateDelayEstimator(...)
+// Input:
+//      - handle        : Pointer to the delay estimation instance.
+//
+int WebRtc_FreeDelayEstimator(void* handle);
+
+// Allocates the memory needed by the delay estimation. The memory needs to be
+// initialized separately through WebRtc_InitDelayEstimator(...).
+//
+// Inputs:
+//      - handle        : Instance that should be created.
+//      - spectrum_size : Size of the spectrum used both in far-end and
+//                        near-end. Used to allocate memory for spectrum
+//                        specific buffers.
+//      - max_delay     : The maximum delay which can be estimated. Needed to
+//                        allocate memory for history buffers.
+//      - lookahead     : Amount of non-causal lookahead to use. This can
+//                        detect cases in which a near-end signal occurs before
+//                        the corresponding far-end signal. It will delay the
+//                        estimate for the current block by an equal amount,
+//                        and the returned values will be offset by it.
+//
+//                        A value of zero is the typical no-lookahead case.
+//                        This also represents the minimum delay which can be
+//                        estimated.
+//
+// Output:
+//      - handle        : Created instance.
+//
+int WebRtc_CreateDelayEstimator(void** handle,
+                                int spectrum_size,
+                                int max_delay,
+                                int lookahead);
+
+// Initializes the delay estimation instance created with
+// WebRtc_CreateDelayEstimator(...)
+// Input:
+//      - handle        : Pointer to the delay estimation instance.
+//
+// Output:
+//      - handle        : Initialized instance.
+//
+int WebRtc_InitDelayEstimator(void* handle);
+
+// Estimates and returns the delay between the far-end and near-end blocks. The
+// value will be offset by the lookahead (i.e. the lookahead should be
+// subtracted from the returned value).
+// Inputs:
+//      - handle        : Pointer to the delay estimation instance.
+//      - far_spectrum  : Pointer to the far-end spectrum data.
+//      - near_spectrum : Pointer to the near-end spectrum data of the current
+//                        block.
+//      - spectrum_size : The size of the data arrays (same for both far- and
+//                        near-end).
+//      - far_q         : The Q-domain of the far-end data.
+//      - near_q        : The Q-domain of the near-end data.
+//
+// Output:
+//      - handle        : Updated instance.
+//
+// Return value:
+//      - delay         :  >= 0 - Calculated delay value.
+//                        -1    - Error.
+//                        -2    - Insufficient data for estimation.
+//
+int WebRtc_DelayEstimatorProcessFix(void* handle,
+                                    uint16_t* far_spectrum,
+                                    uint16_t* near_spectrum,
+                                    int spectrum_size,
+                                    int far_q,
+                                    int near_q);
+
+// See WebRtc_DelayEstimatorProcessFix() for description.
+int WebRtc_DelayEstimatorProcessFloat(void* handle,
+                                      float* far_spectrum,
+                                      float* near_spectrum,
+                                      int spectrum_size);
+
+// Returns the last calculated delay updated by the function
+// WebRtc_DelayEstimatorProcess(...).
+//
+// Input:
+//      - handle        : Pointer to the delay estimation instance.
+//
+// Return value:
+//      - delay         :  >= 0 - Last calculated delay value.
+//                        -1    - Error.
+//                        -2    - Insufficient data for estimation.
+//
+int WebRtc_last_delay(void* handle);
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
diff --git a/trunk/src/modules/audio_processing/utility/fft4g.c b/trunk/src/modules/audio_processing/utility/fft4g.c
new file mode 100644
index 0000000..cbc4dc3
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/fft4g.c
@@ -0,0 +1,1326 @@
+/*
+ * http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html
+ * Copyright Takuya OOURA, 1996-2001
+ *
+ * You may use, copy, modify and distribute this code for any purpose (include
+ * commercial use) and without fee. Please refer to this package when you modify
+ * this code.
+ *
+ * Changes:
+ * Trivial type modifications by the WebRTC authors.
+ */
+
+/*
+Fast Fourier/Cosine/Sine Transform
+    dimension   :one
+    data length :power of 2
+    decimation  :frequency
+    radix       :4, 2
+    data        :inplace
+    table       :use
+functions
+    cdft: Complex Discrete Fourier Transform
+    rdft: Real Discrete Fourier Transform
+    ddct: Discrete Cosine Transform
+    ddst: Discrete Sine Transform
+    dfct: Cosine Transform of RDFT (Real Symmetric DFT)
+    dfst: Sine Transform of RDFT (Real Anti-symmetric DFT)
+function prototypes
+    void cdft(int, int, float *, int *, float *);
+    void rdft(int, int, float *, int *, float *);
+    void ddct(int, int, float *, int *, float *);
+    void ddst(int, int, float *, int *, float *);
+    void dfct(int, float *, float *, int *, float *);
+    void dfst(int, float *, float *, int *, float *);
+
+
+-------- Complex DFT (Discrete Fourier Transform) --------
+    [definition]
+        <case1>
+            X[k] = sum_j=0^n-1 x[j]*exp(2*pi*i*j*k/n), 0<=k<n
+        <case2>
+            X[k] = sum_j=0^n-1 x[j]*exp(-2*pi*i*j*k/n), 0<=k<n
+        (notes: sum_j=0^n-1 is a summation from j=0 to n-1)
+    [usage]
+        <case1>
+            ip[0] = 0; // first time only
+            cdft(2*n, 1, a, ip, w);
+        <case2>
+            ip[0] = 0; // first time only
+            cdft(2*n, -1, a, ip, w);
+    [parameters]
+        2*n            :data length (int)
+                        n >= 1, n = power of 2
+        a[0...2*n-1]   :input/output data (float *)
+                        input data
+                            a[2*j] = Re(x[j]),
+                            a[2*j+1] = Im(x[j]), 0<=j<n
+                        output data
+                            a[2*k] = Re(X[k]),
+                            a[2*k+1] = Im(X[k]), 0<=k<n
+        ip[0...*]      :work area for bit reversal (int *)
+                        length of ip >= 2+sqrt(n)
+                        strictly,
+                        length of ip >=
+                            2+(1<<(int)(log(n+0.5)/log(2))/2).
+                        ip[0],ip[1] are pointers of the cos/sin table.
+        w[0...n/2-1]   :cos/sin table (float *)
+                        w[],ip[] are initialized if ip[0] == 0.
+    [remark]
+        Inverse of
+            cdft(2*n, -1, a, ip, w);
+        is
+            cdft(2*n, 1, a, ip, w);
+            for (j = 0; j <= 2 * n - 1; j++) {
+                a[j] *= 1.0 / n;
+            }
+        .
+
+
+-------- Real DFT / Inverse of Real DFT --------
+    [definition]
+        <case1> RDFT
+            R[k] = sum_j=0^n-1 a[j]*cos(2*pi*j*k/n), 0<=k<=n/2
+            I[k] = sum_j=0^n-1 a[j]*sin(2*pi*j*k/n), 0<k<n/2
+        <case2> IRDFT (excluding scale)
+            a[k] = (R[0] + R[n/2]*cos(pi*k))/2 +
+                   sum_j=1^n/2-1 R[j]*cos(2*pi*j*k/n) +
+                   sum_j=1^n/2-1 I[j]*sin(2*pi*j*k/n), 0<=k<n
+    [usage]
+        <case1>
+            ip[0] = 0; // first time only
+            rdft(n, 1, a, ip, w);
+        <case2>
+            ip[0] = 0; // first time only
+            rdft(n, -1, a, ip, w);
+    [parameters]
+        n              :data length (int)
+                        n >= 2, n = power of 2
+        a[0...n-1]     :input/output data (float *)
+                        <case1>
+                            output data
+                                a[2*k] = R[k], 0<=k<n/2
+                                a[2*k+1] = I[k], 0<k<n/2
+                                a[1] = R[n/2]
+                        <case2>
+                            input data
+                                a[2*j] = R[j], 0<=j<n/2
+                                a[2*j+1] = I[j], 0<j<n/2
+                                a[1] = R[n/2]
+        ip[0...*]      :work area for bit reversal (int *)
+                        length of ip >= 2+sqrt(n/2)
+                        strictly,
+                        length of ip >=
+                            2+(1<<(int)(log(n/2+0.5)/log(2))/2).
+                        ip[0],ip[1] are pointers of the cos/sin table.
+        w[0...n/2-1]   :cos/sin table (float *)
+                        w[],ip[] are initialized if ip[0] == 0.
+    [remark]
+        Inverse of
+            rdft(n, 1, a, ip, w);
+        is
+            rdft(n, -1, a, ip, w);
+            for (j = 0; j <= n - 1; j++) {
+                a[j] *= 2.0 / n;
+            }
+        .
+
+
+-------- DCT (Discrete Cosine Transform) / Inverse of DCT --------
+    [definition]
+        <case1> IDCT (excluding scale)
+            C[k] = sum_j=0^n-1 a[j]*cos(pi*j*(k+1/2)/n), 0<=k<n
+        <case2> DCT
+            C[k] = sum_j=0^n-1 a[j]*cos(pi*(j+1/2)*k/n), 0<=k<n
+    [usage]
+        <case1>
+            ip[0] = 0; // first time only
+            ddct(n, 1, a, ip, w);
+        <case2>
+            ip[0] = 0; // first time only
+            ddct(n, -1, a, ip, w);
+    [parameters]
+        n              :data length (int)
+                        n >= 2, n = power of 2
+        a[0...n-1]     :input/output data (float *)
+                        output data
+                            a[k] = C[k], 0<=k<n
+        ip[0...*]      :work area for bit reversal (int *)
+                        length of ip >= 2+sqrt(n/2)
+                        strictly,
+                        length of ip >=
+                            2+(1<<(int)(log(n/2+0.5)/log(2))/2).
+                        ip[0],ip[1] are pointers of the cos/sin table.
+        w[0...n*5/4-1] :cos/sin table (float *)
+                        w[],ip[] are initialized if ip[0] == 0.
+    [remark]
+        Inverse of
+            ddct(n, -1, a, ip, w);
+        is
+            a[0] *= 0.5;
+            ddct(n, 1, a, ip, w);
+            for (j = 0; j <= n - 1; j++) {
+                a[j] *= 2.0 / n;
+            }
+        .
+
+
+-------- DST (Discrete Sine Transform) / Inverse of DST --------
+    [definition]
+        <case1> IDST (excluding scale)
+            S[k] = sum_j=1^n A[j]*sin(pi*j*(k+1/2)/n), 0<=k<n
+        <case2> DST
+            S[k] = sum_j=0^n-1 a[j]*sin(pi*(j+1/2)*k/n), 0<k<=n
+    [usage]
+        <case1>
+            ip[0] = 0; // first time only
+            ddst(n, 1, a, ip, w);
+        <case2>
+            ip[0] = 0; // first time only
+            ddst(n, -1, a, ip, w);
+    [parameters]
+        n              :data length (int)
+                        n >= 2, n = power of 2
+        a[0...n-1]     :input/output data (float *)
+                        <case1>
+                            input data
+                                a[j] = A[j], 0<j<n
+                                a[0] = A[n]
+                            output data
+                                a[k] = S[k], 0<=k<n
+                        <case2>
+                            output data
+                                a[k] = S[k], 0<k<n
+                                a[0] = S[n]
+        ip[0...*]      :work area for bit reversal (int *)
+                        length of ip >= 2+sqrt(n/2)
+                        strictly,
+                        length of ip >=
+                            2+(1<<(int)(log(n/2+0.5)/log(2))/2).
+                        ip[0],ip[1] are pointers of the cos/sin table.
+        w[0...n*5/4-1] :cos/sin table (float *)
+                        w[],ip[] are initialized if ip[0] == 0.
+    [remark]
+        Inverse of
+            ddst(n, -1, a, ip, w);
+        is
+            a[0] *= 0.5;
+            ddst(n, 1, a, ip, w);
+            for (j = 0; j <= n - 1; j++) {
+                a[j] *= 2.0 / n;
+            }
+        .
+
+
+-------- Cosine Transform of RDFT (Real Symmetric DFT) --------
+    [definition]
+        C[k] = sum_j=0^n a[j]*cos(pi*j*k/n), 0<=k<=n
+    [usage]
+        ip[0] = 0; // first time only
+        dfct(n, a, t, ip, w);
+    [parameters]
+        n              :data length - 1 (int)
+                        n >= 2, n = power of 2
+        a[0...n]       :input/output data (float *)
+                        output data
+                            a[k] = C[k], 0<=k<=n
+        t[0...n/2]     :work area (float *)
+        ip[0...*]      :work area for bit reversal (int *)
+                        length of ip >= 2+sqrt(n/4)
+                        strictly,
+                        length of ip >=
+                            2+(1<<(int)(log(n/4+0.5)/log(2))/2).
+                        ip[0],ip[1] are pointers of the cos/sin table.
+        w[0...n*5/8-1] :cos/sin table (float *)
+                        w[],ip[] are initialized if ip[0] == 0.
+    [remark]
+        Inverse of
+            a[0] *= 0.5;
+            a[n] *= 0.5;
+            dfct(n, a, t, ip, w);
+        is
+            a[0] *= 0.5;
+            a[n] *= 0.5;
+            dfct(n, a, t, ip, w);
+            for (j = 0; j <= n; j++) {
+                a[j] *= 2.0 / n;
+            }
+        .
+
+
+-------- Sine Transform of RDFT (Real Anti-symmetric DFT) --------
+    [definition]
+        S[k] = sum_j=1^n-1 a[j]*sin(pi*j*k/n), 0<k<n
+    [usage]
+        ip[0] = 0; // first time only
+        dfst(n, a, t, ip, w);
+    [parameters]
+        n              :data length + 1 (int)
+                        n >= 2, n = power of 2
+        a[0...n-1]     :input/output data (float *)
+                        output data
+                            a[k] = S[k], 0<k<n
+                        (a[0] is used for work area)
+        t[0...n/2-1]   :work area (float *)
+        ip[0...*]      :work area for bit reversal (int *)
+                        length of ip >= 2+sqrt(n/4)
+                        strictly,
+                        length of ip >=
+                            2+(1<<(int)(log(n/4+0.5)/log(2))/2).
+                        ip[0],ip[1] are pointers of the cos/sin table.
+        w[0...n*5/8-1] :cos/sin table (float *)
+                        w[],ip[] are initialized if ip[0] == 0.
+    [remark]
+        Inverse of
+            dfst(n, a, t, ip, w);
+        is
+            dfst(n, a, t, ip, w);
+            for (j = 1; j <= n - 1; j++) {
+                a[j] *= 2.0 / n;
+            }
+        .
+
+
+Appendix :
+    The cos/sin table is recalculated when the larger table required.
+    w[] and ip[] are compatible with all routines.
+*/
+
+static void makewt(int nw, int *ip, float *w);
+static void makect(int nc, int *ip, float *c);
+static void bitrv2(int n, int *ip, float *a);
+static void bitrv2conj(int n, int *ip, float *a);
+static void cftfsub(int n, float *a, float *w);
+static void cftbsub(int n, float *a, float *w);
+static void cft1st(int n, float *a, float *w);
+static void cftmdl(int n, int l, float *a, float *w);
+static void rftfsub(int n, float *a, int nc, float *c);
+static void rftbsub(int n, float *a, int nc, float *c);
+#if 0  // Not used.
+static void dctsub(int n, float *a, int nc, float *c)
+static void dstsub(int n, float *a, int nc, float *c)
+#endif
+
+
+// In-place complex DFT of n/2 points stored as interleaved re/im pairs in
+// |a| (see the usage comment at the top of this file: isgn >= 0 selects the
+// exp(+2*pi*i*j*k/n) transform, isgn < 0 its conjugate). ip[0] caches the
+// twiddle-table size in |w|; the table is rebuilt when a larger transform is
+// requested.
+void WebRtc_cdft(int n, int isgn, float *a, int *ip, float *w)
+{
+    if (n > (ip[0] << 2)) {
+        makewt(n >> 2, ip, w);
+    }
+    if (n > 4) {
+        if (isgn >= 0) {
+            bitrv2(n, ip + 2, a);
+            cftfsub(n, a, w);
+        } else {
+            bitrv2conj(n, ip + 2, a);
+            cftbsub(n, a, w);
+        }
+    } else if (n == 4) {
+        // n == 4 needs no bit reversal; forward and inverse coincide here.
+        cftfsub(n, a, w);
+    }
+}
+
+
+// Real Discrete Fourier Transform (Ooura fft4g, float port).
+// isgn >= 0: forward transform of n real samples; isgn < 0: the inverse
+// (per the package conventions, the caller scales by 2/n afterwards).
+// ip[0] / ip[1] cache the sizes of the twiddle table w[] and the auxiliary
+// cos/sin table (stored at w + nw); both are rebuilt here when too small.
+void WebRtc_rdft(int n, int isgn, float *a, int *ip, float *w)
+{
+    int nw, nc;
+    float xi;
+
+    nw = ip[0];
+    if (n > (nw << 2)) {
+        nw = n >> 2;
+        makewt(nw, ip, w);
+    }
+    nc = ip[1];
+    if (n > (nc << 2)) {
+        nc = n >> 2;
+        makect(nc, ip, w + nw);
+    }
+    if (isgn >= 0) {
+        if (n > 4) {
+            bitrv2(n, ip + 2, a);
+            cftfsub(n, a, w);
+            rftfsub(n, a, nc, w + nw);
+        } else if (n == 4) {
+            cftfsub(n, a, w);
+        }
+        // Final radix-2 fix-up combining a[0] and a[1].
+        xi = a[0] - a[1];
+        a[0] += a[1];
+        a[1] = xi;
+    } else {
+        // Undo the a[0]/a[1] packing (inverse of the isgn >= 0 fix-up),
+        // then run the conjugate transform chain in reverse order.
+        a[1] = 0.5f * (a[0] - a[1]);
+        a[0] -= a[1];
+        if (n > 4) {
+            rftbsub(n, a, nc, w + nw);
+            bitrv2(n, ip + 2, a);
+            cftbsub(n, a, w);
+        } else if (n == 4) {
+            cftfsub(n, a, w);
+        }
+    }
+}
+
+#if 0  // Not used.
+// Discrete Cosine Transform (isgn >= 0) / its inverse (isgn < 0).
+// Kept for reference only: this definition sits inside the surrounding
+// `#if 0  // Not used.` and is excluded from the build.
+static void ddct(int n, int isgn, float *a, int *ip, float *w)
+{
+    int j, nw, nc;
+    float xr;
+
+    nw = ip[0];
+    if (n > (nw << 2)) {
+        nw = n >> 2;
+        makewt(nw, ip, w);
+    }
+    nc = ip[1];
+    // Note: the DCT needs a full-length cos/sin table (nc == n), unlike
+    // rdft which only needs n/4 entries.
+    if (n > nc) {
+        nc = n;
+        makect(nc, ip, w + nw);
+    }
+    if (isgn < 0) {
+        // Inverse: difference/sum pre-pass, then the backward real FFT chain.
+        xr = a[n - 1];
+        for (j = n - 2; j >= 2; j -= 2) {
+            a[j + 1] = a[j] - a[j - 1];
+            a[j] += a[j - 1];
+        }
+        a[1] = a[0] - xr;
+        a[0] += xr;
+        if (n > 4) {
+            rftbsub(n, a, nc, w + nw);
+            bitrv2(n, ip + 2, a);
+            cftbsub(n, a, w);
+        } else if (n == 4) {
+            cftfsub(n, a, w);
+        }
+    }
+    dctsub(n, a, nc, w + nw);
+    if (isgn >= 0) {
+        // Forward: real FFT chain, then difference/sum post-pass.
+        if (n > 4) {
+            bitrv2(n, ip + 2, a);
+            cftfsub(n, a, w);
+            rftfsub(n, a, nc, w + nw);
+        } else if (n == 4) {
+            cftfsub(n, a, w);
+        }
+        xr = a[0] - a[1];
+        a[0] += a[1];
+        for (j = 2; j < n; j += 2) {
+            a[j - 1] = a[j] - a[j + 1];
+            a[j] += a[j + 1];
+        }
+        a[n - 1] = xr;
+    }
+}
+
+
+// Discrete Sine Transform (isgn >= 0) / its inverse (isgn < 0).
+// Mirror of ddct() above with sign flips appropriate for the sine basis.
+// Kept for reference only: compiled out by the surrounding `#if 0`.
+static void ddst(int n, int isgn, float *a, int *ip, float *w)
+{
+    int j, nw, nc;
+    float xr;
+
+    nw = ip[0];
+    if (n > (nw << 2)) {
+        nw = n >> 2;
+        makewt(nw, ip, w);
+    }
+    nc = ip[1];
+    // Full-length cos/sin table required (nc == n), as in ddct().
+    if (n > nc) {
+        nc = n;
+        makect(nc, ip, w + nw);
+    }
+    if (isgn < 0) {
+        // Inverse: signed difference/sum pre-pass, then backward FFT chain.
+        xr = a[n - 1];
+        for (j = n - 2; j >= 2; j -= 2) {
+            a[j + 1] = -a[j] - a[j - 1];
+            a[j] -= a[j - 1];
+        }
+        a[1] = a[0] + xr;
+        a[0] -= xr;
+        if (n > 4) {
+            rftbsub(n, a, nc, w + nw);
+            bitrv2(n, ip + 2, a);
+            cftbsub(n, a, w);
+        } else if (n == 4) {
+            cftfsub(n, a, w);
+        }
+    }
+    dstsub(n, a, nc, w + nw);
+    if (isgn >= 0) {
+        // Forward: real FFT chain, then signed difference/sum post-pass.
+        if (n > 4) {
+            bitrv2(n, ip + 2, a);
+            cftfsub(n, a, w);
+            rftfsub(n, a, nc, w + nw);
+        } else if (n == 4) {
+            cftfsub(n, a, w);
+        }
+        xr = a[0] - a[1];
+        a[0] += a[1];
+        for (j = 2; j < n; j += 2) {
+            a[j - 1] = -a[j] - a[j + 1];
+            a[j] -= a[j + 1];
+        }
+        a[n - 1] = -xr;
+    }
+}
+
+
+// Cosine Transform of RDFT (real symmetric DFT); operates on a[0..n] with
+// work arrays t[] (length n/2) and the shared ip[]/w[] tables.
+// Compiled out by the surrounding `#if 0`; kept for reference.
+static void dfct(int n, float *a, float *t, int *ip, float *w)
+{
+    int j, k, l, m, mh, nw, nc;
+    float xr, xi, yr, yi;
+
+    nw = ip[0];
+    if (n > (nw << 3)) {
+        nw = n >> 3;
+        makewt(nw, ip, w);
+    }
+    nc = ip[1];
+    if (n > (nc << 1)) {
+        nc = n >> 1;
+        makect(nc, ip, w + nw);
+    }
+    // Fold the symmetric input into sums (t[]) and differences (a[]).
+    m = n >> 1;
+    yi = a[m];
+    xi = a[0] + a[n];
+    a[0] -= a[n];
+    t[0] = xi - yi;
+    t[m] = xi + yi;
+    if (n > 2) {
+        mh = m >> 1;
+        for (j = 1; j < mh; j++) {
+            k = m - j;
+            xr = a[j] - a[n - j];
+            xi = a[j] + a[n - j];
+            yr = a[k] - a[n - k];
+            yi = a[k] + a[n - k];
+            a[j] = xr;
+            a[k] = yr;
+            t[j] = xi - yi;
+            t[k] = xi + yi;
+        }
+        t[mh] = a[mh] + a[n - mh];
+        a[mh] -= a[n - mh];
+        // Half-length DCT of the difference part.
+        dctsub(m, a, nc, w + nw);
+        if (m > 4) {
+            bitrv2(m, ip + 2, a);
+            cftfsub(m, a, w);
+            rftfsub(m, a, nc, w + nw);
+        } else if (m == 4) {
+            cftfsub(m, a, w);
+        }
+        a[n - 1] = a[0] - a[1];
+        a[1] = a[0] + a[1];
+        for (j = m - 2; j >= 2; j -= 2) {
+            a[2 * j + 1] = a[j] + a[j + 1];
+            a[2 * j - 1] = a[j] - a[j + 1];
+        }
+        // Recursively halve the sum part, scattering results into a[]
+        // with stride l doubling each pass.
+        l = 2;
+        m = mh;
+        while (m >= 2) {
+            dctsub(m, t, nc, w + nw);
+            if (m > 4) {
+                bitrv2(m, ip + 2, t);
+                cftfsub(m, t, w);
+                rftfsub(m, t, nc, w + nw);
+            } else if (m == 4) {
+                cftfsub(m, t, w);
+            }
+            a[n - l] = t[0] - t[1];
+            a[l] = t[0] + t[1];
+            k = 0;
+            for (j = 2; j < m; j += 2) {
+                k += l << 2;
+                a[k - l] = t[j] - t[j + 1];
+                a[k + l] = t[j] + t[j + 1];
+            }
+            l <<= 1;
+            mh = m >> 1;
+            for (j = 0; j < mh; j++) {
+                k = m - j;
+                t[j] = t[m + k] - t[m + j];
+                t[k] = t[m + k] + t[m + j];
+            }
+            t[mh] = t[m + mh];
+            m = mh;
+        }
+        a[l] = t[0];
+        a[n] = t[2] - t[1];
+        a[0] = t[2] + t[1];
+    } else {
+        // n == 2 base case.
+        a[1] = a[0];
+        a[2] = t[0];
+        a[0] = t[1];
+    }
+}
+
+// Sine Transform of RDFT (real anti-symmetric DFT); see the [definition]
+// comment above for the exact convention (a[0] is work area, output a[0]=0).
+// Compiled out by the surrounding `#if 0`; kept for reference.
+static void dfst(int n, float *a, float *t, int *ip, float *w)
+{
+    int j, k, l, m, mh, nw, nc;
+    float xr, xi, yr, yi;
+
+    nw = ip[0];
+    if (n > (nw << 3)) {
+        nw = n >> 3;
+        makewt(nw, ip, w);
+    }
+    nc = ip[1];
+    if (n > (nc << 1)) {
+        nc = n >> 1;
+        makect(nc, ip, w + nw);
+    }
+    if (n > 2) {
+        // Fold the input into sums (a[]) and differences (t[]).
+        m = n >> 1;
+        mh = m >> 1;
+        for (j = 1; j < mh; j++) {
+            k = m - j;
+            xr = a[j] + a[n - j];
+            xi = a[j] - a[n - j];
+            yr = a[k] + a[n - k];
+            yi = a[k] - a[n - k];
+            a[j] = xr;
+            a[k] = yr;
+            t[j] = xi + yi;
+            t[k] = xi - yi;
+        }
+        t[0] = a[mh] - a[n - mh];
+        a[mh] += a[n - mh];
+        a[0] = a[m];
+        // Half-length DST of the sum part.
+        dstsub(m, a, nc, w + nw);
+        if (m > 4) {
+            bitrv2(m, ip + 2, a);
+            cftfsub(m, a, w);
+            rftfsub(m, a, nc, w + nw);
+        } else if (m == 4) {
+            cftfsub(m, a, w);
+        }
+        a[n - 1] = a[1] - a[0];
+        a[1] = a[0] + a[1];
+        for (j = m - 2; j >= 2; j -= 2) {
+            a[2 * j + 1] = a[j] - a[j + 1];
+            a[2 * j - 1] = -a[j] - a[j + 1];
+        }
+        // Recursively halve the difference part, scattering into a[]
+        // with stride l doubling each pass.
+        l = 2;
+        m = mh;
+        while (m >= 2) {
+            dstsub(m, t, nc, w + nw);
+            if (m > 4) {
+                bitrv2(m, ip + 2, t);
+                cftfsub(m, t, w);
+                rftfsub(m, t, nc, w + nw);
+            } else if (m == 4) {
+                cftfsub(m, t, w);
+            }
+            a[n - l] = t[1] - t[0];
+            a[l] = t[0] + t[1];
+            k = 0;
+            for (j = 2; j < m; j += 2) {
+                k += l << 2;
+                a[k - l] = -t[j] - t[j + 1];
+                a[k + l] = t[j] - t[j + 1];
+            }
+            l <<= 1;
+            mh = m >> 1;
+            for (j = 1; j < mh; j++) {
+                k = m - j;
+                t[j] = t[m + k] + t[m + j];
+                t[k] = t[m + k] - t[m + j];
+            }
+            t[0] = t[m + mh];
+            m = mh;
+        }
+        a[l] = t[0];
+    }
+    // Per the convention documented above, a[0] is work area only.
+    a[0] = 0;
+}
+#endif  // Not used.
+
+
+/* -------- initializing routines -------- */
+
+
+#include <math.h>
+
+// Builds the twiddle (cos/sin) table w[0..nw-1] and records its size in
+// ip[0].  ip[1] is reset to 1, which forces the cos/sin table built by
+// makect() (stored after w) to be regenerated on its next use.
+// The table covers the first octant (delta = (pi/4)/nwh); the upper half
+// is filled by symmetry and the whole table is bit-reversed so that it can
+// be indexed in the order the butterfly routines consume it.
+static void makewt(int nw, int *ip, float *w)
+{
+    int j, nwh;
+    float delta, x, y;
+
+    ip[0] = nw;
+    ip[1] = 1;
+    if (nw > 2) {
+        nwh = nw >> 1;
+        delta = (float)atan(1.0f) / nwh;  // atan(1) == pi/4
+        w[0] = 1;
+        w[1] = 0;
+        w[nwh] = (float)cos(delta * nwh);
+        w[nwh + 1] = w[nwh];
+        if (nwh > 2) {
+            for (j = 2; j < nwh; j += 2) {
+                x = (float)cos(delta * j);
+                y = (float)sin(delta * j);
+                w[j] = x;
+                w[j + 1] = y;
+                // Mirror into the top half (cos/sin swapped by symmetry).
+                w[nw - j] = y;
+                w[nw - j + 1] = x;
+            }
+            bitrv2(nw, ip + 2, w);
+        }
+    }
+}
+
+
+// Builds the auxiliary cos/sin table c[0..nc-1] used by rftfsub/rftbsub
+// (and the disabled DCT/DST helpers) and records its size in ip[1].
+// Entries are pre-scaled by 0.5 as the consumers expect.
+static void makect(int nc, int *ip, float *c)
+{
+    int j, nch;
+    float delta;
+
+    ip[1] = nc;
+    if (nc > 1) {
+        nch = nc >> 1;
+        delta = (float)atan(1.0f) / nch;  // atan(1) == pi/4
+        c[0] = (float)cos(delta * nch);
+        c[nch] = 0.5f * c[0];
+        for (j = 1; j < nch; j++) {
+            // Cosines fill the bottom half, sines the top half.
+            c[j] = 0.5f * (float)cos(delta * j);
+            c[nc - j] = 0.5f * (float)sin(delta * j);
+        }
+    }
+}
+
+
+/* -------- child routines -------- */
+
+
+// In-place bit-reversal permutation of the n/2 complex (re/im float pair)
+// elements of a[].  The permutation index table is rebuilt in ip[] on every
+// call (ip[0] is zeroed first).  The two top-level branches cover the two
+// parities of log2(n) relative to the table size m.
+static void bitrv2(int n, int *ip, float *a)
+{
+    int j, j1, k, k1, l, m, m2;
+    float xr, xi, yr, yi;
+
+    // Build the index table: ip[j] is the scaled bit-reversal of j.
+    ip[0] = 0;
+    l = n;
+    m = 1;
+    while ((m << 3) < l) {
+        l >>= 1;
+        for (j = 0; j < m; j++) {
+            ip[m + j] = ip[j] + l;
+        }
+        m <<= 1;
+    }
+    m2 = 2 * m;
+    if ((m << 3) == l) {
+        // Each (j, k) pair below swaps four complex elements a[j1] <-> a[k1].
+        for (k = 0; k < m; k++) {
+            for (j = 0; j < k; j++) {
+                j1 = 2 * j + ip[k];
+                k1 = 2 * k + ip[j];
+                xr = a[j1];
+                xi = a[j1 + 1];
+                yr = a[k1];
+                yi = a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 += 2 * m2;
+                xr = a[j1];
+                xi = a[j1 + 1];
+                yr = a[k1];
+                yi = a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 -= m2;
+                xr = a[j1];
+                xi = a[j1 + 1];
+                yr = a[k1];
+                yi = a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 += 2 * m2;
+                xr = a[j1];
+                xi = a[j1 + 1];
+                yr = a[k1];
+                yi = a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+            }
+            // Diagonal (j == k) element: one remaining swap.
+            j1 = 2 * k + m2 + ip[k];
+            k1 = j1 + m2;
+            xr = a[j1];
+            xi = a[j1 + 1];
+            yr = a[k1];
+            yi = a[k1 + 1];
+            a[j1] = yr;
+            a[j1 + 1] = yi;
+            a[k1] = xr;
+            a[k1 + 1] = xi;
+        }
+    } else {
+        // Other parity: two complex swaps per (j, k) pair, diagonal fixed.
+        for (k = 1; k < m; k++) {
+            for (j = 0; j < k; j++) {
+                j1 = 2 * j + ip[k];
+                k1 = 2 * k + ip[j];
+                xr = a[j1];
+                xi = a[j1 + 1];
+                yr = a[k1];
+                yi = a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 += m2;
+                xr = a[j1];
+                xi = a[j1 + 1];
+                yr = a[k1];
+                yi = a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+            }
+        }
+    }
+}
+
+
+// Same bit-reversal permutation as bitrv2(), but every element is also
+// conjugated (imaginary part negated) while being moved — including the
+// fixed points, which get explicit sign flips.  Used on the backward path
+// of WebRtc_cdft().
+static void bitrv2conj(int n, int *ip, float *a)
+{
+    int j, j1, k, k1, l, m, m2;
+    float xr, xi, yr, yi;
+
+    // Build the index table exactly as in bitrv2().
+    ip[0] = 0;
+    l = n;
+    m = 1;
+    while ((m << 3) < l) {
+        l >>= 1;
+        for (j = 0; j < m; j++) {
+            ip[m + j] = ip[j] + l;
+        }
+        m <<= 1;
+    }
+    m2 = 2 * m;
+    if ((m << 3) == l) {
+        for (k = 0; k < m; k++) {
+            for (j = 0; j < k; j++) {
+                // Conjugating swap: a[j1] <-> conj(a[k1]), four pairs.
+                j1 = 2 * j + ip[k];
+                k1 = 2 * k + ip[j];
+                xr = a[j1];
+                xi = -a[j1 + 1];
+                yr = a[k1];
+                yi = -a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 += 2 * m2;
+                xr = a[j1];
+                xi = -a[j1 + 1];
+                yr = a[k1];
+                yi = -a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 -= m2;
+                xr = a[j1];
+                xi = -a[j1 + 1];
+                yr = a[k1];
+                yi = -a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 += 2 * m2;
+                xr = a[j1];
+                xi = -a[j1 + 1];
+                yr = a[k1];
+                yi = -a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+            }
+            // Fixed points of the permutation still need conjugation.
+            k1 = 2 * k + ip[k];
+            a[k1 + 1] = -a[k1 + 1];
+            j1 = k1 + m2;
+            k1 = j1 + m2;
+            xr = a[j1];
+            xi = -a[j1 + 1];
+            yr = a[k1];
+            yi = -a[k1 + 1];
+            a[j1] = yr;
+            a[j1 + 1] = yi;
+            a[k1] = xr;
+            a[k1 + 1] = xi;
+            k1 += m2;
+            a[k1 + 1] = -a[k1 + 1];
+        }
+    } else {
+        // Other parity: conjugate the fixed points a[0]/a[m2] up front.
+        a[1] = -a[1];
+        a[m2 + 1] = -a[m2 + 1];
+        for (k = 1; k < m; k++) {
+            for (j = 0; j < k; j++) {
+                j1 = 2 * j + ip[k];
+                k1 = 2 * k + ip[j];
+                xr = a[j1];
+                xi = -a[j1 + 1];
+                yr = a[k1];
+                yi = -a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+                j1 += m2;
+                k1 += m2;
+                xr = a[j1];
+                xi = -a[j1 + 1];
+                yr = a[k1];
+                yi = -a[k1 + 1];
+                a[j1] = yr;
+                a[j1 + 1] = yi;
+                a[k1] = xr;
+                a[k1 + 1] = xi;
+            }
+            k1 = 2 * k + ip[k];
+            a[k1 + 1] = -a[k1 + 1];
+            a[k1 + m2 + 1] = -a[k1 + m2 + 1];
+        }
+    }
+}
+
+
+// Forward complex FFT butterfly driver: first stage (cft1st), then middle
+// stages (cftmdl) with the sub-transform length l growing by 4x per pass,
+// then one final stage — radix-4 if (l << 2) == n, radix-2 otherwise.
+// Input must already be in bit-reversed order (see bitrv2).
+static void cftfsub(int n, float *a, float *w)
+{
+    int j, j1, j2, j3, l;
+    float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+    l = 2;
+    if (n > 8) {
+        cft1st(n, a, w);
+        l = 8;
+        while ((l << 2) < n) {
+            cftmdl(n, l, a, w);
+            l <<= 2;
+        }
+    }
+    if ((l << 2) == n) {
+        // Final radix-4 stage; no twiddle factors are needed here.
+        for (j = 0; j < l; j += 2) {
+            j1 = j + l;
+            j2 = j1 + l;
+            j3 = j2 + l;
+            x0r = a[j] + a[j1];
+            x0i = a[j + 1] + a[j1 + 1];
+            x1r = a[j] - a[j1];
+            x1i = a[j + 1] - a[j1 + 1];
+            x2r = a[j2] + a[j3];
+            x2i = a[j2 + 1] + a[j3 + 1];
+            x3r = a[j2] - a[j3];
+            x3i = a[j2 + 1] - a[j3 + 1];
+            a[j] = x0r + x2r;
+            a[j + 1] = x0i + x2i;
+            a[j2] = x0r - x2r;
+            a[j2 + 1] = x0i - x2i;
+            a[j1] = x1r - x3i;
+            a[j1 + 1] = x1i + x3r;
+            a[j3] = x1r + x3i;
+            a[j3 + 1] = x1i - x3r;
+        }
+    } else {
+        // Final radix-2 stage.
+        for (j = 0; j < l; j += 2) {
+            j1 = j + l;
+            x0r = a[j] - a[j1];
+            x0i = a[j + 1] - a[j1 + 1];
+            a[j] += a[j1];
+            a[j + 1] += a[j1 + 1];
+            a[j1] = x0r;
+            a[j1 + 1] = x0i;
+        }
+    }
+}
+
+
+// Backward (conjugate) complex FFT butterfly driver; same stage structure
+// as cftfsub() but with the imaginary parts sign-flipped in the final
+// stage, matching the conjugated input produced by bitrv2conj().
+static void cftbsub(int n, float *a, float *w)
+{
+    int j, j1, j2, j3, l;
+    float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+    l = 2;
+    if (n > 8) {
+        cft1st(n, a, w);
+        l = 8;
+        while ((l << 2) < n) {
+            cftmdl(n, l, a, w);
+            l <<= 2;
+        }
+    }
+    if ((l << 2) == n) {
+        // Final radix-4 stage with conjugation folded into the signs.
+        for (j = 0; j < l; j += 2) {
+            j1 = j + l;
+            j2 = j1 + l;
+            j3 = j2 + l;
+            x0r = a[j] + a[j1];
+            x0i = -a[j + 1] - a[j1 + 1];
+            x1r = a[j] - a[j1];
+            x1i = -a[j + 1] + a[j1 + 1];
+            x2r = a[j2] + a[j3];
+            x2i = a[j2 + 1] + a[j3 + 1];
+            x3r = a[j2] - a[j3];
+            x3i = a[j2 + 1] - a[j3 + 1];
+            a[j] = x0r + x2r;
+            a[j + 1] = x0i - x2i;
+            a[j2] = x0r - x2r;
+            a[j2 + 1] = x0i + x2i;
+            a[j1] = x1r - x3i;
+            a[j1 + 1] = x1i - x3r;
+            a[j3] = x1r + x3i;
+            a[j3 + 1] = x1i + x3r;
+        }
+    } else {
+        // Final radix-2 stage with conjugation folded into the signs.
+        for (j = 0; j < l; j += 2) {
+            j1 = j + l;
+            x0r = a[j] - a[j1];
+            x0i = -a[j + 1] + a[j1 + 1];
+            a[j] += a[j1];
+            a[j + 1] = -a[j + 1] - a[j1 + 1];
+            a[j1] = x0r;
+            a[j1 + 1] = x0i;
+        }
+    }
+}
+
+
+// First butterfly stage: processes the data in groups of 16 floats
+// (8 complex points).  The first group needs no twiddles, the second only
+// the single factor w[2]; each subsequent group applies per-group twiddles
+// wk1/wk2 read from w[], with wk3 derived from them on the fly.
+static void cft1st(int n, float *a, float *w)
+{
+    int j, k1, k2;
+    float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+    float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+    // Group 0: twiddle-free radix-4 butterflies.
+    x0r = a[0] + a[2];
+    x0i = a[1] + a[3];
+    x1r = a[0] - a[2];
+    x1i = a[1] - a[3];
+    x2r = a[4] + a[6];
+    x2i = a[5] + a[7];
+    x3r = a[4] - a[6];
+    x3i = a[5] - a[7];
+    a[0] = x0r + x2r;
+    a[1] = x0i + x2i;
+    a[4] = x0r - x2r;
+    a[5] = x0i - x2i;
+    a[2] = x1r - x3i;
+    a[3] = x1i + x3r;
+    a[6] = x1r + x3i;
+    a[7] = x1i - x3r;
+    // Group 1: only the real twiddle w[2] is needed.
+    wk1r = w[2];
+    x0r = a[8] + a[10];
+    x0i = a[9] + a[11];
+    x1r = a[8] - a[10];
+    x1i = a[9] - a[11];
+    x2r = a[12] + a[14];
+    x2i = a[13] + a[15];
+    x3r = a[12] - a[14];
+    x3i = a[13] - a[15];
+    a[8] = x0r + x2r;
+    a[9] = x0i + x2i;
+    a[12] = x2i - x0i;
+    a[13] = x0r - x2r;
+    x0r = x1r - x3i;
+    x0i = x1i + x3r;
+    a[10] = wk1r * (x0r - x0i);
+    a[11] = wk1r * (x0r + x0i);
+    x0r = x3i + x1r;
+    x0i = x3r - x1i;
+    a[14] = wk1r * (x0i - x0r);
+    a[15] = wk1r * (x0i + x0r);
+    // Remaining groups: full twiddled butterflies, two sub-blocks per group.
+    k1 = 0;
+    for (j = 16; j < n; j += 16) {
+        k1 += 2;
+        k2 = 2 * k1;
+        wk2r = w[k1];
+        wk2i = w[k1 + 1];
+        wk1r = w[k2];
+        wk1i = w[k2 + 1];
+        // wk3 = wk1 rotated by -2*wk2 (derived, not stored in the table).
+        wk3r = wk1r - 2 * wk2i * wk1i;
+        wk3i = 2 * wk2i * wk1r - wk1i;
+        x0r = a[j] + a[j + 2];
+        x0i = a[j + 1] + a[j + 3];
+        x1r = a[j] - a[j + 2];
+        x1i = a[j + 1] - a[j + 3];
+        x2r = a[j + 4] + a[j + 6];
+        x2i = a[j + 5] + a[j + 7];
+        x3r = a[j + 4] - a[j + 6];
+        x3i = a[j + 5] - a[j + 7];
+        a[j] = x0r + x2r;
+        a[j + 1] = x0i + x2i;
+        x0r -= x2r;
+        x0i -= x2i;
+        a[j + 4] = wk2r * x0r - wk2i * x0i;
+        a[j + 5] = wk2r * x0i + wk2i * x0r;
+        x0r = x1r - x3i;
+        x0i = x1i + x3r;
+        a[j + 2] = wk1r * x0r - wk1i * x0i;
+        a[j + 3] = wk1r * x0i + wk1i * x0r;
+        x0r = x1r + x3i;
+        x0i = x1i - x3r;
+        a[j + 6] = wk3r * x0r - wk3i * x0i;
+        a[j + 7] = wk3r * x0i + wk3i * x0r;
+        // Second sub-block of the group, with the next wk1 twiddle pair.
+        wk1r = w[k2 + 2];
+        wk1i = w[k2 + 3];
+        wk3r = wk1r - 2 * wk2r * wk1i;
+        wk3i = 2 * wk2r * wk1r - wk1i;
+        x0r = a[j + 8] + a[j + 10];
+        x0i = a[j + 9] + a[j + 11];
+        x1r = a[j + 8] - a[j + 10];
+        x1i = a[j + 9] - a[j + 11];
+        x2r = a[j + 12] + a[j + 14];
+        x2i = a[j + 13] + a[j + 15];
+        x3r = a[j + 12] - a[j + 14];
+        x3i = a[j + 13] - a[j + 15];
+        a[j + 8] = x0r + x2r;
+        a[j + 9] = x0i + x2i;
+        x0r -= x2r;
+        x0i -= x2i;
+        a[j + 12] = -wk2i * x0r - wk2r * x0i;
+        a[j + 13] = -wk2i * x0i + wk2r * x0r;
+        x0r = x1r - x3i;
+        x0i = x1i + x3r;
+        a[j + 10] = wk1r * x0r - wk1i * x0i;
+        a[j + 11] = wk1r * x0i + wk1i * x0r;
+        x0r = x1r + x3i;
+        x0i = x1i - x3r;
+        a[j + 14] = wk3r * x0r - wk3i * x0i;
+        a[j + 15] = wk3r * x0i + wk3i * x0r;
+    }
+}
+
+
+// Middle butterfly stage for sub-transform length l (in floats): radix-4
+// butterflies over groups of m = 4*l floats.  The first group is
+// twiddle-free, the second uses only w[2]; the outer loop handles the
+// remaining groups with per-group twiddles wk1/wk2 and derived wk3,
+// mirroring the structure of cft1st().
+static void cftmdl(int n, int l, float *a, float *w)
+{
+    int j, j1, j2, j3, k, k1, k2, m, m2;
+    float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+    float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+    // Group 0: twiddle-free butterflies.
+    m = l << 2;
+    for (j = 0; j < l; j += 2) {
+        j1 = j + l;
+        j2 = j1 + l;
+        j3 = j2 + l;
+        x0r = a[j] + a[j1];
+        x0i = a[j + 1] + a[j1 + 1];
+        x1r = a[j] - a[j1];
+        x1i = a[j + 1] - a[j1 + 1];
+        x2r = a[j2] + a[j3];
+        x2i = a[j2 + 1] + a[j3 + 1];
+        x3r = a[j2] - a[j3];
+        x3i = a[j2 + 1] - a[j3 + 1];
+        a[j] = x0r + x2r;
+        a[j + 1] = x0i + x2i;
+        a[j2] = x0r - x2r;
+        a[j2 + 1] = x0i - x2i;
+        a[j1] = x1r - x3i;
+        a[j1 + 1] = x1i + x3r;
+        a[j3] = x1r + x3i;
+        a[j3 + 1] = x1i - x3r;
+    }
+    // Group 1: only the real twiddle w[2] is needed.
+    wk1r = w[2];
+    for (j = m; j < l + m; j += 2) {
+        j1 = j + l;
+        j2 = j1 + l;
+        j3 = j2 + l;
+        x0r = a[j] + a[j1];
+        x0i = a[j + 1] + a[j1 + 1];
+        x1r = a[j] - a[j1];
+        x1i = a[j + 1] - a[j1 + 1];
+        x2r = a[j2] + a[j3];
+        x2i = a[j2 + 1] + a[j3 + 1];
+        x3r = a[j2] - a[j3];
+        x3i = a[j2 + 1] - a[j3 + 1];
+        a[j] = x0r + x2r;
+        a[j + 1] = x0i + x2i;
+        a[j2] = x2i - x0i;
+        a[j2 + 1] = x0r - x2r;
+        x0r = x1r - x3i;
+        x0i = x1i + x3r;
+        a[j1] = wk1r * (x0r - x0i);
+        a[j1 + 1] = wk1r * (x0r + x0i);
+        x0r = x3i + x1r;
+        x0i = x3r - x1i;
+        a[j3] = wk1r * (x0i - x0r);
+        a[j3 + 1] = wk1r * (x0i + x0r);
+    }
+    // Remaining groups: fully twiddled butterflies, two passes per group.
+    k1 = 0;
+    m2 = 2 * m;
+    for (k = m2; k < n; k += m2) {
+        k1 += 2;
+        k2 = 2 * k1;
+        wk2r = w[k1];
+        wk2i = w[k1 + 1];
+        wk1r = w[k2];
+        wk1i = w[k2 + 1];
+        // wk3 derived from wk1/wk2 rather than stored in the table.
+        wk3r = wk1r - 2 * wk2i * wk1i;
+        wk3i = 2 * wk2i * wk1r - wk1i;
+        for (j = k; j < l + k; j += 2) {
+            j1 = j + l;
+            j2 = j1 + l;
+            j3 = j2 + l;
+            x0r = a[j] + a[j1];
+            x0i = a[j + 1] + a[j1 + 1];
+            x1r = a[j] - a[j1];
+            x1i = a[j + 1] - a[j1 + 1];
+            x2r = a[j2] + a[j3];
+            x2i = a[j2 + 1] + a[j3 + 1];
+            x3r = a[j2] - a[j3];
+            x3i = a[j2 + 1] - a[j3 + 1];
+            a[j] = x0r + x2r;
+            a[j + 1] = x0i + x2i;
+            x0r -= x2r;
+            x0i -= x2i;
+            a[j2] = wk2r * x0r - wk2i * x0i;
+            a[j2 + 1] = wk2r * x0i + wk2i * x0r;
+            x0r = x1r - x3i;
+            x0i = x1i + x3r;
+            a[j1] = wk1r * x0r - wk1i * x0i;
+            a[j1 + 1] = wk1r * x0i + wk1i * x0r;
+            x0r = x1r + x3i;
+            x0i = x1i - x3r;
+            a[j3] = wk3r * x0r - wk3i * x0i;
+            a[j3 + 1] = wk3r * x0i + wk3i * x0r;
+        }
+        // Second half of the group, with the next wk1 twiddle pair.
+        wk1r = w[k2 + 2];
+        wk1i = w[k2 + 3];
+        wk3r = wk1r - 2 * wk2r * wk1i;
+        wk3i = 2 * wk2r * wk1r - wk1i;
+        for (j = k + m; j < l + (k + m); j += 2) {
+            j1 = j + l;
+            j2 = j1 + l;
+            j3 = j2 + l;
+            x0r = a[j] + a[j1];
+            x0i = a[j + 1] + a[j1 + 1];
+            x1r = a[j] - a[j1];
+            x1i = a[j + 1] - a[j1 + 1];
+            x2r = a[j2] + a[j3];
+            x2i = a[j2 + 1] + a[j3 + 1];
+            x3r = a[j2] - a[j3];
+            x3i = a[j2 + 1] - a[j3 + 1];
+            a[j] = x0r + x2r;
+            a[j + 1] = x0i + x2i;
+            x0r -= x2r;
+            x0i -= x2i;
+            a[j2] = -wk2i * x0r - wk2r * x0i;
+            a[j2 + 1] = -wk2i * x0i + wk2r * x0r;
+            x0r = x1r - x3i;
+            x0i = x1i + x3r;
+            a[j1] = wk1r * x0r - wk1i * x0i;
+            a[j1 + 1] = wk1r * x0i + wk1i * x0r;
+            x0r = x1r + x3i;
+            x0i = x1i - x3r;
+            a[j3] = wk3r * x0r - wk3i * x0i;
+            a[j3 + 1] = wk3r * x0i + wk3i * x0r;
+        }
+    }
+}
+
+
+// Post-processing for the forward real transform: called after cftfsub()
+// in WebRtc_rdft() to combine the mirrored pairs a[j]/a[n-j] using the
+// pre-scaled cos/sin table c[] built by makect().
+static void rftfsub(int n, float *a, int nc, float *c)
+{
+    int j, k, kk, ks, m;
+    float wkr, wki, xr, xi, yr, yi;
+
+    m = n >> 1;
+    ks = 2 * nc / m;  // stride through the nc-entry table
+    kk = 0;
+    for (j = 2; j < m; j += 2) {
+        k = n - j;
+        kk += ks;
+        // c[] entries are pre-scaled by 0.5 (see makect).
+        wkr = 0.5f - c[nc - kk];
+        wki = c[kk];
+        xr = a[j] - a[k];
+        xi = a[j + 1] + a[k + 1];
+        yr = wkr * xr - wki * xi;
+        yi = wkr * xi + wki * xr;
+        a[j] -= yr;
+        a[j + 1] -= yi;
+        a[k] += yr;
+        a[k + 1] -= yi;
+    }
+}
+
+
+// Pre-processing for the inverse real transform: called before
+// bitrv2/cftbsub in WebRtc_rdft()'s isgn < 0 path.  Mirror of rftfsub()
+// with the rotation direction reversed, plus sign flips of a[1] and
+// a[m+1] bracketing the loop.
+static void rftbsub(int n, float *a, int nc, float *c)
+{
+    int j, k, kk, ks, m;
+    float wkr, wki, xr, xi, yr, yi;
+
+    a[1] = -a[1];
+    m = n >> 1;
+    ks = 2 * nc / m;  // stride through the nc-entry table
+    kk = 0;
+    for (j = 2; j < m; j += 2) {
+        k = n - j;
+        kk += ks;
+        wkr = 0.5f - c[nc - kk];
+        wki = c[kk];
+        xr = a[j] - a[k];
+        xi = a[j + 1] + a[k + 1];
+        yr = wkr * xr + wki * xi;
+        yi = wkr * xi - wki * xr;
+        a[j] -= yr;
+        a[j + 1] = yi - a[j + 1];
+        a[k] += yr;
+        a[k + 1] = yi - a[k + 1];
+    }
+    a[m + 1] = -a[m + 1];
+}
+
+#if 0  // Not used.
+// Twiddle stage used by the DCT routines (ddct/dfct).  Compiled out with
+// them by the surrounding `#if 0`.
+static void dctsub(int n, float *a, int nc, float *c)
+{
+    int j, k, kk, ks, m;
+    float wkr, wki, xr;
+
+    m = n >> 1;
+    ks = nc / n;  // stride through the nc-entry table
+    kk = 0;
+    for (j = 1; j < m; j++) {
+        k = n - j;
+        kk += ks;
+        wkr = c[kk] - c[nc - kk];
+        wki = c[kk] + c[nc - kk];
+        xr = wki * a[j] - wkr * a[k];
+        a[j] = wkr * a[j] + wki * a[k];
+        a[k] = xr;
+    }
+    // Midpoint has no mirror partner; scale by c[0] only.
+    a[m] *= c[0];
+}
+
+
+// Twiddle stage used by the DST routines (ddst/dfst): dctsub() with the
+// roles of a[j] and a[k] exchanged.  Compiled out by the surrounding #if 0.
+static void dstsub(int n, float *a, int nc, float *c)
+{
+    int j, k, kk, ks, m;
+    float wkr, wki, xr;
+
+    m = n >> 1;
+    ks = nc / n;  // stride through the nc-entry table
+    kk = 0;
+    for (j = 1; j < m; j++) {
+        k = n - j;
+        kk += ks;
+        wkr = c[kk] - c[nc - kk];
+        wki = c[kk] + c[nc - kk];
+        xr = wki * a[k] - wkr * a[j];
+        a[k] = wkr * a[k] + wki * a[j];
+        a[j] = xr;
+    }
+    // Midpoint has no mirror partner; scale by c[0] only.
+    a[m] *= c[0];
+}
+#endif  // Not used.
diff --git a/trunk/src/modules/audio_processing/utility/fft4g.h b/trunk/src/modules/audio_processing/utility/fft4g.h
new file mode 100644
index 0000000..14a52a1
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/fft4g.h
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_FFT4G_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_FFT4G_H_
+
+void WebRtc_rdft(int, int, float *, int *, float *);
+void WebRtc_cdft(int, int, float *, int *, float *);
+
+#endif
diff --git a/trunk/src/modules/audio_processing/utility/ring_buffer.c b/trunk/src/modules/audio_processing/utility/ring_buffer.c
new file mode 100644
index 0000000..8b2b436
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/ring_buffer.c
@@ -0,0 +1,271 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// A ring buffer to hold arbitrary data. Provides no thread safety. Unless
+// otherwise specified, functions return 0 on success and -1 on error.
+
+#include "ring_buffer.h"
+
+#include <stddef.h> // size_t
+#include <stdlib.h>
+#include <string.h>
+
+enum Wrap {
+  SAME_WRAP,
+  DIFF_WRAP
+};
+
+typedef struct {
+  size_t read_pos;
+  size_t write_pos;
+  size_t element_count;
+  size_t element_size;
+  enum Wrap rw_wrap;
+  char* data;
+} buf_t;
+
+// Get address of region(s) from which we can read data.
+// If the region is contiguous, |data_ptr_bytes_2| will be zero.
+// If non-contiguous, |data_ptr_bytes_2| will be the size in bytes of the second
+// region. Returns room available to be read or |element_count|, whichever is
+// smaller.
+static size_t GetBufferReadRegions(buf_t* buf,
+                                   size_t element_count,
+                                   void** data_ptr_1,
+                                   size_t* data_ptr_bytes_1,
+                                   void** data_ptr_2,
+                                   size_t* data_ptr_bytes_2) {
+
+  const size_t readable_elements = WebRtc_available_read(buf);
+  const size_t read_elements = (readable_elements < element_count ?
+      readable_elements : element_count);
+  // Elements between the read position and the end of the storage.
+  const size_t margin = buf->element_count - buf->read_pos;
+
+  // Check to see if read is not contiguous.
+  if (read_elements > margin) {
+    // The read wraps: return it as two blocks — the tail of the storage
+    // followed by its start.
+    *data_ptr_1 = buf->data + buf->read_pos * buf->element_size;
+    *data_ptr_bytes_1 = margin * buf->element_size;
+    *data_ptr_2 = buf->data;
+    *data_ptr_bytes_2 = (read_elements - margin) * buf->element_size;
+  } else {
+    // Contiguous read; the second region is unused.
+    *data_ptr_1 = buf->data + buf->read_pos * buf->element_size;
+    *data_ptr_bytes_1 = read_elements * buf->element_size;
+    *data_ptr_2 = NULL;
+    *data_ptr_bytes_2 = 0;
+  }
+
+  return read_elements;
+}
+
+// Allocates a ring buffer able to hold |element_count| elements of
+// |element_size| bytes each.  On success stores the new buffer in *handle
+// and returns 0; on failure returns -1 and leaves *handle untouched.
+// NOTE: the read/write positions are NOT initialized here —
+// WebRtc_InitBuffer() must be called before the buffer is used.
+int WebRtc_CreateBuffer(void** handle,
+                        size_t element_count,
+                        size_t element_size) {
+  buf_t* self = NULL;
+
+  if (handle == NULL) {
+    return -1;
+  }
+
+  self = malloc(sizeof(buf_t));
+  if (self == NULL) {
+    return -1;
+  }
+
+  self->data = malloc(element_count * element_size);
+  if (self->data == NULL) {
+    free(self);
+    self = NULL;
+    return -1;
+  }
+
+  self->element_count = element_count;
+  self->element_size = element_size;
+
+  // Publish the handle only after every allocation has succeeded, so a
+  // failed call can never leave *handle pointing at freed memory.
+  *handle = self;
+
+  return 0;
+}
+
+// Resets the buffer to its empty state and zero-fills the backing store.
+// Returns 0 on success, -1 if |handle| is NULL.
+int WebRtc_InitBuffer(void* handle) {
+  buf_t* self = (buf_t*) handle;
+
+  if (self == NULL) {
+    return -1;
+  }
+
+  // Empty buffer: both positions at zero with no wrap outstanding.
+  self->write_pos = 0;
+  self->read_pos = 0;
+  self->rw_wrap = SAME_WRAP;
+
+  // Zero the storage so stale bytes can never be handed out on a read.
+  memset(self->data, 0, self->element_size * self->element_count);
+
+  return 0;
+}
+
+// Releases the backing store and the buffer object itself.
+// Returns 0 on success, -1 if |handle| is NULL.
+int WebRtc_FreeBuffer(void* handle) {
+  buf_t* self = (buf_t*) handle;
+
+  if (self != NULL) {
+    free(self->data);
+    free(self);
+    return 0;
+  }
+
+  return -1;
+}
+
+// Reads up to |element_count| elements from the buffer.  When the readable
+// region is contiguous, |*data_ptr| points directly into the ring storage
+// (per ring_buffer.h, valid only until the next WebRtc_WriteBuffer call);
+// on a wrap-around the two pieces are copied into |data| and |*data_ptr|
+// points at |data|.  Returns the number of elements read (0 on bad args).
+size_t WebRtc_ReadBuffer(void* handle,
+                         void** data_ptr,
+                         void* data,
+                         size_t element_count) {
+
+  buf_t* self = (buf_t*) handle;
+
+  if (self == NULL) {
+    return 0;
+  }
+  if (data == NULL) {
+    return 0;
+  }
+  if (data_ptr == NULL) {
+    return 0;
+  }
+
+  {
+    void* buf_ptr_1 = NULL;
+    void* buf_ptr_2 = NULL;
+    size_t buf_ptr_bytes_1 = 0;
+    size_t buf_ptr_bytes_2 = 0;
+    const size_t read_count = GetBufferReadRegions(self,
+                                                   element_count,
+                                                   &buf_ptr_1,
+                                                   &buf_ptr_bytes_1,
+                                                   &buf_ptr_2,
+                                                   &buf_ptr_bytes_2);
+
+    if (buf_ptr_bytes_2 > 0) {
+      // We have a wrap around when reading the buffer. Copy the buffer data to
+      // |data| and point to it.
+      memcpy(data, buf_ptr_1, buf_ptr_bytes_1);
+      memcpy(((char*) data) + buf_ptr_bytes_1, buf_ptr_2, buf_ptr_bytes_2);
+      *data_ptr = data;
+    } else {
+      // Contiguous: hand out a pointer into the ring storage, no copy.
+      *data_ptr = buf_ptr_1;
+    }
+
+    // Update read position
+    WebRtc_MoveReadPtr(handle, (int) read_count);
+
+    return read_count;
+  }
+}
+
+// Writes up to |element_count| elements from |data| into the buffer,
+// limited by the free space and wrapping around the end of the storage
+// when needed.  Returns the number of elements written (0 on bad args).
+size_t WebRtc_WriteBuffer(void* handle,
+                          const void* data,
+                          size_t element_count) {
+
+  buf_t* self = (buf_t*) handle;
+
+  if (self == NULL) {
+    return 0;
+  }
+  if (data == NULL) {
+    return 0;
+  }
+
+  {
+    const size_t free_elements = WebRtc_available_write(handle);
+    const size_t write_elements = (free_elements < element_count ? free_elements
+        : element_count);
+    size_t n = write_elements;  // elements still to be written
+    // Elements between the write position and the end of the storage.
+    const size_t margin = self->element_count - self->write_pos;
+
+    if (write_elements > margin) {
+      // Buffer wrap around when writing: fill the tail first, then fall
+      // through to write the remainder from the start of the storage.
+      memcpy(self->data + self->write_pos * self->element_size,
+             data, margin * self->element_size);
+      self->write_pos = 0;
+      n -= margin;
+      self->rw_wrap = DIFF_WRAP;
+    }
+    memcpy(self->data + self->write_pos * self->element_size,
+           ((const char*) data) + ((write_elements - n) * self->element_size),
+           n * self->element_size);
+    self->write_pos += n;
+
+    return write_elements;
+  }
+}
+
+// Moves the read position by |element_count| elements; negative values
+// move it backwards ("un-reading" data).  The move is clamped to the
+// readable range going forward and to the free space going backward, and
+// the wrap indicator is updated when the position crosses either end of
+// the storage.  Returns the number of elements actually moved.
+int WebRtc_MoveReadPtr(void* handle, int element_count) {
+
+  buf_t* self = (buf_t*) handle;
+
+  if (self == NULL) {
+    return 0;
+  }
+
+  {
+    // We need to be able to take care of negative changes, hence use "int"
+    // instead of "size_t".
+    const int free_elements = (int) WebRtc_available_write(handle);
+    const int readable_elements = (int) WebRtc_available_read(handle);
+    int read_pos = (int) self->read_pos;
+
+    // Clamp the requested move to what the buffer can absorb.
+    if (element_count > readable_elements) {
+      element_count = readable_elements;
+    }
+    if (element_count < -free_elements) {
+      element_count = -free_elements;
+    }
+
+    read_pos += element_count;
+    if (read_pos > (int) self->element_count) {
+      // Buffer wrap around. Restart read position and wrap indicator.
+      read_pos -= (int) self->element_count;
+      self->rw_wrap = SAME_WRAP;
+    }
+    if (read_pos < 0) {
+      // Buffer wrap around. Restart read position and wrap indicator.
+      read_pos += (int) self->element_count;
+      self->rw_wrap = DIFF_WRAP;
+    }
+
+    self->read_pos = (size_t) read_pos;
+
+    return element_count;
+  }
+}
+
+// Returns the number of elements waiting to be read (0 for a NULL handle).
+size_t WebRtc_available_read(const void* handle) {
+  const buf_t* self = (buf_t*) handle;
+  size_t count = 0;
+
+  if (self != NULL) {
+    // With no wrap outstanding the readable data lies between the two
+    // positions; otherwise it spans the end of the storage.
+    count = (self->rw_wrap == SAME_WRAP)
+        ? self->write_pos - self->read_pos
+        : self->element_count - self->read_pos + self->write_pos;
+  }
+
+  return count;
+}
+
+// Returns the number of elements that can be written (0 for a NULL handle).
+size_t WebRtc_available_write(const void* handle) {
+  const buf_t* self = (buf_t*) handle;
+  size_t free_elements = 0;
+
+  if (self != NULL) {
+    // Free space is whatever part of the buffer is not waiting to be read.
+    free_elements = self->element_count - WebRtc_available_read(handle);
+  }
+
+  return free_elements;
+}
diff --git a/trunk/src/modules/audio_processing/utility/ring_buffer.h b/trunk/src/modules/audio_processing/utility/ring_buffer.h
new file mode 100644
index 0000000..3c44029
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/ring_buffer.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// A ring buffer to hold arbitrary data. Provides no thread safety. Unless
+// otherwise specified, functions return 0 on success and -1 on error.
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_RING_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_RING_BUFFER_H_
+
+#include <stddef.h> // size_t
+
+int WebRtc_CreateBuffer(void** handle,
+                        size_t element_count,
+                        size_t element_size);
+int WebRtc_InitBuffer(void* handle);
+int WebRtc_FreeBuffer(void* handle);
+
+// Reads data from the buffer. The |data_ptr| will point to the address where
+// it is located. If all |element_count| data are feasible to read without
+// buffer wrap around |data_ptr| will point to the location in the buffer.
+// Otherwise, the data will be copied to |data| (memory allocation done by the
+// user) and |data_ptr| points to the address of |data|. |data_ptr| is only
+// guaranteed to be valid until the next call to WebRtc_WriteBuffer().
+// Returns number of elements read.
+size_t WebRtc_ReadBuffer(void* handle,
+                         void** data_ptr,
+                         void* data,
+                         size_t element_count);
+
+// Writes |data| to buffer and returns the number of elements written.
+size_t WebRtc_WriteBuffer(void* handle, const void* data, size_t element_count);
+
+// Moves the buffer read position and returns the number of elements moved.
+// Positive |element_count| moves the read position towards the write position,
+// that is, flushing the buffer. Negative |element_count| moves the read
+// position away from the the write position, that is, stuffing the buffer.
+// Returns number of elements moved.
+int WebRtc_MoveReadPtr(void* handle, int element_count);
+
+// Returns number of available elements to read.
+size_t WebRtc_available_read(const void* handle);
+
+// Returns number of available elements for write.
+size_t WebRtc_available_write(const void* handle);
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_RING_BUFFER_H_
diff --git a/trunk/src/modules/audio_processing/utility/util.gypi b/trunk/src/modules/audio_processing/utility/util.gypi
new file mode 100644
index 0000000..3c3024a
--- /dev/null
+++ b/trunk/src/modules/audio_processing/utility/util.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'apm_util',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '.',
+        ],
+      },
+      'sources': [
+        'delay_estimator.c',
+        'delay_estimator.h',
+        'delay_estimator_wrapper.c',
+        'delay_estimator_wrapper.h',
+        'fft4g.c',
+        'fft4g.h',
+        'ring_buffer.c',
+        'ring_buffer.h',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/audio_processing/voice_detection_impl.cc b/trunk/src/modules/audio_processing/voice_detection_impl.cc
new file mode 100644
index 0000000..50b99a0
--- /dev/null
+++ b/trunk/src/modules/audio_processing/voice_detection_impl.cc
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_detection_impl.h"
+
+#include <cassert>
+
+#include "critical_section_wrapper.h"
+#include "webrtc_vad.h"
+
+#include "audio_processing_impl.h"
+#include "audio_buffer.h"
+
+namespace webrtc {
+
+typedef VadInst Handle;
+
+namespace {
+// Maps the public Likelihood setting onto the WebRTC VAD aggressiveness
+// mode. Note the inverse relationship: requiring a higher likelihood of
+// voice corresponds to a *lower* (less aggressive) VAD mode. Returns -1
+// for an unrecognized value.
+int MapSetting(VoiceDetection::Likelihood likelihood) {
+  switch (likelihood) {
+    case VoiceDetection::kVeryLowLikelihood:
+      return 3;
+    case VoiceDetection::kLowLikelihood:
+      return 2;
+    case VoiceDetection::kModerateLikelihood:
+      return 1;
+    case VoiceDetection::kHighLikelihood:
+      return 0;
+  }
+  assert(false);
+  return -1;
+}
+}  // namespace
+
+// Defaults: internal VAD, kLowLikelihood, 10 ms frames. frame_size_samples_
+// stays 0 until Initialize() derives it from the split sample rate.
+VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessingImpl* apm)
+  : ProcessingComponent(apm),
+    apm_(apm),
+    stream_has_voice_(false),
+    using_external_vad_(false),
+    likelihood_(kLowLikelihood),
+    frame_size_ms_(10),
+    frame_size_samples_(0) {}
+
+VoiceDetectionImpl::~VoiceDetectionImpl() {}
+
+// Runs the VAD over one capture frame and records the voice-activity result
+// in both |stream_has_voice_| and the AudioFrame activity flag.
+// If the caller supplied an external VAD decision via
+// set_stream_has_voice(), that decision is consumed (one-shot) and the
+// internal VAD is skipped for this frame.
+int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+  if (!is_component_enabled()) {
+    return apm_->kNoError;
+  }
+
+  if (using_external_vad_) {
+    // One-shot: the flag is cleared so the next frame uses the internal VAD.
+    using_external_vad_ = false;
+    return apm_->kNoError;
+  }
+  assert(audio->samples_per_split_channel() <= 160);
+
+  // Multi-channel input is mixed down to mono before the VAD runs.
+  WebRtc_Word16* mixed_data = audio->low_pass_split_data(0);
+  if (audio->num_channels() > 1) {
+    audio->CopyAndMixLowPass(1);
+    mixed_data = audio->mixed_low_pass_data(0);
+  }
+
+  // TODO(ajm): concatenate data in frame buffer here.
+
+  // WebRtcVad_Process returns 1 for active voice, 0 for passive, else error.
+  int vad_ret = WebRtcVad_Process(static_cast<Handle*>(handle(0)),
+                                  apm_->split_sample_rate_hz(),
+                                  mixed_data,
+                                  frame_size_samples_);
+  if (vad_ret == 0) {
+    stream_has_voice_ = false;
+    audio->set_activity(AudioFrame::kVadPassive);
+  } else if (vad_ret == 1) {
+    stream_has_voice_ = true;
+    audio->set_activity(AudioFrame::kVadActive);
+  } else {
+    return apm_->kUnspecifiedError;
+  }
+
+  return apm_->kNoError;
+}
+
+// Enables or disables the component under the APM lock.
+int VoiceDetectionImpl::Enable(bool enable) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  return EnableComponent(enable);
+}
+
+bool VoiceDetectionImpl::is_enabled() const {
+  return is_component_enabled();
+}
+
+// Injects an external VAD decision; consumed by the next
+// ProcessCaptureAudio() call, which then skips the internal VAD once.
+int VoiceDetectionImpl::set_stream_has_voice(bool has_voice) {
+  using_external_vad_ = true;
+  stream_has_voice_ = has_voice;
+  return apm_->kNoError;
+}
+
+bool VoiceDetectionImpl::stream_has_voice() const {
+  // TODO(ajm): enable this assertion?
+  //assert(using_external_vad_ || is_component_enabled());
+  return stream_has_voice_;
+}
+
+// Validates the likelihood via MapSetting() before storing it and pushing
+// the new VAD mode to the handle through Configure().
+int VoiceDetectionImpl::set_likelihood(VoiceDetection::Likelihood likelihood) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  if (MapSetting(likelihood) == -1) {
+    return apm_->kBadParameterError;
+  }
+
+  likelihood_ = likelihood;
+  return Configure();
+}
+
+VoiceDetection::Likelihood VoiceDetectionImpl::likelihood() const {
+  return likelihood_;
+}
+
+// Sets the VAD frame size in milliseconds (10, 20 or 30). Currently only
+// 10 ms is supported, hence the assert; the 20/30 ms values are accepted by
+// the parameter check in anticipation of future support.
+int VoiceDetectionImpl::set_frame_size_ms(int size) {
+  CriticalSectionScoped crit_scoped(apm_->crit());
+  assert(size == 10); // TODO(ajm): remove when supported.
+  if (size != 10 &&
+      size != 20 &&
+      size != 30) {
+    return apm_->kBadParameterError;
+  }
+
+  frame_size_ms_ = size;
+
+  // Re-derive frame_size_samples_ for the new frame duration.
+  return Initialize();
+}
+
+int VoiceDetectionImpl::frame_size_ms() const {
+  return frame_size_ms_;
+}
+
+// (Re)initializes the component. Resets the external-VAD flag and derives
+// the per-frame sample count from the split-band sample rate. A disabled
+// component returns early after the base-class initialization.
+int VoiceDetectionImpl::Initialize() {
+  int err = ProcessingComponent::Initialize();
+  if (err != apm_->kNoError || !is_component_enabled()) {
+    return err;
+  }
+
+  using_external_vad_ = false;
+  frame_size_samples_ = frame_size_ms_ * (apm_->split_sample_rate_hz() / 1000);
+  // TODO(ajm): intialize frame buffer here.
+
+  return apm_->kNoError;
+}
+
+// Allocates a VAD instance. Returns NULL on failure; the comparison against
+// kNoError works because WebRtcVad_Create() returns 0 on success.
+void* VoiceDetectionImpl::CreateHandle() const {
+  Handle* handle = NULL;
+  if (WebRtcVad_Create(&handle) != apm_->kNoError) {
+    handle = NULL;
+  } else {
+    assert(handle != NULL);
+  }
+
+  return handle;
+}
+
+// Thin wrappers routing the ProcessingComponent handle lifecycle to the
+// C VAD API.
+int VoiceDetectionImpl::DestroyHandle(void* handle) const {
+  return WebRtcVad_Free(static_cast<Handle*>(handle));
+}
+
+int VoiceDetectionImpl::InitializeHandle(void* handle) const {
+  return WebRtcVad_Init(static_cast<Handle*>(handle));
+}
+
+// Applies the stored likelihood as the VAD aggressiveness mode.
+int VoiceDetectionImpl::ConfigureHandle(void* handle) const {
+  return WebRtcVad_set_mode(static_cast<Handle*>(handle),
+                            MapSetting(likelihood_));
+}
+
+// Voice detection operates on the mono mixdown, so one handle suffices.
+int VoiceDetectionImpl::num_handles_required() const {
+  return 1;
+}
+
+int VoiceDetectionImpl::GetHandleError(void* handle) const {
+  // The VAD has no get_error() function.
+  assert(handle != NULL);
+  return apm_->kUnspecifiedError;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/audio_processing/voice_detection_impl.h b/trunk/src/modules/audio_processing/voice_detection_impl.h
new file mode 100644
index 0000000..52d92e0
--- /dev/null
+++ b/trunk/src/modules/audio_processing/voice_detection_impl.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
+
+#include "audio_processing.h"
+#include "processing_component.h"
+
+namespace webrtc {
+class AudioProcessingImpl;
+class AudioBuffer;
+
+// Implements the VoiceDetection interface on top of the WebRTC VAD,
+// managed as an APM ProcessingComponent. Not thread-safe except where
+// methods take the APM critical section internally.
+class VoiceDetectionImpl : public VoiceDetection,
+                           public ProcessingComponent {
+ public:
+  explicit VoiceDetectionImpl(const AudioProcessingImpl* apm);
+  virtual ~VoiceDetectionImpl();
+
+  // Runs the VAD over one capture frame and updates the activity state.
+  int ProcessCaptureAudio(AudioBuffer* audio);
+
+  // VoiceDetection implementation.
+  virtual bool is_enabled() const;
+
+  // ProcessingComponent implementation.
+  virtual int Initialize();
+
+ private:
+  // VoiceDetection implementation.
+  virtual int Enable(bool enable);
+  virtual int set_stream_has_voice(bool has_voice);
+  virtual bool stream_has_voice() const;
+  virtual int set_likelihood(Likelihood likelihood);
+  virtual Likelihood likelihood() const;
+  virtual int set_frame_size_ms(int size);
+  virtual int frame_size_ms() const;
+
+  // ProcessingComponent implementation.
+  virtual void* CreateHandle() const;
+  virtual int InitializeHandle(void* handle) const;
+  virtual int ConfigureHandle(void* handle) const;
+  virtual int DestroyHandle(void* handle) const;
+  virtual int num_handles_required() const;
+  virtual int GetHandleError(void* handle) const;
+
+  const AudioProcessingImpl* apm_;   // Owning APM; not owned here.
+  bool stream_has_voice_;            // Last VAD (or external) decision.
+  bool using_external_vad_;          // One-shot external decision pending.
+  Likelihood likelihood_;            // Required likelihood setting.
+  int frame_size_ms_;                // VAD frame duration (10/20/30 ms).
+  int frame_size_samples_;           // Samples per frame; set in Initialize().
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
diff --git a/trunk/src/modules/interface/module.h b/trunk/src/modules/interface/module.h
new file mode 100644
index 0000000..37e5027
--- /dev/null
+++ b/trunk/src/modules/interface/module.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_INTERFACE_MODULE_H_
+#define MODULES_INTERFACE_MODULE_H_
+
+#include <assert.h>
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Base interface for modules driven by an external worker thread: the
+// thread polls TimeUntilNextProcess() and calls Process() when due.
+class Module {
+ public:
+  // Change the unique identifier of this object.
+  virtual int32_t ChangeUniqueId(const int32_t id) = 0;
+
+  // Returns the number of milliseconds until the module want a worker
+  // thread to call Process.
+  virtual int32_t TimeUntilNextProcess() = 0;
+
+  // Process any pending tasks such as timeouts.
+  virtual int32_t Process() = 0;
+
+ protected:
+  // Destruction only through derived classes; not deletable via Module*.
+  virtual ~Module() {}
+};
+
+// Reference counted version of the module interface.
+class RefCountedModule : public Module {
+ public:
+  // Increase the reference count by one.
+  // Returns the incremented reference count.
+  // TODO(perkj): Make this pure virtual when Chromium have implemented  
+  // reference counting ADM and Video capture module.
+  virtual int32_t AddRef() {
+    assert(false && "Not implemented.");
+    return 1;
+  }
+
+  // Decrease the reference count by one.
+  // Returns the decreased reference count.
+  // Returns 0 if the last reference was just released.
+  // When the reference count reach 0 the object will self-destruct.
+  // TODO(perkj): Make this pure virtual when Chromium have implemented  
+  // reference counting ADM and Video capture module.
+  virtual int32_t Release() {
+    assert(false && "Not implemented.");
+    return 1;
+  }
+
+ protected:
+  virtual ~RefCountedModule() {}
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_INTERFACE_MODULE_H_
diff --git a/trunk/src/modules/interface/module_common_types.h b/trunk/src/modules/interface/module_common_types.h
new file mode 100644
index 0000000..2e1beaa
--- /dev/null
+++ b/trunk/src/modules/interface/module_common_types.h
@@ -0,0 +1,1047 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULE_COMMON_TYPES_H
+#define MODULE_COMMON_TYPES_H
+
+#include <cstring> // memcpy
+#include <assert.h>
+
+#include "typedefs.h"
+#include "common_types.h"
+
+#ifdef _WIN32
+    #pragma warning(disable:4351)       // remove warning "new behavior: elements of array
+                                        // 'array' will be default initialized"
+#endif
+
+namespace webrtc
+{
+struct RTPHeader
+{
+    bool           markerBit;
+    WebRtc_UWord8  payloadType;
+    WebRtc_UWord16 sequenceNumber;
+    WebRtc_UWord32 timestamp;
+    WebRtc_UWord32 ssrc;
+    WebRtc_UWord8  numCSRCs;
+    WebRtc_UWord32 arrOfCSRCs[kRtpCsrcSize];
+    WebRtc_UWord8  paddingLength;
+    WebRtc_UWord16 headerLength;
+};
+
+struct RTPHeaderExtension
+{
+    WebRtc_Word32  transmissionTimeOffset;
+};
+
+struct RTPAudioHeader
+{
+    WebRtc_UWord8  numEnergy;                         // number of valid entries in arrOfEnergy
+    WebRtc_UWord8  arrOfEnergy[kRtpCsrcSize];   // one energy byte (0-9) per channel
+    bool           isCNG;                             // is this CNG
+    WebRtc_UWord8  channel;                           // number of channels 2 = stereo
+};
+
+enum {kNoPictureId = -1};
+enum {kNoTl0PicIdx = -1};
+enum {kNoTemporalIdx = -1};
+enum {kNoKeyIdx = -1};
+enum {kNoSimulcastIdx = 0};
+
+struct RTPVideoHeaderVP8
+{
+    void InitRTPVideoHeaderVP8()
+    {
+        nonReference = false;
+        pictureId = kNoPictureId;
+        tl0PicIdx = kNoTl0PicIdx;
+        temporalIdx = kNoTemporalIdx;
+        layerSync = false;
+        keyIdx = kNoKeyIdx;
+        partitionId = 0;
+        beginningOfPartition = false;
+        frameWidth = 0;
+        frameHeight = 0;
+    }
+
+    bool           nonReference;   // Frame is discardable.
+    WebRtc_Word16  pictureId;      // Picture ID index, 15 bits;
+                                   // kNoPictureId if PictureID does not exist.
+    WebRtc_Word16  tl0PicIdx;      // TL0PIC_IDX, 8 bits;
+                                   // kNoTl0PicIdx means no value provided.
+    WebRtc_Word8   temporalIdx;    // Temporal layer index, or kNoTemporalIdx.
+    bool           layerSync;      // This frame is a layer sync frame.
+                                   // Disabled if temporalIdx == kNoTemporalIdx.
+    int            keyIdx;         // 5 bits; kNoKeyIdx means not used.
+    int            partitionId;    // VP8 partition ID
+    bool           beginningOfPartition;  // True if this packet is the first
+                                          // in a VP8 partition. Otherwise false
+    int            frameWidth;     // Exists for key frames.
+    int            frameHeight;    // Exists for key frames.
+};
+union RTPVideoTypeHeader
+{
+    RTPVideoHeaderVP8       VP8;
+};
+
+enum RTPVideoCodecTypes
+{
+    kRTPVideoGeneric  = 0,
+    kRTPVideoVP8      = 8,
+    kRTPVideoNoVideo  = 10,
+    kRTPVideoFEC      = 11,
+    kRTPVideoI420     = 12
+};
+struct RTPVideoHeader
+{
+    WebRtc_UWord16          width;                  // size
+    WebRtc_UWord16          height;
+
+    bool                    isFirstPacket;   // first packet in frame
+    WebRtc_UWord8           simulcastIdx;    // Index if the simulcast encoder creating
+                                             // this frame, 0 if not using simulcast.
+    RTPVideoCodecTypes      codec;
+    RTPVideoTypeHeader      codecHeader;
+};
+union RTPTypeHeader
+{
+    RTPAudioHeader  Audio;
+    RTPVideoHeader  Video;
+};
+
+struct WebRtcRTPHeader
+{
+    RTPHeader       header;
+    FrameType       frameType;
+    RTPTypeHeader   type;
+    RTPHeaderExtension extension;
+};
+
+// Describes how an RTP payload is split into fragmentation units. Owns four
+// parallel heap arrays of size |fragmentationVectorSize|.
+class RTPFragmentationHeader
+{
+public:
+    RTPFragmentationHeader() :
+        fragmentationVectorSize(0),
+        fragmentationOffset(NULL),
+        fragmentationLength(NULL),
+        fragmentationTimeDiff(NULL),
+        fragmentationPlType(NULL)
+    {};
+
+    // Bug fix: a user-declared copy constructor is required (rule of three).
+    // The previously compiler-generated copy was a shallow pointer copy,
+    // leading to double-delete in ~RTPFragmentationHeader(). Delegate to the
+    // deep-copying operator= below.
+    RTPFragmentationHeader(const RTPFragmentationHeader& header) :
+        fragmentationVectorSize(0),
+        fragmentationOffset(NULL),
+        fragmentationLength(NULL),
+        fragmentationTimeDiff(NULL),
+        fragmentationPlType(NULL)
+    {
+        *this = header;
+    }
+
+    ~RTPFragmentationHeader()
+    {
+        delete [] fragmentationOffset;
+        delete [] fragmentationLength;
+        delete [] fragmentationTimeDiff;
+        delete [] fragmentationPlType;
+    }
+
+    // Deep copy. Arrays are reallocated only when the vector sizes differ;
+    // a source array that is NULL stays NULL in the destination.
+    RTPFragmentationHeader& operator=(const RTPFragmentationHeader& header)
+    {
+        if(this == &header)
+        {
+            return *this;
+        }
+
+        if(header.fragmentationVectorSize != fragmentationVectorSize)
+        {
+            // new size of vectors
+
+            // delete old
+            delete [] fragmentationOffset;
+            fragmentationOffset = NULL;
+            delete [] fragmentationLength;
+            fragmentationLength = NULL;
+            delete [] fragmentationTimeDiff;
+            fragmentationTimeDiff = NULL;
+            delete [] fragmentationPlType;
+            fragmentationPlType = NULL;
+
+            if(header.fragmentationVectorSize > 0)
+            {
+                // allocate new
+                if(header.fragmentationOffset)
+                {
+                    fragmentationOffset = new WebRtc_UWord32[header.fragmentationVectorSize];
+                }
+                if(header.fragmentationLength)
+                {
+                    fragmentationLength = new WebRtc_UWord32[header.fragmentationVectorSize];
+                }
+                if(header.fragmentationTimeDiff)
+                {
+                    fragmentationTimeDiff = new WebRtc_UWord16[header.fragmentationVectorSize];
+                }
+                if(header.fragmentationPlType)
+                {
+                    fragmentationPlType = new WebRtc_UWord8[header.fragmentationVectorSize];
+                }
+            }
+            // set new size
+            fragmentationVectorSize = header.fragmentationVectorSize;
+        }
+
+        if(header.fragmentationVectorSize > 0)
+        {
+            // copy values
+            // NOTE(review): if the sizes were already equal but a destination
+            // array is NULL while the source's is not, these memcpys would
+            // write through NULL — cannot occur via this class's own methods,
+            // but worth confirming against external mutation of the members.
+            if(header.fragmentationOffset)
+            {
+                memcpy(fragmentationOffset, header.fragmentationOffset,
+                        header.fragmentationVectorSize * sizeof(WebRtc_UWord32));
+            }
+            if(header.fragmentationLength)
+            {
+                memcpy(fragmentationLength, header.fragmentationLength,
+                        header.fragmentationVectorSize * sizeof(WebRtc_UWord32));
+            }
+            if(header.fragmentationTimeDiff)
+            {
+                memcpy(fragmentationTimeDiff, header.fragmentationTimeDiff,
+                        header.fragmentationVectorSize * sizeof(WebRtc_UWord16));
+            }
+            if(header.fragmentationPlType)
+            {
+                memcpy(fragmentationPlType, header.fragmentationPlType,
+                        header.fragmentationVectorSize * sizeof(WebRtc_UWord8));
+            }
+        }
+        return *this;
+    }
+
+    // Grows (never shrinks) all four arrays to |size| elements, preserving
+    // the old contents and zero-filling the new tail.
+    void VerifyAndAllocateFragmentationHeader( const WebRtc_UWord16 size)
+    {
+        if( fragmentationVectorSize < size)
+        {
+            WebRtc_UWord16 oldVectorSize = fragmentationVectorSize;
+            {
+                // offset
+                WebRtc_UWord32* oldOffsets = fragmentationOffset;
+                fragmentationOffset = new WebRtc_UWord32[size];
+                memset(fragmentationOffset+oldVectorSize, 0,
+                       sizeof(WebRtc_UWord32)*(size-oldVectorSize));
+                // copy old values; guard the NULL source on first allocation
+                if (oldOffsets)
+                {
+                    memcpy(fragmentationOffset, oldOffsets,
+                           sizeof(WebRtc_UWord32) * oldVectorSize);
+                }
+                delete[] oldOffsets;
+            }
+            // length
+            {
+                WebRtc_UWord32* oldLengths = fragmentationLength;
+                fragmentationLength = new WebRtc_UWord32[size];
+                memset(fragmentationLength+oldVectorSize, 0,
+                       sizeof(WebRtc_UWord32) * (size- oldVectorSize));
+                if (oldLengths)
+                {
+                    memcpy(fragmentationLength, oldLengths,
+                           sizeof(WebRtc_UWord32) * oldVectorSize);
+                }
+                delete[] oldLengths;
+            }
+            // time diff
+            {
+                WebRtc_UWord16* oldTimeDiffs = fragmentationTimeDiff;
+                fragmentationTimeDiff = new WebRtc_UWord16[size];
+                memset(fragmentationTimeDiff+oldVectorSize, 0,
+                       sizeof(WebRtc_UWord16) * (size- oldVectorSize));
+                if (oldTimeDiffs)
+                {
+                    memcpy(fragmentationTimeDiff, oldTimeDiffs,
+                           sizeof(WebRtc_UWord16) * oldVectorSize);
+                }
+                delete[] oldTimeDiffs;
+            }
+            // payload type
+            {
+                WebRtc_UWord8* oldTimePlTypes = fragmentationPlType;
+                fragmentationPlType = new WebRtc_UWord8[size];
+                memset(fragmentationPlType+oldVectorSize, 0,
+                       sizeof(WebRtc_UWord8) * (size- oldVectorSize));
+                if (oldTimePlTypes)
+                {
+                    memcpy(fragmentationPlType, oldTimePlTypes,
+                           sizeof(WebRtc_UWord8) * oldVectorSize);
+                }
+                delete[] oldTimePlTypes;
+            }
+            fragmentationVectorSize = size;
+        }
+    }
+
+    WebRtc_UWord16    fragmentationVectorSize;    // Number of fragmentations
+    WebRtc_UWord32*   fragmentationOffset;        // Offset of pointer to data for each fragm.
+    WebRtc_UWord32*   fragmentationLength;        // Data size for each fragmentation
+    WebRtc_UWord16*   fragmentationTimeDiff;      // Timestamp difference relative "now" for
+                                                  // each fragmentation
+    WebRtc_UWord8*    fragmentationPlType;        // Payload type of each fragmentation
+};
+
+struct RTCPVoIPMetric
+{
+    // RFC 3611 4.7
+    WebRtc_UWord8     lossRate;
+    WebRtc_UWord8     discardRate;
+    WebRtc_UWord8     burstDensity;
+    WebRtc_UWord8     gapDensity;
+    WebRtc_UWord16    burstDuration;
+    WebRtc_UWord16    gapDuration;
+    WebRtc_UWord16    roundTripDelay;
+    WebRtc_UWord16    endSystemDelay;
+    WebRtc_UWord8     signalLevel;
+    WebRtc_UWord8     noiseLevel;
+    WebRtc_UWord8     RERL;
+    WebRtc_UWord8     Gmin;
+    WebRtc_UWord8     Rfactor;
+    WebRtc_UWord8     extRfactor;
+    WebRtc_UWord8     MOSLQ;
+    WebRtc_UWord8     MOSCQ;
+    WebRtc_UWord8     RXconfig;
+    WebRtc_UWord16    JBnominal;
+    WebRtc_UWord16    JBmax;
+    WebRtc_UWord16    JBabsMax;
+};
+
+// class describing a complete, or parts of an encoded frame.
+class EncodedVideoData
+{
+public:
+    EncodedVideoData() :
+        completeFrame(false),
+        missingFrame(false),
+        payloadData(NULL),
+        payloadSize(0),
+        bufferSize(0)
+    {};
+
+    EncodedVideoData(const EncodedVideoData& data)
+    {
+        payloadType         = data.payloadType;
+        timeStamp           = data.timeStamp;
+        renderTimeMs        = data.renderTimeMs;
+        encodedWidth        = data.encodedWidth;
+        encodedHeight       = data.encodedHeight;
+        completeFrame       = data.completeFrame;
+        missingFrame        = data.missingFrame;
+        payloadSize         = data.payloadSize;
+        fragmentationHeader = data.fragmentationHeader;
+        frameType           = data.frameType;
+        codec               = data.codec;
+        if (data.payloadSize > 0)
+        {
+            payloadData = new WebRtc_UWord8[data.payloadSize];
+            memcpy(payloadData, data.payloadData, data.payloadSize);
+        }
+        else
+        {
+            payloadData = NULL;
+        }
+    }
+
+
+    ~EncodedVideoData()
+    {
+        delete [] payloadData;
+    };
+
+    EncodedVideoData& operator=(const EncodedVideoData& data)
+    {
+        if (this == &data)
+        {
+            return *this;
+        }
+        payloadType         = data.payloadType;
+        timeStamp           = data.timeStamp;
+        renderTimeMs        = data.renderTimeMs;
+        encodedWidth        = data.encodedWidth;
+        encodedHeight       = data.encodedHeight;
+        completeFrame       = data.completeFrame;
+        missingFrame        = data.missingFrame;
+        payloadSize         = data.payloadSize;
+        fragmentationHeader = data.fragmentationHeader;
+        frameType           = data.frameType;
+        codec               = data.codec;
+        if (data.payloadSize > 0)
+        {
+            delete [] payloadData;
+            payloadData = new WebRtc_UWord8[data.payloadSize];
+            memcpy(payloadData, data.payloadData, data.payloadSize);
+            bufferSize = data.payloadSize;
+        }
+        return *this;
+    };
+    void VerifyAndAllocate( const WebRtc_UWord32 size)
+    {
+        if (bufferSize < size)
+        {
+            WebRtc_UWord8* oldPayload = payloadData;
+            payloadData = new WebRtc_UWord8[size];
+            memcpy(payloadData, oldPayload, sizeof(WebRtc_UWord8) * payloadSize);
+
+            bufferSize = size;
+            delete[] oldPayload;
+        }
+    }
+
+    WebRtc_UWord8               payloadType;
+    WebRtc_UWord32              timeStamp;
+    WebRtc_Word64               renderTimeMs;
+    WebRtc_UWord32              encodedWidth;
+    WebRtc_UWord32              encodedHeight;
+    bool                        completeFrame;
+    bool                        missingFrame;
+    WebRtc_UWord8*              payloadData;
+    WebRtc_UWord32              payloadSize;
+    WebRtc_UWord32              bufferSize;
+    RTPFragmentationHeader      fragmentationHeader;
+    FrameType                   frameType;
+    VideoCodecType              codec;
+};
+
+// Content-analysis metrics for a video frame: motion magnitude and spatial
+// prediction errors (overall, horizontal, vertical). All values default to
+// zero and can be re-zeroed with Reset().
+struct VideoContentMetrics {
+  VideoContentMetrics()
+      : motion_magnitude(0.0f),
+        spatial_pred_err(0.0f),
+        spatial_pred_err_h(0.0f),
+        spatial_pred_err_v(0.0f) {
+  }
+
+  void Reset() {
+    motion_magnitude = 0.0f;
+    spatial_pred_err = 0.0f;
+    spatial_pred_err_h = 0.0f;
+    spatial_pred_err_v = 0.0f;
+  }
+  float motion_magnitude;
+  float spatial_pred_err;
+  float spatial_pred_err_h;
+  float spatial_pred_err_v;
+};
+
+/*************************************************
+ *
+ * VideoFrame class
+ *
+ * The VideoFrame class allows storing and
+ * handling of video frames.
+ *
+ *
+ *************************************************/
+class VideoFrame
+{
+public:
+    VideoFrame();
+    ~VideoFrame();
+    /**
+    * Verifies that current allocated buffer size is larger than or equal to the input size.
+    * If the current buffer size is smaller, a new allocation is made and the old buffer data
+    * is copied to the new buffer.
+    * Buffer size is updated to minimumSize.
+    */
+    WebRtc_Word32 VerifyAndAllocate(const WebRtc_UWord32 minimumSize);
+    /**
+    *    Update length of data buffer in frame. Function verifies that new length is less or
+    *    equal to allocated size.
+    */
+    WebRtc_Word32 SetLength(const WebRtc_UWord32 newLength);
+    /*
+    *    Swap buffer and size data
+    */
+    WebRtc_Word32 Swap(WebRtc_UWord8*& newMemory,
+                       WebRtc_UWord32& newLength,
+                       WebRtc_UWord32& newSize);
+    /*
+    *    Swap buffer and size data
+    */
+    WebRtc_Word32 SwapFrame(VideoFrame& videoFrame);
+    /**
+    *    Copy buffer: If newLength is bigger than allocated size, a new buffer of size length
+    *    is allocated.
+    */
+    WebRtc_Word32 CopyFrame(const VideoFrame& videoFrame);
+    /**
+    *    Copy buffer: If newLength is bigger than allocated size, a new buffer of size length
+    *    is allocated.
+    */
+    WebRtc_Word32 CopyFrame(WebRtc_UWord32 length, const WebRtc_UWord8* sourceBuffer);
+    /**
+    *    Delete VideoFrame and resets members to zero
+    */
+    void Free();
+    /**
+    *   Set frame timestamp (90kHz)
+    */
+    void SetTimeStamp(const WebRtc_UWord32 timeStamp) {_timeStamp = timeStamp;}
+    /**
+    *   Get pointer to frame buffer
+    */
+    WebRtc_UWord8*    Buffer() const {return _buffer;}
+
+    WebRtc_UWord8*&   Buffer() {return _buffer;}
+
+    /**
+    *   Get allocated buffer size
+    */
+    WebRtc_UWord32    Size() const {return _bufferSize;}
+    /**
+    *   Get frame length
+    */
+    WebRtc_UWord32    Length() const {return _bufferLength;}
+    /**
+    *   Get frame timestamp (90kHz)
+    */
+    WebRtc_UWord32    TimeStamp() const {return _timeStamp;}
+    /**
+    *   Get frame width
+    */
+    WebRtc_UWord32    Width() const {return _width;}
+    /**
+    *   Get frame height
+    */
+    WebRtc_UWord32    Height() const {return _height;}
+    /**
+    *   Set frame width
+    */
+    void   SetWidth(const WebRtc_UWord32 width)  {_width = width;}
+    /**
+    *   Set frame height
+    */
+    void  SetHeight(const WebRtc_UWord32 height) {_height = height;}
+    /**
+    *   Set render time in miliseconds
+    */
+    void SetRenderTime(const WebRtc_Word64 renderTimeMs) {_renderTimeMs = renderTimeMs;}
+    /**
+    *  Get render time in miliseconds
+    */
+    WebRtc_Word64    RenderTimeMs() const {return _renderTimeMs;}
+
+private:
+    void Set(WebRtc_UWord8* buffer,
+             WebRtc_UWord32 size,
+             WebRtc_UWord32 length,
+             WebRtc_UWord32 timeStamp);
+
+    WebRtc_UWord8*          _buffer;          // Pointer to frame buffer
+    WebRtc_UWord32          _bufferSize;      // Allocated buffer size
+    WebRtc_UWord32          _bufferLength;    // Length (in bytes) of buffer
+    WebRtc_UWord32          _timeStamp;       // Timestamp of frame (90kHz)
+    WebRtc_UWord32          _width;
+    WebRtc_UWord32          _height;
+    WebRtc_Word64           _renderTimeMs;
+}; // end of VideoFrame class declaration
+
+// inline implementation of VideoFrame class:
+// Construct an empty frame: no buffer allocated and all metadata zeroed.
+inline VideoFrame::VideoFrame()
+    : _buffer(0),
+      _bufferSize(0),
+      _bufferLength(0),
+      _timeStamp(0),
+      _width(0),
+      _height(0),
+      _renderTimeMs(0)
+{
+}
+// Release the internally owned buffer. Deleting a null pointer is a no-op,
+// so no explicit check is needed.
+inline VideoFrame::~VideoFrame()
+{
+    delete [] _buffer;
+    _buffer = NULL;
+}
+
+
+// Ensure the internal buffer can hold at least minimumSize bytes, growing
+// it if necessary while preserving existing data. Returns 0 on success and
+// -1 if minimumSize is zero.
+inline
+WebRtc_Word32
+VideoFrame::VerifyAndAllocate(const WebRtc_UWord32 minimumSize)
+{
+    if (minimumSize < 1)
+    {
+        return -1;
+    }
+    if(minimumSize > _bufferSize)
+    {
+        // Create a buffer of sufficient size.
+        WebRtc_UWord8* newBufferBuffer = new WebRtc_UWord8[minimumSize];
+        if(_buffer)
+        {
+            // Copy the old data, then zero the newly added tail so the whole
+            // buffer is in a defined state. (Previously only the
+            // fresh-allocation path below was zero-initialized; the grown
+            // region past _bufferSize was left uninitialized.)
+            memcpy(newBufferBuffer, _buffer, _bufferSize);
+            memset(newBufferBuffer + _bufferSize, 0, minimumSize - _bufferSize);
+            delete [] _buffer;
+        }
+        else
+        {
+            memset(newBufferBuffer, 0, minimumSize * sizeof(WebRtc_UWord8));
+        }
+        _buffer = newBufferBuffer;
+        _bufferSize = minimumSize;
+    }
+    return 0;
+}
+
+// Set the number of valid bytes in the buffer. Returns 0 on success, or -1
+// when newLength exceeds the allocated size (no reallocation is performed).
+inline
+WebRtc_Word32
+VideoFrame::SetLength(const WebRtc_UWord32 newLength)
+{
+    const bool fits = (newLength <= _bufferSize);
+    if (fits)
+    {
+        _bufferLength = newLength;
+    }
+    return fits ? 0 : -1;
+}
+
+// Exchange all contents (metadata and buffer ownership) with videoFrame.
+// Metadata fields are swapped pairwise; the buffers are exchanged through
+// Swap(). Always returns 0.
+inline
+WebRtc_Word32
+VideoFrame::SwapFrame(VideoFrame& videoFrame)
+{
+    const WebRtc_UWord32 myTimeStamp = _timeStamp;
+    _timeStamp = videoFrame._timeStamp;
+    videoFrame._timeStamp = myTimeStamp;
+
+    const WebRtc_UWord32 myWidth = _width;
+    _width = videoFrame._width;
+    videoFrame._width = myWidth;
+
+    const WebRtc_UWord32 myHeight = _height;
+    _height = videoFrame._height;
+    videoFrame._height = myHeight;
+
+    const WebRtc_Word64 myRenderTime = _renderTimeMs;
+    _renderTimeMs = videoFrame._renderTimeMs;
+    videoFrame._renderTimeMs = myRenderTime;
+
+    return Swap(videoFrame._buffer, videoFrame._bufferLength, videoFrame._bufferSize);
+}
+
+// Exchange this frame's buffer pointer, valid length and allocated size
+// with the caller-supplied references. Ownership of the memory transfers
+// in both directions. Always returns 0.
+inline
+WebRtc_Word32
+VideoFrame::Swap(WebRtc_UWord8*& newMemory, WebRtc_UWord32& newLength, WebRtc_UWord32& newSize)
+{
+    WebRtc_UWord8* const prevBuffer = _buffer;
+    const WebRtc_UWord32 prevLength = _bufferLength;
+    const WebRtc_UWord32 prevSize   = _bufferSize;
+
+    _buffer       = newMemory;
+    _bufferLength = newLength;
+    _bufferSize   = newSize;
+
+    newMemory = prevBuffer;
+    newLength = prevLength;
+    newSize   = prevSize;
+    return 0;
+}
+
+// Copy length bytes from sourceBuffer into this frame, growing the internal
+// buffer if needed. Returns 0 on success, -1 on failure (allocation failure
+// or a NULL sourceBuffer with a non-zero length; memcpy from NULL is
+// undefined behavior, so it is rejected explicitly).
+inline
+WebRtc_Word32
+VideoFrame::CopyFrame(WebRtc_UWord32 length, const WebRtc_UWord8* sourceBuffer)
+{
+    if ((sourceBuffer == NULL) && (length > 0))
+    {
+        return -1;
+    }
+    if (length > _bufferSize)
+    {
+        WebRtc_Word32 ret = VerifyAndAllocate(length);
+        if (ret < 0)
+        {
+            return ret;
+        }
+    }
+    if (length > 0)
+    {
+        memcpy(_buffer, sourceBuffer, length);
+    }
+    _bufferLength = length;
+    return 0;
+}
+
+// Deep-copy videoFrame's buffer contents and metadata into this frame.
+// Returns 0 on success and -1 if the buffer copy fails.
+inline
+WebRtc_Word32
+VideoFrame::CopyFrame(const VideoFrame& videoFrame)
+{
+    const WebRtc_Word32 copyResult =
+        CopyFrame(videoFrame.Length(), videoFrame.Buffer());
+    if (copyResult != 0)
+    {
+        return -1;
+    }
+    _timeStamp    = videoFrame._timeStamp;
+    _width        = videoFrame._width;
+    _height       = videoFrame._height;
+    _renderTimeMs = videoFrame._renderTimeMs;
+    return 0;
+}
+
+// Release the frame buffer and reset all metadata to the default
+// (empty-frame) state.
+inline
+void
+VideoFrame::Free()
+{
+    delete [] _buffer;    // deleting NULL is a harmless no-op
+    _buffer = NULL;
+
+    _timeStamp    = 0;
+    _bufferLength = 0;
+    _bufferSize   = 0;
+    _height       = 0;
+    _width        = 0;
+    _renderTimeMs = 0;
+}
+
+
+/*************************************************
+ *
+ * AudioFrame class
+ *
+ * The AudioFrame class holds up to 60 ms wideband
+ * audio. It allows for adding and subtracting frames
+ * while keeping track of the resulting states.
+ *
+ * Note
+ * - The +operator assumes that you would never add
+ *   exact opposite frames when deciding the resulting
+ *   state. To do this use the -operator.
+ *
+ * - _audioChannel of 1 indicates mono, and 2
+ *   indicates stereo.
+ *
+ * - _payloadDataLengthInSamples is the number of
+ *   samples per channel. Therefore, the total
+ *   number of samples in _payloadData is
+ *   (_payloadDataLengthInSamples * _audioChannel).
+ *
+ * - Stereo data is stored in interleaved fashion
+ *   starting with the left channel.
+ *
+ *************************************************/
+class AudioFrame
+{
+public:
+    // Maximum total sample capacity of _payloadData (all channels combined).
+    enum{kMaxAudioFrameSizeSamples = 3840}; // stereo 32KHz 60ms 2*32*60
+
+    enum VADActivity
+    {
+        kVadActive  = 0,
+        kVadPassive = 1,
+        kVadUnknown = 2
+    };
+    enum SpeechType
+    {
+        kNormalSpeech = 0,
+        kPLC          = 1,
+        kCNG          = 2,
+        kPLCCNG       = 3,
+        kUndefined    = 4
+    };
+
+    AudioFrame();
+    virtual ~AudioFrame();
+
+    // Replace the frame's payload and metadata in one call. Returns 0 on
+    // success and -1 on invalid input (see the inline definition below).
+    WebRtc_Word32 UpdateFrame(
+        const WebRtc_Word32  id,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_Word16* payloadData,
+        const WebRtc_UWord16 payloadDataLengthInSamples,
+        const int frequencyInHz,
+        const SpeechType     speechType,
+        const VADActivity    vadActivity,
+        const WebRtc_UWord8  audioChannel = 1,
+        const WebRtc_Word32  volume = -1,
+        const WebRtc_Word32  energy = -1);
+
+    // Concatenate rhs's payload after this frame's payload.
+    AudioFrame& Append(const AudioFrame& rhs);
+
+    // Silence the frame by zeroing the payload.
+    void Mute();
+
+    AudioFrame& operator=(const AudioFrame& rhs);
+    AudioFrame& operator>>=(const WebRtc_Word32 rhs);   // shift all samples right
+    AudioFrame& operator+=(const AudioFrame& rhs);      // sample-wise saturated add
+    AudioFrame& operator-=(const AudioFrame& rhs);      // sample-wise saturated subtract
+
+    WebRtc_Word32  _id;         // Identifier of the module owning this frame
+    WebRtc_UWord32 _timeStamp;  // RTP timestamp of the first sample
+
+    // Supporting Stereo, stereo samples are interleaved
+    WebRtc_Word16 _payloadData[kMaxAudioFrameSizeSamples];
+    WebRtc_UWord16 _payloadDataLengthInSamples;  // samples per channel
+    int _frequencyInHz;                          // sample rate
+    WebRtc_UWord8  _audioChannel;                // 1 = mono, 2 = stereo
+    SpeechType   _speechType;
+    VADActivity  _vadActivity;
+
+    WebRtc_UWord32 _energy;   // 0xffffffff when unknown
+    WebRtc_Word32  _volume;   // -1 when unknown
+};
+
+// Construct an empty mono frame; energy and volume are initialized to the
+// "unknown" sentinel (0xffffffff, i.e. -1 for the signed _volume).
+inline AudioFrame::AudioFrame()
+    : _id(-1),
+      _timeStamp(0),
+      _payloadData(),
+      _payloadDataLengthInSamples(0),
+      _frequencyInHz(0),
+      _audioChannel(1),
+      _speechType(kUndefined),
+      _vadActivity(kVadUnknown),
+      _energy(0xffffffff),
+      _volume(0xffffffff)
+{
+}
+
+// Virtual destructor; the frame owns no heap resources.
+inline AudioFrame::~AudioFrame() {}
+
+// Replace the entire contents of the frame. Returns 0 on success. On
+// invalid input, _payloadDataLengthInSamples is reset to 0 and -1 is
+// returned with no other state modified. If payloadData is NULL the payload
+// is zero-filled.
+//
+// Fixes vs. the original: (1) all metadata fields used to be assigned
+// before validation, leaving the frame partially mutated on failure;
+// (2) the size check compared only the per-channel sample count against
+// kMaxAudioFrameSizeSamples (the TOTAL capacity per the class comment), so
+// a large stereo payload could overflow _payloadData in the memcpy below.
+inline
+WebRtc_Word32
+AudioFrame::UpdateFrame(
+    const WebRtc_Word32  id,
+    const WebRtc_UWord32 timeStamp,
+    const WebRtc_Word16* payloadData,
+    const WebRtc_UWord16 payloadDataLengthInSamples,
+    const int frequencyInHz,
+    const SpeechType     speechType,
+    const VADActivity    vadActivity,
+    const WebRtc_UWord8  audioChannel,
+    const WebRtc_Word32  volume,
+    const WebRtc_Word32  energy)
+{
+    // Validate before mutating any state.
+    if((audioChannel > 2) || (audioChannel < 1) ||
+        (payloadDataLengthInSamples * audioChannel >
+            kMaxAudioFrameSizeSamples))
+    {
+        _payloadDataLengthInSamples = 0;
+        return -1;
+    }
+
+    _id            = id;
+    _timeStamp     = timeStamp;
+    _frequencyInHz = frequencyInHz;
+    _speechType    = speechType;
+    _vadActivity   = vadActivity;
+    _volume        = volume;
+    _audioChannel  = audioChannel;
+    _energy        = energy;
+
+    _payloadDataLengthInSamples = payloadDataLengthInSamples;
+    if(payloadData != NULL)
+    {
+        memcpy(_payloadData, payloadData, sizeof(WebRtc_Word16) *
+            payloadDataLengthInSamples * _audioChannel);
+    }
+    else
+    {
+        memset(_payloadData,0,sizeof(WebRtc_Word16) *
+            payloadDataLengthInSamples * _audioChannel);
+    }
+    return 0;
+}
+
+// Zero the entire valid payload. _payloadDataLengthInSamples is the
+// per-channel sample count (see the class comment above), so the byte count
+// must include the channel factor; the original memset omitted it and muted
+// only the first half of a stereo frame.
+inline
+void
+AudioFrame::Mute()
+{
+  memset(_payloadData, 0,
+         _payloadDataLengthInSamples * _audioChannel * sizeof(WebRtc_Word16));
+}
+
+// Copy assignment: replicate all metadata and the valid payload samples.
+// A right-hand side with impossible length/channel values, or
+// self-assignment, is silently ignored. The memcpy size relies on rhs
+// maintaining the class invariant (length * channels fits the array).
+inline
+AudioFrame&
+AudioFrame::operator=(const AudioFrame& rhs)
+{
+    const bool rhsInvalid =
+        (rhs._payloadDataLengthInSamples > kMaxAudioFrameSizeSamples) ||
+        (rhs._audioChannel > 2) || (rhs._audioChannel < 1);
+    if(rhsInvalid || (this == &rhs))
+    {
+        return *this;
+    }
+
+    _id            = rhs._id;
+    _timeStamp     = rhs._timeStamp;
+    _frequencyInHz = rhs._frequencyInHz;
+    _speechType    = rhs._speechType;
+    _vadActivity   = rhs._vadActivity;
+    _volume        = rhs._volume;
+    _audioChannel  = rhs._audioChannel;
+    _energy        = rhs._energy;
+    _payloadDataLengthInSamples = rhs._payloadDataLengthInSamples;
+
+    memcpy(_payloadData, rhs._payloadData,
+        sizeof(WebRtc_Word16) * rhs._payloadDataLengthInSamples * _audioChannel);
+
+    return *this;
+}
+
+// Arithmetic right-shift of every payload sample by rhs bits.
+inline
+AudioFrame&
+AudioFrame::operator>>=(const WebRtc_Word32 rhs)
+{
+    assert((_audioChannel > 0) && (_audioChannel < 3));
+    if((_audioChannel < 1) || (_audioChannel > 2))
+    {
+        return *this;
+    }
+    const WebRtc_UWord32 totalSamples =
+        _payloadDataLengthInSamples * _audioChannel;
+    for(WebRtc_UWord32 sample = 0; sample < totalSamples; sample++)
+    {
+        _payloadData[sample] = WebRtc_Word16(_payloadData[sample] >> rhs);
+    }
+    return *this;
+}
+
+// Append rhs's payload after this frame's payload, merging VAD activity and
+// speech-type state. The frame is left unchanged when the channel
+// configuration is invalid or mismatched, or when the combined payload
+// would exceed kMaxAudioFrameSizeSamples — the original code performed no
+// capacity check and could write past the end of _payloadData.
+inline
+AudioFrame&
+AudioFrame::Append(const AudioFrame& rhs)
+{
+    // Sanity check
+    assert((_audioChannel > 0) && (_audioChannel < 3));
+    if((_audioChannel > 2) ||
+        (_audioChannel < 1))
+    {
+        return *this;
+    }
+    if(_audioChannel != rhs._audioChannel)
+    {
+        return *this;
+    }
+    // Reject an append that would overflow the fixed-size payload array
+    // before touching any state.
+    const WebRtc_UWord32 totalSamples =
+        ((WebRtc_UWord32)_payloadDataLengthInSamples +
+         (WebRtc_UWord32)rhs._payloadDataLengthInSamples) * _audioChannel;
+    if(totalSamples > kMaxAudioFrameSizeSamples)
+    {
+        return *this;
+    }
+    // Merge VAD state: any activity wins, then uncertainty.
+    if((_vadActivity == kVadActive) ||
+        rhs._vadActivity == kVadActive)
+    {
+        _vadActivity = kVadActive;
+    }
+    else if((_vadActivity == kVadUnknown) ||
+        rhs._vadActivity == kVadUnknown)
+    {
+        _vadActivity = kVadUnknown;
+    }
+    if(_speechType != rhs._speechType)
+    {
+        _speechType = kUndefined;
+    }
+
+    WebRtc_UWord16 offset = _payloadDataLengthInSamples * _audioChannel;
+    for(WebRtc_UWord16 i = 0;
+        i < rhs._payloadDataLengthInSamples * rhs._audioChannel;
+        i++)
+    {
+        _payloadData[offset+i] = rhs._payloadData[i];
+    }
+    _payloadDataLengthInSamples += rhs._payloadDataLengthInSamples;
+    return *this;
+}
+
+// Sample-wise saturated addition (audio mixing) of rhs into this frame.
+// Requires matching channel configuration and either matching payload
+// lengths or an empty left-hand side. Energy and volume become unknown.
+inline
+AudioFrame&
+AudioFrame::operator+=(const AudioFrame& rhs)
+{
+    // Sanity check
+    assert((_audioChannel > 0) && (_audioChannel < 3));
+    if((_audioChannel > 2) ||
+        (_audioChannel < 1))
+    {
+        return *this;
+    }
+    if(_audioChannel != rhs._audioChannel)
+    {
+        return *this;
+    }
+    bool noPrevData = false;
+    if(_payloadDataLengthInSamples != rhs._payloadDataLengthInSamples)
+    {
+        if(_payloadDataLengthInSamples == 0)
+        {
+            // special case we have no data to start with
+            _payloadDataLengthInSamples = rhs._payloadDataLengthInSamples;
+            noPrevData = true;
+        } else
+        {
+          // Mismatched non-zero lengths cannot be mixed; leave unchanged.
+          return *this;
+        }
+    }
+
+    // Merge VAD state: any activity wins, then uncertainty.
+    if((_vadActivity == kVadActive) ||
+        rhs._vadActivity == kVadActive)
+    {
+        _vadActivity = kVadActive;
+    }
+    else if((_vadActivity == kVadUnknown) ||
+        rhs._vadActivity == kVadUnknown)
+    {
+        _vadActivity = kVadUnknown;
+    }
+
+    if(_speechType != rhs._speechType)
+    {
+        _speechType = kUndefined;
+    }
+
+    if(noPrevData)
+    {
+        // Left side was empty: a plain copy of rhs's payload suffices.
+        memcpy(_payloadData, rhs._payloadData,
+          sizeof(WebRtc_Word16) * rhs._payloadDataLengthInSamples * _audioChannel);
+    } else
+    {
+      // IMPROVEMENT this can be done very fast in assembly
+      for(WebRtc_UWord16 i = 0; i < _payloadDataLengthInSamples * _audioChannel; i++)
+      {
+          // Add in 32-bit precision, then clamp to the 16-bit sample range.
+          WebRtc_Word32 wrapGuard = (WebRtc_Word32)_payloadData[i] +
+                  (WebRtc_Word32)rhs._payloadData[i];
+          if(wrapGuard < -32768)
+          {
+              _payloadData[i] = -32768;
+          }else if(wrapGuard > 32767)
+          {
+              _payloadData[i] = 32767;
+          }else
+          {
+              _payloadData[i] = (WebRtc_Word16)wrapGuard;
+          }
+      }
+    }
+    // Mixing invalidates the cached energy/volume measurements.
+    _energy = 0xffffffff;
+    _volume = 0xffffffff;
+    return *this;
+}
+
+// Sample-wise saturated subtraction of rhs from this frame. Requires a
+// matching channel configuration and payload length; otherwise the frame is
+// left untouched. Energy and volume become unknown afterwards.
+inline
+AudioFrame&
+AudioFrame::operator-=(const AudioFrame& rhs)
+{
+    // Sanity check
+    assert((_audioChannel > 0) && (_audioChannel < 3));
+    if((_audioChannel > 2) || (_audioChannel < 1))
+    {
+        return *this;
+    }
+    if((_payloadDataLengthInSamples != rhs._payloadDataLengthInSamples) ||
+        (_audioChannel != rhs._audioChannel))
+    {
+        return *this;
+    }
+    // Unless both operands are known passive, the result's activity is
+    // unknown.
+    if((_vadActivity != kVadPassive) || rhs._vadActivity != kVadPassive)
+    {
+        _vadActivity = kVadUnknown;
+    }
+    _speechType = kUndefined;
+
+    const WebRtc_UWord32 totalSamples =
+        _payloadDataLengthInSamples * _audioChannel;
+    for(WebRtc_UWord32 i = 0; i < totalSamples; i++)
+    {
+        // Subtract in 32-bit precision, then clamp to the 16-bit range.
+        WebRtc_Word32 diff = (WebRtc_Word32)_payloadData[i] -
+                             (WebRtc_Word32)rhs._payloadData[i];
+        if(diff < -32768)
+        {
+            diff = -32768;
+        }
+        else if(diff > 32767)
+        {
+            diff = 32767;
+        }
+        _payloadData[i] = (WebRtc_Word16)diff;
+    }
+    _energy = 0xffffffff;
+    _volume = 0xffffffff;
+    return *this;
+}
+
+} // namespace webrtc
+
+#endif // MODULE_COMMON_TYPES_H
diff --git a/trunk/src/modules/media_file/OWNERS b/trunk/src/modules/media_file/OWNERS
new file mode 100644
index 0000000..2cc47e4
--- /dev/null
+++ b/trunk/src/modules/media_file/OWNERS
@@ -0,0 +1,4 @@
+pwestin@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+niklas.enbom@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/modules/media_file/interface/media_file.h b/trunk/src/modules/media_file/interface/media_file.h
new file mode 100644
index 0000000..bc857ad
--- /dev/null
+++ b/trunk/src/modules/media_file/interface/media_file.h
@@ -0,0 +1,227 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
+#define WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
+
+#include "common_types.h"
+#include "typedefs.h"
+#include "module.h"
+#include "module_common_types.h"
+#include "media_file_defines.h"
+
+namespace webrtc {
+// Module for recording audio/video to, and playing it out from, files and
+// streams. Instances are created through the CreateMediaFile() factory; all
+// functionality is pure virtual and implemented elsewhere.
+class MediaFile : public Module
+{
+public:
+    // Factory method. Constructor disabled. id is the identifier for the
+    // MediaFile instance.
+    static MediaFile* CreateMediaFile(const WebRtc_Word32 id);
+    static void DestroyMediaFile(MediaFile* module);
+
+    // Set the MediaFile instance identifier.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    // Put 10-60ms of audio data from file into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes is both an input and output
+    // parameter. As input parameter it indicates the size of audioBuffer.
+    // As output parameter it indicates the number of bytes written to
+    // audioBuffer.
+    // Note: This API only plays mono audio but can be used on a file containing
+    // audio with more channels (in which case the audio will be converted to
+    // mono).
+    virtual WebRtc_Word32 PlayoutAudioData(
+        WebRtc_Word8* audioBuffer,
+        WebRtc_UWord32& dataLengthInBytes) = 0;
+
+    // Put one video frame into videoBuffer. dataLengthInBytes is both an input
+    // and output parameter. As input parameter it indicates the size of
+    // videoBuffer. As output parameter it indicates the number of bytes written
+    // to videoBuffer.
+    virtual WebRtc_Word32 PlayoutAVIVideoData(
+        WebRtc_Word8* videoBuffer,
+        WebRtc_UWord32& dataLengthInBytes) = 0;
+
+    // Put 10-60ms, depending on codec frame size, of audio data from file into
+    // audioBufferLeft and audioBufferRight. The buffers contain the left and
+    // right channel of played out stereo audio.
+    // dataLengthInBytes is both an input and output parameter. As input
+    // parameter it indicates the size of both audioBufferLeft and
+    // audioBufferRight. As output parameter it indicates the number of bytes
+    // written to both audio buffers.
+    // Note: This API can only be successfully called for WAV files with stereo
+    // audio.
+    virtual WebRtc_Word32 PlayoutStereoData(
+        WebRtc_Word8* audioBufferLeft,
+        WebRtc_Word8* audioBufferRight,
+        WebRtc_UWord32& dataLengthInBytes) = 0;
+
+    // Open the file specified by fileName (relative path is allowed) for
+    // reading. FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. If loop is true the file will be played until
+    // StopPlaying() is called. When end of file is reached the file is read
+    // from the start. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo audio is only supported for WAV files.
+    virtual WebRtc_Word32 StartPlayingAudioFile(
+        const WebRtc_Word8* fileName,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool loop                         = false,
+        const FileFormats format                = kFileFormatPcm16kHzFile,
+        const CodecInst* codecInst              = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0) = 0;
+
+    // Open the file specified by fileName for reading (relative path is
+    // allowed). If loop is true the file will be played until StopPlaying() is
+    // called. When end of file is reached the file is read from the start.
+    // format specifies the type of file fileName refers to. Only video will be
+    // read if videoOnly is true.
+    virtual WebRtc_Word32 StartPlayingVideoFile(const WebRtc_Word8* fileName,
+                                                const bool loop,
+                                                bool videoOnly,
+                                                const FileFormats format) = 0;
+
+    // Prepare for playing audio from stream.
+    // FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo audio is only supported for WAV files.
+    virtual WebRtc_Word32 StartPlayingAudioStream(
+        InStream& stream,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const FileFormats    format             = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst          = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0) = 0;
+
+    // Stop playing from file or stream.
+    virtual WebRtc_Word32 StopPlaying() = 0;
+
+    // Return true if playing.
+    virtual bool IsPlaying() = 0;
+
+
+    // Set durationMs to the number of ms that has been played from file.
+    virtual WebRtc_Word32 PlayoutPositionMs(
+        WebRtc_UWord32& durationMs) const = 0;
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful StartRecordingAudioFile(..) call.
+    // Note: bufferLength must be exactly one frame.
+    virtual WebRtc_Word32 IncomingAudioData(
+        const WebRtc_Word8*  audioBuffer,
+        const WebRtc_UWord32 bufferLength) = 0;
+
+    // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
+    // to file.
+    // Note: videoBuffer can contain encoded data. The codec used must be the
+    // same as what was specified by videoCodecInst for the last successful
+    // StartRecordingVideoFile(..) call. The videoBuffer must contain exactly
+    // one video frame.
+    virtual WebRtc_Word32 IncomingAVIVideoData(
+        const WebRtc_Word8*  videoBuffer,
+        const WebRtc_UWord32 bufferLength) = 0;
+
+    // Opens/creates the file specified by fileName for writing (relative path
+    // is allowed). FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that should be created/opened.
+    // codecInst specifies the encoding of the audio data. maxSizeBytes
+    // specifies the number of bytes allowed to be written to file if it is
+    // greater than zero.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        const WebRtc_Word8*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const WebRtc_UWord32 maxSizeBytes       = 0) = 0;
+
+    // Open/create the file specified by fileName for writing audio/video data
+    // (relative path is allowed). format specifies the type of file fileName
+    // should be. codecInst specifies the encoding of the audio data.
+    // videoCodecInst specifies the encoding of the video data. Only video data
+    // will be recorded if videoOnly is true.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const WebRtc_Word8* fileName,
+        const FileFormats   format,
+        const CodecInst&    codecInst,
+        const VideoCodec&   videoCodecInst,
+        bool videoOnly = false) = 0;
+
+    // Prepare for recording audio to stream.
+    // FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that stream should correspond to.
+    // codecInst specifies the encoding of the audio data.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    virtual WebRtc_Word32 StartRecordingAudioStream(
+        OutStream&           stream,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0) = 0;
+
+    // Stop recording to file or stream.
+    virtual WebRtc_Word32 StopRecording() = 0;
+
+    // Return true if recording.
+    virtual bool IsRecording() = 0;
+
+    // Set durationMs to the number of ms that has been recorded to file.
+    virtual WebRtc_Word32 RecordDurationMs(WebRtc_UWord32& durationMs) = 0;
+
+    // Return true if recording or playing is stereo.
+    virtual bool IsStereo() = 0;
+
+    // Register callback to receive media file related notifications. Disables
+    // callbacks if callback is NULL.
+    virtual WebRtc_Word32 SetModuleFileCallback(FileCallback* callback) = 0;
+
+    // Set durationMs to the size of the file (in ms) specified by fileName.
+    // format specifies the type of file fileName refers to. freqInHz specifies
+    // the sampling frequency of the file.
+    virtual WebRtc_Word32 FileDurationMs(
+        const WebRtc_Word8*  fileName,
+        WebRtc_UWord32&      durationMs,
+        const FileFormats    format,
+        const WebRtc_UWord32 freqInHz = 16000) = 0;
+
+    // Update codecInst according to the current audio codec being used for
+    // reading or writing.
+    virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const = 0;
+
+    // Update videoCodecInst according to the current video codec being used for
+    // reading or writing.
+    virtual WebRtc_Word32 VideoCodecInst(VideoCodec& videoCodecInst) const = 0;
+
+protected:
+    // Construction only through the CreateMediaFile() factory.
+    MediaFile() {}
+    virtual ~MediaFile() {}
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
diff --git a/trunk/src/modules/media_file/interface/media_file_defines.h b/trunk/src/modules/media_file/interface/media_file_defines.h
new file mode 100644
index 0000000..38af562
--- /dev/null
+++ b/trunk/src/modules/media_file/interface/media_file_defines.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
+#define WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
+
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+// Callback class for the MediaFile class.
+class FileCallback
+{
+public:
+    virtual ~FileCallback(){}
+
+    // This function is called by MediaFile when a file has been playing for
+    // durationMs ms. id is the identifier for the MediaFile instance calling
+    // the callback.
+    virtual void PlayNotification(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 durationMs) = 0;
+
+    // This function is called by MediaFile when a file has been recording for
+    // durationMs ms. id is the identifier for the MediaFile instance calling
+    // the callback.
+    virtual void RecordNotification(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 durationMs) = 0;
+
+    // This function is called by MediaFile when a file has been stopped
+    // playing. id is the identifier for the MediaFile instance calling the
+    // callback.
+    virtual void PlayFileEnded(const WebRtc_Word32 id) = 0;
+
+    // This function is called by MediaFile when a file has been stopped
+    // recording. id is the identifier for the MediaFile instance calling the
+    // callback.
+    virtual void RecordFileEnded(const WebRtc_Word32 id) = 0;
+
+protected:
+    // Abstract interface: instantiable only through derived classes.
+    FileCallback() {}
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
diff --git a/trunk/src/modules/media_file/source/Android.mk b/trunk/src/modules/media_file/source/Android.mk
new file mode 100644
index 0000000..2c408c8
--- /dev/null
+++ b/trunk/src/modules/media_file/source/Android.mk
@@ -0,0 +1,46 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings (defines MY_WEBRTC_COMMON_DEFS used below).
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_media_file
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    avi_file.cc \
+    media_file_impl.cc \
+    media_file_utility.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_MODULE_UTILITY_VIDEO'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../utility/interface \
+    $(LOCAL_PATH)/../../audio_coding/main/interface \
+    $(LOCAL_PATH)/../../../system_wrappers/interface
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# external/stlport is only available in the full-source (non-NDK) build.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/media_file/source/avi_file.cc b/trunk/src/modules/media_file/source/avi_file.cc
new file mode 100644
index 0000000..74f7908
--- /dev/null
+++ b/trunk/src/modules/media_file/source/avi_file.cc
@@ -0,0 +1,1773 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(henrike): reassess the error handling in this class. Currently failure
+// is detected by asserts in many places. Also a refactoring of this class would
+// be beneficial.
+
+#include "avi_file.h"
+
+#include <assert.h>
+#include <string.h>
+
+#ifdef _WIN32
+#include <windows.h>
+#endif
+
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "list_wrapper.h"
+#include "trace.h"
+
+// http://msdn2.microsoft.com/en-us/library/ms779636.aspx
+// A chunk has the following form:
+// ckID ckSize ckData
+// where ckID is a FOURCC that identifies the data contained in the
+// chunk, ckData is a 4-byte value giving the size of the data in
+// ckData, and ckData is zero or more bytes of data. The data is always
+// padded to nearest WORD boundary. ckSize gives the size of the valid
+// data in the chunk; it does not include the padding, the size of
+// ckID, or the size of ckSize.
+//http://msdn2.microsoft.com/en-us/library/ms779632.aspx
+//NOTE: Workaround to make MPEG4 files play on WMP. MPEG files
+//      place the config parameters efter the BITMAPINFOHEADER and
+//      *NOT* in the 'strd'!
+// http://msdn.microsoft.com/en-us/library/dd183375.aspx
+// http://msdn.microsoft.com/en-us/library/dd183376.aspx
+
+namespace webrtc {
namespace {
// AVI main-header (avih) dwFlags bits; values match the Microsoft
// AVIMAINHEADER documentation (AVIF_HASINDEX etc.).
static const WebRtc_UWord32 kAvifHasindex       = 0x00000010;
static const WebRtc_UWord32 kAvifMustuseindex   = 0x00000020;
static const WebRtc_UWord32 kAvifIsinterleaved  = 0x00000100;
static const WebRtc_UWord32 kAvifTrustcktype    = 0x00000800;
static const WebRtc_UWord32 kAvifWascapturefile = 0x00010000;

// Returns the smaller of a and b.
template <class T>
T MinValue(T a, T b)
{
    return a < b ? a : b;
}
}  // namespace
+
+AviFile::AVIMAINHEADER::AVIMAINHEADER()
+    : fcc(                  0),
+      cb(                   0),
+      dwMicroSecPerFrame(   0),
+      dwMaxBytesPerSec(     0),
+      dwPaddingGranularity( 0),
+      dwFlags(              0),
+      dwTotalFrames(        0),
+      dwInitialFrames(      0),
+      dwStreams(            0),
+      dwSuggestedBufferSize(0),
+      dwWidth(              0),
+      dwHeight(             0)
+{
+    dwReserved[0] = 0;
+    dwReserved[1] = 0;
+    dwReserved[2] = 0;
+    dwReserved[3] = 0;
+}
+
+AVISTREAMHEADER::AVISTREAMHEADER()
+    : fcc(                  0),
+      cb(                   0),
+      fccType(              0),
+      fccHandler(           0),
+      dwFlags(              0),
+      wPriority(            0),
+      wLanguage(            0),
+      dwInitialFrames(      0),
+      dwScale(              0),
+      dwRate(               0),
+      dwStart(              0),
+      dwLength(             0),
+      dwSuggestedBufferSize(0),
+      dwQuality(            0),
+      dwSampleSize(         0)
+{
+    rcFrame.left   = 0;
+    rcFrame.top    = 0;
+    rcFrame.right  = 0;
+    rcFrame.bottom = 0;
+}
+
+BITMAPINFOHEADER::BITMAPINFOHEADER()
+    : biSize(         0),
+      biWidth(        0),
+      biHeight(       0),
+      biPlanes(       0),
+      biBitCount(     0),
+      biCompression(  0),
+      biSizeImage(    0),
+      biXPelsPerMeter(0),
+      biYPelsPerMeter(0),
+      biClrUsed(      0),
+      biClrImportant( 0)
+{
+}
+
+WAVEFORMATEX::WAVEFORMATEX()
+    : wFormatTag(     0),
+      nChannels(      0),
+      nSamplesPerSec( 0),
+      nAvgBytesPerSec(0),
+      nBlockAlign(    0),
+      wBitsPerSample( 0),
+      cbSize(         0)
+{
+}
+
+AviFile::AVIINDEXENTRY::AVIINDEXENTRY(WebRtc_UWord32 inckid,
+                                      WebRtc_UWord32 indwFlags,
+                                      WebRtc_UWord32 indwChunkOffset,
+                                      WebRtc_UWord32 indwChunkLength)
+    : ckid(inckid),
+      dwFlags(indwFlags),
+      dwChunkOffset(indwChunkOffset),
+      dwChunkLength(indwChunkLength)
+{
+}
+
// Constructs an AviFile in the NotSet state: no file handle, no streams,
// all counters zeroed. The member-initializer order must match the
// declaration order in the header.
AviFile::AviFile()
    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
      _aviFile(NULL),
      _aviHeader(),
      _videoStreamHeader(),
      _audioStreamHeader(),
      _videoFormatHeader(),
      _audioFormatHeader(),
      _videoConfigParameters(),
      _videoConfigLength(0),
      _videoStreamName(),
      _videoStreamNameLength(0),
      _audioConfigParameters(),
      _audioStreamName(),
      _videoStream(),
      _audioStream(),
      _nrStreams(0),
      _aviLength(0),
      _dataLength(0),
      _bytesRead(0),
      _dataStartByte(0),
      _framesRead(0),
      _videoFrames(0),
      _audioFrames(0),
      _reading(false),
      _openedAs(AVI_AUDIO),  // arbitrary default; set properly by Open().
      _loop(false),
      _writing(false),
      _bytesWritten(0),
      _riffSizeMark(0),
      _moviSizeMark(0),
      _totNumFramesMark(0),
      _videoStreamLengthMark(0),
      _audioStreamLengthMark(0),
      _moviListOffset(0),
      _writeAudioStream(false),
      _writeVideoStream(false),
      _aviMode(NotSet),
      _videoCodecConfigParams(NULL),
      _videoCodecConfigParamsLength(0),
      _videoStreamDataChunkPrefix(0),
      _audioStreamDataChunkPrefix(0),
      _created(false),
      _indexList(new ListWrapper())
{
  // Presumably re-zeroes the aggregate members (defined elsewhere in this
  // file) - TODO confirm.
  ResetComplexMembers();
}
+
// Closes the file (finalizing any pending write state) and releases all
// heap resources owned by this instance.
AviFile::~AviFile()
{
    Close();

    delete _indexList;
    // Close() already frees and nulls _videoCodecConfigParams;
    // delete[] on NULL is a harmless no-op.
    delete[] _videoCodecConfigParams;
    delete _crit;
}
+
+WebRtc_Word32 AviFile::Open(AVIStreamType streamType, const char* fileName,
+                            bool loop)
+{
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,  "OpenAVIFile(%s)",
+                 fileName);
+    _crit->Enter();
+
+    if (_aviMode != NotSet)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    _aviMode = Read;
+
+    if (!fileName)
+    {
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "\tfileName not valid!");
+        return -1;
+    }
+
+#ifdef _WIN32
+    // fopen does not support wide characters on Windows, ergo _wfopen.
+    wchar_t wideFileName[FileWrapper::kMaxFileNameSize];
+    wideFileName[0] = 0;
+    MultiByteToWideChar(CP_UTF8,0,fileName, -1, // convert the whole string
+                        wideFileName, FileWrapper::kMaxFileNameSize);
+
+    _aviFile = _wfopen(wideFileName, L"rb");
+#else
+    _aviFile = fopen(fileName, "rb");
+#endif
+
+    if (!_aviFile)
+    {
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Could not open file!");
+        return -1;
+    }
+
+    // ReadRIFF verifies that the file is AVI and figures out the file length.
+    WebRtc_Word32 err = ReadRIFF();
+    if (err)
+    {
+        if (_aviFile)
+        {
+            fclose(_aviFile);
+            _aviFile = NULL;
+        }
+        _crit->Leave();
+        return -1;
+    }
+
+   err = ReadHeaders();
+    if (err)
+    {
+        if (_aviFile)
+        {
+            fclose(_aviFile);
+            _aviFile = NULL;
+        }
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Unsupported or corrupt AVI format");
+        return -1;
+    }
+
+    _dataStartByte = _bytesRead;
+    _reading = true;
+    _openedAs = streamType;
+    _loop = loop;
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::Close()
+{
+    _crit->Enter();
+    switch (_aviMode)
+    {
+    case Read:
+        CloseRead();
+        break;
+    case Write:
+        CloseWrite();
+        break;
+    default:
+        break;
+    }
+
+    if (_videoCodecConfigParams)
+    {
+        delete [] _videoCodecConfigParams;
+        _videoCodecConfigParams = 0;
+    }
+    ResetMembers();
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_UWord32 AviFile::MakeFourCc(WebRtc_UWord8 ch0, WebRtc_UWord8 ch1,
+                                   WebRtc_UWord8 ch2, WebRtc_UWord8 ch3)
+{
+    return ((WebRtc_UWord32)(WebRtc_UWord8)(ch0)         |
+            ((WebRtc_UWord32)(WebRtc_UWord8)(ch1) << 8)  |
+            ((WebRtc_UWord32)(WebRtc_UWord8)(ch2) << 16) |
+            ((WebRtc_UWord32)(WebRtc_UWord8)(ch3) << 24 ));
+}
+
// Copies the video stream header and bitmap format out to the caller.
// configLength is in/out: on input the caller's buffer size, on output the
// number of codec-config bytes copied (0 if none were copied).
// Returns 0 on success, -1 if the file is neither opened nor created.
WebRtc_Word32 AviFile::GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
                                          BITMAPINFOHEADER& bitmapInfo,
                                          char* codecConfigParameters,
                                          WebRtc_Word32& configLength)
{
    _crit->Enter();
    if (!_reading && !_created)
    {
        _crit->Leave();
        return -1;
    }

    memcpy(&videoStreamHeader, &_videoStreamHeader, sizeof(_videoStreamHeader));
    memcpy(&bitmapInfo, &_videoFormatHeader, sizeof(_videoFormatHeader));

    // NOTE(review): this comparison looks inverted. As written it copies
    // _videoConfigLength bytes when the caller's buffer is SMALLER than or
    // equal to the config data (potential buffer overflow), and refuses a
    // larger buffer. Callers may depend on the current semantics, so it is
    // flagged rather than changed - verify against the call sites before
    // flipping to `configLength >= _videoConfigLength`.
    if (configLength <= _videoConfigLength)
    {
        memcpy(codecConfigParameters, _videoConfigParameters,
               _videoConfigLength);
        configLength = _videoConfigLength;
    }
    else
    {
        configLength = 0;
    }
    _crit->Leave();
    return 0;
}
+
+WebRtc_Word32 AviFile::GetDuration(WebRtc_Word32& durationMs)
+{
+    _crit->Enter();
+    if (_videoStreamHeader.dwRate==0 || _videoStreamHeader.dwScale==0)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    durationMs = _videoStreamHeader.dwLength * 1000 /
+        (_videoStreamHeader.dwRate/_videoStreamHeader.dwScale);
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::GetAudioStreamInfo(WAVEFORMATEX& waveHeader)
+{
+    _crit->Enter();
+    if (_aviMode != Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (!_reading && !_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    memcpy(&waveHeader, &_audioFormatHeader, sizeof(_audioFormatHeader));
+    _crit->Leave();
+    return 0;
+}
+
// Appends one audio data chunk to the movi list and records it in the
// index. Returns the number of bytes written (including the chunk header
// and any pad byte), or -1 if the file is not created for writing with an
// audio stream.
WebRtc_Word32 AviFile::WriteAudio(const WebRtc_UWord8* data,
                                  WebRtc_Word32 length)
{
    _crit->Enter();
    // Snapshot the running byte count so the delta can be returned.
    size_t newBytesWritten = _bytesWritten;

    if (_aviMode != Write)
    {
        _crit->Leave();
        return -1;
    }
    if (!_created)
    {
        _crit->Leave();
        return -1;
    }
    if (!_writeAudioStream)
    {
        _crit->Leave();
        return -1;
    }

    // Start of chunk. Index offsets are relative to the movi list.
    const WebRtc_UWord32 chunkOffset = ftell(_aviFile) - _moviListOffset;
    _bytesWritten += PutLE32(_audioStreamDataChunkPrefix);
    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t chunkSizeMark = _bytesWritten;

    _bytesWritten += PutBuffer(data, length);

    // Seek back and patch the size field with the bytes written since the
    // mark, then return to the end of the file.
    const long chunkSize = PutLE32LengthFromCurrent(
        static_cast<long>(chunkSizeMark));

    // Make sure that the chunk is aligned on 2 bytes (= 1 sample).
    if (chunkSize % 2)
    {
        _bytesWritten += PutByte(0);
    }
    // End of chunk

    // Save chunk information for use when closing file.
    AddChunkToIndexList(_audioStreamDataChunkPrefix, 0, // No flags.
                        chunkOffset, chunkSize);

    ++_audioFrames;
    newBytesWritten = _bytesWritten - newBytesWritten;
    _crit->Leave();
    return static_cast<WebRtc_Word32>(newBytesWritten);
}
+
// Appends one video frame chunk to the movi list and records it in the
// index. Returns the number of bytes written (including the chunk header
// and any pad byte), or -1 if the file is not created for writing with a
// video stream.
WebRtc_Word32 AviFile::WriteVideo(const WebRtc_UWord8* data,
                                  WebRtc_Word32 length)
{
    _crit->Enter();
    // Snapshot the running byte count so the delta can be returned.
    size_t newBytesWritten = _bytesWritten;
    if (_aviMode != Write)
    {
        _crit->Leave();
        return -1;
    }
    if (!_created)
    {
        _crit->Leave();
        return -1;
    }
    if (!_writeVideoStream)
    {
        _crit->Leave();
        return -1;
    }

    // Start of chunk. Index offsets are relative to the movi list.
    const WebRtc_UWord32 chunkOffset = ftell(_aviFile) - _moviListOffset;
    _bytesWritten += PutLE32(_videoStreamDataChunkPrefix);
    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t chunkSizeMark = _bytesWritten;

    _bytesWritten += PutBuffer(data, length);

    // Seek back and patch the size field with the bytes written since the
    // mark, then return to the end of the file.
    const long chunkSize = PutLE32LengthFromCurrent(
        static_cast<long>(chunkSizeMark));

    // Make sure that the chunk is aligned on 2 bytes (= 1 sample).
    if (chunkSize % 2)
    {
        //Pad one byte, to WORD align.
        _bytesWritten += PutByte(0);
    }
     //End chunk!
    AddChunkToIndexList(_videoStreamDataChunkPrefix, 0, // No flags.
                        chunkOffset, static_cast<WebRtc_UWord32>(chunkSize));

    ++_videoFrames;
    newBytesWritten = _bytesWritten - newBytesWritten;
    _crit->Leave();
    return static_cast<WebRtc_Word32>(newBytesWritten);
}
+
+WebRtc_Word32 AviFile::PrepareDataChunkHeaders()
+{
+    // 00 video stream, 01 audio stream.
+    // db uncompresses video,  dc compressed video, wb WAV audio
+    if (_writeVideoStream)
+    {
+        if (strncmp((const char*) &_videoStreamHeader.fccHandler, "I420", 4) ==
+            0)
+        {
+            _videoStreamDataChunkPrefix = MakeFourCc('0', '0', 'd', 'b');
+        }
+        else
+        {
+            _videoStreamDataChunkPrefix = MakeFourCc('0', '0', 'd', 'c');
+        }
+        _audioStreamDataChunkPrefix = MakeFourCc('0', '1', 'w', 'b');
+    }
+    else
+    {
+        _audioStreamDataChunkPrefix = MakeFourCc('0', '0', 'w', 'b');
+    }
+    return 0;
+}
+
// Scans forward through the movi list for the next chunk whose id matches
// tag1 (or tag2, when non-zero) and copies its payload into data.
// length is in/out: buffer capacity on input, bytes delivered on output
// (0 on failure). When _loop is set, reaching EOF rewinds to the first
// data chunk and continues. Returns 0 on success, -1 on failure.
WebRtc_Word32 AviFile::ReadMoviSubChunk(WebRtc_UWord8* data,
                                        WebRtc_Word32& length,
                                        WebRtc_UWord32 tag1,
                                        WebRtc_UWord32 tag2)
{
    if (!_reading)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
                     "AviFile::ReadMoviSubChunk(): File not open!");
        length = 0;
        return -1;
    }

    WebRtc_UWord32 size;
    bool isEOFReached = false;
    // Try to read one data chunk header
    while (true)
    {
        // TODO (hellner): what happens if an empty AVI file is opened with
        // _loop set to true? Seems like this while-loop would never exit!

        // tag = db uncompresses video,  dc compressed video or wb WAV audio.
        WebRtc_UWord32 tag;
        _bytesRead += GetLE32(tag);
        _bytesRead += GetLE32(size);

        const WebRtc_Word32 eof = feof(_aviFile);
        if (!eof)
        {
            if (tag == tag1)
            {
                // Supported tag found.
                break;
            }
            else if ((tag == tag2) && (tag2 != 0))
            {
                // Supported tag found.
                break;
            }

            // Jump to next chunk. The size is in bytes but chunks are aligned
            // on 2 byte boundaries.
            const WebRtc_UWord32 seekSize = (size % 2) ? size + 1 : size;
            const WebRtc_Word32 err = fseek(_aviFile, seekSize, SEEK_CUR);

            if (err)
            {
                isEOFReached = true;
            }
        }
        else
        {
            isEOFReached = true;
        }

        if (isEOFReached)
        {
            clearerr(_aviFile);

            if (_loop)
            {
                WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
                             "AviFile::ReadMoviSubChunk(): Reached end of AVI\
                              data file, starting from the beginning.");

                // Rewind to the first data chunk and restart the scan.
                fseek(_aviFile, static_cast<long>(_dataStartByte), SEEK_SET);

                _bytesRead = _dataStartByte;
                _framesRead = 0;
                isEOFReached = false;
            }
            else
            {
                WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
                             "AviFile::ReadMoviSubChunk(): Reached end of AVI\
                             file!");
                length = 0;
                return -1;
            }
        }
        // NOTE(review): for skipped odd-sized chunks this adds `size`, not
        // the padded `seekSize`, and after a loop-rewind it adds the stale
        // `size` on top of _dataStartByte - _bytesRead can drift from the
        // actual file position. Verify before relying on _bytesRead here.
        _bytesRead += size;
    }

    if (static_cast<WebRtc_Word32>(size) > length)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
                     "AviFile::ReadMoviSubChunk(): AVI read buffer too small!");

        // Jump to next chunk. The size is in bytes but chunks are aligned
        // on 2 byte boundaries.
        const WebRtc_UWord32 seekSize = (size % 2) ? size + 1 : size;
        fseek(_aviFile, seekSize, SEEK_CUR);
        _bytesRead += seekSize;
        length = 0;
        return -1;
    }
    _bytesRead += GetBuffer(data, size);

    // The size is in bytes but chunks are aligned on 2 byte boundaries.
    if (size % 2)
    {
        WebRtc_UWord8 dummy_byte;
        _bytesRead += GetByte(dummy_byte);
    }
    length = size;
    ++_framesRead;
    return 0;
}
+
+WebRtc_Word32 AviFile::ReadAudio(WebRtc_UWord8* data, WebRtc_Word32& length)
+{
+    _crit->Enter();
+    WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,  "AviFile::ReadAudio()");
+
+    if (_aviMode != Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (_openedAs != AVI_AUDIO)
+    {
+        length = 0;
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,  "File not open as audio!");
+        return -1;
+    }
+
+    const WebRtc_Word32 ret = ReadMoviSubChunk(
+        data,
+        length,
+        StreamAndTwoCharCodeToTag(_audioStream.streamNumber, "wb"));
+
+    _crit->Leave();
+    return ret;
+}
+
+WebRtc_Word32 AviFile::ReadVideo(WebRtc_UWord8* data, WebRtc_Word32& length)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadVideo()");
+
+    _crit->Enter();
+    if (_aviMode != Read)
+    {
+        //Has to be Read!
+        _crit->Leave();
+        return -1;
+    }
+    if (_openedAs != AVI_VIDEO)
+    {
+        length = 0;
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "File not open as video!");
+        return -1;
+    }
+
+    const WebRtc_Word32 ret = ReadMoviSubChunk(
+        data,
+        length,
+        StreamAndTwoCharCodeToTag(_videoStream.streamNumber, "dc"),
+        StreamAndTwoCharCodeToTag(_videoStream.streamNumber, "db"));
+    _crit->Leave();
+    return ret;
+}
+
+WebRtc_Word32 AviFile::Create(const char* fileName)
+{
+    _crit->Enter();
+    if (_aviMode != Write)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    if (!_writeVideoStream && !_writeAudioStream)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+#ifdef _WIN32
+    // fopen does not support wide characters on Windows, ergo _wfopen.
+    wchar_t wideFileName[FileWrapper::kMaxFileNameSize];
+    wideFileName[0] = 0;
+
+    MultiByteToWideChar(CP_UTF8,0,fileName, -1, // convert the whole string
+                        wideFileName, FileWrapper::kMaxFileNameSize);
+
+    _aviFile = _wfopen(wideFileName, L"w+b");
+    if (!_aviFile)
+    {
+        _crit->Leave();
+        return -1;
+    }
+#else
+    _aviFile = fopen(fileName, "w+b");
+    if (!_aviFile)
+    {
+        _crit->Leave();
+        return -1;
+    }
+#endif
+
+    WriteRIFF();
+    WriteHeaders();
+
+    _created = true;
+
+    PrepareDataChunkHeaders();
+    ClearIndexList();
+    WriteMoviStart();
+    _aviMode = Write;
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::CreateVideoStream(
+    const AVISTREAMHEADER& videoStreamHeader,
+    const BITMAPINFOHEADER& bitMapInfoHeader,
+    const WebRtc_UWord8* codecConfigParams,
+    WebRtc_Word32 codecConfigParamsLength)
+{
+    _crit->Enter();
+    if (_aviMode == Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    if (_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    _aviMode = Write;
+    _writeVideoStream = true;
+
+    _videoStreamHeader = videoStreamHeader;
+    _videoFormatHeader = bitMapInfoHeader;
+
+    if (codecConfigParams && codecConfigParamsLength > 0)
+    {
+        if (_videoCodecConfigParams)
+        {
+            delete [] _videoCodecConfigParams;
+            _videoCodecConfigParams = 0;
+        }
+
+        _videoCodecConfigParams = new WebRtc_UWord8[codecConfigParamsLength];
+        _videoCodecConfigParamsLength = codecConfigParamsLength;
+
+        memcpy(_videoCodecConfigParams, codecConfigParams,
+               _videoCodecConfigParamsLength);
+    }
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::CreateAudioStream(
+    const AVISTREAMHEADER& audioStreamHeader,
+    const WAVEFORMATEX& waveFormatHeader)
+{
+    _crit->Enter();
+
+    if (_aviMode == Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    if (_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    _aviMode = Write;
+    _writeAudioStream = true;
+    _audioStreamHeader = audioStreamHeader;
+    _audioFormatHeader = waveFormatHeader;
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteRIFF()
+{
+    const WebRtc_UWord32 riffTag = MakeFourCc('R', 'I', 'F', 'F');
+    _bytesWritten += PutLE32(riffTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    _riffSizeMark = _bytesWritten;
+
+    const WebRtc_UWord32 aviTag = MakeFourCc('A', 'V', 'I', ' ');
+    _bytesWritten += PutLE32(aviTag);
+
+    return 0;
+}
+
+
// Writes the LIST 'hdrl' block (main header + per-stream headers) followed
// by a JUNK chunk that pads the headers so the movi list starts on a
// 2048-byte (CD-ROM sector) boundary. Always returns 0.
WebRtc_Word32 AviFile::WriteHeaders()
{
    // Main AVI header list.
    const WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
    _bytesWritten += PutLE32(listTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t listhdrlSizeMark = _bytesWritten;

    const WebRtc_UWord32 hdrlTag = MakeFourCc('h', 'd', 'r', 'l');
    _bytesWritten += PutLE32(hdrlTag);

    WriteAVIMainHeader();
    WriteAVIStreamHeaders();

    // Patch the hdrl list size now that its contents are written.
    const long hdrlLen = PutLE32LengthFromCurrent(
        static_cast<long>(listhdrlSizeMark));

    // Junk chunk to align on 2048 boundry (CD-ROM sector boundary).
    const WebRtc_UWord32 junkTag = MakeFourCc('J', 'U', 'N', 'K');
    _bytesWritten += PutLE32(junkTag);
    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t junkSizeMark = _bytesWritten;

    // NOTE(review): if hdrlLen ever exceeds the remaining sector budget
    // this unsigned subtraction wraps around to a huge allocation below -
    // verify the headers always fit within 2048 bytes.
    const WebRtc_UWord32 junkBufferSize =
        0x800     // 2048 byte alignment
        - 12      // RIFF SIZE 'AVI '
        - 8       // LIST SIZE
        - hdrlLen //
        - 8       // JUNK SIZE
        - 12;     // LIST SIZE 'MOVI'

    // TODO (hellner): why not just fseek here?
    WebRtc_UWord8* junkBuffer = new WebRtc_UWord8[junkBufferSize];
    memset(junkBuffer, 0, junkBufferSize);
    _bytesWritten += PutBuffer(junkBuffer, junkBufferSize);
    delete [] junkBuffer;

    PutLE32LengthFromCurrent(static_cast<long>(junkSizeMark));
    // End of JUNK chunk.
    // End of main AVI header list.
    return 0;
}
+
+WebRtc_Word32 AviFile::WriteAVIMainHeader()
+{
+    const WebRtc_UWord32 avihTag = MakeFourCc('a', 'v', 'i', 'h');
+    _bytesWritten += PutLE32(avihTag);
+    _bytesWritten += PutLE32(14 * sizeof(WebRtc_UWord32));
+
+    const WebRtc_UWord32 scale = _videoStreamHeader.dwScale ?
+        _videoStreamHeader.dwScale : 1;
+    const WebRtc_UWord32 microSecPerFrame = 1000000 /
+        (_videoStreamHeader.dwRate / scale);
+    _bytesWritten += PutLE32(microSecPerFrame);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+
+    WebRtc_UWord32 numStreams = 0;
+    if (_writeVideoStream)
+    {
+        ++numStreams;
+    }
+    if (_writeAudioStream)
+    {
+        ++numStreams;
+    }
+
+    if (numStreams == 1)
+    {
+        _bytesWritten += PutLE32(
+            kAvifTrustcktype
+            | kAvifHasindex
+            | kAvifWascapturefile);
+    }
+    else
+    {
+        _bytesWritten += PutLE32(
+            kAvifTrustcktype
+            | kAvifHasindex
+            | kAvifWascapturefile
+            | kAvifIsinterleaved);
+    }
+
+    _totNumFramesMark = _bytesWritten;
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(numStreams);
+
+    if (_writeVideoStream)
+    {
+        _bytesWritten += PutLE32(
+            _videoStreamHeader.dwSuggestedBufferSize);
+        _bytesWritten += PutLE32(
+            _videoStreamHeader.rcFrame.right-_videoStreamHeader.rcFrame.left);
+        _bytesWritten += PutLE32(
+            _videoStreamHeader.rcFrame.bottom-_videoStreamHeader.rcFrame.top);
+    } else {
+        _bytesWritten += PutLE32(0);
+        _bytesWritten += PutLE32(0);
+        _bytesWritten += PutLE32(0);
+    }
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIStreamHeaders()
+{
+    if (_writeVideoStream)
+    {
+        WriteAVIVideoStreamHeaders();
+    }
+    if (_writeAudioStream)
+    {
+        WriteAVIAudioStreamHeaders();
+    }
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIVideoStreamHeaders()
+{
+    const WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t liststrlSizeMark = _bytesWritten;
+
+    const WebRtc_UWord32 hdrlTag = MakeFourCc('s', 't', 'r', 'l');
+    _bytesWritten += PutLE32(hdrlTag);
+
+    WriteAVIVideoStreamHeaderChunks();
+
+    PutLE32LengthFromCurrent(static_cast<long>(liststrlSizeMark));
+
+    return 0;
+}
+
// Writes the video stream's header chunks in order: strh (stream header),
// strf (BITMAPINFOHEADER, with MPEG4 config appended for M4S2), an
// optional strd (codec config for non-MPEG codecs), and strn (stream
// name). Field order and sizes follow the Microsoft AVI RIFF layout.
// Always returns 0.
WebRtc_Word32 AviFile::WriteAVIVideoStreamHeaderChunks()
{
    // Start of strh
    const WebRtc_UWord32 strhTag = MakeFourCc('s', 't', 'r', 'h');
    _bytesWritten += PutLE32(strhTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t strhSizeMark = _bytesWritten;

    _bytesWritten += PutLE32(_videoStreamHeader.fccType);
    _bytesWritten += PutLE32(_videoStreamHeader.fccHandler);
    _bytesWritten += PutLE32(_videoStreamHeader.dwFlags);
    _bytesWritten += PutLE16(_videoStreamHeader.wPriority);
    _bytesWritten += PutLE16(_videoStreamHeader.wLanguage);
    _bytesWritten += PutLE32(_videoStreamHeader.dwInitialFrames);
    _bytesWritten += PutLE32(_videoStreamHeader.dwScale);
    _bytesWritten += PutLE32(_videoStreamHeader.dwRate);
    _bytesWritten += PutLE32(_videoStreamHeader.dwStart);

    // dwLength (frame count) is patched when the file is closed.
    _videoStreamLengthMark = _bytesWritten;
    _bytesWritten += PutLE32(_videoStreamHeader.dwLength);

    _bytesWritten += PutLE32(_videoStreamHeader.dwSuggestedBufferSize);
    _bytesWritten += PutLE32(_videoStreamHeader.dwQuality);
    _bytesWritten += PutLE32(_videoStreamHeader.dwSampleSize);
    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.left);
    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.top);
    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.right);
    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.bottom);

    PutLE32LengthFromCurrent(static_cast<long>(strhSizeMark));
    // End of strh

    // Start of strf
    const WebRtc_UWord32 strfTag = MakeFourCc('s', 't', 'r', 'f');
    _bytesWritten += PutLE32(strfTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t strfSizeMark = _bytesWritten;

    _bytesWritten += PutLE32(_videoFormatHeader.biSize);
    _bytesWritten += PutLE32(_videoFormatHeader.biWidth);
    _bytesWritten += PutLE32(_videoFormatHeader.biHeight);
    _bytesWritten += PutLE16(_videoFormatHeader.biPlanes);
    _bytesWritten += PutLE16(_videoFormatHeader.biBitCount);
    _bytesWritten += PutLE32(_videoFormatHeader.biCompression);
    _bytesWritten += PutLE32(_videoFormatHeader.biSizeImage);
    _bytesWritten += PutLE32(_videoFormatHeader.biXPelsPerMeter);
    _bytesWritten += PutLE32(_videoFormatHeader.biYPelsPerMeter);
    _bytesWritten += PutLE32(_videoFormatHeader.biClrUsed);
    _bytesWritten += PutLE32(_videoFormatHeader.biClrImportant);

    // WMP workaround (see file header comment): MPEG4 files carry the
    // codec config inside strf, directly after the BITMAPINFOHEADER,
    // instead of in a separate strd chunk.
    const bool isMpegFile = _videoStreamHeader.fccHandler ==
        AviFile::MakeFourCc('M','4','S','2');
    if (isMpegFile)
    {
        if (_videoCodecConfigParams && _videoCodecConfigParamsLength > 0)
        {
            _bytesWritten += PutBuffer(_videoCodecConfigParams,
                                       _videoCodecConfigParamsLength);
        }
    }

    PutLE32LengthFromCurrent(static_cast<long>(strfSizeMark));
    // End of strf

    if ( _videoCodecConfigParams
         && (_videoCodecConfigParamsLength > 0)
         && !isMpegFile)
    {
        // Write strd, unless it's an MPEG file
        const WebRtc_UWord32 strdTag = MakeFourCc('s', 't', 'r', 'd');
        _bytesWritten += PutLE32(strdTag);

        // Size is unknown at this point. Update later.
        _bytesWritten += PutLE32(0);
        const size_t strdSizeMark = _bytesWritten;

        _bytesWritten += PutBuffer(_videoCodecConfigParams,
                                   _videoCodecConfigParamsLength);

        PutLE32LengthFromCurrent(static_cast<long>(strdSizeMark));
        // End of strd
    }

    // Start of strn
    const WebRtc_UWord32 strnTag = MakeFourCc('s', 't', 'r', 'n');
    _bytesWritten += PutLE32(strnTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t strnSizeMark = _bytesWritten;

    _bytesWritten += PutBufferZ("WebRtc.avi ");

    PutLE32LengthFromCurrent(static_cast<long>(strnSizeMark));
    // End of strn

    return 0;
}
+
+WebRtc_Word32 AviFile::WriteAVIAudioStreamHeaders()
+{
+    // Start of LIST
+    WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t liststrlSizeMark = _bytesWritten;
+
+    WebRtc_UWord32 hdrlTag = MakeFourCc('s', 't', 'r', 'l');
+    _bytesWritten += PutLE32(hdrlTag);
+
+    WriteAVIAudioStreamHeaderChunks();
+
+    PutLE32LengthFromCurrent(static_cast<long>(liststrlSizeMark));
+    //End of LIST
+    return 0;
+}
+
// Writes the audio stream's header chunks in order: strh (stream header),
// strf (WAVEFORMATEX) and strn (stream name). Audio has no strd chunk.
// Always returns 0.
WebRtc_Word32 AviFile::WriteAVIAudioStreamHeaderChunks()
{
    // Start of strh
    const WebRtc_UWord32 strhTag = MakeFourCc('s', 't', 'r', 'h');
    _bytesWritten += PutLE32(strhTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t strhSizeMark = _bytesWritten;

    _bytesWritten += PutLE32(_audioStreamHeader.fccType);
    _bytesWritten += PutLE32(_audioStreamHeader.fccHandler);
    _bytesWritten += PutLE32(_audioStreamHeader.dwFlags);
    _bytesWritten += PutLE16(_audioStreamHeader.wPriority);
    _bytesWritten += PutLE16(_audioStreamHeader.wLanguage);
    _bytesWritten += PutLE32(_audioStreamHeader.dwInitialFrames);
    _bytesWritten += PutLE32(_audioStreamHeader.dwScale);
    _bytesWritten += PutLE32(_audioStreamHeader.dwRate);
    _bytesWritten += PutLE32(_audioStreamHeader.dwStart);

    // dwLength (sample count) is patched when the file is closed.
    _audioStreamLengthMark = _bytesWritten;
    _bytesWritten += PutLE32(_audioStreamHeader.dwLength);

    _bytesWritten += PutLE32(_audioStreamHeader.dwSuggestedBufferSize);
    _bytesWritten += PutLE32(_audioStreamHeader.dwQuality);
    _bytesWritten += PutLE32(_audioStreamHeader.dwSampleSize);
    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.left);
    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.top);
    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.right);
    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.bottom);

    PutLE32LengthFromCurrent(static_cast<long>(strhSizeMark));
    // End of strh

    // Start of strf
    const WebRtc_UWord32 strfTag = MakeFourCc('s', 't', 'r', 'f');
    _bytesWritten += PutLE32(strfTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t strfSizeMark = _bytesWritten;

    _bytesWritten += PutLE16(_audioFormatHeader.wFormatTag);
    _bytesWritten += PutLE16(_audioFormatHeader.nChannels);
    _bytesWritten += PutLE32(_audioFormatHeader.nSamplesPerSec);
    _bytesWritten += PutLE32(_audioFormatHeader.nAvgBytesPerSec);
    _bytesWritten += PutLE16(_audioFormatHeader.nBlockAlign);
    _bytesWritten += PutLE16(_audioFormatHeader.wBitsPerSample);
    _bytesWritten += PutLE16(_audioFormatHeader.cbSize);

    PutLE32LengthFromCurrent(static_cast<long>(strfSizeMark));
    // End of strf.

    // Audio doesn't have strd.

    // Start of strn
    const WebRtc_UWord32 strnTag = MakeFourCc('s', 't', 'r', 'n');
    _bytesWritten += PutLE32(strnTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t strnSizeMark = _bytesWritten;

    _bytesWritten += PutBufferZ("WebRtc.avi ");

    PutLE32LengthFromCurrent(static_cast<long>(strnSizeMark));
    // End of strn.

    return 0;
}
+
+WebRtc_Word32 AviFile::WriteMoviStart()
+{
+    // Create template movi list. Fill out size when known (i.e. when closing
+    // file).
+    const WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    _bytesWritten += PutLE32(0); //Size! Change later!
+    _moviSizeMark = _bytesWritten;
+    _moviListOffset = ftell(_aviFile);
+
+    const WebRtc_UWord32 moviTag = MakeFourCc('m', 'o', 'v', 'i');
+    _bytesWritten += PutLE32(moviTag);
+
+    return 0;
+}
+
+size_t AviFile::PutByte(WebRtc_UWord8 byte)
+{
+    return fwrite(&byte, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord8),
+                  _aviFile);
+}
+
+size_t AviFile::PutLE16(WebRtc_UWord16 word)
+{
+    return fwrite(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord16),
+                  _aviFile);
+}
+
+size_t AviFile::PutLE32(WebRtc_UWord32 word)
+{
+    return fwrite(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord32),
+                  _aviFile);
+}
+
+size_t AviFile::PutBuffer(const WebRtc_UWord8* str, size_t size)
+{
+    return fwrite(str, sizeof(WebRtc_UWord8), size,
+                  _aviFile);
+}
+
size_t AviFile::PutBufferZ(const char* str)
{
    // Write a zero-terminated string, including the terminating NUL
    // character — hence the + 1.
    return PutBuffer(reinterpret_cast<const WebRtc_UWord8*>(str),
                     strlen(str) + 1);
}
+
+long AviFile::PutLE32LengthFromCurrent(long startPos)
+{
+    const long endPos = ftell(_aviFile);
+    if (endPos < 0) {
+        return 0;
+    }
+    bool success = (0 == fseek(_aviFile, startPos - 4, SEEK_SET));
+    if (!success) {
+        assert(false);
+        return 0;
+    }
+    const long len = endPos - startPos;
+    if (endPos > startPos) {
+        PutLE32(len);
+    }
+    else {
+        assert(false);
+    }
+    success = (0 == fseek(_aviFile, endPos, SEEK_SET));
+    assert(success);
+    return len;
+}
+
+void AviFile::PutLE32AtPos(long pos, WebRtc_UWord32 word)
+{
+    const long currPos = ftell(_aviFile);
+    if (currPos < 0) {
+        assert(false);
+        return;
+    }
+    bool success = (0 == fseek(_aviFile, pos, SEEK_SET));
+    if (!success) {
+      assert(false);
+      return;
+    }
+    PutLE32(word);
+    success = (0 == fseek(_aviFile, currPos, SEEK_SET));
+    assert(success);
+}
+
+void AviFile::CloseRead()
+{
+    if (_aviFile)
+    {
+        fclose(_aviFile);
+        _aviFile = NULL;
+    }
+}
+
// Finalizes a file opened for writing: patches every size/count field that
// was unknown while streaming, appends the 'idx1' index, and closes the
// file. The *_Mark members record where each placeholder field lives.
void AviFile::CloseWrite()
{
    if (_created)
    {
        // Update everything that isn't known until the file is closed. The
        // marks indicate where in the headers this update should be.
        PutLE32LengthFromCurrent(static_cast<long>(_moviSizeMark));

        // Frame-count field recorded at _totNumFramesMark (main header).
        PutLE32AtPos(static_cast<long>(_totNumFramesMark), _videoFrames);

        if (_writeVideoStream)
        {
            // dwLength of the video 'strh' chunk.
            PutLE32AtPos(static_cast<long>(_videoStreamLengthMark),
                         _videoFrames);
        }

        if (_writeAudioStream)
        {
            // dwLength of the audio 'strh' chunk.
            PutLE32AtPos(static_cast<long>(_audioStreamLengthMark),
                         _audioFrames);
        }

        // The index must be written before the RIFF size is patched so the
        // index bytes are counted in the total file size.
        WriteIndex();
        PutLE32LengthFromCurrent(static_cast<long>(_riffSizeMark));
        ClearIndexList();

        if (_aviFile)
        {
            fclose(_aviFile);
            _aviFile = NULL;
        }
    }
}
+
// Restores every member to its default state so the object can be reused
// for a new file.
void AviFile::ResetMembers()
{
    ResetComplexMembers();

    _aviFile = NULL;

    // Counters describing the file being read or written.
    _nrStreams     = 0;
    _aviLength     = 0;
    _dataLength    = 0;
    _bytesRead     = 0;
    _dataStartByte = 0;
    _framesRead    = 0;
    _videoFrames   = 0;
    _audioFrames   = 0;

    _reading = false;
    _openedAs = AVI_AUDIO;
    _loop = false;
    _writing = false;

    _bytesWritten          = 0;

    // Offsets of placeholder size/count fields patched on close.
    _riffSizeMark          = 0;
    _moviSizeMark          = 0;
    _totNumFramesMark      = 0;
    _videoStreamLengthMark = 0;
    _audioStreamLengthMark = 0;

    _writeAudioStream = false;
    _writeVideoStream = false;

    _aviMode                      = NotSet;
    _videoCodecConfigParams       = 0;
    _videoCodecConfigParamsLength = 0;

    _videoStreamDataChunkPrefix = 0;
    _audioStreamDataChunkPrefix = 0;

    _created = false;

    _moviListOffset = 0;

    _videoConfigLength = 0;
}
+
+void AviFile::ResetComplexMembers()
+{
+    memset(&_aviHeader, 0, sizeof(AVIMAINHEADER));
+    memset(&_videoStreamHeader, 0, sizeof(AVISTREAMHEADER));
+    memset(&_audioStreamHeader, 0, sizeof(AVISTREAMHEADER));
+    memset(&_videoFormatHeader, 0, sizeof(BITMAPINFOHEADER));
+    memset(&_audioFormatHeader, 0, sizeof(WAVEFORMATEX));
+    memset(_videoConfigParameters, 0, CODEC_CONFIG_LENGTH);
+    memset(_videoStreamName, 0, STREAM_NAME_LENGTH);
+    memset(_audioStreamName, 0, STREAM_NAME_LENGTH);
+    memset(&_videoStream, 0, sizeof(AVIStream));
+    memset(&_audioStream, 0, sizeof(AVIStream));
+}
+
+size_t AviFile::GetByte(WebRtc_UWord8& word)
+{
+    return fread(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord8), _aviFile);
+}
+
+size_t AviFile::GetLE16(WebRtc_UWord16& word)
+{
+    return fread(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord16),
+                 _aviFile);
+}
+
+size_t AviFile::GetLE32(WebRtc_UWord32& word)
+{
+    return fread(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord32),
+                 _aviFile);
+}
+
+size_t AviFile::GetBuffer(WebRtc_UWord8* str, size_t size)
+{
+    return fread(str, sizeof(WebRtc_UWord8), size, _aviFile);
+}
+
+WebRtc_Word32 AviFile::ReadRIFF()
+{
+    WebRtc_UWord32 tag;
+    _bytesRead = GetLE32(tag);
+    if (tag != MakeFourCc('R', 'I', 'F', 'F'))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Not a RIFF file!");
+        return -1;
+    }
+
+    WebRtc_UWord32 size;
+    _bytesRead += GetLE32(size);
+    _aviLength = size;
+
+    _bytesRead += GetLE32(tag);
+    if (tag != MakeFourCc('A', 'V', 'I', ' '))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Not an AVI file!");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadHeaders()
+{
+    WebRtc_UWord32 tag;
+    _bytesRead += GetLE32(tag);
+    WebRtc_UWord32 size;
+    _bytesRead += GetLE32(size);
+
+    if (tag != MakeFourCc('L', 'I', 'S', 'T'))
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 listTag;
+    _bytesRead += GetLE32(listTag);
+    if (listTag != MakeFourCc('h', 'd', 'r', 'l'))
+    {
+        return -1;
+    }
+
+    WebRtc_Word32 err = ReadAVIMainHeader();
+    if (err)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAVIMainHeader()
+{
+    _bytesRead += GetLE32(_aviHeader.fcc);
+    _bytesRead += GetLE32(_aviHeader.cb);
+    _bytesRead += GetLE32(_aviHeader.dwMicroSecPerFrame);
+    _bytesRead += GetLE32(_aviHeader.dwMaxBytesPerSec);
+    _bytesRead += GetLE32(_aviHeader.dwPaddingGranularity);
+    _bytesRead += GetLE32(_aviHeader.dwFlags);
+    _bytesRead += GetLE32(_aviHeader.dwTotalFrames);
+    _bytesRead += GetLE32(_aviHeader.dwInitialFrames);
+    _bytesRead += GetLE32(_aviHeader.dwStreams);
+    _bytesRead += GetLE32(_aviHeader.dwSuggestedBufferSize);
+    _bytesRead += GetLE32(_aviHeader.dwWidth);
+    _bytesRead += GetLE32(_aviHeader.dwHeight);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[0]);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[1]);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[2]);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[3]);
+
+    if (_aviHeader.fcc != MakeFourCc('a', 'v', 'i', 'h'))
+    {
+        return -1;
+    }
+
+    if (_aviHeader.dwFlags & kAvifMustuseindex)
+    {
+        return -1;
+    }
+
+    bool readVideoStreamHeader = false;
+    bool readAudioStreamHeader = false;
+    unsigned int streamsRead = 0;
+    while (_aviHeader.dwStreams > streamsRead)
+    {
+        WebRtc_UWord32 strltag;
+        _bytesRead += GetLE32(strltag);
+        WebRtc_UWord32 strlsize;
+        _bytesRead += GetLE32(strlsize);
+        const long endSeekPos = ftell(_aviFile) +
+            static_cast<WebRtc_Word32>(strlsize);
+
+        if (strltag != MakeFourCc('L', 'I', 'S', 'T'))
+        {
+            return -1;
+        }
+
+        WebRtc_UWord32 listTag;
+        _bytesRead += GetLE32(listTag);
+        if (listTag != MakeFourCc('s', 't', 'r', 'l'))
+        {
+            return -1;
+        }
+
+        WebRtc_UWord32 chunktag;
+        _bytesRead += GetLE32(chunktag);
+        WebRtc_UWord32 chunksize;
+        _bytesRead += GetLE32(chunksize);
+
+        if (chunktag != MakeFourCc('s', 't', 'r', 'h'))
+        {
+            return -1;
+        }
+
+        AVISTREAMHEADER tmpStreamHeader;
+        tmpStreamHeader.fcc = chunktag;
+        tmpStreamHeader.cb  = chunksize;
+
+        _bytesRead += GetLE32(tmpStreamHeader.fccType);
+        _bytesRead += GetLE32(tmpStreamHeader.fccHandler);
+        _bytesRead += GetLE32(tmpStreamHeader.dwFlags);
+        _bytesRead += GetLE16(tmpStreamHeader.wPriority);
+        _bytesRead += GetLE16(tmpStreamHeader.wLanguage);
+        _bytesRead += GetLE32(tmpStreamHeader.dwInitialFrames);
+        _bytesRead += GetLE32(tmpStreamHeader.dwScale);
+        _bytesRead += GetLE32(tmpStreamHeader.dwRate);
+        _bytesRead += GetLE32(tmpStreamHeader.dwStart);
+        _bytesRead += GetLE32(tmpStreamHeader.dwLength);
+        _bytesRead += GetLE32(tmpStreamHeader.dwSuggestedBufferSize);
+        _bytesRead += GetLE32(tmpStreamHeader.dwQuality);
+        _bytesRead += GetLE32(tmpStreamHeader.dwSampleSize);
+
+        WebRtc_UWord16 left;
+        _bytesRead += GetLE16(left);
+        tmpStreamHeader.rcFrame.left = left;
+        WebRtc_UWord16 top;
+        _bytesRead += GetLE16(top);
+        tmpStreamHeader.rcFrame.top = top;
+        WebRtc_UWord16 right;
+        _bytesRead += GetLE16(right);
+        tmpStreamHeader.rcFrame.right = right;
+        WebRtc_UWord16 bottom;
+        _bytesRead += GetLE16(bottom);
+        tmpStreamHeader.rcFrame.bottom = bottom;
+
+        if (!readVideoStreamHeader
+            && (tmpStreamHeader.fccType == MakeFourCc('v', 'i', 'd', 's')))
+        {
+            _videoStreamHeader = tmpStreamHeader; //Bitwise copy is OK!
+            const WebRtc_Word32 err = ReadAVIVideoStreamHeader(endSeekPos);
+            if (err)
+            {
+                return -1;
+            }
+            // Make sure there actually is video data in the file...
+            if (_videoStreamHeader.dwLength == 0)
+            {
+                return -1;
+            }
+            readVideoStreamHeader = true;
+        } else if(!readAudioStreamHeader &&
+                  (tmpStreamHeader.fccType == MakeFourCc('a', 'u', 'd', 's'))) {
+            _audioStreamHeader = tmpStreamHeader;
+            const WebRtc_Word32 err = ReadAVIAudioStreamHeader(endSeekPos);
+            if (err)
+            {
+                return -1;
+            }
+            readAudioStreamHeader = true;
+        }
+        else
+        {
+            fseek(_aviFile, endSeekPos, SEEK_SET);
+            _bytesRead += endSeekPos;
+        }
+
+        ++streamsRead;
+    }
+
+    if (!readVideoStreamHeader && !readAudioStreamHeader)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 tag;
+    _bytesRead += GetLE32(tag);
+    WebRtc_UWord32 size;
+    _bytesRead += GetLE32(size);
+
+    if (tag == MakeFourCc('J', 'U', 'N', 'K'))
+    {
+        fseek(_aviFile, size, SEEK_CUR);
+        _bytesRead += size;
+        _bytesRead += GetLE32(tag);
+        _bytesRead += GetLE32(size);
+    }
+    if (tag != MakeFourCc('L', 'I', 'S', 'T'))
+    {
+        return -1;
+    }
+    WebRtc_UWord32 listTag;
+    _bytesRead += GetLE32(listTag);
+    if (listTag != MakeFourCc('m', 'o', 'v', 'i'))
+    {
+        return -1;
+    }
+    _dataLength = size;
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAVIVideoStreamHeader(WebRtc_Word32 endpos)
+{
+    WebRtc_UWord32 chunktag;
+    _bytesRead += GetLE32(chunktag);
+    WebRtc_UWord32 chunksize;
+    _bytesRead += GetLE32(chunksize);
+
+    if (chunktag != MakeFourCc('s', 't', 'r', 'f'))
+    {
+        return -1;
+    }
+
+    _bytesRead += GetLE32(_videoFormatHeader.biSize);
+    _bytesRead += GetLE32(_videoFormatHeader.biWidth);
+    _bytesRead += GetLE32(_videoFormatHeader.biHeight);
+    _bytesRead += GetLE16(_videoFormatHeader.biPlanes);
+    _bytesRead += GetLE16(_videoFormatHeader.biBitCount);
+    _bytesRead += GetLE32(_videoFormatHeader.biCompression);
+    _bytesRead += GetLE32(_videoFormatHeader.biSizeImage);
+    _bytesRead += GetLE32(_videoFormatHeader.biXPelsPerMeter);
+    _bytesRead += GetLE32(_videoFormatHeader.biYPelsPerMeter);
+    _bytesRead += GetLE32(_videoFormatHeader.biClrUsed);
+    _bytesRead += GetLE32(_videoFormatHeader.biClrImportant);
+
+    if (chunksize >  _videoFormatHeader.biSize)
+    {
+        const WebRtc_UWord32 size = chunksize - _videoFormatHeader.biSize;
+        const WebRtc_UWord32 readSize = MinValue(size, CODEC_CONFIG_LENGTH);
+        _bytesRead += GetBuffer(
+            reinterpret_cast<WebRtc_UWord8*>(_videoConfigParameters), readSize);
+        _videoConfigLength = readSize;
+        WebRtc_Word32 skipSize = chunksize - _videoFormatHeader.biSize -
+            readSize;
+        if (skipSize > 0)
+        {
+            fseek(_aviFile, skipSize, SEEK_CUR);
+            _bytesRead += skipSize;
+        }
+    }
+
+    while (static_cast<long>(_bytesRead) < endpos)
+    {
+        WebRtc_UWord32 chunktag;
+        _bytesRead += GetLE32(chunktag);
+        WebRtc_UWord32 chunksize;
+        _bytesRead += GetLE32(chunksize);
+
+        if (chunktag == MakeFourCc('s', 't', 'r', 'n'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize, STREAM_NAME_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_videoStreamName), size);
+        }
+        else if (chunktag == MakeFourCc('s', 't', 'r', 'd'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize,
+                                                 CODEC_CONFIG_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_videoConfigParameters), size);
+            _videoConfigLength = size;
+        }
+        else
+        {
+            fseek(_aviFile, chunksize, SEEK_CUR);
+            _bytesRead += chunksize;
+        }
+
+        if (feof(_aviFile))
+        {
+            return -1;
+        }
+    }
+    _videoStream.streamType = AviFile::AVI_VIDEO;
+    _videoStream.streamNumber = _nrStreams++;
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAVIAudioStreamHeader(WebRtc_Word32 endpos)
+{
+    WebRtc_UWord32 chunktag;
+    _bytesRead += GetLE32(chunktag);
+    WebRtc_UWord32 chunksize;
+    _bytesRead += GetLE32(chunksize);
+
+    if (chunktag != MakeFourCc('s', 't', 'r', 'f'))
+    {
+        return -1;
+    }
+
+    const size_t startRead = _bytesRead;
+    _bytesRead += GetLE16(_audioFormatHeader.wFormatTag);
+    _bytesRead += GetLE16(_audioFormatHeader.nChannels);
+    _bytesRead += GetLE32(_audioFormatHeader.nSamplesPerSec);
+    _bytesRead += GetLE32(_audioFormatHeader.nAvgBytesPerSec);
+    _bytesRead += GetLE16(_audioFormatHeader.nBlockAlign);
+    _bytesRead += GetLE16(_audioFormatHeader.wBitsPerSample);
+    _bytesRead += GetLE16(_audioFormatHeader.cbSize);
+
+    const WebRtc_UWord32 diffRead = chunksize - (_bytesRead - startRead);
+    if (diffRead > 0)
+    {
+        const WebRtc_UWord32 size = MinValue(diffRead, CODEC_CONFIG_LENGTH);
+        _bytesRead += GetBuffer(
+            reinterpret_cast<WebRtc_UWord8*>(_audioConfigParameters), size);
+    }
+
+    while (static_cast<long>(_bytesRead) < endpos)
+    {
+        WebRtc_UWord32 chunktag;
+        _bytesRead += GetLE32(chunktag);
+        WebRtc_UWord32 chunksize;
+        _bytesRead += GetLE32(chunksize);
+
+        if (chunktag == MakeFourCc('s', 't', 'r', 'n'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize, STREAM_NAME_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_audioStreamName), size);
+        }
+        else if (chunktag == MakeFourCc('s', 't', 'r', 'd'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize,
+                                                 CODEC_CONFIG_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_audioConfigParameters), size);
+        }
+        else
+        {
+            fseek(_aviFile, chunksize, SEEK_CUR);
+            _bytesRead += chunksize;
+        }
+
+        if (feof(_aviFile))
+        {
+            return -1;
+        }
+    }
+    _audioStream.streamType = AviFile::AVI_AUDIO;
+    _audioStream.streamNumber = _nrStreams++;
+    return 0;
+}
+
+WebRtc_UWord32 AviFile::StreamAndTwoCharCodeToTag(WebRtc_Word32 streamNum,
+                                                  const char* twoCharCode)
+{
+    WebRtc_UWord8 a = '0';
+    WebRtc_UWord8 b;
+    switch (streamNum)
+    {
+    case 1:
+        b = '1';
+        break;
+    case 2:
+        b = '2';
+        break;
+    default:
+        b = '0';
+    }
+    return MakeFourCc(a, b, twoCharCode[0], twoCharCode[1]);
+}
+
+void AviFile::ClearIndexList()
+{
+    while (!_indexList->Empty())
+    {
+        ListItem* listItem = _indexList->First();
+        if (listItem == 0)
+        {
+            break;
+        }
+
+        AVIINDEXENTRY* item = static_cast<AVIINDEXENTRY*>(listItem->GetItem());
+        if (item != NULL)
+        {
+            delete item;
+        }
+        _indexList->PopFront();
+    }
+}
+
// Records one 'movi' sub-chunk for the 'idx1' index that WriteIndex emits
// when the file is closed. Entries are heap-allocated; ClearIndexList
// frees them.
void AviFile::AddChunkToIndexList(WebRtc_UWord32 inChunkId,
                                  WebRtc_UWord32 inFlags,
                                  WebRtc_UWord32 inOffset,
                                  WebRtc_UWord32 inSize)
{
    _indexList->PushBack(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
                                           inSize));
}
+
// Appends the 'idx1' index chunk: one 16-byte entry for each 'movi'
// sub-chunk recorded via AddChunkToIndexList. Called from CloseWrite.
void AviFile::WriteIndex()
{
    const WebRtc_UWord32 idxTag = MakeFourCc('i', 'd', 'x', '1');
    _bytesWritten += PutLE32(idxTag);

    // Size is unknown at this point. Update later.
    _bytesWritten += PutLE32(0);
    const size_t idxChunkSize = _bytesWritten;

    for (ListItem* listItem = _indexList->First();
         listItem != NULL;
         listItem = _indexList->Next(listItem))
    {
        const AVIINDEXENTRY* item =
            static_cast<AVIINDEXENTRY*>(listItem->GetItem());
        if (item != NULL)
        {
            _bytesWritten += PutLE32(item->ckid);
            _bytesWritten += PutLE32(item->dwFlags);
            _bytesWritten += PutLE32(item->dwChunkOffset);
            _bytesWritten += PutLE32(item->dwChunkLength);
        }
    }
    // Patch the placeholder chunk size now that all entries are written.
    PutLE32LengthFromCurrent(static_cast<long>(idxChunkSize));
}
+} // namespace webrtc
diff --git a/trunk/src/modules/media_file/source/avi_file.h b/trunk/src/modules/media_file/source/avi_file.h
new file mode 100644
index 0000000..73a19af
--- /dev/null
+++ b/trunk/src/modules/media_file/source/avi_file.h
@@ -0,0 +1,278 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
// Class for reading from or writing to an AVI file (exclusively one mode).
+// Note: the class cannot be used for reading and writing at the same time.
+#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
+#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
+
+#include <stdio.h>
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class ListWrapper;
+
// Mirrors the 'strh' chunk of the AVI RIFF format (same field layout as
// the Microsoft AVISTREAMHEADER structure).
struct AVISTREAMHEADER
{
    AVISTREAMHEADER();
    WebRtc_UWord32 fcc;         // Chunk FOURCC: 'strh'.
    WebRtc_UWord32 cb;          // Size of the rest of the chunk, in bytes.
    WebRtc_UWord32 fccType;     // Stream type, e.g. 'vids' or 'auds'.
    WebRtc_UWord32 fccHandler;  // Codec FOURCC.
    WebRtc_UWord32 dwFlags;
    WebRtc_UWord16 wPriority;
    WebRtc_UWord16 wLanguage;
    WebRtc_UWord32 dwInitialFrames;
    WebRtc_UWord32 dwScale;     // dwRate / dwScale = samples (frames)/sec.
    WebRtc_UWord32 dwRate;
    WebRtc_UWord32 dwStart;
    WebRtc_UWord32 dwLength;    // Stream length in dwRate/dwScale units.
    WebRtc_UWord32 dwSuggestedBufferSize;
    WebRtc_UWord32 dwQuality;
    WebRtc_UWord32 dwSampleSize;
    // Destination rectangle; meaningful for video streams.
    struct
    {
        WebRtc_Word16 left;
        WebRtc_Word16 top;
        WebRtc_Word16 right;
        WebRtc_Word16 bottom;
    } rcFrame;
};
+
// Mirrors the Windows BITMAPINFOHEADER structure carried in a video
// stream's 'strf' chunk.
struct BITMAPINFOHEADER
{
    BITMAPINFOHEADER();
    WebRtc_UWord32 biSize;          // Size of this fixed header, in bytes.
    WebRtc_UWord32 biWidth;
    WebRtc_UWord32 biHeight;
    WebRtc_UWord16 biPlanes;
    WebRtc_UWord16 biBitCount;
    WebRtc_UWord32 biCompression;   // Codec FOURCC.
    WebRtc_UWord32 biSizeImage;
    WebRtc_UWord32 biXPelsPerMeter;
    WebRtc_UWord32 biYPelsPerMeter;
    WebRtc_UWord32 biClrUsed;
    WebRtc_UWord32 biClrImportant;
};
+
// Mirrors the Windows WAVEFORMATEX structure carried in an audio stream's
// 'strf' chunk.
struct WAVEFORMATEX
{
    WAVEFORMATEX();
    WebRtc_UWord16 wFormatTag;      // Audio format identifier.
    WebRtc_UWord16 nChannels;
    WebRtc_UWord32 nSamplesPerSec;
    WebRtc_UWord32 nAvgBytesPerSec;
    WebRtc_UWord16 nBlockAlign;
    WebRtc_UWord16 wBitsPerSample;
    WebRtc_UWord16 cbSize;          // Bytes of extra format data following.
};
+
// Reads from or writes to a single AVI file. An instance operates in one
// mode at a time (see AVIFileMode); it cannot read and write concurrently.
class AviFile
{
public:
    enum AVIStreamType
    {
        AVI_AUDIO = 0,
        AVI_VIDEO = 1
    };

    // Unsigned, for comparison with must-be-unsigned types.
    static const unsigned int CODEC_CONFIG_LENGTH = 64;
    static const unsigned int STREAM_NAME_LENGTH  = 32;

    AviFile();
    ~AviFile();

    // Open an existing file for reading the given stream type.
    // NOTE(review): `loop` presumably restarts reading at end of file —
    // confirm against the implementation in avi_file.cc.
    WebRtc_Word32 Open(AVIStreamType streamType, const char* fileName,
                       bool loop = false);

    // Register the video stream (and its codec configuration blob) to be
    // written; must precede Create().
    WebRtc_Word32 CreateVideoStream(const AVISTREAMHEADER& videoStreamHeader,
                                    const BITMAPINFOHEADER& bitMapInfoHeader,
                                    const WebRtc_UWord8* codecConfigParams,
                                    WebRtc_Word32 codecConfigParamsLength);

    // Register the audio stream to be written; must precede Create().
    WebRtc_Word32 CreateAudioStream(const AVISTREAMHEADER& audioStreamHeader,
                                    const WAVEFORMATEX& waveFormatHeader);
    // Create a new AVI file for writing the registered streams.
    WebRtc_Word32 Create(const WebRtc_Word8* fileName);

    WebRtc_Word32 WriteAudio(const WebRtc_UWord8* data, WebRtc_Word32 length);
    WebRtc_Word32 WriteVideo(const WebRtc_UWord8* data, WebRtc_Word32 length);

    // Copy out the video stream description and codec configuration.
    // NOTE(review): configLength looks like an in/out parameter (buffer
    // capacity in, bytes copied out) — confirm against avi_file.cc.
    WebRtc_Word32 GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
                                     BITMAPINFOHEADER& bitmapInfo,
                                     WebRtc_Word8* codecConfigParameters,
                                     WebRtc_Word32& configLength);

    WebRtc_Word32 GetDuration(WebRtc_Word32& durationMs);

    WebRtc_Word32 GetAudioStreamInfo(WAVEFORMATEX& waveHeader);

    WebRtc_Word32 ReadAudio(WebRtc_UWord8* data, WebRtc_Word32& length);
    WebRtc_Word32 ReadVideo(WebRtc_UWord8* data, WebRtc_Word32& length);

    WebRtc_Word32 Close();

    // Pack four characters into a RIFF FOURCC tag.
    static WebRtc_UWord32 MakeFourCc(WebRtc_UWord8 ch0, WebRtc_UWord8 ch1,
                                     WebRtc_UWord8 ch2, WebRtc_UWord8 ch3);

private:
    enum AVIFileMode
    {
        NotSet,
        Read,
        Write
    };

    // One entry of the 'idx1' index chunk (mirrors the Microsoft
    // AVIINDEXENTRY structure).
    struct AVIINDEXENTRY
    {
        AVIINDEXENTRY(WebRtc_UWord32 inckid, WebRtc_UWord32 indwFlags,
                      WebRtc_UWord32 indwChunkOffset,
                      WebRtc_UWord32 indwChunkLength);
        WebRtc_UWord32 ckid;
        WebRtc_UWord32 dwFlags;
        WebRtc_UWord32 dwChunkOffset;
        WebRtc_UWord32 dwChunkLength;
    };

    WebRtc_Word32 PrepareDataChunkHeaders();

    WebRtc_Word32 ReadMoviSubChunk(WebRtc_UWord8* data, WebRtc_Word32& length,
                                   WebRtc_UWord32 tag1,
                                   WebRtc_UWord32 tag2 = 0);

    // Writers for the fixed file structure. Sizes unknown while streaming
    // are recorded via the *_Mark members and patched in CloseWrite.
    WebRtc_Word32 WriteRIFF();
    WebRtc_Word32 WriteHeaders();
    WebRtc_Word32 WriteAVIMainHeader();
    WebRtc_Word32 WriteAVIStreamHeaders();
    WebRtc_Word32 WriteAVIVideoStreamHeaders();
    WebRtc_Word32 WriteAVIVideoStreamHeaderChunks();
    WebRtc_Word32 WriteAVIAudioStreamHeaders();
    WebRtc_Word32 WriteAVIAudioStreamHeaderChunks();

    WebRtc_Word32 WriteMoviStart();

    // Little-endian primitive writers; each returns the number of bytes
    // written.
    size_t PutByte(WebRtc_UWord8 byte);
    size_t PutLE16(WebRtc_UWord16 word);
    size_t PutLE32(WebRtc_UWord32 word);
    size_t PutBuffer(const WebRtc_UWord8* str, size_t size);
    size_t PutBufferZ(const char* str);
    // Patch the size field 4 bytes before startPos with the distance from
    // startPos to the current position; restores the file position.
    long PutLE32LengthFromCurrent(long startPos);
    void PutLE32AtPos(long pos, WebRtc_UWord32 word);

    // Little-endian primitive readers; each returns the number of bytes
    // read.
    size_t GetByte(WebRtc_UWord8& word);
    size_t GetLE16(WebRtc_UWord16& word);
    size_t GetLE32(WebRtc_UWord32& word);
    size_t GetBuffer(WebRtc_UWord8* str, size_t size);

    void CloseRead();
    void CloseWrite();

    void ResetMembers();
    void ResetComplexMembers();

    // Parsers for the fixed file structure (RIFF -> hdrl -> strl lists).
    WebRtc_Word32 ReadRIFF();
    WebRtc_Word32 ReadHeaders();
    WebRtc_Word32 ReadAVIMainHeader();
    WebRtc_Word32 ReadAVIVideoStreamHeader(WebRtc_Word32 endpos);
    WebRtc_Word32 ReadAVIAudioStreamHeader(WebRtc_Word32 endpos);

    WebRtc_UWord32 StreamAndTwoCharCodeToTag(WebRtc_Word32 streamNum,
                                             const char* twoCharCode);

    void ClearIndexList();
    void AddChunkToIndexList(WebRtc_UWord32 inChunkId, WebRtc_UWord32 inFlags,
                             WebRtc_UWord32 inOffset,  WebRtc_UWord32 inSize);

    void WriteIndex();

private:
    // Mirrors the 'avih' chunk (Microsoft AVIMAINHEADER layout).
    struct AVIMAINHEADER
    {
        AVIMAINHEADER();
        WebRtc_UWord32 fcc;
        WebRtc_UWord32 cb;
        WebRtc_UWord32 dwMicroSecPerFrame;
        WebRtc_UWord32 dwMaxBytesPerSec;
        WebRtc_UWord32 dwPaddingGranularity;
        WebRtc_UWord32 dwFlags;
        WebRtc_UWord32 dwTotalFrames;
        WebRtc_UWord32 dwInitialFrames;
        WebRtc_UWord32 dwStreams;
        WebRtc_UWord32 dwSuggestedBufferSize;
        WebRtc_UWord32 dwWidth;
        WebRtc_UWord32 dwHeight;
        WebRtc_UWord32 dwReserved[4];
    };

    struct AVIStream
    {
        AVIStreamType streamType;
        int           streamNumber;
    };

    CriticalSectionWrapper* _crit;
    FILE*            _aviFile;
    AVIMAINHEADER    _aviHeader;
    AVISTREAMHEADER  _videoStreamHeader;
    AVISTREAMHEADER  _audioStreamHeader;
    BITMAPINFOHEADER _videoFormatHeader;
    WAVEFORMATEX     _audioFormatHeader;

    // Per-stream codec configuration blobs and names read from the file.
    WebRtc_Word8 _videoConfigParameters[CODEC_CONFIG_LENGTH];
    WebRtc_Word32 _videoConfigLength;
    WebRtc_Word8 _videoStreamName[STREAM_NAME_LENGTH];
    WebRtc_Word32 _videoStreamNameLength;
    WebRtc_Word8 _audioConfigParameters[CODEC_CONFIG_LENGTH];
    WebRtc_Word8 _audioStreamName[STREAM_NAME_LENGTH];

    AVIStream _videoStream;
    AVIStream _audioStream;

    WebRtc_Word32 _nrStreams;
    WebRtc_Word32 _aviLength;
    WebRtc_Word32 _dataLength;
    size_t        _bytesRead;
    size_t        _dataStartByte;
    WebRtc_Word32 _framesRead;
    WebRtc_Word32 _videoFrames;
    WebRtc_Word32 _audioFrames;

    bool _reading;
    AVIStreamType _openedAs;
    bool _loop;
    bool _writing;

    size_t _bytesWritten;

    // Positions just past each placeholder size/count field;
    // PutLE32LengthFromCurrent patches the 4 bytes before each mark.
    size_t _riffSizeMark;
    size_t _moviSizeMark;
    size_t _totNumFramesMark;
    size_t _videoStreamLengthMark;
    size_t _audioStreamLengthMark;
    WebRtc_Word32 _moviListOffset;

    bool _writeAudioStream;
    bool _writeVideoStream;

    AVIFileMode _aviMode;
    WebRtc_UWord8* _videoCodecConfigParams;
    WebRtc_Word32 _videoCodecConfigParamsLength;

    WebRtc_UWord32 _videoStreamDataChunkPrefix;
    WebRtc_UWord32 _audioStreamDataChunkPrefix;
    bool _created;

    ListWrapper* _indexList; // Elements are of type AVIINDEXENTRY.
};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
diff --git a/trunk/src/modules/media_file/source/media_file.gypi b/trunk/src/modules/media_file/source/media_file.gypi
new file mode 100644
index 0000000..2f9ffe9
--- /dev/null
+++ b/trunk/src/modules/media_file/source/media_file.gypi
@@ -0,0 +1,67 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
{
  # Build description for the media_file module (AVI/media file I/O).
  'targets': [
    {
      'target_name': 'media_file',
      'type': '<(library)',
      'dependencies': [
        'webrtc_utility',
        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
      ],
      'defines': [
        'WEBRTC_MODULE_UTILITY_VIDEO', # for compiling support for video recording
      ],
      'include_dirs': [
        '../interface',
        '../../interface',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          '../interface',
          '../../interface',
        ],
      },
      'sources': [
        '../interface/media_file.h',
        '../interface/media_file_defines.h',
        'avi_file.cc',
        'avi_file.h',
        'media_file_impl.cc',
        'media_file_impl.h',
        'media_file_utility.cc',
        'media_file_utility.h',
      ], # source
    },
  ], # targets
  # Unit tests are only built in standalone (non-Chromium) builds.
  'conditions': [
    ['build_with_chromium==0', {
      'targets': [
        {
          'target_name': 'media_file_unittests',
          'type': 'executable',
          'dependencies': [
            'media_file',
            '<(webrtc_root)/../testing/gtest.gyp:gtest',
            '<(webrtc_root)/../test/test.gyp:test_support_main',
          ],
          'sources': [
            'media_file_unittest.cc',
          ],
        }, # media_file_unittests
      ], # targets
    }], # build_with_chromium
  ], # conditions
}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/media_file/source/media_file_impl.cc b/trunk/src/modules/media_file/source/media_file_impl.cc
new file mode 100644
index 0000000..6c42fa1
--- /dev/null
+++ b/trunk/src/modules/media_file/source/media_file_impl.cc
@@ -0,0 +1,1372 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "media_file_impl.h"
+#include "tick_util.h"
+#include "trace.h"
+
+// Case-insensitive string comparison: MSVC/WinCE spell these
+// _stricmp/_strnicmp, POSIX platforms provide strcasecmp/strncasecmp.
+#if (defined(WIN32) || defined(WINCE))
+    #define STR_CASE_CMP _stricmp
+    #define STR_NCASE_CMP _strnicmp
+#else
+    #define STR_CASE_CMP strcasecmp
+    #define STR_NCASE_CMP strncasecmp
+#endif
+
+namespace webrtc {
+// Factory: creates a MediaFileImpl. |id| is only used to tag trace output.
+MediaFile* MediaFile::CreateMediaFile(const WebRtc_Word32 id)
+{
+    return new MediaFileImpl(id);
+}
+
+// Counterpart to CreateMediaFile(). The cast to MediaFileImpl* makes the
+// derived destructor run. NOTE(review): presumably ~MediaFile is not
+// virtual/public - confirm against media_file.h.
+void MediaFile::DestroyMediaFile(MediaFile* module)
+{
+    delete static_cast<MediaFileImpl*>(module);
+}
+
+// Initializes all state to "idle": no file open, no playback or recording
+// in progress, no callback registered. _fileFormat is set to an invalid
+// sentinel (-1) until a StartPlaying*/StartRecording* call selects a format.
+MediaFileImpl::MediaFileImpl(const WebRtc_Word32 id)
+    : _id(id),
+      _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _callbackCrit(CriticalSectionWrapper::CreateCriticalSection()),
+      _ptrFileUtilityObj(NULL),
+      codec_info_(),
+      _ptrInStream(NULL),
+      _ptrOutStream(NULL),
+      _fileFormat((FileFormats)-1),
+      _recordDurationMs(0),
+      _playoutPositionMs(0),
+      _notificationMs(0),
+      _playingActive(false),
+      _recordingActive(false),
+      _isStereo(false),
+      _openFile(false),
+      _fileName(),
+      _ptrCallback(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, id, "Created");
+
+    // Make the codec name and file name read as empty C strings.
+    codec_info_.plname[0] = '\0';
+    _fileName[0] = '\0';
+}
+
+
+// Stops any active playback/recording, then releases the file-utility
+// object and (only if this object opened the files itself) the streams.
+// The critical sections are deleted last, after the scoped lock is gone.
+MediaFileImpl::~MediaFileImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id, "~MediaFileImpl()");
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(_playingActive)
+        {
+            StopPlaying();
+        }
+
+        if(_recordingActive)
+        {
+            StopRecording();
+        }
+
+        delete _ptrFileUtilityObj;
+
+        // Streams are owned by this object only when it opened the files
+        // itself (_openFile); caller-supplied streams are not deleted.
+        if(_openFile)
+        {
+            delete _ptrInStream;
+            _ptrInStream = NULL;
+            delete _ptrOutStream;
+            _ptrOutStream = NULL;
+        }
+    }
+
+    delete _crit;
+    delete _callbackCrit;
+}
+
+// Module interface: changes the id used in trace output. Always succeeds.
+WebRtc_Word32 MediaFileImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// Module interface stub: MediaFile does not use the Process() machinery,
+// so this always returns -1 (and logs a warning if it is ever called).
+WebRtc_Word32 MediaFileImpl::TimeUntilNextProcess()
+{
+    WEBRTC_TRACE(
+        kTraceWarning,
+        kTraceFile,
+        _id,
+        "TimeUntilNextProcess: This method is not used by MediaFile class.");
+    return -1;
+}
+
+// Module interface stub: never used by MediaFile; always returns -1.
+WebRtc_Word32 MediaFileImpl::Process()
+{
+    WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                 "Process: This method is not used by MediaFile class.");
+    return -1;
+}
+
+// Reads the next chunk of video data from a playing AVI file.
+// Thin wrapper around PlayoutData() with video == true.
+WebRtc_Word32 MediaFileImpl::PlayoutAVIVideoData(
+    WebRtc_Word8* buffer,
+    WebRtc_UWord32& dataLengthInBytes)
+{
+    return PlayoutData( buffer, dataLengthInBytes, true);
+}
+
+// Reads the next chunk of audio data from the currently playing file.
+// Thin wrapper around PlayoutData() with video == false.
+WebRtc_Word32 MediaFileImpl::PlayoutAudioData(WebRtc_Word8* buffer,
+                                WebRtc_UWord32& dataLengthInBytes)
+{
+    return PlayoutData( buffer, dataLengthInBytes, false);
+}
+
+// Reads the next block of playout data from the file being played.
+//
+// buffer            - [out] destination for the data read.
+// dataLengthInBytes - [in]  capacity of |buffer| in bytes;
+//                     [out] number of bytes actually read (0 on failure).
+// video             - true to read AVI video data, false for audio.
+//
+// Returns 0 on success (including end-of-file, which stops playback and
+// fires PlayFileEnded via HandlePlayCallbacks()), -1 on error.
+WebRtc_Word32 MediaFileImpl::PlayoutData(WebRtc_Word8* buffer,
+                                         WebRtc_UWord32& dataLengthInBytes,
+                                         bool video)
+{
+    // Note: %u matches the WebRtc_UWord32 argument (the original %ld was a
+    // varargs type mismatch on LP64 platforms).
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+               "MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %u)",
+                 buffer, dataLengthInBytes);
+
+    // Remember the caller-supplied capacity; from here on dataLengthInBytes
+    // is the out-parameter and must read 0 on every failure path.
+    const WebRtc_UWord32 bufferLengthInBytes = dataLengthInBytes;
+    dataLengthInBytes = 0;
+
+    if(buffer == NULL || bufferLengthInBytes == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Buffer pointer or length is NULL!");
+        return -1;
+    }
+
+    WebRtc_Word32 bytesRead = 0;
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(!_playingActive)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Not currently playing!");
+            return -1;
+        }
+
+        if(!_ptrFileUtilityObj)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Playing, but no FileUtility object!");
+            StopPlaying();
+            return -1;
+        }
+
+        switch(_fileFormat)
+        {
+            case kFileFormatPcm32kHzFile:
+            case kFileFormatPcm16kHzFile:
+            case kFileFormatPcm8kHzFile:
+                bytesRead = _ptrFileUtilityObj->ReadPCMData(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                break;
+            case kFileFormatCompressedFile:
+                bytesRead = _ptrFileUtilityObj->ReadCompressedData(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                break;
+            case kFileFormatWavFile:
+                bytesRead = _ptrFileUtilityObj->ReadWavDataAsMono(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                break;
+            case kFileFormatPreencodedFile:
+                bytesRead = _ptrFileUtilityObj->ReadPreEncodedData(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                if(bytesRead > 0)
+                {
+                    // Pre-encoded reads bypass the notification/end-of-file
+                    // bookkeeping in HandlePlayCallbacks() entirely.
+                    dataLengthInBytes = bytesRead;
+                    return 0;
+                }
+                break;
+            case kFileFormatAviFile:
+            {
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+                // AVI data is read directly by the utility object (not via
+                // _ptrInStream); |video| selects the video or audio track.
+                if(video)
+                {
+                    bytesRead = _ptrFileUtilityObj->ReadAviVideoData(
+                        buffer,
+                        bufferLengthInBytes);
+                }
+                else
+                {
+                    bytesRead = _ptrFileUtilityObj->ReadAviAudioData(
+                        buffer,
+                        bufferLengthInBytes);
+                }
+                break;
+#else
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Invalid file format: %d", kFileFormatAviFile);
+                assert(false);
+                break;
+#endif
+            }
+        }
+
+        if( bytesRead > 0)
+        {
+            dataLengthInBytes =(WebRtc_UWord32) bytesRead;
+        }
+    }
+    // Callbacks are made outside of _crit; HandlePlayCallbacks() takes
+    // _callbackCrit itself. bytesRead <= 0 is treated as end of file there.
+    HandlePlayCallbacks(bytesRead);
+    return 0;
+}
+
+// Shared post-read bookkeeping for PlayoutData(): updates the playout
+// position, decides whether a PlayNotification is due, stops playback on
+// end-of-file (bytesRead <= 0) and fires the registered callbacks.
+// NOTE(review): member state is updated here without _crit held - this
+// looks safe only if a single thread drives playout; confirm with callers.
+void MediaFileImpl::HandlePlayCallbacks(WebRtc_Word32 bytesRead)
+{
+    bool playEnded = false;
+    WebRtc_UWord32 callbackNotifyMs = 0;
+
+    if(bytesRead > 0)
+    {
+        // Check if it's time for PlayNotification(..).
+        _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+        if(_notificationMs)
+        {
+            if(_playoutPositionMs >= _notificationMs)
+            {
+                // Notify once, then disarm until a new time is set.
+                _notificationMs = 0;
+                callbackNotifyMs = _playoutPositionMs;
+            }
+        }
+    }
+    else
+    {
+        // If no bytes were read assume end of file.
+        StopPlaying();
+        playEnded = true;
+    }
+
+    // Only _callbackCrit may and should be taken when making callbacks.
+    CriticalSectionScoped lock(_callbackCrit);
+    if(_ptrCallback)
+    {
+        if(callbackNotifyMs)
+        {
+            _ptrCallback->PlayNotification(_id, callbackNotifyMs);
+        }
+        if(playEnded)
+        {
+            _ptrCallback->PlayFileEnded(_id);
+        }
+    }
+}
+
+// Reads the next block of stereo audio, split into left/right channels.
+// Only supported while playing a stereo WAV file.
+//
+// bufferLeft/bufferRight - [out] per-channel destinations.
+// dataLengthInBytes      - [in]  capacity of EACH buffer in bytes;
+//                          [out] bytes written per channel (0 on failure).
+//
+// Returns 0 on success (including end-of-file, which stops playback and
+// fires PlayFileEnded), -1 on error.
+WebRtc_Word32 MediaFileImpl::PlayoutStereoData(
+    WebRtc_Word8* bufferLeft,
+    WebRtc_Word8* bufferRight,
+    WebRtc_UWord32& dataLengthInBytes)
+{
+    // Note: %u matches the WebRtc_UWord32 argument (the original %ld was a
+    // varargs type mismatch on LP64 platforms).
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                 "MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,\
+ Len= %u)",
+                 bufferLeft,
+                 bufferRight,
+                 dataLengthInBytes);
+
+    // Remember the caller-supplied capacity; the reference doubles as the
+    // out-parameter and must read 0 on every failure path.
+    const WebRtc_UWord32 bufferLengthInBytes = dataLengthInBytes;
+    dataLengthInBytes = 0;
+
+    if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "A buffer pointer or the length is NULL!");
+        return -1;
+    }
+
+    bool playEnded = false;
+    WebRtc_UWord32 callbackNotifyMs = 0;
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(!_playingActive || !_isStereo)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Not currently playing stereo!");
+            return -1;
+        }
+
+        if(!_ptrFileUtilityObj)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceFile,
+                _id,
+                "Playing stereo, but the FileUtility objects is NULL!");
+            StopPlaying();
+            return -1;
+        }
+
+        // Stereo playout only supported for WAV files.
+        WebRtc_Word32 bytesRead = 0;
+        switch(_fileFormat)
+        {
+            case kFileFormatWavFile:
+                    bytesRead = _ptrFileUtilityObj->ReadWavDataAsStereo(
+                        *_ptrInStream,
+                        bufferLeft,
+                        bufferRight,
+                        bufferLengthInBytes);
+                    break;
+            default:
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Trying to read non-WAV as stereo audio\
+ (not supported)");
+                break;
+        }
+
+        if(bytesRead > 0)
+        {
+            dataLengthInBytes = bytesRead;
+
+            // Check if it's time for PlayNotification(..).
+            _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+            if(_notificationMs)
+            {
+                if(_playoutPositionMs >= _notificationMs)
+                {
+                    // Notify once, then disarm until a new time is set.
+                    _notificationMs = 0;
+                    callbackNotifyMs = _playoutPositionMs;
+                }
+            }
+        }
+        else
+        {
+            // If no bytes were read assume end of file.
+            StopPlaying();
+            playEnded = true;
+        }
+    }
+
+    // Callbacks are made with only _callbackCrit held, never _crit.
+    CriticalSectionScoped lock(_callbackCrit);
+    if(_ptrCallback)
+    {
+        if(callbackNotifyMs)
+        {
+            _ptrCallback->PlayNotification(_id, callbackNotifyMs);
+        }
+        if(playEnded)
+        {
+            _ptrCallback->PlayFileEnded(_id);
+        }
+    }
+    return 0;
+}
+
+// Starts audio playback from |fileName|. Convenience wrapper around
+// StartPlayingFile() with videoOnly forced to false.
+WebRtc_Word32 MediaFileImpl::StartPlayingAudioFile(
+    const WebRtc_Word8* fileName,
+    const WebRtc_UWord32 notificationTimeMs,
+    const bool loop,
+    const FileFormats format,
+    const CodecInst* codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs)
+{
+    const bool videoOnly = false;
+    return StartPlayingFile(fileName, notificationTimeMs, loop, videoOnly,
+                            format, codecInst, startPointMs, stopPointMs);
+}
+
+
+// Starts video (AVI) playback from |fileName|. Convenience wrapper around
+// StartPlayingFile() with no notification, no start/stop points and no
+// codec instance (codecInst == 0 is valid for AVI playback).
+WebRtc_Word32 MediaFileImpl::StartPlayingVideoFile(const WebRtc_Word8* fileName,
+                                                   const bool loop,
+                                                   bool videoOnly,
+                                                   const FileFormats format)
+{
+
+    const WebRtc_UWord32 notificationTimeMs = 0;
+    const WebRtc_UWord32 startPointMs       = 0;
+    const WebRtc_UWord32 stopPointMs        = 0;
+    return StartPlayingFile(fileName, notificationTimeMs, loop, videoOnly,
+                            format, 0, startPointMs, stopPointMs);
+}
+
+// Opens |fileName| and starts playback via StartPlayingStream().
+// Validates name/format/positions first, opens a FileWrapper for all
+// formats except AVI (which the utility object opens itself), and records
+// that this object owns the stream (_openFile) so StopPlaying() can
+// reclaim it. Returns 0 on success, -1 on any failure (the stream is
+// closed/deleted before returning).
+WebRtc_Word32 MediaFileImpl::StartPlayingFile(
+    const WebRtc_Word8* fileName,
+    const WebRtc_UWord32 notificationTimeMs,
+    const bool loop,
+    bool videoOnly,
+    const FileFormats format,
+    const CodecInst* codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs)
+{
+
+    if(!ValidFileName(fileName))
+    {
+        return -1;
+    }
+    if(!ValidFileFormat(format,codecInst))
+    {
+        return -1;
+    }
+    if(!ValidFilePositions(startPointMs,stopPointMs))
+    {
+        return -1;
+    }
+
+    // Check that the file will play longer than notificationTimeMs ms.
+    if((startPointMs && stopPointMs && !loop) &&
+       (notificationTimeMs > (stopPointMs - startPointMs)))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "specified notification time is longer than amount of ms that will\
+ be played");
+        return -1;
+    }
+
+    // NOTE(review): with standard operator new this NULL check is dead code;
+    // kept as-is in case Create() can return NULL on some configuration.
+    FileWrapper* inputStream = FileWrapper::Create();
+    if(inputStream == NULL)
+    {
+       WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                    "Failed to allocate input stream for file %s", fileName);
+        return -1;
+    }
+
+    // TODO (hellner): make all formats support reading from stream.
+    bool useStream = (format != kFileFormatAviFile);
+    if( useStream)
+    {
+        // Open for reading; |loop| makes the wrapper rewind at EOF.
+        if(inputStream->OpenFile(fileName, true, loop) != 0)
+        {
+            delete inputStream;
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Could not open input file %s", fileName);
+            return -1;
+        }
+    }
+
+    if(StartPlayingStream(*inputStream, fileName, loop, notificationTimeMs,
+                          format, codecInst, startPointMs, stopPointMs,
+                          videoOnly) == -1)
+    {
+        if( useStream)
+        {
+            inputStream->CloseFile();
+        }
+        delete inputStream;
+        return -1;
+    }
+
+    // Mark the stream as owned by this object and remember the name for
+    // error messages. strncpy may not terminate, hence the explicit NUL.
+    CriticalSectionScoped lock(_crit);
+    _openFile = true;
+    strncpy(_fileName, fileName, sizeof(_fileName));
+    _fileName[sizeof(_fileName) - 1] = '\0';
+    return 0;
+}
+
+// Starts audio playback from a caller-owned stream. The stream is NOT
+// taken over by this object (no filename, no looping, audio only).
+WebRtc_Word32 MediaFileImpl::StartPlayingAudioStream(
+    InStream& stream,
+    const WebRtc_UWord32 notificationTimeMs,
+    const FileFormats format,
+    const CodecInst* codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs)
+{
+    return StartPlayingStream(stream, 0, false, notificationTimeMs, format,
+                              codecInst, startPointMs, stopPointMs);
+}
+
+// Shared implementation behind all StartPlaying* variants. Validates the
+// arguments, creates the ModuleFileUtility reader appropriate for |format|
+// and, on success, marks the module as playing with |stream| as the input.
+// |filename| is used only for AVI playback (read directly from the file)
+// and error messages; |codecInst| is required for PCM and pre-encoded
+// formats. Returns 0 on success, -1 on failure; all partially-initialized
+// state is torn down via StopPlaying().
+WebRtc_Word32 MediaFileImpl::StartPlayingStream(
+    InStream& stream,
+    const WebRtc_Word8* filename,
+    bool loop,
+    const WebRtc_UWord32 notificationTimeMs,
+    const FileFormats format,
+    const CodecInst*  codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs,
+    bool videoOnly)
+{
+    if(!ValidFileFormat(format,codecInst))
+    {
+        return -1;
+    }
+
+    if(!ValidFilePositions(startPointMs,stopPointMs))
+    {
+        return -1;
+    }
+
+    // Playback and recording are mutually exclusive on one instance.
+    CriticalSectionScoped lock(_crit);
+    if(_playingActive || _recordingActive)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "StartPlaying called, but already playing or recording file %s",
+            (_fileName[0] == '\0') ? "(name not set)" : _fileName);
+        return -1;
+    }
+
+    if(_ptrFileUtilityObj != NULL)
+    {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceFile,
+                     _id,
+                     "StartPlaying called, but FileUtilityObj already exists!");
+        StopPlaying();
+        return -1;
+    }
+
+    _ptrFileUtilityObj = new ModuleFileUtility(_id);
+    if(_ptrFileUtilityObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "Failed to create FileUtilityObj!");
+        return -1;
+    }
+
+    // Initialize the format-specific reader. Each failure path calls
+    // StopPlaying() to dispose of _ptrFileUtilityObj created above.
+    switch(format)
+    {
+        case kFileFormatWavFile:
+        {
+            if(_ptrFileUtilityObj->InitWavReading(stream, startPointMs,
+                                                  stopPointMs) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid WAV file!");
+                StopPlaying();
+                return -1;
+            }
+            _fileFormat = kFileFormatWavFile;
+            break;
+        }
+        case kFileFormatCompressedFile:
+        {
+            if(_ptrFileUtilityObj->InitCompressedReading(stream, startPointMs,
+                                                         stopPointMs) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid Compressed file!");
+                StopPlaying();
+                return -1;
+            }
+            _fileFormat = kFileFormatCompressedFile;
+            break;
+        }
+        case kFileFormatPcm8kHzFile:
+        case kFileFormatPcm16kHzFile:
+        case kFileFormatPcm32kHzFile:
+        {
+            // ValidFileFormat() called in the beginning of this function
+            // prevents codecInst from being NULL here.
+            assert(codecInst != NULL);
+            if(!ValidFrequency(codecInst->plfreq) ||
+               _ptrFileUtilityObj->InitPCMReading(stream, startPointMs,
+                                                  stopPointMs,
+                                                  codecInst->plfreq) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid raw 8 or 16 KHz PCM file!");
+                StopPlaying();
+                return -1;
+            }
+
+            _fileFormat = format;
+            break;
+        }
+        case kFileFormatPreencodedFile:
+        {
+            // ValidFileFormat() called in the beginning of this function
+            // prevents codecInst from being NULL here.
+            assert(codecInst != NULL);
+            if(_ptrFileUtilityObj->InitPreEncodedReading(stream, *codecInst) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid PreEncoded file!");
+                StopPlaying();
+                return -1;
+            }
+
+            _fileFormat = kFileFormatPreencodedFile;
+            break;
+        }
+        case kFileFormatAviFile:
+        {
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+            // AVI is read from |filename| directly, not from |stream|.
+            if(_ptrFileUtilityObj->InitAviReading( filename, videoOnly, loop))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid AVI file!");
+                StopPlaying();
+
+                return -1;
+            }
+
+            _ptrFileUtilityObj->codec_info(codec_info_);
+
+            _fileFormat = kFileFormatAviFile;
+            break;
+#else
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Invalid file format: %d", kFileFormatAviFile);
+            assert(false);
+            break;
+#endif
+        }
+    }
+    // NOTE(review): for AVI this repeats the codec_info() call made above;
+    // appears redundant but harmless - confirm before simplifying.
+    if(_ptrFileUtilityObj->codec_info(codec_info_) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Failed to retrieve codec info!");
+        StopPlaying();
+        return -1;
+    }
+
+    _isStereo = (codec_info_.channels == 2);
+    if(_isStereo && (_fileFormat != kFileFormatWavFile))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                     "Stereo is only allowed for WAV files");
+        StopPlaying();
+        return -1;
+    }
+    // All checks passed: commit playback state.
+    _playingActive = true;
+    _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+    _ptrInStream = &stream;
+    _notificationMs = notificationTimeMs;
+
+    return 0;
+}
+
+// Stops playback and releases playback resources. The cleanup runs
+// unconditionally, so calling this while not playing still resets state
+// but returns -1 (callers treat that as a warning, not a hard error).
+WebRtc_Word32 MediaFileImpl::StopPlaying()
+{
+
+    CriticalSectionScoped lock(_crit);
+    _isStereo = false;
+    if(_ptrFileUtilityObj)
+    {
+        delete _ptrFileUtilityObj;
+        _ptrFileUtilityObj = NULL;
+    }
+    if(_ptrInStream)
+    {
+        // If MediaFileImpl opened the InStream it must be reclaimed here.
+        if(_openFile)
+        {
+            delete _ptrInStream;
+            _openFile = false;
+        }
+        _ptrInStream = NULL;
+    }
+
+    // Clear the cached codec info so IsPlaying()/codec queries read clean.
+    codec_info_.pltype = 0;
+    codec_info_.plname[0] = '\0';
+
+    if(!_playingActive)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                     "playing is not active!");
+        return -1;
+    }
+
+    _playingActive = false;
+    return 0;
+}
+
+// Returns true while a playback session started by StartPlaying* is active.
+bool MediaFileImpl::IsPlaying()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsPlaying()");
+    CriticalSectionScoped lock(_crit);
+    return _playingActive;
+}
+
+// Records one chunk of audio. Thin wrapper around IncomingAudioVideoData()
+// with video == false.
+WebRtc_Word32 MediaFileImpl::IncomingAudioData(
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    return IncomingAudioVideoData( buffer, bufferLengthInBytes, false);
+}
+
+// Records one chunk of AVI video. Thin wrapper around
+// IncomingAudioVideoData() with video == true.
+WebRtc_Word32 MediaFileImpl::IncomingAVIVideoData(
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    return IncomingAudioVideoData( buffer, bufferLengthInBytes, true);
+}
+
+// Writes one chunk of incoming audio (video == false) or AVI video
+// (video == true) data to the file being recorded.
+//
+// buffer              - data to write.
+// bufferLengthInBytes - number of bytes in |buffer|; must be > 0.
+// video               - selects the AVI video track when recording AVI.
+//
+// Returns 0 on success; -1 on error or when recording stopped because not
+// all bytes could be written (e.g. max file size reached) - in that case
+// RecordFileEnded is fired before returning.
+WebRtc_Word32 MediaFileImpl::IncomingAudioVideoData(
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 bufferLengthInBytes,
+    const bool video)
+{
+    // Note: %u matches the WebRtc_UWord32 argument (the original %hd was a
+    // varargs type mismatch); closing paren restored in the message.
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                 "MediaFile::IncomingData(buffer= 0x%x, bufLen= %u)",
+                 buffer, bufferLengthInBytes);
+
+    if(buffer == NULL || bufferLengthInBytes == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Buffer pointer or length is NULL!");
+        return -1;
+    }
+
+    bool recordingEnded = false;
+    WebRtc_UWord32 callbackNotifyMs = 0;
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(!_recordingActive)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Not currently recording!");
+            return -1;
+        }
+        if(_ptrOutStream == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Recording is active, but output stream is NULL!");
+            assert(false);
+            return -1;
+        }
+
+        // Default the sample count to one codec packet; the PCM and L16 WAV
+        // paths below derive the real count from the bytes written instead.
+        WebRtc_Word32 bytesWritten = 0;
+        WebRtc_UWord32 samplesWritten = codec_info_.pacsize;
+        if(_ptrFileUtilityObj)
+        {
+            switch(_fileFormat)
+            {
+                case kFileFormatPcm8kHzFile:
+                case kFileFormatPcm16kHzFile:
+                case kFileFormatPcm32kHzFile:
+                    bytesWritten = _ptrFileUtilityObj->WritePCMData(
+                        *_ptrOutStream,
+                        buffer,
+                        bufferLengthInBytes);
+
+                    // Sample size is 2 bytes.
+                    if(bytesWritten > 0)
+                    {
+                        samplesWritten = bytesWritten/sizeof(WebRtc_Word16);
+                    }
+                    break;
+                case kFileFormatCompressedFile:
+                    bytesWritten = _ptrFileUtilityObj->WriteCompressedData(
+                        *_ptrOutStream, buffer, bufferLengthInBytes);
+                    break;
+                case kFileFormatWavFile:
+                    bytesWritten = _ptrFileUtilityObj->WriteWavData(
+                        *_ptrOutStream,
+                        buffer,
+                        bufferLengthInBytes);
+                    if(bytesWritten > 0 && STR_NCASE_CMP(codec_info_.plname,
+                                                         "L16", 4) == 0)
+                    {
+                        // Sample size is 2 bytes.
+                        samplesWritten = bytesWritten/sizeof(WebRtc_Word16);
+                    }
+                    break;
+                case kFileFormatPreencodedFile:
+                    bytesWritten = _ptrFileUtilityObj->WritePreEncodedData(
+                        *_ptrOutStream, buffer, bufferLengthInBytes);
+                    break;
+                case kFileFormatAviFile:
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+                    if(video)
+                    {
+                        bytesWritten = _ptrFileUtilityObj->WriteAviVideoData(
+                            buffer, bufferLengthInBytes);
+                    }else
+                    {
+                        bytesWritten = _ptrFileUtilityObj->WriteAviAudioData(
+                            buffer, bufferLengthInBytes);
+                    }
+                    break;
+#else
+                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                                 "Invalid file format: %d", kFileFormatAviFile);
+                    assert(false);
+                    break;
+#endif
+            }
+        } else {
+            // TODO (hellner): quick look at the code makes me think that this
+            //                 code is never executed. Remove?
+            if(_ptrOutStream)
+            {
+                if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
+                {
+                    bytesWritten = bufferLengthInBytes;
+                }
+            }
+        }
+
+        // NOTE(review): assumes codec_info_.plfreq >= 1000; plfreq/1000
+        // would be 0 (division by zero) otherwise - confirm callers always
+        // set a valid frequency before recording starts.
+        if(!video)
+        {
+            _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);
+        }
+
+        // Check if it's time for RecordNotification(..).
+        if(_notificationMs)
+        {
+            if(_recordDurationMs  >= _notificationMs)
+            {
+                // Notify once, then disarm until a new time is set.
+                _notificationMs = 0;
+                callbackNotifyMs = _recordDurationMs;
+            }
+        }
+        // A short write (e.g. max file size hit) terminates the recording.
+        if(bytesWritten < (WebRtc_Word32)bufferLengthInBytes)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Failed to write all requested bytes!");
+            StopRecording();
+            recordingEnded = true;
+        }
+    }
+
+    // Only _callbackCrit may and should be taken when making callbacks.
+    CriticalSectionScoped lock(_callbackCrit);
+    if(_ptrCallback)
+    {
+        if(callbackNotifyMs)
+        {
+            _ptrCallback->RecordNotification(_id, callbackNotifyMs);
+        }
+        if(recordingEnded)
+        {
+            _ptrCallback->RecordFileEnded(_id);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Starts recording audio to |fileName|. Convenience wrapper around
+// StartRecordingFile() with a default-constructed (unused) video codec.
+WebRtc_Word32 MediaFileImpl::StartRecordingAudioFile(
+    const WebRtc_Word8* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const WebRtc_UWord32 notificationTimeMs,
+    const WebRtc_UWord32 maxSizeBytes)
+{
+    VideoCodec dummyCodecInst;
+    return StartRecordingFile(fileName, format, codecInst, dummyCodecInst,
+                              notificationTimeMs, maxSizeBytes);
+}
+
+
+// Starts recording video (AVI) to |fileName|. Convenience wrapper around
+// StartRecordingFile() with no notification and no size limit.
+WebRtc_Word32 MediaFileImpl::StartRecordingVideoFile(
+    const WebRtc_Word8* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const VideoCodec& videoCodecInst,
+    bool videoOnly)
+{
+    const WebRtc_UWord32 notificationTimeMs = 0;
+    const WebRtc_UWord32 maxSizeBytes       = 0;
+
+    return StartRecordingFile(fileName, format, codecInst, videoCodecInst,
+                              notificationTimeMs, maxSizeBytes, videoOnly);
+}
+
+// Opens |fileName| for writing and starts recording via
+// StartRecordingStream(). For all formats except AVI (which the utility
+// object opens itself) a FileWrapper is opened here, optionally capped at
+// |maxSizeBytes|, and ownership is recorded in _openFile so StopRecording()
+// can reclaim it. Returns 0 on success, -1 on failure (the stream is
+// closed/deleted before returning).
+WebRtc_Word32 MediaFileImpl::StartRecordingFile(
+    const WebRtc_Word8* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const VideoCodec& videoCodecInst,
+    const WebRtc_UWord32 notificationTimeMs,
+    const WebRtc_UWord32 maxSizeBytes,
+    bool videoOnly)
+{
+
+    if(!ValidFileName(fileName))
+    {
+        return -1;
+    }
+    if(!ValidFileFormat(format,&codecInst))
+    {
+        return -1;
+    }
+
+    FileWrapper* outputStream = FileWrapper::Create();
+    if(outputStream == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "Failed to allocate memory for output stream");
+        return -1;
+    }
+
+    // TODO (hellner): make all formats support writing to stream.
+    const bool useStream = ( format != kFileFormatAviFile);
+    if( useStream)
+    {
+        if(outputStream->OpenFile(fileName, false) != 0)
+        {
+            delete outputStream;
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Could not open output file '%s' for writing!",
+                         fileName);
+            return -1;
+        }
+    }
+    // 0 means "no limit"; a short write later ends the recording.
+    if(maxSizeBytes)
+    {
+        outputStream->SetMaxFileSize(maxSizeBytes);
+    }
+
+    if(StartRecordingStream(*outputStream, fileName, format, codecInst,
+                            videoCodecInst, notificationTimeMs,
+                            videoOnly) == -1)
+    {
+        if( useStream)
+        {
+            outputStream->CloseFile();
+        }
+        delete outputStream;
+        return -1;
+    }
+
+    // Mark the stream as owned by this object and remember the name for
+    // error messages. strncpy may not terminate, hence the explicit NUL.
+    CriticalSectionScoped lock(_crit);
+    _openFile = true;
+    strncpy(_fileName, fileName, sizeof(_fileName));
+    _fileName[sizeof(_fileName) - 1] = '\0';
+    return 0;
+}
+
+// Starts recording audio to a caller-owned stream. The stream is NOT
+// taken over by this object; the video codec argument is unused.
+WebRtc_Word32 MediaFileImpl::StartRecordingAudioStream(
+    OutStream& stream,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const WebRtc_UWord32 notificationTimeMs)
+{
+    VideoCodec dummyCodecInst;
+    return StartRecordingStream(stream, 0, format, codecInst, dummyCodecInst,
+                                notificationTimeMs);
+}
+
+WebRtc_Word32 MediaFileImpl::StartRecordingStream(
+    OutStream& stream,
+    const WebRtc_Word8* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const VideoCodec& videoCodecInst,
+    const WebRtc_UWord32 notificationTimeMs,
+    bool videoOnly)
+{
+
+    // Check codec info
+    if(!ValidFileFormat(format,&codecInst))
+    {
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_crit);
+    if(_recordingActive || _playingActive)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "StartRecording called, but already recording or playing file %s!",
+                   _fileName);
+        return -1;
+    }
+
+    if(_ptrFileUtilityObj != NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "StartRecording called, but fileUtilityObj already exists!");
+        StopRecording();
+        return -1;
+    }
+
+    _ptrFileUtilityObj = new ModuleFileUtility(_id);
+    if(_ptrFileUtilityObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "Cannot allocate fileUtilityObj!");
+        return -1;
+    }
+
+    CodecInst tmpAudioCodec;
+    memcpy(&tmpAudioCodec, &codecInst, sizeof(CodecInst));
+    switch(format)
+    {
+        case kFileFormatWavFile:
+        {
+            if(_ptrFileUtilityObj->InitWavWriting(stream, codecInst) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize WAV file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = kFileFormatWavFile;
+            break;
+        }
+        case kFileFormatCompressedFile:
+        {
+            // Write compression codec name at beginning of file
+            if(_ptrFileUtilityObj->InitCompressedWriting(stream, codecInst) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize Compressed file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = kFileFormatCompressedFile;
+            break;
+        }
+        case kFileFormatPcm8kHzFile:
+        case kFileFormatPcm16kHzFile:
+        {
+            if(!ValidFrequency(codecInst.plfreq) ||
+               _ptrFileUtilityObj->InitPCMWriting(stream, codecInst.plfreq) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize 8 or 16KHz PCM file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = format;
+            break;
+        }
+        case kFileFormatPreencodedFile:
+        {
+            if(_ptrFileUtilityObj->InitPreEncodedWriting(stream, codecInst) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize Pre-Encoded file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+
+            _fileFormat = kFileFormatPreencodedFile;
+            break;
+        }
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        case kFileFormatAviFile:
+        {
+            if( (_ptrFileUtilityObj->InitAviWriting(
+                    fileName,
+                    codecInst,
+                    videoCodecInst,videoOnly) == -1) ||
+                    (_ptrFileUtilityObj->codec_info(tmpAudioCodec) != 0))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize AVI file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = kFileFormatAviFile;
+            break;
+        }
+#endif
+        default:
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Invalid file format %d specified!", format);
+            delete _ptrFileUtilityObj;
+            _ptrFileUtilityObj = NULL;
+            return -1;
+        }
+    }
+    _isStereo = (tmpAudioCodec.channels == 2);
+    if(_isStereo)
+    {
+        if(_fileFormat != kFileFormatWavFile)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Stereo is only allowed for WAV files");
+            StopRecording();
+            return -1;
+        }
+        if((STR_NCASE_CMP(tmpAudioCodec.plname, "L16", 4) != 0) &&
+           (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMU", 5) != 0) &&
+           (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMA", 5) != 0))
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceFile,
+                _id,
+                "Stereo is only allowed for codec PCMU, PCMA and L16 ");
+            StopRecording();
+            return -1;
+        }
+    }
+    memcpy(&codec_info_, &tmpAudioCodec, sizeof(CodecInst));
+    _recordingActive = true;
+    _ptrOutStream = &stream;
+    _notificationMs = notificationTimeMs;
+    _recordDurationMs = 0;
+    return 0;
+}
+
+// Stops an active recording session: finalizes the size-dependent file
+// headers (WAV/AVI), releases the file-utility helper and, if this object
+// opened the output stream itself, reclaims it.
+// Returns 0 on success, -1 if no recording is in progress.
+WebRtc_Word32 MediaFileImpl::StopRecording()
+{
+
+    CriticalSectionScoped lock(_crit);
+    if(!_recordingActive)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                     "recording is not active!");
+        return -1;
+    }
+
+    _isStereo = false;
+
+    if(_ptrFileUtilityObj != NULL)
+    {
+        // Both the AVI and WAV headers have to be updated before closing the
+        // stream because they contain size information.
+        if((_fileFormat == kFileFormatWavFile) &&
+            (_ptrOutStream != NULL))
+        {
+            _ptrFileUtilityObj->UpdateWavHeader(*_ptrOutStream);
+        }
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        else if( _fileFormat == kFileFormatAviFile)
+        {
+            _ptrFileUtilityObj->CloseAviFile( );
+        }
+#endif
+        delete _ptrFileUtilityObj;
+        _ptrFileUtilityObj = NULL;
+    }
+
+    if(_ptrOutStream != NULL)
+    {
+        // If MediaFileImpl opened the OutStream it must be reclaimed here.
+        if(_openFile)
+        {
+            delete _ptrOutStream;
+            _openFile = false;
+        }
+        _ptrOutStream = NULL;
+    }
+
+    _recordingActive = false;
+    // Clear the cached codec information; pltype == 0 with an empty plname
+    // is the "unset" marker tested by codec_info().
+    codec_info_.pltype = 0;
+    codec_info_.plname[0] = '\0';
+
+    return 0;
+}
+
+// Returns true while a recording session started by one of the
+// StartRecording*() calls is active.
+bool MediaFileImpl::IsRecording()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsRecording()");
+    CriticalSectionScoped lock(_crit);
+    return _recordingActive;
+}
+
+// Reports how many milliseconds of audio have been recorded in the current
+// session via durationMs. Returns 0 on success; -1 (and durationMs = 0) if
+// no recording is active.
+WebRtc_Word32 MediaFileImpl::RecordDurationMs(WebRtc_UWord32& durationMs)
+{
+
+    CriticalSectionScoped lock(_crit);
+    if(!_recordingActive)
+    {
+        durationMs = 0;
+        return -1;
+    }
+    durationMs = _recordDurationMs;
+    return 0;
+}
+
+// Returns true if the active file session carries two audio channels.
+// _isStereo is only set for WAV recordings (see StartRecordingStream) and is
+// reset by StopRecording().
+bool MediaFileImpl::IsStereo()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsStereo()");
+    CriticalSectionScoped lock(_crit);
+    return _isStereo;
+}
+
+// Registers the observer that receives play/record notifications. Guarded by
+// the dedicated callback lock so it never contends with the main state lock.
+// NOTE(review): callback is stored without a NULL check — presumably passing
+// NULL is the way to unregister; confirm against callers.
+WebRtc_Word32 MediaFileImpl::SetModuleFileCallback(FileCallback* callback)
+{
+
+    CriticalSectionScoped lock(_callbackCrit);
+
+    _ptrCallback = callback;
+    return 0;
+}
+
+// Computes the playback duration in milliseconds of the file fileName,
+// interpreted according to format (freqInHz is needed for raw PCM formats).
+// Uses a temporary ModuleFileUtility so it does not disturb any active
+// play/record session. Returns 0 on success; -1 (durationMs untouched or 0)
+// on invalid arguments or if the duration cannot be determined.
+WebRtc_Word32 MediaFileImpl::FileDurationMs(const WebRtc_Word8* fileName,
+                                            WebRtc_UWord32& durationMs,
+                                            const FileFormats format,
+                                            const WebRtc_UWord32 freqInHz)
+{
+
+    if(!ValidFileName(fileName))
+    {
+        return -1;
+    }
+    if(!ValidFrequency(freqInHz))
+    {
+        return -1;
+    }
+
+    ModuleFileUtility* utilityObj = new ModuleFileUtility(_id);
+    // NOTE(review): dead check — a plain (non-nothrow) `new` throws
+    // std::bad_alloc rather than returning NULL, so this branch never runs.
+    if(utilityObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to allocate utility object!");
+        return -1;
+    }
+
+    const WebRtc_Word32 duration = utilityObj->FileDurationMs(fileName, format,
+                                                              freqInHz);
+    delete utilityObj;
+    if(duration == -1)
+    {
+        durationMs = 0;
+        return -1;
+    }
+
+    durationMs = duration;
+    return 0;
+}
+
+// Reports the current playout position in milliseconds via positionMs.
+// Returns 0 on success; -1 (and positionMs = 0) if playout is not active.
+WebRtc_Word32 MediaFileImpl::PlayoutPositionMs(WebRtc_UWord32& positionMs) const
+{
+    CriticalSectionScoped lock(_crit);
+    if(!_playingActive)
+    {
+        positionMs = 0;
+        return -1;
+    }
+    positionMs = _playoutPositionMs;
+    return 0;
+}
+
+// Copies the audio codec configuration of the active playout or recording
+// session into codecInst. Returns 0 on success; -1 if no session is active
+// or the cached codec information has been cleared.
+WebRtc_Word32 MediaFileImpl::codec_info(CodecInst& codecInst) const
+{
+    CriticalSectionScoped lock(_crit);
+    if(!_playingActive && !_recordingActive)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Neither playout nor recording has been initialized!");
+        return -1;
+    }
+    // pltype == 0 together with an empty plname is the "unset" marker
+    // written by StopRecording().
+    if (codec_info_.pltype == 0 && codec_info_.plname[0] == '\0')
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "The CodecInst for %s is unknown!",
+            _playingActive ? "Playback" : "Recording");
+        return -1;
+    }
+    memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
+    return 0;
+}
+
+// Copies the video codec configuration of the active session into codecInst.
+// Only meaningful when built with WEBRTC_MODULE_UTILITY_VIDEO; otherwise it
+// always fails. Returns 0 on success, -1 on any failure.
+WebRtc_Word32 MediaFileImpl::VideoCodecInst(VideoCodec& codecInst) const
+{
+    CriticalSectionScoped lock(_crit);
+    if(!_playingActive && !_recordingActive)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Neither playout nor recording has been initialized!");
+        return -1;
+    }
+    if( _ptrFileUtilityObj == NULL)
+    {
+        return -1;
+    }
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    // The utility object owns the authoritative copy; fetch it into a local
+    // before handing it to the caller.
+    VideoCodec videoCodec;
+    if( _ptrFileUtilityObj->VideoCodecInst( videoCodec) != 0)
+    {
+        return -1;
+    }
+    memcpy(&codecInst,&videoCodec,sizeof(VideoCodec));
+    return 0;
+#else
+    return -1;
+#endif
+}
+
+// Returns true if the combination of format and codecInst is usable.
+// Only one combination is rejected: a NULL codecInst together with a format
+// that cannot describe its own encoding (raw PCM and pre-encoded files).
+// Self-describing formats such as WAV may pass codecInst == NULL.
+bool MediaFileImpl::ValidFileFormat(const FileFormats format,
+                                    const CodecInst*  codecInst)
+{
+    if(codecInst == NULL)
+    {
+        if(format == kFileFormatPreencodedFile ||
+           format == kFileFormatPcm8kHzFile    ||
+           format == kFileFormatPcm16kHzFile   ||
+           format == kFileFormatPcm32kHzFile)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                         "Codec info required for file format specified!");
+            return false;
+        }
+    }
+    return true;
+}
+
+// Returns true if fileName is a non-NULL, non-empty string. No further
+// validation (existence, path syntax) is performed here.
+bool MediaFileImpl::ValidFileName(const WebRtc_Word8* fileName)
+{
+    if((fileName == NULL) ||(fileName[0] == '\0'))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1, "FileName not specified!");
+        return false;
+    }
+    return true;
+}
+
+
+// Returns true if the start/stop playback window is valid: both zero means
+// "play the whole file"; a non-zero stop point must lie at least 20 ms after
+// the start point. A non-zero start with a zero stop is accepted (presumably
+// meaning "play from startPointMs to end of file" — see StartPlayingFile).
+bool MediaFileImpl::ValidFilePositions(const WebRtc_UWord32 startPointMs,
+                                       const WebRtc_UWord32 stopPointMs)
+{
+    if(startPointMs == 0 && stopPointMs == 0) // Default values
+    {
+        return true;
+    }
+    if(stopPointMs &&(startPointMs >= stopPointMs))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                     "startPointMs must be less than stopPointMs!");
+        return false;
+    }
+    if(stopPointMs &&((stopPointMs - startPointMs) < 20))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                     "minimum play duration for files is 20 ms!");
+        return false;
+    }
+    return true;
+}
+
+// Returns true if frequency is one of the sample rates this module supports
+// (8, 16 or 32 kHz); logs an error and returns false otherwise.
+bool MediaFileImpl::ValidFrequency(const WebRtc_UWord32 frequency)
+{
+    if((frequency == 8000) || (frequency == 16000)|| (frequency == 32000))
+    {
+        return true;
+    }
+    WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                 "Frequency should be 8000, 16000 or 32000 (Hz)");
+    return false;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/media_file/source/media_file_impl.h b/trunk/src/modules/media_file/source/media_file_impl.h
new file mode 100644
index 0000000..06e20d0
--- /dev/null
+++ b/trunk/src/modules/media_file/source/media_file_impl.h
@@ -0,0 +1,246 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
+#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
+
+#include "common_types.h"
+#include "media_file.h"
+#include "media_file_defines.h"
+#include "media_file_utility.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+// Concrete implementation of the MediaFile module interface. Supports audio
+// playout from / recording to WAV, raw PCM, compressed and pre-encoded
+// files or streams and, when WEBRTC_MODULE_UTILITY_VIDEO is defined, AVI
+// files with an optional audio track. State access is serialized with _crit
+// and callback registration with _callbackCrit (see media_file_impl.cc).
+class MediaFileImpl : public MediaFile
+{
+
+public:
+    MediaFileImpl(const WebRtc_Word32 id);
+    ~MediaFileImpl();
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    WebRtc_Word32 Process();
+    WebRtc_Word32 TimeUntilNextProcess();
+
+    // MediaFile functions
+    WebRtc_Word32 PlayoutAudioData(WebRtc_Word8*   audioBuffer,
+                                   WebRtc_UWord32& dataLengthInBytes);
+    WebRtc_Word32 PlayoutAVIVideoData(WebRtc_Word8* videoBuffer,
+                                      WebRtc_UWord32& dataLengthInBytes);
+    WebRtc_Word32 PlayoutStereoData(WebRtc_Word8* audioBufferLeft,
+                                    WebRtc_Word8* audioBufferRight,
+                                    WebRtc_UWord32& dataLengthInBytes);
+    virtual WebRtc_Word32 StartPlayingAudioFile(
+        const WebRtc_Word8*  fileName,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool           loop = false,
+        const FileFormats    format = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst = NULL,
+        const WebRtc_UWord32 startPointMs = 0,
+        const WebRtc_UWord32 stopPointMs = 0);
+    WebRtc_Word32 StartPlayingVideoFile(const WebRtc_Word8* fileName,
+                                        const bool          loop,
+                                        bool                videoOnly,
+                                        const FileFormats   format);
+    WebRtc_Word32 StartPlayingAudioStream(
+        InStream&            stream,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const FileFormats    format = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst = NULL,
+        const WebRtc_UWord32 startPointMs = 0,
+        const WebRtc_UWord32 stopPointMs = 0);
+    WebRtc_Word32 StopPlaying();
+    bool IsPlaying();
+    WebRtc_Word32 PlayoutPositionMs(WebRtc_UWord32& positionMs) const;
+    WebRtc_Word32 IncomingAudioData(const WebRtc_Word8*  audioBuffer,
+                                    const WebRtc_UWord32 bufferLength);
+    WebRtc_Word32 IncomingAVIVideoData(const WebRtc_Word8*  audioBuffer,
+                                       const WebRtc_UWord32 bufferLength);
+    WebRtc_Word32 StartRecordingAudioFile(
+        const WebRtc_Word8*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const WebRtc_UWord32 maxSizeBytes = 0);
+    WebRtc_Word32 StartRecordingVideoFile(
+        const WebRtc_Word8* fileName,
+        const FileFormats   format,
+        const CodecInst&    codecInst,
+        const VideoCodec&   videoCodecInst,
+        bool                videoOnly = false);
+    WebRtc_Word32 StartRecordingAudioStream(
+        OutStream&           stream,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0);
+    WebRtc_Word32 StopRecording();
+    bool IsRecording();
+    WebRtc_Word32 RecordDurationMs(WebRtc_UWord32& durationMs);
+    bool IsStereo();
+    WebRtc_Word32 SetModuleFileCallback(FileCallback* callback);
+    WebRtc_Word32 FileDurationMs(
+        const WebRtc_Word8*  fileName,
+        WebRtc_UWord32&      durationMs,
+        const FileFormats    format,
+        const WebRtc_UWord32 freqInHz = 16000);
+    WebRtc_Word32 codec_info(CodecInst& codecInst) const;
+    WebRtc_Word32 VideoCodecInst(VideoCodec& codecInst) const;
+
+private:
+    // Returns true if the combination of format and codecInst is valid.
+    static bool ValidFileFormat(const FileFormats format,
+                                const CodecInst*  codecInst);
+
+
+    // Returns true if the filename is valid
+    static bool ValidFileName(const WebRtc_Word8* fileName);
+
+    // Returns true if the combination of startPointMs and stopPointMs is valid.
+    static bool ValidFilePositions(const WebRtc_UWord32 startPointMs,
+                                   const WebRtc_UWord32 stopPointMs);
+
+    // Open the file specified by fileName for reading (relative path is
+    // allowed). FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. If loop is true the file will be played until
+    // StopPlaying() is called. When end of file is reached the file is read
+    // from the start. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. Only video will be read if videoOnly is
+    // true. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    WebRtc_Word32 StartPlayingFile(
+        const WebRtc_Word8*  fileName,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool           loop               = false,
+        bool                 videoOnly          = false,
+        const FileFormats    format             = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst          = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0);
+
+    // Opens the file specified by fileName for reading (relative path is
+    // allowed) if format is kFileFormatAviFile otherwise use stream for
+    // reading. FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. If loop is true the file will be played until
+    // StopPlaying() is called. When end of file is reached the file is read
+    // from the start. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. Only video will be read if videoOnly is
+    // true. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    // TODO (hellner): there is no reason why fileName should be needed here.
+    WebRtc_Word32 StartPlayingStream(
+        InStream&            stream,
+        const WebRtc_Word8*  fileName,
+        bool                 loop,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const FileFormats    format             = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst          = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0,
+        bool                 videoOnly          = true);
+
+    // Writes one frame into dataBuffer. dataLengthInBytes is both an input and
+    // output parameter. As input parameter it indicates the size of
+    // audioBuffer. As output parameter it indicates the number of bytes
+    // written to audioBuffer. If video is true the data written is a video
+    // frame otherwise it is an audio frame.
+    WebRtc_Word32 PlayoutData(WebRtc_Word8* dataBuffer,
+                              WebRtc_UWord32& dataLengthInBytes, bool video);
+
+    // Write one frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The frame is a video frame if video is true, otherwise it is
+    // an audio frame.
+    WebRtc_Word32 IncomingAudioVideoData(const WebRtc_Word8*  buffer,
+                                         const WebRtc_UWord32 bufferLength,
+                                         const bool video);
+
+    // Open/creates file specified by fileName for writing (relative path is
+    // allowed) if format is kFileFormatAviFile otherwise use stream for
+    // writing. FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that should be created/opened.
+    // codecInst specifies the encoding of the audio data. videoCodecInst
+    // specifies the encoding of the video data. maxSizeBytes specifies the
+    // number of bytes allowed to be written to file if it is greater than zero.
+    // If format is kFileFormatAviFile and videoOnly is true the AVI file will
+    // only contain video frames.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    WebRtc_Word32 StartRecordingFile(
+        const WebRtc_Word8*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const VideoCodec&    videoCodecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const WebRtc_UWord32 maxSizeBytes = 0,
+        bool                 videoOnly = false);
+
+    // Open/creates file specified by fileName for writing (relative path is
+    // allowed). FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that should be created/opened.
+    // codecInst specifies the encoding of the audio data. videoCodecInst
+    // specifies the encoding of the video data. maxSizeBytes specifies the
+    // number of bytes allowed to be written to file if it is greater than zero.
+    // If format is kFileFormatAviFile and videoOnly is true the AVI file will
+    // only contain video frames.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    // TODO (hellner): there is no reason why fileName should be needed here.
+    WebRtc_Word32 StartRecordingStream(
+        OutStream&           stream,
+        const WebRtc_Word8*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const VideoCodec&    videoCodecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool           videoOnly = false);
+
+    // Returns true if frequencyInHz is a supported frequency.
+    static bool ValidFrequency(const WebRtc_UWord32 frequencyInHz);
+
+    void HandlePlayCallbacks(WebRtc_Word32 bytesRead);
+
+    // Module identifier used for tracing.
+    WebRtc_Word32 _id;
+    // Guards all play/record state below; _callbackCrit only guards
+    // _ptrCallback so callbacks never contend with media operations.
+    CriticalSectionWrapper* _crit;
+    CriticalSectionWrapper* _callbackCrit;
+
+    // Helper that implements the per-format read/write logic. Owned.
+    ModuleFileUtility* _ptrFileUtilityObj;
+    // Audio codec of the active session; cleared by StopRecording().
+    CodecInst codec_info_;
+
+    // Active input/output streams. Owned only when _openFile is true.
+    InStream*  _ptrInStream;
+    OutStream* _ptrOutStream;
+
+    FileFormats _fileFormat;
+    WebRtc_UWord32 _recordDurationMs;
+    WebRtc_UWord32 _playoutPositionMs;
+    // Notification interval in ms; 0 disables callbacks.
+    WebRtc_UWord32 _notificationMs;
+
+    bool _playingActive;
+    bool _recordingActive;
+    bool _isStereo;
+    // True when this object opened the stream itself and must delete it.
+    bool _openFile;
+
+    // Name of the currently open file (fixed-size buffer).
+    WebRtc_Word8 _fileName[512];
+
+    FileCallback* _ptrCallback;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
diff --git a/trunk/src/modules/media_file/source/media_file_unittest.cc b/trunk/src/modules/media_file/source/media_file_unittest.cc
new file mode 100644
index 0000000..86f37f3
--- /dev/null
+++ b/trunk/src/modules/media_file/source/media_file_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "media_file.h"
+#include "gtest/gtest.h"
+
+// Intentionally empty; exists only so coverage tooling links this directory.
+TEST(MediaFileTest, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/modules/media_file/source/media_file_utility.cc b/trunk/src/modules/media_file/source/media_file_utility.cc
new file mode 100644
index 0000000..eeeb209
--- /dev/null
+++ b/trunk/src/modules/media_file/source/media_file_utility.cc
@@ -0,0 +1,2563 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "file_wrapper.h"
+#include "media_file_utility.h"
+#include "module_common_types.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "avi_file.h"
+#endif
+
+#if (defined(WIN32) || defined(WINCE))
+    #define STR_CASE_CMP _stricmp
+    #define STR_NCASE_CMP _strnicmp
+#else
+    #define STR_CASE_CMP strcasecmp
+    #define STR_NCASE_CMP strncasecmp
+#endif
+
+namespace {
+// wFormatTag codes from the RIFF/WAVE specification for the formats this
+// module can read and write.
+enum WaveFormats
+{
+    kWaveFormatPcm   = 0x0001,
+    kWaveFormatALaw  = 0x0006,
+    kWaveFormatMuLaw = 0x0007
+};
+
+// First 16 bytes of the WAVE header. ckID should be "RIFF", wave_ckID should
+// be "WAVE" and ckSize is the chunk size (4 + n).
+// NOTE(review): fields appear to be read/written raw from the file, which
+// assumes little-endian layout and no struct padding — confirm usage.
+struct WAVE_RIFF_header
+{
+    WebRtc_Word8  ckID[4];
+    WebRtc_Word32 ckSize;
+    WebRtc_Word8  wave_ckID[4];
+};
+
+// First 8 bytes of the format chunk. fmt_ckID should be "fmt ". fmt_ckSize
+// is the chunk size (16, 18 or 40 bytes).
+struct WAVE_CHUNK_header
+{
+   WebRtc_Word8  fmt_ckID[4];
+   WebRtc_Word32 fmt_ckSize;
+};
+} // unnamed namespace
+
+namespace webrtc {
+// Constructs an idle utility object; a subsequent Init*Reading/Writing call
+// selects the file format and direction.
+ModuleFileUtility::ModuleFileUtility(const WebRtc_Word32 id)
+    : _wavFormatObj(),
+      _dataSize(0),
+      _readSizeBytes(0),
+      _id(id),
+      _stopPointInMs(0),
+      _startPointInMs(0),
+      _playoutPositionMs(0),
+      _bytesWritten(0),
+      codec_info_(),
+      _codecId(kCodecNoCodec),
+      _bytesPerSample(0),
+      _readPos(0),
+      _reading(false),
+      _writing(false),
+      _tempData()
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+      ,
+      _aviAudioInFile(0),
+      _aviVideoInFile(0),
+      _aviOutFile(0)
+#endif
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                 "ModuleFileUtility::ModuleFileUtility()");
+    // Zero the whole struct, then mark it as "no codec selected" via an
+    // invalid payload type.
+    memset(&codec_info_,0,sizeof(CodecInst));
+    codec_info_.pltype = -1;
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    memset(&_videoCodec,0,sizeof(_videoCodec));
+#endif
+}
+
+// Releases the AVI input files if they are still open.
+// NOTE(review): _aviOutFile is not deleted here — it is only released by
+// CloseAviFile() when _writing is set, so an InitAviWriting() that never
+// reaches completion may leak it. Confirm against call sites.
+ModuleFileUtility::~ModuleFileUtility()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                 "ModuleFileUtility::~ModuleFileUtility()");
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    delete _aviAudioInFile;
+    delete _aviVideoInFile;
+#endif
+}
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+// Prepares _aviOutFile for writing an AVI file named |filename|: sets up the
+// video stream (I420 or VP8 only) and, unless videoOnly is set, a companion
+// audio stream described by audioCodecInst (PCMU, PCMA or L16 only).
+// Returns 0 on success, -1 on any unsupported codec or stream-creation
+// failure (in which case _writing remains false).
+WebRtc_Word32 ModuleFileUtility::InitAviWriting(
+    const WebRtc_Word8* filename,
+    const CodecInst& audioCodecInst,
+    const VideoCodec& videoCodecInst,
+    const bool videoOnly /*= false*/)
+{
+    _writing = false;
+
+    // Replace any previously created output file object.
+    delete _aviOutFile;
+    _aviOutFile = new AviFile( );
+
+    AVISTREAMHEADER videoStreamHeader;
+    videoStreamHeader.fccType = AviFile::MakeFourCc('v', 'i', 'd', 's');
+
+#ifdef VIDEOCODEC_I420
+    // NOTE(review): length 7 exceeds the "I420" literal (5 bytes incl. NUL);
+    // strncmp stops at the NUL so this behaves like a full string compare.
+    if (strncmp(videoCodecInst.plName, "I420", 7) == 0)
+    {
+        videoStreamHeader.fccHandler = AviFile::MakeFourCc('I','4','2','0');
+    }
+#endif
+#ifdef VIDEOCODEC_VP8
+    if (strncmp(videoCodecInst.plName, "VP8", 7) == 0)
+    {
+        videoStreamHeader.fccHandler = AviFile::MakeFourCc('V','P','8','0');
+    }
+#endif
+    // NOTE(review): this relies on AVISTREAMHEADER's constructor
+    // zero-initializing fccHandler — confirm in avi_file.h.
+    if (videoStreamHeader.fccHandler == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "InitAviWriting() Codec not supported");
+
+        return -1;
+    }
+    videoStreamHeader.dwScale                = 1;
+    videoStreamHeader.dwRate                 = videoCodecInst.maxFramerate;
+    videoStreamHeader.dwSuggestedBufferSize  = videoCodecInst.height *
+        (videoCodecInst.width >> 1) * 3;
+    videoStreamHeader.dwQuality              = (WebRtc_UWord32)-1;
+    videoStreamHeader.dwSampleSize           = 0;
+    videoStreamHeader.rcFrame.top            = 0;
+    videoStreamHeader.rcFrame.bottom         = videoCodecInst.height;
+    videoStreamHeader.rcFrame.left           = 0;
+    videoStreamHeader.rcFrame.right          = videoCodecInst.width;
+
+    // 12 bits per pixel matches the planar 4:2:0 layout used above for the
+    // suggested buffer size.
+    BITMAPINFOHEADER bitMapInfoHeader;
+    bitMapInfoHeader.biSize         = sizeof(BITMAPINFOHEADER);
+    bitMapInfoHeader.biHeight       = videoCodecInst.height;
+    bitMapInfoHeader.biWidth        = videoCodecInst.width;
+    bitMapInfoHeader.biPlanes       = 1;
+    bitMapInfoHeader.biBitCount     = 12;
+    bitMapInfoHeader.biClrImportant = 0;
+    bitMapInfoHeader.biClrUsed      = 0;
+    bitMapInfoHeader.biCompression  = videoStreamHeader.fccHandler;
+    bitMapInfoHeader.biSizeImage    = bitMapInfoHeader.biWidth *
+        bitMapInfoHeader.biHeight * bitMapInfoHeader.biBitCount / 8;
+
+    if (_aviOutFile->CreateVideoStream(
+        videoStreamHeader,
+        bitMapInfoHeader,
+        NULL,
+        0) != 0)
+    {
+        return -1;
+    }
+
+    if(!videoOnly)
+    {
+        AVISTREAMHEADER audioStreamHeader;
+        audioStreamHeader.fccType = AviFile::MakeFourCc('a', 'u', 'd', 's');
+        // fccHandler is the FOURCC of the codec for decoding the stream.
+        // It's an optional parameter that is not used by audio streams.
+        audioStreamHeader.fccHandler   = 0;
+        audioStreamHeader.dwScale      = 1;
+
+        WAVEFORMATEX waveFormatHeader;
+        waveFormatHeader.cbSize          = 0;
+        waveFormatHeader.nChannels       = 1;
+
+        if (strncmp(audioCodecInst.plname, "PCMU", 4) == 0)
+        {
+            audioStreamHeader.dwSampleSize = 1;
+            audioStreamHeader.dwRate       = 8000;
+            audioStreamHeader.dwQuality    = (WebRtc_UWord32)-1;
+            audioStreamHeader.dwSuggestedBufferSize = 80;
+
+            waveFormatHeader.nAvgBytesPerSec = 8000;
+            waveFormatHeader.nSamplesPerSec  = 8000;
+            waveFormatHeader.wBitsPerSample  = 8;
+            waveFormatHeader.nBlockAlign     = 1;
+            waveFormatHeader.wFormatTag      = kWaveFormatMuLaw;
+
+        } else if (strncmp(audioCodecInst.plname, "PCMA", 4) == 0)
+        {
+            audioStreamHeader.dwSampleSize = 1;
+            audioStreamHeader.dwRate       = 8000;
+            audioStreamHeader.dwQuality    = (WebRtc_UWord32)-1;
+            audioStreamHeader.dwSuggestedBufferSize = 80;
+
+            waveFormatHeader.nAvgBytesPerSec = 8000;
+            waveFormatHeader.nSamplesPerSec  = 8000;
+            waveFormatHeader.wBitsPerSample  = 8;
+            waveFormatHeader.nBlockAlign     = 1;
+            waveFormatHeader.wFormatTag      = kWaveFormatALaw;
+
+        } else if (strncmp(audioCodecInst.plname, "L16", 3) == 0)
+        {
+            audioStreamHeader.dwSampleSize = 2;
+            audioStreamHeader.dwRate       = audioCodecInst.plfreq;
+            audioStreamHeader.dwQuality    = (WebRtc_UWord32)-1;
+            audioStreamHeader.dwSuggestedBufferSize =
+                (audioCodecInst.plfreq/100) * 2;
+
+            waveFormatHeader.nAvgBytesPerSec = audioCodecInst.plfreq * 2;
+            waveFormatHeader.nSamplesPerSec  = audioCodecInst.plfreq;
+            waveFormatHeader.wBitsPerSample  = 16;
+            waveFormatHeader.nBlockAlign     = 2;
+            waveFormatHeader.wFormatTag      = kWaveFormatPcm;
+        } else
+        {
+            // Unsupported audio codec for AVI recording.
+            return -1;
+        }
+
+        if(_aviOutFile->CreateAudioStream(
+            audioStreamHeader,
+            waveFormatHeader) != 0)
+        {
+            return -1;
+        }
+
+
+        if( InitWavCodec(waveFormatHeader.nSamplesPerSec,
+            waveFormatHeader.nChannels,
+            waveFormatHeader.wBitsPerSample,
+            waveFormatHeader.wFormatTag) != 0)
+        {
+            return -1;
+        }
+    }
+    // NOTE(review): Create()'s return value is ignored — confirm it cannot
+    // fail, or that failure is detected on the first write.
+    _aviOutFile->Create(filename);
+    _writing = true;
+    return 0;
+}
+
+// Appends bufferLengthInBytes bytes of encoded audio from buffer to the AVI
+// file's audio stream. Returns the AviFile result on success, -1 if
+// InitAviWriting() has not created an output file.
+WebRtc_Word32 ModuleFileUtility::WriteAviAudioData(
+    const WebRtc_Word8* buffer,
+    WebRtc_UWord32 bufferLengthInBytes)
+{
+    if( _aviOutFile != 0)
+    {
+        return _aviOutFile->WriteAudio(
+            reinterpret_cast<const WebRtc_UWord8*>(buffer),
+            bufferLengthInBytes);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
+        return -1;
+    }
+}
+
+// Appends bufferLengthInBytes bytes of encoded video from buffer to the AVI
+// file's video stream. Returns the AviFile result on success, -1 if
+// InitAviWriting() has not created an output file.
+WebRtc_Word32 ModuleFileUtility::WriteAviVideoData(
+        const WebRtc_Word8* buffer,
+        WebRtc_UWord32 bufferLengthInBytes)
+{
+    if( _aviOutFile != 0)
+    {
+        return _aviOutFile->WriteVideo(
+            reinterpret_cast<const WebRtc_UWord8*>(buffer),
+            bufferLengthInBytes);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
+        return -1;
+    }
+}
+
+
+// Releases whichever AVI file objects match the current mode: the input
+// files when reading, the output file when writing. Always returns 0.
+// NOTE(review): if the corresponding mode flag was never set (e.g. an
+// Init*() that failed midway), the file objects are left for the destructor
+// — which does not delete _aviOutFile. Confirm against call sites.
+WebRtc_Word32 ModuleFileUtility::CloseAviFile( )
+{
+    if( _reading && _aviAudioInFile)
+    {
+        delete _aviAudioInFile;
+        _aviAudioInFile = 0;
+    }
+
+    if( _reading && _aviVideoInFile)
+    {
+        delete _aviVideoInFile;
+        _aviVideoInFile = 0;
+    }
+
+    if( _writing && _aviOutFile)
+    {
+        delete _aviOutFile;
+        _aviOutFile = 0;
+    }
+    return 0;
+}
+
+
// Open 'filename' for AVI playback and capture its stream parameters.
// videoOnly - if true, only the video stream is opened.
// loop      - passed to AviFile::Open; presumably enables looped playback
//             (TODO confirm against AviFile).
// Returns 0 on success, -1 on failure. On success _videoCodec (and, unless
// videoOnly, codec_info_ via InitWavCodec) describe the file's streams and
// _reading is set. Note: on failure the allocated AviFile objects are left
// behind with _reading == false; CloseAviFile() is expected to reclaim them.
WebRtc_Word32 ModuleFileUtility::InitAviReading(const WebRtc_Word8* filename,
                                                bool videoOnly, bool loop)
{
    _reading = false;
    // Replace any previously opened video reader.
    delete _aviVideoInFile;
    _aviVideoInFile = new AviFile( );

    // NOTE(review): the != 0 check is vacuous; operator new throws rather
    // than returning null here.
    if ((_aviVideoInFile != 0) && _aviVideoInFile->Open(AviFile::AVI_VIDEO,
                                                        filename, loop) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
                     "Unable to open AVI file (video)");
        return -1;
    }


    // Pull the video stream header and bitmap info to fill in _videoCodec.
    AVISTREAMHEADER videoInStreamHeader;
    BITMAPINFOHEADER bitmapInfo;
    char codecConfigParameters[AviFile::CODEC_CONFIG_LENGTH] = {};
    WebRtc_Word32 configLength = 0;
    if( _aviVideoInFile->GetVideoStreamInfo(videoInStreamHeader, bitmapInfo,
                                            codecConfigParameters,
                                            configLength) != 0)
    {
        return -1;
    }
    // Frame geometry comes from the stream's destination rectangle.
    _videoCodec.width = static_cast<WebRtc_UWord16>(
        videoInStreamHeader.rcFrame.right);
    _videoCodec.height = static_cast<WebRtc_UWord16>(
        videoInStreamHeader.rcFrame.bottom);
    // NOTE(review): dwRate alone is treated as fps; the AVI spec defines the
    // rate as dwRate/dwScale — this assumes dwScale == 1. TODO confirm.
    _videoCodec.maxFramerate = static_cast<WebRtc_UWord8>(
        videoInStreamHeader.dwRate);

    // Map the FourCC to one of the two supported payloads (I420 raw or VP8).
    const size_t plnameLen = sizeof(_videoCodec.plName) / sizeof(char);
    if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
    {
        strncpy(_videoCodec.plName, "I420", plnameLen);
       _videoCodec.codecType = kVideoCodecI420;
    }
    else if (bitmapInfo.biCompression ==
             AviFile::MakeFourCc('V', 'P', '8', '0'))
    {
        strncpy(_videoCodec.plName, "VP8", plnameLen);
        _videoCodec.codecType = kVideoCodecVP8;
    }
    else
    {
        // Any other compression FourCC is unsupported.
        return -1;
    }

    if(!videoOnly)
    {
        // Open a second AviFile instance on the same file for the audio
        // stream, and derive the audio codec settings from its WAVE header.
        delete _aviAudioInFile;
        _aviAudioInFile = new AviFile();

        if ( (_aviAudioInFile != 0) &&
            _aviAudioInFile->Open(AviFile::AVI_AUDIO, filename, loop) == -1)
        {
            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
                         "Unable to open AVI file (audio)");
            return -1;
        }

        WAVEFORMATEX waveHeader;
        if(_aviAudioInFile->GetAudioStreamInfo(waveHeader) != 0)
        {
            return -1;
        }
        if(InitWavCodec(waveHeader.nSamplesPerSec, waveHeader.nChannels,
                        waveHeader.wBitsPerSample, waveHeader.wFormatTag) != 0)
        {
            return -1;
        }
    }
    _reading = true;
    return 0;
}
+
// Read the next chunk of audio from the opened AVI file into outBuffer.
// bufferLengthInBytes - capacity of outBuffer; AviFile::ReadAudio updates
//                       'length' in place with the bytes actually delivered.
// Returns the number of bytes read, or -1 if no file is open or the read
// fails.
WebRtc_Word32 ModuleFileUtility::ReadAviAudioData(
    WebRtc_Word8*  outBuffer,
    const WebRtc_UWord32 bufferLengthInBytes)
{
    if(_aviAudioInFile == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
        return -1;
    }

    // 'length' is in/out: capacity going in, bytes delivered coming out.
    WebRtc_Word32 length = bufferLengthInBytes;
    if(_aviAudioInFile->ReadAudio(
        reinterpret_cast<WebRtc_UWord8*>(outBuffer),
        length) != 0)
    {
        return -1;
    }
    else
    {
        return length;
    }
}
+
+WebRtc_Word32 ModuleFileUtility::ReadAviVideoData(
+    WebRtc_Word8* outBuffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    if(_aviVideoInFile == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
+        return -1;
+    }
+
+    WebRtc_Word32 length = bufferLengthInBytes;
+    if( _aviVideoInFile->ReadVideo(
+        reinterpret_cast<WebRtc_UWord8*>(outBuffer),
+        length) != 0)
+    {
+        return -1;
+    } else {
+        return length;
+    }
+}
+
+WebRtc_Word32 ModuleFileUtility::VideoCodecInst(VideoCodec& codecInst)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+               "ModuleFileUtility::CodecInst(codecInst= 0x%x)", &codecInst);
+
+   if(!_reading)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "CodecInst: not currently reading audio file!");
+        return -1;
+    }
+    memcpy(&codecInst,&_videoCodec,sizeof(VideoCodec));
+    return 0;
+}
+#endif
+
// Parse the RIFF/WAVE header of 'wav', filling in _wavFormatObj, _dataSize
// and _readSizeBytes (bytes per 10 ms frame). Scans chunks until both the
// "fmt " and "data" chunks are found, skipping unknown chunks byte by byte.
// All multi-byte fields are converted from the file's little-endian layout
// to host order by hand so this also works on big-endian targets.
// Returns 0 on success, -1 on a malformed/truncated or unsupported file.
WebRtc_Word32 ModuleFileUtility::ReadWavHeader(InStream& wav)
{
    WAVE_RIFF_header RIFFheaderObj;
    WAVE_CHUNK_header CHUNKheaderObj;
    // TODO (hellner): tmpStr and tmpStr2 seems unnecessary here.
    // tmpStr holds a 4-char chunk id; initializing with "FOUR" guarantees
    // tmpStr[4] == '\0' so the strcmp() calls below are safe.
    WebRtc_Word8  tmpStr[6] = "FOUR";
    WebRtc_UWord8 tmpStr2[4];
    WebRtc_Word32 i, len;
    bool dataFound = false;
    bool fmtFound = false;
    WebRtc_Word8 dummyRead;


    _dataSize = 0;
    // The file must start with a complete RIFF header.
    len = wav.Read(&RIFFheaderObj, sizeof(WAVE_RIFF_header));
    if(len != sizeof(WAVE_RIFF_header))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Not a wave file (too short)");
        return -1;
    }

    // Verify the "RIFF" magic.
    for (i = 0; i < 4; i++)
    {
        tmpStr[i] = RIFFheaderObj.ckID[i];
    }
    if(strcmp(tmpStr, "RIFF") != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Not a wave file (does not have RIFF)");
        return -1;
    }
    // Verify the "WAVE" form type.
    for (i = 0; i < 4; i++)
    {
        tmpStr[i] = RIFFheaderObj.wave_ckID[i];
    }
    if(strcmp(tmpStr, "WAVE") != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Not a wave file (does not have WAVE)");
        return -1;
    }

    // Read the first sub-chunk header.
    len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));

    // WAVE files are stored in little endian byte order. Make sure that the
    // data can be read on big endian as well.
    // TODO (hellner): little endian to system byte order should be done in
    //                 in a subroutine.
    memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
    CHUNKheaderObj.fmt_ckSize =
        (WebRtc_Word32) ((WebRtc_UWord32) tmpStr2[0] +
                         (((WebRtc_UWord32)tmpStr2[1])<<8) +
                         (((WebRtc_UWord32)tmpStr2[2])<<16) +
                         (((WebRtc_UWord32)tmpStr2[3])<<24));

    memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);

    // Walk the chunk list until both "fmt " and "data" have been seen or the
    // stream runs out of complete chunk headers.
    while ((len == sizeof(WAVE_CHUNK_header)) && (!fmtFound || !dataFound))
    {
        if(strcmp(tmpStr, "fmt ") == 0)
        {
            // Format chunk: read the fixed-size part, then byte-swap each
            // field from little endian into _wavFormatObj.
            len = wav.Read(&_wavFormatObj, sizeof(WAVE_FMTINFO_header));

            memcpy(tmpStr2, &_wavFormatObj.formatTag, 2);
            _wavFormatObj.formatTag =
                (WaveFormats) ((WebRtc_UWord32)tmpStr2[0] +
                               (((WebRtc_UWord32)tmpStr2[1])<<8));
            memcpy(tmpStr2, &_wavFormatObj.nChannels, 2);
            _wavFormatObj.nChannels =
                (WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
                                 (((WebRtc_UWord32)tmpStr2[1])<<8));
            memcpy(tmpStr2, &_wavFormatObj.nSamplesPerSec, 4);
            _wavFormatObj.nSamplesPerSec =
                (WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
                                 (((WebRtc_UWord32)tmpStr2[1])<<8) +
                                 (((WebRtc_UWord32)tmpStr2[2])<<16) +
                                 (((WebRtc_UWord32)tmpStr2[3])<<24));
            memcpy(tmpStr2, &_wavFormatObj.nAvgBytesPerSec, 4);
            _wavFormatObj.nAvgBytesPerSec =
                (WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
                                 (((WebRtc_UWord32)tmpStr2[1])<<8) +
                                 (((WebRtc_UWord32)tmpStr2[2])<<16) +
                                 (((WebRtc_UWord32)tmpStr2[3])<<24));
            memcpy(tmpStr2, &_wavFormatObj.nBlockAlign, 2);
            _wavFormatObj.nBlockAlign =
                (WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
                                 (((WebRtc_UWord32)tmpStr2[1])<<8));
            memcpy(tmpStr2, &_wavFormatObj.nBitsPerSample, 2);
            _wavFormatObj.nBitsPerSample =
                (WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
                                 (((WebRtc_UWord32)tmpStr2[1])<<8));

            // Skip any extension bytes beyond the fixed-size format header
            // (e.g. cbSize and extra data of a WAVEFORMATEX).
            for (i = 0;
                 i < (CHUNKheaderObj.fmt_ckSize -
                      (WebRtc_Word32)sizeof(WAVE_FMTINFO_header));
                 i++)
            {
                len = wav.Read(&dummyRead, 1);
                if(len != 1)
                {
                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                                 "File corrupted, reached EOF (reading fmt)");
                    return -1;
                }
            }
            fmtFound = true;
        }
        else if(strcmp(tmpStr, "data") == 0)
        {
            // Data chunk: remember the payload size and stop scanning. The
            // stream is now positioned at the first audio byte.
            _dataSize = CHUNKheaderObj.fmt_ckSize;
            dataFound = true;
            break;
        }
        else
        {
            // Unknown chunk: skip its payload one byte at a time.
            for (i = 0; i < (CHUNKheaderObj.fmt_ckSize); i++)
            {
                len = wav.Read(&dummyRead, 1);
                if(len != 1)
                {
                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                                 "File corrupted, reached EOF (reading other)");
                    return -1;
                }
            }
        }

        // Read the next chunk header and convert its size field.
        len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));

        memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
        CHUNKheaderObj.fmt_ckSize =
            (WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
                             (((WebRtc_UWord32)tmpStr2[1])<<8) +
                             (((WebRtc_UWord32)tmpStr2[2])<<16) +
                             (((WebRtc_UWord32)tmpStr2[3])<<24));

        memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
    }

    // Either a proper format chunk has been read or a data chunk was come
    // across.
    // NOTE(review): if the loop exits without ever seeing a "fmt " chunk,
    // _wavFormatObj below holds stale/uninitialized values — the checks may
    // pass or fail arbitrarily. Confirm whether fmtFound should be required.
    if( (_wavFormatObj.formatTag != kWaveFormatPcm) &&
        (_wavFormatObj.formatTag != kWaveFormatALaw) &&
        (_wavFormatObj.formatTag != kWaveFormatMuLaw))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Coding formatTag value=%d not supported!",
                     _wavFormatObj.formatTag);
        return -1;
    }
    if((_wavFormatObj.nChannels < 1) ||
        (_wavFormatObj.nChannels > 2))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "nChannels value=%d not supported!",
                     _wavFormatObj.nChannels);
        return -1;
    }

    if((_wavFormatObj.nBitsPerSample != 8) &&
        (_wavFormatObj.nBitsPerSample != 16))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "nBitsPerSample value=%d not supported!",
                     _wavFormatObj.nBitsPerSample);
        return -1;
    }

    // Calculate the number of bytes that 10 ms of audio data correspond to.
    if(_wavFormatObj.formatTag == kWaveFormatPcm)
    {
        // TODO (hellner): integer division for 22050 and 11025 would yield
        //                 the same result as the else statement. Remove those
        //                 special cases?
        // 44100 Hz is rounded down to 440 samples per 10 ms (not 441).
        if(_wavFormatObj.nSamplesPerSec == 44100)
        {
            _readSizeBytes = 440 * _wavFormatObj.nChannels *
                (_wavFormatObj.nBitsPerSample / 8);
        } else if(_wavFormatObj.nSamplesPerSec == 22050) {
            _readSizeBytes = 220 * _wavFormatObj.nChannels *
                (_wavFormatObj.nBitsPerSample / 8);
        } else if(_wavFormatObj.nSamplesPerSec == 11025) {
            _readSizeBytes = 110 * _wavFormatObj.nChannels *
                (_wavFormatObj.nBitsPerSample / 8);
        } else {
            _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
              _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
        }

    } else {
        // A-law / mu-law: one byte per sample per channel.
        _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
            _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
    }
    return 0;
}
+
// Fill in codec_info_ and _codecId from raw WAV format parameters.
// samplesPerSec - sampling rate in Hz.
// channels      - number of channels.
// bitsPerSample - bits per sample per channel.
// formatTag     - WAVE format tag (kWaveFormatPcm/ALaw/MuLaw).
// Returns 0 on success, -1 for an unsupported format tag or PCM frequency.
WebRtc_Word32 ModuleFileUtility::InitWavCodec(WebRtc_UWord32 samplesPerSec,
                                              WebRtc_UWord32 channels,
                                              WebRtc_UWord32 bitsPerSample,
                                              WebRtc_UWord32 formatTag)
{
    codec_info_.pltype   = -1;
    codec_info_.plfreq   = samplesPerSec;
    codec_info_.channels = channels;
    codec_info_.rate     = bitsPerSample * samplesPerSec;

    // Calculate the packet size for 10ms frames
    switch(formatTag)
    {
    case kWaveFormatALaw:
        // G.711 A-law, static RTP payload type 8.
        strcpy(codec_info_.plname, "PCMA");
        _codecId = kCodecPcma;
        codec_info_.pltype = 8;
        codec_info_.pacsize  = codec_info_.plfreq / 100;
        break;
    case kWaveFormatMuLaw:
        // G.711 mu-law, static RTP payload type 0.
        strcpy(codec_info_.plname, "PCMU");
        _codecId = kCodecPcmu;
        codec_info_.pltype = 0;
        codec_info_.pacsize  = codec_info_.plfreq / 100;
         break;
    case kWaveFormatPcm:
        // NOTE(review): this default is in *bytes* per 10 ms, while the
        // G.711 cases above use samples per 10 ms — confirm which unit
        // consumers of pacsize expect.
        codec_info_.pacsize  = (bitsPerSample * (codec_info_.plfreq / 100)) / 8;
        if(samplesPerSec == 8000)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_8Khz;
        }
        else if(samplesPerSec == 16000)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_16kHz;
        }
        else if(samplesPerSec == 32000)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_32Khz;
        }
        // Set the packet size for "odd" sampling frequencies so that it
        // properly corresponds to _readSizeBytes.
        // NOTE(review): 11025/22050/44100/48000 all reuse kCodecL16_16kHz
        // and round plfreq down to a multiple of 1000 — presumably a
        // deliberate approximation; confirm before changing.
        else if(samplesPerSec == 11025)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_16kHz;
            codec_info_.pacsize = 110;
            codec_info_.plfreq = 11000;
        }
        else if(samplesPerSec == 22050)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_16kHz;
            codec_info_.pacsize = 220;
            codec_info_.plfreq = 22000;
        }
        else if(samplesPerSec == 44100)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_16kHz;
            codec_info_.pacsize = 440;
            codec_info_.plfreq = 44000;
        }
        else if(samplesPerSec == 48000)
        {
            strcpy(codec_info_.plname, "L16");
            _codecId = kCodecL16_16kHz;
            codec_info_.pacsize = 480;
            codec_info_.plfreq = 48000;
        }
        else
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "Unsupported PCM frequency!");
            return -1;
        }
        break;
        default:
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "unknown WAV format TAG!");
            return -1;
            break;
    }
    return 0;
}
+
// Prepare 'wav' for playback: parse its header, skip to the 'start'
// position (in ms, consumed in 10 ms frames) and derive codec_info_.
// start - playback start position in ms (0 = beginning of the data chunk).
// stop  - playback stop position in ms (0 = play to end of file).
// Returns 0 on success, -1 on a bad header, a too-large frame size, or EOF
// before 'start' is reached. Sets _reading on success.
WebRtc_Word32 ModuleFileUtility::InitWavReading(InStream& wav,
                                                const WebRtc_UWord32 start,
                                                const WebRtc_UWord32 stop)
{

    _reading = false;

    // Parse the RIFF/WAVE header; fills _wavFormatObj and _readSizeBytes.
    if(ReadWavHeader(wav) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "failed to read WAV header!");
        return -1;
    }

    _playoutPositionMs = 0;
    _readPos = 0;

    if(start > 0)
    {
        // Consume whole 10 ms frames until the start position is reached.
        WebRtc_UWord8 dummy[WAV_MAX_BUFFER_SIZE];
        WebRtc_Word32 readLength;
        if(_readSizeBytes <= WAV_MAX_BUFFER_SIZE)
        {
            while (_playoutPositionMs < start)
            {
                readLength = wav.Read(dummy, _readSizeBytes);
                if(readLength == _readSizeBytes)
                {
                    _readPos += readLength;
                    _playoutPositionMs += 10;
                }
                else // Must have reached EOF before start position!
                {
                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                       "InitWavReading(), EOF before start position");
                    return -1;
                }
            }
        }
        else
        {
            // 10 ms frame does not fit the scratch buffer; give up.
            return -1;
        }
    }
    // Map the parsed header onto codec_info_ / _codecId.
    if( InitWavCodec(_wavFormatObj.nSamplesPerSec, _wavFormatObj.nChannels,
                     _wavFormatObj.nBitsPerSample,
                     _wavFormatObj.formatTag) != 0)
    {
        return -1;
    }
    _bytesPerSample = _wavFormatObj.nBitsPerSample / 8;


    _startPointInMs = start;
    _stopPointInMs = stop;
    _reading = true;
    return 0;
}
+
// Read one 10 ms frame from 'wav' and deliver it as mono. If the file is
// stereo, the left/right samples are averaged into _tempData first and then
// copied to outData; mono files are read straight into outData.
// Returns the number of bytes written to outData, 0 at (non-looping) end of
// file, or -1 on error.
WebRtc_Word32 ModuleFileUtility::ReadWavDataAsMono(
    InStream& wav,
    WebRtc_Word8* outData,
    const WebRtc_UWord32 bufferSize)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d,\
 bufSize= %ld)",
        &wav,
        outData,
        bufferSize);

    // The number of bytes that should be read from file.
    const WebRtc_UWord32 totalBytesNeeded = _readSizeBytes;
    // The number of bytes that will be written to outData.
    // For stereo input only half the file bytes survive the downmix.
    const WebRtc_UWord32 bytesRequested = (codec_info_.channels == 2) ?
        totalBytesNeeded >> 1 : totalBytesNeeded;
    if(bufferSize < bytesRequested)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsMono: output buffer is too short!");
        return -1;
    }
    if(outData == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsMono: output buffer NULL!");
        return -1;
    }

    if(!_reading)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsMono: no longer reading file.");
        return -1;
    }

    // Stereo goes via _tempData (needs downmixing); mono is read in place.
    WebRtc_Word32 bytesRead = ReadWavData(
        wav,
        (codec_info_.channels == 2) ? _tempData : (WebRtc_UWord8*)outData,
        totalBytesNeeded);
    if(bytesRead == 0)
    {
        // End of a non-looping file.
        return 0;
    }
    if(bytesRead < 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsMono: failed to read data from WAV file.");
        return -1;
    }
    // Output data is should be mono.
    if(codec_info_.channels == 2)
    {
        // Downmix in place: _tempData[i] (mono) is written while reading
        // _tempData[2i]/_tempData[2i+1] (stereo); safe because i <= 2i.
        for (WebRtc_UWord32 i = 0; i < bytesRequested / _bytesPerSample; i++)
        {
            // Sample value is the average of left and right buffer rounded to
            // closest integer value. Note samples can be either 1 or 2 byte.
            if(_bytesPerSample == 1)
            {
                _tempData[i] = ((_tempData[2 * i] + _tempData[(2 * i) + 1] +
                                 1) >> 1);
            }
            else
            {
                WebRtc_Word16* sampleData = (WebRtc_Word16*) _tempData;
                sampleData[i] = ((sampleData[2 * i] + sampleData[(2 * i) + 1] +
                                  1) >> 1);
            }
        }
        memcpy(outData, _tempData, bytesRequested);
    }
    return bytesRequested;
}
+
// Read one 10 ms frame from a stereo WAV file and de-interleave it into
// separate left and right buffers.
// bufferSize - capacity of EACH output buffer in bytes.
// Returns the number of bytes written to each buffer, or -1 on error (null
// buffers, non-stereo file, not reading, or a failed read).
WebRtc_Word32 ModuleFileUtility::ReadWavDataAsStereo(
    InStream& wav,
    WebRtc_Word8* outDataLeft,
    WebRtc_Word8* outDataRight,
    const WebRtc_UWord32 bufferSize)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x,\
 outRight= 0x%x, bufSize= %ld)",
        &wav,
        outDataLeft,
        outDataRight,
        bufferSize);

    if((outDataLeft == NULL) ||
       (outDataRight == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsMono: an input buffer is NULL!");
        return -1;
    }
    if(codec_info_.channels != 2)
    {
        WEBRTC_TRACE(
            kTraceError,
            kTraceFile,
            _id,
            "ReadWavDataAsStereo: WAV file does not contain stereo data!");
        return -1;
    }
    if(! _reading)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsStereo: no longer reading file.");
        return -1;
    }

    // The number of bytes that should be read from file.
    const WebRtc_UWord32 totalBytesNeeded = _readSizeBytes;
    // The number of bytes that will be written to the left and the right
    // buffers.
    const WebRtc_UWord32 bytesRequested = totalBytesNeeded >> 1;
    if(bufferSize < bytesRequested)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavData: Output buffers are too short!");
        assert(false);
        return -1;
    }

    // Read the interleaved frame into the scratch buffer.
    WebRtc_Word32 bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
    if(bytesRead <= 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsStereo: failed to read data from WAV file.");
        return -1;
    }

    // Turn interleaved audio to left and right buffer. Note samples can be
    // either 1 or 2 bytes
    if(_bytesPerSample == 1)
    {
        for (WebRtc_UWord32 i = 0; i < bytesRequested; i++)
        {
            outDataLeft[i]  = _tempData[2 * i];
            outDataRight[i] = _tempData[(2 * i) + 1];
        }
    }
    else if(_bytesPerSample == 2)
    {
        WebRtc_Word16* sampleData = reinterpret_cast<WebRtc_Word16*>(_tempData);
        WebRtc_Word16* outLeft = reinterpret_cast<WebRtc_Word16*>(outDataLeft);
        WebRtc_Word16* outRight = reinterpret_cast<WebRtc_Word16*>(
            outDataRight);

        // Bytes requested to samples requested.
        WebRtc_UWord32 sampleCount = bytesRequested >> 1;
        for (WebRtc_UWord32 i = 0; i < sampleCount; i++)
        {
            outLeft[i] = sampleData[2 * i];
            outRight[i] = sampleData[(2 * i) + 1];
        }
    } else {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                   "ReadWavStereoData: unsupported sample size %d!",
                   _bytesPerSample);
        assert(false);
        return -1;
    }
    return bytesRequested;
}
+
// Low-level WAV read: fetch 'dataLengthInBytes' from 'wav' into 'buffer',
// transparently rewinding and re-initializing the stream when the data
// chunk is exhausted or the stop position is passed (looped playback).
// Returns the number of bytes read, 0 when the file ends and cannot be
// rewound (non-looping stream), or -1 on error. Updates _readPos and
// advances _playoutPositionMs by 10 ms per call.
WebRtc_Word32 ModuleFileUtility::ReadWavData(
    InStream& wav,
    WebRtc_UWord8* buffer,
    const WebRtc_UWord32 dataLengthInBytes)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::ReadWavData(wav= 0x%x, buffer= 0x%x, dataLen= %ld)",
        &wav,
        buffer,
        dataLengthInBytes);


    if(buffer == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsMono: output buffer NULL!");
        return -1;
    }

    // Make sure that a read won't return too few samples.
    // TODO (hellner): why not read the remaining bytes needed from the start
    //                 of the file?
    if((_dataSize - _readPos) < (WebRtc_Word32)dataLengthInBytes)
    {
        // Rewind() being -1 may be due to the file not supposed to be looped.
        if(wav.Rewind() == -1)
        {
            _reading = false;
            return 0;
        }
        // Re-parse the header and seek back to the start position.
        if(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1)
        {
            _reading = false;
            return -1;
        }
    }

    WebRtc_Word32 bytesRead = wav.Read(buffer, dataLengthInBytes);
    if(bytesRead < 0)
    {
        _reading = false;
        return -1;
    }

    // This should never happen due to earlier sanity checks.
    // TODO (hellner): change to an assert and fail here since this should
    //                 never happen...
    if(bytesRead < (WebRtc_Word32)dataLengthInBytes)
    {
        // Short read despite the size check above: retry once from the top
        // of the file.
        if((wav.Rewind() == -1) ||
            (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
        {
            _reading = false;
            return -1;
        }
        else
        {
            bytesRead = wav.Read(buffer, dataLengthInBytes);
            if(bytesRead < (WebRtc_Word32)dataLengthInBytes)
            {
                _reading = false;
                return -1;
            }
        }
    }

    _readPos += bytesRead;

    // TODO (hellner): Why is dataLengthInBytes let dictate the number of bytes
    //                 to read when exactly 10ms should be read?!
    _playoutPositionMs += 10;
    // Past the stop point: loop back to the start point (or stop reading if
    // the rewind fails).
    if((_stopPointInMs > 0) &&
        (_playoutPositionMs >= _stopPointInMs))
    {
        if((wav.Rewind() == -1) ||
            (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
        {
            _reading = false;
        }
    }
    return bytesRead;
}
+
+WebRtc_Word32 ModuleFileUtility::InitWavWriting(OutStream& wav,
+                                                const CodecInst& codecInst)
+{
+
+    if(set_codec_info(codecInst) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "codecInst identifies unsupported codec!");
+        return -1;
+    }
+    _writing = false;
+    WebRtc_UWord32 channels = (codecInst.channels == 0) ?
+        1 : codecInst.channels;
+
+    if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
+    {
+        _bytesPerSample = 1;
+        if(WriteWavHeader(wav, 8000, _bytesPerSample, channels,
+                          kWaveFormatMuLaw, 0) == -1)
+        {
+            return -1;
+        }
+    }else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
+    {
+        _bytesPerSample = 1;
+        if(WriteWavHeader(wav, 8000, _bytesPerSample, channels, kWaveFormatALaw,
+                          0) == -1)
+        {
+            return -1;
+        }
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
+    {
+        _bytesPerSample = 2;
+        if(WriteWavHeader(wav, codecInst.plfreq, _bytesPerSample, channels,
+                          kWaveFormatPcm, 0) == -1)
+        {
+            return -1;
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                   "codecInst identifies unsupported codec for WAV file!");
+        return -1;
+    }
+    _writing = true;
+    _bytesWritten = 0;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::WriteWavData(OutStream& out,
+                                              const WebRtc_Word8*  buffer,
+                                              const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "WriteWavData: input buffer NULL!");
+        return -1;
+    }
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+    _bytesWritten += dataLength;
+    return dataLength;
+}
+
+
+WebRtc_Word32 ModuleFileUtility::WriteWavHeader(
+    OutStream& wav,
+    const WebRtc_UWord32 freq,
+    const WebRtc_UWord32 bytesPerSample,
+    const WebRtc_UWord32 channels,
+    const WebRtc_UWord32 format,
+    const WebRtc_UWord32 lengthInBytes)
+{
+
+    // Frame size in bytes for 10 ms of audio.
+    // TODO (hellner): 44.1 kHz has 440 samples frame size. Doesn't seem to
+    //                 be taken into consideration here!
+    WebRtc_Word32 frameSize = (freq / 100) * bytesPerSample * channels;
+
+    // Calculate the number of full frames that the wave file contain.
+    const WebRtc_Word32 dataLengthInBytes = frameSize *
+        (lengthInBytes / frameSize);
+
+    WebRtc_Word8 tmpStr[4];
+    WebRtc_Word8 tmpChar;
+    WebRtc_UWord32 tmpLong;
+
+    memcpy(tmpStr, "RIFF", 4);
+    wav.Write(tmpStr, 4);
+
+    tmpLong = dataLengthInBytes + 36;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    memcpy(tmpStr, "WAVE", 4);
+    wav.Write(tmpStr, 4);
+
+    memcpy(tmpStr, "fmt ", 4);
+    wav.Write(tmpStr, 4);
+
+    tmpChar = 16;
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpChar = (WebRtc_Word8)(format);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpChar = (WebRtc_Word8)(channels);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpLong = freq;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    // nAverageBytesPerSec = Sample rate * Bytes per sample * Channels
+    tmpLong = bytesPerSample * freq * channels;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    // nBlockAlign = Bytes per sample * Channels
+    tmpChar = (WebRtc_Word8)(bytesPerSample * channels);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpChar = (WebRtc_Word8)(bytesPerSample*8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    memcpy(tmpStr, "data", 4);
+    wav.Write(tmpStr, 4);
+
+    tmpLong = dataLengthInBytes;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::UpdateWavHeader(OutStream& wav)
+{
+    WebRtc_Word32 res = -1;
+    if(wav.Rewind() == -1)
+    {
+        return -1;
+    }
+    WebRtc_UWord32 channels = (codec_info_.channels == 0) ?
+        1 : codec_info_.channels;
+
+    if(STR_CASE_CMP(codec_info_.plname, "L16") == 0)
+    {
+        res = WriteWavHeader(wav, codec_info_.plfreq, 2, channels,
+                             kWaveFormatPcm, _bytesWritten);
+    } else if(STR_CASE_CMP(codec_info_.plname, "PCMU") == 0) {
+            res = WriteWavHeader(wav, 8000, 1, channels, kWaveFormatMuLaw,
+                                 _bytesWritten);
+    } else if(STR_CASE_CMP(codec_info_.plname, "PCMA") == 0) {
+            res = WriteWavHeader(wav, 8000, 1, channels, kWaveFormatALaw,
+                                 _bytesWritten);
+    } else {
+        // Allow calling this API even if not writing to a WAVE file.
+        // TODO (hellner): why?!
+        return 0;
+    }
+    return res;
+}
+
+
+WebRtc_Word32 ModuleFileUtility::InitPreEncodedReading(InStream& in,
+                                                       const CodecInst& cinst)
+{
+
+    WebRtc_UWord8 preEncodedID;
+    in.Read(&preEncodedID, 1);
+
+    MediaFileUtility_CodecType codecType =
+        (MediaFileUtility_CodecType)preEncodedID;
+
+    if(set_codec_info(cinst) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Pre-encoded file send codec mismatch!");
+        return -1;
+    }
+    if(codecType != _codecId)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Pre-encoded file format codec mismatch!");
+        return -1;
+    }
+    memcpy(&codec_info_,&cinst,sizeof(CodecInst));
+    _reading = true;
+    return 0;
+}
+
+// Reads one pre-encoded frame from `in` into `outData`. Each frame is stored
+// as a two byte little-endian length followed by the payload. At EOF the
+// stream is rewound and reading restarts after the one byte codec id written
+// by InitPreEncodedWriting(). Returns the number of payload bytes read, or
+// -1 on failure.
+WebRtc_Word32 ModuleFileUtility::ReadPreEncodedData(
+    InStream& in,
+    WebRtc_Word8* outData,
+    const WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x,\
+ bufferSize= %d)",
+        &in,
+        outData,
+        bufferSize);
+
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "output buffer NULL");
+        // BUG FIX: previously only logged, then dereferenced NULL below.
+        return -1;
+    }
+
+    WebRtc_UWord32 frameLen;
+    WebRtc_UWord8 buf[64];
+    // Each frame has a two byte header containing the frame length.
+    WebRtc_Word32 res = in.Read(buf, 2);
+    if(res != 2)
+    {
+        // EOF: loop back to the beginning of the file.
+        if(!in.Rewind())
+        {
+            // The first byte is the codec identifier.
+            in.Read(buf, 1);
+            res = in.Read(buf, 2);
+        }
+        else
+        {
+            return -1;
+        }
+    }
+    // The frame length is stored little-endian.
+    frameLen = buf[0] + buf[1] * 256;
+    if(bufferSize < frameLen)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "buffer not large enough to read %d bytes of pre-encoded data!",
+            frameLen);
+        return -1;
+    }
+    return in.Read(outData, frameLen);
+}
+
+// Prepares `out` for writing a pre-encoded audio file in the format of
+// `codecInst`. Writes the one byte codec id header consumed by
+// InitPreEncodedReading(). Returns 0 on success, -1 on an unrecognized
+// codec or a failed write.
+WebRtc_Word32 ModuleFileUtility::InitPreEncodedWriting(
+    OutStream& out,
+    const CodecInst& codecInst)
+{
+    if(set_codec_info(codecInst) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "CodecInst not recognized!");
+        return -1;
+    }
+    // BUG FIX: the Write() result used to be ignored and _writing set
+    // unconditionally; fail if the codec id header cannot be written.
+    if(!out.Write(&_codecId, 1))
+    {
+        return -1;
+    }
+    _writing = true;
+    _bytesWritten = 1;
+    return 0;
+}
+
+// Appends one pre-encoded frame to `out`: a two byte little-endian length
+// header followed by `dataLength` payload bytes. Returns the total number of
+// bytes written (header + payload), or -1 on failure.
+WebRtc_Word32 ModuleFileUtility::WritePreEncodedData(
+    OutStream& out,
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x,\
+ dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
+        // BUG FIX: previously only logged, then passed NULL to Write().
+        return -1;
+    }
+
+    WebRtc_Word32 bytesWritten = 0;
+    // The first two bytes is the size of the frame.
+    // BUG FIX: the length used to be written as a raw host-endian
+    // WebRtc_Word16, but ReadPreEncodedData() parses it little-endian
+    // (buf[0] + buf[1]*256). Serialize explicitly so the format is correct
+    // on big-endian hosts too (identical bytes on little-endian hosts).
+    WebRtc_UWord8 lengthBuf[2];
+    lengthBuf[0] = (WebRtc_UWord8)(dataLength & 0xFF);
+    lengthBuf[1] = (WebRtc_UWord8)((dataLength >> 8) & 0xFF);
+    if(!out.Write(lengthBuf, 2))
+    {
+       return -1;
+    }
+    bytesWritten = 2;
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+    bytesWritten += dataLength;
+    return bytesWritten;
+}
+
+// Prepares `in` for reading a compressed (AMR, AMR-WB or iLBC) audio file.
+// The codec is detected from the file's one-line magic header (e.g.
+// "#!AMR\n"); whole frames are then skipped until the `start` position (ms)
+// is reached. `stop` (ms) is stored and honoured later by
+// ReadCompressedData(). Returns 0 on success, -1 on an unknown header or
+// premature EOF.
+WebRtc_Word32 ModuleFileUtility::InitCompressedReading(
+    InStream& in,
+    const WebRtc_UWord32 start,
+    const WebRtc_UWord32 stop)
+{
+    WEBRTC_TRACE(
+        kTraceDebug,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::InitCompressedReading(in= 0x%x, start= %d,\
+ stop= %d)",
+        &in,
+        start,
+        stop);
+
+#if defined(WEBRTC_CODEC_GSMAMR) || defined(WEBRTC_CODEC_GSMAMRWB) || \
+    defined(WEBRTC_CODEC_ILBC)
+    WebRtc_Word16 read_len = 0;
+#endif
+    // Reset playout state; _reading stays false until a codec is detected.
+    _codecId = kCodecNoCodec;
+    _playoutPositionMs = 0;
+    _reading = false;
+
+    _startPointInMs = start;
+    _stopPointInMs = stop;
+
+    // Payload bytes per frame, indexed by the codec mode (frame type).
+#ifdef WEBRTC_CODEC_GSMAMR
+    WebRtc_Word32 AMRmode2bytes[9]={12,13,15,17,19,20,26,31,5};
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    WebRtc_Word32 AMRWBmode2bytes[10]={17,23,32,36,40,46,50,58,60,6};
+#endif
+
+    // Read the codec name
+    // Reads one byte at a time until newline, at most 64 bytes.
+    // NOTE(review): the Read() return value is not checked, so at EOF the
+    // loop keeps appending stale bytes until cnt hits 64 — TODO confirm
+    // InStream::Read() behaviour at EOF.
+    WebRtc_Word32 cnt = 0;
+    WebRtc_Word8 buf[64];
+    do
+    {
+        in.Read(&buf[cnt++], 1);
+    } while ((buf[cnt-1] != '\n') && (64 > cnt));
+
+    if(cnt==64)
+    {
+        // No newline within 64 bytes: not a recognizable header.
+        return -1;
+    } else {
+        // NUL-terminate so the header can be strcmp()'d below.
+        buf[cnt]=0;
+    }
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(!strcmp("#!AMR\n", buf))
+    {
+        strcpy(codec_info_.plname, "amr");
+        codec_info_.pacsize = 160;
+        _codecId = kCodecAmr;
+        codec_info_.pltype = 112;
+        codec_info_.rate = 12200;
+        codec_info_.plfreq = 8000;
+        codec_info_.channels = 1;
+
+        WebRtc_Word16 mode = 0;
+        if(_startPointInMs > 0)
+        {
+            // Skip whole 20 ms frames until the start position is reached.
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                // First read byte contain the AMR mode.
+                read_len = in.Read(buf, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                // The frame type ("mode") is stored in bits 3..6.
+                mode = (buf[0]>>3)&0xF;
+                // Valid AMR modes are 0..8; 15 is a no-data frame.
+                if((mode < 0) || (mode > 8))
+                {
+                    if(mode != 15)
+                    {
+                        return -1;
+                    }
+                }
+                if(mode != 15)
+                {
+                    // Skip the payload belonging to this mode.
+                    read_len = in.Read(&buf[1], AMRmode2bytes[mode]);
+                    if(read_len != AMRmode2bytes[mode])
+                    {
+                        return -1;
+                    }
+                }
+                _playoutPositionMs += 20;
+            }
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(!strcmp("#!AMRWB\n", buf))
+    {
+        strcpy(codec_info_.plname, "amr-wb");
+        codec_info_.pacsize = 320;
+        _codecId = kCodecAmrWb;
+        codec_info_.pltype = 120;
+        codec_info_.rate = 20000;
+        codec_info_.plfreq = 16000;
+        codec_info_.channels = 1;
+
+        WebRtc_Word16 mode = 0;
+        if(_startPointInMs > 0)
+        {
+            // Skip whole 20 ms frames until the start position is reached.
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                // First read byte contain the AMR mode.
+                read_len = in.Read(buf, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                mode = (buf[0]>>3)&0xF;
+                // Valid AMR-WB modes are 0..9; 15 is a no-data frame.
+                if((mode < 0) || (mode > 9))
+                {
+                    if(mode != 15)
+                    {
+                        return -1;
+                    }
+                }
+                if(mode != 15)
+                {
+                    read_len = in.Read(&buf[1], AMRWBmode2bytes[mode]);
+                    if(read_len != AMRWBmode2bytes[mode])
+                    {
+                        return -1;
+                    }
+                }
+                _playoutPositionMs += 20;
+            }
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if(!strcmp("#!iLBC20\n", buf))
+    {
+        codec_info_.pltype = 102;
+        strcpy(codec_info_.plname, "ilbc");
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 160;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 13300;
+        _codecId = kCodecIlbc20Ms;
+
+        if(_startPointInMs > 0)
+        {
+            // iLBC 20 ms frames are a fixed 38 bytes each.
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                read_len = in.Read(buf, 38);
+                if(read_len == 38)
+                {
+                    _playoutPositionMs += 20;
+                }
+                else
+                {
+                    return -1;
+                }
+            }
+        }
+    }
+
+    if(!strcmp("#!iLBC30\n", buf))
+    {
+        codec_info_.pltype = 102;
+        strcpy(codec_info_.plname, "ilbc");
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 240;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 13300;
+        _codecId = kCodecIlbc30Ms;
+
+        if(_startPointInMs > 0)
+        {
+            // iLBC 30 ms frames are a fixed 50 bytes each.
+            // NOTE(review): the position is advanced by 20 ms per 30 ms
+            // frame here — looks inconsistent with the 30 ms frame length;
+            // TODO confirm.
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                read_len = in.Read(buf, 50);
+                if(read_len == 50)
+                {
+                    _playoutPositionMs += 20;
+                }
+                else
+                {
+                    return -1;
+                }
+            }
+        }
+    }
+#endif
+    // No magic header matched (or no codec compiled in).
+    if(_codecId == kCodecNoCodec)
+    {
+        return -1;
+    }
+    _reading = true;
+    return 0;
+}
+
+// Reads one encoded frame (AMR, AMR-WB or iLBC, per the codec selected by
+// InitCompressedReading()) from `in` into `outData`. At EOF, or once the
+// stop position is passed, the stream is rewound to the configured start
+// position. Returns the number of bytes read, or -1 on failure.
+WebRtc_Word32 ModuleFileUtility::ReadCompressedData(InStream& in,
+                                                    WebRtc_Word8* outData,
+                                                    WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x,\
+ bytes=%ld)",
+        &in,
+        outData,
+        bufferSize);
+
+    // Payload bytes per frame, indexed by the codec mode (frame type).
+#ifdef WEBRTC_CODEC_GSMAMR
+    WebRtc_UWord32 AMRmode2bytes[9]={12,13,15,17,19,20,26,31,5};
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    WebRtc_UWord32 AMRWBmode2bytes[10]={17,23,32,36,40,46,50,58,60,6};
+#endif
+    WebRtc_UWord32 bytesRead = 0;
+
+    if(! _reading)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "not currently reading!");
+        return -1;
+    }
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(_codecId == kCodecAmr)
+    {
+        // The first byte of each frame carries the mode (frame type).
+        WebRtc_Word32 res = in.Read(outData, 1);
+        if(res != 1)
+        {
+            // EOF: rewind and restart from the configured start position.
+            if(!in.Rewind())
+            {
+                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+                res = in.Read(outData, 1);
+                if(res != 1)
+                {
+                    _reading = false;
+                    return -1;
+                }
+            }
+            else
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+        const WebRtc_Word16 mode = (outData[0]>>3)&0xF;
+        // Valid AMR modes are 0..8; 15 is a no-data frame.
+        if((mode < 0) ||
+           (mode > 8))
+        {
+            if(mode != 15)
+            {
+                return -1;
+            }
+        }
+        if(mode != 15)
+        {
+            if(bufferSize < AMRmode2bytes[mode] + 1)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceFile,
+                    _id,
+                    "output buffer is too short to read AMR compressed data.");
+                assert(false);
+                return -1;
+            }
+            bytesRead = in.Read(&outData[1], AMRmode2bytes[mode]);
+            if(bytesRead != AMRmode2bytes[mode])
+            {
+                _reading = false;
+                return -1;
+            }
+            // Count the mode byte to bytes read.
+            bytesRead++;
+        }
+        else
+        {
+            // No-data frame: only the mode byte was consumed.
+            bytesRead = 1;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(_codecId == kCodecAmrWb)
+    {
+        WebRtc_Word32 res = in.Read(outData, 1);
+        if(res != 1)
+        {
+            if(!in.Rewind())
+            {
+                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+                res = in.Read(outData, 1);
+                if(res != 1)
+                {
+                    _reading = false;
+                    return -1;
+                }
+            }
+            else
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+        const WebRtc_Word16 mode = (outData[0]>>3)&0xF;
+        // Valid AMR-WB modes are 0..9; 15 is a no-data frame.
+        // BUG FIX: the bound used to be 8, which wrongly rejected mode 9
+        // (AMRWBmode2bytes has 10 entries, and InitCompressedReading()
+        // accepts modes up to 9).
+        if((mode < 0) ||
+           (mode > 9))
+        {
+            if(mode != 15)
+            {
+                return -1;
+            }
+        }
+        if(mode != 15)
+        {
+            if(bufferSize < AMRWBmode2bytes[mode] + 1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                           "output buffer is too short to read AMRWB\
+ compressed.");
+                assert(false);
+                return -1;
+            }
+            bytesRead = in.Read(&outData[1], AMRWBmode2bytes[mode]);
+            if(bytesRead != AMRWBmode2bytes[mode])
+            {
+                _reading = false;
+                return -1;
+            }
+            // Count the mode byte to bytes read.
+            bytesRead++;
+        }
+        else
+        {
+            bytesRead = 1;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if((_codecId == kCodecIlbc20Ms) ||
+        (_codecId == kCodecIlbc30Ms))
+    {
+        // iLBC frames have a fixed size: 38 bytes (20 ms) or 50 bytes (30 ms).
+        WebRtc_UWord32 byteSize = 0;
+        if(_codecId == kCodecIlbc30Ms)
+        {
+            byteSize = 50;
+        }
+        if(_codecId == kCodecIlbc20Ms)
+        {
+            byteSize = 38;
+        }
+        if(bufferSize < byteSize)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                           "output buffer is too short to read ILBC compressed\
+ data.");
+            assert(false);
+            return -1;
+        }
+
+        bytesRead = in.Read(outData, byteSize);
+        if(bytesRead != byteSize)
+        {
+            // EOF: rewind and restart from the configured start position.
+            if(!in.Rewind())
+            {
+                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+                bytesRead = in.Read(outData, byteSize);
+                if(bytesRead != byteSize)
+                {
+                    _reading = false;
+                    return -1;
+                }
+            }
+            else
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+    }
+#endif
+    if(bytesRead == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadCompressedData() no bytes read, codec not supported");
+        return -1;
+    }
+
+    // One frame corresponds to 20 ms of audio for all codecs handled here.
+    _playoutPositionMs += 20;
+    if((_stopPointInMs > 0) &&
+        (_playoutPositionMs >= _stopPointInMs))
+    {
+        // Passed the stop position: wrap around to the start position.
+        if(!in.Rewind())
+        {
+            InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+        }
+        else
+        {
+            _reading = false;
+        }
+    }
+    return bytesRead;
+}
+
+// Configures writing of a compressed (AMR, AMR-WB or iLBC) audio file and
+// writes the codec's magic header line to `out`. Returns 0 on success, -1 if
+// the codec is unsupported or the header cannot be written.
+WebRtc_Word32 ModuleFileUtility::InitCompressedWriting(
+    OutStream& out,
+    const CodecInst& codecInst)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
+               "ModuleFileUtility::InitCompressedWriting(out= 0x%x,\
+ codecName= %s)",
+               &out, codecInst.plname);
+
+    _writing = false;
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(STR_CASE_CMP(codecInst.plname, "amr") == 0)
+    {
+        // pacsize 160 samples @ 8 kHz = 20 ms AMR frames.
+        if(codecInst.pacsize == 160)
+        {
+            memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+            _codecId = kCodecAmr;
+            // BUG FIX: the Write() result used to be ignored.
+            if(!out.Write("#!AMR\n",6))
+            {
+                return -1;
+            }
+            _writing = true;
+            return 0;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(STR_CASE_CMP(codecInst.plname, "amr-wb") == 0)
+    {
+        // pacsize 320 samples @ 16 kHz = 20 ms AMR-WB frames.
+        if(codecInst.pacsize == 320)
+        {
+            memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+            _codecId = kCodecAmrWb;
+            if(!out.Write("#!AMRWB\n",8))
+            {
+                return -1;
+            }
+            _writing = true;
+            return 0;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
+    {
+        // iLBC supports 20 ms (pacsize 160) and 30 ms (pacsize 240) frames.
+        if(codecInst.pacsize == 160)
+        {
+            _codecId = kCodecIlbc20Ms;
+            if(!out.Write("#!iLBC20\n",9))
+            {
+                return -1;
+            }
+        }
+        else if(codecInst.pacsize == 240)
+        {
+            _codecId = kCodecIlbc30Ms;
+            if(!out.Write("#!iLBC30\n",9))
+            {
+                return -1;
+            }
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "codecInst defines unsupported compression codec!");
+            return -1;
+        }
+        memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+        _writing = true;
+        return 0;
+    }
+#endif
+
+    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                 "codecInst defines unsupported compression codec!");
+    return -1;
+}
+
+// Appends `dataLength` bytes of already-encoded audio to `out` verbatim.
+// Returns the number of bytes written, or -1 on failure.
+WebRtc_Word32 ModuleFileUtility::WriteCompressedData(
+    OutStream& out,
+    const WebRtc_Word8* buffer,
+    const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x,\
+ dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
+        // BUG FIX: previously only logged, then passed NULL to Write().
+        return -1;
+    }
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+    return dataLength;
+}
+
+// Prepares `pcm` for reading raw L16 PCM audio at `freq` Hz (8, 16 or
+// 32 kHz). Skips 10 ms blocks until the `start` position (ms) is reached;
+// `stop` (ms) is stored for ReadPCMData(). Returns 0 on success, -1 on an
+// unsupported frequency or EOF before the start position.
+WebRtc_Word32 ModuleFileUtility::InitPCMReading(InStream& pcm,
+                                                const WebRtc_UWord32 start,
+                                                const WebRtc_UWord32 stop,
+                                                WebRtc_UWord32 freq)
+{
+    WEBRTC_TRACE(
+        kTraceInfo,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::InitPCMReading(pcm= 0x%x, start=%d, stop=%d,\
+ freq=%d)",
+        &pcm,
+        start,
+        stop,
+        freq);
+
+    WebRtc_Word8 dummy[320];
+    WebRtc_Word32 read_len;
+
+    _playoutPositionMs = 0;
+    _startPointInMs = start;
+    _stopPointInMs = stop;
+    _reading = false;
+
+    if(freq == 8000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 160;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 128000;
+        _codecId = kCodecL16_8Khz;
+    }
+    else if(freq == 16000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 16000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 256000;
+        _codecId = kCodecL16_16kHz;
+    }
+    else if(freq == 32000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 32000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 512000;
+        _codecId = kCodecL16_32Khz;
+    }
+    else
+    {
+        // BUG FIX: an unsupported frequency used to fall through, leaving
+        // stale codec_info_/_codecId in place, sizing _readSizeBytes from a
+        // stale plfreq and still setting _reading = true.
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "InitPCMReading: unsupported frequency %d!", freq);
+        return -1;
+    }
+
+    // Readsize for 10ms of audio data (2 bytes per sample).
+    _readSizeBytes = 2 * codec_info_. plfreq / 100;
+    if(_startPointInMs > 0)
+    {
+        // Skip 10 ms blocks until the start position is reached.
+        while (_playoutPositionMs < _startPointInMs)
+        {
+            read_len = pcm.Read(dummy, _readSizeBytes);
+            if(read_len == _readSizeBytes)
+            {
+                _playoutPositionMs += 10;
+            }
+            else // Must have reached EOF before start position!
+            {
+                return -1;
+            }
+        }
+    }
+    _reading = true;
+    return 0;
+}
+
+// Reads one 10 ms block of L16 PCM audio from `pcm` into `outData`.
+// At EOF the stream is rewound to the configured start position and the
+// remainder of the block is stitched from the rewound stream; the same
+// wrap-around happens once the stop position is reached. Returns the number
+// of bytes read, or -1 on error.
+WebRtc_Word32 ModuleFileUtility::ReadPCMData(InStream& pcm,
+                                             WebRtc_Word8* outData,
+                                             WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %d)",
+        &pcm,
+        outData,
+        bufferSize);
+
+    // NOTE(review): a NULL outData is only logged here, not rejected;
+    // execution continues and Read() below is handed the NULL pointer.
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
+    }
+
+    // Readsize for 10ms of audio data (2 bytes per sample).
+    WebRtc_UWord32 bytesRequested = 2 * codec_info_.plfreq / 100;
+    if(bufferSize <  bytesRequested)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                   "ReadPCMData: buffer not long enough for a 10ms frame.");
+        assert(false);
+        return -1;
+    }
+
+    WebRtc_UWord32 bytesRead = pcm.Read(outData, bytesRequested);
+    if(bytesRead < bytesRequested)
+    {
+        // Short read: hit EOF. Try to rewind and continue from the start.
+        // NOTE(review): Rewind() is compared with -1 here but used as
+        // `!Rewind()` elsewhere in this file; the two are equivalent only
+        // if Rewind() returns 0 on success and -1 on failure — verify.
+        if(pcm.Rewind() == -1)
+        {
+            _reading = false;
+        }
+        else
+        {
+            // Re-seek to the configured start position...
+            if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
+                              codec_info_.plfreq) == -1)
+            {
+                _reading = false;
+            }
+            else
+            {
+                // ...then fill the rest of the 10 ms block from there.
+                WebRtc_Word32 rest = bytesRequested - bytesRead;
+                WebRtc_Word32 len = pcm.Read(&(outData[bytesRead]), rest);
+                if(len == rest)
+                {
+                    bytesRead += len;
+                }
+                else
+                {
+                    _reading = false;
+                }
+            }
+            if(bytesRead <= 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                        "ReadPCMData: Failed to rewind audio file.");
+                return -1;
+            }
+        }
+    }
+
+    if(bytesRead <= 0)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                   "ReadPCMData: end of file");
+        return -1;
+    }
+    _playoutPositionMs += 10;
+    // Wrap around once the stop position (if any) has been reached.
+    if(_stopPointInMs && _playoutPositionMs >= _stopPointInMs)
+    {
+        if(!pcm.Rewind())
+        {
+            if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
+                              codec_info_.plfreq) == -1)
+            {
+                _reading = false;
+            }
+        }
+    }
+    return bytesRead;
+}
+
+// Configures codec_info_/_codecId for writing raw L16 PCM at `freq` Hz
+// (8, 16 or 32 kHz). `out` is not touched here; audio is written later via
+// WritePCMData(). Returns 0 on success, -1 on an unsupported frequency.
+WebRtc_Word32 ModuleFileUtility::InitPCMWriting(OutStream& out,
+                                                WebRtc_UWord32 freq)
+{
+    if(freq == 8000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 160;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 128000;
+
+        _codecId = kCodecL16_8Khz;
+    }
+    else if(freq == 16000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 16000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 256000;
+
+        _codecId = kCodecL16_16kHz;
+    }
+    else if(freq == 32000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 32000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 512000;
+
+        _codecId = kCodecL16_32Khz;
+    }
+    else
+    {
+        // BUG FIX: an unsupported frequency was previously only detected by
+        // inspecting _codecId afterwards, which could falsely pass if
+        // _codecId still held an L16 id from an earlier call. Also, the old
+        // error message omitted the supported 32 kHz rate.
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "CodecInst is not 8 kHz, 16 kHz or 32 kHz PCM!");
+        return -1;
+    }
+    _writing = true;
+    _bytesWritten = 0;
+    return 0;
+}
+
+// Appends `dataLength` bytes of raw L16 PCM audio to `out` and updates the
+// running byte counter (_bytesWritten). Returns the number of bytes written,
+// or -1 on failure.
+WebRtc_Word32 ModuleFileUtility::WritePCMData(OutStream& out,
+                                              const WebRtc_Word8*  buffer,
+                                              const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
+        // BUG FIX: previously only logged, then passed NULL to Write().
+        return -1;
+    }
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+
+    _bytesWritten += dataLength;
+    return dataLength;
+}
+
+// Copies the codec configuration of the file currently being read or
+// written into `codecInst`. Returns 0 on success, -1 when no file session
+// is active.
+WebRtc_Word32 ModuleFileUtility::codec_info(CodecInst& codecInst)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                 "ModuleFileUtility::codec_info(codecInst= 0x%x)", &codecInst);
+
+    if(_reading || _writing)
+    {
+        memcpy(&codecInst, &codec_info_, sizeof(CodecInst));
+        return 0;
+    }
+    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                 "CodecInst: not currently reading audio file!");
+    return -1;
+}
+
+// Maps `codecInst` (payload name, frequency, packet size, rate) to the
+// internal _codecId, and stores a copy in codec_info_. Returns 0 on success,
+// -1 if the codec is not recognized/compiled in.
+WebRtc_Word32 ModuleFileUtility::set_codec_info(const CodecInst& codecInst)
+{
+    _codecId = kCodecNoCodec;
+    if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
+    {
+        _codecId = kCodecPcmu;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
+    {
+        _codecId = kCodecPcma;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
+    {
+        // Linear PCM: the sampling frequency selects the id.
+        if(codecInst.plfreq == 8000)
+        {
+            _codecId = kCodecL16_8Khz;
+        }
+        else if(codecInst.plfreq == 16000)
+        {
+            _codecId = kCodecL16_16kHz;
+        }
+        else if(codecInst.plfreq == 32000)
+        {
+            _codecId = kCodecL16_32Khz;
+        }
+    }
+#ifdef WEBRTC_CODEC_GSMAMR
+    else if(STR_CASE_CMP(codecInst.plname, "amr") == 0)
+    {
+        _codecId = kCodecAmr;
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    else if(STR_CASE_CMP(codecInst.plname, "amr-wb") == 0)
+    {
+        _codecId = kCodecAmrWb;
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    else if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
+    {
+        // iLBC: 20 ms (pacsize 160) or 30 ms (pacsize 240) frames.
+        if(codecInst.pacsize == 160)
+        {
+            _codecId = kCodecIlbc20Ms;
+        }
+        else if(codecInst.pacsize == 240)
+        {
+            _codecId = kCodecIlbc30Ms;
+        }
+    }
+#endif
+#if(defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    else if(STR_CASE_CMP(codecInst.plname, "isac") == 0)
+    {
+        // iSAC: wideband (16 kHz) or super-wideband (32 kHz).
+        if(codecInst.plfreq == 16000)
+        {
+            _codecId = kCodecIsac;
+        }
+        else if(codecInst.plfreq == 32000)
+        {
+            _codecId = kCodecIsacSwb;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ISACLC
+    else if(STR_CASE_CMP(codecInst.plname, "isaclc") == 0)
+    {
+        _codecId = kCodecIsacLc;
+    }
+#endif
+#ifdef WEBRTC_CODEC_G722
+    else if(STR_CASE_CMP(codecInst.plname, "G722") == 0)
+    {
+        _codecId = kCodecG722;
+    }
+#endif
+    else if(STR_CASE_CMP(codecInst.plname, "G7221") == 0)
+    {
+        // G.722.1 (16 kHz) / G.722.1C (32 kHz): the bit rate selects the id.
+#ifdef WEBRTC_CODEC_G722_1
+        if(codecInst.plfreq == 16000)
+        {
+            if(codecInst.rate == 16000)
+            {
+                _codecId = kCodecG722_1_16Kbps;
+            }
+            else if(codecInst.rate == 24000)
+            {
+                _codecId = kCodecG722_1_24Kbps;
+            }
+            else if(codecInst.rate == 32000)
+            {
+                _codecId = kCodecG722_1_32Kbps;
+            }
+        }
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+        if(codecInst.plfreq == 32000)
+        {
+            if(codecInst.rate == 48000)
+            {
+                _codecId = kCodecG722_1c_48;
+            }
+            else if(codecInst.rate == 32000)
+            {
+                _codecId = kCodecG722_1c_32;
+            }
+            else if(codecInst.rate == 24000)
+            {
+                _codecId = kCodecG722_1c_24;
+            }
+        }
+#endif
+    }
+#ifdef WEBRTC_CODEC_G726
+    else if(STR_CASE_CMP(codecInst.plname, "G726-40") == 0)
+    {
+        _codecId = kCodecG726_40;
+    }
+    // BUG FIX: "G726-32" used to map to kCodecG726_24 and "G726-24" to
+    // kCodecG726_32 — the two mappings were swapped.
+    else if(STR_CASE_CMP(codecInst.plname, "G726-32") == 0)
+    {
+        _codecId = kCodecG726_32;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "G726-24") == 0)
+    {
+        _codecId = kCodecG726_24;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "G726-16") == 0)
+    {
+        _codecId = kCodecG726_16;
+    }
+#endif
+#ifdef WEBRTC_CODEC_G729
+    else if(STR_CASE_CMP(codecInst.plname, "G729") == 0)
+    {
+        _codecId = kCodecG729;
+    }
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    else if(STR_CASE_CMP(codecInst.plname, "G7291") == 0)
+    {
+        _codecId = kCodecG729_1;
+    }
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+    else if(STR_CASE_CMP(codecInst.plname, "speex") == 0)
+    {
+        // Speex: narrowband (8 kHz) or wideband (16 kHz).
+        if(codecInst.plfreq == 8000)
+        {
+            _codecId = kCodecSpeex8Khz;
+        }
+        else if(codecInst.plfreq == 16000)
+        {
+            _codecId = kCodecSpeex16Khz;
+        }
+    }
+#endif
+    if(_codecId == kCodecNoCodec)
+    {
+        return -1;
+    }
+    memcpy(&codec_info_, &codecInst, sizeof(CodecInst));
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::FileDurationMs(const WebRtc_Word8* fileName,
+                                                const FileFormats  fileFormat,
+                                                const WebRtc_UWord32 freqInHz)
+{
+
+    if(fileName == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "filename NULL");
+        return -1;
+    }
+
+    WebRtc_Word32 time_in_ms = -1;
+    struct stat file_size;
+    if(stat(fileName,&file_size) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to retrieve file size with stat!");
+        return -1;
+    }
+    FileWrapper* inStreamObj = FileWrapper::Create();
+    if(inStreamObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "failed to create InStream object!");
+        return -1;
+    }
+    if(inStreamObj->OpenFile(fileName, true) == -1)
+    {
+        delete inStreamObj;
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to open file %s!", fileName);
+        return -1;
+    }
+
+    switch (fileFormat)
+    {
+        case kFileFormatWavFile:
+        {
+            if(ReadWavHeader(*inStreamObj) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "failed to read WAV file header!");
+                return -1;
+            }
+            time_in_ms = ((file_size.st_size - 44) /
+                          (_wavFormatObj.nAvgBytesPerSec/1000));
+            break;
+        }
+        case kFileFormatPcm16kHzFile:
+        {
+            // 16 samples per ms. 2 bytes per sample.
+            WebRtc_Word32 denominator = 16*2;
+            time_in_ms = (file_size.st_size)/denominator;
+            break;
+        }
+        case kFileFormatPcm8kHzFile:
+        {
+            // 8 samples per ms. 2 bytes per sample.
+            WebRtc_Word32 denominator = 8*2;
+            time_in_ms = (file_size.st_size)/denominator;
+            break;
+        }
+        case kFileFormatCompressedFile:
+        {
+            WebRtc_Word32 cnt = 0;
+            WebRtc_Word32 read_len = 0;
+            WebRtc_Word8 buf[64];
+            do
+            {
+                read_len = inStreamObj->Read(&buf[cnt++], 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+            } while ((buf[cnt-1] != '\n') && (64 > cnt));
+
+            if(cnt == 64)
+            {
+                return -1;
+            }
+            else
+            {
+                buf[cnt] = 0;
+            }
+#ifdef WEBRTC_CODEC_GSMAMR
+            if(!strcmp("#!AMR\n", buf))
+            {
+                WebRtc_UWord8 dummy;
+                read_len = inStreamObj->Read(&dummy, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                WebRtc_Word16 AMRMode = (dummy>>3)&0xF;
+
+                // TODO (hellner): use tables instead of hardcoding like this!
+                //                 Additionally, this calculation does not
+                //                 take octet alignment into consideration.
+                switch (AMRMode)
+                {
+                        // Mode 0: 4.75 kbit/sec -> 95 bits per 20 ms frame.
+                        // 20 ms = 95 bits ->
+                        // file size in bytes * 8 / 95 is the number of
+                        // 20 ms frames in the file ->
+                        // time_in_ms = file size * 8 / 95 * 20
+                    case 0:
+                        time_in_ms = ((file_size.st_size)*160)/95;
+                        break;
+                        // Mode 1: 5.15 kbit/sec -> 103 bits per 20 ms frame.
+                    case 1:
+                        time_in_ms = ((file_size.st_size)*160)/103;
+                        break;
+                        // Mode 2: 5.90 kbit/sec -> 118 bits per 20 ms frame.
+                    case 2:
+                        time_in_ms = ((file_size.st_size)*160)/118;
+                        break;
+                        // Mode 3: 6.70 kbit/sec -> 134 bits per 20 ms frame.
+                    case 3:
+                        time_in_ms = ((file_size.st_size)*160)/134;
+                        break;
+                        // Mode 4: 7.40 kbit/sec -> 148 bits per 20 ms frame.
+                    case 4:
+                        time_in_ms = ((file_size.st_size)*160)/148;
+                        break;
+                        // Mode 5: 7.95 bit/sec -> 159 bits per 20 ms frame.
+                    case 5:
+                        time_in_ms = ((file_size.st_size)*160)/159;
+                        break;
+                        // Mode 6: 10.2 bit/sec -> 204 bits per 20 ms frame.
+                    case 6:
+                        time_in_ms = ((file_size.st_size)*160)/204;
+                        break;
+                        // Mode 7: 12.2 bit/sec -> 244 bits per 20 ms frame.
+                    case 7:
+                        time_in_ms = ((file_size.st_size)*160)/244;
+                        break;
+                        // Mode 8: SID Mode -> 39 bits per 20 ms frame.
+                    case 8:
+                        time_in_ms = ((file_size.st_size)*160)/39;
+                        break;
+                    default:
+                        break;
+                }
+            }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+            if(!strcmp("#!AMRWB\n", buf))
+            {
+                WebRtc_UWord8 dummy;
+                read_len = inStreamObj->Read(&dummy, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                // TODO (hellner): use tables instead of hardcoding like this!
+                WebRtc_Word16 AMRWBMode = (dummy>>3)&0xF;
+                switch(AMRWBMode)
+                {
+                        // Mode 0: 6.6 kbit/sec -> 132 bits per 20 ms frame.
+                    case 0:
+                        time_in_ms = ((file_size.st_size)*160)/132;
+                        break;
+                        // Mode 1: 8.85 kbit/sec -> 177 bits per 20 ms frame.
+                    case 1:
+                        time_in_ms = ((file_size.st_size)*160)/177;
+                        break;
+                        // Mode 2: 12.65 kbit/sec -> 253 bits per 20 ms frame.
+                    case 2:
+                        time_in_ms = ((file_size.st_size)*160)/253;
+                        break;
+                        // Mode 3: 14.25 kbit/sec -> 285 bits per 20 ms frame.
+                    case 3:
+                        time_in_ms = ((file_size.st_size)*160)/285;
+                        break;
+                        // Mode 4: 15.85 kbit/sec -> 317 bits per 20 ms frame.
+                    case 4:
+                        time_in_ms = ((file_size.st_size)*160)/317;
+                        break;
+                        // Mode 5: 18.25 kbit/sec -> 365 bits per 20 ms frame.
+                    case 5:
+                        time_in_ms = ((file_size.st_size)*160)/365;
+                        break;
+                        // Mode 6: 19.85 kbit/sec -> 397 bits per 20 ms frame.
+                    case 6:
+                        time_in_ms = ((file_size.st_size)*160)/397;
+                        break;
+                        // Mode 7: 23.05 kbit/sec -> 461 bits per 20 ms frame.
+                    case 7:
+                        time_in_ms = ((file_size.st_size)*160)/461;
+                        break;
+                        // Mode 8: 23.85 kbit/sec -> 477 bits per 20 ms frame.
+                    case 8:
+                        time_in_ms = ((file_size.st_size)*160)/477;
+                        break;
+                    default:
+                        delete inStreamObj;
+                        return -1;
+                }
+            }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+            if(!strcmp("#!iLBC20\n", buf))
+            {
+                // 20 ms is 304 bits
+                time_in_ms = ((file_size.st_size)*160)/304;
+                break;
+            }
+            if(!strcmp("#!iLBC30\n", buf))
+            {
+                // 30 ms takes 400 bits.
+                // file size in bytes * 8 / 400 is the number of
+                // 30 ms frames in the file ->
+                // time_in_ms = file size * 8 / 400 * 30
+                time_in_ms = ((file_size.st_size)*240)/400;
+                break;
+            }
+#endif
+        }
+        case kFileFormatPreencodedFile:
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "cannot determine duration of Pre-Encoded file!");
+            break;
+        }
+        default:
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "unsupported file format %d!", fileFormat);
+            break;
+    }
+    inStreamObj->CloseFile();
+    delete inStreamObj;
+    return time_in_ms;
+}
+
+// Returns how many milliseconds of audio have been played out so far in the
+// current reading session, or 0 when no file is being read.
+WebRtc_UWord32 ModuleFileUtility::PlayoutPositionMs()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+               "ModuleFileUtility::PlayoutPosition()");
+
+    // The playout position is only meaningful while reading is in progress.
+    return _reading ? _playoutPositionMs : 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/media_file/source/media_file_utility.h b/trunk/src/modules/media_file/source/media_file_utility.h
new file mode 100644
index 0000000..cfe0314
--- /dev/null
+++ b/trunk/src/modules/media_file/source/media_file_utility.h
@@ -0,0 +1,349 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Note: the class cannot be used for reading and writing at the same time.
+#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
+#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
+
+#include <stdio.h>
+
+#include "common_types.h"
+#include "media_file_defines.h"
+
+namespace webrtc {
+class AviFile;
+class InStream;
+class OutStream;
+
+class ModuleFileUtility
+{
+public:
+
+    // id identifies this instance in trace output.
+    ModuleFileUtility(const WebRtc_Word32 id);
+    ~ModuleFileUtility();
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    // Open the file specified by fileName for reading (relative path is
+    // allowed). If loop is true the file will be played until StopPlaying() is
+    // called. When end of file is reached the file is read from the start.
+    // Only video will be read if videoOnly is true.
+    WebRtc_Word32 InitAviReading(const WebRtc_Word8* fileName, bool videoOnly,
+                                 bool loop);
+
+    // Put 10-60ms of audio data from file into the outBuffer depending on
+    // codec frame size. bufferLengthInBytes indicates the size of outBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    // Note: This API only plays mono audio but can be used on a file
+    // containing audio with more channels (in which case the audio will be
+    // converted to mono).
+    WebRtc_Word32 ReadAviAudioData(WebRtc_Word8* outBuffer,
+                                   const WebRtc_UWord32 bufferLengthInBytes);
+
+    // Put one video frame into outBuffer. bufferLengthInBytes indicates the
+    // size of outBuffer.
+    // The return value is the number of bytes written to videoBuffer.
+    WebRtc_Word32 ReadAviVideoData(WebRtc_Word8* videoBuffer,
+                                   const WebRtc_UWord32 bufferLengthInBytes);
+
+    // Open/create the file specified by fileName for writing audio/video data
+    // (relative path is allowed). codecInst specifies the encoding of the audio
+    // data. videoCodecInst specifies the encoding of the video data. Only video
+    // data will be recorded if videoOnly is true.
+    WebRtc_Word32 InitAviWriting(const WebRtc_Word8* filename,
+                                 const CodecInst& codecInst,
+                                 const VideoCodec& videoCodecInst,
+                                 const bool videoOnly);
+
+    // Write one audio frame, i.e. the bufferLengthinBytes first bytes of
+    // audioBuffer, to file. The audio frame size is determined by the
+    // codecInst.pacsize parameter of the last successful
+    // InitAviWriting(..) call.
+    // Note: bufferLength must be exactly one frame.
+    WebRtc_Word32 WriteAviAudioData(const WebRtc_Word8* audioBuffer,
+                                    WebRtc_UWord32 bufferLengthInBytes);
+
+
+    // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
+    // to file.
+    // Note: videoBuffer can contain encoded data. The codec used must be the
+    // same as what was specified by videoCodecInst for the last successful
+    // InitAviWriting(..) call. The videoBuffer must contain exactly
+    // one video frame.
+    WebRtc_Word32 WriteAviVideoData(const WebRtc_Word8* videoBuffer,
+                                    WebRtc_UWord32 bufferLengthInBytes);
+
+    // Stop recording to file or stream.
+    WebRtc_Word32 CloseAviFile();
+
+    // NOTE(review): appears to copy the current video codec configuration
+    // into codecInst; confirm against the implementation.
+    WebRtc_Word32 VideoCodecInst(VideoCodec& codecInst);
+#endif // #ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+    // Prepare for playing audio from stream.
+    // startPointMs and stopPointMs, unless zero, specify what part of the file
+    // should be read. From startPointMs ms to stopPointMs ms.
+    WebRtc_Word32 InitWavReading(InStream& stream,
+                                 const WebRtc_UWord32 startPointMs = 0,
+                                 const WebRtc_UWord32 stopPointMs = 0);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    // Note: This API only plays mono audio but can be used on a file
+    // containing audio with more channels (in which case the audio will be
+    // converted to mono).
+    WebRtc_Word32 ReadWavDataAsMono(InStream& stream, WebRtc_Word8* audioBuffer,
+                                    const WebRtc_UWord32 dataLengthInBytes);
+
+    // Put 10-60ms, depending on codec frame size, of audio data from file into
+    // audioBufferLeft and audioBufferRight. The buffers contain the left and
+    // right channel of played out stereo audio.
+    // dataLengthInBytes indicates the size of both audioBufferLeft and
+    // audioBufferRight.
+    // The return value is the number of bytes read for each buffer.
+    // Note: This API can only be successfully called for WAV files with stereo
+    // audio.
+    WebRtc_Word32 ReadWavDataAsStereo(InStream& wav,
+                                      WebRtc_Word8* audioBufferLeft,
+                                      WebRtc_Word8* audioBufferRight,
+                                      const WebRtc_UWord32 bufferLength);
+
+    // Prepare for recording audio to stream.
+    // codecInst specifies the encoding of the audio data.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    WebRtc_Word32 InitWavWriting(OutStream& stream, const CodecInst& codecInst);
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful InitWavWriting(..) call.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 WriteWavData(OutStream& stream,
+                               const WebRtc_Word8* audioBuffer,
+                               const WebRtc_UWord32 bufferLength);
+
+    // Finalizes the WAV header so that it is correct if nothing more will be
+    // written to stream.
+    // Note: this API must be called before closing stream to ensure that the
+    //       WAVE header is updated with the file size. Don't call this API
+    //       if more samples are to be written to stream.
+    WebRtc_Word32 UpdateWavHeader(OutStream& stream);
+
+    // Prepare for playing audio from stream.
+    // startPointMs and stopPointMs, unless zero, specify what part of the file
+    // should be read. From startPointMs ms to stopPointMs ms.
+    // freqInHz is the PCM sampling frequency.
+    // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
+    WebRtc_Word32 InitPCMReading(InStream& stream,
+                                 const WebRtc_UWord32 startPointMs = 0,
+                                 const WebRtc_UWord32 stopPointMs = 0,
+                                 const WebRtc_UWord32 freqInHz = 16000);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadPCMData(InStream& stream, WebRtc_Word8* audioBuffer,
+                              const WebRtc_UWord32 dataLengthInBytes);
+
+    // Prepare for recording audio to stream.
+    // freqInHz is the PCM sampling frequency.
+    // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
+    WebRtc_Word32 InitPCMWriting(OutStream& stream,
+                                 const WebRtc_UWord32 freqInHz = 16000);
+
+    // Write one 10ms audio frame, i.e. the bufferLength first bytes of
+    // audioBuffer, to file. The audio frame size is determined by the freqInHz
+    // parameter of the last successful InitPCMWriting(..) call.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 WritePCMData(OutStream& stream,
+                               const WebRtc_Word8* audioBuffer,
+                               WebRtc_UWord32 bufferLength);
+
+    // Prepare for playing audio from stream.
+    // startPointMs and stopPointMs, unless zero, specify what part of the file
+    // should be read. From startPointMs ms to stopPointMs ms.
+    WebRtc_Word32 InitCompressedReading(InStream& stream,
+                                        const WebRtc_UWord32 startPointMs = 0,
+                                        const WebRtc_UWord32 stopPointMs = 0);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadCompressedData(InStream& stream,
+                                     WebRtc_Word8* audioBuffer,
+                                     const WebRtc_UWord32 dataLengthInBytes);
+
+    // Prepare for recording audio to stream.
+    // codecInst specifies the encoding of the audio data.
+    WebRtc_Word32 InitCompressedWriting(OutStream& stream,
+                                        const CodecInst& codecInst);
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful InitCompressedWriting(..) call.
+    // The return value is the number of bytes written to stream.
+    // Note: bufferLength must be exactly one frame.
+    WebRtc_Word32 WriteCompressedData(OutStream& stream,
+                                      const WebRtc_Word8* audioBuffer,
+                                      const WebRtc_UWord32 bufferLength);
+
+    // Prepare for playing audio from stream.
+    // codecInst specifies the encoding of the audio data.
+    WebRtc_Word32 InitPreEncodedReading(InStream& stream,
+                                        const CodecInst& codecInst);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadPreEncodedData(InStream& stream,
+                                     WebRtc_Word8* audioBuffer,
+                                     const WebRtc_UWord32 dataLengthInBytes);
+
+    // Prepare for recording audio to stream.
+    // codecInst specifies the encoding of the audio data.
+    WebRtc_Word32 InitPreEncodedWriting(OutStream& stream,
+                                        const CodecInst& codecInst);
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to stream. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful InitPreEncodedWriting(..) call.
+    // The return value is the number of bytes written to stream.
+    // Note: bufferLength must be exactly one frame.
+    WebRtc_Word32 WritePreEncodedData(OutStream& stream,
+                                      const WebRtc_Word8* inData,
+                                      const WebRtc_UWord32 dataLengthInBytes);
+
+    // Return the duration, in ms, of the file specified by fileName.
+    // freqInHz specifies the sampling frequency of the file.
+    WebRtc_Word32 FileDurationMs(const WebRtc_Word8* fileName,
+                                 const FileFormats fileFormat,
+                                 const WebRtc_UWord32 freqInHz = 16000);
+
+    // Return the number of ms that have been played so far.
+    WebRtc_UWord32 PlayoutPositionMs();
+
+    // Update codecInst according to the current audio codec being used for
+    // reading or writing.
+    WebRtc_Word32 codec_info(CodecInst& codecInst);
+
+private:
+    // Biggest WAV frame supported is 10 ms at 48kHz of 2 channel, 16 bit audio.
+    enum{WAV_MAX_BUFFER_SIZE = 480*2*2};
+
+
+    // Derive the internal codec settings from WAV format parameters
+    // (sampling rate, channel count, sample size and format tag).
+    WebRtc_Word32 InitWavCodec(WebRtc_UWord32 samplesPerSec,
+                               WebRtc_UWord32 channels,
+                               WebRtc_UWord32 bitsPerSample,
+                               WebRtc_UWord32 formatTag);
+
+    // Parse the WAV header in stream.
+    WebRtc_Word32 ReadWavHeader(InStream& stream);
+
+    // Update the WAV header. freqInHz, bytesPerSample, channels, format,
+    // lengthInBytes specify characteristics of the audio data.
+    // freqInHz is the sampling frequency. bytesPerSample is the sample size in
+    // bytes. channels is the number of channels, e.g. 1 is mono and 2 is
+    // stereo. format is the encoding format (e.g. PCMU, PCMA, PCM etc).
+    // lengthInBytes is the number of bytes the audio samples are using up.
+    WebRtc_Word32 WriteWavHeader(OutStream& stream,
+                                 const WebRtc_UWord32 freqInHz,
+                                 const WebRtc_UWord32 bytesPerSample,
+                                 const WebRtc_UWord32 channels,
+                                 const WebRtc_UWord32 format,
+                                 const WebRtc_UWord32 lengthInBytes);
+
+    // Put dataLengthInBytes of audio data from stream into the audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadWavData(InStream& stream, WebRtc_UWord8* audioBuffer,
+                              const WebRtc_UWord32 dataLengthInBytes);
+
+    // Update the current audio codec being used for reading or writing
+    // according to codecInst.
+    WebRtc_Word32 set_codec_info(const CodecInst& codecInst);
+
+    // Subset of the fields of a RIFF/WAVE "fmt " chunk.
+    struct WAVE_FMTINFO_header
+    {
+        WebRtc_Word16 formatTag;
+        WebRtc_Word16 nChannels;
+        WebRtc_Word32 nSamplesPerSec;
+        WebRtc_Word32 nAvgBytesPerSec;
+        WebRtc_Word16 nBlockAlign;
+        WebRtc_Word16 nBitsPerSample;
+    };
+    // Identifiers for preencoded files.
+    enum MediaFileUtility_CodecType
+    {
+        kCodecNoCodec  = 0,
+        kCodecIsac,
+        kCodecIsacSwb,
+        kCodecIsacLc,
+        kCodecL16_8Khz,
+        kCodecL16_16kHz,
+        kCodecL16_32Khz,
+        kCodecPcmu,
+        kCodecPcma,
+        kCodecIlbc20Ms,
+        kCodecIlbc30Ms,
+        kCodecG722,
+        kCodecG722_1_32Kbps,
+        kCodecG722_1_24Kbps,
+        kCodecG722_1_16Kbps,
+        kCodecG722_1c_48,
+        kCodecG722_1c_32,
+        kCodecG722_1c_24,
+        kCodecAmr,
+        kCodecAmrWb,
+        kCodecG729,
+        kCodecG729_1,
+        kCodecG726_40,
+        kCodecG726_32,
+        kCodecG726_24,
+        kCodecG726_16,
+        kCodecSpeex8Khz,
+        kCodecSpeex16Khz
+    };
+
+    // TODO (hellner): why store multiple formats. Just store either codec_info_
+    //                 or _wavFormatObj and supply conversion functions.
+    WAVE_FMTINFO_header _wavFormatObj;
+    WebRtc_Word32 _dataSize;      // Chunk size if reading a WAV file
+    // Number of bytes to read. I.e. frame size in bytes. May be multiple
+    // chunks if reading WAV.
+    WebRtc_Word32 _readSizeBytes;
+
+    WebRtc_Word32 _id;    // Identifies this instance in trace output.
+
+    WebRtc_UWord32 _stopPointInMs;     // End of requested range; 0 = to end of file.
+    WebRtc_UWord32 _startPointInMs;    // Start of requested range in ms.
+    WebRtc_UWord32 _playoutPositionMs; // Ms played out so far; see PlayoutPositionMs().
+    WebRtc_UWord32 _bytesWritten;      // NOTE(review): appears to count bytes
+                                       // written while recording; confirm in .cc.
+
+    CodecInst codec_info_;
+    MediaFileUtility_CodecType _codecId;
+
+    // The amount of bytes, on average, used for one audio sample.
+    WebRtc_Word32  _bytesPerSample;
+    WebRtc_Word32  _readPos;    // NOTE(review): appears to be the current read
+                                // offset in bytes; confirm in .cc.
+
+    // Only reading or writing can be enabled, not both.
+    bool _reading;
+    bool _writing;
+
+    // Scratch buffer used for turning stereo audio to mono.
+    WebRtc_UWord8 _tempData[WAV_MAX_BUFFER_SIZE];
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    AviFile* _aviAudioInFile;
+    AviFile* _aviVideoInFile;
+    AviFile* _aviOutFile;
+    VideoCodec _videoCodec;
+#endif
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
diff --git a/trunk/src/modules/modules.gyp b/trunk/src/modules/modules.gyp
new file mode 100644
index 0000000..83b5cc6
--- /dev/null
+++ b/trunk/src/modules/modules.gyp
@@ -0,0 +1,60 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # Each included .gypi contributes its own build targets to this .gyp file;
+  # ../build/common.gypi supplies the shared build settings.
+  'includes': [
+    '../build/common.gypi',
+    'audio_coding/codecs/cng/cng.gypi',
+    'audio_coding/codecs/g711/g711.gypi',
+    'audio_coding/codecs/g722/g722.gypi',
+    'audio_coding/codecs/ilbc/ilbc.gypi',
+    'audio_coding/codecs/iSAC/main/source/isac.gypi',
+    'audio_coding/codecs/iSAC/fix/source/isacfix.gypi',
+    'audio_coding/codecs/pcm16b/pcm16b.gypi',
+    'audio_coding/main/source/audio_coding_module.gypi',
+    'audio_coding/neteq/neteq.gypi',
+    'audio_conference_mixer/source/audio_conference_mixer.gypi',
+    'audio_device/main/source/audio_device.gypi',
+    'audio_processing/audio_processing.gypi',
+    'audio_processing/aec/aec.gypi',
+    'audio_processing/aecm/aecm.gypi',
+    'audio_processing/agc/agc.gypi',
+    'audio_processing/ns/ns.gypi',
+    'audio_processing/utility/util.gypi',
+    'media_file/source/media_file.gypi',
+    'udp_transport/source/udp_transport.gypi',
+    'utility/source/utility.gypi',
+    'video_coding/codecs/i420/main/source/i420.gypi',
+    'video_coding/codecs/test_framework/test_framework.gypi',
+    'video_coding/codecs/vp8/main/source/vp8.gypi',
+    'video_coding/main/source/video_coding.gypi',
+    'video_capture/main/source/video_capture.gypi',
+    'video_processing/main/source/video_processing.gypi',
+    'video_render/main/source/video_render.gypi',
+    'rtp_rtcp/source/rtp_rtcp.gypi',
+  ],
+
+  # Test targets, excluded when building with Chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'includes': [
+        'audio_coding/codecs/iSAC/isac_test.gypi',
+        'audio_coding/codecs/iSAC/isacfix_test.gypi',
+        'audio_processing/apm_tests.gypi',
+        'rtp_rtcp/source/rtp_rtcp_tests.gypi',
+        'rtp_rtcp/test/test_bwe/test_bwe.gypi',
+        'rtp_rtcp/test/testFec/test_fec.gypi',
+        'rtp_rtcp/test/testAPI/test_api.gypi',
+        'video_coding/main/source/video_coding_test.gypi',
+        'video_coding/codecs/test/video_codecs_test_framework.gypi',
+        'video_coding/codecs/tools/video_codecs_tools.gypi',
+        'video_processing/main/test/vpm_tests.gypi',
+      ], # includes
+    }], # build_with_chromium
+  ], # conditions
+}
diff --git a/trunk/src/modules/rtp_rtcp/OWNERS b/trunk/src/modules/rtp_rtcp/OWNERS
new file mode 100644
index 0000000..c968564
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/OWNERS
@@ -0,0 +1,5 @@
+pwestin@webrtc.org

+stefan@webrtc.org

+henrik.lundin@webrtc.org

+mflodman@webrtc.org

+asapersson@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/modules/rtp_rtcp/interface/rtp_rtcp.h b/trunk/src/modules/rtp_rtcp/interface/rtp_rtcp.h
new file mode 100644
index 0000000..6b55202
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -0,0 +1,1094 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
+#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
+
+#include <vector>
+
+#include "module.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+// forward declaration
+class Transport;
+
+class RtpRtcp : public Module
+{
+public:
+    /*
+    *   create a RTP/RTCP module object using the system clock
+    *
+    *   id      - unique identifier of this RTP/RTCP module object
+    *   audio   - true for a audio version of the RTP/RTCP module object false will create a video version
+    */
+    static RtpRtcp* CreateRtpRtcp(const WebRtc_Word32 id,
+                                  const bool audio);
+
+    /*
+    *   create a RTP/RTCP module object
+    *
+    *   id      - unique identifier of this RTP/RTCP module object
+    *   audio   - true for a audio version of the RTP/RTCP module object
+    *             false will create a video version
+    *   clock   - the clock to use to read time; must not be NULL
+    */
+    static RtpRtcp* CreateRtpRtcp(const WebRtc_Word32 id,
+                                  const bool audio,
+                                  RtpRtcpClock* clock);
+
+    /*
+    *   destroy a RTP/RTCP module object
+    *
+    *   module  - object to destroy
+    */
+    static void DestroyRtpRtcp(RtpRtcp* module);
+
+    /*
+    *   Change the unique identifier of this object
+    *
+    *   id      - new unique identifier of this RTP/RTCP module object
+    */
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    /*
+    *   De-muxing functionality for conferencing
+    *
+    *   register a module that will act as a default module for this module
+    *   used for feedback messages back to the encoder when one encoded stream
+    *   is sent to multiple destinations
+    *
+    *   module  - default module
+    */
+    virtual WebRtc_Word32 RegisterDefaultModule(RtpRtcp* module) = 0;
+
+    /*
+    *   unregister the default module
+    *   will stop the demuxing feedback
+    */
+    virtual WebRtc_Word32 DeRegisterDefaultModule() = 0;
+
+    /*
+    *   returns true if a default module is registered, false otherwise
+    */
+    virtual bool DefaultModuleRegistered() = 0;
+
+    /*
+    *   returns number of registered child modules
+    */
+    virtual WebRtc_UWord32 NumberChildModules() = 0;
+
+    /*
+    *   Lip-sync between voice-video
+    *
+    *   module  - audio module
+    *
+    *   Note: only allowed on a video module
+    */
+    virtual WebRtc_Word32 RegisterSyncModule(RtpRtcp* module) = 0;
+
+    /*
+    *   Turn off lip-sync between voice-video
+    */
+    virtual WebRtc_Word32 DeRegisterSyncModule() = 0;
+
+    /**************************************************************************
+    *
+    *   Receiver functions
+    *
+    ***************************************************************************/
+
+    /*
+    *   Initialize receive side
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 InitReceiver() = 0;
+
+    /*
+    *   Used by the module to deliver the incoming data to the codec module
+    *
+    *   incomingDataCallback    - callback object that will receive the incoming data
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterIncomingDataCallback(RtpData* incomingDataCallback) = 0;
+
+    /*
+    *   Used by the module to deliver messages to the codec module/appliation
+    *
+    *   incomingMessagesCallback    - callback object that will receive the incoming messages
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback) = 0;
+
+    /*
+    *   configure a RTP packet timeout value
+    *
+    *   RTPtimeoutMS   - time in milliseconds after last received RTP packet
+    *   RTCPtimeoutMS  - time in milliseconds after last received RTCP packet
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 RTPtimeoutMS,
+                                         const WebRtc_UWord32 RTCPtimeoutMS) = 0;
+
+    /*
+    *   Set periodic dead or alive notification
+    *
+    *   enable              - turn periodic dead or alive notification on/off
+    *   sampleTimeSeconds   - sample interval in seconds for dead or alive notifications
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetPeriodicDeadOrAliveStatus(const bool enable,
+                                                     const WebRtc_UWord8 sampleTimeSeconds) = 0;
+
+    /*
+    *   Get periodic dead or alive notification status
+    *
+    *   enable              - periodic dead or alive notification on/off
+    *   sampleTimeSeconds   - sample interval in seconds for dead or alive notifications
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 PeriodicDeadOrAliveStatus(bool &enable,
+                                                  WebRtc_UWord8 &sampleTimeSeconds) = 0;
+
+    /*
+    *   set voice codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterReceivePayload(
+        const CodecInst& voiceCodec) = 0;
+
+    /*
+    *   set video codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterReceivePayload(
+        const VideoCodec& videoCodec) = 0;
+
+    /*
+    *   get payload type for a voice codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ReceivePayloadType(
+        const CodecInst& voiceCodec,
+        WebRtc_Word8* plType) = 0;
+
+    /*
+    *   get payload type for a video codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ReceivePayloadType(
+        const VideoCodec& videoCodec,
+        WebRtc_Word8* plType) = 0;
+
+    /*
+    *   Remove a registered payload type from list of accepted payloads
+    *
+    *   payloadType - payload type of codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 DeRegisterReceivePayload(
+        const WebRtc_Word8 payloadType) = 0;
+
+   /*
+    *   (De)register RTP header extension type and id.
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id) = 0;
+
+    virtual WebRtc_Word32 DeregisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type) = 0;
+
+    /*
+    *   Get last received remote timestamp
+    */
+    virtual WebRtc_UWord32 RemoteTimestamp() const = 0;
+
+    /*
+    *   Get the current estimated remote timestamp
+    *
+    *   timestamp   - estimated timestamp
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const = 0;
+
+    /*
+    *   Get incoming SSRC
+    */
+    virtual WebRtc_UWord32 RemoteSSRC() const = 0;
+
+    /*
+    *   Get remote CSRC
+    *
+    *   arrOfCSRC   - array that will receive the CSRCs
+    *
+    *   return -1 on failure else the number of valid entries in the list
+    */
+    virtual WebRtc_Word32 RemoteCSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const  = 0;
+
+    /*
+    *   get the currently configured SSRC filter
+    *
+    *   allowedSSRC - SSRC that will be allowed through
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const = 0;
+
+    /*
+    *   set a SSRC to be used as a filter for incoming RTP streams
+    *
+    *   allowedSSRC - SSRC that will be allowed through
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSSRCFilter(const bool enable,
+                                        const WebRtc_UWord32 allowedSSRC) = 0;
+
+    /*
+    * Turn on/off receiving RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 SetRTXReceiveStatus(const bool enable,
+                                              const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    * Get status of receiving RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 RTXReceiveStatus(bool* enable,
+                                           WebRtc_UWord32* SSRC) const = 0;
+
+    /*
+    *   called by the network module when we receive a packet
+    *
+    *   incomingPacket - incoming packet buffer
+    *   packetLength   - length of incoming buffer
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPacket,
+                                         const WebRtc_UWord16 packetLength) = 0;
+
+
+    /*
+    *    Option when not using the RegisterSyncModule function
+    *
+    *    Inform the module about the received audio NTP
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 IncomingAudioNTP(
+        const WebRtc_UWord32 audioReceivedNTPsecs,
+        const WebRtc_UWord32 audioReceivedNTPfrac,
+        const WebRtc_UWord32 audioRTCPArrivalTimeSecs,
+        const WebRtc_UWord32 audioRTCPArrivalTimeFrac) = 0;
+
+    /**************************************************************************
+    *
+    *   Sender
+    *
+    ***************************************************************************/
+
+    /*
+    *   Initialize send side
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 InitSender() = 0;
+
+    /*
+    *   Used by the module to send RTP and RTCP packet to the network module
+    *
+    *   outgoingTransport   - transport object that will be called when packets are ready to be sent out on the network
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendTransport(Transport* outgoingTransport) = 0;
+
+    /*
+    *   set MTU
+    *
+    *   size    -  Max transfer unit in bytes, default is 1500
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetMaxTransferUnit(const WebRtc_UWord16 size) = 0;
+
+    /*
+    *   set transport overhead
+    *   default is IPv4 and UDP with no encryption
+    *
+    *   TCP                     - true for TCP false UDP
+    *   IPv6                    - true for IP version 6 false for version 4
+    *   authenticationOverhead  - number of bytes to leave for an authentication header
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetTransportOverhead(const bool TCP,
+                                             const bool IPV6,
+                                             const WebRtc_UWord8 authenticationOverhead = 0) = 0;
+
+    /*
+    *   Get max payload length
+    *
+    *   A combination of the configuration MaxTransferUnit and TransportOverhead.
+    *   Does not account FEC/ULP/RED overhead if FEC is enabled.
+    *   Does not account for RTP headers
+    */
+    virtual WebRtc_UWord16 MaxPayloadLength() const = 0;
+
+    /*
+    *   Get max data payload length
+    *
+    *   A combination of the configuration MaxTransferUnit, headers and TransportOverhead.
+    *   Takes into account FEC/ULP/RED overhead if FEC is enabled.
+    *   Takes into account RTP headers
+    */
+    virtual WebRtc_UWord16 MaxDataPayloadLength() const = 0;
+
+    /*
+    *   set RTPKeepaliveStatus
+    *
+    *   enable              - on/off
+    *   unknownPayloadType  - payload type to use for RTP keepalive
+    *   deltaTransmitTimeMS - delta time between RTP keepalive packets
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTPKeepaliveStatus(
+        const bool enable,
+        const WebRtc_Word8 unknownPayloadType,
+        const WebRtc_UWord16 deltaTransmitTimeMS) = 0;
+
+    /*
+    *   Get RTPKeepaliveStatus
+    *
+    *   enable              - on/off
+    *   unknownPayloadType  - payload type in use for RTP keepalive
+    *   deltaTransmitTimeMS - delta time between RTP keepalive packets
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RTPKeepaliveStatus(
+        bool* enable,
+        WebRtc_Word8* unknownPayloadType,
+        WebRtc_UWord16* deltaTransmitTimeMS) const = 0;
+
+    /*
+    *   check if RTPKeepaliveStatus is enabled
+    */
+    virtual bool RTPKeepalive() const = 0;
+
+    /*
+    *   set codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendPayload(
+        const CodecInst& voiceCodec) = 0;
+
+    /*
+    *   set codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendPayload(
+        const VideoCodec& videoCodec) = 0;
+
+    /*
+    *   Unregister a send payload
+    *
+    *   payloadType - payload type of codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 DeRegisterSendPayload(
+        const WebRtc_Word8 payloadType) = 0;
+
+   /*
+    *   (De)register RTP header extension type and id.
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id) = 0;
+
+    virtual WebRtc_Word32 DeregisterSendRtpHeaderExtension(
+        const RTPExtensionType type) = 0;
+
+   /*
+    *   Enable/disable traffic smoothing of sending stream.
+    */
+    virtual void SetTransmissionSmoothingStatus(const bool enable) = 0;
+
+    virtual bool TransmissionSmoothingStatus() const = 0;
+
+    /*
+    *   get start timestamp
+    */
+    virtual WebRtc_UWord32 StartTimestamp() const = 0;
+
+    /*
+    *   configure start timestamp, default is a random number
+    *
+    *   timestamp   - start timestamp
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetStartTimestamp(
+        const WebRtc_UWord32 timestamp) = 0;
+
+    /*
+    *   Get SequenceNumber
+    */
+    virtual WebRtc_UWord16 SequenceNumber() const = 0;
+
+    /*
+    *   Set SequenceNumber, default is a random number
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSequenceNumber(const WebRtc_UWord16 seq) = 0;
+
+    /*
+    *   Get SSRC
+    */
+    virtual WebRtc_UWord32 SSRC() const = 0;
+
+    /*
+    *   configure SSRC, default is a random number
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSSRC(const WebRtc_UWord32 ssrc) = 0;
+
+    /*
+    *   Get CSRC
+    *
+    *   arrOfCSRC   - array of CSRCs
+    *
+    *   return -1 on failure else number of valid entries in the array
+    */
+    virtual WebRtc_Word32 CSRCs(
+        WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const = 0;
+
+    /*
+    *   Set CSRC
+    *
+    *   arrOfCSRC   - array of CSRCs
+    *   arrLength   - number of valid entries in the array
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCSRCs(
+        const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+        const WebRtc_UWord8 arrLength) = 0;
+
+    /*
+    *   includes CSRCs in RTP header if enabled
+    *
+    *   include CSRC - on/off
+    *
+    *    default:on
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCSRCStatus(const bool include) = 0;
+
+    /*
+    * Turn on/off sending RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 SetRTXSendStatus(const bool enable,
+                                           const bool setSSRC,
+                                           const WebRtc_UWord32 SSRC) = 0;
+
+
+    /*
+    * Get status of sending RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 RTXSendStatus(bool* enable,
+                                        WebRtc_UWord32* SSRC) const = 0;
+
+    /*
+    *   sends kRtcpByeCode when going from true to false
+    *
+    *   sending - on/off
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSendingStatus(const bool sending) = 0;
+
+    /*
+    *   get send status
+    */
+    virtual bool Sending() const = 0;
+
+    /*
+    *   Starts/Stops media packets, on by default
+    *
+    *   sending - on/off
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSendingMediaStatus(const bool sending) = 0;
+
+    /*
+    *   get send status
+    */
+    virtual bool SendingMedia() const = 0;
+
+    /*
+    *   get sent bitrate in Kbit/s
+    */
+    virtual void BitrateSent(WebRtc_UWord32* totalRate,
+                             WebRtc_UWord32* videoRate,
+                             WebRtc_UWord32* fecRate,
+                             WebRtc_UWord32* nackRate) const = 0;
+
+    /*
+     *  Get the send-side estimate of the available bandwidth.
+     */
+    virtual int EstimatedSendBandwidth(
+        WebRtc_UWord32* available_bandwidth) const = 0;
+
+    /*
+     *  Get the receive-side estimate of the available bandwidth.
+     */
+    virtual int EstimatedReceiveBandwidth(
+        WebRtc_UWord32* available_bandwidth) const = 0;
+
+    /*
+    *   Used by the codec module to deliver a video or audio frame for packetization
+    *
+    *   frameType       - type of frame to send
+    *   payloadType     - payload type of frame to send
+    *   timestamp       - timestamp of frame to send
+    *   payloadData     - payload buffer of frame to send
+    *   payloadSize     - size of payload buffer to send
+    *   fragmentation   - fragmentation offset data for fragmented frames such as layers or RED
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendOutgoingData(
+        const FrameType frameType,
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader* fragmentation = NULL,
+        const RTPVideoHeader* rtpVideoHdr = NULL) = 0;
+
+    /**************************************************************************
+    *
+    *   RTCP
+    *
+    ***************************************************************************/
+
+    /*
+    *   RegisterIncomingRTCPCallback
+    *
+    *   incomingMessagesCallback    - callback object that will receive messages from RTCP
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback) = 0;
+
+    /*
+    *    Get RTCP status
+    */
+    virtual RTCPMethod RTCP() const = 0;
+
+    /*
+    *   configure RTCP status i.e. on (compound or non-compound)/off
+    *
+    *   method  - RTCP method to use
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTCPStatus(const RTCPMethod method) = 0;
+
+    /*
+    *   Set RTCP CName (i.e unique identifier)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCNAME(const char cName[RTCP_CNAME_SIZE]) = 0;
+
+    /*
+    *   Get RTCP CName (i.e unique identifier)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 CNAME(char cName[RTCP_CNAME_SIZE]) = 0;
+
+    /*
+    *   Get remote CName
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteCNAME(
+        const WebRtc_UWord32 remoteSSRC,
+        char cName[RTCP_CNAME_SIZE]) const = 0;
+
+    /*
+    *   Get remote NTP
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteNTP(
+        WebRtc_UWord32 *ReceivedNTPsecs,
+        WebRtc_UWord32 *ReceivedNTPfrac,
+        WebRtc_UWord32 *RTCPArrivalTimeSecs,
+        WebRtc_UWord32 *RTCPArrivalTimeFrac) const  = 0;
+
+    /*
+    *   AddMixedCNAME
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 AddMixedCNAME(
+        const WebRtc_UWord32 SSRC,
+        const char cName[RTCP_CNAME_SIZE]) = 0;
+
+    /*
+    *   RemoveMixedCNAME
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoveMixedCNAME(const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    *   Get RoundTripTime
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RTT(const WebRtc_UWord32 remoteSSRC,
+                              WebRtc_UWord16* RTT,
+                              WebRtc_UWord16* avgRTT,
+                              WebRtc_UWord16* minRTT,
+                              WebRtc_UWord16* maxRTT) const = 0 ;
+
+    /*
+    *   Reset RoundTripTime statistics
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetRTT(const WebRtc_UWord32 remoteSSRC)= 0 ;
+
+    /*
+    *   Force a send of a RTCP packet
+    *   normal SR and RR are triggered via the process function
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendRTCP(WebRtc_UWord32 rtcpPacketType = kRtcpReport) = 0;
+
+    /*
+    *    Good state of RTP receiver inform sender
+    */
+    virtual WebRtc_Word32 SendRTCPReferencePictureSelection(const WebRtc_UWord64 pictureID) = 0;
+
+    /*
+    *    Send a RTCP Slice Loss Indication (SLI)
+    *    6 least significant bits of pictureID
+    */
+    virtual WebRtc_Word32 SendRTCPSliceLossIndication(const WebRtc_UWord8 pictureID) = 0;
+
+    /*
+    *   Reset RTP statistics
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetStatisticsRTP() = 0;
+
+    /*
+    *   locally created statistics of the received RTP stream
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 StatisticsRTP(WebRtc_UWord8  *fraction_lost,  // scale 0 to 255
+                                      WebRtc_UWord32 *cum_lost,       // number of lost packets
+                                      WebRtc_UWord32 *ext_max,        // highest sequence number received
+                                      WebRtc_UWord32 *jitter,
+                                      WebRtc_UWord32 *max_jitter = NULL) const = 0;
+
+    /*
+    *   Reset RTP data counters for the receiving side
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetReceiveDataCountersRTP() = 0;
+
+    /*
+    *   Reset RTP data counters for the sending side
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetSendDataCountersRTP() = 0;
+
+    /*
+    *   statistics of the amount of data sent and received
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 DataCountersRTP(WebRtc_UWord32 *bytesSent,
+                                        WebRtc_UWord32 *packetsSent,
+                                        WebRtc_UWord32 *bytesReceived,
+                                        WebRtc_UWord32 *packetsReceived) const = 0;
+    /*
+    *   Get received RTCP sender info
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteRTCPStat(RTCPSenderInfo* senderInfo) = 0;
+
+    /*
+    *   Get received RTCP report block
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteRTCPStat(
+        std::vector<RTCPReportBlock>* receiveBlocks) const = 0;
+    /*
+    *   Set received RTCP report block
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 AddRTCPReportBlock(
+        const WebRtc_UWord32 SSRC,
+        const RTCPReportBlock* receiveBlock) = 0;
+
+    /*
+    *   RemoveRTCPReportBlock
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoveRTCPReportBlock(const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    *   (APP) Application specific data
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTCPApplicationSpecificData(const WebRtc_UWord8 subType,
+                                                       const WebRtc_UWord32 name,
+                                                       const WebRtc_UWord8* data,
+                                                       const WebRtc_UWord16 length) = 0;
+    /*
+    *   (XR) VOIP metric
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric) = 0;
+
+    /*
+    *  (REMB) Receiver Estimated Max Bitrate
+    */
+    virtual bool REMB() const = 0;
+
+    virtual WebRtc_Word32 SetREMBStatus(const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetREMBData(const WebRtc_UWord32 bitrate,
+                                      const WebRtc_UWord8 numberOfSSRC,
+                                      const WebRtc_UWord32* SSRC) = 0;
+
+    // Used to set maximum bitrate estimate received in a REMB packet.
+    virtual WebRtc_Word32 SetMaximumBitrateEstimate(
+        const WebRtc_UWord32 bitrate) = 0;
+
+    // Registers an observer to call when the estimate of the incoming channel
+    // changes.
+    virtual bool SetRemoteBitrateObserver(
+        RtpRemoteBitrateObserver* observer) = 0;
+
+    /*
+    *   (IJ) Extended jitter report.
+    */
+    virtual bool IJ() const = 0;
+
+    virtual WebRtc_Word32 SetIJStatus(const bool enable) = 0;
+
+    /*
+    *   (TMMBR) Temporary Max Media Bit Rate
+    */
+    virtual bool TMMBR() const = 0;
+
+    /*
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetTMMBRStatus(const bool enable) = 0;
+
+    /*
+    *    local bw estimation changed
+    *
+    *    for video called by internal estimator
+    *    for audio (iSAC) called by engine, getting the data from the decoder
+    */
+    virtual void OnBandwidthEstimateUpdate(WebRtc_UWord16 bandWidthKbit) = 0;
+
+    /*
+    *   (NACK)
+    */
+    virtual NACKMethod NACK() const  = 0;
+
+    /*
+    *   Turn negative acknowledgement requests on/off
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetNACKStatus(const NACKMethod method) = 0;
+
+    /*
+     *  TODO(holmer): Propagate this API to VideoEngine.
+     *  Returns the currently configured selective retransmission settings.
+     */
+    virtual int SelectiveRetransmissions() const = 0;
+
+    /*
+     *  TODO(holmer): Propagate this API to VideoEngine.
+     *  Sets the selective retransmission settings, which will decide which
+     *  packets will be retransmitted if NACKed. Settings are constructed by
+     *  combining the constants in enum RetransmissionMode with bitwise OR.
+     *  All packets are retransmitted if kRetransmitAllPackets is set, while no
+     *  packets are retransmitted if kRetransmitOff is set.
+     *  By default all packets except FEC packets are retransmitted. For VP8
+     *  with temporal scalability only base layer packets are retransmitted.
+     *
+     *  Returns -1 on failure, otherwise 0.
+     */
+    virtual int SetSelectiveRetransmissions(uint8_t settings) = 0;
+
+    /*
+    *   Send a Negative acknowledgement packet
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendNACK(const WebRtc_UWord16* nackList,
+                                   const WebRtc_UWord16 size) = 0;
+
+    /*
+    *   Store the sent packets, needed to answer to a Negative acknowledgement requests
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetStorePacketsStatus(const bool enable, const WebRtc_UWord16 numberToStore = 200) = 0;
+
+    /**************************************************************************
+    *
+    *   Audio
+    *
+    ***************************************************************************/
+
+    /*
+    *   RegisterAudioCallback
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback) = 0;
+
+    /*
+    *   set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples) = 0;
+
+    /*
+    *   Outband TelephoneEvent(DTMF) detection
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetTelephoneEventStatus(const bool enable,
+                                                const bool forwardToDecoder,
+                                                const bool detectEndOfTone = false) = 0;
+
+    /*
+    *   Is outband TelephoneEvent(DTMF) turned on/off?
+    */
+    virtual bool TelephoneEvent() const = 0;
+
+    /*
+    *   Returns true if received DTMF events are forwarded to the decoder using
+    *    the OnPlayTelephoneEvent callback.
+    */
+    virtual bool TelephoneEventForwardToDecoder() const = 0;
+
+    /*
+    *   SendTelephoneEventActive
+    *
+    *   return true if we currently send a telephone event and 100 ms after an event is sent
+    *   used to prevent the telephone event tone from being recorded by the microphone and sent in-band
+    *   just after the tone has ended
+    */
+    virtual bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const = 0;
+
+    /*
+    *   Send a TelephoneEvent tone using RFC 2833 (4733)
+    *
+    *   return -1 on failure else 0
+    */
+      virtual WebRtc_Word32 SendTelephoneEventOutband(const WebRtc_UWord8 key,
+                                                  const WebRtc_UWord16 time_ms,
+                                                  const WebRtc_UWord8 level) = 0;
+
+    /*
+    *   Set payload type for Redundant Audio Data RFC 2198
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSendREDPayloadType(const WebRtc_Word8 payloadType) = 0;
+
+    /*
+    *   Get payload type for Redundant Audio Data RFC 2198
+    *
+    *   return -1 on failure else 0
+    */
+     virtual WebRtc_Word32 SendREDPayloadType(WebRtc_Word8& payloadType) const = 0;
+
+     /*
+     * Set status and ID for header-extension-for-audio-level-indication.
+     * See https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+     * for more details.
+     *
+     * return -1 on failure else 0
+     */
+     virtual WebRtc_Word32 SetRTPAudioLevelIndicationStatus(const bool enable,
+                                                          const WebRtc_UWord8 ID) = 0;
+
+     /*
+     * Get status and ID for header-extension-for-audio-level-indication.
+     *
+     * return -1 on failure else 0
+     */
+     virtual WebRtc_Word32 GetRTPAudioLevelIndicationStatus(bool& enable,
+                                                          WebRtc_UWord8& ID) const = 0;
+
+     /*
+     * Store the audio level in dBov for header-extension-for-audio-level-indication.
+     * This API shall be called before transmission of an RTP packet to ensure
+     * that the |level| part of the extended RTP header is updated.
+     *
+     * return -1 on failure else 0.
+     */
+     virtual WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov) = 0;
+
+    /**************************************************************************
+    *
+    *   Video
+    *
+    ***************************************************************************/
+
+    /*
+    *   Register a callback object that will receive callbacks for video related events
+    *   such as an incoming key frame request.
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback) = 0;
+
+    /*
+    *   Set the estimated camera delay in MS
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS) = 0;
+
+    /*
+    *   Set the start and max send bitrate
+    *   used by the bandwidth management
+    *
+    *   Not calling this or setting startBitrateKbit to 0 disables the bandwidth management
+    *
+    *   minBitrateKbit = 0 equals no min bitrate
+    *   maxBitrateKbit = 0 equals no max bitrate
+    *
+    *   return -1 on failure else 0
+    */
+    virtual void SetSendBitrate(const WebRtc_UWord32 startBitrate,
+                                const WebRtc_UWord16 minBitrateKbit,
+                                const WebRtc_UWord16 maxBitrateKbit) = 0;
+
+    /*
+    *   Turn on/off generic FEC
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                            const WebRtc_UWord8 payloadTypeRED,
+                                            const WebRtc_UWord8 payloadTypeFEC) = 0;
+
+    /*
+    *   Get generic FEC setting
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 GenericFECStatus(bool& enable,
+                                         WebRtc_UWord8& payloadTypeRED,
+                                         WebRtc_UWord8& payloadTypeFEC) = 0;
+
+
+    /*
+    *   Set FEC code rate of key and delta frames
+    *   codeRate on a scale of 0 to 255 where 255 is 100% added packets, hence protect up to 50% packet loss
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
+                                       const WebRtc_UWord8 deltaFrameCodeRate) = 0;
+
+
+    /*
+    *   Set FEC unequal protection (UEP) across packets,
+    *   for key and delta frames.
+    *
+    *   If keyUseUepProtection is true UEP is enabled for key frames.
+    *   If deltaUseUepProtection is true UEP is enabled for delta frames.
+    *
+    *   UEP skews the FEC protection towards being spent more on the
+    *   important packets, at the cost of less FEC protection for the
+    *   non-important packets.
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
+                                          const bool deltaUseUepProtection) = 0;
+
+
+    /*
+    *   Set method for requesting a new key frame
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method) = 0;
+
+    /*
+    *   send a request for a keyframe
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RequestKeyFrame() = 0;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
diff --git a/trunk/src/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/trunk/src/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
new file mode 100644
index 0000000..b6a8a54
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
@@ -0,0 +1,282 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
+#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+#ifndef NULL
+    #define NULL    0
+#endif
+
+#define RTCP_CNAME_SIZE 256    // RFC 3550 page 44, including null termination
+#define IP_PACKET_SIZE 1500    // we assume ethernet
+#define MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS 10
+#define TIMEOUT_SEI_MESSAGES_MS 30000   // in milliseconds
+
+namespace webrtc{
+enum RTCPMethod
+{
+    kRtcpOff          = 0,
+    kRtcpCompound     = 1,
+    kRtcpNonCompound = 2
+};
+
+enum RTPAliveType
+{
+    kRtpDead   = 0,
+    kRtpNoRtp = 1,
+    kRtpAlive  = 2
+};
+
+enum StorageType {
+  kDontStore,
+  kDontRetransmit,
+  kAllowRetransmission
+};
+
+enum RTPExtensionType
+{
+   kRtpExtensionNone,
+   kRtpExtensionTransmissionTimeOffset,
+   kRtpExtensionAudioLevel,
+};
+
+enum RTCPAppSubTypes
+{
+    kAppSubtypeBwe     = 0x00
+};
+
+enum RTCPPacketType
+{
+    kRtcpReport         = 0x0001,
+    kRtcpSr             = 0x0002,
+    kRtcpRr             = 0x0004,
+    kRtcpBye            = 0x0008,
+    kRtcpPli            = 0x0010,
+    kRtcpNack           = 0x0020,
+    kRtcpFir            = 0x0040,
+    kRtcpTmmbr          = 0x0080,
+    kRtcpTmmbn          = 0x0100,
+    kRtcpSrReq          = 0x0200,
+    kRtcpXrVoipMetric   = 0x0400,
+    kRtcpApp            = 0x0800,
+    kRtcpSli            = 0x4000,
+    kRtcpRpsi           = 0x8000,
+    kRtcpRemb           = 0x10000,
+    kRtcpTransmissionTimeOffset = 0x20000
+};
+
+enum KeyFrameRequestMethod
+{
+    kKeyFrameReqFirRtp    = 1,
+    kKeyFrameReqPliRtcp   = 2,
+    kKeyFrameReqFirRtcp   = 3
+};
+
+enum RtpRtcpPacketType
+{
+    kPacketRtp        = 0,
+    kPacketKeepAlive = 1
+};
+
+enum NACKMethod
+{
+    kNackOff      = 0,
+    kNackRtcp     = 2
+};
+
+enum RetransmissionMode {
+  kRetransmitOff          = 0x0,
+  kRetransmitFECPackets   = 0x1,
+  kRetransmitBaseLayer    = 0x2,
+  kRetransmitHigherLayers = 0x4,
+  kRetransmitAllPackets   = 0xFF
+};
+
+struct RTCPSenderInfo
+{
+    WebRtc_UWord32 NTPseconds;
+    WebRtc_UWord32 NTPfraction;
+    WebRtc_UWord32 RTPtimeStamp;
+    WebRtc_UWord32 sendPacketCount;
+    WebRtc_UWord32 sendOctetCount;
+};
+
+struct RTCPReportBlock
+{
+  // Fields as described by RFC 3550 6.4.2.
+    WebRtc_UWord32 remoteSSRC;  // SSRC of sender of this report.
+    WebRtc_UWord32 sourceSSRC;  // SSRC of the RTP packet sender.
+    WebRtc_UWord8 fractionLost;
+    WebRtc_UWord32 cumulativeLost;  // 24 bits valid
+    WebRtc_UWord32 extendedHighSeqNum;
+    WebRtc_UWord32 jitter;
+    WebRtc_UWord32 lastSR;
+    WebRtc_UWord32 delaySinceLastSR;
+};
+
+class RtpData
+{
+public:
+    virtual WebRtc_Word32 OnReceivedPayloadData(
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadSize,
+        const WebRtcRTPHeader* rtpHeader) = 0;
+protected:
+    virtual ~RtpData() {}
+};
+
+class RtcpFeedback
+{
+public:
+    // if audioVideoOffset > 0 video is behind audio
+    virtual void OnLipSyncUpdate(const WebRtc_Word32 /*id*/,
+                                 const WebRtc_Word32 /*audioVideoOffset*/)  {};
+
+    virtual void OnApplicationDataReceived(const WebRtc_Word32 /*id*/,
+                                           const WebRtc_UWord8 /*subType*/,
+                                           const WebRtc_UWord32 /*name*/,
+                                           const WebRtc_UWord16 /*length*/,
+                                           const WebRtc_UWord8* /*data*/)  {};
+
+    virtual void OnXRVoIPMetricReceived(
+        const WebRtc_Word32 /*id*/,
+        const RTCPVoIPMetric* /*metric*/,
+        const WebRtc_Word8 /*VoIPmetricBuffer*/[28])  {};
+
+    virtual void OnRTCPPacketTimeout(const WebRtc_Word32 /*id*/)  {};
+
+    virtual void OnTMMBRReceived(const WebRtc_Word32 /*id*/,
+                                 const WebRtc_UWord16 /*bwEstimateKbit*/)  {};
+
+    virtual void OnSLIReceived(const WebRtc_Word32 /*id*/,
+                               const WebRtc_UWord8 /*pictureId*/) {};
+
+    virtual void OnRPSIReceived(const WebRtc_Word32 /*id*/,
+                                const WebRtc_UWord64 /*pictureId*/) {};
+
+    virtual void OnReceiverEstimatedMaxBitrateReceived(
+        const WebRtc_Word32 /*id*/,
+        const WebRtc_UWord32 /*bitRate*/) {};
+
+    virtual void OnSendReportReceived(const WebRtc_Word32 id,
+                                      const WebRtc_UWord32 senderSSRC)  {};
+
+    virtual void OnReceiveReportReceived(const WebRtc_Word32 id,
+                                         const WebRtc_UWord32 senderSSRC)  {};
+
+protected:
+    virtual ~RtcpFeedback() {}
+};
+
+class RtpFeedback
+{
+public:
+    // Receiving payload change or SSRC change. (return success!)
+    /*
+    *   channels    - number of channels in codec (1 = mono, 2 = stereo)
+    */
+    virtual WebRtc_Word32 OnInitializeDecoder(
+        const WebRtc_Word32 id,
+        const WebRtc_Word8 payloadType,
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const int frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate) = 0;
+
+    virtual void OnPacketTimeout(const WebRtc_Word32 id) = 0;
+
+    virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                  const RtpRtcpPacketType packetType) = 0;
+
+    virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                       const RTPAliveType alive) = 0;
+
+    virtual void OnIncomingSSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 SSRC) = 0;
+
+    virtual void OnIncomingCSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 CSRC,
+                                        const bool added) = 0;
+
+protected:
+    virtual ~RtpFeedback() {}
+};
+
+class RtpAudioFeedback
+{
+public:
+    virtual void OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                          const WebRtc_UWord8 event,
+                                          const bool endOfEvent) = 0;
+
+    virtual void OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                                      const WebRtc_UWord8 event,
+                                      const WebRtc_UWord16 lengthMs,
+                                      const WebRtc_UWord8 volume) = 0;
+
+protected:
+    virtual ~RtpAudioFeedback() {}
+};
+
+
+class RtpVideoFeedback
+{
+public:
+    // this function should call codec module to inform it about the request
+    virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
+                                             const FrameType type,
+                                             const WebRtc_UWord8 streamIdx) = 0;
+
+    virtual void OnNetworkChanged(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 bitrateBps,
+                                  const WebRtc_UWord8 fractionLost,
+                                  const WebRtc_UWord16 roundTripTimeMs) = 0;
+
+protected:
+    virtual ~RtpVideoFeedback() {}
+};
+
+// A clock interface that allows reading of absolute and relative
+// timestamps in an RTP/RTCP module.
+class RtpRtcpClock {
+public:
+    virtual ~RtpRtcpClock() {}
+
+    // Return a timestamp in milliseconds relative to some arbitrary
+    // source; the source is fixed for this clock.
+    virtual WebRtc_UWord32 GetTimeInMS() = 0;
+
+    // Retrieve an NTP absolute timestamp.
+    virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) = 0;
+};
+
+// RtpReceiveBitrateUpdate is used to signal changes in bitrate estimates for
+// the incoming stream.
+class RtpRemoteBitrateObserver
+{
+public:
+    // Called when a receive channel has a new bitrate estimate for the incoming
+    // stream.
+    virtual void OnReceiveBitrateChanged(unsigned int ssrc,
+                                         unsigned int bitrate) = 0;
+
+    // Called when a REMB packet has been received.
+    virtual void OnReceivedRemb(unsigned int bitrate) = 0;
+
+    virtual ~RtpRemoteBitrateObserver() {}
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
diff --git a/trunk/src/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/trunk/src/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
new file mode 100644
index 0000000..cc71e7b
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -0,0 +1,288 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../testing/gmock/include/gmock/gmock.h"
+
+#include "modules/interface/module.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+// Google Mock stub of the RtpRtcp module interface, used by unit tests to
+// stub out and verify calls into the RTP/RTCP module.  Continuation-line
+// indentation has been normalized to the file's 2/6-space convention.
+class MockRtpRtcp : public RtpRtcp {
+ public:
+  MOCK_METHOD1(ChangeUniqueId,
+      WebRtc_Word32(const WebRtc_Word32 id));
+  MOCK_METHOD1(RegisterDefaultModule,
+      WebRtc_Word32(RtpRtcp* module));
+  MOCK_METHOD0(DeRegisterDefaultModule,
+      WebRtc_Word32());
+  MOCK_METHOD0(DefaultModuleRegistered,
+      bool());
+  MOCK_METHOD0(NumberChildModules,
+      WebRtc_UWord32());
+  MOCK_METHOD1(RegisterSyncModule,
+      WebRtc_Word32(RtpRtcp* module));
+  MOCK_METHOD0(DeRegisterSyncModule,
+      WebRtc_Word32());
+  MOCK_METHOD0(InitReceiver,
+      WebRtc_Word32());
+  MOCK_METHOD1(RegisterIncomingDataCallback,
+      WebRtc_Word32(RtpData* incomingDataCallback));
+  MOCK_METHOD1(RegisterIncomingRTPCallback,
+      WebRtc_Word32(RtpFeedback* incomingMessagesCallback));
+  MOCK_METHOD2(SetPacketTimeout,
+      WebRtc_Word32(const WebRtc_UWord32 RTPtimeoutMS, const WebRtc_UWord32 RTCPtimeoutMS));
+  MOCK_METHOD2(SetPeriodicDeadOrAliveStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord8 sampleTimeSeconds));
+  MOCK_METHOD2(PeriodicDeadOrAliveStatus,
+      WebRtc_Word32(bool &enable, WebRtc_UWord8 &sampleTimeSeconds));
+  MOCK_METHOD1(RegisterReceivePayload,
+      WebRtc_Word32(const CodecInst& voiceCodec));
+  MOCK_METHOD1(RegisterReceivePayload,
+      WebRtc_Word32(const VideoCodec& videoCodec));
+  MOCK_METHOD2(ReceivePayloadType,
+      WebRtc_Word32(const CodecInst& voiceCodec, WebRtc_Word8* plType));
+  MOCK_METHOD2(ReceivePayloadType,
+      WebRtc_Word32(const VideoCodec& videoCodec, WebRtc_Word8* plType));
+  MOCK_METHOD1(DeRegisterReceivePayload,
+      WebRtc_Word32(const WebRtc_Word8 payloadType));
+  MOCK_METHOD2(RegisterReceiveRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type, const WebRtc_UWord8 id));
+  MOCK_METHOD1(DeregisterReceiveRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type));
+  MOCK_CONST_METHOD0(RemoteTimestamp,
+      WebRtc_UWord32());
+  MOCK_CONST_METHOD1(EstimatedRemoteTimeStamp,
+      WebRtc_Word32(WebRtc_UWord32& timestamp));
+  MOCK_CONST_METHOD0(RemoteSSRC,
+      WebRtc_UWord32());
+  MOCK_CONST_METHOD1(RemoteCSRCs,
+      WebRtc_Word32(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]));
+  MOCK_CONST_METHOD1(SSRCFilter,
+      WebRtc_Word32(WebRtc_UWord32& allowedSSRC));
+  MOCK_METHOD2(SetSSRCFilter,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord32 allowedSSRC));
+  MOCK_METHOD2(SetRTXReceiveStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord32 SSRC));
+  MOCK_CONST_METHOD2(RTXReceiveStatus,
+      WebRtc_Word32(bool* enable, WebRtc_UWord32* SSRC));
+  MOCK_METHOD2(IncomingPacket,
+      WebRtc_Word32(const WebRtc_UWord8* incomingPacket, const WebRtc_UWord16 packetLength));
+  MOCK_METHOD4(IncomingAudioNTP,
+      WebRtc_Word32(const WebRtc_UWord32 audioReceivedNTPsecs, const WebRtc_UWord32 audioReceivedNTPfrac, const WebRtc_UWord32 audioRTCPArrivalTimeSecs, const WebRtc_UWord32 audioRTCPArrivalTimeFrac));
+  MOCK_METHOD0(InitSender,
+      WebRtc_Word32());
+  MOCK_METHOD1(RegisterSendTransport,
+      WebRtc_Word32(Transport* outgoingTransport));
+  MOCK_METHOD1(SetMaxTransferUnit,
+      WebRtc_Word32(const WebRtc_UWord16 size));
+  MOCK_METHOD3(SetTransportOverhead,
+      WebRtc_Word32(const bool TCP, const bool IPV6, const WebRtc_UWord8 authenticationOverhead));
+  MOCK_CONST_METHOD0(MaxPayloadLength,
+      WebRtc_UWord16());
+  MOCK_CONST_METHOD0(MaxDataPayloadLength,
+      WebRtc_UWord16());
+  MOCK_METHOD3(SetRTPKeepaliveStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_Word8 unknownPayloadType, const WebRtc_UWord16 deltaTransmitTimeMS));
+  MOCK_CONST_METHOD3(RTPKeepaliveStatus,
+      WebRtc_Word32(bool* enable, WebRtc_Word8* unknownPayloadType, WebRtc_UWord16* deltaTransmitTimeMS));
+  MOCK_CONST_METHOD0(RTPKeepalive,
+      bool());
+  MOCK_METHOD1(RegisterSendPayload,
+      WebRtc_Word32(const CodecInst& voiceCodec));
+  MOCK_METHOD1(RegisterSendPayload,
+      WebRtc_Word32(const VideoCodec& videoCodec));
+  MOCK_METHOD1(DeRegisterSendPayload,
+      WebRtc_Word32(const WebRtc_Word8 payloadType));
+  MOCK_METHOD2(RegisterSendRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type, const WebRtc_UWord8 id));
+  MOCK_METHOD1(DeregisterSendRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type));
+  MOCK_METHOD1(SetTransmissionSmoothingStatus,
+      void(const bool enable));
+  MOCK_CONST_METHOD0(TransmissionSmoothingStatus,
+      bool());
+  MOCK_CONST_METHOD0(StartTimestamp,
+      WebRtc_UWord32());
+  MOCK_METHOD1(SetStartTimestamp,
+      WebRtc_Word32(const WebRtc_UWord32 timestamp));
+  MOCK_CONST_METHOD0(SequenceNumber,
+      WebRtc_UWord16());
+  MOCK_METHOD1(SetSequenceNumber,
+      WebRtc_Word32(const WebRtc_UWord16 seq));
+  MOCK_CONST_METHOD0(SSRC,
+      WebRtc_UWord32());
+  MOCK_METHOD1(SetSSRC,
+      WebRtc_Word32(const WebRtc_UWord32 ssrc));
+  MOCK_CONST_METHOD1(CSRCs,
+      WebRtc_Word32(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]));
+  MOCK_METHOD2(SetCSRCs,
+      WebRtc_Word32(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize], const WebRtc_UWord8 arrLength));
+  MOCK_METHOD1(SetCSRCStatus,
+      WebRtc_Word32(const bool include));
+  MOCK_METHOD3(SetRTXSendStatus,
+      WebRtc_Word32(const bool enable, const bool setSSRC, const WebRtc_UWord32 SSRC));
+  MOCK_CONST_METHOD2(RTXSendStatus,
+      WebRtc_Word32(bool* enable, WebRtc_UWord32* SSRC));
+  MOCK_METHOD1(SetSendingStatus,
+      WebRtc_Word32(const bool sending));
+  MOCK_CONST_METHOD0(Sending,
+      bool());
+  MOCK_METHOD1(SetSendingMediaStatus,
+      WebRtc_Word32(const bool sending));
+  MOCK_CONST_METHOD0(SendingMedia,
+      bool());
+  MOCK_CONST_METHOD4(BitrateSent,
+      void(WebRtc_UWord32* totalRate, WebRtc_UWord32* videoRate, WebRtc_UWord32* fecRate, WebRtc_UWord32* nackRate));
+  MOCK_CONST_METHOD1(EstimatedSendBandwidth,
+      int(WebRtc_UWord32* available_bandwidth));
+  MOCK_CONST_METHOD1(EstimatedReceiveBandwidth,
+      int(WebRtc_UWord32* available_bandwidth));
+  MOCK_METHOD7(SendOutgoingData,
+      WebRtc_Word32(const FrameType frameType, const WebRtc_Word8 payloadType, const WebRtc_UWord32 timeStamp, const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize, const RTPFragmentationHeader* fragmentation, const RTPVideoHeader* rtpVideoHdr));
+  MOCK_METHOD1(RegisterIncomingRTCPCallback,
+      WebRtc_Word32(RtcpFeedback* incomingMessagesCallback));
+  MOCK_CONST_METHOD0(RTCP,
+      RTCPMethod());
+  MOCK_METHOD1(SetRTCPStatus,
+      WebRtc_Word32(const RTCPMethod method));
+  MOCK_METHOD1(SetCNAME,
+      WebRtc_Word32(const char cName[RTCP_CNAME_SIZE]));
+  MOCK_METHOD1(CNAME,
+      WebRtc_Word32(char cName[RTCP_CNAME_SIZE]));
+  MOCK_CONST_METHOD2(RemoteCNAME,
+      WebRtc_Word32(const WebRtc_UWord32 remoteSSRC,
+                    char cName[RTCP_CNAME_SIZE]));
+  MOCK_CONST_METHOD4(RemoteNTP,
+      WebRtc_Word32(WebRtc_UWord32 *ReceivedNTPsecs, WebRtc_UWord32 *ReceivedNTPfrac, WebRtc_UWord32 *RTCPArrivalTimeSecs, WebRtc_UWord32 *RTCPArrivalTimeFrac));
+  MOCK_METHOD2(AddMixedCNAME,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC, const WebRtc_Word8 cName[RTCP_CNAME_SIZE]));
+  MOCK_METHOD1(RemoveMixedCNAME,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC));
+  MOCK_CONST_METHOD5(RTT,
+      WebRtc_Word32(const WebRtc_UWord32 remoteSSRC, WebRtc_UWord16* RTT, WebRtc_UWord16* avgRTT, WebRtc_UWord16* minRTT, WebRtc_UWord16* maxRTT));
+  MOCK_METHOD1(ResetRTT,
+      WebRtc_Word32(const WebRtc_UWord32 remoteSSRC));
+  MOCK_METHOD1(SendRTCP,
+      WebRtc_Word32(WebRtc_UWord32 rtcpPacketType));
+  MOCK_METHOD1(SendRTCPReferencePictureSelection,
+      WebRtc_Word32(const WebRtc_UWord64 pictureID));
+  MOCK_METHOD1(SendRTCPSliceLossIndication,
+      WebRtc_Word32(const WebRtc_UWord8 pictureID));
+  MOCK_METHOD0(ResetStatisticsRTP,
+      WebRtc_Word32());
+  MOCK_CONST_METHOD5(StatisticsRTP,
+      WebRtc_Word32(WebRtc_UWord8 *fraction_lost, WebRtc_UWord32 *cum_lost, WebRtc_UWord32 *ext_max, WebRtc_UWord32 *jitter, WebRtc_UWord32 *max_jitter));
+  MOCK_METHOD0(ResetReceiveDataCountersRTP,
+      WebRtc_Word32());
+  MOCK_METHOD0(ResetSendDataCountersRTP,
+      WebRtc_Word32());
+  MOCK_CONST_METHOD4(DataCountersRTP,
+      WebRtc_Word32(WebRtc_UWord32 *bytesSent, WebRtc_UWord32 *packetsSent, WebRtc_UWord32 *bytesReceived, WebRtc_UWord32 *packetsReceived));
+  MOCK_METHOD1(RemoteRTCPStat,
+      WebRtc_Word32(RTCPSenderInfo* senderInfo));
+  MOCK_CONST_METHOD1(RemoteRTCPStat,
+      WebRtc_Word32(std::vector<RTCPReportBlock>* receiveBlocks));
+  MOCK_METHOD2(AddRTCPReportBlock,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC, const RTCPReportBlock* receiveBlock));
+  MOCK_METHOD1(RemoveRTCPReportBlock,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC));
+  MOCK_METHOD4(SetRTCPApplicationSpecificData,
+      WebRtc_Word32(const WebRtc_UWord8 subType, const WebRtc_UWord32 name, const WebRtc_UWord8* data, const WebRtc_UWord16 length));
+  MOCK_METHOD1(SetRTCPVoIPMetrics,
+      WebRtc_Word32(const RTCPVoIPMetric* VoIPMetric));
+  MOCK_CONST_METHOD0(REMB,
+      bool());
+  MOCK_METHOD1(SetREMBStatus,
+      WebRtc_Word32(const bool enable));
+  MOCK_METHOD3(SetREMBData,
+      WebRtc_Word32(const WebRtc_UWord32 bitrate, const WebRtc_UWord8 numberOfSSRC, const WebRtc_UWord32* SSRC));
+  MOCK_METHOD1(SetMaximumBitrateEstimate,
+      WebRtc_Word32(const WebRtc_UWord32 bitrate));
+  MOCK_METHOD1(SetRemoteBitrateObserver,
+      bool(RtpRemoteBitrateObserver*));
+  MOCK_CONST_METHOD0(IJ,
+      bool());
+  MOCK_METHOD1(SetIJStatus,
+      WebRtc_Word32(const bool));
+  MOCK_CONST_METHOD0(TMMBR,
+      bool());
+  MOCK_METHOD1(SetTMMBRStatus,
+      WebRtc_Word32(const bool enable));
+  MOCK_METHOD1(OnBandwidthEstimateUpdate,
+      void(WebRtc_UWord16 bandWidthKbit));
+  MOCK_CONST_METHOD0(NACK,
+      NACKMethod());
+  MOCK_METHOD1(SetNACKStatus,
+      WebRtc_Word32(const NACKMethod method));
+  MOCK_CONST_METHOD0(SelectiveRetransmissions,
+      int());
+  MOCK_METHOD1(SetSelectiveRetransmissions,
+      int(uint8_t settings));
+  MOCK_METHOD2(SendNACK,
+      WebRtc_Word32(const WebRtc_UWord16* nackList, const WebRtc_UWord16 size));
+  MOCK_METHOD2(SetStorePacketsStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord16 numberToStore));
+  MOCK_METHOD1(RegisterAudioCallback,
+      WebRtc_Word32(RtpAudioFeedback* messagesCallback));
+  MOCK_METHOD1(SetAudioPacketSize,
+      WebRtc_Word32(const WebRtc_UWord16 packetSizeSamples));
+  MOCK_METHOD3(SetTelephoneEventStatus,
+      WebRtc_Word32(const bool enable, const bool forwardToDecoder, const bool detectEndOfTone));
+  MOCK_CONST_METHOD0(TelephoneEvent,
+      bool());
+  MOCK_CONST_METHOD0(TelephoneEventForwardToDecoder,
+      bool());
+  MOCK_CONST_METHOD1(SendTelephoneEventActive,
+      bool(WebRtc_Word8& telephoneEvent));
+  MOCK_METHOD3(SendTelephoneEventOutband,
+      WebRtc_Word32(const WebRtc_UWord8 key, const WebRtc_UWord16 time_ms, const WebRtc_UWord8 level));
+  MOCK_METHOD1(SetSendREDPayloadType,
+      WebRtc_Word32(const WebRtc_Word8 payloadType));
+  MOCK_CONST_METHOD1(SendREDPayloadType,
+      WebRtc_Word32(WebRtc_Word8& payloadType));
+  MOCK_METHOD2(SetRTPAudioLevelIndicationStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord8 ID));
+  MOCK_CONST_METHOD2(GetRTPAudioLevelIndicationStatus,
+      WebRtc_Word32(bool& enable, WebRtc_UWord8& ID));
+  MOCK_METHOD1(SetAudioLevel,
+      WebRtc_Word32(const WebRtc_UWord8 level_dBov));
+  MOCK_METHOD1(RegisterIncomingVideoCallback,
+      WebRtc_Word32(RtpVideoFeedback* incomingMessagesCallback));
+  MOCK_METHOD1(SetCameraDelay,
+      WebRtc_Word32(const WebRtc_Word32 delayMS));
+  MOCK_METHOD3(SetSendBitrate,
+      void(const WebRtc_UWord32 startBitrate, const WebRtc_UWord16 minBitrateKbit, const WebRtc_UWord16 maxBitrateKbit));
+  MOCK_METHOD3(SetGenericFECStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord8 payloadTypeRED, const WebRtc_UWord8 payloadTypeFEC));
+  MOCK_METHOD3(GenericFECStatus,
+      WebRtc_Word32(bool& enable, WebRtc_UWord8& payloadTypeRED, WebRtc_UWord8& payloadTypeFEC));
+  MOCK_METHOD2(SetFECCodeRate,
+      WebRtc_Word32(const WebRtc_UWord8 keyFrameCodeRate, const WebRtc_UWord8 deltaFrameCodeRate));
+  MOCK_METHOD2(SetFECUepProtection,
+      WebRtc_Word32(const bool keyUseUepProtection, const bool deltaUseUepProtection));
+  MOCK_METHOD1(SetKeyFrameRequestMethod,
+      WebRtc_Word32(const KeyFrameRequestMethod method));
+  MOCK_METHOD0(RequestKeyFrame,
+      WebRtc_Word32());
+
+  // Module interface (see modules/interface/module.h).
+  MOCK_CONST_METHOD3(Version,
+      int32_t(char* version, uint32_t& remaining_buffer_in_bytes, uint32_t& position));
+  MOCK_METHOD0(TimeUntilNextProcess,
+      int32_t());
+  MOCK_METHOD0(Process,
+      int32_t());
+
+  // Members.
+  unsigned int remote_ssrc_;
+};
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/Android.mk b/trunk/src/modules/rtp_rtcp/source/Android.mk
new file mode 100644
index 0000000..68295be
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/Android.mk
@@ -0,0 +1,67 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC build settings ($(MY_WEBRTC_COMMON_DEFS) etc.).
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_rtp_rtcp
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_GENERATED_SOURCES :=
+LOCAL_SRC_FILES := \
+    bitrate.cc \
+    rtp_rtcp_impl.cc \
+    rtcp_receiver.cc \
+    rtcp_receiver_help.cc \
+    rtcp_sender.cc \
+    rtcp_utility.cc \
+    rtp_receiver.cc \
+    rtp_sender.cc \
+    rtp_utility.cc \
+    rtp_header_extension.cc \
+    ssrc_database.cc \
+    tmmbr_help.cc \
+    dtmf_queue.cc \
+    rtp_receiver_audio.cc \
+    rtp_sender_audio.cc \
+    bandwidth_management.cc \
+    forward_error_correction.cc \
+    forward_error_correction_internal.cc \
+    overuse_detector.cc \
+    remote_rate_control.cc \
+    rtp_packet_history.cc \
+    receiver_fec.cc \
+    rtp_receiver_video.cc \
+    rtp_sender_video.cc \
+    rtp_format_vp8.cc \
+    transmission_bucket.cc \
+    vp8_partition_aggregator.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../../system_wrappers/interface
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# Platform (non-NDK) builds pull in the STLport make fragment explicitly.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/rtp_rtcp/source/Bitrate.h b/trunk/src/modules/rtp_rtcp/source/Bitrate.h
new file mode 100644
index 0000000..ab5637b
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/Bitrate.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BITRATE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BITRATE_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"     // misc. defines (e.g. MAX_PACKET_LENGTH)
+#include "common_types.h"            // Transport
+#include <stdio.h>
+#include <list>
+
+namespace webrtc {
+class RtpRtcpClock;
+
+// Tracks byte and packet counts for a stream and exposes smoothed
+// packet-rate and bitrate figures (implementation not visible here).
+class Bitrate
+{
+public:
+    // `clock` supplies timestamps for the rate calculation; it is held by
+    // reference (see _clock below) and must outlive this object.
+    Bitrate(RtpRtcpClock* clock);
+
+    // initialize members
+    void Init();
+
+    // calculate rates
+    void Process();
+
+    // update with a packet of `bytes` bytes
+    void Update(const WebRtc_Word32 bytes);
+
+    // packet rate last second, updated roughly every 100 ms
+    WebRtc_UWord32 PacketRate() const;
+
+    // bitrate last second, updated roughly every 100 ms
+    WebRtc_UWord32 BitrateLast() const;
+
+    // bitrate last second, updated now
+    WebRtc_UWord32 BitrateNow() const;
+
+protected:
+    RtpRtcpClock&             _clock;  // time source; not owned
+
+private:
+    WebRtc_UWord32            _packetRate;
+    WebRtc_UWord32            _bitrate;
+    // Sample history; _bitrateNextIdx presumably indexes the next slot in
+    // the 10-entry arrays below -- implementation not visible here.
+    WebRtc_UWord8             _bitrateNextIdx;
+    WebRtc_UWord32            _packetRateArray[10];
+    WebRtc_UWord32            _bitrateArray[10];
+    WebRtc_UWord32            _bitrateDiffMS[10];
+    WebRtc_UWord32            _timeLastRateUpdate;
+    WebRtc_UWord32            _bytesCount;
+    WebRtc_UWord32            _packetCount;
+};
+
+// A (size in bytes, completion time in ms) sample, used by BitRateStats.
+struct DataTimeSizeTuple
+{
+    DataTimeSizeTuple(WebRtc_UWord32 sizeBytes, WebRtc_Word64 timeCompleteMs) :
+                            _sizeBytes(sizeBytes),
+                            _timeCompleteMs(timeCompleteMs) {}
+
+    WebRtc_UWord32    _sizeBytes;       // sample size in bytes
+    WebRtc_Word64     _timeCompleteMs;  // completion time in milliseconds
+};
+
+// Computes a bitrate from accumulated packet-size samples; EraseOld()
+// presumably drops samples that have aged out of the measurement window
+// (implementation not visible here).
+class BitRateStats
+{
+public:
+    BitRateStats();
+    ~BitRateStats();
+
+    // Reset to an empty sample set.
+    void Init();
+    // Record a sample of `packetSizeBytes` observed at time `nowMs`.
+    void Update(WebRtc_UWord32 packetSizeBytes, WebRtc_Word64 nowMs);
+    // Current bitrate estimate at time `nowMs`.
+    WebRtc_UWord32 BitRate(WebRtc_Word64 nowMs);
+
+private:
+    // Remove samples no longer relevant at time `nowMs`.
+    void EraseOld(WebRtc_Word64 nowMs);
+
+    std::list<DataTimeSizeTuple*> _dataSamples;       // heap-allocated samples
+    WebRtc_UWord32                _accumulatedBytes;  // running byte total
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BITRATE_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/bitstream_builder.cc b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_builder.cc
new file mode 100644
index 0000000..05b7e2f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_builder.cc
@@ -0,0 +1,580 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "bitstream_builder.h"
+
+#include <string.h>
+
+namespace webrtc {
+// Wraps an externally owned buffer of `dataSize` bytes and starts writing
+// at bit 0.  The buffer is zeroed because the Add*Bits() writers merge
+// bits in with '+=', which only works over zeroed bytes.
+BitstreamBuilder::BitstreamBuilder(WebRtc_UWord8* data, const WebRtc_UWord32 dataSize) :
+    _data(data),
+    _dataSize(dataSize),
+    _byteOffset(0),
+    _bitOffset(0)
+{
+    memset(data, 0, dataSize);
+}
+
+WebRtc_UWord32
+BitstreamBuilder::Length() const
+{
+    // Number of bytes written so far; a partially filled byte counts as a
+    // full byte of output.
+    if (_bitOffset > 0)
+    {
+        return _byteOffset + 1;
+    }
+    return _byteOffset;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add1Bit(const WebRtc_UWord8 bit)
+{
+    // Append one bit (the LSB of `bit`).  Returns 0 on success, -1 when
+    // the buffer is full.
+    // The original check (_bitOffset + 1 > 8) could never be true since
+    // _bitOffset <= 7, so a write into a completely full buffer was never
+    // rejected and ran past the end of _data.  Check the total bit
+    // capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 1 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bit);
+    return 0;
+}
+
+void
+BitstreamBuilder::Add1BitWithoutSanity(const WebRtc_UWord8 bit)
+{
+    // Write the LSB of `bit` at the current position (bits are packed
+    // MSB-first within each byte), then advance the cursor.  No bounds
+    // checking is performed; callers must have verified space.
+    if(bit & 0x1)
+    {
+        _data[_byteOffset] += (1 << (7 - _bitOffset));
+    }
+    _bitOffset++;
+    if(_bitOffset == 8)
+    {
+        // byte filled up; move on to the next one
+        _bitOffset = 0;
+        _byteOffset++;
+    }
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add2Bits(const WebRtc_UWord8 bits)
+{
+    // Append the 2 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would overrun the buffer.
+    // The original test (_bitOffset + 2 > 8) only guarded the spill-over
+    // case; with _bitOffset == 0 and a full buffer it still wrote past
+    // the end.  Check the total bit capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 2 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add3Bits(const WebRtc_UWord8 bits)
+{
+    // Append the 3 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would overrun the buffer.
+    // The original test (_bitOffset + 3 > 8) only guarded the spill-over
+    // case; with a byte-aligned cursor and a full buffer it still wrote
+    // past the end.  Check the total bit capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 3 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add4Bits(const WebRtc_UWord8 bits)
+{
+    // Append the 4 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would overrun the buffer.
+    // The original test (_bitOffset + 4 > 8) only guarded the spill-over
+    // case; with a byte-aligned cursor and a full buffer it still wrote
+    // past the end.  Check the total bit capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 4 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add5Bits(const WebRtc_UWord8 bits)
+{
+    // Append the 5 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would overrun the buffer.
+    // The original test (_bitOffset + 5 > 8) only guarded the spill-over
+    // case; with a byte-aligned cursor and a full buffer it still wrote
+    // past the end.  Check the total bit capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 5 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bits >> 4);
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add6Bits(const WebRtc_UWord8 bits)
+{
+    // Append the 6 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would overrun the buffer.
+    // The original test (_bitOffset + 6 > 8) only guarded the spill-over
+    // case; with a byte-aligned cursor and a full buffer it still wrote
+    // past the end.  Check the total bit capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 6 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bits >> 5);
+    Add1BitWithoutSanity(bits >> 4);
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add7Bits(const WebRtc_UWord8 bits)
+{
+    // Append the 7 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would overrun the buffer.
+    // The original test (_bitOffset + 7 > 8) only guarded the spill-over
+    // case; with a byte-aligned cursor and a full buffer it still wrote
+    // past the end.  Check the total bit capacity instead.
+    if(_byteOffset * 8 + _bitOffset + 7 > _dataSize * 8)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    Add1BitWithoutSanity(bits >> 6);
+    Add1BitWithoutSanity(bits >> 5);
+    Add1BitWithoutSanity(bits >> 4);
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add8Bits(const WebRtc_UWord8 bits)
+{
+    // Append a full byte, MSB first.  Returns 0 on success, -1 if it
+    // would not fit in the buffer.
+    // sanity
+    if(_dataSize < Length()+1)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        // byte aligned: a single plain store
+        _data[_byteOffset] = bits;
+    } else
+    {
+        // unaligned: straddles two bytes; '+=' acts as a bitwise OR here
+        // because the constructor zeroed the buffer and every bit position
+        // is written at most once
+        _data[_byteOffset] += (bits >> _bitOffset);
+        _data[_byteOffset+1] += (bits << (8-_bitOffset));
+    }
+    // _bitOffset is unchanged; exactly one whole byte was consumed
+    _byteOffset++;
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add16Bits(const WebRtc_UWord16 bits)
+{
+    // Append 16 bits, MSB first.  Returns 0 on success, -1 if they would
+    // not fit in the buffer.
+    // sanity
+    if(_dataSize < Length()+2)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        // byte aligned: two plain stores, most significant byte first
+        _data[_byteOffset] = (WebRtc_UWord8)(bits >> 8);
+        _data[_byteOffset+1] = (WebRtc_UWord8)(bits);
+    } else
+    {
+        // unaligned: spread over three bytes; '+=' acts as a bitwise OR
+        // because the constructor zeroed the buffer
+        _data[_byteOffset] += (WebRtc_UWord8)(bits >> (_bitOffset + 8));
+        _data[_byteOffset+1] += (WebRtc_UWord8)(bits >> _bitOffset);
+        _data[_byteOffset+2] += (WebRtc_UWord8)(bits << (8-_bitOffset));
+    }
+    // _bitOffset is unchanged; two whole bytes were consumed
+    _byteOffset += 2;
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add24Bits(const WebRtc_UWord32 bits)
+{
+    // Append the 24 least-significant bits of `bits`, MSB first.
+    // Returns 0 on success, -1 if they would not fit in the buffer.
+    // sanity
+    if(_dataSize < Length()+3)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        // byte aligned: three plain stores, most significant byte first
+        _data[_byteOffset] = (WebRtc_UWord8)(bits >> 16);
+        _data[_byteOffset+1] = (WebRtc_UWord8)(bits >> 8);
+        _data[_byteOffset+2] = (WebRtc_UWord8)(bits);
+    } else
+    {
+        // unaligned: spread over four bytes; '+=' acts as a bitwise OR
+        // because the constructor zeroed the buffer
+        _data[_byteOffset]   += (WebRtc_UWord8)(bits >> (_bitOffset+16));
+        _data[_byteOffset+1] += (WebRtc_UWord8)(bits >> (_bitOffset+8));
+        _data[_byteOffset+2] += (WebRtc_UWord8)(bits >> (_bitOffset));
+        _data[_byteOffset+3] += (WebRtc_UWord8)(bits << (8-_bitOffset));
+    }
+    // _bitOffset is unchanged; three whole bytes were consumed
+    _byteOffset += 3;
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add32Bits(const WebRtc_UWord32 bits)
+{
+    // Append 32 bits, MSB first.  Returns 0 on success, -1 if they would
+    // not fit in the buffer.
+    // sanity
+    if(_dataSize < Length()+4)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        // byte aligned: four plain stores, most significant byte first
+        _data[_byteOffset]   = (WebRtc_UWord8)(bits >> 24);
+        _data[_byteOffset+1] = (WebRtc_UWord8)(bits >> 16);
+        _data[_byteOffset+2] = (WebRtc_UWord8)(bits >> 8);
+        _data[_byteOffset+3] = (WebRtc_UWord8)(bits);
+    } else
+    {
+        // unaligned: spread over five bytes; '+=' acts as a bitwise OR
+        // because the constructor zeroed the buffer
+        _data[_byteOffset]   += (WebRtc_UWord8)(bits >> (_bitOffset+24));
+        _data[_byteOffset+1] += (WebRtc_UWord8)(bits >> (_bitOffset+16));
+        _data[_byteOffset+2] += (WebRtc_UWord8)(bits >> (_bitOffset+8));
+        _data[_byteOffset+3] += (WebRtc_UWord8)(bits >> (_bitOffset));
+        _data[_byteOffset+4] += (WebRtc_UWord8)(bits << (8-_bitOffset));
+    }
+    // _bitOffset is unchanged; four whole bytes were consumed
+    _byteOffset += 4;
+    return 0;
+}
+
+// Exp-Golomb codes
+/*
+    with "prefix" and "suffix" bits and assignment to codeNum ranges (informative)
+    Bit string form Range of codeNum
+              1                0
+            0 1 x0             1..2
+          0 0 1 x1 x0          3..6
+        0 0 0 1 x2 x1 x0       7..14
+      0 0 0 0 1 x3 x2 x1 x0    15..30
+    0 0 0 0 0 1 x4 x3 x2 x1 x0 31..62
+*/
+WebRtc_Word32
+BitstreamBuilder::AddUE(const WebRtc_UWord32 value)
+{
+    // Encode `value` as an unsigned Exp-Golomb (ue(v)) code word:
+    // numZeros '0' bits, one '1' bit, then the numZeros least-significant
+    // bits of (value + 1 - 2^numZeros), MSB first, where
+    // numZeros = floor(log2(value + 1)).
+    //
+    // This closed form replaces the original hand-unrolled 33-branch
+    // if/else ladder, which contained two defects: the 11-bit branch
+    // called AddSuffix(1, ...) instead of AddSuffix(11, ...), and the
+    // 31-bit branch subtracted 0x7ffffff (one 'f' short of 0x7fffffff).
+    //
+    // Returns 0 on success, -1 if the code word does not fit.
+    WebRtc_UWord8  numZeros;
+    WebRtc_UWord32 suffix;
+    if(value == 0xffffffff)
+    {
+        // value + 1 would wrap to zero; this code word is 32 zero bits,
+        // a one bit and a 32-bit all-zero suffix.
+        numZeros = 32;
+        suffix = 0;
+    } else
+    {
+        const WebRtc_UWord32 codeNumPlus1 = value + 1;
+        // numZeros = floor(log2(value + 1)); counted by repeated halving
+        // to avoid any shift by 32, which would be undefined behavior.
+        WebRtc_UWord32 remainder = codeNumPlus1 >> 1;
+        numZeros = 0;
+        while(remainder != 0)
+        {
+            numZeros++;
+            remainder >>= 1;
+        }
+        suffix = codeNumPlus1 - (static_cast<WebRtc_UWord32>(1) << numZeros);
+    }
+    if(AddPrefix(numZeros) != 0)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    AddSuffix(numZeros, suffix);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::AddPrefix(const WebRtc_UWord8 numZeros)
+{
+    // Append the Exp-Golomb prefix: numZeros '0' bits followed by a '1'.
+    // The capacity check covers the prefix AND the matching numZeros-bit
+    // suffix, so a subsequent AddSuffix() call cannot run out of room.
+    WebRtc_UWord32 numBitsToAdd = numZeros * 2 + 1;
+    // Free capacity in bits is (_dataSize - _byteOffset) * 8 - _bitOffset.
+    // The original expression added (8 - _bitOffset) instead of
+    // subtracting _bitOffset, over-counting the free space by a full
+    // byte; the comparison is rearranged here so the unsigned arithmetic
+    // cannot underflow.
+    if((_dataSize - _byteOffset) * 8 < numBitsToAdd + _bitOffset)
+    {
+        return -1;
+    }
+
+    // add numZeros
+    for (WebRtc_UWord32 i = 0; i < numZeros; i++)
+    {
+        Add1Bit(0);
+    }
+    Add1Bit(1);
+    return 0;
+}
+
+void
+BitstreamBuilder::AddSuffix(const WebRtc_UWord8 numBits, const WebRtc_UWord32 rest)
+{
+    // Emit the `numBits` least-significant bits of `rest`, most
+    // significant bit first.
+    for(WebRtc_Word32 bitPos = numBits - 1; bitPos >= 0; bitPos--)
+    {
+        Add1Bit(static_cast<WebRtc_UWord8>((rest >> bitPos) & 0x1));
+    }
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/bitstream_builder.h b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_builder.h
new file mode 100644
index 0000000..c88ef8f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_builder.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_BUILDER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_BUILDER_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+// Writes a bitstream MSB-first into a caller-owned buffer. The AddNBits
+// methods return 0 on success and -1 on failure (e.g. insufficient room).
+class BitstreamBuilder
+{
+public:
+    // data/dataSize: caller-owned output buffer; the builder does not copy
+    // or take ownership of it.
+    BitstreamBuilder(WebRtc_UWord8* data, const WebRtc_UWord32 dataSize);
+
+    WebRtc_UWord32 Length() const;
+
+    WebRtc_Word32 Add1Bit(const WebRtc_UWord8 bit);
+    WebRtc_Word32 Add2Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add3Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add4Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add5Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add6Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add7Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add8Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add16Bits(const WebRtc_UWord16 bits);
+    WebRtc_Word32 Add24Bits(const WebRtc_UWord32 bits);
+    WebRtc_Word32 Add32Bits(const WebRtc_UWord32 bits);
+
+    // Exp-Golomb codes: writes ue(v) for value (prefix + suffix).
+    WebRtc_Word32 AddUE(const WebRtc_UWord32 value);
+
+private:
+    // Writes numZeros zero bits followed by a one bit (also bounds-checks
+    // the whole code including the suffix).
+    WebRtc_Word32 AddPrefix(const WebRtc_UWord8 numZeros);
+    // Writes the numBits-wide suffix, MSB first; assumes AddPrefix passed.
+    void AddSuffix(const WebRtc_UWord8 numBits, const WebRtc_UWord32 rest);
+    void Add1BitWithoutSanity(const WebRtc_UWord8 bit);
+
+    WebRtc_UWord8* _data;       // caller-owned output buffer
+    WebRtc_UWord32 _dataSize;   // buffer capacity in bytes
+
+    WebRtc_UWord32 _byteOffset; // index of the byte currently being written
+    WebRtc_UWord8  _bitOffset;  // bits already written in that byte (0-7)
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_BUILDER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/bitstream_parser.cc b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_parser.cc
new file mode 100644
index 0000000..79ec967
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_parser.cc
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "bitstream_parser.h"
+
+namespace webrtc {
+// Wraps a caller-owned buffer; no copy is made. Reading starts at the
+// most significant bit of data[0].
+BitstreamParser::BitstreamParser(const WebRtc_UWord8* data, const WebRtc_UWord32 dataLength) :
+    _data(data),
+    _dataLength(dataLength),
+    _byteOffset(0),
+    _bitOffset(0)
+{
+}
+    // TODO: decide whether the Get methods below should report errors
+    // (they currently have no way to signal reading past the buffer).
+
+// Returns the next bit (0 or 1), consuming it. Bits are read MSB-first
+// within each byte. No bounds checking is performed.
+WebRtc_UWord8
+BitstreamParser::Get1Bit()
+{
+    WebRtc_UWord8 retVal = 0x1 & (_data[_byteOffset] >> (7-_bitOffset++));
+
+    // prepare next byte
+    if(_bitOffset == 8)
+    {
+        _bitOffset = 0;
+        _byteOffset++;
+    }
+    return retVal;
+}
+
+// Get2Bits()..Get7Bits(): read N bits by composing Get1Bit() calls,
+// most significant bit first. Each consumes N bits from the stream.
+WebRtc_UWord8
+BitstreamParser::Get2Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get3Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get4Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get5Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 4);
+    retVal += (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get6Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 5);
+    retVal += (Get1Bit() << 4);
+    retVal += (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get7Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 6);
+    retVal += (Get1Bit() << 5);
+    retVal += (Get1Bit() << 4);
+    retVal += (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+// Reads the next 8 bits as one byte, handling the unaligned case where the
+// byte spans two buffer bytes.
+// NOTE(review): when _bitOffset != 0 this touches _data[_byteOffset+1],
+// which reads one byte past the end if called at the very end of the
+// buffer - callers must guarantee enough data remains.
+WebRtc_UWord8
+BitstreamParser::Get8Bits()
+{
+    WebRtc_UWord16 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 16 bits and shift the wanted 8 into the low byte; the cast
+        // on return truncates the high bits.
+        retVal = (_data[_byteOffset] << 8)+ (_data[_byteOffset+1]) ;
+        retVal = retVal >> (8-_bitOffset);
+    } else
+    {
+        retVal = _data[_byteOffset];
+    }
+    _byteOffset++;
+    return (WebRtc_UWord8)retVal;
+}
+
+// Reads the next 16 bits. When unaligned, the value spans three buffer
+// bytes, so 24 bits are fetched and shifted; the cast on return truncates
+// to 16 bits.
+WebRtc_UWord16
+BitstreamParser::Get16Bits()
+{
+    WebRtc_UWord32 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 24 bits
+        retVal = (_data[_byteOffset] << 16) + (_data[_byteOffset+1] << 8) + (_data[_byteOffset+2]);
+        retVal = retVal >> (8-_bitOffset);
+    }else
+    {
+        // read 16 bits
+        retVal = (_data[_byteOffset] << 8) + (_data[_byteOffset+1]) ;
+    }
+    _byteOffset += 2;
+    return (WebRtc_UWord16)retVal;
+}
+
+// Reads the next 24 bits. When unaligned, 32 bits are fetched and shifted;
+// the final mask discards anything above bit 23.
+WebRtc_UWord32
+BitstreamParser::Get24Bits()
+{
+    WebRtc_UWord32 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 32 bits
+        retVal = (_data[_byteOffset] << 24) + (_data[_byteOffset+1] << 16) + (_data[_byteOffset+2] << 8) + (_data[_byteOffset+3]);
+        retVal = retVal >> (8-_bitOffset);
+    }else
+    {
+        // read 24 bits
+        retVal = (_data[_byteOffset] << 16) + (_data[_byteOffset+1] << 8) + (_data[_byteOffset+2]) ;
+    }
+    _byteOffset += 3;
+    return retVal & 0x00ffffff; // we need to clean up the high 8 bits
+}
+
+// Reads the next 32 bits. The unaligned case needs 40 source bits, so a
+// 64-bit temporary is built byte-by-byte to avoid overflowing 32 bits
+// before the shift.
+WebRtc_UWord32
+BitstreamParser::Get32Bits()
+{
+    WebRtc_UWord32 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 40 bits
+        WebRtc_UWord64 tempVal = _data[_byteOffset];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+1];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+2];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+3];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+4];
+        tempVal >>= (8-_bitOffset);
+
+        // truncation to 32 bits keeps exactly the wanted window
+        retVal = WebRtc_UWord32(tempVal);
+    }else
+    {
+        // read 32  bits
+        retVal = (_data[_byteOffset]<< 24) + (_data[_byteOffset+1] << 16) + (_data[_byteOffset+2] << 8) + (_data[_byteOffset+3]) ;
+    }
+    _byteOffset += 4;
+    return retVal;
+}
+
+// Exp-Golomb codes
+/*
+    with "prefix" and "suffix" bits and assignment to codeNum ranges (informative)
+    Bit string form Range of codeNum
+              1                0
+            0 1 x0             1..2
+          0 0 1 x1 x0          3..6
+        0 0 0 1 x2 x1 x0       7..14
+      0 0 0 0 1 x3 x2 x1 x0    15..30
+    0 0 0 0 0 1 x4 x3 x2 x1 x0 31..62
+*/
+
+// Decodes one unsigned Exp-Golomb (ue(v)) value: k leading zeros, a one
+// bit, then k suffix bits; codeNum = 2^k - 1 + suffix. See the table above.
+WebRtc_UWord32
+BitstreamParser::GetUE()
+{
+    WebRtc_UWord32 retVal = 0;
+    WebRtc_UWord8 numLeadingZeros = 0;
+
+    // count the zeros up to the terminating one bit
+    while (Get1Bit() != 1)
+    {
+        numLeadingZeros++;
+    }
+    // prefix: the codeNum range for k zeros starts at 2^k - 1
+    retVal = (1 << numLeadingZeros) - 1;
+
+    // suffix: the next k bits, most significant first
+    while (numLeadingZeros)
+    {
+        retVal += (Get1Bit() << --numLeadingZeros);
+    }
+    return retVal;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/bitstream_parser.h b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_parser.h
new file mode 100644
index 0000000..3d8f9ef
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/bitstream_parser.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+// Reads an H.264 bitstream MSB-first from a caller-owned buffer. The
+// GetNBits methods consume bits sequentially; GetUE() decodes one unsigned
+// Exp-Golomb (ue(v)) value.
+// NOTE(review): _dataLength is stored but the accessors perform no bounds
+// checking - callers must not read past the end of the buffer.
+class BitstreamParser
+{
+public:
+    // data/dataLength: caller-owned input buffer; not copied.
+    BitstreamParser(const WebRtc_UWord8* data, const WebRtc_UWord32 dataLength);
+
+    WebRtc_UWord8 Get1Bit();
+    WebRtc_UWord8 Get2Bits();
+    WebRtc_UWord8 Get3Bits();
+    WebRtc_UWord8 Get4Bits();
+    WebRtc_UWord8 Get5Bits();
+    WebRtc_UWord8 Get6Bits();
+    WebRtc_UWord8 Get7Bits();
+    WebRtc_UWord8 Get8Bits();
+    WebRtc_UWord16 Get16Bits();
+    WebRtc_UWord32 Get24Bits();
+    WebRtc_UWord32 Get32Bits();
+
+    // Exp-Golomb codes
+    WebRtc_UWord32 GetUE();
+
+private:
+    const WebRtc_UWord8* _data;        // caller-owned buffer, not copied
+    const WebRtc_UWord32 _dataLength;  // total buffer length in bytes
+
+    WebRtc_UWord32 _byteOffset;  // index of the byte currently being read
+    WebRtc_UWord8  _bitOffset;   // bits already consumed in that byte (0-7)
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/h264_information.cc b/trunk/src/modules/rtp_rtcp/source/H264/h264_information.cc
new file mode 100644
index 0000000..cf6b549
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/h264_information.cc
@@ -0,0 +1,818 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include "h264_information.h"
+
+//#define DEBUG_SEI_MESSAGE 1
+
+#ifdef DEBUG_SEI_MESSAGE
+    #include "bitstream_parser.h"
+    #include <stdio.h>
+    #include <math.h>
+
+    WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
+    {
+        return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f,(2 + (x >> 14))));
+    }
+
+#endif
+
+namespace webrtc {
+// SVC selects the RTP codec type later reported by Type().
+// NOTE(review): the constructor does not call Reset(), so members such as
+// _parsedLength and _info.numNALUs appear to be used before explicit
+// initialization on the first GetInfo()/HasInfo() call - confirm they are
+// zero-initialized elsewhere.
+H264Information::H264Information(const bool SVC)
+    : _SVC(SVC)
+
+{
+}
+
+H264Information::~H264Information()
+{
+
+}
+
+// Clears all cached stream information so the next GetInfo() call
+// re-parses the encoded buffer from scratch.
+void
+H264Information::Reset()
+{
+    _parsedLength = 0;
+    _remLength = 0;
+    _length = 0;
+    _info.numNALUs = 0;
+    _info.numLayers = 0;
+
+    memset(_info.startCodeSize, 0, sizeof(_info.startCodeSize));
+    memset(_info.payloadSize, 0, sizeof(_info.payloadSize));
+    memset(_info.NRI, 0, sizeof(_info.NRI));
+    memset(_info.type, 0, sizeof(_info.type));
+    memset(_info.accLayerSize, 0, sizeof(_info.accLayerSize));
+
+    // Per-NALU SVC header and PACSI fields are reset individually.
+    for (WebRtc_Word32 i = 0; i < KMaxNumberOfNALUs; i++)
+    {
+        _info.SVCheader[i].idr =            0;
+        _info.SVCheader[i].priorityID =     0;
+        _info.SVCheader[i].interLayerPred = 0;
+        _info.SVCheader[i].dependencyID =   0;
+        _info.SVCheader[i].qualityID =      0;
+        _info.SVCheader[i].temporalID =     0;
+        _info.SVCheader[i].useRefBasePic =  0;
+        _info.SVCheader[i].discardable =    0;
+        _info.SVCheader[i].output =         0;
+
+        _info.PACSI[i].X = 0;
+        _info.PACSI[i].Y = 0;
+//      _info.PACSI[i].T = 0;
+        _info.PACSI[i].A = 0;
+        _info.PACSI[i].P = 0;
+        _info.PACSI[i].C = 0;
+        _info.PACSI[i].S = 0;
+        _info.PACSI[i].E = 0;
+        _info.PACSI[i].TL0picIDx =   0;
+        _info.PACSI[i].IDRpicID =    0;
+        _info.PACSI[i].DONC =        0;
+        _info.PACSI[i].numSEINALUs = 0;
+        // NALlength starts at 5, not 0 - presumably the fixed PACSI NAL
+        // header size before SEI payloads are appended; confirm.
+        _info.PACSI[i].NALlength =   5;
+    }
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 GetInfo(const WebRtc_UWord8* ptrEncodedBuffer,
+ *             const WebRtc_UWord32 length,
+ *             const H264Info*& ptrInfo);
+ *
+ * Gets information from an encoded stream.
+ *
+ * Input:
+ *          - ptrEncodedBuffer  : Pointer to encoded stream.
+ *          - length            : Length in bytes of encoded stream.
+ *
+ * Output:
+ *          - ptrInfo           : Pointer to struct with H.264 info.
+ *
+ * Return value:
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::GetInfo(const WebRtc_UWord8* ptrEncodedBuffer,
+                             const WebRtc_UWord32 length,
+                             const H264Info*& ptrInfo)
+{
+    // At least 4 bytes are needed for a start code plus a NAL header.
+    if (!ptrEncodedBuffer || length < 4)
+    {
+        return -1;
+    }
+
+    // Re-parse only when the cached info does not cover this stream.
+    if (!HasInfo(length))
+    {
+        if (-1 == FindInfo(ptrEncodedBuffer, length))
+        {
+            Reset();
+            return -1;
+        }
+    }
+    ptrInfo = &_info;
+    return 0;
+}
+
+// Returns the RTP payload codec type, chosen by the SVC flag given at
+// construction.
+RtpVideoCodecTypes
+H264Information::Type()
+{
+    if(_SVC)
+    {
+        return RTP_H264_SVCVideo;
+    }
+    return RTP_H264Video;
+}
+
+
+/*******************************************************************************
+ * bool HasInfo(const WebRtc_UWord32 length);
+ *
+ * Checks if information has already been stored for this encoded stream.
+ *
+ * Input:
+ *          - length            : Length in bytes of encoded stream.
+ *
+ * Return value:
+ *          - true (false)      : Information has (not) been stored.
+ */
+
+bool
+H264Information::HasInfo(const WebRtc_UWord32 length)
+{
+    // No NAL units recorded means nothing has been parsed yet.
+    if (!_info.numNALUs)
+    {
+        return false;
+    }
+
+    // has info, make sure current length matches info length; a mismatch
+    // means this is a different stream, so drop the cache.
+    if (length != _length)
+    {
+        Reset();
+        return false;
+    }
+
+    return true;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 FindInfo(const WebRtc_UWord8* ptrEncodedBuffer,
+ *              const WebRtc_UWord32 length);
+ *
+ * Parses the encoded stream.
+ *
+ * Input:
+ *          - ptrEncodedBuffer  : Pointer to encoded stream.
+ *          - length            : Length in bytes of encoded stream.
+ *
+ * Return value:
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+// Walks the Annex B byte stream NAL unit by NAL unit, filling _info
+// (start code sizes, payload sizes, NRI, type, layer boundaries).
+// On any parse failure the cached state is cleared and -1 is returned.
+WebRtc_Word32
+H264Information::FindInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length)
+{
+    _ptrData = ptrEncodedBuffer;
+    _length = length;
+    _parsedLength = 0;
+    _remLength = length;
+
+    do
+    {
+        // Get start code length
+        if (FindNALUStartCodeSize() == -1)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Get NAL unit payload size (1 = this was the last NAL unit)
+        WebRtc_Word32 foundLast = FindNALU();
+        if (foundLast == -1)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Validate parsed length
+        if (_parsedLength > _length)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Get NRI
+        GetNRI();
+
+        // Get type
+        if (FindNALUType() == -1)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Set layer start end bit
+        SetLayerSEBit(foundLast);
+
+
+        // Last NAL unit found?
+        if (foundLast == 1)
+        {
+            // the whole buffer must have been consumed exactly
+            if (_parsedLength != _length)
+            {
+                Reset();
+                return -1;
+            }
+            _info.numNALUs++;
+            return SetLayerLengths();
+        }
+
+        // Next NAL unit
+        _ptrData   += (_info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs]);
+        _remLength -= (_info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs]);
+        _info.numNALUs++;
+
+        // Validate memory allocation
+        if (_info.numNALUs >= KMaxNumberOfNALUs)
+        {
+            Reset();
+            return -1;
+        }
+    }
+    while(true);
+
+    // not reached: the loop exits only via the returns above
+    return 0;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 FindNALUStartCodeSize();
+ *
+ * Finds the start code length of the current NAL unit.
+ *
+ * Output:
+ *          - _info.startCodeSize[currentNALU]  : Start code length in bytes of NAL unit.
+ *
+ * Return value:
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::FindNALUStartCodeSize()
+{
+    // NAL unit start code. Ex. {0,0,1} or {0,0,0,1}
+    // NOTE(review): this records (i + 1) for the FIRST 00 00 01 found, so
+    // any bytes preceding the start code are silently counted as part of
+    // it - the buffer is assumed to begin with a start code; confirm.
+    for (WebRtc_UWord32 i = 2; i < _remLength; i++)
+    {
+        if (_ptrData[i] == 1 && _ptrData[i - 1] == 0 && _ptrData[i - 2] == 0)
+        {
+            _info.startCodeSize[_info.numNALUs] = WebRtc_UWord8(i + 1);
+            return 0;
+        }
+    }
+    // no start code in the remaining data
+    return -1;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 FindNALU();
+ *
+ * Finds the length of the current NAL unit.
+ *
+ * Output:
+ *          - _info.payloadSize[currentNALU]  : Payload length in bytes of NAL unit
+ *                                              (start code length not included).
+ *          - _parsedLength                   : Current parsed length in bytes.
+ *
+ * Return value:
+ *          - 1                 : ok. Last NAL unit found.
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::FindNALU()
+{
+    // Scan every other byte: the 00 00 pair of any start code guarantees
+    // that at least one of its zeros lands on a sampled index, and both
+    // neighbours are checked below.
+    // NOTE(review): _remLength is unsigned, so "_remLength - 2" underflows
+    // when _remLength < 2 and the loop bound becomes huge; confirm callers
+    // guarantee _remLength >= 2 here.
+    for (WebRtc_UWord32 i = _info.startCodeSize[_info.numNALUs]; i < _remLength - 2; i += 2)
+    {
+        if (_ptrData[i] == 0)
+        {
+            WebRtc_Word32 size = 0;
+            if ((_ptrData[i + 1] == 1 && _ptrData[i - 1] == 0) ||
+                (_ptrData[i + 2] == 1 && _ptrData[i + 1] == 0))
+            {
+                // Found a header
+                // Reduce size by preceding zeroes
+                while (_ptrData[i - 1] == 0)
+                {
+                    i--;
+                }
+                size = i;
+            }
+            if (size > 0)
+            {
+                _info.payloadSize[_info.numNALUs] = size - _info.startCodeSize[_info.numNALUs];
+                _parsedLength += _info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs];
+                return 0;
+            }
+        }
+    }
+    // Last NAL unit: everything up to the end of the buffer is payload
+    _info.payloadSize[_info.numNALUs] = _remLength - _info.startCodeSize[_info.numNALUs];
+    if (_info.payloadSize[_info.numNALUs] > 0)
+    {
+        _parsedLength += _info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs];
+        return 1;
+    }
+    return -1;
+}
+
+/*******************************************************************************
+ * void GetNRI();
+ *
+ * Finds the NRI of the current NAL unit.
+ *
+ * Output:
+ *          - _info.NRI[currentNALU]   : NRI of NAL unit.
+ *
+ * Return value:
+ *          - 0                        : ok
+ *          - (-1)                     : Error
+ */
+void
+H264Information::GetNRI()
+{
+    //  NAL unit header (1 byte)
+    //  ---------------------------------
+    // |   start code    |F|NRI|  Type   |
+    //  ---------------------------------
+
+    // NRI (2 bits) - nal_ref_idc. '00' - the NAL unit is not used to reconstruct reference pictures.
+    //                             >00  - the NAL unit is required to reconstruct reference pictures
+    //                                    in the same layer, or contains a parameter set.
+
+
+    const WebRtc_UWord8 type = _ptrData[_info.startCodeSize[_info.numNALUs]] & 0x1f;
+
+    // NAL units of type 5 (IDR), 7 (SPS) and 8 (PPS) should have NRI
+    // forced to the maximum 0b11 (mask 0x60).
+    if( type == 5 ||
+        type == 7 ||
+        type == 8)
+    {
+        _info.NRI[_info.numNALUs] = 0x60;
+    }else
+    {
+        // otherwise keep the NRI bits from the stream
+        _info.NRI[_info.numNALUs] = _ptrData[_info.startCodeSize[_info.numNALUs]] & 0x60;
+    }
+}
+
+
+/*******************************************************************************
+ * WebRtc_Word32 FindNALUType();
+ *
+ * Finds the type of the current NAL unit.
+ *
+ * Output:
+ *          - _info.type[currentNALU]  : Type of NAL unit
+ *
+ * Return value:
+ *          - 0                        : ok
+ *          - (-1)                     : Error
+ */
+WebRtc_Word32
+H264Information::FindNALUType()
+{
+    //  NAL unit header (1 byte)
+    //  ---------------------------------
+    // |   start code    |F|NRI|  Type   |
+    //  ---------------------------------
+
+    // Low 5 bits of the first byte after the start code are the type.
+    _info.type[_info.numNALUs] = _ptrData[_info.startCodeSize[_info.numNALUs]] & 0x1f;
+
+    // type 0 is unspecified/invalid
+    if (_info.type[_info.numNALUs] == 0)
+    {
+        return -1;
+    }
+
+    // SVC NAL units, extended header
+    if (ParseSVCNALUHeader() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 ParseSVCNALUHeader();
+ *
+ * Finds the extended header of the current NAL unit. Included for NAL unit types 14 and 20.
+ *
+ * Output:
+ *          - _info.SVCheader[currentNALU]  : SVC header of NAL unit.
+ *
+ * Return value:
+ *          - 0                             : ok
+ *          - (-1)                          : Error
+ */
+WebRtc_Word32
+H264Information::ParseSVCNALUHeader()
+{
+    if (_info.type[_info.numNALUs] == 5)
+    {
+        _info.SVCheader[_info.numNALUs].idr = 1;
+    }
+    if (_info.type[_info.numNALUs] == 6)
+    {
+        WebRtc_UWord32 seiPayloadSize;
+        do
+        {
+            // SEI message
+            seiPayloadSize = 0;
+
+            WebRtc_UWord32 curByte = _info.startCodeSize[_info.numNALUs] + 1;
+            const WebRtc_UWord32 seiStartOffset = curByte;
+
+            WebRtc_UWord32 seiPayloadType = 0;
+            while(_ptrData[curByte] == 0xff)
+            {
+                seiPayloadType += 255;
+                curByte++;
+            }
+            seiPayloadType += _ptrData[curByte++];
+
+            while(_ptrData[curByte] == 0xff)
+            {
+                seiPayloadSize += 255;
+                curByte++;
+            }
+            seiPayloadSize += _ptrData[curByte++];
+
+            if(_info.payloadSize[_info.numNALUs] < _info.startCodeSize[_info.numNALUs] + seiPayloadSize)
+            {
+                // sanity of remaining buffer
+                // return 0 since no one "need" SEI messages
+                assert(false);
+               return 0;
+            }
+
+            if(seiPayloadType == 24)
+            {
+                // we add this to NALU 0 to be signaled in the first PACSI packet
+                _info.PACSI[0].numSEINALUs = 1; // we allways add this to NALU 0 to send it in the first packet
+                if(_info.PACSI[0].seiMessageLength[0] != seiPayloadSize)
+                {
+                    _info.PACSI[0].seiMessageLength[0] = seiPayloadSize;
+                    delete [] _info.PACSI[0].seiMessageData[0];
+                    _info.PACSI[0].seiMessageData[0] = new WebRtc_UWord8[seiPayloadSize];
+                }
+                memcpy(_info.PACSI[0].seiMessageData[0], _ptrData+seiStartOffset, seiPayloadSize);
+
+                _info.PACSI[0].NALlength += seiPayloadSize + 2; // additional 2 is the length
+
+#ifdef DEBUG_SEI_MESSAGE
+                const WebRtc_UWord8 numberOfLayers = 10;
+                WebRtc_UWord16 avgBitrate[numberOfLayers]= {0,0,0,0,0,0,0,0,0,0};
+                WebRtc_UWord16 maxBitrateLayer[numberOfLayers]= {0,0,0,0,0,0,0,0,0,0};
+                WebRtc_UWord16 maxBitrateLayerRepresentation[numberOfLayers] = {0,0,0,0,0,0,0,0,0,0};
+                WebRtc_UWord16 maxBitrareCalcWindow[numberOfLayers] = {0,0,0,0,0,0,0,0,0,0};
+
+                BitstreamParser parserScalabilityInfo(_ptrData+curByte, seiPayloadSize);
+
+                parserScalabilityInfo.Get1Bit(); // not used in futher parsing
+                const WebRtc_UWord8 priority_layer_info_present = parserScalabilityInfo.Get1Bit();
+                const WebRtc_UWord8 priority_id_setting_flag = parserScalabilityInfo.Get1Bit();
+
+                WebRtc_UWord32 numberOfLayersMinusOne = parserScalabilityInfo.GetUE();
+                for(WebRtc_UWord32 j = 0; j<= numberOfLayersMinusOne; j++)
+                {
+                    printf("\nLayer ID:%d \n",parserScalabilityInfo.GetUE());
+                    printf("Priority ID:%d \n", parserScalabilityInfo.Get6Bits());
+                    printf("Discardable:%d \n", parserScalabilityInfo.Get1Bit());
+
+                    printf("Dependency ID:%d \n", parserScalabilityInfo.Get3Bits());
+                    printf("Quality ID:%d \n", parserScalabilityInfo.Get4Bits());
+                    printf("Temporal ID:%d \n", parserScalabilityInfo.Get3Bits());
+
+                    const WebRtc_UWord8 sub_pic_layer_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 sub_region_layer_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 iroi_division_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 profile_level_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 bitrate_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 frm_rate_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 frm_size_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 layer_dependency_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 parameter_sets_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 bitstream_restriction_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 exact_inter_layer_pred_flag = parserScalabilityInfo.Get1Bit();  // not used in futher parsing
+
+                    if(sub_pic_layer_flag || iroi_division_info_present_flag)
+                    {
+                        parserScalabilityInfo.Get1Bit();
+                    }
+                    const WebRtc_UWord8 layer_conversion_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 layer_output_flag = parserScalabilityInfo.Get1Bit();  // not used in futher parsing
+
+                    if(profile_level_info_present_flag)
+                    {
+                        parserScalabilityInfo.Get24Bits();
+                    }
+                    if(bitrate_info_present_flag)
+                    {
+                        // this is what we want
+                        avgBitrate[j] = parserScalabilityInfo.Get16Bits();
+                        maxBitrateLayer[j] = parserScalabilityInfo.Get16Bits();
+                        maxBitrateLayerRepresentation[j] = parserScalabilityInfo.Get16Bits();
+                        maxBitrareCalcWindow[j] = parserScalabilityInfo.Get16Bits();
+
+                        printf("\tAvg:%d\n", BitRateBPS(avgBitrate[j]));
+                        printf("\tmaxBitrate:%d\n", BitRateBPS(maxBitrateLayer[j]));
+                        printf("\tmaxBitrate rep:%d\n", BitRateBPS(maxBitrateLayerRepresentation[j]));
+                        printf("\tCalcWindow:%d\n", maxBitrareCalcWindow[j]);
+                    }
+                    if(frm_rate_info_present_flag)
+                    {
+                        printf("\tFrame rate constant:%d\n", parserScalabilityInfo.Get2Bits()); // 0 = not constant, 1 = constant, 2 = maybe...
+                        printf("\tFrame rate avg:%d\n", parserScalabilityInfo.Get16Bits()/256);
+                    }
+                    if(frm_size_info_present_flag || iroi_division_info_present_flag)
+                    {
+                        printf("\tFrame Width:%d\n",(parserScalabilityInfo.GetUE()+1)*16);
+                        printf("\tFrame Height:%d\n",(parserScalabilityInfo.GetUE()+1)*16);
+                    }
+                    if(sub_region_layer_flag)
+                    {
+                        parserScalabilityInfo.GetUE();
+                        if(parserScalabilityInfo.Get1Bit())
+                        {
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                        }
+                    }
+                    if(sub_pic_layer_flag)
+                    {
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(iroi_division_info_present_flag)
+                    {
+                        if(parserScalabilityInfo.Get1Bit())
+                        {
+                            parserScalabilityInfo.GetUE();
+                            parserScalabilityInfo.GetUE();
+                        }else
+                        {
+                            const WebRtc_UWord32 numRoisMinusOne = parserScalabilityInfo.GetUE();
+                            for(WebRtc_UWord32 k = 0; k <= numRoisMinusOne; k++)
+                            {
+                                parserScalabilityInfo.GetUE();
+                                parserScalabilityInfo.GetUE();
+                                parserScalabilityInfo.GetUE();
+                            }
+                        }
+                    }
+                    if(layer_dependency_info_present_flag)
+                    {
+                        const WebRtc_UWord32 numDirectlyDependentLayers = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 k = 0; k < numDirectlyDependentLayers; k++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                    } else
+                    {
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(parameter_sets_info_present_flag)
+                    {
+                        const WebRtc_UWord32 numSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 k = 0; k <= numSeqParameterSetMinusOne; k++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                        const WebRtc_UWord32 numSubsetSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 l = 0; l <= numSubsetSeqParameterSetMinusOne; l++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                        const WebRtc_UWord32 numPicParameterSetMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 m = 0; m <= numPicParameterSetMinusOne; m++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                    }else
+                    {
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(bitstream_restriction_info_present_flag)
+                    {
+                        parserScalabilityInfo.Get1Bit();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(layer_conversion_flag)
+                    {
+                        parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 k = 0; k <2;k++)
+                        {
+                            if(parserScalabilityInfo.Get1Bit())
+                            {
+                                parserScalabilityInfo.Get24Bits();
+                                parserScalabilityInfo.Get16Bits();
+                                parserScalabilityInfo.Get16Bits();
+                            }
+                        }
+                    }
+                }
+                if(priority_layer_info_present)
+                {
+                    const WebRtc_UWord32 prNumDidMinusOne = parserScalabilityInfo.GetUE();
+                    for(WebRtc_UWord32 k = 0; k <= prNumDidMinusOne;k++)
+                    {
+                        parserScalabilityInfo.Get3Bits();
+                        const WebRtc_UWord32 prNumMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 l = 0; l <= prNumMinusOne; l++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                            parserScalabilityInfo.Get24Bits();
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                        }
+                    }
+                }
+                if(priority_id_setting_flag)
+                {
+                    WebRtc_UWord8 priorityIdSettingUri;
+                    WebRtc_UWord32 priorityIdSettingUriIdx = 0;
+                    do
+                    {
+                        priorityIdSettingUri = parserScalabilityInfo.Get8Bits();
+                    } while (priorityIdSettingUri != 0);
+                }
+#endif
+            } else
+            {
+                // not seiPayloadType 24 ignore
+            }
+            //check if we have more SEI in NALU
+        } while (_info.payloadSize[_info.numNALUs] > _info.startCodeSize[_info.numNALUs] + seiPayloadSize);
+    }
+
+   // Extended NAL unit header (3 bytes).
+   // +---------------+---------------+---------------+
+   // |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+   // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   // |R|I|   PRID    |N| DID |  QID  | TID |U|D|O| RR|
+   // +---------------+---------------+---------------+
+
+   // R    - Reserved for future extensions (MUST be 1). Receivers SHOULD ignore the value of R.
+   // I    - Is layer representation an IDR layer (1) or not (0).
+   // PRID - Priority identifier for the NAL unit.
+   // N    - Specifies whether inter-layer prediction may be used for decoding the coded slice (1) or not (0).
+   // DID  - Indicates the inter-layer coding dependency level of a layer representation.
+   // QID  - Indicates the quality level of an MGS layer representation.
+   // TID  - Indicates the temporal level of a layer representation.
+   // U    - Use only reference base pictures during the inter prediction process (1) or not (0).
+   // D    - Discardable flag.
+   // O    - Output_flag. Affects the decoded picture output process as defined in Annex C of [H.264].
+   // RR   - Reserved_three_2bits (MUST be '11'). Receivers SHOULD ignore the value of RR.
+
+    if (_info.type[_info.numNALUs] == 14 ||
+        _info.type[_info.numNALUs] == 20)
+    {
+        WebRtc_UWord32 curByte = _info.startCodeSize[_info.numNALUs] + 1;
+
+        if (_remLength < curByte + 3)
+        {
+                return -1;
+        }
+
+        _info.SVCheader[_info.numNALUs].idr        = (_ptrData[curByte] >> 6) & 0x01;
+        _info.SVCheader[_info.numNALUs].priorityID = (_ptrData[curByte++] & 0x3F);
+
+        _info.SVCheader[_info.numNALUs].interLayerPred = (_ptrData[curByte] >> 7) & 0x01;
+        _info.SVCheader[_info.numNALUs].dependencyID   = (_ptrData[curByte] >> 4) & 0x07;
+        _info.SVCheader[_info.numNALUs].qualityID      = (_ptrData[curByte++] & 0x0F);
+
+        _info.SVCheader[_info.numNALUs].temporalID     = (_ptrData[curByte] >> 5) & 0x07;
+        _info.SVCheader[_info.numNALUs].useRefBasePic  = (_ptrData[curByte] >> 4) & 0x01;
+        _info.SVCheader[_info.numNALUs].discardable    = (_ptrData[curByte] >> 3) & 0x01;
+        _info.SVCheader[_info.numNALUs].output         = (_ptrData[curByte] >> 2) & 0x01;
+
+        if (_info.type[_info.numNALUs] == 14)
+        {
+            // inform the next NALU
+            memcpy(&(_info.SVCheader[_info.numNALUs+1]), &(_info.SVCheader[_info.numNALUs]), sizeof(_H264_SVC_NALUHeader));
+        }
+    }
+   return 0;
+}
+
+
+/*******************************************************************************
+ * void SetLayerSEBit();
+ *
+ * Sets start and end bits for the current NAL unit.
+ *
+ * Output:
+ *          - _info.PACSI[currentNALU].S    : First NAL unit in a layer (S = 1).
+ *          - _info.PACSI[currentNALU].E    : Last NAL unit in a layer (E = 1).
+ *
+ */
+void
+H264Information::SetLayerSEBit(WebRtc_Word32 foundLast)
+{
+    const WebRtc_UWord16 cur = _info.numNALUs;
+
+    if (cur == 0)
+    {
+        // The very first NAL unit always opens a layer.
+        _info.PACSI[cur].S = 1;
+    }
+    else
+    {
+        const WebRtc_UWord16 prev = cur - 1;
+        bool startsNewLayer = false;
+
+        // Entering the scalable extension (type 20) from a different NALU
+        // type starts a new layer.
+        if (_info.type[cur] == 20 && _info.type[cur] != _info.type[prev])
+        {
+            startsNewLayer = true;
+        }
+
+        // Two consecutive scalable NAL units belong to different layers
+        // when any of the temporal/dependency/quality identifiers differ.
+        if (_info.type[cur] == 20 && _info.type[prev] == 20)
+        {
+            if (_info.SVCheader[cur].temporalID   != _info.SVCheader[prev].temporalID ||
+                _info.SVCheader[cur].dependencyID != _info.SVCheader[prev].dependencyID ||
+                _info.SVCheader[cur].qualityID    != _info.SVCheader[prev].qualityID)
+            {
+                startsNewLayer = true;
+            }
+        }
+
+        if (startsNewLayer)
+        {
+            // Open a layer here and close the previous one.
+            _info.PACSI[cur].S  = 1;
+            _info.PACSI[prev].E = 1;
+        }
+    }
+
+    if (foundLast)
+    {
+        // The final NAL unit of the frame closes the last layer.
+        _info.PACSI[cur].E = 1;
+    }
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 SetLayerLengths();
+ *
+ * Sets the accumulated layer length.
+ *
+ * Output:
+ *          - _info.accLayerSize[currentLayer]   : Size in bytes of layer: 0 - currentLayer.
+ *
+ * Return value:
+ *          - 0                        : ok
+ *          - (-1)                     : Error
+ *
+ */
+WebRtc_Word32
+H264Information::SetLayerLengths()
+{
+    for (WebRtc_UWord32 curNALU = 0; curNALU < _info.numNALUs; curNALU++)
+    {
+        // Accumulate this NAL unit (start code + payload) into the current
+        // layer's running total.
+        _info.accLayerSize[_info.numLayers] += _info.startCodeSize[curNALU] + _info.payloadSize[curNALU];
+
+        if (_info.PACSI[curNALU].E == 1)
+        {
+            // End of a layer: close it and seed the next layer's
+            // accumulator with the total so far.
+            _info.numLayers++;
+            if (curNALU == WebRtc_UWord32(_info.numNALUs - 1))
+            {
+                break;
+            }
+            if (_info.numLayers >= KMaxNumberOfLayers)
+            {
+                Reset();
+                return -1;
+            }
+            _info.accLayerSize[_info.numLayers] += _info.accLayerSize[_info.numLayers - 1];
+        }
+    }
+
+    // Sanity: at least one layer must have been closed and the count must
+    // stay within bounds.
+    // BUG FIX: the original used && ("< 1 AND > KMaxNumberOfLayers"),
+    // which can never be true; the intended combination is OR.
+    if (_info.numLayers < 1 || _info.numLayers > KMaxNumberOfLayers)
+    {
+        Reset();
+        return -1;
+    }
+
+    // The accumulated size of the last layer must account for the whole
+    // encoded buffer.
+    if (_info.accLayerSize[_info.numLayers - 1] != WebRtc_Word32(_length))
+    {
+        Reset();
+        return -1;
+    }
+
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/h264_information.h b/trunk/src/modules/rtp_rtcp/source/H264/h264_information.h
new file mode 100644
index 0000000..c7f5214
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/h264_information.h
@@ -0,0 +1,170 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_
+
+#include "VideoCodecInformation.h"
+#include "typedefs.h"
+
+namespace webrtc {
+// Compile-time capacity limits for the per-frame parsing arrays below.
+enum
+{
+    KMaxNumberOfNALUs = 128,   // upper bound on NAL units tracked per frame
+    KMaxNumberOfSEINALUs = 2,  // upper bound on SEI messages stored per PACSI NALU
+    KMaxNumberOfLayers = 16    // upper bound on scalable layers per frame
+};
+
+// Parsed 3-byte H.264 SVC extended NAL unit header (NAL unit types 14/20).
+// Bit layout (as parsed in H264Information::ParseSVCNALUHeader):
+//   |R|I|   PRID    |N| DID |  QID  | TID |U|D|O| RR|
+struct H264_SVC_NALUHeader
+{
+    H264_SVC_NALUHeader()
+    :
+    r(1),
+    idr(0),
+    priorityID(0),
+    interLayerPred(0),
+    dependencyID(0),
+    qualityID(0),
+    temporalID(0),
+    useRefBasePic(0),
+    discardable(0),
+    output(0),
+    rr(3),
+    length(3)
+    {
+    }
+    const WebRtc_UWord8 r;              // R: reserved, MUST be 1
+    WebRtc_UWord8       idr;            // I: layer representation is an IDR layer
+    WebRtc_UWord8       priorityID;     // PRID: priority identifier
+    WebRtc_UWord8       interLayerPred; // N: inter-layer prediction may be used
+    WebRtc_UWord8       dependencyID;   // DID: inter-layer coding dependency level
+    WebRtc_UWord8       qualityID;      // QID: quality level of an MGS layer
+    WebRtc_UWord8       temporalID;     // TID: temporal level
+    WebRtc_UWord8       useRefBasePic;  // U: use only reference base pictures
+    WebRtc_UWord8       discardable;    // D: discardable flag
+    WebRtc_UWord8       output;         // O: output flag (Annex C of [H.264])
+    const WebRtc_UWord8 rr;             // RR: reserved two bits, MUST be '11' (= 3)
+    const WebRtc_UWord8 length;         // serialized header length in bytes (always 3)
+};
+
+// PACSI (Payload Content Scalability Information) NAL unit, NAL type 30.
+// Holds the flag bits serialized by RTPSenderH264::AddH264PACSINALU()
+// plus owned copies of the frame's SEI NAL units.
+// NOTE(review): the destructor frees seiMessageData[] but the class has no
+// copy constructor/assignment operator; copying an instance would lead to
+// a double delete — confirm instances are never copied.
+class H264_PACSI_NALU
+{
+public:
+    H264_PACSI_NALU() :
+        NALlength(5),
+        type(30),
+        X(0),
+        Y(0),
+//        T(0),
+        A(0),
+        P(0),
+        C(0),
+        S(0),
+        E(0),
+        TL0picIDx(0),
+        IDRpicID(0),
+        DONC(0),
+        numSEINALUs(0)
+    {
+        memset(seiMessageLength, 0, sizeof(seiMessageLength));
+        memset(seiMessageData, 0, sizeof(seiMessageData));
+    }
+    ~H264_PACSI_NALU()
+    {
+        // Release the owned SEI message copies.
+        for(int i = 0; i<KMaxNumberOfSEINALUs; i++)
+        {
+            if(seiMessageData[i])
+            {
+                delete [] seiMessageData[i];
+            }
+        }
+    }
+
+    WebRtc_UWord32        NALlength;   // total PACSI NALU length in bytes
+    const WebRtc_UWord8   type;        // NAL unit type, always 30
+    WebRtc_UWord8         X;           // flag bits, serialized as X|Y|T|A|P|C|S|E
+    WebRtc_UWord8         Y;           // 1 => TL0picIDx/IDRpicID fields are present
+//  WebRtc_UWord8         T;
+    WebRtc_UWord8         A;
+    WebRtc_UWord8         P;
+    WebRtc_UWord8         C;
+    WebRtc_UWord8         S;           // 1 => first NAL unit of a layer (see SetLayerSEBit)
+    WebRtc_UWord8         E;           // 1 => last NAL unit of a layer
+    WebRtc_UWord8         TL0picIDx;
+    WebRtc_UWord16        IDRpicID;
+    WebRtc_UWord16        DONC;        // decoding order number
+    WebRtc_UWord32        numSEINALUs; // number of valid entries in the arrays below
+    WebRtc_UWord32        seiMessageLength[KMaxNumberOfSEINALUs]; // we allow KMaxNumberOfSEINALUs SEI messages
+    WebRtc_UWord8*        seiMessageData[KMaxNumberOfSEINALUs];   // owned, heap-allocated SEI copies
+};
+
+// Aggregated parse results for one encoded H.264 frame: per-NAL-unit
+// sizes, types and SVC headers, plus accumulated per-layer sizes.
+struct H264Info
+{
+    H264Info()
+        :
+        numNALUs(0),
+        numLayers(0)
+        {
+            memset(startCodeSize, 0, sizeof(startCodeSize));
+            memset(payloadSize, 0, sizeof(payloadSize));
+            memset(NRI, 0, sizeof(NRI));
+            memset(type, 0, sizeof(type));
+            memset(accLayerSize, 0, sizeof(accLayerSize));
+        }
+    WebRtc_UWord16             numNALUs;   // number of parsed NAL units
+    WebRtc_UWord8              numLayers;  // number of complete layers found
+    WebRtc_UWord8              startCodeSize[KMaxNumberOfNALUs]; // start-code length per NALU
+    WebRtc_UWord32             payloadSize[KMaxNumberOfNALUs];   // payload length per NALU (excl. start code)
+    WebRtc_UWord8              NRI[KMaxNumberOfNALUs];           // NRI field per NALU
+    WebRtc_UWord8              type[KMaxNumberOfNALUs];          // NAL unit type per NALU
+    H264_SVC_NALUHeader SVCheader[KMaxNumberOfNALUs];  // SVC extension header per NALU
+    H264_PACSI_NALU     PACSI[KMaxNumberOfNALUs];      // PACSI info (incl. layer S/E bits) per NALU
+    WebRtc_Word32              accLayerSize[KMaxNumberOfLayers]; // accumulated size in bytes of layers 0..i
+};
+
+
+// Parses an encoded H.264 (optionally SVC) buffer into per-NAL-unit
+// information (start codes, types, NRI, SVC headers) and per-layer sizes.
+class H264Information : public VideoCodecInformation
+{
+public:
+    // SVC enables parsing of the scalable extension (NAL types 14/20).
+    H264Information(const bool SVC);
+    ~H264Information();
+
+    // Clears all cached parse state.
+    virtual void Reset();
+
+    virtual RtpVideoCodecTypes Type();
+
+    // Parses ptrEncodedBuffer (length bytes) if needed and returns a
+    // pointer to the internal info struct via ptrInfo.
+    // Returns 0 on success, -1 on parse failure.
+    virtual WebRtc_Word32 GetInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length, const H264Info*& ptrInfo);
+
+
+protected:
+    // Presumably tests whether the buffer has already been parsed — see
+    // the .cc for the exact contract.
+    bool HasInfo(const WebRtc_UWord32 length);
+    // Runs the full parse over the buffer.
+    WebRtc_Word32  FindInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length);
+
+    void GetNRI();
+    WebRtc_Word32 FindNALU();
+    WebRtc_Word32 FindNALUStartCodeSize();
+    WebRtc_Word32 FindNALUType();
+
+    // Parses the 3-byte SVC extension header (NAL types 14/20) and SEI
+    // scalability information.
+    WebRtc_Word32 ParseSVCNALUHeader();
+
+    // Sets the layer start (S) / end (E) bits for the current NAL unit.
+    void SetLayerSEBit(WebRtc_Word32 foundLast);
+    // Computes accumulated layer sizes; 0 on success, -1 on error.
+    WebRtc_Word32 SetLayerLengths();
+
+private:
+    const bool            _SVC;            // parse SVC extensions if true
+    const WebRtc_UWord8*    _ptrData;      // buffer currently being parsed
+    WebRtc_UWord32          _length;       // total buffer length in bytes
+    WebRtc_UWord32          _parsedLength; // bytes consumed so far
+    WebRtc_UWord32          _remLength;    // bytes remaining to parse
+    H264Info          _info;               // parse results
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc b/trunk/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
new file mode 100644
index 0000000..1f35526
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
@@ -0,0 +1,1280 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_sender_h264.h"
+
+#include "rtp_utility.h"
+
+namespace webrtc {
+// Constructor. Binds this helper to the owning RTP sender and resets all
+// H.264/SVC packetization and relay state to its defaults (single-NAL
+// mode, in-band PPS/SPS sending enabled).
+RTPSenderH264::RTPSenderH264(RTPSenderInterface* rtpSender) :
+    // H264
+    _rtpSender(*rtpSender),
+    _h264Mode(H264_SINGLE_NAL_MODE),
+    _h264SendPPS_SPS(true),
+    _h264SVCPayloadType(-1),  // -1 => keep the incoming payload type when relaying
+    _h264SVCRelaySequenceNumber(0),
+    _h264SVCRelayTimeStamp(0),
+    _h264SVCRelayLayerComplete(false),
+
+    _useHighestSendLayer(false),
+    // NOTE(review): _highestDependencyLayerOld is seeded with a
+    // MAX_NUMBER_OF_TEMPORAL_ID constant — verify a dependency-ID
+    // constant was not intended here.
+    _highestDependencyLayerOld(MAX_NUMBER_OF_TEMPORAL_ID-1),
+    _highestDependencyQualityIDOld(MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID-1),
+    _highestDependencyLayer(0),
+    _highestDependencyQualityID(0),
+    _highestTemporalLayer(0)
+{
+}
+
+// Destructor. No owned resources to release.
+RTPSenderH264::~RTPSenderH264()
+{
+}
+
+// Restores the packetization defaults: single-NAL mode with PPS/SPS sent
+// in-band. Always returns 0.
+WebRtc_Word32
+RTPSenderH264::Init()
+{
+    _h264Mode = H264_SINGLE_NAL_MODE;
+    _h264SendPPS_SPS = true;
+    return 0;
+}
+
+/*
+    multi-session
+    3 modes supported
+    NI-T        timestamps
+    NI-TC        timestamps/CS-DON
+    NI-C        CS-DON
+
+    Non-interleaved timestamp based mode (NI-T)
+    Non-interleaved cross-session decoding order number (CS-DON) based mode (NI-C)
+    Non-interleaved combined timestamp and CS-DON mode (NI-TC)
+
+    NOT supported  Interleaved CS-DON (I-C) mode.
+
+    NI-T and NI-TC modes both use timestamps to recover the decoding
+    order.  In order to be able to do so, it is necessary for the RTP
+    packet stream to contain data for all sampling instances of a given
+    RTP session in all enhancement RTP sessions that depend on the given
+    RTP session.  The NI-C and I-C modes do not have this limitation,
+    and use the CS-DON values as a means to explicitly indicate decoding
+    order, either directly coded in PACSI NAL units, or inferred from
+    them using the packetization rules.  It is noted that the NI-TC mode
+    offers both alternatives and it is up to the receiver to select
+    which one to use.
+*/
+
+// Serializes the 3-byte SVC extended NAL unit header into databuffer at
+// curByte, advancing curByte by 3. Always returns true.
+// (Comment garbling fixed: a blanket int->WebRtc_Word32 replacement had
+// mangled "inter-layer"/"inter prediction" below.)
+bool
+RTPSenderH264::AddH264SVCNALUHeader(const H264_SVC_NALUHeader& svc,
+                                    WebRtc_UWord8* databuffer,
+                                    WebRtc_Word32& curByte) const
+{
+   // +---------------+---------------+---------------+
+   // |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+   // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   // |R|I|   PRID    |N| DID |  QID  | TID |U|D|O| RR|
+   // +---------------+---------------+---------------+
+
+   // R    - Reserved for future extensions (MUST be 1). Receivers SHOULD ignore the value of R.
+   // I    - Is layer representation an IDR layer (1) or not (0).
+   // PRID - Priority identifier for the NAL unit.
+   // N    - Specifies whether inter-layer prediction may be used for decoding the coded slice (1) or not (0).
+   // DID  - Indicates the inter-layer coding dependency level of a layer representation.
+   // QID  - Indicates the quality level of an MGS layer representation.
+   // TID  - Indicates the temporal level of a layer representation.
+   // U    - Use only reference base pictures during the inter prediction process (1) or not (0).
+   // D    - Discardable flag.
+   // O    - Output_flag. Affects the decoded picture output process as defined in Annex C of [H.264].
+   // RR   - Reserved_three_2bits (MUST be '11'). Receivers SHOULD ignore the value of RR.
+
+   // Add header data
+   databuffer[curByte++] = (svc.r << 7)              + (svc.idr << 6)           + (svc.priorityID & 0x3F);
+   databuffer[curByte++] = (svc.interLayerPred << 7) + (svc.dependencyID << 4)  + (svc.qualityID & 0x0F);
+   databuffer[curByte++] = (svc.temporalID << 5)     + (svc.useRefBasePic << 4) + (svc.discardable << 3) +
+                           (svc.output << 2)         + (svc.rr & 0x03);
+   return true;
+}
+
+// Writes a PACSI NAL unit (type 30) into databuffer at curByte, advancing
+// curByte: SVC extension header, flags octet, optional TL0PICIDX/IDRPICID
+// and DONC fields, and (for the first packet of the NALU only) the
+// buffered SEI NAL units.
+// Returns the number of bytes written, or 0 if there is nothing to add.
+WebRtc_Word32
+RTPSenderH264::AddH264PACSINALU(const bool firstPacketInNALU,
+                                const bool lastPacketInNALU,
+                                const H264_PACSI_NALU& pacsi,
+                                const H264_SVC_NALUHeader& svc,
+                                const WebRtc_UWord16 DONC,
+                                WebRtc_UWord8* databuffer,
+                                WebRtc_Word32& curByte) const
+{
+    //  0                   1                   2                   3
+    //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |F|NRI|Type(30) |              SVC NAL unit header              |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |X|Y|T|A|P|C|S|E| TL0PICIDX (o.)|        IDRPICID (o.)          |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |          DONC (o.)            |        NAL unit size 1        |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |                                                               |
+    // |                 SEI NAL unit 1                                |
+    // |                                                               |
+    // |                         +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |                         |        NAL unit size 2        |     |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+     |
+    // |                                                               |
+    // |            SEI NAL unit 2                                     |
+    // |                                           +-+-+-+-+-+-+-+-+-+-+
+    // |                                           |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+    // If present, MUST be first NAL unit in aggregation packet + there MUST be at least
+    // one additional unit in the same packet! The RTPHeader and payload header are set as if the 2nd NAL unit
+    // (first non-PACSI NAL unit) is encapsulated in the same packet.
+    // contains scalability info common for all remaining NAL units.
+
+    // todo: add API to configure this; required for multisession
+    const bool addDONC = false;
+
+    if (svc.length == 0 || pacsi.NALlength == 0)
+    {
+        // nothing to add
+        return 0;
+    }
+
+    WebRtc_Word32 startByte = curByte;
+
+    // NAL unit header
+    databuffer[curByte++] = 30; // NRI will be added later
+
+    // Extended SVC header
+    AddH264SVCNALUHeader(svc, databuffer, curByte);
+
+    // Flags octet: X|Y|T|A|P|C|S|E.
+    // BUG FIX: the S/E ternaries must be parenthesized. '+' binds tighter
+    // than '?:', so the original expression collapsed the whole flag sum
+    // into the first ternary's condition and every flag bit except S was
+    // lost.
+    databuffer[curByte++] = (pacsi.X << 7) +
+                            (pacsi.Y << 6) +
+                            (addDONC << 5) +
+                            (pacsi.A << 4) +
+                            (pacsi.P << 3) +
+                            (pacsi.C << 2) +
+                            (firstPacketInNALU ? (pacsi.S << 1) : 0) +
+                            (lastPacketInNALU ? pacsi.E : 0);
+
+    // Optional fields
+    if (pacsi.Y)
+    {
+        databuffer[curByte++] = pacsi.TL0picIDx;
+        databuffer[curByte++] = (WebRtc_UWord8)(pacsi.IDRpicID >> 8);
+        databuffer[curByte++] = (WebRtc_UWord8)(pacsi.IDRpicID);
+    }
+    // Decoding order number
+    if (addDONC) // pacsi.T
+    {
+        databuffer[curByte++] = (WebRtc_UWord8)(DONC >> 8);
+        databuffer[curByte++] = (WebRtc_UWord8)(DONC);
+    }
+
+    // SEI NALUs: only for the first packet, so they go out once per frame.
+    if (firstPacketInNALU) // IMPROVEMENT: duplicate to make sure it arrives...
+    {
+        for (WebRtc_UWord32 i = 0; i < pacsi.numSEINALUs; i++)
+        {
+            // NALU size
+            databuffer[curByte++] = (WebRtc_UWord8)(pacsi.seiMessageLength[i] >> 8);
+            databuffer[curByte++] = (WebRtc_UWord8)(pacsi.seiMessageLength[i]);
+
+            // NALU data
+            memcpy(databuffer + curByte, pacsi.seiMessageData[i], pacsi.seiMessageLength[i]);
+            curByte += pacsi.seiMessageLength[i];
+        }
+    }
+    return curByte - startByte;
+}
+
+// Records the sequence number of the last relayed H.264 SVC packet so
+// that SendH264SVCRelayPacket() can detect discontinuities in the relayed
+// stream. Always returns 0.
+WebRtc_Word32
+RTPSenderH264::SetH264RelaySequenceNumber(const WebRtc_UWord16 seqNum)
+{
+    _h264SVCRelaySequenceNumber = seqNum;
+    return 0;
+}
+
+// Marks whether the currently relayed layer is complete (used as a guard
+// in SendH264SVCRelayPacket()). Always returns 0.
+WebRtc_Word32
+RTPSenderH264::SetH264RelayCompleteLayer(const bool complete)
+{
+    _h264SVCRelayLayerComplete = complete;
+    return 0;
+}
+
+/*
+    12  Filler data
+
+        The only restriction of filler data NAL units within an
+        access unit is that they shall not precede the first VCL
+        NAL unit with the same access unit.
+*/
+// Builds and sends one RTP packet carrying an H.264 filler-data NAL unit
+// (type 12) whose total size is bytesToSend; header fields are copied
+// from rtpHeader and the payload is all-ones (0xff) bytes.
+// Returns the result of SendToNetwork(), or 0 if the size is unusable.
+// NOTE(review): the ssrc parameter is unused — the SSRC is taken from
+// rtpHeader instead; confirm this is intentional.
+WebRtc_Word32
+RTPSenderH264::SendH264FillerData(const WebRtcRTPHeader* rtpHeader,
+                                  const WebRtc_UWord16 bytesToSend,
+                                  const WebRtc_UWord32 ssrc)
+{
+    // 12 bytes of RTP header + 1 byte of NALU header precede the filler.
+    // Guard the unsigned subtraction below: for bytesToSend < 13 it would
+    // wrap to a huge value (the original relied on the max-size check
+    // below to reject the wrapped result).
+    if (bytesToSend < 12 + 1)
+    {
+        return 0;
+    }
+    WebRtc_UWord16 fillerLength = bytesToSend - 12 - 1;
+
+    if (fillerLength > WEBRTC_IP_PACKET_SIZE - 12 - 1)
+    {
+        return 0;
+    }
+
+    if (fillerLength == 0)
+    {
+        // do not send an empty packet, it would not reach the jitter buffer
+        fillerLength = 1;
+    }
+
+    // send codec valid data; H.264 defines filler payload bytes as binary 11111111
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+
+    dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80);            // RTP version 2
+    dataBuffer[1] = rtpHeader->header.payloadType;
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // get the current SequenceNumber and add by 1 after returning
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, rtpHeader->header.timestamp);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, rtpHeader->header.ssrc);
+
+    // set filler NALU type
+    dataBuffer[12] = 12;        // NRI field = 0, type 12
+
+    // fill the payload with 0xff
+    memset(dataBuffer + 12 + 1, 0xff, fillerLength);
+
+    return _rtpSender.SendToNetwork(dataBuffer,
+                                    fillerLength,
+                                    12 + 1);
+}
+
+// Sends `bytes` bytes worth of H.264 filler-data packets (NAL type 12,
+// payload 0xff), splitting into multiple packets when the requested size
+// exceeds the per-packet maximum.
+// Returns 0 on success, -1 if sending fails.
+WebRtc_Word32
+RTPSenderH264::SendH264FillerData(const WebRtc_UWord32 captureTimestamp,
+                                  const WebRtc_UWord8 payloadType,
+                                  const WebRtc_UWord32 bytes
+                                  )
+{
+    // Payload bytes available per packet after RTP header and FEC overhead.
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+    const WebRtc_UWord16 maxLength = _rtpSender.MaxPayloadLength() -
+                                     FECPacketOverhead() - rtpHeaderLength;
+
+    WebRtc_Word32 bytesToSend = bytes;
+
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+
+    while (bytesToSend > 0)
+    {
+        // BUG FIX: the original tested "fillerLength < maxLength" right
+        // after assigning fillerLength = maxLength (always false), so the
+        // final packet was never clamped to the remaining byte count and
+        // could overshoot the requested total.
+        WebRtc_UWord16 fillerLength = maxLength;
+        if (bytesToSend < maxLength)
+        {
+            fillerLength = (WebRtc_UWord16) bytesToSend;
+        }
+
+        bytesToSend -= fillerLength;
+
+        if (fillerLength > WEBRTC_IP_PACKET_SIZE - 12 - 1)
+        {
+            return 0;
+        }
+
+        if (fillerLength == 0)
+        {
+            // do not send an empty packet, it would not reach the jitter buffer
+            fillerLength = 1;
+        }
+
+        // send padded data with correct seq num, timestamp and payload type
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, false, captureTimestamp, true, true);
+
+        // set filler NALU type
+        dataBuffer[12] = 12;        // NRI field = 0, type 12
+
+        // send codec valid data; H.264 defines filler payload bytes as
+        // binary 11111111 — fill with 0xff
+        memset(dataBuffer + 12 + 1, 0xff, fillerLength - 1);
+
+        if (_rtpSender.SendToNetwork(dataBuffer,
+                                     fillerLength,
+                                     12) < 0)
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Relays an incoming, already-packetized H.264 SVC RTP packet: copies the
+// packet, rewrites the sequence number (and the payload type when one is
+// configured), keeps the original timestamp, and sets the RTP marker bit
+// on the last packet of the highest relayed layer.
+// Returns the result of SendToNetwork(), or 0 if the packet is dropped.
+// NOTE(review): the ssrc parameter is unused (the SSRC rewrite below is
+// commented out) — confirm this is intentional.
+WebRtc_Word32
+RTPSenderH264::SendH264SVCRelayPacket(const WebRtcRTPHeader* rtpHeader,
+                                      const WebRtc_UWord8* incomingRTPPacket,
+                                      const WebRtc_UWord16 incomingRTPPacketSize,
+                                      const WebRtc_UWord32 ssrc,
+                                      const bool higestLayer)
+{
+    if (rtpHeader->header.sequenceNumber != (WebRtc_UWord16)(_h264SVCRelaySequenceNumber + 1))
+    {
+         // not continuous; signal loss by skipping an outgoing sequence number
+         _rtpSender.IncrementSequenceNumber();
+    }
+    _h264SVCRelaySequenceNumber = rtpHeader->header.sequenceNumber;
+
+
+    if (rtpHeader->header.timestamp != _h264SVCRelayTimeStamp)
+    {
+        // new frame
+        _h264SVCRelayLayerComplete = false;
+    }
+
+    if (rtpHeader->header.timestamp == _h264SVCRelayTimeStamp &&
+        _h264SVCRelayLayerComplete)
+    {
+        // sanity: end of layer already sent.
+        // Could happen for a fragmented packet with missing PACSI info
+        // (PACSI packet reordered and received after the packet it belongs
+        // to); fragmented packets have no layer info set (default info 0).
+        return 0;
+    }
+    _h264SVCRelayTimeStamp = rtpHeader->header.timestamp;
+
+    // re-packetize H.264-SVC packets
+    // we keep the timestamp unchanged
+    // make a copy and only change the SSRC and seqNum
+
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    memcpy(dataBuffer, incomingRTPPacket, incomingRTPPacketSize);
+
+    // _sequenceNumber initiated in Init()
+    // _ssrc initiated in constructor
+
+    // re-write payload type (keep the marker bit, replace the PT bits)
+    if(_h264SVCPayloadType != -1)
+    {
+        dataBuffer[1] &= kRtpMarkerBitMask;
+        dataBuffer[1] += _h264SVCPayloadType;
+    }
+
+    // _sequenceNumber will not work for re-ordering by NACK from original sender
+    // engine responsible for this
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // get the current SequenceNumber and add by 1 after returning
+    //ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, ssrc);
+
+    // how do we know it's the last relayed packet in a frame?
+    // 1) packets arrive in order, the engine manages that
+    // 2) highest layer that we relay
+    // 3) the end bit is set for the highest layer
+
+    if(higestLayer && rtpHeader->type.Video.codecHeader.H264.relayE)
+    {
+        // set marker bit
+        dataBuffer[1] |= kRtpMarkerBitMask;
+
+        // set relayed layer as complete
+        _h264SVCRelayLayerComplete = true;
+    }
+    return _rtpSender.SendToNetwork(dataBuffer,
+                         incomingRTPPacketSize - rtpHeader->header.headerLength,
+                         rtpHeader->header.headerLength);
+}
+
+// Packetizes NAL units using STAP-A aggregation (payload type octet 24):
+// as many whole NAL units as fit are combined into one RTP packet, each
+// preceded by a 2-byte NAL unit size field. If the current NAL unit alone
+// exceeds the maximum payload size, switchToFUA is set so the caller can
+// fall back to FU-A fragmentation. idxNALU, payloadBytesToSend and data
+// are advanced past every NAL unit consumed.
+// Returns 0 on success, -1 if sending fails.
+WebRtc_Word32
+RTPSenderH264::SendH264_STAP_A(const FrameType frameType,
+                                const H264Info* ptrH264Info,
+                                WebRtc_UWord16 &idxNALU,
+                                const WebRtc_Word8 payloadType,
+                                const WebRtc_UWord32 captureTimeStamp,
+                                bool& switchToFUA,
+                                WebRtc_Word32 &payloadBytesToSend,
+                                const WebRtc_UWord8*& data,
+                                const WebRtc_UWord16 rtpHeaderLength)
+{
+    // 2-byte NAL-unit size field preceding each aggregated NAL unit.
+    const WebRtc_Word32 H264_NALU_LENGTH = 2;
+
+    WebRtc_UWord16 h264HeaderLength = 1; // normal header length
+    WebRtc_UWord16 maxPayloadLengthSTAP_A = _rtpSender.MaxPayloadLength() -
+                                          FECPacketOverhead() - rtpHeaderLength -
+                                          h264HeaderLength - H264_NALU_LENGTH;
+
+    WebRtc_Word32 dataOffset = rtpHeaderLength + h264HeaderLength;
+    WebRtc_UWord8 NRI = 0; // highest NRI among the aggregated NAL units
+    WebRtc_UWord16 payloadBytesInPacket = 0;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+
+    if (ptrH264Info->payloadSize[idxNALU] > maxPayloadLengthSTAP_A)
+    {
+        // we need to fragment this NAL unit: switch to mode FU-A
+        switchToFUA = true;
+    } else
+    {
+        // combine as many NAL units as possible in every IP packet
+        do
+        {
+            if(!_h264SendPPS_SPS)
+            {
+                // don't send NALUs of type 7 and 8 (SPS and PPS)
+                if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
+                {
+                    payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    idxNALU++;
+                    continue;
+                }
+            }
+            if(ptrH264Info->payloadSize[idxNALU] + payloadBytesInPacket <= maxPayloadLengthSTAP_A)
+            {
+                // keep the maximum NRI of all aggregated NAL units
+                if(ptrH264Info->NRI[idxNALU] > NRI)
+                {
+                    NRI = ptrH264Info->NRI[idxNALU];
+                }
+                // put the NAL size into the packet
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] >> 8);
+                dataOffset++;
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] & 0xff);
+                dataOffset++;
+                // Put payload in packet (the start code is stripped)
+                memcpy(&dataBuffer[dataOffset], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
+                dataOffset += ptrH264Info->payloadSize[idxNALU];
+                data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                payloadBytesInPacket += (WebRtc_UWord16)(ptrH264Info->payloadSize[idxNALU] + H264_NALU_LENGTH);
+                payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            } else
+            {
+                // the next NALU does not fit in this packet
+                break;
+            }
+            idxNALU++;
+        }while(payloadBytesToSend);
+    }
+
+    // sanity
+    // don't send empty packets
+    if (payloadBytesInPacket)
+    {
+        // add RTP header; marker bit set only when the whole frame is sent
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, (payloadBytesToSend==0)?true:false, captureTimeStamp);
+        dataBuffer[rtpHeaderLength] = 24 + NRI; // STAP-A == 24
+        WebRtc_UWord16 payloadLength = payloadBytesInPacket + h264HeaderLength;
+
+        if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength))
+        {
+            return -1;
+        }
+    }
+    return 0;
+} // end STAP-A
+
+// STAP-A for H.264 SVC
+// Builds one STAP-A (NAL type 24) RTP packet that starts with a PACSI NAL
+// unit (RFC 6190) followed by as many NAL units of the *same* SVC layer as
+// fit within the MTU budget.  Advances idxNALU/data/payloadBytesToSend past
+// every NALU consumed.  Sets switchToFUA (and returns 0) when the next NALU
+// is too large and must be fragmented with FU-A instead.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32
+RTPSenderH264::SendH264_STAP_A_PACSI(const FrameType frameType,
+                                      const H264Info* ptrH264Info,
+                                      WebRtc_UWord16 &idxNALU,
+                                      const WebRtc_Word8 payloadType,
+                                      const WebRtc_UWord32 captureTimeStamp,
+                                      bool& switchToFUA,
+                                      WebRtc_Word32 &payloadBytesToSend,
+                                      const WebRtc_UWord8*& data,
+                                      const WebRtc_UWord16 rtpHeaderLength,
+                                      WebRtc_UWord16& decodingOrderNumber)
+{
+    const WebRtc_Word32 H264_NALU_LENGTH = 2; // 16-bit size field preceding each NALU inside a STAP-A
+
+    WebRtc_UWord16 h264HeaderLength = 1; // normal header length
+    WebRtc_UWord16 maxPayloadLengthSTAP_A = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength - h264HeaderLength - H264_NALU_LENGTH;
+    WebRtc_Word32 dataOffset = rtpHeaderLength + h264HeaderLength;
+    WebRtc_UWord8 NRI = 0; // highest NRI of the aggregated NALUs, copied into the STAP-A header
+    WebRtc_UWord16 payloadBytesInPacket = 0;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    bool firstNALUNotIDR = true; //delta
+    // NOTE(review): firstNALUNotIDR is initialized to true and is only ever
+    // re-assigned true below (never false), so the I-bit scan at the bottom
+    // always runs — confirm whether the assignment was meant to be inverted.
+
+    // Put PACSI NAL unit into packet
+    WebRtc_Word32 lengthPACSI = 0;
+    WebRtc_UWord32 PACSI_NALlength = ptrH264Info->PACSI[idxNALU].NALlength;
+    if (PACSI_NALlength > maxPayloadLengthSTAP_A)
+    {
+        return -1;
+    }
+    // 16-bit NALU-size field for the PACSI unit, big-endian.
+    dataBuffer[dataOffset++] = (WebRtc_UWord8)(PACSI_NALlength >> 8);
+    dataBuffer[dataOffset++] = (WebRtc_UWord8)(PACSI_NALlength & 0xff);
+
+    // end bit will be updated later, since another NALU in this packet might be the last
+    WebRtc_Word32 lengthPASCINALU = AddH264PACSINALU(true,
+                                                   false,
+                                                   ptrH264Info->PACSI[idxNALU],
+                                                   ptrH264Info->SVCheader[idxNALU],
+                           decodingOrderNumber,
+                           dataBuffer,
+                                                   dataOffset);
+    if (lengthPASCINALU <= 0)
+    {
+        return -1;
+    }
+    decodingOrderNumber++;
+
+    lengthPACSI = H264_NALU_LENGTH + lengthPASCINALU;
+    // The PACSI unit eats into the budget left for the media NALUs.
+    maxPayloadLengthSTAP_A -= (WebRtc_UWord16)lengthPACSI;
+    if (ptrH264Info->payloadSize[idxNALU] > maxPayloadLengthSTAP_A)
+    {
+        // we need to fragment NAL switch to mode FU-A
+        switchToFUA = true;
+        return 0;
+    }
+    if(!ptrH264Info->SVCheader[idxNALU].idr)
+    {
+        firstNALUNotIDR = true;
+    }
+
+    // Layer key: dependencyID | qualityID | temporalID packed into one word.
+    WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                         (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                          ptrH264Info->SVCheader[idxNALU].temporalID;
+
+    {
+        // combine as many NAL units in every IP packet, with the same priorityID
+        // Improvement we could allow several very small MGS NALU from different layers to be sent in one packet
+
+        do
+        {
+            if(!_h264SendPPS_SPS)
+            {
+                // Don't send NALU of type 7 and 8 SPS and PPS,
+                // they could be signaled out-of-band
+                if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
+                {
+                    payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    idxNALU++;
+                    continue;
+                }
+            }
+            //    don't send NALU type 6 (SEI message) not allowed when we send it in PACSI
+            if(ptrH264Info->type[idxNALU] == 6)
+            {
+                // SEI NALU Don't send, not allowed when we send it in PACSI
+                payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                idxNALU++;
+                continue;
+            }
+
+            const WebRtc_UWord32 layerNALU = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                                           (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                                            ptrH264Info->SVCheader[idxNALU].temporalID;
+
+            // we need to break on a new layer
+            if( ptrH264Info->payloadSize[idxNALU] + payloadBytesInPacket <= maxPayloadLengthSTAP_A &&
+                layerNALU == layer)
+            {
+                if(ptrH264Info->NRI[idxNALU] > NRI)
+                {
+                    NRI = ptrH264Info->NRI[idxNALU];
+                }
+                // put NAL size into packet (16-bit, big-endian)
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] >> 8);
+                dataOffset++;
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] & 0xff);
+                dataOffset++;
+                // Put payload in packet (skipping the Annex-B start code)
+                memcpy(&dataBuffer[dataOffset], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
+                dataOffset += ptrH264Info->payloadSize[idxNALU];
+                data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                payloadBytesInPacket += (WebRtc_UWord16)(ptrH264Info->payloadSize[idxNALU] + H264_NALU_LENGTH);
+                payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            } else
+            {
+                // the next NALU does not fit in this packet or,
+                // it's the next layer
+
+                // check if we should send this NALU
+                // based on the layer
+
+                if(_useHighestSendLayer && layerNALU != layer)
+                {
+                    // we don't send this NALU due to it's a new layer
+                    // check if we should send the next or if this is the last
+                    const WebRtc_UWord8 dependencyQualityID = (ptrH264Info->SVCheader[idxNALU].dependencyID << 4) + ptrH264Info->SVCheader[idxNALU].qualityID;
+
+                    bool highestLayer;
+                    if(SendH264SVCLayer(frameType,
+                                        ptrH264Info->SVCheader[idxNALU].temporalID,
+                                        dependencyQualityID,
+                                        highestLayer) == false)
+                    {
+                        // will trigger markerbit and stop sending this frame
+                        payloadBytesToSend = 0;
+                    }
+                }
+                break;
+            }
+            idxNALU++;
+
+        }while(payloadBytesToSend);
+    }
+
+    // sanity, don't send empty packets
+    if (payloadBytesInPacket)
+    {
+        // add RTP header; marker bit set when this exhausts the frame
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, (payloadBytesToSend==0)?true:false, captureTimeStamp);
+
+        dataBuffer[rtpHeaderLength] = 24 + NRI; // STAP-A == 24
+
+        // NRI for PACSI
+        dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 1] &= 0x1f;   // zero out NRI field
+        dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 1] |= NRI;
+
+        if(ptrH264Info->PACSI[idxNALU-1].E)
+        {
+            // update end bit
+            dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 5] |= 0x01;
+        }
+        if(firstNALUNotIDR)
+        {
+            // we have to check if any of the NALU in this packet is an IDR NALU
+            bool setIBit = false;
+            for(int i = 0; i < idxNALU; i++)
+            {
+                if(ptrH264Info->SVCheader[i].idr)
+                {
+                    setIBit = true;
+                    break;
+                }
+            }
+            if(setIBit)
+            {
+                // update I bit
+                dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 2] |= 0x40;
+            }
+        }
+        const WebRtc_UWord16 payloadLength = payloadBytesInPacket + h264HeaderLength + (WebRtc_UWord16)lengthPACSI;
+        if(-1 == SendVideoPacket(frameType,
+                                 dataBuffer,
+                                 payloadLength,
+                                 rtpHeaderLength,
+                                 layer==0))
+        {
+            return -1;
+        }
+    }
+    return 0;
+} // end STAP-A
+
+// FU-A fragmentation (RFC 6184 section 5.8) for one NALU that does not fit
+// in a single packet: first fragment carries the S bit, middle fragments
+// neither, the last fragment the E bit.  When sendSVCPACSI is set, a
+// stand-alone PACSI packet is emitted before each fragment.  Falls through
+// to single-NAL mode when the NALU actually fits.  Returns 0/-1.
+WebRtc_Word32
+RTPSenderH264::SendH264_FU_A(const FrameType frameType,
+                              const H264Info* ptrH264Info,
+                              WebRtc_UWord16 &idxNALU,
+                              const WebRtc_Word8 payloadType,
+                              const WebRtc_UWord32 captureTimeStamp,
+                              WebRtc_Word32 &payloadBytesToSend,
+                              const WebRtc_UWord8*& data,
+                              const WebRtc_UWord16 rtpHeaderLength,
+                              WebRtc_UWord16& decodingOrderNumber,
+                              const bool sendSVCPACSI)
+{
+
+    // FUA for the rest of the frame
+    WebRtc_UWord16 maxPayloadLength = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    WebRtc_UWord32 payloadBytesRemainingInNALU = ptrH264Info->payloadSize[idxNALU];
+
+    bool isBaseLayer=false;
+
+    if(payloadBytesRemainingInNALU > maxPayloadLength)
+    {
+        // we need to fragment NALU
+        const WebRtc_UWord16 H264_FUA_LENGTH = 2; // FU-a H.264 header is 2 bytes
+
+        if(sendSVCPACSI)
+        {
+            SendH264_SinglePACSI(frameType,
+                                 ptrH264Info,
+                                 idxNALU,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 true,
+                                 false);
+
+            WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                                 (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                                  ptrH264Info->SVCheader[idxNALU].temporalID;
+            isBaseLayer=(layer==0);
+        }
+
+        // First packet
+        _rtpSender.BuildRTPheader(dataBuffer,payloadType, false, captureTimeStamp);
+
+        WebRtc_UWord16 maxPayloadLengthFU_A = maxPayloadLength - H264_FUA_LENGTH ;
+        WebRtc_UWord8 fuaIndc = 28 + ptrH264Info->NRI[idxNALU]; // FU-A type == 28, plus NRI bits
+        dataBuffer[rtpHeaderLength] = fuaIndc;                                                     // FU-A indicator
+        dataBuffer[rtpHeaderLength+1] = (WebRtc_UWord8)(ptrH264Info->type[idxNALU] + 0x80)/*start*/; // FU-A header
+
+        // +1 skips the original NALU header byte; its type travels in the FU header.
+        memcpy(&dataBuffer[rtpHeaderLength + H264_FUA_LENGTH], &data[ptrH264Info->startCodeSize[idxNALU]+1], maxPayloadLengthFU_A);
+        WebRtc_UWord16 payloadLength = maxPayloadLengthFU_A + H264_FUA_LENGTH;
+        if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength, isBaseLayer))
+        {
+            return -1;
+        }
+
+        //+1 is from the type that is coded into the FU-a header
+        data += maxPayloadLengthFU_A + 1 + ptrH264Info->startCodeSize[idxNALU];             // inc data ptr
+        payloadBytesToSend -= maxPayloadLengthFU_A+1+ptrH264Info->startCodeSize[idxNALU];
+        payloadBytesRemainingInNALU -= maxPayloadLengthFU_A+1;
+
+        // all non first/last packets
+        while(payloadBytesRemainingInNALU  > maxPayloadLengthFU_A)
+        {
+            if(sendSVCPACSI)
+            {
+                SendH264_SinglePACSI(frameType,
+                                     ptrH264Info,
+                                     idxNALU,
+                                     payloadType,
+                                     captureTimeStamp,
+                                     false,
+                                     false);
+            }
+
+            // prepare next header
+            _rtpSender.BuildRTPheader(dataBuffer, payloadType, false, captureTimeStamp);
+
+            dataBuffer[rtpHeaderLength] = (WebRtc_UWord8)fuaIndc;           // FU-A indicator
+            dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU];   // FU-A header (no S/E bit)
+
+            memcpy(&dataBuffer[rtpHeaderLength+H264_FUA_LENGTH], data, maxPayloadLengthFU_A);
+            payloadLength = maxPayloadLengthFU_A + H264_FUA_LENGTH;
+
+            if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength,isBaseLayer))
+            {
+                return -1;
+            }
+            data += maxPayloadLengthFU_A; // inc data ptr
+            payloadBytesToSend -= maxPayloadLengthFU_A;
+            payloadBytesRemainingInNALU -= maxPayloadLengthFU_A;
+            // NOTE(review): the two writes below are overwritten at the top of
+            // the next iteration (or by the last-packet code) — appear redundant.
+            dataBuffer[rtpHeaderLength] = fuaIndc;                         // FU-A indicator
+            dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU];    // FU-A header
+        }
+        if(sendSVCPACSI)
+        {
+            SendH264_SinglePACSI(frameType,
+                                 ptrH264Info,
+                                 idxNALU,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 false,
+                                 true); // last packet in NALU
+
+            if(_useHighestSendLayer && idxNALU+1 < ptrH264Info->numNALUs)
+            {
+                // not last NALU in frame
+                // check if the next layer should not be sent
+
+                // check if we should send the next or if this is the last
+                const WebRtc_UWord8 dependencyQualityID = (ptrH264Info->SVCheader[idxNALU+1].dependencyID << 4) +
+                                                         ptrH264Info->SVCheader[idxNALU+1].qualityID;
+
+                bool highestLayer;
+                if(SendH264SVCLayer(frameType,
+                                    ptrH264Info->SVCheader[idxNALU+1].temporalID,
+                                    dependencyQualityID,
+                                    highestLayer) == false)
+                {
+                    // will trigger markerbit and stop sending this frame
+                    payloadBytesToSend = payloadBytesRemainingInNALU;
+                }
+            }
+        }
+        // last packet in NALU; marker bit set when the frame ends here
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType,(payloadBytesToSend == (WebRtc_Word32)payloadBytesRemainingInNALU)?true:false, captureTimeStamp);
+        dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU] + 0x40/*stop*/; // FU-A header
+
+        memcpy(&dataBuffer[rtpHeaderLength+H264_FUA_LENGTH], data, payloadBytesRemainingInNALU);
+        payloadLength = (WebRtc_UWord16)payloadBytesRemainingInNALU + H264_FUA_LENGTH;
+        payloadBytesToSend -= payloadBytesRemainingInNALU;
+        if(payloadBytesToSend != 0)
+        {
+            data += payloadBytesRemainingInNALU; // inc data ptr
+        }
+        idxNALU++;
+        if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength,isBaseLayer))
+        {
+            return -1;
+        }
+    } else
+    {
+        // send NAL unit in single mode
+        // NOTE(review): SendH264_SingleMode is declared with a
+        // decodingOrderNumber parameter before sendSVCPACSI, but this call
+        // passes only 9 arguments — verify against the header declaration.
+        return SendH264_SingleMode(frameType,
+                                   ptrH264Info,
+                                   idxNALU,
+                                   payloadType,
+                                   captureTimeStamp,
+                                   payloadBytesToSend,
+                                   data,
+                                   rtpHeaderLength,
+                                   sendSVCPACSI);
+    }
+    // end FU-a
+    return 0;
+}
+
+// Sends one NALU per RTP packet (single NAL unit mode, RFC 6184 section
+// 5.6).  SPS/PPS (types 7/8) are skipped when out-of-band signaling is
+// configured; a PACSI packet precedes the NALU when sendSVCPACSI is set.
+// Advances idxNALU/data/payloadBytesToSend.  Returns 0, -1 on send failure,
+// -3 when the NALU exceeds even the absolute IP packet-size limit.
+// NOTE(review): decodingOrderNumber is unused in this function.
+WebRtc_Word32
+RTPSenderH264::SendH264_SingleMode(const FrameType frameType,
+                                    const H264Info* ptrH264Info,
+                                    WebRtc_UWord16 &idxNALU,
+                                    const WebRtc_Word8 payloadType,
+                                    const WebRtc_UWord32 captureTimeStamp,
+                                    WebRtc_Word32 &payloadBytesToSend,
+                                    const WebRtc_UWord8*& data,
+                                    const WebRtc_UWord16 rtpHeaderLength,
+                                    WebRtc_UWord16& decodingOrderNumber,
+                                    const bool sendSVCPACSI)
+{
+    // no H.264 header length in single mode
+    // we use WEBRTC_IP_PACKET_SIZE instead of the configured MTU since it's better to send fragmented UDP than not to send
+    const WebRtc_UWord16 maxPayloadLength = WEBRTC_IP_PACKET_SIZE - _rtpSender.PacketOverHead() - FECPacketOverhead() - rtpHeaderLength;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    bool isBaseLayer=false;
+
+    if(ptrH264Info->payloadSize[idxNALU] > maxPayloadLength)
+    {
+        return -3;
+    }
+    if(!_h264SendPPS_SPS)
+    {
+        // don't send NALU of type 7 and 8 SPS and PPS
+        if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
+        {
+            payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            idxNALU++;
+            return 0;
+        }
+    }
+    if(sendSVCPACSI)
+    {
+        SendH264_SinglePACSI(frameType,
+                             ptrH264Info,
+                             idxNALU,
+                             payloadType,
+                             captureTimeStamp,
+                             true,
+                             true);
+
+        WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                             (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                              ptrH264Info->SVCheader[idxNALU].temporalID;
+        isBaseLayer=(layer==0);
+    }
+
+    // Put payload in packet (skipping the Annex-B start code)
+    memcpy(&dataBuffer[rtpHeaderLength], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
+
+    WebRtc_UWord16 payloadBytesInPacket = (WebRtc_UWord16)ptrH264Info->payloadSize[idxNALU];
+    payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU]; // left to send
+
+    // marker bit set when this NALU exhausts the frame
+    _rtpSender.BuildRTPheader(dataBuffer,payloadType,(payloadBytesToSend ==0)?true:false, captureTimeStamp);
+
+    // first payload byte is the NALU header; rewrite its NRI bits
+    dataBuffer[rtpHeaderLength] &= 0x1f; // zero out NRI field
+    dataBuffer[rtpHeaderLength] |= ptrH264Info->NRI[idxNALU]; // nri
+    if(payloadBytesToSend > 0)
+    {
+        data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+    }
+    idxNALU++;
+    if(-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket, rtpHeaderLength,isBaseLayer))
+    {
+        return -1;
+    }
+    return 0;
+}
+
+// Sends one stand-alone PACSI NAL unit (RFC 6190) in its own RTP packet,
+// carrying the SVC header info for the NALU at idxNALU.  first/lastPacketInNALU
+// set the PACSI S/E flags.  Returns 0 on success, -1 on failure.
+//
+// FIX: the original definition had a stray ';' after the parameter list
+// ("...lastPacketInNALU);"), which terminates the declaration and leaves the
+// body as an orphaned brace block — a compile error.  Removed.
+WebRtc_Word32
+RTPSenderH264::SendH264_SinglePACSI(const FrameType frameType,
+                                    const H264Info* ptrH264Info,
+                                     const WebRtc_UWord16 idxNALU,
+                                     const WebRtc_Word8 payloadType,
+                                     const WebRtc_UWord32 captureTimeStamp,
+                                     const bool firstPacketInNALU,
+                                     const bool lastPacketInNALU)
+{
+    // Send PACSI in single mode
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)_rtpSender.BuildRTPheader(dataBuffer, payloadType,false, captureTimeStamp);
+    WebRtc_Word32 dataOffset = rtpHeaderLength;
+
+    // NOTE(review): decodingOrderNumber is not declared in this scope and is
+    // not a parameter here, unlike the sibling send functions — it presumably
+    // should be a WebRtc_UWord16& parameter (or a member); verify against the
+    // header declaration before use.
+    WebRtc_Word32 lengthPASCINALU = AddH264PACSINALU(firstPacketInNALU,
+                                                   lastPacketInNALU,
+                                                   ptrH264Info->PACSI[idxNALU],
+                                                   ptrH264Info->SVCheader[idxNALU],
+                                                   decodingOrderNumber,
+                                                   dataBuffer,
+                                                   dataOffset);
+
+    if (lengthPASCINALU <= 0)
+    {
+        return -1;
+    }
+    decodingOrderNumber++;
+
+    WebRtc_UWord16 payloadBytesInPacket = (WebRtc_UWord16)lengthPASCINALU;
+
+    // Set payload header (first payload byte co-serves as the payload header)
+    dataBuffer[rtpHeaderLength] &= 0x1f;        // zero out NRI field
+    dataBuffer[rtpHeaderLength] |= ptrH264Info->NRI[idxNALU]; // nri
+
+    const WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                               (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                                ptrH264Info->SVCheader[idxNALU].temporalID;
+
+    if (-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket, rtpHeaderLength,layer==0))
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+
+
+WebRtc_Word32
+RTPSenderH264::SendH264SVC(const FrameType frameType,
+                            const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize,
+                            H264Information& h264Information,
+                            WebRtc_UWord16& decodingOrderNumber)
+{
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+
+    const H264Info* ptrH264Info = NULL;
+    if (h264Information.GetInfo(payloadData,payloadSize, ptrH264Info) == -1)
+    {
+        return -1;
+    }
+    if(_useHighestSendLayer)
+    {
+        // we need to check if we should drop the frame
+        // it could be a temporal layer (aka a temporal frame)
+        const WebRtc_UWord8 dependencyQualityID = (ptrH264Info->SVCheader[0].dependencyID << 4) + ptrH264Info->SVCheader[0].qualityID;
+
+        bool dummyHighestLayer;
+        if(SendH264SVCLayer(frameType,
+                            ptrH264Info->SVCheader[0].temporalID,
+                            dependencyQualityID,
+                            dummyHighestLayer) == false)
+        {
+            // skip send this frame
+            return 0;
+        }
+    }
+
+    WebRtc_UWord16 idxNALU = 0;
+    while (payloadBytesToSend > 0)
+    {
+        bool switchToFUA = false;
+        if (SendH264_STAP_A_PACSI(frameType,
+                                  ptrH264Info,
+                                  idxNALU,
+                                  payloadType,
+                                  captureTimeStamp,
+                                  switchToFUA,
+                                  payloadBytesToSend,
+                                  payloadData,
+                                  rtpHeaderLength,
+                                  decodingOrderNumber) != 0)
+        {
+            return -1;
+        }
+        if(switchToFUA)
+        {
+            // FU_A for this NALU
+            if (SendH264_FU_A(frameType,
+                              ptrH264Info,
+                              idxNALU,
+                              payloadType,
+                              captureTimeStamp,
+                              payloadBytesToSend,
+                              payloadData,
+                              rtpHeaderLength,
+                              true) != 0)
+            {
+                return -1;
+            }
+        }
+    }
+    return 0;
+}
+
+// Select the H.264 packetization mode used by SendH264(). Always succeeds.
+WebRtc_Word32
+RTPSenderH264::SetH264PacketizationMode(const H264PacketizationMode mode)
+{
+    _h264Mode = mode; // takes effect on the next SendH264 call
+    return 0;
+}
+
+// Configure whether SPS/PPS NALUs (types 7/8) are sent in-band.
+// dontSend == true suppresses them (inverted into _h264SendPPS_SPS).
+// Always succeeds.
+WebRtc_Word32
+RTPSenderH264::SetH264SendModeNALU_PPS_SPS(const bool dontSend)
+{
+    _h264SendPPS_SPS = (dontSend == false);
+    return 0;
+}
+
+// Decides whether a NALU of the given temporal/dependency/quality layer
+// should be relayed, honoring a pending dependency-layer switch (which only
+// completes on a key frame).  Returns true to relay, false to drop; sets
+// higestLayer [sic] when the NALU belongs to the highest relayed layer.
+// NOTE(review): higestLayer is only ever assigned true — callers must
+// initialize it to false themselves (they currently pass it uninitialized).
+bool
+RTPSenderH264::SendH264SVCLayer(const FrameType frameType,
+                                  const WebRtc_UWord8 temporalID,
+                                  const WebRtc_UWord8 dependencyQualityID,
+                                  bool& higestLayer)
+{
+    WebRtc_UWord8 dependencyID  = dependencyQualityID >> 4; // upper nibble = dependency, lower = quality
+
+    // keyframe required to switch between dependency layers not quality and temporal
+    if( _highestDependencyLayer != _highestDependencyLayerOld)
+    {
+        // we want to switch dependency layer
+        if(frameType == kVideoFrameKey)
+        {
+            // key frame we can change layer if it's correct layer
+            if(_highestDependencyLayer > _highestDependencyLayerOld)
+            {
+                // we want to switch up
+                // does this packet belong to a new layer?
+
+                if( dependencyID > _highestDependencyLayerOld &&
+                    dependencyID <= _highestDependencyLayer)
+                {
+                    // commit the switch: this NALU's layer becomes the new "old"
+                    _highestDependencyLayerOld = dependencyID;
+                    _highestDependencyQualityIDOld = _highestDependencyQualityID;
+
+                    if( dependencyID == _highestDependencyLayer &&
+                        dependencyQualityID == _highestDependencyQualityID)
+                    {
+                        higestLayer = true;
+                    }
+                    // relay
+                    return true;
+                }
+            }
+            if(_highestDependencyLayer < _highestDependencyLayerOld)
+            {
+                // we want to switch down
+                // does this packet belong to a low layer?
+                if( dependencyID <= _highestDependencyLayer)
+                {
+                    _highestDependencyLayerOld = dependencyID;
+                    _highestDependencyQualityIDOld = _highestDependencyQualityID;
+                    if( dependencyID == _highestDependencyLayer &&
+                        dependencyQualityID == _highestDependencyQualityID)
+                    {
+                        higestLayer = true;
+                    }
+                    // relay
+                    return true;
+                }
+            }
+        } else
+        {
+            // Delta frame and we are waiting to switch dependency layer
+            if(_highestDependencyLayer > _highestDependencyLayerOld)
+            {
+                // we want to switch up to a higher dependency layer
+                // use old setting until we get a key-frame
+
+                // filter based on old dependency
+                // we could have allowed to add a MGS layer lower than the dependency ID
+                // but then we can't know the highest layer relayed we assume that the user
+                // will add one layer at a time
+                if( _highestTemporalLayer < temporalID ||
+                    _highestDependencyLayerOld < dependencyID ||
+                    _highestDependencyQualityIDOld < dependencyQualityID)
+                {
+                    // drop
+                    return false;
+                }
+                // highest layer based on old
+                if( dependencyID == _highestDependencyLayerOld &&
+                    dependencyQualityID == _highestDependencyQualityIDOld)
+                {
+                    higestLayer = true;
+                }
+            } else
+            {
+                // we want to switch down to a lower dependency layer,
+                // use old setting, done below
+                // drop all temporal layers while waiting for the key-frame
+                if(temporalID > 0)
+                {
+                    // drop
+                    return false;
+                }
+                // we can't drop a lower MGS layer since this might depend on it
+                // however we can drop MGS layers larger than dependecyQualityId
+                // with dependency from old and quality 0
+                if( _highestDependencyLayerOld < dependencyID ||
+                    (_highestDependencyQualityIDOld & 0xf0) < dependencyQualityID)
+                {
+                    // drop
+                    return false;
+                }
+                if( dependencyID == _highestDependencyLayerOld &&
+                    dependencyQualityID == (_highestDependencyQualityIDOld & 0xf0))
+                {
+                    higestLayer = true;
+                }
+            }
+        }
+    } else
+    {
+        // filter based on current state
+        if( _highestTemporalLayer < temporalID ||
+            _highestDependencyLayer < dependencyID ||
+            _highestDependencyQualityID < dependencyQualityID)
+        {
+            // drop
+            return false;
+        }
+        if( dependencyID == _highestDependencyLayer &&
+            dependencyQualityID == _highestDependencyQualityID)
+        {
+            higestLayer = true;
+        }
+    }
+    return true;
+}
+
+// Set the ceiling for which SVC layers are relayed.  dependencyQualityLayer
+// packs dependency (upper nibble) and quality (lower nibble); temporalLayer
+// caps the temporal ID.  Enables layer filtering.  Always returns 0.
+WebRtc_Word32
+RTPSenderH264::SetHighestSendLayer(const WebRtc_UWord8 dependencyQualityLayer,
+                                   const WebRtc_UWord8 temporalLayer)
+{
+    const WebRtc_UWord8 dependencyLayer = (dependencyQualityLayer >> 4);
+
+    // Only update the stored "old" quality ID when no dependency switch is
+    // pending (old == current); a pending switch keeps its old value until a
+    // key frame lets SendH264SVCLayer complete it.
+    if (_highestDependencyLayerOld == _highestDependencyLayer)
+    {
+        if (dependencyLayer == _highestDependencyLayer)
+        {
+            // Same dependency layer: the quality change takes effect immediately.
+            _highestDependencyQualityIDOld = dependencyQualityLayer;
+        }
+        else
+        {
+            // Dependency layer changes: keep the current quality ID as "old".
+            _highestDependencyQualityIDOld = _highestDependencyQualityID;
+        }
+    }
+    _useHighestSendLayer = true;
+    _highestDependencyLayer = dependencyLayer;
+    _highestDependencyQualityID = dependencyQualityLayer;
+    _highestTemporalLayer = temporalLayer;
+    return 0;
+}
+
+// Read back the configured send-layer ceiling set by SetHighestSendLayer().
+// Returns 0 and fills the out-parameters, or -1 when no ceiling is set.
+WebRtc_Word32
+RTPSenderH264::HighestSendLayer(WebRtc_UWord8& dependencyQualityLayer,
+                                WebRtc_UWord8& temporalLayer)
+{
+    if (_useHighestSendLayer)
+    {
+        dependencyQualityLayer = _highestDependencyQualityID;
+        temporalLayer = _highestTemporalLayer;
+        return 0;
+    }
+    // No information set
+    return -1;
+}
+/*
+*   H.264 (AVC, non-SVC path)
+*/
+// Sends one H.264 frame according to the configured packetization mode:
+// non-interleaved (STAP-A with per-NALU FU-A fallback) or single-NAL.
+// Interleaved mode is not supported.  Returns 0 on success, -1 on error.
+WebRtc_Word32
+RTPSenderH264::SendH264(const FrameType frameType,
+                        const WebRtc_Word8 payloadType,
+                        const WebRtc_UWord32 captureTimeStamp,
+                        const WebRtc_UWord8* payloadData,
+                        const WebRtc_UWord32 payloadSize,
+                        H264Information& h264Information)
+{
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+    const WebRtc_UWord8* data = payloadData;
+    bool switchToFUA = false;
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+
+    const H264Info* ptrH264Info = NULL;
+    if (h264Information.GetInfo(payloadData,payloadSize, ptrH264Info) == -1)
+    {
+        return -1;
+    }
+    WebRtc_UWord16 idxNALU = 0;
+    // placeholder DON for the non-SVC path (decoding order number unused)
+    WebRtc_UWord16 DONCdummy = 0;
+
+    while (payloadBytesToSend > 0)
+    {
+        switch(_h264Mode)
+        {
+        case H264_NON_INTERLEAVED_MODE:
+
+            if(!switchToFUA)
+            {
+                if(SendH264_STAP_A(frameType,
+                                   ptrH264Info,
+                                   idxNALU,
+                                   payloadType,
+                                   captureTimeStamp,
+                                   switchToFUA,
+                                   payloadBytesToSend,
+                                   data,
+                                   rtpHeaderLength) != 0)
+                {
+                    return -1;
+                }
+            }
+            else
+            {
+                // FUA for the rest of the frame
+                // NOTE(review): DONCdummy is passed in the decodingOrderNumber
+                // slot; sendSVCPACSI is not passed — presumably defaulted to
+                // false in the header declaration.  Verify.
+                if(SendH264_FU_A(frameType,
+                                 ptrH264Info,
+                                 idxNALU,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 payloadBytesToSend,
+                                 data,
+                                 rtpHeaderLength,
+                                 DONCdummy) != 0)
+                {
+                    return -1;
+                }
+                // try to go back to STAP_A
+                switchToFUA = false;
+            }
+            break;
+        case H264_SINGLE_NAL_MODE:
+            {
+                // modeSingleU
+                if(SendH264_SingleMode(frameType,
+                                       ptrH264Info,
+                                       idxNALU,
+                                       payloadType,
+                                       captureTimeStamp,
+                                       payloadBytesToSend,
+                                       data,
+                                       rtpHeaderLength,
+                                       DONCdummy) != 0)
+                {
+                    return -1;
+                }
+                break;
+            }
+        case H264_INTERLEAVED_MODE:
+            // not supported
+            assert(false);
+            return -1;
+        }
+    }
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.h b/trunk/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.h
new file mode 100644
index 0000000..564b870
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.h
@@ -0,0 +1,179 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_
+
+#include "typedefs.h"
+#include "ModuleRTPRTCPConfig.h"
+#include "rtp_rtcp_defines.h"
+#include "h264_information.h"
+
+#include "RTPSender.h"
+
+namespace webrtc {
+// Mixin implementing H.264 AVC/SVC RTP packetization (single NAL, STAP-A,
+// FU-A; PACSI for SVC). Derived classes supply the actual packet transport
+// via the pure-virtual SendVideoPacket().
+class RTPSenderH264
+{
+public:
+    // Packetizes and sends one H.264 AVC frame using the currently
+    // configured packetization mode (_h264Mode).
+    WebRtc_Word32 SendH264(const FrameType frameType,
+                           const WebRtc_Word8 payloadType,
+                           const WebRtc_UWord32 captureTimeStamp,
+                           const WebRtc_UWord8* payloadData,
+                           const WebRtc_UWord32 payloadSize,
+                           H264Information& h264Information);
+
+    // Packetizes and sends one H.264 SVC frame.
+    WebRtc_Word32 SendH264SVC(const FrameType frameType,
+                              const WebRtc_Word8 payloadType,
+                              const WebRtc_UWord32 captureTimeStamp,
+                              const WebRtc_UWord8* payloadData,
+                              const WebRtc_UWord32 payloadSize,
+                              H264Information& h264Information);
+
+    // H.264 AVC
+    WebRtc_Word32 SetH264PacketizationMode(const H264PacketizationMode mode);
+
+    WebRtc_Word32 SetH264SendModeNALU_PPS_SPS(const bool dontSend);
+
+    // H.264 SVC
+    WebRtc_Word32 SetHighestSendLayer(const WebRtc_UWord8 dependencyQualityLayer,
+                                      const WebRtc_UWord8 temporalLayer);
+
+    WebRtc_Word32 HighestSendLayer(WebRtc_UWord8& dependencyQualityLayer,
+                                   WebRtc_UWord8& temporalLayer);
+
+protected:
+    RTPSenderH264(RTPSenderInterface* rtpSender);
+    virtual ~RTPSenderH264();
+
+    WebRtc_Word32 Init();
+
+    virtual WebRtc_UWord16 FECPacketOverhead() const = 0;
+    virtual RtpVideoCodecTypes VideoCodecType() const = 0;
+
+    // Implemented by the derived sender; performs the actual RTP send.
+    virtual WebRtc_Word32 SendVideoPacket(const FrameType frameType,
+                                          const WebRtc_UWord8* dataBuffer,
+                                          const WebRtc_UWord16 payloadLength,
+                                          const WebRtc_UWord16 rtpHeaderLength,
+                                          bool baseLayerVideoPacket=false) = 0;
+
+    bool SendH264SVCLayer(const FrameType frameType,
+                          const WebRtc_UWord8 temporalID,
+                          const WebRtc_UWord8 dependencyQualityID,
+                          bool& highestLayer);
+
+    // H.264 SVC
+    WebRtc_Word32 AddH264PACSINALU(const bool firstPacketInNALU,
+                                   const bool lastPacketInNALU,
+                                   const H264_PACSI_NALU& paci,
+                                   const H264_SVC_NALUHeader& svc,
+                                   const WebRtc_UWord16 DONC,
+                                   WebRtc_UWord8* databuffer,
+                                   WebRtc_Word32& curByte) const;
+
+    WebRtc_Word32 SendH264FillerData(const WebRtcRTPHeader* rtpHeader,
+                                     const WebRtc_UWord16 bytesToSend,
+                                     const WebRtc_UWord32 ssrc);
+
+    WebRtc_Word32 SendH264FillerData(const WebRtc_UWord32 captureTimestamp,
+                                     const WebRtc_UWord8 payloadType,
+                                     const WebRtc_UWord32 bytesToSend);
+
+    WebRtc_Word32 SendH264SVCRelayPacket(const WebRtcRTPHeader* rtpHeader,
+                                         const WebRtc_UWord8* incomingRTPPacket,
+                                         const WebRtc_UWord16 incomingRTPPacketSize,
+                                         const WebRtc_UWord32 ssrc,
+                                         const bool highestLayer);
+
+    WebRtc_Word32 SetH264RelaySequenceNumber(const WebRtc_UWord16 seqNum);
+
+    WebRtc_Word32 SetH264RelayCompleteLayer(const bool complete);
+
+    // H.264
+    H264PacketizationMode     _h264Mode;
+    bool                      _h264SendPPS_SPS;
+
+    // H.264-SVC
+    WebRtc_Word8              _h264SVCPayloadType;
+    WebRtc_UWord16            _h264SVCRelaySequenceNumber;
+    WebRtc_UWord32            _h264SVCRelayTimeStamp;
+    bool                      _h264SVCRelayLayerComplete;
+
+private:
+    // H.264
+    WebRtc_Word32 SendH264_SingleMode(const FrameType frameType,
+                                      const H264Info* ptrH264Info,
+                                      WebRtc_UWord16 &idxNALU,
+                                      const WebRtc_Word8 payloadType,
+                                      const WebRtc_UWord32 captureTimeStamp,
+                                      WebRtc_Word32 &payloadBytesToSend,
+                                      const WebRtc_UWord8*& data,
+                                      const WebRtc_UWord16 rtpHeaderLength,
+                                      const bool sendSVCPACSI=false);
+
+    WebRtc_Word32 SendH264_FU_A(const FrameType frameType,
+                                const H264Info* ptrH264Info,
+                                WebRtc_UWord16 &idxNALU,
+                                const WebRtc_Word8 payloadType,
+                                const WebRtc_UWord32 captureTimeStamp,
+                                WebRtc_Word32 &payloadBytesToSend,
+                                const WebRtc_UWord8*& data,
+                                const WebRtc_UWord16 rtpHeaderLength,
+                                const bool sendSVCPACSI = false);
+
+    WebRtc_Word32 SendH264_STAP_A(const FrameType frameType,
+                                  const H264Info* ptrH264Info,
+                                  WebRtc_UWord16 &idxNALU,
+                                  const WebRtc_Word8 payloadType,
+                                  const WebRtc_UWord32 captureTimeStamp,
+                                  bool& switchToFUA,
+                                  WebRtc_Word32 &payloadBytesToSend,
+                                  const WebRtc_UWord8*& data,
+                                  const WebRtc_UWord16 rtpHeaderLength);
+
+    // BUG FIX: this declaration was missing its terminating ';' (which made
+    // the header ill-formed) and misspelled the last parameter name.
+    WebRtc_Word32 SendH264_STAP_A_PACSI(const FrameType frameType,
+                                        const H264Info* ptrH264Info,
+                                        WebRtc_UWord16 &idxNALU,
+                                        const WebRtc_Word8 payloadType,
+                                        const WebRtc_UWord32 captureTimeStamp,
+                                        bool& switchToFUA,
+                                        WebRtc_Word32 &payloadBytesToSend,
+                                        const WebRtc_UWord8*& data,
+                                        const WebRtc_UWord16 rtpHeaderLength);
+
+    WebRtc_Word32 SendH264_SinglePACSI(const FrameType frameType,
+                                       const H264Info* ptrH264Info,
+                                       const WebRtc_UWord16 idxNALU,
+                                       const WebRtc_Word8 payloadType,
+                                       const WebRtc_UWord32 captureTimeStamp,
+                                       const bool firstPacketInNALU,
+                                       const bool lastPacketInNALU);
+
+    bool AddH264SVCNALUHeader(const H264_SVC_NALUHeader& svc,
+                              WebRtc_UWord8* databuffer,
+                              WebRtc_Word32& curByte) const;
+
+    RTPSenderInterface&       _rtpSender;
+
+    // relay
+    bool                      _useHighestSendLayer;
+    WebRtc_UWord8             _highestDependencyLayerOld;
+    WebRtc_UWord8             _highestDependencyQualityIDOld;
+    WebRtc_UWord8             _highestDependencyLayer;
+    WebRtc_UWord8             _highestDependencyQualityID;
+    WebRtc_UWord8             _highestTemporalLayer;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/bandwidth_management.cc b/trunk/src/modules/rtp_rtcp/source/bandwidth_management.cc
new file mode 100644
index 0000000..a14a2ef
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/bandwidth_management.cc
@@ -0,0 +1,316 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "bandwidth_management.h"
+#include "trace.h"
+#include "rtp_utility.h"
+#include "rtp_rtcp_config.h"
+
+#include <math.h>   // sqrt()
+
+namespace webrtc {
+
+// Initializes all estimator state. _bitRate stays 0 — which every method
+// treats as "bandwidth management off" — until SetSendBitrate() is called.
+BandwidthManagement::BandwidthManagement(const WebRtc_Word32 id) :
+    _id(id),
+    _critsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _lastPacketLossExtendedHighSeqNum(0),
+    _lastReportAllLost(false),
+    _lastLoss(0),
+    _accumulateLostPacketsQ8(0),
+    _accumulateExpectedPackets(0),
+    _bitRate(0),
+    _minBitRateConfigured(0),
+    _maxBitRateConfigured(0),
+    _last_fraction_loss(0),
+    _last_round_trip_time(0),
+    _bwEstimateIncoming(0),
+    _smoothedFractionLostQ4(-1), // indicate uninitialized
+    _sFLFactorQ4(14),            // 0.875 in Q4
+    _timeLastIncrease(0)
+{
+}
+
+// Releases the critical section allocated in the constructor.
+BandwidthManagement::~BandwidthManagement()
+{
+    delete _critsect;
+}
+
+// Configures the starting bitrate and the [min, max] envelope. The min/max
+// inputs are in kbit/s and stored internally in bit/s; a max of 0 means
+// "no cap" and is mapped to 1 Gbit/s.
+void
+BandwidthManagement::SetSendBitrate(const WebRtc_UWord32 startBitrate,
+                                    const WebRtc_UWord16 minBitrateKbit,
+                                    const WebRtc_UWord16 maxBitrateKbit)
+{
+    CriticalSectionScoped cs(_critsect);
+
+    _bitRate = startBitrate;
+    _minBitRateConfigured = minBitrateKbit * 1000;
+    // No configured maximum means unconstrained: use 1 Gbit/s.
+    _maxBitRateConfigured =
+        (maxBitrateKbit == 0) ? 1000000000 : maxBitrateKbit * 1000;
+}
+
+// Returns the configured maximum bitrate in kbit/s via |maxBitrateKbit|.
+// Returns -1 when no maximum has been configured yet or the out-pointer is
+// NULL (NULL guard added for consistency with AvailableBandwidth()).
+WebRtc_Word32
+BandwidthManagement::MaxConfiguredBitrate(WebRtc_UWord16* maxBitrateKbit)
+{
+    CriticalSectionScoped cs(_critsect);
+
+    if(_maxBitRateConfigured == 0 || !maxBitrateKbit)
+    {
+        return -1;
+    }
+    *maxBitrateKbit = (WebRtc_UWord16)(_maxBitRateConfigured/1000);
+    return 0;
+}
+
+// Called when an RTCP message carrying a bandwidth estimate (TMMBR/REMB,
+// in kbit/s) is received. Stores the estimate and, when it is lower than
+// the current send bitrate, clamps the send bitrate to it.
+// Returns 0 and fills the out-parameters when the rate was lowered;
+// returns -1 when management is off or the estimate does not constrain us.
+WebRtc_Word32
+BandwidthManagement::UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthKbit,
+                                             WebRtc_UWord32* newBitrate,
+                                             WebRtc_UWord8* fractionLost,
+                                             WebRtc_UWord16* roundTripTime)
+{
+    *newBitrate = 0;
+    CriticalSectionScoped cs(_critsect);
+
+    // Remember the estimate (converted to bit/s) for ShapeSimple().
+    _bwEstimateIncoming = bandWidthKbit*1000;
+
+    if(_bitRate == 0)
+    {
+        // BandwidthManagement off
+        return -1;
+    }
+    if (_bwEstimateIncoming > 0 && _bitRate > _bwEstimateIncoming)
+    {
+        // The estimate is below our current rate: clamp down to it.
+        _bitRate   = _bwEstimateIncoming;
+    } else
+    {
+        // The estimate does not constrain the current rate: no change.
+        return -1;
+    }
+    *newBitrate = _bitRate;
+    *fractionLost = _last_fraction_loss;
+    *roundTripTime = _last_round_trip_time;
+    return 0;
+}
+
+// Called when an RTCP receiver report (receive block) arrives.
+// |*loss| is the reported fraction lost scaled to [0, 255]; it is filtered
+// over at least |limitNumPackets| expected packets and replaced in place by
+// the value actually used. The filtered loss then drives ShapeSimple() to
+// adapt the send bitrate. Returns 0 and writes *newBitrate when the rate
+// changed; -1 when management is off or the rate is unchanged.
+WebRtc_Word32 BandwidthManagement::UpdatePacketLoss(
+    const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
+    WebRtc_UWord32 sentBitrate,
+    const WebRtc_UWord16 rtt,
+    WebRtc_UWord8* loss,
+    WebRtc_UWord32* newBitrate,
+    WebRtc_Word64 nowMS)
+{
+    CriticalSectionScoped cs(_critsect);
+
+    // Keep the raw report for UpdateBandwidthEstimate() to echo back.
+    _last_fraction_loss = *loss;
+    _last_round_trip_time = rtt;
+
+    if(_bitRate == 0)
+    {
+        // BandwidthManagement off
+        return -1;
+    }
+
+    // Check sequence number diff and weight loss report
+    if (_lastPacketLossExtendedHighSeqNum > 0 &&
+        (lastReceivedExtendedHighSeqNum >= _lastPacketLossExtendedHighSeqNum))
+    {
+        // This is not the first loss report and the sequence number is
+        // non-decreasing. Calculate sequence number diff.
+        WebRtc_UWord32 seqNumDiff = lastReceivedExtendedHighSeqNum
+            - _lastPacketLossExtendedHighSeqNum;
+
+        // Check if this report and the last was 100% loss, then report
+        // 100% loss even though seqNumDiff is small.
+        // If not, go on with the checks.
+        if (!(_lastReportAllLost && *loss == 255))
+        {
+            _lastReportAllLost = (*loss == 255);
+
+            // Calculate number of lost packets.
+            // loss = 256 * numLostPackets / expectedPackets.
+            const int numLostPacketsQ8 = *loss * seqNumDiff;
+
+            // Accumulate reports.
+            _accumulateLostPacketsQ8 += numLostPacketsQ8;
+            _accumulateExpectedPackets += seqNumDiff;
+
+            // Report loss if the total report is based on sufficiently
+            // many packets.
+            const int limitNumPackets = 10;
+            if (_accumulateExpectedPackets >= limitNumPackets)
+            {
+                *loss = _accumulateLostPacketsQ8 / _accumulateExpectedPackets;
+
+                // Reset accumulators
+                _accumulateLostPacketsQ8 = 0;
+                _accumulateExpectedPackets = 0;
+            }
+            else
+            {
+                // Report zero loss until we have enough data to estimate
+                // the loss rate.
+                *loss = 0;
+            }
+        }
+    }
+    // Keep for next time.
+    _lastLoss = *loss;
+
+    // Remember the sequence number until next time
+    _lastPacketLossExtendedHighSeqNum = lastReceivedExtendedHighSeqNum;
+
+    // A return of 0 from ShapeSimple() means "keep the previous rate".
+    WebRtc_UWord32 bitRate = ShapeSimple(*loss, rtt, sentBitrate, nowMS);
+    if (bitRate == 0)
+    {
+        // no change
+        return -1;
+    }
+    _bitRate = bitRate;
+    *newBitrate = bitRate;
+    return 0;
+}
+
+// Writes the current bandwidth estimate via |bandwidthKbit|; returns -1
+// when no estimate exists yet or the out-pointer is NULL.
+// NOTE(review): _bitRate is held in bit/s elsewhere in this class, while
+// the parameter name suggests kbit/s — confirm the expected unit with the
+// callers.
+WebRtc_Word32 BandwidthManagement::AvailableBandwidth(
+    WebRtc_UWord32* bandwidthKbit) const {
+  CriticalSectionScoped cs(_critsect);
+  if (_bitRate == 0 || !bandwidthKbit) {
+    return -1;
+  }
+  *bandwidthKbit = _bitRate;
+  return 0;
+}
+
+/* Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
+ * The formula in RFC 3448, Section 3.1, is used.
+ */
+
+// protected
+// |packetLoss| is the loss fraction scaled to [0, 255]; it is mapped to
+// p in [0, 1] by dividing by 255 below. Returns the TFRC throughput in
+// bits/s, or -1 when any input is out of range.
+WebRtc_Word32 BandwidthManagement::CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes,
+                                               WebRtc_Word32 rttMs,
+                                               WebRtc_Word32 packetLoss)
+{
+    if (avgPackSizeBytes <= 0 || rttMs <= 0 || packetLoss <= 0)
+    {
+        // input variables out of range; return -1
+        return -1;
+    }
+
+    double R = static_cast<double>(rttMs)/1000; // RTT in seconds
+    int b = 1; // number of packets acknowledged by a single TCP acknowledgement; recommended = 1
+    double t_RTO = 4.0 * R; // TCP retransmission timeout value in seconds; recommended = 4*R
+    double p = static_cast<double>(packetLoss)/255; // packet loss rate in [0, 1)
+    double s = static_cast<double>(avgPackSizeBytes);
+
+    // calculate send rate in bytes/second
+    double X = s / (R * sqrt(2 * b * p / 3) + (t_RTO * (3 * sqrt( 3 * b * p / 8) * p * (1 + 32 * p * p))));
+
+    return (static_cast<WebRtc_Word32>(X*8)); // bits/second
+}
+
+/*
+*  Simple bandwidth estimation. Depends a lot on bwEstimateIncoming and packetLoss.
+*/
+// protected
+// Computes the next send bitrate (bit/s) from the filtered loss report
+// |packetLoss| (loss fraction scaled to [0, 255]):
+//   <= 5   (~2%)    : additive increase (8% + 1 kbps), rate-limited to once
+//                     per kBWEUpdateIntervalMs
+//   6..26  (~2-10%) : hold the current rate
+//   > 26   (~10%+)  : multiplicative decrease, floored at the TFRC rate
+// The result is then capped by the incoming remote estimate and clamped to
+// the configured [min, max]. The caller treats a return of 0 as "no change".
+WebRtc_UWord32 BandwidthManagement::ShapeSimple(WebRtc_Word32 packetLoss,
+                                                WebRtc_Word32 rtt,
+                                                WebRtc_UWord32 sentBitrate,
+                                                WebRtc_Word64 nowMS)
+{
+    WebRtc_UWord32 newBitRate = 0;
+    bool reducing = false;
+
+    // Limit the rate increases to once a second.
+    if (packetLoss <= 5)
+    {
+        if ((nowMS - _timeLastIncrease) <
+            kBWEUpdateIntervalMs)
+        {
+            return _bitRate;
+        }
+        _timeLastIncrease = nowMS;
+    }
+
+    if (packetLoss > 5 && packetLoss <= 26)
+    {
+        // 2% - 10%
+        newBitRate = _bitRate;
+    }
+    else if (packetLoss > 26)
+    {
+        // 26/256 ~= 10%
+        // reduce rate: newRate = rate * (1 - 0.5*lossRate)
+        // packetLoss = 256*lossRate
+        newBitRate = static_cast<WebRtc_UWord32>(
+            (sentBitrate * static_cast<double>(512 - packetLoss)) / 512.0);
+        reducing = true;
+    }
+    else
+    {
+        // increase rate by 8%
+        newBitRate = static_cast<WebRtc_UWord32>(_bitRate * 1.08 + 0.5);
+
+        // add 1 kbps extra, just to make sure that we do not get stuck
+        // (gives a little extra increase at low rates, negligible at higher rates)
+        newBitRate += 1000;
+    }
+
+    // Calculate smoothed loss number (exponential moving average in Q4)
+    if (_smoothedFractionLostQ4 < 0)
+    {
+        // startup
+        _smoothedFractionLostQ4 = static_cast<WebRtc_UWord16>(packetLoss);
+    }
+    else
+    {
+        _smoothedFractionLostQ4 = ((_sFLFactorQ4 * _smoothedFractionLostQ4 + 8) >> 4) // Q4*Q4 = Q8; down to Q4 again with proper rounding
+            + (16 - _sFLFactorQ4) * static_cast<WebRtc_UWord16>(packetLoss);  // Q4 * Q0 = Q4
+    }
+
+    // Calculate what rate TFRC would apply in this situation
+    //WebRtc_Word32 tfrcRate = CalcTFRCbps(1000, rtt, _smoothedFractionLostQ4 >> 4); // scale loss to Q0 (back to [0, 255])
+    WebRtc_Word32 tfrcRate = CalcTFRCbps(1000, rtt, packetLoss); // scale loss to Q0 (back to [0, 255])
+
+    if (reducing &&
+        tfrcRate > 0 &&
+        static_cast<WebRtc_UWord32>(tfrcRate) > newBitRate)
+    {
+        // do not reduce further if rate is below TFRC rate
+        newBitRate = _bitRate;
+    }
+
+    // Never exceed the latest remote estimate or the configured maximum.
+    if (_bwEstimateIncoming > 0 && newBitRate > _bwEstimateIncoming)
+    {
+        newBitRate = _bwEstimateIncoming;
+    }
+    if (newBitRate > _maxBitRateConfigured)
+    {
+        newBitRate = _maxBitRateConfigured;
+    }
+    if (newBitRate < _minBitRateConfigured)
+    {
+        WEBRTC_TRACE(kTraceWarning,
+                     kTraceRtpRtcp,
+                     _id,
+                     "The configured min bitrate (%u kbps) is greater than the "
+                     "estimated available bandwidth (%u kbps).\n",
+                     _minBitRateConfigured / 1000, newBitRate / 1000);
+        newBitRate = _minBitRateConfigured;
+    }
+    return newBitRate;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/bandwidth_management.h b/trunk/src/modules/rtp_rtcp/source/bandwidth_management.h
new file mode 100644
index 0000000..2516b86
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/bandwidth_management.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BANDWIDTH_MANAGEMENT_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BANDWIDTH_MANAGEMENT_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"
+#include "critical_section_wrapper.h"
+
+/*
+*   FEC and NACK added bitrate is handled outside class
+*/
+
+namespace webrtc {
+// Sender-side bandwidth manager: combines incoming TMMBR/REMB estimates
+// with RTCP receive-block loss reports to produce the send bitrate.
+// All public methods are serialized on an internal critical section.
+class BandwidthManagement
+{
+public:
+    BandwidthManagement(const WebRtc_Word32 id);
+    ~BandwidthManagement();
+
+    // Call when we receive a RTCP message with TMMBR or REMB.
+    WebRtc_Word32 UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthKbit,
+                                          WebRtc_UWord32* newBitrate,
+                                          WebRtc_UWord8* fractionLost,
+                                          WebRtc_UWord16* roundTripTime);
+
+    // Call when we receive a RTCP message with a ReceiveBlock.
+    WebRtc_Word32 UpdatePacketLoss(
+        const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
+        WebRtc_UWord32 sentBitrate,
+        const WebRtc_UWord16 rtt,
+        WebRtc_UWord8* loss,
+        WebRtc_UWord32* newBitrate,
+        WebRtc_Word64 nowMS);
+
+    // If no bandwidth estimate is available or if |bandwidthKbit| is NULL,
+    // -1 is returned.
+    WebRtc_Word32 AvailableBandwidth(WebRtc_UWord32* bandwidthKbit) const;
+
+    // Sets the starting bitrate (bit/s) and the min/max envelope (kbit/s).
+    void SetSendBitrate(const WebRtc_UWord32 startBitrate,
+                        const WebRtc_UWord16 minBitrateKbit,
+                        const WebRtc_UWord16 maxBitrateKbit);
+
+    WebRtc_Word32 MaxConfiguredBitrate(WebRtc_UWord16* maxBitrateKbit);
+
+protected:
+    // Loss/RTT-driven rate shaping; returns the new bitrate in bit/s.
+    WebRtc_UWord32 ShapeSimple(WebRtc_Word32 packetLoss,
+                               WebRtc_Word32 rtt,
+                               WebRtc_UWord32 sentBitrate,
+                               WebRtc_Word64 nowMS);
+
+    // TFRC (RFC 3448) throughput equation; returns bit/s or -1.
+    WebRtc_Word32 CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes,
+                              WebRtc_Word32 rttMs,
+                              WebRtc_Word32 packetLoss);
+
+private:
+    enum { kBWEUpdateIntervalMs = 1000 };
+
+    WebRtc_Word32         _id;
+
+    CriticalSectionWrapper* _critsect;
+
+    // incoming filters
+    WebRtc_UWord32        _lastPacketLossExtendedHighSeqNum;
+    bool                  _lastReportAllLost;
+    WebRtc_UWord8         _lastLoss;
+    int                   _accumulateLostPacketsQ8;
+    int                   _accumulateExpectedPackets;
+
+    // bitrate (bit/s); _bitRate == 0 means management is disabled
+    WebRtc_UWord32        _bitRate;
+    WebRtc_UWord32        _minBitRateConfigured;
+    WebRtc_UWord32        _maxBitRateConfigured;
+
+    // last receive-block statistics, echoed by UpdateBandwidthEstimate()
+    WebRtc_UWord8         _last_fraction_loss;
+    WebRtc_UWord16        _last_round_trip_time;
+
+    // bandwidth estimate
+    WebRtc_UWord32        _bwEstimateIncoming;
+    WebRtc_Word16         _smoothedFractionLostQ4; // -1 until first report
+    WebRtc_Word16         _sFLFactorQ4;  // forgetting factor for _smoothedFractionLostQ4
+    WebRtc_Word64         _timeLastIncrease;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BANDWIDTH_MANAGEMENT_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/bitrate.cc b/trunk/src/modules/rtp_rtcp/source/bitrate.cc
new file mode 100644
index 0000000..be0c7dd
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/bitrate.cc
@@ -0,0 +1,203 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "Bitrate.h"
+#include "rtp_utility.h"
+
+#define BITRATE_AVERAGE_WINDOW 2000
+
+namespace webrtc {
+// Binds the estimator to |clock| (not owned; dereferenced immediately, so
+// it must be non-NULL) and zeroes all counters and history windows.
+Bitrate::Bitrate(RtpRtcpClock* clock) :
+    _clock(*clock),
+    _packetRate(0),
+    _bitrate(0),
+    _bitrateNextIdx(0),
+    _timeLastRateUpdate(0),
+    _bytesCount(0),
+    _packetCount(0)
+{
+    memset(_packetRateArray, 0, sizeof(_packetRateArray));
+    memset(_bitrateDiffMS, 0, sizeof(_bitrateDiffMS));
+    memset(_bitrateArray, 0, sizeof(_bitrateArray));
+}
+
+// Resets every counter and the rate-history windows back to the
+// just-constructed state; the clock reference is kept.
+void
+Bitrate::Init()
+{
+    _packetRate = 0;
+    _bitrate = 0;
+    _bitrateNextIdx = 0;
+    _timeLastRateUpdate = 0;
+    _bytesCount = 0;
+    _packetCount = 0;
+
+    memset(_bitrateArray, 0, sizeof(_bitrateArray));
+    memset(_bitrateDiffMS, 0, sizeof(_bitrateDiffMS));
+    memset(_packetRateArray, 0, sizeof(_packetRateArray));
+}
+
+// Accumulates one packet of |bytes| bytes; the totals are folded into the
+// rate estimate by the next Process() call.
+void
+Bitrate::Update(const WebRtc_Word32 bytes)
+{
+    ++_packetCount;
+    _bytesCount += bytes;
+}
+
+// Returns the packet rate (packets/s) computed at the last Process() call.
+WebRtc_UWord32
+Bitrate::PacketRate() const
+{
+    return _packetRate;
+}
+
+// Returns the bitrate (bits/s) computed at the last Process() call.
+WebRtc_UWord32
+Bitrate::BitrateLast() const
+{
+    return _bitrate;
+}
+
+// Returns an up-to-the-moment bitrate (bits/s): the last computed rate
+// blended with the bytes accumulated since then. Falls back to the last
+// computed rate when more than 10 s have passed since the last update.
+WebRtc_UWord32
+Bitrate::BitrateNow() const
+{
+    WebRtc_UWord32 now = _clock.GetTimeInMS();
+    WebRtc_UWord32 diffMS = now -_timeLastRateUpdate;
+
+    if(diffMS > 10000) // 10 sec
+    {
+        // too high diff ignore
+        return _bitrate; // bits/s
+    }
+    // BUG FIX: force 64-bit arithmetic. The former 8*_bytesCount*1000 was
+    // evaluated in the operands' (narrower) type before widening, so it
+    // overflowed once ~512 kB had accumulated since the last update.
+    WebRtc_UWord64 bitsSinceLastRateUpdate =
+        8 * static_cast<WebRtc_UWord64>(_bytesCount) * 1000;
+
+    // have to consider the time when the measurement was done
+    // ((bits/sec * sec) + (bits)) / sec
+    WebRtc_UWord64 bitrate = (((WebRtc_UWord64)_bitrate * 1000) + bitsSinceLastRateUpdate)/(1000+diffMS);
+    return (WebRtc_UWord32)bitrate;
+}
+
+// Timer-driven update: folds the bytes/packets accumulated via Update()
+// into 10-slot time-weighted moving averages of bitrate and packet rate.
+// Does nothing if called within 100 ms of the previous update; after a
+// gap of more than 10 s the accumulated sample is discarded.
+void
+Bitrate::Process()
+{
+    // triggered by timer
+    WebRtc_UWord32 now = _clock.GetTimeInMS();
+    WebRtc_UWord32 diffMS = now -_timeLastRateUpdate;
+
+    if(diffMS > 100)
+    {
+        if(diffMS > 10000) // 10 sec
+        {
+            // too high diff ignore
+            _timeLastRateUpdate = now;
+            _bytesCount = 0;
+            _packetCount = 0;
+            return;
+        }
+        _packetRateArray[_bitrateNextIdx] = (_packetCount*1000)/diffMS;
+        _bitrateArray[_bitrateNextIdx]    = 8*((_bytesCount*1000)/diffMS);
+        // will overflow at ~34 Mbit/s
+        _bitrateDiffMS[_bitrateNextIdx]   = diffMS;
+        _bitrateNextIdx++;
+        if(_bitrateNextIdx >= 10)
+        {
+            // circular buffer: wrap to the oldest slot
+            _bitrateNextIdx = 0;
+        }
+
+        // Time-weighted average over the (up to) 10 stored samples.
+        WebRtc_UWord32 sumDiffMS = 0;
+        WebRtc_UWord64 sumBitrateMS = 0;
+        WebRtc_UWord32 sumPacketrateMS = 0;
+        for(int i= 0; i <10; i++)
+        {
+            // sum of time
+            sumDiffMS += _bitrateDiffMS[i];
+            sumBitrateMS += _bitrateArray[i] * _bitrateDiffMS[i];
+            sumPacketrateMS += _packetRateArray[i] * _bitrateDiffMS[i];
+        }
+        _timeLastRateUpdate = now;
+        _bytesCount = 0;
+        _packetCount = 0;
+
+        // sumDiffMS is non-zero here: the current slot's diffMS > 100.
+        _packetRate = sumPacketrateMS/sumDiffMS;
+        _bitrate = WebRtc_UWord32(sumBitrateMS / sumDiffMS);
+    }
+}
+
+// Starts with an empty sample window and a zero byte total.
+BitRateStats::BitRateStats()
+    :_dataSamples(), _accumulatedBytes(0)
+{
+}
+
+// Frees every sample still queued in the averaging window.
+BitRateStats::~BitRateStats()
+{
+    while (!_dataSamples.empty())
+    {
+        DataTimeSizeTuple* sample = _dataSamples.front();
+        _dataSamples.pop_front();
+        delete sample;
+    }
+}
+
+// Drops all queued samples and resets the byte total, returning the
+// object to its freshly constructed state.
+void BitRateStats::Init()
+{
+    _accumulatedBytes = 0;
+    while (!_dataSamples.empty())
+    {
+        DataTimeSizeTuple* sample = _dataSamples.front();
+        _dataSamples.pop_front();
+        delete sample;
+    }
+}
+
+// Records a packet of |packetSizeBytes| completed at |nowMs|, then trims
+// samples that have fallen out of the averaging window.
+void BitRateStats::Update(WebRtc_UWord32 packetSizeBytes, WebRtc_Word64 nowMs)
+{
+    // Find an empty slot for storing the new sample and at the same time
+    // accumulate the history.
+    _dataSamples.push_back(new DataTimeSizeTuple(packetSizeBytes, nowMs));
+    _accumulatedBytes += packetSizeBytes;
+    EraseOld(nowMs);
+}
+
+// Deletes samples older than BITRATE_AVERAGE_WINDOW ms, keeping
+// _accumulatedBytes in sync with the samples that remain. The list is
+// ordered by insertion time, so we can stop at the first young sample.
+void BitRateStats::EraseOld(WebRtc_Word64 nowMs)
+{
+    while (!_dataSamples.empty() &&
+           nowMs - _dataSamples.front()->_timeCompleteMs >
+               BITRATE_AVERAGE_WINDOW)
+    {
+        DataTimeSizeTuple* oldest = _dataSamples.front();
+        _accumulatedBytes -= oldest->_sizeBytes;
+        _dataSamples.pop_front();
+        delete oldest;
+    }
+}
+
+// Returns the average bit rate (bits/s, rounded to nearest) over the past
+// BITRATE_AVERAGE_WINDOW ms; expired samples are removed as a side effect.
+WebRtc_UWord32 BitRateStats::BitRate(WebRtc_Word64 nowMs)
+{
+    // Calculate the average bit rate the past BITRATE_AVERAGE_WINDOW ms.
+    // Removes any old samples from the list.
+    EraseOld(nowMs);
+    WebRtc_Word64 timeOldest = nowMs;
+    if (_dataSamples.size() > 0)
+    {
+        timeOldest = _dataSamples.front()->_timeCompleteMs;
+    }
+    // Update average bit rate
+    float denom = static_cast<float>(nowMs - timeOldest);
+    if (nowMs == timeOldest)
+    {
+        // Calculate with a one second window when we haven't
+        // received more than one packet (avoids division by zero).
+        denom = 1000.0;
+    }
+    return static_cast<WebRtc_UWord32>(_accumulatedBytes * 8.0f * 1000.0f /
+                                       denom + 0.5f);
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/bwe_defines.h b/trunk/src/modules/rtp_rtcp/source/bwe_defines.h
new file mode 100644
index 0000000..8fdc985
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/bwe_defines.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
+
+#include "typedefs.h"
+
+#define BWE_MAX(a,b) ((a)>(b)?(a):(b))
+#define BWE_MIN(a,b) ((a)<(b)?(a):(b))
+
+namespace webrtc {
+// Over-/under-use classification of the current bandwidth.
+enum BandwidthUsage
+{
+    kBwNormal,
+    kBwOverusing,
+    kBwUnderUsing
+};
+
+// Action state of the rate controller.
+enum RateControlState
+{
+    kRcHold,
+    kRcIncrease,
+    kRcDecrease
+};
+
+// Where the current rate is believed to sit relative to the maximum.
+enum RateControlRegion
+{
+    kRcNearMax,
+    kRcAboveMax,
+    kRcMaxUnknown
+};
+
+// Plain value object bundling the inputs to the rate controller:
+// the detector state, the measured incoming bitrate and its noise variance.
+class RateControlInput
+{
+public:
+    RateControlInput(BandwidthUsage bwState,
+                     WebRtc_UWord32 incomingBitRate,
+                     double noiseVar) :
+        _bwState(bwState), _incomingBitRate(incomingBitRate), _noiseVar(noiseVar)
+    {}  // FIX: dropped the stray ';' after the inline body — an empty
+        // member declaration, ill-formed before C++11.
+
+    BandwidthUsage      _bwState;
+    WebRtc_UWord32      _incomingBitRate;
+    double              _noiseVar;
+};
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/dtmf_queue.cc b/trunk/src/modules/rtp_rtcp/source/dtmf_queue.cc
new file mode 100644
index 0000000..749309b
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/dtmf_queue.cc
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dtmf_queue.h"
+
+#include <string.h> //memset
+
+namespace webrtc {
+// Creates an empty queue and its guarding critical section.
+DTMFqueue::DTMFqueue():
+    _DTMFCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _nextEmptyIndex(0)
+{
+    memset(_DTMFKey,0, sizeof(_DTMFKey));
+    memset(_DTMFLen,0, sizeof(_DTMFLen));
+    memset(_DTMFLevel,0, sizeof(_DTMFLevel));
+}
+
+// Releases the critical section created in the constructor.
+DTMFqueue::~DTMFqueue()
+{
+    delete _DTMFCritsect;
+}
+
+// Appends one out-of-band DTMF event (key, length, level) to the queue.
+// Returns 0 on success, -1 when the queue is already full.
+WebRtc_Word32
+DTMFqueue::AddDTMF(WebRtc_UWord8 key, WebRtc_UWord16 len, WebRtc_UWord8 level)
+{
+    CriticalSectionScoped lock(_DTMFCritsect);
+
+    if(_nextEmptyIndex >= DTMF_OUTBAND_MAX)
+    {
+        return -1; // queue full
+    }
+    const WebRtc_Word32 index = _nextEmptyIndex++;
+    _DTMFKey[index] = key;
+    _DTMFLen[index] = len;
+    _DTMFLevel[index] = level;
+    return 0;
+}
+
+// Pops the oldest queued DTMF event into |DTMFKey|/|len|/|level|.
+// Returns 0 on success, -1 when the queue is empty.
+WebRtc_Word8
+DTMFqueue::NextDTMF(WebRtc_UWord8* DTMFKey, WebRtc_UWord16* len, WebRtc_UWord8* level)
+{
+    CriticalSectionScoped lock(_DTMFCritsect);
+
+    if(!PendingDTMF())
+    {
+        return -1;
+    }
+    *DTMFKey=_DTMFKey[0];
+    *len=_DTMFLen[0];
+    *level=_DTMFLevel[0];
+
+    // BUG FIX: shift only the (_nextEmptyIndex - 1) remaining entries.
+    // The old code moved _nextEmptyIndex elements, reading one element past
+    // the valid data — and past the end of the arrays when the queue was
+    // full (_nextEmptyIndex == DTMF_OUTBAND_MAX).
+    memmove(&(_DTMFKey[0]), &(_DTMFKey[1]),
+            (_nextEmptyIndex-1)*sizeof(WebRtc_UWord8));
+    memmove(&(_DTMFLen[0]), &(_DTMFLen[1]),
+            (_nextEmptyIndex-1)*sizeof(WebRtc_UWord16));
+    memmove(&(_DTMFLevel[0]), &(_DTMFLevel[1]),
+            (_nextEmptyIndex-1)*sizeof(WebRtc_UWord8));
+
+    _nextEmptyIndex--;
+    return 0;
+}
+
+// Returns true when at least one DTMF event is queued.
+// NOTE(review): reads _nextEmptyIndex without taking _DTMFCritsect.
+// NextDTMF() calls this while already holding the lock, so locking here
+// would deadlock unless the critical section is recursive — confirm
+// whether lock-free access is acceptable for external callers.
+bool
+DTMFqueue::PendingDTMF()
+{
+    return(_nextEmptyIndex>0);
+}
+
+// Discards all queued events by resetting the fill index; the array
+// contents are left stale but unreachable.
+// NOTE(review): does not take _DTMFCritsect — confirm callers serialize
+// this against AddDTMF()/NextDTMF().
+void
+DTMFqueue::ResetDTMF()
+{
+    _nextEmptyIndex = 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/dtmf_queue.h b/trunk/src/modules/rtp_rtcp/source/dtmf_queue.h
new file mode 100644
index 0000000..8451a21
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/dtmf_queue.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"
+
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+// Fixed-capacity FIFO of outband DTMF events (key, duration, level), guarded
+// by a critical section. Holds at most DTMF_OUTBAND_MAX events.
+class DTMFqueue
+{
+public:
+    DTMFqueue();
+    virtual ~DTMFqueue();
+
+    // Append one event; returns 0 on success, -1 if the queue is full.
+    WebRtc_Word32 AddDTMF(WebRtc_UWord8 DTMFKey, WebRtc_UWord16 len, WebRtc_UWord8 level);
+    // Pop the oldest event into the out-parameters; returns 0 on success,
+    // -1 if the queue is empty.
+    WebRtc_Word8 NextDTMF(WebRtc_UWord8* DTMFKey, WebRtc_UWord16 * len, WebRtc_UWord8 * level);
+    // True when at least one event is queued.
+    bool PendingDTMF();
+    // Discard all queued events.
+    void ResetDTMF();
+
+private:
+    CriticalSectionWrapper* _DTMFCritsect;  // guards the members below
+    WebRtc_UWord8        _nextEmptyIndex;   // count of queued events == next free slot
+    WebRtc_UWord8        _DTMFKey[DTMF_OUTBAND_MAX];
+    WebRtc_UWord16       _DTMFLen[DTMF_OUTBAND_MAX];
+    WebRtc_UWord8        _DTMFLevel[DTMF_OUTBAND_MAX];
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/fec_private_tables.h b/trunk/src/modules/rtp_rtcp/source/fec_private_tables.h
new file mode 100644
index 0000000..699ee5a
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/fec_private_tables.h
@@ -0,0 +1,25741 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+
+// Ensure the tables have internal linkage, as we only require them in one file.
+namespace
+{
+    const WebRtc_UWord8 mask10_1[2] =
+    {
+        0xff, 0xc0
+    };
+
+    const WebRtc_UWord8 mask10_10[20] =
+    {
+        0x4c, 0x00,
+        0x51, 0x00,
+        0xa0, 0x40,
+        0x04, 0xc0,
+        0x03, 0x80,
+        0x86, 0x00,
+        0x29, 0x00,
+        0x42, 0x40,
+        0x98, 0x00,
+        0x30, 0x80
+    };
+
+    const WebRtc_UWord8 mask10_2[4] =
+    {
+        0xaa, 0x80,
+        0xd5, 0x40
+    };
+
+    const WebRtc_UWord8 mask10_3[6] =
+    {
+        0xa4, 0x40,
+        0xc9, 0x00,
+        0x52, 0x80
+    };
+
+    const WebRtc_UWord8 mask10_4[8] =
+    {
+        0xca, 0x00,
+        0x32, 0x80,
+        0xa1, 0x40,
+        0x55, 0x00
+    };
+
+    const WebRtc_UWord8 mask10_5[10] =
+    {
+        0xca, 0x00,
+        0x32, 0x80,
+        0xa1, 0x40,
+        0x55, 0x00,
+        0x08, 0xc0
+    };
+
+    const WebRtc_UWord8 mask10_6[12] =
+    {
+        0x0e, 0x00,
+        0x33, 0x00,
+        0x10, 0xc0,
+        0x45, 0x40,
+        0x88, 0x80,
+        0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask10_7[14] =
+    {
+        0x46, 0x00,
+        0x33, 0x00,
+        0x80, 0xc0,
+        0x0c, 0x40,
+        0x28, 0x80,
+        0x94, 0x00,
+        0xc1, 0x00
+    };
+
+    const WebRtc_UWord8 mask10_8[16] =
+    {
+        0x2c, 0x00,
+        0x81, 0x80,
+        0xa0, 0x40,
+        0x05, 0x40,
+        0x18, 0x80,
+        0xc2, 0x00,
+        0x22, 0x80,
+        0x50, 0x40
+    };
+
+    const WebRtc_UWord8 mask10_9[18] =
+    {
+        0x4c, 0x00,
+        0x23, 0x00,
+        0x88, 0xc0,
+        0x21, 0x40,
+        0x52, 0x80,
+        0x94, 0x00,
+        0x26, 0x00,
+        0x48, 0x40,
+        0x91, 0x80
+    };
+
+    const WebRtc_UWord8 mask11_1[2] =
+    {
+        0xff, 0xe0
+    };
+
+    const WebRtc_UWord8 mask11_10[20] =
+    {
+        0x64, 0x40,
+        0x51, 0x40,
+        0xa9, 0x00,
+        0x04, 0xc0,
+        0xd0, 0x00,
+        0x82, 0x40,
+        0x21, 0x20,
+        0x0c, 0x20,
+        0x4a, 0x00,
+        0x12, 0xa0
+    };
+
+    const WebRtc_UWord8 mask11_11[22] =
+    {
+        0x46, 0x40,
+        0x33, 0x20,
+        0x99, 0x00,
+        0x05, 0x80,
+        0x80, 0xa0,
+        0x84, 0x40,
+        0x40, 0x60,
+        0x0a, 0x80,
+        0x68, 0x00,
+        0x10, 0x20,
+        0x30, 0x40
+    };
+
+    const WebRtc_UWord8 mask11_2[4] =
+    {
+        0xec, 0xc0,
+        0x9b, 0xa0
+    };
+
+    const WebRtc_UWord8 mask11_3[6] =
+    {
+        0xca, 0xc0,
+        0xf1, 0x40,
+        0xb6, 0x20
+    };
+
+    const WebRtc_UWord8 mask11_4[8] =
+    {
+        0xc4, 0xc0,
+        0x31, 0x60,
+        0x4b, 0x20,
+        0x2c, 0xa0
+    };
+
+    const WebRtc_UWord8 mask11_5[10] =
+    {
+        0x86, 0x80,
+        0x23, 0x20,
+        0x16, 0x20,
+        0x4c, 0x20,
+        0x41, 0xc0
+    };
+
+    const WebRtc_UWord8 mask11_6[12] =
+    {
+        0x64, 0x40,
+        0x51, 0x40,
+        0x0c, 0xa0,
+        0xa1, 0x20,
+        0x12, 0xa0,
+        0x8a, 0x40
+    };
+
+    const WebRtc_UWord8 mask11_7[14] =
+    {
+        0x46, 0x40,
+        0x33, 0x20,
+        0x91, 0x80,
+        0xa4, 0x20,
+        0x50, 0xa0,
+        0x84, 0xc0,
+        0x09, 0x60
+    };
+
+    const WebRtc_UWord8 mask11_8[16] =
+    {
+        0x0c, 0x80,
+        0x80, 0x60,
+        0xa0, 0x80,
+        0x05, 0x40,
+        0x43, 0x00,
+        0x1a, 0x00,
+        0x60, 0x20,
+        0x14, 0x20
+    };
+
+    const WebRtc_UWord8 mask11_9[18] =
+    {
+        0x46, 0x40,
+        0x62, 0x60,
+        0x8c, 0x00,
+        0x01, 0x60,
+        0x07, 0x80,
+        0xa0, 0x80,
+        0x18, 0xa0,
+        0x91, 0x00,
+        0x78, 0x00
+    };
+
+    const WebRtc_UWord8 mask12_1[2] =
+    {
+        0xff, 0xf0
+    };
+
+    const WebRtc_UWord8 mask12_10[20] =
+    {
+        0x51, 0x40,
+        0x45, 0x10,
+        0x80, 0xd0,
+        0x24, 0x20,
+        0x0a, 0x20,
+        0x00, 0xe0,
+        0xb8, 0x00,
+        0x09, 0x10,
+        0x56, 0x00,
+        0xa2, 0x80
+    };
+
+    const WebRtc_UWord8 mask12_11[22] =
+    {
+        0x53, 0x60,
+        0x21, 0x30,
+        0x10, 0x90,
+        0x00, 0x70,
+        0x0c, 0x10,
+        0x40, 0xc0,
+        0x6a, 0x00,
+        0x86, 0x00,
+        0x24, 0x80,
+        0x89, 0x00,
+        0xc0, 0x20
+    };
+
+    const WebRtc_UWord8 mask12_12[24] =
+    {
+        0x10, 0x60,
+        0x02, 0x30,
+        0x40, 0x50,
+        0x21, 0x80,
+        0x81, 0x10,
+        0x14, 0x80,
+        0x98, 0x00,
+        0x08, 0x90,
+        0x62, 0x00,
+        0x24, 0x20,
+        0x8a, 0x00,
+        0x84, 0x40
+    };
+
+    const WebRtc_UWord8 mask12_2[4] =
+    {
+        0xec, 0xc0,
+        0x93, 0xb0
+    };
+
+    const WebRtc_UWord8 mask12_3[6] =
+    {
+        0x9b, 0x80,
+        0x4f, 0x10,
+        0x3c, 0x60
+    };
+
+    const WebRtc_UWord8 mask12_4[8] =
+    {
+        0x8b, 0x20,
+        0x14, 0xb0,
+        0x22, 0xd0,
+        0x45, 0x50
+    };
+
+    const WebRtc_UWord8 mask12_5[10] =
+    {
+        0x53, 0x60,
+        0x64, 0x20,
+        0x0c, 0xc0,
+        0x82, 0xa0,
+        0x09, 0x30
+    };
+
+    const WebRtc_UWord8 mask12_6[12] =
+    {
+        0x51, 0x40,
+        0xc5, 0x10,
+        0x21, 0x80,
+        0x12, 0x30,
+        0x08, 0xe0,
+        0x2e, 0x00
+    };
+
+    const WebRtc_UWord8 mask12_7[14] =
+    {
+        0x53, 0x60,
+        0x21, 0x30,
+        0x90, 0x90,
+        0x02, 0x50,
+        0x06, 0xa0,
+        0x2c, 0x00,
+        0x88, 0x60
+    };
+
+    const WebRtc_UWord8 mask12_8[16] =
+    {
+        0x20, 0x60,
+        0x80, 0x30,
+        0x42, 0x40,
+        0x01, 0x90,
+        0x14, 0x10,
+        0x0a, 0x80,
+        0x38, 0x00,
+        0xc5, 0x00
+    };
+
+    const WebRtc_UWord8 mask12_9[18] =
+    {
+        0x53, 0x60,
+        0xe4, 0x20,
+        0x24, 0x40,
+        0xa1, 0x10,
+        0x18, 0x30,
+        0x03, 0x90,
+        0x8a, 0x10,
+        0x04, 0x90,
+        0x00, 0xe0
+    };
+
+    const WebRtc_UWord8 mask13_1[2] =
+    {
+        0xff, 0xf8
+    };
+
+    const WebRtc_UWord8 mask13_10[20] =
+    {
+        0xd1, 0x00,
+        0x44, 0x50,
+        0x10, 0x98,
+        0xa0, 0x50,
+        0x4a, 0x08,
+        0x40, 0x30,
+        0x80, 0x28,
+        0x0c, 0x90,
+        0x05, 0x88,
+        0x62, 0x20
+    };
+
+    const WebRtc_UWord8 mask13_11[22] =
+    {
+        0x51, 0x20,
+        0x22, 0x10,
+        0x13, 0x40,
+        0x25, 0x00,
+        0x18, 0x18,
+        0x0a, 0x20,
+        0x88, 0x88,
+        0x06, 0x80,
+        0xe0, 0x20,
+        0x84, 0x40,
+        0x44, 0x18
+    };
+
+    const WebRtc_UWord8 mask13_12[24] =
+    {
+        0x28, 0x28,
+        0x84, 0x50,
+        0x60, 0x40,
+        0x05, 0x48,
+        0x02, 0x98,
+        0x01, 0x30,
+        0x48, 0x10,
+        0x24, 0x80,
+        0x94, 0x00,
+        0x8a, 0x00,
+        0x11, 0x80,
+        0x52, 0x20
+    };
+
+    const WebRtc_UWord8 mask13_13[26] =
+    {
+        0x51, 0x20,
+        0x66, 0x40,
+        0x05, 0x48,
+        0x81, 0x20,
+        0x94, 0x00,
+        0x30, 0x80,
+        0x21, 0x10,
+        0x03, 0xc0,
+        0xe8, 0x00,
+        0x0a, 0x10,
+        0x80, 0x18,
+        0x04, 0x90,
+        0x08, 0xa8
+    };
+
+    const WebRtc_UWord8 mask13_2[4] =
+    {
+        0xec, 0xc0,
+        0x1b, 0x38
+    };
+
+    const WebRtc_UWord8 mask13_3[6] =
+    {
+        0x99, 0xb0,
+        0x46, 0xd8,
+        0x37, 0x28
+    };
+
+    const WebRtc_UWord8 mask13_4[8] =
+    {
+        0x49, 0xb0,
+        0x26, 0xd0,
+        0x85, 0x68,
+        0x52, 0x58
+    };
+
+    const WebRtc_UWord8 mask13_5[10] =
+    {
+        0x51, 0x30,
+        0x66, 0x40,
+        0x0c, 0x68,
+        0xa1, 0xc0,
+        0x22, 0x98
+    };
+
+    const WebRtc_UWord8 mask13_6[12] =
+    {
+        0xd1, 0x20,
+        0x46, 0xd0,
+        0x15, 0x48,
+        0x21, 0x70,
+        0x28, 0xc8,
+        0xaa, 0x20
+    };
+
+    const WebRtc_UWord8 mask13_7[14] =
+    {
+        0x59, 0x20,
+        0x26, 0x50,
+        0xb1, 0x40,
+        0x2b, 0x08,
+        0x14, 0xc8,
+        0xc8, 0x88,
+        0x84, 0xb0
+    };
+
+    const WebRtc_UWord8 mask13_8[16] =
+    {
+        0x80, 0xa8,
+        0x30, 0x90,
+        0x16, 0x08,
+        0x03, 0x30,
+        0x44, 0x60,
+        0x08, 0x18,
+        0xd8, 0x00,
+        0xa1, 0x40
+    };
+
+    const WebRtc_UWord8 mask13_9[18] =
+    {
+        0x59, 0x20,
+        0x66, 0x40,
+        0x14, 0x40,
+        0x21, 0x48,
+        0x02, 0xc8,
+        0x94, 0x10,
+        0x80, 0xa8,
+        0x0a, 0x90,
+        0x40, 0x18
+    };
+
+    const WebRtc_UWord8 mask14_1[2] =
+    {
+        0xff, 0xfc
+    };
+
+    const WebRtc_UWord8 mask14_10[20] =
+    {
+        0xc0, 0xd4,
+        0x1d, 0x40,
+        0xd4, 0x08,
+        0x02, 0x60,
+        0x04, 0x28,
+        0x20, 0x98,
+        0x40, 0x44,
+        0x08, 0x84,
+        0x68, 0x00,
+        0x23, 0x10
+    };
+
+    const WebRtc_UWord8 mask14_11[22] =
+    {
+        0x62, 0xd0,
+        0x35, 0x20,
+        0x14, 0x14,
+        0xc5, 0x08,
+        0x22, 0x0c,
+        0x88, 0xb8,
+        0x42, 0x54,
+        0x28, 0xa4,
+        0x94, 0x20,
+        0x1b, 0x04,
+        0x22, 0xc0
+    };
+
+    const WebRtc_UWord8 mask14_12[24] =
+    {
+        0x81, 0x04,
+        0x40, 0x68,
+        0x90, 0x24,
+        0x28, 0x28,
+        0x52, 0x10,
+        0x41, 0x88,
+        0x09, 0x30,
+        0x48, 0x44,
+        0x04, 0x44,
+        0x0e, 0x80,
+        0xa5, 0x90,
+        0x12, 0x0c
+    };
+
+    const WebRtc_UWord8 mask14_13[26] =
+    {
+        0x62, 0x54,
+        0x34, 0x60,
+        0x48, 0x04,
+        0x00, 0xac,
+        0x28, 0x08,
+        0x81, 0x08,
+        0x23, 0x04,
+        0x06, 0x80,
+        0x80, 0x14,
+        0x30, 0x10,
+        0x8c, 0x20,
+        0x54, 0x00,
+        0x80, 0xc0
+    };
+
+    const WebRtc_UWord8 mask14_14[28] =
+    {
+        0x40, 0x54,
+        0x15, 0x40,
+        0xc0, 0x04,
+        0x28, 0x10,
+        0x05, 0x0c,
+        0x64, 0x80,
+        0x81, 0x80,
+        0x10, 0x98,
+        0x84, 0x20,
+        0x12, 0x30,
+        0x62, 0x00,
+        0x28, 0x60,
+        0x0e, 0x08,
+        0x10, 0x84
+    };
+
+    const WebRtc_UWord8 mask14_2[4] =
+    {
+        0xec, 0xe8,
+        0x3b, 0x9c
+    };
+
+    const WebRtc_UWord8 mask14_3[6] =
+    {
+        0xac, 0xd8,
+        0x55, 0x6c,
+        0x27, 0xb4
+    };
+
+    const WebRtc_UWord8 mask14_4[8] =
+    {
+        0x2c, 0xd8,
+        0x93, 0x68,
+        0x1a, 0xb4,
+        0x47, 0x2c
+    };
+
+    const WebRtc_UWord8 mask14_5[10] =
+    {
+        0x64, 0xd8,
+        0xa5, 0x68,
+        0x52, 0xb4,
+        0x1d, 0xa8,
+        0x9c, 0x54
+    };
+
+    const WebRtc_UWord8 mask14_6[12] =
+    {
+        0x4a, 0x54,
+        0x95, 0x48,
+        0x14, 0xb4,
+        0x51, 0xa8,
+        0x22, 0x6c,
+        0x88, 0x8c
+    };
+
+    const WebRtc_UWord8 mask14_7[14] =
+    {
+        0x62, 0x54,
+        0xb9, 0x20,
+        0x18, 0xb4,
+        0x54, 0x98,
+        0x06, 0x6c,
+        0x85, 0x54,
+        0xaa, 0x88
+    };
+
+    const WebRtc_UWord8 mask14_8[16] =
+    {
+        0xc0, 0x14,
+        0x41, 0x60,
+        0x88, 0x30,
+        0x20, 0xa4,
+        0x0a, 0x48,
+        0x04, 0x98,
+        0x94, 0x40,
+        0x72, 0x00
+    };
+
+    const WebRtc_UWord8 mask14_9[18] =
+    {
+        0xa2, 0x54,
+        0x34, 0x60,
+        0x4a, 0x24,
+        0x20, 0xa8,
+        0x11, 0x84,
+        0x49, 0x08,
+        0x86, 0x0c,
+        0x20, 0xd4,
+        0x88, 0x48
+    };
+
+    const WebRtc_UWord8 mask15_1[2] =
+    {
+        0xff, 0xfe
+    };
+
+    const WebRtc_UWord8 mask15_10[20] =
+    {
+        0xc0, 0xa0,
+        0x15, 0x56,
+        0x74, 0x40,
+        0x00, 0x9c,
+        0x01, 0x2c,
+        0x44, 0x92,
+        0x88, 0x50,
+        0x20, 0xa4,
+        0xaa, 0x04,
+        0x02, 0x62
+    };
+
+    const WebRtc_UWord8 mask15_11[22] =
+    {
+        0x62, 0x22,
+        0xf1, 0x10,
+        0x10, 0x0e,
+        0x10, 0xb0,
+        0x24, 0x24,
+        0x01, 0x12,
+        0x00, 0xc4,
+        0x04, 0xa2,
+        0x02, 0x58,
+        0x2b, 0x00,
+        0x98, 0x40
+    };
+
+    const WebRtc_UWord8 mask15_12[24] =
+    {
+        0x88, 0x90,
+        0x40, 0x54,
+        0x82, 0x62,
+        0x21, 0xa4,
+        0x10, 0x64,
+        0x44, 0x0a,
+        0x10, 0xc8,
+        0x4d, 0x2a,
+        0x38, 0x02,
+        0x17, 0x48,
+        0x90, 0x84,
+        0x72, 0x14
+    };
+
+    const WebRtc_UWord8 mask15_13[26] =
+    {
+        0x62, 0xa2,
+        0x34, 0x44,
+        0x40, 0x4a,
+        0xc4, 0x04,
+        0x08, 0x60,
+        0x94, 0x12,
+        0x88, 0xc0,
+        0x21, 0x32,
+        0xc1, 0x40,
+        0x10, 0x68,
+        0x06, 0x90,
+        0x59, 0x00,
+        0x0a, 0x0c
+    };
+
+    const WebRtc_UWord8 mask15_14[28] =
+    {
+        0x40, 0x82,
+        0x15, 0x54,
+        0x88, 0x12,
+        0xc0, 0x10,
+        0x80, 0xa0,
+        0x01, 0x22,
+        0x40, 0x2c,
+        0x22, 0x02,
+        0x90, 0x04,
+        0x12, 0x40,
+        0x5d, 0x00,
+        0x20, 0x54,
+        0x86, 0x08,
+        0x28, 0x88
+    };
+
+    const WebRtc_UWord8 mask15_15[30] =
+    {
+        0x62, 0x22,
+        0x31, 0x10,
+        0x58, 0x00,
+        0x01, 0x12,
+        0x88, 0x20,
+        0x44, 0x02,
+        0x29, 0x04,
+        0x82, 0xa0,
+        0x0a, 0x1a,
+        0x11, 0xe0,
+        0x84, 0x04,
+        0x86, 0x40,
+        0x00, 0x86,
+        0x44, 0x48,
+        0x10, 0x98
+    };
+
+    const WebRtc_UWord8 mask15_2[4] =
+    {
+        0xec, 0xea,
+        0xbb, 0x9c
+    };
+
+    const WebRtc_UWord8 mask15_3[6] =
+    {
+        0xac, 0x92,
+        0x55, 0x4a,
+        0x43, 0x36
+    };
+
+    const WebRtc_UWord8 mask15_4[8] =
+    {
+        0x25, 0xaa,
+        0x95, 0x54,
+        0x1a, 0x6a,
+        0x43, 0xd4
+    };
+
+    const WebRtc_UWord8 mask15_5[10] =
+    {
+        0x64, 0xa2,
+        0x25, 0x54,
+        0x49, 0x68,
+        0x53, 0x90,
+        0x8e, 0x30
+    };
+
+    const WebRtc_UWord8 mask15_6[12] =
+    {
+        0x62, 0x8a,
+        0x15, 0x54,
+        0x4c, 0x46,
+        0x52, 0x94,
+        0x23, 0x64,
+        0x8a, 0x58
+    };
+
+    const WebRtc_UWord8 mask15_7[14] =
+    {
+        0x62, 0xa2,
+        0xb1, 0x14,
+        0x18, 0x6a,
+        0x44, 0xd4,
+        0x13, 0x64,
+        0x49, 0x1a,
+        0x86, 0x8c
+    };
+
+    const WebRtc_UWord8 mask15_8[16] =
+    {
+        0x90, 0x22,
+        0x09, 0x50,
+        0x00, 0x6a,
+        0x20, 0x34,
+        0x14, 0x44,
+        0xc2, 0x10,
+        0x00, 0xc6,
+        0x65, 0x80
+    };
+
+    const WebRtc_UWord8 mask15_9[18] =
+    {
+        0x62, 0x22,
+        0x24, 0x44,
+        0xc0, 0x50,
+        0x03, 0x0c,
+        0x16, 0x28,
+        0x89, 0x00,
+        0x82, 0x90,
+        0x08, 0xa4,
+        0x90, 0x48
+    };
+
+    const WebRtc_UWord8 mask16_1[2] =
+    {
+        0xff, 0xff
+    };
+
+    const WebRtc_UWord8 mask16_10[20] =
+    {
+        0x45, 0x51,
+        0x10, 0xa2,
+        0x01, 0x25,
+        0x0b, 0x42,
+        0xd8, 0x20,
+        0x82, 0x8c,
+        0x24, 0x4a,
+        0x38, 0x18,
+        0x2a, 0x25,
+        0x84, 0x92
+    };
+
+    const WebRtc_UWord8 mask16_11[22] =
+    {
+        0x55, 0x55,
+        0x2a, 0x22,
+        0x31, 0x11,
+        0x83, 0x42,
+        0x06, 0x98,
+        0x40, 0xe1,
+        0x2c, 0x44,
+        0xd8, 0x28,
+        0x92, 0x81,
+        0x84, 0x32,
+        0x68, 0x0c
+    };
+
+    const WebRtc_UWord8 mask16_12[24] =
+    {
+        0x84, 0x31,
+        0x18, 0xa2,
+        0x4e, 0x01,
+        0x44, 0xc8,
+        0x0e, 0x90,
+        0x20, 0xcc,
+        0x93, 0x40,
+        0x2d, 0x10,
+        0x31, 0x44,
+        0xc0, 0x23,
+        0x11, 0x25,
+        0xe8, 0x80
+    };
+
+    const WebRtc_UWord8 mask16_13[26] =
+    {
+        0x45, 0x15,
+        0x22, 0x22,
+        0x96, 0x0c,
+        0x0c, 0x50,
+        0x62, 0x04,
+        0x49, 0x06,
+        0x11, 0x82,
+        0x12, 0x38,
+        0x40, 0x71,
+        0xa8, 0x8a,
+        0x08, 0xa1,
+        0xa0, 0xc0,
+        0xc5, 0x10
+    };
+
+    const WebRtc_UWord8 mask16_14[28] =
+    {
+        0x45, 0x51,
+        0x22, 0x0a,
+        0x84, 0xd0,
+        0x0c, 0x8a,
+        0x18, 0x06,
+        0x30, 0x03,
+        0x61, 0x08,
+        0x40, 0x11,
+        0x10, 0x2c,
+        0x09, 0x60,
+        0x00, 0x94,
+        0x52, 0x40,
+        0xa4, 0x24,
+        0x82, 0x88
+    };
+
+    const WebRtc_UWord8 mask16_15[30] =
+    {
+        0x55, 0x11,
+        0x22, 0x22,
+        0x11, 0x11,
+        0x80, 0x45,
+        0x20, 0x1a,
+        0x08, 0x68,
+        0x22, 0x84,
+        0x48, 0x09,
+        0x07, 0x01,
+        0x94, 0x20,
+        0x82, 0x06,
+        0x60, 0x48,
+        0x89, 0x80,
+        0x00, 0x8e,
+        0x18, 0x22
+    };
+
+    const WebRtc_UWord8 mask16_16[32] =
+    {
+        0xa4, 0x10,
+        0x01, 0x2a,
+        0x06, 0x42,
+        0x08, 0x68,
+        0x81, 0x90,
+        0x00, 0xf0,
+        0x50, 0x05,
+        0x20, 0x51,
+        0x43, 0x08,
+        0x68, 0x80,
+        0x80, 0x0b,
+        0x10, 0x4c,
+        0x12, 0x30,
+        0x40, 0x85,
+        0x0e, 0x04,
+        0x18, 0x12
+    };
+
+    const WebRtc_UWord8 mask16_2[4] =
+    {
+        0xae, 0xae,
+        0x79, 0x79
+    };
+
+    const WebRtc_UWord8 mask16_3[6] =
+    {
+        0xad, 0x2d,
+        0x76, 0x36,
+        0x26, 0xdb
+    };
+
+    const WebRtc_UWord8 mask16_4[8] =
+    {
+        0x55, 0x55,
+        0xaa, 0xaa,
+        0x35, 0x35,
+        0xca, 0xca
+    };
+
+    const WebRtc_UWord8 mask16_5[10] =
+    {
+        0x55, 0x55,
+        0x2a, 0x2a,
+        0x24, 0x25,
+        0x84, 0xc8,
+        0x10, 0xb6
+    };
+
+    const WebRtc_UWord8 mask16_6[12] =
+    {
+        0x51, 0x51,
+        0x0a, 0x2a,
+        0xa2, 0x15,
+        0x84, 0x4a,
+        0x30, 0x92,
+        0x04, 0xac
+    };
+
+    const WebRtc_UWord8 mask16_7[14] =
+    {
+        0x45, 0x51,
+        0x22, 0x2a,
+        0x91, 0x11,
+        0x2e, 0x08,
+        0x48, 0x34,
+        0x90, 0x29,
+        0x09, 0x86
+    };
+
+    const WebRtc_UWord8 mask16_8[16] =
+    {
+        0x20, 0x54,
+        0x18, 0x88,
+        0x84, 0x07,
+        0x60, 0x48,
+        0x12, 0x82,
+        0x81, 0x41,
+        0x40, 0x62,
+        0x16, 0x30
+    };
+
+    const WebRtc_UWord8 mask16_9[18] =
+    {
+        0x55, 0x51,
+        0x22, 0x2a,
+        0x05, 0x85,
+        0x09, 0x4a,
+        0x84, 0x32,
+        0xc0, 0x0d,
+        0x20, 0xa6,
+        0x1a, 0x09,
+        0x44, 0x64
+    };
+
+    const WebRtc_UWord8 mask17_1[6] =
+    {
+        0xff, 0xff, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_10[60] =
+    {
+        0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+        0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+        0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00,
+        0x8e, 0xcc, 0x00, 0x00, 0x00, 0x00,
+        0x6a, 0x2b, 0x00, 0x00, 0x00, 0x00,
+        0x36, 0x32, 0x80, 0x00, 0x00, 0x00,
+        0xd1, 0x25, 0x80, 0x00, 0x00, 0x00,
+        0xc8, 0x02, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_11[66] =
+    {
+        0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+        0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+        0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+        0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+        0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x89, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_12[72] =
+    {
+        0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+        0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+        0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x89, 0x00, 0x00, 0x00, 0x00,
+        0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+        0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+        0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x35, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_13[78] =
+    {
+        0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+        0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+        0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x89, 0x00, 0x00, 0x00, 0x00,
+        0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+        0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+        0x83, 0x34, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_14[84] =
+    {
+        0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+        0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+        0x83, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+        0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+        0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x89, 0x00, 0x00, 0x00, 0x00,
+        0xb0, 0xde, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_15[90] =
+    {
+        0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+        0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+        0x83, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x91, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0xc0, 0x80, 0x00, 0x00, 0x00,
+        0x68, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0xc8, 0x00, 0x00, 0x00, 0x00,
+        0x43, 0x45, 0x00, 0x00, 0x00, 0x00,
+        0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x1c, 0xa2, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_16[96] =
+    {
+        0x25, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x91, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0xc0, 0x80, 0x00, 0x00, 0x00,
+        0x68, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0xc8, 0x00, 0x00, 0x00, 0x00,
+        0x43, 0x45, 0x00, 0x00, 0x00, 0x00,
+        0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x1c, 0xa2, 0x00, 0x00, 0x00, 0x00,
+        0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+        0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+        0x83, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x0a, 0x1c, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_17[102] =
+    {
+        0x25, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x91, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0xc0, 0x80, 0x00, 0x00, 0x00,
+        0x68, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0xc8, 0x00, 0x00, 0x00, 0x00,
+        0x43, 0x45, 0x00, 0x00, 0x00, 0x00,
+        0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x1c, 0xa2, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0x4c, 0x00, 0x00, 0x00, 0x00,
+        0x8a, 0x66, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x91, 0x00, 0x00, 0x00, 0x00,
+        0x68, 0x42, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0xa4, 0x00, 0x00, 0x00, 0x00,
+        0x43, 0x13, 0x00, 0x00, 0x00, 0x00,
+        0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x1c, 0x88, 0x80, 0x00, 0x00, 0x00,
+        0x3c, 0x09, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_2[12] =
+    {
+        0xce, 0xce, 0x00, 0x00, 0x00, 0x00,
+        0xb9, 0x39, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_3[18] =
+    {
+        0xcd, 0xcc, 0x00, 0x00, 0x00, 0x00,
+        0x97, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0xb8, 0xd1, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_4[24] =
+    {
+        0xca, 0xec, 0x00, 0x00, 0x00, 0x00,
+        0xa9, 0x67, 0x00, 0x00, 0x00, 0x00,
+        0x3a, 0xb1, 0x80, 0x00, 0x00, 0x00,
+        0x55, 0x5a, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_5[30] =
+    {
+        0x55, 0x44, 0x80, 0x00, 0x00, 0x00,
+        0x2a, 0x66, 0x00, 0x00, 0x00, 0x00,
+        0x25, 0xa1, 0x80, 0x00, 0x00, 0x00,
+        0xe2, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x99, 0x98, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_6[36] =
+    {
+        0xd1, 0x4c, 0x00, 0x00, 0x00, 0x00,
+        0xa2, 0xc5, 0x00, 0x00, 0x00, 0x00,
+        0x95, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0xca, 0x0a, 0x80, 0x00, 0x00, 0x00,
+        0xa4, 0xaa, 0x00, 0x00, 0x00, 0x00,
+        0x78, 0x15, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_7[42] =
+    {
+        0x15, 0x44, 0x80, 0x00, 0x00, 0x00,
+        0x8a, 0x23, 0x00, 0x00, 0x00, 0x00,
+        0x85, 0x91, 0x00, 0x00, 0x00, 0x00,
+        0x32, 0x0a, 0x80, 0x00, 0x00, 0x00,
+        0x58, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x43, 0xc8, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_8[48] =
+    {
+        0x64, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0xa2, 0xc2, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x60, 0x80, 0x00, 0x00, 0x00,
+        0x4a, 0x85, 0x00, 0x00, 0x00, 0x00,
+        0x38, 0x4c, 0x00, 0x00, 0x00, 0x00,
+        0x89, 0x29, 0x00, 0x00, 0x00, 0x00,
+        0x07, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x94, 0xb0, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask17_9[54] =
+    {
+        0x8e, 0xcc, 0x00, 0x00, 0x00, 0x00,
+        0x6a, 0x2b, 0x00, 0x00, 0x00, 0x00,
+        0x36, 0x32, 0x80, 0x00, 0x00, 0x00,
+        0xd1, 0x25, 0x80, 0x00, 0x00, 0x00,
+        0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+        0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+        0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_1[6] =
+    {
+        0xff, 0xff, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_10[60] =
+    {
+        0x8c, 0xc6, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+        0x61, 0xb0, 0xc0, 0x00, 0x00, 0x00,
+        0x5c, 0x2e, 0x00, 0x00, 0x00, 0x00,
+        0xcc, 0x66, 0x00, 0x00, 0x00, 0x00,
+        0x2b, 0x15, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+        0x25, 0x92, 0xc0, 0x00, 0x00, 0x00,
+        0xfd, 0x9d, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_11[66] =
+    {
+        0x8c, 0xc6, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+        0x61, 0xb0, 0xc0, 0x00, 0x00, 0x00,
+        0x5c, 0x2e, 0x00, 0x00, 0x00, 0x00,
+        0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+        0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x89, 0x44, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_12[72] =
+    {
+        0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+        0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x89, 0x44, 0x80, 0x00, 0x00, 0x00,
+        0x8c, 0xc6, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+        0x61, 0xb0, 0xc0, 0x00, 0x00, 0x00,
+        0x5c, 0x2e, 0x00, 0x00, 0x00, 0x00,
+        0x5b, 0x0c, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_13[78] =
+    {
+        0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+        0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x89, 0x44, 0x80, 0x00, 0x00, 0x00,
+        0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+        0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+        0x34, 0x1a, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_14[84] =
+    {
+        0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+        0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+        0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+        0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+        0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+        0x89, 0x44, 0x80, 0x00, 0x00, 0x00,
+        0x7f, 0x4f, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_15[90] =
+    {
+        0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+        0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+        0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+        0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+        0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+        0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x51, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_16[96] =
+    {
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+        0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+        0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+        0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x51, 0x00, 0x00, 0x00, 0x00,
+        0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+        0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+        0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0xef, 0xf2, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_17[102] =
+    {
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+        0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+        0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+        0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x51, 0x00, 0x00, 0x00, 0x00,
+        0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x33, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0x42, 0xa1, 0x40, 0x00, 0x00, 0x00,
+        0xa4, 0x52, 0x00, 0x00, 0x00, 0x00,
+        0x13, 0x09, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0x88, 0xc4, 0x40, 0x00, 0x00, 0x00,
+        0x09, 0x04, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_18[108] =
+    {
+        0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x33, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0x42, 0xa1, 0x40, 0x00, 0x00, 0x00,
+        0xa4, 0x52, 0x00, 0x00, 0x00, 0x00,
+        0x13, 0x09, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0x88, 0xc4, 0x40, 0x00, 0x00, 0x00,
+        0x09, 0x04, 0x80, 0x00, 0x00, 0x00,
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+        0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+        0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+        0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x51, 0x00, 0x00, 0x00, 0x00,
+        0xd0, 0x03, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_2[12] =
+    {
+        0xce, 0x67, 0x00, 0x00, 0x00, 0x00,
+        0x39, 0x9c, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_3[18] =
+    {
+        0xcc, 0x66, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x15, 0x80, 0x00, 0x00, 0x00,
+        0x92, 0xc9, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_4[24] =
+    {
+        0xec, 0x76, 0x00, 0x00, 0x00, 0x00,
+        0x67, 0x33, 0x80, 0x00, 0x00, 0x00,
+        0xb1, 0xd8, 0xc0, 0x00, 0x00, 0x00,
+        0x5a, 0xad, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_5[30] =
+    {
+        0x4c, 0xa6, 0x40, 0x00, 0x00, 0x00,
+        0x66, 0x33, 0x00, 0x00, 0x00, 0x00,
+        0x19, 0xd0, 0xc0, 0x00, 0x00, 0x00,
+        0x9c, 0x89, 0x40, 0x00, 0x00, 0x00,
+        0xe3, 0x4c, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_6[36] =
+    {
+        0xcc, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x45, 0x62, 0x80, 0x00, 0x00, 0x00,
+        0xb0, 0x98, 0x40, 0x00, 0x00, 0x00,
+        0x8a, 0x85, 0x40, 0x00, 0x00, 0x00,
+        0x29, 0x53, 0x00, 0x00, 0x00, 0x00,
+        0xa6, 0x0a, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_7[42] =
+    {
+        0x44, 0xa2, 0x40, 0x00, 0x00, 0x00,
+        0x23, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+        0x0a, 0x85, 0x40, 0x00, 0x00, 0x00,
+        0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x0b, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0xe0, 0x64, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_8[48] =
+    {
+        0x16, 0x0b, 0x00, 0x00, 0x00, 0x00,
+        0xc2, 0x61, 0x00, 0x00, 0x00, 0x00,
+        0x60, 0xb0, 0x40, 0x00, 0x00, 0x00,
+        0x85, 0x42, 0x80, 0x00, 0x00, 0x00,
+        0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x29, 0x14, 0x80, 0x00, 0x00, 0x00,
+        0x11, 0x88, 0xc0, 0x00, 0x00, 0x00,
+        0xb0, 0x58, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask18_9[54] =
+    {
+        0x44, 0xa2, 0x40, 0x00, 0x00, 0x00,
+        0x66, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x90, 0x49, 0x40, 0x00, 0x00, 0x00,
+        0x01, 0xa5, 0x80, 0x00, 0x00, 0x00,
+        0x0e, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x13, 0x0b, 0x00, 0x00, 0x00, 0x00,
+        0x20, 0xd0, 0x40, 0x00, 0x00, 0x00,
+        0xc2, 0x51, 0x00, 0x00, 0x00, 0x00,
+        0x29, 0x0c, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_1[6] =
+    {
+        0xff, 0xff, 0xe0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_10[60] =
+    {
+        0x8c, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x11, 0xc0, 0x00, 0x00, 0x00,
+        0x32, 0x8d, 0x20, 0x00, 0x00, 0x00,
+        0x61, 0x92, 0x60, 0x00, 0x00, 0x00,
+        0x5c, 0x38, 0x80, 0x00, 0x00, 0x00,
+        0xcc, 0x75, 0x00, 0x00, 0x00, 0x00,
+        0x2b, 0x19, 0xc0, 0x00, 0x00, 0x00,
+        0x32, 0xd2, 0x60, 0x00, 0x00, 0x00,
+        0x25, 0x8e, 0xa0, 0x00, 0x00, 0x00,
+        0x50, 0x88, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_11[66] =
+    {
+        0x8c, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x11, 0xc0, 0x00, 0x00, 0x00,
+        0x32, 0x8d, 0x20, 0x00, 0x00, 0x00,
+        0x61, 0x92, 0x60, 0x00, 0x00, 0x00,
+        0x5c, 0x38, 0x80, 0x00, 0x00, 0x00,
+        0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+        0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+        0x89, 0x70, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_12[72] =
+    {
+        0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+        0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+        0x89, 0x70, 0x00, 0x00, 0x00, 0x00,
+        0x8c, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x11, 0xc0, 0x00, 0x00, 0x00,
+        0x32, 0x8d, 0x20, 0x00, 0x00, 0x00,
+        0x61, 0x92, 0x60, 0x00, 0x00, 0x00,
+        0x5c, 0x38, 0x80, 0x00, 0x00, 0x00,
+        0x90, 0xc8, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_13[78] =
+    {
+        0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+        0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+        0x89, 0x70, 0x00, 0x00, 0x00, 0x00,
+        0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+        0x34, 0x60, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_14[84] =
+    {
+        0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+        0x34, 0x60, 0x80, 0x00, 0x00, 0x00,
+        0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+        0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+        0x89, 0x70, 0x00, 0x00, 0x00, 0x00,
+        0x6e, 0x27, 0x60, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_15[90] =
+    {
+        0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+        0x34, 0x60, 0x80, 0x00, 0x00, 0x00,
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+        0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+        0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+        0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+        0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x28, 0x20, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_16[96] =
+    {
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+        0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+        0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+        0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+        0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+        0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+        0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+        0x34, 0x60, 0x80, 0x00, 0x00, 0x00,
+        0x7e, 0x75, 0xe0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_17[102] =
+    {
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+        0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+        0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+        0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+        0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x4c, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x71, 0x80, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xe0, 0x00, 0x00, 0x00,
+        0x42, 0x90, 0xa0, 0x00, 0x00, 0x00,
+        0xa4, 0x29, 0x40, 0x00, 0x00, 0x00,
+        0x13, 0x5a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x93, 0x40, 0x00, 0x00, 0x00,
+        0x88, 0xac, 0x20, 0x00, 0x00, 0x00,
+        0x09, 0x0c, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_18[108] =
+    {
+        0x4c, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x71, 0x80, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xe0, 0x00, 0x00, 0x00,
+        0x42, 0x90, 0xa0, 0x00, 0x00, 0x00,
+        0xa4, 0x29, 0x40, 0x00, 0x00, 0x00,
+        0x13, 0x5a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x93, 0x40, 0x00, 0x00, 0x00,
+        0x88, 0xac, 0x20, 0x00, 0x00, 0x00,
+        0x09, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+        0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+        0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+        0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+        0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x51, 0x97, 0x20, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_19[114] =
+    {
+        0x4c, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x71, 0x80, 0x00, 0x00, 0x00,
+        0x91, 0x40, 0xe0, 0x00, 0x00, 0x00,
+        0x42, 0x90, 0xa0, 0x00, 0x00, 0x00,
+        0xa4, 0x29, 0x40, 0x00, 0x00, 0x00,
+        0x13, 0x5a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x93, 0x40, 0x00, 0x00, 0x00,
+        0x88, 0xac, 0x20, 0x00, 0x00, 0x00,
+        0x09, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x28, 0x80, 0x00, 0x00, 0x00,
+        0x91, 0x50, 0x20, 0x00, 0x00, 0x00,
+        0x42, 0x82, 0x60, 0x00, 0x00, 0x00,
+        0xa4, 0x01, 0xc0, 0x00, 0x00, 0x00,
+        0x13, 0x43, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x94, 0x80, 0x00, 0x00, 0x00,
+        0x88, 0xa1, 0x20, 0x00, 0x00, 0x00,
+        0x09, 0x4c, 0x00, 0x00, 0x00, 0x00,
+        0xcd, 0x98, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_2[12] =
+    {
+        0xce, 0x77, 0x00, 0x00, 0x00, 0x00,
+        0x39, 0xcc, 0xe0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_3[18] =
+    {
+        0xcc, 0x67, 0x00, 0x00, 0x00, 0x00,
+        0x27, 0x2c, 0xc0, 0x00, 0x00, 0x00,
+        0x92, 0xd2, 0x60, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_4[24] =
+    {
+        0xec, 0x73, 0x00, 0x00, 0x00, 0x00,
+        0x67, 0x19, 0xc0, 0x00, 0x00, 0x00,
+        0xb1, 0xcc, 0x60, 0x00, 0x00, 0x00,
+        0x5a, 0x96, 0xa0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_5[30] =
+    {
+        0x4c, 0xe7, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x31, 0xc0, 0x00, 0x00, 0x00,
+        0xa1, 0xcc, 0x60, 0x00, 0x00, 0x00,
+        0x92, 0xa6, 0xa0, 0x00, 0x00, 0x00,
+        0xb8, 0x99, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_6[36] =
+    {
+        0x4c, 0x36, 0x00, 0x00, 0x00, 0x00,
+        0x45, 0x68, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0xd0, 0x60, 0x00, 0x00, 0x00,
+        0x8a, 0x82, 0xa0, 0x00, 0x00, 0x00,
+        0x26, 0x0b, 0x40, 0x00, 0x00, 0x00,
+        0x95, 0x45, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_7[42] =
+    {
+        0xc4, 0xa3, 0x00, 0x00, 0x00, 0x00,
+        0x23, 0x19, 0x80, 0x00, 0x00, 0x00,
+        0x91, 0x1c, 0x20, 0x00, 0x00, 0x00,
+        0x4a, 0x82, 0xa0, 0x00, 0x00, 0x00,
+        0x34, 0x49, 0x40, 0x00, 0x00, 0x00,
+        0x8b, 0x4a, 0x00, 0x00, 0x00, 0x00,
+        0xc8, 0x24, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_8[48] =
+    {
+        0x16, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0xc2, 0x44, 0xc0, 0x00, 0x00, 0x00,
+        0x60, 0xe8, 0x20, 0x00, 0x00, 0x00,
+        0x85, 0x12, 0x60, 0x00, 0x00, 0x00,
+        0xcc, 0x21, 0x40, 0x00, 0x00, 0x00,
+        0x29, 0x63, 0x00, 0x00, 0x00, 0x00,
+        0x11, 0x98, 0xc0, 0x00, 0x00, 0x00,
+        0xb0, 0x0c, 0x60, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask19_9[54] =
+    {
+        0x44, 0xa7, 0x00, 0x00, 0x00, 0x00,
+        0x66, 0x70, 0x80, 0x00, 0x00, 0x00,
+        0x12, 0xc0, 0xe0, 0x00, 0x00, 0x00,
+        0xc3, 0x10, 0xa0, 0x00, 0x00, 0x00,
+        0x8c, 0x29, 0x40, 0x00, 0x00, 0x00,
+        0x11, 0x5b, 0x00, 0x00, 0x00, 0x00,
+        0x21, 0x93, 0x40, 0x00, 0x00, 0x00,
+        0xa2, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x18, 0x0c, 0xe0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask1_1[2] =
+    {
+        0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_1[6] =
+    {
+        0xff, 0xff, 0xf0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_10[60] =
+    {
+        0x4c, 0x13, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0x80, 0xe0, 0x00, 0x00, 0x00,
+        0x86, 0x21, 0x80, 0x00, 0x00, 0x00,
+        0x29, 0x0a, 0x40, 0x00, 0x00, 0x00,
+        0x42, 0x50, 0x90, 0x00, 0x00, 0x00,
+        0x98, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x8c, 0x20, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_11[66] =
+    {
+        0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+        0x23, 0x88, 0xe0, 0x00, 0x00, 0x00,
+        0x1a, 0x46, 0x90, 0x00, 0x00, 0x00,
+        0x24, 0xc9, 0x30, 0x00, 0x00, 0x00,
+        0x71, 0x1c, 0x40, 0x00, 0x00, 0x00,
+        0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+        0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+        0xe0, 0x38, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_12[72] =
+    {
+        0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+        0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+        0xe0, 0x38, 0x00, 0x00, 0x00, 0x00,
+        0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+        0x23, 0x88, 0xe0, 0x00, 0x00, 0x00,
+        0x1a, 0x46, 0x90, 0x00, 0x00, 0x00,
+        0x24, 0xc9, 0x30, 0x00, 0x00, 0x00,
+        0x71, 0x1c, 0x40, 0x00, 0x00, 0x00,
+        0xf5, 0xdc, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_13[78] =
+    {
+        0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+        0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+        0xe0, 0x38, 0x00, 0x00, 0x00, 0x00,
+        0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+        0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+        0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+        0xc1, 0x30, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_14[84] =
+    {
+        0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+        0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+        0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+        0xc1, 0x30, 0x40, 0x00, 0x00, 0x00,
+        0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+        0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+        0xe0, 0x38, 0x00, 0x00, 0x00, 0x00,
+        0x56, 0x3e, 0x20, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_15[90] =
+    {
+        0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+        0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+        0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+        0xc1, 0x30, 0x40, 0x00, 0x00, 0x00,
+        0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+        0x50, 0x54, 0x10, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_16[96] =
+    {
+        0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+        0x50, 0x54, 0x10, 0x00, 0x00, 0x00,
+        0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+        0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+        0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+        0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+        0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+        0xc1, 0x30, 0x40, 0x00, 0x00, 0x00,
+        0x28, 0x1c, 0x60, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_17[102] =
+    {
+        0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+        0x50, 0x54, 0x10, 0x00, 0x00, 0x00,
+        0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+        0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+        0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+        0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+        0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+        0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+        0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+        0x19, 0x86, 0x60, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_18[108] =
+    {
+        0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+        0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+        0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+        0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+        0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+        0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+        0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+        0x19, 0x86, 0x60, 0x00, 0x00, 0x00,
+        0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+        0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+        0x50, 0x54, 0x10, 0x00, 0x00, 0x00,
+        0x21, 0x7b, 0xf0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_19[114] =
+    {
+        0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+        0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+        0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+        0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+        0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+        0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+        0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+        0x19, 0x86, 0x60, 0x00, 0x00, 0x00,
+        0x4c, 0x13, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0x80, 0xe0, 0x00, 0x00, 0x00,
+        0x86, 0x21, 0x80, 0x00, 0x00, 0x00,
+        0x29, 0x0a, 0x40, 0x00, 0x00, 0x00,
+        0x42, 0x50, 0x90, 0x00, 0x00, 0x00,
+        0x98, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x8c, 0x20, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_2[12] =
+    {
+        0xee, 0x3b, 0x80, 0x00, 0x00, 0x00,
+        0x99, 0xe6, 0x70, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_20[120] =
+    {
+        0x4c, 0x13, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x40, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0x80, 0xe0, 0x00, 0x00, 0x00,
+        0x86, 0x21, 0x80, 0x00, 0x00, 0x00,
+        0x29, 0x0a, 0x40, 0x00, 0x00, 0x00,
+        0x42, 0x50, 0x90, 0x00, 0x00, 0x00,
+        0x98, 0x26, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x8c, 0x20, 0x00, 0x00, 0x00,
+        0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+        0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+        0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+        0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+        0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+        0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+        0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+        0x19, 0x86, 0x60, 0x00, 0x00, 0x00,
+        0xf7, 0x8d, 0xa0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_3[18] =
+    {
+        0xce, 0x33, 0x80, 0x00, 0x00, 0x00,
+        0x55, 0x95, 0x60, 0x00, 0x00, 0x00,
+        0xb1, 0x6a, 0x30, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_4[24] =
+    {
+        0xe6, 0x39, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x8c, 0xe0, 0x00, 0x00, 0x00,
+        0x98, 0xe6, 0x30, 0x00, 0x00, 0x00,
+        0x2d, 0x4b, 0x50, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_5[30] =
+    {
+        0xce, 0x33, 0x80, 0x00, 0x00, 0x00,
+        0x63, 0x98, 0xe0, 0x00, 0x00, 0x00,
+        0x98, 0xe5, 0x30, 0x00, 0x00, 0x00,
+        0x2b, 0x53, 0x50, 0x00, 0x00, 0x00,
+        0xb4, 0x5c, 0xa0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_6[36] =
+    {
+        0x4c, 0x1b, 0x00, 0x00, 0x00, 0x00,
+        0x51, 0x34, 0x40, 0x00, 0x00, 0x00,
+        0x20, 0xe8, 0x30, 0x00, 0x00, 0x00,
+        0x85, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x06, 0x86, 0xa0, 0x00, 0x00, 0x00,
+        0x9a, 0x21, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_7[42] =
+    {
+        0x4e, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x33, 0x2c, 0x00, 0x00, 0x00, 0x00,
+        0x10, 0x0e, 0xb0, 0x00, 0x00, 0x00,
+        0x81, 0x51, 0x50, 0x00, 0x00, 0x00,
+        0x24, 0xc4, 0xa0, 0x00, 0x00, 0x00,
+        0xd4, 0x23, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0xa2, 0x60, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_8[48] =
+    {
+        0x27, 0x09, 0xc0, 0x00, 0x00, 0x00,
+        0x89, 0xa2, 0x60, 0x00, 0x00, 0x00,
+        0xd0, 0x74, 0x10, 0x00, 0x00, 0x00,
+        0x24, 0xc9, 0x30, 0x00, 0x00, 0x00,
+        0xe2, 0x90, 0xa0, 0x00, 0x00, 0x00,
+        0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+        0x31, 0x8c, 0x60, 0x00, 0x00, 0x00,
+        0x18, 0xc6, 0x30, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask20_9[54] =
+    {
+        0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+        0x62, 0x38, 0xc0, 0x00, 0x00, 0x00,
+        0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+        0xe1, 0x48, 0x50, 0x00, 0x00, 0x00,
+        0x13, 0x94, 0xa0, 0x00, 0x00, 0x00,
+        0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+        0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+        0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+        0x49, 0x86, 0x50, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_1[6] =
+    {
+        0xff, 0xff, 0xf8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_10[60] =
+    {
+        0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+        0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+        0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+        0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+        0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x84, 0xa8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_11[66] =
+    {
+        0xc6, 0x21, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x88, 0xc8, 0x00, 0x00, 0x00,
+        0x1a, 0x45, 0x88, 0x00, 0x00, 0x00,
+        0x24, 0xd3, 0x08, 0x00, 0x00, 0x00,
+        0x71, 0x10, 0x70, 0x00, 0x00, 0x00,
+        0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+        0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+        0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+        0xe0, 0x22, 0x90, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_12[72] =
+    {
+        0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+        0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+        0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+        0xe0, 0x22, 0x90, 0x00, 0x00, 0x00,
+        0xc6, 0x21, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x88, 0xc8, 0x00, 0x00, 0x00,
+        0x1a, 0x45, 0x88, 0x00, 0x00, 0x00,
+        0x24, 0xd3, 0x08, 0x00, 0x00, 0x00,
+        0x71, 0x10, 0x70, 0x00, 0x00, 0x00,
+        0xa0, 0x65, 0x18, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_13[78] =
+    {
+        0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+        0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+        0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+        0xe0, 0x22, 0x90, 0x00, 0x00, 0x00,
+        0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+        0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+        0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+        0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+        0xc1, 0x02, 0x58, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_14[84] =
+    {
+        0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+        0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+        0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+        0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+        0xc1, 0x02, 0x58, 0x00, 0x00, 0x00,
+        0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+        0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+        0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+        0xe0, 0x22, 0x90, 0x00, 0x00, 0x00,
+        0x4d, 0xd0, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_15[90] =
+    {
+        0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+        0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+        0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+        0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+        0xc1, 0x02, 0x58, 0x00, 0x00, 0x00,
+        0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+        0x50, 0x45, 0x08, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_16[96] =
+    {
+        0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+        0x50, 0x45, 0x08, 0x00, 0x00, 0x00,
+        0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+        0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+        0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+        0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+        0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+        0xc1, 0x02, 0x58, 0x00, 0x00, 0x00,
+        0x3b, 0xf5, 0x38, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_17[102] =
+    {
+        0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+        0x50, 0x45, 0x08, 0x00, 0x00, 0x00,
+        0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+        0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+        0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+        0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+        0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+        0x19, 0x9e, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_18[108] =
+    {
+        0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+        0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+        0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+        0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+        0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+        0x19, 0x9e, 0x00, 0x00, 0x00, 0x00,
+        0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+        0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+        0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+        0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+        0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+        0x50, 0x45, 0x08, 0x00, 0x00, 0x00,
+        0x5a, 0x56, 0x58, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_19[114] =
+    {
+        0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+        0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+        0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+        0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+        0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+        0x19, 0x9e, 0x00, 0x00, 0x00, 0x00,
+        0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+        0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+        0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+        0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+        0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x84, 0xa8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_2[12] =
+    {
+        0xee, 0x3b, 0x30, 0x00, 0x00, 0x00,
+        0x99, 0xe6, 0xe8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_20[120] =
+    {
+        0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+        0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+        0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+        0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+        0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x84, 0xa8, 0x00, 0x00, 0x00,
+        0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+        0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+        0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+        0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+        0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+        0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+        0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+        0x19, 0x9e, 0x00, 0x00, 0x00, 0x00,
+        0x2a, 0x03, 0x30, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_21[126] =
+    {
+        0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+        0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+        0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+        0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+        0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+        0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+        0x30, 0x84, 0xa8, 0x00, 0x00, 0x00,
+        0x4c, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0x51, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+        0xa0, 0x66, 0x40, 0x00, 0x00, 0x00,
+        0x04, 0xc1, 0x60, 0x00, 0x00, 0x00,
+        0x03, 0xa0, 0x28, 0x00, 0x00, 0x00,
+        0x86, 0x21, 0x10, 0x00, 0x00, 0x00,
+        0x29, 0x10, 0x18, 0x00, 0x00, 0x00,
+        0x42, 0x42, 0xa0, 0x00, 0x00, 0x00,
+        0x98, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x30, 0x84, 0x08, 0x00, 0x00, 0x00,
+        0xdf, 0x4c, 0x10, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_3[18] =
+    {
+        0xce, 0x32, 0xb0, 0x00, 0x00, 0x00,
+        0x55, 0xdc, 0x50, 0x00, 0x00, 0x00,
+        0xa8, 0xed, 0x88, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_4[24] =
+    {
+        0xe6, 0x31, 0x30, 0x00, 0x00, 0x00,
+        0x33, 0x8c, 0x58, 0x00, 0x00, 0x00,
+        0x98, 0xd2, 0xc8, 0x00, 0x00, 0x00,
+        0x2d, 0x4b, 0x28, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_5[30] =
+    {
+        0xce, 0x31, 0xb0, 0x00, 0x00, 0x00,
+        0x63, 0x98, 0xd8, 0x00, 0x00, 0x00,
+        0x98, 0xc7, 0x68, 0x00, 0x00, 0x00,
+        0x4d, 0x6b, 0x50, 0x00, 0x00, 0x00,
+        0xb2, 0x6c, 0xa8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_6[36] =
+    {
+        0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+        0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+        0x20, 0xea, 0x08, 0x00, 0x00, 0x00,
+        0x85, 0x41, 0x28, 0x00, 0x00, 0x00,
+        0x06, 0x80, 0xd8, 0x00, 0x00, 0x00,
+        0x8a, 0x24, 0x30, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_7[42] =
+    {
+        0xc6, 0x11, 0x90, 0x00, 0x00, 0x00,
+        0x33, 0x04, 0xc8, 0x00, 0x00, 0x00,
+        0x18, 0x67, 0x40, 0x00, 0x00, 0x00,
+        0x45, 0x42, 0xd0, 0x00, 0x00, 0x00,
+        0x12, 0xd4, 0x28, 0x00, 0x00, 0x00,
+        0xb4, 0x28, 0x30, 0x00, 0x00, 0x00,
+        0x29, 0x92, 0x18, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_8[48] =
+    {
+        0x07, 0x0a, 0x70, 0x00, 0x00, 0x00,
+        0x49, 0xa8, 0x28, 0x00, 0x00, 0x00,
+        0xb0, 0x7a, 0x00, 0x00, 0x00, 0x00,
+        0x24, 0xc5, 0xc0, 0x00, 0x00, 0x00,
+        0x52, 0x80, 0xe8, 0x00, 0x00, 0x00,
+        0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+        0x31, 0x94, 0x18, 0x00, 0x00, 0x00,
+        0x18, 0xc7, 0x08, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask21_9[54] =
+    {
+        0x4e, 0x11, 0x10, 0x00, 0x00, 0x00,
+        0x62, 0x1a, 0x08, 0x00, 0x00, 0x00,
+        0x80, 0xe9, 0x40, 0x00, 0x00, 0x00,
+        0xa1, 0x50, 0x50, 0x00, 0x00, 0x00,
+        0x53, 0x00, 0x68, 0x00, 0x00, 0x00,
+        0xa4, 0x24, 0x30, 0x00, 0x00, 0x00,
+        0x16, 0xa0, 0x88, 0x00, 0x00, 0x00,
+        0x58, 0x45, 0x20, 0x00, 0x00, 0x00,
+        0x29, 0x86, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_1[6] =
+    {
+        0xff, 0xff, 0xfc, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_10[60] =
+    {
+        0xc0, 0x38, 0x88, 0x00, 0x00, 0x00,
+        0x30, 0x0e, 0x28, 0x00, 0x00, 0x00,
+        0xe8, 0x07, 0x00, 0x00, 0x00, 0x00,
+        0x85, 0x08, 0xa8, 0x00, 0x00, 0x00,
+        0xd0, 0x92, 0x10, 0x00, 0x00, 0x00,
+        0x86, 0x50, 0x48, 0x00, 0x00, 0x00,
+        0x4a, 0x68, 0x0c, 0x00, 0x00, 0x00,
+        0x01, 0xa0, 0x74, 0x00, 0x00, 0x00,
+        0x4c, 0x81, 0x90, 0x00, 0x00, 0x00,
+        0x62, 0x24, 0x04, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_11[66] =
+    {
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x99, 0x13, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0xb0, 0x00, 0x00, 0x00,
+        0x80, 0xb0, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0x50, 0x88, 0x00, 0x00, 0x00,
+        0x40, 0x68, 0x0c, 0x00, 0x00, 0x00,
+        0x0a, 0x81, 0x50, 0x00, 0x00, 0x00,
+        0x68, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x10, 0x22, 0x04, 0x00, 0x00, 0x00,
+        0x30, 0x46, 0x08, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_12[72] =
+    {
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x0c, 0xa1, 0x94, 0x00, 0x00, 0x00,
+        0xa1, 0x34, 0x24, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+        0x8a, 0x51, 0x48, 0x00, 0x00, 0x00,
+        0x86, 0x90, 0xd0, 0x00, 0x00, 0x00,
+        0x23, 0x24, 0x64, 0x00, 0x00, 0x00,
+        0x16, 0x22, 0xc4, 0x00, 0x00, 0x00,
+        0x4c, 0x29, 0x84, 0x00, 0x00, 0x00,
+        0x41, 0xc8, 0x38, 0x00, 0x00, 0x00,
+        0xf4, 0x18, 0x9c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_13[78] =
+    {
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x0c, 0xa1, 0x94, 0x00, 0x00, 0x00,
+        0xa1, 0x34, 0x24, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+        0x8a, 0x51, 0x48, 0x00, 0x00, 0x00,
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+        0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+        0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+        0x09, 0x61, 0x2c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_14[84] =
+    {
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+        0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+        0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+        0x09, 0x61, 0x2c, 0x00, 0x00, 0x00,
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x0c, 0xa1, 0x94, 0x00, 0x00, 0x00,
+        0xa1, 0x34, 0x24, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+        0x8a, 0x51, 0x48, 0x00, 0x00, 0x00,
+        0xc6, 0xca, 0xe8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_15[90] =
+    {
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+        0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+        0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+        0x09, 0x61, 0x2c, 0x00, 0x00, 0x00,
+        0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+        0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+        0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+        0x14, 0x22, 0x84, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_16[96] =
+    {
+        0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+        0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+        0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+        0x14, 0x22, 0x84, 0x00, 0x00, 0x00,
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+        0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+        0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+        0x09, 0x61, 0x2c, 0x00, 0x00, 0x00,
+        0x86, 0xc1, 0x44, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_17[102] =
+    {
+        0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+        0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+        0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+        0x14, 0x22, 0x84, 0x00, 0x00, 0x00,
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+        0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+        0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+        0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+        0x78, 0x0f, 0x00, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_18[108] =
+    {
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+        0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+        0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+        0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+        0x78, 0x0f, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+        0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+        0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+        0x14, 0x22, 0x84, 0x00, 0x00, 0x00,
+        0xe4, 0xd4, 0x6c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_19[114] =
+    {
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+        0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+        0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+        0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+        0x78, 0x0f, 0x00, 0x00, 0x00, 0x00,
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+        0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+        0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+        0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+        0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_2[12] =
+    {
+        0xec, 0xdd, 0x98, 0x00, 0x00, 0x00,
+        0x9b, 0xb3, 0x74, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_20[120] =
+    {
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+        0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+        0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+        0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+        0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+        0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+        0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+        0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+        0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+        0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+        0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+        0x78, 0x0f, 0x00, 0x00, 0x00, 0x00,
+        0x3b, 0x48, 0xc4, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_21[126] =
+    {
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+        0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+        0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+        0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+        0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x99, 0x13, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0xb0, 0x00, 0x00, 0x00,
+        0x80, 0xb0, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0x50, 0x88, 0x00, 0x00, 0x00,
+        0x40, 0x68, 0x0c, 0x00, 0x00, 0x00,
+        0x0a, 0x81, 0x50, 0x00, 0x00, 0x00,
+        0x68, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x10, 0x22, 0x04, 0x00, 0x00, 0x00,
+        0x30, 0x46, 0x08, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_22[132] =
+    {
+        0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x99, 0x13, 0x20, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0xb0, 0x00, 0x00, 0x00,
+        0x80, 0xb0, 0x14, 0x00, 0x00, 0x00,
+        0x84, 0x50, 0x88, 0x00, 0x00, 0x00,
+        0x40, 0x68, 0x0c, 0x00, 0x00, 0x00,
+        0x0a, 0x81, 0x50, 0x00, 0x00, 0x00,
+        0x68, 0x0d, 0x00, 0x00, 0x00, 0x00,
+        0x10, 0x22, 0x04, 0x00, 0x00, 0x00,
+        0x30, 0x46, 0x08, 0x00, 0x00, 0x00,
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+        0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+        0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+        0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+        0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+        0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+        0x9e, 0xce, 0x88, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_3[18] =
+    {
+        0xca, 0xd9, 0x58, 0x00, 0x00, 0x00,
+        0xf1, 0x5e, 0x28, 0x00, 0x00, 0x00,
+        0xb6, 0x35, 0xc4, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_4[24] =
+    {
+        0xc4, 0xd8, 0x98, 0x00, 0x00, 0x00,
+        0x31, 0x66, 0x2c, 0x00, 0x00, 0x00,
+        0x4b, 0x29, 0x64, 0x00, 0x00, 0x00,
+        0x2c, 0xa5, 0x94, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_5[30] =
+    {
+        0xc6, 0xd8, 0xd8, 0x00, 0x00, 0x00,
+        0x63, 0x6c, 0x6c, 0x00, 0x00, 0x00,
+        0x1d, 0xa3, 0xb4, 0x00, 0x00, 0x00,
+        0xad, 0x55, 0xa8, 0x00, 0x00, 0x00,
+        0xb2, 0xb6, 0x54, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_6[36] =
+    {
+        0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+        0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0xa8, 0x35, 0x04, 0x00, 0x00, 0x00,
+        0xc4, 0xa0, 0x94, 0x00, 0x00, 0x00,
+        0x03, 0x60, 0x6c, 0x00, 0x00, 0x00,
+        0x90, 0xd2, 0x18, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_7[42] =
+    {
+        0xc6, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x13, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x8d, 0x13, 0xa0, 0x00, 0x00, 0x00,
+        0x8b, 0x41, 0x68, 0x00, 0x00, 0x00,
+        0x52, 0xaa, 0x14, 0x00, 0x00, 0x00,
+        0xa2, 0xd4, 0x18, 0x00, 0x00, 0x00,
+        0x61, 0xa8, 0x2c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_8[48] =
+    {
+        0x28, 0x85, 0x38, 0x00, 0x00, 0x00,
+        0x21, 0xf4, 0x04, 0x00, 0x00, 0x00,
+        0xe9, 0x1d, 0x00, 0x00, 0x00, 0x00,
+        0x17, 0x02, 0xe0, 0x00, 0x00, 0x00,
+        0x83, 0xa0, 0x54, 0x00, 0x00, 0x00,
+        0x46, 0x18, 0xe8, 0x00, 0x00, 0x00,
+        0x50, 0x6a, 0x0c, 0x00, 0x00, 0x00,
+        0x1c, 0x23, 0x84, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask22_9[54] =
+    {
+        0x44, 0x48, 0xc8, 0x00, 0x00, 0x00,
+        0x28, 0x2d, 0x0c, 0x00, 0x00, 0x00,
+        0x25, 0x14, 0xa0, 0x00, 0x00, 0x00,
+        0x59, 0x0a, 0x20, 0x00, 0x00, 0x00,
+        0x03, 0xa0, 0x34, 0x00, 0x00, 0x00,
+        0xc0, 0xd0, 0x18, 0x00, 0x00, 0x00,
+        0xa2, 0x30, 0x44, 0x00, 0x00, 0x00,
+        0x14, 0x82, 0xd0, 0x00, 0x00, 0x00,
+        0x9a, 0x03, 0x80, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_1[6] =
+    {
+        0xff, 0xff, 0xfe, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_10[60] =
+    {
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+        0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+        0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+        0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+        0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+        0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+        0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0xb4, 0x50, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_11[66] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+        0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+        0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+        0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+        0x30, 0x58, 0x04, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_12[72] =
+    {
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+        0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+        0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+        0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+        0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00,
+        0x86, 0x8a, 0x6c, 0x00, 0x00, 0x00,
+        0x23, 0x2c, 0x84, 0x00, 0x00, 0x00,
+        0x16, 0x21, 0x98, 0x00, 0x00, 0x00,
+        0x4c, 0x30, 0x54, 0x00, 0x00, 0x00,
+        0x41, 0xc1, 0x26, 0x00, 0x00, 0x00,
+        0x19, 0x56, 0xe4, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_13[78] =
+    {
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+        0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+        0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+        0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+        0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00,
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+        0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+        0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+        0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x71, 0x0c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_14[84] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+        0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+        0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+        0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x71, 0x0c, 0x00, 0x00, 0x00,
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+        0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+        0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+        0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+        0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00,
+        0x9c, 0x3f, 0xb2, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_15[90] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+        0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+        0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+        0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x71, 0x0c, 0x00, 0x00, 0x00,
+        0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+        0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+        0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x14, 0x38, 0xa0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_16[96] =
+    {
+        0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+        0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+        0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x14, 0x38, 0xa0, 0x00, 0x00, 0x00,
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+        0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+        0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+        0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x71, 0x0c, 0x00, 0x00, 0x00,
+        0xfa, 0xd9, 0xf4, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_17[102] =
+    {
+        0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+        0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+        0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x14, 0x38, 0xa0, 0x00, 0x00, 0x00,
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+        0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+        0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+        0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+        0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+        0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+        0x78, 0x00, 0x1c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_18[108] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+        0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+        0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+        0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+        0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+        0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+        0x78, 0x00, 0x1c, 0x00, 0x00, 0x00,
+        0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+        0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+        0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x14, 0x38, 0xa0, 0x00, 0x00, 0x00,
+        0x82, 0x32, 0x56, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_19[114] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+        0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+        0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+        0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+        0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+        0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+        0x78, 0x00, 0x1c, 0x00, 0x00, 0x00,
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+        0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+        0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+        0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+        0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+        0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+        0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0xb4, 0x50, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_2[12] =
+    {
+        0xec, 0xdd, 0x98, 0x00, 0x00, 0x00,
+        0x9b, 0xb2, 0x76, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_20[120] =
+    {
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+        0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+        0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+        0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+        0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+        0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+        0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0xb4, 0x50, 0x00, 0x00, 0x00,
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+        0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+        0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+        0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+        0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+        0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+        0x78, 0x00, 0x1c, 0x00, 0x00, 0x00,
+        0xdb, 0x4a, 0x7a, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_21[126] =
+    {
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+        0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+        0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+        0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+        0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+        0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+        0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0xb4, 0x50, 0x00, 0x00, 0x00,
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+        0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+        0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+        0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+        0x30, 0x58, 0x04, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_22[132] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+        0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+        0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+        0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+        0x30, 0x58, 0x04, 0x00, 0x00, 0x00,
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+        0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+        0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+        0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+        0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+        0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+        0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+        0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+        0x12, 0xb4, 0x50, 0x00, 0x00, 0x00,
+        0xea, 0x8d, 0x1a, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_23[138] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+        0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+        0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+        0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+        0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+        0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+        0x30, 0x58, 0x04, 0x00, 0x00, 0x00,
+        0x46, 0x42, 0x0c, 0x00, 0x00, 0x00,
+        0x33, 0x20, 0x46, 0x00, 0x00, 0x00,
+        0x99, 0x08, 0x0a, 0x00, 0x00, 0x00,
+        0x05, 0x84, 0x30, 0x00, 0x00, 0x00,
+        0x80, 0xb0, 0x22, 0x00, 0x00, 0x00,
+        0x84, 0x42, 0x90, 0x00, 0x00, 0x00,
+        0x40, 0x73, 0x00, 0x00, 0x00, 0x00,
+        0x0a, 0x81, 0x12, 0x00, 0x00, 0x00,
+        0x68, 0x0c, 0x40, 0x00, 0x00, 0x00,
+        0x10, 0x24, 0x84, 0x00, 0x00, 0x00,
+        0x30, 0x51, 0x40, 0x00, 0x00, 0x00,
+        0x5f, 0x50, 0x88, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_3[18] =
+    {
+        0xca, 0xd3, 0x64, 0x00, 0x00, 0x00,
+        0xf1, 0x49, 0x3a, 0x00, 0x00, 0x00,
+        0x76, 0x27, 0xd0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_4[24] =
+    {
+        0xc4, 0xd1, 0x64, 0x00, 0x00, 0x00,
+        0x31, 0x62, 0x96, 0x00, 0x00, 0x00,
+        0x4b, 0x24, 0x5a, 0x00, 0x00, 0x00,
+        0x2c, 0xa8, 0xaa, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_5[30] =
+    {
+        0xc6, 0xca, 0x6c, 0x00, 0x00, 0x00,
+        0x63, 0x6c, 0x96, 0x00, 0x00, 0x00,
+        0x1d, 0xa1, 0xdc, 0x00, 0x00, 0x00,
+        0xad, 0x55, 0x38, 0x00, 0x00, 0x00,
+        0xb2, 0xb7, 0x06, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_6[36] =
+    {
+        0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+        0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+        0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+        0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+        0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+        0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_7[42] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+        0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+        0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+        0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+        0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x71, 0x0c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_8[48] =
+    {
+        0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+        0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+        0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+        0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+        0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+        0x14, 0x38, 0xa0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask23_9[54] =
+    {
+        0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+        0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+        0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+        0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+        0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+        0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+        0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+        0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+        0x78, 0x00, 0x1c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_1[6] =
+    {
+        0xff, 0xff, 0xff, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_10[60] =
+    {
+        0x11, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0x45, 0x34, 0x53, 0x00, 0x00, 0x00,
+        0x00, 0x48, 0x05, 0x00, 0x00, 0x00,
+        0x10, 0x83, 0x09, 0x00, 0x00, 0x00,
+        0x4a, 0x14, 0xa1, 0x00, 0x00, 0x00,
+        0x40, 0xa4, 0x0a, 0x00, 0x00, 0x00,
+        0xa0, 0x6a, 0x02, 0x00, 0x00, 0x00,
+        0x88, 0x80, 0x8c, 0x00, 0x00, 0x00,
+        0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x54, 0x0d, 0x40, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_11[66] =
+    {
+        0x53, 0x65, 0x34, 0x00, 0x00, 0x00,
+        0xa0, 0x32, 0x11, 0x00, 0x00, 0x00,
+        0x15, 0x11, 0x41, 0x00, 0x00, 0x00,
+        0x03, 0x50, 0x15, 0x00, 0x00, 0x00,
+        0x8c, 0x88, 0xc8, 0x00, 0x00, 0x00,
+        0x28, 0x82, 0x88, 0x00, 0x00, 0x00,
+        0x08, 0x48, 0x84, 0x00, 0x00, 0x00,
+        0x99, 0x01, 0x90, 0x00, 0x00, 0x00,
+        0x22, 0x92, 0x29, 0x00, 0x00, 0x00,
+        0x46, 0x04, 0x60, 0x00, 0x00, 0x00,
+        0x8c, 0x2c, 0x02, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_12[72] =
+    {
+        0x10, 0x61, 0x06, 0x00, 0x00, 0x00,
+        0x02, 0x30, 0x23, 0x00, 0x00, 0x00,
+        0x40, 0x54, 0x05, 0x00, 0x00, 0x00,
+        0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+        0x81, 0x18, 0x11, 0x00, 0x00, 0x00,
+        0x14, 0x81, 0x48, 0x00, 0x00, 0x00,
+        0x98, 0x09, 0x80, 0x00, 0x00, 0x00,
+        0x08, 0x90, 0x89, 0x00, 0x00, 0x00,
+        0x62, 0x06, 0x20, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x8a, 0x08, 0xa0, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x44, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_13[78] =
+    {
+        0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0xc5, 0x1c, 0x51, 0x00, 0x00, 0x00,
+        0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+        0x12, 0x31, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0xe0, 0x8e, 0x00, 0x00, 0x00,
+        0x2e, 0x02, 0xe0, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+        0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+        0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+        0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+        0x88, 0x68, 0x86, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_14[84] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+        0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+        0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+        0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+        0x88, 0x68, 0x86, 0x00, 0x00, 0x00,
+        0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0xc5, 0x1c, 0x51, 0x00, 0x00, 0x00,
+        0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+        0x12, 0x31, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0xe0, 0x8e, 0x00, 0x00, 0x00,
+        0x2e, 0x02, 0xe0, 0x00, 0x00, 0x00,
+        0xf2, 0xd6, 0x8e, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_15[90] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+        0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+        0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+        0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+        0x88, 0x68, 0x86, 0x00, 0x00, 0x00,
+        0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+        0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+        0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+        0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+        0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+        0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_16[96] =
+    {
+        0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+        0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+        0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+        0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+        0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+        0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+        0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+        0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+        0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+        0x88, 0x68, 0x86, 0x00, 0x00, 0x00,
+        0xff, 0x6e, 0x0a, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_17[102] =
+    {
+        0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+        0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+        0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+        0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+        0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+        0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+        0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+        0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+        0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+        0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_18[108] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+        0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+        0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+        0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+        0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+        0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+        0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+        0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+        0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+        0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00,
+        0x34, 0x50, 0xae, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_19[114] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+        0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+        0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+        0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+        0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+        0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+        0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+        0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_2[12] =
+    {
+        0xec, 0xce, 0xcc, 0x00, 0x00, 0x00,
+        0x93, 0xb9, 0x3b, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_20[120] =
+    {
+        0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+        0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+        0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+        0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+        0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+        0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+        0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+        0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0x98, 0xa2, 0x95, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_21[126] =
+    {
+        0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+        0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+        0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+        0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+        0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+        0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+        0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+        0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+        0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+        0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_22[132] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+        0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+        0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+        0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+        0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+        0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+        0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00,
+        0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+        0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+        0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+        0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+        0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+        0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+        0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00,
+        0x1a, 0xaa, 0xee, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_23[138] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+        0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+        0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+        0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+        0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+        0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+        0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00,
+        0x10, 0x61, 0x06, 0x00, 0x00, 0x00,
+        0x02, 0x30, 0x23, 0x00, 0x00, 0x00,
+        0x40, 0x54, 0x05, 0x00, 0x00, 0x00,
+        0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+        0x81, 0x18, 0x11, 0x00, 0x00, 0x00,
+        0x14, 0x81, 0x48, 0x00, 0x00, 0x00,
+        0x98, 0x09, 0x80, 0x00, 0x00, 0x00,
+        0x08, 0x90, 0x89, 0x00, 0x00, 0x00,
+        0x62, 0x06, 0x20, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x8a, 0x08, 0xa0, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x44, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_24[144] =
+    {
+        0x10, 0x61, 0x06, 0x00, 0x00, 0x00,
+        0x02, 0x30, 0x23, 0x00, 0x00, 0x00,
+        0x40, 0x54, 0x05, 0x00, 0x00, 0x00,
+        0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+        0x81, 0x18, 0x11, 0x00, 0x00, 0x00,
+        0x14, 0x81, 0x48, 0x00, 0x00, 0x00,
+        0x98, 0x09, 0x80, 0x00, 0x00, 0x00,
+        0x08, 0x90, 0x89, 0x00, 0x00, 0x00,
+        0x62, 0x06, 0x20, 0x00, 0x00, 0x00,
+        0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+        0x8a, 0x08, 0xa0, 0x00, 0x00, 0x00,
+        0x84, 0x48, 0x44, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+        0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+        0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+        0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+        0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+        0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+        0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+        0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00,
+        0x88, 0x32, 0x59, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_3[18] =
+    {
+        0x9b, 0x29, 0xb2, 0x00, 0x00, 0x00,
+        0x49, 0xd4, 0x9d, 0x00, 0x00, 0x00,
+        0x3e, 0x83, 0xe8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_4[24] =
+    {
+        0x8b, 0x28, 0xb2, 0x00, 0x00, 0x00,
+        0x14, 0xb1, 0x4b, 0x00, 0x00, 0x00,
+        0x22, 0xd2, 0x2d, 0x00, 0x00, 0x00,
+        0x45, 0x54, 0x55, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_5[30] =
+    {
+        0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x64, 0xb6, 0x4b, 0x00, 0x00, 0x00,
+        0x0e, 0xe0, 0xee, 0x00, 0x00, 0x00,
+        0xa9, 0xca, 0x9c, 0x00, 0x00, 0x00,
+        0xb8, 0x3b, 0x83, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_6[36] =
+    {
+        0xd1, 0x4d, 0x14, 0x00, 0x00, 0x00,
+        0x45, 0x34, 0x53, 0x00, 0x00, 0x00,
+        0x22, 0xd2, 0x2d, 0x00, 0x00, 0x00,
+        0x16, 0xc1, 0x6c, 0x00, 0x00, 0x00,
+        0x0b, 0xa0, 0xba, 0x00, 0x00, 0x00,
+        0xe8, 0x8e, 0x88, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_7[42] =
+    {
+        0xd3, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x25, 0x32, 0x53, 0x00, 0x00, 0x00,
+        0x30, 0xd3, 0x05, 0x00, 0x00, 0x00,
+        0x06, 0x48, 0x6c, 0x00, 0x00, 0x00,
+        0xc0, 0xb8, 0x1b, 0x00, 0x00, 0x00,
+        0x2a, 0xa2, 0xaa, 0x00, 0x00, 0x00,
+        0xa8, 0x4e, 0x84, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_8[48] =
+    {
+        0x81, 0x60, 0x16, 0x00, 0x00, 0x00,
+        0x40, 0x3c, 0x03, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+        0x06, 0x50, 0x65, 0x00, 0x00, 0x00,
+        0x20, 0x4a, 0x84, 0x00, 0x00, 0x00,
+        0x8a, 0xa0, 0xaa, 0x00, 0x00, 0x00,
+        0x33, 0x03, 0x30, 0x00, 0x00, 0x00,
+        0x4c, 0x84, 0xc8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask24_9[54] =
+    {
+        0xd3, 0x65, 0x36, 0x00, 0x00, 0x00,
+        0x64, 0x26, 0x42, 0x00, 0x00, 0x00,
+        0x18, 0x41, 0xc4, 0x00, 0x00, 0x00,
+        0xa0, 0x4a, 0x04, 0x00, 0x00, 0x00,
+        0x81, 0x38, 0x13, 0x00, 0x00, 0x00,
+        0x22, 0xa2, 0x2a, 0x00, 0x00, 0x00,
+        0x08, 0x70, 0x87, 0x00, 0x00, 0x00,
+        0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+        0x01, 0xc0, 0x1c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_1[6] =
+    {
+        0xff, 0xff, 0xff, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_10[60] =
+    {
+        0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+        0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+        0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+        0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+        0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+        0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+        0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+        0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+        0xa2, 0x86, 0x22, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_11[66] =
+    {
+        0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+        0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+        0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+        0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+        0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+        0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+        0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+        0xc0, 0x24, 0x41, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_12[72] =
+    {
+        0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+        0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+        0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+        0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+        0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+        0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+        0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+        0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+        0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+        0x84, 0x45, 0x22, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_13[78] =
+    {
+        0x51, 0x4d, 0x12, 0x00, 0x00, 0x00,
+        0xc5, 0x14, 0x6d, 0x00, 0x00, 0x00,
+        0x21, 0x81, 0x54, 0x80, 0x00, 0x00,
+        0x12, 0x32, 0x17, 0x00, 0x00, 0x00,
+        0x08, 0xe2, 0x8c, 0x80, 0x00, 0x00,
+        0x2e, 0x0a, 0xa2, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+        0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+        0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+        0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+        0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+        0x88, 0x68, 0x4b, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_14[84] =
+    {
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+        0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+        0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+        0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+        0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+        0x88, 0x68, 0x4b, 0x00, 0x00, 0x00,
+        0x51, 0x4d, 0x12, 0x00, 0x00, 0x00,
+        0xc5, 0x14, 0x6d, 0x00, 0x00, 0x00,
+        0x21, 0x81, 0x54, 0x80, 0x00, 0x00,
+        0x12, 0x32, 0x17, 0x00, 0x00, 0x00,
+        0x08, 0xe2, 0x8c, 0x80, 0x00, 0x00,
+        0x2e, 0x0a, 0xa2, 0x00, 0x00, 0x00,
+        0x73, 0x76, 0x61, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_15[90] =
+    {
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+        0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+        0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+        0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+        0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+        0x88, 0x68, 0x4b, 0x00, 0x00, 0x00,
+        0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+        0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+        0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+        0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+        0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+        0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_16[96] =
+    {
+        0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+        0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+        0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+        0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+        0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+        0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+        0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+        0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+        0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+        0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+        0x88, 0x68, 0x4b, 0x00, 0x00, 0x00,
+        0x16, 0xe8, 0xdc, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_17[102] =
+    {
+        0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+        0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+        0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+        0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+        0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+        0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+        0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+        0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+        0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+        0x00, 0xe4, 0x01, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_18[108] =
+    {
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+        0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+        0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+        0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+        0x00, 0xe4, 0x01, 0x80, 0x00, 0x00,
+        0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+        0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+        0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+        0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+        0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+        0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00,
+        0xce, 0x9b, 0xe1, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_19[114] =
+    {
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+        0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+        0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+        0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+        0x00, 0xe4, 0x01, 0x80, 0x00, 0x00,
+        0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+        0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+        0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+        0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+        0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+        0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+        0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+        0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+        0xa2, 0x86, 0x22, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_2[12] =
+    {
+        0xec, 0xce, 0xcc, 0x00, 0x00, 0x00,
+        0x93, 0xb1, 0xb3, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_20[120] =
+    {
+        0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+        0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+        0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+        0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+        0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+        0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+        0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+        0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+        0xa2, 0x86, 0x22, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+        0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+        0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+        0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+        0x00, 0xe4, 0x01, 0x80, 0x00, 0x00,
+        0x1b, 0x8a, 0xa0, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_21[126] =
+    {
+        0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+        0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+        0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+        0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+        0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+        0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+        0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+        0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+        0xa2, 0x86, 0x22, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+        0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+        0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+        0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+        0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+        0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+        0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+        0xc0, 0x24, 0x41, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_22[132] =
+    {
+        0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+        0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+        0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+        0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+        0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+        0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+        0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+        0xc0, 0x24, 0x41, 0x80, 0x00, 0x00,
+        0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+        0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+        0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+        0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+        0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+        0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+        0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+        0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+        0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+        0xa2, 0x86, 0x22, 0x00, 0x00, 0x00,
+        0x15, 0xa2, 0x99, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_23[138] =
+    {
+        0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+        0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+        0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+        0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+        0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+        0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+        0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+        0xc0, 0x24, 0x41, 0x80, 0x00, 0x00,
+        0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+        0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+        0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+        0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+        0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+        0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+        0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+        0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+        0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+        0x84, 0x45, 0x22, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_24[144] =
+    {
+        0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+        0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+        0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+        0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+        0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+        0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+        0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+        0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+        0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+        0x84, 0x45, 0x22, 0x00, 0x00, 0x00,
+        0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+        0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+        0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+        0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+        0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+        0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+        0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+        0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+        0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+        0xc0, 0x24, 0x41, 0x80, 0x00, 0x00,
+        0xf9, 0x0c, 0x14, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_25[150] =
+    {
+        0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+        0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+        0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+        0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+        0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+        0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+        0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+        0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+        0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+        0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+        0x84, 0x45, 0x22, 0x00, 0x00, 0x00,
+        0x10, 0x65, 0x12, 0x00, 0x00, 0x00,
+        0x02, 0x36, 0x64, 0x00, 0x00, 0x00,
+        0x40, 0x50, 0x54, 0x80, 0x00, 0x00,
+        0x21, 0x88, 0x12, 0x00, 0x00, 0x00,
+        0x81, 0x19, 0x40, 0x00, 0x00, 0x00,
+        0x14, 0x83, 0x08, 0x00, 0x00, 0x00,
+        0x98, 0x02, 0x11, 0x00, 0x00, 0x00,
+        0x08, 0x90, 0x3c, 0x00, 0x00, 0x00,
+        0x62, 0x0e, 0x80, 0x00, 0x00, 0x00,
+        0x24, 0x20, 0xa1, 0x00, 0x00, 0x00,
+        0x8a, 0x08, 0x01, 0x80, 0x00, 0x00,
+        0x84, 0x40, 0x49, 0x00, 0x00, 0x00,
+        0x1c, 0x20, 0x8a, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_3[18] =
+    {
+        0x9b, 0x89, 0x9b, 0x00, 0x00, 0x00,
+        0x4f, 0x14, 0x6d, 0x80, 0x00, 0x00,
+        0x3c, 0x63, 0x72, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_4[24] =
+    {
+        0x8b, 0x24, 0x9b, 0x00, 0x00, 0x00,
+        0x14, 0xb2, 0x6d, 0x00, 0x00, 0x00,
+        0x22, 0xd8, 0x56, 0x80, 0x00, 0x00,
+        0x45, 0x55, 0x25, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_5[30] =
+    {
+        0x53, 0x65, 0x13, 0x00, 0x00, 0x00,
+        0x64, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x0c, 0xc0, 0xc6, 0x80, 0x00, 0x00,
+        0x82, 0xaa, 0x1c, 0x00, 0x00, 0x00,
+        0x09, 0x32, 0x29, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_6[36] =
+    {
+        0x51, 0x4d, 0x12, 0x00, 0x00, 0x00,
+        0xc5, 0x14, 0x6d, 0x00, 0x00, 0x00,
+        0x21, 0x81, 0x54, 0x80, 0x00, 0x00,
+        0x12, 0x32, 0x17, 0x00, 0x00, 0x00,
+        0x08, 0xe2, 0x8c, 0x80, 0x00, 0x00,
+        0x2e, 0x0a, 0xa2, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_7[42] =
+    {
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+        0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+        0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+        0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+        0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+        0x88, 0x68, 0x4b, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_8[48] =
+    {
+        0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+        0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+        0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+        0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+        0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+        0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+        0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+        0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask25_9[54] =
+    {
+        0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+        0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+        0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+        0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+        0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+        0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+        0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+        0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+        0x00, 0xe4, 0x01, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_10[60] =
+    {
+        0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+        0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+        0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+        0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+        0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+        0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+        0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+        0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+        0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+        0x62, 0x23, 0x11, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_11[66] =
+    {
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+        0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+        0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+        0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+        0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+        0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+        0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+        0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+        0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+        0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_12[72] =
+    {
+        0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+        0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+        0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+        0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+        0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+        0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+        0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+        0x52, 0x22, 0x91, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_13[78] =
+    {
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x81, 0x24, 0x09, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x30, 0x81, 0x84, 0x00, 0x00, 0x00,
+        0x21, 0x11, 0x08, 0x80, 0x00, 0x00,
+        0x03, 0xc0, 0x1e, 0x00, 0x00, 0x00,
+        0xe8, 0x07, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x10, 0x50, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x00, 0xc0, 0x00, 0x00,
+        0x04, 0x90, 0x24, 0x80, 0x00, 0x00,
+        0x08, 0xa8, 0x45, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_14[84] =
+    {
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+        0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+        0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+        0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+        0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+        0x84, 0xb4, 0x25, 0x80, 0x00, 0x00,
+        0xd1, 0x26, 0x89, 0x00, 0x00, 0x00,
+        0x46, 0xd2, 0x36, 0x80, 0x00, 0x00,
+        0x15, 0x48, 0xaa, 0x40, 0x00, 0x00,
+        0x21, 0x71, 0x0b, 0x80, 0x00, 0x00,
+        0x28, 0xc9, 0x46, 0x40, 0x00, 0x00,
+        0xaa, 0x25, 0x51, 0x00, 0x00, 0x00,
+        0x5d, 0xa7, 0x78, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_15[90] =
+    {
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+        0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+        0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+        0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+        0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+        0x84, 0xb4, 0x25, 0x80, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+        0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+        0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+        0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+        0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+        0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_16[96] =
+    {
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+        0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+        0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+        0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+        0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+        0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00,
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+        0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+        0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+        0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+        0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+        0x84, 0xb4, 0x25, 0x80, 0x00, 0x00,
+        0x3c, 0xaf, 0x88, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_17[102] =
+    {
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+        0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+        0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+        0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+        0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+        0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00,
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+        0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+        0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+        0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_18[108] =
+    {
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+        0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+        0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+        0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+        0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+        0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+        0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+        0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+        0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00,
+        0xaa, 0x0c, 0x83, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_19[114] =
+    {
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+        0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+        0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+        0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00,
+        0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+        0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+        0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+        0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+        0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+        0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+        0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+        0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+        0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+        0x62, 0x23, 0x11, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_2[12] =
+    {
+        0xec, 0xc7, 0x66, 0x00, 0x00, 0x00,
+        0x1b, 0x38, 0xd9, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_20[120] =
+    {
+        0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+        0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+        0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+        0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+        0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+        0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+        0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+        0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+        0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+        0x62, 0x23, 0x11, 0x00, 0x00, 0x00,
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+        0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+        0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+        0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00,
+        0xf4, 0x08, 0xec, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_21[126] =
+    {
+        0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+        0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+        0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+        0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+        0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+        0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+        0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+        0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+        0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+        0x62, 0x23, 0x11, 0x00, 0x00, 0x00,
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+        0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+        0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+        0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+        0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+        0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+        0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+        0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+        0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+        0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_22[132] =
+    {
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+        0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+        0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+        0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+        0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+        0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+        0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+        0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+        0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+        0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00,
+        0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+        0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+        0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+        0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+        0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+        0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+        0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+        0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+        0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+        0x62, 0x23, 0x11, 0x00, 0x00, 0x00,
+        0x13, 0xc6, 0x6b, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_23[138] =
+    {
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+        0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+        0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+        0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+        0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+        0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+        0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+        0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+        0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+        0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00,
+        0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+        0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+        0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+        0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+        0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+        0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+        0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+        0x52, 0x22, 0x91, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_24[144] =
+    {
+        0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+        0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+        0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+        0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+        0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+        0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+        0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+        0x52, 0x22, 0x91, 0x00, 0x00, 0x00,
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+        0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+        0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+        0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+        0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+        0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+        0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+        0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+        0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+        0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00,
+        0xdb, 0x4d, 0xd8, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_25[150] =
+    {
+        0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+        0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+        0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+        0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+        0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+        0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+        0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+        0x52, 0x22, 0x91, 0x00, 0x00, 0x00,
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x81, 0x24, 0x09, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x30, 0x81, 0x84, 0x00, 0x00, 0x00,
+        0x21, 0x11, 0x08, 0x80, 0x00, 0x00,
+        0x03, 0xc0, 0x1e, 0x00, 0x00, 0x00,
+        0xe8, 0x07, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x10, 0x50, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x00, 0xc0, 0x00, 0x00,
+        0x04, 0x90, 0x24, 0x80, 0x00, 0x00,
+        0x08, 0xa8, 0x45, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_26[156] =
+    {
+        0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x81, 0x24, 0x09, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x30, 0x81, 0x84, 0x00, 0x00, 0x00,
+        0x21, 0x11, 0x08, 0x80, 0x00, 0x00,
+        0x03, 0xc0, 0x1e, 0x00, 0x00, 0x00,
+        0xe8, 0x07, 0x40, 0x00, 0x00, 0x00,
+        0x0a, 0x10, 0x50, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x00, 0xc0, 0x00, 0x00,
+        0x04, 0x90, 0x24, 0x80, 0x00, 0x00,
+        0x08, 0xa8, 0x45, 0x40, 0x00, 0x00,
+        0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+        0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+        0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+        0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+        0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+        0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+        0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+        0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+        0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+        0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+        0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+        0x52, 0x22, 0x91, 0x00, 0x00, 0x00,
+        0xf9, 0x13, 0x51, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_3[18] =
+    {
+        0x99, 0xb4, 0xcd, 0x80, 0x00, 0x00,
+        0x46, 0xda, 0x36, 0xc0, 0x00, 0x00,
+        0x37, 0x29, 0xb9, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_4[24] =
+    {
+        0x49, 0xb2, 0x4d, 0x80, 0x00, 0x00,
+        0x26, 0xd1, 0x36, 0x80, 0x00, 0x00,
+        0x85, 0x6c, 0x2b, 0x40, 0x00, 0x00,
+        0x52, 0x5a, 0x92, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_5[30] =
+    {
+        0x51, 0x32, 0x89, 0x80, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x0c, 0x68, 0x63, 0x40, 0x00, 0x00,
+        0xa1, 0xc5, 0x0e, 0x00, 0x00, 0x00,
+        0x22, 0x99, 0x14, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_6[36] =
+    {
+        0xd1, 0x26, 0x89, 0x00, 0x00, 0x00,
+        0x46, 0xd2, 0x36, 0x80, 0x00, 0x00,
+        0x15, 0x48, 0xaa, 0x40, 0x00, 0x00,
+        0x21, 0x71, 0x0b, 0x80, 0x00, 0x00,
+        0x28, 0xc9, 0x46, 0x40, 0x00, 0x00,
+        0xaa, 0x25, 0x51, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_7[42] =
+    {
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+        0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+        0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+        0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+        0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+        0x84, 0xb4, 0x25, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_8[48] =
+    {
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+        0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+        0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+        0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+        0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+        0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+        0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask26_9[54] =
+    {
+        0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+        0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+        0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+        0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+        0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+        0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+        0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xe0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_10[60] =
+    {
+        0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+        0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+        0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+        0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+        0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+        0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+        0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+        0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+        0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+        0x62, 0x21, 0x18, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_11[66] =
+    {
+        0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+        0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+        0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+        0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+        0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+        0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+        0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+        0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+        0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+        0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+        0x44, 0x19, 0x16, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_12[72] =
+    {
+        0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+        0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+        0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+        0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+        0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+        0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+        0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+        0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+        0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+        0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+        0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+        0x52, 0x20, 0x90, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_13[78] =
+    {
+        0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+        0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+        0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+        0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+        0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+        0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+        0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+        0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+        0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+        0x08, 0xac, 0x06, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_14[84] =
+    {
+        0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+        0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+        0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+        0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+        0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+        0x84, 0xb5, 0x54, 0x40, 0x00, 0x00,
+        0xd1, 0x22, 0x52, 0xa0, 0x00, 0x00,
+        0x46, 0xd4, 0xaa, 0x40, 0x00, 0x00,
+        0x15, 0x48, 0xa5, 0xa0, 0x00, 0x00,
+        0x21, 0x72, 0x8d, 0x40, 0x00, 0x00,
+        0x28, 0xc9, 0x13, 0x60, 0x00, 0x00,
+        0xaa, 0x24, 0x44, 0x60, 0x00, 0x00,
+        0x0a, 0xe7, 0x3b, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_15[90] =
+    {
+        0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+        0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+        0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+        0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+        0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+        0x84, 0xb5, 0x54, 0x40, 0x00, 0x00,
+        0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+        0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+        0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+        0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+        0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+        0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+        0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+        0xa1, 0x43, 0x90, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_16[96] =
+    {
+        0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+        0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+        0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+        0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+        0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+        0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+        0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+        0xa1, 0x43, 0x90, 0x00, 0x00, 0x00,
+        0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+        0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+        0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+        0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+        0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+        0x84, 0xb5, 0x54, 0x40, 0x00, 0x00,
+        0x01, 0x50, 0xfb, 0xe0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_17[102] =
+    {
+        0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+        0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+        0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+        0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+        0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+        0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+        0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+        0xa1, 0x43, 0x90, 0x00, 0x00, 0x00,
+        0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+        0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+        0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+        0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+        0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+        0x40, 0x1c, 0x42, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_18[108] =
+    {
+        0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+        0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+        0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+        0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+        0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+        0x40, 0x1c, 0x42, 0x40, 0x00, 0x00,
+        0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+        0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+        0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+        0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+        0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+        0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+        0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+        0xa1, 0x43, 0x90, 0x00, 0x00, 0x00,
+        0x53, 0xc3, 0x33, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_19[114] =
+    {
+        0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+        0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+        0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+        0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+        0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+        0x40, 0x1c, 0x42, 0x40, 0x00, 0x00,
+        0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+        0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+        0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+        0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+        0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+        0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+        0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+        0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+        0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+        0x62, 0x21, 0x18, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_2[12] =
+    {
+        0xec, 0xc7, 0x67, 0x40, 0x00, 0x00,
+        0x1b, 0x39, 0xdc, 0xe0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_20[120] =
+    {
+        0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+        0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+        0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+        0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+        0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+        0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+        0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+        0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+        0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+        0x62, 0x21, 0x18, 0x80, 0x00, 0x00,
+        0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+        0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+        0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+        0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+        0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+        0x40, 0x1c, 0x42, 0x40, 0x00, 0x00,
+        0xcb, 0xff, 0x6f, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_21[126] =
+    {
+        0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+        0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+        0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+        0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+        0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+        0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+        0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+        0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+        0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+        0x62, 0x21, 0x18, 0x80, 0x00, 0x00,
+        0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+        0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+        0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+        0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+        0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+        0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+        0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+        0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+        0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+        0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+        0x44, 0x19, 0x16, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_22[132] =
+    {
+        0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+        0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+        0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+        0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+        0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+        0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+        0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+        0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+        0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+        0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+        0x44, 0x19, 0x16, 0x00, 0x00, 0x00,
+        0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+        0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+        0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+        0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+        0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+        0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+        0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+        0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+        0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+        0x62, 0x21, 0x18, 0x80, 0x00, 0x00,
+        0xf5, 0x2d, 0x52, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_23[138] =
+    {
+        0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+        0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+        0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+        0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+        0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+        0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+        0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+        0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+        0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+        0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+        0x44, 0x19, 0x16, 0x00, 0x00, 0x00,
+        0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+        0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+        0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+        0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+        0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+        0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+        0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+        0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+        0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+        0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+        0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+        0x52, 0x20, 0x90, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_24[144] =
+    {
+        0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+        0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+        0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+        0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+        0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+        0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+        0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+        0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+        0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+        0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+        0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+        0x52, 0x20, 0x90, 0x60, 0x00, 0x00,
+        0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+        0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+        0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+        0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+        0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+        0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+        0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+        0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+        0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+        0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+        0x44, 0x19, 0x16, 0x00, 0x00, 0x00,
+        0xa2, 0x85, 0xdb, 0xa0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_25[150] =
+    {
+        0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+        0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+        0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+        0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+        0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+        0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+        0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+        0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+        0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+        0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+        0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+        0x52, 0x20, 0x90, 0x60, 0x00, 0x00,
+        0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+        0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+        0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+        0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+        0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+        0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+        0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+        0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+        0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+        0x08, 0xac, 0x06, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_26[156] =
+    {
+        0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+        0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+        0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+        0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+        0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+        0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+        0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+        0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+        0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+        0x08, 0xac, 0x06, 0x00, 0x00, 0x00,
+        0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+        0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+        0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+        0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+        0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+        0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+        0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+        0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+        0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+        0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+        0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+        0x52, 0x20, 0x90, 0x60, 0x00, 0x00,
+        0xcd, 0x41, 0xa2, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_27[162] =
+    {
+        0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+        0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+        0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+        0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+        0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+        0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+        0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+        0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+        0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+        0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+        0x08, 0xac, 0x06, 0x00, 0x00, 0x00,
+        0x51, 0x22, 0x02, 0xa0, 0x00, 0x00,
+        0x66, 0x40, 0xaa, 0x00, 0x00, 0x00,
+        0x05, 0x4e, 0x00, 0x20, 0x00, 0x00,
+        0x81, 0x21, 0x40, 0x80, 0x00, 0x00,
+        0x94, 0x00, 0x28, 0x60, 0x00, 0x00,
+        0x30, 0x83, 0x24, 0x00, 0x00, 0x00,
+        0x21, 0x14, 0x0c, 0x00, 0x00, 0x00,
+        0x03, 0xc0, 0x84, 0xc0, 0x00, 0x00,
+        0xe8, 0x04, 0x21, 0x00, 0x00, 0x00,
+        0x0a, 0x10, 0x91, 0x80, 0x00, 0x00,
+        0x80, 0x1b, 0x10, 0x00, 0x00, 0x00,
+        0x04, 0x91, 0x43, 0x00, 0x00, 0x00,
+        0x08, 0xa8, 0x70, 0x40, 0x00, 0x00,
+        0x9c, 0xc0, 0x84, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_3[18] =
+    {
+        0x99, 0xb5, 0x66, 0xc0, 0x00, 0x00,
+        0x46, 0xda, 0xab, 0x60, 0x00, 0x00,
+        0x37, 0x29, 0x3d, 0xa0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_4[24] =
+    {
+        0x49, 0xb1, 0x66, 0xc0, 0x00, 0x00,
+        0x26, 0xd4, 0x9b, 0x40, 0x00, 0x00,
+        0x85, 0x68, 0xd5, 0xa0, 0x00, 0x00,
+        0x52, 0x5a, 0x39, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_5[30] =
+    {
+        0x51, 0x33, 0x26, 0xc0, 0x00, 0x00,
+        0x66, 0x45, 0x2b, 0x40, 0x00, 0x00,
+        0x0c, 0x6a, 0x95, 0xa0, 0x00, 0x00,
+        0xa1, 0xc0, 0xed, 0x40, 0x00, 0x00,
+        0x22, 0x9c, 0xe2, 0xa0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_6[36] =
+    {
+        0xd1, 0x22, 0x52, 0xa0, 0x00, 0x00,
+        0x46, 0xd4, 0xaa, 0x40, 0x00, 0x00,
+        0x15, 0x48, 0xa5, 0xa0, 0x00, 0x00,
+        0x21, 0x72, 0x8d, 0x40, 0x00, 0x00,
+        0x28, 0xc9, 0x13, 0x60, 0x00, 0x00,
+        0xaa, 0x24, 0x44, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_7[42] =
+    {
+        0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+        0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+        0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+        0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+        0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+        0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+        0x84, 0xb5, 0x54, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_8[48] =
+    {
+        0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+        0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+        0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+        0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+        0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+        0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+        0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+        0xa1, 0x43, 0x90, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask27_9[54] =
+    {
+        0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+        0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+        0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+        0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+        0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+        0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+        0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+        0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+        0x40, 0x1c, 0x42, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xf0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_10[60] =
+    {
+        0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+        0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+        0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+        0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+        0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+        0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+        0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+        0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x10, 0x8c, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_11[66] =
+    {
+        0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+        0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+        0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+        0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+        0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+        0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+        0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+        0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+        0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+        0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+        0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_12[72] =
+    {
+        0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+        0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+        0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+        0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+        0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+        0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+        0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+        0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+        0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+        0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+        0x12, 0x0c, 0x48, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_13[78] =
+    {
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+        0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+        0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+        0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+        0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+        0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+        0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+        0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+        0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+        0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x80, 0xc2, 0x03, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_14[84] =
+    {
+        0x40, 0x55, 0x01, 0x50, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x00, 0x00, 0x00,
+        0xc0, 0x07, 0x00, 0x10, 0x00, 0x00,
+        0x28, 0x10, 0xa0, 0x40, 0x00, 0x00,
+        0x05, 0x0c, 0x14, 0x30, 0x00, 0x00,
+        0x64, 0x81, 0x92, 0x00, 0x00, 0x00,
+        0x81, 0x82, 0x06, 0x00, 0x00, 0x00,
+        0x10, 0x98, 0x42, 0x60, 0x00, 0x00,
+        0x84, 0x22, 0x10, 0x80, 0x00, 0x00,
+        0x12, 0x30, 0x48, 0xc0, 0x00, 0x00,
+        0x62, 0x01, 0x88, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0xa1, 0x80, 0x00, 0x00,
+        0x0e, 0x08, 0x38, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0x42, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_15[90] =
+    {
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0xb9, 0x22, 0xe4, 0x80, 0x00, 0x00,
+        0x18, 0xb4, 0x62, 0xd0, 0x00, 0x00,
+        0x54, 0x99, 0x52, 0x60, 0x00, 0x00,
+        0x06, 0x6c, 0x19, 0xb0, 0x00, 0x00,
+        0x85, 0x56, 0x15, 0x50, 0x00, 0x00,
+        0xaa, 0x8a, 0xaa, 0x20, 0x00, 0x00,
+        0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+        0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+        0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+        0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+        0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+        0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+        0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+        0x72, 0x01, 0xc8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_16[96] =
+    {
+        0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+        0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+        0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+        0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+        0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+        0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+        0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+        0x72, 0x01, 0xc8, 0x00, 0x00, 0x00,
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0xb9, 0x22, 0xe4, 0x80, 0x00, 0x00,
+        0x18, 0xb4, 0x62, 0xd0, 0x00, 0x00,
+        0x54, 0x99, 0x52, 0x60, 0x00, 0x00,
+        0x06, 0x6c, 0x19, 0xb0, 0x00, 0x00,
+        0x85, 0x56, 0x15, 0x50, 0x00, 0x00,
+        0xaa, 0x8a, 0xaa, 0x20, 0x00, 0x00,
+        0xed, 0x76, 0x36, 0x50, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_17[102] =
+    {
+        0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+        0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+        0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+        0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+        0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+        0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+        0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+        0x72, 0x01, 0xc8, 0x00, 0x00, 0x00,
+        0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+        0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+        0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+        0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+        0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+        0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+        0x88, 0x4a, 0x21, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_18[108] =
+    {
+        0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+        0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+        0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+        0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+        0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+        0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+        0x88, 0x4a, 0x21, 0x20, 0x00, 0x00,
+        0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+        0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+        0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+        0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+        0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+        0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+        0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+        0x72, 0x01, 0xc8, 0x00, 0x00, 0x00,
+        0x6e, 0x9f, 0x98, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_19[114] =
+    {
+        0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+        0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+        0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+        0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+        0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+        0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+        0x88, 0x4a, 0x21, 0x20, 0x00, 0x00,
+        0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+        0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+        0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+        0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+        0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+        0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+        0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+        0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x10, 0x8c, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_2[12] =
+    {
+        0xec, 0xeb, 0xb3, 0xa0, 0x00, 0x00,
+        0x3b, 0x9c, 0xee, 0x70, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_20[120] =
+    {
+        0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+        0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+        0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+        0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+        0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+        0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+        0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+        0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x10, 0x8c, 0x40, 0x00, 0x00,
+        0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+        0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+        0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+        0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+        0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+        0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+        0x88, 0x4a, 0x21, 0x20, 0x00, 0x00,
+        0xea, 0x1b, 0x3a, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_21[126] =
+    {
+        0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+        0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+        0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+        0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+        0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+        0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+        0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+        0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x10, 0x8c, 0x40, 0x00, 0x00,
+        0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+        0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+        0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+        0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+        0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+        0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+        0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+        0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+        0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+        0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+        0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_22[132] =
+    {
+        0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+        0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+        0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+        0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+        0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+        0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+        0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+        0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+        0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+        0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+        0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00,
+        0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+        0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+        0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+        0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+        0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+        0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+        0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+        0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+        0x23, 0x10, 0x8c, 0x40, 0x00, 0x00,
+        0x45, 0x05, 0x10, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_23[138] =
+    {
+        0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+        0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+        0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+        0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+        0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+        0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+        0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+        0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+        0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+        0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+        0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00,
+        0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+        0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+        0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+        0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+        0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+        0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+        0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+        0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+        0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+        0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+        0x12, 0x0c, 0x48, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_24[144] =
+    {
+        0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+        0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+        0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+        0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+        0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+        0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+        0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+        0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+        0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+        0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+        0x12, 0x0c, 0x48, 0x30, 0x00, 0x00,
+        0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+        0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+        0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+        0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+        0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+        0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+        0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+        0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+        0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+        0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+        0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00,
+        0x6f, 0xd8, 0xee, 0xa0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_25[150] =
+    {
+        0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+        0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+        0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+        0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+        0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+        0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+        0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+        0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+        0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+        0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+        0x12, 0x0c, 0x48, 0x30, 0x00, 0x00,
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+        0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+        0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+        0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+        0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+        0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+        0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+        0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+        0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+        0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x80, 0xc2, 0x03, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_26[156] =
+    {
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+        0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+        0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+        0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+        0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+        0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+        0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+        0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+        0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+        0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x80, 0xc2, 0x03, 0x00, 0x00, 0x00,
+        0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+        0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+        0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+        0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+        0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+        0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+        0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+        0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+        0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+        0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+        0x12, 0x0c, 0x48, 0x30, 0x00, 0x00,
+        0xf1, 0x64, 0xbe, 0x40, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_27[162] =
+    {
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+        0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+        0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+        0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+        0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+        0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+        0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+        0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+        0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+        0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x80, 0xc2, 0x03, 0x00, 0x00, 0x00,
+        0x40, 0x55, 0x01, 0x50, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x00, 0x00, 0x00,
+        0xc0, 0x07, 0x00, 0x10, 0x00, 0x00,
+        0x28, 0x10, 0xa0, 0x40, 0x00, 0x00,
+        0x05, 0x0c, 0x14, 0x30, 0x00, 0x00,
+        0x64, 0x81, 0x92, 0x00, 0x00, 0x00,
+        0x81, 0x82, 0x06, 0x00, 0x00, 0x00,
+        0x10, 0x98, 0x42, 0x60, 0x00, 0x00,
+        0x84, 0x22, 0x10, 0x80, 0x00, 0x00,
+        0x12, 0x30, 0x48, 0xc0, 0x00, 0x00,
+        0x62, 0x01, 0x88, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0xa1, 0x80, 0x00, 0x00,
+        0x0e, 0x08, 0x38, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0x42, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_28[168] =
+    {
+        0x40, 0x55, 0x01, 0x50, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x00, 0x00, 0x00,
+        0xc0, 0x07, 0x00, 0x10, 0x00, 0x00,
+        0x28, 0x10, 0xa0, 0x40, 0x00, 0x00,
+        0x05, 0x0c, 0x14, 0x30, 0x00, 0x00,
+        0x64, 0x81, 0x92, 0x00, 0x00, 0x00,
+        0x81, 0x82, 0x06, 0x00, 0x00, 0x00,
+        0x10, 0x98, 0x42, 0x60, 0x00, 0x00,
+        0x84, 0x22, 0x10, 0x80, 0x00, 0x00,
+        0x12, 0x30, 0x48, 0xc0, 0x00, 0x00,
+        0x62, 0x01, 0x88, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0xa1, 0x80, 0x00, 0x00,
+        0x0e, 0x08, 0x38, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0x42, 0x10, 0x00, 0x00,
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+        0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+        0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+        0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+        0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+        0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+        0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+        0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+        0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+        0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+        0x80, 0xc2, 0x03, 0x00, 0x00, 0x00,
+        0x36, 0x4f, 0x1f, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_3[18] =
+    {
+        0xac, 0xda, 0xb3, 0x60, 0x00, 0x00,
+        0x55, 0x6d, 0x55, 0xb0, 0x00, 0x00,
+        0x27, 0xb4, 0x9e, 0xd0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_4[24] =
+    {
+        0x2c, 0xd8, 0xb3, 0x60, 0x00, 0x00,
+        0x93, 0x6a, 0x4d, 0xa0, 0x00, 0x00,
+        0x1a, 0xb4, 0x6a, 0xd0, 0x00, 0x00,
+        0x47, 0x2d, 0x1c, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_5[30] =
+    {
+        0x64, 0xd9, 0x93, 0x60, 0x00, 0x00,
+        0xa5, 0x6a, 0x95, 0xa0, 0x00, 0x00,
+        0x52, 0xb5, 0x4a, 0xd0, 0x00, 0x00,
+        0x1d, 0xa8, 0x76, 0xa0, 0x00, 0x00,
+        0x9c, 0x56, 0x71, 0x50, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_6[36] =
+    {
+        0x4a, 0x55, 0x29, 0x50, 0x00, 0x00,
+        0x95, 0x4a, 0x55, 0x20, 0x00, 0x00,
+        0x14, 0xb4, 0x52, 0xd0, 0x00, 0x00,
+        0x51, 0xa9, 0x46, 0xa0, 0x00, 0x00,
+        0x22, 0x6c, 0x89, 0xb0, 0x00, 0x00,
+        0x88, 0x8e, 0x22, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_7[42] =
+    {
+        0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+        0xb9, 0x22, 0xe4, 0x80, 0x00, 0x00,
+        0x18, 0xb4, 0x62, 0xd0, 0x00, 0x00,
+        0x54, 0x99, 0x52, 0x60, 0x00, 0x00,
+        0x06, 0x6c, 0x19, 0xb0, 0x00, 0x00,
+        0x85, 0x56, 0x15, 0x50, 0x00, 0x00,
+        0xaa, 0x8a, 0xaa, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_8[48] =
+    {
+        0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+        0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+        0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+        0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+        0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+        0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+        0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+        0x72, 0x01, 0xc8, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask28_9[54] =
+    {
+        0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+        0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+        0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+        0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+        0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+        0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+        0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+        0x88, 0x4a, 0x21, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xf8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_10[60] =
+    {
+        0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+        0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+        0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+        0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+        0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+        0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+        0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+        0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+        0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+        0x23, 0x10, 0x09, 0x88, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_11[66] =
+    {
+        0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+        0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+        0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+        0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+        0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+        0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+        0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+        0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+        0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+        0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+        0x22, 0xc2, 0x61, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_12[72] =
+    {
+        0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+        0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+        0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+        0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+        0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+        0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+        0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+        0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+        0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+        0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+        0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_13[78] =
+    {
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+        0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+        0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+        0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+        0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+        0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+        0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+        0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+        0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+        0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+        0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+        0x80, 0xc0, 0x28, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_14[84] =
+    {
+        0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+        0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+        0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+        0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+        0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+        0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+        0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+        0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+        0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+        0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+        0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0xa2, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_15[90] =
+    {
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0xb9, 0x22, 0xc4, 0x50, 0x00, 0x00,
+        0x18, 0xb4, 0x61, 0xa8, 0x00, 0x00,
+        0x54, 0x99, 0x13, 0x50, 0x00, 0x00,
+        0x06, 0x6c, 0x4d, 0x90, 0x00, 0x00,
+        0x85, 0x55, 0x24, 0x68, 0x00, 0x00,
+        0xaa, 0x8a, 0x1a, 0x30, 0x00, 0x00,
+        0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+        0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+        0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+        0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+        0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+        0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+        0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+        0x72, 0x01, 0x96, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_16[96] =
+    {
+        0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+        0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+        0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+        0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+        0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+        0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+        0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+        0x72, 0x01, 0x96, 0x00, 0x00, 0x00,
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0xb9, 0x22, 0xc4, 0x50, 0x00, 0x00,
+        0x18, 0xb4, 0x61, 0xa8, 0x00, 0x00,
+        0x54, 0x99, 0x13, 0x50, 0x00, 0x00,
+        0x06, 0x6c, 0x4d, 0x90, 0x00, 0x00,
+        0x85, 0x55, 0x24, 0x68, 0x00, 0x00,
+        0xaa, 0x8a, 0x1a, 0x30, 0x00, 0x00,
+        0x0d, 0x2c, 0xf2, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_17[102] =
+    {
+        0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+        0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+        0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+        0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+        0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+        0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+        0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+        0x72, 0x01, 0x96, 0x00, 0x00, 0x00,
+        0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+        0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+        0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+        0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+        0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+        0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+        0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+        0x88, 0x4a, 0x41, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_18[108] =
+    {
+        0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+        0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+        0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+        0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+        0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+        0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+        0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+        0x88, 0x4a, 0x41, 0x20, 0x00, 0x00,
+        0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+        0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+        0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+        0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+        0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+        0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+        0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+        0x72, 0x01, 0x96, 0x00, 0x00, 0x00,
+        0x71, 0x36, 0xf2, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_19[114] =
+    {
+        0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+        0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+        0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+        0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+        0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+        0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+        0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+        0x88, 0x4a, 0x41, 0x20, 0x00, 0x00,
+        0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+        0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+        0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+        0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+        0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+        0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+        0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+        0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+        0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+        0x23, 0x10, 0x09, 0x88, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_2[12] =
+    {
+        0xec, 0xeb, 0xb3, 0xa8, 0x00, 0x00,
+        0x3b, 0x9e, 0xee, 0x70, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_20[120] =
+    {
+        0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+        0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+        0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+        0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+        0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+        0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+        0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+        0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+        0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+        0x23, 0x10, 0x09, 0x88, 0x00, 0x00,
+        0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+        0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+        0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+        0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+        0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+        0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+        0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+        0x88, 0x4a, 0x41, 0x20, 0x00, 0x00,
+        0xe7, 0xec, 0xdc, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_21[126] =
+    {
+        0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+        0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+        0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+        0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+        0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+        0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+        0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+        0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+        0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+        0x23, 0x10, 0x09, 0x88, 0x00, 0x00,
+        0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+        0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+        0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+        0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+        0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+        0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+        0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+        0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+        0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+        0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+        0x22, 0xc2, 0x61, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_22[132] =
+    {
+        0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+        0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+        0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+        0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+        0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+        0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+        0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+        0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+        0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+        0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+        0x22, 0xc2, 0x61, 0x00, 0x00, 0x00,
+        0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+        0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+        0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+        0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+        0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+        0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+        0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+        0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+        0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+        0x23, 0x10, 0x09, 0x88, 0x00, 0x00,
+        0x1c, 0x90, 0xa9, 0xa0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_23[138] =
+    {
+        0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+        0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+        0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+        0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+        0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+        0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+        0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+        0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+        0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+        0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+        0x22, 0xc2, 0x61, 0x00, 0x00, 0x00,
+        0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+        0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+        0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+        0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+        0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+        0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+        0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+        0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+        0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+        0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+        0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_24[144] =
+    {
+        0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+        0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+        0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+        0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+        0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+        0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+        0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+        0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+        0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+        0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+        0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00,
+        0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+        0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+        0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+        0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+        0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+        0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+        0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+        0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+        0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+        0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+        0x22, 0xc2, 0x61, 0x00, 0x00, 0x00,
+        0xbd, 0x86, 0x97, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_25[150] =
+    {
+        0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+        0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+        0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+        0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+        0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+        0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+        0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+        0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+        0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+        0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+        0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00,
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+        0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+        0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+        0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+        0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+        0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+        0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+        0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+        0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+        0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+        0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+        0x80, 0xc0, 0x28, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_26[156] =
+    {
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+        0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+        0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+        0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+        0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+        0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+        0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+        0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+        0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+        0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+        0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+        0x80, 0xc0, 0x28, 0x30, 0x00, 0x00,
+        0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+        0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+        0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+        0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+        0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+        0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+        0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+        0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+        0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+        0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+        0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+        0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00,
+        0xb5, 0x4c, 0xa9, 0x70, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_27[162] =
+    {
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+        0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+        0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+        0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+        0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+        0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+        0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+        0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+        0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+        0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+        0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+        0x80, 0xc0, 0x28, 0x30, 0x00, 0x00,
+        0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+        0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+        0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+        0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+        0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+        0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+        0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+        0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+        0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+        0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+        0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0xa2, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_28[168] =
+    {
+        0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+        0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+        0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+        0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+        0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+        0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+        0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+        0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+        0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+        0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+        0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0xa2, 0x20, 0x00, 0x00,
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+        0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+        0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+        0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+        0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+        0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+        0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+        0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+        0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+        0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+        0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+        0x80, 0xc0, 0x28, 0x30, 0x00, 0x00,
+        0xbe, 0x1f, 0x99, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_29[174] =
+    {
+        0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+        0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+        0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+        0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+        0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+        0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+        0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+        0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+        0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+        0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+        0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+        0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+        0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+        0x10, 0x84, 0xa2, 0x20, 0x00, 0x00,
+        0x40, 0x55, 0x88, 0x88, 0x00, 0x00,
+        0x15, 0x40, 0xc4, 0x40, 0x00, 0x00,
+        0xc0, 0x05, 0x60, 0x00, 0x00, 0x00,
+        0x28, 0x10, 0x04, 0x48, 0x00, 0x00,
+        0x05, 0x0e, 0x20, 0x80, 0x00, 0x00,
+        0x64, 0x81, 0x10, 0x08, 0x00, 0x00,
+        0x81, 0x80, 0xa4, 0x10, 0x00, 0x00,
+        0x10, 0x9a, 0x0a, 0x80, 0x00, 0x00,
+        0x84, 0x20, 0x28, 0x68, 0x00, 0x00,
+        0x12, 0x30, 0x47, 0x80, 0x00, 0x00,
+        0x62, 0x02, 0x10, 0x10, 0x00, 0x00,
+        0x28, 0x62, 0x19, 0x00, 0x00, 0x00,
+        0x0e, 0x08, 0x02, 0x18, 0x00, 0x00,
+        0x10, 0x85, 0x11, 0x20, 0x00, 0x00,
+        0x29, 0x50, 0x42, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_3[18] =
+    {
+        0xac, 0xda, 0xb2, 0x48, 0x00, 0x00,
+        0x55, 0x6d, 0x55, 0x28, 0x00, 0x00,
+        0x27, 0xb5, 0x0c, 0xd8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_4[24] =
+    {
+        0x2c, 0xd8, 0x96, 0xa8, 0x00, 0x00,
+        0x93, 0x6a, 0x55, 0x50, 0x00, 0x00,
+        0x1a, 0xb4, 0x69, 0xa8, 0x00, 0x00,
+        0x47, 0x2d, 0x0f, 0x50, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_5[30] =
+    {
+        0x64, 0xd9, 0x92, 0x88, 0x00, 0x00,
+        0xa5, 0x68, 0x95, 0x50, 0x00, 0x00,
+        0x52, 0xb5, 0x25, 0xa0, 0x00, 0x00,
+        0x1d, 0xa9, 0x4e, 0x40, 0x00, 0x00,
+        0x9c, 0x56, 0x38, 0xc0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_6[36] =
+    {
+        0x4a, 0x55, 0x8a, 0x28, 0x00, 0x00,
+        0x95, 0x48, 0x55, 0x50, 0x00, 0x00,
+        0x14, 0xb5, 0x31, 0x18, 0x00, 0x00,
+        0x51, 0xa9, 0x4a, 0x50, 0x00, 0x00,
+        0x22, 0x6c, 0x8d, 0x90, 0x00, 0x00,
+        0x88, 0x8e, 0x29, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_7[42] =
+    {
+        0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+        0xb9, 0x22, 0xc4, 0x50, 0x00, 0x00,
+        0x18, 0xb4, 0x61, 0xa8, 0x00, 0x00,
+        0x54, 0x99, 0x13, 0x50, 0x00, 0x00,
+        0x06, 0x6c, 0x4d, 0x90, 0x00, 0x00,
+        0x85, 0x55, 0x24, 0x68, 0x00, 0x00,
+        0xaa, 0x8a, 0x1a, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_8[48] =
+    {
+        0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+        0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+        0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+        0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+        0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+        0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+        0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+        0x72, 0x01, 0x96, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask29_9[54] =
+    {
+        0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+        0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+        0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+        0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+        0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+        0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+        0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+        0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+        0x88, 0x4a, 0x41, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask2_1[2] =
+    {
+        0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask2_2[4] =
+    {
+        0xc0, 0x00,
+        0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xfc, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_10[60] =
+    {
+        0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+        0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+        0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+        0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+        0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+        0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+        0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+        0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+        0x02, 0x62, 0x04, 0xc4, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_11[66] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+        0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+        0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+        0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+        0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+        0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+        0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+        0x98, 0x41, 0x30, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_12[72] =
+    {
+        0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+        0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+        0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+        0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+        0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+        0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+        0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+        0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+        0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+        0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+        0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+        0x72, 0x14, 0xe4, 0x28, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_13[78] =
+    {
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+        0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+        0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+        0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+        0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+        0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+        0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+        0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+        0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+        0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+        0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+        0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_14[84] =
+    {
+        0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+        0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+        0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+        0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+        0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+        0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+        0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+        0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+        0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+        0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+        0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+        0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+        0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+        0x28, 0x88, 0x51, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_15[90] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x31, 0x10, 0x62, 0x20, 0x00, 0x00,
+        0x58, 0x00, 0xb0, 0x00, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x88, 0x21, 0x10, 0x40, 0x00, 0x00,
+        0x44, 0x02, 0x88, 0x04, 0x00, 0x00,
+        0x29, 0x04, 0x52, 0x08, 0x00, 0x00,
+        0x82, 0xa1, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x1a, 0x14, 0x34, 0x00, 0x00,
+        0x11, 0xe0, 0x23, 0xc0, 0x00, 0x00,
+        0x84, 0x05, 0x08, 0x08, 0x00, 0x00,
+        0x86, 0x41, 0x0c, 0x80, 0x00, 0x00,
+        0x00, 0x86, 0x01, 0x0c, 0x00, 0x00,
+        0x44, 0x48, 0x88, 0x90, 0x00, 0x00,
+        0x10, 0x98, 0x21, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_16[96] =
+    {
+        0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+        0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+        0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+        0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+        0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+        0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+        0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+        0x65, 0x80, 0xcb, 0x00, 0x00, 0x00,
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0xb1, 0x15, 0x62, 0x28, 0x00, 0x00,
+        0x18, 0x6a, 0x30, 0xd4, 0x00, 0x00,
+        0x44, 0xd4, 0x89, 0xa8, 0x00, 0x00,
+        0x13, 0x64, 0x26, 0xc8, 0x00, 0x00,
+        0x49, 0x1a, 0x92, 0x34, 0x00, 0x00,
+        0x86, 0x8d, 0x0d, 0x18, 0x00, 0x00,
+        0xce, 0x58, 0xa0, 0x14, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_17[102] =
+    {
+        0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+        0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+        0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+        0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+        0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+        0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+        0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+        0x65, 0x80, 0xcb, 0x00, 0x00, 0x00,
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+        0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+        0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+        0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+        0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+        0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+        0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+        0x90, 0x49, 0x20, 0x90, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_18[108] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+        0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+        0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+        0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+        0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+        0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+        0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+        0x90, 0x49, 0x20, 0x90, 0x00, 0x00,
+        0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+        0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+        0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+        0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+        0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+        0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+        0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+        0x65, 0x80, 0xcb, 0x00, 0x00, 0x00,
+        0x00, 0xb2, 0x47, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_19[114] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+        0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+        0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+        0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+        0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+        0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+        0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+        0x90, 0x49, 0x20, 0x90, 0x00, 0x00,
+        0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+        0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+        0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+        0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+        0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+        0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+        0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+        0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+        0x02, 0x62, 0x04, 0xc4, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_2[12] =
+    {
+        0xec, 0xeb, 0xd9, 0xd4, 0x00, 0x00,
+        0xbb, 0x9d, 0x77, 0x38, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_20[120] =
+    {
+        0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+        0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+        0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+        0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+        0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+        0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+        0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+        0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+        0x02, 0x62, 0x04, 0xc4, 0x00, 0x00,
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+        0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+        0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+        0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+        0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+        0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+        0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+        0x90, 0x49, 0x20, 0x90, 0x00, 0x00,
+        0x51, 0x88, 0xd1, 0x78, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_21[126] =
+    {
+        0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+        0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+        0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+        0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+        0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+        0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+        0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+        0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+        0x02, 0x62, 0x04, 0xc4, 0x00, 0x00,
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+        0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+        0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+        0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+        0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+        0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+        0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+        0x98, 0x41, 0x30, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_22[132] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+        0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+        0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+        0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+        0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+        0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+        0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+        0x98, 0x41, 0x30, 0x80, 0x00, 0x00,
+        0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+        0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+        0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+        0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+        0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+        0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+        0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+        0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+        0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+        0x02, 0x62, 0x04, 0xc4, 0x00, 0x00,
+        0x03, 0x10, 0x18, 0x74, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_23[138] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+        0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+        0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+        0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+        0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+        0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+        0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+        0x98, 0x41, 0x30, 0x80, 0x00, 0x00,
+        0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+        0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+        0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+        0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+        0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+        0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+        0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+        0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+        0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+        0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+        0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+        0x72, 0x14, 0xe4, 0x28, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_24[144] =
+    {
+        0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+        0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+        0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+        0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+        0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+        0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+        0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+        0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+        0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+        0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+        0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+        0x72, 0x14, 0xe4, 0x28, 0x00, 0x00,
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+        0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+        0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+        0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+        0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+        0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+        0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+        0x98, 0x41, 0x30, 0x80, 0x00, 0x00,
+        0xf3, 0x4d, 0x1c, 0x70, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_25[150] =
+    {
+        0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+        0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+        0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+        0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+        0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+        0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+        0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+        0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+        0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+        0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+        0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+        0x72, 0x14, 0xe4, 0x28, 0x00, 0x00,
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+        0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+        0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+        0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+        0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+        0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+        0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+        0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+        0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+        0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+        0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+        0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_26[156] =
+    {
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+        0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+        0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+        0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+        0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+        0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+        0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+        0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+        0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+        0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+        0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+        0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00,
+        0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+        0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+        0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+        0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+        0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+        0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+        0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+        0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+        0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+        0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+        0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+        0x72, 0x14, 0xe4, 0x28, 0x00, 0x00,
+        0x83, 0x11, 0xad, 0xe8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_27[162] =
+    {
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+        0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+        0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+        0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+        0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+        0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+        0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+        0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+        0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+        0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+        0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+        0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00,
+        0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+        0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+        0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+        0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+        0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+        0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+        0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+        0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+        0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+        0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+        0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+        0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+        0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+        0x28, 0x88, 0x51, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_28[168] =
+    {
+        0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+        0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+        0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+        0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+        0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+        0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+        0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+        0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+        0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+        0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+        0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+        0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+        0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+        0x28, 0x88, 0x51, 0x10, 0x00, 0x00,
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+        0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+        0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+        0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+        0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+        0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+        0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+        0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+        0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+        0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+        0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+        0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00,
+        0x94, 0x59, 0x03, 0x18, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_29[174] =
+    {
+        0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+        0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+        0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+        0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+        0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+        0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+        0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+        0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+        0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+        0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+        0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+        0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+        0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+        0x28, 0x88, 0x51, 0x10, 0x00, 0x00,
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x31, 0x10, 0x62, 0x20, 0x00, 0x00,
+        0x58, 0x00, 0xb0, 0x00, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x88, 0x21, 0x10, 0x40, 0x00, 0x00,
+        0x44, 0x02, 0x88, 0x04, 0x00, 0x00,
+        0x29, 0x04, 0x52, 0x08, 0x00, 0x00,
+        0x82, 0xa1, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x1a, 0x14, 0x34, 0x00, 0x00,
+        0x11, 0xe0, 0x23, 0xc0, 0x00, 0x00,
+        0x84, 0x05, 0x08, 0x08, 0x00, 0x00,
+        0x86, 0x41, 0x0c, 0x80, 0x00, 0x00,
+        0x00, 0x86, 0x01, 0x0c, 0x00, 0x00,
+        0x44, 0x48, 0x88, 0x90, 0x00, 0x00,
+        0x10, 0x98, 0x21, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_3[18] =
+    {
+        0xac, 0x93, 0x59, 0x24, 0x00, 0x00,
+        0x55, 0x4a, 0xaa, 0x94, 0x00, 0x00,
+        0x43, 0x36, 0x86, 0x6c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_30[180] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x31, 0x10, 0x62, 0x20, 0x00, 0x00,
+        0x58, 0x00, 0xb0, 0x00, 0x00, 0x00,
+        0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+        0x88, 0x21, 0x10, 0x40, 0x00, 0x00,
+        0x44, 0x02, 0x88, 0x04, 0x00, 0x00,
+        0x29, 0x04, 0x52, 0x08, 0x00, 0x00,
+        0x82, 0xa1, 0x05, 0x40, 0x00, 0x00,
+        0x0a, 0x1a, 0x14, 0x34, 0x00, 0x00,
+        0x11, 0xe0, 0x23, 0xc0, 0x00, 0x00,
+        0x84, 0x05, 0x08, 0x08, 0x00, 0x00,
+        0x86, 0x41, 0x0c, 0x80, 0x00, 0x00,
+        0x00, 0x86, 0x01, 0x0c, 0x00, 0x00,
+        0x44, 0x48, 0x88, 0x90, 0x00, 0x00,
+        0x10, 0x98, 0x21, 0x30, 0x00, 0x00,
+        0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+        0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+        0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+        0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+        0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+        0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+        0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+        0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+        0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+        0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+        0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+        0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+        0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+        0x28, 0x88, 0x51, 0x10, 0x00, 0x00,
+        0x46, 0xf1, 0xef, 0xec, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_4[24] =
+    {
+        0x25, 0xaa, 0x4b, 0x54, 0x00, 0x00,
+        0x95, 0x55, 0x2a, 0xa8, 0x00, 0x00,
+        0x1a, 0x6a, 0x34, 0xd4, 0x00, 0x00,
+        0x43, 0xd4, 0x87, 0xa8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_5[30] =
+    {
+        0x64, 0xa2, 0xc9, 0x44, 0x00, 0x00,
+        0x25, 0x54, 0x4a, 0xa8, 0x00, 0x00,
+        0x49, 0x68, 0x92, 0xd0, 0x00, 0x00,
+        0x53, 0x90, 0xa7, 0x20, 0x00, 0x00,
+        0x8e, 0x31, 0x1c, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_6[36] =
+    {
+        0x62, 0x8a, 0xc5, 0x14, 0x00, 0x00,
+        0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+        0x4c, 0x46, 0x98, 0x8c, 0x00, 0x00,
+        0x52, 0x94, 0xa5, 0x28, 0x00, 0x00,
+        0x23, 0x64, 0x46, 0xc8, 0x00, 0x00,
+        0x8a, 0x59, 0x14, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_7[42] =
+    {
+        0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+        0xb1, 0x15, 0x62, 0x28, 0x00, 0x00,
+        0x18, 0x6a, 0x30, 0xd4, 0x00, 0x00,
+        0x44, 0xd4, 0x89, 0xa8, 0x00, 0x00,
+        0x13, 0x64, 0x26, 0xc8, 0x00, 0x00,
+        0x49, 0x1a, 0x92, 0x34, 0x00, 0x00,
+        0x86, 0x8d, 0x0d, 0x18, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_8[48] =
+    {
+        0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+        0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+        0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+        0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+        0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+        0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+        0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+        0x65, 0x80, 0xcb, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask30_9[54] =
+    {
+        0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+        0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+        0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+        0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+        0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+        0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+        0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+        0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+        0x90, 0x49, 0x20, 0x90, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xfe, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_10[60] =
+    {
+        0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+        0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+        0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+        0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+        0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+        0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+        0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+        0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+        0x02, 0x63, 0x09, 0x24, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_11[66] =
+    {
+        0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+        0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+        0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+        0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+        0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+        0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+        0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+        0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+        0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+        0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+        0x98, 0x40, 0xd0, 0x18, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_12[72] =
+    {
+        0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+        0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+        0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+        0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+        0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+        0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+        0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+        0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+        0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+        0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+        0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+        0x72, 0x15, 0xd1, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_13[78] =
+    {
+        0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+        0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+        0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+        0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+        0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+        0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+        0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+        0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+        0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+        0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+        0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+        0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+        0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_14[84] =
+    {
+        0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+        0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+        0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+        0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+        0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+        0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+        0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+        0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+        0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+        0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+        0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+        0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+        0x28, 0x89, 0x05, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_15[90] =
+    {
+        0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+        0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+        0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+        0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+        0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+        0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+        0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+        0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+        0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+        0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+        0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+        0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+        0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+        0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+        0x10, 0x98, 0x30, 0x44, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_16[96] =
+    {
+        0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+        0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+        0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+        0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+        0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+        0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+        0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+        0x65, 0x80, 0x2c, 0x60, 0x00, 0x00,
+        0x62, 0xa2, 0x8a, 0xa2, 0x00, 0x00,
+        0xb1, 0x14, 0x44, 0x54, 0x00, 0x00,
+        0x18, 0x6b, 0x22, 0x22, 0x00, 0x00,
+        0x44, 0xd4, 0x5c, 0x10, 0x00, 0x00,
+        0x13, 0x64, 0x90, 0x68, 0x00, 0x00,
+        0x49, 0x1b, 0x20, 0x52, 0x00, 0x00,
+        0x86, 0x8c, 0x13, 0x0c, 0x00, 0x00,
+        0x8d, 0x94, 0xa9, 0xe0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_17[102] =
+    {
+        0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+        0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+        0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+        0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+        0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+        0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+        0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+        0x65, 0x80, 0x2c, 0x60, 0x00, 0x00,
+        0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+        0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+        0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+        0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+        0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+        0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+        0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+        0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+        0x90, 0x48, 0x88, 0xc8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_18[108] =
+    {
+        0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+        0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+        0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+        0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+        0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+        0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+        0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+        0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+        0x90, 0x48, 0x88, 0xc8, 0x00, 0x00,
+        0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+        0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+        0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+        0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+        0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+        0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+        0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+        0x65, 0x80, 0x2c, 0x60, 0x00, 0x00,
+        0xe3, 0xd1, 0x2e, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_19[114] =
+    {
+        0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+        0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+        0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+        0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+        0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+        0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+        0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+        0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+        0x90, 0x48, 0x88, 0xc8, 0x00, 0x00,
+        0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+        0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+        0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+        0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+        0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+        0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+        0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+        0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+        0x02, 0x63, 0x09, 0x24, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_2[12] =
+    {
+        0xec, 0xeb, 0x5d, 0x5c, 0x00, 0x00,
+        0xbb, 0x9c, 0xf2, 0xf2, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_20[120] =
+    {
+        0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+        0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+        0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+        0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+        0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+        0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+        0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+        0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+        0x02, 0x63, 0x09, 0x24, 0x00, 0x00,
+        0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+        0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+        0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+        0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+        0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+        0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+        0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+        0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+        0x90, 0x48, 0x88, 0xc8, 0x00, 0x00,
+        0x9a, 0xd4, 0x6a, 0x36, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_21[126] =
+    {
+        0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+        0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+        0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+        0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+        0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+        0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+        0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+        0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+        0x02, 0x63, 0x09, 0x24, 0x00, 0x00,
+        0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+        0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+        0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+        0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+        0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+        0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+        0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+        0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+        0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+        0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+        0x98, 0x40, 0xd0, 0x18, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_22[132] =
+    {
+        0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+        0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+        0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+        0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+        0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+        0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+        0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+        0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+        0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+        0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+        0x98, 0x40, 0xd0, 0x18, 0x00, 0x00,
+        0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+        0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+        0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+        0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+        0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+        0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+        0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+        0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+        0x02, 0x63, 0x09, 0x24, 0x00, 0x00,
+        0x32, 0x23, 0x73, 0x8e, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_23[138] =
+    {
+        0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+        0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+        0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+        0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+        0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+        0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+        0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+        0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+        0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+        0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+        0x98, 0x40, 0xd0, 0x18, 0x00, 0x00,
+        0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+        0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+        0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+        0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+        0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+        0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+        0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+        0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+        0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+        0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+        0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+        0x72, 0x15, 0xd1, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_24[144] =
+    {
+        0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+        0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+        0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+        0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+        0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+        0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+        0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+        0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+        0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+        0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+        0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+        0x72, 0x15, 0xd1, 0x00, 0x00, 0x00,
+        0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+        0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+        0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+        0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+        0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+        0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+        0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+        0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+        0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+        0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+        0x98, 0x40, 0xd0, 0x18, 0x00, 0x00,
+        0xf0, 0xdf, 0x91, 0xb6, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_25[150] =
+    {
+        0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+        0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+        0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+        0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+        0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+        0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+        0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+        0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+        0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+        0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+        0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+        0x72, 0x15, 0xd1, 0x00, 0x00, 0x00,
+        0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+        0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+        0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+        0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+        0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+        0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+        0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+        0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+        0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+        0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+        0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+        0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+        0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_26[156] =
+    {
+        0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+        0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+        0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+        0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+        0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+        0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+        0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+        0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+        0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+        0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+        0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+        0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+        0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00,
+        0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+        0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+        0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+        0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+        0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+        0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+        0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+        0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+        0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+        0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+        0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+        0x72, 0x15, 0xd1, 0x00, 0x00, 0x00,
+        0xc5, 0x75, 0x48, 0xba, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_27[162] =
+    {
+        0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+        0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+        0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+        0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+        0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+        0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+        0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+        0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+        0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+        0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+        0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+        0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+        0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00,
+        0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+        0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+        0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+        0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+        0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+        0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+        0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+        0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+        0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+        0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+        0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+        0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+        0x28, 0x89, 0x05, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_28[168] =
+    {
+        0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+        0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+        0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+        0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+        0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+        0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+        0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+        0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+        0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+        0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+        0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+        0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+        0x28, 0x89, 0x05, 0x10, 0x00, 0x00,
+        0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+        0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+        0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+        0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+        0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+        0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+        0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+        0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+        0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+        0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+        0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+        0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+        0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00,
+        0xbc, 0x0d, 0xca, 0x28, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_29[174] =
+    {
+        0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+        0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+        0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+        0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+        0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+        0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+        0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+        0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+        0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+        0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+        0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+        0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+        0x28, 0x89, 0x05, 0x10, 0x00, 0x00,
+        0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+        0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+        0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+        0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+        0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+        0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+        0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+        0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+        0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+        0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+        0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+        0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+        0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+        0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+        0x10, 0x98, 0x30, 0x44, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_3[18] =
+    {
+        0xac, 0x93, 0x5a, 0x5a, 0x00, 0x00,
+        0x55, 0x4a, 0xec, 0x6c, 0x00, 0x00,
+        0x43, 0x36, 0x4d, 0xb6, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_30[180] =
+    {
+        0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+        0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+        0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+        0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+        0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+        0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+        0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+        0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+        0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+        0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+        0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+        0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+        0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+        0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+        0x10, 0x98, 0x30, 0x44, 0x00, 0x00,
+        0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+        0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+        0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+        0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+        0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+        0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+        0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+        0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+        0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+        0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+        0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+        0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+        0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+        0x28, 0x89, 0x05, 0x10, 0x00, 0x00,
+        0xe1, 0x4f, 0xe0, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_31[186] =
+    {
+        0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+        0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+        0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+        0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+        0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+        0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+        0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+        0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+        0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+        0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+        0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+        0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+        0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+        0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+        0x10, 0x98, 0x30, 0x44, 0x00, 0x00,
+        0x62, 0x23, 0x48, 0x20, 0x00, 0x00,
+        0x31, 0x10, 0x02, 0x54, 0x00, 0x00,
+        0x58, 0x00, 0x0c, 0x84, 0x00, 0x00,
+        0x01, 0x12, 0x10, 0xd0, 0x00, 0x00,
+        0x88, 0x21, 0x03, 0x20, 0x00, 0x00,
+        0x44, 0x02, 0x01, 0xe0, 0x00, 0x00,
+        0x29, 0x04, 0xa0, 0x0a, 0x00, 0x00,
+        0x82, 0xa0, 0x40, 0xa2, 0x00, 0x00,
+        0x0a, 0x1a, 0x86, 0x10, 0x00, 0x00,
+        0x11, 0xe0, 0xd1, 0x00, 0x00, 0x00,
+        0x84, 0x05, 0x00, 0x16, 0x00, 0x00,
+        0x86, 0x40, 0x20, 0x98, 0x00, 0x00,
+        0x00, 0x86, 0x24, 0x60, 0x00, 0x00,
+        0x44, 0x48, 0x81, 0x0a, 0x00, 0x00,
+        0x10, 0x98, 0x1c, 0x08, 0x00, 0x00,
+        0x87, 0x74, 0x30, 0x24, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_4[24] =
+    {
+        0x25, 0xaa, 0xaa, 0xaa, 0x00, 0x00,
+        0x95, 0x55, 0x55, 0x54, 0x00, 0x00,
+        0x1a, 0x6a, 0x6a, 0x6a, 0x00, 0x00,
+        0x43, 0xd5, 0x95, 0x94, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_5[30] =
+    {
+        0x64, 0xa2, 0xaa, 0xaa, 0x00, 0x00,
+        0x25, 0x54, 0x54, 0x54, 0x00, 0x00,
+        0x49, 0x68, 0x48, 0x4a, 0x00, 0x00,
+        0x53, 0x91, 0x09, 0x90, 0x00, 0x00,
+        0x8e, 0x30, 0x21, 0x6c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_6[36] =
+    {
+        0x62, 0x8a, 0xa2, 0xa2, 0x00, 0x00,
+        0x15, 0x54, 0x14, 0x54, 0x00, 0x00,
+        0x4c, 0x47, 0x44, 0x2a, 0x00, 0x00,
+        0x52, 0x95, 0x08, 0x94, 0x00, 0x00,
+        0x23, 0x64, 0x61, 0x24, 0x00, 0x00,
+        0x8a, 0x58, 0x09, 0x58, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_7[42] =
+    {
+        0x62, 0xa2, 0x8a, 0xa2, 0x00, 0x00,
+        0xb1, 0x14, 0x44, 0x54, 0x00, 0x00,
+        0x18, 0x6b, 0x22, 0x22, 0x00, 0x00,
+        0x44, 0xd4, 0x5c, 0x10, 0x00, 0x00,
+        0x13, 0x64, 0x90, 0x68, 0x00, 0x00,
+        0x49, 0x1b, 0x20, 0x52, 0x00, 0x00,
+        0x86, 0x8c, 0x13, 0x0c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_8[48] =
+    {
+        0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+        0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+        0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+        0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+        0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+        0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+        0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+        0x65, 0x80, 0x2c, 0x60, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask31_9[54] =
+    {
+        0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+        0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+        0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+        0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+        0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+        0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+        0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+        0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+        0x90, 0x48, 0x88, 0xc8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_10[60] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+        0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+        0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+        0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+        0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+        0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+        0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+        0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+        0x84, 0x92, 0x84, 0x92, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_11[66] =
+    {
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+        0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+        0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+        0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+        0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+        0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+        0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+        0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_12[72] =
+    {
+        0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+        0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+        0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+        0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+        0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+        0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+        0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+        0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+        0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+        0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+        0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+        0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_13[78] =
+    {
+        0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+        0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+        0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+        0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+        0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+        0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+        0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+        0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+        0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+        0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+        0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_14[84] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+        0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+        0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+        0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+        0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+        0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+        0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+        0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+        0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+        0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+        0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+        0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+        0x82, 0x88, 0x82, 0x88, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_15[90] =
+    {
+        0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+        0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+        0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+        0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+        0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+        0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+        0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+        0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+        0x18, 0x22, 0x18, 0x22, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_16[96] =
+    {
+        0xa4, 0x10, 0xa4, 0x10, 0x00, 0x00,
+        0x01, 0x2a, 0x01, 0x2a, 0x00, 0x00,
+        0x06, 0x42, 0x06, 0x42, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x81, 0x90, 0x81, 0x90, 0x00, 0x00,
+        0x00, 0xf0, 0x00, 0xf0, 0x00, 0x00,
+        0x50, 0x05, 0x50, 0x05, 0x00, 0x00,
+        0x20, 0x51, 0x20, 0x51, 0x00, 0x00,
+        0x43, 0x08, 0x43, 0x08, 0x00, 0x00,
+        0x68, 0x80, 0x68, 0x80, 0x00, 0x00,
+        0x80, 0x0b, 0x80, 0x0b, 0x00, 0x00,
+        0x10, 0x4c, 0x10, 0x4c, 0x00, 0x00,
+        0x12, 0x30, 0x12, 0x30, 0x00, 0x00,
+        0x40, 0x85, 0x40, 0x85, 0x00, 0x00,
+        0x0e, 0x04, 0x0e, 0x04, 0x00, 0x00,
+        0x18, 0x12, 0x18, 0x12, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_17[102] =
+    {
+        0x20, 0x54, 0x20, 0x54, 0x00, 0x00,
+        0x18, 0x88, 0x18, 0x88, 0x00, 0x00,
+        0x84, 0x07, 0x84, 0x07, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x12, 0x82, 0x12, 0x82, 0x00, 0x00,
+        0x81, 0x41, 0x81, 0x41, 0x00, 0x00,
+        0x40, 0x62, 0x40, 0x62, 0x00, 0x00,
+        0x16, 0x30, 0x16, 0x30, 0x00, 0x00,
+        0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+        0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+        0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+        0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+        0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+        0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+        0x44, 0x64, 0x44, 0x64, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_18[108] =
+    {
+        0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+        0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+        0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+        0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+        0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+        0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+        0x44, 0x64, 0x44, 0x64, 0x00, 0x00,
+        0x20, 0x54, 0x20, 0x54, 0x00, 0x00,
+        0x18, 0x88, 0x18, 0x88, 0x00, 0x00,
+        0x84, 0x07, 0x84, 0x07, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x12, 0x82, 0x12, 0x82, 0x00, 0x00,
+        0x81, 0x41, 0x81, 0x41, 0x00, 0x00,
+        0x40, 0x62, 0x40, 0x62, 0x00, 0x00,
+        0x16, 0x30, 0x16, 0x30, 0x00, 0x00,
+        0x1e, 0xb2, 0xd8, 0x53, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_19[114] =
+    {
+        0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+        0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+        0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+        0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+        0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+        0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+        0x44, 0x64, 0x44, 0x64, 0x00, 0x00,
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+        0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+        0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+        0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+        0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+        0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+        0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+        0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+        0x84, 0x92, 0x84, 0x92, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_2[12] =
+    {
+        0xae, 0xae, 0xae, 0xae, 0x00, 0x00,
+        0x79, 0x79, 0x79, 0x79, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_20[120] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+        0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+        0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+        0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+        0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+        0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+        0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+        0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+        0x84, 0x92, 0x84, 0x92, 0x00, 0x00,
+        0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+        0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+        0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+        0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+        0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+        0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+        0x44, 0x64, 0x44, 0x64, 0x00, 0x00,
+        0x96, 0xd3, 0xf6, 0xac, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_21[126] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+        0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+        0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+        0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+        0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+        0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+        0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+        0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+        0x84, 0x92, 0x84, 0x92, 0x00, 0x00,
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+        0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+        0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+        0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+        0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+        0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+        0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+        0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00
+    };
+
+    // NOTE(review): FEC packet-mask tables mask32_22 .. mask32_27.
+    // mask32_<n>[n * 6]: presumably n mask rows of 6 bytes (48 bits)
+    // each over 32 media packets; trailing two bytes of each row are
+    // 0x00. Larger tables reuse the rows of smaller ones plus one
+    // fresh final row. Generated data -- do not hand-edit.
+    const WebRtc_UWord8 mask32_22[132] =
+    {
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+        0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+        0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+        0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+        0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+        0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+        0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+        0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00,
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+        0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+        0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+        0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+        0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+        0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+        0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+        0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+        0x84, 0x92, 0x84, 0x92, 0x00, 0x00,
+        0xeb, 0xb2, 0x22, 0x89, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_23[138] =
+    {
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+        0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+        0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+        0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+        0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+        0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+        0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+        0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00,
+        0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+        0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+        0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+        0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+        0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+        0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+        0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+        0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+        0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+        0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+        0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+        0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_24[144] =
+    {
+        0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+        0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+        0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+        0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+        0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+        0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+        0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+        0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+        0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+        0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+        0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+        0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00,
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+        0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+        0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+        0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+        0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+        0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+        0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+        0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00,
+        0xf3, 0x5a, 0x2f, 0x5d, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_25[150] =
+    {
+        0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+        0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+        0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+        0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+        0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+        0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+        0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+        0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+        0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+        0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+        0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+        0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00,
+        0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+        0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+        0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+        0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+        0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+        0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+        0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+        0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+        0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+        0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+        0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_26[156] =
+    {
+        0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+        0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+        0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+        0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+        0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+        0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+        0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+        0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+        0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+        0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+        0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00,
+        0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+        0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+        0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+        0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+        0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+        0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+        0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+        0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+        0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+        0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+        0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+        0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00,
+        0x52, 0x15, 0x62, 0x0a, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_27[162] =
+    {
+        0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+        0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+        0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+        0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+        0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+        0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+        0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+        0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+        0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+        0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+        0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00,
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+        0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+        0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+        0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+        0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+        0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+        0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+        0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+        0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+        0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+        0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+        0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+        0x82, 0x88, 0x82, 0x88, 0x00, 0x00
+    };
+
+    // NOTE(review): FEC packet-mask tables mask32_28 .. mask32_32
+    // (with mask32_3 interleaved by string-sorted name). Each
+    // mask32_<n>[n * 6] holds n mask rows of 6 bytes (48 bits);
+    // presumably one row per FEC packet over 32 media packets. The
+    // last two bytes of every row are 0x00. Generated data -- do not
+    // hand-edit.
+    const WebRtc_UWord8 mask32_28[168] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+        0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+        0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+        0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+        0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+        0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+        0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+        0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+        0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+        0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+        0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+        0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+        0x82, 0x88, 0x82, 0x88, 0x00, 0x00,
+        0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+        0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+        0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+        0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+        0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+        0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+        0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+        0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+        0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+        0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+        0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00,
+        0x7f, 0xe2, 0xbc, 0x01, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_29[174] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+        0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+        0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+        0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+        0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+        0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+        0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+        0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+        0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+        0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+        0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+        0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+        0x82, 0x88, 0x82, 0x88, 0x00, 0x00,
+        0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+        0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+        0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+        0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+        0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+        0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+        0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+        0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+        0x18, 0x22, 0x18, 0x22, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_3[18] =
+    {
+        0xad, 0x2d, 0xad, 0x2d, 0x00, 0x00,
+        0x76, 0x36, 0x76, 0x36, 0x00, 0x00,
+        0x26, 0xdb, 0x26, 0xdb, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_30[180] =
+    {
+        0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+        0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+        0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+        0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+        0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+        0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+        0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+        0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+        0x18, 0x22, 0x18, 0x22, 0x00, 0x00,
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+        0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+        0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+        0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+        0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+        0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+        0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+        0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+        0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+        0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+        0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+        0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+        0x82, 0x88, 0x82, 0x88, 0x00, 0x00,
+        0x1e, 0x27, 0xe2, 0xd8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_31[186] =
+    {
+        0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+        0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+        0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+        0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+        0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+        0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+        0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+        0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+        0x18, 0x22, 0x18, 0x22, 0x00, 0x00,
+        0xa4, 0x10, 0xa4, 0x10, 0x00, 0x00,
+        0x01, 0x2a, 0x01, 0x2a, 0x00, 0x00,
+        0x06, 0x42, 0x06, 0x42, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x81, 0x90, 0x81, 0x90, 0x00, 0x00,
+        0x00, 0xf0, 0x00, 0xf0, 0x00, 0x00,
+        0x50, 0x05, 0x50, 0x05, 0x00, 0x00,
+        0x20, 0x51, 0x20, 0x51, 0x00, 0x00,
+        0x43, 0x08, 0x43, 0x08, 0x00, 0x00,
+        0x68, 0x80, 0x68, 0x80, 0x00, 0x00,
+        0x80, 0x0b, 0x80, 0x0b, 0x00, 0x00,
+        0x10, 0x4c, 0x10, 0x4c, 0x00, 0x00,
+        0x12, 0x30, 0x12, 0x30, 0x00, 0x00,
+        0x40, 0x85, 0x40, 0x85, 0x00, 0x00,
+        0x0e, 0x04, 0x0e, 0x04, 0x00, 0x00,
+        0x18, 0x12, 0x18, 0x12, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_32[192] =
+    {
+        0xa4, 0x10, 0xa4, 0x10, 0x00, 0x00,
+        0x01, 0x2a, 0x01, 0x2a, 0x00, 0x00,
+        0x06, 0x42, 0x06, 0x42, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x81, 0x90, 0x81, 0x90, 0x00, 0x00,
+        0x00, 0xf0, 0x00, 0xf0, 0x00, 0x00,
+        0x50, 0x05, 0x50, 0x05, 0x00, 0x00,
+        0x20, 0x51, 0x20, 0x51, 0x00, 0x00,
+        0x43, 0x08, 0x43, 0x08, 0x00, 0x00,
+        0x68, 0x80, 0x68, 0x80, 0x00, 0x00,
+        0x80, 0x0b, 0x80, 0x0b, 0x00, 0x00,
+        0x10, 0x4c, 0x10, 0x4c, 0x00, 0x00,
+        0x12, 0x30, 0x12, 0x30, 0x00, 0x00,
+        0x40, 0x85, 0x40, 0x85, 0x00, 0x00,
+        0x0e, 0x04, 0x0e, 0x04, 0x00, 0x00,
+        0x18, 0x12, 0x18, 0x12, 0x00, 0x00,
+        0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+        0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+        0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+        0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+        0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+        0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+        0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+        0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+        0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+        0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+        0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+        0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+        0x18, 0x22, 0x18, 0x22, 0x00, 0x00,
+        0x60, 0xc4, 0x02, 0x02, 0x00, 0x00
+    };
+
+    // NOTE(review): small FEC packet-mask tables mask32_4 .. mask32_9.
+    // mask32_<n>[n * 6]: n mask rows of 6 bytes (48 bits) each;
+    // presumably one row per FEC packet over 32 media packets, with
+    // the trailing two bytes of every row 0x00. Generated data -- do
+    // not hand-edit.
+    const WebRtc_UWord8 mask32_4[24] =
+    {
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0xaa, 0xaa, 0xaa, 0xaa, 0x00, 0x00,
+        0x35, 0x35, 0x35, 0x35, 0x00, 0x00,
+        0xca, 0xca, 0xca, 0xca, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_5[30] =
+    {
+        0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+        0x2a, 0x2a, 0x2a, 0x2a, 0x00, 0x00,
+        0x24, 0x25, 0x24, 0x25, 0x00, 0x00,
+        0x84, 0xc8, 0x84, 0xc8, 0x00, 0x00,
+        0x10, 0xb6, 0x10, 0xb6, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_6[36] =
+    {
+        0x51, 0x51, 0x51, 0x51, 0x00, 0x00,
+        0x0a, 0x2a, 0x0a, 0x2a, 0x00, 0x00,
+        0xa2, 0x15, 0xa2, 0x15, 0x00, 0x00,
+        0x84, 0x4a, 0x84, 0x4a, 0x00, 0x00,
+        0x30, 0x92, 0x30, 0x92, 0x00, 0x00,
+        0x04, 0xac, 0x04, 0xac, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_7[42] =
+    {
+        0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+        0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+        0x91, 0x11, 0x91, 0x11, 0x00, 0x00,
+        0x2e, 0x08, 0x2e, 0x08, 0x00, 0x00,
+        0x48, 0x34, 0x48, 0x34, 0x00, 0x00,
+        0x90, 0x29, 0x90, 0x29, 0x00, 0x00,
+        0x09, 0x86, 0x09, 0x86, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_8[48] =
+    {
+        0x20, 0x54, 0x20, 0x54, 0x00, 0x00,
+        0x18, 0x88, 0x18, 0x88, 0x00, 0x00,
+        0x84, 0x07, 0x84, 0x07, 0x00, 0x00,
+        0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+        0x12, 0x82, 0x12, 0x82, 0x00, 0x00,
+        0x81, 0x41, 0x81, 0x41, 0x00, 0x00,
+        0x40, 0x62, 0x40, 0x62, 0x00, 0x00,
+        0x16, 0x30, 0x16, 0x30, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask32_9[54] =
+    {
+        0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+        0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+        0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+        0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+        0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+        0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+        0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+        0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+        0x44, 0x64, 0x44, 0x64, 0x00, 0x00
+    };
+
+    // NOTE(review): FEC packet-mask tables mask33_1 .. mask33_15.
+    // mask33_<n>[n * 6]: n mask rows of 6 bytes (48 bits) each;
+    // presumably one row per FEC packet over 33 media packets -- the
+    // fifth byte's high bit (0x80) is set in some rows, consistent
+    // with a 33rd mask bit. Generated data -- do not hand-edit.
+    const WebRtc_UWord8 mask33_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_10[60] =
+    {
+        0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+        0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+        0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+        0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+        0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+        0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+        0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+        0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x92, 0xc8, 0x02, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_11[66] =
+    {
+        0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+        0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+        0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+        0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+        0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+        0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+        0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+        0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+        0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_12[72] =
+    {
+        0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+        0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+        0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+        0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+        0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+        0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+        0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+        0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+        0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+        0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+        0xe8, 0x80, 0x51, 0x35, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_13[78] =
+    {
+        0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+        0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+        0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+        0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+        0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+        0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+        0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+        0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+        0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+        0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+        0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+        0xc5, 0x10, 0x83, 0x34, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_14[84] =
+    {
+        0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+        0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+        0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+        0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+        0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+        0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+        0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+        0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+        0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+        0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+        0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+        0x82, 0x88, 0xb0, 0xde, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_15[90] =
+    {
+        0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+        0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+        0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+        0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+        0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+        0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+        0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+        0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+        0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+        0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+        0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+        0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+        0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+        0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00
+    };
+
+    // NOTE(review): FEC packet-mask tables mask33_16 .. mask33_19 and
+    // mask33_2 (string-sorted name order). mask33_<n>[n * 6]: n mask
+    // rows of 6 bytes (48 bits) each; presumably one row per FEC
+    // packet over 33 media packets (the 0x80 in byte 5 of some rows
+    // is the 33rd bit). Generated data -- do not hand-edit.
+    const WebRtc_UWord8 mask33_16[96] =
+    {
+        0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+        0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+        0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+        0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+        0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+        0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+        0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+        0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+        0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+        0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+        0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+        0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+        0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+        0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+        0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+        0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_17[102] =
+    {
+        0x20, 0x54, 0x64, 0x16, 0x00, 0x00,
+        0x18, 0x88, 0xa2, 0xc2, 0x00, 0x00,
+        0x84, 0x07, 0x51, 0x60, 0x80, 0x00,
+        0x60, 0x48, 0x4a, 0x85, 0x00, 0x00,
+        0x12, 0x82, 0x38, 0x4c, 0x00, 0x00,
+        0x81, 0x41, 0x89, 0x29, 0x00, 0x00,
+        0x40, 0x62, 0x07, 0x11, 0x80, 0x00,
+        0x16, 0x30, 0x94, 0xb0, 0x00, 0x00,
+        0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+        0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+        0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+        0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+        0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+        0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+        0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+        0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00
+    };
+
+    // mask33_18 reuses the mask33_17 rows (reordered); only the final
+    // row is new.
+    const WebRtc_UWord8 mask33_18[108] =
+    {
+        0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+        0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+        0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+        0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+        0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+        0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+        0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+        0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00,
+        0x20, 0x54, 0x64, 0x16, 0x00, 0x00,
+        0x18, 0x88, 0xa2, 0xc2, 0x00, 0x00,
+        0x84, 0x07, 0x51, 0x60, 0x80, 0x00,
+        0x60, 0x48, 0x4a, 0x85, 0x00, 0x00,
+        0x12, 0x82, 0x38, 0x4c, 0x00, 0x00,
+        0x81, 0x41, 0x89, 0x29, 0x00, 0x00,
+        0x40, 0x62, 0x07, 0x11, 0x80, 0x00,
+        0x16, 0x30, 0x94, 0xb0, 0x00, 0x00,
+        0x89, 0x53, 0x03, 0xad, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_19[114] =
+    {
+        0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+        0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+        0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+        0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+        0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+        0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+        0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+        0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00,
+        0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+        0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+        0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+        0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+        0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+        0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+        0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+        0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x92, 0xc8, 0x02, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_2[12] =
+    {
+        0xae, 0xae, 0xce, 0xce, 0x00, 0x00,
+        0x79, 0x79, 0xb9, 0x39, 0x80, 0x00
+    };
+
+    // NOTE(review): FEC packet-mask tables mask33_20 .. mask33_22.
+    // mask33_<n>[n * 6]: n mask rows of 6 bytes (48 bits) each;
+    // presumably one row per FEC packet over 33 media packets. Larger
+    // tables reuse the rows of smaller ones plus one fresh final row.
+    // Generated data -- do not hand-edit.
+    const WebRtc_UWord8 mask33_20[120] =
+    {
+        0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+        0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+        0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+        0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+        0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+        0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+        0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+        0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x92, 0xc8, 0x02, 0x80, 0x00,
+        0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+        0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+        0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+        0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+        0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+        0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+        0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+        0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00,
+        0x73, 0x5f, 0x5b, 0x0e, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_21[126] =
+    {
+        0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+        0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+        0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+        0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+        0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+        0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+        0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+        0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x92, 0xc8, 0x02, 0x80, 0x00,
+        0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+        0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+        0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+        0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+        0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+        0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+        0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+        0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+        0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_22[132] =
+    {
+        0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+        0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+        0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+        0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+        0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+        0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+        0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+        0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+        0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00,
+        0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+        0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+        0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+        0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+        0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+        0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+        0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+        0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x92, 0xc8, 0x02, 0x80, 0x00,
+        0xcc, 0xe3, 0x42, 0x6b, 0x80, 0x00
+    };
+
+    // NOTE(review): FEC packet-mask tables mask33_23 .. mask33_25.
+    // mask33_<n>[n * 6]: n mask rows of 6 bytes (48 bits) each;
+    // presumably one row per FEC packet over 33 media packets. Rows
+    // of smaller tables are reused in larger ones. Generated data --
+    // do not hand-edit.
+    const WebRtc_UWord8 mask33_23[138] =
+    {
+        0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+        0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+        0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+        0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+        0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+        0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+        0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+        0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+        0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00,
+        0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+        0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+        0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+        0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+        0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+        0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+        0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+        0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+        0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+        0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+        0xe8, 0x80, 0x51, 0x35, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_24[144] =
+    {
+        0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+        0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+        0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+        0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+        0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+        0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+        0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+        0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+        0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+        0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+        0xe8, 0x80, 0x51, 0x35, 0x00, 0x00,
+        0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+        0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+        0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+        0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+        0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+        0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+        0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+        0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+        0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+        0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00,
+        0xdc, 0x4e, 0xfc, 0x70, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_25[150] =
+    {
+        0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+        0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+        0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+        0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+        0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+        0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+        0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+        0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+        0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+        0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+        0xe8, 0x80, 0x51, 0x35, 0x00, 0x00,
+        0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+        0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+        0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+        0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+        0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+        0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+        0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+        0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+        0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+        0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+        0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+        0xc5, 0x10, 0x83, 0x34, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_26[156] =
+    {
+        0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+        0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+        0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+        0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+        0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+        0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+        0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+        0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+        0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+        0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+        0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+        0xc5, 0x10, 0x83, 0x34, 0x00, 0x00,
+        0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+        0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+        0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+        0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+        0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+        0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+        0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+        0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+        0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+        0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+        0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+        0xe8, 0x80, 0x51, 0x35, 0x00, 0x00,
+        0xa4, 0xa4, 0xfc, 0x91, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_27[162] =
+    {
+        0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+        0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+        0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+        0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+        0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+        0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+        0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+        0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+        0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+        0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+        0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+        0xc5, 0x10, 0x83, 0x34, 0x00, 0x00,
+        0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+        0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+        0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+        0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+        0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+        0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+        0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+        0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+        0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+        0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+        0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+        0x82, 0x88, 0xb0, 0xde, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_28[168] =
+    {
+        0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+        0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+        0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+        0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+        0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+        0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+        0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+        0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+        0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+        0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+        0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+        0x82, 0x88, 0xb0, 0xde, 0x80, 0x00,
+        0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+        0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+        0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+        0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+        0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+        0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+        0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+        0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+        0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+        0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+        0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+        0xc5, 0x10, 0x83, 0x34, 0x00, 0x00,
+        0x1b, 0xf4, 0xaa, 0xec, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_29[174] =
+    {
+        0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+        0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+        0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+        0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+        0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+        0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+        0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+        0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+        0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+        0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+        0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+        0x82, 0x88, 0xb0, 0xde, 0x80, 0x00,
+        0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+        0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+        0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+        0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+        0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+        0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+        0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+        0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+        0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+        0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+        0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+        0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+        0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+        0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_3[18] =
+    {
+        0xad, 0x2d, 0xcd, 0xcc, 0x00, 0x00,
+        0x76, 0x36, 0x97, 0x27, 0x00, 0x00,
+        0x26, 0xdb, 0xb8, 0xd1, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_30[180] =
+    {
+        0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+        0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+        0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+        0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+        0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+        0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+        0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+        0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+        0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+        0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+        0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+        0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+        0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+        0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00,
+        0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+        0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+        0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+        0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+        0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+        0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+        0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+        0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+        0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+        0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+        0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+        0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+        0x82, 0x88, 0xb0, 0xde, 0x80, 0x00,
+        0x6d, 0xd2, 0x8c, 0x00, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_31[186] =
+    {
+        0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+        0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+        0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+        0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+        0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+        0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+        0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+        0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+        0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+        0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+        0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+        0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+        0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+        0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00,
+        0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+        0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+        0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+        0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+        0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+        0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+        0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+        0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+        0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+        0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+        0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+        0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+        0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+        0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+        0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+        0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_32[192] =
+    {
+        0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+        0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+        0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+        0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+        0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+        0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+        0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+        0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+        0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+        0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+        0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+        0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+        0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+        0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+        0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+        0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00,
+        0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+        0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+        0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+        0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+        0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+        0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+        0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+        0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+        0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+        0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+        0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+        0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+        0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+        0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+        0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00,
+        0x73, 0x8e, 0x12, 0xca, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_33[198] =
+    {
+        0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+        0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+        0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+        0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+        0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+        0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+        0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+        0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+        0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+        0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+        0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+        0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+        0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+        0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+        0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+        0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00,
+        0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+        0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+        0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+        0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+        0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+        0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+        0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+        0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+        0x43, 0x08, 0x25, 0x4c, 0x00, 0x00,
+        0x68, 0x80, 0x8a, 0x66, 0x00, 0x00,
+        0x80, 0x0b, 0x91, 0x91, 0x00, 0x00,
+        0x10, 0x4c, 0x68, 0x42, 0x80, 0x00,
+        0x12, 0x30, 0x32, 0xa4, 0x00, 0x00,
+        0x40, 0x85, 0x43, 0x13, 0x00, 0x00,
+        0x0e, 0x04, 0xc4, 0x30, 0x80, 0x00,
+        0x18, 0x12, 0x1c, 0x88, 0x80, 0x00,
+        0xdb, 0x10, 0x3c, 0x09, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_4[24] =
+    {
+        0x55, 0x55, 0xca, 0xec, 0x00, 0x00,
+        0xaa, 0xaa, 0xa9, 0x67, 0x00, 0x00,
+        0x35, 0x35, 0x3a, 0xb1, 0x80, 0x00,
+        0xca, 0xca, 0x55, 0x5a, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_5[30] =
+    {
+        0x55, 0x55, 0x55, 0x44, 0x80, 0x00,
+        0x2a, 0x2a, 0x2a, 0x66, 0x00, 0x00,
+        0x24, 0x25, 0x25, 0xa1, 0x80, 0x00,
+        0x84, 0xc8, 0xe2, 0x12, 0x80, 0x00,
+        0x10, 0xb6, 0x99, 0x98, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_6[36] =
+    {
+        0x51, 0x51, 0xd1, 0x4c, 0x00, 0x00,
+        0x0a, 0x2a, 0xa2, 0xc5, 0x00, 0x00,
+        0xa2, 0x15, 0x95, 0x30, 0x80, 0x00,
+        0x84, 0x4a, 0xca, 0x0a, 0x80, 0x00,
+        0x30, 0x92, 0xa4, 0xaa, 0x00, 0x00,
+        0x04, 0xac, 0x78, 0x15, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_7[42] =
+    {
+        0x45, 0x51, 0x15, 0x44, 0x80, 0x00,
+        0x22, 0x2a, 0x8a, 0x23, 0x00, 0x00,
+        0x91, 0x11, 0x85, 0x91, 0x00, 0x00,
+        0x2e, 0x08, 0x32, 0x0a, 0x80, 0x00,
+        0x48, 0x34, 0x58, 0x34, 0x00, 0x00,
+        0x90, 0x29, 0x2c, 0x0d, 0x00, 0x00,
+        0x09, 0x86, 0x43, 0xc8, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_8[48] =
+    {
+        0x20, 0x54, 0x64, 0x16, 0x00, 0x00,
+        0x18, 0x88, 0xa2, 0xc2, 0x00, 0x00,
+        0x84, 0x07, 0x51, 0x60, 0x80, 0x00,
+        0x60, 0x48, 0x4a, 0x85, 0x00, 0x00,
+        0x12, 0x82, 0x38, 0x4c, 0x00, 0x00,
+        0x81, 0x41, 0x89, 0x29, 0x00, 0x00,
+        0x40, 0x62, 0x07, 0x11, 0x80, 0x00,
+        0x16, 0x30, 0x94, 0xb0, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask33_9[54] =
+    {
+        0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+        0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+        0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+        0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+        0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+        0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+        0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+        0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+        0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_10[60] =
+    {
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_11[66] =
+    {
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_12[72] =
+    {
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x35, 0x28, 0x9a, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_13[78] =
+    {
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_14[84] =
+    {
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_15[90] =
+    {
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_16[96] =
+    {
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_17[102] =
+    {
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x25, 0x4c, 0x12, 0xa6, 0x00, 0x00,
+        0x8a, 0x66, 0x45, 0x33, 0x00, 0x00,
+        0x91, 0x91, 0x48, 0xc8, 0x80, 0x00,
+        0x68, 0x42, 0xb4, 0x21, 0x40, 0x00,
+        0x32, 0xa4, 0x19, 0x52, 0x00, 0x00,
+        0x43, 0x13, 0x21, 0x89, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0x88, 0x8e, 0x44, 0x40, 0x00,
+        0x3c, 0x09, 0x1e, 0x04, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_18[108] =
+    {
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x64, 0x16, 0x32, 0x0b, 0x00, 0x00,
+        0xa2, 0xc2, 0x51, 0x61, 0x00, 0x00,
+        0x51, 0x60, 0xa8, 0xb0, 0x40, 0x00,
+        0x4a, 0x85, 0x25, 0x42, 0x80, 0x00,
+        0x38, 0x4c, 0x1c, 0x26, 0x00, 0x00,
+        0x89, 0x29, 0x44, 0x94, 0x80, 0x00,
+        0x07, 0x11, 0x83, 0x88, 0xc0, 0x00,
+        0x94, 0xb0, 0x4a, 0x58, 0x00, 0x00,
+        0x89, 0x70, 0xf3, 0xf7, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_19[114] =
+    {
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_2[12] =
+    {
+        0xce, 0xce, 0x67, 0x67, 0x00, 0x00,
+        0xb9, 0x39, 0xdc, 0x9c, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_20[120] =
+    {
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00,
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x5d, 0xc5, 0xfe, 0xd8, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_21[126] =
+    {
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_22[132] =
+    {
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00,
+        0x2a, 0xf7, 0x4f, 0xf5, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_23[138] =
+    {
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x35, 0x28, 0x9a, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_24[144] =
+    {
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x35, 0x28, 0x9a, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x4c, 0xb8, 0x04, 0x74, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_25[150] =
+    {
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x35, 0x28, 0x9a, 0x80, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_26[156] =
+    {
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+        0x51, 0x35, 0x28, 0x9a, 0x80, 0x00,
+        0x95, 0x20, 0xe9, 0xef, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_27[162] =
+    {
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_28[168] =
+    {
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x10, 0x6c, 0xff, 0x60, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_29[174] =
+    {
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_3[18] =
+    {
+        0xcd, 0xcc, 0x66, 0xe6, 0x00, 0x00,
+        0x97, 0x27, 0x4b, 0x93, 0x80, 0x00,
+        0xb8, 0xd1, 0xdc, 0x68, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_30[180] =
+    {
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+        0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+        0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+        0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+        0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+        0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+        0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00,
+        0x87, 0x93, 0x96, 0xc7, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_31[186] =
+    {
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_32[192] =
+    {
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0xa6, 0x27, 0xa9, 0x4a, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_33[198] =
+    {
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x25, 0x4c, 0x12, 0xa6, 0x00, 0x00,
+        0x8a, 0x66, 0x45, 0x33, 0x00, 0x00,
+        0x91, 0x91, 0x48, 0xc8, 0x80, 0x00,
+        0x68, 0x42, 0xb4, 0x21, 0x40, 0x00,
+        0x32, 0xa4, 0x19, 0x52, 0x00, 0x00,
+        0x43, 0x13, 0x21, 0x89, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0x88, 0x8e, 0x44, 0x40, 0x00,
+        0x3c, 0x09, 0x1e, 0x04, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_34[204] =
+    {
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x25, 0x4c, 0x12, 0xa6, 0x00, 0x00,
+        0x8a, 0x66, 0x45, 0x33, 0x00, 0x00,
+        0x91, 0x91, 0x48, 0xc8, 0x80, 0x00,
+        0x68, 0x42, 0xb4, 0x21, 0x40, 0x00,
+        0x32, 0xa4, 0x19, 0x52, 0x00, 0x00,
+        0x43, 0x13, 0x21, 0x89, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0x88, 0x8e, 0x44, 0x40, 0x00,
+        0x3c, 0x09, 0x1e, 0x04, 0x80, 0x00,
+        0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+        0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+        0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+        0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+        0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+        0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+        0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+        0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+        0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+        0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+        0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+        0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+        0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+        0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+        0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00,
+        0x30, 0x3c, 0xb3, 0xe6, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_4[24] =
+    {
+        0xca, 0xec, 0x65, 0x76, 0x00, 0x00,
+        0xa9, 0x67, 0x54, 0xb3, 0x80, 0x00,
+        0x3a, 0xb1, 0x9d, 0x58, 0xc0, 0x00,
+        0x55, 0x5a, 0xaa, 0xad, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_5[30] =
+    {
+        0x55, 0x44, 0xaa, 0xa2, 0x40, 0x00,
+        0x2a, 0x66, 0x15, 0x33, 0x00, 0x00,
+        0x25, 0xa1, 0x92, 0xd0, 0xc0, 0x00,
+        0xe2, 0x12, 0xf1, 0x09, 0x40, 0x00,
+        0x99, 0x98, 0x4c, 0xcc, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_6[36] =
+    {
+        0xd1, 0x4c, 0x68, 0xa6, 0x00, 0x00,
+        0xa2, 0xc5, 0x51, 0x62, 0x80, 0x00,
+        0x95, 0x30, 0xca, 0x98, 0x40, 0x00,
+        0xca, 0x0a, 0xe5, 0x05, 0x40, 0x00,
+        0xa4, 0xaa, 0x52, 0x55, 0x00, 0x00,
+        0x78, 0x15, 0x3c, 0x0a, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_7[42] =
+    {
+        0x15, 0x44, 0x8a, 0xa2, 0x40, 0x00,
+        0x8a, 0x23, 0x45, 0x11, 0x80, 0x00,
+        0x85, 0x91, 0x42, 0xc8, 0x80, 0x00,
+        0x32, 0x0a, 0x99, 0x05, 0x40, 0x00,
+        0x58, 0x34, 0x2c, 0x1a, 0x00, 0x00,
+        0x2c, 0x0d, 0x16, 0x06, 0x80, 0x00,
+        0x43, 0xc8, 0x21, 0xe4, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_8[48] =
+    {
+        0x64, 0x16, 0x32, 0x0b, 0x00, 0x00,
+        0xa2, 0xc2, 0x51, 0x61, 0x00, 0x00,
+        0x51, 0x60, 0xa8, 0xb0, 0x40, 0x00,
+        0x4a, 0x85, 0x25, 0x42, 0x80, 0x00,
+        0x38, 0x4c, 0x1c, 0x26, 0x00, 0x00,
+        0x89, 0x29, 0x44, 0x94, 0x80, 0x00,
+        0x07, 0x11, 0x83, 0x88, 0xc0, 0x00,
+        0x94, 0xb0, 0x4a, 0x58, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask34_9[54] =
+    {
+        0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+        0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+        0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+        0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+        0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+        0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+        0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+        0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+        0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_10[60] =
+    {
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+        0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+        0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+        0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_11[66] =
+    {
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_12[72] =
+    {
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x35, 0x2d, 0x86, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_13[78] =
+    {
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_14[84] =
+    {
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_15[90] =
+    {
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_16[96] =
+    {
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_17[102] =
+    {
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+        0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+        0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+        0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+        0x3c, 0x09, 0x04, 0x82, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_18[108] =
+    {
+        0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+        0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+        0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+        0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+        0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+        0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+        0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+        0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+        0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00,
+        0x64, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0xa2, 0xc2, 0x61, 0x30, 0x80, 0x00,
+        0x51, 0x60, 0xb0, 0x58, 0x20, 0x00,
+        0x4a, 0x85, 0x42, 0xa1, 0x40, 0x00,
+        0x38, 0x4c, 0x26, 0x13, 0x00, 0x00,
+        0x89, 0x29, 0x14, 0x8a, 0x40, 0x00,
+        0x07, 0x11, 0x88, 0xc4, 0x60, 0x00,
+        0x94, 0xb0, 0x58, 0x2c, 0x00, 0x00,
+        0x40, 0xc9, 0x65, 0xbe, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_19[114] =
+    {
+        0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+        0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+        0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+        0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+        0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+        0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+        0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+        0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+        0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+        0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+        0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+        0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_2[12] =
+    {
+        0xce, 0xce, 0x67, 0x33, 0x80, 0x00,
+        0xb9, 0x39, 0x9c, 0xce, 0x60, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_20[120] =
+    {
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+        0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+        0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+        0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00,
+        0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+        0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+        0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+        0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+        0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+        0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+        0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+        0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+        0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00,
+        0x63, 0x36, 0x5c, 0xd3, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_21[126] =
+    {
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+        0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+        0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+        0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_22[132] =
+    {
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+        0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+        0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+        0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00,
+        0x84, 0xc7, 0xbc, 0xcc, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_23[138] =
+    {
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x35, 0x2d, 0x86, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_24[144] =
+    {
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x35, 0x2d, 0x86, 0x20, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x0d, 0xfb, 0x06, 0x89, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_25[150] =
+    {
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x35, 0x2d, 0x86, 0x20, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_26[156] =
+    {
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+        0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+        0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+        0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+        0x51, 0x35, 0x2d, 0x86, 0x20, 0x00,
+        0xc4, 0x57, 0x70, 0x47, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_27[162] =
+    {
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_28[168] =
+    {
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x34, 0x4a, 0x80, 0x94, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_29[174] =
+    {
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_3[18] =
+    {
+        0xcd, 0xcc, 0x66, 0x33, 0x00, 0x00,
+        0x97, 0x27, 0x13, 0x8a, 0xc0, 0x00,
+        0xb8, 0xd1, 0xc9, 0x64, 0xa0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_30[180] =
+    {
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+        0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+        0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+        0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+        0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+        0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00,
+        0x32, 0x1b, 0x9f, 0x09, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_31[186] =
+    {
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_32[192] =
+    {
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0xeb, 0x31, 0x7b, 0x80, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_33[198] =
+    {
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+        0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+        0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+        0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+        0x3c, 0x09, 0x04, 0x82, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_34[204] =
+    {
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+        0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+        0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+        0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+        0x3c, 0x09, 0x04, 0x82, 0x40, 0x00,
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+        0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+        0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+        0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+        0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+        0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+        0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00,
+        0x70, 0x07, 0xcd, 0x8c, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_35[210] =
+    {
+        0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+        0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+        0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+        0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+        0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+        0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+        0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+        0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+        0x3c, 0x09, 0x04, 0x82, 0x40, 0x00,
+        0x25, 0x2c, 0x26, 0x13, 0x00, 0x00,
+        0x8a, 0x91, 0x33, 0x19, 0x80, 0x00,
+        0x91, 0xc0, 0xc8, 0xa4, 0x40, 0x00,
+        0x68, 0x06, 0xa1, 0x50, 0xa0, 0x00,
+        0x32, 0xc8, 0x52, 0x29, 0x00, 0x00,
+        0x43, 0x45, 0x09, 0x84, 0xc0, 0x00,
+        0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+        0x1c, 0xa2, 0x44, 0x62, 0x20, 0x00,
+        0x25, 0x4c, 0x04, 0x82, 0x40, 0x00,
+        0x8a, 0x66, 0x16, 0x0b, 0x00, 0x00,
+        0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x68, 0x42, 0xe0, 0x70, 0x20, 0x00,
+        0x32, 0xa4, 0x03, 0x41, 0xa0, 0x00,
+        0x43, 0x13, 0x64, 0x32, 0x00, 0x00,
+        0xc4, 0x30, 0xa2, 0x91, 0x40, 0x00,
+        0x1c, 0x88, 0x98, 0x4c, 0x20, 0x00,
+        0x3c, 0x09, 0x51, 0x28, 0x80, 0x00,
+        0xc2, 0x1c, 0x68, 0x01, 0xa0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_4[24] =
+    {
+        0xca, 0xec, 0x76, 0x3b, 0x00, 0x00,
+        0xa9, 0x67, 0x33, 0x99, 0xc0, 0x00,
+        0x3a, 0xb1, 0xd8, 0xec, 0x60, 0x00,
+        0x55, 0x5a, 0xad, 0x56, 0xa0, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_5[30] =
+    {
+        0x55, 0x44, 0xa6, 0x53, 0x20, 0x00,
+        0x2a, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x25, 0xa1, 0x8c, 0xe8, 0x60, 0x00,
+        0xe2, 0x12, 0xce, 0x44, 0xa0, 0x00,
+        0x99, 0x98, 0x71, 0xa6, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_6[36] =
+    {
+        0xd1, 0x4c, 0x66, 0x13, 0x00, 0x00,
+        0xa2, 0xc5, 0x22, 0xb1, 0x40, 0x00,
+        0x95, 0x30, 0xd8, 0x4c, 0x20, 0x00,
+        0xca, 0x0a, 0xc5, 0x42, 0xa0, 0x00,
+        0xa4, 0xaa, 0x14, 0xa9, 0x80, 0x00,
+        0x78, 0x15, 0x53, 0x05, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_7[42] =
+    {
+        0x15, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8a, 0x23, 0x11, 0x88, 0xc0, 0x00,
+        0x85, 0x91, 0x48, 0xa4, 0x40, 0x00,
+        0x32, 0x0a, 0x85, 0x42, 0xa0, 0x00,
+        0x58, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x2c, 0x0d, 0x05, 0x83, 0x40, 0x00,
+        0x43, 0xc8, 0x70, 0x32, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_8[48] =
+    {
+        0x64, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0xa2, 0xc2, 0x61, 0x30, 0x80, 0x00,
+        0x51, 0x60, 0xb0, 0x58, 0x20, 0x00,
+        0x4a, 0x85, 0x42, 0xa1, 0x40, 0x00,
+        0x38, 0x4c, 0x26, 0x13, 0x00, 0x00,
+        0x89, 0x29, 0x14, 0x8a, 0x40, 0x00,
+        0x07, 0x11, 0x88, 0xc4, 0x60, 0x00,
+        0x94, 0xb0, 0x58, 0x2c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask35_9[54] =
+    {
+        0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+        0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+        0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+        0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+        0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+        0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+        0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+        0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+        0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xf0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_10[60] =
+    {
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+        0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_11[66] =
+    {
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_12[72] =
+    {
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_13[78] =
+    {
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_14[84] =
+    {
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_15[90] =
+    {
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_16[96] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_17[102] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_18[108] =
+    {
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0xd0, 0x03, 0x74, 0x00, 0xd0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_19[114] =
+    {
+        0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+        0x66, 0x26, 0x19, 0x89, 0x80, 0x00,
+        0x90, 0x49, 0x64, 0x12, 0x50, 0x00,
+        0x01, 0xa5, 0x80, 0x69, 0x60, 0x00,
+        0x0e, 0x12, 0x83, 0x84, 0xa0, 0x00,
+        0x13, 0x0b, 0x04, 0xc2, 0xc0, 0x00,
+        0x20, 0xd0, 0x48, 0x34, 0x10, 0x00,
+        0xc2, 0x51, 0x30, 0x94, 0x40, 0x00,
+        0x29, 0x0c, 0x8a, 0x43, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+        0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_2[12] =
+    {
+        0xce, 0x67, 0x33, 0x99, 0xc0, 0x00,
+        0x39, 0x9c, 0xce, 0x67, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_20[120] =
+    {
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+        0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00,
+        0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+        0x66, 0x26, 0x19, 0x89, 0x80, 0x00,
+        0x90, 0x49, 0x64, 0x12, 0x50, 0x00,
+        0x01, 0xa5, 0x80, 0x69, 0x60, 0x00,
+        0x0e, 0x12, 0x83, 0x84, 0xa0, 0x00,
+        0x13, 0x0b, 0x04, 0xc2, 0xc0, 0x00,
+        0x20, 0xd0, 0x48, 0x34, 0x10, 0x00,
+        0xc2, 0x51, 0x30, 0x94, 0x40, 0x00,
+        0x29, 0x0c, 0x8a, 0x43, 0x20, 0x00,
+        0x45, 0xb9, 0x08, 0x16, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_21[126] =
+    {
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+        0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_22[132] =
+    {
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+        0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00,
+        0x71, 0x04, 0xba, 0x7b, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_23[138] =
+    {
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_24[144] =
+    {
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x76, 0x3a, 0xeb, 0x17, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_25[150] =
+    {
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_26[156] =
+    {
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+        0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+        0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+        0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00,
+        0xec, 0x58, 0x0e, 0x6c, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_27[162] =
+    {
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_28[168] =
+    {
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x29, 0xfd, 0x91, 0x6f, 0xd0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_29[174] =
+    {
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_3[18] =
+    {
+        0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+        0x27, 0x15, 0x89, 0xc5, 0x60, 0x00,
+        0x92, 0xc9, 0x64, 0xb2, 0x50, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_30[180] =
+    {
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+        0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+        0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+        0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+        0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+        0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00,
+        0xc5, 0x38, 0xbb, 0x98, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_31[186] =
+    {
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_32[192] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x3a, 0x28, 0x9c, 0x2f, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_33[198] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_34[204] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+        0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+        0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+        0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+        0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+        0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00,
+        0xf7, 0x5e, 0x66, 0x5b, 0x60, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_35[210] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0xd0, 0x03, 0x74, 0x00, 0xd0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_36[216] =
+    {
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0xd0, 0x03, 0x74, 0x00, 0xd0, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+        0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+        0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+        0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+        0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+        0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+        0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+        0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+        0xa4, 0x9c, 0x31, 0x13, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_4[24] =
+    {
+        0xec, 0x76, 0x3b, 0x1d, 0x80, 0x00,
+        0x67, 0x33, 0x99, 0xcc, 0xe0, 0x00,
+        0xb1, 0xd8, 0xec, 0x76, 0x30, 0x00,
+        0x5a, 0xad, 0x56, 0xab, 0x50, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_5[30] =
+    {
+        0x4c, 0xa6, 0x53, 0x29, 0x90, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+        0x19, 0xd0, 0xc6, 0x74, 0x30, 0x00,
+        0x9c, 0x89, 0x67, 0x22, 0x50, 0x00,
+        0xe3, 0x4c, 0x38, 0xd3, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_6[36] =
+    {
+        0xcc, 0x26, 0x33, 0x09, 0x80, 0x00,
+        0x45, 0x62, 0x91, 0x58, 0xa0, 0x00,
+        0xb0, 0x98, 0x6c, 0x26, 0x10, 0x00,
+        0x8a, 0x85, 0x62, 0xa1, 0x50, 0x00,
+        0x29, 0x53, 0x0a, 0x54, 0xc0, 0x00,
+        0xa6, 0x0a, 0xa9, 0x82, 0xa0, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_7[42] =
+    {
+        0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+        0x23, 0x11, 0x88, 0xc4, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+        0x0a, 0x85, 0x42, 0xa1, 0x50, 0x00,
+        0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+        0x0b, 0x06, 0x82, 0xc1, 0xa0, 0x00,
+        0xe0, 0x64, 0x38, 0x19, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_8[48] =
+    {
+        0x16, 0x0b, 0x05, 0x82, 0xc0, 0x00,
+        0xc2, 0x61, 0x30, 0x98, 0x40, 0x00,
+        0x60, 0xb0, 0x58, 0x2c, 0x10, 0x00,
+        0x85, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+        0x29, 0x14, 0x8a, 0x45, 0x20, 0x00,
+        0x11, 0x88, 0xc4, 0x62, 0x30, 0x00,
+        0xb0, 0x58, 0x2c, 0x16, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask36_9[54] =
+    {
+        0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+        0x66, 0x26, 0x19, 0x89, 0x80, 0x00,
+        0x90, 0x49, 0x64, 0x12, 0x50, 0x00,
+        0x01, 0xa5, 0x80, 0x69, 0x60, 0x00,
+        0x0e, 0x12, 0x83, 0x84, 0xa0, 0x00,
+        0x13, 0x0b, 0x04, 0xc2, 0xc0, 0x00,
+        0x20, 0xd0, 0x48, 0x34, 0x10, 0x00,
+        0xc2, 0x51, 0x30, 0x94, 0x40, 0x00,
+        0x29, 0x0c, 0x8a, 0x43, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xf8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_10[60] =
+    {
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+        0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+        0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+        0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_11[66] =
+    {
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_12[72] =
+    {
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_13[78] =
+    {
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_14[84] =
+    {
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_15[90] =
+    {
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_16[96] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_17[102] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_18[108] =
+    {
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_19[114] =
+    {
+        0x44, 0xa2, 0x51, 0x29, 0xc0, 0x00,
+        0x66, 0x26, 0x19, 0x9c, 0x20, 0x00,
+        0x90, 0x49, 0x44, 0xb0, 0x38, 0x00,
+        0x01, 0xa5, 0xb0, 0xc4, 0x28, 0x00,
+        0x0e, 0x12, 0xa3, 0x0a, 0x50, 0x00,
+        0x13, 0x0b, 0x04, 0x56, 0xc0, 0x00,
+        0x20, 0xd0, 0x48, 0x64, 0xd0, 0x00,
+        0xc2, 0x51, 0x28, 0x8b, 0x00, 0x00,
+        0x29, 0x0c, 0x86, 0x03, 0x38, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+        0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+        0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+        0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_2[12] =
+    {
+        0xce, 0x67, 0x33, 0x9d, 0xc0, 0x00,
+        0x39, 0x9c, 0xce, 0x73, 0x38, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_20[120] =
+    {
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+        0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+        0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+        0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00,
+        0x44, 0xa2, 0x51, 0x29, 0xc0, 0x00,
+        0x66, 0x26, 0x19, 0x9c, 0x20, 0x00,
+        0x90, 0x49, 0x44, 0xb0, 0x38, 0x00,
+        0x01, 0xa5, 0xb0, 0xc4, 0x28, 0x00,
+        0x0e, 0x12, 0xa3, 0x0a, 0x50, 0x00,
+        0x13, 0x0b, 0x04, 0x56, 0xc0, 0x00,
+        0x20, 0xd0, 0x48, 0x64, 0xd0, 0x00,
+        0xc2, 0x51, 0x28, 0x8b, 0x00, 0x00,
+        0x29, 0x0c, 0x86, 0x03, 0x38, 0x00,
+        0xe5, 0x44, 0xda, 0x3a, 0xc8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_21[126] =
+    {
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+        0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+        0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+        0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_22[132] =
+    {
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+        0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+        0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+        0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00,
+        0xe4, 0xd3, 0xff, 0x5a, 0x28, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_23[138] =
+    {
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_24[144] =
+    {
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0xad, 0x58, 0xb2, 0x36, 0x68, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_25[150] =
+    {
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_26[156] =
+    {
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+        0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+        0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+        0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+        0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00,
+        0x7f, 0xb2, 0x5a, 0xaa, 0x20, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_27[162] =
+    {
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_28[168] =
+    {
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x7b, 0xc4, 0x24, 0xbf, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_29[174] =
+    {
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_3[18] =
+    {
+        0xcc, 0x66, 0x33, 0x19, 0xc0, 0x00,
+        0x27, 0x15, 0x89, 0xcb, 0x30, 0x00,
+        0x92, 0xc9, 0x64, 0xb4, 0x98, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_30[180] =
+    {
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+        0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+        0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+        0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+        0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+        0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+        0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00,
+        0x1d, 0x8e, 0x11, 0xb0, 0xe8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_31[186] =
+    {
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_32[192] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0xf7, 0x95, 0x57, 0x8c, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_33[198] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_34[204] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+        0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+        0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+        0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+        0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+        0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+        0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+        0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00,
+        0x31, 0x9c, 0xfb, 0x37, 0xc0, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_35[210] =
+    {
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_36[216] =
+    {
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0xc3, 0xc7, 0xce, 0xd8, 0x50, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_37[222] =
+    {
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+        0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+        0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+        0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+        0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+        0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+        0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00,
+        0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+        0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+        0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+        0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+        0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+        0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+        0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+        0x2c, 0x16, 0x13, 0x09, 0x80, 0x00,
+        0x91, 0x48, 0x99, 0x8a, 0x20, 0x00,
+        0xc0, 0xe0, 0x64, 0x54, 0x08, 0x00,
+        0x06, 0x83, 0x50, 0xa0, 0x98, 0x00,
+        0xc8, 0x64, 0x29, 0x00, 0x70, 0x00,
+        0x45, 0x22, 0x84, 0xd0, 0xc0, 0x00,
+        0x30, 0x98, 0x4c, 0x25, 0x20, 0x00,
+        0xa2, 0x51, 0x22, 0x28, 0x48, 0x00,
+        0xd0, 0x03, 0x42, 0x53, 0x00, 0x00,
+        0xee, 0xf5, 0xb3, 0x66, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_4[24] =
+    {
+        0xec, 0x76, 0x3b, 0x1c, 0xc0, 0x00,
+        0x67, 0x33, 0x99, 0xc6, 0x70, 0x00,
+        0xb1, 0xd8, 0xec, 0x73, 0x18, 0x00,
+        0x5a, 0xad, 0x56, 0xa5, 0xa8, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_5[30] =
+    {
+        0x4c, 0xa6, 0x53, 0x39, 0xc0, 0x00,
+        0x66, 0x33, 0x19, 0x8c, 0x70, 0x00,
+        0x19, 0xd0, 0xe8, 0x73, 0x18, 0x00,
+        0x9c, 0x89, 0x64, 0xa9, 0xa8, 0x00,
+        0xe3, 0x4c, 0x2e, 0x26, 0x60, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_6[36] =
+    {
+        0xcc, 0x26, 0x13, 0x0d, 0x80, 0x00,
+        0x45, 0x62, 0x91, 0x5a, 0x20, 0x00,
+        0xb0, 0x98, 0x4c, 0x34, 0x18, 0x00,
+        0x8a, 0x85, 0x62, 0xa0, 0xa8, 0x00,
+        0x29, 0x53, 0x09, 0x82, 0xd0, 0x00,
+        0xa6, 0x0a, 0xa5, 0x51, 0x40, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_7[42] =
+    {
+        0x44, 0xa2, 0x71, 0x28, 0xc0, 0x00,
+        0x23, 0x11, 0x88, 0xc6, 0x60, 0x00,
+        0x91, 0x48, 0xa4, 0x47, 0x08, 0x00,
+        0x0a, 0x85, 0x52, 0xa0, 0xa8, 0x00,
+        0x34, 0x1a, 0x0d, 0x12, 0x50, 0x00,
+        0x0b, 0x06, 0xa2, 0xd2, 0x80, 0x00,
+        0xe0, 0x64, 0x32, 0x09, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_8[48] =
+    {
+        0x16, 0x0b, 0x05, 0x84, 0xe0, 0x00,
+        0xc2, 0x61, 0x30, 0x91, 0x30, 0x00,
+        0x60, 0xb0, 0x58, 0x3a, 0x08, 0x00,
+        0x85, 0x42, 0xa1, 0x44, 0x98, 0x00,
+        0x4c, 0x26, 0x33, 0x08, 0x50, 0x00,
+        0x29, 0x14, 0x8a, 0x58, 0xc0, 0x00,
+        0x11, 0x88, 0xc4, 0x66, 0x30, 0x00,
+        0xb0, 0x58, 0x2c, 0x03, 0x18, 0x00
+    };
+
+    const WebRtc_UWord8 mask37_9[54] =
+    {
+        0x44, 0xa2, 0x51, 0x29, 0xc0, 0x00,
+        0x66, 0x26, 0x19, 0x9c, 0x20, 0x00,
+        0x90, 0x49, 0x44, 0xb0, 0x38, 0x00,
+        0x01, 0xa5, 0xb0, 0xc4, 0x28, 0x00,
+        0x0e, 0x12, 0xa3, 0x0a, 0x50, 0x00,
+        0x13, 0x0b, 0x04, 0x56, 0xc0, 0x00,
+        0x20, 0xd0, 0x48, 0x64, 0xd0, 0x00,
+        0xc2, 0x51, 0x28, 0x8b, 0x00, 0x00,
+        0x29, 0x0c, 0x86, 0x03, 0x38, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xfc, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_10[60] =
+    {
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+        0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+        0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+        0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+        0x50, 0x88, 0xca, 0x11, 0x18, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_11[66] =
+    {
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_12[72] =
+    {
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x90, 0xc8, 0x92, 0x19, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_13[78] =
+    {
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_14[84] =
+    {
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_15[90] =
+    {
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_16[96] =
+    {
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_17[102] =
+    {
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_18[108] =
+    {
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_19[114] =
+    {
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x4c, 0x26, 0x09, 0x84, 0xc0, 0x00,
+        0x66, 0x28, 0x8c, 0xc5, 0x10, 0x00,
+        0x91, 0x50, 0x32, 0x2a, 0x04, 0x00,
+        0x42, 0x82, 0x68, 0x50, 0x4c, 0x00,
+        0xa4, 0x01, 0xd4, 0x80, 0x38, 0x00,
+        0x13, 0x43, 0x02, 0x68, 0x60, 0x00,
+        0x30, 0x94, 0x86, 0x12, 0x90, 0x00,
+        0x88, 0xa1, 0x31, 0x14, 0x24, 0x00,
+        0x09, 0x4c, 0x01, 0x29, 0x80, 0x00,
+        0xcd, 0x98, 0x59, 0xb3, 0x08, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_2[12] =
+    {
+        0xce, 0x77, 0x19, 0xce, 0xe0, 0x00,
+        0x39, 0xcc, 0xe7, 0x39, 0x9c, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_20[120] =
+    {
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+        0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+        0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+        0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+        0x50, 0x88, 0xca, 0x11, 0x18, 0x00,
+        0x44, 0xa7, 0x08, 0x94, 0xe0, 0x00,
+        0x66, 0x70, 0x8c, 0xce, 0x10, 0x00,
+        0x12, 0xc0, 0xe2, 0x58, 0x1c, 0x00,
+        0xc3, 0x10, 0xb8, 0x62, 0x14, 0x00,
+        0x8c, 0x29, 0x51, 0x85, 0x28, 0x00,
+        0x11, 0x5b, 0x02, 0x2b, 0x60, 0x00,
+        0x21, 0x93, 0x44, 0x32, 0x68, 0x00,
+        0xa2, 0x2c, 0x14, 0x45, 0x80, 0x00,
+        0x18, 0x0c, 0xe3, 0x01, 0x9c, 0x00,
+        0xe6, 0xbc, 0x88, 0xe3, 0x78, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_21[126] =
+    {
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+        0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+        0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+        0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+        0x50, 0x88, 0xca, 0x11, 0x18, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_22[132] =
+    {
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+        0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+        0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+        0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+        0x50, 0x88, 0xca, 0x11, 0x18, 0x00,
+        0x0c, 0x3c, 0x48, 0x3d, 0x58, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_23[138] =
+    {
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x90, 0xc8, 0x92, 0x19, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_24[144] =
+    {
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x90, 0xc8, 0x92, 0x19, 0x10, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x93, 0xc8, 0xb3, 0xbe, 0x5c, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_25[150] =
+    {
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x90, 0xc8, 0x92, 0x19, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_26[156] =
+    {
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+        0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+        0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+        0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+        0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+        0x90, 0xc8, 0x92, 0x19, 0x10, 0x00,
+        0x4b, 0xab, 0xfc, 0xe6, 0xe8, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_27[162] =
+    {
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_28[168] =
+    {
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x70, 0x1b, 0x5b, 0x2c, 0x0c, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_29[174] =
+    {
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00
+    };
+
+    const WebRtc_UWord8 mask38_3[18] =
+    {
+        0xcc, 0x67, 0x19, 0x8c, 0xe0, 0x00,
+        0x27, 0x2c, 0xc4, 0xe5, 0x98, 0x00,
+        0x92, 0xd2, 0x72, 0x5a, 0x4c, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 30 rows x 6 bytes — presumably
+    // 30 FEC packets protecting 38 media packets (name mask38_30); do not hand-edit.
+    const WebRtc_UWord8 mask38_30[180] =
+    {
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+        0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+        0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+        0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+        0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+        0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+        0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00,
+        0x5b, 0x16, 0xdf, 0xb8, 0xd0, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 31 rows x 6 bytes — presumably
+    // 31 FEC packets protecting 38 media packets (name mask38_31); do not hand-edit.
+    const WebRtc_UWord8 mask38_31[186] =
+    {
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 32 rows x 6 bytes — presumably
+    // 32 FEC packets protecting 38 media packets (name mask38_32); do not hand-edit.
+    const WebRtc_UWord8 mask38_32[192] =
+    {
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x33, 0x10, 0x02, 0x4e, 0x54, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 33 rows x 6 bytes — presumably
+    // 33 FEC packets protecting 38 media packets (name mask38_33); do not hand-edit.
+    const WebRtc_UWord8 mask38_33[198] =
+    {
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 34 rows x 6 bytes — presumably
+    // 34 FEC packets protecting 38 media packets (name mask38_34); do not hand-edit.
+    const WebRtc_UWord8 mask38_34[204] =
+    {
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+        0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+        0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+        0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+        0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+        0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+        0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+        0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00,
+        0x91, 0x48, 0xfa, 0xf0, 0xd8, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 35 rows x 6 bytes — presumably
+    // 35 FEC packets protecting 38 media packets (name mask38_35); do not hand-edit.
+    const WebRtc_UWord8 mask38_35[210] =
+    {
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 36 rows x 6 bytes — presumably
+    // 36 FEC packets protecting 38 media packets (name mask38_36); do not hand-edit.
+    const WebRtc_UWord8 mask38_36[216] =
+    {
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x80, 0x95, 0xc2, 0x68, 0x28, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 37 rows x 6 bytes — presumably
+    // 37 FEC packets protecting 38 media packets (name mask38_37); do not hand-edit.
+    const WebRtc_UWord8 mask38_37[222] =
+    {
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x4c, 0x26, 0x09, 0x84, 0xc0, 0x00,
+        0x66, 0x28, 0x8c, 0xc5, 0x10, 0x00,
+        0x91, 0x50, 0x32, 0x2a, 0x04, 0x00,
+        0x42, 0x82, 0x68, 0x50, 0x4c, 0x00,
+        0xa4, 0x01, 0xd4, 0x80, 0x38, 0x00,
+        0x13, 0x43, 0x02, 0x68, 0x60, 0x00,
+        0x30, 0x94, 0x86, 0x12, 0x90, 0x00,
+        0x88, 0xa1, 0x31, 0x14, 0x24, 0x00,
+        0x09, 0x4c, 0x01, 0x29, 0x80, 0x00,
+        0xcd, 0x98, 0x59, 0xb3, 0x08, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 38 rows x 6 bytes — presumably
+    // 38 FEC packets protecting 38 media packets (name mask38_38); do not hand-edit.
+    const WebRtc_UWord8 mask38_38[228] =
+    {
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x4c, 0x26, 0x09, 0x84, 0xc0, 0x00,
+        0x66, 0x28, 0x8c, 0xc5, 0x10, 0x00,
+        0x91, 0x50, 0x32, 0x2a, 0x04, 0x00,
+        0x42, 0x82, 0x68, 0x50, 0x4c, 0x00,
+        0xa4, 0x01, 0xd4, 0x80, 0x38, 0x00,
+        0x13, 0x43, 0x02, 0x68, 0x60, 0x00,
+        0x30, 0x94, 0x86, 0x12, 0x90, 0x00,
+        0x88, 0xa1, 0x31, 0x14, 0x24, 0x00,
+        0x09, 0x4c, 0x01, 0x29, 0x80, 0x00,
+        0xcd, 0x98, 0x59, 0xb3, 0x08, 0x00,
+        0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+        0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+        0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+        0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+        0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+        0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+        0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+        0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+        0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+        0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+        0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+        0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+        0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+        0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+        0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+        0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+        0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+        0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00,
+        0x8c, 0xed, 0x11, 0x5f, 0x24, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 4 rows x 6 bytes — presumably
+    // 4 FEC packets protecting 38 media packets (name mask38_4); do not hand-edit.
+    const WebRtc_UWord8 mask38_4[24] =
+    {
+        0xec, 0x73, 0x1d, 0x8e, 0x60, 0x00,
+        0x67, 0x19, 0xcc, 0xe3, 0x38, 0x00,
+        0xb1, 0xcc, 0x76, 0x39, 0x8c, 0x00,
+        0x5a, 0x96, 0xab, 0x52, 0xd4, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 5 rows x 6 bytes — presumably
+    // 5 FEC packets protecting 38 media packets (name mask38_5); do not hand-edit.
+    const WebRtc_UWord8 mask38_5[30] =
+    {
+        0x4c, 0xe7, 0x09, 0x9c, 0xe0, 0x00,
+        0x66, 0x31, 0xcc, 0xc6, 0x38, 0x00,
+        0xa1, 0xcc, 0x74, 0x39, 0x8c, 0x00,
+        0x92, 0xa6, 0xb2, 0x54, 0xd4, 0x00,
+        0xb8, 0x99, 0x97, 0x13, 0x30, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 6 rows x 6 bytes — presumably
+    // 6 FEC packets protecting 38 media packets (name mask38_6); do not hand-edit.
+    const WebRtc_UWord8 mask38_6[36] =
+    {
+        0x4c, 0x36, 0x09, 0x86, 0xc0, 0x00,
+        0x45, 0x68, 0x88, 0xad, 0x10, 0x00,
+        0x30, 0xd0, 0x66, 0x1a, 0x0c, 0x00,
+        0x8a, 0x82, 0xb1, 0x50, 0x54, 0x00,
+        0x26, 0x0b, 0x44, 0xc1, 0x68, 0x00,
+        0x95, 0x45, 0x12, 0xa8, 0xa0, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 7 rows x 6 bytes — presumably
+    // 7 FEC packets protecting 38 media packets (name mask38_7); do not hand-edit.
+    const WebRtc_UWord8 mask38_7[42] =
+    {
+        0xc4, 0xa3, 0x18, 0x94, 0x60, 0x00,
+        0x23, 0x19, 0x84, 0x63, 0x30, 0x00,
+        0x91, 0x1c, 0x32, 0x23, 0x84, 0x00,
+        0x4a, 0x82, 0xa9, 0x50, 0x54, 0x00,
+        0x34, 0x49, 0x46, 0x89, 0x28, 0x00,
+        0x8b, 0x4a, 0x11, 0x69, 0x40, 0x00,
+        0xc8, 0x24, 0xd9, 0x04, 0x98, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 8 rows x 6 bytes — presumably
+    // 8 FEC packets protecting 38 media packets (name mask38_8); do not hand-edit.
+    const WebRtc_UWord8 mask38_8[48] =
+    {
+        0x16, 0x13, 0x82, 0xc2, 0x70, 0x00,
+        0xc2, 0x44, 0xd8, 0x48, 0x98, 0x00,
+        0x60, 0xe8, 0x2c, 0x1d, 0x04, 0x00,
+        0x85, 0x12, 0x70, 0xa2, 0x4c, 0x00,
+        0xcc, 0x21, 0x59, 0x84, 0x28, 0x00,
+        0x29, 0x63, 0x05, 0x2c, 0x60, 0x00,
+        0x11, 0x98, 0xc2, 0x33, 0x18, 0x00,
+        0xb0, 0x0c, 0x76, 0x01, 0x8c, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 9 rows x 6 bytes — presumably
+    // 9 FEC packets protecting 38 media packets (name mask38_9); do not hand-edit.
+    const WebRtc_UWord8 mask38_9[54] =
+    {
+        0x44, 0xa7, 0x08, 0x94, 0xe0, 0x00,
+        0x66, 0x70, 0x8c, 0xce, 0x10, 0x00,
+        0x12, 0xc0, 0xe2, 0x58, 0x1c, 0x00,
+        0xc3, 0x10, 0xb8, 0x62, 0x14, 0x00,
+        0x8c, 0x29, 0x51, 0x85, 0x28, 0x00,
+        0x11, 0x5b, 0x02, 0x2b, 0x60, 0x00,
+        0x21, 0x93, 0x44, 0x32, 0x68, 0x00,
+        0xa2, 0x2c, 0x14, 0x45, 0x80, 0x00,
+        0x18, 0x0c, 0xe3, 0x01, 0x9c, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 1 row x 6 bytes — presumably a
+    // single FEC packet protecting 39 media packets (all mask bits set except the
+    // zero-padded tail); do not hand-edit.
+    const WebRtc_UWord8 mask39_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xfe, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 10 rows x 6 bytes — presumably
+    // 10 FEC packets protecting 39 media packets (name mask39_10); do not hand-edit.
+    const WebRtc_UWord8 mask39_10[60] =
+    {
+        0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+        0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+        0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+        0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+        0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+        0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+        0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+        0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+        0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+        0x50, 0x88, 0xc6, 0x11, 0x84, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 11 rows x 6 bytes — presumably
+    // 11 FEC packets protecting 39 media packets (name mask39_11); do not hand-edit.
+    const WebRtc_UWord8 mask39_11[66] =
+    {
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 12 rows x 6 bytes — presumably
+    // 12 FEC packets protecting 39 media packets (name mask39_12); do not hand-edit.
+    const WebRtc_UWord8 mask39_12[72] =
+    {
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 13 rows x 6 bytes — presumably
+    // 13 FEC packets protecting 39 media packets (name mask39_13); do not hand-edit.
+    const WebRtc_UWord8 mask39_13[78] =
+    {
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 14 rows x 6 bytes — presumably
+    // 14 FEC packets protecting 39 media packets (name mask39_14); do not hand-edit.
+    const WebRtc_UWord8 mask39_14[84] =
+    {
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 15 rows x 6 bytes — presumably
+    // 15 FEC packets protecting 39 media packets (name mask39_15); do not hand-edit.
+    const WebRtc_UWord8 mask39_15[90] =
+    {
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 16 rows x 6 bytes — presumably
+    // 16 FEC packets protecting 39 media packets (name mask39_16); do not hand-edit.
+    const WebRtc_UWord8 mask39_16[96] =
+    {
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 17 rows x 6 bytes — presumably
+    // 17 FEC packets protecting 39 media packets (name mask39_17); do not hand-edit.
+    const WebRtc_UWord8 mask39_17[102] =
+    {
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 18 rows x 6 bytes — presumably
+    // 18 FEC packets protecting 39 media packets (name mask39_18); do not hand-edit.
+    const WebRtc_UWord8 mask39_18[108] =
+    {
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 19 rows x 6 bytes — presumably
+    // 19 FEC packets protecting 39 media packets (name mask39_19); do not hand-edit.
+    const WebRtc_UWord8 mask39_19[114] =
+    {
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+        0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+        0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+        0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+        0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+        0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+        0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+        0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+        0xcd, 0x98, 0x46, 0x11, 0x84, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 2 rows x 6 bytes — presumably
+    // 2 FEC packets protecting 39 media packets (name mask39_2); do not hand-edit.
+    const WebRtc_UWord8 mask39_2[12] =
+    {
+        0xce, 0x77, 0x1d, 0xc7, 0x70, 0x00,
+        0x39, 0xcc, 0xf3, 0x3c, 0xce, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 20 rows x 6 bytes — presumably
+    // 20 FEC packets protecting 39 media packets (name mask39_20); do not hand-edit.
+    const WebRtc_UWord8 mask39_20[120] =
+    {
+        0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+        0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+        0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+        0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+        0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+        0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+        0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+        0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+        0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+        0x50, 0x88, 0xc6, 0x11, 0x84, 0x00,
+        0x44, 0xa7, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x70, 0x8c, 0x47, 0x18, 0x00,
+        0x12, 0xc0, 0xf0, 0x3c, 0x0e, 0x00,
+        0xc3, 0x10, 0xbc, 0x29, 0x0a, 0x00,
+        0x8c, 0x29, 0x42, 0x72, 0x94, 0x00,
+        0x11, 0x5b, 0x16, 0x85, 0xa0, 0x00,
+        0x21, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0xa2, 0x2c, 0x0b, 0x0a, 0xc2, 0x00,
+        0x18, 0x0c, 0xe9, 0x30, 0xca, 0x00,
+        0x0d, 0xba, 0x52, 0x38, 0xbc, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 21 rows x 6 bytes — presumably
+    // 21 FEC packets protecting 39 media packets (name mask39_21); do not hand-edit.
+    const WebRtc_UWord8 mask39_21[126] =
+    {
+        0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+        0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+        0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+        0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+        0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+        0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+        0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+        0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+        0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+        0x50, 0x88, 0xc6, 0x11, 0x84, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 22 rows x 6 bytes — presumably
+    // 22 FEC packets protecting 39 media packets (name mask39_22); do not hand-edit.
+    const WebRtc_UWord8 mask39_22[132] =
+    {
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+        0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+        0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+        0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+        0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+        0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+        0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+        0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+        0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+        0x50, 0x88, 0xc6, 0x11, 0x84, 0x00,
+        0xfc, 0x5a, 0xb2, 0x13, 0x12, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 23 rows x 6 bytes — presumably
+    // 23 FEC packets protecting 39 media packets (name mask39_23); do not hand-edit.
+    const WebRtc_UWord8 mask39_23[138] =
+    {
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 24 rows x 6 bytes — presumably
+    // 24 FEC packets protecting 39 media packets (name mask39_24); do not hand-edit.
+    const WebRtc_UWord8 mask39_24[144] =
+    {
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0xac, 0xbc, 0xf0, 0xff, 0x62, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 25 rows x 6 bytes — presumably
+    // 25 FEC packets protecting 39 media packets (name mask39_25); do not hand-edit.
+    const WebRtc_UWord8 mask39_25[150] =
+    {
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 26 rows x 6 bytes — presumably
+    // 26 FEC packets protecting 39 media packets (name mask39_26); do not hand-edit.
+    const WebRtc_UWord8 mask39_26[156] =
+    {
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+        0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+        0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+        0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+        0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+        0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00,
+        0x10, 0x17, 0x44, 0x72, 0xec, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 27 rows x 6 bytes — presumably
+    // 27 FEC packets protecting 39 media packets (name mask39_27); do not hand-edit.
+    const WebRtc_UWord8 mask39_27[162] =
+    {
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00
+    };
+
+    // NOTE(review): generated FEC packet-mask table, 28 rows x 6 bytes — presumably
+    // 28 FEC packets protecting 39 media packets (name mask39_28); do not hand-edit.
+    const WebRtc_UWord8 mask39_28[168] =
+    {
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x86, 0xb6, 0x04, 0xbc, 0x1e, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_29[174] =
+    {
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_3[18] =
+    {
+        0xcc, 0x67, 0x19, 0xc6, 0x70, 0x00,
+        0x27, 0x2c, 0xca, 0xb2, 0xac, 0x00,
+        0x92, 0xd2, 0x76, 0x2d, 0x46, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_30[180] =
+    {
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+        0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+        0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+        0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+        0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00,
+        0xb3, 0x1d, 0x13, 0x03, 0x5a, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_31[186] =
+    {
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_32[192] =
+    {
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x83, 0x1a, 0x3c, 0x2a, 0x7a, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_33[198] =
+    {
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_34[204] =
+    {
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+        0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+        0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+        0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+        0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+        0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00,
+        0xc6, 0xbb, 0x7e, 0xd9, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_35[210] =
+    {
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_36[216] =
+    {
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x11, 0x78, 0xfe, 0x43, 0xd6, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_37[222] =
+    {
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+        0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+        0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+        0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+        0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+        0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+        0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+        0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+        0xcd, 0x98, 0x46, 0x11, 0x84, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_38[228] =
+    {
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+        0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+        0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+        0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+        0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+        0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+        0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+        0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+        0xcd, 0x98, 0x46, 0x11, 0x84, 0x00,
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+        0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+        0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+        0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+        0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+        0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+        0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+        0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+        0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00,
+        0x9e, 0xd8, 0x3c, 0x7e, 0x2e, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_39[234] =
+    {
+        0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+        0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+        0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+        0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+        0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+        0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+        0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+        0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+        0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+        0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+        0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+        0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+        0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+        0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+        0xcd, 0x98, 0x46, 0x11, 0x84, 0x00,
+        0x4c, 0x27, 0x09, 0x82, 0x60, 0x00,
+        0x66, 0x71, 0x8a, 0x22, 0x88, 0x00,
+        0x91, 0x40, 0xf4, 0x0d, 0x02, 0x00,
+        0x42, 0x90, 0xa0, 0x98, 0x26, 0x00,
+        0xa4, 0x29, 0x40, 0x70, 0x1c, 0x00,
+        0x13, 0x5a, 0x10, 0xc4, 0x30, 0x00,
+        0x30, 0x93, 0x45, 0x21, 0x48, 0x00,
+        0x88, 0xac, 0x28, 0x4a, 0x12, 0x00,
+        0x09, 0x0c, 0xd3, 0x04, 0xc0, 0x00,
+        0x4c, 0x26, 0x06, 0x11, 0x84, 0x00,
+        0x66, 0x28, 0x89, 0xc2, 0x70, 0x00,
+        0x91, 0x50, 0x3c, 0x67, 0x18, 0x00,
+        0x42, 0x82, 0x70, 0x3c, 0x0e, 0x00,
+        0xa4, 0x01, 0xc4, 0x29, 0x0a, 0x00,
+        0x13, 0x43, 0x0a, 0x52, 0x94, 0x00,
+        0x30, 0x94, 0x96, 0x85, 0xa0, 0x00,
+        0x88, 0xa1, 0x24, 0xd1, 0x34, 0x00,
+        0x09, 0x4c, 0x0b, 0x0a, 0xc2, 0x00,
+        0xcd, 0x98, 0x43, 0x30, 0xcc, 0x00,
+        0x1d, 0x04, 0x3e, 0xf1, 0xb4, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_4[24] =
+    {
+        0xec, 0x73, 0x1c, 0xc7, 0x30, 0x00,
+        0x67, 0x19, 0xc6, 0x71, 0x9c, 0x00,
+        0xb1, 0xcc, 0x73, 0x1c, 0xc6, 0x00,
+        0x5a, 0x96, 0xa5, 0xa9, 0x6a, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_5[30] =
+    {
+        0x4c, 0xe7, 0x19, 0xc6, 0x70, 0x00,
+        0x66, 0x31, 0xcc, 0x73, 0x1c, 0x00,
+        0xa1, 0xcc, 0x73, 0x1c, 0xa6, 0x00,
+        0x92, 0xa6, 0xa5, 0x6a, 0x6a, 0x00,
+        0xb8, 0x99, 0x96, 0x8b, 0x94, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_6[36] =
+    {
+        0x4c, 0x36, 0x09, 0x83, 0x60, 0x00,
+        0x45, 0x68, 0x8a, 0x26, 0x88, 0x00,
+        0x30, 0xd0, 0x64, 0x1d, 0x06, 0x00,
+        0x8a, 0x82, 0xb0, 0xa8, 0x2a, 0x00,
+        0x26, 0x0b, 0x40, 0xd0, 0xd4, 0x00,
+        0x95, 0x45, 0x13, 0x44, 0x30, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_7[42] =
+    {
+        0xc4, 0xa3, 0x09, 0xc2, 0x30, 0x00,
+        0x23, 0x19, 0x86, 0x65, 0x80, 0x00,
+        0x91, 0x1c, 0x22, 0x01, 0xd6, 0x00,
+        0x4a, 0x82, 0xb0, 0x2a, 0x2a, 0x00,
+        0x34, 0x49, 0x44, 0x98, 0x94, 0x00,
+        0x8b, 0x4a, 0x1a, 0x84, 0x60, 0x00,
+        0xc8, 0x24, 0xc1, 0x94, 0x4c, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_8[48] =
+    {
+        0x16, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xc2, 0x44, 0xd1, 0x34, 0x4c, 0x00,
+        0x60, 0xe8, 0x3a, 0x0e, 0x82, 0x00,
+        0x85, 0x12, 0x64, 0x99, 0x26, 0x00,
+        0xcc, 0x21, 0x5c, 0x52, 0x14, 0x00,
+        0x29, 0x63, 0x18, 0xc6, 0x30, 0x00,
+        0x11, 0x98, 0xc6, 0x31, 0x8c, 0x00,
+        0xb0, 0x0c, 0x63, 0x18, 0xc6, 0x00
+    };
+
+    const WebRtc_UWord8 mask39_9[54] =
+    {
+        0x44, 0xa7, 0x09, 0xc2, 0x70, 0x00,
+        0x66, 0x70, 0x8c, 0x47, 0x18, 0x00,
+        0x12, 0xc0, 0xf0, 0x3c, 0x0e, 0x00,
+        0xc3, 0x10, 0xbc, 0x29, 0x0a, 0x00,
+        0x8c, 0x29, 0x42, 0x72, 0x94, 0x00,
+        0x11, 0x5b, 0x16, 0x85, 0xa0, 0x00,
+        0x21, 0x93, 0x44, 0xd1, 0x34, 0x00,
+        0xa2, 0x2c, 0x0b, 0x0a, 0xc2, 0x00,
+        0x18, 0x0c, 0xe9, 0x30, 0xca, 0x00
+    };
+
+    const WebRtc_UWord8 mask3_1[2] =
+    {
+        0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask3_2[4] =
+    {
+        0xc0, 0x00,
+        0xa0, 0x00
+    };
+
+    const WebRtc_UWord8 mask3_3[6] =
+    {
+        0xc0, 0x00,
+        0xa0, 0x00,
+        0x60, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_10[60] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_11[66] =
+    {
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_12[72] =
+    {
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_13[78] =
+    {
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_14[84] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_15[90] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_16[96] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_17[102] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_18[108] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_19[114] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_2[12] =
+    {
+        0xee, 0x3b, 0x8e, 0xe3, 0xb8, 0x00,
+        0x99, 0xe6, 0x79, 0x9e, 0x67, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_20[120] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0xf7, 0x8d, 0xaf, 0x78, 0xda, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_21[126] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_22[132] =
+    {
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0x89, 0xee, 0x1f, 0x38, 0xca, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_23[138] =
+    {
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_24[144] =
+    {
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x68, 0xde, 0x83, 0xa9, 0xcf, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_25[150] =
+    {
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_26[156] =
+    {
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+        0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+        0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00,
+        0x06, 0x8e, 0x8c, 0x1a, 0xd2, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_27[162] =
+    {
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_28[168] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x68, 0x0e, 0x9b, 0x52, 0xb6, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_29[174] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_3[18] =
+    {
+        0xce, 0x33, 0x8c, 0xe3, 0x38, 0x00,
+        0x55, 0x95, 0x65, 0x59, 0x56, 0x00,
+        0xb1, 0x6a, 0x3b, 0x16, 0xa3, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_30[180] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+        0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+        0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+        0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00,
+        0xe1, 0x47, 0x04, 0x05, 0x47, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_31[186] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_32[192] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x03, 0x0c, 0x46, 0x10, 0xc5, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_33[198] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_34[204] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+        0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+        0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+        0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+        0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+        0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00,
+        0x87, 0x3c, 0x08, 0x19, 0x31, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_35[210] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_36[216] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x1e, 0xb9, 0x3d, 0x25, 0xcc, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_37[222] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_38[228] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+        0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+        0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+        0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+        0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00,
+        0xea, 0xaa, 0x20, 0xa2, 0x1b, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_39[234] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0xf7, 0x8d, 0xaf, 0x78, 0xda, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_4[24] =
+    {
+        0xe6, 0x39, 0x8e, 0x63, 0x98, 0x00,
+        0x33, 0x8c, 0xe3, 0x38, 0xce, 0x00,
+        0x98, 0xe6, 0x39, 0x8e, 0x63, 0x00,
+        0x2d, 0x4b, 0x52, 0xd4, 0xb5, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_40[240] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0xf7, 0x8d, 0xaf, 0x78, 0xda, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+        0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+        0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+        0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+        0xa6, 0xf3, 0xab, 0x1b, 0x87, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_5[30] =
+    {
+        0xce, 0x33, 0x8c, 0xe3, 0x38, 0x00,
+        0x63, 0x98, 0xe6, 0x39, 0x8e, 0x00,
+        0x98, 0xe5, 0x39, 0x8e, 0x53, 0x00,
+        0x2b, 0x53, 0x52, 0xb5, 0x35, 0x00,
+        0xb4, 0x5c, 0xab, 0x45, 0xca, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_6[36] =
+    {
+        0x4c, 0x1b, 0x04, 0xc1, 0xb0, 0x00,
+        0x51, 0x34, 0x45, 0x13, 0x44, 0x00,
+        0x20, 0xe8, 0x32, 0x0e, 0x83, 0x00,
+        0x85, 0x41, 0x58, 0x54, 0x15, 0x00,
+        0x06, 0x86, 0xa0, 0x68, 0x6a, 0x00,
+        0x9a, 0x21, 0x89, 0xa2, 0x18, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_7[42] =
+    {
+        0x4e, 0x11, 0x84, 0xe1, 0x18, 0x00,
+        0x33, 0x2c, 0x03, 0x32, 0xc0, 0x00,
+        0x10, 0x0e, 0xb1, 0x00, 0xeb, 0x00,
+        0x81, 0x51, 0x58, 0x15, 0x15, 0x00,
+        0x24, 0xc4, 0xa2, 0x4c, 0x4a, 0x00,
+        0xd4, 0x23, 0x0d, 0x42, 0x30, 0x00,
+        0x0c, 0xa2, 0x60, 0xca, 0x26, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_8[48] =
+    {
+        0x27, 0x09, 0xc2, 0x70, 0x9c, 0x00,
+        0x89, 0xa2, 0x68, 0x9a, 0x26, 0x00,
+        0xd0, 0x74, 0x1d, 0x07, 0x41, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+        0xe2, 0x90, 0xae, 0x29, 0x0a, 0x00,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x31, 0x8c, 0x63, 0x18, 0xc6, 0x00,
+        0x18, 0xc6, 0x31, 0x8c, 0x63, 0x00
+    };
+
+    const WebRtc_UWord8 mask40_9[54] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+        0x62, 0x38, 0xc6, 0x23, 0x8c, 0x00,
+        0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+        0xe1, 0x48, 0x5e, 0x14, 0x85, 0x00,
+        0x13, 0x94, 0xa1, 0x39, 0x4a, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+        0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+        0x49, 0x86, 0x54, 0x98, 0x65, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_10[60] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_11[66] =
+    {
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_12[72] =
+    {
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_13[78] =
+    {
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_14[84] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_15[90] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_16[96] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_17[102] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_18[108] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_19[114] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_2[12] =
+    {
+        0xee, 0x3b, 0x8e, 0xe3, 0xb3, 0x00,
+        0x99, 0xe6, 0x79, 0x9e, 0x6e, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_20[120] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_21[126] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_22[132] =
+    {
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x33, 0x09, 0x6e, 0x49, 0x6b, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_23[138] =
+    {
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_24[144] =
+    {
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x45, 0xa6, 0xef, 0xc9, 0xc3, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_25[150] =
+    {
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_26[156] =
+    {
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+        0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+        0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+        0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+        0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+        0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80,
+        0x6f, 0x72, 0xf1, 0xe7, 0x1a, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_27[162] =
+    {
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_28[168] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x61, 0x2c, 0xfa, 0x25, 0x38, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_29[174] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_3[18] =
+    {
+        0xce, 0x33, 0x8c, 0xe3, 0x2b, 0x00,
+        0x55, 0x95, 0x65, 0x5d, 0xc5, 0x00,
+        0xb1, 0x6a, 0x3a, 0x8e, 0xd8, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_30[180] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+        0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+        0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+        0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+        0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+        0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+        0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00,
+        0x59, 0x53, 0x31, 0x62, 0x15, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_31[186] =
+    {
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_32[192] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0xca, 0xbb, 0xcb, 0x6d, 0xaa, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_33[198] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_34[204] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+        0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+        0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+        0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+        0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+        0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+        0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+        0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80,
+        0xbd, 0x37, 0x3f, 0x75, 0x36, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_35[210] =
+    {
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_36[216] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0xc1, 0xb1, 0x80, 0xbe, 0x3e, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_37[222] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_38[228] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+        0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+        0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+        0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+        0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+        0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+        0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+        0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+        0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80,
+        0xea, 0xc8, 0xbb, 0xd4, 0x5d, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_39[234] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_4[24] =
+    {
+        0xe6, 0x39, 0x8e, 0x63, 0x13, 0x00,
+        0x33, 0x8c, 0xe3, 0x38, 0xc5, 0x80,
+        0x98, 0xe6, 0x39, 0x8d, 0x2c, 0x80,
+        0x2d, 0x4b, 0x52, 0xd4, 0xb2, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_40[240] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0xe8, 0x07, 0x18, 0x9a, 0x02, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_41[246] =
+    {
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+        0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+        0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+        0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+        0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+        0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+        0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+        0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+        0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+        0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00,
+        0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+        0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+        0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+        0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+        0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+        0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+        0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+        0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+        0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+        0x4e, 0x13, 0x84, 0xc1, 0x19, 0x00,
+        0xe3, 0x38, 0xc5, 0x10, 0xcc, 0x80,
+        0x81, 0xe0, 0x7a, 0x06, 0x64, 0x00,
+        0x21, 0x48, 0x50, 0x4c, 0x16, 0x00,
+        0x52, 0x94, 0xa0, 0x3a, 0x02, 0x80,
+        0xb4, 0x2d, 0x08, 0x62, 0x11, 0x00,
+        0x26, 0x89, 0xa2, 0x91, 0x01, 0x80,
+        0x58, 0x56, 0x14, 0x24, 0x2a, 0x00,
+        0x19, 0x86, 0x69, 0x81, 0xa0, 0x00,
+        0xf7, 0x8d, 0xa3, 0x08, 0x40, 0x80,
+        0x2b, 0xea, 0x4d, 0xf4, 0xc1, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_5[30] =
+    {
+        0xce, 0x33, 0x8c, 0xe3, 0x1b, 0x00,
+        0x63, 0x98, 0xe6, 0x39, 0x8d, 0x80,
+        0x98, 0xe5, 0x39, 0x8c, 0x76, 0x80,
+        0x2b, 0x53, 0x54, 0xd6, 0xb5, 0x00,
+        0xb4, 0x5c, 0xab, 0x26, 0xca, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_6[36] =
+    {
+        0x4c, 0x1b, 0x04, 0xc1, 0x91, 0x00,
+        0x51, 0x34, 0x45, 0x11, 0x45, 0x00,
+        0x20, 0xe8, 0x32, 0x0e, 0xa0, 0x80,
+        0x85, 0x41, 0x58, 0x54, 0x12, 0x80,
+        0x06, 0x86, 0xa0, 0x68, 0x0d, 0x80,
+        0x9a, 0x21, 0x88, 0xa2, 0x43, 0x00
+    };
+
+    const WebRtc_UWord8 mask41_7[42] =
+    {
+        0x4e, 0x11, 0x8c, 0x61, 0x19, 0x00,
+        0x33, 0x2c, 0x03, 0x30, 0x4c, 0x80,
+        0x10, 0x0e, 0xb1, 0x86, 0x74, 0x00,
+        0x81, 0x51, 0x54, 0x54, 0x2d, 0x00,
+        0x24, 0xc4, 0xa1, 0x2d, 0x42, 0x80,
+        0xd4, 0x23, 0x0b, 0x42, 0x83, 0x00,
+        0x0c, 0xa2, 0x62, 0x99, 0x21, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_8[48] =
+    {
+        0x27, 0x09, 0xc0, 0x70, 0xa7, 0x00,
+        0x89, 0xa2, 0x64, 0x9a, 0x82, 0x80,
+        0xd0, 0x74, 0x1b, 0x07, 0xa0, 0x00,
+        0x24, 0xc9, 0x32, 0x4c, 0x5c, 0x00,
+        0xe2, 0x90, 0xa5, 0x28, 0x0e, 0x80,
+        0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+        0x31, 0x8c, 0x63, 0x19, 0x41, 0x80,
+        0x18, 0xc6, 0x31, 0x8c, 0x70, 0x80
+    };
+
+    const WebRtc_UWord8 mask41_9[54] =
+    {
+        0x4e, 0x13, 0x84, 0xe1, 0x11, 0x00,
+        0x62, 0x38, 0xc6, 0x21, 0xa0, 0x80,
+        0x81, 0xe0, 0x78, 0x0e, 0x94, 0x00,
+        0xe1, 0x48, 0x5a, 0x15, 0x05, 0x00,
+        0x13, 0x94, 0xa5, 0x30, 0x06, 0x80,
+        0xb4, 0x2d, 0x0a, 0x42, 0x43, 0x00,
+        0x26, 0x89, 0xa1, 0x6a, 0x08, 0x80,
+        0x58, 0x56, 0x15, 0x84, 0x52, 0x00,
+        0x49, 0x86, 0x52, 0x98, 0x68, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_10[60] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_11[66] =
+    {
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_12[72] =
+    {
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_13[78] =
+    {
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_14[84] =
+    {
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_15[90] =
+    {
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_16[96] =
+    {
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_17[102] =
+    {
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_18[108] =
+    {
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_19[114] =
+    {
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_2[12] =
+    {
+        0xee, 0x3b, 0x37, 0x71, 0xd9, 0x80,
+        0x99, 0xe6, 0xec, 0xcf, 0x37, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_20[120] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2a, 0x03, 0x31, 0x50, 0x19, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_21[126] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4c, 0x11, 0x92, 0x60, 0x8c, 0x80,
+        0x51, 0x0c, 0xca, 0x88, 0x66, 0x40,
+        0xa0, 0x66, 0x45, 0x03, 0x32, 0x00,
+        0x04, 0xc1, 0x60, 0x26, 0x0b, 0x00,
+        0x03, 0xa0, 0x28, 0x1d, 0x01, 0x40,
+        0x86, 0x21, 0x14, 0x31, 0x08, 0x80,
+        0x29, 0x10, 0x19, 0x48, 0x80, 0xc0,
+        0x42, 0x42, 0xa2, 0x12, 0x15, 0x00,
+        0x98, 0x1a, 0x04, 0xc0, 0xd0, 0x00,
+        0x30, 0x84, 0x09, 0x84, 0x20, 0x40,
+        0xdf, 0x4c, 0x16, 0xfa, 0x60, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_22[132] =
+    {
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0xdb, 0x36, 0xb0, 0x33, 0x14, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_23[138] =
+    {
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_24[144] =
+    {
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0,
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x2e, 0x1c, 0x92, 0xbb, 0x07, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_25[150] =
+    {
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_26[156] =
+    {
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+        0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+        0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+        0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+        0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+        0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0,
+        0xb8, 0x41, 0xed, 0xa3, 0x77, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_27[162] =
+    {
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_28[168] =
+    {
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0xc3, 0x3c, 0x56, 0xc2, 0x30, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_29[174] =
+    {
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_3[18] =
+    {
+        0xce, 0x32, 0xb6, 0x71, 0x95, 0x80,
+        0x55, 0xdc, 0x52, 0xae, 0xe2, 0x80,
+        0xa8, 0xed, 0x8d, 0x47, 0x6c, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_30[180] =
+    {
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+        0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+        0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+        0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+        0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+        0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+        0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00,
+        0xf5, 0xdd, 0x0d, 0x58, 0xeb, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_31[186] =
+    {
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_32[192] =
+    {
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0xf9, 0x1f, 0xb6, 0xe1, 0x09, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_33[198] =
+    {
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_34[204] =
+    {
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+        0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+        0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+        0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+        0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+        0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+        0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+        0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0,
+        0xf8, 0xbf, 0xf6, 0x76, 0x1b, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_35[210] =
+    {
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_36[216] =
+    {
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x57, 0xc7, 0x03, 0xf9, 0xc6, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_37[222] =
+    {
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_38[228] =
+    {
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+        0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+        0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+        0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+        0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+        0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+        0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+        0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+        0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0,
+        0x05, 0x19, 0x55, 0xee, 0xe2, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_39[234] =
+    {
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2a, 0x03, 0x31, 0x50, 0x19, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_4[24] =
+    {
+        0xe6, 0x31, 0x37, 0x31, 0x89, 0x80,
+        0x33, 0x8c, 0x59, 0x9c, 0x62, 0xc0,
+        0x98, 0xd2, 0xcc, 0xc6, 0x96, 0x40,
+        0x2d, 0x4b, 0x29, 0x6a, 0x59, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_40[240] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2a, 0x03, 0x31, 0x50, 0x19, 0x80,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0xf9, 0xdb, 0x5d, 0x7a, 0xd4, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_41[246] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2a, 0x03, 0x31, 0x50, 0x19, 0x80,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4c, 0x11, 0x92, 0x60, 0x8c, 0x80,
+        0x51, 0x0c, 0xca, 0x88, 0x66, 0x40,
+        0xa0, 0x66, 0x45, 0x03, 0x32, 0x00,
+        0x04, 0xc1, 0x60, 0x26, 0x0b, 0x00,
+        0x03, 0xa0, 0x28, 0x1d, 0x01, 0x40,
+        0x86, 0x21, 0x14, 0x31, 0x08, 0x80,
+        0x29, 0x10, 0x19, 0x48, 0x80, 0xc0,
+        0x42, 0x42, 0xa2, 0x12, 0x15, 0x00,
+        0x98, 0x1a, 0x04, 0xc0, 0xd0, 0x00,
+        0x30, 0x84, 0x09, 0x84, 0x20, 0x40,
+        0xdf, 0x4c, 0x16, 0xfa, 0x60, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_42[252] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4c, 0x11, 0x92, 0x60, 0x8c, 0x80,
+        0x51, 0x0c, 0xca, 0x88, 0x66, 0x40,
+        0xa0, 0x66, 0x45, 0x03, 0x32, 0x00,
+        0x04, 0xc1, 0x60, 0x26, 0x0b, 0x00,
+        0x03, 0xa0, 0x28, 0x1d, 0x01, 0x40,
+        0x86, 0x21, 0x14, 0x31, 0x08, 0x80,
+        0x29, 0x10, 0x19, 0x48, 0x80, 0xc0,
+        0x42, 0x42, 0xa2, 0x12, 0x15, 0x00,
+        0x98, 0x1a, 0x04, 0xc0, 0xd0, 0x00,
+        0x30, 0x84, 0x09, 0x84, 0x20, 0x40,
+        0xdf, 0x4c, 0x16, 0xfa, 0x60, 0x80,
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+        0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+        0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+        0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+        0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+        0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+        0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+        0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+        0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+        0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+        0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+        0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+        0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+        0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+        0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+        0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+        0x2a, 0x03, 0x31, 0x50, 0x19, 0x80,
+        0xea, 0x9e, 0x23, 0xb3, 0x65, 0x00
+    };
+
+    const WebRtc_UWord8 mask42_5[30] =
+    {
+        0xce, 0x31, 0xb6, 0x71, 0x8d, 0x80,
+        0x63, 0x98, 0xdb, 0x1c, 0xc6, 0xc0,
+        0x98, 0xc7, 0x6c, 0xc6, 0x3b, 0x40,
+        0x4d, 0x6b, 0x52, 0x6b, 0x5a, 0x80,
+        0xb2, 0x6c, 0xad, 0x93, 0x65, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_6[36] =
+    {
+        0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+        0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+        0x20, 0xea, 0x09, 0x07, 0x50, 0x40,
+        0x85, 0x41, 0x2c, 0x2a, 0x09, 0x40,
+        0x06, 0x80, 0xd8, 0x34, 0x06, 0xc0,
+        0x8a, 0x24, 0x34, 0x51, 0x21, 0x80
+    };
+
+    const WebRtc_UWord8 mask42_7[42] =
+    {
+        0xc6, 0x11, 0x96, 0x30, 0x8c, 0x80,
+        0x33, 0x04, 0xc9, 0x98, 0x26, 0x40,
+        0x18, 0x67, 0x40, 0xc3, 0x3a, 0x00,
+        0x45, 0x42, 0xd2, 0x2a, 0x16, 0x80,
+        0x12, 0xd4, 0x28, 0x96, 0xa1, 0x40,
+        0xb4, 0x28, 0x35, 0xa1, 0x41, 0x80,
+        0x29, 0x92, 0x19, 0x4c, 0x90, 0xc0
+    };
+
+    const WebRtc_UWord8 mask42_8[48] =
+    {
+        0x07, 0x0a, 0x70, 0x38, 0x53, 0x80,
+        0x49, 0xa8, 0x2a, 0x4d, 0x41, 0x40,
+        0xb0, 0x7a, 0x05, 0x83, 0xd0, 0x00,
+        0x24, 0xc5, 0xc1, 0x26, 0x2e, 0x00,
+        0x52, 0x80, 0xea, 0x94, 0x07, 0x40,
+        0xc6, 0x31, 0x86, 0x31, 0x8c, 0x00,
+        0x31, 0x94, 0x19, 0x8c, 0xa0, 0xc0,
+        0x18, 0xc7, 0x08, 0xc6, 0x38, 0x40
+    };
+
+    const WebRtc_UWord8 mask42_9[54] =
+    {
+        0x4e, 0x11, 0x12, 0x70, 0x88, 0x80,
+        0x62, 0x1a, 0x0b, 0x10, 0xd0, 0x40,
+        0x80, 0xe9, 0x44, 0x07, 0x4a, 0x00,
+        0xa1, 0x50, 0x55, 0x0a, 0x82, 0x80,
+        0x53, 0x00, 0x6a, 0x98, 0x03, 0x40,
+        0xa4, 0x24, 0x35, 0x21, 0x21, 0x80,
+        0x16, 0xa0, 0x88, 0xb5, 0x04, 0x40,
+        0x58, 0x45, 0x22, 0xc2, 0x29, 0x00,
+        0x29, 0x86, 0x81, 0x4c, 0x34, 0x00
+    };
+
+    const WebRtc_UWord8 mask43_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xe0
+    };
+
+    const WebRtc_UWord8 mask43_10[60] =
+    {
+        0x4c, 0x19, 0x16, 0x01, 0xc4, 0x40,
+        0x51, 0x14, 0x51, 0x80, 0x71, 0x40,
+        0xa0, 0x6a, 0x47, 0x40, 0x38, 0x00,
+        0x04, 0xc1, 0x34, 0x28, 0x45, 0x40,
+        0x03, 0xb4, 0x06, 0x84, 0x90, 0x80,
+        0x86, 0x20, 0x94, 0x32, 0x82, 0x40,
+        0x29, 0x08, 0x4a, 0x53, 0x40, 0x60,
+        0x42, 0x43, 0x08, 0x0d, 0x03, 0xa0,
+        0x98, 0x12, 0x82, 0x64, 0x0c, 0x80,
+        0x30, 0x84, 0xab, 0x11, 0x20, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_11[66] =
+    {
+        0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+        0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+        0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+        0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+        0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+        0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+        0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+        0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+        0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+        0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+        0xe0, 0x22, 0x91, 0x82, 0x30, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_12[72] =
+    {
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+        0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+        0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+        0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+        0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+        0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0
+    };
+
+    const WebRtc_UWord8 mask43_13[78] =
+    {
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_14[84] =
+    {
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_15[90] =
+    {
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_16[96] =
+    {
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_17[102] =
+    {
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00
+    };
+
+    const WebRtc_UWord8 mask43_18[108] =
+    {
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_19[114] =
+    {
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0
+    };
+
+    const WebRtc_UWord8 mask43_2[12] =
+    {
+        0xee, 0x3b, 0x37, 0x66, 0xec, 0xc0,
+        0x99, 0xe6, 0xec, 0xdd, 0x9b, 0xa0
+    };
+
+    const WebRtc_UWord8 mask43_20[120] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2a, 0x03, 0x31, 0xda, 0x46, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_21[126] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+        0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+        0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+        0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+        0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+        0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+        0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+        0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_22[132] =
+    {
+        0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+        0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+        0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+        0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+        0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+        0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+        0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+        0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+        0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+        0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+        0xe0, 0x22, 0x91, 0x82, 0x30, 0x40,
+        0x4c, 0x19, 0x16, 0x01, 0xc4, 0x40,
+        0x51, 0x14, 0x51, 0x80, 0x71, 0x40,
+        0xa0, 0x6a, 0x47, 0x40, 0x38, 0x00,
+        0x04, 0xc1, 0x34, 0x28, 0x45, 0x40,
+        0x03, 0xb4, 0x06, 0x84, 0x90, 0x80,
+        0x86, 0x20, 0x94, 0x32, 0x82, 0x40,
+        0x29, 0x08, 0x4a, 0x53, 0x40, 0x60,
+        0x42, 0x43, 0x08, 0x0d, 0x03, 0xa0,
+        0x98, 0x12, 0x82, 0x64, 0x0c, 0x80,
+        0x30, 0x84, 0xab, 0x11, 0x20, 0x20,
+        0xfe, 0x2c, 0x85, 0xcc, 0x24, 0x80
+    };
+
+    const WebRtc_UWord8 mask43_23[138] =
+    {
+        0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+        0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+        0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+        0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+        0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+        0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+        0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+        0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+        0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+        0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+        0xe0, 0x22, 0x91, 0x82, 0x30, 0x40,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+        0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+        0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+        0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+        0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+        0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0
+    };
+
+    const WebRtc_UWord8 mask43_24[144] =
+    {
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+        0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+        0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+        0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+        0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+        0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0,
+        0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+        0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+        0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+        0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+        0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+        0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+        0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+        0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+        0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+        0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+        0xe0, 0x22, 0x91, 0x82, 0x30, 0x40,
+        0xf9, 0xb1, 0x26, 0x6c, 0x51, 0xe0
+    };
+
+    const WebRtc_UWord8 mask43_25[150] =
+    {
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+        0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+        0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+        0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+        0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+        0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_26[156] =
+    {
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+        0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+        0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+        0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+        0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+        0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0,
+        0xef, 0x84, 0x77, 0xca, 0x0d, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_27[162] =
+    {
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_28[168] =
+    {
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x16, 0xc9, 0x53, 0x1e, 0xc4, 0x00
+    };
+
+    const WebRtc_UWord8 mask43_29[174] =
+    {
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_3[18] =
+    {
+        0xce, 0x32, 0xb6, 0x56, 0xca, 0xc0,
+        0x55, 0xdc, 0x57, 0x8a, 0xf1, 0x40,
+        0xa8, 0xed, 0x8d, 0xb1, 0xae, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_30[180] =
+    {
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+        0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+        0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+        0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40,
+        0x79, 0x4a, 0x8f, 0x42, 0x79, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_31[186] =
+    {
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_32[192] =
+    {
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0xd1, 0xd1, 0x11, 0xa4, 0xed, 0xc0
+    };
+
+    const WebRtc_UWord8 mask43_33[198] =
+    {
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00
+    };
+
+    const WebRtc_UWord8 mask43_34[204] =
+    {
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+        0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+        0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+        0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+        0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+        0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20,
+        0x76, 0x81, 0x4d, 0x33, 0x66, 0x00
+    };
+
+    const WebRtc_UWord8 mask43_35[210] =
+    {
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_36[216] =
+    {
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0xa3, 0x85, 0x0a, 0xb5, 0x11, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_37[222] =
+    {
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0
+    };
+
+    const WebRtc_UWord8 mask43_38[228] =
+    {
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+        0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+        0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+        0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+        0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+        0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+        0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+        0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+        0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60,
+        0x9a, 0x16, 0x97, 0x21, 0xb9, 0x80
+    };
+
+    const WebRtc_UWord8 mask43_39[234] =
+    {
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2a, 0x03, 0x31, 0xda, 0x46, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_4[24] =
+    {
+        0xe6, 0x31, 0x36, 0x26, 0xc4, 0xc0,
+        0x33, 0x8c, 0x59, 0x8b, 0x31, 0x60,
+        0x98, 0xd2, 0xca, 0x59, 0x4b, 0x20,
+        0x2d, 0x4b, 0x29, 0x65, 0x2c, 0xa0
+    };
+
+    const WebRtc_UWord8 mask43_40[240] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2a, 0x03, 0x31, 0xda, 0x46, 0x20,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x3a, 0xab, 0x77, 0x63, 0xef, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_41[246] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2a, 0x03, 0x31, 0xda, 0x46, 0x20,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+        0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+        0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+        0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+        0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+        0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+        0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+        0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_42[252] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+        0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+        0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+        0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+        0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+        0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+        0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+        0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40,
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+        0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+        0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+        0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+        0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+        0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+        0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+        0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+        0x2a, 0x03, 0x31, 0xda, 0x46, 0x20,
+        0x26, 0x84, 0x10, 0xcd, 0xf7, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_43[258] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+        0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+        0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+        0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+        0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+        0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+        0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+        0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+        0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+        0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+        0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+        0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+        0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+        0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+        0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+        0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+        0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+        0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40,
+        0x4c, 0x19, 0x12, 0x32, 0x46, 0x40,
+        0x51, 0x14, 0x51, 0x99, 0x33, 0x20,
+        0xa0, 0x6a, 0x44, 0xc8, 0x99, 0x00,
+        0x04, 0xc1, 0x30, 0x2c, 0x05, 0x80,
+        0x03, 0xb4, 0x04, 0x05, 0x80, 0xa0,
+        0x86, 0x20, 0x94, 0x22, 0x84, 0x40,
+        0x29, 0x08, 0x4a, 0x03, 0x40, 0x60,
+        0x42, 0x43, 0x08, 0x54, 0x0a, 0x80,
+        0x98, 0x12, 0x83, 0x40, 0x68, 0x00,
+        0x30, 0x84, 0xa8, 0x81, 0x10, 0x20,
+        0x4c, 0x11, 0x91, 0x82, 0x30, 0x40,
+        0x51, 0x0c, 0xcb, 0x22, 0x64, 0x40,
+        0xa0, 0x66, 0x42, 0x8a, 0x51, 0x40,
+        0x04, 0xc1, 0x65, 0x48, 0xa9, 0x00,
+        0x03, 0xa0, 0x28, 0x26, 0x04, 0xc0,
+        0x86, 0x21, 0x16, 0x80, 0xd0, 0x00,
+        0x29, 0x10, 0x1c, 0x12, 0x82, 0x40,
+        0x42, 0x42, 0xa1, 0x09, 0x21, 0x20,
+        0x98, 0x1a, 0x00, 0x61, 0x0c, 0x20,
+        0x30, 0x84, 0x0a, 0x50, 0x4a, 0x00,
+        0xdf, 0x4c, 0x10, 0x95, 0x12, 0xa0,
+        0x72, 0x06, 0x94, 0xf6, 0x74, 0x40
+    };
+
+    const WebRtc_UWord8 mask43_5[30] =
+    {
+        0xce, 0x31, 0xb6, 0x36, 0xc6, 0xc0,
+        0x63, 0x98, 0xdb, 0x1b, 0x63, 0x60,
+        0x98, 0xc7, 0x68, 0xed, 0x1d, 0xa0,
+        0x4d, 0x6b, 0x55, 0x6a, 0xad, 0x40,
+        0xb2, 0x6c, 0xad, 0x95, 0xb2, 0xa0
+    };
+
+    const WebRtc_UWord8 mask43_6[36] =
+    {
+        0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+        0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+        0x20, 0xea, 0x0d, 0x41, 0xa8, 0x20,
+        0x85, 0x41, 0x2e, 0x25, 0x04, 0xa0,
+        0x06, 0x80, 0xd8, 0x1b, 0x03, 0x60,
+        0x8a, 0x24, 0x34, 0x86, 0x90, 0xc0
+    };
+
+    const WebRtc_UWord8 mask43_7[42] =
+    {
+        0xc6, 0x11, 0x96, 0x32, 0x46, 0x40,
+        0x33, 0x04, 0xc8, 0x99, 0x33, 0x20,
+        0x18, 0x67, 0x44, 0x68, 0x9d, 0x00,
+        0x45, 0x42, 0xd4, 0x5a, 0x0b, 0x40,
+        0x12, 0xd4, 0x2a, 0x95, 0x50, 0xa0,
+        0xb4, 0x28, 0x35, 0x16, 0xa0, 0xc0,
+        0x29, 0x92, 0x1b, 0x0d, 0x41, 0x60
+    };
+
+    const WebRtc_UWord8 mask43_8[48] =
+    {
+        0x07, 0x0a, 0x71, 0x44, 0x29, 0xc0,
+        0x49, 0xa8, 0x29, 0x0f, 0xa0, 0x20,
+        0xb0, 0x7a, 0x07, 0x48, 0xe8, 0x00,
+        0x24, 0xc5, 0xc0, 0xb8, 0x17, 0x00,
+        0x52, 0x80, 0xec, 0x1d, 0x02, 0xa0,
+        0xc6, 0x31, 0x82, 0x30, 0xc7, 0x40,
+        0x31, 0x94, 0x1a, 0x83, 0x50, 0x60,
+        0x18, 0xc7, 0x08, 0xe1, 0x1c, 0x20
+    };
+
+    const WebRtc_UWord8 mask43_9[54] =
+    {
+        0x4e, 0x11, 0x12, 0x22, 0x46, 0x40,
+        0x62, 0x1a, 0x09, 0x41, 0x68, 0x60,
+        0x80, 0xe9, 0x41, 0x28, 0xa5, 0x00,
+        0xa1, 0x50, 0x52, 0xc8, 0x51, 0x00,
+        0x53, 0x00, 0x68, 0x1d, 0x01, 0xa0,
+        0xa4, 0x24, 0x36, 0x06, 0x80, 0xc0,
+        0x16, 0xa0, 0x8d, 0x11, 0x82, 0x20,
+        0x58, 0x45, 0x20, 0xa4, 0x16, 0x80,
+        0x29, 0x86, 0x84, 0xd0, 0x1c, 0x00
+    };
+
+    const WebRtc_UWord8 mask44_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xf0
+    };
+
+    const WebRtc_UWord8 mask44_10[60] =
+    {
+        0xc0, 0x38, 0x8b, 0x00, 0xe2, 0x20,
+        0x30, 0x0e, 0x28, 0xc0, 0x38, 0xa0,
+        0xe8, 0x07, 0x03, 0xa0, 0x1c, 0x00,
+        0x85, 0x08, 0xaa, 0x14, 0x22, 0xa0,
+        0xd0, 0x92, 0x13, 0x42, 0x48, 0x40,
+        0x86, 0x50, 0x4a, 0x19, 0x41, 0x20,
+        0x4a, 0x68, 0x0d, 0x29, 0xa0, 0x30,
+        0x01, 0xa0, 0x74, 0x06, 0x81, 0xd0,
+        0x4c, 0x81, 0x91, 0x32, 0x06, 0x40,
+        0x62, 0x24, 0x05, 0x88, 0x90, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_11[66] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_12[72] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+        0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+        0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+        0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+        0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+        0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70
+    };
+
+    const WebRtc_UWord8 mask44_13[78] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0
+    };
+
+    const WebRtc_UWord8 mask44_14[84] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0
+    };
+
+    const WebRtc_UWord8 mask44_15[90] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_16[96] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_17[102] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00
+    };
+
+    const WebRtc_UWord8 mask44_18[108] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0
+    };
+
+    const WebRtc_UWord8 mask44_19[114] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50
+    };
+
+    const WebRtc_UWord8 mask44_2[12] =
+    {
+        0xec, 0xdd, 0x9b, 0xb3, 0x76, 0x60,
+        0x9b, 0xb3, 0x76, 0x6e, 0xcd, 0xd0
+    };
+
+    const WebRtc_UWord8 mask44_20[120] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_21[126] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_22[132] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x9e, 0xce, 0x8a, 0x7b, 0x3a, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_23[138] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+        0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+        0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+        0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+        0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+        0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70
+    };
+
+    const WebRtc_UWord8 mask44_24[144] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+        0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+        0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+        0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+        0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+        0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x15, 0x0f, 0x44, 0x6d, 0x9d, 0xa0
+    };
+
+    const WebRtc_UWord8 mask44_25[150] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+        0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+        0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+        0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+        0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+        0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0
+    };
+
+    const WebRtc_UWord8 mask44_26[156] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+        0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+        0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+        0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+        0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+        0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70,
+        0x02, 0xcb, 0x64, 0xb8, 0x55, 0x80
+    };
+
+    const WebRtc_UWord8 mask44_27[162] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0
+    };
+
+    const WebRtc_UWord8 mask44_28[168] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x66, 0x26, 0x6c, 0x91, 0xc7, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_29[174] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_3[18] =
+    {
+        0xca, 0xd9, 0x5b, 0x2b, 0x65, 0x60,
+        0xf1, 0x5e, 0x2b, 0xc5, 0x78, 0xa0,
+        0xb6, 0x35, 0xc6, 0xd8, 0xd7, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_30[180] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+        0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+        0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0,
+        0x60, 0xf4, 0x75, 0x84, 0x90, 0xc0
+    };
+
+    const WebRtc_UWord8 mask44_31[186] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_32[192] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x3e, 0x39, 0x86, 0x5c, 0xd9, 0xd0
+    };
+
+    const WebRtc_UWord8 mask44_33[198] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00
+    };
+
+    const WebRtc_UWord8 mask44_34[204] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+        0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+        0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+        0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+        0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10,
+        0xb5, 0xc7, 0xe8, 0x0c, 0xb9, 0x90
+    };
+
+    const WebRtc_UWord8 mask44_35[210] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0
+    };
+
+    const WebRtc_UWord8 mask44_36[216] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0xa6, 0x92, 0x01, 0x65, 0x91, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_37[222] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50
+    };
+
+    const WebRtc_UWord8 mask44_38[228] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+        0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+        0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+        0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+        0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+        0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0,
+        0x43, 0x64, 0xf2, 0xe5, 0x5d, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_39[234] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_4[24] =
+    {
+        0xc4, 0xd8, 0x9b, 0x13, 0x62, 0x60,
+        0x31, 0x66, 0x2c, 0xc5, 0x98, 0xb0,
+        0x4b, 0x29, 0x65, 0x2c, 0xa5, 0x90,
+        0x2c, 0xa5, 0x94, 0xb2, 0x96, 0x50
+    };
+
+    const WebRtc_UWord8 mask44_40[240] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0xd8, 0x2a, 0x16, 0x26, 0x51, 0x40
+    };
+
+    const WebRtc_UWord8 mask44_41[246] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_42[252] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+        0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+        0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+        0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+        0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+        0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+        0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+        0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+        0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10,
+        0xd9, 0xc1, 0x6f, 0xa8, 0x1c, 0x90
+    };
+
+    const WebRtc_UWord8 mask44_43[258] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x9e, 0xce, 0x8a, 0x7b, 0x3a, 0x20
+    };
+
+    const WebRtc_UWord8 mask44_44[264] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x9e, 0xce, 0x8a, 0x7b, 0x3a, 0x20,
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+        0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+        0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+        0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+        0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+        0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+        0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+        0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+        0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+        0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+        0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+        0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+        0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+        0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+        0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+        0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+        0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+        0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+        0xb5, 0x1c, 0x1c, 0x21, 0xac, 0xa0
+    };
+
+    const WebRtc_UWord8 mask44_5[30] =
+    {
+        0xc6, 0xd8, 0xdb, 0x1b, 0x63, 0x60,
+        0x63, 0x6c, 0x6d, 0x8d, 0xb1, 0xb0,
+        0x1d, 0xa3, 0xb4, 0x76, 0x8e, 0xd0,
+        0xad, 0x55, 0xaa, 0xb5, 0x56, 0xa0,
+        0xb2, 0xb6, 0x56, 0xca, 0xd9, 0x50
+    };
+
+    const WebRtc_UWord8 mask44_6[36] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+        0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+        0xa8, 0x35, 0x06, 0xa0, 0xd4, 0x10,
+        0xc4, 0xa0, 0x97, 0x12, 0x82, 0x50,
+        0x03, 0x60, 0x6c, 0x0d, 0x81, 0xb0,
+        0x90, 0xd2, 0x1a, 0x43, 0x48, 0x60
+    };
+
+    const WebRtc_UWord8 mask44_7[42] =
+    {
+        0xc6, 0x48, 0xcb, 0x19, 0x23, 0x20,
+        0x13, 0x26, 0x64, 0x4c, 0x99, 0x90,
+        0x8d, 0x13, 0xa2, 0x34, 0x4e, 0x80,
+        0x8b, 0x41, 0x6a, 0x2d, 0x05, 0xa0,
+        0x52, 0xaa, 0x15, 0x4a, 0xa8, 0x50,
+        0xa2, 0xd4, 0x1a, 0x8b, 0x50, 0x60,
+        0x61, 0xa8, 0x2d, 0x86, 0xa0, 0xb0
+    };
+
+    const WebRtc_UWord8 mask44_8[48] =
+    {
+        0x28, 0x85, 0x38, 0xa2, 0x14, 0xe0,
+        0x21, 0xf4, 0x04, 0x87, 0xd0, 0x10,
+        0xe9, 0x1d, 0x03, 0xa4, 0x74, 0x00,
+        0x17, 0x02, 0xe0, 0x5c, 0x0b, 0x80,
+        0x83, 0xa0, 0x56, 0x0e, 0x81, 0x50,
+        0x46, 0x18, 0xe9, 0x18, 0x63, 0xa0,
+        0x50, 0x6a, 0x0d, 0x41, 0xa8, 0x30,
+        0x1c, 0x23, 0x84, 0x70, 0x8e, 0x10
+    };
+
+    const WebRtc_UWord8 mask44_9[54] =
+    {
+        0x44, 0x48, 0xc9, 0x11, 0x23, 0x20,
+        0x28, 0x2d, 0x0c, 0xa0, 0xb4, 0x30,
+        0x25, 0x14, 0xa0, 0x94, 0x52, 0x80,
+        0x59, 0x0a, 0x21, 0x64, 0x28, 0x80,
+        0x03, 0xa0, 0x34, 0x0e, 0x80, 0xd0,
+        0xc0, 0xd0, 0x1b, 0x03, 0x40, 0x60,
+        0xa2, 0x30, 0x46, 0x88, 0xc1, 0x10,
+        0x14, 0x82, 0xd0, 0x52, 0x0b, 0x40,
+        0x9a, 0x03, 0x82, 0x68, 0x0e, 0x00
+    };
+
+    const WebRtc_UWord8 mask45_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xf8
+    };
+
+    const WebRtc_UWord8 mask45_10[60] =
+    {
+        0xc0, 0x38, 0x89, 0x91, 0x28, 0xa0,
+        0x30, 0x0e, 0x29, 0x45, 0x22, 0x88,
+        0xe8, 0x07, 0x02, 0xa4, 0x40, 0x68,
+        0x85, 0x08, 0xa8, 0x13, 0x12, 0x10,
+        0xd0, 0x92, 0x13, 0x40, 0x05, 0x10,
+        0x86, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x4a, 0x68, 0x0c, 0x84, 0xdc, 0x00,
+        0x01, 0xa0, 0x74, 0x30, 0x84, 0x88,
+        0x4c, 0x81, 0x91, 0x28, 0x2b, 0x00,
+        0x62, 0x24, 0x04, 0x4a, 0xd1, 0x40
+    };
+
+    const WebRtc_UWord8 mask45_11[66] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10
+    };
+
+    const WebRtc_UWord8 mask45_12[72] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+        0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+        0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+        0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+        0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+        0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90
+    };
+
+    const WebRtc_UWord8 mask45_13[78] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30
+    };
+
+    const WebRtc_UWord8 mask45_14[84] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8
+    };
+
+    const WebRtc_UWord8 mask45_15[90] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80
+    };
+
+    const WebRtc_UWord8 mask45_16[96] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0
+    };
+
+    const WebRtc_UWord8 mask45_17[102] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70
+    };
+
+    const WebRtc_UWord8 mask45_18[108] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58
+    };
+
+    const WebRtc_UWord8 mask45_19[114] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40
+    };
+
+    const WebRtc_UWord8 mask45_2[12] =
+    {
+        0xec, 0xdd, 0x9b, 0xb3, 0x76, 0x60,
+        0x9b, 0xb3, 0x76, 0x6e, 0xc9, 0xd8
+    };
+
+    const WebRtc_UWord8 mask45_20[120] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8
+    };
+
+    const WebRtc_UWord8 mask45_21[126] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10
+    };
+
+    const WebRtc_UWord8 mask45_22[132] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68
+    };
+
+    const WebRtc_UWord8 mask45_23[138] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+        0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+        0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+        0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+        0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+        0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90
+    };
+
+    const WebRtc_UWord8 mask45_24[144] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+        0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+        0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+        0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+        0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+        0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x95, 0x91, 0xad, 0xd9, 0x86, 0x98
+    };
+
+    const WebRtc_UWord8 mask45_25[150] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+        0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+        0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+        0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+        0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+        0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30
+    };
+
+    const WebRtc_UWord8 mask45_26[156] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+        0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+        0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+        0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+        0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+        0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90,
+        0xb0, 0xfd, 0xb2, 0xf3, 0x8a, 0xc0
+    };
+
+    const WebRtc_UWord8 mask45_27[162] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8
+    };
+
+    const WebRtc_UWord8 mask45_28[168] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x44, 0x46, 0x28, 0xfb, 0x66, 0x80
+    };
+
+    const WebRtc_UWord8 mask45_29[174] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80
+    };
+
+    const WebRtc_UWord8 mask45_3[18] =
+    {
+        0xca, 0xd9, 0x5b, 0x2b, 0x4d, 0x90,
+        0xf1, 0x5e, 0x2b, 0xc5, 0x24, 0xe8,
+        0xb6, 0x35, 0xc5, 0xd8, 0x9f, 0x40
+    };
+
+    const WebRtc_UWord8 mask45_30[180] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+        0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+        0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+        0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+        0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8,
+        0x1c, 0xc9, 0x43, 0x25, 0xa7, 0x00
+    };
+
+    const WebRtc_UWord8 mask45_31[186] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0
+    };
+
+    const WebRtc_UWord8 mask45_32[192] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x40, 0x7e, 0xc1, 0x30, 0x29, 0x50
+    };
+
+    const WebRtc_UWord8 mask45_33[198] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70
+    };
+
+    const WebRtc_UWord8 mask45_34[204] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+        0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+        0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+        0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+        0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0,
+        0x1f, 0x78, 0x45, 0x5e, 0x46, 0x50
+    };
+
+    const WebRtc_UWord8 mask45_35[210] =
+    {
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58
+    };
+
+    const WebRtc_UWord8 mask45_36[216] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0xd0, 0x1a, 0xf0, 0x14, 0xf0, 0xe8
+    };
+
+    const WebRtc_UWord8 mask45_37[222] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40
+    };
+
+    const WebRtc_UWord8 mask45_38[228] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+        0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+        0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+        0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+        0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+        0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+        0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+        0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58,
+        0x04, 0x67, 0x1b, 0xba, 0x1d, 0xa0
+    };
+
+    const WebRtc_UWord8 mask45_39[234] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8
+    };
+
+    const WebRtc_UWord8 mask45_4[24] =
+    {
+        0xc4, 0xd8, 0x9b, 0x13, 0x45, 0x90,
+        0x31, 0x66, 0x2c, 0xc5, 0x8a, 0x58,
+        0x4b, 0x29, 0x65, 0x2c, 0x91, 0x68,
+        0x2c, 0xa5, 0x94, 0xb2, 0xa2, 0xa8
+    };
+
+    const WebRtc_UWord8 mask45_40[240] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0xd9, 0x40, 0x46, 0xe6, 0x4f, 0xd8
+    };
+
+    const WebRtc_UWord8 mask45_41[246] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10
+    };
+
+    const WebRtc_UWord8 mask45_42[252] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+        0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+        0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+        0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+        0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+        0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+        0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+        0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+        0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8,
+        0xac, 0xcc, 0x04, 0x41, 0x97, 0x30
+    };
+
+    const WebRtc_UWord8 mask45_43[258] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68
+    };
+
+    const WebRtc_UWord8 mask45_44[264] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0xf8, 0x40, 0xe3, 0x2e, 0x16, 0x00
+    };
+
+    const WebRtc_UWord8 mask45_45[270] =
+    {
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+        0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+        0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+        0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+        0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+        0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+        0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+        0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+        0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+        0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68,
+        0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+        0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+        0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+        0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+        0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+        0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+        0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+        0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+        0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+        0x64, 0x4c, 0x89, 0x19, 0x08, 0x30,
+        0x51, 0x4a, 0x28, 0xcc, 0x81, 0x18,
+        0xa9, 0x15, 0x22, 0x64, 0x20, 0x28,
+        0x04, 0xc0, 0x98, 0x16, 0x10, 0xc0,
+        0xd0, 0x1a, 0x02, 0x02, 0xc0, 0x88,
+        0x82, 0x50, 0x4a, 0x11, 0x0a, 0x40,
+        0x21, 0x24, 0x25, 0x01, 0xcc, 0x00,
+        0x0c, 0x21, 0x84, 0x2a, 0x04, 0x48,
+        0x4a, 0x09, 0x41, 0xa0, 0x31, 0x00,
+        0x12, 0xa2, 0x54, 0x40, 0x92, 0x10,
+        0x9e, 0xce, 0x88, 0xc1, 0x45, 0x00,
+        0xfb, 0x97, 0x5d, 0x7d, 0x42, 0x20
+    };
+
+    const WebRtc_UWord8 mask45_5[30] =
+    {
+        0xc6, 0xd8, 0xdb, 0x1b, 0x29, 0xb0,
+        0x63, 0x6c, 0x6d, 0x8d, 0xb2, 0x58,
+        0x1d, 0xa3, 0xb4, 0x76, 0x87, 0x70,
+        0xad, 0x55, 0xaa, 0xb5, 0x54, 0xe0,
+        0xb2, 0xb6, 0x56, 0xca, 0xdc, 0x18
+    };
+
+    const WebRtc_UWord8 mask45_6[36] =
+    {
+        0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+        0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+        0xa8, 0x35, 0x04, 0x32, 0x90, 0xc0,
+        0xc4, 0xa0, 0x96, 0x84, 0x89, 0x18,
+        0x03, 0x60, 0x6c, 0x4a, 0x84, 0x70,
+        0x90, 0xd2, 0x1a, 0x29, 0x17, 0x00
+    };
+
+    const WebRtc_UWord8 mask45_7[42] =
+    {
+        0xc6, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x13, 0x26, 0x64, 0xcc, 0x90, 0x98,
+        0x8d, 0x13, 0xa2, 0x46, 0x48, 0x48,
+        0x8b, 0x41, 0x6a, 0x90, 0x81, 0x28,
+        0x52, 0xaa, 0x15, 0x42, 0x83, 0x50,
+        0xa2, 0xd4, 0x1a, 0x13, 0x16, 0x00,
+        0x61, 0xa8, 0x2c, 0x25, 0xc4, 0x30
+    };
+
+    const WebRtc_UWord8 mask45_8[48] =
+    {
+        0x28, 0x85, 0x38, 0x32, 0x10, 0x30,
+        0x21, 0xf4, 0x06, 0x01, 0xc0, 0x18,
+        0xe9, 0x1d, 0x02, 0x82, 0x21, 0x20,
+        0x17, 0x02, 0xe0, 0x15, 0x00, 0xc8,
+        0x83, 0xa0, 0x55, 0x0c, 0x0a, 0x08,
+        0x46, 0x18, 0xe8, 0x68, 0x05, 0x40,
+        0x50, 0x6a, 0x0d, 0x80, 0x9c, 0x00,
+        0x1c, 0x23, 0x84, 0x50, 0xe2, 0x80
+    };
+
+    const WebRtc_UWord8 mask45_9[54] =
+    {
+        0x44, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+        0x28, 0x2d, 0x0d, 0x89, 0xf2, 0x10,
+        0x25, 0x14, 0xa2, 0x30, 0x12, 0x20,
+        0x59, 0x0a, 0x20, 0x05, 0xd0, 0x88,
+        0x03, 0xa0, 0x34, 0x1e, 0x0c, 0x18,
+        0xc0, 0xd0, 0x1a, 0x82, 0x01, 0xc8,
+        0xa2, 0x30, 0x44, 0x62, 0xc5, 0x08,
+        0x14, 0x82, 0xd2, 0x44, 0x02, 0x48,
+        0x9a, 0x03, 0x81, 0xe0, 0x00, 0x70
+    };
+
+    const WebRtc_UWord8 mask46_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xfc
+    };
+
+    const WebRtc_UWord8 mask46_10[60] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0
+    };
+
+    const WebRtc_UWord8 mask46_11[66] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08
+    };
+
+    const WebRtc_UWord8 mask46_12[72] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+        0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+        0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+        0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+        0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+        0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8
+    };
+
+    const WebRtc_UWord8 mask46_13[78] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18
+    };
+
+    const WebRtc_UWord8 mask46_14[84] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64
+    };
+
+    const WebRtc_UWord8 mask46_15[90] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40
+    };
+
+    const WebRtc_UWord8 mask46_16[96] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8
+    };
+
+    const WebRtc_UWord8 mask46_17[102] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38
+    };
+
+    const WebRtc_UWord8 mask46_18[108] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x82, 0x32, 0x57, 0x04, 0x64, 0xac
+    };
+
+    const WebRtc_UWord8 mask46_19[114] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0
+    };
+
+    const WebRtc_UWord8 mask46_2[12] =
+    {
+        0xec, 0xdd, 0x99, 0xd9, 0xbb, 0x30,
+        0x9b, 0xb2, 0x77, 0x37, 0x64, 0xec
+    };
+
+    const WebRtc_UWord8 mask46_20[120] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4
+    };
+
+    const WebRtc_UWord8 mask46_21[126] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08
+    };
+
+    const WebRtc_UWord8 mask46_22[132] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34
+    };
+
+    const WebRtc_UWord8 mask46_23[138] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x46, 0x42, 0x0c, 0x8c, 0x84, 0x18,
+        0x33, 0x20, 0x46, 0x66, 0x40, 0x8c,
+        0x99, 0x08, 0x0b, 0x32, 0x10, 0x14,
+        0x05, 0x84, 0x30, 0x0b, 0x08, 0x60,
+        0x80, 0xb0, 0x23, 0x01, 0x60, 0x44,
+        0x84, 0x42, 0x91, 0x08, 0x85, 0x20,
+        0x40, 0x73, 0x00, 0x80, 0xe6, 0x00,
+        0x0a, 0x81, 0x12, 0x15, 0x02, 0x24,
+        0x68, 0x0c, 0x40, 0xd0, 0x18, 0x80,
+        0x10, 0x24, 0x84, 0x20, 0x49, 0x08,
+        0x30, 0x51, 0x40, 0x60, 0xa2, 0x80,
+        0x5f, 0x50, 0x88, 0xbe, 0xa1, 0x10
+    };
+
+    const WebRtc_UWord8 mask46_24[144] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+        0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+        0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+        0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+        0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+        0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x18, 0x8b, 0x03, 0xb4, 0x3b, 0x10
+    };
+
+    const WebRtc_UWord8 mask46_25[150] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+        0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+        0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+        0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+        0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+        0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18
+    };
+
+    const WebRtc_UWord8 mask46_26[156] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+        0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+        0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+        0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+        0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+        0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8,
+        0x2d, 0x6d, 0xd2, 0x57, 0xd6, 0x2c
+    };
+
+    const WebRtc_UWord8 mask46_27[162] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64
+    };
+
+    const WebRtc_UWord8 mask46_28[168] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0xfa, 0x52, 0xf9, 0x72, 0xd9, 0x68
+    };
+
+    const WebRtc_UWord8 mask46_29[174] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40
+    };
+
+    const WebRtc_UWord8 mask46_3[18] =
+    {
+        0xca, 0xd3, 0x65, 0x95, 0xa6, 0xc8,
+        0xf1, 0x49, 0x3b, 0xe2, 0x92, 0x74,
+        0x76, 0x27, 0xd0, 0xec, 0x4f, 0xa0
+    };
+
+    const WebRtc_UWord8 mask46_30[180] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+        0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64,
+        0x99, 0xf6, 0x0a, 0xdd, 0x16, 0xb0
+    };
+
+    const WebRtc_UWord8 mask46_31[186] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8
+    };
+
+    const WebRtc_UWord8 mask46_32[192] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x69, 0xcd, 0xeb, 0x51, 0xc9, 0xa8
+    };
+
+    const WebRtc_UWord8 mask46_33[198] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38
+    };
+
+    const WebRtc_UWord8 mask46_34[204] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+        0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8,
+        0x60, 0xf0, 0x13, 0xf0, 0x4d, 0xe0
+    };
+
+    const WebRtc_UWord8 mask46_35[210] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x82, 0x32, 0x57, 0x04, 0x64, 0xac
+    };
+
+    const WebRtc_UWord8 mask46_36[216] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x82, 0x32, 0x57, 0x04, 0x64, 0xac,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x6c, 0x3a, 0x45, 0x70, 0xd7, 0x00
+    };
+
+    const WebRtc_UWord8 mask46_37[222] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x82, 0x32, 0x57, 0x04, 0x64, 0xac,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0
+    };
+
+    const WebRtc_UWord8 mask46_38[228] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+        0x82, 0x32, 0x57, 0x04, 0x64, 0xac,
+        0x72, 0x2b, 0xa5, 0xd4, 0xb9, 0x30
+    };
+
+    const WebRtc_UWord8 mask46_39[234] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4
+    };
+
+    const WebRtc_UWord8 mask46_4[24] =
+    {
+        0xc4, 0xd1, 0x65, 0x89, 0xa2, 0xc8,
+        0x31, 0x62, 0x96, 0x62, 0xc5, 0x2c,
+        0x4b, 0x24, 0x5a, 0x96, 0x48, 0xb4,
+        0x2c, 0xa8, 0xaa, 0x59, 0x51, 0x54
+    };
+
+    const WebRtc_UWord8 mask46_40[240] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x7c, 0xc8, 0x93, 0x63, 0x3c, 0x80
+    };
+
+    const WebRtc_UWord8 mask46_41[246] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08
+    };
+
+    const WebRtc_UWord8 mask46_42[252] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+        0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4,
+        0xfc, 0x6e, 0x89, 0x54, 0x4f, 0x00
+    };
+
+    const WebRtc_UWord8 mask46_43[258] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34
+    };
+
+    const WebRtc_UWord8 mask46_44[264] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x1a, 0x8a, 0x00, 0x1c, 0x89, 0x54
+    };
+
+    const WebRtc_UWord8 mask46_45[270] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x46, 0x42, 0x0c, 0x8c, 0x84, 0x18,
+        0x33, 0x20, 0x46, 0x66, 0x40, 0x8c,
+        0x99, 0x08, 0x0b, 0x32, 0x10, 0x14,
+        0x05, 0x84, 0x30, 0x0b, 0x08, 0x60,
+        0x80, 0xb0, 0x23, 0x01, 0x60, 0x44,
+        0x84, 0x42, 0x91, 0x08, 0x85, 0x20,
+        0x40, 0x73, 0x00, 0x80, 0xe6, 0x00,
+        0x0a, 0x81, 0x12, 0x15, 0x02, 0x24,
+        0x68, 0x0c, 0x40, 0xd0, 0x18, 0x80,
+        0x10, 0x24, 0x84, 0x20, 0x49, 0x08,
+        0x30, 0x51, 0x40, 0x60, 0xa2, 0x80,
+        0x5f, 0x50, 0x88, 0xbe, 0xa1, 0x10
+    };
+
+    const WebRtc_UWord8 mask46_46[276] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x46, 0x42, 0x0c, 0x8c, 0x84, 0x18,
+        0x33, 0x20, 0x46, 0x66, 0x40, 0x8c,
+        0x99, 0x08, 0x0b, 0x32, 0x10, 0x14,
+        0x05, 0x84, 0x30, 0x0b, 0x08, 0x60,
+        0x80, 0xb0, 0x23, 0x01, 0x60, 0x44,
+        0x84, 0x42, 0x91, 0x08, 0x85, 0x20,
+        0x40, 0x73, 0x00, 0x80, 0xe6, 0x00,
+        0x0a, 0x81, 0x12, 0x15, 0x02, 0x24,
+        0x68, 0x0c, 0x40, 0xd0, 0x18, 0x80,
+        0x10, 0x24, 0x84, 0x20, 0x49, 0x08,
+        0x30, 0x51, 0x40, 0x60, 0xa2, 0x80,
+        0x5f, 0x50, 0x88, 0xbe, 0xa1, 0x10,
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+        0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+        0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+        0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+        0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+        0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+        0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+        0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+        0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+        0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+        0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+        0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+        0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+        0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+        0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+        0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+        0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+        0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34,
+        0xd5, 0xdf, 0x59, 0xb9, 0xba, 0x10
+    };
+
+    const WebRtc_UWord8 mask46_5[30] =
+    {
+        0xc6, 0xca, 0x6d, 0x8d, 0x94, 0xd8,
+        0x63, 0x6c, 0x96, 0xc6, 0xd9, 0x2c,
+        0x1d, 0xa1, 0xdc, 0x3b, 0x43, 0xb8,
+        0xad, 0x55, 0x39, 0x5a, 0xaa, 0x70,
+        0xb2, 0xb7, 0x07, 0x65, 0x6e, 0x0c
+    };
+
+    const WebRtc_UWord8 mask46_6[36] =
+    {
+        0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+        0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+        0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+        0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+        0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+        0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80
+    };
+
+    const WebRtc_UWord8 mask46_7[42] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+        0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+        0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+        0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+        0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+        0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18
+    };
+
+    const WebRtc_UWord8 mask46_8[48] =
+    {
+        0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+        0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+        0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+        0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+        0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+        0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+        0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+        0x14, 0x38, 0xa0, 0x28, 0x71, 0x40
+    };
+
+    const WebRtc_UWord8 mask46_9[54] =
+    {
+        0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+        0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+        0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+        0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+        0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+        0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+        0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+        0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+        0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38
+    };
+
+    const WebRtc_UWord8 mask47_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xfe
+    };
+
+    const WebRtc_UWord8 mask47_10[60] =
+    {
+        0x64, 0x4a, 0x28, 0x22, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x68, 0xa6,
+        0xa9, 0x10, 0x1a, 0x00, 0x90, 0x0a,
+        0x04, 0xc4, 0x84, 0x21, 0x06, 0x12,
+        0xd0, 0x01, 0x44, 0x94, 0x29, 0x42,
+        0x82, 0x40, 0x1c, 0x81, 0x48, 0x14,
+        0x21, 0x37, 0x01, 0x40, 0xd4, 0x04,
+        0x0c, 0x21, 0x23, 0x11, 0x01, 0x18,
+        0x4a, 0x0a, 0xc1, 0x0c, 0x10, 0xc0,
+        0x12, 0xb4, 0x50, 0xa8, 0x1a, 0x80
+    };
+
+    const WebRtc_UWord8 mask47_11[66] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x68,
+        0x33, 0x24, 0x27, 0x40, 0x64, 0x22,
+        0x99, 0x02, 0x12, 0x2a, 0x22, 0x82,
+        0x05, 0x80, 0x0e, 0x06, 0xa0, 0x2a,
+        0x80, 0xa1, 0x83, 0x19, 0x11, 0x90,
+        0x84, 0x48, 0x18, 0x51, 0x05, 0x10,
+        0x40, 0x6d, 0x40, 0x10, 0x91, 0x08,
+        0x0a, 0x90, 0xc1, 0x32, 0x03, 0x20,
+        0x68, 0x04, 0x90, 0x45, 0x24, 0x52,
+        0x10, 0x31, 0x20, 0x8c, 0x08, 0xc0,
+        0x30, 0x58, 0x05, 0x18, 0x58, 0x04
+    };
+
+    const WebRtc_UWord8 mask47_12[72] =
+    {
+        0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+        0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+        0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+        0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+        0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+        0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+        0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+        0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+        0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+        0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+        0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+        0x19, 0x56, 0xe5, 0x08, 0x90, 0x88
+    };
+
+    const WebRtc_UWord8 mask47_13[78] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c
+    };
+
+    const WebRtc_UWord8 mask47_14[84] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c
+    };
+
+    const WebRtc_UWord8 mask47_15[90] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0
+    };
+
+    const WebRtc_UWord8 mask47_16[96] =
+    {
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14
+    };
+
+    const WebRtc_UWord8 mask47_17[102] =
+    {
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c
+    };
+
+    const WebRtc_UWord8 mask47_18[108] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c
+    };
+
+    const WebRtc_UWord8 mask47_19[114] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50
+    };
+
+    const WebRtc_UWord8 mask47_2[12] =
+    {
+        0xec, 0xdd, 0x99, 0xd9, 0x9d, 0x98,
+        0x9b, 0xb2, 0x77, 0x27, 0x72, 0x76
+    };
+
+    const WebRtc_UWord8 mask47_20[120] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a
+    };
+
+    const WebRtc_UWord8 mask47_21[126] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04
+    };
+
+    const WebRtc_UWord8 mask47_22[132] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc
+    };
+
+    const WebRtc_UWord8 mask47_23[138] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+        0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+        0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+        0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+        0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+        0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+        0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+        0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+        0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+        0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+        0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+        0x5f, 0x50, 0x89, 0x08, 0x90, 0x88
+    };
+
+    const WebRtc_UWord8 mask47_24[144] =
+    {
+        0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+        0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+        0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+        0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+        0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+        0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+        0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+        0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+        0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+        0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+        0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+        0x19, 0x56, 0xe5, 0x08, 0x90, 0x88,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x68,
+        0x33, 0x24, 0x27, 0x40, 0x64, 0x22,
+        0x99, 0x02, 0x12, 0x2a, 0x22, 0x82,
+        0x05, 0x80, 0x0e, 0x06, 0xa0, 0x2a,
+        0x80, 0xa1, 0x83, 0x19, 0x11, 0x90,
+        0x84, 0x48, 0x18, 0x51, 0x05, 0x10,
+        0x40, 0x6d, 0x40, 0x10, 0x91, 0x08,
+        0x0a, 0x90, 0xc1, 0x32, 0x03, 0x20,
+        0x68, 0x04, 0x90, 0x45, 0x24, 0x52,
+        0x10, 0x31, 0x20, 0x8c, 0x08, 0xc0,
+        0x30, 0x58, 0x05, 0x18, 0x58, 0x04,
+        0x27, 0x41, 0x35, 0x57, 0x7e, 0x6a
+    };
+
+    const WebRtc_UWord8 mask47_25[150] =
+    {
+        0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+        0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+        0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+        0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+        0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+        0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+        0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+        0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+        0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+        0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+        0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+        0x19, 0x56, 0xe5, 0x08, 0x90, 0x88,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c
+    };
+
+    const WebRtc_UWord8 mask47_26[156] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+        0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+        0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+        0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+        0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+        0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+        0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+        0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+        0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+        0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+        0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+        0x19, 0x56, 0xe5, 0x08, 0x90, 0x88,
+        0x6c, 0xea, 0xc4, 0x42, 0x20, 0x9e
+    };
+
+    const WebRtc_UWord8 mask47_27[162] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c
+    };
+
+    const WebRtc_UWord8 mask47_28[168] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x86, 0x1e, 0xa6, 0xaf, 0x3d, 0x04
+    };
+
+    const WebRtc_UWord8 mask47_29[174] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0
+    };
+
+    const WebRtc_UWord8 mask47_3[18] =
+    {
+        0xca, 0xd3, 0x65, 0x36, 0x53, 0x64,
+        0xf1, 0x49, 0x3a, 0x93, 0xa9, 0x3a,
+        0x76, 0x27, 0xd0, 0x7d, 0x07, 0xd0
+    };
+
+    const WebRtc_UWord8 mask47_30[180] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+        0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+        0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+        0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+        0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+        0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c,
+        0x97, 0x43, 0x63, 0xc6, 0x09, 0x9c
+    };
+
+    const WebRtc_UWord8 mask47_31[186] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14
+    };
+
+    const WebRtc_UWord8 mask47_32[192] =
+    {
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0xe5, 0x50, 0x45, 0x63, 0xc2, 0xf4
+    };
+
+    const WebRtc_UWord8 mask47_33[198] =
+    {
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c
+    };
+
+    const WebRtc_UWord8 mask47_34[204] =
+    {
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+        0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+        0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+        0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+        0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+        0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14,
+        0xef, 0xbb, 0xa6, 0x23, 0x5c, 0xbe
+    };
+
+    const WebRtc_UWord8 mask47_35[210] =
+    {
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c
+    };
+
+    const WebRtc_UWord8 mask47_36[216] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0e, 0xd7, 0x38, 0x20, 0x87, 0x66
+    };
+
+    const WebRtc_UWord8 mask47_37[222] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50
+    };
+
+    const WebRtc_UWord8 mask47_38[228] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+        0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+        0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+        0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+        0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+        0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+        0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+        0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+        0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c,
+        0x7b, 0x47, 0xa5, 0xde, 0x9a, 0xd4
+    };
+
+    const WebRtc_UWord8 mask47_39[234] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a
+    };
+
+    const WebRtc_UWord8 mask47_4[24] =
+    {
+        0xc4, 0xd1, 0x65, 0x16, 0x51, 0x64,
+        0x31, 0x62, 0x96, 0x29, 0x62, 0x96,
+        0x4b, 0x24, 0x5a, 0x45, 0xa4, 0x5a,
+        0x2c, 0xa8, 0xaa, 0x8a, 0xa8, 0xaa
+    };
+
+    const WebRtc_UWord8 mask47_40[240] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0xc4, 0xae, 0x5e, 0x33, 0xf5, 0x1a
+    };
+
+    const WebRtc_UWord8 mask47_41[246] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04
+    };
+
+    const WebRtc_UWord8 mask47_42[252] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+        0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+        0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+        0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+        0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+        0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+        0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a,
+        0x3c, 0xb0, 0x36, 0x3b, 0x14, 0xa2
+    };
+
+    const WebRtc_UWord8 mask47_43[258] =
+    {
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc
+    };
+
+    const WebRtc_UWord8 mask47_44[264] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0xd4, 0x8a, 0xd4, 0xd3, 0x3f, 0xe6
+    };
+
+    const WebRtc_UWord8 mask47_45[270] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+        0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+        0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+        0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+        0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+        0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+        0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+        0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+        0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+        0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+        0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+        0x5f, 0x50, 0x89, 0x08, 0x90, 0x88
+    };
+
+    const WebRtc_UWord8 mask47_46[276] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+        0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+        0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+        0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+        0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+        0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+        0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+        0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+        0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+        0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+        0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+        0x5f, 0x50, 0x89, 0x08, 0x90, 0x88,
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+        0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+        0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+        0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+        0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+        0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+        0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+        0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+        0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+        0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc,
+        0x37, 0x9d, 0xcf, 0xe0, 0xe4, 0x20
+    };
+
+    const WebRtc_UWord8 mask47_47[282] =
+    {
+        0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+        0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+        0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+        0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+        0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+        0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+        0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+        0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+        0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+        0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+        0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+        0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+        0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+        0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+        0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+        0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+        0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+        0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+        0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+        0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+        0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+        0x5f, 0x50, 0x89, 0x08, 0x90, 0x88,
+        0x46, 0x4a, 0x6c, 0x20, 0xc2, 0x0c,
+        0x33, 0x24, 0x26, 0x04, 0x60, 0x46,
+        0x99, 0x02, 0x12, 0x80, 0xa8, 0x0a,
+        0x05, 0x80, 0x0e, 0x43, 0x04, 0x30,
+        0x80, 0xa1, 0x83, 0x02, 0x30, 0x22,
+        0x84, 0x48, 0x18, 0x29, 0x02, 0x90,
+        0x40, 0x6d, 0x41, 0x30, 0x13, 0x00,
+        0x0a, 0x90, 0xc0, 0x11, 0x21, 0x12,
+        0x68, 0x04, 0x90, 0xc4, 0x0c, 0x40,
+        0x10, 0x31, 0x20, 0x48, 0x44, 0x84,
+        0x30, 0x58, 0x05, 0x14, 0x11, 0x40,
+        0x46, 0x42, 0x0d, 0x08, 0x90, 0x88,
+        0x33, 0x20, 0x46, 0xa6, 0xca, 0x6c,
+        0x99, 0x08, 0x0a, 0x42, 0x64, 0x26,
+        0x05, 0x84, 0x30, 0x21, 0x22, 0x12,
+        0x80, 0xb0, 0x22, 0x00, 0xe0, 0x0e,
+        0x84, 0x42, 0x90, 0x18, 0x21, 0x82,
+        0x40, 0x73, 0x00, 0x81, 0x88, 0x18,
+        0x0a, 0x81, 0x12, 0xd4, 0x0d, 0x40,
+        0x68, 0x0c, 0x41, 0x0c, 0x10, 0xc0,
+        0x10, 0x24, 0x84, 0x49, 0x04, 0x90,
+        0x30, 0x51, 0x41, 0x12, 0x11, 0x20,
+        0x5f, 0x50, 0x89, 0x80, 0x58, 0x04,
+        0x1f, 0x2f, 0x63, 0x10, 0x64, 0xb2
+    };
+
+    const WebRtc_UWord8 mask47_5[30] =
+    {
+        0xc6, 0xca, 0x6c, 0xa6, 0xca, 0x6c,
+        0x63, 0x6c, 0x96, 0xc9, 0x6c, 0x96,
+        0x1d, 0xa1, 0xdc, 0x1d, 0xc1, 0xdc,
+        0xad, 0x55, 0x39, 0x53, 0x95, 0x38,
+        0xb2, 0xb7, 0x07, 0x70, 0x77, 0x06
+    };
+
+    const WebRtc_UWord8 mask47_6[36] =
+    {
+        0x64, 0x4a, 0x29, 0xa2, 0x9a, 0x28,
+        0x51, 0x58, 0xa2, 0x8a, 0x68, 0xa6,
+        0x0c, 0xa4, 0x30, 0x45, 0xa4, 0x5a,
+        0xa1, 0x22, 0x46, 0x2d, 0x82, 0xd8,
+        0x12, 0xa1, 0x1c, 0x17, 0x41, 0x74,
+        0x8a, 0x45, 0xc1, 0xd1, 0x1d, 0x10
+    };
+
+    const WebRtc_UWord8 mask47_7[42] =
+    {
+        0x46, 0x4a, 0x6d, 0xa6, 0xca, 0x6c,
+        0x33, 0x24, 0x26, 0x4a, 0x64, 0xa6,
+        0x91, 0x92, 0x12, 0x61, 0xa6, 0x0a,
+        0xa4, 0x20, 0x4a, 0x0c, 0x90, 0xd8,
+        0x50, 0xa0, 0xd5, 0x81, 0x70, 0x36,
+        0x84, 0xc5, 0x80, 0x55, 0x45, 0x54,
+        0x09, 0x71, 0x0d, 0x50, 0x9d, 0x08
+    };
+
+    const WebRtc_UWord8 mask47_8[48] =
+    {
+        0x0c, 0x84, 0x0d, 0x02, 0xc0, 0x2c,
+        0x80, 0x70, 0x06, 0x80, 0x78, 0x06,
+        0xa0, 0x88, 0x48, 0x21, 0x22, 0x12,
+        0x05, 0x40, 0x32, 0x0c, 0xa0, 0xca,
+        0x43, 0x02, 0x82, 0x40, 0x95, 0x08,
+        0x1a, 0x01, 0x51, 0x15, 0x41, 0x54,
+        0x60, 0x27, 0x00, 0x66, 0x06, 0x60,
+        0x14, 0x38, 0xa0, 0x99, 0x09, 0x90
+    };
+
+    const WebRtc_UWord8 mask47_9[54] =
+    {
+        0x46, 0x4a, 0x6d, 0xa6, 0xca, 0x6c,
+        0x62, 0x7c, 0x84, 0xc8, 0x4c, 0x84,
+        0x8c, 0x04, 0x88, 0x30, 0x83, 0x88,
+        0x01, 0x74, 0x23, 0x40, 0x94, 0x08,
+        0x07, 0x83, 0x07, 0x02, 0x70, 0x26,
+        0xa0, 0x80, 0x72, 0x45, 0x44, 0x54,
+        0x18, 0xb1, 0x42, 0x10, 0xe1, 0x0e,
+        0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+        0x78, 0x00, 0x1c, 0x03, 0x80, 0x38
+    };
+
+    const WebRtc_UWord8 mask48_1[6] =
+    {
+        0xff, 0xff, 0xff, 0xff, 0xff, 0xff
+    };
+
+    const WebRtc_UWord8 mask48_10[60] =
+    {
+        0x11, 0x45, 0x14, 0x11, 0x45, 0x14,
+        0x45, 0x34, 0x53, 0x45, 0x34, 0x53,
+        0x00, 0x48, 0x05, 0x00, 0x48, 0x05,
+        0x10, 0x83, 0x09, 0x10, 0x83, 0x09,
+        0x4a, 0x14, 0xa1, 0x4a, 0x14, 0xa1,
+        0x40, 0xa4, 0x0a, 0x40, 0xa4, 0x0a,
+        0xa0, 0x6a, 0x02, 0xa0, 0x6a, 0x02,
+        0x88, 0x80, 0x8c, 0x88, 0x80, 0x8c,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x54, 0x0d, 0x40, 0x54, 0x0d, 0x40
+    };
+
+    const WebRtc_UWord8 mask48_11[66] =
+    {
+        0x53, 0x65, 0x34, 0x53, 0x65, 0x34,
+        0xa0, 0x32, 0x11, 0xa0, 0x32, 0x11,
+        0x15, 0x11, 0x41, 0x15, 0x11, 0x41,
+        0x03, 0x50, 0x15, 0x03, 0x50, 0x15,
+        0x8c, 0x88, 0xc8, 0x8c, 0x88, 0xc8,
+        0x28, 0x82, 0x88, 0x28, 0x82, 0x88,
+        0x08, 0x48, 0x84, 0x08, 0x48, 0x84,
+        0x99, 0x01, 0x90, 0x99, 0x01, 0x90,
+        0x22, 0x92, 0x29, 0x22, 0x92, 0x29,
+        0x46, 0x04, 0x60, 0x46, 0x04, 0x60,
+        0x8c, 0x2c, 0x02, 0x8c, 0x2c, 0x02
+    };
+
+    const WebRtc_UWord8 mask48_12[72] =
+    {
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44
+    };
+
+    const WebRtc_UWord8 mask48_13[78] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86
+    };
+
+    const WebRtc_UWord8 mask48_14[84] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e
+    };
+
+    const WebRtc_UWord8 mask48_15[90] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50
+    };
+
+    const WebRtc_UWord8 mask48_16[96] =
+    {
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a
+    };
+
+    const WebRtc_UWord8 mask48_17[102] =
+    {
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e
+    };
+
+    const WebRtc_UWord8 mask48_18[108] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x34, 0x50, 0xae, 0x34, 0x50, 0xae
+    };
+
+    const WebRtc_UWord8 mask48_19[114] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28
+    };
+
+    const WebRtc_UWord8 mask48_2[12] =
+    {
+        0xec, 0xce, 0xcc, 0xec, 0xce, 0xcc,
+        0x93, 0xb9, 0x3b, 0x93, 0xb9, 0x3b
+    };
+
+    const WebRtc_UWord8 mask48_20[120] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95
+    };
+
+    const WebRtc_UWord8 mask48_21[126] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02
+    };
+
+    const WebRtc_UWord8 mask48_22[132] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee
+    };
+
+    const WebRtc_UWord8 mask48_23[138] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44
+    };
+
+    const WebRtc_UWord8 mask48_24[144] =
+    {
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x88, 0x32, 0x59, 0x88, 0x32, 0x59
+    };
+
+    const WebRtc_UWord8 mask48_25[150] =
+    {
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86
+    };
+
+    const WebRtc_UWord8 mask48_26[156] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x3e, 0x20, 0x79, 0xe5, 0x55, 0x70
+    };
+
+    const WebRtc_UWord8 mask48_27[162] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e
+    };
+
+    const WebRtc_UWord8 mask48_28[168] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x32, 0xe3, 0xc0, 0x4a, 0xf2, 0x2a
+    };
+
+    const WebRtc_UWord8 mask48_29[174] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50
+    };
+
+    const WebRtc_UWord8 mask48_3[18] =
+    {
+        0x9b, 0x29, 0xb2, 0x9b, 0x29, 0xb2,
+        0x49, 0xd4, 0x9d, 0x49, 0xd4, 0x9d,
+        0x3e, 0x83, 0xe8, 0x3e, 0x83, 0xe8
+    };
+
+    const WebRtc_UWord8 mask48_30[180] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+        0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+        0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+        0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e,
+        0x66, 0xf3, 0x9a, 0xdd, 0x68, 0x93
+    };
+
+    const WebRtc_UWord8 mask48_31[186] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a
+    };
+
+    const WebRtc_UWord8 mask48_32[192] =
+    {
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0xd5, 0x4a, 0x4f, 0x48, 0xb5, 0x31
+    };
+
+    const WebRtc_UWord8 mask48_33[198] =
+    {
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e
+    };
+
+    const WebRtc_UWord8 mask48_34[204] =
+    {
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+        0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+        0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+        0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+        0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+        0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a,
+        0x40, 0x72, 0x4c, 0xe8, 0xf2, 0x42
+    };
+
+    const WebRtc_UWord8 mask48_35[210] =
+    {
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x34, 0x50, 0xae, 0x34, 0x50, 0xae
+    };
+
+    const WebRtc_UWord8 mask48_36[216] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x34, 0x50, 0xae, 0x34, 0x50, 0xae,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x71, 0xba, 0x8b, 0xf3, 0xfa, 0x9d
+    };
+
+    const WebRtc_UWord8 mask48_37[222] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x34, 0x50, 0xae, 0x34, 0x50, 0xae,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28
+    };
+
+    const WebRtc_UWord8 mask48_38[228] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+        0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+        0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+        0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+        0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+        0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+        0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+        0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+        0x34, 0x50, 0xae, 0x34, 0x50, 0xae,
+        0x2a, 0x7a, 0xf6, 0x8c, 0xde, 0x51
+    };
+
+    const WebRtc_UWord8 mask48_39[234] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95
+    };
+
+    const WebRtc_UWord8 mask48_4[24] =
+    {
+        0x8b, 0x28, 0xb2, 0x8b, 0x28, 0xb2,
+        0x14, 0xb1, 0x4b, 0x14, 0xb1, 0x4b,
+        0x22, 0xd2, 0x2d, 0x22, 0xd2, 0x2d,
+        0x45, 0x54, 0x55, 0x45, 0x54, 0x55
+    };
+
+    const WebRtc_UWord8 mask48_40[240] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x20, 0x5f, 0x68, 0xd5, 0xa2, 0x1b
+    };
+
+    const WebRtc_UWord8 mask48_41[246] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02
+    };
+
+    const WebRtc_UWord8 mask48_42[252] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+        0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+        0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+        0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+        0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+        0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95,
+        0x66, 0xcf, 0xa3, 0x47, 0x69, 0x00
+    };
+
+    const WebRtc_UWord8 mask48_43[258] =
+    {
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee
+    };
+
+    const WebRtc_UWord8 mask48_44[264] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0xc6, 0x40, 0x1f, 0x57, 0xc6, 0xe6
+    };
+
+    const WebRtc_UWord8 mask48_45[270] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44
+    };
+
+    const WebRtc_UWord8 mask48_46[276] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+        0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+        0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+        0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+        0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+        0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+        0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+        0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+        0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee,
+        0x10, 0xf9, 0xab, 0x12, 0x14, 0xef
+    };
+
+    const WebRtc_UWord8 mask48_47[282] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x88, 0x32, 0x59, 0x88, 0x32, 0x59
+    };
+
+    const WebRtc_UWord8 mask48_48[288] =
+    {
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x88, 0x32, 0x59, 0x88, 0x32, 0x59,
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+        0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+        0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+        0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+        0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+        0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+        0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+        0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+        0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+        0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+        0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+        0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+        0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+        0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+        0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+        0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+        0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+        0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+        0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+        0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+        0xff, 0x9b, 0xdf, 0xec, 0xae, 0x0e
+    };
+
+    const WebRtc_UWord8 mask48_5[30] =
+    {
+        0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+        0x64, 0xb6, 0x4b, 0x64, 0xb6, 0x4b,
+        0x0e, 0xe0, 0xee, 0x0e, 0xe0, 0xee,
+        0xa9, 0xca, 0x9c, 0xa9, 0xca, 0x9c,
+        0xb8, 0x3b, 0x83, 0xb8, 0x3b, 0x83
+    };
+
+    const WebRtc_UWord8 mask48_6[36] =
+    {
+        0xd1, 0x4d, 0x14, 0xd1, 0x4d, 0x14,
+        0x45, 0x34, 0x53, 0x45, 0x34, 0x53,
+        0x22, 0xd2, 0x2d, 0x22, 0xd2, 0x2d,
+        0x16, 0xc1, 0x6c, 0x16, 0xc1, 0x6c,
+        0x0b, 0xa0, 0xba, 0x0b, 0xa0, 0xba,
+        0xe8, 0x8e, 0x88, 0xe8, 0x8e, 0x88
+    };
+
+    const WebRtc_UWord8 mask48_7[42] =
+    {
+        0xd3, 0x65, 0x36, 0xd3, 0x65, 0x36,
+        0x25, 0x32, 0x53, 0x25, 0x32, 0x53,
+        0x30, 0xd3, 0x05, 0x30, 0xd3, 0x05,
+        0x06, 0x48, 0x6c, 0x06, 0x48, 0x6c,
+        0xc0, 0xb8, 0x1b, 0xc0, 0xb8, 0x1b,
+        0x2a, 0xa2, 0xaa, 0x2a, 0xa2, 0xaa,
+        0xa8, 0x4e, 0x84, 0xa8, 0x4e, 0x84
+    };
+
+    const WebRtc_UWord8 mask48_8[48] =
+    {
+        0x81, 0x60, 0x16, 0x81, 0x60, 0x16,
+        0x40, 0x3c, 0x03, 0x40, 0x3c, 0x03,
+        0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+        0x06, 0x50, 0x65, 0x06, 0x50, 0x65,
+        0x20, 0x4a, 0x84, 0x20, 0x4a, 0x84,
+        0x8a, 0xa0, 0xaa, 0x8a, 0xa0, 0xaa,
+        0x33, 0x03, 0x30, 0x33, 0x03, 0x30,
+        0x4c, 0x84, 0xc8, 0x4c, 0x84, 0xc8
+    };
+
+    const WebRtc_UWord8 mask48_9[54] =
+    {
+        0xd3, 0x65, 0x36, 0xd3, 0x65, 0x36,
+        0x64, 0x26, 0x42, 0x64, 0x26, 0x42,
+        0x18, 0x41, 0xc4, 0x18, 0x41, 0xc4,
+        0xa0, 0x4a, 0x04, 0xa0, 0x4a, 0x04,
+        0x81, 0x38, 0x13, 0x81, 0x38, 0x13,
+        0x22, 0xa2, 0x2a, 0x22, 0xa2, 0x2a,
+        0x08, 0x70, 0x87, 0x08, 0x70, 0x87,
+        0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+        0x01, 0xc0, 0x1c, 0x01, 0xc0, 0x1c
+    };
+
+    const WebRtc_UWord8 mask4_1[2] =
+    {
+        0xf0, 0x00
+    };
+
+    const WebRtc_UWord8 mask4_2[4] =
+    {
+        0xc0, 0x00,
+        0xb0, 0x00
+    };
+
+    const WebRtc_UWord8 mask4_3[6] =
+    {
+        0xc0, 0x00,
+        0xb0, 0x00,
+        0x60, 0x00
+    };
+
+    const WebRtc_UWord8 mask4_4[8] =
+    {
+        0xc0, 0x00,
+        0xa0, 0x00,
+        0x30, 0x00,
+        0x50, 0x00
+    };
+
+    const WebRtc_UWord8 mask5_1[2] =
+    {
+        0xf8, 0x00
+    };
+
+    const WebRtc_UWord8 mask5_2[4] =
+    {
+        0xa8, 0x00,
+        0xd0, 0x00
+    };
+
+    const WebRtc_UWord8 mask5_3[6] =
+    {
+        0xb0, 0x00,
+        0xc8, 0x00,
+        0x50, 0x00
+    };
+
+    const WebRtc_UWord8 mask5_4[8] =
+    {
+        0xc8, 0x00,
+        0xb0, 0x00,
+        0x50, 0x00,
+        0x28, 0x00
+    };
+
+    const WebRtc_UWord8 mask5_5[10] =
+    {
+        0xc0, 0x00,
+        0x30, 0x00,
+        0x18, 0x00,
+        0xa0, 0x00,
+        0x48, 0x00
+    };
+
+    const WebRtc_UWord8 mask6_1[2] =
+    {
+        0xfc, 0x00
+    };
+
+    const WebRtc_UWord8 mask6_2[4] =
+    {
+        0xa8, 0x00,
+        0xd4, 0x00
+    };
+
+    const WebRtc_UWord8 mask6_3[6] =
+    {
+        0xd0, 0x00,
+        0x68, 0x00,
+        0xa4, 0x00
+    };
+
+    const WebRtc_UWord8 mask6_4[8] =
+    {
+        0xa8, 0x00,
+        0x58, 0x00,
+        0x64, 0x00,
+        0x94, 0x00
+    };
+
+    const WebRtc_UWord8 mask6_5[10] =
+    {
+        0xa8, 0x00,
+        0x84, 0x00,
+        0x64, 0x00,
+        0x90, 0x00,
+        0x58, 0x00
+    };
+
+    const WebRtc_UWord8 mask6_6[12] =
+    {
+        0x98, 0x00,
+        0x64, 0x00,
+        0x50, 0x00,
+        0x14, 0x00,
+        0xa8, 0x00,
+        0xe0, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_1[2] =
+    {
+        0xfe, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_2[4] =
+    {
+        0xd4, 0x00,
+        0xaa, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_3[6] =
+    {
+        0xd0, 0x00,
+        0xaa, 0x00,
+        0x64, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_4[8] =
+    {
+        0xd0, 0x00,
+        0xaa, 0x00,
+        0x64, 0x00,
+        0x1c, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_5[10] =
+    {
+        0x0c, 0x00,
+        0xb0, 0x00,
+        0x1a, 0x00,
+        0xc4, 0x00,
+        0x62, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_6[12] =
+    {
+        0x8c, 0x00,
+        0x4a, 0x00,
+        0x64, 0x00,
+        0xd0, 0x00,
+        0xa0, 0x00,
+        0x32, 0x00
+    };
+
+    const WebRtc_UWord8 mask7_7[14] =
+    {
+        0x4a, 0x00,
+        0x94, 0x00,
+        0x1a, 0x00,
+        0xc4, 0x00,
+        0x28, 0x00,
+        0xc2, 0x00,
+        0x34, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_1[2] =
+    {
+        0xff, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_2[4] =
+    {
+        0xaa, 0x00,
+        0xd5, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_3[6] =
+    {
+        0xc5, 0x00,
+        0x92, 0x00,
+        0x6a, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_4[8] =
+    {
+        0x45, 0x00,
+        0xb4, 0x00,
+        0x6a, 0x00,
+        0x89, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_5[10] =
+    {
+        0x8c, 0x00,
+        0x92, 0x00,
+        0x2b, 0x00,
+        0x51, 0x00,
+        0x64, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_6[12] =
+    {
+        0xa1, 0x00,
+        0x52, 0x00,
+        0x91, 0x00,
+        0x2a, 0x00,
+        0xc4, 0x00,
+        0x4c, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_7[14] =
+    {
+        0x15, 0x00,
+        0xc2, 0x00,
+        0x25, 0x00,
+        0x62, 0x00,
+        0x58, 0x00,
+        0x8c, 0x00,
+        0xa3, 0x00
+    };
+
+    const WebRtc_UWord8 mask8_8[16] =
+    {
+        0x25, 0x00,
+        0x8a, 0x00,
+        0x91, 0x00,
+        0x68, 0x00,
+        0x32, 0x00,
+        0x43, 0x00,
+        0xc4, 0x00,
+        0x1c, 0x00
+    };
+
+    const WebRtc_UWord8 mask9_1[2] =
+    {
+        0xff, 0x80
+    };
+
+    const WebRtc_UWord8 mask9_2[4] =
+    {
+        0xaa, 0x80,
+        0xd5, 0x00
+    };
+
+    const WebRtc_UWord8 mask9_3[6] =
+    {
+        0xa5, 0x00,
+        0xc8, 0x00,
+        0x52, 0x80
+    };
+
+    const WebRtc_UWord8 mask9_4[8] =
+    {
+        0xa2, 0x00,
+        0xc9, 0x00,
+        0x52, 0x80,
+        0x24, 0x80
+    };
+
+    const WebRtc_UWord8 mask9_5[10] =
+    {
+        0x8c, 0x00,
+        0x25, 0x00,
+        0x92, 0x80,
+        0x41, 0x80,
+        0x58, 0x00
+    };
+
+    const WebRtc_UWord8 mask9_6[12] =
+    {
+        0x84, 0x80,
+        0x27, 0x00,
+        0x51, 0x80,
+        0x1a, 0x00,
+        0x68, 0x00,
+        0x89, 0x00
+    };
+
+    const WebRtc_UWord8 mask9_7[14] =
+    {
+        0x8c, 0x00,
+        0x47, 0x00,
+        0x81, 0x80,
+        0x12, 0x80,
+        0x58, 0x00,
+        0x28, 0x80,
+        0xb4, 0x00
+    };
+
+    const WebRtc_UWord8 mask9_8[16] =
+    {
+        0x2c, 0x00,
+        0x91, 0x00,
+        0x40, 0x80,
+        0x06, 0x80,
+        0xc8, 0x00,
+        0x45, 0x00,
+        0x30, 0x80,
+        0xa2, 0x00
+    };
+
+    const WebRtc_UWord8 mask9_9[18] =
+    {
+        0x4c, 0x00,
+        0x62, 0x00,
+        0x91, 0x00,
+        0x42, 0x80,
+        0xa4, 0x00,
+        0x13, 0x00,
+        0x30, 0x80,
+        0x88, 0x80,
+        0x09, 0x00
+    };
+
+    const WebRtc_UWord8* packetMask1[1] =
+    {
+        mask1_1
+    };
+
+    const WebRtc_UWord8* packetMask2[2] =
+    {
+        mask2_1,
+        mask2_2
+    };
+
+    const WebRtc_UWord8* packetMask3[3] =
+    {
+        mask3_1,
+        mask3_2,
+        mask3_3
+    };
+
+    const WebRtc_UWord8* packetMask4[4] =
+    {
+        mask4_1,
+        mask4_2,
+        mask4_3,
+        mask4_4
+    };
+
+    const WebRtc_UWord8* packetMask5[5] =
+    {
+        mask5_1,
+        mask5_2,
+        mask5_3,
+        mask5_4,
+        mask5_5
+    };
+
+    const WebRtc_UWord8* packetMask6[6] =
+    {
+        mask6_1,
+        mask6_2,
+        mask6_3,
+        mask6_4,
+        mask6_5,
+        mask6_6
+    };
+
+    const WebRtc_UWord8* packetMask7[7] =
+    {
+        mask7_1,
+        mask7_2,
+        mask7_3,
+        mask7_4,
+        mask7_5,
+        mask7_6,
+        mask7_7
+    };
+
+    const WebRtc_UWord8* packetMask8[8] =
+    {
+        mask8_1,
+        mask8_2,
+        mask8_3,
+        mask8_4,
+        mask8_5,
+        mask8_6,
+        mask8_7,
+        mask8_8
+    };
+
+    const WebRtc_UWord8* packetMask9[9] =
+    {
+        mask9_1,
+        mask9_2,
+        mask9_3,
+        mask9_4,
+        mask9_5,
+        mask9_6,
+        mask9_7,
+        mask9_8,
+        mask9_9
+    };
+
+    const WebRtc_UWord8* packetMask10[10] =
+    {
+        mask10_1,
+        mask10_2,
+        mask10_3,
+        mask10_4,
+        mask10_5,
+        mask10_6,
+        mask10_7,
+        mask10_8,
+        mask10_9,
+        mask10_10
+    };
+
+    const WebRtc_UWord8* packetMask11[11] =
+    {
+        mask11_1,
+        mask11_2,
+        mask11_3,
+        mask11_4,
+        mask11_5,
+        mask11_6,
+        mask11_7,
+        mask11_8,
+        mask11_9,
+        mask11_10,
+        mask11_11
+    };
+
+    const WebRtc_UWord8* packetMask12[12] =
+    {
+        mask12_1,
+        mask12_2,
+        mask12_3,
+        mask12_4,
+        mask12_5,
+        mask12_6,
+        mask12_7,
+        mask12_8,
+        mask12_9,
+        mask12_10,
+        mask12_11,
+        mask12_12
+    };
+
+    const WebRtc_UWord8* packetMask13[13] =
+    {
+        mask13_1,
+        mask13_2,
+        mask13_3,
+        mask13_4,
+        mask13_5,
+        mask13_6,
+        mask13_7,
+        mask13_8,
+        mask13_9,
+        mask13_10,
+        mask13_11,
+        mask13_12,
+        mask13_13
+    };
+
+    const WebRtc_UWord8* packetMask14[14] =
+    {
+        mask14_1,
+        mask14_2,
+        mask14_3,
+        mask14_4,
+        mask14_5,
+        mask14_6,
+        mask14_7,
+        mask14_8,
+        mask14_9,
+        mask14_10,
+        mask14_11,
+        mask14_12,
+        mask14_13,
+        mask14_14
+    };
+
+    const WebRtc_UWord8* packetMask15[15] =
+    {
+        mask15_1,
+        mask15_2,
+        mask15_3,
+        mask15_4,
+        mask15_5,
+        mask15_6,
+        mask15_7,
+        mask15_8,
+        mask15_9,
+        mask15_10,
+        mask15_11,
+        mask15_12,
+        mask15_13,
+        mask15_14,
+        mask15_15
+    };
+
+    const WebRtc_UWord8* packetMask16[16] =
+    {
+        mask16_1,
+        mask16_2,
+        mask16_3,
+        mask16_4,
+        mask16_5,
+        mask16_6,
+        mask16_7,
+        mask16_8,
+        mask16_9,
+        mask16_10,
+        mask16_11,
+        mask16_12,
+        mask16_13,
+        mask16_14,
+        mask16_15,
+        mask16_16
+    };
+
+    const WebRtc_UWord8* packetMask17[17] =
+    {
+        mask17_1,
+        mask17_2,
+        mask17_3,
+        mask17_4,
+        mask17_5,
+        mask17_6,
+        mask17_7,
+        mask17_8,
+        mask17_9,
+        mask17_10,
+        mask17_11,
+        mask17_12,
+        mask17_13,
+        mask17_14,
+        mask17_15,
+        mask17_16,
+        mask17_17
+    };
+
+    const WebRtc_UWord8* packetMask18[18] =
+    {
+        mask18_1,
+        mask18_2,
+        mask18_3,
+        mask18_4,
+        mask18_5,
+        mask18_6,
+        mask18_7,
+        mask18_8,
+        mask18_9,
+        mask18_10,
+        mask18_11,
+        mask18_12,
+        mask18_13,
+        mask18_14,
+        mask18_15,
+        mask18_16,
+        mask18_17,
+        mask18_18
+    };
+
+    const WebRtc_UWord8* packetMask19[19] =
+    {
+        mask19_1,
+        mask19_2,
+        mask19_3,
+        mask19_4,
+        mask19_5,
+        mask19_6,
+        mask19_7,
+        mask19_8,
+        mask19_9,
+        mask19_10,
+        mask19_11,
+        mask19_12,
+        mask19_13,
+        mask19_14,
+        mask19_15,
+        mask19_16,
+        mask19_17,
+        mask19_18,
+        mask19_19
+    };
+
+    const WebRtc_UWord8* packetMask20[20] =
+    {
+        mask20_1,
+        mask20_2,
+        mask20_3,
+        mask20_4,
+        mask20_5,
+        mask20_6,
+        mask20_7,
+        mask20_8,
+        mask20_9,
+        mask20_10,
+        mask20_11,
+        mask20_12,
+        mask20_13,
+        mask20_14,
+        mask20_15,
+        mask20_16,
+        mask20_17,
+        mask20_18,
+        mask20_19,
+        mask20_20
+    };
+
+    const WebRtc_UWord8* packetMask21[21] =
+    {
+        mask21_1,
+        mask21_2,
+        mask21_3,
+        mask21_4,
+        mask21_5,
+        mask21_6,
+        mask21_7,
+        mask21_8,
+        mask21_9,
+        mask21_10,
+        mask21_11,
+        mask21_12,
+        mask21_13,
+        mask21_14,
+        mask21_15,
+        mask21_16,
+        mask21_17,
+        mask21_18,
+        mask21_19,
+        mask21_20,
+        mask21_21
+    };
+
+    const WebRtc_UWord8* packetMask22[22] =
+    {
+        mask22_1,
+        mask22_2,
+        mask22_3,
+        mask22_4,
+        mask22_5,
+        mask22_6,
+        mask22_7,
+        mask22_8,
+        mask22_9,
+        mask22_10,
+        mask22_11,
+        mask22_12,
+        mask22_13,
+        mask22_14,
+        mask22_15,
+        mask22_16,
+        mask22_17,
+        mask22_18,
+        mask22_19,
+        mask22_20,
+        mask22_21,
+        mask22_22
+    };
+
+    const WebRtc_UWord8* packetMask23[23] =
+    {
+        mask23_1,
+        mask23_2,
+        mask23_3,
+        mask23_4,
+        mask23_5,
+        mask23_6,
+        mask23_7,
+        mask23_8,
+        mask23_9,
+        mask23_10,
+        mask23_11,
+        mask23_12,
+        mask23_13,
+        mask23_14,
+        mask23_15,
+        mask23_16,
+        mask23_17,
+        mask23_18,
+        mask23_19,
+        mask23_20,
+        mask23_21,
+        mask23_22,
+        mask23_23
+    };
+
+    const WebRtc_UWord8* packetMask24[24] =
+    {
+        mask24_1,
+        mask24_2,
+        mask24_3,
+        mask24_4,
+        mask24_5,
+        mask24_6,
+        mask24_7,
+        mask24_8,
+        mask24_9,
+        mask24_10,
+        mask24_11,
+        mask24_12,
+        mask24_13,
+        mask24_14,
+        mask24_15,
+        mask24_16,
+        mask24_17,
+        mask24_18,
+        mask24_19,
+        mask24_20,
+        mask24_21,
+        mask24_22,
+        mask24_23,
+        mask24_24
+    };
+
+    const WebRtc_UWord8* packetMask25[25] =
+    {
+        mask25_1,
+        mask25_2,
+        mask25_3,
+        mask25_4,
+        mask25_5,
+        mask25_6,
+        mask25_7,
+        mask25_8,
+        mask25_9,
+        mask25_10,
+        mask25_11,
+        mask25_12,
+        mask25_13,
+        mask25_14,
+        mask25_15,
+        mask25_16,
+        mask25_17,
+        mask25_18,
+        mask25_19,
+        mask25_20,
+        mask25_21,
+        mask25_22,
+        mask25_23,
+        mask25_24,
+        mask25_25
+    };
+
+    const WebRtc_UWord8* packetMask26[26] =
+    {
+        mask26_1,
+        mask26_2,
+        mask26_3,
+        mask26_4,
+        mask26_5,
+        mask26_6,
+        mask26_7,
+        mask26_8,
+        mask26_9,
+        mask26_10,
+        mask26_11,
+        mask26_12,
+        mask26_13,
+        mask26_14,
+        mask26_15,
+        mask26_16,
+        mask26_17,
+        mask26_18,
+        mask26_19,
+        mask26_20,
+        mask26_21,
+        mask26_22,
+        mask26_23,
+        mask26_24,
+        mask26_25,
+        mask26_26
+    };
+
+    const WebRtc_UWord8* packetMask27[27] =
+    {
+        mask27_1,
+        mask27_2,
+        mask27_3,
+        mask27_4,
+        mask27_5,
+        mask27_6,
+        mask27_7,
+        mask27_8,
+        mask27_9,
+        mask27_10,
+        mask27_11,
+        mask27_12,
+        mask27_13,
+        mask27_14,
+        mask27_15,
+        mask27_16,
+        mask27_17,
+        mask27_18,
+        mask27_19,
+        mask27_20,
+        mask27_21,
+        mask27_22,
+        mask27_23,
+        mask27_24,
+        mask27_25,
+        mask27_26,
+        mask27_27
+    };
+
+    const WebRtc_UWord8* packetMask28[28] =
+    {
+        mask28_1,
+        mask28_2,
+        mask28_3,
+        mask28_4,
+        mask28_5,
+        mask28_6,
+        mask28_7,
+        mask28_8,
+        mask28_9,
+        mask28_10,
+        mask28_11,
+        mask28_12,
+        mask28_13,
+        mask28_14,
+        mask28_15,
+        mask28_16,
+        mask28_17,
+        mask28_18,
+        mask28_19,
+        mask28_20,
+        mask28_21,
+        mask28_22,
+        mask28_23,
+        mask28_24,
+        mask28_25,
+        mask28_26,
+        mask28_27,
+        mask28_28
+    };
+
+    const WebRtc_UWord8* packetMask29[29] =
+    {
+        mask29_1,
+        mask29_2,
+        mask29_3,
+        mask29_4,
+        mask29_5,
+        mask29_6,
+        mask29_7,
+        mask29_8,
+        mask29_9,
+        mask29_10,
+        mask29_11,
+        mask29_12,
+        mask29_13,
+        mask29_14,
+        mask29_15,
+        mask29_16,
+        mask29_17,
+        mask29_18,
+        mask29_19,
+        mask29_20,
+        mask29_21,
+        mask29_22,
+        mask29_23,
+        mask29_24,
+        mask29_25,
+        mask29_26,
+        mask29_27,
+        mask29_28,
+        mask29_29
+    };
+
+    const WebRtc_UWord8* packetMask30[30] =
+    {
+        mask30_1,
+        mask30_2,
+        mask30_3,
+        mask30_4,
+        mask30_5,
+        mask30_6,
+        mask30_7,
+        mask30_8,
+        mask30_9,
+        mask30_10,
+        mask30_11,
+        mask30_12,
+        mask30_13,
+        mask30_14,
+        mask30_15,
+        mask30_16,
+        mask30_17,
+        mask30_18,
+        mask30_19,
+        mask30_20,
+        mask30_21,
+        mask30_22,
+        mask30_23,
+        mask30_24,
+        mask30_25,
+        mask30_26,
+        mask30_27,
+        mask30_28,
+        mask30_29,
+        mask30_30
+    };
+
+    const WebRtc_UWord8* packetMask31[31] =
+    {
+        mask31_1,
+        mask31_2,
+        mask31_3,
+        mask31_4,
+        mask31_5,
+        mask31_6,
+        mask31_7,
+        mask31_8,
+        mask31_9,
+        mask31_10,
+        mask31_11,
+        mask31_12,
+        mask31_13,
+        mask31_14,
+        mask31_15,
+        mask31_16,
+        mask31_17,
+        mask31_18,
+        mask31_19,
+        mask31_20,
+        mask31_21,
+        mask31_22,
+        mask31_23,
+        mask31_24,
+        mask31_25,
+        mask31_26,
+        mask31_27,
+        mask31_28,
+        mask31_29,
+        mask31_30,
+        mask31_31
+    };
+
+    const WebRtc_UWord8* packetMask32[32] =
+    {
+        mask32_1,
+        mask32_2,
+        mask32_3,
+        mask32_4,
+        mask32_5,
+        mask32_6,
+        mask32_7,
+        mask32_8,
+        mask32_9,
+        mask32_10,
+        mask32_11,
+        mask32_12,
+        mask32_13,
+        mask32_14,
+        mask32_15,
+        mask32_16,
+        mask32_17,
+        mask32_18,
+        mask32_19,
+        mask32_20,
+        mask32_21,
+        mask32_22,
+        mask32_23,
+        mask32_24,
+        mask32_25,
+        mask32_26,
+        mask32_27,
+        mask32_28,
+        mask32_29,
+        mask32_30,
+        mask32_31,
+        mask32_32
+    };
+
+    const WebRtc_UWord8* packetMask33[33] =
+    {
+        mask33_1,
+        mask33_2,
+        mask33_3,
+        mask33_4,
+        mask33_5,
+        mask33_6,
+        mask33_7,
+        mask33_8,
+        mask33_9,
+        mask33_10,
+        mask33_11,
+        mask33_12,
+        mask33_13,
+        mask33_14,
+        mask33_15,
+        mask33_16,
+        mask33_17,
+        mask33_18,
+        mask33_19,
+        mask33_20,
+        mask33_21,
+        mask33_22,
+        mask33_23,
+        mask33_24,
+        mask33_25,
+        mask33_26,
+        mask33_27,
+        mask33_28,
+        mask33_29,
+        mask33_30,
+        mask33_31,
+        mask33_32,
+        mask33_33
+    };
+
+    const WebRtc_UWord8* packetMask34[34] =
+    {
+        mask34_1,
+        mask34_2,
+        mask34_3,
+        mask34_4,
+        mask34_5,
+        mask34_6,
+        mask34_7,
+        mask34_8,
+        mask34_9,
+        mask34_10,
+        mask34_11,
+        mask34_12,
+        mask34_13,
+        mask34_14,
+        mask34_15,
+        mask34_16,
+        mask34_17,
+        mask34_18,
+        mask34_19,
+        mask34_20,
+        mask34_21,
+        mask34_22,
+        mask34_23,
+        mask34_24,
+        mask34_25,
+        mask34_26,
+        mask34_27,
+        mask34_28,
+        mask34_29,
+        mask34_30,
+        mask34_31,
+        mask34_32,
+        mask34_33,
+        mask34_34
+    };
+
+    const WebRtc_UWord8* packetMask35[35] =
+    {
+        mask35_1,
+        mask35_2,
+        mask35_3,
+        mask35_4,
+        mask35_5,
+        mask35_6,
+        mask35_7,
+        mask35_8,
+        mask35_9,
+        mask35_10,
+        mask35_11,
+        mask35_12,
+        mask35_13,
+        mask35_14,
+        mask35_15,
+        mask35_16,
+        mask35_17,
+        mask35_18,
+        mask35_19,
+        mask35_20,
+        mask35_21,
+        mask35_22,
+        mask35_23,
+        mask35_24,
+        mask35_25,
+        mask35_26,
+        mask35_27,
+        mask35_28,
+        mask35_29,
+        mask35_30,
+        mask35_31,
+        mask35_32,
+        mask35_33,
+        mask35_34,
+        mask35_35
+    };
+
+    const WebRtc_UWord8* packetMask36[36] =
+    {
+        mask36_1,
+        mask36_2,
+        mask36_3,
+        mask36_4,
+        mask36_5,
+        mask36_6,
+        mask36_7,
+        mask36_8,
+        mask36_9,
+        mask36_10,
+        mask36_11,
+        mask36_12,
+        mask36_13,
+        mask36_14,
+        mask36_15,
+        mask36_16,
+        mask36_17,
+        mask36_18,
+        mask36_19,
+        mask36_20,
+        mask36_21,
+        mask36_22,
+        mask36_23,
+        mask36_24,
+        mask36_25,
+        mask36_26,
+        mask36_27,
+        mask36_28,
+        mask36_29,
+        mask36_30,
+        mask36_31,
+        mask36_32,
+        mask36_33,
+        mask36_34,
+        mask36_35,
+        mask36_36
+    };
+
+    const WebRtc_UWord8* packetMask37[37] =
+    {
+        mask37_1,
+        mask37_2,
+        mask37_3,
+        mask37_4,
+        mask37_5,
+        mask37_6,
+        mask37_7,
+        mask37_8,
+        mask37_9,
+        mask37_10,
+        mask37_11,
+        mask37_12,
+        mask37_13,
+        mask37_14,
+        mask37_15,
+        mask37_16,
+        mask37_17,
+        mask37_18,
+        mask37_19,
+        mask37_20,
+        mask37_21,
+        mask37_22,
+        mask37_23,
+        mask37_24,
+        mask37_25,
+        mask37_26,
+        mask37_27,
+        mask37_28,
+        mask37_29,
+        mask37_30,
+        mask37_31,
+        mask37_32,
+        mask37_33,
+        mask37_34,
+        mask37_35,
+        mask37_36,
+        mask37_37
+    };
+
+    const WebRtc_UWord8* packetMask38[38] =
+    {
+        mask38_1,
+        mask38_2,
+        mask38_3,
+        mask38_4,
+        mask38_5,
+        mask38_6,
+        mask38_7,
+        mask38_8,
+        mask38_9,
+        mask38_10,
+        mask38_11,
+        mask38_12,
+        mask38_13,
+        mask38_14,
+        mask38_15,
+        mask38_16,
+        mask38_17,
+        mask38_18,
+        mask38_19,
+        mask38_20,
+        mask38_21,
+        mask38_22,
+        mask38_23,
+        mask38_24,
+        mask38_25,
+        mask38_26,
+        mask38_27,
+        mask38_28,
+        mask38_29,
+        mask38_30,
+        mask38_31,
+        mask38_32,
+        mask38_33,
+        mask38_34,
+        mask38_35,
+        mask38_36,
+        mask38_37,
+        mask38_38
+    };
+
+    const WebRtc_UWord8* packetMask39[39] =
+    {
+        mask39_1,
+        mask39_2,
+        mask39_3,
+        mask39_4,
+        mask39_5,
+        mask39_6,
+        mask39_7,
+        mask39_8,
+        mask39_9,
+        mask39_10,
+        mask39_11,
+        mask39_12,
+        mask39_13,
+        mask39_14,
+        mask39_15,
+        mask39_16,
+        mask39_17,
+        mask39_18,
+        mask39_19,
+        mask39_20,
+        mask39_21,
+        mask39_22,
+        mask39_23,
+        mask39_24,
+        mask39_25,
+        mask39_26,
+        mask39_27,
+        mask39_28,
+        mask39_29,
+        mask39_30,
+        mask39_31,
+        mask39_32,
+        mask39_33,
+        mask39_34,
+        mask39_35,
+        mask39_36,
+        mask39_37,
+        mask39_38,
+        mask39_39
+    };
+
+    const WebRtc_UWord8* packetMask40[40] =
+    {
+        mask40_1,
+        mask40_2,
+        mask40_3,
+        mask40_4,
+        mask40_5,
+        mask40_6,
+        mask40_7,
+        mask40_8,
+        mask40_9,
+        mask40_10,
+        mask40_11,
+        mask40_12,
+        mask40_13,
+        mask40_14,
+        mask40_15,
+        mask40_16,
+        mask40_17,
+        mask40_18,
+        mask40_19,
+        mask40_20,
+        mask40_21,
+        mask40_22,
+        mask40_23,
+        mask40_24,
+        mask40_25,
+        mask40_26,
+        mask40_27,
+        mask40_28,
+        mask40_29,
+        mask40_30,
+        mask40_31,
+        mask40_32,
+        mask40_33,
+        mask40_34,
+        mask40_35,
+        mask40_36,
+        mask40_37,
+        mask40_38,
+        mask40_39,
+        mask40_40
+    };
+
+    const WebRtc_UWord8* packetMask41[41] =
+    {
+        mask41_1,
+        mask41_2,
+        mask41_3,
+        mask41_4,
+        mask41_5,
+        mask41_6,
+        mask41_7,
+        mask41_8,
+        mask41_9,
+        mask41_10,
+        mask41_11,
+        mask41_12,
+        mask41_13,
+        mask41_14,
+        mask41_15,
+        mask41_16,
+        mask41_17,
+        mask41_18,
+        mask41_19,
+        mask41_20,
+        mask41_21,
+        mask41_22,
+        mask41_23,
+        mask41_24,
+        mask41_25,
+        mask41_26,
+        mask41_27,
+        mask41_28,
+        mask41_29,
+        mask41_30,
+        mask41_31,
+        mask41_32,
+        mask41_33,
+        mask41_34,
+        mask41_35,
+        mask41_36,
+        mask41_37,
+        mask41_38,
+        mask41_39,
+        mask41_40,
+        mask41_41
+    };
+
+    const WebRtc_UWord8* packetMask42[42] =
+    {
+        mask42_1,
+        mask42_2,
+        mask42_3,
+        mask42_4,
+        mask42_5,
+        mask42_6,
+        mask42_7,
+        mask42_8,
+        mask42_9,
+        mask42_10,
+        mask42_11,
+        mask42_12,
+        mask42_13,
+        mask42_14,
+        mask42_15,
+        mask42_16,
+        mask42_17,
+        mask42_18,
+        mask42_19,
+        mask42_20,
+        mask42_21,
+        mask42_22,
+        mask42_23,
+        mask42_24,
+        mask42_25,
+        mask42_26,
+        mask42_27,
+        mask42_28,
+        mask42_29,
+        mask42_30,
+        mask42_31,
+        mask42_32,
+        mask42_33,
+        mask42_34,
+        mask42_35,
+        mask42_36,
+        mask42_37,
+        mask42_38,
+        mask42_39,
+        mask42_40,
+        mask42_41,
+        mask42_42
+    };
+
+    const WebRtc_UWord8* packetMask43[43] =
+    {
+        mask43_1,
+        mask43_2,
+        mask43_3,
+        mask43_4,
+        mask43_5,
+        mask43_6,
+        mask43_7,
+        mask43_8,
+        mask43_9,
+        mask43_10,
+        mask43_11,
+        mask43_12,
+        mask43_13,
+        mask43_14,
+        mask43_15,
+        mask43_16,
+        mask43_17,
+        mask43_18,
+        mask43_19,
+        mask43_20,
+        mask43_21,
+        mask43_22,
+        mask43_23,
+        mask43_24,
+        mask43_25,
+        mask43_26,
+        mask43_27,
+        mask43_28,
+        mask43_29,
+        mask43_30,
+        mask43_31,
+        mask43_32,
+        mask43_33,
+        mask43_34,
+        mask43_35,
+        mask43_36,
+        mask43_37,
+        mask43_38,
+        mask43_39,
+        mask43_40,
+        mask43_41,
+        mask43_42,
+        mask43_43
+    };
+
+    const WebRtc_UWord8* packetMask44[44] =
+    {
+        mask44_1,
+        mask44_2,
+        mask44_3,
+        mask44_4,
+        mask44_5,
+        mask44_6,
+        mask44_7,
+        mask44_8,
+        mask44_9,
+        mask44_10,
+        mask44_11,
+        mask44_12,
+        mask44_13,
+        mask44_14,
+        mask44_15,
+        mask44_16,
+        mask44_17,
+        mask44_18,
+        mask44_19,
+        mask44_20,
+        mask44_21,
+        mask44_22,
+        mask44_23,
+        mask44_24,
+        mask44_25,
+        mask44_26,
+        mask44_27,
+        mask44_28,
+        mask44_29,
+        mask44_30,
+        mask44_31,
+        mask44_32,
+        mask44_33,
+        mask44_34,
+        mask44_35,
+        mask44_36,
+        mask44_37,
+        mask44_38,
+        mask44_39,
+        mask44_40,
+        mask44_41,
+        mask44_42,
+        mask44_43,
+        mask44_44
+    };
+
+    const WebRtc_UWord8* packetMask45[45] =
+    {
+        mask45_1,
+        mask45_2,
+        mask45_3,
+        mask45_4,
+        mask45_5,
+        mask45_6,
+        mask45_7,
+        mask45_8,
+        mask45_9,
+        mask45_10,
+        mask45_11,
+        mask45_12,
+        mask45_13,
+        mask45_14,
+        mask45_15,
+        mask45_16,
+        mask45_17,
+        mask45_18,
+        mask45_19,
+        mask45_20,
+        mask45_21,
+        mask45_22,
+        mask45_23,
+        mask45_24,
+        mask45_25,
+        mask45_26,
+        mask45_27,
+        mask45_28,
+        mask45_29,
+        mask45_30,
+        mask45_31,
+        mask45_32,
+        mask45_33,
+        mask45_34,
+        mask45_35,
+        mask45_36,
+        mask45_37,
+        mask45_38,
+        mask45_39,
+        mask45_40,
+        mask45_41,
+        mask45_42,
+        mask45_43,
+        mask45_44,
+        mask45_45
+    };
+
+    const WebRtc_UWord8* packetMask46[46] =
+    {
+        mask46_1,
+        mask46_2,
+        mask46_3,
+        mask46_4,
+        mask46_5,
+        mask46_6,
+        mask46_7,
+        mask46_8,
+        mask46_9,
+        mask46_10,
+        mask46_11,
+        mask46_12,
+        mask46_13,
+        mask46_14,
+        mask46_15,
+        mask46_16,
+        mask46_17,
+        mask46_18,
+        mask46_19,
+        mask46_20,
+        mask46_21,
+        mask46_22,
+        mask46_23,
+        mask46_24,
+        mask46_25,
+        mask46_26,
+        mask46_27,
+        mask46_28,
+        mask46_29,
+        mask46_30,
+        mask46_31,
+        mask46_32,
+        mask46_33,
+        mask46_34,
+        mask46_35,
+        mask46_36,
+        mask46_37,
+        mask46_38,
+        mask46_39,
+        mask46_40,
+        mask46_41,
+        mask46_42,
+        mask46_43,
+        mask46_44,
+        mask46_45,
+        mask46_46
+    };
+
+    const WebRtc_UWord8* packetMask47[47] =
+    {
+        mask47_1,
+        mask47_2,
+        mask47_3,
+        mask47_4,
+        mask47_5,
+        mask47_6,
+        mask47_7,
+        mask47_8,
+        mask47_9,
+        mask47_10,
+        mask47_11,
+        mask47_12,
+        mask47_13,
+        mask47_14,
+        mask47_15,
+        mask47_16,
+        mask47_17,
+        mask47_18,
+        mask47_19,
+        mask47_20,
+        mask47_21,
+        mask47_22,
+        mask47_23,
+        mask47_24,
+        mask47_25,
+        mask47_26,
+        mask47_27,
+        mask47_28,
+        mask47_29,
+        mask47_30,
+        mask47_31,
+        mask47_32,
+        mask47_33,
+        mask47_34,
+        mask47_35,
+        mask47_36,
+        mask47_37,
+        mask47_38,
+        mask47_39,
+        mask47_40,
+        mask47_41,
+        mask47_42,
+        mask47_43,
+        mask47_44,
+        mask47_45,
+        mask47_46,
+        mask47_47
+    };
+
+    const WebRtc_UWord8* packetMask48[48] =
+    {
+        mask48_1,
+        mask48_2,
+        mask48_3,
+        mask48_4,
+        mask48_5,
+        mask48_6,
+        mask48_7,
+        mask48_8,
+        mask48_9,
+        mask48_10,
+        mask48_11,
+        mask48_12,
+        mask48_13,
+        mask48_14,
+        mask48_15,
+        mask48_16,
+        mask48_17,
+        mask48_18,
+        mask48_19,
+        mask48_20,
+        mask48_21,
+        mask48_22,
+        mask48_23,
+        mask48_24,
+        mask48_25,
+        mask48_26,
+        mask48_27,
+        mask48_28,
+        mask48_29,
+        mask48_30,
+        mask48_31,
+        mask48_32,
+        mask48_33,
+        mask48_34,
+        mask48_35,
+        mask48_36,
+        mask48_37,
+        mask48_38,
+        mask48_39,
+        mask48_40,
+        mask48_41,
+        mask48_42,
+        mask48_43,
+        mask48_44,
+        mask48_45,
+        mask48_46,
+        mask48_47,
+        mask48_48
+    };
+
+    const WebRtc_UWord8** packetMaskTbl[48] =
+    {
+        packetMask1,
+        packetMask2,
+        packetMask3,
+        packetMask4,
+        packetMask5,
+        packetMask6,
+        packetMask7,
+        packetMask8,
+        packetMask9,
+        packetMask10,
+        packetMask11,
+        packetMask12,
+        packetMask13,
+        packetMask14,
+        packetMask15,
+        packetMask16,
+        packetMask17,
+        packetMask18,
+        packetMask19,
+        packetMask20,
+        packetMask21,
+        packetMask22,
+        packetMask23,
+        packetMask24,
+        packetMask25,
+        packetMask26,
+        packetMask27,
+        packetMask28,
+        packetMask29,
+        packetMask30,
+        packetMask31,
+        packetMask32,
+        packetMask33,
+        packetMask34,
+        packetMask35,
+        packetMask36,
+        packetMask37,
+        packetMask38,
+        packetMask39,
+        packetMask40,
+        packetMask41,
+        packetMask42,
+        packetMask43,
+        packetMask44,
+        packetMask45,
+        packetMask46,
+        packetMask47,
+        packetMask48
+    };
+} // Anonymous namespace
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/forward_error_correction.cc b/trunk/src/modules/rtp_rtcp/source/forward_error_correction.cc
new file mode 100644
index 0000000..22e1a38
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -0,0 +1,699 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <cassert>
+#include <cstring>
+#include <iterator>
+#include <vector>
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/forward_error_correction_internal.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
+// FEC header size in bytes.
+const uint8_t kFecHeaderSize = 10;
+
+// ULP header size in bytes (L bit is set): 2-byte protection length plus
+// the long packet mask.
+const uint8_t kUlpHeaderSizeLBitSet = (2 + kMaskSizeLBitSet);
+
+// ULP header size in bytes (L bit is cleared): 2-byte protection length plus
+// the short packet mask.
+const uint8_t kUlpHeaderSizeLBitClear = (2 + kMaskSizeLBitClear);
+
+// Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum.
+const uint8_t kTransportOverhead = 28;
+
+// Maximum number of FEC packets tracked at once; mirrors the media packet
+// limit. (An enum is used so no out-of-line definition is required.)
+enum { kMaxFecPackets = ForwardErrorCorrection::kMaxMediaPackets };
+
+// Used to link media packets to their protecting FEC packets.
+//
+// TODO(holmer): Refactor into a proper class.
+class ProtectedPacket : public ForwardErrorCorrection::SortablePacket {
+ public:
+  // Ref-counted media packet data; NULL until the corresponding media packet
+  // arrives or is recovered (see UpdateCoveringFECPackets()).
+  scoped_refptr<ForwardErrorCorrection::Packet> pkt;
+};
+
+typedef std::list<ProtectedPacket*> ProtectedPacketList;
+
+//
+// Used for internal storage of FEC packets in a list.
+//
+// TODO(holmer): Refactor into a proper class.
+class FecPacket : public ForwardErrorCorrection::SortablePacket {
+ public:
+    // Media packets protected by this FEC packet, sorted by sequence number.
+    ProtectedPacketList protectedPktList;
+    uint32_t ssrc;  // SSRC of the current frame.
+    // Raw FEC packet data (ref-counted).
+    scoped_refptr<ForwardErrorCorrection::Packet> pkt;
+};
+
+bool ForwardErrorCorrection::SortablePacket::LessThan(
+    const SortablePacket* first,
+    const SortablePacket* second) {
+  // Equal sequence numbers never compare as "less than". Otherwise |first|
+  // orders before |second| exactly when |second| carries the later sequence
+  // number, with 16-bit wrap-around taken into account.
+  if (first->seqNum == second->seqNum) {
+    return false;
+  }
+  return LatestSequenceNumber(first->seqNum, second->seqNum) == second->seqNum;
+}
+
+// |id| is the trace identifier passed to WEBRTC_TRACE calls. The FEC packet
+// buffers are preallocated (one per possible media packet).
+ForwardErrorCorrection::ForwardErrorCorrection(int32_t id)
+    : _id(id),
+      _generatedFecPackets(kMaxMediaPackets),
+      _fecPacketReceived(false) {
+}
+
+ForwardErrorCorrection::~ForwardErrorCorrection() {
+}
+
+// Input packet
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                    RTP Header (12 octets)                     |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                         RTP Payload                           |
+//   |                                                               |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+// Output packet
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                    FEC Header (10 octets)                     |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                      FEC Level 0 Header                       |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                     FEC Level 0 Payload                       |
+//   |                                                               |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+int32_t ForwardErrorCorrection::GenerateFEC(
+    const PacketList& mediaPacketList,
+    uint8_t protectionFactor,
+    int numImportantPackets,
+    bool useUnequalProtection,
+    PacketList* fecPacketList) {
+  // Generates FEC packets protecting |mediaPacketList| and appends them to
+  // |fecPacketList| (which must be empty on entry). |protectionFactor| is a
+  // Q8 protection ratio: numFecPackets ~= numMediaPackets * factor / 256,
+  // rounded. Returns 0 on success, -1 on invalid input.
+  if (mediaPacketList.empty()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s media packet list is empty", __FUNCTION__);
+    return -1;
+  }
+  if (!fecPacketList->empty()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s FEC packet list is not empty", __FUNCTION__);
+    return -1;
+  }
+  const uint16_t numMediaPackets = mediaPacketList.size();
+  // More than 16 media packets requires the long (L bit set) packet mask.
+  const uint8_t lBit = numMediaPackets > 16 ? 1 : 0;
+  const uint16_t numMaskBytes = (lBit == 1)?
+      kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+  if (numMediaPackets > kMaxMediaPackets) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s can only protect %d media packets per frame; %d requested",
+                 __FUNCTION__, kMaxMediaPackets, numMediaPackets);
+    return -1;
+  }
+
+  // Error checking on the number of important packets.
+  // Can't have more important packets than media packets.
+  if (numImportantPackets > numMediaPackets) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+        "Number of important packets (%d) greater than number of media "
+        "packets (%d)", numImportantPackets, numMediaPackets);
+    return -1;
+  }
+  if (numImportantPackets < 0) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "Number of important packets (%d) less than zero",
+                 numImportantPackets);
+    return -1;
+  }
+  // Do some error checking on the media packets.
+  PacketList::const_iterator mediaListIt = mediaPacketList.begin();
+  while (mediaListIt != mediaPacketList.end()) {
+    Packet* mediaPacket = *mediaListIt;
+    assert(mediaPacket);
+
+    if (mediaPacket->length < kRtpHeaderSize) {
+      WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                   "%s media packet (%d bytes) is smaller than RTP header",
+                   __FUNCTION__, mediaPacket->length);
+      return -1;
+    }
+
+    // Ensure our FEC packets will fit in a typical MTU.
+    if (mediaPacket->length + PacketOverhead() + kTransportOverhead >
+        IP_PACKET_SIZE) {
+      WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+          "%s media packet (%d bytes) with overhead is larger than MTU(%d)",
+          __FUNCTION__, mediaPacket->length, IP_PACKET_SIZE);
+      return -1;
+    }
+    mediaListIt++;
+  }
+  // Result in Q0 with an unsigned round.
+  uint32_t numFecPackets = (numMediaPackets * protectionFactor + (1 << 7)) >> 8;
+  // Generate at least one FEC packet if we need protection.
+  if (protectionFactor > 0 && numFecPackets == 0) {
+    numFecPackets = 1;
+  }
+  if (numFecPackets == 0) {
+    return 0;
+  }
+  assert(numFecPackets <= numMediaPackets);
+
+  // Prepare FEC packets by setting them to 0. A zero length marks a packet
+  // not yet touched by GenerateFecBitStrings().
+  for (uint32_t i = 0; i < numFecPackets; i++) {
+    memset(_generatedFecPackets[i].data, 0, IP_PACKET_SIZE);
+    _generatedFecPackets[i].length = 0;
+    fecPacketList->push_back(&_generatedFecPackets[i]);
+  }
+
+  // -- Generate packet masks --
+  // std::vector replaces the previous manually managed new[]/delete[] pair;
+  // it is exception safe and zero-initializes the mask.
+  std::vector<uint8_t> packetMask(numFecPackets * numMaskBytes, 0);
+  internal::GeneratePacketMasks(numMediaPackets, numFecPackets,
+                                numImportantPackets, useUnequalProtection,
+                                &packetMask[0]);
+
+  GenerateFecBitStrings(mediaPacketList, &packetMask[0], numFecPackets);
+
+  GenerateFecUlpHeaders(mediaPacketList, &packetMask[0], numFecPackets);
+
+  return 0;
+}
+
+// XORs the selected media packets (per |packetMask|) into each FEC packet:
+// builds the bitwise-recoverable FEC header fields (RTP bytes 0-1, bytes 4-7,
+// and the network-ordered payload length) plus the XOR of the protected
+// payloads. Relies on _generatedFecPackets[i].length == 0 marking an
+// untouched FEC packet, as set up by GenerateFEC().
+void ForwardErrorCorrection::GenerateFecBitStrings(
+    const PacketList& mediaPacketList,
+    uint8_t* packetMask,
+    uint32_t numFecPackets) {
+  uint8_t mediaPayloadLength[2];
+  const uint8_t lBit = mediaPacketList.size() > 16 ? 1 : 0;
+  const uint16_t numMaskBytes = (lBit == 1) ?
+      kMaskSizeLBitSet : kMaskSizeLBitClear;
+  const uint16_t ulpHeaderSize = (lBit == 1) ?
+      kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;
+  // Offset between a byte's position in the media packet and its position in
+  // the FEC packet: the FEC + ULP headers replace the 12-byte RTP header.
+  const uint16_t fecRtpOffset = kFecHeaderSize + ulpHeaderSize - kRtpHeaderSize;
+
+  for (uint32_t i = 0; i < numFecPackets; i++) {
+    PacketList::const_iterator mediaListIt = mediaPacketList.begin();
+    uint32_t pktMaskIdx = i * numMaskBytes;
+    uint32_t mediaPktIdx = 0;
+    uint16_t fecPacketLength = 0;
+    while (mediaListIt != mediaPacketList.end()) {
+      // Each FEC packet has a multiple byte mask; bit (7 - mediaPktIdx) of
+      // the current mask byte selects the current media packet.
+      if (packetMask[pktMaskIdx] & (1 << (7 - mediaPktIdx))) {
+        Packet* mediaPacket = *mediaListIt;
+
+        // Assign network-ordered media payload length.
+        ModuleRTPUtility::AssignUWord16ToBuffer(
+            mediaPayloadLength,
+            mediaPacket->length - kRtpHeaderSize);
+
+        fecPacketLength = mediaPacket->length + fecRtpOffset;
+        // On the first protected packet, we don't need to XOR.
+        if (_generatedFecPackets[i].length == 0) {
+          // Copy the first 2 bytes of the RTP header.
+          memcpy(_generatedFecPackets[i].data, mediaPacket->data, 2);
+          // Copy the 5th to 8th bytes of the RTP header.
+          memcpy(&_generatedFecPackets[i].data[4], &mediaPacket->data[4], 4);
+          // Copy network-ordered payload size.
+          memcpy(&_generatedFecPackets[i].data[8], mediaPayloadLength, 2);
+
+          // Copy RTP payload, leaving room for the ULP header.
+          memcpy(&_generatedFecPackets[i].data[kFecHeaderSize + ulpHeaderSize],
+                 &mediaPacket->data[kRtpHeaderSize],
+                 mediaPacket->length - kRtpHeaderSize);
+        } else {
+          // XOR with the first 2 bytes of the RTP header.
+          _generatedFecPackets[i].data[0] ^= mediaPacket->data[0];
+          _generatedFecPackets[i].data[1] ^= mediaPacket->data[1];
+
+          // XOR with the 5th to 8th bytes of the RTP header.
+          for (uint32_t j = 4; j < 8; j++) {
+            _generatedFecPackets[i].data[j] ^= mediaPacket->data[j];
+          }
+
+          // XOR with the network-ordered payload size.
+          _generatedFecPackets[i].data[8] ^= mediaPayloadLength[0];
+          _generatedFecPackets[i].data[9] ^= mediaPayloadLength[1];
+
+          // XOR with RTP payload, leaving room for the ULP header.
+          for (int32_t j = kFecHeaderSize + ulpHeaderSize;
+              j < fecPacketLength; j++) {
+            _generatedFecPackets[i].data[j] ^=
+                mediaPacket->data[j - fecRtpOffset];
+          }
+        }
+        // The FEC packet is as long as its longest protected packet.
+        if (fecPacketLength > _generatedFecPackets[i].length) {
+          _generatedFecPackets[i].length = fecPacketLength;
+        }
+      }
+      mediaListIt++;
+      mediaPktIdx++;
+      if (mediaPktIdx == 8) {
+        // Switch to the next mask byte.
+        mediaPktIdx = 0;
+        pktMaskIdx++;
+      }
+    }
+    assert(_generatedFecPackets[i].length);
+    // Note: a zero length here shouldn't happen; it would mean the packet
+    // mask is wrong or poorly designed (an FEC packet protecting nothing).
+  }
+}
+
+// Fills in the remaining FEC header fields (E/L bits, SN base) and the ULP
+// level-0 header (protection length and packet mask) for every generated FEC
+// packet. Must run after GenerateFecBitStrings(), since the protection length
+// is derived from the final FEC packet length.
+void ForwardErrorCorrection::GenerateFecUlpHeaders(
+    const PacketList& mediaPacketList,
+    uint8_t* packetMask,
+    uint32_t numFecPackets) {
+  // -- Generate FEC and ULP headers --
+  //
+  // FEC Header, 10 bytes
+  //    0                   1                   2                   3
+  //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |E|L|P|X|  CC   |M| PT recovery |            SN base            |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |                          TS recovery                          |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |        length recovery        |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //
+  // ULP Header, 4 bytes (for L = 0)
+  //    0                   1                   2                   3
+  //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |       Protection Length       |             mask              |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |              mask cont. (present only when L = 1)             |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  PacketList::const_iterator mediaListIt = mediaPacketList.begin();
+  Packet* mediaPacket = *mediaListIt;
+  assert(mediaPacket != NULL);
+  const uint8_t lBit = mediaPacketList.size() > 16 ? 1 : 0;
+  const uint16_t numMaskBytes = (lBit == 1)?
+      kMaskSizeLBitSet : kMaskSizeLBitClear;
+  const uint16_t ulpHeaderSize = (lBit == 1)?
+      kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;
+
+  for (uint32_t i = 0; i < numFecPackets; i++) {
+    // -- FEC header --
+    _generatedFecPackets[i].data[0] &= 0x7f; // Set E to zero.
+    if (lBit == 0) {
+      _generatedFecPackets[i].data[0] &= 0xbf; // Clear the L bit.
+    } else {
+      _generatedFecPackets[i].data[0] |= 0x40; // Set the L bit.
+    }
+    // Two byte sequence number from first RTP packet to SN base.
+    // We use the same sequence number base for every FEC packet,
+    // but that's not required in general.
+    memcpy(&_generatedFecPackets[i].data[2], &mediaPacket->data[2], 2);
+
+    // -- ULP header --
+    // Copy the payload size to the protection length field.
+    // (We protect the entire packet.)
+    ModuleRTPUtility::AssignUWord16ToBuffer(&_generatedFecPackets[i].data[10],
+        _generatedFecPackets[i].length - kFecHeaderSize - ulpHeaderSize);
+
+    // Copy the packet mask.
+    memcpy(&_generatedFecPackets[i].data[12], &packetMask[i * numMaskBytes],
+           numMaskBytes);
+  }
+}
+
+// Drops all FEC decoding state and frees every packet this instance still
+// owns, including any recovered packets the caller has not claimed.
+void ForwardErrorCorrection::ResetState(
+    RecoveredPacketList* recoveredPacketList) {
+  _fecPacketReceived = false;
+
+  // Free the memory for any existing recovered packets, if the user hasn't.
+  while (!recoveredPacketList->empty()) {
+    delete recoveredPacketList->front();
+    recoveredPacketList->pop_front();
+  }
+  assert(recoveredPacketList->empty());
+
+  // Free the FEC packet list. DiscardFECPacket() deletes the FEC packet and
+  // all of its ProtectedPacket entries, matching the cleanup this function
+  // previously duplicated inline.
+  while (!_fecPacketList.empty()) {
+    DiscardFECPacket(_fecPacketList.front());
+    _fecPacketList.pop_front();
+  }
+  assert(_fecPacketList.empty());
+}
+
+// Inserts a received media packet into the sorted recovered-packet list,
+// dropping duplicates, and links it to any FEC packets that protect it.
+void ForwardErrorCorrection::InsertMediaPacket(
+    ReceivedPacket* rxPacket,
+    RecoveredPacketList* recoveredPacketList) {
+  // Ignore the packet if one with the same sequence number is already listed.
+  for (RecoveredPacketList::iterator it = recoveredPacketList->begin();
+      it != recoveredPacketList->end(); ++it) {
+    if ((*it)->seqNum == rxPacket->seqNum) {
+      // Duplicate packet: release our reference to the media data.
+      rxPacket->pkt = NULL;
+      return;
+    }
+  }
+
+  // Wrap the media packet, taking over the data reference.
+  RecoveredPacket* inserted = new RecoveredPacket;
+  inserted->wasRecovered = false;
+  inserted->returned = false;
+  inserted->seqNum = rxPacket->seqNum;
+  inserted->pkt = rxPacket->pkt;
+  inserted->pkt->length = rxPacket->pkt->length;
+
+  // TODO(holmer): Consider replacing this with a binary search for the right
+  // position, and then just insert the new packet. Would get rid of the sort.
+  recoveredPacketList->push_back(inserted);
+  recoveredPacketList->sort(SortablePacket::LessThan);
+  UpdateCoveringFECPackets(inserted);
+}
+
+// Points every FEC packet that protects |packet| at its data, so recovery
+// does not need to search the recovered list again later.
+void ForwardErrorCorrection::UpdateCoveringFECPackets(RecoveredPacket* packet) {
+  for (FecPacketList::iterator it = _fecPacketList.begin();
+      it != _fecPacketList.end(); ++it) {
+    // Is this FEC packet protecting the media packet |packet|?
+    // |protectedPktList| is kept sorted by sequence number (see
+    // InsertFECPacket()), so lower_bound can locate the candidate entry.
+    ProtectedPacketList::iterator protected_it = std::lower_bound(
+        (*it)->protectedPktList.begin(),
+        (*it)->protectedPktList.end(),
+        packet,
+        SortablePacket::LessThan);
+    if (protected_it != (*it)->protectedPktList.end() &&
+        (*protected_it)->seqNum == packet->seqNum) {
+      // Found an FEC packet which is protecting |packet|.
+      (*protected_it)->pkt = packet->pkt;
+    }
+  }
+}
+
+// Parses an incoming FEC packet: builds its list of protected sequence
+// numbers from the packet mask, links any media packets that already arrived
+// or were recovered, and inserts it into the sorted FEC packet list.
+// Duplicates and all-zero-mask packets are dropped.
+void ForwardErrorCorrection::InsertFECPacket(
+    ReceivedPacket* rxPacket,
+    const RecoveredPacketList* recoveredPacketList) {
+  _fecPacketReceived = true;
+
+  // Check for duplicate.
+  FecPacketList::iterator fecPacketListIt = _fecPacketList.begin();
+  while (fecPacketListIt != _fecPacketList.end()) {
+    if (rxPacket->seqNum == (*fecPacketListIt)->seqNum) {
+      // Delete duplicate FEC packet data.
+      rxPacket->pkt = NULL;
+      return;
+    }
+    fecPacketListIt++;
+  }
+  FecPacket* fecPacket = new FecPacket;
+  fecPacket->pkt = rxPacket->pkt;
+  fecPacket->seqNum = rxPacket->seqNum;
+  fecPacket->ssrc = rxPacket->ssrc;
+
+  // The SN base is carried in bytes 2-3 of the FEC header.
+  const uint16_t seqNumBase = ModuleRTPUtility::BufferToUWord16(
+      &fecPacket->pkt->data[2]);
+  const uint16_t maskSizeBytes = (fecPacket->pkt->data[0] & 0x40) ?
+      kMaskSizeLBitSet : kMaskSizeLBitClear;  // L bit set?
+
+  // Mask bit k (counting from the MSB of byte 12) protects sequence number
+  // seqNumBase + k.
+  for (uint16_t byteIdx = 0; byteIdx < maskSizeBytes; byteIdx++) {
+    uint8_t packetMask = fecPacket->pkt->data[12 + byteIdx];
+    for (uint16_t bitIdx = 0; bitIdx < 8; bitIdx++) {
+      if (packetMask & (1 << (7 - bitIdx))) {
+        ProtectedPacket* protectedPacket = new ProtectedPacket;
+        fecPacket->protectedPktList.push_back(protectedPacket);
+        // This wraps naturally with the sequence number.
+        protectedPacket->seqNum = static_cast<uint16_t>(seqNumBase +
+            (byteIdx << 3) + bitIdx);
+        protectedPacket->pkt = NULL;
+      }
+    }
+  }
+  if (fecPacket->protectedPktList.empty()) {
+    // All-zero packet mask; we can discard this FEC packet.
+    // The format string previously had a single %u but was passed both the
+    // sequence number and __FUNCTION__; it now matches the file's
+    // "%s ..." convention with the arguments in the right order.
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "%s FEC packet %u has an all-zero packet mask.",
+                 __FUNCTION__, fecPacket->seqNum);
+    delete fecPacket;
+  } else {
+    AssignRecoveredPackets(fecPacket,
+                           recoveredPacketList);
+    // TODO(holmer): Consider replacing this with a binary search for the right
+    // position, and then just insert the new packet. Would get rid of the sort.
+    _fecPacketList.push_back(fecPacket);
+    _fecPacketList.sort(SortablePacket::LessThan);
+    // Bound memory use: drop the oldest FEC packet once over the cap.
+    if (_fecPacketList.size() > kMaxFecPackets) {
+      DiscardFECPacket(_fecPacketList.front());
+      _fecPacketList.pop_front();
+    }
+    assert(_fecPacketList.size() <= kMaxFecPackets);
+  }
+}
+
+void ForwardErrorCorrection::AssignRecoveredPackets(
+    FecPacket* fec_packet,
+    const RecoveredPacketList* recovered_packets) {
+  // Search for missing packets which have arrived or have been recovered by
+  // another FEC packet.
+  ProtectedPacketList* not_recovered = &fec_packet->protectedPktList;
+  RecoveredPacketList already_recovered;
+  // Both lists are sorted by sequence number and LessThan compares only
+  // seqNum, so set_intersection can match RecoveredPacket entries against
+  // ProtectedPacket entries even though they are different types.
+  std::set_intersection(
+      recovered_packets->begin(), recovered_packets->end(),
+      not_recovered->begin(), not_recovered->end(),
+      std::inserter(already_recovered, already_recovered.end()),
+      SortablePacket::LessThan);
+  // Set the FEC pointers to all recovered packets so that we don't have to
+  // search for them when we are doing recovery.
+  ProtectedPacketList::iterator not_recovered_it = not_recovered->begin();
+  for (RecoveredPacketList::iterator it = already_recovered.begin();
+      it != already_recovered.end(); ++it) {
+    // Search for the next recovered packet in |not_recovered|. Since both
+    // lists are sorted, this walk never backs up, so the loop is linear in
+    // the protected list's length overall.
+    while ((*not_recovered_it)->seqNum != (*it)->seqNum)
+      ++not_recovered_it;
+    (*not_recovered_it)->pkt = (*it)->pkt;
+  }
+}
+
+// Consumes every entry of |receivedPacketList|, routing FEC packets into the
+// internal FEC list and media packets into |recoveredPacketList|. The
+// ReceivedPacket wrappers are freed here; the packet data itself is handed
+// off (or dropped, for duplicates) by the insert helpers.
+void ForwardErrorCorrection::InsertPackets(
+    ReceivedPacketList* receivedPacketList,
+    RecoveredPacketList* recoveredPacketList) {
+  while (!receivedPacketList->empty()) {
+    ReceivedPacket* received = receivedPacketList->front();
+    receivedPacketList->pop_front();
+
+    if (received->isFec) {
+      InsertFECPacket(received, recoveredPacketList);
+    } else {
+      // Insert packet at the end of |recoveredPacketList|.
+      InsertMediaPacket(received, recoveredPacketList);
+    }
+    // Delete the received packet "wrapper", but not the packet data.
+    delete received;
+  }
+  assert(receivedPacketList->empty());
+  DiscardOldPackets(recoveredPacketList);
+}
+
+// Seeds |recovered| from the FEC packet itself: the FEC payload becomes the
+// starting payload, and the bitwise-recoverable RTP header fields are copied
+// from the FEC header. XorPackets() is then applied once per present media
+// packet to complete the recovery.
+void ForwardErrorCorrection::InitRecovery(
+    const FecPacket* fec_packet,
+    RecoveredPacket* recovered) {
+  // This is the first packet which we try to recover with.
+  const uint16_t ulpHeaderSize = fec_packet->pkt->data[0] & 0x40 ?
+      kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;  // L bit set?
+  // NOTE(review): raw new assigned to a scoped_refptr member — assumes the
+  // refptr takes a reference on assignment; confirm against scoped_refptr.
+  recovered->pkt = new Packet;
+  memset(recovered->pkt->data, 0, IP_PACKET_SIZE);
+  recovered->returned = false;
+  recovered->wasRecovered = true;
+  uint8_t protectionLength[2];
+  // Copy the protection length from the ULP header (bytes 10-11).
+  memcpy(protectionLength, &fec_packet->pkt->data[10], 2);
+  // Copy FEC payload, skipping the ULP header.
+  memcpy(&recovered->pkt->data[kRtpHeaderSize],
+         &fec_packet->pkt->data[kFecHeaderSize + ulpHeaderSize],
+         ModuleRTPUtility::BufferToUWord16(protectionLength));
+  // Copy the length recovery field.
+  memcpy(recovered->length_recovery, &fec_packet->pkt->data[8], 2);
+  // Copy the first 2 bytes of the FEC header.
+  memcpy(recovered->pkt->data, fec_packet->pkt->data, 2);
+  // Copy the 5th to 8th bytes of the FEC header.
+  memcpy(&recovered->pkt->data[4], &fec_packet->pkt->data[4], 4);
+  // Set the SSRC field.
+  ModuleRTPUtility::AssignUWord32ToBuffer(&recovered->pkt->data[8],
+                                          fec_packet->ssrc);
+}
+
+// Finalizes a recovered packet after all XOR passes: stamps the RTP version,
+// writes the recovered sequence number, and reconstructs the packet length.
+void ForwardErrorCorrection::FinishRecovery(RecoveredPacket* recovered) {
+  uint8_t* data = recovered->pkt->data;
+  // Force the RTP version field (top two bits of byte 0) to 2 (binary 10).
+  data[0] = (data[0] | 0x80) & 0xbf;
+
+  // Write the sequence number determined during recovery.
+  ModuleRTPUtility::AssignUWord16ToBuffer(&data[2], recovered->seqNum);
+
+  // Total length = recovered payload length + RTP header.
+  recovered->pkt->length = ModuleRTPUtility::BufferToUWord16(
+      recovered->length_recovery) + kRtpHeaderSize;
+}
+
+// XORs one present media packet's contribution out of the recovery buffer.
+void ForwardErrorCorrection::XorPackets(const Packet* src_packet,
+                                        RecoveredPacket* dst_packet) {
+  uint8_t* dst = dst_packet->pkt->data;
+  const uint8_t* src = src_packet->data;
+
+  // XOR the protected RTP header parts: bytes 0-1 and bytes 4-7. Bytes 2-3
+  // (sequence number) and 8-11 (SSRC) are reconstructed separately in
+  // FinishRecovery()/InitRecovery().
+  dst[0] ^= src[0];
+  dst[1] ^= src[1];
+  for (uint32_t i = 4; i < 8; ++i) {
+    dst[i] ^= src[i];
+  }
+
+  // XOR the length recovery field with the network-ordered payload size.
+  uint8_t payloadLength[2];
+  ModuleRTPUtility::AssignUWord16ToBuffer(
+      payloadLength,
+      src_packet->length - kRtpHeaderSize);
+  dst_packet->length_recovery[0] ^= payloadLength[0];
+  dst_packet->length_recovery[1] ^= payloadLength[1];
+
+  // XOR with RTP payload.
+  // TODO(marpan/ajm): Are we doing more XORs than required here?
+  for (int32_t i = kRtpHeaderSize; i < src_packet->length; ++i) {
+    dst[i] ^= src[i];
+  }
+}
+
+// Reconstructs the single missing packet covered by |fecPacket|: seeds the
+// buffer from the FEC packet, XORs out every present protected packet, and
+// finalizes the header. Exactly one protected packet must be missing.
+void ForwardErrorCorrection::RecoverPacket(
+    const FecPacket* fecPacket,
+    RecoveredPacket* recPacketToInsert) {
+  InitRecovery(fecPacket, recPacketToInsert);
+  for (ProtectedPacketList::const_iterator it =
+           fecPacket->protectedPktList.begin();
+       it != fecPacket->protectedPktList.end(); ++it) {
+    if ((*it)->pkt == NULL) {
+      // The missing packet: this is the sequence number we are rebuilding.
+      recPacketToInsert->seqNum = (*it)->seqNum;
+    } else {
+      // Cancel out this present packet's contribution.
+      XorPackets((*it)->pkt, recPacketToInsert);
+    }
+  }
+  FinishRecovery(recPacketToInsert);
+}
+
+// Repeatedly scans the FEC packet list. An FEC packet with exactly one
+// missing protected packet can recover it; each recovery may enable further
+// recoveries, so the scan restarts from the beginning whenever one succeeds.
+// Fully satisfied FEC packets are discarded as they are encountered.
+void ForwardErrorCorrection::AttemptRecover(
+    RecoveredPacketList* recoveredPacketList) {
+  FecPacketList::iterator fecPacketListIt = _fecPacketList.begin();
+  while (fecPacketListIt != _fecPacketList.end()) {
+    // Search for each FEC packet's protected media packets.
+    int packets_missing = NumCoveredPacketsMissing(*fecPacketListIt);
+
+    // We can only recover one packet with an FEC packet.
+    if (packets_missing == 1) {
+      // Recovery possible.
+      RecoveredPacket* packetToInsert = new RecoveredPacket;
+      packetToInsert->pkt = NULL;
+      RecoverPacket(*fecPacketListIt, packetToInsert);
+
+      // Add recovered packet to the list of recovered packets and update any
+      // FEC packets covering this packet with a pointer to the data.
+      // TODO(holmer): Consider replacing this with a binary search for the
+      // right position, and then just insert the new packet. Would get rid of
+      // the sort.
+      recoveredPacketList->push_back(packetToInsert);
+      recoveredPacketList->sort(SortablePacket::LessThan);
+      UpdateCoveringFECPackets(packetToInsert);
+      DiscardOldPackets(recoveredPacketList);
+      DiscardFECPacket(*fecPacketListIt);
+      // The erase() return value was previously stored into the iterator and
+      // then immediately overwritten; discard it instead.
+      _fecPacketList.erase(fecPacketListIt);
+
+      // A packet has been recovered. We need to check the FEC list again, as
+      // this may allow additional packets to be recovered.
+      // Restart for first FEC packet.
+      fecPacketListIt = _fecPacketList.begin();
+    } else if (packets_missing == 0) {
+      // Either all protected packets arrived or have been recovered. We can
+      // discard this FEC packet.
+      DiscardFECPacket(*fecPacketListIt);
+      fecPacketListIt = _fecPacketList.erase(fecPacketListIt);
+    } else {
+      fecPacketListIt++;
+    }
+  }
+}
+
+// Counts the missing protected packets of |fec_packet|, stopping at two:
+// a single FEC packet can never recover more than one loss, so any count
+// above one is equivalent for the caller.
+int ForwardErrorCorrection::NumCoveredPacketsMissing(
+    const FecPacket* fec_packet) {
+  int missing = 0;
+  ProtectedPacketList::const_iterator it = fec_packet->protectedPktList.begin();
+  while (it != fec_packet->protectedPktList.end()) {
+    if ((*it)->pkt == NULL && ++missing > 1) {
+      return missing;  // Early out: recovery is already impossible.
+    }
+    ++it;
+  }
+  return missing;
+}
+
+// Frees an FEC packet and all of its ProtectedPacket wrappers. The wrapped
+// packet data is ref-counted (scoped_refptr) and released automatically.
+void ForwardErrorCorrection::DiscardFECPacket(FecPacket* fec_packet) {
+  ProtectedPacketList& protectedList = fec_packet->protectedPktList;
+  while (!protectedList.empty()) {
+    delete protectedList.front();
+    protectedList.pop_front();
+  }
+  assert(protectedList.empty());
+  delete fec_packet;
+}
+
+// Caps the recovered-packet list at kMaxMediaPackets by deleting the oldest
+// (front) entries; the list is kept sorted by sequence number.
+void ForwardErrorCorrection::DiscardOldPackets(
+    RecoveredPacketList* recoveredPacketList) {
+  while (recoveredPacketList->size() > kMaxMediaPackets) {
+    delete recoveredPacketList->front();
+    recoveredPacketList->pop_front();
+  }
+  assert(recoveredPacketList->size() <= kMaxMediaPackets);
+}
+
+// Main decode entry point: consumes |receivedPacketList| (both media and FEC
+// packets) and appends any packets that could be recovered to
+// |recoveredPacketList|. Always returns 0.
+int32_t ForwardErrorCorrection::DecodeFEC(
+    ReceivedPacketList* receivedPacketList,
+    RecoveredPacketList* recoveredPacketList) {
+  // TODO(marpan/ajm): can we check for multiple ULP headers, and return an
+  // error?
+  InsertPackets(receivedPacketList, recoveredPacketList);
+  AttemptRecover(recoveredPacketList);
+  return 0;
+}
+
+// Worst-case per-packet FEC overhead in bytes: the FEC header plus the
+// larger (L bit set) ULP header variant.
+uint16_t ForwardErrorCorrection::PacketOverhead() {
+  return kFecHeaderSize + kUlpHeaderSizeLBitSet;
+}
+
+// Returns whichever of |first| and |second| is the later RTP sequence number,
+// using the standard half-range (serial number) comparison of RFC 1982 /
+// RFC 3550: |second| is later when it is 1..0x7fff increments ahead of
+// |first| in modulo-2^16 arithmetic. The previous implementation only
+// detected wrap-around inside a narrow window (one value > 0xff00 and the
+// other < 0x00ff), so it misordered pairs such as first = 0xfffe,
+// second = 0x0100. Equal inputs return the shared value.
+uint16_t ForwardErrorCorrection::LatestSequenceNumber(uint16_t first,
+                                                      uint16_t second) {
+  return static_cast<uint16_t>(second - first) < 0x8000 ? second : first;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/forward_error_correction.h b/trunk/src/modules/rtp_rtcp/source/forward_error_correction.h
new file mode 100644
index 0000000..a7588a8
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/forward_error_correction.h
@@ -0,0 +1,289 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_H_
+
+#include <list>
+#include <vector>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "system_wrappers/interface/ref_count.h"
+#include "system_wrappers/interface/scoped_refptr.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class FecPacket;
+
+/**
+ * Performs codec-independent forward error correction (FEC), based on RFC 5109.
+ * Option exists to enable unequal protection (UEP) across packets.
+ * This is not to be confused with protection within packets
+ * (referred to as unequal level protection (ULP) in RFC 5109).
+ */
+class ForwardErrorCorrection {
+ public:
+  // Maximum number of media packets we can protect
+  static const unsigned int kMaxMediaPackets = 48u;
+
+  // TODO(holmer): As a next step all these struct-like packet classes should be
+  // refactored into proper classes, and their members should be made private.
+  // This will require parts of the functionality in forward_error_correction.cc
+  // and receiver_fec.cc to be refactored into the packet classes.
+  class Packet {
+   public:
+    Packet() : length(0), data(), ref_count_(0) {}
+    virtual ~Packet() {}
+
+    // Add a reference.
+    virtual int32_t AddRef() {
+      return ++ref_count_;
+    }
+
+    // Release a reference. Will delete the object if the reference count
+    // reaches zero.
+    virtual int32_t Release() {
+      int32_t ref_count;
+      ref_count = --ref_count_;
+      if (ref_count == 0)
+        delete this;
+      return ref_count;
+    }
+
+    uint16_t length;  // Length of packet in bytes.
+    uint8_t data[IP_PACKET_SIZE];  // Packet data.
+
+   private:
+    int32_t ref_count_;  // Counts the number of references to a packet.
+  };
+
+  // TODO(holmer): Refactor into a proper class.
+  class SortablePacket {
+   public:
+    // True if first is less than or equal to second.
+    static bool LessThan(const SortablePacket* first,
+                         const SortablePacket* second);
+
+    uint16_t seqNum;
+  };
+
+  /**
+   * The received list parameter of #DecodeFEC() must reference structs of this
+   * type. The lastMediaPktInFrame is not required to be used for correct
+   * recovery, but will reduce delay by allowing #DecodeFEC() to pre-emptively
+   * determine frame completion. If set, we assume a FEC stream, and the
+   * following assumptions must hold:\n
+   *
+   * 1. The media packets in a frame have contiguous sequence numbers, i.e. the
+   *    frame's FEC packets have sequence numbers either lower than the first
+   *    media packet or higher than the last media packet.\n
+   * 2. All FEC packets have a sequence number base equal to the first media
+   *    packet in the corresponding frame.\n
+   *
+   * The ssrc member is needed to ensure we can restore the SSRC field of
+   * recovered packets. In most situations this could be retrieved from other
+   * media packets, but in the case of an FEC packet protecting a single
+   * missing media packet, we have no other means of obtaining it.
+   */
+  // TODO(holmer): Refactor into a proper class.
+  class ReceivedPacket : public SortablePacket {
+   public:
+    uint32_t ssrc;  // SSRC of the current frame. Must be set for FEC
+                    // packets, but not required for media packets.
+    bool isFec;  // Set to true if this is an FEC packet and false
+                 // otherwise.
+    scoped_refptr<Packet> pkt;  // Pointer to the packet storage.
+  };
+
+  /**
+   * The recovered list parameter of #DecodeFEC() will reference structs of
+   * this type.
+   */
+  // TODO(holmer): Refactor into a proper class.
+  class RecoveredPacket : public SortablePacket {
+   public:
+    bool wasRecovered;  // Will be true if this packet was recovered by
+                        // the FEC. Otherwise it was a media packet passed in
+                        // through the received packet list.
+    bool returned;  // True when the packet already has been returned to the
+                    // caller through the callback.
+    uint8_t length_recovery[2];  // Two bytes used for recovering the packet
+                                 // length with XOR operations.
+    scoped_refptr<Packet> pkt;  // Pointer to the packet storage.
+  };
+
+  typedef std::list<Packet*> PacketList;
+  typedef std::list<ReceivedPacket*> ReceivedPacketList;
+  typedef std::list<RecoveredPacket*> RecoveredPacketList;
+
+  /**
+   * \param[in] id Module ID
+   */
+  ForwardErrorCorrection(int32_t id);
+
+  virtual ~ForwardErrorCorrection();
+
+  /**
+   * Generates a list of FEC packets from supplied media packets.
+   *
+   * \param[in]  mediaPacketList     List of media packets to protect, of type
+   *                                 #Packet. All packets must belong to the
+   *                                 same frame and the list must not be empty.
+   * \param[in]  protectionFactor    FEC protection overhead in the [0, 255]
+   *                                 domain. To obtain 100% overhead, or an
+   *                                 equal number of FEC packets as media
+   *                                 packets, use 255.
+   * \param[in] numImportantPackets  The number of "important" packets in the
+   *                                 frame. These packets may receive greater
+   *                                 protection than the remaining packets. The
+   *                                 important packets must be located at the
+   *                                 start of the media packet list. For codecs
+   *                                 with data partitioning, the important
+   *                                 packets may correspond to first partition
+   *                                 packets.
+   * \param[in] useUnequalProtection Parameter to enable/disable unequal
+   *                                 protection (UEP) across packets. Enabling
+   *                                 UEP will allocate more protection to the
+   *                                 numImportantPackets from the start of the
+   *                                 mediaPacketList.
+   * \param[out] fecPacketList       List of FEC packets, of type #Packet. Must
+   *                                 be empty on entry. The memory available
+   *                                 through the list will be valid until the
+   *                                 next call to GenerateFEC().
+   *
+   * \return 0 on success, -1 on failure.
+   */
+  int32_t GenerateFEC(const PacketList& mediaPacketList,
+                      uint8_t protectionFactor,
+                      int numImportantPackets,
+                      bool useUnequalProtection,
+                      PacketList* fecPacketList);
+
+  /**
+   *  Decodes a list of media and FEC packets. It will parse the input received
+   *  packet list, storing FEC packets internally and inserting media packets to
+   *  the output recovered packet list. The recovered list will be sorted by
+   *  ascending sequence number and have duplicates removed. The function
+   *  should be called as new packets arrive, with the recovered list being
+   *  progressively assembled with each call. The received packet list will be
+   *  empty at output.\n
+   *
+   *  The user will allocate packets submitted through the received list. The
+   *  function will handle allocation of recovered packets and optionally
+   *  deleting of all packet memory. The user may delete the recovered list
+   *  packets, in which case they must remove deleted packets from the
+   *  recovered list.\n
+   *
+   * \param[in]  receivedPacketList  List of new received packets, of type
+   *                                 #ReceivedPacket, belonging to a single
+   *                                 frame. At output the list will be empty,
+   *                                 with packets either stored internally,
+   *                                 or accessible through the recovered list.
+   * \param[out] recoveredPacketList List of recovered media packets, of type
+   *                                 #RecoveredPacket, belonging to a single
+   *                                 frame. The memory available through the
+   *                                 list will be valid until the next call to
+   *                                 DecodeFEC().
+   *
+   * \return 0 on success, -1 on failure.
+   */
+  int32_t DecodeFEC(ReceivedPacketList* receivedPacketList,
+                    RecoveredPacketList* recoveredPacketList);
+  /**
+   * Gets the size in bytes of the FEC/ULP headers, which must be accounted for
+   * as packet overhead.
+   * \return Packet overhead in bytes.
+   */
+  static uint16_t PacketOverhead();
+
+  // Reset internal states from last frame and clear the recoveredPacketList.
+  // Frees all memory allocated by this class.
+  void ResetState(RecoveredPacketList* recoveredPacketList);
+
+ private:
+  typedef std::list<FecPacket*> FecPacketList;
+
+  void GenerateFecUlpHeaders(const PacketList& mediaPacketList,
+                             uint8_t* packetMask,
+                             uint32_t numFecPackets);
+
+  void GenerateFecBitStrings(const PacketList& mediaPacketList,
+                             uint8_t* packetMask,
+                             uint32_t numFecPackets);
+
+  // Insert received packets into FEC or recovered list.
+  void InsertPackets(ReceivedPacketList* receivedPacketList,
+                     RecoveredPacketList* recoveredPacketList);
+
+  // Insert media packet into recovered packet list. We delete duplicates.
+  void InsertMediaPacket(ReceivedPacket* rxPacket,
+                         RecoveredPacketList* recoveredPacketList);
+
+  // Assigns pointers to the recovered packet from all FEC packets which cover
+  // it.
+  // Note: This reduces the complexity when we want to try to recover a packet
+  // since we don't have to find the intersection between recovered packets and
+  // packets covered by the FEC packet.
+  void UpdateCoveringFECPackets(RecoveredPacket* packet);
+
+  // Insert packet into FEC list. We delete duplicates.
+  void InsertFECPacket(ReceivedPacket* rxPacket,
+                       const RecoveredPacketList* recoveredPacketList);
+
+  // Assigns pointers to already recovered packets covered by this FEC packet.
+  static void AssignRecoveredPackets(
+      FecPacket* fec_packet,
+      const RecoveredPacketList* recovered_packets);
+
+  // Insert into recovered list in correct position.
+  void InsertRecoveredPacket(
+      RecoveredPacket* recPacketToInsert,
+      RecoveredPacketList* recoveredPacketList);
+
+  // Attempt to recover missing packets.
+  void AttemptRecover(RecoveredPacketList* recoveredPacketList);
+
+  // Initializes the packet recovery using the FEC packet.
+  static  void InitRecovery(const FecPacket* fec_packet,
+                            RecoveredPacket* recovered);
+
+  // Performs XOR between |src_packet| and |dst_packet| and stores the result
+  // in |dst_packet|.
+  static void XorPackets(const Packet* src_packet,
+                         RecoveredPacket* dst_packet);
+
+  // Finish up the recovery of a packet.
+  static  void FinishRecovery(RecoveredPacket* recovered);
+
+  // Recover a missing packet.
+  void RecoverPacket(const FecPacket* fecPacket,
+                     RecoveredPacket* recPacketToInsert);
+
+  // Get the number of missing media packets which are covered by this
+  // FEC packet. An FEC packet can recover at most one packet, and if zero
+  // packets are missing the FEC packet can be discarded.
+  // This function returns 2 when two or more packets are missing.
+  static int NumCoveredPacketsMissing(const FecPacket* fec_packet);
+
+  static uint16_t LatestSequenceNumber(uint16_t first,
+                                       uint16_t second);
+
+  static void DiscardFECPacket(FecPacket* fec_packet);
+  static void DiscardOldPackets(RecoveredPacketList* recoveredPacketList);
+
+  int32_t _id;
+  std::vector<Packet> _generatedFecPackets;
+  FecPacketList _fecPacketList;
+  bool _fecPacketReceived;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/forward_error_correction_internal.cc b/trunk/src/modules/rtp_rtcp/source/forward_error_correction_internal.cc
new file mode 100644
index 0000000..5338174
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/forward_error_correction_internal.cc
@@ -0,0 +1,381 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "forward_error_correction_internal.h"
+#include "fec_private_tables.h"
+
+#include <cassert>
+#include <cstring>
+
+namespace {
+
+// Allow for different modes of protection for packets in UEP case.
+enum ProtectionMode
+{
+    kModeNoOverlap,
+    kModeOverlap,
+    kModeBiasFirstPacket,
+};
+
+/**
+  * Fits an input mask (subMask) to an output mask.
+  * The mask is a matrix where the rows are the FEC packets,
+  * and the columns are the source packets the FEC is applied to.
+  * Each row of the mask is represented by a number of mask bytes.
+  *
+  * \param[in]  numMaskBytes    The number of mask bytes of output mask.
+  * \param[in]  numSubMaskBytes The number of mask bytes of input mask.
+  * \param[in]  numRows         The number of rows of the input mask.
+  * \param[in]  subMask         A pointer to hold the input mask, of size
+  *                             [0, numRows * numSubMaskBytes]
+  * \param[out] packetMask      A pointer to hold the output mask, of size
+  *                             [0, x * numMaskBytes], where x >= numRows.
+  */
+void FitSubMask(int numMaskBytes,
+                int numSubMaskBytes,
+                int numRows,
+                const uint8_t* subMask,
+                uint8_t* packetMask)
+{
+    if (numMaskBytes == numSubMaskBytes)
+    {
+        memcpy(packetMask, subMask, numRows * numSubMaskBytes);
+    }
+    else
+    {
+        for (int i = 0; i < numRows; i++)
+        {
+            int pktMaskIdx = i * numMaskBytes;
+            int pktMaskIdx2 = i * numSubMaskBytes;
+            for (int j = 0; j < numSubMaskBytes; j++)
+            {
+                packetMask[pktMaskIdx] = subMask[pktMaskIdx2];
+                pktMaskIdx++;
+                pktMaskIdx2++;
+            }
+        }
+    }
+}
+
+/**
+  * Shifts a mask by number of columns (bits), and fits it to an output mask.
+  * The mask is a matrix where the rows are the FEC packets,
+  * and the columns are the source packets the FEC is applied to.
+  * Each row of the mask is represented by a number of mask bytes.
+  *
+  * \param[in]  numMaskBytes     The number of mask bytes of output mask.
+  * \param[in]  numSubMaskBytes  The number of mask bytes of input mask.
+  * \param[in]  numColumnShift   The number of columns to be shifted, and
+  *                              the starting row for the output mask.
+  * \param[in]  endRow           The ending row for the output mask.
+  * \param[in]  subMask          A pointer to hold the input mask, of size
+  *                              [0, (endRowFEC - startRowFec) * numSubMaskBytes]
+  * \param[out] packetMask       A pointer to hold the output mask, of size
+  *                              [0, x * numMaskBytes], where x >= endRowFEC.
+  */
+// TODO (marpan): This function is doing three things at the same time:
+// shift within a byte, byte shift and resizing.
+// Split up into subroutines.
+void ShiftFitSubMask(int numMaskBytes,
+                     int resMaskBytes,
+                     int numColumnShift,
+                     int endRow,
+                     const uint8_t* subMask,
+                     uint8_t* packetMask)
+{
+
+    // Number of bit shifts within a byte
+    const int numBitShifts = (numColumnShift % 8);
+    const int numByteShifts = numColumnShift >> 3;
+
+    // Modify new mask with sub-mask21.
+
+    // Loop over the remaining FEC packets.
+    for (int i = numColumnShift; i < endRow; i++)
+    {
+        // Byte index of new mask, for row i and column resMaskBytes,
+        // offset by the number of byte shifts.
+        int pktMaskIdx = i * numMaskBytes + resMaskBytes - 1 + numByteShifts;
+        // Byte index of subMask, for row i and column resMaskBytes
+        int pktMaskIdx2 =
+            (i - numColumnShift) * resMaskBytes + resMaskBytes - 1;
+
+        uint8_t shiftRightCurrByte = 0;
+        uint8_t shiftLeftPrevByte = 0;
+        uint8_t combNewByte = 0;
+
+        // Handle case of numMaskBytes > resMaskBytes:
+        // For a given row, copy the rightmost "numBitShifts" bits
+        // of the last byte of subMask into output mask.
+        if (numMaskBytes > resMaskBytes)
+        {
+            shiftLeftPrevByte =
+                (subMask[pktMaskIdx2] << (8 - numBitShifts));
+            packetMask[pktMaskIdx + 1] = shiftLeftPrevByte;
+        }
+
+        // For each row i (FEC packet), shift the bit-mask of the subMask.
+        // Each row of the mask contains "resMaskBytes" of bytes.
+        // We start from the last byte of the subMask and move to first one.
+        for (int j = resMaskBytes - 1; j > 0; j--)
+        {
+            // Shift current byte of sub21 to the right by "numBitShifts".
+            shiftRightCurrByte =
+                subMask[pktMaskIdx2] >> numBitShifts;
+
+            // Fill in shifted bits with bits from the previous (left) byte:
+            // First shift the previous byte to the left by "8-numBitShifts".
+            shiftLeftPrevByte =
+                (subMask[pktMaskIdx2 - 1] << (8 - numBitShifts));
+
+            // Then combine both shifted bytes into new mask byte.
+            combNewByte = shiftRightCurrByte | shiftLeftPrevByte;
+
+            // Assign to new mask.
+            packetMask[pktMaskIdx] = combNewByte;
+            pktMaskIdx--;
+            pktMaskIdx2--;
+        }
+        // For the first byte in the row (j=0 case).
+        shiftRightCurrByte = subMask[pktMaskIdx2] >> numBitShifts;
+        packetMask[pktMaskIdx] = shiftRightCurrByte;
+
+    }
+}
+
+} //namespace
+
+namespace webrtc {
+namespace internal {
+
+// Remaining protection after important (first partition) packet protection
+void RemainingPacketProtection(int numMediaPackets,
+                               int numFecRemaining,
+                               int numFecForImpPackets,
+                               int numMaskBytes,
+                               ProtectionMode mode,
+                               uint8_t* packetMask)
+{
+    if (mode == kModeNoOverlap)
+    {
+        // subMask21
+
+        const int lBit =
+            (numMediaPackets - numFecForImpPackets) > 16 ? 1 : 0;
+
+        const int resMaskBytes =
+            (lBit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+        const uint8_t* packetMaskSub21 =
+            packetMaskTbl[numMediaPackets - numFecForImpPackets - 1]
+                         [numFecRemaining - 1];
+
+        ShiftFitSubMask(numMaskBytes, resMaskBytes, numFecForImpPackets,
+                        (numFecForImpPackets + numFecRemaining),
+                        packetMaskSub21, packetMask);
+
+    }
+    else if (mode == kModeOverlap || mode == kModeBiasFirstPacket)
+    {
+        // subMask22
+
+        const uint8_t* packetMaskSub22 =
+            packetMaskTbl[numMediaPackets - 1][numFecRemaining - 1];
+
+        FitSubMask(numMaskBytes, numMaskBytes, numFecRemaining, packetMaskSub22,
+                   &packetMask[numFecForImpPackets * numMaskBytes]);
+
+        if (mode == kModeBiasFirstPacket)
+        {
+            for (int i = 0; i < numFecRemaining; i++)
+            {
+                int pktMaskIdx = i * numMaskBytes;
+                packetMask[pktMaskIdx] = packetMask[pktMaskIdx] | (1 << 7);
+            }
+        }
+    }
+    else
+    {
+        assert(false);
+    }
+
+}
+
+// Protection for important (first partition) packets
+void ImportantPacketProtection(int numFecForImpPackets,
+                               int numImpPackets,
+                               int numMaskBytes,
+                               uint8_t* packetMask)
+{
+    const int lBit = numImpPackets > 16 ? 1 : 0;
+    const int numImpMaskBytes =
+        (lBit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+    // Get subMask1 from table
+    const uint8_t* packetMaskSub1 =
+        packetMaskTbl[numImpPackets - 1][numFecForImpPackets - 1];
+
+    FitSubMask(numMaskBytes, numImpMaskBytes,
+               numFecForImpPackets, packetMaskSub1, packetMask);
+
+}
+
+// This function sets the protection allocation: i.e., how many FEC packets
+// to use for numImp (1st partition) packets, given the: number of media
+// packets, number of FEC packets, and number of 1st partition packets.
+int SetProtectionAllocation(int numMediaPackets,
+                            int numFecPackets,
+                            int numImpPackets)
+{
+
+    // TODO (marpan): test different cases for protection allocation:
+
+    // Use at most (allocPar * numFecPackets) for important packets.
+    float allocPar = 0.5;
+    int maxNumFecForImp = allocPar * numFecPackets;
+
+    int numFecForImpPackets = (numImpPackets < maxNumFecForImp) ?
+        numImpPackets : maxNumFecForImp;
+
+    // Fall back to equal protection in this case
+    if (numFecPackets == 1 && (numMediaPackets > 2 * numImpPackets))
+    {
+        numFecForImpPackets = 0;
+    }
+
+    return numFecForImpPackets;
+}
+
+// Modification for UEP: reuse the off-line tables for the packet masks.
+// Note: these masks were designed for equal packet protection case,
+// assuming random packet loss.
+
+// Current version has 3 modes (options) to build UEP mask from existing ones.
+// Various other combinations may be added in future versions.
+// Longer-term, we may add another set of tables specifically for UEP cases.
+// TODO (marpan): also consider modification of masks for bursty loss cases.
+
+// Mask is characterized as (#packets_to_protect, #fec_for_protection).
+// Protection factor defined as: (#fec_for_protection / #packets_to_protect).
+
+// Let k=numMediaPackets, n=total#packets, (n-k)=numFecPackets, m=numImpPackets.
+
+// For ProtectionMode 0 and 1:
+// one mask (subMask1) is used for 1st partition packets,
+// the other mask (subMask21/22, for 0/1) is for the remaining FEC packets.
+
+// In both mode 0 and 1, the packets of 1st partition (numImpPackets) are
+// treated equally important, and are afforded more protection than the
+// residual partition packets.
+
+// For numImpPackets:
+// subMask1 = (m, t): protection = t/(m), where t=F(k,n-k,m).
+// t=F(k,n-k,m) is the number of packets used to protect first partition in
+// subMask1. This is determined from the function SetProtectionAllocation().
+
+// For the left-over protection:
+// Mode 0: subMask21 = (k-m,n-k-t): protection = (n-k-t)/(k-m)
+// mode 0 has no protection overlap between the two partitions.
+// For mode 0, we would typically set t = min(m, n-k).
+
+
+// Mode 1: subMask22 = (k, n-k-t), with protection (n-k-t)/(k)
+// mode 1 has protection overlap between the two partitions (preferred).
+
+// For ProtectionMode 2:
+// This gives 1st packet of list (which is 1st packet of 1st partition) more
+// protection. In mode 2, the equal protection mask (which is obtained from
+// mode 1 for t=0) is modified (more "1s" added in 1st column of packet mask)
+// to bias higher protection for the 1st source packet.
+
+// Protection Mode 2 may be extended for a sort of sliding protection
+// (i.e., vary the number/density of "1s" across columns) across packets.
+
+void UnequalProtectionMask(int numMediaPackets,
+                           int numFecPackets,
+                           int numImpPackets,
+                           int numMaskBytes,
+                           uint8_t* packetMask)
+{
+
+    // Set Protection type and allocation
+    // TODO (marpan): test/update for best mode and some combinations thereof.
+
+    ProtectionMode mode = kModeOverlap;
+    int numFecForImpPackets = 0;
+
+    if (mode != kModeBiasFirstPacket)
+    {
+        numFecForImpPackets = SetProtectionAllocation(numMediaPackets,
+                                                      numFecPackets,
+                                                      numImpPackets);
+    }
+
+    int numFecRemaining = numFecPackets - numFecForImpPackets;
+    // Done with setting protection type and allocation
+
+    //
+    // Generate subMask1
+    //
+    if (numFecForImpPackets > 0)
+    {
+        ImportantPacketProtection(numFecForImpPackets, numImpPackets,
+                                  numMaskBytes, packetMask);
+    }
+
+    //
+    // Generate subMask2
+    //
+    if (numFecRemaining > 0)
+    {
+        RemainingPacketProtection(numMediaPackets, numFecRemaining,
+                                  numFecForImpPackets, numMaskBytes,
+                                  mode, packetMask);
+    }
+
+}
+
+void GeneratePacketMasks(int numMediaPackets,
+                         int numFecPackets,
+                         int numImpPackets,
+                         bool useUnequalProtection,
+                         uint8_t* packetMask)
+{
+    assert(numMediaPackets <= static_cast<int>(sizeof(packetMaskTbl) /
+            sizeof(*packetMaskTbl)));
+    assert(numMediaPackets > 0);
+    assert(numFecPackets <= numMediaPackets && numFecPackets > 0);
+    assert(numImpPackets <= numMediaPackets && numImpPackets >= 0);
+
+    int lBit = numMediaPackets > 16 ? 1 : 0;
+    const int numMaskBytes =
+        (lBit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+    // Equal-protection for these cases
+    if (!useUnequalProtection || numImpPackets == 0)
+    {
+        // Retrieve corresponding mask table directly: for equal-protection case.
+        // Mask = (k,n-k), with protection factor = (n-k)/k,
+        // where k = numMediaPackets, n=total#packets, (n-k)=numFecPackets.
+        memcpy(packetMask,
+               packetMaskTbl[numMediaPackets - 1][numFecPackets - 1],
+               numFecPackets * numMaskBytes);
+    }
+    else  //UEP case
+    {
+        UnequalProtectionMask(numMediaPackets, numFecPackets, numImpPackets,
+                              numMaskBytes, packetMask);
+
+    } // End of UEP modification
+
+} //End of GetPacketMasks
+
+}  // namespace internal
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/forward_error_correction_internal.h b/trunk/src/modules/rtp_rtcp/source/forward_error_correction_internal.h
new file mode 100644
index 0000000..e137ef2
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/forward_error_correction_internal.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Packet mask size in bytes (L bit is set).
+static const int kMaskSizeLBitSet = 6;
+// Packet mask size in bytes (L bit is cleared).
+static const int kMaskSizeLBitClear = 2;
+
+namespace internal {
+
+ /**
+  * Returns an array of packet masks. The mask of a single FEC packet
+  * corresponds to a number of mask bytes. The mask indicates which
+  * media packets should be protected by the FEC packet.
+
+  * \param[in]  numMediaPackets       The number of media packets to protect.
+  *                                    [1, maxMediaPackets].
+  * \param[in]  numFecPackets         The number of FEC packets which will
+  *                                    be generated. [1, numMediaPackets].
+  * \param[in]  numImpPackets         The number of important packets.
+  *                                    [0, numMediaPackets].
+  *                                   numImpPackets = 0 is the equal
+  *                                    protection scenario.
+  * \param[in]  useUnequalProtection  Enables unequal protection: allocates
+  *                                    more protection to the numImpPackets.
+  * \param[out] packetMask            A pointer to hold the packet mask array,
+  *                                    of size:
+  *                                    numFecPackets * "number of mask bytes".
+  */
+void GeneratePacketMasks(int numMediaPackets,
+                         int numFecPackets,
+                         int numImpPackets,
+                         bool useUnequalProtection,
+                         uint8_t* packetMask);
+
+} // namespace internal
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h b/trunk/src/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h
new file mode 100644
index 0000000..cb7ebba
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_receiver_video.h"
+
+namespace webrtc {
+
+class MockRTPReceiverVideo : public RTPReceiverVideo {
+ public:
+  MOCK_METHOD1(ChangeUniqueId,
+      void(const WebRtc_Word32 id));
+  MOCK_METHOD3(ReceiveRecoveredPacketCallback,
+      WebRtc_Word32(WebRtcRTPHeader* rtpHeader,
+                    const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord16 payloadDataLength));
+  MOCK_METHOD3(CallbackOfReceivedPayloadData,
+      WebRtc_Word32(const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord16 payloadSize,
+                    const WebRtcRTPHeader* rtpHeader));
+  MOCK_CONST_METHOD0(TimeStamp,
+      WebRtc_UWord32());
+  MOCK_CONST_METHOD0(SequenceNumber,
+      WebRtc_UWord16());
+  MOCK_CONST_METHOD2(PayloadTypeToPayload,
+      WebRtc_UWord32(const WebRtc_UWord8 payloadType,
+                     ModuleRTPUtility::Payload*& payload));
+  MOCK_CONST_METHOD2(RetransmitOfOldPacket,
+      bool(const WebRtc_UWord16 sequenceNumber,
+           const WebRtc_UWord32 rtpTimeStamp));
+  MOCK_CONST_METHOD0(REDPayloadType,
+      WebRtc_Word8());
+};
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/overuse_detector.cc b/trunk/src/modules/rtp_rtcp/source/overuse_detector.cc
new file mode 100644
index 0000000..4cffb0f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/overuse_detector.cc
@@ -0,0 +1,450 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if _WIN32
+#include <windows.h>
+#endif
+
+#include "trace.h"
+#include "overuse_detector.h"
+#include "remote_rate_control.h"
+#include "rtp_utility.h"
+#include <math.h>
+#include <stdlib.h> //abs
+
+#ifdef WEBRTC_BWE_MATLAB
+extern MatlabEngine eng; // global variable defined elsewhere
+#endif
+
+#define INIT_CAPACITY_SLOPE 8.0/512.0
+#define DETECTOR_THRESHOLD 25.0
+#define OVER_USING_TIME_THRESHOLD 100
+#define MIN_FRAME_PERIOD_HISTORY_LEN 60
+
+namespace webrtc {
+OverUseDetector::OverUseDetector()
+:
+_firstPacket(true),
+_currentFrame(),
+_prevFrame(),
+_numOfDeltas(0),
+_slope(INIT_CAPACITY_SLOPE),
+_offset(0),
+_E(),
+_processNoise(),
+_avgNoise(0.0),
+_varNoise(500),
+_threshold(DETECTOR_THRESHOLD),
+_tsDeltaHist(),
+_prevOffset(0.0),
+_timeOverUsing(-1),
+_overUseCounter(0),
+_hypothesis(kBwNormal)
+#ifdef WEBRTC_BWE_MATLAB
+,_plot1(NULL),
+_plot2(NULL),
+_plot3(NULL),
+_plot4(NULL)
+#endif
+{
+    _E[0][0] = 100;
+    _E[1][1] = 1e-1;
+    _E[0][1] = _E[1][0] = 0;
+    _processNoise[0] = 1e-10;
+    _processNoise[1] = 1e-2;
+}
+
+OverUseDetector::~OverUseDetector()
+{
+#ifdef WEBRTC_BWE_MATLAB
+    if (_plot1)
+    {
+        eng.DeletePlot(_plot1);
+        _plot1 = NULL;
+    }
+    if (_plot2)
+    {
+        eng.DeletePlot(_plot2);
+        _plot2 = NULL;
+    }
+    if (_plot3)
+    {
+        eng.DeletePlot(_plot3);
+        _plot3 = NULL;
+    }
+    if (_plot4)
+    {
+        eng.DeletePlot(_plot4);
+        _plot4 = NULL;
+    }
+#endif
+
+    _tsDeltaHist.clear();
+}
+
+void OverUseDetector::Reset()
+{
+    _firstPacket = true;
+    _currentFrame._size = 0;
+    _currentFrame._completeTimeMs = -1;
+    _currentFrame._timestamp = -1;
+    _prevFrame._size = 0;
+    _prevFrame._completeTimeMs = -1;
+    _prevFrame._timestamp = -1;
+    _numOfDeltas = 0;
+    _slope = INIT_CAPACITY_SLOPE;
+    _offset = 0;
+    _E[0][0] = 100;
+    _E[1][1] = 1e-1;
+    _E[0][1] = _E[1][0] = 0;
+    _processNoise[0] = 1e-10;
+    _processNoise[1] = 1e-2;
+    _avgNoise = 0.0;
+    _varNoise = 500;
+    _threshold = DETECTOR_THRESHOLD;
+    _prevOffset = 0.0;
+    _timeOverUsing = -1;
+    _overUseCounter = 0;
+    _hypothesis = kBwNormal;
+    _tsDeltaHist.clear();
+}
+
+bool OverUseDetector::Update(const WebRtcRTPHeader& rtpHeader,
+                             const WebRtc_UWord16 packetSize,
+                             const WebRtc_Word64 nowMS)
+{
+#ifdef WEBRTC_BWE_MATLAB
+    // Create plots
+    const WebRtc_Word64 startTimeMs = nowMS;
+    if (_plot1 == NULL)
+    {
+        _plot1 = eng.NewPlot(new MatlabPlot());
+        _plot1->AddLine(1000, "b.", "scatter");
+    }
+    if (_plot2 == NULL)
+    {
+        _plot2 = eng.NewPlot(new MatlabPlot());
+        _plot2->AddTimeLine(30, "b", "offset", startTimeMs);
+        _plot2->AddTimeLine(30, "r--", "limitPos", startTimeMs);
+        _plot2->AddTimeLine(30, "k.", "trigger", startTimeMs);
+        _plot2->AddTimeLine(30, "ko", "detection", startTimeMs);
+        //_plot2->AddTimeLine(30, "g", "slowMean", startTimeMs);
+    }
+    if (_plot3 == NULL)
+    {
+        _plot3 = eng.NewPlot(new MatlabPlot());
+        _plot3->AddTimeLine(30, "b", "noiseVar", startTimeMs);
+    }
+    if (_plot4 == NULL)
+    {
+        _plot4 = eng.NewPlot(new MatlabPlot());
+        //_plot4->AddTimeLine(60, "b", "p11", startTimeMs);
+        //_plot4->AddTimeLine(60, "r", "p12", startTimeMs);
+        _plot4->AddTimeLine(60, "g", "p22", startTimeMs);
+        //_plot4->AddTimeLine(60, "g--", "p22_hat", startTimeMs);
+        //_plot4->AddTimeLine(30, "b.-", "deltaFs", startTimeMs);
+    }
+
+#endif
+
+    bool wrapped = false;
+    bool completeFrame = false;
+    if (_currentFrame._timestamp == -1)
+    {
+        _currentFrame._timestamp = rtpHeader.header.timestamp;
+    }
+    else if (ModuleRTPUtility::OldTimestamp(
+                 rtpHeader.header.timestamp,
+                 static_cast<WebRtc_UWord32>(_currentFrame._timestamp),
+                 &wrapped))
+    {
+        // Don't update with old data
+        return completeFrame;
+    }
+    else if (rtpHeader.header.timestamp != _currentFrame._timestamp)
+    {
+        // First packet of a later frame, the previous frame sample is ready
+        WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "Frame complete at %I64i", _currentFrame._completeTimeMs);
+        if (_prevFrame._completeTimeMs >= 0) // This is our second frame
+        {
+            WebRtc_Word64 tDelta = 0;
+            double tsDelta = 0;
+            // Check for wrap
+            ModuleRTPUtility::OldTimestamp(
+                static_cast<WebRtc_UWord32>(_prevFrame._timestamp),
+                static_cast<WebRtc_UWord32>(_currentFrame._timestamp),
+                &wrapped);
+            CompensatedTimeDelta(_currentFrame, _prevFrame, tDelta, tsDelta, wrapped);
+            UpdateKalman(tDelta, tsDelta, _currentFrame._size, _prevFrame._size);
+        }
+        // The new timestamp is now the current frame,
+        // and the old timestamp becomes the previous frame.
+        _prevFrame = _currentFrame;
+        _currentFrame._timestamp = rtpHeader.header.timestamp;
+        _currentFrame._size = 0;
+        _currentFrame._completeTimeMs = -1;
+        completeFrame = true;
+    }
+    // Accumulate the frame size
+    _currentFrame._size += packetSize;
+    _currentFrame._completeTimeMs = nowMS;
+    return completeFrame;
+}
+
+BandwidthUsage OverUseDetector::State() const
+{
+    return _hypothesis;
+}
+
+double OverUseDetector::NoiseVar() const
+{
+    return _varNoise;
+}
+
+void OverUseDetector::SetRateControlRegion(RateControlRegion region)
+{
+    switch (region)
+    {
+    case kRcMaxUnknown:
+        {
+            _threshold = DETECTOR_THRESHOLD;
+            break;
+        }
+    case kRcAboveMax:
+    case kRcNearMax:
+        {
+            _threshold = DETECTOR_THRESHOLD / 2;
+            break;
+        }
+    }
+}
+
+void OverUseDetector::CompensatedTimeDelta(const FrameSample& currentFrame, const FrameSample& prevFrame, WebRtc_Word64& tDelta,
+                                           double& tsDelta, bool wrapped)
+{
+    _numOfDeltas++;
+    if (_numOfDeltas > 1000)
+    {
+        _numOfDeltas = 1000;
+    }
+    // Add wrap-around compensation
+    WebRtc_Word64 wrapCompensation = 0;
+    if (wrapped)
+    {
+        wrapCompensation = static_cast<WebRtc_Word64>(1)<<32;
+    }
+    tsDelta = (currentFrame._timestamp + wrapCompensation - prevFrame._timestamp) / 90.0;
+    tDelta = currentFrame._completeTimeMs - prevFrame._completeTimeMs;
+    assert(tsDelta > 0);
+}
+
+double OverUseDetector::CurrentDrift()
+{
+    return 1.0;
+}
+
+void OverUseDetector::UpdateKalman(WebRtc_Word64 tDelta, double tsDelta, WebRtc_UWord32 frameSize, WebRtc_UWord32 prevFrameSize)
+{
+    const double minFramePeriod = UpdateMinFramePeriod(tsDelta);
+    const double drift = CurrentDrift();
+    // Compensate for drift
+    const double tTsDelta = tDelta - tsDelta / drift;
+    double fsDelta = static_cast<double>(frameSize) - prevFrameSize;
+
+    // Update the Kalman filter
+    const double scaleFactor =  minFramePeriod / (1000.0 / 30.0);
+    _E[0][0] += _processNoise[0] * scaleFactor;
+    _E[1][1] += _processNoise[1] * scaleFactor;
+
+    if ((_hypothesis == kBwOverusing && _offset < _prevOffset) ||
+        (_hypothesis == kBwUnderUsing && _offset > _prevOffset))
+    {
+        _E[1][1] += 10 * _processNoise[1] * scaleFactor;
+    }
+
+    const double h[2] = {fsDelta, 1.0};
+    const double Eh[2] = {_E[0][0]*h[0] + _E[0][1]*h[1],
+                         _E[1][0]*h[0] + _E[1][1]*h[1]};
+
+    const double residual = tTsDelta - _slope*h[0] - _offset;
+
+    const bool stableState = (BWE_MIN(_numOfDeltas, 60) * abs(_offset) < _threshold);
+    // We try to filter out very late frames. For instance, periodic key
+    // frames don't fit the Gaussian model well.
+    if (abs(residual) < 3 * sqrt(_varNoise))
+    {
+        UpdateNoiseEstimate(residual, minFramePeriod, stableState);
+    }
+    else
+    {
+        UpdateNoiseEstimate(3 * sqrt(_varNoise), minFramePeriod, stableState);
+    }
+
+    const double denom = _varNoise + h[0]*Eh[0] + h[1]*Eh[1];
+
+    const double K[2] = {Eh[0] / denom,
+                        Eh[1] / denom};
+
+    const double IKh[2][2] = {{1.0 - K[0]*h[0], -K[0]*h[1]},
+                             {-K[1]*h[0], 1.0 - K[1]*h[1]}};
+    const double e00 = _E[0][0];
+    const double e01 = _E[0][1];
+
+    // Update state
+    _E[0][0] = e00 * IKh[0][0] + _E[1][0] * IKh[0][1];
+    _E[0][1] = e01 * IKh[0][0] + _E[1][1] * IKh[0][1];
+    _E[1][0] = e00 * IKh[1][0] + _E[1][0] * IKh[1][1];
+    _E[1][1] = e01 * IKh[1][0] + _E[1][1] * IKh[1][1];
+
+    // Covariance matrix, must be positive semi-definite
+    assert(_E[0][0] + _E[1][1] >= 0 &&
+           _E[0][0] * _E[1][1] - _E[0][1] * _E[1][0] >= 0 &&
+           _E[0][0] >= 0);
+
+#ifdef WEBRTC_BWE_MATLAB
+    //_plot4->Append("p11",_E[0][0]);
+    //_plot4->Append("p12",_E[0][1]);
+    _plot4->Append("p22",_E[1][1]);
+    //_plot4->Append("p22_hat", 0.5*(_processNoise[1] +
+    //    sqrt(_processNoise[1]*(_processNoise[1] + 4*_varNoise))));
+    //_plot4->Append("deltaFs", fsDelta);
+    _plot4->Plot();
+#endif
+    _slope = _slope + K[0] * residual;
+    _prevOffset = _offset;
+    _offset = _offset + K[1] * residual;
+
+    Detect(tsDelta);
+
+#ifdef WEBRTC_BWE_MATLAB
+    _plot1->Append("scatter", static_cast<double>(_currentFrame._size) - _prevFrame._size,
+        static_cast<double>(tDelta-tsDelta));
+    _plot1->MakeTrend("scatter", "slope", _slope, _offset, "k-");
+    _plot1->MakeTrend("scatter", "thresholdPos", _slope, _offset + 2 * sqrt(_varNoise), "r-");
+    _plot1->MakeTrend("scatter", "thresholdNeg", _slope, _offset - 2 * sqrt(_varNoise), "r-");
+    _plot1->Plot();
+
+    _plot2->Append("offset", _offset);
+    _plot2->Append("limitPos", _threshold/BWE_MIN(_numOfDeltas, 60));
+    _plot2->Plot();
+
+    _plot3->Append("noiseVar", _varNoise);
+    _plot3->Plot();
+#endif
+}
+
+double OverUseDetector::UpdateMinFramePeriod(double tsDelta) {
+  double minFramePeriod = tsDelta;
+  if (_tsDeltaHist.size() >= MIN_FRAME_PERIOD_HISTORY_LEN) {
+    std::list<double>::iterator firstItem = _tsDeltaHist.begin();
+    _tsDeltaHist.erase(firstItem);
+  }
+  std::list<double>::iterator it = _tsDeltaHist.begin();
+  for (; it != _tsDeltaHist.end(); it++) {
+    minFramePeriod = BWE_MIN(*it, minFramePeriod);
+  }
+  _tsDeltaHist.push_back(tsDelta);
+  return minFramePeriod;
+}
+
+void OverUseDetector::UpdateNoiseEstimate(double residual, double tsDelta, bool stableState)
+{
+    if (!stableState)
+    {
+        return;
+    }
+    // Use a faster filter during startup to adapt more quickly to the
+    // network's jitter level; alpha is tuned for 30 frames per second.
+    double alpha = 0.01;
+    if (_numOfDeltas > 10*30)
+    {
+        alpha = 0.002;
+    }
+    // Only update the noise estimate if we're not over-using
+    // beta is a function of alpha and the time delta since
+    // the previous update.
+    const double beta = pow(1 - alpha, tsDelta * 30.0 / 1000.0);
+    _avgNoise = beta * _avgNoise + (1 - beta) * residual;
+    _varNoise = beta * _varNoise + (1 - beta) * (_avgNoise - residual) * (_avgNoise - residual);
+    if (_varNoise < 1e-7)
+    {
+        _varNoise = 1e-7;
+    }
+}
+
+BandwidthUsage OverUseDetector::Detect(double tsDelta)
+{
+    if (_numOfDeltas < 2)
+    {
+        return kBwNormal;
+    }
+    const double T = BWE_MIN(_numOfDeltas, 60) * _offset;
+    if (abs(T) > _threshold)
+    {
+        if (_offset > 0)
+        {
+            if (_timeOverUsing == -1)
+            {
+                // Initialize the timer. Assume that we've been
+                // over-using half of the time since the previous
+                // sample.
+                _timeOverUsing = tsDelta / 2;
+            }
+            else
+            {
+                // Increment timer
+                _timeOverUsing += tsDelta;
+            }
+            _overUseCounter++;
+            if (_timeOverUsing > OVER_USING_TIME_THRESHOLD && _overUseCounter > 1)
+            {
+                if (_offset >= _prevOffset)
+                {
+#ifdef _DEBUG
+                    if (_hypothesis != kBwOverusing)
+                        WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwOverusing");
+#endif
+                    _timeOverUsing = 0;
+                    _overUseCounter = 0;
+                    _hypothesis = kBwOverusing;
+#ifdef WEBRTC_BWE_MATLAB
+                    _plot2->Append("detection",_offset); // plot it later
+#endif
+                }
+            }
+#ifdef WEBRTC_BWE_MATLAB
+            _plot2->Append("trigger",_offset); // plot it later
+#endif
+        }
+        else
+        {
+#ifdef _DEBUG
+            if (_hypothesis != kBwUnderUsing)
+                    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwUnderUsing");
+#endif
+            _timeOverUsing = -1;
+            _overUseCounter = 0;
+            _hypothesis = kBwUnderUsing;
+        }
+    }
+    else
+    {
+#ifdef _DEBUG
+        if (_hypothesis != kBwNormal)
+                WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwNormal");
+#endif
+        _timeOverUsing = -1;
+        _overUseCounter = 0;
+        _hypothesis = kBwNormal;
+    }
+    return _hypothesis;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/overuse_detector.h b/trunk/src/modules/rtp_rtcp/source/overuse_detector.h
new file mode 100644
index 0000000..b250e01
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/overuse_detector.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_OVERUSE_DETECTOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_OVERUSE_DETECTOR_H_
+
+#include <list>
+
+#include "bwe_defines.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+#ifdef WEBRTC_BWE_MATLAB
+#include "../test/BWEStandAlone/MatlabPlot.h"
+#endif
+
+namespace webrtc {
+enum RateControlRegion;
+
+class OverUseDetector
+{
+public:
+    OverUseDetector();
+    ~OverUseDetector();
+    bool Update(const WebRtcRTPHeader& rtpHeader,
+                const WebRtc_UWord16 packetSize,
+                const WebRtc_Word64 nowMS);
+    BandwidthUsage State() const;
+    void Reset();
+    double NoiseVar() const;
+    void SetRateControlRegion(RateControlRegion region);
+
+private:
+    struct FrameSample
+    {
+        FrameSample() : _size(0), _completeTimeMs(-1), _timestamp(-1) {}
+
+        WebRtc_UWord32 _size;
+        WebRtc_Word64  _completeTimeMs;
+        WebRtc_Word64  _timestamp;
+    };
+
+    void CompensatedTimeDelta(const FrameSample& currentFrame,
+                              const FrameSample& prevFrame,
+                              WebRtc_Word64& tDelta,
+                              double& tsDelta,
+                              bool wrapped);
+    void UpdateKalman(WebRtc_Word64 tDelta,
+                      double tsDelta,
+                      WebRtc_UWord32 frameSize,
+                      WebRtc_UWord32 prevFrameSize);
+    double UpdateMinFramePeriod(double tsDelta);
+    void UpdateNoiseEstimate(double residual, double tsDelta, bool stableState);
+    BandwidthUsage Detect(double tsDelta);
+    double CurrentDrift();
+
+    bool _firstPacket;
+    FrameSample _currentFrame;
+    FrameSample _prevFrame;
+    WebRtc_UWord16 _numOfDeltas;
+    double _slope;
+    double _offset;
+    double _E[2][2];
+    double _processNoise[2];
+    double _avgNoise;
+    double _varNoise;
+    double _threshold;
+    std::list<double> _tsDeltaHist;
+    double _prevOffset;
+    double _timeOverUsing;
+    WebRtc_UWord16 _overUseCounter;
+    BandwidthUsage _hypothesis;
+
+#ifdef WEBRTC_BWE_MATLAB
+    MatlabPlot* _plot1;
+    MatlabPlot* _plot2;
+    MatlabPlot* _plot3;
+    MatlabPlot* _plot4;
+#endif
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_OVERUSE_DETECTOR_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/receiver_fec.cc b/trunk/src/modules/rtp_rtcp/source/receiver_fec.cc
new file mode 100644
index 0000000..77a3465
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/receiver_fec.cc
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/receiver_fec.h"
+
+#include <cassert>
+
+#include "modules/rtp_rtcp/source/rtp_receiver_video.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/trace.h"
+
+// RFC 5109
+namespace webrtc {
+ReceiverFEC::ReceiverFEC(const WebRtc_Word32 id, RTPReceiverVideo* owner)
+    : _id(id),
+      _owner(owner),
+      _fec(new ForwardErrorCorrection(id)),
+      _payloadTypeFEC(-1) {
+}
+
+ReceiverFEC::~ReceiverFEC() {
+  // Clean up DecodeFEC()
+  while (!_receivedPacketList.empty()){
+    ForwardErrorCorrection::ReceivedPacket* receivedPacket =
+        _receivedPacketList.front();
+    delete receivedPacket;
+    _receivedPacketList.pop_front();
+  }
+  assert(_receivedPacketList.empty());
+
+  if (_fec != NULL) {
+    _fec->ResetState(&_recoveredPacketList);
+    delete _fec;
+  }
+}
+
+void ReceiverFEC::SetPayloadTypeFEC(const WebRtc_Word8 payloadType) {
+  _payloadTypeFEC = payloadType;
+}
+
+/*
+    0                   1                    2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3  4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |F|   block PT  |  timestamp offset         |   block length    |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+RFC 2198          RTP Payload for Redundant Audio Data    September 1997
+
+   The bits in the header are specified as follows:
+
+   F: 1 bit First bit in header indicates whether another header block
+       follows.  If 1 further header blocks follow, if 0 this is the
+       last header block.
+       If 0 there is only 1 byte RED header
+
+   block PT: 7 bits RTP payload type for this block.
+
+   timestamp offset:  14 bits Unsigned offset of timestamp of this block
+       relative to timestamp given in RTP header.  The use of an unsigned
+       offset implies that redundant data must be sent after the primary
+       data, and is hence a time to be subtracted from the current
+       timestamp to determine the timestamp of the data for which this
+       block is the redundancy.
+
+   block length:  10 bits Length in bytes of the corresponding data
+       block excluding header.
+*/
+
+WebRtc_Word32 ReceiverFEC::AddReceivedFECPacket(
+    const WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* incomingRtpPacket,
+    const WebRtc_UWord16 payloadDataLength,
+    bool& FECpacket) {
+  if (_payloadTypeFEC == -1) {
+    return -1;
+  }
+
+  WebRtc_UWord8 REDHeaderLength = 1;
+
+  // Add to the list without the RED header, i.e. as a virtual RTP packet;
+  // the RED header is stripped below.
+
+  ForwardErrorCorrection::ReceivedPacket* receivedPacket =
+      new ForwardErrorCorrection::ReceivedPacket;
+  receivedPacket->pkt = new ForwardErrorCorrection::Packet;
+
+  // get payload type from RED header
+  WebRtc_UWord8 payloadType =
+      incomingRtpPacket[rtpHeader->header.headerLength] & 0x7f;
+
+  // use the payloadType to decide if it's FEC or coded data
+  if (_payloadTypeFEC == payloadType) {
+    receivedPacket->isFec = true;
+    FECpacket = true;
+  } else {
+    receivedPacket->isFec = false;
+    FECpacket = false;
+  }
+  receivedPacket->seqNum = rtpHeader->header.sequenceNumber;
+
+  WebRtc_UWord16 blockLength = 0;
+  if(incomingRtpPacket[rtpHeader->header.headerLength] & 0x80) {
+    // f bit set in RED header
+    REDHeaderLength = 4;
+    WebRtc_UWord16 timestampOffset =
+        (incomingRtpPacket[rtpHeader->header.headerLength + 1]) << 8;
+    timestampOffset += incomingRtpPacket[rtpHeader->header.headerLength+2];
+    timestampOffset = timestampOffset >> 2;
+    if(timestampOffset != 0) {
+      // |timestampOffset| should be 0. However, it's possible this is the first
+      // location a corrupt payload can be caught, so don't assert.
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                   "Corrupt payload found in %s", __FUNCTION__);
+      delete receivedPacket;
+      return -1;
+    }
+
+    blockLength =
+        (0x03 & incomingRtpPacket[rtpHeader->header.headerLength + 2]) << 8;
+    blockLength += (incomingRtpPacket[rtpHeader->header.headerLength + 3]);
+
+    // check next RED header
+    if(incomingRtpPacket[rtpHeader->header.headerLength+4] & 0x80) {
+      // more than two blocks per packet are not supported
+      delete receivedPacket;
+      assert(false);
+      return -1;
+    }
+    if(blockLength > payloadDataLength - REDHeaderLength) {
+      // block length longer than packet
+      delete receivedPacket;
+      assert(false);
+      return -1;
+    }
+  }
+
+  ForwardErrorCorrection::ReceivedPacket* secondReceivedPacket = NULL;
+  if (blockLength > 0) {
+    // handle block length, split into 2 packets
+    REDHeaderLength = 5;
+
+    // copy the RTP header
+    memcpy(receivedPacket->pkt->data,
+           incomingRtpPacket,
+           rtpHeader->header.headerLength);
+
+    // replace the RED payload type
+    receivedPacket->pkt->data[1] &= 0x80;         // reset the payload
+    receivedPacket->pkt->data[1] += payloadType;  // set the media payload type
+
+    // copy the payload data
+    memcpy(receivedPacket->pkt->data + rtpHeader->header.headerLength,
+           incomingRtpPacket + rtpHeader->header.headerLength + REDHeaderLength,
+           blockLength);
+
+    receivedPacket->pkt->length = blockLength;
+
+    secondReceivedPacket = new ForwardErrorCorrection::ReceivedPacket;
+    secondReceivedPacket->pkt = new ForwardErrorCorrection::Packet;
+
+    secondReceivedPacket->isFec = true;
+    secondReceivedPacket->seqNum = rtpHeader->header.sequenceNumber;
+
+    // copy the FEC payload data
+    memcpy(secondReceivedPacket->pkt->data,
+           incomingRtpPacket + rtpHeader->header.headerLength +
+               REDHeaderLength + blockLength,
+           payloadDataLength - REDHeaderLength - blockLength);
+
+    secondReceivedPacket->pkt->length = payloadDataLength - REDHeaderLength -
+        blockLength;
+
+  } else if(receivedPacket->isFec) {
+    // everything behind the RED header
+    memcpy(receivedPacket->pkt->data,
+           incomingRtpPacket + rtpHeader->header.headerLength + REDHeaderLength,
+           payloadDataLength - REDHeaderLength);
+    receivedPacket->pkt->length = payloadDataLength - REDHeaderLength;
+    receivedPacket->ssrc =
+        ModuleRTPUtility::BufferToUWord32(&incomingRtpPacket[8]);
+
+  } else {
+    // copy the RTP header
+    memcpy(receivedPacket->pkt->data,
+           incomingRtpPacket,
+           rtpHeader->header.headerLength);
+
+    // replace the RED payload type
+    receivedPacket->pkt->data[1] &= 0x80;         // reset the payload
+    receivedPacket->pkt->data[1] += payloadType;  // set the media payload type
+
+    // copy the media payload data
+    memcpy(receivedPacket->pkt->data + rtpHeader->header.headerLength,
+           incomingRtpPacket + rtpHeader->header.headerLength + REDHeaderLength,
+           payloadDataLength - REDHeaderLength);
+
+    receivedPacket->pkt->length = rtpHeader->header.headerLength +
+        payloadDataLength - REDHeaderLength;
+  }
+
+  if(receivedPacket->pkt->length == 0) {
+    delete secondReceivedPacket;
+    delete receivedPacket;
+    return 0;
+  }
+
+  _receivedPacketList.push_back(receivedPacket);
+  if (secondReceivedPacket) {
+    _receivedPacketList.push_back(secondReceivedPacket);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ReceiverFEC::ProcessReceivedFEC() {
+  if (!_receivedPacketList.empty()) {
+    if (_fec->DecodeFEC(&_receivedPacketList, &_recoveredPacketList) != 0) {
+      return -1;
+    }
+    assert(_receivedPacketList.empty());
+  }
+  ForwardErrorCorrection::RecoveredPacketList::iterator it =
+      _recoveredPacketList.begin();
+  for (; it != _recoveredPacketList.end(); ++it) {
+    if ((*it)->returned)  // Already sent to the VCM and the jitter buffer.
+      continue;
+    if (ParseAndReceivePacket((*it)->pkt) != 0) {
+      return -1;
+    }
+    (*it)->returned = true;
+  }
+  return 0;
+}
+
+int ReceiverFEC::ParseAndReceivePacket(
+    const ForwardErrorCorrection::Packet* packet) {
+  WebRtcRTPHeader header;
+  memset(&header, 0, sizeof(header));
+  ModuleRTPUtility::RTPHeaderParser parser(packet->data,
+                                           packet->length);
+  if (!parser.Parse(header)) {
+    return -1;
+  }
+  if (_owner->ReceiveRecoveredPacketCallback(
+      &header,
+      &packet->data[header.header.headerLength],
+      packet->length - header.header.headerLength) != 0) {
+    return -1;
+  }
+  return 0;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/receiver_fec.h b/trunk/src/modules/rtp_rtcp/source/receiver_fec.h
new file mode 100644
index 0000000..63aaa72
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/receiver_fec.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVER_FEC_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVER_FEC_H_
+
+#include "rtp_rtcp_defines.h"
+// This header is included to get the nested declaration of the Packet structure.
+#include "forward_error_correction.h"
+
+#include "typedefs.h"
+
+namespace webrtc {
+class RTPReceiverVideo;
+
+class ReceiverFEC
+{
+public:
+    ReceiverFEC(const WebRtc_Word32 id, RTPReceiverVideo* owner);
+    virtual ~ReceiverFEC();
+
+    WebRtc_Word32 AddReceivedFECPacket(const WebRtcRTPHeader* rtpHeader,
+                                       const WebRtc_UWord8* incomingRtpPacket,
+                                       const WebRtc_UWord16 payloadDataLength,
+                                       bool& FECpacket);
+
+    WebRtc_Word32 ProcessReceivedFEC();
+
+    void SetPayloadTypeFEC(const WebRtc_Word8 payloadType);
+
+private:
+    int ParseAndReceivePacket(const ForwardErrorCorrection::Packet* packet);
+
+    int _id;
+    RTPReceiverVideo* _owner;
+    ForwardErrorCorrection* _fec;
+    // TODO(holmer): In the current version _receivedPacketList is never more
+    // than one packet, since we process FEC every time a new packet
+    // arrives. We should remove the list.
+    ForwardErrorCorrection::ReceivedPacketList _receivedPacketList;
+    ForwardErrorCorrection::RecoveredPacketList _recoveredPacketList;
+    WebRtc_Word8 _payloadTypeFEC;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVER_FEC_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/receiver_fec_unittest.cc b/trunk/src/modules/rtp_rtcp/source/receiver_fec_unittest.cc
new file mode 100644
index 0000000..669018e
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/receiver_fec_unittest.cc
@@ -0,0 +1,523 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include <list>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h"
+#include "modules/rtp_rtcp/source/receiver_fec.h"
+
+using ::testing::_;
+using ::testing::Args;
+using ::testing::ElementsAreArray;
+using ::testing::InSequence;
+
+namespace webrtc {
+
+typedef ForwardErrorCorrection::Packet Packet;
+
+enum { kRtpHeaderSize = 12 };
+enum { kFecPayloadType = 96 };
+enum { kRedPayloadType = 97 };
+enum { kVp8PayloadType = 120 };
+
+struct RtpPacket : public Packet {
+  WebRtcRTPHeader header;
+};
+
+class FrameGenerator {
+ public:
+  FrameGenerator() : num_packets_(0), seq_num_(0), timestamp_(0) {}
+
+  void NewFrame(int num_packets) {
+    num_packets_ = num_packets;
+    timestamp_ += 3000;
+  }
+
+  RtpPacket* NextPacket(int offset, size_t length) {
+    RtpPacket* rtp_packet = new RtpPacket;
+    for (size_t i = 0; i < length; ++i)
+      rtp_packet->data[i] = offset + i;
+    rtp_packet->length = length;
+    memset(&rtp_packet->header, 0, sizeof(WebRtcRTPHeader));
+    rtp_packet->header.frameType = kVideoFrameDelta;
+    rtp_packet->header.header.headerLength = kRtpHeaderSize;
+    rtp_packet->header.header.markerBit = (num_packets_ == 1);
+    rtp_packet->header.header.sequenceNumber = seq_num_;
+    rtp_packet->header.header.timestamp = timestamp_;
+    rtp_packet->header.header.payloadType = kVp8PayloadType;
+    BuildRtpHeader(rtp_packet->data, rtp_packet->header.header);
+    ++seq_num_;
+    --num_packets_;
+    return rtp_packet;
+  }
+
+  // Creates a new RtpPacket with the RED header added to the packet.
+  RtpPacket* BuildMediaRedPacket(const RtpPacket* packet) {
+    const int kHeaderLength = packet->header.header.headerLength;
+    RtpPacket* red_packet = new RtpPacket;
+    red_packet->header = packet->header;
+    red_packet->length = packet->length + 1;  // 1 byte RED header.
+    memset(red_packet->data, 0, red_packet->length);
+    // Copy RTP header.
+    memcpy(red_packet->data, packet->data, kHeaderLength);
+    SetRedHeader(red_packet, red_packet->data[1] & 0x7f, kHeaderLength);
+    memcpy(red_packet->data + kHeaderLength + 1, packet->data + kHeaderLength,
+           packet->length - kHeaderLength);
+    return red_packet;
+  }
+
+  // Creates a new RtpPacket with FEC payload and red header. Does this by
+  // creating a new fake media RtpPacket, clears the marker bit and adds a RED
+  // header. Finally replaces the payload with the content of |packet->data|.
+  RtpPacket* BuildFecRedPacket(const Packet* packet) {
+    // Create a fake media packet to get a correct header. 1 byte RED header.
+    ++num_packets_;
+    RtpPacket* red_packet = NextPacket(0, packet->length + 1);
+    red_packet->data[1] &= ~0x80;  // Clear marker bit.
+    const int kHeaderLength = red_packet->header.header.headerLength;
+    SetRedHeader(red_packet, kFecPayloadType, kHeaderLength);
+    memcpy(red_packet->data + kHeaderLength + 1, packet->data,
+           packet->length);
+    red_packet->length = kHeaderLength + 1 + packet->length;
+    return red_packet;
+  }
+
+  void SetRedHeader(Packet* red_packet, uint8_t payload_type,
+                    int header_length) const {
+    // Replace pltype.
+    red_packet->data[1] &= 0x80;  // Reset.
+    red_packet->data[1] += kRedPayloadType;  // Replace.
+
+    // Add RED header, f-bit always 0.
+    red_packet->data[header_length] = payload_type;
+  }
+
+ private:
+  void BuildRtpHeader(uint8_t* data, RTPHeader header) {
+    data[0] = 0x80;  // Version 2.
+    data[1] = header.payloadType;
+    data[1] |= (header.markerBit ? kRtpMarkerBitMask : 0);
+    ModuleRTPUtility::AssignUWord16ToBuffer(data+2, header.sequenceNumber);
+    ModuleRTPUtility::AssignUWord32ToBuffer(data+4, header.timestamp);
+    ModuleRTPUtility::AssignUWord32ToBuffer(data+8, header.ssrc);
+  }
+
+  int num_packets_;
+  uint16_t seq_num_;
+  uint32_t timestamp_;
+};
+
+class ReceiverFecTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    fec_ = new ForwardErrorCorrection(0);
+    receiver_fec_ = new ReceiverFEC(0, &rtp_receiver_video_);
+    generator_ = new FrameGenerator();
+    receiver_fec_->SetPayloadTypeFEC(kFecPayloadType);
+  }
+
+  virtual void TearDown() {
+    delete fec_;
+    delete receiver_fec_;
+    delete generator_;
+  }
+
+  void GenerateAndAddFrames(int num_frames,
+                            int num_packets_per_frame,
+                            std::list<RtpPacket*>* media_rtp_packets,
+                            std::list<Packet*>* media_packets) {
+    for (int i = 0; i < num_frames; ++i) {
+      GenerateFrame(num_packets_per_frame, i, media_rtp_packets,
+                    media_packets);
+    }
+    for (std::list<RtpPacket*>::iterator it = media_rtp_packets->begin();
+        it != media_rtp_packets->end(); ++it) {
+      BuildAndAddRedMediaPacket(*it);
+    }
+  }
+
+  void GenerateFEC(std::list<Packet*>* media_packets,
+                   std::list<Packet*>* fec_packets,
+                   unsigned int num_fec_packets) {
+    EXPECT_EQ(0, fec_->GenerateFEC(
+        *media_packets,
+        num_fec_packets * 255 / media_packets->size(),
+        0,
+        false,
+        fec_packets));
+    ASSERT_EQ(num_fec_packets, fec_packets->size());
+  }
+
+  void GenerateFrame(int num_media_packets,
+                     int frame_offset,
+                     std::list<RtpPacket*>* media_rtp_packets,
+                     std::list<Packet*>* media_packets) {
+    generator_->NewFrame(num_media_packets);
+    for (int i = 0; i < num_media_packets; ++i) {
+      media_rtp_packets->push_back(generator_->NextPacket(frame_offset + i,
+                                                          kRtpHeaderSize + 10));
+      media_packets->push_back(media_rtp_packets->back());
+    }
+  }
+
+  void VerifyReconstructedMediaPacket(const RtpPacket* packet, int times) {
+    // Verify that the content of the reconstructed packet is equal to the
+    // content of |packet|, and that the same content is received |times| number
+    // of times in a row.
+    EXPECT_CALL(rtp_receiver_video_,
+                ReceiveRecoveredPacketCallback(_, _,
+                                               packet->length - kRtpHeaderSize))
+        .With(Args<1, 2>(ElementsAreArray(packet->data + kRtpHeaderSize,
+                                          packet->length - kRtpHeaderSize)))
+        .Times(times);
+  }
+
+  void BuildAndAddRedMediaPacket(RtpPacket* packet) {
+    RtpPacket* red_packet = generator_->BuildMediaRedPacket(packet);
+    bool is_fec = false;
+    EXPECT_EQ(0, receiver_fec_->AddReceivedFECPacket(&red_packet->header,
+                                                     red_packet->data,
+                                                     red_packet->length -
+                                                     kRtpHeaderSize,
+                                                     is_fec));
+    delete red_packet;
+    EXPECT_FALSE(is_fec);
+  }
+
+  void BuildAndAddRedFecPacket(Packet* packet) {
+    RtpPacket* red_packet = generator_->BuildFecRedPacket(packet);
+    bool is_fec = false;
+    EXPECT_EQ(0, receiver_fec_->AddReceivedFECPacket(&red_packet->header,
+                                                     red_packet->data,
+                                                     red_packet->length -
+                                                     kRtpHeaderSize,
+                                                     is_fec));
+    delete red_packet;
+    EXPECT_TRUE(is_fec);
+  }
+
+  ForwardErrorCorrection* fec_;
+  MockRTPReceiverVideo rtp_receiver_video_;
+  ReceiverFEC* receiver_fec_;
+  FrameGenerator* generator_;
+};
+
+void DeletePackets(std::list<Packet*>* packets) {
+  while (!packets->empty()) {
+    delete packets->front();
+    packets->pop_front();
+  }
+}
+
+TEST_F(ReceiverFecTest, TwoMediaOneFec) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(2, 0, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator media_it = media_rtp_packets.begin();
+  BuildAndAddRedMediaPacket(*media_it);
+  // Drop one media packet.
+  std::list<Packet*>::iterator fec_it = fec_packets.begin();
+  BuildAndAddRedFecPacket(*fec_it);
+  {
+    InSequence s;
+    std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+    VerifyReconstructedMediaPacket(*it, 1);
+    ++it;
+    VerifyReconstructedMediaPacket(*it, 1);
+  }
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, TwoMediaTwoFec) {
+  const unsigned int kNumFecPackets = 2u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(2, 0, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  // Drop both media packets.
+  std::list<Packet*>::iterator fec_it = fec_packets.begin();
+  BuildAndAddRedFecPacket(*fec_it);
+  ++fec_it;
+  BuildAndAddRedFecPacket(*fec_it);
+  {
+    InSequence s;
+    std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+    VerifyReconstructedMediaPacket(*it, 1);
+    ++it;
+    VerifyReconstructedMediaPacket(*it, 1);
+  }
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, TwoFramesOneFec) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
+  GenerateFrame(1, 1, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  BuildAndAddRedMediaPacket(media_rtp_packets.front());
+  // Drop one media packet.
+  BuildAndAddRedFecPacket(fec_packets.front());
+  {
+    InSequence s;
+    std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+    VerifyReconstructedMediaPacket(*it, 1);
+    ++it;
+    VerifyReconstructedMediaPacket(*it, 1);
+  }
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, OneCompleteOneUnrecoverableFrame) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
+  GenerateFrame(2, 1, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  BuildAndAddRedMediaPacket(*it);  // First frame
+  BuildAndAddRedMediaPacket(*it);  // First packet of second frame.
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, MaxFramesOneFec) {
+  const unsigned int kNumFecPackets = 1u;
+  const unsigned int kNumMediaPackets = 48u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  for (unsigned int i = 0; i < kNumMediaPackets; ++i)
+    GenerateFrame(1, i, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  ++it;  // Drop first packet.
+  for (; it != media_rtp_packets.end(); ++it)
+    BuildAndAddRedMediaPacket(*it);
+  BuildAndAddRedFecPacket(fec_packets.front());
+  {
+    InSequence s;
+    std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+    for (; it != media_rtp_packets.end(); ++it)
+      VerifyReconstructedMediaPacket(*it, 1);
+  }
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, TooManyFrames) {
+  const unsigned int kNumFecPackets = 1u;
+  const unsigned int kNumMediaPackets = 49u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  for (unsigned int i = 0; i < kNumMediaPackets; ++i)
+    GenerateFrame(1, i, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packets,
+                                  kNumFecPackets * 255 / kNumMediaPackets,
+                                  0,
+                                  false,
+                                  &fec_packets));
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, PacketNotDroppedTooEarly) {
+  // 1 frame with 2 media packets and one FEC packet. One media packet missing.
+  // Delay the FEC packet.
+  Packet* delayed_fec = NULL;
+  const unsigned int kNumFecPacketsBatch1 = 1u;
+  const unsigned int kNumMediaPacketsBatch1 = 2u;
+  std::list<RtpPacket*> media_rtp_packets_batch1;
+  std::list<Packet*> media_packets_batch1;
+  GenerateFrame(kNumMediaPacketsBatch1, 0, &media_rtp_packets_batch1,
+                &media_packets_batch1);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
+
+  BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  delayed_fec = fec_packets.front();
+
+  // Fill the FEC decoder. No packets should be dropped.
+  const unsigned int kNumMediaPacketsBatch2 = 47u;
+  std::list<RtpPacket*> media_rtp_packets_batch2;
+  std::list<Packet*> media_packets_batch2;
+  GenerateAndAddFrames(kNumMediaPacketsBatch2, 1, &media_rtp_packets_batch2,
+                       &media_packets_batch2);
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(media_packets_batch2.size());
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  // Add the delayed FEC packet. One packet should be reconstructed.
+  BuildAndAddRedFecPacket(delayed_fec);
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets_batch1);
+  DeletePackets(&media_packets_batch2);
+}
+
+TEST_F(ReceiverFecTest, PacketDroppedWhenTooOld) {
+  // 1 frame with 2 media packets and one FEC packet. One media packet missing.
+  // Delay the FEC packet.
+  Packet* delayed_fec = NULL;
+  const unsigned int kNumFecPacketsBatch1 = 1u;
+  const unsigned int kNumMediaPacketsBatch1 = 2u;
+  std::list<RtpPacket*> media_rtp_packets_batch1;
+  std::list<Packet*> media_packets_batch1;
+  GenerateFrame(kNumMediaPacketsBatch1, 0, &media_rtp_packets_batch1,
+                &media_packets_batch1);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
+
+  BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  delayed_fec = fec_packets.front();
+
+  // Fill the FEC decoder and force the last packet to be dropped.
+  const unsigned int kNumMediaPacketsBatch2 = 48u;
+  std::list<RtpPacket*> media_rtp_packets_batch2;
+  std::list<Packet*> media_packets_batch2;
+  GenerateAndAddFrames(kNumMediaPacketsBatch2, 1, &media_rtp_packets_batch2,
+                       &media_packets_batch2);
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(media_packets_batch2.size());
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  // Add the delayed FEC packet. No packet should be reconstructed since the
+  // first media packet of that frame has been dropped due to being too old.
+  BuildAndAddRedFecPacket(delayed_fec);
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(0);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets_batch1);
+  DeletePackets(&media_packets_batch2);
+}
+
+TEST_F(ReceiverFecTest, OldFecPacketDropped) {
+  // 49 frames with 2 media packets and one FEC packet. All media packets
+  // missing.
+  const unsigned int kNumMediaPackets = 49 * 2;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  for (unsigned int i = 0; i < kNumMediaPackets / 2; ++i) {
+    std::list<RtpPacket*> frame_media_rtp_packets;
+    std::list<Packet*> frame_media_packets;
+    std::list<Packet*> fec_packets;
+    GenerateFrame(2, 0, &frame_media_rtp_packets, &frame_media_packets);
+    GenerateFEC(&frame_media_packets, &fec_packets, 1);
+    for (std::list<Packet*>::iterator it = fec_packets.begin();
+        it != fec_packets.end(); ++it) {
+      BuildAndAddRedFecPacket(*it);
+    }
+    media_packets.insert(media_packets.end(),
+                         frame_media_packets.begin(),
+                         frame_media_packets.end());
+    media_rtp_packets.insert(media_rtp_packets.end(),
+                             frame_media_rtp_packets.begin(),
+                             frame_media_rtp_packets.end());
+  }
+  // Don't insert any media packets.
+  // Only FEC packets inserted. No packets should be recoverable at this time.
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+        .Times(0);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  // Insert the oldest media packet. The corresponding FEC packet is too old
+  // and should've been dropped. Only the media packet we inserted will be
+  // returned.
+  BuildAndAddRedMediaPacket(media_rtp_packets.front());
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+        .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, PacketsOnlyReturnedOnce) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
+  GenerateFrame(2, 1, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator media_it = media_rtp_packets.begin();
+  BuildAndAddRedMediaPacket(*media_it);  // First frame.
+  {
+    std::list<RtpPacket*>::iterator verify_it = media_rtp_packets.begin();
+    VerifyReconstructedMediaPacket(*verify_it, 1);  // First frame
+  }
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  ++media_it;
+  BuildAndAddRedMediaPacket(*media_it);  // 1st packet of 2nd frame.
+  BuildAndAddRedFecPacket(fec_packets.front());  // Insert FEC packet.
+  {
+    InSequence s;
+    std::list<RtpPacket*>::iterator verify_it = media_rtp_packets.begin();
+    ++verify_it;  // First frame has already been returned.
+    VerifyReconstructedMediaPacket(*verify_it, 1);  // 1st packet of 2nd frame.
+    ++verify_it;
+    VerifyReconstructedMediaPacket(*verify_it, 1);  // 2nd packet of 2nd frame.
+  }
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  ++media_it;
+  BuildAndAddRedMediaPacket(*media_it);  // 2nd packet of 2nd frame.
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(0);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/remote_rate_control.cc b/trunk/src/modules/rtp_rtcp/source/remote_rate_control.cc
new file mode 100644
index 0000000..fe8477c
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/remote_rate_control.cc
@@ -0,0 +1,483 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if _WIN32
+#include <windows.h>
+#endif
+
+#include "remote_rate_control.h"
+#include "trace.h"
+#include <math.h>
+#include <string.h>
+
+#ifdef MATLAB
+extern MatlabEngine eng; // global variable defined elsewhere
+#endif
+
+namespace webrtc {
+RemoteRateControl::RemoteRateControl()
+:
+_minConfiguredBitRate(30000),
+_maxConfiguredBitRate(30000000),
+_currentBitRate(_maxConfiguredBitRate),
+_maxHoldRate(0),
+_avgMaxBitRate(-1.0f),
+_varMaxBitRate(0.4f),
+_rcState(kRcHold),
+_cameFromState(kRcDecrease),
+_rcRegion(kRcMaxUnknown),
+_lastBitRateChange(-1),
+_currentInput(kBwNormal, 0, 1.0),
+_updated(false),
+_timeFirstIncomingEstimate(-1),
+_initializedBitRate(false),
+_avgChangePeriod(1000.0f),
+_lastChangeMs(-1),
+_beta(0.9f)
+#ifdef MATLAB
+,_plot1(NULL),
+_plot2(NULL)
+#endif
+{
+}
+
+RemoteRateControl::~RemoteRateControl()
+{
+#ifdef MATLAB
+    eng.DeletePlot(_plot1);
+    eng.DeletePlot(_plot2);
+#endif
+}
+
+void RemoteRateControl::Reset()
+{
+    _minConfiguredBitRate = 30000;
+    _maxConfiguredBitRate = 30000000;
+    _currentBitRate = _maxConfiguredBitRate;
+    _maxHoldRate = 0;
+    _avgMaxBitRate = -1.0f;
+    _varMaxBitRate = 0.4f;
+    _rcState = kRcHold;
+    _cameFromState = kRcHold;
+    _rcRegion = kRcMaxUnknown;
+    _lastBitRateChange = -1;
+    _avgChangePeriod = 1000.0f;
+    _lastChangeMs = -1;
+    _beta = 0.9f;
+    _currentInput._bwState = kBwNormal;
+    _currentInput._incomingBitRate = 0;
+    _currentInput._noiseVar = 1.0;
+    _updated = false;
+    _timeFirstIncomingEstimate = -1;
+    _initializedBitRate = false;
+}
+
+bool RemoteRateControl::ValidEstimate() const {
+  return _initializedBitRate;
+}
+
+WebRtc_Word32 RemoteRateControl::SetConfiguredBitRates(WebRtc_UWord32 minBitRateBps, WebRtc_UWord32 maxBitRateBps)
+{
+    if (minBitRateBps > maxBitRateBps)
+    {
+        return -1;
+    }
+    _minConfiguredBitRate = minBitRateBps;
+    _maxConfiguredBitRate = maxBitRateBps;
+    _currentBitRate = BWE_MIN(BWE_MAX(minBitRateBps, _currentBitRate), maxBitRateBps);
+    return 0;
+}
+
+WebRtc_UWord32 RemoteRateControl::LatestEstimate() const {
+  return _currentBitRate;
+}
+
+WebRtc_UWord32 RemoteRateControl::UpdateBandwidthEstimate(WebRtc_UWord32 RTT,
+                                                          WebRtc_Word64 nowMS)
+{
+    _currentBitRate = ChangeBitRate(_currentBitRate, _currentInput._incomingBitRate,
+        _currentInput._noiseVar, RTT, nowMS);
+    return _currentBitRate;
+}
+
+RateControlRegion RemoteRateControl::Update(const RateControlInput& input,
+                                            bool& firstOverUse,
+                                            WebRtc_Word64 nowMS)
+{
+#ifdef MATLAB
+    // Create plots
+    if (_plot1 == NULL)
+    {
+        _plot1 = eng.NewPlot(new MatlabPlot());
+
+        _plot1->AddTimeLine(30, "b", "current");
+        _plot1->AddTimeLine(30, "r-", "avgMax");
+        _plot1->AddTimeLine(30, "r--", "pStdMax");
+        _plot1->AddTimeLine(30, "r--", "nStdMax");
+        _plot1->AddTimeLine(30, "r+", "max");
+        _plot1->AddTimeLine(30, "g", "incoming");
+        _plot1->AddTimeLine(30, "b+", "recovery");
+    }
+    if (_plot2 == NULL)
+    {
+        _plot2 = eng.NewPlot(new MatlabPlot());
+
+        _plot2->AddTimeLine(30, "b", "alpha");
+    }
+#endif
+
+    firstOverUse = (_currentInput._bwState != kBwOverusing &&
+                   input._bwState == kBwOverusing);
+
+    // Set the initial bit rate value to what we're receiving the first second
+    if (!_initializedBitRate)
+    {
+        if (_timeFirstIncomingEstimate < 0)
+        {
+            if (input._incomingBitRate > 0)
+            {
+                _timeFirstIncomingEstimate = nowMS;
+            }
+        }
+        else if (nowMS - _timeFirstIncomingEstimate > 1000 &&
+            input._incomingBitRate > 0)
+        {
+            _currentBitRate = input._incomingBitRate;
+            _initializedBitRate = true;
+        }
+    }
+
+    if (_updated && _currentInput._bwState == kBwOverusing)
+    {
+        // Only update delay factor and incoming bit rate. We always want to react on an over-use.
+        _currentInput._noiseVar = input._noiseVar;
+        _currentInput._incomingBitRate = input._incomingBitRate;
+        return _rcRegion;
+    }
+    _updated = true;
+    _currentInput = input;
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: Incoming rate = %u kbps", input._incomingBitRate/1000);
+    return _rcRegion;
+}
+
+WebRtc_UWord32 RemoteRateControl::ChangeBitRate(WebRtc_UWord32 currentBitRate,
+                                                WebRtc_UWord32 incomingBitRate,
+                                                double noiseVar,
+                                                WebRtc_UWord32 RTT,
+                                                WebRtc_Word64 nowMS)
+{
+    if (!_updated)
+    {
+        return _currentBitRate;
+    }
+    _updated = false;
+    UpdateChangePeriod(nowMS);
+    ChangeState(_currentInput, nowMS);
+    // calculated here because it's used in multiple places
+    const float incomingBitRateKbps = incomingBitRate / 1000.0f;
+    // Calculate the max bit rate std dev given the normalized
+    // variance and the current incoming bit rate.
+    const float stdMaxBitRate = sqrt(_varMaxBitRate * _avgMaxBitRate);
+    bool recovery = false;
+    switch (_rcState)
+    {
+    case kRcHold:
+        {
+            _maxHoldRate = BWE_MAX(_maxHoldRate, incomingBitRate);
+            break;
+        }
+    case kRcIncrease:
+        {
+            if (_avgMaxBitRate >= 0)
+            {
+                if (incomingBitRateKbps > _avgMaxBitRate + 3 * stdMaxBitRate)
+                {
+                    ChangeRegion(kRcMaxUnknown);
+                    _avgMaxBitRate = -1.0;
+                }
+                else if (incomingBitRateKbps > _avgMaxBitRate + 2.5 * stdMaxBitRate)
+                {
+                    ChangeRegion(kRcAboveMax);
+                }
+            }
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                         "BWE: Response time: %f + %i + 10*33\n",
+                         _avgChangePeriod, RTT);
+            const WebRtc_UWord32 responseTime = static_cast<WebRtc_UWord32>(_avgChangePeriod + 0.5f) + RTT + 300;
+            double alpha = RateIncreaseFactor(nowMS, _lastBitRateChange,
+                                              responseTime, noiseVar);
+
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                "BWE: _avgChangePeriod = %f ms; RTT = %u ms", _avgChangePeriod, RTT);
+
+            currentBitRate = static_cast<WebRtc_UWord32>(currentBitRate * alpha) + 1000;
+            if (_maxHoldRate > 0 && _beta * _maxHoldRate > currentBitRate)
+            {
+                currentBitRate = static_cast<WebRtc_UWord32>(_beta * _maxHoldRate);
+                _avgMaxBitRate = _beta * _maxHoldRate / 1000.0f;
+                ChangeRegion(kRcNearMax);
+                recovery = true;
+#ifdef MATLAB
+                _plot1->Append("recovery", _maxHoldRate/1000);
+#endif
+            }
+            _maxHoldRate = 0;
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                "BWE: Increase rate to currentBitRate = %u kbps", currentBitRate/1000);
+            _lastBitRateChange = nowMS;
+            break;
+        }
+    case kRcDecrease:
+        {
+            if (incomingBitRate < _minConfiguredBitRate)
+            {
+                currentBitRate = _minConfiguredBitRate;
+            }
+            else
+            {
+                // Set bit rate to something slightly lower than max
+                // to get rid of any self-induced delay.
+                currentBitRate = static_cast<WebRtc_UWord32>(_beta * incomingBitRate + 0.5);
+                if (currentBitRate > _currentBitRate)
+                {
+                    // Avoid increasing the rate when over-using.
+                    if (_rcRegion != kRcMaxUnknown)
+                    {
+                        currentBitRate = static_cast<WebRtc_UWord32>(_beta * _avgMaxBitRate * 1000 + 0.5f);
+                    }
+                    currentBitRate = BWE_MIN(currentBitRate, _currentBitRate);
+                }
+                ChangeRegion(kRcNearMax);
+
+                if (incomingBitRateKbps < _avgMaxBitRate - 3 * stdMaxBitRate)
+                {
+                    _avgMaxBitRate = -1.0f;
+                }
+
+                UpdateMaxBitRateEstimate(incomingBitRateKbps);
+
+#ifdef MATLAB
+                _plot1->Append("max", incomingBitRateKbps);
+#endif
+
+                WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: Decrease rate to currentBitRate = %u kbps", currentBitRate/1000);
+            }
+            // Stay on hold until the pipes are cleared.
+            ChangeState(kRcHold);
+            _lastBitRateChange = nowMS;
+            break;
+        }
+    }
+    if (!recovery && (incomingBitRate > 100000 || currentBitRate > 150000) &&
+        currentBitRate > 1.5 * incomingBitRate)
+    {
+        // Allow changing the bit rate if we are operating at very low rates
+        // Don't change the bit rate if the send side is too far off
+        currentBitRate = _currentBitRate;
+        _lastBitRateChange = nowMS;
+    }
+#ifdef MATLAB
+    if (_avgMaxBitRate >= 0.0f)
+    {
+        _plot1->Append("avgMax", _avgMaxBitRate);
+        _plot1->Append("pStdMax", _avgMaxBitRate + 3*stdMaxBitRate);
+        _plot1->Append("nStdMax", _avgMaxBitRate - 3*stdMaxBitRate);
+    }
+    _plot1->Append("incoming", incomingBitRate/1000);
+    _plot1->Append("current", currentBitRate/1000);
+    _plot1->Plot();
+#endif
+    return currentBitRate;
+}
+
+double RemoteRateControl::RateIncreaseFactor(WebRtc_Word64 nowMs, WebRtc_Word64 lastMs, WebRtc_UWord32 reactionTimeMs, double noiseVar) const
+{
+    // alpha = 1.02 + B ./ (1 + exp(b*(tr - (c1*s2 + c2))))
+    // Parameters
+    const double B = 0.0407;
+    const double b = 0.0025;
+    const double c1 = -6700.0 / (33 * 33);
+    const double c2 = 800.0;
+    const double d = 0.85;
+
+    double alpha = 1.005 + B / (1 + exp( b * (d * reactionTimeMs - (c1 * noiseVar + c2))));
+
+    if (alpha < 1.005)
+    {
+        alpha = 1.005;
+    }
+    else if (alpha > 1.3)
+    {
+        alpha = 1.3;
+    }
+
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+        "BWE: alpha = %f", alpha);
+#ifdef MATLAB
+            _plot2->Append("alpha", alpha);
+            _plot2->Plot();
+#endif
+
+    if (lastMs > -1)
+    {
+        alpha = pow(alpha, (nowMs - lastMs) / 1000.0);
+    }
+
+    if (_rcRegion == kRcNearMax)
+    {
+        // We're close to our previous maximum. Try to stabilize the
+        // bit rate in this region, by increasing in smaller steps.
+        alpha = alpha - (alpha - 1.0) / 2.0;
+    }
+    else if (_rcRegion == kRcMaxUnknown)
+    {
+        alpha = alpha + (alpha - 1.0) * 2.0;
+    }
+
+    return alpha;
+}
+
+void RemoteRateControl::UpdateChangePeriod(WebRtc_Word64 nowMs)
+{
+    WebRtc_Word64 changePeriod = 0;
+    if (_lastChangeMs > -1)
+    {
+        changePeriod = nowMs - _lastChangeMs;
+    }
+    _lastChangeMs = nowMs;
+    _avgChangePeriod = 0.9f * _avgChangePeriod + 0.1f * changePeriod;
+}
+
+void RemoteRateControl::UpdateMaxBitRateEstimate(float incomingBitRateKbps)
+{
+    const float alpha = 0.05f;
+    if (_avgMaxBitRate == -1.0f)
+    {
+        _avgMaxBitRate = incomingBitRateKbps;
+    }
+    else
+    {
+        _avgMaxBitRate = (1 - alpha) * _avgMaxBitRate +
+                            alpha * incomingBitRateKbps;
+    }
+    // Estimate the max bit rate variance and normalize the variance
+    // with the average max bit rate.
+    const float norm = BWE_MAX(_avgMaxBitRate, 1.0f);
+    _varMaxBitRate = (1 - alpha) * _varMaxBitRate +
+               alpha * (_avgMaxBitRate - incomingBitRateKbps) *
+                       (_avgMaxBitRate - incomingBitRateKbps) /
+                       norm;
+    // 0.4 ~= 14 kbit/s at 500 kbit/s
+    if (_varMaxBitRate < 0.4f)
+    {
+        _varMaxBitRate = 0.4f;
+    }
+    // 2.5f ~= 35 kbit/s at 500 kbit/s
+    if (_varMaxBitRate > 2.5f)
+    {
+        _varMaxBitRate = 2.5f;
+    }
+}
+
+void RemoteRateControl::ChangeState(const RateControlInput& input, WebRtc_Word64 nowMs)
+{
+    switch (_currentInput._bwState)
+    {
+    case kBwNormal:
+        {
+            if (_rcState == kRcHold)
+            {
+                _lastBitRateChange = nowMs;
+                ChangeState(kRcIncrease);
+            }
+            break;
+        }
+    case kBwOverusing:
+        {
+            if (_rcState != kRcDecrease)
+            {
+                ChangeState(kRcDecrease);
+            }
+            break;
+        }
+    case kBwUnderUsing:
+        {
+            ChangeState(kRcHold);
+            break;
+        }
+    }
+}
+
+void RemoteRateControl::ChangeRegion(RateControlRegion region)
+{
+    _rcRegion = region;
+    switch (_rcRegion)
+    {
+    case kRcAboveMax:
+    case kRcMaxUnknown:
+        {
+            _beta = 0.9f;
+            break;
+        }
+    case kRcNearMax:
+        {
+            _beta = 0.95f;
+            break;
+        }
+    }
+}
+
+void RemoteRateControl::ChangeState(RateControlState newState)
+{
+    _cameFromState = _rcState;
+    _rcState = newState;
+    char state1[15];
+    char state2[15];
+    char state3[15];
+    StateStr(_cameFromState, state1);
+    StateStr(_rcState, state2);
+    StateStr(_currentInput._bwState, state3);
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                 "\t%s => %s due to %s\n", state1, state2, state3);
+}
+
+void RemoteRateControl::StateStr(RateControlState state, char* str)
+{
+    switch (state)
+    {
+    case kRcDecrease:
+        strncpy(str, "DECREASE", 9);
+        break;
+    case kRcHold:
+        strncpy(str, "HOLD", 5);
+        break;
+    case kRcIncrease:
+        strncpy(str, "INCREASE", 9);
+        break;
+    }
+}
+
+void RemoteRateControl::StateStr(BandwidthUsage state, char* str)
+{
+    switch (state)
+    {
+    case kBwNormal:
+        strncpy(str, "NORMAL", 7);
+        break;
+    case kBwOverusing:
+        strncpy(str, "OVER USING", 11);
+        break;
+    case kBwUnderUsing:
+        strncpy(str, "UNDER USING", 12);
+        break;
+    }
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/remote_rate_control.h b/trunk/src/modules/rtp_rtcp/source/remote_rate_control.h
new file mode 100644
index 0000000..197bf22
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/remote_rate_control.h
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_REMOTE_RATE_CONTROL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_REMOTE_RATE_CONTROL_H_
+
+#include "bwe_defines.h"
+#include "typedefs.h"
+
+#ifdef MATLAB
+#include "../test/BWEStandAlone/MatlabPlot.h"
+#endif
+
+namespace webrtc {
+// Receive-side remote rate controller: a hold/increase/decrease state
+// machine that adapts a bandwidth estimate from over-use detector input
+// and reports the rate-control region back to the caller.
+class RemoteRateControl
+{
+public:
+    RemoteRateControl();
+    ~RemoteRateControl();
+    // Clamps future estimates to [minBitRate, maxBitRate].
+    WebRtc_Word32 SetConfiguredBitRates(WebRtc_UWord32 minBitRate,
+                                        WebRtc_UWord32 maxBitRate);
+    WebRtc_UWord32 LatestEstimate() const;
+    WebRtc_UWord32 UpdateBandwidthEstimate(WebRtc_UWord32 RTT,
+                                           WebRtc_Word64 nowMS);
+    // Feeds one detector sample; `firstOverUse` is an out-flag for the
+    // first over-use observation. Returns the current region.
+    RateControlRegion Update(const RateControlInput& input, bool& firstOverUse,
+                             WebRtc_Word64 nowMS);
+    void Reset();
+
+    // Returns true if there is a valid estimate of the incoming bitrate, false
+    // otherwise.
+    bool ValidEstimate() const;
+
+private:
+    WebRtc_UWord32 ChangeBitRate(WebRtc_UWord32 currentBitRate,
+                                 WebRtc_UWord32 incomingBitRate,
+                                 double delayFactor, WebRtc_UWord32 RTT,
+                                 WebRtc_Word64 nowMS);
+    double RateIncreaseFactor(WebRtc_Word64 nowMs,
+                              WebRtc_Word64 lastMs,
+                              WebRtc_UWord32 reactionTimeMs,
+                              double noiseVar) const;
+    void UpdateChangePeriod(WebRtc_Word64 nowMs);
+    void UpdateMaxBitRateEstimate(float incomingBitRateKbps);
+    void ChangeState(const RateControlInput& input, WebRtc_Word64 nowMs);
+    void ChangeState(RateControlState newState);
+    void ChangeRegion(RateControlRegion region);
+    // Debug helpers: write a state name into a caller-supplied buffer.
+    static void StateStr(RateControlState state, char* str);
+    static void StateStr(BandwidthUsage state, char* str);
+
+    WebRtc_UWord32        _minConfiguredBitRate;
+    WebRtc_UWord32        _maxConfiguredBitRate;
+    WebRtc_UWord32        _currentBitRate;
+    WebRtc_UWord32        _maxHoldRate;
+    // Running statistics of the maximum incoming bitrate (kbps).
+    float               _avgMaxBitRate;
+    float               _varMaxBitRate;
+    RateControlState    _rcState;
+    RateControlState    _cameFromState;
+    RateControlRegion   _rcRegion;
+    WebRtc_Word64         _lastBitRateChange;
+    RateControlInput    _currentInput;
+    bool                _updated;
+    WebRtc_Word64         _timeFirstIncomingEstimate;
+    bool                _initializedBitRate;
+
+    float               _avgChangePeriod;
+    WebRtc_Word64         _lastChangeMs;
+    // Multiplicative back-off factor used on decrease; set by ChangeRegion.
+    float               _beta;
+#ifdef MATLAB
+    MatlabPlot          *_plot1;
+    MatlabPlot          *_plot2;
+#endif
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_REMOTE_RATE_CONTROL_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/trunk/src/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
new file mode 100644
index 0000000..9fdd6ca
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <gtest/gtest.h>
+
+#include "typedefs.h"
+#include "common_types.h"
+#include "rtp_utility.h"
+#include "rtcp_sender.h"
+#include "rtcp_receiver.h"
+#include "rtp_rtcp_impl.h"
+#include "bwe_defines.h"
+
+namespace {
+
+using namespace webrtc;
+
+
+// Loopback transport used by the REMB tests: it intercepts outgoing RTCP,
+// re-parses it, and feeds it straight into the supplied RTCPReceiver,
+// asserting that a REMB with the hard-coded bitrate 1234 (the value the
+// tests pass to SetREMBData) arrives intact.
+class TestTransport : public Transport {
+ public:
+  TestTransport(RTCPReceiver* rtcp_receiver) :
+    rtcp_receiver_(rtcp_receiver) {
+  }
+
+  // RTP sending is not exercised by these tests.
+  virtual int SendPacket(int /*channel*/, const void* /*data*/, int /*len*/) {
+    return -1;
+  }
+  virtual int SendRTCPPacket(int /*channel*/,
+                             const void *packet,
+                             int packetLength) {
+    RTCPUtility::RTCPParserV2 rtcpParser((WebRtc_UWord8*)packet,
+                                         (WebRtc_Word32)packetLength,
+                                         true); // Allow non-compound RTCP
+
+    EXPECT_TRUE(rtcpParser.IsValid());
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    EXPECT_EQ(0, rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation,
+                                                    &rtcpParser));
+
+    // The REMB flag must be set and the estimated max bitrate must survive
+    // the send -> parse -> receive round trip.
+    EXPECT_EQ((WebRtc_UWord32)kRtcpRemb,
+              rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb);
+    EXPECT_EQ((WebRtc_UWord32)1234,
+              rtcpPacketInformation.receiverEstimatedMaxBitrate);
+    return packetLength;
+  }
+ private:
+  RTCPReceiver* rtcp_receiver_;
+};
+
+
+// Fixture owning the raw pointers wired together in SetUp/TearDown: a
+// clock, a dummy RTP/RTCP module, an RTCP sender/receiver pair, and the
+// loopback TestTransport above.
+class RtcpFormatRembTest : public ::testing::Test {
+ protected:
+  RtcpFormatRembTest() {};
+  virtual void SetUp();
+  virtual void TearDown();
+
+  RtpRtcpClock* system_clock_;
+  ModuleRtpRtcpImpl* dummy_rtp_rtcp_impl_;
+  RTCPSender* rtcp_sender_;
+  RTCPReceiver* rtcp_receiver_;
+  TestTransport* test_transport_;
+};
+
+// Builds the sender/receiver pair around a dummy module and registers the
+// loopback transport so SendRTCP() lands in rtcp_receiver_.
+void RtcpFormatRembTest::SetUp() {
+  system_clock_ = ModuleRTPUtility::GetSystemClock();
+  dummy_rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(0, false, system_clock_);
+  rtcp_sender_ = new RTCPSender(0, false, system_clock_, dummy_rtp_rtcp_impl_);
+  rtcp_receiver_ = new RTCPReceiver(0, system_clock_, dummy_rtp_rtcp_impl_);
+  test_transport_ = new TestTransport(rtcp_receiver_);
+
+  EXPECT_EQ(0, rtcp_sender_->Init());
+  EXPECT_EQ(0, rtcp_sender_->RegisterSendTransport(test_transport_));
+}
+
+// Frees everything allocated in SetUp. Sender/receiver go first so they
+// cannot observe the module or transport after deletion.
+void RtcpFormatRembTest::TearDown() {
+  delete rtcp_sender_;
+  delete rtcp_receiver_;
+  delete dummy_rtp_rtcp_impl_;
+  delete test_transport_;
+  delete system_clock_;
+}
+
+// The REMB status flag toggles via SetREMBStatus, and SetREMBData accepts
+// a bitrate with zero SSRCs.
+TEST_F(RtcpFormatRembTest, TestBasicAPI) {
+  EXPECT_FALSE(rtcp_sender_->REMB());
+  EXPECT_EQ(0, rtcp_sender_->SetREMBStatus(true));
+  EXPECT_TRUE(rtcp_sender_->REMB());
+  EXPECT_EQ(0, rtcp_sender_->SetREMBStatus(false));
+  EXPECT_FALSE(rtcp_sender_->REMB());
+
+  EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 0, NULL));
+}
+
+// A REMB message can be sent as a non-compound (reduced-size) RTCP packet;
+// the round trip through TestTransport verifies the bitrate (1234) arrives.
+// (Fixes misspelled test name: "TestNonCompund" -> "TestNonCompound".)
+TEST_F(RtcpFormatRembTest, TestNonCompound) {
+  WebRtc_UWord32 SSRC = 456789;
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpNonCompound));
+  EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 1, &SSRC));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
+}
+
+// A REMB message with two SSRCs can be sent inside a compound RTCP packet.
+// (Fixes misspelled test name: "TestCompund" -> "TestCompound".)
+TEST_F(RtcpFormatRembTest, TestCompound) {
+  WebRtc_UWord32 SSRCs[2] = {456789, 98765};
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 2, SSRCs));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
+}
+
+// Standalone gtest entry point for this unittest binary.
+// NOTE(review): this main() sits inside the unnamed namespace opened above,
+// so strictly it is not the global ::main the linker resolves -- confirm the
+// build supplies/links an entry point or move it outside the namespace.
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+
+  return RUN_ALL_TESTS();
+}
+
+} // namespace
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_receiver.cc b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver.cc
new file mode 100644
index 0000000..f4eca71
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -0,0 +1,1500 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_receiver.h"
+
+#include <string.h> //memset
+#include <cassert> //assert
+
+#include "trace.h"
+#include "critical_section_wrapper.h"
+#include "rtcp_utility.h"
+#include "rtp_rtcp_impl.h"
+
+namespace
+{
+    // 2^32 as a float: scale factor between seconds and the 32-bit NTP
+    // fractional-seconds field (used for NTP timestamp conversions).
+    const float FRAC = 4.294967296E9;
+}
+
+namespace webrtc {
+using namespace RTCPUtility;
+using namespace RTCPHelp;
+
+// Constructs the receiver. `clock` and `owner` are stored by reference, so
+// both must outlive this object. Two critical sections are created: one for
+// receiver state, one for the feedback callbacks.
+RTCPReceiver::RTCPReceiver(const WebRtc_Word32 id,
+                           RtpRtcpClock* clock,
+                           ModuleRtpRtcpImpl* owner) :
+    _id(id),
+    _clock(*clock),
+    _method(kRtcpOff),
+    _lastReceived(0),
+    _rtpRtcp(*owner),
+    _criticalSectionFeedbacks(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbRtcpFeedback(NULL),
+    _cbVideoFeedback(NULL),
+    _criticalSectionRTCPReceiver(
+        CriticalSectionWrapper::CreateCriticalSection()),
+    _SSRC(0),
+    _remoteSSRC(0),
+    _remoteSenderInfo(),
+    _lastReceivedSRNTPsecs(0),
+    _lastReceivedSRNTPfrac(0),
+    _receivedInfoMap(),
+    _packetTimeOutMS(0),
+    _rtt(0)
+{
+    // Explicitly zero the sender-info struct (value-init above plus memset
+    // guarantees a fully cleared POD on all compilers of this era).
+    memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+// Destructor: the three maps hold raw owning pointers, so each value is
+// deleted by hand before the map entry is erased.
+RTCPReceiver::~RTCPReceiver() {
+  delete _criticalSectionRTCPReceiver;
+  delete _criticalSectionFeedbacks;
+
+  while (!_receivedReportBlockMap.empty()) {
+    std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::iterator first =
+        _receivedReportBlockMap.begin();
+    delete first->second;
+    _receivedReportBlockMap.erase(first);
+  }
+  while (!_receivedInfoMap.empty()) {
+    std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator first =
+        _receivedInfoMap.begin();
+    delete first->second;
+    _receivedInfoMap.erase(first);
+  }
+  while (!_receivedCnameMap.empty()) {
+    std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator first =
+        _receivedCnameMap.begin();
+    delete first->second;
+    _receivedCnameMap.erase(first);
+  }
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id,
+               "%s deleted", __FUNCTION__);
+}
+
+// Replaces the id used for tracing. Unlocked write; callers are expected to
+// serialize this with other use (module-level convention).
+void
+RTCPReceiver::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+}
+
+// Returns the configured RTCP method (off/compound/non-compound).
+RTCPMethod
+RTCPReceiver::Status() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    return _method;
+}
+
+// Sets the RTCP method. Always succeeds (returns 0).
+WebRtc_Word32
+RTCPReceiver::SetRTCPStatus(const RTCPMethod method)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    _method = method;
+    return 0;
+}
+
+// Returns the local timestamp (ms) of the last incoming RTCP packet, as
+// recorded by IncomingRTCPPacket.
+WebRtc_UWord32
+RTCPReceiver::LastReceived()
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    return _lastReceived;
+}
+
+// Sets the expected remote sender SSRC. Any previously stored sender-report
+// state belongs to the old SSRC, so it is cleared first.
+WebRtc_Word32
+RTCPReceiver::SetRemoteSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+    // new SSRC reset old reports
+    memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
+    _lastReceivedSRNTPsecs = 0;
+    _lastReceivedSRNTPfrac = 0;
+
+    _remoteSSRC = ssrc;
+    return 0;
+}
+
+// Registers (or clears, with NULL) the RTCP feedback callback. Guarded by
+// the feedback lock, not the receiver lock.
+WebRtc_Word32
+RTCPReceiver::RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    _cbRtcpFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+// Registers (or clears, with NULL) the video feedback callback.
+WebRtc_Word32
+RTCPReceiver::RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    _cbVideoFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+// Sets our own SSRC, used to filter report blocks addressed to us.
+void
+RTCPReceiver::SetSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    _SSRC = ssrc;
+}
+
+// Zeroes the last/avg/min/max RTT statistics stored for `remoteSSRC`.
+// Returns -1 (with a trace) if no report-block info exists for that SSRC.
+WebRtc_Word32 RTCPReceiver::ResetRTT(const WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  RTCPReportBlockInformation* reportBlock =
+      GetReportBlockInformation(remoteSSRC);
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tfailed to GetReportBlockInformation(%u)", remoteSSRC);
+    return -1;
+  }
+  reportBlock->RTT = 0;
+  reportBlock->avgRTT = 0;
+  reportBlock->minRTT = 0;
+  reportBlock->maxRTT = 0;
+
+  return 0;
+}
+
+// Copies the RTT statistics for `remoteSSRC` into whichever of the four
+// out-parameters are non-NULL. Returns -1 if no report-block info exists
+// for that SSRC, otherwise 0.
+WebRtc_Word32 RTCPReceiver::RTT(const WebRtc_UWord32 remoteSSRC,
+                                WebRtc_UWord16* RTT,
+                                WebRtc_UWord16* avgRTT,
+                                WebRtc_UWord16* minRTT,
+                                WebRtc_UWord16* maxRTT) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  RTCPReportBlockInformation* reportBlock =
+      GetReportBlockInformation(remoteSSRC);
+
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tfailed to GetReportBlockInformation(%u)",
+                 remoteSSRC);
+    return -1;
+  }
+  if (RTT) {
+    *RTT = reportBlock->RTT;
+  }
+  if (avgRTT) {
+    *avgRTT = reportBlock->avgRTT;
+  }
+  if (minRTT) {
+    *minRTT = reportBlock->minRTT;
+  }
+  if (maxRTT) {
+    *maxRTT = reportBlock->maxRTT;
+  }
+  return 0;
+}
+
+// Returns the externally supplied RTT (see SetRTT). NOTE(review): once any
+// report blocks have been received this returns 0 rather than _rtt -- _rtt
+// appears to be a fallback for the no-reports case, mirroring the guard in
+// SetRTT; confirm callers expect 0 here instead of a measured RTT.
+WebRtc_UWord16 RTCPReceiver::RTT() const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  if (!_receivedReportBlockMap.empty()) {
+    return 0;
+  }
+  return _rtt;
+}
+
+// Stores an externally measured RTT. Rejected (-1) once report blocks have
+// been received, since measured per-SSRC RTTs take precedence.
+int RTCPReceiver::SetRTT(WebRtc_UWord16 rtt) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  if (!_receivedReportBlockMap.empty()) {
+    return -1;
+  }
+  _rtt = rtt;
+  return 0;
+}
+
+// Forwards an audio/video offset update to the registered RTCP feedback
+// callback, if any. Takes only the feedback lock since no receiver state
+// is touched. (Fix: removed the stray ';' after the function body, which
+// is an empty declaration at namespace scope -- ill-formed pre-C++11 and a
+// -pedantic warning otherwise.)
+void
+RTCPReceiver::UpdateLipSync(const WebRtc_Word32 audioVideoOffset) const
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    if(_cbRtcpFeedback)
+    {
+        _cbRtcpFeedback->OnLipSyncUpdate(_id,audioVideoOffset);
+    }
+}
+
+// Returns (via the non-NULL out-parameters) the NTP timestamp carried in
+// the last accepted sender report and the local NTP time at which that SR
+// arrived. Always returns 0.
+WebRtc_Word32
+RTCPReceiver::NTP(WebRtc_UWord32 *ReceivedNTPsecs,
+                  WebRtc_UWord32 *ReceivedNTPfrac,
+                  WebRtc_UWord32 *RTCPArrivalTimeSecs,
+                  WebRtc_UWord32 *RTCPArrivalTimeFrac) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    if(ReceivedNTPsecs)
+    {
+        *ReceivedNTPsecs = _remoteSenderInfo.NTPseconds; // NTP from incoming SendReport
+    }
+    if(ReceivedNTPfrac)
+    {
+        *ReceivedNTPfrac = _remoteSenderInfo.NTPfraction;
+    }
+    if(RTCPArrivalTimeFrac)
+    {
+        *RTCPArrivalTimeFrac = _lastReceivedSRNTPfrac; // local NTP time when we received a RTCP packet with a send block
+    }
+    if(RTCPArrivalTimeSecs)
+    {
+        *RTCPArrivalTimeSecs = _lastReceivedSRNTPsecs;
+    }
+    return 0;
+}
+
+// Copies the last accepted sender report into `senderInfo`. Returns -1 if
+// the argument is NULL or no SR has been received yet (detected via a zero
+// arrival timestamp).
+WebRtc_Word32
+RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const
+{
+    if(senderInfo == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -1;
+    }
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    if(_lastReceivedSRNTPsecs == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s No received SR", __FUNCTION__);
+        return -1;
+    }
+    memcpy(senderInfo, &(_remoteSenderInfo), sizeof(RTCPSenderInfo));
+    return 0;
+}
+
+// statistics
+// we can get multiple receive reports when we receive the report from a CE
+// Appends one RTCPReportBlock per known remote SSRC to `receiveBlocks`
+// (the vector is not cleared first). Always returns 0.
+WebRtc_Word32 RTCPReceiver::StatisticsReceived(
+    std::vector<RTCPReportBlock>* receiveBlocks) const {
+  assert(receiveBlocks);
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::const_iterator it =
+      _receivedReportBlockMap.begin();
+
+  while (it != _receivedReportBlockMap.end()) {
+    receiveBlocks->push_back(it->second->remoteReceiveBlock);
+    it++;
+  }
+  return 0;
+}
+
+// Main RTCP demultiplexer: walks the parsed packet stream and dispatches
+// each top-level RTCP packet type to its handler, accumulating results in
+// `rtcpPacketInformation`. Each handler (or the default branch) is
+// responsible for advancing the parser past its packet. Also timestamps
+// _lastReceived. Always returns 0; malformed trailing data simply ends the
+// iteration (parser reports kRtcpNotValidCode).
+WebRtc_Word32
+RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation,
+                                 RTCPUtility::RTCPParserV2* rtcpParser)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+    _lastReceived = _clock.GetTimeInMS();
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser->Begin();
+    while (pktType != RTCPUtility::kRtcpNotValidCode)
+    {
+        // Each "case" is responsible for iterate the parser to the
+        // next top level packet.
+        switch (pktType)
+        {
+        case RTCPUtility::kRtcpSrCode:
+        case RTCPUtility::kRtcpRrCode:
+            HandleSenderReceiverReport(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpSdesCode:
+            HandleSDES(*rtcpParser);
+            break;
+        case RTCPUtility::kRtcpXrVoipMetricCode:
+            HandleXRVOIPMetric(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpByeCode:
+            HandleBYE(*rtcpParser);
+            break;
+        case RTCPUtility::kRtcpRtpfbNackCode:
+            HandleNACK(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpRtpfbTmmbrCode:
+            HandleTMMBR(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpRtpfbTmmbnCode:
+            HandleTMMBN(*rtcpParser);
+            break;
+        case RTCPUtility::kRtcpRtpfbSrReqCode:
+            HandleSR_REQ(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbPliCode:
+            HandlePLI(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbSliCode:
+            HandleSLI(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbRpsiCode:
+            HandleRPSI(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpExtendedIjCode:
+            HandleIJ(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbFirCode:
+            HandleFIR(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbAppCode:
+            HandlePsfbApp(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpAppCode:
+            // generic application messages
+            HandleAPP(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpAppItemCode:
+            // generic application messages
+            HandleAPPItem(*rtcpParser, rtcpPacketInformation);
+            break;
+        default:
+            rtcpParser->Iterate();
+            break;
+        }
+        pktType = rtcpParser->PacketType();
+    }
+    return 0;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles an SR or RR: records sender info (only when the SR comes from the
+// expected _remoteSSRC), sets the SR/RR flags in rtcpPacketInformation,
+// refreshes the liveness timestamp for the sending SSRC, and then consumes
+// every attached report-block item.
+void
+RTCPReceiver::HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser,
+                                         RTCPPacketInformation& rtcpPacketInformation)
+{
+    RTCPUtility::RTCPPacketTypes rtcpPacketType = rtcpParser.PacketType();
+    const RTCPUtility::RTCPPacket& rtcpPacket   = rtcpParser.Packet();
+
+    assert((rtcpPacketType == RTCPUtility::kRtcpRrCode) || (rtcpPacketType == RTCPUtility::kRtcpSrCode));
+
+    // SR.SenderSSRC
+    // The synchronization source identifier for the originator of this SR packet
+
+    // rtcpPacket.RR.SenderSSRC
+    // The source of the packet sender, same as of SR? or is this a CE?
+
+    const WebRtc_UWord32 remoteSSRC = (rtcpPacketType == RTCPUtility::kRtcpRrCode) ? rtcpPacket.RR.SenderSSRC:rtcpPacket.SR.SenderSSRC;
+    const WebRtc_UWord8  numberOfReportBlocks = (rtcpPacketType == RTCPUtility::kRtcpRrCode) ? rtcpPacket.RR.NumberOfReportBlocks:rtcpPacket.SR.NumberOfReportBlocks;
+
+    rtcpPacketInformation.remoteSSRC = remoteSSRC;
+
+    RTCPReceiveInformation* ptrReceiveInfo = CreateReceiveInformation(remoteSSRC);
+    if (!ptrReceiveInfo)
+    {
+        rtcpParser.Iterate();
+        return;
+    }
+
+    if (rtcpPacketType == RTCPUtility::kRtcpSrCode)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, _id,
+            "Received SR(%d). SSRC:0x%x, from SSRC:0x%x, to us %d.", _id, _SSRC, remoteSSRC, (_remoteSSRC == remoteSSRC)?1:0);
+
+        if (_remoteSSRC == remoteSSRC) // have I received RTP packets from this party
+        {
+            // only signal that we have received a SR when we accept one
+            rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpSr;
+
+            // We will only store the send report from one source, but
+            // we will store all the receive block
+
+            // Save the NTP time of this report
+            _remoteSenderInfo.NTPseconds = rtcpPacket.SR.NTPMostSignificant;
+            _remoteSenderInfo.NTPfraction = rtcpPacket.SR.NTPLeastSignificant;
+            _remoteSenderInfo.RTPtimeStamp = rtcpPacket.SR.RTPTimestamp;
+            _remoteSenderInfo.sendPacketCount = rtcpPacket.SR.SenderPacketCount;
+            _remoteSenderInfo.sendOctetCount = rtcpPacket.SR.SenderOctetCount;
+
+            _clock.CurrentNTP(_lastReceivedSRNTPsecs, _lastReceivedSRNTPfrac);
+        }
+        else
+        {
+            // SR from an unexpected SSRC: treat it like an RR (its report
+            // blocks are still processed below).
+            rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRr;
+        }
+    } else
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, _id,
+            "Received RR(%d). SSRC:0x%x, from SSRC:0x%x", _id, _SSRC, remoteSSRC);
+
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRr;
+    }
+    UpdateReceiveInformation(*ptrReceiveInfo);
+
+    rtcpPacketType = rtcpParser.Iterate();
+
+    // One iteration per report block attached to this SR/RR.
+    while (rtcpPacketType == RTCPUtility::kRtcpReportBlockItemCode)
+    {
+        HandleReportBlock(rtcpPacket, rtcpPacketInformation, remoteSSRC, numberOfReportBlocks);
+        rtcpPacketType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Processes one report block addressed to our SSRC: stores the remote's
+// view of our stream, and estimates RTT per RFC 3550 (A.8):
+//   RTT = arrival time - DLSR - LSR
+// using the LSR/DLSR fields echoed back by the remote.
+void
+RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
+                                RTCPPacketInformation& rtcpPacketInformation,
+                                const WebRtc_UWord32 remoteSSRC,
+                                const WebRtc_UWord8 numberOfReportBlocks) {
+  // This will be called once per report block in the RTCP packet.
+  // We filter out all report blocks that are not for us.
+  // Each packet has max 31 RR blocks.
+  //
+  // We can calc RTT if we send a send report and get a report block back.
+
+  // |rtcpPacket.ReportBlockItem.SSRC| is the SSRC identifier of the source to
+  // which the information in this reception report block pertains.
+
+  // Filter out all report blocks that are not for us.
+  if (rtcpPacket.ReportBlockItem.SSRC != _SSRC) {
+    // This block is not for us ignore it.
+    return;
+  }
+
+  // To avoid problem with acquiring _criticalSectionRTCPSender while holding
+  // _criticalSectionRTCPReceiver.
+  _criticalSectionRTCPReceiver->Leave();
+  WebRtc_UWord32 sendTimeMS =
+      _rtpRtcp.SendTimeOfSendReport(rtcpPacket.ReportBlockItem.LastSR);
+  _criticalSectionRTCPReceiver->Enter();
+
+  RTCPReportBlockInformation* reportBlock =
+      CreateReportBlockInformation(remoteSSRC);
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tfailed to CreateReportBlockInformation(%u)", remoteSSRC);
+    return;
+  }
+  const RTCPPacketReportBlockItem& rb = rtcpPacket.ReportBlockItem;
+  reportBlock->remoteReceiveBlock.remoteSSRC = remoteSSRC;
+  reportBlock->remoteReceiveBlock.sourceSSRC = rb.SSRC;
+  reportBlock->remoteReceiveBlock.fractionLost = rb.FractionLost;
+  reportBlock->remoteReceiveBlock.cumulativeLost =
+      rb.CumulativeNumOfPacketsLost;
+  reportBlock->remoteReceiveBlock.extendedHighSeqNum =
+      rb.ExtendedHighestSequenceNumber;
+  reportBlock->remoteReceiveBlock.jitter = rb.Jitter;
+  reportBlock->remoteReceiveBlock.delaySinceLastSR = rb.DelayLastSR;
+  reportBlock->remoteReceiveBlock.lastSR = rb.LastSR;
+
+  if (rtcpPacket.ReportBlockItem.Jitter > reportBlock->remoteMaxJitter) {
+    reportBlock->remoteMaxJitter = rtcpPacket.ReportBlockItem.Jitter;
+  }
+
+  WebRtc_UWord32 delaySinceLastSendReport =
+      rtcpPacket.ReportBlockItem.DelayLastSR;
+
+  // local NTP time when we received this
+  WebRtc_UWord32 lastReceivedRRNTPsecs = 0;
+  WebRtc_UWord32 lastReceivedRRNTPfrac = 0;
+
+  _clock.CurrentNTP(lastReceivedRRNTPsecs, lastReceivedRRNTPfrac);
+
+  // time when we received this in MS
+  WebRtc_UWord32 receiveTimeMS = ModuleRTPUtility::ConvertNTPTimeToMS(
+      lastReceivedRRNTPsecs, lastReceivedRRNTPfrac);
+
+  // Estimate RTT
+  // DLSR is in units of 1/65536 s; convert both halves to milliseconds.
+  WebRtc_UWord32 d = (delaySinceLastSendReport & 0x0000ffff) * 1000;
+  d /= 65536;
+  d += ((delaySinceLastSendReport & 0xffff0000) >> 16) * 1000;
+
+  WebRtc_Word32 RTT = 0;
+
+  // sendTimeMS == 0 means the echoed LSR did not match one of our SRs,
+  // so no RTT sample can be formed for this block.
+  if (sendTimeMS > 0) {
+    RTT = receiveTimeMS - d - sendTimeMS;
+    // Clamp to 1 ms so a sample was visibly taken even under clock skew.
+    if (RTT <= 0) {
+      RTT = 1;
+    }
+    if (RTT > reportBlock->maxRTT) {
+      // store max RTT
+      reportBlock->maxRTT = (WebRtc_UWord16) RTT;
+    }
+    if (reportBlock->minRTT == 0) {
+      // first RTT
+      reportBlock->minRTT = (WebRtc_UWord16) RTT;
+    } else if (RTT < reportBlock->minRTT) {
+      // Store min RTT
+      reportBlock->minRTT = (WebRtc_UWord16) RTT;
+    }
+    // store last RTT
+    reportBlock->RTT = (WebRtc_UWord16) RTT;
+
+    // store average RTT
+    // Incremental mean: avg_{k+1} = (k*avg_k + RTT) / (k+1).
+    if (reportBlock->numAverageCalcs != 0) {
+      float ac = static_cast<float> (reportBlock->numAverageCalcs);
+      float newAverage = ((ac / (ac + 1)) * reportBlock->avgRTT)
+          + ((1 / (ac + 1)) * RTT);
+      reportBlock->avgRTT = static_cast<int> (newAverage + 0.5f);
+    } else {
+      // first RTT
+      reportBlock->avgRTT = (WebRtc_UWord16) RTT;
+    }
+    reportBlock->numAverageCalcs++;
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, _id,
+               " -> Received report block(%d), from SSRC:0x%x, RTT:%d, loss:%d",
+               _id, remoteSSRC, RTT, rtcpPacket.ReportBlockItem.FractionLost);
+
+  // rtcpPacketInformation
+  rtcpPacketInformation.AddReportInfo(
+      reportBlock->remoteReceiveBlock.fractionLost, (WebRtc_UWord16) RTT,
+      reportBlock->remoteReceiveBlock.extendedHighSeqNum,
+      reportBlock->remoteReceiveBlock.jitter);
+}
+
+// Returns the report-block info for `remoteSSRC`, creating (and owning) a
+// new entry in _receivedReportBlockMap if none exists. Never returns NULL.
+RTCPReportBlockInformation*
+RTCPReceiver::CreateReportBlockInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::iterator it =
+      _receivedReportBlockMap.find(remoteSSRC);
+
+  RTCPReportBlockInformation* ptrReportBlockInfo = NULL;
+  if (it != _receivedReportBlockMap.end()) {
+    ptrReportBlockInfo = it->second;
+  } else {
+    ptrReportBlockInfo = new RTCPReportBlockInformation;
+    _receivedReportBlockMap[remoteSSRC] = ptrReportBlockInfo;
+  }
+  return ptrReportBlockInfo;
+}
+
+// Lookup-only counterpart of CreateReportBlockInformation; returns NULL if
+// no entry exists for `remoteSSRC`.
+RTCPReportBlockInformation*
+RTCPReceiver::GetReportBlockInformation(WebRtc_UWord32 remoteSSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::const_iterator it =
+      _receivedReportBlockMap.find(remoteSSRC);
+
+  if (it == _receivedReportBlockMap.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+// Returns the CNAME info for `remoteSSRC`, creating a zero-initialized
+// entry in _receivedCnameMap if none exists. Never returns NULL.
+RTCPCnameInformation*
+RTCPReceiver::CreateCnameInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+      _receivedCnameMap.find(remoteSSRC);
+
+  if (it != _receivedCnameMap.end()) {
+    return it->second;
+  }
+  RTCPCnameInformation* cnameInfo = new RTCPCnameInformation;
+  memset(cnameInfo->name, 0, RTCP_CNAME_SIZE);
+  _receivedCnameMap[remoteSSRC] = cnameInfo;
+  return cnameInfo;
+}
+
+// Lookup-only counterpart of CreateCnameInformation; returns NULL if no
+// entry exists for `remoteSSRC`.
+RTCPCnameInformation*
+RTCPReceiver::GetCnameInformation(WebRtc_UWord32 remoteSSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::const_iterator it =
+      _receivedCnameMap.find(remoteSSRC);
+
+  if (it == _receivedCnameMap.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+// Returns the receive info for `remoteSSRC`, creating an entry in
+// _receivedInfoMap if none exists. Never returns NULL.
+RTCPReceiveInformation*
+RTCPReceiver::CreateReceiveInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator it =
+      _receivedInfoMap.find(remoteSSRC);
+
+  if (it != _receivedInfoMap.end()) {
+    return it->second;
+  }
+  RTCPReceiveInformation* receiveInfo = new RTCPReceiveInformation;
+  _receivedInfoMap[remoteSSRC] = receiveInfo;
+  return receiveInfo;
+}
+
+// Lookup-only counterpart of CreateReceiveInformation; returns NULL if no
+// entry exists for `remoteSSRC`.
+RTCPReceiveInformation*
+RTCPReceiver::GetReceiveInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator it =
+      _receivedInfoMap.find(remoteSSRC);
+  if (it == _receivedInfoMap.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+// Marks the remote as alive now; UpdateRTCPReceiveInformationTimers uses
+// this timestamp to expire stale peers.
+void RTCPReceiver::UpdateReceiveInformation(
+    RTCPReceiveInformation& receiveInformation) {
+  // Update that this remote is alive
+  receiveInformation.lastTimeReceived = _clock.GetTimeInMS();
+}
+
+// Periodic sweep of _receivedInfoMap: peers silent for five RTCP audio
+// intervals get their TMMBR limits dropped (and the caller is told, via the
+// return value, to send a new TMMBN); entries already timed out and marked
+// readyForDelete are destroyed. Returns true if the bounding set must be
+// recomputed.
+// NOTE(review): a NULL map value causes an early return that skips the
+// remaining entries this round -- presumably treated as "cannot happen";
+// confirm.
+bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  bool updateBoundingSet = false;
+  WebRtc_UWord32 timeNow = _clock.GetTimeInMS();
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator receiveInfoIt =
+      _receivedInfoMap.begin();
+
+  while (receiveInfoIt != _receivedInfoMap.end()) {
+    RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+    if (receiveInfo == NULL) {
+      return updateBoundingSet;
+    }
+    // time since last received rtcp packet
+    // when we dont have a lastTimeReceived and the object is marked
+    // readyForDelete it's removed from the map
+    if (receiveInfo->lastTimeReceived) {
+      /// use audio define since we don't know what interval the remote peer is
+      // using
+      if ((timeNow - receiveInfo->lastTimeReceived) >
+          5 * RTCP_INTERVAL_AUDIO_MS) {
+        // no rtcp packet for the last five regular intervals, reset limitations
+        receiveInfo->TmmbrSet.lengthOfSet = 0;
+        // prevent that we call this over and over again
+        receiveInfo->lastTimeReceived = 0;
+        // send new TMMBN to all channels using the default codec
+        updateBoundingSet = true;
+      }
+      receiveInfoIt++;
+    } else if (receiveInfo->readyForDelete) {
+      // store our current receiveInfoItem
+      // (advance the iterator before erasing so it stays valid)
+      std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator
+      receiveInfoItemToBeErased = receiveInfoIt;
+      receiveInfoIt++;
+      delete receiveInfoItemToBeErased->second;
+      _receivedInfoMap.erase(receiveInfoItemToBeErased);
+    } else {
+      receiveInfoIt++;
+    }
+  }
+  return updateBoundingSet;
+}
+
+// Copies the TMMBN bounding set received from _remoteSSRC into
+// `boundingSetRec` and sets `tmmbrOwner` if our SSRC appears in it.
+// Returns the set length, or -1 if no receive info exists for the remote.
+WebRtc_Word32 RTCPReceiver::BoundingSet(bool &tmmbrOwner,
+                                        TMMBRSet*& boundingSetRec) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator receiveInfoIt =
+      _receivedInfoMap.find(_remoteSSRC);
+
+  if (receiveInfoIt == _receivedInfoMap.end()) {
+    return -1;
+  }
+  RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+  if (receiveInfo == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s failed to get RTCPReceiveInformation",
+                 __FUNCTION__);
+    return -1;
+  }
+  if (receiveInfo->TmmbnBoundingSet.lengthOfSet > 0) {
+    // Allocate one extra slot; only lengthOfSet entries are copied here
+    // (the spare slot is presumably for the caller to append its own
+    // candidate -- confirm against callers).
+    boundingSetRec->VerifyAndAllocateSet(
+        receiveInfo->TmmbnBoundingSet.lengthOfSet + 1);
+    for(WebRtc_UWord32 i=0; i< receiveInfo->TmmbnBoundingSet.lengthOfSet; i++) {
+      if(receiveInfo->TmmbnBoundingSet.ptrSsrcSet[i] == _SSRC) {
+        // owner of bounding set
+        tmmbrOwner = true;
+      }
+      boundingSetRec->ptrTmmbrSet[i] =
+          receiveInfo->TmmbnBoundingSet.ptrTmmbrSet[i];
+      boundingSetRec->ptrPacketOHSet[i] =
+          receiveInfo->TmmbnBoundingSet.ptrPacketOHSet[i];
+      boundingSetRec->ptrSsrcSet[i] =
+          receiveInfo->TmmbnBoundingSet.ptrSsrcSet[i];
+    }
+  }
+  return receiveInfo->TmmbnBoundingSet.lengthOfSet;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Consumes every SDES chunk in the current SDES packet.
+void
+RTCPReceiver::HandleSDES(RTCPUtility::RTCPParserV2& rtcpParser)
+{
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpSdesChunkCode)
+    {
+        HandleSDESChunk(rtcpParser);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Stores the CNAME carried by this SDES chunk, keyed by the sender's SSRC.
+void RTCPReceiver::HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+  // Looks up or creates the CNAME entry for this SSRC.
+  RTCPCnameInformation* cnameInfo =
+      CreateCnameInformation(rtcpPacket.CName.SenderSSRC);
+  assert(cnameInfo);
+
+  // Write the terminator first: strncpy below copies at most
+  // RTCP_CNAME_SIZE - 1 bytes, so the final byte is never overwritten and
+  // the stored name is always NUL-terminated.
+  cnameInfo->name[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(cnameInfo->name, rtcpPacket.CName.CName, RTCP_CNAME_SIZE - 1);
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Processes an RTPFB NACK packet: verifies the sender is known and that the
+// media SSRC addresses us, clears any previously accumulated NACK ids, then
+// consumes every NACK item via HandleNACKItem.
+void
+RTCPReceiver::HandleNACK(RTCPUtility::RTCPParserV2& rtcpParser,
+                         RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(rtcpPacket.NACK.SenderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+    if (_SSRC != rtcpPacket.NACK.MediaSSRC)
+    {
+        // Not to us.
+        rtcpParser.Iterate();
+        return;
+    }
+
+    // Start a fresh list of NACKed sequence numbers for this packet.
+    rtcpPacketInformation.ResetNACKPacketIdArray();
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpRtpfbNackItemCode)
+    {
+        HandleNACKItem(rtcpPacket, rtcpPacketInformation);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// Records the NACKed packet id plus every additional packet flagged in the
+// 16-bit bitmask that follows it (bit k set means packet id + k is lost),
+// then marks the packet info as carrying a NACK.
+// Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandleNACKItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                             RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.AddNACKPacket(rtcpPacket.NACKItem.PacketID);
+
+    WebRtc_UWord16 remainingBits = rtcpPacket.NACKItem.BitMask;
+    int offset = 1;
+    while (remainingBits != 0)
+    {
+        if (remainingBits & 0x01)
+        {
+            rtcpPacketInformation.AddNACKPacket(rtcpPacket.NACKItem.PacketID + offset);
+        }
+        remainingBits = remainingBits >> 1;
+        ++offset;
+    }
+
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpNack;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles an RTCP BYE: drops the stored report-block and CNAME state for the
+// departing SSRC, and flags its receive information for deferred deletion.
+void RTCPReceiver::HandleBYE(RTCPUtility::RTCPParserV2& rtcpParser) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+  // clear our lists
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::iterator
+      reportBlockInfoIt = _receivedReportBlockMap.find(
+          rtcpPacket.BYE.SenderSSRC);
+
+  if (reportBlockInfoIt != _receivedReportBlockMap.end()) {
+    delete reportBlockInfoIt->second;
+    _receivedReportBlockMap.erase(reportBlockInfoIt);
+  }
+  // Receive information cannot be deleted immediately because TMMBR state
+  // still references it; it is only marked and reaped later.
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator receiveInfoIt =
+      _receivedInfoMap.find(rtcpPacket.BYE.SenderSSRC);
+
+  if (receiveInfoIt != _receivedInfoMap.end()) {
+    receiveInfoIt->second->readyForDelete = true;
+  }
+
+  // The CNAME entry, by contrast, can be freed right away.
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator cnameInfoIt =
+      _receivedCnameMap.find(rtcpPacket.BYE.SenderSSRC);
+
+  if (cnameInfoIt != _receivedCnameMap.end()) {
+    delete cnameInfoIt->second;
+    _receivedCnameMap.erase(cnameInfoIt);
+  }
+  rtcpParser.Iterate();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles an XR VoIP-metrics block: if the block is addressed to our SSRC,
+// copies every metric field into a local RTCPVoIPMetric and attaches it to
+// the packet information, flagging kRtcpXrVoipMetric.
+void
+RTCPReceiver::HandleXRVOIPMetric(RTCPUtility::RTCPParserV2& rtcpParser,
+                                 RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+    if(rtcpPacket.XRVOIPMetricItem.SSRC == _SSRC)
+    {
+        // Store VoIP metrics block if it's about me
+        // from OriginatorSSRC do we filter it?
+        // rtcpPacket.XRVOIPMetricItem ignores rtcpPacket.XR.OriginatorSSRC.
+
+        // Field-by-field copy of the received metrics.
+        RTCPVoIPMetric receivedVoIPMetrics;
+        receivedVoIPMetrics.burstDensity = rtcpPacket.XRVOIPMetricItem.burstDensity;
+        receivedVoIPMetrics.burstDuration = rtcpPacket.XRVOIPMetricItem.burstDuration;
+        receivedVoIPMetrics.discardRate = rtcpPacket.XRVOIPMetricItem.discardRate;
+        receivedVoIPMetrics.endSystemDelay = rtcpPacket.XRVOIPMetricItem.endSystemDelay;
+        receivedVoIPMetrics.extRfactor = rtcpPacket.XRVOIPMetricItem.extRfactor;
+        receivedVoIPMetrics.gapDensity = rtcpPacket.XRVOIPMetricItem.gapDensity;
+        receivedVoIPMetrics.gapDuration = rtcpPacket.XRVOIPMetricItem.gapDuration;
+        receivedVoIPMetrics.Gmin = rtcpPacket.XRVOIPMetricItem.Gmin;
+        receivedVoIPMetrics.JBabsMax = rtcpPacket.XRVOIPMetricItem.JBabsMax;
+        receivedVoIPMetrics.JBmax = rtcpPacket.XRVOIPMetricItem.JBmax;
+        receivedVoIPMetrics.JBnominal = rtcpPacket.XRVOIPMetricItem.JBnominal;
+        receivedVoIPMetrics.lossRate = rtcpPacket.XRVOIPMetricItem.lossRate;
+        receivedVoIPMetrics.MOSCQ = rtcpPacket.XRVOIPMetricItem.MOSCQ;
+        receivedVoIPMetrics.MOSLQ = rtcpPacket.XRVOIPMetricItem.MOSLQ;
+        receivedVoIPMetrics.noiseLevel = rtcpPacket.XRVOIPMetricItem.noiseLevel;
+        receivedVoIPMetrics.RERL = rtcpPacket.XRVOIPMetricItem.RERL;
+        receivedVoIPMetrics.Rfactor = rtcpPacket.XRVOIPMetricItem.Rfactor;
+        receivedVoIPMetrics.roundTripDelay = rtcpPacket.XRVOIPMetricItem.roundTripDelay;
+        receivedVoIPMetrics.RXconfig = rtcpPacket.XRVOIPMetricItem.RXconfig;
+        receivedVoIPMetrics.signalLevel = rtcpPacket.XRVOIPMetricItem.signalLevel;
+
+        // AddVoIPMetric copies the struct; the local can safely go out of scope.
+        rtcpPacketInformation.AddVoIPMetric(&receivedVoIPMetrics);
+
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpXrVoipMetric; // received signal
+    }
+    rtcpParser.Iterate();
+}
+
+// Handles a Picture Loss Indication. The PLI is honored only when it comes
+// from a sender we already track and its media SSRC addresses us; in that
+// case kRtcpPli is raised so a new key frame gets requested later.
+// Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandlePLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& packet = rtcpParser.Packet();
+
+    // The remote SSRC must have been registered before, and the request
+    // must target our own media SSRC; otherwise the packet is ignored.
+    const bool senderKnown =
+        (GetReceiveInformation(packet.PLI.SenderSSRC) != NULL);
+    if (senderKnown && _SSRC == packet.PLI.MediaSSRC)
+    {
+        // Received signal that we need to send a new key frame.
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpPli;
+    }
+    rtcpParser.Iterate();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles a Temporary Maximum Media Stream Bit Rate Request: determines the
+// effective sender SSRC (the media SSRC in relay mode), sizes the TMMBR set
+// from the remaining packet length, and consumes every TMMBR item.
+void
+RTCPReceiver::HandleTMMBR(RTCPUtility::RTCPParserV2& rtcpParser,
+                          RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    WebRtc_UWord32 senderSSRC = rtcpPacket.TMMBR.SenderSSRC;
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(senderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+    if(rtcpPacket.TMMBR.MediaSSRC)
+    {
+        // rtcpPacket.TMMBR.MediaSSRC SHOULD be 0 if same as SenderSSRC
+        // in relay mode this is a valid number
+        senderSSRC = rtcpPacket.TMMBR.MediaSSRC;
+    }
+
+    // Use packet length to calc max number of TMMBR blocks
+    // each TMMBR block is 8 bytes
+    ptrdiff_t maxNumOfTMMBRBlocks = rtcpParser.LengthLeft() / 8;
+
+    // Sanity bound: a single RTCP packet cannot legitimately carry more.
+    if(maxNumOfTMMBRBlocks > 200) // we can't have more than what's in one packet
+    {
+        assert(false);
+        rtcpParser.Iterate();
+        return;
+    }
+    // Pre-size the per-sender TMMBR set before the item loop fills it.
+    ptrReceiveInfo->VerifyAndAllocateTMMBRSet((WebRtc_UWord32)maxNumOfTMMBRBlocks);
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpRtpfbTmmbrItemCode)
+    {
+        HandleTMMBRItem(*ptrReceiveInfo, rtcpPacket, rtcpPacketInformation, senderSSRC);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// Stores a single TMMBR item, timestamped with the current clock time, when
+// it is addressed to our SSRC and carries a positive bitrate; otherwise the
+// item is ignored. Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandleTMMBRItem(RTCPReceiveInformation& receiveInfo,
+                              const RTCPUtility::RTCPPacket& rtcpPacket,
+                              RTCPPacketInformation& rtcpPacketInformation,
+                              const WebRtc_UWord32 senderSSRC)
+{
+    // Not addressed to us: nothing to record.
+    if (_SSRC != rtcpPacket.TMMBRItem.SSRC)
+    {
+        return;
+    }
+    // A non-positive bitrate request is ignored as well.
+    if (!(rtcpPacket.TMMBRItem.MaxTotalMediaBitRate > 0))
+    {
+        return;
+    }
+    receiveInfo.InsertTMMBRItem(senderSSRC, rtcpPacket.TMMBRItem,
+                                _clock.GetTimeInMS());
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpTmmbr;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles a TMMBN (bounding set notification): sizes the per-sender bounding
+// set from the remaining packet length and appends every TMMBN item to it.
+void
+RTCPReceiver::HandleTMMBN(RTCPUtility::RTCPParserV2& rtcpParser)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(rtcpPacket.TMMBN.SenderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+    // Use packet length to calc max number of TMMBN blocks
+    // each TMMBN block is 8 bytes
+    ptrdiff_t maxNumOfTMMBNBlocks = rtcpParser.LengthLeft() / 8;
+
+    // Sanity bound: a single RTCP packet cannot legitimately carry more.
+    if(maxNumOfTMMBNBlocks > 200) // we cant have more than what's in one packet
+    {
+        assert(false);
+        rtcpParser.Iterate();
+        return;
+    }
+
+    // Pre-size the bounding set before HandleTMMBNItem appends entries.
+    ptrReceiveInfo->VerifyAndAllocateBoundingSet((WebRtc_UWord32)maxNumOfTMMBNBlocks);
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpRtpfbTmmbnItemCode)
+    {
+        HandleTMMBNItem(*ptrReceiveInfo, rtcpPacket);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Flags that the remote end requested a sender report from us; the request
+// is acted upon later in TriggerCallbacksFromRTCPPacket (OnRequestSendReport).
+void
+RTCPReceiver::HandleSR_REQ(RTCPUtility::RTCPParserV2& rtcpParser,
+                           RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpSrReq;
+    rtcpParser.Iterate();
+}
+
+// Appends one TMMBN entry (SSRC, bitrate, measured overhead) at the end of
+// the stored bounding set and bumps its length. The set must already have
+// been sized by VerifyAndAllocateBoundingSet in HandleTMMBN.
+// Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandleTMMBNItem(RTCPReceiveInformation& receiveInfo,
+                              const RTCPUtility::RTCPPacket& rtcpPacket)
+{
+    const unsigned int slot = receiveInfo.TmmbnBoundingSet.lengthOfSet;
+
+    receiveInfo.TmmbnBoundingSet.ptrSsrcSet[slot] = rtcpPacket.TMMBNItem.SSRC;
+    receiveInfo.TmmbnBoundingSet.ptrTmmbrSet[slot] =
+        rtcpPacket.TMMBNItem.MaxTotalMediaBitRate;
+    receiveInfo.TmmbnBoundingSet.ptrPacketOHSet[slot] =
+        rtcpPacket.TMMBNItem.MeasuredOverhead;
+
+    receiveInfo.TmmbnBoundingSet.lengthOfSet = slot + 1;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles a Slice Loss Indication: verifies the sender is known, then
+// consumes every SLI item via HandleSLIItem.
+void
+RTCPReceiver::HandleSLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(rtcpPacket.SLI.SenderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpPsfbSliItemCode)
+    {
+        HandleSLIItem(rtcpPacket, rtcpPacketInformation);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// Records a single SLI item. In theory several slices can be lost; only the
+// most recent picture id is kept, and kRtcpSli signals that a slice refresh
+// is needed. Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandleSLIItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                            RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.sliPictureId = rtcpPacket.SLIItem.PictureId;
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpSli; // received signal that we need to refresh a slice
+}
+
+// Handles a Reference Picture Selection Indication: decodes the native bit
+// string (7 bits per byte, MSB is a continuation marker) into rpsiPictureId
+// and raises kRtcpRpsi. Only bit strings that are a non-zero whole number of
+// bytes are understood; anything else is skipped.
+// Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandleRPSI(RTCPUtility::RTCPParserV2& rtcpParser,
+                         RTCPHelp::RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(rtcpPacket.RPSI.SenderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    if(pktType == RTCPUtility::kRtcpPsfbRpsiCode)
+    {
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRpsi; // received signal that we have a confirmed reference picture
+        // BUGFIX: also reject an empty bit string. With NumberOfValidBits == 0
+        // the modulo test alone passes, numberOfBytes becomes 0, and the final
+        // NativeBitString[numberOfBytes - 1] access below reads index -1.
+        if(rtcpPacket.RPSI.NumberOfValidBits == 0 ||
+           rtcpPacket.RPSI.NumberOfValidBits%8 != 0)
+        {
+            // to us unknown
+            // continue
+            rtcpParser.Iterate();
+            return;
+        }
+        rtcpPacketInformation.rpsiPictureId = 0;
+
+        // convert NativeBitString to rpsiPictureId:
+        // accumulate 7 payload bits per byte, most significant group first.
+        WebRtc_UWord8 numberOfBytes = rtcpPacket.RPSI.NumberOfValidBits /8;
+        for(WebRtc_UWord8 n = 0; n < (numberOfBytes-1); n++)
+        {
+            rtcpPacketInformation.rpsiPictureId += (rtcpPacket.RPSI.NativeBitString[n] & 0x7f);
+            rtcpPacketInformation.rpsiPictureId <<= 7; // prepare next
+        }
+        rtcpPacketInformation.rpsiPictureId += (rtcpPacket.RPSI.NativeBitString[numberOfBytes-1] & 0x7f);
+    }
+}
+
+// Handles an application-layer PSFB message. Only the REMB variant is
+// understood; any other item type is left untouched for the caller's parse
+// loop. Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandlePsfbApp(RTCPUtility::RTCPParserV2& rtcpParser,
+                            RTCPPacketInformation& rtcpPacketInformation)
+{
+    if (rtcpParser.Iterate() != RTCPUtility::kRtcpPsfbRembItemCode)
+    {
+        return;
+    }
+    HandleREMBItem(rtcpParser, rtcpPacketInformation);
+    rtcpParser.Iterate();
+}
+
+// Consumes every item of an extended inter-arrival jitter report, delegating
+// each one to HandleIJItem. Caller already holds _criticalSectionRTCPReceiver.
+void
+RTCPReceiver::HandleIJ(RTCPUtility::RTCPParserV2& rtcpParser,
+                       RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& packet = rtcpParser.Packet();
+
+    for (RTCPUtility::RTCPPacketTypes itemType = rtcpParser.Iterate();
+         itemType == RTCPUtility::kRtcpExtendedIjItemCode;
+         itemType = rtcpParser.Iterate())
+    {
+        HandleIJItem(packet, rtcpPacketInformation);
+    }
+}
+
+// Stores the inter-arrival jitter carried by one extended jitter report item.
+// NOTE(review): this raises kRtcpTransmissionTimeOffset rather than a
+// jitter-specific flag — confirm against the consumers of
+// rtcpPacketTypeFlags that this is the intended signal.
+void
+RTCPReceiver::HandleIJItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                           RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpTransmissionTimeOffset;
+    rtcpPacketInformation.interArrivalJitter =
+    rtcpPacket.ExtendedJitterReportItem.Jitter;
+}
+
+// Reads a Receiver Estimated Max Bitrate item: advances the parser to the
+// item itself, stores the advertised bitrate, and raises kRtcpRemb.
+void
+RTCPReceiver::HandleREMBItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                             RTCPPacketInformation& rtcpPacketInformation)
+{
+    // Step onto the REMB item before reading its fields.
+    rtcpParser.Iterate();
+    const RTCPUtility::RTCPPacket& packet = rtcpParser.Packet();
+
+    rtcpPacketInformation.receiverEstimatedMaxBitrate = packet.REMB.BitRate;
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRemb;
+    // TODO(pwestin) send up SSRCs and do a sanity check
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles a Full Intra Request: verifies the sender is known, then consumes
+// every FIR item via HandleFIRItem.
+void
+RTCPReceiver::HandleFIR(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(rtcpPacket.FIR.SenderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpPsfbFirItemCode)
+    {
+        HandleFIRItem(*ptrReceiveInfo, rtcpPacket, rtcpPacketInformation);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+// Handles one FIR item: if it addresses our sender SSRC, carries a sequence
+// number we have not acted on, and enough time has passed since the last
+// accepted FIR, raise kRtcpFir so a new key frame is generated.
+void
+RTCPReceiver::HandleFIRItem(RTCPReceiveInformation& receiveInfo,
+                            const RTCPUtility::RTCPPacket& rtcpPacket,
+                            RTCPPacketInformation& rtcpPacketInformation)
+{
+    if (_SSRC == rtcpPacket.FIRItem.SSRC) // is it our sender that is requested to generate a new keyframe
+    {
+        // rtcpPacket.FIR.MediaSSRC SHOULD be 0 but we ignore to check it
+        // we don't know who this originated from
+
+        // Duplicate suppression: skip sequence numbers we already acted on.
+        if (rtcpPacket.FIRItem.CommandSequenceNumber != receiveInfo.lastFIRSequenceNumber)
+        {
+            WebRtc_UWord32 now = _clock.GetTimeInMS();
+
+            // Rate-limit the callbacks; the unsigned subtraction stays
+            // correct across clock wrap-around.
+            if( (now - receiveInfo.lastFIRRequest) > RTCP_MIN_FRAME_LENGTH_MS)
+            {
+                receiveInfo.lastFIRRequest = now;
+                receiveInfo.lastFIRSequenceNumber = rtcpPacket.FIRItem.CommandSequenceNumber;
+
+                rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpFir; // received signal that we need to send a new key frame
+            }
+        }
+    }
+}
+
+// Records the header fields (subtype and name) of an RTCP APP packet and
+// raises kRtcpApp; the payload itself arrives via HandleAPPItem.
+void
+RTCPReceiver::HandleAPP(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& packet = rtcpParser.Packet();
+
+    rtcpPacketInformation.applicationSubType = packet.APP.SubType;
+    rtcpPacketInformation.applicationName = packet.APP.Name;
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpApp;
+
+    rtcpParser.Iterate();
+}
+
+// Appends the payload bytes of an APP item to the accumulated application
+// data in the packet information.
+void
+RTCPReceiver::HandleAPPItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                           RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& packet = rtcpParser.Packet();
+
+    rtcpPacketInformation.AddApplicationData(packet.APP.Data, packet.APP.Size);
+
+    rtcpParser.Iterate();
+}
+
+// Forwards an intra-frame (key frame) request to the registered video
+// feedback callback, if any.
+void
+RTCPReceiver::OnReceivedIntraFrameRequest(const FrameType frameType,
+                                          const WebRtc_UWord8 streamIdx) const
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+
+    if (_cbVideoFeedback == NULL)
+    {
+        // No observer registered; nothing to forward.
+        return;
+    }
+    _cbVideoFeedback->OnReceivedIntraFrameRequest(_id, frameType, streamIdx);
+}
+
+// Forwards a received SLI picture id to the registered RTCP feedback
+// callback, if any.
+void
+RTCPReceiver::OnReceivedSliceLossIndication(const WebRtc_UWord8 pitureID) const
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+
+    if (_cbRtcpFeedback == NULL)
+    {
+        // No observer registered; nothing to forward.
+        return;
+    }
+    _cbRtcpFeedback->OnSLIReceived(_id, pitureID);
+}
+
+// Forwards a received RPSI picture id to the registered RTCP feedback
+// callback, if any.
+void
+RTCPReceiver::OnReceivedReferencePictureSelectionIndication(const WebRtc_UWord64 pitureID) const
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+
+    if (_cbRtcpFeedback == NULL)
+    {
+        // No observer registered; nothing to forward.
+        return;
+    }
+    _cbRtcpFeedback->OnRPSIReceived(_id, pitureID);
+}
+
+// Holding no Critical section
+// Dispatches the flags accumulated in |rtcpPacketInformation| to the owning
+// module and the registered RTCP feedback callback. TMMBR/REMB are handled
+// first to avoid multiple OnNetworkChanged-style callbacks; the feedback
+// callbacks at the end run under _criticalSectionFeedbacks only.
+void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
+    RTCPPacketInformation& rtcpPacketInformation)
+{
+    // Process TMMBR and REMB first to avoid multiple callbacks
+    // to OnNetworkChanged.
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpTmmbr)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                     "SIG [RTCP] Incoming TMMBR to id:%d", _id);
+
+        // Might trigger a OnReceivedBandwidthEstimateUpdate.
+        _rtpRtcp.OnReceivedTMMBR();
+    }
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                     "SIG [RTCP] Incoming REMB to id:%d", _id);
+
+        // We need to bounce this to the default channel.
+        _rtpRtcp.OnReceivedEstimatedMaxBitrate(
+            rtcpPacketInformation.receiverEstimatedMaxBitrate);
+    }
+    // Any report (SR or RR) that carried a report block updates loss stats.
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr ||
+        rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRr)
+    {
+        if (rtcpPacketInformation.reportBlock)
+        {
+            _rtpRtcp.OnPacketLossStatisticsUpdate(
+                rtcpPacketInformation.fractionLost,
+                rtcpPacketInformation.roundTripTime,
+                rtcpPacketInformation.lastReceivedExtendedHighSeqNum);
+        }
+    }
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr)
+    {
+        _rtpRtcp.OnReceivedNTP();
+    }
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSrReq)
+    {
+        _rtpRtcp.OnRequestSendReport();
+    }
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack)
+    {
+        if (rtcpPacketInformation.nackSequenceNumbersLength > 0)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                         "SIG [RTCP] Incoming NACK to id:%d", _id);
+            _rtpRtcp.OnReceivedNACK(
+                rtcpPacketInformation.nackSequenceNumbersLength,
+                rtcpPacketInformation.nackSequenceNumbers);
+        }
+    }
+    // PLI and FIR both result in the same intra-frame request upstream; only
+    // the trace message distinguishes them.
+    if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) ||
+        (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpFir))
+    {
+        if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                         "SIG [RTCP] Incoming PLI to id:%d", _id);
+        } else
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                         "SIG [RTCP] Incoming FIR to id:%d", _id);
+        }
+        _rtpRtcp.OnReceivedIntraFrameRequest(&_rtpRtcp);
+    }
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli)
+    {
+        // Bounced up so the default channel can handle it.
+        _rtpRtcp.OnReceivedSliceLossIndication(
+            rtcpPacketInformation.sliPictureId);
+    }
+    if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi)
+    {
+        // Bounced up so the default channel can handle it.
+        _rtpRtcp.OnReceivedReferencePictureSelectionIndication(
+            rtcpPacketInformation.rpsiPictureId);
+    }
+    {
+        CriticalSectionScoped lock(_criticalSectionFeedbacks);
+
+        // we need a feedback that we have received a report block(s) so that we can generate a new packet
+        // in a conference relay scenario, one received report can generate several RTCP packets, based
+        // on number relayed/mixed
+        // a send report block should go out to all receivers
+        if(_cbRtcpFeedback)
+        {
+            if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr)
+            {
+                _cbRtcpFeedback->OnSendReportReceived(_id, rtcpPacketInformation.remoteSSRC);
+            } else
+            {
+                _cbRtcpFeedback->OnReceiveReportReceived(_id, rtcpPacketInformation.remoteSSRC);
+            }
+            if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb)
+            {
+                _cbRtcpFeedback->OnReceiverEstimatedMaxBitrateReceived(_id,
+                    rtcpPacketInformation.receiverEstimatedMaxBitrate);
+            }
+            if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpXrVoipMetric)
+            {
+                // Re-serialize the VoIP metrics into their 28-byte wire image
+                // (7 x 32-bit words) for the callback.
+                WebRtc_Word8 VoIPmetricBuffer[7*4];
+                VoIPmetricBuffer[0] = rtcpPacketInformation.VoIPMetric->lossRate;
+                VoIPmetricBuffer[1] = rtcpPacketInformation.VoIPMetric->discardRate;
+                VoIPmetricBuffer[2] = rtcpPacketInformation.VoIPMetric->burstDensity;
+                VoIPmetricBuffer[3] = rtcpPacketInformation.VoIPMetric->gapDensity;
+
+                // 16-bit fields are stored big-endian: high byte first.
+                VoIPmetricBuffer[4] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->burstDuration >> 8);
+                VoIPmetricBuffer[5] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->burstDuration);
+                VoIPmetricBuffer[6] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->gapDuration >> 8);
+                VoIPmetricBuffer[7] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->gapDuration);
+
+                VoIPmetricBuffer[8] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->roundTripDelay >> 8);
+                VoIPmetricBuffer[9] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->roundTripDelay);
+                VoIPmetricBuffer[10] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->endSystemDelay >> 8);
+                VoIPmetricBuffer[11] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->endSystemDelay);
+
+                VoIPmetricBuffer[12] = rtcpPacketInformation.VoIPMetric->signalLevel;
+                VoIPmetricBuffer[13] = rtcpPacketInformation.VoIPMetric->noiseLevel;
+                VoIPmetricBuffer[14] = rtcpPacketInformation.VoIPMetric->RERL;
+                VoIPmetricBuffer[15] = rtcpPacketInformation.VoIPMetric->Gmin;
+
+                VoIPmetricBuffer[16] = rtcpPacketInformation.VoIPMetric->Rfactor;
+                VoIPmetricBuffer[17] = rtcpPacketInformation.VoIPMetric->extRfactor;
+                VoIPmetricBuffer[18] = rtcpPacketInformation.VoIPMetric->MOSLQ;
+                VoIPmetricBuffer[19] = rtcpPacketInformation.VoIPMetric->MOSCQ;
+
+                VoIPmetricBuffer[20] = rtcpPacketInformation.VoIPMetric->RXconfig;
+                VoIPmetricBuffer[21] = 0; // reserved
+                VoIPmetricBuffer[22] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBnominal >> 8);
+                VoIPmetricBuffer[23] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBnominal);
+
+                VoIPmetricBuffer[24] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBmax >> 8);
+                VoIPmetricBuffer[25] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBmax);
+                VoIPmetricBuffer[26] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBabsMax >> 8);
+                VoIPmetricBuffer[27] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBabsMax);
+
+                _cbRtcpFeedback->OnXRVoIPMetricReceived(_id, rtcpPacketInformation.VoIPMetric, VoIPmetricBuffer);
+            }
+            if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpApp)
+            {
+                _cbRtcpFeedback->OnApplicationDataReceived(_id,
+                                                           rtcpPacketInformation.applicationSubType,
+                                                           rtcpPacketInformation.applicationName,
+                                                           rtcpPacketInformation.applicationLength,
+                                                           rtcpPacketInformation.applicationData);
+            }
+        }
+    }
+}
+
+// Reports a TMMBR-derived bandwidth estimate (in kbit/s) to the registered
+// RTCP feedback callback, if any.
+void
+RTCPReceiver::UpdateBandwidthEstimate(const WebRtc_UWord16 bwEstimateKbit)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+
+    if (_cbRtcpFeedback == NULL)
+    {
+        // No observer registered; nothing to report.
+        return;
+    }
+    _cbRtcpFeedback->OnTMMBRReceived(_id, bwEstimateKbit);
+}
+
+// Copies the CNAME received from |remoteSSRC| into |cName| (always
+// NUL-terminated). Returns 0 on success, -1 if |cName| is NULL or no CNAME
+// has been received from that SSRC.
+WebRtc_Word32 RTCPReceiver::CNAME(const WebRtc_UWord32 remoteSSRC,
+                                  char cName[RTCP_CNAME_SIZE]) const {
+  if (cName == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  RTCPCnameInformation* cnameInfo = GetCnameInformation(remoteSSRC);
+  if (cnameInfo == NULL) {
+    // BUGFIX: this was assert(cnameInfo), which compiles out in release
+    // builds and let strncpy dereference NULL whenever no CNAME had been
+    // received for this SSRC. Fail with the existing -1 error convention.
+    return -1;
+  }
+  // Write the terminator first; strncpy copies at most RTCP_CNAME_SIZE - 1
+  // bytes, so the result is always NUL-terminated.
+  cName[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(cName, cnameInfo->name, RTCP_CNAME_SIZE - 1);
+  return 0;
+}
+
+// no callbacks allowed inside this function
+// With a non-NULL |candidateSet|, fills it (starting at index
+// |accNumCandidates|, up to |size| entries) with the TMMBR entries collected
+// from all known senders and returns the new total count. With a NULL
+// |candidateSet|, only counts the stored entries. Returns -1 when no
+// receive information exists at all.
+WebRtc_Word32 RTCPReceiver::TMMBRReceived(const WebRtc_UWord32 size,
+                                          const WebRtc_UWord32 accNumCandidates,
+                                          TMMBRSet* candidateSet) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::const_iterator
+      receiveInfoIt = _receivedInfoMap.begin();
+  if (receiveInfoIt == _receivedInfoMap.end()) {
+    return -1;
+  }
+  WebRtc_UWord32 num = accNumCandidates;
+  if (candidateSet) {
+    // Fill mode: copy entries until the output set is full or the map is
+    // exhausted.
+    while( num < size && receiveInfoIt != _receivedInfoMap.end()) {
+      RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+      if (receiveInfo == NULL) {
+        return 0;
+      }
+      for (WebRtc_UWord32 i = 0;
+          (num < size) && (i < receiveInfo->TmmbrSet.lengthOfSet); i++) {
+        // GetTMMBRSet returns 0 when it stored entry |i| at position |num|.
+        if (receiveInfo->GetTMMBRSet(i, num, candidateSet,
+                                     _clock.GetTimeInMS()) == 0) {
+          num++;
+        }
+      }
+      receiveInfoIt++;
+    }
+  } else {
+    // Count mode: just sum the stored set lengths.
+    while (receiveInfoIt != _receivedInfoMap.end()) {
+      RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+      if(receiveInfo == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                     "%s failed to get RTCPReceiveInformation",
+                     __FUNCTION__);
+        return -1;
+      }
+      num += receiveInfo->TmmbrSet.lengthOfSet;
+      receiveInfoIt++;
+    }
+  }
+  return num;
+}
+
+// Sets the RTCP packet timeout in milliseconds; PacketTimeout() treats a
+// value of zero as "timeout detection disabled". Always returns 0.
+WebRtc_Word32
+RTCPReceiver::SetPacketTimeout(const WebRtc_UWord32 timeoutMS)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    _packetTimeOutMS = timeoutMS;
+    return 0;
+}
+
+// Checks whether more than _packetTimeOutMS has elapsed since the last
+// received RTCP packet and, if so, fires OnRTCPPacketTimeout once. The
+// receiver lock is released before the feedback lock is taken, so the two
+// are never held together.
+void RTCPReceiver::PacketTimeout()
+{
+    if(_packetTimeOutMS == 0)
+    {
+        // not configured
+        return;
+    }
+
+    bool packetTimeOut = false;
+    {
+        CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+        if(_lastReceived == 0)
+        {
+            // not active
+            return;
+        }
+
+        WebRtc_UWord32 now = _clock.GetTimeInMS();
+
+        // Unsigned subtraction stays correct across clock wrap-around.
+        if(now - _lastReceived > _packetTimeOutMS)
+        {
+            packetTimeOut = true;
+            _lastReceived = 0;  // only one callback
+        }
+    }
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    if(packetTimeOut && _cbRtcpFeedback)
+    {
+        _cbRtcpFeedback->OnRTCPPacketTimeout(_id);
+    }
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_receiver.h b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver.h
new file mode 100644
index 0000000..eb1b658
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -0,0 +1,233 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
+
+#include <map>
+#include <vector>
+
+#include "typedefs.h"
+#include "rtp_utility.h"
+#include "rtcp_utility.h"
+#include "rtp_rtcp_defines.h"
+#include "rtcp_receiver_help.h"
+
+namespace webrtc {
+class ModuleRtpRtcpImpl;
+
+// Parses incoming RTCP packets and tracks per-remote-SSRC state (report
+// blocks, CNAMEs, TMMBR/TMMBN sets, sender info) for one RTP/RTCP module.
+// State access is guarded by _criticalSectionRTCPReceiver; user callbacks
+// are guarded separately by _criticalSectionFeedbacks.
+class RTCPReceiver
+{
+public:
+    RTCPReceiver(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                 ModuleRtpRtcpImpl* owner);
+    virtual ~RTCPReceiver();
+
+    void ChangeUniqueId(const WebRtc_Word32 id);
+
+    RTCPMethod Status() const;
+    WebRtc_Word32 SetRTCPStatus(const RTCPMethod method);
+
+    WebRtc_UWord32 LastReceived();
+
+    void SetSSRC( const WebRtc_UWord32 ssrc);
+    void SetRelaySSRC( const WebRtc_UWord32 ssrc);
+    WebRtc_Word32 SetRemoteSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_UWord32 RelaySSRC() const;
+
+    WebRtc_Word32 RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback);
+
+    WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback);
+
+    WebRtc_Word32 IncomingRTCPPacket(RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
+                                   RTCPUtility::RTCPParserV2 *rtcpParser);
+
+    void TriggerCallbacksFromRTCPPacket(RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    // Get the received CNAME for |remoteSSRC|.
+    WebRtc_Word32 CNAME(const WebRtc_UWord32 remoteSSRC,
+                        char cName[RTCP_CNAME_SIZE]) const;
+
+    // Get the NTP timestamp from the last received sender report, plus the
+    // local arrival time of that report.
+    WebRtc_Word32 NTP(WebRtc_UWord32 *ReceivedNTPsecs,
+                      WebRtc_UWord32 *ReceivedNTPfrac,
+                      WebRtc_UWord32 *RTCPArrivalTimeSecs,
+                      WebRtc_UWord32 *RTCPArrivalTimeFrac) const;
+
+    // Get round-trip-time statistics for |remoteSSRC|.
+    WebRtc_Word32 RTT(const WebRtc_UWord32 remoteSSRC,
+                      WebRtc_UWord16* RTT,
+                      WebRtc_UWord16* avgRTT,
+                      WebRtc_UWord16* minRTT,
+                      WebRtc_UWord16* maxRTT) const;
+
+    WebRtc_UWord16 RTT() const;
+
+    // Externally supplied RTT; see _rtt below.
+    int SetRTT(WebRtc_UWord16 rtt);
+
+    WebRtc_Word32 ResetRTT(const WebRtc_UWord32 remoteSSRC);
+
+    void UpdateLipSync(const WebRtc_Word32 audioVideoOffset) const;
+
+    WebRtc_Word32 SenderInfoReceived(RTCPSenderInfo* senderInfo) const;
+
+    void OnReceivedIntraFrameRequest(const FrameType frameType,
+                                     const WebRtc_UWord8 streamIdx) const;
+
+    // NOTE(review): "pitureID" is a pre-existing typo for "pictureID";
+    // kept as-is since renaming a declaration parameter is cosmetic only.
+    void OnReceivedSliceLossIndication(const WebRtc_UWord8 pitureID) const;
+    void OnReceivedReferencePictureSelectionIndication(
+        const WebRtc_UWord64 pitureID) const;
+
+    // Get statistics (one report block per remote participant).
+    WebRtc_Word32 StatisticsReceived(
+        std::vector<RTCPReportBlock>* receiveBlocks) const;
+
+    // Get TMMBR
+    WebRtc_Word32 TMMBRReceived(const WebRtc_UWord32 size,
+                                const WebRtc_UWord32 accNumCandidates,
+                                TMMBRSet* candidateSet) const;
+
+    bool UpdateRTCPReceiveInformationTimers();
+
+    void UpdateBandwidthEstimate(const WebRtc_UWord16 bwEstimateKbit);
+
+    WebRtc_Word32 BoundingSet(bool &tmmbrOwner,
+                              TMMBRSet*& boundingSetRec);
+
+    WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
+    void PacketTimeout();
+
+protected:
+    // Lookup/creation helpers for the per-remote-SSRC maps below.
+    RTCPHelp::RTCPReportBlockInformation* CreateReportBlockInformation(const WebRtc_UWord32 remoteSSRC);
+    RTCPHelp::RTCPReportBlockInformation* GetReportBlockInformation(const WebRtc_UWord32 remoteSSRC) const;
+
+    RTCPUtility::RTCPCnameInformation* CreateCnameInformation(const WebRtc_UWord32 remoteSSRC);
+    RTCPUtility::RTCPCnameInformation* GetCnameInformation(const WebRtc_UWord32 remoteSSRC) const;
+
+    RTCPHelp::RTCPReceiveInformation* CreateReceiveInformation(const WebRtc_UWord32 remoteSSRC);
+    RTCPHelp::RTCPReceiveInformation* GetReceiveInformation(const WebRtc_UWord32 remoteSSRC);
+
+    void UpdateReceiveInformation( RTCPHelp::RTCPReceiveInformation& receiveInformation);
+
+    // Per-packet-type parse handlers, dispatched while walking the parser.
+    void HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser,
+                                    RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
+                           RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
+                           const WebRtc_UWord32 remoteSSRC,
+                           const WebRtc_UWord8 numberOfReportBlocks);
+
+    void HandleSDES(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandleXRVOIPMetric(RTCPUtility::RTCPParserV2& rtcpParser,
+                            RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleNACK(RTCPUtility::RTCPParserV2& rtcpParser,
+                    RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleNACKItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                        RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleBYE(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandlePLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleSLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleSLIItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleRPSI(RTCPUtility::RTCPParserV2& rtcpParser,
+                    RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandlePsfbApp(RTCPUtility::RTCPParserV2& rtcpParser,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleREMBItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleIJ(RTCPUtility::RTCPParserV2& rtcpParser,
+                  RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleIJItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                      RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleTMMBR(RTCPUtility::RTCPParserV2& rtcpParser,
+                     RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleTMMBRItem(RTCPHelp::RTCPReceiveInformation& receiveInfo,
+                         const RTCPUtility::RTCPPacket& rtcpPacket,
+                         RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
+                         const WebRtc_UWord32 senderSSRC);
+
+    void HandleTMMBN(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandleSR_REQ(RTCPUtility::RTCPParserV2& rtcpParser,
+                      RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleTMMBNItem(RTCPHelp::RTCPReceiveInformation& receiveInfo,
+                         const RTCPUtility::RTCPPacket& rtcpPacket);
+
+    void HandleFIR(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleFIRItem(RTCPHelp::RTCPReceiveInformation& receiveInfo,
+                       const RTCPUtility::RTCPPacket& rtcpPacket,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleAPP(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleAPPItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+ private:
+  WebRtc_Word32           _id;
+  RtpRtcpClock&           _clock;
+  RTCPMethod              _method;
+  WebRtc_UWord32          _lastReceived;
+  ModuleRtpRtcpImpl&      _rtpRtcp;
+
+  CriticalSectionWrapper* _criticalSectionFeedbacks;
+  RtcpFeedback*           _cbRtcpFeedback;
+  RtpVideoFeedback*       _cbVideoFeedback;
+
+  CriticalSectionWrapper* _criticalSectionRTCPReceiver;
+  WebRtc_UWord32          _SSRC;
+  WebRtc_UWord32          _remoteSSRC;
+
+  // Received send report
+  RTCPSenderInfo _remoteSenderInfo;
+  // when did we receive the last send report
+  WebRtc_UWord32 _lastReceivedSRNTPsecs;
+  WebRtc_UWord32 _lastReceivedSRNTPfrac;
+
+  // Received report blocks.
+  std::map<WebRtc_UWord32, RTCPHelp::RTCPReportBlockInformation*>
+      _receivedReportBlockMap;
+  std::map<WebRtc_UWord32, RTCPHelp::RTCPReceiveInformation*>
+      _receivedInfoMap;
+  std::map<WebRtc_UWord32, RTCPUtility::RTCPCnameInformation*>
+      _receivedCnameMap;
+
+  WebRtc_UWord32            _packetTimeOutMS;
+
+  // Externally set RTT. This value can only be used if there are no valid
+  // RTT estimates.
+  WebRtc_UWord16 _rtt;
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_receiver_help.cc b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver_help.cc
new file mode 100644
index 0000000..0058aca
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver_help.cc
@@ -0,0 +1,258 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_receiver_help.h"
+#include "rtp_utility.h"
+
+#include <string.h> //memset
+#include <cassert> //assert
+
+namespace webrtc {
+using namespace RTCPHelp;
+
+// Zero-initializes all per-packet state. Pointer members (nackSequenceNumbers,
+// applicationData, VoIPMetric) start NULL and are allocated lazily by the
+// corresponding Add*/Reset* methods; the destructor frees them.
+RTCPPacketInformation::RTCPPacketInformation()
+    : rtcpPacketTypeFlags(0),
+      remoteSSRC(0),
+      nackSequenceNumbers(0),
+      nackSequenceNumbersLength(0),
+      applicationSubType(0),
+      applicationName(0),
+      applicationData(),
+      applicationLength(0),
+      reportBlock(false),
+      fractionLost(0),
+      roundTripTime(0),
+      lastReceivedExtendedHighSeqNum(0),
+      jitter(0),
+      interArrivalJitter(0),
+      sliPictureId(0),
+      rpsiPictureId(0),
+      receiverEstimatedMaxBitrate(0),
+      VoIPMetric(NULL) {
+}
+
+// Frees the lazily allocated buffers; deleting NULL is a harmless no-op.
+RTCPPacketInformation::~RTCPPacketInformation()
+{
+    delete [] nackSequenceNumbers;
+    delete [] applicationData;
+    delete VoIPMetric;
+}
+
+// Stores a copy of |metric| in VoIPMetric (freed by the destructor).
+void
+RTCPPacketInformation::AddVoIPMetric(const RTCPVoIPMetric* metric)
+{
+    // Release any previously stored metric first; assigning over the old
+    // pointer would leak it if this is called more than once. VoIPMetric is
+    // NULL-initialized in the constructor, so the delete is always safe.
+    delete VoIPMetric;
+    VoIPMetric = new RTCPVoIPMetric();
+    memcpy(VoIPMetric, metric, sizeof(RTCPVoIPMetric));
+}
+
+// Appends APP packet payload bytes to applicationData, reallocating and
+// copying the previously accumulated bytes.
+// NOTE(review): the kRtcpAppCode_DATA_SIZE cap is applied per call, not to
+// the accumulated total, so repeated calls can grow the buffer beyond the
+// cap — confirm whether that is intended.
+void
+RTCPPacketInformation::AddApplicationData(const WebRtc_UWord8* data, const WebRtc_UWord16 size)
+{
+    WebRtc_UWord8* oldData = applicationData;
+    WebRtc_UWord16 oldLength = applicationLength;
+
+    // Don't copy more than kRtcpAppCode_DATA_SIZE bytes.
+    WebRtc_UWord16 copySize = size;
+    if (size > kRtcpAppCode_DATA_SIZE) {
+        copySize = kRtcpAppCode_DATA_SIZE;
+    }
+
+    applicationLength += copySize;
+    applicationData = new WebRtc_UWord8[applicationLength];
+
+    if(oldData)
+    {
+        // Concatenate: previously accumulated bytes, then the new chunk.
+        memcpy(applicationData, oldData, oldLength);
+        memcpy(applicationData+oldLength, data, copySize);
+        delete [] oldData;
+    } else
+    {
+        memcpy(applicationData, data, copySize);
+    }
+}
+
+// Clears the NACK sequence-number list, allocating the fixed-capacity array
+// on first use. Must be called before AddNACKPacket (which asserts on it).
+void
+RTCPPacketInformation::ResetNACKPacketIdArray()
+{
+    if(NULL == nackSequenceNumbers)
+    {
+        nackSequenceNumbers = new WebRtc_UWord16[NACK_PACKETS_MAX_SIZE];
+    }
+    nackSequenceNumbersLength = 0;
+}
+
+// Records one NACKed sequence number; silently drops entries once the
+// fixed capacity (NACK_PACKETS_MAX_SIZE) is reached.
+void
+RTCPPacketInformation::AddNACKPacket(const WebRtc_UWord16 packetID)
+{
+    // ResetNACKPacketIdArray() must have allocated the array.
+    assert(nackSequenceNumbers);
+
+    WebRtc_UWord16& idx = nackSequenceNumbersLength;
+    if (idx < NACK_PACKETS_MAX_SIZE)
+    {
+        nackSequenceNumbers[idx++] = packetID;
+    }
+}
+
+// Stores the statistics extracted from a received report block and marks
+// this packet information as containing a report block.
+void
+RTCPPacketInformation::AddReportInfo(const WebRtc_UWord8 fraction,
+                                     const WebRtc_UWord16 rtt,
+                                     const WebRtc_UWord32 extendedHighSeqNum,
+                                     const WebRtc_UWord32 j)
+{
+    reportBlock = true;
+    fractionLost = fraction;
+    roundTripTime = rtt;
+    jitter = j;
+    lastReceivedExtendedHighSeqNum = extendedHighSeqNum;
+}
+
+// Zero-initializes all statistics; the memset additionally clears the POD
+// remoteReceiveBlock struct field-by-field.
+RTCPReportBlockInformation::RTCPReportBlockInformation():
+    remoteReceiveBlock(),
+    remoteMaxJitter(0),
+    RTT(0),
+    minRTT(0),
+    maxRTT(0),
+    avgRTT(0),
+    numAverageCalcs(0)
+{
+    memset(&remoteReceiveBlock,0,sizeof(remoteReceiveBlock));
+}
+
+RTCPReportBlockInformation::~RTCPReportBlockInformation()
+{
+}
+
+// Per-remote-SSRC receive state. _tmmbrSetTimeouts is allocated lazily by
+// VerifyAndAllocateTMMBRSet and holds, for each TMMBR entry, the local time
+// (ms) it was last updated; lastFIRSequenceNumber starts at -1 = "none seen".
+RTCPReceiveInformation::RTCPReceiveInformation() :
+
+    lastTimeReceived(0),
+    lastFIRSequenceNumber(-1),
+    lastFIRRequest(0),
+    readyForDelete(false),
+    _tmmbrSetTimeouts(NULL)
+{
+}
+
+RTCPReceiveInformation::~RTCPReceiveInformation()
+{
+    if(_tmmbrSetTimeouts)
+    {
+        delete [] _tmmbrSetTimeouts;
+    }
+}
+
+// Grows TmmbrSet (and the parallel _tmmbrSetTimeouts array) to at least
+// |minimumSize| entries, preserving the existing contents. A no-op when the
+// current capacity already suffices.
+// don't use TmmbrSet.VerifyAndAllocate this version keeps the data
+void
+RTCPReceiveInformation::VerifyAndAllocateTMMBRSet(const WebRtc_UWord32 minimumSize)
+{
+    if(minimumSize > TmmbrSet.sizeOfSet)
+    {
+        // make sure that our buffers are big enough
+        WebRtc_UWord32* ptrTmmbrSet = new WebRtc_UWord32[minimumSize];
+        WebRtc_UWord32* ptrTmmbrPacketOHSet = new WebRtc_UWord32[minimumSize];
+        WebRtc_UWord32* ptrTmmbrSsrcSet = new WebRtc_UWord32[minimumSize];
+        WebRtc_UWord32* tmmbrSetTimeouts = new WebRtc_UWord32[minimumSize];
+
+        if(TmmbrSet.lengthOfSet > 0)
+        {
+            // copy old values
+            memcpy(ptrTmmbrSet, TmmbrSet.ptrTmmbrSet, sizeof(WebRtc_UWord32) * TmmbrSet.lengthOfSet);
+            memcpy(ptrTmmbrPacketOHSet, TmmbrSet.ptrPacketOHSet, sizeof(WebRtc_UWord32) * TmmbrSet.lengthOfSet);
+            memcpy(ptrTmmbrSsrcSet, TmmbrSet.ptrSsrcSet, sizeof(WebRtc_UWord32) * TmmbrSet.lengthOfSet);
+            memcpy(tmmbrSetTimeouts, _tmmbrSetTimeouts, sizeof(WebRtc_UWord32) * TmmbrSet.lengthOfSet);
+        }
+        // Free the old buffers only after the copy above.
+        if(TmmbrSet.ptrTmmbrSet)
+        {
+            delete [] TmmbrSet.ptrTmmbrSet;
+            delete [] TmmbrSet.ptrPacketOHSet;
+            delete [] TmmbrSet.ptrSsrcSet;
+        }
+        if(_tmmbrSetTimeouts)
+        {
+            delete [] _tmmbrSetTimeouts;
+        }
+        TmmbrSet.ptrTmmbrSet = ptrTmmbrSet;
+        TmmbrSet.ptrPacketOHSet = ptrTmmbrPacketOHSet;
+        TmmbrSet.ptrSsrcSet = ptrTmmbrSsrcSet;
+        TmmbrSet.sizeOfSet = minimumSize;
+        _tmmbrSetTimeouts = tmmbrSetTimeouts;
+    }
+}
+
+// Inserts or updates the TMMBR entry for |senderSSRC|, stamping it with
+// |currentTimeMS| so GetTMMBRSet can later expire stale entries.
+void
+RTCPReceiveInformation::InsertTMMBRItem(const WebRtc_UWord32 senderSSRC,
+                                        const RTCPUtility::RTCPPacketRTPFBTMMBRItem& TMMBRItem,
+                                        const WebRtc_UWord32 currentTimeMS)
+{
+    // search to see if we have it in our list
+    for(WebRtc_UWord32 i = 0; i < TmmbrSet.lengthOfSet; i++)
+    {
+        if(TmmbrSet.ptrSsrcSet[i] == senderSSRC)
+        {
+            // we already have this SSRC in our list
+            // update it
+            TmmbrSet.ptrPacketOHSet[i] = TMMBRItem.MeasuredOverhead;
+            TmmbrSet.ptrTmmbrSet[i] = TMMBRItem.MaxTotalMediaBitRate;
+            _tmmbrSetTimeouts[i] = currentTimeMS;
+            return;
+        }
+    }
+    // Not found: append, growing the buffers if needed (contents preserved).
+    VerifyAndAllocateTMMBRSet(TmmbrSet.lengthOfSet+1);
+
+    const WebRtc_UWord32 idx = TmmbrSet.lengthOfSet;
+    TmmbrSet.ptrPacketOHSet[idx] = TMMBRItem.MeasuredOverhead;
+    TmmbrSet.ptrTmmbrSet[idx] = TMMBRItem.MaxTotalMediaBitRate;
+    TmmbrSet.ptrSsrcSet[idx] = senderSSRC;
+    _tmmbrSetTimeouts[idx] = currentTimeMS;
+    TmmbrSet.lengthOfSet++;
+}
+
+// Copies entry |sourceIdx| of this receiver's TMMBR set into slot |targetIdx|
+// of |candidateSet|. Returns 0 on success, -1 on out-of-range indices or when
+// the entry has timed out. Side effect: a timed-out entry is removed from
+// TmmbrSet (entries after it are shifted down and lengthOfSet shrinks), so
+// callers iterating by index must not advance past a -1 result blindly.
+WebRtc_Word32
+RTCPReceiveInformation::GetTMMBRSet(const WebRtc_UWord32 sourceIdx,
+                                    const WebRtc_UWord32 targetIdx,
+                                    TMMBRSet* candidateSet,
+                                    const WebRtc_UWord32 currentTimeMS)
+{
+    if(sourceIdx >= TmmbrSet.lengthOfSet)
+    {
+        return -1;
+    }
+    if(targetIdx >= candidateSet->sizeOfSet)
+    {
+        return -1;
+    }
+    WebRtc_UWord32 timeNow = currentTimeMS;
+
+    // use audio define since we don't know what interval the remote peer is using
+    if(timeNow - _tmmbrSetTimeouts[sourceIdx] > 5*RTCP_INTERVAL_AUDIO_MS)
+    {
+        // value timed out
+        const WebRtc_UWord32 move = TmmbrSet.lengthOfSet - (sourceIdx + 1);
+        if(move > 0)
+        {
+            // Compact all four parallel arrays over the expired slot.
+            memmove(&(TmmbrSet.ptrTmmbrSet[sourceIdx]), &(TmmbrSet.ptrTmmbrSet[sourceIdx+1]), move* sizeof(WebRtc_UWord32));
+            memmove(&(TmmbrSet.ptrPacketOHSet[sourceIdx]),&(TmmbrSet.ptrPacketOHSet[sourceIdx+1]), move* sizeof(WebRtc_UWord32));
+            memmove(&(TmmbrSet.ptrSsrcSet[sourceIdx]),&(TmmbrSet.ptrSsrcSet[sourceIdx+1]), move* sizeof(WebRtc_UWord32));
+            memmove(&(_tmmbrSetTimeouts[sourceIdx]),&(_tmmbrSetTimeouts[sourceIdx+1]), move* sizeof(WebRtc_UWord32));
+        }
+        TmmbrSet.lengthOfSet--;
+        return -1;
+    }
+
+    candidateSet->ptrTmmbrSet[targetIdx] = TmmbrSet.ptrTmmbrSet[sourceIdx];
+    candidateSet->ptrPacketOHSet[targetIdx] = TmmbrSet.ptrPacketOHSet[sourceIdx];
+    candidateSet->ptrSsrcSet[targetIdx] = TmmbrSet.ptrSsrcSet[sourceIdx];
+    return 0;
+}
+
+// Ensures the TMMBN bounding set has capacity for at least |minimumSize|
+// entries; delegates to TMMBRSet's own allocator (contents not preserved).
+void RTCPReceiveInformation::VerifyAndAllocateBoundingSet(const WebRtc_UWord32 minimumSize)
+{
+    TmmbnBoundingSet.VerifyAndAllocateSet(minimumSize);
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_receiver_help.h b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver_help.h
new file mode 100644
index 0000000..6dcaca8
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+
+#include "typedefs.h"
+
+#include "rtp_rtcp_defines.h"   // RTCPReportBlock
+#include "rtp_rtcp_config.h"           // RTCP_MAX_REPORT_BLOCKS
+#include "rtcp_utility.h"
+#include "tmmbr_help.h"
+
+namespace webrtc {
+namespace RTCPHelp
+{
+
+// Accumulates everything parsed out of one incoming compound RTCP packet.
+// Owns the heap buffers below; they are freed in the destructor.
+class RTCPPacketInformation
+{
+public:
+    RTCPPacketInformation();
+    ~RTCPPacketInformation();
+
+    // Stores a copy of the XR VoIP metric block.
+    void AddVoIPMetric(const RTCPVoIPMetric*  metric);
+
+    // Appends APP payload bytes (capped per call at kRtcpAppCode_DATA_SIZE).
+    void AddApplicationData(const WebRtc_UWord8* data, const WebRtc_UWord16 size);
+
+    // NACK list: Reset allocates/clears, Add records one sequence number.
+    void AddNACKPacket(const WebRtc_UWord16 packetID);
+    void ResetNACKPacketIdArray();
+
+    // Stores statistics from a received report block.
+    void AddReportInfo(const WebRtc_UWord8 fractionLost,
+                       const WebRtc_UWord16 rtt,
+                       const WebRtc_UWord32 extendedHighSeqNum,
+                       const WebRtc_UWord32 jitter);
+
+    WebRtc_UWord32  rtcpPacketTypeFlags; // RTCPPacketTypeFlags bit field
+    WebRtc_UWord32  remoteSSRC;
+
+    WebRtc_UWord16* nackSequenceNumbers;       // owned; NULL until Reset
+    WebRtc_UWord16  nackSequenceNumbersLength;
+
+    WebRtc_UWord8   applicationSubType;
+    WebRtc_UWord32  applicationName;
+    WebRtc_UWord8*  applicationData;           // owned; NULL until first Add
+    WebRtc_UWord16  applicationLength;
+
+    bool            reportBlock;               // true once AddReportInfo ran
+    WebRtc_UWord8   fractionLost;
+    WebRtc_UWord16  roundTripTime;
+    WebRtc_UWord32  lastReceivedExtendedHighSeqNum;
+    WebRtc_UWord32  jitter;
+
+    WebRtc_UWord32  interArrivalJitter;
+
+    WebRtc_UWord8   sliPictureId;
+    WebRtc_UWord64  rpsiPictureId;
+    WebRtc_UWord32  receiverEstimatedMaxBitrate;
+
+    RTCPVoIPMetric*  VoIPMetric;               // owned; NULL until AddVoIPMetric
+};
+
+
+// Per-remote-SSRC statistics accumulated from received report blocks.
+class RTCPReportBlockInformation
+{
+public:
+    RTCPReportBlockInformation();
+    ~RTCPReportBlockInformation();
+
+    // Statistics
+    RTCPReportBlock remoteReceiveBlock;
+    WebRtc_UWord32        remoteMaxJitter;
+
+    // RTT, all in milliseconds.
+    WebRtc_UWord16    RTT;
+    WebRtc_UWord16    minRTT;
+    WebRtc_UWord16    maxRTT;
+    WebRtc_UWord16    avgRTT;
+    WebRtc_UWord32    numAverageCalcs;  // samples folded into avgRTT
+};
+
+// Per-remote-participant receive state: TMMBR/TMMBN sets, FIR tracking,
+// and the liveness timestamp used for timing the participant out.
+class RTCPReceiveInformation
+{
+public:
+    RTCPReceiveInformation();
+    ~RTCPReceiveInformation();
+
+    void VerifyAndAllocateBoundingSet(const WebRtc_UWord32 minimumSize);
+    // Grows the TMMBR buffers while preserving existing entries.
+    void VerifyAndAllocateTMMBRSet(const WebRtc_UWord32 minimumSize);
+
+    // Inserts or refreshes the TMMBR entry for |senderSSRC|.
+    void InsertTMMBRItem(const WebRtc_UWord32 senderSSRC,
+                         const RTCPUtility::RTCPPacketRTPFBTMMBRItem& TMMBRItem,
+                         const WebRtc_UWord32 currentTimeMS);
+
+    // Copies one entry into |candidateSet|; removes it instead if timed out.
+    WebRtc_Word32 GetTMMBRSet(const WebRtc_UWord32 sourceIdx,
+                              const WebRtc_UWord32 targetIdx,
+                              TMMBRSet* candidateSet,
+                              const WebRtc_UWord32 currentTimeMS);
+
+    WebRtc_UWord32    lastTimeReceived;
+
+    // FIR
+    WebRtc_Word32     lastFIRSequenceNumber;  // -1 = none received yet
+    WebRtc_UWord32    lastFIRRequest;
+
+    // TMMBN
+    TMMBRSet        TmmbnBoundingSet;
+
+    // TMMBR
+    TMMBRSet        TmmbrSet;
+
+    bool            readyForDelete;
+private:
+    // Parallel to TmmbrSet: last-update time (ms) per entry.
+    WebRtc_UWord32*   _tmmbrSetTimeouts;
+};
+
+} // end namespace RTCPHelp
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_sender.cc b/trunk/src/modules/rtp_rtcp/source/rtcp_sender.cc
new file mode 100644
index 0000000..58e849f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -0,0 +1,2189 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_sender.h"
+
+#include <string.h> // memcpy
+#include <cassert> // assert
+#include <cstdlib> // rand
+
+#include "trace.h"
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+
+#include "rtp_rtcp_impl.h"
+
+namespace webrtc {
+
+using RTCPUtility::RTCPCnameInformation;
+
+// Constructs an RTCP sender for one RTP/RTCP module. |audio| selects the
+// audio vs. video RTCP interval; |clock| and |owner| must outlive this
+// object (held by reference). RTCP starts disabled (kRtcpOff).
+RTCPSender::RTCPSender(const WebRtc_Word32 id,
+                       const bool audio,
+                       RtpRtcpClock* clock,
+                       ModuleRtpRtcpImpl* owner) :
+    _id(id),
+    _audio(audio),
+    _clock(*clock),
+    _method(kRtcpOff),
+    _rtpRtcp(*owner),
+    _criticalSectionTransport(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbTransport(NULL),
+
+    _criticalSectionRTCPSender(CriticalSectionWrapper::CreateCriticalSection()),
+    _usingNack(false),
+    _sending(false),
+    _sendTMMBN(false),
+    _REMB(false),
+    _sendREMB(false),
+    _TMMBR(false),
+    _IJ(false),
+    _nextTimeToSendRTCP(0),
+    _SSRC(0),
+    _remoteSSRC(0),
+    _CNAME(),
+    _reportBlocks(),
+    _csrcCNAMEs(),
+
+    _cameraDelayMS(0),
+
+    _lastSendReport(),
+    _lastRTCPTime(),
+
+    _CSRCs(0),
+    _CSRC(),
+    _includeCSRCs(true),
+
+    _sequenceNumberFIR(0),
+
+    _lengthRembSSRC(0),
+    _sizeRembSSRC(0),
+    _rembSSRC(NULL),
+    _rembBitrate(0),
+    _bitrate_observer(NULL),
+
+    _tmmbrHelp(audio),
+    _tmmbr_Send(0),
+    _packetOH_Send(0),
+    _remoteRateControl(),
+
+    _appSend(false),
+    _appSubType(0),
+    _appName(),
+    _appData(NULL),
+    _appLength(0),
+    _xrSendVoIPMetric(false),
+    _xrVoIPMetric()
+{
+    // Zero the fixed-size arrays not covered by the initializer list.
+    memset(_CNAME, 0, sizeof(_CNAME));
+    memset(_lastSendReport, 0, sizeof(_lastSendReport));
+    memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
+
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+// Frees owned heap buffers, all map-owned report blocks and CSRC CNAMEs,
+// and the two critical sections.
+RTCPSender::~RTCPSender() {
+  delete [] _rembSSRC;
+  delete [] _appData;
+
+  while (!_reportBlocks.empty()) {
+    std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator it =
+        _reportBlocks.begin();
+    delete it->second;
+    _reportBlocks.erase(it);
+  }
+  while (!_csrcCNAMEs.empty()) {
+    std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+        _csrcCNAMEs.begin();
+    delete it->second;
+    _csrcCNAMEs.erase(it);
+  }
+  delete _criticalSectionTransport;
+  delete _criticalSectionRTCPSender;
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+}
+
+// Resets the sender to its freshly constructed state (RTCP off, all flags
+// and counters cleared, APP data released). Always returns 0.
+// NOTE(review): _cbTransport is cleared here without taking
+// _criticalSectionTransport — confirm callers guarantee no concurrent send.
+WebRtc_Word32
+RTCPSender::Init()
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    _method = kRtcpOff;
+    _cbTransport = NULL;
+    _usingNack = false;
+    _sending = false;
+    _sendTMMBN = false;
+    _TMMBR = false;
+    _IJ = false;
+    _REMB = false;
+    _sendREMB = false;
+    _SSRC = 0;
+    _remoteSSRC = 0;
+    _cameraDelayMS = 0;
+    _sequenceNumberFIR = 0;
+    _tmmbr_Send = 0;
+    _packetOH_Send = 0;
+    _remoteRateControl.Reset();
+    _nextTimeToSendRTCP = 0;
+    _CSRCs = 0;
+    _appSend = false;
+    _appSubType = 0;
+
+    if(_appData)
+    {
+        delete [] _appData;
+        _appData = NULL;
+    }
+    _appLength = 0;
+
+    _xrSendVoIPMetric = false;
+
+    memset(&_xrVoIPMetric, 0, sizeof(_xrVoIPMetric));
+    memset(_CNAME, 0, sizeof(_CNAME));
+    memset(_lastSendReport, 0, sizeof(_lastSendReport));
+    memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
+    return 0;
+}
+
+// Replaces the trace/module id. NOTE(review): written without taking a lock,
+// unlike the other setters — confirm this is only called at configuration
+// time.
+void
+RTCPSender::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+}
+
+// Registers the transport used to send RTCP packets. Always returns 0.
+WebRtc_Word32
+RTCPSender::RegisterSendTransport(Transport* outgoingTransport)
+{
+    CriticalSectionScoped lock(_criticalSectionTransport);
+    _cbTransport = outgoingTransport;
+    return 0;
+}
+
+// Returns the current RTCP mode (off / compound / reduced size).
+RTCPMethod
+RTCPSender::Status() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _method;
+}
+
+// Enables/disables RTCP. When enabling, schedules the first report half an
+// interval from now (audio vs. video interval chosen by _audio). Always
+// returns 0.
+WebRtc_Word32
+RTCPSender::SetRTCPStatus(const RTCPMethod method)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    if(method != kRtcpOff)
+    {
+        if(_audio)
+        {
+            _nextTimeToSendRTCP = _clock.GetTimeInMS() + (RTCP_INTERVAL_AUDIO_MS/2);
+        } else
+        {
+            _nextTimeToSendRTCP = _clock.GetTimeInMS() + (RTCP_INTERVAL_VIDEO_MS/2);
+        }
+    }
+    _method = method;
+    return 0;
+}
+
+// Returns whether we are currently marked as an RTP sender.
+bool
+RTCPSender::Sending() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _sending;
+}
+
+// Updates the sending flag. A true->false transition while RTCP is enabled
+// triggers an RTCP BYE. The lock is released before SendRTCP to avoid
+// holding _criticalSectionRTCPSender across the send path.
+WebRtc_Word32
+RTCPSender::SetSendingStatus(const bool sending)
+{
+    bool sendRTCPBye = false;
+    {
+        CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+        if(_method != kRtcpOff)
+        {
+            if(sending == false && _sending == true)
+            {
+                // Trigger RTCP bye
+                sendRTCPBye = true;
+            }
+        }
+        _sending = sending;
+    }
+    if(sendRTCPBye)
+    {
+        return SendRTCP(kRtcpBye);
+    }
+    return 0;
+}
+
+// Returns whether REMB (receiver estimated max bitrate) is enabled.
+bool
+RTCPSender::REMB() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _REMB;
+}
+
+// Enables/disables inclusion of REMB in outgoing RTCP. Always returns 0.
+WebRtc_Word32
+RTCPSender::SetREMBStatus(const bool enable)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _REMB = enable;
+    return 0;
+}
+
+// Stores the REMB bitrate and the list of |numberOfSSRC| media SSRCs it
+// applies to, growing the internal SSRC buffer if needed, and flags a REMB
+// for the next RTCP send. Always returns 0.
+WebRtc_Word32
+RTCPSender::SetREMBData(const WebRtc_UWord32 bitrate,
+                        const WebRtc_UWord8 numberOfSSRC,
+                        const WebRtc_UWord32* SSRC)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _rembBitrate = bitrate;
+ 
+    if(_sizeRembSSRC < numberOfSSRC)
+    {
+        delete [] _rembSSRC;
+        _rembSSRC = new WebRtc_UWord32[numberOfSSRC];
+        _sizeRembSSRC = numberOfSSRC;
+    } 
+
+    _lengthRembSSRC = numberOfSSRC;
+    for (int i = 0; i < numberOfSSRC; i++)
+    {  
+        _rembSSRC[i] = SSRC[i];
+    }
+    _sendREMB = true;
+    return 0;
+}
+
+// Installs (or clears, with NULL) the remote-bitrate observer. Returns false
+// if a non-NULL observer is already registered.
+bool RTCPSender::SetRemoteBitrateObserver(RtpRemoteBitrateObserver* observer) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  if (observer && _bitrate_observer) {
+    return false;
+  }
+  _bitrate_observer = observer;
+  return true;
+}
+
+// Forwards a locally produced receive-bitrate estimate to the observer,
+// tagged with the remote SSRC. No-op if no observer is registered.
+void RTCPSender::UpdateRemoteBitrateEstimate(unsigned int target_bitrate) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  if (_bitrate_observer) {
+    _bitrate_observer->OnReceiveBitrateChanged(_remoteSSRC, target_bitrate);
+  }
+}
+
+// Forwards a REMB received from the remote side to the observer.
+// No-op if no observer is registered.
+void RTCPSender::ReceivedRemb(unsigned int estimated_bitrate) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  if (_bitrate_observer) {
+    _bitrate_observer->OnReceivedRemb(estimated_bitrate);
+  }
+}
+
+// Returns whether TMMBR (temporary maximum media stream bitrate request)
+// is enabled.
+bool
+RTCPSender::TMMBR() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _TMMBR;
+}
+
+// Enables/disables TMMBR in outgoing RTCP. Always returns 0.
+WebRtc_Word32
+RTCPSender::SetTMMBRStatus(const bool enable)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _TMMBR = enable;
+    return 0;
+}
+
+// Returns whether the extended inter-arrival jitter (IJ) report is enabled.
+bool
+RTCPSender::IJ() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _IJ;
+}
+
+// Enables/disables the IJ report in outgoing RTCP. Always returns 0.
+WebRtc_Word32
+RTCPSender::SetIJStatus(const bool enable)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _IJ = enable;
+    return 0;
+}
+
+// Sets our own SSRC. Changing an already-set SSRC (typically after a
+// collision) schedules a near-immediate RTCP report.
+void
+RTCPSender::SetSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if(_SSRC != 0)
+    {
+        // not first SetSSRC, probably due to a collision
+        // schedule a new RTCP report
+        // make sure that we send a RTP packet
+        _nextTimeToSendRTCP = _clock.GetTimeInMS() + 100;
+    }
+    _SSRC = ssrc;
+}
+
+// Sets the remote SSRC we report about and resets the remote rate-control
+// state, since it belongs to the previous remote. Always returns 0.
+WebRtc_Word32
+RTCPSender::SetRemoteSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _remoteSSRC = ssrc;
+    _remoteRateControl.Reset();
+    return 0;
+}
+
+// Sets the camera capture delay used for A/V sync reporting. Rejects values
+// outside [-1000, 1000] ms with -1; returns 0 on success.
+WebRtc_Word32
+RTCPSender::SetCameraDelay(const WebRtc_Word32 delayMS)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    if(delayMS > 1000 || delayMS < -1000)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument, delay can't be larger than 1 sec", __FUNCTION__);
+        return -1;
+    }
+    _cameraDelayMS = delayMS;
+    return 0;
+}
+
+// Copies our CNAME into |cName|. Pre-setting the final byte to 0 before the
+// bounded strncpy guarantees NUL-termination. Always returns 0.
+WebRtc_Word32 RTCPSender::CNAME(char cName[RTCP_CNAME_SIZE]) {
+  assert(cName);
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  cName[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(cName, _CNAME, RTCP_CNAME_SIZE - 1);
+  return 0;
+}
+
+// Sets our CNAME (truncated to RTCP_CNAME_SIZE-1 chars, always
+// NUL-terminated). Always returns 0.
+WebRtc_Word32 RTCPSender::SetCNAME(const char cName[RTCP_CNAME_SIZE]) {
+  assert(cName);
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  _CNAME[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(_CNAME, cName, RTCP_CNAME_SIZE - 1);
+  return 0;
+}
+
+// Associates |cName| with contributing source |SSRC| for SDES reporting.
+// Returns -1 when the CSRC table is full (kRtpCsrcSize entries), 0 on
+// success. Re-adding an existing SSRC replaces its CNAME.
+WebRtc_Word32 RTCPSender::AddMixedCNAME(const WebRtc_UWord32 SSRC,
+                                        const char cName[RTCP_CNAME_SIZE]) {
+  assert(cName);
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  if (_csrcCNAMEs.size() >= kRtpCsrcSize) {
+    return -1;
+  }
+  // Free any existing entry for this SSRC first; assigning through
+  // operator[] below would otherwise overwrite the stored pointer and leak
+  // the old RTCPCnameInformation.
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+      _csrcCNAMEs.find(SSRC);
+  if (it != _csrcCNAMEs.end()) {
+    delete it->second;
+    _csrcCNAMEs.erase(it);
+  }
+  RTCPCnameInformation* ptr = new RTCPCnameInformation();
+  ptr->name[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(ptr->name, cName, RTCP_CNAME_SIZE - 1);
+  _csrcCNAMEs[SSRC] = ptr;
+  return 0;
+}
+
+// Removes (and frees) the CNAME entry for contributing source |SSRC|.
+// Returns -1 if no such entry exists, 0 on success.
+WebRtc_Word32 RTCPSender::RemoveMixedCNAME(const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+      _csrcCNAMEs.find(SSRC);
+
+  if (it == _csrcCNAMEs.end()) {
+    return -1;
+  }
+  delete it->second;
+  _csrcCNAMEs.erase(it);
+  return 0;
+}
+
+// Returns true when the next scheduled RTCP report is due. For video,
+// sendKeyframeBeforeRTP moves the deadline RTCP_SEND_BEFORE_KEY_FRAME_MS
+// earlier so the RTCP packet can be sent ahead of a large key frame.
+// Returns false when RTCP is switched off.
+bool
+RTCPSender::TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP) const
+{
+/*
+    For audio we use a fix 5 sec interval
+
+    For video we use 1 sec interval fo a BW smaller than 360 kbit/s,
+        technicaly we break the max 5% RTCP BW for video below 10 kbit/s but that should be extreamly rare
+
+
+From RFC 3550
+
+    MAX RTCP BW is 5% if the session BW
+        A send report is approximately 65 bytes inc CNAME
+        A report report is approximately 28 bytes
+
+    The RECOMMENDED value for the reduced minimum in seconds is 360
+      divided by the session bandwidth in kilobits/second.  This minimum
+      is smaller than 5 seconds for bandwidths greater than 72 kb/s.
+
+    If the participant has not yet sent an RTCP packet (the variable
+      initial is true), the constant Tmin is set to 2.5 seconds, else it
+      is set to 5 seconds.
+
+    The interval between RTCP packets is varied randomly over the
+      range [0.5,1.5] times the calculated interval to avoid unintended
+      synchronization of all participants
+
+    if we send
+    If the participant is a sender (we_sent true), the constant C is
+      set to the average RTCP packet size (avg_rtcp_size) divided by 25%
+      of the RTCP bandwidth (rtcp_bw), and the constant n is set to the
+      number of senders.
+
+    if we receive only
+      If we_sent is not true, the constant C is set
+      to the average RTCP packet size divided by 75% of the RTCP
+      bandwidth.  The constant n is set to the number of receivers
+      (members - senders).  If the number of senders is greater than
+      25%, senders and receivers are treated together.
+
+    reconsideration NOT required for peer-to-peer
+      "timer reconsideration" is
+      employed.  This algorithm implements a simple back-off mechanism
+      which causes users to hold back RTCP packet transmission if the
+      group sizes are increasing.
+
+      n = number of members
+      C = avg_size/(rtcpBW/4)
+
+   3. The deterministic calculated interval Td is set to max(Tmin, n*C).
+
+   4. The calculated interval T is set to a number uniformly distributed
+      between 0.5 and 1.5 times the deterministic calculated interval.
+
+   5. The resulting value of T is divided by e-3/2=1.21828 to compensate
+      for the fact that the timer reconsideration algorithm converges to
+      a value of the RTCP bandwidth below the intended average
+*/
+
+    // Millisecond tick read before taking the lock; this is a 32-bit
+    // value, so the comparisons below must handle wrap-around.
+    WebRtc_UWord32 now = _clock.GetTimeInMS();
+
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if(_method == kRtcpOff)
+    {
+        return false;
+    }
+
+    if(!_audio && sendKeyframeBeforeRTP)
+    {
+        // for video key-frames we want to send the RTCP before the large key-frame
+        // if we have a 100 ms margin
+        now += RTCP_SEND_BEFORE_KEY_FRAME_MS;
+    }
+
+    if(now > _nextTimeToSendRTCP)
+    {
+        return true;
+
+    } else if(now < 0x0000ffff && _nextTimeToSendRTCP > 0xffff0000) // 65 sec margin
+    {
+        // wrap
+        // "now" has wrapped past 2^32 ms while the deadline has not;
+        // the plain comparison above would wrongly report "not yet".
+        return true;
+    }
+    return false;
+}
+
+// Returns the compact NTP timestamp (middle 32 bits) of our most recent
+// sender report -- the value a receiver echoes back in its LSR field --
+// and writes the local send time of that report to lastRTCPTime.
+WebRtc_UWord32
+RTCPSender::LastSendReport( WebRtc_UWord32& lastRTCPTime)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    // Slot 0 always holds the newest entry; BuildSR shifts older ones up.
+    lastRTCPTime = _lastRTCPTime[0];
+    return _lastSendReport[0];
+}
+
+// Looks up the local send time of the SR whose compact NTP timestamp
+// matches sendReport (an LSR value echoed by the remote side).
+// Returns 0 -- to be ignored by the caller -- when no SR has been sent
+// yet, when sendReport is 0, or when no stored SR matches.
+WebRtc_UWord32
+RTCPSender::SendTimeOfSendReport(const WebRtc_UWord32 sendReport)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    // Nothing stored yet, or the caller passed an empty LSR field.
+    if(sendReport == 0 || _lastSendReport[0] == 0)
+    {
+        return 0; // will be ignored
+    }
+    // Linear scan of the small SR history, newest first.
+    for(int i = 0; i < RTCP_NUMBER_OF_SR; ++i)
+    {
+        if(_lastSendReport[i] == sendReport)
+        {
+            return _lastRTCPTime[i];
+        }
+    }
+    return 0;
+}
+
+// Stores (or replaces) a copy of the report block to attach for the
+// given SSRC in the next SR/RR. Returns 0 on success, -1 when
+// reportBlock is NULL or the table is full.
+WebRtc_Word32 RTCPSender::AddReportBlock(const WebRtc_UWord32 SSRC,
+                                         const RTCPReportBlock* reportBlock) {
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+  std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator it =
+      _reportBlocks.find(SSRC);
+  if (it != _reportBlocks.end()) {
+    // Replacing an existing block: free it first. The original code
+    // assigned straight into the map, leaking the previous copy on
+    // duplicate SSRCs.
+    delete it->second;
+    _reportBlocks.erase(it);
+  }
+  if (_reportBlocks.size() >= RTCP_MAX_REPORT_BLOCKS) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  // Keep a private copy; the caller retains ownership of reportBlock.
+  RTCPReportBlock* copyReportBlock = new RTCPReportBlock();
+  memcpy(copyReportBlock, reportBlock, sizeof(RTCPReportBlock));
+  _reportBlocks[SSRC] = copyReportBlock;
+  return 0;
+}
+
+// Removes the stored report block for the given SSRC.
+// Returns 0 if an entry was removed, -1 if the SSRC was unknown.
+WebRtc_Word32 RTCPSender::RemoveReportBlock(const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+  std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator found =
+      _reportBlocks.find(SSRC);
+  if (found == _reportBlocks.end()) {
+    return -1;
+  }
+  // The map owns the heap-allocated copy; free it before erasing.
+  delete found->second;
+  _reportBlocks.erase(found);
+  return 0;
+}
+
+// Builds an RTCP Sender Report (SR, PT=200) at rtcpbuffer[pos] and
+// advances pos past the packet. The NTP timestamp is back-timed by the
+// configured camera delay, the SR is recorded in the history used by
+// SendTimeOfSendReport(), and report blocks are appended through
+// AddReportBlocks(). Returns 0 on success, -2 when the packet would not
+// fit, or the negative error from AddReportBlocks().
+WebRtc_Word32
+RTCPSender::BuildSR(WebRtc_UWord8* rtcpbuffer,
+                    WebRtc_UWord32& pos,
+                    const WebRtc_UWord32 NTPsec,
+                    const WebRtc_UWord32 NTPfrac,
+                    const RTCPReportBlock* received)
+{
+    // sanity
+    if(pos + 52 >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+    WebRtc_UWord32 RTPtime;
+    WebRtc_UWord32 BackTimedNTPsec;
+    WebRtc_UWord32 BackTimedNTPfrac;
+
+    // First byte: V=2, P=0, RC=0; the report-block count is added into
+    // this byte after AddReportBlocks() below.
+    WebRtc_UWord32 posNumberOfReportBlocks = pos;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80;
+
+    // Sender report
+    rtcpbuffer[pos++]=(WebRtc_UWord8)200;
+
+    // Shift the SR history one slot; slot 0 receives this report so an
+    // echoed LSR can be matched later by SendTimeOfSendReport().
+    for(int i = (RTCP_NUMBER_OF_SR-2); i >= 0; i--)
+    {
+        // shift old
+        _lastSendReport[i+1] = _lastSendReport[i];
+        _lastRTCPTime[i+1] =_lastRTCPTime[i];
+    }
+
+    _lastRTCPTime[0] = ModuleRTPUtility::ConvertNTPTimeToMS(NTPsec, NTPfrac); // before video cam compensation
+
+    // Back-time the NTP timestamp by _cameraDelayMS (limited to
+    // +/-1000 ms by SetCameraDelay): a positive delay moves the
+    // timestamp back, a negative delay forward. The fraction field is
+    // 1/2^32 of a second, hence the 4294967-per-ms scale factor.
+    if(_cameraDelayMS >= 0)
+    {
+        // fraction of a second as an unsigned word32 4.294 967 296E9
+        WebRtc_UWord32 cameraDelayFixFrac =  (WebRtc_UWord32)_cameraDelayMS* 4294967; // note camera delay can't be larger than +/-1000ms
+        if(NTPfrac > cameraDelayFixFrac)
+        {
+            // no problem just reduce the fraction part
+            BackTimedNTPfrac = NTPfrac - cameraDelayFixFrac;
+            BackTimedNTPsec = NTPsec;
+        } else
+        {
+            // we need to reduce the sec and add that sec to the frac
+            BackTimedNTPsec = NTPsec - 1;
+            BackTimedNTPfrac = 0xffffffff - (cameraDelayFixFrac - NTPfrac);
+        }
+    } else
+    {
+        // fraction of a second as an unsigned word32 4.294 967 296E9
+        WebRtc_UWord32 cameraDelayFixFrac =  (WebRtc_UWord32)(-_cameraDelayMS)* 4294967; // note camera delay can't be larger than +/-1000ms
+        if(NTPfrac > 0xffffffff - cameraDelayFixFrac)
+        {
+            // we need to add the sec and add that sec to the frac
+            BackTimedNTPsec = NTPsec + 1;
+            BackTimedNTPfrac = cameraDelayFixFrac + NTPfrac; // this will wrap but that is ok
+        } else
+        {
+            // no problem just add the fraction part
+            BackTimedNTPsec = NTPsec;
+            BackTimedNTPfrac = NTPfrac + cameraDelayFixFrac;
+        }
+    }
+    // Compact NTP: middle 32 bits (low 16 of seconds, high 16 of
+    // fraction), i.e. the value receivers echo back as LSR.
+    _lastSendReport[0] = (BackTimedNTPsec <<16) + (BackTimedNTPfrac >> 16);
+
+    // RTP timestamp
+    // This should have a ramdom start value added
+    // RTP is counted from NTP not the acctual RTP
+    // This reflects the perfect RTP time
+    // we solve this by initiating RTP to our NTP :)
+
+    WebRtc_UWord32 freqHz = 90000; // For video
+    if(_audio)
+    {
+        freqHz =  _rtpRtcp.CurrentSendFrequencyHz();
+        RTPtime = ModuleRTPUtility::GetCurrentRTP(&_clock, freqHz);
+    }
+    else // video
+    {
+        // Integer approximation of frac/2^32 * 90000 using
+        // 2^32/90000 ~= 429496/9 (avoids floating point).
+        // used to be (WebRtc_UWord32)(((float)BackTimedNTPfrac/(float)FRAC)* 90000)
+        WebRtc_UWord32 tmp = 9*(BackTimedNTPfrac/429496);
+        RTPtime = BackTimedNTPsec*freqHz + tmp;
+    }
+
+
+
+
+    // Add sender data
+    // Save  for our length field
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+    // NTP
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, BackTimedNTPsec);
+    pos += 4;
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, BackTimedNTPfrac);
+    pos += 4;
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, RTPtime);
+    pos += 4;
+
+    //sender's packet count
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rtpRtcp.PacketCountSent());
+    pos += 4;
+
+    //sender's octet count
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rtpRtcp.ByteCountSent());
+    pos += 4;
+
+    WebRtc_UWord8 numberOfReportBlocks = 0;
+    WebRtc_Word32 retVal = AddReportBlocks(rtcpbuffer, pos, numberOfReportBlocks, received, NTPsec, NTPfrac);
+    if(retVal < 0)
+    {
+        //
+        return retVal ;
+    }
+    // Patch the report-block count into the first byte of the packet.
+    rtcpbuffer[posNumberOfReportBlocks] += numberOfReportBlocks;
+
+    // Length in 32-bit words minus one.
+    // NOTE(review): both the value (pos/4 - 1) and the write offset
+    // (rtcpbuffer+2) assume this SR starts at pos == 0, i.e. that it is
+    // the first packet in the compound packet -- confirm against callers.
+    WebRtc_UWord16 len = WebRtc_UWord16((pos/4) -1);
+    ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+2, len);
+    return 0;
+}
+
+
+// Builds an RTCP SDES packet (PT=202) at rtcpbuffer[pos], advancing
+// pos. It contains one CNAME chunk for our own SSRC plus one chunk per
+// registered CSRC (see AddMixedCNAME). Each chunk is padded with zeros
+// to a 32-bit boundary, always including at least one zero terminator.
+// Returns 0 on success, -2 when the packet would not fit.
+// (The name "SDEC" appears to be a long-standing typo for SDES.)
+WebRtc_Word32 RTCPSender::BuildSDEC(WebRtc_UWord8* rtcpbuffer,
+                                    WebRtc_UWord32& pos) {
+  size_t lengthCname = strlen(_CNAME);
+  assert(lengthCname < RTCP_CNAME_SIZE);
+
+  // sanity
+  if(pos + 12 + lengthCname  >= IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -2;
+  }
+  // SDEC Source Description
+
+  // We always need to add SDES CNAME
+  // First byte: V=2, SC = 1 (our chunk) + number of CSRC chunks.
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(0x80 + 1 + _csrcCNAMEs.size());
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(202);
+
+  // handle SDES length later on
+  WebRtc_UWord32 SDESLengthPos = pos;
+  pos++;
+  pos++;
+
+  // Add our own SSRC
+  ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+  pos += 4;
+
+  // CNAME = 1
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(1);
+
+  // Item length in bytes (does not include the type/length octets).
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(lengthCname);
+
+  // Running byte count of the whole packet; starts at 10 = 4 (header)
+  // + 4 (SSRC) + 2 (item type + item length).
+  WebRtc_UWord16 SDESLength = 10;
+
+  memcpy(&rtcpbuffer[pos], _CNAME, lengthCname);
+  pos += lengthCname;
+  SDESLength += (WebRtc_UWord16)lengthCname;
+
+  WebRtc_UWord16 padding = 0;
+  // We must have a zero field even if we have an even multiple of 4 bytes
+  if ((pos % 4) == 0) {
+    padding++;
+    rtcpbuffer[pos++]=0;
+  }
+  while ((pos % 4) != 0) {
+    padding++;
+    rtcpbuffer[pos++]=0;
+  }
+  SDESLength += padding;
+
+  // One additional chunk per registered contributing source.
+  std::map<WebRtc_UWord32, RTCPUtility::RTCPCnameInformation*>::iterator it =
+      _csrcCNAMEs.begin();
+
+  for(; it != _csrcCNAMEs.end(); it++) {
+    RTCPCnameInformation* cname = it->second;
+    WebRtc_UWord32 SSRC = it->first;
+
+    // Add SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, SSRC);
+    pos += 4;
+
+    // CNAME = 1
+    rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(1);
+
+    size_t length = strlen(cname->name);
+    assert(length < RTCP_CNAME_SIZE);
+
+    rtcpbuffer[pos++]= static_cast<WebRtc_UWord8>(length);
+    // 6 = 4 (SSRC) + 2 (item type + item length).
+    SDESLength += 6;
+
+    memcpy(&rtcpbuffer[pos],cname->name, length);
+
+    pos += length;
+    SDESLength += length;
+    WebRtc_UWord16 padding = 0;
+
+    // We must have a zero field even if we have an even multiple of 4 bytes
+    if((pos % 4) == 0){
+      padding++;
+      rtcpbuffer[pos++]=0;
+    }
+    while((pos % 4) != 0){
+      padding++;
+      rtcpbuffer[pos++] = 0;
+    }
+    SDESLength += padding;
+  }
+  // in 32-bit words minus one and we don't count the header
+  WebRtc_UWord16 buffer_length = (SDESLength / 4) - 1;
+  ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer + SDESLengthPos,
+                                          buffer_length);
+  return 0;
+}
+
+// Builds an RTCP Receiver Report (RR, PT=201) at rtcpbuffer[pos],
+// advancing pos. Report blocks are appended via AddReportBlocks() and
+// the RC field / length are patched in afterwards.
+// Returns 0 on success, -2 when the packet would not fit, or the
+// negative error from AddReportBlocks().
+WebRtc_Word32
+RTCPSender::BuildRR(WebRtc_UWord8* rtcpbuffer,
+                    WebRtc_UWord32& pos,
+                    const WebRtc_UWord32 NTPsec,
+                    const WebRtc_UWord32 NTPfrac,
+                    const RTCPReportBlock* received)
+{
+    // sanity one block
+    if(pos + 32 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // Remember where this packet starts: the report-block count and the
+    // length field are patched in here once the final size is known.
+    WebRtc_UWord32 posNumberOfReportBlocks = pos;
+
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80;   // V=2, P=0, RC patched below
+    rtcpbuffer[pos++]=(WebRtc_UWord8)201;    // PT=201 (RR)
+
+    // Save  for our length field
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    WebRtc_UWord8 numberOfReportBlocks = 0;
+    WebRtc_Word32 retVal = AddReportBlocks(rtcpbuffer, pos, numberOfReportBlocks, received, NTPsec, NTPfrac);
+    if(retVal < 0)
+    {
+        return retVal;
+    }
+    rtcpbuffer[posNumberOfReportBlocks] += numberOfReportBlocks;
+
+    // Length in 32-bit words minus one, measured from the start of THIS
+    // packet. The original wrote the length at fixed buffer offset 2 and
+    // counted from offset 0, which is only correct when the RR is the
+    // first packet in the compound packet (pos == 0 on entry); computing
+    // it relative to the packet start is correct in all cases and
+    // identical when pos == 0.
+    WebRtc_UWord16 len = WebRtc_UWord16((pos - posNumberOfReportBlocks)/4 - 1);
+    ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer + posNumberOfReportBlocks + 2, len);
+    return 0;
+}
+
+// From RFC 5450: Transmission Time Offsets in RTP Streams.
+//        0                   1                   2                   3
+//        0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   hdr |V=2|P|    RC   |   PT=IJ=195   |             length            |
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//       |                      inter-arrival jitter                     |
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//       .                                                               .
+//       .                                                               .
+//       .                                                               .
+//       |                      inter-arrival jitter                     |
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+//  If present, this RTCP packet must be placed after a receiver report
+//  (inside a compound RTCP packet), and MUST have the same value for RC
+//  (reception report count) as the receiver report.
+
+// Builds an RTCP Extended Inter-Arrival Jitter report (PT=195,
+// RFC 5450) at rtcpbuffer[pos], advancing pos. Only the single-source
+// case (RC=1) is implemented; when report blocks are present the packet
+// is skipped, since RC must match the receiver report's count.
+// Returns 0 on success (or when skipped), -2 when it would not fit.
+WebRtc_Word32
+RTCPSender::BuildExtendedJitterReport(
+    WebRtc_UWord8* rtcpbuffer,
+    WebRtc_UWord32& pos,
+    const WebRtc_UWord32 jitterTransmissionTimeOffset)
+{
+    if (_reportBlocks.size() > 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "Not implemented.");
+        return 0;
+    }
+
+    // sanity
+    if(pos + 8 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // header: V=2, RC=1, PT=195 (IJ)
+    WebRtc_UWord8 RC = 1;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + RC;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)195;
+
+    // Used fixed length of 2
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(1);
+
+    // Add inter-arrival jitter
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos,
+                                            jitterTransmissionTimeOffset);
+    pos += 4;
+    return 0;
+}
+
+// Builds a Picture Loss Indication (PSFB PT=206, FMT=1) at
+// rtcpbuffer[pos], advancing pos.
+// Returns 0 on success, -2 when the 12-byte packet would not fit.
+WebRtc_Word32
+RTCPSender::BuildPLI(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // Make sure the fixed-size PLI fits the remaining buffer.
+    if(pos + 12 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // PSFB header: V=2, FMT=1 (PLI), PT=206.
+    const WebRtc_UWord8 kFmt = 1;
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(0x80 + kFmt);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)206;
+
+    // Fixed length of 2 (in 32-bit words minus one).
+    rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+    rtcpbuffer[pos++] = (WebRtc_UWord8)2;
+
+    // Packet sender SSRC.
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
+    pos += 4;
+
+    // Media source SSRC.
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
+    pos += 4;
+    return 0;
+}
+
+// Builds a Full Intra Request (PSFB PT=206, FMT=4, RFC 5104) at
+// rtcpbuffer[pos], advancing pos. A repeated request (repeat == true)
+// keeps the previous command sequence number, per RFC 5104.
+// Returns 0 on success, -2 when the 20-byte packet would not fit.
+WebRtc_Word32 RTCPSender::BuildFIR(WebRtc_UWord8* rtcpbuffer,
+                                   WebRtc_UWord32& pos,
+                                   bool repeat) {
+  if (pos + 20 >= IP_PACKET_SIZE) {
+    return -2;
+  }
+  // Only a fresh request gets a new sequence number.
+  if (!repeat) {
+    ++_sequenceNumberFIR;
+  }
+
+  // PSFB header: V=2, FMT=4 (FIR), PT=206, fixed length 4.
+  const WebRtc_UWord8 kFmt = 4;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)(0x80 + kFmt);
+  rtcpbuffer[pos++] = (WebRtc_UWord8)206;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)4;
+
+  // Packet sender SSRC.
+  ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
+  pos += 4;
+
+  // RFC 5104 4.3.1.2: the media source SSRC field is zero for FIR.
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+
+  // FCI: target SSRC, command sequence number, three reserved bytes.
+  ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
+  pos += 4;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)_sequenceNumberFIR;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  return 0;
+}
+
+/*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |            First        |        Number           | PictureID |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+// Builds a Slice Loss Indication (PSFB PT=206, FMT=2) at
+// rtcpbuffer[pos], advancing pos. The FCI reports all macroblocks lost
+// (First=0, Number=0x1fff) for the low 6 bits of pictureID.
+// Returns 0 on success, -2 when the 16-byte packet would not fit.
+WebRtc_Word32
+RTCPSender::BuildSLI(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos, const WebRtc_UWord8 pictureID)
+{
+    if(pos + 16 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // PSFB header: V=2, FMT=2 (SLI), PT=206, fixed length 3.
+    const WebRtc_UWord8 kFmt = 2;
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(0x80 + kFmt);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)206;
+    rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+    rtcpbuffer[pos++] = (WebRtc_UWord8)3;
+
+    // Packet sender SSRC.
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Media source SSRC.
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // FCI word: First = 0 (13 bits), Number = 0x1fff (13 bits, i.e.
+    // every macroblock), PictureID = low 6 bits of pictureID.
+    WebRtc_UWord32 sliField = (0x1fff << 6) + (0x3f & pictureID);
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, sliField);
+    pos += 4;
+    return 0;
+}
+
+/*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |      PB       |0| Payload Type|    Native RPSI bit string     |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   defined per codec          ...                | Padding (0) |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+/*
+*    Note: not generic made for VP8
+*/
+// Builds a Reference Picture Selection Indication (PSFB PT=206, FMT=3)
+// at rtcpbuffer[pos], advancing pos. The picture ID is encoded VP8
+// style as a chain of 7-bit groups, each non-final byte carrying a
+// continuation bit (0x80); the bit string is then zero-padded to a
+// 32-bit boundary. Returns 0 on success, -2 when it would not fit.
+WebRtc_Word32
+RTCPSender::BuildRPSI(WebRtc_UWord8* rtcpbuffer,
+                     WebRtc_UWord32& pos,
+                     const WebRtc_UWord64 pictureID,
+                     const WebRtc_UWord8 payloadType)
+{
+    // sanity
+    if(pos + 24 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // add Reference Picture Selection Indication
+    WebRtc_UWord8 FMT = 3;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)206;
+
+    // calc length
+    // Count how many 7-bit groups pictureID needs (at least one byte).
+    WebRtc_UWord32 bitsRequired = 7;
+    WebRtc_UWord8 bytesRequired = 1;
+    while((pictureID>>bitsRequired) > 0)
+    {
+        bitsRequired += 7;
+        bytesRequired++;
+    }
+
+    // Packet length in 32-bit words minus one: 2 fixed words for the
+    // SSRCs plus 1-3 words for PB/PT + picture ID + padding.
+    WebRtc_UWord8 size = 3;
+    if(bytesRequired > 6)
+    {
+        size = 5;
+    } else if(bytesRequired > 2)
+    {
+        size = 4;
+    }
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=size;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // calc padding length
+    // 2 = PB byte + payload-type byte preceding the bit string.
+    WebRtc_UWord8 paddingBytes = 4-((2+bytesRequired)%4);
+    if(paddingBytes == 4)
+    {
+        paddingBytes = 0;
+    }
+    // add padding length in bits
+    rtcpbuffer[pos] = paddingBytes*8; // padding can be 0, 8, 16 or 24
+    pos++;
+
+    // add payload type
+    rtcpbuffer[pos] = payloadType;
+    pos++;
+
+    // add picture ID
+    // Most significant groups first; every byte except the last has the
+    // continuation bit (0x80) set.
+    for(int i = bytesRequired-1; i > 0; i--)
+    {
+        rtcpbuffer[pos] = 0x80 | WebRtc_UWord8(pictureID >> (i*7));
+        pos++;
+    }
+    // add last byte of picture ID
+    rtcpbuffer[pos] = WebRtc_UWord8(pictureID & 0x7f);
+    pos++;
+
+    // add padding
+    for(int j = 0; j <paddingBytes; j++)
+    {
+        rtcpbuffer[pos] = 0;
+        pos++;
+    }
+    return 0;
+}
+
+// Builds a Receiver Estimated Max Bitrate message (application layer
+// feedback, PSFB PT=206, FMT=15, "REMB") at rtcpbuffer[pos], advancing
+// pos. The bitrate is encoded as a 6-bit exponent and 18-bit mantissa,
+// followed by the list of SSRCs the estimate applies to.
+// Returns 0 on success, -2 when the packet would not fit.
+WebRtc_Word32
+RTCPSender::BuildREMB(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity: 20 fixed bytes plus one 32-bit word per listed SSRC
+    if(pos + 20 + 4 * _lengthRembSSRC >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // PSFB header: V=2, FMT=15 (application layer feedback), PT=206.
+    WebRtc_UWord8 FMT = 15;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)206;
+
+    // Length in 32-bit words minus one: 4 fixed words + one per SSRC.
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=_lengthRembSSRC + 4;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Remote SSRC must be 0
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, 0);
+    pos += 4;
+
+    // Unique identifier for this ALFB message.
+    rtcpbuffer[pos++]='R';
+    rtcpbuffer[pos++]='E';
+    rtcpbuffer[pos++]='M';
+    rtcpbuffer[pos++]='B';
+
+    rtcpbuffer[pos++] = _lengthRembSSRC;
+    // 6 bit Exp
+    // 18 bit mantissa
+    // Find the smallest exponent whose 18-bit mantissa can represent
+    // the bitrate. The comparison is done in 64 bits: the original
+    // 32-bit (262143 << i) wraps for i >= 15 and is undefined behavior
+    // for i >= 32, which could pick a wrong exponent for bitrates near
+    // the top of the 32-bit range. An exponent of at most 31 always
+    // suffices for a 32-bit bitrate, so the loop is capped there.
+    WebRtc_UWord8 brExp = 0;
+    for(WebRtc_UWord32 i=0; i<32; i++)
+    {
+        if((WebRtc_UWord64)_rembBitrate <= ((WebRtc_UWord64)262143 << i))
+        {
+            brExp = (WebRtc_UWord8)i;
+            break;
+        }
+    }
+    const WebRtc_UWord32 brMantissa = (_rembBitrate >> brExp);
+    rtcpbuffer[pos++]=(WebRtc_UWord8)((brExp << 2) + ((brMantissa >> 16) & 0x03));
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(brMantissa >> 8);
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(brMantissa);
+
+    // The SSRCs this estimate applies to.
+    for (int i = 0; i < _lengthRembSSRC; i++)
+    {
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rembSSRC[i]);
+        pos += 4;
+    }
+    return 0;
+}
+
+// Feeds the measured round-trip time into the remote rate controller,
+// caches the resulting estimate (in kbit/s) for the next TMMBR we send,
+// and returns the estimate in bit/s.
+WebRtc_UWord32
+RTCPSender::CalculateNewTargetBitrate(WebRtc_UWord32 RTT)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    const WebRtc_UWord32 target_bitrate =
+        _remoteRateControl.UpdateBandwidthEstimate(RTT, _clock.GetTimeInMS());
+    // TMMBR carries kbit/s.
+    _tmmbr_Send = target_bitrate / 1000;
+    return target_bitrate;
+}
+
+// Returns the most recent bandwidth estimate from the remote rate
+// controller, without updating it.
+WebRtc_UWord32 RTCPSender::LatestBandwidthEstimate() const {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  const WebRtc_UWord32 estimate = _remoteRateControl.LatestEstimate();
+  return estimate;
+}
+
+// Returns true when the remote rate controller currently holds a valid
+// bandwidth estimate.
+bool
+RTCPSender::ValidBitrateEstimate() const {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  const bool valid = _remoteRateControl.ValidEstimate();
+  return valid;
+}
+
+// Builds a Temporary Maximum Media Stream Bit Rate Request (RTPFB
+// PT=205, FMT=3, RFC 5104) at rtcpbuffer[pos], advancing pos -- but
+// only when the request would matter: RFC 5104 allows only an owner of
+// the current TMMBN bounding set to raise the rate, so the request is
+// suppressed when our (tmmbr, packetOH) tuple is already in the set or
+// would not enter it. Returns 0 on success or suppression, -2 when the
+// packet would not fit.
+WebRtc_Word32
+RTCPSender::BuildTMMBR(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // Before sending the TMMBR check the received TMMBN, only an owner is allowed to raise the bitrate
+    // If the sender is an owner of the TMMBN -> send TMMBR
+    // If not an owner but the TMMBR would enter the TMMBN -> send TMMBR
+
+    // get current bounding set from RTCP receiver
+    bool tmmbrOwner = false;
+    TMMBRSet* candidateSet = _tmmbrHelp.CandidateSet(); // store in candidateSet, allocates one extra slot
+
+    // holding _criticalSectionRTCPSender while calling RTCPreceiver which will accuire _criticalSectionRTCPReceiver
+    // is a potental deadlock but since RTCPreceiver is not doing the revese we should be fine
+    WebRtc_Word32 lengthOfBoundingSet = _rtpRtcp.BoundingSet(tmmbrOwner, candidateSet);
+
+    if(lengthOfBoundingSet > 0)
+    {
+        for (WebRtc_Word32 i = 0; i < lengthOfBoundingSet; i++)
+        {
+            if( candidateSet->ptrTmmbrSet[i] == _tmmbr_Send &&
+                candidateSet->ptrPacketOHSet[i] == _packetOH_Send)
+            {
+                // do not send the same tuple
+                return 0;
+            }
+        }
+        if(!tmmbrOwner)
+        {
+            // use received bounding set as candidate set
+            // add current tuple
+            // The extra slot allocated by CandidateSet() holds our tuple.
+            candidateSet->ptrTmmbrSet[lengthOfBoundingSet]    = _tmmbr_Send;
+            candidateSet->ptrPacketOHSet[lengthOfBoundingSet] = _packetOH_Send;
+            candidateSet->ptrSsrcSet[lengthOfBoundingSet] = _SSRC;
+            int numCandidates = lengthOfBoundingSet+ 1;
+
+            // find bounding set
+            TMMBRSet* boundingSet = NULL;
+            int numBoundingSet = _tmmbrHelp.FindTMMBRBoundingSet(boundingSet);
+            // NOTE(review): with `||` this condition is true whenever
+            // numBoundingSet > 0 OR numBoundingSet <= numCandidates,
+            // which is effectively always; `&&` (a sane, non-empty
+            // bounding set) was probably intended -- confirm upstream.
+            if(numBoundingSet > 0 || numBoundingSet <= numCandidates)
+            {
+                tmmbrOwner = _tmmbrHelp.IsOwner(_SSRC, numBoundingSet);
+            }
+            if(!tmmbrOwner)
+            {
+                // did not enter bounding set, no meaning to send this request
+                return 0;
+            }
+        }
+    }
+
+    // _tmmbr_Send is in kbit/s; zero means nothing to request.
+    if(_tmmbr_Send)
+    {
+        // sanity
+        if(pos + 20 >= IP_PACKET_SIZE)
+        {
+            return -2;
+        }
+        // add TMMBR indicator
+        // RTPFB header: V=2, FMT=3 (TMMBR), PT=205.
+        WebRtc_UWord8 FMT = 3;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)205;
+
+        //Length of 4
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(4);
+
+        // Add our own SSRC
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+        pos += 4;
+
+        // RFC 5104     4.2.1.2.  Semantics
+
+        // SSRC of media source
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+
+        // Additional Feedback Control Information (FCI)
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+        pos += 4;
+
+        // Encode bit/s as 6-bit exponent + 17-bit mantissa.
+        // NOTE(review): (131071 << i) wraps a 32-bit value for i >= 15
+        // and is undefined for i >= 32 -- same pattern fixed in
+        // BuildREMB; harmless for realistic bitrates but worth fixing.
+        WebRtc_UWord32 bitRate = _tmmbr_Send*1000;
+        WebRtc_UWord32 mmbrExp = 0;
+        for(WebRtc_UWord32 i=0;i<64;i++)
+        {
+            if(bitRate <= ((WebRtc_UWord32)131071 << i))
+            {
+                mmbrExp = i;
+                break;
+            }
+        }
+        WebRtc_UWord32 mmbrMantissa = (bitRate >> mmbrExp);
+
+        // Pack exp (6 bits), mantissa (17 bits) and the 9-bit measured
+        // overhead into the final FCI word.
+        rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrExp << 2) + ((mmbrMantissa >> 15) & 0x03));
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(mmbrMantissa >> 7);
+        rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrMantissa << 1) + ((_packetOH_Send >> 8)& 0x01));
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(_packetOH_Send);
+    }
+    return 0;
+}
+
+// Builds a Temporary Maximum Media Stream Bit Rate Notification (RTPFB
+// PT=205, FMT=4, RFC 5104) at rtcpbuffer[pos], advancing pos, with one
+// FCI entry per active tuple in the bounding set supplied by
+// _tmmbrHelp. Returns 0 on success, -1 when no bounding set is
+// available, -2 when the packet would not fit.
+WebRtc_Word32
+RTCPSender::BuildTMMBN(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    TMMBRSet* boundingSet = _tmmbrHelp.BoundingSetToSend();
+    if(boundingSet == NULL)
+    {
+        return -1;
+    }
+    // sanity
+    if(pos + 12 + boundingSet->lengthOfSet*8 >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+    // RTPFB header: V=2, FMT=4 (TMMBN), PT=205.
+    WebRtc_UWord8 FMT = 4;
+    // add TMMBN indicator
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)205;
+
+    //Add length later
+    int posLength = pos;
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // RFC 5104     4.2.2.2.  Semantics
+
+    // SSRC of media source
+    // Always zero for TMMBN.
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+
+    // Additional Feedback Control Information (FCI)
+    // One 8-byte entry per tuple; entries with a zero bitrate are
+    // treated as empty slots and skipped.
+    int numBoundingSet = 0;
+    for(WebRtc_UWord32 n=0; n< boundingSet->lengthOfSet; n++)
+    {
+        if (boundingSet->ptrTmmbrSet[n] > 0)
+        {
+            WebRtc_UWord32 tmmbrSSRC = boundingSet->ptrSsrcSet[n];
+            ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, tmmbrSSRC);
+            pos += 4;
+
+            // Encode bit/s as 6-bit exponent + 17-bit mantissa.
+            // NOTE(review): (131071 << i) wraps a 32-bit value for
+            // i >= 15 and is undefined for i >= 32 -- same pattern
+            // fixed in BuildREMB; harmless for realistic bitrates.
+            WebRtc_UWord32 bitRate = boundingSet->ptrTmmbrSet[n] * 1000;
+            WebRtc_UWord32 mmbrExp = 0;
+            for(int i=0; i<64; i++)
+            {
+                if(bitRate <=  ((WebRtc_UWord32)131071 << i))
+                {
+                    mmbrExp = i;
+                    break;
+                }
+            }
+            WebRtc_UWord32 mmbrMantissa = (bitRate >> mmbrExp);
+            WebRtc_UWord32 measuredOH = boundingSet->ptrPacketOHSet[n];
+
+            // Pack exp, mantissa and the 9-bit measured overhead.
+            rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrExp << 2) + ((mmbrMantissa >> 15) & 0x03));
+            rtcpbuffer[pos++]=(WebRtc_UWord8)(mmbrMantissa >> 7);
+            rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrMantissa << 1) + ((measuredOH >> 8)& 0x01));
+            rtcpbuffer[pos++]=(WebRtc_UWord8)(measuredOH);
+            numBoundingSet++;
+        }
+    }
+    // Length in 32-bit words minus one: 2 fixed words + 2 per entry.
+    WebRtc_UWord16 length= (WebRtc_UWord16)(2+2*numBoundingSet);
+    rtcpbuffer[posLength++]=(WebRtc_UWord8)(length>>8);
+    rtcpbuffer[posLength]=(WebRtc_UWord8)(length);
+    return 0;
+}
+
+// Builds an RTCP APP packet (PT=204) at rtcpbuffer[pos], advancing pos,
+// from the application data previously registered on this sender.
+// Returns 0 on success, -1 when no data is registered, -2 when the
+// packet would not fit.
+WebRtc_Word32
+RTCPSender::BuildAPP(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // No application data registered -> nothing to build.
+    if(_appData == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state", __FUNCTION__);
+        return -1;
+    }
+    // Header (4) + SSRC (4) + name (4) + payload must fit the buffer.
+    if(pos + 12 + _appLength >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+    // APP header: V=2, subtype, PT=204.
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(0x80 + _appSubType);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)204;
+
+    // Length in 32-bit words minus one; the two extra words cover the
+    // SSRC and the 4-byte name. Assumes _appLength is a multiple of 4.
+    WebRtc_UWord16 length = (_appLength>>2) + 2; // include SSRC and name
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(length >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)length;
+
+    // Our own SSRC.
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
+    pos += 4;
+
+    // Four-character application name.
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _appName);
+    pos += 4;
+
+    // Application-dependent payload.
+    memcpy(rtcpbuffer + pos, _appData, _appLength);
+    pos += _appLength;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildNACK(WebRtc_UWord8* rtcpbuffer,
+                      WebRtc_UWord32& pos,
+                      const WebRtc_Word32 nackSize,
+                      const WebRtc_UWord16* nackList)
+{
+    // sanity
+    if(pos + 16 >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+
+    // int size, WebRtc_UWord16* nackList
+    // add nack list
+    WebRtc_UWord8 FMT = 1;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)205;
+
+    rtcpbuffer[pos++]=(WebRtc_UWord8) 0;
+    int nackSizePos = pos;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(3); //setting it to one kNACK signal as default
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // add the list
+    int i = 0;
+    int numOfNackFields = 0;
+    while(nackSize > i && numOfNackFields < 253)
+    {
+        WebRtc_UWord16 nack = nackList[i];
+        // put dow our sequence number
+        ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+pos, nack);
+        pos += 2;
+
+        i++;
+        numOfNackFields++;
+        if(nackSize > i)
+        {
+            bool moreThan16Away = (WebRtc_UWord16(nack+16) < nackList[i])?true: false;
+            if(!moreThan16Away)
+            {
+                // check for a wrap
+                if(WebRtc_UWord16(nack+16) > 0xff00 && nackList[i] < 0x0fff)
+                {
+                    // wrap
+                    moreThan16Away = true;
+                }
+            }
+            if(moreThan16Away)
+            {
+                // next is more than 16 away
+                rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+                rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+            } else
+            {
+                // build our bitmask
+                WebRtc_UWord16 bitmask = 0;
+
+                bool within16Away = (WebRtc_UWord16(nack+16) > nackList[i])?true: false;
+                if(within16Away)
+                {
+                   // check for a wrap
+                    if(WebRtc_UWord16(nack+16) > 0xff00 && nackList[i] < 0x0fff)
+                    {
+                        // wrap
+                        within16Away = false;
+                    }
+                }
+
+                while( nackSize > i && within16Away)
+                {
+                    WebRtc_Word16 shift = (nackList[i]-nack)-1;
+                    assert(!(shift > 15) && !(shift < 0));
+
+                    bitmask += (1<< shift);
+                    i++;
+                    if(nackSize > i)
+                    {
+                        within16Away = (WebRtc_UWord16(nack+16) > nackList[i])?true: false;
+                        if(within16Away)
+                        {
+                            // check for a wrap
+                            if(WebRtc_UWord16(nack+16) > 0xff00 && nackList[i] < 0x0fff)
+                            {
+                                // wrap
+                                within16Away = false;
+                            }
+                        }
+                    }
+                }
+                ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+pos, bitmask);
+                pos += 2;
+            }
+            // sanity: do we have room for one more 4 byte block?
+            if(pos + 4 >= IP_PACKET_SIZE)
+            {
+                return -2;
+            }
+        } else
+        {
+            // no more in the list
+            rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+            rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        }
+    }
+    rtcpbuffer[nackSizePos]=(WebRtc_UWord8)(2+numOfNackFields);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildBYE(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(pos + 8 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    if(_includeCSRCs)
+    {
+        // Add a bye packet
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + 1 + _CSRCs;  // number of SSRC+CSRCs
+        rtcpbuffer[pos++]=(WebRtc_UWord8)203;
+
+        // length
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(1 + _CSRCs);
+
+        // Add our own SSRC
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+        pos += 4;
+
+        // add CSRCs
+        for(int i = 0; i < _CSRCs; i++)
+        {
+            ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _CSRC[i]);
+            pos += 4;
+        }
+    } else
+    {
+        // Add a bye packet
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + 1;  // number of SSRC+CSRCs
+        rtcpbuffer[pos++]=(WebRtc_UWord8)203;
+
+        // length
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)1;
+
+        // Add our own SSRC
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+        pos += 4;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildVoIPMetric(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(pos + 44 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+
+    // Add XR header
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)207;
+
+    WebRtc_UWord32 XRLengthPos = pos;
+
+    // handle length later on
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add a VoIP metrics block
+    rtcpbuffer[pos++]=7;
+    rtcpbuffer[pos++]=0;
+    rtcpbuffer[pos++]=0;
+    rtcpbuffer[pos++]=8;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.lossRate;
+    rtcpbuffer[pos++] = _xrVoIPMetric.discardRate;
+    rtcpbuffer[pos++] = _xrVoIPMetric.burstDensity;
+    rtcpbuffer[pos++] = _xrVoIPMetric.gapDensity;
+
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.burstDuration >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.burstDuration);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.gapDuration >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.gapDuration);
+
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.roundTripDelay >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.roundTripDelay);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.endSystemDelay >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.endSystemDelay);
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.signalLevel;
+    rtcpbuffer[pos++] = _xrVoIPMetric.noiseLevel;
+    rtcpbuffer[pos++] = _xrVoIPMetric.RERL;
+    rtcpbuffer[pos++] = _xrVoIPMetric.Gmin;
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.Rfactor;
+    rtcpbuffer[pos++] = _xrVoIPMetric.extRfactor;
+    rtcpbuffer[pos++] = _xrVoIPMetric.MOSLQ;
+    rtcpbuffer[pos++] = _xrVoIPMetric.MOSCQ;
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.RXconfig;
+    rtcpbuffer[pos++] = 0; // reserved
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBnominal >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBnominal);
+
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBmax >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBmax);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBabsMax >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBabsMax);
+
+    rtcpbuffer[XRLengthPos]=(WebRtc_UWord8)(0);
+    rtcpbuffer[XRLengthPos+1]=(WebRtc_UWord8)(10);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SendRTCP(const WebRtc_UWord32 packetTypeFlags,
+                     const WebRtc_Word32 nackSize,       // NACK
+                     const WebRtc_UWord16* nackList,     // NACK
+                     const bool repeat,                  // FIR
+                     const WebRtc_UWord64 pictureID)     // SLI & RPSI
+{
+    WebRtc_UWord32 rtcpPacketTypeFlags = packetTypeFlags;
+    WebRtc_UWord32 pos = 0;
+    WebRtc_UWord8 rtcpbuffer[IP_PACKET_SIZE];
+
+    do  // only to be able to use break :) (and the critsect must be inside its own scope)
+    {
+        // collect the received information
+        RTCPReportBlock received;
+        bool hasReceived = false;
+        WebRtc_UWord32 NTPsec = 0;
+        WebRtc_UWord32 NTPfrac = 0;
+        bool rtcpCompound = false;
+        WebRtc_UWord32 jitterTransmissionOffset = 0;
+
+        {
+          CriticalSectionScoped lock(_criticalSectionRTCPSender);
+          if(_method == kRtcpOff)
+          {
+              WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                           "%s invalid state", __FUNCTION__);
+              return -1;
+          }
+          rtcpCompound = (_method == kRtcpCompound) ? true : false;
+        }
+
+        if (rtcpCompound ||
+            rtcpPacketTypeFlags & kRtcpReport ||
+            rtcpPacketTypeFlags & kRtcpSr ||
+            rtcpPacketTypeFlags & kRtcpRr)
+        {
+            // get statistics from our RTPreceiver outside critsect
+            if(_rtpRtcp.ReportBlockStatistics(&received.fractionLost,
+                                              &received.cumulativeLost,
+                                              &received.extendedHighSeqNum,
+                                              &received.jitter,
+                                              &jitterTransmissionOffset) == 0)
+            {
+                hasReceived = true;
+
+                WebRtc_UWord32 lastReceivedRRNTPsecs = 0;
+                WebRtc_UWord32 lastReceivedRRNTPfrac = 0;
+                WebRtc_UWord32 remoteSR = 0;
+
+                // ok even if we have not received a SR, we will send 0 in that case
+                _rtpRtcp.LastReceivedNTP(lastReceivedRRNTPsecs,
+                                         lastReceivedRRNTPfrac,
+                                         remoteSR);
+
+                // get our NTP as late as possible to avoid a race
+                _clock.CurrentNTP(NTPsec, NTPfrac);
+
+                // Delay since last received report
+                WebRtc_UWord32 delaySinceLastReceivedSR = 0;
+                if((lastReceivedRRNTPsecs !=0) || (lastReceivedRRNTPfrac !=0))
+                {
+                    // get the 16 lowest bits of seconds and the 16 highest bits of fractions
+                    WebRtc_UWord32 now=NTPsec&0x0000FFFF;
+                    now <<=16;
+                    now += (NTPfrac&0xffff0000)>>16;
+
+                    WebRtc_UWord32 receiveTime = lastReceivedRRNTPsecs&0x0000FFFF;
+                    receiveTime <<=16;
+                    receiveTime += (lastReceivedRRNTPfrac&0xffff0000)>>16;
+
+                    delaySinceLastReceivedSR = now-receiveTime;
+                }
+                received.delaySinceLastSR = delaySinceLastReceivedSR;
+                received.lastSR = remoteSR;
+            } else
+            {
+                // we need to send our NTP even if we have not received any reports
+                _clock.CurrentNTP(NTPsec, NTPfrac);
+            }
+        }
+
+        CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+        if(_TMMBR ) // attach TMMBR to send and receive reports
+        {
+            rtcpPacketTypeFlags |= kRtcpTmmbr;
+        }
+        if(_appSend)
+        {
+            rtcpPacketTypeFlags |= kRtcpApp;
+            _appSend = false;
+        }
+        if(_REMB && _sendREMB)
+        {
+            // Always attach REMB to SR if that is configured. Note that REMB is
+            // only sent on one of the RTP modules in the REMB group.
+            rtcpPacketTypeFlags |= kRtcpRemb;
+        }        
+        if(_xrSendVoIPMetric)
+        {
+            rtcpPacketTypeFlags |= kRtcpXrVoipMetric;
+            _xrSendVoIPMetric = false;
+        }
+        if(_sendTMMBN)  // set when having received a TMMBR
+        {
+            rtcpPacketTypeFlags |= kRtcpTmmbn;
+            _sendTMMBN = false;
+        }
+
+        if(_method == kRtcpCompound)
+        {
+            if(_sending)
+            {
+                rtcpPacketTypeFlags |= kRtcpSr;
+            } else
+            {
+                rtcpPacketTypeFlags |= kRtcpRr;
+            }
+            if (_IJ && hasReceived)
+            {
+                rtcpPacketTypeFlags |= kRtcpTransmissionTimeOffset;
+            }
+        } else if(_method == kRtcpNonCompound)
+        {
+            if(rtcpPacketTypeFlags & kRtcpReport)
+            {
+                if(_sending)
+                {
+                    rtcpPacketTypeFlags |= kRtcpSr;
+                } else
+                {
+                    rtcpPacketTypeFlags |= kRtcpRr;
+                }
+            }
+        }
+        if( rtcpPacketTypeFlags & kRtcpRr ||
+            rtcpPacketTypeFlags & kRtcpSr)
+        {
+            // generate next time to send a RTCP report
+            // seeded from RTP constructor
+            WebRtc_Word32 random = rand() % 1000;
+            WebRtc_Word32 timeToNext = RTCP_INTERVAL_AUDIO_MS;
+
+            if(_audio)
+            {
+                timeToNext = (RTCP_INTERVAL_AUDIO_MS/2) + (RTCP_INTERVAL_AUDIO_MS*random/1000);
+            }else
+            {
+                WebRtc_UWord32 minIntervalMs = RTCP_INTERVAL_AUDIO_MS;
+                if(_sending)
+                {
+                    // min RTCP interval for video: 360 s divided by the send bandwidth in kbit/s
+                    WebRtc_UWord32 sendBitrateKbit = 0;
+                    WebRtc_UWord32 videoRate = 0;
+                    WebRtc_UWord32 fecRate = 0;
+                    WebRtc_UWord32 nackRate = 0;
+                    _rtpRtcp.BitrateSent(&sendBitrateKbit,
+                                         &videoRate,
+                                         &fecRate,
+                                         &nackRate);
+                    sendBitrateKbit /= 1000;
+                    if(sendBitrateKbit != 0)
+                    {
+                        minIntervalMs = 360000/sendBitrateKbit;
+                    }
+                }
+                if(minIntervalMs > RTCP_INTERVAL_VIDEO_MS)
+                {
+                    minIntervalMs = RTCP_INTERVAL_VIDEO_MS;
+                }
+                timeToNext = (minIntervalMs/2) + (minIntervalMs*random/1000);
+            }
+            _nextTimeToSendRTCP = _clock.GetTimeInMS() + timeToNext;
+        }
+
+        // if the data does not fit in the packet we fill it as much as possible
+        WebRtc_Word32 buildVal = 0;
+
+        if(rtcpPacketTypeFlags & kRtcpSr)
+        {
+            if(hasReceived)
+            {
+                buildVal = BuildSR(rtcpbuffer, pos, NTPsec, NTPfrac, &received);
+            } else
+            {
+                buildVal = BuildSR(rtcpbuffer, pos, NTPsec, NTPfrac);
+            }
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+            buildVal = BuildSDEC(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+
+        }else if(rtcpPacketTypeFlags & kRtcpRr)
+        {
+            if(hasReceived)
+            {
+                buildVal = BuildRR(rtcpbuffer, pos, NTPsec, NTPfrac,&received);
+            }else
+            {
+                buildVal = BuildRR(rtcpbuffer, pos, NTPsec, NTPfrac);
+            }
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+            // only if the CNAME is set
+            if(_CNAME[0] != 0)
+            {
+                buildVal = BuildSDEC(rtcpbuffer, pos);
+                if(buildVal == -1)
+                {
+                    return -1; // error
+                }
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpTransmissionTimeOffset)
+        {
+            // If present, this RTCP packet must be placed after a
+            // receiver report.
+            buildVal = BuildExtendedJitterReport(rtcpbuffer,
+                                                 pos,
+                                                 jitterTransmissionOffset);
+            if(buildVal == -1)
+            {
+                return -1; // error
+            }
+            else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpPli)
+        {
+            buildVal = BuildPLI(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpFir)
+        {
+            buildVal = BuildFIR(rtcpbuffer, pos, repeat);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpSli)
+        {
+            buildVal = BuildSLI(rtcpbuffer, pos, (WebRtc_UWord8)pictureID);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpRpsi)
+        {
+            const WebRtc_Word8 payloadType = _rtpRtcp.SendPayloadType();
+            if(payloadType == -1)
+            {
+                return -1;
+            }
+            buildVal = BuildRPSI(rtcpbuffer, pos, pictureID, (WebRtc_UWord8)payloadType);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpRemb)
+        {
+            buildVal = BuildREMB(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpBye)
+        {
+            buildVal = BuildBYE(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpApp)
+        {
+            buildVal = BuildAPP(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpTmmbr)
+        {
+            buildVal = BuildTMMBR(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpTmmbn)
+        {
+            buildVal = BuildTMMBN(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpNack)
+        {
+            buildVal = BuildNACK(rtcpbuffer, pos, nackSize, nackList);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpXrVoipMetric)
+        {
+            buildVal = BuildVoIPMetric(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+    }while (false);
+    // Sanity: don't send empty packets.
+    if (pos == 0)
+    {
+        return -1;
+    }
+    return SendToNetwork(rtcpbuffer, (WebRtc_UWord16)pos);
+}
+
+WebRtc_Word32
+RTCPSender::SendToNetwork(const WebRtc_UWord8* dataBuffer,
+                          const WebRtc_UWord16 length)
+{
+    CriticalSectionScoped lock(_criticalSectionTransport);
+    if(_cbTransport)
+    {
+        if(_cbTransport->SendRTCPPacket(_id, dataBuffer, length) > 0)
+        {
+            return 0;
+        }
+    }
+    return -1;
+}
+
+WebRtc_Word32
+RTCPSender::SetCSRCStatus(const bool include)
+{
+    _includeCSRCs = include;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                    const WebRtc_UWord8 arrLength)
+{
+    if(arrLength > kRtpCsrcSize)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        assert(false);
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    for(int i = 0; i < arrLength;i++)
+    {
+        _CSRC[i] = arrOfCSRC[i];
+    }
+    _CSRCs = arrLength;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetApplicationSpecificData(const WebRtc_UWord8 subType,
+                                       const WebRtc_UWord32 name,
+                                       const WebRtc_UWord8* data,
+                                       const WebRtc_UWord16 length)
+{
+    if(length %4 != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -1;
+    }
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if(_appData)
+    {
+        delete [] _appData;
+    }
+
+    _appSend = true;
+    _appSubType = subType;
+    _appName = name;
+    _appData = new WebRtc_UWord8[length];
+    _appLength = length;
+    memcpy(_appData, data, length);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    memcpy(&_xrVoIPMetric, VoIPMetric, sizeof(RTCPVoIPMetric));
+
+    _xrSendVoIPMetric = true;
+    return 0;
+}
+
+// called under critsect _criticalSectionRTCPSender
+WebRtc_Word32 RTCPSender::AddReportBlocks(WebRtc_UWord8* rtcpbuffer,
+                                          WebRtc_UWord32& pos,
+                                          WebRtc_UWord8& numberOfReportBlocks,
+                                          const RTCPReportBlock* received,
+                                          const WebRtc_UWord32 NTPsec,
+                                          const WebRtc_UWord32 NTPfrac) {
+  // sanity: room for at least one report block
+  if(pos + 24 >= IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  numberOfReportBlocks = _reportBlocks.size();
+  if (received) {
+    // also count the block we build for the remote sender
+    numberOfReportBlocks++;
+  }
+  if (received) {
+    // answer to the one that sends to me
+    _lastRTCPTime[0] = ModuleRTPUtility::ConvertNTPTimeToMS(NTPsec, NTPfrac);
+
+    // Remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // fraction lost
+    rtcpbuffer[pos++]=received->fractionLost;
+
+    // cumulative loss
+    ModuleRTPUtility::AssignUWord24ToBuffer(rtcpbuffer+pos,
+                                            received->cumulativeLost);
+    pos += 3;
+    // extended highest seq_no, contains the highest sequence number received
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                            received->extendedHighSeqNum);
+    pos += 4;
+
+    //Jitter
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, received->jitter);
+    pos += 4;
+
+    // Last SR timestamp, our NTP time when we received the last report
+    // This is the value that we read from the send report packet not when we
+    // received it...
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, received->lastSR);
+    pos += 4;
+
+    // Delay since last received report, i.e. time since we received the report
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                            received->delaySinceLastSR);
+    pos += 4;
+  }
+  if ((pos + _reportBlocks.size() * 24) >= IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator it =
+      _reportBlocks.begin();
+
+  for (; it != _reportBlocks.end(); it++) {
+    // we can have multiple report blocks in a conference
+    WebRtc_UWord32 remoteSSRC = it->first;
+    RTCPReportBlock* reportBlock = it->second;
+    if (reportBlock) {
+      // Remote SSRC
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, remoteSSRC);
+      pos += 4;
+
+      // fraction lost
+      rtcpbuffer[pos++] = reportBlock->fractionLost;
+
+      // cumulative loss
+      ModuleRTPUtility::AssignUWord24ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->cumulativeLost);
+      pos += 3;
+
+      // extended highest seq_no, contain the highest sequence number received
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->extendedHighSeqNum);
+      pos += 4;
+
+      //Jitter
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->jitter);
+      pos += 4;
+
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->lastSR);
+      pos += 4;
+
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->delaySinceLastSR);
+      pos += 4;
+    }
+  }
+  return pos;
+}
+
+// no callbacks allowed inside this function
+WebRtc_Word32
+RTCPSender::SetTMMBN(const TMMBRSet* boundingSet,
+                     const WebRtc_UWord32 maxBitrateKbit)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if (0 == _tmmbrHelp.SetTMMBRBoundingSetToSend(boundingSet, maxBitrateKbit))
+    {
+        _sendTMMBN = true;
+        return 0;
+    }
+    return -1;
+}
+
+WebRtc_Word32
+RTCPSender::RequestTMMBR(WebRtc_UWord32 estimatedBW, WebRtc_UWord32 packetOH)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    if(_TMMBR)
+    {
+        _tmmbr_Send = estimatedBW;
+        _packetOH_Send = packetOH;
+
+        return 0;
+    }
+    return -1;
+}
+
+RateControlRegion
+RTCPSender::UpdateOverUseState(const RateControlInput& rateControlInput, bool& firstOverUse)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _remoteRateControl.Update(rateControlInput, firstOverUse,
+                                     _clock.GetTimeInMS());
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_sender.h b/trunk/src/modules/rtp_rtcp/source/rtcp_sender.h
new file mode 100644
index 0000000..966b64f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_sender.h
@@ -0,0 +1,268 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
+
+#include <map>
+
+#include "typedefs.h"
+#include "rtcp_utility.h"
+#include "rtp_utility.h"
+#include "rtp_rtcp_defines.h"
+#include "remote_rate_control.h"
+#include "tmmbr_help.h"
+
+namespace webrtc {
+
+class ModuleRtpRtcpImpl; 
+
+class RTCPSender
+{
+public:
+    RTCPSender(const WebRtc_Word32 id, const bool audio,
+               RtpRtcpClock* clock, ModuleRtpRtcpImpl* owner);
+    virtual ~RTCPSender();
+
+    void ChangeUniqueId(const WebRtc_Word32 id);
+
+    WebRtc_Word32 Init();
+
+    WebRtc_Word32 RegisterSendTransport(Transport* outgoingTransport);
+
+    RTCPMethod Status() const;
+    WebRtc_Word32 SetRTCPStatus(const RTCPMethod method);
+
+    bool Sending() const;
+    WebRtc_Word32 SetSendingStatus(const bool enabled); // combine the functions
+
+    WebRtc_Word32 SetNackStatus(const bool enable);
+
+    void SetSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_Word32 SetRemoteSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS);
+
+    WebRtc_Word32 CNAME(char cName[RTCP_CNAME_SIZE]);
+    WebRtc_Word32 SetCNAME(const char cName[RTCP_CNAME_SIZE]);
+
+    WebRtc_Word32 AddMixedCNAME(const WebRtc_UWord32 SSRC,
+                                const char cName[RTCP_CNAME_SIZE]);
+
+    WebRtc_Word32 RemoveMixedCNAME(const WebRtc_UWord32 SSRC);
+
+    WebRtc_UWord32 SendTimeOfSendReport(const WebRtc_UWord32 sendReport);
+
+    bool TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP = false) const;
+
+    WebRtc_UWord32 LastSendReport(WebRtc_UWord32& lastRTCPTime);
+
+    WebRtc_Word32 SendRTCP(const WebRtc_UWord32 rtcpPacketTypeFlags,
+                           const WebRtc_Word32 nackSize = 0,
+                           const WebRtc_UWord16* nackList = 0,
+                           const bool repeat = false,
+                           const WebRtc_UWord64 pictureID = 0);
+
+    WebRtc_Word32 AddReportBlock(const WebRtc_UWord32 SSRC,
+                                 const RTCPReportBlock* receiveBlock);
+
+    WebRtc_Word32 RemoveReportBlock(const WebRtc_UWord32 SSRC);
+
+    /*
+    *  REMB
+    */
+    bool REMB() const;
+
+    WebRtc_Word32 SetREMBStatus(const bool enable);
+
+    WebRtc_Word32 SetREMBData(const WebRtc_UWord32 bitrate,
+                              const WebRtc_UWord8 numberOfSSRC,
+                              const WebRtc_UWord32* SSRC);
+
+    bool SetRemoteBitrateObserver(RtpRemoteBitrateObserver* observer);
+
+    void UpdateRemoteBitrateEstimate(unsigned int target_bitrate);
+
+    void ReceivedRemb(unsigned int estimated_bitrate);
+
+    /*
+    *   TMMBR
+    */
+    bool TMMBR() const;
+
+    WebRtc_Word32 SetTMMBRStatus(const bool enable);
+
+    WebRtc_Word32 SetTMMBN(const TMMBRSet* boundingSet,
+                           const WebRtc_UWord32 maxBitrateKbit);
+
+    WebRtc_Word32 RequestTMMBR(const WebRtc_UWord32 estimatedBW,
+                               const WebRtc_UWord32 packetOH);
+
+    /*
+    *   Extended jitter report
+    */
+    bool IJ() const;
+
+    WebRtc_Word32 SetIJStatus(const bool enable);
+
+    /*
+    *
+    */
+
+    WebRtc_Word32 SetApplicationSpecificData(const WebRtc_UWord8 subType,
+                                             const WebRtc_UWord32 name,
+                                             const WebRtc_UWord8* data,
+                                             const WebRtc_UWord16 length);
+
+    WebRtc_Word32 SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric);
+
+    WebRtc_Word32 SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                           const WebRtc_UWord8 arrLength);
+
+    WebRtc_Word32 SetCSRCStatus(const bool include);
+
+    /*
+    *   New bandwidth estimation
+    */
+
+    RateControlRegion UpdateOverUseState(const RateControlInput& rateControlInput, bool& firstOverUse);
+
+    WebRtc_UWord32 CalculateNewTargetBitrate(WebRtc_UWord32 RTT);
+
+    WebRtc_UWord32 LatestBandwidthEstimate() const;
+
+    // Returns true if there is a valid estimate of the incoming bitrate, false
+    // otherwise.
+    bool ValidBitrateEstimate() const;
+
+private:
+    WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
+                                const WebRtc_UWord16 length);
+
+    void UpdatePacketRate();
+
+    WebRtc_Word32 AddReportBlocks(WebRtc_UWord8* rtcpbuffer,
+                                WebRtc_UWord32& pos,
+                                WebRtc_UWord8& numberOfReportBlocks,
+                                const RTCPReportBlock* received,
+                                const WebRtc_UWord32 NTPsec,
+                                const WebRtc_UWord32 NTPfrac);
+
+    WebRtc_Word32 BuildSR(WebRtc_UWord8* rtcpbuffer,
+                        WebRtc_UWord32& pos,
+                        const WebRtc_UWord32 NTPsec,
+                        const WebRtc_UWord32 NTPfrac,
+                        const RTCPReportBlock* received = NULL);
+
+    WebRtc_Word32 BuildRR(WebRtc_UWord8* rtcpbuffer,
+                        WebRtc_UWord32& pos,
+                        const WebRtc_UWord32 NTPsec,
+                        const WebRtc_UWord32 NTPfrac,
+                        const RTCPReportBlock* received = NULL);
+
+    WebRtc_Word32 BuildExtendedJitterReport(
+        WebRtc_UWord8* rtcpbuffer,
+        WebRtc_UWord32& pos,
+        const WebRtc_UWord32 jitterTransmissionTimeOffset);
+
+    WebRtc_Word32 BuildSDEC(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildPLI(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildREMB(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildTMMBR(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildTMMBN(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildAPP(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildVoIPMetric(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildBYE(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildFIR(WebRtc_UWord8* rtcpbuffer,
+                           WebRtc_UWord32& pos,
+                           bool repeat);
+    WebRtc_Word32 BuildSLI(WebRtc_UWord8* rtcpbuffer,
+                         WebRtc_UWord32& pos,
+                         const WebRtc_UWord8 pictureID);
+    WebRtc_Word32 BuildRPSI(WebRtc_UWord8* rtcpbuffer,
+                         WebRtc_UWord32& pos,
+                         const WebRtc_UWord64 pictureID,
+                         const WebRtc_UWord8 payloadType);
+
+    WebRtc_Word32 BuildNACK(WebRtc_UWord8* rtcpbuffer,
+                          WebRtc_UWord32& pos,
+                          const WebRtc_Word32 nackSize,
+                          const WebRtc_UWord16* nackList);
+
+private:
+    WebRtc_Word32            _id;
+    const bool               _audio;
+    RtpRtcpClock&            _clock;
+    RTCPMethod               _method;
+
+    ModuleRtpRtcpImpl&      _rtpRtcp;
+
+    CriticalSectionWrapper* _criticalSectionTransport;
+    Transport*              _cbTransport;
+
+    CriticalSectionWrapper* _criticalSectionRTCPSender;
+    bool                    _usingNack;
+    bool                    _sending;
+    bool                    _sendTMMBN;
+    bool                    _REMB;
+    bool                    _sendREMB;
+    bool                    _TMMBR;
+    bool                    _IJ;
+
+    WebRtc_UWord32        _nextTimeToSendRTCP;
+
+    WebRtc_UWord32 _SSRC;
+    WebRtc_UWord32 _remoteSSRC;  // SSRC that we receive on our RTP channel
+    char _CNAME[RTCP_CNAME_SIZE];
+
+    std::map<WebRtc_UWord32, RTCPReportBlock*> _reportBlocks;
+    std::map<WebRtc_UWord32, RTCPUtility::RTCPCnameInformation*> _csrcCNAMEs;
+
+    WebRtc_Word32         _cameraDelayMS;
+
+    // Sent
+    WebRtc_UWord32        _lastSendReport[RTCP_NUMBER_OF_SR];  // allow packet loss and RTT above 1 sec
+    WebRtc_UWord32        _lastRTCPTime[RTCP_NUMBER_OF_SR];
+
+    // send CSRCs
+    WebRtc_UWord8         _CSRCs;
+    WebRtc_UWord32        _CSRC[kRtpCsrcSize];
+    bool                _includeCSRCs;
+
+    // Full intra request
+    WebRtc_UWord8         _sequenceNumberFIR;
+
+    // REMB    
+    WebRtc_UWord8       _lengthRembSSRC;
+    WebRtc_UWord8       _sizeRembSSRC;
+    WebRtc_UWord32*     _rembSSRC;
+    WebRtc_UWord32      _rembBitrate;
+    RtpRemoteBitrateObserver* _bitrate_observer;
+
+    TMMBRHelp           _tmmbrHelp;
+    WebRtc_UWord32      _tmmbr_Send;
+    WebRtc_UWord32      _packetOH_Send;
+    RemoteRateControl   _remoteRateControl;
+
+    // APP
+    bool                 _appSend;
+    WebRtc_UWord8        _appSubType;
+    WebRtc_UWord32       _appName;
+    WebRtc_UWord8*       _appData;
+    WebRtc_UWord16       _appLength;
+
+    // XR VoIP metric
+    bool                _xrSendVoIPMetric;
+    RTCPVoIPMetric      _xrVoIPMetric;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_sender_test.cc b/trunk/src/modules/rtp_rtcp/source/rtcp_sender_test.cc
new file mode 100644
index 0000000..7b5b057
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_sender_test.cc
@@ -0,0 +1,178 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the RTCPSender.
+ */
+
+#include <gtest/gtest.h>
+
+#include "common_types.h"
+#include "rtp_utility.h"
+#include "rtcp_sender.h"
+#include "rtcp_receiver.h"
+#include "rtp_rtcp_impl.h"
+
+namespace webrtc {
+
+void CreateRtpPacket(const bool marker_bit, const WebRtc_UWord8 payload,
+    const WebRtc_UWord16 seq_num, const WebRtc_UWord32 timestamp,
+    const WebRtc_UWord32 ssrc, WebRtc_UWord8* array,
+    WebRtc_UWord16* cur_pos) {
+  ASSERT_TRUE(payload <= 127);
+  array[(*cur_pos)++] = 0x80;
+  array[(*cur_pos)++] = payload | (marker_bit ? 0x80 : 0);
+  array[(*cur_pos)++] = seq_num >> 8;
+  array[(*cur_pos)++] = seq_num;
+  array[(*cur_pos)++] = timestamp >> 24;
+  array[(*cur_pos)++] = timestamp >> 16;
+  array[(*cur_pos)++] = timestamp >> 8;
+  array[(*cur_pos)++] = timestamp;
+  array[(*cur_pos)++] = ssrc >> 24;
+  array[(*cur_pos)++] = ssrc >> 16;
+  array[(*cur_pos)++] = ssrc >> 8;
+  array[(*cur_pos)++] = ssrc;
+  // VP8 payload header
+  array[(*cur_pos)++] = 0x90;  // X bit = 1
+  array[(*cur_pos)++] = 0x20;  // T bit = 1
+  array[(*cur_pos)++] = 0x00;  // TID = 0
+  array[(*cur_pos)++] = 0x00;  // Key frame
+  array[(*cur_pos)++] = 0x00;
+  array[(*cur_pos)++] = 0x00;
+  array[(*cur_pos)++] = 0x9d;
+  array[(*cur_pos)++] = 0x01;
+  array[(*cur_pos)++] = 0x2a;
+  array[(*cur_pos)++] = 128;
+  array[(*cur_pos)++] = 0;
+  array[(*cur_pos)++] = 96;
+  array[(*cur_pos)++] = 0;
+}
+
+class TestTransport : public Transport,
+                      public RtpData {
+ public:
+  TestTransport(RTCPReceiver* rtcp_receiver) :
+    rtcp_receiver_(rtcp_receiver) {
+  }
+
+  virtual int SendPacket(int /*ch*/, const void* /*data*/, int /*len*/) {
+    return -1;
+  }
+
+  virtual int SendRTCPPacket(int /*ch*/, const void *packet, int packet_len) {
+    RTCPUtility::RTCPParserV2 rtcpParser((WebRtc_UWord8*)packet,
+                                         (WebRtc_Word32)packet_len,
+                                         true); // Allow non-compound RTCP
+
+    EXPECT_TRUE(rtcpParser.IsValid());
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    EXPECT_EQ(0, rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation,
+                                                   &rtcpParser));
+    rtcp_packet_info_ = rtcpPacketInformation;
+
+    return packet_len;
+  }
+
+  virtual int OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord16 payloadSize,
+                                    const WebRtcRTPHeader* rtpHeader)
+                                    {return 0;}
+  RTCPReceiver* rtcp_receiver_;
+  RTCPHelp::RTCPPacketInformation rtcp_packet_info_;
+};
+
+class RtcpSenderTest : public ::testing::Test {
+ protected:
+  RtcpSenderTest() {
+    system_clock_ = ModuleRTPUtility::GetSystemClock();
+    rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(0, false, system_clock_);
+    rtcp_sender_ = new RTCPSender(0, false, system_clock_, rtp_rtcp_impl_);
+    rtcp_receiver_ = new RTCPReceiver(0, system_clock_, rtp_rtcp_impl_);
+    test_transport_ = new TestTransport(rtcp_receiver_);
+    // Initialize
+    EXPECT_EQ(0, rtcp_sender_->Init());
+    EXPECT_EQ(0, rtcp_sender_->RegisterSendTransport(test_transport_));
+    EXPECT_EQ(0, rtp_rtcp_impl_->RegisterIncomingDataCallback(test_transport_));
+  }
+  ~RtcpSenderTest() {
+    delete rtcp_sender_;
+    delete rtcp_receiver_;
+    delete rtp_rtcp_impl_;
+    delete test_transport_;
+    delete system_clock_;
+  }
+
+  RtpRtcpClock* system_clock_;
+  ModuleRtpRtcpImpl* rtp_rtcp_impl_;
+  RTCPSender* rtcp_sender_;
+  RTCPReceiver* rtcp_receiver_;
+  TestTransport* test_transport_;
+
+  enum {kMaxPacketLength = 1500};
+  uint8_t packet_[kMaxPacketLength];
+};
+
+TEST_F(RtcpSenderTest, RtcpOff) {
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpOff));
+  EXPECT_EQ(-1, rtcp_sender_->SendRTCP(kRtcpSr));
+}
+
+TEST_F(RtcpSenderTest, IJStatus) {
+  ASSERT_FALSE(rtcp_sender_->IJ());
+  EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
+  ASSERT_TRUE(rtcp_sender_->IJ());
+}
+
+TEST_F(RtcpSenderTest, TestCompound) {
+  const bool marker_bit = false;
+  const WebRtc_UWord8 payload = 100;
+  const WebRtc_UWord16 seq_num = 11111;
+  const WebRtc_UWord32 timestamp = 1234567;
+  const WebRtc_UWord32 ssrc = 0x11111111;
+  WebRtc_UWord16 packet_length = 0;
+  CreateRtpPacket(marker_bit, payload, seq_num, timestamp, ssrc, packet_,
+      &packet_length);
+  EXPECT_EQ(25, packet_length);
+
+  VideoCodec codec_inst;
+  strncpy(codec_inst.plName, "VP8", webrtc::kPayloadNameSize - 1);
+  codec_inst.codecType = webrtc::kVideoCodecVP8;
+  codec_inst.plType = payload;
+  EXPECT_EQ(0, rtp_rtcp_impl_->RegisterReceivePayload(codec_inst));
+
+  // Make sure RTP packet has been received.
+  EXPECT_EQ(0, rtp_rtcp_impl_->IncomingPacket(packet_, packet_length));
+
+  EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
+
+  // Transmission time offset packet should be received.
+  ASSERT_TRUE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
+      kRtcpTransmissionTimeOffset);
+}
+
+TEST_F(RtcpSenderTest, TestCompound_NoRtpReceived) {
+  EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
+
+  // Transmission time offset packet should not be received.
+  ASSERT_FALSE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
+      kRtcpTransmissionTimeOffset);
+}
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+
+  return RUN_ALL_TESTS();
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_utility.cc b/trunk/src/modules/rtp_rtcp/source/rtcp_utility.cc
new file mode 100644
index 0000000..f421672
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -0,0 +1,1509 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_utility.h"
+
+#include <cstring> // memcpy
+#include <cmath>   // ceil
+#include <cassert>
+
+
+namespace webrtc {
+// RTCPParserV2 : currently read only
+
+RTCPUtility::RTCPParserV2::RTCPParserV2(const WebRtc_UWord8* rtcpData,
+                                        size_t rtcpDataLength,
+                                        bool rtcpReducedSizeEnable)
+    : _ptrRTCPDataBegin(rtcpData),
+      _RTCPReducedSizeEnable(rtcpReducedSizeEnable),
+      _ptrRTCPDataEnd(rtcpData + rtcpDataLength),
+      _validPacket(false),
+      _ptrRTCPData(rtcpData),
+      _ptrRTCPBlockEnd(NULL),
+      _state(State_TopLevel),
+      _numberOfBlocks(0),
+      _packetType(kRtcpNotValidCode) {
+  Validate();
+}
+
+RTCPUtility::RTCPParserV2::~RTCPParserV2() {
+}
+
+ptrdiff_t
+RTCPUtility::RTCPParserV2::LengthLeft() const
+{
+    return (_ptrRTCPDataEnd- _ptrRTCPData);
+}
+
+RTCPUtility::RTCPPacketTypes
+RTCPUtility::RTCPParserV2::PacketType() const
+{
+    return _packetType;
+}
+
+const RTCPUtility::RTCPPacket&
+RTCPUtility::RTCPParserV2::Packet() const
+{
+    return _packet;
+}
+
+RTCPUtility::RTCPPacketTypes
+RTCPUtility::RTCPParserV2::Begin()
+{
+    _ptrRTCPData = _ptrRTCPDataBegin;
+
+    return Iterate();
+}
+
+RTCPUtility::RTCPPacketTypes
+RTCPUtility::RTCPParserV2::Iterate()
+{
+    // Reset packet type
+    _packetType = kRtcpNotValidCode;
+
+    if (IsValid())
+    {
+        switch (_state)
+        {
+        case State_TopLevel:
+            IterateTopLevel();
+            break;
+        case State_ReportBlockItem:
+            IterateReportBlockItem();
+            break;
+        case State_SDESChunk:
+            IterateSDESChunk();
+            break;
+        case State_BYEItem:
+            IterateBYEItem();
+            break;
+        case State_ExtendedJitterItem:
+            IterateExtendedJitterItem();
+            break;
+        case State_RTPFB_NACKItem:
+            IterateNACKItem();
+            break;
+        case State_RTPFB_TMMBRItem:
+            IterateTMMBRItem();
+            break;
+        case State_RTPFB_TMMBNItem:
+            IterateTMMBNItem();
+            break;
+        case State_PSFB_SLIItem:
+            IterateSLIItem();
+            break;
+        case State_PSFB_RPSIItem:
+            IterateRPSIItem();
+            break;
+        case State_PSFB_FIRItem:
+            IterateFIRItem();
+            break;
+        case State_PSFB_AppItem:
+            IteratePsfbAppItem();
+            break;
+        case State_PSFB_REMBItem:
+            IteratePsfbREMBItem();
+            break;
+        case State_AppItem:
+            IterateAppItem();
+            break;
+        default:
+            assert(false); // Invalid state!
+            break;
+        }
+    }
+    return _packetType;
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateTopLevel()
+{
+    for (;;)
+    {
+        RTCPCommonHeader header;
+
+        const bool success = RTCPParseCommonHeader(_ptrRTCPData,
+                                                    _ptrRTCPDataEnd,
+                                                    header);
+
+        if (!success)
+        {
+            return;
+        }
+        _ptrRTCPBlockEnd = _ptrRTCPData + header.LengthInOctets;
+        if (_ptrRTCPBlockEnd > _ptrRTCPDataEnd)
+        {
+            // Bad block!
+            return;
+        }
+
+        switch (header.PT)
+        {
+        case PT_SR:
+        {
+            // number of Report blocks
+            _numberOfBlocks = header.IC;
+            ParseSR();
+            return;
+        }
+        case PT_RR:
+        {
+            // number of Report blocks
+            _numberOfBlocks = header.IC;
+            ParseRR();
+            return;
+        }
+        case PT_SDES:
+        {
+            // number of SDES blocks
+            _numberOfBlocks = header.IC;
+            const bool ok = ParseSDES();
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        case PT_BYE:
+        {
+            _numberOfBlocks = header.IC;
+            const bool ok = ParseBYE();
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        case PT_IJ:
+        {
+            // number of Report blocks
+            _numberOfBlocks = header.IC;
+            ParseIJ();
+            return;
+        }
+        case PT_RTPFB: // Fall through!
+        case PT_PSFB:
+        {
+            const bool ok = ParseFBCommon(header);
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+
+            return;
+        }
+        case PT_APP:
+        {
+            const bool ok = ParseAPP(header);
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        case PT_XR:
+        {
+            const bool ok = ParseXR();
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        default:
+            // Not supported! Skip!
+            EndCurrentBlock();
+            break;
+        }
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateReportBlockItem()
+{
+    const bool success = ParseReportBlockItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateSDESChunk()
+{
+    const bool success = ParseSDESChunk();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateBYEItem()
+{
+    const bool success = ParseBYEItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateExtendedJitterItem()
+{
+    const bool success = ParseIJItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateNACKItem()
+{
+    const bool success = ParseNACKItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateTMMBRItem()
+{
+    const bool success = ParseTMMBRItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateTMMBNItem()
+{
+    const bool success = ParseTMMBNItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateSLIItem()
+{
+    const bool success = ParseSLIItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateRPSIItem()
+{
+    const bool success = ParseRPSIItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateFIRItem()
+{
+    const bool success = ParseFIRItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IteratePsfbAppItem()
+{
+    const bool success = ParsePsfbAppItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IteratePsfbREMBItem()
+{
+    const bool success = ParsePsfbREMBItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateAppItem()
+{
+    const bool success = ParseAPPItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::Validate()
+{
+    if (_ptrRTCPData == NULL)
+    {
+        return; // NOT VALID
+    }
+
+    RTCPCommonHeader header;
+    const bool success = RTCPParseCommonHeader(_ptrRTCPDataBegin,
+                                               _ptrRTCPDataEnd,
+                                               header);
+
+    if (!success)
+    {
+        return; // NOT VALID!
+    }
+
+    // * if (!reducedSize) : first packet must be RR or SR.
+    //
+    // * The padding bit (P) should be zero for the first packet of a
+    //   compound RTCP packet because padding should only be applied,
+    //   if it is needed, to the last packet. (NOT CHECKED!)
+    //
+    // * The length fields of the individual RTCP packets must add up
+    //   to the overall length of the compound RTCP packet as
+    //   received.  This is a fairly strong check. (NOT CHECKED!)
+
+    if (!_RTCPReducedSizeEnable)
+    {
+        if ((header.PT != PT_SR) && (header.PT != PT_RR))
+        {
+            return; // NOT VALID
+        }
+    }
+
+    _validPacket = true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::IsValid() const
+{
+    return _validPacket;
+}
+
+void
+RTCPUtility::RTCPParserV2::EndCurrentBlock()
+{
+    _ptrRTCPData = _ptrRTCPBlockEnd;
+}
+
+bool
+RTCPUtility::RTCPParseCommonHeader( const WebRtc_UWord8* ptrDataBegin,
+                                    const WebRtc_UWord8* ptrDataEnd,
+                                    RTCPCommonHeader& parsedHeader)
+{
+    if (!ptrDataBegin || !ptrDataEnd)
+    {
+        return false;
+    }
+
+    //  0                   1                   2                   3
+    //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |V=2|P|    IC   |      PT       |             length            |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    //
+    // Common header for all RTCP packets, 4 octets.
+
+    if ((ptrDataEnd - ptrDataBegin) < 4)
+    {
+        return false;
+    }
+
+    parsedHeader.V              = ptrDataBegin[0] >> 6;
+    parsedHeader.P              = ((ptrDataBegin[0] & 0x20) == 0) ? false : true;
+    parsedHeader.IC             = ptrDataBegin[0] & 0x1f;
+    parsedHeader.PT             = ptrDataBegin[1];
+
+    parsedHeader.LengthInOctets = (ptrDataBegin[2] << 8) + ptrDataBegin[3] + 1;
+    parsedHeader.LengthInOctets *= 4;
+
+    if(parsedHeader.LengthInOctets == 0)
+    {
+        return false;
+    }
+    // Check if RTP version field == 2
+    if (parsedHeader.V != 2)
+    {
+        return false;
+    }
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseRR()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        return false;
+    }
+
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packetType = kRtcpRrCode;
+
+    _packet.RR.SenderSSRC = *_ptrRTCPData++ << 24;
+    _packet.RR.SenderSSRC += *_ptrRTCPData++ << 16;
+    _packet.RR.SenderSSRC += *_ptrRTCPData++ << 8;
+    _packet.RR.SenderSSRC += *_ptrRTCPData++;
+
+    _packet.RR.NumberOfReportBlocks = _numberOfBlocks;
+
+    // State transition
+    _state = State_ReportBlockItem;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSR()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 28)
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packetType = kRtcpSrCode;
+
+    _packet.SR.SenderSSRC = *_ptrRTCPData++ << 24;
+    _packet.SR.SenderSSRC += *_ptrRTCPData++ << 16;
+    _packet.SR.SenderSSRC += *_ptrRTCPData++ << 8;
+    _packet.SR.SenderSSRC += *_ptrRTCPData++;
+
+    _packet.SR.NTPMostSignificant = *_ptrRTCPData++ << 24;
+    _packet.SR.NTPMostSignificant += *_ptrRTCPData++ << 16;
+    _packet.SR.NTPMostSignificant += *_ptrRTCPData++ << 8;
+    _packet.SR.NTPMostSignificant += *_ptrRTCPData++;
+
+    _packet.SR.NTPLeastSignificant = *_ptrRTCPData++ << 24;
+    _packet.SR.NTPLeastSignificant += *_ptrRTCPData++ << 16;
+    _packet.SR.NTPLeastSignificant += *_ptrRTCPData++ << 8;
+    _packet.SR.NTPLeastSignificant += *_ptrRTCPData++;
+
+    _packet.SR.RTPTimestamp = *_ptrRTCPData++ << 24;
+    _packet.SR.RTPTimestamp += *_ptrRTCPData++ << 16;
+    _packet.SR.RTPTimestamp += *_ptrRTCPData++ << 8;
+    _packet.SR.RTPTimestamp += *_ptrRTCPData++;
+
+    _packet.SR.SenderPacketCount = *_ptrRTCPData++ << 24;
+    _packet.SR.SenderPacketCount += *_ptrRTCPData++ << 16;
+    _packet.SR.SenderPacketCount += *_ptrRTCPData++ << 8;
+    _packet.SR.SenderPacketCount += *_ptrRTCPData++;
+
+    _packet.SR.SenderOctetCount = *_ptrRTCPData++ << 24;
+    _packet.SR.SenderOctetCount += *_ptrRTCPData++ << 16;
+    _packet.SR.SenderOctetCount += *_ptrRTCPData++ << 8;
+    _packet.SR.SenderOctetCount += *_ptrRTCPData++;
+
+    _packet.SR.NumberOfReportBlocks = _numberOfBlocks;
+
+    // State transition
+    if(_numberOfBlocks != 0)
+    {
+        _state = State_ReportBlockItem;
+    }else
+    {
+        // don't go to state report block item if 0 report blocks
+        _state = State_TopLevel;
+        EndCurrentBlock();
+    }
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseReportBlockItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 24 || _numberOfBlocks <= 0)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packet.ReportBlockItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.SSRC += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.FractionLost = *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.CumulativeNumOfPacketsLost = *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.CumulativeNumOfPacketsLost += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.CumulativeNumOfPacketsLost += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.Jitter = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.Jitter += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.Jitter += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.Jitter += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.LastSR = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.LastSR += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.LastSR += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.LastSR += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.DelayLastSR = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.DelayLastSR += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.DelayLastSR += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.DelayLastSR += *_ptrRTCPData++;
+
+    _numberOfBlocks--;
+    _packetType = kRtcpReportBlockItemCode;
+    return true;
+}
+
+/* From RFC 5450: Transmission Time Offsets in RTP Streams.
+      0                   1                   2                   3
+      0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ hdr |V=2|P|    RC   |   PT=IJ=195   |             length            |
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     |                      inter-arrival jitter                     |
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     .                                                               .
+     .                                                               .
+     .                                                               .
+     |                      inter-arrival jitter                     |
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+
+bool
+RTCPUtility::RTCPParserV2::ParseIJ()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packetType = kRtcpExtendedIjCode;
+
+    // State transition
+    _state = State_ExtendedJitterItem;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseIJItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4 || _numberOfBlocks <= 0)
+    {
+        _state = State_TopLevel;
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packet.ExtendedJitterReportItem.Jitter = *_ptrRTCPData++ << 24;
+    _packet.ExtendedJitterReportItem.Jitter += *_ptrRTCPData++ << 16;
+    _packet.ExtendedJitterReportItem.Jitter += *_ptrRTCPData++ << 8;
+    _packet.ExtendedJitterReportItem.Jitter += *_ptrRTCPData++;
+
+    _numberOfBlocks--;
+    _packetType = kRtcpExtendedIjItemCode;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSDES()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _ptrRTCPData += 4; // Skip header
+
+    _state = State_SDESChunk;
+    _packetType = kRtcpSdesCode;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSDESChunk()
+{
+    if(_numberOfBlocks <= 0)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _numberOfBlocks--;
+
+    // Find CName item in a SDES chunk.
+    while (_ptrRTCPData < _ptrRTCPBlockEnd)
+    {
+        const ptrdiff_t dataLen = _ptrRTCPBlockEnd - _ptrRTCPData;
+        if (dataLen < 4)
+        {
+            _state = State_TopLevel;
+
+            EndCurrentBlock();
+            return false;
+        }
+
+        WebRtc_UWord32 SSRC = *_ptrRTCPData++ << 24;
+        SSRC += *_ptrRTCPData++ << 16;
+        SSRC += *_ptrRTCPData++ << 8;
+        SSRC += *_ptrRTCPData++;
+
+        const bool foundCname = ParseSDESItem();
+        if (foundCname)
+        {
+            _packet.CName.SenderSSRC = SSRC; // Add SSRC
+            return true;
+        }
+    }
+    _state = State_TopLevel;
+
+    EndCurrentBlock();
+    return false;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSDESItem()
+{
+    // Find CName
+    // Only the CNAME item is mandatory. RFC 3550 page 46
+    bool foundCName = false;
+
+    size_t itemOctetsRead = 0;
+    while (_ptrRTCPData < _ptrRTCPBlockEnd)
+    {
+        const WebRtc_UWord8 tag = *_ptrRTCPData++;
+        ++itemOctetsRead;
+
+        if (tag == 0)
+        {
+            // End tag! 4 oct aligned
+            while ((itemOctetsRead++ % 4) != 0)
+            {
+                ++_ptrRTCPData;
+            }
+            return foundCName;
+        }
+
+        if (_ptrRTCPData < _ptrRTCPBlockEnd)
+        {
+            const WebRtc_UWord8 len = *_ptrRTCPData++;
+            ++itemOctetsRead;
+
+            if (tag == 1)
+            {
+                // CNAME
+
+                // Sanity
+                if ((_ptrRTCPData + len) >= _ptrRTCPBlockEnd)
+                {
+                    _state = State_TopLevel;
+
+                    EndCurrentBlock();
+                    return false;
+                }
+                WebRtc_UWord8 i = 0;
+                for (; i < len; ++i)
+                {
+                    const WebRtc_UWord8 c = _ptrRTCPData[i];
+                    if ((c < ' ') || (c > '{') || (c == '%') || (c == '\\'))
+                    {
+                        // Illegal char
+                        _state = State_TopLevel;
+
+                        EndCurrentBlock();
+                        return false;
+                    }
+                    _packet.CName.CName[i] = c;
+                }
+                // Make sure we are null terminated.
+                _packet.CName.CName[i] = 0;
+                _packetType = kRtcpSdesChunkCode;
+
+                foundCName = true;
+            }
+            _ptrRTCPData += len;
+            itemOctetsRead += len;
+        }
+    }
+
+    // No end tag found!
+    _state = State_TopLevel;
+
+    EndCurrentBlock();
+    return false;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseBYE()
+{
+    _ptrRTCPData += 4; // Skip header
+
+    _state = State_BYEItem;
+
+    return ParseBYEItem();
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseBYEItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+    if (length < 4 || _numberOfBlocks == 0)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpByeCode;
+
+    _packet.BYE.SenderSSRC = *_ptrRTCPData++ << 24;
+    _packet.BYE.SenderSSRC += *_ptrRTCPData++ << 16;
+    _packet.BYE.SenderSSRC += *_ptrRTCPData++ << 8;
+    _packet.BYE.SenderSSRC += *_ptrRTCPData++;
+
+    // we can have several CSRCs attached
+
+    // sanity
+    if(length >= 4*_numberOfBlocks)
+    {
+        _ptrRTCPData += (_numberOfBlocks -1)*4;
+    }
+    _numberOfBlocks = 0;
+
+    return true;
+}
+/*
+  0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |V=2|P|reserved |   PT=XR=207   |             length            |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |                              SSRC                             |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   :                         report blocks                         :
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+bool RTCPUtility::RTCPParserV2::ParseXR()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packet.XR.OriginatorSSRC = *_ptrRTCPData++ << 24;
+    _packet.XR.OriginatorSSRC += *_ptrRTCPData++ << 16;
+    _packet.XR.OriginatorSSRC += *_ptrRTCPData++ << 8;
+    _packet.XR.OriginatorSSRC += *_ptrRTCPData++;
+
+    return ParseXRItem();
+}
+/*
+    0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      BT       | type-specific |         block length          |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    :             type-specific block contents                      :
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+
+bool
+RTCPUtility::RTCPParserV2::ParseXRItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4) //
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    WebRtc_UWord8 blockType = *_ptrRTCPData++;
+    WebRtc_UWord8 typeSpecific = *_ptrRTCPData++;
+
+    WebRtc_UWord16 blockLength = *_ptrRTCPData++ << 8;
+    blockLength = *_ptrRTCPData++;
+
+    if(blockType == 7 && typeSpecific == 0)
+    {
+        if(blockLength != 8)
+        {
+            EndCurrentBlock();
+            return false;
+        }
+        return ParseXRVOIPMetricItem();
+    }else
+    {
+        EndCurrentBlock();
+        return false;
+    }
+}
+/*
+ 0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |     BT=7      |   reserved    |       block length = 8        |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |                        SSRC of source                         |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   loss rate   | discard rate  | burst density |  gap density  |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |       burst duration          |         gap duration          |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |     round trip delay          |       end system delay        |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   | signal level  |  noise level  |     RERL      |     Gmin      |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   R factor    | ext. R factor |    MOS-LQ     |    MOS-CQ     |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   RX config   |   reserved    |          JB nominal           |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |          JB maximum           |          JB abs max           |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+bool
+RTCPUtility::RTCPParserV2::ParseXRVOIPMetricItem()
+{
+    // Parses one XR VoIP Metrics report block (RFC 3611 section 4.7) whose
+    // layout is shown in the diagram above. The block is a fixed 28 bytes
+    // after the XR block header; all multi-byte fields are read big-endian.
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 28)
+    {
+        // Truncated block: abort parsing of the remainder of this packet.
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpXrVoipMetricCode;
+
+    // 32-bit SSRC of the source the metrics describe.
+    _packet.XRVOIPMetricItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++;
+
+    // Four single-byte packet loss/discard statistics.
+    _packet.XRVOIPMetricItem.lossRate = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.discardRate = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.burstDensity = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.gapDensity = *_ptrRTCPData++;
+
+    // Four 16-bit duration/delay fields.
+    _packet.XRVOIPMetricItem.burstDuration = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.burstDuration += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.gapDuration = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.gapDuration += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.roundTripDelay = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.roundTripDelay += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.endSystemDelay = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.endSystemDelay += *_ptrRTCPData++;
+
+    // Nine single-byte signal/quality fields.
+    _packet.XRVOIPMetricItem.signalLevel = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.noiseLevel = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.RERL = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.Gmin = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.Rfactor = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.extRfactor = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.MOSLQ = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.MOSCQ = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.RXconfig = *_ptrRTCPData++;
+    _ptrRTCPData++; // skip reserved
+
+    // Three 16-bit jitter-buffer fields (nominal / maximum / absolute max).
+    _packet.XRVOIPMetricItem.JBnominal = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.JBnominal += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.JBmax = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.JBmax += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.JBabsMax = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.JBabsMax += *_ptrRTCPData++;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseFBCommon(const RTCPCommonHeader& header)
+{
+    // Parses the 12-byte common part of a feedback packet (RFC 4585
+    // section 6.1: RTCP header, sender SSRC, media SSRC) and dispatches on
+    // the FMT field (header.IC) to the parser state for the specific FCI
+    // item type. Returns false and skips the block for unknown FMT values.
+    assert((header.PT == PT_RTPFB) || (header.PT == PT_PSFB)); // Parser logic check
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 12) // 4 * 3, RFC4585 section 6.1
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip RTCP header
+
+    // Both SSRC fields are read big-endian.
+    WebRtc_UWord32 senderSSRC = *_ptrRTCPData++ << 24;
+    senderSSRC += *_ptrRTCPData++ << 16;
+    senderSSRC += *_ptrRTCPData++ << 8;
+    senderSSRC += *_ptrRTCPData++;
+
+    WebRtc_UWord32 mediaSSRC = *_ptrRTCPData++ << 24;
+    mediaSSRC += *_ptrRTCPData++ << 16;
+    mediaSSRC += *_ptrRTCPData++ << 8;
+    mediaSSRC += *_ptrRTCPData++;
+
+    if (header.PT == PT_RTPFB)
+    {
+        // Transport layer feedback
+
+        switch (header.IC)
+        {
+        case 1:
+        {
+            // NACK
+            _packetType             = kRtcpRtpfbNackCode;
+            _packet.NACK.SenderSSRC = senderSSRC;
+            _packet.NACK.MediaSSRC  = mediaSSRC;
+
+            _state = State_RTPFB_NACKItem;
+
+            return true;
+        }
+        case 2:
+        {
+            // used to be ACK is this code point, which is removed
+            // conflicts with http://tools.ietf.org/html/draft-levin-avt-rtcp-burst-00
+            break;
+        }
+        case 3:
+        {
+            // TMMBR
+            _packetType              = kRtcpRtpfbTmmbrCode;
+            _packet.TMMBR.SenderSSRC = senderSSRC;
+            _packet.TMMBR.MediaSSRC  = mediaSSRC;
+
+            _state = State_RTPFB_TMMBRItem;
+
+            return true;
+        }
+        case 4:
+        {
+            // TMMBN
+            _packetType              = kRtcpRtpfbTmmbnCode;
+            _packet.TMMBN.SenderSSRC = senderSSRC;
+            _packet.TMMBN.MediaSSRC  = mediaSSRC;
+
+            _state = State_RTPFB_TMMBNItem;
+
+            return true;
+        }
+        case 5:
+         {
+            // RTCP-SR-REQ Rapid Synchronisation of RTP Flows
+            // draft-perkins-avt-rapid-rtp-sync-03.txt
+            // trigger a new RTCP SR
+            _packetType = kRtcpRtpfbSrReqCode;
+
+            // Note: No state transition, SR REQ is empty!
+            return true;
+        }
+        default:
+            break;
+        }
+        // Unknown transport-layer FMT: skip the rest of this block.
+        EndCurrentBlock();
+        return false;
+    }
+    else if (header.PT == PT_PSFB)
+    {
+        // Payload specific feedback
+        switch (header.IC)
+        {
+        case 1:
+            // PLI
+            _packetType            = kRtcpPsfbPliCode;
+            _packet.PLI.SenderSSRC = senderSSRC;
+            _packet.PLI.MediaSSRC  = mediaSSRC;
+
+            // Note: No state transition, PLI FCI is empty!
+            return true;
+        case 2:
+            // SLI
+            _packetType            = kRtcpPsfbSliCode;
+            _packet.SLI.SenderSSRC = senderSSRC;
+            _packet.SLI.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_SLIItem;
+
+            return true;
+        case 3:
+            // RPSI
+            _packetType             = kRtcpPsfbRpsiCode;
+            _packet.RPSI.SenderSSRC = senderSSRC;
+            _packet.RPSI.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_RPSIItem;
+            return true;
+        case 4:
+            // FIR
+            _packetType            = kRtcpPsfbFirCode;
+            _packet.FIR.SenderSSRC = senderSSRC;
+            _packet.FIR.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_FIRItem;
+            return true;
+        case 15:
+            // Application layer feedback; only REMB is recognized
+            // (identified by the "REMB" tag in ParsePsfbAppItem).
+            _packetType            = kRtcpPsfbAppCode;
+
+            _state = State_PSFB_AppItem;
+            return true;
+        default:
+            break;
+        }
+
+        EndCurrentBlock();
+        return false;
+    }
+    else
+    {
+        // Unreachable given the assert above; kept for defensive parsing.
+        assert(false);
+
+        EndCurrentBlock();
+        return false;
+    }
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseRPSIItem()
+{
+    // Parses an RPSI FCI: one padding-count byte, one payload-type byte,
+    // then the codec-defined native bit string.
+    // RFC 4585 6.3.3.  Reference Picture Selection Indication (RPSI)
+    /*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      PB       |0| Payload Type|    Native RPSI bit string     |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |   defined per codec          ...                | Padding (0) |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    */
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    // Minimum: PB + PT + at least two bytes of bit string.
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    // Reject bit strings larger than our fixed-size storage.
+    if(length > 2+RTCP_RPSI_DATA_SIZE)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpPsfbRpsiCode;
+
+    WebRtc_UWord8 paddingBits = *_ptrRTCPData++;
+    _packet.RPSI.PayloadType = *_ptrRTCPData++;
+
+    // Reject a bogus padding count claiming more padding than there are
+    // bits in the string; without this check the subtraction below would
+    // wrap around in the unsigned 16-bit NumberOfValidBits.
+    if (paddingBits > WebRtc_UWord16(length-2)*8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    memcpy(_packet.RPSI.NativeBitString, _ptrRTCPData, length-2);
+
+    _packet.RPSI.NumberOfValidBits = WebRtc_UWord16(length-2)*8 - paddingBits;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseNACKItem()
+{
+    // Parses one generic NACK FCI entry (RFC 4585 6.2.1): a 16-bit packet
+    // ID followed by a 16-bit bitmask of following lost packets (BLP).
+    const ptrdiff_t bytesLeft = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (bytesLeft < 4)
+    {
+        // Not enough data for a full item; stop parsing this packet.
+        _state = State_TopLevel;
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpRtpfbNackItemCode;
+
+    // Both fields are big-endian 16-bit values.
+    const WebRtc_UWord8* item = _ptrRTCPData;
+    _packet.NACKItem.PacketID = (item[0] << 8) + item[1];
+    _packet.NACKItem.BitMask  = (item[2] << 8) + item[3];
+    _ptrRTCPData += 4;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParsePsfbAppItem()
+{
+    // Checks that this application-layer FB message carries the 4-byte
+    // ASCII tag "REMB"; anything else aborts parsing of the block.
+    const ptrdiff_t bytesLeft = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (bytesLeft < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    static const char kRembTag[4] = {'R', 'E', 'M', 'B'};
+    for (int i = 0; i < 4; ++i)
+    {
+        if (*_ptrRTCPData++ != kRembTag[i])
+        {
+            _state = State_TopLevel;
+
+            EndCurrentBlock();
+            return false;
+        }
+    }
+
+    _packetType = kRtcpPsfbRembItemCode;
+    _state = State_PSFB_REMBItem;
+    return true;
+}
+ 
+bool
+RTCPUtility::RTCPParserV2::ParsePsfbREMBItem()
+{
+    // Parses a REMB (receiver estimated max bitrate) item: one byte SSRC
+    // count, a 6-bit exponent plus 18-bit mantissa bitrate, then the list
+    // of SSRCs (currently skipped).
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    const WebRtc_UWord8 numSSRC = *_ptrRTCPData++;
+
+    // Verify the advertised SSRC list actually fits inside this block so
+    // that the pointer advance at the end can never move _ptrRTCPData past
+    // _ptrRTCPBlockEnd (which would corrupt subsequent length computations).
+    if (length < 4 + 4 * static_cast<ptrdiff_t>(numSSRC))
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    // 6-bit exponent in the top of the byte, 18-bit mantissa below it.
+    const WebRtc_UWord8 brExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+
+    WebRtc_UWord32 brMantissa = (_ptrRTCPData[0] & 0x03) << 16;
+    brMantissa += (_ptrRTCPData[1] << 8);
+    brMantissa += (_ptrRTCPData[2]);
+
+    _ptrRTCPData += 3; // Fwd read data
+    _packet.REMB.BitRate = (brMantissa << brExp);
+
+    _ptrRTCPData += 4 * numSSRC; // Ignore the SSRCs for now
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseTMMBRItem()
+{
+    // Parses one TMMBR FCI entry: 32-bit SSRC, then a packed word of
+    // 6-bit exponent, 17-bit mantissa and 9-bit measured overhead.
+    // RFC 5104 4.2.1. Temporary Maximum Media Stream Bit Rate Request (TMMBR)
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpRtpfbTmmbrItemCode;
+
+    _packet.TMMBRItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.TMMBRItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.TMMBRItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.TMMBRItem.SSRC += *_ptrRTCPData++;
+
+    // Top 6 bits of the first byte: MxTBR exponent.
+    WebRtc_UWord8 mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+
+    // 17-bit mantissa: 2 bits from byte 0, 8 from byte 1, 7 from byte 2.
+    WebRtc_UWord32 mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
+    mxtbrMantissa += (_ptrRTCPData[1] << 7);
+    mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+
+    // 9-bit measured overhead: low bit of byte 2 plus byte 3.
+    WebRtc_UWord32 measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
+    measuredOH += _ptrRTCPData[3];
+
+    _ptrRTCPData += 4; // Fwd read data
+
+    // Converted from bits/s to kbits/s for storage.
+    _packet.TMMBRItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+    _packet.TMMBRItem.MeasuredOverhead     = measuredOH;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseTMMBNItem()
+{
+    // Parses one TMMBN FCI entry; identical wire layout to TMMBR
+    // (SSRC, 6-bit exponent, 17-bit mantissa, 9-bit overhead).
+    // RFC 5104 4.2.2. Temporary Maximum Media Stream Bit Rate Notification (TMMBN)
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpRtpfbTmmbnItemCode;
+
+    _packet.TMMBNItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.TMMBNItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.TMMBNItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.TMMBNItem.SSRC += *_ptrRTCPData++;
+
+    // Top 6 bits of the first byte: MxTBR exponent.
+    WebRtc_UWord8 mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+
+    // 17-bit mantissa: 2 bits from byte 0, 8 from byte 1, 7 from byte 2.
+    WebRtc_UWord32 mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
+    mxtbrMantissa += (_ptrRTCPData[1] << 7);
+    mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+
+    // 9-bit measured overhead: low bit of byte 2 plus byte 3.
+    WebRtc_UWord32 measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
+    measuredOH += _ptrRTCPData[3];
+
+    _ptrRTCPData += 4; // Fwd read data
+
+    // Converted from bits/s to kbits/s for storage.
+    _packet.TMMBNItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+    _packet.TMMBNItem.MeasuredOverhead     = measuredOH;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSLIItem()
+{
+    // Parses one SLI FCI entry: 13-bit first macroblock, 13-bit count,
+    // 6-bit picture ID, packed into a single 32-bit big-endian word.
+    // RFC 4585 6.3.2.  Slice Loss Indication (SLI)
+    /*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |            First        |        Number           | PictureID |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    */
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpPsfbSliItemCode;
+
+    WebRtc_UWord32 buffer;
+    buffer = *_ptrRTCPData++ << 24;
+    buffer += *_ptrRTCPData++ << 16;
+    buffer += *_ptrRTCPData++ << 8;
+    buffer += *_ptrRTCPData++;
+
+    // Unpack the three bit fields from the assembled word.
+    _packet.SLIItem.FirstMB = WebRtc_UWord16((buffer>>19) & 0x1fff);
+    _packet.SLIItem.NumberOfMB = WebRtc_UWord16((buffer>>6) & 0x1fff);
+    _packet.SLIItem.PictureId = WebRtc_UWord8(buffer & 0x3f);
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseFIRItem()
+{
+    // Parses one FIR FCI entry: 32-bit SSRC, one command sequence number
+    // byte, then three reserved bytes — 8 bytes total.
+    // RFC 5104 4.3.1. Full Intra Request (FIR)
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpPsfbFirItemCode;
+
+    _packet.FIRItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.FIRItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.FIRItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.FIRItem.SSRC += *_ptrRTCPData++;
+
+    _packet.FIRItem.CommandSequenceNumber = *_ptrRTCPData++;
+    _ptrRTCPData += 3; // Skip "Reserved" bytes.
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseAPP( const RTCPCommonHeader& header)
+{
+    // Parses the fixed part of an APP packet (RFC 3550 6.7): 4-byte RTCP
+    // header, 4-byte sender SSRC, 4-byte ASCII name. The application-
+    // dependent data that follows is consumed by ParseAPPItem.
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 12) // 4 * 3, RFC 3550 6.7 APP: Application-Defined RTCP Packet
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip RTCP header
+
+    // The sender SSRC is not stored in _packet.APP, so just skip it.
+    // (The original code read it into an unused local.)
+    _ptrRTCPData += 4;
+
+    // 4-byte ASCII name, read big-endian.
+    WebRtc_UWord32 name = *_ptrRTCPData++ << 24;
+    name += *_ptrRTCPData++ << 16;
+    name += *_ptrRTCPData++ << 8;
+    name += *_ptrRTCPData++;
+
+    _packetType = kRtcpAppCode;
+
+    _packet.APP.SubType = header.IC;
+    _packet.APP.Name = name;
+
+    _state = State_AppItem;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseAPPItem()
+{
+    // Copies the application-dependent data of an APP packet into
+    // _packet.APP.Data, truncating to at most kRtcpAppCode_DATA_SIZE bytes.
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpAppItemCode;
+
+    // Clamp the copy size to the fixed-size destination buffer.
+    const WebRtc_UWord16 copySize = (length > kRtcpAppCode_DATA_SIZE)
+        ? (WebRtc_UWord16)kRtcpAppCode_DATA_SIZE
+        : (WebRtc_UWord16)length;
+
+    memcpy(_packet.APP.Data, _ptrRTCPData, copySize);
+    _packet.APP.Size = copySize;
+    _ptrRTCPData += copySize;
+
+    return true;
+}
+
+// Constructs an iterator over [rtcpData, rtcpData + rtcpDataLength);
+// the current-block pointer starts NULL until Begin() is called.
+RTCPUtility::RTCPPacketIterator::RTCPPacketIterator(WebRtc_UWord8* rtcpData,
+                                                    size_t rtcpDataLength)
+    : _ptrBegin(rtcpData),
+      _ptrEnd(rtcpData + rtcpDataLength),
+      _ptrBlock(NULL) {
+  memset(&_header, 0, sizeof(_header));
+}
+
+// The iterator does not own the buffer, so there is nothing to release.
+RTCPUtility::RTCPPacketIterator::~RTCPPacketIterator() {
+}
+
+// Resets the iterator to the start of the buffer and returns the header
+// of the first RTCP packet, or NULL if none parses.
+const RTCPUtility::RTCPCommonHeader*
+RTCPUtility::RTCPPacketIterator::Begin()
+{
+    _ptrBlock = _ptrBegin;
+
+    return Iterate();
+}
+
+// Parses the header at the current position and advances past the packet.
+// Returns NULL (and invalidates the iterator) when the header does not
+// parse or the advertised length would run past the end of the buffer.
+const RTCPUtility::RTCPCommonHeader*
+RTCPUtility::RTCPPacketIterator::Iterate()
+{
+    const bool success = RTCPParseCommonHeader(_ptrBlock, _ptrEnd, _header);
+    if (!success)
+    {
+        _ptrBlock = NULL;
+        return NULL;
+    }
+    // Advance by the whole packet so the next call sees the next header.
+    _ptrBlock += _header.LengthInOctets;
+
+    if (_ptrBlock > _ptrEnd)
+    {
+        _ptrBlock = NULL;
+        return  NULL;
+    }
+
+    return &_header;
+}
+
+// Returns the most recently parsed header, or NULL if the iterator is
+// exhausted or Begin() has not been called.
+const RTCPUtility::RTCPCommonHeader*
+RTCPUtility::RTCPPacketIterator::Current()
+{
+    if (!_ptrBlock)
+    {
+        return NULL;
+    }
+
+    return &_header;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtcp_utility.h b/trunk/src/modules/rtp_rtcp/source/rtcp_utility.h
new file mode 100644
index 0000000..faba2ef
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtcp_utility.h
@@ -0,0 +1,437 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
+
+#include <cstddef> // size_t, ptrdiff_t
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+namespace RTCPUtility {
+    // CNAME
+    struct RTCPCnameInformation
+    {
+        char name[RTCP_CNAME_SIZE];
+    };
+    // Receiver report fixed header (RFC 3550 6.4.2).
+    struct RTCPPacketRR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord8  NumberOfReportBlocks;
+    };
+    // Sender report fixed header plus sender info (RFC 3550 6.4.1).
+    struct RTCPPacketSR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord8  NumberOfReportBlocks;
+
+        // sender info
+        WebRtc_UWord32 NTPMostSignificant;
+        WebRtc_UWord32 NTPLeastSignificant;
+        WebRtc_UWord32 RTPTimestamp;
+        WebRtc_UWord32 SenderPacketCount;
+        WebRtc_UWord32 SenderOctetCount;
+    };
+    // One report block as carried in SR/RR packets (RFC 3550 6.4.1).
+    struct RTCPPacketReportBlockItem
+    {
+        // report block
+        WebRtc_UWord32 SSRC;
+        WebRtc_UWord8  FractionLost;
+        WebRtc_UWord32 CumulativeNumOfPacketsLost;
+        WebRtc_UWord32 ExtendedHighestSequenceNumber;
+        WebRtc_UWord32 Jitter;
+        WebRtc_UWord32 LastSR;
+        WebRtc_UWord32 DelayLastSR;
+    };
+    struct RTCPPacketSDESCName
+    {
+        // RFC3550
+        WebRtc_UWord32 SenderSSRC;
+        char CName[RTCP_CNAME_SIZE];
+    };
+
+    struct RTCPPacketExtendedJitterReportItem
+    {
+        // RFC 5450
+        WebRtc_UWord32 Jitter;
+    };
+
+    struct RTCPPacketBYE
+    {
+        WebRtc_UWord32 SenderSSRC;
+    };
+    struct RTCPPacketXR
+    {
+        // RFC 3611
+        WebRtc_UWord32 OriginatorSSRC;
+    };
+    struct RTCPPacketXRVOIPMetricItem
+    {
+        // RFC 3611 4.7
+        WebRtc_UWord32    SSRC;
+        WebRtc_UWord8     lossRate;
+        WebRtc_UWord8     discardRate;
+        WebRtc_UWord8     burstDensity;
+        WebRtc_UWord8     gapDensity;
+        WebRtc_UWord16    burstDuration;
+        WebRtc_UWord16    gapDuration;
+        WebRtc_UWord16    roundTripDelay;
+        WebRtc_UWord16    endSystemDelay;
+        WebRtc_UWord8     signalLevel;
+        WebRtc_UWord8     noiseLevel;
+        WebRtc_UWord8     RERL;
+        WebRtc_UWord8     Gmin;
+        WebRtc_UWord8     Rfactor;
+        WebRtc_UWord8     extRfactor;
+        WebRtc_UWord8     MOSLQ;
+        WebRtc_UWord8     MOSCQ;
+        WebRtc_UWord8     RXconfig;
+        WebRtc_UWord16    JBnominal;
+        WebRtc_UWord16    JBmax;
+        WebRtc_UWord16    JBabsMax;
+    };
+
+    struct RTCPPacketRTPFBNACK
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+    struct RTCPPacketRTPFBNACKItem
+    {
+        // RFC4585
+        WebRtc_UWord16 PacketID;
+        WebRtc_UWord16 BitMask;
+    };
+
+    struct RTCPPacketRTPFBTMMBR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC; // zero!
+    };
+    struct RTCPPacketRTPFBTMMBRItem
+    {
+        // RFC5104
+        WebRtc_UWord32 SSRC;
+        WebRtc_UWord32 MaxTotalMediaBitRate; // In Kbit/s
+        WebRtc_UWord32 MeasuredOverhead;
+    };
+
+    struct RTCPPacketRTPFBTMMBN
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC; // zero!
+    };
+    struct RTCPPacketRTPFBTMMBNItem
+    {
+        // RFC5104
+        WebRtc_UWord32 SSRC; // "Owner"
+        WebRtc_UWord32 MaxTotalMediaBitRate;
+        WebRtc_UWord32 MeasuredOverhead;
+    };
+
+    struct RTCPPacketPSFBFIR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC; // zero!
+    };
+    struct RTCPPacketPSFBFIRItem
+    {
+        // RFC5104
+        WebRtc_UWord32 SSRC;
+        WebRtc_UWord8  CommandSequenceNumber;
+    };
+
+    struct RTCPPacketPSFBPLI
+    {
+        // RFC4585
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+
+    struct RTCPPacketPSFBSLI
+    {
+        // RFC4585
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+    struct RTCPPacketPSFBSLIItem
+    {
+        // RFC4585
+        WebRtc_UWord16 FirstMB;
+        WebRtc_UWord16 NumberOfMB;
+        WebRtc_UWord8 PictureId;
+    };
+    struct RTCPPacketPSFBRPSI
+    {
+        // RFC4585
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+        WebRtc_UWord8  PayloadType;
+        WebRtc_UWord16 NumberOfValidBits;
+        WebRtc_UWord8  NativeBitString[RTCP_RPSI_DATA_SIZE];
+    };
+    struct RTCPPacketPSFBREMB
+    {
+        WebRtc_UWord32 BitRate;
+    };
+    // generic name APP
+    struct RTCPPacketAPP
+    {
+        WebRtc_UWord8     SubType;
+        WebRtc_UWord32    Name;
+        WebRtc_UWord8     Data[kRtcpAppCode_DATA_SIZE];
+        WebRtc_UWord16    Size;
+    };
+
+    // Union over all parsed packet shapes; only the member matching the
+    // current RTCPPacketTypes value is meaningful at any given time.
+    union RTCPPacket
+    {
+        RTCPPacketRR              RR;
+        RTCPPacketSR              SR;
+        RTCPPacketReportBlockItem ReportBlockItem;
+
+        RTCPPacketSDESCName       CName;
+        RTCPPacketBYE             BYE;
+
+        RTCPPacketExtendedJitterReportItem ExtendedJitterReportItem;
+
+        RTCPPacketRTPFBNACK       NACK;
+        RTCPPacketRTPFBNACKItem   NACKItem;
+
+        RTCPPacketPSFBPLI         PLI;
+        RTCPPacketPSFBSLI         SLI;
+        RTCPPacketPSFBSLIItem     SLIItem;
+        RTCPPacketPSFBRPSI        RPSI;
+        RTCPPacketPSFBREMB        REMB;
+
+        RTCPPacketRTPFBTMMBR      TMMBR;
+        RTCPPacketRTPFBTMMBRItem  TMMBRItem;
+        RTCPPacketRTPFBTMMBN      TMMBN;
+        RTCPPacketRTPFBTMMBNItem  TMMBNItem;
+        RTCPPacketPSFBFIR         FIR;
+        RTCPPacketPSFBFIRItem     FIRItem;
+
+        RTCPPacketXR               XR;
+        RTCPPacketXRVOIPMetricItem XRVOIPMetricItem;
+
+        RTCPPacketAPP             APP;
+    };
+
+    // Discriminator telling which RTCPPacket union member is valid.
+    enum RTCPPacketTypes
+    {
+        kRtcpNotValidCode,
+
+        // RFC3550
+        kRtcpRrCode,
+        kRtcpSrCode,
+        kRtcpReportBlockItemCode,
+
+        kRtcpSdesCode,
+        kRtcpSdesChunkCode,
+        kRtcpByeCode,
+
+        // RFC5450
+        kRtcpExtendedIjCode,
+        kRtcpExtendedIjItemCode,
+
+        // RFC4585
+        kRtcpRtpfbNackCode,
+        kRtcpRtpfbNackItemCode,
+
+        kRtcpPsfbPliCode,
+        kRtcpPsfbRpsiCode,
+        kRtcpPsfbSliCode,
+        kRtcpPsfbSliItemCode,
+        kRtcpPsfbAppCode,
+        kRtcpPsfbRembItemCode,
+
+        // RFC5104
+        kRtcpRtpfbTmmbrCode,
+        kRtcpRtpfbTmmbrItemCode,
+        kRtcpRtpfbTmmbnCode,
+        kRtcpRtpfbTmmbnItemCode,
+        kRtcpPsfbFirCode,
+        kRtcpPsfbFirItemCode,
+
+        // draft-perkins-avt-rapid-rtp-sync
+        kRtcpRtpfbSrReqCode,
+
+        // RFC 3611
+        kRtcpXrVoipMetricCode,
+
+        kRtcpAppCode,
+        kRtcpAppItemCode,
+    };
+
+    struct RTCPRawPacket
+    {
+        const WebRtc_UWord8* _ptrPacketBegin;
+        const WebRtc_UWord8* _ptrPacketEnd;
+    };
+
+    struct RTCPModRawPacket
+    {
+        WebRtc_UWord8* _ptrPacketBegin;
+        WebRtc_UWord8* _ptrPacketEnd;
+    };
+
+    // Decoded common RTCP packet header (RFC 3550 section 6.4).
+    struct RTCPCommonHeader
+    {
+        WebRtc_UWord8  V;  // Version
+        bool           P;  // Padding
+        WebRtc_UWord8  IC; // Item count/subtype
+        WebRtc_UWord8  PT; // Packet Type
+        WebRtc_UWord16 LengthInOctets;
+    };
+
+    // IANA-assigned RTCP packet type values.
+    enum RTCPPT
+    {
+        PT_IJ    = 195,
+        PT_SR    = 200,
+        PT_RR    = 201,
+        PT_SDES  = 202,
+        PT_BYE   = 203,
+        PT_APP   = 204,
+        PT_RTPFB = 205,
+        PT_PSFB  = 206,
+        PT_XR    = 207
+    };
+
+    // Parses the common header at ptrDataBegin into parsedHeader; returns
+    // false if the data is too short or malformed.
+    bool RTCPParseCommonHeader( const WebRtc_UWord8* ptrDataBegin,
+                                const WebRtc_UWord8* ptrDataEnd,
+                                RTCPCommonHeader& parsedHeader);
+
+    // State-machine parser for a compound RTCP packet. Begin()/Iterate()
+    // step through the packet; after each step, PacketType() identifies the
+    // parsed item and Packet() holds its decoded fields.
+    class RTCPParserV2
+    {
+    public:
+        RTCPParserV2(const WebRtc_UWord8* rtcpData,
+                     size_t rtcpDataLength,
+                     bool rtcpReducedSizeEnable); // Set to true, to allow non-compound RTCP!
+        ~RTCPParserV2();
+
+        RTCPPacketTypes PacketType() const;
+        const RTCPPacket& Packet() const;
+        const RTCPRawPacket& RawPacket() const;
+        ptrdiff_t LengthLeft() const;
+
+        bool IsValid() const;
+
+        RTCPPacketTypes Begin();
+        RTCPPacketTypes Iterate();
+
+    private:
+        // Tracks what kind of sub-item the next Iterate() call will parse.
+        enum ParseState
+        {
+            State_TopLevel,        // Top level packet
+            State_ReportBlockItem, // SR/RR report block
+            State_SDESChunk,       // SDES chunk
+            State_BYEItem,         // BYE item
+            State_ExtendedJitterItem, // Extended jitter report item
+            State_RTPFB_NACKItem,  // NACK FCI item
+            State_RTPFB_TMMBRItem, // TMMBR FCI item
+            State_RTPFB_TMMBNItem, // TMMBN FCI item
+            State_PSFB_SLIItem,    // SLI FCI item
+            State_PSFB_RPSIItem,   // RPSI FCI item
+            State_PSFB_FIRItem,    // FIR FCI item
+            State_PSFB_AppItem,    // Application specific FCI item
+            State_PSFB_REMBItem,   // Application specific REMB item
+            State_XRItem,
+            State_AppItem
+        };
+
+    private:
+        // Per-state Iterate helpers; each calls the matching Parse method.
+        void IterateTopLevel();
+        void IterateReportBlockItem();
+        void IterateSDESChunk();
+        void IterateBYEItem();
+        void IterateExtendedJitterItem();
+        void IterateNACKItem();
+        void IterateTMMBRItem();
+        void IterateTMMBNItem();
+        void IterateSLIItem();
+        void IterateRPSIItem();
+        void IterateFIRItem();
+        void IteratePsfbAppItem();
+        void IteratePsfbREMBItem();
+        void IterateAppItem();
+
+        void Validate();
+        void EndCurrentBlock();
+
+        // Parse methods; on failure they end the current block and
+        // return false.
+        bool ParseRR();
+        bool ParseSR();
+        bool ParseReportBlockItem();
+
+        bool ParseSDES();
+        bool ParseSDESChunk();
+        bool ParseSDESItem();
+
+        bool ParseBYE();
+        bool ParseBYEItem();
+
+        bool ParseIJ();
+        bool ParseIJItem();
+
+        bool ParseXR();
+        bool ParseXRItem();
+        bool ParseXRVOIPMetricItem();
+
+        bool ParseFBCommon(const RTCPCommonHeader& header);
+        bool ParseNACKItem();
+        bool ParseTMMBRItem();
+        bool ParseTMMBNItem();
+        bool ParseSLIItem();
+        bool ParseRPSIItem();
+        bool ParseFIRItem();
+        bool ParsePsfbAppItem();
+        bool ParsePsfbREMBItem();
+
+        bool ParseAPP(const RTCPCommonHeader& header);
+        bool ParseAPPItem();
+
+    private:
+        const WebRtc_UWord8* const _ptrRTCPDataBegin;
+        const bool                 _RTCPReducedSizeEnable;
+        const WebRtc_UWord8* const _ptrRTCPDataEnd;
+
+        bool                     _validPacket;
+        const WebRtc_UWord8*     _ptrRTCPData;     // Current read position.
+        const WebRtc_UWord8*     _ptrRTCPBlockEnd; // End of the current block.
+
+        ParseState               _state;
+        WebRtc_UWord8            _numberOfBlocks;
+
+        RTCPPacketTypes          _packetType;
+        RTCPPacket               _packet;
+    };
+
+    // Lightweight iterator over the packets of a compound RTCP buffer;
+    // yields only the common header of each packet, without decoding the
+    // payload. Does not take ownership of the buffer.
+    class RTCPPacketIterator
+    {
+    public:
+        RTCPPacketIterator(WebRtc_UWord8* rtcpData,
+                            size_t rtcpDataLength);
+        ~RTCPPacketIterator();
+
+        const RTCPCommonHeader* Begin();
+        const RTCPCommonHeader* Iterate();
+        const RTCPCommonHeader* Current();
+
+    private:
+        WebRtc_UWord8* const     _ptrBegin;
+        WebRtc_UWord8* const     _ptrEnd;
+
+        WebRtc_UWord8*           _ptrBlock; // Current packet; NULL when done.
+
+        RTCPCommonHeader         _header;
+    };
+} // RTCPUtility
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/trunk/src/modules/rtp_rtcp/source/rtp_fec_unittest.cc
new file mode 100644
index 0000000..d794701
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_fec_unittest.cc
@@ -0,0 +1,550 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+#include <gtest/gtest.h>
+#include <list>
+
+#include "rtp_utility.h"
+
+using webrtc::ForwardErrorCorrection;
+
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
+// Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum.
+const uint8_t kTransportOverhead = 28;
+
+// Maximum number of media packets used in the FEC (RFC 5109).
+const uint8_t kMaxNumberMediaPackets = ForwardErrorCorrection::kMaxMediaPackets;
+
+template<typename T> void ClearList(std::list<T*>* my_list) {
+  T* packet = NULL;
+  while (!my_list->empty()) {
+    packet = my_list->front();
+    delete packet;
+    my_list->pop_front();
+  }
+}
+
+class RtpFecTest : public ::testing::Test {
+ protected:
+  RtpFecTest()
+      :  fec_(new ForwardErrorCorrection(0)),
+         ssrc_(rand()),
+         fec_seq_num_(0) {
+  }
+
+  ForwardErrorCorrection* fec_;
+  int ssrc_;
+  uint16_t fec_seq_num_;
+
+  std::list<ForwardErrorCorrection::Packet*> media_packet_list_;
+  std::list<ForwardErrorCorrection::Packet*> fec_packet_list_;
+  std::list<ForwardErrorCorrection::ReceivedPacket*> received_packet_list_;
+  std::list<ForwardErrorCorrection::RecoveredPacket*> recovered_packet_list_;
+
+  // Media packet "i" is lost if media_loss_mask_[i] = 1,
+  // received if media_loss_mask_[i] = 0.
+  int media_loss_mask_[kMaxNumberMediaPackets];
+
+  // FEC packet "i" is lost if fec_loss_mask_[i] = 1,
+  // received if fec_loss_mask_[i] = 0.
+  int fec_loss_mask_[kMaxNumberMediaPackets];
+
+  // Construct the media packet list, up to |num_media_packets| packets.
+  // Returns the next sequence number after the last media packet
+  // (this will be the sequence number of the first FEC packet).
+  int ConstructMediaPackets(int num_media_packets);
+
+  // Construct the received packet list: a subset of the media and FEC packets.
+  void NetworkReceivedPackets();
+
+  // Add packet from |packet_list| to list of received packets, using the
+  // |loss_mask|.
+  // The |packet_list| may be a media packet list (is_fec = false), or a
+  // FEC packet list (is_fec = true).
+  void ReceivedPackets(
+      const std::list<ForwardErrorCorrection::Packet*>& packet_list,
+      int* loss_mask,
+      bool is_fec);
+
+  // Check for complete recovery after FEC decoding.
+  bool IsRecoveryComplete();
+
+  // Delete the received packets.
+  void FreeRecoveredPacketList();
+
+  // Delete the media and FEC packets.
+  void TearDown();
+};
+
+TEST_F(RtpFecTest, HandleIncorrectInputs) {
+  int num_important_packets = 0;
+  bool use_unequal_protection =  false;
+  uint8_t protection_factor = 60;
+
+  // Media packet list is empty.
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  protection_factor,
+                                  num_important_packets,
+                                  use_unequal_protection,
+                                  &fec_packet_list_));
+
+  int num_media_packets = 10;
+  ConstructMediaPackets(num_media_packets);
+
+  num_important_packets = -1;
+  // Number of important packets below 0.
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  protection_factor,
+                                  num_important_packets,
+                                  use_unequal_protection,
+                                  &fec_packet_list_));
+
+  num_important_packets = 12;
+  // Number of important packets greater than number of media packets.
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  protection_factor,
+                                  num_important_packets,
+                                  use_unequal_protection,
+                                  &fec_packet_list_));
+
+  num_media_packets = kMaxNumberMediaPackets + 1;
+  ConstructMediaPackets(num_media_packets);
+
+  num_important_packets = 0;
+  // Number of media packets is above the maximum allowed (kMaxNumberMediaPackets).
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  protection_factor,
+                                  num_important_packets,
+                                  use_unequal_protection,
+                                  &fec_packet_list_));
+}
+
+TEST_F(RtpFecTest, FecRecoveryNoLoss) {
+  const int num_important_packets = 0;
+  const bool use_unequal_protection =  false;
+  const int num_media_packets = 4;
+  uint8_t protection_factor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(num_media_packets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 protection_factor,
+                                 num_important_packets,
+                                 use_unequal_protection,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // No packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // No packets lost, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryWithLoss) {
+  const int num_important_packets = 0;
+  const bool use_unequal_protection = false;
+  const int num_media_packets = 4;
+  uint8_t protection_factor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(num_media_packets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 protection_factor,
+                                 num_important_packets,
+                                 use_unequal_protection,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // 1 media packet lost
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 2 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 2 packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryWithLoss50perc) {
+  const int num_important_packets = 0;
+  const bool use_unequal_protection =  false;
+  const int num_media_packets = 4;
+  const uint8_t protection_factor = 255;
+
+  // Packet Mask for (4,4,0) code:
+  // (num_media_packets = 4; num_fec_packets = 4, num_important_packets = 0)
+
+  //         media#0   media#1  media#2    media#3
+  // fec#0:    1          1        0          0
+  // fec#1:    1          0        1          0
+  // fec#2:    0          1        0          1
+  // fec#3:    0          0        1          1
+  //
+
+  fec_seq_num_ = ConstructMediaPackets(num_media_packets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 protection_factor,
+                                 num_important_packets,
+                                 use_unequal_protection,
+                                 &fec_packet_list_));
+
+  // Expect 4 FEC packets.
+  EXPECT_EQ(4, static_cast<int>(fec_packet_list_.size()));
+
+  // 4 packets lost: 3 media packets and FEC packet #2.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[2] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // With media packet#1 and FEC packets #0, #1, #3, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // All 4 media packets lost; note fec_loss_mask_ is memset to 1 below, so all FEC packets are lost as well — verify this is intended.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 1, sizeof(fec_loss_mask_));
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Cannot get complete recovery for this loss configuration.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryNoLossUep) {
+  const int num_important_packets = 2;
+  const bool use_unequal_protection =  true;
+  const int num_media_packets = 4;
+  const uint8_t protection_factor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(num_media_packets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 protection_factor,
+                                 num_important_packets,
+                                 use_unequal_protection,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // No packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // No packets lost, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryWithLossUep) {
+  const int num_important_packets = 2;
+  const bool use_unequal_protection =  true;
+  const int num_media_packets = 4;
+  const uint8_t protection_factor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(num_media_packets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 protection_factor,
+                                 num_important_packets,
+                                 use_unequal_protection,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // 1 media packet lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 2 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 2 packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryWithLoss50percUep) {
+  const int num_important_packets = 1;
+  const bool use_unequal_protection =  true;
+  const int num_media_packets = 4;
+  const uint8_t protection_factor = 255;
+
+  // Packet Mask for (4,4,1) code:
+  // (num_media_packets = 4; num_fec_packets = 4, num_important_packets = 1)
+
+  //         media#0   media#1  media#2    media#3
+  // fec#0:    1          0        0          0
+  // fec#1:    1          1        0          0
+  // fec#2:    1          0        1          1
+  // fec#3:    0          1        1          0
+  //
+
+  fec_seq_num_ = ConstructMediaPackets(num_media_packets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 protection_factor,
+                                 num_important_packets,
+                                 use_unequal_protection,
+                                 &fec_packet_list_));
+
+  // Expect 4 FEC packets.
+  EXPECT_EQ(4, static_cast<int>(fec_packet_list_.size()));
+
+  // 4 packets lost: 3 media packets and FEC packet#1 lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[1] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // With media packet#1 and FEC packets #0, #2, #3, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 4 packets lost: 3 media packets and FEC packet #2.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[2] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Cannot get complete recovery for this loss configuration.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+// TODO(marpan): Add more test cases.
+
+void RtpFecTest::TearDown() {
+  fec_->ResetState(&recovered_packet_list_);
+  delete fec_;
+  FreeRecoveredPacketList();
+  ClearList(&media_packet_list_);
+  EXPECT_TRUE(media_packet_list_.empty());
+}
+
+void RtpFecTest::FreeRecoveredPacketList() {
+  ClearList(&recovered_packet_list_);
+}
+
+bool RtpFecTest::IsRecoveryComplete() {
+  // Check that the number of media and recovered packets are equal.
+  if (media_packet_list_.size() != recovered_packet_list_.size()) {
+    return false;
+  }
+
+  ForwardErrorCorrection::Packet* media_packet;
+  ForwardErrorCorrection::RecoveredPacket* recovered_packet;
+
+  bool recovery = true;
+
+  std::list<ForwardErrorCorrection::Packet*>::iterator
+    media_packet_list_item = media_packet_list_.begin();
+  std::list<ForwardErrorCorrection::RecoveredPacket*>::iterator
+    recovered_packet_list_item = recovered_packet_list_.begin();
+  while (media_packet_list_item != media_packet_list_.end()) {
+    if (recovered_packet_list_item == recovered_packet_list_.end()) {
+      return false;
+    }
+    media_packet = *media_packet_list_item;
+    recovered_packet = *recovered_packet_list_item;
+    if (recovered_packet->pkt->length != media_packet->length) {
+      return false;
+    }
+    if (memcmp(recovered_packet->pkt->data, media_packet->data,
+               media_packet->length) != 0) {
+      return false;
+    }
+    media_packet_list_item++;
+    recovered_packet_list_item++;
+  }
+  return recovery;
+}
+
+void RtpFecTest::NetworkReceivedPackets() {
+  const bool kFecPacket = true;
+  ReceivedPackets(media_packet_list_, media_loss_mask_, !kFecPacket);
+  ReceivedPackets(fec_packet_list_, fec_loss_mask_, kFecPacket);
+}
+
+void RtpFecTest:: ReceivedPackets(
+    const std::list<ForwardErrorCorrection::Packet*>& packet_list,
+    int* loss_mask,
+    bool is_fec) {
+  ForwardErrorCorrection::Packet* packet;
+  ForwardErrorCorrection::ReceivedPacket* received_packet;
+  int seq_num = fec_seq_num_;
+  int packet_idx = 0;
+
+  std::list<ForwardErrorCorrection::Packet*>::const_iterator
+  packet_list_item = packet_list.begin();
+
+  while (packet_list_item != packet_list.end()) {
+    packet = *packet_list_item;
+    if (loss_mask[packet_idx] == 0) {
+      received_packet = new ForwardErrorCorrection::ReceivedPacket;
+      received_packet->pkt = new ForwardErrorCorrection::Packet;
+      received_packet_list_.push_back(received_packet);
+      received_packet->pkt->length = packet->length;
+      memcpy(received_packet->pkt->data, packet->data,
+             packet->length);
+      received_packet->isFec = is_fec;
+      if (!is_fec) {
+        // For media packets, the sequence number and marker bit are
+        // obtained from the RTP header; these were set in ConstructMediaPackets().
+        received_packet->seqNum =
+            webrtc::ModuleRTPUtility::BufferToUWord16(&packet->data[2]);
+      }
+      else {
+        // The sequence number, marker bit, and ssrc number are defined in the
+        // RTP header of the FEC packet, which is not constructed in this test.
+        // So we set these values below based on the values generated in
+        // ConstructMediaPackets().
+        received_packet->seqNum = seq_num;
+        // The ssrc value for FEC packets is set to the one used for the
+        // media packets in ConstructMediaPackets().
+        received_packet->ssrc = ssrc_;
+      }
+    }
+    packet_idx++;
+    packet_list_item ++;
+    // Sequence numbers of FEC packets are defined to increment by 1 from the
+    // last media packet in the frame.
+    if (is_fec) seq_num++;
+  }
+}
+
+int RtpFecTest::ConstructMediaPackets(int num_media_packets) {
+  assert(num_media_packets > 0);
+  ForwardErrorCorrection::Packet* media_packet = NULL;
+  int sequence_number = rand();
+  int time_stamp = rand();
+
+  for (int i = 0; i < num_media_packets; i++) {
+    media_packet = new ForwardErrorCorrection::Packet;
+    media_packet_list_.push_back(media_packet);
+    media_packet->length =
+        static_cast<uint16_t>((static_cast<float>(rand()) / RAND_MAX) *
+        (IP_PACKET_SIZE - kRtpHeaderSize - kTransportOverhead -
+            ForwardErrorCorrection::PacketOverhead()));
+
+    if (media_packet->length < kRtpHeaderSize) {
+      media_packet->length = kRtpHeaderSize;
+    }
+    // Generate random values for the first 2 bytes
+    media_packet->data[0] = static_cast<uint8_t>(rand() % 256);
+    media_packet->data[1] = static_cast<uint8_t>(rand() % 256);
+
+    // The first two bits are assumed to be 10 by the FEC encoder.
+    // In fact the FEC decoder will set the two first bits to 10 regardless of
+    // what they actually were. Set the first two bits to 10 so that a memcmp
+    // can be performed for the whole restored packet.
+    media_packet->data[0] |= 0x80;
+    media_packet->data[0] &= 0xbf;
+
+    // FEC is applied to a whole frame.
+    // A frame is signaled by multiple packets without the marker bit set
+    // followed by the last packet of the frame for which the marker bit is set.
+    // Only push one (fake) frame to the FEC.
+    media_packet->data[1] &= 0x7f;
+
+    webrtc::ModuleRTPUtility::AssignUWord16ToBuffer(&media_packet->data[2],
+                                                    sequence_number);
+    webrtc::ModuleRTPUtility::AssignUWord32ToBuffer(&media_packet->data[4],
+                                                    time_stamp);
+    webrtc::ModuleRTPUtility::AssignUWord32ToBuffer(&media_packet->data[8],
+                                                    ssrc_);
+
+    // Generate random values for payload.
+    for (int j = 12; j < media_packet->length; j++) {
+      media_packet->data[j] = static_cast<uint8_t> (rand() % 256);
+    }
+    sequence_number++;
+  }
+  // Last packet, set marker bit.
+  assert(media_packet != NULL);
+  media_packet->data[1] |= 0x80;
+  return sequence_number;
+}
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8.cc b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8.cc
new file mode 100644
index 0000000..f066ae2
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8.cc
@@ -0,0 +1,469 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+
+#include <string.h>  // memcpy
+
+#include <cassert>   // assert
+#include <vector>
+
+#include "modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+
+namespace webrtc {
+
+// Define how the VP8PacketizerModes are implemented.
+// Modes are: kStrict, kAggregate, kEqualSize.
+const RtpFormatVp8::AggregationMode RtpFormatVp8::aggr_modes_[kNumModes] =
+    { kAggrNone, kAggrPartitions, kAggrFragments };
+const bool RtpFormatVp8::balance_modes_[kNumModes] =
+    { true, true, true };
+const bool RtpFormatVp8::separate_first_modes_[kNumModes] =
+    { true, false, false };
+
+RtpFormatVp8::RtpFormatVp8(const WebRtc_UWord8* payload_data,
+                           WebRtc_UWord32 payload_size,
+                           const RTPVideoHeaderVP8& hdr_info,
+                           int max_payload_len,
+                           const RTPFragmentationHeader& fragmentation,
+                           VP8PacketizerMode mode)
+    : payload_data_(payload_data),
+      payload_size_(static_cast<int>(payload_size)),
+      vp8_fixed_payload_descriptor_bytes_(1),
+      aggr_mode_(aggr_modes_[mode]),
+      balance_(balance_modes_[mode]),
+      separate_first_(separate_first_modes_[mode]),
+      hdr_info_(hdr_info),
+      num_partitions_(fragmentation.fragmentationVectorSize),
+      max_payload_len_(max_payload_len),
+      packets_calculated_(false) {
+  part_info_ = fragmentation;
+}
+
+RtpFormatVp8::RtpFormatVp8(const WebRtc_UWord8* payload_data,
+                           WebRtc_UWord32 payload_size,
+                           const RTPVideoHeaderVP8& hdr_info,
+                           int max_payload_len)
+    : payload_data_(payload_data),
+      payload_size_(static_cast<int>(payload_size)),
+      part_info_(),
+      vp8_fixed_payload_descriptor_bytes_(1),
+      aggr_mode_(aggr_modes_[kEqualSize]),
+      balance_(balance_modes_[kEqualSize]),
+      separate_first_(separate_first_modes_[kEqualSize]),
+      hdr_info_(hdr_info),
+      num_partitions_(1),
+      max_payload_len_(max_payload_len),
+      packets_calculated_(false) {
+    part_info_.VerifyAndAllocateFragmentationHeader(1);
+    part_info_.fragmentationLength[0] = payload_size;
+    part_info_.fragmentationOffset[0] = 0;
+}
+
+int RtpFormatVp8::NextPacket(WebRtc_UWord8* buffer,
+                             int* bytes_to_send,
+                             bool* last_packet) {
+  if (!packets_calculated_) {
+    int ret = 0;
+    if (aggr_mode_ == kAggrPartitions && balance_) {
+      ret = GeneratePacketsBalancedAggregates();
+    } else {
+      ret = GeneratePackets();
+    }
+    if (ret < 0) {
+      return ret;
+    }
+  }
+  if (packets_.empty()) {
+    return -1;
+  }
+  InfoStruct packet_info = packets_.front();
+  packets_.pop();
+
+  *bytes_to_send = WriteHeaderAndPayload(packet_info, buffer, max_payload_len_);
+  if (*bytes_to_send < 0) {
+    return -1;
+  }
+
+  *last_packet = packets_.empty();
+  return packet_info.first_partition_ix;
+}
+
+int RtpFormatVp8::CalcNextSize(int max_payload_len, int remaining_bytes,
+                               bool split_payload) const {
+  if (max_payload_len == 0 || remaining_bytes == 0) {
+    return 0;
+  }
+  if (!split_payload) {
+    return max_payload_len >= remaining_bytes ? remaining_bytes : 0;
+  }
+
+  if (balance_) {
+    // Balance payload sizes to produce (almost) equal size
+    // fragments.
+    // Number of fragments for remaining_bytes:
+    int num_frags = remaining_bytes / max_payload_len + 1;
+    // Number of bytes in this fragment:
+    return static_cast<int>(static_cast<double>(remaining_bytes)
+                            / num_frags + 0.5);
+  } else {
+    return max_payload_len >= remaining_bytes ? remaining_bytes
+        : max_payload_len;
+  }
+}
+
+int RtpFormatVp8::GeneratePackets() {
+  if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_
+      + PayloadDescriptorExtraLength() + 1) {
+    // The provided payload length is not long enough for the payload
+    // descriptor and one payload byte. Return an error.
+    return -1;
+  }
+  int total_bytes_processed = 0;
+  bool start_on_new_fragment = true;
+  bool beginning = true;
+  int part_ix = 0;
+  while (total_bytes_processed < payload_size_) {
+    int packet_bytes = 0;  // How much data to send in this packet.
+    bool split_payload = true;  // Splitting of partitions is initially allowed.
+    int remaining_in_partition = part_info_.fragmentationOffset[part_ix] -
+        total_bytes_processed + part_info_.fragmentationLength[part_ix];
+    int rem_payload_len = max_payload_len_ -
+        (vp8_fixed_payload_descriptor_bytes_ + PayloadDescriptorExtraLength());
+    int first_partition_in_packet = part_ix;
+
+    while (int next_size = CalcNextSize(rem_payload_len, remaining_in_partition,
+                                        split_payload)) {
+      packet_bytes += next_size;
+      rem_payload_len -= next_size;
+      remaining_in_partition -= next_size;
+
+      if (remaining_in_partition == 0 && !(beginning && separate_first_)) {
+        // Advance to next partition?
+        // Check that there are more partitions; verify that we are either
+        // allowed to aggregate fragments, or that we are allowed to
+        // aggregate intact partitions and that we started this packet
+        // with an intact partition (indicated by first_fragment_ == true).
+        if (part_ix + 1 < num_partitions_ &&
+            ((aggr_mode_ == kAggrFragments) ||
+                (aggr_mode_ == kAggrPartitions && start_on_new_fragment))) {
+          assert(part_ix < num_partitions_);
+          remaining_in_partition = part_info_.fragmentationLength[++part_ix];
+          // Disallow splitting unless kAggrFragments. In kAggrPartitions,
+          // we can only aggregate intact partitions.
+          split_payload = (aggr_mode_ == kAggrFragments);
+        }
+      } else if (balance_ && remaining_in_partition > 0) {
+        break;
+      }
+    }
+    if (remaining_in_partition == 0) {
+      ++part_ix;  // Advance to next partition.
+    }
+    assert(packet_bytes > 0);
+
+    QueuePacket(total_bytes_processed, packet_bytes, first_partition_in_packet,
+                start_on_new_fragment);
+    total_bytes_processed += packet_bytes;
+    start_on_new_fragment = (remaining_in_partition == 0);
+    beginning = false;  // Next packet cannot be first packet in frame.
+  }
+  packets_calculated_ = true;
+  assert(total_bytes_processed == payload_size_);
+  return 0;
+}
+
+int RtpFormatVp8::GeneratePacketsBalancedAggregates() {
+  if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_
+      + PayloadDescriptorExtraLength() + 1) {
+    // The provided payload length is not long enough for the payload
+    // descriptor and one payload byte. Return an error.
+    return -1;
+  }
+  std::vector<int> partition_decision;
+  const int overhead = vp8_fixed_payload_descriptor_bytes_ +
+      PayloadDescriptorExtraLength();
+  const uint32_t max_payload_len = max_payload_len_ - overhead;
+  int min_size, max_size;
+  AggregateSmallPartitions(&partition_decision, &min_size, &max_size);
+
+  int total_bytes_processed = 0;
+  int part_ix = 0;
+  while (part_ix < num_partitions_) {
+    if (partition_decision[part_ix] == -1) {
+      // Split large partitions.
+      int remaining_partition = part_info_.fragmentationLength[part_ix];
+      int num_fragments = Vp8PartitionAggregator::CalcNumberOfFragments(
+          remaining_partition, max_payload_len, overhead, min_size, max_size);
+      const int packet_bytes =
+          (remaining_partition + num_fragments - 1) / num_fragments;
+      for (int n = 0; n < num_fragments; ++n) {
+        const int this_packet_bytes = packet_bytes < remaining_partition ?
+            packet_bytes : remaining_partition;
+        QueuePacket(total_bytes_processed, this_packet_bytes, part_ix,
+                    (n == 0));
+        remaining_partition -= this_packet_bytes;
+        total_bytes_processed += this_packet_bytes;
+        if (this_packet_bytes < min_size) {
+          min_size = this_packet_bytes;
+        }
+        if (this_packet_bytes > max_size) {
+          max_size = this_packet_bytes;
+        }
+      }
+      assert(remaining_partition == 0);
+      ++part_ix;
+    } else {
+      int this_packet_bytes = 0;
+      const int first_partition_in_packet = part_ix;
+      const int aggregation_index = partition_decision[part_ix];
+      while (static_cast<size_t>(part_ix) < partition_decision.size() &&
+          partition_decision[part_ix] == aggregation_index) {
+        // Collect all partitions that were aggregated into the same packet.
+        this_packet_bytes += part_info_.fragmentationLength[part_ix];
+        ++part_ix;
+      }
+      QueuePacket(total_bytes_processed, this_packet_bytes,
+                  first_partition_in_packet, true);
+      total_bytes_processed += this_packet_bytes;
+    }
+  }
+  packets_calculated_ = true;
+  return 0;
+}
+
+void RtpFormatVp8::AggregateSmallPartitions(std::vector<int>* partition_vec,
+                                            int* min_size,
+                                            int* max_size) {
+  assert(min_size && max_size);
+  *min_size = -1;
+  *max_size = -1;
+  assert(partition_vec);
+  partition_vec->assign(num_partitions_, -1);
+  const int overhead = vp8_fixed_payload_descriptor_bytes_ +
+      PayloadDescriptorExtraLength();
+  const uint32_t max_payload_len = max_payload_len_ - overhead;
+  int first_in_set = 0;
+  int last_in_set = 0;
+  int num_aggregate_packets = 0;
+  // Find sets of partitions smaller than max_payload_len_.
+  while (first_in_set < num_partitions_) {
+    if (part_info_.fragmentationLength[first_in_set] < max_payload_len) {
+      // Found start of a set.
+      last_in_set = first_in_set;
+      while (last_in_set + 1 < num_partitions_ &&
+          part_info_.fragmentationLength[last_in_set + 1] < max_payload_len) {
+        ++last_in_set;
+      }
+      // Found end of a set. Run optimized aggregator. It is ok if start == end.
+      Vp8PartitionAggregator aggregator(part_info_, first_in_set,
+                                        last_in_set);
+      if (*min_size >= 0 && *max_size >= 0) {
+        aggregator.SetPriorMinMax(*min_size, *max_size);
+      }
+      Vp8PartitionAggregator::ConfigVec optimal_config =
+          aggregator.FindOptimalConfiguration(max_payload_len, overhead);
+      aggregator.CalcMinMax(optimal_config, min_size, max_size);
+      for (int i = first_in_set, j = 0; i <= last_in_set; ++i, ++j) {
+        // Transfer configuration for this set of partitions to the joint
+        // partition vector representing all partitions in the frame.
+        (*partition_vec)[i] = num_aggregate_packets + optimal_config[j];
+      }
+      num_aggregate_packets += optimal_config.back() + 1;
+      first_in_set = last_in_set;
+    }
+    ++first_in_set;
+  }
+}
+
+void RtpFormatVp8::QueuePacket(int start_pos,
+                               int packet_size,
+                               int first_partition_in_packet,
+                               bool start_on_new_fragment) {
+  // Write info to packet info struct and store in packet info queue.
+  // Each queued InfoStruct describes one future packet: where in
+  // payload_data_ it starts, how many payload bytes it holds, which
+  // partition its first byte belongs to, and whether it starts on a
+  // fragment boundary (used for the S bit of the payload descriptor).
+  InfoStruct packet_info;
+  packet_info.payload_start_pos = start_pos;
+  packet_info.size = packet_size;
+  packet_info.first_partition_ix = first_partition_in_packet;
+  packet_info.first_fragment = start_on_new_fragment;
+  packets_.push(packet_info);
+}
+
+int RtpFormatVp8::WriteHeaderAndPayload(const InfoStruct& packet_info,
+                                        WebRtc_UWord8* buffer,
+                                        int buffer_length) const {
+  // Write the VP8 payload descriptor, followed by the payload bytes
+  // described by packet_info. Returns the total number of bytes written.
+  //
+  //       0 1 2 3 4 5 6 7
+  //      +-+-+-+-+-+-+-+-+
+  //      |X|R|N|S|PART_ID| (required; R is reserved, set to zero)
+  //      +-+-+-+-+-+-+-+-+
+  // X:   |I|L|T|K|  RSV  | (mandatory if any of the below are used)
+  //      +-+-+-+-+-+-+-+-+
+  // I:   |PictureID 8/16b| (optional)
+  //      +-+-+-+-+-+-+-+-+
+  // L:   |  TL0PIC_IDX   | (optional)
+  //      +-+-+-+-+-+-+-+-+
+  // T/K: |TID:Y| KEYIDX  | (optional)
+  //      +-+-+-+-+-+-+-+-+
+
+  assert(packet_info.size > 0);
+  buffer[0] = 0;
+  if (XFieldPresent())            buffer[0] |= kXBit;
+  if (hdr_info_.nonReference)     buffer[0] |= kNBit;
+  if (packet_info.first_fragment) buffer[0] |= kSBit;
+  buffer[0] |= (packet_info.first_partition_ix & kPartIdField);
+
+  // NOTE(review): WriteExtensionFields() can return -1 on error, but the
+  // value is used unchecked below (as a buffer offset and in the returned
+  // length) — confirm callers guarantee buffer_length is always large
+  // enough, or add error handling here.
+  const int extension_length = WriteExtensionFields(buffer, buffer_length);
+
+  memcpy(&buffer[vp8_fixed_payload_descriptor_bytes_ + extension_length],
+         &payload_data_[packet_info.payload_start_pos], packet_info.size);
+
+  // Return total length of written data.
+  return packet_info.size + vp8_fixed_payload_descriptor_bytes_
+      + extension_length;
+}
+
+int RtpFormatVp8::WriteExtensionFields(WebRtc_UWord8* buffer,
+                                       int buffer_length) const {
+  // Write the X field octet plus whichever optional extension fields
+  // (PictureID, TL0PICIDX, TID/Y/KEYIDX) are enabled in hdr_info_.
+  // Returns the number of extension octets written, including the X field
+  // itself (0 when no extensions are used), or -1 if the buffer is too
+  // small for one of the fields.
+  int extension_length = 0;
+  if (XFieldPresent()) {
+    WebRtc_UWord8* x_field = buffer + vp8_fixed_payload_descriptor_bytes_;
+    *x_field = 0;
+    extension_length = 1;  // One octet for the X field.
+    if (PictureIdPresent()) {
+      if (WritePictureIDFields(x_field, buffer, buffer_length,
+                               &extension_length) < 0) {
+        return -1;
+      }
+    }
+    if (TL0PicIdxFieldPresent()) {
+      if (WriteTl0PicIdxFields(x_field, buffer, buffer_length,
+                               &extension_length) < 0) {
+        return -1;
+      }
+    }
+    if (TIDFieldPresent() || KeyIdxFieldPresent()) {
+      if (WriteTIDAndKeyIdxFields(x_field, buffer, buffer_length,
+                                  &extension_length) < 0) {
+        return -1;
+      }
+    }
+    // The helpers must together have written exactly the number of octets
+    // predicted by PayloadDescriptorExtraLength().
+    assert(extension_length == PayloadDescriptorExtraLength());
+  }
+  return extension_length;
+}
+
+int RtpFormatVp8::WritePictureIDFields(WebRtc_UWord8* x_field,
+                                       WebRtc_UWord8* buffer,
+                                       int buffer_length,
+                                       int* extension_length) const {
+  // Set the I bit in *x_field and append the PictureID (1 or 2 octets)
+  // after the extension octets written so far. On success,
+  // *extension_length is advanced past the written octets and 0 is
+  // returned; returns -1 if the field does not fit in the buffer.
+  *x_field |= kIBit;
+  const int pic_id_length = WritePictureID(
+      buffer + vp8_fixed_payload_descriptor_bytes_ + *extension_length,
+      buffer_length - vp8_fixed_payload_descriptor_bytes_
+      - *extension_length);
+  if (pic_id_length < 0) return -1;
+  *extension_length += pic_id_length;
+  return 0;
+}
+
+int RtpFormatVp8::WritePictureID(WebRtc_UWord8* buffer,
+                                 int buffer_length) const {
+  // Write the PictureID from hdr_info_ in 1 or 2 octets, as determined by
+  // PictureIdLength(). In the 2-octet form the leading bit of the first
+  // octet is set and the remaining 15 bits hold the PictureID.
+  // Returns the number of octets written, or -1 if buffer_length is too
+  // small.
+  const WebRtc_UWord16 pic_id =
+      static_cast<WebRtc_UWord16> (hdr_info_.pictureId);
+  int picture_id_len = PictureIdLength();
+  if (picture_id_len > buffer_length) return -1;
+  if (picture_id_len == 2) {
+    buffer[0] = 0x80 | ((pic_id >> 8) & 0x7F);
+    buffer[1] = pic_id & 0xFF;
+  } else if (picture_id_len == 1) {
+    buffer[0] = pic_id & 0x7F;
+  }
+  return picture_id_len;
+}
+
+int RtpFormatVp8::WriteTl0PicIdxFields(WebRtc_UWord8* x_field,
+                                       WebRtc_UWord8* buffer,
+                                       int buffer_length,
+                                       int* extension_length) const {
+  // Set the L bit in *x_field and write the one-octet TL0PICIDX after the
+  // extension octets written so far. On success, *extension_length is
+  // incremented and 0 is returned; returns -1 if the octet does not fit.
+  if (buffer_length < vp8_fixed_payload_descriptor_bytes_ + *extension_length
+      + 1) {
+    return -1;
+  }
+  *x_field |= kLBit;
+  buffer[vp8_fixed_payload_descriptor_bytes_
+         + *extension_length] = hdr_info_.tl0PicIdx;
+  ++*extension_length;
+  return 0;
+}
+
+int RtpFormatVp8::WriteTIDAndKeyIdxFields(WebRtc_UWord8* x_field,
+                                          WebRtc_UWord8* buffer,
+                                          int buffer_length,
+                                          int* extension_length) const {
+  // Set the T and/or K bits in *x_field and write the single shared octet
+  // holding TID (bits 7-6), Y (bit 5) and KEYIDX (bits 4-0) after the
+  // extension octets written so far. On success, *extension_length is
+  // incremented and 0 is returned; returns -1 if the octet does not fit.
+  if (buffer_length < vp8_fixed_payload_descriptor_bytes_ + *extension_length
+      + 1) {
+    return -1;
+  }
+  WebRtc_UWord8* data_field =
+      &buffer[vp8_fixed_payload_descriptor_bytes_ + *extension_length];
+  *data_field = 0;
+  if (TIDFieldPresent()) {
+    *x_field |= kTBit;
+    // Only two bits are available for the temporal index.
+    assert(hdr_info_.temporalIdx >= 0 && hdr_info_.temporalIdx <= 3);
+    *data_field |= hdr_info_.temporalIdx << 6;
+    *data_field |= hdr_info_.layerSync ? kYBit : 0;
+  }
+  if (KeyIdxFieldPresent()) {
+    *x_field |= kKBit;
+    *data_field |= (hdr_info_.keyIdx & kKeyIdxField);
+  }
+  ++*extension_length;
+  return 0;
+}
+
+int RtpFormatVp8::PayloadDescriptorExtraLength() const {
+  // Length (octets) of the optional extension fields, including the X
+  // field octet itself when any extension is present; 0 when none are.
+  int length_bytes = PictureIdLength();
+  if (TL0PicIdxFieldPresent()) ++length_bytes;
+  if (TIDFieldPresent() || KeyIdxFieldPresent()) ++length_bytes;
+  if (length_bytes > 0) ++length_bytes;  // Include the extension field.
+  return length_bytes;
+}
+
+int RtpFormatVp8::PictureIdLength() const {
+  // 0 octets when no PictureID is used, 1 octet for values that fit in
+  // 7 bits, otherwise 2 octets (15-bit form).
+  if (hdr_info_.pictureId == kNoPictureId) {
+    return 0;
+  }
+  if (hdr_info_.pictureId <= 0x7F) {
+    return 1;
+  }
+  return 2;
+}
+
+bool RtpFormatVp8::XFieldPresent() const {
+  // The X field octet is needed as soon as any optional extension is used.
+  return (TIDFieldPresent() || TL0PicIdxFieldPresent() || PictureIdPresent()
+      || KeyIdxFieldPresent());
+}
+
+bool RtpFormatVp8::TIDFieldPresent() const {
+  // A layer-sync frame must always carry a valid temporal layer index.
+  assert((hdr_info_.layerSync == false) ||
+         (hdr_info_.temporalIdx != kNoTemporalIdx));
+  return (hdr_info_.temporalIdx != kNoTemporalIdx);
+}
+
+// True if a key frame index is set and the KEYIDX field must be written.
+bool RtpFormatVp8::KeyIdxFieldPresent() const {
+  return (hdr_info_.keyIdx != kNoKeyIdx);
+}
+
+// True if a TL0 picture index is set and the TL0PICIDX field must be written.
+bool RtpFormatVp8::TL0PicIdxFieldPresent() const {
+  return (hdr_info_.tl0PicIdx != kNoTl0PicIdx);
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8.h b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8.h
new file mode 100644
index 0000000..f568f4d
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8.h
@@ -0,0 +1,206 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the declaration of the VP8 packetizer class.
+ * A packetizer object is created for each encoded video frame. The
+ * constructor is called with the payload data and size,
+ * together with the fragmentation information and a packetizer mode
+ * of choice. Alternatively, if no fragmentation info is available, the
+ * second constructor can be used with only payload data and size; in that
+ * case the mode kEqualSize is used.
+ *
+ * After creating the packetizer, the method NextPacket is called
+ * repeatedly to get all packets for the frame. The method returns
+ * false as long as there are more packets left to fetch.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_
+
+#include <queue>
+#include <vector>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+enum VP8PacketizerMode {
+  kStrict = 0,  // Split partitions if too large;
+                // never aggregate, balance size.
+  kAggregate,   // Split partitions if too large; aggregate whole partitions.
+  kEqualSize,   // Split entire payload without considering partition limits.
+                // This will produce equal size packets for the whole frame.
+  kNumModes,    // Not a mode; used to size the per-mode config tables.
+};
+
+// Packetizer for VP8.
+class RtpFormatVp8 {
+ public:
+  // Initialize with payload from encoder and fragmentation info.
+  // The payload_data must be exactly one encoded VP8 frame.
+  RtpFormatVp8(const WebRtc_UWord8* payload_data,
+               WebRtc_UWord32 payload_size,
+               const RTPVideoHeaderVP8& hdr_info,
+               int max_payload_len,
+               const RTPFragmentationHeader& fragmentation,
+               VP8PacketizerMode mode);
+
+  // Initialize without fragmentation info. Mode kEqualSize will be used.
+  // The payload_data must be exactly one encoded VP8 frame.
+  RtpFormatVp8(const WebRtc_UWord8* payload_data,
+               WebRtc_UWord32 payload_size,
+               const RTPVideoHeaderVP8& hdr_info,
+               int max_payload_len);
+
+  // Get the next payload with VP8 payload header.
+  // max_payload_len limits the sum length of payload and VP8 payload header.
+  // buffer is a pointer to where the output will be written.
+  // bytes_to_send is an output variable that will contain number of bytes
+  // written to buffer. Parameter last_packet is true for the last packet of
+  // the frame, false otherwise (i.e., call the function again to get the
+  // next packet).
+  // For the kStrict and kAggregate mode: returns the partition index from which
+  // the first payload byte in the packet is taken, with the first partition
+  // having index 0; returns negative on error.
+  // For the kEqualSize mode: returns 0 on success, negative on error.
+  int NextPacket(WebRtc_UWord8* buffer,
+                 int* bytes_to_send,
+                 bool* last_packet);
+
+ private:
+  // Description of one packet to be produced, queued by the packetization
+  // pass and consumed when the packet is written.
+  typedef struct {
+    int payload_start_pos;   // Offset of the packet's first payload byte.
+    int size;                // Number of payload bytes in the packet.
+    bool first_fragment;     // True if packet starts on a fragment boundary
+                             // (sets the S bit of the payload descriptor).
+    int first_partition_ix;  // Partition index of the first payload byte.
+  } InfoStruct;
+  typedef std::queue<InfoStruct> InfoQueue;
+  enum AggregationMode {
+    kAggrNone = 0,    // No aggregation.
+    kAggrPartitions,  // Aggregate intact partitions.
+    kAggrFragments    // Aggregate intact and fragmented partitions.
+  };
+
+  // Per-mode configuration tables, indexed by VP8PacketizerMode.
+  static const AggregationMode aggr_modes_[kNumModes];
+  static const bool balance_modes_[kNumModes];
+  static const bool separate_first_modes_[kNumModes];
+  // Bit masks for the payload descriptor's first octet and X extension.
+  static const int kXBit        = 0x80;
+  static const int kNBit        = 0x20;
+  static const int kSBit        = 0x10;
+  static const int kPartIdField = 0x0F;
+  static const int kKeyIdxField = 0x1F;
+  static const int kIBit        = 0x80;
+  static const int kLBit        = 0x40;
+  static const int kTBit        = 0x20;
+  static const int kKBit        = 0x10;
+  static const int kYBit        = 0x20;
+
+  // Calculate size of next chunk to send. Returns 0 if none can be sent.
+  int CalcNextSize(int max_payload_len, int remaining_bytes,
+                   bool split_payload) const;
+
+  // Calculate all packet sizes and load to packet info queue.
+  int GeneratePackets();
+
+  // Calculate all packet sizes using Vp8PartitionAggregator and load to packet
+  // info queue.
+  int GeneratePacketsBalancedAggregates();
+
+  // Helper function to GeneratePacketsBalancedAggregates(). Find all
+  // continuous sets of partitions smaller than the max payload size (not
+  // max_size), and aggregate them into balanced packets. The result is written
+  // to partition_vec, which is of the same length as the number of partitions.
+  // A value of -1 indicates that the partition is too large and must be split.
+  // Aggregates are numbered 0, 1, 2, etc. For each set of small partitions,
+  // the aggregate numbers restart at 0. Output values min_size and max_size
+  // will hold the smallest and largest resulting aggregates (i.e., not counting
+  // those that must be split).
+  void AggregateSmallPartitions(std::vector<int>* partition_vec,
+                                int* min_size,
+                                int* max_size);
+
+  // Insert packet into packet queue.
+  void QueuePacket(int start_pos,
+                   int packet_size,
+                   int first_partition_in_packet,
+                   bool start_on_new_fragment);
+
+  // Write the payload header and copy the payload to the buffer.
+  // The info in packet_info determines which part of the payload is written
+  // and what to write in the header fields.
+  int WriteHeaderAndPayload(const InfoStruct& packet_info,
+                            WebRtc_UWord8* buffer,
+                            int buffer_length) const;
+
+
+  // Write the X field and the appropriate extension fields to buffer.
+  // The function returns the extension length (including X field), or -1
+  // on error.
+  int WriteExtensionFields(WebRtc_UWord8* buffer, int buffer_length) const;
+
+  // Set the I bit in the x_field, and write PictureID to the appropriate
+  // position in buffer. The function returns 0 on success, -1 otherwise.
+  int WritePictureIDFields(WebRtc_UWord8* x_field, WebRtc_UWord8* buffer,
+                           int buffer_length, int* extension_length) const;
+
+  // Set the L bit in the x_field, and write Tl0PicIdx to the appropriate
+  // position in buffer. The function returns 0 on success, -1 otherwise.
+  int WriteTl0PicIdxFields(WebRtc_UWord8* x_field, WebRtc_UWord8* buffer,
+                           int buffer_length, int* extension_length) const;
+
+  // Set the T and K bits in the x_field, and write TID, Y and KeyIdx to the
+  // appropriate position in buffer. The function returns 0 on success,
+  // -1 otherwise.
+  int WriteTIDAndKeyIdxFields(WebRtc_UWord8* x_field, WebRtc_UWord8* buffer,
+                              int buffer_length, int* extension_length) const;
+
+  // Write the PictureID from hdr_info_ to buffer. One or two
+  // bytes are written, depending on magnitude of PictureID. The function
+  // returns the number of bytes written.
+  int WritePictureID(WebRtc_UWord8* buffer, int buffer_length) const;
+
+  // Calculate and return length (octets) of the variable header fields in
+  // the next header (i.e., header length in addition to
+  // vp8_fixed_payload_descriptor_bytes_).
+  int PayloadDescriptorExtraLength() const;
+
+  // Calculate and return length (octets) of PictureID field in the next
+  // header. Can be 0, 1, or 2.
+  int PictureIdLength() const;
+
+  // Check whether each of the optional fields will be included in the header.
+  bool XFieldPresent() const;
+  bool TIDFieldPresent() const;
+  bool KeyIdxFieldPresent() const;
+  bool TL0PicIdxFieldPresent() const;
+  bool PictureIdPresent() const { return (PictureIdLength() > 0); }
+
+  const WebRtc_UWord8* payload_data_;
+  const int payload_size_;
+  RTPFragmentationHeader part_info_;
+  const int vp8_fixed_payload_descriptor_bytes_;  // Length of VP8 payload
+                                                  // descriptor's fixed part.
+  const AggregationMode aggr_mode_;
+  const bool balance_;
+  const bool separate_first_;
+  const RTPVideoHeaderVP8 hdr_info_;
+  const int num_partitions_;
+  const int max_payload_len_;
+  InfoQueue packets_;  // Pending packets produced by the packetization pass.
+  bool packets_calculated_;
+
+  DISALLOW_COPY_AND_ASSIGN(RtpFormatVp8);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc
new file mode 100644
index 0000000..59e76ee
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc
@@ -0,0 +1,248 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+namespace test {
+
+// The constructor allocates nothing; payload and scratch buffers are
+// created in Init(), so all pointers start out NULL.
+RtpFormatVp8TestHelper::RtpFormatVp8TestHelper(const RTPVideoHeaderVP8* hdr)
+    : payload_data_(NULL),
+      buffer_(NULL),
+      fragmentation_(NULL),
+      hdr_info_(hdr),
+      sloppy_partitioning_(false),
+      inited_(false) {}
+
+// Frees everything allocated by Init(). Safe when Init() was never
+// called, since all members are initialized to NULL.
+RtpFormatVp8TestHelper::~RtpFormatVp8TestHelper() {
+  delete fragmentation_;
+  delete [] payload_data_;
+  delete [] buffer_;
+}
+
+bool RtpFormatVp8TestHelper::Init(const int* partition_sizes,
+                                  int num_partitions) {
+  // Build a fake payload where every byte equals the index of the
+  // partition it belongs to, and fill in the matching fragmentation
+  // header. Returns false if the helper was already initialized.
+  if (inited_) return false;
+  fragmentation_ = new RTPFragmentationHeader;
+  fragmentation_->VerifyAndAllocateFragmentationHeader(num_partitions);
+  payload_size_ = 0;
+  // Calculate sum payload size.
+  for (int p = 0; p < num_partitions; ++p) {
+    payload_size_ += partition_sizes[p];
+  }
+  buffer_size_ = payload_size_ + 6;  // Add space for payload descriptor.
+  payload_data_ = new WebRtc_UWord8[payload_size_];
+  buffer_ = new WebRtc_UWord8[buffer_size_];
+  int j = 0;
+  // Loop through the partitions again.
+  for (int p = 0; p < num_partitions; ++p) {
+    fragmentation_->fragmentationLength[p] = partition_sizes[p];
+    fragmentation_->fragmentationOffset[p] = j;
+    for (int i = 0; i < partition_sizes[p]; ++i) {
+      assert(j < payload_size_);
+      payload_data_[j++] = p;  // Set the payload value to the partition index.
+    }
+  }
+  data_ptr_ = payload_data_;
+  inited_ = true;
+  return true;
+}
+
+void RtpFormatVp8TestHelper::GetAllPacketsAndCheck(
+    RtpFormatVp8* packetizer,
+    const int* expected_sizes,
+    const int* expected_part,
+    const bool* expected_frag_start,
+    int expected_num_packets) {
+  // Fetch expected_num_packets packets from the packetizer and check each
+  // against the expected size, partition index and fragment-start flag;
+  // finally verify that the last fetched packet was flagged as last.
+  ASSERT_TRUE(inited_);
+  int send_bytes = 0;
+  // NOTE(review): 'last' is read by EXPECT_TRUE below without being
+  // initialized when expected_num_packets is 0 — confirm callers always
+  // expect at least one packet.
+  bool last;
+  for (int i = 0; i < expected_num_packets; ++i) {
+    std::ostringstream ss;
+    ss << "Checking packet " << i;
+    SCOPED_TRACE(ss.str());
+    EXPECT_EQ(expected_part[i],
+              packetizer->NextPacket(buffer_, &send_bytes, &last));
+    CheckPacket(send_bytes, expected_sizes[i], last,
+                expected_frag_start[i]);
+  }
+  EXPECT_TRUE(last);
+}
+
+// Payload descriptor
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |X|R|N|S|PartID | (REQUIRED)
+//      +-+-+-+-+-+-+-+-+
+// X:   |I|L|T|K|  RSV  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// I:   |   PictureID   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// L:   |   TL0PICIDX   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// T/K: | TID | KEYIDX  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+
+// First octet tests.
+// Checks that bit n of x equals a.
+#define EXPECT_BIT_EQ(x, n, a) EXPECT_EQ((((x) >> (n)) & 0x1), a)
+
+// NOTE(review): the 0xE0 mask checks bits 5-7, but per the diagrams above
+// the reserved bits are bit 6 of the first octet and bits 0-3 of the X
+// octet — this mask looks inconsistent. The macro is unused in this file;
+// confirm intended usage before relying on it.
+#define EXPECT_RSV_ZERO(x) EXPECT_EQ(((x) & 0xE0), 0)
+
+#define EXPECT_BIT_X_EQ(x, a) EXPECT_BIT_EQ(x, 7, a)
+
+#define EXPECT_BIT_N_EQ(x, a) EXPECT_BIT_EQ(x, 5, a)
+
+#define EXPECT_BIT_S_EQ(x, a) EXPECT_BIT_EQ(x, 4, a)
+
+#define EXPECT_PART_ID_EQ(x, a) EXPECT_EQ(((x) & 0x0F), a)
+
+// Extension fields tests
+#define EXPECT_BIT_I_EQ(x, a) EXPECT_BIT_EQ(x, 7, a)
+
+#define EXPECT_BIT_L_EQ(x, a) EXPECT_BIT_EQ(x, 6, a)
+
+#define EXPECT_BIT_T_EQ(x, a) EXPECT_BIT_EQ(x, 5, a)
+
+#define EXPECT_BIT_K_EQ(x, a) EXPECT_BIT_EQ(x, 4, a)
+
+#define EXPECT_TID_EQ(x, a) EXPECT_EQ((((x) & 0xC0) >> 6), a)
+
+#define EXPECT_BIT_Y_EQ(x, a) EXPECT_BIT_EQ(x, 5, a)
+
+#define EXPECT_KEYIDX_EQ(x, a) EXPECT_EQ(((x) & 0x1F), a)
+
+// Verify the payload descriptor in buffer_ against hdr_info_. As a side
+// effect, payload_start_ is left pointing at the first payload byte in
+// buffer_, which CheckPayload() relies on.
+void RtpFormatVp8TestHelper::CheckHeader(bool frag_start) {
+  payload_start_ = 1;
+  EXPECT_BIT_EQ(buffer_[0], 6, 0);  // Check reserved bit.
+
+  // The X bit must be set iff any optional extension field is in use.
+  if (hdr_info_->pictureId != kNoPictureId ||
+      hdr_info_->temporalIdx != kNoTemporalIdx ||
+      hdr_info_->tl0PicIdx != kNoTl0PicIdx ||
+      hdr_info_->keyIdx != kNoKeyIdx) {
+    EXPECT_BIT_X_EQ(buffer_[0], 1);
+    ++payload_start_;
+    CheckPictureID();
+    CheckTl0PicIdx();
+    CheckTIDAndKeyIdx();
+  } else {
+    EXPECT_BIT_X_EQ(buffer_[0], 0);
+  }
+
+  EXPECT_BIT_N_EQ(buffer_[0], hdr_info_->nonReference ? 1 : 0);
+  EXPECT_BIT_S_EQ(buffer_[0], frag_start ? 1 : 0);
+
+  // Check partition index.
+  if (!sloppy_partitioning_) {
+    // The test payload data is constructed such that the payload value is the
+    // same as the partition index.
+    EXPECT_EQ(buffer_[0] & 0x0F, buffer_[payload_start_]);
+  } else {
+    // Partition should be set to 0.
+    EXPECT_EQ(buffer_[0] & 0x0F, 0);
+  }
+}
+
+// Verify that the I bit and the PictureID field are both set in accordance
+// with the information in hdr_info_->pictureId. buffer_[1] is the X
+// extension octet; payload_start_ is advanced past the PictureID octets.
+void RtpFormatVp8TestHelper::CheckPictureID() {
+  if (hdr_info_->pictureId != kNoPictureId) {
+    EXPECT_BIT_I_EQ(buffer_[1], 1);
+    if (hdr_info_->pictureId > 0x7F) {
+      // Two-octet form: leading bit of the first octet set, 15-bit value.
+      EXPECT_BIT_EQ(buffer_[payload_start_], 7, 1);
+      EXPECT_EQ(buffer_[payload_start_] & 0x7F,
+                (hdr_info_->pictureId >> 8) & 0x7F);
+      EXPECT_EQ(buffer_[payload_start_ + 1],
+                hdr_info_->pictureId & 0xFF);
+      payload_start_ += 2;
+    } else {
+      // One-octet form: leading bit clear, 7-bit value.
+      EXPECT_BIT_EQ(buffer_[payload_start_], 7, 0);
+      EXPECT_EQ(buffer_[payload_start_] & 0x7F,
+                (hdr_info_->pictureId) & 0x7F);
+      payload_start_ += 1;
+    }
+  } else {
+    EXPECT_BIT_I_EQ(buffer_[1], 0);
+  }
+}
+
+// Verify that the L bit and the TL0PICIDX field are both set in accordance
+// with the information in hdr_info_->tl0PicIdx. buffer_[1] is the X
+// extension octet; payload_start_ is advanced past the TL0PICIDX octet.
+void RtpFormatVp8TestHelper::CheckTl0PicIdx() {
+  if (hdr_info_->tl0PicIdx != kNoTl0PicIdx) {
+    EXPECT_BIT_L_EQ(buffer_[1], 1);
+    EXPECT_EQ(buffer_[payload_start_], hdr_info_->tl0PicIdx);
+    ++payload_start_;
+  } else {
+    EXPECT_BIT_L_EQ(buffer_[1], 0);
+  }
+}
+
+// Verify that the T bit and the TID/Y fields, and the K bit and KEYIDX
+// field are all set in accordance with the information in
+// hdr_info_->temporalIdx and hdr_info_->keyIdx, respectively. Both fields
+// share a single octet; payload_start_ is advanced past it when present.
+void RtpFormatVp8TestHelper::CheckTIDAndKeyIdx() {
+  if (hdr_info_->temporalIdx == kNoTemporalIdx &&
+      hdr_info_->keyIdx == kNoKeyIdx) {
+    EXPECT_BIT_T_EQ(buffer_[1], 0);
+    EXPECT_BIT_K_EQ(buffer_[1], 0);
+    return;
+  }
+  if (hdr_info_->temporalIdx != kNoTemporalIdx) {
+    EXPECT_BIT_T_EQ(buffer_[1], 1);
+    EXPECT_TID_EQ(buffer_[payload_start_], hdr_info_->temporalIdx);
+    EXPECT_BIT_Y_EQ(buffer_[payload_start_], hdr_info_->layerSync ? 1 : 0);
+  } else {
+    EXPECT_BIT_T_EQ(buffer_[1], 0);
+    EXPECT_TID_EQ(buffer_[payload_start_], 0);
+    EXPECT_BIT_Y_EQ(buffer_[payload_start_], 0);
+  }
+  if (hdr_info_->keyIdx != kNoKeyIdx) {
+    EXPECT_BIT_K_EQ(buffer_[1], 1);
+    EXPECT_KEYIDX_EQ(buffer_[payload_start_], hdr_info_->keyIdx);
+  } else {
+    EXPECT_BIT_K_EQ(buffer_[1], 0);
+    EXPECT_KEYIDX_EQ(buffer_[payload_start_], 0);
+  }
+  ++payload_start_;
+}
+
+// Verify that the payload (i.e., after the headers) of the packet stored in
+// buffer_ is identical to the expected (as found in data_ptr_).
+// data_ptr_ is advanced past the compared bytes, so consecutive calls walk
+// through the whole frame payload.
+void RtpFormatVp8TestHelper::CheckPayload(int payload_end) {
+  for (int i = payload_start_; i < payload_end; ++i, ++data_ptr_)
+    EXPECT_EQ(buffer_[i], *data_ptr_);
+}
+
+// Verify that the input variable "last" agrees with the position of data_ptr_
+// (which is advanced by CheckPayload()).
+// If data_ptr_ has advanced payload_size_ bytes from the start (payload_data_)
+// we are at the end and last should be true. Otherwise, it should be false.
+void RtpFormatVp8TestHelper::CheckLast(bool last) const {
+  EXPECT_EQ(last, data_ptr_ == payload_data_ + payload_size_);
+}
+
+// Verify the contents of a packet. Check the length versus expected_bytes,
+// the header, payload, and "last" flag. CheckHeader() must run before
+// CheckPayload(), since it computes payload_start_.
+void RtpFormatVp8TestHelper::CheckPacket(int send_bytes,
+                                         int expect_bytes,
+                                         bool last,
+                                         bool frag_start) {
+  EXPECT_EQ(expect_bytes, send_bytes);
+  CheckHeader(frag_start);
+  CheckPayload(send_bytes);
+  CheckLast(last);
+}
+
+}  // namespace test
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
new file mode 100644
index 0000000..f9bf686
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains the class RtpFormatVp8TestHelper. The class is
+// responsible for setting up a fake VP8 bitstream according to the
+// RTPVideoHeaderVP8 header, and partition information. After initialization,
+// an RTPFragmentationHeader is provided so that the tester can create a
+// packetizer. The packetizer can then be provided to this helper class, which
+// will then extract all packets and compare to the expected outcome.
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
+
+#include "modules/interface/module_common_types.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+namespace test {
+
+class RtpFormatVp8TestHelper {
+ public:
+  explicit RtpFormatVp8TestHelper(const RTPVideoHeaderVP8* hdr);
+  ~RtpFormatVp8TestHelper();
+  // Allocate and fill the fake payload and fragmentation header; must be
+  // called once before the other methods. Returns false if called twice.
+  bool Init(const int* partition_sizes, int num_partitions);
+  // Drain the packetizer, checking every packet against the expectations.
+  void GetAllPacketsAndCheck(RtpFormatVp8* packetizer,
+                             const int* expected_sizes,
+                             const int* expected_part,
+                             const bool* expected_frag_start,
+                             int expected_num_packets);
+
+  uint8_t* payload_data() const { return payload_data_; }
+  int payload_size() const { return payload_size_; }
+  RTPFragmentationHeader* fragmentation() const { return fragmentation_; }
+  int buffer_size() const { return buffer_size_; }
+  // When true, the checker expects the PartID field to be 0 in every
+  // packet header instead of matching the payload's partition index.
+  void set_sloppy_partitioning(bool value) { sloppy_partitioning_ = value; }
+
+ private:
+  void CheckHeader(bool frag_start);
+  void CheckPictureID();
+  void CheckTl0PicIdx();
+  void CheckTIDAndKeyIdx();
+  void CheckPayload(int payload_end);
+  void CheckLast(bool last) const;
+  void CheckPacket(int send_bytes, int expect_bytes, bool last,
+                   bool frag_start);
+
+  uint8_t* payload_data_;   // Fake payload; each byte == partition index.
+  uint8_t* buffer_;         // Scratch buffer that packets are written into.
+  uint8_t* data_ptr_;       // Read cursor into payload_data_.
+  RTPFragmentationHeader* fragmentation_;
+  const RTPVideoHeaderVP8* hdr_info_;
+  int payload_start_;       // Offset of first payload byte within buffer_.
+  int payload_size_;
+  int buffer_size_;
+  bool sloppy_partitioning_;
+  bool inited_;
+
+  DISALLOW_COPY_AND_ASSIGN(RtpFormatVp8TestHelper);
+};
+
+}  // namespace test
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
new file mode 100644
index 0000000..a6f9632
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
@@ -0,0 +1,364 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the VP8 packetizer.
+ */
+
+#include <gtest/gtest.h>
+
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+template <bool>
+struct CompileAssert {
+};
+
+#undef COMPILE_ASSERT
+#define COMPILE_ASSERT(expr, msg) \
+  typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
+
+class RtpFormatVp8Test : public ::testing::Test {
+ protected:
+  RtpFormatVp8Test() : helper_(NULL) {}
+  virtual void TearDown() { delete helper_; }
+  bool Init(const int* partition_sizes, int num_partitions) {
+    hdr_info_.pictureId = kNoPictureId;
+    hdr_info_.nonReference = false;
+    hdr_info_.temporalIdx = kNoTemporalIdx;
+    hdr_info_.layerSync = false;
+    hdr_info_.tl0PicIdx = kNoTl0PicIdx;
+    hdr_info_.keyIdx = kNoKeyIdx;
+    if (helper_ != NULL) return false;
+    helper_ = new test::RtpFormatVp8TestHelper(&hdr_info_);
+    return helper_->Init(partition_sizes, num_partitions);
+  }
+
+  RTPVideoHeaderVP8 hdr_info_;
+  test::RtpFormatVp8TestHelper* helper_;
+};
+
+TEST_F(RtpFormatVp8Test, TestStrictMode) {
+  const int kSizeVector[] = {10, 8, 27};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 200;  // > 0x7F should produce 2-byte PictureID.
+  const int kMaxSize = 13;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kStrict);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {9, 9, 12, 11, 11, 11, 10};
+  const int kExpectedPart[] = {0, 0, 1, 2, 2, 2, 2};
+  const bool kExpectedFragStart[] =
+      {true, false, true, true, false, false, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateMode) {
+  const int kSizeVector[] = {60, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 25;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {23, 23, 23, 23};
+  const int kExpectedPart[] = {0, 0, 0, 1};
+  const bool kExpectedFragStart[] = {true, false, false, true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateModeManyPartitions1) {
+  const int kSizeVector[] = {1600, 200, 200, 200, 200, 200, 200, 200, 200};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 1500;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {803, 803, 803, 803};
+  const int kExpectedPart[] = {0, 0, 1, 5};
+  const bool kExpectedFragStart[] = {true, false, true, true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateModeManyPartitions2) {
+  const int kSizeVector[] = {1599, 200, 200, 200, 1600, 200, 200, 200, 200};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 1500;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {803, 802, 603, 803, 803, 803};
+  const int kExpectedPart[] = {0, 0, 1, 4, 4, 5};
+  const bool kExpectedFragStart[] = {true, false, true, true, false, true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateModeTwoLargePartitions) {
+  const int kSizeVector[] = {1654, 2268};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 1460;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {830, 830, 1137, 1137};
+  const int kExpectedPart[] = {0, 0, 1, 1};
+  const bool kExpectedFragStart[] = {true, false, true, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify that EqualSize mode is forced if fragmentation info is missing.
+TEST_F(RtpFormatVp8Test, TestEqualSizeModeFallback) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 200;  // > 0x7F should produce 2-byte PictureID
+  const int kMaxSize = 12;  // Small enough to produce 4 packets.
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize);
+
+  // Expecting three full packets, and one with the remainder.
+  const int kExpectedSizes[] = {12, 11, 12, 11};
+  const int kExpectedPart[] = {0, 0, 0, 0};  // Always 0 for equal size mode.
+  // Frag start only true for first packet in equal size mode.
+  const bool kExpectedFragStart[] = {true, false, false, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->set_sloppy_partitioning(true);
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify that non-reference bit is set. EqualSize mode fallback is expected.
+TEST_F(RtpFormatVp8Test, TestNonReferenceBit) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.nonReference = true;
+  const int kMaxSize = 25;  // Small enough to produce two packets.
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize);
+
+  // EqualSize mode => First packet full; other not.
+  const int kExpectedSizes[] = {16, 16};
+  const int kExpectedPart[] = {0, 0};  // Always 0 for equal size mode.
+  // Frag start only true for first packet in equal size mode.
+  const bool kExpectedFragStart[] = {true, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->set_sloppy_partitioning(true);
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify Tl0PicIdx and TID fields, and layerSync bit.
+TEST_F(RtpFormatVp8Test, TestTl0PicIdxAndTID) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.tl0PicIdx = 117;
+  hdr_info_.temporalIdx = 2;
+  hdr_info_.layerSync = true;
+  // kMaxSize is only limited by allocated buffer size.
+  const int kMaxSize = helper_->buffer_size();
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // Expect one single packet of payload_size() + 4 bytes header.
+  const int kExpectedSizes[1] = {helper_->payload_size() + 4};
+  const int kExpectedPart[1] = {0};  // Packet starts with partition 0.
+  const bool kExpectedFragStart[1] = {true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify KeyIdx field.
+TEST_F(RtpFormatVp8Test, TestKeyIdx) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.keyIdx = 17;
+  // kMaxSize is only limited by allocated buffer size.
+  const int kMaxSize = helper_->buffer_size();
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // Expect one single packet of payload_size() + 3 bytes header.
+  const int kExpectedSizes[1] = {helper_->payload_size() + 3};
+  const int kExpectedPart[1] = {0};  // Packet starts with partition 0.
+  const bool kExpectedFragStart[1] = {true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify TID field and KeyIdx field in combination.
+TEST_F(RtpFormatVp8Test, TestTIDAndKeyIdx) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.temporalIdx = 1;
+  hdr_info_.keyIdx = 5;
+  // kMaxSize is only limited by allocated buffer size.
+  const int kMaxSize = helper_->buffer_size();
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // Expect one single packet of payload_size() + 3 bytes header.
+  const int kExpectedSizes[1] = {helper_->payload_size() + 3};
+  const int kExpectedPart[1] = {0};  // Packet starts with partition 0.
+  const bool kExpectedFragStart[1] = {true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]),
+      kExpectedPart_wrong_size);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]),
+      kExpectedFragStart_wrong_size);
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_header_extension.cc b/trunk/src/modules/rtp_rtcp/source/rtp_header_extension.cc
new file mode 100644
index 0000000..37a87d7
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -0,0 +1,176 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "common_types.h"
+#include "rtp_header_extension.h"
+
+namespace webrtc {
+
+RtpHeaderExtensionMap::RtpHeaderExtensionMap() {
+}
+
+RtpHeaderExtensionMap::~RtpHeaderExtensionMap() {
+  Erase();
+}
+
+void RtpHeaderExtensionMap::Erase() {
+  while (!extensionMap_.empty()) {
+    std::map<uint8_t, HeaderExtension*>::iterator it =
+        extensionMap_.begin();
+    delete it->second;
+    extensionMap_.erase(it);
+  }
+}
+
+int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
+                                        const uint8_t id) {
+  if (id < 1 || id > 14) {
+    return -1;
+  }
+  std::map<uint8_t, HeaderExtension*>::iterator it =
+      extensionMap_.find(id);
+  if (it != extensionMap_.end()) {
+    return -1;
+  }
+  extensionMap_[id] = new HeaderExtension(type);
+  return 0;
+}
+
+int32_t RtpHeaderExtensionMap::Deregister(const RTPExtensionType type) {
+  uint8_t id;
+  if (GetId(type, &id) != 0) {
+    return -1;
+  }
+  std::map<uint8_t, HeaderExtension*>::iterator it =
+      extensionMap_.find(id);
+  if (it == extensionMap_.end()) {
+    return -1;
+  }
+  delete it->second;
+  extensionMap_.erase(it);
+  return 0;
+}
+
+int32_t RtpHeaderExtensionMap::GetType(const uint8_t id,
+                                       RTPExtensionType* type) const {
+  assert(type);
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.find(id);
+  if (it == extensionMap_.end()) {
+    return -1;
+  }
+  HeaderExtension* extension = it->second;
+  *type = extension->type;
+  return 0;
+}
+
+int32_t RtpHeaderExtensionMap::GetId(const RTPExtensionType type,
+                                     uint8_t* id) const {
+  assert(id);
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    if (extension->type == type) {
+      *id = it->first;
+      return 0;
+    }
+    it++;
+  }
+  return -1;
+}
+
+uint16_t RtpHeaderExtensionMap::GetTotalLengthInBytes() const {
+  // Get length for each extension block.
+  uint16_t length = 0;
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    length += extension->length;
+    it++;
+  }
+  // Add RTP extension header length.
+  if (length > 0) {
+    length += RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
+  }
+  return length;
+}
+
+int32_t RtpHeaderExtensionMap::GetLengthUntilBlockStartInBytes(
+    const RTPExtensionType type) const {
+  uint8_t id;
+  if (GetId(type, &id) != 0) {
+    // Not registered.
+    return -1;
+  }
+  // Get length until start of extension block type.
+  uint16_t length = RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
+
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    if (extension->type == type) {
+      break;
+    } else {
+      length += extension->length;
+    }
+    it++;
+  }
+  return length;
+}
+
+int32_t RtpHeaderExtensionMap::Size() const {
+  return extensionMap_.size();
+}
+
+RTPExtensionType RtpHeaderExtensionMap::First() const {
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  if (it == extensionMap_.end()) {
+     return kRtpExtensionNone;
+  }
+  HeaderExtension* extension = it->second;
+  return extension->type;
+}
+
+RTPExtensionType RtpHeaderExtensionMap::Next(RTPExtensionType type) const {
+  uint8_t id;
+  if (GetId(type, &id) != 0) {
+    return kRtpExtensionNone;
+  }
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.find(id);
+  if (it == extensionMap_.end()) {
+    return kRtpExtensionNone;
+  }
+  it++;
+  if (it == extensionMap_.end()) {
+    return kRtpExtensionNone;
+  }
+  HeaderExtension* extension = it->second;
+  return extension->type;
+}
+
+void RtpHeaderExtensionMap::GetCopy(RtpHeaderExtensionMap* map) const {
+  assert(map);
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    map->Register(extension->type, it->first);
+    it++;
+  }
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_header_extension.h b/trunk/src/modules/rtp_rtcp/source/rtp_header_extension.h
new file mode 100644
index 0000000..b1612cc
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
+#define WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
+
+#include <map>
+
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+enum {RTP_ONE_BYTE_HEADER_EXTENSION = 0xbede};
+
+enum ExtensionLength {
+   RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES = 4,
+   TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES = 4
+};
+
+struct HeaderExtension {
+  HeaderExtension(RTPExtensionType extension_type)
+    : type(extension_type),
+      length(0) {
+     if (type == kRtpExtensionTransmissionTimeOffset) {
+       length = TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES;
+     }
+   }
+
+   const RTPExtensionType type;
+   uint8_t length;
+};
+
+class RtpHeaderExtensionMap {
+ public:
+  RtpHeaderExtensionMap();
+  ~RtpHeaderExtensionMap();
+
+  void Erase();
+
+  int32_t Register(const RTPExtensionType type, const uint8_t id);
+
+  int32_t Deregister(const RTPExtensionType type);
+
+  int32_t GetType(const uint8_t id, RTPExtensionType* type) const;
+
+  int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
+
+  uint16_t GetTotalLengthInBytes() const;
+
+  int32_t GetLengthUntilBlockStartInBytes(const RTPExtensionType type) const;
+
+  void GetCopy(RtpHeaderExtensionMap* map) const;
+
+  int32_t Size() const;
+
+  RTPExtensionType First() const;
+
+  RTPExtensionType Next(RTPExtensionType type) const;
+
+ private:
+  std::map<uint8_t, HeaderExtension*> extensionMap_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_header_extension_test.cc b/trunk/src/modules/rtp_rtcp/source/rtp_header_extension_test.cc
new file mode 100644
index 0000000..003e92c
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_header_extension_test.cc
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the RtpHeaderExtensionMap.
+ */
+
+#include <gtest/gtest.h>
+
+#include "rtp_header_extension.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class RtpHeaderExtensionTest : public ::testing::Test {
+ protected:
+  RtpHeaderExtensionTest() {}
+  ~RtpHeaderExtensionTest() {}
+
+  RtpHeaderExtensionMap map_;
+  enum {kId = 3};
+};
+
+TEST_F(RtpHeaderExtensionTest, Register) {
+  EXPECT_EQ(0, map_.Size());
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(1, map_.Size());
+  EXPECT_EQ(0, map_.Deregister(kRtpExtensionTransmissionTimeOffset));
+  EXPECT_EQ(0, map_.Size());
+}
+
+TEST_F(RtpHeaderExtensionTest, RegisterIllegalArg) {
+  // Valid range for id: [1-14].
+  EXPECT_EQ(-1, map_.Register(kRtpExtensionTransmissionTimeOffset, 0));
+  EXPECT_EQ(-1, map_.Register(kRtpExtensionTransmissionTimeOffset, 15));
+}
+
+TEST_F(RtpHeaderExtensionTest, DeregisterIllegalArg) {
+  // Not registered.
+  EXPECT_EQ(-1, map_.Deregister(kRtpExtensionTransmissionTimeOffset));
+}
+
+TEST_F(RtpHeaderExtensionTest, NonUniqueId) {
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(-1, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+}
+
+TEST_F(RtpHeaderExtensionTest, GetTotalLength) {
+  EXPECT_EQ(0, map_.GetTotalLengthInBytes());
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES +
+            TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES,
+            map_.GetTotalLengthInBytes());
+}
+
+TEST_F(RtpHeaderExtensionTest, GetLengthUntilBlockStart) {
+  EXPECT_EQ(-1, map_.GetLengthUntilBlockStartInBytes(
+      kRtpExtensionTransmissionTimeOffset));
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES,
+      map_.GetLengthUntilBlockStartInBytes(
+      kRtpExtensionTransmissionTimeOffset));
+}
+
+TEST_F(RtpHeaderExtensionTest, GetType) {
+  RTPExtensionType typeOut;
+  EXPECT_EQ(-1, map_.GetType(kId, &typeOut));
+
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(0, map_.GetType(kId, &typeOut));
+  EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, typeOut);
+}
+
+TEST_F(RtpHeaderExtensionTest, GetId) {
+  uint8_t idOut;
+  EXPECT_EQ(-1, map_.GetId(kRtpExtensionTransmissionTimeOffset, &idOut));
+
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(0, map_.GetId(kRtpExtensionTransmissionTimeOffset, &idOut));
+  EXPECT_EQ(kId, idOut);
+}
+
+TEST_F(RtpHeaderExtensionTest, IterateTypes) {
+  EXPECT_EQ(kRtpExtensionNone, map_.First());
+  EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
+
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+
+  EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, map_.First());
+  EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
+}
+
+TEST_F(RtpHeaderExtensionTest, GetCopy) {
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+
+  RtpHeaderExtensionMap mapOut;
+  map_.GetCopy(&mapOut);
+  EXPECT_EQ(1, mapOut.Size());
+  EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, mapOut.First());
+}
+
+TEST_F(RtpHeaderExtensionTest, Erase) {
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(1, map_.Size());
+  map_.Erase();
+  EXPECT_EQ(0, map_.Size());
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_packet_history.cc b/trunk/src/modules/rtp_rtcp/source/rtp_packet_history.cc
new file mode 100644
index 0000000..0de7553
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -0,0 +1,278 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Class for storing RTP packets.
+ */
+
+#include "rtp_packet_history.h"
+
+#include <assert.h>
+#include <cstring>   // memset
+
+#include "critical_section_wrapper.h"
+#include "rtp_utility.h"
+#include "trace.h"
+
+namespace webrtc {
+
+RTPPacketHistory::RTPPacketHistory(RtpRtcpClock* clock)
+  : clock_(*clock),
+    critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+    store_(false),
+    prev_index_(0),
+    max_packet_length_(0) {
+}
+
+RTPPacketHistory::~RTPPacketHistory() {
+  Free();
+  delete critsect_;
+}
+
+void RTPPacketHistory::SetStorePacketsStatus(bool enable, 
+                                             uint16_t number_to_store) {
+  if (enable) {
+    Allocate(number_to_store);
+  } else {
+    Free();
+  }
+}
+
+void RTPPacketHistory::Allocate(uint16_t number_to_store) {
+  assert(number_to_store > 0);
+  webrtc::CriticalSectionScoped cs(*critsect_); 
+  if (store_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+        "SetStorePacketsStatus already set, number: %d", number_to_store);
+    return;
+  }
+
+  store_ = true;
+  stored_packets_.resize(number_to_store);
+  stored_seq_nums_.resize(number_to_store);
+  stored_lengths_.resize(number_to_store);
+  stored_times_.resize(number_to_store);
+  stored_resend_times_.resize(number_to_store);
+  stored_types_.resize(number_to_store);
+}
+
+void RTPPacketHistory::Free() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return;
+  }
+
+  std::vector<std::vector<uint8_t> >::iterator it;
+  for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {   
+    it->clear();
+  }
+
+  stored_packets_.clear();
+  stored_seq_nums_.clear();
+  stored_lengths_.clear();
+  stored_times_.clear();
+  stored_resend_times_.clear();
+  stored_types_.clear();
+
+  store_ = false;
+  prev_index_ = 0;
+  max_packet_length_ = 0;
+}
+
+bool RTPPacketHistory::StorePackets() const {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  return store_;
+}
+
+// Private: the caller must already hold critsect_.
+void RTPPacketHistory::VerifyAndAllocatePacketLength(uint16_t packet_length) {
+  assert(packet_length > 0);
+  if (!store_) {
+    return;
+  }
+
+  if (packet_length <= max_packet_length_) {
+    return;
+  }
+
+  std::vector<std::vector<uint8_t> >::iterator it;
+  for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {
+    it->resize(packet_length);
+  }
+  max_packet_length_ = packet_length;
+}
+
+int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
+                                       uint16_t packet_length,
+                                       uint16_t max_packet_length,
+                                       StorageType type) {
+  if (type == kDontStore) {
+    return 0;
+  }
+
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return 0;
+  }
+
+  assert(packet);
+  assert(packet_length > 3);
+
+  VerifyAndAllocatePacketLength(max_packet_length);
+
+  if (packet_length > max_packet_length_) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, -1,
+        "Failed to store RTP packet, length: %d", packet_length);
+    return -1;
+  }
+
+  const uint16_t seq_num = (packet[2] << 8) + packet[3];
+
+  // Store packet
+  std::vector<std::vector<uint8_t> >::iterator it =
+      stored_packets_.begin() + prev_index_;
+  std::copy(packet, packet + packet_length, it->begin());
+
+  stored_seq_nums_[prev_index_] = seq_num;
+  stored_lengths_[prev_index_] = packet_length;
+  stored_times_[prev_index_] = clock_.GetTimeInMS();
+  stored_resend_times_[prev_index_] = 0;  // packet not resent
+  stored_types_[prev_index_] = type;
+
+  ++prev_index_;
+  if (prev_index_ >= stored_seq_nums_.size()) {
+    prev_index_ = 0;
+  }
+  return 0;
+}
+
+bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return false;
+  }
+
+  int32_t index = 0;
+  bool found = FindSeqNum(sequence_number, &index);
+  if (!found) {
+    return false;
+  }
+ 
+  uint16_t length = stored_lengths_.at(index);
+  if (length == 0 || length > max_packet_length_) {
+    // Invalid length.
+    return false;
+  }
+  return true;
+}
+
+bool RTPPacketHistory::GetRTPPacket(uint16_t sequence_number,
+                                    uint32_t min_elapsed_time_ms,
+                                    uint8_t* packet,
+                                    uint16_t* packet_length,
+                                    uint32_t* stored_time_ms,
+                                    StorageType* type) const {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return false;
+  }
+
+  int32_t index = 0;
+  bool found = FindSeqNum(sequence_number, &index);
+  if (!found) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+        "No match for getting seqNum %u", sequence_number);
+    return false;
+  }
+
+  uint16_t length = stored_lengths_.at(index);
+  if (length == 0 || length > max_packet_length_) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+        "No match for getting seqNum %u, len %d", sequence_number, length);
+    return false;
+  }
+
+  if (length > *packet_length) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+        "Input buffer too short for packet %u", sequence_number);
+    return false;
+  }
+
+  // Verify elapsed time since last retrieve. 
+  uint32_t now = clock_.GetTimeInMS();
+  if (min_elapsed_time_ms > 0 &&
+      ((now - stored_resend_times_.at(index)) < min_elapsed_time_ms)) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, 
+        "Skip getting packet %u, packet recently resent.", sequence_number);
+    *packet_length = 0;
+    return true;
+  }
+
+  // Get packet.
+  std::vector<std::vector<uint8_t> >::const_iterator it_found_packet =
+      stored_packets_.begin() + index;
+  std::copy(it_found_packet->begin(), it_found_packet->begin() + length, packet);
+  *packet_length = stored_lengths_.at(index);
+  *stored_time_ms = stored_times_.at(index);
+  *type = stored_types_.at(index);
+  return true;
+}
+
+void RTPPacketHistory::UpdateResendTime(uint16_t sequence_number) {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return;
+  }
+
+  int32_t index = 0;
+  bool found = FindSeqNum(sequence_number, &index);
+  if (!found) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+        "Failed to update resend time, seq num: %u.", sequence_number);
+    return;
+  }
+  stored_resend_times_[index] = clock_.GetTimeInMS();
+}
+
+// Private: the caller must already hold critsect_.
+bool RTPPacketHistory::FindSeqNum(uint16_t sequence_number,
+                                  int32_t* index) const {
+  uint16_t temp_sequence_number = 0;
+  if (prev_index_ > 0) {
+    *index = prev_index_ - 1;
+    temp_sequence_number = stored_seq_nums_[*index];
+  } else {
+    *index = stored_seq_nums_.size() - 1;
+    temp_sequence_number = stored_seq_nums_[*index];  // wrap
+  }
+
+  int32_t idx = (prev_index_ - 1) - (temp_sequence_number - sequence_number);
+  if (idx >= 0 && idx < static_cast<int>(stored_seq_nums_.size())) {
+    *index = idx;
+    temp_sequence_number = stored_seq_nums_[*index];
+  }
+
+  if (temp_sequence_number != sequence_number) {
+    // We did not find a match; search all stored entries.
+    for (uint16_t m = 0; m < stored_seq_nums_.size(); m++) {
+      if (stored_seq_nums_[m] == sequence_number) {
+        *index = m;
+        temp_sequence_number = stored_seq_nums_[*index];
+        break;
+      }
+    }
+  }
+  if (temp_sequence_number == sequence_number) {
+    // We found a match.
+    return true;
+  }
+  return false;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_packet_history.h b/trunk/src/modules/rtp_rtcp/source/rtp_packet_history.h
new file mode 100644
index 0000000..c61fd0b
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Class for storing RTP packets.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
+#define WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
+
+#include <vector>
+
+#include "module_common_types.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class RtpRtcpClock;
+class CriticalSectionWrapper;
+
+class RTPPacketHistory {
+ public:
+  RTPPacketHistory(RtpRtcpClock* clock);
+  ~RTPPacketHistory();
+
+  void SetStorePacketsStatus(bool enable, uint16_t number_to_store);  // Enable/disable; capacity = number_to_store.
+
+  bool StorePackets() const;  // Returns whether storage is currently enabled.
+
+  // Stores RTP packet.
+  int32_t PutRTPPacket(const uint8_t* packet,
+                       uint16_t packet_length,
+                       uint16_t max_packet_length,
+                       StorageType type);
+
+  // Gets stored RTP packet corresponding to the input sequence number.
+  // The packet is copied to the buffer pointed to by ptr_rtp_packet.
+  // The rtp_packet_length should show the available buffer size.
+  // Returns true if packet is found.
+  // rtp_packet_length: returns the copied packet length on success.
+  // min_elapsed_time_ms: the minimum time that must have elapsed since the last
+  // time the packet was resent (parameter is ignored if set to zero).
+  // If the packet is found but the minimum time has not elaped, no bytes are
+  // copied.
+  // stored_time_ms: returns the time when the packet was stored.
+  // type: returns the storage type set in PutRTPPacket.
+  bool GetRTPPacket(uint16_t sequence_number,
+                    uint32_t min_elapsed_time_ms,
+                    uint8_t* packet,
+                    uint16_t* packet_length,
+                    uint32_t* stored_time_ms,
+                    StorageType* type) const;
+
+  bool HasRTPPacket(uint16_t sequence_number) const;  // True if seq num is currently stored.
+
+  void UpdateResendTime(uint16_t sequence_number);  // Stamps last-resend time with "now".
+
+ private:
+  void Allocate(uint16_t number_to_store);
+  void Free();
+  void VerifyAndAllocatePacketLength(uint16_t packet_length);
+  bool FindSeqNum(uint16_t sequence_number, int32_t* index) const;  // Seq num -> slot index; lock held by caller.
+
+ private:
+  RtpRtcpClock& clock_;
+  CriticalSectionWrapper* critsect_;  // Guards all mutable state below.
+  bool store_;  // True while packet storage is enabled.
+  uint32_t prev_index_;  // Slot hint used by FindSeqNum (presumably the next write slot — TODO confirm).
+  uint16_t max_packet_length_;
+
+  std::vector<std::vector<uint8_t> > stored_packets_;  // The vectors below are parallel arrays, indexed together.
+  std::vector<uint16_t> stored_seq_nums_;
+  std::vector<uint16_t> stored_lengths_;
+  std::vector<uint32_t> stored_times_;
+  std::vector<uint32_t> stored_resend_times_;
+  std::vector<StorageType> stored_types_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_packet_history_test.cc b/trunk/src/modules/rtp_rtcp/source/rtp_packet_history_test.cc
new file mode 100644
index 0000000..c849bc2
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_packet_history_test.cc
@@ -0,0 +1,214 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the RTPPacketHistory.
+ */
+
+#include <gtest/gtest.h>
+
+#include "rtp_packet_history.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class FakeClock : public RtpRtcpClock {  // Deterministic, manually-advanced clock for tests.
+ public:
+  FakeClock() {
+    time_in_ms_ = 123456;  // Arbitrary nonzero start time.
+  }
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_UWord32 GetTimeInMS() {
+    return time_in_ms_;
+  }
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+    secs = time_in_ms_ / 1000;
+    frac = (time_in_ms_ % 1000) * 4294967;  // ms -> NTP 1/2^32 units (2^32 / 1000 ~= 4294967).
+  }
+  void IncrementTime(WebRtc_UWord32 time_increment_ms) {  // Advance the fake time.
+    time_in_ms_ += time_increment_ms;
+  }
+ private:
+  WebRtc_UWord32 time_in_ms_;  // Current fake time, in milliseconds.
+};
+
+class RtpPacketHistoryTest : public ::testing::Test {  // Fixture: fresh history backed by a fake clock.
+ protected:
+  RtpPacketHistoryTest()
+     : hist_(new RTPPacketHistory(&fake_clock_)) {
+  }
+  ~RtpPacketHistoryTest() {
+    delete hist_;
+  }
+  
+  FakeClock fake_clock_;
+  RTPPacketHistory* hist_;
+  enum {kPayload = 127};  // Payload type written into test packets.
+  enum {kSsrc = 12345678};
+  enum {kSeqNum = 88};
+  enum {kTimestamp = 127};
+  enum {kMaxPacketLength = 1500};
+  uint8_t packet_[kMaxPacketLength];  // Input/scratch packet buffer.
+  uint8_t packet_out_[kMaxPacketLength];  // Output buffer for GetRTPPacket.
+
+  void CreateRtpPacket(uint16_t seq_num, uint32_t ssrc, uint8_t payload,
+      uint32_t timestamp, uint8_t* array, uint16_t* cur_pos) {  // Writes a minimal 12-byte RTP header at *cur_pos.
+    array[(*cur_pos)++] = 0x80;  // Version 2, no padding/extension/CSRCs.
+    array[(*cur_pos)++] = payload;
+    array[(*cur_pos)++] = seq_num >> 8;
+    array[(*cur_pos)++] = seq_num;
+    array[(*cur_pos)++] = timestamp >> 24;
+    array[(*cur_pos)++] = timestamp >> 16;
+    array[(*cur_pos)++] = timestamp >> 8;
+    array[(*cur_pos)++] = timestamp;
+    array[(*cur_pos)++] = ssrc >> 24;
+    array[(*cur_pos)++] = ssrc >> 16;
+    array[(*cur_pos)++] = ssrc >> 8;
+    array[(*cur_pos)++] = ssrc;
+  } 
+};
+
+TEST_F(RtpPacketHistoryTest, SetStoreStatus) {  // Enabling/disabling storage is reflected by StorePackets().
+  EXPECT_FALSE(hist_->StorePackets());
+  hist_->SetStorePacketsStatus(true, 10);
+  EXPECT_TRUE(hist_->StorePackets());
+  hist_->SetStorePacketsStatus(false, 0);
+  EXPECT_FALSE(hist_->StorePackets());
+}
+
+TEST_F(RtpPacketHistoryTest, NoStoreStatus) {  // Put succeeds but nothing is retained while storage is off.
+  EXPECT_FALSE(hist_->StorePackets());
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   kAllowRetransmission));
+  // Packet should not be stored.
+  len = kMaxPacketLength;
+  uint32_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type));
+}
+
+TEST_F(RtpPacketHistoryTest, DontStore) {  // kDontStore packets are never retained even when storage is on.
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength, kDontStore));
+
+  // Packet should not be stored.
+  len = kMaxPacketLength;
+  uint32_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type));
+}
+
+TEST_F(RtpPacketHistoryTest, PutRtpPacket_TooLargePacketLength) {  // Put rejects length > max.
+  hist_->SetStorePacketsStatus(true, 10);
+  EXPECT_EQ(-1, hist_->PutRTPPacket(packet_,
+                                    kMaxPacketLength + 1,
+                                    kMaxPacketLength,
+                                    kAllowRetransmission));
+}
+
+TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) {  // Get fails when the caller's buffer is too small.
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   kAllowRetransmission));
+  uint16_t len_out = len - 1;
+  uint32_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len_out, &time,
+                                   &type));
+}
+
+TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) {  // Lookup of a never-stored seq num fails.
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = kMaxPacketLength;
+  uint32_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(0, 0, packet_, &len, &time, &type));
+}
+
+TEST_F(RtpPacketHistoryTest, PutRtpPacket) {  // Stored packet becomes visible via HasRTPPacket.
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+
+  EXPECT_FALSE(hist_->HasRTPPacket(kSeqNum));
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   kAllowRetransmission));
+  EXPECT_TRUE(hist_->HasRTPPacket(kSeqNum));
+}
+
+TEST_F(RtpPacketHistoryTest, GetRtpPacket) {  // Round-trip: stored bytes, length and type come back intact.
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   kAllowRetransmission));
+
+  uint16_t len_out = kMaxPacketLength;
+  uint32_t time;
+  StorageType type;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time,
+                                  &type));
+  EXPECT_EQ(len, len_out);
+  EXPECT_EQ(kAllowRetransmission, type);
+  for (int i = 0; i < len; i++)  {
+    EXPECT_EQ(packet_[i], packet_out_[i]);  // Byte-for-byte comparison.
+  }
+}
+
+TEST_F(RtpPacketHistoryTest, DontRetransmit) {  // kDontRetransmit is stored and reported back via type.
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   kDontRetransmit));
+
+  uint16_t len_out = kMaxPacketLength;
+  uint32_t time;
+  StorageType type;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time,
+                                  &type));
+  EXPECT_EQ(len, len_out);
+  EXPECT_EQ(kDontRetransmit, type);
+}
+
+TEST_F(RtpPacketHistoryTest, MinResendTime) {  // min_elapsed_time_ms gates copying but not lookup.
+  hist_->SetStorePacketsStatus(true, 10);
+  WebRtc_UWord32 store_time = fake_clock_.GetTimeInMS();
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   kAllowRetransmission));
+
+  hist_->UpdateResendTime(kSeqNum);
+  fake_clock_.IncrementTime(100);
+
+  // Time has elapsed.
+  len = kMaxPacketLength;
+  StorageType type;
+  uint32_t time;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 100, packet_, &len, &time, &type));
+  EXPECT_GT(len, 0);
+  EXPECT_EQ(store_time, time);
+
+  // Time has not elapsed. Packet should be found, but no bytes copied.
+  len = kMaxPacketLength;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 101, packet_, &len, &time, &type));
+  EXPECT_EQ(0, len);
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_receiver.cc b/trunk/src/modules/rtp_rtcp/source/rtp_receiver.cc
new file mode 100644
index 0000000..f421966
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_receiver.cc
@@ -0,0 +1,1685 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "rtp_receiver.h"
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_rtcp_impl.h"
+#include "critical_section_wrapper.h"
+
+#include <cassert>
+#include <string.h> //memcpy
+#include <math.h>   // floor
+#include <stdlib.h> // abs
+
+namespace webrtc {
+
+using ModuleRTPUtility::AudioPayload;
+using ModuleRTPUtility::GetCurrentRTP;
+using ModuleRTPUtility::Payload;
+using ModuleRTPUtility::RTPPayloadParser;
+using ModuleRTPUtility::StringCompare;
+using ModuleRTPUtility::VideoPayload;
+
+RTPReceiver::RTPReceiver(const WebRtc_Word32 id,
+                         const bool audio,
+                         RtpRtcpClock* clock,
+                         ModuleRtpRtcpImpl* owner) :
+    RTPReceiverAudio(id),
+    RTPReceiverVideo(id, owner),
+    Bitrate(clock),
+    _id(id),
+    _audio(audio),
+    _rtpRtcp(*owner),
+    _criticalSectionCbs(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbRtpFeedback(NULL),
+    _cbRtpData(NULL),
+
+    _criticalSectionRTPReceiver(
+        CriticalSectionWrapper::CreateCriticalSection()),
+    _lastReceiveTime(0),
+    _lastReceivedPayloadLength(0),
+    _lastReceivedPayloadType(-1),
+    _lastReceivedMediaPayloadType(-1),
+    _lastReceivedAudioSpecific(),
+    _lastReceivedVideoSpecific(),
+
+    _packetTimeOutMS(0),
+
+    _redPayloadType(-1),
+    _payloadTypeMap(),
+    _rtpHeaderExtensionMap(),
+    _SSRC(0),
+    _numCSRCs(0),
+    _currentRemoteCSRC(),
+    _numEnergy(0),
+    _currentRemoteEnergy(),
+    _useSSRCFilter(false),
+    _SSRCFilter(0),
+
+    _jitterQ4(0),
+    _jitterMaxQ4(0),
+    _cumulativeLoss(0),
+    _jitterQ4TransmissionTimeOffset(0),
+    _localTimeLastReceivedTimestamp(0),
+    _lastReceivedTimestamp(0),
+    _lastReceivedSequenceNumber(0),
+    _lastReceivedTransmissionTimeOffset(0),
+
+    _receivedSeqFirst(0),
+    _receivedSeqMax(0),
+    _receivedSeqWraps(0),
+
+    _receivedPacketOH(12), // RTP header
+    _receivedByteCount(0),
+    _receivedOldPacketCount(0),
+    _receivedInorderPacketCount(0),
+
+    _lastReportInorderPackets(0),
+    _lastReportOldPackets(0),
+    _lastReportSeqMax(0),
+    _lastReportFractionLost(0),
+    _lastReportCumulativeLost(0),
+    _lastReportExtendedHighSeqNum(0),
+    _lastReportJitter(0),
+    _lastReportJitterTransmissionTimeOffset(0),
+
+    _nackMethod(kNackOff),
+    _RTX(false),
+    _ssrcRTX(0) {
+  memset(_currentRemoteCSRC, 0, sizeof(_currentRemoteCSRC));
+  memset(_currentRemoteEnergy, 0, sizeof(_currentRemoteEnergy));
+  memset(&_lastReceivedAudioSpecific, 0, sizeof(_lastReceivedAudioSpecific));
+
+  _lastReceivedAudioSpecific.channels = 1;
+  _lastReceivedVideoSpecific.maxRate = 0;
+  _lastReceivedVideoSpecific.videoCodecType = kRtpNoVideo;
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+RTPReceiver::~RTPReceiver() {
+  if (_cbRtpFeedback) {
+    for (int i = 0; i < _numCSRCs; i++) {
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id,_currentRemoteCSRC[i], false);
+    }
+  }
+  delete _criticalSectionCbs;
+  delete _criticalSectionRTPReceiver;
+
+  while (!_payloadTypeMap.empty()) {
+    std::map<WebRtc_Word8, Payload*>::iterator it = _payloadTypeMap.begin();
+    delete it->second;
+    _payloadTypeMap.erase(it);
+  }
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 RTPReceiver::Init() {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  _lastReceiveTime = 0;
+  _lastReceivedPayloadLength = 0;
+  _packetTimeOutMS = 0;
+  _lastReceivedPayloadType = -1;
+  _lastReceivedMediaPayloadType = -1;
+  _redPayloadType = -1;
+
+  memset(&_lastReceivedAudioSpecific, 0, sizeof(_lastReceivedAudioSpecific));
+  _lastReceivedAudioSpecific.channels = 1;
+
+  _lastReceivedVideoSpecific.videoCodecType = kRtpNoVideo;
+  _lastReceivedVideoSpecific.maxRate = 0;
+  _SSRC = 0;
+  _numCSRCs = 0;
+  _numEnergy = 0;
+  _jitterQ4 = 0;
+  _jitterMaxQ4 = 0;
+  _cumulativeLoss = 0;
+  _jitterQ4TransmissionTimeOffset = 0;
+  _useSSRCFilter = false;
+  _SSRCFilter = 0;
+
+  _localTimeLastReceivedTimestamp = 0;
+  _lastReceivedTimestamp = 0;
+  _lastReceivedSequenceNumber = 0;
+  _lastReceivedTransmissionTimeOffset = 0;
+
+  _receivedSeqFirst = 0;
+  _receivedSeqMax = 0;
+  _receivedSeqWraps = 0;
+
+  _receivedPacketOH = 12; // RTP header
+  _receivedByteCount = 0;
+  _receivedOldPacketCount = 0;
+  _receivedInorderPacketCount = 0;
+
+  _lastReportInorderPackets = 0;
+  _lastReportOldPackets = 0;
+  _lastReportSeqMax = 0;
+  _lastReportFractionLost = 0;
+  _lastReportCumulativeLost = 0;
+  _lastReportExtendedHighSeqNum = 0;
+  _lastReportJitter = 0;
+  _lastReportJitterTransmissionTimeOffset = 0;
+
+  _rtpHeaderExtensionMap.Erase();
+
+  while (!_payloadTypeMap.empty()) {
+    std::map<WebRtc_Word8, Payload*>::iterator it = _payloadTypeMap.begin();
+    delete it->second;
+    _payloadTypeMap.erase(it);
+  }
+
+  Bitrate::Init();
+  RTPReceiverAudio::Init();
+  return RTPReceiverVideo::Init();
+}
+
+void
+RTPReceiver::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    RTPReceiverAudio::ChangeUniqueId(id);
+    RTPReceiverVideo::ChangeUniqueId(id);
+}
+
+RtpVideoCodecTypes
+RTPReceiver::VideoCodecType() const
+{
+    return _lastReceivedVideoSpecific.videoCodecType;
+}
+
+WebRtc_UWord32
+RTPReceiver::MaxConfiguredBitrate() const
+{
+    return _lastReceivedVideoSpecific.maxRate;
+}
+
+bool
+RTPReceiver::REDPayloadType(const WebRtc_Word8 payloadType) const
+{
+    return _redPayloadType == payloadType;  // True when payloadType is the registered RED type (redundant ?true:false removed).
+}
+
+WebRtc_Word8
+RTPReceiver::REDPayloadType() const
+{
+    return _redPayloadType;
+}
+
+    // configure a timeout value
+WebRtc_Word32
+RTPReceiver::SetPacketTimeout(const WebRtc_UWord32 timeoutMS)
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    _packetTimeOutMS = timeoutMS;
+    return 0;
+}
+
+void RTPReceiver::PacketTimeout()
+{
+    bool packetTimeOut = false;
+    {
+        CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+        if(_packetTimeOutMS == 0)
+        {
+            // not configured
+            return;
+        }
+
+        if(_lastReceiveTime == 0)
+        {
+            // not active
+            return;
+        }
+
+        WebRtc_UWord32 now = _clock.GetTimeInMS();
+
+        if(now - _lastReceiveTime > _packetTimeOutMS)
+        {
+            packetTimeOut = true;
+            _lastReceiveTime = 0;  // only one callback
+            _lastReceivedPayloadType = -1; // makes RemotePayload return -1, which we want
+            _lastReceivedMediaPayloadType = -1;
+        }
+    }
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if(packetTimeOut && _cbRtpFeedback)
+    {
+        _cbRtpFeedback->OnPacketTimeout(_id);
+    }
+}
+
+void
+RTPReceiver::ProcessDeadOrAlive(const bool RTCPalive, const WebRtc_UWord32 now)
+{
+    if(_cbRtpFeedback == NULL)  // NOTE(review): read without _criticalSectionCbs; re-checked under lock below.
+    {
+        // no callback
+        return;
+    }
+    RTPAliveType alive = kRtpDead;
+
+    if(_lastReceiveTime + 1000 > now)  // NOTE(review): _lastReceiveTime read without _criticalSectionRTPReceiver - confirm.
+    {
+        // always alive if we have received a RTP packet the last sec
+        alive = kRtpAlive;
+
+    } else
+    {
+        if(RTCPalive)
+        {
+            if(_audio)
+            {
+                // alive depends on CNG
+                // if last received size < 10 likely CNG
+                if(_lastReceivedPayloadLength < 10) // our CNG is 9 bytes
+                {
+                    // potential CNG
+                    // receiver need to check kRtpNoRtp against NetEq speechType kOutputPLCtoCNG
+                    alive = kRtpNoRtp;
+                } else
+                {
+                    // dead
+                }
+            } else
+            {
+                // dead for video
+            }
+        }else
+        {
+            // no RTP packet for 1 sec and no RTCP
+            // dead
+        }
+    }
+
+
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if(_cbRtpFeedback)
+    {
+        _cbRtpFeedback->OnPeriodicDeadOrAlive(_id, alive);
+    }
+}
+
+WebRtc_UWord16
+RTPReceiver::PacketOHReceived() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _receivedPacketOH;
+}
+
+WebRtc_UWord32
+RTPReceiver::PacketCountReceived() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _receivedInorderPacketCount;
+}
+
+WebRtc_UWord32
+RTPReceiver::ByteCountReceived() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _receivedByteCount;
+}
+
+WebRtc_Word32
+RTPReceiver::RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    _cbRtpFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::RegisterIncomingDataCallback(RtpData* incomingDataCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    _cbRtpData = incomingDataCallback;
+    return 0;
+}
+
+WebRtc_Word32 RTPReceiver::RegisterReceivePayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  assert(payloadName);
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  // Sanity: reject payload types whose value collides with RTCP packet types.
+  switch (payloadType) {
+    // reserved payload types to avoid RTCP conflicts when marker bit is set
+    case 64:        //  192 Full INTRA-frame request
+    case 72:        //  200 Sender report
+    case 73:        //  201 Receiver report
+    case 74:        //  202 Source description
+    case 75:        //  203 Goodbye
+    case 76:        //  204 Application-defined
+    case 77:        //  205 Transport layer FB message
+    case 78:        //  206 Payload-specific FB message
+    case 79:        //  207 Extended report
+      WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                   "%s invalid payloadtype:%d",
+                   __FUNCTION__, payloadType);
+      return -1;
+    default:
+      break;
+  }
+  size_t payloadNameLength = strlen(payloadName);
+
+  std::map<WebRtc_Word8, Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+  if (it != _payloadTypeMap.end()) {
+    // we already use this payload type
+    Payload* payload = it->second;
+    assert(payload);
+
+    size_t nameLength = strlen(payload->name);
+
+    // check if it's the same as we already have
+    // if same ignore sending an error
+    if (payloadNameLength == nameLength &&
+        StringCompare(payload->name, payloadName, payloadNameLength)) {
+      if (_audio &&
+          payload->audio &&
+          payload->typeSpecific.Audio.frequency == frequency &&
+          payload->typeSpecific.Audio.channels == channels &&
+          (payload->typeSpecific.Audio.rate == rate ||
+              payload->typeSpecific.Audio.rate == 0 || rate == 0)) {
+        payload->typeSpecific.Audio.rate = rate;
+        // Ensure that we update the rate if new or old is zero
+        return 0;
+      }
+      if (!_audio && !payload->audio) {
+        // update maxBitrate for video
+        payload->typeSpecific.Video.maxRate = rate;
+        return 0;
+      }
+    }
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument payloadType:%d already registered",
+                 __FUNCTION__, payloadType);
+    return -1;
+  }
+  if (_audio) {
+    // remove existing item, hence search for the name
+    // only for audio; for video we allow a codec to use multiple payload types
+    std::map<WebRtc_Word8, Payload*>::iterator audio_it =
+        _payloadTypeMap.begin();
+    while (audio_it != _payloadTypeMap.end()) {
+      Payload* payload = audio_it->second;
+      size_t nameLength = strlen(payload->name);
+
+      if (payloadNameLength == nameLength &&
+          StringCompare(payload->name, payloadName, payloadNameLength)) {
+        // we found the payload name in the list
+        // if audio check frequency and rate
+        if (payload->audio) {
+          if (payload->typeSpecific.Audio.frequency == frequency &&
+              (payload->typeSpecific.Audio.rate == rate ||
+                  payload->typeSpecific.Audio.rate == 0 || rate == 0)) {
+            // remove old setting
+            delete payload;
+            _payloadTypeMap.erase(audio_it);
+            break;
+          }
+        } else if(StringCompare(payloadName,"red",3)) {
+          delete payload;
+          _payloadTypeMap.erase(audio_it);
+          break;
+        }
+      }
+      audio_it++;
+    }
+  }
+  Payload* payload = NULL;
+
+  // save the RED payload type
+  // used in both audio and video
+  if (StringCompare(payloadName,"red",3)) {
+    _redPayloadType = payloadType;
+    payload = new Payload;
+    payload->audio = false;
+    payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;  // Guarantee NUL termination.
+    strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  } else {
+    if (_audio) {
+      payload = RegisterReceiveAudioPayload(payloadName, payloadType,
+                                            frequency, channels, rate);
+    } else {
+      payload = RegisterReceiveVideoPayload(payloadName, payloadType, rate);
+    }
+  }
+  if (payload == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s failed to register payload",
+                 __FUNCTION__);
+    return -1;
+  }
+  _payloadTypeMap[payloadType] = payload;
+
+  // Successful set of payload type, clear the value of last receivedPT,
+  // since it might mean something else
+  _lastReceivedPayloadType = -1;
+  _lastReceivedMediaPayloadType = -1;
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiver::DeRegisterReceivePayload(
+    const WebRtc_Word8 payloadType) {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  std::map<WebRtc_Word8, Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  if (it == _payloadTypeMap.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s failed to find payloadType:%d",
+                 __FUNCTION__, payloadType);
+    return -1;
+  }
+  delete it->second;
+  _payloadTypeMap.erase(it);
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiver::ReceivePayloadType(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate,
+    WebRtc_Word8* payloadType) const {
+  if (payloadType == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  size_t payloadNameLength = strlen(payloadName);
+
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.begin();
+
+  while (it != _payloadTypeMap.end()) {
+    Payload* payload = it->second;
+    assert(payload);
+
+    size_t nameLength = strlen(payload->name);
+    if (payloadNameLength == nameLength &&
+        StringCompare(payload->name, payloadName, payloadNameLength)) {
+      // name match
+      if( payload->audio) {
+        if (rate == 0) {
+          // [default] audio, check freq and channels
+          if (payload->typeSpecific.Audio.frequency == frequency &&
+              payload->typeSpecific.Audio.channels == channels) {
+            *payloadType = it->first;
+            return 0;
+          }
+        } else {
+          // audio, check freq, channels and rate
+          if( payload->typeSpecific.Audio.frequency == frequency &&
+              payload->typeSpecific.Audio.channels == channels &&
+              payload->typeSpecific.Audio.rate == rate) {
+            // extra rate condition added
+            *payloadType = it->first;
+            return 0;
+          }
+        }
+      } else {
+        // video
+        *payloadType = it->first;
+        return 0;
+      }
+    }
+    it++;
+  }
+  return -1;
+}
+
+WebRtc_Word32 RTPReceiver::ReceivePayload(
+    const WebRtc_Word8 payloadType,
+    char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    WebRtc_UWord32* frequency,
+    WebRtc_UWord8* channels,
+    WebRtc_UWord32* rate) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  if (it == _payloadTypeMap.end()) {
+    return -1;
+  }
+  Payload* payload = it->second;
+  assert(payload);
+
+  if(frequency) {
+    if(payload->audio) {
+      *frequency = payload->typeSpecific.Audio.frequency;
+    } else {
+      *frequency = 90000;
+    }
+  }
+  if (channels) {
+    if(payload->audio) {
+      *channels = payload->typeSpecific.Audio.channels;
+    } else {
+      *channels = 1;
+    }
+  }
+  if (rate) {
+    if(payload->audio) {
+      *rate = payload->typeSpecific.Audio.rate;
+    } else {
+      assert(false);
+      *rate = 0;
+    }
+  }
+  if (payloadName) {
+    payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+    strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+  }
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiver::RemotePayload(
+    char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    WebRtc_Word8* payloadType,
+    WebRtc_UWord32* frequency,
+    WebRtc_UWord8* channels) const {
+  if(_lastReceivedPayloadType == -1) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "%s invalid state", __FUNCTION__);
+    return -1;
+  }
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.find(_lastReceivedPayloadType);
+
+  if (it == _payloadTypeMap.end()) {
+    return -1;
+  }
+  Payload* payload = it->second;
+  assert(payload);
+  payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+
+  if (payloadType) {
+    *payloadType = _lastReceivedPayloadType;
+  }
+  if (frequency) {
+    if (payload->audio) {
+      *frequency = payload->typeSpecific.Audio.frequency;
+    } else {
+      *frequency = 90000;
+    }
+  }
+  if (channels) {
+    if (payload->audio) {
+      *channels = payload->typeSpecific.Audio.channels;
+    } else {
+      *channels = 1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                        const WebRtc_UWord8 id)
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+    return _rtpHeaderExtensionMap.Register(type, id);
+}
+
+WebRtc_Word32
+RTPReceiver::DeregisterRtpHeaderExtension(const RTPExtensionType type)
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+    return _rtpHeaderExtensionMap.Deregister(type);
+}
+
+void RTPReceiver::GetHeaderExtensionMapCopy(RtpHeaderExtensionMap* map) const
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+    _rtpHeaderExtensionMap.GetCopy(map);
+}
+
+NACKMethod
+RTPReceiver::NACK() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _nackMethod;
+}
+
+    // Turn negative acknowledgement requests on/off
+WebRtc_Word32
+RTPReceiver::SetNACKStatus(const NACKMethod method)
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    _nackMethod = method;
+    return 0;
+}
+
+void RTPReceiver::SetRTXStatus(const bool enable,
+                               const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  _RTX = enable;
+  _ssrcRTX = SSRC;
+}
+
+void RTPReceiver::RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  *enable = _RTX;
+  *SSRC = _ssrcRTX;
+}
+
+WebRtc_UWord32
+RTPReceiver::SSRC() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _SSRC;
+}
+
+    // Copies the current remote CSRC list into arrOfCSRC; returns the count.
+WebRtc_Word32
+RTPReceiver::CSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    assert(_numCSRCs <= kRtpCsrcSize);
+
+    if(_numCSRCs >0)
+    {
+        memcpy(arrOfCSRC, _currentRemoteCSRC, sizeof(WebRtc_UWord32)*_numCSRCs);
+    }
+    return _numCSRCs;
+}
+
+WebRtc_Word32
+RTPReceiver::Energy( WebRtc_UWord8 arrOfEnergy[kRtpCsrcSize]) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    assert(_numEnergy <= kRtpCsrcSize);
+
+    if(_numEnergy >0)
+    {
+        memcpy(arrOfEnergy, _currentRemoteEnergy, sizeof(WebRtc_UWord8)*_numEnergy);  // Bug fix: was _numCSRCs, mismatching the guard/return count.
+    }
+    return _numEnergy;  // Number of energy entries copied.
+}
+
+// Entry point for a received RTP packet whose header has already been
+// parsed into |rtp_header|. Performs RTX de-encapsulation, SSRC
+// filtering, payload-type bookkeeping and statistics updates, and hands
+// the payload off to the audio- or video-specific parser.
+// Takes _criticalSectionRTPReceiver internally for the statistics update.
+// Returns the parser's non-negative result on success, -1 for malformed,
+// filtered, or unknown-payload packets.
+WebRtc_Word32 RTPReceiver::IncomingRTPPacket(
+    WebRtcRTPHeader* rtp_header,
+    const WebRtc_UWord8* packet,
+    const WebRtc_UWord16 packet_length) {
+  // rtp_header contains the parsed RTP header.
+  // Adjust packet length w r t RTP padding.
+  int length = packet_length - rtp_header->header.paddingLength;
+
+  // length sanity
+  if ((length - rtp_header->header.headerLength) < 0) {
+     WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                  "%s invalid argument",
+                  __FUNCTION__);
+     return -1;
+  }
+  if (_RTX) {
+    if (_ssrcRTX == rtp_header->header.ssrc) {
+      // Sanity check.
+      if (rtp_header->header.headerLength + 2 > packet_length) {
+        return -1;
+      }
+      // The original sequence number is carried in the first two payload
+      // bytes of the RTX packet; restore the media SSRC and sequence
+      // number so the packet can be processed as if received directly.
+      rtp_header->header.ssrc = _SSRC;
+      rtp_header->header.sequenceNumber =
+          (packet[rtp_header->header.headerLength] << 8) +
+          packet[1 + rtp_header->header.headerLength];
+      // Count the RTX header as part of the RTP header.
+      rtp_header->header.headerLength += 2;
+    }
+  }
+  if (_useSSRCFilter) {
+    if (rtp_header->header.ssrc != _SSRCFilter) {
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                   "%s drop packet due to SSRC filter",
+                   __FUNCTION__);
+      return -1;
+    }
+  }
+  if (_lastReceiveTime == 0) {
+    // trigger only once: report the very first received packet.
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if (_cbRtpFeedback) {
+      if (length - rtp_header->header.headerLength == 0) {
+        // keepalive packet
+        _cbRtpFeedback->OnReceivedPacket(_id, kPacketKeepAlive);
+      } else {
+        _cbRtpFeedback->OnReceivedPacket(_id, kPacketRtp);
+      }
+    }
+  }
+  // First byte after the header; used by CheckPayloadChanged to extract
+  // the real payload type from RED packets.
+  // NOTE(review): for a zero-length payload this still reads the byte at
+  // the header boundary — confirm packet_length always covers it.
+  WebRtc_Word8 first_payload_byte = 0;
+  if (length > 0) {
+    first_payload_byte = packet[rtp_header->header.headerLength];
+  }
+  // trigger our callbacks
+  CheckSSRCChanged(rtp_header);
+
+  bool is_red = false;
+  VideoPayload video_specific;
+  video_specific.maxRate = 0;
+  video_specific.videoCodecType = kRtpNoVideo;
+
+  AudioPayload audio_specific;
+  audio_specific.bitsPerSample = 0;
+  audio_specific.channels = 0;
+  audio_specific.frequency = 0;
+
+  if (CheckPayloadChanged(rtp_header,
+                          first_payload_byte,
+                          is_red,
+                          audio_specific,
+                          video_specific) == -1) {
+    if (length - rtp_header->header.headerLength == 0)
+    {
+      // ok keepalive packet
+      WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+                   "%s received keepalive",
+                   __FUNCTION__);
+      return 0;
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "%s received invalid payloadtype",
+                 __FUNCTION__);
+    return -1;
+  }
+  CheckCSRC(rtp_header);
+
+  const WebRtc_UWord8* payload_data =
+      packet + rtp_header->header.headerLength;
+
+  WebRtc_UWord16 payload_data_length =
+      static_cast<WebRtc_UWord16>(length - rtp_header->header.headerLength);
+
+  // Dispatch to the media-specific parser, which also delivers the
+  // payload to the registered data callback.
+  WebRtc_Word32 retVal = 0;
+  if(_audio) {
+    retVal = ParseAudioCodecSpecific(rtp_header,
+                                     payload_data,
+                                     payload_data_length,
+                                     audio_specific,
+                                     is_red);
+  } else {
+    retVal = ParseVideoCodecSpecific(rtp_header,
+                                     payload_data,
+                                     payload_data_length,
+                                     video_specific.videoCodecType,
+                                     is_red,
+                                     packet,
+                                     packet_length,
+                                     _clock.GetTimeInMS());
+  }
+  if(retVal < 0) {
+    return retVal;
+  }
+
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  // this compare to _receivedSeqMax
+  // we store the last received after we have done the callback
+  bool old_packet = RetransmitOfOldPacket(rtp_header->header.sequenceNumber,
+                                          rtp_header->header.timestamp);
+
+  // this updates _receivedSeqMax and other members
+  UpdateStatistics(rtp_header, payload_data_length, old_packet);
+
+  // Need to be updated after RetransmitOfOldPacket &
+  // RetransmitOfOldPacketUpdateStatistics
+  _lastReceiveTime = _clock.GetTimeInMS();
+  _lastReceivedPayloadLength = payload_data_length;
+
+  if (!old_packet) {
+    if (_lastReceivedTimestamp != rtp_header->header.timestamp) {
+      _lastReceivedTimestamp = rtp_header->header.timestamp;
+    }
+    _lastReceivedSequenceNumber = rtp_header->header.sequenceNumber;
+    _lastReceivedTransmissionTimeOffset =
+        rtp_header->extension.transmissionTimeOffset;
+  }
+  return retVal;
+}
+
+// Forward parsed payload data to the registered data callback.
+// Must not hold _criticalSectionRTPReceiver when called.
+// Returns the callback's result, or -1 if no callback is registered.
+WebRtc_Word32 RTPReceiver::CallbackOfReceivedPayloadData(
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadSize,
+    const WebRtcRTPHeader* rtpHeader) {
+  CriticalSectionScoped lock(_criticalSectionCbs);
+  if (_cbRtpData == NULL) {
+    return -1;
+  }
+  return _cbRtpData->OnReceivedPayloadData(payloadData, payloadSize, rtpHeader);
+}
+
+// we already have the _criticalSectionRTPReceiver critsect when we call this
+// Update receive statistics with one accepted packet.
+// rtpHeader: parsed header of the packet.
+// bytes: payload length in bytes.
+// oldPacket: true when RetransmitOfOldPacket() classified this packet
+//            as a retransmission rather than a reordering.
+void
+RTPReceiver::UpdateStatistics(const WebRtcRTPHeader* rtpHeader,
+                              const WebRtc_UWord16 bytes,
+                              const bool oldPacket)
+{
+    // RTP clock rate: 90 kHz for video, codec-dependent for audio.
+    WebRtc_UWord32 freq = 90000;
+    if(_audio)
+    {
+        freq = AudioFrequency();
+    }
+
+    Bitrate::Update(bytes);
+
+    _receivedByteCount += bytes;
+
+    if (_receivedSeqMax == 0 && _receivedSeqWraps == 0)
+    {
+        // First received report
+        _receivedSeqFirst = rtpHeader->header.sequenceNumber;
+        _receivedSeqMax = rtpHeader->header.sequenceNumber;
+        _receivedInorderPacketCount = 1;
+        _localTimeLastReceivedTimestamp =
+            GetCurrentRTP(&_clock, freq); //time in samples
+        return;
+    }
+
+    // count only the new packets received
+    if(InOrderPacket(rtpHeader->header.sequenceNumber))
+    {
+        const WebRtc_UWord32 RTPtime =
+            GetCurrentRTP(&_clock, freq); //time in samples
+        _receivedInorderPacketCount++;
+
+        // wrong if we use RetransmitOfOldPacket
+        WebRtc_Word32 seqDiff = rtpHeader->header.sequenceNumber - _receivedSeqMax;
+        if (seqDiff < 0)
+        {
+            // Wrap around detected
+            _receivedSeqWraps++;
+        }
+        // new max
+        _receivedSeqMax = rtpHeader->header.sequenceNumber;
+
+        if (rtpHeader->header.timestamp != _lastReceivedTimestamp &&
+            _receivedInorderPacketCount > 1)
+        {
+            // Interarrival jitter: difference between the local-clock gap
+            // and the RTP-timestamp gap, both in samples.
+            WebRtc_Word32 timeDiffSamples = (RTPtime - _localTimeLastReceivedTimestamp) -
+                                          (rtpHeader->header.timestamp - _lastReceivedTimestamp);
+
+            timeDiffSamples = abs(timeDiffSamples);
+
+            // libJingle sometimes deliver crazy jumps in TS for the same stream
+            // If this happen don't update jitter value
+            if(timeDiffSamples < 450000)  // Use 5 secs video frequency as border
+            {
+                // note we calculate in Q4 to avoid using float
+                // J += (|D| - J) / 16, rounded, kept in Q4 fixed point.
+                WebRtc_Word32 jitterDiffQ4 = (timeDiffSamples << 4) - _jitterQ4;
+                _jitterQ4 += ((jitterDiffQ4 + 8) >> 4);
+            }
+
+            // Extended jitter report, RFC 5450.
+            // Actual network jitter, excluding the source-introduced jitter.
+            WebRtc_Word32 timeDiffSamplesExt =
+                (RTPtime - _localTimeLastReceivedTimestamp) -
+                ((rtpHeader->header.timestamp +
+                  rtpHeader->extension.transmissionTimeOffset) -
+                (_lastReceivedTimestamp +
+                 _lastReceivedTransmissionTimeOffset));
+
+            timeDiffSamplesExt = abs(timeDiffSamplesExt);
+
+            if(timeDiffSamplesExt < 450000)  // Use 5 secs video freq as border
+            {
+                // note we calculate in Q4 to avoid using float
+                WebRtc_Word32 jitterDiffQ4TransmissionTimeOffset =
+                    (timeDiffSamplesExt << 4) - _jitterQ4TransmissionTimeOffset;
+                _jitterQ4TransmissionTimeOffset +=
+                    ((jitterDiffQ4TransmissionTimeOffset + 8) >> 4);
+            }
+        }
+        _localTimeLastReceivedTimestamp = RTPtime;
+    } else
+    {
+        if(oldPacket)
+        {
+            _receivedOldPacketCount++;
+        }else
+        {
+            // Out of order but not a retransmit: still a new packet.
+            _receivedInorderPacketCount++;
+        }
+    }
+
+    WebRtc_UWord16 packetOH = rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
+
+    // our measured overhead
+    // filter from RFC 5104     4.2.1.2
+    // avg_OH (new) = 15/16*avg_OH (old) + 1/16*pckt_OH,
+    _receivedPacketOH =  (15*_receivedPacketOH + packetOH) >> 4;
+}
+
+// we already have the _criticalSectionRTPReceiver critsect when we call this
+// Heuristic: classify an out-of-order packet as a retransmission (true)
+// or a plain reordering (false). A packet is considered a retransmit
+// when it arrives later than the RTP-timestamp gap predicts, with a
+// tolerance derived from the min RTT when known, otherwise from the
+// jitter estimate. In-order packets always return false.
+bool RTPReceiver::RetransmitOfOldPacket(
+    const WebRtc_UWord16 sequenceNumber,
+    const WebRtc_UWord32 rtpTimeStamp) const {
+  if (InOrderPacket(sequenceNumber)) {
+    return false;
+  }
+  WebRtc_UWord32 frequencyKHz = 90;  // Video frequency.
+  if (_audio) {
+    frequencyKHz = AudioFrequency() / 1000;
+  }
+  // Wall-clock time since the last packet was received.
+  WebRtc_UWord32 timeDiffMS = _clock.GetTimeInMS() - _lastReceiveTime;
+  // Diff in time stamp since last received in order.
+  WebRtc_Word32 rtpTimeStampDiffMS = static_cast<WebRtc_Word32>(
+      rtpTimeStamp - _lastReceivedTimestamp) / frequencyKHz;
+
+  WebRtc_UWord16 minRTT = 0;
+  WebRtc_Word32 maxDelayMs = 0;
+  _rtpRtcp.RTT(_SSRC, NULL, NULL, &minRTT, NULL);
+  if (minRTT == 0) {
+    // No RTT estimate yet; derive the tolerance from the jitter instead.
+    float jitter = _jitterQ4 >> 4;  // Jitter variance in samples.
+    // Jitter standard deviation in samples.
+    float jitterStd = sqrt(jitter);
+    // 2 times the std deviation => 95% confidence.
+    // And transform to ms by dividing by the frequency in kHz.
+    maxDelayMs = static_cast<WebRtc_Word32>((2 * jitterStd) / frequencyKHz);
+
+    // Min maxDelayMs is 1.
+    if (maxDelayMs == 0) {
+      maxDelayMs = 1; 
+    }
+  } else {
+    maxDelayMs = (minRTT / 3) + 1;
+  }
+  // Arrived later than the timestamp gap plus tolerance => retransmit.
+  if (static_cast<WebRtc_Word32>(timeDiffMS) >
+      rtpTimeStampDiffMS + maxDelayMs) {
+    return true;
+  }
+  return false;
+}
+
+// Decide whether sequenceNumber advances the stream (true) or is a
+// duplicate/retransmission of a packet at or before _receivedSeqMax
+// (false). A backwards jump larger than NACK_PACKETS_MAX_SIZE is
+// treated as a restart of the remote side and accepted as in order.
+bool
+RTPReceiver::InOrderPacket(const WebRtc_UWord16 sequenceNumber) const
+{
+    if(_receivedSeqMax >= sequenceNumber)
+    {
+        if(!(_receivedSeqMax > 0xff00 && sequenceNumber < 0x0ff ))//detect wrap around
+        {
+            if(_receivedSeqMax - NACK_PACKETS_MAX_SIZE > sequenceNumber)
+            {
+                // we have a restart of the remote side
+            }else
+            {
+                // we received a retransmit of a packet we already have
+                return false;
+            }
+        }
+    }else
+    {
+        // check for a wrap
+        if(sequenceNumber > 0xff00 && _receivedSeqMax < 0x0ff )//detect wrap around
+        {
+            // NOTE(review): in this branch _receivedSeqMax < 0x0ff, so
+            // _receivedSeqMax - NACK_PACKETS_MAX_SIZE is negative and the
+            // "restart" case looks unreachable — confirm this is intended.
+            if(_receivedSeqMax - NACK_PACKETS_MAX_SIZE > sequenceNumber)
+            {
+                // we have a restart of the remote side
+            }else
+            {
+                // we received a retransmit of a packet we already have
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+// Return the sequence number of the last in-order packet received.
+WebRtc_UWord16 RTPReceiver::SequenceNumber() const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  return _lastReceivedSequenceNumber;
+}
+
+// Return the RTP timestamp of the last in-order packet received.
+WebRtc_UWord32 RTPReceiver::TimeStamp() const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  return _lastReceivedTimestamp;
+}
+
+// Look up the registered Payload entry for payloadType.
+// payload (out): set to the registered entry on success; untouched on
+//                failure.
+// Returns 0 on success; -1 when the payload type is not registered.
+// NOTE(review): the return type is unsigned yet -1 is used as an error
+// value, and _payloadTypeMap is read without _criticalSectionRTPReceiver
+// — confirm callers hold the lock.
+WebRtc_UWord32 RTPReceiver::PayloadTypeToPayload(
+    const WebRtc_UWord8 payloadType,
+    Payload*& payload) const {
+
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  // check that this is a registered payload type
+  if (it == _payloadTypeMap.end()) {
+    return -1;
+  }
+  payload = it->second;
+  return 0;
+}
+
+// Estimate the RTP timestamp the remote side would use "now": the
+// timestamp of the last received packet plus the local time elapsed
+// since then, converted to RTP sample units.
+// Returns 0 and sets |timestamp| on success, -1 (and logs) when no
+// packet has been received yet.
+WebRtc_Word32 RTPReceiver::EstimatedRemoteTimeStamp(
+    WebRtc_UWord32& timestamp) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  const WebRtc_UWord32 freq = _audio ? AudioFrequency() : 90000;
+  if (_localTimeLastReceivedTimestamp == 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state",
+                 __FUNCTION__);
+    return -1;
+  }
+  // Elapsed local time since the last packet, in samples.
+  const WebRtc_UWord32 diff =
+      GetCurrentRTP(&_clock, freq) - _localTimeLastReceivedTimestamp;
+  timestamp = _lastReceivedTimestamp + diff;
+  return 0;
+}
+
+// Retrieve the currently configured SSRC filter.
+// Returns 0 and sets |allowedSSRC| when the filter is enabled;
+// returns -1 (and logs) when it is disabled.
+WebRtc_Word32 RTPReceiver::SSRCFilter(WebRtc_UWord32& allowedSSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  if (!_useSSRCFilter) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state",
+                 __FUNCTION__);
+    return -1;
+  }
+  allowedSSRC = _SSRCFilter;
+  return 0;
+}
+
+// Set an SSRC to be used as a filter for incoming RTP streams.
+// When disabled, the stored filter value is cleared.
+WebRtc_Word32 RTPReceiver::SetSSRCFilter(const bool enable,
+                                         const WebRtc_UWord32 allowedSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  _useSSRCFilter = enable;
+  _SSRCFilter = enable ? allowedSSRC : 0;
+  return 0;
+}
+
+// no criticalsection when called
+// Detect a change of the remote SSRC. On a change: reset statistics,
+// forward the new SSRC to the RTCP module and notify the feedback
+// callback, asking it to re-initialize the decoder when the stream
+// restarted with the same codec.
+void RTPReceiver::CheckSSRCChanged(const WebRtcRTPHeader* rtpHeader) {
+  bool newSSRC = false;
+  bool reInitializeDecoder = false;
+  char payloadName[RTP_PAYLOAD_NAME_SIZE];
+  WebRtc_UWord32 frequency = 90000; // default video freq
+  WebRtc_UWord8 channels = 1;
+  WebRtc_UWord32 rate = 0;
+
+  {
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (_SSRC != rtpHeader->header.ssrc ||
+        (_lastReceivedPayloadType == -1 && _SSRC == 0)) {
+      // we need the _payloadType to make the call if the remote SSRC is 0
+      newSSRC = true;
+
+      // reset last report
+      ResetStatistics();
+      RTPReceiverVideo::ResetOverUseDetector();
+
+      _lastReceivedTimestamp      = 0;
+      _lastReceivedSequenceNumber = 0;
+      _lastReceivedTransmissionTimeOffset = 0;
+
+      if (_SSRC) {  // do we have a SSRC? then the stream is restarted
+        //  if we have the same codec? reinit decoder
+        if (rtpHeader->header.payloadType == _lastReceivedPayloadType) {
+          reInitializeDecoder = true;
+
+          std::map<WebRtc_Word8, Payload*>::iterator it =
+              _payloadTypeMap.find(rtpHeader->header.payloadType);
+
+          // NOTE(review): returning here leaves _SSRC unchanged, so the
+          // update is retried on the next packet — confirm intended.
+          if (it == _payloadTypeMap.end()) {
+            return;
+          }
+          Payload* payload = it->second;
+          assert(payload);
+          payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+          strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+          if(payload->audio) {
+            frequency = payload->typeSpecific.Audio.frequency;
+            channels =  payload->typeSpecific.Audio.channels;
+            rate = payload->typeSpecific.Audio.rate;
+          } else {
+            frequency = 90000;
+          }
+        }
+      }
+      _SSRC = rtpHeader->header.ssrc;
+    }
+  }
+  if(newSSRC) {
+    // we need to get this to our RTCP sender and receiver
+    // need to do this outside critical section
+    _rtpRtcp.SetRemoteSSRC(rtpHeader->header.ssrc);
+  }
+  CriticalSectionScoped lock(_criticalSectionCbs);
+  if(_cbRtpFeedback) {
+    if(newSSRC) {
+      _cbRtpFeedback->OnIncomingSSRCChanged(_id, rtpHeader->header.ssrc);
+    }
+    if(reInitializeDecoder) {
+      if (-1 == _cbRtpFeedback->OnInitializeDecoder(_id,
+          rtpHeader->header.payloadType, payloadName, frequency, channels,
+          rate)) {  // new stream same codec
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                     "Failed to create decoder for payload type:%d",
+                     rtpHeader->header.payloadType);
+      }
+    }
+  }
+}
+
+// no criticalsection when called
+// Detect a change of payload type relative to the previous packet and,
+// when it changed, look up the new payload, remember its parameters and
+// ask the feedback callback to (re)create a matching decoder.
+// firstPayloadByte: first byte after the RTP header; carries the real
+//                   payload type when the packet is RED (RFC 2198).
+// isRED (out): set to true when the packet is a RED packet.
+// audioSpecificPayload / videoSpecificPayload (out): filled with the
+//   format parameters of the (possibly unchanged) current payload.
+// Returns 0 on success, -1 for unknown or corrupt payload types.
+WebRtc_Word32 RTPReceiver::CheckPayloadChanged(
+    const WebRtcRTPHeader* rtpHeader,
+    const WebRtc_Word8 firstPayloadByte,
+    bool& isRED,
+    AudioPayload& audioSpecificPayload,
+    VideoPayload& videoSpecificPayload) {
+  bool reInitializeDecoder = false;
+
+  char payloadName[RTP_PAYLOAD_NAME_SIZE];
+  WebRtc_Word8 payloadType = rtpHeader->header.payloadType;
+
+  {
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (payloadType != _lastReceivedPayloadType) {
+      if (REDPayloadType(payloadType)) {
+        // get the real codec payload type
+        payloadType = firstPayloadByte & 0x7f;
+        isRED = true;
+
+        if (REDPayloadType(payloadType)) {
+            // Invalid payload type, traced by caller. If we proceeded here,
+            // this would be set as |_lastReceivedPayloadType|, and we would no
+            // longer catch corrupt packets at this level.
+            return -1;
+        }
+
+        //when we receive RED we need to check the real payload type
+        if (payloadType == _lastReceivedPayloadType) {
+          if(_audio)
+          {
+            memcpy(&audioSpecificPayload, &_lastReceivedAudioSpecific,
+                   sizeof(_lastReceivedAudioSpecific));
+          } else {
+            memcpy(&videoSpecificPayload, &_lastReceivedVideoSpecific,
+                   sizeof(_lastReceivedVideoSpecific));
+          }
+          return 0;
+        }
+      }
+      if (_audio) {
+        if (TelephoneEventPayloadType(payloadType)) {
+          // don't do callbacks for DTMF packets
+          isRED = false;
+          return 0;
+        }
+        // frequency is updated for CNG
+        if (CNGPayloadType(payloadType, audioSpecificPayload.frequency)) {
+          // don't do callbacks for DTMF packets
+          isRED = false;
+          return 0;
+        }
+      }
+      std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+          _payloadTypeMap.find(payloadType);
+
+      // check that this is a registered payload type
+      if (it == _payloadTypeMap.end()) {
+        return -1;
+      }
+      Payload* payload = it->second;
+      assert(payload);
+      payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+      strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+      _lastReceivedPayloadType = payloadType;
+
+      reInitializeDecoder = true;
+
+      // Cache the payload parameters and copy them to the caller.
+      if(payload->audio) {
+        memcpy(&_lastReceivedAudioSpecific, &(payload->typeSpecific.Audio),
+               sizeof(_lastReceivedAudioSpecific));
+        memcpy(&audioSpecificPayload, &(payload->typeSpecific.Audio),
+               sizeof(_lastReceivedAudioSpecific));
+      } else {
+        memcpy(&_lastReceivedVideoSpecific, &(payload->typeSpecific.Video),
+               sizeof(_lastReceivedVideoSpecific));
+        memcpy(&videoSpecificPayload, &(payload->typeSpecific.Video),
+               sizeof(_lastReceivedVideoSpecific));
+
+        if (_lastReceivedVideoSpecific.videoCodecType == kRtpFecVideo)
+        {
+          // Only reset the decoder on media packets.
+          reInitializeDecoder = false;
+        } else {
+          if (_lastReceivedMediaPayloadType == _lastReceivedPayloadType) {
+            // Only reset the decoder if the media codec type has changed.
+            reInitializeDecoder = false;
+          }
+          _lastReceivedMediaPayloadType = _lastReceivedPayloadType;
+        }
+      }
+      if (reInitializeDecoder) {
+        // reset statistics
+        ResetStatistics();
+      }
+    } else {
+      // Same payload type as before: just hand out the cached parameters.
+      if(_audio)
+      {
+        memcpy(&audioSpecificPayload, &_lastReceivedAudioSpecific,
+               sizeof(_lastReceivedAudioSpecific));
+      } else
+      {
+        memcpy(&videoSpecificPayload, &_lastReceivedVideoSpecific,
+               sizeof(_lastReceivedVideoSpecific));
+      }
+      isRED = false;
+    }
+  }   // end critsect
+  if (reInitializeDecoder) {
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if (_cbRtpFeedback) {
+      // create new decoder instance
+      if(_audio) {
+        if (-1 == _cbRtpFeedback->OnInitializeDecoder(_id, payloadType,
+            payloadName, audioSpecificPayload.frequency,
+            audioSpecificPayload.channels, audioSpecificPayload.rate)) {
+          WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                       "Failed to create audio decoder for payload type:%d",
+                       payloadType);
+          return -1; // Wrong payload type
+        }
+      } else {
+        if (-1 == _cbRtpFeedback->OnInitializeDecoder(_id, payloadType,
+            payloadName, 90000, 1, 0)) {
+          WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                       "Failed to create video decoder for payload type:%d",
+                       payloadType);
+          return -1; // Wrong payload type
+        }
+      }
+    }
+  }
+  return 0;
+}
+
+// no criticalsection when called
+// Track changes in the packet's CSRC list (and store audio energy
+// levels). Diffs the new CSRC list against the previously stored one
+// and fires OnIncomingCSRCChanged for every added or removed
+// contributing source. DTMF packets are ignored.
+void RTPReceiver::CheckCSRC(const WebRtcRTPHeader* rtpHeader) {
+  WebRtc_Word32 numCSRCsDiff = 0;
+  WebRtc_UWord32 oldRemoteCSRC[kRtpCsrcSize];
+  WebRtc_UWord8 oldNumCSRCs = 0;
+
+  {
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (TelephoneEventPayloadType(rtpHeader->header.payloadType)) {
+      // Don't do this for DTMF packets
+      return;
+    }
+    _numEnergy = rtpHeader->type.Audio.numEnergy;
+    if (rtpHeader->type.Audio.numEnergy > 0 &&
+        rtpHeader->type.Audio.numEnergy <= kRtpCsrcSize) {
+      memcpy(_currentRemoteEnergy,
+             rtpHeader->type.Audio.arrOfEnergy,
+             rtpHeader->type.Audio.numEnergy);
+    }
+    oldNumCSRCs  = _numCSRCs;
+    if (oldNumCSRCs > 0) {
+      // Make a copy of old.
+      memcpy(oldRemoteCSRC, _currentRemoteCSRC,
+             _numCSRCs * sizeof(WebRtc_UWord32));
+    }
+    const WebRtc_UWord8 numCSRCs = rtpHeader->header.numCSRCs;
+    if ((numCSRCs > 0) && (numCSRCs <= kRtpCsrcSize)) {
+      // Copy new
+      memcpy(_currentRemoteCSRC,
+             rtpHeader->header.arrOfCSRCs,
+             numCSRCs * sizeof(WebRtc_UWord32));
+    }
+    if (numCSRCs > 0 || oldNumCSRCs > 0) {
+      numCSRCsDiff = numCSRCs - oldNumCSRCs;
+      _numCSRCs = numCSRCs;  // Update stored CSRCs.
+    } else {
+      // No change.
+      return;
+    }
+  }  // End scoped CriticalSection.
+
+  CriticalSectionScoped lock(_criticalSectionCbs);
+  if (_cbRtpFeedback == NULL) {
+    return;
+  }
+  bool haveCalledCallback = false;
+  // Search for new CSRC in old array.
+  for (WebRtc_UWord8 i = 0; i < rtpHeader->header.numCSRCs; ++i) {
+    const WebRtc_UWord32 csrc = rtpHeader->header.arrOfCSRCs[i];
+
+    bool foundMatch = false;
+    for (WebRtc_UWord8 j = 0; j < oldNumCSRCs; ++j) {
+      if (csrc == oldRemoteCSRC[j]) {  // old list
+        foundMatch = true;
+        break;
+      }
+    }
+    if (!foundMatch && csrc) {
+      // Didn't find it, report it as new.
+      haveCalledCallback = true;
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, csrc, true);
+    }
+  }
+  // Search for old CSRC in new array.
+  for (WebRtc_UWord8 i = 0; i < oldNumCSRCs; ++i) {
+    const WebRtc_UWord32 csrc = oldRemoteCSRC[i];
+
+    bool foundMatch = false;
+    for (WebRtc_UWord8 j = 0; j < rtpHeader->header.numCSRCs; ++j) {
+      if (csrc == rtpHeader->header.arrOfCSRCs[j]) {
+        foundMatch = true;
+        break;
+      }
+    }
+    if (!foundMatch && csrc) {
+      // Did not find it, report as removed.
+      haveCalledCallback = true;
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, csrc, false);
+    }
+  }
+  if (!haveCalledCallback) {
+    // If the CSRC list contains non-unique entries we will end up here.
+    // Using CSRC 0 to signal this event, not interop safe, other
+    // implementations might have CSRC 0 as a valid value.
+    if (numCSRCsDiff > 0) {
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, 0, true);
+    } else if (numCSRCsDiff < 0) {
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, 0, false);
+    }
+  }
+}
+
+// Clear all receive statistics, including the values cached from the
+// last generated report block. Always returns 0.
+WebRtc_Word32 RTPReceiver::ResetStatistics() {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  // Cached "last report" values.
+  _lastReportInorderPackets = 0;
+  _lastReportOldPackets = 0;
+  _lastReportSeqMax = 0;
+  _lastReportFractionLost = 0;
+  _lastReportCumulativeLost = 0;
+  _lastReportExtendedHighSeqNum = 0;
+  _lastReportJitter = 0;
+  _lastReportJitterTransmissionTimeOffset = 0;
+  // Running counters.
+  _jitterQ4 = 0;
+  _jitterMaxQ4 = 0;
+  _cumulativeLoss = 0;
+  _jitterQ4TransmissionTimeOffset = 0;
+  _receivedSeqWraps = 0;
+  _receivedSeqMax = 0;
+  _receivedSeqFirst = 0;
+  _receivedByteCount = 0;
+  _receivedOldPacketCount = 0;
+  _receivedInorderPacketCount = 0;
+  return 0;
+}
+
+// Clear the byte/packet counters (but not jitter or sequence state).
+// Always returns 0.
+WebRtc_Word32 RTPReceiver::ResetDataCounters() {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  _receivedByteCount = 0;
+  _receivedOldPacketCount = 0;
+  _receivedInorderPacketCount = 0;
+  _lastReportInorderPackets = 0;
+  return 0;
+}
+
+// Convenience overload for callers that do not need the number of
+// missing packets; forwards to the full version with a local sink.
+WebRtc_Word32 RTPReceiver::Statistics(
+    WebRtc_UWord8* fraction_lost,
+    WebRtc_UWord32* cum_lost,
+    WebRtc_UWord32* ext_max,
+    WebRtc_UWord32* jitter,
+    WebRtc_UWord32* max_jitter,
+    WebRtc_UWord32* jitter_transmission_time_offset,
+    bool reset) const {
+  WebRtc_Word32 missing = 0;
+  return Statistics(fraction_lost, cum_lost, ext_max, jitter, max_jitter,
+                    jitter_transmission_time_offset, &missing, reset);
+}
+
+// Compute (and optionally latch) RTCP report-block statistics in the
+// style of RFC 3550: fraction lost, cumulative loss, extended highest
+// sequence number and jitter. With reset == false the values cached by
+// the previous reset == true call are returned; with reset == true
+// fresh values are computed from the counters and stored as the new
+// "last report". missing must be non-NULL; other out-pointers may be
+// NULL. Returns 0 on success, -1 when nothing has been received yet or
+// no cached report exists.
+WebRtc_Word32
+RTPReceiver::Statistics(WebRtc_UWord8  *fraction_lost,
+                        WebRtc_UWord32 *cum_lost,
+                        WebRtc_UWord32 *ext_max,
+                        WebRtc_UWord32 *jitter,
+                        WebRtc_UWord32 *max_jitter,
+                        WebRtc_UWord32 *jitter_transmission_time_offset,
+                        WebRtc_Word32  *missing,
+                        bool reset) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (missing == NULL)
+    {
+        return -1;
+    }
+    if(_receivedSeqFirst == 0 && _receivedByteCount == 0)
+    {
+        // we have not received anything
+        // -1 required by RTCP sender
+        return -1;
+    }
+    if(!reset)
+    {
+        if(_lastReportInorderPackets == 0)
+        {
+            // no report
+            return -1;
+        }
+        // just get last report
+        if(fraction_lost)
+        {
+            *fraction_lost = _lastReportFractionLost;
+        }
+        if(cum_lost)
+        {
+            *cum_lost = _lastReportCumulativeLost;  // 24 bits valid
+        }
+        if(ext_max)
+        {
+            *ext_max = _lastReportExtendedHighSeqNum;
+        }
+        if(jitter)
+        {
+            *jitter =_lastReportJitter;
+        }
+        if(max_jitter)
+        {
+            // note that the internal jitter value is in Q4
+            // and needs to be scaled by 1/16
+            *max_jitter = (_jitterMaxQ4 >> 4);
+        }
+        if(jitter_transmission_time_offset)
+        {
+            *jitter_transmission_time_offset =
+               _lastReportJitterTransmissionTimeOffset;
+        }
+        return 0;
+    }
+
+    // From here on reset == true: compute a fresh report.
+    if (_lastReportInorderPackets == 0)
+    {
+        // First time we send a report
+        _lastReportSeqMax = _receivedSeqFirst-1;
+    }
+    /*
+    *   calc fraction lost
+    */
+    WebRtc_UWord16 expSinceLast = (_receivedSeqMax - _lastReportSeqMax);
+
+    if(_lastReportSeqMax > _receivedSeqMax)
+    {
+        // can we assume that the seqNum can't go decrease over a full RTCP period ?
+        expSinceLast = 0;
+    }
+
+    // number of received RTP packets since last report, counts all packets but not re-transmissions
+    WebRtc_UWord32 recSinceLast = _receivedInorderPacketCount - _lastReportInorderPackets;
+
+    if(_nackMethod == kNackOff)
+    {
+        // this is needed for re-ordered packets
+        WebRtc_UWord32 oldPackets = _receivedOldPacketCount - _lastReportOldPackets;
+        recSinceLast += oldPackets;
+    }else
+    {
+        // with NACK we don't know the expected retransmitions during the last second
+        // we know how many "old" packets we have received we just count the numer of
+        // old received to estimate the loss but it still does not guarantee an exact number
+        // since we run this based on time triggered by sending of a RTP packet this
+        // should have a minimum effect
+
+        // with NACK we don't count old packets as received since they are re-transmitted
+        // we use RTT to decide if a packet is re-ordered or re-transmitted
+    }
+
+    *missing = 0;
+    if(expSinceLast > recSinceLast)
+    {
+        *missing = (expSinceLast - recSinceLast);
+    }
+    WebRtc_UWord8 fractionLost = 0;
+    if(expSinceLast)
+    {
+        // scale 0 to 255, where 255 is 100% loss
+        fractionLost = (WebRtc_UWord8) ((255 * (*missing)) / expSinceLast);
+    }
+    if(fraction_lost)
+    {
+        *fraction_lost = fractionLost;
+    }
+    // we need a counter for cumulative loss too
+    _cumulativeLoss += *missing;
+
+    if(_jitterQ4 > _jitterMaxQ4)
+    {
+        _jitterMaxQ4 = _jitterQ4;
+    }
+    if(cum_lost)
+    {
+        *cum_lost =  _cumulativeLoss;
+    }
+    if(ext_max)
+    {
+        // Extended highest sequence: wrap count in the upper 16 bits.
+        *ext_max = (_receivedSeqWraps<<16) + _receivedSeqMax;
+    }
+    if(jitter)
+    {
+        // note that the internal jitter value is in Q4
+        // and needs to be scaled by 1/16
+        *jitter = (_jitterQ4 >> 4);
+    }
+    if(max_jitter)
+    {
+        // note that the internal jitter value is in Q4
+        // and needs to be scaled by 1/16
+        *max_jitter = (_jitterMaxQ4 >> 4);
+    }
+    if(jitter_transmission_time_offset)
+    {
+        // note that the internal jitter value is in Q4
+        // and needs to be scaled by 1/16
+        *jitter_transmission_time_offset =
+            (_jitterQ4TransmissionTimeOffset >> 4);
+    }
+    // reset is always true here (the !reset path returned above).
+    if(reset)
+    {
+        // store this report
+        _lastReportFractionLost = fractionLost;
+        _lastReportCumulativeLost = _cumulativeLoss;  // 24 bits valid
+        _lastReportExtendedHighSeqNum = (_receivedSeqWraps<<16) + _receivedSeqMax;
+        _lastReportJitter  = (_jitterQ4 >> 4);
+        _lastReportJitterTransmissionTimeOffset =
+            (_jitterQ4TransmissionTimeOffset >> 4);
+
+        // only for report blocks in RTCP SR and RR
+        _lastReportInorderPackets = _receivedInorderPacketCount;
+        _lastReportOldPackets = _receivedOldPacketCount;
+        _lastReportSeqMax = _receivedSeqMax;
+    }
+    return 0;
+}
+
+// Fetch the total bytes and packets received. Either out-pointer may be
+// NULL if the caller does not need that counter. Always returns 0.
+WebRtc_Word32 RTPReceiver::DataCounters(
+    WebRtc_UWord32* bytesReceived,
+    WebRtc_UWord32* packetsReceived) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  if (bytesReceived != NULL) {
+    *bytesReceived = _receivedByteCount;
+  }
+  if (packetsReceived != NULL) {
+    *packetsReceived = _receivedOldPacketCount + _receivedInorderPacketCount;
+  }
+  return 0;
+}
+
+// Periodic tick: update the receive-bitrate estimate kept by the
+// Bitrate base class.
+void RTPReceiver::ProcessBitrate() {
+  CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+  Bitrate::Process();
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_receiver.h b/trunk/src/modules/rtp_rtcp/source/rtp_receiver.h
new file mode 100644
index 0000000..4a14c25
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_receiver.h
@@ -0,0 +1,264 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
+
+#include <map>
+
+#include "typedefs.h"
+#include "rtp_utility.h"
+
+#include "rtp_header_extension.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+#include "rtp_receiver_audio.h"
+#include "rtp_receiver_video.h"
+#include "rtcp_receiver_help.h"
+#include "Bitrate.h"
+
+namespace webrtc {
+class RtpRtcpFeedback;
+class ModuleRtpRtcpImpl;
+class Trace;
+
+class RTPReceiver : public RTPReceiverAudio, public RTPReceiverVideo, public Bitrate
+{
+public:
+    RTPReceiver(const WebRtc_Word32 id,
+                const bool audio,
+                RtpRtcpClock* clock,
+                ModuleRtpRtcpImpl* owner);
+
+    virtual ~RTPReceiver();
+
+    virtual void ChangeUniqueId(const WebRtc_Word32 id);
+
+    WebRtc_Word32 Init();
+
+    RtpVideoCodecTypes VideoCodecType() const;
+    WebRtc_UWord32 MaxConfiguredBitrate() const;
+
+    WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
+    void PacketTimeout();
+
+    void ProcessDeadOrAlive(const bool RTCPalive, const WebRtc_UWord32 now);
+
+    void ProcessBitrate();
+
+    WebRtc_Word32 RegisterIncomingDataCallback(RtpData* incomingDataCallback);
+    WebRtc_Word32 RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback);
+
+    WebRtc_Word32 RegisterReceivePayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate);
+
+    WebRtc_Word32 DeRegisterReceivePayload(const WebRtc_Word8 payloadType);
+
+    WebRtc_Word32 ReceivePayloadType(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate,
+        WebRtc_Word8* payloadType) const;
+
+    WebRtc_Word32 ReceivePayload(const WebRtc_Word8 payloadType,
+                                 char payloadName[RTP_PAYLOAD_NAME_SIZE],
+                                 WebRtc_UWord32* frequency,
+                                 WebRtc_UWord8* channels,
+                                 WebRtc_UWord32* rate) const;
+
+    WebRtc_Word32 RemotePayload(char payloadName[RTP_PAYLOAD_NAME_SIZE],
+                                WebRtc_Word8* payloadType,
+                                WebRtc_UWord32* frequency,
+                                WebRtc_UWord8* channels) const;
+
+    WebRtc_Word32 IncomingRTPPacket(WebRtcRTPHeader* rtpheader,
+                                    const WebRtc_UWord8* incomingRtpPacket,
+                                    const WebRtc_UWord16 incomingRtpPacketLengt);
+
+    NACKMethod NACK() const ;
+
+    // Turn negative acknowledgement requests on/off
+    WebRtc_Word32 SetNACKStatus(const NACKMethod method);
+
+
+    // last received
+    virtual WebRtc_UWord32 TimeStamp() const;
+    virtual WebRtc_UWord16 SequenceNumber() const;
+
+    WebRtc_Word32 EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const;
+
+    WebRtc_UWord32 SSRC() const;
+
+    WebRtc_Word32 CSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const;
+
+    WebRtc_Word32 Energy( WebRtc_UWord8 arrOfEnergy[kRtpCsrcSize]) const;
+
+    // get the currently configured SSRC filter
+    WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const;
+
+    // set a SSRC to be used as a filter for incoming RTP streams
+    WebRtc_Word32 SetSSRCFilter(const bool enable, const WebRtc_UWord32 allowedSSRC);
+
+    WebRtc_Word32 Statistics(WebRtc_UWord8  *fraction_lost,
+                             WebRtc_UWord32 *cum_lost,
+                             WebRtc_UWord32 *ext_max,
+                             WebRtc_UWord32 *jitter,  // will be moved from JB
+                             WebRtc_UWord32 *max_jitter,
+                             WebRtc_UWord32 *jitter_transmission_time_offset,
+                             bool reset) const;
+
+    WebRtc_Word32 Statistics(WebRtc_UWord8  *fraction_lost,
+                             WebRtc_UWord32 *cum_lost,
+                             WebRtc_UWord32 *ext_max,
+                             WebRtc_UWord32 *jitter,  // will be moved from JB
+                             WebRtc_UWord32 *max_jitter,
+                             WebRtc_UWord32 *jitter_transmission_time_offset,
+                             WebRtc_Word32 *missing,
+                             bool reset) const;
+
+    WebRtc_Word32 DataCounters(WebRtc_UWord32 *bytesReceived,
+                               WebRtc_UWord32 *packetsReceived) const;
+
+    WebRtc_Word32 ResetStatistics();
+
+    WebRtc_Word32 ResetDataCounters();
+
+    WebRtc_UWord16 PacketOHReceived() const;
+
+    WebRtc_UWord32 PacketCountReceived() const;
+
+    WebRtc_UWord32 ByteCountReceived() const;
+
+    WebRtc_Word32 RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                             const WebRtc_UWord8 id);
+
+    WebRtc_Word32 DeregisterRtpHeaderExtension(const RTPExtensionType type);
+
+    void GetHeaderExtensionMapCopy(RtpHeaderExtensionMap* map) const;
+
+    virtual WebRtc_UWord32 PayloadTypeToPayload(const WebRtc_UWord8 payloadType,
+                                                ModuleRTPUtility::Payload*& payload) const;
+    /*
+    *  RTX
+    */
+    void SetRTXStatus(const bool enable, const WebRtc_UWord32 SSRC);
+
+    void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;
+
+protected:
+    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                        const WebRtc_UWord16 payloadSize,
+                                                        const WebRtcRTPHeader* rtpHeader);
+
+    virtual bool RetransmitOfOldPacket(const WebRtc_UWord16 sequenceNumber,
+                                       const WebRtc_UWord32 rtpTimeStamp) const;
+
+
+    void UpdateStatistics(const WebRtcRTPHeader* rtpHeader,
+                          const WebRtc_UWord16 bytes,
+                          const bool oldPacket);
+
+    virtual WebRtc_Word8 REDPayloadType() const;
+
+private:
+    // Is RED configured with payload type payloadType
+    bool REDPayloadType(const WebRtc_Word8 payloadType) const;
+
+    bool InOrderPacket(const WebRtc_UWord16 sequenceNumber) const;
+
+    void CheckSSRCChanged(const WebRtcRTPHeader* rtpHeader);
+    void CheckCSRC(const WebRtcRTPHeader* rtpHeader);
+    WebRtc_Word32 CheckPayloadChanged(const WebRtcRTPHeader* rtpHeader,
+                                      const WebRtc_Word8 firstPayloadByte,
+                                      bool& isRED,
+                                      ModuleRTPUtility::AudioPayload& audioSpecific,
+                                      ModuleRTPUtility::VideoPayload& videoSpecific);
+
+    void UpdateNACKBitRate(WebRtc_Word32 bytes, WebRtc_UWord32 now);
+    bool ProcessNACKBitRate(WebRtc_UWord32 now);
+
+private:
+    WebRtc_Word32           _id;
+    const bool              _audio;
+    ModuleRtpRtcpImpl&      _rtpRtcp;
+
+    CriticalSectionWrapper*    _criticalSectionCbs;
+    RtpFeedback*        _cbRtpFeedback;
+    RtpData*            _cbRtpData;
+
+    CriticalSectionWrapper*    _criticalSectionRTPReceiver;
+    mutable WebRtc_UWord32    _lastReceiveTime;
+    WebRtc_UWord16            _lastReceivedPayloadLength;
+    WebRtc_Word8              _lastReceivedPayloadType;
+    WebRtc_Word8              _lastReceivedMediaPayloadType;
+
+    ModuleRTPUtility::AudioPayload _lastReceivedAudioSpecific;
+    ModuleRTPUtility::VideoPayload _lastReceivedVideoSpecific;
+
+    WebRtc_UWord32            _packetTimeOutMS;
+
+    WebRtc_Word8              _redPayloadType;
+
+    std::map<WebRtc_Word8, ModuleRTPUtility::Payload*> _payloadTypeMap;
+    RtpHeaderExtensionMap     _rtpHeaderExtensionMap;
+
+    // SSRCs
+    WebRtc_UWord32            _SSRC;
+    WebRtc_UWord8             _numCSRCs;
+    WebRtc_UWord32            _currentRemoteCSRC[kRtpCsrcSize];
+    WebRtc_UWord8             _numEnergy;
+    WebRtc_UWord8             _currentRemoteEnergy[kRtpCsrcSize];
+
+    bool                      _useSSRCFilter;
+    WebRtc_UWord32            _SSRCFilter;
+
+    // stats on received RTP packets
+    WebRtc_UWord32            _jitterQ4;
+    mutable WebRtc_UWord32    _jitterMaxQ4;
+    mutable WebRtc_UWord32    _cumulativeLoss;
+    WebRtc_UWord32            _jitterQ4TransmissionTimeOffset;
+
+    WebRtc_UWord32            _localTimeLastReceivedTimestamp;
+    WebRtc_UWord32            _lastReceivedTimestamp;
+    WebRtc_UWord16            _lastReceivedSequenceNumber;
+    WebRtc_Word32             _lastReceivedTransmissionTimeOffset;
+    WebRtc_UWord16            _receivedSeqFirst;
+    WebRtc_UWord16            _receivedSeqMax;
+    WebRtc_UWord16            _receivedSeqWraps;
+
+    // current counter values
+    WebRtc_UWord16            _receivedPacketOH;
+    WebRtc_UWord32            _receivedByteCount;
+    WebRtc_UWord32            _receivedOldPacketCount;
+    WebRtc_UWord32            _receivedInorderPacketCount;
+
+    // counter values when we sent the last report
+    mutable WebRtc_UWord32    _lastReportInorderPackets;
+    mutable WebRtc_UWord32    _lastReportOldPackets;
+    mutable WebRtc_UWord16    _lastReportSeqMax;
+    mutable WebRtc_UWord8     _lastReportFractionLost;
+    mutable WebRtc_UWord32    _lastReportCumulativeLost;  // 24 bits valid
+    mutable WebRtc_UWord32    _lastReportExtendedHighSeqNum;
+    mutable WebRtc_UWord32    _lastReportJitter;
+    mutable WebRtc_UWord32    _lastReportJitterTransmissionTimeOffset;
+
+    NACKMethod _nackMethod;
+
+    bool _RTX;
+    WebRtc_UWord32 _ssrcRTX;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_audio.cc
new file mode 100644
index 0000000..b61157d
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -0,0 +1,590 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_receiver_audio.h"
+
+#include <cassert> //assert
+#include <cstring> // memcpy()
+#include <math.h>    // pow()
+
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+RTPReceiverAudio::RTPReceiverAudio(const WebRtc_Word32 id):
+    _id(id),
+    _lastReceivedFrequency(8000),
+    _telephoneEvent(false),
+    _telephoneEventForwardToDecoder(false),
+    _telephoneEventDetectEndOfTone(false),
+    _telephoneEventPayloadType(-1),
+    _cngNBPayloadType(-1),
+    _cngWBPayloadType(-1),
+    _cngSWBPayloadType(-1),
+    _cngPayloadType(-1),
+    _G722PayloadType(-1),
+    _lastReceivedG722(false),
+    _criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbAudioFeedback(NULL)
+{
+}
+
+RTPReceiverAudio::~RTPReceiverAudio()
+{
+    delete _criticalSectionFeedback;
+}
+
+WebRtc_Word32 RTPReceiverAudio::Init() {
+  _lastReceivedFrequency = 8000;
+  _telephoneEvent = false;
+  _telephoneEventForwardToDecoder = false;
+  _telephoneEventDetectEndOfTone = false;
+  _telephoneEventPayloadType = -1;
+
+  _telephoneEventReported.clear();
+
+  _cngNBPayloadType = -1;
+  _cngWBPayloadType = -1;
+  _cngSWBPayloadType = -1;
+  _cngPayloadType = -1;
+  _G722PayloadType = -1;
+  _lastReceivedG722 = false;
+  return 0;
+}
+
+void
+RTPReceiverAudio::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+}
+
+WebRtc_Word32
+RTPReceiverAudio::RegisterIncomingAudioCallback(RtpAudioFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedback);
+    _cbAudioFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+WebRtc_UWord32
+RTPReceiverAudio::AudioFrequency() const
+{
+    if(_lastReceivedG722)
+    {
+        return 8000;
+    }
+    return _lastReceivedFrequency;
+}
+
+// Outband TelephoneEvent(DTMF) detection
+WebRtc_Word32
+RTPReceiverAudio::SetTelephoneEventStatus(const bool enable,
+                                          const bool forwardToDecoder,
+                                          const bool detectEndOfTone)
+{
+    _telephoneEvent= enable;
+    _telephoneEventDetectEndOfTone = detectEndOfTone;
+    _telephoneEventForwardToDecoder = forwardToDecoder;
+    return 0;
+}
+
+ // Is outband TelephoneEvent(DTMF) turned on/off?
+bool
+RTPReceiverAudio::TelephoneEvent() const
+{
+    return _telephoneEvent;
+}
+
+// Is forwarding of outband telephone events turned on/off?
+bool
+RTPReceiverAudio::TelephoneEventForwardToDecoder() const
+{
+    return _telephoneEventForwardToDecoder;
+}
+
+bool
+RTPReceiverAudio::TelephoneEventPayloadType(const WebRtc_Word8 payloadType) const
+{
+    return (_telephoneEventPayloadType == payloadType)?true:false;
+}
+
+bool
+RTPReceiverAudio::CNGPayloadType(const WebRtc_Word8 payloadType,
+                                 WebRtc_UWord32& frequency)
+{
+    //  we can have three CNG on 8000Hz, 16000Hz and 32000Hz
+    if(_cngNBPayloadType == payloadType)
+    {
+        frequency = 8000;
+        if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngNBPayloadType))
+        {
+            ResetStatistics();
+        }
+        _cngPayloadType = _cngNBPayloadType;
+        return true;
+    } else if(_cngWBPayloadType == payloadType)
+    {
+        // if last received codec is G.722 we must use frequency 8000
+        if(_lastReceivedG722)
+        {
+            frequency = 8000;
+        } else
+        {
+            frequency = 16000;
+        }
+        if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngWBPayloadType))
+        {
+            ResetStatistics();
+        }
+        _cngPayloadType = _cngWBPayloadType;
+        return true;
+    }else if(_cngSWBPayloadType == payloadType)
+    {
+        frequency = 32000;
+        if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngSWBPayloadType))
+        {
+            ResetStatistics();
+        }
+        _cngPayloadType = _cngSWBPayloadType;
+        return true;
+    }else
+    {
+        //  not CNG
+        if(_G722PayloadType == payloadType)
+        {
+            _lastReceivedG722 = true;
+        }else
+        {
+            _lastReceivedG722 = false;
+        }
+    }
+    return false;
+}
+
+/*
+   Sample based or frame based codecs based on RFC 3551
+
+   NOTE! There is one error in the RFC, stating G.722 uses 8 bits/samples.
+   The correct rate is 4 bits/sample.
+
+   name of                              sampling              default
+   encoding  sample/frame  bits/sample      rate  ms/frame  ms/packet
+
+   Sample based audio codecs
+   DVI4      sample        4                var.                   20
+   G722      sample        4              16,000                   20
+   G726-40   sample        5               8,000                   20
+   G726-32   sample        4               8,000                   20
+   G726-24   sample        3               8,000                   20
+   G726-16   sample        2               8,000                   20
+   L8        sample        8                var.                   20
+   L16       sample        16               var.                   20
+   PCMA      sample        8                var.                   20
+   PCMU      sample        8                var.                   20
+
+   Frame based audio codecs
+   G723      frame         N/A             8,000        30         30
+   G728      frame         N/A             8,000       2.5         20
+   G729      frame         N/A             8,000        10         20
+   G729D     frame         N/A             8,000        10         20
+   G729E     frame         N/A             8,000        10         20
+   GSM       frame         N/A             8,000        20         20
+   GSM-EFR   frame         N/A             8,000        20         20
+   LPC       frame         N/A             8,000        20         20
+   MPA       frame         N/A              var.      var.
+
+   G7221     frame         N/A
+*/
+
+ModuleRTPUtility::Payload* RTPReceiverAudio::RegisterReceiveAudioPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  if (ModuleRTPUtility::StringCompare(payloadName, "telephone-event", 15)) {
+    _telephoneEventPayloadType = payloadType;
+  }
+  if (ModuleRTPUtility::StringCompare(payloadName, "cn", 2)) {
+    //  we can have three CNG on 8000Hz, 16000Hz and 32000Hz
+    if(frequency == 8000){
+      _cngNBPayloadType = payloadType;
+    } else if(frequency == 16000) {
+      _cngWBPayloadType = payloadType;
+    } else if(frequency == 32000) {
+      _cngSWBPayloadType = payloadType;
+    } else {
+      assert(false);
+      return NULL;
+    }
+  }
+  WebRtc_UWord8 bitsPerSample = 0; // zero implies frame based
+  bool isTrueStereo = false; // Default value
+  if (ModuleRTPUtility::StringCompare(payloadName, "DVI4", 4)) {
+    bitsPerSample = 4;
+  } else if(ModuleRTPUtility::StringCompare(payloadName, "G722", 4)) {
+    if(ModuleRTPUtility::StringCompare(payloadName, "G7221", 5)) {
+      // frame based
+    } else {
+      _G722PayloadType = payloadType;
+      bitsPerSample = 4;
+    }
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"G726-40",7)) {
+    bitsPerSample = 5;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"G726-32",7)) {
+    bitsPerSample = 4;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"G726-24",7)) {
+    bitsPerSample = 3;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"G726-16",7)) {
+    bitsPerSample = 2;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"L8",2)) {
+    bitsPerSample = 8;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"L16",3)) {
+    bitsPerSample = 16;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"PCMU",4)) {
+    bitsPerSample = 8;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"PCMA",4)) {
+    bitsPerSample = 8;
+  } else if(ModuleRTPUtility::StringCompare(payloadName,"CELT",4))
+  {
+    isTrueStereo = true;
+  }
+  ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  payload->typeSpecific.Audio.frequency = frequency;
+  payload->typeSpecific.Audio.channels = channels;
+  payload->typeSpecific.Audio.bitsPerSample = bitsPerSample;
+  payload->typeSpecific.Audio.rate = rate;
+  payload->typeSpecific.Audio.trueStereoCodec = isTrueStereo;
+  payload->audio = true;
+  return payload;
+}
+
+// we must not hold any critical sections when calling CallbackOfReceivedPayloadData
+WebRtc_Word32
+RTPReceiverAudio::ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
+                                          const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadLength,
+                                          const ModuleRTPUtility::AudioPayload& audioSpecific,
+                                          const bool isRED)
+{
+    WebRtc_UWord8 newEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
+    WebRtc_UWord8 removedEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
+    WebRtc_UWord8 numberOfNewEvents = 0;
+    WebRtc_UWord8 numberOfRemovedEvents = 0;
+    bool telephoneEventPacket = TelephoneEventPayloadType(rtpHeader->header.payloadType);
+
+    if(payloadLength == 0)
+    {
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped lock(_criticalSectionFeedback);
+
+        if(telephoneEventPacket)
+        {
+            // RFC 4733 2.3
+            /*
+                0                   1                   2                   3
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |     event     |E|R| volume    |          duration             |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            if(payloadLength % 4 != 0)
+            {
+                return -1;
+            }
+            WebRtc_UWord8 numberOfEvents = payloadLength / 4;
+
+            // sanity
+            if(numberOfEvents >= MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS)
+            {
+                numberOfEvents = MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS;
+            }
+            for (int n = 0; n < numberOfEvents; n++)
+            {
+                bool end = (payloadData[(4*n)+1] & 0x80)? true:false;
+
+                std::set<WebRtc_UWord8>::iterator event =
+                    _telephoneEventReported.find(payloadData[4*n]);
+
+                if(event != _telephoneEventReported.end())
+                {
+                    // we have already seen this event
+                    if(end)
+                    {
+                        removedEvents[numberOfRemovedEvents]= payloadData[4*n];
+                        numberOfRemovedEvents++;
+                        _telephoneEventReported.erase(payloadData[4*n]);
+                    }
+                }else
+                {
+                    if(end)
+                    {
+                        // don't add if it's an end of a tone
+                    }else
+                    {
+                        newEvents[numberOfNewEvents] = payloadData[4*n];
+                        numberOfNewEvents++;
+                        _telephoneEventReported.insert(payloadData[4*n]);
+                    }
+                }
+            }
+
+            // RFC 4733 2.5.1.3 & 2.5.2.3 Long-Duration Events
+            // should not be a problem since we don't care about the duration
+
+            // RFC 4733 See 2.5.1.5. & 2.5.2.4.  Multiple Events in a Packet
+        }
+
+        if(_telephoneEvent && _cbAudioFeedback)
+        {
+            for (int n = 0; n < numberOfNewEvents; n++)
+            {
+                _cbAudioFeedback->OnReceivedTelephoneEvent(_id, newEvents[n], false);
+            }
+            if(_telephoneEventDetectEndOfTone)
+            {
+                for (int n = 0; n < numberOfRemovedEvents; n++)
+                {
+                    _cbAudioFeedback->OnReceivedTelephoneEvent(_id, removedEvents[n], true);
+                }
+            }
+        }
+    }
+    if(! telephoneEventPacket )
+    {
+        _lastReceivedFrequency = audioSpecific.frequency;
+    }
+
+    // Check if this is a CNG packet, receiver might want to know
+    WebRtc_UWord32 dummy;
+    if(CNGPayloadType(rtpHeader->header.payloadType, dummy))
+    {
+        rtpHeader->type.Audio.isCNG=true;
+        rtpHeader->frameType = kAudioFrameCN;
+    }else
+    {
+        rtpHeader->frameType = kAudioFrameSpeech;
+        rtpHeader->type.Audio.isCNG=false;
+    }
+
+    // check if it's a DTMF event, hence something we can playout
+    if(telephoneEventPacket)
+    {
+        if(!_telephoneEventForwardToDecoder)
+        {
+            // don't forward event to decoder
+            return 0;
+        }
+        std::set<WebRtc_UWord8>::iterator first =
+            _telephoneEventReported.begin();
+        if(first != _telephoneEventReported.end() && *first > 15)
+        {
+            // don't forward non DTMF events
+            return 0;
+        }
+    }
+    if(isRED && !(payloadData[0] & 0x80))
+    {
+        // we receive only one frame packed in a RED packet; remove the RED wrapper
+        rtpHeader->header.payloadType = payloadData[0];
+
+        // only one frame in the RED packet; strip the one RED header byte to help NetEq
+        return CallbackOfReceivedPayloadData(payloadData+1,
+                                             payloadLength-1,
+                                             rtpHeader);
+    }
+    if(audioSpecific.channels > 1)
+    {
+        WebRtc_Word32 retVal = 0;
+        WebRtc_UWord16 channelLength = payloadLength/audioSpecific.channels;
+
+        if(audioSpecific.bitsPerSample > 0)
+        {
+            // sanity
+            assert((payloadLength*8)%audioSpecific.bitsPerSample == 0);
+
+            // sample based codec
+
+            // build matrix
+            WebRtc_UWord8 matrix[IP_PACKET_SIZE];
+            WebRtc_UWord32 offsetBytes = 0;
+            WebRtc_UWord32 offsetBytesInsert = 0;
+            // initialize matrix to 0
+            memset(matrix, 0, audioSpecific.channels*channelLength);
+
+            switch(audioSpecific.bitsPerSample)
+            {
+            case 1:
+            case 2:
+            case 3:
+            case 4:
+            case 5:
+            case 6:
+            case 7:
+                {
+                    WebRtc_UWord32 offsetSamples = 0;
+                    WebRtc_UWord32 offsetSamplesInsert = 0;
+                    WebRtc_UWord16 bitMask = (WebRtc_UWord16)ModuleRTPUtility::pow2(audioSpecific.bitsPerSample)-1;
+                    WebRtc_UWord16 samplesPerChannel =payloadLength*8/audioSpecific.bitsPerSample/audioSpecific.channels;
+
+                    for(WebRtc_UWord32 i = 0; i < samplesPerChannel; i++)
+                    {
+                        WebRtc_UWord8 insertShift = (WebRtc_UWord8)((offsetSamplesInsert+audioSpecific.bitsPerSample)%16);
+                        insertShift = 16 - insertShift;  // inverse the calculation
+
+                        for(WebRtc_UWord32 j = 0; j < audioSpecific.channels; j++)
+                        {
+                            // get sample
+                            WebRtc_UWord16 s = payloadData[offsetBytes] << 8;
+
+                            // check that we don't read outside the memory
+                            if(offsetBytes < (WebRtc_UWord32)payloadLength -2)
+                            {
+                                s += payloadData[offsetBytes+1];
+                            }
+
+                            WebRtc_UWord8 readShift = (WebRtc_UWord8)((offsetSamples+audioSpecific.bitsPerSample)%16);
+                            readShift = 16 - readShift;  // inverse the calculation
+                            s >>= readShift;
+                            s &= bitMask;
+
+                            // prepare for reading next sample
+                            offsetSamples += audioSpecific.bitsPerSample;
+                            if(readShift <= audioSpecific.bitsPerSample)
+                            {
+                                // next sample does not fit,
+                                // or fits exactly
+                                offsetSamples -= 8;
+                                offsetBytes++;
+                            }
+
+                            // insert sample into matrix
+                            WebRtc_UWord32 columOffset = j*channelLength;
+
+                            WebRtc_UWord16 insert = s << insertShift;
+#if defined(WEBRTC_LITTLE_ENDIAN)
+                            matrix[columOffset+offsetBytesInsert]   |= static_cast<WebRtc_UWord8>(insert>>8);
+                            matrix[columOffset+offsetBytesInsert+1] |= static_cast<WebRtc_UWord8>(insert);
+#else
+                            WebRtc_UWord16* matrixU16 = (WebRtc_UWord16*)&(matrix[columOffset+offsetBytesInsert]);
+                            matrixU16[0] |= (s << insertShift);
+#endif
+                        }
+                        // prepare for writing next sample
+                        offsetSamplesInsert += audioSpecific.bitsPerSample;
+                        if(insertShift <= audioSpecific.bitsPerSample)
+                        {
+                            // next sample does not fit,
+                            // or fits exactly
+                            offsetSamplesInsert -= 8;
+                            offsetBytesInsert++;
+                        }
+                    }
+                }
+                break;
+            case 8:
+                {
+                    WebRtc_UWord32 sample = 0;
+                    for(WebRtc_UWord32 i = 0; i < channelLength; i++)
+                    {
+                        for(WebRtc_UWord32 j = 0; j < audioSpecific.channels; j++)
+                        {
+                            WebRtc_UWord32 columOffset = j*channelLength;
+                            matrix[columOffset + i] = payloadData[sample++];
+                        }
+                    }
+                }
+                break;
+            case 16:
+                {
+                    WebRtc_UWord32 sample = 0;
+                    for(WebRtc_UWord32 i = 0; i < channelLength; i +=2)
+                    {
+                        for(WebRtc_UWord32 j = 0; j < audioSpecific.channels; j++)
+                        {
+                            WebRtc_UWord32 columOffset = j*channelLength;
+                            matrix[columOffset + i] = payloadData[sample++];
+                            matrix[columOffset + i + 1] = payloadData[sample++];
+                        }
+                    }
+                }
+                break;
+            default:
+                assert(false);
+                return -1;
+            }
+            // we support up to 16-bit samples
+            // callback for all channels
+            for(int channel = 0; channel < audioSpecific.channels && retVal == 0; channel++)
+            {
+                // one callback per channel
+                rtpHeader->type.Audio.channel = channel+1;
+
+                if(channel == 0)
+                {
+                    // include the original packet only in the first callback
+                    retVal = CallbackOfReceivedPayloadData(&matrix[channel*channelLength],
+                                                           channelLength,
+                                                           rtpHeader);
+                } else
+                {
+                    retVal = CallbackOfReceivedPayloadData(&matrix[channel*channelLength],
+                                                           channelLength,
+                                                           rtpHeader);
+                }
+            }
+        } else if (audioSpecific.trueStereoCodec)
+        {
+            // One callback with the whole payload for each channel.
+            for(int channel = 1; (channel <= audioSpecific.channels) &&
+                (retVal == 0); channel++)
+            {
+                // One callback per channel.
+                rtpHeader->type.Audio.channel = channel;
+                retVal = CallbackOfReceivedPayloadData(payloadData,
+                                                       payloadLength,
+                                                       rtpHeader);
+            }
+        } else
+        {
+            for(int channel = 1; channel <= audioSpecific.channels && retVal == 0; channel++)
+            {
+                // one callback per channel
+                rtpHeader->type.Audio.channel = channel;
+
+                if(channel == 1)
+                {
+                    // include the original packet only in the first callback
+                    retVal = CallbackOfReceivedPayloadData(payloadData,
+                                                           channelLength,
+                                                           rtpHeader);
+                } else
+                {
+                    retVal = CallbackOfReceivedPayloadData(payloadData,
+                                                           channelLength,
+                                                           rtpHeader);
+                }
+                payloadData += channelLength;
+            }
+        }
+        return retVal;
+    }else
+    {
+        rtpHeader->type.Audio.channel = 1;
+        return CallbackOfReceivedPayloadData(payloadData,
+                                             payloadLength,
+                                             rtpHeader);
+    }
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_receiver_audio.h b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_audio.h
new file mode 100644
index 0000000..89cd062
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
+
+#include <set>
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_utility.h"
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// Audio-specific RTP receive helper: parses audio codec payloads and tracks
+// telephone-event (DTMF), CNG and G.722 receive state. Subclasses implement
+// CallbackOfReceivedPayloadData() to route parsed payloads onward.
+class RTPReceiverAudio
+{
+public:
+    RTPReceiverAudio(const WebRtc_Word32 id);
+    virtual ~RTPReceiverAudio();
+
+    // Replace the trace/module id used by this instance.
+    virtual void ChangeUniqueId(const WebRtc_Word32 id);
+
+    WebRtc_Word32 Init();
+
+    // Registers the sink for outband telephone-event notifications.
+    WebRtc_Word32 RegisterIncomingAudioCallback(RtpAudioFeedback* incomingMessagesCallback);
+
+    // Builds a Payload entry for an audio codec; returns NULL on failure.
+    ModuleRTPUtility::Payload* RegisterReceiveAudioPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate);
+
+    // Sample rate of the most recently received audio payload.
+    WebRtc_UWord32 AudioFrequency() const;
+
+    // Outband TelephoneEvent (DTMF) detection
+    WebRtc_Word32 SetTelephoneEventStatus(const bool enable,
+                                        const bool forwardToDecoder,
+                                        const bool detectEndOfTone);
+
+    // Is outband DTMF(AVT) turned on/off?
+    bool TelephoneEvent() const ;
+
+    // Is forwarding of outband telephone events turned on/off?
+    bool TelephoneEventForwardToDecoder() const ;
+
+    // Is TelephoneEvent configured with payload type payloadType
+    bool TelephoneEventPayloadType(const WebRtc_Word8 payloadType) const;
+
+    // Is CNG configured with payload type payloadType; on match, frequency is
+    // set to the corresponding CNG sample rate (out parameter).
+    bool CNGPayloadType(const WebRtc_Word8 payloadType, WebRtc_UWord32& frequency);
+
+    // Parses codec-specific audio payload data (e.g. per-channel delivery)
+    // and invokes CallbackOfReceivedPayloadData accordingly.
+    WebRtc_Word32 ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
+                                        const WebRtc_UWord8* payloadData,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const ModuleRTPUtility::AudioPayload& audioSpecific,
+                                        const bool isRED);
+
+    virtual WebRtc_Word32 ResetStatistics() = 0;
+
+protected:
+    // Delivers one parsed payload to the owner; implemented by subclasses.
+    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                      const WebRtc_UWord16 payloadSize,
+                                                      const WebRtcRTPHeader* rtpHeader) = 0;
+private:
+    WebRtc_Word32             _id;
+
+    // Frequency of the last received audio payload (Hz).
+    WebRtc_UWord32            _lastReceivedFrequency;
+
+    // Outband DTMF configuration/state.
+    bool                    _telephoneEvent;
+    bool                    _telephoneEventForwardToDecoder;
+    bool                    _telephoneEventDetectEndOfTone;
+    WebRtc_Word8            _telephoneEventPayloadType;
+    std::set<WebRtc_UWord8> _telephoneEventReported;
+
+    // CNG payload types per band (narrow/wide/super-wide) plus last seen.
+    WebRtc_Word8              _cngNBPayloadType;
+    WebRtc_Word8              _cngWBPayloadType;
+    WebRtc_Word8              _cngSWBPayloadType;
+    WebRtc_Word8                _cngPayloadType;
+
+    // G.722 is special: its RTP timestamp advances at a different rate than
+    // the number of samples per frame, so it needs dedicated tracking.
+    WebRtc_Word8              _G722PayloadType;
+    bool                    _lastReceivedG722;
+
+    // Guards _cbAudioFeedback.
+    CriticalSectionWrapper* _criticalSectionFeedback;
+    RtpAudioFeedback*   _cbAudioFeedback;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_receiver_video.cc b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_video.cc
new file mode 100644
index 0000000..d6a3c17
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -0,0 +1,517 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_receiver_video.h"
+
+#include <cassert> //assert
+#include <cstring>  // memcpy()
+#include <math.h>
+
+#include "critical_section_wrapper.h"
+#include "receiver_fec.h"
+#include "rtp_rtcp_impl.h"
+#include "rtp_utility.h"
+
+namespace webrtc {
+// Decodes a compact 16-bit bitrate field into bits per second: the low
+// 14 bits are the mantissa and the top 2 bits select a power-of-ten
+// multiplier (10^2 .. 10^5).
+WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
+{
+    // NOTE(review): float pow() is exact for these tiny exponents, but an
+    // integer lookup table would remove any rounding concern.
+    return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f,(2 + (x >> 14))));
+}
+
+// Default constructor: no owner module and id 0; used where feedback to a
+// ModuleRtpRtcpImpl is not needed (_rtpRtcp stays NULL).
+RTPReceiverVideo::RTPReceiverVideo():
+    _id(0),
+    _rtpRtcp(NULL),
+    _criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbVideoFeedback(NULL),
+    _criticalSectionReceiverVideo(
+        CriticalSectionWrapper::CreateCriticalSection()),
+    _completeFrame(false),
+    _packetStartTimeMs(0),
+    _receivedBW(),
+    _estimatedBW(0),
+    _currentFecFrameDecoded(false),
+    _receiveFEC(NULL),
+    _overUseDetector(),
+    _videoBitRate(),
+    _lastBitRateChange(0),
+    _packetOverHead(28)   // default IP+UDP header overhead estimate, bytes
+{
+    // Belt and braces: _receivedBW() above already value-initializes the
+    // array to zero; the memset keeps this explicit.
+    memset(_receivedBW, 0,sizeof(_receivedBW));
+}
+
+// Constructor used by ModuleRtpRtcpImpl: `owner` receives overuse-state
+// updates (OnOverUseStateUpdate) during ParseVideoCodecSpecific.
+RTPReceiverVideo::RTPReceiverVideo(const WebRtc_Word32 id,
+                                   ModuleRtpRtcpImpl* owner):
+    _id(id),
+    _rtpRtcp(owner),
+    _criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbVideoFeedback(NULL),
+    _criticalSectionReceiverVideo(
+        CriticalSectionWrapper::CreateCriticalSection()),
+    _completeFrame(false),
+    _packetStartTimeMs(0),
+    _receivedBW(),
+    _estimatedBW(0),
+    _currentFecFrameDecoded(false),
+    _receiveFEC(NULL),
+    _overUseDetector(),
+    _videoBitRate(),
+    _lastBitRateChange(0),
+    _packetOverHead(28)   // default IP+UDP header overhead estimate, bytes
+{
+    // Belt and braces: _receivedBW() above already value-initializes the
+    // array to zero; the memset keeps this explicit.
+    memset(_receivedBW, 0,sizeof(_receivedBW));
+}
+
+// Destructor: this class owns both critical sections and the optional
+// ReceiverFEC instance (created lazily in RegisterReceiveVideoPayload).
+RTPReceiverVideo::~RTPReceiverVideo()
+{
+    delete _criticalSectionFeedback;
+    delete _criticalSectionReceiverVideo;
+    delete _receiveFEC;
+}
+
+// Restores all receive state to its post-construction defaults.
+// Always succeeds and returns 0.
+WebRtc_Word32
+RTPReceiverVideo::Init()
+{
+    _completeFrame = false;
+    _packetStartTimeMs = 0;
+    _estimatedBW = 0;
+    _currentFecFrameDecoded = false;
+    _packetOverHead = 28;  // default IP+UDP header overhead estimate, bytes
+    // Clear the bandwidth history the same way the constructors do.
+    memset(_receivedBW, 0, sizeof(_receivedBW));
+    ResetOverUseDetector();
+    return 0;
+}
+
+// Replaces the trace/module id used by this instance.
+void
+RTPReceiverVideo::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+}
+
+// Registers (or clears, with NULL) the sink for network-change feedback;
+// guarded by _criticalSectionFeedback. Always returns 0.
+WebRtc_Word32
+RTPReceiverVideo::RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedback);
+    _cbVideoFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+// Forwards updated channel conditions (target bitrate, loss, RTT) to the
+// registered RtpVideoFeedback sink, if any. The callback is invoked while
+// _criticalSectionFeedback is held.
+void
+RTPReceiverVideo::UpdateBandwidthManagement(const WebRtc_UWord32 bitrateBps,
+                                            const WebRtc_UWord8 fractionLost,
+                                            const WebRtc_UWord16 roundTripTimeMs)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedback);
+    if(_cbVideoFeedback)
+    {
+        _cbVideoFeedback->OnNetworkChanged(_id,
+                                           bitrateBps,
+                                           fractionLost,
+                                           roundTripTimeMs);
+    }
+}
+
+// Creates a Payload entry for a video codec name. Recognized names:
+// "VP8" (VP8), "I420" (treated as generic video), "ULPFEC" (lazily creates
+// the ReceiverFEC and records its payload type). Returns NULL for unknown
+// names; otherwise a heap-allocated Payload the caller takes ownership of.
+ModuleRTPUtility::Payload* RTPReceiverVideo::RegisterReceiveVideoPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 maxRate) {
+  RtpVideoCodecTypes videoType = kRtpNoVideo;
+  if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
+    videoType = kRtpVp8Video;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
+    videoType = kRtpNoVideo;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6)) {
+    // store this
+    if (_receiveFEC == NULL) {
+      _receiveFEC = new ReceiverFEC(_id, this);
+    }
+    _receiveFEC->SetPayloadTypeFEC(payloadType);
+    videoType = kRtpFecVideo;
+  } else {
+    return NULL;
+  }
+  ModuleRTPUtility::Payload* payload =  new ModuleRTPUtility::Payload;
+
+  // Terminator is written first; strncpy below copies at most SIZE-1 bytes,
+  // so the name is always NUL-terminated.
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  payload->typeSpecific.Video.videoCodecType = videoType;
+  payload->typeSpecific.Video.maxRate = maxRate;
+  payload->audio = false;
+  return payload;
+}
+
+// Resets the bandwidth-estimation state: overuse detector, incoming video
+// bitrate tracker, and the last-bitrate-change timestamp.
+void RTPReceiverVideo::ResetOverUseDetector()
+{
+    _overUseDetector.Reset();
+    _videoBitRate.Init();
+    _lastBitRateChange = 0;
+}
+
+// called under _criticalSectionReceiverVideo
+// Pushes `bandwidth` into a sliding history of BW_HISTORY_SIZE samples and
+// returns the median of the non-zero samples, or 0 when no callback should
+// be triggered. Also updates _estimatedBW.
+WebRtc_UWord16
+RTPReceiverVideo::EstimateBandwidth(const WebRtc_UWord16 bandwidth)
+{
+    // received fragments
+    // estimate BW
+
+    // Shift the history one step left and mirror it into a scratch array
+    // that will be sorted (the history itself must stay in arrival order).
+    WebRtc_UWord16 bwSort[BW_HISTORY_SIZE];
+    for(int i = 0; i < BW_HISTORY_SIZE-1; i++)
+    {
+        _receivedBW[i] = _receivedBW[i+1];
+        bwSort[i] = _receivedBW[i+1];
+    }
+    _receivedBW[BW_HISTORY_SIZE-1] = bandwidth;
+    bwSort[BW_HISTORY_SIZE-1] = bandwidth;
+
+    // Bubble-sort the scratch copy ascending (35 elements; cost is trivial).
+    WebRtc_UWord16 temp;
+    for (int i = BW_HISTORY_SIZE-1; i >= 0; i--)
+    {
+        for (int j = 1; j <= i; j++)
+        {
+            if (bwSort[j-1] > bwSort[j])
+            {
+                temp = bwSort[j-1];
+                bwSort[j-1] = bwSort[j];
+                bwSort[j] = temp;
+            }
+        }
+    }
+    // Count the leading zeros (empty history slots) so the median is taken
+    // over real samples only.
+    int zeroCount = 0;
+    for (; zeroCount < BW_HISTORY_SIZE; zeroCount++)
+    {
+        if (bwSort[zeroCount]!= 0)
+        {
+            break;
+        }
+    }
+    // Index of the median of the non-zero tail of the sorted array.
+    WebRtc_UWord32 indexMedian = (BW_HISTORY_SIZE -1) - (BW_HISTORY_SIZE-zeroCount)/2;
+    WebRtc_UWord16 bandwidthMedian = bwSort[indexMedian];
+
+    if (bandwidthMedian > 0)
+    {
+        // NOTE(review): this compares the stored estimate against the NEWEST
+        // sample, not against bandwidthMedian -- looks intentional as a
+        // "nothing new" suppression, but confirm it shouldn't compare the
+        // median instead.
+        if (_estimatedBW == bandwidth)
+        {
+            // don't trigger a callback
+            bandwidthMedian = 0;
+        } else
+        {
+            _estimatedBW = bandwidthMedian;
+        }
+    } else
+    {
+        // All history slots were zero: nothing to report (values are
+        // unsigned, so 0 is the only "no estimate" sentinel).
+        bandwidthMedian = 0;
+    }
+
+    return bandwidthMedian;
+}
+
+// We hold no critsect when calling this, and we must not hold any critsect
+// when calling CallbackOfReceivedPayloadData.
+//
+// Updates the incoming-bitrate and overuse estimators, dispatches RED/FEC
+// packets to the FEC receiver and everything else to the codec-specific
+// parser, then reports the overuse state to the owner module (outside any
+// lock). Returns 0 on success, negative on failure.
+WebRtc_Word32
+RTPReceiverVideo::ParseVideoCodecSpecific(WebRtcRTPHeader* rtpHeader,
+                                          const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadDataLength,
+                                          const RtpVideoCodecTypes videoType,
+                                          const bool isRED,
+                                          const WebRtc_UWord8* incomingRtpPacket,
+                                          const WebRtc_UWord16 incomingRtpPacketSize,
+                                          const WebRtc_Word64 nowMS)
+{
+    WebRtc_Word32 retVal = 0;
+
+    _criticalSectionReceiverVideo->Enter();
+
+    // Feed the incoming video bitrate estimate (payload plus padding).
+    _videoBitRate.Update(payloadDataLength + rtpHeader->header.paddingLength,
+                         nowMS);
+
+    // Add headers, ideally we would like to include for instance
+    // Ethernet header here as well.
+    const WebRtc_UWord16 packetSize = payloadDataLength + _packetOverHead +
+        rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
+    _overUseDetector.Update(*rtpHeader, packetSize, nowMS);
+
+    if (isRED)
+    {
+        if(_receiveFEC == NULL)
+        {
+            // ULPFEC was never registered; cannot handle RED.
+            _criticalSectionReceiverVideo->Leave();
+            return -1;
+        }
+        bool FECpacket = false;
+        retVal = _receiveFEC->AddReceivedFECPacket(
+            rtpHeader,
+            incomingRtpPacket,
+            payloadDataLength,
+            FECpacket);
+        if (retVal != -1)
+          retVal = _receiveFEC->ProcessReceivedFEC();
+        _criticalSectionReceiverVideo->Leave();
+
+        if(retVal == 0 && FECpacket)
+        {
+            // Callback with the received FEC packet.
+            // The normal packets are delivered after parsing.
+            // This contains the original RTP packet header but with
+            // empty payload and data length.
+            rtpHeader->frameType = kFrameEmpty;
+            // We need this for the routing.
+            // BUG FIX: the original declared a fresh local retVal here, which
+            // shadowed the outer one and silently discarded the result of the
+            // CallbackOfReceivedPayloadData call below.
+            retVal = SetCodecType(videoType, rtpHeader);
+            if(retVal != 0)
+            {
+                return retVal;
+            }
+            retVal = CallbackOfReceivedPayloadData(NULL, 0, rtpHeader);
+        }
+    }else
+    {
+        // will leave the _criticalSectionReceiverVideo critsect
+        retVal = ParseVideoCodecSpecificSwitch(rtpHeader,
+                                               payloadData,
+                                               payloadDataLength,
+                                               videoType);
+    }
+
+    // Update the remote rate control object and update the overuse
+    // detector with the current rate control region.
+    _criticalSectionReceiverVideo->Enter();
+    const RateControlInput input(_overUseDetector.State(),
+                                 _videoBitRate.BitRate(nowMS),
+                                 _overUseDetector.NoiseVar());
+    _criticalSectionReceiverVideo->Leave();
+
+    // Call the callback outside critical section
+    if (_rtpRtcp) {
+      const RateControlRegion region = _rtpRtcp->OnOverUseStateUpdate(input);
+
+      _criticalSectionReceiverVideo->Enter();
+      _overUseDetector.SetRateControlRegion(region);
+      _criticalSectionReceiverVideo->Leave();
+    }
+
+    return retVal;
+}
+
+// Serializes `rtpHeader` into `dataBuffer` as an RTP fixed header (RFC 3550)
+// plus any CSRC list, and returns the number of bytes written. The caller
+// must provide a buffer large enough for 12 + 4*numCSRCs bytes.
+WebRtc_Word32
+RTPReceiverVideo::BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
+                                 WebRtc_UWord8* dataBuffer) const
+{
+    dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80);            // version 2
+    dataBuffer[1] = static_cast<WebRtc_UWord8>(rtpHeader->header.payloadType);
+    if (rtpHeader->header.markerBit)
+    {
+        dataBuffer[1] |= kRtpMarkerBitMask;  // MarkerBit is 1
+    }
+
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, rtpHeader->header.sequenceNumber);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, rtpHeader->header.timestamp);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, rtpHeader->header.ssrc);
+
+    WebRtc_Word32 rtpHeaderLength = 12;  // fixed RTP header size
+
+    // Add the CSRCs if any
+    if (rtpHeader->header.numCSRCs > 0)
+    {
+        // NOTE(review): the RTP CC field is 4 bits, so at most 15 CSRCs fit;
+        // this bound checks > 16 and the assert is compiled out in release,
+        // leaving the OR below able to spill into the version bits -- confirm
+        // upstream parsing guarantees numCSRCs <= 15.
+        if(rtpHeader->header.numCSRCs > 16)
+        {
+            // error
+            assert(false);
+        }
+        WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
+        for (WebRtc_UWord32 i = 0; i < rtpHeader->header.numCSRCs; ++i)
+        {
+            ModuleRTPUtility::AssignUWord32ToBuffer(ptr, rtpHeader->header.arrOfCSRCs[i]);
+            ptr +=4;
+        }
+        // Write the CSRC count into the low nibble of the first byte.
+        dataBuffer[0] = (dataBuffer[0]&0xf0) | rtpHeader->header.numCSRCs;
+
+        // Update length of header
+        rtpHeaderLength += sizeof(WebRtc_UWord32)*rtpHeader->header.numCSRCs;
+    }
+    return rtpHeaderLength;
+}
+
+// Called by the FEC receiver with a packet recovered from FEC. Rebuilds a
+// RED-wrapped copy of the original RTP packet (for potential relaying) and
+// feeds the recovered payload into the normal codec-specific parse path.
+// ParseVideoCodecSpecificSwitch releases _criticalSectionReceiverVideo.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32
+RTPReceiverVideo::ReceiveRecoveredPacketCallback(WebRtcRTPHeader* rtpHeader,
+                                                 const WebRtc_UWord8* payloadData,
+                                                 const WebRtc_UWord16 payloadDataLength)
+{
+    _criticalSectionReceiverVideo->Enter();
+
+    _currentFecFrameDecoded = true;
+
+    ModuleRTPUtility::Payload* payload = NULL;
+    if (PayloadTypeToPayload(rtpHeader->header.payloadType, payload) != 0)
+    {
+        // BUG FIX: release the critsect on this error path; the original
+        // returned while still holding _criticalSectionReceiverVideo, which
+        // would deadlock the next packet.
+        _criticalSectionReceiverVideo->Leave();
+        return -1;
+    }
+    // here we can re-create the original lost packet so that we can use it for the relay
+    // we need to re-create the RED header too
+    WebRtc_UWord8 recoveredPacket[IP_PACKET_SIZE];
+    WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)BuildRTPheader(rtpHeader, recoveredPacket);
+
+    const WebRtc_UWord8 REDForFECHeaderLength = 1;
+
+    // replace pltype
+    recoveredPacket[1] &= 0x80;             // reset, keep only the marker bit
+    recoveredPacket[1] += REDPayloadType(); // replace with RED payload type
+
+    // add RED header
+    recoveredPacket[rtpHeaderLength] = rtpHeader->header.payloadType; // f-bit always 0
+
+    memcpy(recoveredPacket + rtpHeaderLength + REDForFECHeaderLength, payloadData, payloadDataLength);
+
+    // NOTE(review): recoveredPacket is built but not consumed here --
+    // presumably reserved for a relay path; confirm before removing.
+    return ParseVideoCodecSpecificSwitch(rtpHeader,
+                                         payloadData,
+                                         payloadDataLength,
+                                         payload->typeSpecific.Video.videoCodecType);
+}
+
+// Maps the RTP-level codec enum onto the codec field that is handed to the
+// payload callbacks. Always returns 0 (kept as an error code for interface
+// symmetry with the other parse helpers).
+WebRtc_Word32 RTPReceiverVideo::SetCodecType(const RtpVideoCodecTypes videoType,
+                                             WebRtcRTPHeader* rtpHeader) const {
+  if (videoType == kRtpVp8Video) {
+    rtpHeader->type.Video.codec = kRTPVideoVP8;
+  } else if (videoType == kRtpFecVideo) {
+    rtpHeader->type.Video.codec = kRTPVideoFEC;
+  } else if (videoType == kRtpNoVideo) {
+    rtpHeader->type.Video.codec = kRTPVideoGeneric;
+  }
+  return 0;
+}
+
+// Dispatches a (non-RED) payload to the parser for its codec type.
+// Locking contract: called with _criticalSectionReceiverVideo HELD; every
+// path out of this function releases it (either via the Receive* helpers
+// or explicitly below). Returns the receive result, or -1 for kRtpFecVideo,
+// which must not reach this switch.
+WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
+    WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadDataLength,
+    const RtpVideoCodecTypes videoType) {
+  WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
+  if (retVal != 0) {
+    // NOTE(review): SetCodecType currently always returns 0, so this path
+    // would leave the critsect held if it ever fired -- confirm if it grows
+    // failure modes.
+    return retVal;
+  }
+  // All receive functions release _criticalSectionReceiverVideo before
+  // returning.
+  switch (videoType) {
+    case kRtpNoVideo:
+      return ReceiveGenericCodec(rtpHeader, payloadData, payloadDataLength);
+    case kRtpVp8Video:
+      return ReceiveVp8Codec(rtpHeader, payloadData, payloadDataLength);
+    case kRtpFecVideo:
+      break;
+  }
+  _criticalSectionReceiverVideo->Leave();
+  return -1;
+}
+
+// Parses a VP8 payload descriptor, fills in the VP8-specific fields of the
+// RTP header struct, and delivers the depacketized data via
+// CallbackOfReceivedPayloadData. Called with _criticalSectionReceiverVideo
+// held; the lock is released as soon as parsing only touches local data.
+// Returns 0 on success, -1 on parse or callback failure.
+WebRtc_Word32
+RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtpHeader,
+                                  const WebRtc_UWord8* payloadData,
+                                  const WebRtc_UWord16 payloadDataLength)
+{
+    bool success;
+    ModuleRTPUtility::RTPPayload parsedPacket;
+    if (payloadDataLength == 0)
+    {
+        // Zero-length payload: treated as a valid "empty" packet below.
+        success = true;
+        parsedPacket.info.VP8.dataLength = 0;
+    } else
+    {
+        ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpVp8Video,
+                                                            payloadData,
+                                                            payloadDataLength,
+                                                           _id);
+ 
+        success = rtpPayloadParser.Parse(parsedPacket);
+    }
+    // from here down we only work on local data
+    _criticalSectionReceiverVideo->Leave();
+
+    if (!success)
+    {
+        return -1;
+    }
+    if (parsedPacket.info.VP8.dataLength == 0)
+    {
+        // we have an "empty" VP8 packet, it's ok, could be one way video
+        // Inform the jitter buffer about this packet.
+        rtpHeader->frameType = kFrameEmpty;
+        if (CallbackOfReceivedPayloadData(NULL, 0, rtpHeader) != 0)
+        {
+            return -1;
+        }
+        return 0;
+    }
+    rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ? kVideoFrameKey : kVideoFrameDelta;
+
+    // Copy the parsed VP8 descriptor fields into the callback header,
+    // substituting the kNo* sentinels for absent optional fields.
+    RTPVideoHeaderVP8 *toHeader = &rtpHeader->type.Video.codecHeader.VP8;
+    ModuleRTPUtility::RTPPayloadVP8 *fromHeader = &parsedPacket.info.VP8;
+
+    // First packet of a frame = start of partition 0.
+    rtpHeader->type.Video.isFirstPacket = fromHeader->beginningOfPartition
+        && (fromHeader->partitionID == 0);
+    toHeader->pictureId = fromHeader->hasPictureID ? fromHeader->pictureID :
+                          kNoPictureId;
+    toHeader->tl0PicIdx = fromHeader->hasTl0PicIdx ? fromHeader->tl0PicIdx :
+                          kNoTl0PicIdx;
+    if (fromHeader->hasTID) {
+      toHeader->temporalIdx = fromHeader->tID;
+      toHeader->layerSync = fromHeader->layerSync;
+    } else {
+      toHeader->temporalIdx = kNoTemporalIdx;
+      toHeader->layerSync = false;
+    }
+    toHeader->keyIdx = fromHeader->hasKeyIdx ? fromHeader->keyIdx : kNoKeyIdx;
+
+    toHeader->frameWidth = fromHeader->frameWidth;
+    toHeader->frameHeight = fromHeader->frameHeight;
+
+    toHeader->partitionId = fromHeader->partitionID;
+    toHeader->beginningOfPartition = fromHeader->beginningOfPartition;
+
+    if(CallbackOfReceivedPayloadData(parsedPacket.info.VP8.data,
+                                     parsedPacket.info.VP8.dataLength,
+                                     rtpHeader) != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+// Delivers a generic (unparsed) video payload. Every packet is labelled a
+// key frame; the first packet of a new frame is detected as "next sequence
+// number with a new timestamp". Called with _criticalSectionReceiverVideo
+// held; released before the callback. Returns 0 on success, -1 on failure.
+WebRtc_Word32
+RTPReceiverVideo::ReceiveGenericCodec(WebRtcRTPHeader* rtpHeader,
+                                      const WebRtc_UWord8* payloadData,
+                                      const WebRtc_UWord16 payloadDataLength)
+{
+    rtpHeader->frameType = kVideoFrameKey;
+
+    // BUG FIX: cast the incremented sequence number back to 16 bits so the
+    // comparison also holds across the 0xFFFF -> 0 wrap. Integer promotion
+    // made the original compare 65536 against 0 and miss the frame start.
+    if((static_cast<WebRtc_UWord16>(SequenceNumber() + 1) ==
+        rtpHeader->header.sequenceNumber) &&
+        (TimeStamp() != rtpHeader->header.timestamp))
+    {
+        rtpHeader->type.Video.isFirstPacket = true;
+    }
+    // No critsects may be held when invoking CallbackOfReceivedPayloadData.
+    _criticalSectionReceiverVideo->Leave();
+
+    if(CallbackOfReceivedPayloadData(payloadData, payloadDataLength, rtpHeader) != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+// Sets the per-packet transport overhead (bytes) added to the payload size
+// when feeding the overuse detector; defaults to 28 (IP + UDP headers).
+void RTPReceiverVideo::SetPacketOverHead(WebRtc_UWord16 packetOverHead)
+{
+    _packetOverHead = packetOverHead;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_receiver_video.h b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_video.h
new file mode 100644
index 0000000..5e0138a
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -0,0 +1,139 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_utility.h"
+
+#include "typedefs.h"
+
+#include "overuse_detector.h"
+#include "remote_rate_control.h"
+#include "Bitrate.h"
+
+namespace webrtc {
+class ReceiverFEC;
+class ModuleRtpRtcpImpl;
+class CriticalSectionWrapper;
+
+// Video-specific RTP receive helper: parses codec payloads (VP8, generic),
+// handles RED/ULPFEC recovery, and drives the receive-side bandwidth
+// estimation (overuse detector + remote rate control). Subclasses supply
+// the pure-virtual accessors and the payload delivery callback.
+class RTPReceiverVideo
+{
+public:
+    RTPReceiverVideo();
+    RTPReceiverVideo(const WebRtc_Word32 id, ModuleRtpRtcpImpl* owner);
+
+    virtual ~RTPReceiverVideo();
+
+    // Replace the trace/module id used by this instance.
+    virtual void ChangeUniqueId(const WebRtc_Word32 id);
+
+    // Restore all receive state to post-construction defaults.
+    WebRtc_Word32 Init();
+
+    WebRtc_Word32 RegisterIncomingVideoCallback(
+        RtpVideoFeedback* incomingMessagesCallback);
+
+    // Forward channel-condition changes to the registered feedback sink.
+    void UpdateBandwidthManagement(const WebRtc_UWord32 bitrateBps,
+                                   const WebRtc_UWord8 fractionLost,
+                                   const WebRtc_UWord16 roundTripTimeMs);
+
+    // Build a Payload entry for a video codec name ("VP8", "I420",
+    // "ULPFEC"); caller takes ownership. NULL for unknown names.
+    ModuleRTPUtility::Payload* RegisterReceiveVideoPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 maxRate);
+
+    // Main receive entry: updates BWE state and dispatches the payload to
+    // the FEC receiver (isRED) or the codec-specific parser.
+    WebRtc_Word32 ParseVideoCodecSpecific(
+        WebRtcRTPHeader* rtpHeader,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadDataLength,
+        const RtpVideoCodecTypes videoType,
+        const bool isRED,
+        const WebRtc_UWord8* incomingRtpPacket,
+        const WebRtc_UWord16 incomingRtpPacketSize,
+        const WebRtc_Word64 nowMS);
+
+    // Invoked by ReceiverFEC for each packet recovered from FEC data.
+    virtual WebRtc_Word32 ReceiveRecoveredPacketCallback(
+        WebRtcRTPHeader* rtpHeader,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadDataLength);
+
+    // Transport overhead (bytes) added per packet for BWE accounting.
+    void SetPacketOverHead(WebRtc_UWord16 packetOverHead);
+
+protected:
+    void ResetOverUseDetector();
+
+    // Median-of-history receive bandwidth estimate; see the .cc for details.
+    WebRtc_UWord16 EstimateBandwidth( const WebRtc_UWord16 bufferLength);
+
+    // Delivers one parsed payload to the owner; implemented by subclasses.
+    // Contract: no critsects may be held by the caller.
+    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadSize,
+        const WebRtcRTPHeader* rtpHeader) = 0;
+
+    // Last received RTP timestamp / sequence number (from the subclass).
+    virtual WebRtc_UWord32 TimeStamp() const = 0;
+    virtual WebRtc_UWord16 SequenceNumber() const = 0;
+
+    virtual WebRtc_UWord32 PayloadTypeToPayload(
+        const WebRtc_UWord8 payloadType,
+        ModuleRTPUtility::Payload*& payload) const = 0;
+
+    virtual bool RetransmitOfOldPacket(
+        const WebRtc_UWord16 sequenceNumber,
+        const WebRtc_UWord32 rtpTimeStamp) const  = 0;
+
+    // RED payload type used when re-wrapping FEC-recovered packets.
+    virtual WebRtc_Word8 REDPayloadType() const = 0;
+
+    WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType,
+                               WebRtcRTPHeader* rtpHeader) const;
+
+    // Dispatch to ReceiveGenericCodec/ReceiveVp8Codec; releases
+    // _criticalSectionReceiverVideo on every path.
+    WebRtc_Word32 ParseVideoCodecSpecificSwitch(
+        WebRtcRTPHeader* rtpHeader,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadDataLength,
+        const RtpVideoCodecTypes videoType);
+
+    WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader,
+                                      const WebRtc_UWord8* payloadData,
+                                      const WebRtc_UWord16 payloadDataLength);
+
+    WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
+                                  const WebRtc_UWord8* payloadData,
+                                  const WebRtc_UWord16 payloadDataLength);
+
+    // Serializes an RFC 3550 RTP header (+ CSRCs) into dataBuffer;
+    // returns the byte count written.
+    WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
+                                 WebRtc_UWord8* dataBuffer) const;
+
+private:
+    WebRtc_Word32             _id;
+    ModuleRtpRtcpImpl*        _rtpRtcp;    // owner; receives overuse updates
+
+    // Guards _cbVideoFeedback.
+    CriticalSectionWrapper*   _criticalSectionFeedback;
+    RtpVideoFeedback*         _cbVideoFeedback;
+
+    // Guards the receive/BWE state below.
+    CriticalSectionWrapper*   _criticalSectionReceiverVideo;
+
+    // bandwidth
+    bool                      _completeFrame;
+    WebRtc_UWord32            _packetStartTimeMs;
+    WebRtc_UWord16            _receivedBW[BW_HISTORY_SIZE];
+    WebRtc_UWord16            _estimatedBW;
+
+    // FEC (ReceiverFEC is owned; created lazily on ULPFEC registration).
+    bool                      _currentFecFrameDecoded;
+    ReceiverFEC*              _receiveFEC;
+
+    // BWE
+    OverUseDetector           _overUseDetector;
+    BitRateStats              _videoBitRate;
+    WebRtc_Word64             _lastBitRateChange;
+    WebRtc_UWord16            _packetOverHead;  // bytes per packet, default 28
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_rtcp.gypi b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp.gypi
new file mode 100644
index 0000000..48fc05f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp.gypi
@@ -0,0 +1,102 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Defines the 'rtp_rtcp' module library: common RTP/RTCP core plus the
+# audio- and video-specific sender/receiver sources.
+{
+  'targets': [
+    {
+      'target_name': 'rtp_rtcp',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      # Dependents get the same public include paths.
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        # Common
+        '../interface/rtp_rtcp.h',
+        '../interface/rtp_rtcp_defines.h',
+        'bitrate.cc',
+        'Bitrate.h',
+        'rtp_rtcp_config.h',
+        'rtp_rtcp_impl.cc',
+        'rtp_rtcp_impl.h',
+        'rtcp_receiver.cc',
+        'rtcp_receiver.h',
+        'rtcp_receiver_help.cc',
+        'rtcp_receiver_help.h',
+        'rtcp_sender.cc',
+        'rtcp_sender.h',
+        'rtcp_utility.cc',
+        'rtcp_utility.h',
+        'rtp_header_extension.cc',
+        'rtp_header_extension.h',
+        'rtp_receiver.cc',
+        'rtp_receiver.h',
+        'rtp_sender.cc',
+        'rtp_sender.h',
+        'rtp_utility.cc',
+        'rtp_utility.h',
+        'ssrc_database.cc',
+        'ssrc_database.h',
+        'tmmbr_help.cc',
+        'tmmbr_help.h',
+        # Audio Files
+        'dtmf_queue.cc',
+        'dtmf_queue.h',
+        'rtp_receiver_audio.cc',
+        'rtp_receiver_audio.h',
+        'rtp_sender_audio.cc',
+        'rtp_sender_audio.h',
+        # Video Files
+        'bandwidth_management.cc',
+        'bandwidth_management.h',
+        'bwe_defines.h',
+        'fec_private_tables.h',
+        'forward_error_correction.cc',
+        'forward_error_correction.h',
+        'forward_error_correction_internal.cc',
+        'forward_error_correction_internal.h',
+        'overuse_detector.cc',
+        'overuse_detector.h',
+        'remote_rate_control.cc',
+        'remote_rate_control.h',
+        'rtp_packet_history.cc',
+        'rtp_packet_history.h',
+        'rtp_receiver_video.cc',
+        'rtp_receiver_video.h',
+        'rtp_sender_video.cc',
+        'rtp_sender_video.h',
+        'receiver_fec.cc',
+        'receiver_fec.h',
+        'video_codec_information.h',
+        'rtp_format_vp8.cc',
+        'rtp_format_vp8.h',
+        'transmission_bucket.cc',
+        'transmission_bucket.h',
+        'vp8_partition_aggregator.cc',
+        'vp8_partition_aggregator.h',
+        # Mocks
+        '../mocks/mock_rtp_rtcp.h',
+      ], # source
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_config.h b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_config.h
new file mode 100644
index 0000000..a0eed88
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_config.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
+
+// Configuration file for RTP utilities (RTPSender, RTPReceiver ...)
+namespace webrtc {
+// Module Process() scheduling intervals (milliseconds).
+enum { kRtpRtcpMaxIdleTimeProcess = 5,
+       kRtpRtcpBitrateProcessTimeMs = 10,
+       kRtpRtcpPacketTimeoutProcessTimeMs = 100 };
+
+enum { NACK_PACKETS_MAX_SIZE    = 256 }; // in packets
+enum { NACK_BYTECOUNT_SIZE      = 60};   // size of our NACK history
+
+// RTCP report intervals and size limits.
+enum { RTCP_INTERVAL_VIDEO_MS       = 1000 };
+enum { RTCP_INTERVAL_AUDIO_MS       = 5000 };
+enum { RTCP_SEND_BEFORE_KEY_FRAME_MS= 100 };
+enum { RTCP_MAX_REPORT_BLOCKS       = 31};      // RFC 3550 page 37
+enum { RTCP_MIN_FRAME_LENGTH_MS     = 17};
+enum { kRtcpAppCode_DATA_SIZE           = 32*4};    // multiple of 4, this is not a limitation of the size
+enum { RTCP_RPSI_DATA_SIZE          = 30};
+enum { RTCP_NUMBER_OF_SR            = 60 };
+
+enum { MAX_NUMBER_OF_TEMPORAL_ID    = 8 };          // RFC
+enum { MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID  = 128 };// RFC
+
+// Sample count for the receive-side bandwidth median filter.
+enum { BW_HISTORY_SIZE          = 35};
+
+// Lowest target bitrates (kbps) accepted by bandwidth management.
+#define MIN_AUDIO_BW_MANAGEMENT_BITRATE   6
+#define MIN_VIDEO_BW_MANAGEMENT_BITRATE   30
+
+enum { DTMF_OUTBAND_MAX         = 20};
+
+enum { RTP_MAX_BURST_SLEEP_TIME = 500 };
+enum { RTP_AUDIO_LEVEL_UNIQUE_ID = 0xbede };
+enum { RTP_MAX_PACKETS_PER_FRAME= 512 }; // must be multiple of 32
+} // namespace webrtc
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
new file mode 100644
index 0000000..08da8f8
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -0,0 +1,2971 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"
+#include "rtp_rtcp_impl.h"
+#include "trace.h"
+
+#ifdef MATLAB
+#include "../test/BWEStandAlone/MatlabPlot.h"
+extern MatlabEngine eng; // global variable defined elsewhere
+#endif
+
+#include <string.h> //memcpy
+#include <cassert> //assert
+
+// local for this file
+namespace {
+
+const float FracMS = 4.294967296E6f;
+
+}  // namespace
+
+#ifdef _WIN32
+// disable warning C4355: 'this' : used in base member initializer list
+#pragma warning(disable : 4355)
+#endif
+
+namespace webrtc {
+
+const WebRtc_UWord16 kDefaultRtt = 200;
+
+RtpRtcp* RtpRtcp::CreateRtpRtcp(const WebRtc_Word32 id,
+                                bool audio) {
+  if(audio) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id, "CreateRtpRtcp(audio)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id, "CreateRtpRtcp(video)");
+  }
+  // ModuleRTPUtility::GetSystemClock() creates a new instance of a system
+  // clock implementation. The OwnsClock() function informs the module that
+  // it is responsible for deleting the instance.
+  ModuleRtpRtcpImpl* rtp_rtcp_instance = new ModuleRtpRtcpImpl(id,
+      audio, ModuleRTPUtility::GetSystemClock());
+  rtp_rtcp_instance->OwnsClock();
+  return rtp_rtcp_instance;
+}
+
+RtpRtcp* RtpRtcp::CreateRtpRtcp(const WebRtc_Word32 id,
+                                const bool audio,
+                                RtpRtcpClock* clock) {
+  if (audio) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 id,
+                 "CreateRtpRtcp(audio)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 id,
+                 "CreateRtpRtcp(video)");
+  }
+  return new ModuleRtpRtcpImpl(id, audio, clock);
+}
+
+void RtpRtcp::DestroyRtpRtcp(RtpRtcp* module) {
+  if (module) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 static_cast<ModuleRtpRtcpImpl*>(module)->Id(),
+                 "DestroyRtpRtcp()");
+    delete static_cast<ModuleRtpRtcpImpl*>(module);
+  }
+}
+
+ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const WebRtc_Word32 id,
+                                     const bool audio,
+                                     RtpRtcpClock* clock):
+  TMMBRHelp(audio),
+  _rtpSender(id, audio, clock),
+  _rtpReceiver(id, audio, clock, this),
+  _rtcpSender(id, audio, clock, this),
+  _rtcpReceiver(id, clock, this),
+  _owns_clock(false),
+  _clock(*clock),
+  _id(id),
+  _audio(audio),
+  _collisionDetected(false),
+  _lastProcessTime(clock->GetTimeInMS()),
+  _lastBitrateProcessTime(clock->GetTimeInMS()),
+  _lastPacketTimeoutProcessTime(clock->GetTimeInMS()),
+  _packetOverHead(28), // IPV4 UDP
+  _criticalSectionModulePtrs(CriticalSectionWrapper::CreateCriticalSection()),
+  _criticalSectionModulePtrsFeedback(
+    CriticalSectionWrapper::CreateCriticalSection()),
+  _defaultModule(NULL),
+  _audioModule(NULL),
+  _videoModule(NULL),
+  _deadOrAliveActive(false),
+  _deadOrAliveTimeoutMS(0),
+  _deadOrAliveLastTimer(0),
+  _bandwidthManagement(id),
+  _receivedNTPsecsAudio(0),
+  _receivedNTPfracAudio(0),
+  _RTCPArrivalTimeSecsAudio(0),
+  _RTCPArrivalTimeFracAudio(0),
+  _nackMethod(kNackOff),
+  _nackLastTimeSent(0),
+  _nackLastSeqNumberSent(0),
+  _simulcast(false),
+  _keyFrameReqMethod(kKeyFrameReqFirRtp)
+#ifdef MATLAB
+  , _plot1(NULL)
+#endif
+{
+  _sendVideoCodec.codecType = kVideoCodecUnknown;
+  // make sure that RTCP objects are aware of our SSRC
+  WebRtc_UWord32 SSRC = _rtpSender.SSRC();
+  _rtcpSender.SetSSRC(SSRC);
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+
+  // make sure to unregister this module from other modules
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+
+  if (defaultInstance) {
+    // deregister for the default module
+    // will go in to the child modules and remove it self
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      _childModules.erase(it);
+      if (module) {
+        module->DeRegisterDefaultModule();
+      }
+      it = _childModules.begin();
+    }
+  } else {
+    // deregister for the child modules
+    // will go in to the default and remove it self
+    DeRegisterDefaultModule();
+  }
+
+  if (_audio) {
+    DeRegisterVideoModule();
+  } else {
+    DeRegisterSyncModule();
+  }
+
+#ifdef MATLAB
+  if (_plot1) {
+    eng.DeletePlot(_plot1);
+    _plot1 = NULL;
+  }
+#endif
+
+  delete _criticalSectionModulePtrs;
+  delete _criticalSectionModulePtrsFeedback;
+  if (_owns_clock) {
+    delete &_clock;
+  }
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::ChangeUniqueId(const WebRtc_Word32 id) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "ChangeUniqueId(new id:%d)", id);
+
+  _id = id;
+
+  _rtpReceiver.ChangeUniqueId(id);
+  _rtcpReceiver.ChangeUniqueId(id);
+  _rtpSender.ChangeUniqueId(id);
+  _rtcpSender.ChangeUniqueId(id);
+  return 0;
+}
+
+// default encoder that we need to multiplex out
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterDefaultModule(RtpRtcp* module) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterDefaultModule(module:0x%x)", module);
+
+  if (module == NULL) {
+    return -1;
+  }
+  if (module == this) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceRtpRtcp,
+                 _id,
+                 "RegisterDefaultModule can't register self as default");
+    return -1;
+  }
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+
+  if (_defaultModule) {
+    _defaultModule->DeRegisterChildModule(this);
+  }
+  _defaultModule = (ModuleRtpRtcpImpl*)module;
+  _defaultModule->RegisterChildModule(this);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeRegisterDefaultModule() {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterDefaultModule()");
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  if (_defaultModule) {
+    _defaultModule->DeRegisterChildModule(this);
+    _defaultModule = NULL;
+  }
+  return 0;
+}
+
+bool ModuleRtpRtcpImpl::DefaultModuleRegistered() {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DefaultModuleRegistered()");
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  if (_defaultModule) {
+    return true;
+  }
+  return false;
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::NumberChildModules() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "NumberChildModules");
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  CriticalSectionScoped doubleLock(_criticalSectionModulePtrsFeedback);
+  // we use two locks for protecting _childModules one
+  // (_criticalSectionModulePtrsFeedback) for incoming  messages
+  // (BitrateSent and UpdateTMMBR) and _criticalSectionModulePtrs for
+  //  all outgoing messages sending packets etc
+
+  return _childModules.size();
+}
+
+void ModuleRtpRtcpImpl::RegisterChildModule(RtpRtcp* module) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterChildModule(module:0x%x)",
+               module);
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+
+  CriticalSectionScoped doubleLock(_criticalSectionModulePtrsFeedback);
+  // we use two locks for protecting _childModules one
+  // (_criticalSectionModulePtrsFeedback) for incoming
+  // messages (BitrateSent and UpdateTMMBR) and _criticalSectionModulePtrs
+  //  for all outgoing messages sending packets etc
+  _childModules.push_back((ModuleRtpRtcpImpl*)module);
+}
+
+void ModuleRtpRtcpImpl::DeRegisterChildModule(RtpRtcp* removeModule) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterChildModule(module:0x%x)", removeModule);
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+
+  CriticalSectionScoped doubleLock(_criticalSectionModulePtrsFeedback);
+
+  std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+  while (it != _childModules.end()) {
+    RtpRtcp* module = *it;
+    if (module == removeModule) {
+      _childModules.erase(it);
+      return;
+    }
+    it++;
+  }
+}
+
+// Lip-sync between the voice and video engines.
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSyncModule(RtpRtcp* audioModule) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterSyncModule(module:0x%x)",
+               audioModule);
+
+  if (audioModule == NULL) {
+    return -1;
+  }
+  if (_audio) {
+    return -1;
+  }
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  _audioModule = (ModuleRtpRtcpImpl*)audioModule;
+  return _audioModule->RegisterVideoModule(this);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeRegisterSyncModule() {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterSyncModule()");
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  if (_audioModule) {
+    ModuleRtpRtcpImpl* audioModule = _audioModule;
+    _audioModule = NULL;
+    _receivedNTPsecsAudio = 0;
+    _receivedNTPfracAudio = 0;
+    _RTCPArrivalTimeSecsAudio = 0;
+    _RTCPArrivalTimeFracAudio = 0;
+    audioModule->DeRegisterVideoModule();
+  }
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterVideoModule(RtpRtcp* videoModule) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterVideoModule(module:0x%x)",
+               videoModule);
+
+  if (videoModule == NULL) {
+    return -1;
+  }
+  if (!_audio) {
+    return -1;
+  }
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  _videoModule = (ModuleRtpRtcpImpl*)videoModule;
+  return 0;
+}
+
+void ModuleRtpRtcpImpl::DeRegisterVideoModule() {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterVideoModule()");
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  if (_videoModule) {
+    ModuleRtpRtcpImpl* videoModule = _videoModule;
+    _videoModule = NULL;
+    videoModule->DeRegisterSyncModule();
+  }
+}
+
+// returns the number of milliseconds until the module want a worker thread
+// to call Process
+WebRtc_Word32 ModuleRtpRtcpImpl::TimeUntilNextProcess() {
+  const WebRtc_UWord32 now = _clock.GetTimeInMS();
+  return kRtpRtcpMaxIdleTimeProcess - (now - _lastProcessTime);
+}
+
+// Process any pending tasks such as timeouts
+// non time critical events
+WebRtc_Word32 ModuleRtpRtcpImpl::Process() {
+  const WebRtc_UWord32 now = _clock.GetTimeInMS();
+  _lastProcessTime = now;
+
+  _rtpSender.ProcessSendToNetwork();
+
+  if (now >= _lastPacketTimeoutProcessTime +
+      kRtpRtcpPacketTimeoutProcessTimeMs) {
+    _rtpReceiver.PacketTimeout();
+    _rtcpReceiver.PacketTimeout();
+    _lastPacketTimeoutProcessTime = now;
+  }
+
+  if (now >= _lastBitrateProcessTime + kRtpRtcpBitrateProcessTimeMs) {
+    _rtpSender.ProcessBitrate();
+    _rtpReceiver.ProcessBitrate();
+    _lastBitrateProcessTime = now;
+  }
+
+  ProcessDeadOrAliveTimer();
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (!defaultInstance && _rtcpSender.TimeToSendRTCPReport()) {
+    WebRtc_UWord16 max_rtt = 0;
+    if (_rtcpSender.Sending()) {
+      std::vector<RTCPReportBlock> receive_blocks;
+      _rtcpReceiver.StatisticsReceived(&receive_blocks);
+      for (std::vector<RTCPReportBlock>::iterator it = receive_blocks.begin();
+           it != receive_blocks.end(); ++it) {
+        WebRtc_UWord16 rtt = 0;
+        _rtcpReceiver.RTT(it->remoteSSRC, &rtt, NULL, NULL, NULL);
+        max_rtt = (rtt > max_rtt) ? rtt : max_rtt;
+      }
+    } else {
+      // We're only receiving, i.e. this module doesn't have its own RTT
+      // estimate. Use the RTT set by a sending channel using the same default
+      // module.
+      max_rtt = _rtcpReceiver.RTT();
+    }
+    if (max_rtt == 0) {
+      // No valid estimate available, i.e. no sending channel using the same
+      // default module or no RTCP received yet.
+      max_rtt = kDefaultRtt;
+    }
+    if (REMB() && _rtcpSender.ValidBitrateEstimate()) {
+      unsigned int target_bitrate =
+        _rtcpSender.CalculateNewTargetBitrate(max_rtt);
+      _rtcpSender.UpdateRemoteBitrateEstimate(target_bitrate);
+    } else if (TMMBR()) {
+      _rtcpSender.CalculateNewTargetBitrate(max_rtt);
+    }
+    _rtcpSender.SendRTCP(kRtcpReport);
+  }
+
+  if (_rtpSender.RTPKeepalive()) {
+    // check time to send RTP keep alive
+    if (_rtpSender.TimeToSendRTPKeepalive()) {
+      _rtpSender.SendRTPKeepalivePacket();
+    }
+  }
+
+  if (UpdateRTCPReceiveInformationTimers()) {
+    // a receiver has timed out
+    UpdateTMMBR();
+  }
+  return 0;
+}
+
+/**
+*   Receiver
+*/
+
+WebRtc_Word32 ModuleRtpRtcpImpl::InitReceiver() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "InitReceiver()");
+
+  _packetOverHead = 28; // default is IPV4 UDP
+  _receivedNTPsecsAudio = 0;
+  _receivedNTPfracAudio = 0;
+  _RTCPArrivalTimeSecsAudio = 0;
+  _RTCPArrivalTimeFracAudio = 0;
+
+  WebRtc_Word32 ret = _rtpReceiver.Init();
+  if (ret < 0) {
+    return ret;
+  }
+  _rtpReceiver.SetPacketOverHead(_packetOverHead);
+  return ret;
+}
+
+void ModuleRtpRtcpImpl::ProcessDeadOrAliveTimer() {
+  if (_deadOrAliveActive) {
+    const WebRtc_UWord32 now = _clock.GetTimeInMS();
+    if (now > _deadOrAliveTimeoutMS + _deadOrAliveLastTimer) {
+      // RTCP is alive if we have received a report the last 12 seconds
+      _deadOrAliveLastTimer += _deadOrAliveTimeoutMS;
+
+      bool RTCPalive = false;
+      if (_rtcpReceiver.LastReceived() + 12000 > now) {
+        RTCPalive = true;
+      }
+      _rtpReceiver.ProcessDeadOrAlive(RTCPalive, now);
+    }
+  }
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetPeriodicDeadOrAliveStatus(
+  const bool enable,
+  const WebRtc_UWord8 sampleTimeSeconds) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetPeriodicDeadOrAliveStatus(enable, %d)",
+                 sampleTimeSeconds);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetPeriodicDeadOrAliveStatus(disable)");
+  }
+  if (sampleTimeSeconds == 0) {
+    return -1;
+  }
+  _deadOrAliveActive = enable;
+  _deadOrAliveTimeoutMS = sampleTimeSeconds * 1000;
+  // trigger the first after one period
+  _deadOrAliveLastTimer = _clock.GetTimeInMS();
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::PeriodicDeadOrAliveStatus(
+    bool& enable,
+    WebRtc_UWord8& sampleTimeSeconds) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "PeriodicDeadOrAliveStatus()");
+
+  enable = _deadOrAliveActive;
+  sampleTimeSeconds = (WebRtc_UWord8)(_deadOrAliveTimeoutMS / 1000);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetPacketTimeout(
+    const WebRtc_UWord32 RTPtimeoutMS,
+    const WebRtc_UWord32 RTCPtimeoutMS) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetPacketTimeout(%u,%u)",
+               RTPtimeoutMS,
+               RTCPtimeoutMS);
+
+  if (_rtpReceiver.SetPacketTimeout(RTPtimeoutMS) == 0) {
+    return _rtcpReceiver.SetPacketTimeout(RTCPtimeoutMS);
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterReceivePayload(
+  const CodecInst& voiceCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterReceivePayload(voiceCodec)");
+
+  return _rtpReceiver.RegisterReceivePayload(
+           voiceCodec.plname,
+           voiceCodec.pltype,
+           voiceCodec.plfreq,
+           voiceCodec.channels,
+           (voiceCodec.rate < 0) ? 0 : voiceCodec.rate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterReceivePayload(
+  const VideoCodec& videoCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterReceivePayload(videoCodec)");
+
+  return _rtpReceiver.RegisterReceivePayload(videoCodec.plName,
+                                             videoCodec.plType,
+                                             90000,
+                                             0,
+                                             videoCodec.maxBitrate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::ReceivePayloadType(
+  const CodecInst& voiceCodec,
+  WebRtc_Word8* plType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "ReceivePayloadType(voiceCodec)");
+
+  return _rtpReceiver.ReceivePayloadType(
+           voiceCodec.plname,
+           voiceCodec.plfreq,
+           voiceCodec.channels,
+           (voiceCodec.rate < 0) ? 0 : voiceCodec.rate,
+           plType);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::ReceivePayloadType(
+  const VideoCodec& videoCodec,
+  WebRtc_Word8* plType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "ReceivePayloadType(videoCodec)");
+
+  return _rtpReceiver.ReceivePayloadType(videoCodec.plName,
+                                         90000,
+                                         0,
+                                         videoCodec.maxBitrate,
+                                         plType);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeRegisterReceivePayload(
+    const WebRtc_Word8 payloadType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterReceivePayload(%d)",
+               payloadType);
+
+  return _rtpReceiver.DeRegisterReceivePayload(payloadType);
+}
+
+// get the currently configured SSRC filter
+WebRtc_Word32 ModuleRtpRtcpImpl::SSRCFilter(WebRtc_UWord32& allowedSSRC) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SSRCFilter()");
+
+  return _rtpReceiver.SSRCFilter(allowedSSRC);
+}
+
+// set a SSRC to be used as a filter for incoming RTP streams
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSSRCFilter(
+  const bool enable,
+  const WebRtc_UWord32 allowedSSRC) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetSSRCFilter(enable, 0x%x)",
+                 allowedSSRC);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetSSRCFilter(disable)");
+  }
+
+  return _rtpReceiver.SetSSRCFilter(enable, allowedSSRC);
+}
+
+// Get last received remote timestamp
+WebRtc_UWord32 ModuleRtpRtcpImpl::RemoteTimestamp() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteTimestamp()");
+
+  return _rtpReceiver.TimeStamp();
+}
+
+// Get the current estimated remote timestamp
+WebRtc_Word32 ModuleRtpRtcpImpl::EstimatedRemoteTimeStamp(
+    WebRtc_UWord32& timestamp) const {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "EstimatedRemoteTimeStamp()");
+
+  return _rtpReceiver.EstimatedRemoteTimeStamp(timestamp);
+}
+
+// Get incoming SSRC
+WebRtc_UWord32 ModuleRtpRtcpImpl::RemoteSSRC() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteSSRC()");
+
+  return _rtpReceiver.SSRC();
+}
+
+// Get remote CSRC
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteCSRCs(
+    WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteCSRCs()");
+
+  return _rtpReceiver.CSRCs(arrOfCSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTXSendStatus(
+  const bool enable,
+  const bool setSSRC,
+  const WebRtc_UWord32 SSRC) {
+  _rtpSender.SetRTXStatus(enable, setSSRC, SSRC);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RTXSendStatus(bool* enable,
+                                               WebRtc_UWord32* SSRC) const {
+  _rtpSender.RTXStatus(enable, SSRC);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTXReceiveStatus(
+  const bool enable,
+  const WebRtc_UWord32 SSRC) {
+  _rtpReceiver.SetRTXStatus(enable, SSRC);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RTXReceiveStatus(bool* enable,
+                                                  WebRtc_UWord32* SSRC) const {
+  _rtpReceiver.RTXStatus(enable, SSRC);
+  return 0;
+}
+
+// called by the network module when we receive a packet
+WebRtc_Word32 ModuleRtpRtcpImpl::IncomingPacket(
+    const WebRtc_UWord8* incomingPacket,
+    const WebRtc_UWord16 incomingPacketLength) {
+  WEBRTC_TRACE(kTraceStream,
+               kTraceRtpRtcp,
+               _id,
+               "IncomingPacket(packetLength:%u)",
+               incomingPacketLength);
+
+  // minimum RTP is 12 bytes
+  // minimum RTCP is 8 bytes (RTCP BYE)
+  if (incomingPacketLength < 8 || incomingPacket == NULL) {
+    WEBRTC_TRACE(kTraceDebug,
+                 kTraceRtpRtcp,
+                 _id,
+                 "IncomingPacket invalid buffer or length");
+    return -1;
+  }
+  // check RTP version
+  const WebRtc_UWord8  version  = incomingPacket[0] >> 6 ;
+  if (version != 2) {
+    WEBRTC_TRACE(kTraceDebug,
+                 kTraceRtpRtcp,
+                 _id,
+                 "IncomingPacket invalid RTP version");
+    return -1;
+  }
+
+  ModuleRTPUtility::RTPHeaderParser rtpParser(incomingPacket,
+                                              incomingPacketLength);
+
+  if (rtpParser.RTCP()) {
+    // Allow receive of non-compound RTCP packets.
+    RTCPUtility::RTCPParserV2 rtcpParser(incomingPacket,
+                                         incomingPacketLength,
+                                         true);
+
+    const bool validRTCPHeader = rtcpParser.IsValid();
+    if (!validRTCPHeader) {
+      WEBRTC_TRACE(kTraceDebug,
+                   kTraceRtpRtcp,
+                   _id,
+                   "IncomingPacket invalid RTCP packet");
+      return -1;
+    }
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    WebRtc_Word32 retVal = _rtcpReceiver.IncomingRTCPPacket(
+                             rtcpPacketInformation,
+                             &rtcpParser);
+    if (retVal == 0) {
+      _rtcpReceiver.TriggerCallbacksFromRTCPPacket(rtcpPacketInformation);
+    }
+    return retVal;
+
+  } else {
+    WebRtcRTPHeader rtpHeader;
+    memset(&rtpHeader, 0, sizeof(rtpHeader));
+
+    RtpHeaderExtensionMap map;
+    _rtpReceiver.GetHeaderExtensionMapCopy(&map);
+
+    const bool validRTPHeader = rtpParser.Parse(rtpHeader, &map);
+    if (!validRTPHeader) {
+      WEBRTC_TRACE(kTraceDebug,
+                   kTraceRtpRtcp,
+                   _id,
+                   "IncomingPacket invalid RTP header");
+      return -1;
+    }
+    return _rtpReceiver.IncomingRTPPacket(&rtpHeader,
+                                          incomingPacket,
+                                          incomingPacketLength);
+  }
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::IncomingAudioNTP(
+  const WebRtc_UWord32 audioReceivedNTPsecs,
+  const WebRtc_UWord32 audioReceivedNTPfrac,
+  const WebRtc_UWord32 audioRTCPArrivalTimeSecs,
+  const WebRtc_UWord32 audioRTCPArrivalTimeFrac) {
+  _receivedNTPsecsAudio = audioReceivedNTPsecs;
+  _receivedNTPfracAudio = audioReceivedNTPfrac;
+  _RTCPArrivalTimeSecsAudio = audioRTCPArrivalTimeSecs;
+  _RTCPArrivalTimeFracAudio = audioRTCPArrivalTimeFrac;
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingDataCallback(
+  RtpData* incomingDataCallback) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterIncomingDataCallback(incomingDataCallback:0x%x)",
+               incomingDataCallback);
+
+  return _rtpReceiver.RegisterIncomingDataCallback(incomingDataCallback);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingRTPCallback(
+  RtpFeedback* incomingMessagesCallback) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterIncomingRTPCallback(incomingMessagesCallback:0x%x)",
+               incomingMessagesCallback);
+
+  return _rtpReceiver.RegisterIncomingRTPCallback(incomingMessagesCallback);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingRTCPCallback(
+  RtcpFeedback* incomingMessagesCallback) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterIncomingRTCPCallback(incomingMessagesCallback:0x%x)",
+               incomingMessagesCallback);
+
+  return _rtcpReceiver.RegisterIncomingRTCPCallback(incomingMessagesCallback);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingVideoCallback(
+  RtpVideoFeedback* incomingMessagesCallback) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterIncomingVideoCallback(incomingMessagesCallback:0x%x)",
+               incomingMessagesCallback);
+
+  if (_rtcpReceiver.RegisterIncomingVideoCallback(incomingMessagesCallback)
+      == 0) {
+    return _rtpReceiver.RegisterIncomingVideoCallback(
+             incomingMessagesCallback);
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterAudioCallback(
+  RtpAudioFeedback* messagesCallback) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterAudioCallback(messagesCallback:0x%x)",
+               messagesCallback);
+
+  if (_rtpSender.RegisterAudioCallback(messagesCallback) == 0) {
+    return _rtpReceiver.RegisterIncomingAudioCallback(messagesCallback);
+  }
+  return -1;
+}
+
+/**
+*   Sender
+*/
+
+WebRtc_Word32 ModuleRtpRtcpImpl::InitSender() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "InitSender()");
+
+  _collisionDetected = false;
+
+  // if we are already receiving inform our sender to avoid collision
+  if (_rtpSender.Init(_rtpReceiver.SSRC()) != 0) {
+    return -1;
+  }
+  WebRtc_Word32 retVal = _rtcpSender.Init();
+
+  // make sure that RTCP objects are aware of our SSRC
+  // (it could have changed due to collision)
+  WebRtc_UWord32 SSRC = _rtpSender.SSRC();
+  _rtcpReceiver.SetSSRC(SSRC);
+  _rtcpSender.SetSSRC(SSRC);
+  return retVal;
+}
+
+bool ModuleRtpRtcpImpl::RTPKeepalive() const {
+  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "RTPKeepalive()");
+
+  return _rtpSender.RTPKeepalive();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RTPKeepaliveStatus(
+    bool* enable,
+    WebRtc_Word8* unknownPayloadType,
+    WebRtc_UWord16* deltaTransmitTimeMS) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RTPKeepaliveStatus()");
+
+  return _rtpSender.RTPKeepaliveStatus(enable,
+                                       unknownPayloadType,
+                                       deltaTransmitTimeMS);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTPKeepaliveStatus(
+  bool enable,
+  WebRtc_Word8 unknownPayloadType,
+  WebRtc_UWord16 deltaTransmitTimeMS) {
+  if (enable) {
+    WEBRTC_TRACE(
+      kTraceModuleCall,
+      kTraceRtpRtcp,
+      _id,
+      "SetRTPKeepaliveStatus(true, plType:%d deltaTransmitTimeMS:%u)",
+      unknownPayloadType,
+      deltaTransmitTimeMS);
+
+    // check the transmit keepalive delta time [1000,60000] ms (1-60 s)
+    if (deltaTransmitTimeMS < 1000 || deltaTransmitTimeMS > 60000) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceRtpRtcp,
+                   _id,
+                   "\tinvalid deltaTransmitTimeSeconds (%d)",
+                   deltaTransmitTimeMS);
+      return -1;
+    }
+
+    // check the payload type [0,127]
+    if (unknownPayloadType < 0) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceRtpRtcp,
+                   _id,
+                   "\tinvalid unknownPayloadType (%d)",
+                   unknownPayloadType);
+      return -1;
+    }
+    // enable RTP keepalive mechanism
+    return _rtpSender.EnableRTPKeepalive(unknownPayloadType,
+                                         deltaTransmitTimeMS);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetRTPKeepaliveStatus(disable)");
+    return _rtpSender.DisableRTPKeepalive();
+  }
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendPayload(
+  const CodecInst& voiceCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterSendPayload(plName:%s plType:%d frequency:%u)",
+               voiceCodec.plname,
+               voiceCodec.pltype,
+               voiceCodec.plfreq);
+
+  return _rtpSender.RegisterPayload(
+           voiceCodec.plname,
+           voiceCodec.pltype,
+           voiceCodec.plfreq,
+           voiceCodec.channels,
+           (voiceCodec.rate < 0) ? 0 : voiceCodec.rate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendPayload(
+  const VideoCodec& videoCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterSendPayload(plName:%s plType:%d)",
+               videoCodec.plName,
+               videoCodec.plType);
+
+  _sendVideoCodec = videoCodec;
+  _simulcast = (videoCodec.numberOfSimulcastStreams > 1) ? true : false;
+  return _rtpSender.RegisterPayload(videoCodec.plName,
+                                    videoCodec.plType,
+                                    90000,
+                                    0,
+                                    videoCodec.maxBitrate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeRegisterSendPayload(
+    const WebRtc_Word8 payloadType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterSendPayload(%d)", payloadType);
+
+  return _rtpSender.DeRegisterSendPayload(payloadType);
+}
+
+WebRtc_Word8 ModuleRtpRtcpImpl::SendPayloadType() const {
+  return _rtpSender.SendPayloadType();
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::StartTimestamp() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "StartTimestamp()");
+
+  return _rtpSender.StartTimestamp();
+}
+
+// configure start timestamp, default is a random number
+WebRtc_Word32 ModuleRtpRtcpImpl::SetStartTimestamp(
+    const WebRtc_UWord32 timestamp) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetStartTimestamp(%d)",
+               timestamp);
+
+  return _rtpSender.SetStartTimestamp(timestamp, true);
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::SequenceNumber() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SequenceNumber()");
+
+  return _rtpSender.SequenceNumber();
+}
+
+// Set SequenceNumber, default is a random number
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSequenceNumber(
+    const WebRtc_UWord16 seqNum) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetSequenceNumber(%d)",
+               seqNum);
+
+  return _rtpSender.SetSequenceNumber(seqNum);
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::SSRC() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SSRC()");
+
+  return _rtpSender.SSRC();
+}
+
+// configure SSRC, default is a random number
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSSRC(const WebRtc_UWord32 ssrc) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetSSRC(%d)", ssrc);
+
+  if (_rtpSender.SetSSRC(ssrc) == 0) {
+    _rtcpReceiver.SetSSRC(ssrc);
+    _rtcpSender.SetSSRC(ssrc);
+    return 0;
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCSRCStatus(const bool include) {
+  _rtcpSender.SetCSRCStatus(include);
+  return _rtpSender.SetCSRCStatus(include);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::CSRCs(
+    WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "CSRCs()");
+
+  return _rtpSender.CSRCs(arrOfCSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCSRCs(
+    const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+    const WebRtc_UWord8 arrLength) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetCSRCs(arrLength:%d)",
+               arrLength);
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+
+  if (defaultInstance) {
+    // for default we need to update all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        module->SetCSRCs(arrOfCSRC, arrLength);
+      }
+      it++;
+    }
+    return 0;
+
+  } else {
+    for (int i = 0; i < arrLength; i++) {
+      WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "\tidx:%d CSRC:%u", i,
+                   arrOfCSRC[i]);
+    }
+    _rtcpSender.SetCSRCs(arrOfCSRC, arrLength);
+    return _rtpSender.SetCSRCs(arrOfCSRC, arrLength);
+  }
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::PacketCountSent() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "PacketCountSent()");
+
+  return _rtpSender.Packets();
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::ByteCountSent() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ByteCountSent()");
+
+  return _rtpSender.Bytes();
+}
+
+int ModuleRtpRtcpImpl::CurrentSendFrequencyHz() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "CurrentSendFrequencyHz()");
+
+  return _rtpSender.SendPayloadFrequency();
+}
+
// Starts or stops sending. Stopping is refused while RTP keepalive is
// active. When the sending state actually changes, the RTCP sender emits a
// BYE (on true->false), the RTP sender refreshes its timestamp/SSRC, and
// the RTCP objects are re-synchronized to the (possibly new) SSRC.
// Returns 0 on success, -1 if stopping was refused.
WebRtc_Word32 ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
  if (sending) {
    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
                 "SetSendingStatus(sending)");
  } else {
    // Keepalive packets must keep flowing; refuse to stop while active.
    if (_rtpSender.RTPKeepalive()) {
      WEBRTC_TRACE(
          kTraceWarning,
          kTraceRtpRtcp,
          _id,
          "Can't SetSendingStatus(stopped) when RTP Keepalive is active");
      return -1;
    }
    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
                 "SetSendingStatus(stopped)");
  }
  if (_rtcpSender.Sending() != sending) {
    // sends RTCP BYE when going from true to false
    if (_rtcpSender.SetSendingStatus(sending) != 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
                   "Failed to send RTCP BYE");
    }

    _collisionDetected = false;

    // generate a new timeStamp if true and not configured via API
    // generate a new SSRC for the next "call" if false
    _rtpSender.SetSendingStatus(sending);

    // make sure that RTCP objects are aware of our SSRC (it could have changed
    // due to collision)
    WebRtc_UWord32 SSRC = _rtpSender.SSRC();
    _rtcpReceiver.SetSSRC(SSRC);
    _rtcpSender.SetSSRC(SSRC);
    return 0;
  }
  return 0;
}
+
+bool ModuleRtpRtcpImpl::Sending() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "Sending()");
+
+  return _rtcpSender.Sending();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSendingMediaStatus(const bool sending) {
+  if (sending) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetSendingMediaStatus(sending)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetSendingMediaStatus(stopped)");
+  }
+  _rtpSender.SetSendingMediaStatus(sending);
+  return 0;
+}
+
+bool ModuleRtpRtcpImpl::SendingMedia() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "Sending()");
+
+  const bool haveChildModules(_childModules.empty() ? false : true);
+  if (!haveChildModules) {
+    return _rtpSender.SendingMedia();
+  }
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs);
+  std::list<ModuleRtpRtcpImpl*>::const_iterator it = _childModules.begin();
+  while (it != _childModules.end()) {
+    RTPSender& rtpSender = (*it)->_rtpSender;
+    if (rtpSender.SendingMedia()) {
+      return true;
+    }
+    it++;
+  }
+  return false;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendTransport(
+  Transport* outgoingTransport) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterSendTransport(0x%x)", outgoingTransport);
+
+  if (_rtpSender.RegisterSendTransport(outgoingTransport) == 0) {
+    return _rtcpSender.RegisterSendTransport(outgoingTransport);
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
+    FrameType frameType,
+    WebRtc_Word8 payloadType,
+    WebRtc_UWord32 timeStamp,
+    const WebRtc_UWord8* payloadData,
+    WebRtc_UWord32 payloadSize,
+    const RTPFragmentationHeader* fragmentation,
+    const RTPVideoHeader* rtpVideoHdr) {
+  WEBRTC_TRACE(
+    kTraceStream,
+    kTraceRtpRtcp,
+    _id,
+    "SendOutgoingData(frameType:%d payloadType:%d timeStamp:%u size:%u)",
+    frameType, payloadType, timeStamp, payloadSize);
+
+  const bool haveChildModules(_childModules.empty() ? false : true);
+  if (!haveChildModules) {
+    // Don't sent RTCP from default module
+    if (_rtcpSender.TimeToSendRTCPReport(kVideoFrameKey == frameType)) {
+      _rtcpSender.SendRTCP(kRtcpReport);
+    }
+    return _rtpSender.SendOutgoingData(frameType,
+                                       payloadType,
+                                       timeStamp,
+                                       payloadData,
+                                       payloadSize,
+                                       fragmentation,
+                                       NULL,
+                                       &(rtpVideoHdr->codecHeader));
+  }
+  WebRtc_Word32 retVal = -1;
+  if (_simulcast) {
+    if (rtpVideoHdr == NULL) {
+      return -1;
+    }
+    int idx = 0;
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    for (; idx < rtpVideoHdr->simulcastIdx; idx++) {
+      it++;
+      if (it == _childModules.end()) {
+        return -1;
+      }
+    }
+    RTPSender& rtpSender = (*it)->_rtpSender;
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SendOutgoingData(SimulcastIdx:%u size:%u, ssrc:0x%x)",
+                 idx, payloadSize, rtpSender.SSRC());
+    return rtpSender.SendOutgoingData(frameType,
+                                      payloadType,
+                                      timeStamp,
+                                      payloadData,
+                                      payloadSize,
+                                      fragmentation,
+                                      NULL,
+                                      &(rtpVideoHdr->codecHeader));
+  } else {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    // TODO(pwestin) remove codecInfo from SendOutgoingData
+    VideoCodecInformation* codecInfo = NULL;
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    if (it != _childModules.end()) {
+      RTPSender& rtpSender = (*it)->_rtpSender;
+      retVal = rtpSender.SendOutgoingData(frameType,
+                                          payloadType,
+                                          timeStamp,
+                                          payloadData,
+                                          payloadSize,
+                                          fragmentation,
+                                          NULL,
+                                          &(rtpVideoHdr->codecHeader));
+
+      it++;
+    }
+
+    // send to all remaining "child" modules
+    while (it != _childModules.end()) {
+      RTPSender& rtpSender = (*it)->_rtpSender;
+      retVal = rtpSender.SendOutgoingData(frameType,
+                                          payloadType,
+                                          timeStamp,
+                                          payloadData,
+                                          payloadSize,
+                                          fragmentation,
+                                          codecInfo,
+                                          &(rtpVideoHdr->codecHeader));
+
+      it++;
+    }
+  }
+  return retVal;
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::MaxPayloadLength() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "MaxPayloadLength()");
+
+  return _rtpSender.MaxPayloadLength();
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::MaxDataPayloadLength() const {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "MaxDataPayloadLength()");
+
+  WebRtc_UWord16 minDataPayloadLength = IP_PACKET_SIZE - 28; // Assuming IP/UDP
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance) {
+    // for default we need to update all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    std::list<ModuleRtpRtcpImpl*>::const_iterator it =
+      _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        WebRtc_UWord16 dataPayloadLength =
+          module->MaxDataPayloadLength();
+        if (dataPayloadLength < minDataPayloadLength) {
+          minDataPayloadLength = dataPayloadLength;
+        }
+      }
+      it++;
+    }
+  }
+
+  WebRtc_UWord16 dataPayloadLength = _rtpSender.MaxDataPayloadLength();
+  if (dataPayloadLength < minDataPayloadLength) {
+    minDataPayloadLength = dataPayloadLength;
+  }
+  return minDataPayloadLength;
+}
+
// Recomputes the per-packet transport overhead (IP + TCP/UDP headers plus
// any authentication bytes) and shrinks/grows the RTP sender's maximum
// payload length by the overhead delta. No-op if the overhead is unchanged.
WebRtc_Word32 ModuleRtpRtcpImpl::SetTransportOverhead(
  const bool TCP,
  const bool IPV6,
  const WebRtc_UWord8 authenticationOverhead) {
  WEBRTC_TRACE(
    kTraceModuleCall,
    kTraceRtpRtcp,
    _id,
    "SetTransportOverhead(TCP:%d, IPV6:%d authenticationOverhead:%u)",
    TCP, IPV6, authenticationOverhead);

  // IP header: 40 bytes for IPv6, 20 for IPv4.
  WebRtc_UWord16 packetOverHead = 0;
  if (IPV6) {
    packetOverHead = 40;
  } else {
    packetOverHead = 20;
  }
  // Transport header: 20 bytes for TCP, 8 for UDP.
  if (TCP) {
    // TCP
    packetOverHead += 20;
  } else {
    // UDP
    packetOverHead += 8;
  }
  packetOverHead += authenticationOverhead;

  if (packetOverHead == _packetOverHead) {
    // ok same as before
    return 0;
  }
  // calc diff
  WebRtc_Word16 packetOverHeadDiff = packetOverHead - _packetOverHead;

  // store new
  _packetOverHead = packetOverHead;

  _rtpReceiver.SetPacketOverHead(_packetOverHead);
  // Adjust the max payload length by the signed delta so the total packet
  // size stays within the previously configured MTU.
  WebRtc_UWord16 length = _rtpSender.MaxPayloadLength() - packetOverHeadDiff;
  return _rtpSender.SetMaxPayloadLength(length, _packetOverHead);
}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetMaxTransferUnit(const WebRtc_UWord16 MTU) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetMaxTransferUnit(%u)",
+               MTU);
+
+  if (MTU > IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "Invalid in argument to SetMaxTransferUnit(%u)", MTU);
+    return -1;
+  }
+  return _rtpSender.SetMaxPayloadLength(MTU - _packetOverHead,
+                                        _packetOverHead);
+}
+
+/*
+*   RTCP
+*/
+RTCPMethod ModuleRtpRtcpImpl::RTCP() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RTCP()");
+
+  if (_rtcpSender.Status() != kRtcpOff) {
+    return _rtcpReceiver.Status();
+  }
+  return kRtcpOff;
+}
+
+// configure RTCP status i.e on/off
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTCPStatus(const RTCPMethod method) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetRTCPStatus(%d)",
+               method);
+
+  if (_rtcpSender.SetRTCPStatus(method) == 0) {
+    return _rtcpReceiver.SetRTCPStatus(method);
+  }
+  return -1;
+}
+
// only for internal test
// Returns the NTP time of the last sent SR and writes the local send time
// to lastRTCPTime.
WebRtc_UWord32 ModuleRtpRtcpImpl::LastSendReport(WebRtc_UWord32& lastRTCPTime) {
  return _rtcpSender.LastSendReport(lastRTCPTime);
}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCNAME(const char cName[RTCP_CNAME_SIZE]) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetCNAME(%s)", cName);
+  return _rtcpSender.SetCNAME(cName);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::CNAME(char cName[RTCP_CNAME_SIZE]) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "CNAME()");
+  return _rtcpSender.CNAME(cName);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::AddMixedCNAME(
+  const WebRtc_UWord32 SSRC,
+  const char cName[RTCP_CNAME_SIZE]) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "AddMixedCNAME(SSRC:%u)", SSRC);
+
+  return _rtcpSender.AddMixedCNAME(SSRC, cName);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoveMixedCNAME(const WebRtc_UWord32 SSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "RemoveMixedCNAME(SSRC:%u)", SSRC);
+  return _rtcpSender.RemoveMixedCNAME(SSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteCNAME(
+  const WebRtc_UWord32 remoteSSRC,
+  char cName[RTCP_CNAME_SIZE]) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "RemoteCNAME(SSRC:%u)", remoteSSRC);
+
+  return _rtcpReceiver.CNAME(remoteSSRC, cName);
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::RemoteSequenceNumber() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteSequenceNumber()");
+
+  return _rtpReceiver.SequenceNumber();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteNTP(
+    WebRtc_UWord32* receivedNTPsecs,
+    WebRtc_UWord32* receivedNTPfrac,
+    WebRtc_UWord32* RTCPArrivalTimeSecs,
+    WebRtc_UWord32* RTCPArrivalTimeFrac) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteNTP()");
+
+  return _rtcpReceiver.NTP(receivedNTPsecs,
+                           receivedNTPfrac,
+                           RTCPArrivalTimeSecs,
+                           RTCPArrivalTimeFrac);
+}
+
+// Get RoundTripTime
+WebRtc_Word32 ModuleRtpRtcpImpl::RTT(const WebRtc_UWord32 remoteSSRC,
+                                     WebRtc_UWord16* RTT,
+                                     WebRtc_UWord16* avgRTT,
+                                     WebRtc_UWord16* minRTT,
+                                     WebRtc_UWord16* maxRTT) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RTT()");
+
+  return _rtcpReceiver.RTT(remoteSSRC, RTT, avgRTT, minRTT, maxRTT);
+}
+
+// Reset RoundTripTime statistics
+WebRtc_Word32
+ModuleRtpRtcpImpl::ResetRTT(const WebRtc_UWord32 remoteSSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ResetRTT(SSRC:%u)",
+               remoteSSRC);
+
+  return _rtcpReceiver.ResetRTT(remoteSSRC);
+}
+
+// Reset RTP statistics
+WebRtc_Word32
+ModuleRtpRtcpImpl::ResetStatisticsRTP() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ResetStatisticsRTP()");
+
+  return _rtpReceiver.ResetStatistics();
+}
+
+// Reset RTP data counters for the receiving side
+WebRtc_Word32 ModuleRtpRtcpImpl::ResetReceiveDataCountersRTP() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "ResetReceiveDataCountersRTP()");
+
+  return _rtpReceiver.ResetDataCounters();
+}
+
+// Reset RTP data counters for the sending side
+WebRtc_Word32 ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "ResetSendDataCountersRTP()");
+
+  return _rtpSender.ResetDataCounters();
+}
+
+// Force a send of an RTCP packet
+// normal SR and RR are triggered via the process function
+WebRtc_Word32 ModuleRtpRtcpImpl::SendRTCP(WebRtc_UWord32 rtcpPacketType) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SendRTCP(0x%x)",
+               rtcpPacketType);
+
+  return  _rtcpSender.SendRTCP(rtcpPacketType);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
+    const WebRtc_UWord8 subType,
+    const WebRtc_UWord32 name,
+    const WebRtc_UWord8* data,
+    const WebRtc_UWord16 length) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetRTCPApplicationSpecificData(subType:%d name:0x%x)", subType,
+               name);
+
+  return  _rtcpSender.SetApplicationSpecificData(subType, name, data, length);
+}
+
+/*
+*   (XR) VOIP metric
+*/
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTCPVoIPMetrics(
+    const RTCPVoIPMetric* VoIPMetric) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetRTCPVoIPMetrics()");
+
+  return  _rtcpSender.SetRTCPVoIPMetrics(VoIPMetric);
+}
+
+// our localy created statistics of the received RTP stream
+WebRtc_Word32 ModuleRtpRtcpImpl::StatisticsRTP(
+    WebRtc_UWord8*  fraction_lost,
+    WebRtc_UWord32* cum_lost,
+    WebRtc_UWord32* ext_max,
+    WebRtc_UWord32* jitter,
+    WebRtc_UWord32* max_jitter) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "StatisticsRTP()");
+
+  WebRtc_UWord32 jitter_transmission_time_offset = 0;
+
+  WebRtc_Word32 retVal = _rtpReceiver.Statistics(
+      fraction_lost,
+      cum_lost,
+      ext_max,
+      jitter,
+      max_jitter,
+      &jitter_transmission_time_offset,
+      (_rtcpSender.Status() == kRtcpOff));
+  if (retVal == -1) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "StatisticsRTP() no statisitics availble");
+  }
+  return retVal;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DataCountersRTP(
+    WebRtc_UWord32* bytesSent,
+    WebRtc_UWord32* packetsSent,
+    WebRtc_UWord32* bytesReceived,
+    WebRtc_UWord32* packetsReceived) const {
+  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "DataCountersRTP()");
+
+  if (bytesSent) {
+    *bytesSent = _rtpSender.Bytes();
+  }
+  if (packetsSent) {
+    *packetsSent = _rtpSender.Packets();
+  }
+  return _rtpReceiver.DataCounters(bytesReceived, packetsReceived);
+}
+
// Retrieves the receive statistics used to build RTCP report blocks.
// Unlike StatisticsRTP(), this always resets the extended-high
// sequence-number bookkeeping (last argument true) and also reports the
// transmission-time-offset jitter.
WebRtc_Word32 ModuleRtpRtcpImpl::ReportBlockStatistics(
    WebRtc_UWord8* fraction_lost,
    WebRtc_UWord32* cum_lost,
    WebRtc_UWord32* ext_max,
    WebRtc_UWord32* jitter,
    WebRtc_UWord32* jitter_transmission_time_offset) {
  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ReportBlockStatistics()");
  WebRtc_Word32 missing = 0;
  WebRtc_Word32 ret = _rtpReceiver.Statistics(fraction_lost,
                                              cum_lost,
                                              ext_max,
                                              jitter,
                                              NULL,
                                              jitter_transmission_time_offset,
                                              &missing,
                                              true);

#ifdef MATLAB
  // Debug-only visualization: plot the running count of missing packets.
  if (_plot1 == NULL) {
    _plot1 = eng.NewPlot(new MatlabPlot());
    _plot1->AddTimeLine(30, "b", "lost", _clock.GetTimeInMS());
  }
  _plot1->Append("lost", missing);
  _plot1->Plot();
#endif

  return ret;
}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* senderInfo) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteRTCPStat()");
+
+  return _rtcpReceiver.SenderInfoReceived(senderInfo);
+}
+
+// received RTCP report
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteRTCPStat(
+  std::vector<RTCPReportBlock>* receiveBlocks) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteRTCPStat()");
+
+  return _rtcpReceiver.StatisticsReceived(receiveBlocks);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::AddRTCPReportBlock(
+    const WebRtc_UWord32 SSRC,
+    const RTCPReportBlock* reportBlock) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "AddRTCPReportBlock()");
+
+  return _rtcpSender.AddReportBlock(SSRC, reportBlock);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoveRTCPReportBlock(
+    const WebRtc_UWord32 SSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoveRTCPReportBlock()");
+
+  return _rtcpSender.RemoveReportBlock(SSRC);
+}
+
+/*
+ *  (REMB) Receiver Estimated Max Bitrate
+ */
+bool ModuleRtpRtcpImpl::REMB() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "REMB()");
+
+  return _rtcpSender.REMB();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetREMBStatus(const bool enable) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetREMBStatus(enable)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetREMBStatus(disable)");
+  }
+  return _rtcpSender.SetREMBStatus(enable);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetREMBData(const WebRtc_UWord32 bitrate,
+                                             const WebRtc_UWord8 numberOfSSRC,
+                                             const WebRtc_UWord32* SSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetREMBData(bitrate:%d,?,?)", bitrate);
+  return _rtcpSender.SetREMBData(bitrate, numberOfSSRC, SSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetMaximumBitrateEstimate(
+        const WebRtc_UWord32 bitrate) {
+  if(!_rtcpSender.REMB()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "SetMaximumBitrateEstimate - REMB not enabled.");
+    return -1;
+  }
+  OnReceivedEstimatedMaxBitrate(bitrate);
+  return 0;
+}
+
// Registers an observer to be notified of remote bitrate estimates;
// returns the RTCP sender's acceptance of the observer.
bool ModuleRtpRtcpImpl::SetRemoteBitrateObserver(
  RtpRemoteBitrateObserver* observer) {
  return _rtcpSender.SetRemoteBitrateObserver(observer);
}
+
+/*
+ *   (IJ) Extended jitter report.
+ */
+bool ModuleRtpRtcpImpl::IJ() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "IJ()");
+
+  return _rtcpSender.IJ();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetIJStatus(const bool enable) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetIJStatus(%s)", enable ? "true" : "false");
+
+  return _rtcpSender.SetIJStatus(enable);
+}
+
// Registers an RTP header extension (by type and local ID) for sending.
WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendRtpHeaderExtension(
  const RTPExtensionType type,
  const WebRtc_UWord8 id) {
  return _rtpSender.RegisterRtpHeaderExtension(type, id);
}
+
// Removes a previously registered send-side RTP header extension.
WebRtc_Word32 ModuleRtpRtcpImpl::DeregisterSendRtpHeaderExtension(
  const RTPExtensionType type) {
  return _rtpSender.DeregisterRtpHeaderExtension(type);
}
+
// Registers an RTP header extension (by type and expected ID) for receiving.
WebRtc_Word32 ModuleRtpRtcpImpl::RegisterReceiveRtpHeaderExtension(
  const RTPExtensionType type,
  const WebRtc_UWord8 id) {
  return _rtpReceiver.RegisterRtpHeaderExtension(type, id);
}
+
// Removes a previously registered receive-side RTP header extension.
WebRtc_Word32 ModuleRtpRtcpImpl::DeregisterReceiveRtpHeaderExtension(
  const RTPExtensionType type) {
  return _rtpReceiver.DeregisterRtpHeaderExtension(type);
}
+
// Enables/disables transmission smoothing (pacing) in the RTP sender.
void ModuleRtpRtcpImpl::SetTransmissionSmoothingStatus(const bool enable) {
  _rtpSender.SetTransmissionSmoothingStatus(enable);
}
+
// True when transmission smoothing (pacing) is enabled in the RTP sender.
bool ModuleRtpRtcpImpl::TransmissionSmoothingStatus() const {
  return _rtpSender.TransmissionSmoothingStatus();
}
+
+/*
+*   (TMMBR) Temporary Max Media Bit Rate
+*/
+bool ModuleRtpRtcpImpl::TMMBR() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "TMMBR()");
+
+  return _rtcpSender.TMMBR();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetTMMBRStatus(enable)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetTMMBRStatus(disable)");
+  }
+  return _rtcpSender.SetTMMBRStatus(enable);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::TMMBRReceived(
+    const WebRtc_UWord32 size,
+    const WebRtc_UWord32 accNumCandidates,
+    TMMBRSet* candidateSet) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "TMMBRReceived()");
+
+  return _rtcpReceiver.TMMBRReceived(size, accNumCandidates, candidateSet);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* boundingSet,
+                                          const WebRtc_UWord32 maxBitrateKbit) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetTMMBN()");
+
+  return _rtcpSender.SetTMMBN(boundingSet, maxBitrateKbit);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RequestTMMBR(const WebRtc_UWord32 estimatedBW,
+                                              const WebRtc_UWord32 packetOH) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RequestTMMBR()");
+
+  return _rtcpSender.RequestTMMBR(estimatedBW, packetOH);
+}
+
+/*
+*   (NACK) Negative acknowledgement
+*/
+
+// Is Negative acknowledgement requests on/off?
+NACKMethod ModuleRtpRtcpImpl::NACK() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "NACK()");
+
+  NACKMethod childMethod = kNackOff;
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance) {
+    // for default we need to check all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    std::list<ModuleRtpRtcpImpl*>::const_iterator it =
+      _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        NACKMethod nackMethod = module->NACK();
+        if (nackMethod != kNackOff) {
+          childMethod = nackMethod;
+          break;
+        }
+      }
+      it++;
+    }
+  }
+
+  NACKMethod method = _nackMethod;
+  if (childMethod != kNackOff) {
+    method = childMethod;
+  }
+  return method;
+}
+
+// Turn negative acknowledgement requests on/off
+WebRtc_Word32 ModuleRtpRtcpImpl::SetNACKStatus(NACKMethod method) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetNACKStatus(%u)", method);
+
+  _nackMethod = method;
+  _rtpReceiver.SetNACKStatus(method);
+  return 0;
+}
+
+// Returns the currently configured retransmission mode.
+int ModuleRtpRtcpImpl::SelectiveRetransmissions() const {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SelectiveRetransmissions()");
+  return _rtpSender.SelectiveRetransmissions();
+}
+
+// Enable or disable a retransmission mode, which decides which packets will
+// be retransmitted if NACKed.
+int ModuleRtpRtcpImpl::SetSelectiveRetransmissions(uint8_t settings) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetSelectiveRetransmissions(%u)",
+               settings);
+  return _rtpSender.SetSelectiveRetransmissions(settings);
+}
+
+// Send a Negative acknowledgement packet
+WebRtc_Word32 ModuleRtpRtcpImpl::SendNACK(const WebRtc_UWord16* nackList,
+                                          const WebRtc_UWord16 size) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SendNACK(size:%u)", size);
+
+  if (size > NACK_PACKETS_MAX_SIZE) {
+    RequestKeyFrame();
+    return -1;
+  }
+  WebRtc_UWord16 avgRTT = 0;
+  _rtcpReceiver.RTT(_rtpReceiver.SSRC(), NULL, &avgRTT, NULL, NULL);
+
+  WebRtc_UWord32 waitTime = 5 + ((avgRTT * 3) >> 1); // 5 + RTT*1.5
+  if (waitTime == 5) {
+    waitTime = 100; //During startup we don't have an RTT
+  }
+  const WebRtc_UWord32 now = _clock.GetTimeInMS();
+  const WebRtc_UWord32 timeLimit = now - waitTime;
+
+  if (_nackLastTimeSent < timeLimit) {
+    // send list
+  } else {
+    // only send if extended list
+    if (_nackLastSeqNumberSent == nackList[size - 1]) {
+      // last seq num is the same don't send list
+      return 0;
+    } else {
+      // send list
+    }
+  }
+  _nackLastTimeSent =  now;
+  _nackLastSeqNumberSent = nackList[size - 1];
+
+  switch (_nackMethod) {
+    case kNackRtcp:
+      return _rtcpSender.SendRTCP(kRtcpNack, size, nackList);
+    case kNackOff:
+      return -1;
+  };
+  return -1;
+}
+
+// Store the sent packets, needed to answer to a Negative acknowledgement
+// requests
+WebRtc_Word32 ModuleRtpRtcpImpl::SetStorePacketsStatus(
+  const bool enable,
+  const WebRtc_UWord16 numberToStore) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetStorePacketsStatus(enable, numberToStore:%d)",
+                 numberToStore);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetStorePacketsStatus(disable)");
+  }
+  return _rtpSender.SetStorePacketsStatus(enable, numberToStore);
+}
+
+/*
+*   Audio
+*/
+
+// Outband TelephoneEvent detection
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTelephoneEventStatus(
+  const bool enable,
+  const bool forwardToDecoder,
+  const bool detectEndOfTone) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetTelephoneEventStatus(enable:%d forwardToDecoder:%d"
+               " detectEndOfTone:%d)", enable, forwardToDecoder,
+               detectEndOfTone);
+
+  return _rtpReceiver.SetTelephoneEventStatus(enable, forwardToDecoder,
+                                              detectEndOfTone);
+}
+
+// Is outband TelephoneEvent turned on/off?
+bool ModuleRtpRtcpImpl::TelephoneEvent() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "TelephoneEvent()");
+
+  return _rtpReceiver.TelephoneEvent();
+}
+
+// Is forwarding of outband telephone events turned on/off?
+bool ModuleRtpRtcpImpl::TelephoneEventForwardToDecoder() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "TelephoneEventForwardToDecoder()");
+
+  return _rtpReceiver.TelephoneEventForwardToDecoder();
+}
+
+// Send a TelephoneEvent tone using RFC 2833 (4733)
+WebRtc_Word32 ModuleRtpRtcpImpl::SendTelephoneEventOutband(
+    const WebRtc_UWord8 key,
+    const WebRtc_UWord16 timeMs,
+    const WebRtc_UWord8 level) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SendTelephoneEventOutband(key:%u, timeMs:%u, level:%u)", key,
+               timeMs, level);
+
+  return _rtpSender.SendTelephoneEvent(key, timeMs, level);
+}
+
+bool ModuleRtpRtcpImpl::SendTelephoneEventActive(
+  WebRtc_Word8& telephoneEvent) const {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SendTelephoneEventActive()");
+
+  return _rtpSender.SendTelephoneEventActive(telephoneEvent);
+}
+
+// set audio packet size, used to determine when it's time to send a DTMF
+// packet in silence (CNG)
+WebRtc_Word32 ModuleRtpRtcpImpl::SetAudioPacketSize(
+  const WebRtc_UWord16 packetSizeSamples) {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetAudioPacketSize(%u)",
+               packetSizeSamples);
+
+  return _rtpSender.SetAudioPacketSize(packetSizeSamples);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTPAudioLevelIndicationStatus(
+  const bool enable,
+  const WebRtc_UWord8 ID) {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetRTPAudioLevelIndicationStatus(enable=%d, ID=%u)",
+               enable,
+               ID);
+
+  if (enable) {
+    _rtpReceiver.RegisterRtpHeaderExtension(kRtpExtensionAudioLevel, ID);
+  } else {
+    _rtpReceiver.DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
+  }
+  return _rtpSender.SetAudioLevelIndicationStatus(enable, ID);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::GetRTPAudioLevelIndicationStatus(
+  bool& enable,
+  WebRtc_UWord8& ID) const {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "GetRTPAudioLevelIndicationStatus()");
+  return _rtpSender.AudioLevelIndicationStatus(enable, ID);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetAudioLevel(const WebRtc_UWord8 level_dBov) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetAudioLevel(level_dBov:%u)",
+               level_dBov);
+  return _rtpSender.SetAudioLevel(level_dBov);
+}
+
+// Set the send-side payload type for Redundant Audio Data (RFC 2198).
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSendREDPayloadType(
+  const WebRtc_Word8 payloadType) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetSendREDPayloadType(%d)", payloadType);
+  return _rtpSender.SetRED(payloadType);
+}
+
+// Retrieve the payload type configured for Redundant Audio Data (RFC 2198).
+WebRtc_Word32 ModuleRtpRtcpImpl::SendREDPayloadType(
+    WebRtc_Word8& payloadType) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SendREDPayloadType()");
+  return _rtpSender.RED(payloadType);
+}
+
+
+/*
+*   Video
+*/
+// Returns the video codec type currently known to the RTP receiver.
+RtpVideoCodecTypes ModuleRtpRtcpImpl::ReceivedVideoCodec() const {
+  return _rtpReceiver.VideoCodecType();
+}
+
+// Returns the video codec type currently configured on the RTP sender.
+RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
+  return _rtpSender.VideoCodecType();
+}
+
+// Apply a new target send bitrate plus min/max caps. On the default module
+// the setting is first fanned out to every child module, then applied to
+// the local sender and the bandwidth manager.
+void ModuleRtpRtcpImpl::SetSendBitrate(const WebRtc_UWord32 startBitrate,
+                                       const WebRtc_UWord16 minBitrateKbit,
+                                       const WebRtc_UWord16 maxBitrateKbit) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetSendBitrate start:%ubit/s min:%uKbit/s max:%uKbit/s",
+               startBitrate, minBitrateKbit, maxBitrateKbit);
+
+  if (!_childModules.empty()) {
+    // This is the default module: propagate to all children as well.
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+         it != _childModules.end(); ++it) {
+      RtpRtcp* childModule = *it;
+      if (childModule) {
+        childModule->SetSendBitrate(startBitrate, minBitrateKbit,
+                                    maxBitrateKbit);
+      }
+    }
+  }
+  // TODO(henrike): this function also returns a value. It never fails so
+  // make it return void.
+  _rtpSender.SetTargetSendBitrate(startBitrate);
+
+  _bandwidthManagement.SetSendBitrate(startBitrate, minBitrateKbit,
+                                      maxBitrateKbit);
+}
+
+// Select how key frames are requested from the remote side (FIR over RTP,
+// or PLI/FIR over RTCP). Takes effect on the next RequestKeyFrame() call.
+WebRtc_Word32 ModuleRtpRtcpImpl::SetKeyFrameRequestMethod(
+  const KeyFrameRequestMethod method) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetKeyFrameRequestMethod(method:%u)", method);
+  _keyFrameReqMethod = method;
+  return 0;
+}
+
+// Request a key frame from the remote sender using the configured method.
+// Returns the result of the underlying send call, or -1 for an
+// unrecognized method value.
+WebRtc_Word32 ModuleRtpRtcpImpl::RequestKeyFrame() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RequestKeyFrame");
+
+  switch (_keyFrameReqMethod) {
+    case kKeyFrameReqFirRtp:
+      return _rtpSender.SendRTPIntraRequest();
+    case kKeyFrameReqPliRtcp:
+      return _rtcpSender.SendRTCP(kRtcpPli);
+    case kKeyFrameReqFirRtcp:
+      return _rtcpSender.SendRTCP(kRtcpFir);
+  }
+  return -1;
+}
+
+// Send an RTCP Slice Loss Indication (SLI) for the given picture ID.
+WebRtc_Word32 ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
+  const WebRtc_UWord8 pictureID) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SendRTCPSliceLossIndication (pictureID:%d)", pictureID);
+  return _rtcpSender.SendRTCP(kRtcpSli, 0, 0, false, pictureID);
+}
+
+// Pass the camera capture delay (ms) to the RTCP sender. On the default
+// module the delay is forwarded to every child module instead of being
+// applied locally.
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCameraDelay(const WebRtc_Word32 delayMS) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetCameraDelay(%d)", delayMS);
+
+  if (!_childModules.empty()) {
+    // Default module: fan the setting out to the children.
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+         it != _childModules.end(); ++it) {
+      RtpRtcp* childModule = *it;
+      if (childModule) {
+        childModule->SetCameraDelay(delayMS);
+      }
+    }
+    return 0;
+  }
+  return _rtcpSender.SetCameraDelay(delayMS);
+}
+
+// Enable or disable generic FEC on the sender, together with the RED and
+// FEC payload types to use.
+WebRtc_Word32 ModuleRtpRtcpImpl::SetGenericFECStatus(
+  const bool enable,
+  const WebRtc_UWord8 payloadTypeRED,
+  const WebRtc_UWord8 payloadTypeFEC) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetGenericFECStatus(enable, %u)", payloadTypeRED);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetGenericFECStatus(disable)");
+  }
+  return _rtpSender.SetGenericFECStatus(enable, payloadTypeRED,
+                                        payloadTypeFEC);
+}
+
+// Report whether generic FEC is enabled plus the RED/FEC payload types in
+// use. On the default module |enable| is forced to true when any child
+// module has FEC enabled.
+WebRtc_Word32 ModuleRtpRtcpImpl::GenericFECStatus(
+  bool& enable,
+  WebRtc_UWord8& payloadTypeRED,
+  WebRtc_UWord8& payloadTypeFEC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "GenericFECStatus()");
+
+  bool anyChildEnabled = false;
+  if (!_childModules.empty()) {
+    // Default module: FEC counts as enabled if any child has it on.
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+         it != _childModules.end(); ++it) {
+      RtpRtcp* childModule = *it;
+      if (childModule) {
+        bool childFecEnabled = false;
+        WebRtc_UWord8 unusedRED = 0;
+        WebRtc_UWord8 unusedFEC = 0;
+        if (childModule->GenericFECStatus(childFecEnabled, unusedRED,
+                                          unusedFEC) == 0 && childFecEnabled) {
+          anyChildEnabled = true;
+          break;
+        }
+      }
+    }
+  }
+  WebRtc_Word32 retVal = _rtpSender.GenericFECStatus(enable,
+                                                     payloadTypeRED,
+                                                     payloadTypeFEC);
+  if (anyChildEnabled) {
+    // Enabled for at least one child module.
+    enable = true;
+  }
+  return retVal;
+}
+
+// Configure the FEC protection rate for key and delta frames. The default
+// module forwards the setting to all children instead of applying it
+// locally.
+WebRtc_Word32 ModuleRtpRtcpImpl::SetFECCodeRate(
+  const WebRtc_UWord8 keyFrameCodeRate,
+  const WebRtc_UWord8 deltaFrameCodeRate) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetFECCodeRate(%u, %u)",
+               keyFrameCodeRate, deltaFrameCodeRate);
+
+  if (!_childModules.empty()) {
+    // Default module: fan out to the children.
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+         it != _childModules.end(); ++it) {
+      RtpRtcp* childModule = *it;
+      if (childModule) {
+        childModule->SetFECCodeRate(keyFrameCodeRate, deltaFrameCodeRate);
+      }
+    }
+    return 0;
+  }
+  return _rtpSender.SetFECCodeRate(keyFrameCodeRate, deltaFrameCodeRate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetFECUepProtection(
+  const bool keyUseUepProtection,
+  const bool deltaUseUepProtection) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp, _id,
+               "SetFECUepProtection(%d, %d)",
+               keyUseUepProtection,
+               deltaUseUepProtection);
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance)  {
+    // for default we need to update all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        module->SetFECUepProtection(keyUseUepProtection,
+                                    deltaUseUepProtection);
+      }
+      it++;
+    }
+    return 0;
+  }
+  return _rtpSender.SetFECUepProtection(keyUseUepProtection,
+                                        deltaUseUepProtection);
+}
+
+// Record the remote side's SSRC in the RTCP components and resolve a
+// local/remote SSRC collision by switching to a new local SSRC, at most
+// once per module lifetime.
+void ModuleRtpRtcpImpl::SetRemoteSSRC(const WebRtc_UWord32 SSRC) {
+  // Inform the RTCP sender and receiver about the incoming SSRC.
+  _rtcpSender.SetRemoteSSRC(SSRC);
+  _rtcpReceiver.SetRemoteSSRC(SSRC);
+
+  // Check whether the remote SSRC collides with our own sender's SSRC.
+  if (_rtpSender.SSRC() == SSRC && !_collisionDetected) {
+    // If we detect a collision, change the SSRC -- but only once.
+    _collisionDetected = true;
+    // NOTE(review): GenerateNewSSRC() presumably also updates the RTP
+    // sender's own SSRC (only the RTCP objects are updated below) -- confirm.
+    WebRtc_UWord32 newSSRC = _rtpSender.GenerateNewSSRC();
+    if (newSSRC == 0) {
+      // SSRC was configured via the API; leave it unchanged.
+      return;
+    }
+    if (kRtcpOff != _rtcpSender.Status()) {
+      // Send an RTCP BYE on the old SSRC before switching.
+      _rtcpSender.SendRTCP(kRtcpBye);
+    }
+    // Local SSRC changed.
+
+    // Inform the RTCP objects about the new local SSRC.
+    _rtcpSender.SetSSRC(newSSRC);
+    _rtcpReceiver.SetSSRC(newSSRC);
+  }
+}
+
+// Returns the momentary received bitrate as reported by the RTP receiver.
+WebRtc_UWord32 ModuleRtpRtcpImpl::BitrateReceivedNow() const {
+  return _rtpReceiver.BitrateNow();
+}
+
+// Report the current send-side bitrates. Each out-pointer may be NULL when
+// the caller is not interested in that rate. On the default module each
+// reported rate is the maximum over all child modules; on a leaf module
+// the rates come straight from the RTP sender.
+void ModuleRtpRtcpImpl::BitrateSent(WebRtc_UWord32* totalRate,
+                                    WebRtc_UWord32* videoRate,
+                                    WebRtc_UWord32* fecRate,
+                                    WebRtc_UWord32* nackRate) const {
+  if (!_childModules.empty()) {
+    // Default module: aggregate (max) over the children.
+    CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
+
+    if (totalRate != NULL)
+      *totalRate = 0;
+    if (videoRate != NULL)
+      *videoRate = 0;
+    if (fecRate != NULL)
+      *fecRate = 0;
+    if (nackRate != NULL)
+      *nackRate = 0;
+
+    for (std::list<ModuleRtpRtcpImpl*>::const_iterator it =
+           _childModules.begin(); it != _childModules.end(); ++it) {
+      RtpRtcp* childModule = *it;
+      if (childModule) {
+        WebRtc_UWord32 childTotal = 0;
+        WebRtc_UWord32 childVideo = 0;
+        WebRtc_UWord32 childFec = 0;
+        WebRtc_UWord32 childNack = 0;
+        childModule->BitrateSent(&childTotal, &childVideo,
+                                 &childFec, &childNack);
+        if (totalRate != NULL && childTotal > *totalRate)
+          *totalRate = childTotal;
+        if (videoRate != NULL && childVideo > *videoRate)
+          *videoRate = childVideo;
+        if (fecRate != NULL && childFec > *fecRate)
+          *fecRate = childFec;
+        if (nackRate != NULL && childNack > *nackRate)
+          *nackRate = childNack;
+      }
+    }
+    return;
+  }
+  // Leaf module: read the rates from our own RTP sender.
+  if (totalRate != NULL)
+    *totalRate = _rtpSender.BitrateLast();
+  if (videoRate != NULL)
+    *videoRate = _rtpSender.VideoBitrateSent();
+  if (fecRate != NULL)
+    *fecRate = _rtpSender.FecOverheadRate();
+  if (nackRate != NULL)
+    *nackRate = _rtpSender.NackOverheadRate();
+}
+
+// Query the bandwidth-management module for the currently estimated
+// available send bandwidth, returned via |available_bandwidth|.
+int ModuleRtpRtcpImpl::EstimatedSendBandwidth(
+    WebRtc_UWord32* available_bandwidth) const {
+  return _bandwidthManagement.AvailableBandwidth(available_bandwidth);
+}
+
+// Returns the latest receive-side bandwidth estimate held by the RTCP
+// sender via |available_bandwidth|. Returns -1 (leaving the output
+// untouched) when no valid estimate exists or the out-pointer is NULL.
+int ModuleRtpRtcpImpl::EstimatedReceiveBandwidth(
+    WebRtc_UWord32* available_bandwidth) const {
+  // Fail fast instead of dereferencing a NULL out-pointer below.
+  if (available_bandwidth == NULL)
+    return -1;
+  if (!_rtcpSender.ValidBitrateEstimate())
+    return -1;
+  *available_bandwidth = _rtcpSender.LatestBandwidthEstimate();
+  return 0;
+}
+
+// Lip-sync support: when NTP information arrives via RTCP, compute the
+// audio/video offset (ms) and export it through the RTCP receiver's
+// UpdateLipSync callback.
+void ModuleRtpRtcpImpl::OnReceivedNTP() {
+  // Don't do anything if we are the audio module; the video module is
+  // responsible for sync.
+  if (!_audio) {
+    WebRtc_Word32 diff = 0;
+    WebRtc_UWord32 receivedNTPsecs = 0;
+    WebRtc_UWord32 receivedNTPfrac = 0;
+    WebRtc_UWord32 RTCPArrivalTimeSecs = 0;
+    WebRtc_UWord32 RTCPArrivalTimeFrac = 0;
+
+    if (0 == _rtcpReceiver.NTP(&receivedNTPsecs,
+                               &receivedNTPfrac,
+                               &RTCPArrivalTimeSecs,
+                               &RTCPArrivalTimeFrac)) {
+      CriticalSectionScoped lock(_criticalSectionModulePtrs);
+
+      if (_audioModule) {
+        if (0 != _audioModule->RemoteNTP(&_receivedNTPsecsAudio,
+                                         &_receivedNTPfracAudio,
+                                         &_RTCPArrivalTimeSecsAudio,
+                                         &_RTCPArrivalTimeFracAudio)) {
+          // Failed to get audio NTP; nothing to sync against.
+          return;
+        }
+      }
+      if (_receivedNTPfracAudio != 0) {
+        // receivedNTPxxx is the NTP time at the sender side when sent.
+        // RTCPArrivalTimexxx is the NTP time at the receiver side when
+        // received. We can't use ConvertNTPTimeToMS since the difference
+        // can be negative.
+
+        WebRtc_Word32 NTPdiff = (WebRtc_Word32)
+                                ((_receivedNTPsecsAudio - receivedNTPsecs) *
+                                 1000); // ms
+        NTPdiff += (WebRtc_Word32)
+                   (_receivedNTPfracAudio / FracMS - receivedNTPfrac / FracMS);
+
+        WebRtc_Word32 RTCPdiff =
+            static_cast<WebRtc_Word32> ((_RTCPArrivalTimeSecsAudio -
+                                         RTCPArrivalTimeSecs) * 1000);
+        RTCPdiff += (WebRtc_Word32)
+                    (_RTCPArrivalTimeFracAudio / FracMS -
+                     RTCPArrivalTimeFrac / FracMS);
+
+        diff = NTPdiff - RTCPdiff;
+        // If diff is positive, video is behind.
+        if (diff < -1000 || diff > 1000) {
+          // Unreasonable value; ignore it (and skip the callback).
+          diff = 0;
+          return;
+        }
+      }
+    }
+    // Export via callback, after release of the critsect.
+    _rtcpReceiver.UpdateLipSync(diff);
+  }
+}
+
+// Called when our local bandwidth estimate is updated. Caps the estimate
+// at the application-configured max bitrate and, if TMMBR is enabled,
+// requests a TMMBR with the new value.
+void ModuleRtpRtcpImpl::OnBandwidthEstimateUpdate(
+  WebRtc_UWord16 bandWidthKbit) {
+
+  WebRtc_UWord32 maxBitrateKbit = _rtpReceiver.MaxConfiguredBitrate() / 1000;
+  if (maxBitrateKbit) {
+    // The app has set a max bitrate.
+    if (maxBitrateKbit < bandWidthKbit) {
+      // Cap TMMBR at the max configured bitrate.
+      bandWidthKbit = (WebRtc_UWord16)maxBitrateKbit;
+    }
+  }
+  if (_rtcpSender.TMMBR()) {
+    /* Maximum total media bit rate:
+        The upper limit on total media bit rate for a given media
+        stream at a particular receiver and for its selected protocol
+        layer.  Note that this value cannot be measured on the
+        received media stream.  Instead, it needs to be calculated or
+        determined through other means, such as quality of service
+        (QoS) negotiations or local resource limitations.  Also note
+        that this value is an average (on a timescale that is
+        reasonable for the application) and that it may be different
+        from the instantaneous bit rate seen by packets in the media
+        stream.
+    */
+    /* Overhead:
+        All protocol header information required to convey a packet
+        with media data from sender to receiver, from the application
+        layer down to a pre-defined protocol level (for example, down
+        to, and including, the IP header).  Overhead may include, for
+        example, IP, UDP, and RTP headers, any layer 2 headers, any
+        Contributing Sources (CSRCs), RTP padding, and RTP header
+        extensions.  Overhead excludes any RTP payload headers and the
+        payload itself.
+    */
+    // NOTE(review): presumably refreshes the packet-overhead figure used
+    // by the TMMBR calculation -- confirm.
+    _rtpReceiver.PacketOHReceived();
+
+    // Call RequestTMMBR when our locally created estimate changes.
+    _rtcpSender.RequestTMMBR(bandWidthKbit, 0);
+  }
+}
+
+// Feed an over-use detector sample into the RTCP sender's rate controller.
+// On the first transition into over-use, immediately compute a new target
+// bitrate and signal it via REMB, or send a TMMBR, whichever is enabled.
+RateControlRegion ModuleRtpRtcpImpl::OnOverUseStateUpdate(
+  const RateControlInput& rateControlInput) {
+  bool firstOverUse = false;
+  const RateControlRegion region =
+      _rtcpSender.UpdateOverUseState(rateControlInput, firstOverUse);
+  if (firstOverUse) {
+    // Send TMMBR or REMB immediately.
+    WebRtc_UWord16 rtt = 0;
+    _rtcpReceiver.RTT(_rtpReceiver.SSRC(), &rtt, NULL, NULL, NULL);
+    // Run the remote rate control first to obtain a target bit rate.
+    const unsigned int target_bitrate =
+        _rtcpSender.CalculateNewTargetBitrate(rtt);
+    if (REMB()) {
+      _rtcpSender.UpdateRemoteBitrateEstimate(target_bitrate);
+    } else if (TMMBR()) {
+      _rtcpSender.SendRTCP(kRtcpTmmbr);
+    }
+  }
+  return region;
+}
+
+// Callback from the RTP receiver: its state is bad enough to need a key
+// frame, so forward to RequestKeyFrame().
+void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
+  RequestKeyFrame();
+}
+
+// An intra-frame request arrived (|caller| is the requesting module). If a
+// default module exists the request is delegated there; otherwise, for
+// simulcast, the caller's stream index is computed and handed to the RTCP
+// receiver together with a key-frame request.
+void ModuleRtpRtcpImpl::OnReceivedIntraFrameRequest(const RtpRtcp* caller) {
+  if (_defaultModule) {
+    // NOTE(review): double-checked pattern -- _defaultModule is read outside
+    // the lock and re-checked inside; confirm this is safe here.
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    if (_defaultModule) {
+      // If we use a default module, pass this info to it.
+      _defaultModule->OnReceivedIntraFrameRequest(caller);
+      return;
+    }
+  }
+
+  WebRtc_UWord8 streamIdx = 0;
+  FrameType frameType = kVideoFrameKey;  // always request a full key frame
+  if (_simulcast) {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    // Loop through the child modules, counting up to the caller's index.
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      ModuleRtpRtcpImpl* childModule = *it;
+      if (childModule == caller) {
+        break;
+      }
+      streamIdx++;
+      it++;
+    }
+  }
+  _rtcpReceiver.OnReceivedIntraFrameRequest(frameType, streamIdx);
+}
+
+// We received a REMB (receiver estimated max bitrate). A child module hands
+// it to the default module's REMB instance; the default module runs it
+// through the bandwidth manager and, for simulcast, distributes the new
+// bitrate across the child streams (each stream gets up to its max, the
+// remainder goes to the following streams).
+void ModuleRtpRtcpImpl::OnReceivedEstimatedMaxBitrate(
+  const WebRtc_UWord32 maxBitrate) {
+  // TODO(mflodman) Split this function in two parts. One for the child module
+  // and one for the default module.
+
+  // We received a REMB.
+  if (_defaultModule) {
+    // Send this update to the REMB instance to take actions.
+    _rtcpSender.ReceivedRemb(maxBitrate);
+    return;
+  }
+
+  WebRtc_UWord32 newBitrate = 0;
+  WebRtc_UWord8 fractionLost = 0;
+  WebRtc_UWord16 roundTripTime = 0;
+  WebRtc_UWord16 bwEstimateKbit = WebRtc_UWord16(maxBitrate / 1000);
+  if (_bandwidthManagement.UpdateBandwidthEstimate(bwEstimateKbit,
+                                                   &newBitrate,
+                                                   &fractionLost,
+                                                   &roundTripTime) == 0) {
+    _rtpReceiver.UpdateBandwidthManagement(newBitrate,
+                                           fractionLost,
+                                           roundTripTime);
+
+    // We've received a new bandwidth estimate lower than the current send
+    // bitrate. For simulcast we need to update the sending bitrate for all
+    // streams.
+    if (_simulcast) {
+      CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
+      WebRtc_UWord8 idx = 0;
+      for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+           it != _childModules.end(); ++it) {
+        // NOTE(review): this stops at numberOfSimulcastStreams - 1, so the
+        // last configured stream is never updated here -- confirm off-by-one.
+        if (idx >= (_sendVideoCodec.numberOfSimulcastStreams - 1)) {
+          return;
+        }
+        ModuleRtpRtcpImpl* module = *it;
+        if (newBitrate >= _sendVideoCodec.simulcastStream[idx].maxBitrate) {
+          // This stream is fully funded; spend its max and carry the rest.
+          module->_bandwidthManagement.SetSendBitrate(
+            _sendVideoCodec.simulcastStream[idx].maxBitrate, 0, 0);
+          module->_rtpSender.SetTargetSendBitrate(
+            _sendVideoCodec.simulcastStream[idx].maxBitrate);
+
+          newBitrate -= _sendVideoCodec.simulcastStream[idx].maxBitrate;
+        } else {
+          // Remaining budget goes to this stream; nothing left after it.
+          module->_bandwidthManagement.SetSendBitrate(newBitrate, 0, 0);
+          module->_rtpSender.SetTargetSendBitrate(newBitrate);
+          newBitrate -= newBitrate;
+        }
+        idx++;
+      }
+    }
+  }
+  // For non-simulcast, update all child modules with the new bandwidth estimate
+  // regardless of the new estimate.
+  if (!_simulcast) {
+    // Update all child modules with the new max bitrate before exiting.
+    CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+         it != _childModules.end(); ++it) {
+      // Update all child modules with the maximum bitrate estimate.
+      ModuleRtpRtcpImpl* module = *it;
+      WebRtc_UWord32 ignoreBitrate = 0;
+      WebRtc_UWord8 ignoreFractionLost = 0;
+      WebRtc_UWord16 ignoreRoundTripTime = 0;
+      module->_bandwidthManagement.UpdateBandwidthEstimate(
+        bwEstimateKbit,
+        &ignoreBitrate,
+        &ignoreFractionLost,
+        &ignoreRoundTripTime);
+      // We don't need to take care of a possible lowered bitrate, that is
+      // handled earlier in this function for the default module.
+    }
+  }
+}
+
+// A Slice Loss Indication was received. Delegate to the default module when
+// one exists; otherwise hand it to the RTCP receiver.
+void ModuleRtpRtcpImpl::OnReceivedSliceLossIndication(
+  const WebRtc_UWord8 pictureID) {
+
+  if (_defaultModule) {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    if (_defaultModule) {
+      // If we use a default module, pass this info to it.
+      _defaultModule->OnReceivedSliceLossIndication(pictureID);
+      return;
+    }
+  }
+  _rtcpReceiver.OnReceivedSliceLossIndication(pictureID);
+}
+
+// A Reference Picture Selection Indication was received for |pictureID|.
+// Delegate to the default module when one exists; otherwise hand it to the
+// RTCP receiver.
+void ModuleRtpRtcpImpl::OnReceivedReferencePictureSelectionIndication(
+  const WebRtc_UWord64 pictureID) {
+
+  if (_defaultModule) {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    if (_defaultModule) {
+      // If we use a default module, pass this info to it.
+      _defaultModule->OnReceivedReferencePictureSelectionIndication(
+        pictureID);
+      return;
+    }
+  }
+  _rtcpReceiver.OnReceivedReferencePictureSelectionIndication(pictureID);
+}
+
+// We received a TMMBR-based bandwidth estimate. The default module
+// recomputes its aggregate state; an audio module hands the estimate to
+// the RTCP receiver; a video module runs it through the bandwidth manager
+// and adjusts the target send bitrate. Finally the update is forwarded to
+// the default module when one is registered.
+void ModuleRtpRtcpImpl::OnReceivedBandwidthEstimateUpdate(
+  const WebRtc_UWord16 bwEstimateKbit) {
+
+  // We received a TMMBR.
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance) {
+    ProcessDefaultModuleBandwidth();
+    return;
+  }
+  if (_audio) {
+    _rtcpReceiver.UpdateBandwidthEstimate(bwEstimateKbit);
+  } else {
+    WebRtc_UWord32 newBitrate = 0;
+    WebRtc_UWord8 fractionLost = 0;
+    WebRtc_UWord16 roundTripTime = 0;
+    if (_bandwidthManagement.UpdateBandwidthEstimate(bwEstimateKbit,
+                                                     &newBitrate,
+                                                     &fractionLost,
+                                                     &roundTripTime) == 0) {
+      if (!_defaultModule) {
+        // No default module; check if we should trigger OnNetworkChanged
+        // via the video callback.
+        _rtpReceiver.UpdateBandwidthManagement(newBitrate,
+                                               fractionLost,
+                                               roundTripTime);
+      }
+      if (newBitrate > 0) {
+        // Update the target bitrate.
+        _rtpSender.SetTargetSendBitrate(newBitrate);
+      }
+    }
+  }
+  if (_defaultModule) {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    if (_defaultModule) {
+      // If we use a default module, pass this info to it.
+      _defaultModule->OnReceivedBandwidthEstimateUpdate(bwEstimateKbit);
+      return;
+    }
+  }
+}
+
+// Bandwidth estimation: we received an RTCP report block. A leaf module
+// updates its loss-based estimate, then either forwards the filtered loss
+// to the default module or applies the new bitrate locally. The default
+// module aggregates via ProcessDefaultModuleBandwidth(), or, for
+// simulcast, distributes the new bitrate across the child streams.
+void ModuleRtpRtcpImpl::OnPacketLossStatisticsUpdate(
+  const WebRtc_UWord8 fractionLost,
+  const WebRtc_UWord16 roundTripTime,
+  const WebRtc_UWord32 lastReceivedExtendedHighSeqNum) {
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (!defaultInstance) {
+    WebRtc_UWord32 newBitrate = 0;
+    WebRtc_UWord8 loss = fractionLost;  // local copy since it can change
+    WebRtc_UWord32 videoRate = 0;
+    WebRtc_UWord32 fecRate = 0;
+    WebRtc_UWord32 nackRate = 0;
+    BitrateSent(NULL, &videoRate, &fecRate, &nackRate);
+    if (_bandwidthManagement.UpdatePacketLoss(
+          lastReceivedExtendedHighSeqNum,
+          videoRate + fecRate + nackRate,
+          roundTripTime,
+          &loss,
+          &newBitrate,
+          _clock.GetTimeInMS()) != 0) {
+      // Ignore this update.
+      return;
+    }
+    // We need to update the RTP sender before calling the default module in
+    // case we'll strip any layers.
+    if (!_simulcast) {
+      // The default module will inform all child modules about their bitrate.
+      _rtpSender.SetTargetSendBitrate(newBitrate);
+    }
+    if (_defaultModule) {
+      // If we have a default module, update it.
+      CriticalSectionScoped lock(_criticalSectionModulePtrs);
+      if (_defaultModule) {  // we need to check again inside the critsect
+        // If we use a default module, pass this info to it.
+        _defaultModule->OnPacketLossStatisticsUpdate(
+          loss,  // send in the filtered loss
+          roundTripTime,
+          lastReceivedExtendedHighSeqNum);
+      }
+      return;
+    }
+    // NOTE(review): the raw |fractionLost| is passed here, while the
+    // simulcast branch below passes the filtered |loss| -- confirm intent.
+    _rtpReceiver.UpdateBandwidthManagement(newBitrate,
+                                           fractionLost,
+                                           roundTripTime);
+  } else {
+    if (!_simulcast) {
+      ProcessDefaultModuleBandwidth();
+    } else {
+      // Default and simulcast.
+      WebRtc_UWord32 newBitrate = 0;
+      WebRtc_UWord8 loss = fractionLost;  // local copy
+      WebRtc_UWord32 videoRate = 0;
+      WebRtc_UWord32 fecRate = 0;
+      WebRtc_UWord32 nackRate = 0;
+      BitrateSent(NULL, &videoRate, &fecRate, &nackRate);
+      if (_bandwidthManagement.UpdatePacketLoss(0,  // we can't use this
+                                                videoRate + fecRate + nackRate,
+                                                roundTripTime,
+                                                &loss,
+                                                &newBitrate,
+                                                _clock.GetTimeInMS()) != 0) {
+        // Ignore this update.
+        return;
+      }
+      _rtpSender.SetTargetSendBitrate(newBitrate);
+      _rtpReceiver.UpdateBandwidthManagement(newBitrate,
+                                             loss,
+                                             roundTripTime);
+      // Sanity: bail if no send codec has been configured yet.
+      if (_sendVideoCodec.codecType == kVideoCodecUnknown) {
+        return;
+      }
+      CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
+      std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+      WebRtc_UWord8 idx = 0;
+      while (it != _childModules.end()) {
+        // NOTE(review): this stops at numberOfSimulcastStreams - 1, so the
+        // last configured stream is never updated here -- confirm off-by-one.
+        if (idx >= (_sendVideoCodec.numberOfSimulcastStreams - 1)) {
+          return;
+        }
+        ModuleRtpRtcpImpl* module = *it;
+        // Update all child modules: each stream gets up to its max bitrate;
+        // the remainder is carried over to the following streams.
+        if (newBitrate >=
+            _sendVideoCodec.simulcastStream[idx].maxBitrate) {
+          module->_bandwidthManagement.SetSendBitrate(
+            _sendVideoCodec.simulcastStream[idx].maxBitrate, 0, 0);
+          module->_rtpSender.SetTargetSendBitrate(
+            _sendVideoCodec.simulcastStream[idx].maxBitrate);
+
+          newBitrate -=
+            _sendVideoCodec.simulcastStream[idx].maxBitrate;
+        } else {
+          module->_bandwidthManagement.SetSendBitrate(newBitrate,
+                                                      0,
+                                                      0);
+          module->_rtpSender.SetTargetSendBitrate(newBitrate);
+          newBitrate -= newBitrate;
+        }
+        idx++;
+      }
+    }
+  }
+}
+
+// Aggregate bandwidth state across all sending child modules of the
+// default module: compute the min/max child target bitrate, the average
+// reported fraction lost, and the max RTT; then feed the minimum bitrate
+// into the bandwidth manager and the receiver callback.
+void ModuleRtpRtcpImpl::ProcessDefaultModuleBandwidth() {
+
+  WebRtc_UWord32 minBitrateBps = 0xffffffff;
+  WebRtc_UWord32 maxBitrateBps = 0;
+  WebRtc_UWord32 count = 0;
+  WebRtc_UWord32 fractionLostAcc = 0;
+  WebRtc_UWord16 maxRoundTripTime = 0;
+  {
+    // Get min and max for the sending channels.
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+         it != _childModules.end(); ++ it) {
+      // Get child RTP sender and ask for bitrate estimate.
+      ModuleRtpRtcpImpl* childModule = *it;
+      if (childModule->Sending()) {
+        RTPSender& childRtpSender = (*it)->_rtpSender;
+        const WebRtc_UWord32 childEstimateBps =
+          1000 * childRtpSender.TargetSendBitrateKbit();
+        if (childEstimateBps < minBitrateBps) {
+          minBitrateBps = childEstimateBps;
+        }
+        if (childEstimateBps > maxBitrateBps) {
+          maxBitrateBps = childEstimateBps;
+        }
+        RTCPReceiver& childRtcpReceiver = (*it)->_rtcpReceiver;
+
+        // Accumulate loss and RTT over all report blocks of this child.
+        std::vector<RTCPReportBlock> rtcp_blocks;
+        childRtcpReceiver.StatisticsReceived(&rtcp_blocks);
+        for (std::vector<RTCPReportBlock>::iterator rit = rtcp_blocks.begin();
+             rit != rtcp_blocks.end(); ++rit) {
+          count++;
+          fractionLostAcc += rit->fractionLost;
+          WebRtc_UWord16 RTT = 0;
+          childRtcpReceiver.RTT(rit->remoteSSRC, &RTT, NULL, NULL, NULL);
+          maxRoundTripTime = (RTT > maxRoundTripTime) ? RTT : maxRoundTripTime;
+        }
+      }
+    }
+  }  // end critsect
+
+  if (count == 0) {
+    // No sending modules and no bitrate estimate.
+    return;
+  }
+
+  // Update RTT to all receive only child modules, they won't have their own RTT
+  // estimate. Assume the receive only channels are on similar links as the
+  // sending channel and have approximately the same RTT.
+  {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs);
+    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+        it != _childModules.end(); ++it) {
+      if (!(*it)->Sending()) {
+        (*it)->_rtcpReceiver.SetRTT(maxRoundTripTime);
+      }
+    }
+  }
+
+  // NOTE(review): maxBitrateBps is computed above but never used -- confirm.
+  _bandwidthManagement.SetSendBitrate(minBitrateBps, 0, 0);
+
+  // Update default module bitrate. Don't care about min max.
+  WebRtc_UWord8 fractionLostAvg = WebRtc_UWord8(fractionLostAcc / count);
+  _rtpReceiver.UpdateBandwidthManagement(minBitrateBps,
+                                         fractionLostAvg ,
+                                         maxRoundTripTime);
+}
+
+// Callback requesting that an RTCP sender report (SR) be sent immediately.
+void ModuleRtpRtcpImpl::OnRequestSendReport() {
+  _rtcpSender.SendRTCP(kRtcpSr);
+}
+
+// Send an RTCP Reference Picture Selection Indication for |pictureID|.
+WebRtc_Word32 ModuleRtpRtcpImpl::SendRTCPReferencePictureSelection(
+  const WebRtc_UWord64 pictureID) {
+  return _rtcpSender.SendRTCP(kRtcpRpsi, 0, 0, false, pictureID);
+}
+
+// Look up (via the RTCP sender) the local send time of the sender report
+// identified by |sendReport|.
+WebRtc_UWord32 ModuleRtpRtcpImpl::SendTimeOfSendReport(
+  const WebRtc_UWord32 sendReport) {
+  return _rtcpSender.SendTimeOfSendReport(sendReport);
+}
+
+// Handle an incoming NACK list: hand the sequence numbers and the average
+// RTT to the RTP sender for retransmission. Ignored when packet storage is
+// disabled or the list is empty/NULL.
+void ModuleRtpRtcpImpl::OnReceivedNACK(
+  const WebRtc_UWord16 nackSequenceNumbersLength,
+  const WebRtc_UWord16* nackSequenceNumbers) {
+  const bool canRetransmit = _rtpSender.StorePackets() &&
+                             nackSequenceNumbers != NULL &&
+                             nackSequenceNumbersLength != 0;
+  if (!canRetransmit) {
+    return;
+  }
+  WebRtc_UWord16 avgRoundTripTime = 0;
+  _rtcpReceiver.RTT(_rtpReceiver.SSRC(), NULL, &avgRoundTripTime, NULL, NULL);
+  _rtpSender.OnReceivedNACK(nackSequenceNumbersLength, nackSequenceNumbers,
+                            avgRoundTripTime);
+}
+
+// Return the arrival time of the last received RTCP sender report and its
+// compact NTP timestamp (middle 16 bits of the seconds and of the
+// fraction). Returns -1 when no report has been received.
+WebRtc_Word32 ModuleRtpRtcpImpl::LastReceivedNTP(
+  WebRtc_UWord32& RTCPArrivalTimeSecs,  // when we received the last report
+  WebRtc_UWord32& RTCPArrivalTimeFrac,
+  WebRtc_UWord32& remoteSR) {
+  WebRtc_UWord32 ntpSecs = 0;
+  WebRtc_UWord32 ntpFrac = 0;
+
+  if (-1 == _rtcpReceiver.NTP(&ntpSecs,
+                              &ntpFrac,
+                              &RTCPArrivalTimeSecs,
+                              &RTCPArrivalTimeFrac)) {
+    return -1;
+  }
+  // Compact NTP: low 16 bits of the seconds, high 16 bits of the fraction.
+  remoteSR = ((ntpSecs & 0x0000ffff) << 16) + ((ntpFrac & 0xffff0000) >> 16);
+  return 0;
+}
+
+// We received a TMMBR in an RTCP packet: recompute the bounding set and
+// answer with a TMMBN.
+void ModuleRtpRtcpImpl::OnReceivedTMMBR() {
+  UpdateTMMBR();
+}
+
+// Returns true when this channel's RTCP receive information has timed out.
+// Callers should check this periodically and, when true, call UpdateTMMBR.
+bool ModuleRtpRtcpImpl::UpdateRTCPReceiveInformationTimers() {
+  return _rtcpReceiver.UpdateRTCPReceiveInformationTimers();
+}
+
+// Recompute the TMMBR bounding set from all received TMMBR requests, set a
+// TMMBN answer on the RTCP sender and, when the resulting bitrate changes,
+// feed the new minimum back into the bandwidth-estimate path. Returns -1
+// on the default module or when the bounding set cannot be found.
+WebRtc_Word32 ModuleRtpRtcpImpl::UpdateTMMBR() {
+  WebRtc_Word32 numBoundingSet = 0;
+  WebRtc_Word32 newBitrates = 0;
+  WebRtc_UWord32 minBitrateKbit = 0;
+  WebRtc_UWord32 maxBitrateKbit = 0;
+  WebRtc_UWord32 accNumCandidates = 0;
+
+  if (!_childModules.empty()) {
+    // The default module should not handle this.
+    return -1;
+  }
+
+  WebRtc_Word32 size = _rtcpReceiver.TMMBRReceived(0, 0, NULL);
+  if (size > 0) {
+    TMMBRSet* candidateSet = VerifyAndAllocateCandidateSet(size);
+    // Get the candidate set from the receiver.
+    accNumCandidates = _rtcpReceiver.TMMBRReceived(size,
+                                                   accNumCandidates,
+                                                   candidateSet);
+  } else {
+    // Candidate set is empty.
+    VerifyAndAllocateCandidateSet(0);  // resets candidate set
+  }
+  // Find the bounding set.
+  TMMBRSet* boundingSet = NULL;
+  numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+  if (numBoundingSet == -1) {
+    WEBRTC_TRACE(kTraceWarning,
+                 kTraceRtpRtcp,
+                 _id,
+                 "Failed to find TMMBR bounding set.");
+    return -1;
+  }
+  // Set the bounding set and inform the remote client about the new
+  // bandwidth; might trigger a TMMBN.
+  _rtcpSender.SetTMMBN(boundingSet,
+                       _rtpSender.MaxConfiguredBitrateVideo() / 1000);
+  if (numBoundingSet == 0) {
+    // The owner of the max bitrate request has timed out; an empty
+    // bounding set has been sent.
+    return 0;
+  }
+  // Get the net bitrate from the bounding set depending on sent packet rate.
+  newBitrates = CalcMinMaxBitRate(_rtpSender.PacketRate(),
+                                  (WebRtc_UWord32)numBoundingSet,
+                                  minBitrateKbit,
+                                  maxBitrateKbit);
+
+  // No critsect held when calling out to "unknown" code.
+  if (newBitrates == 0) {
+    // We have a new bandwidth estimate on this channel; apply the new
+    // minimum as the max bitrate.
+    OnReceivedBandwidthEstimateUpdate((WebRtc_UWord16)minBitrateKbit);
+    WEBRTC_TRACE(kTraceStream,
+                 kTraceRtpRtcp,
+                 _id,
+                 "Set TMMBR request min:%d kbps max:%d kbps, channel: %d",
+                 minBitrateKbit, maxBitrateKbit, _id);
+  }
+  return 0;
+}
+
+// Called from the RTCP sender: fetch the current TMMBR bounding set and
+// whether we own an entry in it.
+WebRtc_Word32 ModuleRtpRtcpImpl::BoundingSet(bool& tmmbrOwner,
+                                             TMMBRSet*& boundingSet) {
+  return _rtcpReceiver.BoundingSet(tmmbrOwner,
+                                   boundingSet);
+}
+
+// Trigger the key-frame request path as if an intra-frame request had
+// arrived, with no specific requesting child module.
+void ModuleRtpRtcpImpl::SendKeyFrame() {
+  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "SendKeyFrame()");
+  // Pass NULL (not the integer literal 0) for the caller pointer argument.
+  OnReceivedIntraFrameRequest(NULL);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_impl.h
new file mode 100644
index 0000000..e16bf71
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -0,0 +1,622 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
+
+#include <list>
+
+#include "bandwidth_management.h"
+#include "rtcp_receiver.h"
+#include "rtcp_sender.h"
+#include "rtp_receiver.h"
+#include "rtp_rtcp.h"
+#include "rtp_sender.h"
+
+#ifdef MATLAB
+class MatlabPlot;
+#endif
+
+namespace webrtc {
+
+class ModuleRtpRtcpImpl : public RtpRtcp, private TMMBRHelp
+{
+public:
+    ModuleRtpRtcpImpl(const WebRtc_Word32 id,
+                      const bool audio,
+                      RtpRtcpClock* clock);
+
+    virtual ~ModuleRtpRtcpImpl();
+
+    // get Module ID
+    WebRtc_Word32 Id()   {return _id;}
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    // De-muxing functionality: register/deregister a default (parent) module
+    virtual WebRtc_Word32 RegisterDefaultModule(RtpRtcp* module);
+    virtual WebRtc_Word32 DeRegisterDefaultModule();
+    virtual bool DefaultModuleRegistered();
+
+    virtual WebRtc_UWord32 NumberChildModules();
+
+    // Lip-sync between voice-video
+    virtual WebRtc_Word32 RegisterSyncModule(RtpRtcp* module);
+    virtual WebRtc_Word32 DeRegisterSyncModule();
+
+    virtual WebRtc_Word32 RegisterVideoModule(RtpRtcp* videoModule);
+    virtual void DeRegisterVideoModule();
+
+    // returns the number of milliseconds until the module want a worker thread to call Process
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+
+    // Process any pending tasks such as timeouts
+    virtual WebRtc_Word32 Process();
+
+    /**
+    *   Receiver
+    */
+    virtual WebRtc_Word32 InitReceiver();
+
+    // configure a timeout value
+    virtual WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 RTPtimeoutMS,
+                                           const WebRtc_UWord32 RTCPtimeoutMS);
+
+    // Set periodic dead or alive notification
+    virtual WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
+        const bool enable,
+        const WebRtc_UWord8 sampleTimeSeconds);
+
+    // Get periodic dead or alive notification status
+    virtual WebRtc_Word32 PeriodicDeadOrAliveStatus(
+        bool &enable,
+        WebRtc_UWord8 &sampleTimeSeconds);
+
+    virtual WebRtc_Word32 RegisterReceivePayload(const CodecInst& voiceCodec);
+
+    virtual WebRtc_Word32 RegisterReceivePayload(const VideoCodec& videoCodec);
+
+    virtual WebRtc_Word32 ReceivePayloadType(const CodecInst& voiceCodec,
+                                             WebRtc_Word8* plType);
+
+    virtual WebRtc_Word32 ReceivePayloadType(const VideoCodec& videoCodec,
+                                             WebRtc_Word8* plType);
+
+    virtual WebRtc_Word32 DeRegisterReceivePayload(
+        const WebRtc_Word8 payloadType);
+
+    // register RTP header extension
+    virtual WebRtc_Word32 RegisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id);
+
+    virtual WebRtc_Word32 DeregisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type);
+
+    // get the currently configured SSRC filter
+    virtual WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const;
+
+    // set a SSRC to be used as a filter for incoming RTP streams
+    virtual WebRtc_Word32 SetSSRCFilter(const bool enable, const WebRtc_UWord32 allowedSSRC);
+
+    // Get last received remote timestamp
+    virtual WebRtc_UWord32 RemoteTimestamp() const;
+
+    // Get the current estimated remote timestamp
+    virtual WebRtc_Word32 EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const;
+
+    virtual WebRtc_UWord32 RemoteSSRC() const;
+
+    virtual WebRtc_Word32 RemoteCSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const ;
+
+    virtual WebRtc_Word32 SetRTXReceiveStatus(const bool enable,
+                                              const WebRtc_UWord32 SSRC);
+
+    virtual WebRtc_Word32 RTXReceiveStatus(bool* enable,
+                                           WebRtc_UWord32* SSRC) const;
+
+    // called by the network module when we receive a packet
+    virtual WebRtc_Word32 IncomingPacket( const WebRtc_UWord8* incomingPacket,
+                                        const WebRtc_UWord16 packetLength);
+
+    virtual WebRtc_Word32 IncomingAudioNTP(const WebRtc_UWord32 audioReceivedNTPsecs,
+                                         const WebRtc_UWord32 audioReceivedNTPfrac,
+                                         const WebRtc_UWord32 audioRTCPArrivalTimeSecs,
+                                         const WebRtc_UWord32 audioRTCPArrivalTimeFrac);
+
+    // Used by the module to deliver the incoming data to the codec module
+    virtual WebRtc_Word32 RegisterIncomingDataCallback(RtpData* incomingDataCallback);
+
+    // Used by the module to deliver messages to the codec module/application
+    virtual WebRtc_Word32 RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback);
+
+    virtual WebRtc_Word32 RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback);
+
+    virtual WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback);
+
+    virtual WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback);
+
+    /**
+    *   Sender
+    */
+    virtual WebRtc_Word32 InitSender();
+
+    virtual WebRtc_Word32 SetRTPKeepaliveStatus(const bool enable,
+                                              const WebRtc_Word8 unknownPayloadType,
+                                              const WebRtc_UWord16 deltaTransmitTimeMS);
+
+    virtual WebRtc_Word32 RTPKeepaliveStatus(bool* enable,
+                                           WebRtc_Word8* unknownPayloadType,
+                                           WebRtc_UWord16* deltaTransmitTimeMS) const;
+
+    virtual bool RTPKeepalive() const;
+
+    virtual WebRtc_Word32 RegisterSendPayload(const CodecInst& voiceCodec);
+
+    virtual WebRtc_Word32 RegisterSendPayload(const VideoCodec& videoCodec);
+
+    virtual WebRtc_Word32 DeRegisterSendPayload(const WebRtc_Word8 payloadType);
+
+    virtual WebRtc_Word8 SendPayloadType() const;
+
+    // register RTP header extension
+    virtual WebRtc_Word32 RegisterSendRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id);
+
+    virtual WebRtc_Word32 DeregisterSendRtpHeaderExtension(
+        const RTPExtensionType type);
+
+    virtual void SetTransmissionSmoothingStatus(const bool enable);
+
+    virtual bool TransmissionSmoothingStatus() const;
+
+    // get start timestamp
+    virtual WebRtc_UWord32 StartTimestamp() const;
+
+    // configure start timestamp, default is a random number
+    virtual WebRtc_Word32 SetStartTimestamp(const WebRtc_UWord32 timestamp);
+
+    virtual WebRtc_UWord16 SequenceNumber() const;
+
+    // Set SequenceNumber, default is a random number
+    virtual WebRtc_Word32 SetSequenceNumber(const WebRtc_UWord16 seq);
+
+    virtual WebRtc_UWord32 SSRC() const;
+
+    // configure SSRC, default is a random number
+    virtual WebRtc_Word32 SetSSRC(const WebRtc_UWord32 ssrc);
+
+    virtual WebRtc_Word32 CSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const ;
+
+    virtual WebRtc_Word32 SetCSRCs( const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                                  const WebRtc_UWord8 arrLength);
+
+    virtual WebRtc_Word32 SetCSRCStatus(const bool include);
+
+    virtual WebRtc_UWord32 PacketCountSent() const;
+
+    virtual int CurrentSendFrequencyHz() const;
+
+    virtual WebRtc_UWord32 ByteCountSent() const;
+
+    virtual WebRtc_Word32 SetRTXSendStatus(const bool enable,
+                                           const bool setSSRC,
+                                           const WebRtc_UWord32 SSRC);
+
+    virtual WebRtc_Word32 RTXSendStatus(bool* enable,
+                                        WebRtc_UWord32* SSRC) const;
+
+    // sends kRtcpByeCode when going from true to false
+    virtual WebRtc_Word32 SetSendingStatus(const bool sending);
+
+    virtual bool Sending() const;
+
+    // Drops or relays media packets
+    virtual WebRtc_Word32 SetSendingMediaStatus(const bool sending);
+
+    virtual bool SendingMedia() const;
+
+    // Used by the module to send RTP and RTCP packet to the network module
+    virtual WebRtc_Word32 RegisterSendTransport(Transport* outgoingTransport);
+
+    // Used by the codec module to deliver a video or audio frame for packetization
+    virtual WebRtc_Word32 SendOutgoingData(
+        const FrameType frameType,
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader* fragmentation = NULL,
+        const RTPVideoHeader* rtpVideoHdr = NULL);
+
+    /*
+    *   RTCP
+    */
+
+    // Get RTCP status
+    virtual RTCPMethod RTCP() const;
+
+    // configure RTCP status i.e on/off
+    virtual WebRtc_Word32 SetRTCPStatus(const RTCPMethod method);
+
+    // Set RTCP CName
+    virtual WebRtc_Word32 SetCNAME(const char cName[RTCP_CNAME_SIZE]);
+
+    // Get RTCP CName
+    virtual WebRtc_Word32 CNAME(char cName[RTCP_CNAME_SIZE]);
+
+    // Get remote CName
+    virtual WebRtc_Word32 RemoteCNAME(const WebRtc_UWord32 remoteSSRC,
+                                      char cName[RTCP_CNAME_SIZE]) const;
+
+    // Get remote NTP
+    virtual WebRtc_Word32 RemoteNTP(WebRtc_UWord32 *ReceivedNTPsecs,
+                                  WebRtc_UWord32 *ReceivedNTPfrac,
+                                  WebRtc_UWord32 *RTCPArrivalTimeSecs,
+                                  WebRtc_UWord32 *RTCPArrivalTimeFrac) const ;
+
+    virtual WebRtc_Word32 AddMixedCNAME(const WebRtc_UWord32 SSRC,
+                                        const char cName[RTCP_CNAME_SIZE]);
+
+    virtual WebRtc_Word32 RemoveMixedCNAME(const WebRtc_UWord32 SSRC);
+
+    // Get RoundTripTime
+    virtual WebRtc_Word32 RTT(const WebRtc_UWord32 remoteSSRC,
+                            WebRtc_UWord16* RTT,
+                            WebRtc_UWord16* avgRTT,
+                            WebRtc_UWord16* minRTT,
+                            WebRtc_UWord16* maxRTT) const;
+
+    // Reset RoundTripTime statistics
+    virtual WebRtc_Word32 ResetRTT(const WebRtc_UWord32 remoteSSRC);
+
+    // Force a send of an RTCP packet
+    // normal SR and RR are triggered via the process function
+    virtual WebRtc_Word32 SendRTCP(WebRtc_UWord32 rtcpPacketType = kRtcpReport);
+
+    // statistics, locally created, of the received RTP stream
+    virtual WebRtc_Word32 StatisticsRTP(WebRtc_UWord8  *fraction_lost,
+                                      WebRtc_UWord32 *cum_lost,
+                                      WebRtc_UWord32 *ext_max,
+                                      WebRtc_UWord32 *jitter,
+                                      WebRtc_UWord32 *max_jitter = NULL) const;
+
+    // Reset RTP statistics
+    virtual WebRtc_Word32 ResetStatisticsRTP();
+
+    virtual WebRtc_Word32 ResetReceiveDataCountersRTP();
+
+    virtual WebRtc_Word32 ResetSendDataCountersRTP();
+
+    // statistics of the amount of data sent and received
+    virtual WebRtc_Word32 DataCountersRTP(WebRtc_UWord32 *bytesSent,
+                                          WebRtc_UWord32 *packetsSent,
+                                          WebRtc_UWord32 *bytesReceived,
+                                          WebRtc_UWord32 *packetsReceived) const;
+
+    virtual WebRtc_Word32 ReportBlockStatistics(
+        WebRtc_UWord8 *fraction_lost,
+        WebRtc_UWord32 *cum_lost,
+        WebRtc_UWord32 *ext_max,
+        WebRtc_UWord32 *jitter,
+        WebRtc_UWord32 *jitter_transmission_time_offset);
+
+    // Get received RTCP report, sender info
+    virtual WebRtc_Word32 RemoteRTCPStat( RTCPSenderInfo* senderInfo);
+
+    // Get received RTCP report, report block
+    virtual WebRtc_Word32 RemoteRTCPStat(
+        std::vector<RTCPReportBlock>* receiveBlocks) const;
+
+    // Set received RTCP report block
+    virtual WebRtc_Word32 AddRTCPReportBlock(const WebRtc_UWord32 SSRC,
+                                           const RTCPReportBlock* receiveBlock);
+
+    virtual WebRtc_Word32 RemoveRTCPReportBlock(const WebRtc_UWord32 SSRC);
+
+    /*
+    *  (REMB) Receiver Estimated Max Bitrate
+    */
+    virtual bool REMB() const;
+
+    virtual WebRtc_Word32 SetREMBStatus(const bool enable);
+
+    virtual WebRtc_Word32 SetREMBData(const WebRtc_UWord32 bitrate,
+                                      const WebRtc_UWord8 numberOfSSRC,
+                                      const WebRtc_UWord32* SSRC);
+
+    virtual WebRtc_Word32 SetMaximumBitrateEstimate(
+        const WebRtc_UWord32 bitrate);
+
+    virtual bool SetRemoteBitrateObserver(RtpRemoteBitrateObserver* observer);
+    /*
+    *   (IJ) Extended jitter report.
+    */
+    virtual bool IJ() const;
+
+    virtual WebRtc_Word32 SetIJStatus(const bool enable);
+
+    /*
+    *   (TMMBR) Temporary Max Media Bit Rate
+    */
+    virtual bool TMMBR() const ;
+
+    virtual WebRtc_Word32 SetTMMBRStatus(const bool enable);
+
+    virtual WebRtc_Word32 TMMBRReceived(const WebRtc_UWord32 size,
+                                      const WebRtc_UWord32 accNumCandidates,
+                                      TMMBRSet* candidateSet) const;
+
+    virtual WebRtc_Word32 SetTMMBN(const TMMBRSet* boundingSet,
+                                 const WebRtc_UWord32 maxBitrateKbit);
+
+    virtual WebRtc_Word32 RequestTMMBR(const WebRtc_UWord32 estimatedBW,
+                                     const WebRtc_UWord32 packetOH);
+
+    virtual WebRtc_UWord16 MaxPayloadLength() const;
+
+    virtual WebRtc_UWord16 MaxDataPayloadLength() const;
+
+    virtual WebRtc_Word32 SetMaxTransferUnit(const WebRtc_UWord16 size);
+
+    virtual WebRtc_Word32 SetTransportOverhead(const bool TCP,
+                                             const bool IPV6,
+                                             const WebRtc_UWord8 authenticationOverhead = 0);
+
+    /*
+    *   (NACK) Negative acknowledgement
+    */
+
+    // Is Negative acknowledgement requests on/off?
+    virtual NACKMethod NACK() const ;
+
+    // Turn negative acknowledgement requests on/off
+    virtual WebRtc_Word32 SetNACKStatus(const NACKMethod method);
+
+    virtual int SelectiveRetransmissions() const;
+
+    virtual int SetSelectiveRetransmissions(uint8_t settings);
+
+    // Send a Negative acknowledgement packet
+    virtual WebRtc_Word32 SendNACK(const WebRtc_UWord16* nackList,
+                                   const WebRtc_UWord16 size);
+
+    // Store the sent packets, needed to answer to a Negative acknowledgement requests
+    virtual WebRtc_Word32 SetStorePacketsStatus(const bool enable, const WebRtc_UWord16 numberToStore = 200);
+
+    /*
+    *   (APP) Application specific data
+    */
+    virtual WebRtc_Word32 SetRTCPApplicationSpecificData(const WebRtc_UWord8 subType,
+                                                       const WebRtc_UWord32 name,
+                                                       const WebRtc_UWord8* data,
+                                                       const WebRtc_UWord16 length);
+    /*
+    *   (XR) VOIP metric
+    */
+    virtual WebRtc_Word32 SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric);
+
+    /*
+    *   Audio
+    */
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    virtual WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
+
+    // Outband DTMF detection
+    virtual WebRtc_Word32 SetTelephoneEventStatus(const bool enable,
+                                                const bool forwardToDecoder,
+                                                const bool detectEndOfTone = false);
+
+    // Is outband DTMF turned on/off?
+    virtual bool TelephoneEvent() const;
+
+    // Is forwarding of outband telephone events turned on/off?
+    virtual bool TelephoneEventForwardToDecoder() const;
+
+    virtual bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
+
+    // Send a TelephoneEvent tone using RFC 2833 (4733)
+    virtual WebRtc_Word32 SendTelephoneEventOutband(const WebRtc_UWord8 key,
+                                                  const WebRtc_UWord16 time_ms,
+                                                  const WebRtc_UWord8 level);
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    virtual WebRtc_Word32 SetSendREDPayloadType(const WebRtc_Word8 payloadType);
+
+    // Get payload type for Redundant Audio Data RFC 2198
+    virtual WebRtc_Word32 SendREDPayloadType(WebRtc_Word8& payloadType) const;
+
+    // Set status and ID for header-extension-for-audio-level-indication.
+    virtual WebRtc_Word32 SetRTPAudioLevelIndicationStatus(const bool enable,
+                                                         const WebRtc_UWord8 ID);
+
+    // Get status and ID for header-extension-for-audio-level-indication.
+    virtual WebRtc_Word32 GetRTPAudioLevelIndicationStatus(bool& enable,
+                                                         WebRtc_UWord8& ID) const;
+
+    // Store the audio level in dBov for header-extension-for-audio-level-indication.
+    virtual WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
+
+    /*
+    *   Video
+    */
+    virtual RtpVideoCodecTypes ReceivedVideoCodec() const;
+
+    virtual RtpVideoCodecTypes SendVideoCodec() const;
+
+    virtual WebRtc_Word32 SendRTCPSliceLossIndication(const WebRtc_UWord8 pictureID);
+
+    // Set method for requesting a new key frame
+    virtual WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
+
+    // send a request for a keyframe
+    virtual WebRtc_Word32 RequestKeyFrame();
+
+    virtual WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS);
+
+    virtual void SetSendBitrate(const WebRtc_UWord32 startBitrate,
+                                const WebRtc_UWord16 minBitrateKbit,
+                                const WebRtc_UWord16 maxBitrateKbit);
+
+    virtual WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                            const WebRtc_UWord8 payloadTypeRED,
+                                            const WebRtc_UWord8 payloadTypeFEC);
+
+    virtual WebRtc_Word32 GenericFECStatus(bool& enable,
+                                         WebRtc_UWord8& payloadTypeRED,
+                                         WebRtc_UWord8& payloadTypeFEC);
+
+
+    virtual WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
+                                         const WebRtc_UWord8 deltaFrameCodeRate);
+
+    virtual WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
+                                              const bool deltaUseUepProtection);
+
+    virtual WebRtc_Word32 LastReceivedNTP(WebRtc_UWord32& NTPsecs,
+                                          WebRtc_UWord32& NTPfrac,
+                                          WebRtc_UWord32& remoteSR);
+
+    virtual WebRtc_Word32 BoundingSet(bool &tmmbrOwner,
+                                      TMMBRSet*& boundingSetRec);
+
+    virtual void BitrateSent(WebRtc_UWord32* totalRate,
+                             WebRtc_UWord32* videoRate,
+                             WebRtc_UWord32* fecRate,
+                             WebRtc_UWord32* nackRate) const;
+
+    virtual int EstimatedSendBandwidth(
+        WebRtc_UWord32* available_bandwidth) const;
+
+    virtual int EstimatedReceiveBandwidth(
+        WebRtc_UWord32* available_bandwidth) const;
+
+    virtual void SetRemoteSSRC(const WebRtc_UWord32 SSRC);
+    
+    virtual WebRtc_UWord32 SendTimeOfSendReport(const WebRtc_UWord32 sendReport);
+
+    virtual RateControlRegion OnOverUseStateUpdate(const RateControlInput& rateControlInput);
+
+    // good state of RTP receiver inform sender
+    virtual WebRtc_Word32 SendRTCPReferencePictureSelection(const WebRtc_UWord64 pictureID);
+
+    virtual void OnBandwidthEstimateUpdate(WebRtc_UWord16 bandWidthKbit);
+
+    void OnReceivedNTP() ;
+
+    // bw estimation
+    void OnPacketLossStatisticsUpdate(
+        const WebRtc_UWord8 fractionLost,
+        const WebRtc_UWord16 roundTripTime,
+        const WebRtc_UWord32 lastReceivedExtendedHighSeqNum);
+
+    void OnReceivedTMMBR();
+
+    void OnReceivedEstimatedMaxBitrate(const WebRtc_UWord32 maxBitrate);
+
+    void OnReceivedBandwidthEstimateUpdate(const WebRtc_UWord16 bwEstimateKbit);
+
+    // bad state of RTP receiver request a keyframe
+    void OnRequestIntraFrame();
+
+    void OnReceivedIntraFrameRequest(const RtpRtcp* caller);
+
+    // received a request for a new SLI
+    void OnReceivedSliceLossIndication(const WebRtc_UWord8 pictureID);
+
+    // received a new reference frame
+    void OnReceivedReferencePictureSelectionIndication(
+        const WebRtc_UWord64 pitureID);
+
+    void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
+                        const WebRtc_UWord16* nackSequenceNumbers);
+
+    void OnRequestSendReport();
+
+    // Following function is only called when constructing the object so no
+    // need to worry about data race.
+    void OwnsClock() { _owns_clock = true; }
+
+protected:
+    void RegisterChildModule(RtpRtcp* module);
+
+    void DeRegisterChildModule(RtpRtcp* module);
+
+    bool UpdateRTCPReceiveInformationTimers();
+
+    void ProcessDeadOrAliveTimer();
+
+    WebRtc_UWord32 BitrateReceivedNow() const;
+
+    // Get remote SequenceNumber
+    WebRtc_UWord16 RemoteSequenceNumber() const;
+
+    WebRtc_Word32 UpdateTMMBR();
+
+    // only for internal testing
+    WebRtc_UWord32 LastSendReport(WebRtc_UWord32& lastRTCPTime);
+
+    RTPSender                 _rtpSender;
+    RTPReceiver               _rtpReceiver;
+
+    RTCPSender                _rtcpSender;
+    RTCPReceiver              _rtcpReceiver;
+
+    bool                      _owns_clock;
+    RtpRtcpClock&             _clock;
+private:
+    void SendKeyFrame();
+    void ProcessDefaultModuleBandwidth();
+
+    WebRtc_Word32             _id;
+    const bool                _audio;
+    bool                      _collisionDetected;
+    WebRtc_UWord32            _lastProcessTime;
+    WebRtc_UWord32            _lastBitrateProcessTime;
+    WebRtc_UWord32            _lastPacketTimeoutProcessTime;
+    WebRtc_UWord16            _packetOverHead;
+
+    CriticalSectionWrapper*       _criticalSectionModulePtrs;
+    CriticalSectionWrapper*       _criticalSectionModulePtrsFeedback;
+    ModuleRtpRtcpImpl*            _defaultModule;
+    ModuleRtpRtcpImpl*            _audioModule;
+    ModuleRtpRtcpImpl*            _videoModule;
+    std::list<ModuleRtpRtcpImpl*> _childModules;
+
+    // Dead or alive
+    bool                  _deadOrAliveActive;
+    WebRtc_UWord32        _deadOrAliveTimeoutMS;
+    WebRtc_UWord32        _deadOrAliveLastTimer;
+
+    // receive side
+    BandwidthManagement   _bandwidthManagement;
+
+    WebRtc_UWord32        _receivedNTPsecsAudio;
+    WebRtc_UWord32        _receivedNTPfracAudio;
+    WebRtc_UWord32        _RTCPArrivalTimeSecsAudio;
+    WebRtc_UWord32        _RTCPArrivalTimeFracAudio;
+
+    // send side
+    NACKMethod            _nackMethod;
+    WebRtc_UWord32        _nackLastTimeSent;
+    WebRtc_UWord16        _nackLastSeqNumberSent;
+
+    bool                  _simulcast;
+    VideoCodec            _sendVideoCodec;
+    KeyFrameRequestMethod _keyFrameReqMethod;
+
+#ifdef MATLAB
+    MatlabPlot*           _plot1;
+#endif
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
new file mode 100644
index 0000000..ea705a8
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
@@ -0,0 +1,47 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'rtp_rtcp_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(webrtc_root)/../testing/gmock.gyp:gmock',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../../../',
+      ],
+      'sources': [
+        'receiver_fec_unittest.cc',
+        'rtp_fec_unittest.cc',
+        'rtp_format_vp8_unittest.cc',
+        'rtp_format_vp8_test_helper.cc',
+        'rtp_format_vp8_test_helper.h',
+        'rtcp_format_remb_unittest.cc',
+        'rtp_packet_history_test.cc',
+        'rtp_utility_test.cc',
+        'rtp_header_extension_test.cc',
+        'rtp_sender_test.cc',
+        'rtcp_sender_test.cc',
+        'transmission_bucket_test.cc',
+        'vp8_partition_aggregator_unittest.cc',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender.cc b/trunk/src/modules/rtp_rtcp/source/rtp_sender.cc
new file mode 100644
index 0000000..374ce53
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender.cc
@@ -0,0 +1,1768 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdlib> // srand
+
+#include "rtp_sender.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include "rtp_packet_history.h"
+#include "rtp_sender_audio.h"
+#include "rtp_sender_video.h"
+
+namespace webrtc {
+// Constructs an RTP sender. Takes a reference on the process-wide SSRC
+// database, picks a random non-zero local SSRC from it, and creates either
+// the audio or the video packetization helper depending on |audio|
+// (exactly one of _audio/_video becomes non-NULL).
+RTPSender::RTPSender(const WebRtc_Word32 id,
+                     const bool audio,
+                     RtpRtcpClock* clock) :
+    Bitrate(clock),
+    _id(id),
+    _audioConfigured(audio),
+    _audio(NULL),
+    _video(NULL),
+    _sendCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _transportCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+
+    _transport(NULL),
+
+    _sendingMedia(true), // Default to sending media
+
+    _maxPayloadLength(IP_PACKET_SIZE-28), // default is IP/UDP: 20+8 byte header
+    _targetSendBitrate(0),
+    _packetOverHead(28),
+
+    _payloadType(-1),
+    _payloadTypeMap(),
+
+    _rtpHeaderExtensionMap(),
+    _transmissionTimeOffset(0),
+
+    // RTP keep-alive (disabled until SetRTPKeepaliveStatus is called)
+    _keepAliveIsActive(false),
+    _keepAlivePayloadType(-1),
+    _keepAliveLastSent(0),
+    _keepAliveDeltaTimeSend(0),
+
+    // NACK
+    _nackByteCountTimes(),
+    _nackByteCount(),
+    _nackBitrate(clock),
+
+    _packetHistory(new RTPPacketHistory(clock)),
+    _sendBucket(),
+    _timeLastSendToNetworkUpdate(clock->GetTimeInMS()),
+    _transmissionSmoothing(false),
+
+    // statistics
+    _packetsSent(0),
+    _payloadBytesSent(0),
+
+    // RTP variables
+    _startTimeStampForced(false),
+    _startTimeStamp(0),
+    _ssrcDB(*SSRCDatabase::GetSSRCDatabase()),
+    _remoteSSRC(0),
+    _sequenceNumberForced(false),
+    _sequenceNumber(0),
+    _sequenceNumberRTX(0),
+    _ssrcForced(false),
+    _ssrc(0),
+    _timeStamp(0),
+    _CSRCs(0),
+    _CSRC(),
+    _includeCSRCs(true),
+    _RTX(false),
+    _ssrcRTX(0)
+{
+    memset(_nackByteCountTimes, 0, sizeof(_nackByteCountTimes));
+    memset(_nackByteCount, 0, sizeof(_nackByteCount));
+
+    memset(_CSRC, 0, sizeof(_CSRC));
+
+    // Seed the random generator; otherwise rand() yields the same sequence
+    // (e.g. 26500 first) in every process, hardly a random value :)
+    srand( (WebRtc_UWord32)_clock.GetTimeInMS() );
+
+    _ssrc = _ssrcDB.CreateSSRC(); // can't be 0
+
+    if(audio)
+    {
+        _audio = new RTPSenderAudio(id, &_clock, this);
+    } else
+    {
+        _video = new RTPSenderVideo(id, &_clock, this);
+    }
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+// Returns the local (and, if registered, the remote) SSRC to the shared SSRC
+// database, drops the database reference, and frees all owned resources:
+// critical sections, registered payload map entries, packet history, and the
+// audio/video helper (deleting the NULL one of the pair is a no-op).
+RTPSender::~RTPSender() {
+  if(_remoteSSRC != 0) {
+    _ssrcDB.ReturnSSRC(_remoteSSRC);
+  }
+  _ssrcDB.ReturnSSRC(_ssrc);
+
+  SSRCDatabase::ReturnSSRCDatabase();
+  delete _sendCritsect;
+  delete _transportCritsect;
+  while (!_payloadTypeMap.empty()) {
+    std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+        _payloadTypeMap.begin();
+    delete it->second;
+    _payloadTypeMap.erase(it);
+  }
+  delete _packetHistory;
+  delete _audio;
+  delete _video;
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+}
+
+// (Re)initializes the sender to its default state: re-randomizes the RTP and
+// RTX sequence numbers, clears send counters, keep-alive state, header
+// extensions and the registered-payload map, and registers |remoteSSRC|
+// (when non-zero) in the SSRC database so a colliding local SSRC can be
+// replaced. Always returns 0.
+WebRtc_Word32
+RTPSender::Init(const WebRtc_UWord32 remoteSSRC)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+
+    // reset to default generation
+    _ssrcForced = false;
+    _startTimeStampForced = false;
+
+    // register a remote SSRC if we have it to avoid collisions
+    if(remoteSSRC != 0)
+    {
+        if(_ssrc == remoteSSRC)
+        {
+            // collision detected
+            _ssrc = _ssrcDB.CreateSSRC(); // can't be 0
+        }
+        _remoteSSRC = remoteSSRC;
+        _ssrcDB.RegisterSSRC(remoteSSRC);
+    }
+    // Random start sequence numbers in [0, MAX_INIT_RTP_SEQ_NUMBER].
+    _sequenceNumber = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+    _sequenceNumberRTX = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+    _packetsSent = 0;
+    _payloadBytesSent = 0;
+    _packetOverHead = 28;
+
+    _keepAlivePayloadType = -1;
+
+    _rtpHeaderExtensionMap.Erase();
+
+    // Delete all registered payloads (map owns the Payload objects).
+    while (!_payloadTypeMap.empty()) {
+      std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+          _payloadTypeMap.begin();
+      delete it->second;
+      _payloadTypeMap.erase(it);
+    }
+
+    memset(_CSRC, 0, sizeof(_CSRC));
+
+    memset(_nackByteCount, 0, sizeof(_nackByteCount));
+    memset(_nackByteCountTimes, 0, sizeof(_nackByteCountTimes));
+    _nackBitrate.Init();
+
+    SetStorePacketsStatus(false, 0);
+    _sendBucket.Reset();
+
+    Bitrate::Init();
+
+    if(_audioConfigured)
+    {
+        _audio->Init();
+    } else
+    {
+        _video->Init();
+    }
+    return(0);
+}
+
+// Updates the trace/module id on this sender and forwards it to whichever
+// helper (audio or video) was created in the constructor.
+void
+RTPSender::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    if(_audioConfigured)
+    {
+        _audio->ChangeUniqueId(id);
+    } else
+    {
+        _video->ChangeUniqueId(id);
+    }
+}
+
+// Stores the target send bitrate. |bits| (bps) is integer-divided by 1000
+// and truncated to an unsigned 16-bit kbps value, so values above
+// ~65535 kbps wrap. Always returns 0.
+WebRtc_Word32
+RTPSender::SetTargetSendBitrate(const WebRtc_UWord32 bits)
+{
+    _targetSendBitrate = (WebRtc_UWord16)(bits/1000);
+    return 0;
+}
+
+// Returns the configured target send bitrate in kbps.
+WebRtc_UWord16
+RTPSender::TargetSendBitrateKbit() const
+{
+    return _targetSendBitrate;
+}
+
+// Returns the measured send bitrate, converted from bps to kbps.
+WebRtc_UWord16
+RTPSender::ActualSendBitrateKbit() const
+{
+    return (WebRtc_UWord16) (Bitrate::BitrateNow()/1000);
+}
+
+// Video payload bitrate sent; 0 for audio-configured senders (_video is NULL).
+WebRtc_UWord32
+RTPSender::VideoBitrateSent() const {
+  if (_video)
+    return _video->VideoBitrateSent();
+  else
+    return 0;
+}
+
+// FEC overhead bitrate; 0 for audio-configured senders (_video is NULL).
+WebRtc_UWord32
+RTPSender::FecOverheadRate() const {
+  if (_video)
+    return _video->FecOverheadRate();
+  else
+    return 0;
+}
+
+// Last computed bitrate consumed by NACK-triggered retransmissions.
+WebRtc_UWord32
+RTPSender::NackOverheadRate() const {
+  return _nackBitrate.BitrateLast();
+}
+
+// Sets the transmission time offset carried in the RTP header extension.
+// The wire field is a signed 24-bit value, hence the +/-(2^23 - 1) bounds;
+// returns -1 when |transmissionTimeOffset| is out of that range, else 0.
+WebRtc_Word32
+RTPSender::SetTransmissionTimeOffset(
+    const WebRtc_Word32 transmissionTimeOffset)
+{
+    if (transmissionTimeOffset > (0x800000 - 1) ||
+        transmissionTimeOffset < -(0x800000 - 1))  // Word24
+    {
+        return -1;
+    }
+    CriticalSectionScoped cs(_sendCritsect);
+    _transmissionTimeOffset = transmissionTimeOffset;
+    return 0;
+}
+
+// Registers a header extension of |type| under local identifier |id|.
+// Returns the extension map's result (0 on success).
+WebRtc_Word32
+RTPSender::RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                      const WebRtc_UWord8 id)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _rtpHeaderExtensionMap.Register(type, id);
+}
+
+// Removes a previously registered header extension of |type|.
+WebRtc_Word32
+RTPSender::DeregisterRtpHeaderExtension(const RTPExtensionType type)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _rtpHeaderExtensionMap.Deregister(type);
+}
+
+// Total number of bytes the registered header extensions add to each
+// outgoing RTP header.
+WebRtc_UWord16 RTPSender::RtpHeaderExtensionTotalLength() const {
+  CriticalSectionScoped cs(_sendCritsect);
+  return _rtpHeaderExtensionMap.GetTotalLengthInBytes();
+}
+
+// Registers a payload-number -> format mapping. May be called multiple
+// times for the same payload number as long as name and (for audio)
+// frequency/rate are compatible. Returns 0 on success, -1 if the number
+// collides with the keepalive payload type or is already mapped to a
+// different format.
+WebRtc_Word32 RTPSender::RegisterPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadNumber,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  assert(payloadName);
+  CriticalSectionScoped cs(_sendCritsect);
+
+  // The keepalive payload type must stay unmapped (see EnableRTPKeepalive).
+  if (payloadNumber == _keepAlivePayloadType) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "invalid state",
+                 __FUNCTION__);
+    return -1;
+  }
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+      _payloadTypeMap.find(payloadNumber);
+
+  if (_payloadTypeMap.end() != it) {
+    // We already use this payload type.
+    ModuleRTPUtility::Payload* payload = it->second;
+    assert(payload);
+
+    // Check if it's the same format we already have registered.
+    if (ModuleRTPUtility::StringCompare(payload->name, payloadName,
+                                        RTP_PAYLOAD_NAME_SIZE - 1)) {
+      // Audio: a rate of 0 on either side acts as a wildcard.
+      if (_audioConfigured && payload->audio &&
+          payload->typeSpecific.Audio.frequency == frequency &&
+          (payload->typeSpecific.Audio.rate == rate ||
+              payload->typeSpecific.Audio.rate == 0 || rate == 0)) {
+        payload->typeSpecific.Audio.rate = rate;
+        // Ensure that we update the rate if new or old is zero.
+        return 0;
+      }
+      if(!_audioConfigured && !payload->audio) {
+        return 0;
+      }
+    }
+    return -1;
+  }
+  // New payload number: let the audio/video helper build the Payload entry.
+  WebRtc_Word32 retVal = -1;
+  ModuleRTPUtility::Payload* payload = NULL;
+  if (_audioConfigured) {
+    retVal = _audio->RegisterAudioPayload(payloadName, payloadNumber, frequency,
+                                          channels, rate, payload);
+  } else {
+    retVal = _video->RegisterVideoPayload(payloadName, payloadNumber, rate,
+                                          payload);
+  }
+  if(payload) {
+    _payloadTypeMap[payloadNumber] = payload;
+  }
+  return retVal;
+}
+
+// Removes the mapping for |payloadType| and frees the Payload entry.
+// Returns -1 if the payload type was never registered.
+WebRtc_Word32 RTPSender::DeRegisterSendPayload(const WebRtc_Word8 payloadType) {
+  CriticalSectionScoped lock(_sendCritsect);
+
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator iter =
+      _payloadTypeMap.find(payloadType);
+  if (iter == _payloadTypeMap.end()) {
+    return -1;
+  }
+  delete iter->second;
+  _payloadTypeMap.erase(iter);
+  return 0;
+}
+
+// Payload type of the most recently selected send format.
+// NOTE(review): read without taking _sendCritsect.
+WebRtc_Word8 RTPSender::SendPayloadType() const {
+  return _payloadType;
+}
+
+
+// Current audio RTP sampling frequency in Hz.
+// NOTE(review): dereferences _audio without checking _audioConfigured,
+// unlike VideoBitrateSent()/FecOverheadRate() — confirm this is never
+// called on a video-configured sender.
+int RTPSender::SendPayloadFrequency() const
+{
+    return _audio->AudioFrequency();
+}
+
+
+//  Returns true when the RTP keepalive mechanism is enabled.
+//  See http://www.ietf.org/internet-drafts/draft-ietf-avt-app-rtp-keepalive-04.txt
+//  for details about this method. Only Section 4.6 is implemented so far.
+//  NOTE(review): reads _keepAliveIsActive without _sendCritsect.
+bool
+RTPSender::RTPKeepalive() const
+{
+    return _keepAliveIsActive;
+}
+
+// Reports the current RTP keepalive configuration. Any out-parameter
+// may be NULL when the caller is not interested in that value.
+WebRtc_Word32 RTPSender::RTPKeepaliveStatus(
+    bool* enable,
+    WebRtc_Word8* unknownPayloadType,
+    WebRtc_UWord16* deltaTransmitTimeMS) const {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  if (enable) {
+    *enable = _keepAliveIsActive;
+  }
+  if (unknownPayloadType) {
+    *unknownPayloadType = _keepAlivePayloadType;
+  }
+  if (deltaTransmitTimeMS) {
+    *deltaTransmitTimeMS = _keepAliveDeltaTimeSend;
+  }
+  return 0;
+}
+
+// Activates RTP keepalive (draft-ietf-avt-app-rtp-keepalive, section 4.6).
+// |unknownPayloadType| must NOT be registered as a media payload type;
+// |deltaTransmitTimeMS| is the keepalive interval in milliseconds.
+// Returns 0 on success, -1 if the payload type is already mapped.
+WebRtc_Word32 RTPSender::EnableRTPKeepalive(
+    const WebRtc_Word8 unknownPayloadType,
+    const WebRtc_UWord16 deltaTransmitTimeMS) {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+      _payloadTypeMap.find(unknownPayloadType);
+
+  if (it != _payloadTypeMap.end()) {
+    // Refuse: the keepalive type must be unknown to the receiver.
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument",
+                 __FUNCTION__);
+    return -1;
+  }
+  _keepAliveIsActive = true;
+  _keepAlivePayloadType = unknownPayloadType;
+  _keepAliveLastSent = _clock.GetTimeInMS();
+  _keepAliveDeltaTimeSend = deltaTransmitTimeMS;
+  return 0;
+}
+
+// Turns off RTP keepalive.
+// Takes _sendCritsect for consistency with EnableRTPKeepalive(),
+// RTPKeepaliveStatus() and TimeToSendRTPKeepalive(), which all guard the
+// same keepalive state under the lock; previously this write was
+// unsynchronized.
+WebRtc_Word32
+RTPSender::DisableRTPKeepalive()
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    _keepAliveIsActive = false;
+    return 0;
+}
+
+// True when more than _keepAliveDeltaTimeSend ms have passed since the
+// last keepalive (or media) packet was sent.
+bool RTPSender::TimeToSendRTPKeepalive() const {
+  CriticalSectionScoped cs(_sendCritsect);
+  const WebRtc_UWord32 elapsedMs = _clock.GetTimeInMS() - _keepAliveLastSent;
+  return elapsedMs > _keepAliveDeltaTimeSend;
+}
+
+// ----------------------------------------------------------------------------
+//  From the RFC draft:
+//
+//  4.6.  RTP Packet with Unknown Payload Type
+//
+//     The application sends an RTP packet of 0 length with a dynamic
+//     payload type that has not been negotiated by the peers (e.g. not
+//     negotiated within the SDP offer/answer, and thus not mapped to any
+//     media format).
+//
+//     The sequence number is incremented by one for each packet, as it is
+//     sent within the same RTP session as the actual media.  The timestamp
+//     contains the same value a media packet would have at this time.  The
+//     marker bit is not significant for the keepalive packets and is thus
+//     set to zero.
+//
+//     Normally the peer will ignore this packet, as RTP [RFC3550] states
+//     that "a receiver MUST ignore packets with payload types that it does
+//     not understand".
+//
+//     Cons:
+//     o  [RFC4566] and [RFC3264] mandate not to send media with inactive
+//        and recvonly attributes, however this is mitigated as no real
+//        media is sent with this mechanism.
+//
+//     Recommendation:
+//     o  This method should be used for RTP keepalive.
+//
+//  7.  Timing and Transport Considerations
+//
+//     An application supporting this specification must transmit keepalive
+//     packets every Tr seconds during the whole duration of the media
+//     session.  Tr SHOULD be configurable, and otherwise MUST default to 15
+//     seconds.
+//
+//     Keepalive packets within a particular RTP session MUST use the tuple
+//     (source IP address, source TCP/UDP ports, target IP address, target
+//     TCP/UDP Port) of the regular RTP packets.
+//
+//     The agent SHOULD only send RTP keepalive when it does not send
+//     regular RTP packets.
+//
+//  http://www.ietf.org/internet-drafts/draft-ietf-avt-app-rtp-keepalive-04.txt
+// ----------------------------------------------------------------------------
+
+// Builds and sends one zero-length keepalive packet using the configured
+// unknown payload type; advances the RTP timestamp by the media clock
+// ticks elapsed since the last send. Returns SendToNetwork()'s result.
+WebRtc_Word32
+RTPSender::SendRTPKeepalivePacket()
+{
+    // RFC summary:
+    //
+    // - Send an RTP packet of 0 length;
+    // - dynamic payload type has not been negotiated (not mapped to any media);
+    // - sequence number is incremented by one for each packet;
+    // - timestamp contains the same value a media packet would have at this time;
+    // - marker bit is set to zero.
+
+    WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
+    WebRtc_UWord16 rtpHeaderLength = 12;
+    {
+        CriticalSectionScoped cs(_sendCritsect);
+
+        WebRtc_UWord32 now = _clock.GetTimeInMS();
+        WebRtc_UWord32 dT = now -_keepAliveLastSent; // delta time in MS
+
+        // 90 kHz is the fixed video RTP clock; audio uses its actual rate.
+        WebRtc_UWord32 freqKHz = 90; // video
+        if(_audioConfigured)
+        {
+            freqKHz = _audio->AudioFrequency()/1000;
+        }
+        WebRtc_UWord32 dSamples = dT*freqKHz;
+
+        // Set the timestamp a media packet would carry right now.
+        _timeStamp += dSamples;
+        _keepAliveLastSent = now;
+
+        rtpHeaderLength = RTPHeaderLength();
+
+        // correct seq num, time stamp and payloadtype
+        BuildRTPheader(dataBuffer, _keepAlivePayloadType, false, 0, false);
+    }
+
+    return SendToNetwork(dataBuffer, 0, rtpHeaderLength, kAllowRetransmission);
+}
+
+// Sets the maximum total RTP packet length (header included) and the
+// per-packet transport overhead used for bandwidth accounting.
+// |maxPayloadLength| must be within [100, IP_PACKET_SIZE].
+WebRtc_Word32
+RTPSender::SetMaxPayloadLength(const WebRtc_UWord16 maxPayloadLength, const WebRtc_UWord16 packetOverHead)
+{
+    // sanity check
+    if(maxPayloadLength < 100 || maxPayloadLength > IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_sendCritsect);
+    _maxPayloadLength = maxPayloadLength;
+    _packetOverHead = packetOverHead;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceRtpRtcp, _id, "SetMaxPayloadLength to %d.", maxPayloadLength);
+    return 0;
+}
+
+// Maximum media payload bytes per packet: the configured max length
+// minus the RTP header; for video, also minus FEC/ULP/RED overhead and
+// the 2-byte RTX original-sequence-number field when RTX is enabled.
+WebRtc_UWord16 RTPSender::MaxDataPayloadLength() const {
+  if(_audioConfigured) {
+    return _maxPayloadLength - RTPHeaderLength();
+  } else {
+    return _maxPayloadLength - RTPHeaderLength() -
+        _video->FECPacketOverhead() - ((_RTX) ? 2 : 0);
+        // Include the FEC/ULP/RED overhead.
+  }
+}
+
+// Configured maximum RTP packet length (header included).
+WebRtc_UWord16 RTPSender::MaxPayloadLength() const {
+  return _maxPayloadLength;
+}
+
+// Per-packet transport overhead in bytes (default 28, see Init).
+WebRtc_UWord16 RTPSender::PacketOverHead() const {
+  return _packetOverHead;
+}
+
+// Enables or disables pacing of outgoing packets via the send bucket.
+void RTPSender::SetTransmissionSmoothingStatus(const bool enable) {
+  CriticalSectionScoped lock(_sendCritsect);
+  _transmissionSmoothing = enable;
+}
+
+// Whether outgoing packets are currently paced via the send bucket.
+bool RTPSender::TransmissionSmoothingStatus() const {
+  CriticalSectionScoped lock(_sendCritsect);
+  return _transmissionSmoothing;
+}
+
+// Enables/disables RTX retransmission on a separate SSRC. When enabling:
+// uses |SSRC| if |setSSRC| is true, otherwise draws a fresh non-zero
+// SSRC from the SSRC database. The RTX SSRC is left untouched on disable.
+void RTPSender::SetRTXStatus(const bool enable,
+                             const bool setSSRC,
+                             const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped cs(_sendCritsect);
+  _RTX = enable;
+  if (enable) {
+    if (setSSRC) {
+     _ssrcRTX = SSRC;
+    } else {
+     _ssrcRTX = _ssrcDB.CreateSSRC();   // can't be 0
+    }
+  }
+}
+
+// Reports the RTX state and the RTX SSRC. NULL out-parameters are now
+// ignored (previously they were dereferenced unconditionally), matching
+// the defensive style of RTPKeepaliveStatus().
+void RTPSender::RTXStatus(bool* enable,
+                          WebRtc_UWord32* SSRC) const {
+  CriticalSectionScoped cs(_sendCritsect);
+  if (enable) {
+    *enable = _RTX;
+  }
+  if (SSRC) {
+    *SSRC = _ssrcRTX;
+  }
+}
+
+// Ensures |payloadType| is known and makes it the current send payload
+// type. For video, reports the codec type through |videoType|. For
+// audio, switches the RTP sampling frequency (and restarts the start
+// timestamp) when the new payload uses a different frequency — except
+// for CN payloads. Returns 0 on success, -1 if the type is negative or
+// unregistered.
+WebRtc_Word32 RTPSender::CheckPayloadType(const WebRtc_Word8 payloadType,
+                                          RtpVideoCodecTypes& videoType) {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  if (payloadType < 0) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tinvalid payloadType (%d)", payloadType);
+    return -1;
+  }
+  if (_audioConfigured) {
+    WebRtc_Word8 redPlType = -1;
+    if (_audio->RED(redPlType) == 0) {
+      // We have configured RED.
+      if(redPlType == payloadType) {
+        // And it's a match...
+        return 0;
+      }
+    }
+  }
+  // Fast path: payload type unchanged since last call.
+  if (_payloadType == payloadType) {
+    if (!_audioConfigured) {
+      videoType = _video->VideoCodecType();
+    }
+    return 0;
+  }
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+  if (it == _payloadTypeMap.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tpayloadType:%d not registered", payloadType);
+    return -1;
+  }
+  _payloadType = payloadType;
+  ModuleRTPUtility::Payload* payload = it->second;
+  assert(payload);
+  if (payload->audio) {
+    if (_audioConfigured) {
+      // Extract payload frequency
+      int payloadFreqHz;
+      if (ModuleRTPUtility::StringCompare(payload->name,"g722",4)&&
+          (payload->name[4] == 0)) {
+        //Check that strings end there, g722.1...
+        // Special case for G.722, bug in spec
+        payloadFreqHz=8000;
+      } else {
+        payloadFreqHz=payload->typeSpecific.Audio.frequency;
+      }
+
+      //we don't do anything if it's CN
+      if ((_audio->AudioFrequency() != payloadFreqHz)&&
+          (!ModuleRTPUtility::StringCompare(payload->name,"cn",2))) {
+        _audio->SetAudioFrequency(payloadFreqHz);
+        // We need to correct the timestamp again,
+        // since this might happen after we've set it
+        WebRtc_UWord32 RTPtime =
+            ModuleRTPUtility::GetCurrentRTP(&_clock, payloadFreqHz);
+        SetStartTimestamp(RTPtime);
+        // will be ignored if it's already configured via API
+      }
+    }
+  } else {
+    if(!_audioConfigured) {
+      _video->SetVideoCodecType(payload->typeSpecific.Video.videoCodecType);
+      videoType = payload->typeSpecific.Video.videoCodecType;
+      _video->SetMaxConfiguredBitrateVideo(
+          payload->typeSpecific.Video.maxRate);
+    }
+  }
+  return 0;
+}
+
+// Entry point for sending one media frame. Validates the payload type,
+// refreshes the keepalive timer, and dispatches to the audio or video
+// sender. Returns the helper's result, 0 when media sending is off, or
+// -1 on an unknown payload type.
+WebRtc_Word32
+RTPSender::SendOutgoingData(const FrameType frameType,
+                            const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize,
+                            const RTPFragmentationHeader* fragmentation,
+                            VideoCodecInformation* codecInfo,
+                            const RTPVideoTypeHeader* rtpTypeHdr)
+{
+    {
+        // Drop this packet if we're not sending media packets
+        CriticalSectionScoped cs(_sendCritsect);
+        if (!_sendingMedia)
+        {
+            return 0;
+        }
+    }
+    RtpVideoCodecTypes videoType = kRtpNoVideo;
+    if(CheckPayloadType(payloadType, videoType) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument failed to find payloadType:%d", __FUNCTION__, payloadType);
+        return -1;
+    }
+    // Update keepalive so that we don't trigger keepalive messages while
+    // sending data. NOTE(review): written outside _sendCritsect here,
+    // but under the lock elsewhere — confirm intended.
+    _keepAliveLastSent = _clock.GetTimeInMS();
+
+    if(_audioConfigured)
+    {
+        // Sanity check: only audio frame types are valid here.
+        // (Original comment said "video" — comments were swapped.)
+        assert(frameType == kAudioFrameSpeech ||
+               frameType == kAudioFrameCN ||
+               frameType == kFrameEmpty);
+
+        return _audio->SendAudio(frameType, payloadType, captureTimeStamp, payloadData, payloadSize,fragmentation);
+    } else
+    {
+        // Sanity check: only video frame types are valid here.
+        assert(frameType == kVideoFrameKey ||
+               frameType == kVideoFrameDelta ||
+               frameType == kVideoFrameGolden ||
+               frameType == kVideoFrameAltRef);
+
+        return _video->SendVideo(videoType,
+                                 frameType,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 payloadData,
+                                 payloadSize,
+                                 fragmentation,
+                                 codecInfo,
+                                 rtpTypeHdr);
+    }
+}
+
+// Sends |bytes| of random padding in packets of at most 224 bytes, each
+// a multiple of 32 (for SRTP) with the RTP padding bit set. Returns 0
+// when (almost) all bytes were sent, -1 otherwise.
+WebRtc_Word32 RTPSender::SendPadData(WebRtc_Word8 payload_type,
+                                     WebRtc_UWord32 capture_timestamp,
+                                     WebRtc_Word32 bytes) {
+  {
+    // Drop this packet if we're not sending media packets.
+    // Fix: take _sendCritsect while reading _sendingMedia, mirroring
+    // SendOutgoingData(); previously this read was unsynchronized.
+    CriticalSectionScoped cs(_sendCritsect);
+    if (!_sendingMedia) {
+      return 0;
+    }
+  }
+  // Max in the RFC 3550 is 255 bytes, we limit it to be modulus 32 for SRTP.
+  int max_length = 224;
+  WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
+
+  for (; bytes > 0; bytes -= max_length) {
+    WebRtc_Word32 header_length;
+    {
+      // Correct seq num, timestamp and payload type.
+      header_length = BuildRTPheader(data_buffer,
+                                     payload_type,
+                                     false,  // No markerbit.
+                                     capture_timestamp,
+                                     true,  // Timestamp provided.
+                                     true);  // Increment sequence number.
+    }
+    data_buffer[0] |= 0x20;  // Set padding bit.
+    WebRtc_Word32* data =
+        reinterpret_cast<WebRtc_Word32*>(&(data_buffer[header_length]));
+
+    // Last packet: round the remainder up to the nearest multiple of 32.
+    int padding_bytes_in_packet = max_length;
+    if (bytes < max_length) {
+      padding_bytes_in_packet = (bytes + 16) & 0xffe0;  // Keep our modulus 32.
+    }
+    if (padding_bytes_in_packet < 32) {
+       // Sanity don't send empty packets.
+       break;
+    }
+    // Fill data buffer with random data.
+    for(int j = 0; j < (padding_bytes_in_packet >> 2); j++) {
+      data[j] = rand();
+    }
+    // Set number of padding bytes in the last byte of the packet.
+    data_buffer[header_length + padding_bytes_in_packet - 1] =
+        padding_bytes_in_packet;
+    // Send the packet
+    if (0 > SendToNetwork(data_buffer,
+                          padding_bytes_in_packet,
+                          header_length,
+                          kDontRetransmit)) {
+      // Error sending the packet.
+      break;
+    }
+  }
+  if (bytes > 31) {  // 31 due to our modulus 32.
+    // We did not manage to send all bytes.
+    return -1;
+  }
+  return 0;
+}
+
+// Enables/disables the packet history (needed for NACK and pacing),
+// sized to hold |numberToStore| packets.
+WebRtc_Word32 RTPSender::SetStorePacketsStatus(
+    const bool enable,
+    const WebRtc_UWord16 numberToStore) {
+  _packetHistory->SetStorePacketsStatus(enable, numberToStore);
+  return 0;
+}
+
+// Whether sent packets are being stored for retransmission.
+bool RTPSender::StorePackets() const {
+  return _packetHistory->StorePackets();
+}
+
+// Retransmits the stored packet with sequence number |packet_id|, unless
+// it was already resent within the last |min_resend_time| ms or is
+// marked kDontRetransmit. When RTX is enabled the packet is rewritten as
+// an RTX packet: new sequence number, RTX SSRC, and the original
+// sequence number (OSN) inserted right after the header. Returns bytes
+// sent, 0 if resending was skipped, or -1 on failure.
+WebRtc_Word32 RTPSender::ReSendPacket(WebRtc_UWord16 packet_id,
+                                      WebRtc_UWord32 min_resend_time) {
+
+  WebRtc_UWord16 length = IP_PACKET_SIZE;
+  WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
+  WebRtc_UWord8* buffer_to_send_ptr = data_buffer;
+
+  WebRtc_UWord32 stored_time_in_ms;
+  StorageType type;
+  bool found = _packetHistory->GetRTPPacket(packet_id,
+      min_resend_time, data_buffer, &length, &stored_time_in_ms, &type);
+  if (!found) {
+    // Packet not found.
+    return -1;
+  }
+
+  if (length == 0 || type == kDontRetransmit) {
+    // No bytes copied (packet recently resent, skip resending) or
+    // packet should not be retransmitted.
+    return 0;
+  }
+
+  WebRtc_UWord8 data_buffer_rtx[IP_PACKET_SIZE];
+  if (_RTX) {
+    buffer_to_send_ptr = data_buffer_rtx;
+
+    CriticalSectionScoped cs(_sendCritsect);
+    // Add RTX header.
+    ModuleRTPUtility::RTPHeaderParser rtpParser(
+        reinterpret_cast<const WebRtc_UWord8*>(data_buffer),
+        length);
+
+    WebRtcRTPHeader rtp_header;
+    rtpParser.Parse(rtp_header);
+
+    // Add original RTP header.
+    memcpy(data_buffer_rtx, data_buffer, rtp_header.header.headerLength);
+
+    // Replace sequence number (bytes 2-3 of the RTP header).
+    WebRtc_UWord8* ptr = data_buffer_rtx + 2;
+    ModuleRTPUtility::AssignUWord16ToBuffer(ptr, _sequenceNumberRTX++);
+
+    // Replace SSRC (bytes 8-11).
+    ptr += 6;
+    ModuleRTPUtility::AssignUWord32ToBuffer(ptr, _ssrcRTX);
+
+    // Add OSN (original sequence number).
+    ptr = data_buffer_rtx + rtp_header.header.headerLength;
+    ModuleRTPUtility::AssignUWord16ToBuffer(
+        ptr, rtp_header.header.sequenceNumber);
+    ptr += 2;
+
+    // Add original payload data.
+    memcpy(ptr,
+           data_buffer + rtp_header.header.headerLength,
+           length - rtp_header.header.headerLength);
+    length += 2;
+  }
+
+  WebRtc_Word32 bytes_sent = ReSendToNetwork(buffer_to_send_ptr, length);
+  if (bytes_sent <= 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "Transport failed to resend packet_id %u", packet_id);
+    return -1;
+  }
+
+  // Store the time when the packet was last resent.
+  _packetHistory->UpdateResendTime(packet_id);
+
+  return bytes_sent;
+}
+
+// Sends |packet| on the registered transport and updates the bitrate
+// and packet counters. Payload bytes are intentionally NOT counted since
+// this is a retransmission, not new payload data. Returns bytes sent or
+// -1 when no transport exists or sending failed.
+WebRtc_Word32 RTPSender::ReSendToNetwork(const WebRtc_UWord8* packet,
+                                         const WebRtc_UWord32 size) {
+  WebRtc_Word32 bytes_sent = -1;
+  {
+    CriticalSectionScoped lock(_transportCritsect);
+    if (_transport) {
+      bytes_sent = _transport->SendPacket(_id, packet, size);
+    }
+  }
+
+  if (bytes_sent <= 0) {
+    return -1;
+  }
+
+  // Update send statistics
+  CriticalSectionScoped cs(_sendCritsect);
+  Bitrate::Update(bytes_sent);
+  _packetsSent++;
+  // We on purpose don't add to _payloadBytesSent since this is a
+  // re-transmit and not new payload data.
+  return bytes_sent;
+}
+
+// Video module's selective-retransmission settings, or -1 when there is
+// no video sender.
+int RTPSender::SelectiveRetransmissions() const {
+  return _video ? _video->SelectiveRetransmissions() : -1;
+}
+
+// Forwards selective-retransmission settings to the video module;
+// returns -1 when there is no video sender.
+int RTPSender::SetSelectiveRetransmissions(uint8_t settings) {
+  return _video ? _video->SetSelectiveRetransmissions(settings) : -1;
+}
+
+// Resends the packets listed in |nackSequenceNumbers|, subject to the
+// NACK bitrate budget. Stops early when a resend fails, or when the
+// bytes resent exceed roughly RTT * target bitrate (data the receiver
+// will see within one RTT anyway).
+void
+RTPSender::OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
+                          const WebRtc_UWord16* nackSequenceNumbers,
+                          const WebRtc_UWord16 avgRTT) {
+    const WebRtc_UWord32 now = _clock.GetTimeInMS();
+    WebRtc_UWord32 bytesReSent = 0;
+
+  // Enough bandwidth to send NACK?
+  if (!ProcessNACKBitRate(now)) {
+    WEBRTC_TRACE(kTraceStream,
+                 kTraceRtpRtcp,
+                 _id,
+                 "NACK bitrate reached. Skip sending NACK response. Target %d",
+                 TargetSendBitrateKbit());
+    return;
+  }
+
+  for (WebRtc_UWord16 i = 0; i < nackSequenceNumbersLength; ++i) {
+    // 5 ms grace on top of the RTT before resending the same packet again.
+    const WebRtc_Word32 bytesSent = ReSendPacket(nackSequenceNumbers[i],
+                                                 5+avgRTT);
+    if (bytesSent > 0) {
+      bytesReSent += bytesSent;
+    } else if (bytesSent == 0) {
+      // The packet has previously been resent.
+      // Try resending next packet in the list.
+      continue;
+    } else if (bytesSent < 0) {
+      // Failed to send one Sequence number. Give up the rest in this nack.
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceRtpRtcp,
+                   _id,
+                   "Failed resending RTP packet %d, Discard rest of packets",
+                   nackSequenceNumbers[i]);
+      break;
+    }
+    // delay bandwidth estimate (RTT * BW)
+    if (TargetSendBitrateKbit() != 0 && avgRTT) {
+      // kbits/s * ms = bits => bits/8 = bytes
+      WebRtc_UWord32 targetBytes =
+          (static_cast<WebRtc_UWord32>(TargetSendBitrateKbit()) * avgRTT) >> 3;
+      if (bytesReSent > targetBytes) {
+        break; // ignore the rest of the packets in the list
+      }
+    }
+  }
+  if (bytesReSent > 0) {
+    // TODO(pwestin) consolidate these two methods.
+    UpdateNACKBitRate(bytesReSent, now);
+    _nackBitrate.Update(bytesReSent);
+  }
+}
+
+// Returns true when the NACK retransmission bitrate over (at most) the
+// last second is below the requested target bitrate, or when no target
+// bitrate has been set. Uses the byte-count history maintained by
+// UpdateNACKBitRate().
+bool RTPSender::ProcessNACKBitRate(const WebRtc_UWord32 now) {
+  WebRtc_UWord32 num = 0;
+  WebRtc_Word32 byteCount = 0;
+  const WebRtc_UWord32 avgInterval=1000;
+
+  CriticalSectionScoped cs(_sendCritsect);
+
+  if (_targetSendBitrate == 0) {
+    return true;
+  }
+  for (num = 0; num < NACK_BYTECOUNT_SIZE; num++) {
+    if ((now - _nackByteCountTimes[num]) > avgInterval) {
+      // don't use data older than 1sec
+      break;
+    } else {
+      byteCount += _nackByteCount[num];
+    }
+  }
+  WebRtc_Word32 timeInterval = avgInterval;
+  if (num == NACK_BYTECOUNT_SIZE) {
+    // More than NACK_BYTECOUNT_SIZE nack messages has been received
+    // during the last msgInterval
+    timeInterval = now - _nackByteCountTimes[num-1];
+    if(timeInterval < 0) {
+      timeInterval = avgInterval;
+    }
+  }
+  // Compare bits resent against target bitrate (kbit/s) * interval (ms).
+  return (byteCount*8) < (_targetSendBitrate * timeInterval);
+}
+
+// Records |bytes| of NACK retransmission at time |now| (ms) into the
+// byte-count history read by ProcessNACKBitRate(). A |now| of zero adds
+// the bytes to the most recent slot instead of shifting the history.
+void RTPSender::UpdateNACKBitRate(const WebRtc_UWord32 bytes,
+                                  const WebRtc_UWord32 now) {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  // save bitrate statistics
+  if(bytes > 0) {
+    if(now == 0) {
+      // add padding length
+      _nackByteCount[0] += bytes;
+    } else {
+      if(_nackByteCountTimes[0] == 0) {
+        // first no shift
+      } else {
+        // shift history one slot, dropping the oldest entry
+        for(int i = (NACK_BYTECOUNT_SIZE-2); i >= 0 ; i--) {
+          _nackByteCount[i+1] = _nackByteCount[i];
+          _nackByteCountTimes[i+1] = _nackByteCountTimes[i];
+        }
+      }
+      _nackByteCount[0] = bytes;
+      _nackByteCountTimes[0] = now;
+    }
+  }
+}
+
+// Timer-driven pacing: drains the send bucket, fetching each queued
+// packet from the history, patching its transmission-time-offset header
+// extension with the time spent queued, and sending it on the transport.
+// No-op unless transmission smoothing is enabled.
+void RTPSender::ProcessSendToNetwork() {
+
+  // triggered by timer
+  WebRtc_UWord32 delta_time_ms;
+  {
+    CriticalSectionScoped cs(_sendCritsect);
+
+    if (!_transmissionSmoothing) {
+      return;
+    }
+
+    WebRtc_UWord32 now = _clock.GetTimeInMS();
+    delta_time_ms = now - _timeLastSendToNetworkUpdate;
+    _timeLastSendToNetworkUpdate = now;
+  }
+
+  // Refill the leaky bucket for the elapsed interval.
+  _sendBucket.UpdateBytesPerInterval(delta_time_ms, _targetSendBitrate);
+
+  while (!_sendBucket.Empty()) {
+
+    WebRtc_Word32 seq_num = _sendBucket.GetNextPacket();
+    if (seq_num < 0) {
+      break;
+    }
+
+    WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
+    WebRtc_UWord16 length = IP_PACKET_SIZE;
+    WebRtc_UWord32 stored_time_ms;
+    StorageType type;
+    bool found = _packetHistory->GetRTPPacket(seq_num, 0, data_buffer, &length,
+        &stored_time_ms, &type);
+    if (!found) {
+      // A queued packet must exist in the history.
+      assert(false);
+      return;
+    }
+    assert(length > 0);
+
+    // Time the packet spent in the pacer queue.
+    WebRtc_UWord32 diff_ms = _clock.GetTimeInMS() - stored_time_ms;
+
+    ModuleRTPUtility::RTPHeaderParser rtpParser(data_buffer, length);
+    WebRtcRTPHeader rtp_header;
+    rtpParser.Parse(rtp_header);
+
+    UpdateTransmissionTimeOffset(data_buffer, length, rtp_header, diff_ms);
+
+    // Send packet
+    WebRtc_Word32 bytes_sent = -1;
+    {
+      CriticalSectionScoped cs(_transportCritsect);
+      if (_transport) {
+        bytes_sent = _transport->SendPacket(_id, data_buffer, length);
+      }
+    }
+
+    // Update send statistics
+    if (bytes_sent > 0) {
+      CriticalSectionScoped cs(_sendCritsect);
+      Bitrate::Update(bytes_sent);
+      _packetsSent++;
+      if (bytes_sent > rtp_header.header.headerLength) {
+        _payloadBytesSent += bytes_sent - rtp_header.header.headerLength;
+      }
+    }
+  } 
+}
+
+// Stores the packet in the history (for NACK / pacing), then either
+// queues it in the send bucket (when smoothing is on) or sends it
+// immediately on the transport and updates the send statistics.
+// |length| is payload bytes, |rtpLength| is RTP header bytes.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32
+RTPSender::SendToNetwork(const WebRtc_UWord8* buffer,
+                         const WebRtc_UWord16 length,
+                         const WebRtc_UWord16 rtpLength,
+                         const StorageType storage)
+{
+  // Used for NACK or to spread out the transmission of packets.
+  if (_packetHistory->PutRTPPacket(
+      buffer, rtpLength + length, _maxPayloadLength, storage) != 0) {
+    return -1;
+  }
+
+  if (_transmissionSmoothing) {
+    // Sequence number lives in bytes 2-3 of the RTP header.
+    const WebRtc_UWord16 sequenceNumber = (buffer[2] << 8) + buffer[3];
+    _sendBucket.Fill(sequenceNumber, rtpLength + length);
+    // Packet will be sent at a later time.
+    return 0;
+  }
+
+  // Send packet
+  WebRtc_Word32 bytes_sent = -1;
+  {
+    CriticalSectionScoped cs(_transportCritsect);
+    if (_transport) {
+      bytes_sent = _transport->SendPacket(_id, buffer, length + rtpLength);
+    }
+  }
+
+  if (bytes_sent <= 0) {
+    return -1;
+  }
+
+  // Update send statistics
+  CriticalSectionScoped cs(_sendCritsect);
+  Bitrate::Update(bytes_sent);
+  _packetsSent++;
+  if (bytes_sent > rtpLength) {
+    _payloadBytesSent += bytes_sent - rtpLength;
+  }
+  return 0;
+}
+
+// Periodic bitrate bookkeeping: updates the overall and NACK bitrate
+// estimates, and for video also the video module's bitrates.
+void
+RTPSender::ProcessBitrate()
+{
+    CriticalSectionScoped cs(_sendCritsect);
+
+    Bitrate::Process();
+    _nackBitrate.Process();
+
+    if (_audioConfigured)
+      return;
+    _video->ProcessBitrate();
+}
+
+// Length in bytes of the RTP header this sender currently produces:
+// 12 fixed bytes, plus 4 per CSRC when CSRCs are included, plus any
+// registered header extensions.
+WebRtc_UWord16
+RTPSender::RTPHeaderLength() const
+{
+    WebRtc_UWord16 rtpHeaderLength = 12;
+
+    if(_includeCSRCs)
+    {
+        rtpHeaderLength += sizeof(WebRtc_UWord32)*_CSRCs;
+    }
+    rtpHeaderLength += RtpHeaderExtensionTotalLength();
+
+    return rtpHeaderLength;
+}
+
+// Returns the current sequence number, then advances it by one.
+WebRtc_UWord16 RTPSender::IncrementSequenceNumber() {
+  CriticalSectionScoped cs(_sendCritsect);
+  return _sequenceNumber++;
+}
+
+// Clears the sent-packet and sent-payload-byte counters.
+// NOTE(review): resets without taking _sendCritsect, matching the
+// lock-free readers Packets()/Bytes().
+WebRtc_Word32 RTPSender::ResetDataCounters() {
+  _packetsSent = 0;
+  _payloadBytesSent = 0;
+  return 0;
+}
+
+// Number of RTP packets sent.
+// Deliberately lock-free to avoid a potential deadlock.
+WebRtc_UWord32
+RTPSender::Packets() const
+{
+    return _packetsSent;
+}
+
+// Number of RTP payload bytes sent (headers excluded).
+// Deliberately lock-free to avoid a potential deadlock.
+WebRtc_UWord32
+RTPSender::Bytes() const
+{
+    return _payloadBytesSent;
+}
+
+// Writes an RTP header (12 fixed bytes, plus CSRCs and header extensions
+// when configured) into |dataBuffer| and advances _sequenceNumber.
+// Returns the header length in bytes, or -1 if the CSRC count exceeds
+// kRtpCsrcSize.
+// NOTE(review): |incSequenceNumber| is currently unused — the sequence
+// number is always incremented below; confirm before relying on it.
+WebRtc_Word32
+RTPSender::BuildRTPheader(WebRtc_UWord8* dataBuffer,
+                          const WebRtc_Word8 payloadType,
+                          const bool markerBit,
+                          const WebRtc_UWord32 captureTimeStamp,
+                          const bool timeStampProvided,
+                          const bool incSequenceNumber)
+{
+    assert(payloadType>=0);
+
+    CriticalSectionScoped cs(_sendCritsect);
+
+    dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80);            // version 2
+    dataBuffer[1] = static_cast<WebRtc_UWord8>(payloadType);
+    if (markerBit)
+    {
+        dataBuffer[1] |= kRtpMarkerBitMask;  // MarkerBit is set
+    }
+
+    if(timeStampProvided)
+    {
+        _timeStamp = _startTimeStamp + captureTimeStamp;
+    } else
+    {
+        // make a unique time stamp
+        // used for inband signaling
+        // we can't inc by the actual time, since then we increase the risk of back timing
+        _timeStamp++;
+    }
+
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _sequenceNumber);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, _timeStamp);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, _ssrc);
+
+    WebRtc_Word32 rtpHeaderLength = 12;
+
+    // Add the CSRCs if any
+    if (_includeCSRCs && _CSRCs > 0)
+    {
+        if(_CSRCs > kRtpCsrcSize)
+        {
+            // error
+            assert(false);
+            return -1;
+        }
+        WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
+        for (WebRtc_UWord32 i = 0; i < _CSRCs; ++i)
+        {
+            ModuleRTPUtility::AssignUWord32ToBuffer(ptr, _CSRC[i]);
+            ptr +=4;
+        }
+        // CSRC count lives in the low 4 bits of the first byte.
+        dataBuffer[0] = (dataBuffer[0]&0xf0) | _CSRCs;
+
+        // Update length of header
+        rtpHeaderLength += sizeof(WebRtc_UWord32)*_CSRCs;
+    }
+    {
+        _sequenceNumber++; // prepare for next packet
+    }
+
+    WebRtc_UWord16 len = BuildRTPHeaderExtension(dataBuffer + rtpHeaderLength);
+    if (len)
+    {
+      dataBuffer[0] |= 0x10;  // set eXtension bit
+      rtpHeaderLength += len;
+    }
+
+    return rtpHeaderLength;
+}
+
+// Appends the one-byte-header extension block (RFC 5285, id 0xBEDE) with
+// all registered extension elements to |dataBuffer|. Returns the total
+// number of bytes written, or 0 when nothing was added.
+WebRtc_UWord16
+RTPSender::BuildRTPHeaderExtension(WebRtc_UWord8* dataBuffer) const
+{
+    if (_rtpHeaderExtensionMap.Size() <= 0) {
+       return 0;
+    }
+
+    /* RTP header extension, RFC 3550.
+     0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      defined by profile       |           length              |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |                        header extension                       |
+    |                             ....                              |
+    */
+
+    const WebRtc_UWord32 kPosLength = 2;
+    const WebRtc_UWord32 kHeaderLength = RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
+
+    // Add extension ID (0xBEDE).
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer,
+                                            RTP_ONE_BYTE_HEADER_EXTENSION);
+
+    // Add extensions.
+    WebRtc_UWord16 total_block_length = 0;
+
+    RTPExtensionType type = _rtpHeaderExtensionMap.First();
+    while (type != kRtpExtensionNone)
+    {
+        WebRtc_UWord8 block_length = 0;
+        // Only the transmission-time-offset extension is supported so far.
+        if (type == kRtpExtensionTransmissionTimeOffset)
+        {
+            block_length = BuildTransmissionTimeOffsetExtension(
+                dataBuffer + kHeaderLength + total_block_length);
+        }
+        total_block_length += block_length;
+        type = _rtpHeaderExtensionMap.Next(type);
+    }
+
+    if (total_block_length == 0)
+    {
+        // No extension added.
+        return 0;
+    }
+
+    // Set header length (in number of Word32, header excluded).
+    assert(total_block_length % 4 == 0);
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer + kPosLength,
+                                            total_block_length / 4);
+
+    // Total added length.
+    return kHeaderLength + total_block_length;
+}
+
+// Writes the RFC 5450 transmission-time-offset extension element into
+// dataBuffer using the RFC 5285 one-byte-header form (4-bit id, 4-bit
+// length, then a 24-bit signed offset). Returns the number of bytes
+// written, or 0 if the extension is not registered.
+WebRtc_UWord8
+RTPSender::BuildTransmissionTimeOffsetExtension(WebRtc_UWord8* dataBuffer) const
+{
+   // From RFC 5450: Transmission Time Offsets in RTP Streams.
+   //
+   // The transmission time is signaled to the receiver in-band using the
+   // general mechanism for RTP header extensions [RFC5285]. The payload
+   // of this extension (the transmitted value) is a 24-bit signed integer.
+   // When added to the RTP timestamp of the packet, it represents the
+   // "effective" RTP transmission time of the packet, on the RTP
+   // timescale.
+   //
+   // The form of the transmission offset extension block:
+   //
+   //    0                   1                   2                   3
+   //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   //   |  ID   | len=2 |              transmission offset              |
+   //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+    // Get id defined by user.
+    WebRtc_UWord8 id;
+    if (_rtpHeaderExtensionMap.GetId(kRtpExtensionTransmissionTimeOffset, &id)
+        != 0) {
+      // Not registered.
+      return 0;
+    }
+
+    int pos = 0;
+    // len=2 describes the 3 data bytes (RFC 5285 encodes length minus one).
+    const WebRtc_UWord8 len = 2;
+    dataBuffer[pos++] = (id << 4) + len;
+    ModuleRTPUtility::AssignUWord24ToBuffer(dataBuffer + pos,
+                                            _transmissionTimeOffset);
+    pos += 3;
+    assert(pos == TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES);
+    return TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES;
+}
+
+// Rewrites the transmission-time-offset field of an already serialized RTP
+// packet in place. time_ms is the send delay in milliseconds, converted to
+// the 90 kHz RTP timescale before writing. Logs and returns without
+// modification if the extension is not registered or the packet does not
+// carry the expected extension block.
+void RTPSender::UpdateTransmissionTimeOffset(
+    WebRtc_UWord8* rtp_packet,
+    const WebRtc_UWord16 rtp_packet_length,
+    const WebRtcRTPHeader& rtp_header,
+    const WebRtc_UWord32 time_ms) const {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  // Get length until start of transmission block (relative to the start of
+  // the extension header).
+  int transmission_block_pos =
+      _rtpHeaderExtensionMap.GetLengthUntilBlockStartInBytes(
+      kRtpExtensionTransmissionTimeOffset);
+  if (transmission_block_pos < 0) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, not registered.");
+    return;
+  }
+
+  // The extension header follows the 12-byte fixed header and the CSRC
+  // list; each CSRC occupies 4 bytes (RFC 3550). The previous code added
+  // only numCSRCs bytes, which pointed at the wrong byte whenever CSRCs
+  // were present.
+  const int extension_pos = 12 + 4 * rtp_header.header.numCSRCs;
+  const int block_pos = extension_pos + transmission_block_pos;
+  if (rtp_packet_length < block_pos + 4) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, invalid length.");
+    return;
+  }
+
+  // Verify that header contains the one-byte-header extension (0xBEDE).
+  if (!((rtp_packet[extension_pos] == 0xBE) &&
+        (rtp_packet[extension_pos + 1] == 0xDE))) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, hdr extension not found.");
+    return;
+  }
+
+  // Get id.
+  WebRtc_UWord8 id = 0;
+  if (_rtpHeaderExtensionMap.GetId(kRtpExtensionTransmissionTimeOffset,
+                                   &id) != 0) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, no id.");
+    return;
+  }
+
+  // Verify first byte in block: 4-bit id followed by len=2.
+  const WebRtc_UWord8 first_block_byte = (id << 4) + 2;
+  if (rtp_packet[block_pos] != first_block_byte) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset.");
+    return;
+  }
+
+  // Update transmission offset field (skip the id/len byte).
+  ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
+                                          time_ms * 90);  // RTP timestamp
+}
+
+// Installs the transport used to deliver outgoing packets.
+WebRtc_Word32 RTPSender::RegisterSendTransport(Transport* transport) {
+    CriticalSectionScoped cs(_transportCritsect);
+    _transport = transport;
+    return 0;
+}
+
+// Reacts to the sending state being toggled. When enabled, seeds the start
+// timestamp from the current clock in the media's RTP timescale (ignored if
+// a start timestamp was forced via the API). When disabled, draws a fresh
+// SSRC and sequence number for the next sending period, unless those were
+// forced via the API.
+void RTPSender::SetSendingStatus(const bool enabled)
+{
+    if (!enabled)
+    {
+        if (!_ssrcForced)
+        {
+            // Generate a new SSRC.
+            _ssrcDB.ReturnSSRC(_ssrc);
+            _ssrc = _ssrcDB.CreateSSRC();   // can't be 0
+        }
+        // Don't initialize the seq number if the SSRC was passed externally.
+        if (!_sequenceNumberForced && !_ssrcForced)
+        {
+            _sequenceNumber = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+        }
+        return;
+    }
+
+    WebRtc_UWord32 freq = 90000;  // 90 kHz for all video.
+    if (_audioConfigured)
+    {
+        const WebRtc_UWord32 frequency = _audio->AudioFrequency();
+        // Sanity check: only the supported audio sample rates are accepted.
+        if (frequency != 8000 && frequency != 12000 && frequency != 16000 &&
+            frequency != 24000 && frequency != 32000)
+        {
+            assert(false);
+            return;
+        }
+        freq = frequency;
+    }
+    const WebRtc_UWord32 rtp_time =
+        ModuleRTPUtility::GetCurrentRTP(&_clock, freq);
+    // Will be ignored if it's already configured via the API.
+    SetStartTimestamp(rtp_time);
+}
+
+// Sets whether media packets may be sent.
+void RTPSender::SetSendingMediaStatus(const bool enabled) {
+    CriticalSectionScoped cs(_sendCritsect);
+    _sendingMedia = enabled;
+}
+
+// Returns true if sending of media packets is currently enabled.
+bool RTPSender::SendingMedia() const {
+    CriticalSectionScoped cs(_sendCritsect);
+    return _sendingMedia;
+}
+
+// Returns the most recent RTP timestamp.
+WebRtc_UWord32 RTPSender::Timestamp() const {
+    CriticalSectionScoped cs(_sendCritsect);
+    return _timeStamp;
+}
+
+
+// Sets the RTP start timestamp. When force is true the value is pinned and
+// subsequent non-forced calls are ignored; otherwise the value is applied
+// only while no forced timestamp has been configured. Always returns 0.
+WebRtc_Word32 RTPSender::SetStartTimestamp(const WebRtc_UWord32 timestamp,
+                                           const bool force) {
+    CriticalSectionScoped cs(_sendCritsect);
+    if (force) {
+        _startTimeStampForced = true;
+        _startTimeStamp = timestamp;
+    } else if (!_startTimeStampForced) {
+        _startTimeStamp = timestamp;
+    }
+    return 0;
+}
+
+// Returns the configured RTP start timestamp.
+WebRtc_UWord32 RTPSender::StartTimestamp() const {
+    CriticalSectionScoped cs(_sendCritsect);
+    return _startTimeStamp;
+}
+
+// Draws a fresh SSRC from the database and returns it. Returns 0 without
+// any change when the SSRC was configured via the API.
+WebRtc_UWord32 RTPSender::GenerateNewSSRC() {
+    CriticalSectionScoped cs(_sendCritsect);
+    if (_ssrcForced) {
+        return 0;
+    }
+    _ssrc = _ssrcDB.CreateSSRC();   // can't be 0
+    return _ssrc;
+}
+
+// Configures the SSRC via the API. Re-applying the already-forced value is
+// a no-op; otherwise the SSRC database is updated and, unless the sequence
+// number was forced, a fresh random sequence number is drawn. Returns 0.
+WebRtc_Word32 RTPSender::SetSSRC(WebRtc_UWord32 ssrc) {
+    CriticalSectionScoped cs(_sendCritsect);
+    if (_ssrcForced && _ssrc == ssrc) {
+        return 0; // same ssrc, don't reset anything
+    }
+
+    _ssrcForced = true;
+    _ssrcDB.ReturnSSRC(_ssrc);
+    _ssrcDB.RegisterSSRC(ssrc);
+    _ssrc = ssrc;
+
+    if (!_sequenceNumberForced) {
+        _sequenceNumber = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+    }
+    return 0;
+}
+
+// Returns the current SSRC.
+WebRtc_UWord32 RTPSender::SSRC() const {
+    CriticalSectionScoped cs(_sendCritsect);
+    return _ssrc;
+}
+
+// Enables/disables inclusion of the CSRC list in outgoing RTP headers.
+// Takes _sendCritsect for consistency with the other senders' setters
+// (SetCSRCs, SetSSRC, SetSequenceNumber all guard their state with it);
+// the original wrote the flag unguarded.
+WebRtc_Word32
+RTPSender::SetCSRCStatus(const bool include)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    _includeCSRCs = include;
+    return 0;
+}
+
+// Stores up to kRtpCsrcSize contributing sources to include in outgoing
+// packets. Returns -1 (after asserting) when arrLength exceeds the array
+// capacity, otherwise 0.
+WebRtc_Word32 RTPSender::SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                                  const WebRtc_UWord8 arrLength)
+{
+    if (arrLength > kRtpCsrcSize)
+    {
+        assert(false);
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_sendCritsect);
+    for (WebRtc_UWord8 index = 0; index < arrLength; ++index)
+    {
+        _CSRC[index] = arrOfCSRC[index];
+    }
+    _CSRCs = arrLength;
+    return 0;
+}
+
+// Copies the configured CSRCs into arrOfCSRC and returns their count.
+// Returns -1 (after asserting) when arrOfCSRC is NULL.
+WebRtc_Word32 RTPSender::CSRCs(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    if (arrOfCSRC == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    for (int index = 0; index < _CSRCs && index < kRtpCsrcSize; ++index)
+    {
+        arrOfCSRC[index] = _CSRC[index];
+    }
+    return _CSRCs;
+}
+
+// Forces the outgoing RTP sequence number to a caller-supplied value.
+WebRtc_Word32 RTPSender::SetSequenceNumber(WebRtc_UWord16 seq) {
+    CriticalSectionScoped cs(_sendCritsect);
+    _sequenceNumberForced = true;
+    _sequenceNumber = seq;
+    return 0;
+}
+
+// Returns the current RTP sequence number.
+WebRtc_UWord16 RTPSender::SequenceNumber() const {
+    CriticalSectionScoped cs(_sendCritsect);
+    return _sequenceNumber;
+}
+
+
+    /*
+    *    Audio
+    */
+// Forwards the audio feedback callback to the audio sender. Fails with -1
+// for a sender that is not audio-configured.
+WebRtc_Word32
+RTPSender::RegisterAudioCallback(RtpAudioFeedback* messagesCallback)
+{
+    return _audioConfigured ? _audio->RegisterAudioCallback(messagesCallback)
+                            : -1;
+}
+
+// Sends a DTMF tone per RFC 2833 (4733). Fails with -1 for a sender that
+// is not audio-configured.
+WebRtc_Word32
+RTPSender::SendTelephoneEvent(const WebRtc_UWord8 key,
+                              const WebRtc_UWord16 time_ms,
+                              const WebRtc_UWord8 level)
+{
+    return _audioConfigured ? _audio->SendTelephoneEvent(key, time_ms, level)
+                            : -1;
+}
+
+// Reports whether a telephone event (DTMF) is currently being sent; false
+// for a sender that is not audio-configured.
+bool
+RTPSender::SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const
+{
+    return _audioConfigured ? _audio->SendTelephoneEventActive(telephoneEvent)
+                            : false;
+}
+
+// Sets the audio packet size in samples; used to decide when it's time to
+// send a DTMF packet in silence (CNG). Fails with -1 for a sender that is
+// not audio-configured.
+WebRtc_Word32
+RTPSender::SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples)
+{
+    return _audioConfigured ? _audio->SetAudioPacketSize(packetSizeSamples)
+                            : -1;
+}
+
+// Sets status and ID for the audio-level-indication header extension.
+// Fails with -1 for a sender that is not audio-configured.
+WebRtc_Word32
+RTPSender::SetAudioLevelIndicationStatus(const bool enable,
+                                         const WebRtc_UWord8 ID)
+{
+    return _audioConfigured ? _audio->SetAudioLevelIndicationStatus(enable, ID)
+                            : -1;
+}
+
+// Gets status and ID for the audio-level-indication header extension.
+// Guard added: every other audio wrapper in this file refuses with -1 when
+// the sender is not audio-configured before touching _audio; this one
+// dereferenced _audio unconditionally.
+WebRtc_Word32
+RTPSender::AudioLevelIndicationStatus(bool& enable,
+                                      WebRtc_UWord8& ID) const
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->AudioLevelIndicationStatus(enable, ID);
+}
+
+// Stores the audio level in dBov for the audio-level-indication extension.
+// Guard added: siblings check _audioConfigured before using _audio; this
+// method dereferenced _audio unconditionally.
+WebRtc_Word32
+RTPSender::SetAudioLevel(const WebRtc_UWord8 level_dBov)
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->SetAudioLevel(level_dBov);
+}
+
+// Sets the payload type for Redundant Audio Data (RFC 2198). Fails with -1
+// for a sender that is not audio-configured.
+WebRtc_Word32
+RTPSender::SetRED(const WebRtc_Word8 payloadType)
+{
+    return _audioConfigured ? _audio->SetRED(payloadType) : -1;
+}
+
+// Gets the payload type for Redundant Audio Data (RFC 2198). Fails with -1
+// for a sender that is not audio-configured.
+WebRtc_Word32
+RTPSender::RED(WebRtc_Word8& payloadType) const
+{
+    return _audioConfigured ? _audio->RED(payloadType) : -1;
+}
+
+    /*
+    *    Video
+    */
+// Returns the video codec information, or NULL for an audio-configured
+// sender.
+VideoCodecInformation*
+RTPSender::CodecInformationVideo()
+{
+    return _audioConfigured ? NULL : _video->CodecInformationVideo();
+}
+
+// Returns the RTP video codec type, or kRtpNoVideo for an audio-configured
+// sender.
+RtpVideoCodecTypes
+RTPSender::VideoCodecType() const
+{
+    return _audioConfigured ? kRtpNoVideo : _video->VideoCodecType();
+}
+
+// Returns the maximum configured video bitrate, or 0 for an
+// audio-configured sender.
+WebRtc_UWord32
+RTPSender::MaxConfiguredBitrateVideo() const
+{
+    return _audioConfigured ? 0 : _video->MaxConfiguredBitrateVideo();
+}
+
+// Requests an intra frame over RTP. Fails with -1 for an audio-configured
+// sender.
+WebRtc_Word32
+RTPSender::SendRTPIntraRequest()
+{
+    return _audioConfigured ? -1 : _video->SendRTPIntraRequest();
+}
+
+// FEC: enables/disables generic FEC with the given RED and FEC payload
+// types. Fails with -1 for an audio-configured sender.
+WebRtc_Word32
+RTPSender::SetGenericFECStatus(const bool enable,
+                               const WebRtc_UWord8 payloadTypeRED,
+                               const WebRtc_UWord8 payloadTypeFEC)
+{
+    return _audioConfigured
+        ? -1
+        : _video->SetGenericFECStatus(enable, payloadTypeRED, payloadTypeFEC);
+}
+
+// Reads back the generic FEC configuration. Fails with -1 for an
+// audio-configured sender.
+WebRtc_Word32
+RTPSender::GenericFECStatus(bool& enable,
+                            WebRtc_UWord8& payloadTypeRED,
+                            WebRtc_UWord8& payloadTypeFEC) const
+{
+    return _audioConfigured
+        ? -1
+        : _video->GenericFECStatus(enable, payloadTypeRED, payloadTypeFEC);
+}
+
+// Sets FEC code rates for key and delta frames. Fails with -1 for an
+// audio-configured sender.
+WebRtc_Word32
+RTPSender::SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
+                          const WebRtc_UWord8 deltaFrameCodeRate)
+{
+    return _audioConfigured
+        ? -1
+        : _video->SetFECCodeRate(keyFrameCodeRate, deltaFrameCodeRate);
+}
+
+WebRtc_Word32
+RTPSender::SetFECUepProtection(const bool keyUseUepProtection,
+                               const bool deltaUseUepProtection)
+
+{
+    if(_audioConfigured)
+    {
+        return -1;
+    }
+    return _video->SetFECUepProtection(keyUseUepProtection,
+                                       deltaUseUepProtection);
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender.h b/trunk/src/modules/rtp_rtcp/source/rtp_sender.h
new file mode 100644
index 0000000..00012e1
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender.h
@@ -0,0 +1,376 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
+
+#include <cassert>
+#include <cmath>
+#include <map>
+
+#include "rtp_rtcp_config.h"       // misc. defines (e.g. MAX_PACKET_LENGTH)
+#include "rtp_rtcp_defines.h"
+#include "common_types.h"          // Encryption
+#include "ssrc_database.h"
+#include "Bitrate.h"
+#include "rtp_header_extension.h"
+#include "video_codec_information.h"
+#include "transmission_bucket.h"
+
+#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class RTPPacketHistory;
+class RTPSenderAudio;
+class RTPSenderVideo;
+
+// Minimal interface to the RTP sender exposed to the media-specific
+// packetizers (e.g. RTPSenderAudio, which takes an RTPSenderInterface* in
+// its constructor) for building headers and handing off packets.
+class RTPSenderInterface
+{
+public:
+    RTPSenderInterface() {}
+    virtual ~RTPSenderInterface() {}
+
+    virtual WebRtc_UWord32 SSRC() const = 0;
+    virtual WebRtc_UWord32 Timestamp() const = 0;
+
+    // Writes an RTP header into dataBuffer; returns the header length in
+    // bytes.
+    virtual WebRtc_Word32 BuildRTPheader(WebRtc_UWord8* dataBuffer,
+                                       const WebRtc_Word8 payloadType,
+                                       const bool markerBit,
+                                       const WebRtc_UWord32 captureTimeStamp,
+                                       const bool timeStampProvided = true,
+                                       const bool incSequenceNumber = true) = 0;
+
+    virtual WebRtc_UWord16 RTPHeaderLength() const = 0;
+    virtual WebRtc_UWord16 IncrementSequenceNumber() = 0;
+    virtual WebRtc_UWord16 SequenceNumber()   const = 0;
+    virtual WebRtc_UWord16 MaxPayloadLength() const = 0;
+    virtual WebRtc_UWord16 MaxDataPayloadLength() const = 0;
+    virtual WebRtc_UWord16 PacketOverHead() const = 0;
+    virtual WebRtc_UWord16 TargetSendBitrateKbit() const = 0;
+    virtual WebRtc_UWord16 ActualSendBitrateKbit() const = 0;
+
+    // Hands a fully built packet (header + payload) to the sender; storage
+    // controls retransmission bookkeeping.
+    virtual WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const WebRtc_UWord16 rtpHeaderLength,
+                                        const StorageType storage) = 0;
+};
+
+// Owns outgoing RTP state for one stream (SSRC, sequence number,
+// timestamps, CSRCs, header extensions, keep-alive, RTX, NACK history) and
+// delegates media-specific work to RTPSenderAudio or RTPSenderVideo
+// depending on the `audio` constructor flag (see _audioConfigured).
+class RTPSender : public Bitrate, public RTPSenderInterface
+{
+public:
+    RTPSender(const WebRtc_Word32 id, const bool audio, RtpRtcpClock* clock);
+    virtual ~RTPSender();
+
+    WebRtc_Word32 Init(const WebRtc_UWord32 remoteSSRC);
+    void ChangeUniqueId(const WebRtc_Word32 id);
+
+    void ProcessBitrate();
+    void ProcessSendToNetwork();
+
+    WebRtc_UWord16 TargetSendBitrateKbit() const;
+    WebRtc_UWord16 ActualSendBitrateKbit() const;
+
+    WebRtc_UWord32 VideoBitrateSent() const;
+    WebRtc_UWord32 FecOverheadRate() const;
+    WebRtc_UWord32 NackOverheadRate() const;
+
+    WebRtc_Word32 SetTargetSendBitrate(const WebRtc_UWord32 bits);
+
+    WebRtc_UWord16 MaxDataPayloadLength() const; // with RTP and FEC headers
+
+    // callback
+    WebRtc_Word32 RegisterSendTransport(Transport* outgoingTransport);
+
+    WebRtc_Word32 RegisterPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate);
+
+    WebRtc_Word32 DeRegisterSendPayload(const WebRtc_Word8 payloadType);
+
+    WebRtc_Word8 SendPayloadType() const;
+
+    int SendPayloadFrequency() const;
+
+    void SetSendingStatus(const bool enabled);
+
+    void SetSendingMediaStatus(const bool enabled);
+    bool SendingMedia() const;
+
+    // number of sent RTP packets
+    WebRtc_UWord32 Packets() const;
+
+    // number of sent RTP bytes
+    WebRtc_UWord32 Bytes() const;
+
+    WebRtc_Word32 ResetDataCounters();
+
+    // Start timestamp: with force == true the value is pinned; otherwise it
+    // is applied only while no forced value exists.
+    WebRtc_UWord32 StartTimestamp() const;
+    WebRtc_Word32 SetStartTimestamp(const WebRtc_UWord32 timestamp,
+                                    const bool force = false);
+
+    // GenerateNewSSRC returns 0 (no change) if the SSRC was set via the API.
+    WebRtc_UWord32 GenerateNewSSRC();
+    WebRtc_Word32 SetSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_UWord16 SequenceNumber() const;
+    WebRtc_Word32 SetSequenceNumber( WebRtc_UWord16 seq);
+
+    WebRtc_Word32 CSRCs(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const;
+
+    WebRtc_Word32 SetCSRCStatus(const bool include);
+
+    WebRtc_Word32 SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                           const WebRtc_UWord8 arrLength);
+
+    WebRtc_Word32 SetMaxPayloadLength(const WebRtc_UWord16 length,
+                                      const WebRtc_UWord16 packetOverHead);
+
+    WebRtc_Word32 SendOutgoingData(const FrameType frameType,
+                                   const WebRtc_Word8 payloadType,
+                                   const WebRtc_UWord32 timeStamp,
+                                   const WebRtc_UWord8* payloadData,
+                                   const WebRtc_UWord32 payloadSize,
+                                   const RTPFragmentationHeader* fragmentation,
+                                   VideoCodecInformation* codecInfo = NULL,
+                                   const RTPVideoTypeHeader* rtpTypeHdr = NULL);
+
+    WebRtc_Word32 SendPadData(WebRtc_Word8 payload_type,
+                              WebRtc_UWord32 capture_timestamp,
+                              WebRtc_Word32 bytes);
+    /*
+    * RTP header extension
+    */
+    WebRtc_Word32 SetTransmissionTimeOffset(
+        const WebRtc_Word32 transmissionTimeOffset);
+
+    WebRtc_Word32 RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                             const WebRtc_UWord8 id);
+
+    WebRtc_Word32 DeregisterRtpHeaderExtension(const RTPExtensionType type);
+
+    WebRtc_UWord16 RtpHeaderExtensionTotalLength() const;
+
+    // Writes the RFC 5285 extension block into dataBuffer; returns bytes
+    // written (0 when no extension is registered/built).
+    WebRtc_UWord16 BuildRTPHeaderExtension(WebRtc_UWord8* dataBuffer) const;
+
+    WebRtc_UWord8 BuildTransmissionTimeOffsetExtension(
+        WebRtc_UWord8* dataBuffer) const;
+
+    // Patches the RFC 5450 transmission time offset of an already built
+    // packet in place.
+    void UpdateTransmissionTimeOffset(WebRtc_UWord8* rtp_packet,
+                                      const WebRtc_UWord16 rtp_packet_length,
+                                      const WebRtcRTPHeader& rtp_header,
+                                      const WebRtc_UWord32 time_ms) const;
+
+    void SetTransmissionSmoothingStatus(const bool enable);
+
+    bool TransmissionSmoothingStatus() const;
+
+    /*
+    *    NACK
+    */
+    int SelectiveRetransmissions() const;
+    int SetSelectiveRetransmissions(uint8_t settings);
+    void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
+                        const WebRtc_UWord16* nackSequenceNumbers,
+                        const WebRtc_UWord16 avgRTT);
+
+    WebRtc_Word32 SetStorePacketsStatus(const bool enable,
+                                        const WebRtc_UWord16 numberToStore);
+
+    bool StorePackets() const;
+
+    WebRtc_Word32 ReSendPacket(WebRtc_UWord16 packet_id,
+                               WebRtc_UWord32 min_resend_time = 0);
+
+    WebRtc_Word32 ReSendToNetwork(const WebRtc_UWord8* packet,
+                                  const WebRtc_UWord32 size);
+
+    bool ProcessNACKBitRate(const WebRtc_UWord32 now);
+
+    /*
+    *    Keep alive
+    */
+    WebRtc_Word32 EnableRTPKeepalive( const WebRtc_Word8 unknownPayloadType,
+                                    const WebRtc_UWord16 deltaTransmitTimeMS);
+
+    WebRtc_Word32 RTPKeepaliveStatus(bool* enable,
+                                   WebRtc_Word8* unknownPayloadType,
+                                   WebRtc_UWord16* deltaTransmitTimeMS) const;
+
+    WebRtc_Word32 DisableRTPKeepalive();
+
+    bool RTPKeepalive() const;
+
+    bool TimeToSendRTPKeepalive() const;
+
+    WebRtc_Word32 SendRTPKeepalivePacket();
+
+    /*
+    *  RTX
+    */
+    void SetRTXStatus(const bool enable,
+                      const bool setSSRC,
+                      const WebRtc_UWord32 SSRC);
+
+    void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;
+
+    /*
+    * Functions wrapping RTPSenderInterface
+    */
+    virtual WebRtc_Word32 BuildRTPheader(WebRtc_UWord8* dataBuffer,
+                                       const WebRtc_Word8 payloadType,
+                                       const bool markerBit,
+                                       const WebRtc_UWord32 captureTimeStamp,
+                                       const bool timeStampProvided = true,
+                                       const bool incSequenceNumber = true);
+
+    virtual WebRtc_UWord16 RTPHeaderLength() const ;
+    virtual WebRtc_UWord16 IncrementSequenceNumber();
+    virtual WebRtc_UWord16 MaxPayloadLength() const;
+    virtual WebRtc_UWord16 PacketOverHead() const;
+
+    // current timestamp
+    virtual WebRtc_UWord32 Timestamp() const;
+    virtual WebRtc_UWord32 SSRC() const;
+
+    virtual WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const WebRtc_UWord16 rtpHeaderLength,
+                                        const StorageType storage);
+
+    /*
+    *    Audio
+    */
+    WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback);
+
+    // Send a DTMF tone using RFC 2833 (4733)
+    WebRtc_Word32 SendTelephoneEvent(const WebRtc_UWord8 key,
+                                     const WebRtc_UWord16 time_ms,
+                                     const WebRtc_UWord8 level);
+
+    bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
+
+    // Set status and ID for header-extension-for-audio-level-indication.
+    WebRtc_Word32 SetAudioLevelIndicationStatus(const bool enable,
+                                              const WebRtc_UWord8 ID);
+
+    // Get status and ID for header-extension-for-audio-level-indication.
+    WebRtc_Word32 AudioLevelIndicationStatus(bool& enable,
+                                           WebRtc_UWord8& ID) const;
+
+    // Store the audio level in dBov for header-extension-for-audio-level-indication.
+    WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 SetRED(const WebRtc_Word8 payloadType);
+
+    // Get payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 RED(WebRtc_Word8& payloadType) const;
+
+    /*
+    *    Video
+    */
+    VideoCodecInformation* CodecInformationVideo();
+
+    RtpVideoCodecTypes VideoCodecType() const;
+
+    WebRtc_UWord32 MaxConfiguredBitrateVideo() const;
+
+    WebRtc_Word32 SendRTPIntraRequest();
+
+    // FEC
+    WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                    const WebRtc_UWord8 payloadTypeRED,
+                                    const WebRtc_UWord8 payloadTypeFEC);
+
+    WebRtc_Word32 GenericFECStatus(bool& enable,
+                                 WebRtc_UWord8& payloadTypeRED,
+                                 WebRtc_UWord8& payloadTypeFEC) const;
+
+    WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
+                                 const WebRtc_UWord8 deltaFrameCodeRate);
+
+    WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
+                                      const bool deltaUseUepProtection);
+
+protected:
+    WebRtc_Word32 CheckPayloadType(const WebRtc_Word8 payloadType,
+                                   RtpVideoCodecTypes& videoType);
+
+private:
+    void UpdateNACKBitRate(const WebRtc_UWord32 bytes,
+                           const WebRtc_UWord32 now);
+
+    WebRtc_Word32              _id;
+    // True for an audio sender; selects which of _audio/_video is used.
+    const bool                 _audioConfigured;
+    RTPSenderAudio*            _audio;
+    RTPSenderVideo*            _video;
+ 
+    // Guards the RTP state below (SSRC, sequence number, CSRCs, ...).
+    CriticalSectionWrapper*    _sendCritsect;
+
+    CriticalSectionWrapper*    _transportCritsect;
+    Transport*                 _transport;
+
+    bool                      _sendingMedia;
+
+    WebRtc_UWord16            _maxPayloadLength;
+    WebRtc_UWord16            _targetSendBitrate;
+    WebRtc_UWord16            _packetOverHead;
+
+    WebRtc_Word8              _payloadType;
+    std::map<WebRtc_Word8, ModuleRTPUtility::Payload*> _payloadTypeMap;
+
+    RtpHeaderExtensionMap     _rtpHeaderExtensionMap;
+    WebRtc_Word32             _transmissionTimeOffset;
+
+    bool                      _keepAliveIsActive;
+    WebRtc_Word8              _keepAlivePayloadType;
+    WebRtc_UWord32            _keepAliveLastSent;
+    WebRtc_UWord16            _keepAliveDeltaTimeSend;
+
+    // NACK
+    WebRtc_UWord32            _nackByteCountTimes[NACK_BYTECOUNT_SIZE];
+    WebRtc_Word32             _nackByteCount[NACK_BYTECOUNT_SIZE];
+    Bitrate                   _nackBitrate;
+
+    RTPPacketHistory*         _packetHistory;
+    TransmissionBucket        _sendBucket;
+    WebRtc_UWord32            _timeLastSendToNetworkUpdate;
+    bool                      _transmissionSmoothing;
+
+    // statistics
+    WebRtc_UWord32            _packetsSent;
+    WebRtc_UWord32            _payloadBytesSent;
+
+    // RTP variables. The *_Forced flags record values pinned via the API
+    // that must not be regenerated on sending-state changes.
+    bool                      _startTimeStampForced;
+    WebRtc_UWord32            _startTimeStamp;
+    SSRCDatabase&             _ssrcDB;
+    WebRtc_UWord32            _remoteSSRC;
+    bool                      _sequenceNumberForced;
+    WebRtc_UWord16            _sequenceNumber;
+    WebRtc_UWord16            _sequenceNumberRTX;
+    bool                      _ssrcForced;
+    WebRtc_UWord32            _ssrc;
+    WebRtc_UWord32            _timeStamp;
+    // _CSRCs is the count of valid entries in _CSRC.
+    WebRtc_UWord8             _CSRCs;
+    WebRtc_UWord32            _CSRC[kRtpCsrcSize];
+    bool                      _includeCSRCs;
+    bool                      _RTX;
+    WebRtc_UWord32            _ssrcRTX;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender_audio.cc b/trunk/src/modules/rtp_rtcp/source/rtp_sender_audio.cc
new file mode 100644
index 0000000..ee3c414
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -0,0 +1,627 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_sender_audio.h"
+
+#include <string.h> //memcpy
+#include <cassert> //assert
+
+namespace webrtc {
+// Ctor: caches the owning RTPSender and shared clock, and creates the two
+// locks guarding the audio-feedback callback and the send state respectively.
+// All payload-type fields start at -1 (unconfigured).
+RTPSenderAudio::RTPSenderAudio(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                               RTPSenderInterface* rtpSender) :
+    _id(id),
+    _clock(*clock),
+    _rtpSender(rtpSender),
+    _audioFeedbackCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _audioFeedback(NULL),
+    _sendAudioCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _frequency(8000),         // default narrowband RTP clock rate
+    _packetSizeSamples(160),  // 20 ms at 8 kHz
+    _dtmfEventIsOn(false),
+    _dtmfEventFirstPacketSent(false),
+    _dtmfPayloadType(-1),
+    _dtmfTimestamp(0),
+    _dtmfKey(0),
+    _dtmfLengthSamples(0),
+    _dtmfLevel(0),
+    _dtmfTimeLastSent(0),
+    _dtmfTimestampLastSent(0),
+    _REDPayloadType(-1),
+    _inbandVADactive(false),
+    _cngNBPayloadType(-1),
+    _cngWBPayloadType(-1),
+    _cngSWBPayloadType(-1),
+    _lastPayloadType(-1),
+    _includeAudioLevelIndication(false),    // @TODO - reset at Init()?
+    _audioLevelIndicationID(0),
+    _audioLevel_dBov(0) {
+}  // Fixed: removed stray ';' after the ctor body (ill-formed pre-C++11).
+
+RTPSenderAudio::~RTPSenderAudio() {
+  // Release the two locks created in the constructor, in reverse
+  // creation order.
+  delete _audioFeedbackCritsect;
+  delete _sendAudioCritsect;
+}
+
+// Reset all dynamically configured state back to its post-construction
+// defaults and flush any queued DTMF events.  Always succeeds.
+WebRtc_Word32 RTPSenderAudio::Init() {
+  CriticalSectionScoped cs(_sendAudioCritsect);
+
+  // All payload types return to "unconfigured".
+  _dtmfPayloadType = -1;
+  _REDPayloadType = -1;
+  _cngNBPayloadType = -1;
+  _cngWBPayloadType = -1;
+  _cngSWBPayloadType = -1;
+  _lastPayloadType = -1;
+  _inbandVADactive = false;
+  _dtmfTimeLastSent = 0;
+  _dtmfTimestampLastSent = 0;
+  ResetDTMF();
+  return 0;
+}
+
+// Replace the module id reported to callbacks/trace.
+void RTPSenderAudio::ChangeUniqueId(const WebRtc_Word32 id) {
+  _id = id;
+}
+
+// Install (or clear, with NULL) the listener notified when a DTMF tone
+// starts playing.  Always succeeds.
+WebRtc_Word32 RTPSenderAudio::RegisterAudioCallback(
+    RtpAudioFeedback* messagesCallback) {
+  CriticalSectionScoped lock(_audioFeedbackCritsect);
+  _audioFeedback = messagesCallback;
+  return 0;
+}
+
+// Update the RTP clock rate in samples/second; used by SendAudio() to turn a
+// DTMF duration in ms into a duration in timestamp units.
+void RTPSenderAudio::SetAudioFrequency(const WebRtc_UWord32 f) {
+  CriticalSectionScoped lock(_sendAudioCritsect);
+  _frequency = f;
+}
+
+// Current RTP clock rate in samples/second.
+int RTPSenderAudio::AudioFrequency() const {
+  CriticalSectionScoped lock(_sendAudioCritsect);
+  return _frequency;
+}
+
+// Set the audio packet size in samples; SendAudio() uses it to decide when
+// it is time to emit a DTMF packet during silence (CNG).
+WebRtc_Word32 RTPSenderAudio::SetAudioPacketSize(
+    const WebRtc_UWord16 packetSizeSamples) {
+  CriticalSectionScoped lock(_sendAudioCritsect);
+  _packetSizeSamples = packetSizeSamples;
+  return 0;
+}
+
+// Record state for a newly registered audio payload type.
+// - "cn": the CNG payload type is cached per sample rate (8/16/32 kHz) so
+//   MarkerBit() can recognize CNG payloads; other CNG rates fail with -1.
+//   Registration then falls through so a Payload entry is still created.
+// - "telephone-event": only the DTMF payload type is stored; no Payload entry
+//   is created because sending plain audio with the DTMF type is disallowed.
+// - Otherwise a Payload struct is allocated and returned via |payload|;
+//   ownership passes to the caller.
+WebRtc_Word32 RTPSenderAudio::RegisterAudioPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate,
+    ModuleRTPUtility::Payload*& payload) {
+  CriticalSectionScoped cs(_sendAudioCritsect);
+
+  if (ModuleRTPUtility::StringCompare(payloadName, "cn", 2))  {
+    //  we can have multiple CNG payload types
+    if (frequency == 8000) {
+      _cngNBPayloadType = payloadType;
+
+    } else if (frequency == 16000) {
+      _cngWBPayloadType = payloadType;
+
+    } else if (frequency == 32000) {
+      _cngSWBPayloadType = payloadType;
+    } else {
+      return -1;
+    }
+  }
+  if (ModuleRTPUtility::StringCompare(payloadName, "telephone-event", 15)) {
+    // Don't add it to the list
+    // we dont want to allow send with a DTMF payloadtype
+    _dtmfPayloadType = payloadType;
+    return 0;
+    // The default timestamp rate is 8000 Hz, but other rates may be defined.
+  }
+  payload = new ModuleRTPUtility::Payload;
+  payload->typeSpecific.Audio.frequency = frequency;
+  payload->typeSpecific.Audio.channels = channels;
+  payload->typeSpecific.Audio.rate = rate;
+  payload->audio = true;
+  // Guarantee NUL termination before the bounded copy below.
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  return 0;
+}
+
+// Decide whether the RTP marker bit should be set for this packet.  For
+// audio the marker flags the first packet of a talk spurt.
+bool RTPSenderAudio::MarkerBit(const FrameType frameType,
+                               const WebRtc_Word8 payloadType) {
+  CriticalSectionScoped lock(_sendAudioCritsect);
+
+  bool markerBit = false;
+  if (payloadType != _lastPayloadType) {
+    // Switching *to* a configured CNG payload never sets the marker; only a
+    // change to a non-CNG payload may start a talk spurt.
+    const bool toCng =
+        (_cngNBPayloadType != -1 && payloadType == _cngNBPayloadType) ||
+        (_cngWBPayloadType != -1 && payloadType == _cngWBPayloadType) ||
+        (_cngSWBPayloadType != -1 && payloadType == _cngSWBPayloadType);
+    if (toCng) {
+      return false;
+    }
+    if (_lastPayloadType == -1) {
+      // Very first packet sent on this stream.
+      if (frameType == kAudioFrameCN) {
+        // Starting in comfort noise: remember inband VAD state, no marker.
+        _inbandVADactive = true;
+        return false;
+      }
+      // First packet and not CNG.
+      return true;
+    }
+    // Payload type changed mid-stream (and is not CNG): mark the change.
+    markerBit = true;
+  }
+
+  // For codecs with inband VAD (G.723, G.729, AMR, ...): the first non-CN
+  // frame after a CN period also starts a talk spurt.
+  if (frameType == kAudioFrameCN) {
+    _inbandVADactive = true;
+  } else if (_inbandVADactive) {
+    _inbandVADactive = false;
+    markerBit = true;
+  }
+  return markerBit;
+}
+
+// Returns true (and the active key) while a DTMF tone is being sent, or for
+// 100 ms after the last tone ended; otherwise sets |telephoneEvent| to -1.
+bool
+RTPSenderAudio::SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const
+{
+    if(_dtmfEventIsOn)
+    {
+        telephoneEvent = _dtmfKey;
+        return true;
+    }
+    // NOTE(review): _dtmfKey and _dtmfTimeLastSent are read here without
+    // _sendAudioCritsect while SendAudio() writes them under the lock —
+    // presumably tolerated as a benign race, but verify.
+    // Unsigned subtraction: wraps safely if the clock is behind.
+    WebRtc_UWord32 delaySinceLastDTMF = (_clock.GetTimeInMS() - _dtmfTimeLastSent);
+    if(delaySinceLastDTMF < 100)
+    {
+        telephoneEvent = _dtmfKey;
+        return true;
+    }
+    telephoneEvent = -1;
+    return false;
+}
+
+// Packetize and send one audio frame, or drive an ongoing DTMF event.
+// Handles, in order: starting queued DTMF tones, emitting RFC 4733
+// telephone-event packets while a tone is active, the audio-level header
+// extension, and RED (RFC 2198) encapsulation when configured.
+// Returns 0 on success (including intentionally skipped empty frames) and
+// -1 on failure.
+// Bug fix: the marker bit is now actually set on the first packet of a DTMF
+// event (the old code updated _dtmfEventFirstPacketSent before reading it,
+// so the marker was never sent).
+WebRtc_Word32 RTPSenderAudio::SendAudio(
+    const FrameType frameType,
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 captureTimeStamp,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord32 dataSize,
+    const RTPFragmentationHeader* fragmentation) {
+  // TODO(pwestin) Breakup function in smaller functions.
+  WebRtc_UWord16 payloadSize = static_cast<WebRtc_UWord16>(dataSize);
+  WebRtc_UWord16 maxPayloadLength = _rtpSender->MaxPayloadLength();
+  bool dtmfToneStarted = false;
+  WebRtc_UWord16 dtmfLengthMS = 0;
+  WebRtc_UWord8 key = 0;
+
+  // Check if we have pending DTMFs to send
+  if (!_dtmfEventIsOn && PendingDTMF()) {
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    WebRtc_UWord32 delaySinceLastDTMF = _clock.GetTimeInMS() -
+        _dtmfTimeLastSent;
+
+    if (delaySinceLastDTMF > 100) {
+      // New tone to play
+      _dtmfTimestamp = captureTimeStamp;
+      if (NextDTMF(&key, &dtmfLengthMS, &_dtmfLevel) >= 0) {
+        _dtmfEventFirstPacketSent = false;
+        _dtmfKey = key;
+        _dtmfLengthSamples = (_frequency / 1000) * dtmfLengthMS;
+        dtmfToneStarted = true;
+        _dtmfEventIsOn = true;
+      }
+    }
+  }
+  // Notify the feedback listener outside _sendAudioCritsect so we never hold
+  // both locks at once.
+  if (dtmfToneStarted) {
+    CriticalSectionScoped cs(_audioFeedbackCritsect);
+    if (_audioFeedback) {
+      _audioFeedback->OnPlayTelephoneEvent(_id, key, dtmfLengthMS, _dtmfLevel);
+    }
+  }
+
+  // A source MAY send events and coded audio packets for the same time
+  // but we don't support it
+  {
+    _sendAudioCritsect->Enter();
+
+    if (_dtmfEventIsOn) {
+      if (frameType == kFrameEmpty) {
+        // kFrameEmpty is used to drive the DTMF when in CN mode
+        // it can be triggered more frequently than we want to send the
+        // DTMF packets.
+        if (_packetSizeSamples > (captureTimeStamp - _dtmfTimestampLastSent)) {
+          // not time to send yet
+          _sendAudioCritsect->Leave();
+          return 0;
+        }
+      }
+      _dtmfTimestampLastSent = captureTimeStamp;
+      WebRtc_UWord32 dtmfDurationSamples = captureTimeStamp - _dtmfTimestamp;
+      bool ended = false;
+      bool send = true;
+
+      if (_dtmfLengthSamples > dtmfDurationSamples) {
+        // Unsigned value, so this only matches an exact zero duration.
+        if (dtmfDurationSamples <= 0) {
+          // Skip send packet at start, since we shouldn't use duration 0
+          send = false;
+        }
+      } else {
+        ended = true;
+        _dtmfEventIsOn = false;
+        _dtmfTimeLastSent = _clock.GetTimeInMS();
+      }
+      // don't hold the critsect while calling SendTelephoneEventPacket
+      _sendAudioCritsect->Leave();
+      if (send) {
+        if (dtmfDurationSamples > 0xffff) {
+          // RFC 4733 2.5.2.3 Long-Duration Events
+          SendTelephoneEventPacket(ended, _dtmfTimestamp,
+                                   static_cast<WebRtc_UWord16>(0xffff), false);
+
+          // set new timestap for this segment
+          // NOTE(review): these members are written after Leave(); presumably
+          // there is only a single sender thread — confirm.
+          _dtmfTimestamp = captureTimeStamp;
+          dtmfDurationSamples -= 0xffff;
+          _dtmfLengthSamples -= 0xffff;
+
+          return SendTelephoneEventPacket(
+              ended,
+              _dtmfTimestamp,
+              static_cast<WebRtc_UWord16>(dtmfDurationSamples),
+              false);
+        } else {
+          // RFC 4733 requires the marker bit on the first packet of an
+          // event.  Capture the "first packet" state BEFORE updating the
+          // flag; the previous code set the flag first, so the marker bit
+          // argument was always false.
+          const bool firstPacket = !_dtmfEventFirstPacketSent;
+          _dtmfEventFirstPacketSent = true;
+          return SendTelephoneEventPacket(
+              ended,
+              _dtmfTimestamp,
+              static_cast<WebRtc_UWord16>(dtmfDurationSamples),
+              firstPacket);
+        }
+      }
+      return 0;
+    }
+    _sendAudioCritsect->Leave();
+  }
+  if (payloadSize == 0 || payloadData == NULL) {
+    if (frameType == kFrameEmpty) {
+      // we don't send empty audio RTP packets
+      // no error since we use it to drive DTMF when we use VAD
+      return 0;
+    }
+    return -1;
+  }
+  WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
+  bool markerBit = MarkerBit(frameType, payloadType);
+
+  WebRtc_Word32 rtpHeaderLength = 0;
+  WebRtc_UWord16 timestampOffset = 0;
+
+  if (_REDPayloadType >= 0 && fragmentation && !markerBit &&
+      fragmentation->fragmentationVectorSize > 1) {
+    // have we configured RED? use its payload type
+    // we need to get the current timestamp to calc the diff
+    WebRtc_UWord32 oldTimeStamp = _rtpSender->Timestamp();
+    rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, _REDPayloadType,
+                                                 markerBit, captureTimeStamp);
+
+    timestampOffset = WebRtc_UWord16(_rtpSender->Timestamp() - oldTimeStamp);
+  } else {
+    rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, payloadType,
+                                                 markerBit, captureTimeStamp);
+  }
+  if (rtpHeaderLength <= 0) {
+    return -1;
+  }
+  {
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+    if (_includeAudioLevelIndication) {
+      dataBuffer[0] |= 0x10; // set eXtension bit
+      /*
+        0                   1                   2                   3
+        0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        |      0xBE     |      0xDE     |            length=1           |
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        |  ID   | len=0 |V|   level     |      0x00     |      0x00     |
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       */
+      // add our ID (0xBEDE)
+      ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
+                                              RTP_AUDIO_LEVEL_UNIQUE_ID);
+      rtpHeaderLength += 2;
+
+      // add the length (length=1) in number of word32
+      const WebRtc_UWord8 length = 1;
+      ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
+                                              length);
+      rtpHeaderLength += 2;
+
+      // add ID (defined by the user) and len(=0) byte
+      const WebRtc_UWord8 id = _audioLevelIndicationID;
+      const WebRtc_UWord8 len = 0;
+      dataBuffer[rtpHeaderLength++] = (id << 4) + len;
+
+      // add voice-activity flag (V) bit and the audio level (in dBov)
+      const WebRtc_UWord8 V = (frameType == kAudioFrameSpeech);
+      WebRtc_UWord8 level = _audioLevel_dBov;
+      dataBuffer[rtpHeaderLength++] = (V << 7) + level;
+
+      // add two bytes zero padding
+      ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength, 0);
+      rtpHeaderLength += 2;
+    }
+
+    if(maxPayloadLength < rtpHeaderLength + payloadSize ) {
+      // too large payload buffer
+      return -1;
+    }
+
+    if (_REDPayloadType >= 0 &&  // Have we configured RED?
+        fragmentation &&
+        fragmentation->fragmentationVectorSize > 1 &&
+        !markerBit) {
+      // RED timestamp offset is a 14-bit field (RFC 2198).
+      if (timestampOffset <= 0x3fff) {
+        if(fragmentation->fragmentationVectorSize != 2) {
+          // we only support 2 codecs when using RED
+          return -1;
+        }
+        // only 0x80 if we have multiple blocks
+        dataBuffer[rtpHeaderLength++] = 0x80 +
+            fragmentation->fragmentationPlType[1];
+        WebRtc_UWord32 blockLength = fragmentation->fragmentationLength[1];
+
+        // sanity blockLength
+        if(blockLength > 0x3ff) {  // block length 10 bits 1023 bytes
+          return -1;
+        }
+        WebRtc_UWord32 REDheader = (timestampOffset << 10) + blockLength;
+        ModuleRTPUtility::AssignUWord24ToBuffer(dataBuffer + rtpHeaderLength,
+                                                REDheader);
+        rtpHeaderLength += 3;
+
+        dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+        // copy the RED data
+        memcpy(dataBuffer+rtpHeaderLength,
+               payloadData + fragmentation->fragmentationOffset[1],
+               fragmentation->fragmentationLength[1]);
+
+        // copy the normal data
+        memcpy(dataBuffer+rtpHeaderLength +
+               fragmentation->fragmentationLength[1],
+               payloadData + fragmentation->fragmentationOffset[0],
+               fragmentation->fragmentationLength[0]);
+
+        payloadSize = static_cast<WebRtc_UWord16>(
+            fragmentation->fragmentationLength[0] +
+            fragmentation->fragmentationLength[1]);
+      } else {
+        // silence for too long send only new data
+        dataBuffer[rtpHeaderLength++] = static_cast<WebRtc_UWord8>(payloadType);
+        memcpy(dataBuffer+rtpHeaderLength,
+               payloadData + fragmentation->fragmentationOffset[0],
+               fragmentation->fragmentationLength[0]);
+
+        payloadSize = static_cast<WebRtc_UWord16>(
+            fragmentation->fragmentationLength[0]);
+      }
+    } else {
+      if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
+        // use the fragment info if we have one
+        memcpy( dataBuffer+rtpHeaderLength,
+                payloadData + fragmentation->fragmentationOffset[0],
+                fragmentation->fragmentationLength[0]);
+
+        payloadSize = static_cast<WebRtc_UWord16>(
+            fragmentation->fragmentationLength[0]);
+      } else {
+        memcpy(dataBuffer+rtpHeaderLength, payloadData, payloadSize);
+      }
+    }
+    _lastPayloadType = payloadType;
+  }   // end critical section
+  return _rtpSender->SendToNetwork(dataBuffer,
+                                   payloadSize,
+                                   static_cast<WebRtc_UWord16>(rtpHeaderLength),
+                                   kAllowRetransmission);
+}
+
+// Enable/disable the audio-level RTP header extension.  Per the one-byte
+// header format, the extension ID must lie in [1, 14].
+WebRtc_Word32 RTPSenderAudio::SetAudioLevelIndicationStatus(
+    const bool enable,
+    const WebRtc_UWord8 ID) {
+  if (ID < 1 || ID > 14) {
+    return -1;
+  }
+  CriticalSectionScoped lock(_sendAudioCritsect);
+  _includeAudioLevelIndication = enable;
+  _audioLevelIndicationID = ID;
+  return 0;
+}
+
+// Read back the audio-level extension configuration.
+WebRtc_Word32 RTPSenderAudio::AudioLevelIndicationStatus(
+    bool& enable,
+    WebRtc_UWord8& ID) const {
+  CriticalSectionScoped lock(_sendAudioCritsect);
+  enable = _includeAudioLevelIndication;
+  ID = _audioLevelIndicationID;
+  return 0;
+}
+
+// Store the audio level in dBov for the audio-level header extension; set
+// per RTP packet.  Rejects values above 127 (the field is 7 bits).
+WebRtc_Word32 RTPSenderAudio::SetAudioLevel(const WebRtc_UWord8 level_dBov) {
+  if (level_dBov > 127) {
+    return -1;
+  }
+  CriticalSectionScoped lock(_sendAudioCritsect);
+  _audioLevel_dBov = level_dBov;
+  return 0;
+}
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    // (-1 disables RED; any other negative value is rejected).
+WebRtc_Word32
+RTPSenderAudio::SetRED(const WebRtc_Word8 payloadType)
+{
+    if(payloadType < -1 )
+    {
+        return -1;
+    }
+    // NOTE(review): written without _sendAudioCritsect while SendAudio()
+    // reads _REDPayloadType — confirm callers serialize configuration.
+    _REDPayloadType = payloadType;
+    return 0;
+}
+
+// Fetch the RED (RFC 2198) payload type; fails when RED is not configured.
+WebRtc_Word32 RTPSenderAudio::RED(WebRtc_Word8& payloadType) const {
+  if (_REDPayloadType == -1) {
+    // not configured
+    return -1;
+  }
+  payloadType = _REDPayloadType;
+  return 0;
+}
+
+// Queue a DTMF tone for transmission via RFC 2833 (4733) telephone-events.
+// Fails when no telephone-event payload type has been registered.
+WebRtc_Word32 RTPSenderAudio::SendTelephoneEvent(const WebRtc_UWord8 key,
+                                                 const WebRtc_UWord16 time_ms,
+                                                 const WebRtc_UWord8 level) {
+  // The DTMF queue itself is protected by its own critsect.
+  if (_dtmfPayloadType < 0) {
+    // TelephoneEvent payload type not configured.
+    return -1;
+  }
+  return AddDTMF(key, time_ms, level);
+}
+
+// Build and send one RFC 2833 (4733) telephone-event packet.  When |ended|,
+// the final packet is transmitted three times for robustness against loss.
+// The lock is released before each SendToNetwork() call.
+WebRtc_Word32
+RTPSenderAudio::SendTelephoneEventPacket(const bool ended,
+                                         const WebRtc_UWord32 dtmfTimeStamp,
+                                         const WebRtc_UWord16 duration,
+                                         const bool markerBit)
+{
+    WebRtc_UWord8 dtmfbuffer[IP_PACKET_SIZE];
+    WebRtc_UWord8 sendCount = 1;
+    WebRtc_Word32 retVal = 0;
+
+    if(ended)
+    {
+        // resend last packet in an event 3 times
+        sendCount = 3;
+    }
+    do
+    {
+        _sendAudioCritsect->Enter();
+
+        //Send DTMF data
+        _rtpSender->BuildRTPheader(dtmfbuffer, _dtmfPayloadType, markerBit, dtmfTimeStamp);
+
+        // reset CSRC and X bit
+        dtmfbuffer[0] &= 0xe0;
+
+        //Create DTMF data
+        /*    From RFC 2833:
+
+         0                   1                   2                   3
+         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        |     event     |E|R| volume    |          duration             |
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        */
+        // R bit always cleared
+        WebRtc_UWord8 R = 0x00;
+        WebRtc_UWord8 volume = _dtmfLevel;
+
+        // First packet un-ended
+          WebRtc_UWord8 E = 0x00;
+
+        if(ended)
+        {
+            E = 0x80;   // End bit set on the final packet(s) of the event
+        }
+
+        // First byte is Event number, equals key number
+        dtmfbuffer[12] = _dtmfKey;
+        dtmfbuffer[13] = E|R|volume;
+        ModuleRTPUtility::AssignUWord16ToBuffer(dtmfbuffer+14, duration);
+
+        _sendAudioCritsect->Leave();
+        // 4-byte event payload following the 12-byte fixed RTP header.
+        retVal = _rtpSender->SendToNetwork(dtmfbuffer, 4, 12,
+                                           kAllowRetransmission);
+        sendCount--;
+
+    }while (sendCount > 0 && retVal == 0);
+
+    return retVal;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender_audio.h b/trunk/src/modules/rtp_rtcp/source/rtp_sender_audio.h
new file mode 100644
index 0000000..5fda2ef
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
+
+#include "rtp_rtcp_config.h"          // misc. defines (e.g. MAX_PACKET_LENGTH)
+#include "common_types.h"             // Transport
+#include "typedefs.h"
+
+#include "dtmf_queue.h"
+#include "rtp_utility.h"
+
+#include "rtp_sender.h"
+
+namespace webrtc {
+// Audio-specific RTP packetizer/sender.  Owned by the RTP/RTCP module's
+// sender; delegates header building and network output to RTPSenderInterface.
+// Inherits DTMFqueue for queuing RFC 2833/4733 telephone events.
+class RTPSenderAudio: public DTMFqueue
+{
+public:
+    RTPSenderAudio(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                   RTPSenderInterface* rtpSender);
+    virtual ~RTPSenderAudio();
+
+    void ChangeUniqueId(const WebRtc_Word32 id);
+
+    // Reset configuration to defaults and flush queued DTMF events.
+    WebRtc_Word32 Init();
+
+    // Records codec state for a payload type; for non-CN/non-telephone-event
+    // codecs a Payload struct is returned via |payload| (caller owns it).
+    WebRtc_Word32 RegisterAudioPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate,
+        ModuleRTPUtility::Payload*& payload);
+
+    // Packetize and send one audio frame (or drive an active DTMF event).
+    WebRtc_Word32 SendAudio(const FrameType frameType,
+                            const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize,
+                            const RTPFragmentationHeader* fragmentation);
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
+
+    // Set status and ID for header-extension-for-audio-level-indication.
+    // Valid ID range is [1,14].
+    WebRtc_Word32 SetAudioLevelIndicationStatus(const bool enable,
+                                              const WebRtc_UWord8 ID);
+
+    // Get status and ID for header-extension-for-audio-level-indication.
+    WebRtc_Word32 AudioLevelIndicationStatus(bool& enable,
+                                           WebRtc_UWord8& ID) const;
+
+    // Store the audio level in dBov for header-extension-for-audio-level-indication.
+    // Valid range is [0,127]. Actual value is negative.
+    WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
+
+    // Send a DTMF tone using RFC 2833 (4733)
+      WebRtc_Word32 SendTelephoneEvent(const WebRtc_UWord8 key,
+                                   const WebRtc_UWord16 time_ms,
+                                   const WebRtc_UWord8 level);
+
+    // True while a DTMF tone is active (or within 100 ms of the last one).
+    bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
+
+    // RTP clock rate in samples/second, used for DTMF duration arithmetic.
+    void SetAudioFrequency(const WebRtc_UWord32 f);
+
+    int AudioFrequency() const;
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 SetRED(const WebRtc_Word8 payloadType);
+
+    // Get payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 RED(WebRtc_Word8& payloadType) const;
+
+    // Listener notified when a queued DTMF tone starts playing (NULL clears).
+    WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback);
+
+protected:
+    WebRtc_Word32 SendTelephoneEventPacket(const bool ended,
+                                         const WebRtc_UWord32 dtmfTimeStamp,
+                                         const WebRtc_UWord16 duration,
+                                         const bool markerBit); // set on first packet in talk burst
+
+    // Decide whether the marker bit should be set (first packet of a spurt).
+    bool MarkerBit(const FrameType frameType,
+                   const WebRtc_Word8 payloadType);
+
+private:
+    WebRtc_Word32             _id;
+    RtpRtcpClock&             _clock;
+    RTPSenderInterface*     _rtpSender;      // not owned
+    CriticalSectionWrapper* _audioFeedbackCritsect;  // guards _audioFeedback
+    RtpAudioFeedback*   _audioFeedback;
+
+    // Guards all send/configuration state below.
+    CriticalSectionWrapper*   _sendAudioCritsect;
+
+    WebRtc_UWord32            _frequency;
+    WebRtc_UWord16            _packetSizeSamples;
+
+    // DTMF
+    bool            _dtmfEventIsOn;
+    bool            _dtmfEventFirstPacketSent;
+    WebRtc_Word8      _dtmfPayloadType;       // -1 when not configured
+    WebRtc_UWord32    _dtmfTimestamp;
+    WebRtc_UWord8     _dtmfKey;
+    WebRtc_UWord32    _dtmfLengthSamples;
+    WebRtc_UWord8     _dtmfLevel;
+    WebRtc_UWord32    _dtmfTimeLastSent;
+    WebRtc_UWord32    _dtmfTimestampLastSent;
+
+    WebRtc_Word8      _REDPayloadType;        // -1 when not configured
+
+    // VAD detection, used for markerbit
+    bool            _inbandVADactive;
+    WebRtc_Word8      _cngNBPayloadType;
+    WebRtc_Word8      _cngWBPayloadType;
+    WebRtc_Word8      _cngSWBPayloadType;
+    WebRtc_Word8      _lastPayloadType;
+
+    // Audio level indication (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
+    bool            _includeAudioLevelIndication;
+    WebRtc_UWord8     _audioLevelIndicationID;
+    WebRtc_UWord8     _audioLevel_dBov;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender_test.cc b/trunk/src/modules/rtp_rtcp/source/rtp_sender_test.cc
new file mode 100644
index 0000000..4e8c78c
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender_test.cc
@@ -0,0 +1,227 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the RTPSender.
+ */
+
+#include <gtest/gtest.h>
+
+#include "rtp_header_extension.h"
+#include "rtp_rtcp_defines.h"
+#include "rtp_sender.h"
+#include "rtp_utility.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+namespace {
+// Shared constants for the RTPSender unit tests below.
+const int kId = 1;  // RTP header-extension ID registered by the tests
+const int kTypeLength = TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES;
+const int kPayload = 100;  // arbitrary dynamic payload type
+const uint32_t kTimestamp = 10;
+const uint16_t kSeqNum = 33;
+const int kTimeOffset = 22222;  // transmission-time-offset value under test
+const int kMaxPacketLength = 1500;  // capture-buffer size in the transport
+}  // namespace
+
+// Deterministic, manually advanced clock implementing RtpRtcpClock for tests.
+class FakeClockTest : public RtpRtcpClock {
+ public:
+  FakeClockTest() {
+    time_in_ms_ = 123456;  // arbitrary non-zero start time
+  }
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_UWord32 GetTimeInMS() {
+    return time_in_ms_;
+  }
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+    secs = time_in_ms_ / 1000;
+    // 4294967 ~= 2^32 / 1000: milliseconds to NTP fractional seconds.
+    frac = (time_in_ms_ % 1000) * 4294967;
+  }
+  // Advance the fake clock by |time_increment_ms|.
+  void IncrementTime(WebRtc_UWord32 time_increment_ms) {
+    time_in_ms_ += time_increment_ms;
+  }
+ private:
+  WebRtc_UWord32 time_in_ms_;
+};
+
+// Transport stub that captures the last outgoing RTP packet so tests can
+// parse what was "sent".  RTCP is not exercised and always fails.
+class LoopbackTransportTest : public webrtc::Transport {
+ public:
+  LoopbackTransportTest()
+    : packets_sent_(0),
+      last_sent_packet_len_(0) {
+  }
+  virtual int SendPacket(int channel, const void *data, int len) {
+    // Guard the fixed-size capture buffer; the previous unchecked memcpy
+    // would overflow on a packet larger than kMaxPacketLength.
+    if (len < 0 || len > kMaxPacketLength) {
+      return -1;
+    }
+    packets_sent_++;
+    memcpy(last_sent_packet_, data, len);
+    last_sent_packet_len_ = len;
+    return len;
+  }
+  virtual int SendRTCPPacket(int channel, const void *data, int len) {
+    return -1;
+  }
+  int packets_sent_;
+  int last_sent_packet_len_;
+  uint8_t last_sent_packet_[kMaxPacketLength];
+};
+
+// Fixture: owns an RTPSender driven by the fake clock.  The sequence number
+// is pinned in the ctor so header assertions below are stable.
+class RtpSenderTest : public ::testing::Test {
+ protected:
+  RtpSenderTest()
+    : fake_clock_(),
+      rtp_sender_(new RTPSender(0, false, &fake_clock_)),  // audio=false
+      transport_(),
+      kMarkerBit(true),
+      kType(kRtpExtensionTransmissionTimeOffset),
+      packet_() {
+    EXPECT_EQ(0, rtp_sender_->SetSequenceNumber(kSeqNum));
+  }
+  ~RtpSenderTest() {
+    delete rtp_sender_;
+  }
+
+  FakeClockTest fake_clock_;
+  RTPSender* rtp_sender_;
+  LoopbackTransportTest transport_;
+  const bool kMarkerBit;
+  RTPExtensionType kType;
+  uint8_t packet_[kMaxPacketLength];
+
+  // Assert the fixed-header fields every test builds with the constants above.
+  void VerifyRTPHeaderCommon(const WebRtcRTPHeader& rtp_header) {
+    EXPECT_EQ(kMarkerBit, rtp_header.header.markerBit);
+    EXPECT_EQ(kPayload, rtp_header.header.payloadType);
+    EXPECT_EQ(kSeqNum, rtp_header.header.sequenceNumber);
+    EXPECT_EQ(kTimestamp, rtp_header.header.timestamp);
+    EXPECT_EQ(rtp_sender_->SSRC(), rtp_header.header.ssrc);
+    EXPECT_EQ(0, rtp_header.header.numCSRCs);
+    EXPECT_EQ(0, rtp_header.header.paddingLength);
+  }
+};
+
+// Registering an extension grows the reported total length by the one-byte
+// header overhead plus the extension data; deregistering restores zero.
+TEST_F(RtpSenderTest, RegisterRtpHeaderExtension) {
+  EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+  EXPECT_EQ(RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES + kTypeLength,
+            rtp_sender_->RtpHeaderExtensionTotalLength());
+  EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(kType));
+  EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
+}
+
+// Without registered extensions, BuildRTPheader emits the 12-byte fixed
+// header and the parsed transmission-time offset stays zero.
+TEST_F(RtpSenderTest, BuildRTPPacket) {
+  WebRtc_Word32 length = rtp_sender_->BuildRTPheader(packet_,
+                                                     kPayload,
+                                                     kMarkerBit,
+                                                     kTimestamp);
+  EXPECT_EQ(12, length);
+
+  // Verify
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtpParser(packet_, length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtpParser.Parse(rtp_header, &map);
+
+  ASSERT_TRUE(valid_rtp_header);
+  ASSERT_FALSE(rtpParser.RTCP());
+  VerifyRTPHeaderCommon(rtp_header);
+  EXPECT_EQ(length, rtp_header.header.headerLength);
+  EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset);
+}
+
+// With the transmission-time-offset extension registered, the header grows
+// accordingly and the offset round-trips through the parser — but only when
+// the parser is given the extension map.
+TEST_F(RtpSenderTest, BuildRTPPacketWithExtension) {
+  EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kTimeOffset));
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+
+  WebRtc_Word32 length = rtp_sender_->BuildRTPheader(packet_,
+                                                     kPayload,
+                                                     kMarkerBit,
+                                                     kTimestamp);
+  EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+
+  // Verify
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtpParser(packet_, length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtpParser.Parse(rtp_header, &map);
+
+  ASSERT_TRUE(valid_rtp_header);
+  ASSERT_FALSE(rtpParser.RTCP());
+  VerifyRTPHeaderCommon(rtp_header);
+  EXPECT_EQ(length, rtp_header.header.headerLength);
+  EXPECT_EQ(kTimeOffset, rtp_header.extension.transmissionTimeOffset);
+
+  // Parse without map extension
+  webrtc::WebRtcRTPHeader rtp_header2;
+  const bool valid_rtp_header2 = rtpParser.Parse(rtp_header2, NULL);
+
+  // Header still parses, but the unmapped extension yields a zero offset.
+  ASSERT_TRUE(valid_rtp_header2);
+  VerifyRTPHeaderCommon(rtp_header2);
+  EXPECT_EQ(length, rtp_header2.header.headerLength);
+  EXPECT_EQ(0, rtp_header2.extension.transmissionTimeOffset);
+}
+
+// With smoothing disabled (default), SendToNetwork forwards the packet to
+// the transport immediately.
+TEST_F(RtpSenderTest, NoTrafficSmoothing) {
+  EXPECT_EQ(0, rtp_sender_->RegisterSendTransport(&transport_));
+
+  WebRtc_Word32 rtp_length = rtp_sender_->BuildRTPheader(packet_,
+                                                         kPayload,
+                                                         kMarkerBit,
+                                                         kTimestamp);
+
+  // Packet should be sent immediately.
+  EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, 0, rtp_length,
+                                          kAllowRetransmission));
+  EXPECT_EQ(1, transport_.packets_sent_);
+  EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
+}
+
+// With smoothing enabled the packet is buffered until ProcessSendToNetwork(),
+// and the transmission-time-offset extension records the buffering delay in
+// 90 kHz units.
+TEST_F(RtpSenderTest, TrafficSmoothing) {
+  rtp_sender_->SetTransmissionSmoothingStatus(true);
+  EXPECT_EQ(0, rtp_sender_->SetStorePacketsStatus(true, 10));
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+  EXPECT_EQ(0, rtp_sender_->RegisterSendTransport(&transport_));
+
+  WebRtc_Word32 rtp_length = rtp_sender_->BuildRTPheader(packet_,
+                                                         kPayload,
+                                                         kMarkerBit,
+                                                         kTimestamp);
+
+  // Packet should be stored in a send bucket.
+  EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, 0, rtp_length,
+                                          kAllowRetransmission));
+  EXPECT_EQ(0, transport_.packets_sent_);
+
+  const int kStoredTimeInMs = 100;
+  fake_clock_.IncrementTime(kStoredTimeInMs);
+
+  // Process send bucket. Packet should now be sent.
+  rtp_sender_->ProcessSendToNetwork();
+  EXPECT_EQ(1, transport_.packets_sent_);
+  EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
+
+  // Parse sent packet.
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtpParser(
+      transport_.last_sent_packet_, rtp_length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtpParser.Parse(rtp_header, &map);
+  ASSERT_TRUE(valid_rtp_header);
+
+  // Verify transmission time offset: 90 kHz RTP clock ticks per ms.
+  EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender_video.cc b/trunk/src/modules/rtp_rtcp/source/rtp_sender_video.cc
new file mode 100644
index 0000000..7a15208
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -0,0 +1,619 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_sender_video.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include "rtp_utility.h"
+
+#include <string.h> // memcpy
+#include <cassert>  // assert
+#include <cstdlib>  // srand
+
+#include "rtp_format_vp8.h"
+
+namespace webrtc {
+enum { REDForFECHeaderLength = 1 };
+
+// Constructs the video RTP sender.
+//
+// |id| is the trace/module id, |clock| drives the bitrate estimators, and
+// |rtpSender| is the (non-owned) sender used for all network output.
+// FEC starts disabled with invalid (-1) RED/FEC payload types.
+RTPSenderVideo::RTPSenderVideo(const WebRtc_Word32 id,
+                               RtpRtcpClock* clock,
+                               RTPSenderInterface* rtpSender) :
+    _id(id),
+    _rtpSender(*rtpSender),
+    _sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+
+    _videoType(kRtpNoVideo),
+    _videoCodecInformation(NULL),
+    _maxBitrate(0),
+    _retransmissionSettings(kRetransmitBaseLayer),
+
+    // Generic FEC
+    _fec(id),
+    _fecEnabled(false),
+    _payloadTypeRED(-1),
+    _payloadTypeFEC(-1),
+    _codeRateKey(0),
+    _codeRateDelta(0),
+    _useUepProtectionKey(false),
+    _useUepProtectionDelta(false),
+    _fecProtectionFactor(0),
+    _fecUseUepProtection(false),
+    _numberFirstPartition(0),
+    _fecOverheadRate(clock),
+    _videoBitrate(clock) {
+}
+
+// Releases owned resources: the codec information (if any) and the
+// critical section created in the constructor.
+RTPSenderVideo::~RTPSenderVideo()
+{
+    // delete on a NULL pointer is a no-op, so no guard is required.
+    delete _videoCodecInformation;
+    delete _sendVideoCritsect;
+}
+
+// Resets the sender to its default state under the send lock.
+// Restores retransmission policy, disables FEC and clears its
+// configuration, and re-initializes the FEC-overhead rate estimator.
+// Note: _videoBitrate is NOT re-initialized here. Always returns 0.
+WebRtc_Word32
+RTPSenderVideo::Init()
+{
+    CriticalSectionScoped cs(_sendVideoCritsect);
+
+    _retransmissionSettings = kRetransmitBaseLayer;
+    _fecEnabled = false;
+    _payloadTypeRED = -1;
+    _payloadTypeFEC = -1;
+    _codeRateKey = 0;
+    _codeRateDelta = 0;
+    _useUepProtectionKey = false;
+    _useUepProtectionDelta = false;
+    _fecProtectionFactor = 0;
+    _fecUseUepProtection = false;
+    _numberFirstPartition = 0;
+    _fecOverheadRate.Init();
+    return 0;
+}
+
+// Replaces the id used for tracing by this sender.
+void RTPSenderVideo::ChangeUniqueId(const WebRtc_Word32 id) {
+  _id = id;
+}
+
+// Records the codec type used for outgoing video, under the send lock.
+void RTPSenderVideo::SetVideoCodecType(RtpVideoCodecTypes videoType) {
+  CriticalSectionScoped cs(_sendVideoCritsect);
+  _videoType = videoType;
+}
+
+// Returns the currently configured video codec type.
+// NOTE(review): reads _videoType without taking _sendVideoCritsect while
+// SetVideoCodecType() writes it under the lock — confirm this asymmetry
+// is intentional.
+RtpVideoCodecTypes
+RTPSenderVideo::VideoCodecType() const
+{
+    return _videoType;
+}
+
+// Allocates and fills a Payload description for a supported video codec.
+//
+// Only "VP8" and "I420" are accepted; I420 is carried with the generic
+// (no-video) packetization. On success |payload| receives a new,
+// caller-owned Payload and 0 is returned; unknown names yield -1 and
+// leave |payload| untouched. |payloadType| is currently unused here.
+WebRtc_Word32 RTPSenderVideo::RegisterVideoPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 maxBitRate,
+    ModuleRTPUtility::Payload*& payload) {
+  CriticalSectionScoped cs(_sendVideoCritsect);
+
+  RtpVideoCodecTypes videoType;
+  if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
+    videoType = kRtpVp8Video;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
+    videoType = kRtpNoVideo;
+  } else {
+    return -1;
+  }
+  payload = new ModuleRTPUtility::Payload;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;  // guarantee termination
+  payload->audio = false;
+  payload->typeSpecific.Video.videoCodecType = videoType;
+  payload->typeSpecific.Video.maxRate = maxBitRate;
+  return 0;
+}
+
+// Pairs an FEC work packet with the length of its RTP header so the
+// header can be located at the front of pkt->data when the packet is
+// re-wrapped with a RED header.
+struct RtpPacket
+{
+    WebRtc_UWord16 rtpHeaderLength;  // bytes of RTP header at start of pkt->data
+    ForwardErrorCorrection::Packet* pkt;  // owned; header followed by payload
+};
+
+// Sends one RTP packet of an encoded frame, optionally via the RED/FEC
+// path.
+//
+// FEC disabled: |dataBuffer| is forwarded directly and _videoBitrate is
+// updated on success.
+//
+// FEC enabled: the packet is copied and queued. When the marker bit
+// (end of frame) is seen, FEC packets are generated for the queued media
+// packets, every queued packet is re-sent wrapped in a one-byte RED
+// header, the FEC packets are sent the same way, and the video/FEC
+// bitrate counters are updated.
+//
+// |dataBuffer| holds rtpHeaderLength header bytes followed by
+// payloadLength payload bytes. Returns 0 on success; the FEC path
+// returns the bitwise OR of the individual send results.
+WebRtc_Word32
+RTPSenderVideo::SendVideoPacket(const FrameType frameType,
+                                const WebRtc_UWord8* dataBuffer,
+                                const WebRtc_UWord16 payloadLength,
+                                const WebRtc_UWord16 rtpHeaderLength,
+                                StorageType storage)
+{
+    if(_fecEnabled)
+    {
+        WebRtc_Word32 retVal = 0;
+
+        // Marker bit (RTP byte 1) signals the last packet of the frame.
+        const bool markerBit = (dataBuffer[1] & kRtpMarkerBitMask)?true:false;
+        RtpPacket* ptrGenericFEC = new RtpPacket;
+        ptrGenericFEC->pkt = new ForwardErrorCorrection::Packet;
+        ptrGenericFEC->pkt->length = payloadLength + rtpHeaderLength;
+        ptrGenericFEC->rtpHeaderLength = rtpHeaderLength;
+        memcpy(ptrGenericFEC->pkt->data, dataBuffer,
+               ptrGenericFEC->pkt->length);
+
+        // Add packet to FEC list
+        _rtpPacketListFec.push_back(ptrGenericFEC);
+        // FEC can only protect up to kMaxMediaPackets packets
+        if (_mediaPacketListFec.size() <
+            ForwardErrorCorrection::kMaxMediaPackets)
+        {
+            _mediaPacketListFec.push_back(ptrGenericFEC->pkt);
+        }
+
+        // Last packet in frame
+        if (markerBit)
+        {
+
+            // Retain the RTP header of the last media packet to construct FEC
+            // packet RTP headers.
+            ForwardErrorCorrection::Packet lastMediaRtpHeader;
+            memcpy(lastMediaRtpHeader.data,
+                   ptrGenericFEC->pkt->data,
+                   ptrGenericFEC->rtpHeaderLength);
+
+            lastMediaRtpHeader.length = ptrGenericFEC->rtpHeaderLength;
+            // Replace payload and clear marker bit.
+            lastMediaRtpHeader.data[1] = _payloadTypeRED;
+
+            // Number of first partition packets cannot exceed kMaxMediaPackets
+            if (_numberFirstPartition >
+                ForwardErrorCorrection::kMaxMediaPackets)
+            {
+                _numberFirstPartition =
+                    ForwardErrorCorrection::kMaxMediaPackets;
+            }
+
+            std::list<ForwardErrorCorrection::Packet*> fecPacketList;
+            retVal = _fec.GenerateFEC(_mediaPacketListFec,
+                                      _fecProtectionFactor,
+                                      _numberFirstPartition,
+                                      _fecUseUepProtection,
+                                      &fecPacketList);
+
+            int fecOverheadSent = 0;
+            int videoSent = 0;
+
+            // Drain the media packet queue: wrap each packet in a RED
+            // header and send it.
+            while(!_rtpPacketListFec.empty())
+            {
+                WebRtc_UWord8 newDataBuffer[IP_PACKET_SIZE];
+                memset(newDataBuffer, 0, sizeof(newDataBuffer));
+
+                RtpPacket* packetToSend = _rtpPacketListFec.front();
+
+                // Copy RTP header
+                memcpy(newDataBuffer, packetToSend->pkt->data,
+                       packetToSend->rtpHeaderLength);
+
+                // Get codec pltype
+                WebRtc_UWord8 payloadType = newDataBuffer[1] & 0x7f;
+
+                // Replace pltype
+                newDataBuffer[1] &= 0x80;            // reset
+                newDataBuffer[1] += _payloadTypeRED; // replace
+
+                // Add RED header
+                // f-bit always 0
+                newDataBuffer[packetToSend->rtpHeaderLength] = payloadType;
+
+                // Copy payload data
+                memcpy(newDataBuffer + packetToSend->rtpHeaderLength +
+                           REDForFECHeaderLength,
+                       packetToSend->pkt->data + packetToSend->rtpHeaderLength,
+                       packetToSend->pkt->length -
+                           packetToSend->rtpHeaderLength);
+
+                _rtpPacketListFec.pop_front();
+                // Check if _mediaPacketListFec is non-empty.
+                // This list may be smaller than rtpPacketList, if the frame
+                // has more than kMaxMediaPackets.
+                if (!_mediaPacketListFec.empty()) {
+                  _mediaPacketListFec.pop_front();
+                }
+
+                // Send normal packet with RED header
+                int packetSuccess = _rtpSender.SendToNetwork(
+                    newDataBuffer,
+                    packetToSend->pkt->length - packetToSend->rtpHeaderLength +
+                    REDForFECHeaderLength,
+                    packetToSend->rtpHeaderLength,
+                    storage);
+
+                retVal |= packetSuccess;
+
+                if (packetSuccess == 0)
+                {
+                    videoSent += packetToSend->pkt->length +
+                        REDForFECHeaderLength;
+                }
+
+                // The queue owns both the wrapper and the packet buffer.
+                delete packetToSend->pkt;
+                delete packetToSend;
+                packetToSend = NULL;
+            }
+            assert(_mediaPacketListFec.empty());
+            assert(_rtpPacketListFec.empty());
+
+            // Send the generated FEC packets, each prefixed with the last
+            // media packet's (marker-cleared, RED-typed) RTP header.
+            while(!fecPacketList.empty())
+            {
+                WebRtc_UWord8 newDataBuffer[IP_PACKET_SIZE];
+
+                // Build FEC packets
+                ForwardErrorCorrection::Packet* packetToSend = fecPacketList.front();
+
+                // The returned FEC packets have no RTP headers.
+                // Copy the last media packet's modified RTP header.
+                memcpy(newDataBuffer, lastMediaRtpHeader.data,
+                       lastMediaRtpHeader.length);
+
+                // Add sequence number
+                ModuleRTPUtility::AssignUWord16ToBuffer(
+                    &newDataBuffer[2], _rtpSender.IncrementSequenceNumber());
+
+                // Add RED header
+                // f-bit always 0
+                newDataBuffer[lastMediaRtpHeader.length] = _payloadTypeFEC;
+
+                // Copy payload data
+                memcpy(newDataBuffer + lastMediaRtpHeader.length +
+                           REDForFECHeaderLength,
+                       packetToSend->data,
+                       packetToSend->length);
+
+                // NOTE(review): FEC packets are popped but not deleted
+                // here — presumably owned by _fec; confirm.
+                fecPacketList.pop_front();
+
+                // Invalid FEC packet
+                assert(packetToSend->length != 0);
+
+                // Shadows the |storage| parameter: FEC packets follow
+                // their own retransmission policy.
+                StorageType storage = kDontRetransmit;
+                if (_retransmissionSettings & kRetransmitFECPackets) {
+                  storage = kAllowRetransmission;
+                }
+
+                // No marker bit on FEC packets, last media packet have the
+                // marker send FEC packet with RED header
+                int packetSuccess = _rtpSender.SendToNetwork(
+                        newDataBuffer,
+                        packetToSend->length + REDForFECHeaderLength,
+                        lastMediaRtpHeader.length,
+                        storage);
+
+                retVal |= packetSuccess;
+
+                if (packetSuccess == 0)
+                {
+                    fecOverheadSent += packetToSend->length +
+                      REDForFECHeaderLength + lastMediaRtpHeader.length;
+                }
+            }
+            _videoBitrate.Update(videoSent);
+            _fecOverheadRate.Update(fecOverheadSent);
+        }
+        return retVal;
+    }
+    // Non-FEC path: send directly and account the bytes on success.
+    int retVal = _rtpSender.SendToNetwork(dataBuffer,
+                                          payloadLength,
+                                          rtpHeaderLength,
+                                          storage);
+    if (retVal == 0)
+    {
+        _videoBitrate.Update(payloadLength + rtpHeaderLength);
+    }
+    return retVal;
+}
+
+// Builds and sends an RFC 2032 Full Intra-frame Request (FIR) packet.
+WebRtc_Word32 RTPSenderVideo::SendRTPIntraRequest() {
+  // RFC 2032, section 5.2.1: first octet 0x80 (version 2), packet type
+  // 192 (FIR), length 1 (32-bit words following the header word), then
+  // our SSRC.
+  WebRtc_UWord8 data[8];
+  data[0] = 0x80;
+  data[1] = 192;
+  data[2] = 0;
+  data[3] = 1; // length
+  ModuleRTPUtility::AssignUWord32ToBuffer(data + 4, _rtpSender.SSRC());
+
+  // The whole 8-byte packet is passed as "header": payload length 0.
+  const WebRtc_UWord16 length = 8;
+  return _rtpSender.SendToNetwork(data, 0, length, kAllowRetransmission);
+}
+
+// Enables or disables generic FEC and records the RED/FEC payload types.
+// Also clears the code rates and UEP flags, so protection must be
+// reconfigured via SetFECCodeRate()/SetFECUepProtection(). Returns 0.
+WebRtc_Word32 RTPSenderVideo::SetGenericFECStatus(
+    const bool enable,
+    const WebRtc_UWord8 payloadTypeRED,
+    const WebRtc_UWord8 payloadTypeFEC) {
+  _fecEnabled = enable;
+  _payloadTypeRED = payloadTypeRED;
+  _payloadTypeFEC = payloadTypeFEC;
+  _codeRateKey = 0;
+  _codeRateDelta = 0;
+  _useUepProtectionKey = false;
+  _useUepProtectionDelta = false;
+  return 0;
+}
+
+// Copies the current FEC configuration into the output arguments.
+// Always returns 0.
+WebRtc_Word32 RTPSenderVideo::GenericFECStatus(
+    bool& enable,
+    WebRtc_UWord8& payloadTypeRED,
+    WebRtc_UWord8& payloadTypeFEC) const {
+  enable = _fecEnabled;
+  payloadTypeRED = _payloadTypeRED;
+  payloadTypeFEC = _payloadTypeFEC;
+  return 0;
+}
+
+// Per-packet byte overhead when FEC is enabled: the FEC header plus the
+// one-byte RED header. Zero while FEC is disabled.
+WebRtc_UWord16 RTPSenderVideo::FECPacketOverhead() const {
+  if (!_fecEnabled) {
+    return 0;
+  }
+  return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength;
+}
+
+// Stores the FEC protection factors applied to key and delta frames.
+// Always returns 0.
+WebRtc_Word32 RTPSenderVideo::SetFECCodeRate(
+    const WebRtc_UWord8 keyFrameCodeRate,
+    const WebRtc_UWord8 deltaFrameCodeRate) {
+  _codeRateKey = keyFrameCodeRate;
+  _codeRateDelta = deltaFrameCodeRate;
+  return 0;
+}
+
+WebRtc_Word32
+RTPSenderVideo::SetFECUepProtection(const bool keyUseUepProtection,
+                                    const bool deltaUseUepProtection)
+{
+    _useUepProtectionKey = keyUseUepProtection;
+    _useUepProtectionDelta = deltaUseUepProtection;
+    return 0;
+}
+
+// Packetizes and sends one encoded frame.
+//
+// Selects this frame's FEC protection (key frames use the key-frame code
+// rate; VP8 packets above the base temporal layer get no FEC), resets the
+// first-partition packet counter, then dispatches to the codec-specific
+// sender. |fragmentation| and |codecInfo| are forwarded/unused here.
+// Returns 0 on success, -1 for an empty payload or a failed send.
+WebRtc_Word32
+RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
+                          const FrameType frameType,
+                          const WebRtc_Word8 payloadType,
+                          const WebRtc_UWord32 captureTimeStamp,
+                          const WebRtc_UWord8* payloadData,
+                          const WebRtc_UWord32 payloadSize,
+                          const RTPFragmentationHeader* fragmentation,
+                          VideoCodecInformation* codecInfo,
+                          const RTPVideoTypeHeader* rtpTypeHdr)
+{
+    if (payloadSize == 0)
+    {
+        return -1;
+    }
+
+    if (frameType == kVideoFrameKey)
+    {
+        _fecProtectionFactor = _codeRateKey;
+        _fecUseUepProtection = _useUepProtectionKey;
+    }
+    // Guard rtpTypeHdr before dereferencing: SendVP8 asserts it non-NULL,
+    // but this branch ran first and could crash on a NULL header.
+    else if (videoType == kRtpVp8Video && rtpTypeHdr != NULL &&
+             rtpTypeHdr->VP8.temporalIdx > 0)
+    {
+        // In current version, we only apply FEC on the base layer.
+        _fecProtectionFactor = 0;
+        _fecUseUepProtection = false;
+    }
+    else
+    {
+        _fecProtectionFactor = _codeRateDelta;
+        _fecUseUepProtection = _useUepProtectionDelta;
+    }
+
+    // Default setting for number of first partition packets:
+    // Will be extracted in SendVP8 for VP8 codec; other codecs use 0
+    _numberFirstPartition = 0;
+
+    WebRtc_Word32 retVal = -1;
+    switch(videoType)
+    {
+    case kRtpNoVideo:
+        retVal = SendGeneric(payloadType, captureTimeStamp, payloadData,
+                             payloadSize);
+        break;
+    case kRtpVp8Video:
+        retVal = SendVP8(frameType, payloadType, captureTimeStamp,
+                payloadData, payloadSize, fragmentation, rtpTypeHdr);
+        break;
+    default:
+        // Unknown codec type: programming error.
+        assert(false);
+        break;
+    }
+    // Normalize: any non-negative result counts as success.
+    if(retVal <= 0)
+    {
+        return retVal;
+    }
+    return 0;
+}
+
+// Sends a frame using plain (codec-agnostic) packetization.
+//
+// The payload is split into consecutive chunks of at most
+// MaxPayloadLength - FEC overhead - RTP header bytes; the marker bit is
+// set only on the final packet. All packets are sent as key-frame,
+// retransmittable packets. Returns 0 on success, -1 on any failure.
+WebRtc_Word32
+RTPSenderVideo::SendGeneric(const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize)
+{
+    const WebRtc_UWord8* data = payloadData;
+    WebRtc_UWord32 bytesSent = 0;
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+    const WebRtc_UWord16 maxLength = _rtpSender.MaxPayloadLength() -
+        FECPacketOverhead() - rtpHeaderLength;
+    WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
+
+    // Fragment packet into packets of max MaxPayloadLength bytes payload.
+    while (payloadBytesToSend > 0)
+    {
+        const bool lastPacket = (payloadBytesToSend <= maxLength);
+        const WebRtc_UWord16 payloadBytesInPacket = lastPacket ?
+            (WebRtc_UWord16)payloadBytesToSend : maxLength;
+        payloadBytesToSend -= payloadBytesInPacket;
+
+        // Marker bit is set only on the last packet of the frame.
+        if (_rtpSender.BuildRTPheader(dataBuffer,
+                                      payloadType,
+                                      lastPacket,
+                                      captureTimeStamp) != rtpHeaderLength)
+        {
+            return -1;
+        }
+
+        // Append this chunk of payload after the RTP header.
+        memcpy(&dataBuffer[rtpHeaderLength], &data[bytesSent],
+               payloadBytesInPacket);
+        bytesSent += payloadBytesInPacket;
+
+        if (-1 == SendVideoPacket(kVideoFrameKey,
+                                  dataBuffer,
+                                  payloadBytesInPacket,
+                                  rtpHeaderLength,
+                                  kAllowRetransmission))
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Returns the stored codec information; may be NULL.
+VideoCodecInformation* RTPSenderVideo::CodecInformationVideo() {
+  return _videoCodecInformation;
+}
+
+// Records the configured maximum video bitrate.
+void RTPSenderVideo::SetMaxConfiguredBitrateVideo(
+    const WebRtc_UWord32 maxBitrate) {
+  _maxBitrate = maxBitrate;
+}
+
+// Returns the configured maximum video bitrate.
+WebRtc_UWord32 RTPSenderVideo::MaxConfiguredBitrateVideo() const {
+  return _maxBitrate;
+}
+
+// Packetizes and sends one VP8 frame.
+//
+// Uses RtpFormatVp8 in equal-size mode (partition boundaries are not
+// passed to the packetizer here) and counts the packets that start in the
+// first partition for FEC purposes. Retransmission storage is chosen per
+// frame from the temporal layer and _retransmissionSettings. A failed
+// send of an individual packet is logged but does not abort the frame;
+// the function still returns 0 in that case. Returns -1 only on a
+// packetizer error.
+WebRtc_Word32
+RTPSenderVideo::SendVP8(const FrameType frameType,
+                        const WebRtc_Word8 payloadType,
+                        const WebRtc_UWord32 captureTimeStamp,
+                        const WebRtc_UWord8* payloadData,
+                        const WebRtc_UWord32 payloadSize,
+                        const RTPFragmentationHeader* fragmentation,
+                        const RTPVideoTypeHeader* rtpTypeHdr)
+{
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+    const WebRtc_UWord8* data = payloadData;
+
+    WebRtc_UWord16 maxPayloadLengthVP8 = _rtpSender.MaxDataPayloadLength();
+
+    assert(rtpTypeHdr);
+    // Initialize disregarding partition boundaries: this will use kEqualSize
+    // packetization mode, which produces ~equal size packets for each frame.
+    RtpFormatVp8 packetizer(data, payloadBytesToSend, rtpTypeHdr->VP8,
+                            maxPayloadLengthVP8);
+
+    // Base layer (temporalIdx == 0) and higher layers have independent
+    // retransmission switches.
+    StorageType storage = kAllowRetransmission;
+    if (rtpTypeHdr->VP8.temporalIdx == 0 &&
+        !(_retransmissionSettings & kRetransmitBaseLayer)) {
+      storage = kDontRetransmit;
+    }
+    if (rtpTypeHdr->VP8.temporalIdx > 0 &&
+        !(_retransmissionSettings & kRetransmitHigherLayers)) {
+      storage = kDontRetransmit;
+    }
+
+    bool last = false;
+    _numberFirstPartition = 0;
+    while (!last)
+    {
+        // Write VP8 Payload Descriptor and VP8 payload.
+        WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE] = {0};
+        int payloadBytesInPacket = 0;
+        // NextPacket returns the index of the partition this packet
+        // starts in, or a negative value on error.
+        int packetStartPartition =
+            packetizer.NextPacket(&dataBuffer[rtpHeaderLength],
+                                  &payloadBytesInPacket, &last);
+        if (packetStartPartition == 0)
+        {
+            ++_numberFirstPartition;
+        }
+        else if (packetStartPartition < 0)
+        {
+            return -1;
+        }
+
+        // Write RTP header.
+        // Set marker bit true if this is the last packet in frame.
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, last,
+            captureTimeStamp);
+        // Send failures are logged but intentionally do not abort the
+        // remaining packets of the frame.
+        if (-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket,
+            rtpHeaderLength, storage))
+        {
+          WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                       "RTPSenderVideo::SendVP8 failed to send packet number"
+                       " %d", _rtpSender.SequenceNumber());
+        }
+    }
+    return 0;
+}
+
+void
+RTPSenderVideo::ProcessBitrate()
+{
+    // Advance both rate estimators.
+    _videoBitrate.Process();
+    _fecOverheadRate.Process();
+}
+
+WebRtc_UWord32
+RTPSenderVideo::VideoBitrateSent() const
+{
+    // Most recently computed video send bitrate.
+    return _videoBitrate.BitrateLast();
+}
+
+WebRtc_UWord32
+RTPSenderVideo::FecOverheadRate() const
+{
+    // Most recently computed FEC/RED overhead bitrate.
+    return _fecOverheadRate.BitrateLast();
+}
+
+int
+RTPSenderVideo::SelectiveRetransmissions() const
+{
+    // Bitmask of kRetransmit* flags.
+    return _retransmissionSettings;
+}
+
+int
+RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings)
+{
+    // Replaces the kRetransmit* bitmask. Always returns 0.
+    _retransmissionSettings = settings;
+    return 0;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_sender_video.h b/trunk/src/modules/rtp_rtcp/source/rtp_sender_video.h
new file mode 100644
index 0000000..1bf7142
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
+
+#include <list>
+
+#include "typedefs.h"
+#include "common_types.h"               // Transport
+#include "rtp_rtcp_config.h"
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_utility.h"
+
+#include "video_codec_information.h"
+#include "forward_error_correction.h"
+#include "Bitrate.h"
+#include "rtp_sender.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+struct RtpPacket;
+
+// Video-specific RTP send path: packetizes encoded frames (generic or
+// VP8), optionally protects them with RED/ULPFEC, and tracks video and
+// FEC-overhead bitrates. Owned by the RTP/RTCP module; all network
+// output goes through the supplied RTPSenderInterface.
+class RTPSenderVideo
+{
+public:
+    // |rtpSender| is borrowed, not owned, and must outlive this object.
+    RTPSenderVideo(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                   RTPSenderInterface* rtpSender);
+    virtual ~RTPSenderVideo();
+
+    // Resets configuration to defaults (FEC off, base-layer retransmit).
+    WebRtc_Word32 Init();
+
+    // Changes the id used for tracing.
+    virtual void ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual RtpVideoCodecTypes VideoCodecType() const;
+
+    // Extra bytes per packet consumed by RED + FEC when FEC is enabled.
+    WebRtc_UWord16 FECPacketOverhead() const;
+
+    // Creates a Payload struct for "VP8" or "I420"; -1 for other names.
+    WebRtc_Word32 RegisterVideoPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 maxBitRate,
+        ModuleRTPUtility::Payload*& payload);
+
+    // Packetizes and sends one encoded frame; 0 on success.
+    WebRtc_Word32 SendVideo(const RtpVideoCodecTypes videoType,
+                          const FrameType frameType,
+                          const WebRtc_Word8 payloadType,
+                          const WebRtc_UWord32 captureTimeStamp,
+                          const WebRtc_UWord8* payloadData,
+                          const WebRtc_UWord32 payloadSize,
+                          const RTPFragmentationHeader* fragmentation,
+                          VideoCodecInformation* codecInfo,
+                          const RTPVideoTypeHeader* rtpTypeHdr);
+
+    // Sends an RFC 2032 Full Intra-frame Request packet.
+    WebRtc_Word32 SendRTPIntraRequest();
+
+    void SetVideoCodecType(RtpVideoCodecTypes type);
+
+    VideoCodecInformation* CodecInformationVideo();
+
+    void SetMaxConfiguredBitrateVideo(const WebRtc_UWord32 maxBitrate);
+
+    WebRtc_UWord32 MaxConfiguredBitrateVideo() const;
+
+    // FEC
+    WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                    const WebRtc_UWord8 payloadTypeRED,
+                                    const WebRtc_UWord8 payloadTypeFEC);
+
+    WebRtc_Word32 GenericFECStatus(bool& enable,
+                                 WebRtc_UWord8& payloadTypeRED,
+                                 WebRtc_UWord8& payloadTypeFEC) const;
+
+    WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
+                                 const WebRtc_UWord8 deltaFrameCodeRate);
+
+    WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
+                                      const bool deltaUseUepProtection);
+
+    // Periodic tick: advances the bitrate estimators.
+    void ProcessBitrate();
+
+    WebRtc_UWord32 VideoBitrateSent() const;
+    WebRtc_UWord32 FecOverheadRate() const;
+
+    // Getter/setter for the kRetransmit* settings bitmask.
+    int SelectiveRetransmissions() const;
+    int SetSelectiveRetransmissions(uint8_t settings);
+
+protected:
+    // Sends one packet directly, or via the RED/FEC path when enabled.
+    virtual WebRtc_Word32 SendVideoPacket(const FrameType frameType,
+                                          const WebRtc_UWord8* dataBuffer,
+                                          const WebRtc_UWord16 payloadLength,
+                                          const WebRtc_UWord16 rtpHeaderLength,
+                                          StorageType storage);
+
+private:
+    // Codec-agnostic equal-split packetization.
+    WebRtc_Word32 SendGeneric(const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize);
+
+    // VP8 packetization via RtpFormatVp8.
+    WebRtc_Word32 SendVP8(const FrameType frameType,
+                        const WebRtc_Word8 payloadType,
+                        const WebRtc_UWord32 captureTimeStamp,
+                        const WebRtc_UWord8* payloadData,
+                        const WebRtc_UWord32 payloadSize,
+                        const RTPFragmentationHeader* fragmentation,
+                        const RTPVideoTypeHeader* rtpTypeHdr);
+
+private:
+    WebRtc_Word32             _id;
+    RTPSenderInterface&        _rtpSender;  // borrowed, not owned
+
+    CriticalSectionWrapper*   _sendVideoCritsect;  // owned
+    RtpVideoCodecTypes  _videoType;
+    VideoCodecInformation*  _videoCodecInformation;  // owned, may be NULL
+    WebRtc_UWord32            _maxBitrate;
+    WebRtc_Word32             _retransmissionSettings;  // kRetransmit* bitmask
+
+    // FEC
+    ForwardErrorCorrection  _fec;
+    bool                    _fecEnabled;
+    WebRtc_Word8              _payloadTypeRED;  // -1 when unset
+    WebRtc_Word8              _payloadTypeFEC;  // -1 when unset
+    WebRtc_UWord8             _codeRateKey;
+    WebRtc_UWord8             _codeRateDelta;
+    bool                      _useUepProtectionKey;
+    bool                      _useUepProtectionDelta;
+    WebRtc_UWord8             _fecProtectionFactor;  // selected per frame
+    bool                      _fecUseUepProtection;  // selected per frame
+    unsigned int              _numberFirstPartition;
+    std::list<ForwardErrorCorrection::Packet*> _mediaPacketListFec;
+    std::list<RtpPacket*> _rtpPacketListFec;
+    // Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
+    // and any padding overhead.
+    Bitrate                   _fecOverheadRate;
+    // Bitrate used for video payload and RTP headers
+    Bitrate                   _videoBitrate;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_utility.cc b/trunk/src/modules/rtp_rtcp/source/rtp_utility.cc
new file mode 100644
index 0000000..947796d
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_utility.cc
@@ -0,0 +1,914 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_utility.h"
+
+#include <cassert>
+#include <cmath>  // ceil
+#include <cstring>  // memcpy
+
+#if defined(_WIN32)
+#include <Windows.h>  // FILETIME
+#include <WinSock.h>  // timeval
+#include <MMSystem.h>  // timeGetTime
+#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
+#include <sys/time.h>  // gettimeofday
+#include <time.h>
+#endif
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#include <stdio.h>
+#endif
+
+#include "trace.h"
+
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#define DEBUG_PRINT(...)           \
+  {                                \
+    char msg[256];                 \
+    sprintf(msg, __VA_ARGS__);     \
+    OutputDebugString(msg);        \
+  }
+#else
+// Special fix for Visual Studio 2003.
+#define DEBUG_PRINT(exp)        ((void)0)
+#endif  // defined(_DEBUG) && defined(_WIN32)
+
+namespace webrtc {
+
+namespace ModuleRTPUtility {
+
+/*
+ * Time routines.
+ */
+
+#if defined(_WIN32)
+
+struct reference_point {
+  FILETIME      file_time;
+  LARGE_INTEGER counterMS;
+};
+
+struct WindowsHelpTimer {
+  volatile LONG _timeInMs;
+  volatile LONG _numWrapTimeInMs;
+  reference_point _ref_point;
+
+  volatile LONG _sync_flag;
+};
+
+void Synchronize(WindowsHelpTimer* help_timer) {
+  const LONG start_value = 0;
+  const LONG new_value = 1;
+  const LONG synchronized_value = 2;
+
+  LONG compare_flag = new_value;
+  while (help_timer->_sync_flag == start_value) {
+    const LONG new_value = 1;
+    compare_flag = InterlockedCompareExchange(
+        &help_timer->_sync_flag, new_value, start_value);
+  }
+  if (compare_flag != start_value) {
+    // This thread was not the one that incremented the sync flag.
+    // Block until synchronization finishes.
+    while (compare_flag != synchronized_value) {
+      ::Sleep(0);
+    }
+    return;
+  }
+  // Only the synchronizing thread gets here so this part can be
+  // considered single threaded.
+
+  // set timer accuracy to 1 ms
+  timeBeginPeriod(1);
+  FILETIME    ft0 = { 0, 0 },
+              ft1 = { 0, 0 };
+  //
+  // Spin waiting for a change in system time. Get the matching
+  // performance counter value for that time.
+  //
+  ::GetSystemTimeAsFileTime(&ft0);
+  do {
+    ::GetSystemTimeAsFileTime(&ft1);
+
+    help_timer->_ref_point.counterMS.QuadPart = ::timeGetTime();
+    ::Sleep(0);
+  } while ((ft0.dwHighDateTime == ft1.dwHighDateTime) &&
+          (ft0.dwLowDateTime == ft1.dwLowDateTime));
+    help_timer->_ref_point.file_time = ft1;
+}
+
+void get_time(WindowsHelpTimer* help_timer, FILETIME& current_time) {
+  // we can't use query performance counter due to speed stepping
+  DWORD t = timeGetTime();
+  // NOTE: there is a signedness mismatch between _timeInMs (LONG) and
+  // t (DWORD); however the value is only exchanged and compared here.
+  volatile LONG* timeInMsPtr = &help_timer->_timeInMs;
+  // Make sure that we only inc wrapper once.
+  DWORD old = InterlockedExchange(timeInMsPtr, t);
+  if(old > t) {
+    // wrap
+    help_timer->_numWrapTimeInMs++;
+  }
+  LARGE_INTEGER elapsedMS;
+  elapsedMS.HighPart = help_timer->_numWrapTimeInMs;
+  elapsedMS.LowPart = t;
+
+  elapsedMS.QuadPart = elapsedMS.QuadPart -
+      help_timer->_ref_point.counterMS.QuadPart;
+
+  // Translate to 100-nanoseconds intervals (FILETIME resolution)
+  // and add to reference FILETIME to get current FILETIME.
+  ULARGE_INTEGER filetime_ref_as_ul;
+
+  filetime_ref_as_ul.HighPart =
+      help_timer->_ref_point.file_time.dwHighDateTime;
+  filetime_ref_as_ul.LowPart =
+      help_timer->_ref_point.file_time.dwLowDateTime;
+  filetime_ref_as_ul.QuadPart +=
+      (ULONGLONG)((elapsedMS.QuadPart)*1000*10);
+
+  // Copy to result
+  current_time.dwHighDateTime = filetime_ref_as_ul.HighPart;
+  current_time.dwLowDateTime = filetime_ref_as_ul.LowPart;
+  }
+
+  // A clock reading times from the Windows API.
+  class WindowsSystemClock : public RtpRtcpClock {
+  public:
+    WindowsSystemClock(WindowsHelpTimer* helpTimer)
+      : _helpTimer(helpTimer) {}
+
+    virtual ~WindowsSystemClock() {}
+
+    virtual WebRtc_UWord32 GetTimeInMS();
+
+    virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac);
+
+  private:
+    WindowsHelpTimer* _helpTimer;
+};
+
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+// A clock reading times from the POSIX API.
+class UnixSystemClock : public RtpRtcpClock {
+public:
+  UnixSystemClock() {}
+  virtual ~UnixSystemClock() {}
+
+  virtual WebRtc_UWord32 GetTimeInMS();
+
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac);
+};
+#endif
+
+#if defined(_WIN32)
+WebRtc_UWord32 WindowsSystemClock::GetTimeInMS() {
+  return timeGetTime();
+}
+
+// Use the system time (roughly synchronised to the tick, and
+// extrapolated using the system performance counter).
+void WindowsSystemClock::CurrentNTP(WebRtc_UWord32& secs,
+                                    WebRtc_UWord32& frac) {
+  const WebRtc_UWord64 FILETIME_1970 = 0x019db1ded53e8000;
+
+  FILETIME StartTime;
+  WebRtc_UWord64 Time;
+  struct timeval tv;
+
+  // We can't use the query performance counter since it can change depending
+  // on CPU speed stepping.
+  get_time(_helpTimer, StartTime);
+
+  Time = (((WebRtc_UWord64) StartTime.dwHighDateTime) << 32) +
+         (WebRtc_UWord64) StartTime.dwLowDateTime;
+
+  // Convert the 100-nanosecond-unit FILETIME value to timeval format.
+  Time -= FILETIME_1970;
+
+  tv.tv_sec = (WebRtc_UWord32)(Time / (WebRtc_UWord64)10000000);
+  tv.tv_usec = (WebRtc_UWord32)((Time % (WebRtc_UWord64)10000000) / 10);
+
+  double dtemp;
+
+  secs = tv.tv_sec + NTP_JAN_1970;
+  dtemp = tv.tv_usec / 1e6;
+
+  if (dtemp >= 1) {
+    dtemp -= 1;
+    secs++;
+  } else if (dtemp < -1) {
+    dtemp += 1;
+    secs--;
+  }
+  dtemp *= NTP_FRAC;
+  frac = (WebRtc_UWord32)dtemp;
+}
+
+#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
+
+WebRtc_UWord32 UnixSystemClock::GetTimeInMS() {
+  struct timeval tv;
+  struct timezone tz;
+  WebRtc_UWord32 val;
+
+  gettimeofday(&tv, &tz);
+  val = (WebRtc_UWord32)(tv.tv_sec * 1000 + tv.tv_usec / 1000);
+  return val;
+}
+
+// Use the system time.
+void UnixSystemClock::CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+  double dtemp;
+  struct timeval tv;
+  struct timezone tz;
+  tz.tz_minuteswest  = 0;
+  tz.tz_dsttime = 0;
+  gettimeofday(&tv, &tz);
+
+  secs = tv.tv_sec + NTP_JAN_1970;
+  dtemp = tv.tv_usec / 1e6;
+  if (dtemp >= 1) {
+    dtemp -= 1;
+    secs++;
+  } else if (dtemp < -1) {
+    dtemp += 1;
+    secs--;
+  }
+  dtemp *= NTP_FRAC;
+  frac = (WebRtc_UWord32)dtemp;
+}
+#endif
+
+#if defined(_WIN32)
+// Keeps the global state for the Windows implementation of RtpRtcpClock.
+// Note that this is a POD. Only PODs are allowed to have static storage
+// duration according to the Google Style guide.
+static WindowsHelpTimer global_help_timer = {0, 0, {{ 0, 0}, 0}, 0};
+#endif
+
+RtpRtcpClock* GetSystemClock() {
+#if defined(_WIN32)
+  return new WindowsSystemClock(&global_help_timer);
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+  return new UnixSystemClock();
+#else
+  return NULL;
+#endif
+}
+
+WebRtc_UWord32 GetCurrentRTP(RtpRtcpClock* clock, WebRtc_UWord32 freq) {
+  const bool use_global_clock = (clock == NULL);
+  RtpRtcpClock* local_clock = clock;
+  if (use_global_clock) {
+    local_clock = GetSystemClock();
+  }
+  WebRtc_UWord32 secs = 0, frac = 0;
+  local_clock->CurrentNTP(secs, frac);
+  if (use_global_clock) {
+    delete local_clock;
+  }
+  return ConvertNTPTimeToRTP(secs, frac, freq);
+}
+
+WebRtc_UWord32 ConvertNTPTimeToRTP(WebRtc_UWord32 NTPsec,
+                                   WebRtc_UWord32 NTPfrac,
+                                   WebRtc_UWord32 freq) {
+  float ftemp = (float)NTPfrac / (float)NTP_FRAC;
+  WebRtc_UWord32 tmp = (WebRtc_UWord32)(ftemp * freq);
+  return NTPsec * freq + tmp;
+}
+
+WebRtc_UWord32 ConvertNTPTimeToMS(WebRtc_UWord32 NTPsec,
+                                  WebRtc_UWord32 NTPfrac) {
+  int freq = 1000;
+  float ftemp = (float)NTPfrac / (float)NTP_FRAC;
+  WebRtc_UWord32 tmp = (WebRtc_UWord32)(ftemp * freq);
+  WebRtc_UWord32 MStime = NTPsec * freq + tmp;
+  return MStime;
+}
+
+bool OldTimestamp(uint32_t newTimestamp,
+                  uint32_t existingTimestamp,
+                  bool* wrapped) {
+  bool tmpWrapped =
+    (newTimestamp < 0x0000ffff && existingTimestamp > 0xffff0000) ||
+    (newTimestamp > 0xffff0000 && existingTimestamp < 0x0000ffff);
+  *wrapped = tmpWrapped;
+  if (existingTimestamp > newTimestamp && !tmpWrapped) {
+    return true;
+  } else if (existingTimestamp <= newTimestamp && !tmpWrapped) {
+    return false;
+  } else if (existingTimestamp < newTimestamp && tmpWrapped) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+/*
+ * Misc utility routines
+ */
+
+#if defined(_WIN32)
+bool StringCompare(const char* str1, const char* str2,
+                   const WebRtc_UWord32 length) {
+  return (_strnicmp(str1, str2, length) == 0) ? true : false;
+}
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+bool StringCompare(const char* str1, const char* str2,
+                   const WebRtc_UWord32 length) {
+  return (strncasecmp(str1, str2, length) == 0) ? true : false;
+}
+#endif
+
+#if !defined(WEBRTC_LITTLE_ENDIAN) && !defined(WEBRTC_BIG_ENDIAN)
+#error Either WEBRTC_LITTLE_ENDIAN or WEBRTC_BIG_ENDIAN must be defined
+#endif
+
+/* for RTP/RTCP
+    All integer fields are carried in network byte order, that is, most
+    significant byte (octet) first.  AKA big-endian.
+*/
+void AssignUWord32ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value >> 24);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value >> 16);
+  dataBuffer[2] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[3] = static_cast<WebRtc_UWord8>(value);
+#else
+  WebRtc_UWord32* ptr = reinterpret_cast<WebRtc_UWord32*>(dataBuffer);
+  ptr[0] = value;
+#endif
+}
+
+void AssignUWord24ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value >> 16);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[2] = static_cast<WebRtc_UWord8>(value);
+#else
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[2] = static_cast<WebRtc_UWord8>(value >> 16);
+#endif
+}
+
+void AssignUWord16ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord16 value) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value);
+#else
+  WebRtc_UWord16* ptr = reinterpret_cast<WebRtc_UWord16*>(dataBuffer);
+  ptr[0] = value;
+#endif
+}
+
+WebRtc_UWord16 BufferToUWord16(const WebRtc_UWord8* dataBuffer) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  return (dataBuffer[0] << 8) + dataBuffer[1];
+#else
+  return *reinterpret_cast<const WebRtc_UWord16*>(dataBuffer);
+#endif
+}
+
+WebRtc_UWord32 BufferToUWord24(const WebRtc_UWord8* dataBuffer) {
+  return (dataBuffer[0] << 16) + (dataBuffer[1] << 8) + dataBuffer[2];
+}
+
+WebRtc_UWord32 BufferToUWord32(const WebRtc_UWord8* dataBuffer) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  return (dataBuffer[0] << 24) + (dataBuffer[1] << 16) + (dataBuffer[2] << 8) +
+      dataBuffer[3];
+#else
+  return *reinterpret_cast<const WebRtc_UWord32*>(dataBuffer);
+#endif
+}
+
+WebRtc_UWord32 pow2(WebRtc_UWord8 exp) {
+  return 1 << exp;
+}
+
+void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
+  type = videoType;
+
+  switch (type) {
+    case kRtpNoVideo:
+      break;
+    case kRtpVp8Video: {
+      info.VP8.nonReferenceFrame = false;
+      info.VP8.beginningOfPartition = false;
+      info.VP8.partitionID = 0;
+      info.VP8.hasPictureID = false;
+      info.VP8.hasTl0PicIdx = false;
+      info.VP8.hasTID = false;
+      info.VP8.hasKeyIdx = false;
+      info.VP8.pictureID = -1;
+      info.VP8.tl0PicIdx = -1;
+      info.VP8.tID = -1;
+      info.VP8.layerSync = false;
+      info.VP8.frameWidth = 0;
+      info.VP8.frameHeight = 0;
+      break;
+    }
+    default:
+      break;
+  }
+}
+
+RTPHeaderParser::RTPHeaderParser(const WebRtc_UWord8* rtpData,
+                                 const WebRtc_UWord32 rtpDataLength)
+  : _ptrRTPDataBegin(rtpData),
+    _ptrRTPDataEnd(rtpData ? (rtpData + rtpDataLength) : NULL) {
+}
+
+RTPHeaderParser::~RTPHeaderParser() {
+}
+
+bool RTPHeaderParser::RTCP() const {
+  // 72 to 76 is reserved for RTP
+  // 77 to 79 are not reserved, but they are not assigned, so we block them
+  // for RTCP 200 SR  == marker bit + 72
+  // for RTCP 204 APP == marker bit + 76
+  /*
+  *       RTCP
+  *
+  * FIR      full INTRA-frame request             192     [RFC2032]   supported
+  * NACK     negative acknowledgement             193     [RFC2032]
+  * IJ       Extended inter-arrival jitter report 195     [RFC-ietf-avt-rtp-toff
+  * set-07.txt] http://tools.ietf.org/html/draft-ietf-avt-rtp-toffset-07
+  * SR       sender report                        200     [RFC3551]   supported
+  * RR       receiver report                      201     [RFC3551]   supported
+  * SDES     source description                   202     [RFC3551]   supported
+  * BYE      goodbye                              203     [RFC3551]   supported
+  * APP      application-defined                  204     [RFC3551]   ignored
+  * RTPFB    Transport layer FB message           205     [RFC4585]   supported
+  * PSFB     Payload-specific FB message          206     [RFC4585]   supported
+  * XR       extended report                      207     [RFC3611]   supported
+  */
+
+  /* 205       RFC 5104
+   * FMT 1      NACK       supported
+   * FMT 2      reserved
+   * FMT 3      TMMBR      supported
+   * FMT 4      TMMBN      supported
+   */
+
+  /* 206      RFC 5104
+  * FMT 1:     Picture Loss Indication (PLI)                      supported
+  * FMT 2:     Slice Loss Indication (SLI)
+  * FMT 3:     Reference Picture Selection Indication (RPSI)
+  * FMT 4:     Full Intra Request (FIR) Command                   supported
+  * FMT 5:     Temporal-Spatial Trade-off Request (TSTR)
+  * FMT 6:     Temporal-Spatial Trade-off Notification (TSTN)
+  * FMT 7:     Video Back Channel Message (VBCM)
+  * FMT 15:    Application layer FB message
+  */
+
+  const WebRtc_UWord8  payloadType = _ptrRTPDataBegin[1];
+
+  bool RTCP = false;
+
+  // check if this is a RTCP packet
+  switch (payloadType) {
+    case 192:
+      RTCP = true;
+      break;
+    case 193:
+      // not supported
+      // pass through and check for a potential RTP packet
+      break;
+    case 195:
+    case 200:
+    case 201:
+    case 202:
+    case 203:
+    case 204:
+    case 205:
+    case 206:
+    case 207:
+      RTCP = true;
+      break;
+  }
+  return RTCP;
+}
+
+bool RTPHeaderParser::Parse(WebRtcRTPHeader& parsedPacket,
+                            RtpHeaderExtensionMap* ptrExtensionMap) const {
+  const ptrdiff_t length = _ptrRTPDataEnd - _ptrRTPDataBegin;
+
+  if (length < 12) {
+    return false;
+  }
+
+  // Version
+  const WebRtc_UWord8 V  = _ptrRTPDataBegin[0] >> 6;
+  // Padding
+  const bool          P  = ((_ptrRTPDataBegin[0] & 0x20) == 0) ? false : true;
+  // eXtension
+  const bool          X  = ((_ptrRTPDataBegin[0] & 0x10) == 0) ? false : true;
+  const WebRtc_UWord8 CC = _ptrRTPDataBegin[0] & 0x0f;
+  const bool          M  = ((_ptrRTPDataBegin[1] & 0x80) == 0) ? false : true;
+
+  const WebRtc_UWord8 PT = _ptrRTPDataBegin[1] & 0x7f;
+
+  const WebRtc_UWord16 sequenceNumber = (_ptrRTPDataBegin[2] << 8) +
+      _ptrRTPDataBegin[3];
+
+  const WebRtc_UWord8* ptr = &_ptrRTPDataBegin[4];
+
+  WebRtc_UWord32 RTPTimestamp = *ptr++ << 24;
+  RTPTimestamp += *ptr++ << 16;
+  RTPTimestamp += *ptr++ << 8;
+  RTPTimestamp += *ptr++;
+
+  WebRtc_UWord32 SSRC = *ptr++ << 24;
+  SSRC += *ptr++ << 16;
+  SSRC += *ptr++ << 8;
+  SSRC += *ptr++;
+
+  if (V != 2) {
+    return false;
+  }
+
+  const WebRtc_UWord8 CSRCocts = CC * 4;
+
+  if ((ptr + CSRCocts) > _ptrRTPDataEnd) {
+    return false;
+  }
+
+  parsedPacket.header.markerBit      = M;
+  parsedPacket.header.payloadType    = PT;
+  parsedPacket.header.sequenceNumber = sequenceNumber;
+  parsedPacket.header.timestamp      = RTPTimestamp;
+  parsedPacket.header.ssrc           = SSRC;
+  parsedPacket.header.numCSRCs       = CC;
+  parsedPacket.header.paddingLength  = P ? *(_ptrRTPDataEnd - 1) : 0;
+
+  for (unsigned int i = 0; i < CC; ++i) {
+    WebRtc_UWord32 CSRC = *ptr++ << 24;
+    CSRC += *ptr++ << 16;
+    CSRC += *ptr++ << 8;
+    CSRC += *ptr++;
+    parsedPacket.header.arrOfCSRCs[i] = CSRC;
+  }
+  parsedPacket.type.Audio.numEnergy = parsedPacket.header.numCSRCs;
+
+  parsedPacket.header.headerLength   = 12 + CSRCocts;
+
+  // If in effect, MAY be omitted for those packets for which the offset
+  // is zero.
+  parsedPacket.extension.transmissionTimeOffset = 0;
+
+  if (X) {
+    /* RTP header extension, RFC 3550.
+     0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      defined by profile       |           length              |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |                        header extension                       |
+    |                             ....                              |
+    */
+    const ptrdiff_t remain = _ptrRTPDataEnd - ptr;
+    if (remain < 4) {
+      return false;
+    }
+
+    parsedPacket.header.headerLength += 4;
+
+    WebRtc_UWord16 definedByProfile = *ptr++ << 8;
+    definedByProfile += *ptr++;
+
+    WebRtc_UWord16 XLen = *ptr++ << 8;
+    XLen += *ptr++; // in 32 bit words
+    XLen *= 4; // in octs
+
+    if (remain < (4 + XLen)) {
+      return false;
+    }
+    if (definedByProfile == RTP_ONE_BYTE_HEADER_EXTENSION) {
+      const WebRtc_UWord8* ptrRTPDataExtensionEnd = ptr + XLen;
+      ParseOneByteExtensionHeader(parsedPacket,
+                                  ptrExtensionMap,
+                                  ptrRTPDataExtensionEnd,
+                                  ptr);
+    }
+    parsedPacket.header.headerLength += XLen;
+  }
+  return true;
+}
+
+void RTPHeaderParser::ParseOneByteExtensionHeader(
+    WebRtcRTPHeader& parsedPacket,
+    const RtpHeaderExtensionMap* ptrExtensionMap,
+    const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+    const WebRtc_UWord8* ptr) const {
+  if (!ptrExtensionMap) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1, "No extension map.");
+    return;
+  }
+
+  while (ptrRTPDataExtensionEnd - ptr > 0) {
+    //  0
+    //  0 1 2 3 4 5 6 7
+    // +-+-+-+-+-+-+-+-+
+    // |  ID   |  len  |
+    // +-+-+-+-+-+-+-+-+
+
+    const WebRtc_UWord8 id = (*ptr & 0xf0) >> 4;
+    const WebRtc_UWord8 len = (*ptr & 0x0f);
+    ptr++;
+
+    if (id == 15) {
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+                   "Ext id: 15 encountered, parsing terminated.");
+      return;
+    }
+
+    RTPExtensionType type;
+    if (ptrExtensionMap->GetType(id, &type) != 0) {
+      WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                   "Failed to find extension id: %d", id);
+      return;
+    }
+
+    switch (type) {
+      case kRtpExtensionTransmissionTimeOffset: {
+        if (len != 2) {
+          WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+                       "Incorrect transmission time offset len: %d", len);
+          return;
+        }
+        //  0                   1                   2                   3
+        //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        // |  ID   | len=2 |              transmission offset              |
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+        WebRtc_Word32 transmissionTimeOffset = *ptr++ << 16;
+        transmissionTimeOffset += *ptr++ << 8;
+        transmissionTimeOffset += *ptr++;
+        parsedPacket.extension.transmissionTimeOffset = transmissionTimeOffset;
+        break;
+      }
+      case kRtpExtensionAudioLevel: {
+        //   --- Only used for debugging ---
+        //  0                   1                   2                   3
+        //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        // |  ID   | len=0 |V|   level     |      0x00     |      0x00     |
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        //
+
+        // Parse out the fields but only use it for debugging for now.
+        // const WebRtc_UWord8 V = (*ptr & 0x80) >> 7;
+        // const WebRtc_UWord8 level = (*ptr & 0x7f);
+        // DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u,
+        // level=%u", ID, len, V, level);
+        break;
+      }
+      default: {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+                     "Extension type not implemented.");
+        return;
+      }
+    }
+    WebRtc_UWord8 num_bytes = ParsePaddingBytes(ptrRTPDataExtensionEnd, ptr);
+    ptr += num_bytes;
+  }
+}
+
+WebRtc_UWord8 RTPHeaderParser::ParsePaddingBytes(
+  const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+  const WebRtc_UWord8* ptr) const {
+
+  WebRtc_UWord8 num_zero_bytes = 0;
+  while (ptrRTPDataExtensionEnd - ptr > 0) {
+    if (*ptr != 0) {
+      return num_zero_bytes;
+    }
+    ptr++;
+    num_zero_bytes++;
+  }
+  return num_zero_bytes;
+}
+
+// RTP payload parser
+RTPPayloadParser::RTPPayloadParser(const RtpVideoCodecTypes videoType,
+                                   const WebRtc_UWord8* payloadData,
+                                   WebRtc_UWord16 payloadDataLength,
+                                   WebRtc_Word32 id)
+  :
+  _id(id),
+  _dataPtr(payloadData),
+  _dataLength(payloadDataLength),
+  _videoType(videoType) {
+}
+
+RTPPayloadParser::~RTPPayloadParser() {
+}
+
+bool RTPPayloadParser::Parse(RTPPayload& parsedPacket) const {
+  parsedPacket.SetType(_videoType);
+
+  switch (_videoType) {
+    case kRtpNoVideo:
+      return ParseGeneric(parsedPacket);
+    case kRtpVp8Video:
+      return ParseVP8(parsedPacket);
+    default:
+      return false;
+  }
+}
+
+bool RTPPayloadParser::ParseGeneric(RTPPayload& /*parsedPacket*/) const {
+  return false;
+}
+
+//
+// VP8 format:
+//
+// Payload descriptor
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |X|R|N|S|PartID | (REQUIRED)
+//      +-+-+-+-+-+-+-+-+
+// X:   |I|L|T|K|  RSV  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// I:   |   PictureID   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// L:   |   TL0PICIDX   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// T/K: |TID:Y| KEYIDX  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+//
+// Payload header (considered part of the actual payload, sent to decoder)
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |Size0|H| VER |P|
+//      +-+-+-+-+-+-+-+-+
+//      |      ...      |
+//      +               +
+
+bool RTPPayloadParser::ParseVP8(RTPPayload& parsedPacket) const {
+  RTPPayloadVP8* vp8 = &parsedPacket.info.VP8;
+  const WebRtc_UWord8* dataPtr = _dataPtr;
+  int dataLength = _dataLength;
+
+  // Parse mandatory first byte of payload descriptor
+  bool extension = (*dataPtr & 0x80) ? true : false;            // X bit
+  vp8->nonReferenceFrame = (*dataPtr & 0x20) ? true : false;    // N bit
+  vp8->beginningOfPartition = (*dataPtr & 0x10) ? true : false; // S bit
+  vp8->partitionID = (*dataPtr & 0x0F);          // PartID field
+
+  if (vp8->partitionID > 8) {
+    // Weak check for corrupt data: PartID MUST NOT be larger than 8.
+    return false;
+  }
+
+  // Advance dataPtr and decrease remaining payload size
+  dataPtr++;
+  dataLength--;
+
+  if (extension) {
+    const int parsedBytes = ParseVP8Extension(vp8, dataPtr, dataLength);
+    if (parsedBytes < 0) return false;
+    dataPtr += parsedBytes;
+    dataLength -= parsedBytes;
+  }
+
+  if (dataLength <= 0) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "Error parsing VP8 payload descriptor; payload too short");
+    return false;
+  }
+
+  // Read P bit from payload header (only at beginning of first partition)
+  if (dataLength > 0 && vp8->beginningOfPartition && vp8->partitionID == 0) {
+    parsedPacket.frameType = (*dataPtr & 0x01) ? kPFrame : kIFrame;
+  } else {
+    parsedPacket.frameType = kPFrame;
+  }
+  if (0 != ParseVP8FrameSize(parsedPacket, dataPtr, dataLength)) {
+    return false;
+  }
+  parsedPacket.info.VP8.data       = dataPtr;
+  parsedPacket.info.VP8.dataLength = dataLength;
+  return true;
+}
+
+int RTPPayloadParser::ParseVP8FrameSize(RTPPayload& parsedPacket,
+                                        const WebRtc_UWord8* dataPtr,
+                                        int dataLength) const {
+  if (parsedPacket.frameType != kIFrame) {
+    // The frame size is included in the payload header only for I-frames.
+    return 0;
+  }
+  if (dataLength < 10) {
+    // For an I-frame we should always have the uncompressed VP8 header
+    // in the beginning of the partition.
+    return -1;
+  }
+  RTPPayloadVP8* vp8 = &parsedPacket.info.VP8;
+  vp8->frameWidth = ((dataPtr[7] << 8) + dataPtr[6]) & 0x3FFF;
+  vp8->frameHeight = ((dataPtr[9] << 8) + dataPtr[8]) & 0x3FFF;
+  return 0;
+}
+
+int RTPPayloadParser::ParseVP8Extension(RTPPayloadVP8* vp8,
+                                        const WebRtc_UWord8* dataPtr,
+                                        int dataLength) const {
+  int parsedBytes = 0;
+  if (dataLength <= 0) return -1;
+  // Optional X field is present
+  vp8->hasPictureID = (*dataPtr & 0x80) ? true : false; // I bit
+  vp8->hasTl0PicIdx = (*dataPtr & 0x40) ? true : false; // L bit
+  vp8->hasTID = (*dataPtr & 0x20) ? true : false;       // T bit
+  vp8->hasKeyIdx = (*dataPtr & 0x10) ? true : false;    // K bit
+
+  // Advance dataPtr and decrease remaining payload size
+  dataPtr++;
+  parsedBytes++;
+  dataLength--;
+
+  if (vp8->hasPictureID) {
+    if (ParseVP8PictureID(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
+      return -1;
+    }
+  }
+
+  if (vp8->hasTl0PicIdx) {
+    if (ParseVP8Tl0PicIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
+      return -1;
+    }
+  }
+
+  if (vp8->hasTID || vp8->hasKeyIdx) {
+    if (ParseVP8TIDAndKeyIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
+      return -1;
+    }
+  }
+  return parsedBytes;
+}
+
+int RTPPayloadParser::ParseVP8PictureID(RTPPayloadVP8* vp8,
+                                        const WebRtc_UWord8** dataPtr,
+                                        int* dataLength,
+                                        int* parsedBytes) const {
+  if (*dataLength <= 0) return -1;
+  vp8->pictureID = (**dataPtr & 0x7F);
+  if (**dataPtr & 0x80) {
+    (*dataPtr)++;
+    (*parsedBytes)++;
+    if (--(*dataLength) <= 0) return -1;
+    // PictureID is 15 bits
+    vp8->pictureID = (vp8->pictureID << 8) +** dataPtr;
+  }
+  (*dataPtr)++;
+  (*parsedBytes)++;
+  (*dataLength)--;
+  return 0;
+}
+
+int RTPPayloadParser::ParseVP8Tl0PicIdx(RTPPayloadVP8* vp8,
+                                        const WebRtc_UWord8** dataPtr,
+                                        int* dataLength,
+                                        int* parsedBytes) const {
+  if (*dataLength <= 0) return -1;
+  vp8->tl0PicIdx = **dataPtr;
+  (*dataPtr)++;
+  (*parsedBytes)++;
+  (*dataLength)--;
+  return 0;
+}
+
+int RTPPayloadParser::ParseVP8TIDAndKeyIdx(RTPPayloadVP8* vp8,
+                                           const WebRtc_UWord8** dataPtr,
+                                           int* dataLength,
+                                           int* parsedBytes) const {
+  if (*dataLength <= 0) return -1;
+  if (vp8->hasTID) {
+    vp8->tID = ((**dataPtr >> 6) & 0x03);
+    vp8->layerSync = (**dataPtr & 0x20) ? true : false;  // Y bit
+  }
+  if (vp8->hasKeyIdx) {
+    vp8->keyIdx = (**dataPtr & 0x1F);
+  }
+  (*dataPtr)++;
+  (*parsedBytes)++;
+  (*dataLength)--;
+  return 0;
+}
+
+}  // namespace ModuleRTPUtility
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_utility.h b/trunk/src/modules/rtp_rtcp/source/rtp_utility.h
new file mode 100644
index 0000000..ad88f11
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_utility.h
@@ -0,0 +1,245 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
+
+#include <cstddef> // size_t, ptrdiff_t
+
+#include "typedefs.h"
+#include "rtp_header_extension.h"
+#include "rtp_rtcp_config.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+// Video codec types carried in RTP payloads.
+enum RtpVideoCodecTypes
+{
+    kRtpNoVideo       = 0,
+    kRtpFecVideo      = 10,   // Forward error correction payload.
+    kRtpVp8Video      = 11    // VP8 video payload.
+};
+
+// Mask for the marker bit in the second byte of the RTP header.
+const WebRtc_UWord8 kRtpMarkerBitMask = 0x80;
+
+namespace ModuleRTPUtility
+{
+    // January 1970, in NTP seconds.
+    const uint32_t NTP_JAN_1970 = 2208988800UL;
+
+    // Magic NTP fractional unit.
+    const double NTP_FRAC = 4.294967296E+9;
+
+    // Audio codec parameters associated with a registered payload type.
+    struct AudioPayload
+    {
+        WebRtc_UWord32    frequency;
+        WebRtc_UWord8     channels;
+        WebRtc_UWord8     bitsPerSample;
+        WebRtc_UWord32    rate;
+        bool              trueStereoCodec;
+    };
+    // Video codec parameters associated with a registered payload type.
+    struct VideoPayload
+    {
+        RtpVideoCodecTypes   videoCodecType;
+        WebRtc_UWord32       maxRate;
+    };
+    // A payload type is either audio or video; which member is valid is
+    // recorded by Payload::audio below.
+    union PayloadUnion
+    {
+        AudioPayload Audio;
+        VideoPayload Video;
+    };
+    // A registered payload type: codec name plus media-specific settings.
+    struct Payload
+    {
+        char name[RTP_PAYLOAD_NAME_SIZE];
+        bool audio;    // true: typeSpecific.Audio is valid; false: Video.
+        PayloadUnion typeSpecific;
+    };
+
+    // Return a clock that reads the time as reported by the operating
+    // system. The returned instances are guaranteed to read the same
+    // times; in particular, they return relative times relative to
+    // the same base.
+    // Note that even though the instances returned by this function
+    // read the same times a new object is created every time this
+    // API is called. The ownership of this object belongs to the
+    // caller.
+    RtpRtcpClock* GetSystemClock();
+
+    // Return the current RTP timestamp from the NTP timestamp
+    // returned by the specified clock.
+    WebRtc_UWord32 GetCurrentRTP(RtpRtcpClock* clock, WebRtc_UWord32 freq);
+
+    // Return the current RTP absolute timestamp.
+    WebRtc_UWord32 ConvertNTPTimeToRTP(WebRtc_UWord32 NTPsec,
+                                       WebRtc_UWord32 NTPfrac,
+                                       WebRtc_UWord32 freq);
+
+    // Return the time in milliseconds corresponding to the specified
+    // NTP timestamp.
+    WebRtc_UWord32 ConvertNTPTimeToMS(WebRtc_UWord32 NTPsec,
+                                      WebRtc_UWord32 NTPfrac);
+
+    WebRtc_UWord32 pow2(WebRtc_UWord8 exp);
+
+    // Returns true if |newTimestamp| is older than |existingTimestamp|.
+    // |wrapped| will be set to true if there has been a wraparound between the
+    // two timestamps.
+    bool OldTimestamp(uint32_t newTimestamp,
+                      uint32_t existingTimestamp,
+                      bool* wrapped);
+
+    bool StringCompare(const char* str1,
+                       const char* str2,
+                       const WebRtc_UWord32 length);
+
+    void AssignUWord32ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value);
+    void AssignUWord24ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value);
+    void AssignUWord16ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord16 value);
+
+    /**
+     * Converts a network-ordered two-byte input buffer to a host-ordered value.
+     * \param[in] dataBuffer Network-ordered two-byte buffer to convert.
+     * \return Host-ordered value.
+     */
+    WebRtc_UWord16 BufferToUWord16(const WebRtc_UWord8* dataBuffer);
+
+    /**
+     * Converts a network-ordered three-byte input buffer to a host-ordered value.
+     * \param[in] dataBuffer Network-ordered three-byte buffer to convert.
+     * \return Host-ordered value.
+     */
+    WebRtc_UWord32 BufferToUWord24(const WebRtc_UWord8* dataBuffer);
+
+    /**
+     * Converts a network-ordered four-byte input buffer to a host-ordered value.
+     * \param[in] dataBuffer Network-ordered four-byte buffer to convert.
+     * \return Host-ordered value.
+     */
+    WebRtc_UWord32 BufferToUWord32(const WebRtc_UWord8* dataBuffer);
+
+    // Parser for the RTP header and its extensions. Operates directly on
+    // the buffer handed to the constructor; no copy is made, so the
+    // buffer must outlive the parser.
+    class RTPHeaderParser
+    {
+    public:
+        RTPHeaderParser(const WebRtc_UWord8* rtpData,
+                        const WebRtc_UWord32 rtpDataLength);
+        ~RTPHeaderParser();
+
+        // Whether the buffer holds an RTCP (rather than RTP) packet.
+        bool RTCP() const;
+        // Parse the buffer into |parsedPacket|. |ptrExtensionMap|, when
+        // non-NULL, is used to resolve header-extension IDs.
+        bool Parse(WebRtcRTPHeader& parsedPacket,
+                   RtpHeaderExtensionMap* ptrExtensionMap = NULL) const;
+
+    private:
+        // Decode one-byte header-extension elements into |parsedPacket|.
+        void ParseOneByteExtensionHeader(
+            WebRtcRTPHeader& parsedPacket,
+            const RtpHeaderExtensionMap* ptrExtensionMap,
+            const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+            const WebRtc_UWord8* ptr) const;
+
+        // NOTE(review): presumably returns the padding length within the
+        // extension area — confirm against the implementation.
+        WebRtc_UWord8 ParsePaddingBytes(
+            const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+            const WebRtc_UWord8* ptr) const;
+
+        // Bounds of the packet buffer given to the constructor.
+        const WebRtc_UWord8* const _ptrRTPDataBegin;
+        const WebRtc_UWord8* const _ptrRTPDataEnd;
+    };
+
+    // Frame type extracted from the codec payload.
+    enum FrameTypes
+    {
+        kIFrame,    // key frame
+        kPFrame         // Delta frame
+    };
+
+    // Fields parsed from the VP8 payload descriptor and payload header.
+    // The has* flags record which optional extension fields were present.
+    struct RTPPayloadVP8
+    {
+        bool                 nonReferenceFrame;     // N bit.
+        bool                 beginningOfPartition;  // S bit.
+        int                  partitionID;
+        bool                 hasPictureID;          // I bit.
+        bool                 hasTl0PicIdx;          // L bit.
+        bool                 hasTID;                // T bit.
+        bool                 hasKeyIdx;             // K bit.
+        int                  pictureID;             // 7 or 15 bits.
+        int                  tl0PicIdx;
+        int                  tID;                   // Temporal layer index.
+        bool                 layerSync;             // Y bit.
+        int                  keyIdx;
+        // NOTE(review): presumably decoded from the key-frame payload
+        // header — confirm against ParseVP8FrameSize.
+        int                  frameWidth;
+        int                  frameHeight;
+
+        // Remaining payload after the descriptor (points into the
+        // caller's buffer; not owned).
+        const WebRtc_UWord8*   data; 
+        WebRtc_UWord16         dataLength;
+    };
+
+    union RTPPayloadUnion
+    {
+        RTPPayloadVP8   VP8;
+    };
+
+    // Result of RTPPayloadParser::Parse: codec type, frame type and the
+    // codec-specific fields.
+    struct RTPPayload
+    {
+        void SetType(RtpVideoCodecTypes videoType);
+
+        RtpVideoCodecTypes  type;
+        FrameTypes          frameType;
+        RTPPayloadUnion     info;
+    };
+
+    // RTP payload parser
+    // RTP payload parser: decodes the codec-specific payload descriptor
+    // (currently VP8 or generic) out of an RTP payload buffer.
+    class RTPPayloadParser
+    {
+    public:
+        // |payloadData| is not copied and must outlive the parser.
+        // NOTE(review): |id| looks like a trace/module identifier —
+        // confirm against the implementation.
+        RTPPayloadParser(const RtpVideoCodecTypes payloadType,
+                         const WebRtc_UWord8* payloadData,
+                         const WebRtc_UWord16 payloadDataLength, // Length w/o padding.
+                         const WebRtc_Word32 id);
+
+        ~RTPPayloadParser();
+
+        // Fills |parsedPacket|; returns false on a malformed payload.
+        bool Parse(RTPPayload& parsedPacket) const;
+
+    private:
+        bool ParseGeneric(RTPPayload& parsedPacket) const;
+
+        bool ParseVP8(RTPPayload& parsedPacket) const;
+
+        // Parse the optional extension fields announced by the X byte.
+        int ParseVP8Extension(RTPPayloadVP8 *vp8,
+                              const WebRtc_UWord8 *dataPtr,
+                              int dataLength) const;
+
+        // Parse the 7/15-bit PictureID field (I bit).
+        int ParseVP8PictureID(RTPPayloadVP8 *vp8,
+                              const WebRtc_UWord8 **dataPtr,
+                              int *dataLength,
+                              int *parsedBytes) const;
+
+        // Parse the one-byte TL0PICIDX field (L bit).
+        int ParseVP8Tl0PicIdx(RTPPayloadVP8 *vp8,
+                              const WebRtc_UWord8 **dataPtr,
+                              int *dataLength,
+                              int *parsedBytes) const;
+
+        // Parse the shared TID/Y/KEYIDX byte (T and/or K bits).
+        int ParseVP8TIDAndKeyIdx(RTPPayloadVP8 *vp8,
+                                 const WebRtc_UWord8 **dataPtr,
+                                 int *dataLength,
+                                 int *parsedBytes) const;
+
+        int ParseVP8FrameSize(RTPPayload& parsedPacket,
+                              const WebRtc_UWord8 *dataPtr,
+                              int dataLength) const;
+
+    private:
+        WebRtc_Word32               _id;
+        const WebRtc_UWord8*        _dataPtr;
+        const WebRtc_UWord16        _dataLength;
+        const RtpVideoCodecTypes    _videoType;
+    };
+
+}  // namespace ModuleRTPUtility
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/rtp_utility_test.cc b/trunk/src/modules/rtp_rtcp/source/rtp_utility_test.cc
new file mode 100644
index 0000000..eabc812
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/rtp_utility_test.cc
@@ -0,0 +1,288 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file contains unit tests for the ModuleRTPUtility.
+ */
+
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+using ModuleRTPUtility::RTPPayloadParser;
+using ModuleRTPUtility::RTPPayload;
+using ModuleRTPUtility::RTPPayloadVP8;
+
+// Payload descriptor
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |X|R|N|S|PartID | (REQUIRED)
+//      +-+-+-+-+-+-+-+-+
+// X:   |I|L|T|K|  RSV  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// I:   |   PictureID   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// L:   |   TL0PICIDX   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// T/K: |TID:Y| KEYIDX  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+//
+// Payload header
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |Size0|H| VER |P|
+//      +-+-+-+-+-+-+-+-+
+//      |     Size1     |
+//      +-+-+-+-+-+-+-+-+
+//      |     Size2     |
+//      +-+-+-+-+-+-+-+-+
+//      | Bytes 4..N of |
+//      | VP8 payload   |
+//      :               :
+//      +-+-+-+-+-+-+-+-+
+//      | OPTIONAL RTP  |
+//      | padding       |
+//      :               :
+//      +-+-+-+-+-+-+-+-+
+
+// Check the mandatory first byte of the parsed VP8 payload descriptor
+// against the expected N (non-reference), S (start of partition) and
+// PartID values.
+void VerifyBasicHeader(const RTPPayloadVP8 &header,
+                       bool N, bool S, int PartID) {
+  EXPECT_EQ(N, header.nonReferenceFrame);
+  EXPECT_EQ(S, header.beginningOfPartition);
+  EXPECT_EQ(PartID, header.partitionID);
+}
+
+// Check which optional extension fields (I, L, T, K) the parser
+// reported as present.
+void VerifyExtensions(const RTPPayloadVP8 &header,
+                      bool I, bool L, bool T, bool K) {
+  EXPECT_EQ(I, header.hasPictureID);
+  EXPECT_EQ(L, header.hasTl0PicIdx);
+  EXPECT_EQ(T, header.hasTID);
+  EXPECT_EQ(K, header.hasKeyIdx);
+}
+
+// X bit clear: only the mandatory first descriptor byte is consumed.
+TEST(ParseVP8Test, BasicHeader) {
+  WebRtc_UWord8 payload[4] = {0};
+  payload[0] = 0x14;  // Binary 0001 0100; S = 1, PartID = 4.
+  payload[1] = 0x01;  // P frame.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 4 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  // Payload starts right after the single descriptor byte.
+  EXPECT_EQ(payload + 1, parsedPacket.info.VP8.data);
+  EXPECT_EQ(4 - 1, parsedPacket.info.VP8.dataLength);
+}
+
+// Exercise both the 7-bit and 15-bit forms of the PictureID extension.
+TEST(ParseVP8Test, PictureID) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0xA0;  // X and N bits set.
+  payload[1] = 0x80;  // I bit: PictureID present.
+  payload[2] = 17;    // 7-bit PictureID (M bit clear).
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(17, parsedPacket.info.VP8.pictureID);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
+
+
+  // Re-use payload, but change to long PictureID.
+  payload[2] = 0x80 | 17;  // M bit set: 15-bit PictureID, high 7 bits.
+  payload[3] = 17;         // PictureID, low 8 bits.
+  RTPPayloadParser rtpPayloadParser2(kRtpVp8Video, payload, 10, 0);
+
+  ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ((17<<8) + 17, parsedPacket.info.VP8.pictureID);
+
+  EXPECT_EQ(payload + 4, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 4, parsedPacket.info.VP8.dataLength);
+}
+
+// Exercise the one-byte TL0PICIDX extension.
+TEST(ParseVP8Test, Tl0PicIdx) {
+  WebRtc_UWord8 payload[13] = {0};
+  payload[0] = 0x90;  // X and S bits set.
+  payload[1] = 0x40;  // L bit: TL0PICIDX present.
+  payload[2] = 17;    // TL0PICIDX value.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 13, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 1 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(17, parsedPacket.info.VP8.tl0PicIdx);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(13 - 3, parsedPacket.info.VP8.dataLength);
+}
+
+// Exercise the TID and Y (layer sync) bits of the T/K extension byte.
+TEST(ParseVP8Test, TIDAndLayerSync) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0x88;  // X bit set; PartID = 8.
+  payload[1] = 0x20;  // T bit: TID/Y present.
+  payload[2] = 0x80;  // TID(2) + LayerSync(false)
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 1 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(2, parsedPacket.info.VP8.tID);
+  EXPECT_FALSE(parsedPacket.info.VP8.layerSync);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
+}
+
+// Exercise the KEYIDX field of the T/K extension byte.
+TEST(ParseVP8Test, KeyIdx) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0x88;  // X bit set; PartID = 8.
+  payload[1] = 0x10;  // K = 1.
+  payload[2] = 0x11;  // KEYIDX = 17 decimal.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 1 /*K*/);
+
+  EXPECT_EQ(17, parsedPacket.info.VP8.keyIdx);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
+}
+
+// All four extensions present at once, in descriptor order I, L, T/K.
+TEST(ParseVP8Test, MultipleExtensions) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0x88;  // X bit set; PartID = 8.
+  payload[1] = 0x80 | 0x40 | 0x20 | 0x10;  // I, L, T and K bits set.
+  payload[2] = 0x80 | 17;    // PictureID, high 7 bits.
+  payload[3] = 17;           // PictureID, low 8 bits.
+  payload[4] = 42;           // Tl0PicIdx.
+  payload[5] = 0x40 | 0x20 | 0x11;  // TID(1) + LayerSync(true) + KEYIDX(17).
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 1 /*L*/, 1 /*T*/, 1 /*K*/);
+
+  EXPECT_EQ((17<<8) + 17, parsedPacket.info.VP8.pictureID);
+  EXPECT_EQ(42, parsedPacket.info.VP8.tl0PicIdx);
+  EXPECT_EQ(1, parsedPacket.info.VP8.tID);
+  EXPECT_EQ(17, parsedPacket.info.VP8.keyIdx);
+
+  EXPECT_EQ(payload + 6, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 6, parsedPacket.info.VP8.dataLength);
+}
+
+// A truncated descriptor (extensions announced but bytes missing) must
+// make Parse fail rather than read past the buffer.
+TEST(ParseVP8Test, TooShortHeader) {
+  WebRtc_UWord8 payload[4] = {0};
+  payload[0] = 0x88;
+  payload[1] = 0x80 | 0x40 | 0x20 | 0x10;  // All extensions are enabled...
+  payload[2] = 0x80 | 17;  // ... but only 2 bytes PictureID is provided.
+  payload[3] = 17;  // PictureID, low 8 bits.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
+
+  RTPPayload parsedPacket;
+  EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
+}
+
+// Round trip: packetize a frame with RtpFormatVp8 and check that the
+// parser reproduces the header fields that were fed to the packetizer.
+TEST(ParseVP8Test, TestWithPacketizer) {
+  WebRtc_UWord8 payload[10] = {0};
+  WebRtc_UWord8 packet[20] = {0};
+  RTPVideoHeaderVP8 inputHeader;
+  inputHeader.nonReference = true;
+  inputHeader.pictureId = 300;  // > 127, so needs the 15-bit form.
+  inputHeader.temporalIdx = 1;
+  inputHeader.layerSync = false;
+  inputHeader.tl0PicIdx = kNoTl0PicIdx;  // Disable.
+  inputHeader.keyIdx = 31;
+  RtpFormatVp8 packetizer(payload, 10, inputHeader, 20);
+  bool last;
+  int send_bytes;
+  // The whole frame must fit in a single packet here.
+  ASSERT_EQ(0, packetizer.NextPacket(packet, &send_bytes, &last));
+  ASSERT_TRUE(last);
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, packet, send_bytes, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8,
+                    inputHeader.nonReference /*N*/,
+                    1 /*S*/,
+                    0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8,
+                   1 /*I*/,
+                   0 /*L*/,
+                   1 /*T*/,
+                   1 /*K*/);
+
+  EXPECT_EQ(inputHeader.pictureId, parsedPacket.info.VP8.pictureID);
+  EXPECT_EQ(inputHeader.temporalIdx, parsedPacket.info.VP8.tID);
+  EXPECT_EQ(inputHeader.layerSync, parsedPacket.info.VP8.layerSync);
+  EXPECT_EQ(inputHeader.keyIdx, parsedPacket.info.VP8.keyIdx);
+
+  // Descriptor is 5 bytes: base + X byte + 2-byte PictureID + T/K byte.
+  EXPECT_EQ(packet + 5, parsedPacket.info.VP8.data);
+  EXPECT_EQ(send_bytes - 5, parsedPacket.info.VP8.dataLength);
+}
+
+}  // namespace
diff --git a/trunk/src/modules/rtp_rtcp/source/ssrc_database.cc b/trunk/src/modules/rtp_rtcp/source/ssrc_database.cc
new file mode 100644
index 0000000..b3e9ab0
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/ssrc_database.cc
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ssrc_database.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include <stdlib.h>
+#include <cassert>
+
+#ifdef _WIN32
+    #include <windows.h>
+    #include <MMSystem.h> //timeGetTime
+
+// TODO(hellner): investigate if it is necessary to disable these warnings.
+    #pragma warning(disable:4311)
+    #pragma warning(disable:4312)
+#else
+    #include <stdio.h>
+    #include <string.h>
+    #include <time.h>
+    #include <sys/time.h>
+#endif
+
+namespace webrtc {
+// Access the process-wide singleton; reference counting is delegated to
+// the shared GetStaticInstance template.
+SSRCDatabase*
+SSRCDatabase::StaticInstance(CountOperation count_operation)
+{
+  SSRCDatabase* impl =
+      GetStaticInstance<SSRCDatabase>(count_operation);
+  return impl;
+}
+
+// Acquire a reference to the singleton. Pair with ReturnSSRCDatabase().
+SSRCDatabase*
+SSRCDatabase::GetSSRCDatabase()
+{
+    return StaticInstance(kAddRef);
+}
+
+// Release a reference acquired with GetSSRCDatabase().
+void
+SSRCDatabase::ReturnSSRCDatabase()
+{
+    StaticInstance(kRelease);
+}
+
+// Generate a random SSRC that is not already present in the database,
+// record it, and return it. Thread-safe via _critSect.
+WebRtc_UWord32
+SSRCDatabase::CreateSSRC()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    WebRtc_UWord32 ssrc = GenerateRandom();
+
+#ifndef WEBRTC_NO_STL
+
+    // Re-roll until the candidate is unused, then record it.
+    while(_ssrcMap.find(ssrc) != _ssrcMap.end())
+    {
+        ssrc = GenerateRandom();
+    }
+    _ssrcMap[ssrc] = 0;
+
+#else
+    if(_sizeOfSSRC <= _numberOfSSRC)
+    {
+        // Vector is full; grow it by a fixed increment.
+        const int newSize = _sizeOfSSRC + 10;
+        WebRtc_UWord32* tempSSRCVector = new WebRtc_UWord32[newSize];
+        memcpy(tempSSRCVector, _ssrcVector, _sizeOfSSRC*sizeof(WebRtc_UWord32));
+        delete [] _ssrcVector;
+
+        _ssrcVector = tempSSRCVector;
+        _sizeOfSSRC = newSize;
+    }
+
+    // Make sure the candidate does not collide with an existing entry.
+    if(_ssrcVector)
+    {
+        for (int i=0; i<_numberOfSSRC; i++)
+        {
+            if (_ssrcVector[i] == ssrc)
+            {
+                // Collision: pick a new value and restart the scan from
+                // the beginning. i is set to -1 so the loop increment
+                // re-checks index 0; the previous code restarted at
+                // i = 0 (incremented to 1) and never re-checked the
+                // first entry against the regenerated SSRC.
+                ssrc = GenerateRandom();
+                i = -1;
+            }
+        }
+        //  add to database
+        _ssrcVector[_numberOfSSRC] = ssrc;
+        _numberOfSSRC++;
+    }
+#endif
+    return ssrc;
+}
+
+// Record an externally chosen SSRC so CreateSSRC() will avoid it.
+// Returns 0 on success.
+// NOTE(review): the STL path silently overwrites an existing entry and
+// returns 0, while the non-STL path returns -1 on a duplicate — confirm
+// which behavior callers rely on.
+WebRtc_Word32
+SSRCDatabase::RegisterSSRC(const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_critSect);
+
+#ifndef WEBRTC_NO_STL
+
+    _ssrcMap[ssrc] = 0;
+
+#else
+    if(_sizeOfSSRC <= _numberOfSSRC)
+    {
+        // allocate more space
+        const int newSize = _sizeOfSSRC + 10;
+        WebRtc_UWord32* tempSSRCVector = new WebRtc_UWord32[newSize];
+        memcpy(tempSSRCVector, _ssrcVector, _sizeOfSSRC*sizeof(WebRtc_UWord32));
+        delete [] _ssrcVector;
+
+        _ssrcVector = tempSSRCVector;
+        _sizeOfSSRC = newSize;
+    }
+    // check if in DB
+    if(_ssrcVector)
+    {
+        for (int i=0; i<_numberOfSSRC; i++)
+        {
+            if (_ssrcVector[i] == ssrc)
+            {
+                // we have a match
+                return -1;
+            }
+        }
+        //  add to database
+        _ssrcVector[_numberOfSSRC] = ssrc;
+        _numberOfSSRC++;
+    }
+#endif
+    return 0;
+}
+
+// Remove an SSRC from the database, making it available again.
+// Always returns 0, even if the SSRC was not present.
+WebRtc_Word32
+SSRCDatabase::ReturnSSRC(const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_critSect);
+
+#ifndef WEBRTC_NO_STL
+    _ssrcMap.erase(ssrc);
+
+#else
+    if(_ssrcVector)
+    {
+        for (int i=0; i<_numberOfSSRC; i++)
+        {
+            if (_ssrcVector[i] == ssrc)
+            {
+                // we have a match
+                // remove from database by moving the last entry into
+                // the vacated slot (order is not preserved)
+                _ssrcVector[i] = _ssrcVector[_numberOfSSRC-1];
+                _numberOfSSRC--;
+                break;
+            }
+        }
+    }
+#endif
+    return 0;
+}
+
+// Seed the C library PRNG and set up storage plus the lock. Constructed
+// only through the StaticInstance() singleton machinery.
+SSRCDatabase::SSRCDatabase()
+{
+    // we need to seed the random generator, otherwise we get 26500 each time, hardly a random value :)
+#ifdef _WIN32
+    srand(timeGetTime());
+#else
+    struct timeval tv;
+    struct timezone tz;
+    gettimeofday(&tv, &tz);
+    srand(tv.tv_usec);
+#endif
+
+#ifdef WEBRTC_NO_STL
+    // Start with room for 10 SSRCs; CreateSSRC/RegisterSSRC grow on demand.
+    _sizeOfSSRC = 10;
+    _numberOfSSRC = 0;
+    _ssrcVector = new WebRtc_UWord32[10];
+#endif
+    _critSect = CriticalSectionWrapper::CreateCriticalSection();
+
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s created", __FUNCTION__);
+}
+
+// Release the SSRC storage and the lock.
+SSRCDatabase::~SSRCDatabase()
+{
+#ifdef WEBRTC_NO_STL
+    delete [] _ssrcVector;
+#else
+    _ssrcMap.clear();
+#endif
+    delete _critSect;
+
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s deleted", __FUNCTION__);
+}
+
+// Produce a pseudo-random 32-bit SSRC, excluding the reserved values
+// 0 and 0xffffffff. Not cryptographically secure (rand()-based).
+WebRtc_UWord32 SSRCDatabase::GenerateRandom()
+{
+    // rand() is only guaranteed to deliver 15 bits (RAND_MAX may be as
+    // small as 32767, e.g. on Windows), so combine three shifted calls
+    // to cover all 32 bits. The previous (rand() << 16) + rand() form
+    // could never set bits 15 and 31 on such platforms.
+    WebRtc_UWord32 ssrc = 0;
+    do
+    {
+        ssrc  = static_cast<WebRtc_UWord32>(rand()) << 17;
+        ssrc ^= static_cast<WebRtc_UWord32>(rand()) << 9;
+        ssrc ^= static_cast<WebRtc_UWord32>(rand());
+
+    } while (ssrc == 0 || ssrc == 0xffffffff);
+
+    return ssrc;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/ssrc_database.h b/trunk/src/modules/rtp_rtcp/source/ssrc_database.h
new file mode 100644
index 0000000..370e549
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/ssrc_database.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
+
+#ifndef WEBRTC_NO_STL
+    #include <map>
+#endif
+
+#include "system_wrappers/interface/static_instance.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// Reference-counted process-wide registry of SSRCs in use, so that
+// independently created RTP modules do not pick colliding SSRCs.
+class SSRCDatabase
+{
+public:
+    // Acquire / release the singleton (reference counted).
+    static SSRCDatabase* GetSSRCDatabase();
+    static void ReturnSSRCDatabase();
+
+    // Generate, register and return an unused random SSRC.
+    WebRtc_UWord32 CreateSSRC();
+    // Record an externally chosen SSRC.
+    WebRtc_Word32 RegisterSSRC(const WebRtc_UWord32 ssrc);
+    // Release a previously registered SSRC.
+    WebRtc_Word32 ReturnSSRC(const WebRtc_UWord32 ssrc);
+
+protected:
+    SSRCDatabase();
+    virtual ~SSRCDatabase();
+
+    static SSRCDatabase* CreateInstance() { return new SSRCDatabase(); }
+
+private:
+    // Friend function to allow the SSRC destructor to be accessed from the
+    // template class.
+    friend SSRCDatabase* GetStaticInstance<SSRCDatabase>(
+        CountOperation count_operation);
+    static SSRCDatabase* StaticInstance(CountOperation count_operation);
+
+    // Random 32-bit SSRC, never 0 or 0xffffffff.
+    WebRtc_UWord32 GenerateRandom();
+
+#ifdef WEBRTC_NO_STL
+    // Growable array of registered SSRCs (used count / capacity).
+    int _numberOfSSRC;
+    int _sizeOfSSRC;
+
+    WebRtc_UWord32* _ssrcVector;
+#else
+    // Registered SSRCs; the mapped value is unused (always 0).
+    std::map<WebRtc_UWord32, WebRtc_UWord32>    _ssrcMap;
+#endif
+
+    // Guards all accesses to the containers above.
+    CriticalSectionWrapper* _critSect;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/tmmbr_help.cc b/trunk/src/modules/rtp_rtcp/source/tmmbr_help.cc
new file mode 100644
index 0000000..cf34e08
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/tmmbr_help.cc
@@ -0,0 +1,507 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tmmbr_help.h"
+
+#include "rtp_rtcp_config.h"
+
+namespace webrtc {
+// A TMMBRSet holds three parallel arrays (bitrate, packet overhead and
+// SSRC) describing a set of TMMBR tuples. Starts out empty.
+TMMBRSet::TMMBRSet() :
+    ptrTmmbrSet(0),
+    ptrPacketOHSet(0),
+    ptrSsrcSet(0),
+    sizeOfSet(0),
+    lengthOfSet(0)
+{
+}
+
+TMMBRSet::~TMMBRSet()
+{
+    // Release each parallel array and reset the set to its empty state.
+    delete [] ptrTmmbrSet;
+    ptrTmmbrSet = 0;
+    delete [] ptrPacketOHSet;
+    ptrPacketOHSet = 0;
+    delete [] ptrSsrcSet;
+    ptrSsrcSet = 0;
+    sizeOfSet = 0;
+    lengthOfSet = 0;
+}
+
+// Grow (never shrink) the parallel arrays to hold at least |minimumSize|
+// entries. NOTE: all entries are cleared and lengthOfSet reset to 0 on
+// every call — existing contents are never preserved, even when no
+// reallocation takes place.
+void
+TMMBRSet::VerifyAndAllocateSet(WebRtc_UWord32 minimumSize)
+{
+    if(minimumSize > sizeOfSet)
+    {
+        // make sure that our buffers are big enough
+        if(ptrTmmbrSet)
+        {
+            delete [] ptrTmmbrSet;
+            delete [] ptrPacketOHSet;
+            delete [] ptrSsrcSet;
+        }
+        ptrTmmbrSet = new WebRtc_UWord32[minimumSize];
+        ptrPacketOHSet = new WebRtc_UWord32[minimumSize];
+        ptrSsrcSet = new WebRtc_UWord32[minimumSize];
+        sizeOfSet = minimumSize;
+    }
+    // reset memory
+    for(WebRtc_UWord32 i = 0; i < sizeOfSet; i++)
+    {
+        ptrTmmbrSet[i] = 0;
+        ptrPacketOHSet[i] = 0;
+        ptrSsrcSet[i] = 0;
+    }
+    lengthOfSet = 0;
+}
+
+// Helper for computing TMMBR bounding sets. Owns its lock and two float
+// scratch arrays used by the bounding-set algorithm.
+TMMBRHelp::TMMBRHelp(const bool audio) :
+    _criticalSection(CriticalSectionWrapper::CreateCriticalSection()),
+    _audio(audio),
+    _candidateSet(),
+    _boundingSet(),
+    _boundingSetToSend(),
+    _ptrIntersectionBoundingSet(NULL),
+    _ptrMaxPRBoundingSet(NULL)
+{
+}
+
+TMMBRHelp::~TMMBRHelp()
+{
+    // Free the scratch arrays; the TMMBRSet members clean themselves up.
+    delete [] _ptrIntersectionBoundingSet;
+    delete [] _ptrMaxPRBoundingSet;
+    _ptrIntersectionBoundingSet = 0;
+    _ptrMaxPRBoundingSet = 0;
+    delete _criticalSection;
+}
+
+// Ensure the bounding set and its float scratch arrays can hold at least
+// |minimumSize| entries, clearing the bounding set in the process (see
+// TMMBRSet::VerifyAndAllocateSet). Old scratch contents are not copied.
+TMMBRSet*
+TMMBRHelp::VerifyAndAllocateBoundingSet(WebRtc_UWord32 minimumSize)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    if(minimumSize > _boundingSet.sizeOfSet)
+    {
+        // make sure that our buffers are big enough
+        if(_ptrIntersectionBoundingSet)
+        {
+            delete [] _ptrIntersectionBoundingSet;
+            delete [] _ptrMaxPRBoundingSet;
+        }
+        _ptrIntersectionBoundingSet = new float[minimumSize];
+        _ptrMaxPRBoundingSet = new float[minimumSize];
+    }
+    _boundingSet.VerifyAndAllocateSet(minimumSize);
+    return &_boundingSet;
+}
+
+// Accessor for the current bounding set (no locking).
+TMMBRSet*
+TMMBRHelp::BoundingSet()
+{
+    return &_boundingSet;
+}
+
+WebRtc_Word32
+TMMBRHelp::SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend,
+                                     const WebRtc_UWord32 maxBitrateKbit)
+{
+    // Store a copy of |boundingSetToSend|, capping each bitrate at
+    // |maxBitrateKbit| when that limit is non-zero. Passing NULL clears
+    // the stored set. Always returns 0.
+    CriticalSectionScoped lock(_criticalSection);
+
+    if (boundingSetToSend == NULL)
+    {
+        _boundingSetToSend.lengthOfSet = 0;
+        return 0;
+    }
+
+    VerifyAndAllocateBoundingSetToSend(boundingSetToSend->lengthOfSet);
+
+    for (WebRtc_UWord32 i = 0; i < boundingSetToSend->lengthOfSet; i++)
+    {
+        WebRtc_UWord32 bitrate = boundingSetToSend->ptrTmmbrSet[i];
+        // Apply the configured maximum bitrate, if any (0 = no limit).
+        if (maxBitrateKbit != 0 && bitrate > maxBitrateKbit)
+        {
+            bitrate = maxBitrateKbit;
+        }
+        _boundingSetToSend.ptrTmmbrSet[i]    = bitrate;
+        _boundingSetToSend.ptrPacketOHSet[i] = boundingSetToSend->ptrPacketOHSet[i];
+        _boundingSetToSend.ptrSsrcSet[i]     = boundingSetToSend->ptrSsrcSet[i];
+    }
+    _boundingSetToSend.lengthOfSet = boundingSetToSend->lengthOfSet;
+    return 0;
+}
+
+// Ensure the to-send set can hold |minimumSize| entries (clears it; see
+// TMMBRSet::VerifyAndAllocateSet). Always returns 0.
+WebRtc_Word32
+TMMBRHelp::VerifyAndAllocateBoundingSetToSend(WebRtc_UWord32 minimumSize)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    _boundingSetToSend.VerifyAndAllocateSet(minimumSize);
+    return 0;
+}
+
+// Ensure the candidate set can hold |minimumSize| entries (clears it)
+// and return it for the caller to fill in.
+TMMBRSet*
+TMMBRHelp::VerifyAndAllocateCandidateSet(WebRtc_UWord32 minimumSize)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    _candidateSet.VerifyAndAllocateSet(minimumSize);
+    return &_candidateSet;
+}
+
+// Accessor for the candidate set (no locking).
+TMMBRSet*
+TMMBRHelp::CandidateSet()
+{
+    return &_candidateSet;
+}
+
+// Accessor for the stored to-send bounding set (no locking).
+TMMBRSet*
+TMMBRHelp::BoundingSetToSend()
+{
+    return &_boundingSetToSend;
+}
+
+// Compute the TMMBR bounding set from the current candidate set.
+// On success, |boundingSet| is pointed at the internal bounding set and
+// the number of entries is returned; returns 0 (leaving |boundingSet|
+// untouched) when there are no candidates, and -1 on failure.
+WebRtc_Word32
+TMMBRHelp::FindTMMBRBoundingSet(TMMBRSet*& boundingSet)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    // Work on local variable, will be modified
+    TMMBRSet    candidateSet;
+    candidateSet.VerifyAndAllocateSet(_candidateSet.sizeOfSet);
+
+    // Number of set candidates
+    WebRtc_Word32 numSetCandidates = 0;
+    // Copy the non-empty entries (tmmbr != 0), keeping their indices.
+    for (WebRtc_UWord32 i = 0; i < _candidateSet.sizeOfSet; i++)
+    {
+        if(_candidateSet.ptrTmmbrSet[i])
+        {
+            numSetCandidates++;
+            candidateSet.ptrTmmbrSet[i]    = _candidateSet.ptrTmmbrSet[i];
+            candidateSet.ptrPacketOHSet[i] = _candidateSet.ptrPacketOHSet[i];
+            candidateSet.ptrSsrcSet[i]     = _candidateSet.ptrSsrcSet[i];
+        }
+        else
+        {
+            // make sure this is zero if tmmbr = 0
+            _candidateSet.ptrPacketOHSet[i] = 0;
+        }
+    }
+    candidateSet.lengthOfSet = numSetCandidates;
+
+    // Find bounding set
+    WebRtc_UWord32 numBoundingSet = 0;
+    if (numSetCandidates > 0)
+    {
+        // NOTE(review): the overload returns WebRtc_Word32; a -1 wraps in
+        // this unsigned variable and is caught by the range check below —
+        // confirm this is the intended error path.
+        numBoundingSet =  FindTMMBRBoundingSet(numSetCandidates, candidateSet);
+        if(numBoundingSet < 1 || (numBoundingSet > _candidateSet.sizeOfSet))
+        {
+            return -1;
+        }
+        boundingSet = &_boundingSet;
+    }
+    return numBoundingSet;
+}
+
+
+WebRtc_Word32
+TMMBRHelp::FindTMMBRBoundingSet(WebRtc_Word32 numCandidates, TMMBRSet& candidateSet)
+{
+    // Computes the TMMBR bounding set: each candidate is treated as a line
+    // bitrate(packet rate) whose slope is its per-packet overhead, and the
+    // algorithm keeps the lower envelope of those lines (cf. the TMMBR
+    // bounding-set procedure in RFC 5104). The result is stored in
+    // _boundingSet (and its lengthOfSet); the member count is returned.
+    // candidateSet is consumed destructively: entries are zeroed as they
+    // are processed.
+    CriticalSectionScoped lock(_criticalSection);
+
+    WebRtc_UWord32 numBoundingSet = 0;
+    VerifyAndAllocateBoundingSet(candidateSet.sizeOfSet);
+
+    if (numCandidates == 1)
+    {
+        // Single candidate: it is the bounding set by itself.
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet; i++)
+        {
+            if(candidateSet.ptrTmmbrSet[i] > 0)
+            {
+                _boundingSet.ptrTmmbrSet[numBoundingSet]    = candidateSet.ptrTmmbrSet[i];
+                _boundingSet.ptrPacketOHSet[numBoundingSet] = candidateSet.ptrPacketOHSet[i];
+                _boundingSet.ptrSsrcSet[numBoundingSet]     = candidateSet.ptrSsrcSet[i];
+                numBoundingSet++;
+            }
+        }
+        if (numBoundingSet != 1)
+        {
+            // NOTE(review): numBoundingSet is WebRtc_UWord32, so -1 wraps to
+            // 0xFFFFFFFF. It converts back to -1 in the WebRtc_Word32 return
+            // below, but it is also stored unmodified into
+            // _boundingSet.lengthOfSet at the end of this function -- confirm
+            // callers treat that as an error before reading the set.
+            numBoundingSet = -1;
+        }
+    } else
+    {
+        // 1. Sort by increasing packetOH
+        // (bubble sort over the three parallel arrays, keeping them in sync)
+        WebRtc_UWord32 temp;
+        for (int i = candidateSet.sizeOfSet - 1; i >= 0; i--)
+        {
+            for (int j = 1; j <= i; j++)
+            {
+                if (candidateSet.ptrPacketOHSet[j-1] > candidateSet.ptrPacketOHSet[j])
+                {
+                    temp = candidateSet.ptrPacketOHSet[j-1];
+                    candidateSet.ptrPacketOHSet[j-1] = candidateSet.ptrPacketOHSet[j];
+                    candidateSet.ptrPacketOHSet[j] = temp;
+                    temp = candidateSet.ptrTmmbrSet[j-1];
+                    candidateSet.ptrTmmbrSet[j-1] = candidateSet.ptrTmmbrSet[j];
+                    candidateSet.ptrTmmbrSet[j] = temp;
+                    temp = candidateSet.ptrSsrcSet[j-1];
+                    candidateSet.ptrSsrcSet[j-1] = candidateSet.ptrSsrcSet[j];
+                    candidateSet.ptrSsrcSet[j] = temp;
+                }
+            }
+        }
+        // 2. For tuples with same OH, keep the one w/ the lowest bitrate
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet; i++)
+        {
+            if (candidateSet.ptrTmmbrSet[i] > 0)
+            {
+                // get min bitrate for packets w/ same OH
+                WebRtc_UWord32 currentPacketOH = candidateSet.ptrPacketOHSet[i];
+                WebRtc_UWord32 currentMinTMMBR = candidateSet.ptrTmmbrSet[i];
+                WebRtc_UWord32 currentMinIndexTMMBR = i;
+                for (WebRtc_UWord32 j = i+1; j < candidateSet.sizeOfSet; j++)
+                {
+                    if(candidateSet.ptrPacketOHSet[j] == currentPacketOH)
+                    {
+                        if(candidateSet.ptrTmmbrSet[j] < currentMinTMMBR)
+                        {
+                            currentMinTMMBR = candidateSet.ptrTmmbrSet[j];
+                            currentMinIndexTMMBR = j;
+                        }
+                    }
+                }
+                // keep lowest bitrate
+                for (WebRtc_UWord32 j = 0; j < candidateSet.sizeOfSet; j++)
+                {
+                    if(candidateSet.ptrPacketOHSet[j] == currentPacketOH && j != currentMinIndexTMMBR)
+                    {
+                        candidateSet.ptrTmmbrSet[j]    = 0;
+                        candidateSet.ptrPacketOHSet[j] = 0;
+                        candidateSet.ptrSsrcSet[j]     = 0;
+                        numCandidates--;
+                    }
+                }
+            }
+        }
+        // 3. Select and remove tuple w/ lowest tmmbr. (If more than 1, choose the one w/ highest OH).
+        WebRtc_UWord32 minTMMBR = 0;
+        WebRtc_UWord32 minIndexTMMBR = 0;
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet; i++)
+        {
+            if (candidateSet.ptrTmmbrSet[i] > 0)
+            {
+                minTMMBR = candidateSet.ptrTmmbrSet[i];
+                minIndexTMMBR = i;
+                break;
+            }
+        }
+
+        // <= keeps the last (highest-OH) entry on ties, since the set is
+        // sorted by increasing OH after step 1.
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet; i++)
+        {
+            if (candidateSet.ptrTmmbrSet[i] > 0 && candidateSet.ptrTmmbrSet[i] <= minTMMBR)
+            {
+                // get min bitrate
+                minTMMBR = candidateSet.ptrTmmbrSet[i];
+                minIndexTMMBR = i;
+            }
+        }
+        // first member of selected list
+        _boundingSet.ptrTmmbrSet[numBoundingSet]    = candidateSet.ptrTmmbrSet[minIndexTMMBR];
+        _boundingSet.ptrPacketOHSet[numBoundingSet] = candidateSet.ptrPacketOHSet[minIndexTMMBR];
+        _boundingSet.ptrSsrcSet[numBoundingSet]     = candidateSet.ptrSsrcSet[minIndexTMMBR];
+        // set intersection value
+        _ptrIntersectionBoundingSet[numBoundingSet] = 0;
+        // calculate its maximum packet rate (where its line crosses x-axis)
+        _ptrMaxPRBoundingSet[numBoundingSet] = _boundingSet.ptrTmmbrSet[numBoundingSet]*1000 / float(8*_boundingSet.ptrPacketOHSet[numBoundingSet]);
+        numBoundingSet++;
+        // remove from candidate list
+        candidateSet.ptrTmmbrSet[minIndexTMMBR]    = 0;
+        candidateSet.ptrPacketOHSet[minIndexTMMBR] = 0;
+        candidateSet.ptrSsrcSet[minIndexTMMBR]     = 0;
+        numCandidates--;
+
+        // 4. Discard from candidate list all tuple w/ lower OH (next tuple must be steeper)
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet; i++)
+        {
+            if(candidateSet.ptrTmmbrSet[i] > 0 && candidateSet.ptrPacketOHSet[i] < _boundingSet.ptrPacketOHSet[0])
+            {
+                candidateSet.ptrTmmbrSet[i]    = 0;
+                candidateSet.ptrPacketOHSet[i] = 0;
+                candidateSet.ptrSsrcSet[i]     = 0;
+                numCandidates--;
+            }
+        }
+
+        if (numCandidates == 0)
+        {
+            _boundingSet.lengthOfSet = numBoundingSet;
+            return numBoundingSet;
+        }
+
+        bool getNewCandidate = true;
+        int curCandidateTMMBR = 0;
+        int curCandidateIndex = 0;
+        int curCandidatePacketOH = 0;
+        int curCandidateSSRC = 0;
+        do
+        {
+            if (getNewCandidate)
+            {
+                // 5. Remove first remaining tuple from candidate list
+                for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet; i++)
+                {
+                    if (candidateSet.ptrTmmbrSet[i] > 0)
+                    {
+                        curCandidateTMMBR    = candidateSet.ptrTmmbrSet[i];
+                        curCandidatePacketOH = candidateSet.ptrPacketOHSet[i];
+                        curCandidateSSRC     = candidateSet.ptrSsrcSet[i];
+                        curCandidateIndex    = i;
+                        candidateSet.ptrTmmbrSet[curCandidateIndex]    = 0;
+                        candidateSet.ptrPacketOHSet[curCandidateIndex] = 0;
+                        candidateSet.ptrSsrcSet[curCandidateIndex]     = 0;
+                        break;
+                    }
+                }
+            }
+
+            // 6. Calculate packet rate and intersection of the current line with line of last tuple in selected list
+            // NOTE(review): divides by the packet-overhead difference. Step 2
+            // removed equal-OH candidates, so the denominator should be
+            // non-zero, but a zero here would be a division by zero --
+            // confirm this invariant holds on every path.
+            float packetRate = float(curCandidateTMMBR - _boundingSet.ptrTmmbrSet[numBoundingSet-1])*1000 / (8*(curCandidatePacketOH - _boundingSet.ptrPacketOHSet[numBoundingSet-1]));
+
+            // 7. If the packet rate is equal or lower than intersection of last tuple in selected list,
+            //    remove last tuple in selected list & go back to step 6
+            if(packetRate <= _ptrIntersectionBoundingSet[numBoundingSet-1])
+            {
+                // remove last tuple and goto step 6
+                numBoundingSet--;
+                _boundingSet.ptrTmmbrSet[numBoundingSet]    = 0;
+                _boundingSet.ptrPacketOHSet[numBoundingSet] = 0;
+                _boundingSet.ptrSsrcSet[numBoundingSet]     = 0;
+                _ptrIntersectionBoundingSet[numBoundingSet] = 0;
+                _ptrMaxPRBoundingSet[numBoundingSet]        = 0;
+                getNewCandidate = false;
+            } else
+            {
+                // 8. If packet rate is lower than maximum packet rate of last tuple in selected list, add current tuple to selected list
+                if (packetRate < _ptrMaxPRBoundingSet[numBoundingSet-1])
+                {
+                    _boundingSet.ptrTmmbrSet[numBoundingSet]    = curCandidateTMMBR;
+                    _boundingSet.ptrPacketOHSet[numBoundingSet] = curCandidatePacketOH;
+                    _boundingSet.ptrSsrcSet[numBoundingSet]     = curCandidateSSRC;
+                    _ptrIntersectionBoundingSet[numBoundingSet] = packetRate;
+                    _ptrMaxPRBoundingSet[numBoundingSet] = _boundingSet.ptrTmmbrSet[numBoundingSet]*1000 / float(8*_boundingSet.ptrPacketOHSet[numBoundingSet]);
+                    numBoundingSet++;
+                }
+                numCandidates--;
+                getNewCandidate = true;
+            }
+
+            // 9. Go back to step 5 if any tuple remains in candidate list
+        } while (numCandidates > 0);
+    }
+    _boundingSet.lengthOfSet = numBoundingSet;
+    return numBoundingSet;
+}
+
+bool
+TMMBRHelp::IsOwner(const WebRtc_UWord32 ssrc,
+                   const WebRtc_UWord32 length) const
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    // An empty bounding set owns nothing.
+    if (length == 0)
+    {
+        return false;
+    }
+    // Scan at most 'length' entries, clamped to the allocated set size.
+    const WebRtc_UWord32 numToCheck =
+        (length < _boundingSet.sizeOfSet) ? length : _boundingSet.sizeOfSet;
+    for (WebRtc_UWord32 idx = 0; idx < numToCheck; ++idx)
+    {
+        if (ssrc == _boundingSet.ptrSsrcSet[idx])
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+WebRtc_Word32
+TMMBRHelp::CalcMinMaxBitRate(const WebRtc_UWord32 totalPacketRate,
+                             const WebRtc_UWord32 lengthOfBoundingSet,
+                             WebRtc_UWord32& minBitrateKbit,
+                             WebRtc_UWord32& maxBitrateKbit) const
+{
+    // Computes the min/max net bitrate over all active TMMBR candidates,
+    // compensating each candidate for its per-packet overhead at the given
+    // total packet rate and clamping the result to the media-specific floor.
+    // Returns 0 on success, -1 when there is no usable candidate.
+    CriticalSectionScoped lock(_criticalSection);
+
+    if (lengthOfBoundingSet <= 0 || _candidateSet.sizeOfSet == 0)
+    {
+        // empty bounding set
+        return -1;
+    }
+
+    minBitrateKbit = 0xFFFFFFFF;
+    maxBitrateKbit = 0;
+    bool foundCandidate = false;
+
+    for (WebRtc_UWord32 i = 0; i < _candidateSet.sizeOfSet; ++i)
+    {
+        if (_candidateSet.ptrTmmbrSet[i])
+        {
+            foundCandidate = true;
+            // Net bitrate = TMMBR minus the overhead bits (OH << 3) spent
+            // at the current packet rate, rounded to the nearest unit.
+            WebRtc_Word32 curNetBitRate = static_cast<WebRtc_Word32>((_candidateSet.ptrTmmbrSet[i]*1000.0
+                            - (totalPacketRate * (_candidateSet.ptrPacketOHSet[i] << 3)))/1000 + 0.5);
+
+            if (curNetBitRate < 0)
+            {
+                // Could be negative for a large packet rate; fall back to
+                // the configured minimum for the media type.
+                curNetBitRate = _audio ? MIN_AUDIO_BW_MANAGEMENT_BITRATE
+                                       : MIN_VIDEO_BW_MANAGEMENT_BITRATE;
+            }
+            if (WebRtc_UWord32(curNetBitRate) < minBitrateKbit)
+            {
+                minBitrateKbit = curNetBitRate;
+            }
+        }
+    }
+
+    if (!foundCandidate)
+    {
+        // Fix: previously the 0xFFFFFFFF init value leaked out as a
+        // "bitrate" (with return value 0) when no entry in the candidate
+        // set was active. Report failure instead.
+        minBitrateKbit = 0;
+        maxBitrateKbit = 0;
+        return -1;
+    }
+
+    maxBitrateKbit = minBitrateKbit;
+    if (maxBitrateKbit == 0)
+    {
+        // A zero bitrate is unusable.
+        return -1;
+    }
+
+    // Clamp both values to the media-specific floor. (The previous
+    // "maxBitrateKbit < minBitrateKbit" test was dead code: max had just
+    // been assigned from min.)
+    const WebRtc_UWord32 floorKbit = _audio ? MIN_AUDIO_BW_MANAGEMENT_BITRATE
+                                            : MIN_VIDEO_BW_MANAGEMENT_BITRATE;
+    if (minBitrateKbit < floorKbit)
+    {
+        minBitrateKbit = floorKbit;
+    }
+    if (maxBitrateKbit < floorKbit)
+    {
+        maxBitrateKbit = floorKbit;
+    }
+
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/tmmbr_help.h b/trunk/src/modules/rtp_rtcp/source/tmmbr_help.h
new file mode 100644
index 0000000..35704fe
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/tmmbr_help.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
+
+#include "typedefs.h"
+
+#include "critical_section_wrapper.h"
+
+#ifndef NULL
+    #define NULL    0
+#endif
+
+namespace webrtc {
+// A set of TMMBR tuples stored as three parallel arrays indexed together:
+// requested bitrate, per-packet overhead, and sender SSRC.
+class TMMBRSet
+{
+public:
+    TMMBRSet();
+    ~TMMBRSet();
+
+    // Ensures capacity for at least minimumSize entries (see the .cc for
+    // allocation/zeroing behavior).
+    void VerifyAndAllocateSet(WebRtc_UWord32 minimumSize);
+
+    WebRtc_UWord32*   ptrTmmbrSet;      // requested bitrate per entry
+    WebRtc_UWord32*   ptrPacketOHSet;   // per-packet overhead per entry
+    WebRtc_UWord32*   ptrSsrcSet;       // SSRC per entry
+    WebRtc_UWord32    sizeOfSet;        // allocated capacity
+    WebRtc_UWord32    lengthOfSet;      // number of entries in use
+};
+
+// Helper that maintains the TMMBR candidate set and computes the bounding
+// set used for RTCP bandwidth management.
+class TMMBRHelp
+{
+public:
+    TMMBRHelp(const bool audio);
+    virtual ~TMMBRHelp();
+
+    TMMBRSet* BoundingSet(); // used for debuging
+    TMMBRSet* CandidateSet();
+    TMMBRSet* BoundingSetToSend();
+
+    // Ensures the candidate set can hold at least minimumSize entries and
+    // returns it.
+    TMMBRSet* VerifyAndAllocateCandidateSet(const WebRtc_UWord32 minimumSize);
+    // Computes the bounding set from the current candidates; on success
+    // boundingSet is pointed at the internal result. Returns the member
+    // count (0 if there are no candidates), or -1 on error.
+    WebRtc_Word32 FindTMMBRBoundingSet(TMMBRSet*& boundingSet);
+    WebRtc_Word32 SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend,
+                                          const WebRtc_UWord32 maxBitrateKbit);
+
+    // True if ssrc occurs among the first 'length' bounding-set entries.
+    bool        IsOwner(const WebRtc_UWord32 ssrc,
+                        const WebRtc_UWord32 length) const;
+
+    // Derives min/max net bitrates from the candidate set, compensating for
+    // packet overhead at totalPacketRate. Returns 0 on success, -1 on error.
+    WebRtc_Word32 CalcMinMaxBitRate(const WebRtc_UWord32 totalPacketRate,
+                                  const WebRtc_UWord32 lengthOfBoundingSet,
+                                  WebRtc_UWord32& minBitrateKbit,
+                                  WebRtc_UWord32& maxBitrateKbit) const;
+
+protected:
+    TMMBRSet*   VerifyAndAllocateBoundingSet(WebRtc_UWord32 minimumSize);
+    WebRtc_Word32 VerifyAndAllocateBoundingSetToSend(WebRtc_UWord32 minimumSize);
+
+    // Core bounding-set computation; consumes candidateSet destructively.
+    WebRtc_Word32 FindTMMBRBoundingSet(WebRtc_Word32 numCandidates, TMMBRSet& candidateSet);
+
+private:
+    CriticalSectionWrapper* _criticalSection; // guards all state below
+    const bool              _audio;           // selects audio vs video minimum bitrate
+    TMMBRSet                _candidateSet;
+    TMMBRSet                _boundingSet;
+    TMMBRSet                _boundingSetToSend;
+
+    // Per bounding-set member: packet rate where its line intersects the
+    // previous member's line, and the member's maximum packet rate.
+    float*                  _ptrIntersectionBoundingSet;
+    float*                  _ptrMaxPRBoundingSet;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/transmission_bucket.cc b/trunk/src/modules/rtp_rtcp/source/transmission_bucket.cc
new file mode 100644
index 0000000..e79d227
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/transmission_bucket.cc
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "transmission_bucket.h"
+
+#include <assert.h>
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+
+// Creates an empty bucket with a zeroed byte budget; owns its lock object.
+TransmissionBucket::TransmissionBucket()
+  : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+    accumulator_(0),
+    bytes_rem_total_(0),
+    bytes_rem_interval_(0),
+    packets_(),
+    first_(true) {
+}
+
+// Releases the lock object; the packet vector cleans itself up (the
+// explicit clear() is redundant but harmless).
+TransmissionBucket::~TransmissionBucket() {
+  packets_.clear();
+  delete critsect_;
+}
+
+// Returns the bucket to its freshly-constructed state (lock is retained).
+void TransmissionBucket::Reset() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  accumulator_ = 0;
+  bytes_rem_total_ = 0;
+  bytes_rem_interval_ = 0;
+  packets_.clear();
+  first_ = true;
+}
+
+// Queues a packet (by sequence number) and adds its size to the byte
+// accumulator.
+// NOTE(review): num_bytes is uint32_t but Packet's stored length field may
+// be narrower (see transmission_bucket.h) -- confirm sizes >= 64 KB are
+// not silently truncated.
+void TransmissionBucket::Fill(const uint16_t seq_num,
+                              const uint32_t num_bytes) {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  accumulator_ += num_bytes;
+
+  Packet p(seq_num, num_bytes);
+  packets_.push_back(p);
+}
+
+// True when no packets are queued.
+bool TransmissionBucket::Empty() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  return packets_.empty();
+}
+
+// Refreshes the pacing budget for a new interval of delta_time_ms at
+// target_bitrate_kbps. Called once per pacing tick, presumably -- confirm
+// against the caller.
+void TransmissionBucket::UpdateBytesPerInterval(
+    const uint32_t delta_time_ms,
+    const uint16_t target_bitrate_kbps) {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+
+  // Interval budget: target rate converted to bytes over delta_time_ms,
+  // with a 5% margin (float product implicitly truncated to uint32_t).
+  const float kMargin = 1.05f;
+  uint32_t bytes_per_interval = 
+      kMargin * (target_bitrate_kbps * delta_time_ms / 8);
+
+  // If the previous interval overspent, carry the (negative) remainder
+  // into the new budget; otherwise start the interval fresh.
+  if (bytes_rem_interval_ < 0) {
+    bytes_rem_interval_ += bytes_per_interval;
+  } else {
+    bytes_rem_interval_ = bytes_per_interval;
+  }
+
+  // While packets are still queued, let the total budget accumulate so a
+  // pending frame can eventually fit; otherwise reset it.
+  if (accumulator_) {
+    bytes_rem_total_ += bytes_per_interval;
+    return;
+  }
+  bytes_rem_total_ = bytes_per_interval;
+}
+
+// Pops the head-of-line packet if the pacing budget allows it. Returns the
+// packet's sequence number, or -1 if nothing can be sent right now.
+int32_t TransmissionBucket::GetNextPacket() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+
+  if (packets_.empty()) {
+    // Nothing queued. Testing the vector directly (rather than
+    // accumulator_) guarantees the dereference below is safe.
+    return -1;
+  }
+
+  std::vector<Packet>::const_iterator it_begin = packets_.begin();
+  const uint32_t num_bytes = (*it_begin).length_;
+  const uint16_t seq_num = (*it_begin).sequence_number_;
+
+  if (first_) {
+    // Always ok to transmit the first packet, but its bytes must still be
+    // accounted for; otherwise accumulator_ stays permanently non-zero and
+    // the queue-empty bookkeeping breaks.
+    first_ = false;
+    assert(accumulator_ >= num_bytes);
+    accumulator_ -= num_bytes;
+    packets_.erase(packets_.begin());
+    return seq_num;
+  }
+
+  const float kFrameComplete = 0.80f;
+  if (num_bytes * kFrameComplete > bytes_rem_total_) {
+    // Packet does not fit in the accumulated budget.
+    return -1;
+  }
+
+  if (bytes_rem_interval_ <= 0) {
+    // All bytes consumed for this interval.
+    return -1;
+  }
+
+  // Ok to transmit packet; charge both budgets and the accumulator.
+  bytes_rem_total_ -= num_bytes;
+  bytes_rem_interval_ -= num_bytes;
+
+  assert(accumulator_ >= num_bytes);
+  accumulator_ -= num_bytes;
+
+  packets_.erase(packets_.begin());
+  return seq_num;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/transmission_bucket.h b/trunk/src/modules/rtp_rtcp/source/transmission_bucket.h
new file mode 100644
index 0000000..79e45d8
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/transmission_bucket.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
+
+#include <vector>
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+// Pacing helper: queues packets and meters them out against per-interval
+// and accumulated byte budgets.
+class TransmissionBucket {
+ public:
+  TransmissionBucket();
+  ~TransmissionBucket();
+
+  // Resets members to initial state.
+  void Reset();
+
+  // Adds packet to be sent.
+  void Fill(const uint16_t seq_num, const uint32_t num_bytes);
+
+  // Returns true if there is no packet to be sent.
+  bool Empty();
+
+  // Updates the number of bytes that can be sent for the next time interval.
+  void UpdateBytesPerInterval(const uint32_t delta_time_in_ms,
+                              const uint16_t target_bitrate_kbps);
+
+  // Checks if next packet in line can be transmitted. Returns the sequence
+  // number of the packet on success, -1 otherwise. The packet is removed from
+  // the vector on success.
+  int32_t GetNextPacket();
+
+ private:
+   // Queued packet. length_ is uint32_t to match Fill()'s num_bytes
+   // parameter; the previous uint16_t silently truncated sizes >= 64 KB.
+   struct Packet {
+     Packet(uint16_t sequence_number, uint32_t length_in_bytes)
+       : sequence_number_(sequence_number),
+         length_(length_in_bytes) {
+     }
+     uint16_t sequence_number_;
+     uint32_t length_;
+   };
+
+   CriticalSectionWrapper* critsect_;
+   uint32_t accumulator_;         // bytes added via Fill(), not yet sent
+   int32_t bytes_rem_total_;      // accumulated send budget across intervals
+   int32_t bytes_rem_interval_;   // send budget for the current interval
+   std::vector<Packet> packets_;  // FIFO of queued packets
+   bool first_;                   // true until the first packet is sent
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/transmission_bucket_test.cc b/trunk/src/modules/rtp_rtcp/source/transmission_bucket_test.cc
new file mode 100644
index 0000000..a8c9247
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/transmission_bucket_test.cc
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the TransmissionBucket.
+ */
+
+#include <gtest/gtest.h>
+
+#include "transmission_bucket.h"
+
+namespace webrtc {
+
+// Test fixture giving each test a fresh TransmissionBucket instance.
+class TransmissionBucketTest : public ::testing::Test {
+ protected:  
+  TransmissionBucket send_bucket_;
+};
+
+// Fill() queues a packet, making the bucket non-empty.
+TEST_F(TransmissionBucketTest, Fill) {
+  EXPECT_TRUE(send_bucket_.Empty());
+  send_bucket_.Fill(1, 100);
+  EXPECT_FALSE(send_bucket_.Empty());
+}
+
+// Reset() empties a previously filled bucket.
+TEST_F(TransmissionBucketTest, Reset) {
+  send_bucket_.Fill(1, 100);
+  EXPECT_FALSE(send_bucket_.Empty());
+  send_bucket_.Reset();
+  EXPECT_TRUE(send_bucket_.Empty());
+}
+
+// With no budget configured, only the first packet is ever released.
+TEST_F(TransmissionBucketTest, GetNextPacket) {
+  EXPECT_EQ(-1, send_bucket_.GetNextPacket());    // empty
+  send_bucket_.Fill(1234, 100);
+  EXPECT_EQ(1234, send_bucket_.GetNextPacket());  // first packet ok
+  send_bucket_.Fill(1235, 100);
+  EXPECT_EQ(-1, send_bucket_.GetNextPacket());    // packet does not fit
+}
+
+// A budget of 1 ms at 800 kbps (105 bytes with margin) admits three 50-byte
+// packets (first is free), then blocks a fourth once the budget is spent.
+TEST_F(TransmissionBucketTest, UpdateBytesPerInterval) {
+  const int delta_time_ms = 1;
+  const int target_bitrate_kbps = 800;
+  send_bucket_.UpdateBytesPerInterval(delta_time_ms, target_bitrate_kbps);
+
+  send_bucket_.Fill(1234, 50);
+  send_bucket_.Fill(1235, 50);
+  send_bucket_.Fill(1236, 50);
+
+  EXPECT_EQ(1234, send_bucket_.GetNextPacket());  // first packet ok
+  EXPECT_EQ(1235, send_bucket_.GetNextPacket());  // ok
+  EXPECT_EQ(1236, send_bucket_.GetNextPacket());  // ok
+  EXPECT_TRUE(send_bucket_.Empty());
+
+  send_bucket_.Fill(1237, 50);
+  EXPECT_EQ(-1, send_bucket_.GetNextPacket());    // packet does not fit
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/rtp_rtcp/source/video_codec_information.h b/trunk/src/modules/rtp_rtcp/source/video_codec_information.h
new file mode 100644
index 0000000..4364f0b
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/video_codec_information.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
+
+#include "rtp_rtcp_config.h"
+#include "rtp_utility.h"
+
+namespace webrtc {
+// Abstract interface for per-codec state attached to a video stream.
+class VideoCodecInformation
+{
+public:
+    // Reset internal state; semantics defined by the implementation.
+    virtual void Reset() = 0;
+
+    // The RTP video codec type this object describes.
+    virtual RtpVideoCodecTypes Type() = 0;
+    virtual ~VideoCodecInformation(){};
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator.cc b/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
new file mode 100644
index 0000000..6d5bb75
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
@@ -0,0 +1,268 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+
+#include <assert.h>
+#include <stdlib.h>  // NULL
+
+#include <algorithm>
+#include <limits>
+
+namespace webrtc {
+
+// A node represents a partial packetization: size_vector holds the sizes of
+// the num_partitions partitions not yet assigned, and this_size is the byte
+// count of the packet currently being built along this branch.
+PartitionTreeNode::PartitionTreeNode(PartitionTreeNode* parent,
+                                     const int* size_vector,
+                                     int num_partitions,
+                                     int this_size)
+    : parent_(parent),
+      this_size_(this_size),
+      size_vector_(size_vector),
+      num_partitions_(num_partitions),
+      max_parent_size_(0),
+      min_parent_size_(std::numeric_limits<int>::max()),
+      packet_start_(false) {
+  assert(num_partitions >= 0);
+  children_[kLeftChild] = NULL;
+  children_[kRightChild] = NULL;
+}
+
+// Builds the root of the search tree: the first partition opens the first
+// packet, and the remaining sizes become the candidate vector.
+// NOTE(review): reads size_vector[0], so num_partitions must be >= 1 --
+// confirm callers guarantee this.
+PartitionTreeNode* PartitionTreeNode::CreateRootNode(const int* size_vector,
+                                                     int num_partitions) {
+  PartitionTreeNode* root_node =
+      new PartitionTreeNode(NULL, &size_vector[1], num_partitions - 1,
+                            size_vector[0]);
+  root_node->set_packet_start(true);
+  return root_node;
+}
+
+// Recursively deletes both subtrees (delete on NULL is a no-op).
+PartitionTreeNode::~PartitionTreeNode() {
+  delete children_[kLeftChild];
+  delete children_[kRightChild];
+}
+
+int PartitionTreeNode::Cost(int penalty) {
+  assert(penalty >= 0);
+  // Cost = spread between the largest and smallest packet produced along
+  // this branch, plus a fixed penalty per packet.
+  const int largest = std::max(max_parent_size_, this_size_);
+  int spread;
+  if (num_partitions_ == 0) {
+    // Solution node: the packet under construction is complete, so it also
+    // participates in the minimum.
+    spread = largest - std::min(min_parent_size_, this_size_);
+  } else {
+    // Interior node: only finished packets bound the minimum (this is a
+    // lower bound on any solution below this node).
+    spread = largest - min_parent_size_;
+  }
+  return spread + NumPackets() * penalty;
+}
+
+// Expands this node: the "left" child appends the next partition to the
+// packet under construction (only if the result stays within max_size);
+// the "right" child starts a new packet with that partition. Returns true
+// if at least one child was created.
+bool PartitionTreeNode::CreateChildren(int max_size) {
+  assert(max_size > 0);
+  bool children_created = false;
+  if (num_partitions_ > 0) {
+    if (this_size_ + size_vector_[0] <= max_size) {
+      assert(!children_[kLeftChild]);
+      children_[kLeftChild] =
+          new PartitionTreeNode(this,
+                                &size_vector_[1],
+                                num_partitions_ - 1,
+                                this_size_ + size_vector_[0]);
+      children_[kLeftChild]->set_max_parent_size(max_parent_size_);
+      children_[kLeftChild]->set_min_parent_size(min_parent_size_);
+      // "Left" child is continuation of same packet.
+      children_[kLeftChild]->set_packet_start(false);
+      children_created = true;
+    }
+    if (this_size_ > 0) {
+      assert(!children_[kRightChild]);
+      children_[kRightChild] = new PartitionTreeNode(this,
+                                                     &size_vector_[1],
+                                                     num_partitions_ - 1,
+                                                     size_vector_[0]);
+      // Starting a new packet finalizes the current one, so fold its size
+      // into the min/max seen so far along this branch.
+      children_[kRightChild]->set_max_parent_size(
+          std::max(max_parent_size_, this_size_));
+      children_[kRightChild]->set_min_parent_size(
+          std::min(min_parent_size_, this_size_));
+      // "Right" child starts a new packet.
+      children_[kRightChild]->set_packet_start(true);
+      children_created = true;
+    }
+  }
+  return children_created;
+}
+
+int PartitionTreeNode::NumPackets() {
+  // Walk up to the root, counting one packet for the root itself (the root
+  // is a "right" child by definition) and one for every ancestor step that
+  // started a new packet (i.e. every node that is not its parent's "left"
+  // child).
+  int packets = 1;
+  PartitionTreeNode* node = this;
+  while (node->parent_ != NULL) {
+    if (node->parent_->children_[kLeftChild] != node) {
+      ++packets;
+    }
+    node = node->parent_;
+  }
+  return packets;
+}
+
+// Best-first branch-and-bound search: expands this node, recurses into the
+// cheaper child first, and only explores the other child if its cost lower
+// bound beats the best solution found so far. Returns the cheapest solution
+// node reachable from here.
+PartitionTreeNode* PartitionTreeNode::GetOptimalNode(int max_size,
+                                                     int penalty) {
+  CreateChildren(max_size);
+  PartitionTreeNode* left = children_[kLeftChild];
+  PartitionTreeNode* right = children_[kRightChild];
+  if ((left == NULL) && (right == NULL)) {
+    // This is a solution node; return it.
+    return this;
+  } else if (left == NULL) {
+    // One child empty, return the other.
+    return right->GetOptimalNode(max_size, penalty);
+  } else if (right == NULL) {
+    // One child empty, return the other.
+    return left->GetOptimalNode(max_size, penalty);
+  } else {
+    PartitionTreeNode* first;
+    PartitionTreeNode* second;
+    if (left->Cost(penalty) <= right->Cost(penalty)) {
+      first = left;
+      second = right;
+    } else {
+      first = right;
+      second = left;
+    }
+    first = first->GetOptimalNode(max_size, penalty);
+    // Prune: "second"'s cost is a lower bound, so it can only contain a
+    // better solution if that bound does not exceed "first"'s actual cost.
+    if (second->Cost(penalty) <= first->Cost(penalty)) {
+      second = second->GetOptimalNode(max_size, penalty);
+      // Compare cost estimate for "second" with actual cost for "first".
+      if (second->Cost(penalty) < first->Cost(penalty)) {
+        return second;
+      }
+    }
+    return first;
+  }
+}
+
+// Copies the sizes of partitions [first_partition_idx, last_partition_idx]
+// out of the fragmentation header and builds the root of the search tree.
+Vp8PartitionAggregator::Vp8PartitionAggregator(
+    const RTPFragmentationHeader& fragmentation,
+    int first_partition_idx, int last_partition_idx)
+    : root_(NULL),
+      num_partitions_(last_partition_idx - first_partition_idx + 1),
+      size_vector_(new int[num_partitions_]),
+      largest_partition_size_(0) {
+  assert(first_partition_idx >= 0);
+  assert(last_partition_idx >= first_partition_idx);
+  assert(last_partition_idx < fragmentation.fragmentationVectorSize);
+  // NOTE(review): size_t loop index vs. num_partitions_ -- signed/unsigned
+  // comparison depends on num_partitions_'s declared type; confirm.
+  for (size_t i = 0; i < num_partitions_; ++i) {
+    size_vector_[i] =
+        fragmentation.fragmentationLength[i + first_partition_idx];
+    largest_partition_size_ = std::max(largest_partition_size_,
+                                       size_vector_[i]);
+  }
+  root_ = PartitionTreeNode::CreateRootNode(size_vector_, num_partitions_);
+}
+
+// Frees the copied size vector and the whole search tree.
+Vp8PartitionAggregator::~Vp8PartitionAggregator() {
+  delete [] size_vector_;
+  delete root_;
+}
+
+// Seeds the root with packet-size bounds obtained elsewhere, so the search
+// optimizes the spread relative to packets already decided.
+void Vp8PartitionAggregator::SetPriorMinMax(int min_size, int max_size) {
+  assert(root_);
+  assert(min_size >= 0);
+  assert(max_size >= min_size);
+  root_->set_min_parent_size(min_size);
+  root_->set_max_parent_size(max_size);
+}
+
+// Runs the tree search and converts the winning leaf into a vector mapping
+// each partition index to the index of the packet that carries it.
+Vp8PartitionAggregator::ConfigVec
+Vp8PartitionAggregator::FindOptimalConfiguration(int max_size, int penalty) {
+  assert(root_);
+  assert(max_size >= largest_partition_size_);
+  PartitionTreeNode* opt = root_->GetOptimalNode(max_size, penalty);
+  ConfigVec config_vector(num_partitions_, 0);
+  PartitionTreeNode* temp_node = opt;
+  // Walk from the leaf back to the root; each packet-start node closes the
+  // packet index currently being assigned.
+  int packet_index = opt->NumPackets() - 1;
+  for (int i = num_partitions_ - 1; i >= 0; --i) {
+    assert(packet_index >= 0);
+    assert(temp_node != NULL);
+    config_vector[i] = packet_index;
+    if (temp_node->packet_start()) --packet_index;
+    temp_node = temp_node->parent();
+  }
+  return config_vector;
+}
+
+// Computes the smallest and largest aggregate packet size produced by
+// 'config'. Negative *min_size / *max_size act as "unset" sentinels and
+// are replaced with INT_MAX / 0 before the scan; otherwise the incoming
+// values participate in the min/max.
+void Vp8PartitionAggregator::CalcMinMax(const ConfigVec& config,
+                                        int* min_size, int* max_size) const {
+  if (*min_size < 0) {
+    *min_size = std::numeric_limits<int>::max();
+  }
+  if (*max_size < 0) {
+    *max_size = 0;
+  }
+  unsigned int i = 0;
+  while (i < config.size()) {
+    // Sum one run of consecutive partitions assigned to the same packet.
+    int this_size = 0;
+    unsigned int j = i;
+    while (j < config.size() && config[i] == config[j]) {
+      this_size += size_vector_[j];
+      ++j;
+    }
+    i = j;
+    if (this_size < *min_size) {
+      *min_size = this_size;
+    }
+    if (this_size > *max_size) {
+      *max_size = this_size;
+    }
+  }
+}
+
+// Chooses how many fragments to split a large partition into, trading
+// deviation of the fragment size from the [min_size, max_size] band against
+// a per-fragment penalty. Negative min/max mean "no band" and select the
+// minimum fragment count outright.
+int Vp8PartitionAggregator::CalcNumberOfFragments(int large_partition_size,
+                                                  int max_payload_size,
+                                                  int penalty,
+                                                  int min_size,
+                                                  int max_size) {
+  assert(max_size <= max_payload_size);
+  assert(min_size <= max_size);
+  assert(max_payload_size > 0);
+  // Divisions with rounding up.
+  const int min_number_of_fragments =
+      (large_partition_size + max_payload_size - 1) / max_payload_size;
+  if (min_size < 0 || max_size < 0) {
+    // No aggregates produced, so we do not have any size boundaries.
+    // Simply split in as few partitions as possible.
+    return min_number_of_fragments;
+  }
+  const int max_number_of_fragments =
+      (large_partition_size + min_size - 1) / min_size;
+  int num_fragments = -1;
+  int best_cost = std::numeric_limits<int>::max();
+  // Exhaustive scan over the feasible fragment counts; the cost penalizes
+  // fragments falling outside the [min_size, max_size] band.
+  for (int n = min_number_of_fragments; n <= max_number_of_fragments; ++n) {
+    // Round up so that we use the largest fragment.
+    int fragment_size = (large_partition_size + n - 1) / n;
+    int cost = 0;
+    if (fragment_size < min_size) {
+      cost = min_size - fragment_size + n * penalty;
+    } else if (fragment_size > max_size) {
+      cost = fragment_size - max_size + n * penalty;
+    } else {
+      cost = n * penalty;
+    }
+    if (fragment_size <= max_payload_size && cost < best_cost) {
+      num_fragments = n;
+      best_cost = cost;
+    }
+  }
+  assert(num_fragments > 0);
+  // TODO(mflodman) Assert disabled since it's falsely triggered, see issue 293.
+  //assert(large_partition_size / num_fragments + 1 <= max_payload_size);
+  return num_fragments;
+}
+
+}  // namespace
+
diff --git a/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator.h b/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator.h
new file mode 100644
index 0000000..c5d47de
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator.h
@@ -0,0 +1,135 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
+
+#include <vector>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+// Class used to solve the VP8 aggregation problem.
+class PartitionTreeNode {
+ public:
+  // Create a tree node.
+  PartitionTreeNode(PartitionTreeNode* parent,
+                    const int* size_vector,
+                    int num_partitions,
+                    int this_size);
+
+  // Create a root node.
+  static PartitionTreeNode* CreateRootNode(const int* size_vector,
+                                           int num_partitions);
+
+  ~PartitionTreeNode();
+
+  // Calculate the cost for the node. If the node is a solution node, the cost
+  // will be the actual cost associated with that solution. If not, the cost
+  // will be the cost accumulated so far along the current branch (which is a
+  // lower bound for any solution along the branch).
+  int Cost(int penalty);
+
+  // Create the two children for this node.
+  bool CreateChildren(int max_size);
+
+  // Get the number of packets for the configuration that this node represents.
+  int NumPackets();
+
+  // Find the optimal solution given a maximum packet size and a per-packet
+  // penalty. The method will be recursively called while the solver is
+  // probing down the tree of nodes.
+  PartitionTreeNode* GetOptimalNode(int max_size, int penalty);
+
+  // Setters and getters.
+  void set_max_parent_size(int size) { max_parent_size_ = size; }
+  void set_min_parent_size(int size) { min_parent_size_ = size; }
+  PartitionTreeNode* parent() const { return parent_; }
+  PartitionTreeNode* left_child() const { return children_[kLeftChild]; }
+  PartitionTreeNode* right_child() const { return children_[kRightChild]; }
+  int this_size() const { return this_size_; }
+  bool packet_start() const { return packet_start_; }
+
+ private:
+  enum Children {
+    kLeftChild = 0,
+    kRightChild = 1
+  };
+
+  void set_packet_start(bool value) { packet_start_ = value; }
+
+  PartitionTreeNode* parent_;
+  PartitionTreeNode* children_[2];
+  int this_size_;
+  const int* size_vector_;
+  int num_partitions_;
+  int max_parent_size_;
+  int min_parent_size_;
+  bool packet_start_;
+
+  DISALLOW_COPY_AND_ASSIGN(PartitionTreeNode);
+};
+
+// Class that calculates the optimal aggregation of VP8 partitions smaller than
+// the maximum packet size.
+class Vp8PartitionAggregator {
+ public:
+  typedef std::vector<int> ConfigVec;
+
+  // Constructor. All partitions in the fragmentation header from index
+  // first_partition_idx to last_partition_idx must be smaller than
+  // maximum packet size to be used in FindOptimalConfiguration.
+  Vp8PartitionAggregator(const RTPFragmentationHeader& fragmentation,
+                         int first_partition_idx, int last_partition_idx);
+
+  ~Vp8PartitionAggregator();
+
+  // Set the smallest and largest payload sizes produced so far.
+  void SetPriorMinMax(int min_size, int max_size);
+
+  // Find the aggregation of VP8 partitions that produces the smallest cost.
+  // The result is given as a vector of the same length as the number of
+  // partitions given to the constructor (i.e., last_partition_idx -
+  // first_partition_idx + 1), where each element indicates the packet index
+  // for that partition. Thus, the output vector starts at 0 and is increasing
+  // up to the number of packets - 1.
+  ConfigVec FindOptimalConfiguration(int max_size, int penalty);
+
+  // Calculate minimum and maximum packet sizes for a given aggregation config.
+  // The extreme packet sizes of the given aggregation are compared with the
+  // values given in min_size and max_size, and if either of these are exceeded,
+  // the new extreme value will be written to the corresponding variable.
+  void CalcMinMax(const ConfigVec& config, int* min_size, int* max_size) const;
+
+  // Calculate the number of fragments to divide a large partition into.
+  // The large partition is of size large_partition_size. The payload must not
+  // be larger than max_payload_size. Each fragment comes at an overhead cost
+  // of penalty bytes. If the size of the fragments fall outside the range
+  // [min_size, max_size], an extra cost is inflicted.
+  static int CalcNumberOfFragments(int large_partition_size,
+                                   int max_payload_size,
+                                   int penalty,
+                                   int min_size,
+                                   int max_size);
+
+ private:
+  PartitionTreeNode* root_;
+  size_t num_partitions_;
+  int* size_vector_;
+  int largest_partition_size_;
+
+  DISALLOW_COPY_AND_ASSIGN(Vp8PartitionAggregator);
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
diff --git a/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc b/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
new file mode 100644
index 0000000..3c274d1
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
@@ -0,0 +1,215 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>  // NULL
+
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+
+namespace webrtc {
+
+TEST(PartitionTreeNode, CreateAndDelete) {
+  const int kVector[] = {1, 2, 3};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  PartitionTreeNode* node1 =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  PartitionTreeNode* node2 =
+      new PartitionTreeNode(node1, kVector, kNumPartitions, 17);
+  delete node1;
+  delete node2;
+}
+
+TEST(PartitionTreeNode, CreateChildrenAndDelete) {
+  const int kVector[] = {1, 2, 3};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  const int kMaxSize = 10;
+  const int kPenalty = 5;
+  PartitionTreeNode* root =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  EXPECT_TRUE(root->CreateChildren(kMaxSize));
+  ASSERT_TRUE(NULL != root->left_child());
+  ASSERT_TRUE(NULL != root->right_child());
+  EXPECT_EQ(3, root->left_child()->this_size());
+  EXPECT_EQ(2, root->right_child()->this_size());
+  EXPECT_EQ(11, root->right_child()->Cost(kPenalty));
+  EXPECT_FALSE(root->left_child()->packet_start());
+  EXPECT_TRUE(root->right_child()->packet_start());
+  delete root;
+}
+
+TEST(PartitionTreeNode, FindOptimalConfig) {
+  const int kVector[] = {197, 194, 213, 215, 184, 199, 197, 207};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  const int kMaxSize = 1500;
+  const int kPenalty = 1;
+  PartitionTreeNode* root =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  root->set_max_parent_size(500);
+  root->set_min_parent_size(300);
+  PartitionTreeNode* opt = root->GetOptimalNode(kMaxSize, kPenalty);
+  ASSERT_TRUE(opt != NULL);
+  EXPECT_EQ(4, opt->NumPackets());
+  // Expect optimal sequence to be {1, 0, 1, 0, 1, 0, 1, 0}, which corresponds
+  // to (right)-left-right-left-right-left-right-left, where the root node is
+  // implicitly a "right" node by definition.
+  EXPECT_TRUE(opt->parent()->parent()->parent()->parent()->parent()->
+              parent()->parent()->packet_start());
+  EXPECT_FALSE(opt->parent()->parent()->parent()->parent()->parent()->
+               parent()->packet_start());
+  EXPECT_TRUE(opt->parent()->parent()->parent()->parent()->parent()->
+              packet_start());
+  EXPECT_FALSE(opt->parent()->parent()->parent()->parent()->packet_start());
+  EXPECT_TRUE(opt->parent()->parent()->parent()->packet_start());
+  EXPECT_FALSE(opt->parent()->parent()->packet_start());
+  EXPECT_TRUE(opt->parent()->packet_start());
+  EXPECT_FALSE(opt->packet_start());
+  EXPECT_TRUE(opt == root->left_child()->right_child()->left_child()->
+              right_child()->left_child()->right_child()->left_child());
+  delete root;
+}
+
+TEST(PartitionTreeNode, FindOptimalConfigSinglePartition) {
+  const int kVector[] = {17};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  const int kMaxSize = 1500;
+  const int kPenalty = 1;
+  PartitionTreeNode* root =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  PartitionTreeNode* opt = root->GetOptimalNode(kMaxSize, kPenalty);
+  ASSERT_TRUE(opt != NULL);
+  EXPECT_EQ(1, opt->NumPackets());
+  EXPECT_TRUE(opt == root);
+  delete root;
+}
+
+static void VerifyConfiguration(const int* expected_config,
+                                size_t expected_config_len,
+                                const std::vector<int>& opt_config,
+                                const RTPFragmentationHeader& fragmentation) {
+  ASSERT_EQ(expected_config_len, fragmentation.fragmentationVectorSize);
+  EXPECT_EQ(expected_config_len, opt_config.size());
+  for (size_t i = 0; i < expected_config_len; ++i) {
+    EXPECT_EQ(expected_config[i], opt_config[i]);
+  }
+}
+
+static void VerifyMinMax(const Vp8PartitionAggregator& aggregator,
+                         const std::vector<int>& opt_config,
+                         int expected_min,
+                         int expected_max) {
+  int min_size = -1;
+  int max_size = -1;
+  aggregator.CalcMinMax(opt_config, &min_size, &max_size);
+  EXPECT_EQ(expected_min, min_size);
+  EXPECT_EQ(expected_max, max_size);
+}
+
+TEST(Vp8PartitionAggregator, CreateAndDelete) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(3);
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 2);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, FindOptimalConfig) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(8);
+  fragmentation.fragmentationLength[0] = 197;
+  fragmentation.fragmentationLength[1] = 194;
+  fragmentation.fragmentationLength[2] = 213;
+  fragmentation.fragmentationLength[3] = 215;
+  fragmentation.fragmentationLength[4] = 184;
+  fragmentation.fragmentationLength[5] = 199;
+  fragmentation.fragmentationLength[6] = 197;
+  fragmentation.fragmentationLength[7] = 207;
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 7);
+  aggregator->SetPriorMinMax(300, 500);
+  int kMaxSize = 1500;
+  int kPenalty = 1;
+  std::vector<int> opt_config = aggregator->FindOptimalConfiguration(kMaxSize,
+                                                                     kPenalty);
+  const int kExpectedConfig[] = {0, 0, 1, 1, 2, 2, 3, 3};
+  const size_t kExpectedConfigSize =
+      sizeof(kExpectedConfig) / sizeof(kExpectedConfig[0]);
+  VerifyConfiguration(kExpectedConfig, kExpectedConfigSize, opt_config,
+                      fragmentation);
+  VerifyMinMax(*aggregator, opt_config, 383, 428);
+  // Change min and max and run method again. This time, we expect it to leave
+  // the values unchanged.
+  int min_size = 382;
+  int max_size = 429;
+  aggregator->CalcMinMax(opt_config, &min_size, &max_size);
+  EXPECT_EQ(382, min_size);
+  EXPECT_EQ(429, max_size);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, FindOptimalConfigEqualFragments) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(8);
+  fragmentation.fragmentationLength[0] = 200;
+  fragmentation.fragmentationLength[1] = 200;
+  fragmentation.fragmentationLength[2] = 200;
+  fragmentation.fragmentationLength[3] = 200;
+  fragmentation.fragmentationLength[4] = 200;
+  fragmentation.fragmentationLength[5] = 200;
+  fragmentation.fragmentationLength[6] = 200;
+  fragmentation.fragmentationLength[7] = 200;
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 7);
+  int kMaxSize = 1500;
+  int kPenalty = 1;
+  std::vector<int> opt_config = aggregator->FindOptimalConfiguration(kMaxSize,
+                                                                     kPenalty);
+  const int kExpectedConfig[] = {0, 0, 0, 0, 1, 1, 1, 1};
+  const size_t kExpectedConfigSize =
+      sizeof(kExpectedConfig) / sizeof(kExpectedConfig[0]);
+  VerifyConfiguration(kExpectedConfig, kExpectedConfigSize, opt_config,
+                      fragmentation);
+  VerifyMinMax(*aggregator, opt_config, 800, 800);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, FindOptimalConfigSinglePartition) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(1);
+  fragmentation.fragmentationLength[0] = 17;
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 0);
+  int kMaxSize = 1500;
+  int kPenalty = 1;
+  std::vector<int> opt_config = aggregator->FindOptimalConfiguration(kMaxSize,
+                                                                     kPenalty);
+  const int kExpectedConfig[] = {0};
+  const size_t kExpectedConfigSize =
+      sizeof(kExpectedConfig) / sizeof(kExpectedConfig[0]);
+  VerifyConfiguration(kExpectedConfig, kExpectedConfigSize, opt_config,
+                      fragmentation);
+  VerifyMinMax(*aggregator, opt_config, 17, 17);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, TestCalcNumberOfFragments) {
+  const int kMTU = 1500;
+  EXPECT_EQ(2,
+            Vp8PartitionAggregator::CalcNumberOfFragments(
+                1600, kMTU, 1, 300, 900));
+  EXPECT_EQ(3,
+            Vp8PartitionAggregator::CalcNumberOfFragments(
+                1600, kMTU, 1, 300, 798));
+  EXPECT_EQ(2,
+            Vp8PartitionAggregator::CalcNumberOfFragments(
+                1600, kMTU, 1, 900, 1000));
+}
+
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.cc
new file mode 100644
index 0000000..1a55e4e
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.cc
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "BWEConvergenceTest.h"
+
+#include <fstream>
+#include <string>
+
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+
+
+BWEConvergenceTestUp::BWEConvergenceTestUp(std::string testName, int startRateKbps, int availBWkbps)
+:
+_availBWkbps(availBWkbps),
+BWEOneWayTest(testName, startRateKbps)
+{
+}
+
+
+BWEConvergenceTestUp::~BWEConvergenceTestUp()
+{
+    if (_gen)
+    {
+        delete _gen;
+        _gen = NULL;
+    }
+}
+
+
+int BWEConvergenceTestUp::Init(std::string ip, WebRtc_UWord16 port)
+{
+    // create the load generator object
+    const int rtpSampleRate = 90000;
+    const int frameRate = 30;
+    const double spreadFactor = 0.2;
+
+    if (_master)
+    {
+        _gen = new CBRFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate, spreadFactor);
+        if (!_gen)
+        {
+            return (-1);
+        }
+    }
+
+    return BWEOneWayTest::Init(ip, port);
+}
+
+
+bool BWEConvergenceTestUp::StoppingCriterionMaster()
+{
+    return ((_sendrec->BitrateSent() / 1000.0) > (0.9 * _availBWkbps));
+}
+
+
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.h
new file mode 100644
index 0000000..b830d14
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWECONVERGENCETEST_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWECONVERGENCETEST_H_
+
+#include <string>
+
+#include "BWETestBase.h"
+
+#include "typedefs.h"
+
+#include "TestSenderReceiver.h"
+
+class BWEConvergenceTestUp : public BWEOneWayTest
+{
+public:
+    BWEConvergenceTestUp(std::string testName, int startRateKbps, int availBWkbps);
+    virtual ~BWEConvergenceTestUp();
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+
+protected:
+    virtual bool StoppingCriterionMaster();
+
+private:
+    int _availBWkbps;
+};
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWECONVERGENCETEST_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.cc
new file mode 100644
index 0000000..1fd19fe
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.cc
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <fstream>
+#include <math.h>
+
+#include "BWEStabilityTest.h"
+#include "TestLoadGenerator.h"
+#include "tick_util.h"
+#include "critical_section_wrapper.h"
+
+
+BWEStabilityTest::BWEStabilityTest(std::string testName, int rateKbps, int testDurationSeconds)
+:
+_testDurationSeconds(testDurationSeconds),
+BWEOneWayTest(testName, rateKbps)
+{
+}
+
+
+BWEStabilityTest::~BWEStabilityTest()
+{
+    if (_gen)
+    {
+        delete _gen;
+        _gen = NULL;
+    }
+}
+
+
+int BWEStabilityTest::Init(std::string ip, WebRtc_UWord16 port)
+{
+    // create the load generator object
+    const int rtpSampleRate = 90000;
+    const int frameRate = 30;
+    const double spreadFactor = 0.2;
+    const double keyToDeltaRatio = 7;
+    const int keyFramePeriod = 300;
+
+    if (_master)
+    {
+        _gen = new CBRFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate, spreadFactor);
+        //_gen = new PeriodicKeyFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate,
+        //                                     spreadFactor, keyToDeltaRatio, keyFramePeriod);
+        if (!_gen)
+        {
+            return (-1);
+        }
+
+    }
+
+    return BWEOneWayTest::Init(ip, port);
+}
+
+
+void BWEStabilityTest::Report(std::fstream &log)
+{
+    // cannot report on a running test
+    if(_running) return;
+
+    BWETest::Report(log);
+
+    CriticalSectionScoped cs(_statCritSect);
+
+    log << "Bitrate statistics\n";
+    log << "\tAverage = " <<  _rateVecKbps.Mean() << " kbps\n";
+    log << "\tMin     = " <<  _rateVecKbps.Min() << " kbps\n";
+    log << "\tMax     = " <<  _rateVecKbps.Max() << " kbps\n";
+    log << "\tStd     = " <<  _rateVecKbps.Std() << " kbps\n";
+
+}
+
+
+bool BWEStabilityTest::StoppingCriterionMaster()
+{
+    return (TickTime::MillisecondTimestamp() - _startTimeMs >= _testDurationSeconds * 1000);
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.h
new file mode 100644
index 0000000..8f213b1
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWESTABILITYTEST_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWESTABILITYTEST_H_
+
+#include <string>
+
+#include "BWETestBase.h"
+
+#include "typedefs.h"
+
+#include "TestSenderReceiver.h"
+
+class BWEStabilityTest : public BWEOneWayTest
+{
+public:
+    BWEStabilityTest(std::string testName, int rateKbps, int testDurationSeconds);
+    virtual ~BWEStabilityTest();
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+    virtual void Report(std::fstream &log);
+
+protected:
+    virtual bool StoppingCriterionMaster();
+
+private:
+    int _testDurationSeconds;
+};
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWESTABILITYTEST_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
new file mode 100644
index 0000000..471ea5f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
@@ -0,0 +1,200 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// BWEStandAlone.cc : Defines the entry point for the console application.
+//
+
+#include <string>
+#include <stdio.h>
+
+#include "event_wrapper.h"
+#include "udp_transport.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+
+#include "MatlabPlot.h"
+
+//#include "vld.h"
+
+class myTransportCB: public UdpTransportData
+{
+public:
+    myTransportCB (RtpRtcp *rtpMod) : _rtpMod(rtpMod) {};
+protected:
+    // Inherited from UdpTransportData
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+        const WebRtc_Word32 rtpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+        const WebRtc_Word32 rtcpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+private:
+    RtpRtcp *_rtpMod;
+};
+
+void myTransportCB::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                                      const WebRtc_Word32 rtpPacketLength,
+                                      const WebRtc_Word8* fromIP,
+                                      const WebRtc_UWord16 fromPort)
+{
+    printf("Receiving RTP from IP %s, port %u\n", fromIP, fromPort);
+    _rtpMod->IncomingPacket((WebRtc_UWord8 *) incomingRtpPacket, static_cast<WebRtc_UWord16>(rtpPacketLength));
+}
+
+void myTransportCB::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                                       const WebRtc_Word32 rtcpPacketLength,
+                                       const WebRtc_Word8* fromIP,
+                                       const WebRtc_UWord16 fromPort)
+{
+    printf("Receiving RTCP from IP %s, port %u\n", fromIP, fromPort);
+    _rtpMod->IncomingPacket((WebRtc_UWord8 *) incomingRtcpPacket, static_cast<WebRtc_UWord16>(rtcpPacketLength));
+}
+
+
+int main(int argc, char* argv[])
+{
+    bool isSender = false;
+    bool isReceiver = false;
+    WebRtc_UWord16 port;
+    std::string ip;
+    TestSenderReceiver *sendrec = new TestSenderReceiver();
+    TestLoadGenerator *gen;
+
+    if (argc == 2)
+    {
+        // receiver only
+        isReceiver = true;
+
+        // read port
+        port = atoi(argv[1]);
+    }
+    else if (argc == 3)
+    {
+        // sender and receiver
+        isSender = true;
+        isReceiver = true;
+
+        // read IP
+        ip = argv[1];
+
+        // read port
+        port = atoi(argv[2]);
+    }
+
+    Trace::CreateTrace();
+    Trace::SetTraceFile("BWEStandAloneTrace.txt");
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    sendrec->InitReceiver(port);
+
+    sendrec->Start();
+
+    if (isSender)
+    {
+        const WebRtc_UWord32 startRateKbps = 1000;
+        //gen = new CBRGenerator(sendrec, 1000, 500);
+        gen = new CBRFixFRGenerator(sendrec, startRateKbps, 90000, 30, 0.2);
+        //gen = new PeriodicKeyFixFRGenerator(sendrec, startRateKbps, 90000, 30, 0.2, 7, 300);
+        //const WebRtc_UWord16 numFrameRates = 5;
+        //const WebRtc_UWord8 frameRates[numFrameRates] = {30, 15, 20, 23, 25};
+        //gen = new CBRVarFRGenerator(sendrec, 1000, frameRates, numFrameRates, 90000, 4.0, 0.1, 0.2);
+        //gen = new CBRFrameDropGenerator(sendrec, startRateKbps, 90000, 0.2);
+        sendrec->SetLoadGenerator(gen);
+        sendrec->InitSender(startRateKbps, ip.c_str(), port);
+        gen->Start();
+    }
+
+    while (1)
+    {
+    }
+
+    if (isSender)
+    {
+        gen->Stop();
+        delete gen;
+    }
+
+    delete sendrec;
+
+    //WebRtc_UWord8 numberOfSocketThreads = 1;
+    //UdpTransport* transport = UdpTransport::Create(0, numberOfSocketThreads);
+
+    //RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(1, false);
+    //if (rtp->InitSender() != 0)
+    //{
+    //    exit(1);
+    //}
+    //if (rtp->RegisterSendTransport(transport) != 0)
+    //{
+    //    exit(1);
+    //}
+
+//    transport->InitializeSendSockets("192.168.200.39", 8000);
+    //transport->InitializeSendSockets("127.0.0.1", 10000);
+    //transport->InitializeSourcePorts(8000);
+
+
+    return(0);
+ //   myTransportCB *tp = new myTransportCB(rtp);
+ //   transport->InitializeReceiveSockets(tp, 10000, "0.0.0.0");
+ //   transport->StartReceiving(500);
+
+ //   WebRtc_Word8 data[100];
+ //   for (int i = 0; i < 100; data[i] = i++);
+
+ //   for (int i = 0; i < 100; i++)
+ //   {
+ //       transport->SendRaw(data, 100, false);
+ //   }
+
+
+
+ //   WebRtc_Word32 totTime = 0;
+ //   while (totTime < 10000)
+ //   {
+ //       transport->Process();
+ //       WebRtc_Word32 wTime = transport->TimeUntilNextProcess();
+ //       totTime += wTime;
+ //       Sleep(wTime);
+ //   }
+
+
+    //if (transport)
+    //{
+    //    // Destroy the Socket Transport module
+    //    transport->StopReceiving();
+    //    transport->InitializeReceiveSockets(NULL,0);// deregister callback
+ //       UdpTransport::Destroy(transport);
+    //    transport = NULL;
+ //   }
+
+ //   if (tp)
+ //   {
+ //       delete tp;
+ //       tp = NULL;
+ //   }
+
+ //   if (rtp)
+ //   {
+ //       RtpRtcp::DestroyRtpRtcp(rtp);
+ //       rtp = NULL;
+ //   }
+
+
+    //return 0;
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.cc
new file mode 100644
index 0000000..2940abd
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.cc
@@ -0,0 +1,453 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "BWETestBase.h"
+
+#include <algorithm> // sort
+#include <fstream>
+#include <string>
+#include <vector>
+#include <math.h>
+
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "critical_section_wrapper.h"
+
+
+// Arithmetic mean of all elements; returns 0 for an empty vector.
+double StatVec::Mean()
+{
+    double sum = 0;
+
+    // sanity
+    if (size() <= 0) return (0);
+
+    std::vector<double>::iterator it;
+    for (it = begin(); it < end(); ++it)
+    {
+        sum += (*it);
+    }
+
+    return (sum / size());
+}
+
+// Sample variance normalized by N-1 (unbiased estimator for normal data).
+// With N elements and M = N-1, the expression below is algebraically
+// (sumSquare - sum^2/N) / M, i.e. sumSquare/M - (sum/N)*(sum/M).
+// Returns 0 when fewer than two elements are present.
+double StatVec::Variance()
+{
+    double sumSqaure = 0;
+    double sum = 0;
+
+    std::vector<double>::iterator it;
+    for (it = begin(); it < end(); ++it)
+    {
+        sum += (*it);
+        sumSqaure += (*it) * (*it);
+    }
+
+    // Normalizes by N-1. This produces the best unbiased estimate of the
+    // variance if X is a sample from a normal distribution.
+    int M = static_cast<int> (size() - 1);
+
+    if (M > 0)
+    {
+        // (sum / (M+1)) * (sum / M) == sum^2 / (N*M); see header comment.
+        double var = (sumSqaure / M) - (sum / (M+1)) * (sum / M);
+        assert(var >= 0);
+        return (var);
+    }
+    else
+    {
+        return (0);
+    }
+}
+
+// Sample standard deviation (square root of Variance()).
+double StatVec::Std()
+{
+    return (sqrt(Variance()));
+}
+
+// Largest element; returns 0 for an empty vector.
+double StatVec::Max()
+{
+    // sanity
+    if (size() <= 0) return (0);
+
+    std::vector<double>::iterator it = begin();
+    double maxVal = (*it);
+    ++it;
+
+    for (; it < end(); ++it)
+    {
+        if ((*it) > maxVal) maxVal = (*it);
+    }
+
+    return (maxVal);
+}
+
+// Smallest element; returns 0 for an empty vector.
+double StatVec::Min()
+{
+    // sanity
+    if (size() <= 0) return (0);
+
+    std::vector<double>::iterator it = begin();
+    double minVal = (*it);
+    ++it;
+
+    for (; it < end(); ++it)
+    {
+        if ((*it) < minVal) minVal = (*it);
+    }
+
+    return (minVal);
+}
+
+// Median of the elements; returns 0 for an empty vector.
+// NOTE: sorts the vector in place as a side effect.
+double StatVec::Median()
+{
+    double median;
+
+    // sanity
+    if (size() <= 0) return (0);
+
+    // sort the vector
+    sort(begin(), end());
+
+    if ((size() % 2) == 0)
+    {
+        // even size; use average of two center elements
+        median = (at(size()/2 - 1) + at(size()/2)) / 2.0;
+    }
+    else
+    {
+        // odd size; take center element
+        median = at(size()/2);
+    }
+
+    return (median);
+}
+
+// Value at percentile p (0 <= p <= 100), nearest-rank method.
+// Returns 0 for an empty vector. Sorts the vector in place.
+double StatVec::Percentile(double p)
+{
+    // sanity
+    if (size() <= 0) return (0);
+
+    // sort the vector
+    sort(begin(), end());
+
+    // Nearest-rank index: ((size()-1) * p) / 100 rounded to nearest int
+    // already lies in [0, size()-1]. The original code subtracted 1 here,
+    // which underflows to -1 for small p (e.g. p == 0) and trips the
+    // assert below.
+    int rank = static_cast<int> (((size() - 1) * p) / 100 + 0.5);
+
+    assert(rank >= 0);
+    assert(rank < static_cast<int>(size()));
+
+    return (at(rank));
+}
+
+// Writes all elements to `file`, separated by ", " (row vector) or by
+// newlines when colVec is true (column vector), terminated by std::endl.
+// No-op for an empty vector.
+void StatVec::Export(std::fstream &file, bool colVec /*= false*/)
+{
+    // sanity
+    if (size() <= 0) return;
+
+    std::string separator;
+    if (colVec) separator = "\n";
+    else separator = ", ";
+
+    // first element has no leading separator
+    std::vector<double>::iterator it = begin();
+    file << (*it);
+    ++it;
+
+    for (; it < end(); ++it)
+    {
+        file << separator << (*it);
+    }
+
+    file << std::endl;
+}
+
+
+// Thread entry adapter: casts the opaque argument to a BWETest, runs its
+// processing loop to completion, then stops the test. Returns false only
+// for a NULL argument.
+bool BWETestProcThreadFunction(void *obj)
+{
+    if (obj == NULL)
+    {
+        return false;
+    }
+    BWETest *theObj = static_cast<BWETest *>(obj);
+
+    // blocks until the test's stopping criterion fires
+    theObj->ProcLoop();
+
+    theObj->Stop();
+
+    return(true);
+}
+
+
+// Constructs a test shell; the load generator (_gen) is left NULL and is
+// expected to be created by a subclass before/within Init().
+// NOTE(review): the initializer list order differs from the declaration
+// order in BWETestBase.h (_sendrec/_gen are declared first); members are
+// initialized in declaration order regardless, which is harmless here but
+// triggers -Wreorder.
+BWETest::BWETest(std::string testName, int startRateKbps):
+_testName(testName),
+_startRateKbps(startRateKbps),
+_master(false),
+_sendrec(NULL),
+_gen(NULL),
+_initialized(false),
+_started(false),
+_running(false),
+_eventPtr(NULL),
+_procThread(NULL),
+_startTimeMs(-1),
+_stopTimeMs(-1),
+_statCritSect(CriticalSectionWrapper::CreateCriticalSection())
+{
+    _sendrec = new TestSenderReceiver();
+}
+
+
+// Tears down the test: stops the processing thread if still running, then
+// releases the statistics lock and the sender/receiver.
+BWETest::~BWETest()
+{
+    if (_running)
+    {
+        Stop();
+    }
+
+    // Take the statistics lock before destroying it so no stats callback
+    // is mid-update when the object goes away.
+    _statCritSect->Enter();
+    // BUG FIX: the original `delete &_statCritSect` deleted the address of
+    // the pointer member (a CriticalSectionWrapper**), not the critical
+    // section object — undefined behavior plus a leak of the lock.
+    delete _statCritSect;
+
+    if (_sendrec)
+    {
+        delete _sendrec;
+        _sendrec = NULL;
+    }
+}
+
+
+// Sets the master/slave role. Only effective before Init(); afterwards the
+// call is ignored. Returns the role actually in effect.
+bool BWETest::SetMaster(bool isMaster /*= true*/)
+{
+    if (!_initialized)
+    {
+        // Can only set status before initializing.
+        _master = isMaster;
+    }
+
+    return (_master);
+}
+
+
+// One-shot initialization: sets up the receive port (for feedback), the
+// sender toward ip:port, and registers this object for network-change
+// callbacks. Returns -1 if called twice or after Start(); 0 on success.
+int BWETest::Init(std::string ip, WebRtc_UWord16 port)
+{
+    if (_initialized)
+    {
+        // cannot init twice
+        return (-1);
+    }
+
+    if (!_sendrec)
+    {
+        // NOTE(review): throwing a const char* with no handler in sight
+        // terminates the process; the exit(1) below is unreachable.
+        throw "SenderReceiver must be created";
+        exit(1);
+    }
+
+    if (_started)
+    {
+        // cannot init after start
+        return (-1);
+    }
+
+    // initialize receiver port (for feedback)
+    _sendrec->InitReceiver(port);
+
+    // initialize sender; _gen may still be NULL for tests without a
+    // load generator
+    _sendrec->SetLoadGenerator(_gen);
+    _sendrec->InitSender(_startRateKbps, ip.c_str(), port);
+    //_gen->Start();
+
+    _sendrec->SetCallback(this);
+
+    _initialized = true;
+
+    return 0;
+}
+
+
+// Starts the sender/receiver and (if present) the load generator, records
+// the start timestamp and creates the wakeup event used by ProcLoop().
+// Returns false if not initialized or a component fails to start; true if
+// already started (idempotent).
+// NOTE(review): _procThread is not created here — in this harness the
+// caller (e.g. main in BWETester.cc) drives ProcLoop() directly.
+bool BWETest::Start()
+{
+    if (!_initialized)
+    {
+        // must init first
+        return (false);
+    }
+    if (_started)
+    {
+        // already started, do nothing
+        return (true);
+    }
+
+    if (_sendrec->Start() != 0)
+    {
+        // failed
+        return (false);
+    }
+
+    if (_gen)
+    {
+        if (_gen->Start() != 0)
+        {
+            // failed
+            return (false);
+        }
+    }
+
+    // event used by ProcLoop() as a 1 s periodic wait / early-wakeup hook
+    _eventPtr = EventWrapper::Create();
+
+    _startTimeMs = TickTime::MillisecondTimestamp();
+    _started = true;
+    _running = true;
+
+    return (true);
+}
+
+
+// Stops the processing thread (if one was attached), destroys the wakeup
+// event and halts the load generator. Safe to call when nothing is running.
+bool BWETest::Stop()
+{
+    if (_procThread)
+    {
+        _stopTimeMs = TickTime::MillisecondTimestamp();
+        _procThread->SetNotAlive();
+        _running = false;
+        // wake ProcLoop() out of its 1 s wait so it can observe !_running
+        _eventPtr->Set();
+
+        // busy-wait until the thread acknowledges the stop
+        while (!_procThread->Stop())
+        {
+            ;
+        }
+
+        delete _procThread;
+        _procThread = NULL;
+
+    }
+
+    if (_eventPtr)
+    {
+        delete _eventPtr;
+        _eventPtr = NULL;
+    }
+
+    _procThread = NULL;
+
+    if(_gen)
+    {
+        _gen->Stop();
+    }
+
+    return(true);
+}
+
+
+// Main polling loop: once per second checks the role-specific stopping
+// criterion and clears _running when it fires. Returns when the test is
+// over (or when Stop() signals the event and clears _running externally).
+bool BWETest::ProcLoop(void)
+{
+    bool receiving = false;
+
+    // no critSect
+    while (_running)
+    {
+
+        // check stopping criterions
+        if (_master && StoppingCriterionMaster())
+        {
+            printf("StoppingCriterionMaster()\n");
+            // master records the stop time; the slave's is set in Stop()
+            _stopTimeMs = TickTime::MillisecondTimestamp();
+            _running = false;
+        }
+        else if (!_master && StoppingCriterionSlave())
+        {
+            printf("StoppingCriterionSlave()\n");
+            _running = false;
+        }
+
+        // wait
+        _eventPtr->Wait(1000); // 1000 ms
+
+    }
+
+    return true;
+}
+
+
+// Writes a summary (duration, RTT min/max, loss stats, raw rate and RTT
+// vectors) of a finished test to `log`. No-op while the test is running.
+void BWETest::Report(std::fstream &log)
+{
+    // cannot report on a running test
+    if(_running) return;
+
+    CriticalSectionScoped cs(_statCritSect);
+
+    log << "\n\n*** Test name = " << _testName << "\n";
+    log << "Execution time = " <<  static_cast<double>(_stopTimeMs - _startTimeMs) / 1000 << " s\n";
+    log << "\n";
+    log << "RTT statistics\n";
+    log << "\tMin     = " << _rttVecMs.Min() << " ms\n";
+    log << "\tMax     = " << _rttVecMs.Max() << " ms\n";
+    log << "\n";
+    log << "Loss statistics\n";
+    // NOTE(review): _lossVec stores fractionLost/255, i.e. a 0..1
+    // fraction (see OnOnNetworkChanged), yet it is labeled "%" here —
+    // the printed numbers look 100x too small; confirm intended scale.
+    log << "\tAverage = " << _lossVec.Mean() << "%\n";
+    log << "\tMax     = " << _lossVec.Max() << "%\n";
+
+    log << "\n" << "Rates" << "\n";
+    _rateVecKbps.Export(log);
+
+    log << "\n" << "RTT" << "\n";
+    _rttVecMs.Export(log);
+
+}
+
+
+// SenderReceiver callback
+void BWETest::OnOnNetworkChanged(const WebRtc_UWord32 bitrateTargetBps,
+                                 const WebRtc_UWord8 fractionLost,
+                                 const WebRtc_UWord16 roundTripTimeMs,
+                                 const WebRtc_UWord32 jitterMS,
+                                 const WebRtc_UWord16 bwEstimateKbitMin,
+                                 const WebRtc_UWord16 bwEstimateKbitMax)
+{
+    CriticalSectionScoped cs(_statCritSect);
+
+    // bitrate statistics
+    WebRtc_Word32 newBitrateKbps = bitrateTargetBps/1000;
+
+    _rateVecKbps.push_back(newBitrateKbps);
+    _rttVecMs.push_back(roundTripTimeMs);
+    _lossVec.push_back(static_cast<double>(fractionLost) / 255.0);
+}
+
+
+// One-way test init: the receiving side (slave) stops on packet timeout by
+// default; everything else is delegated to the base class.
+int BWEOneWayTest::Init(std::string ip, WebRtc_UWord16 port)
+{
+
+    if (!_master)
+    {
+        // Use timeout stopping criterion by default for receiver
+        UseRecvTimeout();
+    }
+
+    return (BWETest::Init(ip, port));
+
+}
+
+
+// Starts the test; additionally, the slave sends a single one-byte dummy
+// frame so the master can start measuring RTT over RTCP.
+bool BWEOneWayTest::Start()
+{
+    bool ret = BWETest::Start();
+
+    if (!_master)
+    {
+        // send one dummy RTP packet to enable RTT measurements
+        const WebRtc_UWord8 dummy = 0;
+        //_gen->sendPayload(TickTime::MillisecondTimestamp(), &dummy, 0);
+        _sendrec->SendOutgoingData(
+            static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp()*90),
+            &dummy, 1, webrtc::kVideoFrameDelta);
+    }
+
+    return ret;
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.h
new file mode 100644
index 0000000..bab1b94
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETESTBASE_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETESTBASE_H_
+
+#include <string>
+#include <vector>
+
+#include "typedefs.h"
+
+#include "TestSenderReceiver.h"
+
+
+// Thin statistics helper on top of std::vector<double>. Median() and
+// Percentile() sort the data in place; all accessors return 0 when empty.
+// NOTE(review): Export() uses std::fstream but this header does not
+// include <fstream> itself — it appears to rely on another include
+// (e.g. TestSenderReceiver.h); confirm it compiles standalone.
+class StatVec : public std::vector<double>
+{
+public:
+    double Mean();
+    double Variance();
+    double Std();
+    double Max();
+    double Min();
+    double Median();
+    double Percentile(double p); // 0 <= p <= 100%
+    void Export(std::fstream &file, bool colVec = false);
+};
+
+
+// Base class for bandwidth-estimation tests. Lifecycle:
+// SetMaster() -> Init() -> Start() -> ProcLoop() -> Stop() -> Report().
+// Subclasses supply the load generator (_gen) and the master-side stopping
+// criterion; the slave-side criterion defaults to a receive timeout.
+class BWETest : public SendRecCB
+{
+public:
+    BWETest(std::string testName, int startRateKbps);
+    virtual ~BWETest();
+
+    bool SetMaster(bool isMaster = true);
+    // slave-side stop condition: 1000 ms without incoming packets
+    void UseRecvTimeout() { _sendrec->SetPacketTimeout(1000); };
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+    virtual bool Start();
+    virtual bool Stop();
+    bool ProcLoop(void);
+    virtual void Report(std::fstream &log);
+    std::string TestName() { return (_testName); };
+
+    // SenderReceiver callback
+    virtual void OnOnNetworkChanged(const WebRtc_UWord32 bitrateTargetBps,
+        const WebRtc_UWord8 fractionLost,
+        const WebRtc_UWord16 roundTripTimeMs,
+        const WebRtc_UWord32 jitterMS,
+        const WebRtc_UWord16 bwEstimateKbitMin,
+        const WebRtc_UWord16 bwEstimateKbitMax);
+
+
+protected:
+    virtual bool StoppingCriterionMaster() = 0;
+    virtual bool StoppingCriterionSlave() { return (_sendrec->timeOutTriggered()); };
+
+    TestSenderReceiver * _sendrec;  // owned; created in ctor
+    TestLoadGenerator * _gen;       // owned by subclass (may stay NULL)
+
+    std::string _testName;
+    int _startRateKbps;
+    bool _master;
+    bool _initialized;
+    bool _started;
+    bool _running;
+    EventWrapper *_eventPtr;        // created in Start(), freed in Stop()
+    ThreadWrapper* _procThread;     // optional; NULL when ProcLoop() is driven by caller
+    WebRtc_Word64 _startTimeMs;
+    WebRtc_Word64 _stopTimeMs;
+
+    // Statistics, protected by separate CritSect
+    CriticalSectionWrapper* _statCritSect;
+    StatVec _rateVecKbps;
+    StatVec _rttVecMs;
+    StatVec _lossVec;               // loss as 0..1 fractions
+};
+
+
+// One-directional BWE test: only the master generates load; the slave stops
+// on receive timeout.
+// NOTE(review): StoppingCriterionSlave() here duplicates the base-class
+// default verbatim — the override is redundant.
+class BWEOneWayTest : public BWETest
+{
+public:
+    BWEOneWayTest(std::string testName, int startRateKbps) :
+      BWETest(testName, startRateKbps) {};
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+    virtual bool Start();
+
+protected:
+    virtual bool StoppingCriterionSlave() {return ( _sendrec->timeOutTriggered()); };
+
+private:
+
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETESTBASE_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETester.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETester.cc
new file mode 100644
index 0000000..f1d79fe
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETester.cc
@@ -0,0 +1,274 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// BWETester.cpp : Defines the entry point for the console application.
+//
+
+#include <fstream>
+#include <string>
+#include <iostream>
+#include <ctime>
+
+#include "event_wrapper.h"
+#include "trace.h"
+
+#include "BWEStabilityTest.h"
+#include "BWEConvergenceTest.h"
+#include "BWETwoWayLimitFinding.h"
+
+#include "MatlabPlot.h"
+
+//#include "vld.h"
+
+#ifdef MATLAB
+MatlabEngine eng;
+#endif
+
+
+// Bundles one BWETest with its per-role scheduling: whether master/slave
+// wait for a key press and how long each delays before starting.
+// Owns _test (raw pointer, deleted in the destructor).
+// NOTE(review): no copy constructor/assignment guard — copying would
+// double-delete _test; instances are only stored by pointer in practice.
+class testContainer
+{
+public:
+    // same wait/delay for both roles
+    testContainer(BWETest *test, bool waitForKeyStroke, int delayStartSec,
+        std::string instruction) :
+    _test(test),
+        _waitMaster(waitForKeyStroke),
+        _waitSlave(waitForKeyStroke),
+        _delayMaster(delayStartSec),
+        _delaySlave(delayStartSec),
+        _instr(instruction) {};
+
+    // separate wait/delay per role
+    testContainer(BWETest *test,
+        bool waitForKeyStrokeMaster,
+        bool waitForKeyStrokeSlave,
+        int delayStartSecMaster,
+        int delayStartSecSlave,
+        std::string instruction) :
+    _test(test),
+        _waitMaster(waitForKeyStrokeMaster),
+        _waitSlave(waitForKeyStrokeSlave),
+        _delayMaster(delayStartSecMaster),
+        _delaySlave(delayStartSecSlave),
+        _instr(instruction) {};
+
+    ~testContainer() { if(_test) delete _test; _test = NULL; };
+
+    BWETest *_test;     // owned
+    bool _waitMaster;
+    bool _waitSlave;
+    int _delayMaster;
+    int _delaySlave;
+    std::string _instr; // shown to the operator before the test starts
+};
+
+
+// This is where the test cases are created.
+// Syntax:
+// tests->push_back(new testContainer(
+//    new _BWETestConstructor_, // constructor for the test case
+//    _wait_,                   // wait for user key press before start
+//    _delay_,                  // delay test start (after a key press if enabled)
+//    "Instruction to user."));  // message to show in console before starting
+//
+// Or:
+// tests->push_back(new testContainer(
+//    new _BWETestConstructor_, // constructor for the test case
+//    _waitMaster_,             // master will wait for user key press before start
+//    _waitSlave_,              // slave will wait for user key press before start
+//    _delayMaster_,            // delay master test start (after a key press if enabled)
+//    _delaySlave_,             // delay slave test start (after a key press if enabled)
+//    "Instruction to user."));  // message to show in console before starting
+//
+// Valid test cases are:
+// BWEConvergenceTestUp
+// BWEStabilityTest
+// BWETwoWayLimitFinding
+
+
+// Builds the suite of BWE test cases; see the syntax comment above.
+// Fixes vs. the original: "bandwith" -> "bandwidth" in the user-facing
+// instruction strings, one string literal no longer embeds a run of
+// indentation spaces via a backslash line-continuation, and one
+// mis-indented push_back block is normalized.
+void PopulateTests(std::vector<testContainer *>* tests, bool isMaster)
+{
+    tests->push_back(new testContainer(
+        new BWEStabilityTest("Stability", 400, 5*60),
+        true, true,
+        0, 0,
+        "Set bandwidth limit to 512 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWEStabilityTest("Stability", 4000, 5*60),
+        true, true,
+        0, 0,
+        "Set bandwidth limit to 5120 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWEStabilityTest("Stability", 400, 5*60),
+        true, true,
+        0, 0,
+        "Set bandwidth limit to 512 kbps and a normal distributed delay "
+        "with mean 100 ms and std dev 15 ms"));
+
+    tests->push_back(new testContainer(
+        new BWEConvergenceTestUp("Convergence 256->512", 256, 512),
+        true,
+        0,
+        "Set bandwidth limit to 512 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWEConvergenceTestUp("Convergence 1024->5120", 1024, 5120),
+        true,
+        0,
+        "Set bandwidth limit to 5120 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWETwoWayLimitFinding("Asymmetric limit finding {1024, 2048} kbps",
+        500, 1024,
+        500, 2048,
+        isMaster),
+        true,
+        0,
+        "Set bandwidth limit to {1024, 2048} kbps asymmetric"));
+
+    tests->push_back(new testContainer(
+        new BWETwoWayLimitFinding("Symmetric limit finding {1024, 1024} kbps",
+        500, 1024,
+        500, 1024,
+        isMaster),
+        true,
+        0,
+        "Set bandwidth limit to 1024 kbps symmetric"));
+}
+
+
+// Entry point. Usage: BWETester dstIP port master
+// Appends a timestamped session header to TestLog.txt, builds the test
+// suite, then runs each test sequentially (optionally waiting for a key
+// press and/or a per-role start delay), logging a report after each.
+int main(int argc, char* argv[])
+{
+
+    bool isMaster = false;
+    WebRtc_UWord16 port;
+    std::string ip;
+    std::fstream log;
+    log.open("TestLog.txt", std::fstream::out | std::fstream::app);
+
+    log << "\n\nBWE TESTER\n";
+
+    time_t t = time(0);   // get time now
+    struct tm * now = localtime( & t );
+    log << (now->tm_year + 1900) << '-'
+        << (now->tm_mon + 1) << '-'
+        <<  now->tm_mday << " "
+        <<  now->tm_hour << ":" << now->tm_min
+        << "\n";
+
+    if (argc == 4)
+    {
+        // read IP
+        ip = argv[1];
+
+        // read port
+        port = atoi(argv[2]);
+
+        // read master/slave
+        isMaster = (atoi(argv[3]) != 0);
+
+        std::cout << "Destination: " << ip << "\n";
+        log << "Destination: " << ip << "\n";
+        std::cout << "Port: " << port << "\n";
+        log << "Port: " << port << "\n";
+        if (isMaster)
+        {
+            std::cout << "Master\n";
+            log << "Master\n";
+        }
+        else
+        {
+            std::cout << "Slave\n";
+            log << "Slave\n";
+        }
+
+    }
+    else
+    {
+        printf("Usage\nBWETester dstIP port master\n");
+        exit(1);
+    }
+
+    std::vector<testContainer*> tests;
+    PopulateTests(&tests, isMaster);
+
+    int testIndex = 0;
+    EventWrapper* event = EventWrapper::Create();
+    std::vector<testContainer*>::iterator it;
+    for (it=tests.begin() ; it < tests.end(); it++)
+    {
+        ++testIndex;
+
+        BWETest *theTest = (*it)->_test;
+
+        if (theTest)
+        {
+            std::cout << "\nTest " << testIndex << ": " << theTest->TestName() << "\n";
+        }
+
+        // Print instructions
+        std::cout << "--> " << (*it)->_instr << std::endl;
+
+        if ((isMaster && (*it)->_waitMaster)
+            || (!isMaster && (*it)->_waitSlave))
+        {
+            // Wait for a key press
+            std::cout << "Press enter to start test\n";
+            getc(stdin);
+        }
+
+        // role-specific start delay (lets the operator reconfigure the link)
+        if (isMaster)
+        {
+            if ((*it)->_delayMaster > 0)
+            {
+                // Wait
+                std::cout << "Test starting in "
+                    << (*it)->_delayMaster
+                    << " seconds" << std::endl;
+                event->Wait((*it)->_delayMaster * 1000);
+            }
+        }
+        else
+        {
+            if ((*it)->_delaySlave > 0)
+            {
+                // Wait
+                std::cout << "Test starting in "
+                    << (*it)->_delaySlave
+                    << " seconds" << std::endl;
+                event->Wait((*it)->_delaySlave * 1000);
+            }
+        }
+
+        // Start execution
+        if (theTest)
+        {
+            theTest->SetMaster(isMaster);
+            if (theTest->Init(ip, port) != 0)
+            {
+                // NOTE(review): an uncaught const char* throw terminates
+                // the process; the exit(1) below is unreachable.
+                throw "Error initializing sender";
+                exit (1);
+            }
+
+            // ProcLoop() blocks until the test's stopping criterion fires
+            theTest->Start();
+            theTest->ProcLoop();
+            theTest->Stop();
+            theTest->Report(log);
+            log << std::flush;
+        }
+
+        delete (*it); // deletes the test too
+    }
+    delete event;
+    event = NULL;
+
+    log.close();
+    return (0);
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.cc
new file mode 100644
index 0000000..043c7b0
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "BWETwoWayLimitFinding.h"
+#include "TestLoadGenerator.h"
+
+
+// Two-way limit-finding test: both sides send, each toward a known link
+// capacity; picks the start rate / capacity pair for this side's role.
+// Sets _master directly, which is safe here because Init() has not run yet.
+BWETwoWayLimitFinding::BWETwoWayLimitFinding(
+    std::string testName,
+    int masterStartRateKbps, int masterAvailBWkbps,
+    int slaveStartRateKbps, int slaveAvailBWkbps,
+    bool isMaster /*= false*/)
+    :
+BWETest(testName, (isMaster ? masterStartRateKbps : slaveStartRateKbps)),
+_availBWkbps(isMaster ? masterAvailBWkbps : slaveAvailBWkbps),
+_incomingAvailBWkbps(isMaster ? slaveAvailBWkbps : masterAvailBWkbps),
+_forwLimitReached(false),
+_revLimitReached(false)
+{
+    _master = isMaster;
+}
+
+
+// Releases the load generator created in Init().
+BWETwoWayLimitFinding::~BWETwoWayLimitFinding()
+{
+    if (_gen)
+    {
+        delete _gen;
+        _gen = NULL;
+    }
+}
+
+
+// Creates the constant-bitrate, fixed-frame-rate load generator and then
+// runs the base-class initialization. The slave additionally stops on
+// receive timeout (the incoming stream dying ends the test).
+int BWETwoWayLimitFinding::Init(std::string ip, WebRtc_UWord16 port)
+{
+    // create the load generator object
+    const int rtpSampleRate = 90000;
+    const int frameRate = 30;
+    const double spreadFactor = 0.2;
+
+    _gen = new CBRFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate, spreadFactor);
+    // NOTE(review): plain `new` throws std::bad_alloc rather than
+    // returning NULL, so this check is effectively dead code.
+    if (!_gen)
+    {
+        return (-1);
+    }
+
+    if (!_master) UseRecvTimeout(); // slave shuts down when incoming stream dies
+
+    return BWETest::Init(ip, port);
+}
+
+
+// Master stop condition: latches a flag once the send rate exceeds 95% of
+// this side's known capacity, and another once the receive rate exceeds
+// 95% of the far side's capacity; the test stops when both have latched.
+bool BWETwoWayLimitFinding::StoppingCriterionMaster()
+{
+    if ((_sendrec->BitrateSent() / 1000.0) > (0.95 * _availBWkbps))
+    {
+        _forwLimitReached = true;
+    }
+
+    WebRtc_Word32 revRateKbps = _sendrec->ReceiveBitrateKbps();
+    if (revRateKbps > (0.95 * _incomingAvailBWkbps))
+    {
+        _revLimitReached = true;
+    }
+
+    return (_forwLimitReached && _revLimitReached);
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.h
new file mode 100644
index 0000000..fc790e5
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETWOWAYLIMITFINDING_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETWOWAYLIMITFINDING_H_
+
+#include "BWETestBase.h"
+
+// BWE test that ramps both directions toward known (possibly asymmetric)
+// link capacities and stops once both directions reach ~95% of capacity
+// (master) or the incoming stream times out (slave).
+class BWETwoWayLimitFinding : public BWETest
+{
+public:
+    BWETwoWayLimitFinding(std::string testName,
+        int masterStartRateKbps, int masterAvailBWkbps,
+        int slaveStartRateKbps, int slaveAvailBWkbps,
+        bool isMaster = false);
+
+    virtual ~BWETwoWayLimitFinding();
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+
+protected:
+    virtual bool StoppingCriterionMaster();
+    //virtual bool StoppingCriterionSlave();
+
+private:
+    int _availBWkbps;          // capacity of this side's outgoing link
+    int _incomingAvailBWkbps;  // capacity of the far side's outgoing link
+    bool _forwLimitReached;    // latched: outgoing rate hit 95% of capacity
+    bool _revLimitReached;     // latched: incoming rate hit 95% of capacity
+
+};
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETWOWAYLIMITFINDING_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc
new file mode 100644
index 0000000..9c81fd0
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc
@@ -0,0 +1,1071 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "MatlabPlot.h"
+#ifdef MATLAB
+#include "engine.h"
+#endif
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "tick_util.h"
+
+#include <sstream>
+#include <algorithm>
+#include <math.h>
+#include <stdio.h>
+
+using namespace webrtc;
+
+#ifdef MATLAB
+MatlabEngine eng;
+
+// A single plotted line. maxLen > 0 caps the number of stored points
+// (Matlab arrays are pre-allocated to that size); maxLen <= 0 means
+// unbounded. plotAttrib is a Matlab line-style string (e.g. "r--");
+// name, when set, is used for legend and workspace variable names.
+MatlabLine::MatlabLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/)
+:
+_xArray(NULL),
+_yArray(NULL),
+_maxLen(maxLen),
+_plotAttribute(),
+_name()
+{
+    if (_maxLen > 0)
+    {
+        _xArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+        _yArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+    }
+
+    if (plotAttrib)
+    {
+        _plotAttribute = plotAttrib;
+    }
+
+    if (name)
+    {
+        _name = name;
+    }
+}
+
+// Frees the Matlab-allocated x/y arrays (if any).
+MatlabLine::~MatlabLine()
+{
+    if (_xArray != NULL)
+    {
+        mxDestroyArray(_xArray);
+    }
+    if (_yArray != NULL)
+    {
+        mxDestroyArray(_yArray);
+    }
+}
+
+// Adds a data point. Points are stored newest-first (push_front); when a
+// cap is set, old points beyond _maxLen are trimmed BEFORE the push, so
+// the containers can hold _maxLen + 1 points after this returns.
+// NOTE(review): `>` (not `>=`) plus trim-before-push makes the steady-state
+// size _maxLen + 1 rather than _maxLen — confirm whether that is intended.
+void MatlabLine::Append(double x, double y)
+{
+    if (_maxLen > 0 && _xData.size() > static_cast<WebRtc_UWord32>(_maxLen))
+    {
+        _xData.resize(_maxLen);
+        _yData.resize(_maxLen);
+    }
+
+    _xData.push_front(x);
+    _yData.push_front(y);
+}
+
+
+// append y-data with running integer index as x-data
+void MatlabLine::Append(double y)
+{
+    if (_xData.empty())
+    {
+        // first element is index 0
+        Append(0, y);
+    }
+    else
+    {
+        // Points are stored newest-first (Append uses push_front), so the
+        // most recent x-value is at the FRONT of the list. The original
+        // code read _xData.back() — the oldest x-value — which made every
+        // auto-indexed point after the first get x = oldest + 1.
+        double temp = _xData.front(); // most recent x-value
+        Append(temp + 1, y);
+    }
+}
+
+
+// Changes the point cap. maxLen <= 0 removes the cap; otherwise the Matlab
+// arrays are re-allocated to the new size and stored data is truncated to
+// at most maxLen points (resize keeps the front of the deque, i.e. the
+// newest points, since data is stored newest-first).
+void MatlabLine::SetMaxLen(int maxLen)
+{
+    if (maxLen <= 0)
+    {
+        // means no maxLen
+        _maxLen = -1;
+    }
+    else
+    {
+        _maxLen = maxLen;
+
+        if (_xArray != NULL)
+        {
+            mxDestroyArray(_xArray);
+            mxDestroyArray(_yArray);
+        }
+        _xArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+        _yArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+
+        // clamp to current data size before truncating
+        maxLen = ((unsigned int)maxLen <= _xData.size()) ? maxLen : (int)_xData.size();
+        _xData.resize(maxLen);
+        _yData.resize(maxLen);
+
+        //// reserve the right amount of memory
+        //_xData.reserve(_maxLen);
+        //_yData.reserve(_maxLen);
+    }
+}
+
+// Sets the Matlab line-style string (e.g. "r--").
+void MatlabLine::SetAttribute(char *plotAttrib)
+{
+    _plotAttribute = plotAttrib;
+}
+
+// Sets the line's name, used for legend and workspace variable names.
+void MatlabLine::SetName(char *name)
+{
+    _name = name;
+}
+
+// Copies the stored points into the Matlab arrays (growing them if the
+// data outgrew the allocation) and returns them via the out-parameters.
+// The returned arrays remain owned by this object. Returns without
+// touching the out-parameters when there is no data.
+void MatlabLine::GetPlotData(mxArray** xData, mxArray** yData)
+{
+    // Make sure we have enough Matlab allocated memory.
+    // Assuming both arrays (x and y) are of the same size.
+    if (_xData.empty())
+    {
+        return; // No data
+    }
+    unsigned int size = 0;
+    if (_xArray != NULL)
+    {
+        size = (unsigned int)mxGetNumberOfElements(_xArray);
+    }
+    if (size < _xData.size())
+    {
+        // grow: discard the old arrays and allocate to fit current data
+        if (_xArray != NULL)
+        {
+            mxDestroyArray(_xArray);
+            mxDestroyArray(_yArray);
+        }
+        _xArray = mxCreateDoubleMatrix(1, _xData.size(), mxREAL);
+        _yArray = mxCreateDoubleMatrix(1, _yData.size(), mxREAL);
+    }
+
+    // copy x values (newest-first order, as stored)
+    if (!_xData.empty())
+    {
+        double* x = mxGetPr(_xArray);
+
+        std::list<double>::iterator it = _xData.begin();
+
+        for (int i = 0; it != _xData.end(); it++, i++)
+        {
+            x[i] = *it;
+        }
+    }
+
+    // copy y values
+    if (!_yData.empty())
+    {
+        double* y = mxGetPr(_yArray);
+
+        std::list<double>::iterator it = _yData.begin();
+
+        for (int i = 0; it != _yData.end(); it++, i++)
+        {
+            y[i] = *it;
+        }
+    }
+    *xData = _xArray;
+    *yData = _yArray;
+}
+
+// Matlab workspace variable name for this line's x data ("x_<name>").
+std::string MatlabLine::GetXName()
+{
+    std::ostringstream xString;
+    xString << "x_" << _name;
+    return xString.str();
+}
+
+// Matlab workspace variable name for this line's y data ("y_<name>").
+std::string MatlabLine::GetYName()
+{
+    std::ostringstream yString;
+    yString << "y_" << _name;
+    return yString.str();
+}
+
+// Argument fragment for a Matlab plot() call: the x/y ranges plus the
+// line-style attribute; uses a dummy range when there is no data yet.
+std::string MatlabLine::GetPlotString()
+{
+
+    std::ostringstream s;
+
+    if (_xData.size() == 0)
+    {
+        s << "[0 1], [0 1]"; // To get an empty plot
+    }
+    else
+    {
+        s << GetXName() << "(1:" << _xData.size() << "),";
+        s << GetYName() << "(1:" << _yData.size() << ")";
+    }
+
+    s << ", '";
+    s << _plotAttribute;
+    s << "'";
+
+    return s.str();
+}
+
+// Matlab command updating an existing line handle `h` with current data;
+// plots NaN (an invisible point) when there is no data.
+std::string MatlabLine::GetRefreshString()
+{
+    std::ostringstream s;
+
+    if (_xData.size() > 0)
+    {
+        s << "set(h,'xdata',"<< GetXName() <<"(1:" << _xData.size() << "),'ydata',"<< GetYName() << "(1:" << _yData.size() << "));";
+    }
+    else
+    {
+        s << "set(h,'xdata',[NaN],'ydata',[NaN]);";
+    }
+    return s.str();
+}
+
+// Quoted name for a Matlab legend() call.
+std::string MatlabLine::GetLegendString()
+{
+    return ("'" + _name + "'");
+}
+
+// True when the line was given a name and should appear in the legend.
+bool MatlabLine::hasLegend()
+{
+    return (!_name.empty());
+}
+
+
+// remove data points, but keep attributes
+void MatlabLine::Reset()
+{
+    _xData.clear();
+    _yData.clear();
+}
+
+
+// Replaces this line's data with a two-point straight line y = slope*x +
+// offset spanning the x-range of sourceData; style and name are kept.
+void MatlabLine::UpdateTrendLine(MatlabLine * sourceData, double slope, double offset)
+{
+    Reset(); // reset data, not attributes and name
+
+    double thexMin = sourceData->xMin();
+    double thexMax = sourceData->xMax();
+    Append(thexMin, thexMin * slope + offset);
+    Append(thexMax, thexMax * slope + offset);
+}
+
+// Smallest stored x-value; 0 when empty.
+double MatlabLine::xMin()
+{
+    if (!_xData.empty())
+    {
+        std::list<double>::iterator theStart = _xData.begin();
+        std::list<double>::iterator theEnd = _xData.end();
+        return(*min_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+// Largest stored x-value; 0 when empty.
+double MatlabLine::xMax()
+{
+    if (!_xData.empty())
+    {
+        std::list<double>::iterator theStart = _xData.begin();
+        std::list<double>::iterator theEnd = _xData.end();
+        return(*max_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+// Smallest stored y-value; 0 when empty.
+double MatlabLine::yMin()
+{
+    if (!_yData.empty())
+    {
+        std::list<double>::iterator theStart = _yData.begin();
+        std::list<double>::iterator theEnd = _yData.end();
+        return(*min_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+// Largest stored y-value; 0 when empty.
+double MatlabLine::yMax()
+{
+    if (!_yData.empty())
+    {
+        std::list<double>::iterator theStart = _yData.begin();
+        std::list<double>::iterator theEnd = _yData.end();
+        return(*max_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+
+
+// Construct a time line. horizonSeconds <= 0 keeps all samples; a negative
+// refTimeMs means "use the current time as the reference timestamp".
+MatlabTimeLine::MatlabTimeLine(int horizonSeconds /*= -1*/, const char *plotAttrib /*= NULL*/,
+                               const char *name /*= NULL*/,
+                               WebRtc_Word64 refTimeMs /* = -1*/)
+                               :
+// BUG FIX: the base class is always initialized before members; listing the
+// member first triggered -Wreorder and misrepresented the real order.
+MatlabLine(-1, plotAttrib, name), // infinite number of elements
+_timeHorizon(horizonSeconds)
+{
+    if (refTimeMs < 0)
+        _refTimeMs = TickTime::MillisecondTimestamp();
+    else
+        _refTimeMs = refTimeMs;
+}
+
+// Append a sample, using elapsed seconds since the reference time as the x
+// coordinate, then discard samples that fell outside the time horizon.
+void MatlabTimeLine::Append(double y)
+{
+    const double elapsedSec =
+        static_cast<double>(TickTime::MillisecondTimestamp() - _refTimeMs) / 1000.0;
+    MatlabLine::Append(elapsedSec, y);
+    PurgeOldData();
+}
+
+
+// Drop samples older than the configured time horizon. A non-positive
+// horizon disables purging entirely.
+void MatlabTimeLine::PurgeOldData()
+{
+    if (_timeHorizon > 0)
+    {
+        // remove old data
+        double historyLimit = static_cast<double>(TickTime::MillisecondTimestamp() - _refTimeMs) / 1000.0
+            - _timeHorizon; // remove data points older than this
+
+        // NOTE(review): the walk starts at rbegin() and the purge truncates
+        // with resize(), i.e. elements are removed from the *back* of the
+        // lists. That is only correct if MatlabLine::Append stores the newest
+        // sample at the front (oldest at the back) -- confirm against
+        // MatlabLine::Append, which is not visible in this file section.
+        std::list<double>::reverse_iterator ritx = _xData.rbegin();
+        WebRtc_UWord32 removeCount = 0;
+        while (ritx != _xData.rend())
+        {
+            if (*ritx >= historyLimit)
+            {
+                break;
+            }
+            ritx++;
+            removeCount++;
+        }
+        if (removeCount == 0)
+        {
+            return;
+        }
+
+        // remove the range [begin, it).
+        //if (removeCount > 10)
+        //{
+        //    printf("Removing %lu elements\n", removeCount);
+        //}
+        _xData.resize(_xData.size() - removeCount);
+        _yData.resize(_yData.size() - removeCount);
+    }
+}
+
+
+// Reference timestamp (ms) that all x values on this line are relative to.
+WebRtc_Word64 MatlabTimeLine::GetRefTime()
+{
+    return _refTimeMs;
+}
+
+
+
+
+// Construct an empty, enabled plot with no figure handle assigned yet
+// (the MatlabEngine assigns one via SetFigHandle when the plot registers).
+MatlabPlot::MatlabPlot()
+:
+_figHandle(-1),
+_smartAxis(false),
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_timeToPlot(false),
+_plotting(false),
+_enabled(true),
+_firstPlot(true),
+_legendEnabled(true),
+_donePlottingEvent(EventWrapper::Create())
+{
+    CriticalSectionScoped cs(_critSect);
+
+    // Axis limits start collapsed; SmartAxis mode grows them to fit data.
+    _xlim[0] = 0;
+    _xlim[1] = 0;
+    _ylim[0] = 0;
+    _ylim[1] = 0;
+
+#ifdef PLOT_TESTING
+    // -1 marks "no render request pending" for delay measurement.
+    _plotStartTime = -1;
+    _plotDelay = 0;
+#endif
+
+}
+
+
+// Destroy the plot and every line it owns. The lock is taken for the rest
+// of the destructor and released by deleting the critical section itself.
+MatlabPlot::~MatlabPlot()
+{
+    _critSect->Enter();
+
+    // delete all line objects
+    while (!_line.empty())
+    {
+        delete _line.back();
+        _line.pop_back();
+    }
+
+    delete _critSect;
+    delete _donePlottingEvent;
+}
+
+
+// Add a plain data line to the plot. Returns the new line's index, or -1 if
+// plotting is disabled. maxLen < 0 means an unbounded number of samples.
+int MatlabPlot::AddLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    MatlabLine *newLine = new MatlabLine(maxLen, plotAttrib, name);
+    _line.push_back(newLine);
+
+    return (static_cast<int>(_line.size() - 1)); // index of newly inserted line
+}
+
+
+// Add a time line (x = seconds since refTimeMs) to the plot. Returns the
+// new line's index, or -1 if plotting is disabled.
+int MatlabPlot::AddTimeLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/,
+                            WebRtc_Word64 refTimeMs /*= -1*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    MatlabTimeLine *newLine = new MatlabTimeLine(maxLen, plotAttrib, name, refTimeMs);
+    _line.push_back(newLine);
+
+    return (static_cast<int>(_line.size() - 1)); // index of newly inserted line
+}
+
+
+// Return the index of the line whose name matches, or -1 when no such line
+// exists (or plotting is disabled).
+int MatlabPlot::GetLineIx(const char *name)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    // Linear scan over the line vector, comparing names.
+    int index = 0;
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); ++it, ++index)
+    {
+        if ((*it)->_name == name)
+        {
+            return index;
+        }
+    }
+
+    return -1;
+}
+
+
+// Append the point (x, y) to the line at lineIndex.
+// Throws (a const char* message) if the index is out of range.
+void MatlabPlot::Append(int lineIndex, double x, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return;
+    }
+
+    // sanity for index
+    if (lineIndex < 0 || lineIndex >= static_cast<int>(_line.size()))
+    {
+        // BUG FIX: removed the unreachable exit(1) that followed this throw.
+        throw "Line index out of range";
+    }
+
+    _line[lineIndex]->Append(x, y);
+}
+
+
+// Append the sample y to the line at lineIndex; the line itself chooses the
+// x coordinate (e.g. a MatlabTimeLine uses the current time).
+// Throws (a const char* message) if the index is out of range.
+void MatlabPlot::Append(int lineIndex, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return;
+    }
+
+    // sanity for index
+    if (lineIndex < 0 || lineIndex >= static_cast<int>(_line.size()))
+    {
+        // BUG FIX: removed the unreachable exit(1) that followed this throw.
+        throw "Line index out of range";
+    }
+
+    _line[lineIndex]->Append(y);
+}
+
+
+// Append (x, y) to the line called name, creating the line on demand.
+// Returns the line's index, or -1 if plotting is disabled.
+int MatlabPlot::Append(const char *name, double x, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    // search the list for a matching line name
+    int lineIx = GetLineIx(name);
+
+    if (lineIx < 0) //(!matchFound)
+    {
+        // no match; append new line
+        lineIx = AddLine(-1, NULL, name);
+    }
+
+    // append data to line
+    Append(lineIx, x, y);
+    return (lineIx);
+}
+
+// Append y to the line called name, creating the line on demand.
+// Returns the line's index, or -1 if plotting is disabled.
+int MatlabPlot::Append(const char *name, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    // search the list for a matching line name
+    int lineIx = GetLineIx(name);
+
+    if (lineIx < 0) //(!matchFound)
+    {
+        // no match; append new line
+        lineIx = AddLine(-1, NULL, name);
+    }
+
+    // append data to line
+    Append(lineIx, y);
+    return (lineIx);
+}
+
+// Number of samples stored in the named line, or -1 when the line does not
+// exist or plotting is disabled.
+int MatlabPlot::Length(char *name)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    int ix = GetLineIx(name);
+    if (ix >= 0)
+    {
+        return (static_cast<int>(_line[ix]->_xData.size()));
+    }
+    else
+    {
+        return (-1);
+    }
+}
+
+
+// Change the Matlab plot attribute string (e.g. "r--") of the named line.
+// Silently does nothing when the line does not exist.
+void MatlabPlot::SetPlotAttribute(char *name, char *plotAttrib)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return;
+    }
+
+    int lineIx = GetLineIx(name);
+
+    if (lineIx >= 0)
+    {
+        _line[lineIx]->SetAttribute(plotAttrib);
+    }
+}
+
+// Must be called under critical section _critSect
+// Push every line's x/y data arrays into the Matlab engine workspace.
+void MatlabPlot::UpdateData(Engine* ep)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+    {
+        mxArray* xData = NULL;
+        mxArray* yData = NULL;
+        (*it)->GetPlotData(&xData, &yData);
+        if (xData != NULL)
+        {
+            std::string xName = (*it)->GetXName();
+            std::string yName = (*it)->GetYName();
+            // Release the lock around the (potentially slow) engine calls.
+            // NOTE(review): if another thread adds or removes lines while
+            // the lock is released, the iterator above is invalidated --
+            // confirm concurrent AddLine cannot happen during a render.
+            _critSect->Leave();
+#ifdef MATLAB6
+            // Matlab 6 API: arrays carry their own workspace names.
+            mxSetName(xData, xName.c_str());
+            mxSetName(yData, yName.c_str());
+            engPutArray(ep, xData);
+            engPutArray(ep, yData);
+#else
+            int ret = engPutVariable(ep, xName.c_str(), xData);
+            assert(ret == 0);
+            ret = engPutVariable(ep, yName.c_str(), yData);
+            assert(ret == 0);
+#endif
+            _critSect->Enter();
+        }
+    }
+}
+
+// Build the Matlab command string for this plot (full plot the first time,
+// refresh afterwards) and push the plot data to the engine.
+// Returns true when there is something to plot.
+bool MatlabPlot::GetPlotCmd(std::ostringstream & cmd, Engine* ep)
+{
+    _critSect->Enter();
+
+    if (!DataAvailable())
+    {
+        // BUG FIX: this early return previously kept the critical section
+        // locked, dead-locking every subsequent caller.
+        _critSect->Leave();
+        return false;
+    }
+
+    if (_firstPlot)
+    {
+        // First render: emit a full "figure; plot(...)" command.
+        GetPlotCmd(cmd);
+        _firstPlot = false;
+    }
+    else
+    {
+        // Later renders only refresh the existing figure's data.
+        GetRefreshCmd(cmd);
+    }
+
+    UpdateData(ep);
+
+    _critSect->Leave();
+
+    return true;
+}
+
+// Call inside critsect
+// Compose the initial "figure; plot(...)" command covering every line, plus
+// optional legend, smart-axis limits, and per-line Tag assignments that
+// GetRefreshCmd relies on later.
+void MatlabPlot::GetPlotCmd(std::ostringstream & cmd)
+{
+    // we have something to plot
+    // empty the stream
+    cmd.str(""); // (this seems to be the only way)
+
+    // BUG FIX: guard against an empty line set; the unconditional
+    // dereference of _line.begin() below was undefined behavior.
+    if (_line.empty())
+    {
+        return;
+    }
+
+    cmd << "figure; h" << _figHandle << "= plot(";
+
+    // first line
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    cmd << (*it)->GetPlotString();
+
+    it++;
+
+    // remaining lines
+    for (; it != _line.end(); it++)
+    {
+        cmd << ", ";
+        cmd << (*it)->GetPlotString();
+    }
+
+    cmd << "); ";
+
+    if (_legendEnabled)
+    {
+        GetLegendCmd(cmd);
+    }
+
+    if (_smartAxis)
+    {
+        // Grow the x limits monotonically and keep y limits symmetric
+        // around zero, covering every line's data.
+        double xMin = _xlim[0];
+        double xMax = _xlim[1];
+        double yMax = _ylim[1];
+        for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+        {
+            xMax = std::max(xMax, (*it)->xMax());
+            xMin = std::min(xMin, (*it)->xMin());
+
+            yMax = std::max(yMax, (*it)->yMax());
+            yMax = std::max(yMax, fabs((*it)->yMin()));
+        }
+        _xlim[0] = xMin;
+        _xlim[1] = xMax;
+        _ylim[0] = -yMax;
+        _ylim[1] = yMax;
+
+        cmd << "axis([" << _xlim[0] << ", " << _xlim[1] << ", " << _ylim[0] << ", " << _ylim[1] << "]);";
+    }
+
+    // Tag each plotted line so GetRefreshCmd can locate it with findobj.
+    int i=1;
+    for (it = _line.begin(); it != _line.end(); i++, it++)
+    {
+        cmd << "set(h" << _figHandle << "(" << i << "), 'Tag', " << (*it)->GetLegendString() << ");";
+    }
+}
+
+// Call inside critsect
+// Compose per-line refresh commands: locate each plotted line by the Tag set
+// in GetPlotCmd and update its data in place (cheaper than re-plotting).
+void MatlabPlot::GetRefreshCmd(std::ostringstream & cmd)
+{
+    cmd.str(""); // (this seems to be the only way)
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    for (it = _line.begin(); it != _line.end(); it++)
+    {
+        cmd << "h = findobj(0, 'Tag', " << (*it)->GetLegendString() << ");";
+        cmd << (*it)->GetRefreshString();
+    }
+    //if (_legendEnabled)
+    //{
+    //    GetLegendCmd(cmd);
+    //}
+}
+
+// Append a Matlab legend command listing every line's legend string.
+// Does nothing when no line has a legend.
+void MatlabPlot::GetLegendCmd(std::ostringstream & cmd)
+{
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    bool anyLegend = false;
+    for (; it != _line.end(); it++)
+    {
+        anyLegend = anyLegend || (*it)->hasLegend();
+    }
+    if (anyLegend)
+    {
+        // create the legend
+
+        cmd << "legend(h" << _figHandle << ",{";
+
+
+        // iterate lines
+        int i = 0;
+        for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+        {
+            if (i > 0)
+            {
+                cmd << ", ";
+            }
+            cmd << (*it)->GetLegendString();
+            i++;
+        }
+
+        cmd << "}, 2); "; // place legend in upper-left corner
+    }
+}
+
+// Call inside critsect
+// Purge stale samples from every line as a side effect. Note: when the plot
+// is enabled this always returns true, even if no lines exist yet.
+bool MatlabPlot::DataAvailable()
+{
+    if (!_enabled)
+    {
+        return false;
+    }
+
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+    {
+        (*it)->PurgeOldData();
+    }
+
+    return true;
+}
+
+// Request a render; the MatlabEngine thread picks it up via TimeToPlot().
+void MatlabPlot::Plot()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _timeToPlot = true;
+
+#ifdef PLOT_TESTING
+    // Remember when the render was requested so the delay can be measured.
+    _plotStartTime = TickTime::MillisecondTimestamp();
+#endif
+}
+
+
+// Re-enable the plot and clear the data (not the attributes) of all lines.
+void MatlabPlot::Reset()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _enabled = true;
+
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+    {
+        (*it)->Reset();
+    }
+
+}
+
+// Assign the Matlab figure handle; non-positive handles are ignored.
+void MatlabPlot::SetFigHandle(int handle)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (handle > 0)
+        _figHandle = handle;
+}
+
+// True when the plot is enabled and a render was requested via Plot().
+bool
+MatlabPlot::TimeToPlot()
+{
+    CriticalSectionScoped cs(_critSect);
+    return _enabled && _timeToPlot;
+}
+
+// Engine thread marks that it has started rendering this plot.
+void
+MatlabPlot::Plotting()
+{
+    CriticalSectionScoped cs(_critSect);
+    _plotting = true;
+}
+
+// Engine thread marks the render finished and wakes DisablePlot() waiters.
+void
+MatlabPlot::DonePlotting()
+{
+    CriticalSectionScoped cs(_critSect);
+    _timeToPlot = false;
+    _plotting = false;
+    _donePlottingEvent->Set();
+}
+
+// Disable the plot, blocking until any in-progress render has completed.
+void
+MatlabPlot::DisablePlot()
+{
+    _critSect->Enter();
+    while (_plotting)
+    {
+        // Drop the lock while waiting so the engine thread can finish.
+        _critSect->Leave();
+        _donePlottingEvent->Wait(WEBRTC_EVENT_INFINITE);
+        _critSect->Enter();
+    }
+    _enabled = false;
+    // BUG FIX: the function previously returned without releasing the
+    // critical section, leaving the plot's lock held forever.
+    _critSect->Leave();
+}
+
+// Create or update a two-point trend line y = slope * x + offset spanning
+// the x-range of the line named sourceName. Returns the trend line's index,
+// or -1 when the source line does not exist.
+int MatlabPlot::MakeTrend(const char *sourceName, const char *trendName, double slope, double offset, const char *plotAttrib)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    const int sourceIx = GetLineIx(sourceName);
+    if (sourceIx < 0)
+    {
+        // could not find source
+        return (-1);
+    }
+
+    int trendIx = GetLineIx(trendName);
+    if (trendIx < 0)
+    {
+        // no trend found; add new line
+        trendIx = AddLine(2 /*maxLen*/, plotAttrib, trendName);
+    }
+
+    _line[trendIx]->UpdateTrendLine(_line[sourceIx], slope, offset);
+
+    return (trendIx);
+}
+
+
+// Create the engine and start the background render thread, which owns the
+// Matlab engine connection. Throws (const char*) if the thread cannot be
+// created.
+MatlabEngine::MatlabEngine()
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_eventPtr(NULL),
+_plotThread(NULL),
+_running(false),
+_numPlots(0)
+{
+    _eventPtr = EventWrapper::Create();
+
+    _plotThread = ThreadWrapper::CreateThread(MatlabEngine::PlotThread, this, kLowPriority, "MatlabPlot");
+
+    if (_plotThread == NULL)
+    {
+        // BUG FIX: removed the unreachable exit(1) that followed this throw.
+        throw "Unable to start MatlabEngine thread";
+    }
+
+    _running = true;
+
+    unsigned int tid;
+    _plotThread->Start(tid);
+}
+
+// Stop the render thread and tear the engine down. Plots registered via
+// NewPlot are NOT deleted here (ownership stays with the caller).
+MatlabEngine::~MatlabEngine()
+{
+    _critSect->Enter();
+
+    if (_plotThread)
+    {
+        _plotThread->SetNotAlive();
+        _running = false;
+        _eventPtr->Set();
+
+        while (!_plotThread->Stop())
+        {
+            // BUG FIX: release the lock while waiting. PlotThread acquires
+            // _critSect in its loop, so spinning here with the lock held
+            // could dead-lock shutdown (same pattern as
+            // TestLoadGenerator::Stop).
+            _critSect->Leave();
+            _critSect->Enter();
+        }
+
+        delete _plotThread;
+    }
+
+    _plots.clear();
+
+    _plotThread = NULL;
+
+    delete _eventPtr;
+    _eventPtr = NULL;
+
+    _critSect->Leave();
+    delete _critSect;
+}
+
+// Register an externally allocated plot with the engine, assigning it the
+// next figure handle. Returns newPlot unchanged (NULL is tolerated).
+MatlabPlot * MatlabEngine::NewPlot(MatlabPlot *newPlot)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    //MatlabPlot *newPlot = new MatlabPlot();
+
+    if (newPlot)
+    {
+        newPlot->SetFigHandle(++_numPlots); // first plot is number 1
+        _plots.push_back(newPlot);
+    }
+
+    return (newPlot);
+
+}
+
+
+// Unregister a plot from the engine and delete it. NULL is a no-op; passing
+// a plot that was never registered is treated as a caller error.
+void MatlabEngine::DeletePlot(MatlabPlot *plot)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (plot == NULL)
+    {
+        return;
+    }
+
+    std::vector<MatlabPlot *>::iterator it;
+    for (it = _plots.begin(); it < _plots.end(); it++)
+    {
+        if (plot == *it)
+        {
+            break;
+        }
+    }
+
+    // BUG FIX: previously *it was dereferenced (in an assert and below)
+    // even when the plot was not found, which is undefined behavior on the
+    // end iterator.
+    if (it == _plots.end())
+    {
+        assert(false && "DeletePlot called with an unregistered plot");
+        return;
+    }
+
+    (*it)->DisablePlot();
+
+    _plots.erase(it);
+    --_numPlots;
+
+    delete plot;
+}
+
+
+// Render thread body. Opens the Matlab engine, then loops: for every plot
+// with a pending render request, pushes its data and evaluates the plot or
+// refresh command. Returns false (ending the thread) if the engine dies.
+bool MatlabEngine::PlotThread(void *obj)
+{
+    if (!obj)
+    {
+        return (false);
+    }
+
+    MatlabEngine *eng = (MatlabEngine *) obj;
+
+    Engine *ep = engOpen(NULL);
+    if (!ep)
+    {
+        throw "Cannot open Matlab engine";
+        return (false);
+    }
+
+    engSetVisible(ep, true);
+    engEvalString(ep, "close all;");
+
+    while (eng->_running)
+    {
+        eng->_critSect->Enter();
+
+        // iterate through all plots
+        for (unsigned int ix = 0; ix < eng->_plots.size(); ix++)
+        {
+            MatlabPlot *plot = eng->_plots[ix];
+            if (plot->TimeToPlot())
+            {
+                plot->Plotting();
+                // Release the engine lock around the slow Matlab calls.
+                eng->_critSect->Leave();
+                std::ostringstream cmd;
+
+                // NOTE(review): cmd is still empty here, so this evaluates
+                // an empty command -- it appears to act only as a liveness
+                // probe for the engine; confirm that is the intent.
+                if (engEvalString(ep, cmd.str().c_str()))
+                {
+                    // engine dead
+                    return (false);
+                }
+
+                // empty the stream
+                cmd.str(""); // (this seems to be the only way)
+                if (plot->GetPlotCmd(cmd, ep))
+                {
+                    // things to plot, we have already accessed what we need in the plot
+                    plot->DonePlotting();
+
+                    WebRtc_Word64 start = TickTime::MillisecondTimestamp();
+                    // plot it
+                    int ret = engEvalString(ep, cmd.str().c_str());
+                    // NOTE(review): %I64i is an MSVC-only format specifier;
+                    // non-portable outside Windows builds.
+                    printf("time=%I64i\n", TickTime::MillisecondTimestamp() - start);
+                    if (ret)
+                    {
+                        // engine dead
+                        return (false);
+                    }
+
+#ifdef PLOT_TESTING
+                    // Record how long the request waited before rendering.
+                    if(plot->_plotStartTime >= 0)
+                    {
+                        plot->_plotDelay = TickTime::MillisecondTimestamp() - plot->_plotStartTime;
+                        plot->_plotStartTime = -1;
+                    }
+#endif
+                }
+                eng->_critSect->Enter();
+            }
+        }
+
+        eng->_critSect->Leave();
+        // wait a while
+        eng->_eventPtr->Wait(66); // wake at least every 66 ms to poll _running
+    }
+
+    if (ep)
+    {
+        engClose(ep);
+        ep = NULL;
+    }
+
+    return (true);
+
+}
+
+#endif // MATLAB
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h
new file mode 100644
index 0000000..08c7006
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h
@@ -0,0 +1,170 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
+
+#include <list>
+#include <string>
+#include <vector>
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+}
+
+//#define PLOT_TESTING
+
+#ifdef MATLAB
+
+typedef struct engine Engine;
+typedef struct mxArray_tag mxArray;
+
+// One data series (paired x/y sample lists) rendered by a MatlabPlot.
+class MatlabLine
+{
+    friend class MatlabPlot;
+
+public:
+    // maxLen < 0 means an unbounded number of samples.
+    MatlabLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL);
+    ~MatlabLine();
+    virtual void Append(double x, double y);
+    virtual void Append(double y);
+    void SetMaxLen(int maxLen);
+    void SetAttribute(char *plotAttrib);
+    void SetName(char *name);
+    // Clears the data points but keeps attribute and name.
+    void Reset();
+    virtual void PurgeOldData() {};
+
+    // Turn this line into a two-point trend line over sourceData's x-range.
+    void UpdateTrendLine(MatlabLine * sourceData, double slope, double offset);
+
+    // Extreme values of the stored data; all return 0.0 when empty.
+    double xMin();
+    double xMax();
+    double yMin();
+    double yMax();
+
+protected:
+    // Helpers used by MatlabPlot to build engine commands and data arrays.
+    void GetPlotData(mxArray** xData, mxArray** yData);
+    std::string GetXName();
+    std::string GetYName();
+    std::string GetPlotString();
+    std::string GetRefreshString();
+    std::string GetLegendString();
+    bool hasLegend();
+    std::list<double> _xData;
+    std::list<double> _yData;
+    mxArray* _xArray;
+    mxArray* _yArray;
+    int _maxLen;
+    std::string _plotAttribute;
+    std::string _name;
+};
+
+
+// A MatlabLine whose x axis is wall-clock time (seconds since a reference
+// timestamp) and which discards samples older than a fixed horizon.
+class MatlabTimeLine : public MatlabLine
+{
+public:
+    // horizonSeconds <= 0 keeps all samples; refTimeMs < 0 uses "now".
+    MatlabTimeLine(int horizonSeconds = -1, const char *plotAttrib = NULL, const char *name = NULL,
+        WebRtc_Word64 refTimeMs = -1);
+    ~MatlabTimeLine() {};
+    void Append(double y);
+    void PurgeOldData();
+    WebRtc_Word64 GetRefTime();
+
+private:
+    WebRtc_Word64 _refTimeMs;
+    int _timeHorizon;
+};
+
+
+// A single Matlab figure containing one or more MatlabLines. Data producers
+// append samples while the MatlabEngine thread renders; access is guarded
+// by _critSect.
+class MatlabPlot
+{
+    friend class MatlabEngine;
+
+public:
+    MatlabPlot();
+    ~MatlabPlot();
+
+    // Line management; all return the line index, or -1 on failure.
+    int AddLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL);
+    int AddTimeLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL,
+        WebRtc_Word64 refTimeMs = -1);
+    int GetLineIx(const char *name);
+    // Data appending, by index or by name (by-name creates the line on
+    // demand and returns its index).
+    void Append(int lineIndex, double x, double y);
+    void Append(int lineIndex, double y);
+    int Append(const char *name, double x, double y);
+    int Append(const char *name, double y);
+    int Length(char *name);
+    void SetPlotAttribute(char *name, char *plotAttrib);
+    // Request a render by the engine thread.
+    void Plot();
+    void Reset();
+    void SmartAxis(bool status = true) { _smartAxis = status; };
+    void SetFigHandle(int handle);
+    void EnableLegend(bool enable) { _legendEnabled = enable; };
+
+    // Handshake with the MatlabEngine render thread.
+    bool TimeToPlot();
+    void Plotting();
+    void DonePlotting();
+    void DisablePlot();
+
+    int MakeTrend(const char *sourceName, const char *trendName, double slope, double offset, const char *plotAttrib = NULL);
+
+#ifdef PLOT_TESTING
+    // Render-delay instrumentation (set by Plot, read by PlotThread).
+    WebRtc_Word64 _plotStartTime;
+    WebRtc_Word64 _plotDelay;
+#endif
+
+private:
+    void UpdateData(Engine* ep);
+    bool GetPlotCmd(std::ostringstream & cmd, Engine* ep);
+    void GetPlotCmd(std::ostringstream & cmd); // call inside crit sect
+    void GetRefreshCmd(std::ostringstream & cmd); // call inside crit sect
+    void GetLegendCmd(std::ostringstream & cmd);
+    bool DataAvailable();
+
+    std::vector<MatlabLine *> _line;
+    int _figHandle;
+    bool _smartAxis;
+    double _xlim[2];
+    double _ylim[2];
+    webrtc::CriticalSectionWrapper *_critSect;
+    bool _timeToPlot;
+    bool _plotting;
+    bool _enabled;
+    bool _firstPlot;
+    bool _legendEnabled;
+    webrtc::EventWrapper* _donePlottingEvent;
+};
+
+
+// Owns the background thread that renders all registered MatlabPlot objects
+// through a single Matlab engine connection.
+class MatlabEngine
+{
+public:
+    MatlabEngine();
+    ~MatlabEngine();
+
+    // Registers the (externally created) plot and assigns a figure handle.
+    MatlabPlot * NewPlot(MatlabPlot *newPlot);
+    // Unregisters and deletes the plot.
+    void DeletePlot(MatlabPlot *plot);
+
+private:
+    // Thread function; polls plots and issues Matlab commands.
+    static bool PlotThread(void *obj);
+
+    std::vector<MatlabPlot *> _plots;
+    webrtc::CriticalSectionWrapper *_critSect;
+    webrtc::EventWrapper *_eventPtr;
+    webrtc::ThreadWrapper* _plotThread;
+    bool _running;
+    int _numPlots;
+};
+
+#endif //MATLAB
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc
new file mode 100644
index 0000000..d322242
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc
@@ -0,0 +1,438 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm> // for max function
+#include <stdio.h>
+
+#include "TestLoadGenerator.h"
+#include "TestSenderReceiver.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "tick_util.h"
+
+
+// Thread entry point: forwards to the generator's GeneratorLoop().
+bool SenderThreadFunction(void *obj)
+{
+    TestLoadGenerator *gen = static_cast<TestLoadGenerator *>(obj);
+    if (gen == NULL)
+    {
+        return false;
+    }
+    return gen->GeneratorLoop();
+}
+
+
+// Construct a generator bound to a sender; the worker thread is not created
+// until Start() is called.
+TestLoadGenerator::TestLoadGenerator(TestSenderReceiver *sender, WebRtc_Word32 rtpSampleRate)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_eventPtr(NULL),
+_genThread(NULL),
+_bitrateKbps(0),
+_sender(sender),
+_running(false),
+_rtpSampleRate(rtpSampleRate)
+{
+}
+
+TestLoadGenerator::~TestLoadGenerator ()
+{
+    // NOTE(review): Stop() is virtual, but by the time this base destructor
+    // runs the derived part is already destroyed, so the base implementation
+    // is the one invoked here.
+    if (_running)
+    {
+        Stop();
+    }
+
+    delete _critSect;
+}
+
+// Set a new target bitrate in kbps; negative values are rejected.
+// Returns the bitrate now in effect, or -1 on error.
+WebRtc_Word32 TestLoadGenerator::SetBitrate (WebRtc_Word32 newBitrateKbps)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (newBitrateKbps < 0)
+    {
+        return -1;
+    }
+
+    _bitrateKbps = newBitrateKbps;
+    printf("New bitrate = %i kbps\n", _bitrateKbps);
+    return _bitrateKbps;
+}
+
+
+// Create the wake-up event and the generator thread, then start producing
+// load. Throws (const char*) if the thread cannot be created.
+// NOTE(review): calling Start twice leaks the previous event and thread --
+// callers are expected to Stop() first.
+WebRtc_Word32 TestLoadGenerator::Start (const char *threadName)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _eventPtr = EventWrapper::Create();
+
+    _genThread = ThreadWrapper::CreateThread(SenderThreadFunction, this, kRealtimePriority, threadName);
+    if (_genThread == NULL)
+    {
+        // BUG FIX: removed the unreachable exit(1) that followed this throw.
+        throw "Unable to start generator thread";
+    }
+
+    _running = true;
+
+    unsigned int tid;
+    _genThread->Start(tid);
+
+    return 0;
+}
+
+
+// Stop the generator thread and release the event. Safe to call when the
+// generator was never started. Always returns 0.
+WebRtc_Word32 TestLoadGenerator::Stop ()
+{
+    // BUG FIX: _critSect is a CriticalSectionWrapper* (see the header), so
+    // the original '.' member accesses (_critSect.Enter() etc.) could not
+    // compile; use '->' throughout.
+    _critSect->Enter();
+
+    if (_genThread)
+    {
+        _genThread->SetNotAlive();
+        _running = false;
+        _eventPtr->Set();
+
+        while (!_genThread->Stop())
+        {
+            // Yield the lock so the generator thread can finish its loop.
+            _critSect->Leave();
+            _critSect->Enter();
+        }
+
+        delete _genThread;
+        _genThread = NULL;
+
+        delete _eventPtr;
+        _eventPtr = NULL;
+    }
+
+    _genThread = NULL;
+    _critSect->Leave();
+    return (0);
+}
+
+
+// Generate a payload stamped with the current wall-clock time converted to
+// RTP units. NOTE(review): the 64-bit ms timestamp times the sample rate is
+// truncated to 32 bits; RTP timestamps wrap by design, but confirm the
+// receiver tolerates the wrap point.
+int TestLoadGenerator::generatePayload ()
+{
+    return(generatePayload( static_cast<WebRtc_UWord32>( TickTime::MillisecondTimestamp() * _rtpSampleRate / 1000 )));
+}
+
+
+// Forward the payload to the sender; returns the sender's result code.
+int TestLoadGenerator::sendPayload (const WebRtc_UWord32 timeStamp,
+                                    const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord32 payloadSize,
+                                    const webrtc::FrameType frameType /*= webrtc::kVideoFrameDelta*/)
+{
+
+    return (_sender->SendOutgoingData(timeStamp, payloadData, payloadSize, frameType));
+}
+
+
+// Constant-bitrate generator sending fixed-size payloads.
+CBRGenerator::CBRGenerator (TestSenderReceiver *sender, WebRtc_Word32 payloadSizeBytes, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate)
+:
+// BUG FIX: the base class is always initialized before members; listing
+// members first triggered -Wreorder and misrepresented the real order.
+TestLoadGenerator(sender, rtpSampleRate),
+_payloadSizeBytes(payloadSizeBytes),
+_payload(new WebRtc_UWord8[payloadSizeBytes])
+{
+    SetBitrate (bitrateKbps);
+}
+
+CBRGenerator::~CBRGenerator ()
+{
+    // Stop the generator thread before releasing the payload buffer it uses.
+    if (_running)
+    {
+        Stop();
+    }
+
+    if (_payload)
+    {
+        delete [] _payload;
+    }
+
+}
+
+// Thread loop: send one fixed-size payload per period, where the period is
+// chosen so the configured bitrate is realized, sleeping in between.
+bool CBRGenerator::GeneratorLoop ()
+{
+    double periodMs;
+    WebRtc_Word64 nextSendTime = TickTime::MillisecondTimestamp();
+
+
+    // no critSect
+    while (_running)
+    {
+        // send data (critSect inside)
+        generatePayload( static_cast<WebRtc_UWord32>(nextSendTime * _rtpSampleRate / 1000) );
+
+        // calculate wait time
+        // NOTE(review): _bitrateKbps is read here without the lock, and a
+        // value of 0 makes periodMs infinite -- confirm SetBitrate(0) cannot
+        // occur while the generator is running.
+        periodMs = 8.0 * _payloadSizeBytes / ( _bitrateKbps );
+
+        nextSendTime = static_cast<WebRtc_Word64>(nextSendTime + periodMs);
+
+        WebRtc_Word32 waitTime = static_cast<WebRtc_Word32>(nextSendTime - TickTime::MillisecondTimestamp());
+        if (waitTime < 0)
+        {
+            waitTime = 0;
+        }
+        // wait
+        _eventPtr->Wait(static_cast<WebRtc_Word32>(waitTime));
+    }
+
+    return true;
+}
+
+// Send one fixed-size payload stamped with the given RTP timestamp.
+// Returns the sender's result code.
+int CBRGenerator::generatePayload ( WebRtc_UWord32 timestamp )
+{
+    CriticalSectionScoped cs(_critSect);
+
+    return sendPayload(timestamp, _payload, _payloadSizeBytes);
+}
+
+
+
+
+/////////////////////
+
+// Fixed-frame-rate CBR generator: one payload per frame period, sized to
+// meet the target bitrate, with optional random size spread.
+CBRFixFRGenerator::CBRFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                                      WebRtc_Word32 rtpSampleRate, WebRtc_Word32 frameRateFps /*= 30*/,
+                                      double spread /*= 0.0*/)
+:
+// BUG FIX: the base class is always initialized before members; listing
+// members first triggered -Wreorder and misrepresented the real order.
+TestLoadGenerator(sender, rtpSampleRate),
+_payloadSizeBytes(0),
+_payload(NULL),
+_payloadAllocLen(0),
+_frameRateFps(frameRateFps),
+_spreadFactor(spread)
+{
+    SetBitrate (bitrateKbps);
+}
+
+CBRFixFRGenerator::~CBRFixFRGenerator ()
+{
+    // Stop the generator thread before releasing the payload buffer it uses.
+    if (_running)
+    {
+        Stop();
+    }
+
+    if (_payload)
+    {
+        delete [] _payload;
+        _payloadAllocLen = 0;
+    }
+
+}
+
+// Thread loop: once per frame period, compute the next payload size,
+// (re)allocate the payload buffer if needed, send the payload, and sleep
+// until the next frame is due.
+bool CBRFixFRGenerator::GeneratorLoop ()
+{
+    double periodMs;
+    WebRtc_Word64 nextSendTime = TickTime::MillisecondTimestamp();
+
+    // BUG FIX: _critSect is a CriticalSectionWrapper pointer (see the base
+    // class header); the original '.' member accesses could not compile.
+    _critSect->Enter();
+
+    if (_frameRateFps <= 0)
+    {
+        // BUG FIX: the original returned here with the lock still held.
+        _critSect->Leave();
+        return false;
+    }
+
+    _critSect->Leave();
+
+    // no critSect
+    while (_running)
+    {
+        _critSect->Enter();
+
+        // calculate payload size
+        _payloadSizeBytes = nextPayloadSize();
+
+        if (_payloadSizeBytes > 0)
+        {
+
+            if (_payloadAllocLen < _payloadSizeBytes * (1 + _spreadFactor))
+            {
+                // re-allocate _payload
+                if (_payload)
+                {
+                    delete [] _payload;
+                    _payload = NULL;
+                }
+
+                _payloadAllocLen = static_cast<WebRtc_Word32>((_payloadSizeBytes * (1 + _spreadFactor) * 3) / 2 + .5); // 50% extra to avoid frequent re-alloc
+                _payload = new WebRtc_UWord8[_payloadAllocLen];
+            }
+
+
+            // send data (critSect inside)
+            generatePayload( static_cast<WebRtc_UWord32>(nextSendTime * _rtpSampleRate / 1000) );
+        }
+
+        _critSect->Leave();
+
+        // calculate wait time
+        periodMs = 1000.0 / _frameRateFps;
+        nextSendTime = static_cast<WebRtc_Word64>(nextSendTime + periodMs + 0.5);
+
+        WebRtc_Word32 waitTime = static_cast<WebRtc_Word32>(nextSendTime - TickTime::MillisecondTimestamp());
+        if (waitTime < 0)
+        {
+            waitTime = 0;
+        }
+        // wait
+        _eventPtr->Wait(waitTime);
+    }
+
+    return true;
+}
+
+// Nominal per-frame payload size (bytes) for the configured bitrate and
+// frame rate.
+WebRtc_Word32 CBRFixFRGenerator::nextPayloadSize()
+{
+    const double periodMs = 1000.0 / _frameRateFps;
+    return static_cast<WebRtc_Word32>(_bitrateKbps * periodMs / 8 + 0.5);
+}
+
+// Send one payload whose size is randomly spread around _payloadSizeBytes
+// by +/- _spreadFactor, clamped to the allocated buffer length.
+int CBRFixFRGenerator::generatePayload ( WebRtc_UWord32 timestamp )
+{
+    CriticalSectionScoped cs(_critSect);
+
+    double factor = ((double) rand() - RAND_MAX/2) / RAND_MAX; // [-0.5; 0.5]
+    factor = 1 + 2 * _spreadFactor * factor; // [1 - _spreadFactor ; 1 + _spreadFactor]
+
+    WebRtc_Word32 thisPayloadBytes = static_cast<WebRtc_Word32>(_payloadSizeBytes * factor);
+    // sanity
+    if (thisPayloadBytes > _payloadAllocLen)
+    {
+        thisPayloadBytes = _payloadAllocLen;
+    }
+
+    int ret = sendPayload(timestamp, _payload, thisPayloadBytes);
+    return ret;
+}
+
+
+/////////////////////
+
+// Like CBRFixFRGenerator, but every keyPeriod-th frame is a key frame that
+// is keyFactor times larger than a delta frame; delta sizes are reduced so
+// the long-run average still meets the target bitrate.
+PeriodicKeyFixFRGenerator::PeriodicKeyFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                                                      WebRtc_Word32 rtpSampleRate, WebRtc_Word32 frameRateFps /*= 30*/,
+                                                      double spread /*= 0.0*/, double keyFactor /*= 4.0*/, WebRtc_UWord32 keyPeriod /*= 300*/)
+:
+// BUG FIX: the base class is always initialized before members; listing
+// members first triggered -Wreorder and misrepresented the real order.
+CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, frameRateFps, spread),
+_keyFactor(keyFactor),
+_keyPeriod(keyPeriod),
+_frameCount(0)
+{
+}
+
+// Payload size for the next frame, accounting for the periodic key frames.
+WebRtc_Word32 PeriodicKeyFixFRGenerator::nextPayloadSize()
+{
+    // calculate payload size for a delta frame
+    WebRtc_Word32 payloadSizeBytes = static_cast<WebRtc_Word32>(1000 * _bitrateKbps / (8.0 * _frameRateFps * (1.0 + (_keyFactor - 1.0) / _keyPeriod)) + 0.5);
+
+    if (_frameCount % _keyPeriod == 0)
+    {
+        // this is a key frame, scale the payload size
+        // BUG FIX: scale the freshly computed delta size instead of the
+        // stale _payloadSizeBytes member, which is still 0 for the very
+        // first (key) frame and made that frame empty.
+        payloadSizeBytes = static_cast<WebRtc_Word32>(_keyFactor * payloadSizeBytes + 0.5);
+    }
+    _frameCount++;
+
+    return payloadSizeBytes;
+}
+
+////////////////////
+
+// Varies the frame rate over time: a new rate is drawn at random from
+// frameRates roughly every avgFrPeriodMs (see ChangeFrameRate).
+CBRVarFRGenerator::CBRVarFRGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, const WebRtc_UWord8* frameRates,
+                                     WebRtc_UWord16 numFrameRates, WebRtc_Word32 rtpSampleRate, double avgFrPeriodMs,
+                                     double frSpreadFactor, double spreadFactor)
+:
+_avgFrPeriodMs(avgFrPeriodMs),
+_frSpreadFactor(frSpreadFactor),
+_frameRates(NULL),
+_numFrameRates(numFrameRates),
+// NOTE(review): this initializer reads _avgFrPeriodMs, but members are
+// initialized in *declaration* order, so this is only safe if
+// _avgFrPeriodMs is declared before _frChangeTimeMs -- confirm in the
+// header (not visible here). The base class also runs before all of these
+// despite being listed last (-Wreorder).
+_frChangeTimeMs(TickTime::MillisecondTimestamp() + _avgFrPeriodMs),
+CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, frameRates[0], spreadFactor)
+{
+    _frameRates = new WebRtc_UWord8[_numFrameRates];
+    // One byte per entry: _frameRates holds WebRtc_UWord8 values.
+    memcpy(_frameRates, frameRates, _numFrameRates);
+}
+
+CBRVarFRGenerator::~CBRVarFRGenerator()
+{
+    delete [] _frameRates;
+}
+
+// Possibly switch to a new, randomly chosen frame rate. Called once per
+// frame; does nothing until the scheduled change time has passed.
+void CBRVarFRGenerator::ChangeFrameRate()
+{
+    const WebRtc_Word64 nowMs = TickTime::MillisecondTimestamp();
+    if (nowMs < _frChangeTimeMs)
+    {
+        return;
+    }
+    // Time to change frame rate
+    WebRtc_UWord16 frIndex = static_cast<WebRtc_UWord16>(static_cast<double>(rand()) / RAND_MAX
+                                            * (_numFrameRates - 1) + 0.5) ;
+    assert(frIndex < _numFrameRates);
+    _frameRateFps = _frameRates[frIndex];
+    // Update the next frame rate change time
+    double factor = ((double) rand() - RAND_MAX/2) / RAND_MAX; // [-0.5; 0.5]
+    factor = 1 + 2 * _frSpreadFactor * factor; // [1 - _frSpreadFactor ; 1 + _frSpreadFactor]
+    // NOTE(review): _avgFrPeriodMs is already in milliseconds, yet it is
+    // multiplied by 1000.0 here while the constructor adds it without the
+    // factor -- looks like a seconds/ms mix-up; confirm the intended period.
+    _frChangeTimeMs = nowMs + static_cast<WebRtc_Word64>(1000.0 * factor *
+                                    _avgFrPeriodMs + 0.5);
+
+    printf("New frame rate: %d\n", _frameRateFps);
+}
+
+// Next payload size after applying any pending frame-rate change.
+WebRtc_Word32 CBRVarFRGenerator::nextPayloadSize()
+{
+    ChangeFrameRate();
+    return CBRFixFRGenerator::nextPayloadSize();
+}
+
+////////////////////
+
+// Holds the target bitrate by dropping whole frames when the accumulated
+// backlog of sent bits (leaky-bucket accounting in _accBits) grows too big.
+CBRFrameDropGenerator::CBRFrameDropGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                                         WebRtc_Word32 rtpSampleRate, double spreadFactor)
+:
+// BUG FIX: the base class is always initialized before members; listing
+// the member first triggered -Wreorder and misrepresented the real order.
+CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, 30, spreadFactor),
+_accBits(0)
+{
+}
+
+CBRFrameDropGenerator::~CBRFrameDropGenerator()
+{
+}
+
+// Drain the bit bucket by one frame's nominal budget. If more than 0.3 s
+// worth of the target bitrate is still queued, drop this frame (size 0);
+// otherwise emit a frame of at least 300 kbps equivalent and charge its
+// bits to the bucket.
+WebRtc_Word32 CBRFrameDropGenerator::nextPayloadSize()
+{
+    _accBits -= 1000 * _bitrateKbps / _frameRateFps;
+    if (_accBits < 0)
+    {
+        _accBits = 0;
+    }
+    if (_accBits > 0.3 * _bitrateKbps * 1000)
+    {
+        //printf("drop\n");
+        return 0;
+    }
+    else
+    {
+        //printf("keep\n");
+        const double periodMs = 1000.0 / _frameRateFps;
+        WebRtc_Word32 frameSize = static_cast<WebRtc_Word32>(_bitrateKbps * periodMs / 8 + 0.5);
+        frameSize = std::max(frameSize, static_cast<WebRtc_Word32>(300 * periodMs / 8 + 0.5));
+        _accBits += frameSize * 8;
+        return frameSize;
+    }
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h
new file mode 100644
index 0000000..c22591c
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h
@@ -0,0 +1,146 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
+
+#include <stdlib.h>
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+class TestSenderReceiver;
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+}
+
+// Abstract base for traffic generators that feed a TestSenderReceiver.
+// Owns the generator thread; subclasses implement GeneratorLoop() and
+// the timestamped generatePayload() overload.
+class TestLoadGenerator
+{
+public:
+    TestLoadGenerator (TestSenderReceiver *sender, WebRtc_Word32 rtpSampleRate = 90000);
+    virtual ~TestLoadGenerator ();
+
+    // Sets the target bitrate in kbps for subsequent payloads.
+    WebRtc_Word32 SetBitrate (WebRtc_Word32 newBitrateKbps);
+    // Starts the generator thread; threadName is used for the wrapper.
+    virtual WebRtc_Word32 Start (const char *threadName = NULL);
+    virtual WebRtc_Word32 Stop ();
+    // Thread body; implemented by each concrete generator.
+    virtual bool GeneratorLoop () = 0;
+
+protected:
+    virtual int generatePayload ( WebRtc_UWord32 timestamp ) = 0;
+    int generatePayload ();
+    // Hands a generated payload to _sender for transmission.
+    int sendPayload (const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const webrtc::FrameType frameType = webrtc::kVideoFrameDelta);
+
+    webrtc::CriticalSectionWrapper* _critSect;
+    webrtc::EventWrapper *_eventPtr;
+    webrtc::ThreadWrapper* _genThread;
+    WebRtc_Word32 _bitrateKbps;
+    TestSenderReceiver *_sender;
+    bool _running;
+    WebRtc_Word32 _rtpSampleRate;
+};
+
+
+// Constant-bitrate generator emitting fixed-size payloads.
+class CBRGenerator : public TestLoadGenerator
+{
+public:
+    CBRGenerator (TestSenderReceiver *sender, WebRtc_Word32 payloadSizeBytes, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate = 90000);
+    virtual ~CBRGenerator ();
+
+    // NOTE(review): this zero-argument Start() hides
+    // TestLoadGenerator::Start(const char*) rather than overriding it;
+    // calls through a base pointer reach the base overload — confirm
+    // this is intended.
+    virtual WebRtc_Word32 Start () {return (TestLoadGenerator::Start("CBRGenerator"));};
+
+    virtual bool GeneratorLoop ();
+
+protected:
+    virtual int generatePayload ( WebRtc_UWord32 timestamp );
+
+    WebRtc_Word32 _payloadSizeBytes;
+    WebRtc_UWord8 *_payload;
+};
+
+
+class CBRFixFRGenerator : public TestLoadGenerator // constant bitrate and fixed frame rate
+{
+public:
+    // spread randomizes per-frame sizes around the mean (0.0 = none).
+    CBRFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate = 90000,
+        WebRtc_Word32 frameRateFps = 30, double spread = 0.0);
+    virtual ~CBRFixFRGenerator ();
+
+    // NOTE(review): hides TestLoadGenerator::Start(const char*), same
+    // pattern as CBRGenerator — confirm intended.
+    virtual WebRtc_Word32 Start () {return (TestLoadGenerator::Start("CBRFixFRGenerator"));};
+
+    virtual bool GeneratorLoop ();
+
+protected:
+    // Size in bytes of the next frame; subclasses override to vary it.
+    virtual WebRtc_Word32 nextPayloadSize ();
+    virtual int generatePayload ( WebRtc_UWord32 timestamp );
+
+    WebRtc_Word32 _payloadSizeBytes;
+    WebRtc_UWord8 *_payload;
+    WebRtc_Word32 _payloadAllocLen;
+    WebRtc_Word32 _frameRateFps;
+    double      _spreadFactor;
+};
+
+class PeriodicKeyFixFRGenerator : public CBRFixFRGenerator // constant bitrate and fixed frame rate with periodically large frames
+{
+public:
+    // Every keyPeriod frames, the frame is keyFactor times larger,
+    // emulating periodic key frames.
+    PeriodicKeyFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate = 90000,
+        WebRtc_Word32 frameRateFps = 30, double spread = 0.0, double keyFactor = 4.0, WebRtc_UWord32 keyPeriod = 300);
+    virtual ~PeriodicKeyFixFRGenerator () {}
+
+protected:
+    virtual WebRtc_Word32 nextPayloadSize ();
+
+    double          _keyFactor;
+    WebRtc_UWord32    _keyPeriod;
+    WebRtc_UWord32    _frameCount;
+};
+
+// Probably better to inherit CBRFixFRGenerator from CBRVarFRGenerator, but since
+// the fix FR version already existed this was easier.
+class CBRVarFRGenerator : public CBRFixFRGenerator // constant bitrate and variable frame rate
+{
+public:
+    // frameRates is the set of rates to switch between; the active rate
+    // changes on average every avgFrPeriodMs, jittered by frSpreadFactor.
+    CBRVarFRGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, const WebRtc_UWord8* frameRates,
+        WebRtc_UWord16 numFrameRates, WebRtc_Word32 rtpSampleRate = 90000, double avgFrPeriodMs = 5.0,
+        double frSpreadFactor = 0.05, double spreadFactor = 0.0);
+
+    ~CBRVarFRGenerator();
+
+protected:
+    // Picks a new _frameRateFps when the scheduled change time passes.
+    virtual void ChangeFrameRate();
+    virtual WebRtc_Word32 nextPayloadSize ();
+
+    double       _avgFrPeriodMs;
+    double       _frSpreadFactor;
+    WebRtc_UWord8* _frameRates;
+    WebRtc_UWord16 _numFrameRates;
+    WebRtc_Word64  _frChangeTimeMs;
+};
+
+class CBRFrameDropGenerator : public CBRFixFRGenerator // constant bitrate with frame dropping (comment fixed: was a copy-paste of the variable-frame-rate description)
+{
+public:
+    CBRFrameDropGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                    WebRtc_Word32 rtpSampleRate = 90000, double spreadFactor = 0.0);
+
+    ~CBRFrameDropGenerator();
+
+protected:
+    // Returns 0 (drop) when the accumulated bit debt is too large.
+    virtual WebRtc_Word32 nextPayloadSize();
+
+    // Outstanding bits sent beyond the target budget.
+    double       _accBits;
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc
new file mode 100644
index 0000000..1fc0fd3
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc
@@ -0,0 +1,442 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "rtp_rtcp.h"
+#include "udp_transport.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "critical_section_wrapper.h"
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+#include <stdlib.h>
+
+#define NR_OF_SOCKET_BUFFERS 500
+
+
+// Thread entry point: forwards to TestSenderReceiver::ProcLoop().
+// Returning false stops the thread wrapper's run loop.
+bool ProcThreadFunction(void *obj)
+{
+    TestSenderReceiver *senderReceiver = static_cast<TestSenderReceiver *>(obj);
+    if (senderReceiver == NULL)
+    {
+        return false;
+    }
+    return senderReceiver->ProcLoop();
+}
+
+
+// Creates the RTP/RTCP module and the UDP transport.
+// On any module failure a C-string naming the failing call is thrown.
+// (The exit(1) calls that used to follow each throw were unreachable
+// dead code and have been removed.)
+TestSenderReceiver::TestSenderReceiver (void)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_eventPtr(NULL),
+_procThread(NULL),
+_running(false),
+_payloadType(0),
+_loadGenerator(NULL),
+_isSender(false),
+_isReceiver(false),
+_timeOut(false),
+_sendRecCB(NULL),
+_lastBytesReceived(0),
+_lastTime(-1)
+{
+    // RTP/RTCP module
+    _rtp = RtpRtcp::CreateRtpRtcp(0, false);
+    if (!_rtp)
+    {
+        throw "Could not create RTP/RTCP module";
+    }
+
+    if (_rtp->InitReceiver() != 0)
+    {
+        throw "_rtp->InitReceiver()";
+    }
+
+    if (_rtp->InitSender() != 0)
+    {
+        throw "_rtp->InitSender()";
+    }
+
+    // SocketTransport module
+    WebRtc_UWord8 numberOfThreads = 1;
+    _transport = UdpTransport::Create(0, numberOfThreads);
+    if (!_transport)
+    {
+        throw "Could not create transport module";
+    }
+}
+
+// Tears down the process thread, then the RTP and transport modules.
+TestSenderReceiver::~TestSenderReceiver (void)
+{
+
+    Stop(); // N.B. without critSect
+
+    _critSect->Enter();
+
+    if (_rtp)
+    {
+        RtpRtcp::DestroyRtpRtcp(_rtp);
+        _rtp = NULL;
+    }
+
+    if (_transport)
+    {
+        UdpTransport::Destroy(_transport);
+        _transport = NULL;
+    }
+
+    // NOTE(review): _critSect is deleted while still entered (no Leave()
+    // before delete) — looks intentional since no other thread can hold
+    // it after Stop(), but confirm CriticalSectionWrapper allows this.
+    delete _critSect;
+
+}
+
+
+// Configures the receive side: binds the receive sockets and registers
+// this object for RTP/RTCP callbacks. Throws a C-string naming the
+// failing call. rtcpPort is currently unused by the socket setup.
+// (Unreachable exit(1) calls after each throw have been removed.)
+WebRtc_Word32 TestSenderReceiver::InitReceiver (const WebRtc_UWord16 rtpPort,
+                                              const WebRtc_UWord16 rtcpPort,
+                                              const WebRtc_Word8 payloadType /*= 127*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    // init transport
+    if (_transport->InitializeReceiveSockets(this, rtpPort/*, 0, NULL, 0, true*/) != 0)
+    {
+        throw "_transport->InitializeReceiveSockets";
+    }
+
+    if (_rtp->RegisterIncomingRTPCallback(this) != 0)
+    {
+        throw "_rtp->RegisterIncomingRTPCallback";
+    }
+
+    if (_rtp->RegisterIncomingDataCallback(this) != 0)
+    {
+        // Fixed: this branch previously reported the wrong call name
+        // ("_rtp->RegisterIncomingRTPCallback").
+        throw "_rtp->RegisterIncomingDataCallback";
+    }
+
+    if (_rtp->SetRTCPStatus(kRtcpNonCompound) != 0)
+    {
+        throw "_rtp->SetRTCPStatus";
+    }
+
+    if (_rtp->SetTMMBRStatus(true) != 0)
+    {
+        throw "_rtp->SetTMMBRStatus";
+    }
+
+    if (_rtp->RegisterReceivePayload("I420", payloadType, 90000) != 0)
+    {
+        throw "_rtp->RegisterReceivePayload";
+    }
+
+    _isReceiver = true;
+
+    return (0);
+}
+
+
+// Starts sending (and receiving, if InitReceiver was called) and spawns
+// the process thread. Throws a C-string on failure.
+// (Unreachable exit(1) calls after each throw have been removed.)
+WebRtc_Word32 TestSenderReceiver::Start()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    // NOTE(review): calling Start() twice without Stop() leaks the
+    // previous event — confirm callers never do that.
+    _eventPtr = EventWrapper::Create();
+
+    if (_rtp->SetSendingStatus(true) != 0)
+    {
+        throw "_rtp->SetSendingStatus";
+    }
+
+    _procThread = ThreadWrapper::CreateThread(ProcThreadFunction, this, kRealtimePriority, "TestSenderReceiver");
+    if (_procThread == NULL)
+    {
+        throw "Unable to create process thread";
+    }
+
+    _running = true;
+
+    if (_isReceiver)
+    {
+        if (_transport->StartReceiving(NR_OF_SOCKET_BUFFERS) != 0)
+        {
+            throw "_transport->StartReceiving";
+        }
+    }
+
+    unsigned int tid;
+    _procThread->Start(tid);
+
+    return 0;
+
+}
+
+
+// Stops receiving and shuts down the process thread. Safe to call more
+// than once (the destructor calls it again after an explicit Stop()).
+WebRtc_Word32 TestSenderReceiver::Stop ()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _transport->StopReceiving();
+
+    if (_procThread)
+    {
+        _procThread->SetNotAlive();
+        _running = false;
+        // Wake the thread so it can observe _running == false.
+        _eventPtr->Set();
+
+        while (!_procThread->Stop())
+        {
+            ;
+        }
+
+        delete _eventPtr;
+        // Fixed: _eventPtr was left dangling after delete; Start()
+        // re-creates it, but any use in between would touch freed memory.
+        _eventPtr = NULL;
+
+        delete _procThread;
+    }
+
+    _procThread = NULL;
+
+    return (0);
+}
+
+
+// Process-thread body: repeatedly services the RTP/RTCP and transport
+// modules, sleeping between rounds for the shortest interval either
+// module requests. Stop() wakes the event to end the loop promptly.
+bool TestSenderReceiver::ProcLoop(void)
+{
+
+    // process RTP/RTCP module
+    _rtp->Process();
+
+    // process SocketTransport module
+    _transport->Process();
+
+    // no critSect — _running is only read here; Stop() flips it and
+    // signals the event, so no lock is taken in this loop.
+    while (_running)
+    {
+        // ask RTP/RTCP module for wait time
+        WebRtc_Word32 rtpWait = _rtp->TimeUntilNextProcess();
+
+        // ask SocketTransport module for wait time
+        WebRtc_Word32 tpWait = _transport->TimeUntilNextProcess();
+
+        // Sleep only as long as the most impatient module allows.
+        WebRtc_Word32 minWait = (rtpWait < tpWait) ? rtpWait: tpWait;
+        minWait = (minWait > 0) ? minWait : 0;
+        // wait
+        _eventPtr->Wait(minWait);
+
+        // process RTP/RTCP module
+        _rtp->Process();
+
+        // process SocketTransport module
+        _transport->Process();
+
+    }
+
+    return true;
+}
+
+
+// Returns the average receive bitrate in kbps since the previous call
+// (0 on the first call or when no time has elapsed), or -1 if the RTP
+// module cannot provide counters.
+WebRtc_Word32 TestSenderReceiver::ReceiveBitrateKbps ()
+{
+    WebRtc_UWord32 bytesSent;
+    WebRtc_UWord32 packetsSent;
+    WebRtc_UWord32 bytesReceived;
+    WebRtc_UWord32 packetsReceived;
+
+    if (_rtp->DataCountersRTP(&bytesSent, &packetsSent, &bytesReceived, &packetsReceived) == 0)
+    {
+        WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+        WebRtc_Word32 kbps = 0;
+        if (now > _lastTime)
+        {
+            if (_lastTime > 0)
+            {
+                // 8 * bytes / ms = kbps
+                // NOTE(review): assumes bytesReceived has not wrapped the
+                // 32-bit counter since the previous call — confirm for
+                // long-running high-bitrate sessions.
+                kbps = static_cast<WebRtc_Word32>(
+                    (8 * (bytesReceived - _lastBytesReceived)) / (now - _lastTime));
+            }
+            _lastTime = now;
+            _lastBytesReceived = bytesReceived;
+        }
+        return (kbps);
+    }
+
+    return (-1);
+}
+
+
+// Forwards the RTP packet timeout to the RTP module; the RTCP timeout
+// is left disabled (0).
+WebRtc_Word32 TestSenderReceiver::SetPacketTimeout(const WebRtc_UWord32 timeoutMS)
+{
+    const WebRtc_Word32 result = _rtp->SetPacketTimeout(timeoutMS, 0 /* RTCP timeout */);
+    return result;
+}
+
+
+// RtpFeedback callback: latches the timeout flag read back via
+// timeOutTriggered().
+void TestSenderReceiver::OnPacketTimeout(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_critSect);
+    _timeOut = true;
+}
+
+
+// RtpFeedback callback: intentionally a no-op in this test harness.
+void TestSenderReceiver::OnReceivedPacket(const WebRtc_Word32 id,
+                                    const RtpRtcpPacketType packetType)
+{
+    // do nothing
+    //printf("OnReceivedPacket\n");
+
+}
+
+// RtpData callback: payload content is ignored by this test harness;
+// always reports success.
+WebRtc_Word32 TestSenderReceiver::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadSize,
+                                          const webrtc::WebRtcRTPHeader* rtpHeader)
+{
+    return 0;
+}
+
+
+// UdpTransportData callback: hands a raw RTP packet to the RTP module.
+// The source address is ignored.
+void TestSenderReceiver::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                                      const WebRtc_Word32 rtpPacketLength,
+                                      const WebRtc_Word8* fromIP,
+                                      const WebRtc_UWord16 fromPort)
+{
+    // Explicit C++ casts replace the original C-style cast; the module
+    // API takes a non-const unsigned buffer.
+    WebRtc_UWord8* packet =
+        reinterpret_cast<WebRtc_UWord8*>(const_cast<WebRtc_Word8*>(incomingRtpPacket));
+    _rtp->IncomingPacket(packet, static_cast<WebRtc_UWord16>(rtpPacketLength));
+}
+
+
+
+// UdpTransportData callback: hands a raw RTCP packet to the RTP module.
+// The source address is ignored.
+void TestSenderReceiver::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                                       const WebRtc_Word32 rtcpPacketLength,
+                                       const WebRtc_Word8* fromIP,
+                                       const WebRtc_UWord16 fromPort)
+{
+    // Explicit C++ casts replace the original C-style cast.
+    WebRtc_UWord8* packet =
+        reinterpret_cast<WebRtc_UWord8*>(const_cast<WebRtc_Word8*>(incomingRtcpPacket));
+    _rtp->IncomingPacket(packet, static_cast<WebRtc_UWord16>(rtcpPacketLength));
+}
+
+
+
+
+
+///////////////////
+
+
+// Configures the send side: registers transport and payload, sets the
+// initial bitrate, and opens the send sockets. Throws a C-string naming
+// the failing call. (Unreachable exit(1) calls after each throw have
+// been removed.)
+WebRtc_Word32 TestSenderReceiver::InitSender (const WebRtc_UWord32 startBitrateKbps,
+                                            const WebRtc_Word8* ipAddr,
+                                            const WebRtc_UWord16 rtpPort,
+                                            const WebRtc_UWord16 rtcpPort /*= 0*/,
+                                            const WebRtc_Word8 payloadType /*= 127*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _payloadType = payloadType;
+
+    // check load generator valid
+    if (_loadGenerator)
+    {
+        _loadGenerator->SetBitrate(startBitrateKbps);
+    }
+
+    if (_rtp->RegisterSendTransport(_transport) != 0)
+    {
+        throw "_rtp->RegisterSendTransport";
+    }
+    if (_rtp->RegisterSendPayload("I420", _payloadType, 90000) != 0)
+    {
+        throw "_rtp->RegisterSendPayload";
+    }
+
+    if (_rtp->RegisterIncomingVideoCallback(this) != 0)
+    {
+        throw "_rtp->RegisterIncomingVideoCallback";
+    }
+
+    if (_rtp->SetRTCPStatus(kRtcpNonCompound) != 0)
+    {
+        throw "_rtp->SetRTCPStatus";
+    }
+
+    if (_rtp->SetSendBitrate(startBitrateKbps*1000, 0, MAX_BITRATE_KBPS) != 0)
+    {
+        throw "_rtp->SetSendBitrate";
+    }
+
+
+    // SocketTransport
+    if (_transport->InitializeSendSockets(ipAddr, rtpPort, rtcpPort))
+    {
+        throw "_transport->InitializeSendSockets";
+    }
+
+    _isSender = true;
+
+    return (0);
+}
+
+
+
+// Sends one frame of payload through the RTP module using the payload
+// type configured in InitSender().
+WebRtc_Word32
+TestSenderReceiver::SendOutgoingData(const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord32 payloadSize,
+                                     const webrtc::FrameType frameType /*= webrtc::kVideoFrameDelta*/)
+{
+    const WebRtc_Word32 result =
+        _rtp->SendOutgoingData(frameType, _payloadType, timeStamp, payloadData, payloadSize);
+    return result;
+}
+
+
+// Installs the load generator that bandwidth updates are forwarded to
+// (see OnNetworkChanged). Does not take ownership.
+WebRtc_Word32 TestSenderReceiver::SetLoadGenerator(TestLoadGenerator *generator)
+{
+    CriticalSectionScoped cs(_critSect);
+    _loadGenerator = generator;
+    return 0;
+}
+
+// RtpVideoFeedback callback: propagates the new bandwidth estimate to
+// the load generator and to the registered observer, when present.
+void TestSenderReceiver::OnNetworkChanged(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 minBitrateBps,
+                                  const WebRtc_UWord32 maxBitrateBps,
+                                  const WebRtc_UWord8 fractionLost,
+                                  const WebRtc_UWord16 roundTripTimeMs,
+                                  const WebRtc_UWord16 bwEstimateKbitMin,
+                                  const WebRtc_UWord16 bwEstimateKbitMax)
+{
+    if (_loadGenerator != NULL)
+    {
+        // The generator works in kbps; the estimate arrives in bps.
+        _loadGenerator->SetBitrate(maxBitrateBps / 1000);
+    }
+
+    if (_sendRecCB != NULL)
+    {
+        _sendRecCB->OnOnNetworkChanged(maxBitrateBps,
+                                       fractionLost,
+                                       roundTripTimeMs,
+                                       bwEstimateKbitMin,
+                                       bwEstimateKbitMax);
+    }
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
new file mode 100644
index 0000000..7f7f2f0
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+#include "udp_transport.h"
+
+class TestLoadGenerator;
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+}
+
+using namespace webrtc;
+
+#define MAX_BITRATE_KBPS 50000
+
+
+// Observer interface for bandwidth-estimate updates; implement and
+// register via TestSenderReceiver::SetCallback().
+class SendRecCB
+{
+public:
+    virtual void OnOnNetworkChanged(const WebRtc_UWord32 bitrateTarget,
+        const WebRtc_UWord8 fractionLost,
+        const WebRtc_UWord16 roundTripTimeMs,
+        const WebRtc_UWord16 bwEstimateKbitMin,
+        const WebRtc_UWord16 bwEstimateKbitMax) = 0;
+
+    virtual ~SendRecCB() {};
+};
+
+
+// Test harness combining an RTP/RTCP module with a UDP transport.
+// Can act as sender, receiver, or both; runs its own process thread
+// (see ProcLoop) and forwards bandwidth feedback to a TestLoadGenerator.
+class TestSenderReceiver : public RtpFeedback, public RtpData, public UdpTransportData, public RtpVideoFeedback
+{
+
+public:
+    TestSenderReceiver (void);
+
+    ~TestSenderReceiver (void);
+
+    // Registers the bandwidth-update observer; does not take ownership.
+    void SetCallback (SendRecCB *cb) { _sendRecCB = cb; };
+
+    WebRtc_Word32 Start();
+
+    WebRtc_Word32 Stop();
+
+    // Process-thread body; public so the thread trampoline can call it.
+    bool ProcLoop();
+
+    /////////////////////////////////////////////
+    // Receiver methods
+
+    WebRtc_Word32 InitReceiver (const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0,
+        const WebRtc_Word8 payloadType = 127);
+
+    // Average receive bitrate since the previous call; -1 on error.
+    WebRtc_Word32 ReceiveBitrateKbps ();
+
+    WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
+
+    // True once OnPacketTimeout has fired.
+    bool timeOutTriggered () { return (_timeOut); };
+
+    // Inherited from RtpFeedback
+    virtual WebRtc_Word32 OnInitializeDecoder(const WebRtc_Word32 id,
+                                            const WebRtc_Word8 payloadType,
+                                            const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
+                                            const WebRtc_UWord32 frequency,
+                                            const WebRtc_UWord8 channels,
+                                            const WebRtc_UWord32 rate) { return(0);};
+
+    virtual void OnPacketTimeout(const WebRtc_Word32 id);
+
+    virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                  const RtpRtcpPacketType packetType);
+
+    virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                       const RTPAliveType alive) {};
+
+    virtual void OnIncomingSSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 SSRC) {};
+
+    virtual void OnIncomingCSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 CSRC,
+                                        const bool added) {};
+
+
+    // Inherited from RtpData
+
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                const WebRtc_UWord16 payloadSize,
+                                                const webrtc::WebRtcRTPHeader* rtpHeader);
+
+
+    // Inherited from UdpTransportData
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+        const WebRtc_Word32 rtpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+        const WebRtc_Word32 rtcpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+
+
+    /////////////////////////////////
+    // Sender methods
+
+    WebRtc_Word32 InitSender (const WebRtc_UWord32 startBitrateKbps,
+        const WebRtc_Word8* ipAddr,
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0,
+        const WebRtc_Word8 payloadType = 127);
+
+    WebRtc_Word32 SendOutgoingData(const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const webrtc::FrameType frameType = webrtc::kVideoFrameDelta);
+
+    WebRtc_Word32 SetLoadGenerator(TestLoadGenerator *generator);
+
+    WebRtc_UWord32 BitrateSent() { return (_rtp->BitrateSent()); };
+
+
+    // Inherited from RtpVideoFeedback
+    virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
+        const WebRtc_UWord8 message = 0) {};
+
+    virtual void OnNetworkChanged(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 minBitrateBps,
+                                  const WebRtc_UWord32 maxBitrateBps,
+                                  const WebRtc_UWord8 fractionLost,
+                                  const WebRtc_UWord16 roundTripTimeMs,
+                                  const WebRtc_UWord16 bwEstimateKbitMin,
+                                  const WebRtc_UWord16 bwEstimateKbitMax);
+
+private:
+    RtpRtcp* _rtp;
+    UdpTransport* _transport;
+    webrtc::CriticalSectionWrapper* _critSect;
+    webrtc::EventWrapper *_eventPtr;
+    webrtc::ThreadWrapper* _procThread;
+    bool _running;
+    WebRtc_Word8 _payloadType;
+    TestLoadGenerator* _loadGenerator;
+    bool _isSender;
+    bool _isReceiver;
+    bool _timeOut;
+    SendRecCB * _sendRecCB;
+    WebRtc_UWord32 _lastBytesReceived;
+    WebRtc_Word64 _lastTime;
+
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
diff --git a/trunk/src/modules/rtp_rtcp/test/bitstreamTest/bitstreamTest.cc b/trunk/src/modules/rtp_rtcp/test/bitstreamTest/bitstreamTest.cc
new file mode 100644
index 0000000..38b6e15
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/bitstreamTest/bitstreamTest.cc
@@ -0,0 +1,538 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../../source/BitstreamBuilder.h"
+#include "../../source/BitstreamParser.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <math.h>
+#include <tchar.h>
+#include <windows.h>
+
+// Decodes a 16-bit compact bitrate: the low 14 bits are the mantissa,
+// the high 2 bits select a power-of-ten scale of 10^(2..5).
+WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
+{
+    const WebRtc_UWord32 mantissa = x & 0x3fff;
+    const WebRtc_UWord32 scale = WebRtc_UWord32(pow(10.0f, (2 + (x >> 14))));
+    return mantissa * scale;
+}
+
+// Inverse of BitRateBPS: encodes a bitrate (bps) into the 16-bit
+// mantissa/exponent form, picking the exponent from the magnitude of
+// x >> 14. Asserts when x is too large to represent.
+WebRtc_UWord16 BitRateBPSInv(WebRtc_UWord32 x )
+{
+    // Representable maxima per exponent:
+    // 16383 0x3fff
+    //     1 638 300    exp 0
+    //    16 383 000    exp 1
+    //   163 830 000    exp 2
+    // 1 638 300 000    exp 3
+    // When x >> 14 == 0, log10(0) is -inf, so exp < 0.0 selects the
+    // exponent-0 branch as intended.
+    const float exp = log10(float(x>>14)) - 2;
+    if(exp < 0.0)
+    {
+        return WebRtc_UWord16(x /100);
+    }else if(exp < 1.0)
+    {
+        return 0x4000 + WebRtc_UWord16(x /1000);
+    }else if(exp < 2.0)
+    {
+        return 0x8000 + WebRtc_UWord16(x /10000);
+    }else if(exp < 3.0)
+    {
+        return 0xC000 + WebRtc_UWord16(x /100000);
+    } else
+    {
+        // Out of range for the 2-bit exponent.
+        assert(false);
+        return 0;
+    }
+}
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+    WebRtc_UWord8 dataBuffer[128];
+    BitstreamBuilder builder(dataBuffer, sizeof(dataBuffer));
+
+    // test 1 to 4 bits
+    builder.Add1Bit(1);
+    builder.Add1Bit(0);
+    builder.Add1Bit(1);
+
+    builder.Add2Bits(1);
+    builder.Add2Bits(2);
+    builder.Add2Bits(3);
+
+    builder.Add3Bits(1);
+    builder.Add3Bits(3);
+    builder.Add3Bits(7);
+
+    builder.Add4Bits(1);
+    builder.Add4Bits(5);
+    builder.Add4Bits(15);
+
+    assert(4 == builder.Length());
+
+    BitstreamParser parser(dataBuffer, sizeof(dataBuffer));
+
+    assert(1 == parser.Get1Bit());
+    assert(0 == parser.Get1Bit());
+    assert(1 == parser.Get1Bit());
+
+    assert(1 == parser.Get2Bits());
+    assert(2 == parser.Get2Bits());
+    assert(3 == parser.Get2Bits());
+
+    assert(1 == parser.Get3Bits());
+    assert(3 == parser.Get3Bits());
+    assert(7 == parser.Get3Bits());
+
+    assert(1 == parser.Get4Bits());
+    assert(5 == parser.Get4Bits());
+    assert(15 == parser.Get4Bits());
+
+    printf("Test of 1 to 4 bits done\n");
+
+    // test 5 to 7 bits
+    builder.Add5Bits(1);
+    builder.Add5Bits(15);
+    builder.Add5Bits(30);
+
+    builder.Add6Bits(1);
+    builder.Add6Bits(30);
+    builder.Add6Bits(60);
+
+    builder.Add7Bits(1);
+    builder.Add7Bits(60);
+    builder.Add7Bits(120);
+
+    assert(1 == parser.Get5Bits());
+    assert(15 == parser.Get5Bits());
+    assert(30 == parser.Get5Bits());
+
+    assert(1 == parser.Get6Bits());
+    assert(30 == parser.Get6Bits());
+    assert(60 == parser.Get6Bits());
+
+    assert(1 == parser.Get7Bits());
+    assert(60 == parser.Get7Bits());
+    assert(120 == parser.Get7Bits());
+
+    printf("Test of 5 to 7 bits done\n");
+
+    builder.Add8Bits(1);
+    builder.Add1Bit(1);
+    builder.Add8Bits(255);
+    builder.Add1Bit(0);
+    builder.Add8Bits(127);
+    builder.Add1Bit(1);
+    builder.Add8Bits(60);
+    builder.Add1Bit(0);
+    builder.Add8Bits(30);
+    builder.Add1Bit(1);
+    builder.Add8Bits(120);
+    builder.Add1Bit(0);
+    builder.Add8Bits(160);
+    builder.Add1Bit(1);
+    builder.Add8Bits(180);
+
+    assert(1 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get8Bits());
+    assert(0 == parser.Get1Bit());
+    assert(127 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get8Bits());
+    assert(0 == parser.Get1Bit());
+    assert(30 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(120 == parser.Get8Bits());
+    assert(0 == parser.Get1Bit());
+    assert(160 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(180 == parser.Get8Bits());
+
+    printf("Test of 8 bits done\n");
+
+    builder.Add16Bits(1);
+    builder.Add1Bit(1);
+    builder.Add16Bits(255);
+    builder.Add1Bit(0);
+    builder.Add16Bits(12756);
+    builder.Add1Bit(1);
+    builder.Add16Bits(60);
+    builder.Add1Bit(0);
+    builder.Add16Bits(30);
+    builder.Add1Bit(1);
+    builder.Add16Bits(30120);
+    builder.Add1Bit(0);
+    builder.Add16Bits(160);
+    builder.Add1Bit(1);
+    builder.Add16Bits(180);
+
+    assert(1 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get16Bits());
+    assert(0 == parser.Get1Bit());
+    assert(12756 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get16Bits());
+    assert(0 == parser.Get1Bit());
+    assert(30 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(30120 == parser.Get16Bits());
+    assert(0 == parser.Get1Bit());
+    assert(160 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(180 == parser.Get16Bits());
+
+    printf("Test of 16 bits done\n");
+
+    builder.Add24Bits(1);
+    builder.Add1Bit(1);
+    builder.Add24Bits(255);
+    builder.Add1Bit(0);
+    builder.Add24Bits(12756);
+    builder.Add1Bit(1);
+    builder.Add24Bits(60);
+    builder.Add1Bit(0);
+    builder.Add24Bits(303333);
+    builder.Add1Bit(1);
+    builder.Add24Bits(30120);
+    builder.Add1Bit(0);
+    builder.Add24Bits(160);
+    builder.Add1Bit(1);
+    builder.Add24Bits(8018018);
+
+    assert(1 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get24Bits());
+    assert(0 == parser.Get1Bit());
+    assert(12756 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get24Bits());
+    assert(0 == parser.Get1Bit());
+    assert(303333 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(30120 == parser.Get24Bits());
+    assert(0 == parser.Get1Bit());
+    assert(160 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(8018018 == parser.Get24Bits());
+
+    printf("Test of 24 bits done\n");
+
+    builder.Add32Bits(1);
+    builder.Add1Bit(1);
+    builder.Add32Bits(255);
+    builder.Add1Bit(0);
+    builder.Add32Bits(12756);
+    builder.Add1Bit(1);
+    builder.Add32Bits(60);
+    builder.Add1Bit(0);
+    builder.Add32Bits(303333);
+    builder.Add1Bit(1);
+    builder.Add32Bits(3012000012);
+    builder.Add1Bit(0);
+    builder.Add32Bits(1601601601);
+    builder.Add1Bit(1);
+    builder.Add32Bits(8018018);
+
+    assert(1 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get32Bits());
+    assert(0 == parser.Get1Bit());
+    assert(12756 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get32Bits());
+    assert(0 == parser.Get1Bit());
+    assert(303333 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(3012000012 == parser.Get32Bits());
+    assert(0 == parser.Get1Bit());
+    assert(1601601601 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(8018018 == parser.Get32Bits());
+
+    printf("Test of 32 bits done\n");
+
+    builder.AddUE(1);
+    builder.AddUE(4);
+    builder.AddUE(9809706);
+    builder.AddUE(2);
+    builder.AddUE(15);
+    builder.AddUE(16998);
+
+    assert( 106 == builder.Length());
+
+    assert(1 == parser.GetUE());
+    assert(4 == parser.GetUE());
+    assert(9809706 == parser.GetUE());
+    assert(2 == parser.GetUE());
+    assert(15 == parser.GetUE());
+    assert(16998 == parser.GetUE());
+
+    printf("Test UE bits done\n");
+
+    BitstreamBuilder builderScalabilityInfo(dataBuffer, sizeof(dataBuffer));
+    BitstreamParser parserScalabilityInfo(dataBuffer, sizeof(dataBuffer));
+
+    const WebRtc_UWord8 numberOfLayers = 4;
+    const WebRtc_UWord8 layerId[numberOfLayers] = {0,1,2,3};
+    const WebRtc_UWord8 priorityId[numberOfLayers] = {0,1,2,3};
+    const WebRtc_UWord8 discardableId[numberOfLayers] = {0,1,1,1};
+
+    const WebRtc_UWord8 dependencyId[numberOfLayers]= {0,1,1,1};
+    const WebRtc_UWord8 qualityId[numberOfLayers]= {0,0,0,1};
+    const WebRtc_UWord8 temporalId[numberOfLayers]= {0,0,1,1};
+
+    const WebRtc_UWord16 avgBitrate[numberOfLayers]= {BitRateBPSInv(100000),
+                                                    BitRateBPSInv(200000),
+                                                    BitRateBPSInv(400000),
+                                                    BitRateBPSInv(800000)};
+
+    // todo which one is the sum?
+    const WebRtc_UWord16 maxBitrateLayer[numberOfLayers]= {BitRateBPSInv(150000),
+                                                         BitRateBPSInv(300000),
+                                                         BitRateBPSInv(500000),
+                                                         BitRateBPSInv(900000)};
+
+    const WebRtc_UWord16 maxBitrateLayerRepresentation[numberOfLayers] = {BitRateBPSInv(150000),
+                                                                        BitRateBPSInv(450000),
+                                                                        BitRateBPSInv(950000),
+                                                                        BitRateBPSInv(1850000)};
+
+    assert( 16300 == BitRateBPS(BitRateBPSInv(16383)));
+    assert( 163800 == BitRateBPS(BitRateBPSInv(163830)));
+    assert( 1638300 == BitRateBPS(BitRateBPSInv(1638300)));
+    assert( 1638000 == BitRateBPS(BitRateBPSInv(1638400)));
+
+    assert( 18500 == BitRateBPS(BitRateBPSInv(18500)));
+    assert( 185000 == BitRateBPS(BitRateBPSInv(185000)));
+    assert( 1850000 == BitRateBPS(BitRateBPSInv(1850000)));
+    assert( 18500000 == BitRateBPS(BitRateBPSInv(18500000)));
+    assert( 185000000 == BitRateBPS(BitRateBPSInv(185000000)));
+
+    const WebRtc_UWord16 maxBitrareCalcWindow[numberOfLayers] = {200, 200,200,200};// in 1/100 of second
+
+    builderScalabilityInfo.Add1Bit(0);  // temporal_id_nesting_flag
+    builderScalabilityInfo.Add1Bit(0);    // priority_layer_info_present_flag
+    builderScalabilityInfo.Add1Bit(0);  // priority_id_setting_flag
+
+    builderScalabilityInfo.AddUE(numberOfLayers-1);
+
+    for(int i = 0; i<= numberOfLayers-1; i++)
+    {
+        builderScalabilityInfo.AddUE(layerId[i]);
+        builderScalabilityInfo.Add6Bits(priorityId[i]);
+        builderScalabilityInfo.Add1Bit(discardableId[i]);
+        builderScalabilityInfo.Add3Bits(dependencyId[i]);
+        builderScalabilityInfo.Add4Bits(qualityId[i]);
+        builderScalabilityInfo.Add3Bits(temporalId[i]);
+
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+
+        builderScalabilityInfo.Add1Bit(1);    // bitrate_info_present_flag
+
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+
+        builderScalabilityInfo.Add16Bits(avgBitrate[i]);
+        builderScalabilityInfo.Add16Bits(maxBitrateLayer[i]);
+        builderScalabilityInfo.Add16Bits(maxBitrateLayerRepresentation[i]);
+        builderScalabilityInfo.Add16Bits(maxBitrareCalcWindow[i]);
+
+        builderScalabilityInfo.AddUE(0); // layer_dependency_info_src_layer_id_delta
+        builderScalabilityInfo.AddUE(0); // parameter_sets_info_src_layer_id_delta
+    }
+
+    printf("Test builderScalabilityInfo done\n");
+
+    // Scalability Info parser
+    parserScalabilityInfo.Get1Bit(); // not used in further parsing
+    const WebRtc_UWord8 priority_layer_info_present = parserScalabilityInfo.Get1Bit();
+    const WebRtc_UWord8 priority_id_setting_flag = parserScalabilityInfo.Get1Bit();
+
+    WebRtc_UWord32 numberOfLayersMinusOne = parserScalabilityInfo.GetUE();
+    for(WebRtc_UWord32 j = 0; j<= numberOfLayersMinusOne; j++)
+    {
+        parserScalabilityInfo.GetUE();
+        parserScalabilityInfo.Get6Bits();
+        parserScalabilityInfo.Get1Bit();
+        parserScalabilityInfo.Get3Bits();
+        parserScalabilityInfo.Get4Bits();
+        parserScalabilityInfo.Get3Bits();
+
+        const WebRtc_UWord8 sub_pic_layer_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 sub_region_layer_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 iroi_division_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 profile_level_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 bitrate_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 frm_rate_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 frm_size_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 layer_dependency_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 parameter_sets_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 bitstream_restriction_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 exact_inter_layer_pred_flag = parserScalabilityInfo.Get1Bit();  // not used in further parsing
+
+        if(sub_pic_layer_flag || iroi_division_info_present_flag)
+        {
+            parserScalabilityInfo.Get1Bit();
+        }
+        const WebRtc_UWord8 layer_conversion_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 layer_output_flag = parserScalabilityInfo.Get1Bit();  // not used in further parsing
+
+        if(profile_level_info_present_flag)
+        {
+            parserScalabilityInfo.Get24Bits();
+        }
+        if(bitrate_info_present_flag)
+        {
+            // These are the fields this test actually verifies.
+            assert(avgBitrate[j] == parserScalabilityInfo.Get16Bits());
+            assert(maxBitrateLayer[j] == parserScalabilityInfo.Get16Bits());
+            assert(maxBitrateLayerRepresentation[j] == parserScalabilityInfo.Get16Bits());
+            assert(maxBitrareCalcWindow[j] == parserScalabilityInfo.Get16Bits());
+        }else
+        {
+            assert(false);
+        }
+        if(frm_rate_info_present_flag)
+        {
+            parserScalabilityInfo.Get2Bits();
+            parserScalabilityInfo.Get16Bits();
+        }
+        if(frm_size_info_present_flag || iroi_division_info_present_flag)
+        {
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+        }
+        if(sub_region_layer_flag)
+        {
+            parserScalabilityInfo.GetUE();
+            if(parserScalabilityInfo.Get1Bit())
+            {
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+            }
+        }
+        if(sub_pic_layer_flag)
+        {
+            parserScalabilityInfo.GetUE();
+        }
+        if(iroi_division_info_present_flag)
+        {
+            if(parserScalabilityInfo.Get1Bit())
+            {
+                parserScalabilityInfo.GetUE();
+                parserScalabilityInfo.GetUE();
+            }else
+            {
+                const WebRtc_UWord32 numRoisMinusOne = parserScalabilityInfo.GetUE();
+                for(WebRtc_UWord32 k = 0; k <= numRoisMinusOne; k++)
+                {
+                    parserScalabilityInfo.GetUE();
+                    parserScalabilityInfo.GetUE();
+                    parserScalabilityInfo.GetUE();
+                }
+            }
+        }
+        if(layer_dependency_info_present_flag)
+        {
+            const WebRtc_UWord32 numDirectlyDependentLayers = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 k = 0; k < numDirectlyDependentLayers; k++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+        } else
+        {
+            parserScalabilityInfo.GetUE();
+        }
+        if(parameter_sets_info_present_flag)
+        {
+            const WebRtc_UWord32 numSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 k = 0; k <= numSeqParameterSetMinusOne; k++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+            const WebRtc_UWord32 numSubsetSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 l = 0; l <= numSubsetSeqParameterSetMinusOne; l++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+            const WebRtc_UWord32 numPicParameterSetMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 m = 0; m <= numPicParameterSetMinusOne; m++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+        }else
+        {
+            parserScalabilityInfo.GetUE();
+        }
+        if(bitstream_restriction_info_present_flag)
+        {
+            parserScalabilityInfo.Get1Bit();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+        }
+        if(layer_conversion_flag)
+        {
+            parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 k = 0; k <2;k++)
+            {
+                if(parserScalabilityInfo.Get1Bit())
+                {
+                    parserScalabilityInfo.Get24Bits();
+                    parserScalabilityInfo.Get16Bits();
+                    parserScalabilityInfo.Get16Bits();
+                }
+            }
+        }
+    }
+    if(priority_layer_info_present)
+    {
+        const WebRtc_UWord32 prNumDidMinusOne = parserScalabilityInfo.GetUE();
+        for(WebRtc_UWord32 k = 0; k <= prNumDidMinusOne;k++)
+        {
+            parserScalabilityInfo.Get3Bits();
+            const WebRtc_UWord32 prNumMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 l = 0; l <= prNumMinusOne; l++)
+            {
+                parserScalabilityInfo.GetUE();
+                parserScalabilityInfo.Get24Bits();
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+            }
+        }
+    }
+    if(priority_id_setting_flag)
+    {
+        WebRtc_UWord8 priorityIdSettingUri;
+        WebRtc_UWord32 priorityIdSettingUriIdx = 0;
+        do
+        {
+            priorityIdSettingUri = parserScalabilityInfo.Get8Bits();
+        } while (priorityIdSettingUri != 0);
+    }
+    printf("Test parserScalabilityInfo done\n");
+
+    printf("\nAPI test of parser for ScalabilityInfo done\n");
+
+    ::Sleep(5000);
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/bwe_standalone.gypi b/trunk/src/modules/rtp_rtcp/test/bwe_standalone.gypi
new file mode 100644
index 0000000..36a50de
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/bwe_standalone.gypi
@@ -0,0 +1,111 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'bwe_standalone',
+      'type': 'executable',
+      'dependencies': [
+        'matlab_plotting',
+        'rtp_rtcp',
+        'udp_transport',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'sources': [
+        'BWEStandAlone/BWEStandAlone.cc',
+        'BWEStandAlone/TestLoadGenerator.cc',
+        'BWEStandAlone/TestLoadGenerator.h',
+        'BWEStandAlone/TestSenderReceiver.cc',
+        'BWEStandAlone/TestSenderReceiver.h',
+      ], # source
+      'conditions': [
+          ['OS=="linux"', {
+              'cflags': [
+                  '-fexceptions', # enable exceptions
+                  ],
+              },
+           ],
+          ],
+
+      'include_dirs': [
+          ],
+      'link_settings': {
+          },
+    },
+
+    {
+      'target_name': 'matlab_plotting',
+      'type': '<(library)',
+      'dependencies': [
+        'matlab_plotting_include',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+          '/opt/matlab2010a/extern/include',
+          ],
+      # 'direct_dependent_settings': {
+      #     'defines': [
+      #         'MATLAB',
+      #         ],
+      #     'include_dirs': [
+      #         'BWEStandAlone',
+      #         ],
+      #     },
+      'export_dependent_settings': [
+          'matlab_plotting_include',
+          ],
+      'sources': [
+          'BWEStandAlone/MatlabPlot.cc',
+          'BWEStandAlone/MatlabPlot.h',
+          ],
+      'link_settings': {
+          'ldflags' : [
+              '-L/opt/matlab2010a/bin/glnxa64',
+              '-leng',
+              '-lmx',
+              '-Wl,-rpath,/opt/matlab2010a/bin/glnxa64',
+              ],
+          },
+      'defines': [
+          'MATLAB',
+          ],
+      'conditions': [
+          ['OS=="linux"', {
+              'cflags': [
+                  '-fexceptions', # enable exceptions
+                  ],
+              },
+           ],
+          ],
+      },
+
+    {
+      'target_name': 'matlab_plotting_include',
+      'type': 'none',
+      'direct_dependent_settings': {
+          'defines': [
+#              'MATLAB',
+              ],
+          'include_dirs': [
+              'BWEStandAlone',
+              ],
+          },
+      },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.cc b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.cc
new file mode 100644
index 0000000..3b5390e
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.cc
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+class RtpRtcpAPITest : public ::testing::Test {
+ protected:
+  RtpRtcpAPITest() {
+    test_CSRC[0] = 1234;
+    test_CSRC[2] = 2345;
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpAPITest() {}
+
+  virtual void SetUp() {
+    module = RtpRtcp::CreateRtpRtcp(test_id, true, &fake_clock);
+    EXPECT_EQ(0, module->InitReceiver());
+    EXPECT_EQ(0, module->InitSender());
+  }
+
+  virtual void TearDown() {
+    RtpRtcp::DestroyRtpRtcp(module);
+  }
+
+  int test_id;
+  RtpRtcp* module;
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord32 test_CSRC[webrtc::kRtpCsrcSize];
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpAPITest, Basic) {
+  EXPECT_EQ(0, module->SetSequenceNumber(test_sequence_number));
+  EXPECT_EQ(test_sequence_number, module->SequenceNumber());
+
+  EXPECT_EQ(0, module->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(test_timestamp, module->StartTimestamp());
+
+  EXPECT_FALSE(module->Sending());
+  EXPECT_EQ(0, module->SetSendingStatus(true));
+  EXPECT_TRUE(module->Sending());
+}
+
+TEST_F(RtpRtcpAPITest, MTU) {
+  EXPECT_EQ(-1, module->SetMaxTransferUnit(10));
+  EXPECT_EQ(-1, module->SetMaxTransferUnit(IP_PACKET_SIZE + 1));
+  EXPECT_EQ(0, module->SetMaxTransferUnit(1234));
+  EXPECT_EQ(1234-20-8, module->MaxPayloadLength());
+
+  EXPECT_EQ(0, module->SetTransportOverhead(true, true, 12));
+  EXPECT_EQ(1234 - 20- 20 -20 - 12, module->MaxPayloadLength());
+
+  EXPECT_EQ(0, module->SetTransportOverhead(false, false, 0));
+  EXPECT_EQ(1234 - 20 - 8, module->MaxPayloadLength());
+}
+
+TEST_F(RtpRtcpAPITest, SSRC) {
+  EXPECT_EQ(0, module->SetSSRC(test_ssrc));
+  EXPECT_EQ(test_ssrc, module->SSRC());
+}
+
+TEST_F(RtpRtcpAPITest, CSRC) {
+  EXPECT_EQ(0, module->SetCSRCs(test_CSRC, 2));
+  WebRtc_UWord32 testOfCSRC[webrtc::kRtpCsrcSize];
+  EXPECT_EQ(2, module->CSRCs(testOfCSRC));
+  EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
+  EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
+}
+
+TEST_F(RtpRtcpAPITest, TrafficSmoothing) {
+  EXPECT_FALSE(module->TransmissionSmoothingStatus());
+  module->SetTransmissionSmoothingStatus(true);
+  EXPECT_TRUE(module->TransmissionSmoothingStatus());
+}
+
+TEST_F(RtpRtcpAPITest, RTCP) {
+  EXPECT_EQ(kRtcpOff, module->RTCP());
+  EXPECT_EQ(0, module->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(kRtcpCompound, module->RTCP());
+
+  EXPECT_EQ(0, module->SetCNAME("john.doe@test.test"));
+  EXPECT_EQ(-1, module->SetCNAME(NULL));
+
+  char cName[RTCP_CNAME_SIZE];
+  EXPECT_EQ(0, module->CNAME(cName));
+  EXPECT_STRCASEEQ(cName, "john.doe@test.test");
+  EXPECT_EQ(-1, module->CNAME(NULL));
+
+  EXPECT_FALSE(module->TMMBR());
+  EXPECT_EQ(0, module->SetTMMBRStatus(true));
+  EXPECT_TRUE(module->TMMBR());
+  EXPECT_EQ(0, module->SetTMMBRStatus(false));
+  EXPECT_FALSE(module->TMMBR());
+
+  EXPECT_EQ(kNackOff, module->NACK());
+  EXPECT_EQ(0, module->SetNACKStatus(kNackRtcp));
+  EXPECT_EQ(kNackRtcp, module->NACK());
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.gypi b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.gypi
new file mode 100644
index 0000000..d47e6d4
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'test_rtp_rtcp_api',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+      ],
+      
+      'include_dirs': [
+        '../../interface',
+        '../../source',
+        '../../../../system_wrappers/interface',
+      ],
+   
+      'sources': [
+        'test_api.cc',
+        'test_api_audio.cc',
+        'test_api_nack.cc',
+        'test_api_rtcp.cc',
+        'test_api_video.cc',
+      ],
+      
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.h b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.h
new file mode 100644
index 0000000..c4ff916
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class FakeRtpRtcpClock : public RtpRtcpClock {
+ public:
+  FakeRtpRtcpClock() {
+    time_in_ms_ = 123456;
+  }
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_UWord32 GetTimeInMS() {
+    return time_in_ms_;
+  }
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+    secs = time_in_ms_ / 1000;
+    frac = (time_in_ms_ % 1000) * 4294967;
+  }
+  void IncrementTime(WebRtc_UWord32 time_increment_ms) {
+    time_in_ms_ += time_increment_ms;
+  }
+ private:
+  WebRtc_UWord32 time_in_ms_;
+};
+
+// This class sends all its packets straight to the provided RtpRtcp module,
+// with optional packet loss.
+class LoopBackTransport : public webrtc::Transport {
+ public:
+  LoopBackTransport(RtpRtcp* rtpRtcpModule)
+    : _count(0),
+      _packetLoss(0),
+      _rtpRtcpModule(rtpRtcpModule) {
+  }
+  void DropEveryNthPacket(int n) {
+    _packetLoss = n;
+  }
+  virtual int SendPacket(int channel, const void *data, int len) {
+    _count++;
+    if (_packetLoss > 0) {
+      if ((_count % _packetLoss) == 0) {
+        return len;
+      }
+    }
+    if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+  virtual int SendRTCPPacket(int channel, const void *data, int len) {
+    if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+ private:
+  int _count;
+  int _packetLoss;
+  RtpRtcp* _rtpRtcpModule;
+};
+
+class RtpReceiver : public RtpData {
+ public:
+   virtual WebRtc_Word32 OnReceivedPayloadData(
+       const WebRtc_UWord8* payloadData,
+       const WebRtc_UWord16 payloadSize,
+       const webrtc::WebRtcRTPHeader* rtpHeader) {
+    return 0;
+  }
+};
+
+}  // namespace webrtc
+ 
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_audio.cc b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
new file mode 100644
index 0000000..d066864
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
@@ -0,0 +1,446 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+#define test_rate 64000u
+
+class VerifyingAudioReceiver : public RtpData {
+ public:
+  VerifyingAudioReceiver(RtpRtcp* rtpRtcpModule) {}
+
+  virtual WebRtc_Word32 OnReceivedPayloadData(
+      const WebRtc_UWord8* payloadData,
+      const WebRtc_UWord16 payloadSize,
+      const webrtc::WebRtcRTPHeader* rtpHeader) {
+    if (rtpHeader->header.payloadType == 98 ||
+        rtpHeader->header.payloadType == 99) {
+      EXPECT_EQ(4, payloadSize);
+      char str[5];
+      memcpy(str, payloadData, payloadSize);
+      str[4] = 0;
+      // All our test vectors for payload types 98 and 99 (including the
+      // stereo ones) are, on a per-channel basis, equal to the 4 chars "test".
+      // Note there is no null termination, so we append one ourselves before
+      // comparing with EXPECT_STRCASEEQ.
+      EXPECT_STRCASEEQ("test", str);
+      return 0;
+    }
+    if (rtpHeader->header.payloadType == 100 ||
+        rtpHeader->header.payloadType == 101 ||
+        rtpHeader->header.payloadType == 102) {
+      if (rtpHeader->type.Audio.channel == 1) {
+        if (payloadData[0] == 0xff) {
+          // All our test vectors for payload type 100, 101 and 102 have the
+          // first channel data being equal to 0xff.
+          return 0;
+        }
+      } else if (rtpHeader->type.Audio.channel == 2) {
+        if (payloadData[0] == 0x0) {
+          // All our test vectors for payload type 100, 101 and 102 have the
+          // second channel data being equal to 0x00.
+          return 0;
+        }
+      } else if (rtpHeader->type.Audio.channel == 3) {
+        // All our test vectors for payload type 100, 101 and 102 have the
+        // third channel data being equal to 0xaa.
+        if (payloadData[0] == 0xaa) {
+          return 0;
+        }
+      }
+      ADD_FAILURE() << "This code path should never happen.";
+      return -1;
+    }
+    return 0;
+  }
+};
+
+class RTPCallback : public RtpFeedback {
+ public:
+  virtual WebRtc_Word32 OnInitializeDecoder(
+      const WebRtc_Word32 id,
+      const WebRtc_Word8 payloadType,
+      const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+      const int frequency,
+      const WebRtc_UWord8 channels,
+      const WebRtc_UWord32 rate) {
+    if (payloadType == 96) {
+      EXPECT_EQ(test_rate, rate) <<
+          "The rate should be 64K for this payloadType";
+    }
+    return 0;
+  }
+  virtual void OnPacketTimeout(const WebRtc_Word32 id) {
+  }
+  virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                const RtpRtcpPacketType packetType) {
+  }
+  virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                     const RTPAliveType alive) {
+  }
+  virtual void OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 SSRC) {
+  }
+  virtual void OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 CSRC,
+                                     const bool added) {
+  }
+};
+
+class AudioFeedback : public RtpAudioFeedback {
+  virtual void OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                        const WebRtc_UWord8 event,
+                                        const bool end) {
+    static WebRtc_UWord8 expectedEvent = 0;
+
+    if (end) {
+      WebRtc_UWord8 oldEvent = expectedEvent-1;
+      if (expectedEvent == 32) {
+        oldEvent = 15;
+      }
+      EXPECT_EQ(oldEvent, event);
+    } else {
+      EXPECT_EQ(expectedEvent, event);
+      expectedEvent++;
+    }
+    if (expectedEvent == 16) {
+      expectedEvent = 32;
+    }
+  }
+  virtual void OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                                    const WebRtc_UWord8 event,
+                                    const WebRtc_UWord16 lengthMs,
+                                    const WebRtc_UWord8 volume) {
+  };
+};
+
+class RtpRtcpAudioTest : public ::testing::Test {
+ protected:
+  RtpRtcpAudioTest() {
+    test_CSRC[0] = 1234;
+    test_CSRC[2] = 2345;
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpAudioTest() {}
+
+  virtual void SetUp() {
+    module1 = RtpRtcp::CreateRtpRtcp(test_id, true, &fake_clock);
+    module2 = RtpRtcp::CreateRtpRtcp(test_id+1, true, &fake_clock);
+
+    EXPECT_EQ(0, module1->InitReceiver());
+    EXPECT_EQ(0, module1->InitSender());
+    EXPECT_EQ(0, module2->InitReceiver());
+    EXPECT_EQ(0, module2->InitSender());
+    data_receiver1 = new VerifyingAudioReceiver(module1);
+    EXPECT_EQ(0, module1->RegisterIncomingDataCallback(data_receiver1));
+    data_receiver2 = new VerifyingAudioReceiver(module2);
+    EXPECT_EQ(0, module2->RegisterIncomingDataCallback(data_receiver2));
+    transport1 = new LoopBackTransport(module2);
+    EXPECT_EQ(0, module1->RegisterSendTransport(transport1));
+    transport2 = new LoopBackTransport(module1);
+    EXPECT_EQ(0, module2->RegisterSendTransport(transport2));
+    rtp_callback = new RTPCallback();
+    EXPECT_EQ(0, module2->RegisterIncomingRTPCallback(rtp_callback));
+  }
+
+  virtual void TearDown() {
+    RtpRtcp::DestroyRtpRtcp(module1);
+    RtpRtcp::DestroyRtpRtcp(module2);
+    delete transport1;
+    delete transport2;
+    delete data_receiver1;
+    delete data_receiver2;
+    delete rtp_callback;
+  }
+
+  int test_id;
+  RtpRtcp* module1;
+  RtpRtcp* module2;
+  VerifyingAudioReceiver* data_receiver1;
+  VerifyingAudioReceiver* data_receiver2;
+  LoopBackTransport* transport1;
+  LoopBackTransport* transport2;
+  RTPCallback* rtp_callback;
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord32 test_CSRC[webrtc::kRtpCsrcSize];
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpAudioTest, Basic) {
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+
+  EXPECT_FALSE(module1->TelephoneEvent());
+
+  // Test detection at the end of a DTMF tone.
+  EXPECT_EQ(0, module2->SetTelephoneEventStatus(true, true, true));
+  EXPECT_EQ(true, module2->TelephoneEvent());
+
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  // Start basic RTP test.
+
+  // Send an empty RTP packet.
+  // Should fail since we have not registered the payload type.
+  EXPECT_EQ(-1, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
+                                          96, 0, NULL, 0));
+
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+  printf("4\n");
+
+  const WebRtc_UWord8 test[5] = "test";
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                         0, test, 4));
+
+  EXPECT_EQ(test_ssrc, module2->RemoteSSRC());
+  EXPECT_EQ(test_timestamp, module2->RemoteTimestamp());
+}
+
+TEST_F(RtpRtcpAudioTest, RED) {
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  voiceCodec.pltype = 127;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "RED", 4);
+
+  EXPECT_EQ(0, module1->SetSendREDPayloadType(voiceCodec.pltype));
+  WebRtc_Word8 red = 0;
+  EXPECT_EQ(0, module1->SendREDPayloadType(red));
+  EXPECT_EQ(voiceCodec.pltype, red);
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  RTPFragmentationHeader fragmentation;
+  fragmentation.fragmentationVectorSize = 2;
+  fragmentation.fragmentationLength = new WebRtc_UWord32[2];
+  fragmentation.fragmentationLength[0] = 4;
+  fragmentation.fragmentationLength[1] = 4;
+  fragmentation.fragmentationOffset = new WebRtc_UWord32[2];
+  fragmentation.fragmentationOffset[0] = 0;
+  fragmentation.fragmentationOffset[1] = 4;
+  fragmentation.fragmentationTimeDiff = new WebRtc_UWord16[2];
+  fragmentation.fragmentationTimeDiff[0] = 0;
+  fragmentation.fragmentationTimeDiff[1] = 0;
+  fragmentation.fragmentationPlType = new WebRtc_UWord8[2];
+  fragmentation.fragmentationPlType[0] = 96;
+  fragmentation.fragmentationPlType[1] = 96;
+
+  const WebRtc_UWord8 test[5] = "test";
+  // Send a RTP packet.
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
+                                         96, 160, test, 4,
+                                         &fragmentation));
+
+  EXPECT_EQ(0, module1->SetSendREDPayloadType(-1));
+  EXPECT_EQ(-1, module1->SendREDPayloadType(red));
+}
+
+// Verifies sending DTMF tones out-of-band (RFC 2833 / RFC 4733 telephone
+// events) interleaved with regular speech packets on module1, received and
+// checked by module2's AudioFeedback callback.
+TEST_F(RtpRtcpAudioTest, DTMF) {
+  // Register PCMU (payload type 96, 8 kHz) as the speech codec on both ends.
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  // module2 reports received telephone events through this callback.
+  AudioFeedback* audioFeedback = new AudioFeedback();
+  EXPECT_EQ(0, module2->RegisterAudioCallback(audioFeedback));
+
+  // Prepare for DTMF: register the telephone-event payload (type 97).
+  voiceCodec.pltype = 97;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "telephone-event", 16);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Start DTMF test.
+  WebRtc_UWord32 timeStamp = 160;
+
+  // Queue all 16 DTMF events (0-9, *, #, A-D) using RFC 2833 (4733).
+  for (int i = 0; i < 16; i++) {
+    EXPECT_EQ(0, module1->SendTelephoneEventOutband(i, timeStamp, 10));
+  }
+  timeStamp += 160;  // Prepare for next packet.
+
+  const WebRtc_UWord8 test[9] = "test";
+
+  // Send speech packets long enough to drain the queued tones: 16 tones at
+  // 160 ms each plus 100 ms pause between = 2560 ms + 1600 ms = 4160 ms.
+  // Each iteration advances the fake clock by one 20 ms packet interval.
+  for (;timeStamp <= 250 * 160; timeStamp += 160) {
+    EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                           timeStamp, test, 4));
+    fake_clock.IncrementTime(20);
+    module1->Process();
+  }
+  // Queue one long event (code 32, 9000 ms) and drain it as well.
+  // NOTE(review): 32 is outside the 0-15 DTMF range — presumably testing an
+  // extended telephone-event code; confirm against the API contract.
+  EXPECT_EQ(0, module1->SendTelephoneEventOutband(32, 9000, 10));
+
+  for (;timeStamp <= 740 * 160; timeStamp += 160) {
+    EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                           timeStamp, test, 4));
+    fake_clock.IncrementTime(20);
+    module1->Process();
+  }
+  delete audioFeedback;
+}
+
+// Exercises RTP packetization of sample-based multi-channel (3-channel)
+// audio at 8, 16, 5, 3 and 2 bits per sample (PCMA, L16 and the G.726
+// family). The G.726 payloads below are bit-packed by hand; the per-line
+// comments describe the intended bit layout of each byte.
+TEST_F(RtpRtcpAudioTest, Stereo) {
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  // Prepare for 3 channel audio 8 bits per sample.
+  voiceCodec.pltype = 98;
+  voiceCodec.channels = 3;
+  memcpy(voiceCodec.plname, "PCMA", 5);
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Prepare for 3 channel audio 16 bits per sample.
+  voiceCodec.pltype = 99;
+  memcpy(voiceCodec.plname, "L16", 4);
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Prepare for 3 channel audio 5 bits per sample.
+  voiceCodec.pltype = 100;
+  memcpy(voiceCodec.plname, "G726-40",8);
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Prepare for 3 channel audio 3 bits per sample.
+  voiceCodec.pltype = 101;
+  memcpy(voiceCodec.plname, "G726-24",8);
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Prepare for 3 channel audio 2 bits per sample.
+  voiceCodec.pltype = 102;
+  memcpy(voiceCodec.plname, "G726-16",8);
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Test sample based multi channel codec, 3 channels 8 bits.
+  WebRtc_UWord8 test3channels[15] = "ttteeesssttt";
+  WebRtc_UWord32 timeStamp = 160;
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 98,
+                                         timeStamp, test3channels, 12));
+  fake_clock.IncrementTime(20);
+  module1->Process();
+  timeStamp += 160;  // Prepare for next packet.
+
+  // Test sample based multi channel codec, 3 channels 16 bits.
+  const WebRtc_UWord8 test3channels16[13] = "teteteststst";
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 99,
+                                         timeStamp, test3channels16, 12));
+  fake_clock.IncrementTime(20);
+  module1->Process();
+  timeStamp += 160;  // Prepare for next packet.
+
+  // Test sample based multi channel codec, 3 channels 5 bits.
+  test3channels[0] = 0xf8;  // 5 ones 3 zeros.
+  test3channels[1] = 0x2b;  // 2 zeros 5 10 1 one.
+  test3channels[2] = 0xf0;  // 4 ones 4 zeros.
+  test3channels[3] = 0x2b;  // 1 zero 5 01 2 ones.
+  test3channels[4] = 0xe0;  // 3 ones 5 zeros.
+  test3channels[5] = 0x0;
+  test3channels[6] = 0x0;
+  test3channels[7] = 0x0;
+  test3channels[8] = 0x0;
+  test3channels[9] = 0x0;
+  test3channels[10] = 0x0;
+  test3channels[11] = 0x0;
+  test3channels[12] = 0x0;
+  test3channels[13] = 0x0;
+  test3channels[14] = 0x0;
+
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 100,
+                                         timeStamp, test3channels, 15));
+  fake_clock.IncrementTime(20);
+  module1->Process();
+  timeStamp += 160;  // Prepare for next packet.
+
+  // Test sample based multi channel codec, 3 channels 3 bits.
+  test3channels[0] = 0xe2;  // 3 ones    3 zeros     2 10
+  test3channels[1] = 0xf0;  // 1 1       3 ones      3 zeros     1 0
+  test3channels[2] = 0xb8;  // 2 10      3 ones      3 zeros
+  test3channels[3] = 0xa0;  // 3 101     5 zeros
+  test3channels[4] = 0x0;
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 101,
+                                         timeStamp, test3channels, 15));
+  fake_clock.IncrementTime(20);
+  module1->Process();
+  timeStamp += 160;  // Prepare for next packet.
+
+  // Test sample based multi channel codec, 3 channels 2 bits.
+  test3channels[0] = 0xcb;  // 2 ones    2 zeros     2 10        2 ones
+  test3channels[1] = 0x2c;  // 2 zeros   2 10        2 ones      2 zeros
+  test3channels[2] = 0xb2;  // 2 10      2 ones      2 zeros     2 10
+  test3channels[3] = 0xcb;  // 2 ones    2 zeros     2 10        2 ones
+  test3channels[4] = 0x2c;  // 2 zeros   2 10        2 ones      2 zeros
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 102,
+                                         timeStamp, test3channels, 15));
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_nack.cc b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_nack.cc
new file mode 100644
index 0000000..fd3c3df
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_nack.cc
@@ -0,0 +1,249 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+ 
+// Capacity of the per-frame NACK request buffer used by the tests below.
+const int kVideoNackListSize = 10;
+// Arbitrary module id and stream parameters shared by the NACK tests.
+const int kTestId = 123;
+const WebRtc_UWord32 kTestSsrc = 3456;
+const WebRtc_UWord16 kTestSequenceNumber = 2345;
+// Expected total media packets delivered after all frames are recovered.
+const WebRtc_UWord32 kTestNumberOfPackets = 450;
+// Expected retransmissions on the RTX SSRC when dropping every 10th packet.
+const int kTestNumberOfRtxPackets = 49;
+
+// RtpData sink that records each delivered packet's sequence number and
+// fails the test if the same sequence number is ever delivered twice
+// (i.e. a retransmission must not reach the application more than once).
+class VerifyingNackReceiver : public RtpData
+{
+ public:
+  VerifyingNackReceiver() {}
+
+  virtual WebRtc_Word32 OnReceivedPayloadData(
+      const WebRtc_UWord8* data,
+      const WebRtc_UWord16 size,
+      const webrtc::WebRtcRTPHeader* rtp_header) {
+    
+    // Every delivered packet must belong to the media stream under test.
+    EXPECT_EQ(kTestSsrc, rtp_header->header.ssrc);
+    // The sequence number must not have been seen before.
+    EXPECT_EQ(find(sequence_numbers_.begin(),
+                    sequence_numbers_.end(),
+                    rtp_header->header.sequenceNumber),
+              sequence_numbers_.end());
+    sequence_numbers_.push_back(rtp_header->header.sequenceNumber);
+    return 0;
+  }
+  // Sequence numbers of all delivered packets, in arrival order.
+  std::vector<WebRtc_UWord16 > sequence_numbers_;
+};
+
+// Transport that loops every sent packet straight back into the given
+// module's receive path. Can drop every n-th RTP packet to provoke NACKs,
+// and counts packets carrying the configured RTX SSRC. RTCP is never
+// dropped so NACK feedback always arrives.
+class NackLoopBackTransport : public webrtc::Transport {
+ public:
+  NackLoopBackTransport(RtpRtcp* rtp_rtcp_module, uint32_t rtx_ssrc)
+    : count_(0),
+      packet_loss_(0),
+      rtx_ssrc_(rtx_ssrc),
+      count_rtx_ssrc_(0),
+      module_(rtp_rtcp_module) {
+  }
+  // n == 0 disables loss; otherwise every n-th packet is discarded.
+  void DropEveryNthPacket(int n) {
+    packet_loss_ = n;
+  }
+  virtual int SendPacket(int channel, const void *data, int len) {
+    count_++;
+    // The SSRC occupies bytes 8-11 of the fixed RTP header (big-endian).
+    const unsigned char* ptr = static_cast<const unsigned  char*>(data);
+    uint32_t ssrc = (ptr[8] << 24) + (ptr[9] << 16) + (ptr[10] << 8) + ptr[11];
+    if (ssrc == rtx_ssrc_) count_rtx_ssrc_++;
+
+    if (packet_loss_ > 0) {
+      if ((count_ % packet_loss_) == 0) {
+        // Simulate loss: report success but never deliver the packet.
+        return len;
+      }
+    }
+    if (module_->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+  virtual int SendRTCPPacket(int channel, const void *data, int len) {
+    // RTCP feedback is always delivered (never subject to packet_loss_).
+    if (module_->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+  int count_;          // Total RTP packets seen by SendPacket().
+  int packet_loss_;    // Drop every n-th packet; 0 means no loss.
+  uint32_t rtx_ssrc_;  // SSRC of the retransmission stream.
+  int count_rtx_ssrc_; // RTP packets observed carrying rtx_ssrc_.
+  RtpRtcp* module_;    // Receiving module (not owned).
+};
+
+// Fixture for the NACK tests: one video RtpRtcp module looped back onto
+// itself through NackLoopBackTransport, with a VerifyingNackReceiver
+// checking that retransmitted packets are delivered exactly once.
+class RtpRtcpNackTest : public ::testing::Test {
+ protected:
+  RtpRtcpNackTest() {}
+  ~RtpRtcpNackTest() {}
+
+  virtual void SetUp() {
+    // Create a video module (audio == false) driven by the fake clock.
+    video_module_ = RtpRtcp::CreateRtpRtcp(kTestId, false, &fake_clock);
+    EXPECT_EQ(0, video_module_->InitReceiver());
+    EXPECT_EQ(0, video_module_->InitSender());
+    EXPECT_EQ(0, video_module_->SetRTCPStatus(kRtcpCompound));
+    EXPECT_EQ(0, video_module_->SetSSRC(kTestSsrc));
+    EXPECT_EQ(0, video_module_->SetNACKStatus(kNackRtcp));
+    // Packets must be stored so they can be retransmitted on NACK.
+    EXPECT_EQ(0, video_module_->SetStorePacketsStatus(true));
+    EXPECT_EQ(0, video_module_->SetSendingStatus(true));
+    EXPECT_EQ(0, video_module_->SetSequenceNumber(kTestSequenceNumber));
+    EXPECT_EQ(0, video_module_->SetStartTimestamp(111111));
+
+    // RTX traffic (if enabled by a test) uses kTestSsrc + 1.
+    transport_ = new NackLoopBackTransport(video_module_, kTestSsrc + 1);
+    EXPECT_EQ(0, video_module_->RegisterSendTransport(transport_));
+
+    nack_receiver_ = new VerifyingNackReceiver();
+    EXPECT_EQ(0, video_module_->RegisterIncomingDataCallback(nack_receiver_));
+
+    VideoCodec video_codec;
+    memset(&video_codec, 0, sizeof(video_codec));
+    video_codec.plType = 123;
+    memcpy(video_codec.plName, "I420", 5);
+
+    EXPECT_EQ(0, video_module_->RegisterSendPayload(video_codec));
+    EXPECT_EQ(0, video_module_->RegisterReceivePayload(video_codec));
+
+    // Fill the 65000-byte payload with a repeating 0-9 pattern.
+    payload_data_length = sizeof(payload_data);
+
+    for (int n = 0; n < payload_data_length; n++) {
+      payload_data[n] = n % 10;
+    }
+  }
+
+  virtual void TearDown() {
+    RtpRtcp::DestroyRtpRtcp(video_module_);
+    delete transport_;
+    delete nack_receiver_;
+  }
+
+  RtpRtcp* video_module_;            // Module under test (owned).
+  NackLoopBackTransport* transport_; // Lossy loopback transport (owned).
+  VerifyingNackReceiver* nack_receiver_;  // Delivery checker (owned).
+  WebRtc_UWord8  payload_data[65000];     // One frame's worth of payload.
+  int payload_data_length;
+  FakeRtpRtcpClock fake_clock;       // Deterministic time source.
+};
+
+// Drops every 10th RTP packet, detects the gaps in the received sequence
+// numbers after each frame, NACKs them, and finally verifies that every
+// packet of every frame was delivered exactly once with no RTX traffic.
+TEST_F(RtpRtcpNackTest, RTCP) {
+  WebRtc_UWord32 timestamp = 3000;
+  WebRtc_UWord16 nack_list[kVideoNackListSize];
+  transport_->DropEveryNthPacket(10);
+
+  for (int frame = 0; frame < 10; ++frame) {
+    EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
+                                                timestamp,
+                                                payload_data,
+                                                payload_data_length));
+
+    // Sort so that gaps between consecutive entries are the missing packets.
+    std::sort(nack_receiver_->sequence_numbers_.begin(),
+              nack_receiver_->sequence_numbers_.end());
+
+    std::vector<WebRtc_UWord16> missing_sequence_numbers;
+    std::vector<WebRtc_UWord16>::iterator it =
+        nack_receiver_->sequence_numbers_.begin();
+
+    while (it != nack_receiver_->sequence_numbers_.end()) {
+      WebRtc_UWord16 sequence_number_1 = *it;
+      ++it;
+      if (it != nack_receiver_->sequence_numbers_.end()) {
+        WebRtc_UWord16 sequence_number_2 = *it;
+        // Add all missing sequence numbers to list
+        for (WebRtc_UWord16 i = sequence_number_1 + 1; i < sequence_number_2;
+            ++i) {
+          missing_sequence_numbers.push_back(i);
+        }
+      }
+    }
+    // Copy at most kVideoNackListSize entries: nack_list is a fixed-size
+    // stack array, and the missing list can exceed it. The unbounded copy
+    // previously overflowed the array (undefined behavior).
+    int n = 0;
+    for (it = missing_sequence_numbers.begin();
+        it != missing_sequence_numbers.end() && n < kVideoNackListSize; ++it) {
+      nack_list[n++] = (*it);
+    }
+    video_module_->SendNACK(nack_list, n);
+    fake_clock.IncrementTime(33);
+    video_module_->Process();
+
+    // Prepare next frame.
+    timestamp += 3000;
+  }
+  // All packets must have arrived, in a contiguous sequence-number range,
+  // and nothing was sent on the RTX SSRC (RTX disabled in this test).
+  std::sort(nack_receiver_->sequence_numbers_.begin(),
+            nack_receiver_->sequence_numbers_.end());
+  EXPECT_EQ(kTestSequenceNumber, *(nack_receiver_->sequence_numbers_.begin()));
+  EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
+            *(nack_receiver_->sequence_numbers_.rbegin()));
+  EXPECT_EQ(kTestNumberOfPackets, nack_receiver_->sequence_numbers_.size());
+  EXPECT_EQ(0, transport_->count_rtx_ssrc_);
+}
+
+// Same scenario as the RTCP test above, but with RTX (RFC 4588) enabled:
+// retransmissions are sent on a separate SSRC (kTestSsrc + 1) and the
+// expected number of RTX packets is verified at the end.
+TEST_F(RtpRtcpNackTest, RTX) {
+  EXPECT_EQ(0, video_module_->SetRTXReceiveStatus(true, kTestSsrc + 1));
+  EXPECT_EQ(0, video_module_->SetRTXSendStatus(true, true, kTestSsrc + 1));
+
+  transport_->DropEveryNthPacket(10);
+
+  WebRtc_UWord32 timestamp = 3000;
+  WebRtc_UWord16 nack_list[kVideoNackListSize];
+
+  for (int frame = 0; frame < 10; ++frame) {
+    EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
+                                               timestamp,
+                                               payload_data,
+                                               payload_data_length));
+
+    // Sort so that gaps between consecutive entries are the missing packets.
+    std::sort(nack_receiver_->sequence_numbers_.begin(),
+              nack_receiver_->sequence_numbers_.end());
+
+    std::vector<WebRtc_UWord16> missing_sequence_numbers;
+
+
+    std::vector<WebRtc_UWord16>::iterator it =
+        nack_receiver_->sequence_numbers_.begin();
+    while (it != nack_receiver_->sequence_numbers_.end()) {
+      int sequence_number_1 = *it;
+      ++it;
+      if (it != nack_receiver_->sequence_numbers_.end()) {
+        int sequence_number_2 = *it;
+        // Add all missing sequence numbers to list.
+        for (int i = sequence_number_1 + 1; i < sequence_number_2; ++i) {
+          missing_sequence_numbers.push_back(i);
+        }
+      }
+    }
+    // Copy at most kVideoNackListSize entries: nack_list is a fixed-size
+    // stack array, and the missing list can exceed it. The unbounded copy
+    // previously overflowed the array (undefined behavior).
+    int n = 0;
+    for (it = missing_sequence_numbers.begin();
+        it != missing_sequence_numbers.end() && n < kVideoNackListSize; ++it) {
+      nack_list[n++] = (*it);
+    }
+    video_module_->SendNACK(nack_list, n);
+    fake_clock.IncrementTime(33);
+    video_module_->Process();
+
+    // Prepare next frame.
+    timestamp += 3000;
+  }
+  // Full recovery, contiguous range, and the expected amount of RTX traffic.
+  std::sort(nack_receiver_->sequence_numbers_.begin(),
+            nack_receiver_->sequence_numbers_.end());
+  EXPECT_EQ(kTestSequenceNumber, *(nack_receiver_->sequence_numbers_.begin()));
+  EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
+            *(nack_receiver_->sequence_numbers_.rbegin()));
+  EXPECT_EQ(kTestNumberOfPackets, nack_receiver_->sequence_numbers_.size());
+  EXPECT_EQ(kTestNumberOfRtxPackets, transport_->count_rtx_ssrc_);
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
new file mode 100644
index 0000000..306d47f
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
@@ -0,0 +1,333 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+ 
+const WebRtc_UWord64 kTestPictureId = 12345678;
+
+// RtcpFeedback implementation that asserts the expected values for the
+// RTCP feedback messages exercised by the tests below (SLI, RPSI, APP,
+// sender reports). Callbacks not under test are empty stubs.
+class RtcpCallback : public RtcpFeedback {
+ public:
+  RtcpCallback(RtpRtcp* module) {
+    _rtpRtcpModule = module;  // Not owned.
+  };
+  virtual void OnRTCPPacketTimeout(const WebRtc_Word32 id) {
+  }
+  virtual void OnLipSyncUpdate(const WebRtc_Word32 id,
+                               const WebRtc_Word32 audioVideoOffset) {
+  };
+  virtual void OnTMMBRReceived(const WebRtc_Word32 id,
+                               const WebRtc_UWord16 bwEstimateKbit) {
+  };
+  virtual void OnXRVoIPMetricReceived(
+      const WebRtc_Word32 id,
+      const RTCPVoIPMetric* metric,
+      const WebRtc_Word8 VoIPmetricBuffer[28]) {
+  };
+  virtual void OnSLIReceived(const WebRtc_Word32 id,
+                             const WebRtc_UWord8 pictureId) {
+    // The test sends picture id 156; 28 is presumably what survives the
+    // 6-bit SLI picture-id field (156 & 0x3f == 28) — confirm against the
+    // SLI packing in the RTCP sender.
+    EXPECT_EQ(28, pictureId);
+  };
+
+  virtual void OnRPSIReceived(const WebRtc_Word32 id,
+                              const WebRtc_UWord64 pictureId) {
+    // RPSI must round-trip the full 64-bit picture id.
+    EXPECT_EQ(kTestPictureId, pictureId);
+  };
+  virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
+                                         const WebRtc_UWord8 subType,
+                                         const WebRtc_UWord32 name,
+                                         const WebRtc_UWord16 length,
+                                         const WebRtc_UWord8* data) {
+    // Decode the 4-character APP packet name from its 32-bit form.
+    char print_name[5];
+    print_name[0] = static_cast<char>(name >> 24);
+    print_name[1] = static_cast<char>(name >> 16);
+    print_name[2] = static_cast<char>(name >> 8);
+    print_name[3] = static_cast<char>(name);
+    print_name[4] = 0;
+
+    EXPECT_STRCASEEQ("test", print_name);
+  };
+
+  virtual void OnSendReportReceived(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 senderSSRC) {
+    // After an SR arrives, remote sender statistics must be available.
+    RTCPSenderInfo senderInfo;
+    EXPECT_EQ(0, _rtpRtcpModule->RemoteRTCPStat(&senderInfo));
+  };
+
+  virtual void OnReceiveReportReceived(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 senderSSRC) {
+  };
+ private:
+  RtpRtcp* _rtpRtcpModule;  // Module whose stats are queried (not owned).
+};
+
+// Fixture with two audio RtpRtcp modules cross-connected via loopback
+// transports, used for the RTCP feedback tests below.
+class RtpRtcpRtcpTest : public ::testing::Test {
+ protected:
+  RtpRtcpRtcpTest() {
+    // Two contributing sources; SetUpCallFromModule1 registers exactly two
+    // via SetCSRCs(test_CSRC, 2) and the tests compare test_CSRC[0] and
+    // test_CSRC[1]. The original code wrote test_CSRC[2], leaving
+    // test_CSRC[1] uninitialized (read later in the RTCP test).
+    test_CSRC[0] = 1234;
+    test_CSRC[1] = 2345;
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpRtcpTest() {}
+
+  virtual void SetUp() {
+    // Two audio modules (audio == true) sharing one fake clock.
+    module1 = RtpRtcp::CreateRtpRtcp(test_id, true, &fake_clock);
+    module2 = RtpRtcp::CreateRtpRtcp(test_id+1, true, &fake_clock);
+
+    EXPECT_EQ(0, module1->InitReceiver());
+    EXPECT_EQ(0, module1->InitSender());
+    EXPECT_EQ(0, module2->InitReceiver());
+    EXPECT_EQ(0, module2->InitSender());
+    receiver = new RtpReceiver();
+    EXPECT_EQ(0, module2->RegisterIncomingDataCallback(receiver));
+    // module1 sends to module2 and vice versa.
+    transport1 = new LoopBackTransport(module2);
+    EXPECT_EQ(0, module1->RegisterSendTransport(transport1));
+    transport2 = new LoopBackTransport(module1);
+    EXPECT_EQ(0, module2->RegisterSendTransport(transport2));
+  }
+
+  virtual void TearDown() {
+    RtpRtcp::DestroyRtpRtcp(module1);
+    RtpRtcp::DestroyRtpRtcp(module2);
+    delete transport1;
+    delete transport2;
+    delete receiver;
+  }
+
+  // Configures a full call from module1 to module2: RTCP callbacks, SSRCs,
+  // CSRCs, CNAME, PCMU payload on both ends, then sends one RTP packet so
+  // the receiver accepts subsequent RTCP from module1.
+  void SetUpCallFromModule1(RtcpCallback* feedback1, RtcpCallback* feedback2 ) {
+    EXPECT_EQ(0, module1->RegisterIncomingRTCPCallback(feedback1));
+    EXPECT_EQ(0, module2->RegisterIncomingRTCPCallback(feedback2));
+
+    EXPECT_EQ(0, module1->SetRTCPStatus(kRtcpCompound));
+    EXPECT_EQ(0, module2->SetRTCPStatus(kRtcpCompound));
+
+    EXPECT_EQ(0, module2->SetSSRC(test_ssrc + 1));
+    EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+    EXPECT_EQ(0, module1->SetSequenceNumber(test_sequence_number));
+    EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+    EXPECT_EQ(0, module1->SetCSRCs(test_CSRC, 2));
+    EXPECT_EQ(0, module1->SetCNAME("john.doe@test.test"));
+
+    EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+    CodecInst voiceCodec;
+    voiceCodec.pltype = 96;
+    voiceCodec.plfreq = 8000;
+    voiceCodec.rate = 64000;
+    memcpy(voiceCodec.plname, "PCMU", 5);
+
+    EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+    EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+    EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+    EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+    // We need to send one RTP packet to get the RTCP packet to be accepted by
+    // the receiving module.
+    // send RTP packet with the data "testtest"
+    const WebRtc_UWord8 test[9] = "testtest";
+    EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                           0, test, 8));
+  }
+
+  int test_id;
+  RtpRtcp* module1;   // Sender under test (owned).
+  RtpRtcp* module2;   // Receiver under test (owned).
+  RtpReceiver* receiver;          // Payload sink for module2 (owned).
+  LoopBackTransport* transport1;  // module1 -> module2 (owned).
+  LoopBackTransport* transport2;  // module2 -> module1 (owned).
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord32 test_CSRC[webrtc::kRtpCsrcSize];
+  FakeRtpRtcpClock fake_clock;
+};
+
+// End-to-end RTCP test: RPSI/SLI feedback, remote CSRCs, mixed CNAMEs,
+// report blocks, an APP packet, remote NTP, RTT, statistics, and finally
+// that a BYE (sent when sending stops) clears the remote CNAME.
+TEST_F(RtpRtcpRtcpTest, RTCP) {
+  RtcpCallback* myRTCPFeedback1 = new RtcpCallback(module1);
+  RtcpCallback* myRTCPFeedback2 = new RtcpCallback(module2);
+
+  SetUpCallFromModule1(myRTCPFeedback1, myRTCPFeedback2);
+
+  // RPSI round-trips kTestPictureId; SLI is checked in OnSLIReceived.
+  EXPECT_EQ(0, module1->SendRTCPReferencePictureSelection(kTestPictureId));
+  EXPECT_EQ(0, module1->SendRTCPSliceLossIndication(156));
+
+  // module2 must have learned both CSRCs from module1's RTP packet.
+  WebRtc_UWord32 testOfCSRC[webrtc::kRtpCsrcSize];
+  EXPECT_EQ(2, module2->RemoteCSRCs(testOfCSRC));
+  EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
+  EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
+
+  // Set cname of mixed.
+  EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[0], "john@192.168.0.1"));
+  EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[1], "jane@192.168.0.2"));
+  EXPECT_EQ(-1, module1->AddMixedCNAME(test_CSRC[0], NULL));
+
+  // Removing an unknown CSRC fails; re-adding after removal succeeds.
+  EXPECT_EQ(-1, module1->RemoveMixedCNAME(test_CSRC[0] + 1));
+  EXPECT_EQ(0, module1->RemoveMixedCNAME(test_CSRC[1]));
+  EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[1], "jane@192.168.0.2"));
+
+  RTCPReportBlock reportBlock;
+  reportBlock.cumulativeLost = 1;
+  reportBlock.delaySinceLastSR = 2;
+  reportBlock.extendedHighSeqNum = 3;
+  reportBlock.fractionLost= 4;
+  reportBlock.jitter = 5;
+  reportBlock.lastSR = 6;
+
+  // Set report blocks.
+  EXPECT_EQ(-1, module1->AddRTCPReportBlock(test_CSRC[0], NULL));
+  EXPECT_EQ(0, module1->AddRTCPReportBlock(test_CSRC[0], &reportBlock));
+
+  reportBlock.lastSR= 7;
+  EXPECT_EQ(0, module1->AddRTCPReportBlock(test_CSRC[1], &reportBlock));
+
+  // Build the 32-bit APP packet name "test" and attach 300 bytes of data;
+  // RtcpCallback::OnApplicationDataReceived verifies the decoded name.
+  WebRtc_UWord32 name = 't' << 24;
+  name += 'e' << 16;
+  name += 's' << 8;
+  name += 't';
+  EXPECT_EQ(0, module1->SetRTCPApplicationSpecificData(
+      3,
+      name,
+      (const WebRtc_UWord8 *)"test test test test test test test test test"\
+          " test test test test test test test test test test test test test"\
+          " test test test test test test test test test test test test test"\
+          " test test test test test test test test test test test test test"\
+          " test test test test test test test test test test test test ",
+          300));
+
+  // send RTCP packet, triggered by timer
+  fake_clock.IncrementTime(7500);
+  module1->Process();
+  fake_clock.IncrementTime(100);
+  module2->Process();
+
+  WebRtc_UWord32 receivedNTPsecs = 0;
+  WebRtc_UWord32 receivedNTPfrac = 0;
+  WebRtc_UWord32 RTCPArrivalTimeSecs = 0;
+  WebRtc_UWord32 RTCPArrivalTimeFrac = 0;
+  char cName[RTCP_CNAME_SIZE];
+
+  EXPECT_EQ(0, module2->RemoteNTP(&receivedNTPsecs, &receivedNTPfrac,
+                                  &RTCPArrivalTimeSecs, &RTCPArrivalTimeFrac));
+
+  // Unknown SSRC or NULL destination must fail.
+  EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC() + 1, cName));
+  EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), NULL));
+
+  // Check multiple CNAME.
+  EXPECT_EQ(0, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
+  EXPECT_EQ(0, strncmp(cName, "john.doe@test.test", RTCP_CNAME_SIZE));
+
+  EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[0], cName));
+  EXPECT_EQ(0, strncmp(cName, "john@192.168.0.1", RTCP_CNAME_SIZE));
+
+  EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[1], cName));
+  EXPECT_EQ(0, strncmp(cName, "jane@192.168.0.2", RTCP_CNAME_SIZE));
+
+  // get all report blocks
+  std::vector<RTCPReportBlock> report_blocks;
+  EXPECT_EQ(-1, module1->RemoteRTCPStat(NULL));
+  EXPECT_EQ(0, module1->RemoteRTCPStat(&report_blocks));
+  EXPECT_EQ(1u, report_blocks.size());
+  const RTCPReportBlock& reportBlockReceived = report_blocks[0];
+
+  // delaySinceLastSR is in units of 1/65536 s; ~100 ms elapsed above.
+  float secSinceLastReport =
+      static_cast<float>(reportBlockReceived.delaySinceLastSR) / 65536.0f;
+  EXPECT_GE(0.101f, secSinceLastReport);
+  EXPECT_LE(0.100f, secSinceLastReport);
+  EXPECT_EQ(test_sequence_number, reportBlockReceived.extendedHighSeqNum);
+  EXPECT_EQ(0, reportBlockReceived.fractionLost);
+
+  EXPECT_EQ(static_cast<WebRtc_UWord32>(0),
+            reportBlockReceived.cumulativeLost);
+
+  WebRtc_UWord8  fraction_lost = 0;  // scale 0 to 255
+  WebRtc_UWord32 cum_lost = 0;       // number of lost packets
+  WebRtc_UWord32 ext_max = 0;        // highest sequence number received
+  WebRtc_UWord32 jitter = 0;
+  WebRtc_UWord32 max_jitter = 0;
+  EXPECT_EQ(0, module2->StatisticsRTP(&fraction_lost,
+                                      &cum_lost,
+                                      &ext_max,
+                                      &jitter,
+                                      &max_jitter));
+  EXPECT_EQ(0, fraction_lost);
+  EXPECT_EQ((WebRtc_UWord32)0, cum_lost);
+  EXPECT_EQ(test_sequence_number, ext_max);
+  EXPECT_EQ(reportBlockReceived.jitter, jitter);
+
+  WebRtc_UWord16 RTT;
+  WebRtc_UWord16 avgRTT;
+  WebRtc_UWord16 minRTT;
+  WebRtc_UWord16 maxRTT;
+
+  // Get RoundTripTime; the loopback is instantaneous so all values stay low.
+  EXPECT_EQ(0, module1->RTT(test_ssrc + 1, &RTT, &avgRTT, &minRTT, &maxRTT));
+  EXPECT_GE(10, RTT);
+  EXPECT_GE(10, avgRTT);
+  EXPECT_GE(10, minRTT);
+  EXPECT_GE(10, maxRTT);
+
+  // Set report blocks.
+  EXPECT_EQ(0, module1->AddRTCPReportBlock(test_CSRC[0], &reportBlock));
+
+  // Test receive report.
+  EXPECT_EQ(0, module1->SetSendingStatus(false));
+
+  // Test that BYE clears the CNAME
+  EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
+
+  // Send RTCP packet, triggered by timer.
+  fake_clock.IncrementTime(5000);
+  module1->Process();
+  module2->Process();
+
+  delete myRTCPFeedback1;
+  delete myRTCPFeedback2;
+}
+
+// Verifies that RemoteRTCPStat returns no report blocks before any RTCP
+// exchange, and exactly one correctly-attributed block after module2's
+// receiver report arrives at module1.
+TEST_F(RtpRtcpRtcpTest, RemoteRTCPStatRemote) {
+  std::vector<RTCPReportBlock> report_blocks;
+
+  RtcpCallback feedback1(module1);
+  RtcpCallback feedback2(module2);
+
+  SetUpCallFromModule1(&feedback1, &feedback2);
+  // No RTCP yet, so no report blocks.
+  EXPECT_EQ(0, module1->RemoteRTCPStat(&report_blocks));
+  EXPECT_EQ(0u, report_blocks.size());
+
+  // send RTCP packet, triggered by timer
+  fake_clock.IncrementTime(7500);
+  module1->Process();
+  fake_clock.IncrementTime(100);
+  module2->Process();
+
+  EXPECT_EQ(0, module1->RemoteRTCPStat(&report_blocks));
+  ASSERT_EQ(1u, report_blocks.size());
+
+  // |test_ssrc+1| is the SSRC of module2 that send the report.
+  EXPECT_EQ(test_ssrc+1, report_blocks[0].remoteSSRC);
+  EXPECT_EQ(test_ssrc, report_blocks[0].sourceSSRC);
+
+  // No loss on the loopback; delay since last SR must be non-zero.
+  EXPECT_EQ(0u, report_blocks[0].cumulativeLost);
+  EXPECT_LT(0u, report_blocks[0].delaySinceLastSR);
+  EXPECT_EQ(test_sequence_number, report_blocks[0].extendedHighSeqNum);
+  EXPECT_EQ(0u, report_blocks[0].fractionLost);
+}
diff --git a/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_video.cc b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_video.cc
new file mode 100644
index 0000000..7a06ef7
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testAPI/test_api_video.cc
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+// Fixture for basic video RTP tests: a single video module looped back
+// onto itself with an I420 payload registered for send and receive.
+class RtpRtcpVideoTest : public ::testing::Test {
+ protected:
+  RtpRtcpVideoTest() {
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpVideoTest() {}
+
+  virtual void SetUp() {
+    // Video module (audio == false) driven by the fake clock.
+    video_module = RtpRtcp::CreateRtpRtcp(test_id, false, &fake_clock);
+    EXPECT_EQ(0, video_module->InitReceiver());
+    EXPECT_EQ(0, video_module->InitSender());
+    EXPECT_EQ(0, video_module->SetRTCPStatus(kRtcpCompound));
+    EXPECT_EQ(0, video_module->SetSSRC(test_ssrc));
+    EXPECT_EQ(0, video_module->SetNACKStatus(kNackRtcp));
+    EXPECT_EQ(0, video_module->SetStorePacketsStatus(true));
+    EXPECT_EQ(0, video_module->SetSendingStatus(true));
+
+    transport = new LoopBackTransport(video_module);
+    EXPECT_EQ(0, video_module->RegisterSendTransport(transport));
+
+    receiver = new RtpReceiver();
+    EXPECT_EQ(0, video_module->RegisterIncomingDataCallback(receiver));
+
+    VideoCodec video_codec;
+    memset(&video_codec, 0, sizeof(video_codec));
+    video_codec.plType = 123;
+    memcpy(video_codec.plName, "I420", 5);
+
+    EXPECT_EQ(0, video_module->RegisterSendPayload(video_codec));
+    EXPECT_EQ(0, video_module->RegisterReceivePayload(video_codec));
+
+    // Fill the 65000-byte payload with a repeating 0-9 pattern.
+    payload_data_length = sizeof(payload_data);
+
+    for (int n = 0; n < payload_data_length; n++) {
+      payload_data[n] = n%10;
+    }
+  }
+
+  virtual void TearDown() {
+    RtpRtcp::DestroyRtpRtcp(video_module);
+    delete transport;
+    delete receiver;
+  }
+
+  int test_id;
+  RtpRtcp* video_module;          // Module under test (owned).
+  LoopBackTransport* transport;   // Loopback transport (owned).
+  RtpReceiver* receiver;          // Payload sink (owned).
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord8  payload_data[65000];  // One frame's worth of payload.
+  int payload_data_length;
+  FakeRtpRtcpClock fake_clock;    // Deterministic time source.
+};
+
+// Smoke test: sending one full video frame through the loopback must
+// succeed (packetization and delivery are exercised; no further checks).
+TEST_F(RtpRtcpVideoTest, BasicVideo) {
+  WebRtc_UWord32 timestamp = 3000;
+  EXPECT_EQ(0, video_module->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
+                                             timestamp,
+                                             payload_data,
+                                             payload_data_length));
+
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/testFec/test_fec.cc b/trunk/src/modules/rtp_rtcp/test/testFec/test_fec.cc
new file mode 100644
index 0000000..867c502
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testFec/test_fec.cc
@@ -0,0 +1,492 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Test application for core FEC algorithm. Calls encoding and decoding
+ * functions in ForwardErrorCorrection directly.
+ */
+
+#include <cassert>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <ctime>
+#include <list>
+
+#include "forward_error_correction.h"
+#include "forward_error_correction_internal.h"
+
+#include "rtp_utility.h"
+#include "testsupport/fileutils.h"
+
+//#define VERBOSE_OUTPUT
+
+using namespace webrtc;
+
// Moves numPacketsToDecode packets from receivedPacketList into toDecodeList,
// randomly reordering them and appending random duplicates (see definition
// at the bottom of this file).
void ReceivePackets(
    ForwardErrorCorrection::ReceivedPacketList* toDecodeList,
    ForwardErrorCorrection::ReceivedPacketList* receivedPacketList,
    WebRtc_UWord32 numPacketsToDecode, float reorderRate, float duplicateRate);
+
// Randomized end-to-end test of the core FEC codec. For each combination of
// (numMediaPackets, numFecPackets, numImpPackets) at one loss rate it:
// generates packet masks and FEC packets, randomly drops / reorders /
// duplicates packets, runs DecodeFEC(), and verifies every recoverable media
// packet is restored bit-exactly. Returns 0 on success, -1 on first failure.
int main() {
  enum { kMaxNumberMediaPackets = 48 };
  enum { kMaxNumberFecPackets = 48 };

  // Packet-mask row widths: 2 bytes (L0) covers up to 16 media packets,
  // 6 bytes (L1) covers up to 48.
  const WebRtc_UWord32 kNumMaskBytesL0 = 2;
  const WebRtc_UWord32 kNumMaskBytesL1 = 6;

  // FOR UEP
  const bool kUseUnequalProtection = true;

  WebRtc_UWord32 id = 0;
  ForwardErrorCorrection fec(id);

  ForwardErrorCorrection::PacketList mediaPacketList;
  ForwardErrorCorrection::PacketList fecPacketList;
  ForwardErrorCorrection::ReceivedPacketList toDecodeList;
  ForwardErrorCorrection::ReceivedPacketList receivedPacketList;
  ForwardErrorCorrection::RecoveredPacketList recoveredPacketList;
  std::list<WebRtc_UWord8*> fecMaskList;

  ForwardErrorCorrection::Packet* mediaPacket;
  // Running over only one loss rate to limit execution time.
  const float lossRate[] = {0.5f};
  const WebRtc_UWord32 lossRateSize = sizeof(lossRate)/sizeof(*lossRate);
  const float reorderRate = 0.1f;
  const float duplicateRate = 0.1f;

  WebRtc_UWord8 mediaLossMask[kMaxNumberMediaPackets];
  WebRtc_UWord8 fecLossMask[kMaxNumberFecPackets];
  WebRtc_UWord8 fecPacketMasks[kMaxNumberFecPackets][kMaxNumberMediaPackets];

  // Seed the random number generator, storing the seed to file in order to
  // reproduce past results.
  const unsigned int randomSeed = static_cast<unsigned int>(time(NULL));
  srand(randomSeed);
  std::string filename = webrtc::test::OutputPath() + "randomSeedLog.txt";
  // NOTE(review): fopen() may return NULL (e.g. missing output directory), in
  // which case the fprintf below crashes — consider checking the handle.
  FILE* randomSeedFile = fopen(filename.c_str(), "a");
  fprintf(randomSeedFile, "%u\n", randomSeed);
  fclose(randomSeedFile);
  randomSeedFile = NULL;

  WebRtc_UWord16 seqNum = static_cast<WebRtc_UWord16>(rand());
  WebRtc_UWord32 timeStamp = static_cast<WebRtc_UWord32>(rand());
  const WebRtc_UWord32 ssrc = static_cast<WebRtc_UWord32>(rand());

  for (WebRtc_UWord32 lossRateIdx = 0; lossRateIdx < lossRateSize;
      lossRateIdx++) {
    // Byte mask sized for the worst case (48 media packets x 6 mask bytes).
    WebRtc_UWord8* packetMask =
        new WebRtc_UWord8[kMaxNumberMediaPackets * kNumMaskBytesL1];

    printf("Loss rate: %.2f\n", lossRate[lossRateIdx]);
    for (WebRtc_UWord32 numMediaPackets = 1;
        numMediaPackets <= kMaxNumberMediaPackets;
        numMediaPackets++) {

      for (WebRtc_UWord32 numFecPackets = 1;
          numFecPackets <= numMediaPackets &&
          numFecPackets <= kMaxNumberFecPackets;
          numFecPackets++) {

        // Loop over numImpPackets: these are usually <= (0.3*numMediaPackets).
        // For this test we check up to ~ (0.5*numMediaPackets).
        WebRtc_UWord32 maxNumImpPackets = numMediaPackets / 2 + 1;
        for (WebRtc_UWord32 numImpPackets = 0;
            numImpPackets <= maxNumImpPackets &&
            numImpPackets <= kMaxNumberMediaPackets;
            numImpPackets++) {

          // Protection factor in [0, 255], proportional to FEC/media ratio.
          WebRtc_UWord8 protectionFactor = static_cast<WebRtc_UWord8>
              (numFecPackets * 255 / numMediaPackets);

          const WebRtc_UWord32 maskBytesPerFecPacket =
              (numMediaPackets > 16) ? kNumMaskBytesL1 : kNumMaskBytesL0;

          memset(packetMask, 0, numMediaPackets * maskBytesPerFecPacket);

          // Generate the packed (bit-level) packet masks.
          internal::GeneratePacketMasks(numMediaPackets,
                                                numFecPackets,
                                                numImpPackets,
                                                kUseUnequalProtection,
                                                packetMask);

#ifdef VERBOSE_OUTPUT
          printf("%u media packets, %u FEC packets, %u numImpPackets, "
              "loss rate = %.2f \n",
              numMediaPackets, numFecPackets, numImpPackets, lossRate[lossRateIdx]);
          printf("Packet mask matrix \n");
#endif

          // Transfer packet masks from bit-mask to byte-mask
          // (fecPacketMasks[i][j] == 1 iff FEC packet i protects media
          // packet j).
          for (WebRtc_UWord32 i = 0; i < numFecPackets; i++) {
            for (WebRtc_UWord32 j = 0; j < numMediaPackets; j++) {
              const WebRtc_UWord8 byteMask =
                  packetMask[i * maskBytesPerFecPacket + j / 8];
              const WebRtc_UWord32 bitPosition = (7 - j % 8);
              fecPacketMasks[i][j] =
                  (byteMask & (1 << bitPosition)) >> bitPosition;
#ifdef VERBOSE_OUTPUT
              printf("%u ", fecPacketMasks[i][j]);
#endif
            }
#ifdef VERBOSE_OUTPUT
            printf("\n");
#endif
          }
#ifdef VERBOSE_OUTPUT
          printf("\n");
#endif
          // Check for all zero rows or columns: indicates incorrect mask
          // (a zero row is a useless FEC packet; a zero column is an
          // unprotected media packet).
          WebRtc_UWord32 rowLimit = numMediaPackets;
          for (WebRtc_UWord32 i = 0; i < numFecPackets; i++) {
            WebRtc_UWord32 rowSum = 0;
            for (WebRtc_UWord32 j = 0; j < rowLimit; j++) {
              rowSum += fecPacketMasks[i][j];
            }
            if (rowSum == 0) {
              printf("ERROR: row is all zero %d \n",i);
              return -1;
            }
          }
          for (WebRtc_UWord32 j = 0; j < rowLimit; j++) {
            WebRtc_UWord32 columnSum = 0;
            for (WebRtc_UWord32 i = 0; i < numFecPackets; i++) {
              columnSum += fecPacketMasks[i][j];
            }
            if (columnSum == 0) {
              printf("ERROR: column is all zero %d \n",j);
              return -1;
            }
          }
          // Construct media packets: random length (>= 12 so the fixed RTP
          // header fits), fixed seqNum/timestamp/ssrc header, random payload.
          for (WebRtc_UWord32 i = 0; i < numMediaPackets; i++)  {
            mediaPacket = new ForwardErrorCorrection::Packet;
            mediaPacketList.push_back(mediaPacket);
            mediaPacket->length =
                static_cast<WebRtc_UWord16>((static_cast<float>(rand()) /
                    RAND_MAX) * (IP_PACKET_SIZE - 12 - 28 -
                        ForwardErrorCorrection::PacketOverhead()));
            if (mediaPacket->length < 12) {
              mediaPacket->length = 12;
            }
            // Generate random values for the first 2 bytes
            mediaPacket->data[0] = static_cast<WebRtc_UWord8>(rand() % 256);
            mediaPacket->data[1] = static_cast<WebRtc_UWord8>(rand() % 256);

            // The first two bits are assumed to be 10 by the
            // FEC encoder. In fact the FEC decoder will set the
            // two first bits to 10 regardless of what they
            // actually were. Set the first two bits to 10
            // so that a memcmp can be performed for the
            // whole restored packet.
            mediaPacket->data[0] |= 0x80;
            mediaPacket->data[0] &= 0xbf;

            // FEC is applied to a whole frame.
            // A frame is signaled by multiple packets without
            // the marker bit set followed by the last packet of
            // the frame for which the marker bit is set.
            // Only push one (fake) frame to the FEC.
            mediaPacket->data[1] &= 0x7f;

            ModuleRTPUtility::AssignUWord16ToBuffer(&mediaPacket->data[2],
                                                    seqNum);
            ModuleRTPUtility::AssignUWord32ToBuffer(&mediaPacket->data[4],
                                                    timeStamp);
            ModuleRTPUtility::AssignUWord32ToBuffer(&mediaPacket->data[8],
                                                    ssrc);
            // Generate random values for payload
            for (WebRtc_Word32 j = 12; j < mediaPacket->length; j++)  {
              mediaPacket->data[j] =
                  static_cast<WebRtc_UWord8> (rand() % 256);
            }
            seqNum++;
          }
          // Set the marker bit on the last packet to close the frame.
          mediaPacket->data[1] |= 0x80;

          if (fec.GenerateFEC(mediaPacketList, protectionFactor,
                              numImpPackets, kUseUnequalProtection,
                              &fecPacketList) != 0) {
            printf("Error: GenerateFEC() failed\n");
            return -1;
          }

          if (fecPacketList.size() != numFecPackets) {
            printf("Error: we requested %u FEC packets, "
                "but GenerateFEC() produced %u\n",
                numFecPackets,
                static_cast<WebRtc_UWord32>(fecPacketList.size()));
            return -1;
          }
          // Simulate losses: each media packet survives with probability
          // (1 - lossRate); survivors are deep-copied into
          // receivedPacketList and flagged in mediaLossMask.
          memset(mediaLossMask, 0, sizeof(mediaLossMask));
          ForwardErrorCorrection::PacketList::iterator
              mediaPacketListItem = mediaPacketList.begin();
          ForwardErrorCorrection::ReceivedPacket* receivedPacket;
          WebRtc_UWord32 mediaPacketIdx = 0;

          while (mediaPacketListItem != mediaPacketList.end()) {
            mediaPacket = *mediaPacketListItem;
            const float lossRandomVariable = (static_cast<float>(rand()) /
                (RAND_MAX));

            if (lossRandomVariable >= lossRate[lossRateIdx])
            {
              mediaLossMask[mediaPacketIdx] = 1;
              receivedPacket =
                  new ForwardErrorCorrection::ReceivedPacket;
              receivedPacket->pkt = new ForwardErrorCorrection::Packet;
              receivedPacketList.push_back(receivedPacket);

              receivedPacket->pkt->length = mediaPacket->length;
              memcpy(receivedPacket->pkt->data, mediaPacket->data,
                     mediaPacket->length);
              receivedPacket->seqNum =
                  ModuleRTPUtility::BufferToUWord16(&mediaPacket->data[2]);
              receivedPacket->isFec = false;
            }
            mediaPacketIdx++;
            ++mediaPacketListItem;
          }
          // Same loss simulation for the FEC packets.
          memset(fecLossMask, 0, sizeof(fecLossMask));
          ForwardErrorCorrection::PacketList::iterator
              fecPacketListItem = fecPacketList.begin();
          ForwardErrorCorrection::Packet* fecPacket;
          WebRtc_UWord32 fecPacketIdx = 0;
          while (fecPacketListItem != fecPacketList.end()) {
            fecPacket = *fecPacketListItem;
            const float lossRandomVariable =
                (static_cast<float>(rand()) / (RAND_MAX));
            if (lossRandomVariable >= lossRate[lossRateIdx]) {
              fecLossMask[fecPacketIdx] = 1;
              receivedPacket =
                  new ForwardErrorCorrection::ReceivedPacket;
              receivedPacket->pkt = new ForwardErrorCorrection::Packet;

              receivedPacketList.push_back(receivedPacket);

              receivedPacket->pkt->length = fecPacket->length;
              memcpy(receivedPacket->pkt->data, fecPacket->data,
                     fecPacket->length);

              receivedPacket->seqNum = seqNum;
              receivedPacket->isFec = true;
              receivedPacket->ssrc = ssrc;

              // Remember the mask row of each surviving FEC packet (pointer
              // into the stack array fecPacketMasks; no ownership).
              fecMaskList.push_back(fecPacketMasks[fecPacketIdx]);
            }
            fecPacketIdx++;
            seqNum++;
            ++fecPacketListItem;
          }

#ifdef VERBOSE_OUTPUT
          printf("Media loss mask:\n");
          for (WebRtc_UWord32 i = 0; i < numMediaPackets; i++) {
            printf("%u ", mediaLossMask[i]);
          }
          printf("\n\n");

          printf("FEC loss mask:\n");
          for (WebRtc_UWord32 i = 0; i < numFecPackets; i++) {
            printf("%u ", fecLossMask[i]);
          }
          printf("\n\n");
#endif

          // Compute, independently of the decoder, which lost media packets
          // are recoverable: a surviving FEC packet recovers exactly one
          // missing packet when it protects exactly one (Hamming distance 1).
          // Iterate to a fixed point, restarting after every recovery; after
          // this, mediaLossMask[i] == 1 means packet i received OR
          // recoverable.
          std::list<WebRtc_UWord8*>::iterator fecMaskIt = fecMaskList.begin();
          WebRtc_UWord8* fecMask;
          while (fecMaskIt != fecMaskList.end()) {
            fecMask = *fecMaskIt;
            WebRtc_UWord32 hammingDist = 0;
            WebRtc_UWord32 recoveryPosition = 0;
            for (WebRtc_UWord32 i = 0; i < numMediaPackets; i++) {
              if (mediaLossMask[i] == 0 && fecMask[i] == 1) {
                recoveryPosition = i;
                hammingDist++;
              }
            }
            std::list<WebRtc_UWord8*>::iterator itemToDelete = fecMaskIt;
            ++fecMaskIt;

            if (hammingDist == 1) {
              // Recovery possible. Restart search.
              mediaLossMask[recoveryPosition] = 1;
              fecMaskIt = fecMaskList.begin();
            } else if (hammingDist == 0)  {
              // FEC packet cannot provide further recovery.
              fecMaskList.erase(itemToDelete);
            }
          }
#ifdef VERBOSE_OUTPUT
          printf("Recovery mask:\n");
          for (WebRtc_UWord32 i = 0; i < numMediaPackets; i++) {
            printf("%u ", mediaLossMask[i]);
          }
          printf("\n\n");
#endif
          bool fecPacketReceived = false; // For error-checking frame completion.
          // Feed the received packets to the decoder in random-sized chunks,
          // with reordering and duplication applied by ReceivePackets().
          while (!receivedPacketList.empty()) {
            WebRtc_UWord32 numPacketsToDecode = static_cast<WebRtc_UWord32>
                ((static_cast<float>(rand()) / RAND_MAX) *
                receivedPacketList.size() + 0.5);
            if (numPacketsToDecode < 1) {
              numPacketsToDecode = 1;
            }
            ReceivePackets(&toDecodeList, &receivedPacketList,
                           numPacketsToDecode, reorderRate, duplicateRate);

            if (fecPacketReceived == false) {
              ForwardErrorCorrection::ReceivedPacketList::iterator
                  toDecodeIt = toDecodeList.begin();
              while (toDecodeIt != toDecodeList.end()) {
                receivedPacket = *toDecodeIt;
                if (receivedPacket->isFec) {
                  fecPacketReceived = true;
                }
                ++toDecodeIt;
              }
            }
            if (fec.DecodeFEC(&toDecodeList, &recoveredPacketList)
                != 0) {
              printf("Error: DecodeFEC() failed\n");
              return -1;
            }
            if (!toDecodeList.empty()) {
              printf("Error: received packet list is not empty\n");
              return -1;
            }
          }
          // Verify that every packet flagged in mediaLossMask was restored
          // bit-exactly. NOTE(review): this assumes recovered packets come
          // back from DecodeFEC() in send order — confirm.
          mediaPacketListItem = mediaPacketList.begin();
          mediaPacketIdx = 0;
          while (mediaPacketListItem != mediaPacketList.end()) {
            if (mediaLossMask[mediaPacketIdx] == 1) {
              // Should have recovered this packet.
              ForwardErrorCorrection::RecoveredPacketList::iterator
                  recoveredPacketListItem = recoveredPacketList.begin();

              if (recoveredPacketListItem == recoveredPacketList.end()) {
                printf("Error: insufficient number of recovered packets.\n");
                return -1;
              }
              mediaPacket = *mediaPacketListItem;
              ForwardErrorCorrection::RecoveredPacket* recoveredPacket =
                  *recoveredPacketListItem;

              if (recoveredPacket->pkt->length != mediaPacket->length) {
                printf("Error: recovered packet length not identical to "
                    "original media packet\n");
                return -1;
              }
              if (memcmp(recoveredPacket->pkt->data, mediaPacket->data,
                         mediaPacket->length) != 0) {
                printf("Error: recovered packet payload not identical to "
                    "original media packet\n");
                return -1;
              }
              delete recoveredPacket;
              recoveredPacketList.pop_front();
            }
            mediaPacketIdx++;
            ++mediaPacketListItem;
          }
          fec.ResetState(&recoveredPacketList);
          if (!recoveredPacketList.empty()) {
            printf("Error: excessive number of recovered packets.\n");
            printf("\t size is:%u\n",
                static_cast<WebRtc_UWord32>(recoveredPacketList.size()));
            return -1;
          }
          // -- Teardown --
          mediaPacketListItem = mediaPacketList.begin();
          while (mediaPacketListItem != mediaPacketList.end()) {
            delete *mediaPacketListItem;
            ++mediaPacketListItem;
            mediaPacketList.pop_front();
          }
          assert(mediaPacketList.empty());

          // FEC packets are not deleted here — presumably owned by the fec
          // instance; the list is merely emptied.
          fecPacketListItem = fecPacketList.begin();
          while (fecPacketListItem != fecPacketList.end()) {
            ++fecPacketListItem;
            fecPacketList.pop_front();
          }

          // Delete received packets we didn't pass to DecodeFEC(), due to early
          // frame completion.
          ForwardErrorCorrection::ReceivedPacketList::iterator
              receivedPacketIt = receivedPacketList.begin();
          while (receivedPacketIt != receivedPacketList.end()) {
            receivedPacket = *receivedPacketIt;
            delete receivedPacket;
            ++receivedPacketIt;
            receivedPacketList.pop_front();
          }
          assert(receivedPacketList.empty());

          // Mask pointers reference the stack array; nothing to delete.
          while (!fecMaskList.empty()) {
            fecMaskList.pop_front();
          }
          timeStamp += 90000 / 30;  // Advance one frame (presumably 30 fps, 90 kHz clock).
        } //loop over numImpPackets
      } //loop over FecPackets
    } //loop over numMediaPackets
    delete [] packetMask;
  } // loop over loss rates

  // Have DecodeFEC free allocated memory.
  fec.ResetState(&recoveredPacketList);
  if (!recoveredPacketList.empty()) {
    printf("Error: recovered packet list is not empty\n");
    return -1;
  }
  printf("\nAll tests passed successfully\n");
  return 0;
}
+
// Moves numPacketsToDecode packets from receivedPacketList into toDecodeList,
// simulating network behavior:
//  - reordering: with probability reorderRate (applied repeatedly) a packet
//    further down the list is picked instead of the front one;
//  - duplication: with probability duplicateRate (applied repeatedly) a deep
//    copy of the chosen packet is appended as well.
// NOTE: the exact sequence of rand() calls is part of the seeded,
// reproducible test behavior; do not restructure without re-validating.
// NOTE(review): duplicates are heap-allocated here — presumably DecodeFEC()
// takes ownership of everything in toDecodeList; confirm.
void ReceivePackets(
    ForwardErrorCorrection::ReceivedPacketList* toDecodeList,
    ForwardErrorCorrection::ReceivedPacketList* receivedPacketList,
    WebRtc_UWord32 numPacketsToDecode, float reorderRate, float duplicateRate) {
  assert(toDecodeList->empty());
  assert(numPacketsToDecode <= receivedPacketList->size());

  ForwardErrorCorrection::ReceivedPacketList::iterator it;
  for (WebRtc_UWord32 i = 0; i < numPacketsToDecode; i++) {
    it = receivedPacketList->begin();
    // Reorder packets.
    float randomVariable = static_cast<float>(rand()) / RAND_MAX;
    while (randomVariable < reorderRate) {
      ++it;
      if (it == receivedPacketList->end()) {
        --it;  // Stepped past the last packet; settle on the final one.
        break;
      }
      randomVariable = static_cast<float>(rand()) / RAND_MAX;
    }
    ForwardErrorCorrection::ReceivedPacket* receivedPacket = *it;
    toDecodeList->push_back(receivedPacket);

    // Duplicate packets.
    randomVariable = static_cast<float>(rand()) / RAND_MAX;
    while (randomVariable < duplicateRate) {
      // Deep copy: shallow-assign the struct, then give the duplicate its
      // own payload buffer.
      ForwardErrorCorrection::ReceivedPacket* duplicatePacket =
          new ForwardErrorCorrection::ReceivedPacket;
      *duplicatePacket = *receivedPacket;
      duplicatePacket->pkt = new ForwardErrorCorrection::Packet;
      memcpy(duplicatePacket->pkt->data, receivedPacket->pkt->data,
             receivedPacket->pkt->length);
      duplicatePacket->pkt->length = receivedPacket->pkt->length;

      toDecodeList->push_back(duplicatePacket);
      randomVariable = static_cast<float>(rand()) / RAND_MAX;
    }
    receivedPacketList->erase(it);
  }
}
diff --git a/trunk/src/modules/rtp_rtcp/test/testFec/test_fec.gypi b/trunk/src/modules/rtp_rtcp/test/testFec/test_fec.gypi
new file mode 100644
index 0000000..b7f933b
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testFec/test_fec.gypi
@@ -0,0 +1,36 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'test_fec',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+      ],
+      
+      'include_dirs': [
+        '../../source',
+        '../../../../system_wrappers/interface',
+      ],
+   
+      'sources': [
+        'test_fec.cc',
+      ],
+      
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/rtp_rtcp/test/testH263Parser/testH263Parser.cc b/trunk/src/modules/rtp_rtcp/test/testH263Parser/testH263Parser.cc
new file mode 100644
index 0000000..e4c36d1
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testH263Parser/testH263Parser.cc
@@ -0,0 +1,580 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <windows.h>
+#include <iostream>
+#include <tchar.h>
+
+#include "h263_information.h"
+
+#define TEST_STR "Test H263 parser."
+#define TEST_PASSED() std::cerr << TEST_STR << " : [OK]" << std::endl
+#define PRINT_LINE std::cout << "------------------------------------------" << std::endl;
+
+
+void PrintInfo(const H263Info* ptr)
+{
+    std::cout << "info =    GOBs: {";
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << ptr->ptrGOBbuffer[i] << " ";
+    }
+    std::cout << "}" << std::endl;
+    std::cout << "          sBit: {";
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << (int)ptr->ptrGOBbufferSBit[i] << " ";
+    }
+    std::cout << "}" << std::endl;
+
+    std::cout << "uiH263PTypeFmt: " << (int)ptr->uiH263PTypeFmt << std::endl;
+    std::cout << "     codecBits: " << (int)ptr->codecBits << std::endl;
+    std::cout << "        pQuant: " << (int)ptr->pQuant << std::endl;
+    std::cout << "         fType: " << (int)ptr->fType << std::endl;
+    std::cout << "        cpmBit: " << (int)ptr->cpmBit << std::endl;
+    std::cout << "     numOfGOBs: " << (int)ptr->numOfGOBs << std::endl;
+    std::cout << "      numOfMBs: " << (int)ptr->totalNumOfMBs << std::endl;
+    std::cout << "                " <<  std::endl;
+};
+
+void PrintMBInfo(
+    const H263Info* ptr,
+    const H263MBInfo* ptrMB)
+{
+    std::cout << "        gQuant: {";
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << (int)ptr->ptrGQuant[i] << " ";
+    }
+    std::cout << "}" << std::endl;
+    std::cout << "                " <<  std::endl;
+
+    std::cout << "MBs:{";
+    int k = 0;
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << " {";
+        for (int j = 0; j < ptr->ptrNumOfMBs[i]; j++)
+        {
+            std::cout << ptrMB->ptrBuffer[k++] << " ";
+        }
+        std::cout << "}" << std::endl;
+    }
+    std::cout << "}" << std::endl;
+    PRINT_LINE;
+};
+
+void ValidateResults(
+    const H263Info* ptr,
+    const H263Info* ptrRef)
+{
+    assert(ptr->uiH263PTypeFmt == ptrRef->uiH263PTypeFmt);
+    assert(ptr->codecBits      == ptrRef->codecBits);
+    assert(ptr->pQuant         == ptrRef->pQuant);
+    assert(ptr->fType          == ptrRef->fType);
+    assert(ptr->numOfGOBs      == ptrRef->numOfGOBs);
+    assert(ptr->totalNumOfMBs  == ptrRef->totalNumOfMBs);
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        assert(ptr->ptrGOBbuffer[i]     == ptrRef->ptrGOBbuffer[i]);
+        assert(ptr->ptrGOBbufferSBit[i] == ptrRef->ptrGOBbufferSBit[i]);
+    }
+    PrintInfo(ptr);
+}
+
+void ValidateMBResults(
+    const H263Info* ptr,
+    const H263MBInfo* ptrMB,
+    const H263Info* ptrRef,
+    bool  printRes = true)
+{
+    int offset = 0;
+    int numBytes = 0;
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        offset = ptr->CalculateMBOffset(i+1);
+        numBytes += ptrMB->ptrBuffer[offset - 1] / 8;
+        int numBytesRem = ptrMB->ptrBuffer[offset - 1] % 8;
+        if (numBytesRem)
+        {
+            numBytes++;
+        }
+        assert(ptr->ptrGQuant[i] == ptrRef->ptrGQuant[i]);
+    }
+    assert(ptr->ptrGOBbuffer[ptr->numOfGOBs] == numBytes);
+    assert(unsigned int( ptr->totalNumOfMBs) <= ptrMB->bufferSize);
+    if (printRes)
+    {
+        PrintMBInfo(ptr, ptrMB);
+    }
+}
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+
+    std::string str;
+    std::cout << "--------------------------------" << std::endl;
+    std::cout << "------- Test H.263 Parser ------" << std::endl;
+    std::cout << "--------------------------------" << std::endl;
+    std::cout << "  "  << std::endl;
+
+    // -----------------------------------------------------
+    // Input data - H.263 encoded stream SQCIF (P-frame)
+    // -----------------------------------------------------
+    const int lengthRefSQCIF = 77;
+    const unsigned char encodedStreamSQCIF[lengthRefSQCIF] = {
+    0x00, 0x00, 0x81, 0xf6, 0x06, 0x04, 0x3f, 0xb7, 0xbc, 0x00, 0x00, 0x86, 0x23,
+    0x5b, 0xdb, 0xdf, 0xb1, 0x93, 0xdb, 0xde, 0xd6, 0xf0, 0x00, 0x00, 0x8a, 0x27,
+    0xdb, 0xcf, 0xad, 0xbe, 0x00, 0x00, 0x8e, 0x27, 0xed, 0xef, 0x80, 0x00, 0x00,
+    0x92, 0x27, 0x6f, 0x7f, 0x80, 0x00, 0x00, 0x96, 0x20, 0xfc, 0xe2, 0xdb, 0xfe,
+    0xb7, 0x7d, 0xea, 0x5f, 0xf8, 0xab, 0xd2, 0xff, 0xf6, 0xc9, 0xe5, 0x5e, 0x97,
+    0xf7, 0xff, 0xad, 0x4f, 0x49, 0x3b, 0xff, 0xd6, 0xa6, 0x75, 0x82, 0x60};
+
+    // Expected results
+    H263Info infoRefSQCIF;
+    infoRefSQCIF.uiH263PTypeFmt = 1;
+    infoRefSQCIF.codecBits      = 8;
+    infoRefSQCIF.pQuant         = 4;
+    infoRefSQCIF.fType          = 1;
+    infoRefSQCIF.cpmBit         = 0;
+    infoRefSQCIF.numOfGOBs      = 6;
+    infoRefSQCIF.totalNumOfMBs  = 8*6;
+
+    infoRefSQCIF.ptrGOBbuffer[0] = 0;  infoRefSQCIF.ptrGOBbufferSBit[0] = 0; infoRefSQCIF.ptrGQuant[0] = 0;
+    infoRefSQCIF.ptrGOBbuffer[1] = 9;  infoRefSQCIF.ptrGOBbufferSBit[1] = 0; infoRefSQCIF.ptrGQuant[1] = 4;
+    infoRefSQCIF.ptrGOBbuffer[2] = 22; infoRefSQCIF.ptrGOBbufferSBit[2] = 0; infoRefSQCIF.ptrGQuant[2] = 4;
+    infoRefSQCIF.ptrGOBbuffer[3] = 30; infoRefSQCIF.ptrGOBbufferSBit[3] = 0; infoRefSQCIF.ptrGQuant[3] = 4;
+    infoRefSQCIF.ptrGOBbuffer[4] = 37; infoRefSQCIF.ptrGOBbufferSBit[4] = 0; infoRefSQCIF.ptrGQuant[4] = 4;
+    infoRefSQCIF.ptrGOBbuffer[5] = 44; infoRefSQCIF.ptrGOBbufferSBit[5] = 0; infoRefSQCIF.ptrGQuant[5] = 4;
+
+    // ----------------------------------------------------
+    // Input data - H.263 encoded stream QCIF (P-frame)
+    // ----------------------------------------------------
+    const int lengthRefQCIF = 123;
+    const unsigned char encodedStreamQCIF[lengthRefQCIF] = {
+    0x00, 0x00, 0x81, 0x02, 0x0a, 0x04, 0x3f, 0xf8, 0x00, 0x00, 0x86, 0x27, 0x8b,
+    0xc6, 0x9f, 0x17, 0x9c, 0x00, 0x00, 0x8a, 0x20, 0xbc, 0x22, 0xf8, 0x5f, 0x46,
+    0x03, 0xc1, 0x77, 0x15, 0xe0, 0xb8, 0x38, 0x3f, 0x05, 0xa0, 0xbf, 0x8f, 0x00,
+    0x00, 0x8e, 0x27, 0xfc, 0x5e, 0x5a, 0x33, 0x80, 0x00, 0x00, 0x92, 0x25, 0x8c,
+    0x1e, 0xbf, 0xfc, 0x7e, 0x35, 0xfc, 0x00, 0x00, 0x96, 0x27, 0xff, 0x00, 0x00,
+    0x9a, 0x20, 0xdb, 0x34, 0xef, 0xfc, 0x00, 0x00, 0x9e, 0x20, 0xaf, 0x17, 0x0d,
+    0x3e, 0xde, 0x0f, 0x8f, 0xff, 0x80, 0x00, 0x00, 0xa2, 0x22, 0xbb, 0x27, 0x81,
+    0xeb, 0xff, 0x5b, 0x07, 0xab, 0xff, 0xad, 0x9e, 0xd8, 0xc9, 0x6b, 0x75, 0x54,
+    0xbf, 0xbe, 0x8a, 0xbd, 0xf2, 0xfb, 0xfb, 0x3d, 0x3d, 0x25, 0xb7, 0xf7, 0xfc,
+    0x92, 0x4c, 0xdb, 0x6d, 0x69, 0xc0};
+
+    // Expected results
+    H263Info infoRefQCIF;
+    infoRefQCIF.uiH263PTypeFmt = 2;
+    infoRefQCIF.codecBits      = 8;
+    infoRefQCIF.pQuant         = 4;
+    infoRefQCIF.fType          = 1;
+    infoRefQCIF.cpmBit         = 0;
+    infoRefQCIF.numOfGOBs      = 9;
+    infoRefQCIF.totalNumOfMBs  = 11*9;
+
+    infoRefQCIF.ptrGOBbuffer[0] = 0;   infoRefQCIF.ptrGOBbufferSBit[0] = 0; infoRefQCIF.ptrGQuant[0] = 0;
+    infoRefQCIF.ptrGOBbuffer[1] = 8;   infoRefQCIF.ptrGOBbufferSBit[1] = 0; infoRefQCIF.ptrGQuant[1] = 4;
+    infoRefQCIF.ptrGOBbuffer[2] = 17;  infoRefQCIF.ptrGOBbufferSBit[2] = 0; infoRefQCIF.ptrGQuant[2] = 4;
+    infoRefQCIF.ptrGOBbuffer[3] = 38;  infoRefQCIF.ptrGOBbufferSBit[3] = 0; infoRefQCIF.ptrGQuant[3] = 4;
+    infoRefQCIF.ptrGOBbuffer[4] = 47;  infoRefQCIF.ptrGOBbufferSBit[4] = 0; infoRefQCIF.ptrGQuant[4] = 4;
+    infoRefQCIF.ptrGOBbuffer[5] = 58;  infoRefQCIF.ptrGOBbufferSBit[5] = 0; infoRefQCIF.ptrGQuant[5] = 4;
+    infoRefQCIF.ptrGOBbuffer[6] = 63;  infoRefQCIF.ptrGOBbufferSBit[6] = 0; infoRefQCIF.ptrGQuant[6] = 4;
+    infoRefQCIF.ptrGOBbuffer[7] = 71;  infoRefQCIF.ptrGOBbufferSBit[7] = 0; infoRefQCIF.ptrGQuant[7] = 4;
+    infoRefQCIF.ptrGOBbuffer[8] = 84;  infoRefQCIF.ptrGOBbufferSBit[8] = 0; infoRefQCIF.ptrGQuant[8] = 4;
+
+    // ---------------------------------------------------
+    // Input data - H.263 encoded stream CIF (P-frame)
+    // ---------------------------------------------------
+    const int lengthRefCIF = 212;
+    const unsigned char encodedStreamCIF[lengthRefCIF] = {
+    0x00, 0x00, 0x82, 0x9a, 0x0e, 0x04, 0x3f, 0xff, 0xff, 0x00, 0x00, 0x86, 0x27,
+    0xff, 0xff, 0xe0, 0x00, 0x00, 0x8a, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0x8e,
+    0x27, 0xff, 0xff, 0x6b, 0x09, 0x70, 0x00, 0x00, 0x92, 0x27, 0xff, 0xff, 0xe0,
+    0x00, 0x00, 0x96, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0x9a, 0x27, 0x6f, 0x7f,
+    0xff, 0xfe, 0x00, 0x00, 0x9e, 0x27, 0xff, 0xfe, 0xc6, 0x31, 0xe0, 0x00, 0x00,
+    0xa2, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0xa6, 0x27, 0xf6, 0xb7, 0xfe, 0xa6,
+    0x14, 0x95, 0xb4, 0xc6, 0x41, 0x6b, 0x3a, 0x2e, 0x8d, 0x42, 0xef, 0xc0, 0x00,
+    0x00, 0xaa, 0x27, 0xff, 0xb1, 0x95, 0x05, 0x0c, 0xe3, 0x4a, 0x17, 0xff, 0x80,
+    0x00, 0x00, 0xae, 0x27, 0xff, 0xf6, 0xf7, 0xfe, 0x00, 0x00, 0xb2, 0x27, 0xff,
+    0x8b, 0xdf, 0xff, 0x00, 0x00, 0xb6, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0xba,
+    0x26, 0x2f, 0x7f, 0xff, 0xfb, 0x58, 0x5b, 0x80, 0x00, 0x00, 0xbe, 0x20, 0xbc,
+    0xe4, 0x5e, 0x6f, 0xff, 0xfe, 0xce, 0xf1, 0x94, 0x00, 0x00, 0xc2, 0x23, 0x18,
+    0x4b, 0x17, 0x87, 0x0f, 0xff, 0xb3, 0xb6, 0x09, 0x63, 0x46, 0x73, 0x40, 0xb2,
+    0x5f, 0x0a, 0xc6, 0xbe, 0xd7, 0x7a, 0x61, 0xbc, 0x68, 0xde, 0xf0, 0x00, 0x00,
+    0xc6, 0x26, 0x2f, 0x0a, 0xff, 0xff, 0x17, 0x9a, 0x17, 0x8c, 0xb1, 0x37, 0x67,
+    0xaf, 0xec, 0xf7, 0xa5, 0xbf, 0xb6, 0x49, 0x27, 0x6d, 0xb7, 0x92, 0x49, 0x1b,
+    0xb4, 0x9a, 0xe0, 0x62};
+
+    // Expected results
+    H263Info infoRefCIF;
+    infoRefCIF.uiH263PTypeFmt = 3;
+    infoRefCIF.codecBits      = 8;
+    infoRefCIF.pQuant         = 4;
+    infoRefCIF.fType          = 1;
+    infoRefCIF.cpmBit         = 0;
+    infoRefCIF.numOfGOBs      = 18;
+    infoRefCIF.totalNumOfMBs  = 22*18;
+
+    infoRefCIF.ptrGOBbuffer[0]  = 0;   infoRefCIF.ptrGOBbufferSBit[0]  = 0; infoRefCIF.ptrGQuant[0]  = 0;
+    infoRefCIF.ptrGOBbuffer[1]  = 9;   infoRefCIF.ptrGOBbufferSBit[1]  = 0; infoRefCIF.ptrGQuant[1]  = 4;
+    infoRefCIF.ptrGOBbuffer[2]  = 16;  infoRefCIF.ptrGOBbufferSBit[2]  = 0; infoRefCIF.ptrGQuant[2]  = 4;
+    infoRefCIF.ptrGOBbuffer[3]  = 23;  infoRefCIF.ptrGOBbufferSBit[3]  = 0; infoRefCIF.ptrGQuant[3]  = 4;
+    infoRefCIF.ptrGOBbuffer[4]  = 32;  infoRefCIF.ptrGOBbufferSBit[4]  = 0; infoRefCIF.ptrGQuant[4]  = 4;
+    infoRefCIF.ptrGOBbuffer[5]  = 39;  infoRefCIF.ptrGOBbufferSBit[5]  = 0; infoRefCIF.ptrGQuant[5]  = 4;
+    infoRefCIF.ptrGOBbuffer[6]  = 46;  infoRefCIF.ptrGOBbufferSBit[6]  = 0; infoRefCIF.ptrGQuant[6]  = 4;
+    infoRefCIF.ptrGOBbuffer[7]  = 54;  infoRefCIF.ptrGOBbufferSBit[7]  = 0; infoRefCIF.ptrGQuant[7]  = 4;
+    infoRefCIF.ptrGOBbuffer[8]  = 63;  infoRefCIF.ptrGOBbufferSBit[8]  = 0; infoRefCIF.ptrGQuant[8]  = 4;
+    infoRefCIF.ptrGOBbuffer[9]  = 70;  infoRefCIF.ptrGOBbufferSBit[9]  = 0; infoRefCIF.ptrGQuant[9]  = 4;
+    infoRefCIF.ptrGOBbuffer[10] = 90;  infoRefCIF.ptrGOBbufferSBit[10]  = 0; infoRefCIF.ptrGQuant[10]  = 4;
+    infoRefCIF.ptrGOBbuffer[11] = 104; infoRefCIF.ptrGOBbufferSBit[11] = 0; infoRefCIF.ptrGQuant[11] = 4;
+    infoRefCIF.ptrGOBbuffer[12] = 112; infoRefCIF.ptrGOBbufferSBit[12] = 0; infoRefCIF.ptrGQuant[12] = 4;
+    infoRefCIF.ptrGOBbuffer[13] = 120; infoRefCIF.ptrGOBbufferSBit[13] = 0; infoRefCIF.ptrGQuant[13] = 4;
+    infoRefCIF.ptrGOBbuffer[14] = 127; infoRefCIF.ptrGOBbufferSBit[14] = 0; infoRefCIF.ptrGQuant[14] = 4;
+    infoRefCIF.ptrGOBbuffer[15] = 138; infoRefCIF.ptrGOBbufferSBit[15] = 0; infoRefCIF.ptrGQuant[15] = 4;
+    infoRefCIF.ptrGOBbuffer[16] = 151; infoRefCIF.ptrGOBbufferSBit[16] = 0; infoRefCIF.ptrGQuant[16] = 4;
+    infoRefCIF.ptrGOBbuffer[17] = 180; infoRefCIF.ptrGOBbufferSBit[17] = 0; infoRefCIF.ptrGQuant[17] = 4;
+
+    // -----------------------------------------------------------------------
+    // Input data - H.263 encoded stream QCIF (I-frame). Non byte aligned GOBs
+    // -----------------------------------------------------------------------
+    const int lengthRefQCIF_N = 2020;
+    const unsigned char encodedStreamQCIF_N[lengthRefQCIF_N] = {
+    0x00,0x00,0x80,0x06,0x08,0x07,0x34,0xe4,0xf7,0x66,0x75,0x12,0x9b,0x64,0x83,0xe9,0x4c,0xc7,0x3c,0x77,0x83,0xcf,0x67,0x96,
+    0xe0,0x06,0x69,0x95,0x70,0x60,0x43,0x53,0x96,0x8a,0xa2,0x9e,0x96,0xf8,0x79,0xf0,0xf2,0xb8,0x30,0x21,0xb0,0x0c,0xc0,0x22,
+    0x0e,0x70,0x12,0xb0,0x30,0x21,0x9e,0x03,0x3e,0x02,0x22,0xa8,0x83,0xcf,0x7d,0xe8,0xf3,0x85,0x8d,0x01,0x47,0x29,0x03,0x02,
+    0x16,0x5c,0x06,0x7c,0x84,0x56,0x9c,0x0c,0x08,0x49,0x70,0x29,0xe1,0x68,0x39,0xbf,0xd0,0xef,0x6f,0x6e,0x8e,0x64,0x72,0x70,
+    0x30,0x21,0x03,0xe0,0x53,0x82,0xab,0xe2,0xd4,0xa8,0x3e,0x12,0x80,0xc8,0x2a,0x9f,0x07,0x23,0xdf,0x4f,0xaa,0x5b,0x72,0xaa,
+    0x22,0x81,0x21,0x7a,0x80,0x54,0x85,0x82,0x50,0x17,0x40,0x2a,0x7f,0x3c,0xfe,0xa5,0x49,0x39,0x08,0x18,0x20,0x65,0x95,0x8c,
+    0x8f,0x67,0xc7,0xc0,0x63,0xe1,0x1b,0xf1,0xef,0xfd,0x25,0x13,0x24,0x9c,0xa9,0x1f,0x02,0x9b,0xe1,0x19,0xae,0x97,0x02,0x9a,
+    0x84,0x6f,0xbf,0xfd,0x14,0xf2,0x09,0xe3,0x10,0x38,0x0a,0x61,0x99,0x6e,0xf8,0x14,0xce,0x7e,0xff,0xe7,0xa7,0x8e,0x4f,0x14,
+    0xa9,0x80,0xa6,0x71,0x3a,0x98,0x06,0x1c,0xfc,0xff,0xcd,0x8f,0x18,0x9e,0x24,0x52,0x05,0x9c,0x42,0xa5,0x33,0x9f,0xdf,0x80,
+    0x00,0x21,0x0e,0xf6,0x61,0xe2,0x7a,0x35,0x64,0x8e,0x61,0xa2,0x58,0x2a,0x0b,0x45,0x4c,0xe0,0xa1,0xa0,0xa6,0x83,0x58,0xf0,
+    0x08,0x77,0x3e,0xb5,0xc6,0x05,0x6b,0x6e,0x69,0x06,0x07,0xe6,0x82,0x9b,0xc8,0x7e,0x2d,0x6d,0x06,0x07,0xe3,0xe0,0xa6,0x2e,
+    0x05,0x58,0x39,0xbf,0xd1,0xc7,0x9e,0x15,0x29,0x39,0x31,0xbc,0x18,0x1f,0x62,0xf5,0x95,0x20,0xa0,0xe6,0xff,0x38,0x83,0x03,
+    0xea,0x3f,0x02,0xc8,0x05,0x71,0x31,0xef,0x79,0x49,0x48,0x73,0x06,0x07,0xd4,0x4b,0x5f,0xc0,0xe4,0x81,0x3b,0x03,0x03,0xea,
+    0x24,0x81,0x90,0x55,0x41,0x64,0x2b,0xea,0xd1,0xcc,0xa5,0x99,0x79,0x06,0x07,0xd8,0x49,0x05,0x39,0x64,0x16,0x3d,0x03,0x03,
+    0xee,0x24,0x83,0x0b,0xf9,0x46,0x50,0x27,0xdf,0xd4,0xa6,0xa6,0x3d,0x83,0x03,0xf0,0x25,0x83,0x0b,0xf8,0x19,0xbe,0x83,0x03,
+    0xf2,0x3f,0x06,0x17,0xf1,0x58,0xd6,0x04,0xff,0xfa,0x94,0xb4,0x87,0xe0,0x43,0x2f,0x06,0x17,0xf1,0x58,0x11,0x7b,0xe0,0x21,
+    0x97,0x82,0x9d,0x58,0x11,0x7b,0xf9,0xf8,0x15,0xa2,0x9f,0x40,0x3e,0xf4,0x07,0xd5,0x81,0x9f,0xa1,0x7b,0xca,0x9a,0xbb,0xdf,
+    0xcf,0xc0,0xf3,0xb0,0x09,0x37,0x00,0x97,0x63,0xee,0x47,0xff,0xff,0xe8,0x68,0xa5,0xc6,0x43,0xdf,0x2f,0x06,0xad,0xad,0x3f,
+    0xef,0xf6,0x30,0x03,0xea,0x80,0xcf,0x80,0x8b,0x8a,0xe2,0xb5,0x40,0xa7,0x2e,0x06,0x27,0xf7,0xc2,0xd6,0x47,0x2f,0xc1,0x3f,
+    0x80,0x00,0x44,0x1f,0xb5,0xbd,0xc5,0xed,0x4f,0x70,0x7c,0x43,0xec,0x8e,0xcf,0x6e,0xcf,0x7b,0x88,0x7d,0x9b,0xfc,0x13,0xfc,
+    0x24,0x3b,0xf4,0xe1,0x61,0xe1,0xe2,0x61,0x5f,0xa7,0x13,0x15,0x17,0x1b,0x08,0xff,0x38,0x98,0x98,0xb8,0xb8,0x2f,0xf9,0xc3,
+    0xc2,0xc5,0x43,0xc0,0xff,0xce,0x09,0xfa,0x16,0x06,0x07,0xfe,0x6f,0x4e,0xcf,0xaf,0x30,0x3f,0xf3,0x70,0x6c,0x74,0x6e,0x81,
+    0xff,0x9b,0x33,0x13,0x53,0x2c,0x0f,0xf8,0x00,0x04,0x61,0xf0,0xd4,0xf7,0x17,0xb6,0x3a,0x3e,0x1d,0xfa,0x6f,0x0f,0xaf,0x4f,
+    0xf0,0xef,0xd3,0x81,0x85,0x84,0x88,0x86,0x7e,0x9c,0x4c,0x64,0x64,0x84,0x2b,0xfc,0xe3,0xa4,0x24,0xa4,0xe0,0xdf,0xe7,0x1f,
+    0x1d,0x27,0x25,0x03,0xff,0x38,0xd8,0xb9,0x19,0x0f,0xff,0xf9,0xc4,0x42,0xc6,0xc4,0xff,0xc0,0xce,0x05,0xf6,0x16,0x05,0xfe,
+    0x06,0x6f,0x0e,0x4f,0x8e,0xef,0xf0,0x59,0x6d,0x68,0x72,0x3e,0xd9,0x1f,0xff,0x02,0x00,0x01,0x20,0x7c,0x3a,0x29,0x4c,0xf7,
+    0xa5,0x34,0x0c,0x3d,0xb5,0xae,0x3a,0x1d,0xf5,0x6f,0xc3,0xb0,0x30,0x33,0x82,0x03,0xa0,0x60,0x66,0xbf,0x01,0xc0,0x04,0x5b,
+    0x07,0x04,0x16,0x10,0xf3,0xf2,0xe1,0x94,0x17,0x81,0x82,0xd1,0x54,0x40,0xf0,0x7e,0xb4,0x2d,0x16,0x2d,0x83,0x82,0x0a,0xb8,
+    0x0e,0x08,0x2c,0x11,0xe7,0xf5,0xc5,0x02,0x10,0x96,0xb7,0x8b,0x45,0x91,0x60,0xc0,0x83,0x89,0x60,0xa8,0x07,0x2b,0xf9,0x70,
+    0x1c,0x10,0x65,0xb0,0x70,0x41,0xa0,0x8f,0x3f,0xae,0x28,0x18,0x10,0x91,0xf8,0x19,0x54,0x81,0xf1,0x20,0xc0,0x84,0x8f,0xc0,
+    0xcf,0x90,0x3d,0x6c,0x1c,0x10,0x55,0xa0,0x70,0x41,0x7f,0xcf,0x7f,0xf8,0x90,0x60,0x43,0x0b,0xc1,0x85,0xfb,0x80,0xc4,0x81,
+    0x8a,0xe1,0x81,0x81,0x0c,0x56,0x0c,0x2f,0xd0,0x31,0x40,0x62,0xb5,0x80,0x70,0x41,0x55,0xc1,0xc1,0x05,0x7e,0xff,0x5c,0x18,
+    0x30,0x21,0xbf,0x06,0x17,0xe8,0x18,0xa0,0x31,0x5b,0xec,0x06,0x00,0x34,0x18,0x6f,0xbf,0xa0,0x0c,0x95,0x01,0xc1,0x04,0x51,
+    0x07,0x04,0x11,0xf4,0xf4,0x0f,0x77,0x06,0x04,0x34,0x18,0x6f,0xc5,0x60,0xaa,0x07,0x34,0x09,0xd0,0x18,0x10,0xef,0x03,0x0b,
+    0xf8,0x5e,0x0a,0xa0,0x73,0x40,0x93,0xc7,0x8e,0x4d,0x51,0xe5,0xdc,0xf9,0x03,0xdc,0x01,0x81,0x0e,0x54,0x0a,0x71,0xf8,0x11,
+    0x82,0xc6,0x98,0x0c,0x00,0x69,0x72,0xe5,0xe0,0xe4,0xff,0x25,0x67,0x80,0xcf,0x90,0xb9,0x1f,0x15,0x01,0x9f,0x21,0x73,0xe4,
+    0x17,0x63,0x06,0x04,0x34,0x7c,0x05,0xc0,0x8b,0xd7,0x41,0x81,0x0c,0x12,0x00,0xc0,0x2a,0xfe,0x2d,0x42,0x03,0x80,0x66,0x21,
+    0x71,0xf0,0x1c,0x03,0x30,0x6a,0xfb,0x05,0xd5,0xc1,0x81,0x0c,0x12,0x01,0x4c,0x0a,0xbf,0x8b,0x54,0xa0,0x06,0x09,0x00,0xa6,
+    0x05,0x5f,0xc5,0xa7,0x40,0xa2,0x03,0x2e,0x38,0xd2,0xe2,0x77,0xe8,0x10,0x00,0x09,0x43,0xa6,0x72,0xbf,0x3c,0xea,0x0a,0x1c,
+    0x60,0xb4,0x54,0xee,0x5e,0x6a,0x16,0x21,0xcf,0xb4,0x75,0x06,0x07,0xdb,0xec,0x14,0x0b,0x1d,0xc1,0x81,0xf4,0x1f,0x87,0xc2,
+    0xc8,0x98,0xe8,0x37,0xfa,0x3d,0x83,0x03,0xe6,0x3f,0x27,0x7f,0x06,0x07,0xd4,0x4b,0x03,0x25,0x62,0xd9,0x19,0x38,0x27,0xfe,
+    0xfe,0x0c,0x0f,0xb0,0xfc,0x0c,0xd4,0x3f,0x16,0xbc,0x41,0x24,0xbf,0x04,0x7a,0x04,0x7e,0x2d,0x90,0x74,0x2c,0xfc,0xcc,0x4f,
+    0x28,0xc6,0x09,0xfd,0x6e,0x80,0xa1,0x2f,0x03,0x34,0x08,0xfc,0x5a,0xe2,0x07,0xd5,0x81,0x9a,0x85,0xf0,0x60,0x7c,0x32,0x82,
+    0x06,0x07,0xe4,0x32,0xff,0x77,0xff,0x6f,0x53,0xf2,0x76,0xdd,0x7c,0x08,0x30,0x3e,0xe0,0x15,0xfe,0x0c,0x0f,0xaa,0xa0,0xc1,
+    0xf3,0xff,0x2d,0x6d,0x0f,0xc0,0xc0,0xfa,0x2a,0x0c,0x1f,0x81,0x81,0xf3,0x54,0x20,0xb9,0xee,0x07,0x4c,0x86,0x18,0xdf,0x01,
+    0x81,0xf3,0xf0,0x9d,0xf0,0x18,0x1f,0x30,0xc9,0xe6,0x0b,0x2c,0x0b,0xaf,0x40,0xc0,0xf9,0x86,0x4f,0x00,0xc0,0xf9,0xc2,0x67,
+    0x98,0x3c,0xac,0xaa,0xba,0x83,0x03,0xe5,0x16,0x01,0xce,0x20,0xe0,0xf9,0xbd,0x41,0xed,0x45,0x2c,0xc3,0x68,0x38,0x3e,0x6d,
+    0x00,0xe0,0xfa,0x3d,0xc1,0x00,0x00,0x4c,0x1d,0xab,0xda,0xb3,0x51,0x2f,0xaa,0xcd,0x45,0xc5,0x39,0xf1,0xf3,0x8c,0x90,0x8e,
+    0x92,0x86,0x7f,0x9c,0xa4,0xbc,0xb4,0xcc,0x1f,0xfa,0xb9,0x48,0xf1,0xd0,0x8b,0xb8,0x9b,0x94,0xb6,0x51,0x84,0xbb,0xa6,0x2c,
+    0x52,0xaf,0xde,0xad,0x58,0x2b,0x82,0x32,0xfe,0xb9,0x50,0x60,0x81,0xc1,0x86,0xfb,0x06,0x27,0xf5,0x50,0x39,0xa0,0x93,0x20,
+    0xc1,0x04,0x01,0x81,0xf8,0x31,0x3f,0x7e,0x07,0x34,0x12,0x4a,0x84,0x21,0xf6,0x88,0xa0,0x46,0x0b,0x25,0xc1,0x81,0x08,0x2e,
+    0xe8,0x31,0xbf,0xc3,0x60,0x95,0xe5,0x1f,0x90,0x40,0x73,0x40,0xa0,0xd7,0x35,0xa1,0x00,0x18,0x50,0x42,0xf0,0x62,0x7e,0x81,
+    0xce,0x03,0x9a,0x52,0x01,0xc0,0xc2,0x83,0x2b,0x06,0x27,0xe2,0x83,0x9a,0x05,0x32,0x0c,0x08,0x41,0x72,0xd7,0x51,0xa4,0x2d,
+    0x60,0x59,0x36,0x0c,0x08,0x38,0x94,0xb1,0x6f,0xfa,0xca,0xed,0x62,0x33,0x6f,0x18,0x18,0x42,0xae,0x6c,0x7a,0x01,0x80,0xc2,
+    0x83,0xfc,0x18,0x9f,0x7f,0x8b,0x26,0x47,0xa0,0x80,0x0c,0x28,0x47,0xc1,0x89,0xf7,0x56,0x2c,0x9d,0x05,0x00,0x91,0xa2,0x35,
+    0x2d,0x1c,0xf0,0x5d,0x3a,0x0a,0x01,0x2b,0xa3,0xaa,0x04,0x41,0x3d,0xdc,0x2f,0x85,0x5c,0xb0,0x21,0x80,0x60,0x30,0xa1,0x00,
+    0xc5,0x7c,0x86,0x72,0x80,0xc0,0xfc,0x84,0x00,0x61,0x42,0x3c,0x0c,0x4f,0xd8,0x67,0x36,0x0c,0x08,0x38,0xfb,0x04,0x60,0x55,
+    0x83,0x45,0xfe,0x6a,0x5c,0x14,0x02,0x50,0x29,0x86,0x6e,0xfc,0x5c,0x5b,0x0c,0x79,0x10,0x60,0x7e,0x02,0x10,0x30,0xa1,0x05,
+    0xc0,0x45,0xf1,0xa0,0xc0,0xfb,0x89,0x00,0xc2,0x84,0x03,0x80,0x0d,0x25,0x04,0xa1,0xf0,0x29,0x86,0x31,0xc0,0x84,0x3e,0x05,
+    0x34,0x06,0xb0,0x04,0xd3,0xb4,0x36,0x85,0xcb,0x88,0x06,0x07,0xdc,0x4a,0x06,0x14,0x20,0x21,0x20,0x15,0xc2,0x03,0x03,0xec,
+    0x3e,0x06,0x14,0x20,0x20,0x01,0x01,0x5c,0x40,0x1e,0x1f,0x02,0x9b,0xc8,0x01,0x7d,0x07,0x15,0x0f,0x81,0x4d,0xe0,0x20,0x0b,
+    0x37,0x74,0x25,0xa0,0xb2,0x86,0x5b,0xf8,0x30,0x3e,0xc5,0xc0,0xc2,0x83,0x84,0x00,0x62,0x40,0xc5,0x6f,0x10,0x03,0x7c,0x0c,
+    0x28,0x28,0x91,0x01,0xc8,0x04,0x7e,0x50,0x5c,0x0a,0x65,0x48,0x1c,0xeb,0x15,0x17,0x01,0x8f,0x0d,0x1e,0x11,0xfd,0x08,0xae,
+    0x18,0x00,0x02,0x70,0xe5,0x5f,0x95,0x9a,0x8e,0x7e,0x56,0x23,0x3e,0x42,0x31,0xed,0xf3,0x90,0x94,0x92,0x96,0x87,0x7f,0x9c,
+    0xc4,0xdc,0xdc,0xf4,0x1f,0xfa,0x79,0xa9,0x78,0x0c,0x1f,0xd9,0x72,0x32,0xaa,0x1f,0x32,0x6a,0x76,0x7e,0xfe,0x79,0x59,0x75,
+    0xb7,0x83,0xb0,0x62,0xc0,0x9f,0x03,0x0d,0xc0,0xbe,0x19,0x91,0xbc,0x05,0xd4,0x6e,0xf3,0xe3,0x90,0x46,0x25,0x9a,0x06,0x07,
+    0xe0,0x7c,0x06,0x4b,0x28,0x34,0xdf,0xef,0xa2,0xa1,0x05,0x56,0x03,0x07,0xf9,0x00,0x82,0xb4,0x8c,0x26,0x1c,0x12,0xbc,0x23,
+    0xa5,0xa3,0x98,0x8c,0x9a,0x14,0x16,0x2f,0x92,0xad,0x25,0xa0,0x5c,0x3e,0xc4,0x13,0x63,0x6f,0x97,0x05,0x28,0xdb,0x8d,0x08,
+    0x48,0xcf,0x53,0x03,0x03,0xef,0xe1,0xd0,0x28,0xe0,0x31,0x5f,0x8b,0x32,0x8d,0xb4,0xfd,0x68,0xfd,0x58,0x30,0x3e,0xea,0x94,
+    0xe8,0xf6,0x25,0xc5,0x69,0x98,0xc4,0xa1,0x83,0xc6,0x21,0xe2,0xc2,0xb8,0x60,0x68,0x80,0x5c,0xf9,0x75,0x15,0x32,0x05,0xb4,
+    0x11,0x74,0x9a,0x55,0xa8,0x21,0x02,0x30,0x13,0x24,0xac,0x06,0x07,0xd9,0x50,0x19,0x2c,0xa0,0xd2,0xff,0x9f,0xae,0x06,0x07,
+    0xd5,0x56,0x08,0xe0,0xaa,0x4a,0x0a,0xe3,0x4e,0xa8,0x83,0x96,0x0f,0x43,0x83,0x43,0xfe,0xf5,0xc9,0x83,0x5a,0x03,0x18,0x07,
+    0x08,0x0a,0x44,0x1b,0x50,0x12,0x5a,0xe0,0x60,0x7d,0x4b,0x97,0xf8,0xd2,0xb0,0x18,0x1f,0x42,0xe5,0xfe,0x34,0x75,0x0a,0xe2,
+    0x3c,0x80,0x3e,0x7f,0xf1,0x80,0xd9,0x80,0x9d,0xac,0x06,0x07,0xd1,0x50,0x17,0x07,0xcf,0xfe,0xa4,0x18,0x1f,0x45,0x40,0x5c,
+    0x23,0x76,0x88,0xdc,0x3c,0x18,0x70,0x52,0x20,0x9a,0xa7,0x06,0x07,0xcd,0x52,0x70,0x8e,0x90,0x18,0x1f,0x32,0xe0,0x2e,0x58,
+    0x39,0x20,0x76,0x88,0x3b,0xf1,0xf7,0x7d,0x04,0x89,0xf0,0x60,0x7c,0x8b,0x81,0x52,0x82,0x03,0x1b,0xfc,0x1d,0x12,0xcc,0x03,
+    0x03,0xe3,0xe5,0x83,0x37,0x88,0x70,0xe0,0x56,0x00,0x01,0x40,0x73,0x4f,0xbf,0x11,0x9f,0x22,0xfe,0xc5,0x22,0x8f,0x20,0x8d,
+    0x7b,0x7e,0x65,0x26,0x64,0x78,0x4d,0x2d,0x6c,0x26,0x87,0x7f,0xf4,0x0f,0xa7,0x7c,0xe5,0xb1,0x68,0x5a,0x2d,0xa2,0xdf,0x01,
+    0x82,0xd7,0xc1,0x41,0x7a,0xf7,0xdd,0x7f,0xad,0xae,0x2e,0xa3,0x57,0x7c,0x06,0x23,0x40,0x5f,0x89,0x9b,0x46,0x43,0x6b,0xfc,
+    0xff,0x32,0x44,0x9a,0x53,0x5e,0xfb,0x0b,0xf2,0x6e,0x03,0x8b,0xfd,0xa3,0x6c,0x07,0x17,0xfb,0x1a,0xaa,0x76,0x60,0xf1,0xcf,
+    0x10,0xff,0x5b,0x60,0x38,0xbf,0xdb,0x3b,0x17,0x18,0xe7,0x8f,0x1d,0xa4,0x3c,0x0c,0x9d,0x22,0x3e,0xd7,0x00,0xc9,0x63,0xf7,
+    0x38,0xb8,0xc7,0x4c,0x78,0xed,0x31,0xe3,0x9d,0x22,0x3e,0xe7,0x17,0x19,0x6d,0xec,0x27,0xd0,0x8a,0xbc,0x3b,0xe3,0x05,0x2e,
+    0xd2,0x51,0xe2,0x67,0x58,0x8f,0xb5,0xb0,0x97,0x63,0x61,0x2e,0x82,0x88,0x4d,0x9a,0xa3,0xc4,0xce,0x91,0x3f,0x57,0x61,0x2e,
+    0x96,0xc2,0x5c,0x95,0x10,0x9b,0x13,0x21,0x33,0xa4,0x4f,0xcd,0x9f,0x84,0xd8,0x92,0xc5,0x86,0x77,0xd2,0x3a,0xc7,0xc2,0x32,
+    0x38,0x2f,0x73,0x89};
+
+    // Expected results
+    H263Info infoRefQCIF_N;
+    infoRefQCIF_N.uiH263PTypeFmt = 2;
+    infoRefQCIF_N.codecBits      = 0;
+    infoRefQCIF_N.pQuant         = 7;
+    infoRefQCIF_N.fType          = 0;
+    infoRefQCIF_N.cpmBit         = 0;
+    infoRefQCIF_N.numOfGOBs      = 9;
+    infoRefQCIF_N.totalNumOfMBs  = 11*9;
+
+    infoRefQCIF_N.ptrGOBbuffer[0] = 0;    infoRefQCIF_N.ptrGOBbufferSBit[0] = 0; infoRefQCIF_N.ptrGQuant[0] = 0;
+    infoRefQCIF_N.ptrGOBbuffer[1] = 215;  infoRefQCIF_N.ptrGOBbufferSBit[1] = 2; infoRefQCIF_N.ptrGQuant[1] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[2] = 456;  infoRefQCIF_N.ptrGOBbufferSBit[2] = 1; infoRefQCIF_N.ptrGQuant[2] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[3] = 535;  infoRefQCIF_N.ptrGOBbufferSBit[3] = 5; infoRefQCIF_N.ptrGQuant[3] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[4] = 615;  infoRefQCIF_N.ptrGOBbufferSBit[4] = 7; infoRefQCIF_N.ptrGQuant[4] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[5] = 925;  infoRefQCIF_N.ptrGOBbufferSBit[5] = 4; infoRefQCIF_N.ptrGQuant[5] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[6] = 1133; infoRefQCIF_N.ptrGOBbufferSBit[6] = 1; infoRefQCIF_N.ptrGQuant[6] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[7] = 1512; infoRefQCIF_N.ptrGOBbufferSBit[7] = 6; infoRefQCIF_N.ptrGQuant[7] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[8] = 1832; infoRefQCIF_N.ptrGOBbufferSBit[8] = 7; infoRefQCIF_N.ptrGQuant[8] = 7;
+
+    // --------------------------------------------------
+    // Input data - H.263 encoded stream CIF (I-frame)
+    // --------------------------------------------------
+
+    FILE* openFile = fopen("H263Foreman_CIF_Iframe.bin", "rb");
+
+    fseek(openFile, 0, SEEK_END);
+    int lengthRefCIF_I = ftell(openFile);
+    fseek(openFile, 0, SEEK_SET);
+
+    unsigned char* encodedStreamCIF_I = new unsigned char[lengthRefCIF_I];
+    fread(encodedStreamCIF_I, 1, lengthRefCIF_I, openFile);
+    fclose(openFile);
+
+    // Expected results
+    H263Info infoRefCIF_I;
+    infoRefCIF_I.uiH263PTypeFmt = 3;
+    infoRefCIF_I.codecBits      = 0;
+    infoRefCIF_I.pQuant         = 5;
+    infoRefCIF_I.fType          = 0;
+    infoRefCIF_I.cpmBit         = 0;
+    infoRefCIF_I.numOfGOBs      = 18;
+    infoRefCIF_I.totalNumOfMBs  = 22*18;
+
+    infoRefCIF_I.ptrGOBbuffer[0]  = 0;     infoRefCIF_I.ptrGOBbufferSBit[0]  = 0; infoRefCIF_I.ptrGQuant[0]  = 0;
+    infoRefCIF_I.ptrGOBbuffer[1]  = 1607;  infoRefCIF_I.ptrGOBbufferSBit[1]  = 0; infoRefCIF_I.ptrGQuant[1]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[2]  = 2759;  infoRefCIF_I.ptrGOBbufferSBit[2]  = 0; infoRefCIF_I.ptrGQuant[2]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[3]  = 3699;  infoRefCIF_I.ptrGOBbufferSBit[3]  = 0; infoRefCIF_I.ptrGQuant[3]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[4]  = 4506;  infoRefCIF_I.ptrGOBbufferSBit[4]  = 0; infoRefCIF_I.ptrGQuant[4]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[5]  = 5260;  infoRefCIF_I.ptrGOBbufferSBit[5]  = 0; infoRefCIF_I.ptrGQuant[5]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[6]  = 6254;  infoRefCIF_I.ptrGOBbufferSBit[6]  = 0; infoRefCIF_I.ptrGQuant[6]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[7]  = 7117;  infoRefCIF_I.ptrGOBbufferSBit[7]  = 0; infoRefCIF_I.ptrGQuant[7]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[8]  = 7804;  infoRefCIF_I.ptrGOBbufferSBit[8]  = 0; infoRefCIF_I.ptrGQuant[8]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[9]  = 8600;  infoRefCIF_I.ptrGOBbufferSBit[9]  = 0; infoRefCIF_I.ptrGQuant[9]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[10]  = 9419; infoRefCIF_I.ptrGOBbufferSBit[10] = 0; infoRefCIF_I.ptrGQuant[10] = 5;
+    infoRefCIF_I.ptrGOBbuffer[11] = 10126; infoRefCIF_I.ptrGOBbufferSBit[11] = 0; infoRefCIF_I.ptrGQuant[11] = 5;
+    infoRefCIF_I.ptrGOBbuffer[12] = 10723; infoRefCIF_I.ptrGOBbufferSBit[12] = 0; infoRefCIF_I.ptrGQuant[12] = 5;
+    infoRefCIF_I.ptrGOBbuffer[13] = 11280; infoRefCIF_I.ptrGOBbufferSBit[13] = 0; infoRefCIF_I.ptrGQuant[13] = 5;
+    infoRefCIF_I.ptrGOBbuffer[14] = 11910; infoRefCIF_I.ptrGOBbufferSBit[14] = 0; infoRefCIF_I.ptrGQuant[14] = 5;
+    infoRefCIF_I.ptrGOBbuffer[15] = 12430; infoRefCIF_I.ptrGOBbufferSBit[15] = 0; infoRefCIF_I.ptrGQuant[15] = 5;
+    infoRefCIF_I.ptrGOBbuffer[16] = 12925; infoRefCIF_I.ptrGOBbufferSBit[16] = 0; infoRefCIF_I.ptrGQuant[16] = 5;
+    infoRefCIF_I.ptrGOBbuffer[17] = 13506; infoRefCIF_I.ptrGOBbufferSBit[17] = 0; infoRefCIF_I.ptrGQuant[17] = 5;
+
+    // --------------------------------------------------
+    // Input data - H.263 encoded stream CIF (P-frame)
+    // --------------------------------------------------
+
+    openFile = fopen("H263Foreman_CIF_Pframe.bin", "rb");
+
+    fseek(openFile, 0, SEEK_END);
+    int lengthRefCIF_P = ftell(openFile);
+    fseek(openFile, 0, SEEK_SET);
+
+    unsigned char* encodedStreamCIF_P = new unsigned char[lengthRefCIF_P];
+    fread(encodedStreamCIF_P, 1, lengthRefCIF_P, openFile);
+    fclose(openFile);
+
+    // Expected results
+    H263Info infoRefCIF_P;
+    infoRefCIF_P.uiH263PTypeFmt = 3;
+    infoRefCIF_P.codecBits      = 8;
+    infoRefCIF_P.pQuant         = 4;
+    infoRefCIF_P.fType          = 1;
+    infoRefCIF_P.cpmBit         = 0;
+    infoRefCIF_P.numOfGOBs      = 18;
+    infoRefCIF_P.totalNumOfMBs  = 22*18;
+
+    infoRefCIF_P.ptrGOBbuffer[0]  = 0;    infoRefCIF_P.ptrGOBbufferSBit[0]  = 0; infoRefCIF_P.ptrGQuant[0]  = 0;
+    infoRefCIF_P.ptrGOBbuffer[1]  = 252;  infoRefCIF_P.ptrGOBbufferSBit[1]  = 0; infoRefCIF_P.ptrGQuant[1]  = 5;
+    infoRefCIF_P.ptrGOBbuffer[2]  = 482;  infoRefCIF_P.ptrGOBbufferSBit[2]  = 0; infoRefCIF_P.ptrGQuant[2]  = 6;
+    infoRefCIF_P.ptrGOBbuffer[3]  = 581;  infoRefCIF_P.ptrGOBbufferSBit[3]  = 0; infoRefCIF_P.ptrGQuant[3]  = 6;
+    infoRefCIF_P.ptrGOBbuffer[4]  = 676;  infoRefCIF_P.ptrGOBbufferSBit[4]  = 0; infoRefCIF_P.ptrGQuant[4]  = 7;
+    infoRefCIF_P.ptrGOBbuffer[5]  = 756;  infoRefCIF_P.ptrGOBbufferSBit[5]  = 0; infoRefCIF_P.ptrGQuant[5]  = 7;
+    infoRefCIF_P.ptrGOBbuffer[6]  = 855;  infoRefCIF_P.ptrGOBbufferSBit[6]  = 0; infoRefCIF_P.ptrGQuant[6]  = 8;
+    infoRefCIF_P.ptrGOBbuffer[7]  = 949;  infoRefCIF_P.ptrGOBbufferSBit[7]  = 0; infoRefCIF_P.ptrGQuant[7]  = 9;
+    infoRefCIF_P.ptrGOBbuffer[8]  = 1004; infoRefCIF_P.ptrGOBbufferSBit[8]  = 0; infoRefCIF_P.ptrGQuant[8]  = 10;
+    infoRefCIF_P.ptrGOBbuffer[9]  = 1062; infoRefCIF_P.ptrGOBbufferSBit[9]  = 0; infoRefCIF_P.ptrGQuant[9]  = 11;
+    infoRefCIF_P.ptrGOBbuffer[10] = 1115; infoRefCIF_P.ptrGOBbufferSBit[10] = 0; infoRefCIF_P.ptrGQuant[10] = 11;
+    infoRefCIF_P.ptrGOBbuffer[11] = 1152; infoRefCIF_P.ptrGOBbufferSBit[11] = 0; infoRefCIF_P.ptrGQuant[11] = 13;
+    infoRefCIF_P.ptrGOBbuffer[12] = 1183; infoRefCIF_P.ptrGOBbufferSBit[12] = 0; infoRefCIF_P.ptrGQuant[12] = 14;
+    infoRefCIF_P.ptrGOBbuffer[13] = 1214; infoRefCIF_P.ptrGOBbufferSBit[13] = 0; infoRefCIF_P.ptrGQuant[13] = 15;
+    infoRefCIF_P.ptrGOBbuffer[14] = 1257; infoRefCIF_P.ptrGOBbufferSBit[14] = 0; infoRefCIF_P.ptrGQuant[14] = 16;
+    infoRefCIF_P.ptrGOBbuffer[15] = 1286; infoRefCIF_P.ptrGOBbufferSBit[15] = 0; infoRefCIF_P.ptrGQuant[15] = 16;
+    infoRefCIF_P.ptrGOBbuffer[16] = 1321; infoRefCIF_P.ptrGOBbufferSBit[16] = 0; infoRefCIF_P.ptrGQuant[16] = 16;
+    infoRefCIF_P.ptrGOBbuffer[17] = 1352; infoRefCIF_P.ptrGOBbufferSBit[17] = 0; infoRefCIF_P.ptrGQuant[17] = 14;
+
+    //---------------------------------------------------------------
+    //---------------------------------------------------------------
+    //---------------------------------------------------------------
+    // Start test
+    const H263Info* ptrInfoSQCIF = NULL;
+    const H263MBInfo* ptrMBInfoSQCIF = NULL;
+    const H263Info* ptrInfoQCIF = NULL;
+    const H263MBInfo* ptrMBInfoQCIF = NULL;
+    const H263Info* ptrInfoCIF = NULL;
+    const H263MBInfo* ptrMBInfoCIF = NULL;
+    const H263Info* ptrInfoQCIF_N = NULL;
+    const H263MBInfo* ptrMBInfoQCIF_N = NULL;
+    const H263Info* ptrInfoCIF_I = NULL;
+    const H263MBInfo* ptrMBInfoCIF_I = NULL;
+    const H263Info* ptrInfoCIF_P = NULL;
+    const H263MBInfo* ptrMBInfoCIF_P = NULL;
+    H263Information h263Information;
+
+    // Input buffer
+    const int length = 3000;
+    unsigned char* encodedBuffer = new unsigned char[lengthRefCIF_P];
+
+    // Test invalid inputs
+    assert(-1 == h263Information.GetInfo(NULL, length, ptrInfoSQCIF));
+    assert(-1 == h263Information.GetInfo(encodedBuffer, 0, ptrInfoSQCIF));
+    assert(-1 == h263Information.GetInfo(encodedBuffer, length, ptrInfoSQCIF)); // invalid H.263 stream
+//    assert(-1 == h263Information.GetInfo(encodedStreamSQCIF, lengthRefSQCIF/2, ptrInfoSQCIF)); // invalid H.263 stream
+
+    assert(-1 == h263Information.GetMBInfo(NULL, length, 0, ptrMBInfoSQCIF));
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, 0, 0, ptrMBInfoSQCIF));
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, length, 0, ptrMBInfoSQCIF));
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, length, -1, ptrMBInfoSQCIF)); // incorrect group number
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, length, 8, ptrMBInfoSQCIF));  // incorrect group number
+
+    // ----------------------------------------------
+    // Get info from encoded H.263 stream - SQCIF
+    // ----------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamSQCIF, lengthRefSQCIF, ptrInfoSQCIF));
+    ValidateResults(ptrInfoSQCIF, &infoRefSQCIF);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoSQCIF->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamSQCIF, lengthRefSQCIF, i, ptrMBInfoSQCIF));
+    }
+    ValidateMBResults(ptrInfoSQCIF, ptrMBInfoSQCIF, &infoRefSQCIF);
+
+    // ---------------------------------------------
+    // Get info from encoded H.263 stream - QCIF
+    // ---------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamQCIF, lengthRefQCIF, ptrInfoQCIF));
+    ValidateResults(ptrInfoQCIF, &infoRefQCIF);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoQCIF->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamQCIF, lengthRefQCIF, i, ptrMBInfoQCIF));
+    }
+    ValidateMBResults(ptrInfoQCIF, ptrMBInfoQCIF, &infoRefQCIF);
+
+    // --------------------------------------------
+    // Get info from encoded H.263 stream - CIF
+    // --------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamCIF, lengthRefCIF, ptrInfoCIF));
+    ValidateResults(ptrInfoCIF, &infoRefCIF);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoCIF->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamCIF, lengthRefCIF, i, ptrMBInfoCIF));
+    }
+    ValidateMBResults(ptrInfoCIF, ptrMBInfoCIF, &infoRefCIF);
+
+    // ----------------------------------------------------------------------
+    // Get info from encoded H.263 stream - QCIF - non byte aligned GOBs
+    // ----------------------------------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamQCIF_N, lengthRefQCIF_N, ptrInfoQCIF_N));
+    ValidateResults(ptrInfoQCIF_N, &infoRefQCIF_N);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoQCIF_N->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamQCIF_N, lengthRefQCIF_N, i, ptrMBInfoQCIF_N));
+    }
+    ValidateMBResults(ptrInfoQCIF_N, ptrMBInfoQCIF_N, &infoRefQCIF_N);
+
+    // -------------------------------------------
+    // Get info from encoded H.263 stream - CIF
+    // -------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamCIF_I, lengthRefCIF_I, ptrInfoCIF_I));
+    ValidateResults(ptrInfoCIF_I, &infoRefCIF_I);
+
+    // Get MB info
+    unsigned int start = timeGetTime();
+    for (int i = 0; i < ptrInfoCIF_I->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamCIF_I, lengthRefCIF_I, i, ptrMBInfoCIF_I));
+    }
+    unsigned int endMB = timeGetTime();
+    ValidateMBResults(ptrInfoCIF_I, ptrMBInfoCIF_I, &infoRefCIF_I, false);
+
+    std::cout << "I-frame, length: " << lengthRefCIF_I << " bytes. Time: " << endMB - start << " ms." << std::endl;
+    PRINT_LINE;
+
+    // -------------------------------------------
+    // Get info from encoded H.263 stream - CIF
+    // -------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamCIF_P, lengthRefCIF_P, ptrInfoCIF_P));
+    ValidateResults(ptrInfoCIF_P, &infoRefCIF_P);
+
+    // Get MB info
+    start = timeGetTime();
+    for (int i = 0; i < ptrInfoCIF_P->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamCIF_P, lengthRefCIF_P, i, ptrMBInfoCIF_P));
+    }
+    endMB = timeGetTime();
+    ValidateMBResults(ptrInfoCIF_P, ptrMBInfoCIF_P, &infoRefCIF_P, false);
+
+    std::cout << "P-frame, length:  " << lengthRefCIF_P << " bytes. Time: " << endMB - start << " ms." << std::endl;
+    PRINT_LINE;
+
+    delete [] encodedStreamCIF_I;
+    delete [] encodedStreamCIF_P;
+    delete [] encodedBuffer;
+
+    TEST_PASSED();
+    ::Sleep(5000);
+    return 0;
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/testRateControl/testRateControl.cc b/trunk/src/modules/rtp_rtcp/test/testRateControl/testRateControl.cc
new file mode 100644
index 0000000..c282557
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testRateControl/testRateControl.cc
@@ -0,0 +1,271 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <windows.h>
+#include <iostream>
+#include <tchar.h>
+
+#include "rtp_rtcp.h"
+#include "common_types.h"
+#include "RateControlDetector.h"
+/*#include "rtcp_utility.h"
+#include "tmmbr_help.h"*/
+
+#define TEST_STR "Test RateControl."
+#define TEST_PASSED() std::cerr << TEST_STR << " : [OK]" << std::endl
+#define PRINT_LINE std::cout << "------------------------------------------" << std::endl;
+
+
+const int maxFileLen = 200;
+// Scratch buffer that GetFile() fills with an RTCP packet read from disk.
+// BUG FIX: this was declared as an array of maxFileLen POINTERS
+// (WebRtc_UWord8*), yet it is only ever used as a raw byte buffer via
+// fread(dataFile, ...) and (const WebRtc_UWord8*)dataFile, and GetFile()
+// guards the read with len < maxFileLen as if maxFileLen were a byte count.
+// Declaring it as bytes makes the capacity check actually match the storage.
+WebRtc_UWord8 dataFile[maxFileLen];
+
+
+// One TMMBR-style test vector; initialized below with brace aggregates in
+// field order {TMMBR, packetOH, SSRC}, so the field order must not change.
+struct InputSet
+{
+    WebRtc_UWord32 TMMBR;     // requested bitrate value carried by the set
+    WebRtc_UWord32 packetOH;  // packet overhead value
+    WebRtc_UWord32 SSRC;      // SSRC the request is associated with
+};
+
+// Predefined request sets used by the tests. Note that set4, set4_1 and
+// set4_2 share SSRC 55555 (same sender, different bitrates), and set00
+// reuses set5's SSRC 66666 with bitrate 0 — presumably a "cleared" request;
+// confirm against the tests that consume it.
+const InputSet set0   = {220,  80, 11111};  // bitRate, packetOH, ssrc
+const InputSet set1   = {180,  90, 22222};
+const InputSet set2   = {100, 210, 33333};
+const InputSet set3   = { 35,  40, 44444};
+const InputSet set4   = { 40,  60, 55555};
+const InputSet set4_1 = {100,  60, 55555};
+const InputSet set4_2 = { 10,  60, 55555};
+const InputSet set5   = {200,  40, 66666};
+const InputSet set00  = {  0,  40, 66666};
+
+
+
+
+// Reads the whole file |fileName| into the global dataFile buffer.
+// Returns the number of bytes read, or 0 when |fileName| is the empty
+// string. Test code: failures (unopenable file, oversized file, short
+// read) abort via assert.
+WebRtc_Word32 GetFile(char* fileName)
+{
+    if (!fileName[0])
+    {
+        return 0;
+    }
+
+    FILE* openFile = fopen(fileName, "rb");
+    assert(openFile != NULL);
+    fseek(openFile, 0, SEEK_END);
+    // ftell() returns long; the old code truncated it through a
+    // WebRtc_Word16 cast, which would mangle any length above 32767
+    // before the range assert ever ran.
+    long len = ftell(openFile);
+    rewind(openFile);
+    assert(len > 0 && len < maxFileLen);
+    size_t bytesRead = fread(dataFile, 1, (size_t)len, openFile);
+    fclose(openFile);
+    // The old code ignored fread's result; a short read would have been
+    // passed on silently as valid packet data.
+    assert(bytesRead == (size_t)len);
+    (void)bytesRead;  // silence unused-variable warning when NDEBUG strips the assert
+    return (WebRtc_Word32)len;
+}
+
+
+// Loopback transport: packets the RTP/RTCP module "sends" are fed straight
+// back into the same module's IncomingPacket().
+class LoopBackTransport2 : public webrtc::Transport
+{
+public:
+    LoopBackTransport2(RtpRtcp* rtpRtcpModule)  :
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    // Loop an outgoing RTP packet directly back into the module.
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len);
+    }
+    // Instead of looping the outgoing RTCP packet back, inject an RTCP
+    // packet loaded from disk. NOTE(review): fileName is always empty in
+    // this variant, so GetFile() returns 0 and nothing is ever injected —
+    // confirm whether a filename was meant to be set here.
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 0;
+        }
+
+        // Send in bitrate request
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len);
+    }
+    RtpRtcp* _rtpRtcpModule;  // module under test; not owned
+    WebRtc_UWord32       _cnt;  // never incremented in this class
+};
+
+
+// Loopback transport for the video module: outgoing RTP is fed back into
+// the module; outgoing RTCP is replaced by a canned TMMBR packet read from
+// "RTCPPacketTMMBR0.bin" in the working directory.
+class LoopBackTransportVideo : public webrtc::Transport
+{
+public:
+    LoopBackTransportVideo(RtpRtcp* rtpRtcpModule)  :
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    // Loop an outgoing RTP packet directly back into the module.
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len);
+    }
+    // Replace the outgoing RTCP packet with the stored TMMBR packet and
+    // feed that into the module instead.
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+
+        strcpy(fileName, "RTCPPacketTMMBR0.bin");
+
+        ++_cnt;  // count of RTCP sends intercepted
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 0;
+        }
+
+        // Send in bitrate request
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len);
+    }
+
+    RtpRtcp* _rtpRtcpModule;  // module under test; not owned
+    WebRtc_UWord32       _cnt;  // number of intercepted RTCP sends
+};
+
+// Drives RateControlDetector through three synthetic receive scenarios and
+// prints the bitrate the detector proposes after each batch of packets.
+// Inherits privately so the test can call the detector's protected/normal
+// interface directly.
+class TestRateControl : private RateControlDetector
+{
+public:
+    TestRateControl():RateControlDetector(0)
+    {
+    }
+    ~TestRateControl()
+    {
+    }
+    // Runs all three scenarios back to back. Timestamps are in the RTP
+    // 90 kHz video clock (rtpTs/90 converts to milliseconds); arrival times
+    // are derived from the RTP timestamp plus a fixed network delay.
+    void Start()
+    {
+        // Scenario 1: perfect conditions — packets arrive exactly on
+        // schedule with a constant 10 ms network delay.
+        //Test perfect conditions
+        // But only one packet per frame
+        SetLastUsedBitRate(500);
+        WebRtc_UWord32 rtpTs=1234*90;
+        WebRtc_UWord32 framePeriod=33; // In Ms
+        WebRtc_UWord32 rtpDelta=framePeriod*90;  // one frame period in 90 kHz ticks
+        WebRtc_UWord32 netWorkDelay=10;  // ms
+        WebRtc_UWord32 arrivalTime=rtpTs/90+netWorkDelay;
+        WebRtc_UWord32 newBitRate=0;
+        for(WebRtc_UWord32 k=0;k<10;k++)
+        {
+            // Receive 10 packets
+            for(WebRtc_UWord32 i=0;i<10;i++)
+            {
+                NotifyNewArrivedPacket(rtpTs,arrivalTime);
+                rtpTs+=rtpDelta;
+                arrivalTime=rtpTs/90+netWorkDelay;
+            }
+            // RTT passed to RateControl() is twice the one-way delay.
+            newBitRate=RateControl(2*netWorkDelay);
+            SetLastUsedBitRate(newBitRate);
+            Sleep(10*framePeriod);  // let wall-clock time advance as in real reception
+            std::cout << "RTCP Packet " << k << " new bitrate " << newBitRate << std::endl;
+        }
+        Reset();
+
+
+        // Scenario 2: RTT grows by 20 ms per round while the receive
+        // timing itself stays perfect.
+        //Test increasing RTT
+        std::cout << "Test increasing RTT - No Receive timing changes" << std::endl;
+        SetLastUsedBitRate(500);
+
+        for(WebRtc_UWord32 k=0;k<10;k++)
+        {
+            // Receive 10 packets
+            for(WebRtc_UWord32 i=0;i<10;i++)
+            {
+                NotifyNewArrivedPacket(rtpTs,arrivalTime);
+                rtpTs+=rtpDelta;
+                arrivalTime=rtpTs/90+netWorkDelay;
+            }
+            WebRtc_UWord32 rtt=2*netWorkDelay+k*20;
+            newBitRate=RateControl(rtt);
+            Sleep(10*framePeriod);
+            SetLastUsedBitRate(newBitRate);
+            std::cout << "RTCP Packet " << k << " RTT "<< rtt << " new bitrate " << newBitRate << std::endl;
+
+        }
+
+        Reset();
+
+
+        // Scenario 3: RTT grows as above AND packets arrive progressively
+        // later (per-packet +i and per-round +k*20 ms), i.e. a building
+        // queue on the receive side.
+        //Test increasing RTT
+        std::cout << "Test increasing RTT - Changed receive timing" << std::endl;
+        SetLastUsedBitRate(500);
+
+        for(WebRtc_UWord32 k=0;k<10;k++)
+        {
+            // Receive 10 packets
+            for(WebRtc_UWord32 i=0;i<10;i++)
+            {
+                NotifyNewArrivedPacket(rtpTs,arrivalTime);
+                rtpTs+=rtpDelta;
+                arrivalTime=rtpTs/90+netWorkDelay+i+(k*20);
+            }
+            WebRtc_UWord32 rtt=2*netWorkDelay+k*20;
+            newBitRate=RateControl(rtt);
+            Sleep(10*framePeriod);
+            SetLastUsedBitRate(newBitRate);
+            std::cout << "RTCP Packet " << k << " RTT "<< rtt << " new bitrate " << newBitRate << std::endl;
+
+        }
+
+
+
+    };
+};
+
+// RtpData callback that silently discards every received payload — a null
+// sink for tests that need a registered payload callback but don't inspect
+// the data. ("Zink" looks like a typo for "Sink"; the name is kept since
+// other code may reference it.)
+class NULLDataZink: public RtpData
+{
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                const WebRtc_UWord16 payloadSize,
+                                                const webrtc::WebRtcRTPHeader* rtpHeader)
+    {
+        // Accept and drop the payload.
+        return 0;
+    };
+};
+
+
+// Test driver: runs the RateControlDetector scenarios, then performs a
+// minimal single-module smoke test (create an RTP/RTCP module, register a
+// loopback transport, tear down).
+int _tmain(int argc, _TCHAR* argv[])
+{
+    std::cout << "------------------------" << std::endl;
+    std::cout << "---Test RateControl ----" << std::endl;
+    std::cout << "------------------------" << std::endl;
+    std::cout << "  "  << std::endl;
+
+    // ------------------------
+    // Test RateControl class
+    // (the old comment said "Test TMMBRHelp class" — a copy/paste left
+    // over from testTMMBR.cc)
+    // ------------------------
+    TestRateControl test;
+    test.Start();
+
+    printf("RateControl-class test done.\n");
+
+    // ------------------------------
+    // Test RateControl single module
+    // ------------------------------
+    RtpRtcp* rtpRtcpModuleVideo = RtpRtcp::CreateRtpRtcp(0, false);
+    assert(rtpRtcpModuleVideo != NULL);  // fail loudly if module creation failed
+
+    LoopBackTransportVideo* myLoopBackTransportVideo = new LoopBackTransportVideo(rtpRtcpModuleVideo);
+    assert(0 == rtpRtcpModuleVideo->RegisterSendTransport(myLoopBackTransportVideo));
+    // Only one module is exercised here; the old message claimed
+    // "Multi module test done", which was wrong.
+    printf("Single module test done.\n");
+
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo);
+    delete myLoopBackTransportVideo;
+
+    TEST_PASSED();
+    ::Sleep(5000);  // keep the console window open long enough to read the result
+
+    return 0;
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/testTMMBR/testTMMBR.cc b/trunk/src/modules/rtp_rtcp/test/testTMMBR/testTMMBR.cc
new file mode 100644
index 0000000..d1e1572
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/testTMMBR/testTMMBR.cc
@@ -0,0 +1,1034 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <windows.h>
+#include <iostream>
+#include <tchar.h>
+
+#include "rtp_rtcp.h"
+#include "common_types.h"
+#include "rtcp_utility.h"
+#include "tmmbr_help.h"
+
+#define TEST_STR "Test TMMBR."
+#define TEST_PASSED() std::cerr << TEST_STR << " : [OK]" << std::endl
+#define PRINT_LINE std::cout << "------------------------------------------" << std::endl;
+
+
+// Maximum size (in bytes) of a stored RTCP test packet read by GetFile().
+const int maxFileLen = 200;
+// Scratch buffer GetFile() freads raw RTCP packet bytes into, later cast to
+// (const WebRtc_UWord8*) when fed to RtpRtcp::IncomingPacket().
+// NOTE(review): declared as an array of 200 *pointers*, not 200 bytes. It only
+// works because 200 pointers occupy more than maxFileLen bytes; presumably
+// `WebRtc_UWord8 dataFile[maxFileLen];` was intended -- confirm before fixing.
+WebRtc_UWord8* dataFile[maxFileLen];
+
+
+// One TMMBR tuple as exercised by these tests: the requested max bit rate,
+// the measured per-packet overhead, and the SSRC of the entry's owner.
+struct InputSet
+{
+    WebRtc_UWord32 TMMBR;     // requested max total media bit rate (kbit, per the *Kbit asserts below)
+    WebRtc_UWord32 packetOH;  // measured packet overhead
+    WebRtc_UWord32 SSRC;      // SSRC identifying the owner of this entry
+};
+
+// Fixed candidate entries used throughout the tests. set4_1/set4_2 reuse
+// set4's SSRC (same owner issuing updated requests); set00 reuses set5's SSRC
+// with a TMMBR of 0 (a zero-rate entry that must not dominate the bounding set).
+const InputSet set0   = {220,  80, 11111};  // bitRate, packetOH, ssrc
+const InputSet set1   = {180,  90, 22222};
+const InputSet set2   = {100, 210, 33333};
+const InputSet set3   = { 35,  40, 44444};
+const InputSet set4   = { 40,  60, 55555};
+const InputSet set4_1 = {100,  60, 55555};
+const InputSet set4_2 = { 10,  60, 55555};
+const InputSet set5   = {200,  40, 66666};
+const InputSet set00  = {  0,  40, 66666};
+
+// Cap passed to SetTMMBRBoundingSetToSend().
+const int maxBitrate = 230;  // if this is lower than max in the list above test should fail
+
+// Asserts that entry `index` of `boundingSet` matches the expected tuple
+// (TMMBR value, packet overhead, and SSRC) from `set`.
+void Verify(TMMBRSet* boundingSet, int index, InputSet set)
+{
+    assert(boundingSet->ptrTmmbrSet[index]    == set.TMMBR);
+    assert(boundingSet->ptrPacketOHSet[index] == set.packetOH);
+    assert(boundingSet->ptrSsrcSet[index]     == set.SSRC);
+};
+
+// Walks a compound RTCP packet and extracts any TMMBN bounding set into
+// `boundingSet`. Returns the number of TMMBN items found, 0 for an empty
+// TMMBN, or -1 if the packet contains no TMMBN at all.
+// The TMMBN header is also asserted to carry zero sender/media SSRCs, which is
+// what these loopback modules emit.
+// NOTE(review): if a TMMBN item ever arrived before its header, numItems would
+// still be -1 and the array writes below would index out of bounds; the parser
+// presumably never yields that order -- confirm against RTCPParserV2.
+int ParseRTCPPacket(const void *data, int len, TMMBRSet*& boundingSet)
+{
+    int numItems = -1;
+    RTCPUtility::RTCPParserV2 rtcpParser((const WebRtc_UWord8*)data, len, true);
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Begin();
+    while (pktType != RTCPUtility::kRtcpNotValidCode)
+    {
+        const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+        if (pktType == RTCPUtility::kRtcpRtpfbTmmbnCode)
+        {
+            assert(0 == rtcpPacket.TMMBN.SenderSSRC);
+            assert(0 == rtcpPacket.TMMBN.MediaSSRC);
+            numItems = 0;
+        }
+        if (pktType == RTCPUtility::kRtcpRtpfbTmmbnItemCode)
+        {
+            boundingSet->ptrTmmbrSet[numItems]    = rtcpPacket.TMMBNItem.MaxTotalMediaBitRate;
+            boundingSet->ptrPacketOHSet[numItems] = rtcpPacket.TMMBNItem.MeasuredOverhead;
+            boundingSet->ptrSsrcSet[numItems]     = rtcpPacket.TMMBNItem.SSRC;
+            ++numItems;
+        }
+        pktType = rtcpParser.Iterate();
+    }
+    return numItems;
+};
+
+// Loads a stored RTCP packet from disk into the global `dataFile` buffer.
+// Returns the file length in bytes, or 0 when `fileName` is empty (callers
+// use 0 as "nothing to inject this round").
+// Error handling is assert-only: with NDEBUG defined, a missing file or an
+// oversized file would proceed undetected. The fread() return value is also
+// ignored, so a short read is not diagnosed.
+WebRtc_Word32 GetFile(char* fileName)
+{
+    if (!fileName[0])
+    {
+        return 0;
+    }
+
+    FILE* openFile = fopen(fileName, "rb");
+    assert(openFile != NULL);
+    // Determine file size via seek-to-end; truncated through a 16-bit cast.
+    fseek(openFile, 0, SEEK_END);
+    int len = (WebRtc_Word16)(ftell(openFile));
+    rewind(openFile);
+    assert(len > 0 && len < maxFileLen);
+    fread(dataFile, 1, len, openFile);
+    fclose(openFile);
+    return len;
+};
+
+
+// Loopback transport for the multi-module part of the test: every packet the
+// module "sends" is fed straight back into the same module's IncomingPacket().
+// Inherits TMMBRHelp privately only to reuse its BoundingSet() storage when
+// parsing the outgoing RTCP.
+class LoopBackTransport2 : public webrtc::Transport, private TMMBRHelp
+{
+public:
+    LoopBackTransport2(RtpRtcp* rtpRtcpModule)  :
+      TMMBRHelp(false),
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    // RTP path: echo the packet back into the module. Returns len on success,
+    // -1 on failure (the webrtc::Transport convention).
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        if( 0  == _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len))
+        {
+            return len;
+        }
+        return -1;
+    }
+    // RTCP path: on the first compound packet, verify the TMMBN bounding set
+    // is {set4, set2}, then inject the stored TMMBR request
+    // "RTCPPacketTMMBR3.bin" back into the module. Later packets (no file
+    // selected) are acknowledged with a dummy return of 1.
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+        TMMBRSet* boundingSet = BoundingSet();
+        boundingSet->VerifyAndAllocateSet(3);
+
+        if (_cnt == 0)
+        {
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {2,4,0} -> {4,2}
+            assert(2 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set4);
+            Verify(boundingSet, 1, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR3.bin");
+        }
+
+        ++_cnt;
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 1;
+        }
+
+        // Send in bitrate request
+        if(_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len) == 0)
+        {
+            return len;
+        }
+        return -1;
+    }
+    RtpRtcp* _rtpRtcpModule;   // module this transport loops traffic back into
+    WebRtc_UWord32       _cnt; // number of RTCP sends observed so far
+};
+
+
+// Scripted loopback transport driving the single-module TMMBR scenario.
+// Each outgoing RTCP compound packet is checked (by `_cnt` stage) for the
+// expected TMMBN bounding set, and then the next stored TMMBR request file is
+// injected back into the module to advance the scenario.
+// Inherits TMMBRHelp privately only to reuse its BoundingSet() storage.
+class LoopBackTransportVideo : public webrtc::Transport, private TMMBRHelp
+{
+public:
+    LoopBackTransportVideo(RtpRtcp* rtpRtcpModule)  :
+      TMMBRHelp(false),
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    // RTP path: echo the packet back into the module. Returns len on success,
+    // -1 on failure (the webrtc::Transport convention).
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        if(_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len)== 0)
+        {
+            return len;
+        }
+        return -1;
+    }
+    // RTCP path: verify the TMMBN emitted at this stage, then (when a file
+    // name was selected) read the stored TMMBR packet and feed it back in.
+    // Returns the stored packet length, 1 when nothing was injected, or -1 on
+    // IncomingPacket failure.
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+        TMMBRSet* boundingSet = BoundingSet();
+        boundingSet->VerifyAndAllocateSet(3);
+
+        if (_cnt == 0)
+        {
+            // First RTCP has no TMMBN to check; just kick off the scenario.
+            strcpy(fileName, "RTCPPacketTMMBR0.bin");
+        }
+        else if (_cnt == 1)
+        {
+            // TMMBN {0} -> {0}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set0);
+
+            strcpy(fileName, "RTCPPacketTMMBR1.bin");
+        }
+        else if (_cnt == 2)
+        {
+            // TMMBN {0,1} -> {1}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set1);
+
+            strcpy(fileName, "RTCPPacketTMMBR2.bin");
+        }
+        else if (_cnt == 3)
+        {
+            // TMMBN {0,1,2} -> {2}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR3.bin");
+        }
+        else if (_cnt == 4)
+        {
+            // TMMBN {0,1,2,3} -> {3,2}
+            assert(2 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR4.bin");
+        }
+        else if (_cnt == 5)
+        {
+            // TMMBN {0,1,2,3,4} -> {3,4,2}
+            assert(3 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set4);
+            Verify(boundingSet, 2, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR5.bin");
+        }
+        else if (_cnt == 6)
+        {
+            // TMMBN {0,1,2,3,4,5} -> {3,4,2}
+            assert(3 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set4);
+            Verify(boundingSet, 2, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR4_2.bin");
+        }
+        else if (_cnt == 7)
+        {
+            // TMMBN {0,1,2,3,4_2,5} -> {4_2}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set4_2);
+
+            // Re-enter SendRTCPPacket synchronously via Process(): the nested
+            // call runs as stage 8, and the ++_cnt after this if/else chain
+            // then bumps the counter past 9 -- which is why there is no
+            // _cnt == 9 branch below.
+            ++_cnt;
+            ::Sleep(5*RTCP_INTERVAL_AUDIO_MS + 1000); // time out receiver
+            _rtpRtcpModule->Process();             // SendRTCP() (_cnt == 8)
+                                                   // a receiver has timed out -> UpdateTMMBR()
+        }
+        else if (_cnt == 8)
+        {
+            // No TMMBN in this packet
+            assert(-1 == ParseRTCPPacket(data, len, boundingSet));
+        }
+        else if (_cnt == 10)
+        {
+            // TMMBN {} -> {}, empty set
+            assert(0 == ParseRTCPPacket(data, len, boundingSet));
+
+            strcpy(fileName, "RTCPPacketTMMBR2.bin");
+        }
+        else if (_cnt == 11)
+        {
+            // TMMBN {2} -> {2}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set2);
+        }
+        else if (_cnt == 12) // ----- multi module -------------
+        {
+            // No TMMBN in this packet
+            assert(-1 == ParseRTCPPacket(data, len, boundingSet));
+
+            strcpy(fileName, "RTCPPacketTMMBR4.bin");
+        }
+        else if (_cnt == 13)
+        {
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {2,4} -> {4,2}
+            assert(2 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set4);
+            Verify(boundingSet, 1, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR0.bin");
+        }
+        else if (_cnt == 14)
+        {
+            // TMMBN {}
+            // TMMBN {3}
+            // TMMBN {}
+            // TMMBN {2,4,0} -> {3,4,2}
+            assert(3 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set4);
+            Verify(boundingSet, 2, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR1.bin");
+        }
+        //else if (_cnt == 15)
+        //{
+        //    // TMMBN {}
+        //    // TMMBN {}
+        //    // TMMBN {}
+        //    // TMMBN {2,4,0,1} -> {4,2}
+        //    //assert(2 == ParseRTCPPacket(data, len, boundingSet));
+        //    //Verify(boundingSet, 0, set4);
+        //    //Verify(boundingSet, 1, set2);
+        //}
+        //else if (_cnt == 15)
+        //{
+        //    // No TMMBN in this packet
+        //    assert(-1 == ParseRTCPPacket(data, len, boundingSet));
+        //}
+        else if (_cnt == 15)
+        {
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {} -> {}, empty set
+            assert(0 == ParseRTCPPacket(data, len, boundingSet));
+        }
+
+        ++_cnt;
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 1;
+        }
+
+        // Send in bitrate request
+        if( 0 == _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len))
+        {
+            return len;
+        }
+        return -1;
+    }
+
+    RtpRtcp* _rtpRtcpModule;   // module this transport loops traffic back into
+    WebRtc_UWord32       _cnt; // scenario stage counter (see SendRTCPPacket)
+};
+
+// Direct unit test of the TMMBRHelp bounding-set algorithm (no RTP module
+// involved). Inherits TMMBRHelp privately to reach its protected candidate /
+// bounding sets. Start() repeatedly fills the candidate set with combinations
+// of the setN constants and asserts which entries survive into the bounding
+// set, who "owns" it, and the min/max net bitrates derived from it.
+class TestTMMBR : private TMMBRHelp
+{
+public:
+    TestTMMBR() : TMMBRHelp(false) {};
+
+    // Writes one InputSet tuple into slot `index` of the candidate set.
+    void Add(TMMBRSet* candidateSet, int index, InputSet set)
+    {
+        candidateSet->ptrTmmbrSet[index]    = set.TMMBR;
+        candidateSet->ptrPacketOHSet[index] = set.packetOH;
+        candidateSet->ptrSsrcSet[index]     = set.SSRC;
+    };
+
+    // Runs the whole assert-based scenario; aborts on the first failure.
+    void Start()
+    {
+        // Get sets
+        TMMBRSet* candidateSet = CandidateSet();
+        assert(0 == candidateSet->sizeOfSet);
+        TMMBRSet* boundingSet = BoundingSet();
+        assert(0 == boundingSet->sizeOfSet);
+        TMMBRSet* boundingSetToSend = BoundingSetToSend();
+        assert(0 == boundingSetToSend->sizeOfSet);
+
+        WebRtc_Word32 numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(0 == numBoundingSet); // should be empty
+
+        assert( 0 == SetTMMBRBoundingSetToSend(NULL,0));        // ok to send empty set
+        assert( 0 == SetTMMBRBoundingSetToSend(boundingSet,0)); // ok to send empty set
+
+        WebRtc_UWord32 minBitrateKbit = 0;
+        WebRtc_UWord32 maxBitrateKbit = 0;
+        // NOTE(review): this call passes (0, 0, 1, false, ...) while every
+        // later call uses the shape (0, n, bool, 0, ...) -- the argument order
+        // looks inconsistent; confirm against the CalcMinMaxBitRate signature.
+        assert(-1 == CalcMinMaxBitRate(0, 0, 1, false, minBitrateKbit, maxBitrateKbit)); // no bounding set
+
+        // ---------------------------------
+        // Test candidate set {0} -> {0}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(1);
+        assert(1 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set0);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, 0));   // incorrect length
+        assert(!IsOwner(set1.SSRC, 100)); // incorrect length
+
+        assert( IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set0);
+
+        // Get net bitrate depending on packet rate
+        assert( 0 == CalcMinMaxBitRate(0, numBoundingSet, false,0, minBitrateKbit, maxBitrateKbit));
+        assert(set0.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+        assert(0 == CalcMinMaxBitRate(0, 100, false,0, minBitrateKbit, maxBitrateKbit));  // incorrect length
+        assert(set0.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1} -> {1}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(2);
+        assert(2 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set1);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert( IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set1);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set1.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2} -> {2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(3);
+        assert(3 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set2.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2,3} -> {3,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(4 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+        Add(candidateSet, 3, set3);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2,3,4} -> {3,4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(5 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+        Add(candidateSet, 3, set3);
+        Add(candidateSet, 4, set4);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(3 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+        Verify(boundingSet, 2, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+        Verify(boundingSetToSend, 2, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2,3,4,5} -> {3,4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(6);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+        Add(candidateSet, 3, set3);
+        Add(candidateSet, 4, set4);
+        Add(candidateSet, 5, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(3 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+        Verify(boundingSet, 2, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+        Verify(boundingSetToSend, 2, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,4,5} -> {3,4,2}
+        // ---------------------------------
+        // NOTE(review): from here on, allocating fewer than 6 slots still
+        // leaves sizeOfSet == 6 (see the asserts below) -- the candidate set
+        // apparently never shrinks once grown; the Add() calls only overwrite
+        // the leading entries, and stale tail entries remain in the set.
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set4);
+        Add(candidateSet, 4, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(3 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+        Verify(boundingSet, 2, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+        Verify(boundingSetToSend, 2, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+
+        // ---------------------------------
+        // Test candidate set {1,3,4,5} -> {3,4}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set3);
+        Add(candidateSet, 2, set4);
+        Add(candidateSet, 3, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet,true,0,  minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,4,5} -> {4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set4);
+        Add(candidateSet, 3, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set4);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set4);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set4.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,5} -> {3,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,4_1,5} -> {3,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set4_1);
+        Add(candidateSet, 4, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,4_2,5} -> {4_2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set4_2);
+        Add(candidateSet, 4, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set4_2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set4_2);
+
+        // Get net bitrate depending on packet rate
+        // set4_2's 10 kbit request is clamped up to the management floor.
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(MIN_VIDEO_BW_MANAGEMENT_BITRATE == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {} -> {}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(0);
+        assert(6 == candidateSet->sizeOfSet);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(0 == numBoundingSet);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+
+        // Get net bitrate depending on packet rate
+        assert(-1 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+
+        // ---------------------------------
+        // Test candidate set {x0,5} -> {5}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(2);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set00);
+        Add(candidateSet, 1, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set5);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert( IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set5);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set5.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {x0,4,2} -> {4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(3);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set00);
+        Add(candidateSet, 1, set4);
+        Add(candidateSet, 2, set2);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set4);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set4);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set4.TMMBR == minBitrateKbit);
+        assert(set2.TMMBR == maxBitrateKbit);
+    };
+};
+
+class NULLDataZink: public RtpData // No-op payload sink ("Zink" [sic]); swallows incoming RTP payloads during the test.
+{
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                              const WebRtc_UWord16 payloadSize,
+                                              const webrtc::WebRtcRTPHeader* rtpHeader,
+                                              const WebRtc_UWord8* incomingRtpPacket,
+                                              const WebRtc_UWord16 incomingRtpPacketLengt) // NOTE(review): "Lengt" typo kept; signature must match RtpData's declaration -- confirm.
+    {
+        return 0; // Always report success; the payload is intentionally discarded.
+    };
+};
+
+
+int _tmain(int argc, _TCHAR* argv[]) // Manual test driver: TMMBRHelp class test, then single- and multi-module TMMBR flows.
+{
+
+    std::string str; // NOTE(review): unused.
+    std::cout << "------------------------" << std::endl;
+    std::cout << "------ Test TMMBR ------" << std::endl;
+    std::cout << "------------------------" << std::endl;
+    std::cout << "  "  << std::endl;
+
+    // --------------------
+    // Test TMMBRHelp class
+    // --------------------
+    TestTMMBR test;
+    test.Start();
+
+    printf("TMMBRHelp-class test done.\n");
+
+    // ------------------------
+    // Test TMMBR single module
+    // ------------------------
+    RtpRtcp* rtpRtcpModuleVideo = RtpRtcp::CreateRtpRtcp(0, false); // id 0; second argument presumably an "audio" flag -- confirm.
+
+    LoopBackTransportVideo* myLoopBackTransportVideo = new LoopBackTransportVideo(rtpRtcpModuleVideo); // NOTE(review): never deleted (test-only leak).
+    assert(0 == rtpRtcpModuleVideo->RegisterSendTransport(myLoopBackTransportVideo));
+
+    assert(false == rtpRtcpModuleVideo->TMMBR());
+    rtpRtcpModuleVideo->SetTMMBRStatus(true);
+    assert(true == rtpRtcpModuleVideo->TMMBR());
+
+    assert(0 == rtpRtcpModuleVideo->RegisterSendPayload( "I420", 96));
+    assert(0 == rtpRtcpModuleVideo->RegisterReceivePayload( "I420", 96));
+
+    // send a RTP packet with SSRC 11111 to get 11111 as the received SSRC
+    assert(0 == rtpRtcpModuleVideo->SetSSRC(11111));
+    const WebRtc_UWord8 testStream[9] = "testtest"; // 8 payload bytes plus the NUL terminator.
+    assert(0 == rtpRtcpModuleVideo->RegisterIncomingDataCallback(new NULLDataZink())); // needed to avoid error from parsing the incoming stream
+    assert(0 == rtpRtcpModuleVideo->SendOutgoingData(webrtc::kVideoFrameKey,96, 0, testStream, 8));
+
+    // set the SSRC to 0
+    assert(0 == rtpRtcpModuleVideo->SetSSRC(0));
+
+    // Enable compound RTCP.
+    assert(0 == rtpRtcpModuleVideo->SetRTCPStatus(kRtcpCompound));
+
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {0}                     // should this make us remember a TMMBR?
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {1},   verify TMMBN {0}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {2},   verify TMMBN {1}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {3},   verify TMMBN {2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {4},   verify TMMBN {3,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {5},   verify TMMBN {3,4,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {4_2}, verify TMMBN {3,4,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP()); // -> time out receivers,   verify TMMBN {4_2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // ->                       verify TMMBN {2}
+
+    printf("Single module test done.\n");
+
+    // ------------------------
+    // Test TMMBR multi module
+    // ------------------------
+    RtpRtcp* rtpRtcpModuleVideoDef = RtpRtcp::CreateRtpRtcp(10, false); // Default (parent) module for the group below.
+    assert(0 == rtpRtcpModuleVideo->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    RtpRtcp* rtpRtcpModuleVideo1 = RtpRtcp::CreateRtpRtcp(1, false);
+    assert(0 == rtpRtcpModuleVideo1->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    RtpRtcp* rtpRtcpModuleVideo2 = RtpRtcp::CreateRtpRtcp(2, false);
+    assert(0 == rtpRtcpModuleVideo2->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    RtpRtcp* rtpRtcpModuleVideo3 = RtpRtcp::CreateRtpRtcp(3, false);
+    assert(0 == rtpRtcpModuleVideo3->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    LoopBackTransport2* myLoopBackTransport2 = new LoopBackTransport2(rtpRtcpModuleVideo2); // NOTE(review): never deleted (test-only leak).
+    assert(0 == rtpRtcpModuleVideo2->RegisterSendTransport(myLoopBackTransport2));
+
+    assert(0 == rtpRtcpModuleVideo2->SetRTCPStatus(kRtcpCompound));
+
+    // set the SSRC to 0
+    assert(0 == rtpRtcpModuleVideo2->SetSSRC(0));
+
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // -> incoming TMMBR {4}, verify no TMMBN in this packet
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // -> incoming TMMBR {0}, verify TMMBN {4,2}
+    assert(0 == rtpRtcpModuleVideo2->SendRTCP());  // -> incoming TMMBR {3}, verify TMMBN {4,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // -> incoming TMMBR {1}, verify TMMBN {3,4,2}
+    ::Sleep(5*RTCP_INTERVAL_AUDIO_MS + 1000); // Wait long enough for the receiver timeout to elapse.
+    rtpRtcpModuleVideo2->Process();                // time out receiver2 -> UpdateTMMBR()
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // verify TMMBN {}
+
+    printf("Multi module test done.\n");
+
+
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo1);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo2);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo3);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideoDef);
+
+    TEST_PASSED();
+    ::Sleep(5000); // NOTE(review): purpose unclear -- presumably lets worker threads drain before exit; confirm.
+
+    return 0;
+}
+
diff --git a/trunk/src/modules/rtp_rtcp/test/test_bwe/test_bwe.gypi b/trunk/src/modules/rtp_rtcp/test/test_bwe/test_bwe.gypi
new file mode 100644
index 0000000..d165e7d
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/test_bwe/test_bwe.gypi
@@ -0,0 +1,35 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'test_bwe',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../../source',  # Private source dir; presumably needed for Bitrate.h -- confirm.
+      ],
+      'sources': [
+        'unit_test.cc',
+        '../../source/bitrate.cc',  # Unit under test, compiled directly into the test binary.
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/rtp_rtcp/test/test_bwe/unit_test.cc b/trunk/src/modules/rtp_rtcp/test/test_bwe/unit_test.cc
new file mode 100644
index 0000000..3adb099
--- /dev/null
+++ b/trunk/src/modules/rtp_rtcp/test/test_bwe/unit_test.cc
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the bandwidth estimation and management
+ */
+
+#include <gtest/gtest.h>
+
+#include "typedefs.h"
+#include "Bitrate.h"
+
+namespace {
+
+using webrtc::BitRateStats;
+
+class BitRateStatsTest : public ::testing::Test // Fixture: a fresh BitRateStats instance per test.
+{
+protected:
+    BitRateStatsTest() {};
+    BitRateStats   bitRate;
+};
+
+TEST_F(BitRateStatsTest, TestStrictMode)
+{
+    WebRtc_Word64 nowMs = 0;
+    // Should be initialized to 0.
+    EXPECT_EQ(0u, bitRate.BitRate(nowMs));
+    bitRate.Update(1500, nowMs);
+    // Expecting 12 kbps: one 1500 byte (12000 bit) packet in the rate window.
+    EXPECT_EQ(12000u, bitRate.BitRate(nowMs));
+    bitRate.Init();
+    // Expecting 0 after init.
+    EXPECT_EQ(0u, bitRate.BitRate(nowMs));
+    for (int i = 0; i < 100000; ++i)
+    {
+        if (nowMs % 10 == 0)
+            bitRate.Update(1500, nowMs); // 1500 bytes every 10 ms == 1.2 Mbps steady state.
+        // Approximately 1200 kbps expected. Not exact since when packets
+        // are removed we will jump 10 ms to the next packet.
+        if (nowMs > 0 && nowMs % 2000 == 0)
+            EXPECT_NEAR(1200000u, bitRate.BitRate(nowMs), 6000u);
+        nowMs += 1;
+    }
+    nowMs += 2000;
+    // The window is 2 seconds. If nothing has been received for that time
+    // the estimate should be 0.
+    EXPECT_EQ(0u, bitRate.BitRate(nowMs));
+}
+
+}
diff --git a/trunk/src/modules/udp_transport/OWNERS b/trunk/src/modules/udp_transport/OWNERS
new file mode 100644
index 0000000..3b2a444
--- /dev/null
+++ b/trunk/src/modules/udp_transport/OWNERS
@@ -0,0 +1,4 @@
+pwestin@webrtc.org

+henrikg@webrtc.org

+mallinath@webrtc.org

+tomasl@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/modules/udp_transport/interface/udp_transport.h b/trunk/src/modules/udp_transport/interface/udp_transport.h
new file mode 100644
index 0000000..6596fde
--- /dev/null
+++ b/trunk/src/modules/udp_transport/interface/udp_transport.h
@@ -0,0 +1,386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_INTERFACE_UDP_TRANSPORT_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_INTERFACE_UDP_TRANSPORT_H_
+
+#include "common_types.h"
+#include "module.h"
+#include "typedefs.h"
+
+#define SS_MAXSIZE 128
+#define SS_ALIGNSIZE (sizeof (WebRtc_UWord64))
+#define SS_PAD1SIZE  (SS_ALIGNSIZE - sizeof(WebRtc_Word16))
+#define SS_PAD2SIZE  (SS_MAXSIZE - (sizeof(WebRtc_Word16) + SS_PAD1SIZE +\
+                                    SS_ALIGNSIZE))
+
+// BSD requires use of HAVE_STRUCT_SOCKADDR_SA_LEN
+namespace webrtc {
+struct SocketAddressIn // Layout mirrors BSD sockaddr_in (IPv4).
+{
+    // sin_family should be either AF_INET (IPv4) or AF_INET6 (IPv6)
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+    WebRtc_Word8      sin_length;
+    WebRtc_Word8      sin_family;
+#else
+    WebRtc_Word16     sin_family;
+#endif
+    WebRtc_UWord16    sin_port;
+    WebRtc_UWord32    sin_addr;
+    WebRtc_Word8      sin_zero[8];
+};
+
+struct Version6InAddress // 128-bit IPv6 address viewable as bytes, 32-bit or 64-bit words (mirrors in6_addr).
+{
+    union
+    {
+        WebRtc_UWord8     _s6_u8[16];
+        WebRtc_UWord32    _s6_u32[4];
+        WebRtc_UWord64    _s6_u64[2];
+    } Version6AddressUnion;
+};
+
+struct SocketAddressInVersion6 // Layout mirrors BSD sockaddr_in6 (IPv6).
+{
+    // sin_family should be either AF_INET (IPv4) or AF_INET6 (IPv6)
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+    WebRtc_Word8      sin_length;
+    WebRtc_Word8      sin_family;
+#else
+    WebRtc_Word16     sin_family;
+#endif
+    // Transport layer port number.
+    WebRtc_UWord16 sin6_port;
+    // IPv6 traffic class and flow info or ip4 address.
+    WebRtc_UWord32 sin6_flowinfo;
+    // IPv6 address
+    struct Version6InAddress sin6_addr;
+    // Set of interfaces for a scope.
+    WebRtc_UWord32 sin6_scope_id;
+};
+
+struct SocketAddressStorage // Mirrors sockaddr_storage: padded and aligned to hold any address family.
+{
+    // sin_family should be either AF_INET (IPv4) or AF_INET6 (IPv6)
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+    WebRtc_Word8   sin_length;
+    WebRtc_Word8   sin_family;
+#else
+    WebRtc_Word16  sin_family;
+#endif
+    WebRtc_Word8   __ss_pad1[SS_PAD1SIZE];
+    WebRtc_UWord64 __ss_align;
+    WebRtc_Word8   __ss_pad2[SS_PAD2SIZE];
+};
+
+struct SocketAddress // Holds either an IPv4 or IPv6 endpoint; every member starts with a common family field.
+{
+    union
+    {
+        struct SocketAddressIn _sockaddr_in;
+        struct SocketAddressInVersion6 _sockaddr_in6;
+        struct SocketAddressStorage _sockaddr_storage;
+    };
+};
+
+// Callback class that receives packets from UdpTransport.
+class UdpTransportData
+{
+public:
+    virtual ~UdpTransportData()  {};
+
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                                   const WebRtc_Word32 rtpPacketLength,
+                                   const char* fromIP,
+                                   const WebRtc_UWord16 fromPort) = 0; // Called per received RTP packet, with the sender's IP/port.
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                                    const WebRtc_Word32 rtcpPacketLength,
+                                    const char* fromIP,
+                                    const WebRtc_UWord16 fromPort) = 0; // Called per received RTCP packet, with the sender's IP/port.
+};
+
+
+class UdpTransport : public Module, public Transport
+{
+public:
+    enum
+    {
+        kIpAddressVersion6Length = 64,
+        kIpAddressVersion4Length = 16
+    };
+    enum ErrorCode
+    {
+        kNoSocketError            = 0,
+        kFailedToBindPort         = 1,
+        kIpAddressInvalid         = 2,
+        kAddressInvalid           = 3,
+        kSocketInvalid            = 4,
+        kPortInvalid              = 5,
+        kTosInvalid               = 6,
+        kMulticastAddressInvalid  = 7,
+        kQosError                 = 8,
+        kSocketAlreadyInitialized = 9,
+        kIpVersion6Error          = 10,
+        FILTER_ERROR              = 11,
+        kStartReceiveError        = 12,
+        kStopReceiveError         = 13,
+        kCannotFindLocalIp        = 14,
+        kTosError                 = 16,
+        kNotInitialized           = 17,
+        kPcpError                 = 18
+    };
+
+    // Factory method. Constructor disabled.
+    static UdpTransport* Create(const WebRtc_Word32 id,
+                                WebRtc_UWord8& numSocketThreads);
+    static void Destroy(UdpTransport* module);
+
+    // Prepares the class for sending RTP packets to ipAddr:rtpPort and RTCP
+    // packets to ipAddr:rtpPort+1 if rtcpPort is zero. Otherwise to
+    // ipAddr:rtcpPort.
+    virtual WebRtc_Word32 InitializeSendSockets(
+        const char* ipAddr,
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Register packetCallback for receiving incoming packets. Set the local
+    // RTP port to rtpPort. Bind local IP address to ipAddr. If ipAddr is NULL
+    // bind to local IP ANY. Set the local rtcp port to rtcpPort or rtpPort + 1
+    // if rtcpPort is 0.
+    virtual WebRtc_Word32 InitializeReceiveSockets(
+        UdpTransportData* const packetCallback,
+        const WebRtc_UWord16 rtpPort,
+        const char* ipAddr = NULL,
+        const char* multicastIpAddr = NULL,
+        const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Set local RTP port to rtpPort and RTCP port to rtcpPort or rtpPort + 1 if
+    // rtcpPort is 0. These ports will be used for sending instead of the local
+    // ports set by InitializeReceiveSockets(..).
+    virtual WebRtc_Word32 InitializeSourcePorts(
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Retrieve local ports used for sending if other than the ports specified
+    // by InitializeReceiveSockets(..). rtpPort is set to the RTP port.
+    // rtcpPort is set to the RTCP port.
+    virtual WebRtc_Word32 SourcePorts(WebRtc_UWord16& rtpPort,
+                                      WebRtc_UWord16& rtcpPort) const = 0;
+
+    // Set ipAddr to the IP address that is currently being listened on. rtpPort
+    // to the RTP port listened to. rtcpPort to the RTCP port listened on.
+    // multicastIpAddr to the multicast IP address group joined (the address
+    // is NULL terminated).
+    virtual WebRtc_Word32 ReceiveSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort,
+        char multicastIpAddr[kIpAddressVersion6Length]) const = 0;
+
+    // Set ipAddr to the IP address being sent from. rtpPort to the local RTP
+    // port used for sending and rtcpPort to the local RTCP port used for
+    // sending.
+    virtual WebRtc_Word32 SendSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const = 0;
+
+    // Put the IP address, RTP port and RTCP port from the last received packet
+    // into ipAddr, rtpPort and rtcpPort respectively.
+    virtual WebRtc_Word32 RemoteSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const = 0;
+
+    // Enable/disable quality of service if QoS is true or false respectively.
+    // Set the type of service to serviceType, max bitrate in kbit/s to
+    // maxBitrate and override DSCP if overrideDSCP is not 0.
+    // Note: Must be called after both InitializeSendSockets() and
+    // InitializeReceiveSockets() has been called.
+    virtual WebRtc_Word32 SetQoS(const bool QoS,
+                                 const WebRtc_Word32 serviceType,
+                                 const WebRtc_UWord32 maxBitrate = 0,
+                                 const WebRtc_Word32 overrideDSCP = 0,
+                                 const bool audio = false) = 0;
+
+    // Set QoS to true if quality of service has been turned on. If QoS is true,
+    // also set serviceType to type of service and overrideDSCP to override
+    // DSCP.
+    virtual WebRtc_Word32 QoS(bool& QoS,
+                              WebRtc_Word32& serviceType,
+                              WebRtc_Word32& overrideDSCP) const = 0;
+
+    // Set type of service.
+    virtual WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP,
+                                 const bool useSetSockOpt = false) = 0;
+
+    // Get type of service configuration.
+    virtual WebRtc_Word32 ToS(WebRtc_Word32& DSCP,
+                              bool& useSetSockOpt) const = 0;
+
+    // Set Priority Code Point (IEEE 802.1Q)
+    // Note: for Linux this function will set the priority for the socket,
+    // which then can be mapped to a PCP value with vconfig.
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 PCP) = 0;
+
+    // Get Priority Code Point
+    virtual WebRtc_Word32 PCP(WebRtc_Word32& PCP) const = 0;
+
+    // Enable IPv6.
+    // Note: this API must be called before any call to
+    // InitializeReceiveSockets() or InitializeSendSockets(). It is not
+    // possible to go back to IPv4 (default) after this call.
+    virtual WebRtc_Word32 EnableIpV6() = 0;
+
+    // Return true if IPv6 has been enabled.
+    virtual bool IpV6Enabled() const = 0;
+
+    // Only allow packets received from filterIPAddress to be processed.
+    // Note: must be called after EnableIPv6(), if IPv6 is used.
+    virtual WebRtc_Word32 SetFilterIP(
+        const char filterIPAddress[kIpAddressVersion6Length]) = 0;
+
+    // Write the filter IP address (if any) to filterIPAddress.
+    virtual WebRtc_Word32 FilterIP(
+        char filterIPAddress[kIpAddressVersion6Length]) const = 0;
+
+    // Only allow RTP packets from rtpFilterPort and RTCP packets from
+    // rtcpFilterPort be processed.
+    // Note: must be called after EnableIPv6(), if IPv6 is used.
+    virtual WebRtc_Word32 SetFilterPorts(
+        const WebRtc_UWord16 rtpFilterPort,
+        const WebRtc_UWord16 rtcpFilterPort) = 0;
+
+    // Set rtpFilterPort to the filter RTP port and rtcpFilterPort to the
+    // filter RTCP port (if filtering based on port is enabled).
+    virtual WebRtc_Word32 FilterPorts(WebRtc_UWord16& rtpFilterPort,
+                                      WebRtc_UWord16& rtcpFilterPort) const = 0;
+
+    // Set the number of buffers that the socket implementation may use for
+    // receiving packets to numberOfSocketBuffers. I.e. the number of packets
+    // that can be received in parallel.
+    // Note: this API only has effect on Windows.
+    virtual WebRtc_Word32 StartReceiving(
+        const WebRtc_UWord32 numberOfSocketBuffers) = 0;
+
+    // Stop receiving incoming packets.
+    virtual WebRtc_Word32 StopReceiving() = 0;
+
+    // Return true if incoming packets are being received.
+    virtual bool Receiving() const = 0;
+
+    // Return true if send sockets have been initialized.
+    virtual bool SendSocketsInitialized() const = 0;
+
+    // Return true if local ports for sending has been set.
+    virtual bool SourcePortsInitialized() const = 0;
+
+    // Return true if receive sockets have been initialized.
+    virtual bool ReceiveSocketsInitialized() const = 0;
+
+    // Send data with size length to ip:portnr. The same port as the set
+    // with InitializeSendSockets(..) is used if portnr is 0. The same IP
+    // address as set with InitializeSendSockets(..) is used if ip is NULL.
+    // If isRTCP is true the port used will be the RTCP port.
+    virtual WebRtc_Word32 SendRaw(const WebRtc_Word8* data,
+                                  WebRtc_UWord32 length,
+                                  WebRtc_Word32 isRTCP,
+                                  WebRtc_UWord16 portnr = 0,
+                                  const char* ip = NULL) = 0;
+
+    // Send RTP data with size length to the address specified by to.
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8* data,
+                                          WebRtc_UWord32 length,
+                                          const SocketAddress& to) = 0;
+
+
+    // Send RTCP data with size length to the address specified by to.
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8* data,
+                                           WebRtc_UWord32 length,
+                                           const SocketAddress& to) = 0;
+
+    // Send RTP data with size length to ip:rtpPort where ip is the ip set by
+    // the InitializeSendSockets(..) call.
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8* data,
+                                          WebRtc_UWord32 length,
+                                          WebRtc_UWord16 rtpPort) = 0;
+
+
+    // Send RTCP data with size length to ip:rtcpPort where ip is the ip set by
+    // the InitializeSendSockets(..) call.
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8* data,
+                                           WebRtc_UWord32 length,
+                                           WebRtc_UWord16 rtcpPort) = 0;
+
+    // Set the IP address to which packets are sent to ipaddr.
+    virtual WebRtc_Word32 SetSendIP(
+        const char ipaddr[kIpAddressVersion6Length]) = 0;
+
+    // Set the send RTP and RTCP port to rtpPort and rtcpPort respectively.
+    virtual WebRtc_Word32 SetSendPorts(const WebRtc_UWord16 rtpPort,
+                                       const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Retrieve the last registered error code.
+    virtual ErrorCode LastError() const = 0;
+
+    // Put the local IPv4 address in localIP.
+    // Note: this API is for IPv4 only.
+    static WebRtc_Word32 LocalHostAddress(WebRtc_UWord32& localIP);
+
+    // Put the local IP6 address in localIP.
+    // Note: this API is for IPv6 only.
+    static WebRtc_Word32 LocalHostAddressIPV6(char localIP[16]);
+
+    // Return a copy of hostOrder (host order) in network order.
+    static WebRtc_UWord16 Htons(WebRtc_UWord16 hostOrder);
+
+    // Return a copy of hostOrder (host order) in network order.
+    static WebRtc_UWord32 Htonl(WebRtc_UWord32 hostOrder);
+
+    // Return IPv4 address in ip as 32 bit integer.
+    static WebRtc_UWord32 InetAddrIPV4(const char* ip);
+
+    // Convert the character string src into a network address structure in
+    // the af address family and put it in dst.
+    // Note: same functionality as inet_pton(..)
+    static WebRtc_Word32 InetPresentationToNumeric(WebRtc_Word32 af,
+                                                   const char* src,
+                                                   void* dst);
+
+    // Set ip and sourcePort according to address. As input parameter ipSize
+    // is the length of ip. As output parameter it's the number of characters
+    // written to ip (not counting the '\0' character).
+    // Note: this API is only implemented on Windows and Linux.
+    static WebRtc_Word32 IPAddress(const SocketAddress& address,
+                                   char* ip,
+                                   WebRtc_UWord32& ipSize,
+                                   WebRtc_UWord16& sourcePort);
+
+    // Set ip and sourcePort according to address. As input parameter ipSize
+    // is the length of ip. As output parameter it's the number of characters
+    // written to ip (not counting the '\0' character).
+    // Note: this API is only implemented on Windows and Linux.
+    // Additional note: this API caches the address of the last call to it. If
+    // address is likely to be the same for multiple calls it may be beneficial
+    // to call this API instead of IPAddress().
+    virtual WebRtc_Word32 IPAddressCached(const SocketAddress& address,
+                                          char* ip,
+                                          WebRtc_UWord32& ipSize,
+                                          WebRtc_UWord16& sourcePort) = 0;
+
+    // Return true if ipaddr is a valid IP address.
+    // If ipV6 is false ipaddr is interpreted as an IPv4 address otherwise it
+    // is interpreted as IPv6.
+    static bool IsIpAddressValid(const char* ipaddr, const bool ipV6);
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_INTERFACE_UDP_TRANSPORT_H_
diff --git a/trunk/src/modules/udp_transport/source/Android.mk b/trunk/src/modules/udp_transport/source/Android.mk
new file mode 100644
index 0000000..f3cb0e8
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/Android.mk
@@ -0,0 +1,45 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS) # Reset all LOCAL_* variables before defining this module.
+
+include $(LOCAL_PATH)/../../../../android-webrtc.mk # Presumably defines MY_WEBRTC_COMMON_DEFS used below -- confirm.
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_udp_transport
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    udp_transport_impl.cc \
+    udp_socket_wrapper.cc \
+    udp_socket_manager_wrapper.cc \
+    udp_socket_manager_posix.cc \
+    udp_socket_posix.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../../system_wrappers/interface 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT # Only when building inside the Android source tree (not against the NDK).
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY) # Emit libwebrtc_udp_transport as a static library.
diff --git a/trunk/src/modules/udp_transport/source/traffic_control_windows.cc b/trunk/src/modules/udp_transport/source/traffic_control_windows.cc
new file mode 100644
index 0000000..09038c0
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/traffic_control_windows.cc
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "traffic_control_windows.h"
+
+#include <assert.h>
+
+#include "trace.h"
+
+namespace webrtc {
+TrafficControlWindows* TrafficControlWindows::instance = NULL; // Process-wide singleton storage.
+WebRtc_UWord32 TrafficControlWindows::refCounter = 0; // Outstanding GetInstance() references.
+
+TrafficControlWindows::TrafficControlWindows(const WebRtc_Word32 id) : _id(id) // Private; use GetInstance().
+{
+}
+
+TrafficControlWindows* TrafficControlWindows::GetInstance( // Ref-counted accessor for the singleton.
+    const WebRtc_Word32 id)
+{
+    if(instance != NULL) // NOTE(review): refCounter/instance are unsynchronized - confirm callers serialize this.
+    {
+        WEBRTC_TRACE(
+            kTraceDebug,
+            kTraceTransport,
+            id,
+            "TrafficControlWindows - Returning already created object");
+        refCounter++;
+        return instance;
+    }
+
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                 "TrafficControlWindows - Creating new object");
+    instance = new TrafficControlWindows(id);
+    if(instance == NULL) // NOTE(review): normally unreachable - operator new throws on failure.
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                     "TrafficControlWindows - Error allocating memory");
+        return NULL;
+    }
+
+    instance->tcRegister = NULL;
+    instance->tcDeregister = NULL;
+
+    instance->tcEnumerate = NULL;
+    instance->tcOpenInterface = NULL;
+    instance->tcCloseInterface = NULL;
+
+    instance->tcAddFlow = NULL;
+    instance->tcDeleteFlow = NULL;
+
+    instance->tcAddFilter = NULL;
+    instance->tcDeleteFilter = NULL;
+
+    HMODULE trafficLib = LoadLibrary(TEXT("traffic.dll")); // NOTE(review): handle is never stored or freed - leaked until process exit.
+    if(trafficLib == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceTransport,
+            id,
+            "TrafficControlWindows - No QOS support, LoadLibrary returned NULL,\
+ last error: %d\n",
+            GetLastError());
+        delete instance;
+        instance = NULL;
+        return NULL;
+    }
+
+    instance->tcRegister = (registerFn)GetProcAddress(trafficLib,
+                                                      "TcRegisterClient");
+    instance->tcDeregister = (deregisterFn)GetProcAddress(trafficLib,
+                                                          "TcDeregisterClient");
+    instance->tcEnumerate = (enumerateFn)GetProcAddress(
+        trafficLib,
+        "TcEnumerateInterfaces");
+    instance->tcOpenInterface = (openInterfaceFn)GetProcAddress(
+        trafficLib,
+        "TcOpenInterfaceW");
+    instance->tcCloseInterface = (closeInterfaceFn)GetProcAddress(
+        trafficLib,
+        "TcCloseInterface");
+    instance->tcAddFlow = (flowAddFn)GetProcAddress(trafficLib,
+                                                    "TcAddFlow");
+    instance->tcDeleteFlow = (flowDeleteFn)GetProcAddress(trafficLib,
+                                                          "TcDeleteFlow");
+
+    instance->tcAddFilter = (filterAddFn)GetProcAddress(trafficLib,
+                                                        "TcAddFilter");
+    instance->tcDeleteFilter = (filterDeleteFn)GetProcAddress(trafficLib,
+                                                              "TcDeleteFilter");
+
+    if(instance->tcRegister       == NULL || // All entry points must resolve or QoS support is unusable.
+       instance->tcDeregister     == NULL ||
+       instance->tcEnumerate      == NULL ||
+       instance->tcOpenInterface  == NULL ||
+       instance->tcCloseInterface == NULL ||
+       instance->tcAddFlow        == NULL ||
+       instance->tcAddFilter      == NULL ||
+       instance->tcDeleteFlow     == NULL ||
+       instance->tcDeleteFilter   == NULL)
+    {
+        delete instance;
+        instance = NULL;
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            id,
+            "TrafficControlWindows - Could not find function pointer for\
+ traffic control functions");
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            id,
+            "Tcregister    : %x, tcDeregister: %x, tcEnumerate: %x,\
+ tcOpenInterface: %x, tcCloseInterface: %x, tcAddFlow: %x, tcAddFilter: %x,\
+ tcDeleteFlow: %x, tcDeleteFilter: %x",
+            instance->tcRegister,
+            instance->tcDeregister,
+            instance->tcEnumerate,
+            instance->tcOpenInterface,
+            instance->tcCloseInterface,
+            instance->tcAddFlow,
+            instance->tcAddFilter,
+            instance->tcDeleteFlow,
+            instance->tcDeleteFilter );
+        return NULL;
+    }
+    refCounter++;
+    return instance;
+}
+
+void TrafficControlWindows::Release(TrafficControlWindows* gtc) // Drops one reference; deletes singleton at zero.
+{
+    if (0 == refCounter)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, -1,
+                     "TrafficControlWindows - Cannot release, refCounter is 0");
+        return;
+    }
+    if (NULL == gtc)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, -1,
+                     "TrafficControlWindows - Not releasing, gtc is NULL");
+        return;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, gtc->_id,
+                 "TrafficControlWindows - Releasing object");
+    refCounter--;
+    if ((0 == refCounter) && instance) // Last reference gone: tear down the singleton.
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceTransport, gtc->_id,
+                     "TrafficControlWindows - Deleting object");
+        delete instance;
+        instance = NULL;
+    }
+}
+WebRtc_Word32 TrafficControlWindows::ChangeUniqueId(const WebRtc_Word32 id) // Re-tags trace output; always 0.
+{
+    _id = id;
+    return 0;
+}
+
+ULONG TrafficControlWindows::TcRegisterClient( // Thin forwarders to the traffic.dll entry points resolved in GetInstance().
+    ULONG TciVersion,
+    HANDLE ClRegCtx,
+    PTCI_CLIENT_FUNC_LIST ClientHandlerList,
+    PHANDLE pClientHandle)
+{
+    assert(tcRegister != NULL);
+
+    return tcRegister(TciVersion, ClRegCtx, ClientHandlerList, pClientHandle);
+}
+
+ULONG TrafficControlWindows::TcDeregisterClient(HANDLE clientHandle)
+{
+    assert(tcDeregister != NULL);
+
+    return tcDeregister(clientHandle);
+}
+
+
+ULONG TrafficControlWindows::TcEnumerateInterfaces(
+    HANDLE ClientHandle,
+    PULONG pBufferSize,
+    PTC_IFC_DESCRIPTOR interfaceBuffer)
+{
+    assert(tcEnumerate != NULL);
+
+    return tcEnumerate(ClientHandle, pBufferSize, interfaceBuffer);
+}
+
+
+ULONG TrafficControlWindows::TcOpenInterfaceW(LPWSTR pInterfaceName,
+                                              HANDLE ClientHandle,
+                                              HANDLE ClIfcCtx,
+                                              PHANDLE pIfcHandle)
+{
+    assert(tcOpenInterface != NULL);
+
+    return tcOpenInterface(pInterfaceName, ClientHandle, ClIfcCtx, pIfcHandle);
+
+}
+
+ULONG TrafficControlWindows::TcCloseInterface(HANDLE IfcHandle)
+{
+    assert(tcCloseInterface != NULL);
+
+    return tcCloseInterface(IfcHandle);
+}
+
+ULONG TrafficControlWindows::TcAddFlow(HANDLE IfcHandle, HANDLE ClFlowCtx,
+                                       ULONG  Flags, PTC_GEN_FLOW pGenericFlow,
+                                       PHANDLE pFlowHandle)
+{
+    assert(tcAddFlow != NULL);
+    return tcAddFlow(IfcHandle, ClFlowCtx, Flags, pGenericFlow, pFlowHandle);
+}
+
+ULONG TrafficControlWindows::TcAddFilter(HANDLE FlowHandle,
+                                         PTC_GEN_FILTER pGenericFilter,
+                                         PHANDLE pFilterHandle)
+{
+    assert(tcAddFilter != NULL);
+    return tcAddFilter(FlowHandle, pGenericFilter, pFilterHandle);
+}
+
+ULONG TrafficControlWindows::TcDeleteFlow(HANDLE FlowHandle)
+{
+    assert(tcDeleteFlow != NULL);
+    return tcDeleteFlow(FlowHandle);
+
+}
+
+ULONG TrafficControlWindows::TcDeleteFilter(HANDLE FilterHandle)
+{
+    assert(tcDeleteFilter != NULL);
+    return tcDeleteFilter(FilterHandle);
+}
+
+void MyClNotifyHandler(HANDLE ClRegCtx, HANDLE ClIfcCtx, ULONG Event, // Empty handler: TC notifications are ignored.
+                       HANDLE SubCode, ULONG BufSize, PVOID Buffer)
+{
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/traffic_control_windows.h b/trunk/src/modules/udp_transport/source/traffic_control_windows.h
new file mode 100644
index 0000000..cfa52ce
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/traffic_control_windows.h
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_TRAFFIC_CONTROL_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_TRAFFIC_CONTROL_WINDOWS_H_
+
+#ifndef WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN
+#endif
+
+// Disable deprecation warning from traffic.h
+#pragma warning(disable : 4995)
+
+#include <windows.h>
+#include <qos.h>
+#include <ntddndis.h>
+#include <traffic.h>
+
+#include "trace.h"
+
+namespace webrtc {
+void MyClNotifyHandler(HANDLE ClRegCtx, HANDLE ClIfcCtx, ULONG Event,
+                       HANDLE SubCode, ULONG BufSize, PVOID Buffer);
+
+
+typedef ULONG (WINAPI *registerFn)(ULONG, HANDLE, PTCI_CLIENT_FUNC_LIST, // Signatures of traffic.dll entry points resolved via GetProcAddress.
+                                   PHANDLE);
+typedef ULONG (WINAPI *deregisterFn)(HANDLE);
+typedef ULONG (WINAPI *enumerateFn)(HANDLE, PULONG, PTC_IFC_DESCRIPTOR);
+typedef ULONG (WINAPI *openInterfaceFn)(LPWSTR, HANDLE, HANDLE, PHANDLE);
+typedef ULONG (WINAPI *closeInterfaceFn)(HANDLE);
+typedef ULONG (WINAPI *flowAddFn)(HANDLE, HANDLE, ULONG, PTC_GEN_FLOW, PHANDLE);
+typedef ULONG (WINAPI *filterAddFn)(HANDLE, PTC_GEN_FILTER, PHANDLE);
+typedef ULONG (WINAPI *flowDeleteFn)(HANDLE);
+typedef ULONG (WINAPI *filterDeleteFn)(HANDLE);
+
+class TrafficControlWindows // Ref-counted singleton wrapper around the dynamically loaded Traffic Control (QoS) API.
+{
+ public:
+    // Factory method. Constructor disabled.
+    static TrafficControlWindows* GetInstance(const WebRtc_Word32 id);
+    static void Release(TrafficControlWindows* gtc); // Drops one reference; deletes the singleton at zero.
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    ULONG TcRegisterClient(ULONG TciVersion, HANDLE ClRegCtx, // The Tc* methods forward to traffic.dll (assert if unresolved).
+                           PTCI_CLIENT_FUNC_LIST ClientHandlerList,
+                           PHANDLE pClientHandle);
+
+    ULONG TcDeregisterClient(HANDLE clientHandle);
+
+    ULONG TcEnumerateInterfaces(HANDLE ClientHandle, PULONG pBufferSize,
+                                PTC_IFC_DESCRIPTOR interfaceBuffer);
+
+    ULONG TcOpenInterfaceW(LPWSTR pInterfaceName, HANDLE ClientHandle,
+                           HANDLE ClIfcCtx, PHANDLE pIfcHandle);
+
+    ULONG TcCloseInterface(HANDLE IfcHandle);
+
+    ULONG TcAddFlow(HANDLE IfcHandle, HANDLE ClFlowCtx, ULONG Flags,
+                    PTC_GEN_FLOW pGenericFlow, PHANDLE pFlowHandle);
+
+    ULONG TcAddFilter(HANDLE FlowHandle, PTC_GEN_FILTER pGenericFilter,
+                      PHANDLE pFilterHandle);
+
+    ULONG TcDeleteFlow(HANDLE FlowHandle);
+    ULONG TcDeleteFilter(HANDLE FilterHandle);
+private:
+    TrafficControlWindows(const WebRtc_Word32 id);
+    WebRtc_Word32 _id; // Trace id only.
+    TCI_CLIENT_FUNC_LIST QoSFunctions; // NOTE(review): not referenced in the visible .cc - confirm use.
+
+    static TrafficControlWindows* instance; // Process-wide singleton storage.
+
+    registerFn tcRegister;
+    deregisterFn tcDeregister;
+
+    enumerateFn tcEnumerate;
+    openInterfaceFn tcOpenInterface;
+    closeInterfaceFn tcCloseInterface;
+
+    flowAddFn tcAddFlow;
+    flowDeleteFn tcDeleteFlow;
+
+    filterAddFn tcAddFilter;
+    filterDeleteFn tcDeleteFilter;
+
+    static WebRtc_UWord32 refCounter; // Outstanding GetInstance() references.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_TRAFFIC_CONTROL_WINDOWS_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket2_manager_windows.cc b/trunk/src/modules/udp_transport/source/udp_socket2_manager_windows.cc
new file mode 100644
index 0000000..0356331
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket2_manager_windows.cc
@@ -0,0 +1,658 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket2_manager_windows.h"
+
+#include <assert.h>
+#include <stdio.h>
+
+#include "aligned_malloc.h"
+#include "udp_socket2_windows.h"
+
+namespace webrtc {
+WebRtc_UWord32 UdpSocket2ManagerWindows::_numOfActiveManagers = 0; // Managers alive in this process.
+bool UdpSocket2ManagerWindows::_wsaInit = false; // True once WSAStartup succeeded.
+
+UdpSocket2ManagerWindows::UdpSocket2ManagerWindows()
+    : UdpSocketManager(),
+      _id(-1),
+      _stopped(false),
+      _init(false),
+      _pCrit(CriticalSectionWrapper::CreateCriticalSection()),
+      _ioCompletionHandle(NULL),
+      _numActiveSockets(0),
+      _event(EventWrapper::Create())
+{
+    _managerNumber = _numOfActiveManagers++;
+
+    if(_numOfActiveManagers == 1) // First manager initializes Winsock 2.2 for the whole process.
+    {
+        WORD wVersionRequested = MAKEWORD(2, 2);
+        WSADATA wsaData;
+        _wsaInit = WSAStartup(wVersionRequested, &wsaData) == 0;
+        // TODO (hellner): seems safer to use RAII for this. E.g. what happens
+        //                 if a UdpSocket2ManagerWindows() created and destroyed
+        //                 without being initialized.
+    }
+}
+
+UdpSocket2ManagerWindows::~UdpSocket2ManagerWindows() // Waits for sockets to drain, stops workers, tears down Winsock.
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::~UdpSocket2ManagerWindows()",
+                 _managerNumber);
+
+    if(_init)
+    {
+        _pCrit->Enter();
+        if(_numActiveSockets)
+        {
+            _pCrit->Leave();
+            _event->Wait(INFINITE); // Signalled by RemoveSocketPrv() when the last socket goes away.
+        }
+        else
+        {
+            _pCrit->Leave();
+        }
+        StopWorkerThreads();
+
+        // All threads are stopped. Safe to delete them.
+        ListItem* pItem = NULL;
+        while((pItem = _workerThreadsList.First()) != NULL)
+        {
+            delete static_cast<UdpSocket2WorkerWindows*>(pItem->GetItem());
+            _workerThreadsList.PopFront();
+        }
+
+        _ioContextPool.Free();
+
+        _numOfActiveManagers--; // NOTE(review): only decremented when _init is true, but the ctor increments unconditionally - confirm.
+        if(_ioCompletionHandle)
+        {
+            CloseHandle(_ioCompletionHandle);
+        }
+        if (_numOfActiveManagers == 0)
+        {
+            if(_wsaInit)
+            {
+                WSACleanup();
+            }
+        }
+    }
+    if(_pCrit)
+    {
+        delete _pCrit;
+    }
+    if(_event)
+    {
+        delete _event;
+    }
+}
+
+bool UdpSocket2ManagerWindows::Init(WebRtc_Word32 id, // One-shot initialization; false if already initialized.
+                                    WebRtc_UWord8& numOfWorkThreads) {
+  CriticalSectionScoped cs(_pCrit);
+  if ((_id != -1) || (_numOfWorkThreads != 0)) {
+      assert(_id != -1); // NOTE(review): these asserts fire if only ONE of the two was set - confirm intent.
+      assert(_numOfWorkThreads != 0);
+      return false;
+  }
+  _id = id;
+  _numOfWorkThreads = numOfWorkThreads;
+  return true;
+}
+
+WebRtc_Word32 UdpSocket2ManagerWindows::ChangeUniqueId(const WebRtc_Word32 id) // Re-tags trace output; always 0.
+{
+    _id = id;
+    return 0;
+}
+
+bool UdpSocket2ManagerWindows::Start() // Lazily creates workers, then starts every worker thread.
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::Start()",_managerNumber);
+    if(!_init)
+    {
+        StartWorkerThreads(); // First call: create the IOCP and worker objects.
+    }
+
+    if(!_init)
+    {
+        return false; // Lazy initialization failed.
+    }
+    _pCrit->Enter();
+    // Start worker threads; any single failure aborts the whole start.
+    _stopped = false;
+    // (an unused "WebRtc_Word32 i" local was removed here)
+    WebRtc_Word32 error = 0;
+    ListItem* pItem = _workerThreadsList.First();
+    UdpSocket2WorkerWindows* pWorker;
+    while(pItem != NULL && !error)
+    {
+        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
+        if(!pWorker->Start())
+            error = 1;
+        pItem = _workerThreadsList.Next(pItem);
+    }
+    if(error)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::Start() error starting worker\
+ threads",
+            _managerNumber);
+        _pCrit->Leave();
+        return false;
+    }
+    _pCrit->Leave();
+    return true;
+}
+
+bool UdpSocket2ManagerWindows::StartWorkerThreads() // Creates the IOCP, the worker objects and the I/O context pool.
+{
+    if(!_init)
+    {
+        _pCrit->Enter();
+
+        _ioCompletionHandle = CreateIoCompletionPort(INVALID_HANDLE_VALUE, NULL,
+                                                     0, 0);
+        if(_ioCompletionHandle == NULL)
+        {
+            WebRtc_Word32 error = GetLastError();
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2ManagerWindows(%d)::StartWorkerThreads()"
+                " _ioCompletionHandle == NULL: error:%d",
+                _managerNumber,error);
+            _pCrit->Leave();
+            return false;
+        }
+
+        // Create worker threads.
+        WebRtc_UWord32 i = 0;
+        bool error = false;
+        while(i < _numOfWorkThreads && !error)
+        {
+            UdpSocket2WorkerWindows* pWorker =
+                new UdpSocket2WorkerWindows(_ioCompletionHandle);
+            if(pWorker->Init() != 0)
+            {
+                error = true;
+                delete pWorker;
+                break;
+            }
+            _workerThreadsList.PushFront(pWorker);
+            i++;
+        }
+        if(error)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2ManagerWindows(%d)::StartWorkerThreads() error "
+                "creating work threads",
+                _managerNumber);
+            // Delete worker threads.
+            ListItem* pItem = NULL;
+            while((pItem = _workerThreadsList.First()) != NULL)
+            {
+                delete static_cast<UdpSocket2WorkerWindows*>(pItem->GetItem());
+                _workerThreadsList.PopFront();
+            }
+            _pCrit->Leave();
+            return false;
+        }
+        if(_ioContextPool.Init())
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2ManagerWindows(%d)::StartWorkerThreads() error "
+                "initiating _ioContextPool",
+                _managerNumber);
+            _pCrit->Leave();
+            return false;
+        }
+        _init = true;
+        WEBRTC_TRACE(
+            kTraceDebug,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows::StartWorkerThreads %d number of work "
+            "threads created and initialized",
+            _numOfWorkThreads);
+        _pCrit->Leave();
+    }
+    return true;
+}
+
+bool UdpSocket2ManagerWindows::Stop() // Refuses to stop while sockets are still registered.
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::Stop()",_managerNumber);
+
+    if(!_init)
+    {
+        return false;
+    }
+    _pCrit->Enter();
+    _stopped = true; // Blocks further PopIoContext() even if Stop() fails below.
+    if(_numActiveSockets)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::Stop() there is still active\
+ sockets",
+            _managerNumber);
+        _pCrit->Leave();
+        return false;
+    }
+    // No active sockets. Stop all worker threads.
+    bool result = StopWorkerThreads();
+    _pCrit->Leave();
+    return result;
+}
+
+bool UdpSocket2ManagerWindows::StopWorkerThreads() // Flags workers dead, wakes them via the IOCP, then joins them.
+{
+    WebRtc_Word32 error = 0;
+    WEBRTC_TRACE(
+        kTraceDebug,
+        kTraceTransport,
+        _id,
+        "UdpSocket2ManagerWindows(%d)::StopWorkerThreads() Worker\
+ threads stopped, numActiveSockets=%d",
+        _managerNumber,
+        _numActiveSockets);
+    UdpSocket2WorkerWindows* pWorker;
+    ListItem* pItem = _workerThreadsList.First();
+
+    // Set worker threads to not alive so that they will stop calling
+    // UdpSocket2WorkerWindows::Run().
+    while(pItem != NULL)
+    {
+        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
+        pWorker->SetNotAlive();
+        pItem = _workerThreadsList.Next(pItem);
+    }
+    // Release all threads waiting for GetQueuedCompletionStatus(..).
+    if(_ioCompletionHandle)
+    {
+        WebRtc_UWord32 i = 0;
+        for(i = 0; i < _workerThreadsList.GetSize(); i++)
+        {
+            PostQueuedCompletionStatus(_ioCompletionHandle, 0 ,0 , NULL);
+        }
+    }
+    pItem = _workerThreadsList.First();
+
+    while(pItem != NULL)
+    {
+        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
+        if(pWorker->Stop() == false)
+        {
+            error = -1;
+            WEBRTC_TRACE(kTraceWarning,  kTraceTransport, -1,
+                         "failed to stop worker thread");
+        }
+        pItem = _workerThreadsList.Next(pItem);
+    }
+
+    if(error)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::StopWorkerThreads() error stopping\
+ worker threads",
+            _managerNumber);
+        return false;
+    }
+    return true;
+}
+
+bool UdpSocket2ManagerWindows::AddSocketPrv(UdpSocket2Windows* s) // Registers a socket with the I/O completion port.
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::AddSocketPrv()",_managerNumber);
+    if(!_init)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() manager not\
+ initialized",
+            _managerNumber);
+        return false;
+    }
+    _pCrit->Enter();
+    if(s == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() socket == NULL",
+            _managerNumber);
+        _pCrit->Leave();
+        return false;
+    }
+    if(s->GetFd() == NULL || s->GetFd() == INVALID_SOCKET)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() socket->GetFd() ==\
+ %d",
+            _managerNumber,
+            (WebRtc_Word32)s->GetFd());
+        _pCrit->Leave();
+        return false;
+
+    }
+    _ioCompletionHandle = CreateIoCompletionPort((HANDLE)s->GetFd(), // Associates the socket with the existing port; the socket pointer becomes the completion key.
+                                                 _ioCompletionHandle,
+                                                 (ULONG_PTR)(s), 0);
+    if(_ioCompletionHandle == NULL)
+    {
+        WebRtc_Word32 error = GetLastError();
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() Error adding to IO\
+ completion: %d",
+            _managerNumber,
+            error);
+        _pCrit->Leave();
+        return false;
+    }
+    _numActiveSockets++;
+    _pCrit->Leave();
+    return true;
+}
+bool UdpSocket2ManagerWindows::RemoveSocketPrv(UdpSocket2Windows* s) // Unregisters a socket; signals the dtor at zero.
+{
+    if(!_init)
+    {
+        return false;
+    }
+    _pCrit->Enter();
+    _numActiveSockets--;
+    if(_numActiveSockets == 0)
+    {
+        _event->Set(); // Wakes the destructor waiting for the last socket.
+    }
+    _pCrit->Leave();
+    return true;
+}
+
+PerIoContext* UdpSocket2ManagerWindows::PopIoContext() // Hands out an I/O context from the pool; NULL once stopped.
+{
+    if(!_init)
+    {
+        return NULL;
+    }
+
+    PerIoContext* pIoC = NULL;
+    if(!_stopped)
+    {
+        pIoC = _ioContextPool.PopIoContext();
+    }else
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::PopIoContext() Manager Not started",
+            _managerNumber);
+    }
+    return pIoC;
+}
+
+WebRtc_Word32 UdpSocket2ManagerWindows::PushIoContext(PerIoContext* pIoContext) // Returns a context to the pool.
+{
+    return _ioContextPool.PushIoContext(pIoContext);
+}
+
+IoContextPool::IoContextPool() // Lock-free pool of PerIoContext built on a Win32 interlocked SLIST.
+    : _pListHead(NULL),
+      _init(false),
+      _size(0),
+      _inUse(0)
+{
+}
+
+IoContextPool::~IoContextPool()
+{
+    Free();
+    assert(_size.Value() == 0); // Every item must have been returned before destruction.
+    AlignedFree(_pListHead);
+}
+
+WebRtc_Word32 IoContextPool::Init(WebRtc_UWord32 /*increaseSize*/) // increaseSize is accepted but unused.
+{
+    if(_init)
+    {
+        return 0;
+    }
+
+    _pListHead = (PSLIST_HEADER)AlignedMalloc(sizeof(SLIST_HEADER), // SLIST header must be MEMORY_ALLOCATION_ALIGNMENT aligned.
+                                              MEMORY_ALLOCATION_ALIGNMENT);
+    if(_pListHead == NULL)
+    {
+        return -1;
+    }
+    InitializeSListHead(_pListHead);
+    _init = true;
+    return 0;
+}
+
+PerIoContext* IoContextPool::PopIoContext() // Reuses a pooled context, or allocates a fresh one when the list is empty.
+{
+    if(!_init)
+    {
+        return NULL;
+    }
+
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    if(pListEntry == NULL)
+    {
+        IoContextPoolItem* item = (IoContextPoolItem*)
+            AlignedMalloc(
+                sizeof(IoContextPoolItem),
+                MEMORY_ALLOCATION_ALIGNMENT);
+        if(item == NULL)
+        {
+            return NULL;
+        }
+        memset(&item->payload.ioContext,0,sizeof(PerIoContext));
+        item->payload.base = item; // Back-pointer so PushIoContext() can recover the allocation.
+        pListEntry = &(item->itemEntry);
+        ++_size;
+    }
+    ++_inUse;
+    return &((IoContextPoolItem*)pListEntry)->payload.ioContext;
+}
+
+WebRtc_Word32 IoContextPool::PushIoContext(PerIoContext* pIoContext) // Returns a context; frees it outright if the pool is half idle.
+{
+    // TODO (hellner): Overlapped IO should be completed at this point. Perhaps
+    //                 add an assert?
+    const bool overlappedIOCompleted = HasOverlappedIoCompleted(
+        (LPOVERLAPPED)pIoContext);
+
+    IoContextPoolItem* item = ((IoContextPoolItemPayload*)pIoContext)->base; // ioContext is the payload's first member, so this cast recovers the item.
+
+    const WebRtc_Word32 usedItems = --_inUse;
+    const WebRtc_Word32 totalItems = _size.Value();
+    const WebRtc_Word32 freeItems = totalItems - usedItems;
+    if(freeItems < 0) // More pushes than pops: accounting is corrupt.
+    {
+        assert(false);
+        AlignedFree(item);
+        return -1;
+    }
+    if((freeItems >= totalItems>>1) && // Shrink the pool when at least half the items are idle (only safe once the overlapped op finished).
+        overlappedIOCompleted)
+    {
+        AlignedFree(item);
+        --_size;
+        return 0;
+    }
+    InterlockedPushEntrySList(_pListHead, &(item->itemEntry));
+    return 0;
+}
+
+WebRtc_Word32 IoContextPool::Free() // Drains and frees every pooled (idle) item; returns the count freed.
+{
+    if(!_init)
+    {
+        return 0;
+    }
+
+    WebRtc_Word32 itemsFreed = 0;
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    while(pListEntry != NULL)
+    {
+        IoContextPoolItem* item = ((IoContextPoolItem*)pListEntry);
+        AlignedFree(item);
+        --_size;
+        itemsFreed++;
+        pListEntry = InterlockedPopEntrySList(_pListHead);
+    }
+    return itemsFreed;
+}
+
+WebRtc_Word32 UdpSocket2WorkerWindows::_numOfWorkers = 0; // Used only to number workers for tracing.
+
+UdpSocket2WorkerWindows::UdpSocket2WorkerWindows(HANDLE ioCompletionHandle)
+    : _ioCompletionHandle(ioCompletionHandle),
+      _pThread(NULL),
+      _init(false)
+{
+    _workerNumber = _numOfWorkers++;
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocket2WorkerWindows created");
+}
+
+UdpSocket2WorkerWindows::~UdpSocket2WorkerWindows()
+{
+    if(_pThread)
+    {
+        delete _pThread;
+    }
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocket2WorkerWindows deleted");
+}
+
+bool UdpSocket2WorkerWindows::Start()
+{
+    unsigned int id = 0;
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Start UdpSocket2WorkerWindows");
+    return _pThread->Start(id); // NOTE(review): crashes if Init() was never called (_pThread is NULL) - confirm callers.
+}
+
+bool UdpSocket2WorkerWindows::Stop()
+{
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Stop UdpSocket2WorkerWindows");
+    return _pThread->Stop();
+}
+
+void UdpSocket2WorkerWindows::SetNotAlive()
+{
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "SetNotAlive UdpSocket2WorkerWindows");
+    _pThread->SetNotAlive();
+}
+
+WebRtc_Word32 UdpSocket2WorkerWindows::Init() // Creates (but does not start) the worker thread; 0 on success.
+{
+    if(!_init)
+    {
+        const WebRtc_Word8* threadName = "UdpSocket2ManagerWindows_thread";
+        _pThread = ThreadWrapper::CreateThread(Run, this, kRealtimePriority,
+                                               threadName);
+        if(_pThread == NULL)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                -1,
+                "UdpSocket2WorkerWindows(%d)::Init(), error creating thread!",
+                _workerNumber);
+            return -1;
+        }
+        _init = true;
+    }
+    return 0;
+}
+
+bool UdpSocket2WorkerWindows::Run(ThreadObj obj) // Static thread entry point; forwards to Process().
+{
+    UdpSocket2WorkerWindows* pWorker =
+        static_cast<UdpSocket2WorkerWindows*>(obj);
+    return pWorker->Process();
+}
+
+// Process should always return true. Stopping the worker threads is done in
+// the UdpSocket2ManagerWindows::StopWorkerThreads() function.
+bool UdpSocket2WorkerWindows::Process()
+{
+    WebRtc_Word32 success = 0;
+    DWORD ioSize = 0;
+    UdpSocket2Windows* pSocket = NULL;
+    PerIoContext* pIOContext = 0;
+    OVERLAPPED* pOverlapped = 0;
+    success = GetQueuedCompletionStatus(_ioCompletionHandle, // 200 ms timeout so SetNotAlive() is observed promptly.
+                                        &ioSize,
+                                       (ULONG_PTR*)&pSocket, &pOverlapped, 200);
+
+    WebRtc_UWord32 error = 0;
+    if(!success)
+    {
+        error = GetLastError();
+        if(error == WAIT_TIMEOUT)
+        {
+            return true;
+        }
+        // This may happen if e.g. PostQueuedCompletionStatus() has been called.
+        // The IO context still needs to be reclaimed or re-used which is done
+        // in UdpSocket2Windows::IOCompleted(..).
+    }
+    if(pSocket == NULL) // NULL completion key: posted wakeup from StopWorkerThreads().
+    {
+        WEBRTC_TRACE(
+            kTraceDebug,
+            kTraceTransport,
+            -1,
+            "UdpSocket2WorkerWindows(%d)::Process(), pSocket == 0, end thread",
+            _workerNumber);
+        return true;
+    }
+    pIOContext = (PerIoContext*)pOverlapped; // The OVERLAPPED is the first member of PerIoContext.
+    pSocket->IOCompleted(pIOContext,ioSize,error);
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket2_manager_windows.h b/trunk/src/modules/udp_transport/source/udp_socket2_manager_windows.h
new file mode 100644
index 0000000..5adb293
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket2_manager_windows.h
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_MANAGER_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_MANAGER_WINDOWS_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif
+
+#include <winsock2.h>
+
+#include "atomic32_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "thread_wrapper.h"
+#include "udp_socket2_windows.h"
+#include "udp_socket_manager_wrapper.h"
+
+#define MAX_IO_BUFF_SIZE 1600
+
+namespace webrtc {
+// Kind of overlapped operation a PerIoContext is currently posted for.
+enum IO_OPERATION {
+    OP_READ,
+    OP_WRITE
+};
+
+class UdpSocket2Windows;
+
+// Struct used for all socket I/O operations. The WSAOVERLAPPED member must
+// stay first: completion-port consumers cast the dequeued OVERLAPPED* back
+// to a PerIoContext*.
+struct PerIoContext {
+    WSAOVERLAPPED overlapped;
+    // Payload storage for the send or receive operation.
+    char buffer[MAX_IO_BUFF_SIZE];
+    WSABUF wsabuf;
+    int nTotalBytes;
+    int nSentBytes;
+    int bytes;
+    IO_OPERATION ioOperation;
+    // Peer address filled in by WSARecvFrom for OP_READ operations.
+    SocketAddress from;
+    int fromLen;
+    // Should be set to true if the I/O context was passed to the system by
+    // a thread not controlled by the socket implementation.
+    bool ioInitiatedByThreadWrapper;
+    // TODO (hellner): Not used. Delete it.
+    PerIoContext* pNextFree;
+};
+
+struct IoContextPoolItem;
+// Payload of a pool item; 'base' points back at the owning IoContextPoolItem
+// so a PerIoContext* can be returned to the pool without extra bookkeeping.
+struct IoContextPoolItemPayload
+{
+    PerIoContext    ioContext;
+    IoContextPoolItem* base;
+};
+
+struct IoContextPoolItem
+{
+    // Atomic single linked list entry header.
+    SLIST_ENTRY itemEntry;
+    // Atomic single linked list payload
+    IoContextPoolItemPayload payload;
+};
+
+// Lock-free pool of PerIoContext objects built on Windows' interlocked
+// singly-linked list (SLIST) primitives.
+class IoContextPool
+{
+public:
+    IoContextPool();
+    virtual ~IoContextPool();
+    virtual WebRtc_Word32 Init(WebRtc_UWord32 increaseSize = 128);
+    // Re-use an old unused IO context or create a new one.
+    virtual PerIoContext* PopIoContext();
+    virtual WebRtc_Word32 PushIoContext(PerIoContext* pIoContext);
+    // Returns the total number of contexts ever allocated. NOTE(review): the
+    // 'inUse' out-parameter is ignored by this implementation.
+    virtual inline WebRtc_Word32 GetSize(WebRtc_UWord32* inUse = 0)
+    {return _size.Value();}
+    virtual WebRtc_Word32 Free();
+private:
+    // Sample code for use of msfts single linked atomic list can be found here:
+    // http://msdn.microsoft.com/en-us/library/ms686962(VS.85).aspx
+
+    // Atomic single linked list head.
+    PSLIST_HEADER _pListHead;
+
+    bool _init;
+    Atomic32Wrapper _size;
+    Atomic32Wrapper _inUse;
+};
+
+
+// Completion-port based socket manager: owns the IOCP handle, the pool of
+// PerIoContext buffers and the worker threads that service completions.
+class UdpSocket2ManagerWindows : public UdpSocketManager
+{
+public:
+    UdpSocket2ManagerWindows();
+    virtual ~UdpSocket2ManagerWindows();
+
+    virtual bool Init(WebRtc_Word32 id, WebRtc_UWord8& numOfWorkThreads);
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    // NOTE(review): these use reinterpret_cast on a UdpSocketWrapper*; the
+    // manager assumes it is only ever handed UdpSocket2Windows instances.
+    virtual inline bool AddSocket(UdpSocketWrapper* s)
+    {if(s) return AddSocketPrv(reinterpret_cast<UdpSocket2Windows*>(s));
+     return false;}
+    virtual bool RemoveSocket(UdpSocketWrapper* s)
+    {if(s) return RemoveSocketPrv(reinterpret_cast<UdpSocket2Windows*>(s));
+     return false;}
+
+    // Thin pass-throughs to the shared IoContextPool.
+    PerIoContext* PopIoContext(void);
+    WebRtc_Word32 PushIoContext(PerIoContext* pIoContext);
+
+private:
+    bool StopWorkerThreads();
+    bool StartWorkerThreads();
+    bool AddSocketPrv(UdpSocket2Windows* s);
+    bool RemoveSocketPrv(UdpSocket2Windows* s);
+
+    // Shared across all manager instances (WSAStartup is process-wide).
+    static WebRtc_UWord32 _numOfActiveManagers;
+    static bool _wsaInit;
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper* _pCrit;
+    WebRtc_Word32 _managerNumber;
+    volatile bool _stopped;
+    bool _init;
+    WebRtc_Word32 _numActiveSockets;
+    ListWrapper _workerThreadsList;
+    EventWrapper* _event;
+
+    HANDLE _ioCompletionHandle;
+    IoContextPool _ioContextPool;
+};
+
+// One worker thread that blocks on the manager's completion port and
+// dispatches finished I/O operations to their sockets.
+class UdpSocket2WorkerWindows
+{
+public:
+    UdpSocket2WorkerWindows(HANDLE ioCompletionHandle);
+    virtual ~UdpSocket2WorkerWindows();
+
+    virtual bool Start();
+    virtual bool Stop();
+    virtual WebRtc_Word32 Init();
+    virtual void SetNotAlive();
+protected:
+    // Static ThreadWrapper trampoline; calls Process() on the instance.
+    static bool Run(ThreadObj obj);
+    bool Process();
+private:
+    HANDLE _ioCompletionHandle;
+    ThreadWrapper*_pThread;
+    // Used only to assign a unique _workerNumber for tracing.
+    static WebRtc_Word32 _numOfWorkers;
+    WebRtc_Word32 _workerNumber;
+    volatile bool _stop;
+    bool _init;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_MANAGER_WINDOWS_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket2_windows.cc b/trunk/src/modules/udp_transport/source/udp_socket2_windows.cc
new file mode 100644
index 0000000..32e3598
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket2_windows.cc
@@ -0,0 +1,1411 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket2_windows.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <winsock2.h>
+
+#include "traffic_control_windows.h"
+#include "udp_socket2_manager_windows.h"
+
+#pragma warning(disable : 4311)
+
+namespace webrtc {
+typedef struct _QOS_DESTADDR
+{
+    QOS_OBJECT_HDR ObjectHdr;
+    const struct sockaddr* SocketAddress;
+    ULONG SocketAddressLength;
+} QOS_DESTADDR, *LPQOS_DESTADDR;
+
+typedef const QOS_DESTADDR* LPCQOS_DESTADDR;
+
+// TODO (patrikw): seems to be defined in ws2ipdef.h as 3. How come it's
+//                 redefined here (as a different value)?
+#define IP_TOS 8
+
+#define QOS_GENERAL_ID_BASE 2000
+#define QOS_OBJECT_DESTADDR (0x00000004 + QOS_GENERAL_ID_BASE)
+
+// Creates the underlying WSA socket. If GQoS is not disabled, first tries to
+// find a QoS-capable UDP provider and create the socket from its protocol
+// info; otherwise (or on failure) falls back to a plain overlapped UDP
+// socket and clears _qos. Also creates all locks/condition variables and
+// disables the kernel send buffer (SO_SNDBUF = 0) to reduce copying.
+UdpSocket2Windows::UdpSocket2Windows(const WebRtc_Word32 id,
+                                     UdpSocketManager* mgr, bool ipV6Enable,
+                                     bool disableGQOS)
+    : _id(id),
+      _qos(true),
+      _iProtocol(0),
+      _outstandingCalls(0),
+      _outstandingCallComplete(0),
+      _terminate(false),
+      _addedToMgr(false),
+      _safeTodelete(false),
+      _outstandingCallsDisabled(0),
+      _clientHandle(NULL),
+      _flowHandle(NULL),
+      _filterHandle(NULL),
+      _flow(NULL),
+      _gtc(NULL),
+      _pcp(-2),
+      _receiveBuffers(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id,
+                 "UdpSocket2Windows::UdpSocket2Windows()");
+
+    _wantsIncoming = false;
+    _mgr = static_cast<UdpSocket2ManagerWindows *>(mgr);
+
+    _obj = NULL;
+    _incomingCb = NULL;
+    _socket = INVALID_SOCKET;
+    _pCrit = CriticalSectionWrapper::CreateCriticalSection();
+    _ptrCbRWLock     = RWLockWrapper::CreateRWLock();
+    _ptrDestRWLock   = RWLockWrapper::CreateRWLock();
+    _ptrSocketRWLock = RWLockWrapper::CreateRWLock();
+    _ptrDeleteCrit   = CriticalSectionWrapper::CreateCriticalSection();
+    _ptrDeleteCond   = ConditionVariableWrapper::CreateConditionVariable();
+
+    // Check if QoS is supported.
+    BOOL bProtocolFound = FALSE;
+    WSAPROTOCOL_INFO *lpProtocolBuf = NULL;
+    WSAPROTOCOL_INFO    pProtocolInfo;
+
+    if(!disableGQOS)
+    {
+        DWORD dwBufLen = 0;
+        // Set dwBufLen to the size needed to retrieve all the requested
+        // information from WSAEnumProtocols.
+        WebRtc_Word32 nRet = WSAEnumProtocols(NULL, lpProtocolBuf, &dwBufLen);
+        // NOTE(review): malloc is not checked; if it returns NULL the second
+        // call yields SOCKET_ERROR and the loop below simply does not run.
+        lpProtocolBuf = (WSAPROTOCOL_INFO*)malloc(dwBufLen);
+        nRet = WSAEnumProtocols(NULL, lpProtocolBuf, &dwBufLen);
+
+        if (ipV6Enable)
+        {
+            _iProtocol=AF_INET6;
+        } else {
+            _iProtocol=AF_INET;
+        }
+
+        // Look for a UDP provider for our address family that advertises
+        // QoS support.
+        for (WebRtc_Word32 i=0; i<nRet; i++)
+        {
+            if (_iProtocol == lpProtocolBuf[i].iAddressFamily &&
+                IPPROTO_UDP == lpProtocolBuf[i].iProtocol)
+            {
+                if ((XP1_QOS_SUPPORTED ==
+                     (XP1_QOS_SUPPORTED & lpProtocolBuf[i].dwServiceFlags1)))
+                {
+                    pProtocolInfo = lpProtocolBuf[i];
+                    bProtocolFound = TRUE;
+                    break;
+                }
+            }
+         }
+    }
+
+    if(!bProtocolFound)
+    {
+        // free(NULL) is a no-op, so this is safe when GQoS was disabled.
+        free(lpProtocolBuf);
+        _qos=false;
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows::UdpSocket2Windows(), SOCKET_ERROR_NO_QOS,\
+ !bProtocolFound");
+    } else {
+
+        // Create the socket from the QoS-capable provider's protocol info.
+        _socket = WSASocket(FROM_PROTOCOL_INFO, FROM_PROTOCOL_INFO,
+                            FROM_PROTOCOL_INFO,&pProtocolInfo, 0,
+                            WSA_FLAG_OVERLAPPED);
+        free(lpProtocolBuf);
+
+        if (_socket != INVALID_SOCKET)
+        {
+            // QoS socket created; skip the fallback path and SO_SNDBUF tweak.
+            return;
+        } else {
+            _qos = false;
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2Windows::UdpSocket2Windows(), SOCKET_ERROR_NO_QOS");
+        }
+    }
+    // QoS not supported.
+    if(ipV6Enable)
+    {
+        _socket = WSASocket(AF_INET6, SOCK_DGRAM, IPPROTO_UDP, 0 , 0,
+                            WSA_FLAG_OVERLAPPED);
+    }else
+    {
+        _socket = WSASocket(AF_INET, SOCK_DGRAM, IPPROTO_UDP, 0 , 0,
+                            WSA_FLAG_OVERLAPPED);
+    }
+    if (_socket == INVALID_SOCKET)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows::UdpSocket2Windows(), INVALID_SOCKET,\
+ WSAerror: %d",
+            WSAGetLastError());
+    }
+
+    // Disable send buffering on the socket to improve CPU usage.
+    // This is done by setting SO_SNDBUF to 0.
+    WebRtc_Word32 nZero = 0;
+    WebRtc_Word32 nRet = setsockopt(_socket, SOL_SOCKET, SO_SNDBUF,
+                                    (WebRtc_Word8*)&nZero, sizeof(nZero));
+    if( nRet == SOCKET_ERROR )
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows::UdpSocket2Windows(), SOCKET_ERROR,\
+ WSAerror: %d",
+            WSAGetLastError());
+    }
+}
+
+// Blocks until all outstanding overlapped calls have completed, then frees
+// locks, flow-spec memory and any traffic-control filter/flow handles.
+UdpSocket2Windows::~UdpSocket2Windows()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id,
+                 "UdpSocket2Windows::~UdpSocket2Windows()");
+
+    // Must not destroy members while worker threads may still reference us.
+    WaitForOutstandingCalls();
+
+    delete _ptrCbRWLock;
+    delete _ptrDeleteCrit;
+    delete _ptrDeleteCond;
+    delete _ptrDestRWLock;
+    delete _ptrSocketRWLock;
+
+    if(_pCrit)
+        delete _pCrit;
+
+    if (_flow)
+    {
+        free(_flow);
+        _flow = NULL;
+    }
+
+    if (_gtc)
+    {
+        // Tear down any QoS traffic-control state before releasing the
+        // shared TrafficControlWindows instance.
+        if(_filterHandle)
+        {
+            _gtc->TcDeleteFilter(_filterHandle);
+        }
+        if(_flowHandle)
+        {
+            _gtc->TcDeleteFlow(_flowHandle);
+        }
+        TrafficControlWindows::Release( _gtc);
+    }
+}
+
+// Updates the trace id used by this socket (and its traffic-control helper,
+// if one exists). Always returns 0.
+WebRtc_Word32 UdpSocket2Windows::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    if (_gtc)
+    {
+        _gtc->ChangeUniqueId(id);
+    }
+    return 0;
+}
+
+// True if the underlying WSA socket was created successfully.
+bool UdpSocket2Windows::ValidHandle()
+{
+    return GetFd() != INVALID_SOCKET;
+}
+
+// Registers the incoming-packet callback and, on first use, adds the socket
+// to the manager (associating it with the completion port). Returns true
+// only when the socket was added to the manager by this call; returns false
+// if it was already added or if adding fails.
+bool UdpSocket2Windows::SetCallback(CallbackObj obj, IncomingSocketCallback cb)
+{
+    // Exclusive lock: worker threads read these under the shared lock.
+    _ptrCbRWLock->AcquireLockExclusive();
+    _obj = obj;
+    _incomingCb = cb;
+    _ptrCbRWLock->ReleaseLockExclusive();
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows(%d)::SetCallback ",(WebRtc_Word32)this);
+    if(_addedToMgr)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::SetCallback alreadey added",
+                     (WebRtc_Word32) this);
+        return false;
+
+    }
+    if (_mgr->AddSocket(this))
+    {
+        WEBRTC_TRACE(
+            kTraceDebug, kTraceTransport, _id,
+            "UdpSocket2Windows(%d)::SetCallback socket added to manager",
+            (WebRtc_Word32)this);
+        _addedToMgr = true;
+        return true;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows(%d)::SetCallback error adding me to mgr",
+                 (WebRtc_Word32) this);
+    return false;
+}
+
+// setsockopt() wrapper guarded by AquireSocket()/ReleaseSocket() so the
+// socket cannot be invalidated mid-call. Returns false if the socket is
+// unavailable or setsockopt fails.
+bool UdpSocket2Windows::SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                                   const WebRtc_Word8* optval,
+                                   WebRtc_Word32 optlen)
+{
+    bool returnValue = true;
+    if(!AquireSocket())
+    {
+        return false;
+    }
+    if(0 != setsockopt(_socket, level, optname, optval, optlen ))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetSockopt(), WSAerror:%d",
+                     WSAGetLastError());
+        returnValue = false;
+    }
+    ReleaseSocket();
+    return returnValue;
+}
+
+// Posts enough overlapped WSARecvFrom operations so that 'receiveBuffers'
+// receive buffers are outstanding in total. Returns false if posting any
+// additional buffer fails.
+bool UdpSocket2Windows::StartReceiving(WebRtc_UWord32 receiveBuffers)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows(%d)::StartReceiving(%d)",
+                 (WebRtc_Word32)this, receiveBuffers);
+
+    _wantsIncoming = true;
+
+    // Only top up to the requested count; never cancel existing buffers.
+    WebRtc_Word32 numberOfReceiveBuffersToCreate =
+        receiveBuffers - _receiveBuffers.Value();
+    numberOfReceiveBuffersToCreate = (numberOfReceiveBuffersToCreate < 0) ?
+        0 : numberOfReceiveBuffersToCreate;
+
+    WebRtc_Word32 error = 0;
+    for(WebRtc_Word32 i = 0;
+        i < numberOfReceiveBuffersToCreate;
+        i++)
+    {
+        if(PostRecv())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "UdpSocket2Windows::StartReceiving() i=%d", i);
+            error = -1;
+            break;
+        }
+        ++_receiveBuffers;
+    }
+    if(error == -1)
+    {
+        return false;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "Socket receiving using:%d number of buffers",
+                 _receiveBuffers.Value());
+    return true;
+}
+
+// Stops delivering packets to the callback. Outstanding receive buffers are
+// not cancelled here; IOCompleted simply stops invoking _incomingCb.
+bool UdpSocket2Windows::StopReceiving()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows::StopReceiving()");
+    _wantsIncoming = false;
+    return true;
+}
+
+// Binds the socket to the given local address. Guarded by
+// AquireSocket()/ReleaseSocket(); returns false on failure.
+bool UdpSocket2Windows::Bind(const SocketAddress& name)
+{
+    const struct sockaddr* addr =
+        reinterpret_cast<const struct sockaddr*>(&name);
+    bool returnValue = true;
+    if(!AquireSocket())
+    {
+        return false;
+    }
+    if (0 != bind(_socket, addr, sizeof(SocketAddress)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::Bind() WSAerror: %d",
+                     WSAGetLastError());
+        returnValue = false;
+    }
+    ReleaseSocket();
+    return returnValue;
+}
+
+// Sends 'len' bytes to 'to' via an overlapped WSASendTo. The payload is
+// copied into a pooled PerIoContext, so the caller's buffer may be reused
+// immediately. Returns the number of bytes queued (possibly truncated to
+// the context buffer size) or -1 on failure. An outstanding-call token is
+// taken before the send and rolled back if the send fails synchronously.
+WebRtc_Word32 UdpSocket2Windows::SendTo(const WebRtc_Word8* buf,
+                                        WebRtc_Word32 len,
+                                        const SocketAddress& to)
+{
+    WebRtc_Word32 retVal = 0;
+    WebRtc_Word32 error = 0;
+    if(len < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::SendTo(), len= %d < 0",
+                     (WebRtc_Word32)this, len);
+        return -1;
+    }
+
+    PerIoContext* pIoContext = _mgr->PopIoContext();
+    if(pIoContext == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::SendTo(), pIoContext==0",
+                     (WebRtc_Word32) this);
+        return -1;
+    }
+    // sizeof(pIoContext->buffer) is smaller than the highest number that
+    // can be represented by a WebRtc_Word32.
+    // NOTE(review): oversized packets are truncated (and traced), not
+    // rejected.
+    if(len >= (WebRtc_Word32) sizeof(pIoContext->buffer))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows(%d)::SendTo(), len= %d > buffer_size = %d",
+            (WebRtc_Word32) this,
+            len,sizeof(pIoContext->buffer));
+        len = sizeof(pIoContext->buffer);
+    }
+
+    memcpy(pIoContext->buffer,buf,len);
+    pIoContext->wsabuf.buf = pIoContext->buffer;
+    pIoContext->wsabuf.len = len;
+    pIoContext->fromLen=sizeof(SocketAddress);
+    pIoContext->ioOperation = OP_WRITE;
+    pIoContext->nTotalBytes = len;
+    pIoContext->nSentBytes=0;
+
+    DWORD numOfbytesSent = 0;
+    const struct sockaddr* addr = reinterpret_cast<const struct sockaddr*>(&to);
+
+    if(!AquireSocket())
+    {
+        _mgr->PushIoContext(pIoContext);
+        return -1;
+    }
+    // Assume that the WSASendTo call will be successful to make sure that
+    // _outstandingCalls is positive. Roll back if WSASendTo failed.
+    if(!NewOutstandingCall())
+    {
+        _mgr->PushIoContext(pIoContext);
+        ReleaseSocket();
+        return -1;
+    }
+    retVal = WSASendTo(_socket, &pIoContext->wsabuf, 1, &numOfbytesSent,
+                       0, addr, sizeof(SocketAddress),
+                       &(pIoContext->overlapped), 0);
+    ReleaseSocket();
+
+    if( retVal == SOCKET_ERROR  )
+    {
+        error =  WSAGetLastError();
+        if(error != ERROR_IO_PENDING)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "UdpSocket2Windows::SendTo() WSAerror: %d",error);
+        }
+    }
+    // 0 = completed synchronously; ERROR_IO_PENDING = queued. Either way the
+    // completion will still be delivered to the port, which reclaims the
+    // context and the outstanding-call token.
+    if(retVal == 0 || (retVal == SOCKET_ERROR && error == ERROR_IO_PENDING))
+    {
+        return len;
+    }
+    // Hard failure: return the context to the pool ourselves.
+    if((error = _mgr->PushIoContext(pIoContext)))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows(%d)::SendTo(), error:%d pushing ioContext",
+            (WebRtc_Word32)this, error);
+    }
+
+    // Roll back.
+    OutstandingCallCompleted();
+    return -1;
+}
+
+// Called from a worker thread for every dequeued completion. Handles three
+// cases: aborted/NULL completions (reclaim or re-post the context), finished
+// writes (return context to pool) and finished reads (deliver packet to the
+// callback, then re-post the same context with PostRecv). Always balances
+// the outstanding-call token taken when the operation was posted.
+void UdpSocket2Windows::IOCompleted(PerIoContext* pIOContext,
+                                    WebRtc_UWord32 ioSize, WebRtc_UWord32 error)
+{
+    if(pIOContext == NULL || error == ERROR_OPERATION_ABORTED)
+    {
+        if ((pIOContext != NULL) &&
+            !pIOContext->ioInitiatedByThreadWrapper &&
+            (error == ERROR_OPERATION_ABORTED) &&
+            (pIOContext->ioOperation == OP_READ) &&
+            _outstandingCallsDisabled.Value() == 1)
+        {
+            // !pIOContext->initiatedIOByThreadWrapper indicate that the I/O
+            // was not initiated by a ThreadWrapper thread.
+            // This may happen if the thread that initiated receiving (e.g.
+            // by calling StartListen())) is deleted before any packets have
+            // been received.
+            // In this case there is no packet in the PerIoContext. Re-use it
+            // to post a new PostRecv(..).
+            // Note 1: the PerIoContext will henceforth be posted by a thread
+            //         that is controlled by the socket implementation.
+            // Note 2: This is more likely to happen to RTCP packets as
+            //         they are less frequent than RTP packets.
+            // Note 3: _outstandingCallsDisabled being false (= 1) indicates
+            //         that the socket isn't being shut down.
+            // Note 4: This should only happen buffers set to receive packets
+            //         (OP_READ).
+            // NOTE(review): the enclosing branch already required
+            // _outstandingCallsDisabled.Value() == 1, so this re-check can
+            // only be true if another thread changed the value between the
+            // two reads; the trace below is effectively dead code.
+            if (_outstandingCallsDisabled.Value() != 1)
+            {
+                WEBRTC_TRACE(
+                    kTraceDebug,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows::IOCompleted(pIOContext=%p,\
+ ioSize=%.lu, error=%.lu) Received operation aborted but continuing since\
+ pIOContext->ioInitiatedByThreadWrapper == false",
+                    pIOContext,
+                    ioSize,
+                    error);
+                WebRtc_Word32 ioOp = pIOContext ?
+                    (WebRtc_Word32)pIOContext->ioOperation : -1;
+                WebRtc_Word32 ioInit = pIOContext ?
+                    (WebRtc_Word32)pIOContext->ioInitiatedByThreadWrapper : -1;
+                WEBRTC_TRACE(
+                    kTraceDebug,
+                    kTraceTransport,
+                    _id,
+                    "pIOContext->ioOperation=%d,\
+ pIOContext->ioInitiatedByThreadWrapper=%d, _outstandingCallsDisabled=%d,\
+ _incomingCb=%p, this=%p",
+                    ioOp,
+                    ioInit,
+                    (WebRtc_Word32)_outstandingCallsDisabled.Value(),
+                    _incomingCb,
+                    this);
+            }
+        } else {
+            if(pIOContext == NULL)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows::IOCompleted(%d,%d,%d), %d",
+                    (WebRtc_Word32)pIOContext,
+                    ioSize,
+                    error,
+                    pIOContext ? (WebRtc_Word32)pIOContext->ioOperation : -1);
+            } else {
+                WEBRTC_TRACE(
+                    kTraceDebug,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows::IOCompleted() Operation aborted");
+            }
+            // Aborted (or context-less) completion: reclaim the context and
+            // drop one receive buffer from the outstanding count.
+            if(pIOContext)
+            {
+                WebRtc_Word32 remainingReceiveBuffers = --_receiveBuffers;
+                if(remainingReceiveBuffers < 0)
+                {
+                    assert(false);
+                }
+                WebRtc_Word32 err = 0;
+                if((err = _mgr->PushIoContext(pIOContext)))
+                {
+                    WEBRTC_TRACE(
+                        kTraceError,
+                        kTraceTransport,
+                        _id,
+                        "UdpSocket2Windows::IOCompleted(), err = %d, when\
+ pushing ioContext after error",
+                        err);
+                }
+            }
+            OutstandingCallCompleted();
+            return;
+        }
+    } // if (pIOContext == NULL || error == ERROR_OPERATION_ABORTED)
+
+    if(pIOContext->ioOperation == OP_WRITE)
+    {
+        // Send finished; the context is no longer needed.
+        _mgr->PushIoContext(pIOContext);
+    }
+    else if(pIOContext->ioOperation == OP_READ)
+    {
+        if(!error && ioSize != 0)
+        {
+            // Deliver the packet under the shared lock so SetCallback can't
+            // swap the callback out from under us.
+            _ptrCbRWLock->AcquireLockShared();
+            if(_wantsIncoming && _incomingCb)
+            {
+                _incomingCb(_obj,pIOContext->wsabuf.buf, ioSize,
+                            &pIOContext->from);
+            }
+            _ptrCbRWLock->ReleaseLockShared();
+        }
+        // Re-post the same context to keep the receive pipeline full.
+        WebRtc_Word32 err = PostRecv(pIOContext);
+        if(err == 0)
+        {
+            // The PerIoContext was posted by a thread controlled by the socket
+            // implementation.
+            pIOContext->ioInitiatedByThreadWrapper = true;
+        }
+        OutstandingCallCompleted();
+        return;
+    } else {
+        // Unknown operation. Should not happen. Return pIOContext to avoid
+        // memory leak.
+        assert(false);
+        _mgr->PushIoContext(pIOContext);
+    }
+    OutstandingCallCompleted();
+    // Don't touch any members after OutstandingCallCompleted() since the socket
+    // may be deleted at this point.
+}
+
+// Pops a fresh PerIoContext from the pool and posts it as a receive.
+// Returns 0 on success, -1 if no context is available or posting fails.
+WebRtc_Word32 UdpSocket2Windows::PostRecv()
+{
+    PerIoContext* pIoContext=_mgr->PopIoContext();
+    if(pIoContext == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::PostRecv(), pIoContext == 0",
+                     (WebRtc_Word32)this);
+        return -1;
+    }
+    // This function may have been called by thread not controlled by the socket
+    // implementation.
+    pIoContext->ioInitiatedByThreadWrapper = false;
+    return PostRecv(pIoContext);
+}
+
+// Posts an overlapped WSARecvFrom using the given context, retrying up to
+// 10 times on transient errors. Takes an outstanding-call token up front and
+// rolls it back (and returns the context to the pool) if every attempt
+// fails. Returns 0 on success, -1 on failure.
+WebRtc_Word32 UdpSocket2Windows::PostRecv(PerIoContext* pIoContext)
+{
+    if(pIoContext==0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::PostRecv(?), pIoContext==0",
+                     (WebRtc_Word32)this);
+        return -1;
+    }
+
+    DWORD numOfRecivedBytes = 0;
+    DWORD flags = 0;
+    // Re-arm the context for a full-buffer receive.
+    pIoContext->wsabuf.buf = pIoContext->buffer;
+    pIoContext->wsabuf.len = sizeof(pIoContext->buffer);
+    pIoContext->fromLen = sizeof(SocketAddress);
+    pIoContext->ioOperation = OP_READ;
+    WebRtc_Word32 rxError = 0;
+    WebRtc_Word32 nRet = 0;
+    WebRtc_Word32 postingSucessfull = false;
+
+    if(!AquireSocket())
+    {
+        _mgr->PushIoContext(pIoContext);
+        return -1;
+    }
+
+    // Assume that the WSARecvFrom() call will be successful to make sure that
+    // _outstandingCalls is positive. Roll back if WSARecvFrom() failed.
+    if(!NewOutstandingCall())
+    {
+        _mgr->PushIoContext(pIoContext);
+        ReleaseSocket();
+        return -1;
+    }
+    for(WebRtc_Word32 tries = 0; tries < 10; tries++)
+    {
+        nRet = WSARecvFrom(
+            _socket,
+            &(pIoContext->wsabuf),
+            1,
+            &numOfRecivedBytes,
+            &flags,
+            reinterpret_cast<struct sockaddr*>(&(pIoContext->from)),
+            &(pIoContext->fromLen),
+            &(pIoContext->overlapped),
+            0);
+
+        if( nRet == SOCKET_ERROR)
+        {
+            rxError = WSAGetLastError();
+            if(rxError != ERROR_IO_PENDING)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows(%d)::PostRecv(?), WSAerror:%d when\
+ posting new recieve,trie:%d",
+                    (WebRtc_Word32)this,
+                    rxError,
+                    tries);
+                // Tell the OS that this is a good place to context switch if
+                // it wants to.
+                Sleep(0);
+            }
+        }
+        // ERROR_IO_PENDING means the receive was queued; nRet == 0 means it
+        // completed immediately (the completion is still posted to the port).
+        if((rxError == ERROR_IO_PENDING) || (nRet == 0))
+        {
+            postingSucessfull = true;
+            break;
+        }
+    }
+    ReleaseSocket();
+
+    if(postingSucessfull)
+    {
+        return 0;
+    }
+    // All attempts failed: one fewer receive buffer is outstanding.
+    WebRtc_Word32 remainingReceiveBuffers = --_receiveBuffers;
+    if(remainingReceiveBuffers < 0)
+    {
+        assert(false);
+    }
+    WebRtc_Word32 error = 0;
+    if((error = _mgr->PushIoContext(pIoContext)))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows(%d)::PostRecv(?), error:%d when PushIoContext",
+            (WebRtc_Word32)this,
+            error);
+    }
+    // Roll back.
+    OutstandingCallCompleted();
+    return -1;
+}
+
+// Shuts the socket down synchronously: enables hard close (SO_LINGER with
+// zero timeout), invalidates the socket so no new I/O can be posted, waits
+// for all outstanding overlapped calls, then destroys this object.
+// WARNING: ends with 'delete this' — the caller must not touch the object
+// (or any pointer to it) after this call.
+void UdpSocket2Windows::CloseBlocking()
+{
+    LINGER  lingerStruct;
+
+    lingerStruct.l_onoff = 1;
+    lingerStruct.l_linger = 0;
+    if(AquireSocket())
+    {
+        setsockopt(_socket, SOL_SOCKET, SO_LINGER,
+                   (WebRtc_Word8 *)&lingerStruct, sizeof(lingerStruct));
+        ReleaseSocket();
+    }
+
+    _wantsIncoming = false;
+    // Reclaims the socket and prevents it from being used again.
+    InvalidateSocket();
+    DisableNewOutstandingCalls();
+    WaitForOutstandingCalls();
+    delete this;
+}
+
+// Configures QoS for this socket. If overrideDSCP is non-zero the TOS/DSCP
+// value is set directly via the traffic-control API (SetTrafficControl)
+// using a flow spec built from the arguments; serviceType == 0 disables it.
+// Otherwise a GQoS flow spec is installed on the socket with
+// WSAIoctl(SIO_SET_QOS); on pre-Vista systems SERVICE_NO_QOS_SIGNALING and a
+// QOS_DESTADDR object are added since RSVP signaling is not wanted.
+// Returns true on success, false on any failure (including when the socket
+// is not QoS-capable or cannot be acquired).
+bool UdpSocket2Windows::SetQos(WebRtc_Word32 serviceType,
+                               WebRtc_Word32 tokenRate,
+                               WebRtc_Word32 bucketSize,
+                               WebRtc_Word32 peekBandwith,
+                               WebRtc_Word32 minPolicedSize,
+                               WebRtc_Word32 maxSduSize,
+                               const SocketAddress &stRemName,
+                               WebRtc_Word32 overrideDSCP)
+{
+    if(_qos == false)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetQos(), socket not capable of QOS");
+        return false;
+    }
+    if(overrideDSCP != 0)
+    {
+        FLOWSPEC f;
+        WebRtc_Word32 err = CreateFlowSpec(serviceType, tokenRate, bucketSize,
+                                           peekBandwith, minPolicedSize,
+                                           maxSduSize, &f);
+        if(err == -1)
+        {
+            return false;
+        }
+
+        // Traffic control needs the locally bound address for the filter.
+        SocketAddress socketName;
+        struct sockaddr_in* name =
+            reinterpret_cast<struct sockaddr_in*>(&socketName);
+        int nameLength = sizeof(SocketAddress);
+        if(AquireSocket())
+        {
+            getsockname(_socket, (struct sockaddr*)name, &nameLength);
+            ReleaseSocket();
+        }
+
+        if(serviceType == 0)
+        {
+            // Disable TOS byte setting.
+            return SetTrafficControl(0, -1, name, &f, &f) == 0;
+        }
+        return SetTrafficControl(overrideDSCP, -1, name, &f, &f) == 0;
+    }
+
+    QOS Qos;
+    // Initialize to SOCKET_ERROR so that failing to acquire the socket below
+    // is reported as a failure instead of reading an uninitialized value.
+    WebRtc_Word32 result = SOCKET_ERROR;
+    DWORD BytesRet;
+    QOS_DESTADDR QosDestaddr;
+
+    memset (&Qos, QOS_NOT_SPECIFIED, sizeof(QOS));
+
+    Qos.SendingFlowspec.ServiceType        = serviceType;
+    Qos.SendingFlowspec.TokenRate          = tokenRate;
+    Qos.SendingFlowspec.TokenBucketSize    = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.PeakBandwidth      = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.DelayVariation     = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.Latency            = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.MaxSduSize         = QOS_NOT_SPECIFIED;
+
+    // Only ServiceType is needed for receiving.
+    Qos.ReceivingFlowspec.ServiceType        = serviceType;
+    Qos.ReceivingFlowspec.TokenRate          = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.TokenBucketSize    = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.PeakBandwidth      = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.Latency            = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.DelayVariation     = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.MaxSduSize         = QOS_NOT_SPECIFIED;
+
+    Qos.ProviderSpecific.len = 0;
+
+    Qos.ProviderSpecific.buf = NULL;
+
+    ZeroMemory((WebRtc_Word8 *)&QosDestaddr, sizeof(QosDestaddr));
+
+    OSVERSIONINFOEX osvie;
+    osvie.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+    GetVersionEx((LPOSVERSIONINFO)&osvie);
+
+//    Operating system        Version number    dwMajorVersion    dwMinorVersion
+//    Windows 7                6.1                6                1
+//    Windows Server 2008 R2   6.1                6                1
+//    Windows Server 2008      6.0                6                0
+//    Windows Vista            6.0                6                0
+//    Windows Server 2003 R2   5.2                5                2
+//    Windows Server 2003      5.2                5                2
+//    Windows XP               5.1                5                1
+//    Windows 2000             5.0                5                0
+
+    // SERVICE_NO_QOS_SIGNALING and QOS_DESTADDR should not be used if version
+    // is 6.0 or greater.
+    if(osvie.dwMajorVersion >= 6)
+    {
+        Qos.SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        Qos.ReceivingFlowspec.ServiceType = serviceType;
+
+    } else {
+        Qos.SendingFlowspec.MinimumPolicedSize =
+            QOS_NOT_SPECIFIED | SERVICE_NO_QOS_SIGNALING;
+        Qos.ReceivingFlowspec.ServiceType =
+            serviceType | SERVICE_NO_QOS_SIGNALING;
+
+        QosDestaddr.ObjectHdr.ObjectType   = QOS_OBJECT_DESTADDR;
+        QosDestaddr.ObjectHdr.ObjectLength = sizeof(QosDestaddr);
+        QosDestaddr.SocketAddress = (SOCKADDR *)&stRemName;
+        if (AF_INET6 == _iProtocol)
+        {
+            QosDestaddr.SocketAddressLength = sizeof(SocketAddressInVersion6);
+        } else {
+            QosDestaddr.SocketAddressLength = sizeof(SocketAddressIn);
+        }
+
+        Qos.ProviderSpecific.len = QosDestaddr.ObjectHdr.ObjectLength;
+        Qos.ProviderSpecific.buf = (WebRtc_Word8*)&QosDestaddr;
+    }
+
+    if(AquireSocket())
+    {
+        // To set QoS with SIO_SET_QOS the socket must be locally bound first
+        // or the call will fail with error code 10022.
+        result = WSAIoctl(GetFd(), SIO_SET_QOS, &Qos, sizeof(QOS), NULL, 0,
+                          &BytesRet, NULL,NULL);
+        ReleaseSocket();
+    }
+    if (result == SOCKET_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetQos() WSAerror : %d",
+                     WSAGetLastError());
+        return false;
+    }
+    return true;
+}
+
+// Sets the IP Type-Of-Service / DSCP value for this socket.
+//
+// serviceType: DSCP value to apply; see SetTrafficControl() for the full
+//              value semantics.
+// Returns the result of SetTrafficControl() (0 on success). On NT 4.0,
+// where traffic control is unavailable, falls back to the IP_TOS socket
+// option and returns -1 if that fallback also fails.
+WebRtc_Word32 UdpSocket2Windows::SetTOS(WebRtc_Word32 serviceType)
+{
+    SocketAddress socketName;
+    // Zero-initialize so SetTrafficControl() never reads garbage if the
+    // socket can't be acquired (or getsockname() fails) below.
+    ZeroMemory(&socketName, sizeof(socketName));
+
+    struct sockaddr_in* name =
+        reinterpret_cast<struct sockaddr_in*>(&socketName);
+    int nameLength = sizeof(SocketAddress);
+    if(AquireSocket())
+    {
+        // Retrieve the locally bound address; SetTrafficControl() uses it to
+        // find the matching traffic-control interface.
+        getsockname(_socket, (struct sockaddr*)name, &nameLength);
+        ReleaseSocket();
+    }
+
+    WebRtc_Word32 res = SetTrafficControl(serviceType, -1, name);
+    if (res == -1)
+    {
+        OSVERSIONINFO OsVersion;
+        OsVersion.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+        GetVersionEx (&OsVersion);
+
+        // Traffic control is not available on NT 4.0; set the raw TOS byte
+        // directly instead.
+        if ((OsVersion.dwMajorVersion == 4)) // NT 4.0
+        {
+            if(SetSockopt(IPPROTO_IP,IP_TOS ,
+                          (WebRtc_Word8*)&serviceType, 4) != 0)
+            {
+                return -1;
+            }
+        }
+    }
+    return res;
+}
+
+// Sets the 802.1p priority (PCP) for this socket via the Windows traffic
+// control API.
+// pcp: priority value; see SetTrafficControl() for the value semantics.
+// Returns the result of SetTrafficControl() (0 on success).
+WebRtc_Word32 UdpSocket2Windows::SetPCP(WebRtc_Word32 pcp)
+{
+    SocketAddress socketName;
+    // Zero-initialize so SetTrafficControl() never reads garbage if the
+    // socket can't be acquired (or getsockname() fails) below.
+    ZeroMemory(&socketName, sizeof(socketName));
+    struct sockaddr_in* name =
+        reinterpret_cast<struct sockaddr_in*>(&socketName);
+    int nameLength = sizeof(SocketAddress);
+    if(AquireSocket())
+    {
+        getsockname(_socket, (struct sockaddr*)name, &nameLength);
+        ReleaseSocket();
+    }
+    return SetTrafficControl(-1, pcp, name);
+}
+
+// Installs/updates a traffic control (TC) flow and filter for this socket.
+// See the declaration in udp_socket2_windows.h for the full semantics of the
+// dscp and pcp arguments. The flow carries send/receive flow specifications,
+// a DS object (exact DSCP value) and, when pcp is used, an 802.1p
+// traffic-class object; the filter matches this socket's local IP address
+// and UDP source port.
+// Returns 0 on success, otherwise -1 or a Windows TC API error code.
+WebRtc_Word32 UdpSocket2Windows::SetTrafficControl(
+    WebRtc_Word32 dscp,
+    WebRtc_Word32 pcp,
+    const struct sockaddr_in* name,
+    FLOWSPEC* send, FLOWSPEC* recv)
+{
+    if (pcp == _pcp)
+    {
+        // No change.
+        pcp = -1;
+    }
+    if ((-1 == pcp) && (-1 == dscp))
+    {
+        return 0;
+    }
+    if (!_gtc)
+    {
+        _gtc = TrafficControlWindows::GetInstance(_id);
+    }
+    if (!_gtc)
+    {
+        return -1;
+    }
+    // Tear down any previously installed filter, flow and client
+    // registration before applying the new settings.
+    if(_filterHandle)
+    {
+        _gtc->TcDeleteFilter(_filterHandle);
+        _filterHandle = NULL;
+    }
+    if(_flowHandle)
+    {
+        _gtc->TcDeleteFlow(_flowHandle);
+        _flowHandle = NULL;
+    }
+    if(_clientHandle)
+    {
+        _gtc->TcDeregisterClient(_clientHandle);
+        _clientHandle = NULL;
+    }
+    if ((0 == dscp) && (-2 == _pcp) && (-1 == pcp))
+    {
+        // TODO (pwestin): why is this not done before deleting old filter and
+        //                 flow? This scenario should probably be documented in
+        //                 the function declaration.
+        return 0;
+    }
+
+    TCI_CLIENT_FUNC_LIST QoSFunctions;
+    QoSFunctions.ClAddFlowCompleteHandler = NULL;
+    QoSFunctions.ClDeleteFlowCompleteHandler = NULL;
+    QoSFunctions.ClModifyFlowCompleteHandler = NULL;
+    QoSFunctions.ClNotifyHandler = (TCI_NOTIFY_HANDLER)MyClNotifyHandler;
+    // Register the client with Traffic control interface.
+    HANDLE ClientHandle;
+    ULONG result = _gtc->TcRegisterClient(CURRENT_TCI_VERSION, NULL,
+                                          &QoSFunctions,&ClientHandle);
+    if(result != NO_ERROR)
+    {
+        // This is likely caused by the application not being run as
+        // administrator.
+      WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                   "TcRegisterClient returned %d", result);
+        return result;
+    }
+
+    // Find traffic control-enabled network interfaces that matches this
+    // socket's IP address.
+    // First call with a NULL buffer to learn the required buffer size.
+    ULONG BufferSize = 0;
+    result = _gtc->TcEnumerateInterfaces(ClientHandle, &BufferSize, NULL);
+
+    if(result != NO_ERROR && result != ERROR_INSUFFICIENT_BUFFER)
+    {
+        _gtc->TcDeregisterClient(ClientHandle);
+        return result;
+    }
+
+    if(result != ERROR_INSUFFICIENT_BUFFER)
+    {
+        // Empty buffer contains all control-enabled network interfaces. I.e.
+        // QoS is not enabled.
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "QOS faild since QOS is not installed on the interface");
+
+        _gtc->TcDeregisterClient(ClientHandle);
+        return -1;
+    }
+
+    PTC_IFC_DESCRIPTOR pInterfaceBuffer =
+        (PTC_IFC_DESCRIPTOR)malloc(BufferSize);
+    if(pInterfaceBuffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Out ot memory failure");
+        _gtc->TcDeregisterClient(ClientHandle);
+        return ERROR_NOT_ENOUGH_MEMORY;
+    }
+
+    result = _gtc->TcEnumerateInterfaces(ClientHandle, &BufferSize,
+                                         pInterfaceBuffer);
+
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "Critical: error enumerating interfaces when passing in correct\
+ buffer size: %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    PTC_IFC_DESCRIPTOR oneinterface;
+    HANDLE ifcHandle, iFilterHandle, iflowHandle;
+    bool addrFound = false;
+    ULONG filterSourceAddress = ULONG_MAX;
+
+    // Find the interface corresponding to the local address.
+    // The descriptors are variable-length records packed into the buffer,
+    // hence the Length-based pointer arithmetic.
+    for(oneinterface = pInterfaceBuffer;
+        oneinterface != (PTC_IFC_DESCRIPTOR)
+            (((WebRtc_Word8*)pInterfaceBuffer) + BufferSize);
+        oneinterface = (PTC_IFC_DESCRIPTOR)
+            ((WebRtc_Word8 *)oneinterface + oneinterface->Length))
+    {
+
+        WebRtc_Word8 interfaceName[500];
+        WideCharToMultiByte(CP_ACP, 0, oneinterface->pInterfaceName, -1,
+                            interfaceName, sizeof(interfaceName), 0, 0 );
+
+        PNETWORK_ADDRESS_LIST addresses =
+            &(oneinterface->AddressListDesc.AddressList);
+        for(LONG i = 0; i < addresses->AddressCount ; i++)
+        {
+            // Only look at TCP/IP addresses.
+            if(addresses->Address[i].AddressType != NDIS_PROTOCOL_ID_TCP_IP)
+            {
+                continue;
+            }
+
+            NETWORK_ADDRESS_IP* pIpAddr =
+                (NETWORK_ADDRESS_IP*)&(addresses->Address[i].Address);
+            // NOTE(review): 'in' is assigned but never read afterwards.
+            struct in_addr in;
+            in.S_un.S_addr = pIpAddr->in_addr;
+            if(pIpAddr->in_addr == name->sin_addr.S_un.S_addr)
+            {
+                filterSourceAddress = pIpAddr->in_addr;
+                addrFound = true;
+            }
+        }
+        if(!addrFound)
+        {
+            continue;
+        } else
+        {
+            break;
+        }
+    }
+    if(!addrFound)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "QOS faild since address is not found");
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return -1;
+    }
+    result = _gtc->TcOpenInterfaceW(oneinterface->pInterfaceName, ClientHandle,
+                                    NULL, &ifcHandle);
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Error opening interface: %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    // Create flow if one doesn't exist.
+    if (!_flow)
+    {
+        // The PCP traffic-class object is appended when a pcp value is being
+        // set now, or when one was previously set and should be kept.
+        bool addPCP = ((pcp >= 0) || ((-1 == pcp) && (_pcp >= 0)));
+        int allocSize = sizeof(TC_GEN_FLOW) + sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+        // NOTE(review): malloc result is not checked; an allocation failure
+        // would dereference NULL below.
+        _flow = (PTC_GEN_FLOW)malloc(allocSize);
+
+        _flow->SendingFlowspec.DelayVariation = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.Latency = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.MaxSduSize = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.PeakBandwidth = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.ServiceType = SERVICETYPE_BESTEFFORT;
+        _flow->SendingFlowspec.TokenBucketSize = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.TokenRate = QOS_NOT_SPECIFIED;
+
+        _flow->ReceivingFlowspec.DelayVariation = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.Latency = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.MaxSduSize = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.PeakBandwidth = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.ServiceType = SERVICETYPE_BESTEFFORT;
+        _flow->ReceivingFlowspec.TokenBucketSize = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.TokenRate = QOS_NOT_SPECIFIED;
+
+        // The DS object always directly follows the flow specifications.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        dsClass->DSField = 0;
+        dsClass->ObjectHdr.ObjectType = QOS_OBJECT_DS_CLASS;
+        dsClass->ObjectHdr.ObjectLength = sizeof(QOS_DS_CLASS);
+
+        if (addPCP)
+        {
+            QOS_TRAFFIC_CLASS* trafficClass = (QOS_TRAFFIC_CLASS*)(dsClass + 1);
+            trafficClass->TrafficClass = 0;
+            trafficClass->ObjectHdr.ObjectType = QOS_OBJECT_TRAFFIC_CLASS;
+            trafficClass->ObjectHdr.ObjectLength = sizeof(QOS_TRAFFIC_CLASS);
+        }
+
+        _flow->TcObjectsLength = sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+    } else if (-1 != pcp) {
+        // Reallocate memory since pcp has changed.
+        PTC_GEN_FLOW oldFlow = _flow;
+        bool addPCP = (pcp >= 0);
+        int allocSize = sizeof(TC_GEN_FLOW) + sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+        // NOTE(review): malloc result is not checked here either.
+        _flow = (PTC_GEN_FLOW)malloc(allocSize);
+
+        // Copy old flow.
+        _flow->ReceivingFlowspec = oldFlow->ReceivingFlowspec;
+        _flow->SendingFlowspec = oldFlow->SendingFlowspec;
+        // The DS info is always the first object.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        QOS_DS_CLASS* oldDsClass = (QOS_DS_CLASS*)oldFlow->TcObjects;
+        dsClass->DSField = oldDsClass->DSField;
+        dsClass->ObjectHdr.ObjectType = oldDsClass->ObjectHdr.ObjectType;
+        dsClass->ObjectHdr.ObjectLength = oldDsClass->ObjectHdr.ObjectLength;
+
+        if (addPCP)
+        {
+            QOS_TRAFFIC_CLASS* trafficClass = (QOS_TRAFFIC_CLASS*)(dsClass + 1);
+            trafficClass->TrafficClass = 0;
+            trafficClass->ObjectHdr.ObjectType = QOS_OBJECT_TRAFFIC_CLASS;
+            trafficClass->ObjectHdr.ObjectLength = sizeof(QOS_TRAFFIC_CLASS);
+        }
+
+        _flow->TcObjectsLength = sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+        free(oldFlow);
+    }
+
+    // Setup send and receive flow and DS object.
+    if (dscp >= 0)
+    {
+        if (!send || (0 == dscp))
+        {
+            _flow->SendingFlowspec.DelayVariation = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.Latency = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.MaxSduSize = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.PeakBandwidth =
+                (0 == dscp ? QOS_NOT_SPECIFIED : POSITIVE_INFINITY_RATE);
+            _flow->SendingFlowspec.ServiceType = SERVICETYPE_BESTEFFORT;
+            _flow->SendingFlowspec.TokenBucketSize = QOS_NOT_SPECIFIED;
+            // 128000 * 10 is 10mbit/s.
+            _flow->SendingFlowspec.TokenRate =
+                (0 == dscp ? QOS_NOT_SPECIFIED : 128000 * 10);
+        }
+        else
+        {
+            _flow->SendingFlowspec.DelayVariation = send->DelayVariation;
+            _flow->SendingFlowspec.Latency = send->Latency;
+            _flow->SendingFlowspec.MaxSduSize = send->MaxSduSize;
+            _flow->SendingFlowspec.MinimumPolicedSize =
+                send->MinimumPolicedSize;
+            // NOTE(review): send->PeakBandwidth is assigned and then
+            // immediately overwritten with POSITIVE_INFINITY_RATE on the next
+            // line — confirm which value is intended.
+            _flow->SendingFlowspec.PeakBandwidth = send->PeakBandwidth;
+            _flow->SendingFlowspec.PeakBandwidth = POSITIVE_INFINITY_RATE;
+            _flow->SendingFlowspec.ServiceType = send->ServiceType;
+            _flow->SendingFlowspec.TokenBucketSize = send->TokenBucketSize;
+            _flow->SendingFlowspec.TokenRate = send->TokenRate;
+        }
+
+        if (!recv  || (0 == dscp))
+        {
+            // No explicit receive spec: mirror the sending spec, except for
+            // peak bandwidth and service type.
+            _flow->ReceivingFlowspec.DelayVariation =
+                _flow->SendingFlowspec.DelayVariation;
+            _flow->ReceivingFlowspec.Latency = _flow->SendingFlowspec.Latency;
+            _flow->ReceivingFlowspec.MaxSduSize =
+                _flow->SendingFlowspec.MaxSduSize;
+            _flow->ReceivingFlowspec.MinimumPolicedSize =
+                _flow->SendingFlowspec.MinimumPolicedSize;
+            _flow->ReceivingFlowspec.PeakBandwidth = QOS_NOT_SPECIFIED;
+            _flow->ReceivingFlowspec.ServiceType =
+                0 == dscp ? SERVICETYPE_BESTEFFORT : SERVICETYPE_CONTROLLEDLOAD;
+            _flow->ReceivingFlowspec.TokenBucketSize =
+                _flow->SendingFlowspec.TokenBucketSize;
+            _flow->ReceivingFlowspec.TokenRate =
+                _flow->SendingFlowspec.TokenRate;
+        } else {
+            _flow->ReceivingFlowspec.DelayVariation = recv->DelayVariation;
+            _flow->ReceivingFlowspec.Latency = recv->Latency;
+            _flow->ReceivingFlowspec.MaxSduSize = recv->MaxSduSize;
+            _flow->ReceivingFlowspec.MinimumPolicedSize =
+                recv->MinimumPolicedSize;
+            _flow->ReceivingFlowspec.PeakBandwidth = recv->PeakBandwidth;
+            _flow->ReceivingFlowspec.ServiceType = recv->ServiceType;
+            _flow->ReceivingFlowspec.TokenBucketSize = recv->TokenBucketSize;
+            // NOTE(review): unlike the other fields, recv->TokenRate is
+            // ignored here and QOS_NOT_SPECIFIED is used — confirm intended.
+            _flow->ReceivingFlowspec.TokenRate = QOS_NOT_SPECIFIED;
+        }
+
+        // Setup DS (for DSCP value).
+        // DS is always the first object.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        dsClass->DSField = dscp;
+    }
+
+    // Setup PCP (802.1p priority in 802.1Q/VLAN tagging)
+    if (pcp >= 0)
+    {
+        // DS is always first object.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        QOS_TRAFFIC_CLASS* trafficClass = (QOS_TRAFFIC_CLASS*)(dsClass + 1);
+        trafficClass->TrafficClass = pcp;
+    }
+
+    result = _gtc->TcAddFlow(ifcHandle, NULL, 0, _flow, &iflowHandle);
+    if(result != NO_ERROR)
+    {
+        _gtc->TcCloseInterface(ifcHandle);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return -1;
+    }
+
+    IP_PATTERN filterPattern, mask;
+
+    ZeroMemory((WebRtc_Word8*)&filterPattern, sizeof(IP_PATTERN));
+    ZeroMemory((WebRtc_Word8*)&mask, sizeof(IP_PATTERN));
+
+    filterPattern.ProtocolId = IPPROTO_UDP;
+    // "name" fields already in network order.
+    filterPattern.S_un.S_un_ports.s_srcport = name->sin_port;
+    filterPattern.SrcAddr = filterSourceAddress;
+
+    // Unsigned max of a type corresponds to a bitmask with all bits set to 1.
+    // I.e. the filter should allow all ProtocolIds, any source port and any
+    // IP address
+    mask.ProtocolId = UCHAR_MAX;
+    mask.S_un.S_un_ports.s_srcport = USHRT_MAX;
+    mask.SrcAddr = ULONG_MAX;
+
+    TC_GEN_FILTER filter;
+
+    filter.AddressType = NDIS_PROTOCOL_ID_TCP_IP;
+    filter.Mask = (LPVOID)&mask;
+    filter.Pattern = (LPVOID)&filterPattern;
+    filter.PatternSize = sizeof(IP_PATTERN);
+
+    result = _gtc->TcAddFilter(iflowHandle, &filter, &iFilterHandle);
+    if(result != NO_ERROR)
+    {
+        _gtc->TcDeleteFlow(iflowHandle);
+        _gtc->TcCloseInterface(ifcHandle);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    // Success: keep the handles so they can be cleaned up (or replaced) on
+    // the next call.
+    _flowHandle = iflowHandle;
+    _filterHandle = iFilterHandle;
+    _clientHandle = ClientHandle;
+    if (-1 != pcp)
+    {
+        _pcp = pcp;
+    }
+
+    _gtc->TcCloseInterface(ifcHandle);
+    free(pInterfaceBuffer);
+
+    return 0;
+}
+
+// Fills in the FLOWSPEC |f| for the given QoS parameters.
+// Only |serviceType| and |tokenRate| are copied into the spec; every other
+// field is set to QOS_NOT_SPECIFIED (the bucketSize, peekBandwith,
+// minPolicedSize and maxSduSize arguments are currently not applied).
+// Returns 0 on success, -1 if |f| is NULL.
+WebRtc_Word32 UdpSocket2Windows::CreateFlowSpec(WebRtc_Word32 serviceType,
+                                                WebRtc_Word32 tokenRate,
+                                                WebRtc_Word32 bucketSize,
+                                                WebRtc_Word32 peekBandwith,
+                                                WebRtc_Word32 minPolicedSize,
+                                                WebRtc_Word32 maxSduSize,
+                                                FLOWSPEC* f)
+{
+    if (f == NULL)
+    {
+        return -1;
+    }
+
+    // Leave everything unspecified except the requested service type and
+    // token rate.
+    f->TokenBucketSize    = QOS_NOT_SPECIFIED;
+    f->PeakBandwidth      = QOS_NOT_SPECIFIED;
+    f->DelayVariation     = QOS_NOT_SPECIFIED;
+    f->Latency            = QOS_NOT_SPECIFIED;
+    f->MaxSduSize         = QOS_NOT_SPECIFIED;
+    f->MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    f->ServiceType        = serviceType;
+    f->TokenRate          = tokenRate;
+    return 0;
+}
+
+// Registers a new outstanding (asynchronous) I/O call by incrementing the
+// outstanding-call counter. Must not be called after
+// DisableNewOutstandingCalls() has taken effect.
+// Always returns true.
+bool UdpSocket2Windows::NewOutstandingCall()
+{
+    assert(_outstandingCallsDisabled.Value() == 0);
+
+    // The previous local copy of the incremented value was unused; just
+    // increment the atomic counter.
+    ++_outstandingCalls;
+    return true;
+}
+
+// Balances a previous NewOutstandingCall(). When the last outstanding call
+// completes after DisableNewOutstandingCalls() has run, wakes the thread
+// blocked in WaitForOutstandingCalls() so the socket can be deleted safely.
+void UdpSocket2Windows::OutstandingCallCompleted()
+{
+    // _outstandingCallComplete is incremented before and decremented after
+    // the check below, so that only the last thread leaving this function
+    // (with _terminate set) performs the wake-up.
+    _ptrDestRWLock->AcquireLockShared();
+    ++_outstandingCallComplete;
+    if((--_outstandingCalls == 0) && (_outstandingCallsDisabled.Value() == 1))
+    {
+        // When there are no outstanding calls and new outstanding calls are
+        // disabled it is time to terminate.
+        _terminate = true;
+    }
+    _ptrDestRWLock->ReleaseLockShared();
+
+    if((--_outstandingCallComplete == 0) &&
+        (_terminate))
+    {
+        // Only one thread will enter here. The thread with the last outstanding
+        // call.
+        CriticalSectionScoped cs(_ptrDeleteCrit);
+        _safeTodelete = true;
+        _ptrDeleteCond->Wake();
+    }
+}
+
+// Disables registration of new outstanding calls (idempotent) and removes
+// the socket from its manager. If no calls are outstanding, immediately
+// marks the socket safe to delete and wakes WaitForOutstandingCalls();
+// otherwise OutstandingCallCompleted() will do so for the last call.
+void UdpSocket2Windows::DisableNewOutstandingCalls()
+{
+    // Exclusive lock: the disable flag and the outstanding-call count must
+    // be read/written atomically with respect to OutstandingCallCompleted().
+    _ptrDestRWLock->AcquireLockExclusive();
+    if(_outstandingCallsDisabled.Value() == 1)
+    {
+        // Outstanding calls are already disabled.
+        _ptrDestRWLock->ReleaseLockExclusive();
+        return;
+    }
+    _outstandingCallsDisabled = 1;
+    const bool noOutstandingCalls = (_outstandingCalls.Value() == 0);
+    _ptrDestRWLock->ReleaseLockExclusive();
+
+    RemoveSocketFromManager();
+
+    if(noOutstandingCalls)
+    {
+        CriticalSectionScoped cs(_ptrDeleteCrit);
+        _safeTodelete = true;
+        _ptrDeleteCond->Wake();
+    }
+}
+
+// Blocks the calling thread until it is safe to delete this socket, i.e.
+// until _safeTodelete has been set by OutstandingCallCompleted() or
+// DisableNewOutstandingCalls().
+void UdpSocket2Windows::WaitForOutstandingCalls()
+{
+    CriticalSectionScoped cs(_ptrDeleteCrit);
+    while(!_safeTodelete)
+    {
+        // Re-check the predicate after each wake-up (guards against spurious
+        // wake-ups).
+        _ptrDeleteCond->SleepCS(*_ptrDeleteCrit);
+    }
+}
+
+// Detaches this socket from its UdpSocketManager, if it was ever added.
+// Only called once new outstanding calls have been disabled.
+void UdpSocket2Windows::RemoveSocketFromManager()
+{
+    // New outstanding calls should be disabled at this point.
+    assert(_outstandingCallsDisabled.Value() != 0);
+
+    if(!_addedToMgr)
+    {
+        return;
+    }
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "calling UdpSocketManager::RemoveSocket()");
+    if(_mgr->RemoveSocket(this))
+    {
+        _addedToMgr=false;
+    }
+}
+
+// Takes a shared (read) lock on the socket and reports whether it is still
+// valid. On success the caller holds the shared lock and must pair this
+// call with ReleaseSocket(); on failure the lock has already been released.
+bool UdpSocket2Windows::AquireSocket()
+{
+    _ptrSocketRWLock->AcquireLockShared();
+    if(_socket == INVALID_SOCKET)
+    {
+        // Socket already invalidated; drop the lock again so the caller
+        // never has to release it on failure.
+        _ptrSocketRWLock->ReleaseLockShared();
+        return false;
+    }
+    return true;
+}
+
+// Releases the shared lock taken by a successful AquireSocket() call.
+void UdpSocket2Windows::ReleaseSocket()
+{
+    _ptrSocketRWLock->ReleaseLockShared();
+}
+
+// Closes the underlying socket (if still open) and marks it invalid. Takes
+// the exclusive (write) lock, so it blocks until all shared-lock holders
+// (AquireSocket()) have released the socket.
+// Always returns true.
+bool UdpSocket2Windows::InvalidateSocket()
+{
+    _ptrSocketRWLock->AcquireLockExclusive();
+    if(_socket == INVALID_SOCKET)
+    {
+        _ptrSocketRWLock->ReleaseLockExclusive();
+        return true;
+    }
+    // Give the socket back to the system. All socket calls will fail from now
+    // on.
+    if(closesocket(_socket) == SOCKET_ERROR)
+    {
+        // NOTE(review): casting `this` to WebRtc_Word32 truncates the
+        // pointer on 64-bit builds; the value is only used in the trace text.
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::InvalidateSocket() WSAerror: %d",
+                     (WebRtc_Word32)this, WSAGetLastError());
+    }
+    _socket = INVALID_SOCKET;
+    _ptrSocketRWLock->ReleaseLockExclusive();
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket2_windows.h b/trunk/src/modules/udp_transport/source/udp_socket2_windows.h
new file mode 100644
index 0000000..c0d68bb
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket2_windows.h
@@ -0,0 +1,171 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_WINDOWS_H_
+
+// Disable deprecation warning from traffic.h
+#pragma warning(disable : 4995)
+
+// Don't change include order for these header files.
+#include <Winsock2.h>
+#include <Ntddndis.h>
+#include <traffic.h>
+
+#include "atomic32_wrapper.h"
+#include "condition_variable_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "udp_socket_wrapper.h"
+#include "udp_socket2_manager_windows.h"
+
+namespace webrtc {
+class UdpSocket2ManagerWindows;
+class TrafficControlWindows;
+struct PerIoContext;
+
+// UDP socket for Windows built on overlapped WinSock2 I/O (see PostRecv()
+// and IOCompleted()) with QoS support: GQoS flow specifications (SetQos()),
+// DSCP (SetTOS()) and 802.1p priority (SetPCP()) via the traffic control
+// API. Deletion is synchronized with outstanding asynchronous calls through
+// NewOutstandingCall()/OutstandingCallCompleted()/WaitForOutstandingCalls().
+class UdpSocket2Windows : public UdpSocketWrapper
+{
+public:
+    UdpSocket2Windows(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                      bool ipV6Enable = false, bool disableGQOS = false);
+    virtual ~UdpSocket2Windows();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool ValidHandle();
+
+    virtual bool SetCallback(CallbackObj, IncomingSocketCallback);
+
+    virtual bool Bind(const SocketAddress& name);
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen);
+
+    virtual bool StartReceiving(const WebRtc_UWord32 receiveBuffers);
+    // Defaults to eight outstanding receive buffers.
+    virtual inline bool StartReceiving() {return StartReceiving(8);}
+    virtual bool StopReceiving();
+
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to);
+
+    virtual void CloseBlocking();
+
+    virtual SOCKET GetFd() { return _socket;}
+    virtual bool SetQos(WebRtc_Word32 serviceType, WebRtc_Word32 tokenRate,
+                        WebRtc_Word32 bucketSize, WebRtc_Word32 peekBandwith,
+                        WebRtc_Word32 minPolicedSize, WebRtc_Word32 maxSduSize,
+                        const SocketAddress &stRemName,
+                        WebRtc_Word32 overrideDSCP = 0);
+
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType);
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 pcp);
+
+    // Number of receive buffers currently posted.
+    virtual WebRtc_UWord32 ReceiveBuffers(){return _receiveBuffers.Value();}
+
+protected:
+    // Called when an overlapped I/O operation finishes.
+    void IOCompleted(PerIoContext* pIOContext, WebRtc_UWord32 ioSize,
+                     WebRtc_UWord32 error);
+
+    WebRtc_Word32 PostRecv();
+    // Use pIoContext to post a new WSARecvFrom(..).
+    WebRtc_Word32 PostRecv(PerIoContext* pIoContext);
+
+private:
+    friend class UdpSocket2WorkerWindows;
+
+    // Set traffic control (TC) flow, adding it to the interface that matches
+    // this socket's address.
+    // A filter is created and added to the flow.
+    // The flow consists of:
+    // (1) QoS send and receive information (flow specifications).
+    // (2) A DS object (for specifying exact DSCP value).
+    // (3) Possibly a traffic object (for specifying exact 802.1p priority (PCP)
+    //     value).
+    //
+    // dscp values:
+    // -1   don't change the current dscp value.
+    // 0    don't add any flow to TC, unless pcp is specified.
+    // 1-63 Add a flow to TC with the specified dscp value.
+    // pcp values:
+    // -2  Don't add pcp info to the flow, (3) will not be added.
+    // -1  Don't change the current value.
+    // 0-7 Add pcp info to the flow with the specified value,
+    //     (3) will be added.
+    //
+    // If both dscp and pcp are -1 no flow will be created or added to TC.
+    // If dscp is 0 and pcp is 0-7 (1), (2) and (3) will be created.
+    // Note: input parameter values are assumed to be in valid range, checks
+    // must be done by caller.
+    WebRtc_Word32 SetTrafficControl(WebRtc_Word32 dscp, WebRtc_Word32 pcp,
+                                    const struct sockaddr_in* name,
+                                    FLOWSPEC* send = NULL,
+                                    FLOWSPEC* recv = NULL);
+    // Fills in |f|; only serviceType and tokenRate are applied, all other
+    // fields are set to QOS_NOT_SPECIFIED.
+    WebRtc_Word32 CreateFlowSpec(WebRtc_Word32 serviceType,
+                                 WebRtc_Word32 tokenRate,
+                                 WebRtc_Word32 bucketSize,
+                                 WebRtc_Word32 peekBandwith,
+                                 WebRtc_Word32 minPolicedSize,
+                                 WebRtc_Word32 maxSduSize, FLOWSPEC *f);
+
+    WebRtc_Word32 _id;
+    RWLockWrapper* _ptrCbRWLock;
+    IncomingSocketCallback _incomingCb;
+    CallbackObj _obj;
+    bool _qos;
+
+    SocketAddress _remoteAddr;
+    SOCKET _socket;
+    WebRtc_Word32 _iProtocol;
+    UdpSocket2ManagerWindows* _mgr;
+
+    CriticalSectionWrapper* _pCrit;
+    Atomic32Wrapper _outstandingCalls;
+    Atomic32Wrapper _outstandingCallComplete;
+    volatile bool _terminate;
+    volatile bool _addedToMgr;
+
+    // Used to signal that all outstanding calls have completed and deletion
+    // may proceed (see WaitForOutstandingCalls()).
+    CriticalSectionWrapper* _ptrDeleteCrit;
+    ConditionVariableWrapper* _ptrDeleteCond;
+    bool _safeTodelete;
+
+    RWLockWrapper* _ptrDestRWLock;
+    Atomic32Wrapper _outstandingCallsDisabled; // 0 = false, 1 = true
+    bool NewOutstandingCall();
+    void OutstandingCallCompleted();
+    void DisableNewOutstandingCalls();
+    void WaitForOutstandingCalls();
+
+    void RemoveSocketFromManager();
+
+    // RWLockWrapper is used as a reference counter for the socket. Write lock
+    // is used for creating and deleting socket. Read lock is used for
+    // accessing the socket.
+    RWLockWrapper* _ptrSocketRWLock;
+    bool AquireSocket();
+    void ReleaseSocket();
+    bool InvalidateSocket();
+
+    // Traffic control handles and structure pointers.
+    HANDLE _clientHandle;
+    HANDLE _flowHandle;
+    HANDLE _filterHandle;
+    PTC_GEN_FLOW _flow;
+    // TrafficControlWindows implements TOS and PCP.
+    TrafficControlWindows* _gtc;
+    // Holds the current pcp value. Can be -2 or 0 - 7.
+    int _pcp;
+
+    Atomic32Wrapper _receiveBuffers;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_WINDOWS_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_manager_posix.cc b/trunk/src/modules/udp_transport/source/udp_socket_manager_posix.cc
new file mode 100644
index 0000000..889fb2f
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_manager_posix.cc
@@ -0,0 +1,429 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_manager_posix.h"
+
+#include <strings.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <time.h>
+#include <unistd.h>
+
+#include "trace.h"
+#include "udp_socket_posix.h"
+
+namespace webrtc {
+// Constructs the POSIX socket-manager facade with inert defaults; the real
+// setup (id, worker-thread count, per-thread impl objects) happens in Init().
+UdpSocketManagerPosix::UdpSocketManagerPosix()
+    : UdpSocketManager(),
+      _id(-1),
+      _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+      // 0, not -1: the destructor iterates _socketMgr[0.._numberOfSocketMgr)
+      // even when Init() was never called. -1 wraps to 255 in this unsigned
+      // (WebRtc_UWord8) member and would make the destructor read far past
+      // the MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX-element array.
+      _numberOfSocketMgr(0),
+      _incSocketMgrNextTime(0),
+      _nextSocketMgrToAssign(0),
+      _socketMgr()
+{
+}
+
+// One-time initialization: records the trace id, clamps the requested number
+// of worker threads to MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX and creates one
+// UdpSocketManagerPosixImpl (select-loop thread) per worker.
+// numOfWorkThreads is updated in place when clamped so the caller learns the
+// count actually used. Returns false if Init() was already called.
+bool UdpSocketManagerPosix::Init(WebRtc_Word32 id,
+                                 WebRtc_UWord8& numOfWorkThreads) {
+    CriticalSectionScoped cs(_critSect);
+    if ((_id != -1) || (_numOfWorkThreads != 0)) {
+        assert(_id != -1);
+        assert(_numOfWorkThreads != 0);
+        return false;
+    }
+
+    _id = id;
+    // Clamp before storing so that WorkThreads() and the caller both see the
+    // number of threads actually created (the original stored the unclamped
+    // request in _numOfWorkThreads).
+    if (MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX < numOfWorkThreads)
+    {
+        numOfWorkThreads = MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX;
+    }
+    _numberOfSocketMgr = numOfWorkThreads;
+    _numOfWorkThreads = numOfWorkThreads;
+    for (int i = 0; i < _numberOfSocketMgr; i++)
+    {
+        _socketMgr[i] = new UdpSocketManagerPosixImpl();
+    }
+    return true;
+}
+
+
+// Stops all worker impls, then deletes them and the critical section.
+// NOTE(review): the trace string names the constructor, but this is the
+// destructor (string left unchanged here since it is runtime output).
+UdpSocketManagerPosix::~UdpSocketManagerPosix()
+{
+    Stop();
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::UdpSocketManagerPosix()",
+                 _numberOfSocketMgr);
+
+    // Delete the impls only after Stop() so no worker thread is still
+    // touching them.
+    for(int i = 0;i < _numberOfSocketMgr; i++)
+    {
+        delete _socketMgr[i];
+    }
+    delete _critSect;
+}
+
+// Updates the id used for tracing. Always succeeds (returns 0).
+WebRtc_Word32 UdpSocketManagerPosix::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// Starts every worker impl, bailing out at (and reporting) the first one
+// that fails to start. Returns true only if all impls started.
+bool UdpSocketManagerPosix::Start()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::Start()",
+                 _numberOfSocketMgr);
+
+    CriticalSectionScoped lock(_critSect);
+    bool allStarted = true;
+    for (int i = 0; allStarted && i < _numberOfSocketMgr; ++i)
+    {
+        allStarted = _socketMgr[i]->Start();
+    }
+    if (!allStarted)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::Start() error starting socket managers",
+            _numberOfSocketMgr);
+    }
+    return allStarted;
+}
+
+// Stops every worker impl, bailing out at (and reporting) the first one
+// that fails to stop. Returns true only if all impls stopped.
+bool UdpSocketManagerPosix::Stop()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::Stop()",_numberOfSocketMgr);
+
+    CriticalSectionScoped lock(_critSect);
+    bool allStopped = true;
+    for (int i = 0; allStopped && i < _numberOfSocketMgr; ++i)
+    {
+        allStopped = _socketMgr[i]->Stop();
+    }
+    if (!allStopped)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::Stop() there are still active socket "
+            "managers",
+            _numberOfSocketMgr);
+    }
+    return allStopped;
+}
+
+// Hands the socket to the impl currently selected by the round-robin state.
+// The _incSocketMgrNextTime toggle means the next impl is chosen only every
+// SECOND call, so consecutive pairs of sockets land on the same worker
+// thread — presumably so related sockets (e.g. an RTP/RTCP pair) share a
+// thread; TODO(review) confirm intent.
+bool UdpSocketManagerPosix::AddSocket(UdpSocketWrapper* s)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::AddSocket()",_numberOfSocketMgr);
+
+    _critSect->Enter();
+    bool retVal = _socketMgr[_nextSocketMgrToAssign]->AddSocket(s);
+    if(!retVal)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::AddSocket() failed to add socket to\
+ manager",
+            _numberOfSocketMgr);
+    }
+
+    // Distribute sockets on UdpSocketManagerPosixImpls in a round-robin
+    // fashion.
+    // NOTE(review): the round-robin state advances even when AddSocket()
+    // above failed.
+    if(_incSocketMgrNextTime == 0)
+    {
+        _incSocketMgrNextTime++;
+    } else {
+        _incSocketMgrNextTime = 0;
+        _nextSocketMgrToAssign++;
+        if(_nextSocketMgrToAssign >= _numberOfSocketMgr)
+        {
+            _nextSocketMgrToAssign = 0;
+        }
+    }
+    _critSect->Leave();
+    return retVal;
+}
+
+// Offers the removal request to each worker impl in turn until one of them
+// owns the socket; reports an error if none did. Returns true on success.
+bool UdpSocketManagerPosix::RemoveSocket(UdpSocketWrapper* s)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::RemoveSocket()",
+                 _numberOfSocketMgr);
+
+    CriticalSectionScoped lock(_critSect);
+    bool removed = false;
+    for (int i = 0; i < _numberOfSocketMgr; ++i)
+    {
+        if (_socketMgr[i]->RemoveSocket(s))
+        {
+            removed = true;
+            break;
+        }
+    }
+    if (!removed)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::RemoveSocket() failed to remove socket\
+ from manager",
+            _numberOfSocketMgr);
+    }
+    return removed;
+}
+
+
+// Creates the worker's thread object and list lock. The thread is only
+// constructed here; it does not run until Start() is called.
+UdpSocketManagerPosixImpl::UdpSocketManagerPosixImpl()
+{
+    _critSectList = CriticalSectionWrapper::CreateCriticalSection();
+    _thread = ThreadWrapper::CreateThread(UdpSocketManagerPosixImpl::Run, this,
+                                          kRealtimePriority,
+                                          "UdpSocketManagerPosixImplThread");
+    FD_ZERO(&_readFds);
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocketManagerPosix created");
+}
+
+// Tears down the worker. Assumes the thread was already stopped via Stop()
+// (UdpSocketManagerPosix::~UdpSocketManagerPosix calls Stop() before
+// deleting the impls) — TODO(review) confirm ThreadWrapper's destructor
+// behavior when deleted while running.
+UdpSocketManagerPosixImpl::~UdpSocketManagerPosixImpl()
+{
+    if(_thread != NULL)
+    {
+        delete _thread;
+    }
+
+    if (_critSectList != NULL)
+    {
+        // Flush any queued add/remove requests so pending sockets are freed
+        // through the normal path before the bulk delete below.
+        UpdateSocketMap();
+
+        _critSectList->Enter();
+
+        // Delete every socket still registered in the map.
+        MapItem* item = _socketMap.First();
+        while(item)
+        {
+            UdpSocketPosix* s = static_cast<UdpSocketPosix*>(item->GetItem());
+            _socketMap.Erase(item);
+            item = _socketMap.First();
+            delete s;
+        }
+        _critSectList->Leave();
+
+        delete _critSectList;
+    }
+
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocketManagerPosix deleted");
+}
+
+// Launches the select-loop thread. Returns false when no thread object was
+// created; otherwise returns ThreadWrapper::Start()'s result.
+bool UdpSocketManagerPosixImpl::Start()
+{
+    if (_thread == NULL)
+    {
+        return false;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Start UdpSocketManagerPosix");
+    unsigned int threadId = 0;
+    return _thread->Start(threadId);
+}
+
+// Stops the select-loop thread. Returns true when there is no thread to
+// stop; otherwise returns ThreadWrapper::Stop()'s result.
+bool UdpSocketManagerPosixImpl::Stop()
+{
+    if (_thread == NULL)
+    {
+        return true;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Stop UdpSocketManagerPosix");
+    return _thread->Stop();
+}
+
+// One pass of the receive loop: fold queued add/remove requests into the
+// socket map, rebuild the read fd_set, select() with a 10 ms timeout and
+// dispatch HasIncoming() to sockets with pending data. Always returns true
+// so the ThreadWrapper keeps calling it.
+bool UdpSocketManagerPosixImpl::Process()
+{
+    bool doSelect = false;
+    // Timeout for select(): 10 ms (tv_usec is in microseconds; the original
+    // comment incorrectly said 1 second).
+    struct timeval timeout;
+    timeout.tv_sec = 0;
+    timeout.tv_usec = 10000;
+    MapItem* it;
+
+    FD_ZERO(&_readFds);
+
+    UpdateSocketMap();
+
+    unsigned int maxFd = 0;
+    for (it = _socketMap.First(); it != NULL; it = _socketMap.Next(it))
+    {
+        // The original computed maxFd and set doSelect twice per iteration;
+        // once is enough.
+        doSelect = true;
+        if (it->GetUnsignedId() > maxFd)
+        {
+            maxFd = it->GetUnsignedId();
+        }
+        FD_SET(it->GetUnsignedId(), &_readFds);
+    }
+
+    int num = 0;
+    if (doSelect)
+    {
+        num = select(maxFd + 1, &_readFds, NULL, NULL, &timeout);
+    }
+    if (!doSelect || num == SOCKET_ERROR)
+    {
+        // No sockets to wait on, or select() failed: sleep 10 ms to avoid
+        // busy-looping, then let the thread call us again.
+        timespec t;
+        t.tv_sec = 0;
+        t.tv_nsec = 10000 * 1000;
+        nanosleep(&t, NULL);
+        return true;
+    }
+
+    // Dispatch until all ready descriptors reported by select() are handled.
+    for (it = _socketMap.First(); it != NULL && num > 0;
+         it = _socketMap.Next(it))
+    {
+        if (FD_ISSET(it->GetUnsignedId(), &_readFds))
+        {
+            static_cast<UdpSocketPosix*>(it->GetItem())->HasIncoming();
+            num--;
+        }
+    }
+    return true;
+}
+
+// Thread entry trampoline for ThreadWrapper; returning true keeps the
+// thread calling Process() again.
+bool UdpSocketManagerPosixImpl::Run(ThreadObj obj)
+{
+    UdpSocketManagerPosixImpl* mgr =
+        static_cast<UdpSocketManagerPosixImpl*>(obj);
+    return mgr->Process();
+}
+
+// Queues a socket for addition; the select thread folds the add list into
+// the socket map in UpdateSocketMap(). Rejects sockets whose descriptor is
+// invalid or does not fit in an fd_set.
+bool UdpSocketManagerPosixImpl::AddSocket(UdpSocketWrapper* s)
+{
+    UdpSocketPosix* posixSock = static_cast<UdpSocketPosix*>(s);
+    if (posixSock->GetFd() == INVALID_SOCKET ||
+        posixSock->GetFd() >= FD_SETSIZE)
+    {
+        return false;
+    }
+    _critSectList->Enter();
+    _addList.PushBack(s);
+    _critSectList->Leave();
+    return true;
+}
+
+// Queues the socket's fd for removal if this impl owns it (in the add list
+// or in the socket map). Returns false when the socket belongs to a
+// different impl so the caller can try the next one.
+bool UdpSocketManagerPosixImpl::RemoveSocket(UdpSocketWrapper* s)
+{
+    // Loop-invariant: compute the fd being removed once (the original
+    // re-read it on every add-list iteration).
+    const unsigned int removeFD = static_cast<UdpSocketPosix*>(s)->GetFd();
+
+    // Put in remove list if this is the correct UdpSocketManagerPosixImpl.
+    _critSectList->Enter();
+
+    // If the socket is in the add list it's safe to remove and delete it.
+    for (ListItem* addListItem = _addList.First(); addListItem != NULL;
+         addListItem = _addList.Next(addListItem))
+    {
+        UdpSocketPosix* addSocket =
+            static_cast<UdpSocketPosix*>(addListItem->GetItem());
+        if (removeFD == addSocket->GetFd())
+        {
+            _removeList.PushBack(removeFD);
+            _critSectList->Leave();
+            return true;
+        }
+    }
+
+    // Checking the socket map is safe since all Erase and Insert calls to this
+    // map are also protected by _critSectList.
+    if (_socketMap.Find(removeFD) != NULL)
+    {
+        _removeList.PushBack(removeFD);
+        _critSectList->Leave();
+        return true;
+    }
+    _critSectList->Leave();
+    return false;
+}
+
+// Applies queued socket removals and additions to _socketMap. Called from
+// the select thread (Process) and from the destructor; all list/map
+// mutation happens under _critSectList.
+void UdpSocketManagerPosixImpl::UpdateSocketMap()
+{
+    // Remove items in remove list.
+    _critSectList->Enter();
+    while(!_removeList.Empty())
+    {
+        UdpSocketPosix* deleteSocket = NULL;
+        unsigned int removeFD = _removeList.First()->GetUnsignedItem();
+
+        // If the socket is in the add list it hasn't been added to the socket
+        // map yet. Just remove the socket from the add list.
+        ListItem* addListItem = _addList.First();
+        while(addListItem)
+        {
+            UdpSocketPosix* addSocket = (UdpSocketPosix*)addListItem->GetItem();
+            unsigned int addFD = addSocket->GetFd();
+            if(removeFD == addFD)
+            {
+                deleteSocket = addSocket;
+                _addList.Erase(addListItem);
+                break;
+            }
+            addListItem = _addList.Next(addListItem);
+        }
+
+        // Find and remove socket from _socketMap.
+        MapItem* it = _socketMap.Find(removeFD);
+        if(it != NULL)
+        {
+            UdpSocketPosix* socket =
+                static_cast<UdpSocketPosix*>(it->GetItem());
+            if(socket)
+            {
+                deleteSocket = socket;
+            }
+            _socketMap.Erase(it);
+        }
+        if(deleteSocket)
+        {
+            // Signal the socket it is about to be freed (close-blocking
+            // handshake in UdpSocketPosix) before deleting it.
+            deleteSocket->ReadyForDeletion();
+            delete deleteSocket;
+        }
+        _removeList.PopFront();
+    }
+
+    // Add sockets from add list.
+    while(!_addList.Empty())
+    {
+        UdpSocketPosix* s =
+            static_cast<UdpSocketPosix*>(_addList.First()->GetItem());
+        if(s)
+        {
+            _socketMap.Insert(s->GetFd(), s);
+        }
+        _addList.PopFront();
+    }
+    _critSectList->Leave();
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_manager_posix.h b/trunk/src/modules/udp_transport/source/udp_socket_manager_posix.h
new file mode 100644
index 0000000..c89aa13
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_manager_posix.h
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_POSIX_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_POSIX_H_
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "critical_section_wrapper.h"
+#include "list_wrapper.h"
+#include "map_wrapper.h"
+#include "thread_wrapper.h"
+#include "udp_socket_manager_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+#define MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX 8
+
+namespace webrtc {
+
+class ConditionVariableWrapper;
+class UdpSocketManagerPosixImpl;
+
+// POSIX implementation of UdpSocketManager. Fans sockets out over up to
+// MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX UdpSocketManagerPosixImpl workers,
+// each running its own select() thread.
+class UdpSocketManagerPosix : public UdpSocketManager
+{
+public:
+    UdpSocketManagerPosix();
+    virtual ~UdpSocketManagerPosix();
+
+    virtual bool Init(WebRtc_Word32 id,
+                      WebRtc_UWord8& numOfWorkThreads);
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    virtual bool AddSocket(UdpSocketWrapper* s);
+    virtual bool RemoveSocket(UdpSocketWrapper* s);
+private:
+    WebRtc_Word32 _id;                    // Trace id.
+    CriticalSectionWrapper* _critSect;    // Guards the members below.
+    WebRtc_UWord8 _numberOfSocketMgr;     // Live entries in _socketMgr.
+    WebRtc_UWord8 _incSocketMgrNextTime;  // Round-robin toggle (0/1).
+    WebRtc_UWord8 _nextSocketMgrToAssign; // Impl receiving the next socket.
+    UdpSocketManagerPosixImpl* _socketMgr[MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX];
+};
+
+// One select() worker: owns a thread that polls the registered sockets and
+// dispatches incoming data. Add/remove requests are queued in lists and
+// folded into _socketMap on the worker thread (see UpdateSocketMap).
+class UdpSocketManagerPosixImpl
+{
+public:
+    UdpSocketManagerPosixImpl();
+    virtual ~UdpSocketManagerPosixImpl();
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    virtual bool AddSocket(UdpSocketWrapper* s);
+    virtual bool RemoveSocket(UdpSocketWrapper* s);
+
+protected:
+    static bool Run(ThreadObj obj);
+    bool Process();
+    void UpdateSocketMap();
+
+private:
+    ThreadWrapper* _thread;                 // select()-loop thread.
+    CriticalSectionWrapper* _critSectList;  // Guards the lists and map updates.
+
+    fd_set _readFds;
+
+    MapWrapper _socketMap;    // fd -> UdpSocketPosix*.
+    ListWrapper _addList;     // Sockets queued for addition.
+    ListWrapper _removeList;  // fds queued for removal.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_POSIX_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_manager_windows.cc b/trunk/src/modules/udp_transport/source/udp_socket_manager_windows.cc
new file mode 100644
index 0000000..d817d74
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_manager_windows.cc
@@ -0,0 +1,283 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_manager_windows.h"
+#include "udp_socket_windows.h"
+
+namespace webrtc {
+WebRtc_UWord32 UdpSocketManagerWindows::_numOfActiveManagers = 0;
+
+// Creates the manager's thread object (not started until Start()) and
+// bumps the process-wide manager count used to gate WSACleanup() in the
+// destructor.
+// NOTE(review): no matching WSAStartup() is visible here — presumably done
+// elsewhere in the transport code; confirm before relying on the
+// WSACleanup() pairing.
+UdpSocketManagerWindows::UdpSocketManagerWindows()
+    : UdpSocketManager(),
+      _id(-1)
+{
+    const WebRtc_Word8* threadName = "UdpSocketManagerWindows_Thread";
+    _critSectList = CriticalSectionWrapper::CreateCriticalSection();
+    _thread = ThreadWrapper::CreateThread(UdpSocketManagerWindows::Run,
+                                          this, kRealtimePriority, threadName);
+    FD_ZERO(&_readFds);
+    FD_ZERO(&_writeFds);
+    FD_ZERO(&_exceptFds);
+    _numOfActiveManagers++;
+}
+
+// One-time initialization; records the trace id and worker-thread count.
+// Returns false on a repeated call — in debug builds the asserts fire first,
+// since the condition guarantees at least one of them is violated.
+bool UdpSocketManagerWindows::Init(WebRtc_Word32 id,
+                                   WebRtc_UWord8& numOfWorkThreads) {
+    CriticalSectionScoped cs(_critSectList);
+    if ((_id != -1) || (_numOfWorkThreads != 0)) {
+        assert(_id == -1);
+        assert(_numOfWorkThreads == 0);
+        return false;
+    }
+    _id = id;
+    // Unlike the POSIX version, a single select() thread services all
+    // sockets; the count is only recorded here.
+    _numOfWorkThreads = numOfWorkThreads;
+    return true;
+}
+
+// Stops the worker thread, deletes every socket still held in the map and
+// add list, and — when the last manager in the process goes away — calls
+// WSACleanup().
+UdpSocketManagerWindows::~UdpSocketManagerWindows()
+{
+    Stop();
+    if(_thread != NULL)
+    {
+        delete _thread;
+    }
+
+    if (_critSectList != NULL)
+    {
+        _critSectList->Enter();
+
+        // Delete every socket still registered in the map.
+        while(!_socketMap.empty())
+        {
+            std::map<SOCKET, UdpSocketWindows*>::iterator it =
+                _socketMap.begin();
+            UdpSocketWindows* s = static_cast<UdpSocketWindows*>(it->second);
+            _socketMap.erase(it);
+            delete s;
+        }
+        // Pending removal ids refer to sockets deleted above; just drop them.
+        _removeList.erase(_removeList.begin(), _removeList.end());
+
+        // Sockets queued for addition were never placed in the map; delete
+        // them directly.
+        while(!_addList.empty())
+        {
+            std::list<UdpSocketWindows*>::iterator it = _addList.begin();
+            UdpSocketWindows* s = static_cast<UdpSocketWindows*>(*it);
+            _addList.erase(it);
+            delete s;
+        }
+        _critSectList->Leave();
+
+        delete _critSectList;
+    }
+
+    _numOfActiveManagers--;
+
+    // Release Winsock only when the last manager is destroyed.
+    if (_numOfActiveManagers == 0)
+        WSACleanup();
+}
+
+// Updates the id used for tracing. Always succeeds (returns 0).
+WebRtc_Word32 UdpSocketManagerWindows::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// Launches the select-loop thread. Returns false when no thread object was
+// created; otherwise returns ThreadWrapper::Start()'s result.
+bool UdpSocketManagerWindows::Start()
+{
+    // Initialize the thread-id out-parameter so it is never read while
+    // uninitialized (the POSIX implementation already does this).
+    unsigned int id = 0;
+    if (_thread == NULL)
+        return false;
+
+    return _thread->Start(id);
+}
+
+// Stops the select-loop thread. Returns true when there is no thread to
+// stop; otherwise returns ThreadWrapper::Stop()'s result.
+bool UdpSocketManagerWindows::Stop()
+{
+    if (_thread == NULL)
+        return true;
+
+    return _thread->Stop();
+}
+
+// One pass of the event loop: apply queued removals/additions to
+// _socketMap, select() on read/write interest with a 10 ms timeout, and
+// dispatch HasIncoming()/SetWritable(). Always returns true so the
+// ThreadWrapper keeps calling it.
+bool UdpSocketManagerWindows::Process()
+{
+    bool doSelect = false;
+    // Timeout for select(): 10 ms (tv_usec is in microseconds; the original
+    // comment incorrectly said 1 second).
+    timeval timeout;
+    timeout.tv_sec = 0;
+    timeout.tv_usec = 10000;
+
+    FD_ZERO(&_readFds);
+    FD_ZERO(&_writeFds);
+    FD_ZERO(&_exceptFds);
+
+    _critSectList->Enter();
+    // Remove sockets that have been registered for removal. Pop each id
+    // unconditionally: the original popped only when the id was found in
+    // the map, which made this loop spin forever on a stale id.
+    while(!_removeList.empty())
+    {
+        SOCKET id = *_removeList.begin();
+        _removeList.pop_front();
+        std::map<SOCKET, UdpSocketWindows*>::iterator it = _socketMap.find(id);
+        if(it != _socketMap.end())
+        {
+            UdpSocketWindows* s = it->second;
+            _socketMap.erase(it);
+            delete s;
+        }
+    }
+
+    // Add sockets that have been registered for being added.
+    while (!_addList.empty())
+    {
+        UdpSocketWindows* s = *_addList.begin();
+        if(s)
+        {
+            _socketMap[s->GetFd()] = s;
+        }
+        _addList.pop_front();
+    }
+    _critSectList->Leave();
+
+    // Build the fd sets: read interest for sockets that want incoming data,
+    // write interest for sockets currently marked non-writable.
+    std::map<SOCKET, UdpSocketWindows*>::iterator it = _socketMap.begin();
+    while(it != _socketMap.end())
+    {
+        UdpSocketWindows* s = it->second;
+        if (s->WantsIncoming())
+        {
+            doSelect = true;
+            FD_SET(it->first, &_readFds);
+        }
+        if(!s->IsWritable())
+        {
+            FD_SET(it->first, &_writeFds);
+            doSelect = true;
+        }
+        it++;
+    }
+
+    WebRtc_Word32 num = 0;
+    if (doSelect)
+    {
+        num = select(0, &_readFds, &_writeFds, &_exceptFds, &timeout);
+        if (num == SOCKET_ERROR)
+        {
+            Sleep(10);
+            return true;
+        }
+    }else
+    {
+        // Nothing to wait on: sleep 10 ms to avoid busy-looping.
+        Sleep(10);
+        return true;
+    }
+
+    it = _socketMap.begin();
+    while (it != _socketMap.end() && num > 0)
+    {
+        if (FD_ISSET(it->first, &_readFds))
+        {
+            it->second->HasIncoming();
+            num--;
+        }
+        if (FD_ISSET(it->first, &_writeFds))
+        {
+            // Socket available for writing.
+            it->second->SetWritable();
+            num--;
+        }
+        // The original never advanced the iterator, spinning forever on the
+        // first map entry.
+        ++it;
+    }
+    return true;
+}
+
+// Thread entry trampoline for ThreadWrapper; returning true keeps the
+// thread calling Process() again. (Removed the stray ';' after the function
+// body, which is ill-formed under -pedantic.)
+bool UdpSocketManagerWindows::Run(ThreadObj obj)
+{
+    UdpSocketManagerWindows* mgr = static_cast<UdpSocketManagerWindows*>(obj);
+    return mgr->Process();
+}
+
+// Queues a socket for addition by the worker thread. If the descriptor is
+// already present in the map, this normally fails — unless the mapped
+// socket is pending removal (descriptor re-use), in which case the stale
+// socket is deleted and the new one queued.
+bool UdpSocketManagerWindows::AddSocket(UdpSocketWrapper* s)
+{
+    UdpSocketWindows* winSock = static_cast<UdpSocketWindows*>(s);
+
+    _critSectList->Enter();
+    std::map<SOCKET, UdpSocketWindows*>::iterator it =
+        _socketMap.find(winSock->GetFd());
+    if (it != _socketMap.end())
+    {
+        if (!_removeList.empty())
+        {
+            // File descriptors are re-used so it's possible that a socket has
+            // been added with the same file descriptor as a socket that is to
+            // be removed. I.e. the socket that is to be removed is no longer
+            // in use, delete it.
+            // TODO (hellner): removing items from _socketMap may cause race
+            // condition. Fix this.
+            std::list<SOCKET>::iterator removeIt = _removeList.begin();
+            while(removeIt != _removeList.end())
+            {
+                if (*removeIt == winSock->GetFd())
+                {
+                    it = _socketMap.find(*removeIt);
+                    UdpSocketWindows* delete_socket = it->second;
+                    _socketMap.erase(it);
+                    _removeList.erase(removeIt);
+                    delete delete_socket;
+                    _addList.push_back(winSock);
+                    _critSectList->Leave();
+                    return true;
+                }
+                removeIt++;
+            }
+        }
+        // Descriptor already owned by a live socket: reject the add.
+        _critSectList->Leave();
+        return false;
+    }
+
+    _addList.push_back(winSock);
+    _critSectList->Leave();
+    return true;
+}
+
+// Removes (and, when not yet managed, deletes) a socket. Returns true when
+// the socket was queued for removal or deleted from the add list; false
+// when the socket was not managed at all (it is still deleted in that case).
+bool UdpSocketManagerWindows::RemoveSocket(UdpSocketWrapper* s)
+{
+    UdpSocketWindows* winSock = static_cast<UdpSocketWindows*>(s);
+
+    _critSectList->Enter();
+    // If socket is in the add list its safe to just remove it from the list.
+    if (!_addList.empty())
+    {
+        std::list<UdpSocketWindows*>::iterator it = _addList.begin();
+        while(it != _addList.end())
+        {
+            UdpSocketWindows* tempSocket = (*it);
+            // NOTE(review): matching is by descriptor, but only winSock is
+            // deleted. If tempSocket were a different object with the same
+            // fd it would leak here — presumably they are always the same
+            // object; confirm.
+            if (tempSocket->GetFd() == winSock->GetFd())
+            {
+                _addList.erase(it);
+                delete winSock;
+                _critSectList->Leave();
+                return true;
+            }
+            it++;
+        }
+    }
+
+    // If the socket is not even added to the UdpSocketManagerWindows it's
+    // safe to delete the socket.
+    std::map<SOCKET, UdpSocketWindows*>::iterator findIt =
+        _socketMap.find(winSock->GetFd());
+    if (findIt == _socketMap.end())
+    {
+        delete winSock;
+        _critSectList->Leave();
+        return false;
+    }
+
+    // Managed socket: the worker thread deletes it during the next Process()
+    // pass.
+    _removeList.push_back(winSock->GetFd());
+    _critSectList->Leave();
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_manager_windows.h b/trunk/src/modules/udp_transport/source/udp_socket_manager_windows.h
new file mode 100644
index 0000000..06d905a
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_manager_windows.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WINDOWS_H_
+
+#define FD_SETSIZE 1024
+
+#include <winsock2.h>
+#include <map>
+#include <list>
+
+// Don't change the include order.
+// TODO (hellner): all header files should be compilable separately. Fix this.
+#include "udp_socket_manager_wrapper.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+class UdpSocketWindows;
+
+// Windows (Winsock 1, non-WINSOCK2) implementation of UdpSocketManager. A
+// single select() thread services all sockets; add/remove requests are
+// queued and applied by the worker in Process().
+class UdpSocketManagerWindows : public UdpSocketManager
+{
+public:
+    UdpSocketManagerWindows();
+    virtual ~UdpSocketManagerWindows();
+
+    virtual bool Init(WebRtc_Word32 id,
+                      WebRtc_UWord8& numOfWorkThreads);
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    virtual bool AddSocket(UdpSocketWrapper* s);
+    virtual bool RemoveSocket(UdpSocketWrapper* s);
+
+protected:
+    static bool Run(ThreadObj obj);
+    bool Process();
+
+private:
+    WebRtc_Word32 _id;        // Trace id.
+    ThreadWrapper* _thread;   // select()-loop thread.
+
+    fd_set _readFds;
+    fd_set _writeFds;
+    fd_set _exceptFds;
+
+    CriticalSectionWrapper* _critSectList;  // Guards the lists and map.
+
+    std::map<SOCKET, UdpSocketWindows*> _socketMap;
+    std::list<UdpSocketWindows*> _addList;    // Sockets queued for addition.
+    std::list<SOCKET> _removeList;            // fds queued for removal.
+
+    // Process-wide manager count; WSACleanup() runs when it drops to 0.
+    static WebRtc_UWord32 _numOfActiveManagers;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WINDOWS_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_manager_wrapper.cc b/trunk/src/modules/udp_transport/source/udp_socket_manager_wrapper.cc
new file mode 100644
index 0000000..9f1162e
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_manager_wrapper.cc
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_manager_wrapper.h"
+
+#include <cassert>
+
+#ifdef _WIN32
+#include "fix_interlocked_exchange_pointer_win.h"
+#include "udp_socket_manager_windows.h"
+#include "udp_socket2_manager_windows.h"
+#else
+#include "udp_socket_manager_posix.h"
+#endif
+
+namespace webrtc {
+// Factory method selecting the platform implementation.
+UdpSocketManager* UdpSocketManager::CreateInstance()
+{
+#if defined(_WIN32)
+    #if (defined(USE_WINSOCK2))
+        return static_cast<UdpSocketManager*>(
+            new UdpSocket2ManagerWindows());
+    #else
+        // The original assigned "numOfWorkThreads = 1;" here, but
+        // CreateInstance() has no such parameter, so this configuration did
+        // not compile. UdpSocketManagerWindows runs a single worker thread
+        // regardless.
+        return static_cast<UdpSocketManager*>(
+            new UdpSocketManagerWindows());
+    #endif
+#else
+    return new UdpSocketManagerPosix();
+#endif
+}
+
+// Reference-counted singleton accessor. On kAddRef the instance is
+// (lazily) created; Init() returns true only the first time, so Start() is
+// invoked exactly once per process-wide instance.
+UdpSocketManager* UdpSocketManager::StaticInstance(
+    CountOperation count_operation,
+    const WebRtc_Word32 id,
+    WebRtc_UWord8& numOfWorkThreads)
+{
+    UdpSocketManager* impl =
+        GetStaticInstance<UdpSocketManager>(count_operation);
+    if (count_operation == kAddRef && impl != NULL) {
+        if (impl->Init(id, numOfWorkThreads)) {
+            impl->Start();
+        }
+    }
+    return impl;
+}
+
+// Acquires a reference to the singleton manager, creating and starting it
+// on first use. Pair every Create() with a Return().
+UdpSocketManager* UdpSocketManager::Create(const WebRtc_Word32 id,
+                                           WebRtc_UWord8& numOfWorkThreads)
+{
+    return UdpSocketManager::StaticInstance(kAddRef, id,
+                                            numOfWorkThreads);
+}
+
+// Releases one reference to the singleton manager; the id/thread-count
+// arguments are dummies (only used on the kAddRef path).
+void UdpSocketManager::Return()
+{
+    WebRtc_UWord8 numOfWorkThreads = 0;
+    UdpSocketManager::StaticInstance(kRelease, -1,
+                                     numOfWorkThreads);
+}
+
+// Base constructor: worker-thread count is 0 until Init() runs.
+UdpSocketManager::UdpSocketManager() : _numOfWorkThreads(0)
+{
+}
+
+// Returns the worker-thread count recorded by Init() (0 before Init()).
+WebRtc_UWord8 UdpSocketManager::WorkThreads() const
+{
+    return _numOfWorkThreads;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_manager_wrapper.h b/trunk/src/modules/udp_transport/source/udp_socket_manager_wrapper.h
new file mode 100644
index 0000000..e7bd09e
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_manager_wrapper.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WRAPPER_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WRAPPER_H_
+
+#include "system_wrappers/interface/static_instance.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class UdpSocketWrapper;
+
+// Reference-counted, process-wide singleton that owns the socket polling
+// thread(s). Obtain via Create() and release via Return(); platform
+// subclasses are produced by CreateInstance().
+class UdpSocketManager
+{
+public:
+    // Acquire a reference (creates and starts the manager on first use).
+    static UdpSocketManager* Create(const WebRtc_Word32 id,
+                                    WebRtc_UWord8& numOfWorkThreads);
+    // Release a reference taken by Create().
+    static void Return();
+
+    // Initializes the socket manager. Returns true if the manager wasn't
+    // already initialized.
+    virtual bool Init(WebRtc_Word32 id,
+                      WebRtc_UWord8& numOfWorkThreads) = 0;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    // Start listening to sockets that have been registered via the
+    // AddSocket(..) API.
+    virtual bool Start() = 0;
+    // Stop listening to sockets.
+    virtual bool Stop() = 0;
+
+    // Number of worker threads recorded by Init().
+    virtual WebRtc_UWord8 WorkThreads() const;
+
+    // Register a socket with the socket manager.
+    virtual bool AddSocket(UdpSocketWrapper* s) = 0;
+    // Unregister a socket from the manager.
+    virtual bool RemoveSocket(UdpSocketWrapper* s) = 0;
+
+protected:
+    UdpSocketManager();
+    virtual ~UdpSocketManager() {}
+
+    WebRtc_UWord8 _numOfWorkThreads;
+
+    // Factory method.
+    static UdpSocketManager* CreateInstance();
+
+private:
+    // Friend function to allow the UDP destructor to be accessed from the
+    // instance template.
+    friend UdpSocketManager*
+    GetStaticInstance<UdpSocketManager>(CountOperation count_operation);
+
+    static UdpSocketManager* StaticInstance(
+        CountOperation count_operation,
+        const WebRtc_Word32 id,
+        WebRtc_UWord8& numOfWorkThreads);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WRAPPER_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_posix.cc b/trunk/src/modules/udp_transport/source/udp_socket_posix.cc
new file mode 100644
index 0000000..9d3564d
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_posix.cc
@@ -0,0 +1,275 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_posix.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <netdb.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <time.h>
+#include <unistd.h>
+
+#include "trace.h"
+#include "udp_socket_manager_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+namespace webrtc {
+UdpSocketPosix::UdpSocketPosix(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                               bool ipV6Enable)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                 "UdpSocketPosix::UdpSocketPosix()");
+
+    _wantsIncoming = false;
+    _error = 0;
+    _mgr = mgr;
+
+    _id = id;
+    _obj = NULL;
+    _incomingCb = NULL;
+    _readyForDeletionCond = ConditionVariableWrapper::CreateConditionVariable();
+    _closeBlockingCompletedCond =
+        ConditionVariableWrapper::CreateConditionVariable();
+    _cs = CriticalSectionWrapper::CreateCriticalSection();
+    _readyForDeletion = false;
+    _closeBlockingActive = false;
+    _closeBlockingCompleted= false;
+    if(ipV6Enable)
+    {
+        _socket = socket(AF_INET6, SOCK_DGRAM, IPPROTO_UDP);
+    }
+    else {
+        _socket = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
+    }
+
+    // Set socket to nonblocking mode.
+    int enable_non_blocking = 1;
+    if(ioctl(_socket, FIONBIO, &enable_non_blocking) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, id,
+                     "Failed to make socket nonblocking");
+    }
+    // Set the close-on-exec flag so the descriptor is not leaked into child
+    // processes across an exec() call.
+    if(fcntl(_socket, F_SETFD, FD_CLOEXEC) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, id,
+                     "Failed to set FD_CLOEXEC for socket");
+    }
+}
+
+UdpSocketPosix::~UdpSocketPosix()
+{
+    if(_socket != INVALID_SOCKET)
+    {
+        close(_socket);
+        _socket = INVALID_SOCKET;
+    }
+    if(_readyForDeletionCond)
+    {
+        delete _readyForDeletionCond;
+    }
+
+    if(_closeBlockingCompletedCond)
+    {
+        delete _closeBlockingCompletedCond;
+    }
+
+    if(_cs)
+    {
+        delete _cs;
+    }
+}
+
+WebRtc_Word32 UdpSocketPosix::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+bool UdpSocketPosix::SetCallback(CallbackObj obj, IncomingSocketCallback cb)
+{
+    _obj = obj;
+    _incomingCb = cb;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketPosix(%p)::SetCallback", this);
+
+    if (_mgr->AddSocket(this))
+      {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "UdpSocketPosix(%p)::SetCallback socket added to manager",
+                     this);
+        return true;   // socket is now ready for action
+      }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketPosix(%p)::SetCallback error adding me to mgr",
+                 this);
+    return false;
+}
+
+bool UdpSocketPosix::SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen)
+{
+   if(0 == setsockopt(_socket, level, optname, optval, optlen ))
+   {
+       return true;
+   }
+
+   _error = errno;
+   WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                "UdpSocketPosix::SetSockopt(), error:%d", _error);
+   return false;
+}
+
+WebRtc_Word32 UdpSocketPosix::SetTOS(WebRtc_Word32 serviceType)
+{
+    if (SetSockopt(IPPROTO_IP, IP_TOS ,(WebRtc_Word8*)&serviceType ,4) != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+bool UdpSocketPosix::Bind(const SocketAddress& name)
+{
+    int size = sizeof(sockaddr);
+    if (0 == bind(_socket, reinterpret_cast<const sockaddr*>(&name),size))
+    {
+        return true;
+    }
+    _error = errno;
+    WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                 "UdpSocketPosix::Bind() error: %d",_error);
+    return false;
+}
+
+WebRtc_Word32 UdpSocketPosix::SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                     const SocketAddress& to)
+{
+    int size = sizeof(sockaddr);
+    int retVal = sendto(_socket,buf, len, 0,
+                        reinterpret_cast<const sockaddr*>(&to), size);
+    if(retVal == SOCKET_ERROR)
+    {
+        _error = errno;
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocketPosix::SendTo() error: %d", _error);
+    }
+
+    return retVal;
+}
+
+bool UdpSocketPosix::ValidHandle()
+{
+    return _socket != INVALID_SOCKET;
+}
+
+void UdpSocketPosix::HasIncoming()
+{
+    char buf[2048];
+    int retval;
+    SocketAddress from;
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+    sockaddr sockaddrfrom;
+    memset(&from, 0, sizeof(from));
+    memset(&sockaddrfrom, 0, sizeof(sockaddrfrom));
+    socklen_t fromlen = sizeof(sockaddrfrom);
+#else
+    memset(&from, 0, sizeof(from));
+    socklen_t fromlen = sizeof(from);
+#endif
+
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+        retval = recvfrom(_socket,buf, sizeof(buf), 0,
+                          reinterpret_cast<sockaddr*>(&sockaddrfrom), &fromlen);
+        memcpy(&from, &sockaddrfrom, fromlen);
+        from._sockaddr_storage.sin_family = sockaddrfrom.sa_family;
+#else
+        retval = recvfrom(_socket,buf, sizeof(buf), 0,
+                          reinterpret_cast<sockaddr*>(&from), &fromlen);
+#endif
+
+    switch(retval)
+    {
+    case 0:
+        // A zero-length datagram was received (UDP has no orderly shutdown).
+        break;
+    case SOCKET_ERROR:
+        break;
+    default:
+        if(_wantsIncoming && _incomingCb)
+        {
+                _incomingCb(_obj,buf, retval, &from);
+        }
+        break;
+    }
+}
+
+void UdpSocketPosix::CloseBlocking()
+{
+    _cs->Enter();
+    _closeBlockingActive = true;
+    if(!CleanUp())
+    {
+        _closeBlockingActive = false;
+        _cs->Leave();
+        return;
+    }
+
+    while(!_readyForDeletion)
+    {
+        _readyForDeletionCond->SleepCS(*_cs);
+    }
+    _closeBlockingCompleted = true;
+    _closeBlockingCompletedCond->Wake();
+    _cs->Leave();
+}
+
+void UdpSocketPosix::ReadyForDeletion()
+{
+    _cs->Enter();
+    if(!_closeBlockingActive)
+    {
+        _cs->Leave();
+        return;
+    }
+    close(_socket);
+    _socket = INVALID_SOCKET;
+    _readyForDeletion = true;
+    _readyForDeletionCond->Wake();
+    while(!_closeBlockingCompleted)
+    {
+        _closeBlockingCompletedCond->SleepCS(*_cs);
+    }
+    _cs->Leave();
+}
+
+bool UdpSocketPosix::CleanUp()
+{
+    _wantsIncoming = false;
+
+    if (_socket == INVALID_SOCKET)
+    {
+        return false;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "calling UdpSocketManager::RemoveSocket()...");
+    _mgr->RemoveSocket(this);
+    // After this call the socket may already have been (or will shortly be)
+    // deleted. Return immediately.
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_posix.h b/trunk/src/modules/udp_transport/source/udp_socket_posix.h
new file mode 100644
index 0000000..ee76abb
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_posix.h
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_POSIX_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_POSIX_H_
+
+#include <arpa/inet.h>
+#include <netinet/in.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+
+#include "condition_variable_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+#define SOCKET_ERROR -1
+
+namespace webrtc {
+class UdpSocketPosix : public UdpSocketWrapper
+{
+public:
+    UdpSocketPosix(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                   bool ipV6Enable = false);
+
+    virtual ~UdpSocketPosix();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool SetCallback(CallbackObj obj, IncomingSocketCallback cb);
+
+    virtual bool Bind(const SocketAddress& name);
+
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen);
+
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType);
+
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to);
+
+    // Deletes socket in addition to closing it.
+    // TODO (hellner): make destructor protected.
+    virtual void CloseBlocking();
+
+    virtual SOCKET GetFd() {return _socket;}
+    virtual WebRtc_Word32 GetError() {return _error;}
+
+    virtual bool ValidHandle();
+
+    virtual bool SetQos(WebRtc_Word32 /*serviceType*/,
+                        WebRtc_Word32 /*tokenRate*/,
+                        WebRtc_Word32 /*bucketSize*/,
+                        WebRtc_Word32 /*peekBandwith*/,
+                        WebRtc_Word32 /*minPolicedSize*/,
+                        WebRtc_Word32 /*maxSduSize*/,
+                        const SocketAddress& /*stRemName*/,
+                        WebRtc_Word32 /*overrideDSCP*/) {return false;}
+
+    bool CleanUp();
+    void HasIncoming();
+    bool WantsIncoming() {return _wantsIncoming;}
+    void ReadyForDeletion();
+private:
+    friend class UdpSocketManagerPosix;
+
+    WebRtc_Word32 _id;
+    IncomingSocketCallback _incomingCb;
+    CallbackObj _obj;
+    WebRtc_Word32 _error;
+
+    SOCKET _socket;
+    UdpSocketManager* _mgr;
+    ConditionVariableWrapper* _closeBlockingCompletedCond;
+    ConditionVariableWrapper* _readyForDeletionCond;
+
+    bool _closeBlockingActive;
+    bool _closeBlockingCompleted;
+    bool _readyForDeletion;
+
+    CriticalSectionWrapper* _cs;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_POSIX_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_windows.cc b/trunk/src/modules/udp_transport/source/udp_socket_windows.cc
new file mode 100644
index 0000000..05381e1
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_windows.cc
@@ -0,0 +1,772 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_windows.h"
+
+// Disable deprecation warning from traffic.h
+#pragma warning(disable : 4995)
+
+#ifndef WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN
+#endif
+
+#include <assert.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <windows.h>
+#include <Qos.h>
+// Don't change include order for these header files.
+#include <Winsock2.h>
+#include <Ntddndis.h>
+#include <traffic.h>
+
+#include "traffic_control_windows.h"
+#include "udp_socket_manager_wrapper.h"
+
+namespace webrtc {
+typedef struct _QOS_DESTADDR
+{
+    QOS_OBJECT_HDR ObjectHdr;
+    const struct sockaddr* SocketAddress;
+    ULONG SocketAddressLength;
+} QOS_DESTADDR, *LPQOS_DESTADDR;
+
+typedef const QOS_DESTADDR* LPCQOS_DESTADDR;
+
+#define QOS_GENERAL_ID_BASE 2000
+#define QOS_OBJECT_DESTADDR (0x00000004 + QOS_GENERAL_ID_BASE)
+
+#define MAX_PACKET_SIZE 2048
+
+class UDPPacket
+{
+public:
+    UDPPacket()
+    {
+        _length = 0;
+    }
+    WebRtc_Word32 Set(const WebRtc_Word8* buf, WebRtc_Word32 length)
+    {
+        if(length > MAX_PACKET_SIZE)
+            return 0;
+
+        _length = length;
+        memcpy(_buffer,buf,length);
+        return length;
+    }
+    WebRtc_Word32 Set(const WebRtc_Word8* buf, WebRtc_Word32 length,
+                      const SocketAddress* addr)
+    {
+        if(length > MAX_PACKET_SIZE)
+        {
+            return 0;
+        }
+
+        _length = length;
+        memcpy(&_remoteAddr,addr,sizeof(SocketAddress));
+        memcpy(_buffer,buf,length);
+        return length;
+    }
+
+    SocketAddress _remoteAddr;
+    WebRtc_Word8 _buffer[MAX_PACKET_SIZE];
+    WebRtc_Word32 _length;
+};
+
+UdpSocketWindows::UdpSocketWindows(const WebRtc_Word32 id,
+                                   UdpSocketManager* mgr, bool ipV6Enable)
+    : _id(id),
+      _qos(true)
+{
+    _wantsIncoming = false;
+    _error = 0;
+    _mgr = mgr;
+    _addedToMgr = false;
+
+    _obj = NULL;
+    _incomingCb = NULL;
+    _socket = INVALID_SOCKET;
+    _terminate=false;
+
+    _clientHandle = INVALID_HANDLE_VALUE;
+    _flowHandle = INVALID_HANDLE_VALUE;
+    _filterHandle = INVALID_HANDLE_VALUE;
+
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id,
+                 "UdpSocketWindows::UdpSocketWindows()");
+
+    _gtc = NULL;
+
+    // Check if QoS is supported.
+    WSAPROTOCOL_INFO  pProtocolInfo;
+    DWORD dwBufLen = 0;
+    BOOL bProtocolFound = FALSE;
+    WSAPROTOCOL_INFO* lpProtocolBuf = NULL;
+
+    // Set dwBufLen to the size needed to retrieve all the requested
+    // information from WSAEnumProtocols.
+    WebRtc_Word32 nRet = WSAEnumProtocols(NULL, lpProtocolBuf, &dwBufLen);
+    lpProtocolBuf = (WSAPROTOCOL_INFO*)malloc(dwBufLen);
+    nRet = WSAEnumProtocols(NULL, lpProtocolBuf, &dwBufLen);
+
+    WebRtc_Word32 iProtocol;
+    if (ipV6Enable)
+    {
+        iProtocol = AF_INET6;
+    } else {
+        iProtocol = AF_INET;
+    }
+
+    for (WebRtc_Word32 i = 0; i < nRet; i++)
+    {
+        if (iProtocol == lpProtocolBuf[i].iAddressFamily && IPPROTO_UDP ==
+            lpProtocolBuf[i].iProtocol)
+        {
+            if ((XP1_QOS_SUPPORTED ==
+                 (XP1_QOS_SUPPORTED & lpProtocolBuf[i].dwServiceFlags1)))
+            {
+                pProtocolInfo = lpProtocolBuf[i];
+                bProtocolFound = TRUE;
+                break;
+            }
+        }
+     }
+
+    if(!bProtocolFound)
+    {
+        _socket = INVALID_SOCKET;
+        _qos = false;
+        free(lpProtocolBuf);
+        _error = SOCKET_ERROR_NO_QOS;
+    }else {
+        _socket = WSASocket(FROM_PROTOCOL_INFO, FROM_PROTOCOL_INFO,
+                            FROM_PROTOCOL_INFO,&pProtocolInfo, 0,
+                            WSA_FLAG_OVERLAPPED);
+        free(lpProtocolBuf);
+        if (_socket != INVALID_SOCKET)
+        {
+            return;
+        }else
+        {
+            _qos = false;
+            _error = SOCKET_ERROR_NO_QOS;
+        }
+    }
+    // QoS not supported.
+    if(ipV6Enable)
+    {
+        _socket = socket(AF_INET6, SOCK_DGRAM, IPPROTO_UDP);
+    }else
+    {
+        _socket = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
+    }
+    // Non-blocking mode.
+    WebRtc_Word32 iMode = 1;
+    ioctlsocket(_socket, FIONBIO, (u_long FAR*) &iMode);
+}
+
+UdpSocketWindows::~UdpSocketWindows()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id,
+                 "UdpSocketWindows::~UdpSocketWindows()");
+    if (_gtc)
+    {
+        TrafficControlWindows::Release(_gtc);
+    }
+}
+
+WebRtc_Word32 UdpSocketWindows::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    if (_gtc)
+    {
+        _gtc->ChangeUniqueId(id);
+    }
+    return 0;
+}
+
+bool UdpSocketWindows::ValidHandle()
+{
+    return GetFd() != INVALID_SOCKET;
+}
+
+bool UdpSocketWindows::SetCallback(CallbackObj obj, IncomingSocketCallback cb)
+{
+    _obj = obj;
+    _incomingCb = cb;
+
+    if (_mgr->AddSocket(this))
+    {
+        _addedToMgr = true;
+        return true;
+    }
+    return false;
+}
+
+bool UdpSocketWindows::SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                                  const WebRtc_Word8* optval,
+                                  WebRtc_Word32 optlen)
+{
+    if(0 == setsockopt(_socket, level, optname, optval, optlen))
+    {
+        return true;
+    }
+    _error = WSAGetLastError();
+    return false;
+}
+
+bool UdpSocketWindows::Bind(const SocketAddress& name)
+{
+    const struct sockaddr* socketName =
+        reinterpret_cast<const struct sockaddr*>(&name);
+
+    if (0 == bind(_socket, socketName, sizeof(SocketAddress)))
+    {
+        _localAddr = name;
+        return true;
+    }
+    _error = WSAGetLastError();
+    return false;
+}
+
+WebRtc_Word32 UdpSocketWindows::SendTo(const WebRtc_Word8* buf,
+                                       WebRtc_Word32 len,
+                                       const SocketAddress& to)
+{
+    // Don't try to send this packet if there are older packets queued up.
+    if(!_notSentPackets.Empty())
+    {
+        UDPPacket* packet = new UDPPacket();
+        packet->Set(buf, len, &to);
+        if(!_notSentPackets.Empty())
+        {
+            _notSentPackets.PushBack(packet);
+            return len;
+        }else {
+            // No old packets queued up. Free to try to send.
+            delete packet;
+        }
+    }
+
+    WebRtc_Word32 retVal;
+    retVal = sendto(_socket, buf, len, 0,
+                    reinterpret_cast<const struct sockaddr*>(&to),
+                    sizeof(SocketAddress));
+
+    if(retVal == SOCKET_ERROR)
+    {
+        _error = WSAGetLastError();
+        if (_error == WSAEWOULDBLOCK)
+        {
+            UDPPacket* packet = new UDPPacket();
+            packet->Set(buf,len, &to);
+            _notSentPackets.PushBack(packet);
+            return len;
+        }
+    }
+    return retVal;
+}
+
+void UdpSocketWindows::HasIncoming()
+{
+    WebRtc_Word8 buf[MAX_PACKET_SIZE];
+    SocketAddress from;
+    int fromlen = sizeof(from);
+    WebRtc_Word32 retval = recvfrom(_socket,buf, sizeof(buf), 0,
+                                    reinterpret_cast<struct sockaddr*>(&from),
+                                    &fromlen);
+
+    switch(retval)
+    {
+    case 0:
+        // A zero-length datagram was received (UDP is connectionless).
+        break;
+    case SOCKET_ERROR:
+        _error = WSAGetLastError();
+        break;
+    default:
+        if(_wantsIncoming && _incomingCb)
+            _incomingCb(_obj,buf, retval, &from);
+        break;
+    }
+}
+
+void UdpSocketWindows::CleanUp()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketWindows::CleanUp()");
+    _wantsIncoming = false;
+
+    if(_clientHandle != INVALID_HANDLE_VALUE)
+    {
+        assert(_filterHandle != INVALID_HANDLE_VALUE);
+        assert(_flowHandle != INVALID_HANDLE_VALUE);
+
+        if (_gtc)
+        {
+            _gtc->TcDeleteFilter(_filterHandle);
+            _gtc->TcDeleteFlow(_flowHandle);
+            _gtc->TcDeregisterClient(_clientHandle);
+        }
+
+        _clientHandle = INVALID_HANDLE_VALUE;
+        _filterHandle = INVALID_HANDLE_VALUE;
+        _flowHandle = INVALID_HANDLE_VALUE;
+    }
+
+    while(!_notSentPackets.Empty())
+    {
+        UDPPacket* packet = (UDPPacket*)_notSentPackets.First()->GetItem();
+        if(!packet)
+        {
+            break;
+        }
+        delete packet;
+        _notSentPackets.PopFront();
+    }
+
+    if (_socket != INVALID_SOCKET)
+    {
+        if (closesocket(_socket) == SOCKET_ERROR)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "closesocket() => error = %d", WSAGetLastError());
+        }
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "WinSock::closesocket() done");
+
+        if(_addedToMgr)
+        {
+            WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                         "calling UdpSocketManager::RemoveSocket()");
+            _mgr->RemoveSocket(this);
+        }
+    }
+}
+
+void UdpSocketWindows::SetWritable()
+{
+    // Try to send packets that have been queued up.
+    while(!_notSentPackets.Empty())
+    {
+        UDPPacket* packet = (UDPPacket*)_notSentPackets.First()->GetItem();
+        if(!packet)
+        {
+            break;
+        }
+        if(sendto(
+               _socket,packet->_buffer,
+               packet->_length,
+               0,
+               reinterpret_cast<const struct sockaddr*>(
+                   &(packet->_remoteAddr)),
+               sizeof(SocketAddress)) == SOCKET_ERROR)
+        {
+            _error = WSAGetLastError();
+            if (_error == WSAEWOULDBLOCK)
+            {
+                return;
+            }
+        } else {
+            delete packet;
+            _notSentPackets.PopFront();
+        }
+    }
+}
+
+bool UdpSocketWindows::SetQos(WebRtc_Word32 serviceType,
+                              WebRtc_Word32 tokenRate,
+                              WebRtc_Word32 bucketSize,
+                              WebRtc_Word32 peekBandwith,
+                              WebRtc_Word32 minPolicedSize,
+                              WebRtc_Word32 maxSduSize,
+                              const SocketAddress &stRemName,
+                              WebRtc_Word32 overrideDSCP)
+{
+    if(_qos == false)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetQos(), socket not capable of QOS");
+        return false;
+    }
+    QOS Qos;
+    WebRtc_Word32 result;
+    DWORD BytesRet;
+
+    if(overrideDSCP != 0)
+    {
+        FLOWSPEC f;
+        WebRtc_Word32 err = CreateFlowSpec(serviceType, tokenRate, bucketSize,
+                                           peekBandwith, minPolicedSize,
+                                           maxSduSize, &f);
+        if(err == -1)
+        {
+            return false;
+        }
+        return SetTOSByte(overrideDSCP, &f, &f) == 0;
+    }
+    memset(&Qos, QOS_NOT_SPECIFIED, sizeof(QOS));
+
+    Qos.SendingFlowspec.ServiceType        = serviceType;
+    Qos.SendingFlowspec.TokenRate          = tokenRate;
+    Qos.SendingFlowspec.TokenBucketSize    = bucketSize;
+    Qos.SendingFlowspec.PeakBandwidth      = peekBandwith;
+    Qos.SendingFlowspec.DelayVariation     = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.Latency            = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.MinimumPolicedSize = minPolicedSize;
+    Qos.SendingFlowspec.MaxSduSize         = maxSduSize;
+
+    // Only ServiceType is needed for receiving.
+    Qos.ReceivingFlowspec.ServiceType        = serviceType;
+    Qos.ReceivingFlowspec.TokenRate          = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.TokenBucketSize    = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.PeakBandwidth      = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.Latency            = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.DelayVariation     = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.MaxSduSize         = QOS_NOT_SPECIFIED;
+
+    Qos.ProviderSpecific.len = 0;
+    Qos.ProviderSpecific.buf = NULL;
+    WebRtc_Word8* p = (WebRtc_Word8*)malloc(sizeof(QOS_DESTADDR) +
+                                            sizeof(QOS_DS_CLASS));
+
+    QOS_DESTADDR* QosDestaddr = (QOS_DESTADDR*)p;
+    ZeroMemory((WebRtc_Word8 *)QosDestaddr, sizeof(QOS_DESTADDR));
+    QosDestaddr->ObjectHdr.ObjectType = QOS_OBJECT_DESTADDR;
+    QosDestaddr->ObjectHdr.ObjectLength = sizeof(QOS_DESTADDR);
+    QosDestaddr->SocketAddress = (SOCKADDR*)&stRemName;
+    QosDestaddr->SocketAddressLength = sizeof(SocketAddress);
+    Qos.ProviderSpecific.len = QosDestaddr->ObjectHdr.ObjectLength;
+    Qos.ProviderSpecific.buf = (WebRtc_Word8*)p;
+
+    // Socket must be bound for this call to be successful. If the socket is
+    // not bound, WSAGetLastError() will return 10022 (WSAEINVAL).
+    result = WSAIoctl(GetFd(),SIO_SET_QOS, &Qos,sizeof(QOS),NULL, 0, &BytesRet,
+                      NULL,NULL);
+    if (result == SOCKET_ERROR)
+    {
+        _error = WSAGetLastError();
+        free(p);
+        return false;
+    }
+    free(p);
+    return true;
+}
+
+WebRtc_Word32 UdpSocketWindows::SetTOS(WebRtc_Word32 serviceType)
+{
+    WebRtc_Word32 res = SetTOSByte(serviceType, NULL, NULL);
+
+    if (res == -1)
+    {
+        OSVERSIONINFO OsVersion;
+        OsVersion.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+        GetVersionEx (&OsVersion);
+
+        if ((OsVersion.dwMajorVersion == 4))
+        {
+            return -1;
+        }
+    }
+    return res;
+}
+
+WebRtc_Word32 UdpSocketWindows::CreateFlowSpec(WebRtc_Word32 serviceType,
+                                               WebRtc_Word32 tokenRate,
+                                               WebRtc_Word32 bucketSize,
+                                               WebRtc_Word32 peekBandwith,
+                                               WebRtc_Word32 minPolicedSize,
+                                               WebRtc_Word32 maxSduSize,
+                                               FLOWSPEC *f)
+{
+    if(!f)
+    {
+        return -1;
+    }
+
+    f->ServiceType        = serviceType;
+    f->TokenRate          = tokenRate;
+    f->TokenBucketSize    = bucketSize;
+    f->PeakBandwidth      = peekBandwith;
+    f->DelayVariation     = QOS_NOT_SPECIFIED;
+    f->Latency            = QOS_NOT_SPECIFIED;
+    f->MinimumPolicedSize = minPolicedSize;
+    f->MaxSduSize         = maxSduSize;
+    return 0;
+}
+
+WebRtc_Word32 UdpSocketWindows::SetTOSByte(WebRtc_Word32 serviceType,
+                                           FLOWSPEC* send, FLOWSPEC* recv)
+{
+    if(_socket == INVALID_SOCKET)
+    {
+        return -1;
+    }
+    if (!_gtc)
+    {
+        _gtc = TrafficControlWindows::GetInstance(_id);
+    }
+    if (!_gtc)
+    {
+        return -1;
+    }
+
+    TCI_CLIENT_FUNC_LIST QoSFunctions;
+    QoSFunctions.ClAddFlowCompleteHandler = NULL;
+    QoSFunctions.ClDeleteFlowCompleteHandler = NULL;
+    QoSFunctions.ClModifyFlowCompleteHandler = NULL;
+    QoSFunctions.ClNotifyHandler = (TCI_NOTIFY_HANDLER)MyClNotifyHandler;
+    // Register the client with Traffic control interface.
+    HANDLE ClientHandle;
+    ULONG result = _gtc->TcRegisterClient(CURRENT_TCI_VERSION, NULL,
+                                          &QoSFunctions,&ClientHandle);
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "TcRegisterClient returned %d", result);
+        return result;
+    }
+
+    // Find traffic control-enabled network interfaces.
+    ULONG BufferSize = 0;
+    result = _gtc->TcEnumerateInterfaces(ClientHandle, &BufferSize, NULL);
+
+    if(result != NO_ERROR && result != ERROR_INSUFFICIENT_BUFFER)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Error enumerating interfaces, %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        return result;
+    }
+
+    if(result != ERROR_INSUFFICIENT_BUFFER)
+    {
+        // A zero-sized buffer was reported as sufficient, i.e. there are no
+        // traffic-control-enabled network interfaces; ToS is not enabled.
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "Error enumerating interfaces: passed in 0 and received\
+ NO_ERROR when expecting INSUFFICIENT_BUFFER, %d");
+        _gtc->TcDeregisterClient(ClientHandle);
+        return -1;
+    }
+
+    PTC_IFC_DESCRIPTOR pInterfaceBuffer =
+        (PTC_IFC_DESCRIPTOR)malloc(BufferSize);
+    if(pInterfaceBuffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Out ot memory failure");
+        _gtc->TcDeregisterClient(ClientHandle);
+        return ERROR_NOT_ENOUGH_MEMORY;
+    }
+
+    result = _gtc->TcEnumerateInterfaces(ClientHandle, &BufferSize,
+                                         pInterfaceBuffer);
+
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "Critical: error enumerating interfaces when passing in correct\
+ buffer size: %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+
+    PTC_IFC_DESCRIPTOR oneinterface;
+    HANDLE ifcHandle, iFilterHandle, iflowHandle;
+    bool addrFound = false;
+    ULONG filterSourceAddress = ULONG_MAX;
+
+    const struct sockaddr_in* name;
+    name = reinterpret_cast<const struct sockaddr_in*>(&_localAddr);
+
+    // Find the interface corresponding to the local address.
+    for(oneinterface = pInterfaceBuffer;
+        oneinterface != (PTC_IFC_DESCRIPTOR)
+            (((WebRtc_Word8*)pInterfaceBuffer) + BufferSize);
+        oneinterface = (PTC_IFC_DESCRIPTOR)
+            ((WebRtc_Word8*)oneinterface + oneinterface->Length))
+    {
+
+        WebRtc_Word8 interfaceName[500];
+        WideCharToMultiByte(CP_ACP, 0, oneinterface->pInterfaceName, -1,
+                            interfaceName, sizeof(interfaceName), 0, 0);
+
+        PNETWORK_ADDRESS_LIST addresses =
+            &(oneinterface->AddressListDesc.AddressList);
+        for(LONG i = 0; i < addresses->AddressCount; i++)
+        {
+            // Only look at TCP/IP addresses.
+            if(addresses->Address[i].AddressType != NDIS_PROTOCOL_ID_TCP_IP)
+            {
+                continue;
+            }
+
+            NETWORK_ADDRESS_IP* pIpAddr =
+                (NETWORK_ADDRESS_IP*)&(addresses->Address[i].Address);
+
+            WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                         "Examining Interface %s", interfaceName);
+            if(pIpAddr->in_addr == name->sin_addr.S_un.S_addr)
+            {
+                filterSourceAddress = pIpAddr->in_addr;
+                WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                             "Found ip addr: %s", inet_ntoa(name->sin_addr));
+                addrFound = true;
+            }
+        }
+        if(!addrFound)
+        {
+            continue;
+        }
+        else
+        {
+            break;
+        }
+    }
+
+    if(!addrFound)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "IP Address not found");
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return -1;
+    }
+
+
+    result = _gtc->TcOpenInterfaceW(oneinterface->pInterfaceName, ClientHandle,
+                                    NULL, &ifcHandle);
+
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Error opening interface: %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    FLOWSPEC defaultSend, defaultRecv;
+    if(send == NULL)
+    {
+        defaultSend.DelayVariation = QOS_NOT_SPECIFIED;
+        defaultSend.Latency = QOS_NOT_SPECIFIED;
+        defaultSend.MaxSduSize = QOS_NOT_SPECIFIED;
+        defaultSend.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        defaultSend.PeakBandwidth = QOS_NOT_SPECIFIED;
+        defaultSend.ServiceType = SERVICETYPE_BESTEFFORT;
+        defaultSend.TokenBucketSize = QOS_NOT_SPECIFIED;
+        defaultSend.TokenRate = 10000;
+    }
+    else
+    {
+        defaultSend = *send;
+    }
+     if(recv == NULL)
+    {
+        defaultRecv = defaultSend;
+        defaultRecv.ServiceType = SERVICETYPE_CONTROLLEDLOAD;
+    }
+    else
+    {
+        defaultRecv = *recv;
+    }
+
+
+    PTC_GEN_FLOW flow =
+        (PTC_GEN_FLOW)malloc(sizeof(TC_GEN_FLOW) + sizeof(QOS_DS_CLASS));
+
+    flow->ReceivingFlowspec = defaultRecv;
+    flow->SendingFlowspec = defaultSend;
+
+    QOS_DS_CLASS dsClass;
+
+    ZeroMemory((WebRtc_Word8*)&dsClass, sizeof(QOS_DS_CLASS));
+
+    dsClass.DSField = serviceType;
+
+    dsClass.ObjectHdr.ObjectType = QOS_OBJECT_DS_CLASS;
+    dsClass.ObjectHdr.ObjectLength = sizeof(dsClass);
+
+    memcpy(flow->TcObjects, (void*)&dsClass, sizeof(QOS_DS_CLASS));
+    flow->TcObjectsLength = sizeof(dsClass);
+
+    result = _gtc->TcAddFlow(ifcHandle, NULL, 0, flow, &iflowHandle);
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Error adding flow: %d", result);
+        _gtc->TcCloseInterface(ifcHandle);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return -1;
+    }
+
+    free(flow);
+    IP_PATTERN filterPattern, mask;
+
+    ZeroMemory((WebRtc_Word8*)&filterPattern, sizeof(IP_PATTERN));
+    ZeroMemory((WebRtc_Word8*)&mask, sizeof(IP_PATTERN));
+
+    filterPattern.ProtocolId = IPPROTO_UDP;
+    // "name" fields are in network order.
+    filterPattern.S_un.S_un_ports.s_srcport = name->sin_port;
+    filterPattern.SrcAddr = filterSourceAddress;
+
+    // Unsigned max of a type is a bitmask with all bits set to 1. In a
+    // TC_GEN_FILTER mask, a set bit marks the corresponding pattern bit as
+    // significant, so all-ones masks require an exact match on these fields.
+    mask.ProtocolId = UCHAR_MAX;
+    mask.S_un.S_un_ports.s_srcport = USHRT_MAX;
+    mask.SrcAddr = ULONG_MAX;
+
+    TC_GEN_FILTER filter;
+    filter.AddressType = NDIS_PROTOCOL_ID_TCP_IP;
+    filter.Mask = (LPVOID)&mask;
+    filter.Pattern = (LPVOID)&filterPattern;
+    filter.PatternSize = sizeof(IP_PATTERN);
+    if(_filterHandle != INVALID_HANDLE_VALUE)
+    {
+        _gtc->TcDeleteFilter(_filterHandle);
+    }
+
+    result = _gtc->TcAddFilter(iflowHandle, &filter, &iFilterHandle);
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Error adding filter: %d", result);
+        _gtc->TcDeleteFlow(iflowHandle);
+        _gtc->TcCloseInterface(ifcHandle);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    _flowHandle = iflowHandle;
+    _filterHandle = iFilterHandle;
+    _clientHandle = ClientHandle;
+
+    _gtc->TcCloseInterface(ifcHandle);
+    free(pInterfaceBuffer);
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "Successfully created flow and filter.");
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_windows.h b/trunk/src/modules/udp_transport/source/udp_socket_windows.h
new file mode 100644
index 0000000..0bbd20d
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_windows.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WINDOWS_H_
+
+#include <Winsock2.h>
+
+#include "list_wrapper.h"
+#include "udp_socket_manager_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+namespace webrtc {
+class TrafficControlWindows;
+
+// Winsock1-based UDP socket for Windows builds without USE_WINSOCK2
+// (see udp_socket_wrapper.cc). Implements UdpSocketWrapper; instances are
+// serviced by a UdpSocketManager (friend UdpSocketManagerWindows below).
+class UdpSocketWindows : public UdpSocketWrapper
+{
+public:
+    UdpSocketWindows(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                     bool ipV6Enable = false);
+    virtual ~UdpSocketWindows();
+
+    // Set the unique identifier (used for tracing) to id.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    // True if the underlying socket handle is usable.
+    virtual bool ValidHandle();
+
+    // Register cb for incoming-packet callbacks; obj is passed back to cb.
+    virtual bool SetCallback(CallbackObj obj, IncomingSocketCallback cb);
+
+    // Bind the socket to the local address in name.
+    virtual bool Bind(const SocketAddress& name);
+
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen);
+
+    // Send buf (len bytes) to the address in to.
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to);
+
+    virtual SOCKET GetFd() {return _socket;}
+    virtual WebRtc_Word32 GetError() {return _error;}
+
+    // Enable QoS for traffic sent to stRemName.
+    // NOTE(review): "peekBandwith" looks like a misspelling of
+    // "peakBandwidth", kept as-is for interface compatibility.
+    virtual bool SetQos(WebRtc_Word32 serviceType, WebRtc_Word32 tokenRate,
+                        WebRtc_Word32 bucketSize, WebRtc_Word32 peekBandwith,
+                        WebRtc_Word32 minPolicedSize, WebRtc_Word32 maxSduSize,
+                        const SocketAddress& stRemName,
+                        WebRtc_Word32 overrideDSCP = 0);
+
+    // Set the IP TOS byte for outgoing packets.
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType);
+
+protected:
+    void CleanUp();
+    void HasIncoming();
+    // Socket is free to process pending packets.
+    void SetWritable();
+    bool IsWritable() {return _notSentPackets.Empty();}
+    bool WantsIncoming() {return _wantsIncoming;}
+
+private:
+    friend class UdpSocketManagerWindows;
+
+    // Fill FLOWSPEC f from the given QoS parameters.
+    WebRtc_Word32 CreateFlowSpec(WebRtc_Word32 serviceType,
+                                 WebRtc_Word32 tokenRate,
+                                 WebRtc_Word32 bucketSize,
+                                 WebRtc_Word32 peekBandwith,
+                                 WebRtc_Word32 minPolicedSize,
+                                 WebRtc_Word32 maxSduSize, FLOWSPEC* f);
+
+    WebRtc_Word32 SetTOSByte(WebRtc_Word32 serviceType, FLOWSPEC* send,
+                             FLOWSPEC* recv);
+
+    WebRtc_Word32 _id;
+    IncomingSocketCallback _incomingCb;
+
+    CallbackObj _obj;
+    bool _qos;             // True while a QoS flow is active.
+    WebRtc_Word32 _error;  // Last socket error code.
+
+    volatile bool _addedToMgr;
+
+    SocketAddress _remoteAddr;
+    SocketAddress _localAddr;
+    SOCKET _socket;
+    UdpSocketManager* _mgr;
+
+    // Packets queued while the socket is not writable.
+    ListWrapper _notSentPackets;
+    bool _terminate;
+
+    // QoS handles.
+    HANDLE _clientHandle;
+    HANDLE _flowHandle;
+    HANDLE _filterHandle;
+
+    // TOS implementation.
+    TrafficControlWindows* _gtc;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WINDOWS_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_wrapper.cc b/trunk/src/modules/udp_transport/source/udp_socket_wrapper.cc
new file mode 100644
index 0000000..53093a1
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_wrapper.cc
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_wrapper.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "udp_socket_manager_wrapper.h"
+
+#if defined(_WIN32)
+    #include "udp_socket_windows.h"
+    #include "udp_socket2_windows.h"
+#else
+    #include "udp_socket_posix.h"
+#endif
+
+
+namespace webrtc {
+bool UdpSocketWrapper::_initiated = false;
+
+// Temporary Android hack. The value 1024 is taken from
+// <ndk>/build/platforms/android-1.5/arch-arm/usr/include/linux/posix_types.h
+// TODO (tomasl): can we remove this now?
+#ifndef FD_SETSIZE
+#define FD_SETSIZE 1024
+#endif
+
+// Base-class constructor: start with receiving disabled and no delete
+// event registered.
+UdpSocketWrapper::UdpSocketWrapper()
+    : _wantsIncoming(false),
+      _deleteEvent(NULL)
+{
+}
+
+// Signals _deleteEvent (if one was registered) so a waiter can observe the
+// socket's destruction, then drops the pointer. The event object itself is
+// not owned here and is not deleted.
+UdpSocketWrapper::~UdpSocketWrapper()
+{
+    if(_deleteEvent)
+    {
+      _deleteEvent->Set();
+      _deleteEvent = NULL;
+    }
+}
+
+// Drops the reference to the delete event without signaling it, so the
+// destructor will not Set() it.
+void UdpSocketWrapper::SetEventToNull()
+{
+    if (_deleteEvent)
+    {
+        _deleteEvent = NULL;
+    }
+}
+
+// Factory for the platform-specific socket implementation. Performs
+// one-time stack initialization (WSAStartup 2.2 on Windows), constructs
+// the concrete socket, and registers cb/obj as the incoming-packet
+// callback. Returns NULL on failure. disableGQOS is only meaningful for
+// the Winsock2 implementation, hence the two signatures below.
+#ifdef USE_WINSOCK2
+UdpSocketWrapper* UdpSocketWrapper::CreateSocket(const WebRtc_Word32 id,
+                                                 UdpSocketManager* mgr,
+                                                 CallbackObj obj,
+                                                 IncomingSocketCallback cb,
+                                                 bool ipV6Enable,
+                                                 bool disableGQOS)
+#else
+UdpSocketWrapper* UdpSocketWrapper::CreateSocket(const WebRtc_Word32 id,
+                                                 UdpSocketManager* mgr,
+                                                 CallbackObj obj,
+                                                 IncomingSocketCallback cb,
+                                                 bool ipV6Enable,
+                                                 bool /*disableGQOS*/)
+#endif
+
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                 "UdpSocketWrapper::CreateSocket");
+
+    UdpSocketWrapper* s = 0;
+
+#ifdef _WIN32
+    // Lazy, process-wide Winsock initialization (guarded by _initiated).
+    if (!_initiated)
+    {
+        WSADATA wsaData;
+        WORD wVersionRequested = MAKEWORD( 2, 2 );
+        WebRtc_Word32 err = WSAStartup( wVersionRequested, &wsaData);
+        if (err != 0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                id,
+                "UdpSocketWrapper::CreateSocket failed to initialize sockets\
+ WSAStartup error:%d",
+                err);
+            return NULL;
+        }
+
+        _initiated = true;
+    }
+
+#ifdef USE_WINSOCK2
+    s = new UdpSocket2Windows(id, mgr, ipV6Enable, disableGQOS);
+#else
+    #pragma message("Error: No non-Winsock2 implementation for WinCE")
+    s = new UdpSocketWindows(id, mgr, ipV6Enable);
+#endif
+
+#else
+    if (!_initiated)
+    {
+        _initiated = true;
+    }
+    s = new UdpSocketPosix(id, mgr, ipV6Enable);
+    if (s)
+    {
+        UdpSocketPosix* sl = static_cast<UdpSocketPosix*>(s);
+        // The fd must fit in an fd_set for select(): reject fds that are
+        // invalid or >= FD_SETSIZE.
+        if (sl->GetFd() != INVALID_SOCKET && sl->GetFd() < FD_SETSIZE)
+        {
+            // ok
+        } else
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                id,
+                "UdpSocketWrapper::CreateSocket failed to initialize socket");
+            delete s;
+            s = NULL;
+        }
+    }
+#endif
+    if (s)
+    {
+        s->_deleteEvent = NULL;
+        if (!s->SetCallback(obj, cb))
+        {
+            // NOTE(review): trace text "ser callback" is a typo for
+            // "set callback"; also s is not deleted on this path, which
+            // looks like a leak — confirm ownership before changing.
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                id,
+                "UdpSocketWrapper::CreateSocket failed to ser callback");
+            return(NULL);
+        }
+    }
+    return s;
+}
+
+// Marks the socket as wanting incoming packets; the socket manager checks
+// this flag when dispatching. Always succeeds.
+bool UdpSocketWrapper::StartReceiving()
+{
+    _wantsIncoming = true;
+    return true;
+}
+
+// Stops delivery of incoming packets by clearing the flag. Always succeeds.
+bool UdpSocketWrapper::StopReceiving()
+{
+    _wantsIncoming = false;
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_socket_wrapper.h b/trunk/src/modules/udp_transport/source/udp_socket_wrapper.h
new file mode 100644
index 0000000..9a3b585
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_socket_wrapper.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WRAPPER_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WRAPPER_H_
+
+#include "udp_transport.h"
+
+namespace webrtc {
+class EventWrapper;
+class UdpSocketManager;
+
+#define SOCKET_ERROR_NO_QOS -1000
+
+#ifndef _WIN32
+typedef int SOCKET;
+#endif
+
+#ifndef INVALID_SOCKET
+#define INVALID_SOCKET  (SOCKET)(~0)
+
+#ifndef AF_INET
+#define AF_INET 2
+#endif
+
+#endif
+
+typedef void* CallbackObj;
+typedef void(*IncomingSocketCallback)(CallbackObj obj, const WebRtc_Word8* buf,
+                                      WebRtc_Word32 len,
+                                      const SocketAddress* from);
+
+// Abstract, platform-independent UDP socket interface. Concrete
+// implementations exist for POSIX, Winsock1 and Winsock2; instances are
+// created through the CreateSocket() factory.
+class UdpSocketWrapper
+{
+public:
+    virtual ~UdpSocketWrapper();
+    // Factory: returns the platform-specific implementation, or NULL on
+    // failure. cb is invoked (with obj) for every incoming packet.
+    static UdpSocketWrapper* CreateSocket(const WebRtc_Word32 id,
+                                          UdpSocketManager* mgr,
+                                          CallbackObj obj,
+                                          IncomingSocketCallback cb,
+                                          bool ipV6Enable = false,
+                                          bool disableGQOS = false);
+
+    // Set the unique identifier of this class to id.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    // Register cb for receiving callbacks when there are incoming packets.
+    // Register obj so that it will be passed in calls to cb.
+    virtual bool SetCallback(CallbackObj obj, IncomingSocketCallback cb) = 0;
+
+    // Bind socket to the local address specified by name.
+    virtual bool Bind(const SocketAddress& name) = 0;
+
+    // Start receiving UDP data.
+    virtual bool StartReceiving();
+    virtual inline bool StartReceiving(const WebRtc_UWord32 /*receiveBuffers*/)
+    {return StartReceiving();}
+    // Stop receiving UDP data.
+    virtual bool StopReceiving();
+
+    virtual bool ValidHandle() = 0;
+
+    // Set socket options.
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval,
+                            WebRtc_Word32 optlen) = 0;
+
+    // Set TOS for outgoing packets.
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType) = 0;
+
+    // Set 802.1Q PCP field (802.1p) for outgoing VLAN traffic.
+    // Default: unsupported (-1); overridden where the platform supports it.
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 /*pcp*/) {return -1;}
+
+    // Send buf of length len to the address specified by to.
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to) = 0;
+
+    // Drop the delete event without signaling it (see implementation).
+    virtual void SetEventToNull();
+
+    // Close socket and don't return until completed.
+    virtual void CloseBlocking() {}
+
+    // tokenRate is in bit/s. peakBandwidth is in byte/s.
+    virtual bool SetQos(WebRtc_Word32 serviceType, WebRtc_Word32 tokenRate,
+                        WebRtc_Word32 bucketSize, WebRtc_Word32 peekBandwith,
+                        WebRtc_Word32 minPolicedSize, WebRtc_Word32 maxSduSize,
+                        const SocketAddress &stRemName,
+                        WebRtc_Word32 overrideDSCP = 0) = 0;
+
+    // Number of receive buffers in use; 0 unless overridden.
+    virtual WebRtc_UWord32 ReceiveBuffers() {return 0;};
+
+protected:
+    UdpSocketWrapper();
+
+    bool _wantsIncoming;           // True while receiving is enabled.
+    EventWrapper*  _deleteEvent;   // Signaled on destruction; not owned.
+
+private:
+    static bool _initiated;        // Process-wide one-time init flag.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WRAPPER_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_transport.gypi b/trunk/src/modules/udp_transport/source/udp_transport.gypi
new file mode 100644
index 0000000..c30e167
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_transport.gypi
@@ -0,0 +1,120 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# GYP build description for the udp_transport module: one library target
+# plus (outside Chromium builds) a unit-test executable. All source files
+# are listed unconditionally and platform-inappropriate ones are removed
+# via 'sources!' in the conditions below.
+{
+  'targets': [
+    {
+      'target_name': 'udp_transport',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        # PLATFORM INDEPENDENT SOURCE FILES
+        '../interface/udp_transport.h',
+        'udp_transport_impl.cc',
+        'udp_socket_wrapper.cc',
+        'udp_socket_manager_wrapper.cc',
+        'udp_transport_impl.h',
+        'udp_socket_wrapper.h',
+        'udp_socket_manager_wrapper.h',
+        # PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
+        # Posix (Linux/Mac)
+        'udp_socket_posix.cc',
+        'udp_socket_posix.h',
+        'udp_socket_manager_posix.cc',
+        'udp_socket_manager_posix.h',
+        # Windows
+        'udp_socket_manager_windows.cc',
+        'udp_socket_manager_windows.h',
+        'udp_socket2_manager_windows.cc',
+        'udp_socket2_manager_windows.h',
+        'udp_socket_windows.cc',
+        'udp_socket_windows.h',
+        'udp_socket2_windows.cc',
+        'udp_socket2_windows.h',
+        'traffic_control_windows.cc',
+        'traffic_control_windows.h',
+      ], # source
+      'conditions': [
+        # DEFINE PLATFORM SPECIFIC SOURCE FILES
+        # Strip POSIX sources on non-POSIX platforms.
+        ['os_posix==0', {
+          'sources!': [
+            'udp_socket_posix.cc',
+            'udp_socket_posix.h',
+            'udp_socket_manager_posix.cc',
+            'udp_socket_manager_posix.h',
+          ],
+        }],
+        # Strip Windows sources on non-Windows platforms.
+        ['OS!="win"', {
+          'sources!': [
+            'udp_socket_manager_windows.cc',
+            'udp_socket_manager_windows.h',
+            'udp_socket2_manager_windows.cc',
+            'udp_socket2_manager_windows.h',
+            'udp_socket_windows.cc',
+            'udp_socket_windows.h',
+            'udp_socket2_windows.cc',
+            'udp_socket2_windows.h',
+            'traffic_control_windows.cc',
+            'traffic_control_windows.h',
+          ],
+        }],
+        ['OS=="linux"', {
+          'cflags': [
+            '-fno-strict-aliasing',
+          ],
+        }],
+        ['OS=="mac"', {
+          'xcode_settings': {
+            'OTHER_CPLUSPLUSFLAGS': '-fno-strict-aliasing',
+          },
+        }],
+        # Selects the Winsock2 socket implementation on Windows
+        # (see udp_socket_wrapper.cc).
+        ['OS=="win"', {
+          'defines': [
+            'USE_WINSOCK2',
+          ],
+        }],
+      ] # conditions
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'udp_transport_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'udp_transport',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'udp_transport_unittest.cc',
+          ],
+        }, # udp_transport_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/udp_transport/source/udp_transport_impl.cc b/trunk/src/modules/udp_transport/source/udp_transport_impl.cc
new file mode 100644
index 0000000..9ce2d92
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_transport_impl.cc
@@ -0,0 +1,3009 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_transport_impl.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#if defined(_WIN32)
+    #include <winsock2.h>
+    #include <ws2tcpip.h>
+    // Disable warning for default initialized arrays on VS2005
+    #pragma warning(disable:4351)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    #include <arpa/inet.h>
+    #include <ctype.h>
+    #include <fcntl.h>
+    #include <netdb.h>
+    #include <net/if.h>
+    #include <netinet/in.h>
+    #include <stdlib.h>
+    #include <sys/ioctl.h>
+    #include <sys/socket.h>
+    #include <sys/time.h>
+    #include <unistd.h>
+#ifndef MAC_IPHONE
+    #include <net/if_arp.h>
+#endif
+#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+#if defined(WEBRTC_MAC)
+#include <ifaddrs.h>
+#include <machine/types.h>
+#endif
+#if defined(WEBRTC_LINUX)
+#include <linux/netlink.h>
+#include <linux/rtnetlink.h>
+#endif
+
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "typedefs.h"
+#include "udp_socket_manager_wrapper.h"
+
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#define GetLastError() errno
+
+#define IFRSIZE ((int)(size * sizeof (struct ifreq)))
+
+#define NLMSG_OK_NO_WARNING(nlh,len)                                    \
+  ((len) >= (int)sizeof(struct nlmsghdr) &&                             \
+   (int)(nlh)->nlmsg_len >= (int)sizeof(struct nlmsghdr) &&             \
+   (int)(nlh)->nlmsg_len <= (len))
+
+#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+namespace webrtc {
+// Factory: creates a concrete UdpTransportImpl. numSocketThreads is passed
+// by reference through to the socket manager.
+UdpTransport* UdpTransport::Create(const WebRtc_Word32 id,
+                                   WebRtc_UWord8& numSocketThreads)
+{
+    return new UdpTransportImpl(id, numSocketThreads);
+}
+
+// Deletes a module previously returned by Create(). Safe to call with NULL.
+void UdpTransport::Destroy(UdpTransport* module)
+{
+    if(module)
+    {
+        delete module;
+    }
+}
+
+// Constructor: allocates the locks, acquires the shared socket manager and
+// zero-initializes all address/IP buffers and cached-address state.
+UdpTransportImpl::UdpTransportImpl(const WebRtc_Word32 id,
+                                   WebRtc_UWord8& numSocketThreads)
+    : _id(id),
+      _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _critFilter(CriticalSectionWrapper::CreateCriticalSection()),
+      _critPacketCallback(CriticalSectionWrapper::CreateCriticalSection()),
+      _mgr(UdpSocketManager::Create(id, numSocketThreads)),
+      _lastError(kNoSocketError),
+      _destPort(0),
+      _destPortRTCP(0),
+      _localPort(0),
+      _localPortRTCP(0),
+      _srcPort(0),
+      _srcPortRTCP(0),
+      _fromPort(0),
+      _fromPortRTCP(0),
+      _fromIP(),
+      _destIP(),
+      _localIP(),
+      _localMulticastIP(),
+      _ptrRtpSocket(NULL),
+      _ptrRtcpSocket(NULL),
+      _ptrSendRtpSocket(NULL),
+      _ptrSendRtcpSocket(NULL),
+      _remoteRTPAddr(),
+      _remoteRTCPAddr(),
+      _localRTPAddr(),
+      _localRTCPAddr(),
+      _tos(0),
+      _receiving(false),
+      _useSetSockOpt(false),
+      _qos(false),
+      _pcp(0),
+      _ipV6Enabled(false),
+      _serviceType(0),
+      _overrideDSCP(0),
+      _maxBitrate(0),
+      _cachLock(RWLockWrapper::CreateRWLock()),
+      _previousAddress(),
+      _previousIP(),
+      _previousIPSize(0),
+      _previousSourcePort(0),
+      _filterIPAddress(),
+      _rtpFilterPort(0),
+      _rtcpFilterPort(0),
+      _packetCallback(0)
+{
+    memset(&_remoteRTPAddr, 0, sizeof(_remoteRTPAddr));
+    memset(&_remoteRTCPAddr, 0, sizeof(_remoteRTCPAddr));
+    memset(&_localRTPAddr, 0, sizeof(_localRTPAddr));
+    memset(&_localRTCPAddr, 0, sizeof(_localRTCPAddr));
+
+    memset(_fromIP, 0, sizeof(_fromIP));
+    memset(_destIP, 0, sizeof(_destIP));
+    memset(_localIP, 0, sizeof(_localIP));
+    memset(_localMulticastIP, 0, sizeof(_localMulticastIP));
+
+    memset(&_filterIPAddress, 0, sizeof(_filterIPAddress));
+    // Retry once if the first Create() in the initializer list failed.
+    // NOTE(review): the second attempt seems unlikely to succeed where the
+    // first failed — confirm intent.
+    if(_mgr == NULL)
+    {
+        _mgr = UdpSocketManager::Create(id, numSocketThreads);
+    }
+
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id, "%s created", __FUNCTION__);
+}
+
+// Destructor: closes both send and receive socket pairs, frees the locks,
+// and returns the per-process reference to the shared socket manager.
+UdpTransportImpl::~UdpTransportImpl()
+{
+    CloseSendSockets();
+    CloseReceiveSockets();
+    delete _crit;
+    delete _critFilter;
+    delete _critPacketCallback;
+    delete _cachLock;
+
+    UdpSocketManager::Return();
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id, "%s deleted",
+                 __FUNCTION__);
+}
+
+// Propagates a new unique (trace) id to this module, the socket manager
+// and every existing socket. Always returns 0.
+WebRtc_Word32 UdpTransportImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+
+    CriticalSectionScoped cs(_crit);
+    _id = id;
+    if(_mgr)
+    {
+        _mgr->ChangeUniqueId(id);
+    }
+    if(_ptrRtpSocket)
+    {
+        _ptrRtpSocket->ChangeUniqueId(id);
+    }
+    if(_ptrRtcpSocket)
+    {
+        _ptrRtcpSocket->ChangeUniqueId(id);
+    }
+    if(_ptrSendRtpSocket)
+    {
+        _ptrSendRtpSocket->ChangeUniqueId(id);
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        _ptrSendRtcpSocket->ChangeUniqueId(id);
+    }
+    return 0;
+}
+
+// Module interface: fixed 100 ms until the next Process() call is wanted.
+WebRtc_Word32 UdpTransportImpl::TimeUntilNextProcess()
+{
+    return 100;
+}
+
+// Module interface: no periodic work to do; always succeeds.
+WebRtc_Word32 UdpTransportImpl::Process()
+{
+    return 0;
+}
+
+// Returns the most recent error recorded by this transport.
+UdpTransport::ErrorCode UdpTransportImpl::LastError() const
+{
+    return _lastError;
+}
+
+// True if the two socket addresses are byte-identical.
+// NOTE(review): memcmp over the whole struct also compares any padding
+// bytes — callers must ensure addresses are zero-initialized (the ctor
+// memsets them) for this to be reliable.
+bool SameAddress(const SocketAddress& address1, const SocketAddress& address2)
+{
+    return (memcmp(&address1,&address2,sizeof(address1)) == 0);
+}
+
+// Copies the cached source IP string and port into ip/sourcePort.
+// On entry ipSize is the caller's buffer size; on return it is the number
+// of characters copied (excluding the terminator).
+// Caller must hold _cachLock (read or write).
+void UdpTransportImpl::GetCachedAddress(char* ip,
+                                        WebRtc_UWord32& ipSize,
+                                        WebRtc_UWord16& sourcePort)
+{
+    const WebRtc_UWord32 originalIPSize = ipSize;
+    // If the incoming string is too small, fill it as much as there is room
+    // for. Make sure that there is room for the '\0' character.
+    ipSize = (ipSize - 1 < _previousIPSize) ? ipSize - 1 : _previousIPSize;
+    // NOTE(review): copies ipSize + 1 bytes and then forces a terminator at
+    // the end of the caller's buffer; assumes originalIPSize >= 1 — confirm.
+    memcpy(ip,_previousIP,sizeof(WebRtc_Word8)*(ipSize + 1));
+    ip[originalIPSize - 1] = '\0';
+    sourcePort = _previousSourcePort;
+}
+
+// Converts address to an IP string/port, using a one-entry cache keyed on
+// the previous address. Fast path takes only the read lock; a cache miss
+// upgrades to the write lock, refreshes the cache via IPAddress(), then
+// serves from the cache. Returns 0 on success, -1 if conversion fails.
+WebRtc_Word32 UdpTransportImpl::IPAddressCached(const SocketAddress& address,
+                                                char* ip,
+                                                WebRtc_UWord32& ipSize,
+                                                WebRtc_UWord16& sourcePort)
+{
+    {
+        ReadLockScoped rl(*_cachLock);
+        // Check if the old address can be re-used (is the same).
+        if(SameAddress(address,_previousAddress))
+        {
+            GetCachedAddress(ip,ipSize,sourcePort);
+            return 0;
+        }
+    }
+    // Get the new address and store it.
+    WriteLockScoped wl(*_cachLock);
+    ipSize = kIpAddressVersion6Length;
+    if(IPAddress(address,_previousIP,ipSize,_previousSourcePort) != 0)
+    {
+        return -1;
+    }
+    _previousIPSize = ipSize;
+    memcpy(&_previousAddress, &address, sizeof(address));
+    // Address has been cached at this point.
+    GetCachedAddress(ip,ipSize,sourcePort);
+    return 0;
+}
+
+// Creates and binds the local RTP and RTCP receive sockets.
+// packetCallback == NULL tears the receive side down (returns 0).
+// portnr == 0 falls back to _destPort; rtcpPort == 0 defaults to RTP
+// port + 1. ip == NULL binds to the wildcard address. Returns 0 on
+// success, -1 on failure (with _lastError set where applicable).
+WebRtc_Word32 UdpTransportImpl::InitializeReceiveSockets(
+    UdpTransportData* const packetCallback,
+    const WebRtc_UWord16 portnr,
+    const char* ip,
+    const char* multicastIpAddr,
+    const WebRtc_UWord16 rtcpPort)
+{
+
+    {
+        CriticalSectionScoped cs(_critPacketCallback);
+        _packetCallback = packetCallback;
+
+        if(packetCallback == NULL)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                         "Closing down receive sockets");
+            return 0;
+        }
+    }
+
+    CriticalSectionScoped cs(_crit);
+    CloseReceiveSockets();
+
+    if(portnr == 0)
+    {
+        // TODO (hellner): why not just fail here?
+        if(_destPort == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "InitializeReceiveSockets port 0 not allowed");
+            _lastError = kPortInvalid;
+            return -1;
+        }
+        _localPort = _destPort;
+    } else {
+        _localPort = portnr;
+    }
+    if(rtcpPort)
+    {
+        _localPortRTCP = rtcpPort;
+    }else {
+        _localPortRTCP = _localPort + 1;
+        WEBRTC_TRACE(
+            kTraceStateInfo,
+            kTraceTransport,
+            _id,
+            "InitializeReceiveSockets RTCP port not configured using RTP\
+ port+1=%d",
+            _localPortRTCP);
+    }
+
+    if(ip)
+    {
+        if(IsIpAddressValid(ip,IpV6Enabled()))
+        {
+            strncpy(_localIP, ip,kIpAddressVersion6Length);
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "InitializeReceiveSockets invalid IP address");
+            _lastError = kIpAddressInvalid;
+            return -1;
+        }
+    }else
+    {
+        // Don't bind to a specific IP address.
+        if(! IpV6Enabled())
+        {
+            strncpy(_localIP, "0.0.0.0",16);
+        } else
+        {
+            strncpy(_localIP, "0000:0000:0000:0000:0000:0000:0000:0000",
+                    kIpAddressVersion6Length);
+        }
+    }
+    // Multicast is only supported for IPv4 here.
+    if(multicastIpAddr && !IpV6Enabled())
+    {
+        if(IsIpAddressValid(multicastIpAddr,IpV6Enabled()))
+        {
+            strncpy(_localMulticastIP, multicastIpAddr,
+                    kIpAddressVersion6Length);
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "InitializeReceiveSockets invalid IP address");
+            _lastError =  kIpAddressInvalid;
+            return -1;
+        }
+    }
+    if(_mgr == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeReceiveSockets no socket manager");
+        return -1;
+    }
+
+    // Reset per-socket QoS/TOS state before creating new sockets.
+    _useSetSockOpt=false;
+    _tos=0;
+    _pcp=0;
+
+    _ptrRtpSocket = UdpSocketWrapper::CreateSocket(_id, _mgr, this,
+                                                   IncomingRTPCallback,
+                                                   IpV6Enabled());
+
+    _ptrRtcpSocket = UdpSocketWrapper::CreateSocket(_id, _mgr, this,
+                                                    IncomingRTCPCallback,
+                                                    IpV6Enabled());
+
+    // NOTE(review): trace strings below say "faild" (sic) — left as-is
+    // since they are runtime strings in the imported drop.
+    ErrorCode retVal = BindLocalRTPSocket();
+    if(retVal != kNoSocketError)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeReceiveSockets faild to bind RTP socket");
+        _lastError = retVal;
+        CloseReceiveSockets();
+        return -1;
+    }
+    retVal = BindLocalRTCPSocket();
+    if(retVal != kNoSocketError)
+    {
+        _lastError = retVal;
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeReceiveSockets faild to bind RTCP socket");
+        CloseReceiveSockets();
+        return -1;
+    }
+    return 0;
+}
+
+// Reports the local receive configuration: RTP/RTCP ports and (when the
+// output buffers are non-NULL) the bound local and multicast IP strings.
+// Always returns 0.
+WebRtc_Word32 UdpTransportImpl::ReceiveSocketInformation(
+    char ipAddr[kIpAddressVersion6Length],
+    WebRtc_UWord16& rtpPort,
+    WebRtc_UWord16& rtcpPort,
+    char multicastIpAddr[kIpAddressVersion6Length]) const
+{
+    CriticalSectionScoped cs(_crit);
+    rtpPort = _localPort;
+    rtcpPort = _localPortRTCP;
+    if (ipAddr)
+    {
+        strncpy(ipAddr, _localIP, IpV6Enabled() ?
+                UdpTransport::kIpAddressVersion6Length :
+                UdpTransport::kIpAddressVersion4Length);
+    }
+    if (multicastIpAddr)
+    {
+        strncpy(multicastIpAddr, _localMulticastIP, IpV6Enabled() ?
+                UdpTransport::kIpAddressVersion6Length :
+                UdpTransport::kIpAddressVersion4Length);
+    }
+    return 0;
+}
+
+// Reports the send destination: RTP/RTCP ports and destination IP string.
+// ipAddr must be non-NULL (no NULL check, unlike the receive variant).
+// Always returns 0.
+WebRtc_Word32 UdpTransportImpl::SendSocketInformation(
+    char ipAddr[kIpAddressVersion6Length],
+    WebRtc_UWord16& rtpPort,
+    WebRtc_UWord16& rtcpPort) const
+{
+    CriticalSectionScoped cs(_crit);
+    rtpPort = _destPort;
+    rtcpPort = _destPortRTCP;
+    strncpy(ipAddr, _destIP, IpV6Enabled() ?
+            UdpTransport::kIpAddressVersion6Length :
+            UdpTransport::kIpAddressVersion4Length);
+    return 0;
+}
+
+// Reports the most recently observed remote sender: source RTP/RTCP ports
+// and (when ipAddr is non-NULL) source IP string. Always returns 0.
+WebRtc_Word32 UdpTransportImpl::RemoteSocketInformation(
+    char ipAddr[kIpAddressVersion6Length],
+    WebRtc_UWord16& rtpPort,
+    WebRtc_UWord16& rtcpPort) const
+{
+    CriticalSectionScoped cs(_crit);
+    rtpPort = _fromPort;
+    rtcpPort = _fromPortRTCP;
+    if(ipAddr)
+    {
+        strncpy(ipAddr, _fromIP, IpV6Enabled() ?
+                kIpAddressVersion6Length :
+                kIpAddressVersion4Length);
+    }
+    return 0;
+}
+
+// Reports the configured RTP/RTCP filter ports (0 = no filtering).
+// Always returns 0.
+WebRtc_Word32 UdpTransportImpl::FilterPorts(
+    WebRtc_UWord16& rtpFilterPort,
+    WebRtc_UWord16& rtcpFilterPort) const
+{
+    CriticalSectionScoped cs(_critFilter);
+    rtpFilterPort = _rtpFilterPort;
+    rtcpFilterPort = _rtcpFilterPort;
+    return 0;
+}
+
+// Enables or disables QoS. When QoS is true, the remaining parameters are
+// forwarded to EnableQoS(); when false they are ignored and DisableQoS()
+// is called. Returns the callee's result.
+WebRtc_Word32 UdpTransportImpl::SetQoS(bool QoS, WebRtc_Word32 serviceType,
+                                       WebRtc_UWord32 maxBitrate,
+                                       WebRtc_Word32 overrideDSCP, bool audio)
+{
+    if(QoS)
+    {
+        return EnableQoS(serviceType, audio, maxBitrate, overrideDSCP);
+    }else
+    {
+        return DisableQoS();
+    }
+}
+
+WebRtc_Word32 UdpTransportImpl::EnableQoS(WebRtc_Word32 serviceType,
+                                          bool audio, WebRtc_UWord32 maxBitrate,
+                                          WebRtc_Word32 overrideDSCP)
+{
+    if (_ipV6Enabled)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but will be ignored since IPv6 is enabled");
+        _lastError = kQosError;
+        return -1;
+    }
+    if (_tos)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "TOS already enabled, can't use TOS and QoS at the same time");
+        _lastError = kQosError;
+        return -1;
+    }
+    if (_pcp)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "PCP already enabled, can't use PCP and QoS at the same time");
+        _lastError = kQosError;
+        return -1;
+    }
+    if(_destPort == 0)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet configured\
+ the send destination");
+        return -1;
+    }
+    if(_qos)
+    {
+        if(_overrideDSCP == 0 && overrideDSCP != 0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "QOS is already enabled and overrideDSCP differs, not allowed");
+            return -1;
+        }
+    }
+    CriticalSectionScoped cs(_crit);
+
+    UdpSocketWrapper* rtpSock = _ptrSendRtpSocket ?
+        _ptrSendRtpSocket :
+        _ptrRtpSocket;
+    if (!rtpSock || !rtpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTP socket");
+        return -1;
+    }
+    UdpSocketWrapper* rtcpSock = _ptrSendRtcpSocket ?
+        _ptrSendRtcpSocket :
+        _ptrRtcpSocket;
+    if (!rtcpSock || !rtcpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTCP socket");
+        return -1;
+    }
+
+    // Minimum packet size in bytes for which the requested quality of service
+    // will be provided. The smallest RTP header is 12 byte.
+    const WebRtc_Word32 min_policed_size = 12;
+    // Max SDU, maximum packet size permitted or used in the traffic flow, in
+    // bytes.
+    const WebRtc_Word32 max_sdu_size = 1500;
+
+    // Enable QoS for RTP sockets.
+    if(maxBitrate)
+    {
+        // Note: 1 kbit is 125 bytes.
+        // Token Rate is typically set to the average bit rate from peak to
+        // peak.
+        // Bucket size is normally set to the largest average frame size.
+        if(audio)
+        {
+            WEBRTC_TRACE(kTraceStateInfo,
+                         kTraceTransport,
+                         _id,
+                         "Enable QOS for audio with max bitrate:%d",
+                         maxBitrate);
+
+            const WebRtc_Word32 token_rate = maxBitrate*125;
+            // The largest audio packets are 60ms frames. This is a fraction
+            // more than 16 packets/second. These 16 frames are sent, at max,
+            // at a bitrate of maxBitrate*125 -> 1 frame is maxBitrate*125/16 ~
+            // maxBitrate * 8.
+            const WebRtc_Word32 bucket_size = maxBitrate * 8;
+            const WebRtc_Word32 peek_bandwith =  maxBitrate * 125;
+            if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                                 peek_bandwith, min_policed_size,
+                                 max_sdu_size, _remoteRTPAddr, overrideDSCP))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "QOS failed on the RTP socket");
+                _lastError = kQosError;
+                return -1;
+            }
+        }else
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                         "Enable QOS for video with max bitrate:%d",
+                         maxBitrate);
+
+            // Allow for a token rate that is twice that of the maximum bitrate
+            // (in bytes).
+            const WebRtc_Word32 token_rate = maxBitrate*250;
+            // largest average frame size (key frame size). Assuming that a
+            // keyframe is 25% of the bitrate during the second its sent
+            // Assume that a key frame is 25% of the bitrate the second that it
+            // is sent. The largest frame size is then maxBitrate* 125 * 0.25 ~
+            // 31.
+            const WebRtc_Word32 bucket_size = maxBitrate*31;
+            const WebRtc_Word32 peek_bandwith = maxBitrate*125;
+            if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                                peek_bandwith, min_policed_size, max_sdu_size,
+                                _remoteRTPAddr, overrideDSCP))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "QOS failed on the RTP socket");
+                _lastError = kQosError;
+                return -1;
+            }
+        }
+    } else if(audio)
+    {
+        // No max bitrate set. Audio.
+        WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                     "Enable QOS for audio with default max bitrate");
+
+        // Let max bitrate be 240kbit/s.
+        const WebRtc_Word32 token_rate = 30000;
+        const WebRtc_Word32 bucket_size = 2000;
+        const WebRtc_Word32 peek_bandwith = 30000;
+        if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                             peek_bandwith, min_policed_size, max_sdu_size,
+                             _remoteRTPAddr, overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "QOS failed on the RTP socket");
+            _lastError = kQosError;
+            return -1;
+        }
+    }else
+    {
+        // No max bitrate set. Video.
+        WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                     "Enable QOS for video with default max bitrate");
+
+        // Let max bitrate be 10mbit/s.
+        const WebRtc_Word32 token_rate = 128000*10;
+        const WebRtc_Word32 bucket_size = 32000;
+        const WebRtc_Word32 peek_bandwith = 256000;
+        if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                             peek_bandwith, min_policed_size, max_sdu_size,
+                             _remoteRTPAddr, overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "QOS failed on the RTP socket");
+            _lastError = kQosError;
+            return -1;
+        }
+    }
+
+    // Enable QoS for RTCP sockets.
+    // TODO (hellner): shouldn't RTCP be based on 5% of the maximum bandwidth?
+    if(audio)
+    {
+        const WebRtc_Word32 token_rate = 200;
+        const WebRtc_Word32 bucket_size = 200;
+        const WebRtc_Word32 peek_bandwith = 400;
+        if (!rtcpSock->SetQos(serviceType, token_rate, bucket_size,
+                              peek_bandwith, min_policed_size, max_sdu_size,
+                              _remoteRTCPAddr, overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "QOS failed on the RTCP socket");
+            _lastError = kQosError;
+        }
+    }else
+    {
+        const WebRtc_Word32 token_rate = 5000;
+        const WebRtc_Word32 bucket_size = 100;
+        const WebRtc_Word32 peek_bandwith = 10000;
+        if (!rtcpSock->SetQos(serviceType, token_rate, bucket_size,
+                              peek_bandwith, min_policed_size, max_sdu_size,
+                            _remoteRTCPAddr, _overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "QOS failed on the RTCP socket");
+            _lastError = kQosError;
+        }
+    }
+    _qos = true;
+    _serviceType = serviceType;
+    _maxBitrate = maxBitrate;
+    _overrideDSCP = overrideDSCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::DisableQoS()
+{
+    if(_qos == false)
+    {
+        return 0;
+    }
+    CriticalSectionScoped cs(_crit);
+
+    UdpSocketWrapper* rtpSock = (_ptrSendRtpSocket ?
+                                 _ptrSendRtpSocket : _ptrRtpSocket);
+    if (!rtpSock || !rtpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTP socket");
+        return -1;
+    }
+    UdpSocketWrapper* rtcpSock = (_ptrSendRtcpSocket ?
+                                  _ptrSendRtcpSocket : _ptrRtcpSocket);
+    if (!rtcpSock || !rtcpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTCP socket");
+        return -1;
+    }
+
+    const WebRtc_Word32 service_type = 0;   // = SERVICETYPE_NOTRAFFIC
+    const WebRtc_Word32 not_specified = -1;
+    if (!rtpSock->SetQos(service_type, not_specified, not_specified,
+                         not_specified, not_specified, not_specified,
+                         _remoteRTPAddr, _overrideDSCP))
+    {
+        _lastError = kQosError;
+        return -1;
+    }
+    if (!rtcpSock->SetQos(service_type, not_specified, not_specified,
+                         not_specified, not_specified, not_specified,
+                         _remoteRTCPAddr,_overrideDSCP))
+    {
+        _lastError = kQosError;
+    }
+    _qos = false;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::QoS(bool& QoS, WebRtc_Word32& serviceType,
+                                    WebRtc_Word32& overrideDSCP) const
+{
+    CriticalSectionScoped cs(_crit);
+    QoS = _qos;
+    serviceType = _serviceType;
+    overrideDSCP = _overrideDSCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetToS(WebRtc_Word32 DSCP, bool useSetSockOpt)
+{
+    if (_qos)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "QoS already enabled");
+        _lastError = kQosError;
+        return -1;
+    }
+    if (DSCP < 0 || DSCP > 63)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "Invalid DSCP");
+        _lastError = kTosInvalid;
+        return -1;
+    }
+    if(_tos)
+    {
+        if(useSetSockOpt != _useSetSockOpt)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "Can't switch SetSockOpt method without disabling TOS first");
+            _lastError = kTosInvalid;
+            return -1;
+        }
+    }
+    CriticalSectionScoped cs(_crit);
+    UdpSocketWrapper* rtpSock = NULL;
+    UdpSocketWrapper* rtcpSock = NULL;
+    if(_ptrSendRtpSocket)
+    {
+        rtpSock = _ptrSendRtpSocket;
+    }else
+    {
+        rtpSock = _ptrRtpSocket;
+    }
+    if (rtpSock == NULL)
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(!rtpSock->ValidHandle())
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        rtcpSock = _ptrSendRtcpSocket;
+    }else
+    {
+        rtcpSock = _ptrRtcpSocket;
+    }
+    if (rtcpSock == NULL)
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(!rtcpSock->ValidHandle())
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+
+    if (useSetSockOpt)
+    {
+#ifdef _WIN32
+        OSVERSIONINFO OsVersion;
+        OsVersion.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+        GetVersionEx(&OsVersion);
+        // Disable QoS before setting ToS on Windows XP. This is done by closing
+        // and re-opening the sockets.
+        // TODO (hellner): why not just fail here and force the user to
+        //                 re-initialize sockets? Doing this may trick the user
+        //                 into thinking that the sockets are in a state which
+        //                 they aren't.
+        if (OsVersion.dwMajorVersion == 5 &&
+            OsVersion.dwMinorVersion == 1)
+        {
+            if(!_useSetSockOpt)
+            {
+                if(_ptrSendRtpSocket)
+                {
+                    CloseSendSockets();
+                    _ptrSendRtpSocket =
+                        UdpSocketWrapper::CreateSocket(_id, _mgr, NULL,
+                                                       NULL, IpV6Enabled(),
+                                                       true);
+                    _ptrSendRtcpSocket =
+                        UdpSocketWrapper::CreateSocket(_id, _mgr, NULL,
+                                                       NULL, IpV6Enabled(),
+                                                       true);
+                    rtpSock=_ptrSendRtpSocket;
+                    rtcpSock=_ptrSendRtcpSocket;
+                    ErrorCode retVal = BindRTPSendSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                    retVal = BindRTCPSendSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                }
+                else
+                {
+                    bool receiving=_receiving;
+                    WebRtc_UWord32 noOfReceiveBuffers = 0;
+                    if(receiving)
+                    {
+                        noOfReceiveBuffers=_ptrRtpSocket->ReceiveBuffers();
+                        if(StopReceiving()!=0)
+                        {
+                            return -1;
+                        }
+                    }
+                    CloseReceiveSockets();
+                    _ptrRtpSocket = UdpSocketWrapper::CreateSocket(
+                        _id, _mgr, this, IncomingRTPCallback,
+                        IpV6Enabled(), true);
+                    _ptrRtcpSocket = UdpSocketWrapper::CreateSocket(
+                        _id, _mgr, this, IncomingRTCPCallback,
+                        IpV6Enabled(),true);
+                    rtpSock=_ptrRtpSocket;
+                    rtcpSock=_ptrRtcpSocket;
+                    ErrorCode retVal = BindLocalRTPSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                    retVal = BindLocalRTCPSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                    if(receiving)
+                    {
+                        if(StartReceiving(noOfReceiveBuffers) !=
+                           kNoSocketError)
+                        {
+                            return -1;
+                        }
+                    }
+                }
+            }
+        }
+#endif // #ifdef _WIN32
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "Setting TOS using SetSockopt");
+        WebRtc_Word32 TOSShifted = DSCP << 2;
+        if (!rtpSock->SetSockopt(IPPROTO_IP, IP_TOS,
+                                 (WebRtc_Word8*) &TOSShifted, 4))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not SetSockopt tos value on RTP socket");
+            _lastError = kTosInvalid;
+            return -1;
+        }
+        if (!rtcpSock->SetSockopt(IPPROTO_IP, IP_TOS,
+                                  (WebRtc_Word8*) &TOSShifted, 4))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not sSetSockopt tos value on RTCP socket");
+            _lastError = kTosInvalid;
+            return -1;
+        }
+    } else
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "Setting TOS NOT using SetSockopt");
+        if (rtpSock->SetTOS(DSCP) != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not set tos value on RTP socket");
+            _lastError = kTosError;
+            return -1;
+        }
+        if (rtcpSock->SetTOS(DSCP) != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not set tos value on RTCP socket");
+            _lastError = kTosError;
+            return -1;
+        }
+    }
+    _useSetSockOpt = useSetSockOpt;
+    _tos = DSCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::ToS(WebRtc_Word32& DSCP,
+                                    bool& useSetSockOpt) const
+{
+    CriticalSectionScoped cs(_crit);
+    DSCP = _tos;
+    useSetSockOpt = _useSetSockOpt;
+    return 0;
+}
+
// Sets the 802.1p Priority Code Point (0-7) on the RTP and RTCP sockets.
// Mutually exclusive with QoS. On Windows the socket wrapper's SetPCP()
// is used; on Linux SO_PRIORITY is set via setsockopt(); other platforms
// (e.g. Mac) are unsupported and fail. Returns 0 on success, -1 on
// failure (_lastError is updated).
WebRtc_Word32 UdpTransportImpl::SetPCP(WebRtc_Word32 PCP)
{

    // PCP and QoS cannot be combined.
    if (_qos)
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "QoS already enabled");
        _lastError = kQosError;
        return -1;
    }
    // PCP is a 3-bit field.
    if ((PCP < 0) || (PCP > 7))
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "Invalid PCP");
        _lastError = kPcpError;
        return -1;
    }

    CriticalSectionScoped cs(_crit);
    // Resolve the sockets actually used for sending: dedicated send
    // sockets if present, otherwise the receive sockets.
    UdpSocketWrapper* rtpSock = NULL;
    UdpSocketWrapper* rtcpSock = NULL;
    if(_ptrSendRtpSocket)
    {
        rtpSock = _ptrSendRtpSocket;
    }else
    {
        rtpSock = _ptrRtpSocket;
    }
    if (rtpSock == NULL)
    {
        _lastError = kSocketInvalid;
        return -1;
    }
    if(!rtpSock->ValidHandle())
    {
        _lastError = kSocketInvalid;
        return -1;
    }
    if(_ptrSendRtcpSocket)
    {
        rtcpSock = _ptrSendRtcpSocket;
    }else
    {
        rtcpSock = _ptrRtcpSocket;
    }
    if (rtcpSock == NULL)
    {
        _lastError = kSocketInvalid;
        return -1;
    }
    if(!rtcpSock->ValidHandle())
    {
        _lastError = kSocketInvalid;
        return -1;
    }

#if defined(_WIN32)
    // Windows: delegate to the socket wrapper.
    if (rtpSock->SetPCP(PCP) != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                     "Could not set PCP value on RTP socket");
        _lastError = kPcpError;
        return -1;
    }
    if (rtcpSock->SetPCP(PCP) != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                     "Could not set PCP value on RTCP socket");
        _lastError = kPcpError;
        return -1;
    }

#elif defined(WEBRTC_LINUX)
    // Linux: SO_PRIORITY maps the value to the VLAN priority via the
    // kernel's egress priority mapping.
    if (!rtpSock->SetSockopt(SOL_SOCKET, SO_PRIORITY, (WebRtc_Word8*) &PCP,
                             sizeof(PCP)))
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                     "Could not SetSockopt PCP value on RTP socket");
        _lastError = kPcpError;
        return -1;
    }
    if (!rtcpSock->SetSockopt(SOL_SOCKET, SO_PRIORITY, (WebRtc_Word8*) &PCP,
                              sizeof(PCP)))
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                     "Could not SetSockopt PCP value on RTCP socket");
        _lastError = kPcpError;
        return -1;
    }
#else
    // Not supported on other platforms (WEBRTC_MAC)
    _lastError = kPcpError;
    return -1;
#endif
    _pcp = PCP;
    return 0;
}
+
+WebRtc_Word32 UdpTransportImpl::PCP(WebRtc_Word32& PCP) const
+{
+    CriticalSectionScoped cs(_crit);
+    PCP = _pcp;
+    return 0;
+}
+
// Returns whether ToS is applied via setsockopt() rather than the socket
// wrapper's SetTOS() path (see SetToS()).
// NOTE(review): unlike the other getters this does not take _crit; it
// reads a single bool, so presumably benign - confirm.
bool UdpTransportImpl::SetSockOptUsed()
{
    return _useSetSockOpt;
}
+
+WebRtc_Word32 UdpTransportImpl::EnableIpV6() {
+
+  CriticalSectionScoped cs(_crit);
+  const bool initialized = (_ptrSendRtpSocket || _ptrRtpSocket);
+
+  if (_ipV6Enabled) {
+    return 0;
+  }
+  if (initialized) {
+    _lastError = kIpVersion6Error;
+    return -1;
+  }
+  _ipV6Enabled = true;
+  return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::FilterIP(
+    char filterIPAddress[kIpAddressVersion6Length]) const
+{
+
+    if(filterIPAddress == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "FilterIP: Invalid argument");
+        return -1;
+    }
+    if(_filterIPAddress._sockaddr_storage.sin_family == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "No Filter configured");
+        return -1;
+    }
+    CriticalSectionScoped cs(_critFilter);
+    WebRtc_UWord32 ipSize = kIpAddressVersion6Length;
+    WebRtc_UWord16 sourcePort;
+    return IPAddress(_filterIPAddress, filterIPAddress, ipSize, sourcePort);
+}
+
+WebRtc_Word32 UdpTransportImpl::SetFilterIP(
+    const char filterIPAddress[kIpAddressVersion6Length])
+{
+    if(filterIPAddress == NULL)
+    {
+        memset(&_filterIPAddress, 0, sizeof(_filterIPAddress));
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id, "Filter IP reset");
+        return 0;
+    }
+    CriticalSectionScoped cs(_critFilter);
+    if (_ipV6Enabled)
+    {
+        _filterIPAddress._sockaddr_storage.sin_family = AF_INET6;
+
+        if (InetPresentationToNumeric(
+                AF_INET6,
+                filterIPAddress,
+                &_filterIPAddress._sockaddr_in6.sin6_addr) < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "Failed to set\
+ filter IP for IPv6");
+            _lastError = FILTER_ERROR;
+            return -1;
+        }
+    }
+    else
+    {
+        _filterIPAddress._sockaddr_storage.sin_family = AF_INET;
+
+        if(InetPresentationToNumeric(
+               AF_INET,
+               filterIPAddress,
+               &_filterIPAddress._sockaddr_in.sin_addr) < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to set filter IP for IPv4");
+            _lastError = FILTER_ERROR;
+            return -1;
+        }
+    }
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id, "Filter IP set");
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetFilterPorts(WebRtc_UWord16 rtpFilterPort,
+                                               WebRtc_UWord16 rtcpFilterPort)
+{
+    CriticalSectionScoped cs(_critFilter);
+    _rtpFilterPort = rtpFilterPort;
+    _rtcpFilterPort = rtcpFilterPort;
+    return 0;
+}
+
+bool UdpTransportImpl::SendSocketsInitialized() const
+{
+    CriticalSectionScoped cs(_crit);
+    if(_ptrSendRtpSocket)
+    {
+        return true;
+    }
+    if(_destPort !=0)
+    {
+        return true;
+    }
+    return false;
+}
+
+bool UdpTransportImpl::ReceiveSocketsInitialized() const
+{
+    if(_ptrRtpSocket)
+    {
+        return true;
+    }
+    return false;
+}
+
+bool UdpTransportImpl::SourcePortsInitialized() const
+{
+    if(_ptrSendRtpSocket)
+    {
+        return true;
+    }
+    return false;
+}
+
+bool UdpTransportImpl::IpV6Enabled() const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceTransport, _id, "%s", __FUNCTION__);
+    return _ipV6Enabled;
+}
+
// Fills _remoteRTPAddr from _destIP/_destPort, as an IPv6 or IPv4
// address depending on the transport's IP mode. SocketAddress is a
// union; HAVE_STRUCT_SOCKADDR_SA_LEN selects the BSD-style layout with
// a length byte.
void UdpTransportImpl::BuildRemoteRTPAddr()
{
    if(_ipV6Enabled)
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _remoteRTPAddr.sin_length = 0;
        _remoteRTPAddr.sin_family = PF_INET6;
#else
        _remoteRTPAddr._sockaddr_storage.sin_family = PF_INET6;
#endif

        _remoteRTPAddr._sockaddr_in6.sin6_flowinfo=0;
        _remoteRTPAddr._sockaddr_in6.sin6_scope_id=0;
        // Port in network byte order; address parsed from presentation form.
        _remoteRTPAddr._sockaddr_in6.sin6_port = Htons(_destPort);
        InetPresentationToNumeric(AF_INET6,_destIP,
                                  &_remoteRTPAddr._sockaddr_in6.sin6_addr);
    } else
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _remoteRTPAddr.sin_length = 0;
        _remoteRTPAddr.sin_family = PF_INET;
#else
        _remoteRTPAddr._sockaddr_storage.sin_family = PF_INET;
#endif
        _remoteRTPAddr._sockaddr_in.sin_port = Htons(_destPort);
        _remoteRTPAddr._sockaddr_in.sin_addr = InetAddrIPV4(_destIP);
    }
}
+
// Fills _remoteRTCPAddr from _destIP/_destPortRTCP, mirroring
// BuildRemoteRTPAddr() but for the RTCP destination port.
void UdpTransportImpl::BuildRemoteRTCPAddr()
{
    if(_ipV6Enabled)
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _remoteRTCPAddr.sin_length = 0;
        _remoteRTCPAddr.sin_family = PF_INET6;
#else
        _remoteRTCPAddr._sockaddr_storage.sin_family = PF_INET6;
#endif

        _remoteRTCPAddr._sockaddr_in6.sin6_flowinfo=0;
        _remoteRTCPAddr._sockaddr_in6.sin6_scope_id=0;
        // Port in network byte order; address parsed from presentation form.
        _remoteRTCPAddr._sockaddr_in6.sin6_port = Htons(_destPortRTCP);
        InetPresentationToNumeric(AF_INET6,_destIP,
                                  &_remoteRTCPAddr._sockaddr_in6.sin6_addr);

    } else
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _remoteRTCPAddr.sin_length = 0;
        _remoteRTCPAddr.sin_family = PF_INET;
#else
        _remoteRTCPAddr._sockaddr_storage.sin_family = PF_INET;
#endif
        _remoteRTCPAddr._sockaddr_in.sin_port = Htons(_destPortRTCP);
        _remoteRTCPAddr._sockaddr_in.sin_addr= InetAddrIPV4(_destIP);
    }
}
+
// Binds the dedicated RTP send socket to the wildcard address and
// _srcPort, for either IP version. Returns kNoSocketError on success,
// kSocketInvalid if the socket does not exist, kIpAddressInvalid if its
// handle is invalid, or kFailedToBindPort if Bind() fails.
UdpTransportImpl::ErrorCode UdpTransportImpl::BindRTPSendSocket()
{
    if(!_ptrSendRtpSocket)
    {
        return kSocketInvalid;
    }
    if(!_ptrSendRtpSocket->ValidHandle())
    {
        // NOTE(review): an invalid handle yields kIpAddressInvalid, not
        // kSocketInvalid - presumably intentional, confirm.
        return kIpAddressInvalid;
    }
    if(_ipV6Enabled)
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _localRTPAddr.sin_length = 0;
        _localRTPAddr.sin_family = PF_INET6;
#else
        _localRTPAddr._sockaddr_storage.sin_family = PF_INET6;
#endif
        _localRTPAddr._sockaddr_in6.sin6_flowinfo=0;
        _localRTPAddr._sockaddr_in6.sin6_scope_id=0;
        // Zero all four 32-bit words of the IPv6 address (in6addr_any).
        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[0] =
            0; // = INADDR_ANY
        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[1] =
            0;
        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[2] =
            0;
        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[3] =
            0;
        _localRTPAddr._sockaddr_in6.sin6_port = Htons(_srcPort);
        if(_ptrSendRtpSocket->Bind(_localRTPAddr) == false)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
                         "Failed to bind to port:%d ", _srcPort);
            return kFailedToBindPort;
        }
    } else {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _localRTPAddr.sin_length = 0;
        _localRTPAddr.sin_family = PF_INET;
#else
        _localRTPAddr._sockaddr_storage.sin_family = PF_INET;
#endif
        // 0 == INADDR_ANY for IPv4.
        _localRTPAddr._sockaddr_in.sin_addr = 0;
        _localRTPAddr._sockaddr_in.sin_port = Htons(_srcPort);
        if(_ptrSendRtpSocket->Bind(_localRTPAddr) == false)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
                         "Failed to bind to port:%d ", _srcPort);
            return kFailedToBindPort;
        }
    }
    return kNoSocketError;
}
+
// Binds the dedicated RTCP send socket to the wildcard address and
// _srcPortRTCP, for either IP version. Returns kNoSocketError on
// success, kSocketInvalid if the socket does not exist, or
// kFailedToBindPort if Bind() fails.
// NOTE(review): unlike BindRTPSendSocket() there is no ValidHandle()
// check here - confirm whether that asymmetry is intentional.
UdpTransportImpl::ErrorCode UdpTransportImpl::BindRTCPSendSocket()
{
    if(!_ptrSendRtcpSocket)
    {
        return kSocketInvalid;
    }

    if(_ipV6Enabled)
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _localRTCPAddr.sin_length = 0;
        _localRTCPAddr.sin_family = PF_INET6;
#else
        _localRTCPAddr._sockaddr_storage.sin_family = PF_INET6;
#endif
        _localRTCPAddr._sockaddr_in6.sin6_flowinfo=0;
        _localRTCPAddr._sockaddr_in6.sin6_scope_id=0;
        // Zero all four 32-bit words of the IPv6 address (in6addr_any).
        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[0] =
            0; // = INADDR_ANY
        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[1] =
            0;
        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[2] =
            0;
        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[3] =
            0;
        _localRTCPAddr._sockaddr_in6.sin6_port = Htons(_srcPortRTCP);
        if(_ptrSendRtcpSocket->Bind(_localRTCPAddr) == false)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
                         "Failed to bind to port:%d ", _srcPortRTCP);
            return kFailedToBindPort;
        }
    } else {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        _localRTCPAddr.sin_length = 0;
        _localRTCPAddr.sin_family = PF_INET;
#else
        _localRTCPAddr._sockaddr_storage.sin_family = PF_INET;
#endif
        // 0 == INADDR_ANY for IPv4.
        _localRTCPAddr._sockaddr_in.sin_addr= 0;
        _localRTCPAddr._sockaddr_in.sin_port = Htons(_srcPortRTCP);
        if(_ptrSendRtcpSocket->Bind(_localRTCPAddr) == false)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
                         "Failed to bind to port:%d ", _srcPortRTCP);
            return kFailedToBindPort;
        }
    }
    return kNoSocketError;
}
+
+UdpTransportImpl::ErrorCode UdpTransportImpl::BindLocalRTPSocket()
+{
+    if(!_ptrRtpSocket)
+    {
+        return kSocketInvalid;
+    }
+    if(!IpV6Enabled())
+    {
+        SocketAddress recAddr;
+        memset(&recAddr, 0, sizeof(SocketAddress));
+        recAddr._sockaddr_storage.sin_family = AF_INET;
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        recAddr.sin_length = 0;
+        recAddr.sin_family = PF_INET;
+#else
+        recAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        recAddr._sockaddr_in.sin_addr = InetAddrIPV4(_localIP);
+        recAddr._sockaddr_in.sin_port = Htons(_localPort);
+
+        if (!_ptrRtpSocket->Bind(recAddr))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _localPort);
+            return kFailedToBindPort;
+        }
+    }
+    else
+    {
+        SocketAddress stLclName;
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        stLclName.sin_lenght = 0;
+        stLclName.sin_family = PF_INET6;
+#else
+        stLclName._sockaddr_storage.sin_family = PF_INET6;
+#endif
+        InetPresentationToNumeric(AF_INET6,_localIP,
+                                  &stLclName._sockaddr_in6.sin6_addr);
+        stLclName._sockaddr_in6.sin6_port = Htons(_localPort);
+        stLclName._sockaddr_in6.sin6_flowinfo = 0;
+        stLclName._sockaddr_in6.sin6_scope_id = 0;
+
+        if (!_ptrRtpSocket->Bind(stLclName))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _localPort);
+            return kFailedToBindPort;
+        }
+    }
+
+    if(_localMulticastIP[0] != 0)
+    {
+        // Join the multicast group from which to receive datagrams.
+        struct ip_mreq mreq;
+        mreq.imr_multiaddr.s_addr = InetAddrIPV4(_localMulticastIP);
+        mreq.imr_interface.s_addr = INADDR_ANY;
+
+        if (!_ptrRtpSocket->SetSockopt(IPPROTO_IP,IP_ADD_MEMBERSHIP,
+                                       (WebRtc_Word8*)&mreq,sizeof (mreq)))
+        {
+           WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "setsockopt() for multicast failed, not closing socket");
+        }else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceTransport, _id,
+                         "multicast group successfully joined");
+        }
+    }
+    return kNoSocketError;
+}
+
// Binds the local RTCP receive socket to _localIP:_localPortRTCP (IPv4
// or IPv6) and, if _localMulticastIP is set, joins that multicast
// group. A multicast-join failure is logged but not fatal. Returns
// kNoSocketError on success, kSocketInvalid if the socket does not
// exist, or kFailedToBindPort if Bind() fails.
UdpTransportImpl::ErrorCode UdpTransportImpl::BindLocalRTCPSocket()
{
    if(!_ptrRtcpSocket)
    {
        return kSocketInvalid;
    }
    if(! IpV6Enabled())
    {
        SocketAddress recAddr;
        memset(&recAddr, 0, sizeof(SocketAddress));
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        recAddr.sin_length = 0;
        recAddr.sin_family = AF_INET;
#else
        recAddr._sockaddr_storage.sin_family = AF_INET;
#endif
        recAddr._sockaddr_in.sin_addr = InetAddrIPV4(_localIP);
        recAddr._sockaddr_in.sin_port = Htons(_localPortRTCP);

        if (!_ptrRtcpSocket->Bind(recAddr))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
                         "Failed to bind to port:%d ", _localPortRTCP);
            return kFailedToBindPort;
        }
    }
    else
    {
        SocketAddress stLclName;
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        stLclName.sin_length = 0;
        stLclName.sin_family = PF_INET6;
#else
        stLclName._sockaddr_storage.sin_family = PF_INET6;
#endif
        stLclName._sockaddr_in6.sin6_flowinfo = 0;
        stLclName._sockaddr_in6.sin6_scope_id = 0;
        stLclName._sockaddr_in6.sin6_port = Htons(_localPortRTCP);

        InetPresentationToNumeric(AF_INET6,_localIP,
                                  &stLclName._sockaddr_in6.sin6_addr);
        if (!_ptrRtcpSocket->Bind(stLclName))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
                         "Failed to bind to port:%d ", _localPortRTCP);
            return kFailedToBindPort;
        }
    }
    // A non-empty _localMulticastIP requests multicast reception.
    if(_localMulticastIP[0] != 0)
    {
        // Join the multicast group from which to receive datagrams.
        struct ip_mreq mreq;
        mreq.imr_multiaddr.s_addr = InetAddrIPV4(_localMulticastIP);
        mreq.imr_interface.s_addr = INADDR_ANY;

        // Best effort: failure to join is logged, the socket stays open.
        if (!_ptrRtcpSocket->SetSockopt(IPPROTO_IP,IP_ADD_MEMBERSHIP,
                                        (WebRtc_Word8*)&mreq,sizeof (mreq)))
        {
            WEBRTC_TRACE(
                kTraceError,
                kTraceTransport,
                _id,
                "setsockopt() for multicast failed, not closing socket");
        }else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceTransport, _id,
                         "multicast group successfully joined");
        }
    }
    return kNoSocketError;
}
+
// Creates dedicated send sockets bound to explicit source ports.
// rtpPort must be non-zero; an rtcpPort of 0 defaults to rtpPort+1.
// Any existing send sockets are closed, and the ToS/PCP/setsockopt
// state is reset. Returns 0 on success, -1 on failure (_lastError set).
WebRtc_Word32 UdpTransportImpl::InitializeSourcePorts(WebRtc_UWord16 rtpPort,
                                                      WebRtc_UWord16 rtcpPort)
{

    if(rtpPort == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                     "InitializeSourcePorts port 0 not allowed");
        _lastError = kPortInvalid;
        return -1;
    }

    CriticalSectionScoped cs(_crit);

    // Drop any previously created send sockets before rebinding.
    CloseSendSockets();

    if(_mgr == NULL)
    {
        return -1;
    }

    _srcPort = rtpPort;
    if(rtcpPort == 0)
    {
        // RTP/RTCP convention: RTCP uses the next port up.
        _srcPortRTCP = rtpPort+1;
    } else
    {
        _srcPortRTCP = rtcpPort;
    }
    // Fresh sockets start without ToS/PCP settings.
    _useSetSockOpt =false;
    _tos=0;
    _pcp=0;

    // NOTE(review): CreateSocket() results are not null-checked here;
    // BindRTPSendSocket()/BindRTCPSendSocket() report kSocketInvalid in
    // that case - confirm this is the intended failure path.
    _ptrSendRtpSocket = UdpSocketWrapper::CreateSocket(_id, _mgr, NULL, NULL,
                                                       IpV6Enabled());
    _ptrSendRtcpSocket = UdpSocketWrapper::CreateSocket(_id, _mgr, NULL, NULL,
                                                        IpV6Enabled());

    ErrorCode retVal = BindRTPSendSocket();
    if(retVal != kNoSocketError)
    {
        _lastError = retVal;
        return -1;
    }
    retVal = BindRTCPSendSocket();
    if(retVal != kNoSocketError)
    {
        _lastError = retVal;
        return -1;
    }
    return 0;
}
+
+WebRtc_Word32 UdpTransportImpl::SourcePorts(WebRtc_UWord16& rtpPort,
+                                            WebRtc_UWord16& rtcpPort) const
+{
+    CriticalSectionScoped cs(_crit);
+
+    rtpPort  = (_srcPort != 0) ? _srcPort : _localPort;
+    rtcpPort = (_srcPortRTCP != 0) ? _srcPortRTCP : _localPortRTCP;
+    return 0;
+}
+
+
// Starts receiving on the RTP and/or RTCP receive sockets. On Windows
// numberOfSocketBuffers is forwarded to the socket wrapper; on other
// platforms the parameter is unused. Returns 0 on success (or if
// already receiving), -1 on failure with _lastError = kStartReceiveError.
#ifdef _WIN32
WebRtc_Word32 UdpTransportImpl::StartReceiving(
    WebRtc_UWord32 numberOfSocketBuffers)
#else
WebRtc_Word32 UdpTransportImpl::StartReceiving(
    WebRtc_UWord32 /*numberOfSocketBuffers*/)
#endif
{
    CriticalSectionScoped cs(_crit);
    if(_receiving)
    {
        // Already started; nothing to do.
        return 0;
    }
    if(_ptrRtpSocket)
    {
#ifdef _WIN32
        if(!_ptrRtpSocket->StartReceiving(numberOfSocketBuffers))
#else
        if(!_ptrRtpSocket->StartReceiving())
#endif
        {
            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                         "Failed to start receive on RTP socket");
            _lastError = kStartReceiveError;
            return -1;
        }
    }
    if(_ptrRtcpSocket)
    {
        if(!_ptrRtcpSocket->StartReceiving())
        {
            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                         "Failed to start receive on RTCP socket");
            _lastError = kStartReceiveError;
            return -1;
        }
    }
    // At least one of the two receive sockets must exist; otherwise
    // there is nothing to receive on.
    if( _ptrRtpSocket == NULL &&
        _ptrRtcpSocket == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                     "Failed to StartReceiving, no socket initialized");
        _lastError = kStartReceiveError;
        return -1;
    }
    _receiving = true;
    return 0;
}
+
// Returns true while the transport has been started with StartReceiving()
// and not yet stopped.
// NOTE(review): _receiving is read here without taking _crit -- presumably
// an accepted benign race; confirm.
bool UdpTransportImpl::Receiving() const
{
   return _receiving;
}
+
+WebRtc_Word32 UdpTransportImpl::StopReceiving()
+{
+
+    CriticalSectionScoped cs(_crit);
+
+    _receiving = false;
+
+    if (_ptrRtpSocket)
+    {
+        if (!_ptrRtpSocket->StopReceiving())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to stop receiving on RTP socket");
+            _lastError = kStopReceiveError;
+            return -1;
+        }
+    }
+    if (_ptrRtcpSocket)
+    {
+        if (!_ptrRtcpSocket->StopReceiving())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to stop receiving on RTCP socket");
+            _lastError = kStopReceiveError;
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::InitializeSendSockets(
+    const char* ipaddr,
+    const WebRtc_UWord16 rtpPort,
+    const WebRtc_UWord16 rtcpPort)
+{
+    {
+        CriticalSectionScoped cs(_crit);
+        _destPort = rtpPort;
+        if(rtcpPort == 0)
+        {
+            _destPortRTCP = _destPort+1;
+        } else
+        {
+            _destPortRTCP = rtcpPort;
+        }
+
+        if(ipaddr == NULL)
+        {
+            if (!IsIpAddressValid(_destIP, IpV6Enabled()))
+            {
+                _destPort = 0;
+                _destPortRTCP = 0;
+                _lastError = kIpAddressInvalid;
+                return -1;
+            }
+        } else
+        {
+            if (IsIpAddressValid(ipaddr, IpV6Enabled()))
+            {
+                strncpy(
+                    _destIP,
+                    ipaddr,
+                    IpV6Enabled() ? kIpAddressVersion6Length :
+                    kIpAddressVersion4Length);
+            } else {
+                _destPort = 0;
+                _destPortRTCP = 0;
+                _lastError = kIpAddressInvalid;
+                return -1;
+            }
+        }
+        BuildRemoteRTPAddr();
+        BuildRemoteRTCPAddr();
+    }
+
+    if (_ipV6Enabled)
+    {
+        if (_qos)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceTransport,
+                _id,
+                "QOS is enabled but will be ignored since IPv6 is enabled");
+        }
+    }else
+    {
+        // TODO (grunell): Multicast support is experimantal.
+
+        // Put the first digit of the remote address in val.
+        WebRtc_Word32 val = ntohl(_remoteRTPAddr._sockaddr_in.sin_addr)>> 24;
+
+        if((val > 223) && (val < 240))
+        {
+            // Multicast address.
+            CriticalSectionScoped cs(_crit);
+
+            UdpSocketWrapper* rtpSock = (_ptrSendRtpSocket ?
+                                         _ptrSendRtpSocket : _ptrRtpSocket);
+            if (!rtpSock || !rtpSock->ValidHandle())
+            {
+                _lastError = kSocketInvalid;
+                return -1;
+            }
+            UdpSocketWrapper* rtcpSock = (_ptrSendRtcpSocket ?
+                                          _ptrSendRtcpSocket : _ptrRtcpSocket);
+            if (!rtcpSock || !rtcpSock->ValidHandle())
+            {
+                _lastError = kSocketInvalid;
+                return -1;
+            }
+
+            // Set Time To Live to same region
+            WebRtc_Word32 iOptVal = 64;
+            if (!rtpSock->SetSockopt(IPPROTO_IP, IP_MULTICAST_TTL,
+                                     (WebRtc_Word8*)&iOptVal,
+                                     sizeof (WebRtc_Word32)))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "setsockopt for multicast error on RTP socket");
+                _ptrRtpSocket->CloseBlocking();
+                _ptrRtpSocket = NULL;
+                _lastError = kMulticastAddressInvalid;
+                return -1;
+            }
+            if (!rtcpSock->SetSockopt(IPPROTO_IP, IP_MULTICAST_TTL,
+                                      (WebRtc_Word8*)&iOptVal,
+                                      sizeof (WebRtc_Word32)))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "setsockopt for multicast error on RTCP socket");
+                _ptrRtpSocket->CloseBlocking();
+                _ptrRtpSocket = NULL;
+                _lastError = kMulticastAddressInvalid;
+                return -1;
+            }
+        }
+    }
+    return 0;
+}
+
// Fills in remoteAddr with the given port and presentation-format IP
// string, using the IPv6 layout when _ipV6Enabled is set and the IPv4
// layout otherwise.
void UdpTransportImpl::BuildSockaddrIn(WebRtc_UWord16 portnr,
                                       const char* ip,
                                       SocketAddress& remoteAddr) const
{
    if(_ipV6Enabled)
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        // Platforms whose sockaddr carries a length field need it zeroed.
        remoteAddr.sin_length = 0;
        remoteAddr.sin_family = PF_INET6;
#else
        remoteAddr._sockaddr_storage.sin_family = PF_INET6;
#endif
        remoteAddr._sockaddr_in6.sin6_port = Htons(portnr);
        // Convert the textual address into its binary in6_addr form.
        InetPresentationToNumeric(AF_INET6, ip,
                                  &remoteAddr._sockaddr_in6.sin6_addr);
        remoteAddr._sockaddr_in6.sin6_flowinfo=0;
        remoteAddr._sockaddr_in6.sin6_scope_id=0;
    } else
    {
#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
        remoteAddr.sin_length = 0;
        remoteAddr.sin_family = PF_INET;
#else
        remoteAddr._sockaddr_storage.sin_family = PF_INET;
#endif
        remoteAddr._sockaddr_in.sin_port = Htons(portnr);
        remoteAddr._sockaddr_in.sin_addr= InetAddrIPV4(
            const_cast<char*>(ip));
    }
}
+
+WebRtc_Word32 UdpTransportImpl::SendRaw(const WebRtc_Word8 *data,
+                                        WebRtc_UWord32 length,
+                                        WebRtc_Word32 isRTCP,
+                                        WebRtc_UWord16 portnr,
+                                        const char* ip)
+{
+    CriticalSectionScoped cs(_crit);
+    if(isRTCP)
+    {
+        UdpSocketWrapper* rtcpSock = NULL;
+        if(_ptrSendRtcpSocket)
+        {
+            rtcpSock = _ptrSendRtcpSocket;
+        } else if(_ptrRtcpSocket)
+        {
+            rtcpSock = _ptrRtcpSocket;
+        } else
+        {
+            return -1;
+        }
+        if(portnr == 0 && ip == NULL)
+        {
+            return rtcpSock->SendTo(data,length,_remoteRTCPAddr);
+
+        } else if(portnr != 0 && ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, ip, remoteAddr);
+            return rtcpSock->SendTo(data,length,remoteAddr);
+        } else if(ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(_destPortRTCP, ip, remoteAddr);
+            return rtcpSock->SendTo(data,length,remoteAddr);
+        } else
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, _destIP, remoteAddr);
+            return rtcpSock->SendTo(data,length,remoteAddr);
+        }
+    } else {
+        UdpSocketWrapper* rtpSock = NULL;
+        if(_ptrSendRtpSocket)
+        {
+            rtpSock = _ptrSendRtpSocket;
+
+        } else if(_ptrRtpSocket)
+        {
+            rtpSock = _ptrRtpSocket;
+        } else
+        {
+            return -1;
+        }
+        if(portnr == 0 && ip == NULL)
+        {
+            return rtpSock->SendTo(data,length,_remoteRTPAddr);
+
+        } else if(portnr != 0 && ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, ip, remoteAddr);
+            return rtpSock->SendTo(data,length,remoteAddr);
+        } else if(ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(_destPort, ip, remoteAddr);
+            return rtpSock->SendTo(data,length,remoteAddr);
+        } else
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, _destIP, remoteAddr);
+            return rtpSock->SendTo(data,length,remoteAddr);
+        }
+    }
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTPPacketTo(const WebRtc_Word8* data,
+                                                WebRtc_UWord32 length,
+                                                const SocketAddress& to)
+{
+    CriticalSectionScoped cs(_crit);
+    if(_ptrSendRtpSocket)
+    {
+        return _ptrSendRtpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtpSocket)
+    {
+        return _ptrRtpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTCPPacketTo(const WebRtc_Word8* data,
+                                                 WebRtc_UWord32 length,
+                                                 const SocketAddress& to)
+{
+
+    CriticalSectionScoped cs(_crit);
+
+    if(_ptrSendRtcpSocket)
+    {
+        return _ptrSendRtcpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtcpSocket)
+    {
+        return _ptrRtcpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTPPacketTo(const WebRtc_Word8* data,
+                                                WebRtc_UWord32 length,
+                                                const WebRtc_UWord16 rtpPort)
+{
+
+    CriticalSectionScoped cs(_crit);
+    // Use the current SocketAdress but update it with rtpPort.
+    SocketAddress to;
+    memcpy(&to, &_remoteRTPAddr, sizeof(SocketAddress));
+
+    if(_ipV6Enabled)
+    {
+        to._sockaddr_in6.sin6_port = Htons(rtpPort);
+    } else
+    {
+        to._sockaddr_in.sin_port = Htons(rtpPort);
+    }
+
+    if(_ptrSendRtpSocket)
+    {
+        return _ptrSendRtpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtpSocket)
+    {
+        return _ptrRtpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTCPPacketTo(const WebRtc_Word8* data,
+                                                 WebRtc_UWord32 length,
+                                                 const WebRtc_UWord16 rtcpPort)
+{
+    CriticalSectionScoped cs(_crit);
+
+    // Use the current SocketAdress but update it with rtcpPort.
+    SocketAddress to;
+    memcpy(&to, &_remoteRTCPAddr, sizeof(SocketAddress));
+
+    if(_ipV6Enabled)
+    {
+        to._sockaddr_in6.sin6_port = Htons(rtcpPort);
+    } else
+    {
+        to._sockaddr_in.sin_port = Htons(rtcpPort);
+    }
+
+    if(_ptrSendRtcpSocket)
+    {
+        return _ptrSendRtcpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtcpSocket)
+    {
+        return _ptrRtcpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
// Transport-interface entry point: sends an RTP packet to the
// configured destination. If no RTP socket exists yet, one is created
// lazily, bound to the wildcard local address on _destPort, and kept as
// the receive socket. Returns the number of bytes sent, or -1 on error.
int UdpTransportImpl::SendPacket(int /*channel*/, const void* data, int length)
{
    WEBRTC_TRACE(kTraceStream, kTraceTransport, _id, "%s", __FUNCTION__);

    CriticalSectionScoped cs(_crit);

    // A destination IP and port must have been configured first.
    if(_destIP[0] == 0)
    {
        return -1;
    }
    if(_destPort == 0)
    {
        return -1;
    }

    // Create socket if it hasn't been set up already.
    // TODO (hellner): why not fail here instead. Sockets not being initialized
    //                 indicates that there is a problem somewhere.
    if( _ptrSendRtpSocket == NULL &&
        _ptrRtpSocket == NULL)
    {
        WEBRTC_TRACE(
            kTraceStateInfo,
            kTraceTransport,
            _id,
            "Creating RTP socket since no receive or source socket is\
 configured");

        _ptrRtpSocket = UdpSocketWrapper::CreateSocket(_id, _mgr, this,
                                                       IncomingRTPCallback,
                                                       IpV6Enabled());

        // Don't bind to a specific IP address.
        // NOTE(review): the literal 16 is presumably
        // kIpAddressVersion4Length -- confirm.
        if(! IpV6Enabled())
        {
            strncpy(_localIP, "0.0.0.0",16);
        } else
        {
            strncpy(_localIP, "0000:0000:0000:0000:0000:0000:0000:0000",
                    kIpAddressVersion6Length);
        }
        // Bind the lazily created socket to the destination port.
        _localPort = _destPort;

        ErrorCode retVal = BindLocalRTPSocket();
        if(retVal != kNoSocketError)
        {
            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                         "SendPacket() failed to bind RTP socket");
            _lastError = retVal;
            CloseReceiveSockets();
            return -1;
        }
    }

    // Prefer the dedicated send socket; fall back to the receive socket.
    if(_ptrSendRtpSocket)
    {
        return _ptrSendRtpSocket->SendTo((const WebRtc_Word8*)data, length,
                                         _remoteRTPAddr);

    } else if(_ptrRtpSocket)
    {
        return _ptrRtpSocket->SendTo((const WebRtc_Word8*)data, length,
                                     _remoteRTPAddr);
    }
    return -1;
}
+
// Transport-interface entry point: sends an RTCP packet to the
// configured destination. Mirrors SendPacket(): if no RTCP socket
// exists yet, one is created lazily, bound to the wildcard local
// address on _destPortRTCP, and kept as the receive socket. Returns the
// number of bytes sent, or -1 on error.
int UdpTransportImpl::SendRTCPPacket(int /*channel*/, const void* data,
                                     int length)
{

    CriticalSectionScoped cs(_crit);
    // A destination IP and RTCP port must have been configured first.
    if(_destIP[0] == 0)
    {
        return -1;
    }
    if(_destPortRTCP == 0)
    {
        return -1;
    }

    // Create socket if it hasn't been set up already.
    // TODO (hellner): why not fail here instead. Sockets not being initialized
    //                 indicates that there is a problem somewhere.
    if( _ptrSendRtcpSocket == NULL &&
        _ptrRtcpSocket == NULL)
    {
        WEBRTC_TRACE(
            kTraceStateInfo,
            kTraceTransport,
            _id,
            "Creating RTCP socket since no receive or source socket is\
 configured");

        _ptrRtcpSocket = UdpSocketWrapper::CreateSocket(_id, _mgr, this,
                                                        IncomingRTCPCallback,
                                                        IpV6Enabled());

        // Don't bind to a specific IP address.
        // NOTE(review): the literal 16 is presumably
        // kIpAddressVersion4Length -- confirm.
        if(! IpV6Enabled())
        {
            strncpy(_localIP, "0.0.0.0",16);
        } else
        {
            strncpy(_localIP, "0000:0000:0000:0000:0000:0000:0000:0000",
                    kIpAddressVersion6Length);
        }
        // Bind the lazily created socket to the destination RTCP port.
        _localPortRTCP = _destPortRTCP;

        ErrorCode retVal = BindLocalRTCPSocket();
        if(retVal != kNoSocketError)
        {
            _lastError = retVal;
            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
                         "SendRTCPPacket() failed to bind RTCP socket");
            CloseReceiveSockets();
            return -1;
        }
    }

    // Prefer the dedicated send socket; fall back to the receive socket.
    if(_ptrSendRtcpSocket)
    {
        return _ptrSendRtcpSocket->SendTo((const WebRtc_Word8*)data, length,
                                          _remoteRTCPAddr);
    } else if(_ptrRtcpSocket)
    {
        return _ptrRtcpSocket->SendTo((const WebRtc_Word8*)data, length,
                                      _remoteRTCPAddr);
    }
    return -1;
}
+
+WebRtc_Word32 UdpTransportImpl::SetSendIP(const char* ipaddr)
+{
+    if(!IsIpAddressValid(ipaddr,IpV6Enabled()))
+    {
+        return kIpAddressInvalid;
+    }
+    CriticalSectionScoped cs(_crit);
+    strncpy(_destIP, ipaddr,kIpAddressVersion6Length);
+    BuildRemoteRTPAddr();
+    BuildRemoteRTCPAddr();
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetSendPorts(WebRtc_UWord16 rtpPort,
+                                             WebRtc_UWord16 rtcpPort)
+{
+    CriticalSectionScoped cs(_crit);
+    _destPort = rtpPort;
+    if(rtcpPort == 0)
+    {
+        _destPortRTCP = _destPort+1;
+    } else
+    {
+        _destPortRTCP = rtcpPort;
+    }
+    BuildRemoteRTPAddr();
+    BuildRemoteRTCPAddr();
+    return 0;
+}
+
+void UdpTransportImpl::IncomingRTPCallback(CallbackObj obj,
+                                           const WebRtc_Word8* rtpPacket,
+                                           WebRtc_Word32 rtpPacketLength,
+                                           const SocketAddress* from)
+{
+    if (rtpPacket && rtpPacketLength > 0)
+    {
+        UdpTransportImpl* socketTransport = (UdpTransportImpl*) obj;
+        socketTransport->IncomingRTPFunction(rtpPacket, rtpPacketLength, from);
+    }
+}
+
+void UdpTransportImpl::IncomingRTCPCallback(CallbackObj obj,
+                                            const WebRtc_Word8* rtcpPacket,
+                                            WebRtc_Word32 rtcpPacketLength,
+                                            const SocketAddress* from)
+{
+    if (rtcpPacket && rtcpPacketLength > 0)
+    {
+        UdpTransportImpl* socketTransport = (UdpTransportImpl*) obj;
+        socketTransport->IncomingRTCPFunction(rtcpPacket, rtcpPacketLength,
+                                              from);
+    }
+}
+
// Handles an incoming RTP packet: applies the IP filter and the RTP
// port filter, caches the sender's address in _fromIP/_fromPort, and
// forwards accepted packets to the registered packet callback.
void UdpTransportImpl::IncomingRTPFunction(const WebRtc_Word8* rtpPacket,
                                           WebRtc_Word32 rtpPacketLength,
                                           const SocketAddress* fromSocket)
{
    char ipAddress[kIpAddressVersion6Length];
    WebRtc_UWord32 ipAddressLength = kIpAddressVersion6Length;
    WebRtc_UWord16 portNr = 0;

    {
        // Filtering and the _fromIP/_fromPort updates are done under
        // the filter lock; the callback below uses a separate lock.
        CriticalSectionScoped cs(_critFilter);
        if (FilterIPAddress(fromSocket) == false)
        {
            // Packet should be filtered out. Drop it.
            WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
                         "Incoming RTP packet blocked by IP filter");
            return;
        }

        // Resolve the sender's presentation address and port. On
        // failure the packet is still delivered, but _fromIP is not
        // updated.
        if (IPAddressCached(*fromSocket, ipAddress, ipAddressLength, portNr) <
            0)
        {
            WEBRTC_TRACE(
                kTraceError,
                kTraceTransport,
                _id,
                "UdpTransportImpl::IncomingRTPFunction - Cannot get sender\
 information");
        }else
        {
            strncpy(_fromIP, ipAddress, kIpAddressVersion6Length);
        }

        // Filter based on port.
        if (_rtpFilterPort != 0 &&
            _rtpFilterPort != portNr)
        {
            // Drop packet.
            memset(_fromIP, 0, sizeof(_fromIP));
            WEBRTC_TRACE(
                kTraceStream,
                kTraceTransport,
                _id,
                "Incoming RTP packet blocked by filter incoming from port:%d\
 allowed port:%d",
                portNr,
                _rtpFilterPort);
            return;
        }
        _fromPort = portNr;
    }

    // Deliver under the callback lock, after the filter lock has been
    // released.
    CriticalSectionScoped cs(_critPacketCallback);
    if (_packetCallback)
    {
        WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
            "Incoming RTP packet from ip:%s port:%d", ipAddress, portNr);
        _packetCallback->IncomingRTPPacket(rtpPacket, rtpPacketLength,
                                           ipAddress, portNr);
    }
}
+
+void UdpTransportImpl::IncomingRTCPFunction(const WebRtc_Word8* rtcpPacket,
+                                            WebRtc_Word32 rtcpPacketLength,
+                                            const SocketAddress* fromSocket)
+{
+    char ipAddress[kIpAddressVersion6Length];
+    WebRtc_UWord32 ipAddressLength = kIpAddressVersion6Length;
+    WebRtc_UWord16 portNr = 0;
+
+    {
+        CriticalSectionScoped cs(_critFilter);
+        if (FilterIPAddress(fromSocket) == false)
+        {
+            // Packet should be filtered out. Drop it.
+            WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
+                         "Incoming RTCP packet blocked by IP filter");
+            return;
+        }
+        if (IPAddress(*fromSocket, ipAddress, ipAddressLength, portNr) < 0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpTransportImpl::IncomingRTCPFunction - Cannot get sender\
+ information");
+        }else {
+            strncpy(_fromIP, ipAddress, kIpAddressVersion6Length);
+        }
+
+        // Filter based on port.
+        if (_rtcpFilterPort != 0 &&
+            _rtcpFilterPort != portNr)
+        {
+            // Drop packet.
+            WEBRTC_TRACE(
+                kTraceStream,
+                kTraceTransport,
+                _id,
+                "Incoming RTCP packet blocked by filter incoming from port:%d\
+ allowed port:%d",
+                portNr,
+                _rtpFilterPort);
+            return;
+        }
+        _fromPortRTCP = portNr;
+    }
+
+    CriticalSectionScoped cs(_critPacketCallback);
+    if (_packetCallback)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
+                     "Incoming RTCP packet from ip:%s port:%d", ipAddress,
+                     portNr);
+        _packetCallback->IncomingRTCPPacket(rtcpPacket, rtcpPacketLength,
+                                            ipAddress, portNr);
+    }
+}
+
// Returns true if a packet from fromAddress passes the configured IP
// filter (_filterIPAddress), false if it should be dropped. A zero /
// unset filter value admits all senders of the matching family.
// NOTE(review): fromAddress is dereferenced unconditionally; callers
// must never pass NULL -- confirm the socket layer guarantees this.
bool UdpTransportImpl::FilterIPAddress(const SocketAddress* fromAddress)
{
    if(fromAddress->_sockaddr_storage.sin_family == AF_INET)
    {
        if (_filterIPAddress._sockaddr_storage.sin_family == AF_INET)
        {
            // IP is stored in sin_addr; 0 means "no filter set".
            if (_filterIPAddress._sockaddr_in.sin_addr != 0 &&
                (_filterIPAddress._sockaddr_in.sin_addr !=
                 fromAddress->_sockaddr_in.sin_addr))
            {
                return false;
            }
        }
    }
    else if(fromAddress->_sockaddr_storage.sin_family == AF_INET6)
    {
        if (_filterIPAddress._sockaddr_storage.sin_family == AF_INET6)
        {
            // Compare the IPv6 address as four 32-bit words; a zero
            // word in the filter acts as a wildcard for that word.
            for (WebRtc_Word32 i = 0; i < 4; i++)
            {
                if (_filterIPAddress._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[i] != 0 &&
                    _filterIPAddress._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[i] != fromAddress->_sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[i])
                {
                    return false;
                }
            }
        }
    }
    else
    {
      // Unknown address family: reject the packet.
      WEBRTC_TRACE(
                kTraceError,
                kTraceTransport,
                _id,
                "UdpTransportImpl::FilterIPAddress() unknown address family");
        return false;
    }
    return true;
}
+
+void UdpTransportImpl::CloseReceiveSockets()
+{
+    if(_ptrRtpSocket)
+    {
+        _ptrRtpSocket->CloseBlocking();
+        _ptrRtpSocket = NULL;
+    }
+    if(_ptrRtcpSocket)
+    {
+        _ptrRtcpSocket->CloseBlocking();
+        _ptrRtcpSocket = NULL;
+    }
+    _receiving = false;
+}
+
+void UdpTransportImpl::CloseSendSockets()
+{
+    if(_ptrSendRtpSocket)
+    {
+        _ptrSendRtpSocket->CloseBlocking();
+        _ptrSendRtpSocket = 0;
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        _ptrSendRtcpSocket->CloseBlocking();
+        _ptrSendRtcpSocket = 0;
+    }
+}
+
+WebRtc_UWord16 UdpTransport::Htons(const WebRtc_UWord16 port)
+{
+    return htons(port);
+}
+
+WebRtc_UWord32 UdpTransport::Htonl(const WebRtc_UWord32 a)
+{
+    return htonl(a);
+}
+
+WebRtc_UWord32 UdpTransport::InetAddrIPV4(const char* ip)
+{
+    return ::inet_addr(ip);
+}
+
// Converts a presentation-format address string (src) of family af
// (AF_INET or AF_INET6) into its binary form in dst. Uses inet_pton()
// on Linux/Mac and WSAStringToAddressA() on Windows. Returns 0 on
// success, -1 on failure or on unsupported platforms/families.
WebRtc_Word32 UdpTransport::InetPresentationToNumeric(WebRtc_Word32 af,
                                                      const char* src,
                                                      void* dst)
{
#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
    // inet_pton returns 1 on success, 0 for a malformed string and -1
    // for an unsupported family; collapse all failures to -1.
    const WebRtc_Word32 result = inet_pton(af, src, dst);
    return result > 0 ? 0 : -1;

#elif defined(_WIN32)
    // Windows: parse into a temporary sockaddr, then copy out only the
    // address part for the requested family.
    SocketAddress temp;
    int length=sizeof(SocketAddress);

    if(af == AF_INET)
    {
        WebRtc_Word32 result = WSAStringToAddressA(
            (const LPSTR)src,
            af,
            0,
            reinterpret_cast<struct sockaddr*>(&temp),
            &length);
        if(result != 0)
        {
            return -1;
        }
        memcpy(dst,&(temp._sockaddr_in.sin_addr),
               sizeof(temp._sockaddr_in.sin_addr));
        return 0;
    }
    else if(af == AF_INET6)
    {
        WebRtc_Word32 result = WSAStringToAddressA(
            (const LPSTR)src,
            af,
            0,
            reinterpret_cast<struct sockaddr*>(&temp),
            &length);
        if(result !=0)
        {
            return -1;
        }
        memcpy(dst,&(temp._sockaddr_in6.sin6_addr),
               sizeof(temp._sockaddr_in6.sin6_addr));
        return 0;

    }else
    {
        // Unsupported address family.
        return -1;
    }
#else
    // No implementation for this platform.
    return -1;
#endif
}
+
+WebRtc_Word32 UdpTransport::LocalHostAddressIPV6(char n_localIP[16])
+{
+
+#if defined(_WIN32)
+    struct addrinfo *result = NULL;
+    struct addrinfo *ptr = NULL;
+    struct addrinfo hints;
+
+    ZeroMemory(&hints, sizeof(hints));
+    hints.ai_family = AF_INET6;
+
+    char szHostName[256] = "";
+    if(::gethostname(szHostName, sizeof(szHostName) - 1))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1, "gethostname failed");
+        return -1;
+    }
+
+    DWORD dwRetval = getaddrinfo(szHostName, NULL, &hints, &result);
+    if ( dwRetval != 0 )
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
+                     "getaddrinfo failed, error:%d", dwRetval);
+        return -1;
+    }
+    for(ptr=result; ptr != NULL ;ptr=ptr->ai_next)
+    {
+        switch (ptr->ai_family)
+        {
+            case AF_INET6:
+                {
+                    for(int i = 0; i< 16; i++)
+                    {
+                        n_localIP[i] = (*(SocketAddress*)ptr->ai_addr).
+                            _sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u8[i];
+                    }
+                    bool islocalIP = true;
+
+                    for(int n = 0; n< 15; n++)
+                    {
+                        if(n_localIP[n] != 0)
+                        {
+                            islocalIP = false;
+                            break;
+                        }
+                    }
+
+                    if(islocalIP && n_localIP[15] != 1)
+                    {
+                        islocalIP = false;
+                    }
+
+                    if(islocalIP && ptr->ai_next)
+                    {
+                        continue;
+                    }
+                    if(n_localIP[0] == 0xfe && 
+                       n_localIP[1] == 0x80 && ptr->ai_next)
+                    {
+                        continue;
+                    }
+                    freeaddrinfo(result);
+                }
+                return 0;
+            default:
+                break;
+        };
+    }
+    freeaddrinfo(result);
+    WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
+                 "getaddrinfo failed to find address");
+    return -1;
+
+#elif defined(WEBRTC_MAC)
+    struct ifaddrs* ptrIfAddrs = NULL;
+    struct ifaddrs* ptrIfAddrsStart = NULL;
+
+    getifaddrs(&ptrIfAddrsStart);
+    ptrIfAddrs = ptrIfAddrsStart;
+    while(ptrIfAddrs)
+    {
+        if(ptrIfAddrs->ifa_addr->sa_family == AF_INET6)
+        {
+            const struct sockaddr_in6* sock_in6 = 
+                reinterpret_cast<struct sockaddr_in6*>(ptrIfAddrs->ifa_addr);
+            const struct in6_addr* sin6_addr = &sock_in6->sin6_addr;
+
+            if (IN6_IS_ADDR_LOOPBACK(sin6_addr) || 
+                IN6_IS_ADDR_LINKLOCAL(sin6_addr)) {
+                ptrIfAddrs = ptrIfAddrs->ifa_next;
+                continue;
+            }
+            memcpy(n_localIP, sin6_addr->s6_addr, sizeof(sin6_addr->s6_addr));
+            freeifaddrs(ptrIfAddrsStart);
+            return 0;
+        }
+        ptrIfAddrs = ptrIfAddrs->ifa_next;
+    }
+    freeifaddrs(ptrIfAddrsStart);
+    return -1;
+#elif defined(WEBRTC_ANDROID)
+    return -1;
+#else // WEBRTC_LINUX
+    struct
+    {
+        struct nlmsghdr n;
+        struct ifaddrmsg r;
+    } req;
+
+    struct rtattr* rta = NULL;
+    int status;
+    char buf[16384]; // = 16 * 1024 (16 kB)
+    struct nlmsghdr* nlmp;
+    struct ifaddrmsg* rtmp;
+    struct rtattr* rtatp;
+    int rtattrlen;
+    struct in6_addr* in6p;
+
+    int fd = socket(PF_NETLINK, SOCK_DGRAM, NETLINK_ROUTE);
+    if (fd == -1)
+    {
+        return -1;
+    }
+
+    // RTM_GETADDR is used to fetch the ip address from the kernel interface
+    // table. Populate the msg structure (req) the size of the message buffer
+    // is specified to netlinkmessage header, and flags values are set as
+    // NLM_F_ROOT | NLM_F_REQUEST.
+    // The request flag must be set for all messages requesting the data from
+    // kernel. The root flag is used to notify the kernel to return the full
+    // tabel. Another flag (not used) is NLM_F_MATCH. This is used to get only
+    // specified entries in the table. At the time of writing this program this
+    // flag is not implemented in kernel
+
+    memset(&req, 0, sizeof(req));
+    req.n.nlmsg_len = NLMSG_LENGTH(sizeof(struct ifaddrmsg));
+    req.n.nlmsg_flags = NLM_F_REQUEST | NLM_F_ROOT;
+    req.n.nlmsg_type = RTM_GETADDR;
+    req.r.ifa_family = AF_INET6;
+
+    // Fill up all the attributes for the rtnetlink header.
+    // The lenght is very important. 16 signifies the ipv6 address.
+    rta = (struct rtattr*)(((char*)&req) + NLMSG_ALIGN(req.n.nlmsg_len));
+    rta->rta_len = RTA_LENGTH(16);
+
+    status = send(fd, &req, req.n.nlmsg_len, 0);
+    if (status < 0)
+    {
+        close(fd);
+        return -1;
+    }
+    status = recv(fd, buf, sizeof(buf), 0);
+    if (status < 0)
+    {
+        close(fd);
+        return -1;
+    }
+    if(status == 0)
+    {
+        close(fd);
+        return -1;
+    }
+    close(fd);
+
+    // The message is stored in buff. Parse the message to get the requested
+    // data.
+    {
+        nlmp = (struct nlmsghdr*)buf;
+        int len = nlmp->nlmsg_len;
+        int req_len = len - sizeof(*nlmp);
+
+        if (req_len < 0 || len > status)
+        {
+            return -1;
+        }
+        if (!NLMSG_OK_NO_WARNING(nlmp, status))
+        {
+            return -1;
+        }
+        rtmp = (struct ifaddrmsg*)NLMSG_DATA(nlmp);
+        rtatp = (struct rtattr*)IFA_RTA(rtmp);
+
+        rtattrlen = IFA_PAYLOAD(nlmp);
+
+        for (; RTA_OK(rtatp, rtattrlen); rtatp = RTA_NEXT(rtatp, rtattrlen))
+        {
+
+            // Here we hit the first chunk of the message. Time to validate the
+            // type. For more info on the different types see
+            // "man(7) rtnetlink" The table below is taken from man pages.
+            // Attributes
+            // rta_type        value type             description
+            // -------------------------------------------------------------
+            // IFA_UNSPEC      -                      unspecified.
+            // IFA_ADDRESS     raw protocol address   interface address
+            // IFA_LOCAL       raw protocol address   local address
+            // IFA_LABEL       asciiz string          name of the interface
+            // IFA_BROADCAST   raw protocol address   broadcast address.
+            // IFA_ANYCAST     raw protocol address   anycast address
+            // IFA_CACHEINFO   struct ifa_cacheinfo   Address information.
+
+            if(rtatp->rta_type == IFA_ADDRESS)
+            {
+                bool islocalIP = true;
+                in6p = (struct in6_addr*)RTA_DATA(rtatp);
+                for(int n = 0; n< 15; n++)
+                {
+                    if(in6p->s6_addr[n] != 0)
+                    {
+                        islocalIP = false;
+                        break;
+                    }
+                }
+                if(islocalIP && in6p->s6_addr[15] != 1)
+                {
+                    islocalIP = false;
+                }
+                if(!islocalIP)
+                {
+                    for(int i = 0; i< 16; i++)
+                    {
+                        n_localIP[i] = in6p->s6_addr[i];
+                    }
+                    if(n_localIP[0] == static_cast<char> (0xfe)
+                       && n_localIP[1] == static_cast<char>(0x80) )
+                    {
+                        // Auto configured IP.
+                        continue;
+                    }
+                    break;
+                }
+            }
+        }
+    }
+    return 0;
+#endif
+}
+
+// Retrieves the local host's IPv4 address.
+// localIP: out parameter receiving the address (byte order produced by
+//          Htonl() on the native representation, as before).
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 UdpTransport::LocalHostAddress(WebRtc_UWord32& localIP)
+{
+ #if defined(_WIN32)
+    hostent* localHost;
+    // NOTE(review): gethostbyname("") relies on deprecated behavior; the
+    // gethostname() + gethostbyname() combination used in the WEBRTC_MAC
+    // branch is the documented approach.
+    localHost = gethostbyname( "" );
+    if(localHost)
+    {
+        if(localHost->h_addrtype != AF_INET)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                -1,
+                "LocalHostAddress can only get local IP for IP Version 4");
+            return -1;
+        }
+        localIP= Htonl(
+            (*(struct in_addr *)localHost->h_addr_list[0]).S_un.S_addr);
+        return 0;
+    }
+    else
+    {
+        WebRtc_Word32 error = WSAGetLastError();
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
+                     "gethostbyname failed, error:%d", error);
+        return -1;
+    }
+#elif (defined(WEBRTC_MAC))
+    char localname[255];
+    if (gethostname(localname, 255) != -1)
+    {
+        hostent* localHost;
+        localHost = gethostbyname(localname);
+        if(localHost)
+        {
+            if(localHost->h_addrtype != AF_INET)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceTransport,
+                    -1,
+                    "LocalHostAddress can only get local IP for IP Version 4");
+                return -1;
+            }
+            localIP = Htonl((*(struct in_addr*)*localHost->h_addr_list).s_addr);
+            return 0;
+        }
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1, "gethostname failed");
+    return -1;
+#else // WEBRTC_LINUX
+    int sockfd, size  = 1;
+    struct ifreq* ifr;
+    struct ifconf ifc;
+
+    if (0 > (sockfd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP)))
+    {
+      return -1;
+    }
+    ifc.ifc_len = IFRSIZE;
+    ifc.ifc_req = NULL;
+    do
+    {
+        ++size;
+        // Buffer size needed is unknown. Try increasing it until no overflow
+        // occurs.
+        ifreq* largerBuf = (ifreq*)realloc(ifc.ifc_req, IFRSIZE);
+        if (NULL == largerBuf)
+        {
+            // BUG FIX: the original called exit(EXIT_FAILURE) here, killing
+            // the whole process from library code, and leaked both the old
+            // buffer (realloc keeps it valid on failure) and sockfd. Fail
+            // gracefully instead.
+            free(ifc.ifc_req);
+            close(sockfd);
+            return -1;
+        }
+        ifc.ifc_req = largerBuf;
+        ifc.ifc_len = IFRSIZE;
+        if (ioctl(sockfd, SIOCGIFCONF, &ifc))
+        {
+            free(ifc.ifc_req);  // BUG FIX: buffer was leaked on this path.
+            close(sockfd);
+            return -1;
+        }
+    } while  (IFRSIZE <= ifc.ifc_len);
+
+    ifr = ifc.ifc_req;
+    for (;(char *) ifr < (char *) ifc.ifc_req + ifc.ifc_len; ++ifr)
+    {
+        // NOTE(review): this compares the addresses of two distinct sa_data
+        // arrays, which are never equal, so the duplicate check never fires.
+        // Kept as-is for behavior parity; detecting duplicates would require
+        // a memcmp of the address bytes.
+        if (ifr->ifr_addr.sa_data == (ifr+1)->ifr_addr.sa_data)
+        {
+          continue;  // duplicate, skip it
+        }
+        if (ioctl(sockfd, SIOCGIFFLAGS, ifr))
+        {
+          continue;  // failed to get flags, skip it
+        }
+        // Skip the loopback interface; the first other interface's address
+        // is returned.
+        if(strncmp(ifr->ifr_name, "lo",3) == 0)
+        {
+            continue;
+        }else
+        {
+            struct sockaddr* saddr = &(ifr->ifr_addr);
+            SocketAddress* socket_addess = reinterpret_cast<SocketAddress*>(
+                saddr);
+            localIP = Htonl(socket_addess->_sockaddr_in.sin_addr);
+            free(ifc.ifc_req);  // BUG FIX: buffer was leaked on this path.
+            close(sockfd);
+            return 0;
+        }
+    }
+    free(ifc.ifc_req);  // BUG FIX: buffer was leaked on this path.
+    close(sockfd);
+    return -1;
+#endif
+}
+
+// Converts a SocketAddress to its textual form.
+// address:    source address (IPv4 or IPv6).
+// ip:         output buffer receiving the NUL-terminated address string.
+// ipSize:     in: capacity of ip; out (on _WIN32 only, see NOTE below):
+//             length of the written string.
+// sourcePort: out: the port stored in the address (see byte-order NOTE).
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 UdpTransport::IPAddress(const SocketAddress& address,
+                                      char* ip,
+                                      WebRtc_UWord32& ipSize,
+                                      WebRtc_UWord16& sourcePort)
+{
+ #if defined(_WIN32)
+    DWORD dwIPSize = ipSize;
+    // WSAAddressToString renders "a.b.c.d:port" (IPv4) or "[addr]:port"
+    // (IPv6) into ip; the port suffix is stripped below.
+    WebRtc_Word32 returnvalue = WSAAddressToStringA((LPSOCKADDR)(&address),
+                                         sizeof(SocketAddress),
+                                         NULL,
+                                         ip,
+                                         &dwIPSize);
+    if(returnvalue == -1)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 source_port = 0;
+    if(address._sockaddr_storage.sin_family == AF_INET)
+    {
+        // Parse IP assuming format "a.b.c.d:port".
+        WebRtc_Word8* ipEnd = strchr(ip,':');
+        if(ipEnd != NULL)
+        {
+            *ipEnd = '\0';  // Truncate at the colon, leaving only the IP.
+        }
+        ipSize = (WebRtc_Word32)strlen(ip);
+        if(ipSize == 0)
+        {
+            return -1;
+        }
+        source_port = address._sockaddr_in.sin_port;
+    }
+    else
+    {
+        // Parse IP assuming format "[address]:port".
+        WebRtc_Word8* ipEnd = strchr(ip,']');
+        if(ipEnd != NULL)
+        {
+          // Calculate length
+            WebRtc_Word32 adrSize = WebRtc_Word32(ipEnd - ip) - 1;
+            memmove(ip, &ip[1], adrSize);   // Remove '['
+            *(ipEnd - 1) = '\0';            // Terminate before the ']'.
+        }
+        ipSize = (WebRtc_Word32)strlen(ip);
+        if(ipSize == 0)
+        {
+            return -1;
+        }
+
+        source_port = address._sockaddr_in6.sin6_port;
+    }
+    // NOTE(review): sin_port/sin6_port are stored in network byte order, so
+    // this swap yields host byte order on little-endian machines (htons is
+    // its own inverse). The original comment claiming "network byte order"
+    // appears inverted -- confirm the intended byte order of sourcePort.
+    sourcePort = htons(source_port);
+    return 0;
+
+ #elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    WebRtc_Word32 ipFamily = address._sockaddr_storage.sin_family;
+    const void* ptrNumericIP = NULL;
+
+    // Select the raw address bytes matching the address family.
+    if(ipFamily == AF_INET)
+    {
+        ptrNumericIP = &(address._sockaddr_in.sin_addr);
+    }
+    else if(ipFamily == AF_INET6)
+    {
+        ptrNumericIP = &(address._sockaddr_in6.sin6_addr);
+    }
+    else
+    {
+        return -1;
+    }
+    if(inet_ntop(ipFamily, ptrNumericIP, ip, ipSize) == NULL)
+    {
+        return -1;
+    }
+    // NOTE(review): unlike the _WIN32 branch, ipSize is NOT updated to the
+    // resulting string length here -- confirm whether callers rely on it.
+    WebRtc_UWord16 source_port;
+    if(ipFamily == AF_INET)
+    {
+        source_port = address._sockaddr_in.sin_port;
+    } else
+    {
+        source_port = address._sockaddr_in6.sin6_port;
+    }
+    // NOTE(review): see the byte-order note in the _WIN32 branch above.
+    sourcePort = htons(source_port);
+    return 0;
+ #else
+    // No implementation for this platform.
+    return -1;
+ #endif
+}
+
+// Validates the textual form of an IP address.
+// ipadr: NUL-terminated address string.
+// ipV6:  when true, validate as IPv6 (optionally with an embedded dotted
+//        IPv4 tail); otherwise validate as dotted-decimal IPv4.
+// Returns true if the string is a plausible address of the requested family.
+bool UdpTransport::IsIpAddressValid(const char* ipadr, const bool ipV6)
+{
+    if(ipV6)
+    {
+        WebRtc_Word32 len = (WebRtc_Word32)strlen(ipadr);
+        // Longest textual IPv6 form is 39 chars; empty strings are invalid.
+        if( len>39 || len == 0)
+        {
+            return false;
+        }
+
+        WebRtc_Word32 i;
+        WebRtc_Word32 colonPos[7] = {0,0,0,0,0,0,0};
+        WebRtc_Word32 lastColonPos = -2;
+        WebRtc_Word32 nColons = 0;
+        WebRtc_Word32 nDubbleColons = 0;
+        WebRtc_Word32 nDots = 0;
+        WebRtc_Word32 error = 0;
+        char c;
+        for(i = 0; i < len ; i++)
+        {
+            c=ipadr[i];
+            // BUG FIX: isxdigit() on a plain (possibly negative) char is
+            // undefined behavior; cast through unsigned char.
+            if(isxdigit(static_cast<unsigned char>(c)))
+                ;
+            else if(c == ':')
+            {
+                if(nColons < 7)
+                    colonPos[nColons] = i;
+                if((i-lastColonPos)==1)
+                    nDubbleColons++;
+                lastColonPos=i;
+                // Dots (embedded IPv4 tail) may only appear after the last
+                // colon, never before one.
+                if(nDots != 0)
+                {
+                    error = 1;
+                }
+                nColons++;
+            }
+            else if(c == '.')
+            {
+                nDots++;
+            }
+            else
+            {
+                error = 1;
+            }
+
+        }
+        if(error)
+        {
+            return false;
+        }
+        if(nDubbleColons > 1)
+        {
+            return false;
+        }
+        if(nColons > 7 || nColons < 2)
+        {
+            return false;
+        }
+        if(!(nDots == 3 || nDots == 0))
+        {
+            return false;
+        }
+        // Each hex group between colons may hold at most 4 digits.
+        lastColonPos = -1;
+        WebRtc_Word32 charsBeforeColon = 0;
+        for(i = 0; i < nColons; i++)
+        {
+            charsBeforeColon=colonPos[i]-lastColonPos-1;
+            if(charsBeforeColon > 4)
+            {
+                return false;
+            }
+            lastColonPos=colonPos[i];
+        }
+        WebRtc_Word32 lengthAfterLastColon = len - lastColonPos - 1;
+        if(nDots == 0)
+        {
+            if(lengthAfterLastColon > 4)
+                return false;
+        }
+        if(nDots == 3 && lengthAfterLastColon > 0)
+        {
+            // Validate the embedded IPv4 tail, e.g. "::ffff:1.2.3.4".
+            return IsIpAddressValid((ipadr+lastColonPos+1),false);
+        }
+
+    }
+    else
+    {
+        WebRtc_Word32 len = (WebRtc_Word32)strlen(ipadr);
+        if((len>15)||(len==0))
+        {
+            return false;
+        }
+
+        // IPv4 should be [0-255].[0-255].[0-255].[0-255]
+        WebRtc_Word32 i;
+        WebRtc_Word32 nDots = 0;
+        WebRtc_Word32 iDotPos[4] = {0,0,0,0};
+
+        for (i = 0; (i < len) && (nDots < 4); i++)
+        {
+            if (ipadr[i] == (char)'.')
+            {
+                // Store index of dots and count number of dots.
+                iDotPos[nDots++] = i;
+            }
+        }
+
+        bool allUnder256 = false;
+        // The do { ... } while(false) provides label-like early exit via
+        // break; kept to avoid restructuring the checks.
+
+        // Check that all numbers are smaller than 256.
+        do
+        {
+            if (nDots != 3 )
+            {
+                break;
+            }
+
+            // First octet: at most 3 digits before the first dot.
+            if (iDotPos[0] <= 3)
+            {
+                char nr[4];
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[0],iDotPos[0]);
+                WebRtc_Word32 num = atoi(nr);
+                if (num > 255)
+                {
+                    break;
+                }
+            } else {
+                break;
+            }
+
+            // Second octet.
+            if (iDotPos[1] - iDotPos[0] <= 4)
+            {
+                char nr[4];
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[iDotPos[0]+1], iDotPos[1] - iDotPos[0] - 1);
+                WebRtc_Word32 num = atoi(nr);
+                if (num > 255)
+                    break;
+            } else {
+                break;
+            }
+
+            // Third and fourth octets.
+            if (iDotPos[2] - iDotPos[1] <= 4)
+            {
+                char nr[4];
+                memset(nr,0,4);
+                // BUG FIX: the original copied iDotPos[1] - iDotPos[0] - 1
+                // characters here (the *second* octet's length), so the
+                // third octet was only partially validated and e.g.
+                // "1.1.999.1" was accepted.
+                strncpy(nr,&ipadr[iDotPos[1]+1], iDotPos[2] - iDotPos[1] - 1);
+                WebRtc_Word32 num = atoi(nr);
+                if (num > 255)
+                    break;
+
+                // BUG FIX: bound the fourth octet's length. Without this
+                // guard, e.g. "1.2.3.45678" copied 5 characters into the
+                // 4-byte buffer below (stack buffer overflow).
+                if (len - iDotPos[2] > 4)
+                    break;
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[iDotPos[2]+1], len - iDotPos[2] -1);
+                num = atoi(nr);
+                if (num > 255)
+                    break;
+                else
+                    allUnder256 = true;
+            } else
+                break;
+        } while(false);
+
+        if (nDots != 3 || !allUnder256)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/udp_transport/source/udp_transport_impl.h b/trunk/src/modules/udp_transport/source/udp_transport_impl.h
new file mode 100644
index 0000000..c5a73f9
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_transport_impl.h
@@ -0,0 +1,246 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_TRANSPORT_IMPL_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_TRANSPORT_IMPL_H_
+
+#include "udp_transport.h"
+#include "udp_socket_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class RWLockWrapper;
+class UdpSocketManager;
+
+class UdpTransportImpl : public UdpTransport  // Socket-based implementation of the UdpTransport interface.
+{
+public:
+    // Factory method. Constructor disabled.
+    UdpTransportImpl(const WebRtc_Word32 id, WebRtc_UWord8& numSocketThreads);  // Instantiated via UdpTransport::Create().
+    virtual ~UdpTransportImpl();
+
+    // Module functions
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    // UdpTransport functions
+    virtual WebRtc_Word32 InitializeSendSockets(
+        const char* ipAddr,
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0);
+    virtual WebRtc_Word32 InitializeReceiveSockets(
+        UdpTransportData* const packetCallback,
+        const WebRtc_UWord16 rtpPort,
+        const char* ipAddr = NULL,
+        const char* multicastIpAddr = NULL,
+        const WebRtc_UWord16 rtcpPort = 0);
+    virtual WebRtc_Word32 InitializeSourcePorts(
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0);
+    virtual WebRtc_Word32 SourcePorts(WebRtc_UWord16& rtpPort,
+                                      WebRtc_UWord16& rtcpPort) const;
+    virtual WebRtc_Word32 ReceiveSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort,
+        char multicastIpAddr[kIpAddressVersion6Length]) const;
+    virtual WebRtc_Word32 SendSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const;
+    virtual WebRtc_Word32 RemoteSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const;
+    virtual WebRtc_Word32 SetQoS(const bool QoS,
+                                 const WebRtc_Word32 serviceType,
+                                 const WebRtc_UWord32 maxBitrate = 0,
+                                 const WebRtc_Word32 overrideDSCP = 0,
+                                 const bool audio = false);
+    virtual WebRtc_Word32 QoS(bool& QoS, WebRtc_Word32& serviceType,
+                              WebRtc_Word32& overrideDSCP) const;
+    virtual WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP,
+                                 const bool useSetSockOpt = false);
+    virtual WebRtc_Word32 ToS(WebRtc_Word32& DSCP,
+                              bool& useSetSockOpt) const;
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 PCP);
+    virtual WebRtc_Word32 PCP(WebRtc_Word32& PCP) const;
+    virtual WebRtc_Word32 EnableIpV6();
+    virtual bool IpV6Enabled() const;
+    virtual WebRtc_Word32 SetFilterIP(
+        const char filterIPAddress[kIpAddressVersion6Length]);
+    virtual WebRtc_Word32 FilterIP(
+        char filterIPAddress[kIpAddressVersion6Length]) const;
+    virtual WebRtc_Word32 SetFilterPorts(const WebRtc_UWord16 rtpFilterPort,
+                                         const WebRtc_UWord16 rtcpFilterPort);
+    virtual WebRtc_Word32 FilterPorts(WebRtc_UWord16& rtpFilterPort,
+                                      WebRtc_UWord16& rtcpFilterPort) const;
+    virtual WebRtc_Word32 StartReceiving(
+        const WebRtc_UWord32 numberOfSocketBuffers);
+    virtual WebRtc_Word32 StopReceiving();
+    virtual bool Receiving() const;
+    virtual bool SendSocketsInitialized() const;
+    virtual bool SourcePortsInitialized() const;
+    virtual bool ReceiveSocketsInitialized() const;
+    virtual WebRtc_Word32 SendRaw(const WebRtc_Word8* data,
+                                  WebRtc_UWord32 length, WebRtc_Word32 isRTCP,
+                                  WebRtc_UWord16 portnr = 0,
+                                  const char* ip = NULL);
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8 *data,
+                                          WebRtc_UWord32 length,
+                                          const SocketAddress& to);
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8 *data,
+                                           WebRtc_UWord32 length,
+                                           const SocketAddress& to);
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8 *data,
+                                          WebRtc_UWord32 length,
+                                          WebRtc_UWord16 rtpPort);
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8 *data,
+                                           WebRtc_UWord32 length,
+                                           WebRtc_UWord16 rtcpPort);
+    // Transport functions
+    virtual int SendPacket(int channel, const void* data, int length);
+    virtual int SendRTCPPacket(int channel, const void* data, int length);
+
+    // UdpTransport functions continue.
+    virtual WebRtc_Word32 SetSendIP(const char* ipaddr);
+    virtual WebRtc_Word32 SetSendPorts(const WebRtc_UWord16 rtpPort,
+                                       const WebRtc_UWord16 rtcpPort = 0);
+
+    virtual ErrorCode LastError() const;
+
+    virtual WebRtc_Word32 IPAddressCached(const SocketAddress& address,
+                                          char* ip,
+                                          WebRtc_UWord32& ipSize,
+                                          WebRtc_UWord16& sourcePort);
+
+    WebRtc_Word32 Id() const {return _id;}  // Returns the module id set at construction / ChangeUniqueId().
+protected:
+    // IncomingSocketCallback signature functions for receiving callbacks from
+    // UdpSocketWrapper.
+    static void IncomingRTPCallback(CallbackObj obj,
+                                    const WebRtc_Word8* rtpPacket,
+                                    WebRtc_Word32 rtpPacketLength,
+                                    const SocketAddress* from);
+    static void IncomingRTCPCallback(CallbackObj obj,
+                                     const WebRtc_Word8* rtcpPacket,
+                                     WebRtc_Word32 rtcpPacketLength,
+                                     const SocketAddress* from);
+
+    void CloseSendSockets();
+    void CloseReceiveSockets();
+
+    // Update _remoteRTPAddr according to _destPort and _destIP
+    void BuildRemoteRTPAddr();
+    // Update _remoteRTCPAddr according to _destPortRTCP and _destIP
+    void BuildRemoteRTCPAddr();
+
+    void BuildSockaddrIn(WebRtc_UWord16 portnr, const char* ip,
+                         SocketAddress& remoteAddr) const;
+
+    ErrorCode BindLocalRTPSocket();
+    ErrorCode BindLocalRTCPSocket();
+
+    ErrorCode BindRTPSendSocket();
+    ErrorCode BindRTCPSendSocket();
+
+    void IncomingRTPFunction(const WebRtc_Word8* rtpPacket,
+                             WebRtc_Word32 rtpPacketLength,
+                             const SocketAddress* from);
+    void IncomingRTCPFunction(const WebRtc_Word8* rtcpPacket,
+                              WebRtc_Word32 rtcpPacketLength,
+                              const SocketAddress* from);
+
+    bool FilterIPAddress(const SocketAddress* fromAddress);
+
+    bool SetSockOptUsed();
+
+    WebRtc_Word32 EnableQoS(WebRtc_Word32 serviceType, bool audio,
+                            WebRtc_UWord32 maxBitrate,
+                            WebRtc_Word32 overrideDSCP);
+
+    WebRtc_Word32 DisableQoS();
+
+private:
+    void GetCachedAddress(char* ip, WebRtc_UWord32& ipSize,
+                          WebRtc_UWord16& sourcePort);
+
+    WebRtc_Word32 _id;  // Unique module id; see Id() and ChangeUniqueId().
+    // Protects the sockets from being re-configured while receiving packets.
+    CriticalSectionWrapper* _crit;
+    CriticalSectionWrapper* _critFilter;  // Presumably guards the filter settings below -- confirm in the .cc.
+    // _packetCallback's critical section.
+    CriticalSectionWrapper* _critPacketCallback;
+    UdpSocketManager* _mgr;  // Shared socket manager (thread pool) -- see numSocketThreads in the constructor.
+    ErrorCode _lastError;  // Most recent error, reported via LastError().
+
+    // Remote RTP and RTCP ports.
+    WebRtc_UWord16 _destPort;
+    WebRtc_UWord16 _destPortRTCP;
+
+    // Local RTP and RTCP ports.
+    WebRtc_UWord16 _localPort;
+    WebRtc_UWord16 _localPortRTCP;
+
+    // Local port number when the local port for receiving and local port number
+    // for sending are not the same.
+    WebRtc_UWord16 _srcPort;
+    WebRtc_UWord16 _srcPortRTCP;
+
+    // Remote port from which last received packet was sent.
+    WebRtc_UWord16 _fromPort;
+    WebRtc_UWord16 _fromPortRTCP;
+
+    char _fromIP[kIpAddressVersion6Length];  // IP of the last packet's sender.
+    char _destIP[kIpAddressVersion6Length];  // Remote IP, set by InitializeSendSockets()/SetSendIP().
+    char _localIP[kIpAddressVersion6Length];  // Local bind IP, set by InitializeReceiveSockets().
+    char _localMulticastIP[kIpAddressVersion6Length];  // Multicast IP, set by InitializeReceiveSockets().
+
+    UdpSocketWrapper* _ptrRtpSocket;  // RTP receive (and default send) socket.
+    UdpSocketWrapper* _ptrRtcpSocket;  // RTCP receive (and default send) socket.
+
+    // Local port when the local port for receiving and local port for sending
+    // are not the same.
+    UdpSocketWrapper* _ptrSendRtpSocket;
+    UdpSocketWrapper* _ptrSendRtcpSocket;
+
+    SocketAddress _remoteRTPAddr;  // Built by BuildRemoteRTPAddr().
+    SocketAddress _remoteRTCPAddr;  // Built by BuildRemoteRTCPAddr().
+
+    SocketAddress _localRTPAddr;
+    SocketAddress _localRTCPAddr;
+
+    WebRtc_Word32 _tos;  // ToS value; see SetToS()/ToS().
+    bool _receiving;  // True while receiving is started; see Receiving().
+    bool _useSetSockOpt;  // See the useSetSockOpt argument of SetToS().
+    bool _qos;  // True when QoS is enabled; see SetQoS()/QoS().
+    WebRtc_Word32 _pcp;  // PCP value; see SetPCP()/PCP().
+    bool _ipV6Enabled;  // See EnableIpV6()/IpV6Enabled().
+    WebRtc_Word32 _serviceType;  // Cached from SetQoS().
+    WebRtc_Word32 _overrideDSCP;  // Cached from SetQoS().
+    WebRtc_UWord32 _maxBitrate;  // Cached from SetQoS().
+
+    // Cache used by GetCachedAddress(..).
+    RWLockWrapper* _cachLock;
+    SocketAddress _previousAddress;
+    char _previousIP[kIpAddressVersion6Length];
+    WebRtc_UWord32 _previousIPSize;
+    WebRtc_UWord16 _previousSourcePort;
+
+    SocketAddress _filterIPAddress;  // See SetFilterIP()/FilterIP().
+    WebRtc_UWord16 _rtpFilterPort;  // See SetFilterPorts()/FilterPorts().
+    WebRtc_UWord16 _rtcpFilterPort;  // See SetFilterPorts()/FilterPorts().
+
+    UdpTransportData* _packetCallback;  // Receives incoming RTP/RTCP; set by InitializeReceiveSockets().
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_TRANSPORT_IMPL_H_
diff --git a/trunk/src/modules/udp_transport/source/udp_transport_unittest.cc b/trunk/src/modules/udp_transport/source/udp_transport_unittest.cc
new file mode 100644
index 0000000..b894435
--- /dev/null
+++ b/trunk/src/modules/udp_transport/source/udp_transport_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "udp_transport.h"
+#include "gtest/gtest.h"
+
+TEST(UDPTransportTest, EmptyTestToGetCodeCoverage) {}  // Intentionally a no-op.
diff --git a/trunk/src/modules/udp_transport/test/SocketManagerTest.cc b/trunk/src/modules/udp_transport/test/SocketManagerTest.cc
new file mode 100644
index 0000000..03119be
--- /dev/null
+++ b/trunk/src/modules/udp_transport/test/SocketManagerTest.cc
@@ -0,0 +1,449 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <iostream>
+
+#ifdef _WIN32
+#include <windows.h>
+#include <tchar.h>
+#else
+#include <stdio.h>
+#define Sleep(x) usleep(x*1000)
+#endif
+
+#include "udp_transport.h"
+#include "common_types.h"
+#include "trace.h"
+
+//#define QOS_TEST
+//#define QOS_TEST_WITH_OVERRIDE // require admin on Win7
+//#define TOS_TEST               // require admin on Win7
+//#define TOS_TEST_USING_SETSOCKOPT
+//#define PCP_TEST
+
+// Receive-side callback for client 1: counts delivered RTP/RTCP packets.
+class UdpTransportDataA: public UdpTransportData
+{
+public:
+    UdpTransportDataA()
+        : _counterRTP(0),
+          _counterRTCP(0)
+    {
+    }
+
+    // Invoked by UdpTransport for each incoming RTP packet; only counts it.
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incommingRtpPacket,
+                                   const WebRtc_Word32 rtpPacketLength,
+                                   const char* fromIP,
+                                   const WebRtc_UWord16 fromPort)
+    {
+        ++_counterRTP;
+    }
+
+    // Invoked by UdpTransport for each incoming RTCP packet; only counts it.
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incommingRtcpPacket,
+                                    const WebRtc_Word32 rtcpPacketLength,
+                                    const char* fromIP,
+                                    const WebRtc_UWord16 fromPort)
+    {
+        ++_counterRTCP;
+    }
+
+    WebRtc_UWord32    _counterRTP;   // Number of RTP packets seen.
+    WebRtc_UWord32    _counterRTCP;  // Number of RTCP packets seen.
+};
+
+// Receive-side callback for client 2: counts delivered RTP/RTCP packets.
+class UdpTransportDataB: public UdpTransportData
+{
+public:
+    UdpTransportDataB()
+        : _counterRTP(0),
+          _counterRTCP(0)
+    {
+    }
+
+    // Invoked by UdpTransport for each incoming RTP packet; only counts it.
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incommingRtpPacket,
+                                   const WebRtc_Word32 rtpPacketLength,
+                                   const char* fromIP,
+                                   const WebRtc_UWord16 fromPort)
+    {
+        ++_counterRTP;
+    }
+
+    // Invoked by UdpTransport for each incoming RTCP packet; only counts it.
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incommingRtcpPacket,
+                                    const WebRtc_Word32 rtcpPacketLength,
+                                    const char* fromIP,
+                                    const WebRtc_UWord16 fromPort)
+    {
+        ++_counterRTCP;
+    }
+
+    WebRtc_UWord32    _counterRTP;   // Number of RTP packets seen.
+    WebRtc_UWord32    _counterRTCP;  // Number of RTCP packets seen.
+};
+
+#ifdef _WIN32
+int _tmain(int argc, _TCHAR* argv[])
+#else
+int main(int argc, char* argv[])
+#endif
+{
+    Trace::CreateTrace();
+    Trace::SetTraceFile("testTrace.txt");
+    Trace::SetEncryptedTraceFile("testTraceDebug.txt");
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    printf("Start UdpTransport test\n");
+
+    WebRtc_UWord8 numberOfSocketThreads = 5;
+    UdpTransport* client1 = UdpTransport::Create(1,numberOfSocketThreads,NULL);
+    numberOfSocketThreads = 0;
+    UdpTransport* client2 = UdpTransport::Create(2,numberOfSocketThreads,NULL);
+    assert(5 == numberOfSocketThreads);
+
+    UdpTransportDataA* client1Callback = new UdpTransportDataA();
+    UdpTransportDataB* client2Callback = new UdpTransportDataB();
+
+    WebRtc_UWord32 localIP = 0;
+    char localIPAddr[64];
+    assert( 0 == client1->LocalHostAddress(localIP)); // network host order aka big-endian
+
+    sprintf(localIPAddr,"%lu.%lu.%lu.%lu",(localIP>>24)& 0x0ff,(localIP>>16)& 0x0ff ,(localIP>>8)& 0x0ff, localIP & 0x0ff);
+    printf("\tLocal IP:%s\n", localIPAddr);
+
+    char localIPV6[16];
+    char localIPAddrV6[128];
+    if( 0 == client1->LocalHostAddressIPV6(localIPV6))
+    {
+        sprintf(localIPAddrV6,"%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x", localIPV6[0],localIPV6[1],localIPV6[2],localIPV6[3],localIPV6[4],localIPV6[5],localIPV6[6],localIPV6[7], localIPV6[8],localIPV6[9],localIPV6[10],localIPV6[11],localIPV6[12],localIPV6[13],localIPV6[14],localIPV6[15]);
+        printf("\tLocal IPV6:%s\n", localIPAddrV6);
+    }
+
+    char test[9] = "testtest";
+    assert( 0 == client1->InitializeReceiveSockets(client1Callback,1234, localIPAddr));
+
+#if defined QOS_TEST_WITH_OVERRIDE || defined QOS_TEST || defined TOS_TEST || defined TOS_TEST_USING_SETSOCKOPT
+    assert( -1 == client1->SetQoS(true, 3, 1000));  //  should fail
+    assert( 0 == client1->InitializeSendSockets("192.168.200.1", 1236,1237));
+#else
+    assert( 0 == client1->InitializeSendSockets(localIPAddr, 1236,1237));
+#endif
+    assert( 0 == client1->StartReceiving(20));
+
+    assert( 0 == client2->InitializeReceiveSockets(client2Callback,1236));
+    assert( 0 == client2->InitializeSendSockets(localIPAddr, 1234,1235));
+    assert( 0 == client2->StartReceiving(20));
+
+    Sleep(10);
+
+#ifdef TOS_TEST
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetToS(2));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(3));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(0));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    printf("Tested TOS  \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef TOS_TEST_USING_SETSOCKOPT
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetToS(2, true));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(3, true));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(0, true));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    printf("Tested TOS using setsockopt \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef QOS_TEST
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetQoS(true, 2, 1000));  // SERVICETYPE_CONTROLLEDLOAD 2
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(true, 3, 1000));  // SERVICETYPE_GUARANTEED 3
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(false, 0));  //
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    printf("Tested QOS  \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef QOS_TEST_WITH_OVERRIDE
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetQoS(true, 2, 1000, 1));  // SERVICETYPE_CONTROLLEDLOAD 2
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(true, 2, 1000, 2));  // SERVICETYPE_GUARANTEED 3
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(false, 0));  //
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    printf("Tested QOS with override \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef PCP_TEST
+    // Note: We currently don't know how to test that the bits are actually set in the frame,
+    // this test simply tests the API and that we can send a packet after setting PCP.
+    assert( -1 == client1->SetPCP(-1)); // should fail
+    assert( -1 == client1->SetPCP(8)); // should fail
+    printf("Setting PCP to 7 returned %d \n", client1->SetPCP(7));
+    printf("(Failing is normal, requires the CAP_NET_ADMIN capability to succeed.) \n");
+    Sleep(10);
+    for (int pcp = 6; pcp >= 0; --pcp)
+    {
+        assert( 0 == client1->SetPCP(pcp));
+        Sleep(10);
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+    printf("Tested PCP \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+    Sleep(10);
+
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    // test start rec after a socket has revceived data
+    // result: packets received before first startReceive is saved by the OS
+/*
+    for(int i = 0; i < 100; i++)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+    Sleep(10);
+    assert( 0 == client2->StartReceiving(20));
+
+//    assert( 0 == client2->StopReceiving());
+
+    Sleep(10);
+    for(int i = 0; i < 100; i++)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+
+    assert( 0 == client2->StartReceiving(20));
+
+    for(int i = 0; i < 100; i++)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+*/
+    Sleep(10);
+
+    assert( 0 == client1Callback->_counterRTP);
+    assert( 1 == client2Callback->_counterRTP);
+    assert( 0 == client1Callback->_counterRTCP);
+    assert( 0 == client2Callback->_counterRTCP);
+
+    printf("Sent 1 packet on one socket \n");
+
+    char ipAddr[64];
+    char tempIpAddr[64];
+    char ipMulticastAddr[64];
+    WebRtc_UWord16 rtpPort = 0;
+    WebRtc_UWord16 rtcpPort = 0;
+    bool reusableSocket = true;
+    assert( 0 == client2->RemoteSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1234);
+    assert( strncmp(ipAddr, localIPAddr, 16) == 0);
+
+    assert( 0 == client2->ReceiveSocketInformation(ipAddr, rtpPort, rtcpPort, ipMulticastAddr, reusableSocket));
+    assert( rtpPort == 1236);
+    assert( rtcpPort == 1237);
+    assert( strncmp(ipAddr, "0.0.0.0", 16) == 0);
+    assert( ipMulticastAddr[0] == 0);
+    assert( reusableSocket == false);
+
+    assert( 0 == client2->SendSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1234);
+    assert( rtcpPort == 1235);
+    assert( strncmp(ipAddr,localIPAddr, 16) == 0);
+
+    const int numberOfPackets = 1000;
+    int n = 0;
+    while(n < numberOfPackets)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+        assert( 9 == client2->SendPacket(-1, test, 9));
+        assert( 9 == client1->SendRTCPPacket(-1, test, 9));
+        assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+        n++;
+    }
+    int loops = 0;
+    for(; loops < 100 &&
+        !(client1Callback->_counterRTP == numberOfPackets &&
+        client1Callback->_counterRTCP == numberOfPackets &&
+        client2Callback->_counterRTP == numberOfPackets+1 &&
+        client2Callback->_counterRTCP == numberOfPackets);
+        loops++)
+    {
+        Sleep(10);
+    }
+    printf("\tSent %d packets on 4 sockets in:%d ms\n", numberOfPackets, loops*10);
+
+    assert( numberOfPackets == client1Callback->_counterRTP);
+    assert( numberOfPackets+1 == client2Callback->_counterRTP);
+    assert( numberOfPackets == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->StopReceiving());
+    assert( 0 == client2->StopReceiving());
+
+    printf("Tear down client 2\n");
+
+    // configure that fail
+    assert( -1 == client2->InitializeReceiveSockets(client2Callback,1234, localIPAddr)); // port in use
+    assert( !client2->ReceiveSocketsInitialized());
+    assert( 0 == client2->InitializeReceiveSockets(client2Callback,1236));
+    assert( 0 == client2->StartReceiving(20));
+
+    printf("Client 2 re-configured\n");
+
+    assert( client1->SendSocketsInitialized());
+    assert( client1->ReceiveSocketsInitialized());
+    assert( client2->SendSocketsInitialized());
+    assert( client2->ReceiveSocketsInitialized());
+
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    // this should not be received since we dont receive in client 1
+    assert( 9 == client2->SendPacket(-1, test, 9));
+
+    Sleep(10);
+
+    assert( numberOfPackets == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+    printf("\tSent 1 packet on one socket \n");
+
+    printf("Start filter test\n");
+
+    assert( 0 == client1->StartReceiving(20));
+
+    assert( 0 == client1->SetFilterPorts(1234, 1235)); // should filter out what we send
+    assert( 0 == client1->SetFilterIP(localIPAddr));
+
+    assert( 0 == client1->FilterIP(tempIpAddr));
+    assert( strncmp(tempIpAddr, localIPAddr, 16) == 0);
+
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+
+    Sleep(10);
+
+    assert( numberOfPackets == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->SetFilterPorts(1236, 1237)); // should pass through
+
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( numberOfPackets+1 == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets+1 == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->SetFilterIP("127.0.0.2"));
+
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( numberOfPackets+1 == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets+1 == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->SetFilterIP(NULL));
+    assert( 0 == client1->SetFilterPorts(0, 0));
+
+    printf("Tested filter \n");
+
+    assert( 0 == client2->InitializeSourcePorts(1238, 1239));
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( numberOfPackets+2 == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets+2 == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->RemoteSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1238);
+    assert( rtcpPort == 1239);
+    assert( strncmp(ipAddr, localIPAddr, 16) == 0);
+
+    printf("Tested source port \n");
+
+    assert( 0 == client2->InitializeSourcePorts(1240 ));
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( 0 == client1->RemoteSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1240);
+    assert( rtcpPort == 1241);
+
+    printf("Tested SetSendPorts source port \n");
+
+    UdpTransport::Destroy(client1);
+    UdpTransport::Destroy(client2);
+
+    printf("\n\nUdpTransport test done\n");
+
+    delete client1Callback;
+    delete client2Callback;
+
+    Sleep(5000);
+    Trace::ReturnTrace();
+};
diff --git a/trunk/src/modules/utility/OWNERS b/trunk/src/modules/utility/OWNERS
new file mode 100644
index 0000000..674c738
--- /dev/null
+++ b/trunk/src/modules/utility/OWNERS
@@ -0,0 +1,4 @@
+henrike@webrtc.org
+pwestin@webrtc.org
+asapersson@webrtc.org
+perkj@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/modules/utility/interface/file_player.h b/trunk/src/modules/utility/interface/file_player.h
new file mode 100644
index 0000000..29da8f1
--- /dev/null
+++ b/trunk/src/modules/utility/interface/file_player.h
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class FileCallback;
+
+class FilePlayer
+{
+public:
+    // The largest decoded frame size in samples (60ms with 32kHz sample rate).
+    enum {MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
+    enum {MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
+
+    // Note: will return NULL for video file formats (e.g. AVI) if the flag
+    //       WEBRTC_MODULE_UTILITY_VIDEO is not defined.
+    static FilePlayer* CreateFilePlayer(const WebRtc_UWord32 instanceID,
+                                        const FileFormats fileFormat);
+
+    static void DestroyFilePlayer(FilePlayer* player);
+
+    virtual WebRtc_Word32 Get10msAudioFromFile(
+        WebRtc_Word16* decodedDataBuffer,
+        WebRtc_UWord32& decodedDataLengthInSamples,
+        const WebRtc_UWord32 frequencyInHz) = 0;
+
+    // Register callback for receiving file playing notifications.
+    virtual WebRtc_Word32 RegisterModuleFileCallback(
+        FileCallback* callback) = 0;
+
+    // API for playing audio from fileName to channel.
+    // Note: codecInst is used for pre-encoded files.
+    virtual WebRtc_Word32 StartPlayingFile(
+        const char* fileName,
+        bool loop,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL) = 0;
+
+    // Note: codecInst is used for pre-encoded files.
+    virtual WebRtc_Word32 StartPlayingFile(
+        InStream& sourceStream,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL) = 0;
+
+    virtual WebRtc_Word32 StopPlayingFile() = 0;
+
+    virtual bool IsPlayingFile() const = 0;
+
+    virtual WebRtc_Word32 GetPlayoutPosition(WebRtc_UWord32& durationMs) = 0;
+
+    // Set audioCodec to the currently used audio codec.
+    virtual WebRtc_Word32 AudioCodec(CodecInst& audioCodec) const = 0;
+
+    virtual WebRtc_Word32 Frequency() const = 0;
+
+    // Note: scaleFactor is in the range [0.0 - 2.0]
+    virtual WebRtc_Word32 SetAudioScaling(float scaleFactor) = 0;
+
+    // Return the time in ms until next video frame should be pulled (by
+    // calling GetVideoFromFile(..)).
+    // Note: this API reads one video frame from file. This means that it should
+    //       be called exactly once per GetVideoFromFile(..) API call.
+    virtual WebRtc_Word32 TimeUntilNextVideoFrame() { return -1;}
+
+    virtual WebRtc_Word32 StartPlayingVideoFile(
+        const char* /*fileName*/,
+        bool /*loop*/,
+        bool /*videoOnly*/) { return -1;}
+
+    virtual WebRtc_Word32 video_codec_info(VideoCodec& /*videoCodec*/) const
+    {return -1;}
+
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/)
+    { return -1;}
+
+    // Same as GetVideoFromFile(). videoFrame will have the resolution specified
+    // by the width outWidth and height outHeight in pixels.
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/,
+                                           const WebRtc_UWord32 /*outWidth*/,
+                                           const WebRtc_UWord32 /*outHeight*/)
+    {return -1;}
+protected:
+    virtual ~FilePlayer() {}
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
diff --git a/trunk/src/modules/utility/interface/file_recorder.h b/trunk/src/modules/utility/interface/file_recorder.h
new file mode 100644
index 0000000..eb460ae
--- /dev/null
+++ b/trunk/src/modules/utility/interface/file_recorder.h
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "media_file_defines.h"
+#include "module_common_types.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class FileRecorder
+{
+public:
+
+    // Note: will return NULL for video file formats (e.g. AVI) if the flag
+    //       WEBRTC_MODULE_UTILITY_VIDEO is not defined.
+    static FileRecorder* CreateFileRecorder(const WebRtc_UWord32 instanceID,
+                                            const FileFormats fileFormat);
+
+    static void DestroyFileRecorder(FileRecorder* recorder);
+
+    virtual WebRtc_Word32 RegisterModuleFileCallback(
+        FileCallback* callback) = 0;
+
+    virtual FileFormats RecordingFileFormat() const = 0;
+
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        const char* fileName,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notification,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage) = 0;
+
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        OutStream& destStream,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notification,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage) = 0;
+
+    // Stop recording.
+    // Note: this API is for both audio and video.
+    virtual WebRtc_Word32 StopRecording() = 0;
+
+    // Return true if recording.
+    // Note: this API is for both audio and video.
+    virtual bool IsRecording() const = 0;
+
+    virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const = 0;
+
+    // Write frame to file. Frame should contain 10ms of un-ecoded audio data.
+    virtual WebRtc_Word32 RecordAudioToFile(
+        const AudioFrame& frame,
+        const TickTime* playoutTS = NULL) = 0;
+
+    // Open/create the file specified by fileName for writing audio/video data
+    // (relative path is allowed). audioCodecInst specifies the encoding of the
+    // audio data. videoCodecInst specifies the encoding of the video data.
+    // Only video data will be recorded if videoOnly is true. amrFormat
+    // specifies the amr/amrwb storage format.
+    // Note: the file format is AVI.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const CodecInst& audioCodecInst,
+        const VideoCodec& videoCodecInst,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage,
+        bool videoOnly = false) = 0;
+
+    // Record the video frame in videoFrame to AVI file.
+    virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame) = 0;
+
+protected:
+    virtual ~FileRecorder() {}
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
diff --git a/trunk/src/modules/utility/interface/process_thread.h b/trunk/src/modules/utility/interface/process_thread.h
new file mode 100644
index 0000000..6c51404
--- /dev/null
+++ b/trunk/src/modules/utility/interface/process_thread.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+class Module;
+
+class ProcessThread
+{
+public:
+    static ProcessThread* CreateProcessThread();
+    static void DestroyProcessThread(ProcessThread* module);
+
+    virtual WebRtc_Word32 Start() = 0;
+    virtual WebRtc_Word32 Stop() = 0;
+
+    virtual WebRtc_Word32 RegisterModule(const Module* module) = 0;
+    virtual WebRtc_Word32 DeRegisterModule(const Module* module) = 0;
+protected:
+    virtual ~ProcessThread();
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
diff --git a/trunk/src/modules/utility/interface/rtp_dump.h b/trunk/src/modules/utility/interface/rtp_dump.h
new file mode 100644
index 0000000..9291a1c
--- /dev/null
+++ b/trunk/src/modules/utility/interface/rtp_dump.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file implements a class that writes a stream of RTP and RTCP packets
+// to a file according to the format specified by rtpplay. See
+// http://www.cs.columbia.edu/irt/software/rtptools/.
+// Notes: supported platforms are Windows, Linux and Mac OSX
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_RTP_DUMP_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_RTP_DUMP_H_
+
+#include "typedefs.h"
+#include "file_wrapper.h"
+
+namespace webrtc {
+class RtpDump
+{
+public:
+    // Factory method.
+    static RtpDump* CreateRtpDump();
+
+    // Delete function. Destructor disabled.
+    static void DestroyRtpDump(RtpDump* object);
+
+    // Open the file fileNameUTF8 for writing RTP/RTCP packets.
+    // Note: this API also adds the rtpplay header.
+    virtual WebRtc_Word32 Start(const char* fileNameUTF8) = 0;
+
+    // Close the existing file. No more packets will be recorded.
+    virtual WebRtc_Word32 Stop() = 0;
+
+    // Return true if a file is open for recording RTP/RTCP packets.
+    virtual bool IsActive() const = 0;
+
+    // Writes the RTP/RTCP packet in packet with length packetLength in bytes.
+    // Note: packet should contain the RTP/RTCP part of the packet. I.e. the
+    // first bytes of packet should be the RTP/RTCP header.
+    virtual WebRtc_Word32 DumpPacket(const WebRtc_UWord8* packet,
+                                     WebRtc_UWord16 packetLength) = 0;
+
+protected:
+    virtual ~RtpDump();
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_RTP_DUMP_H_
diff --git a/trunk/src/modules/utility/source/Android.mk b/trunk/src/modules/utility/source/Android.mk
new file mode 100644
index 0000000..452a95b
--- /dev/null
+++ b/trunk/src/modules/utility/source/Android.mk
@@ -0,0 +1,55 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_utility
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := coder.cc \
+    file_player_impl.cc \
+    file_recorder_impl.cc \
+    process_thread_impl.cc \
+    rtp_dump_impl.cc \
+    frame_scaler.cc \
+    video_coder.cc \
+    video_frames_queue.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_MODULE_UTILITY_VIDEO'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../audio_coding/main/interface \
+    $(LOCAL_PATH)/../../media_file/interface \
+    $(LOCAL_PATH)/../../video_coding/main/interface \
+    $(LOCAL_PATH)/../../video_coding/codecs/interface \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_video/vplib/main/interface \
+    $(LOCAL_PATH)/../../../common_audio/resampler/include \
+    $(LOCAL_PATH)/../../../system_wrappers/interface 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
+
+
diff --git a/trunk/src/modules/utility/source/coder.cc b/trunk/src/modules/utility/source/coder.cc
new file mode 100644
index 0000000..b858da1
--- /dev/null
+++ b/trunk/src/modules/utility/source/coder.cc
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "coder.h"
+#include "common_types.h"
+#include "module_common_types.h"
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+AudioCoder::AudioCoder(WebRtc_UWord32 instanceID)
+    : _instanceID(instanceID),
+      _acm(AudioCodingModule::Create(instanceID)),
+      _receiveCodec(),
+      _encodeTimestamp(0),
+      _encodedData(NULL),
+      _encodedLengthInBytes(0),
+      _decodeTimestamp(0)
+{
+    _acm->InitializeSender();
+    _acm->InitializeReceiver();
+    _acm->RegisterTransportCallback(this);
+}
+
+AudioCoder::~AudioCoder()
+{
+    AudioCodingModule::Destroy(_acm);
+}
+
+WebRtc_Word32 AudioCoder::SetEncodeCodec(const CodecInst& codecInst,
+					 ACMAMRPackingFormat amrFormat)
+{
+    if(_acm->RegisterSendCodec((CodecInst&)codecInst) == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioCoder::SetDecodeCodec(const CodecInst& codecInst,
+					 ACMAMRPackingFormat amrFormat)
+{
+    if(_acm->RegisterReceiveCodec((CodecInst&)codecInst) == -1)
+    {
+        return -1;
+    }
+    memcpy(&_receiveCodec,&codecInst,sizeof(CodecInst));
+    return 0;
+}
+
+WebRtc_Word32 AudioCoder::Decode(AudioFrame& decodedAudio,
+				 WebRtc_UWord32 sampFreqHz,
+				 const WebRtc_Word8*  incomingPayload,
+				 WebRtc_Word32  payloadLength)
+{
+    if (payloadLength > 0)
+    {
+        const WebRtc_UWord8 payloadType = _receiveCodec.pltype;
+        _decodeTimestamp += _receiveCodec.pacsize;
+        if(_acm->IncomingPayload(incomingPayload,
+                                 payloadLength,
+                                 payloadType,
+                                 _decodeTimestamp) == -1)
+        {
+            return -1;
+        }
+    }
+    return _acm->PlayoutData10Ms((WebRtc_UWord16)sampFreqHz,
+				 (AudioFrame&)decodedAudio);
+}
+
+WebRtc_Word32 AudioCoder::PlayoutData(AudioFrame& decodedAudio,
+				      WebRtc_UWord16& sampFreqHz)
+{
+    return _acm->PlayoutData10Ms(sampFreqHz, (AudioFrame&)decodedAudio);
+}
+
+WebRtc_Word32 AudioCoder::Encode(const AudioFrame& audio,
+				 WebRtc_Word8* encodedData,
+				 WebRtc_UWord32& encodedLengthInBytes)
+{
+    // Fake a timestamp in case audio doesn't contain a correct timestamp.
+    // Make a local copy of the audio frame since audio is const
+    AudioFrame audioFrame = audio;
+    audioFrame._timeStamp = _encodeTimestamp;
+    _encodeTimestamp += audioFrame._payloadDataLengthInSamples;
+
+    // For any codec with a frame size that is longer than 10 ms the encoded
+    // length in bytes should be zero until a a full frame has been encoded.
+    _encodedLengthInBytes = 0;
+    if(_acm->Add10MsData((AudioFrame&)audioFrame) == -1)
+    {
+        return -1;
+    }
+    _encodedData = encodedData;
+    if(_acm->Process() == -1)
+    {
+        return -1;
+    }
+    encodedLengthInBytes = _encodedLengthInBytes;
+    return 0;
+}
+
+WebRtc_Word32 AudioCoder::SendData(
+    FrameType /* frameType */,
+    WebRtc_UWord8   /* payloadType */,
+    WebRtc_UWord32  /* timeStamp */,
+    const WebRtc_UWord8*  payloadData,
+    WebRtc_UWord16  payloadSize,
+    const RTPFragmentationHeader* /* fragmentation*/)
+{
+    memcpy(_encodedData,payloadData,sizeof(WebRtc_UWord8) * payloadSize);
+    _encodedLengthInBytes = payloadSize;
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/utility/source/coder.h b/trunk/src/modules/utility/source/coder.h
new file mode 100644
index 0000000..e96f455
--- /dev/null
+++ b/trunk/src/modules/utility/source/coder.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class AudioFrame;
+
+class AudioCoder : public AudioPacketizationCallback
+{
+public:
+    AudioCoder(WebRtc_UWord32 instanceID);
+    ~AudioCoder();
+
+    WebRtc_Word32 SetEncodeCodec(
+        const CodecInst& codecInst,
+	ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
+
+    WebRtc_Word32 SetDecodeCodec(
+        const CodecInst& codecInst,
+	ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
+
+    WebRtc_Word32 Decode(AudioFrame& decodedAudio, WebRtc_UWord32 sampFreqHz,
+			 const WebRtc_Word8* incomingPayload,
+			 WebRtc_Word32  payloadLength);
+
+    WebRtc_Word32 PlayoutData(AudioFrame& decodedAudio,
+			      WebRtc_UWord16& sampFreqHz);
+
+    WebRtc_Word32 Encode(const AudioFrame& audio,
+                         WebRtc_Word8*   encodedData,
+			 WebRtc_UWord32& encodedLengthInBytes);
+
+protected:
+    virtual WebRtc_Word32 SendData(FrameType frameType,
+				   WebRtc_UWord8 payloadType,
+				   WebRtc_UWord32 timeStamp,
+				   const WebRtc_UWord8* payloadData,
+				   WebRtc_UWord16 payloadSize,
+				   const RTPFragmentationHeader* fragmentation);
+
+private:
+    WebRtc_UWord32 _instanceID;
+
+    AudioCodingModule* _acm;
+
+    CodecInst _receiveCodec;
+
+    WebRtc_UWord32 _encodeTimestamp;
+    WebRtc_Word8*  _encodedData;
+    WebRtc_UWord32 _encodedLengthInBytes;
+
+    WebRtc_UWord32 _decodeTimestamp;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
diff --git a/trunk/src/modules/utility/source/file_player_impl.cc b/trunk/src/modules/utility/source/file_player_impl.cc
new file mode 100644
index 0000000..0e22872
--- /dev/null
+++ b/trunk/src/modules/utility/source/file_player_impl.cc
@@ -0,0 +1,723 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "file_player_impl.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "cpu_wrapper.h"
+    #include "frame_scaler.h"
+    #include "tick_util.h"
+    #include "video_coder.h"
+#endif
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+// Factory: audio formats get a FilePlayerImpl; kFileFormatAviFile gets a
+// VideoFilePlayerImpl when WEBRTC_MODULE_UTILITY_VIDEO is enabled, otherwise
+// NULL. Unknown formats assert and return NULL.
+FilePlayer* FilePlayer::CreateFilePlayer(WebRtc_UWord32 instanceID,
+                                         FileFormats fileFormat)
+{
+    switch(fileFormat)
+    {
+    case kFileFormatWavFile:
+    case kFileFormatCompressedFile:
+    case kFileFormatPreencodedFile:
+    case kFileFormatPcm16kHzFile:
+    case kFileFormatPcm8kHzFile:
+    case kFileFormatPcm32kHzFile:
+        // audio formats
+        return new FilePlayerImpl(instanceID, fileFormat);
+    case kFileFormatAviFile:
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        return new VideoFilePlayerImpl(instanceID, fileFormat);
+#else
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                     "Invalid file format: %d", kFileFormatAviFile);
+        assert(false);
+        return NULL;
+#endif
+    }
+    assert(false);
+    return NULL;
+}
+
+// Deletes a player previously created by CreateFilePlayer().
+void FilePlayer::DestroyFilePlayer(FilePlayer* player)
+{
+    delete player;
+}
+
+// The MediaFile created here is owned by this object and released in the
+// destructor via MediaFile::DestroyMediaFile().
+FilePlayerImpl::FilePlayerImpl(const WebRtc_UWord32 instanceID,
+                               const FileFormats fileFormat)
+    : _instanceID(instanceID),
+      _fileFormat(fileFormat),
+      _fileModule(*MediaFile::CreateMediaFile(instanceID)),
+      _decodedLengthInMS(0),
+      _decodedAudioBuffer(),
+      _audioDecoder(instanceID),
+      _codec(),
+      _numberOf10MsPerFrame(0),
+      _numberOf10MsInDecoder(0),
+      _resampler(),
+      _scaling(1.0)
+{
+    // plfreq == 0 marks "no codec configured / playout not started";
+    // see Frequency() and Get10msAudioFromFile().
+    _codec.plfreq = 0;
+}
+
+FilePlayerImpl::~FilePlayerImpl()
+{
+    // Releases the MediaFile allocated in the constructor.
+    MediaFile::DestroyMediaFile(&_fileModule);
+}
+
+// Returns the playout sample rate in Hz, or -1 if playout has not started
+// (plfreq == 0). Rates that can occur in WAVE files but are not supported
+// downstream (11/22/44/48 kHz) are mapped to a supported 16 or 32 kHz rate.
+WebRtc_Word32 FilePlayerImpl::Frequency() const
+{
+    if(_codec.plfreq == 0)
+    {
+        return -1;
+    }
+    // Make sure that sample rate is 8,16 or 32 kHz. E.g. WAVE files may have
+    // other sampling rates.
+    if(_codec.plfreq == 11000)
+    {
+        return 16000;
+    }
+    else if(_codec.plfreq == 22000)
+    {
+        return 32000;
+    }
+    else if(_codec.plfreq == 44000)
+    {
+        return 32000;
+    }
+    else if(_codec.plfreq == 48000)
+    {
+        return 32000;
+    }
+    else
+    {
+        return _codec.plfreq;
+    }
+}
+
+// Copies the currently configured audio codec into audioCodec.
+WebRtc_Word32 FilePlayerImpl::AudioCodec(CodecInst& audioCodec) const
+{
+    audioCodec = _codec;
+    return 0;
+}
+
+// Reads and decodes 10 ms of audio from the file, resampled to frequencyInHz,
+// into outBuffer. lengthInSamples receives the number of samples written.
+// Returns 0 on success, -1 on error or end of file.
+WebRtc_Word32 FilePlayerImpl::Get10msAudioFromFile(
+    WebRtc_Word16* outBuffer,
+    WebRtc_UWord32& lengthInSamples,
+    WebRtc_UWord32 frequencyInHz)
+{
+    if(_codec.plfreq == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+           "FilePlayerImpl::Get10msAudioFromFile() playing not started!\
+ codecFreq = %d, wantedFreq = %d",
+           _codec.plfreq, frequencyInHz);
+        return -1;
+    }
+
+    AudioFrame unresampledAudioFrame;
+    if(STR_CASE_CMP(_codec.plname, "L16") == 0)
+    {
+        unresampledAudioFrame._frequencyInHz = _codec.plfreq;
+
+        // L16 is un-encoded data. Just pull 10 ms.
+        WebRtc_UWord32 lengthInBytes =
+            sizeof(unresampledAudioFrame._payloadData);
+        if (_fileModule.PlayoutAudioData(
+                (WebRtc_Word8*)unresampledAudioFrame._payloadData,
+                lengthInBytes) == -1)
+        {
+            // End of file reached.
+            return -1;
+        }
+        if(lengthInBytes == 0)
+        {
+            lengthInSamples = 0;
+            return 0;
+        }
+        // One sample is two bytes.
+        // NOTE(review): the cast binds tighter than the shift, so
+        // lengthInBytes is truncated to 16 bits *before* dividing by two --
+        // confirm the payload buffer can never exceed 64 kB here.
+        unresampledAudioFrame._payloadDataLengthInSamples =
+            (WebRtc_UWord16)lengthInBytes >> 1;
+
+    }else {
+        // Decode will generate 10 ms of audio data. PlayoutAudioData(..)
+        // expects a full frame. If the frame size is larger than 10 ms,
+        // PlayoutAudioData(..) data should be called proportionally less often.
+        WebRtc_Word16 encodedBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
+        WebRtc_UWord32 encodedLengthInBytes = 0;
+        if(++_numberOf10MsInDecoder >= _numberOf10MsPerFrame)
+        {
+            _numberOf10MsInDecoder = 0;
+            WebRtc_UWord32 bytesFromFile = sizeof(encodedBuffer);
+            if (_fileModule.PlayoutAudioData((WebRtc_Word8*)encodedBuffer,
+                                             bytesFromFile) == -1)
+            {
+                // End of file reached.
+                return -1;
+            }
+            encodedLengthInBytes = bytesFromFile;
+        }
+        // encodedLengthInBytes stays 0 mid-frame; Decode() is still called
+        // to produce the next 10 ms block.
+        if(_audioDecoder.Decode(unresampledAudioFrame,frequencyInHz,
+                                (WebRtc_Word8*)encodedBuffer,
+                                encodedLengthInBytes) == -1)
+        {
+            return -1;
+        }
+    }
+
+    int outLen = 0;
+    if(_resampler.ResetIfNeeded(unresampledAudioFrame._frequencyInHz,
+                                frequencyInHz, kResamplerSynchronous))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+           "FilePlayerImpl::Get10msAudioFromFile() unexpected codec");
+
+        // New sampling frequency. Update state.
+        outLen = frequencyInHz / 100;
+        memset(outBuffer, 0, outLen * sizeof(WebRtc_Word16));
+        return 0;
+    }
+    _resampler.Push(unresampledAudioFrame._payloadData,
+                    unresampledAudioFrame._payloadDataLengthInSamples,
+                    outBuffer,
+                    MAX_AUDIO_BUFFER_IN_SAMPLES,
+                    outLen);
+
+    lengthInSamples = outLen;
+
+    // Apply the volume scale factor set via SetAudioScaling().
+    if(_scaling != 1.0)
+    {
+        for (int i = 0;i < outLen; i++)
+        {
+            outBuffer[i] = (WebRtc_Word16)(outBuffer[i] * _scaling);
+        }
+    }
+    _decodedLengthInMS += 10;
+    return 0;
+}
+
+// Forwards the notification callback registration to the media file module.
+WebRtc_Word32 FilePlayerImpl::RegisterModuleFileCallback(FileCallback* callback)
+{
+    return _fileModule.SetModuleFileCallback(callback);
+}
+
+// Sets the playout volume scale factor; the valid range is [0, 2.0].
+WebRtc_Word32 FilePlayerImpl::SetAudioScaling(float scaleFactor)
+{
+    if((scaleFactor >= 0)&&(scaleFactor <= 2.0))
+    {
+        _scaling = scaleFactor;
+        return 0;
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+              "FilePlayerImpl::SetAudioScaling() not allowed scale factor");
+    return -1;
+}
+
+// Starts playout of fileName. For raw PCM formats a synthetic "L16" codec
+// matching the file's sample rate is configured; pre-encoded files use the
+// caller-supplied codecInst; all other formats let the file module detect
+// the codec. Returns 0 on success, -1 on failure.
+WebRtc_Word32 FilePlayerImpl::StartPlayingFile(const char* fileName,
+                                               bool loop,
+                                               WebRtc_UWord32 startPosition,
+                                               float volumeScaling,
+                                               WebRtc_UWord32 notification,
+                                               WebRtc_UWord32 stopPosition,
+                                               const CodecInst* codecInst)
+{
+    if (_fileFormat == kFileFormatPcm16kHzFile ||
+        _fileFormat == kFileFormatPcm8kHzFile||
+        _fileFormat == kFileFormatPcm32kHzFile )
+    {
+        // Raw PCM: describe the content as linear 16-bit ("L16").
+        CodecInst codecInstL16;
+        strncpy(codecInstL16.plname,"L16",32);
+        codecInstL16.pltype   = 93;
+        codecInstL16.channels = 1;
+
+        if (_fileFormat == kFileFormatPcm8kHzFile)
+        {
+            codecInstL16.rate     = 128000;
+            codecInstL16.plfreq   = 8000;
+            codecInstL16.pacsize  = 80;
+
+        } else if(_fileFormat == kFileFormatPcm16kHzFile)
+        {
+            codecInstL16.rate     = 256000;
+            codecInstL16.plfreq   = 16000;
+            codecInstL16.pacsize  = 160;
+
+        }else if(_fileFormat == kFileFormatPcm32kHzFile)
+        {
+            codecInstL16.rate     = 512000;
+            codecInstL16.plfreq   = 32000;
+            codecInstL16.pacsize  = 160;
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
+                       "FilePlayerImpl::StartPlayingFile() sample frequency\
+ specifed not supported for PCM format.");
+            return -1;
+        }
+
+        if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+                                              _fileFormat, &codecInstL16,
+                                              startPosition,
+                                              stopPosition) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize file\
+ %s playout.", fileName);
+            return -1;
+        }
+        SetAudioScaling(volumeScaling);
+    }else if(_fileFormat == kFileFormatPreencodedFile)
+    {
+        // Pre-encoded data: the caller supplies the codec description.
+        if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+                                              _fileFormat, codecInst) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingPreEncodedFile() failed to\
+ initialize pre-encoded file %s playout.",
+                fileName);
+            return -1;
+        }
+    } else
+    {
+        // Other formats (e.g. WAV): let the file module detect the codec.
+        CodecInst* no_inst = NULL;
+        if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+                                              _fileFormat, no_inst,
+                                              startPosition,
+                                              stopPosition) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize file\
+ %s playout.", fileName);
+            return -1;
+        }
+        SetAudioScaling(volumeScaling);
+    }
+    if (SetUpAudioDecoder() == -1)
+    {
+        StopPlayingFile();
+        return -1;
+    }
+    return 0;
+}
+
+// Stream variant of StartPlayingFile(): reads audio from sourceStream
+// instead of a named file. Codec selection mirrors the file-based overload.
+WebRtc_Word32 FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
+                                               WebRtc_UWord32 startPosition,
+                                               float volumeScaling,
+                                               WebRtc_UWord32 notification,
+                                               WebRtc_UWord32 stopPosition,
+                                               const CodecInst* codecInst)
+{
+    if (_fileFormat == kFileFormatPcm16kHzFile ||
+        _fileFormat == kFileFormatPcm32kHzFile ||
+        _fileFormat == kFileFormatPcm8kHzFile)
+    {
+        // Raw PCM: describe the content as linear 16-bit ("L16").
+        CodecInst codecInstL16;
+        strncpy(codecInstL16.plname,"L16",32);
+        codecInstL16.pltype   = 93;
+        codecInstL16.channels = 1;
+
+        if (_fileFormat == kFileFormatPcm8kHzFile)
+        {
+            codecInstL16.rate     = 128000;
+            codecInstL16.plfreq   = 8000;
+            codecInstL16.pacsize  = 80;
+
+        }else if (_fileFormat == kFileFormatPcm16kHzFile)
+        {
+            codecInstL16.rate     = 256000;
+            codecInstL16.plfreq   = 16000;
+            codecInstL16.pacsize  = 160;
+
+        }else if (_fileFormat == kFileFormatPcm32kHzFile)
+        {
+            codecInstL16.rate     = 512000;
+            codecInstL16.plfreq   = 32000;
+            codecInstL16.pacsize  = 160;
+        }else
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() sample frequency specifed\
+ not supported for PCM format.");
+            return -1;
+        }
+        if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+                                                _fileFormat, &codecInstL16,
+                                                startPosition,
+                                                stopPosition) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
+ playout.");
+            return -1;
+        }
+
+    }else if(_fileFormat == kFileFormatPreencodedFile)
+    {
+        // Pre-encoded data: the caller supplies the codec description.
+        if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+                                                _fileFormat, codecInst) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
+ playout.");
+            return -1;
+        }
+    } else {
+        // Other formats: let the file module detect the codec.
+        CodecInst* no_inst = NULL;
+        if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+                                                _fileFormat, no_inst,
+                                                startPosition,
+                                                stopPosition) == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
+                       "FilePlayerImpl::StartPlayingFile() failed to initialize\
+ stream playout.");
+            return -1;
+        }
+    }
+    SetAudioScaling(volumeScaling);
+
+    if (SetUpAudioDecoder() == -1)
+    {
+        StopPlayingFile();
+        return -1;
+    }
+    return 0;
+}
+
+// Stops playout and clears the codec state and frame-pacing counters.
+WebRtc_Word32 FilePlayerImpl::StopPlayingFile()
+{
+    memset(&_codec, 0, sizeof(CodecInst));
+    _numberOf10MsPerFrame  = 0;
+    _numberOf10MsInDecoder = 0;
+    return _fileModule.StopPlaying();
+}
+
+// True while the media file module is playing.
+bool FilePlayerImpl::IsPlayingFile() const
+{
+    return _fileModule.IsPlaying();
+}
+
+// Retrieves the current playout position, in milliseconds, into durationMs.
+WebRtc_Word32 FilePlayerImpl::GetPlayoutPosition(WebRtc_UWord32& durationMs)
+{
+    return _fileModule.PlayoutPositionMs(durationMs);
+}
+
+// Reads the codec description from the opened file and, unless the content is
+// raw L16, configures _audioDecoder with it. Also derives how many 10 ms
+// blocks make up one codec frame, for read pacing. Returns 0 on success.
+WebRtc_Word32 FilePlayerImpl::SetUpAudioDecoder()
+{
+    if ((_fileModule.codec_info(_codec) == -1))
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FilePlayerImpl::StartPlayingFile() failed to retrieve Codec info\
+ of file data.");
+        return -1;
+    }
+    if( STR_CASE_CMP(_codec.plname, "L16") != 0 &&
+        _audioDecoder.SetDecodeCodec(_codec,AMRFileStorage) == -1)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FilePlayerImpl::StartPlayingFile() codec %s not supported",
+            _codec.plname);
+        return -1;
+    }
+    // pacsize is samples per frame; plfreq / 100 is samples per 10 ms.
+    _numberOf10MsPerFrame = _codec.pacsize / (_codec.plfreq / 100);
+    _numberOf10MsInDecoder = 0;
+    return 0;
+}
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+// Note: _videoDecoder, _encodedData and _frameScaler are heap allocations
+// held by reference; they are deleted in the destructor.
+VideoFilePlayerImpl::VideoFilePlayerImpl(WebRtc_UWord32 instanceID,
+                                         FileFormats fileFormat)
+    : FilePlayerImpl(instanceID,fileFormat),
+      _videoDecoder(*new VideoCoder(instanceID)),
+      video_codec_info_(),
+      _decodedVideoFrames(0),
+      _encodedData(*new EncodedVideoData()),
+      _frameScaler(*new FrameScaler()),
+      _critSec(CriticalSectionWrapper::CreateCriticalSection()),
+      _startTime(),
+      _accumulatedRenderTimeMs(0),
+      _frameLengthMS(0),
+      _numberOfFramesRead(0),
+      _videoOnly(false)
+{
+    memset(&video_codec_info_, 0, sizeof(video_codec_info_));
+}
+
+VideoFilePlayerImpl::~VideoFilePlayerImpl()
+{
+    // Delete the owned heap objects; the last three are held as references.
+    delete _critSec;
+    delete &_frameScaler;
+    delete &_videoDecoder;
+    delete &_encodedData;
+}
+
+// Starts playout of an AVI file. When videoOnly is false the audio decoder
+// is configured as well. Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoFilePlayerImpl::StartPlayingVideoFile(
+    const char* fileName,
+    bool loop,
+    bool videoOnly)
+{
+    CriticalSectionScoped lock( _critSec);
+
+    if(_fileModule.StartPlayingVideoFile(fileName, loop, videoOnly,
+                                         _fileFormat) != 0)
+    {
+        return -1;
+    }
+
+    // Reset playout statistics and pacing state.
+    _decodedVideoFrames = 0;
+    _accumulatedRenderTimeMs = 0;
+    _frameLengthMS = 0;
+    _numberOfFramesRead = 0;
+    _videoOnly = videoOnly;
+
+    // Set up video_codec_info_ according to the file.
+    if(SetUpVideoDecoder() != 0)
+    {
+        StopPlayingFile();
+        return -1;
+    }
+    if(!videoOnly)
+    {
+        // Set up _codec according to the file.
+        if(SetUpAudioDecoder() != 0)
+        {
+            StopPlayingFile();
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Stops playout, resetting the video decoder before delegating to the
+// audio-side FilePlayerImpl::StopPlayingFile().
+WebRtc_Word32 VideoFilePlayerImpl::StopPlayingFile()
+{
+    CriticalSectionScoped lock( _critSec);
+
+    _decodedVideoFrames = 0;
+    _videoDecoder.ResetDecoder();
+
+    return FilePlayerImpl::StopPlayingFile();
+}
+
+// Like GetVideoFromFile(videoFrame), additionally rescaling the decoded
+// frame to outWidth x outHeight when it has content.
+WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame,
+                                                    WebRtc_UWord32 outWidth,
+                                                    WebRtc_UWord32 outHeight)
+{
+    CriticalSectionScoped lock( _critSec);
+
+    WebRtc_Word32 retVal = GetVideoFromFile(videoFrame);
+    if(retVal != 0)
+    {
+        return retVal;
+    }
+    if( videoFrame.Length() > 0)
+    {
+        retVal = _frameScaler.ResizeFrameIfNeeded(&videoFrame, outWidth,
+                                                  outHeight);
+    }
+    return retVal;
+}
+
+// Decodes the frame most recently read by TimeUntilNextVideoFrame() into
+// videoFrame. I420 content is copied as-is; other codecs go through the
+// video decoder. Returns -1 when no new data has been read.
+WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame)
+{
+    CriticalSectionScoped lock( _critSec);
+    // No new video data read from file.
+    if(_encodedData.payloadSize == 0)
+    {
+        videoFrame.SetLength(0);
+        return -1;
+    }
+    WebRtc_Word32 retVal = 0;
+    if(strncmp(video_codec_info_.plName, "I420", 5) == 0)
+    {
+        videoFrame.CopyFrame(_encodedData.payloadSize,_encodedData.payloadData);
+        videoFrame.SetLength(_encodedData.payloadSize);
+        videoFrame.SetWidth(video_codec_info_.width);
+        videoFrame.SetHeight(video_codec_info_.height);
+    }else
+    {
+        // Set the timestamp manually since there is no timestamp in the file.
+        // Update timestamp according to 90 kHz stream.
+        _encodedData.timeStamp += (90000 / video_codec_info_.maxFramerate);
+        retVal = _videoDecoder.Decode(videoFrame, _encodedData);
+    }
+
+    WebRtc_Word64 renderTimeMs = TickTime::MillisecondTimestamp();
+    videoFrame.SetRenderTime(renderTimeMs);
+
+    // Indicate that the current frame in the encoded buffer is old/has
+    // already been read.
+    _encodedData.payloadSize = 0;
+    if( retVal == 0)
+    {
+        _decodedVideoFrames++;
+    }
+    return retVal;
+}
+
+// Copies the video codec info read from the file into videoCodec; returns
+// -1 when none is available (plName is empty).
+WebRtc_Word32 VideoFilePlayerImpl::video_codec_info(
+    VideoCodec& videoCodec) const
+{
+    if(video_codec_info_.plName[0] == 0)
+    {
+        return -1;
+    }
+    memcpy(&videoCodec, &video_codec_info_, sizeof(VideoCodec));
+    return 0;
+}
+
+// Returns the number of ms until the next video frame should be rendered,
+// 0 if it is already due, or -1 on error (wrong format, not playing, read
+// failure, or the audio stream far ahead of the video). Reads the next
+// encoded frame from the file when the buffer is empty.
+WebRtc_Word32 VideoFilePlayerImpl::TimeUntilNextVideoFrame()
+{
+    if(_fileFormat != kFileFormatAviFile)
+    {
+        return -1;
+    }
+    if(!_fileModule.IsPlaying())
+    {
+        return -1;
+    }
+    if(_encodedData.payloadSize <= 0)
+    {
+        // Read next frame from file.
+        CriticalSectionScoped lock( _critSec);
+
+        if(_fileFormat == kFileFormatAviFile)
+        {
+            // Get next video frame
+            WebRtc_UWord32 encodedBufferLengthInBytes = _encodedData.bufferSize;
+            if(_fileModule.PlayoutAVIVideoData(
+                   reinterpret_cast< WebRtc_Word8*>(_encodedData.payloadData),
+                   encodedBufferLengthInBytes) != 0)
+            {
+                 WEBRTC_TRACE(
+                     kTraceWarning,
+                     kTraceVideo,
+                     _instanceID,
+                     "FilePlayerImpl::TimeUntilNextVideoFrame() error reading\
+ video data");
+                return -1;
+            }
+            _encodedData.payloadSize = encodedBufferLengthInBytes;
+            _encodedData.codec = video_codec_info_.codecType;
+            _numberOfFramesRead++;
+
+            if(_accumulatedRenderTimeMs == 0)
+            {
+                _startTime = TickTime::Now();
+                // This if-statement should only trigger once.
+                _accumulatedRenderTimeMs = 1;
+            } else {
+                // A full second's worth of frames have been read.
+                if(_numberOfFramesRead % video_codec_info_.maxFramerate == 0)
+                {
+                    // Frame rate is in frames per seconds. Frame length is
+                    // calculated as an integer division which means it may
+                    // be rounded down. Compensate for this every second.
+                    // NOTE(review): assumes _frameLengthMS > 0, i.e.
+                    // maxFramerate <= 1000 fps -- confirm.
+                    WebRtc_UWord32 rest = 1000%_frameLengthMS;
+                    _accumulatedRenderTimeMs += rest;
+                }
+                _accumulatedRenderTimeMs += _frameLengthMS;
+            }
+        }
+    }
+
+    WebRtc_Word64 timeToNextFrame;
+    if(_videoOnly)
+    {
+        timeToNextFrame = _accumulatedRenderTimeMs -
+            (TickTime::Now() - _startTime).Milliseconds();
+
+    } else {
+        // Synchronize with the audio stream instead of system clock.
+        timeToNextFrame = _accumulatedRenderTimeMs - _decodedLengthInMS;
+    }
+    if(timeToNextFrame < 0)
+    {
+        return 0;
+
+    } else if(timeToNextFrame > 0x0fffffff)
+    {
+        // Wraparound, or the audio stream has gone too far ahead of the
+        // video stream.
+        return -1;
+    }
+    return static_cast<WebRtc_Word32>(timeToNextFrame);
+}
+
+// Reads the video codec description from the file, configures the video
+// decoder with it, and sizes the encoded-frame buffer for the largest
+// possible (raw I420) frame. Returns 0 on success.
+WebRtc_Word32 VideoFilePlayerImpl::SetUpVideoDecoder()
+{
+    if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVideo,
+            _instanceID,
+            "FilePlayerImpl::SetVideoDecoder() failed to retrieve Codec info of\
+ file data.");
+        return -1;
+    }
+
+    WebRtc_Word32 useNumberOfCores = 1;
+    if(_videoDecoder.SetDecodeCodec(video_codec_info_, useNumberOfCores) != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVideo,
+            _instanceID,
+            "FilePlayerImpl::SetUpVideoDecoder() codec %s not supported",
+            video_codec_info_.plName);
+        return -1;
+    }
+
+    // NOTE(review): assumes maxFramerate > 0 -- verify the file module
+    // guarantees this before the division.
+    _frameLengthMS = 1000/video_codec_info_.maxFramerate;
+
+    // Size of unencoded data (I420) should be the largest possible frame size
+    // in a file.
+    const WebRtc_UWord32 KReadBufferSize = 3 * video_codec_info_.width *
+        video_codec_info_.height / 2;
+    _encodedData.VerifyAndAllocate(KReadBufferSize);
+    _encodedData.encodedHeight = video_codec_info_.height;
+    _encodedData.encodedWidth = video_codec_info_.width;
+    _encodedData.payloadType = video_codec_info_.plType;
+    _encodedData.timeStamp = 0;
+    return 0;
+}
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
diff --git a/trunk/src/modules/utility/source/file_player_impl.h b/trunk/src/modules/utility/source/file_player_impl.h
new file mode 100644
index 0000000..e15cb09
--- /dev/null
+++ b/trunk/src/modules/utility/source/file_player_impl.h
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
+
+#include "coder.h"
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "engine_configurations.h"
+#include "file_player.h"
+#include "media_file_defines.h"
+#include "media_file.h"
+#include "resampler.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class VideoCoder;
+class FrameScaler;
+
+// Plays audio from a file or stream. Decoded, resampled 10 ms blocks are
+// pulled by the caller via Get10msAudioFromFile().
+class FilePlayerImpl : public FilePlayer
+{
+public:
+    FilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    ~FilePlayerImpl();
+
+    // FilePlayer functions.
+    virtual WebRtc_Word32 Get10msAudioFromFile(
+        WebRtc_Word16* decodedDataBuffer,
+        WebRtc_UWord32& decodedDataLengthInSamples,
+        const WebRtc_UWord32 frequencyInHz);
+    virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+    virtual WebRtc_Word32 StartPlayingFile(
+        const char* fileName,
+        bool loop,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL);
+    virtual WebRtc_Word32 StartPlayingFile(
+        InStream& sourceStream,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL);
+    virtual WebRtc_Word32 StopPlayingFile();
+    virtual bool IsPlayingFile() const;
+    virtual WebRtc_Word32 GetPlayoutPosition(WebRtc_UWord32& durationMs);
+    virtual WebRtc_Word32 AudioCodec(CodecInst& audioCodec) const;
+    virtual WebRtc_Word32 Frequency() const;
+    virtual WebRtc_Word32 SetAudioScaling(float scaleFactor);
+
+protected:
+    // Reads codec info from the opened file and configures _audioDecoder.
+    WebRtc_Word32 SetUpAudioDecoder();
+
+    WebRtc_UWord32 _instanceID;
+    const FileFormats _fileFormat;
+    // Owned: created in the constructor, destroyed in the destructor.
+    MediaFile& _fileModule;
+
+    // Total duration of audio decoded so far, in milliseconds.
+    WebRtc_UWord32 _decodedLengthInMS;
+
+private:
+    WebRtc_Word16 _decodedAudioBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
+    AudioCoder _audioDecoder;
+
+    CodecInst _codec;
+    // Read pacing used when codec frames are longer than 10 ms.
+    WebRtc_Word32 _numberOf10MsPerFrame;
+    WebRtc_Word32 _numberOf10MsInDecoder;
+
+    Resampler _resampler;
+    // Volume scale factor in [0, 2.0] applied to decoded samples.
+    float _scaling;
+};
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+// Adds AVI video playout on top of FilePlayerImpl. Frames are read and paced
+// by TimeUntilNextVideoFrame() and decoded by GetVideoFromFile().
+class VideoFilePlayerImpl: public FilePlayerImpl
+{
+public:
+    VideoFilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    ~VideoFilePlayerImpl();
+
+    // FilePlayer functions.
+    virtual WebRtc_Word32 TimeUntilNextVideoFrame();
+    virtual WebRtc_Word32 StartPlayingVideoFile(const char* fileName,
+                                                bool loop,
+                                                bool videoOnly);
+    virtual WebRtc_Word32 StopPlayingFile();
+    virtual WebRtc_Word32 video_codec_info(VideoCodec& videoCodec) const;
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame);
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame,
+                                           const WebRtc_UWord32 outWidth,
+                                           const WebRtc_UWord32 outHeight);
+
+private:
+    WebRtc_Word32 SetUpVideoDecoder();
+
+    // Owned heap allocation held by reference; deleted in the destructor.
+    VideoCoder& _videoDecoder;
+    VideoCodec video_codec_info_;
+    WebRtc_Word32 _decodedVideoFrames;
+
+    // Holds the most recently read, not yet consumed, encoded frame.
+    // Owned heap allocation held by reference; deleted in the destructor.
+    EncodedVideoData& _encodedData;
+
+    // Owned heap allocation held by reference; deleted in the destructor.
+    FrameScaler& _frameScaler;
+    CriticalSectionWrapper* _critSec;
+    TickTime _startTime;
+    // Render-time bookkeeping used to pace frame delivery.
+    WebRtc_Word64 _accumulatedRenderTimeMs;
+    WebRtc_UWord32 _frameLengthMS;
+
+    WebRtc_Word32 _numberOfFramesRead;
+    bool _videoOnly;
+};
+#endif //WEBRTC_MODULE_UTILITY_VIDEO
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
diff --git a/trunk/src/modules/utility/source/file_player_unittest.cc b/trunk/src/modules/utility/source/file_player_unittest.cc
new file mode 100644
index 0000000..2e76905
--- /dev/null
+++ b/trunk/src/modules/utility/source/file_player_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "file_player.h"
+#include "gtest/gtest.h"
+
+TEST(FilePlayerTest, EmptyTestToGetCodeCoverage) {}  // Intentionally empty.
diff --git a/trunk/src/modules/utility/source/file_recorder_impl.cc b/trunk/src/modules/utility/source/file_recorder_impl.cc
new file mode 100644
index 0000000..07d16aa
--- /dev/null
+++ b/trunk/src/modules/utility/source/file_recorder_impl.cc
@@ -0,0 +1,794 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#include "file_recorder_impl.h"
+#include "media_file.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "cpu_wrapper.h"
+    #include "critical_section_wrapper.h"
+    #include "frame_scaler.h"
+    #include "video_coder.h"
+    #include "video_frames_queue.h"
+#endif
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+// Factory: instantiates the FileRecorder flavour matching |fileFormat|.
+// Returns NULL (after asserting in debug builds) for unknown formats, or for
+// AVI when video support is compiled out.
+FileRecorder* FileRecorder::CreateFileRecorder(WebRtc_UWord32 instanceID,
+                                               FileFormats fileFormat)
+{
+    switch(fileFormat)
+    {
+    case kFileFormatWavFile:
+    case kFileFormatCompressedFile:
+    case kFileFormatPreencodedFile:
+    case kFileFormatPcm16kHzFile:
+    case kFileFormatPcm8kHzFile:
+    case kFileFormatPcm32kHzFile:
+        // All audio-only formats share the base implementation.
+        return new FileRecorderImpl(instanceID, fileFormat);
+    case kFileFormatAviFile:
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        return new AviRecorder(instanceID, fileFormat);
+#else
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                             "Invalid file format: %d", kFileFormatAviFile);
+        assert(false);
+        return NULL;
+#endif
+    }
+    assert(false);
+    return NULL;
+}
+
+// Counterpart of CreateFileRecorder(); safe to call with NULL.
+void FileRecorder::DestroyFileRecorder(FileRecorder* recorder)
+{
+    delete recorder;
+}
+
+// Constructor. NOTE(review): MediaFile::CreateMediaFile() may presumably
+// return NULL on failure — the member functions below check _moduleFile
+// against NULL inconsistently; confirm the factory's contract.
+FileRecorderImpl::FileRecorderImpl(WebRtc_UWord32 instanceID,
+                                   FileFormats fileFormat)
+    : _instanceID(instanceID),
+      _fileFormat(fileFormat),
+      _moduleFile(MediaFile::CreateMediaFile(_instanceID)),
+      _stream(NULL),
+      codec_info_(),
+      _amrFormat(AMRFileStorage),
+      _audioBuffer(),
+      _audioEncoder(instanceID),
+      _audioResampler()
+{
+}
+
+// Destructor: releases the MediaFile created in the constructor.
+FileRecorderImpl::~FileRecorderImpl()
+{
+    MediaFile::DestroyMediaFile(_moduleFile);
+}
+
+// Accessor: the format this recorder was constructed with.
+FileFormats FileRecorderImpl::RecordingFileFormat() const
+{
+    return _fileFormat;
+}
+
+// Forwards the notification callback to the underlying MediaFile.
+// Returns -1 if the MediaFile could not be created.
+WebRtc_Word32 FileRecorderImpl::RegisterModuleFileCallback(
+    FileCallback* callback)
+{
+    if(_moduleFile == NULL)
+    {
+        return -1;
+    }
+    return _moduleFile->SetModuleFileCallback(callback);
+}
+
+// Opens |fileName| for recording with the given codec and sets up the audio
+// encoder. On any failure the recording is stopped again and -1 is returned.
+WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+    const char* fileName,
+    const CodecInst& codecInst,
+    WebRtc_UWord32 notificationTimeMs,
+    ACMAMRPackingFormat amrFormat)
+{
+    if(_moduleFile == NULL)
+    {
+        return -1;
+    }
+    codec_info_ = codecInst;
+    _amrFormat = amrFormat;
+
+    WebRtc_Word32 retVal = 0;
+    if(_fileFormat != kFileFormatAviFile)
+    {
+        // AVI files should be started using StartRecordingVideoFile(..) all
+        // other formats should use this API.
+        retVal =_moduleFile->StartRecordingAudioFile(fileName, _fileFormat,
+                                                     codecInst,
+                                                     notificationTimeMs);
+    }
+
+    if( retVal == 0)
+    {
+        retVal = SetUpAudioEncoder();
+    }
+    if( retVal != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FileRecorder::StartRecording() failed to initialize file %s for\
+ recording.",
+            fileName);
+
+        // Roll back any partially started recording.
+        if(IsRecording())
+        {
+            StopRecording();
+        }
+    }
+    return retVal;
+}
+
+// Overload recording into a caller-supplied OutStream instead of a file.
+// NOTE(review): unlike the file-name overload above there is no NULL check on
+// _moduleFile here — confirm construction cannot fail before relying on this.
+WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+    OutStream& destStream,
+    const CodecInst& codecInst,
+    WebRtc_UWord32 notificationTimeMs,
+    ACMAMRPackingFormat amrFormat)
+{
+    codec_info_ = codecInst;
+    _amrFormat = amrFormat;
+
+    WebRtc_Word32 retVal = _moduleFile->StartRecordingAudioStream(
+        destStream,
+        _fileFormat,
+        codecInst,
+        notificationTimeMs);
+
+    if( retVal == 0)
+    {
+        retVal = SetUpAudioEncoder();
+    }
+    if( retVal != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FileRecorder::StartRecording() failed to initialize outStream for\
+ recording.",
+
+        // Roll back any partially started recording.
+        if(IsRecording())
+        {
+            StopRecording();
+        }
+    }
+    return retVal;
+}
+
+// Stops recording and clears the cached codec so codec_info() reports
+// "not recording" (plfreq == 0) afterwards.
+WebRtc_Word32 FileRecorderImpl::StopRecording()
+{
+    memset(&codec_info_, 0, sizeof(CodecInst));
+    return _moduleFile->StopRecording();
+}
+
+// Delegates the recording state query to the underlying MediaFile.
+bool FileRecorderImpl::IsRecording() const
+{
+    return _moduleFile->IsRecording();
+}
+
+// Converts |incomingAudioFrame| to the recording's channel layout, encodes it
+// (or resamples raw L16) and hands the result to WriteEncodedAudioData().
+// Returns -1 if recording is not active or encoding fails, 0 otherwise.
+WebRtc_Word32 FileRecorderImpl::RecordAudioToFile(
+    const AudioFrame& incomingAudioFrame,
+    const TickTime* playoutTS)
+{
+    // plfreq == 0 means codec_info_ was cleared by StopRecording() or never
+    // set, i.e. recording is not active.
+    if (codec_info_.plfreq == 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FileRecorder::RecordAudioToFile() recording audio is not turned\
+ on");
+        return -1;
+    }
+    AudioFrame tempAudioFrame;
+    tempAudioFrame._payloadDataLengthInSamples = 0;
+    if( incomingAudioFrame._audioChannel == 2 &&
+        !_moduleFile->IsStereo())
+    {
+        // Recording mono but incoming audio is (interleaved) stereo.
+        tempAudioFrame._audioChannel = 1;
+        tempAudioFrame._frequencyInHz = incomingAudioFrame._frequencyInHz;
+        tempAudioFrame._payloadDataLengthInSamples =
+          incomingAudioFrame._payloadDataLengthInSamples;
+        for (WebRtc_UWord16 i = 0;
+             i < (incomingAudioFrame._payloadDataLengthInSamples); i++)
+        {
+            // Sample value is the average of left and right buffer rounded to
+            // closest integer value. Note samples can be either 1 or 2 byte.
+             tempAudioFrame._payloadData[i] =
+                 ((incomingAudioFrame._payloadData[2 * i] +
+                   incomingAudioFrame._payloadData[(2 * i) + 1] + 1) >> 1);
+        }
+    }
+    else if( incomingAudioFrame._audioChannel == 1 &&
+        _moduleFile->IsStereo())
+    {
+        // Recording stereo but incoming audio is mono.
+        tempAudioFrame._audioChannel = 2;
+        tempAudioFrame._frequencyInHz = incomingAudioFrame._frequencyInHz;
+        tempAudioFrame._payloadDataLengthInSamples =
+          incomingAudioFrame._payloadDataLengthInSamples;
+        // NOTE(review): writes 2 * _payloadDataLengthInSamples entries into
+        // tempAudioFrame._payloadData — assumes the AudioFrame buffer is
+        // sized for interleaved stereo; confirm against AudioFrame's limits.
+        for (WebRtc_UWord16 i = 0;
+             i < (incomingAudioFrame._payloadDataLengthInSamples); i++)
+        {
+            // Duplicate sample to both channels
+             tempAudioFrame._payloadData[2*i] =
+               incomingAudioFrame._payloadData[i];
+             tempAudioFrame._payloadData[2*i+1] =
+               incomingAudioFrame._payloadData[i];
+        }
+    }
+
+    const AudioFrame* ptrAudioFrame = &incomingAudioFrame;
+    if(tempAudioFrame._payloadDataLengthInSamples != 0)
+    {
+        // If tempAudioFrame is not empty it holds the channel-converted audio
+        // to be recorded instead of the incoming frame.
+        ptrAudioFrame = &tempAudioFrame;
+    }
+
+    // Encode the audio data before writing to file. Don't encode if the codec
+    // is PCM.
+    // NOTE: stereo recording is only supported for WAV files.
+    // TODO (hellner): WAV expects PCM in little-endian byte order. Skipping
+    // the "encoding" step with the PCM coder may be a problem on big-endian
+    // systems.
+    WebRtc_UWord32 encodedLenInBytes = 0;
+    if (_fileFormat == kFileFormatPreencodedFile ||
+        STR_CASE_CMP(codec_info_.plname, "L16") != 0)
+    {
+        if (_audioEncoder.Encode(*ptrAudioFrame, _audioBuffer,
+                                 encodedLenInBytes) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FileRecorder::RecordAudioToFile() codec %s not supported or\
+ failed to encode stream",
+                codec_info_.plname);
+            return -1;
+        }
+    } else {
+        // Raw L16: no encoding, only resample to the recording frequency.
+        int outLen = 0;
+        if(ptrAudioFrame->_audioChannel == 2)
+        {
+            // ptrAudioFrame contains interleaved stereo audio.
+            _audioResampler.ResetIfNeeded(ptrAudioFrame->_frequencyInHz,
+                                          codec_info_.plfreq,
+                                          kResamplerSynchronousStereo);
+            _audioResampler.Push(ptrAudioFrame->_payloadData,
+                                 ptrAudioFrame->_payloadDataLengthInSamples *
+                                 ptrAudioFrame->_audioChannel,
+                                 (WebRtc_Word16*)_audioBuffer,
+                                 MAX_AUDIO_BUFFER_IN_BYTES, outLen);
+        } else {
+            _audioResampler.ResetIfNeeded(ptrAudioFrame->_frequencyInHz,
+                                          codec_info_.plfreq,
+                                          kResamplerSynchronous);
+            _audioResampler.Push(ptrAudioFrame->_payloadData,
+                                 ptrAudioFrame->_payloadDataLengthInSamples,
+                                 (WebRtc_Word16*)_audioBuffer,
+                                 MAX_AUDIO_BUFFER_IN_BYTES, outLen);
+        }
+        encodedLenInBytes = outLen * sizeof(WebRtc_Word16);
+    }
+
+    // Codec may not be operating at a frame rate of 10 ms. Whenever enough
+    // 10 ms chunks of data has been pushed to the encoder an encoded frame
+    // will be available. Wait until then.
+    if (encodedLenInBytes)
+    {
+        WebRtc_UWord16 msOfData =
+            ptrAudioFrame->_payloadDataLengthInSamples /
+            WebRtc_UWord16(ptrAudioFrame->_frequencyInHz / 1000);
+        if (WriteEncodedAudioData(_audioBuffer,
+                                  (WebRtc_UWord16)encodedLenInBytes,
+                                  msOfData, playoutTS) == -1)
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Configures _audioEncoder for codec_info_ unless the target is raw L16
+// (which is only resampled, never encoded). Returns -1 on unsupported codec.
+WebRtc_Word32 FileRecorderImpl::SetUpAudioEncoder()
+{
+    if (_fileFormat == kFileFormatPreencodedFile ||
+        STR_CASE_CMP(codec_info_.plname, "L16") != 0)
+    {
+        if(_audioEncoder.SetEncodeCodec(codec_info_,_amrFormat) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVoice,
+                _instanceID,
+                "FileRecorder::StartRecording() codec %s not supported",
+                codec_info_.plname);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// Copies the active codec into |codecInst|. Returns -1 when no recording is
+// active (plfreq is zeroed by StopRecording()).
+WebRtc_Word32 FileRecorderImpl::codec_info(CodecInst& codecInst) const
+{
+    if(codec_info_.plfreq == 0)
+    {
+        return -1;
+    }
+    codecInst = codec_info_;
+    return 0;
+}
+
+// Base-class sink: writes encoded audio straight to the file. The timing
+// parameters are only used by the AviRecorder override for A/V sync.
+WebRtc_Word32 FileRecorderImpl::WriteEncodedAudioData(
+    const WebRtc_Word8* audioBuffer,
+    WebRtc_UWord16 bufferLength,
+    WebRtc_UWord16 /*millisecondsOfData*/,
+    const TickTime* /*playoutTS*/)
+{
+    return _moduleFile->IncomingAudioData(audioBuffer, bufferLength);
+}
+
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+// Value type queued by AviRecorder::WriteEncodedAudioData() and drained by
+// ProcessAudio(): one encoded audio chunk plus its playout timestamp.
+class AudioFrameFileInfo
+{
+    public:
+       AudioFrameFileInfo(const WebRtc_Word8* audioData,
+                     const WebRtc_UWord16 audioSize,
+                     const WebRtc_UWord16 audioMS,
+                     const TickTime& playoutTS)
+           : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
+             _playoutTS(playoutTS)
+       {
+           // Guard against overrunning the fixed-size copy below; an oversize
+           // chunk is recorded as empty rather than corrupting memory.
+           if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
+           {
+               assert(false);
+               _audioSize = 0;
+               return;
+           }
+           memcpy(_audioData, audioData, audioSize);
+       };
+    // TODO (hellner): either turn into a struct or provide get/set functions.
+    WebRtc_Word8   _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+    WebRtc_UWord16 _audioSize;  // Valid bytes in _audioData.
+    WebRtc_UWord16 _audioMS;    // Duration of this chunk in milliseconds.
+    TickTime _playoutTS;        // When this chunk should be played out.
+};
+
+// Constructor: allocates the video pipeline and the worker thread. The
+// thread is created but not started; StartThread() runs it.
+AviRecorder::AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat)
+    : FileRecorderImpl(instanceID, fileFormat),
+      _videoOnly(false),
+      _thread( 0),
+      _timeEvent(*EventWrapper::Create()),
+      _critSec(CriticalSectionWrapper::CreateCriticalSection()),
+      _writtenVideoFramesCounter(0),
+      _writtenAudioMS(0),
+      _writtenVideoMS(0)
+{
+    _videoEncoder = new VideoCoder(instanceID);
+    _frameScaler = new FrameScaler();
+    _videoFramesQueue = new VideoFramesQueue();
+    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+                                          "AviRecorder()");
+}
+
+// Destructor: stops recording (which also joins the worker thread) before
+// tearing down the members that thread uses.
+AviRecorder::~AviRecorder( )
+{
+    StopRecording( );
+
+    delete _videoEncoder;
+    delete _frameScaler;
+    delete _videoFramesQueue;
+    delete _thread;
+    // _timeEvent is held by reference; delete via its address.
+    delete &_timeEvent;
+    delete _critSec;
+}
+
+// Starts AVI recording of video (and audio unless |videoOnly|). On any
+// failure after the file is opened, StopRecording() rolls everything back.
+// NOTE(review): 1000 / maxFramerate below divides by zero if the codec's
+// maxFramerate is 0 — confirm callers validate |videoCodecInst| first.
+WebRtc_Word32 AviRecorder::StartRecordingVideoFile(
+    const char* fileName,
+    const CodecInst& audioCodecInst,
+    const VideoCodec& videoCodecInst,
+    ACMAMRPackingFormat amrFormat,
+    bool videoOnly)
+{
+    _firstAudioFrameReceived = false;
+    _videoCodecInst = videoCodecInst;
+    _videoOnly = videoOnly;
+
+    if(_moduleFile->StartRecordingVideoFile(fileName, _fileFormat,
+                                            audioCodecInst, videoCodecInst,
+                                            videoOnly) != 0)
+    {
+        return -1;
+    }
+
+    if(!videoOnly)
+    {
+        if(FileRecorderImpl::StartRecordingAudioFile(fileName,audioCodecInst, 0,
+                                                     amrFormat) !=0)
+        {
+            StopRecording();
+            return -1;
+        }
+    }
+    if( SetUpVideoEncoder() != 0)
+    {
+        StopRecording();
+        return -1;
+    }
+    if(_videoOnly)
+    {
+        // Writing to AVI file is non-blocking.
+        // Start non-blocking timer if video only. If recording both video and
+        // audio let the pushing of audio frames be the timer.
+        _timeEvent.StartTimer(true, 1000 / _videoCodecInst.maxFramerate);
+    }
+    StartThread();
+    return 0;
+}
+
+// Stops the video-only timer and the worker thread, then lets the base class
+// close the file and clear the codec state.
+WebRtc_Word32 AviRecorder::StopRecording()
+{
+    _timeEvent.StopTimer();
+
+    StopThread();
+    return FileRecorderImpl::StopRecording();
+}
+
+// Size in bytes of one I420 frame at the configured resolution
+// (Y plane + quarter-size U and V planes = width * height * 3 / 2).
+WebRtc_Word32 AviRecorder::CalcI420FrameSize( ) const
+{
+    return 3 * _videoCodecInst.width * _videoCodecInst.height / 2;
+}
+
+// Allocates the encoded-frame buffer and configures the video encoder for
+// _videoCodecInst. Returns -1 if the codec cannot be set.
+WebRtc_Word32 AviRecorder::SetUpVideoEncoder()
+{
+    // Size of unencoded data (I420) should be the largest possible frame size
+    // in a file.
+    _videoMaxPayloadSize = CalcI420FrameSize();
+    _videoEncodedData.VerifyAndAllocate(_videoMaxPayloadSize);
+
+    _videoCodecInst.plType = _videoEncoder->DefaultPayloadType(
+        _videoCodecInst.plName);
+
+    WebRtc_Word32 useNumberOfCores = 1;
+    // Set the max payload size to 16000. This means that the codec will try to
+    // create slices that will fit in 16000 kByte packets. However, the
+    // Encode() call will still generate one full frame.
+    if(_videoEncoder->SetEncodeCodec(_videoCodecInst, useNumberOfCores,
+                                     16000))
+    {
+        return -1;
+    }
+    return 0;
+}
+
+// Queues |videoFrame| for asynchronous writing by the worker thread.
+// Returns -1 if not recording or the frame is empty; on queue failure the
+// whole recording is stopped.
+WebRtc_Word32 AviRecorder::RecordVideoToFile(const VideoFrame& videoFrame)
+{
+    CriticalSectionScoped lock(_critSec);
+
+    if(!IsRecording() || ( videoFrame.Length() == 0))
+    {
+        return -1;
+    }
+    // The frame is written to file in AviRecorder::Process().
+    WebRtc_Word32 retVal = _videoFramesQueue->AddFrame(videoFrame);
+    if(retVal != 0)
+    {
+        StopRecording();
+    }
+    return retVal;
+}
+
+// Starts the worker thread created in the constructor. Returns false if the
+// thread object was never created.
+bool AviRecorder::StartThread()
+{
+    unsigned int id;  // Out-parameter filled in by Start(); value unused here.
+    if( _thread == 0)
+    {
+        return false;
+    }
+
+    return _thread->Start(id);
+}
+
+// Signals the worker thread to exit and joins it. _thread is cleared under
+// the lock so Process() sees NULL on its next wakeup; the event is set so a
+// blocked Wait() returns immediately.
+bool AviRecorder::StopThread()
+{
+    _critSec->Enter();
+
+    if(_thread)
+    {
+        _thread->SetNotAlive();
+
+        ThreadWrapper* thread = _thread;
+        _thread = NULL;
+
+        _timeEvent.Set();
+
+        _critSec->Leave();
+
+        if(thread->Stop())
+        {
+            delete thread;
+        } else {
+            // NOTE(review): on Stop() failure |thread| is leaked — confirm
+            // whether deleting a still-running ThreadWrapper is unsafe here.
+            return false;
+        }
+    } else {
+        _critSec->Leave();
+    }
+    return true;
+}
+
+// Static thread entry point: forwards to the instance's Process() loop.
+bool AviRecorder::Run( ThreadObj threadObj)
+{
+    return static_cast<AviRecorder*>( threadObj)->Process();
+}
+
+// Drains _audioFramesToWrite into the file. Before the first video frame has
+// been written it also discards audio older than the pending video frame so
+// the streams start in sync. Returns 0 (the error variable is never set).
+WebRtc_Word32 AviRecorder::ProcessAudio()
+{
+    if (_writtenVideoFramesCounter == 0)
+    {
+        // Get the most recent frame that is due for writing to file. Since
+        // frames are unencoded it's safe to throw away frames if necessary
+        // for synchronizing audio and video.
+        VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
+        if(frameToProcess)
+        {
+            // Synchronize audio to the current frame to process by throwing
+            // away audio samples with older timestamp than the video frame.
+            WebRtc_UWord32 numberOfAudioElements =
+                _audioFramesToWrite.GetSize();
+            for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+            {
+                AudioFrameFileInfo* frameInfo =
+                    (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
+                if(frameInfo)
+                {
+                    if(TickTime::TicksToMilliseconds(
+                           frameInfo->_playoutTS.Ticks()) <
+                       frameToProcess->RenderTimeMs())
+                    {
+                        delete frameInfo;
+                        _audioFramesToWrite.PopFront();
+                    } else
+                    {
+                        // Queue is ordered; the rest is newer than the frame.
+                        break;
+                    }
+                }
+            }
+        }
+    }
+    // Write all audio up to current timestamp.
+    WebRtc_Word32 error = 0;
+    WebRtc_UWord32 numberOfAudioElements = _audioFramesToWrite.GetSize();
+    for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+    {
+        AudioFrameFileInfo* frameInfo =
+            (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
+        if(frameInfo)
+        {
+            // Only write audio whose playout time has already passed.
+            if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
+            {
+                _moduleFile->IncomingAudioData(frameInfo->_audioData,
+                                               frameInfo->_audioSize);
+                _writtenAudioMS += frameInfo->_audioMS;
+                delete frameInfo;
+                _audioFramesToWrite.PopFront();
+            } else {
+                break;
+            }
+        } else {
+            // Drop a NULL list entry so the queue cannot stall.
+            _audioFramesToWrite.PopFront();
+        }
+    }
+    return error;
+}
+
+// Worker-thread loop body: waits for an audio push (or the video-only timer),
+// then writes queued audio and video to the AVI file, keeping the two streams
+// paced against each other. Returns false to terminate the thread.
+//
+// Fix: the video-only branch computed
+//     frameSkip = (maxFramerate * frameLengthMS) / restMS
+// unconditionally. restMS == 1000 % frameLengthMS is 0 whenever the frame
+// length divides 1000 evenly (e.g. 10, 20, 25, 40 or 50 fps), making this a
+// division by zero, and the following "% frameSkip" a modulo by zero — both
+// undefined behavior in C++. When restMS is 0 there is no rounding drift to
+// compensate for, so no frame should ever be skipped.
+bool AviRecorder::Process()
+{
+    switch(_timeEvent.Wait(500))
+    {
+    case kEventSignaled:
+        if(_thread == NULL)
+        {
+            // StopThread() has signalled us to exit.
+            return false;
+        }
+        break;
+    case kEventError:
+        return false;
+    case kEventTimeout:
+        // No events triggered. No work to do.
+        return true;
+    }
+    CriticalSectionScoped lock( _critSec);
+
+    // Get the most recent frame to write to file (if any). Synchronize it with
+    // the audio stream (if any). Synchronize the video based on its render
+    // timestamp (i.e. VideoFrame::RenderTimeMS())
+    VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
+    if( frameToProcess == NULL)
+    {
+        return true;
+    }
+    WebRtc_Word32 error = 0;
+    if(!_videoOnly)
+    {
+        if(!_firstAudioFrameReceived)
+        {
+            // Video and audio can only be synchronized if both have been
+            // received.
+            return true;
+        }
+        error = ProcessAudio();
+
+        // Write video frames until video has caught up with the audio time.
+        while (_writtenAudioMS > _writtenVideoMS)
+        {
+            error = EncodeAndWriteVideoToFile( *frameToProcess);
+            if( error != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+                        "AviRecorder::Process() error writing to file.");
+                break;
+            } else {
+                WebRtc_UWord32 frameLengthMS = 1000 /
+                    _videoCodecInst.maxFramerate;
+                _writtenVideoFramesCounter++;
+                _writtenVideoMS += frameLengthMS;
+                // A full seconds worth of frames have been written.
+                if(_writtenVideoFramesCounter%_videoCodecInst.maxFramerate == 0)
+                {
+                    // Frame rate is in frames per seconds. Frame length is
+                    // calculated as an integer division which means it may
+                    // be rounded down. Compensate for this every second.
+                    WebRtc_UWord32 rest = 1000 % frameLengthMS;
+                    _writtenVideoMS += rest;
+                }
+            }
+        }
+    } else {
+        // Frame rate is in frames per seconds. Frame length is calculated as an
+        // integer division which means it may be rounded down. This introduces
+        // drift. Once a full frame worth of drift has happened, skip writing
+        // one frame. Note that frame rate is in frames per second so the
+        // drift is completely compensated for.
+        WebRtc_UWord32 frameLengthMS = 1000/_videoCodecInst.maxFramerate;
+        WebRtc_UWord32 restMS = 1000 % frameLengthMS;
+        // restMS == 0 means frameLengthMS divides 1000 evenly: no drift, so
+        // never skip a frame (and never divide or take modulo by zero).
+        WebRtc_UWord32 frameSkip = (restMS > 0) ?
+            (_videoCodecInst.maxFramerate * frameLengthMS) / restMS : 0;
+
+        _writtenVideoFramesCounter++;
+        if((frameSkip > 0) && (_writtenVideoFramesCounter % frameSkip == 0))
+        {
+            _writtenVideoMS += frameLengthMS;
+            return true;
+        }
+
+        error = EncodeAndWriteVideoToFile( *frameToProcess);
+        if(error != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+                    "AviRecorder::Process() error writing to file.");
+        } else {
+            _writtenVideoMS += frameLengthMS;
+        }
+    }
+    return error == 0;
+}
+
+// Scales |videoFrame| to the configured resolution, encodes it (I420 is
+// copied raw — no encoding needed), and writes the result to the AVI file.
+// Returns -1 on scaling, encoding or file-write failure.
+WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame)
+{
+    if(!IsRecording() || (videoFrame.Length() == 0))
+    {
+        return -1;
+    }
+
+    if(_frameScaler->ResizeFrameIfNeeded(&videoFrame, _videoCodecInst.width,
+                                         _videoCodecInst.height) != 0)
+    {
+        return -1;
+    }
+
+    _videoEncodedData.payloadSize = 0;
+
+    if( STR_CASE_CMP(_videoCodecInst.plName, "I420") == 0)
+    {
+        _videoEncodedData.VerifyAndAllocate(videoFrame.Length());
+
+        // I420 is raw data. No encoding needed (each sample is represented by
+        // 1 byte so there is no difference depending on endianness).
+        memcpy(_videoEncodedData.payloadData, videoFrame.Buffer(),
+               videoFrame.Length());
+
+        _videoEncodedData.payloadSize = videoFrame.Length();
+        // Every raw frame is self-contained, i.e. a key frame.
+        _videoEncodedData.frameType = kVideoFrameKey;
+    }else {
+        if( _videoEncoder->Encode(videoFrame, _videoEncodedData) != 0)
+        {
+            return -1;
+        }
+    }
+
+    if(_videoEncodedData.payloadSize > 0)
+    {
+        if(_moduleFile->IncomingAVIVideoData(
+               (WebRtc_Word8*)(_videoEncodedData.payloadData),
+               _videoEncodedData.payloadSize))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+                         "Error writing AVI file");
+            return -1;
+        }
+    } else {
+        // Encoder produced no output: frame dropped (not treated as fatal).
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceVideo,
+            _instanceID,
+            "FileRecorder::RecordVideoToFile() frame dropped by encoder bitrate\
+ likely to low.");
+    }
+    return 0;
+}
+
+// Store audio frame in the _audioFramesToWrite buffer. The writing to file
+// happens in AviRecorder::Process(), which is woken via _timeEvent below.
+// Returns -1 when not recording, the chunk is oversized, the recording is
+// video-only, or the queue overflowed (which also stops the recording).
+WebRtc_Word32 AviRecorder::WriteEncodedAudioData(
+    const WebRtc_Word8* audioBuffer,
+    WebRtc_UWord16 bufferLength,
+    WebRtc_UWord16 millisecondsOfData,
+    const TickTime* playoutTS)
+{
+    if (!IsRecording())
+    {
+        return -1;
+    }
+    if (bufferLength > MAX_AUDIO_BUFFER_IN_BYTES)
+    {
+        return -1;
+    }
+    if (_videoOnly)
+    {
+        return -1;
+    }
+    if (_audioFramesToWrite.GetSize() > kMaxAudioBufferQueueLength)
+    {
+        // Writer thread cannot keep up; abort rather than grow unboundedly.
+        StopRecording();
+        return -1;
+    }
+    _firstAudioFrameReceived = true;
+
+    if(playoutTS)
+    {
+        _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
+                                                            bufferLength,
+                                                            millisecondsOfData,
+                                                            *playoutTS));
+    } else {
+        // No timestamp supplied: fall back to "now".
+        _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
+                                                            bufferLength,
+                                                            millisecondsOfData,
+                                                            TickTime::Now()));
+    }
+    _timeEvent.Set();
+    return 0;
+}
+
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
diff --git a/trunk/src/modules/utility/source/file_recorder_impl.h b/trunk/src/modules/utility/source/file_recorder_impl.h
new file mode 100644
index 0000000..3684166
--- /dev/null
+++ b/trunk/src/modules/utility/source/file_recorder_impl.h
@@ -0,0 +1,164 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains a class that can write audio and/or video to file in
+// multiple file formats. The unencoded input data is written to file in the
+// encoded format specified.
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+
+#include "coder.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "event_wrapper.h"
+#include "file_recorder.h"
+#include "media_file_defines.h"
+#include "media_file.h"
+#include "module_common_types.h"
+#include "resampler.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "frame_scaler.h"
+    #include "video_coder.h"
+    #include "video_frames_queue.h"
+#endif
+
+namespace webrtc {
+// The largest decoded frame size in samples (60ms with 32kHz sample rate).
+enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
+// Byte size of the above, assuming 16-bit (2-byte) samples.
+enum { MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
+// Max queued audio chunks before AviRecorder aborts the recording.
+enum { kMaxAudioBufferQueueLength = 100 };
+
+// Records audio to file in the encoded format selected at construction.
+// Video methods are stubbed out (-1); AviRecorder below overrides them.
+class FileRecorderImpl : public FileRecorder
+{
+public:
+    FileRecorderImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    virtual ~FileRecorderImpl();
+
+    // FileRecorder functions.
+    virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+    virtual FileFormats RecordingFileFormat() const;
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        const char* fileName,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notificationTimeMs,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage);
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        OutStream& destStream,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notificationTimeMs,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage);
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool IsRecording() const;
+    virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const;
+    virtual WebRtc_Word32 RecordAudioToFile(
+        const AudioFrame& frame,
+        const TickTime* playoutTS = NULL);
+    // Video recording is not supported by the audio-only base class.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const CodecInst& audioCodecInst,
+        const VideoCodec& videoCodecInst,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage,
+        bool videoOnly = false)
+    {
+        return -1;
+    }
+    virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame)
+    {
+        return -1;
+    }
+
+protected:
+    // Sink for encoded audio; overridden by AviRecorder to queue for A/V sync.
+    virtual WebRtc_Word32 WriteEncodedAudioData(
+        const WebRtc_Word8* audioBuffer,
+        WebRtc_UWord16 bufferLength,
+        WebRtc_UWord16 millisecondsOfData,
+        const TickTime* playoutTS);
+
+    WebRtc_Word32 SetUpAudioEncoder();
+
+    WebRtc_UWord32 _instanceID;
+    FileFormats _fileFormat;
+    MediaFile* _moduleFile;
+
+private:
+    OutStream* _stream;
+    CodecInst codec_info_;      // Active codec; plfreq == 0 => not recording.
+    ACMAMRPackingFormat _amrFormat;
+
+    // Scratch buffer for one encoded/resampled audio chunk.
+    WebRtc_Word8 _audioBuffer[MAX_AUDIO_BUFFER_IN_BYTES];
+    AudioCoder _audioEncoder;
+    Resampler _audioResampler;
+};
+
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+// Records synchronized audio and video to an AVI file. A worker thread
+// (Run/Process) performs the actual file writes asynchronously.
+class AviRecorder : public FileRecorderImpl
+{
+public:
+    AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    virtual ~AviRecorder();
+
+    // FileRecorder functions.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const CodecInst& audioCodecInst,
+        const VideoCodec& videoCodecInst,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage,
+        bool videoOnly = false);
+    virtual WebRtc_Word32 StopRecording();
+    virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame);
+
+protected:
+    // Queues audio for the worker thread instead of writing directly.
+    virtual WebRtc_Word32 WriteEncodedAudioData(
+        const WebRtc_Word8*  audioBuffer,
+        WebRtc_UWord16 bufferLength,
+        WebRtc_UWord16 millisecondsOfData,
+        const TickTime* playoutTS);
+private:
+    static bool Run(ThreadObj threadObj);  // Thread entry; calls Process().
+    bool Process();
+
+    bool StartThread();
+    bool StopThread();
+
+    WebRtc_Word32 EncodeAndWriteVideoToFile(VideoFrame& videoFrame);
+    WebRtc_Word32 ProcessAudio();
+
+    WebRtc_Word32 CalcI420FrameSize() const;
+    WebRtc_Word32 SetUpVideoEncoder();
+
+    VideoCodec _videoCodecInst;
+    bool _videoOnly;
+
+    // Pending audio chunks (AudioFrameFileInfo*) awaiting the worker thread.
+    ListWrapper _audioFramesToWrite;
+    bool _firstAudioFrameReceived;
+
+    // Pending video frames awaiting the worker thread.
+    VideoFramesQueue* _videoFramesQueue;
+
+    FrameScaler* _frameScaler;
+    VideoCoder* _videoEncoder;
+    WebRtc_Word32 _videoMaxPayloadSize;
+    EncodedVideoData _videoEncodedData;
+
+    ThreadWrapper* _thread;
+    EventWrapper& _timeEvent;   // Wakes the worker (audio push or timer).
+    CriticalSectionWrapper* _critSec;
+    WebRtc_Word64 _writtenVideoFramesCounter;
+    WebRtc_Word64 _writtenAudioMS;  // Audio written so far, in milliseconds.
+    WebRtc_Word64 _writtenVideoMS;  // Video written so far, in milliseconds.
+};
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
diff --git a/trunk/src/modules/utility/source/frame_scaler.cc b/trunk/src/modules/utility/source/frame_scaler.cc
new file mode 100644
index 0000000..c012e89
--- /dev/null
+++ b/trunk/src/modules/utility/source/frame_scaler.cc
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/utility/source/frame_scaler.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "common_video/libyuv/include/scaler.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+FrameScaler::FrameScaler()
+    : scaler_(new Scaler()),
+      scaled_frame_() {}
+
+FrameScaler::~FrameScaler() {}
+
+int FrameScaler::ResizeFrameIfNeeded(VideoFrame* video_frame,
+                                     WebRtc_UWord32 out_width,
+                                     WebRtc_UWord32 out_height) {
+  if (video_frame->Length() == 0) {
+    return -1;
+  }
+
+  if ((video_frame->Width() != out_width) ||
+      (video_frame->Height() != out_height)) {
+    // Set correct scale settings and scale |video_frame| into |scaled_frame_|.
+    scaler_->Set(video_frame->Width(), video_frame->Height(), out_width,
+                 out_height, kI420, kI420, kScaleBox);
+    int out_length = CalcBufferSize(kI420, out_width, out_height);
+    scaled_frame_.VerifyAndAllocate(out_length);
+    int ret = scaler_->Scale(video_frame->Buffer(), scaled_frame_.Buffer(),
+                             out_length);
+    if (ret < 0) {
+      return ret;
+    }
+
+    scaled_frame_.SetWidth(out_width);
+    scaled_frame_.SetHeight(out_height);
+    scaled_frame_.SetLength(out_length);
+    scaled_frame_.SetRenderTime(video_frame->RenderTimeMs());
+    scaled_frame_.SetTimeStamp(video_frame->TimeStamp());
+    video_frame->SwapFrame(scaled_frame_);
+  }
+  return 0;
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/trunk/src/modules/utility/source/frame_scaler.h b/trunk/src/modules/utility/source/frame_scaler.h
new file mode 100644
index 0000000..f86a933
--- /dev/null
+++ b/trunk/src/modules/utility/source/frame_scaler.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file implements a class that can be used for scaling frames.
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class Scaler;
+class VideoFrame;
+
+class FrameScaler {
+ public:
+    FrameScaler();
+    ~FrameScaler();
+
+    // Re-sizes |video_frame| so that it has the width |out_width| and height
+    // |out_height|.
+    int ResizeFrameIfNeeded(VideoFrame* video_frame,
+                            WebRtc_UWord32 out_width,
+                            WebRtc_UWord32 out_height);
+
+ private:
+    scoped_ptr<Scaler> scaler_;
+    VideoFrame scaled_frame_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULE_UTILITY_VIDEO
+
+#endif  // WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
diff --git a/trunk/src/modules/utility/source/process_thread_impl.cc b/trunk/src/modules/utility/source/process_thread_impl.cc
new file mode 100644
index 0000000..9028c44
--- /dev/null
+++ b/trunk/src/modules/utility/source/process_thread_impl.cc
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "process_thread_impl.h"
+#include "module.h"
+#include "trace.h"
+
+namespace webrtc {
+ProcessThread::~ProcessThread()
+{
+}
+
+ProcessThread* ProcessThread::CreateProcessThread()
+{
+    return new ProcessThreadImpl();
+}
+
+void ProcessThread::DestroyProcessThread(ProcessThread* module)
+{
+    delete module;
+}
+
+ProcessThreadImpl::ProcessThreadImpl()
+    : _timeEvent(*EventWrapper::Create()),
+      _critSectModules(CriticalSectionWrapper::CreateCriticalSection()),
+      _thread(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
+}
+
+ProcessThreadImpl::~ProcessThreadImpl()
+{
+    delete _critSectModules;
+    delete &_timeEvent;
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 ProcessThreadImpl::Start()
+{
+    CriticalSectionScoped lock(_critSectModules);
+    if(_thread)
+    {
+        return -1;
+    }
+    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+                                          "ProcessThread");
+    unsigned int id;
+    WebRtc_Word32 retVal = _thread->Start(id);
+    if(retVal >= 0)
+    {
+        return 0;
+    }
+    delete _thread;
+    _thread = NULL;
+    return -1;
+}
+
+WebRtc_Word32 ProcessThreadImpl::Stop()
+{
+    _critSectModules->Enter();
+    if(_thread)
+    {
+        _thread->SetNotAlive();
+
+        ThreadWrapper* thread = _thread;
+        _thread = NULL;
+
+        _timeEvent.Set();
+        _critSectModules->Leave();
+
+        if(thread->Stop())
+        {
+            delete thread;
+        } else {
+            return -1;
+        }
+    } else {
+        _critSectModules->Leave();
+    }
+    return 0;
+}
+
+WebRtc_Word32 ProcessThreadImpl::RegisterModule(const Module* module)
+{
+    CriticalSectionScoped lock(_critSectModules);
+
+    // Only allow module to be registered once.
+    ListItem* item = _modules.First();
+    for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+    {
+        if(module == item->GetItem())
+        {
+            return -1;
+        }
+        item = _modules.Next(item);
+    }
+
+    _modules.PushFront(module);
+    WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
+                 "number of registered modules has increased to %d",
+                 _modules.GetSize());
+    // Wake the thread calling ProcessThreadImpl::Process() to update the
+    // waiting time. The waiting time for the just registered module may be
+    // shorter than all other registered modules.
+    _timeEvent.Set();
+    return 0;
+}
+
+WebRtc_Word32 ProcessThreadImpl::DeRegisterModule(const Module* module)
+{
+    CriticalSectionScoped lock(_critSectModules);
+
+    ListItem* item = _modules.First();
+    for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+    {
+        if(module == item->GetItem())
+        {
+            int res = _modules.Erase(item);
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
+                         "number of registered modules has decreased to %d",
+                         _modules.GetSize());
+            return res;
+        }
+        item = _modules.Next(item);
+    }
+    return -1;
+}
+
+bool ProcessThreadImpl::Run(void* obj)
+{
+    return static_cast<ProcessThreadImpl*>(obj)->Process();
+}
+
+bool ProcessThreadImpl::Process()
+{
+    // Wait for the module that should be called next, but don't block thread
+    // longer than 100 ms.
+    WebRtc_Word32 minTimeToNext = 100;
+    {
+        CriticalSectionScoped lock(_critSectModules);
+        ListItem* item = _modules.First();
+        for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+        {
+            WebRtc_Word32 timeToNext =
+                static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+            if(minTimeToNext > timeToNext)
+            {
+                minTimeToNext = timeToNext;
+            }
+            item = _modules.Next(item);
+        }
+    }
+
+    if(minTimeToNext > 0)
+    {
+        if(kEventError == _timeEvent.Wait(minTimeToNext))
+        {
+            return true;
+        }
+        if(!_thread)
+        {
+            return false;
+        }
+    }
+    {
+        CriticalSectionScoped lock(_critSectModules);
+        ListItem* item = _modules.First();
+        for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+        {
+            WebRtc_Word32 timeToNext =
+                static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+            if(timeToNext < 1)
+            {
+                static_cast<Module*>(item->GetItem())->Process();
+            }
+            item = _modules.Next(item);
+        }
+    }
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/utility/source/process_thread_impl.h b/trunk/src/modules/utility/source/process_thread_impl.h
new file mode 100644
index 0000000..79b1272
--- /dev/null
+++ b/trunk/src/modules/utility/source/process_thread_impl.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "process_thread.h"
+#include "thread_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class ProcessThreadImpl : public ProcessThread
+{
+public:
+    ProcessThreadImpl();
+    virtual ~ProcessThreadImpl();
+
+    virtual WebRtc_Word32 Start();
+    virtual WebRtc_Word32 Stop();
+
+    virtual WebRtc_Word32 RegisterModule(const Module* module);
+    virtual WebRtc_Word32 DeRegisterModule(const Module* module);
+
+protected:
+    static bool Run(void* obj);
+
+    bool Process();
+
+private:
+    EventWrapper&           _timeEvent;
+    CriticalSectionWrapper* _critSectModules;
+    ListWrapper             _modules;
+    ThreadWrapper*          _thread;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
diff --git a/trunk/src/modules/utility/source/rtp_dump_impl.cc b/trunk/src/modules/utility/source/rtp_dump_impl.cc
new file mode 100644
index 0000000..69a52ec
--- /dev/null
+++ b/trunk/src/modules/utility/source/rtp_dump_impl.cc
@@ -0,0 +1,282 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_dump_impl.h"
+
+#include <cassert>
+#include <stdio.h>
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#if defined(_WIN32)
+#include <Windows.h>
+#include <mmsystem.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <string.h>
+#include <sys/time.h>
+#include <time.h>
+#endif
+
+#if (defined(_DEBUG) && defined(_WIN32))
+#define DEBUG_PRINT(expr)   OutputDebugString(##expr)
+#define DEBUG_PRINTP(expr, p)   \
+{                               \
+    char msg[128];              \
+    sprintf(msg, ##expr, p);    \
+    OutputDebugString(msg);     \
+}
+#else
+#define DEBUG_PRINT(expr)    ((void)0)
+#define DEBUG_PRINTP(expr,p) ((void)0)
+#endif  // defined(_DEBUG) && defined(_WIN32)
+
+namespace webrtc {
+const char RTPFILE_VERSION[] = "1.0";
+const WebRtc_UWord32 MAX_UWORD32 = 0xffffffff;
+
+// This structure is specified in the rtpdump documentation.
+// This struct corresponds to RD_packet_t in
+// http://www.cs.columbia.edu/irt/software/rtptools/
+typedef struct
+{
+    // Length of packet, including this header (may be smaller than plen if not
+    // whole packet recorded).
+    WebRtc_UWord16 length;
+    // Actual header+payload length for RTP, 0 for RTCP.
+    WebRtc_UWord16 plen;
+    // Milliseconds since the start of recording.
+    WebRtc_UWord32 offset;
+} rtpDumpPktHdr_t;
+
+RtpDump* RtpDump::CreateRtpDump()
+{
+    return new RtpDumpImpl();
+}
+
+void RtpDump::DestroyRtpDump(RtpDump* object)
+{
+    delete object;
+}
+
+RtpDumpImpl::RtpDumpImpl()
+    : _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _file(*FileWrapper::Create()),
+      _startTime(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
+}
+
+RtpDump::~RtpDump()
+{
+}
+
+RtpDumpImpl::~RtpDumpImpl()
+{
+    _file.Flush();
+    _file.CloseFile();
+    delete &_file;
+    delete _critSect;
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 RtpDumpImpl::Start(const char* fileNameUTF8)
+{
+
+    if (fileNameUTF8 == NULL)
+    {
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_critSect);
+    _file.Flush();
+    _file.CloseFile();
+    if (_file.OpenFile(fileNameUTF8, false, false, false) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "failed to open the specified file");
+        return -1;
+    }
+
+    // Store start of RTP dump (to be used for offset calculation later).
+    _startTime = GetTimeInMS();
+
+    // All rtp dump files start with #!rtpplay.
+    char magic[16];
+    sprintf(magic, "#!rtpplay%s \n", RTPFILE_VERSION);
+    if (_file.WriteText(magic) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+
+    // The header according to the rtpdump documentation is sizeof(RD_hdr_t)
+    // which is 8 + 4 + 2 = 14 bytes for 32-bit architecture (and 22 bytes on
+    // 64-bit architecture). However, Wireshark uses 16 bytes for the header
+    // regardless of if the binary is 32-bit or 64-bit. Go by the same approach
+    // as Wireshark since it makes more sense.
+    // http://wiki.wireshark.org/rtpdump explains that an additional 2 bytes
+    // of padding should be added to the header.
+    char dummyHdr[16];
+    memset(dummyHdr, 0, 16);
+    if (!_file.Write(dummyHdr, sizeof(dummyHdr)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 RtpDumpImpl::Stop()
+{
+    CriticalSectionScoped lock(_critSect);
+    _file.Flush();
+    _file.CloseFile();
+    return 0;
+}
+
+bool RtpDumpImpl::IsActive() const
+{
+    CriticalSectionScoped lock(_critSect);
+    return _file.Open();
+}
+
+WebRtc_Word32 RtpDumpImpl::DumpPacket(const WebRtc_UWord8* packet,
+                                      WebRtc_UWord16 packetLength)
+{
+    CriticalSectionScoped lock(_critSect);
+    if (!IsActive())
+    {
+        return 0;
+    }
+
+    if (packet == NULL)
+    {
+        return -1;
+    }
+
+    if (packetLength < 1)
+    {
+        return -1;
+    }
+
+    // If the packet doesn't contain a valid RTCP header the packet will be
+    // considered RTP (without further verification).
+    bool isRTCP = RTCP(packet);
+
+    rtpDumpPktHdr_t hdr;
+    WebRtc_UWord32 offset;
+
+    // Offset is relative to when recording was started.
+    offset = GetTimeInMS();
+    if (offset < _startTime)
+    {
+        // Compensate for wraparound.
+        offset += MAX_UWORD32 - _startTime + 1;
+    } else {
+        offset -= _startTime;
+    }
+    hdr.offset = RtpDumpHtonl(offset);
+
+    hdr.length = RtpDumpHtons((WebRtc_UWord16)(packetLength + sizeof(hdr)));
+    if (isRTCP)
+    {
+        hdr.plen = 0;
+    }
+    else
+    {
+        hdr.plen = RtpDumpHtons((WebRtc_UWord16)packetLength);
+    }
+
+    if (!_file.Write(&hdr, sizeof(hdr)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+    if (!_file.Write(packet, packetLength))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+
+    return 0;
+}
+
+bool RtpDumpImpl::RTCP(const WebRtc_UWord8* packet) const
+{
+    const WebRtc_UWord8 payloadType = packet[1];
+    bool is_rtcp = false;
+
+    switch(payloadType)
+    {
+    case 192:
+        is_rtcp = true;
+        break;
+    case 193: case 195:
+        break;
+    case 200: case 201: case 202: case 203:
+    case 204: case 205: case 206: case 207:
+        is_rtcp = true;
+        break;
+    }
+    return is_rtcp;
+}
+
+// TODO (hellner): why is TickUtil not used here?
+inline WebRtc_UWord32 RtpDumpImpl::GetTimeInMS() const
+{
+#if defined(_WIN32)
+    return timeGetTime();
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    struct timeval tv;
+    struct timezone tz;
+    unsigned long val;
+
+    gettimeofday(&tv, &tz);
+    val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
+    return val;
+#else
+    #error Either _WIN32 or WEBRTC_LINUX or WEBRTC_MAC has to be defined!
+    assert(false);
+    return 0;
+#endif
+}
+
+inline WebRtc_UWord32 RtpDumpImpl::RtpDumpHtonl(WebRtc_UWord32 x) const
+{
+#if defined(WEBRTC_BIG_ENDIAN)
+    return x;
+#elif defined(WEBRTC_LITTLE_ENDIAN)
+    return (x >> 24) + ((((x >> 16) & 0xFF) << 8) + ((((x >> 8) & 0xFF) << 16) +
+                                                     ((x & 0xFF) << 24)));
+#else
+#error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined!
+    assert(false);
+    return 0;
+#endif
+}
+
+inline WebRtc_UWord16 RtpDumpImpl::RtpDumpHtons(WebRtc_UWord16 x) const
+{
+#if defined(WEBRTC_BIG_ENDIAN)
+    return x;
+#elif defined(WEBRTC_LITTLE_ENDIAN)
+    return (x >> 8) + ((x & 0xFF) << 8);
+#else
+    #error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined!
+    assert(false);
+    return 0;
+#endif
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/utility/source/rtp_dump_impl.h b/trunk/src/modules/utility/source/rtp_dump_impl.h
new file mode 100644
index 0000000..9715c35
--- /dev/null
+++ b/trunk/src/modules/utility/source/rtp_dump_impl.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
+
+#include "rtp_dump.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class FileWrapper;
+class RtpDumpImpl : public RtpDump
+{
+public:
+    RtpDumpImpl();
+    virtual ~RtpDumpImpl();
+
+    virtual WebRtc_Word32 Start(const char* fileNameUTF8);
+    virtual WebRtc_Word32 Stop();
+    virtual bool IsActive() const;
+    virtual WebRtc_Word32 DumpPacket(const WebRtc_UWord8* packet,
+                                     WebRtc_UWord16 packetLength);
+private:
+    // Return the system time in ms.
+    inline WebRtc_UWord32 GetTimeInMS() const;
+    // Return x in network byte order (big endian).
+    inline WebRtc_UWord32 RtpDumpHtonl(WebRtc_UWord32 x) const;
+    // Return x in network byte order (big endian).
+    inline WebRtc_UWord16 RtpDumpHtons(WebRtc_UWord16 x) const;
+
+    // Return true if the packet starts with a valid RTCP header.
+    // Note: See ModuleRTPUtility::RTPHeaderParser::RTCP() for details on how
+    //       to determine if the packet is an RTCP packet.
+    bool RTCP(const WebRtc_UWord8* packet) const;
+
+private:
+    CriticalSectionWrapper* _critSect;
+    FileWrapper& _file;
+    WebRtc_UWord32 _startTime;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
diff --git a/trunk/src/modules/utility/source/utility.gypi b/trunk/src/modules/utility/source/utility.gypi
new file mode 100644
index 0000000..7dcce8e
--- /dev/null
+++ b/trunk/src/modules/utility/source/utility.gypi
@@ -0,0 +1,92 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_utility',
+      'type': '<(library)',
+      'dependencies': [
+        'audio_coding_module',
+        '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+        '../../media_file/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+          '../../audio_coding/main/interface',
+        ],
+      },
+      'sources': [
+        '../interface/file_player.h',
+        '../interface/file_recorder.h',
+        '../interface/process_thread.h',
+        '../interface/rtp_dump.h',
+        'coder.cc',
+        'coder.h',
+        'file_player_impl.cc',
+        'file_player_impl.h',
+        'file_recorder_impl.cc',
+        'file_recorder_impl.h',
+        'process_thread_impl.cc',
+        'process_thread_impl.h',
+        'rtp_dump_impl.cc',
+        'rtp_dump_impl.h',
+      ],
+      'conditions': [
+        ['enable_video==1', {
+          # Adds support for video recording.
+          'defines': [
+            'WEBRTC_MODULE_UTILITY_VIDEO',
+          ],
+          'dependencies': [
+            'webrtc_video_coding',
+          ],
+          'include_dirs': [
+            '../../video_coding/main/interface',
+          ],
+          'sources': [
+            'frame_scaler.cc',
+            'video_coder.cc',
+            'video_frames_queue.cc',
+          ],
+        }],
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'webrtc_utility_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'webrtc_utility',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'file_player_unittest.cc',
+          ],
+        }, # webrtc_utility_unittests
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/utility/source/video_coder.cc b/trunk/src/modules/utility/source/video_coder.cc
new file mode 100644
index 0000000..e17c3e0
--- /dev/null
+++ b/trunk/src/modules/utility/source/video_coder.cc
@@ -0,0 +1,152 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "video_coder.h"
+
+namespace webrtc {
+VideoCoder::VideoCoder(WebRtc_UWord32 instanceID)
+    : _instanceID( instanceID),
+      _vcm(VideoCodingModule::Create(instanceID)),
+      _decodedVideo(0)
+{
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+
+    _vcm->RegisterTransportCallback(this);
+    _vcm->RegisterReceiveCallback(this);
+}
+
+VideoCoder::~VideoCoder()
+{
+    VideoCodingModule::Destroy(_vcm);
+}
+
+WebRtc_Word32 VideoCoder::ResetDecoder()
+{
+    _vcm->ResetDecoder();
+
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+
+    _vcm->RegisterTransportCallback(this);
+    _vcm->RegisterReceiveCallback(this);
+    return 0;
+}
+
+WebRtc_Word32 VideoCoder::SetEncodeCodec(VideoCodec& videoCodecInst,
+                                         WebRtc_UWord32 numberOfCores,
+                                         WebRtc_UWord32 maxPayloadSize)
+{
+    if(_vcm->RegisterSendCodec(&videoCodecInst, numberOfCores,
+                               maxPayloadSize) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
+                                         WebRtc_Word32 numberOfCores)
+{
+    if (videoCodecInst.plType == 0)
+    {
+        WebRtc_Word8 plType = DefaultPayloadType(videoCodecInst.plName);
+        if (plType == -1)
+        {
+            return -1;
+        }
+        videoCodecInst.plType = plType;
+    }
+
+    if(_vcm->RegisterReceiveCodec(&videoCodecInst, numberOfCores) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo,
+                                 const EncodedVideoData& encodedData)
+{
+    decodedVideo.SetLength(0);
+    if(encodedData.payloadSize <= 0)
+    {
+        return -1;
+    }
+
+    _decodedVideo = &decodedVideo;
+    if(_vcm->DecodeFromStorage(encodedData) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::Encode(const VideoFrame& videoFrame,
+                                 EncodedVideoData& videoEncodedData)
+{
+    // The AddVideoFrame(..) call will (indirectly) call SendData(). Store a
+    // pointer to videoEncodedData so that it can be updated.
+    _videoEncodedData = &videoEncodedData;
+    videoEncodedData.payloadSize = 0;
+    if(_vcm->AddVideoFrame(videoFrame) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word8 VideoCoder::DefaultPayloadType(const char* plName)
+{
+    VideoCodec tmpCodec;
+    WebRtc_Word32 numberOfCodecs = _vcm->NumberOfCodecs();
+    for (WebRtc_UWord8 i = 0; i < numberOfCodecs; i++)
+    {
+        _vcm->Codec(i, &tmpCodec);
+        if(strncmp(tmpCodec.plName, plName, kPayloadNameSize) == 0)
+        {
+            return tmpCodec.plType;
+        }
+    }
+    return -1;
+}
+
+WebRtc_Word32 VideoCoder::FrameToRender(VideoFrame& videoFrame)
+{
+    return _decodedVideo->CopyFrame(videoFrame);
+}
+
+WebRtc_Word32 VideoCoder::SendData(
+    FrameType frameType,
+    WebRtc_UWord8  payloadType,
+    WebRtc_UWord32 timeStamp,
+    const WebRtc_UWord8* payloadData,
+    WebRtc_UWord32 payloadSize,
+    const RTPFragmentationHeader& fragmentationHeader,
+    const RTPVideoHeader* /*rtpVideoHdr*/)
+{
+    // Store the data in _videoEncodedData which points to the
+    // videoEncodedData argument of Encode(..).
+    _videoEncodedData->VerifyAndAllocate(payloadSize);
+    _videoEncodedData->frameType = frameType;
+    _videoEncodedData->payloadType = payloadType;
+    _videoEncodedData->timeStamp = timeStamp;
+    _videoEncodedData->fragmentationHeader = fragmentationHeader;
+    memcpy(_videoEncodedData->payloadData, payloadData,
+           sizeof(WebRtc_UWord8) * payloadSize);
+    _videoEncodedData->payloadSize = payloadSize;
+    return 0;
+}
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/trunk/src/modules/utility/source/video_coder.h b/trunk/src/modules/utility/source/video_coder.h
new file mode 100644
index 0000000..5c4b0ae
--- /dev/null
+++ b/trunk/src/modules/utility/source/video_coder.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "video_coding.h"
+
+namespace webrtc {
+class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
+{
+public:
+    VideoCoder(WebRtc_UWord32 instanceID);
+    ~VideoCoder();
+
+    WebRtc_Word32 ResetDecoder();
+
+    WebRtc_Word32 SetEncodeCodec(VideoCodec& videoCodecInst,
+                                 WebRtc_UWord32 numberOfCores,
+                                 WebRtc_UWord32 maxPayloadSize);
+
+
+    // Select the codec that should be used for decoding. videoCodecInst.plType
+    // will be set to the codec's default payload type.
+    WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst,
+                                 WebRtc_Word32 numberOfCores);
+
+    WebRtc_Word32 Decode(VideoFrame& decodedVideo,
+                         const EncodedVideoData& encodedData);
+
+    WebRtc_Word32 Encode(const VideoFrame& videoFrame,
+                         EncodedVideoData& videoEncodedData);
+
+    WebRtc_Word8 DefaultPayloadType(const char* plName);
+
+private:
+    // VCMReceiveCallback function.
+    // Note: called by VideoCodingModule when decoding finished.
+    WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
+
+    // VCMPacketizationCallback function.
+    // Note: called by VideoCodingModule when encoding finished.
+    WebRtc_Word32 SendData(
+        const FrameType /*frameType*/,
+        const WebRtc_UWord8 /*payloadType*/,
+        const WebRtc_UWord32 /*timeStamp*/,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& /* fragmentationHeader*/,
+        const RTPVideoHeader* rtpTypeHdr);
+
+    WebRtc_UWord32 _instanceID;
+    VideoCodingModule* _vcm;
+    VideoFrame* _decodedVideo;
+    EncodedVideoData* _videoEncodedData;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
diff --git a/trunk/src/modules/utility/source/video_frames_queue.cc b/trunk/src/modules/utility/source/video_frames_queue.cc
new file mode 100644
index 0000000..ab590c4
--- /dev/null
+++ b/trunk/src/modules/utility/source/video_frames_queue.cc
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_frames_queue.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include <cassert>
+
+#include "module_common_types.h"
+#include "tick_util.h"
+#include "trace.h"
+
+namespace webrtc {
+VideoFramesQueue::VideoFramesQueue()
+    : _incomingFrames(),
+      _renderDelayMs(10)
+{
+}
+
+VideoFramesQueue::~VideoFramesQueue()
+{
+    while (!_incomingFrames.Empty())
+    {
+        ListItem* item = _incomingFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
+            assert(ptrFrame != NULL);
+            ptrFrame->Free();
+            delete ptrFrame;
+        }
+        _incomingFrames.Erase(item);
+    }
+    while (!_emptyFrames.Empty())
+    {
+        ListItem* item = _emptyFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
+            assert(ptrFrame != NULL);
+            ptrFrame->Free();
+            delete ptrFrame;
+        }
+        _emptyFrames.Erase(item);
+    }
+}
+
+WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame)
+{
+    VideoFrame* ptrFrameToAdd = NULL;
+    // Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
+    if (!_emptyFrames.Empty())
+    {
+        ListItem* item = _emptyFrames.First();
+        if (item)
+        {
+            ptrFrameToAdd = static_cast<VideoFrame*>(item->GetItem());
+            _emptyFrames.Erase(item);
+        }
+    }
+    if (!ptrFrameToAdd)
+    {
+        if (_emptyFrames.GetSize() + _incomingFrames.GetSize() >
+            KMaxNumberOfFrames)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                         "%s: too many frames, limit: %d", __FUNCTION__,
+                         KMaxNumberOfFrames);
+            return -1;
+        }
+
+        WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
+                     "%s: allocating buffer %d", __FUNCTION__,
+                     _emptyFrames.GetSize() + _incomingFrames.GetSize());
+
+        ptrFrameToAdd = new VideoFrame();
+        if (!ptrFrameToAdd)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                         "%s: could not create new frame for", __FUNCTION__);
+            return -1;
+        }
+    }
+    ptrFrameToAdd->CopyFrame(newFrame);
+    _incomingFrames.PushBack(ptrFrameToAdd);
+    return 0;
+}
+
+// Find the most recent frame that has a VideoFrame::RenderTimeMs() that is
+// lower than current time in ms (TickTime::MillisecondTimestamp()).
+// Note _incomingFrames is sorted so that the oldest frame is first.
+// Recycle all frames that are older than the most recent frame.
+VideoFrame* VideoFramesQueue::FrameToRecord()
+{
+    VideoFrame* ptrRenderFrame = NULL;
+    ListItem* item = _incomingFrames.First();
+    while(item)
+    {
+        VideoFrame* ptrOldestFrameInList =
+            static_cast<VideoFrame*>(item->GetItem());
+        if (ptrOldestFrameInList->RenderTimeMs() <=
+            TickTime::MillisecondTimestamp() + _renderDelayMs)
+        {
+            if (ptrRenderFrame)
+            {
+                // List is traversed beginning to end. If ptrRenderFrame is not
+                // NULL it must be the first, and thus oldest, VideoFrame in the
+                // queue. It can be recycled.
+                ReturnFrame(ptrRenderFrame);
+                _incomingFrames.PopFront();
+            }
+            item = _incomingFrames.Next(item);
+            ptrRenderFrame = ptrOldestFrameInList;
+        }else
+        {
+            // All VideoFrames following this one will be even newer. No match
+            // will be found.
+            break;
+        }
+    }
+    return ptrRenderFrame;
+}
+
+WebRtc_Word32 VideoFramesQueue::ReturnFrame(VideoFrame* ptrOldFrame)
+{
+    ptrOldFrame->SetTimeStamp(0);
+    ptrOldFrame->SetWidth(0);
+    ptrOldFrame->SetHeight(0);
+    ptrOldFrame->SetRenderTime(0);
+    ptrOldFrame->SetLength(0);
+    _emptyFrames.PushBack(ptrOldFrame);
+    return 0;
+}
+
+//
+WebRtc_Word32 VideoFramesQueue::SetRenderDelay(WebRtc_UWord32 renderDelay)
+{
+     _renderDelayMs = renderDelay;
+     return 0;
+}
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/trunk/src/modules/utility/source/video_frames_queue.h b/trunk/src/modules/utility/source/video_frames_queue.h
new file mode 100644
index 0000000..6c9be1c
--- /dev/null
+++ b/trunk/src/modules/utility/source/video_frames_queue.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "list_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class VideoFrame;
+
+class VideoFramesQueue
+{
+public:
+    VideoFramesQueue();
+    ~VideoFramesQueue();
+
+    // Put newFrame (last) in the queue.
+    WebRtc_Word32 AddFrame(const VideoFrame& newFrame);
+
+    // Return the most current frame. I.e. the frame with the highest
+    // VideoFrame::RenderTimeMs() that is lower than
+    // TickTime::MillisecondTimestamp().
+    VideoFrame* FrameToRecord();
+
+    // Set the render delay estimate to renderDelay ms.
+    WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 renderDelay);
+
+protected:
+    // Make ptrOldFrame available for re-use. I.e. put it in the empty frames
+    // queue.
+    WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
+
+private:
+    // Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
+    // 300 frames correspond to 10 seconds worth of frames at 30 fps.
+    enum {KMaxNumberOfFrames = 300};
+
+    // List of VideoFrame pointers. The list is sorted in the order of when the
+    // VideoFrame was inserted into the list. The first VideoFrame in the list
+    // was inserted first.
+    ListWrapper    _incomingFrames;
+    // A list of frames that are free to be re-used.
+    ListWrapper    _emptyFrames;
+
+    // Estimated render delay.
+    WebRtc_UWord32 _renderDelayMs;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif  // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
diff --git a/trunk/src/modules/utility/test/testAPI.cc b/trunk/src/modules/utility/test/testAPI.cc
new file mode 100644
index 0000000..96664a3
--- /dev/null
+++ b/trunk/src/modules/utility/test/testAPI.cc
@@ -0,0 +1,368 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// testAPI.cpp : Defines the entry point for the console application.
+//
+// NOTES:
+//          1. MediaFile library and testAPI.cpp must be built in DEBUG mode for testing.
+//
+
+#include <iostream>
+#include <stdio.h>
+#include <assert.h>
+
+#ifdef WIN32
+    #include <windows.h>
+    #include <tchar.h>
+#endif
+
+#include "common_types.h"
+#include "trace.h"
+
+#include "Engineconfigurations.h"
+#include "media_file.h"
+#include "file_player.h"
+#include "file_recorder.h"
+
+
+bool notify = false, playing = false, recording = false;
+
+// callback class for FileModule
+class MyFileModuleCallback : public FileCallback
+{
+public:
+    virtual void PlayNotification( const WebRtc_Word32 id,
+                                   const WebRtc_UWord32 durationMs )
+    {
+        printf("\tReceived PlayNotification from module %ld, durationMs = %ld\n",
+               id, durationMs);
+        notify = true;
+    };
+
+    virtual void RecordNotification( const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 durationMs )
+    {
+        printf("\tReceived RecordNotification from module %ld, durationMs = %ld\n",
+               id, durationMs);
+        notify = true;
+    };
+
+    virtual void PlayFileEnded(const WebRtc_Word32 id)
+    {
+        printf("\tReceived PlayFileEnded notification from module %ld.\n", id);
+        playing = false;
+    };
+
+    virtual void RecordFileEnded(const WebRtc_Word32 id)
+    {
+        printf("\tReceived RecordFileEnded notification from module %ld.\n", id);
+        recording = false;
+    }
+};
+
+// main test app
+#ifdef WIN32
+int _tmain(int argc, _TCHAR* argv[])
+#else
+int main(int /*argc*/, char** /*argv*/)
+#endif
+{
+    Trace::CreateTrace();
+    Trace::SetTraceFile("testTrace.txt");
+    Trace::SetEncryptedTraceFile("testTraceDebug.txt");
+
+    int playId = 1;
+    int recordId = 2;
+
+    printf("Welcome to test of FilePlayer and FileRecorder\n");
+
+
+    ///////////////////////////////////////////////
+    //
+    // avi test case 1
+    //
+    ///////////////////////////////////////////////
+
+
+    // todo PW we need more AVI tests Mp4
+
+    {
+        FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(1, webrtc::kFileFormatAviFile));
+        FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(1, webrtc::kFileFormatAviFile));
+
+        const char* KFileName = "./tmpAviFileTestCase1_audioI420CIF30fps.avi";
+
+        printf("\tReading from an avi file and writing the information to another \n");
+        printf("\tin the same format (I420 CIF 30fps) \n");
+        printf("\t\t check file named %s\n", KFileName);
+
+        assert(filePlayer.StartPlayingVideoFile(
+           "../../../MediaFile/main/test/files/aviTestCase1_audioI420CIF30fps.avi",
+           false, false) == 0);
+
+        // init codecs
+         webrtc::VideoCodec videoCodec;
+        webrtc::VideoCodec recVideoCodec;
+        webrtc::CodecInst audioCodec;
+        assert(filePlayer.VideoCodec( videoCodec ) == 0);
+        assert(filePlayer.AudioCodec( audioCodec) == 0);
+
+        recVideoCodec = videoCodec;
+
+        assert( fileRecorder.StartRecordingVideoFile(KFileName,
+                                                     audioCodec,
+                                                     recVideoCodec) == 0);
+
+        assert(fileRecorder.IsRecording());
+
+        WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0);
+
+        webrtc::VideoFrame videoFrame;
+        videoFrame.VerifyAndAllocate(videoReadSize);
+
+        int  frameCount   = 0;
+        bool audioNotDone = true;
+        bool videoNotDone =    true;
+        AudioFrame audioFrame;
+
+        while( audioNotDone || videoNotDone)
+        {
+            if(filePlayer.TimeUntilNextVideoFrame() <= 0)
+            {
+                if(filePlayer.GetVideoFromFile( videoFrame) != 0)
+                {
+                    // no more video frames
+                    break;
+                }
+                frameCount++;
+                videoNotDone = ( videoFrame.Length() > 0);
+                videoFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+                if( videoNotDone)
+                {
+                    assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);
+                    ::Sleep(10);
+                }
+            }
+             WebRtc_UWord32 decodedDataLengthInSamples;
+            if( 0 !=  filePlayer.Get10msAudioFromFile( audioFrame._payloadData, decodedDataLengthInSamples, audioCodec.plfreq))
+            {
+                audioNotDone = false;
+            } else
+            {
+                audioFrame._frequencyInHz = filePlayer.Frequency();
+                audioFrame._payloadDataLengthInSamples = (WebRtc_UWord16)decodedDataLengthInSamples;
+                fileRecorder.RecordAudioToFile(audioFrame, &TickTime::Now());
+            }
+       }
+        ::Sleep(100);
+        assert(fileRecorder.StopRecording() == 0);
+        assert( !fileRecorder.IsRecording());
+        assert(frameCount == 135);
+        printf("\tGenerated %s\n\n", KFileName);
+    }
+    ///////////////////////////////////////////////
+    //
+    // avi test case 2
+    //
+    ///////////////////////////////////////////////
+    {
+        FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(2, webrtc::kFileFormatAviFile));
+        FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(2, webrtc::kFileFormatAviFile));
+
+        const char* KFileName = "./tmpAviFileTestCase2_audioI420CIF20fps.avi";
+
+        printf("\tWriting information to a avi file and check the written file by \n");
+        printf("\treopening it and control codec information.\n");
+        printf("\t\t check file named %s all frames should be light green.\n", KFileName);
+        // init codecs
+        webrtc::VideoCodec videoCodec;
+        webrtc::CodecInst      audioCodec;
+
+        memset(&videoCodec, 0, sizeof(videoCodec));
+
+        const char* KVideoCodecName = "I420";
+        strcpy(videoCodec.plName, KVideoCodecName);
+        videoCodec.plType    = 124;
+        videoCodec.maxFramerate = 20;
+        videoCodec.height    = 288;
+        videoCodec.width     = 352;
+
+        const char* KAudioCodecName = "PCMU";
+        strcpy(audioCodec.plname, KAudioCodecName);
+        audioCodec.pltype   = 0;
+        audioCodec.plfreq   = 8000;
+        audioCodec.pacsize  = 80;
+        audioCodec.channels = 1;
+        audioCodec.rate     = 64000;
+
+        assert( fileRecorder.StartRecordingVideoFile(
+            KFileName,
+            audioCodec,
+            videoCodec) == 0);
+
+        assert(fileRecorder.IsRecording());
+
+        const WebRtc_UWord32 KVideoWriteSize = static_cast< WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3) / 2);
+        webrtc::VideoFrame videoFrame;
+
+        // 10 ms
+        AudioFrame audioFrame;
+        audioFrame._payloadDataLengthInSamples = audioCodec.plfreq/100;
+        memset(audioFrame._payloadData, 0, 2*audioFrame._payloadDataLengthInSamples);
+        audioFrame._frequencyInHz = 8000;
+
+        // prepare the video frame
+        videoFrame.VerifyAndAllocate(KVideoWriteSize);
+        memset(videoFrame.Buffer(), 127, videoCodec.width * videoCodec.height);
+        memset(videoFrame.Buffer() +(videoCodec.width * videoCodec.height), 0, videoCodec.width * videoCodec.height/2);
+        videoFrame.SetLength(KVideoWriteSize);
+        videoFrame.SetHeight(videoCodec.height);
+        videoFrame.SetWidth(videoCodec.width);
+
+        // write avi file, with 20 video frames
+        const int KWriteNumFrames = 20;
+        int       writeFrameCount = 0;
+        while(writeFrameCount < KWriteNumFrames)
+        {
+            // add a video frame
+            assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);
+
+            // add 50 ms of audio
+            for(int i=0; i<5; i++)
+            {
+                assert( fileRecorder.RecordAudioToFile(audioFrame) == 0);
+            }// for i
+            writeFrameCount++;
+        }
+        ::Sleep(10); // enough time to write the queued data to the file
+        assert(writeFrameCount == 20);
+        assert(fileRecorder.StopRecording() == 0);
+        assert( ! fileRecorder.IsRecording());
+
+        assert(filePlayer.StartPlayingVideoFile(KFileName,false, false) == 0);
+        assert(filePlayer.IsPlayingFile( ));
+
+        // compare codecs read from file to the ones used when writing the file
+        webrtc::VideoCodec readVideoCodec;
+        assert(filePlayer.VideoCodec( readVideoCodec ) == 0);
+        assert(strcmp(readVideoCodec.plName, videoCodec.plName) == 0);
+        assert(readVideoCodec.width      == videoCodec.width);
+        assert(readVideoCodec.height     == videoCodec.height);
+        assert(readVideoCodec.maxFramerate  == videoCodec.maxFramerate);
+
+        webrtc::CodecInst readAudioCodec;
+        assert(filePlayer.AudioCodec( readAudioCodec) == 0);
+        assert(strcmp(readAudioCodec.plname, audioCodec.plname) == 0);
+        assert(readAudioCodec.pltype     == audioCodec.pltype);
+        assert(readAudioCodec.plfreq     == audioCodec.plfreq);
+        assert(readAudioCodec.pacsize    == audioCodec.pacsize);
+        assert(readAudioCodec.channels   == audioCodec.channels);
+        assert(readAudioCodec.rate       == audioCodec.rate);
+
+        assert(filePlayer.StopPlayingFile() == 0);
+        assert( ! filePlayer.IsPlayingFile());
+        printf("\tGenerated %s\n\n", KFileName);
+    }
+    ///////////////////////////////////////////////
+    //
+    // avi test case 3
+    //
+    ///////////////////////////////////////////////
+
+    {
+        FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(2, webrtc::kFileFormatAviFile));
+        FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(3, webrtc::kFileFormatAviFile));
+
+        printf("\tReading from an avi file and writing the information to another \n");
+        printf("\tin a different format (H.263 CIF 30fps) \n");
+        printf("\t\t check file named tmpAviFileTestCase1_audioH263CIF30fps.avi\n");
+
+        assert(filePlayer.StartPlayingVideoFile(
+           "../../../MediaFile/main/test/files/aviTestCase1_audioI420CIF30fps.avi",
+           false,
+           false) == 0);
+
+        // init codecs
+         webrtc::VideoCodec videoCodec;
+        webrtc::VideoCodec recVideoCodec;
+        webrtc::CodecInst      audioCodec;
+        assert(filePlayer.VideoCodec( videoCodec ) == 0);
+        assert(filePlayer.AudioCodec( audioCodec) == 0);
+        recVideoCodec = videoCodec;
+
+        memcpy(recVideoCodec.plName, "H263",5);
+        recVideoCodec.startBitrate = 1000;
+        recVideoCodec.codecSpecific.H263.quality = 1;
+        recVideoCodec.plType = 34;
+        recVideoCodec.codecType = webrtc::kVideoCodecH263;
+
+        assert( fileRecorder.StartRecordingVideoFile(
+            "./tmpAviFileTestCase1_audioH263CIF30fps.avi",
+            audioCodec,
+            recVideoCodec) == 0);
+
+        assert(fileRecorder.IsRecording());
+
+        WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0);
+
+        webrtc::VideoFrame videoFrame;
+        videoFrame.VerifyAndAllocate(videoReadSize);
+
+        int  videoFrameCount   = 0;
+        int  audioFrameCount   = 0;
+        bool audioNotDone = true;
+        bool videoNotDone =    true;
+        AudioFrame audioFrame;
+
+        while( audioNotDone || videoNotDone)
+        {
+            if(filePlayer.TimeUntilNextVideoFrame() <= 0)
+            {
+                if(filePlayer.GetVideoFromFile( videoFrame) != 0)
+                {
+                    break;
+                }
+                videoFrameCount++;
+                videoNotDone = ( videoFrame.Length() > 0);
+                if( videoNotDone)
+                {
+                    assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);
+                }
+            }
+
+            WebRtc_UWord32 decodedDataLengthInSamples;
+            if( 0 != filePlayer.Get10msAudioFromFile( audioFrame._payloadData, decodedDataLengthInSamples, audioCodec.plfreq))
+            {
+                audioNotDone = false;
+
+            } else
+            {
+                ::Sleep(5);
+                audioFrame._frequencyInHz = filePlayer.Frequency();
+                audioFrame._payloadDataLengthInSamples = (WebRtc_UWord16)decodedDataLengthInSamples;
+                assert(0 == fileRecorder.RecordAudioToFile(audioFrame));
+
+                audioFrameCount++;
+            }
+        }
+        assert(videoFrameCount == 135);
+        assert(audioFrameCount == 446); // we will start & stop with a video frame
+
+        assert(fileRecorder.StopRecording() == 0);
+        assert( !fileRecorder.IsRecording());
+        printf("\tGenerated ./tmpAviFileTestCase1_audioH263CIF30fps.avi\n\n");
+    }
+
+
+    printf("\nTEST completed.\n");
+
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/trunk/src/modules/video_capture/OWNERS b/trunk/src/modules/video_capture/OWNERS
new file mode 100644
index 0000000..9034747
--- /dev/null
+++ b/trunk/src/modules/video_capture/OWNERS
@@ -0,0 +1,4 @@
+mallinath@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/modules/video_capture/main/interface/video_capture.h b/trunk/src/modules/video_capture/main/interface/video_capture.h
new file mode 100644
index 0000000..e2f2f23
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/interface/video_capture.h
@@ -0,0 +1,160 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
+
+#include "module.h"
+#include "video_capture_defines.h"
+
+namespace webrtc {
+
+class VideoCaptureModule: public RefCountedModule {
+ public:
+  // Interface for receiving information about available camera devices.
+  class DeviceInfo {
+   public:
+    virtual WebRtc_UWord32 NumberOfDevices() = 0;
+
+    // Returns the available capture devices.
+    // deviceNumber   - Index of capture device.
+    // deviceNameUTF8 - Friendly name of the capture device.
+    // deviceUniqueIdUTF8 - Unique name of the capture device if it exists.
+    //                      Otherwise same as deviceNameUTF8.
+    // productUniqueIdUTF8 - Unique product id if it exists.
+    //                       Null terminated otherwise.
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber,
+        WebRtc_UWord8* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength,
+        WebRtc_UWord8* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        WebRtc_UWord8* productUniqueIdUTF8 = 0,
+        WebRtc_UWord32 productUniqueIdUTF8Length = 0) = 0;
+
+
+    // Returns the number of capabilities of this device.
+    virtual WebRtc_Word32 NumberOfCapabilities(
+        const WebRtc_UWord8* deviceUniqueIdUTF8) = 0;
+
+    // Gets the capabilities of the named device.
+    virtual WebRtc_Word32 GetCapability(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability) = 0;
+
+    // Gets clockwise angle the captured frames should be rotated in order
+    // to be displayed correctly on a normally rotated display.
+    virtual WebRtc_Word32 GetOrientation(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        VideoCaptureRotation& orientation) = 0;
+
+    // Gets the capability that best matches the requested width, height and
+    // frame rate.
+    // Returns the deviceCapabilityNumber on success.
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const WebRtc_UWord8*deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting) = 0;
+
+     // Display the OS / capture-device-specific settings dialog.
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        const WebRtc_UWord8* dialogTitleUTF8,
+        void* parentWindow,
+        WebRtc_UWord32 positionX,
+        WebRtc_UWord32 positionY) = 0;
+
+    virtual ~DeviceInfo() {}
+  };
+
+  class VideoCaptureEncodeInterface {
+   public:
+    virtual WebRtc_Word32 ConfigureEncoder(const VideoCodec& codec,
+                                           WebRtc_UWord32 maxPayloadSize) = 0;
+    // Inform the encoder about the new target bit rate.
+    //  - newBitRate       : New target bit rate in Kbit/s.
+    //  - frameRate        : The target frame rate.
+    virtual WebRtc_Word32 SetRates(WebRtc_Word32 newBitRate,
+                                   WebRtc_Word32 frameRate) = 0;
+    // Inform the encoder about the packet loss and the round-trip time.
+    //   - packetLoss   : Fraction lost
+    //                    (loss rate in percent = 100 * packetLoss / 255).
+    //   - rtt          : Round-trip time in milliseconds.
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                               int rtt) = 0;
+
+    // Encode the next frame as key frame.
+    virtual WebRtc_Word32 EncodeFrameType(const FrameType type) = 0;
+  protected:
+    virtual ~VideoCaptureEncodeInterface() {
+    }
+  };
+
+  //   Register capture data callback
+  virtual WebRtc_Word32 RegisterCaptureDataCallback(
+      VideoCaptureDataCallback& dataCallback) = 0;
+
+  //  Remove capture data callback
+  virtual WebRtc_Word32 DeRegisterCaptureDataCallback() = 0;
+
+  // Register capture callback.
+  virtual WebRtc_Word32 RegisterCaptureCallback(
+      VideoCaptureFeedBack& callBack) = 0;
+
+  //  Remove capture callback.
+  virtual WebRtc_Word32 DeRegisterCaptureCallback() = 0;
+
+  // Start capture device
+  virtual WebRtc_Word32 StartCapture(
+      const VideoCaptureCapability& capability) = 0;
+
+  virtual WebRtc_Word32 StopCapture() = 0;
+
+  // Send an image when the capture device is not running.
+  virtual WebRtc_Word32 StartSendImage(const VideoFrame& videoFrame,
+                                       WebRtc_Word32 frameRate = 1) = 0;
+
+  virtual WebRtc_Word32 StopSendImage() = 0;
+
+  // Returns the name of the device used by this module.
+  virtual const WebRtc_UWord8* CurrentDeviceName() const = 0;
+
+  // Returns true if the capture device is running
+  virtual bool CaptureStarted() = 0;
+
+  // Gets the current configuration.
+  virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings) = 0;
+
+  virtual WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS) = 0;
+
+  // Returns the current CaptureDelay. Only valid when the camera is running.
+  virtual WebRtc_Word32 CaptureDelay() = 0;
+
+  // Set the rotation of the captured frames.
+  // If the rotation is set to the same as returned by
+  // DeviceInfo::GetOrientation the captured frames are
+  // displayed correctly if rendered.
+  virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation) = 0;
+
+  // Gets a pointer to an encode interface if the capture device supports the
+  // requested type and size.  NULL otherwise.
+  virtual VideoCaptureEncodeInterface* GetEncodeInterface(
+      const VideoCodec& codec) = 0;
+
+  virtual WebRtc_Word32 EnableFrameRateCallback(const bool enable) = 0;
+  virtual WebRtc_Word32 EnableNoPictureAlarm(const bool enable) = 0;
+
+protected:
+  virtual ~VideoCaptureModule() {};
+};
+
+} // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
diff --git a/trunk/src/modules/video_capture/main/interface/video_capture_defines.h b/trunk/src/modules/video_capture/main/interface/video_capture_defines.h
new file mode 100644
index 0000000..2a3408b
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/interface/video_capture_defines.h
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_DEFINES_H_
+
+// Includes
+#include "typedefs.h"
+#include "module_common_types.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+    #define NULL    0
+#endif
+
+enum {kVideoCaptureUniqueNameLength =1024}; //Max unique capture device name length
+enum {kVideoCaptureDeviceNameLength =256}; //Max capture device name length
+enum {kVideoCaptureProductIdLength =128}; //Max product id length
+
+// Enums
+enum VideoCaptureRotation
+{
+    kCameraRotate0 = 0,
+    kCameraRotate90 = 5,
+    kCameraRotate180 = 10,
+    kCameraRotate270 = 15
+};
+
+struct VideoCaptureCapability
+{
+    WebRtc_Word32 width;
+    WebRtc_Word32 height;
+    WebRtc_Word32 maxFPS;
+    WebRtc_Word32 expectedCaptureDelay;
+    RawVideoType rawType;
+    VideoCodecType codecType;
+    bool interlaced;
+
+    VideoCaptureCapability()
+    {
+        width = 0;
+        height = 0;
+        maxFPS = 0;
+        expectedCaptureDelay = 0;
+        rawType = kVideoUnknown;
+        codecType = kVideoCodecUnknown;
+        interlaced = false;
+    }
+    ;
+    bool operator!=(const VideoCaptureCapability &other) const
+    {
+        if (width != other.width)
+            return true;
+        if (height != other.height)
+            return true;
+        if (maxFPS != other.maxFPS)
+            return true;
+        if (rawType != other.rawType)
+            return true;
+        if (codecType != other.codecType)
+            return true;
+        if (interlaced != other.interlaced)
+            return true;
+        return false;
+    }
+    bool operator==(const VideoCaptureCapability &other) const
+    {
+        return !operator!=(other);
+    }
+};
+
+enum VideoCaptureAlarm
+{
+    Raised = 0,
+    Cleared = 1
+};
+
+// VideoFrameI420 doesn't take the ownership of the buffer.
+// It's mostly used to group the parameters for external capture.
+struct VideoFrameI420
+{
+  VideoFrameI420() {
+    y_plane = NULL;
+    u_plane = NULL;
+    v_plane = NULL;
+    y_pitch = 0;
+    u_pitch = 0;
+    v_pitch = 0;
+    width = 0;
+    height = 0;
+  }
+
+  unsigned char* y_plane;
+  unsigned char* u_plane;
+  unsigned char* v_plane;
+
+  int y_pitch;
+  int u_pitch;
+  int v_pitch;
+
+  unsigned short width;
+  unsigned short height;
+};
+
+/* External Capture interface. Returned by Create
+ and implemented by the capture module.
+ */
+class VideoCaptureExternal
+{
+public:
+    virtual WebRtc_Word32 IncomingFrame(WebRtc_UWord8* videoFrame,
+                                        WebRtc_Word32 videoFrameLength,
+                                        const VideoCaptureCapability& frameInfo,
+                                        WebRtc_Word64 captureTime = 0) = 0;
+    virtual WebRtc_Word32 IncomingFrameI420(const VideoFrameI420& video_frame,
+                                            WebRtc_Word64 captureTime = 0) = 0;
+protected:
+    ~VideoCaptureExternal() {}
+};
+
+// Callback class to be implemented by module user
+class VideoCaptureDataCallback
+{
+public:
+    virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
+                                         VideoFrame& videoFrame,
+                                         VideoCodecType codecType) = 0;
+    virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                       const WebRtc_Word32 delay) = 0;
+protected:
+    virtual ~VideoCaptureDataCallback(){}
+};
+
+class VideoCaptureFeedBack
+{
+public:
+    virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 frameRate) = 0;
+    virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
+                                  const VideoCaptureAlarm alarm) = 0;
+protected:
+    virtual ~VideoCaptureFeedBack(){}
+};
+
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_DEFINES_H_
diff --git a/trunk/src/modules/video_capture/main/interface/video_capture_factory.h b/trunk/src/modules/video_capture/main/interface/video_capture_factory.h
new file mode 100644
index 0000000..6cb7a77
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/interface/video_capture_factory.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains interfaces used for creating the VideoCaptureModule
+// and DeviceInfo.
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_FACTORY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_FACTORY_H_
+
+#include "video_capture.h"
+
+namespace webrtc {
+
+class VideoCaptureFactory {
+ public:
+  // Create a video capture module object
+  // id - unique identifier of this video capture module object.
+  // deviceUniqueIdUTF8 - name of the device.
+  //                      Available names can be found by using GetDeviceName
+  static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                    const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+  // Create a video capture module object used for external capture.
+  // id - unique identifier of this video capture module object
+  // externalCapture - [out] interface to call when a new frame is captured.
+  static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                    VideoCaptureExternal*& externalCapture);
+
+  static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
+      const WebRtc_Word32 id);
+
+#ifdef WEBRTC_ANDROID
+  static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext);
+#endif
+
+ private:
+  ~VideoCaptureFactory();
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_FACTORY_H_
diff --git a/trunk/src/modules/video_capture/main/source/Android.mk b/trunk/src/modules/video_capture/main/source/Android.mk
new file mode 100644
index 0000000..8976f19
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android.mk
@@ -0,0 +1,50 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_video_capture
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    video_capture_impl.cc \
+    device_info_impl.cc \
+    video_capture_factory.cc \
+    Android/video_capture_android.cc \
+    Android/device_info_android.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/Android \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../source \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../utility/interface \
+    $(LOCAL_PATH)/../../../audio_coding/main/interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/video_capture/main/source/Android/device_info_android.cc b/trunk/src/modules/video_capture/main/source/Android/device_info_android.cc
new file mode 100644
index 0000000..3c5e052
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/device_info_android.cc
@@ -0,0 +1,363 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_android.h"
+
+#include <stdio.h>
+
+#include "ref_count.h"
+#include "trace.h"
+#include "video_capture_android.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo (
+    const WebRtc_Word32 id)
+{
+    videocapturemodule::DeviceInfoAndroid *deviceInfo =
+                                new videocapturemodule::DeviceInfoAndroid(id);
+    if (deviceInfo && deviceInfo->Init() != 0) // Failed to init
+    {
+        delete deviceInfo;
+        deviceInfo = NULL;
+    }
+    return deviceInfo;
+}
+
+DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
+    DeviceInfoImpl(id)
+{
+}
+
+WebRtc_Word32 DeviceInfoAndroid::Init()
+{
+    return 0;
+}
+
+DeviceInfoAndroid::~DeviceInfoAndroid()
+{
+}
+
+WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices()
+{
+
+    JNIEnv *env;
+    jclass javaCmDevInfoClass;
+    jobject javaCmDevInfoObject;
+    bool attached = false;
+    if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+                                                                  env,
+                                                                  javaCmDevInfoClass,
+                                                                  javaCmDevInfoObject,
+                                                                  attached) != 0)
+    {
+        return 0;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s GetMethodId", __FUNCTION__);
+    // get the method ID for the Android Java NumberOfDevices method.
+    jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
+                                     "NumberOfDevices",
+                                     "()I");
+
+    jint numberOfDevices = 0;
+    if (cid != NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                     "%s Calling Number of devices", __FUNCTION__);
+        numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
+    }
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+    if (numberOfDevices > 0)
+        return numberOfDevices;
+    return 0;
+}
+
+WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
+                                       WebRtc_UWord32 deviceNumber,
+                                       WebRtc_UWord8* deviceNameUTF8,
+                                       WebRtc_UWord32 deviceNameLength,
+                                       WebRtc_UWord8* deviceUniqueIdUTF8,
+                                       WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                       WebRtc_UWord8* /*productUniqueIdUTF8*/,
+                                       WebRtc_UWord32 /*productUniqueIdUTF8Length*/)
+{
+
+    JNIEnv *env;
+    jclass javaCmDevInfoClass;
+    jobject javaCmDevInfoObject;
+    WebRtc_Word32 result = 0;
+    bool attached = false;
+    if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+                                                              env,
+                                                              javaCmDevInfoClass,
+                                                              javaCmDevInfoObject,
+                                                              attached)!= 0)
+    {
+        return -1;
+    }
+
+    // get the method ID for the Android Java GetDeviceUniqueName name.
+    jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
+                                     "(I)Ljava/lang/String;");
+    if (cid != NULL)
+    {
+
+        jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
+                                                          cid, deviceNumber);
+        if (javaDeviceNameObj == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s: Failed to get device name for device %d.",
+                         __FUNCTION__, (int) deviceNumber);
+            result = -1;
+        }
+        else
+        {
+            jboolean isCopy;
+            const char* javaDeviceNameChar = env->GetStringUTFChars(
+                                                    (jstring) javaDeviceNameObj
+                                                    ,&isCopy);
+            const jsize javaDeviceNameCharLength = env->GetStringUTFLength(
+                                                    (jstring) javaDeviceNameObj);
+            if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceUniqueIdUTF8Length)
+            {
+                memcpy(deviceUniqueIdUTF8,
+                       javaDeviceNameChar,
+                       javaDeviceNameCharLength + 1);
+            }
+            else
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id, "%s: deviceUniqueIdUTF8 to short.",
+                             __FUNCTION__);
+                result = -1;
+            }
+            if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength)
+            {
+                memcpy(deviceNameUTF8,
+                       javaDeviceNameChar,
+                       javaDeviceNameCharLength + 1);
+            }
+            env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
+                                       javaDeviceNameChar);
+        } // end of javaDeviceNameObj != NULL branch
+
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: Failed to find GetDeviceUniqueName function id",
+                     __FUNCTION__);
+        result = -1;
+    }
+
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: result %d", __FUNCTION__, (int) result);
+    return result;
+
+}
+
+WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
+                                        const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+
+    MapItem* item = NULL;
+    while ((item = _captureCapabilities.Last()))
+    {
+        delete (VideoCaptureCapability*) item->GetItem();
+        _captureCapabilities.Erase(item);
+    }
+
+    JNIEnv *env;
+    jclass javaCmDevInfoClass;
+    jobject javaCmDevInfoObject;
+    bool attached = false;
+    if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+                                                              env,
+                                                              javaCmDevInfoClass,
+                                                              javaCmDevInfoObject,
+                                                              attached) != 0)
+    {
+        return -1;
+    }
+
+    // Find the capability class
+    jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
+    if (javaCapClassLocal == NULL)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Can't find java class VideoCaptureCapabilityAndroid.",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // get the method ID for the Android Java GetCapabilityArray method.
+    char signature[256];
+    sprintf(signature,
+            "(Ljava/lang/String;)[L%s;",
+            AndroidJavaCaptureCapabilityClass);
+    jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
+                                     "GetCapabilityArray",
+                                     signature);
+    if (cid == NULL)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
+        return -1;
+    }
+    // Create a jstring so we can pass the deviceUniqueName to the java method.
+    jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+
+    if (capureIdString == NULL)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Can't create string for  method GetCapabilityArray.",
+                     __FUNCTION__);
+        return -1;
+    }
+    // Call the java class and get an array with capabilities back.
+    jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
+                                                        cid, capureIdString);
+    if (!javaCapabilitiesObj)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Failed to call java GetCapabilityArray.",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
+    jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
+    jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
+    if (widthField == NULL || heigtField == NULL || maxFpsField == NULL)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Failed to get field Id.", __FUNCTION__);
+        return -1;
+    }
+
+    const jsize numberOfCapabilities =
+        env->GetArrayLength((jarray) javaCapabilitiesObj);
+
+    for (jsize i = 0; i < numberOfCapabilities; ++i)
+    {
+        VideoCaptureCapability *cap = new VideoCaptureCapability();
+        jobject capabilityElement = env->GetObjectArrayElement(
+                                            (jobjectArray) javaCapabilitiesObj,
+                                             i);
+
+        cap->width = env->GetIntField(capabilityElement, widthField);
+        cap->height = env->GetIntField(capabilityElement, heigtField);
+        cap->expectedCaptureDelay = _expectedCaptureDelay;
+        cap->rawType = kVideoNV21;
+        cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
+                     cap->width, cap->height, cap->maxFPS);
+        _captureCapabilities.Insert(i, cap);
+    }
+
+    _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
+    _lastUsedDeviceName = (WebRtc_UWord8*) realloc(_lastUsedDeviceName,
+                                                   _lastUsedDeviceNameLength + 1);
+    memcpy(_lastUsedDeviceName,
+           deviceUniqueIdUTF8,
+           _lastUsedDeviceNameLength + 1);
+
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CreateCapabilityMap %d", _captureCapabilities.Size());
+
+    return _captureCapabilities.Size();
+}
+
+WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
+                                            const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                            VideoCaptureRotation& orientation)
+{
+
+    JNIEnv *env;
+    jclass javaCmDevInfoClass;
+    jobject javaCmDevInfoObject;
+    WebRtc_Word32 result = 0;
+    bool attached = false;
+    if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+                                                              env,
+                                                              javaCmDevInfoClass,
+                                                              javaCmDevInfoObject,
+                                                              attached) != 0)
+    {
+        return -1;
+    }
+
+    // get the method ID for the Android Java GetOrientation method.
+    jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
+                                     "(Ljava/lang/String;)I");
+    if (cid == NULL)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Can't find method GetOrientation.", __FUNCTION__);
+        return -1;
+    }
+    // Create a jstring so we can pass the deviceUniqueName to the java method.
+    jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+    if (capureIdString == NULL)
+    {
+        VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Can't create string for  method GetCapabilityArray.",
+                     __FUNCTION__);
+        return -1;
+    }
+    // Call the java class and get the orientation.
+    jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
+                                           capureIdString);
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+    WebRtc_Word32 retValue = 0;
+    switch (jorientation)
+    {
+        case -1: //Error
+            orientation = kCameraRotate0;
+            retValue = -1;
+            break;
+        case 0:
+            orientation = kCameraRotate0;
+            break;
+        case 90:
+            orientation = kCameraRotate90;
+            break;
+        case 180:
+            orientation = kCameraRotate180;
+            break;
+        case 270:
+            orientation = kCameraRotate270;
+            break;
+        case 360:
+            orientation = kCameraRotate0;
+            break;
+    }
+    return retValue;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Android/device_info_android.h b/trunk/src/modules/video_capture/main/source/Android/device_info_android.h
new file mode 100644
index 0000000..33b22c3
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/device_info_android.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
+
+#include <jni.h>
+#include "video_capture_impl.h"
+#include "device_info_impl.h"
+
+#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
+#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// Android logging, uncomment to print trace to logcat instead of trace file/callback
+//#include <android/log.h>
+//#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+
+class DeviceInfoAndroid: public DeviceInfoImpl
+{
+public:
+
+    DeviceInfoAndroid(const WebRtc_Word32 id);
+    WebRtc_Word32 Init();
+    virtual ~DeviceInfoAndroid();
+    virtual WebRtc_UWord32 NumberOfDevices();
+    virtual WebRtc_Word32 GetDeviceName(WebRtc_UWord32 deviceNumber,
+                                  WebRtc_UWord8* deviceNameUTF8,
+                                  WebRtc_UWord32 deviceNameLength,
+                                  WebRtc_UWord8* deviceUniqueIdUTF8,
+                                  WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                  WebRtc_UWord8* productUniqueIdUTF8 = 0,
+                                  WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+    virtual WebRtc_Word32 CreateCapabilityMap(const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+                                      const WebRtc_UWord8* /*deviceUniqueIdUTF8*/,
+                                      const WebRtc_UWord8* /*dialogTitleUTF8*/,
+                                      void* /*parentWindow*/,
+                                      WebRtc_UWord32 /*positionX*/,
+                                      WebRtc_UWord32 /*positionY*/){return -1;}
+    virtual WebRtc_Word32 GetOrientation(const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                         VideoCaptureRotation& orientation);
+private:
+    bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+    enum {_expectedCaptureDelay = 190};
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
diff --git a/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
new file mode 100644
index 0000000..0443953
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+public class CaptureCapabilityAndroid {
+  public int width  = 0;
+  public int height = 0;
+  public int maxFPS = 0;
+}
diff --git a/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
new file mode 100644
index 0000000..a4c39a8
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.io.IOException;
+import java.util.Locale;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.webrtc.videoengine.CaptureCapabilityAndroid;
+import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.hardware.Camera;
+import android.hardware.Camera.PreviewCallback;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceHolder.Callback;
+
+public class VideoCaptureAndroid implements PreviewCallback, Callback {
+
+  private Camera camera;
+  private AndroidVideoCaptureDevice currentDevice = null;
+  public ReentrantLock previewBufferLock = new ReentrantLock();
+  private int PIXEL_FORMAT = ImageFormat.NV21;
+  PixelFormat pixelFormat = new PixelFormat();
+  // True when the C++ layer has ordered the camera to be started.
+  private boolean isRunning=false;
+
+  private final int numCaptureBuffers = 3;
+  private int expectedFrameSize = 0;
+  private int orientation = 0;
+  private int id = 0;
+  // C++ callback context variable.
+  private long context = 0;
+  private SurfaceHolder localPreview = null;
+  // True if this class owns the preview video buffers.
+  private boolean ownsBuffers = false;
+
+  // Set LOGLEVEL to 3 for VERBOSE logging, 2 for DEBUG (see flags below).
+  private static int LOGLEVEL = 0;
+  private static boolean VERBOSE = LOGLEVEL > 2;
+  private static boolean DEBUG = LOGLEVEL > 1;
+
+  CaptureCapabilityAndroid currentCapability = null;
+
+  public static
+  void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
+    if(DEBUG) Log.d("*WEBRTC*", "DeleteVideoCaptureAndroid");
+
+    captureAndroid.StopCapture();
+    captureAndroid.camera.release();
+    captureAndroid.camera = null;
+    captureAndroid.context = 0;
+
+    if(DEBUG) Log.v("*WEBRTC*", "DeleteVideoCaptureAndroid ended");
+
+  }
+
+  public VideoCaptureAndroid(int in_id,
+                             long in_context,
+                             Camera in_camera,
+                             AndroidVideoCaptureDevice in_device) {
+    id = in_id;
+    context = in_context;
+    camera = in_camera;
+    currentDevice = in_device;
+  }
+
+  public int StartCapture(int width, int height, int frameRate) {
+    if(DEBUG) Log.d("*WEBRTC*", "StartCapture width" + width +
+                    " height " + height +" frame rate " + frameRate);
+    try {
+      if (camera == null) {
+        Log.e("*WEBRTC*",
+              String.format(Locale.US,"Camera not initialized %d",id));
+        return -1;
+      }
+      currentCapability = new CaptureCapabilityAndroid();
+      currentCapability.width = width;
+      currentCapability.height = height;
+      currentCapability.maxFPS = frameRate;
+      PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
+
+      Camera.Parameters parameters = camera.getParameters();
+      parameters.setPreviewSize(currentCapability.width,
+                                currentCapability.height);
+      parameters.setPreviewFormat(PIXEL_FORMAT );
+      parameters.setPreviewFrameRate(currentCapability.maxFPS);
+      camera.setParameters(parameters);
+
+      // Get the local preview SurfaceHolder from the static render class
+      localPreview = ViERenderer.GetLocalRenderer();
+      if(localPreview != null) {
+        localPreview.addCallback(this);
+      }
+
+      int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
+      if(android.os.Build.VERSION.SDK_INT >= 7) {
+        // According to the docs, addCallbackBuffer was added in API level 8.
+        // But it seems like it works on Android 2.1 as well.
+        // At least SE X10 and Milestone
+        byte[] buffer = null;
+        for (int i = 0; i < numCaptureBuffers; i++) {
+          buffer = new byte[bufSize];
+          camera.addCallbackBuffer(buffer);
+        }
+
+        camera.setPreviewCallbackWithBuffer(this);
+        ownsBuffers = true;
+      }
+      else {
+        camera.setPreviewCallback(this);
+      }
+
+      camera.startPreview();
+      previewBufferLock.lock();
+      expectedFrameSize = bufSize;
+      isRunning = true;
+      previewBufferLock.unlock();
+    }
+    catch (Exception ex) {
+      Log.e("*WEBRTC*", "Failed to start camera");
+      return -1;
+    }
+    return 0;
+  }
+
+  public int StopCapture() {
+    if(DEBUG) Log.d("*WEBRTC*", "StopCapture");
+    try {
+      previewBufferLock.lock();
+      isRunning = false;
+      previewBufferLock.unlock();
+
+      camera.stopPreview();
+
+      if(android.os.Build.VERSION.SDK_INT > 7) {
+        camera.setPreviewCallbackWithBuffer(null);
+      }
+      else {
+        camera.setPreviewCallback(null);
+      }
+    }
+    catch (Exception ex) {
+      Log.e("*WEBRTC*", "Failed to stop camera");
+      return -1;
+    }
+
+    if(DEBUG) {
+      Log.d("*WEBRTC*", "StopCapture ended");
+    }
+    return 0;
+  }
+
+  native void ProvideCameraFrame(byte[] data,int length, long captureObject);
+
+  public void onPreviewFrame(byte[] data, Camera camera) {
+    previewBufferLock.lock();
+
+    if(VERBOSE) {
+      Log.v("*WEBRTC*",
+            String.format(Locale.US, "preview frame length %d context %x",
+                          data.length, context));
+    }
+    if(isRunning) {
+      // If StartCapture has been called but not StopCapture
+      // Call the C++ layer with the captured frame
+      if (data.length == expectedFrameSize) {
+        ProvideCameraFrame(data, expectedFrameSize, context);
+        if (VERBOSE) {
+          Log.v("*WEBRTC*", String.format(Locale.US, "frame delivered"));
+        }
+        if(ownsBuffers) {
+          // Give the video buffer to the camera service again.
+          camera.addCallbackBuffer(data);
+        }
+      }
+    }
+    previewBufferLock.unlock();
+  }
+
+
+  public void surfaceChanged(SurfaceHolder holder,
+                             int format, int width, int height) {
+
+    try {
+      if(camera != null) {
+        camera.setPreviewDisplay(localPreview);
+      }
+    } catch (IOException e) {
+      Log.e("*WEBRTC*",
+            String.format(Locale.US,
+                          "Failed to set Local preview. " + e.getMessage()));
+    }
+  }
+
+  // Sets the rotation of the preview render window.
+  // Does not affect the captured video image.
+  public void SetPreviewRotation(int rotation) {
+    if(camera != null) {
+      previewBufferLock.lock();
+      final boolean running = isRunning;
+      int width = 0;
+      int height = 0;
+      int framerate = 0;
+
+      if(running) {
+        width = currentCapability.width;
+        height = currentCapability.height;
+        framerate = currentCapability.maxFPS;
+
+        StopCapture();
+      }
+
+      int resultRotation = 0;
+      if(currentDevice.frontCameraType ==
+         VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
+        // this is a 2.3 or later front facing camera.
+        // SetDisplayOrientation will flip the image horizontally
+        // before doing the rotation.
+        resultRotation=(360-rotation) % 360; // compensate the mirror
+      }
+      else {
+        // Back facing or 2.2 or previous front camera
+        resultRotation=rotation;
+      }
+      if(android.os.Build.VERSION.SDK_INT>7) {
+        camera.setDisplayOrientation(resultRotation);
+      }
+      else {
+        // Android 2.1 and previous
+        // This rotation unfortunately does not seems to work.
+        // http://code.google.com/p/android/issues/detail?id=1193
+        Camera.Parameters parameters = camera.getParameters();
+        parameters.setRotation(resultRotation);
+        camera.setParameters(parameters);
+      }
+
+      if(running) {
+        StartCapture(width, height, framerate);
+      }
+      previewBufferLock.unlock();
+    }
+  }
+
+  public void surfaceCreated(SurfaceHolder holder) {
+  }
+
+
+  public void surfaceDestroyed(SurfaceHolder holder) {
+  }
+
+}
diff --git a/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
new file mode 100644
index 0000000..4ccf060
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -0,0 +1,432 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.io.File;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+import dalvik.system.DexClassLoader;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.hardware.Camera.Size;
+import android.util.Log;
+
+public class VideoCaptureDeviceInfoAndroid {
+
+  //Context
+  Context context;
+
+  // Set LOGLEVEL to 3 for VERBOSE logging, 2 for DEBUG (thresholds below).
+  private static int LOGLEVEL = 0;
+  private static boolean VERBOSE = LOGLEVEL > 2;
+  private static boolean DEBUG = LOGLEVEL > 1;
+
+  // Private class with info about all available cameras and the capabilities
+  public class AndroidVideoCaptureDevice {
+    AndroidVideoCaptureDevice() {
+      frontCameraType = FrontFacingCameraType.None;
+      index = 0;
+    }
+
+    public String deviceUniqueName;
+    public CaptureCapabilityAndroid captureCapabilies[];
+    public FrontFacingCameraType frontCameraType;
+
+    // Orientation of camera as described in
+    // android.hardware.Camera.CameraInfo.Orientation
+    public int orientation;
+    // Camera index used in Camera.open(int) on Android 2.3 and onwards
+    public int index;
+  }
+
+  public enum FrontFacingCameraType {
+    None, // This is not a front facing camera
+    GalaxyS, // Galaxy S front facing camera.
+    HTCEvo, // HTC Evo front facing camera
+    Android23, // Android 2.3 front facing camera.
+  }
+
+  String currentDeviceUniqueId;
+  int id;
+  List<AndroidVideoCaptureDevice> deviceList;
+
+  public static VideoCaptureDeviceInfoAndroid
+  CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
+    if(DEBUG) {
+      Log.d("*WEBRTC*",
+            String.format(Locale.US, "VideoCaptureDeviceInfoAndroid"));
+    }
+
+    VideoCaptureDeviceInfoAndroid self =
+        new VideoCaptureDeviceInfoAndroid(in_id, in_context);
+    if(self != null && self.Init() == 0) {
+      return self;
+    }
+    else {
+      if(DEBUG) {
+        Log.d("*WEBRTC*", "Failed to create VideoCaptureDeviceInfoAndroid.");
+      }
+    }
+    return null;
+  }
+
+  private VideoCaptureDeviceInfoAndroid(int in_id,
+                                        Context in_context) {
+    id = in_id;
+    context = in_context;
+    deviceList = new ArrayList<AndroidVideoCaptureDevice>();
+  }
+
+  private int Init() {
+    // Populate the deviceList with available cameras and their capabilities.
+    Camera camera = null;
+    try{
+      if(android.os.Build.VERSION.SDK_INT > 8) {
+        // From Android 2.3 and onwards
+        for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+          AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
+
+          Camera.CameraInfo info = new Camera.CameraInfo();
+          Camera.getCameraInfo(i, info);
+          newDevice.index = i;
+          newDevice.orientation=info.orientation;
+          if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+            newDevice.deviceUniqueName =
+                "Camera " + i +", Facing back, Orientation "+ info.orientation;
+          }
+          else {
+            newDevice.deviceUniqueName =
+                "Camera " + i +", Facing front, Orientation "+ info.orientation;
+            newDevice.frontCameraType = FrontFacingCameraType.Android23;
+          }
+
+          camera = Camera.open(i);
+          Camera.Parameters parameters = camera.getParameters();
+          AddDeviceInfo(newDevice, parameters);
+          camera.release();
+          camera = null;
+          deviceList.add(newDevice);
+        }
+      }
+      else {
+        // Prior to Android 2.3
+        AndroidVideoCaptureDevice newDevice;
+        Camera.Parameters parameters;
+
+        newDevice = new AndroidVideoCaptureDevice();
+        camera = Camera.open();
+        parameters = camera.getParameters();
+        newDevice.deviceUniqueName = "Camera 1, Facing back";
+        newDevice.orientation = 90;
+        AddDeviceInfo(newDevice, parameters);
+
+        deviceList.add(newDevice);
+        camera.release();
+        camera=null;
+
+        newDevice = new AndroidVideoCaptureDevice();
+        newDevice.deviceUniqueName = "Camera 2, Facing front";
+        parameters = SearchOldFrontFacingCameras(newDevice);
+        if(parameters != null) {
+          AddDeviceInfo(newDevice, parameters);
+          deviceList.add(newDevice);
+        }
+      }
+    }
+    catch (Exception ex) {
+      Log.e("*WEBRTC*", "Failed to init VideoCaptureDeviceInfo ex" +
+            ex.getLocalizedMessage());
+      return -1;
+    }
+    VerifyCapabilities();
+    return 0;
+  }
+
+  // Adds the capture capabilities of the currently opened device
+  private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
+                             Camera.Parameters parameters) {
+
+    List<Size> sizes = parameters.getSupportedPreviewSizes();
+    List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
+    int maxFPS=0;
+    for(Integer frameRate:frameRates) {
+      if(VERBOSE) {
+        Log.v("*WEBRTC*",
+              "VideoCaptureDeviceInfoAndroid:frameRate " + frameRate);
+      }
+      if(frameRate > maxFPS) {
+        maxFPS = frameRate;
+      }
+    }
+
+    newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
+    for(int i = 0; i < sizes.size(); ++i) {
+      Size s = sizes.get(i);
+      newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
+      newDevice.captureCapabilies[i].height = s.height;
+      newDevice.captureCapabilies[i].width = s.width;
+      newDevice.captureCapabilies[i].maxFPS = maxFPS;
+    }
+  }
+
+  // Function that makes sure device specific capabilities are
+  // present in the capability list.
+  // E.g. Galaxy S supports CIF but does not list CIF as a supported capability.
+  // Motorola Droid Camera does not work with frame rate above 15fps.
+  // http://code.google.com/p/android/issues/detail?id=5514#c0
+  private void VerifyCapabilities() {
+    // Nexus S or Galaxy S
+    if(android.os.Build.DEVICE.equals("GT-I9000") ||
+       android.os.Build.DEVICE.equals("crespo")) {
+      CaptureCapabilityAndroid specificCapability =
+          new CaptureCapabilityAndroid();
+      specificCapability.width = 352;
+      specificCapability.height = 288;
+      specificCapability.maxFPS = 15;
+      AddDeviceSpecificCapability(specificCapability);
+
+      specificCapability = new CaptureCapabilityAndroid();
+      specificCapability.width = 176;
+      specificCapability.height = 144;
+      specificCapability.maxFPS = 15;
+      AddDeviceSpecificCapability(specificCapability);
+
+      specificCapability = new CaptureCapabilityAndroid();
+      specificCapability.width = 320;
+      specificCapability.height = 240;
+      specificCapability.maxFPS = 15;
+      AddDeviceSpecificCapability(specificCapability);
+    }
+    // Motorola Milestone Camera server does not work at 30fps
+    // even though it reports that it can
+    if(android.os.Build.MANUFACTURER.equals("motorola") &&
+       android.os.Build.DEVICE.equals("umts_sholes")) {
+      for(AndroidVideoCaptureDevice device:deviceList) {
+        for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
+          capability.maxFPS=15;
+        }
+      }
+    }
+  }
+
+  private void AddDeviceSpecificCapability(
+      CaptureCapabilityAndroid specificCapability) {
+    for(AndroidVideoCaptureDevice device:deviceList) {
+      boolean foundCapability = false;
+      for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
+        if(capability.width == specificCapability.width &&
+           capability.height == specificCapability.height) {
+          foundCapability = true;
+          break;
+        }
+      }
+      if(foundCapability==false) {
+        CaptureCapabilityAndroid newCaptureCapabilies[]=
+            new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
+        for(int i = 0; i < device.captureCapabilies.length; ++i) {
+          newCaptureCapabilies[i+1] = device.captureCapabilies[i];
+        }
+        newCaptureCapabilies[0] = specificCapability;
+        device.captureCapabilies = newCaptureCapabilies;
+      }
+    }
+  }
+
+  // Returns the number of Capture devices that is supported
+  public int NumberOfDevices() {
+    return deviceList.size();
+  }
+
+  public String GetDeviceUniqueName(int deviceNumber) {
+    if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
+      return null;
+    }
+    return deviceList.get(deviceNumber).deviceUniqueName;
+  }
+
+  public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
+  {
+    for (AndroidVideoCaptureDevice device: deviceList) {
+      if(device.deviceUniqueName.equals(deviceUniqueId)) {
+        return (CaptureCapabilityAndroid[]) device.captureCapabilies;
+      }
+    }
+    return null;
+  }
+
+  // Returns the camera orientation as described by
+  // android.hardware.Camera.CameraInfo.orientation
+  public int GetOrientation(String deviceUniqueId) {
+    for (AndroidVideoCaptureDevice device: deviceList) {
+      if(device.deviceUniqueName.equals(deviceUniqueId)) {
+        return device.orientation;
+      }
+    }
+    return -1;
+  }
+
+  // Returns an instance of VideoCaptureAndroid.
+  public VideoCaptureAndroid AllocateCamera(int id, long context,
+                                            String deviceUniqueId) {
+    try {
+      if(DEBUG) Log.d("*WEBRTC*", "AllocateCamera " + deviceUniqueId);
+
+      Camera camera = null;
+      AndroidVideoCaptureDevice deviceToUse = null;
+      for (AndroidVideoCaptureDevice device: deviceList) {
+        if(device.deviceUniqueName.equals(deviceUniqueId)) {
+          // Found the wanted camera
+          deviceToUse = device;
+          switch(device.frontCameraType) {
+            case GalaxyS:
+              camera = AllocateGalaxySFrontCamera();
+              break;
+            case HTCEvo:
+              camera = AllocateEVOFrontFacingCamera();
+              break;
+            default:
+              // From Android 2.3 and onwards.
+              if(android.os.Build.VERSION.SDK_INT>8)
+                camera=Camera.open(device.index);
+              else
+                camera=Camera.open(); // Default camera
+          }
+        }
+      }
+
+      if(camera == null) {
+        return null;
+      }
+      if(VERBOSE) {
+        Log.v("*WEBRTC*", "AllocateCamera - creating VideoCaptureAndroid");
+      }
+
+      return new VideoCaptureAndroid(id,context,camera,deviceToUse);
+
+    }catch (Exception ex) {
+      Log.e("*WEBRTC*", "AllocateCamera Failed to open camera- ex " +
+            ex.getLocalizedMessage());
+    }
+    return null;
+  }
+
+  // Searches for a front facing camera device. This is device specific code.
+  private Camera.Parameters
+  SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
+      throws SecurityException, IllegalArgumentException,
+      NoSuchMethodException, ClassNotFoundException,
+      IllegalAccessException, InvocationTargetException {
+    // Check the id of the opened camera device
+    // Returns null on X10 and 1 on Samsung Galaxy S.
+    Camera camera = Camera.open();
+    Camera.Parameters parameters = camera.getParameters();
+    String cameraId = parameters.get("camera-id");
+    if(cameraId != null && cameraId.equals("1")) {
+      // This might be a Samsung Galaxy S with a front facing camera.
+      try {
+        parameters.set("camera-id", 2);
+        camera.setParameters(parameters);
+        parameters = camera.getParameters();
+        newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
+        newDevice.orientation = 0;
+        camera.release();
+        return parameters;
+      }
+      catch (Exception ex) {
+        //Nope - it did not work.
+        Log.e("*WEBRTC*", "Init Failed to open front camera camera - ex " +
+              ex.getLocalizedMessage());
+      }
+    }
+    camera.release();
+
+    //Check for Evo front facing camera
+    File file =
+        new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
+    boolean exists = file.exists();
+    if (!exists){
+      file =
+          new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
+      exists = file.exists();
+    }
+    if(exists) {
+      newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
+      newDevice.orientation = 0;
+      Camera evCamera = AllocateEVOFrontFacingCamera();
+      parameters = evCamera.getParameters();
+      evCamera.release();
+      return parameters;
+    }
+    return null;
+  }
+
+  // Returns a handle to HTC front facing camera.
+  // The caller is responsible to release it on completion.
+  private Camera AllocateEVOFrontFacingCamera()
+      throws SecurityException, NoSuchMethodException,
+      ClassNotFoundException, IllegalArgumentException,
+      IllegalAccessException, InvocationTargetException {
+    String classPath = null;
+    File file =
+        new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
+    classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
+    boolean exists = file.exists();
+    if (!exists){
+      file =
+          new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
+      classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
+      exists = file.exists();
+    }
+    if(!exists) {
+      return null;
+    }
+
+    String dexOutputDir = "";
+    if(context != null) {
+      dexOutputDir = context.getFilesDir().getAbsolutePath();
+      File mFilesDir = new File(dexOutputDir, "dexfiles");
+      if(!mFilesDir.exists()){
+        //Log.e("*WEBRTCN*", "Directory doesn't exists");
+        if(!mFilesDir.mkdirs()) {
+          //Log.e("*WEBRTCN*", "Unable to create files directory");
+        }
+      }
+    }
+
+    dexOutputDir += "/dexfiles";
+
+    DexClassLoader loader =
+        new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
+                           null, ClassLoader.getSystemClassLoader());
+
+    Method method = loader.loadClass(classPath).getDeclaredMethod(
+        "getFrontFacingCamera", (Class[]) null);
+    Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
+    return camera;
+  }
+
+  // Returns a handle to Galaxy S front camera.
+  // The caller is responsible to release it on completion.
+  private Camera AllocateGalaxySFrontCamera()
+  {
+    Camera camera = Camera.open();
+    Camera.Parameters parameters = camera.getParameters();
+    parameters.set("camera-id",2);
+    camera.setParameters(parameters);
+    return camera;
+  }
+
+}
diff --git a/trunk/src/modules/video_capture/main/source/Android/video_capture_android.cc b/trunk/src/modules/video_capture/main/source/Android/video_capture_android.cc
new file mode 100644
index 0000000..acf7e3b
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/video_capture_android.cc
@@ -0,0 +1,703 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_android.h"
+
+#include <stdio.h>
+
+#include "critical_section_wrapper.h"
+#include "ref_count.h"
+#include "trace.h"
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+
+    RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation =
+        new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id);
+
+    if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        delete implementation;
+        implementation = NULL;
+    }
+    return implementation;
+}
+
+// Android logging, uncomment to print trace to logcat instead of trace file/callback
+//#include <android/log.h>
+//#undef WEBRTC_TRACE
+//#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+
+JavaVM* VideoCaptureAndroid::g_jvm = NULL;
+jclass VideoCaptureAndroid::g_javaCmClass = NULL; //VideoCaptureAndroid.java
+jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; //VideoCaptureDeviceInfoAndroid.java
+jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; //static instance of VideoCaptureDeviceInfoAndroid.java
+jobject VideoCaptureAndroid::g_javaContext = NULL;
+
+/*
+ * Register references to Java Capture class.
+ */
+WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
+                                                     void* javaContext)
+{
+
+    g_jvm = static_cast<JavaVM*> (javaVM);
+    g_javaContext = static_cast<jobject> (javaContext);
+
+    if (javaVM)
+    {
+        JNIEnv* env = NULL;
+        if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not get Java environment", __FUNCTION__);
+            return -1;
+        }
+        // Look up the Java capture class (note: full package path required).
+        jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
+        if (!javaCmClassLocal)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not find java class", __FUNCTION__);
+            return -1;
+        }
+        // create a global reference to the class (to tell JNI that we are referencing it
+        // after this function has returned)
+        g_javaCmClass = static_cast<jclass>
+                                    (env->NewGlobalRef(javaCmClassLocal));
+        if (!g_javaCmClass)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: InitVideoEngineJava(): could not create"
+                         " Java Camera class reference",
+                         __FUNCTION__);
+            return -1;
+        }
+        // Delete local class ref, we only use the global ref
+        env->DeleteLocalRef(javaCmClassLocal);
+        JNINativeMethod nativeFunctions = { "ProvideCameraFrame", "([BIJ)V",
+                            (void*) &VideoCaptureAndroid::ProvideCameraFrame };
+        if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                         "%s: Registered native functions", __FUNCTION__);
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: Failed to register native functions",
+                         __FUNCTION__);
+            return -1;
+        }
+
+        // Look up the Java capture device info class (full package path).
+        jclass javaCmDevInfoClassLocal = env->FindClass(
+                                            AndroidJavaCaptureDeviceInfoClass);
+        if (!javaCmDevInfoClassLocal)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not find java class", __FUNCTION__);
+            return -1;
+        }
+
+        // create a global reference to the class (to tell JNI that we are referencing it
+        // after this function has returned)
+        g_javaCmDevInfoClass = static_cast<jclass>
+                                   (env->NewGlobalRef(javaCmDevInfoClassLocal));
+        if (!g_javaCmDevInfoClass)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: InitVideoEngineJava(): could not create Java "
+                         "Camera Device info class reference",
+                         __FUNCTION__);
+            return -1;
+        }
+        // Delete local class ref, we only use the global ref
+        env->DeleteLocalRef(javaCmDevInfoClassLocal);
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                     "VideoCaptureDeviceInfoAndroid get method id");
+
+        // get the method ID for the Android Java CaptureClass static
+        //CreateVideoCaptureAndroid factory method.
+        jmethodID cid = env->GetStaticMethodID(g_javaCmDevInfoClass,
+                                               "CreateVideoCaptureDeviceInfoAndroid",
+                                               "(ILandroid/content/Context;)"
+                                               "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
+        if (cid == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not get java VideoCaptureDeviceInfoAndroid constructor ID",
+                         __FUNCTION__);
+            return -1;
+        }
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                     "%s: construct static java device object", __FUNCTION__);
+
+        // construct the object by calling the static constructor object
+        jobject javaCameraDeviceInfoObjLocal = env->CallStaticObjectMethod(
+                                                            g_javaCmDevInfoClass,
+                                                            cid, (int) -1,
+                                                            g_javaContext);
+        if (!javaCameraDeviceInfoObjLocal)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not create Java Capture Device info object",
+                         __FUNCTION__);
+            return -1;
+        }
+        // create a reference to the object (to tell JNI that we are referencing it
+        // after this function has returned)
+        g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
+        if (!g_javaCmDevInfoObject)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not create Java cameradevinceinfo object reference",
+                         __FUNCTION__);
+            return -1;
+        }
+        // Delete local object ref, we only use the global ref
+        env->DeleteLocalRef(javaCameraDeviceInfoObjLocal);
+        return 0;
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                     "%s: JVM is NULL, assuming deinit", __FUNCTION__);
+        if (!g_jvm)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: SetAndroidObjects not called with a valid JVM.",
+                         __FUNCTION__);
+            return -1;
+        }
+        JNIEnv* env = NULL;
+        bool attached = false;
+        if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // try to attach the thread and get the env
+            // Attach this thread to JVM
+            jint res = g_jvm->AttachCurrentThread(&env, NULL);
+            if ((res < 0) || !env)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             -1, "%s: Could not attach thread to JVM (%d, %p)",
+                             __FUNCTION__, res, env);
+                return -1;
+            }
+            attached = true;
+        }
+        env->DeleteGlobalRef(g_javaCmDevInfoObject);
+        env->DeleteGlobalRef(g_javaCmDevInfoClass);
+        env->DeleteGlobalRef(g_javaCmClass);
+        if (attached && g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+            return -1;
+        }
+        // Unreachable dead store to 'env' removed (followed the return below).
+        return 0;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+                                                        JNIEnv*& env,
+                                                        jclass& javaCmDevInfoClass,
+                                                        jobject& javaCmDevInfoObject,
+                                                        bool& attached)
+{
+    // get the JNI env for this thread
+    if (!g_jvm)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: SetAndroidObjects not called with a valid JVM.",
+                     __FUNCTION__);
+        return -1;
+    }
+    attached = false;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        attached = true;
+    }
+    javaCmDevInfoClass = g_javaCmDevInfoClass;
+    javaCmDevInfoObject = g_javaCmDevInfoObject;
+    return 0;
+
+}
+
+WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(bool attached)
+{
+    if (attached && g_jvm->DetachCurrentThread() < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+                     "%s: Could not detach thread from JVM", __FUNCTION__);
+        return -1;
+    }
+    return 0;
+}
+
+/*
+ * JNI callback from Java class. Called when the camera has a new frame to deliver
+ * Class:     org_webrtc_capturemodule_VideoCaptureAndroid
+ * Method:    ProvideCameraFrame
+ * Signature: ([BIJ)V
+ */
+void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
+                                                    jobject,
+                                                    jbyteArray javaCameraFrame,
+                                                    jint length,
+                                                    jlong context)
+{
+    VideoCaptureAndroid* captureModule=reinterpret_cast<VideoCaptureAndroid*>(context);
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
+                 -1, "%s: IncomingFrame %d", __FUNCTION__,length);
+    jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
+    captureModule->IncomingFrame((WebRtc_UWord8*) cameraFrame,
+                                 length,captureModule->_frameInfo,0);
+    env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT);
+}
+
+
+
+VideoCaptureAndroid::VideoCaptureAndroid(const WebRtc_Word32 id)
+    : VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL),
+      _captureStarted(false)
+
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                 "%s: context %x", __FUNCTION__, (int) this);
+}
+// ----------------------------------------------------------------------------
+//  Init
+//
+//  Initializes needed Java resources like the JNI interface to
+//  VideoCaptureAndroid.java
+// ----------------------------------------------------------------------------
+WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id,
+                                        const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    const int nameLength = strlen((char*) deviceUniqueIdUTF8);
+    if (nameLength >= kVideoCaptureUniqueNameLength)
+    {
+        return -1;
+    }
+
+    // Store the device name
+    _deviceUniqueId = new WebRtc_UWord8[nameLength + 1];
+    memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+    if (_capInfo.Init() != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Failed to initialize CaptureDeviceInfo", __FUNCTION__);
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
+                 __FUNCTION__);
+    // use the jvm that has been set
+    if (!g_jvm)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: Not a valid Java VM pointer", __FUNCTION__);
+        return -1;
+    }
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "get method id");
+
+    // get the method ID for the Android Java CaptureDeviceInfoClass AllocateCamera factory method.
+    char signature[256];
+    sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
+
+    jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
+                                     signature);
+    if (cid == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s: could not get constructor ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+
+    jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+    // construct the object by calling the static constructor object
+    jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
+                                                       cid, (jint) id,
+                                                       (jlong) this,
+                                                       capureIdString);
+    if (!javaCameraObjLocal)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s: could not create Java Capture object", __FUNCTION__);
+        return -1;
+    }
+
+    // create a reference to the object (to tell JNI that we are referencing it
+    // after this function has returned)
+    _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal);
+    if (!_javaCaptureObj)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id,
+                     "%s: could not create Java camera object reference",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaCameraObjLocal);
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    return 0;
+}
+
+// Destructor: tells the Java side to release the camera (so other apps can
+// use it) via the static DeleteVideoCaptureAndroid method, then drops the
+// global JNI reference created in Init().
+VideoCaptureAndroid::~VideoCaptureAndroid()
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
+                 __FUNCTION__);
+    if (_javaCaptureObj == NULL || g_jvm == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: Nothing to clean", __FUNCTION__);
+    }
+    else
+    {
+        bool isAttached = false;
+        // get the JNI env for this thread
+        JNIEnv *env;
+        if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // try to attach the thread and get the env
+            // Attach this thread to JVM
+            jint res = g_jvm->AttachCurrentThread(&env, NULL);
+            if ((res < 0) || !env)
+            {
+                // NOTE(review): execution continues after a failed attach and
+                // env is dereferenced below while possibly null/uninitialized;
+                // consider bailing out here.
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s: Could not attach thread to JVM (%d, %p)",
+                             __FUNCTION__, res, env);
+            }
+            else
+            {
+                isAttached = true;
+            }
+        }
+
+        // get the method ID for the Android Java CaptureClass static
+        // DeleteVideoCaptureAndroid  method. Call this to release the camera so
+        // another application can use it.
+        jmethodID cid = env->GetStaticMethodID(g_javaCmClass,
+                                               "DeleteVideoCaptureAndroid",
+                                               "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
+        if (cid != NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                         "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
+            // Close the camera by calling the static destruct function.
+            env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
+
+            // Delete global object ref to the camera.
+            env->DeleteGlobalRef(_javaCaptureObj);
+            _javaCaptureObj = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: Failed to find DeleteVideoCaptureAndroid id",
+                         __FUNCTION__);
+        }
+
+        // Detach this thread if it was attached
+        if (isAttached)
+        {
+            if (g_jvm->DetachCurrentThread() < 0)
+            {
+                // NOTE(review): trace module is kTraceAudioDevice in video
+                // capture code — looks like a copy/paste slip (recurs below).
+                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
+                             _id, "%s: Could not detach thread from JVM",
+                             __FUNCTION__);
+            }
+        }
+    }
+}
+
+// Starts capture through the Java camera object using the best matching
+// capability for |capability|. On success stores the requested capability
+// and sets _captureStarted. Returns the Java StartCapture result (0 = ok).
+WebRtc_Word32 VideoCaptureAndroid::StartCapture(
+                                        const VideoCaptureCapability& capability)
+{
+    CriticalSectionScoped cs(_apiCs);
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: ", __FUNCTION__);
+
+    bool isAttached = false;
+    WebRtc_Word32 result = 0;
+    // get the JNI env for this thread
+    JNIEnv *env;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            // NOTE(review): env may be null below if the attach failed.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+        }
+        else
+        {
+            isAttached = true;
+        }
+    }
+
+    if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
+                                          _frameInfo) < 0)
+    {
+        // NOTE(review): this early return skips the DetachCurrentThread call
+        // at the bottom, leaving the thread attached if we attached it above.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: GetBestMatchedCapability failed. Req cap w%d h%d",
+                     __FUNCTION__, capability.width, capability.height);
+        return -1;
+    }
+
+    // Store the new expected capture delay
+    _captureDelay = _frameInfo.expectedCaptureDelay;
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                 "%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
+                 _frameInfo.height);
+
+    // Look up the Java StartCapture(int,int,int) instance method.
+    // NOTE(review): the lookup uses g_javaCmClass while the call below
+    // targets the _javaCaptureObj instance — confirm they are the same class.
+    jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
+    if (cid != NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                     "%s: Call StartCapture", __FUNCTION__);
+        // Start the camera by calling the Java StartCapture method.
+        result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width,
+                                    _frameInfo.height, _frameInfo.maxFPS);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: Failed to find StartCapture id", __FUNCTION__);
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+    if (result == 0)
+    {
+        _requestedCapability = capability;
+        _captureStarted = true;
+    }
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: result %d", __FUNCTION__, result);
+    return result;
+}
+// Stops capture via the Java StopCapture method and clears the stored
+// requested capability / frame info. Returns the Java result (0 = ok).
+WebRtc_Word32 VideoCaptureAndroid::StopCapture()
+{
+    CriticalSectionScoped cs(_apiCs);
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: ", __FUNCTION__);
+
+    bool isAttached = false;
+    WebRtc_Word32 result = 0;
+    // get the JNI env for this thread
+    JNIEnv *env = NULL;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            // NOTE(review): env stays NULL here yet is dereferenced below.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+        }
+        else
+        {
+            isAttached = true;
+        }
+    }
+
+    // Forget the previously requested/negotiated capture settings.
+    memset(&_requestedCapability, 0, sizeof(_requestedCapability));
+    memset(&_frameInfo, 0, sizeof(_frameInfo));
+
+    // get the method ID for the Android Java CaptureClass StopCapture  method.
+    jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I");
+    if (cid != NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                     "%s: Call StopCapture", __FUNCTION__);
+        // Stop the camera by calling the Java StopCapture method.
+        result = env->CallIntMethod(_javaCaptureObj, cid);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: Failed to find StopCapture id", __FUNCTION__);
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+    _captureStarted = false;
+
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: result %d", __FUNCTION__, result);
+    return result;
+}
+
+// Returns true while capture is running (set by a successful StartCapture,
+// cleared by StopCapture).
+bool VideoCaptureAndroid::CaptureStarted()
+{
+    CriticalSectionScoped cs(_apiCs);
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: ", __FUNCTION__);
+    return _captureStarted;
+}
+// Copies the capability requested in the last successful StartCapture call
+// into |settings|. Always returns 0.
+WebRtc_Word32 VideoCaptureAndroid::CaptureSettings(
+                                               VideoCaptureCapability& settings)
+{
+    CriticalSectionScoped cs(_apiCs);
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: ", __FUNCTION__);
+    settings = _requestedCapability;
+    return 0;
+}
+
+// Updates the base-class rotation setting, then forwards the rotation (in
+// degrees) to the Java SetPreviewRotation(int) method. Returns 0 on success
+// (also when the base-class call rejects the rotation), -1 on JNI failure.
+WebRtc_Word32 VideoCaptureAndroid::SetCaptureRotation(
+                                                  VideoCaptureRotation rotation)
+{
+    CriticalSectionScoped cs(_apiCs);
+    if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0)
+    {
+        if (!g_jvm)
+            return -1;
+
+        // get the JNI env for this thread
+        JNIEnv *env;
+        bool isAttached = false;
+
+        // get the JNI env for this thread
+        if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // try to attach the thread and get the env
+            // Attach this thread to JVM
+            jint res = g_jvm->AttachCurrentThread(&env, NULL);
+            if ((res < 0) || !env)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s: Could not attach thread to JVM (%d, %p)",
+                             __FUNCTION__, res, env);
+                return -1;
+            }
+            isAttached = true;
+        }
+
+        jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
+                                         "(I)V");
+        if (cid == NULL)
+        {
+            // NOTE(review): this early return skips DetachCurrentThread below,
+            // leaving the thread attached if we attached it above.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not get java SetPreviewRotation ID",
+                         __FUNCTION__);
+            return -1;
+        }
+        // Translate the enum into the degree value the Java side expects.
+        jint rotateFrame = 0;
+        switch (rotation)
+        {
+            case kCameraRotate0:
+                rotateFrame = 0;
+                break;
+            case kCameraRotate90:
+                rotateFrame = 90;
+                break;
+            case kCameraRotate180:
+                rotateFrame = 180;
+                break;
+            case kCameraRotate270:
+                rotateFrame = 270;
+                break;
+        }
+        env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);
+
+        // Detach this thread if it was attached
+        if (isAttached)
+        {
+            if (g_jvm->DetachCurrentThread() < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
+                             _id, "%s: Could not detach thread from JVM",
+                             __FUNCTION__);
+            }
+        }
+
+    }
+    return 0;
+}
+} //namespace videocapturemodule
+} //namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Android/video_capture_android.h b/trunk/src/modules/video_capture/main/source/Android/video_capture_android.h
new file mode 100644
index 0000000..abb4dc9
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Android/video_capture_android.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
+
+#include <jni.h>
+#include "device_info_android.h"
+#include "video_capture_impl.h"
+
+#define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Android camera capture module. Wraps a Java VideoCaptureAndroid object via
+// JNI; captured frames are pushed back from Java through ProvideCameraFrame.
+class VideoCaptureAndroid: public VideoCaptureImpl
+{
+public:
+    // Stores the JavaVM and application context shared by all instances.
+    static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext);
+    // Attaches the calling thread to the JVM if needed and hands out the
+    // shared device-info class/object; |attached| reports whether an attach
+    // was performed (pass it back to ReleaseAndroidDeviceInfoObjects).
+    static WebRtc_Word32 AttachAndUseAndroidDeviceInfoObjects(JNIEnv*& env,
+                                                 jclass& javaCmDevInfoClass,
+                                                 jobject& javaCmDevInfoObject,
+                                                 bool& attached);
+    static WebRtc_Word32 ReleaseAndroidDeviceInfoObjects(bool attached);
+
+    VideoCaptureAndroid(const WebRtc_Word32 id);
+    // Allocates the Java camera for |deviceUniqueIdUTF8| and keeps a global
+    // JNI reference to it in _javaCaptureObj.
+    virtual WebRtc_Word32 Init(const WebRtc_Word32 id,
+                               const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+
+    virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+    virtual bool CaptureStarted();
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+    virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation);
+
+protected:
+    virtual ~VideoCaptureAndroid();
+    // JNI callback invoked from Java with a captured frame; |context| carries
+    // the VideoCaptureAndroid instance pointer passed to AllocateCamera.
+    static void JNICALL ProvideCameraFrame (JNIEnv * env,
+                                            jobject,
+                                            jbyteArray javaCameraFrame,
+                                            jint length, jlong context);
+    DeviceInfoAndroid _capInfo; // Device enumeration helper.
+    jobject _javaCaptureObj; // Java Camera object.
+    VideoCaptureCapability _frameInfo; // Capability currently in use.
+    bool _captureStarted; // True between StartCapture and StopCapture.
+
+    static JavaVM* g_jvm;
+    static jclass g_javaCmClass;
+    static jclass g_javaCmDevInfoClass;
+    static jobject g_javaCmDevInfoObject; //Static java object implementing the needed device info functions;
+    static jobject g_javaContext; // Java Application context
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
diff --git a/trunk/src/modules/video_capture/main/source/External/device_info_external.cc b/trunk/src/modules/video_capture/main/source/External/device_info_external.cc
new file mode 100644
index 0000000..2aad081
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/External/device_info_external.cc
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../device_info_impl.h"
+#include "../video_capture_impl.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+// Stub DeviceInfo used with externally supplied capture: reports zero
+// devices and fails every device query, while Init/CreateCapabilityMap
+// succeed trivially.
+class ExternalDeviceInfo : public DeviceInfoImpl {
+ public:
+  ExternalDeviceInfo(const WebRtc_Word32 id)
+      : DeviceInfoImpl(id) {
+  }
+  virtual ~ExternalDeviceInfo() {}
+  // No enumerable devices in external-capture mode.
+  virtual WebRtc_UWord32 NumberOfDevices() { return 0; }
+  // No settings dialog available; always fails.
+  virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+      const WebRtc_UWord8* /*deviceUniqueIdUTF8*/,
+      const WebRtc_UWord8* /*dialogTitleUTF8*/,
+      void* /*parentWindow*/,
+      WebRtc_UWord32 /*positionX*/,
+      WebRtc_UWord32 /*positionY*/) { return -1; }
+  // No devices to name; always fails.
+  virtual WebRtc_Word32 GetDeviceName(
+      WebRtc_UWord32 deviceNumber,
+      WebRtc_UWord8* deviceNameUTF8,
+      WebRtc_UWord32 deviceNameLength,
+      WebRtc_UWord8* deviceUniqueIdUTF8,
+      WebRtc_UWord32 deviceUniqueIdUTF8Length,
+      WebRtc_UWord8* productUniqueIdUTF8=0,
+      WebRtc_UWord32 productUniqueIdUTF8Length=0) {
+    return -1;
+  }
+  // Nothing to enumerate; succeeds with an empty map.
+  virtual WebRtc_Word32 CreateCapabilityMap(
+      const WebRtc_UWord8* deviceUniqueIdUTF8) { return 0; }
+  virtual WebRtc_Word32 Init() { return 0; }
+};
+
+// Factory for the external-capture build: always returns the stub
+// ExternalDeviceInfo defined above. Caller owns the returned object.
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+    const WebRtc_Word32 id) {
+  return new ExternalDeviceInfo(id);
+}
+
+}  // namespace videocapturemodule
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/External/video_capture_external.cc b/trunk/src/modules/video_capture/main/source/External/video_capture_external.cc
new file mode 100644
index 0000000..e8cbccb
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/External/video_capture_external.cc
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../video_capture_impl.h"
+#include "ref_count.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+// Factory for the external-capture build: returns a bare, ref-counted
+// VideoCaptureImpl with no platform backend.
+// NOTE(review): |deviceUniqueIdUTF8| is intentionally ignored here — frames
+// are delivered externally, not captured from a device.
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    const WebRtc_UWord8* deviceUniqueIdUTF8) {
+  RefCountImpl<VideoCaptureImpl>* implementation =
+      new RefCountImpl<VideoCaptureImpl>(id);
+  return implementation;
+}
+
+}  // namespace videocapturemodule
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Linux/device_info_linux.cc b/trunk/src/modules/video_capture/main/source/Linux/device_info_linux.cc
new file mode 100644
index 0000000..c67885f
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Linux/device_info_linux.cc
@@ -0,0 +1,330 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_linux.h"
+
+#include <errno.h>
+#include <unistd.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+//v4l includes
+#include <linux/videodev2.h>
+
+#include "ref_count.h"
+#include "trace.h"
+
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Factory for the Linux (V4L2) build: returns a DeviceInfoLinux instance.
+// Caller owns the returned object.
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo(const WebRtc_Word32 id)
+{
+    videocapturemodule::DeviceInfoLinux *deviceInfo =
+                    new videocapturemodule::DeviceInfoLinux(id);
+    if (!deviceInfo)
+    {
+        // NOTE(review): dead branch — plain `new` throws std::bad_alloc
+        // rather than returning NULL, so this assignment is a no-op.
+        deviceInfo = NULL;
+    }
+
+    return deviceInfo;
+}
+
+// Trivial constructor; all state lives in the DeviceInfoImpl base.
+DeviceInfoLinux::DeviceInfoLinux(const WebRtc_Word32 id)
+    : DeviceInfoImpl(id)
+{
+}
+
+// Nothing to initialize for V4L2 device enumeration; always succeeds.
+WebRtc_Word32 DeviceInfoLinux::Init()
+{
+    return 0;
+}
+
+DeviceInfoLinux::~DeviceInfoLinux()
+{
+}
+
+// Counts capture devices by probing /dev/video0 .. /dev/video63: each node
+// that can be opened read-only counts as one device.
+WebRtc_UWord32 DeviceInfoLinux::NumberOfDevices()
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
+
+    WebRtc_UWord32 count = 0;
+    char device[20];
+    int fd = -1;
+
+    /* detect /dev/video [0-63]VideoCaptureModule entries */
+    for (int n = 0; n < 64; n++)
+    {
+        sprintf(device, "/dev/video%d", n);
+        if ((fd = open(device, O_RDONLY)) != -1)
+        {
+            close(fd);
+            count++;
+        }
+    }
+
+    return count;
+}
+
+// Returns the display name (v4l2 `card`) and unique id (v4l2 `bus_info`) of
+// the |deviceNumber|-th openable /dev/video* node. Returns 0 on success,
+// -1 if the device is not found, QUERYCAP fails, or a buffer is too small.
+WebRtc_Word32 DeviceInfoLinux::GetDeviceName(
+                                         WebRtc_UWord32 deviceNumber,
+                                         WebRtc_UWord8* deviceNameUTF8,
+                                         WebRtc_UWord32 deviceNameLength,
+                                         WebRtc_UWord8* deviceUniqueIdUTF8,
+                                         WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                         WebRtc_UWord8* /*productUniqueIdUTF8*/,
+                                         WebRtc_UWord32 /*productUniqueIdUTF8Length*/)
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
+
+    // Travel through /dev/video [0-63]
+    WebRtc_UWord32 count = 0;
+    char device[20];
+    int fd = -1;
+    bool found = false;
+    for (int n = 0; n < 64; n++)
+    {
+        sprintf(device, "/dev/video%d", n);
+        if ((fd = open(device, O_RDONLY)) != -1)
+        {
+            if (count == deviceNumber) {
+                // Found the device
+                found = true;
+                break;
+            } else {
+                close(fd);
+                count++;
+            }
+        }
+    }
+
+    if (!found)
+        return -1;
+
+    // query device capabilities
+    struct v4l2_capability cap;
+    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in querying the device capability for device %s. errno = %d",
+                   device, errno);
+        close(fd);
+        return -1;
+    }
+
+    close(fd);
+
+    // NOTE(review): cameraName is only NUL-terminated if cap.card contains a
+    // NUL within its fixed-size array — presumably guaranteed by the driver,
+    // but worth confirming.
+    char cameraName[64];
+    memset(deviceNameUTF8, 0, deviceNameLength);
+    memcpy(cameraName, cap.card, sizeof(cap.card));
+
+    if (deviceNameLength >= strlen(cameraName))
+    {
+        // NOTE(review): when deviceNameLength == strlen(cameraName) the copy
+        // fills the whole buffer and leaves no NUL terminator; the check
+        // should arguably be `deviceNameLength > strlen(cameraName)`.
+        memcpy(deviceNameUTF8, cameraName, strlen(cameraName));
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "buffer passed is too small");
+        return -1;
+    }
+
+    if (cap.bus_info[0] != 0) // may not available in all drivers
+    {
+        // copy device id 
+        if (deviceUniqueIdUTF8Length >= strlen((const char*) cap.bus_info))
+        {
+            // NOTE(review): same off-by-one terminator concern as above.
+            memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
+            memcpy(deviceUniqueIdUTF8, cap.bus_info,
+                   strlen((const char*) cap.bus_info));
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "buffer passed is too small");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// Rebuilds _captureCapabilities for the device identified by
+// |deviceUniqueIdUTF8| (matched against v4l2 bus_info, falling back to the
+// card name). Returns the number of capabilities found, or -1 on failure.
+WebRtc_Word32 DeviceInfoLinux::CreateCapabilityMap(
+                                        const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    int fd;
+    char device[32];
+    bool found = false;
+
+    const WebRtc_Word32 deviceUniqueIdUTF8Length =
+                            (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "Device name too long");
+        return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+               "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
+
+    /* detect /dev/video [0-63] entries */
+    for (int n = 0; n < 64; ++n)
+    {
+        sprintf(device, "/dev/video%d", n);
+        fd = open(device, O_RDONLY);
+        if (fd == -1)
+          continue;
+
+        // query device capabilities
+        struct v4l2_capability cap;
+        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
+        {
+            if (cap.bus_info[0] != 0)
+            {
+                if (strncmp((const char*) cap.bus_info,
+                            (const char*) deviceUniqueIdUTF8,
+                            strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+                {
+                    found = true;
+                    break; // fd matches with device unique id supplied
+                }
+            }
+            else //match for device name
+            {
+                if (IsDeviceNameMatches((const char*) cap.card,
+                                        (const char*) deviceUniqueIdUTF8))
+                {
+                    found = true;
+                    break;
+                }
+            }
+        }
+        close(fd); // close since this is not the matching device
+    }
+
+    if (!found)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
+        return -1;
+    }
+
+    // now fd will point to the matching device
+    // reset old capability map
+    MapItem* item = NULL;
+    while ((item = _captureCapabilities.Last()))
+    {
+        delete static_cast<VideoCaptureCapability*> (item->GetItem());
+        _captureCapabilities.Erase(item);
+    }
+
+    // Probe the matching fd for supported formats/sizes, then release it.
+    int size = FillCapabilityMap(fd);
+    close(fd);
+
+    // Store the new used device name
+    _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+    _lastUsedDeviceName = (WebRtc_UWord8*) realloc(_lastUsedDeviceName,
+                                                   _lastUsedDeviceNameLength + 1);
+    // +1 copies the trailing NUL of deviceUniqueIdUTF8 as well.
+    memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1);
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d",
+               _captureCapabilities.Size());
+
+    return size;
+}
+
+// Returns true if |deviceUniqueIdUTF8| starts with |name| (prefix match on
+// the v4l2 card name).
+bool DeviceInfoLinux::IsDeviceNameMatches(const char* name,
+                                                      const char* deviceUniqueIdUTF8)
+{
+    if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0)
+            return true;
+    return false;
+}
+
+// Probes |fd| with VIDIOC_TRY_FMT over a fixed grid of pixel formats and
+// frame sizes, inserting a VideoCaptureCapability into _captureCapabilities
+// for every combination the driver accepts unchanged. Returns the number of
+// capabilities stored.
+WebRtc_Word32 DeviceInfoLinux::FillCapabilityMap(int fd)
+{
+
+    // set image format
+    struct v4l2_format video_fmt;
+    memset(&video_fmt, 0, sizeof(struct v4l2_format));
+
+    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    video_fmt.fmt.pix.sizeimage = 0;
+
+    int totalFmts = 3;
+    unsigned int videoFormats[] = {
+        V4L2_PIX_FMT_MJPEG,
+        V4L2_PIX_FMT_YUV420,
+        V4L2_PIX_FMT_YUYV };
+
+    int sizes = 13;
+    unsigned int size[][2] = { { 128, 96 }, { 160, 120 }, { 176, 144 },
+                               { 320, 240 }, { 352, 288 }, { 640, 480 },
+                               { 704, 576 }, { 800, 600 }, { 960, 720 },
+                               { 1280, 720 }, { 1024, 768 }, { 1440, 1080 },
+                               { 1920, 1080 } };
+
+    int index = 0;
+    for (int fmts = 0; fmts < totalFmts; fmts++)
+    {
+        for (int i = 0; i < sizes; i++)
+        {
+            video_fmt.fmt.pix.pixelformat = videoFormats[fmts];
+            video_fmt.fmt.pix.width = size[i][0];
+            video_fmt.fmt.pix.height = size[i][1];
+
+            // TRY_FMT may adjust width/height; only accept exact matches.
+            if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0)
+            {
+                if ((video_fmt.fmt.pix.width == size[i][0])
+                    && (video_fmt.fmt.pix.height == size[i][1]))
+                {
+                    VideoCaptureCapability *cap = new VideoCaptureCapability();
+                    cap->width = video_fmt.fmt.pix.width;
+                    cap->height = video_fmt.fmt.pix.height;
+                    cap->expectedCaptureDelay = 120;
+                    if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
+                    {
+                        cap->rawType = kVideoYUY2;
+                    }
+                    else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
+                    {
+                        cap->rawType = kVideoMJPEG;
+                    }
+                    // NOTE(review): for V4L2_PIX_FMT_YUV420 rawType is left at
+                    // the VideoCaptureCapability default — presumably I420;
+                    // confirm and consider setting it explicitly.
+
+                    // get fps of current camera mode
+                    // V4l2 does not have a stable method of knowing so we just guess.
+                    if(cap->width >= 800 && cap->rawType != kVideoMJPEG)
+                    {
+                        cap->maxFPS = 15;
+                    }
+                    else
+                    {
+                        cap->maxFPS = 30;
+                    }
+
+                    _captureCapabilities.Insert(index, cap);
+                    index++;
+                    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                               "Camera capability, width:%d height:%d type:%d fps:%d",
+                               cap->width, cap->height, cap->rawType, cap->maxFPS);
+                }
+            }
+        }
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d",
+               _captureCapabilities.Size());
+    return _captureCapabilities.Size();
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Linux/device_info_linux.h b/trunk/src/modules/video_capture/main/source/Linux/device_info_linux.h
new file mode 100644
index 0000000..3d0e031
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Linux/device_info_linux.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
+
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// V4L2-based device enumeration: probes /dev/video* nodes for names, unique
+// ids (bus_info) and supported capture capabilities.
+class DeviceInfoLinux: public DeviceInfoImpl
+{
+public:
+    DeviceInfoLinux(const WebRtc_Word32 id);
+    virtual ~DeviceInfoLinux();
+    // Number of openable /dev/video* nodes.
+    virtual WebRtc_UWord32 NumberOfDevices();
+    virtual WebRtc_Word32 GetDeviceName(WebRtc_UWord32 deviceNumber,
+                                      WebRtc_UWord8* deviceNameUTF8,
+                                      WebRtc_UWord32 deviceNameLength,
+                                      WebRtc_UWord8* deviceUniqueIdUTF8,
+                                      WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                      WebRtc_UWord8* productUniqueIdUTF8=0,
+                                      WebRtc_UWord32 productUniqueIdUTF8Length=0);
+    /*
+    * Fills the membervariable _captureCapabilities with capabilites for the given device name.
+    */
+    virtual WebRtc_Word32 CreateCapabilityMap (const WebRtc_UWord8* deviceUniqueIdUTF8);
+    // No settings dialog on Linux; always fails.
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(const WebRtc_UWord8* /*deviceUniqueIdUTF8*/,
+                                            const WebRtc_UWord8* /*dialogTitleUTF8*/,
+                                            void* /*parentWindow*/,
+                                            WebRtc_UWord32 /*positionX*/,
+                                            WebRtc_UWord32 /*positionY*/) { return -1;}
+    // Probes an open V4L2 fd and stores accepted format/size combinations.
+    WebRtc_Word32 FillCapabilityMap(int fd);
+    WebRtc_Word32 Init();
+private:
+
+    // True if deviceUniqueIdUTF8 starts with the given card name.
+    bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
diff --git a/trunk/src/modules/video_capture/main/source/Linux/video_capture_linux.cc b/trunk/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
new file mode 100644
index 0000000..6b47f24
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
@@ -0,0 +1,464 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <linux/videodev2.h>
+#include <errno.h>
+#include <stdio.h>
+#include <sys/mman.h>
+#include <string.h>
+
+#include <iostream>
+#include <new>
+
+#include "ref_count.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "video_capture_linux.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Factory: creates a ref-counted V4L2 capture module bound to the device
+// whose unique id matches `deviceUniqueId`.  Returns NULL when the device
+// cannot be found or initialized.
+VideoCaptureModule* VideoCaptureImpl::Create(const WebRtc_Word32 id,
+                                             const WebRtc_UWord8* deviceUniqueId)
+{
+    RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation =
+        new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id);
+
+    // NOTE(review): plain `new` throws on failure, so the NULL test is
+    // defensive only; the Init() result is the meaningful check.
+    if (!implementation || implementation->Init(deviceUniqueId) != 0)
+    {
+        delete implementation;
+        implementation = NULL;
+    }
+
+    return implementation;
+}
+
+// Initializes all members to "not started" defaults; the device is selected
+// later in Init() and opened in StartCapture().
+VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const WebRtc_Word32 id)
+    : VideoCaptureImpl(id), 
+      _captureThread(NULL),
+      _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _deviceId(-1), 
+      _deviceFd(-1),
+      _buffersAllocatedByDevice(-1),
+      _currentWidth(-1), 
+      _currentHeight(-1),
+      _currentFrameRate(-1), 
+      _captureStarted(false),
+      _captureVideoType(kVideoI420), 
+      _pool(NULL)
+{
+}
+
+// Locates the /dev/video* device whose V4L2 bus info matches
+// deviceUniqueIdUTF8 and stores its index in _deviceId.
+// Returns 0 on success, -1 when no device matches.
+WebRtc_Word32 VideoCaptureModuleV4L2::Init(const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    // Keep a private copy of the unique id for later queries.
+    int len = strlen((const char*) deviceUniqueIdUTF8);
+    _deviceUniqueId = new (std::nothrow) WebRtc_UWord8[len + 1];
+    if (_deviceUniqueId)
+    {
+        memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
+    }
+
+    int fd;
+    char device[32];
+    bool found = false;
+
+    /* detect /dev/video [0-63] entries */
+    int n;
+    for (n = 0; n < 64; n++)
+    {
+        // snprintf (instead of sprintf) cannot overflow `device`.
+        snprintf(device, sizeof(device), "/dev/video%d", n);
+        if ((fd = open(device, O_RDONLY)) != -1)
+        {
+            // query device capabilities
+            struct v4l2_capability cap;
+            if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
+            {
+                if (cap.bus_info[0] != 0)
+                {
+                    if (strncmp((const char*) cap.bus_info,
+                                (const char*) deviceUniqueIdUTF8,
+                                strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+                    {
+                        close(fd);
+                        found = true;
+                        break; // fd matches with device unique id supplied
+                    }
+                }
+            }
+            close(fd); // close since this is not the matching device
+        }
+    }
+    if (!found)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
+        return -1;
+    }
+    _deviceId = n; //store the device id
+    return 0;
+}
+
+VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
+{
+    // Stop any running capture before tearing down resources.
+    StopCapture();
+    if (_captureCritSect)
+    {
+        delete _captureCritSect;
+    }
+    // StopCapture() closes the fd only when capture was running; close it
+    // here as well in case the device was opened but never started.
+    if (_deviceFd != -1)
+      close(_deviceFd);
+}
+
+// Opens /dev/video<_deviceId>, negotiates a pixel format (preferring MJPEG
+// above VGA, I420 otherwise), allocates mmap buffers, starts the capture
+// thread and turns streaming on.  Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
+                                        const VideoCaptureCapability& capability)
+{
+    // If already capturing with the same settings this is a no-op;
+    // otherwise restart with the new settings.
+    if (_captureStarted)
+    {
+        if (capability.width == _currentWidth &&
+            capability.height == _currentHeight &&
+            _captureVideoType == capability.rawType)
+        {
+            return 0;
+        }
+        else
+        {
+            StopCapture();
+        }
+    }
+
+    CriticalSectionScoped cs(*_captureCritSect);
+    //first open /dev/video device
+    char device[20];
+    sprintf(device, "/dev/video%d", (int) _deviceId);
+
+    if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in opening %s errono = %d", device, errno);
+        return -1;
+    }
+
+    // NOTE(review): on the error paths below _deviceFd stays open; it is
+    // closed by the destructor (or by a later StopCapture).
+    // Supported video formats in preferred order.
+    // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
+    // I420 otherwise.
+    const int nFormats = 3;
+    unsigned int fmts[nFormats];
+    if (capability.width > 640 || capability.height > 480) {
+        fmts[0] = V4L2_PIX_FMT_MJPEG;
+        fmts[1] = V4L2_PIX_FMT_YUV420;
+        fmts[2] = V4L2_PIX_FMT_YUYV;
+    } else {
+        fmts[0] = V4L2_PIX_FMT_YUV420;
+        fmts[1] = V4L2_PIX_FMT_YUYV;
+        fmts[2] = V4L2_PIX_FMT_MJPEG;
+    }
+
+    struct v4l2_format video_fmt;
+    memset(&video_fmt, 0, sizeof(struct v4l2_format));
+    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    video_fmt.fmt.pix.sizeimage = 0;
+    video_fmt.fmt.pix.width = capability.width;
+    video_fmt.fmt.pix.height = capability.height;
+
+    // Pick the first format the driver accepts (VIDIOC_TRY_FMT does not
+    // change device state).
+    bool formatMatch = false;
+    for (int i = 0; i < nFormats; i++)
+    {
+        video_fmt.fmt.pix.pixelformat = fmts[i];
+        if (ioctl(_deviceFd, VIDIOC_TRY_FMT, &video_fmt) < 0)
+        {
+            continue;
+        }
+        else
+        {
+            formatMatch = true;
+            break;
+        }
+    }
+    if (!formatMatch)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "no supporting video formats found");
+        return -1;
+    }
+
+    if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
+        _captureVideoType = kVideoYUY2;
+    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
+        _captureVideoType = kVideoI420;
+    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
+        _captureVideoType = kVideoMJPEG;
+
+    //set format and frame size now
+    if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in VIDIOC_S_FMT, errno = %d", errno);
+        return -1;
+    }
+
+    // initialize current width and height (read back from video_fmt, which
+    // VIDIOC_S_FMT may have adjusted)
+    _currentWidth = video_fmt.fmt.pix.width;
+    _currentHeight = video_fmt.fmt.pix.height;
+    _captureDelay = 120;   // fixed delay estimate — presumably ms; TODO confirm
+    // No way of knowing frame rate, make a guess.
+    if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG)
+      _currentFrameRate = 15;
+    else
+      _currentFrameRate = 30;
+
+    if (!AllocateVideoBuffers())
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "failed to allocate video capture buffers");
+        return -1;
+    }
+
+    //start capture thread;
+    if (!_captureThread)
+    {
+        _captureThread = ThreadWrapper::CreateThread(
+            VideoCaptureModuleV4L2::CaptureThread, this, kHighPriority);
+        unsigned int id;
+        _captureThread->Start(id);
+    }
+
+    // Needed to start UVC camera - from the uvcview application
+    enum v4l2_buf_type type;
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to turn on stream");
+        return -1;
+    }
+
+    _captureStarted = true;
+    return 0;
+}
+
+// Stops the capture thread, releases the mmap'ed buffers and closes the
+// device.  Safe to call when capture is not running (returns 0).
+WebRtc_Word32 VideoCaptureModuleV4L2::StopCapture()
+{
+    if (_captureThread)
+        _captureThread->SetNotAlive();// Make sure the capture thread stops using the critsect.
+
+
+    CriticalSectionScoped cs(*_captureCritSect);
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, -1, "StopCapture(), was running: %d",
+               _captureStarted);
+
+    if (!_captureStarted)
+    {
+        // we were not capturing!
+        return 0;
+    }
+
+    _captureStarted = false;
+
+    // stop the capture thread
+    // Delete capture update thread and event
+    if (_captureThread)
+    {
+        ThreadWrapper* temp = _captureThread;
+        _captureThread = NULL;
+        temp->SetNotAlive();
+        // NOTE(review): the wrapper is deleted only when Stop() succeeds; a
+        // failed Stop() leaks the ThreadWrapper object — confirm intent.
+        if (temp->Stop())
+        {
+            delete temp;
+        }
+    }
+
+    DeAllocateVideoBuffers();
+    close(_deviceFd);
+    _deviceFd = -1;
+
+    return 0;
+}
+
+//critical section protected by the caller
+
+// Requests kNoOfV4L2Bufffers mmap buffers from the driver, maps each one
+// into _pool and queues it for capture.  Returns true on success.  On
+// failure every mapping made so far is released — the caller aborts
+// StartCapture and never calls DeAllocateVideoBuffers when this returns
+// false, so the original code leaked the partial pool.
+bool VideoCaptureModuleV4L2::AllocateVideoBuffers()
+{
+    struct v4l2_requestbuffers rbuffer;
+    memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
+
+    rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    rbuffer.memory = V4L2_MEMORY_MMAP;
+    rbuffer.count = kNoOfV4L2Bufffers;
+
+    if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "Could not get buffers from device. errno = %d", errno);
+        return false;
+    }
+
+    // The driver may grant more buffers than requested; cap at our limit.
+    if (rbuffer.count > kNoOfV4L2Bufffers)
+        rbuffer.count = kNoOfV4L2Bufffers;
+
+    _buffersAllocatedByDevice = rbuffer.count;
+
+    //Map the buffers
+    _pool = new Buffer[rbuffer.count];
+
+    for (unsigned int i = 0; i < rbuffer.count; i++)
+    {
+        struct v4l2_buffer buffer;
+        memset(&buffer, 0, sizeof(v4l2_buffer));
+        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buffer.memory = V4L2_MEMORY_MMAP;
+        buffer.index = i;
+
+        if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0)
+        {
+            // Fix: release the buffers mapped so far instead of leaking them.
+            for (unsigned int j = 0; j < i; j++)
+                munmap(_pool[j].start, _pool[j].length);
+            delete[] _pool;
+            _pool = NULL;
+            _buffersAllocatedByDevice = 0;
+            return false;
+        }
+
+        _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
+                              _deviceFd, buffer.m.offset);
+
+        if (MAP_FAILED == _pool[i].start)
+        {
+            for (unsigned int j = 0; j < i; j++)
+                munmap(_pool[j].start, _pool[j].length);
+            // Fix: also free the pool array itself on failure.
+            delete[] _pool;
+            _pool = NULL;
+            _buffersAllocatedByDevice = 0;
+            return false;
+        }
+
+        _pool[i].length = buffer.length;
+
+        if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0)
+        {
+            // Fix: release every mapping, including buffer i, on failure.
+            for (unsigned int j = 0; j <= i; j++)
+                munmap(_pool[j].start, _pool[j].length);
+            delete[] _pool;
+            _pool = NULL;
+            _buffersAllocatedByDevice = 0;
+            return false;
+        }
+    }
+    return true;
+}
+
+// Releases every buffer mapped by AllocateVideoBuffers() and asks the
+// driver to stop streaming.  Always returns true.
+bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers()
+{
+    // Unmap each driver buffer held in the pool, then drop the pool itself.
+    for (int index = 0; index < _buffersAllocatedByDevice; ++index)
+    {
+        munmap(_pool[index].start, _pool[index].length);
+    }
+    delete[] _pool;
+
+    // Turn streaming off; a failure here is logged but treated as non-fatal.
+    enum v4l2_buf_type bufferType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &bufferType) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "VIDIOC_STREAMOFF error. errno: %d", errno);
+    }
+
+    return true;
+}
+
+// True while a capture session started by StartCapture() is active.
+bool VideoCaptureModuleV4L2::CaptureStarted()
+{
+    return _captureStarted;
+}
+
+// Thread entry point: forwards to the instance's capture loop.
+bool VideoCaptureModuleV4L2::CaptureThread(void* obj)
+{
+    VideoCaptureModuleV4L2* capture = static_cast<VideoCaptureModuleV4L2*>(obj);
+    return capture->CaptureProcess();
+}
+// Capture loop body, invoked repeatedly by the ThreadWrapper.  Waits (with a
+// 1 s timeout) for the device fd to become readable, dequeues one filled
+// buffer, delivers it via IncomingFrame() and re-queues the buffer.
+// Returns false only when the loop should terminate.
+bool VideoCaptureModuleV4L2::CaptureProcess()
+{
+    int retVal = 0;
+    fd_set rSet;
+    struct timeval timeout;
+
+    _captureCritSect->Enter();
+    if (!_captureThread)
+    {
+        // terminating
+        _captureCritSect->Leave();
+        return false;
+    }
+
+    FD_ZERO(&rSet);
+    FD_SET(_deviceFd, &rSet);
+    timeout.tv_sec = 1;
+    timeout.tv_usec = 0;
+
+    // The fd was opened O_NONBLOCK, so select() is used to wait for a frame
+    // without busy-polling.
+    retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
+    if (retVal < 0 && errno != EINTR) // continue if interrupted
+    {
+        // select failed
+        _captureCritSect->Leave();
+        return false;
+    }
+    else if (retVal == 0)
+    {
+        // select timed out
+        _captureCritSect->Leave();
+        return true;
+    }
+    else if (!FD_ISSET(_deviceFd, &rSet))
+    {
+        // no event on the camera handle
+        _captureCritSect->Leave();
+        return true;
+    }
+
+    if (_captureStarted)
+    {
+        struct v4l2_buffer buf;
+        memset(&buf, 0, sizeof(struct v4l2_buffer));
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_MMAP;
+        // dequeue a buffer - repeat until dequeued properly!
+        while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
+        {
+            if (errno != EINTR)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                           "could not sync on a buffer on device %s", strerror(errno));
+                _captureCritSect->Leave();
+                return true;
+            }
+        }
+        VideoCaptureCapability frameInfo;
+        frameInfo.width = _currentWidth;
+        frameInfo.height = _currentHeight;
+        frameInfo.rawType = _captureVideoType;
+
+        // convert to I420 if needed
+        IncomingFrame((unsigned char*) _pool[buf.index].start,
+                      buf.bytesused, frameInfo);
+        // enqueue the buffer again
+        if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                       "Failed to enqueue capture buffer");
+        }
+    }
+    _captureCritSect->Leave();
+    // Best-effort yield of the remaining time slice before the next iteration.
+    usleep(0);
+    return true;
+}
+
+// Reports the capture format currently configured on the device.
+WebRtc_Word32 VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings)
+{
+    settings.width   = _currentWidth;
+    settings.height  = _currentHeight;
+    settings.maxFPS  = _currentFrameRate;
+    settings.rawType = _captureVideoType;
+    return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Linux/video_capture_linux.h b/trunk/src/modules/video_capture/main/source/Linux/video_capture_linux.h
new file mode 100644
index 0000000..1cb6702
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Linux/video_capture_linux.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+
+#include "common_types.h"
+#include "../video_capture_impl.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class ThreadWrapper;
+namespace videocapturemodule
+{
+// Video capture implementation for Linux using the V4L2 API.  Frames are
+// captured on a dedicated thread using mmap'ed driver buffers and delivered
+// through VideoCaptureImpl::IncomingFrame().
+class VideoCaptureModuleV4L2: public VideoCaptureImpl
+{
+public:
+    VideoCaptureModuleV4L2(WebRtc_Word32 id);
+    virtual ~VideoCaptureModuleV4L2();
+    // Resolves the /dev/video* device matching deviceUniqueId; returns 0 on
+    // success, -1 if no device matches.
+    virtual WebRtc_Word32 Init(const WebRtc_UWord8* deviceUniqueId);
+    virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+    virtual bool CaptureStarted();
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+private:
+    // Maximum number of driver buffers requested via VIDIOC_REQBUFS.
+    enum {kNoOfV4L2Bufffers=4};
+
+    static bool CaptureThread(void*);   // thread entry; forwards to CaptureProcess()
+    bool CaptureProcess();              // one iteration of the capture loop
+    bool AllocateVideoBuffers();        // mmap driver buffers into _pool
+    bool DeAllocateVideoBuffers();      // unmap buffers and stop streaming
+
+    ThreadWrapper* _captureThread;
+    CriticalSectionWrapper* _captureCritSect;
+
+    WebRtc_Word32 _deviceId;            // index N of the matched /dev/videoN
+    WebRtc_Word32 _deviceFd;            // open fd of the device; -1 when closed
+
+    WebRtc_Word32 _buffersAllocatedByDevice; // number of mmap'ed buffers in _pool
+    WebRtc_Word32 _currentWidth;
+    WebRtc_Word32 _currentHeight;
+    WebRtc_Word32 _currentFrameRate;
+    bool _captureStarted;
+    RawVideoType _captureVideoType;     // pixel format negotiated with the driver
+    // One mmap'ed V4L2 driver buffer.
+    struct Buffer
+    {
+        void *start;
+        size_t length;
+    };
+    Buffer *_pool;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.h b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.h
new file mode 100644
index 0000000..14ca344
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
+
+#import <QTKit/QTKit.h>
+
+#include <stdio.h>
+
+#include "../../video_capture_impl.h"
+#include "video_capture_qtkit_utility.h"
+#include "../../device_info_impl.h"
+
+
+// Forward declaration
+@class VideoCaptureMacQTKitObjC;
+@class VideoCaptureMacQTKitInfoObjC;
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// Video capture implementation for Mac OS X using QTKit.  The actual
+// capture work is delegated to the Objective-C classes
+// VideoCaptureMacQTKitObjC and VideoCaptureMacQTKitInfoObjC.
+class VideoCaptureMacQTKit : public VideoCaptureImpl
+{
+public:
+    VideoCaptureMacQTKit(const WebRtc_Word32 id);
+    virtual ~VideoCaptureMacQTKit();
+
+    /*
+    *   Destroy a video capture module object.
+    *
+    *   module - object created via a corresponding factory function.
+    */
+    static void Destroy(VideoCaptureModule* module);
+
+    // Binds this module to the capture device identified by
+    // deviceUniqueIdUTF8 (as reported by GetDeviceName).  An empty id is
+    // accepted and leaves no device selected.  Returns 0 / -1.
+    WebRtc_Word32 Init(const WebRtc_Word32 id,
+                       const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+
+    // Start/Stop
+    virtual WebRtc_Word32 StartCapture(
+        const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+
+    // Properties of the set device
+
+    virtual bool CaptureStarted();
+
+    WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+protected:
+    // Help functions
+    WebRtc_Word32 SetCameraOutput();
+
+private:
+    VideoCaptureMacQTKitObjC*        _captureDevice;   // ObjC capture session
+    VideoCaptureMacQTKitInfoObjC*    _captureInfo;     // ObjC device enumerator
+    bool                    _isCapturing;
+    WebRtc_Word32            _id;
+    WebRtc_Word32            _captureWidth;
+    WebRtc_Word32            _captureHeight;
+    WebRtc_Word32            _captureFrameRate;
+    WebRtc_UWord8            _currentDeviceNameUTF8[MAX_NAME_LENGTH];
+    WebRtc_UWord8            _currentDeviceUniqueIdUTF8[MAX_NAME_LENGTH];
+    WebRtc_UWord8            _currentDeviceProductUniqueIDUTF8[MAX_NAME_LENGTH];
+    WebRtc_Word32            _frameCount;
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.mm b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.mm
new file mode 100644
index 0000000..c4a02c9
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.mm
@@ -0,0 +1,223 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_qtkit.h"
+#import "video_capture_qtkit_objc.h"
+#import "video_capture_qtkit_info_objc.h"
+#include "trace.h"
+#include "critical_section_wrapper.h"
+#include "../../video_capture_config.h"
+
+namespace webrtc
+{
+
+namespace videocapturemodule
+{
+
+// Initializes members to QTKit defaults and zeroes the cached device-name
+// buffers; the ObjC capture objects are created later in Init().
+VideoCaptureMacQTKit::VideoCaptureMacQTKit(const WebRtc_Word32 id) :
+    VideoCaptureImpl(id),
+    _captureDevice(NULL),
+    _captureInfo(NULL),
+    _isCapturing(false),
+    _id(id),
+    _captureWidth(QTKIT_DEFAULT_WIDTH),
+    _captureHeight(QTKIT_DEFAULT_HEIGHT),
+    _captureFrameRate(QTKIT_DEFAULT_FRAME_RATE),
+    _frameCount(0)
+{
+
+    memset(_currentDeviceNameUTF8, 0, MAX_NAME_LENGTH);
+    memset(_currentDeviceUniqueIdUTF8, 0, MAX_NAME_LENGTH);
+    memset(_currentDeviceProductUniqueIDUTF8, 0, MAX_NAME_LENGTH);
+}
+
+VideoCaptureMacQTKit::~VideoCaptureMacQTKit()
+{
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "~VideoCaptureMacQTKit() called");
+    // Stop capture before releasing the ObjC capture object.
+    if(_captureDevice)
+    {
+        [_captureDevice stopCapture];
+        [_captureDevice release];
+    }
+
+    if(_captureInfo)
+    {
+        [_captureInfo release];
+    }
+}
+
+// Binds this capture module to the device whose unique id is
+// iDeviceUniqueIdUTF8.  An empty id is accepted and leaves the module
+// without a selected device.  Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoCaptureMacQTKit::Init(
+    const WebRtc_Word32 id, const WebRtc_UWord8* iDeviceUniqueIdUTF8)
+{
+    CriticalSectionScoped cs(_apiCs);
+
+
+    const WebRtc_Word32 nameLength =
+        (WebRtc_Word32) strlen((char*)iDeviceUniqueIdUTF8);
+    if(nameLength>kVideoCaptureUniqueNameLength)
+        return -1;
+
+    // Store the device name
+    _deviceUniqueId = new WebRtc_UWord8[nameLength+1];
+    memcpy(_deviceUniqueId, iDeviceUniqueIdUTF8,nameLength+1);
+
+    _captureDevice = [[VideoCaptureMacQTKitObjC alloc] init];
+    if(NULL == _captureDevice)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "Failed to create an instance of "
+                     "VideoCaptureMacQTKitObjC");
+        return -1;
+    }
+
+    if(-1 == [[_captureDevice registerOwner:this]intValue])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "Failed to register owner for _captureDevice");
+        return -1;
+    }
+
+    if(0 == strcmp((char*)iDeviceUniqueIdUTF8, ""))
+    {
+        // the user doesn't want to set a capture device at this time
+        return 0;
+    }
+
+    _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc]init];
+    if(nil == _captureInfo)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id, "Failed to create an instance of VideoCaptureMacQTKitInfoObjC");
+        return -1;
+    }
+
+    int captureDeviceCount = [[_captureInfo getCaptureDeviceCount]intValue];
+    if(captureDeviceCount < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "No Capture Devices Present");
+        return -1;
+    }
+
+    // Size the scratch buffers from NAME_LENGTH so the constant and the
+    // arrays cannot drift apart.
+    const int NAME_LENGTH = 1024;
+    WebRtc_UWord8 deviceNameUTF8[NAME_LENGTH] = "";
+    WebRtc_UWord8 deviceUniqueIdUTF8[NAME_LENGTH] = "";
+    WebRtc_UWord8 deviceProductUniqueIDUTF8[NAME_LENGTH] = "";
+
+    // Scan the enumerated devices for one whose unique id matches.
+    bool captureDeviceFound = false;
+    for(int index = 0; index < captureDeviceCount; index++){
+
+        memset(deviceNameUTF8, 0, NAME_LENGTH);
+        memset(deviceUniqueIdUTF8, 0, NAME_LENGTH);
+        memset(deviceProductUniqueIDUTF8, 0, NAME_LENGTH);
+        if(-1 == [[_captureInfo getDeviceNamesFromIndex:index
+                   DefaultName:deviceNameUTF8 WithLength:NAME_LENGTH
+                   AndUniqueID:deviceUniqueIdUTF8 WithLength:NAME_LENGTH
+                   AndProductID:deviceProductUniqueIDUTF8
+                   WithLength:NAME_LENGTH]intValue])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "GetDeviceName returned -1 for index %d", index);
+            return -1;
+        }
+        if(0 == strcmp((const char*)iDeviceUniqueIdUTF8,
+                       (char*)deviceUniqueIdUTF8))
+        {
+            // we have a match
+            captureDeviceFound = true;
+            break;
+        }
+    }
+
+    if(false == captureDeviceFound)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "Failed to find capture device unique ID %s",
+                     iDeviceUniqueIdUTF8);
+        return -1;
+    }
+
+    // at this point we know that the user has passed in a valid camera. Let's
+    // set it as the current.
+    if(-1 == [[_captureDevice
+               setCaptureDeviceById:(char*)deviceUniqueIdUTF8]intValue])
+    {
+        strcpy((char*)_deviceUniqueId, (char*)deviceUniqueIdUTF8);
+        // Fix: this trace has two %s conversions but the original passed no
+        // variadic arguments (undefined behavior); supply them.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to set capture device %s (unique ID %s) even "
+                     "though it was a valid return from "
+                     "VideoCaptureMacQTKitInfo",
+                     (char*)deviceNameUTF8, (char*)deviceUniqueIdUTF8);
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "successfully Init VideoCaptureMacQTKit" );
+    return 0;
+}
+
+// Applies the requested width/height/frame rate to the capture device and
+// starts frame delivery.  Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoCaptureMacQTKit::StartCapture(
+    const VideoCaptureCapability& capability)
+{
+
+    _captureWidth = capability.width;
+    _captureHeight = capability.height;
+    _captureFrameRate = capability.maxFPS;
+
+    if(-1 == [[_captureDevice setCaptureHeight:_captureHeight
+               AndWidth:_captureWidth AndFrameRate:_captureFrameRate]intValue])
+    {
+        // NOTE(review): this failure aborts the call but is traced at
+        // kTraceInfo level — consider kTraceError.
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "Could not set width=%d height=%d frameRate=%d",
+                     _captureWidth, _captureHeight, _captureFrameRate);
+        return -1;
+    }
+
+    if(-1 == [[_captureDevice startCapture]intValue])
+    {
+        return -1;
+    }
+    _isCapturing = true;
+    return 0;
+}
+
+// Tells the ObjC capture object to stop delivering frames.
+WebRtc_Word32 VideoCaptureMacQTKit::StopCapture()
+{
+    [_captureDevice stopCapture];
+    _isCapturing = false;
+    return 0;
+}
+
+// True while a capture session started by StartCapture() is active.
+bool VideoCaptureMacQTKit::CaptureStarted()
+{
+    return _isCapturing;
+}
+
+// Hands back the capture configuration that StartCapture() last applied.
+WebRtc_Word32 VideoCaptureMacQTKit::CaptureSettings(VideoCaptureCapability& settings)
+{
+    settings.width  = _captureWidth;
+    settings.height = _captureHeight;
+    settings.maxFPS = _captureFrameRate;
+    return 0;
+}
+
+
+// ********** begin functions inherited from DeviceInfoImpl **********
+
+// Placeholder capability type for QTKit; adds no fields beyond the base
+// VideoCaptureCapability.
+struct VideoCaptureCapabilityMacQTKit:public VideoCaptureCapability
+{
+    VideoCaptureCapabilityMacQTKit()
+    {
+    }
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.h b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.h
new file mode 100644
index 0000000..b6d05ad
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
+
+#include "../../video_capture_impl.h"
+#include "../../device_info_impl.h"
+#include "video_capture_qtkit_utility.h"
+
+#include "map_wrapper.h"
+
+
+@class VideoCaptureMacQTKitInfoObjC;
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// Device enumeration for Mac OS X / QTKit; wraps the Objective-C helper
+// VideoCaptureMacQTKitInfoObjC.
+class VideoCaptureMacQTKitInfo: public DeviceInfoImpl
+{
+public:
+
+   VideoCaptureMacQTKitInfo(const WebRtc_Word32 id);
+    virtual ~VideoCaptureMacQTKitInfo();
+
+    WebRtc_Word32 Init();
+
+    virtual WebRtc_UWord32 NumberOfDevices();
+
+    /*
+     * Returns the available capture devices.
+     * deviceNumber   -[in] index of capture device
+     * deviceNameUTF8 - friendly name of the capture device
+     * deviceUniqueIdUTF8 - unique name of the capture device if it exist.
+     *      Otherwise same as deviceNameUTF8
+     * productUniqueIdUTF8 - unique product id if it exist. Null terminated
+     *      otherwise.
+     */
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber, WebRtc_UWord8* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength, WebRtc_UWord8* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        WebRtc_UWord8* productUniqueIdUTF8 = 0,
+        WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+
+    /*
+     *   Returns the number of capabilities for this device
+     */
+    virtual WebRtc_Word32 NumberOfCapabilities(
+        const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    /*
+     *   Gets the capabilities of the named device
+     */
+    virtual WebRtc_Word32 GetCapability(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability);
+
+    /*
+     *  Gets the capability that best matches the requested width, height and frame rate.
+     *  Returns the deviceCapabilityNumber on success.
+     */
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const WebRtc_UWord8*deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting);
+
+    /*
+     * Display OS /capture device specific settings dialog
+     */
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        const WebRtc_UWord8* dialogTitleUTF8, void* parentWindow,
+        WebRtc_UWord32 positionX, WebRtc_UWord32 positionY);
+
+protected:
+    virtual WebRtc_Word32 CreateCapabilityMap(
+        const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    // Objective-C helper that performs the actual device queries.
+    VideoCaptureMacQTKitInfoObjC*    _captureInfo;
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.mm b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.mm
new file mode 100644
index 0000000..b13a8ab
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.mm
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "../../video_capture_config.h"
+#import "video_capture_qtkit_info_objc.h"
+
+#include "video_capture.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+VideoCaptureMacQTKitInfo::VideoCaptureMacQTKitInfo(const WebRtc_Word32 id) :
+    DeviceInfoImpl(id)
+{
+    // Owns the Objective-C helper that talks to QTKit; released in the dtor.
+    _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc] init];
+}
+
+VideoCaptureMacQTKitInfo::~VideoCaptureMacQTKitInfo()
+{
+    // Balances the alloc/init performed in the constructor.
+    [_captureInfo release];
+
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::Init()
+{
+
+    // All setup happens in the constructor; nothing further is needed here.
+    return 0;
+}
+
+WebRtc_UWord32 VideoCaptureMacQTKitInfo::NumberOfDevices()
+{
+    // Ask the Objective-C helper for the number of currently attached
+    // capture devices.
+    return [[_captureInfo getCaptureDeviceCount] intValue];
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::GetDeviceName(
+    WebRtc_UWord32 deviceNumber, WebRtc_UWord8* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameLength, WebRtc_UWord8* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length, WebRtc_UWord8* productUniqueIdUTF8,
+    WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    // Delegate to the Objective-C helper, which fills the caller-supplied
+    // buffers and reports its status wrapped in an NSNumber.
+    NSNumber* status = [_captureInfo getDeviceNamesFromIndex:deviceNumber
+                        DefaultName:deviceNameUTF8 WithLength:deviceNameLength
+                        AndUniqueID:deviceUniqueIdUTF8
+                        WithLength:deviceUniqueIdUTF8Length
+                        AndProductID:productUniqueIdUTF8
+                        WithLength:productUniqueIdUTF8Length];
+    return [status intValue];
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::NumberOfCapabilities(
+    const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do it's best to convert frames to what ever format
+    // you ask for.
+    // Returns -1 so callers can detect that enumeration is unsupported here.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::GetCapability(
+    const WebRtc_UWord8* deviceUniqueIdUTF8,
+    const WebRtc_UWord32 deviceCapabilityNumber,
+    VideoCaptureCapability& capability)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do its best to convert frames to whatever format
+    // you ask for.
+    // BUG FIX: the trace message previously said "NumberOfCapabilities"
+    // (copy-paste error), which made the log misleading.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "GetCapability is not supported on the Mac platform.");
+    return -1;
+}
+
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::GetBestMatchedCapability(
+    const WebRtc_UWord8* deviceUniqueIdUTF8,
+    const VideoCaptureCapability& requested, VideoCaptureCapability& resulting)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do its best to convert frames to whatever format
+    // you ask for.
+    // BUG FIX: the trace message previously named "NumberOfCapabilities" and
+    // used kTraceInfo; use the correct name and kTraceError for consistency
+    // with the other unsupported methods in this file.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "GetBestMatchedCapability is not supported on the Mac "
+                 "platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::DisplayCaptureSettingsDialogBox(
+    const WebRtc_UWord8* deviceUniqueIdUTF8,
+    const WebRtc_UWord8* dialogTitleUTF8, void* parentWindow,
+    WebRtc_UWord32 positionX, WebRtc_UWord32 positionY)
+{
+
+    // Delegate to the Objective-C helper, which presents a modal Cocoa alert.
+    // NOTE(review): parentWindow/positionX/positionY appear to be forwarded
+    // but unused by the helper's modal alert — confirm against the .mm side.
+    return [[_captureInfo
+             displayCaptureSettingsDialogBoxWithDevice:deviceUniqueIdUTF8
+             AndTitle:dialogTitleUTF8
+             AndParentWindow:parentWindow AtX:positionX AndY:positionY]
+             intValue];
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::CreateCapabilityMap(
+    const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do its best to convert frames to whatever format
+    // you ask for.
+    // BUG FIX: the trace message previously named "NumberOfCapabilities" and
+    // used kTraceInfo; corrected for consistency with the sibling methods.
+    // Also normalized the over-indented return statement.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "CreateCapabilityMap is not supported on the Mac platform.");
+    return -1;
+}
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.h b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.h
new file mode 100644
index 0000000..1d3ac86
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  video_capture_qtkit_info_objc.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
+
+#import <QTKit/QTKit.h>
+#import <Foundation/Foundation.h>
+#include "video_capture_qtkit_utility.h"
+#include "video_capture_qtkit_info.h"
+
+@interface VideoCaptureMacQTKitInfoObjC : NSObject{
+    bool                                _OSSupportedInfo;     // YES iff QTKit capture APIs are available on this OS.
+    NSArray*                            _captureDevicesInfo;  // Cached snapshot of QTKit video input devices.
+    NSAutoreleasePool*                    _poolInfo;          // Pool created in initializeVariables.
+    int                                    _captureDeviceCountInfo;  // Count cached by getCaptureDevices.
+
+}
+
+/**************************************************************************
+ *
+ *   The following functions are considered to be private
+ *
+ ***************************************************************************/
+
+// Refreshes _captureDevicesInfo/_captureDeviceCountInfo from QTKit.
+- (NSNumber*)getCaptureDevices;
+// One-time member initialization; no-op when the OS is unsupported.
+- (NSNumber*)initializeVariables;
+// Probes for the QTCaptureSession class and records the result.
+- (void)checkOSSupported;
+
+
+/**************************************************************************
+ *
+ *   The following functions are considered to be public and called by VideoCaptureMacQTKitInfo class
+ *
+ ***************************************************************************/
+
+// Returns the current number of video capture devices as an NSNumber.
+- (NSNumber*)getCaptureDeviceCount;
+
+// Fills the caller-supplied buffers with the name/unique-ID of the device at
+// `index`. Returns 0 on success, -1 on failure (wrapped in an NSNumber).
+- (NSNumber*)getDeviceNamesFromIndex:(WebRtc_UWord32)index
+    DefaultName:(WebRtc_UWord8*)deviceName
+    WithLength:(WebRtc_UWord32)deviceNameLength
+    AndUniqueID:(WebRtc_UWord8*)deviceUniqueID
+    WithLength:(WebRtc_UWord32)deviceUniqueIDLength
+    AndProductID:(WebRtc_UWord8*)deviceProductID
+    WithLength:(WebRtc_UWord32)deviceProductIDLength;
+
+// Shows a modal informational alert describing the capture device.
+- (NSNumber*)displayCaptureSettingsDialogBoxWithDevice:
+        (const WebRtc_UWord8*)deviceUniqueIdUTF8
+    AndTitle:(const WebRtc_UWord8*)dialogTitleUTF8
+    AndParentWindow:(void*) parentWindow AtX:(WebRtc_UWord32)positionX
+    AndY:(WebRtc_UWord32) positionY;
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.mm b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.mm
new file mode 100644
index 0000000..57ab3d0
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.mm
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma mark **** imports/includes
+
+#import "video_capture_qtkit_info_objc.h"
+
+#include "trace.h"
+
+using namespace webrtc;
+
+#pragma mark **** hidden class interface
+
+@implementation VideoCaptureMacQTKitInfoObjC
+
+// ****************** over-written OS methods ***********************
+#pragma mark **** over-written OS methods
+
+/// ***** Objective-C. Similar to C++ constructor, although invoked manually
+/// ***** Potentially returns an instance of self
+-(id)init{
+    // Designated initializer: probe OS support first, then set up members.
+    if((self = [super init]) == nil)
+    {
+        return nil;
+    }
+    [self checkOSSupported];
+    [self initializeVariables];
+    return self;
+}
+
+/// ***** Objective-C. Similar to C++ destructor
+/// ***** Returns nothing
+- (void)dealloc {
+    // BUG FIX: release the objects allocated in initializeVariables /
+    // getCaptureDevices; the original leaked both. Sending release to nil
+    // is a safe no-op if they were never created.
+    [_captureDevicesInfo release];
+    [_poolInfo release];
+    [super dealloc];
+}
+
+// ****************** public methods ******************
+#pragma mark **** public method implementations
+
+/// ***** Creates a message box with Cocoa framework
+/// ***** Returns 0 on success, -1 otherwise.
+/// ***** Creates a message box with Cocoa framework
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)displayCaptureSettingsDialogBoxWithDevice:(const WebRtc_UWord8*)deviceUniqueIdUTF8
+                    AndTitle:(const WebRtc_UWord8*)dialogTitleUTF8
+                    AndParentWindow:(void*) parentWindow
+                    AtX:(WebRtc_UWord32)positionX
+                    AndY:(WebRtc_UWord32) positionY
+{
+    NSString* strTitle = [NSString stringWithFormat:@"%s", dialogTitleUTF8];
+    NSString* strButton = @"Alright";
+    // BUG FIX: the original format string contained Width/Height/fps %d
+    // placeholders with no matching arguments — undefined behavior in
+    // +stringWithFormat:. Only the device name is available here, so the
+    // message now contains exactly one specifier with one argument.
+    NSString* strMessage = [NSString stringWithFormat:
+                            @"Device %s is capturing", deviceUniqueIdUTF8];
+    NSAlert* alert = [NSAlert alertWithMessageText:strTitle
+                      defaultButton:strButton
+                      alternateButton:nil otherButton:nil
+                      informativeTextWithFormat:strMessage];
+    [alert setAlertStyle:NSInformationalAlertStyle];
+    [alert runModal];  // Blocks until the user dismisses the alert.
+    return [NSNumber numberWithInt:0];
+}
+
+- (NSNumber*)getCaptureDeviceCount{
+    // Refresh the cached device list so the count reflects hot-plugged
+    // cameras, then report the cached total.
+    [self getCaptureDevices];
+    int deviceTotal = _captureDeviceCountInfo;
+    return [NSNumber numberWithInt:deviceTotal];
+}
+
+
+// Fills the caller-supplied buffers with the localized name and unique ID of
+// the capture device at `index`. Buffers are zeroed first; returns 0 on
+// success, -1 on failure (wrapped in an NSNumber).
+- (NSNumber*)getDeviceNamesFromIndex:(WebRtc_UWord32)index
+    DefaultName:(WebRtc_UWord8*)deviceName
+    WithLength:(WebRtc_UWord32)deviceNameLength
+    AndUniqueID:(WebRtc_UWord8*)deviceUniqueID
+    WithLength:(WebRtc_UWord32)deviceUniqueIDLength
+    AndProductID:(WebRtc_UWord8*)deviceProductID
+    WithLength:(WebRtc_UWord32)deviceProductIDLength
+{
+    if(NO == _OSSupportedInfo)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    // BUG FIX: valid indices are 0 .. _captureDeviceCountInfo-1; the original
+    // used '>', allowing an out-of-range objectAtIndex: below.
+    if(index >= (WebRtc_UWord32)_captureDeviceCountInfo)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    QTCaptureDevice* tempCaptureDevice =
+        (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+    if(!tempCaptureDevice)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    memset(deviceName, 0, deviceNameLength);
+    memset(deviceUniqueID, 0, deviceUniqueIDLength);
+
+    bool successful = NO;
+
+    NSString* tempString = [tempCaptureDevice localizedDisplayName];
+    successful = [tempString getCString:(char*)deviceName
+                  maxLength:deviceNameLength encoding:NSUTF8StringEncoding];
+    if(NO == successful)
+    {
+        memset(deviceName, 0, deviceNameLength);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    tempString = [tempCaptureDevice uniqueID];
+    successful = [tempString getCString:(char*)deviceUniqueID
+                  maxLength:deviceUniqueIDLength encoding:NSUTF8StringEncoding];
+    if(NO == successful)
+    {
+        // BUG FIX: the original cleared this buffer with deviceNameLength,
+        // which can over- or under-run the unique-ID buffer.
+        memset(deviceUniqueID, 0, deviceUniqueIDLength);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    // NOTE(review): deviceProductID is accepted but never filled in here —
+    // confirm whether callers rely on it being populated.
+    return [NSNumber numberWithInt:0];
+
+}
+
+// ****************** "private" category functions below here  ******************
+#pragma mark **** "private" method implementations
+
+// One-time member setup; safe no-op when QTKit is unavailable.
+- (NSNumber*)initializeVariables
+{
+    if(NO == _OSSupportedInfo)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    // Pool for autoreleased objects created by this helper.
+    _poolInfo = [[NSAutoreleasePool alloc]init];
+    _captureDeviceCountInfo = 0;
+    // Prime the device list cache so counts are valid immediately.
+    [self getCaptureDevices];
+
+    return [NSNumber numberWithInt:0];
+}
+
+// ***** Checks to see if the QTCaptureSession framework is available in the OS
+// ***** If it is not, _OSSupportedInfo = NO
+// ***** Throughout the rest of the class _OSSupportedInfo is checked and
+// ***** functions are/aren't called depending
+// ***** The user can use weak linking to the QTKit framework and run on older
+// ***** versions of the OS
+// ***** I.E. Backwards compatibility
+// ***** Returns nothing. Sets member variable
+- (void)checkOSSupported
+{
+    // QTCaptureSession resolves to nil via NSClassFromString when the QTKit
+    // framework is weakly linked and absent on this OS version.
+    Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
+    if(nil == osSupportedTest)
+    {
+        // BUG FIX: the original left this branch empty and unconditionally
+        // set the flag to YES below, so unsupported systems were reported
+        // as supported.
+        _OSSupportedInfo = NO;
+        return;
+    }
+    _OSSupportedInfo = YES;
+}
+
+/// ***** Retrieves the number of capture devices currently available
+/// ***** Stores them in an NSArray instance
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)getCaptureDevices
+{
+    // Treated as success (0) when QTKit is unavailable on this OS.
+    if(NO == _OSSupportedInfo)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    // Replace any previously cached snapshot; release of nil is a no-op.
+    [_captureDevicesInfo release];
+    NSArray* videoInputDevices =
+        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
+    _captureDevicesInfo = [[NSArray alloc] initWithArray:videoInputDevices];
+
+    // Cache the count; zero devices is still reported as success.
+    _captureDeviceCountInfo = _captureDevicesInfo.count;
+    return [NSNumber numberWithInt:0];
+}
+
+@end
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.h b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.h
new file mode 100644
index 0000000..d48dbf1
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  video_capture_qtkit_objc.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
+
+#import <Foundation/Foundation.h>
+#import <QTKit/QTKit.h>
+#import <AppKit/AppKit.h>
+#import <CoreData/CoreData.h>
+#import <CoreFoundation/CoreFoundation.h>
+#import <CoreVideo/CoreVideo.h>
+
+#import "video_capture_recursive_lock.h"
+
+#include "video_capture_qtkit.h"
+
+@interface VideoCaptureMacQTKitObjC : NSObject{
+    // class properties
+    bool                                    _capturing;          // YES while the session is running.
+    int                                    _counter;
+    int                                    _frameRate;           // Requested capture fps.
+    int                                    _frameWidth;          // Requested frame width in pixels.
+    int                                    _frameHeight;         // Requested frame height in pixels.
+    int                                    _framesDelivered;
+    int                                    _framesRendered;
+    bool                                _OSSupported;            // YES iff QTKit capture APIs exist on this OS.
+    bool                                _captureInitialized;     // Set once initializeVideoCapture succeeds.
+    
+    // WebRTC Custom classes
+    webrtc::videocapturemodule::VideoCaptureMacQTKit* _owner;    // Receives delivered frames; not owned.
+    VideoCaptureRecursiveLock*            _rLock;                // Guards the frame-delivery callback.
+    
+    // QTKit variables
+    QTCaptureSession*                    _captureSession;
+    QTCaptureDeviceInput*                _captureVideoDeviceInput;
+    QTCaptureDecompressedVideoOutput*    _captureDecompressedVideoOutput;
+    NSArray*                            _captureDevices;         // Cached QTKit video device list.
+    int                                    _captureDeviceCount;
+    int                                    _captureDeviceIndex;
+    NSString*                            _captureDeviceName;
+    char                                _captureDeviceNameUTF8[1024];
+    char                                _captureDeviceNameUniqueID[1024];
+    char                                _captureDeviceNameProductID[1024];
+    NSString*                            _key;
+    NSNumber*                            _val;
+    NSDictionary*                        _videoSettings;
+    NSString*                            _captureQuality;
+    
+    // other
+    NSAutoreleasePool*                    _pool;
+
+}
+/**************************************************************************
+ *
+ *   The following functions are considered to be private.
+ *
+ ***************************************************************************/
+
+- (NSNumber*)getCaptureDevices;
+- (NSNumber*)initializeVideoCapture;
+- (NSNumber*)initializeVariables;
+- (void)checkOSSupported;
+
+
+/**************************************************************************
+ *
+ *   The following functions are considered public and to be called by the VideoCaptureMacQTKit class.
+ *
+ ***************************************************************************/
+
+
+// Registers the frame sink; returns -1 if owner is null.
+- (NSNumber*)registerOwner:(webrtc::videocapturemodule::VideoCaptureMacQTKit*)owner;
+// Selects the capture device whose uniqueID matches; opens and attaches it.
+- (NSNumber*)setCaptureDeviceById:(char*)uniqueId;
+// Updates requested resolution and frame rate on the decompressed output.
+- (NSNumber*)setCaptureHeight:(int)height AndWidth:(int)width AndFrameRate:(int)frameRate;
+- (NSNumber*)startCapture;
+- (NSNumber*)stopCapture;
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.mm b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.mm
new file mode 100644
index 0000000..48f734c
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.mm
@@ -0,0 +1,462 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#define DEFAULT_CAPTURE_DEVICE_INDEX    1
+#define DEFAULT_FRAME_RATE              30
+#define DEFAULT_FRAME_WIDTH                352
+#define DEFAULT_FRAME_HEIGHT            288
+#define ROTATE_CAPTURED_FRAME           1
+#define LOW_QUALITY                     1
+
+#import "video_capture_qtkit_objc.h"
+#include "video_capture_qtkit_utility.h"
+#include "trace.h"
+
+using namespace webrtc;
+using namespace videocapturemodule;
+
+@implementation VideoCaptureMacQTKitObjC
+
+#pragma mark **** over-written OS methods
+
+/// ***** Objective-C. Similar to C++ constructor, although must be invoked
+///       manually.
+/// ***** Potentially returns an instance of self
+-(id)init{
+    // Designated initializer: probe OS support, then build all members.
+    if((self = [super init]) == nil)
+    {
+        return nil;
+    }
+    [self checkOSSupported];
+    [self initializeVariables];
+    return self;
+}
+
+/// ***** Objective-C. Similar to C++ destructor
+/// ***** Returns nothing
+- (void)dealloc {
+    if(_captureSession)
+    {
+        [_captureSession stopRunning];
+        [_captureSession release];
+    }
+    // BUG FIX: release the remaining objects allocated in
+    // initializeVariables/getCaptureDevices/setCaptureDeviceById; the
+    // original leaked them. Release of nil is a safe no-op.
+    [_captureVideoDeviceInput release];
+    [_captureDecompressedVideoOutput release];
+    [_captureDevices release];
+    [_rLock release];
+    [_captureDeviceName release];
+    [_pool release];
+    [super dealloc];
+}
+
+#pragma mark **** public methods
+
+
+
+/// ***** Registers the class's owner, which is where the delivered frames are
+///       sent
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)registerOwner:(VideoCaptureMacQTKit*)owner{
+    // A null owner would leave delivered frames with nowhere to go.
+    if(owner == NULL)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+    _owner = owner;
+    return [NSNumber numberWithInt:0];
+}
+
+/// ***** Sets the QTCaptureSession's input device from a char*
+/// ***** Sets several member variables. Can signal the error system if one has
+///       occurred
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)setCaptureDeviceById:(char*)uniqueId{
+    if(NO == _OSSupported)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d OS version does not support necessary APIs",
+                     __FUNCTION__, __LINE__);
+        return [NSNumber numberWithInt:0];
+    }
+
+    // An empty/null id clears the remembered device name but is not an error.
+    if(!uniqueId || (0 == strcmp("", uniqueId)))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d  \"\" was passed in for capture device name",
+                     __FUNCTION__, __LINE__);
+        memset(_captureDeviceNameUTF8, 0, 1024);
+        return [NSNumber numberWithInt:0];
+    }
+
+    // Requested device is the one already attached; nothing to do.
+    if(0 == strcmp(uniqueId, _captureDeviceNameUniqueID))
+    {
+        // camera already set
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d Capture device is already set to %s", __FUNCTION__,
+                     __LINE__, _captureDeviceNameUTF8);
+        return [NSNumber numberWithInt:0];
+    }
+
+    // Linear scan of the cached device list for a matching uniqueID,
+    // recording its display name and id on success.
+    bool success = NO;
+    QTCaptureDevice* tempCaptureDevice;
+    for(int index = 0; index < _captureDeviceCount; index++)
+    {
+        tempCaptureDevice = (QTCaptureDevice*)[_captureDevices
+                                               objectAtIndex:index];
+        char tempCaptureDeviceId[1024] = "";
+        [[tempCaptureDevice uniqueID]
+          getCString:tempCaptureDeviceId maxLength:1024
+          encoding:NSUTF8StringEncoding];
+        if(0 == strcmp(uniqueId, tempCaptureDeviceId))
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                         "%s:%d Found capture device id %s as index %d",
+                         __FUNCTION__, __LINE__, tempCaptureDeviceId, index);
+            success = YES;
+          [[tempCaptureDevice localizedDisplayName]
+              getCString:_captureDeviceNameUTF8
+               maxLength:1024
+                encoding:NSUTF8StringEncoding];
+          [[tempCaptureDevice uniqueID]
+              getCString:_captureDeviceNameUniqueID
+               maxLength:1024
+                encoding:NSUTF8StringEncoding];
+            break;
+        }
+
+    }
+
+    if(NO == success)
+    {
+        // camera not found
+        // nothing has been changed yet, so capture device will stay in it's
+        // state
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d Capture device id %s was not found in list of "
+                     "available devices.", __FUNCTION__, __LINE__, uniqueId);
+        return [NSNumber numberWithInt:0];
+    }
+
+    NSError* error;
+    success = [tempCaptureDevice open:&error];
+    if(!success)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
+                     "%s:%d Failed to open capture device: %s",
+                     __FUNCTION__, __LINE__, _captureDeviceNameUTF8);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    // Replace any previous device input before wiring the new one in.
+    // NOTE(review): the old input is released but never removed from the
+    // session via removeInput: — confirm intended session state here.
+    if(_captureVideoDeviceInput)
+    {
+        [_captureVideoDeviceInput release];
+    }
+    _captureVideoDeviceInput = [[QTCaptureDeviceInput alloc]
+                                 initWithDevice:tempCaptureDevice];
+
+    success = [_captureSession addInput:_captureVideoDeviceInput error:&error];
+    if(!success)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
+                     "%s:%d Failed to add input from %s to the capture session",
+                     __FUNCTION__, __LINE__, _captureDeviceNameUTF8);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                 "%s:%d successfully added capture device: %s", __FUNCTION__,
+                 __LINE__, _captureDeviceNameUTF8);
+    return [NSNumber numberWithInt:0];
+}
+
+
+/// ***** Updates the capture devices size and frequency
+/// ***** Sets member variables _frame* and _captureDecompressedVideoOutput
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)setCaptureHeight:(int)height AndWidth:(int)width
+             AndFrameRate:(int)frameRate{
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    _frameWidth = width;
+    _frameHeight = height;
+    _frameRate = frameRate;
+
+    // TODO(mflodman) Check fps settings.
+    // [_captureDecompressedVideoOutput
+    //     setMinimumVideoFrameInterval:(NSTimeInterval)1/(float)_frameRate];
+    // Pixel-buffer attributes request a fixed-size 32-bit ARGB frame from
+    // QTKit's decompressed video output.
+    NSDictionary* captureDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
+                                       [NSNumber numberWithDouble:_frameWidth], (id)kCVPixelBufferWidthKey,
+                                       [NSNumber numberWithDouble:_frameHeight], (id)kCVPixelBufferHeightKey,
+                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
+                                       (id)kCVPixelBufferPixelFormatTypeKey, nil]; 
+    // Applied on the main thread; waitUntilDone:NO makes this asynchronous,
+    // so the new attributes may not be in effect when this method returns.
+    [_captureDecompressedVideoOutput performSelectorOnMainThread:@selector(setPixelBufferAttributes:) withObject:captureDictionary waitUntilDone:NO];
+//    [_captureDecompressedVideoOutput setPixelBufferAttributes:captureDictionary];
+
+        
+    // these methods return type void so there isn't much we can do about
+    // checking success
+    return [NSNumber numberWithInt:0];
+}
+
+/// ***** Starts the QTCaptureSession, assuming correct state. Also ensures that
+///       an NSRunLoop is running
+/// ***** Without and NSRunLoop to process events, the OS doesn't check for a
+///       new frame.
+/// ***** Sets member variables _capturing
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)startCapture{
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(YES == _capturing)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+  
+//    NSLog(@"--------------- before ---------------");
+    // NOTE(review): running the main run loop until distantFuture appears to
+    // block this call indefinitely on the calling thread — confirm how this
+    // ever returns to reach startRunning below.
+    [[NSRunLoop mainRunLoop] runUntilDate:[NSDate distantFuture]];
+//    NSLog(@"--------------- after ---------------");
+
+    if(NO == _captureInitialized)
+    {
+        // this should never be called..... it is initialized on class init
+        [self initializeVideoCapture];
+    }    
+    [_captureSession startRunning];
+
+    
+    _capturing = YES;
+
+    return [NSNumber numberWithInt:0];
+}
+
+/// ***** Stops the QTCaptureSession, assuming correct state
+/// ***** Sets member variables _capturing
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)stopCapture{
+
+    // Nothing to stop when the OS is unsupported, no session exists, or
+    // capture is already idle — all original early-exit cases, merged.
+    if((NO == _OSSupported) || (nil == _captureSession) || (NO == _capturing))
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    [_captureSession stopRunning];
+    _capturing = NO;
+    return [NSNumber numberWithInt:0];
+}
+
+// ********** "private" functions below here **********
+#pragma mark **** "private" methods
+
+/// ***** Class member variables are initialized here
+/// ***** Returns 0 on success, -1 otherwise.
+// One-time member setup: counters, defaults, lock, session, and the
+// decompressed-video output (with this object as its frame delegate).
+- (NSNumber*)initializeVariables{
+
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    _pool = [[NSAutoreleasePool alloc]init];
+
+    memset(_captureDeviceNameUTF8, 0, 1024);
+    _counter = 0;
+    _framesDelivered = 0;
+    _framesRendered = 0;
+    _captureDeviceCount = 0;
+    _capturing = NO;
+    _captureInitialized = NO;
+    _frameRate = DEFAULT_FRAME_RATE;
+    _frameWidth = DEFAULT_FRAME_WIDTH;
+    _frameHeight = DEFAULT_FRAME_HEIGHT;
+    _captureDeviceName = [[NSString alloc] initWithFormat:@""];
+    _rLock = [[VideoCaptureRecursiveLock alloc] init];
+    _captureSession = [[QTCaptureSession alloc] init];
+    _captureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc]
+                                        init];
+    // Frames are delivered to this object's captureOutput:... callback.
+    [_captureDecompressedVideoOutput setDelegate:self];
+
+    [self getCaptureDevices];
+    [self initializeVideoCapture];
+
+    return [NSNumber numberWithInt:0];
+
+}
+
+// Checks to see if the QTCaptureSession framework is available in the OS
+// If it is not, _OSSupported = NO.
+// Throughout the rest of the class _OSSupported is checked and functions
+// are/aren't called depending
+// The user can use weak linking to the QTKit framework and run on older
+// versions of the OS. I.E. Backwards compatibility
+// Returns nothing. Sets member variable
+- (void)checkOSSupported{
+
+    // QTCaptureSession resolves to nil via NSClassFromString when QTKit is
+    // weakly linked and absent on this OS version.
+    Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
+    if(nil == osSupportedTest)
+    {
+        // BUG FIX: the original left this branch empty and unconditionally
+        // set the flag to YES below, so unsupported systems were reported
+        // as supported.
+        _OSSupported = NO;
+        return;
+    }
+    _OSSupported = YES;
+}
+
+/// ***** Retrieves the number of capture devices currently available
+/// ***** Stores them in an NSArray instance
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)getCaptureDevices{
+
+    // Treated as success (0) when QTKit is unavailable on this OS.
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    // Replace any previously cached snapshot; release of nil is a no-op.
+    [_captureDevices release];
+    NSArray* videoInputDevices =
+        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
+    _captureDevices = [[NSArray alloc] initWithArray:videoInputDevices];
+
+    // Cache the count; zero devices is still reported as success.
+    _captureDeviceCount = _captureDevices.count;
+    return [NSNumber numberWithInt:0];
+}
+
+// Initializes a QTCaptureSession (member variable) to deliver frames via
+// callback
+// QTCapture* member variables affected
+// The image format and frequency are setup here
+// Returns 0 on success, -1 otherwise.
+- (NSNumber*)initializeVideoCapture{
+
+    // Guard against double initialization of the session wiring.
+    if(YES == _captureInitialized)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    // NOTE(review): always opens device 0 here regardless of any later
+    // setCaptureDeviceById: selection — confirm this is intentional.
+    QTCaptureDevice* videoDevice =
+        (QTCaptureDevice*)[_captureDevices objectAtIndex:0];
+
+    bool success = NO;
+    NSError*    error;
+
+    success = [videoDevice open:&error];
+    if(!success)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    // Request fixed-size 32-bit ARGB frames from the decompressed output.
+    [_captureDecompressedVideoOutput setPixelBufferAttributes:
+        [NSDictionary dictionaryWithObjectsAndKeys:
+            [NSNumber numberWithDouble:_frameWidth], (id)kCVPixelBufferWidthKey,
+            [NSNumber numberWithDouble:_frameHeight], (id)kCVPixelBufferHeightKey,
+            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
+            (id)kCVPixelBufferPixelFormatTypeKey, nil]];
+
+    // TODO(mflodman) Check fps settings.
+    //[_captureDecompressedVideoOutput setMinimumVideoFrameInterval:
+    //    (NSTimeInterval)1/(float)_frameRate];
+    //[_captureDecompressedVideoOutput setAutomaticallyDropsLateVideoFrames:YES];
+
+    success = [_captureSession addOutput:_captureDecompressedVideoOutput
+               error:&error];
+
+    if(!success)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    _captureInitialized = YES;
+
+    return [NSNumber numberWithInt:0];
+}
+
+// This is the callback that is called when the OS has a frame to deliver to us.
+// Starts being called when [_captureSession startRunning] is called. Stopped
+// similarly.
+// Parameter videoFrame contains the image. The format, size, and frequency
+// were setup earlier.
+// Returns 0 on success, -1 otherwise.
+// QTKit frame-delivery callback: forwards each decompressed ARGB frame to
+// the registered owner. Runs on QTKit's capture thread.
+- (void)captureOutput:(QTCaptureOutput *)captureOutput
+    didOutputVideoFrame:(CVImageBufferRef)videoFrame
+     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
+     fromConnection:(QTCaptureConnection *)connection{
+
+    // Drop the frame rather than block the capture thread if the lock is
+    // contended. BUG FIX: the original called [_rLock lock] again after a
+    // successful tryLock — a double acquisition paired with a single unlock.
+    if(NO == [_rLock tryLock])
+    {
+        return;
+    }
+
+    if(NO == _OSSupported)
+    {
+        // BUG FIX: the original returned here without releasing the lock.
+        [_rLock unlock];
+        return;
+    }
+
+    const int LOCK_FLAGS = 0; // documentation says to pass 0
+
+    // get size of the frame
+    CVPixelBufferLockBaseAddress(videoFrame, LOCK_FLAGS);
+    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
+    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
+    int frameHeight = CVPixelBufferGetHeight(videoFrame);
+    CVPixelBufferUnlockBaseAddress(videoFrame, LOCK_FLAGS);
+
+    if(_owner)
+    {
+
+        int frameSize = bytesPerRow * frameHeight;    // 32 bit ARGB format
+        // Keep the buffer alive while the owner consumes it.
+        CVBufferRetain(videoFrame);
+        VideoCaptureCapability tempCaptureCapability;
+        tempCaptureCapability.width = _frameWidth;
+        tempCaptureCapability.height = _frameHeight;
+        tempCaptureCapability.maxFPS = _frameRate;
+        // TODO(wu) : Update actual type and not hard-coded value. 
+        tempCaptureCapability.rawType = kVideoBGRA;
+
+        _owner->IncomingFrame((unsigned char*)baseAddress,
+                              frameSize,
+                              tempCaptureCapability,
+                              0);
+
+        CVBufferRelease(videoFrame);
+    }
+
+    _framesDelivered++;
+    _framesRendered++;
+
+    if(YES == [_rLock locked])
+    {
+        [_rLock unlock];
+    }
+}
+
+@end
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_utility.h b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_utility.h
new file mode 100644
index 0000000..5ef0b96
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_utility.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_qtkit_utility.h
+ *
+ */
+
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
+
+// Maximum length, in bytes, for device/name strings handled by the
+// QTKit capture implementation.
+#define MAX_NAME_LENGTH                1024
+
+// Capture width limits and default, in pixels.
+#define QTKIT_MIN_WIDTH                0
+#define QTKIT_MAX_WIDTH                2560
+#define QTKIT_DEFAULT_WIDTH            352
+
+// Capture height limits and default, in pixels.
+#define QTKIT_MIN_HEIGHT            0
+#define QTKIT_MAX_HEIGHT            1440
+#define QTKIT_DEFAULT_HEIGHT        288
+
+// Capture frame-rate limits and default, in frames per second.
+#define QTKIT_MIN_FRAME_RATE        1
+#define QTKIT_MAX_FRAME_RATE        60
+#define QTKIT_DEFAULT_FRAME_RATE    30
+
+// Release a ref-counted object and clear the pointer. Wrapped in
+// do { } while (0) so the macro expands to a single statement and is
+// safe inside unbraced if/else bodies (the original bare-if expansion
+// was a dangling-else hazard).
+#define RELEASE_AND_CLEAR(p)        do { if (p) { (p)->Release(); (p) = NULL; } } while (0)
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.h b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.h
new file mode 100644
index 0000000..f4008a4
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  video_capture_recursive_lock.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_RECURSIVE_LOCK_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_RECURSIVE_LOCK_H_
+
+#import <Foundation/Foundation.h>
+
+// Recursive lock that additionally tracks a "locked" flag so callers can
+// query whether the lock is currently held.
+// NOTE(review): the flag is a plain BOOL updated around lock/unlock and
+// is not itself synchronized, and tryLock (inherited from
+// NSRecursiveLock) does not update it — treat it as a hint only; confirm
+// callers do not rely on it for correctness.
+@interface VideoCaptureRecursiveLock : NSRecursiveLock <NSLocking> {
+    BOOL _locked;
+}
+
+// YES while the lock is held via -lock (best effort; see note above).
+@property BOOL locked;
+
+- (void)lock;
+- (void)unlock;
+
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_RECURSIVE_LOCK_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.mm b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.mm
new file mode 100644
index 0000000..d9df5cb
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.mm
@@ -0,0 +1,33 @@
+//
+//  video_capture_recursive_lock.mm
+//
+//
+
+#import "video_capture_recursive_lock.h"
+
+@implementation VideoCaptureRecursiveLock
+
+@synthesize locked = _locked;
+
+// Designated initializer: the lock starts out unlocked.
+- (id)init{
+    self = [super init];
+    if(nil == self){
+        return nil;
+    }
+
+    [self setLocked:NO];
+    return self;
+}
+
+// Acquire the recursive lock, then publish the held state. The original
+// set the flag BEFORE acquiring, so a caller blocked on a contended lock
+// made the lock appear held while still waiting for it.
+- (void)lock{
+    [super lock];
+    [self setLocked:YES];
+}
+
+// Clear the held flag while still holding the lock, then release it.
+- (void)unlock{
+    [self setLocked:NO];
+    [super unlock];
+}
+
+
+@end
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.cc b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.cc
new file mode 100644
index 0000000..f70923e
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.cc
@@ -0,0 +1,1387 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time.cc
+ *
+ */
+
+
+#include "video_capture_quick_time.h"
+
+#include "CriticalSectionWrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "trace.h"
+#include <unistd.h>
+
+namespace webrtc
+{
+
+// Constructor: sets every member to its idle/default value. No QuickTime
+// resources are acquired here — Init() does that. _terminated starts true
+// so Init()/VideoCaptureTerminate() can track the lifecycle.
+VideoCaptureMacQuickTime::VideoCaptureMacQuickTime(WebRtc_Word32 iID) :
+    VideoCaptureImpl(iID), // super class constructor
+    _id(iID),
+    _isCapturing(false),
+    _captureCapability(),
+    _grabberCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _videoMacCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _terminated(true), _grabberUpdateThread(NULL),
+    _grabberUpdateEvent(NULL), _captureGrabber(NULL), _captureDevice(NULL),
+    _captureVideoType(kVideoUnknown), _captureIsInitialized(false),
+    _gWorld(NULL), _captureChannel(0), _captureSequence(NULL),
+    _sgPrepared(false), _sgStarted(false), _trueCaptureWidth(0),
+    _trueCaptureHeight(0), _captureDeviceList(),
+    _captureDeviceListTime(0), _captureCapabilityList()
+
+{
+    // Use the codec's default frame size until StartCapture() supplies a
+    // real capability.
+    _captureCapability.width = START_CODEC_WIDTH;
+    _captureCapability.height = START_CODEC_HEIGHT;
+    memset(_captureDeviceDisplayName, 0, sizeof(_captureDeviceDisplayName));
+}
+
+// Destructor: shuts down capture and frees the critical sections.
+VideoCaptureMacQuickTime::~VideoCaptureMacQuickTime()
+{
+    // Release all QuickTime/thread resources first.
+    VideoCaptureTerminate();
+
+    // delete on a NULL pointer is a no-op, so no guards are needed.
+    delete _videoMacCritsect;
+    delete _grabberCritsect;
+}
+
+// Validates the OS and QuickTime versions, stores the unique device id
+// and selects the capture device. Returns 0 on success, -1 otherwise.
+WebRtc_Word32 VideoCaptureMacQuickTime::Init(
+    const WebRtc_Word32 id, const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+
+    const WebRtc_Word32 nameLength =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (nameLength > kVideoCaptureUniqueNameLength)
+        return -1;
+
+    // Check OSX version
+    OSErr err = noErr;
+    long version;
+
+    _videoMacCritsect->Enter();
+    if (!_terminated)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Already Initialized", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // Store the device name. Done after the already-initialized check so
+    // that a rejected re-init neither leaks a fresh allocation nor
+    // clobbers the id stored by the first successful Init(). (The
+    // original allocated unconditionally before the check.)
+    _deviceUniqueId = new WebRtc_UWord8[nameLength + 1];
+    memset(_deviceUniqueId, 0, nameLength + 1);
+    memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+    err = Gestalt(gestaltSystemVersion, &version);
+    if (err != noErr)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not retrieve OS version", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d OS X version: %x,", __FUNCTION__, __LINE__, version);
+    if (version < 0x00001040) // Older version than Mac OSX 10.4
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d OS version not supported", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    err = Gestalt(gestaltQuickTime, &version);
+    if (err != noErr)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not retrieve QuickTime version",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d QuickTime version: %x", __FUNCTION__, __LINE__,
+                 version);
+    if (version < 0x07000000) // QT v. 7.x or newer (QT 5.0.2 0x05020000)
+    {
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d EnterMovies()", __FUNCTION__, __LINE__);
+    EnterMovies();
+
+    if (VideoCaptureSetCaptureDevice((char*) deviceUniqueIdUTF8,
+                                   kVideoCaptureProductIdLength) == -1)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d failed to set capture device: %s", __FUNCTION__,
+                     __LINE__, deviceUniqueIdUTF8);
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+
+    _terminated = false;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d successful initialization", __FUNCTION__, __LINE__);
+    _videoMacCritsect->Leave();
+
+    return 0;
+}
+
+// Records the requested resolution and starts the capture pipeline.
+// Returns 0 on success, -1 if the pipeline could not be started.
+WebRtc_Word32 VideoCaptureMacQuickTime::StartCapture(
+    const VideoCaptureCapability& capability)
+{
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "%s:%d "
+        "capability.width=%d, capability.height=%d ,capability.maxFPS=%d "
+        "capability.expectedCaptureDelay=%d, capability.interlaced=%d",
+        __FUNCTION__, __LINE__, capability.width, capability.height,
+        capability.maxFPS, capability.expectedCaptureDelay,
+        capability.interlaced);
+
+    _captureCapability.width = capability.width;
+    _captureCapability.height = capability.height;
+
+    // VideoCaptureRun() signals failure with -1.
+    return (VideoCaptureRun() == -1) ? -1 : 0;
+}
+
+// Stops the capture pipeline. Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoCaptureMacQuickTime::StopCapture()
+{
+    // Delegate to the internal stop routine and propagate its failure.
+    return (VideoCaptureStop() == -1) ? -1 : 0;
+}
+
+// Returns true while frames are being captured (flag maintained by the
+// start/stop paths).
+bool VideoCaptureMacQuickTime::CaptureStarted()
+{
+    return _isCapturing;
+}
+
+// Reports the currently configured capture resolution. The actual frame
+// rate is not tracked by this implementation, so maxFPS is reported as 0.
+WebRtc_Word32 VideoCaptureMacQuickTime::CaptureSettings(
+    VideoCaptureCapability& settings)
+{
+    settings.width = _captureCapability.width;
+    settings.height = _captureCapability.height;
+    settings.maxFPS = 0;
+    return 0;
+}
+
+// Tears down the whole capture pipeline: stops capture, releases the
+// GWorld and sequence grabber, joins the update thread and clears the
+// cached device list. Returns 0 on success, -1 if already terminated.
+int VideoCaptureMacQuickTime::VideoCaptureTerminate()
+{
+    VideoCaptureStop();
+
+    _videoMacCritsect->Enter();
+    if (_terminated)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Already terminated", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    _grabberCritsect->Enter();
+
+    // Stop the camera/sequence grabber
+    // Resets: _captureSequence, _sgStarted
+    StopQuickTimeCapture();
+
+    // Remove local video settings
+    // Resets: _gWorld, _captureCapability.width, _captureCapability.height
+    RemoveLocalGWorld();
+    DisconnectCaptureDevice();
+
+    if (_grabberUpdateThread)
+        _grabberUpdateThread->SetNotAlive();
+
+    _grabberCritsect->Leave();
+
+    // Wake the update thread so it can observe the shutdown and exit
+    // before we stop and delete it below.
+    if (_grabberUpdateEvent)
+        _grabberUpdateEvent->Set();
+
+    SLEEP(1);
+    _grabberCritsect->Enter();
+
+    if (_grabberUpdateThread)
+    {
+        _grabberUpdateThread->Stop();
+        delete _grabberUpdateThread;
+        _grabberUpdateThread = NULL;
+    }
+    if (_grabberUpdateEvent)
+    {
+        delete _grabberUpdateEvent;
+        _grabberUpdateEvent = NULL;
+    }
+
+    // Close the sequence grabber. The component must be closed BEFORE the
+    // pointer is cleared: the original nulled _captureGrabber first and
+    // then passed NULL to CloseComponent, leaking the component instance.
+    if (_captureGrabber)
+    {
+        SGRelease(_captureGrabber);
+        CloseComponent(_captureGrabber);
+        _captureGrabber = NULL;
+        _captureDevice = NULL;
+    }
+    _captureVideoType = kVideoUnknown;
+
+    // Delete capture device list
+    ListItem* item = _captureDeviceList.First();
+    while (item)
+    {
+        delete static_cast<unsigned char*> (item->GetItem());
+        _captureDeviceList.Erase(item);
+        item = _captureDeviceList.First();
+    }
+    _captureDeviceListTime = 0;
+
+    _terminated = true;
+
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+
+    return 0;
+}
+
+// Reconfigures the offscreen GWorld (and, if a capture device is
+// selected, the device connection) to match the new codec size in
+// |inst|, restarting capture if it was running. |channel| is only
+// validated (must be >= 0); |def| is accepted but unused here.
+// Returns 0 on success, -1 on failure.
+int VideoCaptureMacQuickTime::UpdateCaptureSettings(int channel,
+                                                    webrtc::VideoCodec& inst,
+                                                    bool def)
+{
+
+    if (channel < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Invalid channel number: %d", __FUNCTION__,
+                     __LINE__, channel);
+        return -1;
+    }
+
+    // the size has changed, we need to change our setup
+    // Lock order: _videoMacCritsect before _grabberCritsect (matches
+    // VideoCaptureTerminate).
+    _videoMacCritsect->Enter();
+
+    // Stop capturing, if we are...
+    _grabberCritsect->Enter();
+
+    // Remember whether capture was running so it can be restarted below.
+    bool wasCapturing = false;
+    StopQuickTimeCapture(&wasCapturing);
+
+    // Create a new offline GWorld to receive captured frames
+    RemoveLocalGWorld();
+
+    if (CreateLocalGWorld(inst.width, inst.height) == -1)
+    {
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        // Error already logged
+        return -1;
+    }
+    _captureCapability.width = inst.width;
+    _captureCapability.height = inst.height;
+
+    // Connect the capture device to our offline GWorld
+    // if we already have a capture device selected.
+    if (_captureDevice)
+    {
+        DisconnectCaptureDevice();
+        if (ConnectCaptureDevice() == -1)
+        {
+            // Error already logged
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            return -1;
+        }
+    }
+
+    // Start capture if we did before
+    if (wasCapturing)
+    {
+        if (StartQuickTimeCapture() == -1)
+        {
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Failed to start capturing", __FUNCTION__,
+                         __LINE__);
+            return -1;
+        }
+    }
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+
+    return 0;
+}
+
+// Creates an off screen graphics world used for converting
+// captured video frames if we can't get a format we want.
+// Assumed protected by critsects
+int VideoCaptureMacQuickTime::CreateLocalGWorld(int width, int height)
+{
+    // Refuse to create a second GWorld; RemoveLocalGWorld() must be
+    // called first.
+    if (_gWorld)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d GWorld already created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    if (width == 0 || height == 0)
+    {
+        return -1;
+    }
+
+    Rect captureRect;
+    captureRect.left = 0;
+    captureRect.top = 0;
+    captureRect.right = width;
+    captureRect.bottom = height;
+
+    // Create a GWorld in same size as we want to send to the codec
+    // (k2vuyPixelFormat = 8-bit 2vuy, the UYVY layout requested here).
+    if (QTNewGWorld(&(_gWorld), k2vuyPixelFormat, &captureRect, 0, NULL, 0)
+        != noErr)
+    {
+        return -1;
+    }
+    _captureCapability.width = width;
+    _captureCapability.height = height;
+
+    // Keep the pixmap's base address fixed in memory; failure is treated
+    // as non-fatal.
+    if (!LockPixels(GetGWorldPixMap(_gWorld)))
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not lock pixmap. Continuing anyhow",
+                     __FUNCTION__, __LINE__);
+    }
+
+    // Temporarily switch the current graphics port to the new GWorld so
+    // it can be cleared, then restore the previous port/device.
+    CGrafPtr theOldPort;
+    GDHandle theOldDevice;
+    GetGWorld(&theOldPort, &theOldDevice); // Gets the result from QTGetNewGWorld
+    SetGWorld(_gWorld, NULL); // Sets the new GWorld
+    BackColor( blackColor); // Changes the color on the graphic port
+    ForeColor( whiteColor);
+    EraseRect(&captureRect);
+    SetGWorld(theOldPort, theOldDevice);
+
+    return 0;
+}
+
+// Assumed critsect protected
+// Disposes the offscreen GWorld and resets the capture size to the
+// start-up defaults. Returns -1 if no GWorld exists, 0 on success.
+int VideoCaptureMacQuickTime::RemoveLocalGWorld()
+{
+    if (!_gWorld)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d !gWorld", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    DisposeGWorld(_gWorld);
+    _gWorld = NULL;
+    _captureCapability.width = START_CODEC_WIDTH;
+    _captureCapability.height = START_CODEC_HEIGHT;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d GWorld has been removed", __FUNCTION__, __LINE__);
+    return 0;
+}
+
+// ConnectCaptureDevice
+// This function prepares the capture device
+// with the wanted settings, but the capture
+// device isn't started.
+//
+// Assumed critsect protected
+// Connects the selected capture device to the offscreen GWorld, creates
+// the video channel, locates the device matching
+// _captureDeviceDisplayName, configures bounds/usage and prepares the
+// sequence grabber. The device is NOT started here (see
+// StartQuickTimeCapture). Assumed critsect protected by the caller.
+// Returns 0 on success, -1 on failure.
+int VideoCaptureMacQuickTime::ConnectCaptureDevice()
+{
+    // Prepare the capture grabber if a capture device is already set
+    if (!_captureGrabber)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No capture device is selected", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+    if (_captureIsInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Capture device is already initialized",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+    if (!_gWorld)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No GWorld is created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    OSErr err = noErr;
+    long flags = 0;
+
+    // Connect the camera to our offline GWorld
+    // We won't use the GWorld if we get the format we want
+    // from the camera.
+    if (SGSetGWorld(_captureGrabber, _gWorld, NULL ) != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not connect capture device", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+    if (SGSetDataRef(_captureGrabber, 0, 0, seqGrabDontMakeMovie) != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not configure capture device", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Set our capture callback.
+    // NOTE(review): the UPP returned by NewSGDataUPP is never disposed
+    // (DisposeSGDataUPP); it leaks once per connect. Left as-is because
+    // fixing it requires storing the UPP in a member.
+    if (SGSetDataProc(_captureGrabber, NewSGDataUPP(SendProcess), (long) this)
+        != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not set capture callback. Unable to receive "
+                     "frames", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // Create a video channel to the sequence grabber
+    if (SGNewChannel(_captureGrabber, VideoMediaType, &_captureChannel)
+        != noErr) // Takes time!!!
+    {
+        return -1;
+    }
+
+    // Get a list with all capture devices to choose the one we want.
+    SGDeviceList deviceList = NULL;
+    if (SGGetChannelDeviceList(_captureChannel, sgDeviceListIncludeInputs,
+                               &deviceList) != noErr)
+    {
+        // The original ignored this error and then dereferenced the
+        // (possibly NULL) deviceList below; fail instead.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not get channel device list", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    int numDevicesTypes = (*deviceList)->count;
+    bool captureDeviceFound = false;
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Found %d channel devices", __FUNCTION__, __LINE__,
+                 numDevicesTypes);
+
+    // Loop through all devices to get the one we want.
+    for (int i = 0; i < numDevicesTypes; i++)
+    {
+        SGDeviceName deviceTypeName = (*deviceList)->entry[i];
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Inspecting device number: %d", __FUNCTION__,
+                     __LINE__, i);
+        // Get the list with input devices
+        if (deviceTypeName.inputs)
+        {
+            SGDeviceInputList inputList = deviceTypeName.inputs;
+            int numInputDev = (*inputList)->count;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Device has %d inputs", __FUNCTION__, __LINE__,
+                         numInputDev);
+            for (int inputDevIndex = 0;
+                 inputDevIndex < numInputDev;
+                 inputDevIndex++)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d Inspecting input number: %d",
+                             __FUNCTION__, __LINE__, inputDevIndex);
+                SGDeviceInputName deviceInputName =
+                    (*inputList)->entry[inputDevIndex];
+                char devInName[64];
+                memset(devInName, 0, 64);
+
+                // SGDeviceInputName::name is a Str63, defined as a Pascal string.
+                // (Refer to MacTypes.h)
+                CFIndex devInNameLength =
+                    PascalStringToCString(deviceInputName.name, devInName,
+                                          sizeof(devInName));
+                // Log the converted length (the original logged
+                // sizeof(devInName), i.e. always 64).
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s:%d Converted pascal string with length:%d  "
+                             "to: %s", __FUNCTION__, __LINE__,
+                             (int) devInNameLength, devInName);
+                if (devInNameLength < 0)
+                {
+                    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "%s:%d Failed to convert device name from "
+                                 "pascal string to c string", __FUNCTION__,
+                                 __LINE__);
+                    // Dispose the device list before bailing out (the
+                    // original leaked it on this path).
+                    SGDisposeDeviceList(_captureGrabber, deviceList);
+                    return -1;
+                }
+
+                if (!strcmp(devInName, _captureDeviceDisplayName))
+                {
+                    WEBRTC_TRACE(webrtc::kTraceDebug,
+                                 webrtc::kTraceVideoCapture, _id,
+                                 "%s:%d We have found our device: %s",
+                                 __FUNCTION__, __LINE__,
+                                 _captureDeviceDisplayName);
+
+                    if (SGSetChannelDevice(_captureChannel, deviceTypeName.name)
+                        != noErr)
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceError,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Could not set capture device type: "
+                                     "%s",__FUNCTION__, __LINE__,
+                                     deviceTypeName.name);
+                        SGDisposeDeviceList(_captureGrabber, deviceList);
+                        return -1;
+                    }
+
+                    WEBRTC_TRACE(webrtc::kTraceInfo,
+                                 webrtc::kTraceVideoCapture, _id,
+                                 "%s:%d Capture device type is: %s",
+                                 __FUNCTION__, __LINE__, deviceTypeName.name);
+                    if (SGSetChannelDeviceInput(_captureChannel, inputDevIndex)
+                        != noErr)
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceError,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Could not set SG device",
+                                     __FUNCTION__, __LINE__);
+                        SGDisposeDeviceList(_captureGrabber, deviceList);
+                        return -1;
+                    }
+
+                    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "%s:%d Capture device: %s has successfully "
+                                 "been set", __FUNCTION__, __LINE__,
+                                 _captureDeviceDisplayName);
+                    captureDeviceFound = true;
+                    break;
+                }
+            }
+            if (captureDeviceFound)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s:%d Capture device found, breaking from loops",
+                             __FUNCTION__, __LINE__);
+                break;
+            }
+        }
+    }
+    err = SGDisposeDeviceList(_captureGrabber, deviceList);
+
+    if (!captureDeviceFound)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Failed to find capture device: %s. Returning -1",
+                     __FUNCTION__, __LINE__, _captureDeviceDisplayName);
+        return -1;
+    }
+
+    // Set the size we want from the capture device
+    Rect captureSize;
+    captureSize.left = 0;
+    captureSize.top = 0;
+    captureSize.right = _captureCapability.width;
+    captureSize.bottom = _captureCapability.height;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Using capture rect: l:%d t:%d r:%d b:%d", __FUNCTION__,
+                 __LINE__, captureSize.left, captureSize.top,
+                 captureSize.right, captureSize.bottom);
+
+    err = SGSetChannelBounds(_captureChannel, &captureSize);
+    if (err == noErr)
+    {
+        err = SGSetChannelUsage(_captureChannel, flags | seqGrabRecord);
+    }
+    if (err != noErr)
+    {
+        SGDisposeChannel(_captureGrabber, _captureChannel);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error setting SG channel to device", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Find out what video format we'll get from the capture device.
+    OSType compType;
+    err = SGGetVideoCompressorType(_captureChannel, &compType);
+
+    // Convert the Apple video format name to a VideoCapture name.
+    if (compType == k2vuyPixelFormat)
+    {
+        _captureVideoType = kVideoUYVY;
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers UYUV formatted frames",
+                     __FUNCTION__, __LINE__);
+    }
+    else if (compType == kYUVSPixelFormat)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers YUY2 formatted frames",
+                     __FUNCTION__, __LINE__);
+        _captureVideoType = kVideoYUY2;
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers frames in an unknown format: 0x%x. "
+                     "Consult QuickdrawTypes.h",
+                     __FUNCTION__, __LINE__, compType);
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers frames in an unknown format.",
+                     __FUNCTION__, __LINE__);
+        _captureVideoType = kVideoUnknown;
+    }
+
+    if (SGPrepare(_captureGrabber, false, true) != noErr)
+    {
+        // The caller owns _grabberCritsect ("Assumed critsect protected");
+        // the original erroneously released it here, unbalancing the
+        // caller's Enter/Leave pairing.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error starting sequence grabber", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Try to set the codec size as capture size.
+    err = SGSetChannelBounds(_captureChannel, &captureSize);
+
+    // Check if we really will get the size we asked for.
+    ImageDescriptionHandle imageDesc = (ImageDescriptionHandle) NewHandle(0);
+    err = SGGetChannelSampleDescription(_captureChannel, (Handle) imageDesc);
+
+    _trueCaptureWidth = (**imageDesc).width;
+    _trueCaptureHeight = (**imageDesc).height;
+
+    DisposeHandle((Handle) imageDesc);
+
+    _captureIsInitialized = true;
+    _sgPrepared = true;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Success starting sequence grabber", __FUNCTION__,
+                 __LINE__);
+
+    return 0;
+}
+
+// Assumed critsect protected
+// Stops the channel and releases the sequence-grabber resources created
+// by ConnectCaptureDevice(). Fails if capture is still running or no
+// device was connected. Assumed critsect protected by the caller.
+int VideoCaptureMacQuickTime::DisconnectCaptureDevice()
+{
+    if (_sgStarted)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Capture device is still running. Returning -1",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+    if (!_sgPrepared)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No capture device connected", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Close the capture channel
+    SGStop(_captureGrabber);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d !!!! releasing sg stuff", __FUNCTION__, __LINE__);
+    SGDisposeChannel(_captureGrabber, _captureChannel);
+    SGRelease(_captureGrabber);
+    CloseComponent(_captureGrabber);
+    // NOTE(review): _captureGrabber is released/closed above but NOT set
+    // to NULL, and VideoCaptureTerminate() releases it again while
+    // ConnectCaptureDevice() (via UpdateCaptureSettings) expects it to
+    // still be usable — confirm the intended ownership before changing.
+
+    // Reset all values
+    _captureChannel = NULL;
+    _captureVideoType = kVideoUnknown;
+    _trueCaptureWidth = 0;
+    _trueCaptureHeight = 0;
+    _captureIsInitialized = false;
+    _sgPrepared = false;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Sequence grabber removed", __FUNCTION__, __LINE__);
+
+    return 0;
+}
+
+// StartQuickTimeCapture
+//
+// Actually starts the camera
+// 
+// Actually starts the camera: begins sequence-grabber recording and the
+// decompression sequence that scales captured frames into _gWorld.
+// Returns 0 on success (or if already started / not prepared), -1 on
+// QuickTime errors.
+int VideoCaptureMacQuickTime::StartQuickTimeCapture()
+{
+    _grabberCritsect->Enter();
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Attempting to start sequence grabber", __FUNCTION__,
+                 __LINE__);
+
+    if (_sgStarted)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence grabber already started", __FUNCTION__,
+                     __LINE__);
+        return 0;
+    }
+    if (!_sgPrepared)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence grabber not prepared properly",
+                     __FUNCTION__, __LINE__);
+        return 0;
+    }
+
+    if (SGStartRecord(_captureGrabber) != noErr)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error starting sequence grabber", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    Rect captureRect = { 0, 0, 0, 0 };
+    MatrixRecord scaleMatrix;
+    ImageDescriptionHandle imageDesc = (ImageDescriptionHandle) NewHandle(0);
+
+    // Get the sample description for the channel, which is the same as for the
+    // capture device
+    if (SGGetChannelSampleDescription(_captureChannel, (Handle) imageDesc)
+        != noErr)
+    {
+        // Free the handle before bailing out (the original leaked it on
+        // this path).
+        DisposeHandle((Handle) imageDesc);
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error accessing device properties", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Create a scale matrix to scale the captured image
+    // Needed if we don't get the size wanted from the camera
+    captureRect.right = (**imageDesc).width;
+    captureRect.bottom = (**imageDesc).height;
+
+    Rect codecRect;
+    codecRect.left = 0;
+    codecRect.top = 0;
+    codecRect.right = _captureCapability.width;
+    codecRect.bottom = _captureCapability.height;
+    RectMatrix(&scaleMatrix, &captureRect, &codecRect);
+
+    // Start grabbing images from the capture device to _gWorld
+    if (DecompressSequenceBegin(&_captureSequence, imageDesc, _gWorld, NULL,
+                                NULL, &scaleMatrix, srcCopy, (RgnHandle) NULL,
+                                NULL, codecNormalQuality, bestSpeedCodec)
+        != noErr)
+    {
+        // Free the handle here as well (also leaked by the original).
+        DisposeHandle((Handle) imageDesc);
+        _grabberCritsect->Leave();
+        // Logged as an error; the original used kTraceInfo on this
+        // failure path.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error starting decompress sequence", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+    DisposeHandle((Handle) imageDesc);
+    _sgStarted = true;
+    _grabberCritsect->Leave();
+    return 0;
+}
+
+// Stops the sequence grabber and ends the decompression sequence. If
+// |wasCapturing| is non-NULL it receives whether capture was actually
+// running when this was called. Returns 0 on success, -1 if the grabber
+// could not be stopped.
+int VideoCaptureMacQuickTime::StopQuickTimeCapture(bool* wasCapturing)
+{
+    _grabberCritsect->Enter();
+    // Log whether the caller asked for the previous state. The original
+    // passed the raw pointer to a %d specifier, logging a truncated
+    // pointer value.
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d wasCapturing=%d", __FUNCTION__, __LINE__,
+                 wasCapturing != NULL);
+
+    if (!_sgStarted)
+    {
+        if (wasCapturing)
+            *wasCapturing = false;
+
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence grabber was never started", __FUNCTION__,
+                     __LINE__);
+        return 0;
+    }
+
+    if (wasCapturing)
+        *wasCapturing = true;
+
+    OSErr error = noErr;
+    error = SGStop(_captureGrabber);
+    CDSequenceEnd(_captureSequence);
+    _captureSequence = NULL;
+    _sgStarted = false;
+
+    _grabberCritsect->Leave();
+    if (error != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not stop sequence grabber", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+//-------------------------------------------------
+//
+//  Thread/function to keep capture device working
+//
+//-------------------------------------------------
+
+
+//
+// GrabberUpdateThread / GrabberUpdateProcess
+//
+// Called at a certain time interval to tell
+// the capture device / SequenceGrabber to
+// actually work.
+bool VideoCaptureMacQuickTime::GrabberUpdateThread(void* obj)
+{
+    return static_cast<VideoCaptureMacQuickTime*> (obj)->GrabberUpdateProcess();
+}
+
+bool VideoCaptureMacQuickTime::GrabberUpdateProcess()
+{
+    _grabberUpdateEvent->Wait(30);
+
+    if (_isCapturing == false)
+        return false;
+
+    _grabberCritsect->Enter();
+    if (_captureGrabber)
+    {
+        if (SGIdle(_captureGrabber) != noErr)
+        {
+        }
+    }
+    _grabberCritsect->Leave();
+    return true;
+}
+
+//
+// VideoCaptureStop
+//
+// Stops the capture device
+//
+int VideoCaptureMacQuickTime::VideoCaptureStop()
+{
+    if (_grabberUpdateThread)
+    {
+        _grabberUpdateThread->Stop();
+    }
+
+    _videoMacCritsect->Enter();
+    _grabberCritsect->Enter();
+    int retVal = StopQuickTimeCapture();
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+    if (retVal == -1)
+    {
+        return -1;
+    }
+
+    _isCapturing = false;
+    return 0;
+}
+
+//
+// VideoCaptureRun
+//
+// Starts the capture device and creates
+// the update thread.
+//
+int VideoCaptureMacQuickTime::VideoCaptureRun()
+{
+    _videoMacCritsect->Enter();
+    _grabberCritsect->Enter();
+
+    int res = StartQuickTimeCapture();
+
+    // Create the thread for updating sequence grabber if not created earlier
+    if (!_grabberUpdateThread)
+    {
+        _grabberUpdateEvent = EventWrapper::Create();
+        _grabberUpdateThread = ThreadWrapper::CreateThread(
+            VideoCaptureMacQuickTime::GrabberUpdateThread, this, kHighPriority);
+        unsigned int id;
+        _grabberUpdateThread->Start(id);
+    }
+    else
+    {
+        unsigned int id;
+        _grabberUpdateThread->Start(id);
+    }
+
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+
+    _isCapturing = true;
+    return res;
+}
+
+// ---------------------------------------------------------------------- 
+//
+// SendProcess
+// sequence grabber data procedure
+//
+// This function is called by the capture device as soon as a new
+// frame is available.
+//
+//
+// SendFrame
+//
+// The non-static function used by the capture device callback
+//
+// Input:
+//        sgChannel: the capture device channel generating the callback
+//        data:      the video frame
+//        length:    the data length in bytes
//        grabTime:  time stamp generated by the capture device / sequence grabber
+//
+// ----------------------------------------------------------------------
+
+OSErr VideoCaptureMacQuickTime::SendProcess(SGChannel sgChannel, Ptr p,
+                                            long len, long* /*offset*/,
+                                            long /*chRefCon*/, TimeValue time,
+                                            short /*writeType*/, long refCon)
+{
+    VideoCaptureMacQuickTime* videoEngine =
+        reinterpret_cast<VideoCaptureMacQuickTime*> (refCon);
+    return videoEngine->SendFrame(sgChannel, (char*) p, len, time);
+}
+
+int VideoCaptureMacQuickTime::SendFrame(SGChannel /*sgChannel*/, char* data,
+                                        long length, TimeValue /*grabTime*/)
+{
+    if (!_sgPrepared)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence Grabber is not initialized", __FUNCTION__,
+                     __LINE__);
+        return 0;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Frame has been delivered\n", __FUNCTION__, __LINE__);
+
+    CodecFlags ignore;
+    _grabberCritsect->Enter();
+    if (_gWorld)
+    {
+        // Will be set to true if we don't recognize the size and/or video
+        // format.
+        bool convertFrame = false;
+        WebRtc_Word32 width = 352;
+        WebRtc_Word32 height = 288;
+        WebRtc_Word32 frameSize = 0;
+
+        VideoCaptureCapability captureCapability;
+        captureCapability.width = width;
+        captureCapability.height = height;
+        captureCapability.maxFPS = 30;
+
+        switch (_captureVideoType)
+        {
+            case kVideoUYVY:
+                captureCapability.rawType = kVideoUYVY;
+                break;
+            case kVideoYUY2:
+                captureCapability.rawType = kVideoYUY2;
+                break;
+            case kVideoI420:
+                captureCapability.rawType = kVideoI420;
+                break;
+            default:
+                captureCapability.rawType = kVideoI420;
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d raw = I420 by default\n",
+                             __FUNCTION__, __LINE__);
+                break;
+        }
+
+        // Convert the camera video type to something VideoEngine can work with
+        // Check if we need to downsample the incomming frame.
+        switch (_captureVideoType)
+        {
+            case kVideoUYVY:
+            case kVideoYUY2:
+                frameSize = (width * height * 16) >> 3; // 16 is for YUY2 format
+                if (width == _captureCapability.width || height
+                    == _captureCapability.height)
+                {
+                    // Ok format and size, send the frame to super class
+                    IncomingFrame((WebRtc_UWord8*) data,
+                                  (WebRtc_Word32) frameSize, captureCapability,
+                                  TickTime::MillisecondTimestamp());
+
+                }
+                else if (width == _trueCaptureWidth && height
+                    == _trueCaptureHeight)
+                {
+                    // We need to scale the picture to correct size...
+                    // This happens for cameras not supporting all sizes.
+                    // E.g. older built-in iSight doesn't support QCIF.
+
+                    // Convert the incoming frame into our GWorld.
+                    int res =
+                        DecompressSequenceFrameS(_captureSequence, data,
+                                                 length, 0, &ignore, NULL);
+                    if (res != noErr && res != -8976) // 8796 ==  black frame
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceWarning,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Captured black frame. Not "
+                                     "processing it", __FUNCTION__, __LINE__);
+                        _grabberCritsect->Leave();
+                        return 0;
+                    }
+
+                    // Copy the frame from the PixMap to our video buffer
+                    PixMapHandle pixMap = GetGWorldPixMap(_gWorld);
+
+                    // Lock the image data in the GWorld.
+                    LockPixels(pixMap);
+
+                    // Get a pointer to the pixel data.
+                    Ptr capturedFrame = GetPixBaseAddr(pixMap);
+
+                    // Send the converted frame out to super class
+                    IncomingFrame((WebRtc_UWord8*) data,
+                                  (WebRtc_Word32) frameSize, captureCapability,
+                                  TickTime::MillisecondTimestamp());
+
+                    // Unlock the image data to get ready for the next frame.
+                    UnlockPixels(pixMap);
+                }
+                else
+                {
+                    // Not a size we recognize, use the Mac internal scaling...
+                    convertFrame = true;
+                    WEBRTC_TRACE(webrtc::kTraceDebug,
+                                 webrtc::kTraceVideoCapture, _id,
+                                 "%s:%d Not correct incoming stream size for "
+                                 "the format and configured size",
+                                 __FUNCTION__, __LINE__);
+                }
+                break;
+            default:
+
+                // Not a video format we recognize, use the Mac internal scaling
+                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d Unknown video frame format (default)",
+                             __FUNCTION__, __LINE__);
+                convertFrame = true;
+                break;
+        }
+
+        if (convertFrame)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Unrecognized frame format. Converting frame",
+                         __FUNCTION__, __LINE__);
+
+            // We don't recognise the input format. Convert to UYVY, I420 is not
+            // supported on osx. Decompress the grabbed frame into the GWorld,
+            // i.e from webcam format to ARGB (RGB24), and extract the frame.
+            int res = DecompressSequenceFrameS(_captureSequence, data, length,
+                                               0, &ignore, NULL);
+            if (res != noErr && res != -8976) // 8796 means a black frame
+            {
+                _grabberCritsect->Leave();
+                return 0;
+            }
+
+            // Copy the frame from the PixMap to our video buffer
+            PixMapHandle rgbPixMap = GetGWorldPixMap(_gWorld);
+            LockPixels(rgbPixMap);
+            Ptr capturedFrame = GetPixBaseAddr(rgbPixMap);
+
+            // Get the picture size
+            int width = (*rgbPixMap)->bounds.right;
+            int height = (*rgbPixMap)->bounds.bottom;
+
+            // 16 is for YUY2 format.
+            WebRtc_Word32 frameSize = (width * height * 16) >> 3;
+
+            // Ok format and size, send the frame to super class
+            IncomingFrame((WebRtc_UWord8*) data, (WebRtc_Word32) frameSize,
+                          captureCapability, TickTime::MillisecondTimestamp());
+
+            UnlockPixels(rgbPixMap);
+        }
+
+        // Tell the capture device it's ok to update.
+        SGUpdate(_captureGrabber, NULL);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No GWorld created, but frames are being delivered",
+                     __FUNCTION__, __LINE__);
+    }
+
+    _grabberCritsect->Leave();
+    return 0;
+}
+
+int VideoCaptureMacQuickTime::VideoCaptureInitThreadContext()
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d ", __FUNCTION__, __LINE__);
+    _videoMacCritsect->Enter();
+    EnterMoviesOnThread( kQTEnterMoviesFlagDontSetComponentsThreadMode);
+    _videoMacCritsect->Leave();
+    return 0;
+}
+
+//
+//
+//  Functions for handling capture devices
+//
+//
+
+VideoCaptureMacQuickTime::VideoCaptureMacName::VideoCaptureMacName() :
+    _size(0)
+{
+    memset(_name, 0, kVideoCaptureMacNameMaxSize);
+}
+
+int VideoCaptureMacQuickTime::VideoCaptureSetCaptureDevice(
+    const char* deviceName, int size)
+{
+
+
+    _videoMacCritsect->Enter();
+    bool wasCapturing = false;
+
+    _grabberCritsect->Enter();
+    if (_captureGrabber)
+    {
+        // Stop grabbing, disconnect and close the old capture device
+        StopQuickTimeCapture(&wasCapturing);
+        DisconnectCaptureDevice();
+        CloseComponent(_captureGrabber);
+        _captureDevice = NULL;
+        _captureGrabber = NULL;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Old capture device removed", __FUNCTION__,
+                     __LINE__);
+    }
+
+    if (deviceName == NULL || size == 0)
+    {
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        return 0;
+    }
+
+    if (size < 0)
+    {
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d 'size' is not valid", __FUNCTION__, __LINE__);
+        return 0;
+    }
+
+    ComponentDescription compCaptureType;
+
+    // Define the component we want to open
+    compCaptureType.componentType = SeqGrabComponentType;
+    compCaptureType.componentSubType = 0;
+    compCaptureType.componentManufacturer = 0;
+    compCaptureType.componentFlags = 0;
+    compCaptureType.componentFlagsMask = 0;
+
+    long numSequenceGrabbers = CountComponents(&compCaptureType);
+
+    // loop through the available grabbers and open the first possible
+    for (int i = 0; i < numSequenceGrabbers; i++)
+    {
+        _captureDevice = FindNextComponent(0, &compCaptureType);
+        _captureGrabber = OpenComponent(_captureDevice);
+        if (_captureGrabber != NULL)
+        {
+            // We've found a sequencegrabber that we could open
+            if (SGInitialize(_captureGrabber) != noErr)
+            {
+                _grabberCritsect->Leave();
+                _videoMacCritsect->Leave();
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s:%d Could not initialize sequence grabber",
+                             __FUNCTION__, __LINE__);
+                return -1;
+            }
+            break;
+        }
+        if (i == numSequenceGrabbers - 1)
+        {
+            // Couldn't open a sequence grabber
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Could not open a sequence grabber",
+                         __FUNCTION__, __LINE__);
+            return -1;
+        }
+    }
+
+    if (!_gWorld)
+    {
+        // We don't have a GWorld. Create one to enable early preview
+        // without calling SetSendCodec
+        if (CreateLocalGWorld(_captureCapability.width,
+                              _captureCapability.height) == -1)
+        {
+            // Error already logged
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            return -1;
+        }
+    }
+    // Connect the camera with our GWorld
+    int cpySize = size;
+    if ((unsigned int) size > sizeof(_captureDeviceDisplayName))
+    {
+        cpySize = sizeof(_captureDeviceDisplayName);
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Copying %d chars from deviceName to "
+                 "_captureDeviceDisplayName (size=%d)\n",
+                 __FUNCTION__, __LINE__, cpySize, size);
+    memcpy(_captureDeviceDisplayName, deviceName, cpySize);
+    if (ConnectCaptureDevice() == -1)
+    {
+        // Error already logged
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+
+    if (StartQuickTimeCapture() == -1)
+    {
+        // Error already logged
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+    return 0;
+}
+
+bool VideoCaptureMacQuickTime::IsCaptureDeviceSelected()
+{
+    _grabberCritsect->Leave();
+    return (_captureIsInitialized) ? true : false;
+    _grabberCritsect->Leave();
+}
+
+/**
+ Convert a Pascal string to a C string.
+ 
+ \param[in]  pascalString
+ Pascal string to convert. Pascal strings contain the number of 
+ characters in the first byte and are not null-terminated.
+ 
+ \param[out] cString
+ The C string buffer into which to copy the converted string.
+ 
+ \param[in]  bufferSize
+ The size of the C string buffer in bytes.
+ 
+ \return The number of characters in the string on success and -1 on failure.
+ */
+CFIndex VideoCaptureMacQuickTime::PascalStringToCString(
+    const unsigned char* pascalString, char* cString, CFIndex bufferSize)
+{
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                 "%s:%d Converting pascal string to c string", __FUNCTION__,
+                 __LINE__);
+    if (pascalString == NULL)
+    {
+        return -1;
+    }
+
+    if (cString == NULL)
+    {
+        return -1;
+    }
+
+    if (bufferSize == 0)
+    {
+        return -1;
+    }
+
+    CFIndex cStringLength = 0;
+    CFIndex maxStringLength = bufferSize - 1;
+
+    CFStringRef cfString = CFStringCreateWithPascalString(
+        NULL, pascalString, kCFStringEncodingMacRoman);
+    if (cfString == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                     "%s:%d Error in CFStringCreateWithPascalString()",
+                     __FUNCTION__, __LINE__);
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFIndex cfLength = CFStringGetLength(cfString);
+    cStringLength = cfLength;
+    if (cfLength > maxStringLength)
+    {
+        cStringLength = maxStringLength;
+    }
+
+    Boolean success = CFStringGetCString(cfString, cString, bufferSize,
+                                         kCFStringEncodingMacRoman);
+
+    // Ensure the problem isn't insufficient buffer length.
+    // This is fine; we will return a partial string.
+    if (success == false && cfLength <= maxStringLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                     "%s:%d Error in CFStringGetCString()", __FUNCTION__,
+                     __LINE__);
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFRelease(cfString);
+    return cStringLength;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.h b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.h
new file mode 100644
index 0000000..84e0667
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time.h
+ *
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_H_
+
+#include <QuickTime/QuickTime.h>
+
+
+#include "../../device_info_impl.h"
+#include "../../video_capture_impl.h"
+#include "list_wrapper.h"
+
+
+#define START_CODEC_WIDTH 352
+#define START_CODEC_HEIGHT 288
+#define SLEEP(x) usleep(x * 1000);
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+class VideoCaptureMacQuickTime : public VideoCaptureImpl
+{
+
+public:
+	VideoCaptureMacQuickTime(const WebRtc_Word32 id);
+	virtual ~VideoCaptureMacQuickTime();
+
+	static void Destroy(VideoCaptureModule* module);
+
+    WebRtc_Word32 Init(const WebRtc_Word32 id,
+                       const WebRtc_UWord8* deviceUniqueIdUTF8);
+	virtual WebRtc_Word32 StartCapture(
+	    const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+	virtual bool CaptureStarted();
+	virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+    // TODO: remove?
+    int VideoCaptureInitThreadContext();
+    int VideoCaptureTerminate();
+    int VideoCaptureSetCaptureDevice(const char* deviceName, int size);
+	int UpdateCaptureSettings(int channel, webrtc::VideoCodec& inst, bool def);
+    int VideoCaptureRun();
+    int VideoCaptureStop();
+
+protected:
+
+private: // functions
+
+    struct VideoCaptureMacName
+    {
+        VideoCaptureMacName();
+
+        enum { kVideoCaptureMacNameMaxSize = 64};
+        char _name[kVideoCaptureMacNameMaxSize];
+        CFIndex _size;
+    };
+
+    // Timeout value [ms] if we want to create a new device list or not
+    enum { kVideoCaptureDeviceListTimeout =     5000};
+    // Temporary constant allowing this size from builtin iSight webcams.
+    enum { kYuy2_1280_1024_length = 2621440};
+
+private:
+
+    // Capture device callback
+    static OSErr SendProcess(SGChannel sgChannel, Ptr p, long len, long *offset,
+                             long chRefCon, TimeValue time, short writeType,
+                             long refCon);
+    int SendFrame(SGChannel sgChannel, char* data, long length, TimeValue time);
+
+    // Capture device functions
+    int CreateLocalGWorld(int width, int height);
+    int RemoveLocalGWorld();
+    int ConnectCaptureDevice();
+    int DisconnectCaptureDevice();
+    virtual bool IsCaptureDeviceSelected();
+
+    // Process to make sure the capture device won't stop
+    static bool GrabberUpdateThread(void*);
+    bool GrabberUpdateProcess();
+
+    // Starts and stops the capture
+    int StartQuickTimeCapture();
+    int StopQuickTimeCapture(bool* wasCapturing = NULL);
+
+    static CFIndex PascalStringToCString(const unsigned char* pascalString,
+                                         char* cString,
+                                         CFIndex bufferSize);
+
+private: // variables
+	WebRtc_Word32			_id;
+	bool					_isCapturing;
+	VideoCaptureCapability	_captureCapability;
+    CriticalSectionWrapper* _grabberCritsect;
+    CriticalSectionWrapper* _videoMacCritsect;
+    bool                    _terminated;
+    webrtc::ThreadWrapper*  _grabberUpdateThread;
+    webrtc::EventWrapper*           _grabberUpdateEvent;
+    SeqGrabComponent        _captureGrabber;
+    Component               _captureDevice;
+    char                    _captureDeviceDisplayName[64];
+	RawVideoType		    _captureVideoType;
+    bool                    _captureIsInitialized;
+    GWorldPtr               _gWorld;
+    SGChannel               _captureChannel;
+    ImageSequence           _captureSequence;
+    bool                    _sgPrepared;
+    bool                    _sgStarted;
+    int                     _trueCaptureWidth;
+    int                     _trueCaptureHeight;
+    ListWrapper             _captureDeviceList;
+    unsigned long           _captureDeviceListTime;
+    ListWrapper             _captureCapabilityList;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.cc b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.cc
new file mode 100644
index 0000000..4f3cc1a
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.cc
@@ -0,0 +1,391 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time_info.cc
+ *
+ */
+
+#include "../../video_capture_config.h"
+#include "video_capture_quick_time_info.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "trace.h"
+#include "video_capture.h"
+
+namespace webrtc
+{
+
// Constructs the device-info object for QuickTime-based capture.
// iID is the trace/module id; a dedicated critical section is created to
// guard sequence-grabber access during device enumeration.
VideoCaptureMacQuickTimeInfo::VideoCaptureMacQuickTimeInfo(
    const WebRtc_Word32 iID) :
    DeviceInfoImpl(iID), _id(iID),
    _grabberCritsect(CriticalSectionWrapper::CreateCriticalSection())
{
}
+
+VideoCaptureMacQuickTimeInfo::~VideoCaptureMacQuickTimeInfo()
+{
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::Init()
+{
+
+    return 0;
+}
+
+WebRtc_UWord32 VideoCaptureMacQuickTimeInfo::NumberOfDevices()
+{
+    int numOfDevices = 0;
+
+    // don't care about these variables... dummy vars to call GetCaptureDevices
+    const int kNameLength = 1024;
+    WebRtc_UWord8 deviceNameUTF8[kNameLength] = "";
+    WebRtc_UWord8 deviceUniqueIdUTF8[kNameLength] = "";
+    WebRtc_UWord8 productUniqueIdUTF8[kNameLength] = "";
+
+    if (GetCaptureDevices(0, deviceNameUTF8, kNameLength, deviceUniqueIdUTF8,
+                          kNameLength, productUniqueIdUTF8, kNameLength,
+                          numOfDevices) != 0)
+    {
+        return 0;
+    }
+
+    return numOfDevices;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::GetDeviceName(
+    WebRtc_UWord32 deviceNumber, WebRtc_UWord8* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameUTF8Length, WebRtc_UWord8* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length, WebRtc_UWord8* productUniqueIdUTF8,
+    WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+
+    int numOfDevices = 0; // not needed for this function
+    return GetCaptureDevices(deviceNumber, deviceNameUTF8,
+                             deviceNameUTF8Length, deviceUniqueIdUTF8,
+                             deviceUniqueIdUTF8Length, productUniqueIdUTF8,
+                             productUniqueIdUTF8Length, numOfDevices);
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::NumberOfCapabilities(
+    const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::GetCapability(
+    const WebRtc_UWord8* deviceUniqueIdUTF8,
+    const WebRtc_UWord32 deviceCapabilityNumber,
+    VideoCaptureCapability& capability)
+{
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::GetBestMatchedCapability(
+    const WebRtc_UWord8*deviceUniqueIdUTF8,
+    const VideoCaptureCapability& requested, VideoCaptureCapability& resulting)
+{
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::DisplayCaptureSettingsDialogBox(
+    const WebRtc_UWord8* deviceUniqueIdUTF8,
+    const WebRtc_UWord8* dialogTitleUTF8, void* parentWindow,
+    WebRtc_UWord32 positionX, WebRtc_UWord32 positionY)
+{
+     return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::CreateCapabilityMap(
+    const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+int VideoCaptureMacQuickTimeInfo::GetCaptureDevices(
+    WebRtc_UWord32 deviceNumber, WebRtc_UWord8* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameUTF8Length, WebRtc_UWord8* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length, WebRtc_UWord8* productUniqueIdUTF8,
+    WebRtc_UWord32 productUniqueIdUTF8Length, int& numberOfDevices)
+{
+
+
+    numberOfDevices = 0;
+    memset(deviceNameUTF8, 0, deviceNameUTF8Length);
+    memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
+    memset(productUniqueIdUTF8, 0, productUniqueIdUTF8Length);
+
+    if (deviceNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Invalid deviceNumber", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    Component captureDevice = NULL;
+    SeqGrabComponent captureGrabber = NULL;
+    SGChannel captureChannel = NULL;
+    bool closeChannel = false;
+
+    ComponentDescription compCaptureType;
+
+    compCaptureType.componentType = SeqGrabComponentType;
+    compCaptureType.componentSubType = 0;
+    compCaptureType.componentManufacturer = 0;
+    compCaptureType.componentFlags = 0;
+    compCaptureType.componentFlagsMask = 0;
+
+    // Get the number of sequence grabbers
+    long numSequenceGrabbers = CountComponents(&compCaptureType);
+
+    if (deviceNumber > numSequenceGrabbers)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Invalid deviceNumber", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    if (numSequenceGrabbers <= 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No sequence grabbers available", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Open a sequence grabber
+    for (int seqGrabberIndex = 0;
+         seqGrabberIndex < numSequenceGrabbers;
+         seqGrabberIndex++)
+    {
+        captureDevice = FindNextComponent(0, &compCaptureType);
+        captureGrabber = OpenComponent(captureDevice);
+        if (captureGrabber != NULL)
+        {
+            // We've found a sequencegrabber
+            if (SGInitialize(captureGrabber) != noErr)
+            {
+                CloseComponent(captureGrabber);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d Could not init the sequence grabber",
+                             __FUNCTION__, __LINE__);
+                return -1;
+            }
+            break;
+        }
+        if (seqGrabberIndex == numSequenceGrabbers - 1)
+        {
+            // Couldn't open a sequence grabber
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Could not open a sequence grabber",
+                         __FUNCTION__, __LINE__);
+            return -1;
+        }
+    }
+
+    // Create a temporary channel to get the names of the capture devices.
+    // Takes time, make this in a nother way...
+    if (SGNewChannel(captureGrabber, VideoMediaType, &captureChannel) != noErr)
+    {
+        // Could not create a video channel...
+        SGRelease(captureGrabber);
+        CloseComponent(captureGrabber);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not create a sequence grabber video channel",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+    closeChannel = true;
+
+    // Find the type of capture devices, e.g. USB-devices, Firewire, DV, ...
+    SGDeviceList deviceList = NULL;
+    if (SGGetChannelDeviceList(captureChannel, sgDeviceListIncludeInputs,
+                               &deviceList) != noErr)
+    {
+        if (closeChannel)
+            SGDisposeChannel(captureGrabber, captureChannel);
+        if (captureGrabber)
+        {
+            SGRelease(captureGrabber);
+            CloseComponent(captureGrabber);
+        }
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not create a device list", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Loop through all device types and all devices for each type
+    // and store in a list.
+    int numDevices = (*deviceList)->count;
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Found %d devices", __FUNCTION__, __LINE__, numDevices);
+
+    for (int i = 0; i < numDevices; i++)
+    {
+
+        SGDeviceName sgDeviceName = (*deviceList)->entry[i];
+        // Get the list with input devices for this type of device
+        if (sgDeviceName.inputs)
+        {
+            SGDeviceInputList inputList = sgDeviceName.inputs;
+            int numInputDev = (*inputList)->count;
+
+            for (int inputDevIndex = 0;
+                 inputDevIndex < numInputDev;
+                 inputDevIndex++)
+            {
+                // Get the name for this capture device
+                SGDeviceInputName deviceInputName =
+                    (*inputList)->entry[inputDevIndex];
+
+                VideoCaptureMacName* deviceName = new VideoCaptureMacName();
+
+                deviceName->_size = PascalStringToCString(
+                    deviceInputName.name, deviceName->_name,
+                    sizeof(deviceName->_name));
+
+                if (deviceName->_size > 0)
+                {
+                    WEBRTC_TRACE(webrtc::kTraceDebug,webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "%s:%d Capture device %d: %s was successfully "
+                                 "set", __FUNCTION__, __LINE__, numberOfDevices,
+                                 deviceName->_name);
+
+                    if (numberOfDevices == deviceNumber)
+                    {
+                        strcpy((char*) deviceNameUTF8, deviceName->_name);
+                        strcpy((char*) deviceUniqueIdUTF8, deviceName->_name);
+                    }
+                    numberOfDevices++;
+                }
+                else
+                {
+                    delete deviceName;
+
+                    if (deviceName->_size < 0)
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceError,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Error in PascalStringToCString",
+                                     __FUNCTION__, __LINE__);
+                        return -1;
+                    }
+                }
+            }
+        }
+    }
+
+    // clean up
+    SGDisposeDeviceList(captureGrabber, deviceList);
+    if (closeChannel)
+    {
+        SGDisposeChannel(captureGrabber, captureChannel);
+    }
+    if (captureGrabber)
+    {
+        SGRelease(captureGrabber);
+        CloseComponent(captureGrabber);
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d End function successfully", __FUNCTION__, __LINE__);
+    return 0;
+}
+
+/**
+ Convert a Pascal string to a C string.
+
+ \param[in]  pascalString
+ Pascal string to convert. Pascal strings carry their character count in
+ the first byte and are not null-terminated.
+
+ \param[out] cString
+ The C string buffer; on success the result is null-terminated (truncated if needed).
+
+ \param[in]  bufferSize
+ The size of the C string buffer in bytes.
+
+ \return The number of characters in the string on success and -1 on failure.
+ */
+CFIndex VideoCaptureMacQuickTimeInfo::PascalStringToCString(
+    const unsigned char* pascalString, char* cString, CFIndex bufferSize)
+{
+    if (pascalString == NULL)
+    {
+        return -1;
+    }
+
+    if (cString == NULL)
+    {
+        return -1;
+    }
+
+    if (bufferSize == 0)
+    {
+        return -1;
+    }
+
+    CFIndex cStringLength = 0;
+    CFIndex maxStringLength = bufferSize - 1;
+
+    CFStringRef cfString = CFStringCreateWithPascalString(
+        NULL, pascalString, kCFStringEncodingMacRoman);
+    if (cfString == NULL)
+    {
+        // Creation failed: nothing to release (CFRelease(NULL) is an error).
+        return -1;
+    }
+
+    CFIndex cfLength = CFStringGetLength(cfString);
+    cStringLength = cfLength;
+    if (cfLength > maxStringLength)
+    {
+        cStringLength = maxStringLength;
+    }
+
+    Boolean success = CFStringGetCString(cfString, cString, bufferSize,
+                                         kCFStringEncodingMacRoman);
+
+    // A failed conversion is acceptable only when the cause is an
+    // insufficient buffer; in that case a truncated length is returned.
+    if (success == false && cfLength <= maxStringLength)
+    {
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFRelease(cfString);
+    return cStringLength;
+}
+
+//
+//
+//  Functions for handling capture devices
+//
+//
+
+VideoCaptureMacQuickTimeInfo::VideoCaptureMacName::VideoCaptureMacName() :
+    _size(0)
+{
+    memset(_name, 0, sizeof(_name)); // zero-fill so the name is always terminated
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.h b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.h
new file mode 100644
index 0000000..5114d11
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.h
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time_info.h
+ *
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_INFO_H_
+
+#include <QuickTime/QuickTime.h>
+
+#include "../../video_capture_impl.h"
+#include "../../device_info_impl.h"
+#include "list_wrapper.h"
+#include "map_wrapper.h"
+
+class VideoRenderCallback;
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class Trace;
+
+class VideoCaptureMacQuickTimeInfo: public DeviceInfoImpl
+{
+public:
+
+    static DeviceInfo* Create(const WebRtc_Word32 id);
+    static void Destroy(DeviceInfo* deviceInfo);
+
+    VideoCaptureMacQuickTimeInfo(const WebRtc_Word32 id);
+    virtual ~VideoCaptureMacQuickTimeInfo();
+
+    WebRtc_Word32 Init();
+
+    virtual WebRtc_UWord32 NumberOfDevices();
+
+    /*
+     * Returns the available capture devices.
+     * deviceNumber   -[in] index of capture device
+     * deviceNameUTF8 - friendly name of the capture device
+     * deviceUniqueIdUTF8 - unique name of the capture device if it exists.
+     *                      Otherwise the same as deviceNameUTF8.
+     * productUniqueIdUTF8 - unique product id if it exists. Null terminated
+     *                       otherwise.
+     */
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber, WebRtc_UWord8* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength, WebRtc_UWord8* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        WebRtc_UWord8* productUniqueIdUTF8 = 0,
+        WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+
+
+    // ************** The remaining public functions are not supported on Mac
+
+    /*
+     *   Returns the number of capabilities for this device
+     */
+    virtual WebRtc_Word32 NumberOfCapabilities(const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    /*
+     *   Gets the capabilities of the named device
+     */
+    virtual WebRtc_Word32 GetCapability(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability);
+
+    /*
+     *  Gets the capability that best matches the requested width, height and frame rate.
+     *  Returns the deviceCapabilityNumber on success.
+     */
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const WebRtc_UWord8*deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting);
+
+    /*
+     * Display OS /capture device specific settings dialog
+     */
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const WebRtc_UWord8* deviceUniqueIdUTF8,
+        const WebRtc_UWord8* dialogTitleUTF8, void* parentWindow,
+        WebRtc_UWord32 positionX, WebRtc_UWord32 positionY);
+
+protected:
+    virtual WebRtc_Word32 CreateCapabilityMap(
+        const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+private:
+
+    struct VideoCaptureMacName // fixed-size holder for one capture device name
+    {
+        VideoCaptureMacName();
+
+        enum
+        {
+            kVideoCaptureMacNameMaxSize = 64
+        };
+        char _name[kVideoCaptureMacNameMaxSize];
+        CFIndex _size; // number of valid characters stored in _name
+    };
+
+    enum
+    {
+        kVideoCaptureMacDeviceListTimeout = 5000
+    }; // Age [ms] after which a cached device list must be recreated.
+    enum
+    {
+        kYuy2_1280_1024_length = 2621440
+    }; // Temporary constant allowing this size from built-in iSight webcams.
+
+private:
+    // private methods
+
+    int GetCaptureDevices(WebRtc_UWord32 deviceNumber,
+                          WebRtc_UWord8* deviceNameUTF8,
+                          WebRtc_UWord32 deviceNameUTF8Length,
+                          WebRtc_UWord8* deviceUniqueIdUTF8,
+                          WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                          WebRtc_UWord8* productUniqueIdUTF8,
+                          WebRtc_UWord32 productUniqueIdUTF8Length,
+                          int& numberOfDevices);
+
+    static CFIndex PascalStringToCString(const unsigned char* pascalString,
+                                         char* cString, CFIndex bufferSize);
+
+private:
+    // member vars
+    WebRtc_Word32 _id; // id used in trace output
+    bool _terminated;
+    CriticalSectionWrapper* _grabberCritsect;
+    webrtc::Trace* _trace;
+    webrtc::ThreadWrapper* _grabberUpdateThread;
+    webrtc::EventWrapper* _grabberUpdateEvent;
+    SeqGrabComponent _captureGrabber;
+    Component _captureDevice;
+    char _captureDeviceDisplayName[64];
+    bool _captureIsInitialized;
+    GWorldPtr _gWorld;
+    SGChannel _captureChannel;
+    ImageSequence _captureSequence;
+    bool _sgPrepared;
+    bool _sgStarted;
+    int _codecWidth;
+    int _codecHeight;
+    int _trueCaptureWidth;
+    int _trueCaptureHeight;
+    ListWrapper _captureDeviceList;
+    WebRtc_Word64 _captureDeviceListTime; // presumably when _captureDeviceList was built — TODO confirm
+    ListWrapper _captureCapabilityList;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_INFO_H_
diff --git a/trunk/src/modules/video_capture/main/source/Mac/video_capture_mac.h b/trunk/src/modules/video_capture/main/source/Mac/video_capture_mac.h
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/video_capture_mac.h
diff --git a/trunk/src/modules/video_capture/main/source/Mac/video_capture_mac.mm b/trunk/src/modules/video_capture/main/source/Mac/video_capture_mac.mm
new file mode 100644
index 0000000..d329936
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Mac/video_capture_mac.mm
@@ -0,0 +1,275 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_mac.cc
+ *
+ */
+
+
+// super class stuff
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+#include "../video_capture_config.h"
+#include "ref_count.h"
+
+#include "trace.h"
+
+#include <QuickTime/QuickTime.h>
+
+// 10.4 support must be decided runtime. We will just decide which framework to
+// use at compile time "work" classes. One for QTKit, one for QuickTime
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+#include "QuickTime/video_capture_quick_time.h"
+#include "QuickTime/video_capture_quick_time_info.h"
+#else
+#include "QTKit/video_capture_qtkit.h"
+#include "QTKit/video_capture_qtkit_info.h"
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// Returns true when the host runs a supported Mac OS X version (10.4+).
+// static
+bool CheckOSVersion()
+{
+    // Ask the Gestalt manager for the packed OS version number; compare it
+    // against 0x1040, i.e. Mac OS X 10.4.
+    SInt32 version;
+    const OSErr gestaltErr = Gestalt(gestaltSystemVersion, &version);
+
+    if (gestaltErr != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "Could not get OS version");
+        return false;
+    }
+
+    if (version < 0x00001040) // Older version than Mac OSX 10.4
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "OS version too old: 0x%x", version);
+        return false;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                 "OS version compatible: 0x%x", version);
+
+    return true;
+}
+
+// Returns true when QuickTime 7.0 or newer is installed.
+// static
+bool CheckQTVersion()
+{
+    // Ask the Gestalt manager for the installed QuickTime version.
+    SInt32 version;
+    const OSErr gestaltErr = Gestalt(gestaltQuickTime, &version);
+
+    if (gestaltErr != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "Could not get QuickTime version");
+        return false;
+    }
+
+    if (version < 0x07000000) // QT v. 7.x or newer (QT 5.0.2 0x05020000)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "QuickTime version too old: 0x%x", version);
+        return false;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                 "QuickTime version compatible: 0x%x", version);
+
+    return true;
+}
+
+/**************************************************************************
+ *
+ *    Create/Destroy a VideoCaptureModule
+ *
+ ***************************************************************************/
+
+/*
+ *   Creates a platform-specific video capture module.
+ *
+ *   id                  - id used to identify this module in trace output
+ *   deviceUniqueIdUTF8  - unique id of the capture device the created
+ *                         module should use
+ *
+ *   Returns NULL if the OS (or QuickTime) version is unsupported.
+ */
+
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id, const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    // Reject hosts older than OS X 10.4 before allocating anything.
+    if (webrtc::videocapturemodule::CheckOSVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "OS version is too old. Could not create video capture "
+                     "module. Returning NULL");
+        return NULL;
+    }
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+    if (webrtc::videocapturemodule::CheckQTVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "QuickTime version is too old. Could not create video "
+                     "capture module. Returning NULL");
+        return NULL;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "%s line %d. QTKit is not supported on this machine. Using "
+                 "QuickTime framework to capture video",
+                 __FILE__, __LINE__);
+
+    RefCountImpl<videocapturemodule::VideoCaptureMacQuickTime>*
+        newCaptureModule =
+            new RefCountImpl<videocapturemodule::VideoCaptureMacQuickTime>(id);
+
+    if (!newCaptureModule) // defensive: operator new normally throws instead
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, !newCaptureModule",
+                     deviceUniqueIdUTF8);
+        return NULL;
+    }
+
+    if (newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, "
+                     "newCaptureModule->Init()!=0",
+                     deviceUniqueIdUTF8);
+        delete newCaptureModule;
+        return NULL;
+    }
+
+    // Successfully created a VideoCaptureMacQuickTime module. Return it.
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "Module created for unique device %s. Will use QuickTime "
+                 "framework to capture",
+                 deviceUniqueIdUTF8);
+    return newCaptureModule;
+
+#else // QTKit version
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "Using QTKit framework to capture video", id);
+
+    RefCountImpl<videocapturemodule::VideoCaptureMacQTKit>* newCaptureModule =
+        new RefCountImpl<videocapturemodule::VideoCaptureMacQTKit>(id);
+
+    if(!newCaptureModule) // defensive: operator new normally throws instead
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, !newCaptureModule",
+                     deviceUniqueIdUTF8);
+        return NULL;
+    }
+    if(newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, "
+                     "newCaptureModule->Init()!=0", deviceUniqueIdUTF8);
+        delete newCaptureModule;
+        return NULL;
+    }
+
+    // Successfully created a VideoCaptureMacQTKit module. Return it.
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "Module created for unique device %s, will use QTKit "
+                 "framework",deviceUniqueIdUTF8);
+    return newCaptureModule;
+#endif
+}
+
+/**************************************************************************
+ *
+ *    Create/Destroy a DeviceInfo
+ *
+ ***************************************************************************/
+
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo(const WebRtc_Word32 id)
+{
+    // Creates the platform-specific DeviceInfo object. Ownership passes to
+    // the caller; returns NULL when the host platform is unsupported.
+    if (webrtc::videocapturemodule::CheckOSVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "OS version is too old. Could not create video capture "
+                     "module. Returning NULL");
+        return NULL;
+    }
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+    if (webrtc::videocapturemodule::CheckQTVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "QuickTime version is too old. Could not create video "
+                     "capture module. Returning NULL");
+        return NULL;
+    }
+
+    webrtc::videocapturemodule::VideoCaptureMacQuickTimeInfo* newCaptureInfoModule =
+        new webrtc::videocapturemodule::VideoCaptureMacQuickTimeInfo(id);
+
+    if (!newCaptureInfoModule || newCaptureInfoModule->Init() != 0)
+    {
+        Destroy(newCaptureInfoModule); // NOTE(review): QTKit branch uses delete — confirm Destroy is equivalent
+        newCaptureInfoModule = NULL;
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                     "Failed to Init newCaptureInfoModule created with id %d "
+                     "and device \"\" ", id);
+        return NULL;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "VideoCaptureModule created for id %d", id);
+    return newCaptureInfoModule;
+
+#else // QTKit version
+    webrtc::videocapturemodule::VideoCaptureMacQTKitInfo* newCaptureInfoModule =
+        new webrtc::videocapturemodule::VideoCaptureMacQTKitInfo(id);
+
+    if(!newCaptureInfoModule || newCaptureInfoModule->Init() != 0)
+    {
+        // Init() failed; free the half-initialized info object.
+        delete newCaptureInfoModule;
+        newCaptureInfoModule = NULL;
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                     "Failed to Init newCaptureInfoModule created with id %d "
+                     "and device \"\" ", id);
+        return NULL;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "VideoCaptureModule created for id %d", id);
+    return newCaptureInfoModule;
+
+#endif
+
+}
+
+/**************************************************************************
+ *
+ *    End Create/Destroy VideoCaptureModule
+ *
+ ***************************************************************************/
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/video_capture/main/source/Windows/capture_delay_values_windows.h b/trunk/src/modules/video_capture/main/source/Windows/capture_delay_values_windows.h
new file mode 100644
index 0000000..a4d9da6
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/capture_delay_values_windows.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_CAPTURE_DELAY_VALUES_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_CAPTURE_DELAY_VALUES_WINDOWS_H_
+
+#include "../video_capture_delay.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+const WebRtc_Word32 NoWindowsCaptureDelays=1; // "No" = number of entries below
+const DelayValues WindowsCaptureDelays[NoWindowsCaptureDelays]= // per-device {width,height,delay[ms]} tuples
+{ 
+    "Microsoft LifeCam Cinema","usb#vid_045e&pid_075d",{{640,480,125},{640,360,117},{424,240,111},{352,288,111},{320,240,116},{176,144,101},{160,120,109},{1280,720,166},{960,544,126},{800,448,120},{800,600,127}},
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_CAPTURE_DELAY_VALUES_WINDOWS_H_
diff --git a/trunk/src/modules/video_capture/main/source/Windows/device_info_windows.cc b/trunk/src/modules/video_capture/main/source/Windows/device_info_windows.cc
new file mode 100644
index 0000000..5adcc96
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/device_info_windows.cc
@@ -0,0 +1,792 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_windows.h"
+
+#include "../video_capture_config.h"
+#include "help_functions_windows.h"
+#include "capture_delay_values_windows.h"
+#include "ref_count.h"
+#include "trace.h"
+
+#include <Streams.h>
+#include <Dvdmedia.h>
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+                                                        const WebRtc_Word32 id)
+{
+    // Create the DirectShow device-info object; the caller takes ownership.
+    // A failed Init() means no usable device enumerator, so return NULL.
+    videocapturemodule::DeviceInfoWindows* winInfo =
+        new videocapturemodule::DeviceInfoWindows(id);
+    if (winInfo && winInfo->Init() == 0)
+    {
+        return winInfo;
+    }
+    delete winInfo;
+    return NULL;
+}
+
+DeviceInfoWindows::DeviceInfoWindows(const WebRtc_Word32 id)
+    : DeviceInfoImpl(id), _dsDevEnum(NULL), _dsMonikerDevEnum(NULL),
+      _CoUninitializeIsRequired(true)
+{
+    // 1) Initialize the COM library (make Windows load the DLLs).
+    //
+    // CoInitializeEx must be called at least once, and is usually called
+    // only once, for each thread that uses the COM library. Multiple calls
+    // to CoInitializeEx by the same thread are allowed as long as they
+    // pass the same concurrency flag, but subsequent valid calls return
+    // S_FALSE. To close the COM library gracefully on a thread, each
+    // successful call to CoInitializeEx, including any call that returns
+    // S_FALSE, must be balanced by a corresponding call to CoUninitialize.
+    //
+
+    // Apartment threading would serialize all incoming COM calls onto the
+    // creating thread at message-queue boundaries, which removes the need
+    // for concurrency control in the object but constrains callers.
+    // COINIT_MULTITHREADED is used instead because the Voice Engine also
+    // initializes COM with COINIT_MULTITHREADED; mixing modes on the same
+    // thread fails with RPC_E_CHANGED_MODE (handled below).
+
+    HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+    if (FAILED(hr))
+    {
+        // Avoid calling CoUninitialize() since CoInitializeEx() failed.
+        _CoUninitializeIsRequired = false;
+
+        if (hr == RPC_E_CHANGED_MODE)
+        {
+            // Calling thread has already initialized COM to be used in a
+            // single-threaded apartment (STA). The threading mode cannot
+            // be changed after it has been set.
+            // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is set".
+            //
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                         "VideoCaptureWindowsDSInfo::VideoCaptureWindowsDSInfo "
+                         "CoInitializeEx(NULL, COINIT_MULTITHREADED) => "
+                         "RPC_E_CHANGED_MODE, error 0x%x",
+                         hr);
+        }
+    }
+}
+
+DeviceInfoWindows::~DeviceInfoWindows()
+{
+    RELEASE_AND_CLEAR(_dsMonikerDevEnum); // release COM objects before
+    RELEASE_AND_CLEAR(_dsDevEnum);        // (possibly) closing COM below
+    if (_CoUninitializeIsRequired) // only if the ctor's CoInitializeEx succeeded
+    {
+        CoUninitialize();
+    }
+}
+
+WebRtc_Word32 DeviceInfoWindows::Init()
+{
+    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
+                                  IID_ICreateDevEnum, (void **) &_dsDevEnum); // DirectShow system device enumerator
+    if (hr != NOERROR)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create CLSID_SystemDeviceEnum, error 0x%x", hr);
+        return -1; // without the enumerator no devices can be listed
+    }
+    return 0;
+}
+WebRtc_UWord32 DeviceInfoWindows::NumberOfDevices()
+{
+    ReadLockScoped cs(_apiLock);
+    // Counting is done by running a full enumeration with null buffers.
+    return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0);
+}
+WebRtc_Word32 DeviceInfoWindows::GetDeviceName(
+                                       WebRtc_UWord32 deviceNumber,
+                                       WebRtc_UWord8* deviceNameUTF8,
+                                       WebRtc_UWord32 deviceNameLength,
+                                       WebRtc_UWord8* deviceUniqueIdUTF8,
+                                       WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                       WebRtc_UWord8* productUniqueIdUTF8,
+                                       WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    ReadLockScoped cs(_apiLock);
+    // GetDeviceInfo returns the total device count; the output buffers are
+    // only valid when deviceNumber was within that count.
+    const WebRtc_Word32 deviceCount = GetDeviceInfo(
+        deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8,
+        deviceUniqueIdUTF8Length, productUniqueIdUTF8,
+        productUniqueIdUTF8Length);
+    return (deviceCount > (WebRtc_Word32) deviceNumber) ? 0 : -1;
+}
+
+WebRtc_Word32 DeviceInfoWindows::GetDeviceInfo(
+                                       WebRtc_UWord32 deviceNumber,
+                                       WebRtc_UWord8* deviceNameUTF8,
+                                       WebRtc_UWord32 deviceNameLength,
+                                       WebRtc_UWord8* deviceUniqueIdUTF8,
+                                       WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                       WebRtc_UWord8* productUniqueIdUTF8,
+                                       WebRtc_UWord32 productUniqueIdUTF8Length)
+
+{
+
+    // enumerate all video capture devices
+    RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+    HRESULT hr =
+        _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+                                          &_dsMonikerDevEnum, 0);
+    if (hr != NOERROR)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to enumerate CLSID_SystemDeviceEnum, error 0x%x."
+                     " No webcam exist?", hr);
+        return 0;
+    }
+
+    _dsMonikerDevEnum->Reset();
+    ULONG cFetched;
+    IMoniker *pM;
+    int index = 0;
+    while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched))
+    {
+        IPropertyBag *pBag;
+        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **) &pBag);
+        if (S_OK == hr)
+        {
+            // Find the description or friendly name.
+            VARIANT varName;
+            VariantInit(&varName);
+            hr = pBag->Read(L"Description", &varName, 0);
+            if (FAILED(hr))
+            {
+                hr = pBag->Read(L"FriendlyName", &varName, 0);
+            }
+            if (SUCCEEDED(hr))
+            {
+                // ignore all VFW drivers
+                if ((wcsstr(varName.bstrVal, (L"(VFW)")) == NULL) &&
+                    (_wcsnicmp(varName.bstrVal, (L"Google Camera Adapter"),21)
+                        != 0))
+                {
+                    // Found a valid device
+                    if (index == deviceNumber) // This is the device we are interested in.
+                    {
+                        int convResult = 0;
+                        if (deviceNameLength > 0)
+                        {
+                            convResult = WideCharToMultiByte(CP_UTF8, 0,
+                                                             varName.bstrVal, -1,
+                                                             (char*) deviceNameUTF8,
+                                                             deviceNameLength, NULL,
+                                                             NULL);
+                            if (convResult == 0)
+                            {
+                                WEBRTC_TRACE(webrtc::kTraceError,
+                                             webrtc::kTraceVideoCapture, _id,
+                                             "Failed to convert device name to UTF8. %d",
+                                             GetLastError());
+                                return -1;
+                            }
+                        }
+                        if (deviceUniqueIdUTF8Length > 0)
+                        {
+                            hr = pBag->Read(L"DevicePath", &varName, 0);
+                            if (FAILED(hr))
+                            {
+                                strncpy_s((char *) deviceUniqueIdUTF8,
+                                          deviceUniqueIdUTF8Length,
+                                          (char *) deviceNameUTF8, convResult);
+                                WEBRTC_TRACE(webrtc::kTraceError,
+                                             webrtc::kTraceVideoCapture, _id,
+                                             "Failed to get deviceUniqueIdUTF8 using deviceNameUTF8");
+                            }
+                            else
+                            {
+                                convResult = WideCharToMultiByte(
+                                                          CP_UTF8,
+                                                          0,
+                                                          varName.bstrVal,
+                                                          -1,
+                                                          (char*) deviceUniqueIdUTF8,
+                                                          deviceUniqueIdUTF8Length,
+                                                          NULL, NULL);
+                                if (convResult == 0)
+                                {
+                                    WEBRTC_TRACE(webrtc::kTraceError,
+                                                 webrtc::kTraceVideoCapture, _id,
+                                                 "Failed to convert device name to UTF8. %d",
+                                                 GetLastError());
+                                    return -1;
+                                }
+                                if (productUniqueIdUTF8
+                                    && productUniqueIdUTF8Length > 0)
+                                {
+                                    GetProductId(deviceUniqueIdUTF8,
+                                                 productUniqueIdUTF8,
+                                                 productUniqueIdUTF8Length);
+                                }
+                            }
+                        }
+
+                    }
+                    ++index; // increase the number of valid devices
+                }
+            }
+            VariantClear(&varName);
+            pBag->Release();
+            pM->Release();
+        }
+
+    }
+    if (deviceNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "%s %s",
+                     __FUNCTION__, deviceNameUTF8);
+    }
+    return index;
+}
+
+IBaseFilter * DeviceInfoWindows::GetDeviceFilter(
+                                     const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                     WebRtc_UWord8* productUniqueIdUTF8,
+                                     WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    // Binds and returns the DirectShow capture filter whose DevicePath
+    // (converted to UTF-8) starts with deviceUniqueIdUTF8. The caller owns
+    // the returned filter and must Release() it. Optionally extracts the
+    // product id into productUniqueIdUTF8. Returns NULL when the id is too
+    // long, enumeration fails, or no device matches.
+
+    const WebRtc_Word32 deviceUniqueIdUTF8Length =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8); // UTF8 is also NULL terminated
+    if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Device name too long");
+        return NULL;
+    }
+
+    // enumerate all video capture devices
+    RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+    HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+                                                   &_dsMonikerDevEnum, 0);
+    if (hr != NOERROR)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to enumerate CLSID_SystemDeviceEnum, error 0x%x."
+                     " No webcam exist?", hr);
+        return 0;
+    }
+    _dsMonikerDevEnum->Reset();
+    ULONG cFetched;
+    IMoniker *pM;
+
+    IBaseFilter *captureFilter = NULL;
+    bool deviceFound = false;
+    while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched) && !deviceFound)
+    {
+        IPropertyBag *pBag;
+        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **) &pBag);
+        if (S_OK == hr)
+        {
+            // Read the device path (preferred); fall back to the
+            // human-readable description or friendly name if missing.
+            VARIANT varName;
+            VariantInit(&varName);
+            if (deviceUniqueIdUTF8Length > 0)
+            {
+                hr = pBag->Read(L"DevicePath", &varName, 0);
+                if (FAILED(hr))
+                {
+                    hr = pBag->Read(L"Description", &varName, 0);
+                    if (FAILED(hr))
+                    {
+                        hr = pBag->Read(L"FriendlyName", &varName, 0);
+                    }
+                }
+                if (SUCCEEDED(hr))
+                {
+                    char tempDevicePathUTF8[256];
+                    tempDevicePathUTF8[0] = 0;
+                    const int compresult =
+                        WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
+                                            tempDevicePathUTF8,
+                                            sizeof(tempDevicePathUTF8), NULL,
+                                            NULL);
+                    // NOTE(review): compresult is not checked; if the
+                    // conversion fails the buffer stays empty and simply
+                    // will not match below.
+                    // Prefix compare: the stored unique id may be shorter
+                    // than the full DevicePath.
+                    if (strncmp(tempDevicePathUTF8,
+                                (const char*) deviceUniqueIdUTF8,
+                                deviceUniqueIdUTF8Length) == 0)
+                    {
+                        // We have found the requested device
+                        deviceFound = true;
+                        hr = pM->BindToObject(0, 0, IID_IBaseFilter,
+                                              (void**) &captureFilter);
+                        if FAILED(hr)
+                        {
+                            // deviceFound stays true, so the loop exits and
+                            // NULL is returned to the caller.
+                            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                                         _id, "Failed to bind to the selected capture device %d",hr);
+                        }
+
+                        if (productUniqueIdUTF8
+                            && productUniqueIdUTF8Length > 0) // Get the device name
+                        {
+
+                            GetProductId(deviceUniqueIdUTF8,
+                                         productUniqueIdUTF8,
+                                         productUniqueIdUTF8Length);
+                        }
+
+                    }
+                }
+            }
+            VariantClear(&varName);
+            pBag->Release();
+            pM->Release();
+        }
+    }
+    return captureFilter;
+}
+
+WebRtc_Word32 DeviceInfoWindows::GetWindowsCapability(
+                              const WebRtc_Word32 capabilityIndex,
+                              VideoCaptureCapabilityWindows& windowsCapability)
+
+{
+    // Copies the stored DirectShow capability at capabilityIndex into
+    // windowsCapability. Returns 0 on success, -1 for an invalid index.
+    ReadLockScoped cs(_apiLock);
+
+    // Reject out-of-range indices before touching the map.
+    if (capabilityIndex < 0 || capabilityIndex >= _captureCapabilities.Size())
+        return -1;
+
+    MapItem* mapItem = _captureCapabilities.Find(capabilityIndex);
+    if (!mapItem)
+        return -1;
+
+    windowsCapability =
+        *static_cast<VideoCaptureCapabilityWindows*> (mapItem->GetItem());
+    return 0;
+}
+
+WebRtc_Word32 DeviceInfoWindows::CreateCapabilityMap(
+                                         const WebRtc_UWord8* deviceUniqueIdUTF8)
+
+{
+    // Rebuilds _captureCapabilities with every format/resolution/frame-rate
+    // combination the device identified by deviceUniqueIdUTF8 exposes via
+    // IAMStreamConfig. Returns the number of capabilities stored, -1 on
+    // failure.
+
+    // Reset old capability list
+    MapItem* item = NULL;
+    while (item = _captureCapabilities.Last())
+    {
+        delete item->GetItem();
+        _captureCapabilities.Erase(item);
+    }
+
+    const WebRtc_Word32 deviceUniqueIdUTF8Length =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Device name too long");
+        return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
+
+
+    WebRtc_UWord8 productId[kVideoCaptureProductIdLength];
+    IBaseFilter* captureDevice = DeviceInfoWindows::GetDeviceFilter(
+                                               deviceUniqueIdUTF8,
+                                               productId,
+                                               kVideoCaptureProductIdLength);
+    if (!captureDevice)
+        return -1;
+    IPin* outputCapturePin = GetOutputPin(captureDevice);
+    if (!outputCapturePin)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to get capture device output pin");
+        RELEASE_AND_CLEAR(captureDevice);
+        return -1;
+    }
+    // Purely informational: log whether this is an external (e.g. DV) device.
+    IAMExtDevice* extDevice = NULL;
+    HRESULT hr = captureDevice->QueryInterface(IID_IAMExtDevice,
+                                               (void **) &extDevice);
+    if (SUCCEEDED(hr) && extDevice)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "This is an external device");
+        extDevice->Release();
+    }
+
+    IAMStreamConfig* streamConfig = NULL;
+    hr = outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+                                          (void**) &streamConfig);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to get IID_IAMStreamConfig interface from capture device");
+        // Fix: the original code leaked outputCapturePin and captureDevice
+        // on this error path.
+        RELEASE_AND_CLEAR(outputCapturePin);
+        RELEASE_AND_CLEAR(captureDevice);
+        return -1;
+    }
+
+    // this  gets the FPS
+    IAMVideoControl* videoControlConfig = NULL;
+    HRESULT hrVC = captureDevice->QueryInterface(IID_IAMVideoControl,
+                                      (void**) &videoControlConfig);
+    if (FAILED(hrVC))
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "IID_IAMVideoControl Interface NOT SUPPORTED");
+    }
+
+    AM_MEDIA_TYPE *pmt = NULL;
+    VIDEO_STREAM_CONFIG_CAPS caps;
+    int count, size;
+
+    hr = streamConfig->GetNumberOfCapabilities(&count, &size);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to GetNumberOfCapabilities");
+        RELEASE_AND_CLEAR(videoControlConfig);
+        RELEASE_AND_CLEAR(streamConfig);
+        RELEASE_AND_CLEAR(outputCapturePin);
+        RELEASE_AND_CLEAR(captureDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 index = 0; // Index in created _capabilities map
+    // Check if the device support formattype == FORMAT_VideoInfo2 and FORMAT_VideoInfo.
+    // Prefer FORMAT_VideoInfo since some cameras (ZureCam) has been seen having problem with MJPEG and FORMAT_VideoInfo2
+    // Interlace flag is only supported in FORMAT_VideoInfo2
+    bool supportFORMAT_VideoInfo2 = false;
+    bool supportFORMAT_VideoInfo = false;
+    bool foundInterlacedFormat = false;
+    GUID preferedVideoFormat = FORMAT_VideoInfo;
+    for (WebRtc_Word32 tmp = 0; tmp < count; ++tmp)
+    {
+        hr = streamConfig->GetStreamCaps(tmp, &pmt,
+                                         reinterpret_cast<BYTE*> (&caps));
+        if (!FAILED(hr))
+        {
+            if (pmt->majortype == MEDIATYPE_Video
+                && pmt->formattype == FORMAT_VideoInfo2)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                             " Device support FORMAT_VideoInfo2");
+                supportFORMAT_VideoInfo2 = true;
+                VIDEOINFOHEADER2* h =
+                    reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+                assert(h);
+                foundInterlacedFormat |= h->dwInterlaceFlags
+                                        & (AMINTERLACE_IsInterlaced
+                                           | AMINTERLACE_DisplayModeBobOnly);
+            }
+            if (pmt->majortype == MEDIATYPE_Video
+                && pmt->formattype == FORMAT_VideoInfo)
+            {
+                // Fix: the original trace wrongly said FORMAT_VideoInfo2 here.
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                             " Device support FORMAT_VideoInfo");
+                supportFORMAT_VideoInfo = true;
+            }
+            // Fix: free the media type allocated by GetStreamCaps; the
+            // original detection loop leaked it on every iteration.
+            DeleteMediaType(pmt);
+            pmt = NULL;
+        }
+    }
+    if (supportFORMAT_VideoInfo2)
+    {
+        if (supportFORMAT_VideoInfo && !foundInterlacedFormat)
+        {
+            preferedVideoFormat = FORMAT_VideoInfo;
+        }
+        else
+        {
+            preferedVideoFormat = FORMAT_VideoInfo2;
+        }
+    }
+
+    for (WebRtc_Word32 tmp = 0; tmp < count; ++tmp)
+    {
+        hr = streamConfig->GetStreamCaps(tmp, &pmt,
+                                         reinterpret_cast<BYTE*> (&caps));
+        if (FAILED(hr))
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to GetStreamCaps");
+            RELEASE_AND_CLEAR(videoControlConfig);
+            RELEASE_AND_CLEAR(streamConfig);
+            RELEASE_AND_CLEAR(outputCapturePin);
+            RELEASE_AND_CLEAR(captureDevice);
+            return -1;
+        }
+
+        if (pmt->majortype == MEDIATYPE_Video
+            && pmt->formattype == preferedVideoFormat)
+        {
+
+            VideoCaptureCapabilityWindows* capability =
+                                        new VideoCaptureCapabilityWindows();
+            WebRtc_Word64 avgTimePerFrame = 0;
+
+            if (pmt->formattype == FORMAT_VideoInfo)
+            {
+                VIDEOINFOHEADER* h =
+                    reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat);
+                assert(h);
+                capability->directShowCapabilityIndex = tmp;
+                capability->width = h->bmiHeader.biWidth;
+                capability->height = h->bmiHeader.biHeight;
+                avgTimePerFrame = h->AvgTimePerFrame;
+            }
+            if (pmt->formattype == FORMAT_VideoInfo2)
+            {
+                VIDEOINFOHEADER2* h =
+                    reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+                assert(h);
+                capability->directShowCapabilityIndex = tmp;
+                capability->width = h->bmiHeader.biWidth;
+                capability->height = h->bmiHeader.biHeight;
+                capability->interlaced = h->dwInterlaceFlags
+                                        & (AMINTERLACE_IsInterlaced
+                                           | AMINTERLACE_DisplayModeBobOnly);
+                avgTimePerFrame = h->AvgTimePerFrame;
+            }
+
+            if (hrVC == S_OK)
+            {
+                LONGLONG *maxFps = NULL; // array, CoTaskMemAlloc'd by the driver
+                long listSize;
+                SIZE size;
+                size.cx = capability->width;
+                size.cy = capability->height;
+
+                // GetMaxAvailableFrameRate doesn't return max frame rate always
+                // eg: Logitech Notebook. This may be due to a bug in that API
+                // because GetFrameRateList array is reversed in the above camera. So
+                // a util method written. Can't assume the first value will return
+                // the max fps.
+                hrVC = videoControlConfig->GetFrameRateList(outputCapturePin,
+                                                            tmp, size,
+                                                            &listSize,
+                                                            &maxFps);
+
+                if (hrVC == S_OK && listSize > 0)
+                {
+                    LONGLONG maxFPS = GetMaxOfFrameArray(maxFps, listSize);
+                    capability->maxFPS = static_cast<int> (10000000
+                                                           / maxFPS);
+                    capability->supportFrameRateControl = true;
+                }
+                else // use existing method
+                {
+                    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "GetMaxAvailableFrameRate NOT SUPPORTED");
+                    if (avgTimePerFrame > 0)
+                        capability->maxFPS = static_cast<int> (10000000
+                                                               / avgTimePerFrame);
+                    else
+                        capability->maxFPS = 0;
+                }
+                // Fix: free the frame-rate list allocated by the driver
+                // (leaked in the original code).
+                if (maxFps)
+                {
+                    CoTaskMemFree(maxFps);
+                }
+            }
+            else // use existing method in case IAMVideoControl is not supported
+            {
+                if (avgTimePerFrame > 0)
+                    capability->maxFPS = static_cast<int> (10000000
+                                                           / avgTimePerFrame);
+                else
+                    capability->maxFPS = 0;
+            }
+
+            // can't switch MEDIATYPE :~(
+            if (pmt->subtype == MEDIASUBTYPE_I420)
+            {
+                capability->rawType = kVideoI420;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_IYUV)
+            {
+                capability->rawType = kVideoIYUV;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_RGB24)
+            {
+                capability->rawType = kVideoRGB24;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_YUY2)
+            {
+                capability->rawType = kVideoYUY2;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_RGB565)
+            {
+                capability->rawType = kVideoRGB565;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_MJPG)
+            {
+                capability->rawType = kVideoMJPEG;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_dvsl
+                    || pmt->subtype == MEDIASUBTYPE_dvsd
+                    || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
+            {
+                capability->rawType = kVideoYUY2;// MS DV filter seems to create this type
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
+            {
+                capability->rawType = kVideoUYVY;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
+            {
+                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                             "Device support HDYC.");
+                capability->rawType = kVideoUYVY;
+            }
+            else
+            {
+                WCHAR strGuid[39];
+                StringFromGUID2(pmt->subtype, strGuid, 39);
+                // Fix: the format string expects width and height arguments
+                // that the original call did not pass (undefined behavior).
+                WEBRTC_TRACE( webrtc::kTraceWarning,
+                             webrtc::kTraceVideoCapture, _id,
+                             "Device support unknown media type %ls, width %d, height %d",
+                             strGuid, capability->width, capability->height);
+                delete capability;
+                // Fix: release pmt before continue; the original skipped the
+                // DeleteMediaType at the bottom of the loop and leaked it.
+                DeleteMediaType(pmt);
+                pmt = NULL;
+                continue;
+            }
+
+            // Get the expected capture delay from the static list
+            capability->expectedCaptureDelay
+                            = GetExpectedCaptureDelay(WindowsCaptureDelays,
+                                                      NoWindowsCaptureDelays,
+                                                      productId,
+                                                      capability->width,
+                                                      capability->height);
+            _captureCapabilities.Insert(index++, capability);
+            WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                         "Camera capability, width:%d height:%d type:%d fps:%d",
+                         capability->width, capability->height,
+                         capability->rawType, capability->maxFPS);
+        }
+        DeleteMediaType(pmt);
+        pmt = NULL;
+    }
+    RELEASE_AND_CLEAR(streamConfig);
+    RELEASE_AND_CLEAR(videoControlConfig);
+    RELEASE_AND_CLEAR(outputCapturePin);
+    RELEASE_AND_CLEAR(captureDevice); // Release the capture device
+
+    // Store the new used device name
+    _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+    _lastUsedDeviceName = (WebRtc_UWord8*) realloc(_lastUsedDeviceName,
+                                                   _lastUsedDeviceNameLength
+                                                       + 1);
+    // NOTE(review): if realloc fails the memcpy below dereferences NULL;
+    // confirm whether out-of-memory should be handled here.
+    memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength+ 1);
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CreateCapabilityMap %d", _captureCapabilities.Size());
+
+    return _captureCapabilities.Size();
+}
+
+/* Constructs a product ID from the Windows DevicePath. On a USB device the
+ * devicePath contains the product id and vendor id.
+ * This seems to work for FireWire as well.
+ * Example of device path:
+ * "\\?\usb#vid_0408&pid_2010&mi_00#7&258e7aaf&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+ * "\\?\avc#sony&dv-vcr&camcorder&dv#65b2d50301460008#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+ */
+void DeviceInfoWindows::GetProductId(const WebRtc_UWord8* devicePath,
+                                      WebRtc_UWord8* productUniqueIdUTF8,
+                                      WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    // Extracts the "vid_xxxx&pid_xxxx" portion of a DirectShow DevicePath
+    // into productUniqueIdUTF8. On any failure the output is set to the
+    // empty string and an error is traced.
+    *productUniqueIdUTF8 = '\0';
+    char* startPos = strstr((char*) devicePath, "\\\\?\\");
+    if (!startPos)
+    {
+        strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "Failed to get the product Id");
+        return;
+    }
+    startPos += 4; // Skip the "\\?\" prefix.
+
+    char* pos = strchr(startPos, '&');
+    if (!pos || pos >= (char*) devicePath + strlen((char*) devicePath))
+    {
+        strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "Failed to get the product Id");
+        return;
+    }
+    // Find the second occurrence.
+    pos = strchr(pos + 1, '&');
+    // Fix: only compute the length after the NULL check; the original
+    // evaluated (pos - startPos) with a possibly NULL pos, which is
+    // undefined behavior.
+    if (pos)
+    {
+        const WebRtc_UWord32 bytesToCopy = (WebRtc_UWord32)(pos - startPos);
+        // Fix: strncpy_s requires count < destination size to leave room
+        // for the terminating NUL; the original used <=.
+        if (bytesToCopy < productUniqueIdUTF8Length
+            && bytesToCopy <= kVideoCaptureProductIdLength)
+        {
+            strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length,
+                      (char*) startPos, bytesToCopy);
+            return;
+        }
+    }
+    strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "Failed to get the product Id");
+}
+
+WebRtc_Word32 DeviceInfoWindows::DisplayCaptureSettingsDialogBox(
+                                         const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                         const WebRtc_UWord8* dialogTitleUTF8,
+                                         void* parentWindow,
+                                         WebRtc_UWord32 positionX,
+                                         WebRtc_UWord32 positionY)
+{
+    // Shows the driver-supplied property-page dialog for the device
+    // identified by deviceUniqueIdUTF8, parented to parentWindow at the
+    // given position. Returns 0 on success, -1 if the device or its
+    // property pages cannot be obtained.
+    ReadLockScoped cs(_apiLock);
+    HWND window = (HWND) parentWindow;
+
+    IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0);
+    if (!filter)
+        return -1;
+
+    ISpecifyPropertyPages* pPages = NULL;
+    CAUUID uuid;
+    HRESULT hr = S_OK;
+
+    hr = filter->QueryInterface(IID_ISpecifyPropertyPages, (LPVOID*) &pPages);
+    if (!SUCCEEDED(hr))
+    {
+        filter->Release();
+        return -1;
+    }
+    hr = pPages->GetPages(&uuid);
+    if (!SUCCEEDED(hr))
+    {
+        // Fix: pPages was leaked on this error path in the original code.
+        pPages->Release();
+        filter->Release();
+        return -1;
+    }
+
+    WCHAR tempDialogTitleWide[256];
+    tempDialogTitleWide[0] = 0;
+    int size = 255;
+
+    // UTF-8 to wide char
+    MultiByteToWideChar(CP_UTF8, 0, (char*) dialogTitleUTF8, -1,
+                        tempDialogTitleWide, size);
+
+    // Invoke a dialog box to display.
+
+    hr = OleCreatePropertyFrame(window, // You must create the parent window.
+                                positionX, // Horizontal position for the dialog box.
+                                positionY, // Vertical position for the dialog box.
+                                tempDialogTitleWide,// String used for the dialog box caption.
+                                1, // Number of pointers passed in pPlugin.
+                                (LPUNKNOWN*) &filter, // Pointer to the filter.
+                                uuid.cElems, // Number of property pages.
+                                uuid.pElems, // Array of property page CLSIDs.
+                                LOCALE_USER_DEFAULT, // Locale ID for the dialog box.
+                                0, NULL); // Reserved
+    // Release memory.
+    if (uuid.pElems)
+    {
+        CoTaskMemFree(uuid.pElems);
+    }
+    // Fix: release the property-page interface (leaked in the original).
+    pPages->Release();
+    filter->Release();
+    return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Windows/device_info_windows.h b/trunk/src/modules/video_capture/main/source/Windows/device_info_windows.h
new file mode 100644
index 0000000..b05a63c
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/device_info_windows.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_WINDOWS_H_
+
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+
+#include <Dshow.h>
+#include "map_wrapper.h"
+
+// forward declarations
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// A VideoCaptureCapability extended with DirectShow-specific data.
+struct VideoCaptureCapabilityWindows: public VideoCaptureCapability
+{
+    // Index to pass to IAMStreamConfig::GetStreamCaps for this format.
+    WebRtc_UWord32 directShowCapabilityIndex;
+    // True when the device's IAMVideoControl supplied a frame-rate list
+    // for this format (set in DeviceInfoWindows::CreateCapabilityMap).
+    bool supportFrameRateControl;
+    VideoCaptureCapabilityWindows()
+    {
+        directShowCapabilityIndex = 0;
+        supportFrameRateControl = false;
+    }
+
+};
+// Windows (DirectShow) device-information implementation. Holds the
+// system device enumerator interfaces used to enumerate video capture
+// devices and query their capabilities.
+class DeviceInfoWindows: public DeviceInfoImpl
+{
+public:
+    DeviceInfoWindows(const WebRtc_Word32 id);
+    virtual ~DeviceInfoWindows();
+
+    WebRtc_Word32 Init();
+    virtual WebRtc_UWord32 NumberOfDevices();
+
+    /*
+     * Returns the available capture devices.
+     */
+    virtual WebRtc_Word32
+        GetDeviceName(WebRtc_UWord32 deviceNumber,
+                      WebRtc_UWord8* deviceNameUTF8,
+                      WebRtc_UWord32 deviceNameLength,
+                      WebRtc_UWord8* deviceUniqueIdUTF8,
+                      WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                      WebRtc_UWord8* productUniqueIdUTF8,
+                      WebRtc_UWord32 productUniqueIdUTF8Length);
+
+    /* 
+     * Display OS /capture device specific settings dialog
+     */
+    virtual WebRtc_Word32
+        DisplayCaptureSettingsDialogBox(
+                                        const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                        const WebRtc_UWord8* dialogTitleUTF8,
+                                        void* parentWindow,
+                                        WebRtc_UWord32 positionX,
+                                        WebRtc_UWord32 positionY);
+
+    // Windows specific
+
+    /* Gets a capture device filter
+     The user of this API is responsible for releasing the filter when it not needed.
+     */
+    IBaseFilter * GetDeviceFilter(const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                  WebRtc_UWord8* productUniqueIdUTF8 = NULL,
+                                  WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+
+    // Copies the stored DirectShow capability at capabilityIndex into
+    // windowsCapability. Returns 0 on success, -1 for an invalid index.
+    WebRtc_Word32
+        GetWindowsCapability(const WebRtc_Word32 capabilityIndex,
+                             VideoCaptureCapabilityWindows& windowsCapability);
+
+    // Extracts the product id part (e.g. "vid_xxxx&pid_xxxx") from a
+    // DirectShow DevicePath into productUniqueIdUTF8.
+    static void GetProductId(const WebRtc_UWord8* devicePath,
+                             WebRtc_UWord8* productUniqueIdUTF8,
+                             WebRtc_UWord32 productUniqueIdUTF8Length);
+protected:
+
+    WebRtc_Word32 GetDeviceInfo(WebRtc_UWord32 deviceNumber,
+                                WebRtc_UWord8* deviceNameUTF8,
+                                WebRtc_UWord32 deviceNameLength,
+                                WebRtc_UWord8* deviceUniqueIdUTF8,
+                                WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                WebRtc_UWord8* productUniqueIdUTF8,
+                                WebRtc_UWord32 productUniqueIdUTF8Length);
+
+    // Enumerates all capabilities of the given device into
+    // _captureCapabilities.
+    virtual WebRtc_Word32
+        CreateCapabilityMap(const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+private:
+    ICreateDevEnum* _dsDevEnum;      // System device enumerator.
+    IEnumMoniker* _dsMonikerDevEnum; // Enumerator over video input devices.
+    bool _CoUninitializeIsRequired;  // NOTE(review): presumably tracks whether
+                                     // CoUninitialize must be called by this
+                                     // object; confirm in the constructor.
+
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_WINDOWS_H_
diff --git a/trunk/src/modules/video_capture/main/source/Windows/help_functions_windows.cc b/trunk/src/modules/video_capture/main/source/Windows/help_functions_windows.cc
new file mode 100644
index 0000000..dd533a3
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/help_functions_windows.cc
@@ -0,0 +1,116 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "help_functions_windows.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Returns the smallest value in the list. The entries are frame durations
+// (100-ns units per frame), so the minimum duration corresponds to the
+// maximum frame rate.
+LONGLONG GetMaxOfFrameArray(LONGLONG *maxFps, long size)
+{
+    // Fix: guard against an empty list; the original read maxFps[0]
+    // unconditionally, which is undefined for size <= 0.
+    if (size <= 0)
+    {
+        return 0;
+    }
+    LONGLONG minDuration = maxFps[0];
+    // Start at 1: comparing element 0 with itself is redundant.
+    for (long i = 1; i < size; i++)
+    {
+        if (minDuration > maxFps[i])
+            minDuration = maxFps[i];
+    }
+    return minDuration;
+}
+
+// Returns the first unconnected input pin of |filter|, or NULL if none.
+// The returned pin is AddRef'd; the caller must Release() it.
+IPin* GetInputPin(IBaseFilter* filter)
+{
+    IPin* pin = NULL;
+    IEnumPins* pPinEnum = NULL;
+    filter->EnumPins(&pPinEnum);
+    if (pPinEnum == NULL)
+    {
+        return NULL;
+    }
+
+    // get first unconnected pin
+    pPinEnum->Reset(); // set to first pin
+
+    while (S_OK == pPinEnum->Next(1, &pin, NULL))
+    {
+        PIN_DIRECTION pPinDir;
+        pin->QueryDirection(&pPinDir);
+        if (PINDIR_INPUT == pPinDir) // This is an input pin
+        {
+            IPin* tempPin = NULL;
+            if (S_OK != pin->ConnectedTo(&tempPin)) // The pin is not connected
+            {
+                pPinEnum->Release();
+                return pin;
+            }
+            // Fix: the pin is connected, so ConnectedTo returned an
+            // AddRef'd pin; the original leaked this reference.
+            tempPin->Release();
+        }
+        pin->Release();
+    }
+    pPinEnum->Release();
+    return NULL;
+}
+
+// Returns the first output pin of |filter| whose pin category matches
+// |Category| (any output pin when Category is GUID_NULL), or NULL.
+// The returned pin is AddRef'd; the caller must Release() it.
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category)
+{
+    IPin* pin = NULL;
+    IEnumPins* pPinEnum = NULL;
+    filter->EnumPins(&pPinEnum);
+    if (pPinEnum == NULL)
+    {
+        return NULL;
+    }
+    // Start from the first pin (unused locals hr/count removed).
+    pPinEnum->Reset();
+    while (S_OK == pPinEnum->Next(1, &pin, NULL))
+    {
+        PIN_DIRECTION pPinDir;
+        pin->QueryDirection(&pPinDir);
+        if (PINDIR_OUTPUT == pPinDir) // This is an output pin
+        {
+            if (Category == GUID_NULL || PinMatchesCategory(pin, Category))
+            {
+                pPinEnum->Release();
+                return pin;
+            }
+        }
+        pin->Release();
+        pin = NULL;
+    }
+    pPinEnum->Release();
+    return NULL;
+}
+
+// Checks whether |pPin| reports |Category| as its AMPROPERTY_PIN_CATEGORY
+// via IKsPropertySet. Returns FALSE when the pin does not expose the
+// interface or the property cannot be read.
+BOOL PinMatchesCategory(IPin *pPin, REFGUID Category)
+{
+    IKsPropertySet *propertySet = NULL;
+    HRESULT hr = pPin->QueryInterface(IID_PPV_ARGS(&propertySet));
+    if (FAILED(hr))
+    {
+        return FALSE;
+    }
+    GUID pinCategory;
+    DWORD bytesReturned = 0;
+    hr = propertySet->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
+                          &pinCategory, sizeof(GUID), &bytesReturned);
+    BOOL matches = FALSE;
+    if (SUCCEEDED(hr) && bytesReturned == sizeof(GUID))
+    {
+        matches = (pinCategory == Category);
+    }
+    propertySet->Release();
+    return matches;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
+
diff --git a/trunk/src/modules/video_capture/main/source/Windows/help_functions_windows.h b/trunk/src/modules/video_capture/main/source/Windows/help_functions_windows.h
new file mode 100644
index 0000000..0020877
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/help_functions_windows.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_WINDOWS_H_
+
+#include <dshow.h>
+#include <initguid.h>
+
+// FOURCC-based media subtypes ('I420', 'v210', 'HDYC') that DirectShow does
+// not predefine in uuids.h.
+DEFINE_GUID(MEDIASUBTYPE_I420, 0x30323449, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+            0xAA, 0x00, 0x38, 0x9B, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_V210, 0x30313276, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+            0xAA, 0x00, 0x38, 0x9B, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_HDYC, 0x43594448, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+            0xAA, 0x00, 0x38, 0x9B, 0x71);
+
+// Releases a COM pointer and nulls it; safe to invoke on a NULL pointer.
+#define RELEASE_AND_CLEAR(p) if (p) { (p) -> Release () ; (p) = NULL ; }
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Returns the largest value in the |size|-element array |maxFps|.
+LONGLONG GetMaxOfFrameArray(LONGLONG *maxFps, long size);
+
+// Pin lookup helpers. Returned pins are AddRef'd; the caller must Release.
+IPin* GetInputPin(IBaseFilter* filter);
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category = GUID_NULL);
+BOOL PinMatchesCategory(IPin *pPin, REFGUID Category);
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_WINDOWS_H_
diff --git a/trunk/src/modules/video_capture/main/source/Windows/sink_filter_windows.cc b/trunk/src/modules/video_capture/main/source/Windows/sink_filter_windows.cc
new file mode 100644
index 0000000..88e47a0
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/sink_filter_windows.cc
@@ -0,0 +1,506 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sink_filter_windows.h"
+
+#include "trace.h"
+#include "help_functions_windows.h"
+
+#include <Dvdmedia.h> // VIDEOINFOHEADER2
+#include <initguid.h>
+
+#define DELETE_RESET(p) { delete (p) ; (p) = NULL ;}
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+typedef struct tagTHREADNAME_INFO
+{
+   DWORD dwType;        // must be 0x1000
+   LPCSTR szName;       // pointer to name (in user addr space)
+   DWORD dwThreadID;    // thread ID (-1=caller thread)
+   DWORD dwFlags;       // reserved for future use, must be zero
+} THREADNAME_INFO;
+
+DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa, 0xac, 0xbf, 0x15, 0xd5,
+            0xe2, 0xce, 0x12, 0xc3);
+
+// Input pin that receives captured media samples from the upstream capture
+// filter. |moduleId| identifies this capture module in trace output; the
+// remaining arguments are forwarded unchanged to CBaseInputPin.
+CaptureInputPin::CaptureInputPin (WebRtc_Word32 moduleId,
+                            IN TCHAR * szName,
+                            IN CaptureSinkFilter* pFilter,
+                            IN CCritSec * pLock,
+                            OUT HRESULT * pHr,
+                            IN LPCWSTR pszName)
+    : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
+      _requestedCapability(),
+      _resultingCapability()
+{
+    _moduleId=moduleId;
+    _threadHandle = NULL;
+}
+
+// Nothing to release: _threadHandle is the pseudo handle returned by
+// GetCurrentThread() and does not need to be closed.
+CaptureInputPin::~CaptureInputPin()
+{
+}
+
+// Enumerates the media types this pin offers, in order of preference
+// (I420, YUY2, RGB24, UYVY, MJPG). When a specific codec type was requested
+// the enumeration starts one position later so I420 is skipped. Returns
+// VFW_S_NO_MORE_ITEMS once all positions are exhausted.
+// Fixes vs. original: guards the AvgTimePerFrame division against a zero
+// maxFPS, removes a stray double semicolon, and corrects the trace message
+// to name this function rather than CheckMediaType.
+HRESULT
+CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
+{
+    // reset the thread handle
+    _threadHandle = NULL;
+
+    if(iPosition < 0)
+    return E_INVALIDARG;
+
+    VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*) pmt->AllocFormatBuffer(
+                            sizeof(VIDEOINFOHEADER));
+    if(NULL == pvi)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "GetMediaType VIDEOINFOHEADER is NULL. Returning...Line:%d\n", __LINE__);
+        return(E_OUTOFMEMORY);
+    }
+
+    ZeroMemory(pvi, sizeof(VIDEOINFOHEADER));
+    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+    pvi->bmiHeader.biPlanes = 1;
+    pvi->bmiHeader.biClrImportant = 0;
+    pvi->bmiHeader.biClrUsed = 0;
+    // Avoid division by zero when no frame rate has been requested yet;
+    // AvgTimePerFrame is in 100 ns units.
+    if (_requestedCapability.maxFPS != 0)
+    {
+        pvi->AvgTimePerFrame = 10000000/_requestedCapability.maxFPS;
+    }
+
+    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+    pmt->SetType(&MEDIATYPE_Video);
+    pmt->SetFormatType(&FORMAT_VideoInfo);
+    pmt->SetTemporalCompression(FALSE);
+
+    // If a codec was explicitly requested, start at the raw-format positions
+    // directly (skip the I420 entry at case 0).
+    WebRtc_Word32 positionOffset=1;
+    if(_requestedCapability.codecType!=kVideoCodecUnknown)
+    {
+        positionOffset=0;
+    }
+
+    switch (iPosition+positionOffset)
+    {
+        case 0:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
+            pvi->bmiHeader.biBitCount = 12; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+                                        *_requestedCapability.width/2;
+            pmt->SetSubtype(&MEDIASUBTYPE_I420);
+        }
+        break;
+        case 1:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
+            pvi->bmiHeader.biBitCount = 16; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 2*_requestedCapability.width
+                                        *_requestedCapability.height;
+            pmt->SetSubtype(&MEDIASUBTYPE_YUY2);
+        }
+        break;
+        case 2:
+        {
+            pvi->bmiHeader.biCompression = BI_RGB;
+            pvi->bmiHeader.biBitCount = 24; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+                                        *_requestedCapability.width;
+            pmt->SetSubtype(&MEDIASUBTYPE_RGB24);
+        }
+        break;
+        case 3:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
+            pvi->bmiHeader.biBitCount = 16; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 2*_requestedCapability.height
+                                         *_requestedCapability.width;
+            pmt->SetSubtype(&MEDIASUBTYPE_UYVY);
+        }
+        break;
+        case 4:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('M','J','P','G');
+            pvi->bmiHeader.biBitCount = 12; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+                                         *_requestedCapability.width/2;
+            pmt->SetSubtype(&MEDIASUBTYPE_MJPG);
+        }
+        break;
+        default :
+        return VFW_S_NO_MORE_ITEMS;
+    }
+    pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+             "GetMediaType position %d, width %d, height %d, biCompression 0x%x",
+             iPosition, _requestedCapability.width,
+             _requestedCapability.height,pvi->bmiHeader.biCompression);
+    return NOERROR;
+}
+
+// Accepts a proposed media type when it is a video type in one of the raw
+// formats this pin handles (MJPG, I420, YUY2, UYVY, HDYC, RGB24), for either
+// a FORMAT_VideoInfo or FORMAT_VideoInfo2 format block. On acceptance the
+// negotiated width, height and raw type are recorded in _resultingCapability.
+// Returns S_OK for an acceptable type, E_INVALIDARG otherwise.
+HRESULT
+CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
+{
+    // reset the thread handle
+    _threadHandle = NULL;
+
+    const GUID *type = pMediaType->Type();
+    if (*type != MEDIATYPE_Video)
+    return E_INVALIDARG;
+
+    const GUID *formatType = pMediaType->FormatType();
+
+    // Check for the subtypes we support
+    const GUID *SubType = pMediaType->Subtype();
+    if (SubType == NULL)
+    {
+        return E_INVALIDARG;
+    }
+
+    if(*formatType == FORMAT_VideoInfo)
+    {
+        VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pMediaType->Format();
+        if(pvi == NULL)
+        {
+            return E_INVALIDARG;
+        }
+
+        // Store the incoming width and height
+        // (biHeight may be negative for top-down frames, hence abs()).
+        _resultingCapability.width = pvi->bmiHeader.biWidth;
+        _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "CheckMediaType width:%d height:%d Compression:0x%x\n",
+                     pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
+                     pvi->bmiHeader.biCompression);
+
+        if(*SubType == MEDIASUBTYPE_MJPG
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
+        {
+            _resultingCapability.rawType = kVideoMJPEG;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_I420
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
+        {
+            _resultingCapability.rawType = kVideoI420;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_YUY2
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
+        {
+            _resultingCapability.rawType = kVideoYUY2;
+            ::Sleep(60); // workaround for bad driver
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_UYVY
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+
+        // HDYC is delivered to the rest of the pipeline as UYVY.
+        if(*SubType == MEDIASUBTYPE_HDYC)
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_RGB24
+            && pvi->bmiHeader.biCompression == BI_RGB)
+        {
+            _resultingCapability.rawType = kVideoRGB24;
+            return S_OK; // This format is acceptable.
+        }
+    }
+    if(*formatType == FORMAT_VideoInfo2)
+    {
+        // VIDEOINFOHEADER2 that has dwInterlaceFlags
+        VIDEOINFOHEADER2 *pvi = (VIDEOINFOHEADER2 *) pMediaType->Format();
+
+        if(pvi == NULL)
+        {
+            return E_INVALIDARG;
+        }
+
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "CheckMediaType width:%d height:%d Compression:0x%x\n",
+                     pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
+                     pvi->bmiHeader.biCompression);
+
+        _resultingCapability.width = pvi->bmiHeader.biWidth;
+        _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+
+        if(*SubType == MEDIASUBTYPE_MJPG
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
+        {
+            _resultingCapability.rawType = kVideoMJPEG;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_I420
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
+        {
+            _resultingCapability.rawType = kVideoI420;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_YUY2
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
+        {
+            _resultingCapability.rawType = kVideoYUY2;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_UYVY
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+
+        // HDYC is delivered to the rest of the pipeline as UYVY.
+        if(*SubType == MEDIASUBTYPE_HDYC)
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_RGB24
+            && pvi->bmiHeader.biCompression == BI_RGB)
+        {
+            _resultingCapability.rawType = kVideoRGB24;
+            return S_OK; // This format is acceptable.
+        }
+    }
+    return E_INVALIDARG;
+}
+
+// Called by the upstream capture filter on its streaming thread for every
+// captured sample. On first delivery the streaming thread is raised to
+// highest priority and given a debugger-visible name. Acquires the filter's
+// receive lock; on the success path ProcessCapturedFrame is responsible for
+// releasing it (see the NOTE below).
+HRESULT
+CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
+{
+    HRESULT hr = S_OK;
+
+    ASSERT (m_pFilter);
+    ASSERT (pIMediaSample);
+
+    // get the thread handle of the delivering thread inc its priority
+    if( _threadHandle == NULL)
+    {
+        HANDLE handle= GetCurrentThread();
+        SetThreadPriority(handle, THREAD_PRIORITY_HIGHEST);
+        _threadHandle = handle;
+        // See http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx for details on the code
+        // in this function. Name of article is "Setting a Thread Name (Unmanaged)".
+
+        THREADNAME_INFO info;
+        info.dwType = 0x1000;
+        info.szName = "capture_thread";
+        info.dwThreadID = (DWORD)-1; // -1 names the calling thread
+        info.dwFlags = 0;
+
+        // MS-documented magic exception number for naming a thread; a
+        // debugger consumes it, otherwise execution simply continues.
+        __try
+        {
+            RaiseException( 0x406D1388, 0, sizeof(info)/sizeof(DWORD),
+                            (DWORD_PTR*)&info );
+        }
+        __except (EXCEPTION_CONTINUE_EXECUTION)
+        {
+        }
+
+    }
+
+    reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->LockReceive();
+    hr = CBaseInputPin::Receive (pIMediaSample);
+
+    if (SUCCEEDED (hr))
+    {
+        const WebRtc_Word32 length = pIMediaSample->GetActualDataLength();
+
+        unsigned char* pBuffer = NULL;
+        if(S_OK != pIMediaSample->GetPointer(&pBuffer))
+        {
+            reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+            return S_FALSE;
+        }
+
+        // NOTE: filter unlocked within Send call
+        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
+                                        pBuffer,length,_resultingCapability);
+    }
+    else
+    {
+        reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+    }
+
+    return hr;
+}
+
+// called under LockReceive
+// Records the capability the pin should request during media-type
+// negotiation and clears any previously negotiated result. Always
+// returns S_OK.
+HRESULT CaptureInputPin::SetMatchingMediaType(
+                                    const VideoCaptureCapability& capability)
+{
+
+    _requestedCapability = capability;
+    _resultingCapability = VideoCaptureCapability();
+    return S_OK;
+}
+//  ----------------------------------------------------------------------------
+CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
+                              IN LPUNKNOWN punk,
+                              OUT HRESULT * phr,
+                              VideoCaptureExternal& captureObserver,
+                              WebRtc_Word32 moduleId)
+    : CBaseFilter(tszName,punk,& m_crtFilter,CLSID_SINKFILTER),
+      m_pInput(NULL),
+      _captureObserver(captureObserver),
+      _moduleId(moduleId)
+{
+    (* phr) = S_OK;
+    m_pInput = new CaptureInputPin(moduleId,NAME ("VideoCaptureInputPin"),
+                                   this,
+                                   & m_crtFilter,
+                                   phr, L"VideoCapture");
+    if (m_pInput == NULL || FAILED (* phr))
+    {
+        (* phr) = FAILED (* phr) ? (* phr) : E_OUTOFMEMORY;
+        goto cleanup;
+    }
+    cleanup :
+    return;
+}
+
+// The input pin is plain-deleted (not Release'd): it is owned exclusively
+// by this filter.
+CaptureSinkFilter::~CaptureSinkFilter()
+{
+    delete m_pInput;
+}
+
+// This sink exposes exactly one pin: the capture input pin.
+int CaptureSinkFilter::GetPinCount()
+{
+    return 1;
+}
+
+// Returns the input pin for Index 0, NULL for any other index. The pin
+// pointer is not AddRef'd here; CBaseFilter's enumeration handles COM
+// reference counting.
+CBasePin *
+CaptureSinkFilter::GetPin(IN int Index)
+{
+    CBasePin * pPin;
+    LockFilter ();
+    if (Index == 0)
+    {
+        pPin = m_pInput;
+    }
+    else
+    {
+        pPin = NULL;
+    }
+    UnlockFilter ();
+    return pPin;
+}
+
+// Transitions the filter from Stopped to Paused (activating a connected
+// input pin) or from Running to Paused. Always returns S_OK.
+STDMETHODIMP CaptureSinkFilter::Pause()
+{
+    HRESULT hr = S_OK;
+    LockFilter();
+    if (m_State == State_Stopped)
+    {
+        //  change the state, THEN activate the input pin
+        m_State = State_Paused;
+        if (m_pInput && m_pInput->IsConnected())
+        {
+            m_pInput->Active();
+        }
+        // NOTE(review): with no connected pin the state jumps straight to
+        // Running rather than Paused — looks intentional (nothing can
+        // deliver samples), but confirm against the graph's expectations.
+        if (m_pInput && !m_pInput->IsConnected())
+        {
+            m_State = State_Running;
+        }
+    }
+    else if (m_State == State_Running)
+    {
+        m_State = State_Paused;
+    }
+    UnlockFilter();
+    return S_OK;
+}
+
+// Stops the filter and deactivates the input pin. Locks are taken in the
+// documented order: receive lock before filter lock (see m_crtRecv in the
+// header), and released in reverse.
+STDMETHODIMP CaptureSinkFilter::Stop()
+{
+    LockReceive();
+    LockFilter();
+
+    //  set the state
+    m_State = State_Stopped;
+
+    //  inactivate the pins
+    if (m_pInput)
+        m_pInput->Inactive();
+
+    UnlockFilter();
+    UnlockReceive();
+    return S_OK;
+}
+
+// Stores the owning graph pointer (no AddRef is taken here) under the
+// filter lock. Per the header, this is used when rendering via EVR.
+void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
+{
+    LockFilter();
+    m_pGraph = graph;
+    UnlockFilter();
+}
+
+// Forwards a captured frame to the observer. PRECONDITION: the caller
+// (CaptureInputPin::Receive) already holds the receive lock; this function
+// releases it on every path before returning. Frames arriving while the
+// filter is not running are dropped.
+void CaptureSinkFilter::ProcessCapturedFrame(unsigned char* pBuffer,
+                                         WebRtc_Word32 length,
+                                         const VideoCaptureCapability& frameInfo)
+{
+    //  we have the receiver lock
+    if (m_State == State_Running)
+    {
+        _captureObserver.IncomingFrame(pBuffer, length, frameInfo);
+
+        // trying to hold it since it's only a memcpy
+        // IMPROVEMENT if this work move critsect
+        UnlockReceive();
+        return;
+    }
+    UnlockReceive();
+    return;
+}
+
+// Propagates the requested capability to the input pin under both locks
+// (receive before filter, matching the documented lock order). Returns
+// E_UNEXPECTED if the pin was never created.
+STDMETHODIMP CaptureSinkFilter::SetMatchingMediaType(
+                                        const VideoCaptureCapability& capability)
+{
+    LockReceive();
+    LockFilter();
+    HRESULT hr;
+    if (m_pInput)
+    {
+        hr = m_pInput->SetMatchingMediaType(capability);
+    }
+    else
+    {
+        hr = E_UNEXPECTED;
+    }
+    UnlockFilter();
+    UnlockReceive();
+    return hr;
+}
+
+// Reports this filter's CLSID (defined at the top of this file).
+STDMETHODIMP CaptureSinkFilter::GetClassID( OUT CLSID * pCLSID )
+{
+    (* pCLSID) = CLSID_SINKFILTER;
+    return S_OK;
+}
+
+} // namespace videocapturemodule
+} //namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Windows/sink_filter_windows.h b/trunk/src/modules/video_capture/main/source/Windows/sink_filter_windows.h
new file mode 100644
index 0000000..efeb9c7
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/sink_filter_windows.h
@@ -0,0 +1,100 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_WINDOWS_H_
+
+#include <Streams.h> // Include base DS filter header files
+
+#include "video_capture_defines.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+//forward declaration
+
+class CaptureSinkFilter;
+/**
+ *	input pin for camera input
+ *
+ */
+class CaptureInputPin: public CBaseInputPin
+{
+public:
+    WebRtc_Word32 _moduleId;
+
+    VideoCaptureCapability _requestedCapability;
+    VideoCaptureCapability _resultingCapability;
+    HANDLE _threadHandle;
+
+    CaptureInputPin ( WebRtc_Word32 moduleId,
+                      IN TCHAR* szName,
+                      IN CaptureSinkFilter* pFilter,
+                      IN CCritSec * pLock,
+                      OUT HRESULT * pHr,
+                      IN LPCWSTR pszName);
+    virtual ~CaptureInputPin();
+
+    HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
+    HRESULT CheckMediaType (IN const CMediaType * pmt);
+    STDMETHODIMP Receive (IN IMediaSample *);
+    HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
+};
+
+class CaptureSinkFilter: public CBaseFilter
+{
+
+public:
+    CaptureSinkFilter (IN TCHAR * tszName,
+                   IN LPUNKNOWN punk,
+                   OUT HRESULT * phr,
+                   VideoCaptureExternal& captureObserver,
+                   WebRtc_Word32 moduleId);
+    virtual ~CaptureSinkFilter();
+
+    //  --------------------------------------------------------------------
+    //  class methods
+
+    void ProcessCapturedFrame(unsigned char* pBuffer, WebRtc_Word32 length,
+                              const VideoCaptureCapability& frameInfo);
+    //  explicit receiver lock aquisition and release
+    void LockReceive()  { m_crtRecv.Lock();}
+    void UnlockReceive() {m_crtRecv.Unlock();}
+    //  explicit filter lock aquisition and release
+    void LockFilter() {m_crtFilter.Lock();}
+    void UnlockFilter() { m_crtFilter.Unlock(); }
+    void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
+
+    //  --------------------------------------------------------------------
+    //  COM interfaces
+DECLARE_IUNKNOWN    ;
+    STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
+
+    //  --------------------------------------------------------------------
+    //  CBaseFilter methods
+    int GetPinCount ();
+    CBasePin * GetPin ( IN int Index);
+    STDMETHODIMP Pause ();
+    STDMETHODIMP Stop ();
+    STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
+    //  --------------------------------------------------------------------
+    //  class factory calls this
+    static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
+private:
+    CCritSec m_crtFilter; //  filter lock
+    CCritSec m_crtRecv;  //  receiver lock; always acquire before filter lock
+    CaptureInputPin * m_pInput;
+    VideoCaptureExternal& _captureObserver;
+    WebRtc_Word32 _moduleId;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_WINDOWS_H_
diff --git a/trunk/src/modules/video_capture/main/source/Windows/video_capture_factory_windows.cc b/trunk/src/modules/video_capture/main/source/Windows/video_capture_factory_windows.cc
new file mode 100644
index 0000000..bfc7a30
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/video_capture_factory_windows.cc
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ref_count.h"
+#include "video_capture_windows.h"
+#include "trace.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Factory for the DirectShow capture module. Returns a ref-counted
+// VideoCaptureDS for the device identified by |deviceUniqueIdUTF8|, or NULL
+// when the id is NULL or initialization fails.
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+
+    if (deviceUniqueIdUTF8 == NULL)
+    {
+        return NULL;
+    }
+
+    // NOTE(review): productId is fetched but not used further in this
+    // function — presumably consumed via a side effect or left over from a
+    // refactor; verify before removing.
+    WebRtc_UWord8 productId[kVideoCaptureProductIdLength];
+    videocapturemodule::DeviceInfoWindows::GetProductId(deviceUniqueIdUTF8,
+                                                        productId,
+                                                        sizeof(productId));
+    
+    RefCountImpl<videocapturemodule::VideoCaptureDS>* newCaptureModule =
+        new RefCountImpl<videocapturemodule::VideoCaptureDS>(id);
+
+    if (newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        delete newCaptureModule;
+        newCaptureModule = NULL;
+    }
+    return newCaptureModule;
+}
+} //namespace videocapturemodule
+} //namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Windows/video_capture_windows.cc b/trunk/src/modules/video_capture/main/source/Windows/video_capture_windows.cc
new file mode 100644
index 0000000..889b7ef
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/video_capture_windows.cc
@@ -0,0 +1,413 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_windows.h"
+
+#include "../video_capture_config.h"
+#include "critical_section_wrapper.h"
+#include "help_functions_windows.h"
+#include "sink_filter_windows.h"
+#include "trace.h"
+
+#include <Dvdmedia.h> // VIDEOINFOHEADER2
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// All DirectShow object pointers start NULL; the graph is built in Init().
+VideoCaptureDS::VideoCaptureDS(const WebRtc_Word32 id)
+    : VideoCaptureImpl(id), _dsInfo(id), _captureFilter(NULL),
+      _graphBuilder(NULL), _mediaControl(NULL), _sinkFilter(NULL),
+      _inputSendPin(NULL), _outputCapturePin(NULL), _dvFilter(NULL),
+      _inputDvPin(NULL), _outputDvPin(NULL)
+{
+}
+
+// Tears down the capture graph: stop streaming first, then detach filters
+// from the graph, then release all COM objects (the graph builder last).
+// RELEASE_AND_CLEAR is a no-op on NULL, so partial initialization is safe.
+VideoCaptureDS::~VideoCaptureDS()
+{
+    if (_mediaControl)
+    {
+        _mediaControl->Stop();
+    }
+    if (_graphBuilder)
+    {
+        if (_sinkFilter)
+            _graphBuilder->RemoveFilter(_sinkFilter);
+        if (_captureFilter)
+            _graphBuilder->RemoveFilter(_captureFilter);
+        if (_dvFilter)
+            _graphBuilder->RemoveFilter(_dvFilter);
+    }
+    RELEASE_AND_CLEAR(_captureFilter); // release the capture device
+    RELEASE_AND_CLEAR(_sinkFilter);
+    RELEASE_AND_CLEAR(_dvFilter);
+
+    RELEASE_AND_CLEAR(_mediaControl);
+    RELEASE_AND_CLEAR(_inputSendPin);
+    RELEASE_AND_CLEAR(_outputCapturePin);
+
+    RELEASE_AND_CLEAR(_inputDvPin);
+    RELEASE_AND_CLEAR(_outputDvPin);
+
+    RELEASE_AND_CLEAR(_graphBuilder);
+}
+
+// Builds the DirectShow capture graph for the device identified by
+// |deviceUniqueIdUTF8|: capture filter -> sink filter, then pauses the
+// graph so no other process can claim the device. Returns 0 on success,
+// -1 on any failure (the destructor cleans up partial state).
+// Fix vs. original: the nothrow allocation of _deviceUniqueId is now
+// null-checked before memcpy dereferences it.
+WebRtc_Word32 VideoCaptureDS::Init(const WebRtc_Word32 id,
+                                          const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+    WebRtc_Word32 result = 0;
+
+    const WebRtc_Word32 nameLength =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (nameLength > kVideoCaptureUniqueNameLength)
+        return -1;
+
+    // Store the device name; guard against allocation failure since
+    // new (std::nothrow) returns NULL instead of throwing.
+    _deviceUniqueId = new (std::nothrow) WebRtc_UWord8[nameLength + 1];
+    if (_deviceUniqueId == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to allocate device name buffer.");
+        return -1;
+    }
+    memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+    if (_dsInfo.Init() != 0)
+        return -1;
+
+    _captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8);
+    if (!_captureFilter)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create capture filter.");
+        return -1;
+    }
+
+    // Get the interface for DirectShow's GraphBuilder
+    HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL,
+                                  CLSCTX_INPROC_SERVER, IID_IGraphBuilder,
+                                  (void **) &_graphBuilder);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create graph builder.");
+        return -1;
+    }
+
+    hr = _graphBuilder->QueryInterface(IID_IMediaControl,
+                                       (void **) &_mediaControl);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create media control builder.");
+        return -1;
+    }
+    hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to add the capture device to the graph.");
+        return -1;
+    }
+
+    _outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
+
+    // Create the sink filter used for receiving Captured frames.
+    _sinkFilter = new CaptureSinkFilter(SINK_FILTER_NAME, NULL, &hr,
+                                        *this, _id);
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create send filter");
+        return -1;
+    }
+    _sinkFilter->AddRef();
+
+    hr = _graphBuilder->AddFilter(_sinkFilter, SINK_FILTER_NAME);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to add the send filter to the graph.");
+        return -1;
+    }
+    _inputSendPin = GetInputPin(_sinkFilter);
+
+    // Temporary connect here.
+    // This is done so that no one else can use the capture device.
+    if (SetCameraOutput(_requestedCapability) != 0)
+    {
+        return -1;
+    }
+    hr = _mediaControl->Pause();
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to Pause the Capture device. Is it already occupied? %d.",
+                     hr);
+        return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, _id,
+                 "Capture device '%s' initialized.", deviceUniqueIdUTF8);
+    return 0;
+}
+
+// Starts streaming with |capability|. If it differs from the capability the
+// graph was built with, the graph is disconnected and reconfigured first.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoCaptureDS::StartCapture(
+                                      const VideoCaptureCapability& capability)
+{
+    CriticalSectionScoped cs(_apiCs);
+
+    if (capability != _requestedCapability)
+    {
+        DisconnectGraph();
+
+        if (SetCameraOutput(capability) != 0)
+        {
+            return -1;
+        }
+    }
+    HRESULT hr = _mediaControl->Run();
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to start the Capture device.");
+        return -1;
+    }
+    return 0;
+}
+
+// Stops delivering frames by pausing (not fully stopping) the graph, which
+// keeps the device reserved for this module. Returns 0 on success, -1 on
+// failure.
+WebRtc_Word32 VideoCaptureDS::StopCapture()
+{
+    CriticalSectionScoped cs(_apiCs);
+
+    HRESULT hr = _mediaControl->Pause();
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to stop the capture graph. %d", hr);
+        return -1;
+    }
+    return 0;
+}
+// Returns true when the graph reports State_Running. Waits up to one second
+// for a state transition to settle; VFW_S_CANT_CUE is tolerated since the
+// returned state is still meaningful.
+bool VideoCaptureDS::CaptureStarted()
+{
+    OAFilterState state = 0;
+    HRESULT hr = _mediaControl->GetState(1000, &state);
+    if (hr != S_OK && hr != VFW_S_CANT_CUE)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to get the CaptureStarted status");
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CaptureStarted %d", state);
+    return state == State_Running;
+
+}
+// Copies the capability most recently requested via SetCameraOutput into
+// |settings|. Always returns 0.
+WebRtc_Word32 VideoCaptureDS::CaptureSettings(
+                                             VideoCaptureCapability& settings)
+{
+    settings = _requestedCapability;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureDS::SetCameraOutput(
+                             const VideoCaptureCapability& requestedCapability)
+{
+
+    // Get the best matching capability
+    VideoCaptureCapability capability;
+    WebRtc_Word32 capabilityIndex;
+
+    // Store the new requested size
+    _requestedCapability = requestedCapability;
+    // Match the requested capability with the supported.
+    if ((capabilityIndex = _dsInfo.GetBestMatchedCapability(_deviceUniqueId,
+                                                            _requestedCapability,
+                                                            capability)) < 0)
+    {
+        return -1;
+    }
+    //Reduce the frame rate if possible.
+    if (capability.maxFPS > requestedCapability.maxFPS)
+    {
+        capability.maxFPS = requestedCapability.maxFPS;
+    }
+    // Store the new expected capture delay
+    _captureDelay = capability.expectedCaptureDelay;
+
+    // Convert it to the windows capability index since they are not nexessary
+    // the same
+    VideoCaptureCapabilityWindows windowsCapability;
+    if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0)
+    {
+        return -1;
+    }
+
+    IAMStreamConfig* streamConfig = NULL;
+    AM_MEDIA_TYPE *pmt = NULL;
+    VIDEO_STREAM_CONFIG_CAPS caps;
+
+    HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+                                                   (void**) &streamConfig);
+    if (hr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Can't get the Capture format settings.");
+        return -1;
+    }
+
+    //Get the windows capability from the capture device
+    bool isDVCamera = false;
+    hr = streamConfig->GetStreamCaps(
+                                    windowsCapability.directShowCapabilityIndex,
+                                    &pmt, reinterpret_cast<BYTE*> (&caps));
+    if (!FAILED(hr))
+    {
+        if (pmt->formattype == FORMAT_VideoInfo2)
+        {
+            VIDEOINFOHEADER2* h =
+                reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+            if (capability.maxFPS > 0
+                && windowsCapability.supportFrameRateControl)
+            {
+                h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
+                                                    / capability.maxFPS);
+            }
+        }
+        else
+        {
+            VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>
+                                (pmt->pbFormat);
+            if (capability.maxFPS > 0
+                && windowsCapability.supportFrameRateControl)
+            {
+                h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
+                                                    / capability.maxFPS);
+            }
+
+        }
+
+        // Set the sink filter to request this capability
+        _sinkFilter->SetMatchingMediaType(capability);
+        //Order the capture device to use this capability
+        hr += streamConfig->SetFormat(pmt);
+
+        //Check if this is a DV camera and we need to add MS DV Filter
+        if (pmt->subtype == MEDIASUBTYPE_dvsl
+           || pmt->subtype == MEDIASUBTYPE_dvsd
+           || pmt->subtype == MEDIASUBTYPE_dvhd)
+            isDVCamera = true; // This is a DV camera. Use MS DV filter
+    }
+    RELEASE_AND_CLEAR(streamConfig);
+
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to set capture device output format");
+        return -1;
+    }
+
+    if (isDVCamera)
+    {
+        hr = ConnectDVCamera();
+    }
+    else
+    {
+        hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin,
+                                          NULL);
+    }
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to connect the Capture graph %d", hr);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureDS::DisconnectGraph()
+{
+    HRESULT hr = _mediaControl->Stop();
+    hr += _graphBuilder->Disconnect(_outputCapturePin);
+    hr += _graphBuilder->Disconnect(_inputSendPin);
+
+    // If the DV camera filter exists
+    if (_dvFilter)
+    {
+        _graphBuilder->Disconnect(_inputDvPin);
+        _graphBuilder->Disconnect(_outputDvPin);
+    }
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE( webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to Stop the Capture device for reconfiguration %d",
+                     hr);
+        return -1;
+    }
+    return 0;
+}
+HRESULT VideoCaptureDS::ConnectDVCamera()
+{
+    HRESULT hr = S_OK;
+
+    if (!_dvFilter)
+    {
+        hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC,
+                              IID_IBaseFilter, (void **) &_dvFilter);
+        if (hr != S_OK)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to create the dv decoder: %x", hr);
+            return hr;
+        }
+        hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV");
+        if (hr != S_OK)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to add the dv decoder to the graph: %x", hr);
+            return hr;
+        }
+        _inputDvPin = GetInputPin(_dvFilter);
+        if (_inputDvPin == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to get input pin from DV decoder");
+            return -1;
+        }
+        _outputDvPin = GetOutputPin(_dvFilter);
+        if (_outputDvPin == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to get output pin from DV decoder");
+            return -1;
+        }
+    }
+    hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL);
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to connect capture device to the dv devoder: %x",
+                     hr);
+        return hr;
+    }
+
+    hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL);
+    if (hr != S_OK)
+    {
+        if (hr == 0x80070004)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to connect the capture device, busy");
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to connect capture device to the send graph: 0x%x",
+                         hr);
+        }
+        return hr;
+    }
+    return hr;
+}
+} // namespace videocapturemodule
+} //namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/Windows/video_capture_windows.h b/trunk/src/modules/video_capture/main/source/Windows/video_capture_windows.h
new file mode 100644
index 0000000..69e2db5
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/Windows/video_capture_windows.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_WINDOWS_H_
+
+#include "../video_capture_impl.h"
+#include <tchar.h>
+
+#include "device_info_windows.h"
+
+#define CAPTURE_FILTER_NAME L"VideoCaptureFilter"
+#define SINK_FILTER_NAME L"SinkFilter"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Forward declaration
+class CaptureSinkFilter;
+
+class VideoCaptureDS: public VideoCaptureImpl
+{
+public:
+
+    static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                      const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    VideoCaptureDS(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 Init(const WebRtc_Word32 id,
+                               const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    /*************************************************************************
+     *
+     *   Start/Stop
+     *
+     *************************************************************************/
+    virtual WebRtc_Word32
+        StartCapture(const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+
+    /**************************************************************************
+     *
+     *   Properties of the set device
+     *
+     **************************************************************************/
+
+    virtual bool CaptureStarted();
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+protected:
+    virtual ~VideoCaptureDS();
+
+    // Help functions
+
+    WebRtc_Word32
+        SetCameraOutput(const VideoCaptureCapability& requestedCapability);
+    WebRtc_Word32 DisconnectGraph();
+    HRESULT VideoCaptureDS::ConnectDVCamera();
+
+    DeviceInfoWindows _dsInfo;
+
+    IBaseFilter* _captureFilter;
+    IGraphBuilder* _graphBuilder;
+    IMediaControl* _mediaControl;
+    CaptureSinkFilter* _sinkFilter;
+    IPin* _inputSendPin;
+    IPin* _outputCapturePin;
+
+    // Microsoft DV interface (external DV cameras)
+    IBaseFilter* _dvFilter;
+    IPin* _inputDvPin;
+    IPin* _outputDvPin;
+
+};
+} // namespace videocapturemodule
+} //namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_WINDOWS_H_
diff --git a/trunk/src/modules/video_capture/main/source/device_info_impl.cc b/trunk/src/modules/video_capture/main/source/device_info_impl.cc
new file mode 100644
index 0000000..e41e0bb
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/device_info_impl.cc
@@ -0,0 +1,400 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_impl.h"
+#include "video_capture_config.h"
+#include "trace.h"
+#include <stdlib.h>
+
+#ifndef abs
+#define abs(a) (a>=0?a:-a)
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+DeviceInfoImpl::DeviceInfoImpl(const WebRtc_Word32 id)
+    : _id(id), _apiLock(*RWLockWrapper::CreateRWLock()), _lastUsedDeviceName(NULL),
+      _lastUsedDeviceNameLength(0)
+{
+}
+
+DeviceInfoImpl::~DeviceInfoImpl(void)
+{
+    _apiLock.AcquireLockExclusive();
+    // Reset old capability list
+    MapItem* item = NULL;
+    while ((item = _captureCapabilities.Last()))
+    {
+        delete (VideoCaptureCapability*) item->GetItem();
+        _captureCapabilities.Erase(item);
+    }
+    free(_lastUsedDeviceName);
+    _apiLock.ReleaseLockExclusive();
+
+    delete &_apiLock;
+}
+WebRtc_Word32 DeviceInfoImpl::NumberOfCapabilities(
+                                        const WebRtc_UWord8* deviceUniqueIdUTF8)
+{
+
+    if (!deviceUniqueIdUTF8)
+        return -1;
+
+    _apiLock.AcquireLockShared();
+
+    if (_lastUsedDeviceNameLength == strlen((char*) deviceUniqueIdUTF8))
+    {
+        // Is it the same device that is asked for again.
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
+        if(strncasecmp((char*)_lastUsedDeviceName,
+                       (char*) deviceUniqueIdUTF8,
+                       _lastUsedDeviceNameLength)==0)
+#else
+        if (_strnicmp((char*) _lastUsedDeviceName,
+                      (char*) deviceUniqueIdUTF8,
+                      _lastUsedDeviceNameLength) == 0)
+#endif
+        {
+            //yes
+            _apiLock.ReleaseLockShared();
+            return _captureCapabilities.Size();
+        }
+    }
+    // Need to get exclusive rights to create the new capability map.
+    _apiLock.ReleaseLockShared();
+    WriteLockScoped cs2(_apiLock);
+
+    WebRtc_Word32 ret = CreateCapabilityMap(deviceUniqueIdUTF8);
+    return ret;
+}
+
+WebRtc_Word32 DeviceInfoImpl::GetCapability(const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                            const WebRtc_UWord32 deviceCapabilityNumber,
+                                            VideoCaptureCapability& capability)
+{
+
+    if (!deviceUniqueIdUTF8)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "deviceUniqueIdUTF8 parameter not set in call to GetCapability");
+        return -1;
+    }
+    ReadLockScoped cs(_apiLock);
+
+    if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
+        || (strncasecmp((char*)_lastUsedDeviceName,
+                        (char*) deviceUniqueIdUTF8,
+                        _lastUsedDeviceNameLength)!=0))
+#else
+        || (_strnicmp((char*) _lastUsedDeviceName,
+                      (char*) deviceUniqueIdUTF8,
+                      _lastUsedDeviceNameLength) != 0))
+#endif
+
+    {
+        _apiLock.ReleaseLockShared();
+        _apiLock.AcquireLockExclusive();
+        if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8))
+        {
+            _apiLock.ReleaseLockExclusive();
+            _apiLock.AcquireLockShared();
+            return -1;
+        }
+        _apiLock.ReleaseLockExclusive();
+        _apiLock.AcquireLockShared();
+    }
+
+    // Make sure the number is valid
+    if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.Size())
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "deviceCapabilityNumber %d is invalid in call to GetCapability",
+                   deviceCapabilityNumber);
+        return -1;
+    }
+
+    MapItem* item = _captureCapabilities.Find(deviceCapabilityNumber);
+    if (!item)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "Failed to find capability number %d of %d possible",
+                   deviceCapabilityNumber, _captureCapabilities.Size());
+        return -1;
+    }
+
+    VideoCaptureCapability* capPointer =  static_cast<VideoCaptureCapability*>
+                                          (item->GetItem());
+    if (!capPointer)
+    {
+        return -1;
+    }
+
+    capability = *capPointer;
+    return 0;
+}
+
+WebRtc_Word32 DeviceInfoImpl::GetBestMatchedCapability(
+                                        const WebRtc_UWord8*deviceUniqueIdUTF8,
+                                        const VideoCaptureCapability& requested,
+                                        VideoCaptureCapability& resulting)
+{
+
+
+    if (!deviceUniqueIdUTF8)
+        return -1;
+
+    ReadLockScoped cs(_apiLock);
+    if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
+        || (strncasecmp((char*)_lastUsedDeviceName,
+                        (char*) deviceUniqueIdUTF8,
+                        _lastUsedDeviceNameLength)!=0))
+#else
+        || (_strnicmp((char*) _lastUsedDeviceName,
+                      (char*) deviceUniqueIdUTF8,
+                      _lastUsedDeviceNameLength) != 0))
+#endif
+    {
+        _apiLock.ReleaseLockShared();
+        _apiLock.AcquireLockExclusive();
+        if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8))
+        {
+            return -1;
+        }
+        _apiLock.ReleaseLockExclusive();
+        _apiLock.AcquireLockShared();
+    }
+
+    WebRtc_Word32 bestformatIndex = -1;
+    WebRtc_Word32 bestWidth = 0;
+    WebRtc_Word32 bestHeight = 0;
+    WebRtc_Word32 bestFrameRate = 0;
+    RawVideoType bestRawType = kVideoUnknown;
+    webrtc::VideoCodecType bestCodecType = webrtc::kVideoCodecUnknown;
+
+    const WebRtc_Word32 numberOfCapabilies = _captureCapabilities.Size();
+
+    for (WebRtc_Word32 tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities
+    {
+        MapItem* item = _captureCapabilities.Find(tmp);
+        if (!item)
+            return -1;
+
+        VideoCaptureCapability& capability = *static_cast<VideoCaptureCapability*>
+                                              (item->GetItem());
+
+        const WebRtc_Word32 diffWidth = capability.width - requested.width;
+        const WebRtc_Word32 diffHeight = capability.height - requested.height;
+        const WebRtc_Word32 diffFrameRate = capability.maxFPS - requested.maxFPS;
+
+        const WebRtc_Word32 currentbestDiffWith = bestWidth - requested.width;
+        const WebRtc_Word32 currentbestDiffHeight = bestHeight - requested.height;
+        const WebRtc_Word32 currentbestDiffFrameRate = bestFrameRate - requested.maxFPS;
+
+        if ((diffHeight >= 0 && diffHeight <= abs(currentbestDiffHeight)) // Height better than or equal to previous.
+            || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight))
+        {
+
+            if (diffHeight == currentbestDiffHeight) // Found best height. Care about the width)
+            {
+                if ((diffWidth >= 0 && diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+                    || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith))
+                {
+                    if (diffWidth == currentbestDiffWith && diffHeight
+                        == currentbestDiffHeight) // Same size as previously
+                    {
+                        //Also check the best frame rate if the diff is the same as previous
+                        if (((diffFrameRate >= 0 &&
+                              diffFrameRate <= currentbestDiffFrameRate) // Frame rate too high but better match than previous and we have not selected IUV
+                            ||
+                            (currentbestDiffFrameRate < 0 &&
+                             diffFrameRate >= currentbestDiffFrameRate)) // Current frame rate is lower than requested. This is better.
+                        )
+                        {
+                            if ((currentbestDiffFrameRate == diffFrameRate) // Same frame rate as previous or frame rate already good enough
+                                || (currentbestDiffFrameRate >= 0))
+                            {
+                                if (bestRawType != requested.rawType
+                                    && requested.rawType != kVideoUnknown
+                                    && (capability.rawType == requested.rawType
+                                        || capability.rawType == kVideoI420
+                                        || capability.rawType == kVideoYUY2
+                                        || capability.rawType == kVideoYV12))
+                                {
+                                    bestCodecType = capability.codecType;
+                                    bestRawType = capability.rawType;
+                                    bestformatIndex = tmp;
+                                }
+                                // If width, height and frame rate are fulfilled we can use the camera for encoding if it is supported.
+                                if (capability.height == requested.height
+                                    && capability.width == requested.width
+                                    && capability.maxFPS >= requested.maxFPS)
+                                {
+                                    if (capability.codecType == requested.codecType
+                                        && bestCodecType != requested.codecType)
+                                    {
+                                        bestCodecType = capability.codecType;
+                                        bestformatIndex = tmp;
+                                    }
+                                }
+                            }
+                            else // Better frame rate
+                            {
+                                if (requested.codecType == capability.codecType)
+                                {
+
+                                    bestWidth = capability.width;
+                                    bestHeight = capability.height;
+                                    bestFrameRate = capability.maxFPS;
+                                    bestCodecType = capability.codecType;
+                                    bestRawType = capability.rawType;
+                                    bestformatIndex = tmp;
+                                }
+                            }
+                        }
+                    }
+                    else // Better width than previously
+                    {
+                        if (requested.codecType == capability.codecType)
+                        {
+                            bestWidth = capability.width;
+                            bestHeight = capability.height;
+                            bestFrameRate = capability.maxFPS;
+                            bestCodecType = capability.codecType;
+                            bestRawType = capability.rawType;
+                            bestformatIndex = tmp;
+                        }
+                    }
+                }// else width no good
+            }
+            else // Better height
+            {
+                if (requested.codecType == capability.codecType)
+                {
+                    bestWidth = capability.width;
+                    bestHeight = capability.height;
+                    bestFrameRate = capability.maxFPS;
+                    bestCodecType = capability.codecType;
+                    bestRawType = capability.rawType;
+                    bestformatIndex = tmp;
+                }
+            }
+        }// else height not good
+    }//end for
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+               "Best camera format: Width %d, Height %d, Frame rate %d, Color format %d",
+               bestWidth, bestHeight, bestFrameRate, bestRawType);
+
+    // Copy the capability
+    MapItem* item = _captureCapabilities.Find(bestformatIndex);
+    if (!item)
+        return -1;
+    VideoCaptureCapability* capPointer =
+        static_cast<VideoCaptureCapability*> (item->GetItem());
+    if (!capPointer)
+        return -1;
+
+    resulting = *capPointer;
+
+    return bestformatIndex;
+}
+
+/* Returns the expected Capture delay*/
+WebRtc_Word32 DeviceInfoImpl::GetExpectedCaptureDelay(
+                                          const DelayValues delayValues[],
+                                          const WebRtc_UWord32 sizeOfDelayValues,
+                                          const WebRtc_UWord8* productId,
+                                          const WebRtc_UWord32 width,
+                                          const WebRtc_UWord32 height)
+{
+    WebRtc_Word32 bestDelay = kDefaultCaptureDelay;
+
+    for (WebRtc_UWord32 device = 0; device < sizeOfDelayValues; ++device)
+    {
+        if (delayValues[device].productId && strncmp((char*) productId,
+                                                     (char*) delayValues[device].productId,
+                                                     kVideoCaptureProductIdLength) == 0)
+        {
+            // We have found the camera
+
+            WebRtc_Word32 bestWidth = 0;
+            WebRtc_Word32 bestHeight = 0;
+
+            //Loop through all tested sizes and find one that seems fitting
+            for (WebRtc_UWord32 delayIndex = 0; delayIndex < NoOfDelayValues; ++delayIndex)
+            {
+                const DelayValue& currentValue = delayValues[device].delayValues[delayIndex];
+
+                const WebRtc_Word32 diffWidth = currentValue.width - width;
+                const WebRtc_Word32 diffHeight = currentValue.height - height;
+
+                const WebRtc_Word32 currentbestDiffWith = bestWidth - width;
+                const WebRtc_Word32 currentbestDiffHeight = bestHeight - height;
+
+                if ((diffHeight >= 0 && diffHeight <= abs(currentbestDiffHeight)) // Height better or equal than previous.
+                    || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight))
+                {
+
+                    if (diffHeight == currentbestDiffHeight) // Found best height. Care about the width)
+                    {
+                        if ((diffWidth >= 0 && diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+                            || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith))
+                        {
+                            if (diffWidth == currentbestDiffWith && diffHeight
+                                == currentbestDiffHeight) // Same size as previous
+                            {
+                            }
+                            else // Better width than previously
+                            {
+                                bestWidth = currentValue.width;
+                                bestHeight = currentValue.height;
+                                bestDelay = currentValue.delay;
+                            }
+                        }// else width no good
+                    }
+                    else // Better height
+                    {
+                        bestWidth = currentValue.width;
+                        bestHeight = currentValue.height;
+                        bestDelay = currentValue.delay;
+                    }
+                }// else height not good
+            }//end for
+            break;
+        }
+    }
+    if (bestDelay > kMaxCaptureDelay)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                   "Expected capture delay too high. %dms, will use %d", bestDelay,
+                   kMaxCaptureDelay);
+        bestDelay = kMaxCaptureDelay;
+
+    }
+
+    return bestDelay;
+
+}
+
+//Default implementation. This should be overridden by Mobile implementations.
+WebRtc_Word32 DeviceInfoImpl::GetOrientation(const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                             VideoCaptureRotation& orientation)
+{
+    orientation = kCameraRotate0;
+    return -1;
+}
+} //namespace videocapturemodule
+} // namespace webrtc
+
+
diff --git a/trunk/src/modules/video_capture/main/source/device_info_impl.h b/trunk/src/modules/video_capture/main/source/device_info_impl.h
new file mode 100644
index 0000000..34ef65d
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/device_info_impl.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+
+#include "video_capture.h"
+
+#include "map_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "video_capture_delay.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+class DeviceInfoImpl: public VideoCaptureModule::DeviceInfo
+{
+public:
+    DeviceInfoImpl(const WebRtc_Word32 id);
+    virtual ~DeviceInfoImpl(void);
+    virtual WebRtc_Word32 NumberOfCapabilities(const WebRtc_UWord8* deviceUniqueIdUTF8);
+    virtual WebRtc_Word32 GetCapability(const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                        const WebRtc_UWord32 deviceCapabilityNumber,
+                                        VideoCaptureCapability& capability);
+
+    virtual WebRtc_Word32 GetBestMatchedCapability(const WebRtc_UWord8*deviceUniqueIdUTF8,
+                                                   const VideoCaptureCapability& requested,
+                                                   VideoCaptureCapability& resulting);
+    virtual WebRtc_Word32 GetOrientation(const WebRtc_UWord8* deviceUniqueIdUTF8,
+                                         VideoCaptureRotation& orientation);
+
+protected:
+    /* Initialize this object*/
+
+    virtual WebRtc_Word32 Init()=0;
+    /*
+     * Fills the member variable _captureCapabilities with capabilities for the given device name.
+     */
+    virtual WebRtc_Word32 CreateCapabilityMap(const WebRtc_UWord8* deviceUniqueIdUTF8)=0;
+
+    /* Returns the expected Capture delay*/
+    WebRtc_Word32 GetExpectedCaptureDelay(const DelayValues delayValues[],
+                                          const WebRtc_UWord32 sizeOfDelayValues,
+                                          const WebRtc_UWord8* productId,
+                                          const WebRtc_UWord32 width,
+                                          const WebRtc_UWord32 height);
+protected:
+    // Data members
+    WebRtc_Word32 _id;
+    MapWrapper _captureCapabilities;
+    RWLockWrapper& _apiLock;
+    WebRtc_UWord8* _lastUsedDeviceName;
+    WebRtc_UWord32 _lastUsedDeviceNameLength;
+};
+} //namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
diff --git a/trunk/src/modules/video_capture/main/source/video_capture.gypi b/trunk/src/modules/video_capture/main/source/video_capture.gypi
new file mode 100644
index 0000000..de696ae
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/video_capture.gypi
@@ -0,0 +1,259 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_capture_module',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+        '<(webrtc_root)/common_video/libyuv/include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../../interface',
+          '<(webrtc_root)/common_video/libyuv/include',
+        ],
+      },
+      'sources': [
+        # interfaces
+        '../interface/video_capture.h',
+        '../interface/video_capture_defines.h',
+        '../interface/video_capture_factory.h',
+        # headers
+        'video_capture_config.h',
+        'video_capture_delay.h',
+        'video_capture_impl.h',
+        'device_info_impl.h',
+
+        # DEFINE PLATFORM INDEPENDENT SOURCE FILES
+        'video_capture_factory.cc',
+        'video_capture_impl.cc',
+        'device_info_impl.cc',
+      ],
+      'conditions': [
+        ['include_internal_video_capture==0', {
+          'sources': [
+            'External/device_info_external.cc',
+            'External/video_capture_external.cc',
+          ],
+        },{  # include_internal_video_capture == 1
+          'conditions': [
+            # DEFINE PLATFORM SPECIFIC SOURCE FILES
+            ['OS=="linux"', {
+              'include_dirs': [
+                'Linux',
+              ],
+              'sources': [
+                'Linux/device_info_linux.h',
+                'Linux/video_capture_linux.h',
+                'Linux/device_info_linux.cc',
+                'Linux/video_capture_linux.cc',
+              ],
+            }],  # linux
+            ['OS=="mac"', {
+              'sources': [
+                'Mac/QTKit/video_capture_recursive_lock.h',
+                'Mac/QTKit/video_capture_qtkit.h',
+                'Mac/QTKit/video_capture_qtkit_info.h',
+                'Mac/QTKit/video_capture_qtkit_info_objc.h',
+                'Mac/QTKit/video_capture_qtkit_objc.h',
+                'Mac/QTKit/video_capture_qtkit_utility.h',
+                'Mac/video_capture_mac.mm',
+                'Mac/QTKit/video_capture_qtkit.mm',
+                'Mac/QTKit/video_capture_qtkit_objc.mm',
+                'Mac/QTKit/video_capture_recursive_lock.mm',
+                'Mac/QTKit/video_capture_qtkit_info.mm',
+                'Mac/QTKit/video_capture_qtkit_info_objc.mm',
+              ],
+              'include_dirs': [
+                'Mac',
+              ],
+              'link_settings': {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-framework QTKit',
+                  ],
+                },
+              },
+            }],  # mac
+            ['OS=="win"', {
+              'include_dirs': [
+                'Windows',
+                '<(direct_show_base_classes)',
+              ],
+              'defines!': [
+                'NOMINMAX',
+              ],
+              'sources': [
+                'Windows/help_functions_windows.h',
+                'Windows/sink_filter_windows.h',
+                'Windows/video_capture_windows.h',
+                'Windows/device_info_windows.h',
+                'Windows/capture_delay_values_windows.h',
+                'Windows/help_functions_windows.cc',
+                'Windows/sink_filter_windows.cc',
+                'Windows/video_capture_windows.cc',
+                'Windows/device_info_windows.cc',
+                'Windows/video_capture_factory_windows.cc',
+                '<(direct_show_base_classes)amextra.cpp',
+                '<(direct_show_base_classes)amextra.h',
+                '<(direct_show_base_classes)amfilter.cpp',
+                '<(direct_show_base_classes)amfilter.h',
+                '<(direct_show_base_classes)amvideo.cpp',
+                '<(direct_show_base_classes)arithutil.cpp',
+                '<(direct_show_base_classes)cache.h',
+                '<(direct_show_base_classes)checkbmi.h',
+                '<(direct_show_base_classes)combase.cpp',
+                '<(direct_show_base_classes)combase.h',
+                '<(direct_show_base_classes)cprop.cpp',
+                '<(direct_show_base_classes)cprop.h',
+                '<(direct_show_base_classes)ctlutil.cpp',
+                '<(direct_show_base_classes)ctlutil.h',
+                '<(direct_show_base_classes)ddmm.cpp',
+                '<(direct_show_base_classes)ddmm.h',
+                '<(direct_show_base_classes)dllentry.cpp',
+                '<(direct_show_base_classes)dllsetup.cpp',
+                '<(direct_show_base_classes)dllsetup.h',
+                '<(direct_show_base_classes)dxmperf.h',
+                '<(direct_show_base_classes)fourcc.h',
+                '<(direct_show_base_classes)measure.h',
+                '<(direct_show_base_classes)msgthrd.h',
+                '<(direct_show_base_classes)mtype.cpp',
+                '<(direct_show_base_classes)mtype.h',
+                '<(direct_show_base_classes)outputq.cpp',
+                '<(direct_show_base_classes)outputq.h',
+                '<(direct_show_base_classes)perflog.cpp',
+                '<(direct_show_base_classes)perflog.h',
+                '<(direct_show_base_classes)perfstruct.h',
+                '<(direct_show_base_classes)pstream.cpp',
+                '<(direct_show_base_classes)pstream.h',
+                '<(direct_show_base_classes)pullpin.cpp',
+                '<(direct_show_base_classes)pullpin.h',
+                '<(direct_show_base_classes)refclock.cpp',
+                '<(direct_show_base_classes)refclock.h',
+                '<(direct_show_base_classes)reftime.h',
+                '<(direct_show_base_classes)renbase.cpp',
+                '<(direct_show_base_classes)renbase.h',
+                '<(direct_show_base_classes)schedule.cpp',
+                '<(direct_show_base_classes)schedule.h',
+                '<(direct_show_base_classes)seekpt.cpp',
+                '<(direct_show_base_classes)seekpt.h',
+                '<(direct_show_base_classes)source.cpp',
+                '<(direct_show_base_classes)source.h',
+                '<(direct_show_base_classes)streams.h',
+                '<(direct_show_base_classes)strmctl.cpp',
+                '<(direct_show_base_classes)strmctl.h',
+                '<(direct_show_base_classes)sysclock.cpp',
+                '<(direct_show_base_classes)sysclock.h',
+                '<(direct_show_base_classes)transfrm.cpp',
+                '<(direct_show_base_classes)transfrm.h',
+                '<(direct_show_base_classes)transip.cpp',
+                '<(direct_show_base_classes)transip.h',
+                '<(direct_show_base_classes)videoctl.cpp',
+                '<(direct_show_base_classes)videoctl.h',
+                '<(direct_show_base_classes)vtrans.cpp',
+                '<(direct_show_base_classes)vtrans.h',
+                '<(direct_show_base_classes)winctrl.cpp',
+                '<(direct_show_base_classes)winctrl.h',
+                '<(direct_show_base_classes)winutil.cpp',
+                '<(direct_show_base_classes)winutil.h',
+                '<(direct_show_base_classes)wxdebug.cpp',
+                '<(direct_show_base_classes)wxdebug.h',
+                '<(direct_show_base_classes)wxlist.cpp',
+                '<(direct_show_base_classes)wxlist.h',
+                '<(direct_show_base_classes)wxutil.cpp',
+                '<(direct_show_base_classes)wxutil.h',
+              ],
+              'msvs_settings': {
+                'VCLibrarianTool': {
+                  'AdditionalDependencies': 'Strmiids.lib',
+                },
+              },
+            }],  # win
+            ['OS=="android"', {
+              'include_dirs': [
+                'Android',
+              ],
+              'sources': [
+                'Android/device_info_android.cc',
+                'Android/device_info_android.h',
+                'Android/video_capture_android.cc',
+                'Android/video_capture_android.h',
+              ],
+            }],  # android
+          ], # conditions
+        }],  # include_internal_video_capture
+      ], # conditions
+    },
+  ],
+  # Exclude the test targets when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'video_capture_module_test',
+          'type': 'executable',
+          'dependencies': [
+            'video_capture_module',
+            'webrtc_utility',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'include_dirs': [
+            '../interface',
+          ],
+          'sources': [
+            '../test/video_capture_unittest.cc',
+          ],
+          'conditions': [
+            # DEFINE PLATFORM SPECIFIC INCLUDE AND CFLAGS
+            ['OS=="mac" or OS=="linux"', {
+              'cflags': [
+                '-Wno-write-strings',
+              ],
+              'ldflags': [
+                '-lpthread -lm',
+              ],
+            }],
+            ['OS=="linux"', {
+              'libraries': [
+                '-lrt',
+                '-lXext',
+                '-lX11',
+              ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                # TODO(andrew): CoreAudio and AudioToolbox shouldn't be needed.
+                'OTHER_LDFLAGS': [
+                  '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL -framework CoreVideo -framework CoreAudio -framework AudioToolbox',
+                ],
+              },
+            }],
+          ] # conditions
+        },
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_capture/main/source/video_capture_config.h b/trunk/src/modules/video_capture/main/source/video_capture_config.h
new file mode 100644
index 0000000..ab4010c
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/video_capture_config.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Compile-time defaults and limits for the video capture module.
+enum {kDefaultWidth = 640};  // Start width
+enum {kDefaultHeight = 480}; // Start height
+enum {kDefaultFrameRate = 30}; // Start frame rate
+
+enum {kMaxFrameRate =60}; // Max allowed frame rate of the start image
+
+enum {kDefaultCaptureDelay = 120}; // Fallback capture delay (presumably ms) -- usage not visible here; confirm against device_info code.
+enum {kMaxCaptureDelay = 270}; // Max capture delay allowed in the precompiled capture delay values.
+
+enum {kProcessInterval = 300}; // Interval (ms) between Process() runs, see VideoCaptureImpl::TimeUntilNextProcess().
+enum {kFrameRateCallbackInterval = 1000}; // Interval (ms) between OnCaptureFrameRate callbacks in Process().
+enum {kFrameRateCountHistorySize = 90}; // Number of frame timestamps kept for frame-rate estimation.
+enum {kFrameRateHistoryWindowMs = 2000}; // Frames older than this (ms) are ignored by CalculateFrameRate().
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
diff --git a/trunk/src/modules/video_capture/main/source/video_capture_delay.h b/trunk/src/modules/video_capture/main/source/video_capture_delay.h
new file mode 100644
index 0000000..9f5b76e
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/video_capture_delay.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// Capture delay measured for one specific resolution (delay presumably in
+// milliseconds, matching SetCaptureDelay(delayMS) -- confirm).
+struct DelayValue
+{
+    WebRtc_Word32 width;
+    WebRtc_Word32 height;
+    WebRtc_Word32 delay;
+};
+
+// Fixed capacity of the per-device delay table below.
+enum { NoOfDelayValues = 40 };
+// Per-device table of measured capture delays; consumed by
+// DeviceInfoImpl::GetExpectedCaptureDelay().
+struct DelayValues
+{
+    char * deviceName;
+    char* productId;
+    DelayValue delayValues[NoOfDelayValues];
+};
+
+} //namespace videocapturemodule
+} //namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
diff --git a/trunk/src/modules/video_capture/main/source/video_capture_factory.cc b/trunk/src/modules/video_capture/main/source/video_capture_factory.cc
new file mode 100644
index 0000000..ac0e641
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/video_capture_factory.cc
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_factory.h"
+#include "video_capture_impl.h"
+#ifdef WEBRTC_ANDROID
+#include "video_capture_android.h"
+#endif
+
+namespace webrtc
+{
+
+VideoCaptureModule* VideoCaptureFactory::Create(const WebRtc_Word32 id,
+    const WebRtc_UWord8* deviceUniqueIdUTF8) {
+  // Thin forwarding wrapper around the concrete implementation.
+  VideoCaptureModule* const capture_module =
+      videocapturemodule::VideoCaptureImpl::Create(id, deviceUniqueIdUTF8);
+  return capture_module;
+}
+
+VideoCaptureModule* VideoCaptureFactory::Create(const WebRtc_Word32 id,
+    VideoCaptureExternal*& externalCapture) {
+  // Forward to the implementation; |externalCapture| is filled in there.
+  VideoCaptureModule* const capture_module =
+      videocapturemodule::VideoCaptureImpl::Create(id, externalCapture);
+  return capture_module;
+}
+
+VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
+    const WebRtc_Word32 id) {
+  // Device enumeration is provided by the implementation layer.
+  VideoCaptureModule::DeviceInfo* const device_info =
+      videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(id);
+  return device_info;
+}
+
+#ifdef WEBRTC_ANDROID
+WebRtc_Word32 VideoCaptureFactory::SetAndroidObjects(void* javaVM,
+    void* javaContext) {
+  // Hand the JVM and application context to the Android capture backend.
+  const WebRtc_Word32 result =
+      videocapturemodule::VideoCaptureAndroid::SetAndroidObjects(javaVM,
+                                                                 javaContext);
+  return result;
+}
+#endif
+
+} // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/video_capture_impl.cc b/trunk/src/modules/video_capture/main/source/video_capture_impl.cc
new file mode 100644
index 0000000..f976d10
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/video_capture_impl.cc
@@ -0,0 +1,501 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_impl.h"
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "critical_section_wrapper.h"
+#include "module_common_types.h"
+#include "ref_count.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "video_capture_config.h"
+
+#ifdef WEBRTC_ANDROID
+#include "video_capture_android.h" // Need inclusion here to set Java environment.
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    VideoCaptureExternal*& externalCapture)
+{
+    // Wrap the implementation in a reference-counting proxy and hand the
+    // external-capture interface back through the out parameter.
+    RefCountImpl<VideoCaptureImpl>* capture_module =
+        new RefCountImpl<VideoCaptureImpl>(id);
+    externalCapture = capture_module;
+    return capture_module;
+}
+
+// Returns the unique id of the current device. May be NULL: the member is
+// initialized to NULL in the constructor and only freed in the destructor.
+const WebRtc_UWord8* VideoCaptureImpl::CurrentDeviceName() const
+{
+    return _deviceUniqueId;
+}
+
+// Replaces the module id used in traces and callbacks; no other state changes.
+WebRtc_Word32 VideoCaptureImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// Returns the number of milliseconds until the module wants a worker thread
+// to call Process(). Fix: the original sampled TickTime::Now() a second time
+// for the normal-process computation instead of reusing the snapshot taken at
+// the top, so the two deadlines were computed against slightly different
+// clocks; use a single snapshot for both.
+WebRtc_Word32 VideoCaptureImpl::TimeUntilNextProcess()
+{
+    const TickTime timeNow = TickTime::Now();
+
+    WebRtc_Word32 timeToNormalProcess = kProcessInterval
+        - (WebRtc_Word32)((timeNow - _lastProcessTime).Milliseconds());
+    WebRtc_Word32 timeToStartImage = timeToNormalProcess;
+    if (_startImageFrameIntervall)
+    {
+        // A start image is being sent periodically; it may be due sooner
+        // than the normal Process() work.
+        timeToStartImage = _startImageFrameIntervall
+            - (WebRtc_Word32)((timeNow - _lastSentStartImageTime).Milliseconds());
+        if (timeToStartImage < 0)
+        {
+            timeToStartImage = 0;
+        }
+    }
+    // Whichever deadline is closer wins.
+    return (timeToStartImage < timeToNormalProcess)
+            ? timeToStartImage : timeToNormalProcess;
+}
+
+// Process any pending tasks such as timeouts. Called periodically by a
+// worker thread at the cadence requested by TimeUntilNextProcess().
+WebRtc_Word32 VideoCaptureImpl::Process()
+{
+    CriticalSectionScoped cs(_callBackCs);
+
+    const TickTime now = TickTime::Now();
+    _lastProcessTime = TickTime::Now();
+
+    // Handle No picture alarm
+
+    // No new frame since the last Process() call (newest incoming-frame
+    // timestamp unchanged) -> raise the alarm once.
+    if (_lastProcessFrameCount.Ticks() == _incomingFrameTimes[0].Ticks() &&
+        _captureAlarm != Raised)
+    {
+        if (_noPictureAlarmCallBack && _captureCallBack)
+        {
+            _captureAlarm = Raised;
+            _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
+        }
+    }
+    // Frames are flowing again -> clear the alarm once.
+    else if (_lastProcessFrameCount.Ticks() != _incomingFrameTimes[0].Ticks() &&
+             _captureAlarm != Cleared)
+    {
+        if (_noPictureAlarmCallBack && _captureCallBack)
+        {
+            _captureAlarm = Cleared;
+            _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
+
+        }
+    }
+
+    // Handle frame rate callback
+    if ((now - _lastFrameRateCallbackTime).Milliseconds()
+        > kFrameRateCallbackInterval)
+    {
+        if (_frameRateCallBack && _captureCallBack)
+        {
+            const WebRtc_UWord32 frameRate = CalculateFrameRate(now);
+            _captureCallBack->OnCaptureFrameRate(_id, frameRate);
+        }
+        _lastFrameRateCallbackTime = now; // Can be set by EnableFrameRateCallback
+
+    }
+
+    // Remember the newest frame timestamp for the next alarm check above.
+    _lastProcessFrameCount = _incomingFrameTimes[0];
+
+    // Handle start image frame rates.
+    // While a start image is active (interval != 0), re-deliver it at the
+    // requested rate; DeliverCapturedFrame() zeroes the interval as soon as
+    // real frames arrive.
+    if (_startImageFrameIntervall
+        && (now - _lastSentStartImageTime).Milliseconds() >= _startImageFrameIntervall)
+    {
+        _lastSentStartImageTime = now;
+        if (_dataCallBack)
+        {
+            _captureFrame.CopyFrame(_startImage);
+            _captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+            _dataCallBack->OnIncomingCapturedFrame(_id, _captureFrame,
+                                                   kVideoCodecUnknown);
+        }
+    }
+    return 0;
+}
+
+// Initializes all members; the capture capability defaults come from
+// video_capture_config.h.
+VideoCaptureImpl::VideoCaptureImpl(const WebRtc_Word32 id)
+    : _id(id), _deviceUniqueId(NULL), _apiCs(*CriticalSectionWrapper::CreateCriticalSection()),
+      _captureDelay(0), _requestedCapability(),
+      _callBackCs(*CriticalSectionWrapper::CreateCriticalSection()),
+      _lastProcessTime(TickTime::Now()),
+      _lastFrameRateCallbackTime(TickTime::Now()), _frameRateCallBack(false),
+      _noPictureAlarmCallBack(false), _captureAlarm(Cleared), _setCaptureDelay(0),
+      _dataCallBack(NULL), _captureCallBack(NULL),
+      _startImage(), _startImageFrameIntervall(0),
+      _lastSentStartImageTime(TickTime::Now()),
+      _lastProcessFrameCount(TickTime::Now()), _rotateFrame(kRotateNone),
+      last_capture_time_(TickTime::MillisecondTimestamp())
+{
+    _requestedCapability.width = kDefaultWidth;
+    _requestedCapability.height = kDefaultHeight;
+    // Use the named constant instead of a magic 30 so the default stays in
+    // sync with kDefaultFrameRate in video_capture_config.h.
+    _requestedCapability.maxFPS = kDefaultFrameRate;
+    _requestedCapability.rawType = kVideoI420;
+    _requestedCapability.codecType = kVideoCodecUnknown;
+    // Zeroed timestamps mark the frame-time history as empty; see
+    // UpdateFrameCount().
+    memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+}
+
+VideoCaptureImpl::~VideoCaptureImpl()
+{
+    // Drop registered callbacks before destroying the locks they are
+    // guarded by.
+    DeRegisterCaptureDataCallback();
+    DeRegisterCaptureCallback();
+    delete &_callBackCs;
+    delete &_apiCs;
+
+    // delete[] on a null pointer is a no-op, so no guard is needed.
+    delete[] _deviceUniqueId;
+}
+
+WebRtc_Word32 VideoCaptureImpl::RegisterCaptureDataCallback(
+                                        VideoCaptureDataCallback& dataCallBack)
+{
+    // Hold both locks so frame delivery never observes a half-updated pointer.
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    _dataCallBack = &dataCallBack;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::DeRegisterCaptureDataCallback()
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    // Clearing the pointer stops frame delivery in DeliverCapturedFrame().
+    _dataCallBack = NULL;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack)
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    // Feedback sink for frame-rate and no-picture-alarm events (see Process()).
+    _captureCallBack = &callBack;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::DeRegisterCaptureCallback()
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    // Process() checks this pointer before emitting feedback events.
+    _captureCallBack = NULL;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::SetCaptureDelay(WebRtc_Word32 delayMS)
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    // Picked up by DeliverCapturedFrame(), which copies it to
+    // _setCaptureDelay and reports the change via OnCaptureDelayChanged.
+    _captureDelay = delayMS;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::CaptureDelay()
+{
+    CriticalSectionScoped cs(_apiCs);
+    // Intentionally returns _setCaptureDelay (the delay in effect at the
+    // last delivered frame) rather than _captureDelay, which may hold a
+    // newer, not-yet-applied value; DeliverCapturedFrame() syncs the two.
+    return _setCaptureDelay;
+}
+
+// Stamps and forwards a captured frame to the registered data callback.
+// Returns 0 on delivery, -1 if the frame is dropped because it carries the
+// same render time as the previous frame.
+WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
+    WebRtc_Word32 width, WebRtc_Word32 height, WebRtc_Word64 capture_time,
+    VideoCodecType codec_type) {
+  UpdateFrameCount();// frame count used for local frame rate callback.
+  _startImageFrameIntervall = 0; // prevent the start image to be displayed.
+
+  // Detect a pending SetCaptureDelay() and apply it now.
+  const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
+  // Capture delay changed
+  if (_setCaptureDelay != _captureDelay) {
+      _setCaptureDelay = _captureDelay;
+  }
+
+  // Set the capture time; 0 means "unknown", so stamp with the local clock.
+  if (capture_time != 0) {
+      captureFrame.SetRenderTime(capture_time);
+  }
+  else {
+      captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+  }
+
+  if (captureFrame.RenderTimeMs() == last_capture_time_) {
+    // We don't allow the same capture time for two frames, drop this one.
+    return -1;
+  }
+  last_capture_time_ = captureFrame.RenderTimeMs();
+
+  captureFrame.SetHeight(height);
+  captureFrame.SetWidth(width);
+
+  if (_dataCallBack) {
+    if (callOnCaptureDelayChanged) {
+      _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
+    }
+    _dataCallBack->OnIncomingCapturedFrame(_id, captureFrame, codec_type);
+  }
+
+  return 0;
+}
+
+// Entry point for raw or encoded frames from the platform capturer.
+// Raw frames are converted (and rotated per _rotateFrame) to I420; encoded
+// frames are copied as-is. The result is handed to DeliverCapturedFrame().
+// Returns 0 on success, -1 on a length mismatch or conversion/allocation
+// failure.
+WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
+    WebRtc_UWord8* videoFrame,
+    WebRtc_Word32 videoFrameLength,
+    const VideoCaptureCapability& frameInfo,
+    WebRtc_Word64 captureTime/*=0*/)
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideoCapture, _id,
+               "IncomingFrame width %d, height %d", (int) frameInfo.width,
+               (int) frameInfo.height);
+
+    TickTime startProcessTime = TickTime::Now();
+
+    CriticalSectionScoped cs(_callBackCs);
+
+    const WebRtc_Word32 width = frameInfo.width;
+    const WebRtc_Word32 height = frameInfo.height;
+
+    if (frameInfo.codecType == kVideoCodecUnknown)
+    {
+        // Not encoded, convert to I420.
+        const VideoType commonVideoType =
+                  RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+
+        // MJPEG is variable-length, so the size check only applies to
+        // fixed-size raw formats.
+        if (frameInfo.rawType != kVideoMJPEG &&
+            CalcBufferSize(commonVideoType, width, height) != videoFrameLength)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Wrong incoming frame length.");
+            return -1;
+        }
+
+        // Allocate I420 buffer.
+        int requiredLength = CalcBufferSize(kI420, width, height);
+        _captureFrame.VerifyAndAllocate(requiredLength);
+        if (!_captureFrame.Buffer())
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "Failed to allocate frame buffer.");
+            return -1;
+        }
+
+        memset(_captureFrame.Buffer(), 0, _captureFrame.Size());
+        // Keeping stride = width for I420 destination.
+        int dstStride  = width;
+        const int conversionResult = ConvertToI420(commonVideoType,
+                                                   videoFrame,
+                                                   0, 0,  // No cropping
+                                                   width, height,
+                                                   videoFrameLength,
+                                                   width, height, dstStride,
+                                                   _rotateFrame,
+                                                   _captureFrame.Buffer());
+        if (conversionResult < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "Failed to convert capture frame from type %d to I420",
+                       frameInfo.rawType);
+            return -1;
+        }
+        _captureFrame.SetLength(requiredLength);
+    }
+    else // Encoded format
+    {
+        // NOTE(review): a failed CopyFrame is only traced; the (possibly
+        // stale) _captureFrame is still delivered below -- confirm intended.
+        if (_captureFrame.CopyFrame(videoFrameLength, videoFrame) != 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "Failed to copy captured frame of length %d", (int) videoFrameLength);
+        }
+    }
+
+    DeliverCapturedFrame(_captureFrame, width, height, captureTime, frameInfo.codecType);
+
+
+    const WebRtc_UWord32 processTime =
+        (WebRtc_UWord32)(TickTime::Now() - startProcessTime).Milliseconds();
+    if (processTime > 10) // If the process time is too long MJPG will not work well.
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                   "Too long processing time of Incoming frame: %ums",
+                   (unsigned int) processTime);
+    }
+
+    return 0;
+}
+
+// Accepts a planar I420 frame described by per-plane pointers and pitches,
+// packs it into _captureFrame and delivers it. Returns 0 on success, -1 on
+// allocation failure.
+WebRtc_Word32 VideoCaptureImpl::IncomingFrameI420(
+    const VideoFrameI420& video_frame, WebRtc_Word64 captureTime) {
+
+  CriticalSectionScoped cs(_callBackCs);
+
+  // Allocate I420 buffer
+  int frame_size = CalcBufferSize(kI420,
+                                  video_frame.width,
+                                  video_frame.height);
+  _captureFrame.VerifyAndAllocate(frame_size);
+  if (!_captureFrame.Buffer()) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+               "Failed to allocate frame buffer.");
+    return -1;
+  }
+
+  // Copy planes to the _captureFrame
+  int y_width = video_frame.width;
+  int uv_width = video_frame.width / 2;
+  int y_rows = video_frame.height;
+  int uv_rows = video_frame.height / 2;  // I420
+  unsigned char* current_pointer = _captureFrame.Buffer();
+  unsigned char* y_plane = video_frame.y_plane;
+  unsigned char* u_plane = video_frame.u_plane;
+  unsigned char* v_plane = video_frame.v_plane;
+  // NOTE(review): the destination pointer advances by the SOURCE pitch for
+  // every row, yet the buffer was sized for packed I420 (stride == width).
+  // If any pitch exceeds its plane width this writes past the rows' packed
+  // positions -- verify pitches always equal the plane widths here.
+  // Copy Y
+  for (int i = 0; i < y_rows; ++i) {
+    memcpy(current_pointer, y_plane, y_width);
+    current_pointer += video_frame.y_pitch;
+    y_plane += video_frame.y_pitch;
+  }
+  // Copy U
+  for (int i = 0; i < uv_rows; ++i) {
+    memcpy(current_pointer, u_plane, uv_width);
+    current_pointer += video_frame.u_pitch;
+    u_plane += video_frame.u_pitch;
+  }
+  // Copy V
+  for (int i = 0; i < uv_rows; ++i) {
+    memcpy(current_pointer, v_plane, uv_width);
+    current_pointer += video_frame.v_pitch;
+    v_plane += video_frame.v_pitch;
+  }
+  _captureFrame.SetLength(frame_size);
+
+  DeliverCapturedFrame(_captureFrame,
+                       video_frame.width,
+                       video_frame.height,
+                       captureTime,
+                       kVideoCodecUnknown);
+
+  return 0;
+}
+
+// Maps the public VideoCaptureRotation enum onto the rotation mode applied
+// by ConvertToI420() in IncomingFrame(). Always returns 0.
+WebRtc_Word32 VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation)
+{
+    CriticalSectionScoped cs(_apiCs);
+    CriticalSectionScoped cs2(_callBackCs);
+    switch (rotation)
+    {
+        case kCameraRotate0:
+            _rotateFrame = kRotateNone;
+            break;
+        case kCameraRotate90:
+            _rotateFrame = kRotate90;
+            break;
+        case kCameraRotate180:
+            _rotateFrame = kRotate180;
+            break;
+        case kCameraRotate270:
+            _rotateFrame = kRotate270;
+            break;
+    }
+    return 0;
+}
+
+// Starts periodic delivery of a fixed image at |frameRate| fps until real
+// frames arrive (DeliverCapturedFrame zeroes the interval) or StopSendImage()
+// is called. Returns -1 for a frame rate outside [1, kMaxFrameRate].
+// Fixes: stray double semicolon after the error return and a dangling blank
+// line before the closing brace; parameter alignment corrected.
+WebRtc_Word32 VideoCaptureImpl::StartSendImage(const VideoFrame& videoFrame,
+                                               WebRtc_Word32 frameRate)
+{
+    CriticalSectionScoped cs(_apiCs);
+    CriticalSectionScoped cs2(_callBackCs);
+    if (frameRate < 1 || frameRate > kMaxFrameRate)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "StartSendImage Invalid parameter. frameRate %d", (int) frameRate);
+        return -1;
+    }
+    // Keep a private copy; Process() re-sends it every interval.
+    _startImage.CopyFrame(videoFrame);
+    _startImageFrameIntervall = 1000 / frameRate;
+    _lastSentStartImageTime = TickTime::Now();
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::StopSendImage()
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    // An interval of 0 disables the start image in Process().
+    _startImageFrameIntervall = 0;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    _frameRateCallBack = enable;
+    if (enable)
+    {
+        // Restart the reporting window so the first callback isn't stale.
+        _lastFrameRateCallbackTime = TickTime::Now();
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::EnableNoPictureAlarm(const bool enable)
+{
+    CriticalSectionScoped apiLock(_apiCs);
+    CriticalSectionScoped callBackLock(_callBackCs);
+    // Process() consults this flag before raising/clearing the alarm.
+    _noPictureAlarmCallBack = enable;
+    return 0;
+}
+
+void VideoCaptureImpl::UpdateFrameCount()
+{
+    // Shift the timestamp history one step -- unless it is still empty
+    // (slot 0 untouched since the constructor's memset).
+    if (_incomingFrameTimes[0].MicrosecondTimestamp() != 0)
+    {
+        for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
+        {
+            _incomingFrameTimes[i + 1] = _incomingFrameTimes[i];
+        }
+    }
+    // Record the arrival time of the newest frame in slot 0.
+    _incomingFrameTimes[0] = TickTime::Now();
+}
+
+// Estimates the capture frame rate (fps) from the timestamp history filled
+// by UpdateFrameCount(). Only frames within kFrameRateHistoryWindowMs of
+// |now| are counted; the result is rounded to the nearest integer.
+WebRtc_UWord32 VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
+{
+    WebRtc_Word32 num = 0;
+    WebRtc_Word32 nrOfFrames = 0;
+    // Walk the history (newest first) until it runs out of valid entries or
+    // leaves the measurement window.
+    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimes[num].Ticks() <= 0
+            || (now - _incomingFrameTimes[num]).Milliseconds() > kFrameRateHistoryWindowMs) // don't use data older than 2sec
+        {
+            break;
+        }
+        else
+        {
+            nrOfFrames++;
+        }
+    }
+    if (num > 1)
+    {
+        // Frames counted divided by the span they cover, rounded.
+        WebRtc_Word64 diff = (now - _incomingFrameTimes[num - 1]).Milliseconds();
+        if (diff > 0)
+        {
+            return WebRtc_UWord32((nrOfFrames * 1000.0f / diff) + 0.5f);
+        }
+    }
+
+    return nrOfFrames;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/trunk/src/modules/video_capture/main/source/video_capture_impl.h b/trunk/src/modules/video_capture/main/source/video_capture_impl.h
new file mode 100644
index 0000000..eae3f39
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/source/video_capture_impl.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+
+/*
+ * video_capture_impl.h
+ */
+
+#include "video_capture.h"
+#include "video_capture_config.h"
+#include "tick_util.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+namespace videocapturemodule {
+// Platform-independent base implementation of the video capture module.
+// Platform specific implementations derive from this class and override the
+// "Platform dependent" methods below, which here only record the request
+// and report failure.
+class VideoCaptureImpl: public VideoCaptureModule, public VideoCaptureExternal
+{
+public:
+
+    /*
+     *   Create a video capture module object
+     *
+     *   id              - unique identifier of this video capture module object
+     *   deviceUniqueIdUTF8 -  name of the device. Available names can be found by using GetDeviceName
+     */
+    static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                      const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    /*
+     *   Create a video capture module object used for external capture.
+     *
+     *   id              - unique identifier of this video capture module object
+     *   externalCapture - [out] interface to call when a new frame is captured.
+     */
+    static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                      VideoCaptureExternal*& externalCapture);
+
+    static DeviceInfo* CreateDeviceInfo(const WebRtc_Word32 id);
+
+    // Implements Module declared functions.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    // Callback registration / removal.
+    virtual WebRtc_Word32 RegisterCaptureDataCallback(VideoCaptureDataCallback& dataCallback);
+    virtual WebRtc_Word32 DeRegisterCaptureDataCallback();
+    virtual WebRtc_Word32 RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
+    virtual WebRtc_Word32 DeRegisterCaptureCallback();
+
+    // Periodically deliver 'videoFrame' as if it were captured, at
+    // 'frameRate' frames per second.
+    virtual WebRtc_Word32 StartSendImage(const VideoFrame& videoFrame,
+                                         WebRtc_Word32 frameRate = 1);
+    virtual WebRtc_Word32 StopSendImage();
+
+    virtual WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS);
+    virtual WebRtc_Word32 CaptureDelay();
+    virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation);
+
+    virtual WebRtc_Word32 EnableFrameRateCallback(const bool enable);
+    virtual WebRtc_Word32 EnableNoPictureAlarm(const bool enable);
+
+    virtual const WebRtc_UWord8* CurrentDeviceName() const;
+
+    // Module handling
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    // Implement VideoCaptureExternal
+    virtual WebRtc_Word32 IncomingFrame(WebRtc_UWord8* videoFrame,
+                                        WebRtc_Word32 videoFrameLength,
+                                        const VideoCaptureCapability& frameInfo,
+                                        WebRtc_Word64 captureTime = 0);
+    virtual WebRtc_Word32 IncomingFrameI420(
+        const VideoFrameI420& video_frame,
+        WebRtc_Word64 captureTime = 0);
+
+    // Platform dependent. These defaults only store the requested capability
+    // and return failure (-1); real implementations override them.
+    virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability)
+    {
+        _requestedCapability = capability;
+        return -1;
+    }
+    virtual WebRtc_Word32 StopCapture()   { return -1; }
+    virtual bool CaptureStarted() {return false; }
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& /*settings*/)
+    { return -1; }
+    VideoCaptureEncodeInterface* GetEncodeInterface(const VideoCodec& /*codec*/)
+    { return NULL; }
+
+protected:
+    VideoCaptureImpl(const WebRtc_Word32 id);
+    virtual ~VideoCaptureImpl();
+    // Delivers 'captureFrame' to the registered data callback, if any.
+    WebRtc_Word32 DeliverCapturedFrame(
+        VideoFrame& captureFrame, WebRtc_Word32 width, WebRtc_Word32 height,
+        WebRtc_Word64 capture_time, VideoCodecType codec_type);
+
+    WebRtc_Word32 _id; // Module ID
+    WebRtc_UWord8* _deviceUniqueId; // current device unique name
+    CriticalSectionWrapper& _apiCs; // guards the public API
+    WebRtc_Word32 _captureDelay; // Current capture delay. May be changed by platform dependent parts.
+    VideoCaptureCapability _requestedCapability; // Should be set by platform dependent code in StartCapture.
+private:
+    void UpdateFrameCount();
+    WebRtc_UWord32 CalculateFrameRate(const TickTime& now);
+
+    CriticalSectionWrapper& _callBackCs; // guards callback pointers and flags
+
+    TickTime _lastProcessTime; // last time the module process function was called.
+    TickTime _lastFrameRateCallbackTime; // last time the frame rate callback function was called.
+    bool _frameRateCallBack; // true if EnableFrameRateCallback(true) was called
+    bool _noPictureAlarmCallBack; // true if EnableNoPictureAlarm(true) was called
+    VideoCaptureAlarm _captureAlarm; // current value of the noPictureAlarm
+
+    WebRtc_Word32 _setCaptureDelay; // The currently used capture delay
+    VideoCaptureDataCallback* _dataCallBack;
+    VideoCaptureFeedBack* _captureCallBack;
+
+    VideoFrame _startImage; // frame delivered repeatedly by StartSendImage
+    WebRtc_Word32 _startImageFrameIntervall; // interval between StartSendImage deliveries (derived from its frameRate - confirm units in .cc)
+    TickTime _lastSentStartImageTime; // last time the start image was sent
+    TickTime _lastProcessFrameCount;
+    TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
+    VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module.
+
+    VideoFrame _captureFrame; // scratch frame used when delivering captures
+
+    // Used to make sure incoming timestamp is increasing for every frame.
+    WebRtc_Word64 last_capture_time_;
+};
+} // namespace videocapturemodule
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
diff --git a/trunk/src/modules/video_capture/main/test/android/.classpath b/trunk/src/modules/video_capture/main/test/android/.classpath
new file mode 100644
index 0000000..841ac51
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/.classpath
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="renderer"/>

+	<classpathentry kind="src" path="java"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/trunk/src/modules/video_capture/main/test/android/AndroidManifest.xml b/trunk/src/modules/video_capture/main/test/android/AndroidManifest.xml
new file mode 100644
index 0000000..1a4ec9b
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/AndroidManifest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  package="org.webrtc.capturemoduleandroidtest"
+	  android:versionCode="1"
+	  android:versionName="1.0">
+  <application android:icon="@drawable/icon"
+	       android:label="@string/app_name"
+	       android:debuggable="true">
+        <activity android:label="@string/app_name"
+		  android:name="VideoCaptureModuleTest"
+		  android:configChanges="orientation|keyboardHidden"
+		  android:launchMode="singleTask"
+		  android:multiprocess="false">
+          <intent-filter>
+            <action android:name="android.intent.action.MAIN" />
+            <category android:name="android.intent.category.LAUNCHER" />
+          </intent-filter>
+        </activity>
+  </application>
+
+  <uses-feature android:required="true"
+		android:name="android.hardware.camera">
+  </uses-feature>
+
+  <uses-permission android:name="android.permission.CAMERA">
+  </uses-permission>
+  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+  <uses-sdk android:targetSdkVersion="7"
+	    android:minSdkVersion="7">
+  </uses-sdk>
+</manifest> 
diff --git a/trunk/src/modules/video_capture/main/test/android/default.properties b/trunk/src/modules/video_capture/main/test/android/default.properties
new file mode 100644
index 0000000..2ad44a4
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target.

+target=android-9

diff --git a/trunk/src/modules/video_capture/main/test/android/gen/org/webrtc/capturemoduleandroidtest/R.java b/trunk/src/modules/video_capture/main/test/android/gen/org/webrtc/capturemoduleandroidtest/R.java
new file mode 100644
index 0000000..94bf93b
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/gen/org/webrtc/capturemoduleandroidtest/R.java
@@ -0,0 +1,33 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.capturemoduleandroidtest;

+

+public final class R {

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int icon=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f050000;

+        public static final int Button02=0x7f050001;

+        public static final int Button03=0x7f050002;

+        public static final int Button04=0x7f050003;

+        public static final int renderView=0x7f050004;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int app_name=0x7f040000;

+        public static final int run_button=0x7f040001;

+        public static final int run_button2=0x7f040002;

+        public static final int run_button3=0x7f040003;

+        public static final int run_button4=0x7f040004;

+    }

+}

diff --git a/trunk/src/modules/video_capture/main/test/android/jni/Android.mk b/trunk/src/modules/video_capture/main/test/android/jni/Android.mk
new file mode 100644
index 0000000..7eba756
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/jni/Android.mk
@@ -0,0 +1,19 @@
+# Android makefile for VideoCapture Module

+

+# Builds the JNI shared library used by the VideoCaptureModuleTest Android

+# app. It links against prebuilt WebRTC archives found under the relative

+# build/ directory below, so the main WebRTC build must have run first.

+LOCAL_PATH := $(call my-dir)

+

+WEBRTC_INTERFACES_PATH := $(LOCAL_PATH)/../../../../../../../build/interface

+WEBRTC_LIBS_PATH := $(LOCAL_PATH)/../../../../../../../build/libraries

+

+include $(CLEAR_VARS)

+

+LOCAL_MODULE     := VideoCaptureModuleAndroidTestJniAPI

+LOCAL_SRC_FILES  := video_capture_module_android_test_jni.cc

+LOCAL_CFLAGS     := -DWEBRTC_TARGET_PC # For typedefs.h

+LOCAL_C_INCLUDES := $(WEBRTC_INTERFACES_PATH)

+# NOTE(review): static .a archives are passed through LOCAL_LDLIBS; the NDK

+# convention is LOCAL_STATIC_LIBRARIES with PREBUILT_STATIC_LIBRARY - confirm

+# this links as intended. Also the Java activity calls

+# System.loadLibrary("ModuleVideoCaptureModuleAndroidTestJniAPI"), which does

+# not match LOCAL_MODULE above - verify the installed library name.

+LOCAL_LDLIBS     := \

+    $(WEBRTC_LIBS_PATH)/testVideoCaptureAPI_android_gcc.a \

+    $(WEBRTC_LIBS_PATH)/VideoCaptureModuleTestApiLib_android_gcc.a \

+    -llog -lgcc -lGLESv2

+include $(BUILD_SHARED_LIBRARY)

+

diff --git a/trunk/src/modules/video_capture/main/test/android/jni/org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h b/trunk/src/modules/video_capture/main/test/android/jni/org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h
new file mode 100644
index 0000000..0320df0
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/jni/org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest */
+
+#ifndef _Included_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest
+#define _Included_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest
+ * Method:    RunTest
+ * Signature: (Landroid/content/Context;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RunTest
+  (JNIEnv *, jobject, jobject);
+
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RenderInit
+(JNIEnv * env, jobject context,jobject surface);
+
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StartCapture
+(JNIEnv *, jobject);
+
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StopCapture
+(JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/trunk/src/modules/video_capture/main/test/android/jni/video_capture_module_android_test_jni.cc b/trunk/src/modules/video_capture/main/test/android/jni/video_capture_module_android_test_jni.cc
new file mode 100644
index 0000000..ac2f6e1
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/jni/video_capture_module_android_test_jni.cc
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h> // memset
+#include <android/log.h>
+
+#include "org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h"
+#include "../../../interface/video_capture_factory.h"
+#include "../../../../../video_render/main/interface/video_render.h"
+#include "../../testAPI/testPlatformDependent.h"
+#include "../../testAPI/testPlatformDependent.h"
+#ifdef RENDER_PREVIEW
+#include "../../testAPI/Renderer.h"
+#endif
+
+using namespace webrtc;
+#define WEBRTC_LOG_TAG "*WEBRTCN*" // As in WEBRTC Native...
+// Per-process state shared by the JNI entry points in this file.
+// NOTE(review): the struct layout mirrors the audio ("ADM") test glue, but
+// what it actually holds is video capture test state.
+typedef struct
+{
+    // Other
+    JavaVM* jvm; // cached in JNI_OnLoad; needed for SetAndroidObjects calls
+    Renderer* renderer; // preview renderer, only used with RENDER_PREVIEW
+    VideoCaptureModule* _videoCapture; // active capture module, NULL when stopped
+    VideoCaptureModule::DeviceInfo*_captureInfo; // device enumeration helper, NULL when stopped
+} JniData;
+
+// Global variables visible in this file
+static JniData jniData;
+
+//////////////////////////////////////////////////////////////////
+// General functions
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// JNI_OnLoad
+//
+// Called by the Android runtime when this shared library is loaded.
+// Validates the VM pointer, verifies JNI 1.4 support, zero-initializes the
+// file-global jniData and caches the JavaVM for later SetAndroidObjects
+// calls. Returns JNI_VERSION_1_4 on success or -1 to abort the load.
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "JNI_OnLoad");
+  if (!vm)
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad did not receive a valid VM pointer");
+    return -1;
+  }
+
+  // Get JNI
+  JNIEnv* env;
+  if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env), JNI_VERSION_1_4))
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad could not get JNI env");
+    return -1;
+  }
+
+  // Init JniData data
+  memset(&jniData, 0, sizeof(jniData));
+
+  // Store the JVM
+  jniData.jvm = vm;
+
+  return JNI_VERSION_1_4;
+}
+
+/////////////////////////////////////////////
+// Run Test
+//
+// Runs the platform dependent capture test suite synchronously on the
+// calling thread. 'context' is the Android application context handed to
+// the capture module; 'surface' is unused here. Always returns 0.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RunTest(
+    JNIEnv * env,
+    jobject context,
+    jobject surface)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Run test");
+  // Set instance independent Java objects
+  VideoCaptureModule::SetAndroidObjects(jniData.jvm, context);
+
+  // Start test
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "Create testPlatformDependent");
+  // NOTE(review): the local variable shadows its own type name - legal C++,
+  // but confusing to read.
+  testPlatformDependent testPlatformDependent;
+  testPlatformDependent.SetRenderer(jniData.renderer);
+  testPlatformDependent.DoTest();
+
+  // Clear instance independent Java objects
+  VideoCaptureModule::SetAndroidObjects(NULL, NULL);
+
+  return 0;
+}
+
+// Initializes rendering: hands the cached JavaVM to the render module and,
+// when built with RENDER_PREVIEW, binds 'surface' as the render window and
+// creates the preview Renderer. 'context' is unused. Returns 0.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RenderInit(
+    JNIEnv * env,
+    jobject context,
+    jobject surface)
+{
+  VideoRender::SetAndroidObjects(jniData.jvm);
+#ifdef RENDER_PREVIEW
+  Renderer::SetRenderWindow(surface);
+  jniData.renderer=new Renderer(true);
+#endif
+  // Bug fix: this function is declared to return jint but previously fell
+  // off the end without a return statement - undefined behavior in C++ for
+  // a value-returning function. Return 0 like the sibling entry points.
+  return 0;
+}
+
+// Starts native-side capture if not already running: enumerates devices,
+// allocates the first device found and starts capturing with a hard-coded
+// 176x144 @ 15 fps capability (the values from GetCapability are
+// overwritten below). Always returns 0.
+// NOTE(review): the return codes of GetDeviceName, GetCapability and
+// StartCapture are all ignored - failures are silent.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StartCapture(
+    JNIEnv * env,
+    jobject context)
+{
+  if (!jniData._captureInfo) {
+    VideoCaptureModule::SetAndroidObjects(jniData.jvm, context);
+    jniData._captureInfo = VideoCaptureFactory::CreateDeviceInfo(5);
+    WebRtc_UWord8 id[256];
+    WebRtc_UWord8 name[256];
+    // Query the first (index 0) device's name and unique id.
+    jniData._captureInfo->GetDeviceName(0, name, 256, id, 256);
+    jniData._videoCapture = VideoCaptureFactory::Create(0, id);
+    VideoCaptureCapability capability;
+
+    jniData._captureInfo->GetCapability(id, 0, capability);
+    capability.width = 176;
+    capability.height = 144;
+    capability.maxFPS = 15;
+
+    jniData._videoCapture->StartCapture(capability);
+  }
+  return 0;
+}
+
+// Stops capture and tears down the native objects created by StartCapture,
+// resetting the globals so StartCapture can run again. Always returns 0.
+// NOTE(review): _captureInfo is freed with plain delete while the module
+// goes through VideoCaptureModule::Destroy - assumes CreateDeviceInfo
+// allocates with new; confirm against the factory contract.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StopCapture(
+    JNIEnv * env,
+    jobject context)
+{
+  if (jniData._videoCapture) {
+    jniData._videoCapture->StopCapture();
+    delete jniData._captureInfo;
+    VideoCaptureModule::Destroy(jniData._videoCapture);
+    jniData._videoCapture = NULL;
+    jniData._captureInfo = NULL;
+  }
+  return 0;
+}
diff --git a/trunk/src/modules/video_capture/main/test/android/res/drawable-hdpi/icon.png b/trunk/src/modules/video_capture/main/test/android/res/drawable-hdpi/icon.png
new file mode 100644
index 0000000..8074c4c
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/res/drawable-hdpi/icon.png
Binary files differ
diff --git a/trunk/src/modules/video_capture/main/test/android/res/drawable-ldpi/icon.png b/trunk/src/modules/video_capture/main/test/android/res/drawable-ldpi/icon.png
new file mode 100644
index 0000000..1095584
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/res/drawable-ldpi/icon.png
Binary files differ
diff --git a/trunk/src/modules/video_capture/main/test/android/res/drawable-mdpi/icon.png b/trunk/src/modules/video_capture/main/test/android/res/drawable-mdpi/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/res/drawable-mdpi/icon.png
Binary files differ
diff --git a/trunk/src/modules/video_capture/main/test/android/res/layout/main.xml b/trunk/src/modules/video_capture/main/test/android/res/layout/main.xml
new file mode 100644
index 0000000..3642733
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/res/layout/main.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+  <Button android:text="@string/run_button"
+	  android:id="@+id/Button01"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Button android:text="@string/run_button2"
+	  android:id="@+id/Button02"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Button android:text="@string/run_button3"
+	  android:id="@+id/Button03"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Button android:text="@string/run_button4"
+	  android:id="@+id/Button04"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <LinearLayout 
+     android:id="@+id/renderView"
+     android:layout_width="fill_parent"
+     android:layout_height="fill_parent"
+     android:layout_weight="1">
+  </LinearLayout>
+</LinearLayout>
diff --git a/trunk/src/modules/video_capture/main/test/android/res/values/strings.xml b/trunk/src/modules/video_capture/main/test/android/res/values/strings.xml
new file mode 100644
index 0000000..bd9a752
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/res/values/strings.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    
+    <string name="app_name">VideoCaptureModuleAndroidTest</string>
+<string name="run_button">Run Test</string>
+
+<string name="run_button2">Run Java test</string>
+<string name="run_button3">Start c++ Capture</string>
+<string name="run_button4">Stop c++ Capture</string>
+
+</resources>
diff --git a/trunk/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureJavaTest.java b/trunk/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureJavaTest.java
new file mode 100644
index 0000000..7a92c48
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureJavaTest.java
@@ -0,0 +1,61 @@
+package org.webrtc.capturemoduleandroidtest;

+

+import java.util.List;

+

+import android.content.Context;

+import android.util.Log;

+

+import org.webrtc.videoengine.CaptureCapabilityAndroid;

+import org.webrtc.videoengine.VideoCaptureAndroid;

+import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid;

+

+// Pure-Java exerciser for the Android capture classes: DoTest() cycles every
+// camera through each reported capability for ~2s, while StartCapture() /
+// StopCapture() drive a single fixed 176x144@15 session for the UI buttons.
+public class VideoCaptureJavaTest {

+  void DoTest(Context context)

+  {

+    VideoCaptureDeviceInfoAndroid videoCaptureDeviceInfo =

+        VideoCaptureDeviceInfoAndroid.CreateVideoCaptureDeviceInfoAndroid(

+            5,context);

+    for(int i = 0; i < videoCaptureDeviceInfo.NumberOfDevices(); i++) {

+      String deviceUniqueId=videoCaptureDeviceInfo.GetDeviceUniqueName(i);

+      VideoCaptureAndroid videoCapture =

+          videoCaptureDeviceInfo.AllocateCamera(i,0,deviceUniqueId);

+

+      CaptureCapabilityAndroid capArray[] =

+          videoCaptureDeviceInfo.GetCapabilityArray(deviceUniqueId);

+      for(CaptureCapabilityAndroid cap: capArray) {

+        // NOTE(review): "widht" typo in the log output below - runtime
+        // string, left byte-identical here.
+        Log.d("*WEBRTC*", "Capability widht" + cap.width +

+              " height " +cap.height+ " frameRate " +cap.maxFPS);

+        int result=videoCapture.StartCapture(cap.width,

+                                             cap.height,

+                                             cap.maxFPS);

+        try{

+          Thread.sleep(2000);//sleep for 2000 ms

+        }

+        catch(InterruptedException ie){

+          //If this thread was interrupted by another thread

+        }

+        result+=videoCapture.StopCapture();

+        // result is the sum of the start and stop return codes.
+        Log.d("*WEBRTC*", "Start stop result " + result);

+      }

+      VideoCaptureAndroid.DeleteVideoCaptureAndroid(videoCapture);

+      videoCapture=null;

+    }

+    Log.d("*WEBRTC*", "Test complete");

+  }

+

+  VideoCaptureDeviceInfoAndroid _videoCaptureDeviceInfo;

+  VideoCaptureAndroid _videoCapture;

+  void StartCapture(Context context) {

+    _videoCaptureDeviceInfo =

+        VideoCaptureDeviceInfoAndroid.CreateVideoCaptureDeviceInfoAndroid(

+            5,context);

+    String deviceUniqueId=_videoCaptureDeviceInfo.GetDeviceUniqueName(0);

+    _videoCapture=_videoCaptureDeviceInfo.AllocateCamera(5,0,deviceUniqueId);

+    _videoCapture.StartCapture(176,144,15);

+  }

+  // NOTE(review): _videoCapture is not nulled after deletion; calling
+  // StopCapture twice would pass a deleted object - confirm callers
+  // guard against this (the Activity does, via _videoCaptureJavaTest).
+  void StopCapture() {

+    _videoCapture.StopCapture();

+    VideoCaptureAndroid.DeleteVideoCaptureAndroid(_videoCapture);

+ }

+

+}

diff --git a/trunk/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureModuleTest.java b/trunk/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureModuleTest.java
new file mode 100644
index 0000000..7191b1c
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureModuleTest.java
@@ -0,0 +1,142 @@
+package org.webrtc.capturemoduleandroidtest;

+

+import javax.microedition.khronos.egl.EGLConfig;

+import javax.microedition.khronos.opengles.GL10;

+

+import org.webrtc.capturemoduleandroidtest.R;

+import org.webrtc.videoengine.ViERenderer;

+

+import android.app.Activity;

+import android.opengl.GLSurfaceView;

+import android.os.Bundle;

+import android.util.Log;

+import android.view.SurfaceHolder;

+import android.view.SurfaceView;

+import android.view.View;

+import android.view.View.OnClickListener;

+import android.widget.Button;

+import android.widget.LinearLayout;

+

+// Test Activity with four buttons: Button01 runs the native test suite on a
+// worker thread, Button02 toggles the pure-Java capture test, Button03/04
+// start/stop native-side capture. Rendering goes into the renderView layout.
+public class VideoCaptureModuleTest

+    extends Activity implements OnClickListener {

+  // Set to 1 if OpenGL shall be used. 0 Otherwise

+  private final int _useOpenGL=1;

+  private Thread _testThread;

+  private SurfaceView _view=null;

+  // NOTE(review): _thisPointer is never read or written in this class.
+  private VideoCaptureModuleTest _thisPointer;

+  private VideoCaptureJavaTest _videoCaptureJavaTest;

+  /** Called when the activity is first created. */

+  @Override

+  public void onCreate(Bundle savedInstanceState) {

+    super.onCreate(savedInstanceState);

+    setContentView(R.layout.main);

+

+    final Button buttonStartCP = (Button) findViewById(R.id.Button01);

+    buttonStartCP.setOnClickListener(this);

+    final Button buttonStartJava = (Button) findViewById(R.id.Button02);

+    buttonStartJava.setOnClickListener(this);

+    final Button buttonStartCPP = (Button) findViewById(R.id.Button03);

+    buttonStartCPP.setOnClickListener(this);

+    final Button buttonStopCPP = (Button) findViewById(R.id.Button04);

+    buttonStopCPP.setOnClickListener(this);

+  }

+

+  // Worker that runs the native test suite off the UI thread (Button01).
+  private Runnable _testProc = new Runnable() {

+      public void run() {

+        // TODO: choose test from GUI

+        // Select test here, 0 for API test, 1-> for Func tests

+        RunTest(_view);

+      }

+    };

+

+  @Override

+  protected void onStart()

+  {

+    super.onStart();

+  }

+  @Override

+  protected void onRestart()

+  {

+    super.onRestart();

+  }

+  @Override

+  protected void onPause()

+  {

+    super.onPause();

+  }

+  @Override

+  protected void onStop()

+  {

+    super.onStop();

+  }

+

+  // Function used to call test

+  private native int RunTest(Object view);

+  private native int RenderInit(Object view);

+

+  private native int StartCapture();

+  private native int StopCapture();

+

+  // NOTE(review): the library name below does not match the jni Android.mk
+  // LOCAL_MODULE ("VideoCaptureModuleAndroidTestJniAPI") - verify which
+  // name the build actually installs.
+  static {

+    Log.d("*WEBRTC*",

+          "Loading ModuleVideoCaptureModuleAndroidTest...");

+    System.loadLibrary(

+        "ModuleVideoCaptureModuleAndroidTestJniAPI");

+  }

+

+  public void onClick(View v) {

+    //get the handle to the layout

+    LinearLayout renderLayout=(LinearLayout) findViewById(R.id.renderView);

+    switch(v.getId())

+    {

+      case R.id.Button01:

+        renderLayout.removeAllViews();

+        // NOTE(review): this assignment is immediately overwritten by one
+        // of the branches below - the CreateLocalRenderer result is unused.
+        _view=ViERenderer.CreateLocalRenderer(this);

+        if(_useOpenGL==1)

+        {

+          _view= ViERenderer.CreateRenderer(this, true);

+        }

+        else

+        {

+          _view= new SurfaceView(this);

+        }

+        // add the surfaceview to the layout,

+        // the surfaceview will be the same size as the layout (container)

+        renderLayout.addView(_view);

+        RenderInit(_view);

+        _testThread = new Thread(_testProc);

+        _testThread.start();

+        break;

+      case R.id.Button02:

+        _view=ViERenderer.CreateLocalRenderer(this);

+        renderLayout.removeAllViews();

+        // Button02 toggles: first press starts the Java test, second stops.
+        if(_videoCaptureJavaTest==null)

+        {

+          _videoCaptureJavaTest=new VideoCaptureJavaTest();

+          _videoCaptureJavaTest.StartCapture(this);

+          // add the surfaceview to the layout,

+          // the surfaceview will be the same size as the layout (container)

+          renderLayout.addView(_view);

+        }

+        else

+        {

+          _videoCaptureJavaTest.StopCapture();

+          _videoCaptureJavaTest=null;

+        }

+        break;

+

+      case R.id.Button03:

+        _view=ViERenderer.CreateLocalRenderer(this);

+        renderLayout.removeAllViews();

+        StartCapture();

+        // add the surfaceview to the layout,

+        // the surfaceview will be the same size as the layout (container)

+        renderLayout.addView(_view);

+        break;

+      case R.id.Button04:

+        renderLayout.removeAllViews();

+        StopCapture();

+        break;

+    }

+  }

+}
\ No newline at end of file
diff --git a/trunk/src/modules/video_capture/main/test/video_capture_unittest.cc b/trunk/src/modules/video_capture/main/test/video_capture_unittest.cc
new file mode 100644
index 0000000..ac49601
--- /dev/null
+++ b/trunk/src/modules/video_capture/main/test/video_capture_unittest.cc
@@ -0,0 +1,455 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "gtest/gtest.h"
+#include "process_thread.h"
+#include "scoped_ptr.h"
+#include "scoped_refptr.h"
+#include "tick_util.h"
+#include "video_capture.h"
+#include "video_capture_factory.h"
+
+using webrtc::TickTime;
+using webrtc::VideoCaptureAlarm;
+using webrtc::VideoCaptureCapability;
+using webrtc::VideoCaptureDataCallback;
+using webrtc::VideoCaptureFactory;
+using webrtc::VideoCaptureFeedBack;
+using webrtc::VideoCaptureModule;
+
+// Platform-neutral sleep, in milliseconds.
+#if defined(_WIN32)
+#define SLEEP(x) Sleep(x)
+#elif defined(WEBRTC_ANDROID)
+#define SLEEP(x) usleep(x*1000)
+#else
+#include <unistd.h>
+#define SLEEP(x) usleep(x * 1000)
+#endif
+
+// Evaluates |ex| repeatedly (every 5 ms) until it is true or |timeout|
+// milliseconds have elapsed; the last evaluation is stored in |res|.
+// Note: no trailing semicolon after while(0) -- the original macro ended
+// with ";\" which swallowed the following source line into the macro and
+// made it unsafe to use as a single statement (e.g. inside if/else).
+#define WAIT_(ex, timeout, res) \
+  do { \
+    res = (ex); \
+    WebRtc_Word64 start = TickTime::MillisecondTimestamp(); \
+    while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
+      SLEEP(5); \
+      res = (ex); \
+    } \
+  } while (0)
+
+// Waits until |ex| becomes true or |timeout| ms pass, then EXPECTs it.
+#define EXPECT_TRUE_WAIT(ex, timeout) \
+  do { \
+    bool res; \
+    WAIT_(ex, timeout, res); \
+    if (!res) EXPECT_TRUE(ex); \
+  } while (0)
+
+
+static const int kTimeOut = 5000;
+static const int kTestHeight = 288;
+static const int kTestWidth = 352;
+static const int kTestFramerate = 30;
+
+// Returns true when the two frames agree in length, width, height and
+// byte-for-byte buffer content.
+static bool CompareFrames(const webrtc::VideoFrame& frame1,
+                          const webrtc::VideoFrame& frame2) {
+  if (frame1.Length() != frame2.Length() ||
+      frame1.Width() != frame2.Width() ||
+      frame1.Height() != frame2.Height()) {
+    return false;
+  }
+  for (unsigned int i = 0; i < frame1.Length(); ++i) {
+    if (frame1.Buffer()[i] != frame2.Buffer()[i])
+      return false;
+  }
+  return true;
+}
+
+// Compares a planar I420 frame (frame1) against a packed I420 VideoFrame
+// (frame2). frame2's layout follows from the offsets used below: a
+// width*height Y plane, then the quarter-size U and V planes.
+static bool CompareFrames(const webrtc::VideoFrameI420& frame1,
+                          const webrtc::VideoFrame& frame2) {
+  if (frame1.width != frame2.Width() ||
+      frame1.height != frame2.Height()) {
+      return false;
+  }
+
+  // Compare Y
+  unsigned char* y_plane = frame1.y_plane;
+  for (unsigned int i = 0; i < frame2.Height(); ++i) {
+    for (unsigned int j = 0; j < frame2.Width(); ++j) {
+      if (*y_plane != *(frame2.Buffer()+i*frame2.Width() +j))
+        return false;
+      ++y_plane;
+    }
+    // Skip per-row padding: the pitch may exceed the visible width.
+    y_plane += frame1.y_pitch - frame1.width;
+  }
+
+  // Compare U
+  unsigned char* u_plane = frame1.u_plane;
+  for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
+    for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
+      if (*u_plane !=*(
+          frame2.Buffer()+frame2.Width() * frame2.Height() +
+          i*frame2.Width() / 2 + j)) {
+        return false;
+      }
+      ++u_plane;
+    }
+    u_plane += frame1.u_pitch - frame1.width / 2;
+  }
+
+  // Compare V
+  unsigned char* v_plane = frame1.v_plane;
+  for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
+    for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
+      if (*v_plane != *(
+          frame2.Buffer()+frame2.Width() * frame2.Height()* 5 / 4 +
+          i*frame2.Width() / 2 + j)) {
+        return false;
+      }
+      ++v_plane;
+    }
+    v_plane += frame1.v_pitch - frame1.width / 2;
+  }
+  return true;
+}
+
+// Capture-data sink used by the tests: verifies each delivered frame's
+// dimensions and render timestamp, and records counters/copies that the
+// tests inspect afterwards.
+class TestVideoCaptureCallback : public VideoCaptureDataCallback {
+ public:
+  TestVideoCaptureCallback()
+    : capture_delay(0),
+      last_render_time_ms(0),
+      incoming_frames(0),
+      timing_warnings(0) {
+  }
+
+  ~TestVideoCaptureCallback() {
+    if (timing_warnings > 0)
+      printf("No of timing warnings %d\n", timing_warnings);
+  }
+
+  virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
+                                       webrtc::VideoFrame& videoFrame,
+                                       webrtc::VideoCodecType codecType) {
+    int height = static_cast<int>(videoFrame.Height());
+    int width = static_cast<int>(videoFrame.Width());
+    EXPECT_EQ(height, capability.height);
+    EXPECT_EQ(width, capability.width);
+    // The render timestamp should be (close to) the current time.
+    EXPECT_TRUE(
+        videoFrame.RenderTimeMs() >= TickTime::MillisecondTimestamp()-30 &&
+        videoFrame.RenderTimeMs() <= TickTime::MillisecondTimestamp());
+
+    // Count a warning when the gap to the previous frame deviates more
+    // than 10% from the 1000/maxFPS ms expected at the configured rate.
+    if ((videoFrame.RenderTimeMs() >
+            last_render_time_ms + (1000 * 1.1) / capability.maxFPS &&
+            last_render_time_ms > 0) ||
+        (videoFrame.RenderTimeMs() <
+            last_render_time_ms + (1000 * 0.9) / capability.maxFPS &&
+            last_render_time_ms > 0)) {
+      timing_warnings++;
+    }
+
+    incoming_frames++;
+    last_render_time_ms = videoFrame.RenderTimeMs();
+    last_frame.CopyFrame(videoFrame);
+  }
+
+  virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                     const WebRtc_Word32 delay) {
+    capture_delay = delay;
+  }
+
+  VideoCaptureCapability capability;  // Expected format of incoming frames.
+  int capture_delay;                  // Last delay reported by the module.
+  WebRtc_Word64 last_render_time_ms;  // Timestamp of the previous frame.
+  int incoming_frames;                // Total frames delivered so far.
+  int timing_warnings;                // Frames with off-schedule timestamps.
+  webrtc::VideoFrame last_frame;      // Copy of the most recent frame.
+};
+
+// Receives capture feedback (measured frame rate, no-picture alarm) and
+// stores the most recent values for the tests to inspect.
+class TestVideoCaptureFeedBack : public VideoCaptureFeedBack {
+ public:
+  TestVideoCaptureFeedBack() : frame_rate(0), alarm(webrtc::Cleared) {}
+
+  virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 frameRate) {
+    frame_rate = frameRate;
+  }
+
+  virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
+                                const VideoCaptureAlarm reported_alarm) {
+    alarm = reported_alarm;
+  }
+  unsigned int frame_rate;  // Last reported capture frame rate (fps).
+  VideoCaptureAlarm alarm;  // Last reported alarm state.
+};
+
+// Fixture for tests that use real capture devices. SetUp() enumerates the
+// devices and aborts the test if none are available.
+class VideoCaptureTest : public testing::Test {
+ public:
+  VideoCaptureTest() : number_of_devices_(0) {}
+
+  void SetUp() {
+    // NOTE(review): the argument (5) appears to be an id used only for
+    // tracing -- confirm against CreateDeviceInfo() documentation.
+    device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(5));
+    number_of_devices_ = device_info_->NumberOfDevices();
+    ASSERT_GT(number_of_devices_, 0u);
+  }
+
+  // Opens capture device |device|, registers |callback| as its data sink,
+  // and returns the module, or NULL when creation fails.
+  webrtc::scoped_refptr<VideoCaptureModule> OpenVideoCaptureDevice(
+      unsigned int device,
+      VideoCaptureDataCallback* callback) {
+    WebRtc_UWord8 device_name[256];
+    WebRtc_UWord8 unique_name[256];
+
+    EXPECT_EQ(0, device_info_->GetDeviceName(
+        device, device_name, 256, unique_name, 256));
+
+    webrtc::scoped_refptr<VideoCaptureModule> module(
+        VideoCaptureFactory::Create(device, unique_name));
+    if (module.get() == NULL)
+      return NULL;
+
+    EXPECT_FALSE(module->CaptureStarted());
+
+    EXPECT_EQ(0, module->RegisterCaptureDataCallback(*callback));
+    return module;
+  }
+
+  // Starts capturing with |capability| and verifies the module echoes the
+  // same settings back through CaptureSettings().
+  void StartCapture(VideoCaptureModule* capture_module,
+                    VideoCaptureCapability capability) {
+    EXPECT_EQ(0, capture_module->StartCapture(capability));
+    EXPECT_TRUE(capture_module->CaptureStarted());
+
+    VideoCaptureCapability resulting_capability;
+    EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability));
+    EXPECT_EQ(capability, resulting_capability);
+  }
+
+  webrtc::scoped_ptr<VideoCaptureModule::DeviceInfo> device_info_;
+  unsigned int number_of_devices_;
+};
+
+// Opens, starts, and stops capture device 0 five times in a row, checking
+// frame delivery, reported capture delay, and start/stop latency bounds.
+TEST_F(VideoCaptureTest, CreateDelete) {
+  for (int i = 0; i < 5; ++i) {
+    WebRtc_Word64 start_time = TickTime::MillisecondTimestamp();
+    TestVideoCaptureCallback capture_observer;
+    webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
+        0, &capture_observer));
+    ASSERT_TRUE(module.get() != NULL);
+
+#ifndef WEBRTC_MAC
+    device_info_->GetCapability(module->CurrentDeviceName(), 0,
+                                capture_observer.capability);
+#else
+    // Capability enumeration is not supported on Mac; use fixed settings.
+    capture_observer.capability.width = kTestWidth;
+    capture_observer.capability.height = kTestHeight;
+    capture_observer.capability.maxFPS = kTestFramerate;
+    capture_observer.capability.rawType = webrtc::kVideoUnknown;
+#endif
+
+    StartCapture(module.get(), capture_observer.capability);
+
+    // Less than 4s to start the camera.
+    EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
+
+    // Make sure 5 frames are captured.
+    EXPECT_TRUE_WAIT(capture_observer.incoming_frames >= 5, kTimeOut);
+
+    EXPECT_GT(capture_observer.capture_delay, 0);
+
+    WebRtc_Word64 stop_time = TickTime::MillisecondTimestamp();
+    EXPECT_EQ(0, module->StopCapture());
+    EXPECT_FALSE(module->CaptureStarted());
+
+    // Less than 3s to stop the camera.
+    EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
+  }
+}
+
+// Iterates over every capability reported for device 0 and verifies that
+// capture can be started and delivers at least 5 frames in each mode.
+TEST_F(VideoCaptureTest, Capabilities) {
+#ifdef WEBRTC_MAC
+  printf("Video capture capabilities are not supported on Mac.\n");
+  return;
+#endif
+
+  TestVideoCaptureCallback capture_observer;
+
+  webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
+          0, &capture_observer));
+  ASSERT_TRUE(module.get() != NULL);
+
+  int number_of_capabilities = device_info_->NumberOfCapabilities(
+      module->CurrentDeviceName());
+  EXPECT_GT(number_of_capabilities, 0);
+  for (int i = 0; i < number_of_capabilities; ++i) {
+    device_info_->GetCapability(module->CurrentDeviceName(), i,
+                                capture_observer.capability);
+    StartCapture(module.get(), capture_observer.capability);
+    // Make sure 5 frames are captured.
+    EXPECT_TRUE_WAIT(capture_observer.incoming_frames >= 5, kTimeOut);
+    capture_observer.incoming_frames = 0;
+    EXPECT_EQ(0, module->StopCapture());
+  }
+}
+
+// Opens two different capture devices simultaneously and verifies that
+// both deliver frames.
+TEST_F(VideoCaptureTest, TestTwoCameras) {
+  if (number_of_devices_ < 2) {
+    printf("There are not two cameras available. Aborting test. \n");
+    return;
+  }
+
+  TestVideoCaptureCallback capture_observer1;
+  webrtc::scoped_refptr<VideoCaptureModule> module1(OpenVideoCaptureDevice(
+          0, &capture_observer1));
+  ASSERT_TRUE(module1.get() != NULL);
+
+#ifndef WEBRTC_MAC
+  device_info_->GetCapability(module1->CurrentDeviceName(), 0,
+                              capture_observer1.capability);
+#else
+  // Capability enumeration is not supported on Mac; use fixed settings.
+  capture_observer1.capability.width = kTestWidth;
+  capture_observer1.capability.height = kTestHeight;
+  capture_observer1.capability.maxFPS = kTestFramerate;
+  capture_observer1.capability.rawType = webrtc::kVideoUnknown;
+#endif
+
+  TestVideoCaptureCallback capture_observer2;
+  webrtc::scoped_refptr<VideoCaptureModule> module2(OpenVideoCaptureDevice(
+          1, &capture_observer2));
+  // Bug fix: this previously re-checked module1, so a failure to open the
+  // second device went undetected.
+  ASSERT_TRUE(module2.get() != NULL);
+
+#ifndef WEBRTC_MAC
+  device_info_->GetCapability(module2->CurrentDeviceName(), 0,
+                              capture_observer2.capability);
+#else
+  capture_observer2.capability.width = kTestWidth;
+  capture_observer2.capability.height = kTestHeight;
+  capture_observer2.capability.maxFPS = kTestFramerate;
+  capture_observer2.capability.rawType = webrtc::kVideoUnknown;
+#endif
+
+  StartCapture(module1.get(), capture_observer1.capability);
+  StartCapture(module2.get(), capture_observer2.capability);
+  EXPECT_TRUE_WAIT(capture_observer1.incoming_frames >= 5, kTimeOut);
+  EXPECT_TRUE_WAIT(capture_observer2.incoming_frames >= 5, kTimeOut);
+}
+
+// Test class for testing external capture and capture feedback information
+// such as frame rate and picture alarm.
+class VideoCaptureExternalTest : public testing::Test {
+ public:
+  void SetUp() {
+    // NOTE(review): Create(0, ...) is expected to fill in
+    // capture_input_interface_ (it is dereferenced by the tests below) --
+    // confirm against the VideoCaptureFactory external-capture contract.
+    capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
+    process_module_ = webrtc::ProcessThread::CreateProcessThread();
+    process_module_->Start();
+    process_module_->RegisterModule(capture_module_);
+
+    capture_callback_.capability.width = kTestWidth;
+    capture_callback_.capability.height = kTestHeight;
+    capture_callback_.capability.rawType = webrtc::kVideoYV12;
+    capture_callback_.capability.maxFPS = kTestFramerate;
+
+    // Build a kTestWidth x kTestHeight planar reference frame, every byte
+    // set to 127.
+    test_frame_.VerifyAndAllocate(kTestWidth * kTestHeight * 3 / 2);
+    test_frame_.SetLength(kTestWidth * kTestHeight * 3 / 2);
+    test_frame_.SetHeight(kTestHeight);
+    test_frame_.SetWidth(kTestWidth);
+    SLEEP(1); // Wait 1ms so that two tests can't have the same timestamp.
+    memset(test_frame_.Buffer(), 127, test_frame_.Length());
+
+    EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback(
+        capture_callback_));
+    EXPECT_EQ(0, capture_module_->RegisterCaptureCallback(capture_feedback_));
+    EXPECT_EQ(0, capture_module_->EnableFrameRateCallback(true));
+    EXPECT_EQ(0, capture_module_->EnableNoPictureAlarm(true));
+  }
+
+  void TearDown() {
+    process_module_->Stop();
+    webrtc::ProcessThread::DestroyProcessThread(process_module_);
+  }
+
+  webrtc::VideoCaptureExternal* capture_input_interface_;
+  webrtc::scoped_refptr<VideoCaptureModule> capture_module_;
+  webrtc::ProcessThread* process_module_;
+  webrtc::VideoFrame test_frame_;  // Reference frame fed to the module.
+  TestVideoCaptureCallback capture_callback_;
+  TestVideoCaptureFeedBack capture_feedback_;
+};
+
+// Feeds one external frame and verifies it is delivered unchanged through
+// the capture data callback.
+TEST_F(VideoCaptureExternalTest , TestExternalCapture) {
+  EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
+      test_frame_.Buffer(), test_frame_.Length(), capture_callback_.capability,
+      0));
+  EXPECT_TRUE(CompareFrames(test_frame_, capture_callback_.last_frame));
+}
+
+// Feeds one planar I420 frame through IncomingFrameI420() and verifies the
+// delivered frame matches plane by plane.
+TEST_F(VideoCaptureExternalTest , TestExternalCaptureI420) {
+  webrtc::VideoFrameI420 frame_i420;
+  frame_i420.width = kTestWidth;
+  frame_i420.height = kTestHeight;
+  // Reuse test_frame_'s packed buffer: full-size Y plane followed by the
+  // quarter-size U and V planes; pitches equal the plane widths.
+  frame_i420.y_plane = test_frame_.Buffer();
+  frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight);
+  frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2);
+  frame_i420.y_pitch = kTestWidth;
+  frame_i420.u_pitch = kTestWidth / 2;
+  frame_i420.v_pitch = kTestWidth / 2;
+  EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0));
+
+  EXPECT_TRUE(CompareFrames(frame_i420, capture_callback_.last_frame));
+}
+
+// Checks the frame-rate feedback and the no-picture alarm: feed frames at
+// ~10 fps for 3 s, pause so the alarm is raised, then feed at ~30 fps and
+// check that the alarm clears.
+TEST_F(VideoCaptureExternalTest , FrameRate) {
+  WebRtc_Word64 testTime = 3;
+  TickTime startTime = TickTime::Now();
+  capture_callback_.capability.maxFPS = 10;
+  while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
+        test_frame_.Buffer(), test_frame_.Length(),
+        capture_callback_.capability, 0));
+    SLEEP(1000 / capture_callback_.capability.maxFPS);
+  }
+  EXPECT_TRUE(capture_feedback_.frame_rate >= 8 &&
+              capture_feedback_.frame_rate <= 10);
+  // After 500 ms without frames the no-picture alarm should be raised.
+  SLEEP(500);
+  EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm);
+
+  startTime = TickTime::Now();
+  capture_callback_.capability.maxFPS = 30;
+  while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
+        test_frame_.Buffer(), test_frame_.Length(),
+        capture_callback_.capability, 0));
+    SLEEP(1000 / capture_callback_.capability.maxFPS);
+  }
+  EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm);
+  // Frame rate might be less than 33 since we have paused providing
+  // frames for a while.
+  EXPECT_TRUE(capture_feedback_.frame_rate >= 25 &&
+              capture_feedback_.frame_rate <= 33);
+}
+
+// Checks StartSendImage(): the module should repeatedly deliver the given
+// image at the requested rate until StopSendImage() is called, and stop
+// delivering afterwards.
+TEST_F(VideoCaptureExternalTest , StartImage) {
+  capture_callback_.capability.maxFPS = 10;
+  EXPECT_EQ(0, capture_module_->StartSendImage(
+      test_frame_, capture_callback_.capability.maxFPS));
+
+  EXPECT_TRUE_WAIT(capture_callback_.incoming_frames == 5, kTimeOut);
+  EXPECT_EQ(0, capture_module_->StopSendImage());
+
+  SLEEP(200);
+  // Test that no more start images have arrived.
+  EXPECT_TRUE(capture_callback_.incoming_frames >= 4 &&
+              capture_callback_.incoming_frames <= 5);
+  EXPECT_TRUE(CompareFrames(test_frame_, capture_callback_.last_frame));
+}
+
diff --git a/trunk/src/modules/video_coding/codecs/OWNERS b/trunk/src/modules/video_coding/codecs/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/trunk/src/modules/video_coding/codecs/i420/main/interface/i420.h b/trunk/src/modules/video_coding/codecs/i420/main/interface/i420.h
new file mode 100644
index 0000000..fb4ba8c
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/i420/main/interface/i420.h
@@ -0,0 +1,156 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
+
+#include "video_codec_interface.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// Trivial "encoder" that passes I420 frames through unmodified, copying
+// each input frame into an EncodedImage and handing it to the registered
+// encode-complete callback.
+class I420Encoder : public VideoEncoder
+{
+public:
+
+    I420Encoder();
+
+    virtual ~I420Encoder();
+
+// Initialize the encoder with the information from the VideoCodec
+//
+// Input:
+//          - codecSettings     : Codec settings
+//          - numberOfCores     : Number of cores available for the encoder
+//          - maxPayloadSize    : The maximum size each payload is allowed
+//                                to have. Usually MTU - overhead.
+//
+// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+//                                <0 - Error
+    virtual WebRtc_Word32 InitEncode(const VideoCodec* codecSettings, WebRtc_Word32 /*numberOfCores*/, WebRtc_UWord32 /*maxPayloadSize*/);
+
+// "Encode" an I420 image (as a part of a video stream). The encoded image
+// will be returned to the user via the encode complete callback.
+//
+// Input:
+//          - inputImage        : Image to be encoded
+//          - codecSpecificInfo : Pointer to codec specific data
+//          - frameType         : Frame type to be sent (Key/Delta).
+//
+// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+//                                <0 - Error
+    virtual WebRtc_Word32
+        Encode(const RawImage& inputImage,
+               const CodecSpecificInfo* /*codecSpecificInfo*/,
+               const VideoFrameType* /*frameTypes*/);
+
+// Register an encode complete callback object.
+//
+// Input:
+//          - callback         : Callback object which handles encoded images.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
+
+// Free encoder memory.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Release();
+
+    virtual WebRtc_Word32 SetRates(WebRtc_UWord32 /*newBitRate*/,
+                                   WebRtc_UWord32 /*frameRate*/)
+    {return WEBRTC_VIDEO_CODEC_OK;}
+
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 /*packetLoss*/,
+                                               int /*rtt*/)
+    {return WEBRTC_VIDEO_CODEC_OK;}
+
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
+                                                WebRtc_Word32 /*size*/)
+    {return WEBRTC_VIDEO_CODEC_OK;}
+
+private:
+    bool                     _inited;                   // True after successful InitEncode().
+    EncodedImage             _encodedImage;             // Reused output buffer.
+    EncodedImageCallback*    _encodedCompleteCallback;  // Receives "encoded" frames.
+
+}; // end of I420Encoder class
+
+// Trivial "decoder" that passes I420 frames through unmodified, copying
+// each input into a RawImage sized from the InitDecode() dimensions and
+// handing it to the registered decode-complete callback.
+class I420Decoder : public VideoDecoder
+{
+public:
+
+    I420Decoder();
+
+    virtual ~I420Decoder();
+
+// Initialize the decoder.
+// The user must notify the codec of width and height values.
+//
+// Return value         :  WEBRTC_VIDEO_CODEC_OK.
+//                        <0 - Errors
+    virtual WebRtc_Word32 InitDecode(const VideoCodec* codecSettings, WebRtc_Word32 /*numberOfCores*/);
+
+    virtual WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/){return WEBRTC_VIDEO_CODEC_OK;};
+
+// Decode encoded image (as a part of a video stream). The decoded image
+// will be returned to the user through the decode complete callback.
+//
+// Input:
+//          - inputImage        : Encoded image to be decoded
+//          - missingFrames     : True if one or more frames have been lost
+//                                since the previous decode call.
+//          - codecSpecificInfo : pointer to specific codec data
+//          - renderTimeMs      : Render time in Ms
+//
+// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+//                                 <0 - Error
+    virtual WebRtc_Word32 Decode(
+        const EncodedImage& inputImage,
+        bool missingFrames,
+        const RTPFragmentationHeader* /*fragmentation*/,
+        const CodecSpecificInfo* /*codecSpecificInfo*/,
+        WebRtc_Word64 /*renderTimeMs*/);
+
+// Register a decode complete callback object.
+//
+// Input:
+//          - callback         : Callback object which handles decoded images.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 RegisterDecodeCompleteCallback(DecodedImageCallback* callback);
+
+// Free decoder memory.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK
+//                                  <0 - Error
+    virtual WebRtc_Word32 Release();
+
+// Reset decoder state and prepare for a new call.
+//
+// Return value         :  WEBRTC_VIDEO_CODEC_OK.
+//                          <0 - Error
+    virtual WebRtc_Word32 Reset();
+
+private:
+
+    RawImage                    _decodedImage;           // Reused output buffer.
+    WebRtc_Word32               _width;                  // Set by InitDecode().
+    WebRtc_Word32               _height;                 // Set by InitDecode().
+    bool                        _inited;                 // True after InitDecode().
+    DecodedImageCallback*       _decodeCompleteCallback; // Receives decoded frames.
+
+
+}; // end of I420Decoder class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
diff --git a/trunk/src/modules/video_coding/codecs/i420/main/source/Android.mk b/trunk/src/modules/video_coding/codecs/i420/main/source/Android.mk
new file mode 100644
index 0000000..d73e77d
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/i420/main/source/Android.mk
@@ -0,0 +1,41 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings (defines MY_WEBRTC_COMMON_DEFS).
+include $(LOCAL_PATH)/../../../../../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_i420
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := i420.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../../../.. \
+    $(LOCAL_PATH)/../../../../../../common_video/interface \
+    $(LOCAL_PATH)/../../../../../../system_wrappers/interface 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# Full-platform builds use the in-tree stlport makefile; NDK builds bring
+# their own STL configuration.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/video_coding/codecs/i420/main/source/i420.cc b/trunk/src/modules/video_coding/codecs/i420/main/source/i420.cc
new file mode 100644
index 0000000..b73a00a
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/i420/main/source/i420.cc
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "i420.h"
+#include <string.h>
+
+namespace webrtc
+{
+
+// Constructs an uninitialized encoder; InitEncode() must be called before
+// Encode() is used.
+I420Encoder::I420Encoder():
+_inited(false),
+_encodedImage(),
+_encodedCompleteCallback(NULL)
+{
+     // Nothing to do beyond the member initializers.
+}
+
+I420Encoder::~I420Encoder()
+{
+    // Release() frees the encoded-image buffer and clears the init flag;
+    // delegate to it so the cleanup logic lives in one place instead of
+    // being duplicated here.
+    Release();
+}
+
+// Frees the encoded-image buffer and returns the encoder to the
+// uninitialized state. Always returns WEBRTC_VIDEO_CODEC_OK.
+WebRtc_Word32
+I420Encoder::Release()
+{
+    if (_encodedImage._buffer != NULL)
+    {
+        delete [] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+        // Bug fix: also reset _size. Leaving it stale let a later Encode()
+        // skip re-allocation and memcpy into the NULL buffer.
+        _encodedImage._size = 0;
+    }
+    _inited = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Validates |codecSettings| and allocates the output buffer sized for one
+// I420 frame (width*height*3/2 bytes). Returns WEBRTC_VIDEO_CODEC_OK, or
+// WEBRTC_VIDEO_CODEC_ERR_PARAMETER / WEBRTC_VIDEO_CODEC_MEMORY on failure.
+WebRtc_Word32
+I420Encoder::InitEncode(const VideoCodec* codecSettings,
+                              WebRtc_Word32 /*numberOfCores*/,
+                              WebRtc_UWord32 /*maxPayloadSize */)
+{
+    if (codecSettings == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (codecSettings->width < 1 || codecSettings->height < 1)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+
+    // Drop any previous output buffer before allocating a new one.
+
+    if (_encodedImage._buffer != NULL)
+    {
+        delete [] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+        _encodedImage._size = 0;
+    }
+    const WebRtc_UWord32 newSize = (3 * codecSettings->width *
+                                      codecSettings->height) >> 1;
+    WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+    if (newBuffer == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    _encodedImage._size = newSize;
+    _encodedImage._buffer = newBuffer;
+
+    // Only mark the encoder initialized once the allocation succeeded.
+    _inited = true;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+
+WebRtc_Word32
+I420Encoder::Encode(const RawImage& inputImage,
+                    const CodecSpecificInfo* /*codecSpecificInfo*/,
+                    const VideoFrameType* /*frameTypes*/)
+{
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    if (_encodedCompleteCallback == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+
+    _encodedImage._frameType = kKeyFrame; // no coding
+    _encodedImage._timeStamp = inputImage._timeStamp;
+    _encodedImage._encodedHeight = inputImage._height;
+    _encodedImage._encodedWidth = inputImage._width;
+    if (inputImage._length > _encodedImage._size)
+    {
+
+        // allocating encoded memory
+        if (_encodedImage._buffer != NULL)
+        {
+            delete [] _encodedImage._buffer;
+            _encodedImage._buffer = NULL;
+            _encodedImage._size = 0;
+        }
+        const WebRtc_UWord32 newSize = (3 * _encodedImage._encodedWidth * _encodedImage._encodedHeight) >> 1;
+        WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+        if (newBuffer == NULL)
+        {
+            return WEBRTC_VIDEO_CODEC_MEMORY;
+        }
+        _encodedImage._size = newSize;
+        _encodedImage._buffer = newBuffer;
+    }
+    memcpy(_encodedImage._buffer, inputImage._buffer, inputImage._length);
+    _encodedImage._length = inputImage._length;
+    _encodedCompleteCallback->Encoded(_encodedImage);
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+// Registers the callback that receives the "encoded" (copied) frames.
+WebRtc_Word32
+I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback)
+{
+    _encodedCompleteCallback = callback;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+// Constructs an uninitialized decoder; InitDecode() must be called before
+// Decode() is used.
+I420Decoder::I420Decoder():
+_decodedImage(),
+_width(0),
+_height(0),
+_inited(false),
+_decodeCompleteCallback(NULL)
+{
+    // Nothing to do beyond the member initializers.
+}
+
+// Frees the decoded-image buffer via Release().
+I420Decoder::~I420Decoder()
+{
+    Release();
+}
+
+// The pass-through decoder keeps no inter-frame state, so there is
+// nothing to reset.
+WebRtc_Word32
+I420Decoder::Reset()
+{
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+// Stores the frame dimensions used to size the decoded-image buffer.
+// Returns WEBRTC_VIDEO_CODEC_OK, or WEBRTC_VIDEO_CODEC_ERR_PARAMETER when
+// |codecSettings| is missing or carries non-positive dimensions.
+WebRtc_Word32
+I420Decoder::InitDecode(const VideoCodec* codecSettings, WebRtc_Word32 /*numberOfCores */)
+{
+    if (codecSettings == NULL ||
+        codecSettings->width < 1 ||
+        codecSettings->height < 1)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    _width = codecSettings->width;
+    _height = codecSettings->height;
+    _inited = true;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// "Decodes" by copying |inputImage| into the decoded image and delivering
+// it to the decode-complete callback. Returns WEBRTC_VIDEO_CODEC_OK, or
+// ERR_PARAMETER / UNINITIALIZED / MEMORY on failure.
+WebRtc_Word32
+I420Decoder::Decode(const EncodedImage& inputImage,
+                    bool /*missingFrames*/,
+                    const RTPFragmentationHeader* /*fragmentation*/,
+                    const CodecSpecificInfo* /*codecSpecificInfo*/,
+                    WebRtc_Word64 /*renderTimeMs*/)
+{
+    if (inputImage._buffer == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    if (inputImage._length <= 0)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (!_inited)
+    {
+       return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+
+    // Size the buffer for an I420 frame of the configured dimensions, but
+    // never smaller than the input, so the memcpy below cannot overflow.
+    WebRtc_UWord32 requiredSize = (3 * _width * _height) >> 1;
+    if (requiredSize < inputImage._length)
+    {
+        requiredSize = inputImage._length;
+    }
+    // Bug fix: the original freed the buffer unconditionally and then
+    // tested "buffer == NULL" (always true), re-allocating on every frame.
+    // Only (re)allocate when the current buffer is missing or too small.
+    if (_decodedImage._buffer == NULL || _decodedImage._size < requiredSize)
+    {
+        if (_decodedImage._buffer != NULL)
+        {
+            delete [] _decodedImage._buffer;
+            _decodedImage._buffer = NULL;
+            _decodedImage._size = 0;
+        }
+        WebRtc_UWord8* newBuffer = new WebRtc_UWord8[requiredSize];
+        if (newBuffer == NULL)
+        {
+            return WEBRTC_VIDEO_CODEC_MEMORY;
+        }
+        _decodedImage._size = requiredSize;
+        _decodedImage._buffer = newBuffer;
+    }
+
+    // Set decoded image parameters and hand the frame to the callback.
+    _decodedImage._height = _height;
+    _decodedImage._width = _width;
+    _decodedImage._timeStamp = inputImage._timeStamp;
+    memcpy(_decodedImage._buffer, inputImage._buffer, inputImage._length);
+    _decodedImage._length = inputImage._length;
+
+    _decodeCompleteCallback->Decoded(_decodedImage);
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Registers the callback that receives the decoded frames.
+WebRtc_Word32
+I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
+{
+    _decodeCompleteCallback = callback;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Frees the decoded-image buffer and returns the decoder to the
+// uninitialized state. Always returns WEBRTC_VIDEO_CODEC_OK.
+WebRtc_Word32
+I420Decoder::Release()
+{
+    if (_decodedImage._buffer != NULL)
+    {
+        delete [] _decodedImage._buffer;
+        _decodedImage._buffer = NULL;
+        // Bug fix: also reset _size so it never describes a freed buffer.
+        _decodedImage._size = 0;
+    }
+    _inited = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/codecs/i420/main/source/i420.gypi b/trunk/src/modules/video_coding/codecs/i420/main/source/i420.gypi
new file mode 100644
index 0000000..af13f8d
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/i420/main/source/i420.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_i420',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+        '../../../../../../common_video/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../../../../../common_video/interface',
+        ],
+      },
+      'sources': [
+        '../interface/i420.h',
+        'i420.cc',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/trunk/src/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
new file mode 100644
index 0000000..f61fce5
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+
+#include <string>
+
+#include "gmock/gmock.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class MockEncodedImageCallback : public EncodedImageCallback {
+ public:
+  MOCK_METHOD3(Encoded,
+               WebRtc_Word32(EncodedImage& encodedImage,
+                             const CodecSpecificInfo* codecSpecificInfo,
+                             const RTPFragmentationHeader* fragmentation));
+};
+
+class MockVideoEncoder : public VideoEncoder {
+ public:
+  MOCK_CONST_METHOD2(Version,
+                     WebRtc_Word32(WebRtc_Word8 *version,
+                                   WebRtc_Word32 length));
+  MOCK_METHOD3(InitEncode,
+               WebRtc_Word32(const VideoCodec* codecSettings,
+                             WebRtc_Word32 numberOfCores,
+                             WebRtc_UWord32 maxPayloadSize));
+  MOCK_METHOD3(Encode,
+               WebRtc_Word32(const RawImage& inputImage,
+                             const CodecSpecificInfo* codecSpecificInfo,
+                             const VideoFrameType* frameType));
+  MOCK_METHOD1(RegisterEncodeCompleteCallback,
+               WebRtc_Word32(EncodedImageCallback* callback));
+  MOCK_METHOD0(Release, WebRtc_Word32());
+  MOCK_METHOD0(Reset, WebRtc_Word32());
+  MOCK_METHOD2(SetChannelParameters, WebRtc_Word32(WebRtc_UWord32 packetLoss,
+                                                   int rtt));
+  MOCK_METHOD2(SetRates,
+               WebRtc_Word32(WebRtc_UWord32 newBitRate,
+                             WebRtc_UWord32 frameRate));
+  MOCK_METHOD1(SetPeriodicKeyFrames, WebRtc_Word32(bool enable));
+  MOCK_METHOD2(CodecConfigParameters,
+               WebRtc_Word32(WebRtc_UWord8* /*buffer*/, WebRtc_Word32));
+};
+
+class MockDecodedImageCallback : public DecodedImageCallback {
+ public:
+  MOCK_METHOD1(Decoded,
+               WebRtc_Word32(RawImage& decodedImage));
+  MOCK_METHOD1(ReceivedDecodedReferenceFrame,
+               WebRtc_Word32(const WebRtc_UWord64 pictureId));
+  MOCK_METHOD1(ReceivedDecodedFrame,
+               WebRtc_Word32(const WebRtc_UWord64 pictureId));
+};
+
+class MockVideoDecoder : public VideoDecoder {
+ public:
+  MOCK_METHOD2(InitDecode,
+      WebRtc_Word32(const VideoCodec* codecSettings,
+                    WebRtc_Word32 numberOfCores));
+  MOCK_METHOD5(Decode,
+               WebRtc_Word32(const EncodedImage& inputImage,
+                             bool missingFrames,
+                             const RTPFragmentationHeader* fragmentation,
+                             const CodecSpecificInfo* codecSpecificInfo,
+                             WebRtc_Word64 renderTimeMs));
+  MOCK_METHOD1(RegisterDecodeCompleteCallback,
+               WebRtc_Word32(DecodedImageCallback* callback));
+  MOCK_METHOD0(Release, WebRtc_Word32());
+  MOCK_METHOD0(Reset, WebRtc_Word32());
+  MOCK_METHOD2(SetCodecConfigParameters,
+               WebRtc_Word32(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32));
+  MOCK_METHOD0(Copy, VideoDecoder*());
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
diff --git a/trunk/src/modules/video_coding/codecs/interface/video_codec_interface.h b/trunk/src/modules/video_coding/codecs/interface/video_codec_interface.h
new file mode 100644
index 0000000..9aa5cb7
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/interface/video_codec_interface.h
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
+
+#include "common_types.h"
+#include "common_video/interface/video_image.h"
+#include "modules/video_coding/codecs/interface/video_error_codes.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class RTPFragmentationHeader; // forward declaration
+
+// Note: if any pointers are added to this struct, it must be fitted
+// with a copy-constructor. See below.
+struct CodecSpecificInfoVP8
+{
+    bool             hasReceivedSLI;
+    WebRtc_UWord8    pictureIdSLI;
+    bool             hasReceivedRPSI;
+    WebRtc_UWord64   pictureIdRPSI;
+    WebRtc_Word16    pictureId;         // negative value to skip pictureId
+    bool             nonReference;
+    WebRtc_UWord8    simulcastIdx;
+    WebRtc_UWord8    temporalIdx;
+    bool             layerSync;
+    int              tl0PicIdx;         // Negative value to skip tl0PicIdx
+    WebRtc_Word8     keyIdx;            // negative value to skip keyIdx
+};
+
+union CodecSpecificInfoUnion
+{
+    CodecSpecificInfoVP8       VP8;
+};
+
+// Note: if any pointers are added to this struct or its sub-structs, it
+// must be fitted with a copy-constructor. This is because it is copied
+// in the copy-constructor of VCMEncodedFrame.
+struct CodecSpecificInfo
+{
+    VideoCodecType   codecType;
+    CodecSpecificInfoUnion codecSpecific;
+};
+
+class EncodedImageCallback
+{
+public:
+    virtual ~EncodedImageCallback() {};
+
+    // Callback function which is called when an image has been encoded.
+    //
+    // Input:
+    //          - encodedImage         : The encoded image
+    //
+    // Return value                    : > 0,   signals to the caller that one or more future frames
+    //                                          should be dropped to keep bit rate or frame rate.
+    //                                   = 0,   if OK.
+    //                                   < 0,   on error.
+    virtual WebRtc_Word32
+    Encoded(EncodedImage& encodedImage,
+            const CodecSpecificInfo* codecSpecificInfo = NULL,
+            const RTPFragmentationHeader* fragmentation = NULL) = 0;
+};
+
+class VideoEncoder
+{
+public:
+    virtual ~VideoEncoder() {};
+
+    // Initialize the encoder with the information from the VideoCodec.
+    //
+    // Input:
+    //          - codecSettings     : Codec settings
+    //          - numberOfCores     : Number of cores available for the encoder
+    //          - maxPayloadSize    : The maximum size each payload is allowed
+    //                                to have. Usually MTU - overhead.
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 InitEncode(const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores, WebRtc_UWord32 maxPayloadSize) = 0;
+
+    // Encode an I420 image (as a part of a video stream). The encoded image
+    // will be returned to the user through the encode complete callback.
+    //
+    // Input:
+    //          - inputImage        : Image to be encoded
+    //          - codecSpecificInfo : Pointer to codec specific data
+    //          - frameType         : The frame type to encode
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32
+    Encode(const RawImage& inputImage,
+           const CodecSpecificInfo* codecSpecificInfo,
+           const VideoFrameType* frameTypes) = 0;
+
+    // Register an encode complete callback object.
+    //
+    // Input:
+    //          - callback         : Callback object which handles encoded images.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 RegisterEncodeCompleteCallback(EncodedImageCallback* callback) = 0;
+
+    // Free encoder memory.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Release() = 0;
+
+    // Inform the encoder about the packet loss and round trip time on the
+    // network used to decide the best pattern and signaling.
+    //
+    //          - packetLoss       : Fraction lost (loss rate in percent =
+    //                               100 * packetLoss / 255)
+    //          - rtt              : Round-trip time in milliseconds
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                               int rtt) = 0;
+
+    // Inform the encoder about the new target bit rate.
+    //
+    //          - newBitRate       : New target bit rate
+    //          - frameRate        : The target frame rate
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate) = 0;
+
+    // Use this function to enable or disable periodic key frames. Can be useful for codecs
+    // which have other ways of stopping error propagation.
+    //
+    //          - enable           : Enable or disable periodic key frames
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable) { return WEBRTC_VIDEO_CODEC_ERROR; }
+
+    // Codec configuration data to send out-of-band, i.e. in SIP call setup
+    //
+    //          - buffer           : Buffer pointer to where the configuration data
+    //                               should be stored
+    //          - size             : The size of the buffer in bytes
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
+};
+
+class DecodedImageCallback
+{
+public:
+    virtual ~DecodedImageCallback() {};
+
+    // Callback function which is called when an image has been decoded.
+    //
+    // Input:
+    //          - decodedImage         : The decoded image
+    //
+    // Return value                    : 0 if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Decoded(RawImage& decodedImage) = 0;
+
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}
+
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId) {return -1;}
+};
+
+class VideoDecoder
+{
+public:
+    virtual ~VideoDecoder() {};
+
+    // Initialize the decoder with the information from the VideoCodec.
+    //
+    // Input:
+    //          - inst              : Codec settings
+    //          - numberOfCores     : Number of cores available for the decoder
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 InitDecode(const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores) = 0;
+
+    // Decode encoded image (as a part of a video stream). The decoded image
+    // will be returned to the user through the decode complete callback.
+    //
+    // Input:
+    //          - inputImage        : Encoded image to be decoded
+    //          - missingFrames     : True if one or more frames have been lost
+    //                                since the previous decode call.
+    //          - fragmentation     : Specifies where the encoded frame can be
+    //                                split into separate fragments. The meaning
+    //                                of fragment is codec specific, but often
+    //                                means that each fragment is decodable by
+    //                                itself.
+    //          - codecSpecificInfo : Pointer to codec specific data
+    //          - renderTimeMs      : System time to render in milliseconds. Only
+    //                                used by decoders with internal rendering.
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32
+    Decode(const EncodedImage& inputImage,
+           bool missingFrames,
+           const RTPFragmentationHeader* fragmentation,
+           const CodecSpecificInfo* codecSpecificInfo = NULL,
+           WebRtc_Word64 renderTimeMs = -1) = 0;
+
+    // Register a decode complete callback object.
+    //
+    // Input:
+    //          - callback         : Callback object which handles decoded images.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 RegisterDecodeCompleteCallback(DecodedImageCallback* callback) = 0;
+
+    // Free decoder memory.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Release() = 0;
+
+    // Reset decoder state and prepare for a new call.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Reset() = 0;
+
+    // Codec configuration data sent out-of-band, i.e. in SIP call setup
+    //
+    // Input/Output:
+    //          - buffer           : Buffer pointer to the configuration data
+    //          - size             : The size of the configuration data in
+    //                               bytes
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
+
+    // Create a copy of the codec and its internal state.
+    //
+    // Return value                : A copy of the instance if OK, NULL otherwise.
+    virtual VideoDecoder* Copy() { return NULL; }
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
diff --git a/trunk/src/modules/video_coding/codecs/interface/video_error_codes.h b/trunk/src/modules/video_coding/codecs/interface/video_error_codes.h
new file mode 100644
index 0000000..dfa3f53
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/interface/video_error_codes.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
+
+// NOTE: in sync with video_coding_module_defines.h
+
+// Define return values
+
+#define WEBRTC_VIDEO_CODEC_REQUEST_SLI 2
+#define WEBRTC_VIDEO_CODEC_NO_OUTPUT 1
+#define WEBRTC_VIDEO_CODEC_OK 0
+#define WEBRTC_VIDEO_CODEC_ERROR -1
+#define WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED -2
+#define WEBRTC_VIDEO_CODEC_MEMORY -3
+#define WEBRTC_VIDEO_CODEC_ERR_PARAMETER -4
+#define WEBRTC_VIDEO_CODEC_ERR_SIZE -5
+#define WEBRTC_VIDEO_CODEC_TIMEOUT -6
+#define WEBRTC_VIDEO_CODEC_UNINITIALIZED -7
+#define WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI -12
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
diff --git a/trunk/src/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h b/trunk/src/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
new file mode 100644
index 0000000..57d21ca
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+
+#include <string>
+
+#include "common_video/interface/video_image.h"
+#include "gmock/gmock.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class MockPacketManipulator : public PacketManipulator {
+ public:
+  MOCK_METHOD1(ManipulatePackets, int(webrtc::EncodedImage* encoded_image));
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
diff --git a/trunk/src/modules/video_coding/codecs/test/packet_manipulator.cc b/trunk/src/modules/video_coding/codecs/test/packet_manipulator.cc
new file mode 100644
index 0000000..acdb2e5
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/packet_manipulator.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+
+#include <cassert>
+#include <cstdio>
+
+namespace webrtc {
+namespace test {
+
+PacketManipulatorImpl::PacketManipulatorImpl(PacketReader* packet_reader,
+                                             const NetworkingConfig& config,
+                                             bool verbose)
+    : packet_reader_(packet_reader),
+      config_(config),
+      active_burst_packets_(0),
+      critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      random_seed_(1),
+      verbose_(verbose) {
+  assert(packet_reader);
+}
+
+PacketManipulatorImpl::~PacketManipulatorImpl() {
+  delete critsect_;
+}
+
+int PacketManipulatorImpl::ManipulatePackets(
+    webrtc::EncodedImage* encoded_image) {
+  assert(encoded_image);
+  int nbr_packets_dropped = 0;
+  // There's no need to build a copy of the image data since viewing an
+  // EncodedImage object, setting the length to a new lower value represents
+  // that everything is dropped after that position in the byte array.
+  // EncodedImage._size is the allocated bytes.
+  // EncodedImage._length is how many that are filled with data.
+  int new_length = 0;
+  packet_reader_->InitializeReading(encoded_image->_buffer,
+                                    encoded_image->_length,
+                                    config_.packet_size_in_bytes);
+  WebRtc_UWord8* packet = NULL;
+  int nbr_bytes_to_read;
+  // keep track of whether we've lost any packets, since then we shall lose
+  // the remains of the current frame:
+  bool packet_loss_has_occurred = false;
+  while ((nbr_bytes_to_read = packet_reader_->NextPacket(&packet)) > 0) {
+    // Check if we're currently in a packet loss burst that is not completed:
+    if (active_burst_packets_ > 0) {
+      active_burst_packets_--;
+      nbr_packets_dropped++;
+    } else if (RandomUniform() < config_.packet_loss_probability ||
+        packet_loss_has_occurred) {
+      packet_loss_has_occurred = true;
+      nbr_packets_dropped++;
+      if (config_.packet_loss_mode == kBurst) {
+        // Initiate a new burst
+        active_burst_packets_ = config_.packet_loss_burst_length - 1;
+      }
+    } else {
+      new_length += nbr_bytes_to_read;
+    }
+  }
+  encoded_image->_length = new_length;
+  if (nbr_packets_dropped > 0) {
+    // Must set completeFrame to false to inform the decoder about this:
+    encoded_image->_completeFrame = false;
+    if (verbose_) {
+      printf("Dropped %d packets for frame %d (frame length: %d)\n",
+             nbr_packets_dropped, encoded_image->_timeStamp,
+             encoded_image->_length);
+    }
+  }
+  return nbr_packets_dropped;
+}
+
+void PacketManipulatorImpl::InitializeRandomSeed(unsigned int seed) {
+  random_seed_ = seed;
+}
+
+inline double PacketManipulatorImpl::RandomUniform() {
+  // Use the previous result as new seed before each rand() call. Doing this
+  // it doesn't matter if other threads are calling rand() since we'll always
+  // get the same behavior as long as we're using a fixed initial seed.
+  critsect_->Enter();
+  srand(random_seed_);
+  random_seed_ = std::rand();
+  critsect_->Leave();
+  return (random_seed_ + 1.0)/(RAND_MAX + 1.0);
+}
+
+const char* PacketLossModeToStr(PacketLossMode e) {
+  switch (e) {
+    case kUniform:
+      return "Uniform";
+    case kBurst:
+      return "Burst";
+    default:
+      assert(false);
+      return "Unknown";
+  }
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/packet_manipulator.h b/trunk/src/modules/video_coding/codecs/test/packet_manipulator.h
new file mode 100644
index 0000000..e3891cb
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/packet_manipulator.h
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
+
+#include <cstdlib>
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "testsupport/packet_reader.h"
+
+namespace webrtc {
+namespace test {
+
+// Which mode the packet loss shall be performed according to.
+enum PacketLossMode {
+  // Drops packets with a configured probability independently for each packet
+  kUniform,
+  // Drops packets similar to uniform but when a packet is being dropped,
+  // the number of lost packets in a row is equal to the configured burst
+  // length.
+  kBurst
+};
+// Returns a string representation of the enum value.
+const char* PacketLossModeToStr(PacketLossMode e);
+
+// Contains configurations related to networking and simulation of
+// scenarios caused by network interference.
+struct NetworkingConfig {
+  NetworkingConfig()
+  : packet_size_in_bytes(1500), max_payload_size_in_bytes(1440),
+    packet_loss_mode(kUniform), packet_loss_probability(0.0),
+    packet_loss_burst_length(1) {
+  }
+
+  // Packet size in bytes. Default: 1500 bytes.
+  int packet_size_in_bytes;
+
+  // Encoder specific setting of maximum size in bytes of each payload.
+  // Default: 1440 bytes.
+  int max_payload_size_in_bytes;
+
+  // Packet loss mode. Two different packet loss models are supported:
+  // uniform or burst. This setting has no effect unless
+  // packet_loss_probability is >0.
+  // Default: uniform.
+  PacketLossMode packet_loss_mode;
+
+  // Packet loss probability. A value between 0.0 and 1.0 that defines the
+  // probability of a packet being lost. 0.1 means 10% and so on.
+  // Default: 0 (no loss).
+  double packet_loss_probability;
+
+  // Packet loss burst length. Defines how many packets will be lost in a burst
+  // when a packet has been decided to be lost. Must be >=1. Default: 1.
+  int packet_loss_burst_length;
+};
+
+// Class for simulating packet loss on the encoded frame data.
+// When a packet loss has occurred in a frame, the remaining data in that
+// frame is lost (even if burst length is only a single packet).
+// TODO(kjellander): Support discarding only individual packets in the frame
+// when CL 172001 has been submitted. This also requires a correct
+// fragmentation header to be passed to the decoder.
+//
+// To get a repeatable packet drop pattern, re-initialize the random seed
+// using InitializeRandomSeed before each test run.
+class PacketManipulator {
+ public:
+  virtual ~PacketManipulator() {}
+
+  // Manipulates the data of the encoded_image to simulate parts being lost
+  // during transport.
+  // If packets are dropped from frame data, the _completeFrame field will be
+  // set to false.
+  // Returns the number of packets being dropped.
+  virtual int
+    ManipulatePackets(webrtc::EncodedImage* encoded_image) = 0;
+};
+
+class PacketManipulatorImpl : public PacketManipulator {
+ public:
+  PacketManipulatorImpl(PacketReader* packet_reader,
+                        const NetworkingConfig& config,
+                        bool verbose);
+  virtual ~PacketManipulatorImpl();
+  virtual int ManipulatePackets(webrtc::EncodedImage* encoded_image);
+  virtual void InitializeRandomSeed(unsigned int seed);
+ protected:
+  // Returns a uniformly distributed random value between 0.0 and 1.0
+  virtual double RandomUniform();
+ private:
+  PacketReader* packet_reader_;
+  const NetworkingConfig& config_;
+  // Used to simulate a burst over several frames.
+  int active_burst_packets_;
+  CriticalSectionWrapper* critsect_;
+  unsigned int random_seed_;
+  bool verbose_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
diff --git a/trunk/src/modules/video_coding/codecs/test/packet_manipulator_unittest.cc b/trunk/src/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
new file mode 100644
index 0000000..a5d8bc3
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
@@ -0,0 +1,153 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+
+#include <queue>
+
+#include "gtest/gtest.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/test/predictive_packet_manipulator.h"
+#include "testsupport/unittest_utils.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+const double kNeverDropProbability = 0.0;
+const double kAlwaysDropProbability = 1.0;
+const int kBurstLength = 1;
+
+class PacketManipulatorTest: public PacketRelatedTest {
+ protected:
+  PacketReader packet_reader_;
+  EncodedImage image_;
+  NetworkingConfig drop_config_;
+  NetworkingConfig no_drop_config_;
+
+  PacketManipulatorTest() {
+    image_._buffer = packet_data_;
+    image_._length = kPacketDataLength;
+    image_._size = kPacketDataLength;
+
+    drop_config_.packet_size_in_bytes = kPacketSizeInBytes;
+    drop_config_.packet_loss_probability = kAlwaysDropProbability;
+    drop_config_.packet_loss_burst_length = kBurstLength;
+    drop_config_.packet_loss_mode = kUniform;
+
+    no_drop_config_.packet_size_in_bytes = kPacketSizeInBytes;
+    no_drop_config_.packet_loss_probability = kNeverDropProbability;
+    no_drop_config_.packet_loss_burst_length = kBurstLength;
+    no_drop_config_.packet_loss_mode = kUniform;
+  }
+
+  virtual ~PacketManipulatorTest() {}
+
+  void SetUp() {
+    PacketRelatedTest::SetUp();
+  }
+
+  void TearDown() {
+    PacketRelatedTest::TearDown();
+  }
+
+  void VerifyPacketLoss(int expected_nbr_packets_dropped,
+                        int actual_nbr_packets_dropped,
+                        int expected_packet_data_length,
+                        WebRtc_UWord8* expected_packet_data,
+                        EncodedImage& actual_image) {
+    EXPECT_EQ(expected_nbr_packets_dropped, actual_nbr_packets_dropped);
+    EXPECT_EQ(expected_packet_data_length, static_cast<int>(image_._length));
+    EXPECT_EQ(0, memcmp(expected_packet_data, actual_image._buffer,
+                        expected_packet_data_length));
+  }
+};
+
+TEST_F(PacketManipulatorTest, Constructor) {
+  PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
+}
+
+TEST_F(PacketManipulatorTest, DropNone) {
+  PacketManipulatorImpl manipulator(&packet_reader_,  no_drop_config_, false);
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+  VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength,
+                   packet_data_, image_);
+}
+
+TEST_F(PacketManipulatorTest, UniformDropNoneSmallFrame) {
+  int data_length = 400;  // smaller than the packet size
+  image_._length = data_length;
+  PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+
+  VerifyPacketLoss(0, nbr_packets_dropped, data_length,
+                     packet_data_, image_);
+}
+
+TEST_F(PacketManipulatorTest, UniformDropAll) {
+  PacketManipulatorImpl manipulator(&packet_reader_, drop_config_, false);
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+  VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped,
+                   0, packet_data_, image_);
+}
+
+// Use our customized test class to cause the second packet to be lost
+TEST_F(PacketManipulatorTest, UniformDropSinglePacket) {
+  drop_config_.packet_loss_probability = 0.5;
+  PredictivePacketManipulator manipulator(&packet_reader_, drop_config_);
+  manipulator.AddRandomResult(1.0);
+  manipulator.AddRandomResult(0.3);  // less than 0.5 will cause packet loss
+  manipulator.AddRandomResult(1.0);
+
+  // Execute the test target method:
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+
+  // Since we setup the predictive packet manipulator, it will throw away the
+  // second packet. The third packet is also lost because when we have lost one,
+  // the remains shall also be discarded (in the current implementation).
+  VerifyPacketLoss(2, nbr_packets_dropped, kPacketSizeInBytes, packet1_,
+                   image_);
+}
+
+// Use our customized test class to cause the second packet to be lost
+TEST_F(PacketManipulatorTest, BurstDropNinePackets) {
+  // Create a longer packet data structure (10 packets)
+  const int kNbrPackets = 10;
+  const int kDataLength = kPacketSizeInBytes * kNbrPackets;
+  WebRtc_UWord8 data[kDataLength];
+  WebRtc_UWord8* data_pointer = data;
+  // Fill with 0s, 1s and so on to be able to easily verify which were dropped:
+  for (int i = 0; i < kNbrPackets; ++i) {
+    memset(data_pointer + i * kPacketSizeInBytes, i, kPacketSizeInBytes);
+  }
+  // Overwrite the defaults from the test fixture:
+  image_._buffer = data;
+  image_._length = kDataLength;
+  image_._size = kDataLength;
+
+  drop_config_.packet_loss_probability = 0.5;
+  drop_config_.packet_loss_burst_length = 5;
+  drop_config_.packet_loss_mode = kBurst;
+  PredictivePacketManipulator manipulator(&packet_reader_, drop_config_);
+  manipulator.AddRandomResult(1.0);
+  manipulator.AddRandomResult(0.3);  // less than 0.5 will cause packet loss
+  for (int i = 0; i < kNbrPackets - 2; ++i) {
+    manipulator.AddRandomResult(1.0);
+  }
+
+  // Execute the test target method:
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+
+  // Should discard every packet after the first one.
+  VerifyPacketLoss(9, nbr_packets_dropped, kPacketSizeInBytes, data, image_);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/predictive_packet_manipulator.cc b/trunk/src/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
new file mode 100644
index 0000000..5668378
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/predictive_packet_manipulator.h"
+
+#include <cassert>
+#include <cstdio>
+
+#include "testsupport/packet_reader.h"
+
+namespace webrtc {
+namespace test {
+
+// Forwards construction to the base class.
+// NOTE(review): the meaning of the third (false) base-class argument is
+// defined by PacketManipulatorImpl and is not visible here — confirm there.
+PredictivePacketManipulator::PredictivePacketManipulator(
+    PacketReader* packet_reader, const NetworkingConfig& config)
+    : PacketManipulatorImpl(packet_reader, config, false) {
+}
+
+PredictivePacketManipulator::~PredictivePacketManipulator() {
+}
+
+
+// Queues a value to be returned by a later RandomUniform() call (FIFO).
+// The result must be in the interval [0.0, 1.0]; debug builds assert this.
+void PredictivePacketManipulator::AddRandomResult(double result) {
+  assert(result >= 0.0 && result <= 1.0);
+  random_results_.push(result);
+}
+
+// Returns the next pre-programmed "random" value from the FIFO queue,
+// instead of a real random number. Asserts (debug builds) if the queue is
+// exhausted, i.e. AddRandomResult() was called fewer times than
+// RandomUniform() is invoked (once per packet).
+double PredictivePacketManipulator::RandomUniform() {
+  if (random_results_.size() == 0u) {
+    // Fixed: the two string literals previously concatenated without a
+    // space ("AddRandomResult()is called").
+    fprintf(stderr, "No more stored results, please make sure "
+            "AddRandomResult() is called same amount of times you're going "
+            "to invoke the RandomUniform() function, i.e. once per packet.\n");
+    assert(false);
+  }
+  double result = random_results_.front();
+  random_results_.pop();
+  return result;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/predictive_packet_manipulator.h b/trunk/src/modules/video_coding/codecs/test/predictive_packet_manipulator.h
new file mode 100644
index 0000000..22a0ce9
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/predictive_packet_manipulator.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
+
+#include <queue>
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "testsupport/packet_reader.h"
+
+namespace webrtc {
+namespace test {
+
+// Predictive packet manipulator that allows for setup of the result of
+// the random invocations. Used by unit tests to get deterministic packet
+// loss behavior out of PacketManipulatorImpl.
+class PredictivePacketManipulator : public PacketManipulatorImpl {
+ public:
+  PredictivePacketManipulator(PacketReader* packet_reader,
+                              const NetworkingConfig& config);
+  virtual ~PredictivePacketManipulator();
+  // Adds a result. You must add at least the same number of results as the
+  // expected calls to the RandomUniform method. The results are added to a
+  // FIFO queue so they will be returned in the same order they were added.
+  // Result parameter must be 0.0 to 1.0.
+  void AddRandomResult(double result);
+ protected:
+  // Returns a uniformly distributed random value between 0.0 and 1.0
+  // (here: the next value queued with AddRandomResult(); overrides the
+  // base-class implementation).
+  virtual double RandomUniform();
+
+ private:
+  // FIFO of pre-programmed values returned by RandomUniform().
+  std::queue<double> random_results_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
diff --git a/trunk/src/modules/video_coding/codecs/test/stats.cc b/trunk/src/modules/video_coding/codecs/test/stats.cc
new file mode 100644
index 0000000..5cdf364
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/stats.cc
@@ -0,0 +1,172 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/stats.h"
+
+#include <algorithm>  // min_element, max_element
+#include <cassert>
+#include <cstdio>
+
+namespace webrtc {
+namespace test {
+
+Stats::Stats() {}
+
+Stats::~Stats() {}
+
+// Ordering predicates used with std::min_element/std::max_element in
+// PrintSummary() to locate the frames with extreme statistic values.
+bool LessForEncodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.encode_time_in_us < s2.encode_time_in_us;
+}
+
+bool LessForDecodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.decode_time_in_us < s2.decode_time_in_us;
+}
+
+bool LessForEncodedSize(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
+}
+
+bool LessForBitRate(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
+}
+
+// Appends a new statistics object for the given frame and returns a
+// reference to it. Frame numbers are expected to start at zero and
+// increase by one, so frame_number can double as an index into stats_.
+FrameStatistic& Stats::NewFrame(int frame_number) {
+  assert(frame_number >= 0);
+  FrameStatistic stat;
+  stat.frame_number = frame_number;
+  stats_.push_back(stat);
+  // Return the element just inserted. back() is always the new element;
+  // the previous stats_[frame_number] was undefined behavior whenever
+  // frame_number did not equal size() - 1.
+  return stats_.back();
+}
+
+// Prints min/max/average values for encode time, decode time, frame size
+// and bit rate over all logged frames, followed by total processing times.
+// Prints a notice and returns early if no frames have been logged.
+void Stats::PrintSummary() {
+  printf("Processing summary:\n");
+  if (stats_.size() == 0) {
+    printf("No frame statistics have been logged yet.\n");
+    return;
+  }
+
+  // Calculate min, max, average and total encoding time
+  // NOTE(review): totals are accumulated in int; very long or high-bitrate
+  // sequences could overflow — confirm this is acceptable for test usage.
+  int total_encoding_time_in_us = 0;
+  int total_decoding_time_in_us = 0;
+  int total_encoded_frames_lengths = 0;
+  int total_encoded_key_frames_lengths = 0;
+  int total_encoded_nonkey_frames_lengths = 0;
+  int nbr_keyframes = 0;
+  int nbr_nonkeyframes = 0;
+
+  for (FrameStatisticsIterator it = stats_.begin();
+      it != stats_.end(); ++it) {
+    total_encoding_time_in_us += it->encode_time_in_us;
+    total_decoding_time_in_us += it->decode_time_in_us;
+    total_encoded_frames_lengths += it->encoded_frame_length_in_bytes;
+    if (it->frame_type == webrtc::kKeyFrame) {
+      total_encoded_key_frames_lengths += it->encoded_frame_length_in_bytes;
+      nbr_keyframes++;
+    } else {
+      total_encoded_nonkey_frames_lengths += it->encoded_frame_length_in_bytes;
+      nbr_nonkeyframes++;
+    }
+  }
+
+  FrameStatisticsIterator frame;
+
+  // ENCODING
+  printf("Encoding time:\n");
+  frame = min_element(stats_.begin(),
+                      stats_.end(), LessForEncodeTime);
+  printf("  Min     : %7d us (frame %d)\n",
+         frame->encode_time_in_us, frame->frame_number);
+
+  frame = max_element(stats_.begin(),
+                      stats_.end(), LessForEncodeTime);
+  printf("  Max     : %7d us (frame %d)\n",
+         frame->encode_time_in_us, frame->frame_number);
+
+  printf("  Average : %7d us\n",
+         static_cast<int>(total_encoding_time_in_us / stats_.size()));
+
+  // DECODING
+  printf("Decoding time:\n");
+  // only consider frames that were successfully decoded (packet loss may cause
+  // failures)
+  std::vector<FrameStatistic> decoded_frames;
+  for (std::vector<FrameStatistic>::iterator it = stats_.begin();
+      it != stats_.end(); ++it) {
+    if (it->decoding_successful) {
+      decoded_frames.push_back(*it);
+    }
+  }
+  if (decoded_frames.size() == 0) {
+    printf("No successfully decoded frames exist in this statistics.\n");
+  } else {
+    frame = min_element(decoded_frames.begin(),
+                        decoded_frames.end(), LessForDecodeTime);
+    printf("  Min     : %7d us (frame %d)\n",
+           frame->decode_time_in_us, frame->frame_number);
+
+    frame = max_element(decoded_frames.begin(),
+                        decoded_frames.end(), LessForDecodeTime);
+    printf("  Max     : %7d us (frame %d)\n",
+           frame->decode_time_in_us, frame->frame_number);
+
+    printf("  Average : %7d us\n",
+           static_cast<int>(total_decoding_time_in_us / decoded_frames.size()));
+    printf("  Failures: %d frames failed to decode.\n",
+           static_cast<int>(stats_.size() - decoded_frames.size()));
+  }
+
+  // SIZE
+  printf("Frame sizes:\n");
+  frame = min_element(stats_.begin(),
+                      stats_.end(), LessForEncodedSize);
+  printf("  Min     : %7d bytes (frame %d)\n",
+         frame->encoded_frame_length_in_bytes, frame->frame_number);
+
+  frame = max_element(stats_.begin(),
+                      stats_.end(), LessForEncodedSize);
+  printf("  Max     : %7d bytes (frame %d)\n",
+         frame->encoded_frame_length_in_bytes, frame->frame_number);
+
+  printf("  Average : %7d bytes\n",
+         static_cast<int>(total_encoded_frames_lengths / stats_.size()));
+  if (nbr_keyframes > 0) {
+    printf("  Average key frame size    : %7d bytes (%d keyframes)\n",
+           total_encoded_key_frames_lengths / nbr_keyframes,
+           nbr_keyframes);
+  }
+  if (nbr_nonkeyframes > 0) {
+    printf("  Average non-key frame size: %7d bytes (%d frames)\n",
+           total_encoded_nonkey_frames_lengths / nbr_nonkeyframes,
+           nbr_nonkeyframes);
+  }
+
+  // BIT RATE
+  printf("Bit rates:\n");
+  frame = min_element(stats_.begin(),
+                      stats_.end(), LessForBitRate);
+  printf("  Min bit rate: %7d kbps (frame %d)\n",
+         frame->bit_rate_in_kbps, frame->frame_number);
+
+  frame = max_element(stats_.begin(),
+                      stats_.end(), LessForBitRate);
+  printf("  Max bit rate: %7d kbps (frame %d)\n",
+         frame->bit_rate_in_kbps, frame->frame_number);
+
+  printf("\n");
+  printf("Total encoding time  : %7d ms.\n",
+         total_encoding_time_in_us / 1000);
+  printf("Total decoding time  : %7d ms.\n",
+         total_decoding_time_in_us / 1000);
+  printf("Total processing time: %7d ms.\n",
+         (total_encoding_time_in_us + total_decoding_time_in_us) / 1000);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/stats.h b/trunk/src/modules/video_coding/codecs/test/stats.h
new file mode 100644
index 0000000..ec2bb9d
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/stats.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
+
+#include <vector>
+
+#include "common_video/interface/video_image.h"
+
+namespace webrtc {
+namespace test {
+
+// Contains statistics of a single frame that has been processed.
+struct FrameStatistic {
+  FrameStatistic() :
+      encoding_successful(false), decoding_successful(false),
+      encode_return_code(0), decode_return_code(0),
+      encode_time_in_us(0), decode_time_in_us(0),
+      frame_number(0), packets_dropped(0), total_packets(0),
+      bit_rate_in_kbps(0), encoded_frame_length_in_bytes(0),
+      frame_type(kDeltaFrame) {
+  };
+  // Whether the encode/decode step reported success for this frame.
+  bool encoding_successful;
+  bool decoding_successful;
+  // Raw return codes from the encoder/decoder API calls.
+  int encode_return_code;
+  int decode_return_code;
+  // Wall-clock time spent encoding/decoding, in microseconds.
+  int encode_time_in_us;
+  int decode_time_in_us;
+  int frame_number;
+  // How many packets were discarded of the encoded frame data (if any)
+  int packets_dropped;
+  int total_packets;
+
+  // Current bit rate. Calculated out of the size divided with the time
+  // interval per frame.
+  int bit_rate_in_kbps;
+
+  // Copied from EncodedImage
+  int encoded_frame_length_in_bytes;
+  webrtc::VideoFrameType frame_type;
+};
+
+// Handles statistics from a single video processing run.
+// Contains calculation methods for interesting metrics from these stats.
+class Stats {
+ public:
+  typedef std::vector<FrameStatistic>::iterator FrameStatisticsIterator;
+
+  Stats();
+  virtual ~Stats();
+
+  // Add a new statistic data object.
+  // The frame number must be incrementing and start at zero in order to use
+  // it as an index for the frame_statistics_ vector.
+  // Returns the newly created statistic object.
+  FrameStatistic& NewFrame(int frame_number);
+
+  // Prints a summary of all the statistics that have been gathered during the
+  // processing
+  void PrintSummary();
+
+  // Publicly accessible storage of all frame statistics, one entry per
+  // frame, indexed by frame number.
+  std::vector<FrameStatistic> stats_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
diff --git a/trunk/src/modules/video_coding/codecs/test/stats_unittest.cc b/trunk/src/modules/video_coding/codecs/test/stats_unittest.cc
new file mode 100644
index 0000000..53a50d7
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/stats_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/stats.h"
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Test fixture that allocates a fresh Stats instance before each test and
+// frees it afterwards.
+class StatsTest: public testing::Test {
+ protected:
+  StatsTest() {
+  }
+
+  virtual ~StatsTest() {
+  }
+
+  void SetUp() {
+    stats_ = new Stats();
+  }
+
+  void TearDown() {
+    delete stats_;
+  }
+
+  Stats* stats_;
+};
+
+// Test empty object: no frames logged, summary must not crash.
+TEST_F(StatsTest, Uninitialized) {
+  EXPECT_EQ(0u, stats_->stats_.size());
+  stats_->PrintSummary();  // should not crash
+}
+
+// Add single frame stats and verify it is stored with its frame number.
+TEST_F(StatsTest, AddOne) {
+  stats_->NewFrame(0u);
+  FrameStatistic* frameStat = &stats_->stats_[0];
+  EXPECT_EQ(0, frameStat->frame_number);
+}
+
+// Add multiple frame stats and verify each returned reference and the
+// final container size.
+TEST_F(StatsTest, AddMany) {
+  int nbr_of_frames = 1000;
+  for (int i = 0; i < nbr_of_frames; ++i) {
+    FrameStatistic& frameStat = stats_->NewFrame(i);
+    EXPECT_EQ(i, frameStat.frame_number);
+  }
+  EXPECT_EQ(nbr_of_frames, static_cast<int>(stats_->stats_.size()));
+
+  stats_->PrintSummary();  // should not crash
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi b/trunk/src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi
new file mode 100644
index 0000000..662d09d
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi
@@ -0,0 +1,72 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'video_codecs_test_framework',
+          'type': '<(library)',
+          'dependencies': [
+            '<(webrtc_root)/../test/test.gyp:test_support',
+          ],
+          'sources': [
+            'mock/mock_packet_manipulator.h',
+            'packet_manipulator.h',
+            'packet_manipulator.cc',
+            'predictive_packet_manipulator.h',
+            'predictive_packet_manipulator.cc',
+            'stats.h',
+            'stats.cc',
+            'videoprocessor.h',
+            'videoprocessor.cc',
+          ],
+        },
+        {
+          'target_name': 'video_codecs_test_framework_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'video_codecs_test_framework',
+            'webrtc_video_coding',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/../testing/gmock.gyp:gmock',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'packet_manipulator_unittest.cc',
+            'stats_unittest.cc',
+            'videoprocessor_unittest.cc',
+          ],
+        },
+        {
+          'target_name': 'video_codecs_test_framework_integrationtests',
+          'type': 'executable',
+          'dependencies': [
+            'video_codecs_test_framework',
+            'webrtc_video_coding',
+            'webrtc_vp8',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/metrics.gyp:metrics',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'videoprocessor_integrationtest.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/codecs/test/videoprocessor.cc b/trunk/src/modules/video_coding/codecs/test/videoprocessor.cc
new file mode 100644
index 0000000..8b13513
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/videoprocessor.cc
@@ -0,0 +1,287 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+
+#include <cassert>
+#include <cstring>
+#include <limits>
+
+#include "system_wrappers/interface/cpu_info.h"
+
+namespace webrtc {
+namespace test {
+
+VideoProcessorImpl::VideoProcessorImpl(webrtc::VideoEncoder* encoder,
+                                       webrtc::VideoDecoder* decoder,
+                                       FrameReader* frame_reader,
+                                       FrameWriter* frame_writer,
+                                       PacketManipulator* packet_manipulator,
+                                       const TestConfig& config,
+                                       Stats* stats)
+    : encoder_(encoder),
+      decoder_(decoder),
+      frame_reader_(frame_reader),
+      frame_writer_(frame_writer),
+      packet_manipulator_(packet_manipulator),
+      config_(config),
+      stats_(stats),
+      encode_callback_(NULL),
+      decode_callback_(NULL),
+      source_buffer_(NULL),
+      // NULL-initialize so the destructor's delete[] is well-defined even
+      // if Init() (which allocates this buffer) is never called.
+      last_successful_frame_buffer_(NULL),
+      first_key_frame_has_been_excluded_(false),
+      last_frame_missing_(false),
+      initialized_(false) {
+  // All collaborators are required; fail fast in debug builds.
+  assert(encoder);
+  assert(decoder);
+  assert(frame_reader);
+  assert(frame_writer);
+  assert(packet_manipulator);
+  assert(stats);
+}
+
+// Allocates the frame buffers, registers the encode/decode callbacks and
+// initializes the encoder and decoder. Must be called (and return true)
+// before ProcessFrame(). Logs to stderr and returns false on any failure.
+bool VideoProcessorImpl::Init() {
+  // Calculate a factor used for bit rate calculations:
+  bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8;  // bits
+
+  int frame_length_in_bytes = frame_reader_->FrameLength();
+
+  // Initialize data structures used by the encoder/decoder APIs
+  source_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
+  last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
+
+  // Set fixed properties common for all frames:
+  source_frame_._width = config_.codec_settings->width;
+  source_frame_._height = config_.codec_settings->height;
+  source_frame_._length = frame_length_in_bytes;
+  source_frame_._size = frame_length_in_bytes;
+
+  // Setup required callbacks for the encoder/decoder:
+  encode_callback_ = new VideoProcessorEncodeCompleteCallback(this);
+  decode_callback_ = new VideoProcessorDecodeCompleteCallback(this);
+  WebRtc_Word32 register_result =
+      encoder_->RegisterEncodeCompleteCallback(encode_callback_);
+  if (register_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to register encode complete callback, return code: "
+        "%d\n", register_result);
+    return false;
+  }
+  register_result = decoder_->RegisterDecodeCompleteCallback(decode_callback_);
+  if (register_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to register decode complete callback, return code: "
+            "%d\n", register_result);
+    return false;
+  }
+  // Init the encoder and decoder
+  WebRtc_UWord32 nbr_of_cores = 1;
+  if (!config_.use_single_core) {
+    nbr_of_cores = CpuInfo::DetectNumberOfCores();
+  }
+  WebRtc_Word32 init_result =
+      encoder_->InitEncode(config_.codec_settings, nbr_of_cores,
+                           config_.networking_config.max_payload_size_in_bytes);
+  if (init_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to initialize VideoEncoder, return code: %d\n",
+            init_result);
+    return false;
+  }
+  init_result = decoder_->InitDecode(config_.codec_settings, nbr_of_cores);
+  if (init_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to initialize VideoDecoder, return code: %d\n",
+            init_result);
+    return false;
+  }
+
+  if (config_.verbose) {
+    printf("Video Processor:\n");
+    printf("  #CPU cores used  : %d\n", nbr_of_cores);
+    printf("  Total # of frames: %d\n", frame_reader_->NumberOfFrames());
+    printf("  Codec settings:\n");
+    printf("    Start bitrate  : %d kbps\n",
+           config_.codec_settings->startBitrate);
+    printf("    Width          : %d\n", config_.codec_settings->width);
+    printf("    Height         : %d\n", config_.codec_settings->height);
+  }
+  initialized_ = true;
+  return true;
+}
+
+// Frees the buffers allocated in Init() and unregisters/destroys the
+// callback adapters.
+// NOTE(review): last_successful_frame_buffer_ is only assigned in Init();
+// if Init() is never called this delete[] operates on an uninitialized
+// pointer — consider NULL-initializing it in the constructor.
+VideoProcessorImpl::~VideoProcessorImpl() {
+  delete[] source_buffer_;
+  delete[] last_successful_frame_buffer_;
+  encoder_->RegisterEncodeCompleteCallback(NULL);
+  delete encode_callback_;
+  decoder_->RegisterDecodeCompleteCallback(NULL);
+  delete decode_callback_;
+}
+
+// Reads the next frame from the source file and feeds it to the encoder.
+// Returns false when there are no more frames to read (or if the processor
+// has not been initialized). Encode failures are logged and recorded in
+// the frame's statistics; they do not affect the return value.
+bool VideoProcessorImpl::ProcessFrame(int frame_number) {
+  assert(frame_number >=0);
+  if (!initialized_) {
+    fprintf(stderr, "Attempting to use uninitialized VideoProcessor!\n");
+    return false;
+  }
+  if (frame_reader_->ReadFrame(source_buffer_)) {
+    // point the source frame buffer to the newly read frame data:
+    source_frame_._buffer = source_buffer_;
+
+    // Ensure we have a new statistics data object we can fill:
+    FrameStatistic& stat = stats_->NewFrame(frame_number);
+
+    encode_start_ = TickTime::Now();
+    // Use the frame number as "timestamp" to identify frames
+    source_frame_._timeStamp = frame_number;
+
+    // Decide if we're going to force a keyframe:
+    VideoFrameType frame_type = kDeltaFrame;
+    if (config_.keyframe_interval > 0 &&
+        frame_number % config_.keyframe_interval == 0) {
+      frame_type = kKeyFrame;
+    }
+    WebRtc_Word32 encode_result = encoder_->Encode(source_frame_, NULL,
+                                                   &frame_type);
+    if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
+      fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
+              frame_number, encode_result);
+    }
+    stat.encode_return_code = encode_result;
+    return true;
+  } else {
+    return false;  // we've reached the last frame
+  }
+}
+
+// Invoked via the encode-complete callback with the encoded frame.
+// Records encode statistics, applies the packet manipulator (unless the
+// frame is excluded by the configured key-frame policy) and forwards the
+// possibly-manipulated frame to the decoder.
+void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
+  TickTime encode_stop = TickTime::Now();
+  // ProcessFrame() stored the frame number in the timestamp field:
+  int frame_number = encoded_image->_timeStamp;
+  FrameStatistic& stat = stats_->stats_[frame_number];
+  stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_,
+                                                      encode_stop);
+  stat.encoding_successful = true;
+  stat.encoded_frame_length_in_bytes = encoded_image->_length;
+  stat.frame_number = encoded_image->_timeStamp;
+  stat.frame_type = encoded_image->_frameType;
+  stat.bit_rate_in_kbps = encoded_image->_length * bit_rate_factor_;
+  stat.total_packets = encoded_image->_length /
+      config_.networking_config.packet_size_in_bytes + 1;
+
+  // Perform packet loss if criteria is fulfilled:
+  bool exclude_this_frame = false;
+  // Only keyframes can be excluded
+  if (encoded_image->_frameType == kKeyFrame) {
+    switch (config_.exclude_frame_types) {
+      case kExcludeOnlyFirstKeyFrame:
+        if (!first_key_frame_has_been_excluded_) {
+          first_key_frame_has_been_excluded_ = true;
+          exclude_this_frame = true;
+        }
+        break;
+      case kExcludeAllKeyFrames:
+        exclude_this_frame = true;
+        break;
+      default:
+        assert(false);
+    }
+  }
+  if (!exclude_this_frame) {
+    stat.packets_dropped =
+          packet_manipulator_->ManipulatePackets(encoded_image);
+  }
+
+  // Keep track of if frames are lost due to packet loss so we can tell
+  // this to the encoder (this is handled by the RTP logic in the full stack)
+  decode_start_ = TickTime::Now();
+  // TODO(kjellander): Pass fragmentation header to the decoder when
+  // CL 172001 has been submitted and PacketManipulator supports this.
+  WebRtc_Word32 decode_result = decoder_->Decode(*encoded_image,
+                                                 last_frame_missing_, NULL);
+  stat.decode_return_code = decode_result;
+  if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
+    // Write the last successful frame the output file to avoid getting it out
+    // of sync with the source file for SSIM and PSNR comparisons:
+    frame_writer_->WriteFrame(last_successful_frame_buffer_);
+  }
+  // save status for losses so we can inform the decoder for the next frame:
+  last_frame_missing_ = encoded_image->_length == 0;
+}
+
+// Invoked via the decode-complete callback with the decoded frame.
+// Records decode timing/success stats, keeps a copy of the frame as the
+// "last successful" fallback, and writes the frame to the output file.
+void VideoProcessorImpl::FrameDecoded(const RawImage& image) {
+  TickTime decode_stop = TickTime::Now();
+  // The frame number was used as the timestamp when encoding:
+  int frame_number = image._timeStamp;
+  // Report stats
+  FrameStatistic& stat = stats_->stats_[frame_number];
+  stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
+                                                      decode_stop);
+  stat.decoding_successful = true;
+  // Update our copy of the last successful frame:
+  memcpy(last_successful_frame_buffer_, image._buffer, image._length);
+
+  bool write_success = frame_writer_->WriteFrame(image._buffer);
+  if (!write_success) {
+    // Fixed: added the missing trailing newline so this log line does not
+    // run into the next stderr output.
+    fprintf(stderr, "Failed to write frame %d to disk!\n", frame_number);
+  }
+}
+
+// Returns stop - start in microseconds, asserting (debug builds) that the
+// value fits in an int before narrowing. Used for both encode and decode
+// timing.
+int VideoProcessorImpl::GetElapsedTimeMicroseconds(
+    const webrtc::TickTime& start, const webrtc::TickTime& stop) {
+  WebRtc_UWord64 elapsed_time = (stop - start).Microseconds();
+  assert(elapsed_time <
+         static_cast<unsigned int>(std::numeric_limits<int>::max()));
+  return static_cast<int>(elapsed_time);
+}
+
+// Human-readable name of the frame-exclusion mode, for logging.
+const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e) {
+  if (e == kExcludeOnlyFirstKeyFrame) {
+    return "ExcludeOnlyFirstKeyFrame";
+  }
+  if (e == kExcludeAllKeyFrames) {
+    return "ExcludeAllKeyFrames";
+  }
+  assert(false);
+  return "Unknown";
+}
+
+// Human-readable name of the video codec type, for logging.
+const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
+  if (e == kVideoCodecVP8) {
+    return "VP8";
+  }
+  if (e == kVideoCodecI420) {
+    return "I420";
+  }
+  if (e == kVideoCodecRED) {
+    return "RED";
+  }
+  if (e == kVideoCodecULPFEC) {
+    return "ULPFEC";
+  }
+  if (e == kVideoCodecUnknown) {
+    return "Unknown";
+  }
+  assert(false);
+  return "Unknown";
+}
+
+// Callbacks
+// Thin adapters: the encoder/decoder invoke these when a frame is
+// complete, and they forward the result to the owning VideoProcessorImpl.
+WebRtc_Word32
+VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
+    EncodedImage& encoded_image,
+    const webrtc::CodecSpecificInfo* codec_specific_info,
+    const webrtc::RTPFragmentationHeader* fragmentation) {
+  video_processor_->FrameEncoded(&encoded_image);  // forward to the processor
+  return 0;
+}
+WebRtc_Word32
+VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
+    RawImage& image) {
+  video_processor_->FrameDecoded(image);  // forward to the processor
+  return 0;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/videoprocessor.h b/trunk/src/modules/video_coding/codecs/test/videoprocessor.h
new file mode 100644
index 0000000..7c2c14f
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/videoprocessor.h
@@ -0,0 +1,229 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
+
+#include <string>
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "modules/video_coding/codecs/test/stats.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+// Defines which frame types shall be excluded from packet loss and when.
+enum ExcludeFrameTypes {
+  // Will exclude the first keyframe in the video sequence from packet loss.
+  // Following keyframes will be targeted for packet loss.
+  kExcludeOnlyFirstKeyFrame,
+  // Exclude all keyframes from packet loss, no matter where in the video
+  // sequence they occur.
+  kExcludeAllKeyFrames
+};
+// Returns a string representation of the enum value.
+const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e);
+
+// Test configuration for a test run
+struct TestConfig {
+  TestConfig()
+    : name(""), description(""), test_number(0),
+      input_filename(""), output_filename(""), output_dir("out"),
+      networking_config(), exclude_frame_types(kExcludeOnlyFirstKeyFrame),
+      frame_length_in_bytes(-1), use_single_core(false), keyframe_interval(0),
+      codec_settings(NULL), verbose(true) {
+  };
+
+  // Name of the test. This is purely metadata and does not affect
+  // the test in any way.
+  std::string name;
+
+  // More detailed description of the test. This is purely metadata and does
+  // not affect the test in any way.
+  std::string description;
+
+  // Number of this test. Useful if multiple runs of the same test with
+  // different configurations shall be managed.
+  int test_number;
+
+  // File to process for the test. This must be a video file in the YUV format.
+  std::string input_filename;
+
+  // File to write to during processing for the test. Will be a video file
+  // in the YUV format.
+  std::string output_filename;
+
+  // Path to the directory where encoded files will be put
+  // (absolute or relative to the executable). Default: "out".
+  std::string output_dir;
+
+  // Configurations related to networking.
+  NetworkingConfig networking_config;
+
+  // Decides how the packet loss simulations shall exclude certain frames
+  // from packet loss. Default: kExcludeOnlyFirstKeyFrame.
+  ExcludeFrameTypes exclude_frame_types;
+
+  // The length of a single frame of the input video file. This value is
+  // calculated out of the width and height according to the video format
+  // specification. Must be set before processing.
+  int frame_length_in_bytes;
+
+  // Force the encoder and decoder to use a single core for processing.
+  // Using a single core is necessary to get a deterministic behavior for the
+  // encoded frames - using multiple cores will produce different encoded frames
+  // since multiple cores are competing to consume the byte budget for each
+  // frame in parallel.
+  // If set to false, the maximum number of available cores will be used.
+  // Default: false.
+  bool use_single_core;
+
+  // If set to a value >0 this setting forces the encoder to create a keyframe
+  // every Nth frame. Note that the encoder may create a keyframe in other
+  // locations in addition to the interval that is set using this parameter.
+  // Forcing key frames may also affect encoder planning optimizations in
+  // a negative way, since it will suddenly be forced to produce an expensive
+  // key frame.
+  // Default: 0.
+  int keyframe_interval;
+
+  // The codec settings to use for the test (target bitrate, video size,
+  // framerate and so on). This struct must be created and filled in using
+  // the VideoCodingModule::Codec() method.
+  webrtc::VideoCodec* codec_settings;
+
+  // Whether information should be printed to stdout during processing.
+  bool verbose;
+};
+
+// Returns a string representation of the enum value.
+const char* VideoCodecTypeToStr(webrtc::VideoCodecType e);
+
+// Handles encoding/decoding of video using the VideoEncoder/VideoDecoder
+// interfaces. This is done in a sequential manner in order to be able to
+// measure times properly.
+// The class processes a frame at the time for the configured input file.
+// It maintains state of where in the source input file the processing is at.
+//
+// Regarding packet loss: Note that keyframes are excluded (first or all
+// depending on the ExcludeFrameTypes setting). This is because if key frames
+// would be altered, all the following delta frames would be pretty much
+// worthless. VP8 has an error-resilience feature that makes it able to handle
+// packet loss in non-first keyframes, which is why only the first one is
+// excluded by default.
+// Packet loss in such important frames is handled on a higher level in the
+// Video Engine, where signaling would request a retransmit of the lost packets,
+// since they're so important.
+//
+// Note this class is not thread safe in any way and is meant for simple testing
+// purposes.
+class VideoProcessor {
+ public:
+  virtual ~VideoProcessor() {}
+
+  // Performs initial calculations about frame size, sets up callbacks etc.
+  // Returns false if an error has occurred, in addition to printing to stderr.
+  virtual bool Init() = 0;
+
+  // Processes a single frame. Returns true as long as there's more frames
+  // available in the source clip.
+  // Frame number must be an integer >=0.
+  virtual bool ProcessFrame(int frame_number) = 0;
+};
+
+class VideoProcessorImpl : public VideoProcessor {
+ public:
+  VideoProcessorImpl(webrtc::VideoEncoder* encoder,
+                     webrtc::VideoDecoder* decoder,
+                     FrameReader* frame_reader,
+                     FrameWriter* frame_writer,
+                     PacketManipulator* packet_manipulator,
+                     const TestConfig& config,
+                     Stats* stats);
+  virtual ~VideoProcessorImpl();
+  virtual bool Init();
+  virtual bool ProcessFrame(int frame_number);
+
+ private:
+  // Invoked by the callback when a frame has completed encoding.
+  void FrameEncoded(EncodedImage* encodedImage);
+  // Invoked by the callback when a frame has completed decoding.
+  void FrameDecoded(const RawImage& image);
+  // Used for getting a 32-bit integer representing time
+  // (checks the size is within signed 32-bit bounds before casting it)
+  int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
+                                 const webrtc::TickTime& stop);
+
+  webrtc::VideoEncoder* encoder_;
+  webrtc::VideoDecoder* decoder_;
+  FrameReader* frame_reader_;
+  FrameWriter* frame_writer_;
+  PacketManipulator* packet_manipulator_;
+  const TestConfig& config_;
+  Stats* stats_;
+
+  EncodedImageCallback* encode_callback_;
+  DecodedImageCallback* decode_callback_;
+  // Buffer used for reading the source video file:
+  WebRtc_UWord8* source_buffer_;
+  // Keep track of the last successful frame, since we need to write that
+  // when decoding fails:
+  WebRtc_UWord8* last_successful_frame_buffer_;
+  webrtc::RawImage source_frame_;
+  // Tracks whether we have excluded the first key frame from packet loss:
+  bool first_key_frame_has_been_excluded_;
+  // Tells the decoder that the previous frame was dropped due to packet loss:
+  bool last_frame_missing_;
+  // If Init() has executed successfully.
+  bool initialized_;
+
+  // Statistics
+  double bit_rate_factor_;  // multiply frame length with this to get bit rate
+  webrtc::TickTime encode_start_;
+  webrtc::TickTime decode_start_;
+
+  // Callback class required to implement according to the VideoEncoder API.
+  class VideoProcessorEncodeCompleteCallback
+    : public webrtc::EncodedImageCallback {
+   public:
+      explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
+        : video_processor_(vp) {
+    }
+    WebRtc_Word32 Encoded(
+        webrtc::EncodedImage& encoded_image,
+        const webrtc::CodecSpecificInfo* codec_specific_info = NULL,
+        const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+
+   private:
+    VideoProcessorImpl* video_processor_;
+  };
+
+  // Callback class required to implement according to the VideoDecoder API.
+  class VideoProcessorDecodeCompleteCallback
+    : public webrtc::DecodedImageCallback {
+   public:
+      explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
+      : video_processor_(vp) {
+    }
+    WebRtc_Word32 Decoded(webrtc::RawImage& image);
+
+   private:
+    VideoProcessorImpl* video_processor_;
+  };
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
diff --git a/trunk/src/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/trunk/src/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
new file mode 100644
index 0000000..19c40ad
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -0,0 +1,173 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+#include "modules/video_coding/codecs/vp8/main/interface/vp8.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "testsupport/packet_reader.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+const int kNbrFrames = 61;  // foreman_cif_short.yuv
+const int kCIFWidth = 352;
+const int kCIFHeight = 288;
+const int kBitRateKbps = 500;
+
+// Integration test for video processor. Encodes+decodes a small clip and
+// writes it to the output directory. After completion, PSNR and SSIM
+// measurements are performed on the original and the processed clip to verify
+// the quality is acceptable.
+// The limits for the PSNR and SSIM values must be set quite low, since we have
+// no control over the random function used for packet loss in this test.
+class VideoProcessorIntegrationTest: public testing::Test {
+ protected:
+  VideoEncoder* encoder_;
+  VideoDecoder* decoder_;
+  webrtc::test::FrameReader* frame_reader_;
+  webrtc::test::FrameWriter* frame_writer_;
+  webrtc::test::PacketReader packet_reader_;
+  webrtc::test::PacketManipulator* packet_manipulator_;
+  webrtc::test::Stats stats_;
+  webrtc::test::TestConfig config_;
+  VideoCodec codec_settings_;
+  webrtc::test::VideoProcessor* processor_;
+
+  VideoProcessorIntegrationTest() {}
+  virtual ~VideoProcessorIntegrationTest() {}
+
+  void SetUp() {
+    encoder_ = VP8Encoder::Create();
+    decoder_ = VP8Decoder::Create();
+
+    // Setup the TestConfig struct for processing of a clip in CIF resolution.
+    config_.input_filename =
+        webrtc::test::ResourcePath("foreman_cif_short", "yuv");
+    config_.output_filename = webrtc::test::OutputPath() +
+        "foreman_cif_short_video_codecs_test_framework_integrationtests.yuv";
+    config_.frame_length_in_bytes = 3 * kCIFWidth * kCIFHeight / 2;
+    config_.verbose = false;
+    // Only allow encoder/decoder to use single core, for predictability.
+    config_.use_single_core = true;
+
+    // Get a codec configuration struct and configure it.
+    VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
+    config_.codec_settings = &codec_settings_;
+    config_.codec_settings->startBitrate = kBitRateKbps;
+    config_.codec_settings->width = kCIFWidth;
+    config_.codec_settings->height = kCIFHeight;
+
+    frame_reader_ =
+        new webrtc::test::FrameReaderImpl(config_.input_filename,
+                                          config_.frame_length_in_bytes);
+    frame_writer_ =
+        new webrtc::test::FrameWriterImpl(config_.output_filename,
+                                          config_.frame_length_in_bytes);
+    ASSERT_TRUE(frame_reader_->Init());
+    ASSERT_TRUE(frame_writer_->Init());
+
+    packet_manipulator_ = new webrtc::test::PacketManipulatorImpl(
+        &packet_reader_, config_.networking_config, config_.verbose);
+    processor_ = new webrtc::test::VideoProcessorImpl(encoder_, decoder_,
+                                                      frame_reader_,
+                                                      frame_writer_,
+                                                      packet_manipulator_,
+                                                      config_, &stats_);
+    ASSERT_TRUE(processor_->Init());
+  }
+
+  void TearDown() {
+    delete processor_;
+    delete packet_manipulator_;
+    delete frame_writer_;
+    delete frame_reader_;
+    delete decoder_;
+    delete encoder_;
+  }
+
+  // Processes all frames in the clip and verifies the result.
+  void ProcessFramesAndVerify(double minimum_avg_psnr,
+                              double minimum_min_psnr,
+                              double minimum_avg_ssim,
+                              double minimum_min_ssim) {
+    int frame_number = 0;
+    while (processor_->ProcessFrame(frame_number)) {
+      frame_number++;
+    }
+    EXPECT_EQ(kNbrFrames, frame_number);
+    EXPECT_EQ(kNbrFrames, static_cast<int>(stats_.stats_.size()));
+
+    // Release encoder and decoder to make sure they have finished processing:
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
+    // Close the files before we start using them for SSIM/PSNR calculations.
+    frame_reader_->Close();
+    frame_writer_->Close();
+
+    webrtc::test::QualityMetricsResult psnr_result, ssim_result;
+    EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
+        config_.input_filename.c_str(),
+        config_.output_filename.c_str(),
+        config_.codec_settings->width,
+        config_.codec_settings->height,
+        &psnr_result,
+        &ssim_result));
+    printf("PSNR avg: %f, min: %f    SSIM avg: %f, min: %f\n",
+           psnr_result.average, psnr_result.min,
+           ssim_result.average, ssim_result.min);
+    EXPECT_GT(psnr_result.average, minimum_avg_psnr);
+    EXPECT_GT(psnr_result.min, minimum_min_psnr);
+    EXPECT_GT(ssim_result.average, minimum_avg_ssim);
+    EXPECT_GT(ssim_result.min, minimum_min_ssim);
+  }
+};
+
+// Run with no packet loss. Quality should be very high.
+TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
+  config_.networking_config.packet_loss_probability = 0;
+  double minimum_avg_psnr = 36;
+  double minimum_min_psnr = 34;
+  double minimum_avg_ssim = 0.9;
+  double minimum_min_ssim = 0.9;
+  ProcessFramesAndVerify(minimum_avg_psnr, minimum_min_psnr,
+                         minimum_avg_ssim, minimum_min_ssim);
+}
+
+// Run with 5% packet loss. Quality should be a bit lower.
+// TODO(mflodman): Reenable this once it's not flaky.
+TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
+  config_.networking_config.packet_loss_probability = 0.05;
+  double minimum_avg_psnr = 21;
+  double minimum_min_psnr = 17;
+  double minimum_avg_ssim = 0.6;
+  double minimum_min_ssim = 0.4;
+  ProcessFramesAndVerify(minimum_avg_psnr, minimum_min_psnr,
+                         minimum_avg_ssim, minimum_min_ssim);
+}
+
+// Run with 10% packet loss. Quality should be even lower.
+TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
+  config_.networking_config.packet_loss_probability = 0.10;
+  double minimum_avg_psnr = 19;
+  double minimum_min_psnr = 16;
+  double minimum_avg_ssim = 0.6;
+  double minimum_min_ssim = 0.4;
+  ProcessFramesAndVerify(minimum_avg_psnr, minimum_min_psnr,
+                         minimum_avg_ssim, minimum_min_ssim);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/trunk/src/modules/video_coding/codecs/test/videoprocessor_unittest.cc
new file mode 100644
index 0000000..d51ef6b
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test/videoprocessor_unittest.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "gmock/gmock.h"
+#include "modules/video_coding/codecs/test/mock/mock_packet_manipulator.h"
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+#include "modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "testsupport/mock/mock_frame_reader.h"
+#include "testsupport/mock/mock_frame_writer.h"
+#include "testsupport/packet_reader.h"
+#include "testsupport/unittest_utils.h"
+#include "typedefs.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Return;
+
+namespace webrtc {
+namespace test {
+
+// Very basic testing for VideoProcessor. It's mostly tested by running the
+// video_quality_measurement program.
+class VideoProcessorTest: public testing::Test {
+ protected:
+  MockVideoEncoder encoder_mock_;
+  MockVideoDecoder decoder_mock_;
+  MockFrameReader frame_reader_mock_;
+  MockFrameWriter frame_writer_mock_;
+  MockPacketManipulator packet_manipulator_mock_;
+  Stats stats_;
+  TestConfig config_;
+  VideoCodec codec_settings_;
+
+  VideoProcessorTest() {}
+  virtual ~VideoProcessorTest() {}
+  void SetUp() {
+    // Get a codec configuration struct and configure it.
+    VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
+    config_.codec_settings = &codec_settings_;
+    config_.codec_settings->startBitrate = 100;
+    config_.codec_settings->width = 352;
+    config_.codec_settings->height = 288;
+  }
+  void TearDown() {}
+
+  void ExpectInit() {
+    EXPECT_CALL(encoder_mock_, InitEncode(_, _, _))
+      .Times(1);
+    EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_))
+      .Times(AtLeast(1));
+    EXPECT_CALL(decoder_mock_, InitDecode(_, _))
+      .Times(1);
+    EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_))
+      .Times(AtLeast(1));
+    EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
+      .WillOnce(Return(1));
+    EXPECT_CALL(frame_reader_mock_, FrameLength())
+      .WillOnce(Return(150000));
+  }
+};
+
+TEST_F(VideoProcessorTest, Init) {
+  ExpectInit();
+  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
+                                     &frame_reader_mock_,
+                                     &frame_writer_mock_,
+                                     &packet_manipulator_mock_, config_,
+                                     &stats_);
+  ASSERT_TRUE(video_processor.Init());
+}
+
+TEST_F(VideoProcessorTest, ProcessFrame) {
+  ExpectInit();
+  EXPECT_CALL(encoder_mock_, Encode(_, _, _))
+    .Times(1);
+  EXPECT_CALL(frame_reader_mock_, ReadFrame(_))
+    .WillOnce(Return(true));
+  // Since the mock encoder never invokes its completion callback, the
+  // decoder will do nothing beyond being initialized.
+  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
+                                     &frame_reader_mock_,
+                                     &frame_writer_mock_,
+                                     &packet_manipulator_mock_, config_,
+                                     &stats_);
+  ASSERT_TRUE(video_processor.Init());
+  video_processor.ProcessFrame(0);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/benchmark.cc b/trunk/src/modules/video_coding/codecs/test_framework/benchmark.cc
new file mode 100644
index 0000000..883330c
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/benchmark.cc
@@ -0,0 +1,310 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark.h"
+
+#include <cassert>
+#include <iostream>
+#include <sstream>
+#include <vector>
+#if defined(_WIN32)
+    #include <windows.h>
+#endif
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "modules/video_coding/codecs/test_framework/video_source.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/metrics/video_metrics.h"
+
+#define SSIM_CALC 0 // by default, don't compute SSIM
+
+using namespace webrtc;
+
+Benchmark::Benchmark()
+:
+NormalAsyncTest("Benchmark", "Codec benchmark over a range of test cases", 6),
+_resultsFileName(webrtc::test::OutputPath() + "benchmark.txt"),
+_codecName("Default")
+{
+}
+
+Benchmark::Benchmark(std::string name, std::string description)
+:
+NormalAsyncTest(name, description, 6),
+_resultsFileName(webrtc::test::OutputPath() + "benchmark.txt"),
+_codecName("Default")
+{
+}
+
+Benchmark::Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName)
+:
+NormalAsyncTest(name, description, 6),
+_resultsFileName(resultsFileName),
+_codecName(codecName)
+{
+}
+
+void
+Benchmark::Perform()
+{
+    std::vector<const VideoSource*> sources;
+    std::vector<const VideoSource*>::iterator it;
+
+    // Configuration --------------------------
+    sources.push_back(new const VideoSource(webrtc::test::ProjectRootPath() +
+                                            "resources/foreman_cif.yuv", kCIF));
+//    sources.push_back(new const VideoSource(webrtc::test::ProjectRootPath() +
+//                                            "resources/akiyo_cif.yuv", kCIF));
+
+    const VideoSize size[] = {kQCIF, kCIF};
+    const int frameRate[] = {10, 15, 30};
+    // Specifies the framerates for which to perform a speed test.
+    const bool speedTestMask[] = {false, false, false};
+    const int bitRate[] = {50, 100, 200, 300, 400, 500, 600, 1000};
+    // Determines the number of iterations to perform to arrive at the speed result.
+    enum { kSpeedTestIterations = 10 };
+    // ----------------------------------------
+
+    const int nFrameRates = sizeof(frameRate)/sizeof(*frameRate);
+    assert(sizeof(speedTestMask)/sizeof(*speedTestMask) == nFrameRates);
+    const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
+    int testIterations = 10;
+
+    webrtc::test::QualityMetricsResult psnr[nBitrates];
+    webrtc::test::QualityMetricsResult ssim[nBitrates];
+    double fps[nBitrates];
+    double totalEncodeTime[nBitrates];
+    double totalDecodeTime[nBitrates];
+
+    _results.open(_resultsFileName.c_str(), std::fstream::out);
+    _results << GetMagicStr() << std::endl;
+    _results << _codecName << std::endl;
+
+    for (it = sources.begin() ; it < sources.end(); it++)
+    {
+        for (int i = 0; i < static_cast<int>(sizeof(size)/sizeof(*size)); i++)
+        {
+            for (int j = 0; j < nFrameRates; j++)
+            {
+                std::stringstream ss;
+                std::string strFrameRate;
+                std::string outFileName;
+                ss << frameRate[j];
+                ss >> strFrameRate;
+                outFileName = (*it)->GetFilePath() + "/" + (*it)->GetName() + "_" +
+                    VideoSource::GetSizeString(size[i]) + "_" + strFrameRate + ".yuv";
+
+                _target = new const VideoSource(outFileName, size[i], frameRate[j]);
+                (*it)->Convert(*_target);
+                if (VideoSource::FileExists(outFileName.c_str()))
+                {
+                    _inname = outFileName;
+                }
+                else
+                {
+                    _inname = (*it)->GetFileName();
+                }
+
+                std::cout << (*it)->GetName() << ", " << VideoSource::GetSizeString(size[i])
+                    << ", " << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]:";
+                _results << (*it)->GetName() << "," << VideoSource::GetSizeString(size[i])
+                    << "," << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]";
+
+                if (speedTestMask[j])
+                {
+                    testIterations = kSpeedTestIterations;
+                }
+                else
+                {
+                    testIterations = 1;
+                }
+
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    _bitRate = (bitRate[k]);
+                    double avgFps = 0.0;
+                    totalEncodeTime[k] = 0;
+                    totalDecodeTime[k] = 0;
+
+                    for (int l = 0; l < testIterations; l++)
+                    {
+                        PerformNormalTest();
+                        _appendNext = false;
+
+                        avgFps += _framecnt / (_totalEncodeTime + _totalDecodeTime);
+                        totalEncodeTime[k] += _totalEncodeTime;
+                        totalDecodeTime[k] += _totalDecodeTime;
+
+                    }
+                    avgFps /= testIterations;
+                    totalEncodeTime[k] /= testIterations;
+                    totalDecodeTime[k] /= testIterations;
+
+                    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
+                    std::cout << " " << actualBitRate;
+                    _results << "," << actualBitRate;
+                    webrtc::test::QualityMetricsResult psnr_result;
+                    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(),
+                                      _inst.width, _inst.height, &psnr[k]);
+                    if (SSIM_CALC)
+                    {
+                        webrtc::test::QualityMetricsResult ssim_result;
+                        I420SSIMFromFiles(_inname.c_str(), _outname.c_str(),
+                                          _inst.width, _inst.height, &ssim[k]);
+
+                    }
+                    fps[k] = avgFps;
+                }
+                std::cout << std::endl << "Y-PSNR [dB]:";
+                _results << std::endl << "Y-PSNR [dB]";
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    std::cout << " " << psnr[k].average;
+                    _results << "," << psnr[k].average;
+
+                }
+                if (SSIM_CALC)
+                {
+                    std::cout << std::endl << "SSIM: ";
+                    _results << std::endl << "SSIM ";
+                    for (int k = 0; k < nBitrates; k++)
+                    {
+                        std::cout << " " << ssim[k].average;
+                        _results << "," << ssim[k].average;
+                    }
+
+                }
+
+                std::cout << std::endl << "Encode Time[ms]:";
+                _results << std::endl << "Encode Time[ms]";
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    std::cout << " " << totalEncodeTime[k];
+                    _results << "," << totalEncodeTime[k];
+
+                }
+
+                std::cout << std::endl << "Decode Time[ms]:";
+                _results << std::endl << "Decode Time[ms]";
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    std::cout << " " << totalDecodeTime[k];
+                    _results << "," << totalDecodeTime[k];
+
+                }
+
+                if (speedTestMask[j])
+                {
+                    std::cout << std::endl << "Speed [fps]:";
+                    _results << std::endl << "Speed [fps]";
+                    for (int k = 0; k < nBitrates; k++)
+                    {
+                        std::cout << " " << static_cast<int>(fps[k] + 0.5);
+                        _results << "," << static_cast<int>(fps[k] + 0.5);
+                    }
+                }
+                std::cout << std::endl << std::endl;
+                _results << std::endl << std::endl;
+
+                delete _target;
+            }
+        }
+        delete *it;
+    }
+    _results.close();
+}
+
+void
+Benchmark::PerformNormalTest()
+{
+    _encoder = GetNewEncoder();
+    _decoder = GetNewDecoder();
+    CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate);
+    Setup();
+    EventWrapper* waitEvent = EventWrapper::Create();
+
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _encoder->InitEncode(&_inst, 4, 1440);
+    CodecSpecific_InitBitrate();
+    _decoder->InitDecode(&_inst,1);
+
+    FrameQueue frameQueue;
+    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
+    _encoder->RegisterEncodeCompleteCallback(&encCallback);
+    _decoder->RegisterDecodeCompleteCallback(&decCallback);
+
+    SetCodecSpecificParameters();
+
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _totalEncodePipeTime = _totalDecodePipeTime = 0;
+    bool complete = false;
+    _framecnt = 0;
+    _encFrameCnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    while (!complete)
+    {
+        complete = Encode();
+        if (!frameQueue.Empty() || complete)
+        {
+            while (!frameQueue.Empty())
+            {
+                _frameToDecode = static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+                DoPacketLoss();
+                int ret = Decode();
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                if (ret < 0)
+                {
+                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                    exit(EXIT_FAILURE);
+                }
+                else if (ret == 0)
+                {
+                    _framecnt++;
+                }
+                else
+                {
+                    fprintf(stderr, "\n\nPositive return value from decode!\n\n");
+                }
+            }
+        }
+        waitEvent->Wait(5);
+    }
+
+    _inputVideoBuffer.Free();
+    //_encodedVideoBuffer.Reset(); ?
+    _encodedVideoBuffer.Free();
+    _decodedVideoBuffer.Free();
+
+    _encoder->Release();
+    _decoder->Release();
+    delete waitEvent;
+    delete _encoder;
+    delete _decoder;
+    Teardown();
+}
+
+void
+Benchmark::CodecSpecific_InitBitrate()
+{
+    if (_bitRate == 0)
+    {
+        _encoder->SetRates(600, _inst.maxFramerate);
+    }
+    else
+    {
+        _encoder->SetRates(_bitRate, _inst.maxFramerate);
+    }
+}
+
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/benchmark.h b/trunk/src/modules/video_coding/codecs/test_framework/benchmark.h
new file mode 100644
index 0000000..57806e5
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/benchmark.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Include guard: spelling fixed from TEST_FRAWEWORK to TEST_FRAMEWORK
+// (consistently in #ifndef/#define/#endif).
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_BENCHMARK_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_BENCHMARK_H_
+
+#include <fstream>  // std::ofstream used below
+#include <string>   // std::string used below
+
+#include "normal_async_test.h"
+
+class VideoSource;
+
+// Codec benchmark built on top of the asynchronous normal test. Concrete
+// codec benchmarks subclass this and supply encoder/decoder factories.
+class Benchmark : public NormalAsyncTest
+{
+public:
+    Benchmark();
+    virtual void Perform();
+
+protected:
+    Benchmark(std::string name, std::string description);
+    Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName);
+    // Factory methods implemented by codec-specific subclasses.
+    virtual webrtc::VideoEncoder* GetNewEncoder() = 0;
+    virtual webrtc::VideoDecoder* GetNewDecoder() = 0;
+    virtual void PerformNormalTest();
+    virtual void CodecSpecific_InitBitrate();
+    // Magic string identifying the benchmark results file format/version.
+    static const char* GetMagicStr() { return "#!benchmark1.0"; }
+
+    const VideoSource* _target;          // Video source under test.
+    std::string        _resultsFileName; // Where results are written.
+    std::ofstream      _results;         // Results output stream.
+    std::string        _codecName;       // Human-readable codec name.
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_BENCHMARK_H_
+
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/exportfig.m b/trunk/src/modules/video_coding/codecs/test_framework/exportfig.m
new file mode 100644
index 0000000..d0d5ed9
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/exportfig.m
@@ -0,0 +1,500 @@
+function exportfig(varargin)
+%EXPORTFIG  Export a figure to Encapsulated Postscript.
+%   EXPORTFIG(H, FILENAME) writes the figure H to FILENAME.  H is
+%   a figure handle and FILENAME is a string that specifies the
+%   name of the output file.
+%
+%   EXPORTFIG(...,PARAM1,VAL1,PARAM2,VAL2,...) specifies
+%   parameters that control various characteristics of the output
+%   file.
+%
+%   Format Parameter:
+%     'Format'  one of the strings 'eps','eps2','jpeg','png','preview'
+%          specifies the output format. Defaults to 'eps'.
+%          The output format 'preview' does not generate an output
+%          file but instead creates a new figure window with a
+%          preview of the exported figure. In this case the
+%          FILENAME parameter is ignored.
+%
+%     'Preview' one of the strings 'none', 'tiff'
+%          specifies a preview for EPS files. Defaults to 'none'.
+%
+%   Size Parameters:
+%     'Width'   a positive scalar
+%          specifies the width in the figure's PaperUnits
+%     'Height'  a positive scalar
+%          specifies the height in the figure's PaperUnits
+%
+%     Specifying only one dimension sets the other dimension
+%     so that the exported aspect ratio is the same as the
+%     figure's current aspect ratio. 
+%     If neither dimension is specified the size defaults to 
+%     the width and height from the figure's PaperPosition. 
+%           
+%   Rendering Parameters:
+%     'Color'     one of the strings 'bw', 'gray', 'cmyk'
+%         'bw'    specifies that lines and text are exported in
+%                 black and all other objects in grayscale
+%         'gray'  specifies that all objects are exported in grayscale
+%         'cmyk'  specifies that all objects are exported in color
+%                 using the CMYK color space
+%     'Renderer'  one of the strings 'painters', 'zbuffer', 'opengl'
+%         specifies the renderer to use
+%     'Resolution'   a positive scalar
+%         specifies the resolution in dots-per-inch.
+%     
+%     The default color setting is 'bw'.
+%
+%   Font Parameters:
+%     'FontMode'     one of the strings 'scaled', 'fixed'
+%     'FontSize'     a positive scalar
+%          in 'scaled' mode multiplies with the font size of each
+%          text object to obtain the exported font size
+%          in 'fixed' mode specifies the font size of all text
+%          objects in points
+%     'FontEncoding' one of the strings 'latin1', 'adobe'
+%          specifies the character encoding of the font
+%
+%     If FontMode is 'scaled' but FontSize is not specified then a
+%     scaling factor is computed from the ratio of the size of the
+%     exported figure to the size of the actual figure. The minimum
+%     font size allowed after scaling is 5 points.
+%     If FontMode is 'fixed' but FontSize is not specified then the
+%     exported font sizes of all text objects is 7 points.
+%
+%     The default 'FontMode' setting is 'scaled'.
+%
+%   Line Width Parameters:
+%     'LineMode'     one of the strings 'scaled', 'fixed'
+%     'LineWidth'    a positive scalar
+%          the semantics of LineMode and LineWidth are exactly the
+%          same as FontMode and FontSize, except that they apply
+%          to line widths instead of font sizes. The minimum line
+%          width allowed after scaling is 0.5 points.
+%          If LineMode is 'fixed' but LineWidth is not specified 
+%          then the exported line width of all line objects is 1
+%          point. 
+%
+%   Examples:
+%     exportfig(gcf,'fig1.eps','height',3);
+%       Exports the current figure to the file named 'fig1.eps' with
+%       a height of 3 inches (assuming the figure's PaperUnits is 
+%       inches) and an aspect ratio the same as the figure's aspect
+%       ratio on screen.
+%
+%     exportfig(gcf, 'fig2.eps', 'FontMode', 'fixed',...
+%                'FontSize', 10, 'color', 'cmyk' );
+%       Exports the current figure to 'fig2.eps' in color with all
+%       text in 10 point fonts. The size of the exported figure is
+%       the figure's PaperPosition width and height.
+
+
+if (nargin < 2)
+  error('Too few input arguments');
+end
+
+% exportfig(H, filename, ...)
+H = varargin{1};
+if ~ishandle(H) | ~strcmp(get(H,'type'), 'figure')
+  error('First argument must be a handle to a figure.');
+end
+filename = varargin{2};
+if ~ischar(filename)
+  error('Second argument must be a string.');
+end
+paramPairs = varargin(3:end);
+
+% Do some validity checking on param-value pairs
+if (rem(length(paramPairs),2) ~= 0)
+  error(['Invalid input syntax. Optional parameters and values' ...
+	 ' must be in pairs.']);
+end
+
+% Option defaults (see help text above).
+format = 'eps';
+preview = 'none';
+width = -1;
+height = -1;
+color = 'bw';
+fontsize = -1;
+fontmode='scaled';
+linewidth = -1;
+linemode=[];
+fontencoding = 'latin1';
+renderer = [];
+resolution = [];
+
+% Process param-value pairs
+args = {};
+for k = 1:2:length(paramPairs)
+  param = lower(paramPairs{k});
+  if (~ischar(param))
+    error('Optional parameter names must be strings');
+  end
+  value = paramPairs{k+1};
+  
+  switch (param)
+   case 'format'
+    format = value;
+    if (~strcmp(format,{'eps','eps2','jpeg','png','preview'}))
+      error(['Format must be ''eps'', ''eps2'', ''jpeg'', ''png'' or' ...
+	     ' ''preview''.']);
+    end
+   case 'preview'
+    preview = value;
+    if (~strcmp(preview,{'none','tiff'}))
+      error('Preview must be ''none'' or ''tiff''.');
+    end
+   case 'width'
+    width = LocalToNum(value);
+    if(~LocalIsPositiveScalar(width))
+      error('Width must be a numeric scalar > 0');
+    end
+   case 'height'
+    height = LocalToNum(value);
+    if(~LocalIsPositiveScalar(height))
+      error('Height must be a numeric scalar > 0');
+    end
+   case 'color'
+    color = lower(value);
+    if (~strcmp(color,{'bw','gray','cmyk'}))
+      error('Color must be ''bw'', ''gray'' or ''cmyk''.');
+    end
+   case 'fontmode'
+    fontmode = lower(value);
+    if (~strcmp(fontmode,{'scaled','fixed'}))
+      error('FontMode must be ''scaled'' or ''fixed''.');
+    end
+   case 'fontsize'
+    fontsize = LocalToNum(value);
+    if(~LocalIsPositiveScalar(fontsize))
+      error('FontSize must be a numeric scalar > 0');
+    end
+   case 'fontencoding'
+    fontencoding = lower(value);
+    if (~strcmp(fontencoding,{'latin1','adobe'}))
+      error('FontEncoding must be ''latin1'' or ''adobe''.');
+    end
+   case 'linemode'
+    linemode = lower(value);
+    if (~strcmp(linemode,{'scaled','fixed'}))
+      error('LineMode must be ''scaled'' or ''fixed''.');
+    end
+   case 'linewidth'
+    linewidth = LocalToNum(value);
+    if(~LocalIsPositiveScalar(linewidth))
+      error('LineWidth must be a numeric scalar > 0');
+    end
+   case 'renderer'
+    renderer = lower(value);
+    if (~strcmp(renderer,{'painters','zbuffer','opengl'}))
+      error('Renderer must be ''painters'', ''zbuffer'' or ''opengl''.');
+    end
+   case 'resolution'
+    resolution = LocalToNum(value);
+    if ~(isnumeric(value) & (prod(size(value)) == 1) & (value >= 0));
+      error('Resolution must be a numeric scalar >= 0');
+    end
+   otherwise
+    error(['Unrecognized option ' param '.']);
+  end
+end
+
+% Collect the handle sets whose properties may be rewritten for export.
+allLines  = findall(H, 'type', 'line');
+allText   = findall(H, 'type', 'text');
+allAxes   = findall(H, 'type', 'axes');
+allImages = findall(H, 'type', 'image');
+allLights = findall(H, 'type', 'light');
+allPatch  = findall(H, 'type', 'patch');
+allSurf   = findall(H, 'type', 'surface');
+allRect   = findall(H, 'type', 'rectangle');
+allFont   = [allText; allAxes];
+allColor  = [allLines; allText; allAxes; allLights];
+allMarker = [allLines; allPatch; allSurf];
+allEdge   = [allPatch; allSurf];
+allCData  = [allImages; allPatch; allSurf];
+
+% Undo stack of (object, property, value) triples restored after printing.
+old.objs = {};
+old.prop = {};
+old.values = {};
+
+% Process format and preview parameter
+showPreview = strcmp(format,'preview');
+if showPreview
+  format = 'png';
+  % NOTE(review): 'tempName' is not defined anywhere in this file and
+  % MATLAB's built-in is lowercase 'tempname'; 'preview' mode likely
+  % errors here -- TODO confirm.
+  filename = [tempName '.png'];
+end
+if strncmp(format,'eps',3) & ~strcmp(preview,'none')
+  args = {args{:}, ['-' preview]};
+end
+
+hadError = 0;
+try
+  % Process size parameters
+  paperPos = get(H, 'PaperPosition');
+  old = LocalPushOldData(old, H, 'PaperPosition', paperPos);
+  figureUnits = get(H, 'Units');
+  set(H, 'Units', get(H,'PaperUnits'));
+  figurePos = get(H, 'Position');
+  aspectRatio = figurePos(3)/figurePos(4);
+  set(H, 'Units', figureUnits);
+  if (width == -1) & (height == -1)
+    width = paperPos(3);
+    height = paperPos(4);
+  elseif (width == -1)
+    width = height * aspectRatio;
+  elseif (height == -1)
+    height = width / aspectRatio;
+  end
+  set(H, 'PaperPosition', [0 0 width height]);
+  paperPosMode = get(H, 'PaperPositionMode');
+  old = LocalPushOldData(old, H, 'PaperPositionMode', paperPosMode);
+  set(H, 'PaperPositionMode', 'manual');
+
+  % Process rendering parameters
+  switch (color)
+   case {'bw', 'gray'}
+    if ~strcmp(color,'bw') & strncmp(format,'eps',3)
+      format = [format 'c'];
+    end
+    args = {args{:}, ['-d' format]};
+
+    %compute and set gray colormap
+    oldcmap = get(H,'Colormap');
+    newgrays = 0.30*oldcmap(:,1) + 0.59*oldcmap(:,2) + 0.11*oldcmap(:,3);
+    newcmap = [newgrays newgrays newgrays];
+    old = LocalPushOldData(old, H, 'Colormap', oldcmap);
+    set(H, 'Colormap', newcmap);
+
+    %compute and set ColorSpec and CData properties
+    old = LocalUpdateColors(allColor, 'color', old);
+    old = LocalUpdateColors(allAxes, 'xcolor', old);
+    old = LocalUpdateColors(allAxes, 'ycolor', old);
+    old = LocalUpdateColors(allAxes, 'zcolor', old);
+    old = LocalUpdateColors(allMarker, 'MarkerEdgeColor', old);
+    old = LocalUpdateColors(allMarker, 'MarkerFaceColor', old);
+    old = LocalUpdateColors(allEdge, 'EdgeColor', old);
+    old = LocalUpdateColors(allEdge, 'FaceColor', old);
+    old = LocalUpdateColors(allCData, 'CData', old);
+    
+   case 'cmyk'
+    if strncmp(format,'eps',3)
+      format = [format 'c'];
+      args = {args{:}, ['-d' format], '-cmyk'};
+    else
+      args = {args{:}, ['-d' format]};
+    end
+   otherwise
+    error('Invalid Color parameter');
+  end
+  if (~isempty(renderer))
+    args = {args{:}, ['-' renderer]};
+  end
+  if (~isempty(resolution)) | ~strncmp(format,'eps',3)
+    if isempty(resolution)
+      resolution = 0;
+    end
+    args = {args{:}, ['-r' int2str(resolution)]};
+  end
+
+  % Process font parameters
+  if (~isempty(fontmode))
+    oldfonts = LocalGetAsCell(allFont,'FontSize');
+    switch (fontmode)
+     case 'fixed'
+      oldfontunits = LocalGetAsCell(allFont,'FontUnits');
+      old = LocalPushOldData(old, allFont, {'FontUnits'}, oldfontunits);
+      set(allFont,'FontUnits','points');
+      if (fontsize == -1)
+	set(allFont,'FontSize',7);
+      else
+	set(allFont,'FontSize',fontsize);
+      end
+     case 'scaled'
+      if (fontsize == -1)
+	wscale = width/figurePos(3);
+	hscale = height/figurePos(4);
+	scale = min(wscale, hscale);
+      else
+	scale = fontsize;
+      end
+      newfonts = LocalScale(oldfonts,scale,5);
+      set(allFont,{'FontSize'},newfonts);
+     otherwise
+      error('Invalid FontMode parameter');
+    end
+    % make sure we push the size after the units
+    old = LocalPushOldData(old, allFont, {'FontSize'}, oldfonts);
+  end
+  if strcmp(fontencoding,'adobe') & strncmp(format,'eps',3)
+    args = {args{:}, '-adobecset'};
+  end
+
+  % Process linewidth parameters
+  if (~isempty(linemode))
+    oldlines = LocalGetAsCell(allMarker,'LineWidth');
+    old = LocalPushOldData(old, allMarker, {'LineWidth'}, oldlines);
+    switch (linemode)
+     case 'fixed'
+      if (linewidth == -1)
+	set(allMarker,'LineWidth',1);
+      else
+	set(allMarker,'LineWidth',linewidth);
+      end
+     case 'scaled'
+      if (linewidth == -1)
+	wscale = width/figurePos(3);
+	hscale = height/figurePos(4);
+	scale = min(wscale, hscale);
+      else
+	scale = linewidth;
+      end
+      newlines = LocalScale(oldlines, scale, 0.5);
+      set(allMarker,{'LineWidth'},newlines);
+     otherwise
+      error('Invalid LineMode parameter');
+    end
+  end
+
+  % Export
+  print(H, filename, args{:});
+
+catch
+  % Defer the error until the figure has been restored below.
+  hadError = 1;
+end
+
+% Restore figure settings
+for n=1:length(old.objs)
+  set(old.objs{n}, old.prop{n}, old.values{n});
+end
+
+if hadError
+  error(deblank(lasterr));
+end
+
+% Show preview if requested
+if showPreview
+  X = imread(filename,'png');
+  delete(filename);
+  f = figure( 'Name', 'Preview', ...
+	      'Menubar', 'none', ...
+	      'NumberTitle', 'off', ...
+	      'Visible', 'off');
+  image(X);
+  axis image;
+  ax = findobj(f, 'type', 'axes');
+  set(ax, 'Units', get(H,'PaperUnits'), ...
+	  'Position', [0 0 width height], ...
+	  'Visible', 'off');
+  set(ax, 'Units', 'pixels');
+  axesPos = get(ax,'Position');
+  figPos = get(f,'Position');
+  rootSize = get(0,'ScreenSize');
+  figPos(3:4) = axesPos(3:4);
+  % Keep the preview window on screen.
+  if figPos(1) + figPos(3) > rootSize(3)
+    figPos(1) = rootSize(3) - figPos(3) - 50;
+  end
+  if figPos(2) + figPos(4) > rootSize(4)
+    figPos(2) = rootSize(4) - figPos(4) - 50;
+  end
+  set(f, 'Position',figPos, ...
+	 'Visible', 'on');
+end
+
+%
+%  Local Functions
+%
+
+% Append one (objects, property, values) undo record onto the restore stack.
+function outData = LocalPushOldData(inData, objs, prop, values)
+outData.objs = {inData.objs{:}, objs};
+outData.prop = {inData.prop{:}, prop};
+outData.values = {inData.values{:}, values};
+
+% Fetch a property for a handle array, always returning a cell array.
+function cellArray = LocalGetAsCell(fig,prop);
+cellArray = get(fig,prop);
+if (~isempty(cellArray)) & (~iscell(cellArray))
+  cellArray = {cellArray};
+end
+
+% Scale each value in a cell array by 'scale', clamping to 'minValue'.
+function newArray = LocalScale(inArray, scale, minValue)
+n = length(inArray);
+newArray = cell(n,1);
+for k=1:n
+  newArray{k} = max(minValue,scale*inArray{k}(1));
+end
+
+% Map a cell array of ColorSpec values (char codes or RGB triples) to gray
+% RGB triples; 'none'/empty entries pass through unchanged.
+function newArray = LocalMapToGray(inArray);
+n = length(inArray);
+newArray = cell(n,1);
+for k=1:n
+  color = inArray{k};
+  if (~isempty(color))
+    if ischar(color)
+      switch color(1)
+       case 'y'
+	color = [1 1 0];
+       case 'm'
+	color = [1 0 1];
+       case 'c'
+	color = [0 1 1];
+       case 'r'
+	color = [1 0 0];
+       case 'g'
+	color = [0 1 0];
+       case 'b'
+	color = [0 0 1];
+       case 'w'
+	color = [1 1 1];
+       case 'k'
+	color = [0 0 0];
+       otherwise
+	newArray{k} = color;
+      end
+    end
+    if ~ischar(color)
+      % Luma-weighted grayscale conversion.
+      color = 0.30*color(1) + 0.59*color(2) + 0.11*color(3);
+    end
+  end
+  if isempty(color) | ischar(color)
+    newArray{k} = color;
+  else
+    newArray{k} = [color color color];
+  end
+end
+
+% Convert MxNx3 double CData arrays to grayscale; other CData is unchanged.
+function newArray = LocalMapCData(inArray);
+n = length(inArray);
+newArray = cell(n,1);
+for k=1:n
+  color = inArray{k};
+  if (ndims(color) == 3) & isa(color,'double')
+    gray = 0.30*color(:,:,1) + 0.59*color(:,:,2) + 0.11*color(:,:,3);
+    color(:,:,1) = gray;
+    color(:,:,2) = gray;
+    color(:,:,3) = gray;
+  end
+  newArray{k} = color;
+end
+
+% Record a property's current values on the undo stack, then replace them
+% with their grayscale equivalents.
+function outData = LocalUpdateColors(inArray, prop, inData)
+value = LocalGetAsCell(inArray,prop);
+outData.objs = {inData.objs{:}, inArray};
+outData.prop = {inData.prop{:}, {prop}};
+outData.values = {inData.values{:}, value};
+if (~isempty(value))
+  if strcmp(prop,'CData') 
+    value = LocalMapCData(value);
+  else
+    value = LocalMapToGray(value);
+  end
+  set(inArray,{prop},value);
+end
+
+% True iff value is a numeric scalar greater than zero.
+function bool = LocalIsPositiveScalar(value)
+bool = isnumeric(value) & ...
+       prod(size(value)) == 1 & ...
+       value > 0;
+
+% Convert a string argument to a number; numeric input passes through.
+function value = LocalToNum(value)
+if ischar(value)
+  value = str2num(value);
+end
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/normal_async_test.cc b/trunk/src/modules/video_coding/codecs/test_framework/normal_async_test.cc
new file mode 100644
index 0000000..e379758
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -0,0 +1,593 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_async_test.h"
+
+#include <assert.h>
+#include <string.h>
+#include <sstream>
+#include <queue>
+
+#include "gtest/gtest.h"
+#include "tick_util.h"
+#include "testsupport/fileutils.h"
+#include "typedefs.h"
+
+using namespace webrtc;
+
+// NOTE on all constructors below: the base-class NormalTest constructor is
+// now given the intended test number directly (a literal or the constructor
+// parameter). The original code passed the _testNo member, but members are
+// not initialized until after the base constructor has run, so the base
+// class received an indeterminate value. _lengthEncFrame(0) is also added
+// to the first two constructors for consistency with the other three.
+
+// Default constructor: test number 1, codec-default bit rate.
+NormalAsyncTest::NormalAsyncTest()
+:
+NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
+           1),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(1),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+// Constructor with an explicit target bit rate; test number 1.
+NormalAsyncTest::NormalAsyncTest(WebRtc_UWord32 bitRate)
+:
+NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
+           bitRate, 1),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(1),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+// Constructor with custom name/description and explicit test number.
+NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
+                                 unsigned int testNo)
+:
+NormalTest(name, description, testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+// Constructor with name/description, target bit rate and test number.
+NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
+                                 WebRtc_UWord32 bitRate, unsigned int testNo)
+:
+NormalTest(name, description, bitRate, testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+// As above, additionally specifying the simulated round-trip time in
+// frames, used to delay SLI/PLI feedback delivery to the encoder.
+NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
+                                 WebRtc_UWord32 bitRate, unsigned int testNo,
+                                 unsigned int rttFrames)
+:
+NormalTest(name, description, bitRate, testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(rttFrames),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+// Prepares the input/output files for a test run. Output names default to
+// out_normaltest<N>.yuv / encoded_normaltest<N>.yuv when not configured.
+// Exits the process if any file cannot be opened.
+void
+NormalAsyncTest::Setup()
+{
+    Test::Setup();
+    std::stringstream ss;
+    std::string strTestNo;
+    ss << _testNo;
+    ss >> strTestNo;
+
+    // Check if settings exist. Otherwise use defaults.
+    if (_outname == "")
+    {
+        _outname = webrtc::test::OutputPath() + "out_normaltest" + strTestNo +
+            ".yuv";
+    }
+
+    if (_encodedName == "")
+    {
+        _encodedName = webrtc::test::OutputPath() + "encoded_normaltest" +
+            strTestNo + ".yuv";
+    }
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    char mode[3] = "wb";
+    if (_appendNext)
+    {
+        // Subsequent runs append to the decoded output instead of truncating.
+        strncpy(mode, "ab", 3);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    _appendNext = true;
+}
+
+// Closes the files opened by Setup().
+void
+NormalAsyncTest::Teardown()
+{
+    Test::Teardown();
+    fclose(_sourceFile);
+    fclose(_encodedFile);
+    fclose(_decodedFile);
+}
+
+// The tuple owns both the frame buffer and the codec-specific info copy.
+FrameQueueTuple::~FrameQueueTuple()
+{
+    if (_codecSpecificInfo != NULL)
+    {
+        delete _codecSpecificInfo;
+    }
+    if (_frame != NULL)
+    {
+        delete _frame;
+    }
+}
+
+// Enqueues an encoded frame; the queue takes ownership of both arguments.
+void FrameQueue::PushFrame(TestVideoEncodedBuffer *frame,
+                           webrtc::CodecSpecificInfo* codecSpecificInfo)
+{
+    WriteLockScoped cs(_queueRWLock);
+    _frameBufferQueue.push(new FrameQueueTuple(frame, codecSpecificInfo));
+}
+
+// Dequeues the oldest frame, or returns NULL when the queue is empty.
+// Ownership of the returned tuple passes to the caller.
+FrameQueueTuple* FrameQueue::PopFrame()
+{
+    WriteLockScoped cs(_queueRWLock);
+    if (_frameBufferQueue.empty())
+    {
+        return NULL;
+    }
+    FrameQueueTuple* tuple = _frameBufferQueue.front();
+    _frameBufferQueue.pop();
+    return tuple;
+}
+
+// Returns true when no frames are queued (read lock only).
+bool FrameQueue::Empty()
+{
+    ReadLockScoped cs(_queueRWLock);
+    return _frameBufferQueue.empty();
+}
+
+// Returns the total number of encoded bytes delivered via Encoded().
+WebRtc_UWord32 VideoEncodeCompleteCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+// Encoder output callback: copies the encoded frame (and its codec-specific
+// info) into the frame queue and appends it to the encoded file if open.
+WebRtc_Word32
+VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
+                                     const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                                     const webrtc::RTPFragmentationHeader*
+                                     fragmentation)
+{
+    _test.Encoded(encodedImage);
+    TestVideoEncodedBuffer *newBuffer = new TestVideoEncodedBuffer();
+    // Allocate the full capacity (_size), not just the payload (_length).
+    //newBuffer->VerifyAndAllocate(encodedImage._length);
+    newBuffer->VerifyAndAllocate(encodedImage._size);
+    _encodedBytes += encodedImage._length;
+    // If _frameQueue would have been a fixed sized buffer we could have asked
+    // it for an empty frame and then just do:
+    // emptyFrame->SwapBuffers(encodedBuffer);
+    // This is how it should be done in Video Engine to save in on memcpys
+    webrtc::CodecSpecificInfo* codecSpecificInfoCopy =
+        _test.CopyCodecSpecificInfo(codecSpecificInfo);
+    _test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
+    if (_encodedFile != NULL)
+    {
+        fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(), _encodedFile);
+    }
+    _frameQueue->PushFrame(newBuffer, codecSpecificInfoCopy);
+    return 0;
+}
+
+// Returns the total number of decoded bytes delivered via Decoded().
+WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+// Decoder output callback: notifies the test and appends the raw frame to
+// the decoded output file if open.
+WebRtc_Word32
+VideoDecodeCompleteCallback::Decoded(RawImage& image)
+{
+    _test.Decoded(image);
+    _decodedBytes += image._length;
+    if (_decodedFile != NULL)
+    {
+        fwrite(image._buffer, 1, image._length, _decodedFile);
+    }
+    return 0;
+}
+
+// Forwards decoded-reference-frame feedback to the owning test.
+WebRtc_Word32
+VideoDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 pictureId)
+{
+    return _test.ReceivedDecodedReferenceFrame(pictureId);
+}
+
+// Forwards decoded-frame feedback to the owning test.
+WebRtc_Word32
+VideoDecodeCompleteCallback::ReceivedDecodedFrame(
+    const WebRtc_UWord64 pictureId)
+{
+    return _test.ReceivedDecodedFrame(pictureId);
+}
+
+// Records encode completion time and accumulates per-frame encode pipeline
+// latency, keyed by the frame's RTP timestamp.
+void
+NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
+{
+    _encodeCompleteTime = tGetTime();
+    _encFrameCnt++;
+    _totalEncodePipeTime += _encodeCompleteTime -
+        _encodeTimes[encodedImage._timeStamp];
+}
+
+// Records decode completion time, decode pipeline latency and the decoded
+// frame's dimensions.
+void
+NormalAsyncTest::Decoded(const RawImage& decodedImage)
+{
+    _decodeCompleteTime = tGetTime();
+    _decFrameCnt++;
+    _totalDecodePipeTime += _decodeCompleteTime -
+        _decodeTimes[decodedImage._timeStamp];
+    _decodedWidth = decodedImage._width;
+    _decodedHeight = decodedImage._height;
+}
+
+// Runs the full encode/decode loop over foreman_cif (CIF @ 30 fps): encodes
+// frames one at a time, drains the encoded-frame queue (applying simulated
+// packet loss), then prints and logs rate/timing statistics.
+void
+NormalAsyncTest::Perform()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    CodecSettings(352, 288, 30, _bitRate);
+    Setup();
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    if(_encoder->InitEncode(&_inst, 1, 1440) < 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _decoder->InitDecode(&_inst, 1);
+    FrameQueue frameQueue;
+    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
+    _encoder->RegisterEncodeCompleteCallback(&encCallback);
+    _decoder->RegisterDecodeCompleteCallback(&decCallback);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _totalEncodePipeTime = _totalDecodePipeTime = 0;
+    bool complete = false;
+    _framecnt = 0;
+    _encFrameCnt = 0;
+    _decFrameCnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    double starttime = tGetTime();
+    while (!complete)
+    {
+        CodecSpecific_InitBitrate();
+        complete = Encode();
+        if (!frameQueue.Empty() || complete)
+        {
+            while (!frameQueue.Empty())
+            {
+                _frameToDecode =
+                    static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+                int lost = DoPacketLoss();
+                if (lost == 2)
+                {
+                    // Lost the whole frame, continue
+                    _missingFrames = true;
+                    delete _frameToDecode;
+                    _frameToDecode = NULL;
+                    continue;
+                }
+                int ret = Decode(lost);
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                if (ret < 0)
+                {
+                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                    exit(EXIT_FAILURE);
+                }
+                else if (ret == 0)
+                {
+                    _framecnt++;
+                }
+                else
+                {
+                    fprintf(stderr,
+                        "\n\nPositive return value from decode!\n\n");
+                }
+            }
+        }
+    }
+    double endtime = tGetTime();
+    double totalExecutionTime = endtime - starttime;
+    printf("Total execution time: %.1f s\n", totalExecutionTime);
+    _sumEncBytes = encCallback.EncodedBytes();
+    // NOTE(review): the averages below divide by _encFrameCnt/_decFrameCnt;
+    // a run that encodes or decodes zero frames would divide by zero.
+    double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
+    double avgEncTime = _totalEncodeTime / _encFrameCnt;
+    double avgDecTime = _totalDecodeTime / _decFrameCnt;
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
+    printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
+    printf("Average encode pipeline time: %.1f ms\n",
+           1000 * _totalEncodePipeTime / _encFrameCnt);
+    printf("Average decode pipeline  time: %.1f ms\n",
+           1000 * _totalDecodePipeTime / _decFrameCnt);
+    printf("Number of encoded frames: %u\n", _encFrameCnt);
+    printf("Number of decoded frames: %u\n", _decFrameCnt);
+    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
+        _bitRate << " kbps" << std::endl;
+    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    _encoder->Release();
+    _decoder->Release();
+    Teardown();
+}
+
+// Reads one frame from the source file and encodes it. Returns true when
+// the source file is exhausted (test complete), false otherwise. Also
+// drains the simulated feedback channel: SLI/PLI signals queued by Decode()
+// become visible to the encoder after _rttFrames frames, and a received PLI
+// forces the next frame to be a key frame.
+bool
+NormalAsyncTest::Encode()
+{
+    _lengthEncFrame = 0;
+    EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+    // Timestamps advance at the 90 kHz RTP clock rate.
+    _inputVideoBuffer.SetTimeStamp((unsigned int)
+        (_encFrameCnt * 9e4 / _inst.maxFramerate));
+    _inputVideoBuffer.SetWidth(_inst.width);
+    _inputVideoBuffer.SetHeight(_inst.height);
+    RawImage rawImage;
+    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
+    if (feof(_sourceFile) != 0)
+    {
+        return true;
+    }
+    _encodeCompleteTime = 0;
+    _encodeTimes[rawImage._timeStamp] = tGetTime();
+    VideoFrameType frameType = kDeltaFrame;
+
+    // check SLI queue
+    _hasReceivedSLI = false;
+    while (!_signalSLI.empty() && _signalSLI.front().delay == 0)
+    {
+        // SLI message has arrived at sender side
+        _hasReceivedSLI = true;
+        _pictureIdSLI = _signalSLI.front().id;
+        _signalSLI.pop_front();
+    }
+    // decrement SLI queue times
+    for (std::list<fbSignal>::iterator it = _signalSLI.begin();
+        it !=_signalSLI.end(); it++)
+    {
+        (*it).delay--;
+    }
+
+    // check PLI queue
+    _hasReceivedPLI = false;
+    while (!_signalPLI.empty() && _signalPLI.front().delay == 0)
+    {
+        // PLI message has arrived at sender side
+        _hasReceivedPLI = true;
+        _signalPLI.pop_front();
+    }
+    // decrement PLI queue times
+    for (std::list<fbSignal>::iterator it = _signalPLI.begin();
+        it != _signalPLI.end(); it++)
+    {
+        (*it).delay--;
+    }
+
+    if (_hasReceivedPLI)
+    {
+        // respond to PLI by encoding a key frame
+        frameType = kKeyFrame;
+        _hasReceivedPLI = false;
+        _hasReceivedSLI = false; // don't trigger both at once
+    }
+
+    webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
+    int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameType);
+    EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
+    if (codecSpecificInfo != NULL)
+    {
+        delete codecSpecificInfo;
+        codecSpecificInfo = NULL;
+    }
+    // If the encode callback already fired, use its completion time;
+    // otherwise fall back to "now" (synchronous-style accounting).
+    if (_encodeCompleteTime > 0)
+    {
+        _totalEncodeTime += _encodeCompleteTime -
+            _encodeTimes[rawImage._timeStamp];
+    }
+    else
+    {
+        _totalEncodeTime += tGetTime() - _encodeTimes[rawImage._timeStamp];
+    }
+    assert(ret >= 0);
+    return false;
+}
+
+// Decodes the frame currently held in _frameToDecode. lossValue != 0 marks
+// the frame as incomplete. Decoder error codes are translated into
+// simulated SLI/PLI feedback (queued with _rttFrames delay) and then
+// reported as WEBRTC_VIDEO_CODEC_OK. Frames are skipped while waiting for a
+// key frame after a decoder error.
+int
+NormalAsyncTest::Decode(int lossValue)
+{
+    _sumEncBytes += _frameToDecode->_frame->GetLength();
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+    encodedImage._completeFrame = !lossValue;
+    _decodeCompleteTime = 0;
+    _decodeTimes[encodedImage._timeStamp] = tGetTime();
+    int ret = WEBRTC_VIDEO_CODEC_OK;
+    if (!_waitForKey || encodedImage._frameType == kKeyFrame)
+    {
+        _waitForKey = false;
+        ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                               _frameToDecode->_codecSpecificInfo);
+
+        if (ret >= 0)
+        {
+            _missingFrames = false;
+        }
+    }
+
+    // check for SLI
+    if (ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
+    {
+        // add an SLI feedback to the feedback "queue"
+        // to be delivered to encoder with _rttFrames delay
+        _signalSLI.push_back(fbSignal(_rttFrames,
+            static_cast<WebRtc_UWord8>((_lastDecPictureId) & 0x3f))); // 6 lsb
+
+        ret = WEBRTC_VIDEO_CODEC_OK;
+    }
+    else if (ret == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI)
+    {
+        // add an SLI feedback to the feedback "queue"
+        // to be delivered to encoder with _rttFrames delay
+        _signalSLI.push_back(fbSignal(_rttFrames,
+            static_cast<WebRtc_UWord8>((_lastDecPictureId + 1) & 0x3f)));//6 lsb
+
+        ret = WEBRTC_VIDEO_CODEC_OK;
+    }
+    else if (ret == WEBRTC_VIDEO_CODEC_ERROR)
+    {
+        // wait for new key frame
+        // add an PLI feedback to the feedback "queue"
+        // to be delivered to encoder with _rttFrames delay
+        _signalPLI.push_back(fbSignal(_rttFrames, 0 /* picId not used*/));
+        _waitForKey = true;
+
+        ret = WEBRTC_VIDEO_CODEC_OK;
+    }
+
+    // Use the decode callback's completion time if it already fired,
+    // otherwise fall back to "now".
+    if (_decodeCompleteTime > 0)
+    {
+        _totalDecodeTime += _decodeCompleteTime -
+            _decodeTimes[encodedImage._timeStamp];
+    }
+    else
+    {
+        _totalDecodeTime += tGetTime() - _decodeTimes[encodedImage._timeStamp];
+    }
+    return ret;
+}
+
+// Deep-copies the codec-specific info attached to an encoded frame so the
+// frame queue can own an independent copy. Returns NULL when there is
+// nothing to copy -- the encoder callback passes its pointer straight
+// through, and downstream code (FrameQueueTuple) already handles NULL, so
+// the original unconditional dereference could crash on a NULL input.
+webrtc::CodecSpecificInfo*
+NormalAsyncTest::CopyCodecSpecificInfo(
+        const webrtc::CodecSpecificInfo* codecSpecificInfo) const
+{
+    if (codecSpecificInfo == NULL)
+    {
+        return NULL;
+    }
+    webrtc::CodecSpecificInfo* info = new webrtc::CodecSpecificInfo;
+    *info = *codecSpecificInfo;
+    return info;
+}
+
+// Sets the encoder target bit rate; 0 means "unspecified" and falls back
+// to a 600 kbps default. The frame rate comes from the codec instance.
+void NormalAsyncTest::CodecSpecific_InitBitrate()
+{
+    if (_bitRate == 0)
+    {
+        _encoder->SetRates(600, _inst.maxFramerate);
+    }
+    else
+    {
+        _encoder->SetRates(_bitRate, _inst.maxFramerate);
+    }
+}
+
+// Copies an encoded frame's payload and metadata (frame type, dimensions,
+// timestamp) into a test buffer.
+void NormalAsyncTest::CopyEncodedImage(TestVideoEncodedBuffer& dest,
+                                       EncodedImage& src,
+                                       void* /*codecSpecificInfo*/) const
+{
+    dest.CopyBuffer(src._length, src._buffer);
+    dest.SetFrameType(src._frameType);
+    dest.SetCaptureWidth((WebRtc_UWord16)src._encodedWidth);
+    dest.SetCaptureHeight((WebRtc_UWord16)src._encodedHeight);
+    dest.SetTimeStamp(src._timeStamp);
+}
+
+// Records the picture ID of the most recent decoded reference frame.
+WebRtc_Word32 NormalAsyncTest::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 pictureId) {
+  _lastDecRefPictureId = pictureId;
+  return 0;
+}
+
+// Records the picture ID of the most recent decoded frame.
+WebRtc_Word32 NormalAsyncTest::ReceivedDecodedFrame(
+    const WebRtc_UWord64 pictureId) {
+  _lastDecPictureId = pictureId;
+  return 0;
+}
+
+// Returns the current wall-clock time in seconds (millisecond resolution).
+double
+NormalAsyncTest::tGetTime()
+{// return time in sec
+    return ((double) (TickTime::MillisecondTimestamp())/1000);
+ }
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/normal_async_test.h b/trunk/src/modules/video_coding/codecs/test_framework/normal_async_test.h
new file mode 100644
index 0000000..e520601
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/normal_async_test.h
@@ -0,0 +1,189 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
+
+#include "common_types.h"
+
+#include "normal_test.h"
+#include "rw_lock_wrapper.h"
+#include <list>
+#include <map>
+#include <queue>
+
+// Pairs an encoded frame with its (possibly NULL) codec-specific info.
+// The destructor is defined in the .cc file; presumably it frees both
+// pointers — confirm there before assuming ownership semantics.
+class FrameQueueTuple
+{
+public:
+    FrameQueueTuple(TestVideoEncodedBuffer *frame,
+                    const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL)
+    :
+        _frame(frame),
+        _codecSpecificInfo(codecSpecificInfo)
+    {};
+    ~FrameQueueTuple();
+    TestVideoEncodedBuffer*          _frame;
+    const webrtc::CodecSpecificInfo* _codecSpecificInfo;
+};
+
+// Lock-protected FIFO of encoded frames handed from the encode callback
+// to the decoding side. PushFrame/PopFrame/Empty are defined in the .cc.
+class FrameQueue
+{
+public:
+    FrameQueue()
+    :
+        _queueRWLock(*webrtc::RWLockWrapper::CreateRWLock()),
+        _prevTS(-1)
+    {
+    }
+
+    ~FrameQueue()
+    {
+        // The lock is held by reference but owned by this object.
+        delete &_queueRWLock;
+    }
+
+    // NOTE(review): queue entries are FrameQueueTuples; whether PushFrame
+    // takes ownership of its arguments must be confirmed in the .cc file.
+    void PushFrame(TestVideoEncodedBuffer *frame,
+                   webrtc::CodecSpecificInfo* codecSpecificInfo = NULL);
+    FrameQueueTuple* PopFrame();
+    bool Empty();
+
+private:
+    webrtc::RWLockWrapper&                       _queueRWLock;
+    std::queue<FrameQueueTuple *>     _frameBufferQueue;
+    WebRtc_Word64                       _prevTS;  // previous timestamp; see .cc for usage
+};
+
+// feedback signal to encoder
+struct fbSignal
+{
+    fbSignal(int d, WebRtc_UWord8 pid) : delay(d), id(pid) {};
+    int         delay;    // delay before delivery (in frames, by usage with _rttFrames — confirm)
+    WebRtc_UWord8 id;     // picture id the signal refers to
+};
+
+// Asynchronous codec test: frames are encoded and decoded through
+// callback objects, with per-frame timing tracked in the maps below.
+class NormalAsyncTest : public NormalTest
+{
+public:
+    NormalAsyncTest();
+    NormalAsyncTest(WebRtc_UWord32 bitRate);
+    NormalAsyncTest(std::string name, std::string description,
+                    unsigned int testNo);
+    NormalAsyncTest(std::string name, std::string description,
+                    WebRtc_UWord32 bitRate, unsigned int testNo);
+    NormalAsyncTest(std::string name, std::string description,
+                    WebRtc_UWord32 bitRate, unsigned int testNo,
+                    unsigned int rttFrames);
+    virtual ~NormalAsyncTest() {};
+    virtual void Perform();
+    // Invoked by the encode/decode complete callbacks.
+    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
+    virtual void Decoded(const webrtc::RawImage& decodedImage);
+    // Returns a heap-allocated copy; caller takes ownership.
+    virtual webrtc::CodecSpecificInfo*
+    CopyCodecSpecificInfo(
+        const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
+    virtual void CopyEncodedImage(TestVideoEncodedBuffer& dest,
+                                  webrtc::EncodedImage& src,
+                                  void* /*codecSpecificInfo*/) const;
+    // Codec-specific subclasses may supply per-frame encoder info.
+    virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
+    {
+        return NULL;
+    };
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
+        const WebRtc_UWord64 pictureId);
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
+
+protected:
+    virtual void Setup();
+    virtual void Teardown();
+    virtual bool Encode();
+    virtual int Decode(int lossValue = 0);
+    virtual void CodecSpecific_InitBitrate();
+    virtual int SetCodecSpecificParameters() {return 0;};
+    double tGetTime();// return time in sec
+
+    FILE*                   _sourceFile;
+    FILE*                   _decodedFile;
+    WebRtc_UWord32          _decodedWidth;
+    WebRtc_UWord32          _decodedHeight;
+    // Timing accumulators (seconds).
+    double                  _totalEncodeTime;
+    double                  _totalDecodeTime;
+    double                  _decodeCompleteTime;
+    double                  _encodeCompleteTime;
+    double                  _totalEncodePipeTime;
+    double                  _totalDecodePipeTime;
+    int                     _framecnt;
+    int                     _encFrameCnt;
+    int                     _decFrameCnt;
+    bool                    _requestKeyFrame;
+    unsigned int            _testNo;
+    unsigned int            _lengthEncFrame;
+    FrameQueueTuple*        _frameToDecode;
+    bool                    _appendNext;
+    // Per-timestamp submit times, keyed by RTP timestamp.
+    std::map<WebRtc_UWord32, double> _encodeTimes;
+    std::map<WebRtc_UWord32, double> _decodeTimes;
+    bool                    _missingFrames;
+    // Pending SLI/PLI feedback signals and RPS state.
+    std::list<fbSignal>     _signalSLI;
+    int                     _rttFrames;
+    mutable bool            _hasReceivedSLI;
+    mutable bool            _hasReceivedRPSI;
+    WebRtc_UWord8           _pictureIdSLI;
+    WebRtc_UWord16          _pictureIdRPSI;
+    WebRtc_UWord64          _lastDecRefPictureId;
+    WebRtc_UWord64          _lastDecPictureId;
+    std::list<fbSignal>     _signalPLI;
+    bool                    _hasReceivedPLI;
+    bool                    _waitForKey;
+};
+
+// Encode-complete callback: forwards encoded frames to the owning test
+// and, per its members, an output file and a frame queue (behavior of
+// Encoded() is defined in the .cc file).
+class VideoEncodeCompleteCallback : public webrtc::EncodedImageCallback
+{
+public:
+    VideoEncodeCompleteCallback(FILE* encodedFile, FrameQueue *frameQueue,
+                                NormalAsyncTest& test)
+    :
+      _encodedFile(encodedFile),
+      _frameQueue(frameQueue),
+      _test(test),
+      _encodedBytes(0)
+    {}
+
+    WebRtc_Word32
+    Encoded(webrtc::EncodedImage& encodedImage,
+            const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
+            const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+    // Total bytes seen so far.
+    WebRtc_UWord32 EncodedBytes();
+private:
+    FILE*             _encodedFile;
+    FrameQueue*       _frameQueue;
+    NormalAsyncTest&  _test;
+    WebRtc_UWord32    _encodedBytes;
+};
+
+// Decode-complete callback: forwards decoded frames and picture-id
+// feedback to the owning test (implementations in the .cc file).
+class VideoDecodeCompleteCallback : public webrtc::DecodedImageCallback
+{
+public:
+    VideoDecodeCompleteCallback(FILE* decodedFile, NormalAsyncTest& test)
+    :
+        _decodedFile(decodedFile),
+        _test(test),
+        _decodedBytes(0)
+    {}
+
+    virtual WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
+    virtual WebRtc_Word32
+    ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
+
+    // Total bytes seen so far.
+    WebRtc_UWord32 DecodedBytes();
+private:
+    FILE* _decodedFile;
+    NormalAsyncTest& _test;
+    WebRtc_UWord32    _decodedBytes;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/normal_test.cc b/trunk/src/modules/video_coding/codecs/test_framework/normal_test.cc
new file mode 100644
index 0000000..2dd7bce
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/normal_test.cc
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_test.h"
+
+#include <time.h>
+#include <sstream>
+#include <string.h>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+// Default constructor: "Normal Test 1" with default codec settings.
+// Fix: initialize _requestKeyFrame like the other two constructors do —
+// it is read later in the encode loop and was previously left indeterminate.
+NormalTest::NormalTest()
+:
+Test("Normal Test 1", "A test of normal execution of the codec"),
+_requestKeyFrame(false),
+_testNo(1),
+_lengthEncFrame(0),
+_appendNext(false)
+{
+}
+
+// Named test without an explicit target bit rate.
+NormalTest::NormalTest(std::string name, std::string description,
+                       unsigned int testNo)
+    : Test(name, description),
+      _requestKeyFrame(false),
+      _testNo(testNo),
+      _lengthEncFrame(0),
+      _appendNext(false)
+{
+}
+
+// Named test with an explicit target bit rate.
+NormalTest::NormalTest(std::string name, std::string description,
+                       WebRtc_UWord32 bitRate, unsigned int testNo)
+    : Test(name, description, bitRate),
+      _requestKeyFrame(false),
+      _testNo(testNo),
+      _lengthEncFrame(0),
+      _appendNext(false)
+{
+}
+
+// Opens the source, encoded-output and decoded-output files, deriving
+// default names from the test number when none were configured.
+void
+NormalTest::Setup()
+{
+    Test::Setup();
+    std::stringstream ss;
+    ss << _testNo;
+    std::string strTestNo;
+    ss >> strTestNo;
+
+    // Check if settings exist. Otherwise use defaults.
+    if (_outname == "")
+    {
+        _outname = webrtc::test::OutputPath() + "out_normaltest" + strTestNo +
+            ".yuv";
+    }
+    if (_encodedName == "")
+    {
+        _encodedName = webrtc::test::OutputPath() + "encoded_normaltest" +
+            strTestNo + ".yuv";
+    }
+
+    _sourceFile = fopen(_inname.c_str(), "rb");
+    if (_sourceFile == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    _encodedFile = fopen(_encodedName.c_str(), "wb");
+    if (_encodedFile == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    // Append to the decoded file on every Setup() after the first.
+    const char* mode = _appendNext ? "ab" : "wb";
+    _decodedFile = fopen(_outname.c_str(), mode);
+    if (_decodedFile == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    _appendNext = true;
+}
+
+// Closes the files opened by Setup().
+// Fix: _encodedFile is opened in Setup() but was never closed anywhere,
+// leaking the handle (and potentially unflushed output) on every run.
+void
+NormalTest::Teardown()
+{
+    Test::Teardown();
+    fclose(_sourceFile);
+    fclose(_encodedFile);
+    fclose(_decodedFile);
+}
+
+// Runs the complete synchronous test: encode/decode loop over the
+// foreman_cif clip, then statistics reporting and cleanup.
+void
+NormalTest::Perform()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    CodecSettings(352, 288, 30, _bitRate);
+    Setup();
+
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    
+    _encoder->InitEncode(&_inst, 1, 1460);
+    CodecSpecific_InitBitrate();
+    _decoder->InitDecode(&_inst,1);
+
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _framecnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    int decodeLength = 0;
+    // Encode() returns true once the whole source file has been consumed.
+    while (!Encode())
+    {
+        DoPacketLoss();
+        _encodedVideoBuffer.UpdateLength(_encodedVideoBuffer.GetLength());
+        fwrite(_encodedVideoBuffer.GetBuffer(), 1, _encodedVideoBuffer.GetLength(), _encodedFile);
+        decodeLength = Decode();
+        if (decodeLength < 0)
+        {
+            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
+            exit(EXIT_FAILURE);
+        }
+        fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength, _decodedFile);
+        CodecSpecific_InitBitrate();
+        _framecnt++;
+    }
+
+    // Ensure we empty the decoding queue.
+    while (decodeLength > 0)
+    {
+        decodeLength = Decode();
+        if (decodeLength < 0)
+        {
+            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
+            exit(EXIT_FAILURE);
+        }
+        fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength, _decodedFile);
+    }
+
+    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
+    double avgEncTime = _totalEncodeTime / _framecnt;
+    double avgDecTime = _totalDecodeTime / _framecnt;
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Average encode time: %f s\n", avgEncTime);
+    printf("Average decode time: %f s\n", avgDecTime);
+    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+
+    _inputVideoBuffer.Free();
+    _encodedVideoBuffer.Reset();
+    _decodedVideoBuffer.Free();
+
+    _encoder->Release();
+    _decoder->Release();
+
+    Teardown();
+}
+
+// Reads and encodes one source frame. Returns true at end of file.
+// NOTE(review): the actual encode call is commented out below, so
+// _lengthEncFrame stays 0 and the retry loop will hit the 50-attempt
+// limit — this looks like scaffolding awaiting a codec-specific override.
+bool
+NormalTest::Encode()
+{
+    _lengthEncFrame = 0;
+    EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
+    if (feof(_sourceFile) != 0)
+    {
+        return true;
+    }
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+    // Frame count doubles as the timestamp here.
+    _inputVideoBuffer.SetTimeStamp(_framecnt);
+
+    // This multiple attempt ridiculousness is to accomodate VP7:
+    // 1. The wrapper can unilaterally reduce the framerate for low bitrates.
+    // 2. The codec inexplicably likes to reject some frames. Perhaps there
+    //    is a good reason for this...
+    int encodingAttempts = 0;
+    double starttime = 0;
+    double endtime = 0;
+    while (_lengthEncFrame == 0)
+    {
+        starttime = clock()/(double)CLOCKS_PER_SEC;
+
+        _inputVideoBuffer.SetWidth(_inst.width);
+        _inputVideoBuffer.SetHeight(_inst.height);
+        //_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo,
+        //  _inst.frameRate, _requestKeyFrame && !(_framecnt%50));
+
+        endtime = clock()/(double)CLOCKS_PER_SEC;
+
+        _encodedVideoBuffer.SetCaptureHeight(_inst.height);
+        _encodedVideoBuffer.SetCaptureWidth(_inst.width);
+        if (_lengthEncFrame < 0)
+        {
+            (*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
+            fprintf(stderr,"\n\nError in encoder: %d\n\n", _lengthEncFrame);
+            exit(EXIT_FAILURE);
+        }
+        _sumEncBytes += _lengthEncFrame;
+
+        encodingAttempts++;
+        if (encodingAttempts > 50)
+        {
+            (*_log) << "Unable to encode frame: " << _framecnt << std::endl;
+            fprintf(stderr,"\n\nUnable to encode frame: %d\n\n", _framecnt);
+            exit(EXIT_FAILURE);
+        }
+    }
+    // Only the last attempt's duration is counted.
+    _totalEncodeTime += endtime - starttime;
+
+    if (encodingAttempts > 1)
+    {
+        (*_log) << encodingAttempts << " attempts required to encode frame: " <<
+            _framecnt + 1 << std::endl;
+        fprintf(stderr,"\n%d attempts required to encode frame: %d\n", encodingAttempts,
+            _framecnt + 1);
+    }
+        
+    return false;
+}
+
+// Decodes the current encoded frame; returns the decoded length or a
+// negative error. NOTE(review): the decode call itself is commented out,
+// so this currently always returns 0 and just resets the encoded buffer.
+int
+NormalTest::Decode(int lossValue)
+{
+    _encodedVideoBuffer.SetWidth(_inst.width);
+    _encodedVideoBuffer.SetHeight(_inst.height);
+    int lengthDecFrame = 0;
+    //int lengthDecFrame = _decoder->Decode(_encodedVideoBuffer, _decodedVideoBuffer);
+    //_totalDecodeTime += (double)((clock()/(double)CLOCKS_PER_SEC) - starttime);
+    if (lengthDecFrame < 0)
+    {
+        return lengthDecFrame;
+    }
+    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.UpdateLength(0);
+    return lengthDecFrame;
+}
+
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/normal_test.h b/trunk/src/modules/video_coding/codecs/test_framework/normal_test.h
new file mode 100644
index 0000000..0dcd9c2
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/normal_test.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
+
+#include "test.h"
+
+// Base class for synchronous codec tests: encode and decode run inline,
+// without completion callbacks.
+class NormalTest : public Test
+{
+public:
+    NormalTest();
+    NormalTest(std::string name, std::string description, unsigned int testNo);
+    NormalTest(std::string name, std::string description, WebRtc_UWord32 bitRate, unsigned int testNo);
+    virtual ~NormalTest() {};
+    virtual void Perform();
+
+protected:
+    virtual void Setup();
+    virtual void Teardown();
+    // Returns true when the whole source file has been consumed.
+    virtual bool Encode();
+    // Returns decoded length, or a negative value on error.
+    virtual int Decode(int lossValue = 0);
+    virtual void CodecSpecific_InitBitrate()=0;
+    // Hook for subclasses that simulate channel loss; 0 means no loss.
+    virtual int DoPacketLoss() {return 0;};
+
+    FILE*                   _sourceFile;   // raw input
+    FILE*                   _decodedFile;  // decoded output
+    FILE*                   _encodedFile;  // encoded bitstream output
+    double                  _totalEncodeTime;
+    double                  _totalDecodeTime;
+    unsigned int            _framecnt;
+    bool                    _requestKeyFrame;
+    unsigned int            _testNo;
+    int                     _lengthEncFrame;
+    bool                    _appendNext;   // append to _decodedFile on subsequent Setup()
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
+
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/packet_loss_test.cc b/trunk/src/modules/video_coding/codecs/test_framework/packet_loss_test.cc
new file mode 100644
index 0000000..c94fc9a
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/packet_loss_test.cc
@@ -0,0 +1,248 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "packet_loss_test.h"
+#include "video_source.h"
+#include <sstream>
+#include <cassert>
+#include <string.h>
+
+using namespace webrtc;
+
+// Default configuration: 10% random loss, bit rate 300, test number 5.
+PacketLossTest::PacketLossTest()
+    : NormalAsyncTest("PacketLossTest", "Encode, remove lost packets, decode",
+                      300, 5),
+      _lossRate(0.1),
+      _lossProbability(0.1),
+      _lastFrame(NULL),
+      _lastFrameLength(0)
+{
+}
+
+// Named variant of the default 10%-loss configuration.
+PacketLossTest::PacketLossTest(std::string name, std::string description)
+    : NormalAsyncTest(name, description, 300, 5),
+      _lossRate(0.1),
+      _lossProbability(0.1),
+      _lastFrame(NULL),
+      _lastFrameLength(0)
+{
+}
+
+// Fully parameterized constructor. |lossRate| must lie in [0, 1].
+PacketLossTest::PacketLossTest(std::string name, std::string description, double lossRate, bool useNack, unsigned int rttFrames /* = 0*/)
+    : NormalAsyncTest(name, description, 300, 5, rttFrames),
+      _lossRate(lossRate),
+      _lastFrame(NULL),
+      _lastFrameLength(0)
+{
+    assert(lossRate >= 0 && lossRate <= 1);
+    // With NACK, lost packets are treated as retransmitted, so no actual
+    // loss is inflicted on the stream.
+    _lossProbability = useNack ? 0 : lossRate;
+}
+
+// Records every encoded frame's timestamp so Decoded() can detect frames
+// lost in the simulated channel.
+void
+PacketLossTest::Encoded(const EncodedImage& encodedImage)
+{
+    _frameQueue.push_back(encodedImage._timeStamp);
+    NormalAsyncTest::Encoded(encodedImage);
+}
+
+// Matches decoded frames against the queue of encoded timestamps; frames
+// that never arrive are replaced by repeating the previous decoded frame
+// (a "frame freeze"), keeping the output clip at full length.
+void
+PacketLossTest::Decoded(const RawImage& decodedImage)
+{
+    // check the frame queue if any frames have gone missing
+    assert(!_frameQueue.empty()); // decoded frame is not in the queue
+    while(_frameQueue.front() < decodedImage._timeStamp)
+    {
+        // this frame is missing
+        // write previous decoded frame again (frame freeze)
+        if (_decodedFile && _lastFrame)
+        {
+            fwrite(_lastFrame, 1, _lastFrameLength, _decodedFile);
+        }
+
+        // remove frame from queue
+        _frameQueue.pop_front();
+    }
+    assert(_frameQueue.front() == decodedImage._timeStamp); // decoded frame is not in the queue
+
+    // pop the current frame
+    _frameQueue.pop_front();
+
+    // save image for future freeze-frame
+    // NOTE(review): _lastFrameLength doubles as length and capacity; a
+    // shrinking then growing frame causes a needless realloc but no overflow.
+    if (_lastFrameLength < decodedImage._length)
+    {
+        if (_lastFrame) delete [] _lastFrame;
+
+        _lastFrame = new WebRtc_UWord8[decodedImage._length];
+    }
+    memcpy(_lastFrame, decodedImage._buffer, decodedImage._length);
+    _lastFrameLength = decodedImage._length;
+
+    NormalAsyncTest::Decoded(decodedImage);
+}
+
+// Prints loss and channel-rate statistics before the common teardown.
+void
+PacketLossTest::Teardown()
+{
+    if (_totalKept + _totalThrown > 0)
+    {
+        printf("Target packet loss rate: %.4f\n", _lossProbability);
+        printf("Actual packet loss rate: %.4f\n", (_totalThrown * 1.0f) / (_totalKept + _totalThrown));
+        // Channel rate = bytes that survived the channel over elapsed clip time.
+        printf("Channel rate: %.2f kbps\n",
+            0.001 * 8.0 * _sumChannelBytes / ((_framecnt * 1.0f) / _inst.maxFramerate));
+    }
+    else
+    {
+        printf("No packet losses inflicted\n");
+    }
+
+    NormalAsyncTest::Teardown();
+}
+
+// Derives output file names from the source clip and loss rate, resets
+// the channel statistics, then runs the common async setup.
+void
+PacketLossTest::Setup()
+{
+    const VideoSource source(_inname, _inst.width, _inst.height,
+                             _inst.maxFramerate);
+
+    std::stringstream ss;
+    ss << _lossRate;
+    std::string rateSuffix;
+    ss >> rateSuffix;
+
+    const std::string base = source.GetName() + "-" + rateSuffix;
+    _encodedName = base;
+    _outname = "out-" + base;
+    if (_lossProbability != _lossRate)  // NACK mode; see the constructor.
+    {
+        _encodedName += "-nack";
+        _outname += "-nack";
+    }
+    _encodedName += ".vp8";
+    _outname += ".yuv";
+
+    _totalKept = 0;
+    _totalThrown = 0;
+    _sumChannelBytes = 0;
+
+    NormalAsyncTest::Setup();
+}
+
+// Sets encoder channel parameters and rates. In NACK mode the available
+// bit rate is reduced to leave room for retransmissions.
+void
+PacketLossTest::CodecSpecific_InitBitrate()
+{
+    assert(_bitRate > 0);
+    const bool simulatingNack = (_lossProbability != _lossRate);
+    const WebRtc_UWord32 simulatedBitRate = simulatingNack ?
+        WebRtc_UWord32(_bitRate / (1 + _lossRate)) : _bitRate;
+
+    // Guard against a zero frame rate when deriving RTT from frame counts.
+    int rtt = 0;
+    if (_inst.maxFramerate > 0)
+    {
+        rtt = _rttFrames * (1000 / _inst.maxFramerate);
+    }
+    _encoder->SetChannelParameters((WebRtc_UWord32)(_lossProbability * 255.0),
+                                   rtt);
+    _encoder->SetRates(simulatedBitRate, _inst.maxFramerate);
+}
+
+// Packetizes the frame to decode and randomly drops packets, rebuilding
+// the frame from the survivors. Returns 0 = no loss, 1 = partial loss,
+// 2 = entire frame lost.
+int PacketLossTest::DoPacketLoss()
+{
+    // Only packet loss for delta frames
+    if (_frameToDecode->_frame->GetLength() == 0 || _frameToDecode->_frame->GetFrameType() != kDeltaFrame)
+    {
+        _sumChannelBytes += _frameToDecode->_frame->GetLength();
+        return 0;
+    }
+    unsigned char *packet = NULL;
+    TestVideoEncodedBuffer newEncBuf;
+    newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
+    _inBufIdx = 0;
+    _outBufIdx = 0;
+    int size = 1;
+    int kept = 0;
+    int thrown = 0;
+    while ((size = NextPacket(1500, &packet)) > 0)
+    {
+        if (!PacketLoss(_lossProbability, thrown))
+        {
+            InsertPacket(&newEncBuf, packet, size);
+            kept++;
+        }
+        else
+        {
+            // Use the ByteLoss function if you want to lose only
+            // parts of a packet, and not the whole packet.
+
+            //int size2 = ByteLoss(size, packet, 15);
+            thrown++;
+            //if (size2 != size)
+            //{
+            //    InsertPacket(&newEncBuf, packet, size2);
+            //}
+        }
+    }
+    int	lossResult  = (thrown!=0);	// 0 = no loss	1 = loss(es)
+    if (lossResult)
+    {
+        lossResult += (kept==0);	// 2 = all lost = full frame
+    }
+    // Replace the frame contents with the surviving packets.
+    _frameToDecode->_frame->CopyBuffer(newEncBuf.GetLength(), newEncBuf.GetBuffer());
+    _sumChannelBytes += newEncBuf.GetLength();
+    _totalKept += kept;
+    _totalThrown += thrown;
+
+    return lossResult;
+    //printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
+    //printf("Encoded left: %d bytes\n", _encodedVideoBuffer.GetLength());
+}
+
+// Returns (via |pkg|) a pointer to the next packet of at most |mtu| bytes
+// from the frame being decoded; the return value is the packet size.
+int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
+{
+    unsigned char* base = _frameToDecode->_frame->GetBuffer();
+    *pkg = base + _inBufIdx;
+    const long remaining =
+        static_cast<long>(_frameToDecode->_frame->GetLength()) - _inBufIdx;
+    if (remaining <= mtu)
+    {
+        // Last (possibly short) packet of the frame.
+        _inBufIdx = _frameToDecode->_frame->GetLength();
+        return static_cast<int>(remaining);
+    }
+    _inBufIdx += mtu;
+    return mtu;
+}
+
+// Placeholder for partial-packet loss: intended to remove |bytesToLose|
+// bytes from |pkg|, but currently leaves the packet intact and returns
+// the original |size| unchanged.
+int PacketLossTest::ByteLoss(int size, unsigned char *pkg, int bytesToLose)
+{
+    return size;
+}
+
+// Appends |size| bytes from |pkg| to |buf|; drops the packet (with an
+// error message) if the buffer lacks room.
+void PacketLossTest::InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size)
+{
+    const long room = static_cast<long>(buf->GetSize()) - _outBufIdx;
+    if (room < size)
+    {
+        printf("InsertPacket error!\n");
+        return;
+    }
+    memcpy(buf->GetBuffer() + _outBufIdx, pkg, size);
+    buf->UpdateLength(buf->GetLength() + size);
+    _outBufIdx += size;
+}
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/packet_loss_test.h b/trunk/src/modules/video_coding/codecs/test_framework/packet_loss_test.h
new file mode 100644
index 0000000..ea37681
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/packet_loss_test.h
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
+
+#include <list>
+
+#include "normal_async_test.h"
+
+// Async test that simulates packet loss on the channel: encoded frames
+// are packetized, randomly dropped, and reassembled before decoding.
+class PacketLossTest : public NormalAsyncTest
+{
+public:
+    PacketLossTest();
+    virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
+    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
+    virtual void Decoded(const webrtc::RawImage& decodedImage);
+protected:
+    PacketLossTest(std::string name, std::string description);
+    PacketLossTest(std::string name,
+                   std::string description,
+                   double lossRate,
+                   bool useNack,
+                   unsigned int rttFrames = 0);
+
+    virtual void Setup();
+    virtual void Teardown();
+    virtual void CodecSpecific_InitBitrate();
+    virtual int DoPacketLoss();
+    virtual int NextPacket(int size, unsigned char **pkg);
+    virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
+    virtual void InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size);
+    // Read/write cursors used while packetizing a frame.
+    int _inBufIdx;
+    int _outBufIdx;
+
+    // When NACK is being simulated _lossProbabilty is zero,
+    // otherwise it is set equal to _lossRate.
+    // Desired channel loss rate.
+    double _lossRate;
+    // Probability used to simulate packet drops.
+    double _lossProbability;
+
+    int _totalKept;
+    int _totalThrown;
+    int _sumChannelBytes;
+    // Timestamps of encoded frames awaiting decode (for loss detection).
+    std::list<WebRtc_UWord32> _frameQueue;
+    WebRtc_UWord8* _lastFrame;      // copy of last decoded frame (freeze-frame)
+    WebRtc_UWord32 _lastFrameLength;
+};
+
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/performance_test.cc b/trunk/src/modules/video_coding/codecs/test_framework/performance_test.cc
new file mode 100644
index 0000000..250400c
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/performance_test.cc
@@ -0,0 +1,296 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "performance_test.h"
+
+#include <assert.h>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+
+using namespace webrtc;
+
+#define NUM_FRAMES 300
+
+// Single-codec constructor, used for the per-thread sub-tests created by
+// the multi-codec constructor. The one-slot event array is filled in by
+// the owning test in Setup().
+PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate)
+:
+NormalAsyncTest(bitRate),
+_numCodecs(0),
+_tests(NULL),
+_encoders(NULL),
+_decoders(NULL),
+_threads(NULL),
+_rawImageLock(NULL),
+_encodeEvents(new EventWrapper*[1]),
+_stopped(true),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+}
+
+// Main constructor: allocates one sub-test (with its own encode event)
+// per codec; the sub-tests run in parallel threads started by Setup().
+PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs)
+:
+NormalAsyncTest(bitRate),
+_numCodecs(numCodecs),
+_tests(new PerformanceTest*[_numCodecs]),
+_encoders(new VideoEncoder*[_numCodecs]),
+_decoders(new VideoDecoder*[_numCodecs]),
+_threads(new ThreadWrapper*[_numCodecs]),
+_rawImageLock(RWLockWrapper::CreateRWLock()),
+_encodeEvents(new EventWrapper*[_numCodecs]),
+_stopped(true),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _tests[i] = new PerformanceTest(bitRate);
+        _encodeEvents[i] = EventWrapper::Create();
+    }
+}
+
+// Frees the arrays allocated by the constructors; the objects the array
+// elements point to are deleted in Teardown(), not here.
+// Simplified: delete/delete[] on a NULL pointer is a no-op per the C++
+// standard, so the explicit NULL checks were redundant.
+PerformanceTest::~PerformanceTest()
+{
+    delete [] _encoders;
+    delete [] _decoders;
+    delete [] _tests;
+    delete [] _threads;
+    delete _rawImageLock;
+    delete [] _encodeEvents;
+}
+
+// Creates per-codec encoders/decoders, wires each sub-test to the shared
+// raw-image lock and its encode event, and starts one worker thread per
+// codec. Subclasses must override CreateEncoder/CreateDecoder.
+void
+PerformanceTest::Setup()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    NormalAsyncTest::Setup(); // Setup input and output files
+    CodecSettings(352, 288, 30, _bitRate); // common to all codecs
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _encoders[i] = CreateEncoder();
+        _decoders[i] = CreateDecoder();
+        if (_encoders[i] == NULL)
+        {
+            printf("Must create a codec specific test!\n");
+            exit(EXIT_FAILURE);
+        }
+        if(_encoders[i]->InitEncode(&_inst, 4, 1440) < 0)
+        {
+            exit(EXIT_FAILURE);
+        }
+        if (_decoders[i]->InitDecode(&_inst, 1))
+        {
+            exit(EXIT_FAILURE);
+        }
+        _tests[i]->SetEncoder(_encoders[i]);
+        _tests[i]->SetDecoder(_decoders[i]);
+        // Sub-tests share this test's raw-image lock and get their event.
+        _tests[i]->_rawImageLock = _rawImageLock;
+        _encodeEvents[i]->Reset();
+        _tests[i]->_encodeEvents[0] = _encodeEvents[i];
+        _tests[i]->_inst = _inst;
+        _threads[i] = ThreadWrapper::CreateThread(PerformanceTest::RunThread, _tests[i]);
+        unsigned int id = 0;
+        _tests[i]->_stopped = false;
+        _threads[i]->Start(id);
+    }
+}
+
+// Feeds NUM_FRAMES raw frames to all codec threads at ~30 fps (33 ms
+// pacing), then stops the threads and reports the total wall-clock time.
+void
+PerformanceTest::Perform()
+{
+    Setup();
+    EventWrapper& sleepEvent = *EventWrapper::Create();
+    const WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
+    for (int i=0; i < NUM_FRAMES; i++)
+    {
+        {
+            // Read a new frame from file
+            WriteLockScoped imageLock(*_rawImageLock);
+            _lengthEncFrame = 0;
+            EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile),
+                      0u);
+            if (feof(_sourceFile) != 0)
+            {
+                rewind(_sourceFile);
+            }
+            _inputVideoBuffer.VerifyAndAllocate(_inst.width*_inst.height*3/2);
+            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+            _inputVideoBuffer.SetTimeStamp((unsigned int) (_encFrameCnt * 9e4 / static_cast<float>(_inst.maxFramerate)));
+            _inputVideoBuffer.SetWidth(_inst.width);
+            _inputVideoBuffer.SetHeight(_inst.height);
+            // NOTE(review): this inner loop variable shadows the outer |i|;
+            // harmless here, but easy to misread.
+            for (int i=0; i < _numCodecs; i++)
+            {
+                _tests[i]->_inputVideoBuffer.CopyPointer(_inputVideoBuffer);
+                _encodeEvents[i]->Set();
+            }
+        }
+        if (i < NUM_FRAMES - 1)
+        {
+            sleepEvent.Wait(33);
+        }
+    }
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _tests[i]->_stopped = true;
+        _encodeEvents[i]->Set();
+        _threads[i]->Stop();
+    }
+    const WebRtc_UWord32 totalTime =
+            static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp() - startTime);
+    printf("Total time: %u\n", totalTime);
+    delete &sleepEvent;
+    Teardown();
+}
+
+// Releases callbacks, per-codec encoders/decoders, sub-tests, events and
+// threads. Runs for both the owning test (_numCodecs > 0) and the
+// sub-tests it tears down recursively (_numCodecs == 0).
+void PerformanceTest::Teardown()
+{
+    if (_encodeCompleteCallback != NULL)
+    {
+        delete _encodeCompleteCallback;
+    }
+    if (_decodeCompleteCallback != NULL)
+    {
+        delete _decodeCompleteCallback;
+    }
+    // main test only, all others have numCodecs = 0:
+    if (_numCodecs > 0)
+    {
+        WriteLockScoped imageLock(*_rawImageLock);
+        _inputVideoBuffer.Free();
+        NormalAsyncTest::Teardown();
+    }
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _encoders[i]->Release();
+        delete _encoders[i];
+        _decoders[i]->Release();
+        delete _decoders[i];
+        // Detach shared state before deleting the sub-test.
+        _tests[i]->_inputVideoBuffer.ClearPointer();
+        _tests[i]->_rawImageLock = NULL;
+        _tests[i]->Teardown();
+        delete _tests[i];
+        delete _encodeEvents[i];
+        delete _threads[i];
+    }
+}
+
+// Thread entry point: forwards to PerformSingleTest() on the test object.
+bool
+PerformanceTest::RunThread(void* obj)
+{
+    return static_cast<PerformanceTest*>(obj)->PerformSingleTest();
+}
+
+// Worker-thread body for one codec: waits for a new raw frame, encodes
+// it, then drains the queue of encoded frames through the packet-loss
+// hook and the decoder. Returns false to stop the thread.
+bool
+PerformanceTest::PerformSingleTest()
+{
+    if (_encodeCompleteCallback == NULL)
+    {
+        _encodeCompleteCallback = new VideoEncodeCompleteCallback(NULL, &_frameQueue, *this);
+        _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        _decodeCompleteCallback = new VideoDecodeCompleteCallback(NULL, *this);
+        _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
+    }
+    (*_encodeEvents)->Wait(WEBRTC_EVENT_INFINITE); // The first event is used for every single test
+    CodecSpecific_InitBitrate();
+    bool complete = false;
+    {
+        ReadLockScoped imageLock(*_rawImageLock);
+        complete = Encode();
+    }
+    if (!_frameQueue.Empty() || complete)
+    {
+        while (!_frameQueue.Empty())
+        {
+            _frameToDecode = static_cast<FrameQueueTuple *>(_frameQueue.PopFrame());
+            int lost = DoPacketLoss();
+            if (lost == 2)
+            {
+                // Lost the whole frame, continue
+                _missingFrames = true;
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                continue;
+            }
+            int ret = Decode(lost);
+            delete _frameToDecode;
+            _frameToDecode = NULL;
+            if (ret < 0)
+            {
+                fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                return false;
+            }
+            // Bug fix: this condition was `ret < 0`, duplicating the branch
+            // above and making the positive-return check unreachable.
+            else if (ret > 0)
+            {
+                fprintf(stderr, "\n\nPositive return value from decode!\n\n");
+                return false;
+            }
+        }
+    }
+    if (_stopped)
+    {
+        return false;
+    }
+    return true;
+}
+
+// Encodes the shared input frame, requesting a key frame every 50th frame
+// when _requestKeyFrame is set. Always returns false (stream never "ends"
+// here; the file is rewound by Perform()).
+bool PerformanceTest::Encode()
+{
+    RawImage rawImage;
+    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
+    VideoFrameType frameType = kDeltaFrame;
+    if (_requestKeyFrame && !(_encFrameCnt%50))
+    {
+        frameType = kKeyFrame;
+    }
+    webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
+    int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameType);
+    EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
+    if (codecSpecificInfo != NULL)
+    {
+        delete codecSpecificInfo;
+        codecSpecificInfo = NULL;
+    }
+    // Redundant with the EXPECT_EQ above, but aborts in release builds too.
+    assert(ret >= 0);
+    return false;
+}
+
+// Decodes the queued frame; |lossValue| != 0 marks the frame incomplete.
+int PerformanceTest::Decode(int lossValue)
+{
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+    encodedImage._completeFrame = !lossValue;
+    const int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                                     _frameToDecode->_codecSpecificInfo);
+    // Missing-frame state only applies to the first decode after the loss.
+    _missingFrames = false;
+    return ret;
+}
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/performance_test.h b/trunk/src/modules/video_coding/codecs/test_framework/performance_test.h
new file mode 100644
index 0000000..d060832
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/performance_test.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
+
+#include "normal_async_test.h"
+#include "thread_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "event_wrapper.h"
+
+class PerformanceTest : public NormalAsyncTest  // Runs several codec instances in parallel threads.
+{
+public:
+    PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs);
+    virtual ~PerformanceTest();
+
+    virtual void Perform();
+    virtual void Print() {};
+
+protected:
+    PerformanceTest(WebRtc_UWord32 bitRate);  // Used internally to build per-codec sub-tests.
+    virtual void Setup();
+    virtual bool Encode();
+    virtual int Decode(int lossValue = 0);
+    virtual void Teardown();
+    static bool RunThread(void* obj);  // Thread entry point; obj is the owning PerformanceTest.
+    bool PerformSingleTest();
+
+    virtual webrtc::VideoEncoder* CreateEncoder() const { return NULL; };  // Overridden per codec.
+    virtual webrtc::VideoDecoder* CreateDecoder() const { return NULL; };  // Overridden per codec.
+
+    WebRtc_UWord8                 _numCodecs;
+    PerformanceTest**             _tests;        // One sub-test per codec instance.
+    webrtc::VideoEncoder**        _encoders;
+    webrtc::VideoDecoder**        _decoders;
+    webrtc::ThreadWrapper**       _threads;      // One worker thread per sub-test.
+    webrtc::RWLockWrapper*        _rawImageLock; // Guards the shared input image.
+    webrtc::EventWrapper**        _encodeEvents;
+    FrameQueue                    _frameQueue;   // Encoded frames awaiting decode.
+    bool                          _stopped;
+    webrtc::EncodedImageCallback* _encodeCompleteCallback;
+    webrtc::DecodedImageCallback* _decodeCompleteCallback;
+    FILE*                         _outFile;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/plotBenchmark.m b/trunk/src/modules/video_coding/codecs/test_framework/plotBenchmark.m
new file mode 100644
index 0000000..33c8eb6
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/plotBenchmark.m
@@ -0,0 +1,427 @@
+function plotBenchmark(fileNames, export)
+%PLOTBENCHMARK Plots and exports video codec benchmarking results.
+%   PLOTBENCHMARK(FILENAMES, EXPORT) parses the video codec benchmarking result
+%   files given by the cell array of strings FILENAME. It plots the results and
+%   optionally exports each plot to an appropriately named file.
+%
+%   EXPORT parameter:
+%       'none'  No file exports.
+%       'eps'   Exports to eps files (default).
+%       'pdf'   Exports to eps files and uses the command-line utility
+%               epstopdf to obtain pdf files.
+%
+%   Example:
+%       plotBenchmark({'H264Benchmark.txt' 'LSVXBenchmark.txt'}, 'pdf')
+
+if (nargin < 1)
+    error('Too few input arguments');
+elseif (nargin < 2)
+    export = 'eps'; % default export format
+end
+
+if ~iscell(fileNames)
+    if ischar(fileNames)
+        % one single file name as a string is ok
+        if size(fileNames,1) > 1
+            % this is a char matrix, not ok
+            error('First argument must not be a char matrix');
+        end
+        % wrap in a cell array
+        fileNames = {fileNames};
+    else
+        error('First argument must be a cell array of strings');
+    end
+end
+
+if ~ischar(export)
+    error('Second argument must be a string');
+end
+
+outpath = 'BenchmarkPlots'; % all plots are written below this directory
+[status, errMsg] = mkdir(outpath);
+if status == 0
+    error(errMsg);
+end
+
+nCases = 0;
+testCases = [];
+% Read each test result file
+for fileIdx = 1:length(fileNames)
+    if ~isstr(fileNames{fileIdx})
+        error('First argument must be a cell array of strings');
+    end
+
+    fid = fopen(fileNames{fileIdx}, 'rt');
+    if fid == -1
+        error(['Unable to open ' fileNames{fileIdx}]);
+    end
+
+    version = '1.0';
+    if ~strcmp(fgetl(fid), ['#!benchmark' version]) % first line is a format magic string
+        fclose(fid);
+        error(['Requires benchmark file format version ' version]);
+    end
+
+    % Parse results file into testCases struct
+    codec = fgetl(fid);
+    tline = fgetl(fid);
+    while(tline ~= -1) % fgetl returns -1 at end of file
+        nCases = nCases + 1;
+
+        delim = strfind(tline, ',');
+        name = tline(1:delim(1)-1);
+        % Drop underscored suffix from name
+        underscore = strfind(name, '_'); 
+        if ~isempty(underscore)
+            name = name(1:underscore(1)-1);
+        end
+
+        resolution = tline(delim(1)+1:delim(2)-1);
+        frameRate = tline(delim(2)+1:end);
+
+        tline = fgetl(fid);
+        delim = strfind(tline, ',');
+        bitrateLabel = tline(1:delim(1)-1); 
+        bitrate = sscanf(tline(delim(1):end),',%f');
+
+        tline = fgetl(fid);
+        delim = strfind(tline, ',');
+        psnrLabel = tline(1:delim(1)-1); 
+        psnr = sscanf(tline(delim(1):end),',%f'); 
+
+
+        % Default data for the optional lines
+        speedLabel = 'Default';
+        speed = 0;
+        ssimLabel = 'Default';
+        ssim = 0;
+        
+        tline = fgetl(fid);
+        delim = strfind(tline, ',');
+        
+        while ~isempty(delim) % optional data lines keep coming until a line without commas
+            % More data
+            % Check type of data
+            if strncmp(lower(tline), 'speed', 5)
+                % Speed data included
+                speedLabel = tline(1:delim(1)-1);
+                speed = sscanf(tline(delim(1):end), ',%f');
+
+                tline = fgetl(fid);
+                
+            elseif strncmp(lower(tline), 'encode time', 11)
+                % Encode and decode times included
+                % TODO: take care of the data
+                
+                % pop two lines from file
+                tline = fgetl(fid);
+                tline = fgetl(fid);
+                
+            elseif strncmp(tline, 'SSIM', 4)
+                % SSIM data included
+                ssimLabel = tline(1:delim(1)-1);
+                ssim = sscanf(tline(delim(1):end), ',%f');
+
+                tline = fgetl(fid);
+            end
+            delim = strfind(tline, ',');
+        end
+
+        testCases = [testCases struct('codec', codec, 'name', name, 'resolution', ...
+            resolution, 'frameRate', frameRate, 'bitrate', bitrate, 'psnr', psnr, ...
+            'speed', speed, 'bitrateLabel', bitrateLabel, 'psnrLabel', psnrLabel, ...
+            'speedLabel', speedLabel, ...
+            'ssim', ssim, 'ssimLabel', ssimLabel)];
+
+        tline = fgetl(fid);
+    end
+
+    fclose(fid);
+end
+
+i = 0;
+casesPsnr = testCases; % each plotOne* call consumes the cases it plotted and returns the rest
+while ~isempty(casesPsnr)
+    i = i + 1;
+    casesPsnr = plotOnePsnr(casesPsnr, i, export, outpath);
+end
+
+casesSSIM = testCases;
+while ~isempty(casesSSIM)
+    i = i + 1;
+    casesSSIM = plotOneSSIM(casesSSIM, i, export, outpath);
+end
+
+casesSpeed = testCases;
+while ~isempty(casesSpeed)
+    if casesSpeed(1).speed == 0 % skip cases without speed data (0 is the default)
+        casesSpeed = casesSpeed(2:end);
+    else
+        i = i + 1;
+        casesSpeed = plotOneSpeed(casesSpeed, i, export, outpath);
+    end
+end
+
+
+
+%%%%%%%%%%%%%%%%%%
+%% SUBFUNCTIONS %%
+%%%%%%%%%%%%%%%%%%
+
+function casesOut = plotOnePsnr(cases, num, export, outpath)
+% Plots rate-distortion (bitrate vs PSNR) curves for all cases sharing the
+% resolution and frame rate of cases(1); returns the cases not yet plotted.
+% Find matching specs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).resolution, cases(i).resolution) & ...
+        strcmp(cases(1).frameRate, cases(i).frameRate)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+% Prune similar results
+for i = 1:length(cases)
+    simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10); % points closer than 10 kbps
+    while ~isempty(simIndx)
+        diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
+        cases(i).psnr = cases(i).psnr(diffIndx);
+        cases(i).bitrate = cases(i).bitrate(diffIndx);
+        simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
+    end
+end
+
+% Prepare figure with axis labels and so on
+hFig = figure(num);
+clf;
+hold on;
+grid on;
+axis([0 1100 20 50]);
+set(gca, 'XTick', 0:200:1000);
+set(gca, 'YTick', 20:10:60);
+xlabel(cases(1).bitrateLabel);
+ylabel(cases(1).psnrLabel);
+res = cases(1).resolution;
+frRate = cases(1).frameRate;
+title([res ', ' frRate]);
+
+hLines = [];
+codecs = {};
+sequences = {};
+i = 0;
+while ~isempty(cases)
+    i = i + 1;
+    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'psnr', i, sequences, 1);
+
+    % Stored to generate the legend
+    hLines = [hLines ; hLine];
+    codecs = {codecs{:} codec};
+end
+legend(hLines, codecs, 4);
+hold off;
+
+if ~strcmp(export, 'none')
+    % Export figure to an eps file
+    res = stripws(res);
+    frRate = stripws(frRate);
+    exportName = [outpath '/psnr-' res '-' frRate];
+    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
+end
+
+if strcmp(export, 'pdf')
+    % Use the epstopdf utility to convert to pdf
+    system(['epstopdf ' exportName '.eps']);
+end
+
+
+function casesOut = plotOneSSIM(cases, num, export, outpath)
+% Plots bitrate vs SSIM curves for all cases sharing the resolution and
+% frame rate of cases(1); returns the cases not yet plotted.
+% Find matching specs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).resolution, cases(i).resolution) & ...
+        strcmp(cases(1).frameRate, cases(i).frameRate)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+% Prune similar results
+for i = 1:length(cases)
+    simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10); % points closer than 10 kbps
+    while ~isempty(simIndx)
+        diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
+        cases(i).ssim = cases(i).ssim(diffIndx);
+        cases(i).bitrate = cases(i).bitrate(diffIndx);
+        simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
+    end
+end
+
+% Prepare figure with axis labels and so on
+hFig = figure(num);
+clf;
+hold on;
+grid on;
+axis([0 1100 0.5 1]); % y-limit are set to 'auto' below
+set(gca, 'XTick', 0:200:1000);
+%set(gca, 'YTick', 20:10:60);
+xlabel(cases(1).bitrateLabel);
+ylabel(cases(1).ssimLabel);
+res = cases(1).resolution;
+frRate = cases(1).frameRate;
+title([res ', ' frRate]);
+
+hLines = [];
+codecs = {};
+sequences = {};
+i = 0;
+while ~isempty(cases)
+    i = i + 1;
+    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'ssim', i, sequences, 1);
+
+    % Stored to generate the legend
+    hLines = [hLines ; hLine];
+    codecs = {codecs{:} codec};
+end
+%set(gca,'YLimMode','auto')
+set(gca,'YLim',[0.5 1])
+set(gca,'YScale','log')
+legend(hLines, codecs, 4);
+hold off;
+
+if ~strcmp(export, 'none')
+    % Export figure to an eps file
+    res = stripws(res);
+    frRate = stripws(frRate);
+    exportName = [outpath '/ssim-' res '-' frRate]; % was 'psnr-', which overwrote the PSNR plots
+    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
+end
+
+if strcmp(export, 'pdf')
+    % Use the epstopdf utility to convert to pdf
+    system(['epstopdf ' exportName '.eps']);
+end
+
+
+function casesOut = plotOneSpeed(cases, num, export, outpath)
+% Plots PSNR vs speed curves for all cases sharing the resolution, frame
+% rate and sequence name of cases(1); returns the cases not yet plotted.
+% Find matching specs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).resolution, cases(i).resolution) & ...
+        strcmp(cases(1).frameRate, cases(i).frameRate) & ...
+        strcmp(cases(1).name, cases(i).name)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+% Prune similar results
+for i = 1:length(cases)
+    simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25); % points closer than 0.25 dB
+    while ~isempty(simIndx)
+        diffIndx = setdiff(1:length(cases(i).psnr), simIndx);
+        cases(i).psnr = cases(i).psnr(diffIndx);
+        cases(i).speed = cases(i).speed(diffIndx);
+        simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
+    end
+end
+
+hFig = figure(num);
+clf;
+hold on;
+%grid on;
+xlabel(cases(1).psnrLabel);
+ylabel(cases(1).speedLabel);
+res = cases(1).resolution;
+name = cases(1).name;
+frRate = cases(1).frameRate;
+title([name ', ' res ', ' frRate]);
+
+hLines = [];
+codecs = {};
+sequences = {};
+i = 0;
+while ~isempty(cases)
+    i = i + 1;
+    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'psnr', 'speed', i, sequences, 0);
+
+    % Stored to generate the legend
+    hLines = [hLines ; hLine];
+    codecs = {codecs{:} codec};
+end
+legend(hLines, codecs, 1);
+hold off;
+
+if ~strcmp(export, 'none')
+    % Export figure to an eps file
+    res = stripws(res);
+    frRate = stripws(frRate);
+    exportName = [outpath '/speed-' name '-' res '-' frRate];
+    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
+end
+
+if strcmp(export, 'pdf')
+    % Use the epstopdf utility to convert to pdf
+    system(['epstopdf ' exportName '.eps']);
+end
+
+
+function [casesOut, hLine, codec, sequences] = plotOneCodec(cases, xfield, yfield, num, sequences, annotatePlot)
+% Plots xfield vs yfield for every case matching cases(1).codec; returns the
+% remaining cases, the last line handle, the codec name, and the updated
+% list of annotated sequence names.
+plotStr = {'gx-', 'bo-', 'r^-', 'kd-', 'cx-', 'go--', 'b^--'}; % one marker style per codec index
+% Find matching codecs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).codec, cases(i).codec)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+for i = 1:length(cases)
+    % Plot a single case
+    hLine = plot(getfield(cases(i), xfield), getfield(cases(i), yfield), plotStr{num}, ...
+        'LineWidth', 1.1, 'MarkerSize', 6);
+end
+
+% hLine handle and codec are returned to construct the legend afterwards
+codec = cases(1).codec;
+
+if annotatePlot == 0
+    return;
+end
+
+for i = 1:length(cases)
+    % Print the sequence name as a text label
+    % Ensure each sequence is only printed once
+    sequencePlotted = 0;
+    for j = 1:length(sequences)
+        if strcmp(cases(i).name, sequences{j})
+            sequencePlotted = 1;
+            break;
+        end
+    end
+
+    if sequencePlotted == 0
+        text(getfield(cases(i), xfield, {1}), getfield(cases(i), yfield, {1}), ...
+            ['    ' cases(i).name]);
+        sequences = {sequences{:} cases(i).name};
+    end
+end
+
+
+% Strip whitespace from string
+function str = stripws(str)
+if ~isstr(str)
+    error('String required');
+end
+str = str(setdiff(1:length(str), find(isspace(str) == 1))); % drop every whitespace character
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/test.cc b/trunk/src/modules/video_coding/codecs/test_framework/test.cc
new file mode 100644
index 0000000..3eb6935
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/test.cc
@@ -0,0 +1,165 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test.h"
+
+#include <cstring>
+#include <iostream>
+
+#include "testsupport/metrics/video_metrics.h"
+
+using namespace webrtc;
+
+long filesize(const char *filename); // local function defined at end of file
+
+Test::Test(std::string name, std::string description)  // Bitrate defaults to 0 (set later by CodecSettings).
+:
+_bitRate(0),
+_inname(""),
+_outname(""),
+_encodedName(""),
+_name(name),
+_description(description)
+{
+    memset(&_inst, 0, sizeof(_inst));
+    unsigned int seed = static_cast<unsigned int>(0);  // Fixed seed: runs are reproducible.
+    std::srand(seed);
+}
+
+Test::Test(std::string name, std::string description, WebRtc_UWord32 bitRate)  // Explicit target bitrate in kbps.
+:
+_bitRate(bitRate),
+_inname(""),
+_outname(""),
+_encodedName(""),
+_name(name),
+_description(description)
+{
+    memset(&_inst, 0, sizeof(_inst));
+    unsigned int seed = static_cast<unsigned int>(0);  // Fixed seed: runs are reproducible.
+    std::srand(seed);
+}
+
+void
+Test::Print()  // Computes PSNR/SSIM between input and output files and writes them to the log and stdout.
+{
+    std::cout << _name << " completed!" << std::endl;
+    (*_log) << _name << std::endl;
+    (*_log) << _description << std::endl;
+    (*_log) << "Input file: " << _inname << std::endl;
+    (*_log) << "Output file: " << _outname << std::endl;
+    webrtc::test::QualityMetricsResult psnr;
+    webrtc::test::QualityMetricsResult ssim;
+    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
+                      _inst.height, &psnr);
+    I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
+                      _inst.height, &ssim);
+
+    (*_log) << "PSNR: " << psnr.average << std::endl;
+    std::cout << "PSNR: " << psnr.average << std::endl << std::endl;
+    (*_log) << "SSIM: " << ssim.average << std::endl;
+    std::cout << "SSIM: " << ssim.average << std::endl << std::endl;
+    (*_log) << std::endl;
+}
+
+void
+Test::Setup()  // Allocates the source frame buffer; requires _inst.width/height to be set first.
+{
+    int widhei          = _inst.width*_inst.height;
+    _lengthSourceFrame  = 3*widhei/2;  // I420: luma plane + two quarter-size chroma planes.
+    _sourceBuffer       = new unsigned char[_lengthSourceFrame];
+}
+
+void
+Test::CodecSettings(int width, int height, WebRtc_UWord32 frameRate /*=30*/, WebRtc_UWord32 bitRate /*=0*/)
+{
+    // Explicit bitRate wins; otherwise keep a previously set _bitRate; otherwise fall back to 600 kbps.
+    if (bitRate > 0)
+    {
+        _bitRate = bitRate;
+    }
+    else if (_bitRate == 0)
+    {
+        _bitRate = 600;
+    }
+    _inst.codecType = kVideoCodecVP8;
+    _inst.codecSpecific.VP8.feedbackModeOn = true;
+    _inst.maxFramerate = (unsigned char)frameRate;
+    _inst.startBitrate = (int)_bitRate;
+    _inst.maxBitrate = 8000;
+    _inst.width = width;
+    _inst.height = height;
+}
+
+void
+Test::Teardown()  // Frees the buffer allocated in Setup().
+{
+    delete [] _sourceBuffer;
+}
+
+void
+Test::SetEncoder(webrtc::VideoEncoder*encoder)  // Non-owning; caller keeps ownership.
+{
+    _encoder = encoder;
+}
+
+void
+Test::SetDecoder(VideoDecoder*decoder)  // Non-owning; caller keeps ownership.
+{
+    _decoder = decoder;
+}
+
+void
+Test::SetLog(std::fstream* log)  // Non-owning; stream must outlive the test.
+{
+    _log = log;
+}
+
+double Test::ActualBitRate(int nFrames)  // Returns bits per second: total bits / encoded duration.
+{
+    // Cast to double: the original integer division truncated the duration and
+    // divided by zero whenever nFrames < _inst.maxFramerate.
+    return 8.0 * _sumEncBytes / (nFrames / static_cast<double>(_inst.maxFramerate));
+}
+
+bool Test::PacketLoss(double lossRate, int /*thrown*/)  // True with probability lossRate (Bernoulli draw).
+{
+    return RandUniform() < lossRate;
+}
+
+void
+Test::VideoBufferToRawImage(TestVideoBuffer& videoBuffer, RawImage &image)
+{
+    // Shallow copy: image._buffer aliases the buffer owned by videoBuffer.
+    image._buffer = videoBuffer.GetBuffer();
+    image._size = videoBuffer.GetSize();
+    image._length = videoBuffer.GetLength();
+    image._width = videoBuffer.GetWidth();
+    image._height = videoBuffer.GetHeight();
+    image._timeStamp = videoBuffer.GetTimeStamp();
+}
+void
+Test::VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer, EncodedImage &image)
+{
+    // Shallow copy: image._buffer aliases the buffer owned by videoBuffer.
+    image._buffer = videoBuffer.GetBuffer();
+    image._length = videoBuffer.GetLength();
+    image._size = videoBuffer.GetSize();
+    image._frameType = static_cast<VideoFrameType>(videoBuffer.GetFrameType());
+    image._timeStamp = videoBuffer.GetTimeStamp();
+    image._encodedWidth = videoBuffer.GetCaptureWidth();
+    image._encodedHeight = videoBuffer.GetCaptureHeight();
+    image._completeFrame = true;
+}
+
+long filesize(const char *filename)  /* returns the file's size in bytes, or 0 if it cannot be read */
+{
+    FILE *f = fopen(filename,"rb");  /* open the file in read only */
+    long size = 0;
+    if (f != NULL && fseek(f,0,SEEK_END)==0) /* NULL check: fseek/fclose on a failed fopen crashed */
+        size = ftell(f);
+    if (f != NULL)
+        fclose(f);
+    return size;
+}
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/test.h b/trunk/src/modules/video_coding/codecs/test_framework/test.h
new file mode 100644
index 0000000..a31868b
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/test.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_TEST_H_
+// Guard macro fixed: was misspelled "FRAWEWORK", inconsistent with the other
+// headers in this directory (e.g. performance_test.h).
+#include "video_codec_interface.h"
+#include "video_buffer.h"
+#include <string>
+#include <fstream>
+#include <cstdlib>
+
+// Base class for all codec tests: owns the test configuration, the I/O
+// buffers, and helpers shared by the concrete test implementations.
+class Test
+{
+public:
+    Test(std::string name, std::string description);
+    Test(std::string name, std::string description, WebRtc_UWord32 bitRate);
+    virtual ~Test() {};
+    virtual void Perform()=0;  // Runs the actual test; implemented by subclasses.
+    virtual void Print();      // Logs PSNR/SSIM of input vs output files.
+    void SetEncoder(webrtc::VideoEncoder *encoder);
+    void SetDecoder(webrtc::VideoDecoder *decoder);
+    void SetLog(std::fstream* log);
+
+protected:
+    virtual void Setup();
+    virtual void CodecSettings(int width,
+                               int height,
+                               WebRtc_UWord32 frameRate=30,
+                               WebRtc_UWord32 bitRate=0);
+    virtual void Teardown();
+    double ActualBitRate(int nFrames);
+    virtual bool PacketLoss(double lossRate, int /*thrown*/);
+    static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }  // Uniform in (0, 1].
+    static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
+                                      webrtc::RawImage &image);
+    static void VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer,
+                                                 webrtc::EncodedImage &image);
+
+    webrtc::VideoEncoder*   _encoder;  // Not owned.
+    webrtc::VideoDecoder*   _decoder;  // Not owned.
+    WebRtc_UWord32          _bitRate;  // Target bitrate in kbps.
+    unsigned int            _lengthSourceFrame;  // Bytes per I420 frame.
+    unsigned char*          _sourceBuffer;
+    TestVideoBuffer         _inputVideoBuffer;
+    TestVideoEncodedBuffer  _encodedVideoBuffer;
+    TestVideoBuffer         _decodedVideoBuffer;
+    webrtc::VideoCodec      _inst;
+    std::fstream*           _log;  // Not owned.
+    std::string             _inname;
+    std::string             _outname;
+    std::string             _encodedName;
+    int                     _sumEncBytes;
+
+private:
+    std::string             _name;
+    std::string             _description;
+
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_TEST_H_
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/test_framework.gypi b/trunk/src/modules/video_coding/codecs/test_framework/test_framework.gypi
new file mode 100644
index 0000000..3639e9a
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/test_framework.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'test_framework',
+          'type': '<(library)',  # static or shared, chosen by the build configuration
+
+          'dependencies': [
+            '<(webrtc_root)/../test/metrics.gyp:metrics',
+            '<(webrtc_root)/../test/test.gyp:test_support',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+          ],
+
+          'include_dirs': [
+            '../interface',
+            '<(webrtc_root)/../testing/gtest/include',
+            '../../../../common_video/interface',
+          ],
+
+          'direct_dependent_settings': {
+            'include_dirs': [
+              '../interface',  # targets linking test_framework see the codec interface headers
+            ],
+          },
+
+          'sources': [
+            # header files
+            'benchmark.h',
+            'normal_async_test.h',
+            'normal_test.h',
+            'packet_loss_test.h',
+            'performance_test.h',
+            'test.h',
+            'unit_test.h',
+            'video_buffer.h',
+            'video_source.h',
+
+            # source files
+            'benchmark.cc',
+            'normal_async_test.cc',
+            'normal_test.cc',
+            'packet_loss_test.cc',
+            'performance_test.cc',
+            'test.cc',
+            'unit_test.cc',
+            'video_buffer.cc',
+            'video_source.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/unit_test.cc b/trunk/src/modules/video_coding/codecs/test_framework/unit_test.cc
new file mode 100644
index 0000000..99bd7fb
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -0,0 +1,817 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <cassert>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+#include "unit_test.h"
+#include "video_source.h"
+
+using namespace webrtc;
+
+UnitTest::UnitTest()  // Default-constructed unit test; resources are allocated lazily in Setup().
+:
+Test("UnitTest", "Unit test"),
+_tests(0),
+_errors(0),
+_source(NULL),
+_refFrame(NULL),
+_refEncFrame(NULL),
+_refDecFrame(NULL),
+_refEncFrameLength(0),
+_sourceFile(NULL),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+}
+
+UnitTest::UnitTest(std::string name, std::string description)  // Named variant for codec-specific subclasses.
+:
+Test(name, description),
+_tests(0),
+_errors(0),
+_source(NULL),
+_refFrame(NULL),
+_refEncFrame(NULL),
+_refDecFrame(NULL),
+_refEncFrameLength(0),
+_sourceFile(NULL),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+}
+
+UnitTest::~UnitTest()  // Releases everything Setup() allocated; all members are NULL-initialized so partial setup is safe.
+{
+    if (_encodeCompleteCallback) {
+        delete _encodeCompleteCallback;
+    }
+
+    if (_decodeCompleteCallback) {
+        delete _decodeCompleteCallback;
+    }
+
+    if (_source) {
+        delete _source;
+    }
+
+    if (_refFrame) {
+        delete [] _refFrame;
+    }
+
+    if (_refDecFrame) {
+        delete [] _refDecFrame;
+    }
+
+    if (_sourceBuffer) {
+        delete [] _sourceBuffer;
+    }
+
+    if (_sourceFile) {
+        fclose(_sourceFile);  // Teardown() may already have closed and NULLed it.
+    }
+
+    if (_refEncFrame) {
+        delete [] _refEncFrame;
+    }
+}
+
+WebRtc_Word32
+UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
+                                        const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                                        const webrtc::RTPFragmentationHeader*
+                                        fragmentation)
+{
+    // Copies the encoded frame into the shared buffer and signals completion.
+    _encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
+    _encodedVideoBuffer->CopyBuffer(encodedImage._size, encodedImage._buffer);
+    _encodedVideoBuffer->UpdateLength(encodedImage._length);
+    _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
+    _encodedVideoBuffer->SetCaptureWidth(
+        (WebRtc_UWord16)encodedImage._encodedWidth);
+    _encodedVideoBuffer->SetCaptureHeight(
+        (WebRtc_UWord16)encodedImage._encodedHeight);
+    _encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
+    _encodeComplete = true;  // Polled by UnitTest::WaitForEncodedFrame().
+    _encodedFrameType = encodedImage._frameType;
+    return 0;
+}
+
+WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(RawImage& image)
+{
+    // Copies the decoded frame into the shared buffer and signals completion.
+    _decodedVideoBuffer->VerifyAndAllocate(image._length);
+    _decodedVideoBuffer->CopyBuffer(image._length, image._buffer);
+    _decodedVideoBuffer->SetWidth(image._width);
+    _decodedVideoBuffer->SetHeight(image._height);
+    _decodedVideoBuffer->SetTimeStamp(image._timeStamp);
+    _decodeComplete = true;  // Polled by UnitTest::WaitForDecodedFrame().
+    return 0;
+}
+
+bool
+UnitTestEncodeCompleteCallback::EncodeComplete()  // Consumes the flag: true at most once per encoded frame.
+{
+    if (_encodeComplete)
+    {
+        _encodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+VideoFrameType
+UnitTestEncodeCompleteCallback::EncodedFrameType() const  // Type of the most recently encoded frame.
+{
+    return _encodedFrameType;
+}
+
+bool
+UnitTestDecodeCompleteCallback::DecodeComplete()  // Consumes the flag: true at most once per decoded frame.
+{
+    if (_decodeComplete)
+    {
+        _decodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+WebRtc_UWord32
+UnitTest::WaitForEncodedFrame() const  // Busy-waits up to kMaxWaitEncTimeMs; returns frame length or 0 on timeout.
+{
+    WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
+    while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs)
+    {
+        if (_encodeCompleteCallback->EncodeComplete())
+        {
+            return _encodedVideoBuffer.GetLength();
+        }
+    }
+    return 0;
+}
+
+WebRtc_UWord32
+UnitTest::WaitForDecodedFrame() const  // Busy-waits up to kMaxWaitDecTimeMs; returns frame length or 0 on timeout.
+{
+    WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
+    while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs)
+    {
+        if (_decodeCompleteCallback->DecodeComplete())
+        {
+            return _decodedVideoBuffer.GetLength();
+        }
+    }
+    return 0;
+}
+
+WebRtc_UWord32
+UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
+                                   WebRtc_UWord32 /* frameRate */)  // frameRate ignored; configured rate is used.
+{
+    return _encoder->SetRates(bitRate, _inst.maxFramerate);
+}
+
+void
+UnitTest::Setup()  // One-time init: opens the source, encodes and decodes a reference frame.
+{
+    // Use _sourceFile as a check to prevent multiple Setup() calls.
+    if (_sourceFile != NULL)
+    {
+        return;
+    }
+
+    if (_encodeCompleteCallback == NULL)
+    {
+        _encodeCompleteCallback =
+            new UnitTestEncodeCompleteCallback(&_encodedVideoBuffer);
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        _decodeCompleteCallback =
+            new UnitTestDecodeCompleteCallback(&_decodedVideoBuffer);
+    }
+
+    _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
+    _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
+
+    _source = new VideoSource(webrtc::test::ProjectRootPath() +
+                              "resources/foreman_cif.yuv", kCIF);
+
+    _lengthSourceFrame = _source->GetFrameLength();
+    _refFrame = new unsigned char[_lengthSourceFrame];
+    _refDecFrame = new unsigned char[_lengthSourceFrame];
+    _sourceBuffer = new unsigned char [_lengthSourceFrame];
+    _sourceFile = fopen(_source->GetFileName().c_str(), "rb");
+    ASSERT_TRUE(_sourceFile != NULL);
+
+    _inst.maxFramerate = _source->GetFrameRate();
+    _bitRate = 300;  // kbps; fixed test configuration.
+    _inst.startBitrate = 300;
+    _inst.maxBitrate = 4000;
+    _inst.width = _source->GetWidth();
+    _inst.height = _source->GetHeight();
+
+    // Get input frame.
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
+                           == _lengthSourceFrame);
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+    rewind(_sourceFile);
+
+    // Get a reference encoded frame.
+    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+
+    RawImage image;
+    VideoBufferToRawImage(_inputVideoBuffer, image);
+
+    // Ensures our initial parameters are valid.
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    VideoFrameType videoFrameType = kDeltaFrame;
+    _encoder->Encode(image, NULL, &videoFrameType);
+    _refEncFrameLength = WaitForEncodedFrame();
+    ASSERT_TRUE(_refEncFrameLength > 0);
+    _refEncFrame = new unsigned char[_refEncFrameLength];
+    memcpy(_refEncFrame, _encodedVideoBuffer.GetBuffer(), _refEncFrameLength);
+
+    // Get a reference decoded frame.
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    unsigned int frameLength = 0;
+    int i=0;
+    while (frameLength == 0)  // Keep feeding frames until the decoder produces output.
+    {
+        if (i > 0)
+        {
+            // Insert yet another frame
+            _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+            ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
+                _sourceFile) == _lengthSourceFrame);
+            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+            _inputVideoBuffer.SetWidth(_source->GetWidth());
+            _inputVideoBuffer.SetHeight(_source->GetHeight());
+            VideoBufferToRawImage(_inputVideoBuffer, image);
+            _encoder->Encode(image, NULL, &videoFrameType);
+            ASSERT_TRUE(WaitForEncodedFrame() > 0);
+        }
+        EncodedImage encodedImage;
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
+                               == WEBRTC_VIDEO_CODEC_OK);
+        frameLength = WaitForDecodedFrame();
+        _encodedVideoBuffer.Reset();
+        _encodedVideoBuffer.UpdateLength(0);
+        i++;
+    }
+    rewind(_sourceFile);
+    EXPECT_TRUE(frameLength == _lengthSourceFrame);
+    memcpy(_refDecFrame, _decodedVideoBuffer.GetBuffer(), _lengthSourceFrame);
+}
+
+// Releases both codecs, closes the source file and frees every buffer
+// allocated by Setup(). Safe to call repeatedly; only the first call does
+// any work.
+void
+UnitTest::Teardown()
+{
+    // Use _sourceFile as a check to prevent multiple Teardown() calls.
+    if (_sourceFile == NULL)
+    {
+        return;
+    }
+
+    _encoder->Release();
+    _decoder->Release();
+
+    fclose(_sourceFile);
+    _sourceFile = NULL;
+
+    // Free and null every heap buffer owned by the fixture.
+    unsigned char** frameBuffers[] =
+        { &_refFrame, &_refEncFrame, &_refDecFrame, &_sourceBuffer };
+    const int kNumBuffers = sizeof(frameBuffers) / sizeof(frameBuffers[0]);
+    for (int i = 0; i < kNumBuffers; i++)
+    {
+        delete [] *frameBuffers[i];
+        *frameBuffers[i] = NULL;
+    }
+}
+
+// Prints a summary of the run: total tests completed and, if any failed,
+// the failure count. _tests and _errors are maintained by the test methods.
+void
+UnitTest::Print()
+{
+    printf("Unit Test\n\n%i tests completed\n", _tests);
+    if (_errors <= 0)
+    {
+        printf("ALL PASSED\n\n");
+    }
+    else
+    {
+        printf("%i FAILED\n\n", _errors);
+    }
+}
+
+// Decodes the currently buffered encoded frame without checking the
+// outcome. Returns the decoded frame length on success, or the decoder's
+// error code on failure. Always clears the encoded buffer afterwards.
+int
+UnitTest::DecodeWithoutAssert()
+{
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    const int decodeStatus = _decoder->Decode(encodedImage, 0, NULL);
+    const int decodedLength = WaitForDecodedFrame();
+    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.UpdateLength(0);
+    if (decodeStatus != WEBRTC_VIDEO_CODEC_OK)
+    {
+        return decodeStatus;
+    }
+    return decodedLength;
+}
+
+// Decodes the currently buffered encoded frame and checks — with both
+// assert() (debug builds) and EXPECT_TRUE (test bookkeeping) — that the
+// decode succeeds and yields either no output yet or a complete source
+// frame. Returns the decoded frame length on success, or the decoder's
+// error code on failure.
+int
+UnitTest::Decode()
+{
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    if (encodedImage._length == 0)
+    {
+        // Nothing buffered to decode; treat as success.
+        return WEBRTC_VIDEO_CODEC_OK;
+    }
+    int ret = _decoder->Decode(encodedImage, 0, NULL);
+    unsigned int frameLength = WaitForDecodedFrame();
+    assert(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
+        == _lengthSourceFrame));
+    EXPECT_TRUE(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
+        == _lengthSourceFrame));
+    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.UpdateLength(0);
+    return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
+}
+
+// Test pure virtual VideoEncoder and VideoDecoder APIs.
+// Exercises the full encoder/decoder API surface in sequence: calls before
+// initialization, invalid InitEncode()/InitDecode() parameters, encode and
+// decode after Release()/Reset(), stress sequences, and timestamp
+// propagation through the codec pair. The ordering of these steps matters;
+// each section restores the state the next one relies on.
+void
+UnitTest::Perform()
+{
+    UnitTest::Setup();
+    int frameLength;
+    RawImage inputImage;
+    EncodedImage encodedImage;
+    VideoFrameType videoFrameType = kDeltaFrame;
+
+    //----- Encoder parameter tests -----
+
+    //-- Calls before InitEncode() --
+    // We want to revert the initialization done in Setup().
+    EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &videoFrameType )
+               == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+
+    //-- InitEncode() errors --
+    // Null pointer.
+    EXPECT_TRUE(_encoder->InitEncode(NULL, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    // bit rate exceeds max bit rate
+    WebRtc_Word32 tmpBitRate = _inst.startBitrate;
+    WebRtc_Word32 tmpMaxBitRate = _inst.maxBitrate;
+    _inst.startBitrate = 4000;
+    _inst.maxBitrate = 3000;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440)  ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.startBitrate = tmpBitRate;
+    _inst.maxBitrate = tmpMaxBitRate; //unspecified value
+
+    // Bad framerate.
+    _inst.maxFramerate = 0;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    // Seems like we should allow any framerate in range [0, 255].
+    //_inst.frameRate = 100;
+    //EXPECT_TRUE(_encoder->InitEncode(&_inst, 1) == -1); // FAILS
+    _inst.maxFramerate = 30;
+
+    // Bad bitrate.
+    _inst.startBitrate = -1;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.maxBitrate = _inst.startBitrate - 1;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.maxBitrate = 0;
+    _inst.startBitrate = 300;
+
+    // Bad maxBitRate.
+    _inst.maxBitrate = 200;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.maxBitrate = 4000;
+
+    // Bad width.
+    _inst.width = 0;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) < 0);
+    // Should there be a width and height cap?
+    //_inst.width = 10000;
+    //EXPECT_TRUE(_encoder->InitEncode(&_inst, 1) == -1);
+    _inst.width = _source->GetWidth();
+
+    // Bad height.
+    _inst.height = 0;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) < 0);
+    _inst.height = _source->GetHeight();
+
+    // Bad number of cores.
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, -1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+
+    //-- Encode() errors --
+
+    // inputVideoBuffer unallocated.
+    _inputVideoBuffer.Free();
+    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+
+    //----- Encoder stress tests -----
+
+    // Vary frame rate and I-frame request.
+    // Alternate key and delta frame requests on every other iteration.
+    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+    for (int i = 1; i <= 60; i++)
+    {
+        VideoFrameType frameType = !(i % 2) ? kKeyFrame : kDeltaFrame;
+        EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &frameType) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        EXPECT_TRUE(WaitForEncodedFrame() > 0);
+    }
+
+    // Init then encode.
+    // Re-initializing must yield output bit-exact with the Setup() reference.
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(WaitForEncodedFrame() > 0);
+
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    _encoder->Encode(inputImage, NULL, &videoFrameType);
+    frameLength = WaitForEncodedFrame();
+    EXPECT_TRUE(frameLength > 0);
+    EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
+            _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+
+    // Reset then encode.
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_OK);
+    WaitForEncodedFrame();
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    _encoder->Encode(inputImage, NULL, &videoFrameType);
+    frameLength = WaitForEncodedFrame();
+    EXPECT_TRUE(frameLength > 0);
+    EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
+        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+
+    // Release then encode.
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_OK);
+    WaitForEncodedFrame();
+    EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    _encoder->Encode(inputImage, NULL, &videoFrameType);
+    frameLength = WaitForEncodedFrame();
+    EXPECT_TRUE(frameLength > 0);
+    EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
+        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+
+    //----- Decoder parameter tests -----
+
+    //-- Calls before InitDecode() --
+    // We want to revert the initialization done in Setup().
+    EXPECT_TRUE(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    EXPECT_TRUE(_decoder->Decode(encodedImage, false, NULL) ==
+        WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+    WaitForDecodedFrame();
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    //-- Decode() errors --
+    // Unallocated encodedVideoBuffer.
+    _encodedVideoBuffer.Free();
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    encodedImage._length = 10;  // Buffer NULL but length > 0
+    EXPECT_EQ(_decoder->Decode(encodedImage, false, NULL),
+              WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+
+    //----- Decoder stress tests -----
+    unsigned char* tmpBuf = new unsigned char[_lengthSourceFrame];
+
+    // "Random" and zero data.
+    // We either expect an error, or at the least, no output.
+    // This relies on the codec's ability to detect an erroneous bitstream.
+    /*
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    for (int i = 0; i < 100; i++)
+    {
+        ASSERT_TRUE(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
+            == _refEncFrameLength);
+        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        FillDecoderSpecificInfo(encodedImage);
+        int ret = _decoder->Decode(encodedImage, false, _decoderSpecificInfo);
+        EXPECT_TRUE(ret <= 0);
+        if (ret == 0)
+        {
+            EXPECT_TRUE(WaitForDecodedFrame() == 0);
+        }
+
+        memset(tmpBuf, 0, _refEncFrameLength);
+        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        FillDecoderSpecificInfo(encodedImage);
+        ret = _decoder->Decode(encodedImage, false, _decoderSpecificInfo);
+        EXPECT_TRUE(ret <= 0);
+        if (ret == 0)
+        {
+            EXPECT_TRUE(WaitForDecodedFrame() == 0);
+        }
+    }
+    */
+    rewind(_sourceFile);
+
+    _encodedVideoBuffer.UpdateLength(_refEncFrameLength);
+    _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, _refEncFrame);
+
+    // Init then decode.
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    frameLength = 0;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    // Feed the same reference frame until the decoder emits output.
+    while (frameLength == 0)
+    {
+        _decoder->Decode(encodedImage, false, NULL);
+        frameLength = WaitForDecodedFrame();
+    }
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        _refDecFrame, _lengthSourceFrame) == true);
+
+    // Reset then decode.
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    frameLength = 0;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    while (frameLength == 0)
+    {
+        _decoder->Decode(encodedImage, false, NULL);
+        frameLength = WaitForDecodedFrame();
+    }
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        _refDecFrame, _lengthSourceFrame) == true);
+
+    // Decode with other size, reset, then decode with original size again
+    // to verify that decoder is reset to a "fresh" state upon Reset().
+    {
+        // assert that input frame size is a factor of two, so that we can use
+        // quarter size below
+        EXPECT_TRUE((_inst.width % 2 == 0) && (_inst.height % 2 == 0));
+
+        VideoCodec tempInst;
+        memcpy(&tempInst, &_inst, sizeof(VideoCodec));
+        tempInst.width /= 2;
+        tempInst.height /= 2;
+
+        // Encode reduced (quarter) frame size
+        EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+        EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        RawImage tempInput(inputImage._buffer, inputImage._length/4,
+                           inputImage._size/4);
+        VideoFrameType videoFrameType = kDeltaFrame;
+        _encoder->Encode(tempInput, NULL, &videoFrameType);
+        frameLength = WaitForEncodedFrame();
+        EXPECT_TRUE(frameLength > 0);
+
+        // Reset then decode.
+        EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+        frameLength = 0;
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        while (frameLength == 0)
+        {
+            _decoder->Decode(encodedImage, false, NULL);
+            frameLength = WaitForDecodedFrame();
+        }
+
+        // Encode original frame again
+        EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+        EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        _encoder->Encode(inputImage, NULL, &videoFrameType);
+        frameLength = WaitForEncodedFrame();
+        EXPECT_TRUE(frameLength > 0);
+
+        // Reset then decode original frame again.
+        EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+        frameLength = 0;
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        while (frameLength == 0)
+        {
+            _decoder->Decode(encodedImage, false, NULL);
+            frameLength = WaitForDecodedFrame();
+        }
+
+        // check that decoded frame matches with reference
+        EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+            _refDecFrame, _lengthSourceFrame) == true);
+
+    }
+
+    // Release then decode.
+    EXPECT_TRUE(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    frameLength = 0;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    while (frameLength == 0)
+    {
+        _decoder->Decode(encodedImage, false, NULL);
+        frameLength = WaitForDecodedFrame();
+    }
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        _refDecFrame, _lengthSourceFrame) == true);
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+
+    delete [] tmpBuf;
+
+    //----- Function tests -----
+    int frames = 0;
+    // Do not specify maxBitRate (as in ViE).
+    _inst.maxBitrate = 0;
+
+    //-- Timestamp propagation --
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    printf("\nTimestamp propagation test...\n");
+    frames = 0;
+    // frameDelay counts frames the decoder has buffered but not yet emitted,
+    // so the expected decoded timestamp lags the encoded one by that amount.
+    int frameDelay = 0;
+    int encTimeStamp;
+    _decodedVideoBuffer.SetTimeStamp(0);
+    while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+        _lengthSourceFrame)
+    {
+        _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+        _inputVideoBuffer.SetTimeStamp(frames);
+        VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+        VideoFrameType videoFrameType = kDeltaFrame;
+        ASSERT_TRUE(_encoder->Encode(inputImage,
+                                                NULL,
+                                                &videoFrameType) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        frameLength = WaitForEncodedFrame();
+        //ASSERT_TRUE(frameLength);
+        EXPECT_TRUE(frameLength > 0);
+        encTimeStamp = _encodedVideoBuffer.GetTimeStamp();
+        EXPECT_TRUE(_inputVideoBuffer.GetTimeStamp() ==
+                static_cast<unsigned>(encTimeStamp));
+
+        frameLength = Decode();
+        if (frameLength == 0)
+        {
+            frameDelay++;
+        }
+
+        encTimeStamp -= frameDelay;
+        if (encTimeStamp < 0)
+        {
+            encTimeStamp = 0;
+        }
+        EXPECT_TRUE(_decodedVideoBuffer.GetTimeStamp() ==
+                static_cast<unsigned>(encTimeStamp));
+        frames++;
+    }
+    ASSERT_TRUE(feof(_sourceFile) != 0);
+    rewind(_sourceFile);
+
+    RateControlTests();
+
+    Teardown();
+}
+
+// Encodes the source sequence at a series of target bitrates and verifies
+// that the encoder's actual output rate tracks the target to within 10%
+// over a reasonable operating range (100-2500 kbps).
+void
+UnitTest::RateControlTests()
+{
+    std::string outFileName;
+    int frames = 0;
+    RawImage inputImage;
+    WebRtc_UWord32 frameLength;
+
+    // Do not specify maxBitRate (as in ViE).
+    _inst.maxBitrate = 0;
+    //-- Verify rate control --
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    // add: should also be 0, and 1
+    const int bitRate[] =
+    {100, 200, 300, 400, 500, 600, 800, 1000, 2000, 3000, 4000, 10000};
+    const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
+
+    printf("\nRate control test\n");
+    for (int i = 0; i < nBitrates; i++)
+    {
+        _bitRate = bitRate[i];
+        int totalBytes = 0;
+        _inst.startBitrate = _bitRate;
+        _encoder->InitEncode(&_inst, 4, 1440);
+        _decoder->Reset();
+        _decoder->InitDecode(&_inst, 1);
+        frames = 0;
+
+        // Both branches of the previous if/else on (_bitRate >
+        // _inst.maxBitrate) made the identical call, so a single
+        // unconditional call is equivalent.
+        CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
+
+        while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+            _lengthSourceFrame)
+        {
+            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+            // Advance the timestamp by one frame duration in 90 kHz units.
+            _inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.GetTimeStamp() +
+                static_cast<WebRtc_UWord32>(9e4 /
+                    static_cast<float>(_inst.maxFramerate)));
+            VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+            VideoFrameType videoFrameType = kDeltaFrame;
+            ASSERT_EQ(_encoder->Encode(inputImage, NULL, &videoFrameType),
+                      WEBRTC_VIDEO_CODEC_OK);
+            frameLength = WaitForEncodedFrame();
+            // frameLength is unsigned, so this check is vacuous; zero-length
+            // (dropped) frames are allowed here under rate control.
+            ASSERT_GE(frameLength, 0u);
+            totalBytes += frameLength;
+            frames++;
+
+            _encodedVideoBuffer.UpdateLength(0);
+            _encodedVideoBuffer.Reset();
+        }
+        WebRtc_UWord32 actualBitrate =
+            (totalBytes  / frames * _inst.maxFramerate * 8)/1000;
+        // %u: actualBitrate is unsigned (previously printed with %d).
+        printf("Target bitrate: %d kbps, actual bitrate: %u kbps\n", _bitRate,
+            actualBitrate);
+        // Test for close match over reasonable range.
+        if (_bitRate >= 100 && _bitRate <= 2500)
+        {
+            EXPECT_TRUE(abs(WebRtc_Word32(actualBitrate - _bitRate)) <
+                0.1 * _bitRate); // for VP8
+        }
+        ASSERT_TRUE(feof(_sourceFile) != 0);
+        rewind(_sourceFile);
+    }
+}
+
+// Returns true iff the two byte ranges have equal length and identical
+// contents.
+bool
+UnitTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                          const void* ptrB, unsigned int bLengthBytes)
+{
+    return (aLengthBytes == bLengthBytes) &&
+           (memcmp(ptrA, ptrB, aLengthBytes) == 0);
+}
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/unit_test.h b/trunk/src/modules/video_coding/codecs/test_framework/unit_test.h
new file mode 100644
index 0000000..01b348d
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/unit_test.h
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
+
+#include "test.h"
+#include "event_wrapper.h"
+
+// Disable "conditional expression is constant" warnings on the perfectly
+// acceptable do { ... } while (0) constructions below.
+// Refer to http://stackoverflow.com/questions/1946445/
+//   is-there-better-way-to-write-do-while0-construct-to-avoid-compiler-warnings
+// for some discussion of the issue.
+#ifdef _WIN32
+#pragma warning(disable : 4127)
+#endif
+
+class VideoSource;
+class UnitTestEncodeCompleteCallback;
+class UnitTestDecodeCompleteCallback;
+
+// Generic unit test for a VideoEncoder/VideoDecoder pair. Subclasses supply
+// the concrete codec and may override the codec-specific hooks below.
+class UnitTest : public Test
+{
+public:
+    UnitTest();
+    virtual ~UnitTest();
+    // Runs the full parameter/stress/function test sequence.
+    virtual void Perform();
+    // Prints the number of completed tests and failures.
+    virtual void Print();
+
+protected:
+    UnitTest(std::string name, std::string description);
+    // Applies |bitRate| to the encoder; the frame rate argument is unused
+    // by the base implementation.
+    virtual WebRtc_UWord32 CodecSpecific_SetBitrate(
+        WebRtc_UWord32 bitRate,
+        WebRtc_UWord32 /* frameRate */);
+    virtual void Setup();
+    virtual void Teardown();
+    virtual void RateControlTests();
+    // Decode() asserts on the outcome; DecodeWithoutAssert() does not.
+    // Both return the decoded frame length or a decoder error code.
+    virtual int Decode();
+    virtual int DecodeWithoutAssert();
+    // Hook for codecs that need extra decoder configuration; returns 0
+    // (success) by default. (Stray trailing semicolon removed.)
+    virtual int SetCodecSpecificParameters() { return 0; }
+
+    // True iff both byte ranges have the same length and contents.
+    virtual bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
+                                 const void *ptrB, unsigned int bLengthBytes);
+
+    // Wait (bounded by kMaxWait*TimeMs) for the callbacks to deliver a
+    // frame; return its length.
+    WebRtc_UWord32 WaitForEncodedFrame() const;
+    WebRtc_UWord32 WaitForDecodedFrame() const;
+
+    int _tests;   // Number of tests completed.
+    int _errors;  // Number of failed tests.
+
+    VideoSource* _source;         // Raw YUV input sequence.
+    unsigned char* _refFrame;     // First raw frame of the sequence.
+    unsigned char* _refEncFrame;  // Reference encoding of _refFrame.
+    unsigned char* _refDecFrame;  // Reference decoding of _refEncFrame.
+    int _refEncFrameLength;       // Length of _refEncFrame in bytes.
+    FILE* _sourceFile;            // Open handle to the YUV source file.
+
+    UnitTestEncodeCompleteCallback* _encodeCompleteCallback;
+    UnitTestDecodeCompleteCallback* _decodeCompleteCallback;
+    // Maximum wait for an encoded/decoded frame, in milliseconds.
+    enum { kMaxWaitEncTimeMs = 100 };
+    enum { kMaxWaitDecTimeMs = 25 };
+};
+
+// Receives encoded frames from the encoder under test and stores them in
+// the buffer supplied by the test fixture.
+class UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback
+{
+public:
+    UnitTestEncodeCompleteCallback(TestVideoEncodedBuffer* buffer,
+                                   WebRtc_UWord32 decoderSpecificSize = 0,
+                                   void* decoderSpecificInfo = NULL) :
+      _encodedVideoBuffer(buffer),
+      _decoderSpecificInfo(decoderSpecificInfo),
+      _decoderSpecificSize(decoderSpecificSize),
+      _encodeComplete(false),
+      // Previously left uninitialized, so EncodedFrameType() could return
+      // indeterminate data before the first encode completed.
+      _encodedFrameType(webrtc::kDeltaFrame) {}
+    WebRtc_Word32 Encoded(webrtc::EncodedImage& encodedImage,
+                          const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                          const webrtc::RTPFragmentationHeader*
+                          fragmentation = NULL);
+    bool EncodeComplete();
+    // Note that this only makes sense if an encode has been completed
+    webrtc::VideoFrameType EncodedFrameType() const;
+private:
+    TestVideoEncodedBuffer* _encodedVideoBuffer;  // Supplied by the test.
+    void* _decoderSpecificInfo;
+    WebRtc_UWord32 _decoderSpecificSize;
+    bool _encodeComplete;
+    webrtc::VideoFrameType _encodedFrameType;
+};
+
+// Receives decoded frames from the decoder under test and stores them in
+// the buffer supplied by the test fixture.
+class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
+{
+public:
+    UnitTestDecodeCompleteCallback(TestVideoBuffer* buffer) :
+        _decodedVideoBuffer(buffer), _decodeComplete(false) {}
+    // Invoked by the decoder for every decoded image.
+    WebRtc_Word32 Decoded(webrtc::RawImage& image);
+    bool DecodeComplete();
+private:
+    TestVideoBuffer* _decodedVideoBuffer;  // Supplied by the test.
+    bool _decodeComplete;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
+
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/video_buffer.cc b/trunk/src/modules/video_coding/codecs/test_framework/video_buffer.cc
new file mode 100644
index 0000000..3958e90
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/video_buffer.cc
@@ -0,0 +1,319 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <string.h>
+#include "video_buffer.h"
+
+using namespace webrtc;
+
+// Creates an empty buffer; no storage is allocated until
+// VerifyAndAllocate() is called.
+TestVideoBuffer::TestVideoBuffer()
+    : _buffer(0),
+      _bufferSize(0),
+      _bufferLength(0),
+      _startOffset(0),
+      _timeStamp(0),
+      _width(0),
+      _height(0)
+{
+}
+
+
+TestVideoBuffer::~TestVideoBuffer()
+{
+    // Reset bookkeeping (mirrors Free()) and release the storage.
+    // delete[] on a null pointer is a no-op, so no guard is needed.
+    _timeStamp = 0;
+    _startOffset = 0;
+    _bufferLength = 0;
+    _bufferSize = 0;
+
+    delete [] _buffer;
+    _buffer = 0;
+}
+
+// Copy constructor: allocates storage of the same capacity and copies the
+// payload of |rhs|.
+TestVideoBuffer::TestVideoBuffer(const TestVideoBuffer& rhs)
+:
+_buffer(0),
+_bufferSize(rhs._bufferSize),
+_bufferLength(rhs._bufferLength),
+_startOffset(rhs._startOffset),
+_timeStamp(rhs._timeStamp),
+_width(rhs._width),
+_height(rhs._height)
+{
+    // make sure that our buffer is big enough
+    _buffer = new unsigned char[_bufferSize];
+
+    // Copy the payload, which lives at _startOffset in both objects.
+    // (Previously this read from rhs._buffer without the offset, copying
+    // the wrong bytes whenever rhs._startOffset != 0; rhs.GetBuffer()
+    // already returns base + offset.)
+    memcpy(_buffer + _startOffset, rhs._buffer + _startOffset, _bufferLength);
+}
+
+// Sets the frame timestamp (90 kHz units).
+void TestVideoBuffer::SetTimeStamp(unsigned int timeStamp)
+{
+    _timeStamp = timeStamp;
+}
+
+// Frame width in pixels.
+unsigned int TestVideoBuffer::GetWidth() const
+{
+    return _width;
+}
+
+// Frame height in pixels.
+unsigned int TestVideoBuffer::GetHeight() const
+{
+    return _height;
+}
+
+void TestVideoBuffer::SetWidth(unsigned int width)
+{
+    _width = width;
+}
+
+void TestVideoBuffer::SetHeight(unsigned int height)
+{
+    _height = height;
+}
+
+
+// Releases the frame storage and resets all members to their initial state.
+void TestVideoBuffer::Free()
+{
+    _timeStamp = 0;
+    _startOffset = 0;
+    _bufferLength = 0;
+    _bufferSize = 0;
+    _height = 0;
+    _width = 0;
+
+    // delete[] on a null pointer is harmless, so no guard is required.
+    delete [] _buffer;
+    _buffer = 0;
+}
+
+// Grows the allocation to at least |minimumSize| bytes, preserving any
+// existing contents. Never shrinks.
+void TestVideoBuffer::VerifyAndAllocate(unsigned int minimumSize)
+{
+    if (minimumSize <= _bufferSize)
+    {
+        // Current allocation is already large enough.
+        return;
+    }
+
+    unsigned char* enlarged = new unsigned char[minimumSize];
+    if (_buffer)
+    {
+        // Preserve the old contents before discarding the old storage.
+        memcpy(enlarged, _buffer, _bufferSize);
+        delete [] _buffer;
+    }
+    _buffer = enlarged;
+    _bufferSize = minimumSize;
+}
+
+// Moves the payload start offset to |length| bytes into the allocation,
+// adjusting _bufferLength so the payload's end position is preserved.
+// Returns 0 on success, -1 if the requested offset exceeds the allocation
+// or the current payload length.
+int TestVideoBuffer::SetOffset(unsigned int length)
+{
+    if (length > _bufferSize ||
+        length > _bufferLength)
+    {
+        return -1;
+    }
+
+    unsigned int oldOffset = _startOffset;
+
+    if(oldOffset > length)
+    {
+        // Offset moves backwards: the payload grows by the difference.
+        unsigned int newLength = _bufferLength + (oldOffset-length);// increase by the diff
+        assert(newLength <= _bufferSize);
+        _bufferLength = newLength;
+    }
+    if(oldOffset < length)
+    {
+        // Offset moves forwards: shrink the payload by the difference.
+        // NOTE(review): if the difference >= _bufferLength the length is
+        // left unchanged rather than clamped to 0 — confirm this is
+        // intentional before relying on it.
+        if(_bufferLength > (length-oldOffset))
+        {
+            _bufferLength -= (length-oldOffset); // decrease by the diff
+        }
+    }
+    _startOffset = length; // update
+
+    return 0;
+}
+
+// Sets the payload length without touching the contents. The allocation
+// must already be large enough to hold the payload at the current offset.
+void TestVideoBuffer::UpdateLength(unsigned int newLength)
+{
+    assert(newLength +_startOffset <= _bufferSize);
+    _bufferLength = newLength;
+}
+
+// Copies |length| bytes from |buffer| into the payload area (at the current
+// start offset) and records the new payload length.
+void TestVideoBuffer::CopyBuffer(unsigned int length, const unsigned char* buffer)
+{
+    assert(length+_startOffset <= _bufferSize);
+    memcpy(_buffer+_startOffset, buffer, length);
+    _bufferLength = length;
+}
+
+// Deep-copies the payload and metadata of |fromVideoBuffer| into this
+// buffer. The destination allocation must already be large enough.
+void TestVideoBuffer::CopyBuffer(TestVideoBuffer& fromVideoBuffer)
+{
+    assert(fromVideoBuffer.GetLength() + fromVideoBuffer.GetStartOffset() <= _bufferSize);
+    assert(fromVideoBuffer.GetSize() <= _bufferSize);
+
+    _bufferLength = fromVideoBuffer.GetLength();
+    _startOffset = fromVideoBuffer.GetStartOffset();
+    _timeStamp = fromVideoBuffer.GetTimeStamp();
+    _height = fromVideoBuffer.GetHeight();
+    _width = fromVideoBuffer.GetWidth();
+
+    // GetBuffer() already points at the payload (base + start offset), and
+    // _bufferLength now holds the source payload length.
+    memcpy(_buffer + _startOffset, fromVideoBuffer.GetBuffer(), _bufferLength);
+}
+
+// Shallow copy: aliases the other buffer's storage instead of copying it.
+// The caller must invoke ClearPointer() before this object is destroyed,
+// or the destructor will delete storage it does not own.
+void TestVideoBuffer::CopyPointer(const TestVideoBuffer& fromVideoBuffer)
+{
+    _bufferSize = fromVideoBuffer.GetSize();
+    _bufferLength = fromVideoBuffer.GetLength();
+    _startOffset = fromVideoBuffer.GetStartOffset();
+    _timeStamp = fromVideoBuffer.GetTimeStamp();
+    _height = fromVideoBuffer.GetHeight();
+    _width = fromVideoBuffer.GetWidth();
+
+    // NOTE(review): GetBuffer() returns base + start offset. When
+    // _startOffset != 0 this stores an already-offset pointer while also
+    // copying _startOffset, so GetBuffer() on this object would apply the
+    // offset twice. Harmless while offsets are 0 — confirm before using
+    // with a non-zero offset.
+    _buffer = fromVideoBuffer.GetBuffer();
+}
+
+// Drops the aliased pointer so the destructor will not free storage owned
+// by another buffer.
+void TestVideoBuffer::ClearPointer()
+{
+    _buffer = NULL;
+}
+
+// Exchanges all state (storage pointer and metadata) with |videoBuffer|.
+void TestVideoBuffer::SwapBuffers(TestVideoBuffer& videoBuffer)
+{
+    // Stash our state before overwriting it.
+    unsigned char* prevBuffer = _buffer;
+    unsigned int   prevSize   = _bufferSize;
+    unsigned int   prevLength = _bufferLength;
+    unsigned int   prevOffset = _startOffset;
+    unsigned int   prevTime   = _timeStamp;
+    unsigned int   prevHeight = _height;
+    unsigned int   prevWidth  = _width;
+
+    // Adopt the other buffer's state...
+    _buffer = videoBuffer.GetBuffer();
+    _bufferSize = videoBuffer.GetSize();
+    _bufferLength = videoBuffer.GetLength();
+    _startOffset = videoBuffer.GetStartOffset();
+    _timeStamp = videoBuffer.GetTimeStamp();
+    _height = videoBuffer.GetHeight();
+    _width = videoBuffer.GetWidth();
+
+    // ...and hand it ours.
+    videoBuffer.Set(prevBuffer, prevSize, prevLength, prevOffset, prevTime);
+    videoBuffer.SetHeight(prevHeight);
+    videoBuffer.SetWidth(prevWidth);
+}
+
+// Adopts the given storage pointer and metadata verbatim (no copy, no
+// ownership checks); used by SwapBuffers().
+void TestVideoBuffer::Set(unsigned char* tempBuffer, unsigned int tempSize,
+                          unsigned int tempLength, unsigned int tempOffset,
+                          unsigned int timeStamp)
+{
+    _buffer = tempBuffer;
+    _bufferSize = tempSize;
+    _bufferLength = tempLength;
+    _startOffset = tempOffset;
+    _timeStamp = timeStamp;
+}
+
+// Returns a pointer to the payload (buffer base advanced by the start
+// offset).
+unsigned char* TestVideoBuffer::GetBuffer() const
+{
+    return _buffer+_startOffset;
+}
+
+// Byte offset of the payload within the allocation.
+unsigned int TestVideoBuffer::GetStartOffset() const
+{
+    return _startOffset;
+}
+
+// Allocated capacity in bytes.
+unsigned int TestVideoBuffer::GetSize() const
+{
+    return _bufferSize;
+}
+
+// Payload length in bytes.
+unsigned int TestVideoBuffer::GetLength() const
+{
+    return _bufferLength;
+}
+
+// Frame timestamp in 90 kHz units (see SetTimeStamp()).
+unsigned int TestVideoBuffer::GetTimeStamp() const
+{
+    return _timeStamp;
+}
+
+/**
+*   TestVideoEncodedBuffer
+*
+*/
+
+// Creates an empty encoded-frame buffer, marked as a delta frame with no
+// capture dimensions and an unset (-1) frame rate.
+TestVideoEncodedBuffer::TestVideoEncodedBuffer()
+    : _captureWidth(0),
+      _captureHeight(0),
+      _frameRate(-1)
+{
+    _frameType = kDeltaFrame;
+}
+
+// Nothing beyond the base class to clean up.
+TestVideoEncodedBuffer::~TestVideoEncodedBuffer()
+{
+}
+
+// Capture dimensions of the frame this encoded data was produced from.
+void TestVideoEncodedBuffer::SetCaptureWidth(unsigned short width)
+{
+    _captureWidth = width;
+}
+
+void TestVideoEncodedBuffer::SetCaptureHeight(unsigned short height)
+{
+    _captureHeight = height;
+}
+
+unsigned short TestVideoEncodedBuffer::GetCaptureWidth()
+{
+    return _captureWidth;
+}
+
+unsigned short TestVideoEncodedBuffer::GetCaptureHeight()
+{
+    return _captureHeight;
+}
+
+// Frame type (key/delta) of the encoded data.
+VideoFrameType TestVideoEncodedBuffer::GetFrameType()
+{
+    return _frameType;
+}
+
+void TestVideoEncodedBuffer::SetFrameType(VideoFrameType frametype)
+{
+    _frameType = frametype;
+}
+
+// Restores the encoded-frame metadata to its constructed defaults (the
+// same values the constructor sets).
+void TestVideoEncodedBuffer::Reset()
+{
+    _captureWidth = 0;
+    _captureHeight = 0;
+    _frameRate = -1;
+    _frameType = kDeltaFrame;
+}
+
+// Frame rate associated with the encoded data; -1 means unset.
+void TestVideoEncodedBuffer::SetFrameRate(float frameRate)
+{
+    _frameRate = frameRate;
+}
+
+float TestVideoEncodedBuffer::GetFrameRate()
+{
+    return _frameRate;
+}
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/video_buffer.h b/trunk/src/modules/video_coding/codecs/test_framework/video_buffer.h
new file mode 100644
index 0000000..824440e
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/video_buffer.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
+
+#include "typedefs.h"
+#include "video_image.h"
+
+class TestVideoBuffer
+{
+public:
+    TestVideoBuffer();
+
+    virtual ~TestVideoBuffer();
+
+    TestVideoBuffer(const TestVideoBuffer& rhs);
+
+    /**
+    * Verifies that current allocated buffer size is larger than or equal to the input size.
+    * If the current buffer size is smaller, a new allocation is made and the old buffer data is copied to the new buffer.
+    */
+    void VerifyAndAllocate(unsigned int minimumSize);
+
+    void UpdateLength(unsigned int newLength);
+
+    void SwapBuffers(TestVideoBuffer& videoBuffer);
+
+    void CopyBuffer(unsigned int length, const unsigned char* fromBuffer);
+
+    void CopyBuffer(TestVideoBuffer& fromVideoBuffer);
+
+    // Use with care, and remember to call ClearPointer() when done.
+    void CopyPointer(const TestVideoBuffer& fromVideoBuffer);
+
+    void ClearPointer();
+
+    int  SetOffset(unsigned int length);            // Sets offset to beginning of frame in buffer
+
+    void Free();                                    // Deletes frame buffer and resets members to zero
+
+    void SetTimeStamp(unsigned int timeStamp);      // Sets timestamp of frame (90kHz)
+
+    /**
+    *   Gets pointer to frame buffer
+    */
+    unsigned char* GetBuffer() const;
+
+    /**
+    *   Gets allocated buffer size
+    */
+    unsigned int	GetSize() const;
+
+    /**
+    *   Gets length of frame
+    */
+    unsigned int	GetLength() const;
+
+    /**
+    *   Gets timestamp of frame (90kHz)
+    */
+    unsigned int	GetTimeStamp() const;
+
+    unsigned int	GetWidth() const;
+    unsigned int	GetHeight() const;
+
+    void            SetWidth(unsigned int width);
+    void            SetHeight(unsigned int height);
+
+private:
+    TestVideoBuffer& operator=(const TestVideoBuffer& inBuffer);
+
+private:
+    void Set(unsigned char* buffer,unsigned int size,unsigned int length,unsigned int offset, unsigned int timeStamp);
+    unsigned int GetStartOffset() const;
+
+    unsigned char*		  _buffer;          // Pointer to frame buffer
+    unsigned int		  _bufferSize;      // Allocated buffer size
+    unsigned int		  _bufferLength;    // Length (in bytes) of frame
+    unsigned int		  _startOffset;     // Offset (in bytes) to beginning of frame in buffer
+    unsigned int		  _timeStamp;       // Timestamp of frame (90kHz)
+    unsigned int          _width;
+    unsigned int          _height;
+};
+
+class TestVideoEncodedBuffer: public TestVideoBuffer
+{
+public:
+    TestVideoEncodedBuffer();
+    ~TestVideoEncodedBuffer();
+
+    void SetCaptureWidth(unsigned short width);
+    void SetCaptureHeight(unsigned short height);
+    unsigned short GetCaptureWidth();
+    unsigned short GetCaptureHeight();
+
+    webrtc::VideoFrameType GetFrameType();
+    void SetFrameType(webrtc::VideoFrameType frametype);
+
+    void Reset();
+
+    void SetFrameRate(float frameRate);
+    float GetFrameRate();
+
+private:
+    TestVideoEncodedBuffer& operator=(const TestVideoEncodedBuffer& inBuffer);
+
+private:
+    unsigned short			   _captureWidth;
+    unsigned short			   _captureHeight;
+    webrtc::VideoFrameType     _frameType;
+    float                      _frameRate;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/video_source.cc b/trunk/src/modules/video_coding/codecs/test_framework/video_source.cc
new file mode 100644
index 0000000..8abad1b
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/video_source.cc
@@ -0,0 +1,419 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_source.h"
+
+#include <stdio.h>
+#include <cassert>
+
+#include "testsupport/fileutils.h"
+
+VideoSource::VideoSource()
+:
+_fileName(webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"),
+_width(352),
+_height(288),
+_type(webrtc::kI420),
+_frameRate(30)
+{
+}
+
+VideoSource::VideoSource(std::string fileName, VideoSize size,
+    int frameRate /*= 30*/, webrtc::VideoType type /*=  webrtc::kI420*/)
+:
+_fileName(fileName),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(size != kUndefined && size != kNumberOfVideoSizes);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+    assert(GetWidthHeight(size, _width, _height) == 0);
+}
+
+VideoSource::VideoSource(std::string fileName, int width, int height,
+    int frameRate /*= 30*/,  webrtc::VideoType type /*=  webrtc::kI420*/)
+:
+_fileName(fileName),
+_width(width),
+_height(height),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(width > 0);
+    assert(height > 0);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+}
+
+VideoSize
+VideoSource::GetSize() const
+{
+    return GetSize(_width, _height);
+}
+
+VideoSize
+VideoSource::GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height)
+{
+    if(width == 128 && height == 96)
+    {
+        return kSQCIF;
+    }else if(width == 160 && height == 120)
+    {
+        return kQQVGA;
+    }else if(width == 176 && height == 144)
+    {
+        return kQCIF;
+    }else if(width == 320 && height == 240)
+    {
+        return kQVGA;
+    }else if(width == 352 && height == 288)
+    {
+        return kCIF;
+    }else if(width == 640 && height == 480)
+    {
+        return kVGA;
+    }else if(width == 720 && height == 480)
+    {
+        return kNTSC;
+    }else if(width == 704 && height == 576)
+    {
+        return k4CIF;
+    }else if(width == 800 && height == 600)
+    {
+        return kSVGA;
+    }else if(width == 960 && height == 720)
+    {
+        return kHD;
+    }else if(width == 1024 && height == 768)
+    {
+        return kXGA;
+    }else if(width == 1440 && height == 1080)
+    {
+        return kFullHD;
+    }else if(width == 400 && height == 240)
+    {
+        return kWQVGA;
+    }else if(width == 800 && height == 480)
+    {
+        return kWVGA;
+    }else if(width == 1280 && height == 720)
+    {
+        return kWHD;
+    }else if(width == 1920 && height == 1080)
+    {
+        return kWFullHD;
+    }
+    return kUndefined;
+}
+
+unsigned int
+VideoSource::GetFrameLength() const
+{
+    return webrtc::CalcBufferSize(_type, _width, _height);
+}
+
+const char*
+VideoSource::GetMySizeString() const
+{
+    return VideoSource::GetSizeString(GetSize());
+}
+
+const char*
+VideoSource::GetSizeString(VideoSize size)
+{
+    switch (size)
+    {
+        case kSQCIF:
+            return "SQCIF";
+        case kQQVGA:
+            return "QQVGA";
+        case kQCIF:
+            return "QCIF";
+        case kQVGA:
+            return "QVGA";
+        case kCIF:
+            return "CIF";
+        case kVGA:
+            return "VGA";
+        case kNTSC:
+            return "NTSC";
+        case k4CIF:
+            return "4CIF";
+        case kSVGA:
+            return "SVGA";
+        case kHD:
+            return "HD";
+        case kXGA:
+            return "XGA";
+        case kFullHD:
+            return "Full_HD";
+        case kWQVGA:
+            return "WQVGA";
+        case kWHD:
+            return "WHD";
+        case kWFullHD:
+            return "WFull_HD";
+        default:
+            return "Undefined";
+    }
+}
+
+std::string
+VideoSource::GetFilePath() const
+{
+    size_t slashPos = _fileName.find_last_of("/\\");
+    if (slashPos == std::string::npos)
+    {
+        return ".";
+    }
+
+    return _fileName.substr(0, slashPos);
+}
+
+std::string
+VideoSource::GetName() const
+{
+    // Remove path.
+    size_t slashPos = _fileName.find_last_of("/\\");
+    if (slashPos == std::string::npos)
+    {
+        slashPos = 0;
+    }
+    else
+    {
+        slashPos++;
+    }
+
+    // Remove extension and underscored suffix if it exists.
+    return _fileName.substr(slashPos, std::min(_fileName.find_last_of("_"),
+        _fileName.find_last_of(".")) - slashPos);
+}
+
+void
+VideoSource::Convert(const VideoSource &target, bool force /* = false */) const
+{
+    // Ensure target rate is less than or equal to source
+    // (i.e. we are only temporally downsampling).
+    assert(target.GetFrameRate() <= _frameRate);
+    // Only supports YUV420 currently.
+    assert(_type == webrtc::kI420 && target.GetType() == webrtc::kI420);
+    if (!force && (FileExists(target.GetFileName().c_str()) ||
+        (target.GetWidth() == _width && target.GetHeight() == _height && target.GetFrameRate() == _frameRate)))
+    {
+        // Assume that the filename uniquely defines the content.
+        // If the file already exists, it is the correct file.
+        return;
+    }
+    FILE *inFile = NULL;
+    FILE *outFile = NULL;
+
+    inFile = fopen(_fileName.c_str(), "rb");
+    assert(inFile != NULL);
+
+    outFile = fopen(target.GetFileName().c_str(), "wb");
+    assert(outFile != NULL);
+
+    FrameDropper fd;
+    fd.SetFrameRate(target.GetFrameRate(), _frameRate);
+
+    const size_t lengthOutFrame = webrtc::CalcBufferSize(target.GetType(),
+        target.GetWidth(), target.GetHeight());
+    assert(lengthOutFrame > 0);
+    unsigned char *outFrame = new unsigned char[lengthOutFrame];
+
+    const size_t lengthInFrame = webrtc::CalcBufferSize(_type, _width, _height);
+    assert(lengthInFrame > 0);
+    unsigned char *inFrame = new unsigned char[lengthInFrame];
+
+    while (fread(inFrame, 1, lengthInFrame, inFile) == lengthInFrame)
+    {
+        if (!fd.DropFrame())
+        {
+            assert(target.GetWidth() == _width &&
+                   target.GetHeight() == _height); // Add video interpolator here!
+            fwrite(outFrame, 1, lengthOutFrame, outFile);
+        }
+    }
+
+    delete inFrame;
+    delete outFrame;
+    fclose(inFile);
+    fclose(outFile);
+}
+
+bool VideoSource::FileExists(const char* fileName)
+{
+    FILE* fp = NULL;
+    fp = fopen(fileName, "rb");
+    if(fp != NULL)
+    {
+        fclose(fp);
+        return true;
+    }
+    return false;
+}
+
+
+int
+VideoSource::GetWidthHeight( VideoSize size, int & width, int& height)
+{
+    switch(size)
+    {
+    case kSQCIF:
+        width = 128;
+        height = 96;
+        return 0;
+    case kQQVGA:
+        width = 160;
+        height = 120;
+        return 0;
+    case kQCIF:
+        width = 176;
+        height = 144;
+        return 0;
+    case kCGA:
+        width = 320;
+        height = 200;
+        return 0;
+    case kQVGA:
+        width = 320;
+        height = 240;
+        return 0;
+    case kSIF:
+        width = 352;
+        height = 240;
+        return 0;
+    case kWQVGA:
+        width = 400;
+        height = 240;
+        return 0;
+    case kCIF:
+        width = 352;
+        height = 288;
+        return 0;
+    case kW288p:
+        width = 512;
+        height = 288;
+        return 0;
+    case k448p:
+        width = 576;
+        height = 448;
+        return 0;
+    case kVGA:
+        width = 640;
+        height = 480;
+        return 0;
+    case k432p:
+        width = 720;
+        height = 432;
+        return 0;
+    case kW432p:
+        width = 768;
+        height = 432;
+        return 0;
+    case k4SIF:
+        width = 704;
+        height = 480;
+        return 0;
+    case kW448p:
+        width = 768;
+        height = 448;
+        return 0;
+    case kNTSC:
+        width = 720;
+        height = 480;
+        return 0;
+    case kFW448p:
+        width = 800;
+        height = 448;
+        return 0;
+    case kWVGA:
+        width = 800;
+        height = 480;
+        return 0;
+    case k4CIF:
+        width = 704;
+        height = 576;
+        return 0;
+    case kSVGA:
+        width = 800;
+        height = 600;
+        return 0;
+    case kW544p:
+        width = 960;
+        height = 544;
+        return 0;
+    case kW576p:
+        width = 1024;
+        height = 576;
+        return 0;
+    case kHD:
+        width = 960;
+        height = 720;
+        return 0;
+    case kXGA:
+        width = 1024;
+        height = 768;
+        return 0;
+    case kFullHD:
+        width = 1440;
+        height = 1080;
+        return 0;
+    case kWHD:
+        width = 1280;
+        height = 720;
+        return 0;
+    case kWFullHD:
+        width = 1920;
+        height = 1080;
+        return 0;
+    default:
+        return -1;
+    }
+}
+
+FrameDropper::FrameDropper()
+:
+_dropsBetweenRenders(0),
+_frameCounter(0)
+{
+}
+
+bool
+FrameDropper::DropFrame()
+{
+    _frameCounter++;
+    if (_frameCounter > _dropsBetweenRenders)
+    {
+        _frameCounter = 0;
+        return false;
+    }
+    return true;
+}
+
+unsigned int
+FrameDropper::DropsBetweenRenders()
+{
+    return _dropsBetweenRenders;
+}
+
+void
+FrameDropper::SetFrameRate(double frameRate, double maxFrameRate)
+{
+    if (frameRate >= 1.0)
+    {
+        _dropsBetweenRenders = static_cast<unsigned int>(maxFrameRate / frameRate + 0.5) - 1;
+    }
+    else
+    {
+        _dropsBetweenRenders = 0;
+    }
+}
diff --git a/trunk/src/modules/video_coding/codecs/test_framework/video_source.h b/trunk/src/modules/video_coding/codecs/test_framework/video_source.h
new file mode 100644
index 0000000..4f785ba
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/test_framework/video_source.h
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
+
+#include <string>
+#include "common_video/libyuv/include/libyuv.h"
+
+enum VideoSize
+    {
+        kUndefined,
+        kSQCIF,     // 128*96       = 12 288
+        kQQVGA,     // 160*120      = 19 200
+        kQCIF,      // 176*144      = 25 344
+        kCGA,       // 320*200      = 64 000
+        kQVGA,      // 320*240      = 76 800
+        kSIF,       // 352*240      = 84 480
+        kWQVGA,     // 400*240      = 96 000
+        kCIF,       // 352*288      = 101 376
+        kW288p,     // 512*288      = 147 456 (WCIF)
+        k448p,      // 576*448      = 281 088
+        kVGA,       // 640*480      = 307 200
+        k432p,      // 720*432      = 311 040
+        kW432p,     // 768*432      = 331 776
+        k4SIF,      // 704*480      = 337 920
+        kW448p,     // 768*448      = 344 064
+        kNTSC,		// 720*480      = 345 600
+        kFW448p,    // 800*448      = 358 400
+        kWVGA,      // 800*480      = 384 000
+        k4CIF,      // 704�576      = 405 504
+        kSVGA,      // 800*600      = 480 000
+        kW544p,     // 960*544      = 522 240
+        kW576p,     // 1024*576     = 589 824 (W4CIF)
+        kHD,        // 960*720      = 691 200
+        kXGA,       // 1024*768     = 786 432
+        kWHD,       // 1280*720     = 921 600
+        kFullHD,    // 1440*1080    = 1 555 200
+        kWFullHD,   // 1920*1080    = 2 073 600
+
+        kNumberOfVideoSizes
+    };
+
+class VideoSource
+{
+public:
+    VideoSource();
+    VideoSource(std::string fileName, VideoSize size, int frameRate = 30,
+        webrtc::VideoType type = webrtc::kI420);
+    VideoSource(std::string fileName, int width, int height, int frameRate = 30,
+                webrtc::VideoType type = webrtc::kI420);
+
+    std::string GetFileName() const { return _fileName; }
+    int GetWidth() const { return _width; }
+    int GetHeight() const { return _height; }
+    webrtc::VideoType GetType() const { return _type; }
+    int GetFrameRate() const { return _frameRate; }
+
+    // Returns the file path without a trailing slash.
+    std::string GetFilePath() const;
+
+    // Returns the filename with the path (including the leading slash) removed.
+    std::string GetName() const;
+
+    VideoSize GetSize() const;
+    static VideoSize GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height);
+    unsigned int GetFrameLength() const;
+
+    // Returns a human-readable size string.
+    static const char* GetSizeString(VideoSize size);
+    const char* GetMySizeString() const;
+
+    // Opens the video source, converting and writing to the specified target.
+    // If force is true, the conversion will be done even if the target file
+    // already exists.
+    void Convert(const VideoSource& target, bool force = false) const;
+    static bool FileExists(const char* fileName);
+private:
+    static int GetWidthHeight( VideoSize size, int& width, int& height);
+    std::string _fileName;
+    int _width;
+    int _height;
+    webrtc::VideoType _type;
+    int _frameRate;
+};
+
+class FrameDropper
+{
+public:
+    FrameDropper();
+    bool DropFrame();
+    unsigned int DropsBetweenRenders();
+    void SetFrameRate(double frameRate, double maxFrameRate);
+
+private:
+    unsigned int _dropsBetweenRenders;
+    unsigned int _frameCounter;
+};
+
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
+
diff --git a/trunk/src/modules/video_coding/codecs/tools/video_codecs_tools.gypi b/trunk/src/modules/video_coding/codecs/tools/video_codecs_tools.gypi
new file mode 100644
index 0000000..f43f285
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/tools/video_codecs_tools.gypi
@@ -0,0 +1,37 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  # Exclude the test target when building with chromium.
+  'conditions': [   
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'video_quality_measurement',
+          'type': 'executable',
+          'dependencies': [
+            'video_codecs_test_framework',
+            'webrtc_video_coding',
+            'webrtc_vp8',
+            '<(webrtc_root)/../test/metrics.gyp:metrics',
+            '<(webrtc_root)/../third_party/google-gflags/google-gflags.gyp:google-gflags',
+           ],
+           'sources': [
+             'video_quality_measurement.cc',
+           ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/codecs/tools/video_quality_measurement.cc b/trunk/src/modules/video_coding/codecs/tools/video_quality_measurement.cc
new file mode 100644
index 0000000..953e3b4
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/tools/video_quality_measurement.cc
@@ -0,0 +1,526 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdarg.h>
+#include <sys/stat.h>  // To check for directory existence.
+
+#include <cassert>
+#include <cstdio>
+#include <ctime>
+
+#ifndef S_ISDIR  // Not defined in stat.h on Windows.
+#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
+#endif
+
+#include "common_types.h"
+#include "google/gflags.h"
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "modules/video_coding/codecs/test/stats.h"
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+#include "modules/video_coding/codecs/vp8/main/interface/vp8.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/trace.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "testsupport/packet_reader.h"
+
+DEFINE_string(test_name, "Quality test", "The name of the test to run. ");
+DEFINE_string(test_description, "", "A more detailed description about what "
+              "the current test is about.");
+DEFINE_string(input_filename, "", "Input file. "
+              "The source video file to be encoded and decoded. Must be in "
+              ".yuv format");
+DEFINE_int32(width, -1, "Width in pixels of the frames in the input file.");
+DEFINE_int32(height, -1, "Height in pixels of the frames in the input file.");
+DEFINE_int32(framerate, 30, "Frame rate of the input file, in FPS "
+             "(frames-per-second). ");
+DEFINE_string(output_dir, ".", "Output directory. "
+              "The directory where the output file will be put. Must already "
+              "exist.");
+DEFINE_bool(use_single_core, false, "Force using a single core. If set to "
+            "true, only one core will be used for processing. Using a single "
+            "core is necessary to get a deterministic behavior for the"
+            "encoded frames - using multiple cores will produce different "
+            "encoded frames since multiple cores are competing to consume the "
+            "byte budget for each frame in parallel. If set to false, "
+            "the maximum detected number of cores will be used. ");
+DEFINE_bool(disable_fixed_random_seed , false, "Set this flag to disable the"
+            "usage of a fixed random seed for the random generator used "
+            "for packet loss. Disabling this will cause consecutive runs "
+            "loose packets at different locations, which is bad for "
+            "reproducibility.");
+DEFINE_string(output_filename, "", "Output file. "
+              "The name of the output video file resulting of the processing "
+              "of the source file. By default this is the same name as the "
+              "input file with '_out' appended before the extension.");
+DEFINE_int32(bitrate, 500, "Bit rate in kilobits/second.");
+DEFINE_int32(keyframe_interval, 0, "Forces a keyframe every Nth frame. "
+             "0 means the encoder decides when to insert keyframes.  Note that "
+             "the encoder may create a keyframe in other locations in addition "
+             "to the interval that is set using this parameter.");
+DEFINE_int32(temporal_layers, 0, "The number of temporal layers to use "
+             "(VP8 specific codec setting). Must be 0-4.");
+DEFINE_int32(packet_size, 1500, "Simulated network packet size in bytes (MTU). "
+             "Used for packet loss simulation.");
+DEFINE_int32(max_payload_size, 1440, "Max payload size in bytes for the "
+             "encoder.");
+DEFINE_string(packet_loss_mode, "uniform", "Packet loss mode. Two different "
+              "packet loss models are supported: uniform or burst. This "
+              "setting has no effect unless packet_loss_rate is >0. ");
+DEFINE_double(packet_loss_probability, 0.0, "Packet loss probability. A value "
+              "between 0.0 and 1.0 that defines the probability of a packet "
+              "being lost. 0.1 means 10% and so on.");
+DEFINE_int32(packet_loss_burst_length, 1, "Packet loss burst length. Defines "
+             "how many packets will be lost in a burst when a packet has been "
+             "decided to be lost. Must be >=1.");
+DEFINE_bool(csv, false, "CSV output. Enabling this will output all frame "
+            "statistics at the end of execution. Recommended to run combined "
+            "with --noverbose to avoid mixing output.");
+DEFINE_bool(python, false, "Python output. Enabling this will output all frame "
+            "statistics as a Python script at the end of execution. "
+            "Recommended to run combine with --noverbose to avoid mixing "
+            "output.");
+DEFINE_bool(verbose, true, "Verbose mode. Prints a lot of debugging info. "
+            "Suitable for tracking progress but not for capturing output. "
+            "Disable with --noverbose flag.");
+
+// Custom log method that only prints if the verbose flag is given.
+// Supports all the standard printf parameters and formatting (just forwarded).
+int Log(const char *format, ...) {
+  int result = 0;
+  if (FLAGS_verbose) {
+    va_list args;
+    va_start(args, format);
+    result = vprintf(format, args);
+    va_end(args);
+  }
+  return result;
+}
+
+// Validates the arguments given as command line flags and fills in the
+// TestConfig struct with all configurations needed for video processing.
+// Returns 0 if everything is OK, otherwise an exit code.
+int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
+  // Validate the mandatory flags:
+  if (FLAGS_input_filename == "" || FLAGS_width == -1 || FLAGS_height == -1) {
+    printf("%s\n", google::ProgramUsage());
+    return 1;
+  }
+  config->name = FLAGS_test_name;
+  config->description = FLAGS_test_description;
+
+  // Verify the input file exists and is readable.
+  FILE* test_file;
+  test_file = fopen(FLAGS_input_filename.c_str(), "rb");
+  if (test_file == NULL) {
+    fprintf(stderr, "Cannot read the specified input file: %s\n",
+            FLAGS_input_filename.c_str());
+    return 2;
+  }
+  fclose(test_file);
+  config->input_filename = FLAGS_input_filename;
+
+  // Verify the output dir exists.
+  struct stat dir_info;
+  if (!(stat(FLAGS_output_dir.c_str(), &dir_info) == 0 &&
+      S_ISDIR(dir_info.st_mode))) {
+    fprintf(stderr, "Cannot find output directory: %s\n",
+              FLAGS_output_dir.c_str());
+    return 3;
+  }
+  config->output_dir = FLAGS_output_dir;
+
+  // Manufacture an output filename if none was given.
+  if (FLAGS_output_filename == "") {
+    // Cut out the filename without extension from the given input file
+    // (which may include a path)
+    int startIndex = FLAGS_input_filename.find_last_of("/") + 1;
+    if (startIndex == 0) {
+      startIndex = 0;
+    }
+    FLAGS_output_filename =
+        FLAGS_input_filename.substr(startIndex,
+                                    FLAGS_input_filename.find_last_of(".")
+                                    - startIndex) + "_out.yuv";
+  }
+
+  // Verify output file can be written.
+  if (FLAGS_output_dir == ".") {
+    config->output_filename = FLAGS_output_filename;
+  } else {
+    config->output_filename = FLAGS_output_dir + "/"+ FLAGS_output_filename;
+  }
+  test_file = fopen(config->output_filename.c_str(), "wb");
+  if (test_file == NULL) {
+    fprintf(stderr, "Cannot write output file: %s\n",
+            config->output_filename.c_str());
+    return 4;
+  }
+  fclose(test_file);
+
+  // Check single core flag.
+  config->use_single_core = FLAGS_use_single_core;
+
+  // Get codec specific configuration.
+  webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP8,
+                                   config->codec_settings);
+
+  // Check the temporal layers.
+  if (FLAGS_temporal_layers < 0 ||
+      FLAGS_temporal_layers > webrtc::kMaxTemporalStreams) {
+    fprintf(stderr, "Temporal layers number must be 0-4, was: %d\n",
+            FLAGS_temporal_layers);
+    return 13;
+  }
+  config->codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
+      FLAGS_temporal_layers;
+
+  // Check the bit rate.
+  if (FLAGS_bitrate <= 0) {
+    fprintf(stderr, "Bit rate must be >0 kbps, was: %d\n", FLAGS_bitrate);
+    return 5;
+  }
+  config->codec_settings->startBitrate = FLAGS_bitrate;
+
+  // Check the keyframe interval.
+  if (FLAGS_keyframe_interval < 0) {
+    fprintf(stderr, "Keyframe interval must be >=0, was: %d\n",
+            FLAGS_keyframe_interval);
+    return 6;
+  }
+  config->keyframe_interval = FLAGS_keyframe_interval;
+
+  // Check packet size and max payload size.
+  if (FLAGS_packet_size <= 0) {
+    fprintf(stderr, "Packet size must be >0 bytes, was: %d\n",
+            FLAGS_packet_size);
+    return 7;
+  }
+  config->networking_config.packet_size_in_bytes = FLAGS_packet_size;
+
+  if (FLAGS_max_payload_size <= 0) {
+    fprintf(stderr, "Max payload size must be >0 bytes, was: %d\n",
+            FLAGS_max_payload_size);
+    return 8;
+  }
+  config->networking_config.max_payload_size_in_bytes =
+      FLAGS_max_payload_size;
+
+  // Check the width and height
+  if (FLAGS_width <= 0 || FLAGS_height <= 0) {
+    fprintf(stderr, "Width and height must be >0.");
+    return 9;
+  }
+  config->codec_settings->width = FLAGS_width;
+  config->codec_settings->height = FLAGS_height;
+  config->codec_settings->maxFramerate = FLAGS_framerate;
+
+  // Calculate the size of each frame to read (according to YUV spec).
+  config->frame_length_in_bytes =
+      3 * config->codec_settings->width * config->codec_settings->height / 2;
+
+  // Check packet loss settings
+  if (FLAGS_packet_loss_mode != "uniform" &&
+      FLAGS_packet_loss_mode != "burst") {
+    fprintf(stderr, "Unsupported packet loss mode, must be 'uniform' or "
+            "'burst'\n.");
+    return 10;
+  }
+  config->networking_config.packet_loss_mode = webrtc::test::kUniform;
+  if (FLAGS_packet_loss_mode == "burst") {
+    config->networking_config.packet_loss_mode =  webrtc::test::kBurst;
+  }
+
+  if (FLAGS_packet_loss_probability < 0.0 ||
+      FLAGS_packet_loss_probability > 1.0) {
+    fprintf(stderr, "Invalid packet loss probability. Must be 0.0 - 1.0, "
+            "was: %f\n", FLAGS_packet_loss_probability);
+    return 11;
+  }
+  config->networking_config.packet_loss_probability =
+      FLAGS_packet_loss_probability;
+
+  if (FLAGS_packet_loss_burst_length < 1) {
+    fprintf(stderr, "Invalid packet loss burst length, must be >=1, "
+            "was: %d\n", FLAGS_packet_loss_burst_length);
+    return 12;
+  }
+  config->networking_config.packet_loss_burst_length =
+      FLAGS_packet_loss_burst_length;
+  config->verbose = FLAGS_verbose;
+  return 0;
+}
+
+void CalculateSsimVideoMetrics(webrtc::test::TestConfig* config,
+                               webrtc::test::QualityMetricsResult* result) {
+  Log("Calculating SSIM...\n");
+  I420SSIMFromFiles(config->input_filename.c_str(),
+                    config->output_filename.c_str(),
+                    config->codec_settings->width,
+                    config->codec_settings->height, result);
+  Log("  Average: %3.2f\n", result->average);
+  Log("  Min    : %3.2f (frame %d)\n", result->min, result->min_frame_number);
+  Log("  Max    : %3.2f (frame %d)\n", result->max, result->max_frame_number);
+}
+
// Computes the PSNR quality metric for the whole sequence by comparing the
// original input file against the processed output file, then logs the
// average, minimum and maximum values (with the frame numbers at which the
// extremes occurred). Mirrors CalculateSsimVideoMetrics above.
void CalculatePsnrVideoMetrics(webrtc::test::TestConfig* config,
                               webrtc::test::QualityMetricsResult* result) {
  Log("Calculating PSNR...\n");
  I420PSNRFromFiles(config->input_filename.c_str(),
                    config->output_filename.c_str(),
                    config->codec_settings->width,
                    config->codec_settings->height, result);
  Log("  Average: %3.2f\n", result->average);
  Log("  Min    : %3.2f (frame %d)\n", result->min, result->min_frame_number);
  Log("  Max    : %3.2f (frame %d)\n", result->max, result->max_frame_number);
}
+
+void PrintConfigurationSummary(const webrtc::test::TestConfig& config) {
+  Log("Quality test with parameters:\n");
+  Log("  Test name        : %s\n", config.name.c_str());
+  Log("  Description      : %s\n", config.description.c_str());
+  Log("  Input filename   : %s\n", config.input_filename.c_str());
+  Log("  Output directory : %s\n", config.output_dir.c_str());
+  Log("  Output filename  : %s\n", config.output_filename.c_str());
+  Log("  Frame length       : %d bytes\n", config.frame_length_in_bytes);
+  Log("  Packet size      : %d bytes\n",
+      config.networking_config.packet_size_in_bytes);
+  Log("  Max payload size : %d bytes\n",
+      config.networking_config.max_payload_size_in_bytes);
+  Log("  Packet loss:\n");
+  Log("    Mode           : %s\n",
+      PacketLossModeToStr(config.networking_config.packet_loss_mode));
+  Log("    Probability    : %2.1f\n",
+      config.networking_config.packet_loss_probability);
+  Log("    Burst length   : %d packets\n",
+      config.networking_config.packet_loss_burst_length);
+}
+
// Prints per-frame statistics to stdout as comma-separated values, one row
// per processed frame, preceded by a space-separated header row naming the
// columns (intended for import into spreadsheets/scripts).
// NOTE(review): ssim_result.frames and psnr_result.frames are indexed with
// the same i as stats.stats_ — this assumes all three contain one entry per
// frame in the same order; confirm the callers guarantee equal sizes.
void PrintCsvOutput(const webrtc::test::Stats& stats,
                    const webrtc::test::QualityMetricsResult& ssim_result,
                    const webrtc::test::QualityMetricsResult& psnr_result) {
  Log("\nCSV output (recommended to run with --noverbose to skip the "
              "above output)\n");
  printf("frame_number encoding_successful decoding_successful "
      "encode_return_code decode_return_code "
      "encode_time_in_us decode_time_in_us "
      "bit_rate_in_kbps encoded_frame_length_in_bytes frame_type "
      "packets_dropped total_packets "
      "ssim psnr\n");

  for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
    const webrtc::test::FrameStatistic& f = stats.stats_[i];
    const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
    const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
    // The argument order below must match the format string exactly.
    printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7d, %d, %2d, %2d, "
           "%5.3f, %5.2f\n",
           f.frame_number,
           f.encoding_successful,
           f.decoding_successful,
           f.encode_return_code,
           f.decode_return_code,
           f.encode_time_in_us,
           f.decode_time_in_us,
           f.bit_rate_in_kbps,
           f.encoded_frame_length_in_bytes,
           f.frame_type,
           f.packets_dropped,
           f.total_packets,
           ssim.value,
           psnr.value);
  }
}
+
+void PrintPythonOutput(const webrtc::test::TestConfig& config,
+                       const webrtc::test::Stats& stats,
+                       const webrtc::test::QualityMetricsResult& ssim_result,
+                       const webrtc::test::QualityMetricsResult& psnr_result) {
+  Log("\nPython output (recommended to run with --noverbose to skip the "
+               "above output)\n");
+  printf("test_configuration = ["
+         "{'name': 'name',                      'value': '%s'},\n"
+         "{'name': 'description',               'value': '%s'},\n"
+         "{'name': 'test_number',               'value': '%d'},\n"
+         "{'name': 'input_filename',            'value': '%s'},\n"
+         "{'name': 'output_filename',           'value': '%s'},\n"
+         "{'name': 'output_dir',                'value': '%s'},\n"
+         "{'name': 'packet_size_in_bytes',      'value': '%d'},\n"
+         "{'name': 'max_payload_size_in_bytes', 'value': '%d'},\n"
+         "{'name': 'packet_loss_mode',          'value': '%s'},\n"
+         "{'name': 'packet_loss_probability',   'value': '%f'},\n"
+         "{'name': 'packet_loss_burst_length',  'value': '%d'},\n"
+         "{'name': 'exclude_frame_types',       'value': '%s'},\n"
+         "{'name': 'frame_length_in_bytes',     'value': '%d'},\n"
+         "{'name': 'use_single_core',           'value': '%s'},\n"
+         "{'name': 'keyframe_interval;',        'value': '%d'},\n"
+         "{'name': 'video_codec_type',          'value': '%s'},\n"
+         "{'name': 'width',                     'value': '%d'},\n"
+         "{'name': 'height',                    'value': '%d'},\n"
+         "{'name': 'bit_rate_in_kbps',          'value': '%d'},\n"
+         "]\n",
+         config.name.c_str(),
+         config.description.c_str(),
+         config.test_number,
+         config.input_filename.c_str(),
+         config.output_filename.c_str(),
+         config.output_dir.c_str(),
+         config.networking_config.packet_size_in_bytes,
+         config.networking_config.max_payload_size_in_bytes,
+         PacketLossModeToStr(config.networking_config.packet_loss_mode),
+         config.networking_config.packet_loss_probability,
+         config.networking_config.packet_loss_burst_length,
+         ExcludeFrameTypesToStr(config.exclude_frame_types),
+         config.frame_length_in_bytes,
+         config.use_single_core ? "True " : "False",
+         config.keyframe_interval,
+         webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
+         config.codec_settings->width,
+         config.codec_settings->height,
+         config.codec_settings->startBitrate);
+  printf("frame_data_types = {"
+         "'frame_number': ('number', 'Frame number'),\n"
+         "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
+         "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
+         "'encode_time': ('number', 'Encode time (us)'),\n"
+         "'decode_time': ('number', 'Decode time (us)'),\n"
+         "'encode_return_code': ('number', 'Encode return code'),\n"
+         "'decode_return_code': ('number', 'Decode return code'),\n"
+         "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
+         "'encoded_frame_length': "
+         "('number', 'Encoded frame length (bytes)'),\n"
+         "'frame_type': ('string', 'Frame type'),\n"
+         "'packets_dropped': ('number', 'Packets dropped'),\n"
+         "'total_packets': ('number', 'Total packets'),\n"
+         "'ssim': ('number', 'SSIM'),\n"
+         "'psnr': ('number', 'PSNR (dB)'),\n"
+         "}\n");
+  printf("frame_data = [");
+  for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
+    const webrtc::test::FrameStatistic& f = stats.stats_[i];
+    const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
+    const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
+    printf("{'frame_number': %d, "
+           "'encoding_successful': %s, 'decoding_successful': %s, "
+           "'encode_time': %d, 'decode_time': %d, "
+           "'encode_return_code': %d, 'decode_return_code': %d, "
+           "'bit_rate': %d, 'encoded_frame_length': %d, 'frame_type': %s, "
+           "'packets_dropped': %d, 'total_packets': %d, "
+           "'ssim': %f, 'psnr': %f},\n",
+           f.frame_number,
+           f.encoding_successful ? "True " : "False",
+           f.decoding_successful ? "True " : "False",
+           f.encode_time_in_us,
+           f.decode_time_in_us,
+           f.encode_return_code,
+           f.decode_return_code,
+           f.bit_rate_in_kbps,
+           f.encoded_frame_length_in_bytes,
+           f.frame_type == webrtc::kDeltaFrame ? "'Delta'" : "'Other'",
+           f.packets_dropped,
+           f.total_packets,
+           ssim.value,
+           psnr.value);
+  }
+  printf("]\n");
+}
+
// Runs a quality measurement on the input file supplied to the program.
// The input file must be in YUV format.
int main(int argc, char* argv[]) {
  std::string program_name = argv[0];
  std::string usage = "Quality test application for video comparisons.\n"
    "Run " + program_name + " --helpshort for usage.\n"
    "Example usage:\n" + program_name +
    " --input_filename=filename.yuv --width=352 --height=288\n";
  google::SetUsageMessage(usage);

  google::ParseCommandLineFlags(&argc, &argv, true);

  // Create TestConfig and codec settings struct.
  // |config| keeps a pointer into the stack-allocated |codec_settings|, so
  // it must not outlive this function.
  webrtc::test::TestConfig config;
  webrtc::VideoCodec codec_settings;
  config.codec_settings = &codec_settings;

  // Validate the command-line flags and fill in |config|.
  int return_code = HandleCommandLineFlags(&config);
  // Exit if an invalid argument is supplied.
  if (return_code != 0) {
    return return_code;
  }

  PrintConfigurationSummary(config);

  // Set up the VP8 codec pair and file-based frame I/O.
  webrtc::VP8Encoder* encoder = webrtc::VP8Encoder::Create();
  webrtc::VP8Decoder* decoder = webrtc::VP8Decoder::Create();
  webrtc::test::Stats stats;
  webrtc::test::FrameReaderImpl frame_reader(config.input_filename,
                                             config.frame_length_in_bytes);
  webrtc::test::FrameWriterImpl frame_writer(config.output_filename,
                                             config.frame_length_in_bytes);
  // NOTE(review): Init() return values are ignored, so an unreadable input
  // file or unwritable output file is not detected here — confirm failures
  // surface inside ProcessFrame().
  frame_reader.Init();
  frame_writer.Init();
  webrtc::test::PacketReader packet_reader;

  webrtc::test::PacketManipulatorImpl packet_manipulator(
      &packet_reader, config.networking_config, config.verbose);
  // By default the packet manipulator is seeded with a fixed random.
  // If disabled we must generate a new seed.
  if (FLAGS_disable_fixed_random_seed) {
    packet_manipulator.InitializeRandomSeed(time(NULL));
  }
  webrtc::test::VideoProcessor* processor =
      new webrtc::test::VideoProcessorImpl(encoder, decoder,
                                           &frame_reader,
                                           &frame_writer,
                                           &packet_manipulator,
                                           config, &stats);
  processor->Init();

  // Process frames until the reader runs out, printing one progress dot per
  // frame, wrapped every 80 frames.
  int frame_number = 0;
  while (processor->ProcessFrame(frame_number)) {
    if (frame_number % 80 == 0) {
      Log("\n");  // make the output a bit nicer.
    }
    Log(".");
    frame_number++;
  }
  Log("\n");
  Log("Processed %d frames\n", frame_number);

  // Release encoder and decoder to make sure they have finished processing.
  encoder->Release();
  decoder->Release();

  // Verify statistics are correct:
  assert(frame_number == static_cast<int>(stats.stats_.size()));

  // Close the files before we start using them for SSIM/PSNR calculations.
  frame_reader.Close();
  frame_writer.Close();

  stats.PrintSummary();

  // Compare input vs. output frame-by-frame for both quality metrics.
  webrtc::test::QualityMetricsResult ssim_result;
  CalculateSsimVideoMetrics(&config, &ssim_result);
  webrtc::test::QualityMetricsResult psnr_result;
  CalculatePsnrVideoMetrics(&config, &psnr_result);

  if (FLAGS_csv) {
    PrintCsvOutput(stats, ssim_result, psnr_result);
  }
  if (FLAGS_python) {
    PrintPythonOutput(config, stats, ssim_result, psnr_result);
  }
  delete processor;
  delete encoder;
  delete decoder;
  Log("Quality test finished!");
  return 0;
}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/interface/vp8.h b/trunk/src/modules/video_coding/codecs/vp8/main/interface/vp8.h
new file mode 100644
index 0000000..89cf067
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/interface/vp8.h
@@ -0,0 +1,236 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ * WEBRTC VP8 wrapper interface
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_H_
+
+#include "video_codec_interface.h"
+
+// VPX forward declaration
+typedef struct vpx_codec_ctx vpx_codec_ctx_t;
+typedef struct vpx_codec_ctx vpx_dec_ctx_t;
+typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
+typedef struct vpx_image vpx_image_t;
+typedef struct vpx_ref_frame vpx_ref_frame_t;
+struct vpx_codec_cx_pkt;
+
+namespace webrtc
+{
+class TemporalLayers;
+class ReferencePictureSelection;
+
// WebRTC wrapper around the libvpx VP8 encoder. Instances are created via
// the static Create() factory (the constructor is private). Typical usage:
// Create() -> InitEncode() -> RegisterEncodeCompleteCallback() -> Encode()
// per frame -> Release().
class VP8Encoder : public VideoEncoder {
 public:
  static VP8Encoder* Create();

  virtual ~VP8Encoder();

  // Free encoder memory.
  //
  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual int Release();

  // Initialize the encoder with the information from the codecSettings
  //
  // Input:
  //          - codec_settings    : Codec settings
  //          - number_of_cores   : Number of cores available for the encoder
  //          - max_payload_size  : The maximum size each payload is allowed
  //                                to have. Usually MTU - overhead.
  //
  // Return value                 : Set bit rate if OK
  //                                <0 - Errors:
  //                                  WEBRTC_VIDEO_CODEC_ERR_PARAMETER
  //                                  WEBRTC_VIDEO_CODEC_ERR_SIZE
  //                                  WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED
  //                                  WEBRTC_VIDEO_CODEC_MEMORY
  //                                  WEBRTC_VIDEO_CODEC_ERROR
  virtual int InitEncode(const VideoCodec* codec_settings,
                         int number_of_cores,
                         uint32_t max_payload_size);

  // Encode an I420 image (as a part of a video stream). The encoded image
  // will be returned to the user through the encode complete callback.
  //
  // Input:
  //          - input_image       : Image to be encoded
  //          - frame_types       : Frame type to be generated by the encoder.
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
  //                                <0 - Errors:
  //                                  WEBRTC_VIDEO_CODEC_ERR_PARAMETER
  //                                  WEBRTC_VIDEO_CODEC_MEMORY
  //                                  WEBRTC_VIDEO_CODEC_ERROR
  //                                  WEBRTC_VIDEO_CODEC_TIMEOUT

  virtual int Encode(const RawImage& input_image,
                     const CodecSpecificInfo* codec_specific_info,
                     const VideoFrameType* frame_types);

  // Register an encode complete callback object.
  //
  // Input:
  //          - callback         : Callback object which handles encoded images.
  //
  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);

  // Inform the encoder of the new packet loss rate and the round-trip time of
  // the network.
  //
  //          - packet_loss : Fraction lost
  //                          (loss rate in percent = 100 * packetLoss / 255)
  //          - rtt         : Round-trip time in milliseconds
  // Return value           : WEBRTC_VIDEO_CODEC_OK if OK
  //                          <0 - Errors: WEBRTC_VIDEO_CODEC_ERROR
  //
  virtual int SetChannelParameters(uint32_t packet_loss, int rtt);

  // Inform the encoder about the new target bit rate.
  //
  //          - new_bitrate_kbit : New target bit rate
  //          - frame_rate       : The target frame rate
  //
  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate);

 private:
  // Private: instances must be obtained through Create().
  VP8Encoder();

  // Call encoder initialize function and set control settings.
  int InitAndSetControlSettings();

  void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
                             const vpx_codec_cx_pkt& pkt);

  int GetEncodedFrame(const RawImage& input_image);

  int GetEncodedPartitions(const RawImage& input_image);

  // Determine maximum target for Intra frames
  //
  // Input:
  //    - optimal_buffer_size : Optimal buffer size
  // Return Value             : Max target size for Intra frames represented as
  //                            percentage of the per frame bandwidth
  uint32_t MaxIntraTarget(uint32_t optimal_buffer_size);

  // Internal state; exact semantics live in the implementation (vp8.cc).
  EncodedImage encoded_image_;
  EncodedImageCallback* encoded_complete_callback_;
  VideoCodec codec_;
  bool inited_;  // Presumably true between InitEncode() and Release();
                 // confirm in vp8.cc.
  uint32_t timestamp_;
  uint16_t picture_id_;
  bool feedback_mode_;
  int cpu_speed_;
  uint32_t rc_max_intra_target_;
  int token_partitions_;
  ReferencePictureSelection* rps_;   // RPSI/SLI-driven reference selection.
  TemporalLayers* temporal_layers_;
  vpx_codec_ctx_t* encoder_;         // libvpx encoder context.
  vpx_codec_enc_cfg_t* config_;      // libvpx encoder configuration.
  vpx_image_t* raw_;                 // libvpx image for the raw input frame.
};  // end of VP8Encoder class
+
+
// WebRTC wrapper around the libvpx VP8 decoder. Instances are created via
// the static Create() factory (the constructor is private). Typical usage:
// Create() -> InitDecode() -> RegisterDecodeCompleteCallback() -> Decode()
// per frame -> Release().
class VP8Decoder : public VideoDecoder {
 public:
  static VP8Decoder* Create();

  virtual ~VP8Decoder();

  // Initialize the decoder.
  //
  // Return value         :  WEBRTC_VIDEO_CODEC_OK.
  //                        <0 - Errors:
  //                                  WEBRTC_VIDEO_CODEC_ERROR
  virtual int InitDecode(const VideoCodec* inst, int number_of_cores);

  // Decode encoded image (as a part of a video stream). The decoded image
  // will be returned to the user through the decode complete callback.
  //
  // Input:
  //          - input_image         : Encoded image to be decoded
  //          - missing_frames      : True if one or more frames have been lost
  //                                  since the previous decode call.
  //          - fragmentation       : Specifies the start and length of each VP8
  //                                  partition.
  //          - codec_specific_info : pointer to specific codec data
  //          - render_time_ms      : Render time in Ms
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
  //                                <0 - Errors:
  //                                      WEBRTC_VIDEO_CODEC_ERROR
  //                                      WEBRTC_VIDEO_CODEC_ERR_PARAMETER
  virtual int Decode(const EncodedImage& input_image,
                     bool missing_frames,
                     const RTPFragmentationHeader* fragmentation,
                     const CodecSpecificInfo* codec_specific_info,
                     int64_t /*render_time_ms*/);

  // Register a decode complete callback object.
  //
  // Input:
  //          - callback         : Callback object which handles decoded images.
  //
  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback);

  // Free decoder memory.
  //
  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK
  //                               <0 - Errors:
  //                                      WEBRTC_VIDEO_CODEC_ERROR
  virtual int Release();

  // Reset decoder state and prepare for a new call.
  //
  // Return value         : WEBRTC_VIDEO_CODEC_OK.
  //                        <0 - Errors:
  //                                  WEBRTC_VIDEO_CODEC_UNINITIALIZED
  //                                  WEBRTC_VIDEO_CODEC_ERROR
  virtual int Reset();

  // Create a copy of the codec and its internal state.
  //
  // Return value                : A copy of the instance if OK, NULL otherwise.
  virtual VideoDecoder* Copy();

 private:
  // Private: instances must be obtained through Create().
  VP8Decoder();

  // Copy reference image from this _decoder to the _decoder in copyTo. Set
  // which frame type to copy in _refFrame->frame_type before the call to
  // this function.
  int CopyReference(VP8Decoder* copy);

  int DecodePartitions(const EncodedImage& input_image,
                       const RTPFragmentationHeader* fragmentation);

  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);

  // Internal state; exact semantics live in the implementation (vp8.cc).
  RawImage decoded_image_;
  DecodedImageCallback* decode_complete_callback_;
  bool inited_;  // Presumably true between InitDecode() and Release();
                 // confirm in vp8.cc.
  bool feedback_mode_;
  vpx_dec_ctx_t* decoder_;     // libvpx decoder context.
  VideoCodec codec_;
  EncodedImage last_keyframe_; // Presumably retained to support Copy();
                               // confirm in vp8.cc.
  int image_format_;
  vpx_ref_frame_t* ref_frame_; // Buffer used by CopyReference().
  int propagation_cnt_;
  bool latest_keyframe_complete_;
  bool mfqe_enabled_;
};  // end of VP8Decoder class
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_H_
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/interface/vp8_common_types.h b/trunk/src/modules/video_coding/codecs/vp8/main/interface/vp8_common_types.h
new file mode 100644
index 0000000..6f347cd
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/interface/vp8_common_types.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
// Ratio allocation between temporal streams:
// Values as required for the VP8 codec (accumulating).
// Row i holds the cumulative bit-rate fractions when encoding with i+1
// temporal layers: entry j is the share of the total rate consumed by
// layers 0..j (so each row ends in 1.0 at index i). The per-layer shares
// are listed in the trailing comments.
// NOTE(review): the identifier is missing an 'a' ("Alloction"); renaming it
// to kVp8LayerRateAllocation would require updating every call site.
static const float
  kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = {
    {1.0f, 0, 0, 0},  // 1 layer
    {0.6f, 1.0f , 0 , 0},  // 2 layers {60%, 40%}
    {0.4f, 0.6f , 1.0f, 0},  // 3 layers {40%, 20%, 40%}
    {0.25f, 0.4f, 0.6f, 1.0f}  // 4 layers {25%, 15%, 20%, 40%}
};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/Android.mk b/trunk/src/modules/video_coding/codecs/vp8/main/source/Android.mk
new file mode 100644
index 0000000..eb6c1af
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/Android.mk
@@ -0,0 +1,47 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# Shared WebRTC build settings (defines MY_WEBRTC_COMMON_DEFS).
include $(LOCAL_PATH)/../../../../../../../android-webrtc.mk

LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_MODULE := libwebrtc_vp8
LOCAL_MODULE_TAGS := optional
LOCAL_CPP_EXTENSION := .cc
LOCAL_SRC_FILES := \
    reference_picture_selection.cc \
    vp8.cc

# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
    $(MY_WEBRTC_COMMON_DEFS)
# TODO(leozwang) Enable WEBRTC_LIBVPX_VERSION after libvpx is updated
# to a new version and also add temporal_layers.cc

# Fixed: removed trailing whitespace after external/libvpx.
LOCAL_C_INCLUDES := \
    $(LOCAL_PATH)/../interface \
    $(LOCAL_PATH)/../../../interface \
    $(LOCAL_PATH)/../../../../../.. \
    $(LOCAL_PATH)/../../../../../../common_video/interface \
    $(LOCAL_PATH)/../../../../../../common_video/vplib/main/interface \
    $(LOCAL_PATH)/../../../../../../modules/interface \
    $(LOCAL_PATH)/../../../../../../system_wrappers/interface \
    external/libvpx

LOCAL_SHARED_LIBRARIES := \
    libcutils \
    libdl \
    libstlport

# NDK builds ship their own STL; only the platform build needs stlport glue.
ifndef NDK_ROOT
include external/stlport/libstlport.mk
endif
include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection.cc b/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection.cc
new file mode 100644
index 0000000..3ae6f19
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection.cc
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "reference_picture_selection.h"
+
+#include "typedefs.h"
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+namespace webrtc {
+
// Constructs the RPS helper in the "no reference established yet" state.
// kRttConfidence (1.33) is the safety margin applied to the measured RTT
// when spacing out reference-frame updates (see EncodeFlags()).
ReferencePictureSelection::ReferencePictureSelection()
    : kRttConfidence(1.33),
      update_golden_next_(true),
      established_golden_(false),
      received_ack_(false),
      last_sent_ref_picture_id_(0),
      last_sent_ref_update_time_(0),
      established_ref_picture_id_(0),
      last_refresh_time_(0),
      rtt_(0) {
}
+
+void ReferencePictureSelection::Init() {
+  update_golden_next_ = true;
+  established_golden_ = false;
+  received_ack_ = false;
+  last_sent_ref_picture_id_ = 0;
+  last_sent_ref_update_time_ = 0;
+  established_ref_picture_id_ = 0;
+  last_refresh_time_ = 0;
+  rtt_ = 0;
+}
+
+void ReferencePictureSelection::ReceivedRPSI(int rpsi_picture_id) {
+  // Assume RPSI is signaled with 14 bits.
+  if ((rpsi_picture_id & 0x3fff) == (last_sent_ref_picture_id_ & 0x3fff)) {
+    // Remote peer has received our last reference frame, switch frame type.
+    received_ack_ = true;
+    established_golden_ = update_golden_next_;
+    update_golden_next_ = !update_golden_next_;
+    established_ref_picture_id_ = last_sent_ref_picture_id_;
+  }
+}
+
+bool ReferencePictureSelection::ReceivedSLI(uint32_t now_ts) {
+  bool send_refresh = false;
+  // Don't send a refresh more than once per round-trip time.
+  // This is to avoid too frequent refreshes, since the receiver
+  // will signal an SLI for every corrupt frame.
+  if (TimestampDiff(now_ts, last_refresh_time_) > rtt_) {
+    send_refresh = true;
+    last_refresh_time_ = now_ts;
+  }
+  return send_refresh;
+}
+
// Computes the libvpx encode flags for the next frame from the current RPS
// state. |picture_id| is the picture id of the frame about to be encoded,
// |send_refresh| requests a decoder refresh (typically from ReceivedSLI()),
// and |now_ts| is the current RTP timestamp. Returns a bitmask of
// VP8_EFLAG_* values; may also record |picture_id| as the pending reference
// when a reference update is emitted.
int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
                                           uint32_t now_ts) {
  int flags = 0;
  // We can't refresh the decoder until we have established the key frame.
  if (send_refresh && received_ack_) {
    flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame
    if (established_golden_)
      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
    else
      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame
  }

  // Make sure we don't update the reference frames too often. We must wait long
  // enough for an RPSI to arrive after the decoder decoded the reference frame.
  // Ideally that should happen after one round-trip time.
  // Add a margin defined by |kRttConfidence|.
  uint32_t update_interval = kRttConfidence * rtt_;
  if (update_interval < kMinUpdateInterval)
    update_interval = kMinUpdateInterval;
  // Don't send reference frame updates until we have an established reference.
  if (TimestampDiff(now_ts, last_sent_ref_update_time_) > update_interval &&
      received_ack_) {
    flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame.
    if (update_golden_next_) {
      flags |= VP8_EFLAG_FORCE_GF;  // Update the golden reference.
      flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update alt-ref.
      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame.
    } else {
      flags |= VP8_EFLAG_FORCE_ARF;  // Update the alt-ref reference.
      flags |= VP8_EFLAG_NO_UPD_GF;  // Don't update the golden frame.
      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
    }
    // Remember this frame as the pending reference until an RPSI arrives.
    last_sent_ref_picture_id_ = picture_id;
    last_sent_ref_update_time_ = now_ts;
  } else {
    // No update of golden or alt-ref. We can therefore freely reference the
    // established reference frame and the last frame.
    if (established_golden_)
      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
    else
      flags |= VP8_EFLAG_NO_REF_GF;   // Don't reference the golden frame.
    flags |= VP8_EFLAG_NO_UPD_GF;  // Don't update the golden frame.
    flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update the alt-ref frame.
  }
  return flags;
}
+
// Notifies the RPS that |picture_id| was encoded as a key frame. The key
// frame becomes the pending reference, and received_ack_ is cleared until
// the remote end acknowledges it via RPSI (see ReceivedRPSI()).
void ReferencePictureSelection::EncodedKeyFrame(int picture_id) {
  last_sent_ref_picture_id_ = picture_id;
  received_ack_ = false;
}
+
+void ReferencePictureSelection::SetRtt(int rtt) {
+  // Convert from milliseconds to timestamp frequency.
+  rtt_ = 90 * rtt;
+}
+
+uint32_t ReferencePictureSelection::TimestampDiff(uint32_t new_ts,
+                                                  uint32_t old_ts) {
+  if (old_ts > new_ts) {
+    // Assuming this is a wrap, doing a compensated subtraction.
+    return (new_ts + (static_cast<int64_t>(1) << 32)) - old_ts;
+  }
+  return new_ts - old_ts;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection.h b/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection.h
new file mode 100644
index 0000000..59e5940
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file defines classes for doing reference picture selection, primarily
+ * with VP8.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
// Implements VP8 reference picture selection (RPS): reacts to RPSI/SLI
// feedback from the receiver and produces the libvpx encode flags that
// control which reference buffers (golden / alt-ref / last) are referenced
// and updated for each frame.
class ReferencePictureSelection {
 public:
  ReferencePictureSelection();
  void Init();

  // Report a received reference picture selection indication. This will
  // introduce a new established reference if the received RPSI isn't too late.
  void ReceivedRPSI(int rpsi_picture_id);

  // Report a received slice loss indication. Returns true if a refresh frame
  // must be sent to the receiver, which is accomplished by only predicting
  // from the established reference.
  // |now_ts| is the RTP timestamp corresponding to the current time. Typically
  // the capture timestamp of the frame currently being processed.
  // Returns true if it's time to encode a decoder refresh, otherwise false.
  bool ReceivedSLI(uint32_t now_ts);

  // Returns the recommended VP8 encode flags needed. May refresh the decoder
  // and/or update the reference buffers.
  // |picture_id| picture id of the frame to be encoded.
  // |send_refresh| should be set to true if a decoder refresh should be
  // encoded, otherwise false.
  // |now_ts| is the RTP timestamp corresponding to the current time. Typically
  // the capture timestamp of the frame currently being processed.
  // Returns the flags to be given to the libvpx encoder when encoding the next
  // frame.
  int EncodeFlags(int picture_id, bool send_refresh, uint32_t now_ts);

  // Notify the RPS that the frame with picture id |picture_id| was encoded as
  // a key frame, effectively updating all reference buffers.
  void EncodedKeyFrame(int picture_id);

  // Set the round-trip time between the sender and the receiver to |rtt|
  // milliseconds.
  void SetRtt(int rtt);

 private:
  // Forward difference of two 32-bit RTP timestamps (wrap-aware).
  static uint32_t TimestampDiff(uint32_t new_ts, uint32_t old_ts);

  // The minimum time between reference frame updates.
  enum { kMinUpdateInterval = 90 * 10 };  // Timestamp frequency
  // Safety margin applied to the RTT when spacing reference updates.
  const double kRttConfidence;

  // True if the next reference update should go to the golden buffer,
  // false if it should go to the alt-ref buffer.
  bool update_golden_next_;
  // True when the established reference lives in the golden buffer.
  bool established_golden_;
  // True once the receiver has acknowledged the last sent reference (RPSI).
  bool received_ack_;
  // Picture id of the most recent reference update (pending ack).
  int last_sent_ref_picture_id_;
  // RTP timestamp at which the last reference update was sent.
  uint32_t last_sent_ref_update_time_;
  // Picture id of the currently established (acknowledged) reference.
  int established_ref_picture_id_;
  // RTP timestamp of the last decoder refresh that was triggered.
  uint32_t last_refresh_time_;
  // Round-trip time in RTP timestamp units (90 ticks/ms; see SetRtt()).
  uint32_t rtt_;
};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection_unittest.cc b/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection_unittest.cc
new file mode 100644
index 0000000..cdac76c
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/reference_picture_selection_unittest.cc
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "reference_picture_selection.h"
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+using webrtc::ReferencePictureSelection;
+
+// The minimum time between reference frame updates. Should match the values
+// set in reference_picture_selection.h
+enum { kMinUpdateInterval = 10 };
+// The minimum time between decoder refreshes through restricted prediction.
+// Should match the values set in reference_picture_selection.h
+enum { kRtt = 10 };
+
+enum {
+  kNoPropagationGolden    = VP8_EFLAG_NO_REF_ARF |
+                            VP8_EFLAG_NO_UPD_GF |
+                            VP8_EFLAG_NO_UPD_ARF,
+  kNoPropagationAltRef    = VP8_EFLAG_NO_REF_GF |
+                            VP8_EFLAG_NO_UPD_GF |
+                            VP8_EFLAG_NO_UPD_ARF,
+  kPropagateGolden        = VP8_EFLAG_FORCE_GF |
+                            VP8_EFLAG_NO_UPD_ARF |
+                            VP8_EFLAG_NO_REF_GF |
+                            VP8_EFLAG_NO_REF_LAST,
+  kPropagateAltRef        = VP8_EFLAG_FORCE_ARF |
+                            VP8_EFLAG_NO_UPD_GF |
+                            VP8_EFLAG_NO_REF_ARF |
+                            VP8_EFLAG_NO_REF_LAST,
+  kRefreshFromGolden      = VP8_EFLAG_NO_REF_LAST |
+                            VP8_EFLAG_NO_REF_ARF,
+  kRefreshFromAltRef      = VP8_EFLAG_NO_REF_LAST |
+                            VP8_EFLAG_NO_REF_GF
+};
+
+class TestRPS : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    rps_.Init();
+    // Initialize with sending a key frame and acknowledging it.
+    rps_.EncodedKeyFrame(0);
+    rps_.ReceivedRPSI(0);
+    rps_.SetRtt(kRtt);
+  }
+
+  ReferencePictureSelection rps_;
+};
+
+TEST_F(TestRPS, TestPropagateReferenceFrames) {
+  // Should propagate the alt-ref reference.
+  uint32_t time = (4 * kMinUpdateInterval) / 3 + 1;
+  EXPECT_EQ(rps_.EncodeFlags(1, false, 90 * time), kPropagateAltRef);
+  rps_.ReceivedRPSI(1);
+  time += (4 * (time + kMinUpdateInterval)) / 3 + 1;
+  // Should propagate the golden reference.
+  EXPECT_EQ(rps_.EncodeFlags(2, false, 90 * time), kPropagateGolden);
+  rps_.ReceivedRPSI(2);
+  // Should propagate the alt-ref reference.
+  time = (4 * (time + kMinUpdateInterval)) / 3 + 1;
+  EXPECT_EQ(rps_.EncodeFlags(3, false, 90 * time), kPropagateAltRef);
+  rps_.ReceivedRPSI(3);
+  // Shouldn't propagate any reference frames (except last), and the established
+  // reference is alt-ref.
+  time = time + kMinUpdateInterval;
+  EXPECT_EQ(rps_.EncodeFlags(4, false, 90 * time), kNoPropagationAltRef);
+}
+
+TEST_F(TestRPS, TestDecoderRefresh) {
+  uint32_t time = kRtt + 1;
+  // No more than one refresh per RTT.
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
+  time += 5;
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), false);
+  time += kRtt - 4;
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
+  // Enough time has elapsed since the previous reference propagation, so we
+  // therefore get both a refresh from golden and a propagation of alt-ref.
+  EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time), kRefreshFromGolden |
+            kPropagateAltRef);
+  rps_.ReceivedRPSI(5);
+  time += kRtt + 1;
+  // Enough time for a new refresh, but not enough time for a reference
+  // propagation.
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
+  EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time), kRefreshFromAltRef |
+            kNoPropagationAltRef);
+}
+
+TEST_F(TestRPS, TestWrap) {
+  EXPECT_EQ(rps_.ReceivedSLI(0xffffffff), true);
+  EXPECT_EQ(rps_.ReceivedSLI(1), false);
+  EXPECT_EQ(rps_.ReceivedSLI(90 * 100), true);
+
+  EXPECT_EQ(rps_.EncodeFlags(7, false, 0xffffffff), kPropagateAltRef);
+  EXPECT_EQ(rps_.EncodeFlags(8, false, 1), kNoPropagationGolden);
+  EXPECT_EQ(rps_.EncodeFlags(10, false, 90 * 100), kPropagateAltRef);
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers.cc b/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers.cc
new file mode 100644
index 0000000..59ad9c2
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers.cc
@@ -0,0 +1,219 @@
+/* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+*
+*  Use of this source code is governed by a BSD-style license
+*  that can be found in the LICENSE file in the root of the source
+*  tree. An additional intellectual property rights grant can be found
+*  in the file PATENTS.  All contributing project authors may
+*  be found in the AUTHORS file in the root of the source tree.
+*/
+
+#include "temporal_layers.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <cassert>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/vp8/main/interface/vp8_common_types.h"
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+namespace webrtc {
+
+TemporalLayers::TemporalLayers(int numberOfTemporalLayers)
+    : number_of_temporal_layers_(numberOfTemporalLayers),
+      temporal_ids_length_(0),
+      temporal_pattern_length_(0),
+      tl0_pic_idx_(rand()),
+      pattern_idx_(255) {
+  assert(kMaxTemporalStreams >= numberOfTemporalLayers);
+  memset(temporal_ids_, 0, sizeof(temporal_ids_));
+  memset(temporal_pattern_, 0, sizeof(temporal_pattern_));
+}
+
+bool TemporalLayers::ConfigureBitrates(int bitrateKbit,
+                                       vpx_codec_enc_cfg_t* cfg) {
+  switch (number_of_temporal_layers_) {
+    case 0:
+    case 1:
+      // Do nothing.
+      break;
+    case 2:
+      temporal_ids_length_ = 2;
+      temporal_ids_[0] = 0;
+      temporal_ids_[1] = 1;
+      cfg->ts_number_layers = number_of_temporal_layers_;
+      cfg->ts_periodicity = temporal_ids_length_;
+      // Split stream 60% 40%.
+      // Bitrate API for VP8 is the aggregated bitrate for all lower layers.
+      cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[1][0];
+      cfg->ts_target_bitrate[1] = bitrateKbit;
+      cfg->ts_rate_decimator[0] = 2;
+      cfg->ts_rate_decimator[1] = 1;
+      memcpy(cfg->ts_layer_id,
+             temporal_ids_,
+             sizeof(unsigned int) * temporal_ids_length_);
+      temporal_pattern_length_ = 8;
+      temporal_pattern_[0] = kTemporalUpdateLast;
+      temporal_pattern_[1] = kTemporalUpdateGoldenWithoutDependency;
+      temporal_pattern_[2] = kTemporalUpdateLast;
+      temporal_pattern_[3] = kTemporalUpdateGolden;
+      temporal_pattern_[4] = kTemporalUpdateLast;
+      temporal_pattern_[5] = kTemporalUpdateGolden;
+      temporal_pattern_[6] = kTemporalUpdateLast;
+      temporal_pattern_[7] = kTemporalUpdateNoneNoRefAltref;
+      break;
+    case 3:
+      temporal_ids_length_ = 4;
+      temporal_ids_[0] = 0;
+      temporal_ids_[1] = 2;
+      temporal_ids_[2] = 1;
+      temporal_ids_[3] = 2;
+      cfg->ts_number_layers = number_of_temporal_layers_;
+      cfg->ts_periodicity = temporal_ids_length_;
+      // Split stream 40% 20% 40%.
+      // Bitrate API for VP8 is the aggregated bitrate for all lower layers.
+      cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[2][0];
+      cfg->ts_target_bitrate[1] = bitrateKbit * kVp8LayerRateAlloction[2][1];
+      cfg->ts_target_bitrate[2] = bitrateKbit;
+      cfg->ts_rate_decimator[0] = 4;
+      cfg->ts_rate_decimator[1] = 2;
+      cfg->ts_rate_decimator[2] = 1;
+      memcpy(cfg->ts_layer_id,
+             temporal_ids_,
+             sizeof(unsigned int) * temporal_ids_length_);
+      temporal_pattern_length_ = 8;
+      temporal_pattern_[0] = kTemporalUpdateLast;
+      temporal_pattern_[1] = kTemporalUpdateAltrefWithoutDependency;
+      temporal_pattern_[2] = kTemporalUpdateGoldenWithoutDependency;
+      temporal_pattern_[3] = kTemporalUpdateAltref;
+      temporal_pattern_[4] = kTemporalUpdateLast;
+      temporal_pattern_[5] = kTemporalUpdateAltref;
+      temporal_pattern_[6] = kTemporalUpdateGolden;
+      temporal_pattern_[7] = kTemporalUpdateNone;
+      break;
+    case 4:
+      temporal_ids_length_ = 8;
+      temporal_ids_[0] = 0;
+      temporal_ids_[1] = 3;
+      temporal_ids_[2] = 2;
+      temporal_ids_[3] = 3;
+      temporal_ids_[4] = 1;
+      temporal_ids_[5] = 3;
+      temporal_ids_[6] = 2;
+      temporal_ids_[7] = 3;
+      // Split stream 25% 15% 20% 40%.
+      // Bitrate API for VP8 is the aggregated bitrate for all lower layers.
+      cfg->ts_number_layers = 4;
+      cfg->ts_periodicity = temporal_ids_length_;
+      cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[3][0];
+      cfg->ts_target_bitrate[1] = bitrateKbit * kVp8LayerRateAlloction[3][1];
+      cfg->ts_target_bitrate[2] = bitrateKbit * kVp8LayerRateAlloction[3][2];
+      cfg->ts_target_bitrate[3] = bitrateKbit;
+      cfg->ts_rate_decimator[0] = 8;
+      cfg->ts_rate_decimator[1] = 4;
+      cfg->ts_rate_decimator[2] = 2;
+      cfg->ts_rate_decimator[3] = 1;
+      memcpy(cfg->ts_layer_id,
+             temporal_ids_,
+             sizeof(unsigned int) * temporal_ids_length_);
+      temporal_pattern_length_ = 16;
+      temporal_pattern_[0] = kTemporalUpdateLast;
+      temporal_pattern_[1] = kTemporalUpdateNone;
+      temporal_pattern_[2] = kTemporalUpdateAltrefWithoutDependency;
+      temporal_pattern_[3] = kTemporalUpdateNone;
+      temporal_pattern_[4] = kTemporalUpdateGoldenWithoutDependency;
+      temporal_pattern_[5] = kTemporalUpdateNone;
+      temporal_pattern_[6] = kTemporalUpdateAltref;
+      temporal_pattern_[7] = kTemporalUpdateNone;
+      temporal_pattern_[8] = kTemporalUpdateLast;
+      temporal_pattern_[9] = kTemporalUpdateNone;
+      temporal_pattern_[10] = kTemporalUpdateAltref;
+      temporal_pattern_[11] = kTemporalUpdateNone;
+      temporal_pattern_[12] = kTemporalUpdateGolden;
+      temporal_pattern_[13] = kTemporalUpdateNone;
+      temporal_pattern_[14] = kTemporalUpdateAltref;
+      temporal_pattern_[15] = kTemporalUpdateNone;
+      break;
+    default:
+      assert(false);
+      return false;
+  }
+  return true;
+}
+
+int TemporalLayers::EncodeFlags() {
+  assert(number_of_temporal_layers_ > 1);
+  assert(kMaxTemporalPattern >= temporal_pattern_length_);
+  assert(0 < temporal_pattern_length_);
+
+  int flags = 0;
+  int patternIdx = ++pattern_idx_ % temporal_pattern_length_;
+  assert(kMaxTemporalPattern >= patternIdx);
+  switch (temporal_pattern_[patternIdx]) {
+    case kTemporalUpdateLast:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_REF_GF;
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      break;
+    case kTemporalUpdateGoldenWithoutDependency:
+      flags |= VP8_EFLAG_NO_REF_GF;
+      // Deliberately no break here.
+    case kTemporalUpdateGolden:
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      break;
+    case kTemporalUpdateAltrefWithoutDependency:
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      flags |= VP8_EFLAG_NO_REF_GF;
+      // Deliberately no break here.
+    case kTemporalUpdateAltref:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      break;
+    case kTemporalUpdateNoneNoRefAltref:
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      // Deliberately no break here.
+    case kTemporalUpdateNone:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+      break;
+  }
+  return flags;
+}
+
+void TemporalLayers::PopulateCodecSpecific(bool key_frame,
+                                           CodecSpecificInfoVP8 *vp8_info) {
+  assert(number_of_temporal_layers_ > 1);
+  assert(0 < temporal_ids_length_);
+
+  if (key_frame) {
+    // Keyframe is always temporal layer 0
+    vp8_info->temporalIdx = 0;
+  } else {
+    vp8_info->temporalIdx = temporal_ids_[pattern_idx_ % temporal_ids_length_];
+  }
+  TemporalReferences temporal_reference =
+      temporal_pattern_[pattern_idx_ % temporal_pattern_length_];
+
+  if (temporal_reference == kTemporalUpdateAltrefWithoutDependency ||
+      temporal_reference == kTemporalUpdateGoldenWithoutDependency ||
+      (temporal_reference == kTemporalUpdateNone &&
+      number_of_temporal_layers_ == 4)) {
+    vp8_info->layerSync = true;
+  } else {
+    vp8_info->layerSync = false;
+  }
+
+  if (vp8_info->temporalIdx == 0) {
+    tl0_pic_idx_++;
+  }
+  vp8_info->tl0PicIdx = tl0_pic_idx_;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers.h b/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers.h
new file mode 100644
index 0000000..2ca7229
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers.h
@@ -0,0 +1,67 @@
+/* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+*
+*  Use of this source code is governed by a BSD-style license
+*  that can be found in the LICENSE file in the root of the source
+*  tree. An additional intellectual property rights grant can be found
+*  in the file PATENTS.  All contributing project authors may
+*  be found in the AUTHORS file in the root of the source tree.
+*/
+/*
+* This file defines classes for doing temporal layers with VP8.
+*/
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
+
+#include <typedefs.h>
+
+ // VPX forward declaration
+typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
+
+namespace webrtc {
+
+struct CodecSpecificInfoVP8;
+
+class TemporalLayers {
+ public:
+  TemporalLayers(int number_of_temporal_layers);
+
+  // Returns the recommended VP8 encode flags needed. May refresh the decoder
+  // and/or update the reference buffers.
+  int EncodeFlags();
+
+  bool ConfigureBitrates(int bitrate_kbit, vpx_codec_enc_cfg_t* cfg);
+
+  void PopulateCodecSpecific(bool key_frame, CodecSpecificInfoVP8 *vp8_info);
+
+ private:
+  enum TemporalReferences {
+    // Second layer and last frame in cycle, for 2 layers.
+    kTemporalUpdateNoneNoRefAltref = 6,
+    // Highest enhancement layer.
+    kTemporalUpdateNone = 5,
+    // Second enhancement layer.
+    kTemporalUpdateAltref = 4,
+    // Second enhancement layer without dependency on previous frames in
+    // the second enhancement layer.
+    kTemporalUpdateAltrefWithoutDependency = 3,
+    // First enhancement layer.
+    kTemporalUpdateGolden = 2,
+    // First enhancement layer without dependency on previous frames in
+    // the first enhancement layer.
+    kTemporalUpdateGoldenWithoutDependency = 1,
+    // Base layer.
+    kTemporalUpdateLast = 0,
+  };
+  enum { kMaxTemporalPattern = 16 };
+
+  int number_of_temporal_layers_;
+  int temporal_ids_length_;
+  int temporal_ids_[kMaxTemporalPattern];
+  int temporal_pattern_length_;
+  TemporalReferences temporal_pattern_[kMaxTemporalPattern];
+  uint8_t tl0_pic_idx_;
+  uint8_t pattern_idx_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
+
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers_unittest.cc b/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers_unittest.cc
new file mode 100644
index 0000000..6525dda
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/temporal_layers_unittest.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "gtest/gtest.h"
+#include "temporal_layers.h"
+#include "video_codec_interface.h"
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+namespace webrtc {
+
+enum {
+  kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF |
+                        VP8_EFLAG_NO_UPD_ARF |
+                        VP8_EFLAG_NO_REF_GF |
+                        VP8_EFLAG_NO_REF_ARF,
+  kTemporalUpdateGoldenWithoutDependency = VP8_EFLAG_NO_REF_GF |
+                                           VP8_EFLAG_NO_REF_ARF |
+                                           VP8_EFLAG_NO_UPD_ARF |
+                                           VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateGolden = VP8_EFLAG_NO_REF_ARF |
+                          VP8_EFLAG_NO_UPD_ARF |
+                          VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateAltrefWithoutDependency = VP8_EFLAG_NO_REF_ARF |
+                                           VP8_EFLAG_NO_REF_GF |
+                                           VP8_EFLAG_NO_UPD_GF |
+                                           VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF |
+                          VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF |
+                        VP8_EFLAG_NO_UPD_ARF |
+                        VP8_EFLAG_NO_UPD_LAST |
+                        VP8_EFLAG_NO_UPD_ENTROPY,
+  kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF |
+                                   VP8_EFLAG_NO_UPD_GF |
+                                   VP8_EFLAG_NO_UPD_ARF |
+                                   VP8_EFLAG_NO_UPD_LAST |
+                                   VP8_EFLAG_NO_UPD_ENTROPY,
+};
+
+TEST(TemporalLayersTest, 2Layers) {
+  TemporalLayers tl(2);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+
+  int expected_flags[16] = { kTemporalUpdateLast,
+                             kTemporalUpdateGoldenWithoutDependency,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateNoneNoRefAltRef,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGoldenWithoutDependency,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateNoneNoRefAltRef
+  };
+  int expected_temporal_idx[16] =
+      { 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 };
+
+  bool expected_layer_sync[16] =
+      { false, true, false, false, false, false, false, false,
+        false, true, false, false, false, false, false, false };
+
+  for (int i = 0; i < 16; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(false, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+}
+
+TEST(TemporalLayersTest, 3Layers) {
+  TemporalLayers tl(3);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+
+  int expected_flags[16] = { kTemporalUpdateLast,
+                             kTemporalUpdateAltrefWithoutDependency,
+                             kTemporalUpdateGoldenWithoutDependency,
+                             kTemporalUpdateAltref,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateAltref,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateNone,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateAltrefWithoutDependency,
+                             kTemporalUpdateGoldenWithoutDependency,
+                             kTemporalUpdateAltref,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateAltref,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[16] =
+      { 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2 };
+
+  bool expected_layer_sync[16] =
+      { false, true, true, false, false, false, false, false,
+        false, true, true, false, false, false, false, false };
+
+  for (int i = 0; i < 16; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(false, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+}
+
+TEST(TemporalLayersTest, 4Layers) {
+  TemporalLayers tl(4);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+  int expected_flags[16] = {
+      kTemporalUpdateLast,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltrefWithoutDependency,
+      kTemporalUpdateNone,
+      kTemporalUpdateGoldenWithoutDependency,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltref,
+      kTemporalUpdateNone,
+      kTemporalUpdateLast,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltref,
+      kTemporalUpdateNone,
+      kTemporalUpdateGolden,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltref,
+      kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[16] =
+      { 0, 3, 2, 3, 1, 3, 2, 3, 0, 3, 2, 3, 1, 3, 2, 3 };
+
+  bool expected_layer_sync[16] =
+      { false, true, true, true, true, true, false, true,
+        false, true, false, true, false, true, false, true };
+
+  for (int i = 0; i < 16; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(false, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+}
+
+TEST(TemporalLayersTest, KeyFrame) {
+  TemporalLayers tl(3);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+
+  int expected_flags[8] = {
+      kTemporalUpdateLast,
+      kTemporalUpdateAltrefWithoutDependency,
+      kTemporalUpdateGoldenWithoutDependency,
+      kTemporalUpdateAltref,
+      kTemporalUpdateLast,
+      kTemporalUpdateAltref,
+      kTemporalUpdateGolden,
+      kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[8] =
+      { 0, 0, 0, 0, 0, 0, 0, 2};
+
+  bool expected_layer_sync[8] =
+      { false, true, true, false, false, false, false, false };
+
+  for (int i = 0; i < 7; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(true, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+  EXPECT_EQ(expected_flags[7], tl.EncodeFlags());
+  tl.PopulateCodecSpecific(false, &vp8_info);
+  EXPECT_EQ(expected_temporal_idx[7], vp8_info.temporalIdx);
+  bool expected_sync = expected_layer_sync[7];
+  EXPECT_EQ(expected_sync, vp8_info.layerSync);
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/vp8.cc b/trunk/src/modules/video_coding/codecs/vp8/main/source/vp8.cc
new file mode 100644
index 0000000..a477237
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/vp8.cc
@@ -0,0 +1,977 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ * This file contains the WEBRTC VP8 wrapper implementation
+ *
+ */
+#include "vp8.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#include "module_common_types.h"
+#include "reference_picture_selection.h"
+#include "temporal_layers.h"
+#include "tick_util.h"
+#include "vpx/vpx_encoder.h"
+#include "vpx/vpx_decoder.h"
+#include "vpx/vp8cx.h"
+#include "vpx/vp8dx.h"
+
+enum { kVp8ErrorPropagationTh = 30 };
+
+namespace webrtc
+{
+
+VP8Encoder* VP8Encoder::Create() {
+  return new VP8Encoder();
+}
+
+VP8Encoder::VP8Encoder()
+    : encoded_image_(),
+      encoded_complete_callback_(NULL),
+      inited_(false),
+      timestamp_(0),
+      picture_id_(0),
+      feedback_mode_(false),
+      cpu_speed_(-6), // default value
+      rc_max_intra_target_(0),
+      token_partitions_(VP8_ONE_TOKENPARTITION),
+      rps_(new ReferencePictureSelection),
+#if WEBRTC_LIBVPX_VERSION >= 971
+      temporal_layers_(NULL),
+#endif
+      encoder_(NULL),
+      config_(NULL),
+      raw_(NULL) {
+  uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+  srand(seed);
+}
+
+VP8Encoder::~VP8Encoder() {
+  Release();
+  delete rps_;
+}
+
+int VP8Encoder::Release() {
+  if (encoded_image_._buffer != NULL) {
+    delete [] encoded_image_._buffer;
+    encoded_image_._buffer = NULL;
+  }
+  if (encoder_ != NULL) {
+    if (vpx_codec_destroy(encoder_)) {
+      return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    delete encoder_;
+    encoder_ = NULL;
+  }
+  if (config_ != NULL) {
+    delete config_;
+    config_ = NULL;
+  }
+  if (raw_ != NULL) {
+    vpx_img_free(raw_);
+    delete raw_;
+    raw_ = NULL;
+  }
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_ != NULL) {
+    delete temporal_layers_;
+    temporal_layers_ = NULL;
+  }
+#endif
+  inited_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Encoder::SetRates(uint32_t new_bitrate_kbit, uint32_t new_framerate) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (encoder_->err) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  if (new_framerate < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // update bit rate
+  if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) {
+    new_bitrate_kbit = codec_.maxBitrate;
+  }
+  config_->rc_target_bitrate = new_bitrate_kbit; // in kbit/s
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    temporal_layers_->ConfigureBitrates(new_bitrate_kbit, config_);
+  }
+#endif
+  codec_.maxFramerate = new_framerate;
+
+  // update encoder context
+  if (vpx_codec_enc_config_set(encoder_, config_)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Encoder::InitEncode(const VideoCodec* inst,
+                           int number_of_cores,
+                           uint32_t /*max_payload_size*/) {
+  if (inst == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (inst->maxFramerate < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // allow zero to represent an unspecified maxBitRate
+  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (inst->width < 1 || inst->height < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (number_of_cores < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
+
+  int retVal = Release();
+  if (retVal < 0) {
+    return retVal;
+  }
+  if (encoder_ == NULL) {
+    encoder_ = new vpx_codec_ctx_t;
+  }
+  if (config_ == NULL) {
+    config_ = new vpx_codec_enc_cfg_t;
+  }
+  if (raw_ == NULL) {
+    raw_ = new vpx_image_t;
+  }
+  timestamp_ = 0;
+
+  codec_ = *inst;
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (inst->codecSpecific.VP8.numberOfTemporalLayers > 1) {
+    assert(temporal_layers_ == NULL);
+    temporal_layers_ =
+        new TemporalLayers(inst->codecSpecific.VP8.numberOfTemporalLayers);
+  }
+#endif
+  // random start 16 bits is enough.
+  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+
+  // allocate memory for encoded image
+  if (encoded_image_._buffer != NULL) {
+    delete [] encoded_image_._buffer;
+  }
+  encoded_image_._size = (3 * codec_.width * codec_.height) >> 1;
+  encoded_image_._buffer = new uint8_t[encoded_image_._size];
+  encoded_image_._completeFrame = true;
+
+  vpx_img_alloc(raw_, IMG_FMT_I420, codec_.width, codec_.height, 1);
+  // populate encoder configuration with default values
+  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), config_, 0)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  config_->g_w = codec_.width;
+  config_->g_h = codec_.height;
+  config_->rc_target_bitrate = inst->startBitrate;  // in kbit/s
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    temporal_layers_->ConfigureBitrates(inst->startBitrate, config_);
+  }
+#endif
+  // setting the time base of the codec
+  config_->g_timebase.num = 1;
+  config_->g_timebase.den = 90000;
+
+  // Set the error resilience mode according to user settings.
+  switch (inst->codecSpecific.VP8.resilience) {
+    case kResilienceOff:
+      config_->g_error_resilient = 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+      if (temporal_layers_) {
+        // Must be on for temporal layers.
+        config_->g_error_resilient = 1;
+      }
+#endif
+      break;
+    case kResilientStream:
+      config_->g_error_resilient = 1;  // TODO(holmer): Replace with
+      // VPX_ERROR_RESILIENT_DEFAULT when we
+      // drop support for libvpx 9.6.0.
+      break;
+    case kResilientFrames:
+#ifdef INDEPENDENT_PARTITIONS
+      config_->g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT |
+      VPX_ERROR_RESILIENT_PARTITIONS;
+      break;
+#else
+      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;  // Not supported
+#endif
+  }
+  config_->g_lag_in_frames = 0; // 0- no frame lagging
+
+  // Determining number of threads based on the image size
+  if (codec_.width * codec_.height > 704 * 576 && number_of_cores > 1) {
+    // 2 threads when larger than 4CIF
+    config_->g_threads = 2;
+  } else {
+    config_->g_threads = 1;
+  }
+
+  // rate control settings
+  config_->rc_dropframe_thresh = 30;
+  config_->rc_end_usage = VPX_CBR;
+  config_->g_pass = VPX_RC_ONE_PASS;
+  config_->rc_resize_allowed = 0;
+  config_->rc_min_quantizer = 8;
+  config_->rc_max_quantizer = 56;
+  config_->rc_undershoot_pct = 100;
+  config_->rc_overshoot_pct = 15;
+  config_->rc_buf_initial_sz = 500;
+  config_->rc_buf_optimal_sz = 600;
+  config_->rc_buf_sz = 1000;
+  // set the maximum target size of any key-frame.
+  rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz);
+
+  if (feedback_mode_) {
+    // Disable periodic key frames if we get feedback from the decoder
+    // through SLI and RPSI.
+    config_->kf_mode = VPX_KF_DISABLED;
+  } else {
+    config_->kf_mode = VPX_KF_AUTO;
+    config_->kf_max_dist = 3000;
+  }
+  switch (inst->codecSpecific.VP8.complexity) {
+    case kComplexityHigh:
+      cpu_speed_ = -5;
+      break;
+    case kComplexityHigher:
+      cpu_speed_ = -4;
+      break;
+    case kComplexityMax:
+      cpu_speed_ = -3;
+      break;
+    default:
+      cpu_speed_ = -6;
+      break;
+  }
+  rps_->Init();
+  return InitAndSetControlSettings();
+}
+
+// Initializes the libvpx encoder context from the prepared |config_| and
+// applies per-instance control settings (static threshold, cpu speed, token
+// partitions, noise sensitivity, and -- on libvpx >= 971 -- the key-frame
+// bitrate cap computed by MaxIntraTarget()).
+// Returns WEBRTC_VIDEO_CODEC_OK on success, or
+// WEBRTC_VIDEO_CODEC_UNINITIALIZED if vpx_codec_enc_init() fails.
+int VP8Encoder::InitAndSetControlSettings() {
+  vpx_codec_flags_t flags = 0;
+  // TODO(holmer): We should make a smarter decision on the number of
+  // partitions. Eight is probably not the optimal number for low resolution
+  // video.
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  // Request per-partition output packets so partitions can be packetized
+  // individually (see GetEncodedPartitions()).
+  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
+#endif
+  if (vpx_codec_enc_init(encoder_, vpx_codec_vp8_cx(), config_, flags)) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  vpx_codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 800);
+  vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
+  vpx_codec_control(encoder_, VP8E_SET_TOKEN_PARTITIONS,
+                    static_cast<vp8e_token_partitions>(token_partitions_));
+  vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY, 2);
+#if WEBRTC_LIBVPX_VERSION >= 971
+  // Cap key-frame size as a percentage of the per-frame bandwidth.
+  vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
+                    rc_max_intra_target_);
+#endif
+  inited_ = true;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Computes the maximum target size of a key frame, expressed as a percentage
+// of the per-frame bandwidth, for VP8E_SET_MAX_INTRA_BITRATE_PCT.
+// |optimalBuffersize| is the rc_buf_optimal_sz value in milliseconds.
+uint32_t VP8Encoder::MaxIntraTarget(uint32_t optimalBuffersize) {
+  // Set max to the optimal buffer level (normalized by target BR),
+  // and scaled by a scalePar.
+  // Max target size = scalePar * optimalBufferSize * targetBR[Kbps].
+  // This values is presented in percentage of perFrameBw:
+  // perFrameBw = targetBR[Kbps] * 1000 / frameRate.
+  // The target in % is as follows:
+
+  float scalePar = 0.5;
+  // optimalBuffersize is in ms, hence /10 converts "ms * fps" to percent.
+  uint32_t targetPct = optimalBuffersize * scalePar * codec_.maxFramerate / 10;
+
+  // Don't go below 3 times the per frame bandwidth.
+  const uint32_t minIntraTh = 300;
+  return (targetPct < minIntraTh) ? minIntraTh: targetPct;
+}
+
+// Encodes one raw frame. Validates encoder state, maps the packed input
+// buffer onto the vpx_image_t planes, derives the encode flags (temporal
+// layers, forced key frame on request, or RPS/SLI feedback flags), runs
+// libvpx in real-time mode, and forwards the output via
+// GetEncodedPartitions()/GetEncodedFrame(). Returns a WEBRTC_VIDEO_CODEC_*
+// status code.
+int VP8Encoder::Encode(const RawImage& input_image,
+                       const CodecSpecificInfo* codec_specific_info,
+                       const VideoFrameType* frame_types) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (input_image._buffer == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (encoded_complete_callback_ == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  // image in vpx_image_t format
+  // The buffer is treated as packed planar YUV: Y plane of width*height
+  // bytes, followed by U, then V at offset width*height*5/4.
+  raw_->planes[PLANE_Y] = input_image._buffer;
+  raw_->planes[PLANE_U] = &input_image._buffer[codec_.height * codec_.width];
+  raw_->planes[PLANE_V] =
+      &input_image._buffer[codec_.height * codec_.width * 5 >> 2];
+
+  int flags = 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    flags |= temporal_layers_->EncodeFlags();
+  }
+#endif
+  bool send_keyframe = frame_types && (*frame_types == kKeyFrame);
+  if (send_keyframe) {
+    // Key frame request from caller.
+    // Will update both golden and alt-ref.
+    flags = VPX_EFLAG_FORCE_KF;
+  } else if (feedback_mode_ && codec_specific_info) {
+    // Handle RPSI and SLI messages and set up the appropriate encode flags.
+    bool sendRefresh = false;
+    if (codec_specific_info->codecType == kVideoCodecVP8) {
+      if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) {
+        rps_->ReceivedRPSI(
+            codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
+      }
+      if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
+        sendRefresh = rps_->ReceivedSLI(input_image._timeStamp);
+      }
+    }
+    flags = rps_->EncodeFlags(picture_id_, sendRefresh, input_image._timeStamp);
+  }
+
+  // TODO(holmer): Ideally the duration should be the timestamp diff of this
+  // frame and the next frame to be encoded, which we don't have. Instead we
+  // would like to use the duration of the previous frame. Unfortunately the
+  // rate control seems to be off with that setup. Using the average input
+  // frame rate to calculate an average duration for now.
+  assert(codec_.maxFramerate > 0);
+  // Duration in 90 kHz RTP clock ticks.
+  uint32_t duration = 90000 / codec_.maxFramerate;
+  if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
+                       VPX_DL_REALTIME)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  timestamp_ += duration;
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  return GetEncodedPartitions(input_image);
+#else
+  return GetEncodedFrame(input_image);
+#endif
+}
+
+// Fills |codec_specific| with the VP8 RTP payload metadata for the packet
+// |pkt| (picture id, droppable flag, temporal-layer info when layers are in
+// use) and advances the 15-bit picture id counter.
+void VP8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
+                                       const vpx_codec_cx_pkt& pkt) {
+  assert(codec_specific != NULL);
+  codec_specific->codecType = kVideoCodecVP8;
+  CodecSpecificInfoVP8 *vp8Info = &(codec_specific->codecSpecific.VP8);
+  vp8Info->pictureId = picture_id_;
+  vp8Info->simulcastIdx = 0;
+  vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
+  vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    // Temporal layers own temporalIdx/layerSync/tl0PicIdx.
+    temporal_layers_->PopulateCodecSpecific(
+        (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? true : false, vp8Info);
+  } else {
+#endif
+    vp8Info->temporalIdx = kNoTemporalIdx;
+    vp8Info->layerSync = false;
+    vp8Info->tl0PicIdx = kNoTl0PicIdx;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  }
+#endif
+  picture_id_ = (picture_id_ + 1) & 0x7FFF;  // prepare next
+}
+
+// Fetches one complete encoded frame from libvpx (pre-971 path without
+// per-partition output), copies it into |encoded_image_|, derives a
+// two-entry fragmentation header from the VP8 frame tag, and delivers it to
+// the registered callback. Returns WEBRTC_VIDEO_CODEC_OK also when the
+// encoder dropped the frame (no packet, no error).
+int VP8Encoder::GetEncodedFrame(const RawImage& input_image) {
+  vpx_codec_iter_t iter = NULL;
+  encoded_image_._frameType = kDeltaFrame;
+  const vpx_codec_cx_pkt_t *pkt= vpx_codec_get_cx_data(encoder_, &iter);
+  if (pkt == NULL) {
+    if (!encoder_->err) {
+      // dropped frame
+      return WEBRTC_VIDEO_CODEC_OK;
+    } else {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  } else if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
+    CodecSpecificInfo codecSpecific;
+    PopulateCodecSpecific(&codecSpecific, *pkt);
+
+    assert(pkt->data.frame.sz <= encoded_image_._size);
+    memcpy(encoded_image_._buffer, pkt->data.frame.buf, pkt->data.frame.sz);
+    encoded_image_._length = uint32_t(pkt->data.frame.sz);
+    encoded_image_._encodedHeight = raw_->h;
+    encoded_image_._encodedWidth = raw_->w;
+
+    // check if encoded frame is a key frame
+    if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
+      encoded_image_._frameType = kKeyFrame;
+      rps_->EncodedKeyFrame(picture_id_);
+    }
+
+    if (encoded_image_._length > 0) {
+      encoded_image_._timeStamp = input_image._timeStamp;
+
+      // Figure out where partition boundaries are located.
+      RTPFragmentationHeader fragInfo;
+      fragInfo.VerifyAndAllocateFragmentationHeader(2);
+      // two partitions: 1st and 2nd
+
+      // First partition
+      fragInfo.fragmentationOffset[0] = 0;
+      uint8_t *firstByte = encoded_image_._buffer;
+      // The first three bytes hold the little-endian VP8 frame tag; bits
+      // 5..23 carry the size of the first partition (per the VP8 bitstream
+      // frame-tag layout, RFC 6386).
+      uint32_t tmpSize = (firstByte[2] << 16) | (firstByte[1] << 8)
+                    | firstByte[0];
+      fragInfo.fragmentationLength[0] = (tmpSize >> 5) & 0x7FFFF;
+      fragInfo.fragmentationPlType[0] = 0; // not known here
+      fragInfo.fragmentationTimeDiff[0] = 0;
+
+      // Second partition
+      fragInfo.fragmentationOffset[1] = fragInfo.fragmentationLength[0];
+      fragInfo.fragmentationLength[1] = encoded_image_._length -
+          fragInfo.fragmentationLength[0];
+      fragInfo.fragmentationPlType[1] = 0; // not known here
+      fragInfo.fragmentationTimeDiff[1] = 0;
+
+      encoded_complete_callback_->Encoded(encoded_image_, &codecSpecific,
+                                        &fragInfo);
+    }
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  return WEBRTC_VIDEO_CODEC_ERROR;
+}
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+// Drains per-partition output packets from libvpx (enabled via
+// VPX_CODEC_USE_OUTPUT_PARTITION), appending each partition to
+// |encoded_image_| while recording its offset/length in a fragmentation
+// header, then delivers the assembled frame to the callback. The last packet
+// of a frame is the one without VPX_FRAME_IS_FRAGMENT set.
+int VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
+  vpx_codec_iter_t iter = NULL;
+  int part_idx = 0;
+  encoded_image_._length = 0;
+  encoded_image_._frameType = kDeltaFrame;
+  RTPFragmentationHeader frag_info;
+  // token_partitions_ is log2 of the partition count; +1 for the first
+  // (mode/mv) partition.
+  frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_) + 1);
+  CodecSpecificInfo codec_specific;
+
+  const vpx_codec_cx_pkt_t *pkt = NULL;
+  while ((pkt = vpx_codec_get_cx_data(encoder_, &iter)) != NULL) {
+    switch(pkt->kind) {
+      case VPX_CODEC_CX_FRAME_PKT: {
+        memcpy(&encoded_image_._buffer[encoded_image_._length],
+               pkt->data.frame.buf,
+               pkt->data.frame.sz);
+        frag_info.fragmentationOffset[part_idx] = encoded_image_._length;
+        frag_info.fragmentationLength[part_idx] =  pkt->data.frame.sz;
+        frag_info.fragmentationPlType[part_idx] = 0;  // not known here
+        frag_info.fragmentationTimeDiff[part_idx] = 0;
+        encoded_image_._length += pkt->data.frame.sz;
+        assert(encoded_image_._length <= encoded_image_._size);
+        ++part_idx;
+        break;
+      }
+      default: {
+        break;
+      }
+    }
+    // End of frame
+    if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
+      // check if encoded frame is a key frame
+      if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
+          encoded_image_._frameType = kKeyFrame;
+          rps_->EncodedKeyFrame(picture_id_);
+      }
+      PopulateCodecSpecific(&codec_specific, *pkt);
+      break;
+    }
+  }
+  if (encoded_image_._length > 0) {
+    encoded_image_._timeStamp = input_image._timeStamp;
+    encoded_image_._encodedHeight = raw_->h;
+    encoded_image_._encodedWidth = raw_->w;
+    encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
+                                      &frag_info);
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+#endif
+
+// Forwards the round-trip time to the reference picture selection logic.
+// Packet loss is currently ignored by this encoder.
+int VP8Encoder::SetChannelParameters(uint32_t /*packet_loss*/, int rtt) {
+  rps_->SetRtt(rtt);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores the callback that receives encoded frames; ownership stays with
+// the caller.
+int VP8Encoder::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
+  encoded_complete_callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Factory; the caller takes ownership of the returned decoder.
+VP8Decoder* VP8Decoder::Create() {
+  return new VP8Decoder();
+}
+
+// Constructs an uninitialized decoder; InitDecode() must be called before
+// Decode(). propagation_cnt_ == -1 means "no loss observed yet".
+VP8Decoder::VP8Decoder()
+    : decode_complete_callback_(NULL),
+      inited_(false),
+      feedback_mode_(false),
+      decoder_(NULL),
+      last_keyframe_(),
+      image_format_(VPX_IMG_FMT_NONE),
+      ref_frame_(NULL),
+      propagation_cnt_(-1),
+      latest_keyframe_complete_(false),
+      mfqe_enabled_(false) {
+}
+
+// Destructor; forces inited_ so Release() performs the full teardown even if
+// the decoder was never (re)initialized.
+VP8Decoder::~VP8Decoder() {
+  inited_ = true; // in order to do the actual release
+  Release();
+}
+
+// Re-initializes the decoder with the stored codec settings and clears the
+// error-propagation / key-frame bookkeeping.
+int VP8Decoder::Reset() {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  InitDecode(&codec_, 1);
+  propagation_cnt_ = -1;
+  latest_keyframe_complete_ = false;
+  mfqe_enabled_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Prepares the libvpx decoder for a new session: releases any previous
+// context, allocates a fresh one, enables error concealment / postproc on
+// libvpx >= 971 (plus input partitions when INDEPENDENT_PARTITIONS is set),
+// and stores the codec settings for later duplication via Copy().
+// |inst| may be NULL (it is null-checked below); in that case the previously
+// stored settings are kept. Returns WEBRTC_VIDEO_CODEC_OK on success or
+// WEBRTC_VIDEO_CODEC_MEMORY if the libvpx context cannot be initialized.
+int VP8Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) {
+  int ret_val = Release();
+  if (ret_val < 0 ) {
+    return ret_val;
+  }
+  if (decoder_ == NULL) {
+    decoder_ = new vpx_dec_ctx_t;
+  }
+  if (inst && inst->codecType == kVideoCodecVP8) {
+    feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
+  }
+  vpx_codec_dec_cfg_t  cfg;
+  // Setting number of threads to a constant value (1)
+  cfg.threads = 1;
+  cfg.h = cfg.w = 0; // set after decode
+
+  vpx_codec_flags_t flags = 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  flags = VPX_CODEC_USE_ERROR_CONCEALMENT | VPX_CODEC_USE_POSTPROC;
+#ifdef INDEPENDENT_PARTITIONS
+  flags |= VPX_CODEC_USE_INPUT_PARTITION;
+#endif
+#endif
+
+  if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) {
+    return WEBRTC_VIDEO_CODEC_MEMORY;
+  }
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  vp8_postproc_cfg_t  ppcfg;
+  // Disable deblocking for now due to uninitialized memory being returned.
+  ppcfg.post_proc_flag = 0;
+  // Strength of deblocking filter. Valid range:[0,16]
+  //ppcfg.deblocking_level = 3;
+  vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
+#endif
+
+  // Save VideoCodec instance for later; mainly for duplicating the decoder.
+  // |inst| was treated as optional above, so guard the dereference here too
+  // instead of crashing when re-initializing without new settings.
+  if (inst) {
+    codec_ = *inst;
+  }
+  propagation_cnt_ = -1;
+  latest_keyframe_complete_ = false;
+
+  inited_ = true;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::Decode(const EncodedImage& input_image,
+                       bool missing_frames,
+                       const RTPFragmentationHeader* fragmentation,
+                       const CodecSpecificInfo* codec_specific_info,
+                       int64_t /*render_time_ms*/) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (decode_complete_callback_ == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (input_image._buffer == NULL && input_image._length > 0) {
+    // Reset to avoid requesting key frames too often.
+    if (propagation_cnt_ > 0)
+      propagation_cnt_ = 0;
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+#ifdef INDEPENDENT_PARTITIONS
+  if (fragmentation == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+#endif
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (!mfqe_enabled_ && codec_specific_info &&
+      codec_specific_info->codecSpecific.VP8.temporalIdx > 0) {
+    // Enable MFQE if we are receiving layers.
+    // temporalIdx is set in the jitter buffer according to what the RTP
+    // header says.
+    mfqe_enabled_ = true;
+    vp8_postproc_cfg_t  ppcfg;
+    ppcfg.post_proc_flag = VP8_MFQE;
+    vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
+  }
+#endif
+
+  // Restrict error propagation using key frame requests. Disabled when
+  // the feedback mode is enabled (RPS).
+  // Reset on a key frame refresh.
+  if (!feedback_mode_) {
+    if (input_image._frameType == kKeyFrame && input_image._completeFrame)
+      propagation_cnt_ = -1;
+    // Start count on first loss.
+    else if ((!input_image._completeFrame || missing_frames) &&
+        propagation_cnt_ == -1)
+      propagation_cnt_ = 0;
+    if (propagation_cnt_ >= 0)
+      propagation_cnt_++;
+  }
+
+  vpx_dec_iter_t iter = NULL;
+  vpx_image_t* img;
+  int ret;
+
+  // Check for missing frames.
+  if (missing_frames) {
+    // Call decoder with zero data length to signal missing frames.
+    if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) {
+      // Reset to avoid requesting key frames too often.
+      if (propagation_cnt_ > 0)
+        propagation_cnt_ = 0;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    img = vpx_codec_get_frame(decoder_, &iter);
+    iter = NULL;
+  }
+
+#ifdef INDEPENDENT_PARTITIONS
+  if (DecodePartitions(inputImage, fragmentation)) {
+    // Reset to avoid requesting key frames too often.
+    if (propagation_cnt_ > 0) {
+      propagation_cnt_ = 0;
+    }
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+#else
+  uint8_t* buffer = input_image._buffer;
+  if (input_image._length == 0) {
+    buffer = NULL; // Triggers full frame concealment.
+  }
+  if (vpx_codec_decode(decoder_,
+                       buffer,
+                       input_image._length,
+                       0,
+                       VPX_DL_REALTIME)) {
+    // Reset to avoid requesting key frames too often.
+    if (propagation_cnt_ > 0)
+      propagation_cnt_ = 0;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+#endif
+
+  // Store encoded frame if key frame. (Used in Copy method.)
+  if (input_image._frameType == kKeyFrame && input_image._buffer != NULL) {
+    const uint32_t bytes_to_copy = input_image._length;
+    if (last_keyframe_._size < bytes_to_copy) {
+      delete [] last_keyframe_._buffer;
+      last_keyframe_._buffer = NULL;
+      last_keyframe_._size = 0;
+    }
+
+    uint8_t* temp_buffer = last_keyframe_._buffer; // Save buffer ptr.
+    uint32_t temp_size = last_keyframe_._size; // Save size.
+    last_keyframe_ = input_image; // Shallow copy.
+    last_keyframe_._buffer = temp_buffer; // Restore buffer ptr.
+    last_keyframe_._size = temp_size; // Restore buffer size.
+    if (!last_keyframe_._buffer) {
+      // Allocate memory.
+      last_keyframe_._size = bytes_to_copy;
+      last_keyframe_._buffer = new uint8_t[last_keyframe_._size];
+    }
+    // Copy encoded frame.
+    memcpy(last_keyframe_._buffer, input_image._buffer, bytes_to_copy);
+    last_keyframe_._length = bytes_to_copy;
+  }
+
+  img = vpx_codec_get_frame(decoder_, &iter);
+  ret = ReturnFrame(img, input_image._timeStamp);
+  if (ret != 0) {
+    // Reset to avoid requesting key frames too often.
+    if (ret < 0 && propagation_cnt_ > 0)
+      propagation_cnt_ = 0;
+    return ret;
+  }
+  if (feedback_mode_) {
+    // Whenever we receive an incomplete key frame all reference buffers will
+    // be corrupt. If that happens we must request new key frames until we
+    // decode a complete.
+    if (input_image._frameType == kKeyFrame)
+      latest_keyframe_complete_ = input_image._completeFrame;
+    if (!latest_keyframe_complete_)
+      return WEBRTC_VIDEO_CODEC_ERROR;
+
+    // Check for reference updates and last reference buffer corruption and
+    // signal successful reference propagation or frame corruption to the
+    // encoder.
+    int reference_updates = 0;
+    if (vpx_codec_control(decoder_, VP8D_GET_LAST_REF_UPDATES,
+                          &reference_updates)) {
+      // Reset to avoid requesting key frames too often.
+      if (propagation_cnt_ > 0)
+        propagation_cnt_ = 0;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    int corrupted = 0;
+    if (vpx_codec_control(decoder_, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
+      // Reset to avoid requesting key frames too often.
+      if (propagation_cnt_ > 0)
+        propagation_cnt_ = 0;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    int16_t picture_id = -1;
+    if (codec_specific_info) {
+      picture_id = codec_specific_info->codecSpecific.VP8.pictureId;
+    }
+    if (picture_id > -1) {
+      if (((reference_updates & VP8_GOLD_FRAME) ||
+          (reference_updates & VP8_ALTR_FRAME)) && !corrupted) {
+        decode_complete_callback_->ReceivedDecodedReferenceFrame(picture_id);
+      }
+      decode_complete_callback_->ReceivedDecodedFrame(picture_id);
+    }
+    if (corrupted) {
+      // we can decode but with artifacts
+      return WEBRTC_VIDEO_CODEC_REQUEST_SLI;
+    }
+  }
+  // Check Vs. threshold
+  if (propagation_cnt_ > kVp8ErrorPropagationTh) {
+    // Reset to avoid requesting key frames too often.
+    propagation_cnt_ = 0;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Feeds each partition described by |fragmentation| to libvpx individually
+// (VPX_CODEC_USE_INPUT_PARTITION mode), then signals end-of-frame with an
+// empty decode call; an empty fragmentation list thus triggers full-frame
+// concealment. Returns WEBRTC_VIDEO_CODEC_OK or _ERROR.
+int VP8Decoder::DecodePartitions(
+    const EncodedImage& input_image,
+    const RTPFragmentationHeader* fragmentation) {
+  for (int i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
+    const uint8_t* partition = input_image._buffer +
+        fragmentation->fragmentationOffset[i];
+    const uint32_t partition_length =
+        fragmentation->fragmentationLength[i];
+    if (vpx_codec_decode(decoder_,
+                         partition,
+                         partition_length,
+                         0,
+                         VPX_DL_REALTIME)) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  }
+  // Signal end of frame data. If there was no frame data this will trigger
+  // a full frame concealment.
+  if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME))
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Copies the decoded libvpx image |img| into the packed |decoded_image_|
+// buffer (Y followed by half-resolution U and V planes, stripping libvpx
+// strides), stamps it with |timestamp|, and forwards it to the decode
+// callback. A NULL |img| means the decoder produced no displayable frame.
+int VP8Decoder::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
+  if (img == NULL) {
+    // Decoder OK and NULL image => No show frame
+    return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+  }
+
+  // Allocate memory for decoded image
+  // 3/2 bytes per pixel: full-size Y plus quarter-size U and V.
+  uint32_t required_size = (3 * img->d_h * img->d_w) >> 1;
+  if (required_size > decoded_image_._size) {
+    delete [] decoded_image_._buffer;
+    decoded_image_._buffer = NULL;
+  }
+  if (decoded_image_._buffer == NULL) {
+    decoded_image_._size = required_size;
+    decoded_image_._buffer = new uint8_t[decoded_image_._size];
+  }
+  uint8_t* buf;
+  uint32_t pos = 0;
+  uint32_t plane, y;
+
+  // Copy row by row to drop the per-plane stride padding.
+  for (plane = 0; plane < 3; plane++) {
+    unsigned int width = (plane ? (img->d_w + 1) >> 1 : img->d_w);
+    unsigned int height = (plane ? (img->d_h + 1) >> 1 : img->d_h);
+    buf = img->planes[plane];
+    for(y = 0; y < height; y++) {
+      memcpy(&decoded_image_._buffer[pos], buf, width);
+      pos += width;
+      buf += img->stride[plane];
+    }
+  }
+
+  // Set image parameters
+  decoded_image_._height = img->d_h;
+  decoded_image_._width = img->d_w;
+  decoded_image_._length = (3 * img->d_h * img->d_w) >> 1;
+  decoded_image_._timeStamp = timestamp;
+  int ret = decode_complete_callback_->Decoded(decoded_image_);
+  if (ret != 0)
+    return ret;
+
+  // Remember image format for later
+  image_format_ = img->fmt;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores the callback that receives decoded frames; ownership stays with
+// the caller.
+int VP8Decoder::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  decode_complete_callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Frees all buffers and destroys the libvpx context. Safe to call multiple
+// times; afterwards the decoder must be re-initialized via InitDecode().
+int VP8Decoder::Release() {
+  if (decoded_image_._buffer != NULL) {
+    delete [] decoded_image_._buffer;
+    decoded_image_._buffer = NULL;
+  }
+  if (last_keyframe_._buffer != NULL) {
+    delete [] last_keyframe_._buffer;
+    last_keyframe_._buffer = NULL;
+  }
+  if (decoder_ != NULL) {
+    if(vpx_codec_destroy(decoder_)) {
+      // Note: decoder_ is intentionally not freed when destroy fails.
+      return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    delete decoder_;
+    decoder_ = NULL;
+  }
+  if (ref_frame_ != NULL) {
+    vpx_img_free(&ref_frame_->img);
+    delete ref_frame_;
+    ref_frame_ = NULL;
+  }
+  inited_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Clones this decoder: creates a new VP8Decoder, replays the last stored key
+// frame into it, copies the three VP8 reference buffers (last/golden/altref)
+// across, and duplicates the relevant member state. Requires that at least
+// one key frame has been decoded. Returns NULL (and leaks nothing) on any
+// failure; the caller owns the returned decoder.
+VideoDecoder* VP8Decoder::Copy() {
+  // Sanity checks.
+  if (!inited_) {
+    // Not initialized.
+    assert(false);
+    return NULL;
+  }
+  if (decoded_image_._buffer == NULL) {
+    // Nothing has been decoded before; cannot clone.
+    return NULL;
+  }
+  if (last_keyframe_._buffer == NULL) {
+    // Cannot clone if we have no key frame to start with.
+    return NULL;
+  }
+  // Create a new VideoDecoder object
+  VP8Decoder *copy = new VP8Decoder;
+
+  // Initialize the new decoder
+  if (copy->InitDecode(&codec_, 1) != WEBRTC_VIDEO_CODEC_OK) {
+    delete copy;
+    return NULL;
+  }
+  // Inject last key frame into new decoder.
+  if (vpx_codec_decode(copy->decoder_, last_keyframe_._buffer,
+                       last_keyframe_._length, NULL, VPX_DL_REALTIME)) {
+    delete copy;
+    return NULL;
+  }
+  // Allocate memory for reference image copy
+  assert(decoded_image_._width > 0);
+  assert(decoded_image_._height > 0);
+  assert(image_format_ > VPX_IMG_FMT_NONE);
+  // Check if frame format has changed.
+  if (ref_frame_ &&
+      (decoded_image_._width != ref_frame_->img.d_w ||
+          decoded_image_._height != ref_frame_->img.d_h ||
+          image_format_ != ref_frame_->img.fmt)) {
+    vpx_img_free(&ref_frame_->img);
+    delete ref_frame_;
+    ref_frame_ = NULL;
+  }
+
+
+  if (!ref_frame_) {
+    ref_frame_ = new vpx_ref_frame_t;
+
+    if (!vpx_img_alloc(&ref_frame_->img,
+                       static_cast<vpx_img_fmt_t>(image_format_),
+                       decoded_image_._width, decoded_image_._height, 1)) {
+      assert(false);
+      delete copy;
+      return NULL;
+    }
+  }
+  // Transfer each reference buffer type in turn via CopyReference().
+  const vpx_ref_frame_type_t type_vec[] = { VP8_LAST_FRAME, VP8_GOLD_FRAME,
+      VP8_ALTR_FRAME };
+  for (uint32_t ix = 0;
+      ix < sizeof(type_vec) / sizeof(vpx_ref_frame_type_t); ++ix) {
+    ref_frame_->frame_type = type_vec[ix];
+    if (CopyReference(copy) < 0) {
+      delete copy;
+      return NULL;
+    }
+  }
+  // Copy all member variables (that are not set in initialization).
+  copy->feedback_mode_ = feedback_mode_;
+  copy->image_format_ = image_format_;
+  copy->last_keyframe_ = last_keyframe_; // Shallow copy.
+  // Allocate memory. (Discard copied _buffer pointer.)
+  copy->last_keyframe_._buffer = new uint8_t[last_keyframe_._size];
+  memcpy(copy->last_keyframe_._buffer, last_keyframe_._buffer,
+         last_keyframe_._length);
+
+  // Initialize decoded_image_.
+  copy->decoded_image_ = decoded_image_;  // Shallow copy
+  copy->decoded_image_._buffer = NULL;
+  if (decoded_image_._size) {
+    copy->decoded_image_._buffer = new uint8_t[decoded_image_._size];
+  }
+  return static_cast<VideoDecoder*>(copy);
+}
+
+// Copies one reference buffer (selected via ref_frame_->frame_type, which
+// the caller must set beforehand) from this decoder into |copyTo|.
+// Returns 0 on success, -1 on any libvpx control failure.
+int VP8Decoder::CopyReference(VP8Decoder* copyTo) {
+  // The type of frame to copy should be set in ref_frame_->frame_type
+  // before the call to this function.
+  if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_)
+      != VPX_CODEC_OK) {
+    return -1;
+  }
+  if (vpx_codec_control(copyTo->decoder_, VP8_SET_REFERENCE, ref_frame_)
+      != VPX_CODEC_OK) {
+    return -1;
+  }
+  return 0;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/source/vp8.gypi b/trunk/src/modules/video_coding/codecs/vp8/main/source/vp8.gypi
new file mode 100644
index 0000000..89d8c78
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/source/vp8.gypi
@@ -0,0 +1,121 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_vp8',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+      ],
+      'include_dirs': [
+        '../interface',
+        '<(webrtc_root)/common_video/interface',
+        '<(webrtc_root)/modules/video_coding/codecs/interface',
+        '<(webrtc_root)/modules/interface',
+      ],
+      'conditions': [
+        ['build_with_chromium==1', {
+          'dependencies': [
+            '<(webrtc_root)/../libvpx/libvpx.gyp:libvpx',
+          ],
+          'defines': [
+            'WEBRTC_LIBVPX_VERSION=960' # Bali
+          ],
+        },{
+          'dependencies': [
+            '<(webrtc_root)/../third_party/libvpx/libvpx.gyp:libvpx',
+          ],
+          'defines': [
+            'WEBRTC_LIBVPX_VERSION=971' # Cayuga
+          ],
+          'sources': [
+            'temporal_layers.h',
+            'temporal_layers.cc',
+          ],
+        }],
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '<(webrtc_root)/common_video/interface',
+          '<(webrtc_root)/modules/video_coding/codecs/interface',
+        ],
+      },
+      'sources': [
+        'reference_picture_selection.h',
+        'reference_picture_selection.cc',
+        '../interface/vp8.h',
+        '../interface/vp8_common_types.h',
+        'vp8.cc',
+      ],
+    },
+  ], # targets
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'vp8_test',
+          'type': 'executable',
+          'dependencies': [
+            'test_framework',
+            'webrtc_vp8',
+            '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/../test/test.gyp:test_support',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            # header files
+            '../test/benchmark.h',
+            '../test/dual_decoder_test.h',
+            '../test/normal_async_test.h',
+            '../test/packet_loss_test.h',
+            '../test/rps_test.h',
+            '../test/unit_test.h',
+
+           # source files
+            '../test/benchmark.cc',
+            '../test/dual_decoder_test.cc',
+            '../test/normal_async_test.cc',
+            '../test/packet_loss_test.cc',
+            '../test/rps_test.cc',
+            '../test/tester.cc',
+            '../test/unit_test.cc',
+          ],
+        },
+        {
+          'target_name': 'vp8_unittests',
+          'type': 'executable',
+          'dependencies': [
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../third_party/libvpx/libvpx.gyp:libvpx',
+            'webrtc_vp8',
+          ],
+          'include_dirs': [
+            '<(webrtc_root)/../third_party/libvpx/source/libvpx',
+          ],
+          'sources': [
+            'reference_picture_selection_unittest.cc',
+            'temporal_layers_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/benchmark.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/benchmark.cc
new file mode 100644
index 0000000..4fc0e25
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/benchmark.cc
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark.h"
+#include "testsupport/fileutils.h"
+#include "vp8.h"
+
+using namespace webrtc;
+
+// Default benchmark: fixed name/description, results written to
+// VP8Benchmark.txt in the test output directory.
+VP8Benchmark::VP8Benchmark()
+    : Benchmark("VP8Benchmark", "VP8 benchmark over a range of test cases",
+                webrtc::test::OutputPath() + "VP8Benchmark.txt", "VP8") {
+}
+
+// Custom name/description; results still go to the default VP8Benchmark.txt.
+VP8Benchmark::VP8Benchmark(std::string name, std::string description)
+    : Benchmark(name, description,
+                webrtc::test::OutputPath() + "VP8Benchmark.txt",
+                "VP8") {
+}
+
+// Fully custom benchmark: caller also chooses the results file.
+VP8Benchmark::VP8Benchmark(std::string name, std::string description,
+                           std::string resultsFileName)
+    : Benchmark(name, description, resultsFileName, "VP8") {
+}
+
+// Factory hook for the benchmark framework; caller owns the encoder.
+VideoEncoder* VP8Benchmark::GetNewEncoder() {
+    return VP8Encoder::Create();
+}
+
+// Factory hook for the benchmark framework; caller owns the decoder.
+VideoDecoder* VP8Benchmark::GetNewDecoder() {
+    return VP8Decoder::Create();
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/benchmark.h b/trunk/src/modules/video_coding/codecs/vp8/main/test/benchmark.h
new file mode 100644
index 0000000..5143de3
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/benchmark.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
+
+#include "../../../test_framework/benchmark.h"
+
+// Benchmark specialization that plugs the VP8 encoder/decoder factories into
+// the generic test-framework Benchmark harness.
+class VP8Benchmark : public Benchmark
+{
+public:
+    VP8Benchmark();
+    VP8Benchmark(std::string name, std::string description);
+    VP8Benchmark(std::string name, std::string description, std::string resultsFileName);
+
+protected:
+    // Factory hooks used by the Benchmark base class; caller owns the result.
+    virtual webrtc::VideoEncoder* GetNewEncoder();
+    virtual webrtc::VideoDecoder* GetNewDecoder();
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/dual_decoder_test.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/dual_decoder_test.cc
new file mode 100644
index 0000000..478b034
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/dual_decoder_test.cc
@@ -0,0 +1,221 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dual_decoder_test.h"
+
+#include <assert.h>
+#include <string.h> // memcmp
+#include <time.h>
+
+#include "testsupport/fileutils.h"
+
+VP8DualDecoderTest::VP8DualDecoderTest(float bitRate)
+:
+VP8NormalAsyncTest(bitRate)
+{
+    _decoder2 = NULL;
+}
+
+VP8DualDecoderTest::VP8DualDecoderTest()
+:
+VP8NormalAsyncTest("VP8 Dual Decoder Test", "Tests VP8 dual decoder", 1),
+_decoder2(NULL)
+{}
+
+VP8DualDecoderTest::~VP8DualDecoderTest()
+{
+    if(_decoder2)
+    {
+        _decoder2->Release();
+        delete _decoder2;
+    }
+
+    _decodedVideoBuffer2.Free();
+}
+
+void
+VP8DualDecoderTest::Perform()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    CodecSettings(352, 288, 30, _bitRate);
+    Setup();
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer2.VerifyAndAllocate(_lengthSourceFrame);
+    if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _decoder->InitDecode(&_inst,1);
+
+    FrameQueue frameQueue;
+    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+    DualDecoderCompleteCallback decCallback(&_decodedVideoBuffer);
+    DualDecoderCompleteCallback decCallback2(&_decodedVideoBuffer2);
+    _encoder->RegisterEncodeCompleteCallback(&encCallback);
+    _decoder->RegisterDecodeCompleteCallback(&decCallback);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _totalEncodePipeTime = _totalDecodePipeTime = 0;
+    bool complete = false;
+    _framecnt = 0;
+    _encFrameCnt = 0;
+    _decFrameCnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    double starttime = clock()/(double)CLOCKS_PER_SEC;
+    while (!complete)
+    {
+        if (_encFrameCnt == 10)
+        {
+            // initialize second decoder and copy state
+            _decoder2 = static_cast<webrtc::VP8Decoder *>(_decoder->Copy());
+            assert(_decoder2 != NULL);
+            _decoder2->RegisterDecodeCompleteCallback(&decCallback2);
+        }
+        CodecSpecific_InitBitrate();
+        complete = Encode();
+        if (!frameQueue.Empty() || complete)
+        {
+            while (!frameQueue.Empty())
+            {
+                _frameToDecode =
+                    static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+                int lost = DoPacketLoss();
+                if (lost == 2)
+                {
+                    // Lost the whole frame, continue
+                    _missingFrames = true;
+                    delete _frameToDecode;
+                    _frameToDecode = NULL;
+                    continue;
+                }
+                int ret = Decode(lost);
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                if (ret < 0)
+                {
+                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                    exit(EXIT_FAILURE);
+                }
+                else if (ret == 0)
+                {
+                    _framecnt++;
+                }
+                else
+                {
+                    fprintf(stderr,
+                        "\n\nPositive return value from decode!\n\n");
+                }
+            }
+        }
+    }
+    double endtime = clock()/(double)CLOCKS_PER_SEC;
+    double totalExecutionTime = endtime - starttime;
+    printf("Total execution time: %.1f s\n", totalExecutionTime);
+    _sumEncBytes = encCallback.EncodedBytes();
+    double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
+    double avgEncTime = _totalEncodeTime / _encFrameCnt;
+    double avgDecTime = _totalDecodeTime / _decFrameCnt;
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
+    printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
+    printf("Average encode pipeline time: %.1f ms\n",
+           1000 * _totalEncodePipeTime / _encFrameCnt);
+    printf("Average decode pipeline  time: %.1f ms\n",
+           1000 * _totalDecodePipeTime / _decFrameCnt);
+    printf("Number of encoded frames: %u\n", _encFrameCnt);
+    printf("Number of decoded frames: %u\n", _decFrameCnt);
+    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
+        _bitRate << " kbps" << std::endl;
+    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    _encoder->Release();
+    _decoder->Release();
+    Teardown();
+}
+
+
+int
+VP8DualDecoderTest::Decode(int lossValue)
+{
+    _sumEncBytes += _frameToDecode->_frame->GetLength();
+    webrtc::EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+    encodedImage._completeFrame = !lossValue;
+    _decodeCompleteTime = 0;
+    _decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
+    int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                               _frameToDecode->_codecSpecificInfo);
+    // second decoder
+    if (_decoder2)
+    {
+        int ret2 = _decoder2->Decode(encodedImage, _missingFrames, NULL,
+                                     _frameToDecode->_codecSpecificInfo,
+                                     0 /* dummy */);
+
+        // check return values
+        if (ret < 0 || ret2 < 0 || ret2 != ret)
+        {
+            exit(EXIT_FAILURE);
+        }
+
+        // compare decoded images; pass each buffer's own length (bug fix)
+        if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
+            _decodedVideoBuffer.GetLength(),
+            _decodedVideoBuffer2.GetBuffer(), _decodedVideoBuffer2.GetLength()))
+        {
+            fprintf(stderr,"\n\nClone output different from master.\n\n");
+            exit(EXIT_FAILURE);
+        }
+
+    }
+
+    _missingFrames = false;
+    return ret;
+}
+
+
+bool
+VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                                    const void* ptrB, unsigned int bLengthBytes)
+{
+    if (aLengthBytes != bLengthBytes)
+    {
+        return false;
+    }
+
+    return memcmp(ptrA, ptrB, aLengthBytes) == 0;
+}
+
+WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::RawImage& image)
+{
+    _decodedVideoBuffer->VerifyAndAllocate(image._length);
+    _decodedVideoBuffer->CopyBuffer(image._length, image._buffer);
+    _decodedVideoBuffer->SetWidth(image._width);
+    _decodedVideoBuffer->SetHeight(image._height);
+    _decodedVideoBuffer->SetTimeStamp(image._timeStamp);
+    _decodeComplete = true;
+    return 0;
+}
+
+bool DualDecoderCompleteCallback::DecodeComplete()
+{
+    if (_decodeComplete)
+    {
+        _decodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/dual_decoder_test.h b/trunk/src/modules/video_coding/codecs/vp8/main/test/dual_decoder_test.h
new file mode 100644
index 0000000..4af4e3e
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/dual_decoder_test.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_DUAL_DECODER_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_DUAL_DECODER_TEST_H_
+
+#include "vp8.h"
+#include "normal_async_test.h"
+
+class DualDecoderCompleteCallback;
+
+class VP8DualDecoderTest : public VP8NormalAsyncTest
+{
+public:
+    VP8DualDecoderTest(float bitRate);
+    VP8DualDecoderTest();
+    virtual ~VP8DualDecoderTest();
+    virtual void Perform();
+protected:
+    VP8DualDecoderTest(std::string name, std::string description,
+                       unsigned int testNo)
+    : VP8NormalAsyncTest(name, description, testNo) {}
+    virtual int Decode(int lossValue = 0);
+
+    webrtc::VP8Decoder*     _decoder2;
+    TestVideoBuffer         _decodedVideoBuffer2;
+    static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes, 
+        const void *ptrB, unsigned int bLengthBytes);
+private:
+};
+
+class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
+{
+public:
+    DualDecoderCompleteCallback(TestVideoBuffer* buffer)
+    : _decodedVideoBuffer(buffer), _decodeComplete(false) {}
+    WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
+    bool DecodeComplete();
+private:
+    TestVideoBuffer* _decodedVideoBuffer;
+    bool _decodeComplete;
+};
+
+
+#endif
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/normal_async_test.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/normal_async_test.cc
new file mode 100644
index 0000000..9ed508b
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/normal_async_test.cc
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_async_test.h"
+
+using namespace webrtc;
+
+VP8NormalAsyncTest::VP8NormalAsyncTest(WebRtc_UWord32 bitRate) :
+    NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", bitRate, 1),
+    _hasReceivedRPSI(false)
+{
+}
+
+VP8NormalAsyncTest::VP8NormalAsyncTest(WebRtc_UWord32 bitRate, unsigned int testNo):
+    NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", bitRate, testNo),
+    _hasReceivedRPSI(false)
+{
+}
+
+void
+VP8NormalAsyncTest::CodecSettings(int width, int height, WebRtc_UWord32 frameRate /*=30*/, WebRtc_UWord32 bitRate /*=0*/)
+{
+    if (bitRate > 0)
+    {
+        _bitRate = bitRate;
+
+    }else if (_bitRate == 0)
+    {
+        _bitRate = 600;
+    }
+    _inst.codecType = kVideoCodecVP8;
+    _inst.codecSpecific.VP8.feedbackModeOn = true;
+    _inst.codecSpecific.VP8.pictureLossIndicationOn = true;
+    _inst.codecSpecific.VP8.complexity = kComplexityNormal;
+    _inst.maxFramerate = (unsigned char)frameRate;
+    _inst.startBitrate = _bitRate;
+    _inst.maxBitrate = 8000;
+    _inst.width = width;
+    _inst.height = height;
+}
+
+void
+VP8NormalAsyncTest::CodecSpecific_InitBitrate()
+{
+    if (_bitRate == 0)
+    {
+        _encoder->SetRates(600, _inst.maxFramerate);
+    }else
+    {
+         _encoder->SetRates(_bitRate, _inst.maxFramerate);
+    }
+}
+
+WebRtc_Word32
+VP8NormalAsyncTest::ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId)
+{
+    _pictureIdRPSI = pictureId;
+    _hasReceivedRPSI = true;
+    return 0;
+}
+
+CodecSpecificInfo*
+VP8NormalAsyncTest::CreateEncoderSpecificInfo() const
+{
+    CodecSpecificInfo* vp8CodecSpecificInfo = new CodecSpecificInfo();
+    vp8CodecSpecificInfo->codecType = kVideoCodecVP8;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
+
+    _hasReceivedSLI = false;
+    _hasReceivedRPSI = false;
+
+    return vp8CodecSpecificInfo;
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/normal_async_test.h b/trunk/src/modules/video_coding/codecs/vp8/main/test/normal_async_test.h
new file mode 100644
index 0000000..f49c812
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/normal_async_test.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_NORMAL_ASYNC_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_NORMAL_ASYNC_TEST_H_
+
+#include "../../../test_framework/normal_async_test.h"
+
+class VP8NormalAsyncTest : public NormalAsyncTest
+{
+public:
+    VP8NormalAsyncTest(WebRtc_UWord32 bitRate);
+    VP8NormalAsyncTest(WebRtc_UWord32 bitRate, unsigned int testNo);
+    VP8NormalAsyncTest() : NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", 1) {}
+protected:
+    VP8NormalAsyncTest(std::string name, std::string description, unsigned int testNo) : NormalAsyncTest(name, description, testNo) {}
+    virtual void CodecSpecific_InitBitrate();
+    virtual void CodecSettings(int width, int height, WebRtc_UWord32 frameRate=30, WebRtc_UWord32 bitRate=0);
+    virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+private:
+    mutable bool  _hasReceivedRPSI;
+    WebRtc_UWord64  _pictureIdRPSI;
+};
+
+#endif
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/packet_loss_test.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/packet_loss_test.cc
new file mode 100644
index 0000000..69e028a
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/packet_loss_test.cc
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "packet_loss_test.h"
+#include <cassert>
+
+VP8PacketLossTest::VP8PacketLossTest()
+:
+PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode")
+{
+}
+
+VP8PacketLossTest::VP8PacketLossTest(std::string name, std::string description)
+:
+PacketLossTest(name, description)
+{
+}
+
+VP8PacketLossTest::VP8PacketLossTest(double lossRate,
+                                     bool useNack,
+                                     int rttFrames)
+:
+PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode",
+               lossRate, useNack, rttFrames)
+{
+}
+
+int VP8PacketLossTest::ByteLoss(int size, unsigned char* /* pkg */, int bytesToLose)
+{
+    int retLength = size - bytesToLose;
+    if (retLength < 4)
+    {
+        retLength = 4;
+    }
+    return retLength;
+}
+
+WebRtc_Word32
+VP8PacketLossTest::ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId)
+{
+    _pictureIdRPSI = pictureId;
+    _hasReceivedRPSI = true;
+    return 0;
+}
+
+webrtc::CodecSpecificInfo*
+VP8PacketLossTest::CreateEncoderSpecificInfo() const
+{
+    webrtc::CodecSpecificInfo* vp8CodecSpecificInfo =
+      new webrtc::CodecSpecificInfo();
+    vp8CodecSpecificInfo->codecType = webrtc::kVideoCodecVP8;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
+
+    _hasReceivedSLI = false;
+    _hasReceivedRPSI = false;
+
+    return vp8CodecSpecificInfo;
+}
+
+bool VP8PacketLossTest::PacketLoss(double lossRate, int numLosses) {
+  if (numLosses)
+    return true;
+  return RandUniform() < lossRate;
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/packet_loss_test.h b/trunk/src/modules/video_coding/codecs/vp8/main/test/packet_loss_test.h
new file mode 100644
index 0000000..262ee2f
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/packet_loss_test.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
+
+#include "../../../test_framework/packet_loss_test.h"
+
+class VP8PacketLossTest : public PacketLossTest
+{
+public:
+    VP8PacketLossTest();
+    VP8PacketLossTest(double lossRate, bool useNack, int rttFrames);
+
+protected:
+    VP8PacketLossTest(std::string name, std::string description);
+    virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
+    WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+    // |lossRate| is the probability of packet loss between 0 and 1.
+    // |numLosses| is the number of packets already lost in the current frame.
+    virtual bool PacketLoss(double lossRate, int numLosses);
+
+    webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
+
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/rps_test.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/rps_test.cc
new file mode 100644
index 0000000..cd1c42e
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/rps_test.cc
@@ -0,0 +1,311 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rps_test.h"
+
+#include <assert.h>
+#include <string.h> // memcmp
+#include <time.h>
+
+#include "vp8.h"
+
+VP8RpsTest::VP8RpsTest(float bitRate)
+    : VP8NormalAsyncTest(bitRate),
+      decoder2_(webrtc::VP8Decoder::Create()),
+      sli_(false) {
+}
+
+VP8RpsTest::VP8RpsTest()
+    : VP8NormalAsyncTest("VP8 Reference Picture Selection Test",
+                         "VP8 Reference Picture Selection Test", 1),
+      decoder2_(webrtc::VP8Decoder::Create()),
+      sli_(false) {
+}
+
+VP8RpsTest::~VP8RpsTest() {
+  if (decoder2_) {
+    decoder2_->Release();
+    delete decoder2_;
+  }
+  decoded_frame2_.Free();
+}
+
+void VP8RpsTest::Perform() {
+  _inname = "test/testFiles/foreman_cif.yuv";
+  CodecSettings(352, 288, 30, _bitRate);
+  Setup();
+  _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+  _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+  decoded_frame2_.VerifyAndAllocate(_lengthSourceFrame);
+
+  // Enable RPS functionality
+  _inst.codecSpecific.VP8.pictureLossIndicationOn = true;
+  _inst.codecSpecific.VP8.feedbackModeOn = true;
+
+  if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
+    exit(EXIT_FAILURE);
+
+  _decoder->InitDecode(&_inst,1);
+  decoder2_->InitDecode(&_inst,1);
+
+  FrameQueue frameQueue;
+  VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+  RpsDecodeCompleteCallback decCallback(&_decodedVideoBuffer);
+  RpsDecodeCompleteCallback decCallback2(&decoded_frame2_);
+  _encoder->RegisterEncodeCompleteCallback(&encCallback);
+  _decoder->RegisterDecodeCompleteCallback(&decCallback);
+  decoder2_->RegisterDecodeCompleteCallback(&decCallback2);
+
+  if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    exit(EXIT_FAILURE);
+
+  _totalEncodeTime = _totalDecodeTime = 0;
+  _totalEncodePipeTime = _totalDecodePipeTime = 0;
+  bool complete = false;
+  _framecnt = 0;
+  _encFrameCnt = 0;
+  _decFrameCnt = 0;
+  _sumEncBytes = 0;
+  _lengthEncFrame = 0;
+  double starttime = clock()/(double)CLOCKS_PER_SEC;
+  while (!complete) {
+    CodecSpecific_InitBitrate();
+    complete = EncodeRps(&decCallback2);
+    if (!frameQueue.Empty() || complete) {
+      while (!frameQueue.Empty()) {
+        _frameToDecode =
+            static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+        int lost = DoPacketLoss();
+        if (lost == 2) {
+            // Lost the whole frame, continue
+            _missingFrames = true;
+            delete _frameToDecode;
+            _frameToDecode = NULL;
+            continue;
+        }
+        int ret = Decode(lost);
+        delete _frameToDecode;
+        _frameToDecode = NULL;
+        if (ret < 0) {
+            fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+            exit(EXIT_FAILURE);
+        }
+        else if (ret == 0) {
+            _framecnt++;
+        }
+        else {
+            fprintf(stderr,
+                "\n\nPositive return value from decode!\n\n");
+        }
+      }
+    }
+  }
+  double endtime = clock()/(double)CLOCKS_PER_SEC;
+  double totalExecutionTime = endtime - starttime;
+  printf("Total execution time: %.1f s\n", totalExecutionTime);
+  _sumEncBytes = encCallback.EncodedBytes();
+  double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
+  double avgEncTime = _totalEncodeTime / _encFrameCnt;
+  double avgDecTime = _totalDecodeTime / _decFrameCnt;
+  printf("Actual bitrate: %f kbps\n", actualBitRate);
+  printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
+  printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
+  printf("Average encode pipeline time: %.1f ms\n",
+         1000 * _totalEncodePipeTime / _encFrameCnt);
+  printf("Average decode pipeline  time: %.1f ms\n",
+         1000 * _totalDecodePipeTime / _decFrameCnt);
+  printf("Number of encoded frames: %u\n", _encFrameCnt);
+  printf("Number of decoded frames: %u\n", _decFrameCnt);
+  (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
+      _bitRate << " kbps" << std::endl;
+  (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+  (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+  _encoder->Release();
+  _decoder->Release();
+  Teardown();
+}
+
+bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
+  _lengthEncFrame = 0;
+  size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
+  if (bytes_read < _lengthSourceFrame)
+    return true;
+  _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+  _inputVideoBuffer.SetTimeStamp((unsigned int)
+      (_encFrameCnt * 9e4 / _inst.maxFramerate));
+  _inputVideoBuffer.SetWidth(_inst.width);
+  _inputVideoBuffer.SetHeight(_inst.height);
+  webrtc::RawImage rawImage;
+  VideoBufferToRawImage(_inputVideoBuffer, rawImage);
+  if (feof(_sourceFile) != 0) {
+      return true;
+  }
+  _encodeCompleteTime = 0;
+  _encodeTimes[rawImage._timeStamp] = tGetTime();
+  webrtc::VideoFrameType frameType = webrtc::kDeltaFrame;
+
+  webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
+  codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
+      decodeCallback->LastDecodedRefPictureId(
+          &codecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI);
+  if (sli_) {
+    codecSpecificInfo->codecSpecific.VP8.pictureIdSLI =
+        decodeCallback->LastDecodedPictureId();
+    codecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = true;
+    sli_ = false;
+  }
+  printf("Encoding: %u\n", _framecnt);
+  int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameType);
+  if (ret < 0)
+    printf("Failed to encode: %u\n", _framecnt);
+
+  if (codecSpecificInfo != NULL) {
+      delete codecSpecificInfo;
+      codecSpecificInfo = NULL;
+  }
+  if (_encodeCompleteTime > 0) {
+      _totalEncodeTime += _encodeCompleteTime -
+          _encodeTimes[rawImage._timeStamp];
+  }
+  else {
+      _totalEncodeTime += tGetTime() - _encodeTimes[rawImage._timeStamp];
+  }
+  return false;
+}
+
+//#define FRAME_LOSS 1
+
+int VP8RpsTest::Decode(int lossValue) {
+  _sumEncBytes += _frameToDecode->_frame->GetLength();
+  webrtc::EncodedImage encodedImage;
+  VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+  encodedImage._completeFrame = !lossValue;
+  _decodeCompleteTime = 0;
+  _decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
+  int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                             _frameToDecode->_codecSpecificInfo);
+  // Drop every 10th frame for the second decoder
+#if FRAME_LOSS
+  if (_framecnt == 0 || _framecnt % 10 != 0) {
+    printf("Decoding: %u\n", _framecnt);
+    if (_framecnt > 1 && (_framecnt - 1) % 10 == 0)
+      _missingFrames = true;
+#else
+  if (true) {
+    if (_framecnt > 0 && _framecnt % 10 == 0) {
+      encodedImage._length = std::rand() % encodedImage._length;
+      printf("Decoding with loss: %u\n", _framecnt);
+    }
+    else
+      printf("Decoding: %u\n", _framecnt);
+#endif
+    int ret2 = decoder2_->Decode(encodedImage, _missingFrames, NULL,
+                                 _frameToDecode->_codecSpecificInfo,
+                                 0 /* dummy */);
+
+    // check return values
+    if (ret < 0 || ret2 < 0) {
+      return -1;
+    } else if (ret2 == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI ||
+        ret2 == WEBRTC_VIDEO_CODEC_REQUEST_SLI) {
+      sli_ = true;
+    }
+
+    // compare decoded images; pass each buffer's own length (bug fix)
+#if FRAME_LOSS
+    if (!_missingFrames) {
+      if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
+        _decodedVideoBuffer.GetLength(),
+        decoded_frame2_.GetBuffer(), decoded_frame2_.GetLength())) {
+        fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
+                _framecnt);
+        return -1;
+      }
+    }
+#else
+    if (_framecnt > 0 && _framecnt % 10 != 0) {
+      if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
+        _decodedVideoBuffer.GetLength(),
+        decoded_frame2_.GetBuffer(), decoded_frame2_.GetLength())) {
+        fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
+                _framecnt);
+        return -1;
+      }
+    }
+#endif
+  }
+#if FRAME_LOSS
+  else
+    printf("Dropping %u\n", _framecnt);
+#endif
+  _missingFrames = false;
+  return 0;
+}
+
+
+bool
+VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                            const void* ptrB, unsigned int bLengthBytes) {
+  if (aLengthBytes != bLengthBytes)
+    return false;
+  return memcmp(ptrA, ptrB, aLengthBytes) == 0;
+}
+
+RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
+    : decoded_frame_(buffer),
+      decode_complete_(false),
+      last_decoded_picture_id_(0),
+      last_decoded_ref_picture_id_(0),
+      updated_ref_picture_id_(false) {
+}
+
+WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::RawImage& image) {
+  decoded_frame_->VerifyAndAllocate(image._length);
+  decoded_frame_->CopyBuffer(image._length, image._buffer);
+  decoded_frame_->SetWidth(image._width);
+  decoded_frame_->SetHeight(image._height);
+  decoded_frame_->SetTimeStamp(image._timeStamp);
+  decode_complete_ = true;
+  return 0;
+}
+
+bool RpsDecodeCompleteCallback::DecodeComplete() {
+  if (decode_complete_)
+  {
+    decode_complete_ = false;
+    return true;
+  }
+  return false;
+}
+
+WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 picture_id) {
+  last_decoded_ref_picture_id_ = picture_id & 0x7FFF;
+  updated_ref_picture_id_ = true;
+  return 0;
+}
+
+WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedFrame(
+    const WebRtc_UWord64 picture_id) {
+  last_decoded_picture_id_ = picture_id & 0x3F;
+  return 0;
+}
+
+WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedPictureId() const {
+  return last_decoded_picture_id_;
+}
+
+WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedRefPictureId(
+    bool *updated) {
+  if (updated)
+    *updated = updated_ref_picture_id_;
+  updated_ref_picture_id_ = false;
+  return last_decoded_ref_picture_id_;
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/rps_test.h b/trunk/src/modules/video_coding/codecs/vp8/main/test/rps_test.h
new file mode 100644
index 0000000..cbdbfa3
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/rps_test.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
+
+#include "vp8.h"
+#include "normal_async_test.h"
+
+class RpsDecodeCompleteCallback;
+
+class VP8RpsTest : public VP8NormalAsyncTest {
+ public:
+  VP8RpsTest(float bitRate);
+  VP8RpsTest();
+  virtual ~VP8RpsTest();
+  virtual void Perform();
+ private:
+  VP8RpsTest(std::string name, std::string description, unsigned int testNo)
+  : VP8NormalAsyncTest(name, description, testNo) {}
+  virtual bool EncodeRps(RpsDecodeCompleteCallback* decodeCallback);
+  virtual int Decode(int lossValue = 0);
+
+  static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
+      const void *ptrB, unsigned int bLengthBytes);
+
+  webrtc::VP8Decoder* decoder2_;
+  TestVideoBuffer decoded_frame2_;
+  bool sli_;
+};
+
+class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
+ public:
+  RpsDecodeCompleteCallback(TestVideoBuffer* buffer);
+  WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
+  bool DecodeComplete();
+  WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
+  WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id);
+  WebRtc_UWord64 LastDecodedPictureId() const;
+  WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);
+
+ private:
+  TestVideoBuffer* decoded_frame_;
+  bool decode_complete_;
+  WebRtc_UWord64 last_decoded_picture_id_;
+  WebRtc_UWord64 last_decoded_ref_picture_id_;
+  bool updated_ref_picture_id_;
+};
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/tester.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/tester.cc
new file mode 100644
index 0000000..350d9a3
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/tester.cc
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <fstream>
+#include <iostream>
+#include <vector>
+
+#include "benchmark.h"
+#include "dual_decoder_test.h"
+#include "normal_async_test.h"
+#include "packet_loss_test.h"
+#include "unit_test.h"
+#include "rps_test.h"
+#include "testsupport/fileutils.h"
+#include "vp8.h"
+
+using namespace webrtc;
+
+// Builds the list of tests to run; the caller takes ownership of the pushed
+// Test objects (main() deletes them). All entries except the plain
+// VP8NormalAsyncTest are kept commented out as local toggles.
+void PopulateTests(std::vector<Test*>* tests)
+{
+//    tests->push_back(new VP8RpsTest());
+//    tests->push_back(new VP8UnitTest());
+//    tests->push_back(new VP8DualDecoderTest());
+//    tests->push_back(new VP8Benchmark());
+//    tests->push_back(new VP8PacketLossTest(0.05, false, 5));
+    tests->push_back(new VP8NormalAsyncTest());
+}
+
+// Test driver: runs every registered test with a fresh encoder/decoder pair
+// and appends results to VP8_test_log.txt in the output directory.
+int main()
+{
+    std::vector<Test*> tests;
+    PopulateTests(&tests);
+    std::fstream log;
+    std::string log_file = webrtc::test::OutputPath() + "VP8_test_log.txt";
+    log.open(log_file.c_str(), std::fstream::out | std::fstream::app);
+    std::vector<Test*>::iterator it;
+    for (it = tests.begin(); it != tests.end(); ++it)
+    {
+        // Each test gets its own codec instances so state cannot leak
+        // between tests.
+        VP8Encoder* enc = VP8Encoder::Create();
+        VP8Decoder* dec = VP8Decoder::Create();
+        (*it)->SetEncoder(enc);
+        (*it)->SetDecoder(dec);
+        (*it)->SetLog(&log);
+        (*it)->Perform();
+        (*it)->Print();
+        delete enc;
+        delete dec;
+        delete *it;
+    }
+    // All Test objects were deleted in the loop; drop the dangling pointers.
+    // (The original code called pop_back() here, which only removed one of
+    // the already-deleted pointers and served no purpose.)
+    tests.clear();
+    log.close();
+    return 0;
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/unit_test.cc b/trunk/src/modules/video_coding/codecs/vp8/main/test/unit_test.cc
new file mode 100644
index 0000000..5ba35ec
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/unit_test.cc
@@ -0,0 +1,161 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "unit_test.h"
+
+#include <string.h>
+
+#include "../../../test_framework/video_source.h"
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "vp8.h"
+
+using namespace webrtc;
+
+// Default constructor: registers under the standard test name/description.
+VP8UnitTest::VP8UnitTest()
+    : UnitTest("VP8UnitTest", "Unit test") {
+}
+
+// Named constructor: lets subclasses register a custom name/description.
+VP8UnitTest::VP8UnitTest(std::string name, std::string description)
+    : UnitTest(name, description) {
+}
+
+// Applies |bitRate| to the encoder, keeping the configured max frame rate
+// (the frameRate argument is ignored). Asserts that SetRates() succeeded.
+// NOTE(review): SetRates() returns a (possibly negative) status code which
+// is then returned here as an unsigned value -- a failure would wrap to a
+// huge WebRtc_UWord32. Confirm callers only use this after the EXPECT fires.
+WebRtc_UWord32
+VP8UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate, WebRtc_UWord32 /*frameRate*/)
+{
+    int rate = _encoder->SetRates(bitRate, _inst.maxFramerate);
+    EXPECT_TRUE(rate >= 0);
+    return rate;
+}
+
+// Compares two encoded buffers for bit-exactness, ignoring the
+// variable-length picture ID prefix at the start of each buffer.
+bool
+VP8UnitTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                             const void* ptrB, unsigned int bLengthBytes)
+{
+    const unsigned char* cPtrA = (const unsigned char*)ptrA;
+    const unsigned char* cPtrB = (const unsigned char*)ptrB;
+    // Skip picture ID before comparing.
+    int aSkip = PicIdLength(cPtrA);
+    int bSkip = PicIdLength(cPtrB);
+    // Shrink the lengths by the skipped prefix so the base-class comparison
+    // does not read past the end of either buffer (the original passed the
+    // full lengths together with the advanced pointers).
+    return UnitTest::CheckIfBitExact(cPtrA + aSkip, aLengthBytes - aSkip,
+                                     cPtrB + bSkip, bLengthBytes - bSkip);
+}
+
+// Returns the length in bytes of the variable-length picture ID prefix at
+// |ptr|: every byte except the last has the MSB (0x80) set, and at most 8
+// continuation bytes are consumed, so the result is in [1, 9]. The decoded
+// picture ID value itself is not needed here (the original accumulated it
+// into a local that was never used).
+int
+VP8UnitTest::PicIdLength(const unsigned char* ptr)
+{
+    WebRtc_UWord8 numberOfBytes = 0;
+    // Advance past continuation bytes (MSB set), capped at 8.
+    while ((ptr[numberOfBytes] & 0x80) && numberOfBytes < 8)
+    {
+        numberOfBytes++;
+    }
+    // Count the terminating byte as well.
+    return numberOfBytes + 1;
+}
+
+// Parameter/robustness test pass for the VP8 encoder and decoder wrappers:
+// exercises call-before-init error paths, a couple of problematic
+// resolution/framerate configurations, and bad-parameter handling, then
+// delegates the functional encode/decode pass to UnitTest::Perform().
+void
+VP8UnitTest::Perform()
+{
+    Setup();
+    FILE *outFile = NULL;
+    std::string outFileName;
+    VP8Encoder* enc = (VP8Encoder*)_encoder;
+    VP8Decoder* dec = (VP8Decoder*)_decoder;
+
+    //----- Encoder parameter tests -----
+    //-- Calls before InitEncode() --
+    // Release is OK on an uninitialized encoder; SetRates must report
+    // UNINITIALIZED.
+    EXPECT_EQ(enc->Release(), WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_EQ(enc->SetRates(_bitRate, _inst.maxFramerate),
+              WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+
+    EXPECT_EQ(enc->SetRates(_bitRate, _inst.maxFramerate),
+              WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+   // EXPECT_TRUE(enc->GetCodecConfigParameters(configParameters, sizeof(configParameters)) ==
+   //     WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+
+
+    // NOTE(review): codecInst is only partially filled in below; fields not
+    // assigned keep indeterminate stack values.
+    VideoCodec codecInst;
+    strncpy(codecInst.plName, "VP8", 31);
+    codecInst.plType = 126;
+    codecInst.maxBitrate = 0;
+    codecInst.minBitrate = 0;
+    codecInst.width = 1440;
+    codecInst.height = 1080;
+    codecInst.maxFramerate = 30;
+    codecInst.startBitrate = 300;
+    codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
+    EXPECT_EQ(enc->InitEncode(&codecInst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
+
+
+    //-- Test two problematic level settings --
+    strncpy(codecInst.plName, "VP8", 31);
+    codecInst.plType = 126;
+    codecInst.maxBitrate = 0;
+    codecInst.minBitrate = 0;
+    codecInst.width = 352;
+    codecInst.height = 288;
+    codecInst.maxFramerate = 30;
+    codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
+    codecInst.startBitrate = 300;
+    EXPECT_EQ(enc->InitEncode(&codecInst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
+
+    // Settings not correct for this profile
+    strncpy(codecInst.plName, "VP8", 31);
+    codecInst.plType = 126;
+    codecInst.maxBitrate = 0;
+    codecInst.minBitrate = 0;
+    codecInst.width = 176;
+    codecInst.height = 144;
+    codecInst.maxFramerate = 15;
+    codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
+    codecInst.startBitrate = 300;
+    //EXPECT_TRUE(enc->InitEncode(&codecInst, 1, 1440) == WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED);
+
+    // Re-initialize with the test's own configuration before continuing.
+    ASSERT_EQ(enc->InitEncode(&_inst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
+
+
+    //-- ProcessNewBitrate() errors --
+    // Bad bitrate.
+    // NOTE(review): despite "Bad bitrate" above, a bitrate above max is
+    // expected to succeed here (presumably clamped internally) -- confirm.
+    EXPECT_EQ(enc->SetRates(_inst.maxBitrate + 1, _inst.maxFramerate),
+              WEBRTC_VIDEO_CODEC_OK);
+
+   // Signaling not used.
+
+    // Bad packetloss.
+//    EXPECT_TRUE(enc->SetPacketLoss(300) < 0);
+
+    //----- Decoder parameter tests -----
+    //-- Calls before InitDecode() --
+    EXPECT_TRUE(dec->Release() == 0);
+    ASSERT_TRUE(dec->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+
+    //-- SetCodecConfigParameters() errors --
+    // NULL buffer, too-small size, and garbage content must all be rejected.
+    unsigned char tmpBuf[128];
+    EXPECT_TRUE(dec->SetCodecConfigParameters(NULL, sizeof(tmpBuf)) == -1);
+    EXPECT_TRUE(dec->SetCodecConfigParameters(tmpBuf, 1) == -1);
+   // Garbage data.
+    EXPECT_TRUE(dec->SetCodecConfigParameters(tmpBuf, sizeof(tmpBuf)) == -1);
+
+    //----- Function tests -----
+    outFileName = webrtc::test::OutputPath() + _source->GetName() + "-errResTest.yuv";
+    outFile = fopen(outFileName.c_str(), "wb");
+    ASSERT_TRUE(outFile != NULL);
+
+    UnitTest::Perform();
+    Teardown();
+
+}
diff --git a/trunk/src/modules/video_coding/codecs/vp8/main/test/unit_test.h b/trunk/src/modules/video_coding/codecs/vp8/main/test/unit_test.h
new file mode 100644
index 0000000..e9287e6
--- /dev/null
+++ b/trunk/src/modules/video_coding/codecs/vp8/main/test/unit_test.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_UNIT_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_UNIT_TEST_H_
+
+#include "../../../test_framework/unit_test.h"
+
+// VP8-specific specialization of the codec-framework UnitTest: adjusts the
+// bit-exactness check to skip the picture ID prefix on each encoded frame.
+class VP8UnitTest : public UnitTest
+{
+public:
+    VP8UnitTest();
+    VP8UnitTest(std::string name, std::string description);
+    // Runs encoder/decoder parameter tests, then the generic test pass.
+    virtual void Perform();
+
+protected:
+    // Applies |bitRate| to the encoder; the frame rate argument is unused
+    // (the configured _inst.maxFramerate is used instead).
+    virtual WebRtc_UWord32 CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
+                                                    WebRtc_UWord32 /*frameRate*/);
+    // Compares two encoded buffers for bit-exactness, ignoring the leading
+    // picture ID on each.
+    virtual bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
+                                 const void *ptrB, unsigned int bLengthBytes);
+    // Returns the byte length of the picture ID prefix at |ptr|.
+    static int PicIdLength(const unsigned char* ptr);
+};
+
+////////////////////////////////////////////////////////////////
+// RESERVATIONS TO PASSING UNIT TEST ON VP8 CODEC             //
+// Test that will not pass:                                   //
+// 1. Check bit exact for decoded images.                     //
+// 2. Target bitrate - Allow a margin of 10% instead of 5%    //
+// 3. Detecting errors in bit stream - NA.                    //
+////////////////////////////////////////////////////////////////
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_UNIT_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/OWNERS b/trunk/src/modules/video_coding/main/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/trunk/src/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h b/trunk/src/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h
new file mode 100644
index 0000000..c84d5e7
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
+
+#include "gmock/gmock.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// GoogleMock stub for VCMFrameTypeCallback, letting tests verify that the
+// VCM requests key frames and slice loss indications as expected.
+class MockVCMFrameTypeCallback : public VCMFrameTypeCallback {
+ public:
+  MOCK_METHOD0(RequestKeyFrame, int32_t());
+  MOCK_METHOD1(SliceLossIndicationRequest,
+               WebRtc_Word32(const WebRtc_UWord64 pictureId));
+};
+
+// GoogleMock stub for VCMPacketRequestCallback, letting tests verify NACK
+// retransmission requests issued by the VCM.
+class MockPacketRequestCallback : public VCMPacketRequestCallback {
+ public:
+  MOCK_METHOD2(ResendPackets, int32_t(const uint16_t* sequenceNumbers,
+                                      uint16_t length));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
diff --git a/trunk/src/modules/video_coding/main/interface/video_coding.h b/trunk/src/modules/video_coding/main/interface/video_coding.h
new file mode 100644
index 0000000..54e6742
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/interface/video_coding.h
@@ -0,0 +1,556 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
+#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
+
+#include "modules/interface/module.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+
+namespace webrtc
+{
+
+class TickTimeBase;
+class VideoEncoder;
+class VideoDecoder;
+struct CodecSpecificInfo;
+
+class VideoCodingModule : public Module
+{
+public:
+    enum SenderNackMode {
+        kNackNone,
+        kNackAll,
+        kNackSelective
+    };
+
+    enum ReceiverRobustness {
+        kNone,
+        kHardNack,
+        kSoftNack,
+        kDualDecoder,
+        kReferenceSelection
+    };
+
+    enum DecodeErrors {
+        kNoDecodeErrors,
+        kAllowDecodeErrors
+    };
+
+    static VideoCodingModule* Create(const WebRtc_Word32 id);
+
+    static VideoCodingModule* Create(const WebRtc_Word32 id,
+                                     TickTimeBase* clock);
+
+    static void Destroy(VideoCodingModule* module);
+
+    // Get number of supported codecs
+    //
+    // Return value     : Number of supported codecs
+    static WebRtc_UWord8 NumberOfCodecs();
+
+    // Get supported codec settings with using id
+    //
+    // Input:
+    //      - listId         : Id or index of the codec to look up
+    //      - codec          : Memory where the codec settings will be stored
+    //
+    // Return value     : VCM_OK,              on success
+    //                    VCM_PARAMETER_ERROR  if codec not supported or id too high
+    static WebRtc_Word32 Codec(const WebRtc_UWord8 listId, VideoCodec* codec);
+
+    // Get supported codec settings using codec type
+    //
+    // Input:
+    //      - codecType      : The codec type to get settings for
+    //      - codec          : Memory where the codec settings will be stored
+    //
+    // Return value     : VCM_OK,              on success
+    //                    VCM_PARAMETER_ERROR  if codec not supported
+    static WebRtc_Word32 Codec(VideoCodecType codecType, VideoCodec* codec);
+
+    /*
+    *   Sender
+    */
+
+    // Any encoder-related state of VCM will be initialized to the
+    // same state as when the VCM was created. This will not interrupt
+    // or affect decoding functionality of VCM. VCM will lose all the
+    // encoding-related settings by calling this function.
+    // For instance, a send codec has to be registered again.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 InitializeSender() = 0;
+
+    // Registers a codec to be used for encoding. Calling this
+    // API multiple times overwrites any previously registered codecs.
+    //
+    // Input:
+    //      - sendCodec      : Settings for the codec to be registered.
+    //      - numberOfCores  : The number of cores the codec is allowed
+    //                         to use.
+    //      - maxPayloadSize : The maximum size each payload is allowed
+    //                                to have. Usually MTU - overhead.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
+                                            WebRtc_UWord32 numberOfCores,
+                                            WebRtc_UWord32 maxPayloadSize) = 0;
+
+    // API to get the current send codec in use.
+    //
+    // Input:
+    //      - currentSendCodec : Address where the sendCodec will be written.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const = 0;
+
+    // API to get the current send codec type
+    //
+    // Return value      : Codec type, on success.
+    //                     kVideoCodecUnknown, on error or if no send codec is set
+    virtual VideoCodecType SendCodec() const = 0;
+
+    // Register an external encoder object. This can not be used together with
+    // external decoder callbacks.
+    //
+    // Input:
+    //      - externalEncoder : Encoder object to be used for encoding frames inserted
+    //                          with the AddVideoFrame API.
+    //      - payloadType     : The payload type which this encoder is bound to.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalSource = false) = 0;
+
+    // API to get codec config parameters to be sent out-of-band to a receiver.
+    //
+    // Input:
+    //      - buffer          : Memory where the codec config parameters should be written.
+    //      - size            : Size of the memory available.
+    //
+    // Return value      : Number of bytes written, on success.
+    //                     < 0,                     on error.
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size) = 0;
+
+    // API to get currently configured encoder target bitrate in kbit/s.
+    //
+    // Return value      : 0,   on success.
+    //                     < 0, on error.
+    virtual int Bitrate(unsigned int* bitrate) const = 0;
+
+    // API to get currently configured encoder target frame rate.
+    //
+    // Return value      : 0,   on success.
+    //                     < 0, on error.
+    virtual int FrameRate(unsigned int* framerate) const = 0;
+
+    // Sets the parameters describing the send channel. These parameters are inputs to the
+    // Media Optimization inside the VCM and also specifies the target bit rate for the
+    // encoder. Bit rate used by NACK should already be compensated for by the user.
+    //
+    // Input:
+    //      - availableBandWidth    : Band width available for the VCM in kbit/s.
+    //      - lossRate              : Fractions of lost packets the past second.
+    //                                (loss rate in percent = 100 * packetLoss / 255)
+    //      - rtt                   : Current round-trip time in ms.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 availableBandWidth,
+                                               WebRtc_UWord8 lossRate,
+                                               WebRtc_UWord32 rtt) = 0;
+
+    // Sets the parameters describing the receive channel. These parameters are inputs to the
+    // Media Optimization inside the VCM.
+    //
+    // Input:
+    //      - rtt                   : Current round-trip time in ms.
+    //                                with the most amount available bandwidth in a conference
+    //                                scenario
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt) = 0;
+
+    // Register a transport callback which will be called to deliver the encoded data and
+    // side information.
+    //
+    // Input:
+    //      - transport  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterTransportCallback(VCMPacketizationCallback* transport) = 0;
+
+    // Register video output information callback which will be called to deliver information
+    // about the video stream produced by the encoder, for instance the average frame rate and
+    // bit rate.
+    //
+    // Input:
+    //      - outputInformation  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterSendStatisticsCallback(
+                                     VCMSendStatisticsCallback* sendStats) = 0;
+
+    // Register a video quality settings callback which will be called when
+    // frame rate/dimensions need to be updated for video quality optimization
+    //
+    // Input:
+    //		- videoQMSettings  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error
+    virtual WebRtc_Word32 RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings) = 0;
+
+    // Register a video protection callback which will be called to deliver
+    // the requested FEC rate and NACK status (on/off).
+    //
+    // Input:
+    //      - protection  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterProtectionCallback(VCMProtectionCallback* protection) = 0;
+
+    // Enable or disable a video protection method.
+    //
+    // Input:
+    //      - videoProtection  : The method to enable or disable.
+    //      - enable           : True if the method should be enabled, false if
+    //                           it should be disabled.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetVideoProtection(VCMVideoProtection videoProtection,
+                                             bool enable) = 0;
+
+    // Add one raw video frame to the encoder. This function does all the necessary
+    // processing, then decides what frame type to encode, or if the frame should be
+    // dropped. If the frame should be encoded it passes the frame to the encoder
+    // before it returns.
+    //
+    // Input:
+    //      - videoFrame        : Video frame to encode.
+    //      - codecSpecificInfo : Extra codec information, e.g., pre-parsed in-band signaling.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 AddVideoFrame(
+        const VideoFrame& videoFrame,
+        const VideoContentMetrics* contentMetrics = NULL,
+        const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
+
+    // Next frame encoded should be of the type frameType.
+    //
+    // Input:
+    //      - frameType    : The frame type to encode next time a VideoFrame
+    //                       is added to the module.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 FrameTypeRequest(FrameType frameType,
+                                           WebRtc_UWord8 simulcastIdx) = 0;
+
+    // Frame Dropper enable. Can be used to disable the frame dropping when the encoder
+    // over-uses its bit rate. This API is designed to be used when the encoded frames
+    // are supposed to be stored to an AVI file, or when the I420 codec is used and the
+    // target bit rate shouldn't affect the frame rate.
+    //
+    // Input:
+    //      - enable            : True to enable the setting, false to disable it.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 EnableFrameDropper(bool enable) = 0;
+
+    // Sent frame counters
+    virtual WebRtc_Word32 SentFrameCount(VCMFrameCount& frameCount) const = 0;
+
+    /*
+    *   Receiver
+    */
+
+    // The receiver state of the VCM will be initialized to the
+    // same state as when the VCM was created. This will not interrupt
+    // or affect the send side functionality of VCM. VCM will lose all the
+    // decoding-related settings by calling this function. All frames
+    // inside the jitter buffer are flushed and the delay is reset.
+    // For instance, a receive codec has to be registered again.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 InitializeReceiver() = 0;
+
+    // Register possible receive codecs, can be called multiple times for different codecs.
+    // The module will automatically switch between registered codecs depending on the
+    // payload type of incoming frames. The actual decoder will be created when needed.
+    //
+    // Input:
+    //      - receiveCodec      : Settings for the codec to be registered.
+    //      - numberOfCores     : Number of CPU cores that the decoder is allowed to use.
+    //      - requireKeyFrame   : Set this to true if you don't want any delta frames
+    //                            to be decoded until the first key frame has been decoded.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                               WebRtc_Word32 numberOfCores,
+                                               bool requireKeyFrame = false) = 0;
+
+    // Register an externally defined decoder/renderer object. Can be a decoder only or a
+    // decoder coupled with a renderer. Note that RegisterReceiveCodec must be called to
+    // be used for decoding incoming streams.
+    //
+    // Input:
+    //      - externalDecoder        : The external decoder/renderer object.
+    //      - payloadType            : The payload type which this decoder should be
+    //                                 registered to.
+    //      - internalRenderTiming   : True if the internal renderer (if any) of the decoder
+    //                                 object can make sure to render at a given time in ms.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalRenderTiming) = 0;
+
+    // Register a receive callback. Will be called whenever there is a new frame ready
+    // for rendering.
+    //
+    // Input:
+    //      - receiveCallback        : The callback object to be used by the module when a
+    //                                 frame is ready for rendering.
+    //                                 De-register with a NULL pointer.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterReceiveCallback(VCMReceiveCallback* receiveCallback) = 0;
+
+    // Register a receive statistics callback which will be called to deliver information
+    // about the video stream received by the receiving side of the VCM, for instance the
+    // average frame rate and bit rate.
+    //
+    // Input:
+    //      - receiveStats  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterReceiveStatisticsCallback(
+                               VCMReceiveStatisticsCallback* receiveStats) = 0;
+
+    // Register a frame type request callback. This callback will be called when the
+    // module needs to request specific frame types from the send side.
+    //
+    // Input:
+    //      - frameTypeCallback      : The callback object to be used by the module when
+    //                                 requesting a specific type of frame from the send side.
+    //                                 De-register with a NULL pointer.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterFrameTypeCallback(
+                                  VCMFrameTypeCallback* frameTypeCallback) = 0;
+
+    // Register a frame storage callback. This callback will be called right before an
+    // encoded frame is given to the decoder. Useful for recording the incoming video sequence.
+    //
+    // Input:
+    //      - frameStorageCallback    : The callback object used by the module
+    //                                  to store a received encoded frame.
+    //
+    // Return value     : VCM_OK, on success.
+    //                    < 0,         on error.
+    virtual WebRtc_Word32 RegisterFrameStorageCallback(
+                             VCMFrameStorageCallback* frameStorageCallback) = 0;
+
+    // Registers a callback which is called whenever the receive side of the VCM
+    // encounters holes in the packet sequence and needs packets to be retransmitted.
+    //
+    // Input:
+    //              - callback      : The callback to be registered in the VCM.
+    //
+    // Return value     : VCM_OK,     on success.
+    //                    <0,              on error.
+    virtual WebRtc_Word32 RegisterPacketRequestCallback(
+                                        VCMPacketRequestCallback* callback) = 0;
+
+    // Waits for the next frame in the jitter buffer to become complete
+    // (waits no longer than maxWaitTimeMs), then passes it to the decoder for decoding.
+    // Should be called as often as possible to get the most out of the decoder.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 Decode(WebRtc_UWord16 maxWaitTimeMs = 200) = 0;
+
+    // Waits for the next frame in the dual jitter buffer to become complete
+    // (waits no longer than maxWaitTimeMs), then passes it to the dual decoder
+    // for decoding. This will never trigger a render callback. Should be
+    // called frequently, and as long as it returns 1 it should be called again
+    // as soon as possible.
+    //
+    // Return value      : 1,           if a frame was decoded
+    //                     0,           if no frame was decoded
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs = 200) = 0;
+
+    // Decodes a frame and sets an appropriate render time in ms relative to the system time.
+    // Should be used in conjunction with VCMFrameStorageCallback.
+    //
+    // Input:
+    //      - frameFromStorage      : Encoded frame read from file or received through
+    //                                the VCMFrameStorageCallback callback.
+    //
+    // Return value:        : VCM_OK, on success
+    //                        < 0,         on error
+    virtual WebRtc_Word32 DecodeFromStorage(const EncodedVideoData& frameFromStorage) = 0;
+
+    // Reset the decoder state to the initial state.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 ResetDecoder() = 0;
+
+    // API to get the codec which is currently used for decoding by the module.
+    //
+    // Input:
+    //      - currentReceiveCodec      : Settings for the codec to be registered.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const = 0;
+
+    // API to get the codec type currently used for decoding by the module.
+    //
+    // Return value      : codec type,             on success.
+    //                     kVideoCodecUnknown, on error or if no receive codec is registered
+    virtual VideoCodecType ReceiveCodec() const = 0;
+
+    // Insert a parsed packet into the receiver side of the module. Will be placed in the
+    // jitter buffer waiting for the frame to become complete. Returns as soon as the packet
+    // has been placed in the jitter buffer.
+    //
+    // Input:
+    //      - incomingPayload      : Payload of the packet.
+    //      - payloadLength        : Length of the payload.
+    //      - rtpInfo              : The parsed header.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                       WebRtc_UWord32 payloadLength,
+                                       const WebRtcRTPHeader& rtpInfo) = 0;
+
+    // Minimum playout delay (used for lip-sync). This is the minimum delay required
+    // to sync with audio. Not included in VideoCodingModule::Delay().
+    // Defaults to 0 ms.
+    //
+    // Input:
+    //      - minPlayoutDelayMs   : Minimum playout delay in ms.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetMinimumPlayoutDelay(WebRtc_UWord32 minPlayoutDelayMs) = 0;
+
+    // Set the time required by the renderer to render a frame.
+    //
+    // Input:
+    //      - timeMS        : The time in ms required by the renderer to render a frame.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 timeMS) = 0;
+
+    // The total delay desired by the VCM. Can be less than the minimum
+    // delay set with SetMinimumPlayoutDelay.
+    //
+    // Return value      : Total delay in ms, on success.
+    //                     < 0,               on error.
+    virtual WebRtc_Word32 Delay() const = 0;
+
+    // Get the received frame counters. Keeps track of the number of each frame type
+    // received since the start of the call.
+    //
+    // Output:
+    //      - frameCount      : Struct to be filled with the number of frames received.
+    //
+    // Return value           : VCM_OK,        on success.
+    //                          <0,                 on error.
+    virtual WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const = 0;
+
+    // Returns the number of packets discarded by the jitter buffer due to being
+    // too late. This can include duplicated packets which arrived after the
+    // frame was sent to the decoder. Therefore packets which were prematurely
+    // NACKed will be counted.
+    virtual WebRtc_UWord32 DiscardedPackets() const = 0;
+
+
+    // Robustness APIs
+
+    // Set the sender RTX/NACK mode.
+    // Input:
+    //      - mode       : the selected NACK mode.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderNackMode(SenderNackMode mode) = 0;
+
+    // Set the sender reference picture selection (RPS) mode.
+    // Input:
+    //      - enable     : true or false, for enable and disable, respectively.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderReferenceSelection(bool enable) = 0;
+
+    // Set the sender forward error correction (FEC) mode.
+    // Input:
+    //      - enable     : true or false, for enable and disable, respectively.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderFEC(bool enable) = 0;
+
+    // Set the key frame period, or disable periodic key frames (I-frames).
+    // Input:
+    //      - periodMs   : period in ms; <= 0 to disable periodic key frames.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderKeyFramePeriod(int periodMs) = 0;
+
+    // Set the receiver robustness mode. The mode decides how the receiver
+    // responds to losses in the stream. The type of counter-measure (soft or
+    // hard NACK, dual decoder, RPS, etc.) is selected through the
+    // robustnessMode parameter. The errorMode parameter decides if it is
+    // allowed to display frames corrupted by losses. Note that not all
+    // combinations of the two parameters are feasible. An error will be
+    // returned for invalid combinations.
+    // Input:
+    //      - robustnessMode : selected robustness mode.
+    //      - errorMode      : selected error mode.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+                                          DecodeErrors errorMode) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
diff --git a/trunk/src/modules/video_coding/main/interface/video_coding_defines.h b/trunk/src/modules/video_coding/main/interface/video_coding_defines.h
new file mode 100644
index 0000000..3755f6d
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/interface/video_coding_defines.h
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
+#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
+
+#include "typedefs.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc
+{
+
+// Error codes
+#define VCM_FRAME_NOT_READY      3
+#define VCM_REQUEST_SLI          2
+#define VCM_MISSING_CALLBACK     1
+#define VCM_OK                   0
+#define VCM_GENERAL_ERROR       -1
+#define VCM_LEVEL_EXCEEDED      -2
+#define VCM_MEMORY              -3
+#define VCM_PARAMETER_ERROR     -4
+#define VCM_UNKNOWN_PAYLOAD     -5
+#define VCM_CODEC_ERROR         -6
+#define VCM_UNINITIALIZED       -7
+#define VCM_NO_CODEC_REGISTERED -8
+#define VCM_JITTER_BUFFER_ERROR -9
+#define VCM_OLD_PACKET_ERROR    -10
+#define VCM_NO_FRAME_DECODED    -11
+#define VCM_ERROR_REQUEST_SLI   -12
+#define VCM_NOT_IMPLEMENTED     -20
+
+#define VCM_RED_PAYLOAD_TYPE        96
+#define VCM_ULPFEC_PAYLOAD_TYPE     97
+#define VCM_VP8_PAYLOAD_TYPE       120
+#define VCM_I420_PAYLOAD_TYPE      124
+
+enum VCMNackProperties
+{
+    kNackHistoryLength = 450
+};
+
+enum VCMVideoProtection
+{
+    kProtectionNack,                // Both send-side and receive-side
+    kProtectionNackSender,          // Send-side only
+    kProtectionNackReceiver,        // Receive-side only
+    kProtectionDualDecoder,
+    kProtectionFEC,
+    kProtectionNackFEC,
+    kProtectionKeyOnLoss,
+    kProtectionKeyOnKeyLoss,
+    kProtectionPeriodicKeyFrames
+};
+
+enum VCMTemporalDecimation
+{
+    kBitrateOverUseDecimation,
+};
+
+struct VCMFrameCount
+{
+    WebRtc_UWord32 numKeyFrames;
+    WebRtc_UWord32 numDeltaFrames;
+};
+
+
+// Callback class used for sending data ready to be packetized
+class VCMPacketizationCallback
+{
+public:
+    virtual WebRtc_Word32 SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& fragmentationHeader,
+        const RTPVideoHeader* rtpVideoHdr) = 0;
+protected:
+    virtual ~VCMPacketizationCallback() {}
+};
+
+// Callback class used for passing encoded frames to the user for storage (e.g. to file).
+class VCMFrameStorageCallback
+{
+public:
+    virtual WebRtc_Word32 StoreReceivedFrame(const EncodedVideoData& frameToStore) = 0;
+
+protected:
+    virtual ~VCMFrameStorageCallback() {}
+};
+
+// Callback class used for passing decoded frames which are ready to be rendered.
+class VCMReceiveCallback
+{
+public:
+    virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0;
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}
+
+protected:
+    virtual ~VCMReceiveCallback() {}
+};
+
+// Callback class used for informing the user of the bit rate and frame rate produced by the
+// encoder.
+class VCMSendStatisticsCallback
+{
+public:
+    virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
+                                         const WebRtc_UWord32 frameRate) = 0;
+
+protected:
+    virtual ~VCMSendStatisticsCallback() {}
+};
+
+// Callback class used for informing the user of the incoming bit rate and frame rate.
+class VCMReceiveStatisticsCallback
+{
+public:
+    virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bitRate,
+                                            const WebRtc_UWord32 frameRate) = 0;
+
+protected:
+    virtual ~VCMReceiveStatisticsCallback() {}
+};
+
+// Callback class used for telling the user about the requested amount of
+// bit stream protection: FEC rate for key and delta frame;
+// whether the FEC uses unequal protection (UEP) across packets,
+// for key and delta frame;
+// and whether NACK should be on or off.
+class VCMProtectionCallback
+{
+public:
+    virtual int ProtectionRequest(
+        uint8_t delta_fec_rate,
+        uint8_t key_fec_rate,
+        bool delta_use_uep_protection,
+        bool key_use_uep_protection,
+        bool nack_enabled,
+        uint32_t* sent_video_rate_bps,
+        uint32_t* sent_nack_rate_bps,
+        uint32_t* sent_fec_rate_bps) = 0;
+
+protected:
+    virtual ~VCMProtectionCallback() {}
+};
+
+// Callback class used for telling the user about what frame type needed to continue decoding.
+// Typically a key frame when the stream has been corrupted in some way.
+class VCMFrameTypeCallback
+{
+public:
+    virtual WebRtc_Word32 RequestKeyFrame() = 0;
+    virtual WebRtc_Word32 SliceLossIndicationRequest(const WebRtc_UWord64 pictureId) {return -1;}
+
+protected:
+    virtual ~VCMFrameTypeCallback() {}
+};
+
+// Callback class used for telling the user about which packet sequence numbers are currently
+// missing and need to be resent.
+class VCMPacketRequestCallback
+{
+public:
+    virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+                                        WebRtc_UWord16 length) = 0;
+
+protected:
+    virtual ~VCMPacketRequestCallback() {}
+};
+
+// Callback used to inform the user of the desired resolution
+// as subscribed by Media Optimization (Quality Modes)
+class VCMQMSettingsCallback
+{
+public:
+    virtual WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+                                             const WebRtc_UWord32 width,
+                                             const WebRtc_UWord32 height) = 0;
+
+protected:
+    virtual ~VCMQMSettingsCallback() {}
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
diff --git a/trunk/src/modules/video_coding/main/source/Android.mk b/trunk/src/modules/video_coding/main/source/Android.mk
new file mode 100644
index 0000000..2ecf45d
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/Android.mk
@@ -0,0 +1,70 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_video_coding
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    codec_database.cc \
+    codec_timer.cc \
+    content_metrics_processing.cc \
+    decoding_state.cc \
+    encoded_frame.cc \
+    exp_filter.cc \
+    frame_buffer.cc \
+    frame_dropper.cc \
+    generic_decoder.cc \
+    generic_encoder.cc \
+    inter_frame_delay.cc \
+    jitter_buffer.cc \
+    jitter_buffer_common.cc \
+    jitter_estimator.cc \
+    media_opt_util.cc \
+    media_optimization.cc \
+    packet.cc \
+    qm_select.cc \
+    receiver.cc \
+    rtt_filter.cc \
+    session_info.cc \
+    timestamp_extrapolator.cc \
+    timestamp_map.cc \
+    timing.cc \
+    video_coding_impl.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../codecs/interface \
+    $(LOCAL_PATH)/../../codecs/i420/main/interface \
+    $(LOCAL_PATH)/../../codecs/vp8/main/interface \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
+    $(LOCAL_PATH)/../../../../common_video/interface \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/video_coding/main/source/codec_database.cc b/trunk/src/modules/video_coding/main/source/codec_database.cc
new file mode 100644
index 0000000..6f81895
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/codec_database.cc
@@ -0,0 +1,719 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "codec_database.h"
+
+#include <assert.h>
+
+#include "../../../../engine_configurations.h"
+#include "internal_defines.h"
+#include "trace.h"
+
+#if defined(_WIN32)
+    // VS 2005: Don't warn for default initialized arrays. See help for more info.
+    // Don't warn for strncpy being insecure.
+    // switch statement contains 'default' but no 'case' labels
+#pragma warning(disable:4351; disable:4996; disable:4065)
+#endif
+
+// Supported codecs
+#ifdef  VIDEOCODEC_VP8
+    #include "vp8.h"
+#endif
+#ifdef  VIDEOCODEC_I420
+    #include "i420.h"
+#endif
+
+namespace webrtc
+{
+
+VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings,
+                                     WebRtc_UWord32 numberOfCores,
+                                     bool requireKeyFrame)
+:
+_settings(settings),
+_numberOfCores(numberOfCores),
+_requireKeyFrame(requireKeyFrame)
+{
+}
+
+VCMExtDecoderMapItem::VCMExtDecoderMapItem(VideoDecoder* externalDecoderInstance,
+                                           WebRtc_UWord8 payloadType,
+                                           bool internalRenderTiming)
+:
+_payloadType(payloadType),
+_externalDecoderInstance(externalDecoderInstance),
+_internalRenderTiming(internalRenderTiming)
+{
+}
+
+VCMCodecDataBase::VCMCodecDataBase(WebRtc_Word32 id):
+_id(id),
+_numberOfCores(0),
+_maxPayloadSize(kDefaultPayloadSize),
+_periodicKeyFrames(false),
+_currentEncIsExternal(false),
+_sendCodec(),
+_receiveCodec(),
+_externalPayloadType(0),
+_externalEncoder(NULL),
+_internalSource(false),
+_ptrEncoder(NULL),
+_ptrDecoder(NULL),
+_currentDecIsExternal(false),
+_decMap(),
+_decExternalMap()
+{
+    //
+}
+
+VCMCodecDataBase::~VCMCodecDataBase()
+{
+    Reset();
+}
+
+WebRtc_Word32
+VCMCodecDataBase::Reset()
+{
+    WebRtc_Word32 ret = ResetReceiver();
+    if (ret < 0)
+    {
+        return ret;
+    }
+    ret = ResetSender();
+    if (ret < 0)
+    {
+        return ret;
+    }
+   return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::ResetSender()
+{
+    DeleteEncoder();
+    _periodicKeyFrames = false;
+    return VCM_OK;
+}
+
+VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
+    const VideoCodecType type) const {
+
+    switch(type)
+    {
+#ifdef VIDEOCODEC_VP8
+        case kVideoCodecVP8:
+            return new VCMGenericEncoder(*(VP8Encoder::Create()));
+#endif
+#ifdef VIDEOCODEC_I420
+        case kVideoCodecI420:
+            return new VCMGenericEncoder(*(new I420Encoder));
+#endif
+        default:
+            return NULL;
+    }
+}
+
+void
+VCMCodecDataBase::DeleteEncoder()
+{
+    if (_ptrEncoder)
+    {
+        _ptrEncoder->Release();
+        if (!_currentEncIsExternal)
+        {
+            delete &_ptrEncoder->_encoder;
+        }
+        delete _ptrEncoder;
+        _ptrEncoder = NULL;
+    }
+}
+
+WebRtc_UWord8
+VCMCodecDataBase::NumberOfCodecs()
+{
+    return VCM_NUM_VIDEO_CODECS_AVAILABLE;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::Codec(WebRtc_UWord8 listId, VideoCodec *settings)
+{
+    if (settings == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+
+    if (listId >= VCM_NUM_VIDEO_CODECS_AVAILABLE)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    memset(settings, 0, sizeof(VideoCodec));
+    switch (listId)
+    {
+#ifdef VIDEOCODEC_VP8
+    case VCM_VP8_IDX:
+        {
+            strncpy(settings->plName, "VP8", 4);
+            settings->codecType = kVideoCodecVP8;
+            // 96 to 127 dynamic payload types for video codecs
+            settings->plType = VCM_VP8_PAYLOAD_TYPE;
+            settings->startBitrate = 100;
+            settings->minBitrate = VCM_MIN_BITRATE;
+            settings->maxBitrate = 0;
+            settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+            settings->width = VCM_DEFAULT_CODEC_WIDTH;
+            settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+            settings->numberOfSimulcastStreams = 0;
+            settings->codecSpecific.VP8.resilience = kResilientStream;
+            settings->codecSpecific.VP8.numberOfTemporalLayers = 1;
+            break;
+        }
+#endif
+#ifdef VIDEOCODEC_I420
+    case VCM_I420_IDX:
+        {
+            strncpy(settings->plName, "I420", 5);
+            settings->codecType = kVideoCodecI420;
+            // 96 to 127 dynamic payload types for video codecs
+            settings->plType = VCM_I420_PAYLOAD_TYPE;
+            // Bitrate needed for this size and framerate
+            settings->startBitrate = 3*VCM_DEFAULT_CODEC_WIDTH*
+                                       VCM_DEFAULT_CODEC_HEIGHT*8*
+                                       VCM_DEFAULT_FRAME_RATE/1000/2;
+            settings->maxBitrate = settings->startBitrate;
+            settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+            settings->width = VCM_DEFAULT_CODEC_WIDTH;
+            settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+            settings->minBitrate = VCM_MIN_BITRATE;
+            settings->numberOfSimulcastStreams = 0;
+            break;
+        }
+#endif
+    default:
+        {
+            return VCM_PARAMETER_ERROR;
+        }
+    }
+
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::Codec(VideoCodecType codecType, VideoCodec* settings)
+{
+    for (int i = 0; i < VCMCodecDataBase::NumberOfCodecs(); i++)
+    {
+        const WebRtc_Word32 ret = VCMCodecDataBase::Codec(i, settings);
+        if (ret != VCM_OK)
+        {
+            return ret;
+        }
+        if (codecType == settings->codecType)
+        {
+            return VCM_OK;
+        }
+    }
+    return VCM_PARAMETER_ERROR;
+}
+
+// assuming only one registered encoder - since only one used, no need for more
+WebRtc_Word32
+VCMCodecDataBase::RegisterSendCodec(const VideoCodec* sendCodec,
+                                    WebRtc_UWord32 numberOfCores,
+                                    WebRtc_UWord32 maxPayloadSize)
+ {
+    if (sendCodec == NULL)
+    {
+        return VCM_UNINITIALIZED;
+    }
+    if (maxPayloadSize == 0)
+    {
+        maxPayloadSize = kDefaultPayloadSize;
+    }
+    if (numberOfCores > 32)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    if (sendCodec->plType <= 0)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    // Make sure the start bit rate is sane...
+    if (sendCodec->startBitrate > 1000000)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    if (sendCodec->codecType == kVideoCodecUnknown)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    _numberOfCores = numberOfCores;
+    _maxPayloadSize = maxPayloadSize;
+
+    memcpy(&_sendCodec, sendCodec, sizeof(VideoCodec));
+
+    if (_sendCodec.maxBitrate == 0)
+    {
+        // max is one bit per pixel
+        _sendCodec.maxBitrate = ((WebRtc_Word32)_sendCodec.height *
+                                 (WebRtc_Word32)_sendCodec.width *
+                                 (WebRtc_Word32)_sendCodec.maxFramerate) / 1000;
+        if (_sendCodec.startBitrate > _sendCodec.maxBitrate)
+        {
+            // but if the customer tries to set a higher start bit rate we will increase
+            // the max accordingly
+            _sendCodec.maxBitrate = _sendCodec.startBitrate;
+        }
+    }
+
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::SendCodec(VideoCodec* currentSendCodec) const
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(_id), "SendCodec");
+
+    if(_ptrEncoder == NULL)
+    {
+        return VCM_UNINITIALIZED;
+    }
+    memcpy(currentSendCodec, &_sendCodec, sizeof(VideoCodec));
+    return VCM_OK;
+}
+
+VideoCodecType
+VCMCodecDataBase::SendCodec() const
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(_id),
+            "SendCodec type");
+    if (_ptrEncoder == NULL)
+    {
+        return kVideoCodecUnknown;
+    }
+    return _sendCodec.codecType;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::DeRegisterExternalEncoder(WebRtc_UWord8 payloadType, bool& wasSendCodec)
+{
+    wasSendCodec = false;
+    if (_externalPayloadType != payloadType)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    if (_sendCodec.plType == payloadType)
+    {
+        //De-register as send codec if needed
+        DeleteEncoder();
+        memset(&_sendCodec, 0, sizeof(VideoCodec));
+        _currentEncIsExternal = false;
+        wasSendCodec = true;
+    }
+    _externalPayloadType = 0;
+    _externalEncoder = NULL;
+    _internalSource = false;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                          WebRtc_UWord8 payloadType,
+                                          bool internalSource)
+{
+    // since only one encoder can be used at a given time,
+    // only one external encoder can be registered/used
+    _externalEncoder = externalEncoder;
+    _externalPayloadType = payloadType;
+    _internalSource = internalSource;
+
+    return VCM_OK;
+}
+
+VCMGenericEncoder*
+VCMCodecDataBase::SetEncoder(const VideoCodec* settings,
+                             VCMEncodedFrameCallback* VCMencodedFrameCallback)
+
+{
+    // if encoder exists, will destroy it and create new one
+    DeleteEncoder();
+
+    if (settings->plType == _externalPayloadType)
+    {
+        // External encoder
+        _ptrEncoder = new VCMGenericEncoder(*_externalEncoder, _internalSource);
+        _currentEncIsExternal = true;
+    }
+    else
+    {
+        _ptrEncoder = CreateEncoder(settings->codecType);
+        _currentEncIsExternal = false;
+    }
+    VCMencodedFrameCallback->SetPayloadType(settings->plType);
+
+    if (_ptrEncoder == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to create encoder: %s.",
+                     settings->plName);
+        return NULL;
+    }
+    if (_ptrEncoder->InitEncode(settings, _numberOfCores, _maxPayloadSize) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to initialize encoder: %s.",
+                     settings->plName);
+        DeleteEncoder();
+        return NULL;
+    }
+    else if (_ptrEncoder->RegisterEncodeCallback(VCMencodedFrameCallback) < 0)
+    {
+        DeleteEncoder();
+        return NULL;
+    }
+    // Intentionally don't check return value since the encoder registration
+    // shouldn't fail because the codec doesn't support changing the
+    // periodic key frame setting.
+    _ptrEncoder->SetPeriodicKeyFrames(_periodicKeyFrames);
+    return _ptrEncoder;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::SetPeriodicKeyFrames(bool enable)
+{
+    _periodicKeyFrames = enable;
+    if (_ptrEncoder != NULL)
+    {
+        return _ptrEncoder->SetPeriodicKeyFrames(_periodicKeyFrames);
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                       WebRtc_UWord32 numberOfCores,
+                                       bool requireKeyFrame)
+{
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCoding, VCMId(_id),
+                 "Codec: %s, Payload type %d, Height %d, Width %d, Bitrate %d, Framerate %d.",
+                 receiveCodec->plName, receiveCodec->plType,
+                 receiveCodec->height, receiveCodec->width,
+                 receiveCodec->startBitrate, receiveCodec->maxFramerate);
+
+    // check if payload value already exists, if so  - erase old and insert new
+    DeRegisterReceiveCodec(receiveCodec->plType);
+    if (receiveCodec->codecType == kVideoCodecUnknown)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    VideoCodec* newReceiveCodec = new VideoCodec(*receiveCodec);
+    _decMap[receiveCodec->plType] =
+        new VCMDecoderMapItem(newReceiveCodec, numberOfCores, requireKeyFrame);
+
+    return VCM_OK;
+}
+
+WebRtc_Word32 VCMCodecDataBase::DeRegisterReceiveCodec(
+    WebRtc_UWord8 payloadType)
+{
+    DecoderMap::iterator it = _decMap.find(payloadType);
+    if (it == _decMap.end())
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    VCMDecoderMapItem* decItem = (*it).second;
+    delete decItem->_settings;
+    delete decItem;
+    _decMap.erase(it);
+    if (_receiveCodec.plType == payloadType)
+    {
+        // This codec is currently in use.
+        memset(&_receiveCodec, 0, sizeof(VideoCodec));
+        _currentDecIsExternal = false;
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::ResetReceiver()
+{
+    ReleaseDecoder(_ptrDecoder);
+    _ptrDecoder = NULL;
+    memset(&_receiveCodec, 0, sizeof(VideoCodec));
+    DecoderMap::iterator it = _decMap.begin();
+    while (it != _decMap.end()) {
+        if ((*it).second->_settings != NULL)
+        {
+            delete (*it).second->_settings;
+        }
+        delete (*it).second;
+        _decMap.erase(it);
+        it = _decMap.begin();
+    }
+    ExternalDecoderMap::iterator exterit = _decExternalMap.begin();
+    while (exterit != _decExternalMap.end()) {
+        delete (*exterit).second;
+        _decExternalMap.erase(exterit);
+        exterit = _decExternalMap.begin();
+    }
+
+    _currentDecIsExternal = false;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::DeRegisterExternalDecoder(WebRtc_UWord8 payloadType)
+{
+    ExternalDecoderMap::iterator it = _decExternalMap.find(payloadType);
+    if (it == _decExternalMap.end())
+    {
+        // Not found
+        return VCM_PARAMETER_ERROR;
+    }
+    if (_receiveCodec.plType == payloadType)
+    {
+        // Release it if it was registered and in use
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = NULL;
+    }
+    DeRegisterReceiveCodec(payloadType);
+    delete (*it).second;
+    _decExternalMap.erase(it);
+    return VCM_OK;
+}
+
+// Add the external decoder object to the list of external decoders.
+// Won't be registered as a receive codec until RegisterReceiveCodec is called.
+WebRtc_Word32
+VCMCodecDataBase::RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                          WebRtc_UWord8 payloadType,
+                                          bool internalRenderTiming)
+{
+    // check if payload value already exists; if so, erase the old entry and insert the new one
+    VCMExtDecoderMapItem* extDecoder = new VCMExtDecoderMapItem(externalDecoder,
+                                                                payloadType,
+                                                                internalRenderTiming);
+    if (extDecoder == NULL)
+    {
+        return VCM_MEMORY;
+    }
+    DeRegisterExternalDecoder(payloadType);
+    _decExternalMap[payloadType] = extDecoder;
+
+    return VCM_OK;
+}
+
+bool
+VCMCodecDataBase::DecoderRegistered() const
+{
+    return !_decMap.empty();
+}
+
+WebRtc_Word32
+VCMCodecDataBase::ReceiveCodec(VideoCodec* currentReceiveCodec) const
+{
+    if (_ptrDecoder == NULL)
+    {
+        return VCM_NO_FRAME_DECODED;
+    }
+    memcpy(currentReceiveCodec, &_receiveCodec, sizeof(VideoCodec));
+    return VCM_OK;
+}
+
+VideoCodecType
+VCMCodecDataBase::ReceiveCodec() const
+{
+    if (_ptrDecoder == NULL)
+    {
+        return kVideoCodecUnknown;
+    }
+    return _receiveCodec.codecType;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::SetDecoder(WebRtc_UWord8 payloadType,
+                             VCMDecodedFrameCallback& callback)
+{
+    if (payloadType == _receiveCodec.plType || payloadType == 0)
+    {
+        return _ptrDecoder;
+    }
+    // check for an existing decoder; if one exists, delete it
+    if (_ptrDecoder)
+    {
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = NULL;
+        memset(&_receiveCodec, 0, sizeof(VideoCodec));
+    }
+    _ptrDecoder = CreateAndInitDecoder(payloadType, _receiveCodec,
+                                       _currentDecIsExternal);
+    if (_ptrDecoder == NULL)
+    {
+        return NULL;
+    }
+    if (_ptrDecoder->RegisterDecodeCompleteCallback(&callback) < 0)
+    {
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = NULL;
+        memset(&_receiveCodec, 0, sizeof(VideoCodec));
+        return NULL;
+    }
+    return _ptrDecoder;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::CreateAndInitDecoder(WebRtc_UWord8 payloadType,
+                                       VideoCodec& newCodec,
+                                       bool &external) const
+{
+    VCMDecoderMapItem* decoderItem = FindDecoderItem(payloadType);
+    if (decoderItem == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id),
+                     "Unknown payload type: %u", payloadType);
+        return NULL;
+    }
+    VCMGenericDecoder* ptrDecoder = NULL;
+    VCMExtDecoderMapItem* externalDecItem = FindExternalDecoderItem(
+        payloadType);
+    if (externalDecItem != NULL)
+    {
+        // External codec
+        ptrDecoder = new VCMGenericDecoder(
+            *externalDecItem->_externalDecoderInstance,
+            _id,
+            true);
+        external = true;
+    }
+    else
+    {
+        // create decoder
+        ptrDecoder = CreateDecoder(decoderItem->_settings->codecType);
+        external = false;
+    }
+    if (ptrDecoder == NULL)
+    {
+        return NULL;
+    }
+
+    if (ptrDecoder->InitDecode(decoderItem->_settings,
+                               decoderItem->_numberOfCores,
+                               decoderItem->_requireKeyFrame) < 0)
+    {
+        ReleaseDecoder(ptrDecoder);
+        return NULL;
+    }
+    memcpy(&newCodec, decoderItem->_settings, sizeof(VideoCodec));
+    return ptrDecoder;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::CreateDecoderCopy() const
+{
+    if (_ptrDecoder == NULL)
+    {
+        return NULL;
+    }
+    VideoDecoder* decoderCopy = _ptrDecoder->_decoder.Copy();
+    if (decoderCopy == NULL)
+    {
+        return NULL;
+    }
+    return new VCMGenericDecoder(*decoderCopy, _id, _ptrDecoder->External());
+}
+
+void
+VCMCodecDataBase::CopyDecoder(const VCMGenericDecoder& decoder)
+{
+    VideoDecoder* decoderCopy = decoder._decoder.Copy();
+    if (decoderCopy != NULL)
+    {
+        VCMDecodedFrameCallback* cb = _ptrDecoder->_callback;
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = new VCMGenericDecoder(*decoderCopy, _id,
+                                            decoder.External());
+        if (cb && _ptrDecoder->RegisterDecodeCompleteCallback(cb))
+        {
+            assert(false);
+        }
+    }
+}
+
+bool
+VCMCodecDataBase::RenderTiming() const
+{
+    bool renderTiming = true;
+    if (_currentDecIsExternal)
+    {
+        VCMExtDecoderMapItem* extItem = FindExternalDecoderItem(_receiveCodec.plType);
+        renderTiming = (extItem != NULL) ? extItem->_internalRenderTiming : true;  // FindExternalDecoderItem may return NULL.
+    }
+    return renderTiming;
+}
+
+void
+VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const
+{
+    if (decoder != NULL)
+    {
+        // _decoder is a reference member and can never be NULL; no assert needed.
+        decoder->Release();
+        if (!decoder->External())
+        {
+            delete &decoder->_decoder;
+        }
+        delete decoder;
+    }
+}
+
+VCMDecoderMapItem*
+VCMCodecDataBase::FindDecoderItem(WebRtc_UWord8 payloadType) const
+{
+    DecoderMap::const_iterator it = _decMap.find(payloadType);
+    if (it != _decMap.end())
+    {
+        return (*it).second;
+    }
+    return NULL;
+}
+
+VCMExtDecoderMapItem*
+VCMCodecDataBase::FindExternalDecoderItem(WebRtc_UWord8 payloadType) const
+{
+    ExternalDecoderMap::const_iterator it = _decExternalMap.find(payloadType);
+    if (it != _decExternalMap.end())
+    {
+        return (*it).second;
+    }
+    return NULL;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::CreateDecoder(VideoCodecType type) const
+{
+    switch(type)
+    {
+#ifdef VIDEOCODEC_VP8
+    case kVideoCodecVP8:
+        return new VCMGenericDecoder(*(VP8Decoder::Create()), _id);
+#endif
+#ifdef VIDEOCODEC_I420
+    case kVideoCodecI420:
+         return new VCMGenericDecoder(*(new I420Decoder), _id);
+#endif
+    default:
+        return NULL;
+    }
+}
+}
diff --git a/trunk/src/modules/video_coding/main/source/codec_database.h b/trunk/src/modules/video_coding/main/source/codec_database.h
new file mode 100644
index 0000000..aab8229
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/codec_database.h
@@ -0,0 +1,210 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
+
+#include <map>
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/source/generic_decoder.h"
+#include "modules/video_coding/main/source/generic_encoder.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// Miscellaneous codec-database constants.
+enum VCMCodecDBProperties
+{
+    kDefaultPayloadSize = 1440  // Default max payload size in bytes.
+};
+
+// Book-keeping entry for a registered receive codec: the codec settings it
+// was registered with plus its decode configuration.
+class VCMDecoderMapItem {
+public:
+    VCMDecoderMapItem(VideoCodec* settings,
+                      WebRtc_UWord32 numberOfCores,
+                      bool requireKeyFrame);
+
+    VideoCodec*     _settings;        // Registered codec settings.
+    WebRtc_UWord32  _numberOfCores;   // Cores available to the decoder.
+    bool            _requireKeyFrame; // True if decoding must start on a key frame.
+};
+
+// Book-keeping entry for an externally supplied decoder instance and the
+// payload type it was registered under.
+class VCMExtDecoderMapItem {
+public:
+    VCMExtDecoderMapItem(VideoDecoder* externalDecoderInstance,
+                         WebRtc_UWord8 payloadType,
+                         bool internalRenderTiming);
+
+    WebRtc_UWord8   _payloadType;             // Payload type the decoder handles.
+    VideoDecoder*   _externalDecoderInstance; // Application-owned decoder instance.
+    bool            _internalRenderTiming;    // True if the decoder handles render timing.
+};
+
+/*******************************/
+/* VCMCodecDataBase class      */
+/*******************************/
+class VCMCodecDataBase
+{
+public:
+    VCMCodecDataBase(WebRtc_Word32 id);
+    ~VCMCodecDataBase();
+    /**
+    * Release codecdatabase - release all memory for both send and receive side
+    */
+    WebRtc_Word32 Reset();
+    /**
+    * Sender Side
+    */
+    /**
+    * Returns the number of supported codecs (or -1 in case of error).
+    */
+    static WebRtc_UWord8 NumberOfCodecs();
+    /**
+    * Get supported codecs with ID
+    * Input Values:
+    *       listnr    : Requested codec id number
+    *       codec_inst: Pointer to the struct in which the returned codec information is copied
+    * Return Values: 0 if successful, otherwise an error code
+    */
+    static WebRtc_Word32 Codec(WebRtc_UWord8 listId, VideoCodec* settings);
+    static WebRtc_Word32 Codec(VideoCodecType codecType, VideoCodec* settings);
+    /**
+    * Reset Sender side
+    */
+    WebRtc_Word32 ResetSender();
+    /**
+    * Setting the sender side codec and initiating the desired codec given the VideoCodec
+    * struct.
+    * Return Value: 0 if the codec and the settings are supported, otherwise an error code
+    */
+    WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
+                                  WebRtc_UWord32 numberOfCores,
+                                  WebRtc_UWord32 maxPayloadSize);
+    /**
+    * Get current send side codec. Relevant for internal codecs only.
+    */
+    WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const;
+    /**
+    * Get current send side codec type. Relevant for internal codecs only.
+    */
+    VideoCodecType SendCodec() const;
+    /**
+    * Register external encoder - current assumption - if one is registered then it will also
+    * be used, and therefore it is also initialized
+    * Return value: A pointer to the encoder on success, or null, in case of an error.
+    */
+    WebRtc_Word32 DeRegisterExternalEncoder(WebRtc_UWord8 payloadType, bool& wasSendCodec);
+    WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                        WebRtc_UWord8 payloadType,
+                                        bool internalSource);
+    /**
+    * Returns an encoder given a payload name - to be used with internal encoders only.
+    * Special cases:
+    *    Encoder exists -  If payload matches, returns existing one, otherwise,
+    *    deletes existing one and creates new one.
+    *    No match found / Error - returns NULL.
+    */
+    VCMGenericEncoder* SetEncoder(const VideoCodec* settings,
+                                  VCMEncodedFrameCallback* VCMencodedFrameCallback);
+
+    WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
+
+    bool InternalSource() const;
+
+    /*
+    * Receiver Side
+    */
+    WebRtc_Word32 ResetReceiver();
+    /**
+    * Register external decoder/render object
+    */
+    WebRtc_Word32 DeRegisterExternalDecoder(WebRtc_UWord8 payloadType);
+    WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                        WebRtc_UWord8 payloadType,
+                                        bool internalRenderTiming);
+
+    bool DecoderRegistered() const;
+    /**
+    * Register receive codec
+    */
+    WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                     WebRtc_UWord32 numberOfCores,
+                                     bool requireKeyFrame);
+    WebRtc_Word32 DeRegisterReceiveCodec(WebRtc_UWord8 payloadType);
+    /**
+    * Get current receive side codec. Relevant for internal codecs only.
+    */
+    WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const;
+    /**
+    * Get current receive side codec type. Relevant for internal codecs only.
+    */
+    VideoCodecType ReceiveCodec() const;
+    /**
+    * Returns a decoder which matches a payload type.
+    * Special cases:
+    *    Decoder exists -  If payload matches, returns existing one, otherwise, deletes
+    *    existing one, and creates new one.
+    *    No match found / Error - returns NULL.
+    */
+    VCMGenericDecoder* SetDecoder(WebRtc_UWord8 payloadType, VCMDecodedFrameCallback& callback);
+
+    VCMGenericDecoder* CreateAndInitDecoder(WebRtc_UWord8 payloadType,
+                                            VideoCodec& newCodec,
+                                            bool &external) const;
+
+    VCMGenericDecoder* CreateDecoderCopy() const;
+
+    void ReleaseDecoder(VCMGenericDecoder* decoder) const;
+
+    void CopyDecoder(const VCMGenericDecoder& decoder);
+
+    // Returns false only when an external decoder is in use and it reported
+    // that it does not handle render timing internally.
+    bool RenderTiming() const;
+
+protected:
+    /**
+    * Create an internal Encoder given a codec type
+    */
+    VCMGenericEncoder* CreateEncoder(const VideoCodecType type) const;
+
+    void DeleteEncoder();
+    /*
+    * Create an internal Decoder given a codec type
+    */
+    VCMGenericDecoder* CreateDecoder(VideoCodecType type) const;
+
+    // Map lookups; both return NULL when no entry exists for the payload type.
+    VCMDecoderMapItem* FindDecoderItem(WebRtc_UWord8 payloadType) const;
+
+    VCMExtDecoderMapItem* FindExternalDecoderItem(WebRtc_UWord8 payloadType) const;
+
+private:
+    typedef std::map<uint8_t, VCMDecoderMapItem*> DecoderMap;
+    typedef std::map<uint8_t, VCMExtDecoderMapItem*> ExternalDecoderMap;
+    WebRtc_Word32 _id;
+    WebRtc_UWord32 _numberOfCores;
+    WebRtc_UWord32 _maxPayloadSize;
+    bool _periodicKeyFrames;
+    bool _currentEncIsExternal;
+    VideoCodec _sendCodec;
+    VideoCodec _receiveCodec;
+    WebRtc_UWord8 _externalPayloadType;
+    VideoEncoder* _externalEncoder;
+    bool _internalSource;
+    VCMGenericEncoder* _ptrEncoder;
+    VCMGenericDecoder* _ptrDecoder;
+    bool _currentDecIsExternal;
+    DecoderMap _decMap;
+    ExternalDecoderMap _decExternalMap;
+}; // end of VCMCodecDataBase class definition
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
diff --git a/trunk/src/modules/video_coding/main/source/codec_timer.cc b/trunk/src/modules/video_coding/main/source/codec_timer.cc
new file mode 100644
index 0000000..1d112fa
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/codec_timer.cc
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "codec_timer.h"
+
+#include <assert.h>
+
+namespace webrtc
+{
+
+// All filter state set here is also (re)initialized by Reset(); the member
+// initializers keep the object well-defined regardless.
+VCMCodecTimer::VCMCodecTimer()
+:
+_filteredMax(0),
+_firstDecodeTime(true),
+_shortMax(0),
+_history()
+{
+    Reset();
+}
+
+// Computes the elapsed decode time for this sample, feeds it into the
+// max filter, and returns the elapsed time in milliseconds.
+WebRtc_Word32 VCMCodecTimer::StopTimer(WebRtc_Word64 startTimeMs, WebRtc_Word64 nowMs)
+{
+    const WebRtc_Word32 elapsedMs = static_cast<WebRtc_Word32>(nowMs - startTimeMs);
+    MaxFilter(elapsedMs, nowMs);
+    return elapsedMs;
+}
+
+// Clears the filter state and invalidates every history bucket
+// (timeMs == -1 marks a bucket as unused).
+void VCMCodecTimer::Reset()
+{
+    _filteredMax = 0;
+    _firstDecodeTime = true;
+    _shortMax = 0;
+    for (int idx = 0; idx < MAX_HISTORY_SIZE; ++idx)
+    {
+        _history[idx].shortMax = 0;
+        _history[idx].timeMs = -1;
+    }
+}
+
+// Update the max-value filter. The very first sample only clears the
+// first-decode flag; filtering starts from the second sample onward.
+void VCMCodecTimer::MaxFilter(WebRtc_Word32 decodeTime, WebRtc_Word64 nowMs)
+{
+    if (_firstDecodeTime)
+    {
+        _firstDecodeTime = false;
+        return;
+    }
+    UpdateMaxHistory(decodeTime, nowMs);
+    ProcessHistory(nowMs);
+}
+
+// Folds a new decode-time sample into the bucketed history. Samples arriving
+// within the current SHORT_FILTER_MS window only update the running short
+// max; when the window has expired, the history is shifted down and a new
+// bucket is opened at index 0.
+void
+VCMCodecTimer::UpdateMaxHistory(WebRtc_Word32 decodeTime, WebRtc_Word64 now)
+{
+    if (_history[0].timeMs >= 0 &&
+        now - _history[0].timeMs < SHORT_FILTER_MS)
+    {
+        // Still inside the current window: just track the max.
+        if (decodeTime > _shortMax)
+        {
+            _shortMax = decodeTime;
+        }
+    }
+    else
+    {
+        // Only add a new value to the history once a second
+        if(_history[0].timeMs == -1)
+        {
+            // First, no shift
+            _shortMax = decodeTime;
+        }
+        else
+        {
+            // Shift
+            for(int i = (MAX_HISTORY_SIZE - 2); i >= 0 ; i--)
+            {
+                _history[i+1].shortMax = _history[i].shortMax;
+                _history[i+1].timeMs = _history[i].timeMs;
+            }
+        }
+        if (_shortMax == 0)
+        {
+            // No max was recorded in the closed window; seed the new bucket
+            // with the current sample.
+            _shortMax = decodeTime;
+        }
+
+        _history[0].shortMax = _shortMax;
+        _history[0].timeMs = now;
+        _shortMax = 0;
+    }
+}
+
+// Recomputes _filteredMax as the largest short-window max among the current
+// running max and all history buckets that are still inside the overall
+// window (MAX_HISTORY_SIZE * SHORT_FILTER_MS ms). Buckets are stored newest
+// first, so iteration stops at the first unused or expired bucket.
+void
+VCMCodecTimer::ProcessHistory(WebRtc_Word64 nowMs)
+{
+    _filteredMax = _shortMax;
+    if (_history[0].timeMs == -1)
+    {
+        return;
+    }
+    for (int i=0; i < MAX_HISTORY_SIZE; i++)
+    {
+        if (_history[i].timeMs == -1)
+        {
+            break;
+        }
+        if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS)
+        {
+            // This sample (and all samples after this) is too old
+            break;
+        }
+        if (_history[i].shortMax > _filteredMax)
+        {
+            // This sample is the largest one this far into the history
+            _filteredMax = _history[i].shortMax;
+        }
+    }
+}
+
+// Get the maximum observed decode time within the filter window, in ms.
+// The frame type does not influence the estimate.
+WebRtc_Word32 VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const
+{
+    return _filteredMax;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/codec_timer.h b/trunk/src/modules/video_coding/main/source/codec_timer.h
new file mode 100644
index 0000000..e03c5bf
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/codec_timer.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+namespace webrtc
+{
+
+// MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds
+#define MAX_HISTORY_SIZE 20
+#define SHORT_FILTER_MS 1000
+
+// One history bucket: the maximum decode time observed during one
+// SHORT_FILTER_MS window and the time the bucket was opened.
+// timeMs == -1 marks an unused bucket.
+class VCMShortMaxSample
+{
+public:
+    // Fixed: stray semicolon after the inline constructor body removed
+    // (it formed an empty member declaration, rejected by pedantic
+    // pre-C++11 compilers).
+    VCMShortMaxSample() : shortMax(0), timeMs(-1) {}
+
+    WebRtc_Word32     shortMax;
+    WebRtc_Word64     timeMs;
+};
+
+// Tracks per-frame decode times and exposes the maximum observed over a
+// sliding window of MAX_HISTORY_SIZE * SHORT_FILTER_MS milliseconds.
+class VCMCodecTimer
+{
+public:
+    VCMCodecTimer();
+
+    // Feeds the sample into the max filter and returns the elapsed decode
+    // time (nowMs - startTimeMs) in milliseconds.
+    WebRtc_Word32 StopTimer(WebRtc_Word64 startTimeMs, WebRtc_Word64 nowMs);
+
+    // Empty the list of timers.
+    void Reset();
+
+    // Get the required decode time in ms (the filtered max over the window).
+    WebRtc_Word32 RequiredDecodeTimeMs(FrameType frameType) const;
+
+private:
+    void UpdateMaxHistory(WebRtc_Word32 decodeTime, WebRtc_Word64 now);
+    void MaxFilter(WebRtc_Word32 newTime, WebRtc_Word64 nowMs);
+    void ProcessHistory(WebRtc_Word64 nowMs);
+
+    WebRtc_Word32                     _filteredMax;   // Max over the whole window.
+    bool                              _firstDecodeTime; // True until the first sample.
+    WebRtc_Word32                     _shortMax;      // Running max of the open window.
+    VCMShortMaxSample                 _history[MAX_HISTORY_SIZE]; // Newest first.
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
diff --git a/trunk/src/modules/video_coding/main/source/content_metrics_processing.cc b/trunk/src/modules/video_coding/main/source/content_metrics_processing.cc
new file mode 100644
index 0000000..99160c9
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/content_metrics_processing.cc
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/content_metrics_processing.h"
+
+#include <math.h>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+
+namespace webrtc {
+//////////////////////////////////
+/// VCMContentMetricsProcessing //
+//////////////////////////////////
+
+// Both average containers are heap-allocated here and released in the
+// destructor. recursive_avg_factor_ is recomputed by UpdateFrameRate().
+VCMContentMetricsProcessing::VCMContentMetricsProcessing()
+    : recursive_avg_factor_(1 / 150.0f),  // matched to  30fps.
+      frame_cnt_uniform_avg_(0),
+      avg_motion_level_(0.0f),
+      avg_spatial_level_(0.0f) {
+  recursive_avg_ = new VideoContentMetrics();
+  uniform_avg_ = new VideoContentMetrics();
+}
+
+// Frees the average containers allocated in the constructor.
+VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
+  delete recursive_avg_;
+  delete uniform_avg_;
+}
+
+// Clears both averaged metric sets and the short-term accumulators.
+// Always succeeds.
+int VCMContentMetricsProcessing::Reset() {
+  frame_cnt_uniform_avg_ = 0;
+  avg_motion_level_ = 0.0f;
+  avg_spatial_level_ = 0.0f;
+  recursive_avg_->Reset();
+  uniform_avg_->Reset();
+  return VCM_OK;
+}
+
+// Recomputes the recursive-averaging factor so the long-term average spans
+// roughly kQmMinIntervalMs at the given frame rate.
+void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
+  // Guard against a zero frame rate: dividing by zero would make the factor
+  // infinite and poison all subsequent recursive averages.
+  if (frameRate == 0) {
+    return;
+  }
+  // Update factor for recursive averaging.
+  recursive_avg_factor_ = static_cast<float> (1000.0f) /
+      static_cast<float>(frameRate *  kQmMinIntervalMs);
+}
+
+// Returns the recursively (long-term) averaged metrics. Never NULL.
+VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
+  return recursive_avg_;
+}
+
+// Returns the uniform (short-term) averages, or NULL when no frames have
+// been aggregated since the last reset. Only the motion magnitude and
+// spatial prediction error are averaged.
+VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
+  if (frame_cnt_uniform_avg_ == 0) {
+    return NULL;
+  }
+  const float frame_count = static_cast<float>(frame_cnt_uniform_avg_);
+  uniform_avg_->motion_magnitude = avg_motion_level_ / frame_count;
+  uniform_avg_->spatial_pred_err = avg_spatial_level_ / frame_count;
+  return uniform_avg_;
+}
+
+// Zeroes the accumulators that back the uniform (short-term) average.
+void VCMContentMetricsProcessing::ResetShortTermAvgData() {
+  frame_cnt_uniform_avg_ = 0;
+  avg_motion_level_ = 0.0f;
+  avg_spatial_level_ = 0.0f;
+}
+
+// Entry point for new per-frame content metrics. A NULL pointer is treated
+// as a no-op and reported as success.
+int VCMContentMetricsProcessing::UpdateContentData(
+    const VideoContentMetrics *contentMetrics) {
+  return (contentMetrics == NULL) ? VCM_OK
+                                  : ProcessContent(contentMetrics);
+}
+
+// Feeds the metrics into both running averages. Always succeeds.
+int VCMContentMetricsProcessing::ProcessContent(
+    const VideoContentMetrics *contentMetrics) {
+  // Update the recursive averaged metrics: average is over longer window
+  // of time: over QmMinIntervalMs ms.
+  UpdateRecursiveAvg(contentMetrics);
+  // Update the uniform averaged metrics: average is over shorter window
+  // of time: based on ~RTCP reports.
+  UpdateUniformAvg(contentMetrics);
+  return VCM_OK;
+}
+
+// Accumulates motion and spatial metrics for the uniform (short-term)
+// average; ShortTermAvgData() performs the division by the frame count.
+void VCMContentMetricsProcessing::UpdateUniformAvg(
+    const VideoContentMetrics *contentMetrics) {
+  ++frame_cnt_uniform_avg_;
+  avg_motion_level_ += contentMetrics->motion_magnitude;
+  avg_spatial_level_ += contentMetrics->spatial_pred_err;
+}
+
+// Exponential (recursive) averaging of each metric:
+//   avg = (1 - a) * avg + a * sample, with a = recursive_avg_factor_.
+void VCMContentMetricsProcessing::UpdateRecursiveAvg(
+    const VideoContentMetrics *contentMetrics) {
+
+  // Spatial metrics: 2x2, 1x2(H), 2x1(V).
+  recursive_avg_->spatial_pred_err = (1 - recursive_avg_factor_) *
+      recursive_avg_->spatial_pred_err +
+      recursive_avg_factor_ * contentMetrics->spatial_pred_err;
+
+  recursive_avg_->spatial_pred_err_h = (1 - recursive_avg_factor_) *
+      recursive_avg_->spatial_pred_err_h +
+      recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
+
+  recursive_avg_->spatial_pred_err_v = (1 - recursive_avg_factor_) *
+      recursive_avg_->spatial_pred_err_v +
+      recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
+
+  // Motion metric: Derived from NFD (normalized frame difference).
+  recursive_avg_->motion_magnitude = (1 - recursive_avg_factor_) *
+      recursive_avg_->motion_magnitude +
+      recursive_avg_factor_ * contentMetrics->motion_magnitude;
+}
+}  // end of namespace
diff --git a/trunk/src/modules/video_coding/main/source/content_metrics_processing.h b/trunk/src/modules/video_coding/main/source/content_metrics_processing.h
new file mode 100644
index 0000000..0317add
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/content_metrics_processing.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+struct VideoContentMetrics;
+
+// QM interval time (in ms)
+enum {
+  kQmMinIntervalMs = 10000
+};
+
+// Flag for NFD metric vs motion metric
+enum {
+  kNfdMetric = 1
+};
+
+/**********************************/
+/* Content Metrics Processing     */
+/**********************************/
+class VCMContentMetricsProcessing {
+ public:
+  VCMContentMetricsProcessing();
+  ~VCMContentMetricsProcessing();
+
+  // Update class with latest metrics. NULL input is a no-op.
+  int UpdateContentData(const VideoContentMetrics *contentMetrics);
+
+  // Reset the short-term averaged content data.
+  void ResetShortTermAvgData();
+
+  // Initialize.
+  int Reset();
+
+  // Inform class of current frame rate.
+  void UpdateFrameRate(uint32_t frameRate);
+
+  // Returns the long-term averaged content data: recursive average over longer
+  // time scale.
+  VideoContentMetrics* LongTermAvgData();
+
+  // Returns the short-term averaged content data: uniform average over
+  // shorter time scale. NULL if no frames have been aggregated.
+  VideoContentMetrics* ShortTermAvgData();
+
+ private:
+  // Compute working average.
+  int ProcessContent(const VideoContentMetrics *contentMetrics);
+
+  // Update the recursive averaged metrics: longer time average (~5/10 secs).
+  void UpdateRecursiveAvg(const VideoContentMetrics *contentMetrics);
+
+  // Update the uniform averaged metrics: shorter time average (~RTCP report).
+  void UpdateUniformAvg(const VideoContentMetrics *contentMetrics);
+
+  VideoContentMetrics* recursive_avg_;  // Owned; long-term average.
+  VideoContentMetrics* uniform_avg_;    // Owned; short-term average.
+  float recursive_avg_factor_;          // Weight of a new sample.
+  uint32_t frame_cnt_uniform_avg_;      // Frames since last short-term reset.
+  float avg_motion_level_;              // Short-term motion accumulator.
+  float avg_spatial_level_;             // Short-term spatial accumulator.
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
diff --git a/trunk/src/modules/video_coding/main/source/decoding_state.cc b/trunk/src/modules/video_coding/main/source/decoding_state.cc
new file mode 100644
index 0000000..eea0a2c
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/decoding_state.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/decoding_state.h"
+
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+// Starts in the initial state: nothing decoded yet, full sync assumed, all
+// codec-specific ids set to their kNo* sentinels.
+VCMDecodingState::VCMDecodingState()
+    : sequence_num_(0),
+      time_stamp_(0),
+      picture_id_(kNoPictureId),
+      temporal_id_(kNoTemporalIdx),
+      tl0_pic_id_(kNoTl0PicIdx),
+      full_sync_(true),
+      init_(true) {}
+
+VCMDecodingState::~VCMDecodingState() {}
+
+// Returns the object to the same state as a freshly constructed one.
+void VCMDecodingState::Reset() {
+  // TODO(mikhal): Verify - not always would want to reset the sync
+  sequence_num_ = 0;
+  time_stamp_ = 0;
+  picture_id_ = kNoPictureId;
+  temporal_id_ = kNoTemporalIdx;
+  tl0_pic_id_ = kNoTl0PicIdx;
+  full_sync_ = true;
+  init_ = true;
+}
+
+// Timestamp of the last decoded frame (0 while in the initial state).
+uint32_t VCMDecodingState::time_stamp() const {
+  return time_stamp_;
+}
+
+// Highest sequence number of the last decoded frame.
+uint16_t VCMDecodingState::sequence_num() const {
+  return sequence_num_;
+}
+
+// True if |frame|'s timestamp is not newer than the last decoded timestamp
+// (wrap-around handled by LatestTimestamp). Always false in initial state.
+bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const {
+  assert(frame != NULL);
+  if (init_)
+    return false;
+  return (LatestTimestamp(time_stamp_, frame->TimeStamp(), NULL)
+          == time_stamp_);
+}
+
+// True if |packet|'s timestamp is not newer than the last decoded timestamp
+// (wrap-around handled by LatestTimestamp). Always false in initial state.
+bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const {
+  assert(packet != NULL);
+  if (init_)
+    return false;
+  return (LatestTimestamp(time_stamp_, packet->timestamp, NULL)
+           == time_stamp_);
+}
+
+// Records |frame| as the last decoded frame and leaves the initial state.
+// Sync is re-evaluated first, based on the state prior to this frame.
+void VCMDecodingState::SetState(const VCMFrameBuffer* frame) {
+  assert(frame != NULL && frame->GetHighSeqNum() >= 0);
+  UpdateSyncState(frame);
+  sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum());
+  time_stamp_ = frame->TimeStamp();
+  picture_id_ = frame->PictureId();
+  temporal_id_ = frame->TemporalId();
+  tl0_pic_id_ = frame->Tl0PicId();
+  init_ = false;
+}
+
+// Sets the state to just before |frame|, so that |frame| itself will be
+// considered continuous. Picture id wraps at the 15-bit boundary (0x7FFF)
+// and tl0 pic id at the 8-bit boundary (0x00FF).
+void VCMDecodingState::SetStateOneBack(const VCMFrameBuffer* frame) {
+  assert(frame != NULL && frame->GetHighSeqNum() >= 0);
+  sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum()) - 1u;
+  time_stamp_ = frame->TimeStamp() - 1u;
+  temporal_id_ = frame->TemporalId();
+  if (frame->PictureId() != kNoPictureId) {
+    if (frame->PictureId() == 0)
+      picture_id_ = 0x7FFF;  // Wrap backwards from 0.
+    else
+      picture_id_ =  frame->PictureId() - 1;
+  }
+  if (frame->Tl0PicId() != kNoTl0PicIdx) {
+    if (frame->Tl0PicId() == 0)
+      tl0_pic_id_ = 0x00FF;  // Wrap backwards from 0.
+    else
+      tl0_pic_id_ = frame->Tl0PicId() - 1;
+  }
+  init_ = false;
+}
+
+// Accounts for packets of the last decoded frame that arrive after the
+// frame was decoded; only the stored sequence number can move forward.
+void VCMDecodingState::UpdateOldPacket(const VCMPacket* packet) {
+  assert(packet != NULL);
+  if (packet->timestamp == time_stamp_) {
+    // Late packet belonging to the last decoded frame - make sure we update the
+    // last decoded sequence number.
+    sequence_num_ = LatestSequenceNumber(packet->seqNum, sequence_num_, NULL);
+  }
+}
+
+// Overrides the stored sequence number without touching any other state.
+void VCMDecodingState::SetSeqNum(uint16_t new_seq_num) {
+  sequence_num_ = new_seq_num;
+}
+
+// True while no frame has been decoded since construction/Reset().
+bool VCMDecodingState::init() const {
+  return init_;
+}
+
+// True when in full sync - all temporal layers are decodable.
+bool VCMDecodingState::full_sync() const {
+  return full_sync_;
+}
+
+// Re-evaluates the full-sync flag for an incoming frame. No-op while in the
+// initial state. Streams without temporal layers, key frames and layer-sync
+// frames always restore sync.
+void VCMDecodingState::UpdateSyncState(const VCMFrameBuffer* frame) {
+  if (init_)
+    return;
+  if (frame->TemporalId() == kNoTemporalIdx ||
+      frame->Tl0PicId() == kNoTl0PicIdx) {
+    full_sync_ = true;
+  } else if (frame->FrameType() == kVideoFrameKey || frame->LayerSync()) {
+    full_sync_ = true;
+  } else if (full_sync_) {
+    // Verify that we are still in sync.
+    // Sync will be broken if continuity is true for layers but not for the
+    // other methods (PictureId and SeqNum).
+    if (!ContinuousPictureId(frame->PictureId()) &&
+        !ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()))) {
+      // Non-layered methods have failed.
+      full_sync_ = false;
+    }
+  }
+}
+
+bool VCMDecodingState::ContinuousFrame(const VCMFrameBuffer* frame) const {
+  // Check continuity based on the following hierarchy:
+  // - Temporal layers (stop here if out of sync).
+  // - Picture Id when available.
+  // - Sequence numbers.
+  // Return true when in initial state.
+  // Note that when a method is not applicable it will return false.
+  assert(frame != NULL);
+  if (init_)
+    return true;
+
+  if (!ContinuousLayer(frame->TemporalId(), frame->Tl0PicId())) {
+    // Base layers are not continuous or temporal layers are inactive.
+    // In the presence of temporal layers, check for Picture ID/sequence number
+    // continuity if sync can be restored by this frame.
+    if (!full_sync_ && !frame->LayerSync())
+      return false;
+    else if (!ContinuousPictureId(frame->PictureId()))
+      return ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()));
+  }
+  // Layer continuity held, or picture id continuity held.
+  return true;
+}
+
+// True if |picture_id| is exactly one ahead of the stored picture id, with
+// wrap-around. The 15-bit interpretation is chosen when the stored id is
+// >= 0x80 (it could not have fit in 7 bits); otherwise 7-bit wrap is used.
+bool VCMDecodingState::ContinuousPictureId(int picture_id) const {
+  // First, check if applicable.
+  if (picture_id == kNoPictureId || picture_id_ == kNoPictureId)
+    return false;
+
+  int next_picture_id = picture_id_ + 1;
+  if (picture_id < picture_id_) {
+    // Wrap
+    if (picture_id_ >= 0x80) {
+      // 15 bits used for picture id
+      return ((next_picture_id & 0x7FFF) == picture_id);
+    } else {
+      // 7 bits used for picture id
+      return ((next_picture_id & 0x7F) == picture_id);
+    }
+  }
+  // No wrap
+  return (next_picture_id == picture_id);
+}
+
+// True if |seq_num| directly follows the stored sequence number
+// (the uint16_t cast handles wrap-around).
+bool VCMDecodingState::ContinuousSeqNum(uint16_t seq_num) const {
+  return (seq_num == static_cast<uint16_t>(sequence_num_ + 1));
+}
+
+// Layer-based continuity: only base-layer (temporal_id == 0) frames whose
+// tl0 pic id directly follows the stored one (8-bit wrap via the uint8_t
+// cast) are continuous. Returns false when layer info is not applicable.
+bool VCMDecodingState::ContinuousLayer(int temporal_id,
+                                       int tl0_pic_id) const {
+  // First, check if applicable.
+  if (temporal_id == kNoTemporalIdx || tl0_pic_id == kNoTl0PicIdx)
+    return false;
+  // If this is the first frame to use temporal layers, make sure we start
+  // from base.
+  else if (tl0_pic_id_ == kNoTl0PicIdx && temporal_id_ == kNoTemporalIdx &&
+           temporal_id == 0)
+    return true;
+
+  // Current implementation: Look for base layer continuity.
+  if (temporal_id != 0)
+    return false;
+  return (static_cast<uint8_t>(tl0_pic_id_ + 1) == tl0_pic_id);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/decoding_state.h b/trunk/src/modules/video_coding/main/source/decoding_state.h
new file mode 100644
index 0000000..afec3ba
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/decoding_state.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations
+class VCMFrameBuffer;
+class VCMPacket;
+
+// Tracks the state of the last decoded frame (sequence number, timestamp,
+// codec-specific ids) and answers continuity/ordering queries against it.
+class VCMDecodingState {
+ public:
+  VCMDecodingState();
+  ~VCMDecodingState();
+  // Check for old frame
+  bool IsOldFrame(const VCMFrameBuffer* frame) const;
+  // Check for old packet
+  bool IsOldPacket(const VCMPacket* packet) const;
+  // Check for frame continuity based on current decoded state. Use best method
+  // possible, i.e. temporal info, picture ID or sequence number.
+  bool ContinuousFrame(const VCMFrameBuffer* frame) const;
+  void SetState(const VCMFrameBuffer* frame);
+  // Set the decoding state one frame back.
+  void SetStateOneBack(const VCMFrameBuffer* frame);
+  // Update the sequence number if the timestamp matches current state and the
+  // sequence number is higher than the current one. This accounts for packets
+  // arriving late.
+  void UpdateOldPacket(const VCMPacket* packet);
+  void SetSeqNum(uint16_t new_seq_num);
+  void Reset();
+  uint32_t time_stamp() const;
+  uint16_t sequence_num() const;
+  // Return true if at initial state.
+  bool init() const;
+  // Return true when sync is on - decode all layers.
+  bool full_sync() const;
+
+ private:
+  void UpdateSyncState(const VCMFrameBuffer* frame);
+  // Designated continuity functions
+  bool ContinuousPictureId(int picture_id) const;
+  bool ContinuousSeqNum(uint16_t seq_num) const;
+  bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
+
+  // Keep state of last decoded frame.
+  // TODO(mikhal/stefan): create designated classes to handle these types.
+  uint16_t    sequence_num_;
+  uint32_t    time_stamp_;
+  int         picture_id_;   // kNoPictureId when the stream carries none.
+  int         temporal_id_;  // kNoTemporalIdx when temporal layers are unused.
+  int         tl0_pic_id_;   // kNoTl0PicIdx when temporal layers are unused.
+  bool        full_sync_;  // Sync flag when temporal layers are used.
+  bool        init_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
diff --git a/trunk/src/modules/video_coding/main/source/decoding_state_unittest.cc b/trunk/src/modules/video_coding/main/source/decoding_state_unittest.cc
new file mode 100644
index 0000000..853f42a
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/decoding_state_unittest.cc
@@ -0,0 +1,430 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "modules/video_coding/main/source/decoding_state.h"
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "gtest/gtest.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/packet.h"
+
+namespace webrtc {
+
+
+TEST(TestDecodingState, Sanity) {
+  // A freshly reset decoding state must report init() (nothing decoded
+  // yet) and full_sync() (decode all temporal layers).
+  VCMDecodingState dec_state;
+  dec_state.Reset();
+  EXPECT_TRUE(dec_state.init());
+  EXPECT_TRUE(dec_state.full_sync());
+}
+
+TEST(TestDecodingState, FrameContinuity) {
+  VCMDecodingState dec_state;
+  // Check that ContinuousFrame() makes its decision based on the correct
+  // method: picture id when available, then sequence numbers, then
+  // temporal-layer info.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  VCMPacket* packet = new VCMPacket();
+  packet->isFirstPacket = 1;
+  packet->timestamp = 1;
+  packet->seqNum = 0xffff;
+  packet->frameType = kVideoFrameDelta;
+  packet->codecSpecificHeader.codec = kRTPVideoVP8;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Should return true on init.
+  dec_state.Reset();
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  frame.Reset();
+  // Use pictureId.
+  // Picture id jumps from 0x007F to 2 => not continuous.
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // 0x007F is the 7-bit picture-id maximum, so 0 is its direct successor
+  // (wrap); the sequence-number gap must be ignored here.
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->seqNum = 10;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+
+  // Use sequence numbers.
+  // With no picture id present, continuity falls back to RTP seq numbers.
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->seqNum = dec_state.sequence_num() - 1u;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->seqNum = dec_state.sequence_num() + 1u;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Insert another packet to this frame
+  packet->seqNum++;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Verify wrap.
+  EXPECT_EQ(dec_state.sequence_num(), 0xffff);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+
+  // Insert packet with temporal info.
+  dec_state.Reset();
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->seqNum = 1;
+  packet->timestamp = 1;
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // 1 layer up - still good.
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+  packet->seqNum = 2;
+  packet->timestamp = 2;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // Lost non-base layer packet => should update sync parameter.
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3;
+  packet->seqNum = 4;
+  packet->timestamp = 4;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  // Now insert the next non-base layer (belonging to a next tl0PicId).
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4;
+  packet->seqNum = 5;
+  packet->timestamp = 5;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Checking continuity and not updating the state - this should not trigger
+  // an update of sync state.
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+  // Next base layer (dropped interim non-base layers) - should update sync.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5;
+  packet->seqNum = 6;
+  packet->timestamp = 6;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+
+  // Check wrap for temporal layers.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6;
+  packet->seqNum = 7;
+  packet->timestamp = 7;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // 0x00FF is the 8-bit tl0PicIdx maximum, so 0 is its direct successor.
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 7;
+  packet->seqNum = 8;
+  packet->timestamp = 8;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  // Once the state has been advanced to this frame, the very same frame is
+  // no longer continuous with it.
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  delete packet;
+}
+
+TEST(TestDecodingState, SetStateOneBack) {
+  VCMDecodingState dec_state;
+  // SetStateOneBack() must position the decoding state just before the
+  // given frame, so that the frame itself is then seen as continuous.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  // Stack allocation instead of new/delete: the original heap packet
+  // leaked if the test aborted before the trailing delete. The duplicated
+  // frameType assignment was also dropped.
+  VCMPacket packet;
+  // Based on PictureId.
+  packet.frameType = kVideoFrameDelta;
+  packet.codecSpecificHeader.codec = kRTPVideoVP8;
+  packet.timestamp = 0;
+  packet.seqNum = 0;
+  packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  frame.InsertPacket(packet, 0, false, 0);
+  dec_state.SetStateOneBack(&frame);
+  // One back from seqNum 0 wraps to 0xFFFF.
+  EXPECT_EQ(dec_state.sequence_num(), 0xFFFF);
+  // Check continuity.
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+
+  // Based on Temporal layers.
+  packet.timestamp = 0;
+  packet.seqNum = 0;
+  packet.codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
+  packet.frameType = kVideoFrameDelta;
+  packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  // NOTE(review): the frame is not Reset() here, so this packet is added
+  // to the same frame as above - confirm this is intentional.
+  frame.InsertPacket(packet, 0, false, 0);
+  dec_state.SetStateOneBack(&frame);
+  // Check continuity
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+}
+
+TEST(TestDecodingState, UpdateOldPacket) {
+  VCMDecodingState dec_state;
+  // UpdateOldPacket() should advance the stored sequence number only for
+  // packets whose timestamp matches the current decoding state; packets
+  // from other frames must leave the state untouched.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  // Stack allocation instead of new/delete: the original heap packet
+  // leaked if the test aborted before the trailing delete.
+  VCMPacket packet;
+  packet.timestamp = 1;
+  packet.seqNum = 1;
+  packet.frameType = kVideoFrameDelta;
+  frame.InsertPacket(packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_EQ(dec_state.sequence_num(), 1);
+  // Insert an empty packet that does not belong to the same frame.
+  // => Sequence num should be the same.
+  packet.timestamp = 2;
+  dec_state.UpdateOldPacket(&packet);
+  EXPECT_EQ(dec_state.sequence_num(), 1);
+  // Now insert empty packet belonging to the same frame.
+  packet.timestamp = 1;
+  packet.seqNum = 2;
+  packet.frameType = kFrameEmpty;
+  packet.sizeBytes = 0;
+  dec_state.UpdateOldPacket(&packet);
+  EXPECT_EQ(dec_state.sequence_num(), 2);
+  // Now insert delta packet belonging to the same frame.
+  packet.timestamp = 1;
+  packet.seqNum = 3;
+  packet.frameType = kVideoFrameDelta;
+  packet.sizeBytes = 1400;
+  dec_state.UpdateOldPacket(&packet);
+  EXPECT_EQ(dec_state.sequence_num(), 3);
+  // Insert a packet belonging to an older timestamp - should not update the
+  // sequence number.
+  packet.timestamp = 0;
+  packet.seqNum = 4;
+  packet.frameType = kFrameEmpty;
+  packet.sizeBytes = 0;
+  dec_state.UpdateOldPacket(&packet);
+  EXPECT_EQ(dec_state.sequence_num(), 3);
+}
+
+TEST(TestDecodingState, MultiLayerBehavior) {
+  // Identify sync/non-sync when more than one layer.
+  VCMDecodingState dec_state;
+  // Identify packets belonging to old frames/packets.
+  // Set state for current frames.
+  // tl0PicIdx 0, temporal id 0.
+  VCMFrameBuffer frame;
+  VCMPacket* packet = new VCMPacket();
+  packet->frameType = kVideoFrameDelta;
+  packet->codecSpecificHeader.codec = kRTPVideoVP8;
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 0;
+  packet->seqNum = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  // tl0PicIdx 0, temporal id 1.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 1;
+  packet->seqNum = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  // Lost tl0PicIdx 0, temporal id 2.
+  // Insert tl0PicIdx 0, temporal id 3.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 3;
+  packet->seqNum = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  // Updating the state to a non-continuous frame drops full sync.
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+  // Insert next base layer
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 4;
+  packet->seqNum = 4;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  // A continuous base layer alone does not restore full sync.
+  EXPECT_FALSE(dec_state.full_sync());
+  // Insert key frame - should update sync value.
+  // A key frame is always a base layer.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameKey;
+  packet->isFirstPacket = 1;
+  packet->timestamp = 5;
+  packet->seqNum = 5;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  // After sync, a continuous PictureId is required
+  // (continuous base layer is not enough )
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->timestamp = 6;
+  packet->seqNum = 6;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // Gap in picture id (6 -> 8): not continuous; updating the state to this
+  // frame drops full sync.
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->timestamp = 8;
+  packet->seqNum = 8;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 8;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+
+  // Insert a non-ref frame - should update sync value.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->timestamp = 9;
+  packet->seqNum = 9;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 9;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+
+  // The following test will verify the sync flag behavior after a loss.
+  // Create the following pattern:
+  // Update base layer, lose packet 1 (sync flag on, layer 2), insert packet 3
+  // (sync flag on, layer 2) check continuity and sync flag after inserting
+  // packet 2 (sync flag on, layer 1).
+  // Base layer.
+  frame.Reset();
+  dec_state.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->markerBit = 1;
+  packet->timestamp = 0;
+  packet->seqNum = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = false;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  // Layer 2 - 2 packets (insert one, lose one).
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->markerBit = 0;
+  packet->timestamp = 1;
+  packet->seqNum = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  // Layer 1
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->markerBit = 1;
+  packet->timestamp = 2;
+  packet->seqNum = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+
+  delete packet;
+}
+
+TEST(TestDecodingState, OldInput) {
+  VCMDecodingState dec_state;
+  // Packets/frames with a timestamp older than the current decoding state
+  // must be classified as old.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  // Stack allocation instead of new/delete: the original heap packet
+  // leaked if the test aborted before the trailing delete.
+  VCMPacket packet;
+  packet.timestamp = 10;
+  packet.seqNum = 1;
+  frame.InsertPacket(packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  packet.timestamp = 9;
+  EXPECT_TRUE(dec_state.IsOldPacket(&packet));
+  // Check for old frame.
+  frame.Reset();
+  frame.InsertPacket(packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.IsOldFrame(&frame));
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/encoded_frame.cc b/trunk/src/modules/video_coding/main/source/encoded_frame.cc
new file mode 100644
index 0000000..8e5d745
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/encoded_frame.cc
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "encoded_frame.h"
+
+#include <new>       // std::nothrow (VerifyAndAllocate)
+#include <string.h>  // memcpy
+
+#include "generic_encoder.h"
+#include "jitter_buffer_common.h"
+#include "video_coding_defines.h"
+
+namespace webrtc {
+
+// Constructs an empty frame: no payload buffer, no render time, and the
+// codec marked unknown until packets arrive.
+VCMEncodedFrame::VCMEncodedFrame()
+    : webrtc::EncodedImage(),
+      _renderTimeMs(-1),
+      _payloadType(0),
+      _missingFrame(false),
+      _codec(kVideoCodecUnknown),
+      _fragmentation() {
+  _codecSpecificInfo.codecType = kVideoCodecUnknown;
+}
+
+// Constructs a frame from a raw EncodedImage, taking a deep copy of the
+// payload buffer instead of aliasing rhs's.
+VCMEncodedFrame::VCMEncodedFrame(const webrtc::EncodedImage& rhs)
+:
+webrtc::EncodedImage(rhs),
+_renderTimeMs(-1),
+_payloadType(0),
+_missingFrame(false),
+_codec(kVideoCodecUnknown),
+_fragmentation()
+{
+    _codecSpecificInfo.codecType = kVideoCodecUnknown;
+    _buffer = NULL;
+    _size = 0;
+    _length = 0;
+    if (rhs._buffer != NULL)
+    {
+        VerifyAndAllocate(rhs._length);
+        memcpy(_buffer, rhs._buffer, rhs._length);
+        // Bug fix: restore the payload length, which was zeroed above and
+        // never set again, so copied frames reported Length() == 0. The
+        // VCMEncodedFrame copy constructor already did this correctly.
+        _length = rhs._length;
+    }
+}
+
+// Copy constructor: copies all metadata and takes a deep copy of the
+// payload buffer rather than aliasing rhs's.
+VCMEncodedFrame::VCMEncodedFrame(const VCMEncodedFrame& rhs)
+    : webrtc::EncodedImage(rhs),
+      _renderTimeMs(rhs._renderTimeMs),
+      _payloadType(rhs._payloadType),
+      _missingFrame(rhs._missingFrame),
+      _codecSpecificInfo(rhs._codecSpecificInfo),
+      _codec(rhs._codec),
+      _fragmentation() {
+  // Start from an empty buffer, then clone rhs's payload if it has one.
+  _buffer = NULL;
+  _size = 0;
+  _length = 0;
+  if (rhs._buffer != NULL) {
+    VerifyAndAllocate(rhs._length);
+    memcpy(_buffer, rhs._buffer, rhs._length);
+    _length = rhs._length;
+  }
+  // RTPFragmentationHeader provides a deep operator=; use it for the copy.
+  _fragmentation = rhs._fragmentation;
+}
+// Releases the payload buffer and resets all state.
+VCMEncodedFrame::~VCMEncodedFrame() {
+  Free();
+}
+
+// Resets all metadata and releases the payload buffer.
+void VCMEncodedFrame::Free()
+{
+    Reset();
+    if (_buffer != NULL)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+        // Bug fix: clear the capacity as well. Leaving _size non-zero with
+        // _buffer == NULL would let a later VerifyAndAllocate() call with a
+        // smaller size skip the allocation, after which callers would
+        // memcpy into a NULL buffer.
+        _size = 0;
+    }
+}
+
+// Clears all frame metadata back to defaults. The payload buffer
+// (_buffer/_size) is deliberately kept for reuse; only the payload
+// length is zeroed.
+void VCMEncodedFrame::Reset()
+{
+    _renderTimeMs = -1;
+    _timeStamp = 0;
+    _payloadType = 0;
+    _frameType = kDeltaFrame;
+    _encodedWidth = 0;
+    _encodedHeight = 0;
+    _completeFrame = false;
+    _missingFrame = false;
+    _length = 0;
+    _codecSpecificInfo.codecType = kVideoCodecUnknown;
+    _codec = kVideoCodecUnknown;
+}
+
+// Merges codec-specific fields from an RTP video header into
+// _codecSpecificInfo. A NULL header leaves the info untouched; an
+// unrecognized codec marks the info as unknown.
+void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header)
+{
+    if (header == NULL)
+    {
+        return;
+    }
+    if (header->codec != kRTPVideoVP8)
+    {
+        _codecSpecificInfo.codecType = kVideoCodecUnknown;
+        return;
+    }
+    if (_codecSpecificInfo.codecType != kVideoCodecVP8)
+    {
+        // First packet of this frame: initialize to neutral defaults.
+        _codecSpecificInfo.codecSpecific.VP8.pictureId = -1;
+        _codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0;
+        _codecSpecificInfo.codecSpecific.VP8.layerSync = false;
+        _codecSpecificInfo.codecSpecific.VP8.keyIdx = -1;
+        _codecSpecificInfo.codecType = kVideoCodecVP8;
+    }
+    _codecSpecificInfo.codecSpecific.VP8.nonReference =
+        header->codecHeader.VP8.nonReference;
+    // Each optional field is copied only when actually present in the
+    // header, so earlier packets' values are preserved otherwise.
+    if (header->codecHeader.VP8.pictureId != kNoPictureId)
+    {
+        _codecSpecificInfo.codecSpecific.VP8.pictureId =
+            header->codecHeader.VP8.pictureId;
+    }
+    if (header->codecHeader.VP8.temporalIdx != kNoTemporalIdx)
+    {
+        _codecSpecificInfo.codecSpecific.VP8.temporalIdx =
+            header->codecHeader.VP8.temporalIdx;
+        _codecSpecificInfo.codecSpecific.VP8.layerSync =
+            header->codecHeader.VP8.layerSync;
+    }
+    if (header->codecHeader.VP8.keyIdx != kNoKeyIdx)
+    {
+        _codecSpecificInfo.codecSpecific.VP8.keyIdx =
+            header->codecHeader.VP8.keyIdx;
+    }
+}
+
+// Returns this frame's RTP fragmentation information. The pointer is owned
+// by the frame and valid only for its lifetime.
+const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
+  return &_fragmentation;
+}
+
+// Hands a copy of this frame (payload and metadata) to the storage
+// callback. Always returns VCM_OK.
+WebRtc_Word32
+VCMEncodedFrame::Store(VCMFrameStorageCallback& storeCallback) const
+{
+    EncodedVideoData frameToStore;
+    if (_buffer != NULL)
+    {
+        // Deep-copy the payload so the callback owns its own buffer.
+        frameToStore.VerifyAndAllocate(_length);
+        memcpy(frameToStore.payloadData, _buffer, _length);
+        frameToStore.payloadSize = _length;
+    }
+    frameToStore.codec = _codec;
+    frameToStore.completeFrame = _completeFrame;
+    frameToStore.encodedHeight = _encodedHeight;
+    frameToStore.encodedWidth = _encodedWidth;
+    frameToStore.frameType = ConvertFrameType(_frameType);
+    frameToStore.missingFrame = _missingFrame;
+    frameToStore.payloadType = _payloadType;
+    frameToStore.renderTimeMs = _renderTimeMs;
+    frameToStore.timeStamp = _timeStamp;
+    storeCallback.StoreReceivedFrame(frameToStore);
+    return VCM_OK;
+}
+
+// Grows the payload buffer to at least minimumSize bytes, preserving any
+// existing contents. Returns 0 on success, -1 on allocation failure.
+WebRtc_Word32
+VCMEncodedFrame::VerifyAndAllocate(const WebRtc_UWord32 minimumSize)
+{
+    if(minimumSize > _size)
+    {
+        // create buffer of sufficient size
+        // Bug fix: use nothrow new. Plain new throws std::bad_alloc and
+        // never returns NULL, which made the NULL check below dead code
+        // and the -1 error path unreachable.
+        WebRtc_UWord8* newBuffer =
+            new (std::nothrow) WebRtc_UWord8[minimumSize];
+        if (newBuffer == NULL)
+        {
+            return -1;
+        }
+        if(_buffer)
+        {
+            // copy old data
+            memcpy(newBuffer, _buffer, _size);
+            delete [] _buffer;
+        }
+        _buffer = newBuffer;
+        _size = minimumSize;
+    }
+    return 0;
+}
+
+// Maps a codec-level frame type onto the generic webrtc::FrameType.
+// Anything unrecognized is treated as a delta frame.
+webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
+{
+    switch (frameType)
+    {
+        case kKeyFrame:
+            return kVideoFrameKey;
+        case kGoldenFrame:
+            return kVideoFrameGolden;
+        case kAltRefFrame:
+            return kVideoFrameAltRef;
+        case kDeltaFrame:
+        default:
+            return kVideoFrameDelta;
+    }
+}
+
+// Maps the generic webrtc::FrameType back onto the codec-level type.
+// Anything unrecognized is treated as a delta frame.
+VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frameType)
+{
+    switch (frameType)
+    {
+        case kVideoFrameKey:
+            return kKeyFrame;
+        case kVideoFrameGolden:
+            return kGoldenFrame;
+        case kVideoFrameAltRef:
+            return kAltRefFrame;
+        case kVideoFrameDelta:
+        default:
+            return kDeltaFrame;
+    }
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/encoded_frame.h b/trunk/src/modules/video_coding/main/source/encoded_frame.h
new file mode 100644
index 0000000..6289e9e
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/encoded_frame.h
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
+#define WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
+
+#include "common_types.h"
+#include "common_video/interface/video_image.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+
+namespace webrtc
+{
+
+// An encoded video frame plus the metadata the video-coding module needs:
+// render time, payload type, codec type and codec-specific info. The frame
+// owns its payload buffer (the constructors take deep copies) and releases
+// it in Free()/the destructor. Inheritance from EncodedImage is protected,
+// so all external access goes through the accessors below.
+class VCMEncodedFrame : protected EncodedImage
+{
+public:
+    VCMEncodedFrame();
+    VCMEncodedFrame(const webrtc::EncodedImage& rhs);
+    VCMEncodedFrame(const VCMEncodedFrame& rhs);
+
+    ~VCMEncodedFrame();
+    /**
+    *   Releases the frame buffer and resets all members to their defaults.
+    */
+    void Free();
+    /**
+    *   Set render time in milliseconds
+    */
+    void SetRenderTime(const WebRtc_Word64 renderTimeMs) {_renderTimeMs = renderTimeMs;}
+
+    /**
+    *   Set the encoded frame size
+    */
+    void SetEncodedSize(WebRtc_UWord32 width, WebRtc_UWord32 height)
+                       { _encodedWidth  = width; _encodedHeight = height; }
+    /**
+    *   Get the encoded image (a read-only view of this frame)
+    */
+    const webrtc::EncodedImage& EncodedImage() const
+                       { return static_cast<const webrtc::EncodedImage&>(*this); }
+    /**
+    *   Get pointer to frame buffer
+    */
+    const WebRtc_UWord8* Buffer() const {return _buffer;}
+    /**
+    *   Get frame length in bytes
+    */
+    WebRtc_UWord32 Length() const {return _length;}
+    /**
+    *   Get frame timestamp (90kHz)
+    */
+    WebRtc_UWord32 TimeStamp() const {return _timeStamp;}
+    /**
+    *   Get render time in milliseconds
+    */
+    WebRtc_Word64 RenderTimeMs() const {return _renderTimeMs;}
+    /**
+    *   Get frame type
+    */
+    webrtc::FrameType FrameType() const {return ConvertFrameType(_frameType);}
+    /**
+    *   True if this frame is complete, false otherwise
+    */
+    bool Complete() const { return _completeFrame; }
+    /**
+    *   True if there's a frame missing before this frame
+    */
+    bool MissingFrame() const { return _missingFrame; }
+    /**
+    *   Payload type of the encoded payload
+    */
+    WebRtc_UWord8 PayloadType() const { return _payloadType; }
+    /**
+    *   Get codec specific info.
+    *   The returned pointer is only valid as long as the VCMEncodedFrame
+    *   is valid. Also, VCMEncodedFrame owns the pointer and will delete
+    *   the object.
+    */
+    const CodecSpecificInfo* CodecSpecific() const {return &_codecSpecificInfo;}
+
+    // Fragmentation information for this frame; owned by the frame.
+    const RTPFragmentationHeader* FragmentationHeader() const;
+
+    // Hands a copy of this frame to the storage callback; returns VCM_OK.
+    WebRtc_Word32 Store(VCMFrameStorageCallback& storeCallback) const;
+
+    // Conversions between codec-level and generic frame-type enums;
+    // unrecognized values map to delta frames.
+    static webrtc::FrameType ConvertFrameType(VideoFrameType frameType);
+    static VideoFrameType ConvertFrameType(webrtc::FrameType frameType);
+
+protected:
+    /**
+    * Verifies that current allocated buffer size is larger than or equal to the input size.
+    * If the current buffer size is smaller, a new allocation is made and the old buffer data
+    * is copied to the new buffer.
+    * Buffer size is updated to minimumSize.
+    */
+    WebRtc_Word32 VerifyAndAllocate(const WebRtc_UWord32 minimumSize);
+
+    // Resets all metadata; keeps the allocated buffer (only the payload
+    // length is cleared).
+    void Reset();
+
+    // Merges codec-specific fields from an RTP header into
+    // _codecSpecificInfo (no-op for a NULL header).
+    void CopyCodecSpecific(const RTPVideoHeader* header);
+
+    WebRtc_Word64                 _renderTimeMs;  // Render time, ms (-1 if unset).
+    WebRtc_UWord8                 _payloadType;   // RTP payload type.
+    bool                          _missingFrame;
+    CodecSpecificInfo             _codecSpecificInfo;
+    webrtc::VideoCodecType        _codec;         // Codec that produced this frame.
+    RTPFragmentationHeader        _fragmentation;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
diff --git a/trunk/src/modules/video_coding/main/source/er_tables_xor.h b/trunk/src/modules/video_coding/main/source/er_tables_xor.h
new file mode 100644
index 0000000..99163ba
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/er_tables_xor.h
@@ -0,0 +1,38742 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_ER_TABLES_XOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_ER_TABLES_XOR_H_
+
+// This is a private header for media_opt_util.cc.
+// It should not be included by other files.
+
+namespace webrtc {
+
+// Table of average FEC recovery from packet loss for the XOR code,
+// derived off-line from an RPL model of random loss.
+// Input is the received packet loss (up to 50%), and FEC code parameters
+// (up to 24x24):
+// i.e., kAvgFECRecoveryXOR[k] where k = code_i*129 + loss_j;
+// code_i=1x1,2x1,2x2,..24x24, loss_j = 0,1,..128.
+
+// Maximum number of source packets in off-line model
+static const int kMaxNumPackets = 24;
+// Number of loss-rate entries per code in the off-line model
+// (loss index 0..128, i.e. 129 entries).
+static const int kPacketLossMax = 129;
+
+// Table size for model is: kPacketLossMax * numberOfFecCodes = 38700
+// numberOfFecCodes is determined as:
+// {(1,1), (2,1), (2,2),...(n,1),..(n,n-1), (n,n)} = n*(n+1)/2
+// for n = kMaxNumPackets: 300 codes * 129 loss values = 38700.
+static const int kSizeAvgFECRecoveryXOR = 38700;
+static const unsigned char kAvgFECRecoveryXOR[kSizeAvgFECRecoveryXOR] = {
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+61,
+61,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+68,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+46,
+46,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+49,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+68,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+68,
+68,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+67,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+43,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+63,
+63,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+64,
+63,
+62,
+62,
+61,
+60,
+59,
+59,
+58,
+57,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+58,
+57,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+44,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+65,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+71,
+72,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+72,
+71,
+71,
+70,
+70,
+70,
+69,
+69,
+68,
+68,
+67,
+66,
+66,
+65,
+64,
+64,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+57,
+57,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+39,
+38,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+60,
+59,
+58,
+57,
+57,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+50,
+49,
+49,
+48,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+70,
+71,
+71,
+71,
+72,
+72,
+72,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+72,
+72,
+72,
+71,
+71,
+71,
+70,
+70,
+70,
+69,
+69,
+68,
+68,
+67,
+66,
+66,
+65,
+65,
+64,
+63,
+62,
+62,
+61,
+60,
+59,
+58,
+58,
+57,
+56,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+59,
+59,
+58,
+57,
+56,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+50,
+49,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+71,
+71,
+70,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+66,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+59,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+23,
+24,
+24,
+25,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+35,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+61,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+60,
+59,
+58,
+57,
+56,
+55,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+71,
+71,
+70,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+65,
+64,
+64,
+63,
+62,
+61,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+5,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+55,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+53,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+62,
+63,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+67,
+67,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+65,
+65,
+66,
+67,
+67,
+68,
+68,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+72,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+72,
+72,
+72,
+71,
+71,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+30,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+60,
+60,
+59,
+58,
+58,
+57,
+56,
+55,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+68,
+67,
+67,
+66,
+65,
+65,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+48,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+59,
+60,
+60,
+61,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+73,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+72,
+72,
+71,
+71,
+70,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+65,
+64,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+57,
+56,
+54,
+53,
+52,
+51,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+47,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+63,
+62,
+62,
+61,
+61,
+60,
+59,
+58,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+57,
+57,
+58,
+59,
+60,
+61,
+61,
+62,
+63,
+63,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+64,
+64,
+63,
+62,
+61,
+60,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+45,
+44,
+43,
+42,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+65,
+65,
+66,
+67,
+67,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+72,
+72,
+71,
+71,
+70,
+69,
+69,
+68,
+67,
+66,
+66,
+65,
+64,
+63,
+62,
+61,
+60,
+59,
+58,
+57,
+56,
+54,
+53,
+52,
+51,
+50,
+49,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+27,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+0,
+0,
+2,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+51,
+50,
+49,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+17,
+0,
+0,
+2,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+57,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+0,
+1,
+2,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+0,
+0,
+1,
+2,
+3,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+0,
+1,
+2,
+3,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+33,
+34,
+35,
+36,
+37,
+38,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+53,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+40,
+41,
+42,
+43,
+44,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+45,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+30,
+31,
+32,
+33,
+34,
+35,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+34,
+35,
+36,
+37,
+39,
+40,
+41,
+42,
+43,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+20,
+21,
+23,
+24,
+26,
+27,
+29,
+30,
+32,
+33,
+35,
+36,
+38,
+39,
+40,
+42,
+43,
+44,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+15,
+14,
+13,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+15,
+17,
+18,
+19,
+21,
+22,
+24,
+25,
+27,
+28,
+30,
+31,
+33,
+35,
+36,
+38,
+39,
+41,
+42,
+43,
+45,
+46,
+47,
+48,
+49,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+28,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+20,
+21,
+22,
+23,
+24,
+25,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+44,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+21,
+22,
+23,
+24,
+26,
+27,
+28,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+10,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+23,
+24,
+25,
+26,
+27,
+28,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+18,
+19,
+20,
+22,
+23,
+25,
+26,
+28,
+29,
+30,
+32,
+33,
+35,
+36,
+37,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+52,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+16,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+24,
+25,
+26,
+27,
+28,
+30,
+31,
+32,
+34,
+35,
+36,
+37,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+9,
+9,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+21,
+22,
+24,
+25,
+26,
+28,
+29,
+31,
+32,
+34,
+35,
+36,
+38,
+39,
+40,
+41,
+42,
+44,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+18,
+19,
+21,
+22,
+23,
+25,
+26,
+28,
+29,
+31,
+32,
+34,
+35,
+37,
+38,
+39,
+40,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+9,
+9,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+26,
+27,
+28,
+29,
+31,
+32,
+33,
+34,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+20,
+21,
+23,
+24,
+26,
+27,
+29,
+31,
+32,
+34,
+35,
+37,
+39,
+40,
+42,
+43,
+44,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+38,
+37,
+36,
+35,
+34,
+32,
+31,
+30,
+29,
+28,
+27,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+20,
+21,
+23,
+25,
+26,
+28,
+30,
+31,
+33,
+35,
+36,
+38,
+39,
+41,
+42,
+44,
+45,
+46,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+43,
+42,
+41,
+40,
+39,
+38,
+36,
+35,
+34,
+33,
+32,
+31,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+15,
+14,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+19,
+20,
+21,
+23,
+24,
+26,
+27,
+29,
+30,
+32,
+34,
+35,
+37,
+38,
+40,
+41,
+43,
+44,
+46,
+47,
+48,
+50,
+51,
+52,
+53,
+54,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+58,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+44,
+43,
+42,
+41,
+39,
+38,
+37,
+36,
+34,
+33,
+32,
+31,
+29,
+28,
+27,
+26,
+25,
+24,
+22,
+21,
+20,
+19,
+18,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+22,
+23,
+24,
+25,
+27,
+28,
+29,
+30,
+31,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+19,
+20,
+22,
+23,
+24,
+25,
+26,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+21,
+22,
+23,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+19,
+20,
+21,
+22,
+24,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+5,
+6,
+7,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+20,
+21,
+22,
+24,
+25,
+26,
+28,
+29,
+30,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+15,
+16,
+17,
+18,
+20,
+21,
+23,
+24,
+25,
+27,
+28,
+29,
+31,
+32,
+33,
+34,
+35,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+20,
+21,
+22,
+23,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_ER_TABLES_XOR_H_
diff --git a/trunk/src/modules/video_coding/main/source/event.h b/trunk/src/modules/video_coding/main/source/event.h
new file mode 100644
index 0000000..39fd494
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/event.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
+#define WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
+
+#include "event_wrapper.h"
+
+namespace webrtc
+{
+
+//#define EVENT_DEBUG
+
+class VCMEvent : public EventWrapper
+{
+public:
+    VCMEvent() : _event(*EventWrapper::Create()) {};
+
+    virtual ~VCMEvent() { delete &_event; };
+
+    /**
+    *   Release waiting threads
+    */
+    bool Set() { return _event.Set(); };
+
+    bool Reset() { return _event.Reset(); };
+
+    /**
+    *   Wait for this event
+    */
+    EventTypeWrapper Wait(unsigned long maxTime)
+    {
+#ifdef EVENT_DEBUG
+        return kEventTimeout;
+#else
+        return _event.Wait(maxTime);
+#endif
+    };
+
+    /**
+    *   Start a timer
+    */
+    bool StartTimer(bool periodic, unsigned long time)
+                   { return _event.StartTimer(periodic, time); };
+    /**
+    *   Stop the timer
+    */
+    bool StopTimer() { return _event.StopTimer(); };
+
+private:
+    EventWrapper&      _event;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
diff --git a/trunk/src/modules/video_coding/main/source/exp_filter.cc b/trunk/src/modules/video_coding/main/source/exp_filter.cc
new file mode 100644
index 0000000..1d6f9a7
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/exp_filter.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "exp_filter.h"
+
+#include <math.h>
+
+namespace webrtc {
+
+void
+VCMExpFilter::Reset(float alpha)
+{
+    _alpha = alpha;
+    _filtered = -1.0;
+}
+
+float
+VCMExpFilter::Apply(float exp, float sample)
+{
+    if (_filtered == -1.0)
+    {
+        // Initialize filtered bit rates
+        _filtered = sample;
+    }
+    else if (exp == 1.0)
+    {
+        _filtered = _alpha * _filtered + (1 - _alpha) * sample;
+    }
+    else
+    {
+        float alpha = pow(_alpha, exp);
+        _filtered = alpha * _filtered + (1 - alpha) * sample;
+    }
+    if (_max != -1 && _filtered > _max)
+    {
+        _filtered = _max;
+    }
+    return _filtered;
+}
+
+void
+VCMExpFilter::UpdateBase(float alpha)
+{
+    _alpha = alpha;
+}
+
+float
+VCMExpFilter::Value() const
+{
+    return _filtered;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/exp_filter.h b/trunk/src/modules/video_coding/main/source/exp_filter.h
new file mode 100644
index 0000000..46d206a
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/exp_filter.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
+
+namespace webrtc
+{
+
+/**********************/
+/* ExpFilter class    */
+/**********************/
+
+class VCMExpFilter
+{
+public:
+    VCMExpFilter(float alpha, float max = -1.0) : _alpha(alpha), _filtered(-1.0), _max(max) {}
+
+    // Resets the filter to its initial state, and resets alpha to the given value
+    //
+    // Input:
+    //          - alpha     : the new value of the filter factor base.
+    void Reset(float alpha);
+
+    // Applies the filter with the given exponent on the provided sample
+    //
+    // Input:
+    //          - exp       : Exponent T in y(k) = alpha^T * y(k-1) + (1 - alpha^T) * x(k)
+    //          - sample    : x(k) in the above filter equation
+    float Apply(float exp, float sample);
+
+    // Return current filtered value: y(k)
+    //
+    // Return value         : The current filter output
+    float Value() const;
+
+    // Change the filter factor base
+    //
+    // Input:
+    //          - alpha     : The new filter factor base.
+    void UpdateBase(float alpha);
+
+private:
+    float          _alpha;     // Filter factor base
+    float          _filtered;  // Current filter output
+    const float    _max;
+}; // end of ExpFilter class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
diff --git a/trunk/src/modules/video_coding/main/source/fec_tables_xor.h b/trunk/src/modules/video_coding/main/source/fec_tables_xor.h
new file mode 100644
index 0000000..27db9a4
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/fec_tables_xor.h
@@ -0,0 +1,6481 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
+
+// This is a private header for media_opt_util.cc.
+// It should not be included by other files.
+
+namespace webrtc {
+
+// Table for Protection factor (code rate) of delta frames, for the XOR FEC.
+// Input is the packet loss and an effective rate (bits/frame).
+// Output is array kCodeRateXORTable[k], where k = rate_i*129 + loss_j;
+// loss_j = 0,1,..128, and rate_i varies over some range.
+static const int kSizeCodeRateXORTable = 6450;
+static const unsigned char kCodeRateXORTable[kSizeCodeRateXORTable] = {
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+78,
+78,
+78,
+78,
+78,
+78,
+78,
+78,
+78,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+6,
+6,
+6,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+44,
+44,
+44,
+44,
+44,
+44,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+88,
+88,
+88,
+88,
+88,
+88,
+88,
+88,
+88,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+5,
+5,
+5,
+5,
+5,
+5,
+19,
+19,
+19,
+36,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+55,
+55,
+55,
+55,
+55,
+55,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+75,
+75,
+80,
+80,
+80,
+80,
+80,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+4,
+16,
+16,
+16,
+16,
+16,
+16,
+30,
+35,
+35,
+47,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+63,
+63,
+63,
+63,
+63,
+63,
+77,
+77,
+77,
+77,
+77,
+77,
+77,
+82,
+82,
+82,
+82,
+94,
+94,
+94,
+94,
+94,
+105,
+105,
+105,
+105,
+110,
+110,
+110,
+110,
+110,
+110,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+4,
+14,
+27,
+27,
+27,
+27,
+27,
+31,
+41,
+52,
+52,
+56,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+79,
+79,
+79,
+79,
+83,
+83,
+83,
+94,
+94,
+94,
+94,
+106,
+106,
+106,
+106,
+106,
+115,
+115,
+115,
+115,
+125,
+125,
+125,
+125,
+125,
+125,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+0,
+3,
+3,
+3,
+17,
+28,
+38,
+38,
+38,
+38,
+38,
+47,
+51,
+63,
+63,
+63,
+72,
+72,
+72,
+72,
+72,
+72,
+72,
+76,
+76,
+76,
+76,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+84,
+84,
+84,
+84,
+93,
+93,
+93,
+105,
+105,
+105,
+105,
+114,
+114,
+114,
+114,
+114,
+124,
+124,
+124,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+0,
+12,
+12,
+12,
+35,
+43,
+47,
+47,
+47,
+47,
+47,
+58,
+58,
+66,
+66,
+66,
+70,
+70,
+70,
+70,
+70,
+73,
+73,
+82,
+82,
+82,
+86,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+105,
+105,
+105,
+114,
+114,
+114,
+114,
+117,
+117,
+117,
+117,
+117,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+0,
+24,
+24,
+24,
+49,
+53,
+53,
+53,
+53,
+53,
+53,
+61,
+61,
+64,
+64,
+64,
+64,
+70,
+70,
+70,
+70,
+78,
+78,
+88,
+88,
+88,
+96,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+112,
+112,
+112,
+120,
+120,
+120,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+5,
+36,
+36,
+36,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+58,
+58,
+58,
+58,
+58,
+64,
+78,
+78,
+78,
+78,
+87,
+87,
+94,
+94,
+94,
+103,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+116,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+18,
+43,
+43,
+43,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+58,
+58,
+58,
+58,
+71,
+87,
+87,
+87,
+87,
+94,
+94,
+97,
+97,
+97,
+109,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+125,
+125,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+31,
+46,
+46,
+46,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+66,
+66,
+66,
+66,
+80,
+93,
+93,
+93,
+93,
+95,
+95,
+95,
+95,
+100,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+4,
+40,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+49,
+49,
+49,
+74,
+74,
+74,
+74,
+86,
+90,
+90,
+90,
+90,
+95,
+95,
+95,
+95,
+106,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+14,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+46,
+56,
+56,
+56,
+80,
+80,
+80,
+80,
+84,
+84,
+84,
+84,
+88,
+99,
+99,
+99,
+99,
+111,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+26,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+54,
+66,
+66,
+66,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+84,
+94,
+106,
+106,
+106,
+106,
+116,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+3,
+34,
+38,
+38,
+38,
+38,
+38,
+42,
+42,
+42,
+63,
+72,
+72,
+76,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+89,
+101,
+114,
+114,
+114,
+114,
+118,
+118,
+118,
+118,
+118,
+118,
+118,
+118,
+118,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+12,
+36,
+36,
+36,
+36,
+36,
+36,
+49,
+49,
+49,
+69,
+73,
+76,
+86,
+86,
+86,
+86,
+86,
+86,
+86,
+86,
+97,
+109,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+22,
+34,
+34,
+34,
+34,
+38,
+38,
+57,
+57,
+57,
+69,
+73,
+82,
+92,
+92,
+92,
+92,
+92,
+92,
+96,
+96,
+104,
+117,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+29,
+33,
+33,
+33,
+33,
+44,
+44,
+62,
+62,
+62,
+69,
+77,
+87,
+95,
+95,
+95,
+95,
+95,
+95,
+107,
+107,
+110,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+31,
+31,
+31,
+31,
+31,
+51,
+51,
+62,
+65,
+65,
+73,
+83,
+91,
+94,
+94,
+94,
+94,
+97,
+97,
+114,
+114,
+114,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+29,
+29,
+29,
+29,
+29,
+56,
+56,
+59,
+70,
+70,
+79,
+86,
+89,
+89,
+89,
+89,
+89,
+100,
+100,
+116,
+116,
+116,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+28,
+28,
+28,
+28,
+28,
+57,
+57,
+57,
+76,
+76,
+83,
+86,
+86,
+86,
+86,
+86,
+89,
+104,
+104,
+114,
+114,
+114,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+27,
+27,
+27,
+27,
+30,
+55,
+55,
+55,
+80,
+80,
+83,
+86,
+86,
+86,
+86,
+86,
+93,
+108,
+108,
+111,
+111,
+111,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+26,
+26,
+26,
+26,
+36,
+53,
+53,
+53,
+80,
+80,
+80,
+90,
+90,
+90,
+90,
+90,
+98,
+107,
+107,
+107,
+107,
+107,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+26,
+26,
+26,
+28,
+42,
+52,
+54,
+54,
+78,
+78,
+78,
+95,
+95,
+95,
+97,
+97,
+104,
+106,
+106,
+106,
+106,
+106,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+24,
+24,
+24,
+33,
+47,
+49,
+58,
+58,
+74,
+74,
+74,
+97,
+97,
+97,
+106,
+106,
+108,
+108,
+108,
+108,
+108,
+108,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+24,
+24,
+24,
+39,
+48,
+50,
+63,
+63,
+72,
+74,
+74,
+96,
+96,
+96,
+109,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+119,
+119,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+23,
+23,
+23,
+43,
+46,
+54,
+66,
+66,
+69,
+77,
+77,
+92,
+92,
+92,
+105,
+113,
+113,
+113,
+113,
+113,
+113,
+113,
+115,
+117,
+123,
+123,
+123,
+123,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+22,
+22,
+22,
+44,
+44,
+59,
+67,
+67,
+67,
+81,
+81,
+89,
+89,
+89,
+97,
+112,
+112,
+112,
+112,
+112,
+112,
+112,
+112,
+119,
+126,
+126,
+126,
+126,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+21,
+21,
+24,
+43,
+45,
+63,
+65,
+65,
+67,
+85,
+85,
+87,
+87,
+87,
+91,
+109,
+109,
+109,
+111,
+111,
+111,
+111,
+111,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+21,
+21,
+28,
+42,
+50,
+63,
+63,
+66,
+71,
+85,
+85,
+85,
+85,
+87,
+92,
+106,
+106,
+108,
+114,
+114,
+114,
+114,
+114,
+125,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+20,
+20,
+34,
+41,
+54,
+62,
+62,
+69,
+75,
+82,
+82,
+82,
+82,
+92,
+98,
+105,
+105,
+110,
+117,
+117,
+117,
+117,
+117,
+124,
+124,
+126,
+126,
+126,
+126,
+126,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+20,
+20,
+38,
+40,
+58,
+60,
+60,
+73,
+78,
+80,
+80,
+80,
+80,
+100,
+105,
+107,
+107,
+113,
+118,
+118,
+118,
+118,
+118,
+120,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+19,
+21,
+38,
+40,
+58,
+58,
+60,
+75,
+77,
+77,
+77,
+81,
+81,
+107,
+109,
+109,
+109,
+114,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+18,
+25,
+37,
+44,
+56,
+56,
+63,
+75,
+75,
+75,
+75,
+88,
+88,
+111,
+111,
+111,
+111,
+112,
+112,
+112,
+112,
+112,
+112,
+112,
+114,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+18,
+30,
+36,
+48,
+55,
+55,
+67,
+73,
+73,
+73,
+73,
+97,
+97,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+18,
+34,
+36,
+52,
+55,
+55,
+70,
+72,
+73,
+73,
+73,
+102,
+104,
+108,
+108,
+108,
+108,
+109,
+109,
+109,
+109,
+109,
+109,
+109,
+119,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+17,
+35,
+35,
+52,
+59,
+59,
+70,
+70,
+76,
+76,
+76,
+99,
+105,
+105,
+105,
+105,
+105,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+121,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+17,
+34,
+36,
+51,
+61,
+62,
+70,
+70,
+80,
+80,
+80,
+93,
+103,
+103,
+103,
+103,
+103,
+112,
+112,
+112,
+112,
+112,
+116,
+118,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+16,
+33,
+39,
+50,
+59,
+65,
+72,
+72,
+82,
+82,
+82,
+91,
+100,
+100,
+100,
+100,
+100,
+109,
+109,
+109,
+109,
+109,
+121,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+16,
+32,
+43,
+48,
+54,
+66,
+75,
+75,
+81,
+83,
+83,
+92,
+97,
+97,
+97,
+99,
+99,
+105,
+105,
+105,
+105,
+105,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+15,
+31,
+46,
+47,
+49,
+69,
+77,
+77,
+81,
+85,
+85,
+93,
+95,
+95,
+95,
+100,
+100,
+102,
+102,
+102,
+102,
+102,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+15,
+30,
+46,
+48,
+48,
+70,
+75,
+79,
+82,
+87,
+87,
+92,
+94,
+94,
+94,
+103,
+103,
+103,
+103,
+103,
+104,
+104,
+115,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+15,
+30,
+45,
+50,
+50,
+68,
+70,
+80,
+85,
+89,
+89,
+90,
+95,
+95,
+95,
+104,
+104,
+104,
+104,
+104,
+109,
+109,
+112,
+114,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+29,
+44,
+54,
+54,
+64,
+64,
+83,
+87,
+88,
+88,
+88,
+98,
+98,
+98,
+103,
+103,
+103,
+103,
+103,
+113,
+113,
+113,
+113,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+29,
+43,
+56,
+56,
+61,
+61,
+84,
+85,
+88,
+88,
+88,
+100,
+100,
+100,
+102,
+102,
+102,
+102,
+102,
+113,
+116,
+116,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+28,
+42,
+57,
+57,
+62,
+62,
+80,
+80,
+91,
+91,
+91,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+109,
+119,
+119,
+119,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+28,
+42,
+56,
+56,
+65,
+66,
+76,
+76,
+92,
+92,
+92,
+97,
+97,
+97,
+101,
+101,
+101,
+101,
+101,
+106,
+121,
+121,
+121,
+126,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+13,
+27,
+41,
+55,
+55,
+67,
+72,
+74,
+74,
+90,
+90,
+90,
+91,
+91,
+91,
+105,
+105,
+105,
+105,
+105,
+107,
+122,
+122,
+122,
+123,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+13,
+27,
+40,
+54,
+54,
+67,
+76,
+76,
+76,
+85,
+85,
+85,
+85,
+85,
+85,
+112,
+112,
+112,
+112,
+112,
+112,
+121,
+121,
+121,
+121,
+121,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
diff --git a/trunk/src/modules/video_coding/main/source/frame_buffer.cc b/trunk/src/modules/video_coding/main/source/frame_buffer.cc
new file mode 100644
index 0000000..6339b05
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/frame_buffer.cc
@@ -0,0 +1,415 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "frame_buffer.h"
+#include "packet.h"
+
+#include <cassert>
+#include <string.h>
+
+#if defined(_WIN32)
+    // VS 2005: Don't warn for default initialized arrays. See help for more info.
+    #pragma warning(disable:4351)
+#endif
+
+namespace webrtc {
+
+VCMFrameBuffer::VCMFrameBuffer()
+  :
+    _state(kStateFree),
+    _frameCounted(false),
+    _nackCount(0),
+    _latestPacketTimeMs(-1) {
+}
+
+VCMFrameBuffer::~VCMFrameBuffer() {
+}
+
+VCMFrameBuffer::VCMFrameBuffer(VCMFrameBuffer& rhs)
+:
+VCMEncodedFrame(rhs),
+_state(rhs._state),
+_frameCounted(rhs._frameCounted),
+_sessionInfo(),
+_nackCount(rhs._nackCount),
+_latestPacketTimeMs(rhs._latestPacketTimeMs)
+{
+    _sessionInfo = rhs._sessionInfo;
+    _sessionInfo.UpdateDataPointers(rhs._buffer, _buffer);
+}
+
+webrtc::FrameType
+VCMFrameBuffer::FrameType() const
+{
+    return _sessionInfo.FrameType();
+}
+
+void
+VCMFrameBuffer::SetPreviousFrameLoss()
+{
+    _sessionInfo.SetPreviousFrameLoss();
+}
+
+WebRtc_Word32
+VCMFrameBuffer::GetLowSeqNum() const
+{
+    return _sessionInfo.LowSequenceNumber();
+}
+
+WebRtc_Word32
+VCMFrameBuffer::GetHighSeqNum() const
+{
+    return _sessionInfo.HighSequenceNumber();
+}
+
+int VCMFrameBuffer::PictureId() const {
+  return _sessionInfo.PictureId();
+}
+
+int VCMFrameBuffer::TemporalId() const {
+  return _sessionInfo.TemporalId();
+}
+
+bool VCMFrameBuffer::LayerSync() const {
+  return _sessionInfo.LayerSync();
+}
+
+int VCMFrameBuffer::Tl0PicId() const {
+  return _sessionInfo.Tl0PicId();
+}
+
+bool VCMFrameBuffer::NonReference() const {
+  return _sessionInfo.NonReference();
+}
+
+bool
+VCMFrameBuffer::IsSessionComplete() const
+{
+    return _sessionInfo.complete();
+}
+
+// Insert packet
+VCMFrameBufferEnum
+VCMFrameBuffer::InsertPacket(const VCMPacket& packet, WebRtc_Word64 timeInMs,
+                             bool enableDecodableState, WebRtc_UWord32 rttMS)
+{
+    if (_state == kStateDecoding)
+    {
+        // Do not insert packet
+        return kNoError;
+    }
+
+    // Sanity check: has the frame already been freed? (Too old, for example)
+    if (_state == kStateFree)
+    {
+        return kStateError;
+    }
+
+    // is this packet part of this frame
+    if (TimeStamp() && (TimeStamp() != packet.timestamp))
+    {
+        return kTimeStampError;
+    }
+
+    // sanity checks
+    if (_size + packet.sizeBytes +
+        (packet.insertStartCode ?  kH264StartCodeLengthBytes : 0 )
+        > kMaxJBFrameSizeBytes)
+    {
+        return kSizeError;
+    }
+    if (NULL == packet.dataPtr && packet.sizeBytes > 0)
+    {
+        return kSizeError;
+    }
+    if (packet.dataPtr != NULL)
+    {
+        _payloadType = packet.payloadType;
+    }
+
+    if (kStateEmpty == _state)
+    {
+        // First packet (empty and/or media) inserted into this frame.
+        // store some info and set some initial values.
+        _timeStamp = packet.timestamp;
+        _codec = packet.codec;
+        if (packet.frameType != kFrameEmpty)
+        {
+            // first media packet
+            SetState(kStateIncomplete);
+        }
+    }
+
+    WebRtc_UWord32 requiredSizeBytes = Length() + packet.sizeBytes +
+                   (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
+    if (requiredSizeBytes >= _size)
+    {
+        const WebRtc_UWord8* prevBuffer = _buffer;
+        const WebRtc_UWord32 increments = requiredSizeBytes /
+                                          kBufferIncStepSizeBytes +
+                                        (requiredSizeBytes %
+                                         kBufferIncStepSizeBytes > 0);
+        const WebRtc_UWord32 newSize = _size +
+                                       increments * kBufferIncStepSizeBytes;
+        if (newSize > kMaxJBFrameSizeBytes)
+        {
+            return kSizeError;
+        }
+        if (VerifyAndAllocate(newSize) == -1)
+        {
+            return kSizeError;
+        }
+        _sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
+    }
+
+    CopyCodecSpecific(&packet.codecSpecificHeader);
+
+    int retVal = _sessionInfo.InsertPacket(packet, _buffer,
+                                           enableDecodableState,
+                                           rttMS);
+    if (retVal == -1)
+    {
+        return kSizeError;
+    }
+    else if (retVal == -2)
+    {
+        return kDuplicatePacket;
+    }
+    // update length
+    _length = Length() + static_cast<WebRtc_UWord32>(retVal);
+
+    _latestPacketTimeMs = timeInMs;
+
+    if (_sessionInfo.complete()) {
+      return kCompleteSession;
+    } else if (_sessionInfo.decodable()) {
+      SetState(kStateDecodable);
+      return kDecodableSession;
+    } else {
+      // this layer is not complete
+      if (_state == kStateComplete) {
+        // we already have a complete layer
+        // wait for all independent layers belonging to the same frame
+        _state = kStateIncomplete;
+      }
+    }
+    return kIncomplete;
+}
+
+WebRtc_Word64
+VCMFrameBuffer::LatestPacketTimeMs()
+{
+    return _latestPacketTimeMs;
+}
+
+// Build hard NACK list: Zero out all entries in list up to and including the
+// (first) entry equal to _lowSeqNum.
+int VCMFrameBuffer::BuildHardNackList(int* list, int num) {
+  if (_sessionInfo.BuildHardNackList(list, num) != 0) {
+   return -1;
+  }
+  return 0;
+}
+
+// Build selective NACK list: Create a soft (selective) list of entries to zero
+// out up to and including the (first) entry equal to _lowSeqNum.
+int VCMFrameBuffer::BuildSoftNackList(int* list, int num, int rttMs) {
+  return _sessionInfo.BuildSoftNackList(list, num, rttMs);
+}
+
+void
+VCMFrameBuffer::IncrementNackCount()
+{
+    _nackCount++;
+}
+
+WebRtc_Word16
+VCMFrameBuffer::GetNackCount() const
+{
+    return _nackCount;
+}
+
+bool
+VCMFrameBuffer::HaveLastPacket() const
+{
+    return _sessionInfo.HaveLastPacket();
+}
+
+void
+VCMFrameBuffer::Reset()
+{
+    _length = 0;
+    _timeStamp = 0;
+    _sessionInfo.Reset();
+    _frameCounted = false;
+    _payloadType = 0;
+    _nackCount = 0;
+    _latestPacketTimeMs = -1;
+    _state = kStateFree;
+    VCMEncodedFrame::Reset();
+}
+
+// Makes sure the session contains a decodable stream.
+void
+VCMFrameBuffer::MakeSessionDecodable()
+{
+    WebRtc_UWord32 retVal;
+#ifdef INDEPENDENT_PARTITIONS
+    if (_codec != kVideoCodecVP8) {
+        retVal = _sessionInfo.MakeDecodable();
+        _length -= retVal;
+    }
+#else
+    retVal = _sessionInfo.MakeDecodable();
+    _length -= retVal;
+#endif
+}
+
+// Set state of frame
+void
+VCMFrameBuffer::SetState(VCMFrameBufferStateEnum state)
+{
+    if (_state == state)
+    {
+        return;
+    }
+    switch (state)
+    {
+    case kStateFree:
+        // Reset everything
+        // We can go to this state from all other states.
+        // The one setting the state to free must ensure
+        // that the frame is removed from the timestamp
+        // ordered frame list in the jb.
+        Reset();
+        break;
+
+    case kStateIncomplete:
+        // we can go to this state from state kStateEmpty
+        assert(_state == kStateEmpty ||
+            _state == kStateDecoding);
+
+        // Do nothing, we received a packet
+        break;
+
+    case kStateComplete:
+        assert(_state == kStateEmpty ||
+               _state == kStateIncomplete ||
+               _state == kStateDecodable);
+
+        break;
+
+    case kStateEmpty:
+        assert(_state == kStateFree);
+        // Do nothing
+        break;
+
+    case kStateDecoding:
+        // A frame might have received empty packets, or media packets might
+        // have been removed when making the frame decodable. The frame can
+        // still be set to decodable since it can be used to inform the
+        // decoder of a frame loss.
+        assert(_state == kStateComplete || _state == kStateIncomplete ||
+               _state == kStateDecodable || _state == kStateEmpty);
+        // Transfer frame information to EncodedFrame and create any codec
+        // specific information
+        RestructureFrameInformation();
+        break;
+
+    case kStateDecodable:
+        assert(_state == kStateEmpty ||
+               _state == kStateIncomplete);
+        break;
+    }
+    _state = state;
+}
+
+void
+VCMFrameBuffer::RestructureFrameInformation()
+{
+    PrepareForDecode();
+    _frameType = ConvertFrameType(_sessionInfo.FrameType());
+    _completeFrame = _sessionInfo.complete();
+    _missingFrame = _sessionInfo.PreviousFrameLoss();
+}
+
+WebRtc_Word32
+VCMFrameBuffer::ExtractFromStorage(const EncodedVideoData& frameFromStorage)
+{
+    _frameType = ConvertFrameType(frameFromStorage.frameType);
+    _timeStamp = frameFromStorage.timeStamp;
+    _payloadType = frameFromStorage.payloadType;
+    _encodedWidth = frameFromStorage.encodedWidth;
+    _encodedHeight = frameFromStorage.encodedHeight;
+    _missingFrame = frameFromStorage.missingFrame;
+    _completeFrame = frameFromStorage.completeFrame;
+    _renderTimeMs = frameFromStorage.renderTimeMs;
+    _codec = frameFromStorage.codec;
+    const WebRtc_UWord8 *prevBuffer = _buffer;
+    if (VerifyAndAllocate(frameFromStorage.payloadSize) < 0)
+    {
+        return VCM_MEMORY;
+    }
+    _sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
+    memcpy(_buffer, frameFromStorage.payloadData, frameFromStorage.payloadSize);
+    _length = frameFromStorage.payloadSize;
+    return VCM_OK;
+}
+
+int VCMFrameBuffer::NotDecodablePackets() const {
+  return _sessionInfo.packets_not_decodable();
+}
+
+// Set counted status (as counted by JB or not)
+void VCMFrameBuffer::SetCountedFrame(bool frameCounted)
+{
+    _frameCounted = frameCounted;
+}
+
+bool VCMFrameBuffer::GetCountedFrame() const
+{
+    return _frameCounted;
+}
+
+// Get current state of frame
+VCMFrameBufferStateEnum
+VCMFrameBuffer::GetState() const
+{
+    return _state;
+}
+
+// Get current state of frame
+VCMFrameBufferStateEnum
+VCMFrameBuffer::GetState(WebRtc_UWord32& timeStamp) const
+{
+    timeStamp = TimeStamp();
+    return GetState();
+}
+
+bool
+VCMFrameBuffer::IsRetransmitted() const
+{
+    return _sessionInfo.session_nack();
+}
+
+void
+VCMFrameBuffer::PrepareForDecode()
+{
+#ifdef INDEPENDENT_PARTITIONS
+    if (_codec == kVideoCodecVP8)
+    {
+        _length =
+            _sessionInfo.BuildVP8FragmentationHeader(_buffer, _length,
+                                                     &_fragmentation);
+    }
+#endif
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/frame_buffer.h b/trunk/src/modules/video_coding/main/source/frame_buffer.h
new file mode 100644
index 0000000..ea05754
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/frame_buffer.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/encoded_frame.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/session_info.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class VCMFrameBuffer : public VCMEncodedFrame
+{
+public:
+    VCMFrameBuffer();
+    virtual ~VCMFrameBuffer();
+
+    VCMFrameBuffer(VCMFrameBuffer& rhs);
+
+    virtual void Reset();
+
+    VCMFrameBufferEnum InsertPacket(const VCMPacket& packet,
+                                    WebRtc_Word64 timeInMs,
+                                    bool enableDecodableState,
+                                    WebRtc_UWord32 rttMs);
+
+    // State
+    // Get current state of frame
+    VCMFrameBufferStateEnum GetState() const;
+    // Get current state and timestamp of frame
+    VCMFrameBufferStateEnum GetState(WebRtc_UWord32& timeStamp) const;
+    void SetState(VCMFrameBufferStateEnum state); // Set state of frame
+
+    bool IsRetransmitted() const;
+    bool IsSessionComplete() const;
+    bool HaveLastPacket() const;
+    // Makes sure the session contains a decodable stream.
+    void MakeSessionDecodable();
+
+    // Sequence numbers
+    // Get lowest packet sequence number in frame
+    WebRtc_Word32 GetLowSeqNum() const;
+    // Get highest packet sequence number in frame
+    WebRtc_Word32 GetHighSeqNum() const;
+
+    int PictureId() const;
+    int TemporalId() const;
+    bool LayerSync() const;
+    int Tl0PicId() const;
+    bool NonReference() const;
+
+    // Set counted status (as counted by JB or not)
+    void SetCountedFrame(bool frameCounted);
+    bool GetCountedFrame() const;
+
+    // NACK - Building the NACK lists.
+    // Build hard NACK list: Zero out all entries in list up to and including
+    // _lowSeqNum.
+    int BuildHardNackList(int* list, int num);
+    // Build soft NACK list: Zero out only a subset of the packets, discard
+    // empty packets.
+    int BuildSoftNackList(int* list, int num, int rttMs);
+    void IncrementNackCount();
+    WebRtc_Word16 GetNackCount() const;
+
+    WebRtc_Word64 LatestPacketTimeMs();
+
+    webrtc::FrameType FrameType() const;
+    void SetPreviousFrameLoss();
+
+    WebRtc_Word32 ExtractFromStorage(const EncodedVideoData& frameFromStorage);
+
+    // The number of packets discarded because the decoder can't make use of
+    // them.
+    int NotDecodablePackets() const;
+
+protected:
+    void RestructureFrameInformation();
+    void PrepareForDecode();
+
+private:
+    VCMFrameBufferStateEnum    _state;         // Current state of the frame
+    bool                       _frameCounted;  // Was this frame counted by JB?
+    VCMSessionInfo             _sessionInfo;
+    WebRtc_UWord16             _nackCount;
+    WebRtc_Word64              _latestPacketTimeMs;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
diff --git a/trunk/src/modules/video_coding/main/source/frame_dropper.cc b/trunk/src/modules/video_coding/main/source/frame_dropper.cc
new file mode 100644
index 0000000..065e452
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/frame_dropper.cc
@@ -0,0 +1,331 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "frame_dropper.h"
+#include "internal_defines.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+VCMFrameDropper::VCMFrameDropper(WebRtc_Word32 vcmId)
+:
+_vcmId(vcmId),
+_keyFrameSizeAvgKbits(0.9f),
+_keyFrameRatio(0.99f),
+_dropRatio(0.9f, 0.96f)
+{
+    Reset();
+}
+
+void
+VCMFrameDropper::Reset()
+{
+    _keyFrameRatio.Reset(0.99f);
+    _keyFrameRatio.Apply(1.0f, 1.0f/300.0f); // 1 key frame every 10th second in 30 fps
+    _keyFrameSizeAvgKbits.Reset(0.9f);
+    _keyFrameCount = 0;
+    _accumulator = 0.0f;
+    _accumulatorMax = 150.0f; // assume 300 kb/s and 0.5 s window
+    _targetBitRate = 300.0f;
+    _userFrameRate = 30;
+    _keyFrameSpreadFrames = 0.5f * _userFrameRate;
+    _dropNext = false;
+    _dropRatio.Reset(0.9f);
+    _dropRatio.Apply(0.0f, 0.0f); // Initialize to 0
+    _dropCount = 0;
+    _windowSize = 0.5f;
+    _wasBelowMax = true;
+    _enabled = true;
+    _fastMode = false; // start with normal (non-aggressive) mode
+}
+
+void
+VCMFrameDropper::Enable(bool enable)
+{
+    _enabled = enable;
+}
+
+void
+VCMFrameDropper::Fill(WebRtc_UWord32 frameSizeBytes, bool deltaFrame)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+    float frameSizeKbits = 8.0f * static_cast<float>(frameSizeBytes) / 1000.0f;
+    if (!deltaFrame && !_fastMode) // fast mode does not treat key-frames any different
+    {
+        _keyFrameSizeAvgKbits.Apply(1, frameSizeKbits);
+        _keyFrameRatio.Apply(1.0, 1.0);
+        if (frameSizeKbits > _keyFrameSizeAvgKbits.Value())
+        {
+            // Remove the average key frame size since we
+            // compensate for key frames when adding delta
+            // frames.
+            frameSizeKbits -= _keyFrameSizeAvgKbits.Value();
+        }
+        else
+        {
+            // Shouldn't be negative, so zero is the lower bound.
+            frameSizeKbits = 0;
+        }
+        if (_keyFrameRatio.Value() > 1e-5 && 1 / _keyFrameRatio.Value() < _keyFrameSpreadFrames)
+        {
+            // We are sending key frames more often than our upper bound for
+            // how much we allow the key frame compensation to be spread
+            // out in time. Therefore we must use the key frame ratio rather
+            // than keyFrameSpreadFrames.
+            _keyFrameCount = static_cast<WebRtc_Word32>(1 / _keyFrameRatio.Value() + 0.5);
+        }
+        else
+        {
+            // Compensate for the key frame the following frames
+            _keyFrameCount = static_cast<WebRtc_Word32>(_keyFrameSpreadFrames + 0.5);
+        }
+    }
+    else
+    {
+        // Decrease the keyFrameRatio
+        _keyFrameRatio.Apply(1.0, 0.0);
+    }
+    // Change the level of the accumulator (bucket)
+    _accumulator += frameSizeKbits;
+}
+
+void
+VCMFrameDropper::Leak(WebRtc_UWord32 inputFrameRate)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+    if (inputFrameRate < 1)
+    {
+        return;
+    }
+    if (_targetBitRate < 0.0f)
+    {
+        return;
+    }
+    _keyFrameSpreadFrames = 0.5f * inputFrameRate;
+    // T is the expected bits per frame (target). If all frames were the same size,
+    // we would get T bits per frame. Notice that T is also weighted to be able to
+    // force a lower frame rate if wanted.
+    float T = _targetBitRate / inputFrameRate;
+    if (_keyFrameCount > 0)
+    {
+        // Perform the key frame compensation
+        if (_keyFrameRatio.Value() > 0 && 1 / _keyFrameRatio.Value() < _keyFrameSpreadFrames)
+        {
+            T -= _keyFrameSizeAvgKbits.Value() * _keyFrameRatio.Value();
+        }
+        else
+        {
+            T -= _keyFrameSizeAvgKbits.Value() / _keyFrameSpreadFrames;
+        }
+        _keyFrameCount--;
+    }
+    _accumulator -= T;
+    UpdateRatio();
+
+}
+
+void
+VCMFrameDropper::UpdateNack(WebRtc_UWord32 nackBytes)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+    _accumulator += static_cast<float>(nackBytes) * 8.0f / 1000.0f;
+}
+
+void
+VCMFrameDropper::FillBucket(float inKbits, float outKbits)
+{
+    _accumulator += (inKbits - outKbits);
+}
+
+void
+VCMFrameDropper::UpdateRatio()
+{
+    if (_accumulator > 1.3f * _accumulatorMax)
+    {
+        // Too far above accumulator max, react faster
+        _dropRatio.UpdateBase(0.8f);
+    }
+    else
+    {
+        // Go back to normal reaction
+        _dropRatio.UpdateBase(0.9f);
+    }
+    if (_accumulator > _accumulatorMax)
+    {
+        // We are above accumulator max, and should ideally
+        // drop a frame. Increase the dropRatio and drop
+        // the frame later.
+        if (_wasBelowMax)
+        {
+            _dropNext = true;
+        }
+        if (_fastMode)
+        {
+            // always drop in aggressive mode
+            _dropNext = true;
+        }
+
+        _dropRatio.Apply(1.0f, 1.0f);
+        _dropRatio.UpdateBase(0.9f);
+    }
+    else
+    {
+        _dropRatio.Apply(1.0f, 0.0f);
+    }
+    if (_accumulator < 0.0f)
+    {
+        _accumulator = 0.0f;
+    }
+    _wasBelowMax = _accumulator < _accumulatorMax;
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId),  "FrameDropper: dropRatio = %f accumulator = %f, accumulatorMax = %f", _dropRatio.Value(), _accumulator, _accumulatorMax);
+}
+
+// This function signals when to drop frames to the caller. It makes use of the dropRatio
+// to smooth out the drops over time.
+bool
+VCMFrameDropper::DropFrame()
+{
+    if (!_enabled)
+    {
+        return false;
+    }
+    if (_dropNext)
+    {
+        _dropNext = false;
+        _dropCount = 0;
+    }
+
+    if (_dropRatio.Value() >= 0.5f) // Drops per keep
+    {
+        // limit is the number of frames we should drop between each kept frame
+        // to keep our drop ratio. limit is positive in this case.
+        float denom = 1.0f - _dropRatio.Value();
+        if (denom < 1e-5)
+        {
+            denom = (float)1e-5;
+        }
+        WebRtc_Word32 limit = static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
+        if (_dropCount < 0)
+        {
+            // Reset the _dropCount since it was negative and should be positive.
+            if (_dropRatio.Value() > 0.4f)
+            {
+                _dropCount = -_dropCount;
+            }
+            else
+            {
+                _dropCount = 0;
+            }
+        }
+        if (_dropCount < limit)
+        {
+            // As long we are below the limit we should drop frames.
+            _dropCount++;
+            return true;
+        }
+        else
+        {
+            // Only when we reset _dropCount a frame should be kept.
+            _dropCount = 0;
+            return false;
+        }
+    }
+    else if (_dropRatio.Value() > 0.0f && _dropRatio.Value() < 0.5f) // Keeps per drop
+    {
+        // limit is the number of frames we should keep between each drop
+        // in order to keep the drop ratio. limit is negative in this case,
+        // and the _dropCount is also negative.
+        float denom = _dropRatio.Value();
+        if (denom < 1e-5)
+        {
+            denom = (float)1e-5;
+        }
+        WebRtc_Word32 limit = -static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
+        if (_dropCount > 0)
+        {
+            // Reset the _dropCount since we have a positive
+            // _dropCount, and it should be negative.
+            if (_dropRatio.Value() < 0.6f)
+            {
+                _dropCount = -_dropCount;
+            }
+            else
+            {
+                _dropCount = 0;
+            }
+        }
+        if (_dropCount > limit)
+        {
+            if (_dropCount == 0)
+            {
+                // Drop frames when we reset _dropCount.
+                _dropCount--;
+                return true;
+            }
+            else
+            {
+                // Keep frames as long as we haven't reached limit.
+                _dropCount--;
+                return false;
+            }
+        }
+        else
+        {
+            _dropCount = 0;
+            return false;
+        }
+    }
+    _dropCount = 0;
+    return false;
+
+    // A simpler version, unfiltered and quicker
+    //bool dropNext = _dropNext;
+    //_dropNext = false;
+    //return dropNext;
+}
+
+void
+VCMFrameDropper::SetRates(float bitRate, float userFrameRate)
+{
+    // Bit rate of -1 means infinite bandwidth.
+    _accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
+    if (_targetBitRate > 0.0f && bitRate < _targetBitRate && _accumulator > _accumulatorMax)
+    {
+        // Rescale the accumulator level if the accumulator max decreases
+        _accumulator = bitRate / _targetBitRate * _accumulator;
+    }
+    _targetBitRate = bitRate;
+    if (userFrameRate > 0.0f)
+    {
+        _userFrameRate = userFrameRate;
+    }
+}
+
+float
+VCMFrameDropper::ActualFrameRate(WebRtc_UWord32 inputFrameRate) const
+{
+    if (!_enabled)
+    {
+        return static_cast<float>(inputFrameRate);
+    }
+    return inputFrameRate * (1.0f - _dropRatio.Value());
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/frame_dropper.h b/trunk/src/modules/video_coding/main/source/frame_dropper.h
new file mode 100644
index 0000000..5e7e8a1
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/frame_dropper.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
+
+#include "exp_filter.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+/******************************/
+/* VCMFrameDropper class     */
+/****************************/
+// The Frame Dropper implements a variant of the leaky bucket algorithm
+// for keeping track of when to drop frames to avoid bit rate
+// over use when the encoder can't keep its bit rate.
+class VCMFrameDropper
+{
+public:
+    VCMFrameDropper(WebRtc_Word32 vcmId = 0);
+    // Resets the FrameDropper to its initial state.
+    // This means that the frameRateWeight is set to its
+    // default value as well.
+    void Reset();
+
+    void Enable(bool enable);
+    // Answers the question if it's time to drop a frame
+    // if we want to reach a given frame rate. Must be
+    // called for every frame.
+    //
+    // Return value     : True if we should drop the current frame
+    bool DropFrame();
+    // Updates the FrameDropper with the size of the latest encoded
+    // frame. The FrameDropper calculates a new drop ratio (can be
+    // seen as the probability to drop a frame) and updates its
+    // internal statistics.
+    //
+    // Input:
+    //          - frameSizeBytes    : The size of the latest frame
+    //                                returned from the encoder.
+    //          - deltaFrame        : True if the encoder returned
+    //                                a delta (non-key) frame.
+    void Fill(WebRtc_UWord32 frameSizeBytes, bool deltaFrame);
+
+    void Leak(WebRtc_UWord32 inputFrameRate);
+
+    void UpdateNack(WebRtc_UWord32 nackBytes);
+
+    // Sets the target bit rate and the frame rate produced by
+    // the camera.
+    //
+    // Input:
+    //          - bitRate       : The target bit rate
+    void SetRates(float bitRate, float userFrameRate);
+
+    // Return value     : The current average frame rate produced
+    //                    if the DropFrame() function is used as
+    //                    instruction of when to drop frames.
+    float ActualFrameRate(WebRtc_UWord32 inputFrameRate) const;
+
+private:
+    void FillBucket(float inKbits, float outKbits);
+    void UpdateRatio();
+
+    WebRtc_Word32     _vcmId;
+    VCMExpFilter       _keyFrameSizeAvgKbits;
+    VCMExpFilter       _keyFrameRatio;
+    float           _keyFrameSpreadFrames;
+    WebRtc_Word32     _keyFrameCount;
+    float           _accumulator;
+    float           _accumulatorMax;
+    float           _targetBitRate;
+    bool            _dropNext;
+    VCMExpFilter       _dropRatio;
+    WebRtc_Word32     _dropCount;
+    float           _windowSize;
+    float           _userFrameRate;
+    bool            _wasBelowMax;
+    bool            _enabled;
+    bool            _fastMode;
+}; // end of VCMFrameDropper class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
diff --git a/trunk/src/modules/video_coding/main/source/generic_decoder.cc b/trunk/src/modules/video_coding/main/source/generic_decoder.cc
new file mode 100644
index 0000000..217c21d
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/generic_decoder.cc
@@ -0,0 +1,227 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_coding.h"
+#include "trace.h"
+#include "generic_decoder.h"
+#include "internal_defines.h"
+#include "tick_time_base.h"
+
+namespace webrtc {
+
+VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming& timing,
+                                                 TickTimeBase* clock)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_clock(clock),
+_receiveCallback(NULL),
+_timing(timing),
+_timestampMap(kDecoderFrameMemoryLength),
+_lastReceivedPictureID(0)
+{
+}
+
+VCMDecodedFrameCallback::~VCMDecodedFrameCallback()
+{
+    delete _critSect;
+}
+
+void VCMDecodedFrameCallback::SetUserReceiveCallback(
+    VCMReceiveCallback* receiveCallback)
+{
+    CriticalSectionScoped cs(_critSect);
+    _receiveCallback = receiveCallback;
+}
+
+WebRtc_Word32 VCMDecodedFrameCallback::Decoded(RawImage& decodedImage)
+{
+    // TODO(holmer): We should improve this so that we can handle multiple
+    // callbacks from one call to Decode().
+    CriticalSectionScoped cs(_critSect);
+    VCMFrameInformation* frameInfo = static_cast<VCMFrameInformation*>(
+        _timestampMap.Pop(decodedImage._timeStamp));
+    if (frameInfo == NULL)
+    {
+        // The map should never be empty or full if this callback is called.
+        return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    _timing.StopDecodeTimer(
+        decodedImage._timeStamp,
+        frameInfo->decodeStartTimeMs,
+        _clock->MillisecondTimestamp());
+
+    if (_receiveCallback != NULL)
+    {
+        _frame.Swap(decodedImage._buffer,
+                    decodedImage._length,
+                    decodedImage._size);
+        _frame.SetWidth(decodedImage._width);
+        _frame.SetHeight(decodedImage._height);
+        _frame.SetTimeStamp(decodedImage._timeStamp);
+        _frame.SetRenderTime(frameInfo->renderTimeMs);
+        // Convert raw image to video frame
+        WebRtc_Word32 callbackReturn = _receiveCallback->FrameToRender(_frame);
+        if (callbackReturn < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug,
+                         webrtc::kTraceVideoCoding,
+                         -1,
+                         "Render callback returned error: %d", callbackReturn);
+        }
+    }
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32
+VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 pictureId)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_receiveCallback != NULL)
+    {
+        return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId);
+    }
+    return -1;
+}
+
+WebRtc_Word32
+VCMDecodedFrameCallback::ReceivedDecodedFrame(const WebRtc_UWord64 pictureId)
+{
+    _lastReceivedPictureID = pictureId;
+    return 0;
+}
+
+WebRtc_UWord64 VCMDecodedFrameCallback::LastReceivedPictureID() const
+{
+    return _lastReceivedPictureID;
+}
+
+WebRtc_Word32 VCMDecodedFrameCallback::Map(WebRtc_UWord32 timestamp, VCMFrameInformation* frameInfo)
+{
+    CriticalSectionScoped cs(_critSect);
+    return _timestampMap.Add(timestamp, frameInfo);
+}
+
+WebRtc_Word32 VCMDecodedFrameCallback::Pop(WebRtc_UWord32 timestamp)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_timestampMap.Pop(timestamp) == NULL)
+    {
+        return VCM_GENERAL_ERROR;
+    }
+    return VCM_OK;
+}
+
+VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, WebRtc_Word32 id, bool isExternal)
+:
+_id(id),
+_callback(NULL),
+_frameInfos(),
+_nextFrameInfoIdx(0),
+_decoder(decoder),
+_codecType(kVideoCodecUnknown),
+_isExternal(isExternal),
+_requireKeyFrame(false),
+_keyFrameDecoded(false)
+{
+}
+
+VCMGenericDecoder::~VCMGenericDecoder()
+{
+}
+
+WebRtc_Word32 VCMGenericDecoder::InitDecode(const VideoCodec* settings,
+                                            WebRtc_Word32 numberOfCores,
+                                            bool requireKeyFrame)
+{
+    _requireKeyFrame = requireKeyFrame;
+    _keyFrameDecoded = false;
+    _codecType = settings->codecType;
+
+    return _decoder.InitDecode(settings, numberOfCores);
+}
+
+WebRtc_Word32 VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
+                                        int64_t nowMs)
+{
+    if (_requireKeyFrame &&
+        !_keyFrameDecoded &&
+        frame.FrameType() != kVideoFrameKey &&
+        frame.FrameType() != kVideoFrameGolden)
+    {
+        // Require key frame is enabled, meaning that one key frame must be decoded
+        // before we can decode delta frames.
+        return VCM_CODEC_ERROR;
+    }
+    _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
+    _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
+    _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
+
+    WEBRTC_TRACE(webrtc::kTraceDebug,
+                 webrtc::kTraceVideoCoding,
+                 VCMId(_id),
+                 "Decoding timestamp %u", frame.TimeStamp());
+
+    _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
+
+    WebRtc_Word32 ret = _decoder.Decode(frame.EncodedImage(),
+                                        frame.MissingFrame(),
+                                        frame.FragmentationHeader(),
+                                        frame.CodecSpecific(),
+                                        frame.RenderTimeMs());
+
+    if (ret < WEBRTC_VIDEO_CODEC_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id), "Decoder error: %d\n", ret);
+        _callback->Pop(frame.TimeStamp());
+        return ret;
+    }
+    else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT ||
+             ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
+    {
+        // No output
+        _callback->Pop(frame.TimeStamp());
+    }
+    // Update the key frame decoded variable so that we know whether or not we've decoded a key frame since reset.
+    _keyFrameDecoded = (frame.FrameType() == kVideoFrameKey || frame.FrameType() == kVideoFrameGolden);
+    return ret;
+}
+
+WebRtc_Word32
+VCMGenericDecoder::Release()
+{
+    _keyFrameDecoded = false;
+    return _decoder.Release();
+}
+
+WebRtc_Word32 VCMGenericDecoder::Reset()
+{
+    _keyFrameDecoded = false;
+    return _decoder.Reset();
+}
+
+WebRtc_Word32 VCMGenericDecoder::SetCodecConfigParameters(const WebRtc_UWord8* buffer, WebRtc_Word32 size)
+{
+    return _decoder.SetCodecConfigParameters(buffer, size);
+}
+
+WebRtc_Word32 VCMGenericDecoder::RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback)
+{
+    _callback = callback;
+    return _decoder.RegisterDecodeCompleteCallback(callback);
+}
+
+bool VCMGenericDecoder::External() const
+{
+    return _isExternal;
+}
+
+} // namespace
diff --git a/trunk/src/modules/video_coding/main/source/generic_decoder.h b/trunk/src/modules/video_coding/main/source/generic_decoder.h
new file mode 100644
index 0000000..58dabc7
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/generic_decoder.h
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
+
+#include "timing.h"
+#include "timestamp_map.h"
+#include "video_codec_interface.h"
+#include "encoded_frame.h"
+#include "module_common_types.h"
+
+namespace webrtc
+{
+
+class VCMReceiveCallback;
+
+// Number of per-frame info slots kept by VCMGenericDecoder; decode
+// timestamps are mapped to these slots round-robin.
+enum { kDecoderFrameMemoryLength = 10 };
+
+// Bookkeeping recorded when a frame enters the decoder, retrieved again
+// via the timestamp map when the decoded image comes back.
+struct VCMFrameInformation
+{
+    WebRtc_Word64     renderTimeMs;
+    WebRtc_Word64     decodeStartTimeMs;
+    void*             userData;
+};
+
+// Receives decoded images from the video decoder and hands them on to the
+// user's VCMReceiveCallback; also tracks decode timing via VCMTiming and
+// maps RTP timestamps to VCMFrameInformation entries.
+class VCMDecodedFrameCallback : public DecodedImageCallback
+{
+public:
+    VCMDecodedFrameCallback(VCMTiming& timing, TickTimeBase* clock);
+    virtual ~VCMDecodedFrameCallback();
+    // Sets the user callback that receives fully decoded frames.
+    void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
+
+    // DecodedImageCallback implementation.
+    virtual WebRtc_Word32 Decoded(RawImage& decodedImage);
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
+
+    WebRtc_UWord64 LastReceivedPictureID() const;
+
+    // Associates / removes frame info for an RTP timestamp.
+    WebRtc_Word32 Map(WebRtc_UWord32 timestamp, VCMFrameInformation* frameInfo);
+    WebRtc_Word32 Pop(WebRtc_UWord32 timestamp);
+
+private:
+    CriticalSectionWrapper* _critSect;
+    TickTimeBase* _clock;
+    VideoFrame _frame;
+    VCMReceiveCallback* _receiveCallback;
+    VCMTiming& _timing;
+    VCMTimestampMap _timestampMap;
+    WebRtc_UWord64 _lastReceivedPictureID;
+};
+
+
+// Thin wrapper around a VideoDecoder instance; tracks per-frame info,
+// key-frame state, and whether the decoder is external.
+class VCMGenericDecoder
+{
+    friend class VCMCodecDataBase;
+public:
+    VCMGenericDecoder(VideoDecoder& decoder, WebRtc_Word32 id = 0, bool isExternal = false);
+    ~VCMGenericDecoder();
+
+    /**
+    *	Initialize the decoder with the information from the VideoCodec
+    */
+    WebRtc_Word32 InitDecode(const VideoCodec* settings,
+                             WebRtc_Word32 numberOfCores,
+                             bool requireKeyFrame);
+
+    /**
+    *	Decode to a raw I420 frame,
+    *
+    *	inputVideoBuffer	reference to encoded video frame
+    */
+    WebRtc_Word32 Decode(const VCMEncodedFrame& inputFrame, int64_t nowMs);
+
+    /**
+    *	Free the decoder memory
+    */
+    WebRtc_Word32 Release();
+
+    /**
+    *	Reset the decoder state, prepare for a new call
+    */
+    WebRtc_Word32 Reset();
+
+    /**
+    *	Codec configuration data sent out-of-band, i.e. in SIP call setup
+    *
+    *	buffer pointer to the configuration data
+    *	size the size of the configuration data in bytes
+    */
+    WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/,
+                                           WebRtc_Word32 /*size*/);
+
+    // Registers the callback receiving decoded images; also stored locally
+    // so Decode can Pop timestamps on decoder errors.
+    WebRtc_Word32 RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback);
+
+    // True if the decoder was registered externally.
+    bool External() const;
+
+protected:
+
+    WebRtc_Word32               _id;
+    VCMDecodedFrameCallback*    _callback;
+    // Round-robin frame info slots indexed by _nextFrameInfoIdx.
+    VCMFrameInformation         _frameInfos[kDecoderFrameMemoryLength];
+    WebRtc_UWord32              _nextFrameInfoIdx;
+    VideoDecoder&               _decoder;
+    VideoCodecType              _codecType;
+    bool                        _isExternal;
+    bool                        _requireKeyFrame;
+    // True once a key frame has been decoded since the last Reset/Release.
+    bool                        _keyFrameDecoded;
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
diff --git a/trunk/src/modules/video_coding/main/source/generic_encoder.cc b/trunk/src/modules/video_coding/main/source/generic_encoder.cc
new file mode 100644
index 0000000..58cdd99
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/generic_encoder.cc
@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "encoded_frame.h"
+#include "generic_encoder.h"
+#include "media_optimization.h"
+#include "../../../../engine_configurations.h"
+
+namespace webrtc {
+
+//#define DEBUG_ENCODER_BIT_STREAM
+
+// Wraps a VideoEncoder. internalSource marks encoders that produce frames
+// themselves (e.g. hardware capture+encode pipelines).
+VCMGenericEncoder::VCMGenericEncoder(VideoEncoder& encoder, bool internalSource /*= false*/)
+:
+_encoder(encoder),
+_codecType(kVideoCodecUnknown),
+_VCMencodedFrameCallback(NULL),
+_bitRate(0),
+_frameRate(0),
+// Bug fix: was hard-coded to false, silently discarding the constructor
+// argument; InternalSource() and the drop-frame path in
+// VCMEncodedFrameCallback::Encoded then never saw internal-source encoders.
+_internalSource(internalSource)
+{
+}
+
+
+VCMGenericEncoder::~VCMGenericEncoder()
+{
+}
+
+// Clears cached rate state and the callback pointer, then releases the
+// wrapped encoder's resources.
+WebRtc_Word32 VCMGenericEncoder::Release()
+{
+    _bitRate = 0;
+    _frameRate = 0;
+    _VCMencodedFrameCallback = NULL;
+    return _encoder.Release();
+}
+
+// Caches start bitrate, max frame rate and codec type from the settings,
+// propagates the codec type to an already-registered callback, and
+// initializes the wrapped encoder.
+WebRtc_Word32
+VCMGenericEncoder::InitEncode(const VideoCodec* settings,
+                              WebRtc_Word32 numberOfCores,
+                              WebRtc_UWord32 maxPayloadSize)
+{
+    _bitRate = settings->startBitrate;
+    _frameRate = settings->maxFramerate;
+    _codecType = settings->codecType;
+    if (_VCMencodedFrameCallback != NULL)
+    {
+        _VCMencodedFrameCallback->SetCodecType(_codecType);
+    }
+    return _encoder.InitEncode(settings, numberOfCores, maxPayloadSize);
+}
+
+// Wraps the input VideoFrame in a RawImage and encodes it, converting the
+// requested per-stream frame types to the encoder's VideoFrameType enum.
+// NOTE(review): assumes frameType points at an array of at least
+// kMaxSimulcastStreams entries — confirm against callers.
+WebRtc_Word32
+VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
+                          const CodecSpecificInfo* codecSpecificInfo,
+                          FrameType* frameType)
+{
+    RawImage rawImage(inputFrame.Buffer(),
+                      inputFrame.Length(),
+                      inputFrame.Size());
+    rawImage._width     = inputFrame.Width();
+    rawImage._height    = inputFrame.Height();
+    rawImage._timeStamp = inputFrame.TimeStamp();
+
+    VideoFrameType videoFrameTypes[kMaxSimulcastStreams];
+    for (int i = 0; i < kMaxSimulcastStreams; i++)
+    {
+        videoFrameTypes[i] = VCMEncodedFrame::ConvertFrameType(frameType[i]);
+    }
+    return _encoder.Encode(rawImage, codecSpecificInfo, videoFrameTypes);
+}
+
+// Forwards packet loss rate and round-trip time to the wrapped encoder.
+WebRtc_Word32
+VCMGenericEncoder::SetChannelParameters(WebRtc_Word32 packetLoss, int rtt)
+{
+    return _encoder.SetChannelParameters(packetLoss, rtt);
+}
+
+// Applies new target bit/frame rates; caches them only if the encoder
+// accepted the change.
+WebRtc_Word32
+VCMGenericEncoder::SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate)
+{
+    WebRtc_Word32 ret = _encoder.SetRates(newBitRate, frameRate);
+    if (ret < 0)
+    {
+        return ret;
+    }
+    _bitRate = newBitRate;
+    _frameRate = frameRate;
+    return VCM_OK;
+}
+
+// Retrieves out-of-band codec configuration data from the wrapped encoder.
+// Simplified: the previous `if (ret < 0) return ret;` followed by
+// `return ret;` propagated the value unchanged on both paths, so the
+// branch was dead code.
+WebRtc_Word32
+VCMGenericEncoder::CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size)
+{
+    return _encoder.CodecConfigParameters(buffer, size);
+}
+
+// Returns the bit rate cached by InitEncode/SetRates.
+WebRtc_UWord32 VCMGenericEncoder::BitRate() const
+{
+    return _bitRate;
+}
+
+// Returns the frame rate cached by InitEncode/SetRates.
+WebRtc_UWord32 VCMGenericEncoder::FrameRate() const
+{
+    return _frameRate;
+}
+
+// Enables/disables periodic key frames in the wrapped encoder.
+WebRtc_Word32
+VCMGenericEncoder::SetPeriodicKeyFrames(bool enable)
+{
+    return _encoder.SetPeriodicKeyFrames(enable);
+}
+
+// Requests frames of the given types from an internal-source encoder by
+// calling Encode with an empty image.
+// NOTE(review): relies on RawImage's default constructor producing a state
+// the encoder treats as "no input image" — confirm against RawImage.
+WebRtc_Word32
+VCMGenericEncoder::RequestFrame(FrameType* frameTypes)
+{
+    RawImage image;
+    VideoFrameType videoFrameTypes[kMaxSimulcastStreams];
+    for (int i = 0; i < kMaxSimulcastStreams; i++)
+    {
+        videoFrameTypes[i] = VCMEncodedFrame::ConvertFrameType(frameTypes[i]);
+    }
+    return _encoder.Encode(image, NULL,  videoFrameTypes);
+}
+
+// Stores the encode-complete callback, pushes codec type and
+// internal-source flag into it, and registers it with the encoder.
+// NOTE(review): dereferences the argument without a NULL check — callers
+// must not pass NULL.
+WebRtc_Word32
+VCMGenericEncoder::RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback)
+{
+   _VCMencodedFrameCallback = VCMencodedFrameCallback;
+
+   _VCMencodedFrameCallback->SetCodecType(_codecType);
+   _VCMencodedFrameCallback->SetInternalSource(_internalSource);
+   return _encoder.RegisterEncodeCompleteCallback(_VCMencodedFrameCallback);
+}
+
+// True if this encoder produces its own source frames.
+bool
+VCMGenericEncoder::InternalSource() const
+{
+    return _internalSource;
+}
+
+ /***************************
+  * Callback Implementation
+  ***************************/
+// When DEBUG_ENCODER_BIT_STREAM is defined, the encoded bitstream is also
+// dumped to a file for offline inspection.
+VCMEncodedFrameCallback::VCMEncodedFrameCallback():
+_sendCallback(),
+_mediaOpt(NULL),
+_encodedBytes(0),
+_payloadType(0),
+_bitStreamAfterEncoder(NULL)
+{
+#ifdef DEBUG_ENCODER_BIT_STREAM
+    // fopen may fail and leave _bitStreamAfterEncoder NULL; users of the
+    // stream must check for NULL before use.
+    _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
+#endif
+}
+
+VCMEncodedFrameCallback::~VCMEncodedFrameCallback()
+{
+#ifdef DEBUG_ENCODER_BIT_STREAM
+    // Bug fix: the constructor's fopen() may fail and leave the pointer
+    // NULL; fclose(NULL) is undefined behavior, so only close a stream
+    // that was actually opened (Encoded() already guards its fwrite the
+    // same way).
+    if (_bitStreamAfterEncoder != NULL)
+    {
+        fclose(_bitStreamAfterEncoder);
+    }
+#endif
+}
+
+// Sets the packetization transport that receives encoded data in Encoded().
+WebRtc_Word32
+VCMEncodedFrameCallback::SetTransportCallback(VCMPacketizationCallback* transport)
+{
+    _sendCallback = transport;
+    return VCM_OK;
+}
+
+// Encode-complete callback: forwards the encoded image to the transport
+// callback, updates media optimization statistics, and (for internal-source
+// encoders) returns the drop-frame decision back to the encoder.
+// Returns VCM_UNINITIALIZED when no transport callback is set, a negative
+// transport error code on send failure, otherwise VCM_OK / DropFrame().
+// NOTE(review): fragmentationHeader defaults to NULL in the declaration but
+// is dereferenced unconditionally below — callers must always pass one;
+// confirm against the encoder implementations.
+WebRtc_Word32
+VCMEncodedFrameCallback::Encoded(
+    EncodedImage &encodedImage,
+    const CodecSpecificInfo* codecSpecificInfo,
+    const RTPFragmentationHeader* fragmentationHeader)
+{
+    FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);
+
+    WebRtc_UWord32 encodedBytes = 0;
+    if (_sendCallback != NULL)
+    {
+        encodedBytes = encodedImage._length;
+
+#ifdef DEBUG_ENCODER_BIT_STREAM
+        if (_bitStreamAfterEncoder != NULL)
+        {
+            fwrite(encodedImage._buffer, 1, encodedImage._length, _bitStreamAfterEncoder);
+        }
+#endif
+
+        // Build the RTP video header from codec-specific info; a NULL
+        // pointer is passed on when no codec-specific info is available.
+        RTPVideoHeader rtpVideoHeader;
+        RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
+        if (codecSpecificInfo)
+        {
+            CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr);
+        }
+        else
+        {
+            rtpVideoHeaderPtr = NULL;
+        }
+
+        WebRtc_Word32 callbackReturn = _sendCallback->SendData(
+            frameType,
+            _payloadType,
+            encodedImage._timeStamp,
+            encodedImage._buffer,
+            encodedBytes,
+            *fragmentationHeader,
+            rtpVideoHeaderPtr);
+       if (callbackReturn < 0)
+       {
+           return callbackReturn;
+       }
+    }
+    else
+    {
+        return VCM_UNINITIALIZED;
+    }
+    _encodedBytes = encodedBytes;
+    if (_mediaOpt != NULL) {
+      _mediaOpt->UpdateWithEncodedData(_encodedBytes, frameType);
+      if (_internalSource)
+      {
+          return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame
+      }
+    }
+    return VCM_OK;
+}
+
+// Returns the byte count of the most recently delivered encoded frame.
+WebRtc_UWord32
+VCMEncodedFrameCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+// Sets the media optimization module updated from Encoded().
+void
+VCMEncodedFrameCallback::SetMediaOpt(VCMMediaOptimization *mediaOpt)
+{
+    _mediaOpt = mediaOpt;
+}
+
+// Copies codec-specific info into the RTP video header pointed to by *rtp.
+// Only VP8 carries codec-specific fields here; for any other codec *rtp is
+// set to NULL so no codec header is sent.
+void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
+                                                RTPVideoHeader** rtp) {
+    switch (info.codecType) {
+        case kVideoCodecVP8: {
+            (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
+            (*rtp)->codecHeader.VP8.pictureId =
+                info.codecSpecific.VP8.pictureId;
+            (*rtp)->codecHeader.VP8.nonReference =
+                info.codecSpecific.VP8.nonReference;
+            (*rtp)->codecHeader.VP8.temporalIdx =
+                info.codecSpecific.VP8.temporalIdx;
+            (*rtp)->codecHeader.VP8.layerSync =
+                info.codecSpecific.VP8.layerSync;
+            (*rtp)->codecHeader.VP8.tl0PicIdx =
+                info.codecSpecific.VP8.tl0PicIdx;
+            (*rtp)->codecHeader.VP8.keyIdx =
+                info.codecSpecific.VP8.keyIdx;
+            (*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
+            return;
+        }
+        default: {
+            // No codec specific info. Change RTP header pointer to NULL.
+            *rtp = NULL;
+            return;
+        }
+    }
+}
+} // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/generic_encoder.h b/trunk/src/modules/video_coding/main/source/generic_encoder.h
new file mode 100644
index 0000000..99c2ce9
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/generic_encoder.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
+
+#include "video_codec_interface.h"
+
+#include <stdio.h>
+
+namespace webrtc
+{
+
+class VCMMediaOptimization;
+
+/*************************************/
+/* VCMEncodeFrameCallback class     */
+/***********************************/
+// Receives encode-complete events from the video encoder, forwards the
+// encoded bitstream to the packetization transport, and feeds statistics
+// into media optimization.
+class VCMEncodedFrameCallback : public EncodedImageCallback
+{
+public:
+    VCMEncodedFrameCallback();
+    virtual ~VCMEncodedFrameCallback();
+
+    /*
+    * Callback implementation - codec encode complete
+    */
+    WebRtc_Word32 Encoded(
+        EncodedImage& encodedImage,
+        const CodecSpecificInfo* codecSpecificInfo = NULL,
+        const RTPFragmentationHeader* fragmentationHeader = NULL);
+    /*
+    * Get number of encoded bytes
+    */
+    WebRtc_UWord32 EncodedBytes();
+    /*
+    * Callback implementation - generic encoder encode complete
+    */
+    WebRtc_Word32 SetTransportCallback(VCMPacketizationCallback* transport);
+    /**
+    * Set media Optimization
+    */
+    void SetMediaOpt (VCMMediaOptimization* mediaOpt);
+
+    // Simple setters pushed in by VCMGenericEncoder.
+    void SetPayloadType(WebRtc_UWord8 payloadType) { _payloadType = payloadType; };
+    void SetCodecType(VideoCodecType codecType) {_codecType = codecType;};
+    void SetInternalSource(bool internalSource) { _internalSource = internalSource; };
+
+private:
+    /*
+     * Map information from info into rtp. If no relevant information is found
+     * in info, rtp is set to NULL.
+     */
+    static void CopyCodecSpecific(const CodecSpecificInfo& info,
+                                  RTPVideoHeader** rtp);
+
+    VCMPacketizationCallback* _sendCallback;
+    VCMMediaOptimization*     _mediaOpt;
+    WebRtc_UWord32            _encodedBytes;
+    WebRtc_UWord8             _payloadType;
+    VideoCodecType            _codecType;
+    bool                      _internalSource;
+    // Debug bitstream dump file; non-NULL only when DEBUG_ENCODER_BIT_STREAM
+    // is defined and fopen succeeded.
+    FILE*                     _bitStreamAfterEncoder;
+};// end of VCMEncodeFrameCallback class
+
+
+/******************************/
+/* VCMGenericEncoder class    */
+/******************************/
+// Thin wrapper around a VideoEncoder instance; caches rate settings and
+// wires the encode-complete callback.
+class VCMGenericEncoder
+{
+    friend class VCMCodecDataBase;
+public:
+    VCMGenericEncoder(VideoEncoder& encoder, bool internalSource = false);
+    ~VCMGenericEncoder();
+    /**
+    *	Free encoder memory
+    */
+    WebRtc_Word32 Release();
+    /**
+    *	Initialize the encoder with the information from the VideoCodec
+    */
+    WebRtc_Word32 InitEncode(const VideoCodec* settings,
+                             WebRtc_Word32 numberOfCores,
+                             WebRtc_UWord32 maxPayloadSize);
+    /**
+    *	Encode raw image
+    *	inputFrame        : Frame containing raw image
+    *	codecSpecificInfo : Specific codec data
+    *	cameraFrameRate	  :	request or information from the remote side
+    *	frameType         : The requested frame type to encode
+    */
+    WebRtc_Word32 Encode(const VideoFrame& inputFrame,
+                         const CodecSpecificInfo* codecSpecificInfo,
+                         FrameType* frameType);
+    /**
+    *	Set new target bit rate and frame rate
+    * Return Value: new bit rate if OK, otherwise < 0
+    */
+    WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate);
+    /**
+    * Set a new packet loss rate and a new round-trip time in milliseconds.
+    */
+    WebRtc_Word32 SetChannelParameters(WebRtc_Word32 packetLoss, int rtt);
+    WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size);
+    /**
+    * Register a transport callback which will be called to deliver the encoded buffers
+    */
+    WebRtc_Word32 RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback);
+    /**
+    * Get encoder bit rate
+    */
+    WebRtc_UWord32 BitRate() const;
+     /**
+    * Get encoder frame rate
+    */
+    WebRtc_UWord32 FrameRate() const;
+
+    WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
+
+    // Request frames of the given types from an internal-source encoder.
+    WebRtc_Word32 RequestFrame(FrameType* frameTypes);
+
+    bool InternalSource() const;
+
+private:
+    VideoEncoder&               _encoder;
+    VideoCodecType              _codecType;
+    VCMEncodedFrameCallback*    _VCMencodedFrameCallback;
+    // Cached values from InitEncode/SetRates.
+    WebRtc_UWord32              _bitRate;
+    WebRtc_UWord32              _frameRate;
+    bool                        _internalSource;
+}; // end of VCMGenericEncoder class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
diff --git a/trunk/src/modules/video_coding/main/source/inter_frame_delay.cc b/trunk/src/modules/video_coding/main/source/inter_frame_delay.cc
new file mode 100644
index 0000000..3b520b3
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/inter_frame_delay.cc
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "inter_frame_delay.h"
+
+namespace webrtc {
+
+// Initializes the estimator; equivalent to a Reset at construction time.
+VCMInterFrameDelay::VCMInterFrameDelay(int64_t currentWallClock)
+{
+    Reset(currentWallClock);
+}
+
+// Resets the delay estimate
+void
+VCMInterFrameDelay::Reset(int64_t currentWallClock)
+{
+    _zeroWallClock = currentWallClock;
+    _wrapArounds = 0;
+    // _prevWallClock == 0 doubles as the "no frame seen yet" sentinel used
+    // by CalculateDelay.
+    _prevWallClock = 0;
+    _prevTimestamp = 0;
+    _dTS = 0;
+}
+
+// Calculates the delay of a frame with the given timestamp.
+// This method is called when the frame is complete.
+// *delay receives the difference between the wall-clock interval and the
+// RTP-timestamp interval (in ms) relative to the previous frame; returns
+// false (and zero delay) for reordered timestamps.
+bool
+VCMInterFrameDelay::CalculateDelay(WebRtc_UWord32 timestamp,
+                                WebRtc_Word64 *delay,
+                                int64_t currentWallClock)
+{
+    if (_prevWallClock == 0)
+    {
+        // First set of data, initialization, wait for next frame
+        _prevWallClock = currentWallClock;
+        _prevTimestamp = timestamp;
+        *delay = 0;
+        return true;
+    }
+
+    WebRtc_Word32 prevWrapArounds = _wrapArounds;
+    CheckForWrapArounds(timestamp);
+
+    // This will be -1 for backward wrap arounds and +1 for forward wrap arounds
+    WebRtc_Word32 wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;
+
+    // Account for reordering in jitter variance estimate in the future?
+    // Note that this also captures incomplete frames which are grabbed
+    // for decoding after a later frame has been complete, i.e. real
+    // packet losses.
+    if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) || wrapAroundsSincePrev < 0)
+    {
+        *delay = 0;
+        return false;
+    }
+
+    // Compute the compensated timestamp difference and convert it to ms and
+    // round it to closest integer. The 90.0 divisor is the 90 kHz RTP video
+    // clock (ticks per millisecond).
+    _dTS = static_cast<WebRtc_Word64>((timestamp + wrapAroundsSincePrev *
+                (static_cast<WebRtc_Word64>(1)<<32) - _prevTimestamp) / 90.0 + 0.5);
+
+    // frameDelay is the difference of dT and dTS -- i.e. the difference of
+    // the wall clock time difference and the timestamp difference between
+    // two following frames.
+    *delay = static_cast<WebRtc_Word64>(currentWallClock - _prevWallClock - _dTS);
+
+    _prevTimestamp = timestamp;
+    _prevWallClock = currentWallClock;
+
+    return true;
+}
+
+// Returns the current difference between incoming timestamps
+// Returns the current difference between incoming timestamps in ms,
+// clamped at zero (the _dTS < 0 case cannot result from CalculateDelay's
+// forward-only path, but is guarded defensively).
+WebRtc_UWord32 VCMInterFrameDelay::CurrentTimeStampDiffMs() const
+{
+    if (_dTS < 0)
+    {
+        return 0;
+    }
+    return static_cast<WebRtc_UWord32>(_dTS);
+}
+
+// Investigates if the timestamp clock has overflowed since the last timestamp and
+// keeps track of the number of wrap arounds since reset. Uses the sign of
+// the 32-bit-truncated difference to distinguish a genuine wrap from plain
+// reordering.
+void
+VCMInterFrameDelay::CheckForWrapArounds(WebRtc_UWord32 timestamp)
+{
+    if (timestamp < _prevTimestamp)
+    {
+        // This difference will probably be less than -2^31 if we have had a wrap around
+        // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is cast to a Word32,
+        // it should be positive.
+        if (static_cast<WebRtc_Word32>(timestamp - _prevTimestamp) > 0)
+        {
+            // Forward wrap around
+            _wrapArounds++;
+        }
+    }
+    // This difference will probably be less than -2^31 if we have had a backward wrap around.
+    // Since it is cast to a Word32, it should be positive.
+    else if (static_cast<WebRtc_Word32>(_prevTimestamp - timestamp) > 0)
+    {
+        // Backward wrap around
+        _wrapArounds--;
+    }
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/inter_frame_delay.h b/trunk/src/modules/video_coding/main/source/inter_frame_delay.h
new file mode 100644
index 0000000..807c64b
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/inter_frame_delay.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// Estimates the inter-frame delay: the difference between wall-clock
+// arrival spacing and RTP timestamp spacing, with 32-bit timestamp
+// wrap-around compensation.
+class VCMInterFrameDelay
+{
+public:
+    VCMInterFrameDelay(int64_t currentWallClock);
+
+    // Resets the estimate. Zeros are given as parameters.
+    void Reset(int64_t currentWallClock);
+
+    // Calculates the delay of a frame with the given timestamp.
+    // This method is called when the frame is complete.
+    //
+    // Input:
+    //          - timestamp         : RTP timestamp of a received frame
+    //          - *delay            : Pointer to memory where the result should be stored
+    //          - currentWallClock  : The current time in milliseconds.
+    //                                (NOTE(review): the original comment said
+    //                                "-1 for normal operation", but the .cc
+    //                                always uses the value directly — confirm.)
+    // Return value                 : true if OK, false when reordered timestamps
+    bool CalculateDelay(WebRtc_UWord32 timestamp,
+                        WebRtc_Word64 *delay,
+                        int64_t currentWallClock);
+
+    // Returns the current difference between incoming timestamps
+    //
+    // Return value                 : Wrap-around compensated difference between incoming
+    //                                timestamps.
+    WebRtc_UWord32 CurrentTimeStampDiffMs() const;
+
+private:
+    // Controls if the RTP timestamp counter has had a wrap around
+    // between the current and the previously received frame.
+    //
+    // Input:
+    //          - timestamp         : RTP timestamp of the current frame.
+    void CheckForWrapArounds(WebRtc_UWord32 timestamp);
+
+    WebRtc_Word64         _zeroWallClock; // Local timestamp of the first video packet received
+    WebRtc_Word32         _wrapArounds;   // Number of wrapArounds detected
+    // The previous timestamp passed to the delay estimate
+    WebRtc_UWord32        _prevTimestamp;
+    // The previous wall clock timestamp used by the delay estimate
+    WebRtc_Word64         _prevWallClock;
+    // Wrap-around compensated difference between incoming timestamps
+    WebRtc_Word64         _dTS;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
diff --git a/trunk/src/modules/video_coding/main/source/internal_defines.h b/trunk/src/modules/video_coding/main/source/internal_defines.h
new file mode 100644
index 0000000..0ba6385
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/internal_defines.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+#define MASK_32_BITS(x) (0xFFFFFFFF & (x))
+
+// Truncates a 64-bit value to its low 32 bits.
+inline WebRtc_UWord32 MaskWord64ToUWord32(WebRtc_Word64 w64)
+{
+    return static_cast<WebRtc_UWord32>(MASK_32_BITS(w64));
+}
+
+#define VCM_MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define VCM_MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+#define VCM_DEFAULT_CODEC_WIDTH 352
+#define VCM_DEFAULT_CODEC_HEIGHT 288
+#define VCM_DEFAULT_FRAME_RATE 30
+#define VCM_MIN_BITRATE 30
+#define VCM_FLUSH_INDICATOR 4
+
+// Helper macros for creating the static codec list.
+// Bug fix: the expression-valued macros below are now parenthesized so
+// they expand correctly inside larger expressions (e.g.
+// `x * VCM_NUM_VIDEO_CODECS_AVAILABLE` previously expanded to
+// `x * VCM_I420_IDX + 1`).
+#define VCM_NO_CODEC_IDX (-1)
+#ifdef VIDEOCODEC_VP8
+  #define VCM_VP8_IDX (VCM_NO_CODEC_IDX + 1)
+#else
+  #define VCM_VP8_IDX VCM_NO_CODEC_IDX
+#endif
+#ifdef VIDEOCODEC_I420
+  #define VCM_I420_IDX (VCM_VP8_IDX + 1)
+#else
+  #define VCM_I420_IDX VCM_VP8_IDX
+#endif
+#define VCM_NUM_VIDEO_CODECS_AVAILABLE (VCM_I420_IDX + 1)
+
+#define VCM_NO_RECEIVER_ID 0
+
+// Packs a VCM id and receiver id into one 32-bit trace identifier.
+inline WebRtc_Word32 VCMId(const WebRtc_Word32 vcmId, const WebRtc_Word32 receiverId = 0)
+{
+    return static_cast<WebRtc_Word32>((vcmId << 16) + receiverId);
+}
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
diff --git a/trunk/src/modules/video_coding/main/source/jitter_buffer.cc b/trunk/src/modules/video_coding/main/source/jitter_buffer.cc
new file mode 100644
index 0000000..4b146b6
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_buffer.cc
@@ -0,0 +1,1775 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/main/source/jitter_buffer.h"
+
+#include <algorithm>
+#include <cassert>
+
+#include "modules/video_coding/main/source/event.h"
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "modules/video_coding/main/source/inter_frame_delay.h"
+#include "modules/video_coding/main/source/internal_defines.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/jitter_estimator.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+
+#if defined(_WIN32)
+    // VS 2005: Don't warn for default initialized arrays. See help for more info.
+    #pragma warning(disable:4351)
+#endif
+
+namespace webrtc {
+
+// Predicates used when searching for frames in the frame buffer list
+// Predicate: true when |timestamp_| is the later (or equal) timestamp in RTP
+// wrap-around order, i.e. the tested frame is older. Used with find_if over
+// the timestamp-sorted frame list to locate an insertion point.
+class FrameSmallerTimestamp {
+ public:
+  // explicit: prevent accidental implicit conversion from uint32_t.
+  explicit FrameSmallerTimestamp(uint32_t timestamp) : timestamp_(timestamp) {}
+  bool operator()(VCMFrameBuffer* frame) {
+    return (LatestTimestamp(timestamp_, frame->TimeStamp(), NULL) ==
+        timestamp_);
+  }
+
+ private:
+  uint32_t timestamp_;
+};
+
+// Predicate: true when the tested frame carries exactly |timestamp_|.
+class FrameEqualTimestamp {
+ public:
+  // explicit: prevent accidental implicit conversion from uint32_t.
+  explicit FrameEqualTimestamp(uint32_t timestamp) : timestamp_(timestamp) {}
+  bool operator()(VCMFrameBuffer* frame) {
+    return (timestamp_ == frame->TimeStamp());
+  }
+
+ private:
+  uint32_t timestamp_;
+};
+
+// Predicate: matches key frames whose buffer state is complete or decodable.
+class CompleteDecodableKeyFrameCriteria {
+ public:
+  bool operator()(VCMFrameBuffer* frame) {
+    if (frame->FrameType() != kVideoFrameKey) {
+      return false;
+    }
+    const VCMFrameBufferStateEnum state = frame->GetState();
+    return (state == kStateComplete) || (state == kStateDecodable);
+  }
+};
+
+// Constructor. |clock| is borrowed, not owned. |master| marks the primary
+// jitter buffer of a pair (only the master emits per-frame trace output;
+// see UpdateFrameState).
+VCMJitterBuffer::VCMJitterBuffer(TickTimeBase* clock,
+                                 WebRtc_Word32 vcmId,
+                                 WebRtc_Word32 receiverId,
+                                 bool master) :
+    _vcmId(vcmId),
+    _receiverId(receiverId),
+    _clock(clock),
+    _running(false),
+    _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+    _master(master),
+    _frameEvent(),
+    _packetEvent(),
+    _maxNumberOfFrames(kStartNumberOfFrames),
+    _frameBuffers(),
+    _frameList(),
+    _lastDecodedState(),
+    _packetsNotDecodable(0),
+    _receiveStatistics(),
+    _incomingFrameRate(0),
+    _incomingFrameCount(0),
+    _timeLastIncomingFrameCount(0),
+    _incomingBitCount(0),
+    _incomingBitRate(0),
+    _dropCount(0),
+    _numConsecutiveOldFrames(0),
+    _numConsecutiveOldPackets(0),
+    _discardedPackets(0),
+    _jitterEstimate(vcmId, receiverId),
+    _delayEstimate(_clock->MillisecondTimestamp()),
+    _rttMs(0),
+    _nackMode(kNoNack),
+    _lowRttNackThresholdMs(-1),
+    _highRttNackThresholdMs(-1),
+    _NACKSeqNum(),
+    _NACKSeqNumLength(0),
+    _waitingForKeyFrame(false),
+    _firstPacket(true)
+{
+    memset(_frameBuffers, 0, sizeof(_frameBuffers));
+    memset(_receiveStatistics, 0, sizeof(_receiveStatistics));
+    // Filling every byte with 0xFF sets each integer element to -1,
+    // meaning "no sequence number stored".
+    memset(_NACKSeqNumInternal, -1, sizeof(_NACKSeqNumInternal));
+
+    // Pre-allocate the initial frame pool; GetEmptyFrame() may grow it
+    // later, up to kMaxNumberOfFrames.
+    for (int i = 0; i< kStartNumberOfFrames; i++)
+    {
+        _frameBuffers[i] = new VCMFrameBuffer();
+    }
+}
+
+// Destructor: stops the buffer, then frees every allocated frame buffer and
+// the critical section.
+VCMJitterBuffer::~VCMJitterBuffer()
+{
+    Stop();
+    for (int i = 0; i < kMaxNumberOfFrames; ++i)
+    {
+        // delete of a NULL slot is a harmless no-op.
+        delete _frameBuffers[i];
+    }
+    delete _critSect;
+}
+
+// Deep-copies the full state of |rhs| into this jitter buffer, cloning its
+// frame-buffer pool and rebuilding the timestamp-sorted frame list.
+// Holds both critical sections for the duration (this first, then rhs).
+// _master is deliberately inverted so a buffer pair has exactly one master.
+void
+VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs)
+{
+    if (this != &rhs)
+    {
+        _critSect->Enter();
+        rhs._critSect->Enter();
+        _vcmId = rhs._vcmId;
+        _receiverId = rhs._receiverId;
+        _running = rhs._running;
+        _master = !rhs._master;
+        _maxNumberOfFrames = rhs._maxNumberOfFrames;
+        _incomingFrameRate = rhs._incomingFrameRate;
+        _incomingFrameCount = rhs._incomingFrameCount;
+        _timeLastIncomingFrameCount = rhs._timeLastIncomingFrameCount;
+        _incomingBitCount = rhs._incomingBitCount;
+        _incomingBitRate = rhs._incomingBitRate;
+        _dropCount = rhs._dropCount;
+        _numConsecutiveOldFrames = rhs._numConsecutiveOldFrames;
+        _numConsecutiveOldPackets = rhs._numConsecutiveOldPackets;
+        _discardedPackets = rhs._discardedPackets;
+        _jitterEstimate = rhs._jitterEstimate;
+        _delayEstimate = rhs._delayEstimate;
+        _waitingForCompletion = rhs._waitingForCompletion;
+        _rttMs = rhs._rttMs;
+        _NACKSeqNumLength = rhs._NACKSeqNumLength;
+        _waitingForKeyFrame = rhs._waitingForKeyFrame;
+        _firstPacket = rhs._firstPacket;
+        _lastDecodedState =  rhs._lastDecodedState;
+        _packetsNotDecodable = rhs._packetsNotDecodable;
+        memcpy(_receiveStatistics, rhs._receiveStatistics,
+               sizeof(_receiveStatistics));
+        memcpy(_NACKSeqNumInternal, rhs._NACKSeqNumInternal,
+               sizeof(_NACKSeqNumInternal));
+        memcpy(_NACKSeqNum, rhs._NACKSeqNum, sizeof(_NACKSeqNum));
+        // Drop our old pool before cloning rhs's buffers.
+        for (int i = 0; i < kMaxNumberOfFrames; i++)
+        {
+            if (_frameBuffers[i] != NULL)
+            {
+                delete _frameBuffers[i];
+                _frameBuffers[i] = NULL;
+            }
+        }
+        _frameList.clear();
+        for (int i = 0; i < _maxNumberOfFrames; i++)
+        {
+            _frameBuffers[i] = new VCMFrameBuffer(*(rhs._frameBuffers[i]));
+            if (_frameBuffers[i]->Length() > 0)
+            {
+                // Insert in timestamp order: scan from the newest end for
+                // the first older frame and insert after it.
+                // NOTE(review): only frames with payload (Length() > 0) are
+                // re-inserted into _frameList; confirm rhs never tracks
+                // zero-length frames in its list.
+                FrameList::reverse_iterator rit = std::find_if(
+                    _frameList.rbegin(), _frameList.rend(),
+                    FrameSmallerTimestamp(_frameBuffers[i]->TimeStamp()));
+                _frameList.insert(rit.base(), _frameBuffers[i]);
+            }
+        }
+        rhs._critSect->Leave();
+        _critSect->Leave();
+    }
+}
+
+// Starts the jitter buffer: marks it running and resets all rate counters,
+// receive statistics, NACK state and completion bookkeeping to a clean
+// initial state.
+void
+VCMJitterBuffer::Start()
+{
+    CriticalSectionScoped cs(_critSect);
+    _running = true;
+    _incomingFrameCount = 0;
+    _incomingFrameRate = 0;
+    _incomingBitCount = 0;
+    _incomingBitRate = 0;
+    _timeLastIncomingFrameCount = _clock->MillisecondTimestamp();
+    memset(_receiveStatistics, 0, sizeof(_receiveStatistics));
+
+    _numConsecutiveOldFrames = 0;
+    _numConsecutiveOldPackets = 0;
+    _discardedPackets = 0;
+
+    _frameEvent.Reset(); // start in a non-signaled state
+    _packetEvent.Reset(); // start in a non-signaled state
+    // latestPacketTime == -1 means "no incomplete frame being tracked".
+    _waitingForCompletion.frameSize = 0;
+    _waitingForCompletion.timestamp = 0;
+    _waitingForCompletion.latestPacketTime = -1;
+    _firstPacket = true;
+    _NACKSeqNumLength = 0;
+    _waitingForKeyFrame = false;
+    _rttMs = 0;
+    _packetsNotDecodable = 0;
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId,
+                 _receiverId), "JB(0x%x): Jitter buffer: start", this);
+}
+
+
+// Stops the jitter buffer: marks it not running, frees all frames not
+// currently handed to the decoder, and signals both events so any thread
+// blocked in GetCompleteFrameForDecoding()/GetNextTimeStamp() wakes up,
+// observes !_running and exits.
+void
+VCMJitterBuffer::Stop()
+{
+    _critSect->Enter();
+    _running = false;
+    _lastDecodedState.Reset();
+    _frameList.clear();
+    for (int i = 0; i < kMaxNumberOfFrames; i++)
+    {
+        if (_frameBuffers[i] != NULL)
+        {
+            static_cast<VCMFrameBuffer*>(_frameBuffers[i])->SetState(kStateFree);
+        }
+    }
+
+    // Events are set after releasing the lock so woken threads can
+    // re-acquire it immediately.
+    _critSect->Leave();
+    _frameEvent.Set(); // Make sure we exit from trying to get a frame to decoder
+    _packetEvent.Set(); // Make sure we exit from trying to get a sequence number
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId,
+                 _receiverId), "JB(0x%x): Jitter buffer: stop", this);
+}
+
+// Returns true while the jitter buffer is started (between Start and Stop).
+bool
+VCMJitterBuffer::Running() const
+{
+    CriticalSectionScoped lock(_critSect);
+    return _running;
+}
+
+// Public flush entry point: acquires the lock and delegates to
+// FlushInternal().
+void
+VCMJitterBuffer::Flush()
+{
+    CriticalSectionScoped lock(_critSect);
+    FlushInternal();
+}
+
+// Empties the jitter buffer: releases every frame, resets the decoded state,
+// the jitter/delay estimators, events and NACK bookkeeping.
+// Must be called under the critical section _critSect.
+void
+VCMJitterBuffer::FlushInternal()
+{
+    // Erase all frames from the sorted list and set their state to free.
+    _frameList.clear();
+    for (WebRtc_Word32 i = 0; i < _maxNumberOfFrames; i++)
+    {
+        ReleaseFrameInternal(_frameBuffers[i]);
+    }
+    _lastDecodedState.Reset(); // TODO (mikhal): sync reset
+    _packetsNotDecodable = 0;
+
+    _frameEvent.Reset();
+    _packetEvent.Reset();
+
+    _numConsecutiveOldFrames = 0;
+    _numConsecutiveOldPackets = 0;
+
+    // Also reset the jitter and delay estimates
+    _jitterEstimate.Reset();
+    _delayEstimate.Reset(_clock->MillisecondTimestamp());
+
+    // latestPacketTime == -1 means "no incomplete frame being tracked".
+    _waitingForCompletion.frameSize = 0;
+    _waitingForCompletion.timestamp = 0;
+    _waitingForCompletion.latestPacketTime = -1;
+
+    _firstPacket = true;
+
+    _NACKSeqNumLength = 0;
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId,
+                 _receiverId), "JB(0x%x): Jitter buffer: flush", this);
+}
+
+// Marks |frame| as free so it can be reused, unless it is NULL or currently
+// being decoded. Must be called from inside the critical section _critSect.
+void
+VCMJitterBuffer::ReleaseFrameInternal(VCMFrameBuffer* frame)
+{
+    if (frame == NULL || frame->GetState() == kStateDecoding)
+    {
+        return;
+    }
+    frame->SetState(kStateFree);
+}
+
+// Update frame state (set as complete if conditions are met)
+// Doing it here increases the degree of freedom for e.g. future
+// reconstructability of separate layers. Must be called under the
+// critical section _critSect.
+// Returns kFlushIndicator when too many consecutive old frames forced a
+// flush, otherwise kNoError.
+VCMFrameBufferEnum
+VCMJitterBuffer::UpdateFrameState(VCMFrameBuffer* frame)
+{
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId), "JB(0x%x) FB(0x%x): "
+                         "UpdateFrameState NULL frame pointer", this, frame);
+        return kNoError;
+    }
+
+    int length = frame->Length();
+    if (_master)
+    {
+        // Only trace the primary jitter buffer to make it possible to parse
+        // and plot the trace file.
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "JB(0x%x) FB(0x%x): Complete frame added to jitter buffer,"
+                     " size:%d type %d",
+                     this, frame,length,frame->FrameType());
+    }
+
+    // Count each non-empty frame once for the incoming frame rate.
+    if (length != 0 && !frame->GetCountedFrame())
+    {
+        // ignore Ack frames
+        _incomingFrameCount++;
+        frame->SetCountedFrame(true);
+    }
+
+    // Check if we should drop frame
+    // an old complete frame can arrive too late
+    if (_lastDecodedState.IsOldFrame(frame))
+    {
+        // Frame is older than the latest decoded frame, drop it. Will be
+        // released by CleanUpOldFrames later.
+        frame->Reset();
+        frame->SetState(kStateEmpty);
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "JB(0x%x) FB(0x%x): Dropping old frame in Jitter buffer",
+                     this, frame);
+        _dropCount++;
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "Jitter buffer drop count: %d, consecutive drops: %u",
+                     _dropCount, _numConsecutiveOldFrames);
+        // Flush() if this happens consistently.
+        _numConsecutiveOldFrames++;
+        if (_numConsecutiveOldFrames > kMaxConsecutiveOldFrames) {
+          FlushInternal();
+          return kFlushIndicator;
+        }
+        return kNoError;
+    }
+    _numConsecutiveOldFrames = 0;
+    frame->SetState(kStateComplete);
+
+
+    // Update receive statistics. We count all layers, thus when you use layers
+    // adding all key and delta frames might differ from frame count
+    // Index mapping: [0] key, [1] delta, [2] golden, [3] altref.
+    if (frame->IsSessionComplete())
+    {
+        switch (frame->FrameType())
+        {
+        case kVideoFrameKey:
+            {
+                _receiveStatistics[0]++;
+                break;
+            }
+        case kVideoFrameDelta:
+            {
+                _receiveStatistics[1]++;
+                break;
+            }
+        case kVideoFrameGolden:
+            {
+                _receiveStatistics[2]++;
+                break;
+            }
+        case kVideoFrameAltRef:
+            {
+                _receiveStatistics[3]++;
+                break;
+            }
+        default:
+            assert(false);
+
+        }
+    }
+    const FrameList::iterator it = FindOldestCompleteContinuousFrame(false);
+    VCMFrameBuffer* oldFrame = NULL;
+    if (it != _frameList.end())
+    {
+        oldFrame = *it;
+    }
+
+    // Only signal if this is the oldest frame.
+    // Not necessary the case due to packet reordering or NACK.
+    if (!WaitForNack() || (oldFrame != NULL && oldFrame == frame))
+    {
+        _frameEvent.Set();
+    }
+    return kNoError;
+}
+
+// Reports cumulative receive statistics: delta counts combine the delta and
+// altref layers, key counts combine the key and golden layers. Returns 0.
+WebRtc_Word32
+VCMJitterBuffer::GetFrameStatistics(WebRtc_UWord32& receivedDeltaFrames,
+                                    WebRtc_UWord32& receivedKeyFrames) const
+{
+    CriticalSectionScoped lock(_critSect);
+    receivedDeltaFrames = _receiveStatistics[1] + _receiveStatistics[3];
+    receivedKeyFrames = _receiveStatistics[0] + _receiveStatistics[2];
+    return 0;
+}
+
+// Number of packets that belonged to frames released to the decoder in a
+// non-decodable state.
+WebRtc_UWord32 VCMJitterBuffer::NumNotDecodablePackets() const {
+  CriticalSectionScoped lock(_critSect);
+  return _packetsNotDecodable;
+}
+
+// Number of media packets discarded because they arrived too late
+// (older than the last decoded frame).
+WebRtc_UWord32 VCMJitterBuffer::DiscardedPackets() const {
+  CriticalSectionScoped lock(_critSect);
+  return _discardedPackets;
+}
+
+// Gets frame to use for this timestamp. If no match, get empty frame.
+// On success |frame| is set and VCM_OK returned; otherwise returns
+// VCM_UNINITIALIZED, VCM_OLD_PACKET_ERROR, VCM_FLUSH_INDICATOR or
+// VCM_JITTER_BUFFER_ERROR and leaves |frame| untouched/NULL.
+WebRtc_Word32
+VCMJitterBuffer::GetFrame(const VCMPacket& packet, VCMEncodedFrame*& frame)
+{
+    // NOTE(review): _running is read here without holding _critSect —
+    // presumably a deliberate benign race; confirm.
+    if (!_running) // don't accept incoming packets until we are started
+    {
+        return VCM_UNINITIALIZED;
+    }
+
+    _critSect->Enter();
+    // Does this packet belong to an old frame?
+    if (_lastDecodedState.IsOldPacket(&packet))
+    {
+        // Account only for media packets
+        if (packet.sizeBytes > 0)
+        {
+            _discardedPackets++;
+            _numConsecutiveOldPackets++;
+        }
+        // Update last decoded sequence number if the packet arrived late and
+        // belongs to a frame with a timestamp equal to the last decoded
+        // timestamp.
+        _lastDecodedState.UpdateOldPacket(&packet);
+
+        // A long run of old packets suggests the buffer is stuck; flush.
+        if (_numConsecutiveOldPackets > kMaxConsecutiveOldPackets)
+        {
+            FlushInternal();
+            _critSect->Leave();
+            return VCM_FLUSH_INDICATOR;
+        }
+        _critSect->Leave();
+        return VCM_OLD_PACKET_ERROR;
+    }
+    _numConsecutiveOldPackets = 0;
+
+    // Look for an existing frame with this packet's timestamp.
+    FrameList::iterator it = std::find_if(
+        _frameList.begin(),
+        _frameList.end(),
+        FrameEqualTimestamp(packet.timestamp));
+
+    if (it != _frameList.end()) {
+      frame = *it;
+      _critSect->Leave();
+      return VCM_OK;
+    }
+
+    _critSect->Leave();
+
+    // No match, return empty frame
+    frame = GetEmptyFrame();
+    if (frame != NULL)
+    {
+        return VCM_OK;
+    }
+    // No free frame! Try to reclaim some...
+    _critSect->Enter();
+    RecycleFramesUntilKeyFrame();
+    _critSect->Leave();
+
+    frame = GetEmptyFrame();
+    if (frame != NULL)
+    {
+        return VCM_OK;
+    }
+    return VCM_JITTER_BUFFER_ERROR;
+}
+
+// Deprecated! Kept for testing purposes.
+// Thin wrapper over the two-argument overload: NULL on any error.
+VCMEncodedFrame*
+VCMJitterBuffer::GetFrame(const VCMPacket& packet)
+{
+    VCMEncodedFrame* frame = NULL;
+    return (GetFrame(packet, frame) < 0) ? NULL : frame;
+}
+
+// Get empty frame, creates new (i.e. increases JB size) if necessary.
+// Returns NULL when not running or when the pool is full and cannot grow.
+VCMFrameBuffer*
+VCMJitterBuffer::GetEmptyFrame()
+{
+    // NOTE(review): _running is read without the lock — presumably a
+    // deliberate benign race; confirm.
+    if (!_running) // don't accept incoming packets until we are started
+    {
+        return NULL;
+    }
+
+    _critSect->Enter();
+
+    for (int i = 0; i <_maxNumberOfFrames; ++i)
+    {
+        if (kStateFree == _frameBuffers[i]->GetState())
+        {
+            // found a free buffer
+            _frameBuffers[i]->SetState(kStateEmpty);
+            _critSect->Leave();
+            return _frameBuffers[i];
+        }
+    }
+
+    // Check if we can increase JB size
+    if (_maxNumberOfFrames < kMaxNumberOfFrames)
+    {
+        VCMFrameBuffer* ptrNewBuffer = new VCMFrameBuffer();
+        ptrNewBuffer->SetState(kStateEmpty);
+        _frameBuffers[_maxNumberOfFrames] = ptrNewBuffer;
+        _maxNumberOfFrames++;
+
+        _critSect->Leave();
+        // NOTE(review): _maxNumberOfFrames is traced after releasing the
+        // lock; the value could race with another grower — confirm benign.
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+        VCMId(_vcmId, _receiverId), "JB(0x%x) FB(0x%x): Jitter buffer "
+        "increased to:%d frames", this, ptrNewBuffer, _maxNumberOfFrames);
+        return ptrNewBuffer;
+    }
+    _critSect->Leave();
+
+    // We have reached max size, cannot increase JB size
+    return NULL;
+}
+
+
+// Find oldest complete frame used for getting next frame to decode
+// Must be called under critical section.
+// Returns _frameList.end() when no suitable frame exists (including while
+// waiting for a key frame); otherwise an iterator to the oldest frame that
+// is complete (or decodable, when |enable_decodable|) and continuous with
+// the last decoded state.
+FrameList::iterator
+VCMJitterBuffer::FindOldestCompleteContinuousFrame(bool enable_decodable) {
+  // If we have more than one frame done since last time, pick oldest.
+  VCMFrameBuffer* oldest_frame = NULL;
+  FrameList::iterator it = _frameList.begin();
+
+  // When temporal layers are available, we search for a complete or decodable
+  // frame until we hit one of the following:
+  // 1. Continuous base or sync layer.
+  // 2. The end of the list was reached.
+  for (; it != _frameList.end(); ++it)  {
+    oldest_frame = *it;
+    VCMFrameBufferStateEnum state = oldest_frame->GetState();
+    // Is this frame complete or decodable and continuous?
+    if ((state == kStateComplete ||
+        (enable_decodable && state == kStateDecodable)) &&
+        _lastDecodedState.ContinuousFrame(oldest_frame)) {
+      break;
+    } else {
+      int temporal_id = oldest_frame->TemporalId();
+      oldest_frame = NULL;
+      if (temporal_id <= 0) {
+        // When temporal layers are disabled or we have hit a base layer
+        // we break (regardless of continuity and completeness).
+        break;
+      }
+    }
+  }
+
+  if (oldest_frame == NULL) {
+    // No complete frame no point to continue.
+    return _frameList.end();
+  } else  if (_waitingForKeyFrame &&
+              oldest_frame->FrameType() != kVideoFrameKey) {
+    // We are waiting for a key frame.
+    return _frameList.end();
+  }
+
+  // We have a complete continuous frame.
+  return it;
+}
+
+// Traces and releases |frame| back to the free pool.
+// Call from inside the critical section _critSect.
+void
+VCMJitterBuffer::RecycleFrame(VCMFrameBuffer* frame)
+{
+    if (frame == NULL)
+    {
+        return;
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                 VCMId(_vcmId, _receiverId),
+                 "JB(0x%x) FB(0x%x): RecycleFrame, size:%d",
+                 this, frame, frame->Length());
+    ReleaseFrameInternal(frame);
+}
+
+// Calculates the incoming frame rate (fps) and bit rate (kbps) since the
+// previous call, averaging the frame rate with the previous measurement.
+// Returns 0.
+WebRtc_Word32
+VCMJitterBuffer::GetUpdate(WebRtc_UWord32& frameRate, WebRtc_UWord32& bitRate)
+{
+    CriticalSectionScoped cs(_critSect);
+    const WebRtc_Word64 now = _clock->MillisecondTimestamp();
+    WebRtc_Word64 diff = now - _timeLastIncomingFrameCount;
+    if (diff < 1000 && _incomingFrameRate > 0 && _incomingBitRate > 0)
+    {
+        // Make sure we report something even though less than
+        // 1 second has passed since last update.
+        frameRate = _incomingFrameRate;
+        bitRate = _incomingBitRate;
+    }
+    else if (_incomingFrameCount != 0)
+    {
+        // We have received frame(s) since last call to this function
+
+        // Prepare calculations
+        if (diff <= 0)
+        {
+            diff = 1;
+        }
+        // we add 0.5f for rounding
+        float rate = 0.5f + ((_incomingFrameCount * 1000.0f) / diff);
+        if (rate < 1.0f) // don't go below 1, can crash
+        {
+            rate = 1.0f;
+        }
+
+        // Calculate frame rate
+        // Let r be rate.
+        // r(0) = 1000*framecount/delta_time.
+        // (I.e. frames per second since last calculation.)
+        // frameRate = r(0)/2 + r(-1)/2
+        // (I.e. fr/s average this and the previous calculation.)
+        frameRate = (_incomingFrameRate + (WebRtc_Word32)rate) >> 1;
+        _incomingFrameRate = (WebRtc_UWord8)rate;
+
+        // Calculate bit rate
+        if (_incomingBitCount == 0)
+        {
+            bitRate = 0;
+        }
+        else
+        {
+            // bits -> kbps with two decimal digits of integer precision.
+            bitRate = 10 * ((100 * _incomingBitCount) /
+                      static_cast<WebRtc_UWord32>(diff));
+        }
+        _incomingBitRate = bitRate;
+
+        // Reset count
+        _incomingFrameCount = 0;
+        _incomingBitCount = 0;
+        _timeLastIncomingFrameCount = now;
+
+    }
+    else
+    {
+        // No frames since last call
+        _timeLastIncomingFrameCount = _clock->MillisecondTimestamp();
+        frameRate = 0;
+        bitRate = 0;
+        _incomingBitRate = 0;
+    }
+
+    return 0;
+}
+
+// Returns immediately or a X ms event hang waiting for a complete frame,
+// X decided by caller.
+// Returns the oldest complete continuous frame (marked kStateDecoding), or
+// NULL on timeout, stop, or when no such frame exists.
+VCMEncodedFrame*
+VCMJitterBuffer::GetCompleteFrameForDecoding(WebRtc_UWord32 maxWaitTimeMS)
+{
+    if (!_running)
+    {
+        return NULL;
+    }
+
+    _critSect->Enter();
+
+    CleanUpOldFrames();
+
+    // In initial state while NACKing, only a key frame may start decoding.
+    if (_lastDecodedState.init() && WaitForNack()) {
+      _waitingForKeyFrame = true;
+    }
+
+    FrameList::iterator it = FindOldestCompleteContinuousFrame(false);
+    if (it == _frameList.end())
+    {
+        if (maxWaitTimeMS == 0)
+        {
+            _critSect->Leave();
+            return NULL;
+        }
+        const WebRtc_Word64 endWaitTimeMs = _clock->MillisecondTimestamp()
+                                            + maxWaitTimeMS;
+        WebRtc_Word64 waitTimeMs = maxWaitTimeMS;
+        // Wait loop: the lock is released while blocked on _frameEvent and
+        // re-acquired before re-checking the list.
+        while (waitTimeMs > 0)
+        {
+            _critSect->Leave();
+            const EventTypeWrapper ret =
+                  _frameEvent.Wait(static_cast<WebRtc_UWord32>(waitTimeMs));
+            _critSect->Enter();
+            if (ret == kEventSignaled)
+            {
+                // are we closing down the Jitter buffer
+                if (!_running)
+                {
+                    _critSect->Leave();
+                    return NULL;
+                }
+
+                // Finding oldest frame ready for decoder, but check
+                // sequence number and size
+                CleanUpOldFrames();
+                it = FindOldestCompleteContinuousFrame(false);
+                if (it == _frameList.end())
+                {
+                    // Spurious wakeup or frame not usable: keep waiting
+                    // for whatever time remains.
+                    waitTimeMs = endWaitTimeMs -
+                                 _clock->MillisecondTimestamp();
+                }
+                else
+                {
+                    break;
+                }
+            }
+            else
+            {
+                _critSect->Leave();
+                return NULL;
+            }
+        }
+        // Inside critSect
+    }
+    else
+    {
+        // we already have a frame reset the event
+        _frameEvent.Reset();
+    }
+
+    if (it == _frameList.end())
+    {
+        // Even after signaling we're still missing a complete continuous frame
+        _critSect->Leave();
+        return NULL;
+    }
+
+    VCMFrameBuffer* oldestFrame = *it;
+    it = _frameList.erase(it);
+
+    // Update jitter estimate
+    const bool retransmitted = (oldestFrame->GetNackCount() > 0);
+    if (retransmitted)
+    {
+        _jitterEstimate.FrameNacked();
+    }
+    else if (oldestFrame->Length() > 0)
+    {
+        // Ignore retransmitted and empty frames.
+        UpdateJitterAndDelayEstimates(*oldestFrame, false);
+    }
+
+    oldestFrame->SetState(kStateDecoding);
+
+    CleanUpOldFrames();
+
+    if (oldestFrame->FrameType() == kVideoFrameKey)
+    {
+        _waitingForKeyFrame = false;
+    }
+
+    _critSect->Leave();
+
+    // We have a frame - update decoded state with frame info.
+    // NOTE(review): _lastDecodedState is updated after releasing the lock
+    // here (unlike GetFrameForDecoding) — confirm this is race-free.
+    _lastDecodedState.SetState(oldestFrame);
+
+    return oldestFrame;
+}
+
+// Thread-safe wrapper around GetEstimatedJitterMsInternal().
+WebRtc_UWord32
+VCMJitterBuffer::GetEstimatedJitterMS()
+{
+    CriticalSectionScoped lock(_critSect);
+    return GetEstimatedJitterMsInternal();
+}
+
+// Computes the current jitter estimate in milliseconds, including a fixed
+// allowance for operating-system jitter. In hybrid NACK mode, once the RTT
+// exceeds the low threshold we count on FEC and zero the RTT multiplier.
+WebRtc_UWord32
+VCMJitterBuffer::GetEstimatedJitterMsInternal()
+{
+    // _lowRttNackThresholdMs == -1 means no FEC.
+    double rttMult = 1.0f;
+    const bool countOnFec =
+        (_nackMode == kNackHybrid) &&
+        (_lowRttNackThresholdMs >= 0) &&
+        (static_cast<int>(_rttMs) > _lowRttNackThresholdMs);
+    if (countOnFec)
+    {
+        rttMult = 0.0f;
+    }
+    WebRtc_UWord32 estimate = VCMJitterEstimator::OPERATING_SYSTEM_JITTER;
+    estimate += static_cast<WebRtc_UWord32>
+                (_jitterEstimate.GetJitterEstimate(rttMult) + 0.5);
+    return estimate;
+}
+
+// Stores the latest round-trip time and forwards it to the jitter estimator.
+void
+VCMJitterBuffer::UpdateRtt(WebRtc_UWord32 rttMs)
+{
+    CriticalSectionScoped lock(_critSect);
+    _rttMs = rttMs;
+    _jitterEstimate.UpdateRtt(rttMs);
+}
+
+// Waits (up to |maxWaitTimeMS|) for the first packet of the next frame and
+// returns that frame's RTP timestamp, filling in its frame type and render
+// time. Returns -1 when not running, stopped while waiting, or on timeout.
+WebRtc_Word64
+VCMJitterBuffer::GetNextTimeStamp(WebRtc_UWord32 maxWaitTimeMS,
+                                  FrameType& incomingFrameType,
+                                  WebRtc_Word64& renderTimeMs)
+{
+    if (!_running)
+    {
+        return -1;
+    }
+
+    _critSect->Enter();
+
+    // Finding oldest frame ready for decoder, check sequence number and size
+    CleanUpOldFrames();
+
+    FrameList::iterator it = _frameList.begin();
+
+    if (it == _frameList.end())
+    {
+        // Reset under the lock, then wait unlocked; a packet arriving in
+        // between will Set the event and the Wait returns immediately.
+        _packetEvent.Reset();
+        _critSect->Leave();
+
+        if (_packetEvent.Wait(maxWaitTimeMS) == kEventSignaled)
+        {
+            // are we closing down the Jitter buffer
+            if (!_running)
+            {
+                return -1;
+            }
+            _critSect->Enter();
+
+            CleanUpOldFrames();
+            it = _frameList.begin();
+        }
+        else
+        {
+            // Timed out; re-acquire for the common exit path below.
+            _critSect->Enter();
+        }
+    }
+
+    if (it == _frameList.end())
+    {
+        _critSect->Leave();
+        return -1;
+    }
+    // we have a frame
+
+    // return frame type
+    // All layers are assumed to have the same type
+    incomingFrameType = (*it)->FrameType();
+
+    renderTimeMs = (*it)->RenderTimeMs();
+
+    const WebRtc_UWord32 timestamp = (*it)->TimeStamp();
+
+    _critSect->Leave();
+
+    // return current time
+    return timestamp;
+}
+
+// Answers the question:
+// Will the packet sequence be complete if the next frame is grabbed for
+// decoding right now? That is, have we lost a frame between the last decoded
+// frame and the next, or is the next
+// frame missing one or more packets?
+bool
+VCMJitterBuffer::CompleteSequenceWithNextFrame()
+{
+    CriticalSectionScoped cs(_critSect);
+    // Finding oldest frame ready for decoder, check sequence number and size
+    CleanUpOldFrames();
+
+    // Empty buffer: nothing is known to be missing.
+    if (_frameList.empty())
+      return true;
+
+    VCMFrameBuffer* oldestFrame = _frameList.front();
+    if (_frameList.size() <= 1 &&
+        oldestFrame->GetState() != kStateComplete)
+    {
+        // Frame not ready to be decoded.
+        // NOTE(review): a single not-yet-complete frame is reported as "no
+        // loss detected yet" (true) — confirm intended.
+        return true;
+    }
+    if (!oldestFrame->Complete())
+    {
+        return false;
+    }
+
+    // See if we have lost a frame before this one.
+    if (_lastDecodedState.init())
+    {
+        // Following start, reset or flush -> check for key frame.
+        if (oldestFrame->FrameType() != kVideoFrameKey)
+        {
+            return false;
+        }
+    }
+    else if (oldestFrame->GetLowSeqNum() == -1)
+    {
+        return false;
+    }
+    else if (!_lastDecodedState.ContinuousFrame(oldestFrame))
+    {
+        return false;
+    }
+    return true;
+}
+
+// Returns immediately the oldest frame (possibly incomplete) for decoding,
+// marked kStateDecoding, or NULL if not running or the buffer cannot supply
+// one. Delegates to GetFrameForDecodingNACK() when NACK is active.
+VCMEncodedFrame*
+VCMJitterBuffer::GetFrameForDecoding()
+{
+    CriticalSectionScoped cs(_critSect);
+    if (!_running)
+    {
+        return NULL;
+    }
+
+    if (WaitForNack())
+    {
+        return GetFrameForDecodingNACK();
+    }
+
+    CleanUpOldFrames();
+
+    if (_frameList.empty()) {
+      return NULL;
+    }
+
+    VCMFrameBuffer* oldestFrame = _frameList.front();
+    // Hold on to a lone incomplete frame; with more frames queued the
+    // oldest is released even if incomplete.
+    if (_frameList.size() <= 1 &&
+        oldestFrame->GetState() != kStateComplete) {
+      return NULL;
+    }
+
+    // Incomplete frame pulled out from jitter buffer,
+    // update the jitter estimate with what we currently know.
+    // This frame shouldn't have been retransmitted, but if we recently
+    // turned off NACK this might still happen.
+    const bool retransmitted = (oldestFrame->GetNackCount() > 0);
+    if (retransmitted)
+    {
+        _jitterEstimate.FrameNacked();
+    }
+    else if (oldestFrame->Length() > 0)
+    {
+        // Ignore retransmitted and empty frames.
+        // Update with the previous incomplete frame first
+        if (_waitingForCompletion.latestPacketTime >= 0)
+        {
+            UpdateJitterAndDelayEstimates(_waitingForCompletion, true);
+        }
+        // Then wait for this one to get complete
+        _waitingForCompletion.frameSize = oldestFrame->Length();
+        _waitingForCompletion.latestPacketTime =
+                              oldestFrame->LatestPacketTimeMs();
+        _waitingForCompletion.timestamp = oldestFrame->TimeStamp();
+    }
+    _frameList.erase(_frameList.begin());
+
+    // Look for previous frame loss
+    VerifyAndSetPreviousFrameLost(*oldestFrame);
+
+    // The state must be changed to decoding before cleaning up zero sized
+    // frames to avoid empty frames being cleaned up and then given to the
+    // decoder.
+    // Set as decoding. Propagates the missingFrame bit.
+    oldestFrame->SetState(kStateDecoding);
+
+    CleanUpOldFrames();
+
+    if (oldestFrame->FrameType() == kVideoFrameKey)
+    {
+        _waitingForKeyFrame = false;
+    }
+
+    // Track packets released to the decoder in a non-decodable state.
+    _packetsNotDecodable += oldestFrame->NotDecodablePackets();
+
+    // We have a frame - update decoded state with frame info.
+    _lastDecodedState.SetState(oldestFrame);
+
+    return oldestFrame;
+}
+
+// NACK-mode variant of GetFrameForDecoding(): returns only a complete
+// continuous frame, a decodable frame (hybrid mode), or a complete/decodable
+// key frame — otherwise NULL. Caller holds _critSect.
+VCMEncodedFrame*
+VCMJitterBuffer::GetFrameForDecodingNACK()
+{
+    // when we use NACK we don't release non complete frames
+    // unless we have a complete key frame.
+    // In hybrid mode, we may release decodable frames (non-complete)
+
+    // Clean up old frames and empty frames
+    CleanUpOldFrames();
+
+    // First look for a complete _continuous_ frame.
+    // When waiting for nack, wait for a key frame, if a continuous frame cannot
+    // be determined (i.e. initial decoding state).
+    if (_lastDecodedState.init()) {
+      _waitingForKeyFrame = true;
+    }
+
+    // Allow for a decodable frame when in Hybrid mode.
+    bool enableDecodable = _nackMode == kNackHybrid ? true : false;
+    FrameList::iterator it = FindOldestCompleteContinuousFrame(enableDecodable);
+    if (it == _frameList.end())
+    {
+        // If we didn't find one we're good with a complete key/decodable frame.
+        it = find_if(_frameList.begin(), _frameList.end(),
+                     CompleteDecodableKeyFrameCriteria());
+        if (it == _frameList.end())
+        {
+            return NULL;
+        }
+    }
+    VCMFrameBuffer* oldestFrame = *it;
+    // Update jitter estimate
+    const bool retransmitted = (oldestFrame->GetNackCount() > 0);
+    if (retransmitted)
+    {
+        _jitterEstimate.FrameNacked();
+    }
+    else if (oldestFrame->Length() > 0)
+    {
+        // Ignore retransmitted and empty frames.
+        UpdateJitterAndDelayEstimates(*oldestFrame, false);
+    }
+    it = _frameList.erase(it);
+
+    // Look for previous frame loss
+    VerifyAndSetPreviousFrameLost(*oldestFrame);
+
+    // The state must be changed to decoding before cleaning up zero sized
+    // frames to avoid empty frames being cleaned up and then given to the
+    // decoder.
+    oldestFrame->SetState(kStateDecoding);
+
+    // Clean up old frames and empty frames
+    CleanUpOldFrames();
+
+    if (oldestFrame->FrameType() == kVideoFrameKey)
+    {
+        _waitingForKeyFrame = false;
+    }
+
+    // We have a frame - update decoded state with frame info.
+    _lastDecodedState.SetState(oldestFrame);
+
+    return oldestFrame;
+}
+
+// Must be called under the critical section _critSect. Should never be called
+// with retransmitted frames, they must be filtered out before this function is
+// called.
+// Overload taking a VCMJitterSample (used for the _waitingForCompletion
+// sample): traces the event, then forwards to the (time, timestamp, size)
+// overload. A sample with latestPacketTime == -1 is "no data" and is ignored.
+void
+VCMJitterBuffer::UpdateJitterAndDelayEstimates(VCMJitterSample& sample,
+                                               bool incompleteFrame)
+{
+    if (sample.latestPacketTime == -1)
+    {
+        // No packet has been recorded for this sample yet - nothing to do.
+        return;
+    }
+    if (incompleteFrame)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId), "Received incomplete frame "
+                     "timestamp %u frame size %u at time %u",
+                     sample.timestamp, sample.frameSize,
+                     MaskWord64ToUWord32(sample.latestPacketTime));
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId), "Received complete frame "
+                     "timestamp %u frame size %u at time %u",
+                     sample.timestamp, sample.frameSize,
+                     MaskWord64ToUWord32(sample.latestPacketTime));
+    }
+    UpdateJitterAndDelayEstimates(sample.latestPacketTime,
+                                  sample.timestamp,
+                                  sample.frameSize,
+                                  incompleteFrame);
+}
+
+// Must be called under the critical section _critSect. Should never be
+// called with retransmitted frames, they must be filtered out before this
+// function is called.
+// Overload taking a VCMFrameBuffer: traces the event (including the current
+// jitter estimate), then forwards the frame's latest-packet time, RTP
+// timestamp and byte length to the (time, timestamp, size) overload.
+void
+VCMJitterBuffer::UpdateJitterAndDelayEstimates(VCMFrameBuffer& frame,
+                                               bool incompleteFrame)
+{
+    if (frame.LatestPacketTimeMs() == -1)
+    {
+        // Frame has no packet arrival time recorded - nothing to update.
+        return;
+    }
+    // No retransmitted frames should be a part of the jitter
+    // estimate.
+    if (incompleteFrame)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                   "Received incomplete frame timestamp %u frame type %d "
+                   "frame size %u at time %u, jitter estimate was %u",
+                   frame.TimeStamp(), frame.FrameType(), frame.Length(),
+                   MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
+                   GetEstimatedJitterMsInternal());
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),"Received complete frame "
+                     "timestamp %u frame type %d frame size %u at time %u, "
+                     "jitter estimate was %u",
+                     frame.TimeStamp(), frame.FrameType(), frame.Length(),
+                     MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
+                     GetEstimatedJitterMsInternal());
+    }
+    UpdateJitterAndDelayEstimates(frame.LatestPacketTimeMs(), frame.TimeStamp(),
+                                  frame.Length(), incompleteFrame);
+}
+
+// Must be called under the critical section _critSect. Should never be called
+// with retransmitted frames, they must be filtered out before this function
+// is called.
+// Core implementation: feeds one (arrival time, RTP timestamp, frame size)
+// observation into the inter-frame delay estimator and, if the frame was not
+// reordered by the network, into the jitter estimator.
+void
+VCMJitterBuffer::UpdateJitterAndDelayEstimates(WebRtc_Word64 latestPacketTimeMs,
+                                               WebRtc_UWord32 timestamp,
+                                               WebRtc_UWord32 frameSize,
+                                               bool incompleteFrame)
+{
+    if (latestPacketTimeMs == -1)
+    {
+        // -1 means "no packet time available"; nothing to feed the filters.
+        return;
+    }
+    WebRtc_Word64 frameDelay;
+    // Calculate the delay estimate
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                 VCMId(_vcmId, _receiverId),
+                 "Packet received and sent to jitter estimate with: "
+                 "timestamp=%u wallClock=%u", timestamp,
+                 MaskWord64ToUWord32(latestPacketTimeMs));
+    bool notReordered = _delayEstimate.CalculateDelay(timestamp,
+                                                      &frameDelay,
+                                                      latestPacketTimeMs);
+    // Filter out frames which have been reordered in time by the network
+    if (notReordered)
+    {
+        // Update the jitter estimate with the new samples
+        _jitterEstimate.UpdateEstimate(frameDelay, frameSize, incompleteFrame);
+    }
+}
+
+// Public wrapper: builds and returns the current NACK list (missing sequence
+// numbers). nackSize is set to the number of entries (0xffff signals "request
+// a key frame instead"); listExtended is set if the list grew since the last
+// call. Locking is handled inside CreateNackList.
+WebRtc_UWord16*
+VCMJitterBuffer::GetNackList(WebRtc_UWord16& nackSize,bool& listExtended)
+{
+    return CreateNackList(nackSize,listExtended);
+}
+
+// Assume called internally with critsect
+// Computes the sequence-number range the NACK list should cover:
+// lowSeqNum  = last decoded sequence number (-1 if nothing decoded yet),
+// highSeqNum = highest sequence number found in any active frame buffer
+//              (-1 if none). Always returns 0.
+WebRtc_Word32
+VCMJitterBuffer::GetLowHighSequenceNumbers(WebRtc_Word32& lowSeqNum,
+                                           WebRtc_Word32& highSeqNum) const
+{
+    // TODO (mikhal/stefan): refactor to use lastDecodedState
+    WebRtc_Word32 i = 0;
+    WebRtc_Word32 seqNum = -1;
+
+    highSeqNum = -1;
+    lowSeqNum = -1;
+    if (!_lastDecodedState.init())
+      lowSeqNum = _lastDecodedState.sequence_num();
+
+    // find highest seq numbers
+    for (i = 0; i < _maxNumberOfFrames; ++i)
+    {
+        seqNum = _frameBuffers[i]->GetHighSeqNum();
+
+        // Ignore free / empty frames
+        VCMFrameBufferStateEnum state = _frameBuffers[i]->GetState();
+
+        // Skip buffers that hold no usable data (free/empty) or are already
+        // being decoded, and buffers with no sequence number yet.
+        if ((kStateFree != state) &&
+            (kStateEmpty != state) &&
+            (kStateDecoding != state) &&
+             seqNum != -1)
+        {
+            bool wrap;
+            // LatestSequenceNumber handles 16-bit sequence number wrap-around.
+            highSeqNum = LatestSequenceNumber(seqNum, highSeqNum, &wrap);
+        }
+    } // for
+    return 0;
+}
+
+
+WebRtc_UWord16*
+VCMJitterBuffer::CreateNackList(WebRtc_UWord16& nackSize, bool& listExtended)
+{
+    // TODO (mikhal/stefan): Refactor to use lastDecodedState.
+    CriticalSectionScoped cs(_critSect);
+    int i = 0;
+    WebRtc_Word32 lowSeqNum = -1;
+    WebRtc_Word32 highSeqNum = -1;
+    listExtended = false;
+
+    // Don't create list, if we won't wait for it
+    if (!WaitForNack())
+    {
+        nackSize = 0;
+        return NULL;
+    }
+
+    // Find the lowest (last decoded) sequence number and
+    // the highest (highest sequence number of the newest frame)
+    // sequence number. The nack list is a subset of the range
+    // between those two numbers.
+    GetLowHighSequenceNumbers(lowSeqNum, highSeqNum);
+
+    // write a list of all seq num we have
+    if (lowSeqNum == -1 || highSeqNum == -1)
+    {
+        // This happens if we lose the first packet, nothing is popped
+        if (highSeqNum == -1)
+        {
+            // we have not received any packets yet
+            nackSize = 0;
+        }
+        else
+        {
+            // signal that we want a key frame request to be sent
+            nackSize = 0xffff;
+        }
+        return NULL;
+    }
+
+    int numberOfSeqNum = 0;
+    if (lowSeqNum > highSeqNum)
+    {
+        if (lowSeqNum - highSeqNum > 0x00ff)
+        {
+            // wrap
+            numberOfSeqNum = (0xffff-lowSeqNum) + highSeqNum + 1;
+        }
+    }
+    else
+    {
+        numberOfSeqNum = highSeqNum - lowSeqNum;
+    }
+
+    if (numberOfSeqNum > kNackHistoryLength)
+    {
+        // Nack list is too big, flush and try to restart.
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "Nack list too large, try to find a key frame and restart "
+                     "from seq: %d. Lowest seq in jb %d", highSeqNum,lowSeqNum);
+
+        // This nack size will trigger a key request...
+        bool foundKeyFrame = false;
+
+        while (numberOfSeqNum > kNackHistoryLength)
+        {
+            foundKeyFrame = RecycleFramesUntilKeyFrame();
+
+            if (!foundKeyFrame)
+            {
+                break;
+            }
+
+            // Check if we still have too many packets in JB
+            lowSeqNum = -1;
+            highSeqNum = -1;
+            GetLowHighSequenceNumbers(lowSeqNum, highSeqNum);
+
+            if (highSeqNum == -1)
+            {
+                assert(lowSeqNum != -1); // This should never happen
+                // We can't calculate the nack list length...
+                return NULL;
+            }
+
+            numberOfSeqNum = 0;
+            if (lowSeqNum > highSeqNum)
+            {
+                if (lowSeqNum - highSeqNum > 0x00ff)
+                {
+                    // wrap
+                    numberOfSeqNum = (0xffff-lowSeqNum) + highSeqNum + 1;
+                    highSeqNum=lowSeqNum;
+                }
+            }
+            else
+            {
+                numberOfSeqNum = highSeqNum - lowSeqNum;
+            }
+
+        } // end while
+
+        if (!foundKeyFrame)
+        {
+            // No key frame in JB.
+
+            // Set the last decoded sequence number to current high.
+            // This is to not get a large nack list again right away
+            _lastDecodedState.SetSeqNum(static_cast<uint16_t>(highSeqNum));
+            // Set to trigger key frame signal
+            nackSize = 0xffff;
+            listExtended = true;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,
+                    "\tNo key frame found, request one. _lastDecodedSeqNum[0] "
+                    "%d", _lastDecodedState.sequence_num());
+        }
+        else
+        {
+            // We have cleaned up the jb and found a key frame
+            // The function itself has set last decoded seq.
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,
+                    "\tKey frame found. _lastDecodedSeqNum[0] %d",
+                    _lastDecodedState.sequence_num());
+            nackSize = 0;
+        }
+
+        return NULL;
+    }
+
+    WebRtc_UWord16 seqNumberIterator = (WebRtc_UWord16)(lowSeqNum + 1);
+    for (i = 0; i < numberOfSeqNum; i++)
+    {
+        _NACKSeqNumInternal[i] = seqNumberIterator;
+        seqNumberIterator++;
+    }
+
+    // now we have a list of all sequence numbers that could have been sent
+
+    // zero out the ones we have received
+    for (i = 0; i < _maxNumberOfFrames; i++)
+    {
+        // loop all created frames
+        // We don't need to check if frame is decoding since lowSeqNum is based
+        // on _lastDecodedSeqNum
+        // Ignore free frames
+        VCMFrameBufferStateEnum state = _frameBuffers[i]->GetState();
+
+        if ((kStateFree != state) &&
+            (kStateEmpty != state) &&
+            (kStateDecoding != state))
+        {
+            // Reaching thus far means we are going to update the nack list
+            // When in hybrid mode, we use the soft NACKing feature.
+            if (_nackMode == kNackHybrid)
+            {
+                _frameBuffers[i]->BuildSoftNackList(_NACKSeqNumInternal,
+                                                    numberOfSeqNum,
+                                                    _rttMs);
+            }
+            else
+            {
+                // Used when the frame is being processed by the decoding thread
+                // don't need to use that info in this loop.
+                _frameBuffers[i]->BuildHardNackList(_NACKSeqNumInternal,
+                                                    numberOfSeqNum);
+            }
+        }
+    }
+
+    // compress list
+    int emptyIndex = -1;
+    for (i = 0; i < numberOfSeqNum; i++)
+    {
+        if (_NACKSeqNumInternal[i] == -1 || _NACKSeqNumInternal[i] == -2 )
+        {
+            // this is empty
+            if (emptyIndex == -1)
+            {
+                // no empty index before, remember this position
+                emptyIndex = i;
+            }
+        }
+        else
+        {
+            // this is not empty
+            if (emptyIndex == -1)
+            {
+                // no empty index, continue
+            }
+            else
+            {
+                _NACKSeqNumInternal[emptyIndex] = _NACKSeqNumInternal[i];
+                _NACKSeqNumInternal[i] = -1;
+                emptyIndex++;
+            }
+        }
+    } // for
+
+    if (emptyIndex == -1)
+    {
+        // no empty
+        nackSize = numberOfSeqNum;
+    }
+    else
+    {
+        nackSize = emptyIndex;
+    }
+
+    if (nackSize > _NACKSeqNumLength)
+    {
+        // Larger list: nack list was extended since the last call.
+        listExtended = true;
+    }
+
+    for (WebRtc_UWord32 j = 0; j < nackSize; j++)
+    {
+        // Check if the list has been extended since it was last created. I.e,
+        // new items have been added
+        if (_NACKSeqNumLength > j && !listExtended)
+        {
+            WebRtc_UWord32 k = 0;
+            for (k = j; k < _NACKSeqNumLength; k++)
+            {
+                // Found the item in the last list, i.e, no new items found yet.
+                if (_NACKSeqNum[k] == (WebRtc_UWord16)_NACKSeqNumInternal[j])
+                {
+                   break;
+                }
+            }
+            if (k == _NACKSeqNumLength) // New item not found in last list.
+            {
+                listExtended = true;
+            }
+        }
+        else
+        {
+            listExtended = true;
+        }
+        _NACKSeqNum[j] = (WebRtc_UWord16)_NACKSeqNumInternal[j];
+    }
+
+    _NACKSeqNumLength = nackSize;
+
+    return _NACKSeqNum;
+}
+
+// Release frame when done with decoding. Should never be used to release
+// frames from within the jitter buffer.
+// Marks the frame buffer kStateFree so it can be reused; NULL is a no-op.
+void
+VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame)
+{
+    CriticalSectionScoped cs(_critSect);
+    VCMFrameBuffer* frameBuffer = static_cast<VCMFrameBuffer*>(frame);
+    if (frameBuffer != NULL)
+        frameBuffer->SetState(kStateFree);
+}
+
+// Returns the wall-clock time (ms) when the latest packet was inserted into
+// the given frame. retransmitted is set to true if any packet of the frame
+// was nacked (nack count > 0).
+WebRtc_Word64
+VCMJitterBuffer::LastPacketTime(VCMEncodedFrame* frame,
+                                bool& retransmitted) const
+{
+    CriticalSectionScoped cs(_critSect);
+    retransmitted = (static_cast<VCMFrameBuffer*>(frame)->GetNackCount() > 0);
+    return static_cast<VCMFrameBuffer*>(frame)->LatestPacketTimeMs();
+}
+
+// Returns the RTP timestamp of the last decoded frame, as tracked by
+// _lastDecodedState.
+WebRtc_Word64
+VCMJitterBuffer::LastDecodedTimestamp() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _lastDecodedState.time_stamp();
+}
+
+// Insert packet
+// Takes crit sect, and inserts packet in frame buffer, possibly does logging
+// Inserts one RTP packet into the frame buffer previously obtained via
+// GetFrame(). Also: resets the delay estimate on the very first packet,
+// maintains the _waitingForCompletion jitter sample, links the frame into
+// _frameList (timestamp-ordered) on its first packet, and signals
+// _packetEvent when new data arrived. Returns the VCMFrameBufferEnum from
+// the frame buffer, upgraded to kFirstPacket / kFlushIndicator where
+// applicable.
+VCMFrameBufferEnum
+VCMJitterBuffer::InsertPacket(VCMEncodedFrame* buffer, const VCMPacket& packet)
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
+    VCMFrameBufferEnum bufferReturn = kSizeError;
+    VCMFrameBufferEnum ret = kSizeError;
+    VCMFrameBuffer* frame = static_cast<VCMFrameBuffer*>(buffer);
+
+    // We are keeping track of the first seq num, the latest seq num and
+    // the number of wraps to be able to calculate how many packets we expect.
+    if (_firstPacket)
+    {
+        // Now it's time to start estimating jitter
+        // reset the delay estimate.
+        _delayEstimate.Reset(_clock->MillisecondTimestamp());
+        _firstPacket = false;
+    }
+
+    // Empty packets may bias the jitter estimate (lacking size component),
+    // therefore don't let empty packet trigger the following updates:
+    if (packet.frameType != kFrameEmpty)
+    {
+        if (_waitingForCompletion.timestamp == packet.timestamp)
+        {
+            // This can get bad if we have a lot of duplicate packets,
+            // we will then count some packet multiple times.
+            _waitingForCompletion.frameSize += packet.sizeBytes;
+            _waitingForCompletion.latestPacketTime = nowMs;
+        }
+        else if (_waitingForCompletion.latestPacketTime >= 0 &&
+                 _waitingForCompletion.latestPacketTime + 2000 <= nowMs)
+        {
+            // A packet should never be more than two seconds late
+            // Give up on the pending frame: feed it to the jitter estimator
+            // as incomplete and reset the sample.
+            UpdateJitterAndDelayEstimates(_waitingForCompletion, true);
+            _waitingForCompletion.latestPacketTime = -1;
+            _waitingForCompletion.frameSize = 0;
+            _waitingForCompletion.timestamp = 0;
+        }
+    }
+
+    if (frame != NULL)
+    {
+        VCMFrameBufferStateEnum state = frame->GetState();
+        // Let the decoding state account for packets belonging to already
+        // decoded frames (e.g. late retransmissions).
+        _lastDecodedState.UpdateOldPacket(&packet);
+        // Insert packet
+        // Check for first packet
+        // High sequence number will be -1 if neither an empty packet nor
+        // a media packet has been inserted.
+        bool first = (frame->GetHighSeqNum() == -1);
+        // When in Hybrid mode, we allow for a decodable state
+        // Note: Under current version, a decodable frame will never be
+        // triggered, as the body of the function is empty.
+        // TODO (mikhal): Update when decodable is enabled.
+        bufferReturn = frame->InsertPacket(packet, nowMs,
+                                           _nackMode == kNackHybrid,
+                                           _rttMs);
+        ret = bufferReturn;
+
+        if (bufferReturn > 0)
+        {
+            // Positive return: packet accepted. Track incoming bitrate
+            // (bytes -> bits).
+            _incomingBitCount += packet.sizeBytes << 3;
+
+            // Has this packet been nacked or is it about to be nacked?
+            if (IsPacketRetransmitted(packet))
+            {
+                frame->IncrementNackCount();
+            }
+
+            // Insert each frame once on the arrival of the first packet
+            // belonging to that frame (media or empty)
+            if (state == kStateEmpty && first)
+            {
+                ret = kFirstPacket;
+                // Insert in timestamp order: search backwards for the first
+                // frame with a smaller timestamp and insert after it.
+                FrameList::reverse_iterator rit = std::find_if(
+                    _frameList.rbegin(), _frameList.rend(),
+                    FrameSmallerTimestamp(frame->TimeStamp()));
+                _frameList.insert(rit.base(), frame);
+            }
+        }
+    }
+    switch(bufferReturn)
+    {
+    case kStateError:
+    case kTimeStampError:
+    case kSizeError:
+        {
+            if (frame != NULL)
+            {
+                // Will be released when it gets old.
+                frame->Reset();
+                frame->SetState(kStateEmpty);
+            }
+            break;
+        }
+    case kCompleteSession:
+        {
+            // Only update return value for a JB flush indicator.
+            if (UpdateFrameState(frame) == kFlushIndicator)
+              ret = kFlushIndicator;
+            // Signal that we have a received packet
+            _packetEvent.Set();
+            break;
+        }
+    case kDecodableSession:
+    case kIncomplete:
+        {
+          // Signal that we have a received packet
+          _packetEvent.Set();
+          break;
+        }
+    case kNoError:
+    case kDuplicatePacket:
+        {
+            break;
+        }
+    default:
+        {
+            assert(!"JitterBuffer::InsertPacket: Undefined value");
+        }
+    }
+   return ret;
+}
+
+// Must be called from within _critSect
+// Updates the _waitingForCompletion sample with a packet belonging to an
+// already-released (old) frame: a newer timestamp restarts the sample,
+// otherwise the packet's size is accumulated into the current sample.
+void
+VCMJitterBuffer::UpdateOldJitterSample(const VCMPacket& packet)
+{
+    if (_waitingForCompletion.timestamp != packet.timestamp &&
+        LatestTimestamp(_waitingForCompletion.timestamp, packet.timestamp,
+                        NULL) == packet.timestamp)
+    {
+        // This is a newer frame than the one waiting for completion.
+        _waitingForCompletion.frameSize = packet.sizeBytes;
+        _waitingForCompletion.timestamp = packet.timestamp;
+    }
+    else
+    {
+        // This can get bad if we have a lot of duplicate packets,
+        // we will then count some packet multiple times.
+        _waitingForCompletion.frameSize += packet.sizeBytes;
+        _jitterEstimate.UpdateMaxFrameSize(_waitingForCompletion.frameSize);
+    }
+}
+
+// Must be called from within _critSect
+// Returns true if the packet's sequence number appears in the most recently
+// created NACK list, i.e. the packet is (about to be) a retransmission.
+// Linear scan over _NACKSeqNum.
+bool
+VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const
+{
+    if (_NACKSeqNum && _NACKSeqNumLength > 0)
+    {
+        for (WebRtc_UWord16 i = 0; i < _NACKSeqNumLength; i++)
+        {
+            if (packet.seqNum == _NACKSeqNum[i])
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+// Get nack status (enabled/disabled)
+// Thread-safe accessor for the current NACK mode.
+VCMNackMode
+VCMJitterBuffer::GetNackMode() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _nackMode;
+}
+
+// Set NACK mode
+// Sets the NACK mode and the hybrid-mode RTT thresholds (-1 disables a
+// threshold). Precondition (asserted): low <= high when both are set, and
+// high may only be set if low is. Switching to kNoNack also resets the
+// jitter estimator's nack counter.
+void
+VCMJitterBuffer::SetNackMode(VCMNackMode mode,
+                             int lowRttNackThresholdMs,
+                             int highRttNackThresholdMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _nackMode = mode;
+    assert(lowRttNackThresholdMs >= -1 && highRttNackThresholdMs >= -1);
+    assert(highRttNackThresholdMs == -1 ||
+           lowRttNackThresholdMs <= highRttNackThresholdMs);
+    assert(lowRttNackThresholdMs > -1 || highRttNackThresholdMs == -1);
+    _lowRttNackThresholdMs = lowRttNackThresholdMs;
+    _highRttNackThresholdMs = highRttNackThresholdMs;
+    if (_nackMode == kNoNack)
+    {
+        _jitterEstimate.ResetNackCount();
+    }
+}
+
+
+// Recycle oldest frames up to a key frame, used if JB is completely full
+bool
+VCMJitterBuffer::RecycleFramesUntilKeyFrame()
+{
+    // Remove up to oldest key frame
+    while (_frameList.size() > 0)
+    {
+        // Throw at least one frame.
+        _dropCount++;
+        FrameList::iterator it = _frameList.begin();
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "Jitter buffer drop count:%d, lowSeq %d", _dropCount,
+                     (*it)->GetLowSeqNum());
+        RecycleFrame(*it);
+        it = _frameList.erase(it);
+        if (it != _frameList.end() && (*it)->FrameType() == kVideoFrameKey)
+        {
+            // Fake the lastDecodedState to match this key frame.
+            _lastDecodedState.SetStateOneBack(*it);
+            return true;
+        }
+    }
+    _waitingForKeyFrame = true;
+    _lastDecodedState.Reset(); // TODO (mikhal): no sync
+    return false;
+}
+
+// Must be called under the critical section _critSect.
+// Releases, from the front of _frameList, every frame that is older than the
+// last decoded frame, plus any empty frame that directly continues the
+// decoded sequence (as long as it is not the only frame left). Stops at the
+// first frame that must be kept.
+void VCMJitterBuffer::CleanUpOldFrames() {
+  while (_frameList.size() > 0) {
+    VCMFrameBuffer* oldestFrame = _frameList.front();
+    bool nextFrameEmpty = (_lastDecodedState.ContinuousFrame(oldestFrame) &&
+        oldestFrame->GetState() == kStateEmpty);
+    if (_lastDecodedState.IsOldFrame(oldestFrame) ||
+        (nextFrameEmpty && _frameList.size() > 1)) {
+      ReleaseFrameInternal(_frameList.front());
+      _frameList.erase(_frameList.begin());
+    } else {
+      // Front frame is still needed; everything behind it is newer.
+      break;
+    }
+  }
+}
+
+// Used in GetFrameForDecoding
+// Makes the frame's session decodable, then flags previous-frame loss on the
+// frame if it does not directly continue the last decoded frame. Key frames
+// are exempt since they do not reference earlier frames.
+void VCMJitterBuffer::VerifyAndSetPreviousFrameLost(VCMFrameBuffer& frame) {
+  frame.MakeSessionDecodable();  // Make sure the session can be decoded.
+  if (frame.FrameType() == kVideoFrameKey)
+    return;
+
+  if (!_lastDecodedState.ContinuousFrame(&frame))
+    frame.SetPreviousFrameLoss();
+}
+
+// Decides whether the jitter buffer should hold back frames while waiting
+// for retransmissions: never for kNoNack, always for kNackInfinite, and in
+// hybrid mode only while the RTT is below _highRttNackThresholdMs (a
+// threshold of -1 means "always wait").
+bool
+VCMJitterBuffer::WaitForNack()
+{
+     // NACK disabled -> can't wait
+     if (_nackMode == kNoNack)
+     {
+         return false;
+     }
+     // NACK only -> always wait
+     else if (_nackMode == kNackInfinite)
+     {
+         return true;
+     }
+     // else: hybrid mode, evaluate
+     // RTT high, don't wait
+     if (_highRttNackThresholdMs >= 0 &&
+         _rttMs >= static_cast<unsigned int>(_highRttNackThresholdMs))
+     {
+         return false;
+     }
+     // Either NACK only or hybrid
+     return true;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/jitter_buffer.h b/trunk/src/modules/video_coding/main/source/jitter_buffer.h
new file mode 100644
index 0000000..d951187
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_buffer.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
+
+#include <list>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "modules/video_coding/main/source/decoding_state.h"
+#include "modules/video_coding/main/source/event.h"
+#include "modules/video_coding/main/source/inter_frame_delay.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/jitter_estimator.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// Retransmission (NACK) policy of the jitter buffer.
+enum VCMNackMode
+{
+    kNackInfinite,  // Always wait for retransmissions.
+    kNackHybrid,    // NACK combined with FEC; waiting depends on RTT.
+    kNoNack         // Retransmissions disabled.
+};
+
+typedef std::list<VCMFrameBuffer*> FrameList;
+
+// forward declarations
+class TickTimeBase;
+class VCMFrameBuffer;
+class VCMPacket;
+class VCMEncodedFrame;
+
+// One observation fed to the jitter estimator: a frame's RTP timestamp, its
+// accumulated size in bytes, and the wall-clock arrival time of its latest
+// packet (-1 until a packet has been recorded).
+class VCMJitterSample
+{
+public:
+    VCMJitterSample() : timestamp(0), frameSize(0), latestPacketTime(-1) {}
+    WebRtc_UWord32 timestamp;         // RTP timestamp of the frame.
+    WebRtc_UWord32 frameSize;         // Accumulated frame size in bytes.
+    WebRtc_Word64 latestPacketTime;   // Arrival time (ms); -1 = no packet yet.
+};
+
+// Receiver-side buffer that reorders incoming RTP packets into frames,
+// delivers frames to the decoder in decode order, estimates network jitter,
+// and produces NACK lists of missing sequence numbers. Public methods take
+// _critSect internally; private helpers assume it is already held.
+class VCMJitterBuffer
+{
+public:
+    VCMJitterBuffer(TickTimeBase* clock,
+                    WebRtc_Word32 vcmId = -1,
+                    WebRtc_Word32 receiverId = -1,
+                    bool master = true);
+    virtual ~VCMJitterBuffer();
+
+    void CopyFrom(const VCMJitterBuffer& rhs);
+
+    // We need a start and stop to break out of the wait event
+    // used in GetCompleteFrameForDecoding
+    void Start();
+    void Stop();
+    bool Running() const;
+
+    // Empty the Jitter buffer of all its data
+    void Flush();
+
+    // Statistics, Get received key and delta frames
+    WebRtc_Word32 GetFrameStatistics(WebRtc_UWord32& receivedDeltaFrames,
+                                     WebRtc_UWord32& receivedKeyFrames) const;
+
+    // The number of packets discarded by the jitter buffer because the decoder
+    // won't be able to decode them.
+    WebRtc_UWord32 NumNotDecodablePackets() const;
+    // Get number of packets discarded by the jitter buffer
+    WebRtc_UWord32 DiscardedPackets() const;
+
+    // Statistics, Calculate frame and bit rates
+    WebRtc_Word32 GetUpdate(WebRtc_UWord32& frameRate, WebRtc_UWord32& bitRate);
+
+    // Wait for the first packet in the next frame to arrive, blocks
+    // for <= maxWaitTimeMS ms
+    WebRtc_Word64 GetNextTimeStamp(WebRtc_UWord32 maxWaitTimeMS,
+                                   FrameType& incomingFrameType,
+                                   WebRtc_Word64& renderTimeMs);
+
+    // Will the packet sequence be complete if the next frame is grabbed
+    // for decoding right now? That is, have we lost a frame between the
+    // last decoded frame and the next, or is the next frame missing one
+    // or more packets?
+    bool CompleteSequenceWithNextFrame();
+
+    // TODO (mikhal/stefan): Merge all GetFrameForDecoding into one.
+    // Wait maxWaitTimeMS for a complete frame to arrive. After timeout NULL
+    // is returned.
+    VCMEncodedFrame* GetCompleteFrameForDecoding(WebRtc_UWord32 maxWaitTimeMS);
+
+    // Get a frame for decoding (even an incomplete) without delay.
+    VCMEncodedFrame* GetFrameForDecoding();
+
+    // As above, but honoring the NACK policy (only complete/continuous frames,
+    // with key-frame/decodable fallback).
+    VCMEncodedFrame* GetFrameForDecodingNACK();
+
+    // Release frame (when done with decoding)
+    void ReleaseFrame(VCMEncodedFrame* frame);
+
+    // Get frame to use for this timestamp
+    WebRtc_Word32 GetFrame(const VCMPacket& packet, VCMEncodedFrame*&);
+    VCMEncodedFrame* GetFrame(const VCMPacket& packet); // deprecated
+
+    // Returns the time in ms when the latest packet was inserted into the frame.
+    // Retransmitted is set to true if any of the packets belonging to the frame
+    // has been retransmitted.
+    WebRtc_Word64 LastPacketTime(VCMEncodedFrame* frame,
+                                 bool& retransmitted) const;
+
+    // Insert a packet into a frame
+    VCMFrameBufferEnum InsertPacket(VCMEncodedFrame* frame,
+                                    const VCMPacket& packet);
+
+    // Sync
+    WebRtc_UWord32 GetEstimatedJitterMS();
+    void UpdateRtt(WebRtc_UWord32 rttMs);
+
+    // NACK
+    // Set the NACK mode. "highRttNackThreshold" is an RTT threshold in ms above
+    // which NACK will be disabled if the NACK mode is "kNackHybrid",
+    // -1 meaning that NACK is always enabled in the Hybrid mode.
+    // "lowRttNackThreshold" is an RTT threshold in ms below which we expect to
+    // rely on NACK only, and therefore are using larger buffers to have time to
+    // wait for retransmissions.
+    void SetNackMode(VCMNackMode mode,
+                     int lowRttNackThresholdMs,
+                     int highRttNackThresholdMs);
+    VCMNackMode GetNackMode() const;    // Get nack mode
+    // Get list of missing sequence numbers (size in number of elements)
+    WebRtc_UWord16* GetNackList(WebRtc_UWord16& nackSize,
+                                bool& listExtended);
+
+    WebRtc_Word64 LastDecodedTimestamp() const;
+
+private:
+    // Misc help functions
+    // Recycle (release) frame, used if we didn't receive whole frame
+    void RecycleFrame(VCMFrameBuffer* frame);
+    void ReleaseFrameInternal(VCMFrameBuffer* frame);
+    // Flush and reset the jitter buffer. Call under critical section.
+    void FlushInternal();
+
+    // Help functions for insert packet
+    // Get empty frame, creates new (i.e. increases JB size) if necessary
+    VCMFrameBuffer* GetEmptyFrame();
+    // Recycle oldest frames up to a key frame, used if JB is completely full
+    bool RecycleFramesUntilKeyFrame();
+    // Update frame state
+    // (set as complete or reconstructable if conditions are met)
+    VCMFrameBufferEnum UpdateFrameState(VCMFrameBuffer* frameListItem);
+
+    // Help functions for getting a frame
+    // Find oldest complete frame, used for getting next frame to decode
+    // When enabled, will return a decodable frame
+    FrameList::iterator FindOldestCompleteContinuousFrame(bool enableDecodable);
+
+    // Release old/empty frames from the front of _frameList.
+    void CleanUpOldFrames();
+
+    void VerifyAndSetPreviousFrameLost(VCMFrameBuffer& frame);
+    bool IsPacketRetransmitted(const VCMPacket& packet) const;
+
+    void UpdateJitterAndDelayEstimates(VCMJitterSample& sample,
+                                       bool incompleteFrame);
+    void UpdateJitterAndDelayEstimates(VCMFrameBuffer& frame,
+                                       bool incompleteFrame);
+    void UpdateJitterAndDelayEstimates(WebRtc_Word64 latestPacketTimeMs,
+                                       WebRtc_UWord32 timestamp,
+                                       WebRtc_UWord32 frameSize,
+                                       bool incompleteFrame);
+    void UpdateOldJitterSample(const VCMPacket& packet);
+    WebRtc_UWord32 GetEstimatedJitterMsInternal();
+
+    // NACK help
+    WebRtc_UWord16* CreateNackList(WebRtc_UWord16& nackSize,
+                                   bool& listExtended);
+    WebRtc_Word32 GetLowHighSequenceNumbers(WebRtc_Word32& lowSeqNum,
+                                            WebRtc_Word32& highSeqNum) const;
+
+    // Decide whether should wait for NACK (mainly relevant for hybrid mode)
+    bool WaitForNack();
+
+    WebRtc_Word32                 _vcmId;
+    WebRtc_Word32                 _receiverId;
+    TickTimeBase*                 _clock;
+    // If we are running (have started) or not
+    bool                          _running;
+    CriticalSectionWrapper*       _critSect;
+    // Master/slave flag passed at construction (dual-decoding support).
+    bool                          _master;
+    // Event to signal when we have a frame ready for decoder
+    VCMEvent                      _frameEvent;
+    // Event to signal when we have received a packet
+    VCMEvent                      _packetEvent;
+    // Number of allocated frames
+    WebRtc_Word32                 _maxNumberOfFrames;
+    // Array of pointers to the frames in JB
+    VCMFrameBuffer*               _frameBuffers[kMaxNumberOfFrames];
+    // Frames ordered by timestamp, awaiting decoding.
+    FrameList _frameList;
+
+    // timing
+    VCMDecodingState       _lastDecodedState;
+    // Running count of packets in released frames the decoder cannot use.
+    WebRtc_UWord32          _packetsNotDecodable;
+
+    // Statistics
+    // Frame counter for each type (key, delta, golden, key-delta)
+    WebRtc_UWord8           _receiveStatistics[4];
+    // Latest calculated frame rates of incoming stream
+    WebRtc_UWord8           _incomingFrameRate;
+    // Frame counter, reset in GetUpdate
+    WebRtc_UWord32          _incomingFrameCount;
+    // Real time for last _frameCount reset
+    WebRtc_Word64           _timeLastIncomingFrameCount;
+    // Received bits counter, reset in GetUpdate
+    WebRtc_UWord32          _incomingBitCount;
+    WebRtc_UWord32          _incomingBitRate;
+    WebRtc_UWord32          _dropCount;            // Frame drop counter
+    // Number of frames in a row that have been too old
+    WebRtc_UWord32          _numConsecutiveOldFrames;
+    // Number of packets in a row that have been too old
+    WebRtc_UWord32          _numConsecutiveOldPackets;
+    // Number of packets discarded by the jitter buffer
+    WebRtc_UWord32          _discardedPackets;
+
+    // Filters for estimating jitter
+    VCMJitterEstimator      _jitterEstimate;
+    // Calculates network delays used for jitter calculations
+    VCMInterFrameDelay      _delayEstimate;
+    // Pending jitter sample for the frame still awaiting completion.
+    VCMJitterSample         _waitingForCompletion;
+    // Latest round-trip time reported via UpdateRtt (ms).
+    WebRtc_UWord32          _rttMs;
+
+    // NACK
+    VCMNackMode             _nackMode;
+    int                     _lowRttNackThresholdMs;
+    int                     _highRttNackThresholdMs;
+    // Holds the internal nack list (the missing sequence numbers)
+    WebRtc_Word32           _NACKSeqNumInternal[kNackHistoryLength];
+    // Last NACK list handed out to the caller, and its length.
+    WebRtc_UWord16          _NACKSeqNum[kNackHistoryLength];
+    WebRtc_UWord32          _NACKSeqNumLength;
+    // True while no frame may be released until a key frame arrives.
+    bool                    _waitingForKeyFrame;
+
+    // True until the first packet has been inserted (delay estimate reset).
+    bool                    _firstPacket;
+
+    DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
diff --git a/trunk/src/modules/video_coding/main/source/jitter_buffer_common.cc b/trunk/src/modules/video_coding/main/source/jitter_buffer_common.cc
new file mode 100644
index 0000000..79a21b4
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_buffer_common.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "jitter_buffer_common.h"
+
+#include <cstdlib>
+
+namespace webrtc {
+
+WebRtc_UWord32 LatestTimestamp(WebRtc_UWord32 timestamp1,
+                               WebRtc_UWord32 timestamp2,
+                               bool* has_wrapped) {
+  bool wrap = (timestamp2 < 0x0000ffff && timestamp1 > 0xffff0000) ||
+      (timestamp2 > 0xffff0000 && timestamp1 < 0x0000ffff);
+  if (has_wrapped != NULL)
+    *has_wrapped = wrap;
+  if (timestamp1 > timestamp2 && !wrap)
+      return timestamp1;
+  else if (timestamp1 <= timestamp2 && !wrap)
+      return timestamp2;
+  else if (timestamp1 < timestamp2 && wrap)
+      return timestamp1;
+  else
+      return timestamp2;
+}
+
+WebRtc_Word32 LatestSequenceNumber(WebRtc_Word32 seq_num1,
+                                   WebRtc_Word32 seq_num2,
+                                   bool* has_wrapped) {
+  if (seq_num1 < 0 && seq_num2 < 0)
+    return -1;
+  else if (seq_num1 < 0)
+    return seq_num2;
+  else if (seq_num2 < 0)
+    return seq_num1;
+
+  bool wrap = (seq_num1 < 0x00ff && seq_num2 > 0xff00) ||
+          (seq_num1 > 0xff00 && seq_num2 < 0x00ff);
+
+  if (has_wrapped != NULL)
+    *has_wrapped = wrap;
+
+  if (seq_num2 > seq_num1 && !wrap)
+    return seq_num2;
+  else if (seq_num2 <= seq_num1 && !wrap)
+    return seq_num1;
+  else if (seq_num2 < seq_num1 && wrap)
+    return seq_num2;
+  else
+    return seq_num1;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/jitter_buffer_common.h b/trunk/src/modules/video_coding/main/source/jitter_buffer_common.h
new file mode 100644
index 0000000..38cea42
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_buffer_common.h
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+enum { kMaxNumberOfFrames     = 100 };
+enum { kStartNumberOfFrames   = 6 };    // in packets, 6 packets are approximately 198 ms,
+                                        // we need at least one more for process
+enum { kMaxVideoDelayMs       = 2000 }; // in ms
+
+enum VCMJitterBufferEnum
+{
+    kMaxConsecutiveOldFrames        = 60,
+    kMaxConsecutiveOldPackets       = 300,
+    kMaxPacketsInSession            = 800,
+    kBufferIncStepSizeBytes         = 30000,       // >20 packets
+    kMaxJBFrameSizeBytes            = 4000000      // sanity don't go above 4Mbyte
+};
+
+enum VCMFrameBufferEnum
+{
+    kStateError           = -4,
+    kFlushIndicator       = -3,   // Indicator that a flush has occurred.
+    kTimeStampError       = -2,
+    kSizeError            = -1,
+    kNoError              = 0,
+    kIncomplete           = 1,    // Frame incomplete
+    kFirstPacket          = 2,
+    kCompleteSession      = 3,    // at least one layer in the frame complete.
+    kDecodableSession     = 4,    // Frame incomplete, but ready to be decoded
+    kDuplicatePacket      = 5     // We're receiving a duplicate packet.
+};
+
+enum VCMFrameBufferStateEnum
+{
+    kStateFree,               // Unused frame in the JB
+    kStateEmpty,              // frame popped by the RTP receiver
+    kStateIncomplete,         // frame that have one or more packet(s) stored
+    kStateComplete,           // frame that have all packets
+    kStateDecoding,           // frame popped by the decoding thread
+    kStateDecodable           // Hybrid mode - frame can be decoded
+};
+
+enum { kH264StartCodeLengthBytes = 4};
+
+// Used to indicate whether a received packet contains a complete NALU (or equivalent)
+enum VCMNaluCompleteness
+{
+    kNaluUnset = 0,       // Packet has not been filled.
+    kNaluComplete = 1,    // Packet can be decoded as is.
+    kNaluStart,           // Packet contains the beginning of a NALU
+    kNaluIncomplete,      // Packet is neither the beginning nor the end of a NALU
+    kNaluEnd,             // Packet is the end of a NALU
+};
+
+// Returns the latest of the two timestamps, compensating for wrap arounds.
+// This function assumes that the two timestamps are close in time.
+WebRtc_UWord32 LatestTimestamp(WebRtc_UWord32 timestamp1,
+                               WebRtc_UWord32 timestamp2,
+                               bool* has_wrapped);
+
+// Returns the latest of the two sequence numbers, compensating for wrap
+// arounds. This function assumes that the two sequence numbers are close in
+// time.
+WebRtc_Word32 LatestSequenceNumber(WebRtc_Word32 seq_num1,
+                                   WebRtc_Word32 seq_num2,
+                                   bool* has_wrapped);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
diff --git a/trunk/src/modules/video_coding/main/source/jitter_buffer_unittest.cc b/trunk/src/modules/video_coding/main/source/jitter_buffer_unittest.cc
new file mode 100644
index 0000000..6e48ea7
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_buffer_unittest.cc
@@ -0,0 +1,376 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include <list>
+
+#include "gtest/gtest.h"
+#include "modules/video_coding/main/source/jitter_buffer.h"
+#include "modules/video_coding/main/source/media_opt_util.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+#include "modules/video_coding/main/source/packet.h"
+
+namespace webrtc {
+
+class StreamGenerator {
+ public:
+  StreamGenerator(uint16_t start_seq_num, uint32_t start_timestamp,
+                  int64_t current_time)
+      : packets_(),
+        sequence_number_(start_seq_num),
+        timestamp_(start_timestamp),
+        start_time_(current_time) {}
+
+  void Init(uint16_t start_seq_num, uint32_t start_timestamp,
+            int64_t current_time) {
+    packets_.clear();
+    sequence_number_ = start_seq_num;
+    timestamp_ = start_timestamp;
+    start_time_ = current_time;
+  }
+
+  void GenerateFrame(FrameType type, int num_media_packets,
+                     int num_empty_packets, int64_t current_time) {
+    timestamp_ += 90 * (current_time - start_time_);  // 90 ticks/ms = 90 kHz RTP clock
+    // Advance the sequence number counter in case not all packets from the
+    // previous frame were collected.
+    sequence_number_ += packets_.size();
+    packets_.clear();
+    for (int i = 0; i < num_media_packets; ++i) {
+      packets_.push_back(GeneratePacket(sequence_number_,
+                                        timestamp_,
+                                        (i == 0),
+                                        (i == num_media_packets - 1),
+                                        type));
+      ++sequence_number_;
+    }
+    for (int i = 0; i < num_empty_packets; ++i) {
+      packets_.push_back(GeneratePacket(sequence_number_,
+                                        timestamp_,
+                                        false,
+                                        false,
+                                        kFrameEmpty));
+      ++sequence_number_;
+    }
+  }
+
+  static VCMPacket GeneratePacket(uint16_t sequence_number,
+                                  uint32_t timestamp,
+                                  bool first_packet,
+                                  bool marker_bit,
+                                  FrameType type) {
+    VCMPacket packet;
+    packet.seqNum = sequence_number;
+    packet.timestamp = timestamp;
+    packet.frameType = type;
+    packet.isFirstPacket = first_packet;
+    packet.markerBit = marker_bit;
+    if (packet.isFirstPacket)
+      packet.completeNALU = kNaluStart;
+    else if (packet.markerBit)
+      packet.completeNALU = kNaluEnd;
+    else
+      packet.completeNALU = kNaluIncomplete;
+    return packet;
+  }
+
+  bool PopPacket(VCMPacket* packet, int index) {
+    std::list<VCMPacket>::iterator it = GetPacketIterator(index);
+    if (it == packets_.end())
+      return false;
+    if (packet)
+      *packet = (*it);
+    packets_.erase(it);
+    return true;
+  }
+
+  bool GetPacket(VCMPacket* packet, int index) {
+    std::list<VCMPacket>::iterator it = GetPacketIterator(index);
+    if (it == packets_.end())
+      return false;
+    if (packet)
+      *packet = (*it);
+    return true;
+  }
+
+  bool NextPacket(VCMPacket* packet) {
+    if (packets_.empty())
+      return false;
+    if (packet != NULL)
+      *packet = packets_.front();
+    packets_.pop_front();
+    return true;
+  }
+
+  uint16_t NextSequenceNumber() const {
+    if (packets_.empty())
+      return sequence_number_;
+    return packets_.front().seqNum;
+  }
+
+  int PacketsRemaining() const {
+    return packets_.size();
+  }
+
+ private:
+  std::list<VCMPacket>::iterator GetPacketIterator(int index) {
+    std::list<VCMPacket>::iterator it = packets_.begin();
+    for (int i = 0; i < index; ++i) {
+      ++it;
+      if (it == packets_.end()) break;
+    }
+    return it;
+  }
+
+  std::list<VCMPacket> packets_;
+  uint16_t sequence_number_;
+  uint32_t timestamp_;
+  int64_t start_time_;
+
+  DISALLOW_COPY_AND_ASSIGN(StreamGenerator);
+};
+
+class TestRunningJitterBuffer : public ::testing::Test {
+ protected:
+  enum { kDataBufferSize = 10 };
+  enum { kDefaultFrameRate = 25 };
+  enum { kDefaultFramePeriodMs = 1000 / kDefaultFrameRate };
+
+  virtual void SetUp() {
+    clock_ = new FakeTickTime(0);
+    jitter_buffer_ = new VCMJitterBuffer(clock_);
+    stream_generator = new StreamGenerator(0, 0,
+                                           clock_->MillisecondTimestamp());
+    jitter_buffer_->Start();
+    memset(data_buffer_, 0, kDataBufferSize);
+  }
+
+  virtual void TearDown() {
+    jitter_buffer_->Stop();
+    delete stream_generator;
+    delete jitter_buffer_;
+    delete clock_;
+  }
+
+  VCMFrameBufferEnum InsertPacketAndPop(int index) {
+    VCMPacket packet;
+    VCMEncodedFrame* frame;
+
+    packet.dataPtr = data_buffer_;
+    bool packet_available = stream_generator->PopPacket(&packet, index);
+    EXPECT_TRUE(packet_available);
+    if (!packet_available)
+      return kStateError;  // Return here to avoid crashes below.
+    EXPECT_EQ(VCM_OK, jitter_buffer_->GetFrame(packet, frame));
+    return jitter_buffer_->InsertPacket(frame, packet);
+  }
+
+  VCMFrameBufferEnum InsertPacket(int index) {
+    VCMPacket packet;
+    VCMEncodedFrame* frame;
+
+    packet.dataPtr = data_buffer_;
+    bool packet_available = stream_generator->GetPacket(&packet, index);
+    EXPECT_TRUE(packet_available);
+    if (!packet_available)
+      return kStateError;  // Return here to avoid crashes below.
+    EXPECT_EQ(VCM_OK, jitter_buffer_->GetFrame(packet, frame));
+    return jitter_buffer_->InsertPacket(frame, packet);
+  }
+
+  void InsertFrame(FrameType frame_type) {
+    stream_generator->GenerateFrame(frame_type,
+                                    (frame_type != kFrameEmpty) ? 1 : 0,
+                                    (frame_type == kFrameEmpty) ? 1 : 0,
+                                    clock_->MillisecondTimestamp());
+    EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
+    clock_->IncrementDebugClock(kDefaultFramePeriodMs);
+  }
+
+  void InsertFrames(int num_frames, FrameType frame_type) {
+    for (int i = 0; i < num_frames; ++i) {
+      InsertFrame(frame_type);
+    }
+  }
+
+  void DropFrame(int num_packets) {
+    stream_generator->GenerateFrame(kVideoFrameDelta, num_packets, 0,
+                                    clock_->MillisecondTimestamp());
+    clock_->IncrementDebugClock(kDefaultFramePeriodMs);
+  }
+
+  bool DecodeCompleteFrame() {
+    VCMEncodedFrame* frame = jitter_buffer_->GetCompleteFrameForDecoding(0);
+    bool ret = (frame != NULL);
+    jitter_buffer_->ReleaseFrame(frame);
+    return ret;
+  }
+
+  bool DecodeFrame() {
+    VCMEncodedFrame* frame = jitter_buffer_->GetFrameForDecoding();
+    bool ret = (frame != NULL);
+    jitter_buffer_->ReleaseFrame(frame);
+    return ret;
+  }
+
+  VCMJitterBuffer* jitter_buffer_;
+  StreamGenerator* stream_generator;
+  FakeTickTime* clock_;
+  uint8_t data_buffer_[kDataBufferSize];
+};
+
+class TestJitterBufferNack : public TestRunningJitterBuffer {
+ protected:
+  virtual void SetUp() {
+    TestRunningJitterBuffer::SetUp();
+    jitter_buffer_->SetNackMode(kNackInfinite, -1, -1);
+  }
+
+  virtual void TearDown() {
+    TestRunningJitterBuffer::TearDown();
+  }
+};
+
+TEST_F(TestRunningJitterBuffer, TestFull) {
+  // Insert a key frame and decode it.
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+  DropFrame(1);
+  // Fill the jitter buffer.
+  InsertFrames(kMaxNumberOfFrames, kVideoFrameDelta);
+  // Make sure we can't decode these frames.
+  EXPECT_FALSE(DecodeCompleteFrame());
+  // This frame will make the jitter buffer recycle frames until a key frame.
+  // Since none is found it will have to wait until the next key frame before
+  // decoding.
+  InsertFrame(kVideoFrameDelta);
+  EXPECT_FALSE(DecodeCompleteFrame());
+}
+
+TEST_F(TestRunningJitterBuffer, TestEmptyPackets) {
+  // Make sure a frame can get complete even though empty packets are missing.
+  stream_generator->GenerateFrame(kVideoFrameKey, 3, 3,
+                                  clock_->MillisecondTimestamp());
+  EXPECT_EQ(kFirstPacket, InsertPacketAndPop(4));
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(4));
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+}
+
+TEST_F(TestJitterBufferNack, TestEmptyPackets) {
+  // Make sure empty packets don't clog the jitter buffer.
+  jitter_buffer_->SetNackMode(kNackHybrid, kLowRttNackMs, -1);
+  InsertFrames(kMaxNumberOfFrames, kFrameEmpty);
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, TestNackListFull) {
+  // Insert a key frame and decode it.
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+
+  // Generate and drop |kNackHistoryLength| packets to fill the NACK list.
+  DropFrame(kNackHistoryLength);
+  // Insert a frame which should trigger a recycle until the next key frame.
+  InsertFrame(kVideoFrameDelta);
+  EXPECT_FALSE(DecodeCompleteFrame());
+
+  uint16_t nack_list_length = kNackHistoryLength;
+  bool extended;
+  uint16_t* nack_list = jitter_buffer_->GetNackList(nack_list_length, extended);
+  // Verify that the jitter buffer requests a key frame.
+  EXPECT_TRUE(nack_list_length == 0xffff && nack_list == NULL);
+
+  InsertFrame(kVideoFrameDelta);
+  EXPECT_FALSE(DecodeCompleteFrame());
+  EXPECT_FALSE(DecodeFrame());
+}
+
+TEST_F(TestJitterBufferNack, TestNackBeforeDecode) {
+  DropFrame(10);
+  // Insert a frame and try to generate a NACK list. Shouldn't get one.
+  InsertFrame(kVideoFrameDelta);
+  uint16_t nack_list_size = 0;
+  bool extended = false;
+  uint16_t* list = jitter_buffer_->GetNackList(nack_list_size, extended);
+  // No list generated, and a key frame request is signaled.
+  EXPECT_TRUE(list == NULL);
+  EXPECT_EQ(0xFFFF, nack_list_size);
+}
+
+TEST_F(TestJitterBufferNack, TestNormalOperation) {
+  EXPECT_EQ(kNackInfinite, jitter_buffer_->GetNackMode());
+
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeFrame());
+
+  //  ----------------------------------------------------------------
+  // | 1 | 2 | .. | 8 | 9 | x | 11 | 12 | .. | 19 | x | 21 | .. | 100 |
+  //  ----------------------------------------------------------------
+  stream_generator->GenerateFrame(kVideoFrameKey, 100, 0,
+                                  clock_->MillisecondTimestamp());
+  clock_->IncrementDebugClock(kDefaultFramePeriodMs);
+  EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
+  // Verify that the frame is incomplete.
+  EXPECT_FALSE(DecodeCompleteFrame());
+  while (stream_generator->PacketsRemaining() > 1) {
+    if (stream_generator->NextSequenceNumber() % 10 != 0)
+      EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+    else
+      stream_generator->NextPacket(NULL);  // Drop packet
+  }
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(0, stream_generator->PacketsRemaining());
+  EXPECT_FALSE(DecodeCompleteFrame());
+  EXPECT_FALSE(DecodeFrame());
+  uint16_t nack_list_size = 0;
+  bool extended = false;
+  uint16_t* list = jitter_buffer_->GetNackList(nack_list_size, extended);
+  // Verify the NACK list.
+  const int kExpectedNackSize = 9;
+  ASSERT_EQ(kExpectedNackSize, nack_list_size);
+  for (int i = 0; i < nack_list_size; ++i)
+    EXPECT_EQ((1 + i) * 10, list[i]);
+}
+
+TEST_F(TestJitterBufferNack, TestNormalOperationWrap) {
+  //  -------   ------------------------------------------------------------
+  // | 65532 | | 65533 | 65534 | 65535 | x | 1 | .. | 9 | x | 11 |.....| 96 |
+  //  -------   ------------------------------------------------------------
+  stream_generator->Init(65532, 0, clock_->MillisecondTimestamp());
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+  stream_generator->GenerateFrame(kVideoFrameDelta, 100, 0,
+                                  clock_->MillisecondTimestamp());
+  EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
+  while (stream_generator->PacketsRemaining() > 1) {
+    if (stream_generator->NextSequenceNumber() % 10 != 0)
+      EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+    else
+      stream_generator->NextPacket(NULL);  // Drop packet
+  }
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(0, stream_generator->PacketsRemaining());
+  EXPECT_FALSE(DecodeCompleteFrame());
+  EXPECT_FALSE(DecodeCompleteFrame());
+  uint16_t nack_list_size = 0;
+  bool extended = false;
+  uint16_t* list = jitter_buffer_->GetNackList(nack_list_size, extended);
+  // Verify the NACK list.
+  const int kExpectedNackSize = 10;
+  ASSERT_EQ(kExpectedNackSize, nack_list_size);
+  for (int i = 0; i < nack_list_size; ++i)
+    EXPECT_EQ(i * 10, list[i]);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/jitter_estimator.cc b/trunk/src/modules/video_coding/main/source/jitter_estimator.cc
new file mode 100644
index 0000000..93c6ccb
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_estimator.cc
@@ -0,0 +1,422 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "internal_defines.h"
+#include "jitter_estimator.h"
+#include "rtt_filter.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+
+namespace webrtc {
+
+VCMJitterEstimator::VCMJitterEstimator(WebRtc_Word32 vcmId, WebRtc_Word32 receiverId) :
+_vcmId(vcmId),
+_receiverId(receiverId),
+_phi(0.97),
+_psi(0.9999),
+_alphaCountMax(400),
+_beta(0.9994),
+_thetaLow(0.000001),
+_nackLimit(3),
+_numStdDevDelayOutlier(15),
+_numStdDevFrameSizeOutlier(3),
+_noiseStdDevs(2.33), // ~Less than 1% chance
+                     // (look up in normal distribution table)...
+_noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
+_rttFilter(vcmId, receiverId)
+{
+    Reset();
+}
+
+VCMJitterEstimator&
+VCMJitterEstimator::operator=(const VCMJitterEstimator& rhs)
+{
+    if (this != &rhs)
+    {
+        memcpy(_thetaCov, rhs._thetaCov, sizeof(_thetaCov));
+        memcpy(_Qcov, rhs._Qcov, sizeof(_Qcov));
+
+        _vcmId = rhs._vcmId;
+        _receiverId = rhs._receiverId;
+        _avgFrameSize = rhs._avgFrameSize;
+        _varFrameSize = rhs._varFrameSize;
+        _maxFrameSize = rhs._maxFrameSize;
+        _fsSum = rhs._fsSum;
+        _fsCount = rhs._fsCount;
+        _lastUpdateT = rhs._lastUpdateT;
+        _prevEstimate = rhs._prevEstimate;
+        _prevFrameSize = rhs._prevFrameSize;
+        _avgNoise = rhs._avgNoise;
+        _alphaCount = rhs._alphaCount;
+        _filterJitterEstimate = rhs._filterJitterEstimate;
+        _startupCount = rhs._startupCount;
+        _latestNackTimestamp = rhs._latestNackTimestamp;
+        _nackCount = rhs._nackCount;
+        _rttFilter = rhs._rttFilter;
+    }
+    return *this;
+}
+
+// Resets the JitterEstimate
+void
+VCMJitterEstimator::Reset()
+{
+    _theta[0] = 1/(512e3/8);
+    _theta[1] = 0;
+    _varNoise = 4.0;
+
+    _thetaCov[0][0] = 1e-4;
+    _thetaCov[1][1] = 1e2;
+    _thetaCov[0][1] = _thetaCov[1][0] = 0;
+    _Qcov[0][0] = 2.5e-10;
+    _Qcov[1][1] = 1e-10;
+    _Qcov[0][1] = _Qcov[1][0] = 0;
+    _avgFrameSize = 500;
+    _maxFrameSize = 500;
+    _varFrameSize = 100;
+    _lastUpdateT = -1;
+    _prevEstimate = -1.0;
+    _prevFrameSize = 0;
+    _avgNoise = 0.0;
+    _alphaCount = 1;
+    _filterJitterEstimate = 0.0;
+    _latestNackTimestamp = 0;
+    _nackCount = 0;
+    _fsSum = 0;
+    _fsCount = 0;
+    _startupCount = 0;
+    _rttFilter.Reset();
+}
+
+void
+VCMJitterEstimator::ResetNackCount()
+{
+    _nackCount = 0;
+}
+
+// Updates the estimates with the new measurements
+void
+VCMJitterEstimator::UpdateEstimate(WebRtc_Word64 frameDelayMS, WebRtc_UWord32 frameSizeBytes,
+                                            bool incompleteFrame /* = false */)
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+               VCMId(_vcmId, _receiverId),
+               "Jitter estimate updated with: frameSize=%d frameDelayMS=%d",
+               frameSizeBytes, frameDelayMS);
+    if (frameSizeBytes == 0)
+    {
+        return;
+    }
+    int deltaFS = frameSizeBytes - _prevFrameSize;
+    if (_fsCount < kFsAccuStartupSamples)
+    {
+        _fsSum += frameSizeBytes;
+        _fsCount++;
+    }
+    else if (_fsCount == kFsAccuStartupSamples)
+    {
+        // Give the frame size filter
+        _avgFrameSize = static_cast<double>(_fsSum) /
+                        static_cast<double>(_fsCount);
+        _fsCount++;
+    }
+    if (!incompleteFrame || frameSizeBytes > _avgFrameSize)
+    {
+        double avgFrameSize = _phi * _avgFrameSize +
+                              (1 - _phi) * frameSizeBytes;
+        if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize))
+        {
+            // Only update the average frame size if this sample wasn't a
+            // key frame
+            _avgFrameSize = avgFrameSize;
+        }
+        // Update the variance anyway since we want to capture cases where we only get
+        // key frames.
+        _varFrameSize = VCM_MAX(_phi * _varFrameSize + (1 - _phi) *
+                                (frameSizeBytes - avgFrameSize) *
+                                (frameSizeBytes - avgFrameSize), 1.0);
+    }
+
+    // Update max frameSize estimate
+    _maxFrameSize = VCM_MAX(_psi * _maxFrameSize, static_cast<double>(frameSizeBytes));
+
+    if (_prevFrameSize == 0)
+    {
+        _prevFrameSize = frameSizeBytes;
+        return;
+    }
+    _prevFrameSize = frameSizeBytes;
+
+    // Only update the Kalman filter if the sample is not considered
+    // an extreme outlier. Even if it is an extreme outlier from a
+    // delay point of view, if the frame size also is large the
+    // deviation is probably due to an incorrect line slope.
+    double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
+
+    if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||  // fabs, not int abs()
+        frameSizeBytes > _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize))
+    {
+        // Update the variance of the deviation from the
+        // line given by the Kalman filter
+        EstimateRandomJitter(deviation, incompleteFrame);
+        // Prevent updating with frames which have been congested by a large
+        // frame, and therefore arrives almost at the same time as that frame.
+        // This can occur when we receive a large frame (key frame) which
+        // has been delayed. The next frame is of normal size (delta frame),
+        // and thus deltaFS will be << 0. This removes all frame samples
+        // which arrives after a key frame.
+        if ((!incompleteFrame || deviation >= 0.0) &&
+            static_cast<double>(deltaFS) > - 0.25 * _maxFrameSize)
+        {
+            // Update the Kalman filter with the new data
+            KalmanEstimateChannel(frameDelayMS, deltaFS);
+        }
+    }
+    else
+    {
+        int nStdDev = (deviation >= 0) ? _numStdDevDelayOutlier : -_numStdDevDelayOutlier;
+        EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame);
+    }
+    // Post process the total estimated jitter
+    if (_startupCount >= kStartupDelaySamples)
+    {
+        PostProcessEstimate();
+    }
+    else
+    {
+        _startupCount++;
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Framesize statistics: max=%f average=%f", _maxFrameSize, _avgFrameSize);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "The estimated slope is: theta=(%f, %f)", _theta[0], _theta[1]);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Random jitter: mean=%f variance=%f", _avgNoise, _varNoise);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Current jitter estimate: %f", _filterJitterEstimate);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Current max RTT: %u", _rttFilter.RttMs());
+}
+
+// Updates the nack/packet ratio
+void
+VCMJitterEstimator::FrameNacked()
+{
+    // Wait until _nackLimit retransmissions has been received,
+    // then always add ~1 RTT delay.
+    // TODO(holmer): Should we ever remove the additional delay if the
+    // the packet losses seem to have stopped? We could for instance scale
+    // the number of RTTs to add with the amount of retransmissions in a given
+    // time interval, or similar.
+    if (_nackCount < _nackLimit)
+    {
+        _nackCount++;
+    }
+}
+
+// Updates Kalman estimate of the channel
+// The caller is expected to sanity check the inputs.
+void
+VCMJitterEstimator::KalmanEstimateChannel(WebRtc_Word64 frameDelayMS,
+                                          WebRtc_Word32 deltaFSBytes)
+{
+    double Mh[2];
+    double hMh_sigma;
+    double kalmanGain[2];
+    double measureRes;
+    double t00, t01;
+
+    // Kalman filtering
+
+    // Prediction
+    // M = M + Q
+    _thetaCov[0][0] += _Qcov[0][0];
+    _thetaCov[0][1] += _Qcov[0][1];
+    _thetaCov[1][0] += _Qcov[1][0];
+    _thetaCov[1][1] += _Qcov[1][1];
+
+    // Kalman gain
+    // K = M*h'/(sigma2n + h*M*h') = M*h'/(1 + h*M*h')
+    // h = [dFS 1]
+    // Mh = M*h'
+    // hMh_sigma = h*M*h' + R
+    Mh[0] = _thetaCov[0][0] * deltaFSBytes + _thetaCov[0][1];
+    Mh[1] = _thetaCov[1][0] * deltaFSBytes + _thetaCov[1][1];
+    // sigma weights measurements with a small deltaFS as noisy and
+    // measurements with a large deltaFS as good (fabs avoids integer truncation)
+    if (_maxFrameSize < 1.0)
+    {
+        return;
+    }
+    double sigma = (300.0 * exp(-fabs(static_cast<double>(deltaFSBytes)) /
+                   (1e0 * _maxFrameSize)) + 1) * sqrt(_varNoise);
+    if (sigma < 1.0)
+    {
+        sigma = 1.0;
+    }
+    hMh_sigma = deltaFSBytes * Mh[0] + Mh[1] + sigma;
+    if ((hMh_sigma < 1e-9 && hMh_sigma >= 0) || (hMh_sigma > -1e-9 && hMh_sigma <= 0))
+    {
+        assert(false);
+        return;
+    }
+    kalmanGain[0] = Mh[0] / hMh_sigma;
+    kalmanGain[1] = Mh[1] / hMh_sigma;
+
+    // Correction
+    // theta = theta + K*(dT - h*theta)
+    measureRes = frameDelayMS - (deltaFSBytes * _theta[0] + _theta[1]);
+    _theta[0] += kalmanGain[0] * measureRes;
+    _theta[1] += kalmanGain[1] * measureRes;
+
+    if (_theta[0] < _thetaLow)
+    {
+        _theta[0] = _thetaLow;
+    }
+
+    // M = (I - K*h)*M
+    t00 = _thetaCov[0][0];
+    t01 = _thetaCov[0][1];
+    _thetaCov[0][0] = (1 - kalmanGain[0] * deltaFSBytes) * t00 -
+                      kalmanGain[0] * _thetaCov[1][0];
+    _thetaCov[0][1] = (1 - kalmanGain[0] * deltaFSBytes) * t01 -
+                      kalmanGain[0] * _thetaCov[1][1];
+    _thetaCov[1][0] = _thetaCov[1][0] * (1 - kalmanGain[1]) -
+                      kalmanGain[1] * deltaFSBytes * t00;
+    _thetaCov[1][1] = _thetaCov[1][1] * (1 - kalmanGain[1]) -
+                      kalmanGain[1] * deltaFSBytes * t01;
+
+    // Covariance matrix, must be positive semi-definite
+    assert(_thetaCov[0][0] + _thetaCov[1][1] >= 0 &&
+           _thetaCov[0][0] * _thetaCov[1][1] - _thetaCov[0][1] * _thetaCov[1][0] >= 0 &&
+           _thetaCov[0][0] >= 0);
+}
+
+// Calculate difference in delay between a sample and the
+// expected delay estimated by the Kalman filter
+double
+VCMJitterEstimator::DeviationFromExpectedDelay(WebRtc_Word64 frameDelayMS,
+                                               WebRtc_Word32 deltaFSBytes) const
+{
+    return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]);
+}
+
+// Estimates the random jitter by calculating the variance of the
+// sample distance from the line given by theta.
+void
+VCMJitterEstimator::EstimateRandomJitter(double d_dT, bool incompleteFrame)
+{
+    double alpha;
+    if (_alphaCount == 0)
+    {
+        assert(_alphaCount > 0);
+        return;
+    }
+    alpha = static_cast<double>(_alphaCount - 1) / static_cast<double>(_alphaCount);
+    _alphaCount++;
+    if (_alphaCount > _alphaCountMax)
+    {
+        _alphaCount = _alphaCountMax;
+    }
+    double avgNoise = alpha * _avgNoise + (1 - alpha) * d_dT;
+    double varNoise = alpha * _varNoise +
+                      (1 - alpha) * (d_dT - _avgNoise) * (d_dT - _avgNoise);
+    if (!incompleteFrame || varNoise > _varNoise)
+    {
+        _avgNoise = avgNoise;
+        _varNoise = varNoise;
+    }
+    if (_varNoise < 1.0)
+    {
+        // The variance should never be zero, since we might get
+        // stuck and consider all samples as outliers.
+        _varNoise = 1.0;
+    }
+}
+
+double
+VCMJitterEstimator::NoiseThreshold() const
+{
+    double noiseThreshold = _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset;
+    if (noiseThreshold < 1.0)
+    {
+        noiseThreshold = 1.0;
+    }
+    return noiseThreshold;
+}
+
+// Calculates the current jitter estimate from the filtered estimates
+double
+VCMJitterEstimator::CalculateEstimate()
+{
+    double ret = _theta[0] * (_maxFrameSize - _avgFrameSize) + NoiseThreshold();
+
+    // A very low estimate (or negative) is neglected
+    if (ret < 1.0) {
+        if (_prevEstimate <= 0.01)
+        {
+            ret = 1.0;
+        }
+        else
+        {
+            ret = _prevEstimate;
+        }
+    }
+    if (ret > 10000.0) // Sanity
+    {
+        ret = 10000.0;
+    }
+    _prevEstimate = ret;
+    return ret;
+}
+
+void
+VCMJitterEstimator::PostProcessEstimate()
+{
+    _filterJitterEstimate = CalculateEstimate();
+}
+
+void
+VCMJitterEstimator::UpdateRtt(WebRtc_UWord32 rttMs)
+{
+    _rttFilter.Update(rttMs);
+}
+
+void
+VCMJitterEstimator::UpdateMaxFrameSize(WebRtc_UWord32 frameSizeBytes)
+{
+    if (_maxFrameSize < frameSizeBytes)
+    {
+        _maxFrameSize = frameSizeBytes;
+    }
+}
+
+// Returns the current filtered estimate if available,
+// otherwise tries to calculate an estimate.
+double
+VCMJitterEstimator::GetJitterEstimate(double rttMultiplier)
+{
+    double jitterMS = CalculateEstimate();
+    if (_filterJitterEstimate > jitterMS)
+    {
+        jitterMS = _filterJitterEstimate;
+    }
+    if (_nackCount >= _nackLimit)
+    {
+        return jitterMS + _rttFilter.RttMs() * rttMultiplier;
+    }
+    return jitterMS;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/jitter_estimator.h b/trunk/src/modules/video_coding/main/source/jitter_estimator.h
new file mode 100644
index 0000000..6fc4703
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/jitter_estimator.h
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
+
+#include "typedefs.h"
+#include "rtt_filter.h"
+
+namespace webrtc
+{
+
+// Estimates the jitter (delay variation) of incoming video frames. A Kalman
+// filter tracks the frame-size-dependent delay component (_theta), while the
+// variance of the deviations from that line models the random noise
+// (_varNoise). An RTT filter contributes a retransmission term when NACKs
+// are in use.
+class VCMJitterEstimator
+{
+public:
+    VCMJitterEstimator(WebRtc_Word32 vcmId = 0, WebRtc_Word32 receiverId = 0);
+
+    VCMJitterEstimator& operator=(const VCMJitterEstimator& rhs);
+
+    // Resets the estimate to the initial state
+    void Reset();
+    void ResetNackCount();
+
+    // Updates the jitter estimate with the new data.
+    //
+    // Input:
+    //          - frameDelay      : Delay-delta calculated by UTILDelayEstimate in milliseconds
+    //          - frameSize       : Frame size of the current frame.
+    //          - incompleteFrame : Flags if the frame is used to update the estimate before it
+    //                              was complete. Default is false.
+    void UpdateEstimate(WebRtc_Word64 frameDelayMS,
+                        WebRtc_UWord32 frameSizeBytes,
+                        bool incompleteFrame = false);
+
+    // Returns the current jitter estimate in milliseconds and also
+    // adds an RTT dependent term in cases of retransmission.
+    //  Input:
+    //          - rttMultiplier  : RTT param multiplier (when applicable).
+    //
+    // Return value                   : Jitter estimate in milliseconds
+    double GetJitterEstimate(double rttMultiplier);
+
+    // Updates the nack counter.
+    void FrameNacked();
+
+    // Updates the RTT filter.
+    //
+    // Input:
+    //          - rttMs               : RTT in ms
+    void UpdateRtt(WebRtc_UWord32 rttMs);
+
+    void UpdateMaxFrameSize(WebRtc_UWord32 frameSizeBytes);
+
+    // A constant describing the delay from the jitter buffer
+    // to the delay on the receiving side which is not accounted
+    // for by the jitter buffer nor the decoding delay estimate.
+    static const WebRtc_UWord32 OPERATING_SYSTEM_JITTER = 10;
+
+protected:
+    // These are protected for better testing possibilities
+    double              _theta[2]; // Estimated line parameters (slope, offset)
+    double              _varNoise; // Variance of the time-deviation from the line
+
+private:
+    // Updates the Kalman filter for the line describing
+    // the frame size dependent jitter.
+    //
+    // Input:
+    //          - frameDelayMS    : Delay-delta calculated by UTILDelayEstimate in milliseconds
+    //          - deltaFSBytes    : Frame size delta, i.e.
+    //                            : frame size at time T minus frame size at time T-1
+    void KalmanEstimateChannel(WebRtc_Word64 frameDelayMS, WebRtc_Word32 deltaFSBytes);
+
+    // Updates the random jitter estimate, i.e. the variance
+    // of the time deviations from the line given by the Kalman filter.
+    //
+    // Input:
+    //          - d_dT              : The deviation from the kalman estimate
+    //          - incompleteFrame   : True if the frame used to update the estimate
+    //                                with was incomplete
+    void EstimateRandomJitter(double d_dT, bool incompleteFrame);
+
+    double NoiseThreshold() const;
+
+    // Calculates the current jitter estimate.
+    //
+    // Return value                 : The current jitter estimate in milliseconds
+    double CalculateEstimate();
+
+    // Post process the calculated estimate
+    void PostProcessEstimate();
+
+    // Calculates the difference in delay between a sample and the
+    // expected delay estimated by the Kalman filter.
+    //
+    // Input:
+    //          - frameDelayMS    : Delay-delta calculated by UTILDelayEstimate in milliseconds
+    //          - deltaFS         : Frame size delta, i.e. frame size at time
+    //                              T minus frame size at time T-1
+    //
+    // Return value                 : The difference in milliseconds
+    double DeviationFromExpectedDelay(WebRtc_Word64 frameDelayMS,
+                                      WebRtc_Word32 deltaFSBytes) const;
+
+    // Constants, filter parameters
+    WebRtc_Word32         _vcmId;
+    WebRtc_Word32         _receiverId;
+    const double          _phi;
+    const double          _psi;
+    const WebRtc_UWord32  _alphaCountMax;
+    const double          _beta;
+    const double          _thetaLow;
+    const WebRtc_UWord32  _nackLimit;
+    const WebRtc_Word32   _numStdDevDelayOutlier;
+    const WebRtc_Word32   _numStdDevFrameSizeOutlier;
+    const double          _noiseStdDevs;
+    const double          _noiseStdDevOffset;
+
+    double                _thetaCov[2][2]; // Estimate covariance
+    double                _Qcov[2][2];     // Process noise covariance
+    double                _avgFrameSize;   // Average frame size
+    double                _varFrameSize;   // Frame size variance
+    double                _maxFrameSize;   // Largest frame size received (descending
+                                           // with a factor _psi)
+    WebRtc_UWord32        _fsSum;
+    WebRtc_UWord32        _fsCount;
+
+    WebRtc_Word64         _lastUpdateT;
+    double                _prevEstimate;         // The previously returned jitter estimate
+    WebRtc_UWord32        _prevFrameSize;        // Frame size of the previous frame
+    double                _avgNoise;             // Average of the random jitter
+    WebRtc_UWord32        _alphaCount;
+    double                _filterJitterEstimate; // The filtered sum of jitter estimates
+
+    WebRtc_UWord32        _startupCount;
+
+    WebRtc_Word64         _latestNackTimestamp;  // Timestamp in ms when the latest nack was seen
+    WebRtc_UWord32        _nackCount;            // Keeps track of the number of nacks received,
+                                                 // but never goes above _nackLimit
+    VCMRttFilter          _rttFilter;
+
+    // Number of samples to treat as the startup phase.
+    enum { kStartupDelaySamples = 30 };
+    // Number of initial frames over which frame sizes are accumulated.
+    enum { kFsAccuStartupSamples = 5 };
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
diff --git a/trunk/src/modules/video_coding/main/source/media_opt_util.cc b/trunk/src/modules/video_coding/main/source/media_opt_util.cc
new file mode 100644
index 0000000..b520278
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/media_opt_util.cc
@@ -0,0 +1,893 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/media_opt_util.h"
+
+#include <math.h>
+#include <float.h>
+#include <limits.h>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/vp8/main/interface/vp8_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "modules/video_coding/main/source/er_tables_xor.h"
+#include "modules/video_coding/main/source/fec_tables_xor.h"
+#include "modules/video_coding/main/source/nack_fec_tables.h"
+
+namespace webrtc {
+
+// Initializes all protection state to its defaults: zero protection factors,
+// an MTU-sized (1460 byte) max payload, and UEP enabled for delta frames
+// only. Allocates the VCMQmRobustness helper, released in the destructor.
+VCMProtectionMethod::VCMProtectionMethod():
+_effectivePacketLoss(0),
+_protectionFactorK(0),
+_protectionFactorD(0),
+_residualPacketLossFec(0.0f),
+_scaleProtKey(2.0f),
+_maxPayloadSize(1460),
+_qmRobustness(new VCMQmRobustness()),
+_useUepProtectionK(false),
+_useUepProtectionD(true),
+_corrFecCost(1.0),
+_type(kNone),
+_efficiency(0)
+{
+    //
+}
+
+// Releases the robustness helper allocated in the constructor.
+VCMProtectionMethod::~VCMProtectionMethod()
+{
+    delete _qmRobustness;
+}
+// Forwards the latest video content metrics to the robustness helper.
+void
+VCMProtectionMethod::UpdateContentMetrics(const
+                                          VideoContentMetrics* contentMetrics)
+{
+    _qmRobustness->UpdateContent(contentMetrics);
+}
+
+// Hybrid NACK/FEC method. The two RTT thresholds split the RTT range into
+// NACK-only (below lowRttNackThresholdMs), hybrid, and FEC-only (above
+// highRttNackThresholdMs) regions; -1 disables the corresponding bound.
+// The asserts enforce a consistent, ordered threshold pair.
+VCMNackFecMethod::VCMNackFecMethod(int lowRttNackThresholdMs,
+                                   int highRttNackThresholdMs)
+    : VCMFecMethod(),
+      _lowRttNackMs(lowRttNackThresholdMs),
+      _highRttNackMs(highRttNackThresholdMs) {
+  assert(lowRttNackThresholdMs >= -1 && highRttNackThresholdMs >= -1);
+  assert(highRttNackThresholdMs == -1 ||
+         lowRttNackThresholdMs <= highRttNackThresholdMs);
+  assert(lowRttNackThresholdMs > -1 || highRttNackThresholdMs == -1);
+  _type = kNackFec;
+}
+
+// No resources beyond those owned by the base classes.
+VCMNackFecMethod::~VCMNackFecMethod()
+{
+    //
+}
+// Computes the FEC protection factors, then adjusts the delta-frame factor
+// according to which RTT region (NACK-only / hybrid / FEC-only) applies.
+bool
+VCMNackFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters)
+{
+    // Hybrid Nack FEC has three operational modes:
+    // 1. Low RTT (below kLowRttNackMs) - Nack only: Set FEC rate
+    //    (_protectionFactorD) to zero. -1 means no FEC.
+    // 2. High RTT (above _highRttNackMs) - FEC Only: Keep FEC factors.
+    //    -1 means always allow NACK.
+    // 3. Medium RTT values - Hybrid mode: We will only nack the
+    //    residual following the decoding of the FEC (refer to JB logic). FEC
+    //    delta protection factor will be adjusted based on the RTT.
+
+    // Otherwise: we count on FEC; if the RTT is below a threshold, then we
+    // nack the residual, based on a decision made in the JB.
+
+    // Compute the protection factors
+    VCMFecMethod::ProtectionFactor(parameters);
+    if (_lowRttNackMs == -1 || parameters->rtt < _lowRttNackMs)
+    {
+        // NACK-only region: disable delta-frame FEC entirely.
+        _protectionFactorD = 0;
+        VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
+    }
+
+    // When in Hybrid mode (RTT range), adjust FEC rates based on the
+    // RTT (NACK effectiveness) - adjustment factor is in the range [0,1].
+    else if (_highRttNackMs == -1 || parameters->rtt < _highRttNackMs)
+    {
+        // TODO(mikhal): Disabling adjustment temporarily.
+        // WebRtc_UWord16 rttIndex = (WebRtc_UWord16) parameters->rtt;
+        float adjustRtt = 1.0f;// (float)VCMNackFecTable[rttIndex] / 100.0f;
+
+        // Adjust FEC with NACK on (for delta frame only)
+        // table depends on RTT relative to rttMax (NACK Threshold)
+        _protectionFactorD = static_cast<WebRtc_UWord8>
+                            (adjustRtt *
+                             static_cast<float>(_protectionFactorD));
+        // update FEC rates after applying adjustment
+        VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
+    }
+
+    return true;
+}
+
+// Delegates to the FEC base class, which computes the effective and
+// residual packet loss from the current FEC protection.
+bool
+VCMNackFecMethod::EffectivePacketLoss(const VCMProtectionParameters* parameters)
+{
+    // Set the effective packet loss for encoder (based on FEC code).
+    // Compute the effective packet loss and residual packet loss due to FEC.
+    VCMFecMethod::EffectivePacketLoss(parameters);
+    return true;
+}
+
+// Recomputes protection factors and effective loss, then derives the bit
+// cost (efficiency) from both the FEC overhead and, in the NACK-enabled RTT
+// region, the residual-loss retransmission cost.
+bool
+VCMNackFecMethod::UpdateParameters(const VCMProtectionParameters* parameters)
+{
+    ProtectionFactor(parameters);
+    EffectivePacketLoss(parameters);
+
+    // Efficiency computation is based on FEC and NACK
+
+    // Add FEC cost: ignore I frames for now
+    float fecRate = static_cast<float> (_protectionFactorD) / 255.0f;
+    _efficiency = parameters->bitRate * fecRate * _corrFecCost;
+
+    // Add NACK cost, when applicable
+    if (_highRttNackMs == -1 || parameters->rtt < _highRttNackMs)
+    {
+        // nackCost  = (bitRate - nackCost) * (lossPr)
+        _efficiency += parameters->bitRate * _residualPacketLossFec /
+                       (1.0f + _residualPacketLossFec);
+    }
+
+    // Protection/fec rates obtained above are defined relative to total number
+    // of packets (total rate: source + fec) FEC in RTP module assumes
+    // protection factor is defined relative to source number of packets so we
+    // should convert the factor to reduce mismatch between mediaOpt's rate and
+    // the actual one
+    _protectionFactorK = VCMFecMethod::ConvertFECRate(_protectionFactorK);
+    _protectionFactorD = VCMFecMethod::ConvertFECRate(_protectionFactorD);
+
+    return true;
+}
+
+// NACK-only protection method; tags the instance with its type.
+VCMNackMethod::VCMNackMethod():
+VCMProtectionMethod()
+{
+    _type = kNack;
+}
+
+// No resources beyond those owned by the base class.
+VCMNackMethod::~VCMNackMethod()
+{
+    //
+}
+
+// For pure NACK the encoder-facing effective loss is always zero
+// (losses are recovered by retransmission, not by the encoder).
+bool
+VCMNackMethod::EffectivePacketLoss(const VCMProtectionParameters* parameter)
+{
+    // Effective Packet Loss, NA in current version.
+    _effectivePacketLoss = 0;
+    return true;
+}
+
+// Updates effective loss and derives the retransmission bit cost from the
+// incoming loss probability.
+bool
+VCMNackMethod::UpdateParameters(const VCMProtectionParameters* parameters)
+{
+    // Compute the effective packet loss
+    EffectivePacketLoss(parameters);
+
+    // nackCost  = (bitRate - nackCost) * (lossPr)
+    _efficiency = parameters->bitRate * parameters->lossPr /
+                  (1.0f + parameters->lossPr);
+    return true;
+}
+
+// FEC-only protection method; tags the instance with its type.
+VCMFecMethod::VCMFecMethod():
+VCMProtectionMethod()
+{
+    _type = kFec;
+}
+// No resources beyond those owned by the base class.
+VCMFecMethod::~VCMFecMethod()
+{
+    //
+}
+
+// Returns the factor by which key-frame FEC protection is boosted relative
+// to delta frames: the key/delta packets-per-frame ratio, but at least 2.
+WebRtc_UWord8
+VCMFecMethod::BoostCodeRateKey(WebRtc_UWord8 packetFrameDelta,
+                               WebRtc_UWord8 packetFrameKey) const
+{
+    WebRtc_UWord8 boostRateKey = 2;
+    // Default: ratio scales the FEC protection up for I frames
+    WebRtc_UWord8 ratio = 1;
+
+    if (packetFrameDelta > 0)
+    {
+        // NOTE(review): cast is to the signed WebRtc_Word8 although ratio is
+        // unsigned; a key/delta ratio > 127 would wrap — confirm intended.
+        ratio = (WebRtc_Word8) (packetFrameKey / packetFrameDelta);
+    }
+    ratio = VCM_MAX(boostRateKey, ratio);
+
+    return ratio;
+}
+
+// Converts a protection factor defined relative to the total packet count
+// (source + FEC) into one relative to the source packet count, as expected
+// by the RTP module: rate' = 255 * rate / (255 - rate), capped at 255.
+WebRtc_UWord8
+VCMFecMethod::ConvertFECRate(WebRtc_UWord8 codeRateRTP) const
+{
+    return static_cast<WebRtc_UWord8> (VCM_MIN(255,(0.5 + 255.0 * codeRateRTP /
+                                      (float)(255 - codeRateRTP))));
+}
+
+// Update FEC with protectionFactorD (delta-frame protection factor).
+void
+VCMFecMethod::UpdateProtectionFactorD(WebRtc_UWord8 protectionFactorD)
+{
+    _protectionFactorD = protectionFactorD;
+}
+
+// Update FEC with protectionFactorK (key-frame protection factor).
+void
+VCMFecMethod::UpdateProtectionFactorK(WebRtc_UWord8 protectionFactorK)
+{
+    _protectionFactorK = protectionFactorK;
+}
+
+// AvgRecoveryFEC: computes the residual packet loss (RPL) function.
+// This is the average recovery from the FEC, assuming random packet loss model.
+// Computed off-line for a range of FEC code parameters and loss rates.
+float
+VCMFecMethod::AvgRecoveryFEC(const VCMProtectionParameters* parameters) const
+{
+    // Total (avg) bits available per frame: total rate over actual/sent frame
+    // rate units are kbits/frame
+    const WebRtc_UWord16 bitRatePerFrame = static_cast<WebRtc_UWord16>
+                        (parameters->bitRate / (parameters->frameRate));
+
+    // Total (average) number of packets per frame (source and fec):
+    const WebRtc_UWord8 avgTotPackets = 1 + static_cast<WebRtc_UWord8>
+                        (static_cast<float> (bitRatePerFrame * 1000.0) /
+                         static_cast<float> (8.0 * _maxPayloadSize) + 0.5);
+
+    const float protectionFactor = static_cast<float>(_protectionFactorD) /
+                                                      255.0;
+
+    WebRtc_UWord8 fecPacketsPerFrame = static_cast<WebRtc_UWord8>
+                                      (0.5 + protectionFactor * avgTotPackets);
+
+    WebRtc_UWord8 sourcePacketsPerFrame = avgTotPackets - fecPacketsPerFrame;
+
+    if ( (fecPacketsPerFrame == 0) || (sourcePacketsPerFrame == 0) )
+    {
+        // No protection, or rate too low: so average recovery from FEC == 0.
+        return 0.0;
+    }
+
+    // Table defined up to kMaxNumPackets
+    if (sourcePacketsPerFrame > kMaxNumPackets)
+    {
+        sourcePacketsPerFrame = kMaxNumPackets;
+    }
+
+    // Table defined up to kMaxNumPackets
+    if (fecPacketsPerFrame > kMaxNumPackets)
+    {
+        fecPacketsPerFrame = kMaxNumPackets;
+    }
+
+    // Code index for tables: up to (kMaxNumPackets * kMaxNumPackets)
+    WebRtc_UWord16 codeIndexTable[kMaxNumPackets * kMaxNumPackets];
+    WebRtc_UWord16 k = 0;
+    for (WebRtc_UWord8 i = 1; i <= kMaxNumPackets; i++)
+    {
+        for (WebRtc_UWord8 j = 1; j <= i; j++)
+        {
+            codeIndexTable[(j - 1) * kMaxNumPackets + i - 1] = k;
+            k += 1;
+        }
+    }
+
+    WebRtc_UWord8 lossRate = static_cast<WebRtc_UWord8> (255.0 *
+                             parameters->lossPr + 0.5f);
+
+    // Constrain lossRate to 50%: tables defined up to 50%
+    if (lossRate >= kPacketLossMax)
+    {
+        lossRate = kPacketLossMax - 1;
+    }
+
+    // NOTE(review): codeIndexTable is only populated for
+    // fecPacketsPerFrame <= sourcePacketsPerFrame (j <= i in the fill loop);
+    // a larger FEC fraction would read an uninitialized entry — confirm the
+    // protection factor is always <= 50% before this point.
+    const WebRtc_UWord16 codeIndex = (fecPacketsPerFrame - 1) * kMaxNumPackets +
+                                     (sourcePacketsPerFrame - 1);
+
+    const WebRtc_UWord16 indexTable = codeIndexTable[codeIndex] * kPacketLossMax +
+                                      lossRate;
+
+    // Check on table index
+    assert(indexTable < kSizeAvgFECRecoveryXOR);
+    float avgFecRecov = static_cast<float>(kAvgFECRecoveryXOR[indexTable]);
+
+    return avgFecRecov;
+}
+
+// Computes the FEC protection factors for delta (P) and key (I) frames from
+// the filtered packet loss, the effective bit rate per frame, and the
+// spatial resolution, using pre-computed XOR-code tables. Also derives the
+// FEC cost-correction factor and the UEP on/off decisions.
+bool
+VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters)
+{
+    // FEC PROTECTION SETTINGS: varies with packet loss and bitrate
+
+    // No protection if (filtered) packetLoss is 0
+    WebRtc_UWord8 packetLoss = (WebRtc_UWord8) (255 * parameters->lossPr);
+    if (packetLoss == 0)
+    {
+        _protectionFactorK = 0;
+        _protectionFactorD = 0;
+         return true;
+    }
+
+    // Parameters for FEC setting:
+    // first partition size, thresholds, table pars, spatial resoln fac.
+
+    // First partition protection: ~ 20%
+    WebRtc_UWord8 firstPartitionProt = (WebRtc_UWord8) (255 * 0.20);
+
+    // Minimum protection level needed to generate one FEC packet for one
+    // source packet/frame (in RTP sender)
+    WebRtc_UWord8 minProtLevelFec = 85;
+
+    // Threshold on packetLoss and bitRrate/frameRate (=average #packets),
+    // above which we allocate protection to cover at least first partition.
+    WebRtc_UWord8 lossThr = 0;
+    WebRtc_UWord8 packetNumThr = 1;
+
+    // Parameters for range of rate index of table.
+    const WebRtc_UWord8 ratePar1 = 5;
+    const WebRtc_UWord8 ratePar2 = 49;
+
+    // Spatial resolution size, relative to a reference size.
+    float spatialSizeToRef = static_cast<float>
+                           (parameters->codecWidth * parameters->codecHeight) /
+                           (static_cast<float>(704 * 576));
+    // resolnFac: This parameter will generally increase/decrease the FEC rate
+    // (for fixed bitRate and packetLoss) based on system size.
+    // Use a smaller exponent (< 1) to control/soften system size effect.
+    const float resolnFac = 1.0 / powf(spatialSizeToRef, 0.3f);
+
+    const int bitRatePerFrame = BitsPerFrame(parameters);
+
+
+    // Average number of packets per frame (source and fec):
+    const WebRtc_UWord8 avgTotPackets = 1 + (WebRtc_UWord8)
+                                        ((float) bitRatePerFrame * 1000.0
+                                       / (float) (8.0 * _maxPayloadSize) + 0.5);
+
+    // FEC rate parameters: for P and I frame
+    WebRtc_UWord8 codeRateDelta = 0;
+    WebRtc_UWord8 codeRateKey = 0;
+
+    // Get index for table: the FEC protection depends on an effective rate.
+    // The range on the rate index corresponds to rates (bps)
+    // from ~200k to ~8000k, for 30fps
+    const WebRtc_UWord16 effRateFecTable = static_cast<WebRtc_UWord16>
+                                           (resolnFac * bitRatePerFrame);
+    WebRtc_UWord8 rateIndexTable =
+        (WebRtc_UWord8) VCM_MAX(VCM_MIN((effRateFecTable - ratePar1) /
+                                         ratePar1, ratePar2), 0);
+
+    // Restrict packet loss range to 50:
+    // current tables defined only up to 50%
+    if (packetLoss >= kPacketLossMax)
+    {
+        packetLoss = kPacketLossMax - 1;
+    }
+    WebRtc_UWord16 indexTable = rateIndexTable * kPacketLossMax + packetLoss;
+
+    // Check on table index
+    assert(indexTable < kSizeCodeRateXORTable);
+
+    // Protection factor for P frame
+    codeRateDelta = kCodeRateXORTable[indexTable];
+
+    if (packetLoss > lossThr && avgTotPackets > packetNumThr)
+    {
+        // Set a minimum based on first partition size.
+        if (codeRateDelta < firstPartitionProt)
+        {
+            codeRateDelta = firstPartitionProt;
+        }
+    }
+
+    // Check limit on amount of protection for P frame; 50% is max.
+    if (codeRateDelta >= kPacketLossMax)
+    {
+        codeRateDelta = kPacketLossMax - 1;
+    }
+
+    float adjustFec = 1.0f;
+    // Avoid additional adjustments when layers are active.
+    // TODO(mikhal/marco): Update adjustment based on layer info.
+    if (parameters->numLayers == 1)
+    {
+        adjustFec = _qmRobustness->AdjustFecFactor(codeRateDelta,
+                                                   parameters->bitRate,
+                                                   parameters->frameRate,
+                                                   parameters->rtt,
+                                                   packetLoss);
+    }
+
+    codeRateDelta = static_cast<WebRtc_UWord8>(codeRateDelta * adjustFec);
+
+    // For Key frame:
+    // Effectively at a higher rate, so we scale/boost the rate
+    // The boost factor may depend on several factors: ratio of packet
+    // number of I to P frames, how much protection placed on P frames, etc.
+    const WebRtc_UWord8 packetFrameDelta = (WebRtc_UWord8)
+                                           (0.5 + parameters->packetsPerFrame);
+    const WebRtc_UWord8 packetFrameKey = (WebRtc_UWord8)
+                                         (0.5 + parameters->packetsPerFrameKey);
+    const WebRtc_UWord8 boostKey = BoostCodeRateKey(packetFrameDelta,
+                                                    packetFrameKey);
+
+    rateIndexTable = (WebRtc_UWord8) VCM_MAX(VCM_MIN(
+                      1 + (boostKey * effRateFecTable - ratePar1) /
+                      ratePar1,ratePar2),0);
+    WebRtc_UWord16 indexTableKey = rateIndexTable * kPacketLossMax + packetLoss;
+
+    // NOTE(review): this clamps to kSizeCodeRateXORTable, but a valid index
+    // must be strictly less than the table size; clamping to
+    // kSizeCodeRateXORTable - 1 would avoid tripping the assert below when
+    // the clamp is actually hit — confirm.
+    indexTableKey = VCM_MIN(indexTableKey, kSizeCodeRateXORTable);
+
+    // Check on table index
+    assert(indexTableKey < kSizeCodeRateXORTable);
+
+    // Protection factor for I frame
+    codeRateKey = kCodeRateXORTable[indexTableKey];
+
+    // Boosting for Key frame.
+    int boostKeyProt = _scaleProtKey * codeRateDelta;
+    if (boostKeyProt >= kPacketLossMax)
+    {
+        boostKeyProt = kPacketLossMax - 1;
+    }
+
+    // Make sure I frame protection is at least larger than P frame protection,
+    // and at least as high as filtered packet loss.
+    codeRateKey = static_cast<WebRtc_UWord8> (VCM_MAX(packetLoss,
+            VCM_MAX(boostKeyProt, codeRateKey)));
+
+    // Check limit on amount of protection for I frame: 50% is max.
+    if (codeRateKey >= kPacketLossMax)
+    {
+        codeRateKey = kPacketLossMax - 1;
+    }
+
+    _protectionFactorK = codeRateKey;
+    _protectionFactorD = codeRateDelta;
+
+    // Generally there is a rate mis-match between the FEC cost estimated
+    // in mediaOpt and the actual FEC cost sent out in RTP module.
+    // This is more significant at low rates (small # of source packets), where
+    // the granularity of the FEC decreases. In this case, non-zero protection
+    // in mediaOpt may generate 0 FEC packets in RTP sender (since actual #FEC
+    // is based on rounding off protectionFactor on actual source packet number).
+    // The correction factor (_corrFecCost) attempts to correct this, at least
+    // for cases of low rates (small #packets) and low protection levels.
+
+    float numPacketsFl = 1.0f + ((float) bitRatePerFrame * 1000.0
+                                / (float) (8.0 * _maxPayloadSize) + 0.5);
+
+    const float estNumFecGen = 0.5f + static_cast<float> (_protectionFactorD *
+                                                         numPacketsFl / 255.0f);
+
+
+    // We reduce cost factor (which will reduce overhead for FEC and
+    // hybrid method) and not the protectionFactor.
+    _corrFecCost = 1.0f;
+    if (estNumFecGen < 1.1f && _protectionFactorD < minProtLevelFec)
+    {
+        _corrFecCost = 0.5f;
+    }
+    if (estNumFecGen < 0.9f && _protectionFactorD < minProtLevelFec)
+    {
+        _corrFecCost = 0.0f;
+    }
+
+     // TODO (marpan): Set the UEP protection on/off for Key and Delta frames
+    _useUepProtectionK = _qmRobustness->SetUepProtection(codeRateKey,
+                                                         parameters->bitRate,
+                                                         packetLoss,
+                                                         0);
+
+    _useUepProtectionD = _qmRobustness->SetUepProtection(codeRateDelta,
+                                                         parameters->bitRate,
+                                                         packetLoss,
+                                                         1);
+
+    // DONE WITH FEC PROTECTION SETTINGS
+    return true;
+}
+
+// Returns the average bits per frame (in kbits) available to the FEC-
+// protected (base) layer, scaling both bit rate and frame rate down when
+// temporal layers are in use.
+int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) {
+  // When temporal layers are available FEC will only be applied on the base
+  // layer.
+  const float bitRateRatio =
+    kVp8LayerRateAlloction[parameters->numLayers - 1][0];
+  // Halve the base-layer frame rate once per extra temporal layer. Note:
+  // 0.5f, not the integer expression 1 / 2 (which evaluates to 0 and would
+  // zero the frame rate for numLayers > 1, dividing by zero below).
+  float frameRateRatio = powf(0.5f, parameters->numLayers - 1);
+  float bitRate = parameters->bitRate * bitRateRatio;
+  float frameRate = parameters->frameRate * frameRateRatio;
+
+  // TODO(mikhal): Update factor following testing.
+  float adjustmentFactor = 1;
+
+  // Average bits per frame (units of kbits)
+  return static_cast<int>(adjustmentFactor * bitRate / frameRate);
+}
+
+// Derives the residual packet loss after FEC recovery; the encoder-facing
+// effective loss is currently always reported as zero.
+bool
+VCMFecMethod::EffectivePacketLoss(const VCMProtectionParameters* parameters)
+{
+    // Effective packet loss to encoder is based on RPL (residual packet loss)
+    // this is a soft setting based on degree of FEC protection
+    // RPL = received/input packet loss - average_FEC_recovery
+    // note: received/input packet loss may be filtered based on FilteredLoss
+
+    // The packet loss:
+    WebRtc_UWord8 packetLoss = (WebRtc_UWord8) (255 * parameters->lossPr);
+
+    float avgFecRecov = AvgRecoveryFEC(parameters);
+
+    // Residual Packet Loss:
+    _residualPacketLossFec = (float) (packetLoss - avgFecRecov) / 255.0f;
+
+    // Effective Packet Loss, NA in current version.
+    _effectivePacketLoss = 0;
+
+    return true;
+}
+
+// Recomputes the protection factors and effective loss, derives the FEC bit
+// cost, and converts the factors to be relative to source packets (as the
+// RTP module expects).
+bool
+VCMFecMethod::UpdateParameters(const VCMProtectionParameters* parameters)
+{
+    // Compute the protection factor
+    ProtectionFactor(parameters);
+
+    // Compute the effective packet loss
+    EffectivePacketLoss(parameters);
+
+    // Compute the bit cost
+    // Ignore key frames for now.
+    float fecRate = static_cast<float> (_protectionFactorD) / 255.0f;
+    if (fecRate >= 0.0f)
+    {
+        // use this formula if the fecRate (protection factor) is defined
+        // relative to number of source packets
+        // this is the case for the previous tables:
+        // _efficiency = parameters->bitRate * ( 1.0 - 1.0 / (1.0 + fecRate));
+
+        // in the new tables, the fecRate is defined relative to total number of
+        // packets (total rate), so overhead cost is:
+        _efficiency = parameters->bitRate * fecRate * _corrFecCost;
+    }
+    else
+    {
+        _efficiency = 0.0f;
+    }
+
+    // Protection/fec rates obtained above is defined relative to total number
+    // of packets (total rate: source+fec) FEC in RTP module assumes protection
+    // factor is defined relative to source number of packets so we should
+    // convert the factor to reduce mismatch between mediaOpt suggested rate and
+    // the actual rate
+    _protectionFactorK = ConvertFECRate(_protectionFactorK);
+    _protectionFactorD = ConvertFECRate(_protectionFactorD);
+
+    return true;
+}
+// Initializes the loss-protection state machine with no method selected and
+// default filter seeds (the 0.9999f values seed the recursive filters), then
+// resets all time-based state to nowMs.
+VCMLossProtectionLogic::VCMLossProtectionLogic(int64_t nowMs):
+_selectedMethod(NULL),
+_currentParameters(),
+_rtt(0),
+_lossPr(0.0f),
+_bitRate(0.0f),
+_frameRate(0.0f),
+_keyFrameSize(0.0f),
+_fecRateKey(0),
+_fecRateDelta(0),
+_lastPrUpdateT(0),
+_lossPr255(0.9999f),
+_lossPrHistory(),
+_shortMaxLossPr255(0),
+_packetsPerFrame(0.9999f),
+_packetsPerFrameKey(0.9999f),
+_residualPacketLossFec(0),
+_boostRateKey(2),
+_codecWidth(0),
+_codecHeight(0),
+_numLayers(1)
+{
+    Reset(nowMs);
+}
+
+// Releases the selected protection method (via Release()).
+VCMLossProtectionLogic::~VCMLossProtectionLogic()
+{
+    Release();
+}
+
+// Replaces the selected protection method with a new one of the requested
+// type. Returns false if the type is already selected or unknown; otherwise
+// deletes the old method (if any) and takes ownership of the new one.
+bool
+VCMLossProtectionLogic::SetMethod(enum VCMProtectionMethodEnum newMethodType)
+{
+    if (_selectedMethod != NULL)
+    {
+        if (_selectedMethod->Type() == newMethodType)
+        {
+            // Nothing to update
+            return false;
+        }
+        // New method - delete existing one
+        delete _selectedMethod;
+    }
+    VCMProtectionMethod *newMethod = NULL;
+    switch (newMethodType)
+    {
+        case kNack:
+        {
+            newMethod = new VCMNackMethod();
+            break;
+        }
+        case kFec:
+        {
+            newMethod  = new VCMFecMethod();
+            break;
+        }
+        case kNackFec:
+        {
+            // Default to always having NACK enabled for the hybrid mode.
+            newMethod =  new VCMNackFecMethod(kLowRttNackMs, -1);
+            break;
+        }
+        default:
+        {
+          return false;
+          break; // unreachable after the return above
+        }
+
+    }
+    _selectedMethod = newMethod;
+    return true;
+}
+// Removes the selected method if it matches the given type. Returns false
+// only when no method is selected; returns true otherwise, even if the
+// types did not match and nothing was removed.
+bool
+VCMLossProtectionLogic::RemoveMethod(enum VCMProtectionMethodEnum method)
+{
+    if (_selectedMethod == NULL)
+    {
+        return false;
+    }
+    else if (_selectedMethod->Type() == method)
+    {
+        delete _selectedMethod;
+        _selectedMethod = NULL;
+    }
+    return true;
+}
+
+// Returns the bit rate required by the selected protection method,
+// or 0 if no method is selected.
+float
+VCMLossProtectionLogic::RequiredBitRate() const
+{
+    float RequiredBitRate = 0.0f;
+    if (_selectedMethod != NULL)
+    {
+        RequiredBitRate = _selectedMethod->RequiredBitRate();
+    }
+    return RequiredBitRate;
+}
+
+// Stores the latest round-trip time (ms).
+void
+VCMLossProtectionLogic::UpdateRtt(WebRtc_UWord32 rtt)
+{
+    _rtt = rtt;
+}
+
+// Stores the latest residual packet loss (loss remaining after FEC).
+void
+VCMLossProtectionLogic::UpdateResidualPacketLoss(float residualPacketLoss)
+{
+    _residualPacketLossFec = residualPacketLoss;
+}
+
+// Maintains a windowed max-loss history: within the current short filter
+// window only the running max (_shortMaxLossPr255) is updated; when the
+// window expires, the max is pushed onto the history (shifting older
+// entries) and the running max restarts.
+void
+VCMLossProtectionLogic::UpdateMaxLossHistory(WebRtc_UWord8 lossPr255,
+                                             WebRtc_Word64 now)
+{
+    if (_lossPrHistory[0].timeMs >= 0 &&
+        now - _lossPrHistory[0].timeMs < kLossPrShortFilterWinMs)
+    {
+        // Still inside the current window: just track the max.
+        if (lossPr255 > _shortMaxLossPr255)
+        {
+            _shortMaxLossPr255 = lossPr255;
+        }
+    }
+    else
+    {
+        // Only add a new value to the history once a second
+        if (_lossPrHistory[0].timeMs == -1)
+        {
+            // First, no shift
+            _shortMaxLossPr255 = lossPr255;
+        }
+        else
+        {
+            // Shift
+            for (WebRtc_Word32 i = (kLossPrHistorySize - 2); i >= 0; i--)
+            {
+                _lossPrHistory[i + 1].lossPr255 = _lossPrHistory[i].lossPr255;
+                _lossPrHistory[i + 1].timeMs = _lossPrHistory[i].timeMs;
+            }
+        }
+        if (_shortMaxLossPr255 == 0)
+        {
+            // No samples arrived during the last window; use the new sample.
+            _shortMaxLossPr255 = lossPr255;
+        }
+
+        _lossPrHistory[0].lossPr255 = _shortMaxLossPr255;
+        _lossPrHistory[0].timeMs = now;
+        _shortMaxLossPr255 = 0;
+    }
+}
+
+// Returns the maximum loss (0-255 scale) over the current window and the
+// recorded history, ignoring entries older than the full history span.
+WebRtc_UWord8
+VCMLossProtectionLogic::MaxFilteredLossPr(WebRtc_Word64 nowMs) const
+{
+    WebRtc_UWord8 maxFound = _shortMaxLossPr255;
+    if (_lossPrHistory[0].timeMs == -1)
+    {
+        // Empty history: only the current window's max is available.
+        return maxFound;
+    }
+    for (WebRtc_Word32 i = 0; i < kLossPrHistorySize; i++)
+    {
+        if (_lossPrHistory[i].timeMs == -1)
+        {
+            break;
+        }
+        if (nowMs - _lossPrHistory[i].timeMs >
+            kLossPrHistorySize * kLossPrShortFilterWinMs)
+        {
+            // This sample (and all samples after this) is too old
+            break;
+        }
+        if (_lossPrHistory[i].lossPr255 > maxFound)
+        {
+            // This sample is the largest one this far into the history
+            maxFound = _lossPrHistory[i].lossPr255;
+        }
+    }
+    return maxFound;
+}
+
+WebRtc_UWord8 VCMLossProtectionLogic::FilteredLoss(
+    int64_t nowMs,
+    FilterPacketLossMode filter_mode,
+    WebRtc_UWord8 lossPr255) {
+
+  // Update the max window filter.
+  UpdateMaxLossHistory(lossPr255, nowMs);
+
+  // Update the recursive average filter.
+  _lossPr255.Apply(static_cast<float> (nowMs - _lastPrUpdateT),
+                   static_cast<float> (lossPr255));
+  _lastPrUpdateT = nowMs;
+
+  // Filtered loss: default is received loss (no filtering).
+  WebRtc_UWord8 filtered_loss = lossPr255;
+
+  switch (filter_mode) {
+    case kNoFilter:
+      break;
+    case kAvgFilter:
+      filtered_loss = static_cast<WebRtc_UWord8> (_lossPr255.Value() + 0.5);
+      break;
+    case kMaxFilter:
+      filtered_loss = MaxFilteredLossPr(nowMs);
+      break;
+  }
+
+  return filtered_loss;
+}
+
+void
+VCMLossProtectionLogic::UpdateFilteredLossPr(WebRtc_UWord8 packetLossEnc)
+{
+    _lossPr = (float) packetLossEnc / (float) 255.0;
+}
+
+void
+VCMLossProtectionLogic::UpdateBitRate(float bitRate)
+{
+    _bitRate = bitRate;
+}
+
+void
+VCMLossProtectionLogic::UpdatePacketsPerFrame(float nPackets, int64_t nowMs)
+{
+    _packetsPerFrame.Apply(static_cast<float>(nowMs - _lastPacketPerFrameUpdateT),
+                           nPackets);
+    _lastPacketPerFrameUpdateT = nowMs;
+}
+
+void
+VCMLossProtectionLogic::UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs)
+{
+    _packetsPerFrameKey.Apply(static_cast<float>(nowMs -
+                              _lastPacketPerFrameUpdateTKey), nPackets);
+    _lastPacketPerFrameUpdateTKey = nowMs;
+}
+
+void
+VCMLossProtectionLogic::UpdateKeyFrameSize(float keyFrameSize)
+{
+    _keyFrameSize = keyFrameSize;
+}
+
+void
+VCMLossProtectionLogic::UpdateFrameSize(WebRtc_UWord16 width,
+                                        WebRtc_UWord16 height)
+{
+    _codecWidth = width;
+    _codecHeight = height;
+}
+
+void VCMLossProtectionLogic::UpdateNumLayers(int numLayers) {
+  _numLayers = (numLayers == 0) ? 1 : numLayers;
+}
+
+bool
+VCMLossProtectionLogic::UpdateMethod()
+{
+    if (_selectedMethod == NULL)
+    {
+        return false;
+    }
+    _currentParameters.rtt = _rtt;
+    _currentParameters.lossPr = _lossPr;
+    _currentParameters.bitRate = _bitRate;
+    _currentParameters.frameRate = _frameRate; // rename actual frame rate?
+    _currentParameters.keyFrameSize = _keyFrameSize;
+    _currentParameters.fecRateDelta = _fecRateDelta;
+    _currentParameters.fecRateKey = _fecRateKey;
+    _currentParameters.packetsPerFrame = _packetsPerFrame.Value();
+    _currentParameters.packetsPerFrameKey = _packetsPerFrameKey.Value();
+    _currentParameters.residualPacketLossFec = _residualPacketLossFec;
+    _currentParameters.codecWidth = _codecWidth;
+    _currentParameters.codecHeight = _codecHeight;
+    _currentParameters.numLayers = _numLayers;
+    return _selectedMethod->UpdateParameters(&_currentParameters);
+}
+
+VCMProtectionMethod*
+VCMLossProtectionLogic::SelectedMethod() const
+{
+    return _selectedMethod;
+}
+
+VCMProtectionMethodEnum
+VCMLossProtectionLogic::SelectedType() const
+{
+    return _selectedMethod->Type();
+}
+
+void
+VCMLossProtectionLogic::Reset(int64_t nowMs)
+{
+    _lastPrUpdateT = nowMs;
+    _lastPacketPerFrameUpdateT = nowMs;
+    _lastPacketPerFrameUpdateTKey = nowMs;
+    _lossPr255.Reset(0.9999f);
+    _packetsPerFrame.Reset(0.9999f);
+    _fecRateDelta = _fecRateKey = 0;
+    for (WebRtc_Word32 i = 0; i < kLossPrHistorySize; i++)
+    {
+        _lossPrHistory[i].lossPr255 = 0;
+        _lossPrHistory[i].timeMs = -1;
+    }
+    _shortMaxLossPr255 = 0;
+    Release();
+}
+
+void
+VCMLossProtectionLogic::Release()
+{
+    delete _selectedMethod;
+    _selectedMethod = NULL;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/media_opt_util.h b/trunk/src/modules/video_coding/main/source/media_opt_util.h
new file mode 100644
index 0000000..1f808de
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/media_opt_util.h
@@ -0,0 +1,375 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
+
+#include "typedefs.h"
+#include "trace.h"
+#include "exp_filter.h"
+#include "internal_defines.h"
+#include "qm_select.h"
+
+#include <cmath>
+#include <cstdlib>
+
+
+namespace webrtc
+{
+// Number of time periods used for (max) window filter for packet loss
+// TODO (marpan): set reasonable window size for filtered packet loss,
+// adjustment should be based on logged/real data of loss stats/correlation.
+enum { kLossPrHistorySize = 10 };
+
+// 1000 ms, total filter length is (kLossPrHistorySize * 1000) ms
+enum { kLossPrShortFilterWinMs = 1000 };
+
+// The type of filter used on the received packet loss reports.
+enum FilterPacketLossMode {
+  kNoFilter,    // No filtering on received loss.
+  kAvgFilter,   // Recursive average filter.
+  kMaxFilter    // Max-window filter, over the time interval of:
+                // (kLossPrHistorySize * kLossPrShortFilterWinMs) ms.
+};
+
+// Thresholds for hybrid NACK/FEC
+// common to media optimization and the jitter buffer.
+enum HybridNackTH {
+    kHighRttNackMs = 100,
+    kLowRttNackMs = 20
+};
+
+struct VCMProtectionParameters
+{
+    VCMProtectionParameters() : rtt(0), lossPr(0.0f), bitRate(0.0f),
+        packetsPerFrame(0.0f), packetsPerFrameKey(0.0f), frameRate(0.0f),
+        keyFrameSize(0.0f), fecRateDelta(0), fecRateKey(0),
+        residualPacketLossFec(0.0f), codecWidth(0), codecHeight(0),
+        numLayers(1)
+        {}
+
+    int                 rtt;
+    float               lossPr;
+    float               bitRate;
+    float               packetsPerFrame;
+    float               packetsPerFrameKey;
+    float               frameRate;
+    float               keyFrameSize;
+    WebRtc_UWord8       fecRateDelta;
+    WebRtc_UWord8       fecRateKey;
+    float               residualPacketLossFec;
+    WebRtc_UWord16      codecWidth;
+    WebRtc_UWord16      codecHeight;
+    int                 numLayers;
+};
+
+
+/********************************/
+/* VCMProtectionMethod class    */
+/********************************/
+
+enum VCMProtectionMethodEnum
+{
+    kNack,
+    kFec,
+    kNackFec,
+    kNone
+};
+
+class VCMLossProbabilitySample
+{
+public:
+    VCMLossProbabilitySample() : lossPr255(0), timeMs(-1) {};
+
+    WebRtc_UWord8     lossPr255;
+    WebRtc_Word64     timeMs;
+};
+
+
+class VCMProtectionMethod
+{
+public:
+    VCMProtectionMethod();
+    virtual ~VCMProtectionMethod();
+
+    // Updates the efficiency of the method using the parameters provided
+    //
+    // Input:
+    //         - parameters         : Parameters used to calculate efficiency
+    //
+    // Return value                 : True if this method is recommended in
+    //                                the given conditions.
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters) = 0;
+
+    // Returns the protection type
+    //
+    // Return value                 : The protection type
+    enum VCMProtectionMethodEnum Type() const { return _type; }
+
+    // Returns the bit rate required by this protection method
+    // during these conditions.
+    //
+    // Return value                 : Required bit rate
+    virtual float RequiredBitRate() { return _efficiency; }
+
+    // Returns the effective packet loss for ER, required by this protection method
+    //
+    // Return value                 : Required effective packet loss
+    virtual WebRtc_UWord8 RequiredPacketLossER() { return _effectivePacketLoss; }
+
+    // Extracts the FEC protection factor for Key frame, required by this protection method
+    //
+    // Return value                 : Required protectionFactor for Key frame
+    virtual WebRtc_UWord8 RequiredProtectionFactorK() { return _protectionFactorK; }
+
+    // Extracts the FEC protection factor for Delta frame, required by this protection method
+    //
+    // Return value                 : Required protectionFactor for delta frame
+    virtual WebRtc_UWord8 RequiredProtectionFactorD() { return _protectionFactorD; }
+
+    // Extracts whether the FEC Unequal protection (UEP) is used for Key frame.
+    //
+    // Return value                 : Required Unequal protection on/off state.
+    virtual bool RequiredUepProtectionK() { return _useUepProtectionK; }
+
+    // Extracts whether the FEC Unequal protection (UEP) is used for Delta frame.
+    //
+    // Return value                 : Required Unequal protection on/off state.
+    virtual bool RequiredUepProtectionD() { return _useUepProtectionD; }
+
+    // Updates content metrics
+    void UpdateContentMetrics(const VideoContentMetrics* contentMetrics);
+
+protected:
+
+    WebRtc_UWord8                        _effectivePacketLoss;
+    WebRtc_UWord8                        _protectionFactorK;
+    WebRtc_UWord8                        _protectionFactorD;
+    // Estimation of residual loss after the FEC
+    float                                _residualPacketLossFec;
+    float                                _scaleProtKey;
+    WebRtc_Word32                        _maxPayloadSize;
+
+    VCMQmRobustness*                     _qmRobustness;
+    bool                                 _useUepProtectionK;
+    bool                                 _useUepProtectionD;
+    float                                _corrFecCost;
+    enum VCMProtectionMethodEnum         _type;
+    float                                _efficiency;
+};
+
+class VCMNackMethod : public VCMProtectionMethod
+{
+public:
+    VCMNackMethod();
+    virtual ~VCMNackMethod();
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+    // Get the effective packet loss
+    bool EffectivePacketLoss(const VCMProtectionParameters* parameter);
+};
+
+class VCMFecMethod : public VCMProtectionMethod
+{
+public:
+    VCMFecMethod();
+    virtual ~VCMFecMethod();
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+    // Get the effective packet loss for ER
+    bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
+    // Get the FEC protection factors
+    bool ProtectionFactor(const VCMProtectionParameters* parameters);
+    // Get the boost for key frame protection
+    WebRtc_UWord8 BoostCodeRateKey(WebRtc_UWord8 packetFrameDelta,
+                                   WebRtc_UWord8 packetFrameKey) const;
+    // Convert the rates: defined relative to total# packets or source# packets
+    WebRtc_UWord8 ConvertFECRate(WebRtc_UWord8 codeRate) const;
+    // Get the average effective recovery from FEC: for random loss model
+    float AvgRecoveryFEC(const VCMProtectionParameters* parameters) const;
+    // Update FEC with protectionFactorD
+    void UpdateProtectionFactorD(WebRtc_UWord8 protectionFactorD);
+    // Update FEC with protectionFactorK
+    void UpdateProtectionFactorK(WebRtc_UWord8 protectionFactorK);
+    // Compute the bits per frame. Account for temporal layers when applicable.
+    int BitsPerFrame(const VCMProtectionParameters* parameters);
+};
+
+
+class VCMNackFecMethod : public VCMFecMethod
+{
+public:
+    VCMNackFecMethod(int lowRttNackThresholdMs,
+                     int highRttNackThresholdMs);
+    virtual ~VCMNackFecMethod();
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+    // Get the effective packet loss for ER
+    bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
+    // Get the protection factors
+    bool ProtectionFactor(const VCMProtectionParameters* parameters);
+
+private:
+    int _lowRttNackMs;
+    int _highRttNackMs;
+};
+
+class VCMLossProtectionLogic
+{
+public:
+    VCMLossProtectionLogic(int64_t nowMs);
+    ~VCMLossProtectionLogic();
+
+    // Set the protection method to be used
+    //
+    // Input:
+    //        - newMethodType    : New requested protection method type. If one
+    //                           is already set, it will be deleted and replaced
+    // Return value:             Returns true on update
+    bool SetMethod(enum VCMProtectionMethodEnum newMethodType);
+
+    // Remove requested protection method
+    // Input:
+    //        - method          : method to be removed (if currently selected)
+    //
+    // Return value:             Returns true on update
+    bool RemoveMethod(enum VCMProtectionMethodEnum method);
+
+    // Returns the required bit rate for the currently selected protection method.
+    float RequiredBitRate() const;
+
+    // Update the round-trip time
+    //
+    // Input:
+    //          - rtt           : Round-trip time in milliseconds.
+    void UpdateRtt(WebRtc_UWord32 rtt);
+
+    // Update residual packet loss
+    //
+    // Input:
+    //          - residualPacketLoss  : residual packet loss:
+    //                                  effective loss after FEC recovery
+    void UpdateResidualPacketLoss(float _residualPacketLoss);
+
+    // Update the filtered packet loss.
+    //
+    // Input:
+    //          - packetLossEnc :  The reported packet loss filtered
+    //                             (max window or average)
+    void UpdateFilteredLossPr(WebRtc_UWord8 packetLossEnc);
+
+    // Update the current target bit rate.
+    //
+    // Input:
+    //          - bitRate          : The current target bit rate in kbits/s
+    void UpdateBitRate(float bitRate);
+
+    // Update the number of packets per frame estimate, for delta frames
+    //
+    // Input:
+    //          - nPackets         : Number of packets in the latest sent frame.
+    void UpdatePacketsPerFrame(float nPackets, int64_t nowMs);
+
+    // Update the number of packets per frame estimate, for key frames
+    //
+    // Input:
+    //          - nPackets         : Number of packets in the latest sent frame.
+    void UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs);
+
+    // Update the keyFrameSize estimate
+    //
+    // Input:
+    //          - keyFrameSize     : The size of the latest sent key frame.
+    void UpdateKeyFrameSize(float keyFrameSize);
+
+    // Update the frame rate
+    //
+    // Input:
+    //          - frameRate        : The current target frame rate.
+    void UpdateFrameRate(float frameRate) { _frameRate = frameRate; }
+
+    // Update the frame size
+    //
+    // Input:
+    //          - width        : The codec frame width.
+    //          - height       : The codec frame height.
+    void UpdateFrameSize(WebRtc_UWord16 width, WebRtc_UWord16 height);
+
+    // Update the number of active layers
+    //
+    // Input:
+    //          - numLayers    : Number of layers used.
+    void UpdateNumLayers(int numLayers);
+
+    // The amount of packet loss to cover for with FEC.
+    //
+    // Input:
+    //          - fecRateKey      : Packet loss to cover for with FEC when
+    //                              sending key frames.
+    //          - fecRateDelta    : Packet loss to cover for with FEC when
+    //                              sending delta frames.
+    void UpdateFECRates(WebRtc_UWord8 fecRateKey, WebRtc_UWord8 fecRateDelta)
+                       { _fecRateKey = fecRateKey;
+                         _fecRateDelta = fecRateDelta; }
+
+    // Update the protection methods with the current VCMProtectionParameters
+    // and set the requested protection settings.
+    // Return value     : Returns true on update
+    bool UpdateMethod();
+
+    // Returns the method currently selected.
+    //
+    // Return value                 : The protection method currently selected.
+    VCMProtectionMethod* SelectedMethod() const;
+
+    // Return the protection type of the currently selected method
+    VCMProtectionMethodEnum SelectedType() const;
+
+    // Updates the filtered loss for the average and max window packet loss,
+    // and returns the filtered loss probability in the interval [0, 255].
+    // The returned filtered loss value depends on the parameter |filter_mode|.
+    // The input parameter |lossPr255| is the received packet loss.
+
+    // Return value                 : The filtered loss probability
+    WebRtc_UWord8 FilteredLoss(int64_t nowMs, FilterPacketLossMode filter_mode,
+                               WebRtc_UWord8 lossPr255);
+
+    void Reset(int64_t nowMs);
+
+    void Release();
+
+private:
+    // Updates the max-window packet loss history used by MaxFilteredLossPr.
+    void UpdateMaxLossHistory(WebRtc_UWord8 lossPr255, WebRtc_Word64 now);
+    WebRtc_UWord8 MaxFilteredLossPr(WebRtc_Word64 nowMs) const;
+    VCMProtectionMethod*      _selectedMethod;
+    VCMProtectionParameters   _currentParameters;
+    WebRtc_UWord32            _rtt;
+    float                     _lossPr;
+    float                     _bitRate;
+    float                     _frameRate;
+    float                     _keyFrameSize;
+    WebRtc_UWord8             _fecRateKey;
+    WebRtc_UWord8             _fecRateDelta;
+    WebRtc_Word64             _lastPrUpdateT;
+    WebRtc_Word64             _lastPacketPerFrameUpdateT;
+    WebRtc_Word64             _lastPacketPerFrameUpdateTKey;
+    VCMExpFilter              _lossPr255;
+    VCMLossProbabilitySample  _lossPrHistory[kLossPrHistorySize];
+    WebRtc_UWord8             _shortMaxLossPr255;
+    VCMExpFilter              _packetsPerFrame;
+    VCMExpFilter              _packetsPerFrameKey;
+    float                     _residualPacketLossFec;
+    WebRtc_UWord8             _boostRateKey;
+    WebRtc_UWord16            _codecWidth;
+    WebRtc_UWord16            _codecHeight;
+    int                       _numLayers;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
diff --git a/trunk/src/modules/video_coding/main/source/media_optimization.cc b/trunk/src/modules/video_coding/main/source/media_optimization.cc
new file mode 100644
index 0000000..cae4912
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/media_optimization.cc
@@ -0,0 +1,697 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media_optimization.h"
+#include "content_metrics_processing.h"
+#include "frame_dropper.h"
+#include "qm_select.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+namespace webrtc {
+
+VCMMediaOptimization::VCMMediaOptimization(WebRtc_Word32 id,
+                                           TickTimeBase* clock):
+_id(id),
+_clock(clock),
+_maxBitRate(0),
+_sendCodecType(kVideoCodecUnknown),
+_codecWidth(0),
+_codecHeight(0),
+_initCodecWidth(0),
+_initCodecHeight(0),
+_userFrameRate(0),
+_packetLossEnc(0),
+_fractionLost(0),
+_sendStatisticsZeroEncode(0),
+_maxPayloadSize(1460),
+_targetBitRate(0),
+_incomingFrameRate(0),
+_enableQm(false),
+_videoProtectionCallback(NULL),
+_videoQMSettingsCallback(NULL),
+_encodedFrameSamples(),
+_avgSentBitRateBps(0.0f),
+_keyFrameCnt(0),
+_deltaFrameCnt(0),
+_lastQMUpdateTime(0),
+_lastChangeTime(0),
+_numLayers(0)
+{
+    memset(_sendStatistics, 0, sizeof(_sendStatistics));
+    memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
+
+    _frameDropper  = new VCMFrameDropper(_id);
+    _lossProtLogic = new VCMLossProtectionLogic(_clock->MillisecondTimestamp());
+    _content = new VCMContentMetricsProcessing();
+    _qmResolution = new VCMQmResolution();
+}
+
+VCMMediaOptimization::~VCMMediaOptimization(void)
+{
+    _lossProtLogic->Release();
+    delete _lossProtLogic;
+    delete _frameDropper;
+    delete _content;
+    delete _qmResolution;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::Reset()
+{
+    memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
+    _incomingFrameRate = 0.0;
+    _frameDropper->Reset();
+    _lossProtLogic->Reset(_clock->MillisecondTimestamp());
+    _frameDropper->SetRates(0, 0);
+    _content->Reset();
+    _qmResolution->Reset();
+    _lossProtLogic->UpdateFrameRate(_incomingFrameRate);
+    _lossProtLogic->Reset(_clock->MillisecondTimestamp());
+    _sendStatisticsZeroEncode = 0;
+    _targetBitRate = 0;
+    _codecWidth = 0;
+    _codecHeight = 0;
+    _userFrameRate = 0;
+    _keyFrameCnt = 0;
+    _deltaFrameCnt = 0;
+    _lastQMUpdateTime = 0;
+    _lastChangeTime = 0;
+    for (WebRtc_Word32 i = 0; i < kBitrateMaxFrameSamples; i++)
+    {
+        _encodedFrameSamples[i]._sizeBytes = -1;
+        _encodedFrameSamples[i]._timeCompleteMs = -1;
+    }
+    _avgSentBitRateBps = 0.0f;
+    _numLayers = 1;
+    return VCM_OK;
+}
+
+WebRtc_UWord32
+VCMMediaOptimization::SetTargetRates(WebRtc_UWord32 bitRate,
+                                     WebRtc_UWord8 &fractionLost,
+                                     WebRtc_UWord32 roundTripTimeMs)
+{
+    VCMProtectionMethod *selectedMethod = _lossProtLogic->SelectedMethod();
+    _lossProtLogic->UpdateBitRate(static_cast<float>(bitRate));
+    _lossProtLogic->UpdateRtt(roundTripTimeMs);
+    _lossProtLogic->UpdateResidualPacketLoss(static_cast<float>(fractionLost));
+
+    // Get frame rate for encoder: this is the actual/sent frame rate
+    float actualFrameRate = SentFrameRate();
+
+    // sanity
+    if (actualFrameRate  < 1.0)
+    {
+        actualFrameRate = 1.0;
+    }
+
+    // Update frame rate for the loss protection logic class: frame rate should
+    // be the actual/sent rate
+    _lossProtLogic->UpdateFrameRate(actualFrameRate);
+
+    _fractionLost = fractionLost;
+
+    // Returns the filtered packet loss, used for the protection setting.
+    // The filtered loss may be the received loss (no filter), or some
+    // filtered value (average or max window filter).
+    // Use max window filter for now.
+    FilterPacketLossMode filter_mode = kMaxFilter;
+    WebRtc_UWord8 packetLossEnc = _lossProtLogic->FilteredLoss(
+        _clock->MillisecondTimestamp(), filter_mode, fractionLost);
+
+    // For now use the filtered loss for computing the robustness settings
+    _lossProtLogic->UpdateFilteredLossPr(packetLossEnc);
+
+    // Rate cost of the protection methods
+    uint32_t protection_overhead_kbps = 0;
+
+    // Update protection settings, when applicable
+    float sent_video_rate = 0.0f;
+    if (selectedMethod)
+    {
+        // Update protection method with content metrics
+        selectedMethod->UpdateContentMetrics(_content->ShortTermAvgData());
+
+        // Update method will compute the robustness settings for the given
+        // protection method and the overhead cost
+        // the protection method is set by the user via SetVideoProtection.
+        _lossProtLogic->UpdateMethod();
+
+        // Update protection callback with protection settings.
+        uint32_t sent_video_rate_bps = 0;
+        uint32_t sent_nack_rate_bps = 0;
+        uint32_t sent_fec_rate_bps = 0;
+        // Get the bit cost of protection method, based on the amount of
+        // overhead data actually transmitted (including headers) the last
+        // second.
+        UpdateProtectionCallback(selectedMethod,
+                                 &sent_video_rate_bps,
+                                 &sent_nack_rate_bps,
+                                 &sent_fec_rate_bps);
+        uint32_t sent_total_rate_bps = sent_video_rate_bps +
+            sent_nack_rate_bps + sent_fec_rate_bps;
+        // Estimate the overhead costs of the next second as staying the same
+        // wrt the source bitrate.
+        if (sent_total_rate_bps > 0) {
+          protection_overhead_kbps = static_cast<uint32_t>(bitRate *
+              static_cast<double>(sent_nack_rate_bps + sent_fec_rate_bps) /
+              sent_total_rate_bps + 0.5);
+        }
+        // Cap the overhead estimate to 50%.
+        if (protection_overhead_kbps > bitRate / 2)
+          protection_overhead_kbps = bitRate / 2;
+
+        // Get the effective packet loss for encoder ER
+        // when applicable, should be passed to encoder via fractionLost
+        packetLossEnc = selectedMethod->RequiredPacketLossER();
+        sent_video_rate =  static_cast<float>(sent_video_rate_bps / 1000.0);
+    }
+
+    // Source coding rate: total rate - protection overhead
+    _targetBitRate = bitRate - protection_overhead_kbps;
+
+    // Update encoding rates following protection settings
+    _frameDropper->SetRates(static_cast<float>(_targetBitRate), 0);
+
+    if (_enableQm && _numLayers == 1)
+    {
+        // Update QM with rates
+        _qmResolution->UpdateRates((float)_targetBitRate, sent_video_rate,
+                                  _incomingFrameRate, _fractionLost);
+        // Check for QM selection
+        bool selectQM = checkStatusForQMchange();
+        if (selectQM)
+        {
+            SelectQuality();
+        }
+        // Reset the short-term averaged content data.
+        _content->ResetShortTermAvgData();
+    }
+
+    return _targetBitRate;
+}
+
+int VCMMediaOptimization::UpdateProtectionCallback(
+    VCMProtectionMethod *selected_method,
+    uint32_t* video_rate_bps,
+    uint32_t* nack_overhead_rate_bps,
+    uint32_t* fec_overhead_rate_bps)
+{
+    if (!_videoProtectionCallback)
+    {
+        return VCM_OK;
+    }
+    // Get the FEC code rate for Key frames (set to 0 when NA)
+    const WebRtc_UWord8
+    codeRateKeyRTP  = selected_method->RequiredProtectionFactorK();
+
+    // Get the FEC code rate for Delta frames (set to 0 when NA)
+    const WebRtc_UWord8
+    codeRateDeltaRTP = selected_method->RequiredProtectionFactorD();
+
+    // Get the FEC-UEP protection status for Key frames: UEP on/off
+    const bool
+    useUepProtectionKeyRTP  = selected_method->RequiredUepProtectionK();
+
+    // Get the FEC-UEP protection status for Delta frames: UEP on/off
+    const bool
+    useUepProtectionDeltaRTP = selected_method->RequiredUepProtectionD();
+
+    // NACK is on for NACK and NackFec protection method: off for FEC method
+    bool nackStatus = (selected_method->Type() == kNackFec ||
+                       selected_method->Type() == kNack);
+
+    // TODO(Marco): Pass FEC protection values per layer.
+
+    return _videoProtectionCallback->ProtectionRequest(codeRateDeltaRTP,
+                                                       codeRateKeyRTP,
+                                                       useUepProtectionDeltaRTP,
+                                                       useUepProtectionKeyRTP,
+                                                       nackStatus,
+                                                       video_rate_bps,
+                                                       nack_overhead_rate_bps,
+                                                       fec_overhead_rate_bps);
+}
+
+bool
+VCMMediaOptimization::DropFrame()
+{
+    // leak appropriate number of bytes
+    _frameDropper->Leak((WebRtc_UWord32)(InputFrameRate() + 0.5f));
+    return _frameDropper->DropFrame();
+}
+
+WebRtc_Word32
+VCMMediaOptimization::SentFrameCount(VCMFrameCount &frameCount) const
+{
+    frameCount.numDeltaFrames = _deltaFrameCnt;
+    frameCount.numKeyFrames = _keyFrameCnt;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::SetEncodingData(VideoCodecType sendCodecType,
+                                      WebRtc_Word32 maxBitRate,
+                                      WebRtc_UWord32 frameRate,
+                                      WebRtc_UWord32 bitRate,
+                                      WebRtc_UWord16 width,
+                                      WebRtc_UWord16 height,
+                                      int numLayers)
+{
+    // Everything codec specific should be reset here since this means the codec
+    // has changed. If native dimension values have changed, then either user
+    // initiated change, or QM initiated change. Will be able to determine only
+    // after the processing of the first frame.
+    _lastChangeTime = _clock->MillisecondTimestamp();
+    _content->Reset();
+    _content->UpdateFrameRate(frameRate);
+
+    _maxBitRate = maxBitRate;
+    _sendCodecType = sendCodecType;
+    _targetBitRate = bitRate;
+    _lossProtLogic->UpdateBitRate(static_cast<float>(bitRate));
+    _lossProtLogic->UpdateFrameRate(static_cast<float>(frameRate));
+    _lossProtLogic->UpdateFrameSize(width, height);
+    _lossProtLogic->UpdateNumLayers(numLayers);
+    _frameDropper->Reset();
+    _frameDropper->SetRates(static_cast<float>(bitRate),
+                            static_cast<float>(frameRate));
+    _userFrameRate = static_cast<float>(frameRate);
+    _codecWidth = width;
+    _codecHeight = height;
+    _initCodecWidth = width;
+    _initCodecHeight = height;
+    _numLayers = (numLayers <= 1) ? 1 : numLayers;  // Can also be zero.
+    WebRtc_Word32 ret = VCM_OK;
+    ret = _qmResolution->Initialize((float)_targetBitRate, _userFrameRate,
+                                    _codecWidth, _codecHeight);
+    return ret;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::RegisterProtectionCallback(VCMProtectionCallback*
+                                                 protectionCallback)
+{
+    _videoProtectionCallback = protectionCallback;
+    return VCM_OK;
+
+}
+
+void
+VCMMediaOptimization::EnableFrameDropper(bool enable)
+{
+    _frameDropper->Enable(enable);
+}
+
+void
+VCMMediaOptimization::EnableProtectionMethod(bool enable,
+                                             VCMProtectionMethodEnum method)
+{
+    bool updated = false;
+    if (enable)
+    {
+        updated = _lossProtLogic->SetMethod(method);
+    }
+    else
+    {
+        _lossProtLogic->RemoveMethod(method);
+    }
+    if (updated)
+    {
+        _lossProtLogic->UpdateMethod();
+    }
+}
+
+bool
+VCMMediaOptimization::IsProtectionMethodEnabled(VCMProtectionMethodEnum method)
+{
+    return (_lossProtLogic->SelectedType() == method);
+}
+
+void
+VCMMediaOptimization::SetMtu(WebRtc_Word32 mtu)
+{
+    _maxPayloadSize = mtu;
+}
+
+float
+VCMMediaOptimization::SentFrameRate()
+{
+    if (_frameDropper)
+    {
+        return _frameDropper->ActualFrameRate((WebRtc_UWord32)(InputFrameRate()
+                                                               + 0.5f));
+    }
+
+    return VCM_CODEC_ERROR;
+}
+
+float
+VCMMediaOptimization::SentBitRate()
+{
+    UpdateBitRateEstimate(-1, _clock->MillisecondTimestamp());
+    return _avgSentBitRateBps / 1000.0f;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::MaxBitRate()
+{
+    return _maxBitRate;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::UpdateWithEncodedData(WebRtc_Word32 encodedLength,
+                                            FrameType encodedFrameType)
+{
+    // TODO: the ViE version (debug mode) also needs the number of layers.
+    UpdateBitRateEstimate(encodedLength, _clock->MillisecondTimestamp());
+    if(encodedLength > 0)
+    {
+        const bool deltaFrame = (encodedFrameType != kVideoFrameKey &&
+                                 encodedFrameType != kVideoFrameGolden); // key and golden both count as key frames
+
+        _frameDropper->Fill(encodedLength, deltaFrame);
+        if (_maxPayloadSize > 0 && encodedLength > 0) // second test is redundant (guarded above)
+        {
+            const float minPacketsPerFrame = encodedLength /
+                                             static_cast<float>(_maxPayloadSize); // lower bound: ignores per-packet overhead
+            if (deltaFrame)
+            {
+                _lossProtLogic->UpdatePacketsPerFrame(
+                    minPacketsPerFrame, _clock->MillisecondTimestamp());
+            }
+            else
+            {
+                _lossProtLogic->UpdatePacketsPerFrameKey(
+                    minPacketsPerFrame, _clock->MillisecondTimestamp());
+            }
+
+            if (_enableQm)
+            {
+                // update quality select with encoded length
+                _qmResolution->UpdateEncodedSize(encodedLength,
+                                                 encodedFrameType);
+            }
+        }
+        if (!deltaFrame && encodedLength > 0) // length check redundant here as well
+        {
+            _lossProtLogic->UpdateKeyFrameSize(static_cast<float>(encodedLength));
+        }
+
+        // updating counters
+        if (deltaFrame)
+        {
+            _deltaFrameCnt++;
+        }
+        else
+        {
+            _keyFrameCnt++;
+        }
+
+    }
+
+     return VCM_OK;
+
+}
+
+void VCMMediaOptimization::UpdateBitRateEstimate(WebRtc_Word64 encodedLength,
+                                                 WebRtc_Word64 nowMs)
+{
+    int i = kBitrateMaxFrameSamples - 1; // scan newest-to-oldest
+    WebRtc_UWord32 frameSizeSum = 0;     // bytes seen inside the averaging window
+    WebRtc_Word64 timeOldest = -1;       // oldest in-window sample time, -1 if none
+    // Find an empty slot for storing the new sample and at the same time
+    // accumulate the history. A sample with _sizeBytes == -1 is unused.
+    for (; i >= 0; i--)
+    {
+        if (_encodedFrameSamples[i]._sizeBytes == -1)
+        {
+            // Found empty slot
+            break;
+        }
+        if (nowMs - _encodedFrameSamples[i]._timeCompleteMs <
+            kBitrateAverageWinMs)
+        {
+            frameSizeSum += static_cast<WebRtc_UWord32>
+                            (_encodedFrameSamples[i]._sizeBytes);
+            if (timeOldest == -1)
+            {
+                timeOldest = _encodedFrameSamples[i]._timeCompleteMs;
+            }
+        }
+    }
+    if (encodedLength > 0) // negative length means "refresh only", see SentBitRate()
+    {
+        if (i < 0)
+        {
+            // No empty slot, shift
+            for (i = kBitrateMaxFrameSamples - 2; i >= 0; i--)
+            {
+                _encodedFrameSamples[i + 1] = _encodedFrameSamples[i];
+            }
+            i++;
+        }
+        // Insert new sample
+        _encodedFrameSamples[i]._sizeBytes = encodedLength;
+        _encodedFrameSamples[i]._timeCompleteMs = nowMs;
+    }
+    if (timeOldest > -1)
+    {
+        // Update average bit rate; exclude the -1 "no new sample" sentinel.
+        float denom = static_cast<float>(nowMs - timeOldest);
+        if (denom < 1.0)
+        {
+            denom = 1.0;
+        }
+        _avgSentBitRateBps = (frameSizeSum + (encodedLength > 0 ? encodedLength : 0)) * 8 * 1000 / denom;
+    }
+    else if (encodedLength > 0)
+    {
+        _avgSentBitRateBps = static_cast<float>(encodedLength * 8);
+    }
+    else
+    {
+        _avgSentBitRateBps = 0;
+    }
+}
+
+
+WebRtc_Word32
+VCMMediaOptimization::RegisterVideoQMCallback(VCMQMSettingsCallback*
+                                              videoQMSettings)
+{
+    _videoQMSettingsCallback = videoQMSettings;
+    // Quality-mode (QM) processing is active iff a callback is registered.
+    if (_videoQMSettingsCallback != NULL)
+    {
+        _enableQm = true;
+    }
+    else
+    {
+        _enableQm = false;
+    }
+    return VCM_OK;
+}
+
+void
+VCMMediaOptimization::updateContentData(const VideoContentMetrics*
+                                        contentMetrics)
+{
+    // Updating content metrics
+    if (contentMetrics == NULL)
+    {
+         // Disable QM if metrics are NULL
+         _enableQm = false;
+         _qmResolution->Reset();
+    }
+    else
+    {
+        _content->UpdateContentData(contentMetrics); // accumulate for a later SelectQuality()
+    }
+}
+
+WebRtc_Word32
+VCMMediaOptimization::SelectQuality()
+{
+    // Reset quantities for QM select
+    _qmResolution->ResetQM();
+
+    // Update QM with long-term averaged content metrics.
+    _qmResolution->UpdateContent(_content->LongTermAvgData());
+
+    // Select quality mode
+    VCMResolutionScale* qm = NULL;
+    WebRtc_Word32 ret = _qmResolution->SelectResolution(&qm);
+    if (ret < 0)
+    {
+        return ret;
+    }
+
+    // Check for updates to spatial/temporal modes
+    QMUpdate(qm);
+
+    // Reset all the rate and related frame counters quantities
+    _qmResolution->ResetRates();
+
+    // Reset counters
+    _lastQMUpdateTime = _clock->MillisecondTimestamp();
+
+    // Reset content metrics
+    _content->Reset();
+
+    return VCM_OK;
+}
+
+
+// Check timing constraints before allowing a quality-mode (QM) change.
+// (Only timing is checked here; detection of significant scene-content or
+// target-bit-rate changes is not performed in this function.)
+
+bool
+VCMMediaOptimization::checkStatusForQMchange()
+{
+
+    bool status  = true; // allow a QM change unless a timing constraint fails
+
+    // Check that we do not call QMSelect too often, and that we waited some time
+    // (to sample the metrics) from the event lastChangeTime
+    // lastChangeTime is the time where user changed the size/rate/frame rate
+    // (via SetEncodingData)
+    WebRtc_Word64 now = _clock->MillisecondTimestamp();
+    if ((now - _lastQMUpdateTime) < kQmMinIntervalMs ||
+        (now  - _lastChangeTime) <  kQmMinIntervalMs)
+    {
+        status = false;
+    }
+
+    return status;
+
+}
+
+bool
+VCMMediaOptimization::QMUpdate(VCMResolutionScale* qm)
+{
+    // Check for no change
+    if (qm->spatialHeightFact == 1 &&
+        qm->spatialWidthFact == 1 &&
+        qm->temporalFact == 1) {
+        return false;
+    }
+
+    // Temporal
+    WebRtc_UWord32 frameRate = static_cast<WebRtc_UWord32>
+                               (_incomingFrameRate + 0.5f);
+
+    // Check if go back up in temporal resolution
+    if (qm->temporalFact == 0) {
+      // Currently only allow for 1/2 frame rate reduction per action.
+      // TODO (marpan): allow for 2/3 reduction.
+      frameRate = (WebRtc_UWord32)(2 * _incomingFrameRate);
+    }
+    // go down in temporal resolution
+    else {
+      frameRate = (WebRtc_UWord32)(_incomingFrameRate / qm->temporalFact + 1);
+    }
+    // Reset _incomingFrameRate if temporal action was selected.
+    if  (qm->temporalFact != 1) {
+      memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
+      _incomingFrameRate = frameRate;
+    }
+
+    // Spatial
+    WebRtc_UWord32 height = _codecHeight;
+    WebRtc_UWord32 width = _codecWidth;
+    // Check if go back up in spatial resolution, and update frame sizes.
+    // Currently only allow for 2x2 spatial down-sampling.
+    // TODO (marpan): allow for 1x2, 2x1, and 4/3x4/3 (or 3/2x3/2).
+    if (qm->spatialHeightFact == 0 && qm->spatialWidthFact == 0) {
+      width = _codecWidth * 2;
+      height = _codecHeight * 2;
+    } else {
+      width = _codecWidth / qm->spatialWidthFact;
+      height = _codecHeight / qm->spatialHeightFact;
+    }
+    _codecWidth = width;
+    _codecHeight = height;
+
+    // New frame sizes should never exceed the original sizes
+    // from SetEncodingData().
+    assert(_codecWidth <= _initCodecWidth);
+    assert(_codecHeight <= _initCodecHeight);
+    // %u matches the unsigned args ("%f" for the integer frameRate was varargs UB).
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, _id,
+               "Quality Mode Update: W = %u, H = %u, FR = %u",
+               width, height, frameRate);
+
+    // Update VPM with new target frame rate and size
+    _videoQMSettingsCallback->SetVideoQMSettings(frameRate, width, height);
+
+    _content->UpdateFrameRate(frameRate);
+    _qmResolution->UpdateCodecFrameSize(width, height);
+
+    return true;
+}
+
+void
+VCMMediaOptimization::UpdateIncomingFrameRate()
+{
+    WebRtc_Word64 now = _clock->MillisecondTimestamp();
+    if (_incomingFrameTimes[0] == 0)
+    {
+        // First frame since reset: no history to shift yet.
+    } else
+    {
+        // Shift the history one step; the oldest entry falls off the end.
+        for(WebRtc_Word32 i = (kFrameCountHistorySize - 2); i >= 0 ; i--)
+        {
+            _incomingFrameTimes[i+1] = _incomingFrameTimes[i];
+        }
+    }
+    _incomingFrameTimes[0] = now;
+    ProcessIncomingFrameRate(now); // recompute _incomingFrameRate from the history
+}
+
+// Allows VCM to keep track of the incoming frame rate.
+void
+VCMMediaOptimization::ProcessIncomingFrameRate(WebRtc_Word64 now)
+{
+    WebRtc_Word32 num = 0;
+    WebRtc_Word32 nrOfFrames = 0; // frames counted inside the history window
+    for (num = 1; num < (kFrameCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimes[num] <= 0 ||
+            // don't use data older than 2 s (kFrameHistoryWinMs)
+            now - _incomingFrameTimes[num] > kFrameHistoryWinMs)
+        {
+            break;
+        } else
+        {
+            nrOfFrames++;
+        }
+    }
+    if (num > 1)
+    {
+        const WebRtc_Word64 diff = now - _incomingFrameTimes[num-1]; // span back to the oldest usable sample
+        _incomingFrameRate = 1.0; // fallback if all samples share one timestamp
+        if(diff >0)
+        {
+            _incomingFrameRate = nrOfFrames * 1000.0f / static_cast<float>(diff);
+        }
+    }
+}
+
+WebRtc_UWord32
+VCMMediaOptimization::InputFrameRate()
+{
+    ProcessIncomingFrameRate(_clock->MillisecondTimestamp()); // refresh _incomingFrameRate first
+    return WebRtc_UWord32 (_incomingFrameRate + 0.5f); // round to the nearest integer fps
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/media_optimization.h b/trunk/src/modules/video_coding/main/source/media_optimization.h
new file mode 100644
index 0000000..14e5d1a
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/media_optimization.h
@@ -0,0 +1,212 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
+
+#include "module_common_types.h"
+#include "video_coding.h"
+#include "trace.h"
+#include "media_opt_util.h"
+#include "qm_select.h"
+
+namespace webrtc
+{
+
+enum { kBitrateMaxFrameSamples = 60 };   // capacity of the encoded-frame sample history
+enum { kBitrateAverageWinMs    = 1000 }; // sliding-window length for the sent-bitrate average
+
+class TickTimeBase;
+class VCMContentMetricsProcessing;
+class VCMFrameDropper;
+
+struct VCMEncodedFrameSample
+{
+    VCMEncodedFrameSample() : _sizeBytes(-1), _timeCompleteMs(-1) {} // -1 marks an unused slot
+
+    WebRtc_Word64     _sizeBytes;      // encoded size in bytes; -1 when empty
+    WebRtc_Word64     _timeCompleteMs; // completion timestamp in ms; -1 when empty
+};
+
+class VCMMediaOptimization
+{
+public:
+    VCMMediaOptimization(WebRtc_Word32 id, TickTimeBase* clock);
+    ~VCMMediaOptimization(void);
+    /*
+    * Reset the Media Optimization module
+    */
+    WebRtc_Word32 Reset();
+    /**
+    * Set target Rates for the encoder given the channel parameters
+    * Inputs:       bitRate - target bitRate, in the conference case this is the rate
+    *                         between the sending client and the server
+    *               fractionLost - packet loss in % in the network
+    *               roundTripTimeMs - round trip time in milliseconds
+    *               NOTE(review): minBitRate/maxBitRate were documented here but
+    *               are not parameters of this method -- stale doc, confirm.
+    */
+    WebRtc_UWord32 SetTargetRates(WebRtc_UWord32 bitRate,
+                                  WebRtc_UWord8 &fractionLost,
+                                  WebRtc_UWord32 roundTripTimeMs);
+
+    /**
+    * Inform media optimization of initial encoding state
+    */
+    WebRtc_Word32 SetEncodingData(VideoCodecType sendCodecType,
+                                  WebRtc_Word32 maxBitRate,
+                                  WebRtc_UWord32 frameRate,
+                                  WebRtc_UWord32 bitRate,
+                                  WebRtc_UWord16 width,
+                                  WebRtc_UWord16 height,
+                                  int numTemporalLayers);
+    /**
+    * Enable or disable a loss-protection method
+    */
+    void EnableProtectionMethod(bool enable, VCMProtectionMethodEnum method);
+    /**
+    * Returns whether or not the given protection method is the selected one
+    */
+    bool IsProtectionMethodEnabled(VCMProtectionMethodEnum method);
+    /**
+    * Updates the max payload size (MTU)
+    */
+    void SetMtu(WebRtc_Word32 mtu);
+    /*
+    * Get actual input frame rate
+    */
+    WebRtc_UWord32 InputFrameRate();
+
+    /*
+    * Get actual sent frame rate
+    */
+    float SentFrameRate();
+    /*
+    * Get actual sent bit rate
+    */
+    float SentBitRate();
+    /*
+    * Get maximum allowed bit rate
+    */
+    WebRtc_Word32 MaxBitRate();
+    /*
+    * Inform Media Optimization of encoding output: Length and frame type
+    */
+    WebRtc_Word32 UpdateWithEncodedData(WebRtc_Word32 encodedLength,
+                                        FrameType encodedFrameType);
+    /*
+    * Register a protection callback to be used to inform the user about the
+    * protection methods used
+    */
+    WebRtc_Word32 RegisterProtectionCallback(VCMProtectionCallback*
+                                             protectionCallback);
+    /*
+    * Register a quality settings callback; a non-NULL callback also enables QM
+    */
+    WebRtc_Word32 RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings);
+    void EnableFrameDropper(bool enable);
+
+    bool DropFrame();
+
+    /*
+    * Get number of key/delta frames encoded
+    */
+    WebRtc_Word32 SentFrameCount(VCMFrameCount &frameCount) const;
+
+    /*
+    *  update incoming frame rate value
+    */
+    void UpdateIncomingFrameRate();
+
+    /**
+    * Update content metric Data
+    */
+    void updateContentData(const VideoContentMetrics* contentMetrics);
+
+    /**
+    * Compute new Quality Mode
+    */
+    WebRtc_Word32 SelectQuality();
+
+private:
+
+    /*
+     *  Update protection callback with protection settings
+     */
+    int UpdateProtectionCallback(VCMProtectionMethod *selected_method,
+                                 uint32_t* total_video_rate_bps,
+                                 uint32_t* nack_overhead_rate_bps,
+                                 uint32_t* fec_overhead_rate_bps);
+
+    void UpdateBitRateEstimate(WebRtc_Word64 encodedLength, WebRtc_Word64 nowMs);
+    /*
+    * verify if QM settings differ from default, i.e. if an update is required
+    * Compute actual values, as will be sent to the encoder
+    */
+    bool QMUpdate(VCMResolutionScale* qm);
+    /**
+    * check if we should make a QM change
+    * returns true if yes, false otherwise
+    */
+    bool checkStatusForQMchange();
+
+    void ProcessIncomingFrameRate(WebRtc_Word64 now);
+
+    enum { kFrameCountHistorySize = 90};
+    enum { kFrameHistoryWinMs = 2000};
+
+    WebRtc_Word32                     _id;
+    TickTimeBase*                     _clock;
+    WebRtc_Word32                     _maxBitRate;
+    VideoCodecType                    _sendCodecType;
+    WebRtc_UWord16                    _codecWidth;
+    WebRtc_UWord16                    _codecHeight;
+    WebRtc_UWord16                    _initCodecWidth;
+    WebRtc_UWord16                    _initCodecHeight;
+    float                             _userFrameRate;
+
+    VCMFrameDropper*                  _frameDropper;
+    VCMLossProtectionLogic*           _lossProtLogic;
+    WebRtc_UWord8                     _packetLossEnc;
+    WebRtc_UWord8                     _fractionLost;
+
+
+    WebRtc_UWord32                    _sendStatistics[4];
+    WebRtc_UWord32                    _sendStatisticsZeroEncode;
+    WebRtc_Word32                     _maxPayloadSize;
+    WebRtc_UWord32                    _targetBitRate;
+
+    float                             _incomingFrameRate;
+    WebRtc_Word64                     _incomingFrameTimes[kFrameCountHistorySize]; // newest first, ms timestamps
+
+    bool                              _enableQm; // true iff a QM settings callback is registered
+
+    VCMProtectionCallback*            _videoProtectionCallback;
+    VCMQMSettingsCallback*            _videoQMSettingsCallback;
+
+    VCMEncodedFrameSample             _encodedFrameSamples[kBitrateMaxFrameSamples]; // recent encoded-frame sizes/times
+    float                             _avgSentBitRateBps;
+
+    WebRtc_UWord32                    _keyFrameCnt;
+    WebRtc_UWord32                    _deltaFrameCnt;
+
+    VCMContentMetricsProcessing*      _content;
+    VCMQmResolution*                  _qmResolution;
+
+    WebRtc_Word64                     _lastQMUpdateTime;
+    WebRtc_Word64                     _lastChangeTime; // content/user triggered
+    int                               _numLayers;
+
+
+}; // end of VCMMediaOptimization class definition
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
diff --git a/trunk/src/modules/video_coding/main/source/mock/fake_tick_time.h b/trunk/src/modules/video_coding/main/source/mock/fake_tick_time.h
new file mode 100644
index 0000000..c6da348
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/mock/fake_tick_time.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MOCK_FAKE_TICK_TIME_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MOCK_FAKE_TICK_TIME_H_
+
+#include <assert.h>
+
+#include <limits>
+
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+namespace webrtc {
+
+// Provides a fake implementation of TickTimeBase, intended for offline
+// testing. This implementation does not query the system clock, but returns a
+// time value set by the user when creating the object, and incremented with
+// the method IncrementDebugClock.
+class FakeTickTime : public TickTimeBase {
+ public:
+  explicit FakeTickTime(int64_t start_time_ms) : fake_now_ms_(start_time_ms) {}
+  virtual ~FakeTickTime() {}
+  virtual int64_t MillisecondTimestamp() const {
+    return fake_now_ms_;  // fake time only; never reads the system clock
+  }
+  virtual int64_t MicrosecondTimestamp() const {
+    return 1000 * fake_now_ms_;  // derived from the same fake ms counter
+  }
+  virtual void IncrementDebugClock(int64_t increase_ms) {  // advance fake time
+    assert(increase_ms <= std::numeric_limits<int64_t>::max() - fake_now_ms_);
+    fake_now_ms_ += increase_ms;
+  }
+
+ private:
+  int64_t fake_now_ms_;  // current fake time in milliseconds
+};
+
+}  // namespace
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MOCK_FAKE_TICK_TIME_H_
diff --git a/trunk/src/modules/video_coding/main/source/nack_fec_tables.h b/trunk/src/modules/video_coding/main/source/nack_fec_tables.h
new file mode 100644
index 0000000..88e225d
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/nack_fec_tables.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
+
+namespace webrtc
+{
+
+// Table for adjusting FEC rate for NACK/FEC protection method
+// Table values are built as a sigmoid function, ranging from 0 to
+// kHighRttNackMs (100), based on the HybridNackTH values defined in
+// media_opt_util.h.
+const WebRtc_UWord16 VCMNackFecTable[100] = { // presumably indexed by RTT in ms (0..99) -- see media_opt_util.h
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+1,
+1,
+1,
+1,
+2,
+2,
+2,
+3,
+3,
+4,
+5,
+6,
+7,
+9,
+10,
+12,
+15,
+18,
+21,
+24,
+28,
+32,
+37,
+41,
+46,
+51,
+56,
+61,
+66,
+70,
+74,
+78,
+81,
+84,
+86,
+89,
+90,
+92,
+93,
+95,
+95,
+96,
+97,
+97,
+98,
+98,
+99,
+99,
+99,
+99,
+99,
+99,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
diff --git a/trunk/src/modules/video_coding/main/source/packet.cc b/trunk/src/modules/video_coding/main/source/packet.cc
new file mode 100644
index 0000000..e52cbdd
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/packet.cc
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "packet.h"
+#include "module_common_types.h"
+
+#include <assert.h>
+
+namespace webrtc {
+
+VCMPacket::VCMPacket() // creates an empty packet; all fields at their "unset" defaults
+  :
+    payloadType(0),
+    timestamp(0),
+    seqNum(0),
+    dataPtr(NULL),
+    sizeBytes(0),
+    markerBit(false),
+    frameType(kFrameEmpty),
+    codec(kVideoCodecUnknown),
+    isFirstPacket(false),
+    completeNALU(kNaluUnset),
+    insertStartCode(false),
+    codecSpecificHeader() {
+}
+
+VCMPacket::VCMPacket(const WebRtc_UWord8* ptr, // builds a packet from a received RTP header + payload pointer
+                               const WebRtc_UWord32 size,
+                               const WebRtcRTPHeader& rtpHeader) :
+    payloadType(rtpHeader.header.payloadType),
+    timestamp(rtpHeader.header.timestamp),
+    seqNum(rtpHeader.header.sequenceNumber),
+    dataPtr(ptr),
+    sizeBytes(size),
+    markerBit(rtpHeader.header.markerBit),
+
+    frameType(rtpHeader.frameType),
+    codec(kVideoCodecUnknown),
+    isFirstPacket(rtpHeader.type.Video.isFirstPacket),
+    completeNALU(kNaluComplete),
+    insertStartCode(false),
+    codecSpecificHeader(rtpHeader.type.Video)
+{
+    CopyCodecSpecifics(rtpHeader.type.Video); // sets |codec| and may refine |completeNALU|
+}
+
+VCMPacket::VCMPacket(const WebRtc_UWord8* ptr, WebRtc_UWord32 size, WebRtc_UWord16 seq, WebRtc_UWord32 ts, bool mBit) : // convenience ctor: raw payload, no RTP header
+    payloadType(0),
+    timestamp(ts),
+    seqNum(seq),
+    dataPtr(ptr),
+    sizeBytes(size),
+    markerBit(mBit),
+
+    frameType(kVideoFrameDelta),
+    codec(kVideoCodecUnknown),
+    isFirstPacket(false),
+    completeNALU(kNaluComplete),
+    insertStartCode(false),
+    codecSpecificHeader()
+{}
+
+void VCMPacket::Reset() { // restores all fields to the default-constructed state
+  payloadType = 0;
+  timestamp = 0;
+  seqNum = 0;
+  dataPtr = NULL;
+  sizeBytes = 0;
+  markerBit = false;
+  frameType = kFrameEmpty;
+  codec = kVideoCodecUnknown;
+  isFirstPacket = false;
+  completeNALU = kNaluUnset;
+  insertStartCode = false;
+  memset(&codecSpecificHeader, 0, sizeof(RTPVideoHeader));
+}
+
+void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader) // derives |codec| and VP8 NALU completeness from the RTP video header
+{
+    switch(videoHeader.codec)
+    {
+        case kRTPVideoVP8:
+            {
+                // Handle all packets within a frame as depending on the previous packet
+                // TODO(holmer): This should be changed to make fragments independent
+                // when the VP8 RTP receiver supports fragments.
+                if (isFirstPacket && markerBit)
+                    completeNALU = kNaluComplete;
+                else if (isFirstPacket)
+                    completeNALU = kNaluStart;
+                else if (markerBit)
+                    completeNALU = kNaluEnd;
+                else
+                    completeNALU = kNaluIncomplete;
+
+                codec = kVideoCodecVP8;
+                break;
+            }
+        case kRTPVideoI420:
+            {
+                codec = kVideoCodecI420;
+                break;
+            }
+        default:
+            {
+                codec = kVideoCodecUnknown; // unrecognized payload: completeNALU keeps the ctor's value
+                break;
+            }
+    }
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/packet.h b/trunk/src/modules/video_coding/main/source/packet.h
new file mode 100644
index 0000000..2035653
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/packet.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
+#define WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
+
+#include "typedefs.h"
+#include "module_common_types.h"
+#include "jitter_buffer_common.h"
+
+namespace webrtc
+{
+
+class VCMPacket
+{
+public:
+    VCMPacket();
+    VCMPacket(const WebRtc_UWord8* ptr,
+              const WebRtc_UWord32 size,
+              const WebRtcRTPHeader& rtpHeader);
+    VCMPacket(const WebRtc_UWord8* ptr,
+              WebRtc_UWord32 size,
+              WebRtc_UWord16 seqNum,
+              WebRtc_UWord32 timestamp,
+              bool markerBit);
+
+    void Reset();
+
+    WebRtc_UWord8           payloadType;
+    WebRtc_UWord32          timestamp;
+    WebRtc_UWord16          seqNum;
+    const WebRtc_UWord8*    dataPtr;   // payload pointer; not owned by VCMPacket
+    WebRtc_UWord32          sizeBytes;
+    bool                    markerBit;
+
+    FrameType               frameType;
+    webrtc::VideoCodecType  codec;
+
+    bool isFirstPacket;                 // Is this first packet in a frame.
+    VCMNaluCompleteness completeNALU;   // Default is kNaluUnset (set by default ctor and Reset()).
+    bool insertStartCode;               // True if a start code should be inserted before this
+                                        // packet.
+    RTPVideoHeader codecSpecificHeader;
+
+protected:
+    void CopyCodecSpecifics(const RTPVideoHeader& videoHeader);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
diff --git a/trunk/src/modules/video_coding/main/source/qm_select.cc b/trunk/src/modules/video_coding/main/source/qm_select.cc
new file mode 100644
index 0000000..c4bd707
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/qm_select.cc
@@ -0,0 +1,664 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/qm_select.h"
+
+#include <math.h>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/internal_defines.h"
+#include "modules/video_coding/main/source/qm_select_data.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// QM-METHOD class
+
+VCMQmMethod::VCMQmMethod()
+    : _contentMetrics(NULL),
+      _width(0),
+      _height(0),
+      _nativeWidth(0),
+      _nativeHeight(0),
+      _frameRateLevel(kDefault),
+      _init(false) {
+  ResetQM();  // initializes _aspectRatio/_imageType/_motion/_spatial/_contentClass
+}
+
+VCMQmMethod::~VCMQmMethod() {  // _contentMetrics is not owned here; nothing to release
+}
+
+void VCMQmMethod::ResetQM() {
+  _aspectRatio = 1.0f;
+  _imageType = 2;  // default image type: VGA-class (see GetImageType)
+  _motion.Reset();
+  _spatial.Reset();
+  _contentClass = 0;
+}
+
+uint8_t VCMQmMethod::ComputeContentClass() {
+  ComputeMotionNFD();
+  ComputeSpatial();
+  return _contentClass = 3 * _motion.level + _spatial.level;  // 3x3 grid: motion level x spatial level
+}
+
+void VCMQmMethod::UpdateContent(const VideoContentMetrics*  contentMetrics) {
+  _contentMetrics = contentMetrics;  // keeps the pointer; no copy is made
+}
+
+void VCMQmMethod::ComputeMotionNFD() {
+  if (_contentMetrics) {
+    _motion.value = _contentMetrics->motion_magnitude;  // keeps the previous value when metrics are absent
+  }
+  // Determine motion level.
+  if (_motion.value < kLowMotionNfd) {
+    _motion.level = kLow;
+  } else if (_motion.value > kHighMotionNfd) {
+    _motion.level  = kHigh;
+  } else {
+    _motion.level = kDefault;
+  }
+}
+
+void VCMQmMethod::ComputeSpatial() {  // classifies texture/spatial detail into low/default/high
+  float spatialErr = 0.0;
+  float spatialErrH = 0.0;
+  float spatialErrV = 0.0;
+  if (_contentMetrics) {
+    spatialErr =  _contentMetrics->spatial_pred_err;
+    spatialErrH = _contentMetrics->spatial_pred_err_h;
+    spatialErrV = _contentMetrics->spatial_pred_err_v;
+  }
+  // Spatial measure: take average of 3 prediction errors.
+  _spatial.value = (spatialErr + spatialErrH + spatialErrV) / 3.0f;
+
+  // Reduce thresholds for large scenes/higher pixel correlation (~>=WHD).
+  float scale2 = _imageType > 3 ? kScaleTexture : 1.0;
+
+  if (_spatial.value > scale2 * kHighTexture) {
+    _spatial.level = kHigh;
+  } else if (_spatial.value < scale2 * kLowTexture) {
+    _spatial.level = kLow;
+  } else {
+    _spatial.level = kDefault;
+  }
+}
+
+uint8_t VCMQmMethod::GetImageType(uint16_t width,
+                                  uint16_t height) {
+  // Get the closest image type for encoder frame size.
+  uint32_t imageSize = static_cast<uint32_t>(width) * height;  // cast first: uint16*uint16 promotes to int and can overflow
+  if (imageSize < kFrameSizeTh[0]) {
+    return 0;  // QCIF
+  } else if (imageSize < kFrameSizeTh[1]) {
+    return 1;  // CIF
+  } else if (imageSize < kFrameSizeTh[2]) {
+    return 2;  // VGA
+  } else if (imageSize < kFrameSizeTh[3]) {
+    return 3;  // 4CIF
+  } else if (imageSize < kFrameSizeTh[4]) {
+    return 4;  // 720,4:3
+  } else if (imageSize < kFrameSizeTh[5]) {
+    return 5;  // WHD
+  } else {
+    return 6;  // HD
+  }
+}
+
+LevelClass VCMQmMethod::FrameRateLevel(float avgFrameRate) {
+  if (avgFrameRate < kLowFrameRate) {
+    return kLow;      // below the low-frame-rate threshold
+  } else if (avgFrameRate > kHighFrameRate) {
+    return kHigh;     // above the high-frame-rate threshold
+  } else {
+    return kDefault;  // within the normal operating range
+  }
+}
+
+// RESOLUTION CLASS
+
+VCMQmResolution::VCMQmResolution()
+    :  _qm(new VCMResolutionScale()) {  // owned; released in the destructor
+  Reset();
+}
+
+VCMQmResolution::~VCMQmResolution() {
+  delete _qm;  // allocated in the constructor
+}
+
+void VCMQmResolution::ResetRates() {  // clears the accumulators sampled between SelectResolution() calls
+  _sumTargetRate = 0.0f;
+  _sumIncomingFrameRate = 0.0f;
+  _sumRateMM = 0.0f;
+  _sumRateMMSgn = 0;
+  _sumPacketLoss = 0.0f;
+  _frameCnt = 0;
+  _frameCntDelta = 0;
+  _lowBufferCnt = 0;
+  _updateRateCnt = 0;
+}
+
+void VCMQmResolution::ResetDownSamplingState() {
+  _stateDecFactorSpatial = 1;  // 1 == no spatial down-sampling
+  _stateDecFactorTemp  = 1;    // 1 == no temporal down-sampling
+}
+
+void VCMQmResolution::Reset() {  // full reset: rates, averages, down-sampling state and base-class QM state
+  _targetBitRate = 0.0f;
+  _userFrameRate = 0.0f;
+  _incomingFrameRate = 0.0f;
+  _perFrameBandwidth =0.0f;
+  _bufferLevel = 0.0f;
+  _avgTargetRate = 0.0f;
+  _avgIncomingFrameRate = 0.0f;
+  _avgRatioBufferLow = 0.0f;
+  _avgRateMisMatch = 0.0f;
+  _avgRateMisMatchSgn = 0.0f;
+  _avgPacketLoss = 0.0f;
+  _encoderState = kStableEncoding;
+  ResetRates();
+  ResetDownSamplingState();
+  ResetQM();
+}
+
+EncoderState VCMQmResolution::GetEncoderState() {
+  return _encoderState;  // kStableEncoding right after Reset()
+}
+
+// Initialize state after re-initializing the encoder,
+// i.e., after SetEncodingData() in mediaOpt.
+int VCMQmResolution::Initialize(float bitRate,
+                                float userFrameRate,
+                                uint16_t width,
+                                uint16_t height) {
+  if (userFrameRate == 0.0f || width == 0 || height == 0) {
+    return VCM_PARAMETER_ERROR;  // also avoids divide-by-zero below
+  }
+  Reset();
+  _targetBitRate = bitRate;
+  _userFrameRate = userFrameRate;
+  _incomingFrameRate = userFrameRate;
+  UpdateCodecFrameSize(width, height);
+  _nativeWidth = width;
+  _nativeHeight = height;
+  // Initial buffer level.
+  _bufferLevel = kInitBufferLevel * _targetBitRate;
+  // Per-frame bandwidth.
+  _perFrameBandwidth = _targetBitRate / _userFrameRate;
+  _init  = true;
+  return VCM_OK;
+}
+
+void VCMQmResolution::UpdateCodecFrameSize(uint16_t width, uint16_t height) {  // called from Initialize() and on QM resolution changes
+  _width = width;
+  _height = height;
+  // Set the imageType for the encoder width/height.
+  _imageType = GetImageType(width, height);
+}
+
+// Update rate data after every encoded frame.
+void VCMQmResolution::UpdateEncodedSize(int encodedSize,
+                                        FrameType encodedFrameType) {  // NOTE(review): encodedFrameType is currently unused
+  _frameCnt++;
+  // Convert to Kbps.
+  float encodedSizeKbits = static_cast<float>((encodedSize * 8.0) / 1000.0);
+
+  // Update the buffer level:
+  // Note this is not the actual encoder buffer level.
+  // |_bufferLevel| is reset to 0 every time SelectResolution is called, and
+  // does not account for frame dropping by encoder or VCM.
+  _bufferLevel += _perFrameBandwidth - encodedSizeKbits;
+  // Counter for occurrences of low buffer level:
+  // low/negative values means encoder is likely dropping frames.
+  if (_bufferLevel <= kPercBufferThr * kOptBufferLevel * _targetBitRate) {
+    _lowBufferCnt++;
+  }
+}
+
+// Update various quantities after SetTargetRates in MediaOpt.
+void VCMQmResolution::UpdateRates(float targetBitRate,
+                                  float encoderSentRate,
+                                  float incomingFrameRate,
+                                  uint8_t packetLoss) {
+  // Sum the target bitrate and incoming frame rate:
+  // these values are the encoder rates (from previous update ~1sec),
+  // i.e, before the update for next ~1sec.
+  _sumTargetRate += _targetBitRate;
+  _sumIncomingFrameRate += _incomingFrameRate;
+  _updateRateCnt++;
+
+  // Sum the received (from RTCP reports) packet loss rates.
+  _sumPacketLoss += static_cast<float>(packetLoss / 255.0);
+
+  // Sum the sequence rate mismatch:
+  // Mismatch here is based on the difference between the target rate
+  // used (in previous ~1sec) and the average actual encoding rate measured
+  // at previous ~1sec.
+  float diff = _targetBitRate - encoderSentRate;
+  if (_targetBitRate > 0.0)
+    _sumRateMM += fabs(diff) / _targetBitRate;
+  int sgnDiff = diff > 0 ? 1 : (diff < 0 ? -1 : 0);
+  // To check for consistent under(+)/over_shooting(-) of target rate.
+  _sumRateMMSgn += sgnDiff;
+
+  // Update with the current new target and frame rate:
+  // these values are ones the encoder will use for the current/next ~1sec
+  _targetBitRate =  targetBitRate;
+  _incomingFrameRate = incomingFrameRate;
+
+  // Update the per_frame_bandwidth:
+  // this is the per_frame_bw for the current/next ~1sec
+  _perFrameBandwidth  = 0.0f;
+  if (_incomingFrameRate > 0.0f) {
+    _perFrameBandwidth = _targetBitRate / _incomingFrameRate;
+  }
+}
+
+// Select the resolution factors: frame size and frame rate change (qm scales).
+// Selection is for going down in resolution, or for going back up
+// (if a previous down-sampling action was taken).
+
+// In the current version the following constraints are imposed:
+// 1) we only allow for one action (either down or back up) at a given time.
+// 2) the possible down-sampling actions are: 2x2 spatial and 1/2 temporal.
+// 3) the total amount of down-sampling (spatial and/or temporal) from the
+//    initial (native) resolution is limited by various factors.
+
+// TODO(marpan): extend to allow options for: 4/3x4/3, 1x2, 2x1 spatial,
+// and 2/3 temporal (i.e., skip every third frame).
+int VCMQmResolution::SelectResolution(VCMResolutionScale** qm) {
+  if (!_init) {
+    return VCM_UNINITIALIZED;
+  }
+  if (_contentMetrics == NULL) {
+    Reset();
+    *qm =  _qm;
+    return VCM_OK;
+  }
+
+  // Default settings: no action.
+  _qm->spatialWidthFact = 1;
+  _qm->spatialHeightFact = 1;
+  _qm->temporalFact = 1;
+  *qm = _qm;
+
+  // Compute content class for selection.
+  _contentClass = ComputeContentClass();
+
+  // Compute various rate quantities for selection.
+  ComputeRatesForSelection();
+
+  // Get the encoder state.
+  ComputeEncoderState();
+
+  // Check for going back up in resolution, if we have had some down-sampling
+  // relative to native state in Initialize (i.e., after SetEncodingData()
+  // in mediaOpt.).
+  if (_stateDecFactorSpatial > 1 || _stateDecFactorTemp > 1) {
+    if (GoingUpResolution()) {
+      *qm = _qm;
+      return VCM_OK;
+    }
+  }
+
+  // Check for going down in resolution, only if current total amount of
+  // down-sampling state is below threshold.
+  if (_stateDecFactorTemp * _stateDecFactorSpatial < kMaxDownSample) {
+    if (GoingDownResolution()) {
+      *qm = _qm;
+      return VCM_OK;
+    }
+  }
+  return VCM_OK;
+}
+
+void VCMQmResolution::ComputeRatesForSelection() {
+  _avgTargetRate = 0.0f;
+  _avgIncomingFrameRate = 0.0f;
+  _avgRatioBufferLow = 0.0f;
+  _avgRateMisMatch = 0.0f;
+  _avgRateMisMatchSgn = 0.0f;
+  _avgPacketLoss = 0.0f;
+  if (_frameCnt > 0) {
+    _avgRatioBufferLow = static_cast<float>(_lowBufferCnt) /
+        static_cast<float>(_frameCnt);
+  }
+  if (_updateRateCnt > 0) {
+    _avgRateMisMatch = static_cast<float>(_sumRateMM) /
+        static_cast<float>(_updateRateCnt);
+    _avgRateMisMatchSgn = static_cast<float>(_sumRateMMSgn) /
+        static_cast<float>(_updateRateCnt);
+    _avgTargetRate = static_cast<float>(_sumTargetRate) /
+        static_cast<float>(_updateRateCnt);
+    _avgIncomingFrameRate = static_cast<float>(_sumIncomingFrameRate) /
+        static_cast<float>(_updateRateCnt);
+    _avgPacketLoss =  static_cast<float>(_sumPacketLoss) /
+        static_cast<float>(_updateRateCnt);
+  }
+  // For selection we may want to weight some quantities more heavily
+  // with the current (i.e., next ~1sec) rate values.
+  float weight = 0.7f;
+  _avgTargetRate = weight * _avgTargetRate + (1.0 - weight) * _targetBitRate;
+  _avgIncomingFrameRate = weight * _avgIncomingFrameRate +
+      (1.0 - weight) * _incomingFrameRate;
+  _frameRateLevel = FrameRateLevel(_avgIncomingFrameRate);
+}
+
+void VCMQmResolution::ComputeEncoderState() {
+  // Default.
+  _encoderState = kStableEncoding;
+
+  // Assign stressed state if:
+  // 1) occurrences of low buffer levels is high, or
+  // 2) rate mis-match is high, and consistent over-shooting by encoder.
+  if ((_avgRatioBufferLow > kMaxBufferLow) ||
+      ((_avgRateMisMatch > kMaxRateMisMatch) &&
+          (_avgRateMisMatchSgn < -kRateOverShoot))) {
+    _encoderState = kStressedEncoding;
+  }
+  // Assign easy state if:
+  // 1) rate mis-match is high, and
+  // 2) consistent under-shooting by encoder.
+  if ((_avgRateMisMatch > kMaxRateMisMatch) &&
+      (_avgRateMisMatchSgn > kRateUnderShoot)) {
+    _encoderState = kEasyEncoding;
+  }
+}
+
+bool VCMQmResolution::GoingUpResolution() {
+  // Check if we should go up both spatially and temporally.
+  if (_stateDecFactorSpatial > 1 && _stateDecFactorTemp > 1) {
+    if (ConditionForGoingUp(2, 2, 2, kTransRateScaleUpSpatialTemp)) {
+      _qm->spatialHeightFact = 0;
+      _qm->spatialWidthFact = 0;
+      _qm->temporalFact = 0;
+      UpdateDownsamplingState(kUpResolution);
+      return true;
+    }
+  } else {
+    // Check if we should go up either spatially or temporally.
+    bool selectedUpS = false;
+    bool selectedUpT = false;
+    if (_stateDecFactorSpatial > 1) {
+      selectedUpS = ConditionForGoingUp(2, 2, 1, kTransRateScaleUpSpatial);
+    }
+    if (_stateDecFactorTemp > 1) {
+      selectedUpT = ConditionForGoingUp(1, 1, 2, kTransRateScaleUpTemp);
+    }
+    if (selectedUpS && !selectedUpT) {
+      _qm->spatialHeightFact = 0;
+      _qm->spatialWidthFact = 0;
+      UpdateDownsamplingState(kUpResolution);
+      return true;
+    } else if (!selectedUpS && selectedUpT) {
+      _qm->temporalFact = 0;
+      UpdateDownsamplingState(kUpResolution);
+      return true;
+    } else if (selectedUpS && selectedUpT) {
+      // TODO(marpan): which one to pick?
+      // pickSpatialOrTemporal()
+      // For now take spatial over temporal.
+      _qm->spatialHeightFact = 0;
+      _qm->spatialWidthFact = 0;
+      UpdateDownsamplingState(kUpResolution);
+      return true;
+    }
+  }
+  return false;
+}
+
+bool VCMQmResolution::ConditionForGoingUp(uint8_t facWidth,
+                                          uint8_t facHeight,
+                                          uint8_t facTemp,
+                                          float scaleFac) {
+  float estimatedTransitionRateUp = GetTransitionRate(facWidth, facHeight,
+                                                    facTemp, scaleFac);
+  // Go back up if:
+  // 1) target rate is above threshold and current encoder state is stable, or
+  // 2) encoder state is easy (encoder is significantly under-shooting target).
+  if (((_avgTargetRate > estimatedTransitionRateUp) &&
+      (_encoderState == kStableEncoding)) ||
+      (_encoderState == kEasyEncoding)) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+bool VCMQmResolution::GoingDownResolution() {
+  float estimatedTransitionRateDown = GetTransitionRate(1, 1, 1, 1.0);
+  float maxRate = kFrameRateFac[_frameRateLevel] * kMaxRateQm[_imageType];
+
+  // TODO(marpan): Bias down-sampling based on packet loss conditions.
+
+  // Resolution reduction if:
+  // (1) target rate is below transition rate, or
+  // (2) encoder is in stressed state and target rate below a max threshold.
+  if ((_avgTargetRate < estimatedTransitionRateDown ) ||
+      (_encoderState == kStressedEncoding && _avgTargetRate < maxRate)) {
+    // Get the down-sampling action.
+    uint8_t spatialFact = kSpatialAction[_contentClass];
+    uint8_t tempFact = kTemporalAction[_contentClass];
+
+    switch (spatialFact) {
+      case 4: {
+        _qm->spatialWidthFact = 2;
+        _qm->spatialHeightFact = 2;
+        break;
+      }
+      case 2: {
+        assert(false);  // Currently not used.
+        // Select 1x2,2x1, or 4/3x4/3.
+        // SelectSpatialDirectionMode((float) estimatedTransitionRateDown);
+        break;
+      }
+      case 1: {
+        _qm->spatialWidthFact = 1;
+        _qm->spatialHeightFact = 1;
+        break;
+      }
+      default: {
+        assert(false);
+      }
+    }
+    switch (tempFact) {
+      case 2: {
+        _qm->temporalFact = 2;
+        break;
+      }
+      case 1: {
+        _qm->temporalFact = 1;
+        break;
+      }
+      default: {
+        assert(false);
+      }
+    }
+    // Adjust some cases based on frame rate.
+    // TODO(marpan): will be modified when we add 1/2 spatial and 2/3 temporal.
+    AdjustAction();
+
+    // Sanity checks on down-sampling selection:
+    // override the settings for too small image size and/or frame rate.
+    // Also check the limit on current down-sampling states.
+
+    // No spatial sampling if current frame size is too small (QCIF),
+    // or if amount of spatial down-sampling is already too much.
+    if ((_width * _height) <= kMinImageSize ||
+        _stateDecFactorSpatial >= kMaxSpatialDown) {
+      _qm->spatialWidthFact = 1;
+      _qm->spatialHeightFact = 1;
+    }
+    // No frame rate reduction if average frame rate is below some point,
+    // or if the amount of temporal down-sampling is already too much.
+    if (_avgIncomingFrameRate <= kMinFrameRate ||
+        _stateDecFactorTemp >= kMaxTempDown) {
+      _qm->temporalFact = 1;
+    }
+
+    // Update down-sampling state.
+    if (_qm->spatialWidthFact != 1 || _qm->spatialHeightFact != 1 ||
+               _qm->temporalFact != 1) {
+      UpdateDownsamplingState(kDownResolution);
+      return true;
+    }
+  }
+  return false;
+}
+
+float VCMQmResolution::GetTransitionRate(uint8_t facWidth,
+                                         uint8_t facHeight,
+                                         uint8_t facTemp,
+                                         float scaleFac) {
+  uint8_t imageType = GetImageType(facWidth * _width,
+                                   facHeight * _height);
+  LevelClass frameRateLevel = FrameRateLevel(facTemp * _avgIncomingFrameRate);
+
+  // The maximum allowed rate below which down-sampling is allowed:
+  // Nominal values based on image format (frame size and frame rate).
+  float maxRate = kFrameRateFac[frameRateLevel] * kMaxRateQm[imageType];
+
+  uint8_t imageClass = imageType > 3 ? 1: 0;
+  uint8_t tableIndex = imageClass * 9 + _contentClass;
+  // Scale factor for down-sampling transition threshold:
+  // factor based on the content class and the image size.
+  float scaleTransRate = kScaleTransRateQm[tableIndex];
+
+  // Threshold bitrate for resolution action.
+  return static_cast<float> (scaleFac * facTemp * _incomingFrameRate *
+      scaleTransRate * maxRate / 30);
+}
+
+void VCMQmResolution::UpdateDownsamplingState(ResolutionAction action) {
+  // Assumes for now the only actions are 1/2 frame rate or 2x2 spatial.
+  if (action == kUpResolution) {
+    if (_qm->spatialHeightFact == 0 && _qm->spatialWidthFact == 0) {
+      _stateDecFactorSpatial = _stateDecFactorSpatial / 4;
+      assert(_stateDecFactorSpatial >= 1);
+    }
+    if (_qm->temporalFact == 0) {
+      _stateDecFactorTemp = _stateDecFactorTemp / 2;
+      assert(_stateDecFactorTemp >= 1);
+    }
+  } else if (action == kDownResolution) {
+    _stateDecFactorSpatial = _stateDecFactorSpatial * _qm->spatialWidthFact
+        * _qm->spatialHeightFact;
+    _stateDecFactorTemp = _stateDecFactorTemp * _qm->temporalFact;
+    assert(_stateDecFactorSpatial >= 1);
+    assert(_stateDecFactorTemp >= 1);
+  } else {
+    assert(false);
+  }
+}
+
+void VCMQmResolution::AdjustAction() {
+  if (_spatial.level == kDefault && _motion.level != kHigh &&
+      _frameRateLevel == kHigh) {
+      _qm->temporalFact = 2;
+      _qm->spatialWidthFact = 1;
+      _qm->spatialHeightFact = 1;
+  }
+}
+
+// TODO(marpan): Update this when we allow for 1/2 spatial down-sampling.
+void VCMQmResolution::SelectSpatialDirectionMode(float transRate) {
+  // Default is 1x2 (H)
+  // For bit rates well below transitional rate, we select 2x2.
+  if (_targetBitRate < transRate * kRateRedSpatial2X2) {
+    _qm->spatialWidthFact = 2;
+    _qm->spatialHeightFact = 2;
+  }
+  // Otherwise check prediction errors and aspect ratio.
+  float spatialErr = 0.0;
+  float spatialErrH = 0.0;
+  float spatialErrV = 0.0;
+  if (_contentMetrics) {
+    spatialErr = _contentMetrics->spatial_pred_err;
+    spatialErrH = _contentMetrics->spatial_pred_err_h;
+    spatialErrV = _contentMetrics->spatial_pred_err_v;
+  }
+
+  // Favor 1x2 if aspect_ratio is 16:9.
+  if (_aspectRatio >= 16.0f / 9.0f) {
+    // Check if 1x2 has lowest prediction error.
+    if (spatialErrH < spatialErr && spatialErrH < spatialErrV) {
+      _qm->spatialWidthFact = 2;
+      _qm->spatialHeightFact = 1;
+    }
+  }
+  // Check for 2x2 selection: favor 2x2 over 1x2 and 2x1.
+  if (spatialErr < spatialErrH * (1.0f + kSpatialErr2x2VsHoriz) &&
+      spatialErr < spatialErrV * (1.0f + kSpatialErr2X2VsVert)) {
+    _qm->spatialWidthFact = 2;
+    _qm->spatialHeightFact = 2;
+  }
+  // Check for 2x1 selection.
+  if (spatialErrV < spatialErrH * (1.0f - kSpatialErrVertVsHoriz) &&
+      spatialErrV < spatialErr * (1.0f - kSpatialErr2X2VsVert)) {
+    _qm->spatialWidthFact = 1;
+    _qm->spatialHeightFact = 2;
+  }
+}
+
+// ROBUSTNESS CLASS
+
+VCMQmRobustness::VCMQmRobustness() {
+  Reset();
+}
+
+VCMQmRobustness::~VCMQmRobustness() {
+}
+
+void VCMQmRobustness::Reset() {
+  _prevTotalRate = 0.0f;
+  _prevRttTime = 0;
+  _prevPacketLoss = 0;
+  _prevCodeRateDelta = 0;
+  ResetQM();
+}
+
+// Adjust the FEC rate based on the content and the network state
+// (packet loss rate, total rate/bandwidth, round trip time).
+// Note that packetLoss here is the filtered loss value.
+float VCMQmRobustness::AdjustFecFactor(uint8_t codeRateDelta,
+                                       float totalRate,
+                                       float frameRate,
+                                       uint32_t rttTime,
+                                       uint8_t packetLoss) {
+  // Default: no adjustment
+  float adjustFec =  1.0f;
+  if (_contentMetrics == NULL) {
+    return adjustFec;
+  }
+  // Compute class state of the content.
+  ComputeMotionNFD();
+  ComputeSpatial();
+
+  // TODO(marpan): Set FEC adjustment factor.
+
+  // Keep track of previous values of network state:
+  // adjustment may be also based on pattern of changes in network state.
+  _prevTotalRate = totalRate;
+  _prevRttTime = rttTime;
+  _prevPacketLoss = packetLoss;
+  _prevCodeRateDelta = codeRateDelta;
+  return adjustFec;
+}
+
+// Set the UEP (unequal-protection across packets) on/off for the FEC.
+bool VCMQmRobustness::SetUepProtection(uint8_t codeRateDelta,
+                                       float totalRate,
+                                       uint8_t packetLoss,
+                                       bool frameType) {
+  // Default.
+  return false;
+}
+}  // end of namespace
diff --git a/trunk/src/modules/video_coding/main/source/qm_select.h b/trunk/src/modules/video_coding/main/source/qm_select.h
new file mode 100644
index 0000000..1859530
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/qm_select.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+
+#include "common_types.h"
+#include "typedefs.h"
+
+/******************************************************/
+/* Quality Modes: Resolution and Robustness settings  */
+/******************************************************/
+
+namespace webrtc {
+struct VideoContentMetrics;
+
+struct VCMResolutionScale {
+  VCMResolutionScale()
+      : spatialWidthFact(1),
+        spatialHeightFact(1),
+        temporalFact(1) {
+  }
+  uint8_t spatialWidthFact;
+  uint8_t spatialHeightFact;
+  uint8_t temporalFact;
+};
+
+enum LevelClass {
+  kLow,
+  kHigh,
+  kDefault
+};
+
+struct VCMContFeature {
+  VCMContFeature()
+      : value(0.0f),
+        level(kDefault) {
+  }
+  void Reset() {
+    value = 0.0f;
+    level = kDefault;
+  }
+  float value;
+  LevelClass level;
+};
+
+enum ResolutionAction {
+  kDownResolution,
+  kUpResolution,
+  kNoChangeResolution
+};
+
+enum EncoderState {
+  kStableEncoding,    // Low rate mis-match, stable buffer levels.
+  kStressedEncoding,  // Significant over-shooting of target rate,
+                      // Buffer under-flow, etc.
+  kEasyEncoding       // Significant under-shooting of target rate.
+};
+
+// QmMethod class: main class for resolution and robustness settings
+
+class VCMQmMethod {
+ public:
+  VCMQmMethod();
+  virtual ~VCMQmMethod();
+
+  // Reset values
+  void ResetQM();
+  virtual void Reset() = 0;
+
+  // Compute content class.
+  uint8_t ComputeContentClass();
+
+  // Update with the content metrics.
+  void UpdateContent(const VideoContentMetrics* contentMetrics);
+
+  // Compute spatial texture magnitude and level.
+  // Spatial texture is a spatial prediction error measure.
+  void ComputeSpatial();
+
+  // Compute motion magnitude and level for NFD metric.
+  // NFD is normalized frame difference (normalized by spatial variance).
+  void ComputeMotionNFD();
+
+  // Get the imageType (CIF, VGA, HD, etc) for the system width/height.
+  uint8_t GetImageType(uint16_t width, uint16_t height);
+
+  // Get the frame rate level.
+  LevelClass FrameRateLevel(float frame_rate);
+
+ protected:
+  // Content Data.
+  const VideoContentMetrics* _contentMetrics;
+
+  // Encoder frame sizes and native frame sizes.
+  uint16_t _width;
+  uint16_t _height;
+  uint16_t _nativeWidth;
+  uint16_t _nativeHeight;
+  float _aspectRatio;
+  // Image type and frame rate level, for the current encoder resolution.
+  uint8_t _imageType;
+  LevelClass _frameRateLevel;
+  // Content class data.
+  VCMContFeature _motion;
+  VCMContFeature _spatial;
+  uint8_t _contentClass;
+  bool _init;
+};
+
+// Resolution settings class
+
+class VCMQmResolution : public VCMQmMethod {
+ public:
+  VCMQmResolution();
+  virtual ~VCMQmResolution();
+
+  // Reset all quantities.
+  virtual void Reset();
+
+  // Reset rate quantities and counters after every SelectResolution() call.
+  void ResetRates();
+
+  // Reset down-sampling state.
+  void ResetDownSamplingState();
+
+  // Get the encoder state.
+  EncoderState GetEncoderState();
+
+  // Initialize after SetEncodingData in media_opt.
+  int Initialize(float bitRate, float userFrameRate,
+                 uint16_t width, uint16_t height);
+
+  // Update the encoder frame size.
+  void UpdateCodecFrameSize(uint16_t width, uint16_t height);
+
+  // Update with actual bit rate (size of the latest encoded frame)
+  // and frame type, after every encoded frame.
+  void UpdateEncodedSize(int encodedSize,
+                         FrameType encodedFrameType);
+
+  // Update with new target bitrate, actual encoder sent rate, frame_rate,
+  // loss rate: every ~1 sec from SetTargetRates in media_opt.
+  void UpdateRates(float targetBitRate, float encoderSentRate,
+                   float incomingFrameRate, uint8_t packetLoss);
+
+  // Extract ST (spatio-temporal) resolution action.
+  // Inputs: qm: Reference to the quality modes pointer.
+  // Output: the spatial and/or temporal scale change.
+  int SelectResolution(VCMResolutionScale** qm);
+
+  // Compute rates for the selection of down-sampling action.
+  void ComputeRatesForSelection();
+
+  // Compute the encoder state.
+  void ComputeEncoderState();
+
+  // Return true if the action is to go back up in resolution.
+  bool GoingUpResolution();
+
+  // Return true if the action is to go down in resolution.
+  bool GoingDownResolution();
+
+  // Check the condition for going up in resolution by the scale factors:
+  // |facWidth|, |facHeight|, |facTemp|.
+  // |scaleFac| is a scale factor for the transition rate.
+  bool ConditionForGoingUp(uint8_t facWidth, uint8_t facHeight,
+                           uint8_t facTemp,
+                           float scaleFac);
+
+  // Get the bitrate threshold for the resolution action.
+  // The case |facWidth|=|facHeight|=|facTemp|==1 is for down-sampling action.
+  // |scaleFac| is a scale factor for the transition rate.
+  float GetTransitionRate(uint8_t facWidth, uint8_t facHeight,
+                          uint8_t facTemp, float scaleFac);
+
+  // Update the downsampling state.
+  void UpdateDownsamplingState(ResolutionAction action);
+
+  void AdjustAction();
+
+  // Select the directional (1x2 or 2x1) spatial down-sampling action.
+  void SelectSpatialDirectionMode(float transRate);
+
+ private:
+  VCMResolutionScale* _qm;
+  // Encoder rate control parameters.
+  float _targetBitRate;
+  float _userFrameRate;
+  float _incomingFrameRate;
+  float _perFrameBandwidth;
+  float _bufferLevel;
+
+  // Data accumulated every ~1sec from MediaOpt.
+  float _sumTargetRate;
+  float _sumIncomingFrameRate;
+  float _sumRateMM;
+  float _sumRateMMSgn;
+  float  _sumPacketLoss;
+  // Counters.
+  uint32_t _frameCnt;
+  uint32_t _frameCntDelta;
+  uint32_t _updateRateCnt;
+  uint32_t _lowBufferCnt;
+
+  // Resolution state parameters.
+  uint8_t _stateDecFactorSpatial;
+  uint8_t _stateDecFactorTemp;
+
+  // Quantities used for selection.
+  float _avgTargetRate;
+  float _avgIncomingFrameRate;
+  float _avgRatioBufferLow;
+  float _avgRateMisMatch;
+  float _avgRateMisMatchSgn;
+  float _avgPacketLoss;
+  EncoderState _encoderState;
+};
+
+// Robustness settings class.
+
+class VCMQmRobustness : public VCMQmMethod {
+ public:
+  VCMQmRobustness();
+  ~VCMQmRobustness();
+
+  virtual void Reset();
+
+  // Adjust FEC rate based on content: every ~1 sec from SetTargetRates.
+  // Returns an adjustment factor.
+  float AdjustFecFactor(uint8_t codeRateDelta,
+                        float totalRate,
+                        float frameRate,
+                        uint32_t rttTime,
+                        uint8_t packetLoss);
+
+  // Set the UEP protection on/off.
+  bool SetUepProtection(uint8_t codeRateDelta,
+                        float totalRate,
+                        uint8_t packetLoss,
+                        bool frameType);
+
+ private:
+  // Previous state of network parameters.
+  float _prevTotalRate;
+  uint32_t _prevRttTime;
+  uint8_t _prevPacketLoss;
+  uint8_t _prevCodeRateDelta;
+};
+}   // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+
diff --git a/trunk/src/modules/video_coding/main/source/qm_select_data.h b/trunk/src/modules/video_coding/main/source/qm_select_data.h
new file mode 100644
index 0000000..d4af642
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/qm_select_data.h
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
+
+/***************************************************************
+* QMSelectData.h
+* This file includes parameters for content-aware media optimization.
+****************************************************************/
+
+#include "typedefs.h"
+
+namespace webrtc {
+//
+// PARAMETERS FOR RESOLUTION ADAPTATION
+//
+
+// Initial level of buffer in secs: should correspond to wrapper settings.
+const float kInitBufferLevel = 0.5f;
+
+// Optimal level of buffer in secs: should correspond to wrapper settings.
+const float kOptBufferLevel = 0.6f;
+
+// Threshold of (max) buffer size below which we consider too low (underflow).
+const float kPercBufferThr = 0.10f;
+
+// Threshold on the occurrences of low buffer levels.
+const float kMaxBufferLow = 0.5f;
+
+// Threshold on rate mismatch
+const float kMaxRateMisMatch = 0.5f;
+
+// Threshold on amount of under/over encoder shooting.
+const float kRateOverShoot = 0.75f;
+const float kRateUnderShoot = 0.75f;
+
+// Factor for transitional rate for going back up in resolution.
+const float kTransRateScaleUpSpatial = 1.25f;
+const float kTransRateScaleUpTemp = 1.25f;
+const float kTransRateScaleUpSpatialTemp = 1.25f;
+
+// Threshold on packet loss rate, above which favor resolution reduction.
+const float kPacketLossThr = 0.1f;
+
+// Factor for reducing transitional bitrate under packet loss.
+const float kPacketLossRateFac = 1.0f;
+
+// Maximum possible transitional rate for down-sampling:
+// (units in kbps), for 30fps.
+const uint16_t kMaxRateQm[7] = {
+    100,   // QCIF
+    250,   // CIF
+    500,   // VGA
+    800,   // 4CIF
+    1000,  // 720 HD 4:3,
+    1500,  // 720 HD 16:9
+    2000   // 1080HD
+};
+
+// Frame rate scale for maximum transition rate.
+const float kFrameRateFac[3] = {
+    0.7f,  // L
+    1.0f,  // H
+    0.8f   // D
+};
+
+// Scale for transitional rate: based on content class
+// motion=L/H/D,spatial==L/H/D: for low, high, middle levels
+const float kScaleTransRateQm[18] = {
+    // 4CIF and lower
+    0.50f,       // L, L
+    0.50f,       // L, H
+    0.50f,       // L, D
+    0.50f,       // H ,L
+    0.25f,       // H, H
+    0.25f,       // H, D
+    0.50f,       // D, L
+    0.50f,       // D, D
+    0.25f,       // D, H
+
+    // over 4CIF: WHD, HD
+    0.50f,       // L, L
+    0.50f,       // L, H
+    0.50f,       // L, D
+    0.50f,       // H ,L
+    0.25f,       // H, H
+    0.25f,       // H, D
+    0.50f,       // D, L
+    0.50f,       // D, D
+    0.25f,       // D, H
+};
+
+// Action for down-sampling:
+// motion=L/H/D,spatial==L/H/D: for low, high, middle levels
+const uint8_t kSpatialAction[9] = {
+    1,       // L, L
+    1,       // L, H
+    1,       // L, D
+    4,       // H ,L
+    1,       // H, H
+    4,       // H, D
+    4,       // D, L
+    1,       // D, H
+    1,       // D, D
+};
+
+const uint8_t kTemporalAction[9] = {
+    1,       // L, L
+    2,       // L, H
+    2,       // L, D
+    1,       // H ,L
+    2,       // H, H
+    1,       // H, D
+    1,       // D, L
+    2,       // D, H
+    1,       // D, D
+};
+
+// Control the total amount of down-sampling allowed.
+const int kMaxSpatialDown = 16;
+const int kMaxTempDown = 4;
+const int kMaxDownSample = 16;
+
+// Minimum image size for a spatial down-sampling.
+const int kMinImageSize= 176 * 144;
+
+// Minimum frame rate for temporal down-sampling:
+// no frame rate reduction if incomingFrameRate <= kMinFrameRate.
+const int kMinFrameRate = 8;
+
+// Boundaries for the closest standard frame size
+const uint32_t kFrameSizeTh[6] = {
+    63360,    // between 176*144 and 352*288
+    204288,   // between 352*288 and 640*480
+    356352,   // between 640*480 and 704*576
+    548352,   // between 704*576 and 960*720
+    806400,   // between 960*720 and 1280*720
+    1497600,  // between 1280*720 and 1920*1080
+};
+
+//
+// PARAMETERS FOR FEC ADJUSTMENT: TODO (marpan)
+//
+
+//
+// PARAMETERS FOR SETTING LOW/HIGH STATES OF CONTENT METRICS:
+//
+
+// Thresholds for frame rate:
+const int kLowFrameRate = 10;
+const int kHighFrameRate = 25;
+
+// Thresholds for motion: motion level is from NFD
+const float kHighMotionNfd = 0.075f;
+const float kLowMotionNfd = 0.04f;
+
+// Thresholds for spatial prediction error:
+// this is applied on the min(2x2,1x2,2x1)
+const float kHighTexture = 0.035f;
+const float kLowTexture = 0.025f;
+
+// Used to reduce thresholds for larger/HD scenes: correction factor since
+// higher correlation in HD scenes means lower spatial prediction error.
+const float kScaleTexture = 0.9f;
+
+// percentage reduction in transitional bitrate for 2x2 selected over 1x2/2x1
+const float kRateRedSpatial2X2 = 0.6f;
+
+const float kSpatialErr2x2VsHoriz = 0.1f;   // percentage to favor 2x2 over H
+const float kSpatialErr2X2VsVert = 0.1f;    // percentage to favor 2x2 over V
+const float kSpatialErrVertVsHoriz = 0.1f;  // percentage to favor H over V
+
+}  //  namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
+
diff --git a/trunk/src/modules/video_coding/main/source/qm_select_unittest.cc b/trunk/src/modules/video_coding/main/source/qm_select_unittest.cc
new file mode 100644
index 0000000..e0ab7bf
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/qm_select_unittest.cc
@@ -0,0 +1,834 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the VCMQmResolution class.
+ * In particular, for the selection of spatial and/or temporal down-sampling.
+ */
+
+#include <gtest/gtest.h>
+
+#include "modules/video_coding/main/source/qm_select.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+// Test fixture for VCMQmResolution: owns the object under test plus the
+// content metrics fed into it, and provides helpers to drive rate/content
+// updates and to check the selected down-sampling action.
+class QmSelectTest : public ::testing::Test {
+ protected:
+  QmSelectTest()
+      :  qm_resolution_(new VCMQmResolution()),
+         content_metrics_(new VideoContentMetrics()),
+         qm_scale_(NULL) {
+  }
+  // Object under test.
+  VCMQmResolution* qm_resolution_;
+  // Content metrics passed to qm_resolution_ via UpdateContent().
+  VideoContentMetrics* content_metrics_;
+  // Output of SelectResolution(); NOTE(review): not freed in TearDown() --
+  // presumably owned by qm_resolution_; confirm against SelectResolution().
+  VCMResolutionScale* qm_scale_;
+
+  // Initializes qm_resolution_ with the given bitrate (kbps), frame rate,
+  // and native (system) frame width/height.
+  void InitQmNativeData(float initial_bit_rate, int user_frame_rate,
+                        int native_width, int native_height);
+
+  // Feeds num_updates per-frame encoded sizes into qm_resolution_.
+  void UpdateQmEncodedFrame(int* encoded_size, int num_updates);
+
+  // Feeds num_updates rate-interval samples (target/sent rate, frame rate,
+  // loss fraction) into qm_resolution_.
+  void UpdateQmRateData(int* target_rate,
+                        int* encoder_sent_rate,
+                        int* incoming_frame_rate,
+                        uint8_t* fraction_lost,
+                        int num_updates);
+
+  // Sets the motion metric and the three spatial prediction errors
+  // (2x2, horizontal 1x2, vertical 2x1) on content_metrics_.
+  void UpdateQmContentData(float motion_metric,
+                           float spatial_metric,
+                           float spatial_metric_horiz,
+                           float spatial_metric_vert);
+
+  // Returns true if qm_scale holds the expected width/height/temporal
+  // down-sampling factors.
+  bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
+                               uint8_t fac_width,
+                               uint8_t fac_height,
+                               uint8_t fac_temp);
+
+  void TearDown() {
+    delete qm_resolution_;
+    delete content_metrics_;
+  }
+};
+
+// Invalid inputs and calls before initialization must fail gracefully.
+TEST_F(QmSelectTest, HandleInputs) {
+  // Expect parameter error. Initialize with invalid inputs
+  // (zero frame rate, height, and width respectively).
+  EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 0, 640, 480));
+  EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 640, 0));
+  EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 0, 480));
+
+  // Expect uninitialized error: no valid initialization before selection.
+  EXPECT_EQ(-7, qm_resolution_->SelectResolution(&qm_scale_));
+
+  VideoContentMetrics* content_metrics = NULL;
+  EXPECT_EQ(0, qm_resolution_->Initialize(1000, 30, 640, 480));
+  qm_resolution_->UpdateContent(content_metrics);
+  // Content metrics are NULL: Expect success and no down-sampling action.
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// No down-sampling action at high rates.
+TEST_F(QmSelectTest, NoActionHighRate) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(800, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  // 640x480 maps to image type 2.
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {800, 800, 800};
+  int encoder_sent_rate[] = {800, 800, 800};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  UpdateQmContentData(0.01f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  // Factors (1, 1, 1) mean no spatial or temporal down-sampling.
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Rate is well below transition, down-sampling action is taken,
+// depending on the content state.
+// Exercises all 9 motion/spatial content classes (0 through 8).
+TEST_F(QmSelectTest, DownActionLowRate) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial: 2x2 spatial expected.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, low spatial: no action expected: content is too low.
+  UpdateQmContentData(0.01f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Medium motion, low spatial: 2x2 spatial expected.
+  UpdateQmContentData(0.06f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // High motion, high spatial: 1/2 temporal expected.
+  UpdateQmContentData(0.1f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial: 1/2 temporal expected.
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Medium motion, high spatial: 1/2 temporal expected.
+  UpdateQmContentData(0.06f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  qm_resolution_->ResetDownSamplingState();
+  // High motion, medium spatial: 2x2 spatial expected.
+  UpdateQmContentData(0.1f, 0.03f, 0.03f, 0.03f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
+  UpdateQmContentData(0.01f, 0.03f, 0.03f, 0.03f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Medium motion, medium spatial: high frame rate, so 1/2 temporal expected.
+  UpdateQmContentData(0.06f, 0.03f, 0.03f, 0.03f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+}
+
+// Rate mis-match is high, and we have over-shooting;
+// since target rate is below max for down-sampling, down-sampling is selected.
+TEST_F(QmSelectTest, DownActionHighRateMMOvershoot) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(450, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  // Encoder sends at twice the target rate: over-shooting.
+  int target_rate[] = {450, 450, 450};
+  int encoder_sent_rate[] = {900, 900, 900};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+}
+
+// Rate mis-match is high, target rate is below max for down-sampling,
+// but since we have consistent under-shooting, no down-sampling action.
+TEST_F(QmSelectTest, NoActionHighRateMMUndershoot) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(450, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  // Encoder sends well below the target rate: under-shooting.
+  int target_rate[] = {450, 450, 450};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Buffer is underflowing, and target rate is below max for down-sampling,
+// so action is taken.
+TEST_F(QmSelectTest, DownActionBufferUnderflow) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(450, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update with encoded size over a number of frames.
+  // per-frame bandwidth = 15 = 450/30: simulate (decoder) buffer underflow:
+  // the early frames are much larger than the per-frame budget.
+  int encoded_size[] = {200, 100, 50, 30, 60, 40, 20, 30, 20, 40};
+  UpdateQmEncodedFrame(encoded_size, 10);
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {450, 450, 450};
+  int encoder_sent_rate[] = {450, 450, 450};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+}
+
+// Target rate is below max for down-sampling, but buffer level is stable,
+// so no action is taken.
+TEST_F(QmSelectTest, NoActionBufferStable) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(450, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update with encoded size over a number of frames.
+  // per-frame bandwidth = 15 = 450/30: simulate stable (decoder) buffer levels.
+  // NOTE(review): the sibling underflow test uses int[]; this one uses
+  // int32_t[] while UpdateQmEncodedFrame is declared with int* -- relies on
+  // int32_t == int on the target platforms.
+  int32_t encoded_size[] = {40, 10, 10, 16, 18, 20, 17, 20, 16, 15};
+  UpdateQmEncodedFrame(encoded_size, 10);
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {450, 450, 450};
+  int encoder_sent_rate[] = {450, 450, 450};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Very low rate, but no spatial down-sampling below some size (QCIF).
+// 176*144 equals kMinImageSize in qm_select_data.h, so no spatial action.
+TEST_F(QmSelectTest, LimitDownSpatialAction) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(10, 30, 176, 144);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 176;
+  uint16_t codec_height = 144;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {10, 10, 10};
+  int encoder_sent_rate[] = {10, 10, 10};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Very low rate, but no frame reduction below some frame_rate (8fps).
+// 8 fps equals kMinFrameRate in qm_select_data.h, so no temporal action.
+TEST_F(QmSelectTest, LimitDownTemporalAction) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(10, 8, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {10, 10, 10};
+  int encoder_sent_rate[] = {10, 10, 10};
+  int incoming_frame_rate[] = {8, 8, 8};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, medium spatial.
+  UpdateQmContentData(0.01f, 0.03f, 0.03f, 0.03f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Two stages: spatial down-sample and then back up spatially,
+// as rate has increased.
+TEST_F(QmSelectTest, 2StageDownSpatialUpSpatial) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  // Reset and go up in rate: expected to go back up.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecFrameSize(320, 240);
+  EXPECT_EQ(1, qm_resolution_->GetImageType(320, 240));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {400, 400, 400, 400, 400};
+  int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  // Width/height factor 0 encodes reverting the spatial down-sampling
+  // (presumably; confirm against VCMResolutionScale in qm_select.h).
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0, 0, 1));
+}
+
+// Two stages: spatial down-sample and then back up spatially, since encoder
+// is under-shooting target even though rate has not increased much.
+TEST_F(QmSelectTest, 2StageDownSpatialUpSpatialUndershoot) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                    fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  // Reset rates and simulate under-shooting scenario: expect to go back up.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecFrameSize(320, 240);
+  EXPECT_EQ(1, qm_resolution_->GetImageType(320, 240));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {200, 200, 200, 200, 200};
+  int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0, 0, 1));
+}
+
+// Two stages: spatial down-sample and then no action to go up,
+// as encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownSpatialNoActionUp) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                    fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  // Reset and simulate large rate mis-match: expect no action to go back up.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecFrameSize(320, 240);
+  EXPECT_EQ(1, qm_resolution_->GetImageType(320, 240));
+  // Update rates for a sequence of intervals.
+  // Encoder over-shoots the target by 2.5x: stressed encoding state.
+  int target_rate2[] = {400, 400, 400, 400, 400};
+  int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Two stages: temporally down-sample and then back up temporally,
+// as rate has increased.
+TEST_F(QmSelectTest, 2StageDownTemporalUpTemporal) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  // Reset rates and go up in rate: expect to go back up.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {400, 400, 400, 400, 400};
+  int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  // Temporal factor 0 encodes reverting the temporal down-sampling
+  // (presumably; confirm against VCMResolutionScale in qm_select.h).
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 0));
+}
+
+// Two stages: temporal down-sample and then back up temporally, since encoder
+// is under-shooting target even though rate has not increased much.
+TEST_F(QmSelectTest, 2StageDownTemporalUpTemporalUndershoot) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  // Reset rates and simulate under-shooting scenario: expect to go back up.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {200, 200, 200, 200, 200};
+  int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 0));
+}
+
+// Two stages: temporal down-sample and then no action to go up,
+// as encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  // Reset and simulate large rate mis-match: expect no action to go back up.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {600, 600, 600, 600, 600};
+  int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// 3 stages: spatial down-sample, followed by temporal down-sample,
+// and then go up to full state, as encoding rate has increased.
+TEST_F(QmSelectTest, 3StageDownSpatialTemporalUpSpatialTemporal) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(100, 30, 640, 480);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  // Reset rate and change content data: expect temporal down-sample.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecFrameSize(320, 240);
+  EXPECT_EQ(1, qm_resolution_->GetImageType(320, 240));
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(0.01f, 0.1f, 0.1f, 0.1f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+
+  // Reset rates and go high up in rate: expect to go back up both spatial
+  // and temporally.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {1000, 1000, 1000, 1000, 1000};
+  int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  // Factor 0 in all three positions: revert both spatial and temporal
+  // down-sampling (presumably; confirm against VCMResolutionScale).
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0, 0, 0));
+}
+
+// No down-sampling below some total amount (factor of 16).
+TEST_F(QmSelectTest, NoActionTooMuchDownSampling) {
+  // Initialize with bitrate, frame rate, and native system width/height.
+  InitQmNativeData(400, 30, 1280, 720);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 1280;
+  uint16_t codec_height = 720;
+  qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {400, 400, 400};
+  int encoder_sent_rate[] = {400, 400, 400};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial: 2x2 spatial expected.
+  UpdateQmContentData(0.1f, 0.01f, 0.01f, 0.01f);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  // Reset and lower rates to get another spatial action.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecFrameSize(640, 360);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(640, 360));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {100, 100, 100, 100, 100};
+  int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2, 2, 1));
+
+  // Reset and go to low rate: no action should be taken,
+  // we went down too much already.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecFrameSize(320, 180);
+  EXPECT_EQ(0, qm_resolution_->GetImageType(320, 180));
+  // Update rates for a sequence of intervals.
+  int target_rate3[] = {10, 10, 10, 10, 10};
+  int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
+  int incoming_frame_rate3[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+                   fraction_lost3, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 1));
+}
+
+// Initializes the QM resolution instance under test with the given
+// bit rate, frame rate, and native frame dimensions; the test fails
+// if initialization does not succeed.
+void QmSelectTest::InitQmNativeData(float initial_bit_rate,
+                                    int user_frame_rate,
+                                    int native_width,
+                                    int native_height) {
+  const int init_result = qm_resolution_->Initialize(
+      initial_bit_rate, user_frame_rate, native_width, native_height);
+  EXPECT_EQ(0, init_result);
+}
+
+// Writes the given content metrics (motion magnitude plus the overall,
+// horizontal, and vertical spatial prediction errors) into the shared
+// metrics struct and pushes it to the QM resolution instance under test.
+void QmSelectTest::UpdateQmContentData(float motion_metric,
+                                       float spatial_metric,
+                                       float spatial_metric_horiz,
+                                       float spatial_metric_vert) {
+  content_metrics_->motion_magnitude = motion_metric;
+  content_metrics_->spatial_pred_err = spatial_metric;
+  content_metrics_->spatial_pred_err_h = spatial_metric_horiz;
+  content_metrics_->spatial_pred_err_v = spatial_metric_vert;
+  qm_resolution_->UpdateContent(content_metrics_);
+}
+
+// Feeds |num_updates| encoded-frame sizes (given in kbits) to the QM
+// resolution instance, each reported as a delta frame.
+void QmSelectTest::UpdateQmEncodedFrame(int* encoded_size, int num_updates) {
+  for (int update = 0; update < num_updates; ++update) {
+    // Convert kbits to bytes before reporting.
+    const int32_t size_bytes = 1000 * encoded_size[update] / 8;
+    qm_resolution_->UpdateEncodedSize(size_bytes, kVideoFrameDelta);
+  }
+}
+
+// Feeds |num_updates| rate-sample tuples (target bitrate, encoder sent
+// bitrate, incoming frame rate, fraction lost) to the QM resolution
+// instance, one UpdateRates() call per interval.
+void QmSelectTest::UpdateQmRateData(int* target_rate,
+                                    int* encoder_sent_rate,
+                                    int* incoming_frame_rate,
+                                    uint8_t* fraction_lost,
+                                    int num_updates) {
+  for (int i = 0; i < num_updates; ++i) {
+    qm_resolution_->UpdateRates(static_cast<float>(target_rate[i]),
+                                static_cast<float>(encoder_sent_rate[i]),
+                                static_cast<float>(incoming_frame_rate[i]),
+                                fraction_lost[i]);
+  }
+}
+
+// Checks whether the action selected by the QmResolution class matches
+// the expected scales from |fac_width|, |fac_height|, |fac_temp|.
+bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
+                                           uint8_t fac_width,
+                                           uint8_t fac_height,
+                                           uint8_t fac_temp) {
+  // Return the comparison directly instead of an if/else that
+  // returns true/false.
+  return qm_scale->spatialWidthFact == fac_width &&
+         qm_scale->spatialHeightFact == fac_height &&
+         qm_scale->temporalFact == fac_temp;
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/receiver.cc b/trunk/src/modules/video_coding/main/source/receiver.cc
new file mode 100644
index 0000000..6be5336
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/receiver.cc
@@ -0,0 +1,493 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/receiver.h"
+
+#include <assert.h>
+
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/source/encoded_frame.h"
+#include "modules/video_coding/main/source/internal_defines.h"
+#include "modules/video_coding/main/source/media_opt_util.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// Constructs a receiver bound to |timing| and |clock|. |master| selects
+// between the primary receiver (true) and the dual receiver used for
+// error-recovery decoding (false). Starts in the passive state.
+VCMReceiver::VCMReceiver(VCMTiming& timing,
+                         TickTimeBase* clock,
+                         WebRtc_Word32 vcmId,
+                         WebRtc_Word32 receiverId,
+                         bool master)
+    : _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _vcmId(vcmId),
+      _clock(clock),
+      _receiverId(receiverId),
+      _master(master),
+      _jitterBuffer(_clock, vcmId, receiverId, master),
+      _timing(timing),
+      // Heap-allocated event held by reference; released in the
+      // destructor via delete &_renderWaitEvent.
+      _renderWaitEvent(*new VCMEvent()),
+      _state(kPassive) {}
+
+// Wakes any thread blocked in FrameForRendering() before tearing down
+// the event and critical section owned by this receiver.
+VCMReceiver::~VCMReceiver()
+{
+    _renderWaitEvent.Set();
+    delete &_renderWaitEvent;  // Allocated with new in the constructor.
+    delete _critSect;
+}
+
+// Restarts the jitter buffer (flushing it when already running), clears
+// any pending render wait, and returns the receiver to its initial
+// state: receiving for the primary receiver, passive for the dual one.
+void
+VCMReceiver::Reset()
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_jitterBuffer.Running())
+    {
+        _jitterBuffer.Flush();
+    }
+    else
+    {
+        _jitterBuffer.Start();
+    }
+    _renderWaitEvent.Reset();
+    _state = _master ? kReceiving : kPassive;
+}
+
+// Resets the receiver into its start state. The dual (slave) receiver
+// additionally starts with NACK disabled. Always returns VCM_OK.
+WebRtc_Word32
+VCMReceiver::Initialize()
+{
+    CriticalSectionScoped cs(_critSect);
+    Reset();
+    if (!_master)
+    {
+        // NOTE(review): SetNackMode() re-acquires _critSect; assumes the
+        // critical section wrapper is recursive — confirm.
+        SetNackMode(kNoNack);
+    }
+    return VCM_OK;
+}
+
+// Forwards the latest round-trip time estimate (ms) to the jitter buffer.
+void VCMReceiver::UpdateRtt(WebRtc_UWord32 rtt)
+{
+    _jitterBuffer.UpdateRtt(rtt);
+}
+
+// Inserts a received packet into the jitter buffer. Returns VCM_OK on
+// success (packets older than the current decode point are silently
+// accepted), VCM_FLUSH_INDICATOR when the jitter buffer and timing were
+// reset and the caller should flush, or a jitter buffer error code.
+WebRtc_Word32
+VCMReceiver::InsertPacket(const VCMPacket& packet,
+                          WebRtc_UWord16 frameWidth,
+                          WebRtc_UWord16 frameHeight)
+{
+    // Find an empty frame
+    VCMEncodedFrame *buffer = NULL;
+    const WebRtc_Word32 error = _jitterBuffer.GetFrame(packet, buffer);
+    if (error == VCM_OLD_PACKET_ERROR)
+    {
+        return VCM_OK;
+    }
+    else if (error != VCM_OK)
+    {
+        return error;
+    }
+    assert(buffer);
+    {
+        CriticalSectionScoped cs(_critSect);
+
+        if (frameWidth && frameHeight)
+        {
+            buffer->SetEncodedSize(static_cast<WebRtc_UWord32>(frameWidth),
+                                   static_cast<WebRtc_UWord32>(frameHeight));
+        }
+
+        if (_master)
+        {
+            // Only trace the primary receiver to make it possible
+            // to parse and plot the trace file.
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                         VCMId(_vcmId, _receiverId),
+                         "Packet seqNo %u of frame %u at %u",
+                         packet.seqNum, packet.timestamp,
+                         MaskWord64ToUWord32(_clock->MillisecondTimestamp()));
+        }
+
+        const WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
+
+        WebRtc_Word64 renderTimeMs = _timing.RenderTimeMs(packet.timestamp, nowMs);
+
+        if (renderTimeMs < 0)
+        {
+            // Render time error. Assume that this is due to some change in
+            // the incoming video stream and reset the JB and the timing.
+            _jitterBuffer.Flush();
+            _timing.Reset(_clock->MillisecondTimestamp());
+            return VCM_FLUSH_INDICATOR;
+        }
+        else if (renderTimeMs < nowMs - kMaxVideoDelayMs)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                "This frame should have been rendered more than %u ms ago."
+                "Flushing jitter buffer and resetting timing.", kMaxVideoDelayMs);
+            _jitterBuffer.Flush();
+            _timing.Reset(_clock->MillisecondTimestamp());
+            return VCM_FLUSH_INDICATOR;
+        }
+        else if (_timing.TargetVideoDelay() > kMaxVideoDelayMs)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                "More than %u ms target delay. Flushing jitter buffer and resetting timing.",
+                kMaxVideoDelayMs);
+            _jitterBuffer.Flush();
+            _timing.Reset(_clock->MillisecondTimestamp());
+            return VCM_FLUSH_INDICATOR;
+        }
+
+        // First packet received belonging to this frame.
+        if (buffer->Length() == 0)
+        {
+            // NOTE(review): shadows the outer |nowMs| with a fresh clock
+            // read — presumably intended to timestamp the first packet
+            // precisely; confirm.
+            const WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
+            if (_master)
+            {
+                // Only trace the primary receiver to make it possible to parse and plot the trace file.
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                           "First packet of frame %u at %u", packet.timestamp,
+                           MaskWord64ToUWord32(nowMs));
+            }
+            renderTimeMs = _timing.RenderTimeMs(packet.timestamp, nowMs);
+            if (renderTimeMs >= 0)
+            {
+                buffer->SetRenderTime(renderTimeMs);
+            }
+            else
+            {
+                buffer->SetRenderTime(nowMs);
+            }
+        }
+
+        // Insert packet into the jitter buffer
+        // both media and empty packets
+        const VCMFrameBufferEnum
+        ret = _jitterBuffer.InsertPacket(buffer, packet);
+        if (ret == kFlushIndicator) {
+          return VCM_FLUSH_INDICATOR;
+        } else if (ret < 0) {
+          WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
+                       VCMId(_vcmId, _receiverId),
+                       "Error inserting packet seqNo=%u, timeStamp=%u",
+                       packet.seqNum, packet.timestamp);
+          return VCM_JITTER_BUFFER_ERROR;
+        }
+    }
+    return VCM_OK;
+}
+
+// Returns the next frame to decode, waiting at most |maxWaitTimeMs| for
+// a timestamp to appear in the jitter buffer, or NULL when none arrives
+// in time. |nextRenderTimeMs| receives the frame's render time. When
+// |renderTiming| is true the private decode-timing path is used,
+// otherwise the render-timing path that waits until the frame is due.
+// A non-NULL |dualReceiver| has its state updated from the returned
+// frame.
+VCMEncodedFrame* VCMReceiver::FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                                               WebRtc_Word64& nextRenderTimeMs,
+                                               bool renderTiming,
+                                               VCMReceiver* dualReceiver)
+{
+    // No need to enter the critical section here since the jitter buffer
+    // is thread-safe.
+    FrameType incomingFrameType = kVideoFrameDelta;
+    nextRenderTimeMs = -1;
+    const WebRtc_Word64 startTimeMs = _clock->MillisecondTimestamp();
+    WebRtc_Word64 ret = _jitterBuffer.GetNextTimeStamp(maxWaitTimeMs,
+                                                       incomingFrameType,
+                                                       nextRenderTimeMs);
+    if (ret < 0)
+    {
+        // No timestamp in jitter buffer at the moment
+        return NULL;
+    }
+    const WebRtc_UWord32 timeStamp = static_cast<WebRtc_UWord32>(ret);
+
+    // Update the timing
+    _timing.SetRequiredDelay(_jitterBuffer.GetEstimatedJitterMS());
+    _timing.UpdateCurrentDelay(timeStamp);
+
+    // Reduce the wait budget by the time already spent waiting for a
+    // timestamp above.
+    const WebRtc_Word32 tempWaitTime = maxWaitTimeMs -
+            static_cast<WebRtc_Word32>(_clock->MillisecondTimestamp() - startTimeMs);
+    WebRtc_UWord16 newMaxWaitTime = static_cast<WebRtc_UWord16>(VCM_MAX(tempWaitTime, 0));
+
+    VCMEncodedFrame* frame = NULL;
+
+    if (renderTiming)
+    {
+        frame = FrameForDecoding(newMaxWaitTime, nextRenderTimeMs, dualReceiver);
+    }
+    else
+    {
+        frame = FrameForRendering(newMaxWaitTime, nextRenderTimeMs, dualReceiver);
+    }
+
+    if (frame != NULL)
+    {
+        bool retransmitted = false;
+        const WebRtc_Word64 lastPacketTimeMs =
+                _jitterBuffer.LastPacketTime(frame, retransmitted);
+        if (lastPacketTimeMs >= 0 && !retransmitted)
+        {
+            // We don't want to include timestamps which have suffered from retransmission
+            // here, since we compensate with extra retransmission delay within
+            // the jitter estimate.
+            _timing.IncomingTimestamp(timeStamp, lastPacketTimeMs);
+        }
+        if (dualReceiver != NULL)
+        {
+            dualReceiver->UpdateState(*frame);
+        }
+    }
+    return frame;
+}
+
+// Decode-timing path: returns a complete frame when one is available in
+// time, otherwise falls back to an incomplete frame once the decode
+// deadline has passed. Before decoding a potentially state-corrupting
+// incomplete frame, a passive dual receiver (in kNackInfinite mode) is
+// handed a copy of the jitter buffer state. Returns NULL when it is
+// still worth waiting for a complete frame.
+VCMEncodedFrame*
+VCMReceiver::FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                              WebRtc_Word64 nextRenderTimeMs,
+                              VCMReceiver* dualReceiver)
+{
+    // How long can we wait until we must decode the next frame
+    WebRtc_UWord32 waitTimeMs = _timing.MaxWaitingTime(nextRenderTimeMs,
+                                          _clock->MillisecondTimestamp());
+
+    // Try to get a complete frame from the jitter buffer
+    VCMEncodedFrame* frame = _jitterBuffer.GetCompleteFrameForDecoding(0);
+
+    if (frame == NULL && maxWaitTimeMs == 0 && waitTimeMs > 0)
+    {
+        // If we're not allowed to wait for frames to get complete we must
+        // calculate if it's time to decode, and if it's not we will just return
+        // for now.
+        return NULL;
+    }
+
+    if (frame == NULL && VCM_MIN(waitTimeMs, maxWaitTimeMs) == 0)
+    {
+        // No time to wait for a complete frame,
+        // check if we have an incomplete
+        const bool dualReceiverEnabledAndPassive = (dualReceiver != NULL &&
+                                     dualReceiver->State() == kPassive &&
+                                     dualReceiver->NackMode() == kNackInfinite);
+        if (dualReceiverEnabledAndPassive &&
+            !_jitterBuffer.CompleteSequenceWithNextFrame())
+        {
+            // Jitter buffer state might get corrupt with this frame.
+            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
+            frame = _jitterBuffer.GetFrameForDecoding();
+            assert(frame);
+        } else {
+            frame = _jitterBuffer.GetFrameForDecoding();
+        }
+    }
+    if (frame == NULL)
+    {
+        // Wait for a complete frame
+        frame = _jitterBuffer.GetCompleteFrameForDecoding(maxWaitTimeMs);
+    }
+    if (frame == NULL)
+    {
+        // Get an incomplete frame
+        if (_timing.MaxWaitingTime(nextRenderTimeMs,
+                                   _clock->MillisecondTimestamp()) > 0)
+        {
+            // Still time to wait for a complete frame
+            return NULL;
+        }
+
+        // No time left to wait, we must decode this frame now.
+        const bool dualReceiverEnabledAndPassive = (dualReceiver != NULL &&
+                                     dualReceiver->State() == kPassive &&
+                                     dualReceiver->NackMode() == kNackInfinite);
+        if (dualReceiverEnabledAndPassive &&
+            !_jitterBuffer.CompleteSequenceWithNextFrame())
+        {
+            // Jitter buffer state might get corrupt with this frame.
+            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
+        }
+
+        frame = _jitterBuffer.GetFrameForDecoding();
+    }
+    return frame;
+}
+
+// Render-timing path: blocks on _renderWaitEvent until the frame is due,
+// then returns a complete frame when possible, falling back to an
+// incomplete one (after handing jitter buffer state to a passive dual
+// receiver). Returns NULL when |maxWaitTimeMs| is too short to wait
+// until the frame's render time.
+VCMEncodedFrame*
+VCMReceiver::FrameForRendering(WebRtc_UWord16 maxWaitTimeMs,
+                               WebRtc_Word64 nextRenderTimeMs,
+                               VCMReceiver* dualReceiver)
+{
+    // How long MUST we wait until we must decode the next frame. This is different for the case
+    // where we have a renderer which can render at a specified time. Here we must wait as long
+    // as possible before giving the frame to the decoder, which will render the frame as soon
+    // as it has been decoded.
+    WebRtc_UWord32 waitTimeMs = _timing.MaxWaitingTime(nextRenderTimeMs,
+                                                       _clock->MillisecondTimestamp());
+    if (maxWaitTimeMs < waitTimeMs)
+    {
+        // If we're not allowed to wait until the frame is supposed to be rendered
+        // we will have to return NULL for now.
+        return NULL;
+    }
+    // Wait until it's time to render
+    _renderWaitEvent.Wait(waitTimeMs);
+
+    // Get a complete frame if possible
+    VCMEncodedFrame* frame = _jitterBuffer.GetCompleteFrameForDecoding(0);
+
+    if (frame == NULL)
+    {
+        // Get an incomplete frame
+        const bool dualReceiverEnabledAndPassive = dualReceiver != NULL &&
+                                                   dualReceiver->State() == kPassive &&
+                                                   dualReceiver->NackMode() == kNackInfinite;
+        if (dualReceiverEnabledAndPassive && !_jitterBuffer.CompleteSequenceWithNextFrame())
+        {
+            // Jitter buffer state might get corrupt with this frame.
+            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
+        }
+
+        frame = _jitterBuffer.GetFrameForDecoding();
+    }
+    return frame;
+}
+
+// Returns |frame| to the jitter buffer's frame pool.
+void
+VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame)
+{
+    _jitterBuffer.ReleaseFrame(frame);
+}
+
+// Fetches current receive statistics from the jitter buffer. |bitRate|
+// is returned in kbps, |frameRate| in frames per second.
+WebRtc_Word32
+VCMReceiver::ReceiveStatistics(WebRtc_UWord32& bitRate, WebRtc_UWord32& frameRate)
+{
+    const WebRtc_Word32 ret = _jitterBuffer.GetUpdate(frameRate, bitRate);
+    bitRate /= 1000; // The jitter buffer reports bps; callers expect kbps.
+    return ret;
+}
+
+// Retrieves the number of delta and key frames received so far.
+WebRtc_Word32
+VCMReceiver::ReceivedFrameCount(VCMFrameCount& frameCount) const
+{
+    return _jitterBuffer.GetFrameStatistics(frameCount.numDeltaFrames,
+                                            frameCount.numKeyFrames);
+}
+
+// Returns the number of packets discarded by the jitter buffer.
+WebRtc_UWord32 VCMReceiver::DiscardedPackets() const {
+  return _jitterBuffer.DiscardedPackets();
+}
+
+// Sets the NACK mode on the jitter buffer. A dual (slave) receiver is
+// always reset to passive when the mode changes.
+void
+VCMReceiver::SetNackMode(VCMNackMode nackMode)
+{
+    CriticalSectionScoped cs(_critSect);
+    // Default to always having NACK enabled in hybrid mode.
+    _jitterBuffer.SetNackMode(nackMode, kLowRttNackMs, -1);
+    if (!_master)
+    {
+        _state = kPassive; // The dual decoder defaults to passive
+    }
+}
+
+// Returns the NACK mode currently configured on the jitter buffer.
+VCMNackMode
+VCMReceiver::NackMode() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _jitterBuffer.GetNackMode();
+}
+
+// Copies the jitter buffer's current NACK list into |nackList|. On input
+// |size| is the capacity of |nackList|; on output it holds the number of
+// entries written. Returns kNackKeyFrameRequest when the jitter buffer
+// signals (NULL list + size 0xffff) that a key frame must be requested
+// instead, and kNackNeedMoreMemory (with |size| set to the required
+// capacity) when the caller's buffer is too small.
+VCMNackStatus
+VCMReceiver::NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size)
+{
+    bool extended = false;
+    WebRtc_UWord16 nackListSize = 0;
+    WebRtc_UWord16* internalNackList = _jitterBuffer.GetNackList(nackListSize, extended);
+    if (internalNackList == NULL && nackListSize == 0xffff)
+    {
+        // This combination is used to trigger key frame requests.
+        size = 0;
+        return kNackKeyFrameRequest;
+    }
+    if (nackListSize > size)
+    {
+        size = nackListSize;
+        return kNackNeedMoreMemory;
+    }
+    if (internalNackList != NULL && nackListSize > 0) {
+      memcpy(nackList, internalNackList, nackListSize * sizeof(WebRtc_UWord16));
+    }
+    size = nackListSize;
+    return kNackOk;
+}
+
+// Decides whether the dual decoder has caught up with the primary
+// decoder, i.e. both have reached the same timestamp. When it has, the
+// dual receiver is moved to kWaitForPrimaryDecode and true is returned.
+bool
+VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dualFrame, VCMReceiver& dualReceiver) const
+{
+    if (dualFrame != NULL &&
+        _jitterBuffer.LastDecodedTimestamp() == dualFrame->TimeStamp())
+    {
+        dualReceiver.UpdateState(kWaitForPrimaryDecode);
+        return true;
+    }
+    return false;
+}
+
+// Clones the jitter buffer state of |receiver| into this receiver. Used
+// when handing decoding over between the primary and dual receivers.
+void
+VCMReceiver::CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver)
+{
+    _jitterBuffer.CopyFrom(receiver._jitterBuffer);
+}
+
+// Thread-safe accessor for the current receiver state.
+VCMReceiverState
+VCMReceiver::State() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _state;
+}
+
+// Transitions the receiver to |newState|. A passive receiver may not
+// jump directly to kWaitForPrimaryDecode.
+void
+VCMReceiver::UpdateState(VCMReceiverState newState)
+{
+    CriticalSectionScoped cs(_critSect);
+    assert(!(_state == kPassive && newState == kWaitForPrimaryDecode));
+//    assert(!(_state == kReceiving && newState == kPassive));
+    _state = newState;
+}
+
+// Updates the dual receiver's state based on a frame handed to the
+// decoder: complete key frames and a completed catch-up turn the dual
+// receiver passive, while missing/incomplete frames (lost state)
+// re-enable it. A no-op unless NACK (dual decoder mode) is enabled.
+void
+VCMReceiver::UpdateState(VCMEncodedFrame& frame)
+{
+    if (_jitterBuffer.GetNackMode() == kNoNack)
+    {
+        // Dual decoder mode has not been enabled.
+        return;
+    }
+    // Update the dual receiver state
+    if (frame.Complete() && frame.FrameType() == kVideoFrameKey)
+    {
+        UpdateState(kPassive);
+    }
+    if (State() == kWaitForPrimaryDecode &&
+        frame.Complete() && !frame.MissingFrame())
+    {
+        UpdateState(kPassive);
+    }
+    if (frame.MissingFrame() || !frame.Complete())
+    {
+        // State was corrupted, enable dual receiver.
+        UpdateState(kReceiving);
+    }
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/receiver.h b/trunk/src/modules/video_coding/main/source/receiver.h
new file mode 100644
index 0000000..0081ed1
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/receiver.h
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
+
+#include "critical_section_wrapper.h"
+#include "jitter_buffer.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "timing.h"
+#include "packet.h"
+
+namespace webrtc
+{
+
+class VCMEncodedFrame;
+
+// Result codes for VCMReceiver::NackList().
+enum VCMNackStatus
+{
+    kNackOk,                // NACK list copied successfully.
+    kNackNeedMoreMemory,    // Caller buffer too small; |size| holds the need.
+    kNackKeyFrameRequest    // A key frame should be requested instead.
+};
+
+
+// State of a receiver, used to coordinate the primary and dual receivers.
+enum VCMReceiverState
+{
+    kReceiving,             // Actively receiving and decoding.
+    kPassive,               // Idle; not driving decode.
+    kWaitForPrimaryDecode   // Dual decoder caught up; waiting on primary.
+};
+
+// Receives packets into a jitter buffer and hands frames to the decoder
+// with the proper timing. A "master" (primary) receiver may cooperate
+// with a dual receiver used for error-recovery decoding.
+class VCMReceiver
+{
+public:
+    // |master| selects the primary receiver (true) or the dual receiver
+    // used for error recovery (false).
+    VCMReceiver(VCMTiming& timing,
+                TickTimeBase* clock,
+                WebRtc_Word32 vcmId = -1,
+                WebRtc_Word32 receiverId = -1,
+                bool master = true);
+    ~VCMReceiver();
+
+    void Reset();
+    WebRtc_Word32 Initialize();
+    void UpdateRtt(WebRtc_UWord32 rtt);
+    // Inserts |packet| into the jitter buffer. Returns VCM_OK,
+    // VCM_FLUSH_INDICATOR, or a jitter buffer error code.
+    WebRtc_Word32 InsertPacket(const VCMPacket& packet,
+                               WebRtc_UWord16 frameWidth,
+                               WebRtc_UWord16 frameHeight);
+    // Returns the next frame to decode, or NULL if none is available
+    // within |maxWaitTimeMs|. |nextRenderTimeMs| receives the frame's
+    // render time.
+    VCMEncodedFrame* FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                                      WebRtc_Word64& nextRenderTimeMs,
+                                      bool renderTiming = true,
+                                      VCMReceiver* dualReceiver = NULL);
+    void ReleaseFrame(VCMEncodedFrame* frame);
+    // |bitRate| is returned in kbps, |frameRate| in frames per second.
+    WebRtc_Word32 ReceiveStatistics(WebRtc_UWord32& bitRate, WebRtc_UWord32& frameRate);
+    WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const;
+    WebRtc_UWord32 DiscardedPackets() const;
+
+    // NACK
+    void SetNackMode(VCMNackMode nackMode);
+    VCMNackMode NackMode() const;
+    VCMNackStatus NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size);
+
+    // Dual decoder
+    bool DualDecoderCaughtUp(VCMEncodedFrame* dualFrame, VCMReceiver& dualReceiver) const;
+    VCMReceiverState State() const;
+
+private:
+    // Parameter renamed from |nextrenderTimeMs| for consistency with the
+    // definitions in receiver.cc (|nextRenderTimeMs|).
+    VCMEncodedFrame* FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                                      WebRtc_Word64 nextRenderTimeMs,
+                                      VCMReceiver* dualReceiver);
+    VCMEncodedFrame* FrameForRendering(WebRtc_UWord16 maxWaitTimeMs,
+                                       WebRtc_Word64 nextRenderTimeMs,
+                                       VCMReceiver* dualReceiver);
+    void CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver);
+    void UpdateState(VCMReceiverState newState);
+    void UpdateState(VCMEncodedFrame& frame);
+    static WebRtc_Word32 GenerateReceiverId();
+
+    CriticalSectionWrapper* _critSect;
+    WebRtc_Word32           _vcmId;
+    TickTimeBase*           _clock;
+    WebRtc_Word32           _receiverId;
+    bool                    _master;           // True for the primary receiver.
+    VCMJitterBuffer         _jitterBuffer;
+    VCMTiming&              _timing;
+    VCMEvent&               _renderWaitEvent;  // Owned; see destructor.
+    VCMReceiverState        _state;
+
+    static WebRtc_Word32    _receiverIdCounter;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
diff --git a/trunk/src/modules/video_coding/main/source/rtt_filter.cc b/trunk/src/modules/video_coding/main/source/rtt_filter.cc
new file mode 100644
index 0000000..36f7660
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/rtt_filter.cc
@@ -0,0 +1,214 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "internal_defines.h"
+#include "rtt_filter.h"
+
+#include <cmath>
+#include <stdlib.h>
+#include <string.h>
+
+namespace webrtc {
+
+// Creates an RTT filter with fixed tuning: filter factor capped at 35
+// samples, jump detection at 2.5 standard deviations, drift detection at
+// 3.5, and a detection threshold of kMaxDriftJumpCount samples.
+VCMRttFilter::VCMRttFilter(WebRtc_Word32 vcmId, WebRtc_Word32 receiverId)
+:
+_vcmId(vcmId),
+_receiverId(receiverId),
+_filtFactMax(35),
+_jumpStdDevs(2.5),
+_driftStdDevs(3.5),
+_detectThreshold(kMaxDriftJumpCount)
+{
+    Reset();
+}
+
+// Copies the dynamic filter state from |rhs|.
+// NOTE(review): the identity/tuning members (_vcmId, _receiverId,
+// thresholds) are not copied — presumably intentional so a filter keeps
+// its own ids and tuning after assignment; confirm.
+VCMRttFilter&
+VCMRttFilter::operator=(const VCMRttFilter& rhs)
+{
+    if (this != &rhs)
+    {
+        _gotNonZeroUpdate = rhs._gotNonZeroUpdate;
+        _avgRtt = rhs._avgRtt;
+        _varRtt = rhs._varRtt;
+        _maxRtt = rhs._maxRtt;
+        _filtFactCount = rhs._filtFactCount;
+        _jumpCount = rhs._jumpCount;
+        _driftCount = rhs._driftCount;
+        memcpy(_jumpBuf, rhs._jumpBuf, sizeof(_jumpBuf));
+        memcpy(_driftBuf, rhs._driftBuf, sizeof(_driftBuf));
+    }
+    return *this;
+}
+
+// Resets all filter state to its initial values.
+void
+VCMRttFilter::Reset()
+{
+    _gotNonZeroUpdate = false;
+    _avgRtt = 0;
+    _varRtt = 0;
+    _maxRtt = 0;
+    _filtFactCount = 1;
+    _jumpCount = 0;
+    _driftCount = 0;
+    // Bug fix: clear the whole buffers. The previous code cleared only
+    // kMaxDriftJumpCount *bytes*, but the buffers hold kMaxDriftJumpCount
+    // 32-bit entries (operator= copies sizeof(_jumpBuf) bytes).
+    memset(_jumpBuf, 0, sizeof(_jumpBuf));
+    memset(_driftBuf, 0, sizeof(_driftBuf));
+}
+
+// Feeds a new RTT sample (ms) into the filter. Zero-valued samples are
+// ignored until the first non-zero sample arrives; samples are clamped
+// to 3000 ms. The exponential average/variance update is rolled back
+// when jump detection rejects the sample (DriftDetection always returns
+// true, so only jumps trigger the rollback).
+void
+VCMRttFilter::Update(WebRtc_UWord32 rttMs)
+{
+    if (!_gotNonZeroUpdate)
+    {
+        if (rttMs == 0)
+        {
+            return;
+        }
+        _gotNonZeroUpdate = true;
+    }
+
+    // Sanity check
+    if (rttMs > 3000)
+    {
+        rttMs = 3000;
+    }
+
+    double filtFactor = 0;
+    if (_filtFactCount > 1)
+    {
+        filtFactor = static_cast<double>(_filtFactCount - 1) / _filtFactCount;
+    }
+    _filtFactCount++;
+    if (_filtFactCount > _filtFactMax)
+    {
+        // This prevents filtFactor from going above
+        // (_filtFactMax - 1) / _filtFactMax,
+        // e.g., _filtFactMax = 50 => filtFactor = 49/50 = 0.98
+        _filtFactCount = _filtFactMax;
+    }
+    double oldAvg = _avgRtt;
+    double oldVar = _varRtt;
+    _avgRtt = filtFactor * _avgRtt + (1 - filtFactor) * rttMs;
+    _varRtt = filtFactor * _varRtt + (1 - filtFactor) *
+                (rttMs - _avgRtt) * (rttMs - _avgRtt);
+    _maxRtt = VCM_MAX(rttMs, _maxRtt);
+    if (!JumpDetection(rttMs) || !DriftDetection(rttMs))
+    {
+        // In some cases we don't want to update the statistics
+        _avgRtt = oldAvg;
+        _varRtt = oldVar;
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "RttFilter Update: sample=%u avgRtt=%f varRtt=%f maxRtt=%u",
+               rttMs, _avgRtt, _varRtt, _maxRtt);
+}
+
+// Detects sudden RTT jumps by comparing the distance between the sample
+// and the running average to the standard deviation. Samples around a
+// suspected jump are buffered; once _detectThreshold samples agree on
+// the direction, the filter is restarted from the buffered samples.
+// Returns false while a jump is suspected but not yet confirmed, which
+// makes Update() discard the sample from the long-term statistics.
+bool
+VCMRttFilter::JumpDetection(WebRtc_UWord32 rttMs)
+{
+    double diffFromAvg = _avgRtt - rttMs;
+    // Bug fix: use fabs() on the double-valued difference. Unqualified
+    // abs() can resolve to the integer overload from <stdlib.h>, which
+    // truncates sub-millisecond deviations to zero.
+    if (fabs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
+    {
+        int diffSign = (diffFromAvg >= 0) ? 1 : -1;
+        int jumpCountSign = (_jumpCount >= 0) ? 1 : -1;
+        if (diffSign != jumpCountSign)
+        {
+            // Since the signs differ the samples currently
+            // in the buffer is useless as they represent a
+            // jump in a different direction.
+            _jumpCount = 0;
+        }
+        if (abs(_jumpCount) < kMaxDriftJumpCount)
+        {
+            // Update the buffer used for the short time
+            // statistics.
+            // The sign of the diff is used for updating the counter since
+            // we want to use the same buffer for keeping track of when
+            // the RTT jumps down and up.
+            _jumpBuf[abs(_jumpCount)] = rttMs;
+            _jumpCount += diffSign;
+        }
+        if (abs(_jumpCount) >= _detectThreshold)
+        {
+            // Detected an RTT jump
+            ShortRttFilter(_jumpBuf, abs(_jumpCount));
+            _filtFactCount = _detectThreshold + 1;
+            _jumpCount = 0;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                       "Detected an RTT jump");
+        }
+        else
+        {
+            return false;
+        }
+    }
+    else
+    {
+        _jumpCount = 0;
+    }
+    return true;
+}
+
+// Detects RTT drift by comparing the distance between the tracked max
+// and the average to the standard deviation. Buffers up to
+// kMaxDriftJumpCount samples and restarts the filter from them when the
+// threshold is reached. Always returns true, i.e. the long-term
+// statistics are kept regardless.
+bool
+VCMRttFilter::DriftDetection(WebRtc_UWord32 rttMs)
+{
+    if (_maxRtt - _avgRtt > _driftStdDevs * sqrt(_varRtt))
+    {
+        if (_driftCount < kMaxDriftJumpCount)
+        {
+            // Update the buffer used for the short time
+            // statistics.
+            _driftBuf[_driftCount] = rttMs;
+            _driftCount++;
+        }
+        if (_driftCount >= _detectThreshold)
+        {
+            // Detected an RTT drift
+            ShortRttFilter(_driftBuf, _driftCount);
+            _filtFactCount = _detectThreshold + 1;
+            _driftCount = 0;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                       "Detected an RTT drift");
+        }
+    }
+    else
+    {
+        _driftCount = 0;
+    }
+    return true;
+}
+
+// Recomputes _maxRtt and _avgRtt from the |length| samples in |buf|,
+// giving the filter a short-memory restart after a confirmed jump or
+// drift. A zero length leaves the current state untouched.
+void
+VCMRttFilter::ShortRttFilter(WebRtc_UWord32* buf, WebRtc_UWord32 length)
+{
+    if (length == 0)
+    {
+        return;
+    }
+    WebRtc_UWord32 maxSample = 0;
+    double sampleSum = 0;
+    for (WebRtc_UWord32 idx = 0; idx < length; idx++)
+    {
+        if (buf[idx] > maxSample)
+        {
+            maxSample = buf[idx];
+        }
+        sampleSum += buf[idx];
+    }
+    _maxRtt = maxSample;
+    _avgRtt = sampleSum / static_cast<double>(length);
+}
+
+// Returns the current RTT estimate in ms: the tracked maximum, rounded
+// to the nearest integer.
+WebRtc_UWord32
+VCMRttFilter::RttMs() const
+{
+    return static_cast<WebRtc_UWord32>(_maxRtt + 0.5);
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/rtt_filter.h b/trunk/src/modules/video_coding/main/source/rtt_filter.h
new file mode 100644
index 0000000..5ec85fd
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/rtt_filter.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// Recursive filter tracking an estimate of the round trip time (RTT).
+// The long-term statistics are protected against sudden jumps and slow
+// drifts: when either is detected the filter is restarted from
+// short-term statistics (see rtt_filter.cc).
+class VCMRttFilter
+{
+public:
+    VCMRttFilter(WebRtc_Word32 vcmId = 0, WebRtc_Word32 receiverId = 0);
+
+    VCMRttFilter& operator=(const VCMRttFilter& rhs);
+
+    // Resets the filter.
+    void Reset();
+    // Updates the filter with a new sample.
+    void Update(WebRtc_UWord32 rttMs);
+    // A getter function for the current RTT level in ms.
+    WebRtc_UWord32 RttMs() const;
+
+private:
+    // The size of the drift and jump memory buffers
+    // and thus also the detection threshold for these
+    // detectors in number of samples.
+    enum { kMaxDriftJumpCount = 5 };
+    // Detects RTT jumps by comparing the difference between
+    // samples and average to the standard deviation.
+    // Returns true if the long time statistics should be updated
+    // and false otherwise
+    bool JumpDetection(WebRtc_UWord32 rttMs);
+    // Detects RTT drifts by comparing the difference between
+    // max and average to the standard deviation.
+    // Returns true if the long time statistics should be updated
+    // and false otherwise
+    bool DriftDetection(WebRtc_UWord32 rttMs);
+    // Computes the short time average and maximum of the vector buf.
+    void ShortRttFilter(WebRtc_UWord32* buf, WebRtc_UWord32 length);
+
+    WebRtc_Word32         _vcmId;
+    WebRtc_Word32         _receiverId;
+    // True once at least one valid sample has been filtered in.
+    bool                  _gotNonZeroUpdate;
+    // Long-term statistics: average, variance and maximum of the samples.
+    double                _avgRtt;
+    double                _varRtt;
+    WebRtc_UWord32        _maxRtt;
+    // Number of samples factored into the filter so far, capped at
+    // _filtFactMax.
+    WebRtc_UWord32        _filtFactCount;
+    const WebRtc_UWord32  _filtFactMax;
+    // Detection thresholds expressed in standard deviations.
+    const double          _jumpStdDevs;
+    const double          _driftStdDevs;
+    // Lengths of the current outlier streaks feeding the buffers below.
+    // NOTE(review): _jumpCount appears to go negative for a streak of
+    // negative deviations (the .cc uses abs(_jumpCount)) - confirm there.
+    WebRtc_Word32         _jumpCount;
+    WebRtc_Word32         _driftCount;
+    const WebRtc_Word32   _detectThreshold;
+    WebRtc_UWord32        _jumpBuf[kMaxDriftJumpCount];
+    WebRtc_UWord32        _driftBuf[kMaxDriftJumpCount];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
diff --git a/trunk/src/modules/video_coding/main/source/session_info.cc b/trunk/src/modules/video_coding/main/source/session_info.cc
new file mode 100644
index 0000000..7261403
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/session_info.cc
@@ -0,0 +1,595 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/session_info.h"
+
+#include "modules/video_coding/main/source/packet.h"
+
+namespace webrtc {
+
+// Constructs an empty session: no packets, delta frame type by default,
+// and -1 ("unset") sentinels for the empty-packet sequence number range.
+VCMSessionInfo::VCMSessionInfo()
+    : session_nack_(false),
+      complete_(false),
+      decodable_(false),
+      frame_type_(kVideoFrameDelta),
+      previous_frame_loss_(false),
+      packets_(),
+      empty_seq_num_low_(-1),
+      empty_seq_num_high_(-1),
+      packets_not_decodable_(0) {
+}
+
+// Rebases every packet's data pointer after the owning frame buffer was
+// reallocated from |old_base_ptr| to |new_base_ptr|. Packets without data
+// (NULL dataPtr) are left untouched.
+void VCMSessionInfo::UpdateDataPointers(const uint8_t* old_base_ptr,
+                                        const uint8_t* new_base_ptr) {
+  for (PacketIterator packet_it = packets_.begin();
+       packet_it != packets_.end(); ++packet_it) {
+    if ((*packet_it).dataPtr == NULL)
+      continue;
+    assert(old_base_ptr != NULL && new_base_ptr != NULL);
+    (*packet_it).dataPtr = new_base_ptr + ((*packet_it).dataPtr - old_base_ptr);
+  }
+}
+
+// Lowest media sequence number, falling back to the low end of the empty
+// packet range when the frame holds no media packets.
+int VCMSessionInfo::LowSequenceNumber() const {
+  return packets_.empty() ? empty_seq_num_low_ : packets_.front().seqNum;
+}
+
+// Highest sequence number seen for this frame, media or empty.
+int VCMSessionInfo::HighSequenceNumber() const {
+  if (packets_.empty())
+    return empty_seq_num_high_;
+  return LatestSequenceNumber(packets_.back().seqNum, empty_seq_num_high_,
+                              NULL);
+}
+
+// VP8 picture id, or kNoPictureId when there are no packets or the
+// payload is not VP8.
+int VCMSessionInfo::PictureId() const {
+  if (!packets_.empty() &&
+      packets_.front().codecSpecificHeader.codec == kRTPVideoVP8)
+    return packets_.front().codecSpecificHeader.codecHeader.VP8.pictureId;
+  return kNoPictureId;
+}
+
+// VP8 temporal layer index, or kNoTemporalIdx when unavailable.
+int VCMSessionInfo::TemporalId() const {
+  if (!packets_.empty() &&
+      packets_.front().codecSpecificHeader.codec == kRTPVideoVP8)
+    return packets_.front().codecSpecificHeader.codecHeader.VP8.temporalIdx;
+  return kNoTemporalIdx;
+}
+
+// VP8 layer-sync flag; false when unavailable.
+bool VCMSessionInfo::LayerSync() const {
+  if (!packets_.empty() &&
+      packets_.front().codecSpecificHeader.codec == kRTPVideoVP8)
+    return packets_.front().codecSpecificHeader.codecHeader.VP8.layerSync;
+  return false;
+}
+
+// VP8 TL0PICIDX, or kNoTl0PicIdx when unavailable.
+int VCMSessionInfo::Tl0PicId() const {
+  if (!packets_.empty() &&
+      packets_.front().codecSpecificHeader.codec == kRTPVideoVP8)
+    return packets_.front().codecSpecificHeader.codecHeader.VP8.tl0PicIdx;
+  return kNoTl0PicIdx;
+}
+
+// VP8 non-reference flag; false when unavailable.
+bool VCMSessionInfo::NonReference() const {
+  if (!packets_.empty() &&
+      packets_.front().codecSpecificHeader.codec == kRTPVideoVP8)
+    return packets_.front().codecSpecificHeader.codecHeader.VP8.nonReference;
+  return false;
+}
+
+// Returns the session to its freshly-constructed state so it can be
+// reused for a new frame.
+void VCMSessionInfo::Reset() {
+  packets_.clear();
+  session_nack_ = false;
+  complete_ = false;
+  decodable_ = false;
+  previous_frame_loss_ = false;
+  frame_type_ = kVideoFrameDelta;
+  empty_seq_num_low_ = -1;
+  empty_seq_num_high_ = -1;
+  packets_not_decodable_ = 0;
+}
+
+// Total number of payload bytes across all packets in the session.
+int VCMSessionInfo::SessionLength() const {
+  int total_bytes = 0;
+  PacketIteratorConst packet_it = packets_.begin();
+  for (; packet_it != packets_.end(); ++packet_it)
+    total_bytes += (*packet_it).sizeBytes;
+  return total_bytes;
+}
+
+// Copies the payload of the packet at |packet_it| into |frame_buffer| at
+// the position matching its order in the packet list, optionally
+// prepending an H.264 start code. Subsequent packets already in the
+// buffer are shifted to make room. Returns the number of bytes the frame
+// grew by (payload plus any inserted start code).
+int VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer,
+                                 PacketIterator packet_it) {
+  VCMPacket& packet = *packet_it;
+  PacketIterator it;
+
+  const int start_code_length =
+      packet.insertStartCode ? kH264StartCodeLengthBytes : 0;
+  int packet_size = packet.sizeBytes;
+  packet_size += start_code_length;
+
+  // Calculate the offset into the frame buffer for this packet.
+  int offset = 0;
+  for (it = packets_.begin(); it != packet_it; ++it)
+    offset += (*it).sizeBytes;
+
+  // Set the data pointer to pointing to the start of this packet in the
+  // frame buffer. Remember the source pointer and the original payload
+  // length before they are overwritten.
+  const uint8_t* data = packet.dataPtr;
+  const int payload_length = packet.sizeBytes;
+  packet.dataPtr = frame_buffer + offset;
+  packet.sizeBytes = packet_size;
+
+  ShiftSubsequentPackets(packet_it, packet_size);
+
+  const unsigned char startCode[] = {0, 0, 0, 1};
+  if (packet.insertStartCode) {
+    memcpy(const_cast<uint8_t*>(packet.dataPtr), startCode,
+           kH264StartCodeLengthBytes);
+  }
+  // BUGFIX: copy only the original payload. The previous code copied
+  // packet.sizeBytes, which at this point already included the start
+  // code, so with insertStartCode set it read past the end of |data| and
+  // wrote past the region reserved by ShiftSubsequentPackets().
+  memcpy(const_cast<uint8_t*>(packet.dataPtr + start_code_length),
+         data,
+         payload_length);
+
+  return packet_size;
+}
+
+// Moves the frame-buffer data of every packet *after* |it| by
+// |steps_to_shift| bytes (negative values shift towards the start of the
+// buffer) and adjusts their data pointers accordingly. Assumes those
+// packets are laid out contiguously in the frame buffer.
+void VCMSessionInfo::ShiftSubsequentPackets(PacketIterator it,
+                                            int steps_to_shift) {
+  ++it;
+  if (it == packets_.end())
+    return;
+  uint8_t* first_packet_ptr = const_cast<WebRtc_UWord8*>((*it).dataPtr);
+  int shift_length = 0;
+  // Calculate the total move length and move the data pointers in advance.
+  for (; it != packets_.end(); ++it) {
+    shift_length += (*it).sizeBytes;
+    if ((*it).dataPtr != NULL)
+      (*it).dataPtr += steps_to_shift;
+  }
+  // memmove is required: source and destination ranges overlap.
+  memmove(first_packet_ptr + steps_to_shift, first_packet_ptr, shift_length);
+}
+
+// Marks the session complete when both the first packet and the marker
+// (last) packet are present and every packet in between is in sequence.
+void VCMSessionInfo::UpdateCompleteSession() {
+  if (!packets_.front().isFirstPacket || !packets_.back().markerBit)
+    return;
+  // Do we have all the packets in this session?
+  PacketIterator prev_it = packets_.begin();
+  PacketIterator it = prev_it;
+  for (++it; it != packets_.end(); ++it) {
+    if (!InSequence(it, prev_it)) {
+      // A gap was found; the session is (still) incomplete.
+      complete_ = false;
+      return;
+    }
+    prev_it = it;
+  }
+  complete_ = true;
+}
+
+// When enabled, decides whether an incomplete session may still be sent
+// to the decoder. Currently a stub: |rttMs| is unused and decodable_ is
+// never set here.
+void VCMSessionInfo::UpdateDecodableSession(int rttMs) {
+  // Irrelevant if session is already complete or decodable
+  if (complete_ || decodable_)
+    return;
+  // First iteration - do nothing
+}
+
+// True once every packet of the frame has been received.
+bool VCMSessionInfo::complete() const { return complete_; }
+
+// True when an incomplete frame has been judged decodable anyway.
+bool VCMSessionInfo::decodable() const { return decodable_; }
+
+// Find the end of the NAL unit which the packet pointed to by |packet_it|
+// belongs to. Returns an iterator to the last packet of the frame if the end
+// of the NAL unit wasn't found.
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindNaluEnd(
+    PacketIterator packet_it) const {
+  if ((*packet_it).completeNALU == kNaluEnd ||
+      (*packet_it).completeNALU == kNaluComplete) {
+    return packet_it;
+  }
+  // Find the end of the NAL unit.
+  for (; packet_it != packets_.end(); ++packet_it) {
+    if (((*packet_it).completeNALU == kNaluComplete &&
+        (*packet_it).sizeBytes > 0) ||
+        // Found next NALU.
+        (*packet_it).completeNALU == kNaluStart)
+      // Step back: the current packet opens the next NAL unit, so the
+      // previous packet was the last of this one.
+      return --packet_it;
+    if ((*packet_it).completeNALU == kNaluEnd)
+      return packet_it;
+  }
+  // The end wasn't found.
+  // packet_it == packets_.end() here, so --packet_it is the frame's last
+  // packet, matching the contract in the function comment above.
+  return --packet_it;
+}
+
+// Releases the payload of every packet in [start, end] inclusive: sizes
+// are zeroed, data pointers cleared and the frame-buffer bytes they
+// occupied are reclaimed by shifting the following packets down. The
+// packets themselves stay in the list. Returns the number of bytes
+// deleted.
+int VCMSessionInfo::DeletePacketData(PacketIterator start,
+                                     PacketIterator end) {
+  int bytes_to_delete = 0;  // The number of bytes to delete.
+  PacketIterator packet_after_end = end;
+  ++packet_after_end;
+
+  // Get the number of bytes to delete.
+  // Clear the size of these packets.
+  for (PacketIterator it = start; it != packet_after_end; ++it) {
+    bytes_to_delete += (*it).sizeBytes;
+    (*it).sizeBytes = 0;
+    (*it).dataPtr = NULL;
+    ++packets_not_decodable_;
+  }
+  // Negative shift compacts the frame buffer over the deleted region.
+  if (bytes_to_delete > 0)
+    ShiftSubsequentPackets(end, -bytes_to_delete);
+  return bytes_to_delete;
+}
+
+// Builds the RTP fragmentation header describing the decodable VP8
+// partitions of this frame: one fragment (offset/length into
+// |frame_buffer|) per partition id. Packets that belong to a partition
+// with a preceding loss are skipped and counted in
+// packets_not_decodable_. Returns the total number of decodable bytes.
+int VCMSessionInfo::BuildVP8FragmentationHeader(
+    uint8_t* frame_buffer,
+    int frame_buffer_length,
+    RTPFragmentationHeader* fragmentation) {
+  int new_length = 0;
+  // Allocate space for max number of partitions
+  fragmentation->VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
+  fragmentation->fragmentationVectorSize = 0;
+  memset(fragmentation->fragmentationLength, 0,
+         kMaxVP8Partitions * sizeof(WebRtc_UWord32));
+  if (packets_.empty())
+      return new_length;
+  PacketIterator it = FindNextPartitionBeginning(packets_.begin(),
+                                                 &packets_not_decodable_);
+  while (it != packets_.end()) {
+    const int partition_id =
+        (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+    PacketIterator partition_end = FindPartitionEnd(it);
+    fragmentation->fragmentationOffset[partition_id] =
+        (*it).dataPtr - frame_buffer;
+    assert(fragmentation->fragmentationOffset[partition_id] <
+           static_cast<WebRtc_UWord32>(frame_buffer_length));
+    // Length spans from the first byte of the partition's first packet to
+    // the last byte of its last packet (contiguous in the frame buffer).
+    fragmentation->fragmentationLength[partition_id] =
+        (*partition_end).dataPtr + (*partition_end).sizeBytes - (*it).dataPtr;
+    assert(fragmentation->fragmentationLength[partition_id] <=
+           static_cast<WebRtc_UWord32>(frame_buffer_length));
+    new_length += fragmentation->fragmentationLength[partition_id];
+    ++partition_end;
+    it = FindNextPartitionBeginning(partition_end, &packets_not_decodable_);
+    // The vector size is the highest partition id seen plus one.
+    if (partition_id + 1 > fragmentation->fragmentationVectorSize)
+      fragmentation->fragmentationVectorSize = partition_id + 1;
+  }
+  // Set all empty fragments to start where the previous fragment ends,
+  // and have zero length.
+  if (fragmentation->fragmentationLength[0] == 0)
+      fragmentation->fragmentationOffset[0] = 0;
+  for (int i = 1; i < fragmentation->fragmentationVectorSize; ++i) {
+    if (fragmentation->fragmentationLength[i] == 0)
+      fragmentation->fragmentationOffset[i] =
+          fragmentation->fragmentationOffset[i - 1] +
+          fragmentation->fragmentationLength[i - 1];
+    assert(i == 0 ||
+           fragmentation->fragmentationOffset[i] >=
+           fragmentation->fragmentationOffset[i - 1]);
+  }
+  assert(new_length <= frame_buffer_length);
+  return new_length;
+}
+
+// Advances |it| to the first packet that begins a VP8 partition, or to
+// |packets_.end()| if none remains. Each skipped packet (which belongs to
+// a partition with a previous loss and thus can't be decoded) is counted
+// in |*packets_skipped| when the pointer is non-NULL.
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindNextPartitionBeginning(
+    PacketIterator it, int* packets_skipped) const {
+  for (; it != packets_.end(); ++it) {
+    if ((*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition)
+      break;
+    if (packets_skipped != NULL)
+      ++(*packets_skipped);
+  }
+  return it;
+}
+
+// Returns an iterator to the last packet of the partition that |it|
+// points into. The partition ends at the last in-sequence packet before
+// either a sequence number gap or the first packet of a different
+// partition.
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindPartitionEnd(
+    PacketIterator it) const {
+  assert((*it).codec == kVideoCodecVP8);
+  PacketIterator prev_it = it;
+  const int partition_id =
+      (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+  while (it != packets_.end()) {
+    bool beginning =
+        (*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition;
+    int current_partition_id =
+        (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+    // A gap mid-partition means data was lost inside this partition.
+    bool packet_loss_found = (!beginning && !InSequence(it, prev_it));
+    if (packet_loss_found ||
+        (beginning && current_partition_id != partition_id)) {
+      // Missing packet, the previous packet was the last in sequence.
+      return prev_it;
+    }
+    prev_it = it;
+    ++it;
+  }
+  return prev_it;
+}
+
+// Two packets are in sequence when they are the same packet, or when the
+// second sequence number is exactly one above the first (mod 2^16).
+bool VCMSessionInfo::InSequence(const PacketIterator& packet_it,
+                                const PacketIterator& prev_packet_it) {
+  if (packet_it == prev_packet_it)
+    return true;
+  const WebRtc_UWord16 expected =
+      static_cast<WebRtc_UWord16>((*prev_packet_it).seqNum + 1);
+  return expected == (*packet_it).seqNum;
+}
+
+// Strips every NAL unit the decoder could not use: a leading partial NAL
+// unit, and any NAL unit containing a sequence number gap. The frame
+// buffer is compacted as data is removed. Returns the number of bytes
+// deleted from the session.
+int VCMSessionInfo::MakeDecodable() {
+  int return_length = 0;
+  if (packets_.empty()) {
+    return 0;
+  }
+  PacketIterator it = packets_.begin();
+  // Make sure we remove the first NAL unit if it's not decodable.
+  if ((*it).completeNALU == kNaluIncomplete ||
+      (*it).completeNALU == kNaluEnd) {
+    PacketIterator nalu_end = FindNaluEnd(it);
+    return_length += DeletePacketData(it, nalu_end);
+    it = nalu_end;
+  }
+  PacketIterator prev_it = it;
+  // Take care of the rest of the NAL units.
+  for (; it != packets_.end(); ++it) {
+    bool start_of_nalu = ((*it).completeNALU == kNaluStart ||
+        (*it).completeNALU == kNaluComplete);
+    if (!start_of_nalu && !InSequence(it, prev_it)) {
+      // Found a sequence number gap due to packet loss.
+      PacketIterator nalu_end = FindNaluEnd(it);
+      return_length += DeletePacketData(it, nalu_end);
+      it = nalu_end;
+    }
+    prev_it = it;
+  }
+  return return_length;
+}
+
+// Hard NACK: zeroes out every entry of |seq_num_list| from the entry
+// matching this session's first packet up to the last received packet,
+// leaving gaps (lost packets) untouched so they will be NACKed. Sets
+// session_nack_ when a loss is detected. Returns 0 on success, -1 on
+// invalid input.
+int VCMSessionInfo::BuildHardNackList(int* seq_num_list,
+                                      int seq_num_list_length) {
+  if (NULL == seq_num_list || seq_num_list_length < 1) {
+    return -1;
+  }
+  if (packets_.empty()) {
+    return 0;
+  }
+
+  // Find end point (index of entry equals the sequence number of the first
+  // packet).
+  int index = 0;
+  for (; index < seq_num_list_length; ++index) {
+    if (seq_num_list[index] == packets_.front().seqNum) {
+      seq_num_list[index] = -1;
+      ++index;
+      break;
+    }
+  }
+
+  // Zero out between the first entry and the end point.
+  PacketIterator it = packets_.begin();
+  PacketIterator prev_it = it;
+  ++it;
+  while (it != packets_.end() && index < seq_num_list_length) {
+    if (!InSequence(it, prev_it)) {
+      // Found a sequence number gap due to packet loss.
+      index += PacketsMissing(it, prev_it);
+      session_nack_ = true;
+    }
+    // BUGFIX: re-check the bound after skipping a gap; PacketsMissing()
+    // may have pushed |index| past the end of the list, and the previous
+    // code wrote seq_num_list[index] unconditionally (out-of-bounds).
+    if (index < seq_num_list_length)
+      seq_num_list[index] = -1;
+    ++index;
+    prev_it = it;
+    ++it;
+  }
+  if (!packets_.front().isFirstPacket)
+    session_nack_ = true;
+  return 0;
+}
+
+// Soft NACK: like BuildHardNackList, but each lost packet is scored
+// (currently only by temporal layer; RTT scoring is a TODO) and is left
+// to be NACKed only when the score passes the threshold; lower-importance
+// losses are zeroed out. Trailing empty (FEC/padding) packets are marked
+// with -2 so they are never NACKed. Returns 0 on success, -1 on invalid
+// input.
+int VCMSessionInfo::BuildSoftNackList(int* seq_num_list,
+                                      int seq_num_list_length,
+                                      int rtt_ms) {
+  if (NULL == seq_num_list || seq_num_list_length < 1) {
+    return -1;
+  }
+  if (packets_.empty() && empty_seq_num_low_ == -1) {
+    return 0;
+  }
+
+  int index = 0;
+  int low_seq_num = (packets_.empty()) ? empty_seq_num_low_:
+      packets_.front().seqNum;
+  // Find entrance point (index of entry equals the sequence number of the
+  // first packet).
+  for (; index < seq_num_list_length; ++index) {
+    if (seq_num_list[index] == low_seq_num) {
+      seq_num_list[index] = -1;
+      break;
+    }
+  }
+
+  // TODO(mikhal): 1. Update score based on RTT value 2. Add partition data.
+  // Use the previous available.
+  bool base_available = false;
+  // BUGFIX: guard |index| before reading seq_num_list[index]; when the
+  // entrance point wasn't found above, index == seq_num_list_length and
+  // the previous code read past the end of the list.
+  if ((index > 0) && (index < seq_num_list_length) &&
+      (seq_num_list[index] == -1)) {
+    // Found first packet, for now let's go only one back.
+    if ((seq_num_list[index - 1] == -1) || (seq_num_list[index - 1] == -2)) {
+      // This is indeed the first packet, as previous packet was populated.
+      base_available = true;
+    }
+  }
+  bool allow_nack = ((packets_.size() > 0 && !packets_.front().isFirstPacket)
+    || !base_available);
+
+  // Estimate the highest media sequence number of this frame.
+  int media_high_seq_num;
+  if (HaveLastPacket()) {
+    media_high_seq_num = packets_.back().seqNum;
+  } else {
+    // Estimation.
+    if (empty_seq_num_low_ >= 0) {
+      // Assuming empty packets have later sequence numbers than media packets.
+      media_high_seq_num = empty_seq_num_low_ - 1;
+    } else {
+      // Since this frame doesn't have the marker bit we can assume it should
+      // contain at least one more packet.
+      media_high_seq_num = static_cast<uint16_t>(packets_.back().seqNum + 1);
+    }
+  }
+
+  // Compute session/packet scores and thresholds:
+  // based on RTT and layer info (when available).
+  float nack_score_threshold = 0.25f;
+  float layer_score = TemporalId() > 0 ? 0.0f : 1.0f;
+  float rtt_score = 1.0f;
+  float score_multiplier = rtt_score * layer_score;
+  // Zero out between first entry and end point.
+  if (!packets_.empty()) {
+    PacketIterator it = packets_.begin();
+    PacketIterator prev_it = it;
+    ++index;
+    ++it;
+    // TODO(holmer): Rewrite this in a way which better makes use of the list.
+    while (it != packets_.end() && index < seq_num_list_length) {
+      // Only process media packet sequence numbers.
+      if (LatestSequenceNumber((*it).seqNum, media_high_seq_num, NULL) ==
+          (*it).seqNum && (*it).seqNum != media_high_seq_num)
+        break;
+      if (!InSequence(it, prev_it)) {
+        // Found a sequence number gap due to packet loss.
+        int num_lost = PacketsMissing(it, prev_it);
+        for (int i = 0 ; i < num_lost; ++i) {
+          // Compute score of the packet.
+          float score = 1.0f;
+          // Multiply internal score (packet) by score multiplier.
+          score *= score_multiplier;
+          if (score > nack_score_threshold) {
+            allow_nack = true;
+          } else if (index < seq_num_list_length) {
+            // BUGFIX: bounds-checked; a long gap can push |index| past the
+            // end of the list.
+            seq_num_list[index] = -1;
+          }
+          ++index;
+        }
+      }
+      // BUGFIX: the gap handling above may have advanced |index| past the
+      // end of the list; the previous code wrote unconditionally here.
+      if (index < seq_num_list_length)
+        seq_num_list[index] = -1;
+      ++index;
+      prev_it = it;
+      ++it;
+    }
+  }
+
+  // Empty packets follow the data packets, and therefore have a higher
+  // sequence number. We do not want to NACK empty packets.
+  if ((empty_seq_num_low_ != -1) && (empty_seq_num_high_ != -1) &&
+      (index < seq_num_list_length)) {
+    // First make sure that we are at least at the minimum value (if not we
+    // are missing last packet(s)).
+    // BUGFIX: test |index| BEFORE dereferencing seq_num_list[index]; the
+    // previous condition order read one element past the end of the list
+    // on loop exit.
+    while (index < seq_num_list_length &&
+        seq_num_list[index] < empty_seq_num_low_) {
+      ++index;
+    }
+
+    // Mark empty packets.
+    while (index < seq_num_list_length &&
+        seq_num_list[index] <= empty_seq_num_high_) {
+      seq_num_list[index] = -2;
+      ++index;
+    }
+  }
+
+  session_nack_ = allow_nack;
+  return 0;
+}
+
+// Number of packets lost between |prev_packet_it| and |packet_it|
+// (exclusive), accounting for 16-bit sequence number wrap-around.
+int VCMSessionInfo::PacketsMissing(const PacketIterator& packet_it,
+                                   const PacketIterator& prev_packet_it) {
+  if (packet_it == prev_packet_it)
+    return 0;
+  const WebRtc_UWord16 prev_seq_num = (*prev_packet_it).seqNum;
+  const WebRtc_UWord16 seq_num = (*packet_it).seqNum;
+  if (prev_seq_num > seq_num)  // Wrap.
+    return static_cast<WebRtc_UWord16>(
+        static_cast<WebRtc_UWord32>(seq_num + 0x10000) -
+        prev_seq_num) - 1;
+  return seq_num - prev_seq_num - 1;
+}
+
+// True when the marker-bit packet (last packet of the frame) is present.
+bool VCMSessionInfo::HaveLastPacket() const {
+  return !packets_.empty() && packets_.back().markerBit;
+}
+
+// True if this session has been flagged for NACKing by the jitter buffer.
+bool VCMSessionInfo::session_nack() const {
+  return session_nack_;
+}
+
+// Inserts |packet| into the session in sequence number order and copies
+// its payload into |frame_buffer|. Empty (FEC/padding) packets only
+// update the empty sequence number range and are not stored. Returns the
+// number of bytes inserted, 0 for an empty packet, -1 when the session is
+// full, and -2 for a duplicate packet.
+int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
+                                 uint8_t* frame_buffer,
+                                 bool enable_decodable_state,
+                                 int rtt_ms) {
+  // Check if this is first packet (only valid for some codecs)
+  if (packet.isFirstPacket) {
+    // The first packet in a frame signals the frame type.
+    frame_type_ = packet.frameType;
+  } else if (frame_type_ == kFrameEmpty && packet.frameType != kFrameEmpty) {
+    // Update the frame type with the first media packet.
+    frame_type_ = packet.frameType;
+  }
+  if (packet.frameType == kFrameEmpty) {
+    // Update sequence number of an empty packet.
+    // Only media packets are inserted into the packet list.
+    InformOfEmptyPacket(packet.seqNum);
+    return 0;
+  }
+
+  if (packets_.size() == kMaxPacketsInSession)
+    return -1;
+
+  // Find the position of this packet in the packet list in sequence number
+  // order and insert it. Loop over the list in reverse order.
+  ReversePacketIterator rit = packets_.rbegin();
+  for (; rit != packets_.rend(); ++rit)
+    if (LatestSequenceNumber((*rit).seqNum, packet.seqNum, NULL) ==
+        packet.seqNum)
+      break;
+
+  // Check for duplicate packets.
+  if (rit != packets_.rend() &&
+      (*rit).seqNum == packet.seqNum && (*rit).sizeBytes > 0)
+    return -2;
+
+  // The insert operation invalidates the iterator |rit|.
+  // rit.base() is the forward iterator one position after |rit|, i.e. the
+  // insertion point that keeps the list ordered by sequence number.
+  PacketIterator packet_list_it = packets_.insert(rit.base(), packet);
+
+  int returnLength = InsertBuffer(frame_buffer, packet_list_it);
+  UpdateCompleteSession();
+  if (enable_decodable_state)
+    UpdateDecodableSession(rtt_ms);
+  return returnLength;
+}
+
+// Records the sequence number of an empty (FEC/filler) packet by widening
+// the tracked [low, high] range of empty packets for this frame.
+void VCMSessionInfo::InformOfEmptyPacket(uint16_t seq_num) {
+  // Empty packets may be FEC or filler packets. They are sequential and
+  // follow the data packets, therefore, we should only keep track of the high
+  // and low sequence numbers and may assume that the packets in between are
+  // empty packets belonging to the same frame (timestamp).
+  empty_seq_num_high_ = LatestSequenceNumber(seq_num, empty_seq_num_high_,
+                                             NULL);
+  // Lower the low bound when the range is unset, or when (per
+  // LatestSequenceNumber) |seq_num| is not later than the current low.
+  if (empty_seq_num_low_ == -1 ||
+      LatestSequenceNumber(seq_num, empty_seq_num_low_, NULL) ==
+          empty_seq_num_low_)
+    empty_seq_num_low_ = seq_num;
+}
+
+// Number of packets discarded because the decoder cannot make use of them.
+int VCMSessionInfo::packets_not_decodable() const {
+  return packets_not_decodable_;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/session_info.h b/trunk/src/modules/video_coding/main/source/session_info.h
new file mode 100644
index 0000000..27533ce
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/session_info.h
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
+
+#include <list>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+// Bookkeeping for a single video frame while it is being assembled from
+// RTP packets: the ordered packet list, completeness/decodability state,
+// and the NACK-list helpers used by the jitter buffer.
+class VCMSessionInfo {
+ public:
+  VCMSessionInfo();
+
+  void UpdateDataPointers(const uint8_t* old_base_ptr,
+                          const uint8_t* new_base_ptr);
+  // NACK - Building the NACK lists.
+  // Build hard NACK list: Zero out all entries in list up to and including
+  // _lowSeqNum.
+  int BuildHardNackList(int* seq_num_list,
+                        int seq_num_list_length);
+
+  // Build soft NACK list:  Zero out only a subset of the packets, discard
+  // empty packets.
+  int BuildSoftNackList(int* seq_num_list,
+                        int seq_num_list_length,
+                        int rtt_ms);
+  void Reset();
+  // Returns bytes inserted, 0 for an empty packet, or a negative error
+  // code (-1 session full, -2 duplicate).
+  int InsertPacket(const VCMPacket& packet,
+                   uint8_t* frame_buffer,
+                   bool enable_decodable_state,
+                   int rtt_ms);
+  bool complete() const;
+  bool decodable() const;
+
+  // Builds fragmentation headers for VP8, each fragment being a decodable
+  // VP8 partition. Returns the total number of bytes which are decodable. Is
+  // used instead of MakeDecodable for VP8.
+  int BuildVP8FragmentationHeader(uint8_t* frame_buffer,
+                                  int frame_buffer_length,
+                                  RTPFragmentationHeader* fragmentation);
+
+  // Makes the frame decodable. I.e., only contain decodable NALUs. All
+  // non-decodable NALUs will be deleted and packets will be moved to in
+  // memory to remove any empty space.
+  // Returns the number of bytes deleted from the session.
+  int MakeDecodable();
+  int SessionLength() const;
+  bool HaveLastPacket() const;
+  bool session_nack() const;
+  webrtc::FrameType FrameType() const { return frame_type_; }
+  int LowSequenceNumber() const;
+
+  // Returns highest sequence number, media or empty.
+  int HighSequenceNumber() const;
+  int PictureId() const;
+  int TemporalId() const;
+  bool LayerSync() const;
+  int Tl0PicId() const;
+  bool NonReference() const;
+  void SetPreviousFrameLoss() { previous_frame_loss_ = true; }
+  bool PreviousFrameLoss() const { return previous_frame_loss_; }
+
+  // The number of packets discarded because the decoder can't make use of
+  // them.
+  int packets_not_decodable() const;
+
+ private:
+  enum { kMaxVP8Partitions = 9 };
+
+  typedef std::list<VCMPacket> PacketList;
+  typedef PacketList::iterator PacketIterator;
+  typedef PacketList::const_iterator PacketIteratorConst;
+  typedef PacketList::reverse_iterator ReversePacketIterator;
+
+  void InformOfEmptyPacket(uint16_t seq_num);
+
+  // Finds the packet of the beginning of the next VP8 partition. If
+  // none is found the returned iterator points to |packets_.end()|.
+  // |it| is expected to point to the last packet of the previous partition,
+  // or to the first packet of the frame. |packets_skipped| is incremented
+  // for each packet found which doesn't have the beginning bit set.
+  PacketIterator FindNextPartitionBeginning(PacketIterator it,
+                                            int* packets_skipped) const;
+
+  // Returns an iterator pointing to the last packet of the partition pointed to
+  // by |it|.
+  PacketIterator FindPartitionEnd(PacketIterator it) const;
+  static bool InSequence(const PacketIterator& it,
+                         const PacketIterator& prev_it);
+  static int PacketsMissing(const PacketIterator& packet_it,
+                            const PacketIterator& prev_packet_it);
+  int InsertBuffer(uint8_t* frame_buffer,
+                   PacketIterator packetIterator);
+  // Shifts the frame-buffer data of all packets after |it| by
+  // |steps_to_shift| bytes (may be negative).
+  void ShiftSubsequentPackets(PacketIterator it, int steps_to_shift);
+  PacketIterator FindNaluEnd(PacketIterator packet_iter) const;
+  // Deletes the data of all packets between |start| and |end|, inclusively.
+  // Note that this function doesn't delete the actual packets.
+  int DeletePacketData(PacketIterator start,
+                       PacketIterator end);
+  void UpdateCompleteSession();
+
+  // When enabled, determine if session is decodable, i.e. incomplete but
+  // would be sent to the decoder.
+  void UpdateDecodableSession(int rtt_ms);
+
+  // If this session has been NACKed by the jitter buffer.
+  bool session_nack_;
+  bool complete_;
+  bool decodable_;
+  webrtc::FrameType frame_type_;
+  bool previous_frame_loss_;
+  // Packets in this frame.
+  PacketList packets_;
+  // Sequence number range of empty (FEC/filler) packets; -1 when unset.
+  int empty_seq_num_low_;
+  int empty_seq_num_high_;
+  // Number of packets discarded because the decoder can't use them.
+  int packets_not_decodable_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
diff --git a/trunk/src/modules/video_coding/main/source/session_info_unittest.cc b/trunk/src/modules/video_coding/main/source/session_info_unittest.cc
new file mode 100644
index 0000000..e017735
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/session_info_unittest.cc
@@ -0,0 +1,931 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "gtest/gtest.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "modules/video_coding/main/source/session_info.h"
+
+namespace webrtc {
+
+class TestSessionInfo : public ::testing::Test {  // Base fixture: one session fed from a reusable template packet.
+ protected:
+  enum { kPacketBufferSize = 10 };  // Payload bytes carried by every test packet.
+  enum { kFrameBufferSize = 10 * kPacketBufferSize };  // Frame buffer holds up to 10 packets.
+
+  virtual void SetUp() {  // Zero both buffers and reset session/packet to a known delta-frame state.
+    memset(packet_buffer_, 0, kPacketBufferSize);
+    memset(frame_buffer_, 0, kFrameBufferSize);
+    session_.Reset();
+    packet_.Reset();
+    packet_.frameType = kVideoFrameDelta;
+    packet_.sizeBytes = kPacketBufferSize;
+    packet_.dataPtr = packet_buffer_;
+    packet_.seqNum = 0;
+    packet_.timestamp = 0;
+  }
+
+  void FillPacket(uint8_t start_value) {  // Fill packet_buffer_ with start_value, start_value+1, ...
+    for (int i = 0; i < kPacketBufferSize; ++i)
+      packet_buffer_[i] = start_value + i;
+  }
+
+  void VerifyPacket(uint8_t* start_ptr, uint8_t start_value) {  // Assert one packet's payload is the FillPacket ramp.
+    for (int j = 0; j < kPacketBufferSize; ++j) {
+      ASSERT_EQ(start_value + j, start_ptr[j]);
+    }
+  }
+
+  uint8_t packet_buffer_[kPacketBufferSize];  // Source payload for the next insert.
+  uint8_t frame_buffer_[kFrameBufferSize];  // Destination frame assembled by the session.
+  VCMSessionInfo session_;  // Object under test.
+  VCMPacket packet_;  // Template packet mutated by each test.
+};
+
+class TestVP8Partitions : public TestSessionInfo {  // Fixture for VP8 partition/fragmentation tests.
+ protected:
+  enum { kMaxVP8Partitions = 9 };  // VP8 allows at most 9 partitions (1 first + 8 token).
+
+  virtual void SetUp() {  // Prepare a VP8 RTP header and room for all partitions.
+    TestSessionInfo::SetUp();
+    vp8_header_ = &packet_header_.type.Video.codecHeader.VP8;
+    packet_header_.frameType = kVideoFrameDelta;
+    packet_header_.type.Video.codec = kRTPVideoVP8;
+    vp8_header_->InitRTPVideoHeaderVP8();
+    fragmentation_.VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
+  }
+
+  // Check that partition |partition_id| spans |packets_expected| packets whose
+  // payloads ramp from |start_value|; returns false only on an out-of-bounds
+  // offset (content mismatches are reported through EXPECT/ASSERT).
+  bool VerifyPartition(int partition_id,
+                       int packets_expected,
+                       int start_value) {
+    EXPECT_EQ(static_cast<uint32_t>(packets_expected * kPacketBufferSize),
+              fragmentation_.fragmentationLength[partition_id]);
+    for (int i = 0; i < packets_expected; ++i) {
+      int packet_index = fragmentation_.fragmentationOffset[partition_id] +
+          i * kPacketBufferSize;
+      if (packet_index + kPacketBufferSize > kFrameBufferSize)
+        return false;
+      VerifyPacket(frame_buffer_ + packet_index, start_value + i);
+    }
+    return true;
+  }
+
+  WebRtcRTPHeader packet_header_;  // Template RTP header for constructed VCMPackets.
+  RTPVideoHeaderVP8* vp8_header_;  // Shortcut into packet_header_'s VP8 codec header.
+  RTPFragmentationHeader fragmentation_;  // Filled by BuildVP8FragmentationHeader().
+};
+
+class TestNalUnits : public TestSessionInfo {  // Fixture for H.264-style NALU completeness tests.
+ protected:
+  virtual void SetUp() {
+    TestSessionInfo::SetUp();
+    packet_.codec = kVideoCodecVP8;  // NOTE(review): NALU tests run with the VP8 codec tag — confirm intended.
+  }
+
+  // Check |packets_expected| consecutive packets starting at packet slot
+  // |offset| ramp from |start_value|; mismatches surface via EXPECT/ASSERT.
+  bool VerifyNalu(int offset, int packets_expected, int start_value) {
+    EXPECT_GE(session_.SessionLength(),
+              packets_expected * kPacketBufferSize);
+    for (int i = 0; i < packets_expected; ++i) {
+      int packet_index = offset * kPacketBufferSize + i * kPacketBufferSize;
+      VerifyPacket(frame_buffer_ + packet_index, start_value + i);
+    }
+    return true;
+  }
+};
+
+class TestNackList : public TestSessionInfo {  // Fixture for hard/soft NACK list construction.
+ protected:
+  enum { kMaxSeqNumListLength = 30 };  // Upper bound on the sequence-number list a test may build.
+
+  virtual void SetUp() {
+    TestSessionInfo::SetUp();
+    seq_num_list_length_ = 0;
+    memset(seq_num_list_, 0, sizeof(seq_num_list_));
+  }
+
+  // Fill seq_num_list_ with [low..high] inclusive; low wraps at 2^16.
+  // NOTE(review): high == 0xFFFF never matches (high + 1 promotes to int
+  // 0x10000), so the loop would only stop via the length guard below.
+  void BuildSeqNumList(uint16_t low,
+                       uint16_t high) {
+    int i = 0;
+    while (low != high + 1) {
+      EXPECT_LT(i, kMaxSeqNumListLength);
+      if (i >= kMaxSeqNumListLength) {
+        seq_num_list_length_ = kMaxSeqNumListLength;
+        return;
+      }
+      seq_num_list_[i] = low;
+      low++;
+      i++;
+    }
+    seq_num_list_length_ = i;
+  }
+
+  void VerifyAll(int value) {  // Expect every list entry to equal |value|.
+    for (int i = 0; i < seq_num_list_length_; ++i)
+      EXPECT_EQ(seq_num_list_[i], value);
+  }
+
+  int seq_num_list_[kMaxSeqNumListLength];  // In: candidate seq nums; out: NACK markers from the session.
+  int seq_num_list_length_;  // Number of valid entries in seq_num_list_.
+};
+
+TEST_F(TestSessionInfo, TestSimpleAPIs) {  // Accessor sanity: frame type, last-packet flag, low/high seq nums.
+  packet_.isFirstPacket = true;
+  packet_.seqNum = 0xFFFE;
+  packet_.sizeBytes = kPacketBufferSize;
+  packet_.frameType = kVideoFrameKey;
+  FillPacket(0);
+  ASSERT_EQ(kPacketBufferSize,
+            session_.InsertPacket(packet_, frame_buffer_, false, 0));
+  EXPECT_FALSE(session_.HaveLastPacket());
+  EXPECT_EQ(kVideoFrameKey, session_.FrameType());
+
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  packet_.seqNum += 1;
+  ASSERT_EQ(kPacketBufferSize,
+            session_.InsertPacket(packet_, frame_buffer_, false, 0));
+  EXPECT_TRUE(session_.HaveLastPacket());
+  EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
+  EXPECT_EQ(0xFFFE, session_.LowSequenceNumber());
+
+  // Insert empty packet which will be the new high sequence number.
+  // To make things more difficult we will make sure to have a wrap here.
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  packet_.seqNum  = 2;
+  packet_.sizeBytes = 0;
+  packet_.frameType = kFrameEmpty;
+  ASSERT_EQ(0,
+            session_.InsertPacket(packet_, frame_buffer_, false, 0));
+  EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
+}
+
+TEST_F(TestSessionInfo, NormalOperation) {  // 10 in-order packets (with seq-num wrap) assemble losslessly.
+  packet_.seqNum = 0xFFFF;
+  packet_.isFirstPacket = true;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  for (int i = 1; i < 9; ++i) {  // Middle packets 1..8; seq nums wrap past 0xFFFF.
+    packet_.seqNum += 1;
+    FillPacket(i);
+    ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+              kPacketBufferSize);
+  }
+
+  packet_.seqNum += 1;
+  packet_.markerBit = true;  // Marker bit flags the final packet of the frame.
+  FillPacket(9);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  EXPECT_EQ(10 * kPacketBufferSize, session_.SessionLength());
+  for (int i = 0; i < 10; ++i) {
+    SCOPED_TRACE("Calling VerifyPacket");
+    VerifyPacket(frame_buffer_ + i * kPacketBufferSize, i);
+  }
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsOneLoss) {  // Loss of packet 1 truncates partition 0 to its first packet.
+  // Partition 0 | Partition 1
+  // [ 0 ] [ 2 ] | [ 3 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 2;  // Skip one seq num: packet 1 is "lost".
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // One packet should be removed (end of partition 0).
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            2*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");  // NOTE(review): traces stay active to scope end, so they stack.
+  EXPECT_TRUE(VerifyPartition(0, 1, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 1, 3));
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsOneLoss2) {  // Loss of packet 4 drops packet 5 from the end of partition 1.
+  // Partition 0 | Partition 1
+  // [ 1 ] [ 2 ] | [ 3 ] [ 5 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 1;
+  FillPacket(1);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0)
+            , kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 2;  // Skip one seq num: packet 4 is "lost".
+  FillPacket(5);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // One packet should be removed (end of partition 1), 3 left.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            3*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 1));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 1, 3));
+  EXPECT_EQ(1, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsNoLossWrap) {  // Seq-num wrap with no loss keeps all four packets.
+  // Partition 0       | Partition 1
+  // [ fffd ] [ fffe ] | [ ffff ] [ 0 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0xfffd;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(1);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;  // Wraps from 0xffff to 0.
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // No packet should be removed.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            4*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 2, 2));
+  EXPECT_EQ(0, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsLossWrap) {  // Loss across the seq-num wrap truncates partition 1.
+  // Partition 0       | Partition 1
+  // [ fffd ] [ fffe ] | [ ffff ] [ 1 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0xfffd;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(1);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 2;  // Skip seq num 0: the wrapped packet is "lost".
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // One packet should be removed from the last partition.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            3*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 1, 2));
+  EXPECT_EQ(1, session_.packets_not_decodable());
+}
+
+
+TEST_F(TestVP8Partitions, ThreePartitionsOneMissing) {  // Middle partition entirely lost; outer two survive intact.
+  // Partition 0  |Partition 1    | Partition 2
+  // [ 1 ] [ 2 ]  |               | [ 5 ] [ 6 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 1;
+  FillPacket(1);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 3;  // Skip seq nums 3 and 4: partition 1 never arrives.
+  FillPacket(5);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(6);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // No packet should be removed.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            4*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 1));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(2, 2, 5));
+  EXPECT_EQ(0, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, ThreePartitionsLossInSecond) {  // Partition 1 lost its first packet, so all of it is dropped.
+  // Partition 0  |Partition 1          | Partition 2
+  // [ 1 ] [ 2 ]  |        [ 4 ] [ 5 ]  | [ 6 ] [ 7 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 1;
+  FillPacket(1);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;  // Partition 1's beginning packet (seq 3) is "lost".
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 2;
+  FillPacket(4);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(5);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(6);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(7);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // 2 partitions left. 2 packets removed from second partition.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            4*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 1));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(2, 2, 6));
+  EXPECT_EQ(2, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, AggregationOverTwoPackets) {  // A packet spanning partitions 0 and 1 merges them into one fragment.
+  // Partition 0   | Partition 1         | Partition 2
+  // [ 0           |           ]  [ 1 ]  | [ 2 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;  // Continues the aggregate started in partition 0.
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(1);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // No packets removed.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            3*kPacketBufferSize);
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 0));
+  // This partition is aggregated in partition 0
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 0, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(2, 1, 2));
+}
+
+TEST_F(TestNalUnits, OnlyReceivedEmptyPacket) {  // A lone empty packet contributes no decodable data.
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluComplete;
+  packet_.frameType = kFrameEmpty;
+  packet_.sizeBytes = 0;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  ASSERT_EQ(0, session_.InsertPacket(packet_, frame_buffer_, false, 0));
+
+  EXPECT_EQ(0, session_.MakeDecodable());  // Nothing to strip from an empty session.
+  EXPECT_EQ(0, session_.SessionLength());
+  EXPECT_EQ(0, session_.packets_not_decodable());
+}
+
+TEST_F(TestNalUnits, OneIsolatedNaluLoss) {  // Losing a complete NALU between two complete ones strips nothing.
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum += 2;  // Skip one seq num: a whole NALU is "lost".
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(0, session_.MakeDecodable());  // Both received NALUs are self-contained.
+  EXPECT_EQ(2 * kPacketBufferSize, session_.SessionLength());
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(1, 1, 2));
+}
+
+TEST_F(TestNalUnits, LossInMiddleOfNalu) {  // A NALU-end packet with its start lost is stripped.
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;  // Tail of a NALU whose earlier packet was "lost".
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(kPacketBufferSize, session_.SessionLength());
+  EXPECT_EQ(1, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, StartAndEndOfLastNalUnitLost) {  // An incomplete trailing NALU is stripped.
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;  // Middle fragment; both its start and end are "lost".
+  packet_.seqNum += 2;
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(kPacketBufferSize, session_.SessionLength());
+  EXPECT_EQ(1, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, ReorderWrapNoLoss) {  // Out-of-order arrival across a wrap still yields a decodable session.
+  packet_.seqNum = 0xFFFF;
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;
+  packet_.seqNum += 1;  // Wraps to 0; this middle packet arrives first.
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum -= 1;  // Back to 0xFFFF: the first packet arrives late.
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(0, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  EXPECT_EQ(3*kPacketBufferSize, session_.SessionLength());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, WrapLosses) {  // Both packets belong to broken NALUs; everything is stripped.
+  packet_.seqNum = 0xFFFF;
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;  // Middle fragment with no start received.
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;  // Wraps past 0; the packet in between is "lost".
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(2 * kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.SessionLength());
+  EXPECT_EQ(2, session_.packets_not_decodable());
+}
+
+TEST_F(TestNalUnits, ReorderWrapLosses) {  // Same as WrapLosses but packets arrive in reverse order.
+  packet_.seqNum = 0xFFFF;
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;  // Wraps past 0; this later packet arrives first.
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.seqNum -= 2;  // Back to 0xFFFF.
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(2 * kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.SessionLength());
+  EXPECT_EQ(2, session_.packets_not_decodable());
+}
+
+TEST_F(TestNackList, NoLosses) {  // Complete frame: every NACK-list entry is cleared to -1.
+  uint16_t low = 0xFFFF - 5;
+
+  packet_.seqNum = low;
+  packet_.isFirstPacket = true;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  for (int i = 1; i < 9; ++i) {  // Middle packets; seq nums wrap past 0xFFFF.
+    packet_.seqNum += 1;
+    packet_.isFirstPacket = false;
+    packet_.markerBit = false;
+    FillPacket(i + 1);
+    ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+              kPacketBufferSize);
+  }
+
+  packet_.seqNum += 1;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  FillPacket(10);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(10 * kPacketBufferSize, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildHardNackList(seq_num_list_, seq_num_list_length_));
+  EXPECT_FALSE(session_.session_nack());
+  SCOPED_TRACE("Calling VerifyAll");
+  VerifyAll(-1);  // -1 marks "received, do not NACK".
+
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_, seq_num_list_length_,
+                                          60));
+  SCOPED_TRACE("Calling VerifyAll");
+  VerifyAll(-1);
+}
+
+TEST_F(TestNackList, FiveLossesSpreadOut) {  // Alternating losses: every other entry stays NACK-able.
+  uint16_t low = 0xFFFF - 5;
+
+  packet_.seqNum = low;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  for (int i = 1; i < 9; ++i) {  // Insert only every other packet; the rest are "lost".
+    packet_.seqNum += 1;
+    packet_.isFirstPacket = false;
+    packet_.markerBit = false;
+    FillPacket(i);
+    if ((i + 1) % 2)
+      ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+                kPacketBufferSize);
+  }
+
+  packet_.seqNum++;  // Simulate loss of last packet.
+
+  EXPECT_EQ(5 * kPacketBufferSize, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildHardNackList(seq_num_list_, seq_num_list_length_));
+  for (int i = 0; i < seq_num_list_length_; ++i) {
+    if (i % 2)  // Odd slots correspond to the skipped (lost) seq nums.
+      EXPECT_EQ(static_cast<uint16_t>(low + i), seq_num_list_[i]);
+    else
+      EXPECT_EQ(-1, seq_num_list_[i]);
+  }
+
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_, seq_num_list_length_,
+                                          60));
+  EXPECT_EQ(true, session_.session_nack());
+  for (int i = 0; i < seq_num_list_length_; ++i) {
+    if (i % 2)
+      EXPECT_EQ(static_cast<uint16_t>(low + i), seq_num_list_[i]);
+    else
+      EXPECT_EQ(-1, seq_num_list_[i]);
+  }
+}
+
+TEST_F(TestNackList, FirstAndLastLost) {  // Only the middle packet arrives; both neighbors get NACKed.
+  uint16_t low = 0xFFFF;
+
+  packet_.seqNum = low + 1;  // Wraps to 0.
+  packet_.isFirstPacket = false;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(kPacketBufferSize, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum + 1);
+  EXPECT_EQ(0, session_.BuildHardNackList(seq_num_list_, seq_num_list_length_));
+  EXPECT_EQ(0xFFFF, seq_num_list_[0]);  // Lost first packet.
+  EXPECT_EQ(-1, seq_num_list_[1]);  // Received middle packet.
+  EXPECT_EQ(1, seq_num_list_[2]);  // Lost last packet.
+
+  BuildSeqNumList(low, packet_.seqNum + 1);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_,seq_num_list_length_,
+                                          60));
+  EXPECT_EQ(true, session_.session_nack());
+  EXPECT_EQ(0xFFFF, seq_num_list_[0]);
+  EXPECT_EQ(-1, seq_num_list_[1]);
+  EXPECT_EQ(1, seq_num_list_[2]);
+}
+
+TEST_F(TestNackList, LostAllButEmptyPackets) {  // Empty packets mark their seq nums -2 ("empty, don't NACK").
+  uint16_t low = 0;
+  packet_.seqNum = low + 1;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = false;
+  packet_.frameType = kFrameEmpty;
+  packet_.sizeBytes = 0;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0), 0);
+
+  packet_.seqNum = low + 3;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = false;
+  packet_.frameType = kFrameEmpty;
+  packet_.sizeBytes = 0;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0), 0);
+
+  EXPECT_EQ(0, session_.SessionLength());  // Empty packets carry no payload.
+  BuildSeqNumList(low, packet_.seqNum + 1);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_, seq_num_list_length_,
+                                          60));
+  EXPECT_EQ(true, session_.session_nack());
+  EXPECT_EQ(0, seq_num_list_[0]);  // Lost packet before the first empty one.
+  EXPECT_EQ(-1, seq_num_list_[1]);
+  EXPECT_EQ(-2, seq_num_list_[2]);  // NOTE(review): -2 appears to mean "covered by empty packets" — confirm.
+  EXPECT_EQ(-2, seq_num_list_[3]);
+  EXPECT_EQ(4, seq_num_list_[4]);  // Lost packet after the last empty one.
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/tick_time_base.h b/trunk/src/modules/video_coding/main/source/tick_time_base.h
new file mode 100644
index 0000000..a212591
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/tick_time_base.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TICK_TIME_BASE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TICK_TIME_BASE_H_
+
+#include "system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+// This class provides a mockable wrapper to TickTime.
+// Both accessors are virtual so unit tests can subclass this and supply a
+// fake/controllable clock; the default implementations simply delegate to
+// the static TickTime helpers.
+class TickTimeBase {
+ public:
+  virtual ~TickTimeBase() {}
+
+  // "Now" in milliseconds.
+  virtual int64_t MillisecondTimestamp() const {
+    return TickTime::MillisecondTimestamp();
+  }
+
+  // "Now" in microseconds.
+  virtual int64_t MicrosecondTimestamp() const {
+    return TickTime::MicrosecondTimestamp();
+  }
+};
+
+}  // namespace
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TICK_TIME_BASE_H_
diff --git a/trunk/src/modules/video_coding/main/source/timestamp_extrapolator.cc b/trunk/src/modules/video_coding/main/source/timestamp_extrapolator.cc
new file mode 100644
index 0000000..e272eb9
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/timestamp_extrapolator.cc
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "internal_defines.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "timestamp_extrapolator.h"
+#include "trace.h"
+
+namespace webrtc {
+
+// Constructs the extrapolator and initializes the recursive-least-squares
+// (RLS) line-fit state (see Update() for the update equations).
+// |clock| is NOT owned by this class; |vcmId| and |id| are used for trace
+// output only. The constructor immediately calls Reset() with the current
+// wall-clock time.
+VCMTimestampExtrapolator::VCMTimestampExtrapolator(TickTimeBase* clock,
+                                                   WebRtc_Word32 vcmId,
+                                                   WebRtc_Word32 id)
+:
+_rwLock(RWLockWrapper::CreateRWLock()),
+_vcmId(vcmId),
+_id(id),
+_clock(clock),
+_startMs(0),
+_firstTimestamp(0),
+_wrapArounds(0),
+_prevTs90khz(0),
+_lambda(1),
+_firstAfterReset(true),
+_packetCount(0),
+_startUpFilterDelayInPackets(2),
+_detectorAccumulatorPos(0),
+_detectorAccumulatorNeg(0),
+_alarmThreshold(60e3),
+_accDrift(6600), // in timestamp ticks, i.e. 15 ms
+_accMaxError(7000),
+_P11(1e10)
+{
+    Reset(_clock->MillisecondTimestamp());
+}
+
+// Frees the lock created in the constructor. _clock is not owned and is
+// deliberately not deleted.
+VCMTimestampExtrapolator::~VCMTimestampExtrapolator()
+{
+    delete _rwLock;
+}
+
+// Re-initializes the filter to its startup state. If |nowMs| >= 0 it becomes
+// the new time origin (_startMs); otherwise the wall clock is sampled.
+// The slope starts at 90 (90 kHz RTP ticks per millisecond) with offset 0,
+// and the offset variance _P[1][1] is set large (_P11) so the first samples
+// can move the offset freely.
+void
+VCMTimestampExtrapolator::Reset(const WebRtc_Word64 nowMs /* = -1 */)
+{
+    WriteLockScoped wl(*_rwLock);
+    if (nowMs > -1)
+    {
+        _startMs = nowMs;
+    }
+    else
+    {
+        _startMs = _clock->MillisecondTimestamp();
+    }
+    _prevMs = _startMs;
+    _firstTimestamp = 0;
+    _w[0] = 90.0;
+    _w[1] = 0;
+    _P[0][0] = 1;
+    _P[1][1] = _P11;
+    _P[0][1] = _P[1][0] = 0;
+    _firstAfterReset = true;
+    _prevTs90khz = 0;
+    _wrapArounds = 0;
+    _packetCount = 0;
+    _detectorAccumulatorPos = 0;
+    _detectorAccumulatorNeg = 0;
+}
+
+// Feeds one sample (receive time |tMs|, RTP timestamp |ts90khz|) into the
+// RLS line fit  ts = _w[0] * t + _w[1].  Reordered (older) timestamps that
+// are not wraparounds are discarded. |trace| enables debug trace output.
+void
+VCMTimestampExtrapolator::Update(WebRtc_Word64 tMs, WebRtc_UWord32 ts90khz, bool trace)
+{
+
+    _rwLock->AcquireLockExclusive();
+    if (tMs - _prevMs > 10e3)
+    {
+        // Ten seconds without a complete frame.
+        // Reset the extrapolator
+        // NOTE(review): the lock is dropped here because Reset() acquires
+        // the write lock itself; another thread could interleave between the
+        // release and re-acquire - confirm callers serialize Update().
+        _rwLock->ReleaseLockExclusive();
+        Reset();
+        _rwLock->AcquireLockExclusive();
+    }
+    else
+    {
+        _prevMs = tMs;
+    }
+
+    // Remove offset to prevent badly scaled matrices
+    tMs -= _startMs;
+
+    WebRtc_Word32 prevWrapArounds = _wrapArounds;
+    CheckForWrapArounds(ts90khz);
+    WebRtc_Word32 wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;
+
+    // An older timestamp with no wrap detected means the packet was
+    // reordered; skip the filter update entirely.
+    if (wrapAroundsSincePrev == 0 && ts90khz < _prevTs90khz)
+    {
+        _rwLock->ReleaseLockExclusive();
+        return;
+    }
+
+    if (_firstAfterReset)
+    {
+        // Make an initial guess of the offset,
+        // should be almost correct since tMs - _startMs
+        // should about zero at this time.
+        _w[1] = -_w[0] * tMs;
+        _firstTimestamp = ts90khz;
+        _firstAfterReset = false;
+    }
+
+    // Compensate for wraparounds by changing the line offset
+    _w[1] = _w[1] - wrapAroundsSincePrev * ((static_cast<WebRtc_Word64>(1)<<32) - 1);
+
+    // Prediction error of this sample under the current line fit.
+    double residual = (static_cast<double>(ts90khz) - _firstTimestamp) - static_cast<double>(tMs) * _w[0] - _w[1];
+    if (DelayChangeDetection(residual, trace) &&
+        _packetCount >= _startUpFilterDelayInPackets)
+    {
+        // A sudden change of average network delay has been detected.
+        // Force the filter to adjust its offset parameter by changing
+        // the offset uncertainty. Don't do this during startup.
+        _P[1][1] = _P11;
+    }
+    // Standard RLS gain / parameter / covariance update (MATLAB notation):
+    //T = [t(k) 1]';
+    //that = T'*w;
+    //K = P*T/(lambda + T'*P*T);
+    double K[2];
+    K[0] = _P[0][0] * tMs + _P[0][1];
+    K[1] = _P[1][0] * tMs + _P[1][1];
+    double TPT = _lambda + tMs * K[0] + K[1];
+    K[0] /= TPT;
+    K[1] /= TPT;
+    //w = w + K*(ts(k) - that);
+    _w[0] = _w[0] + K[0] * residual;
+    _w[1] = _w[1] + K[1] * residual;
+    //P = 1/lambda*(P - K*T'*P);
+    double p00 = 1 / _lambda * (_P[0][0] - (K[0] * tMs * _P[0][0] + K[0] * _P[1][0]));
+    double p01 = 1 / _lambda * (_P[0][1] - (K[0] * tMs * _P[0][1] + K[0] * _P[1][1]));
+    _P[1][0] = 1 / _lambda * (_P[1][0] - (K[1] * tMs * _P[0][0] + K[1] * _P[1][0]));
+    _P[1][1] = 1 / _lambda * (_P[1][1] - (K[1] * tMs * _P[0][1] + K[1] * _P[1][1]));
+    _P[0][0] = p00;
+    _P[0][1] = p01;
+    if (_packetCount < _startUpFilterDelayInPackets)
+    {
+        _packetCount++;
+    }
+    if (trace)
+    {
+        // NOTE(review): tMs is 64-bit but is matched with a "%u" conversion
+        // specifier; that is a varargs format/argument mismatch on most
+        // ABIs - confirm and fix the format string in a separate change.
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id),  "w[0]=%f w[1]=%f ts=%u tMs=%u", _w[0], _w[1], ts90khz, tMs);
+    }
+    _rwLock->ReleaseLockExclusive();
+}
+
+// Predicts the RTP timestamp for wall-clock time |tMs|.
+// Three phases: no samples yet -> 0; during startup (< 2 samples) -> simple
+// linear projection at exactly 90 ticks/ms from the previous sample;
+// otherwise -> the fitted line _w[0] * (t - _startMs) + _w[1].
+WebRtc_UWord32
+VCMTimestampExtrapolator::ExtrapolateTimestamp(WebRtc_Word64 tMs) const
+{
+    ReadLockScoped rl(*_rwLock);
+    WebRtc_UWord32 timestamp = 0;
+    if (_packetCount == 0)
+    {
+        timestamp = 0;
+    }
+    else if (_packetCount < _startUpFilterDelayInPackets)
+    {
+        timestamp = static_cast<WebRtc_UWord32>(90.0 * (tMs - _prevMs) + _prevTs90khz + 0.5);
+    }
+    else
+    {
+        timestamp = static_cast<WebRtc_UWord32>(_w[0] * (tMs - _startMs) + _w[1] + _firstTimestamp + 0.5);
+    }
+    return timestamp;
+}
+
+// Inverse mapping: predicts the local wall-clock time (ms) at which RTP
+// timestamp |timestamp90khz| occurs. Returns -1 before any sample has been
+// added; during startup it projects linearly at 90 ticks/ms; otherwise it
+// inverts the fitted line (guarding against a near-zero slope).
+WebRtc_Word64
+VCMTimestampExtrapolator::ExtrapolateLocalTime(WebRtc_UWord32 timestamp90khz) const
+{
+    ReadLockScoped rl(*_rwLock);
+    WebRtc_Word64 localTimeMs = 0;
+    if (_packetCount == 0)
+    {
+        localTimeMs = -1;
+    }
+    else if (_packetCount < _startUpFilterDelayInPackets)
+    {
+        localTimeMs = _prevMs + static_cast<WebRtc_Word64>(static_cast<double>(timestamp90khz - _prevTs90khz) / 90.0 + 0.5);
+    }
+    else
+    {
+        // Degenerate slope: fall back to the time origin rather than
+        // dividing by (almost) zero.
+        if (_w[0] < 1e-3)
+        {
+            localTimeMs = _startMs;
+        }
+        else
+        {
+            double timestampDiff = static_cast<double>(timestamp90khz) - static_cast<double>(_firstTimestamp);
+            localTimeMs = static_cast<WebRtc_Word64>(static_cast<double>(_startMs) + (timestampDiff - _w[1]) / _w[0] + 0.5);
+        }
+    }
+    return localTimeMs;
+}
+
+// Investigates if the timestamp clock has overflowed since the last timestamp and
+// keeps track of the number of wrap arounds since reset.
+// NOTE(review): _prevTs90khz == 0 is used as the "no previous sample"
+// sentinel, so a genuine RTP timestamp of exactly 0 is treated as the first
+// sample - confirm this is acceptable for the callers.
+void
+VCMTimestampExtrapolator::CheckForWrapArounds(WebRtc_UWord32 ts90khz)
+{
+    if (_prevTs90khz == 0)
+    {
+        _prevTs90khz = ts90khz;
+        return;
+    }
+    if (ts90khz < _prevTs90khz)
+    {
+        // This difference will probably be less than -2^31 if we have had a wrap around
+        // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is casted to a Word32,
+        // it should be positive.
+        if (static_cast<WebRtc_Word32>(ts90khz - _prevTs90khz) > 0)
+        {
+            // Forward wrap around
+            _wrapArounds++;
+        }
+    }
+    // This difference will probably be less than -2^31 if we have had a backward wrap around.
+    // Since it is casted to a Word32, it should be positive.
+    else if (static_cast<WebRtc_Word32>(_prevTs90khz - ts90khz) > 0)
+    {
+        // Backward wrap around
+        _wrapArounds--;
+    }
+    _prevTs90khz = ts90khz;
+}
+
+// CUSUM change detector for sudden network-delay shifts. |error| (the filter
+// residual) is clamped to +/-_accMaxError, then folded into positive and
+// negative accumulators with drift term _accDrift. When either accumulator
+// exceeds _alarmThreshold, both are cleared and true is returned (alarm).
+bool
+VCMTimestampExtrapolator::DelayChangeDetection(double error, bool trace)
+{
+    // CUSUM detection of sudden delay changes
+    error = (error > 0) ? VCM_MIN(error, _accMaxError) : VCM_MAX(error, -_accMaxError);
+    _detectorAccumulatorPos = VCM_MAX(_detectorAccumulatorPos + error - _accDrift, (double)0);
+    _detectorAccumulatorNeg = VCM_MIN(_detectorAccumulatorNeg + error + _accDrift, (double)0);
+    if (_detectorAccumulatorPos > _alarmThreshold || _detectorAccumulatorNeg < -_alarmThreshold)
+    {
+        // Alarm
+        if (trace)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id),  "g1=%f g2=%f alarm=1", _detectorAccumulatorPos, _detectorAccumulatorNeg);
+        }
+        _detectorAccumulatorPos = _detectorAccumulatorNeg = 0;
+        return true;
+    }
+    if (trace)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id),  "g1=%f g2=%f alarm=0", _detectorAccumulatorPos, _detectorAccumulatorNeg);
+    }
+    return false;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/timestamp_extrapolator.h b/trunk/src/modules/video_coding/main/source/timestamp_extrapolator.h
new file mode 100644
index 0000000..901d8d4
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/timestamp_extrapolator.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
+
+#include "typedefs.h"
+#include "rw_lock_wrapper.h"
+
+namespace webrtc
+{
+
+class TickTimeBase;
+
+// Maps RTP 90 kHz timestamps to local wall-clock time (and back) by fitting
+// a line ts = w[0]*t + w[1] with a recursive-least-squares filter, with
+// wraparound tracking and CUSUM delay-change detection. Thread-safe via an
+// internal read/write lock.
+class VCMTimestampExtrapolator
+{
+public:
+    VCMTimestampExtrapolator(TickTimeBase* clock,
+                             WebRtc_Word32 vcmId = 0,
+                             WebRtc_Word32 receiverId = 0);
+    ~VCMTimestampExtrapolator();
+    // Adds one sample: receive time |tMs| paired with RTP timestamp
+    // |ts90khz|. |trace| enables debug trace output.
+    void Update(WebRtc_Word64 tMs, WebRtc_UWord32 ts90khz, bool trace = true);
+    // Predicts the RTP timestamp at wall-clock time |tMs|.
+    WebRtc_UWord32 ExtrapolateTimestamp(WebRtc_Word64 tMs) const;
+    // Predicts the wall-clock time (ms) for |timestamp90khz|; -1 if no
+    // samples have been added yet.
+    WebRtc_Word64 ExtrapolateLocalTime(WebRtc_UWord32 timestamp90khz) const;
+    // Re-initializes the filter; nowMs < 0 means "sample the clock".
+    void Reset(WebRtc_Word64 nowMs = -1);
+
+private:
+    void CheckForWrapArounds(WebRtc_UWord32 ts90khz);
+    bool DelayChangeDetection(double error, bool trace = true);
+    RWLockWrapper*        _rwLock;  // Owned; deleted in the destructor.
+    WebRtc_Word32         _vcmId;  // Trace output only.
+    WebRtc_Word32         _id;  // Trace output only.
+    TickTimeBase*         _clock;  // Not owned.
+    double              _w[2];  // Line fit: ts = w[0]*t + w[1].
+    double              _P[2][2];  // RLS parameter covariance.
+    WebRtc_Word64         _startMs;  // Time origin for the fit.
+    WebRtc_Word64         _prevMs;  // Receive time of the last sample.
+    WebRtc_UWord32        _firstTimestamp;  // First ts after reset.
+    WebRtc_Word32         _wrapArounds;  // Net wraparounds since reset.
+    WebRtc_UWord32        _prevTs90khz;  // Last accepted timestamp.
+    const double        _lambda;  // RLS forgetting factor.
+    bool                _firstAfterReset;
+    WebRtc_UWord32        _packetCount;
+    const WebRtc_UWord32  _startUpFilterDelayInPackets;
+
+    // CUSUM delay-change detector state (see DelayChangeDetection()).
+    double              _detectorAccumulatorPos;
+    double              _detectorAccumulatorNeg;
+    const double        _alarmThreshold;
+    const double        _accDrift;
+    const double        _accMaxError;
+    const double        _P11;  // Initial/forced offset variance.
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
diff --git a/trunk/src/modules/video_coding/main/source/timestamp_map.cc b/trunk/src/modules/video_coding/main/source/timestamp_map.cc
new file mode 100644
index 0000000..f19819b
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/timestamp_map.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "timestamp_map.h"
+#include <stdlib.h>
+#include <assert.h>
+
+namespace webrtc {
+
+// Constructor. Optional parameter specifies maximum number of
+// coexisting timers.
+// A non-positive |length| falls back to a default capacity of 10. The
+// backing store is a fixed-size circular buffer indexed by _nextAddIx
+// (write position) and _nextPopIx (read position).
+VCMTimestampMap::VCMTimestampMap(WebRtc_Word32 length):
+    _nextAddIx(0),
+    _nextPopIx(0)
+{
+    if (length <= 0)
+    {
+        // default
+        length = 10;
+    }
+
+    _map = new VCMTimestampDataTuple[length];
+    _length = length;
+}
+
+// Destructor.
+// Frees the circular buffer. The opaque |data| pointers stored in the map
+// are NOT freed here - ownership stays with the caller.
+VCMTimestampMap::~VCMTimestampMap()
+{
+    delete [] _map;
+}
+
+// Empty the list of timers.
+// Only the indices are reset; the buffer contents are left as-is and will
+// be overwritten by subsequent Add() calls.
+void
+VCMTimestampMap::Reset()
+{
+    _nextAddIx = 0;
+    _nextPopIx = 0;
+}
+
+// Stores a (timestamp, data) tuple at the write position of the circular
+// buffer. Returns 0 on success, or -1 when the buffer was full - in that
+// case the oldest entry is silently dropped (its data pointer is lost).
+WebRtc_Word32
+VCMTimestampMap::Add(WebRtc_UWord32 timestamp, void* data)
+{
+    _map[_nextAddIx].timestamp = timestamp;
+    _map[_nextAddIx].data = data;
+    _nextAddIx = (_nextAddIx + 1) % _length;
+
+    if (_nextAddIx == _nextPopIx)
+    {
+        // Circular list full; forget oldest entry
+        _nextPopIx = (_nextPopIx + 1) % _length;
+        return -1;
+    }
+    return 0;
+}
+
+// Retrieves and removes the data stored under |timestamp|, discarding every
+// older entry scanned on the way. Returns NULL if the timestamp is not in
+// the map. Relies on entries having been Add()ed in increasing timestamp
+// order (the "> timestamp" early-out below).
+// NOTE(review): the comparison does not account for 32-bit RTP timestamp
+// wraparound - after a wrap, lookups may bail out early. Confirm whether
+// callers can encounter a wrap within the map's lifetime.
+void*
+VCMTimestampMap::Pop(WebRtc_UWord32 timestamp)
+{
+    while (!IsEmpty())
+    {
+        if (_map[_nextPopIx].timestamp == timestamp)
+        {
+            // found start time for this timestamp
+            void* data = _map[_nextPopIx].data;
+            _map[_nextPopIx].data = NULL;
+            _nextPopIx = (_nextPopIx + 1) % _length;
+            return data;
+        }
+        else if (_map[_nextPopIx].timestamp > timestamp)
+        {
+            // the timestamp we are looking for is not in the list
+            assert(_nextPopIx < _length && _nextPopIx >= 0);
+            return NULL;
+        }
+
+        // not in this position, check next (and forget this position)
+        _nextPopIx = (_nextPopIx + 1) % _length;
+    }
+
+    // could not find matching timestamp in list
+    assert(_nextPopIx < _length && _nextPopIx >= 0);
+    return NULL;
+}
+
+// Check if no timers are currently running
+// (read and write positions coincide when the buffer is empty).
+bool
+VCMTimestampMap::IsEmpty() const
+{
+    return (_nextAddIx == _nextPopIx);
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/timestamp_map.h b/trunk/src/modules/video_coding/main/source/timestamp_map.h
new file mode 100644
index 0000000..fd532bc
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/timestamp_map.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+// One entry of the map: an RTP timestamp and an opaque, caller-owned
+// payload pointer.
+struct VCMTimestampDataTuple
+{
+    WebRtc_UWord32    timestamp;
+    void*             data;
+};
+
+// Fixed-capacity circular buffer mapping RTP timestamps to opaque data
+// pointers. Entries are expected to be added in increasing timestamp order;
+// Pop() discards entries older than the one requested. Not thread-safe.
+class VCMTimestampMap
+{
+public:
+    // Constructor. Optional parameter specifies maximum number of
+    // timestamps in map.
+    VCMTimestampMap(const WebRtc_Word32 length = 10);
+
+    // Destructor.
+    ~VCMTimestampMap();
+
+    // Empty the map
+    void Reset();
+
+    // Returns 0 on success, -1 if the map was full (oldest entry dropped).
+    WebRtc_Word32 Add(WebRtc_UWord32 timestamp, void*  data);
+    // Returns the stored data for |timestamp|, or NULL if not found.
+    void* Pop(WebRtc_UWord32 timestamp);
+
+private:
+    bool IsEmpty() const;
+
+    VCMTimestampDataTuple* _map;       // Circular buffer of _length entries.
+    WebRtc_Word32                   _nextAddIx;  // Write position.
+    WebRtc_Word32                   _nextPopIx;  // Read position.
+    WebRtc_Word32                   _length;     // Buffer capacity.
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
diff --git a/trunk/src/modules/video_coding/main/source/timing.cc b/trunk/src/modules/video_coding/main/source/timing.cc
new file mode 100644
index 0000000..aca05fa
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/timing.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "internal_defines.h"
+#include "jitter_buffer_common.h"
+#include "timing.h"
+#include "timestamp_extrapolator.h"
+
+namespace webrtc {
+
+// Constructs the timing component. If |masterTiming| is NULL this instance
+// becomes the master: it creates and owns its own timestamp extrapolator.
+// Otherwise (dual/slave timing) it borrows the master's extrapolator and
+// must not outlive it. |clock| is not owned.
+VCMTiming::VCMTiming(TickTimeBase* clock,
+                     WebRtc_Word32 vcmId,
+                     WebRtc_Word32 timingId,
+                     VCMTiming* masterTiming)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_vcmId(vcmId),
+_clock(clock),
+_timingId(timingId),
+_master(false),
+_tsExtrapolator(),
+_codecTimer(),
+_renderDelayMs(kDefaultRenderDelayMs),
+_minTotalDelayMs(0),
+_requiredDelayMs(0),
+_currentDelayMs(0),
+_prevFrameTimestamp(0)
+{
+    if (masterTiming == NULL)
+    {
+        _master = true;
+        _tsExtrapolator = new VCMTimestampExtrapolator(_clock, vcmId, timingId);
+    }
+    else
+    {
+        _tsExtrapolator = masterTiming->_tsExtrapolator;
+    }
+}
+
+// Deletes the extrapolator only when this instance is the master (owner);
+// a slave instance merely drops its borrowed pointer.
+VCMTiming::~VCMTiming()
+{
+    if (_master)
+    {
+        delete _tsExtrapolator;
+    }
+    delete _critSect;
+}
+
+// Resets the timing state to its defaults. |nowMs| >= 0 is forwarded to the
+// extrapolator as the new time origin; -1 lets it sample the clock.
+void
+VCMTiming::Reset(WebRtc_Word64 nowMs /* = -1 */)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (nowMs > -1)
+    {
+        _tsExtrapolator->Reset(nowMs);
+    }
+    else
+    {
+        _tsExtrapolator->Reset();
+    }
+    _codecTimer.Reset();
+    _renderDelayMs = kDefaultRenderDelayMs;
+    _minTotalDelayMs = 0;
+    _requiredDelayMs = 0;
+    _currentDelayMs = 0;
+    _prevFrameTimestamp = 0;
+}
+
+// Resets only the decode-time statistics.
+// NOTE(review): unlike every other mutator here, this does not take
+// _critSect before touching _codecTimer - confirm whether that is
+// intentional (e.g. caller already holds a lock) or an oversight.
+void VCMTiming::ResetDecodeTime()
+{
+    _codecTimer.Reset();
+}
+
+// Sets the fixed time (ms) needed to render a decoded image.
+void
+VCMTiming::SetRenderDelay(WebRtc_UWord32 renderDelayMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _renderDelayMs = renderDelayMs;
+}
+
+// Sets the minimum total delay (ms) required for audio/video sync; used as
+// a floor for the target delay in UpdateCurrentDelay().
+void
+VCMTiming::SetMinimumTotalDelay(WebRtc_UWord32 minTotalDelayMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _minTotalDelayMs = minTotalDelayMs;
+}
+
+// Sets the jitter-buffer delay requirement (ms); traces the change when
+// this instance is the master timing component.
+void
+VCMTiming::SetRequiredDelay(WebRtc_UWord32 requiredDelayMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (requiredDelayMs != _requiredDelayMs)
+    {
+        if (_master)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                    "Desired jitter buffer level: %u ms", requiredDelayMs);
+        }
+        _requiredDelayMs = requiredDelayMs;
+    }
+}
+
+// Moves _currentDelayMs toward the target delay, rate-limited to
+// kDelayMaxChangeMsPerS per second of media time. The elapsed media time is
+// derived from the 90 kHz RTP timestamp difference to the previous frame,
+// with explicit handling of 32-bit timestamp wraparound.
+void VCMTiming::UpdateCurrentDelay(WebRtc_UWord32 frameTimestamp)
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_UWord32 targetDelayMs = TargetDelayInternal();
+
+    // Make sure we try to sync with audio
+    if (targetDelayMs < _minTotalDelayMs)
+    {
+        targetDelayMs = _minTotalDelayMs;
+    }
+
+    if (_currentDelayMs == 0)
+    {
+        // Not initialized, set current delay to target.
+        _currentDelayMs = targetDelayMs;
+    }
+    else if (targetDelayMs != _currentDelayMs)
+    {
+        WebRtc_Word64 delayDiffMs = static_cast<WebRtc_Word64>(targetDelayMs) -
+                                    _currentDelayMs;
+        // Never change the delay with more than 100 ms every second. If we're changing the
+        // delay in too large steps we will get noticable freezes. By limiting the change we
+        // can increase the delay in smaller steps, which will be experienced as the video is
+        // played in slow motion. When lowering the delay the video will be played at a faster
+        // pace.
+        WebRtc_Word64 maxChangeMs = 0;
+        if (frameTimestamp < 0x0000ffff && _prevFrameTimestamp > 0xffff0000)
+        {
+            // wrap
+            maxChangeMs = kDelayMaxChangeMsPerS * (frameTimestamp +
+                         (static_cast<WebRtc_Word64>(1)<<32) - _prevFrameTimestamp) / 90000;
+        }
+        else
+        {
+            maxChangeMs = kDelayMaxChangeMsPerS *
+                          (frameTimestamp - _prevFrameTimestamp) / 90000;
+        }
+        if (maxChangeMs <= 0)
+        {
+            // Any changes less than 1 ms are truncated and
+            // will be postponed. Negative change will be due
+            // to reordering and should be ignored.
+            return;
+        }
+        else if (delayDiffMs < -maxChangeMs)
+        {
+            delayDiffMs = -maxChangeMs;
+        }
+        else if (delayDiffMs > maxChangeMs)
+        {
+            delayDiffMs = maxChangeMs;
+        }
+        _currentDelayMs = _currentDelayMs + static_cast<WebRtc_Word32>(delayDiffMs);
+    }
+    _prevFrameTimestamp = frameTimestamp;
+}
+
+// Increases _currentDelayMs by how late the frame's decode actually started
+// relative to its deadline (render time minus decode and render delays).
+// The delay never decreases here and is capped at the target delay.
+void VCMTiming::UpdateCurrentDelay(WebRtc_Word64 renderTimeMs,
+                                   WebRtc_Word64 actualDecodeTimeMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_UWord32 targetDelayMs = TargetDelayInternal();
+    // Make sure we try to sync with audio
+    if (targetDelayMs < _minTotalDelayMs)
+    {
+        targetDelayMs = _minTotalDelayMs;
+    }
+    // How late the decode actually started; negative means it was on time.
+    WebRtc_Word64 delayedMs = actualDecodeTimeMs -
+                              (renderTimeMs - MaxDecodeTimeMs() - _renderDelayMs);
+    if (delayedMs < 0)
+    {
+        return;
+    }
+    else if (_currentDelayMs + delayedMs <= targetDelayMs)
+    {
+        _currentDelayMs += static_cast<WebRtc_UWord32>(delayedMs);
+    }
+    else
+    {
+        _currentDelayMs = targetDelayMs;
+    }
+}
+
+// Stops the decode timer for one frame and records the measured decode time
+// in _codecTimer. A negative time difference is an error (asserts in debug
+// builds). Always returns 0.
+WebRtc_Word32
+VCMTiming::StopDecodeTimer(WebRtc_UWord32 timeStamp,
+                           WebRtc_Word64 startTimeMs,
+                           WebRtc_Word64 nowMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    const WebRtc_Word32 maxDecTime = MaxDecodeTimeMs();
+    WebRtc_Word32 timeDiffMs = _codecTimer.StopTimer(startTimeMs, nowMs);
+    if (timeDiffMs < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+            "Codec timer error: %d", timeDiffMs);
+        assert(false);
+    }
+
+    if (_master)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                "Frame decoded: timeStamp=%u decTime=%d maxDecTime=%u, at %u",
+                timeStamp, timeDiffMs, maxDecTime, MaskWord64ToUWord32(nowMs));
+    }
+    return 0;
+}
+
+// Forwards one (timestamp, receive time) sample to the shared timestamp
+// extrapolator; tracing is enabled only for the master instance.
+void
+VCMTiming::IncomingTimestamp(WebRtc_UWord32 timeStamp, WebRtc_Word64 nowMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _tsExtrapolator->Update(nowMs, timeStamp, _master);
+}
+
+// Returns the local wall-clock time (ms) at which the frame with
+// |frameTimestamp| should be rendered, or a negative value if the estimate
+// was rejected (see RenderTimeMsInternal).
+WebRtc_Word64
+VCMTiming::RenderTimeMs(WebRtc_UWord32 frameTimestamp, WebRtc_Word64 nowMs) const
+{
+    CriticalSectionScoped cs(_critSect);
+    const WebRtc_Word64 renderTimeMs = RenderTimeMsInternal(frameTimestamp, nowMs);
+    if (renderTimeMs < 0)
+    {
+        return renderTimeMs;
+    }
+    if (_master)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+            "Render frame %u at %u. Render delay %u, required delay %u,"
+                " max decode time %u, min total delay %u",
+            frameTimestamp, MaskWord64ToUWord32(renderTimeMs), _renderDelayMs,
+            _requiredDelayMs, MaxDecodeTimeMs(),_minTotalDelayMs);
+    }
+    return renderTimeMs;
+}
+
+// Estimates the render time as the extrapolated arrival time of the frame
+// plus the current delay. Returns -1 if the extrapolated time lies more
+// than kMaxVideoDelayMs in the future (treated as a bad estimate); an
+// extrapolator result of -1 ("no samples yet") falls back to |nowMs|.
+WebRtc_Word64
+VCMTiming::RenderTimeMsInternal(WebRtc_UWord32 frameTimestamp, WebRtc_Word64 nowMs) const
+{
+    WebRtc_Word64 estimatedCompleteTimeMs =
+            _tsExtrapolator->ExtrapolateLocalTime(frameTimestamp);
+    if (estimatedCompleteTimeMs - nowMs > kMaxVideoDelayMs)
+    {
+        if (_master)
+        {
+            // NOTE(review): this format string has no conversion specifiers
+            // but two extra arguments are passed - harmless, yet the trace
+            // loses the values; confirm and fix separately.
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                    "Timestamp arrived 2 seconds early, reset statistics",
+                    frameTimestamp, estimatedCompleteTimeMs);
+        }
+        return -1;
+    }
+    if (_master)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                "ExtrapolateLocalTime(%u)=%u ms",
+                frameTimestamp, MaskWord64ToUWord32(estimatedCompleteTimeMs));
+    }
+    if (estimatedCompleteTimeMs == -1)
+    {
+        estimatedCompleteTimeMs = nowMs;
+    }
+
+    return estimatedCompleteTimeMs + _currentDelayMs;
+}
+
+// Must be called from inside a critical section
+// Returns the decode time estimate (ms) for |frameType| from the codec
+// timer, or -1 (after tracing an error) if the estimate is negative.
+WebRtc_Word32
+VCMTiming::MaxDecodeTimeMs(FrameType frameType /*= kVideoFrameDelta*/) const
+{
+    const WebRtc_Word32 decodeTimeMs = _codecTimer.RequiredDecodeTimeMs(frameType);
+
+    if (decodeTimeMs < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+            "Negative maximum decode time: %d", decodeTimeMs);
+        return -1;
+    }
+    return decodeTimeMs;
+}
+
+// Returns how long (ms) we can wait for the frame to become complete before
+// it must be handed to the decoder: time until the render deadline minus
+// decode and render delays, clamped to 0.
+WebRtc_UWord32
+VCMTiming::MaxWaitingTime(WebRtc_Word64 renderTimeMs, WebRtc_Word64 nowMs) const
+{
+    CriticalSectionScoped cs(_critSect);
+
+    const WebRtc_Word64 maxWaitTimeMs = renderTimeMs - nowMs -
+                                        MaxDecodeTimeMs() - _renderDelayMs;
+
+    if (maxWaitTimeMs < 0)
+    {
+        return 0;
+    }
+    return static_cast<WebRtc_UWord32>(maxWaitTimeMs);
+}
+
+// Returns true if |availableProcessingTimeMs| exceeds the current decode
+// time estimate. With no estimate yet (negative), optimistically returns
+// true so at least one frame gets decoded to seed the estimate; a zero
+// estimate is rounded up to 1 ms.
+bool
+VCMTiming::EnoughTimeToDecode(WebRtc_UWord32 availableProcessingTimeMs) const
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_Word32 maxDecodeTimeMs = MaxDecodeTimeMs();
+    if (maxDecodeTimeMs < 0)
+    {
+        // Haven't decoded any frames yet, try decoding one to get an estimate
+        // of the decode time.
+        return true;
+    }
+    else if (maxDecodeTimeMs == 0)
+    {
+        // Decode time is less than 1, set to 1 for now since
+        // we don't have any better precision. Count ticks later?
+        maxDecodeTimeMs = 1;
+    }
+    return static_cast<WebRtc_Word32>(availableProcessingTimeMs) - maxDecodeTimeMs > 0;
+}
+
+// Locked public accessor for the target delay.
+WebRtc_UWord32
+VCMTiming::TargetVideoDelay() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return TargetDelayInternal();
+}
+
+// Target delay = jitter-buffer delay + decode time + render delay.
+// Must be called with _critSect held.
+WebRtc_UWord32
+VCMTiming::TargetDelayInternal() const
+{
+    return _requiredDelayMs + MaxDecodeTimeMs() + _renderDelayMs;
+}
+
+}
diff --git a/trunk/src/modules/video_coding/main/source/timing.h b/trunk/src/modules/video_coding/main/source/timing.h
new file mode 100644
index 0000000..41a4945
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/timing.h
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
+
+#include "typedefs.h"
+#include "critical_section_wrapper.h"
+#include "codec_timer.h"
+
+namespace webrtc
+{
+
+class TickTimeBase;
+class VCMTimestampExtrapolator;
+
+// Tracks and adapts the receive-side video delay (jitter buffer + decode +
+// render) and computes per-frame render deadlines via a shared timestamp
+// extrapolator. A "master" instance owns the extrapolator; a dual instance
+// borrows the master's.
+class VCMTiming
+{
+public:
+    // The primary timing component should be passed
+    // if this is the dual timing component.
+    VCMTiming(TickTimeBase* clock,
+              WebRtc_Word32 vcmId = 0,
+              WebRtc_Word32 timingId = 0,
+              VCMTiming* masterTiming = NULL);
+    ~VCMTiming();
+
+    // Resets the timing to the initial state.
+    void Reset(WebRtc_Word64 nowMs = -1);
+    void ResetDecodeTime();
+
+    // The amount of time needed to render an image. Defaults to 10 ms.
+    void SetRenderDelay(WebRtc_UWord32 renderDelayMs);
+
+    // The minimum time the video must be delayed on the receiver to
+    // get the desired jitter buffer level.
+    void SetRequiredDelay(WebRtc_UWord32 requiredDelayMs);
+
+    // Minimum total delay required to sync video with audio.
+    void SetMinimumTotalDelay(WebRtc_UWord32 minTotalDelayMs);
+
+    // Increases or decreases the current delay to get closer to the target delay.
+    // Calculates how long it has been since the previous call to this function,
+    // and increases/decreases the delay in proportion to the time difference.
+    void UpdateCurrentDelay(WebRtc_UWord32 frameTimestamp);
+
+    // Increases or decreases the current delay to get closer to the target delay.
+    // Given the actual decode time in ms and the render time in ms for a frame, this
+    // function calculates how late the frame is and increases the delay accordingly.
+    void UpdateCurrentDelay(WebRtc_Word64 renderTimeMs, WebRtc_Word64 actualDecodeTimeMs);
+
+    // Stops the decoder timer, should be called when the decoder returns a frame
+    // or when the decoded frame callback is called.
+    WebRtc_Word32 StopDecodeTimer(WebRtc_UWord32 timeStamp,
+                                  WebRtc_Word64 startTimeMs,
+                                  WebRtc_Word64 nowMs);
+
+    // Used to report that a frame is passed to decoding. Updates the timestamp filter
+    // which is used to map between timestamps and receiver system time.
+    void IncomingTimestamp(WebRtc_UWord32 timeStamp, WebRtc_Word64 lastPacketTimeMs);
+
+    // Returns the receiver system time when the frame with timestamp frameTimestamp
+    // should be rendered, assuming that the system time currently is nowMs.
+    WebRtc_Word64 RenderTimeMs(WebRtc_UWord32 frameTimestamp, WebRtc_Word64 nowMs) const;
+
+    // Returns the maximum time in ms that we can wait for a frame to become complete
+    // before we must pass it to the decoder.
+    WebRtc_UWord32 MaxWaitingTime(WebRtc_Word64 renderTimeMs, WebRtc_Word64 nowMs) const;
+
+    // Returns the current target delay which is required delay + decode time + render
+    // delay.
+    WebRtc_UWord32 TargetVideoDelay() const;
+
+    // Calculates whether or not there is enough time to decode a frame given a
+    // certain amount of processing time.
+    bool EnoughTimeToDecode(WebRtc_UWord32 availableProcessingTimeMs) const;
+
+    enum { kDefaultRenderDelayMs = 10 };
+    enum { kDelayMaxChangeMsPerS = 100 };  // Delay ramp rate limit.
+
+protected:
+    WebRtc_Word32 MaxDecodeTimeMs(FrameType frameType = kVideoFrameDelta) const;
+    WebRtc_Word64 RenderTimeMsInternal(WebRtc_UWord32 frameTimestamp,
+                                       WebRtc_Word64 nowMs) const;
+    WebRtc_UWord32 TargetDelayInternal() const;
+
+private:
+    CriticalSectionWrapper*       _critSect;  // Owned.
+    WebRtc_Word32                 _vcmId;
+    TickTimeBase*                 _clock;  // Not owned.
+    WebRtc_Word32                 _timingId;
+    bool                          _master;  // True => owns _tsExtrapolator.
+    VCMTimestampExtrapolator*     _tsExtrapolator;  // Owned iff _master.
+    VCMCodecTimer                 _codecTimer;
+    WebRtc_UWord32                _renderDelayMs;
+    WebRtc_UWord32                _minTotalDelayMs;
+    WebRtc_UWord32                _requiredDelayMs;
+    WebRtc_UWord32                _currentDelayMs;
+    WebRtc_UWord32                _prevFrameTimestamp;  // 90 kHz RTP ts.
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
diff --git a/trunk/src/modules/video_coding/main/source/video_coding.gypi b/trunk/src/modules/video_coding/main/source/video_coding.gypi
new file mode 100644
index 0000000..3a99283
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/video_coding.gypi
@@ -0,0 +1,106 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# GYP build description for the webrtc_video_coding static library: the
+# main video coding module (jitter buffer, timing, media optimization,
+# generic encoder/decoder plumbing).
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_video_coding',
+      'type': '<(library)',
+      # Depends on the bundled software codecs plus libyuv/system wrappers.
+      'dependencies': [
+        'webrtc_i420',
+        'webrtc_vp8',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+        '../../codecs/interface',
+        '../../../../common_video/interface',
+      ],
+      # Only the public interface directories are exported to dependents.
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../codecs/interface',
+        ],
+      },
+      'sources': [
+        # interfaces
+        '../interface/video_coding.h',
+        '../interface/video_coding_defines.h',
+
+        # headers
+        'codec_database.h',
+        'codec_timer.h',
+        'content_metrics_processing.h',
+        'decoding_state.h',
+        'encoded_frame.h',
+        'er_tables_xor.h',
+        'event.h',
+        'exp_filter.h',
+        'fec_tables_xor.h',
+        'frame_buffer.h',
+        'frame_dropper.h',
+        'generic_decoder.h',
+        'generic_encoder.h',
+        'inter_frame_delay.h',
+        'internal_defines.h',
+        'jitter_buffer.h',
+        'jitter_buffer_common.h',
+        'jitter_estimator.h',
+        'media_opt_util.h',
+        'media_optimization.h',
+        'nack_fec_tables.h',
+        'packet.h',
+        'qm_select_data.h',
+        'qm_select.h',
+        'receiver.h',
+        'rtt_filter.h',
+        'session_info.h',
+        'tick_time_base.h',
+        'timestamp_extrapolator.h',
+        'timestamp_map.h',
+        'timing.h',
+        'video_coding_impl.h',
+
+        # sources
+        'codec_database.cc',
+        'codec_timer.cc',
+        'content_metrics_processing.cc',
+        'decoding_state.cc',
+        'encoded_frame.cc',
+        'exp_filter.cc',
+        'frame_buffer.cc',
+        'frame_dropper.cc',
+        'generic_decoder.cc',
+        'generic_encoder.cc',
+        'inter_frame_delay.cc',
+        'jitter_buffer.cc',
+        'jitter_buffer_common.cc',
+        'jitter_estimator.cc',
+        'media_opt_util.cc',
+        'media_optimization.cc',
+        'packet.cc',
+        'qm_select.cc',
+        'receiver.cc',
+        'rtt_filter.cc',
+        'session_info.cc',
+        'timestamp_extrapolator.cc',
+        'timestamp_map.cc',
+        'timing.cc',
+        'video_coding_impl.cc',
+      ], # source
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_coding/main/source/video_coding_impl.cc b/trunk/src/modules/video_coding/main/source/video_coding_impl.cc
new file mode 100644
index 0000000..771f702
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/video_coding_impl.cc
@@ -0,0 +1,1385 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_coding_impl.h"
+#include "common_types.h"
+#include "encoded_frame.h"
+#include "jitter_buffer.h"
+#include "packet.h"
+#include "trace.h"
+#include "video_codec_interface.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+namespace webrtc
+{
+
+//#define DEBUG_DECODER_BIT_STREAM
+//#define DEBUG_ENCODER_INPUT
+
+// Returns the configured period of this timer in milliseconds.
+WebRtc_UWord32
+VCMProcessTimer::Period() const
+{
+    return _periodMs;
+}
+
+// Returns the number of milliseconds left until the timer fires again,
+// clamped to zero if the period has already elapsed since Processed().
+WebRtc_UWord32
+VCMProcessTimer::TimeUntilProcess() const
+{
+    return static_cast<WebRtc_UWord32>(
+        VCM_MAX(static_cast<WebRtc_Word64>(_periodMs) -
+                (_clock->MillisecondTimestamp() - _latestMs), 0));
+}
+
+// Marks the timer as serviced now; restarts the countdown used by
+// TimeUntilProcess().
+void
+VCMProcessTimer::Processed()
+{
+    _latestMs = _clock->MillisecondTimestamp();
+}
+
+// Constructs the VCM. |clock| must be non-NULL; |delete_clock_on_destroy|
+// indicates whether this object takes ownership of it (true when created
+// via Create(id), false when the caller supplied the clock).
+// Two timing/receiver pairs are set up: the primary (id suffix 1) and a
+// "dual" instance (id suffix 2) used for NACK recovery decoding.
+VideoCodingModuleImpl::VideoCodingModuleImpl(const WebRtc_Word32 id,
+                                             TickTimeBase* clock,
+                                             bool delete_clock_on_destroy)
+:
+_id(id),
+clock_(clock),
+delete_clock_on_destroy_(delete_clock_on_destroy),
+_receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_receiverInited(false),
+_timing(clock_, id, 1),
+_dualTiming(clock_, id, 2, &_timing),
+_receiver(_timing, clock_, id, 1),
+_dualReceiver(_dualTiming, clock_, id, 2, false),
+_decodedFrameCallback(_timing, clock_),
+_dualDecodedFrameCallback(_dualTiming, clock_),
+_frameTypeCallback(NULL),
+_frameStorageCallback(NULL),
+_receiveStatsCallback(NULL),
+_packetRequestCallback(NULL),
+_decoder(NULL),
+_dualDecoder(NULL),
+_bitStreamBeforeDecoder(NULL),
+_frameFromFile(),
+_keyRequestMode(kKeyOnError),
+_scheduleKeyRequest(false),
+
+_sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_encoder(),
+_encodedFrameCallback(),
+_mediaOpt(id, clock_),
+_sendCodecType(kVideoCodecUnknown),
+_sendStatsCallback(NULL),
+_encoderInputFile(NULL),
+
+_codecDataBase(id),
+_receiveStatsTimer(1000, clock_),
+_sendStatsTimer(1000, clock_),
+_retransmissionTimer(10, clock_),
+_keyRequestTimer(500, clock_)
+{
+    assert(clock_);
+    // Default every simulcast stream to requesting delta frames.
+    for (int i = 0; i < kMaxSimulcastStreams; i++)
+    {
+        _nextFrameType[i] = kVideoFrameDelta;
+    }
+    // Debug-only dump files; fopen may fail and leave these NULL.
+#ifdef DEBUG_DECODER_BIT_STREAM
+    _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
+#endif
+#ifdef DEBUG_ENCODER_INPUT
+    _encoderInputFile = fopen("encoderInput.yuv", "wb");
+#endif
+}
+
+// Destructor. Releases the dual decoder (if any) back to the codec
+// database, frees the critical sections, and deletes the clock only when
+// it was created internally (delete_clock_on_destroy_ == true).
+VideoCodingModuleImpl::~VideoCodingModuleImpl()
+{
+    if (_dualDecoder != NULL)
+    {
+        _codecDataBase.ReleaseDecoder(_dualDecoder);
+    }
+    delete _receiveCritSect;
+    delete _sendCritSect;
+    if (delete_clock_on_destroy_) delete clock_;
+#ifdef DEBUG_DECODER_BIT_STREAM
+    // The fopen() in the constructor may have failed; fclose(NULL) is
+    // undefined behavior, so guard against a NULL handle.
+    if (_bitStreamBeforeDecoder != NULL)
+    {
+        fclose(_bitStreamBeforeDecoder);
+    }
+#endif
+#ifdef DEBUG_ENCODER_INPUT
+    if (_encoderInputFile != NULL)
+    {
+        fclose(_encoderInputFile);
+    }
+#endif
+}
+
+// Factory: creates a VCM with an internally owned clock (deleted in the
+// destructor).
+VideoCodingModule*
+VideoCodingModule::Create(const WebRtc_Word32 id)
+{
+    return new VideoCodingModuleImpl(id, new TickTimeBase(), true);
+}
+
+// Factory: creates a VCM using a caller-supplied clock. The caller retains
+// ownership of |clock|, which must be non-NULL and outlive the module.
+VideoCodingModule*
+VideoCodingModule::Create(const WebRtc_Word32 id, TickTimeBase* clock)
+{
+    assert(clock);
+    return new VideoCodingModuleImpl(id, clock, false);
+}
+
+// Destroys a module previously returned by Create(); safe to call with
+// NULL.
+void
+VideoCodingModule::Destroy(VideoCodingModule* module)
+{
+    if (module != NULL)
+    {
+        delete static_cast<VideoCodingModuleImpl*>(module);
+    }
+}
+
+// Periodic worker entry point. Services, in order: receive-side statistics,
+// send-side statistics, NACK retransmission requests, and scheduled key
+// frame requests. The first error encountered is remembered and returned,
+// but processing of the remaining timers continues.
+WebRtc_Word32
+VideoCodingModuleImpl::Process()
+{
+    WebRtc_Word32 returnValue = VCM_OK;
+
+    // Receive-side statistics
+    if (_receiveStatsTimer.TimeUntilProcess() == 0)
+    {
+        _receiveStatsTimer.Processed();
+        if (_receiveStatsCallback != NULL)
+        {
+            WebRtc_UWord32 bitRate;
+            WebRtc_UWord32 frameRate;
+            const WebRtc_Word32 ret = _receiver.ReceiveStatistics(bitRate,
+                                                                  frameRate);
+            if (ret == 0)
+            {
+                _receiveStatsCallback->ReceiveStatistics(bitRate, frameRate);
+            }
+            else if (returnValue == VCM_OK)
+            {
+                returnValue = ret;
+            }
+        }
+    }
+
+    // Send-side statistics
+    if (_sendStatsTimer.TimeUntilProcess() == 0)
+    {
+        _sendStatsTimer.Processed();
+        if (_sendStatsCallback != NULL)
+        {
+            WebRtc_UWord32 bitRate;
+            WebRtc_UWord32 frameRate;
+            {
+                // Snapshot the rates under the send lock; the callback is
+                // invoked outside the lock to avoid re-entrancy deadlocks.
+                CriticalSectionScoped cs(_sendCritSect);
+                bitRate = static_cast<WebRtc_UWord32>(
+                    _mediaOpt.SentBitRate() + 0.5f);
+                frameRate = static_cast<WebRtc_UWord32>(
+                    _mediaOpt.SentFrameRate() + 0.5f);
+            }
+            _sendStatsCallback->SendStatistics(bitRate, frameRate);
+        }
+    }
+
+    // Packet retransmission requests
+    if (_retransmissionTimer.TimeUntilProcess() == 0)
+    {
+        _retransmissionTimer.Processed();
+        if (_packetRequestCallback != NULL)
+        {
+            WebRtc_UWord16 nackList[kNackHistoryLength];
+            WebRtc_UWord16 length = kNackHistoryLength;
+            const WebRtc_Word32 ret = NackList(nackList, length);
+            if (ret != VCM_OK && returnValue == VCM_OK)
+            {
+                returnValue = ret;
+            }
+            if (length > 0)
+            {
+                _packetRequestCallback->ResendPackets(nackList, length);
+            }
+        }
+    }
+
+    // Key frame requests
+    if (_keyRequestTimer.TimeUntilProcess() == 0)
+    {
+        _keyRequestTimer.Processed();
+        if (_scheduleKeyRequest && _frameTypeCallback != NULL)
+        {
+            const WebRtc_Word32 ret = RequestKeyFrame();
+            if (ret != VCM_OK && returnValue == VCM_OK)
+            {
+                returnValue = ret;
+            }
+        }
+    }
+
+    return returnValue;
+}
+
+// Returns the module's unique identifier. Takes both locks (receive then
+// send) so the id cannot change mid-read via ChangeUniqueId().
+WebRtc_Word32
+VideoCodingModuleImpl::Id() const
+{
+    CriticalSectionScoped receiveCs(_receiveCritSect);
+    {
+        CriticalSectionScoped sendCs(_sendCritSect);
+        return _id;
+    }
+}
+
+//  Change the unique identifier of this object
+//  Change the unique identifier of this object
+// Locks receive then send — same order as Id() — to avoid deadlock.
+WebRtc_Word32
+VideoCodingModuleImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped receiveCs(_receiveCritSect);
+    {
+        CriticalSectionScoped sendCs(_sendCritSect);
+        _id = id;
+        return VCM_OK;
+    }
+}
+
+// Returns the number of milliseconds until the module wants a worker thread to
+// call Process
+// Returns the number of milliseconds until the module wants a worker thread
+// to call Process: the minimum over all active internal timers.
+WebRtc_Word32
+VideoCodingModuleImpl::TimeUntilNextProcess()
+{
+    WebRtc_UWord32 timeUntilNextProcess = VCM_MIN(
+                                    _receiveStatsTimer.TimeUntilProcess(),
+                                    _sendStatsTimer.TimeUntilProcess());
+    if ((_receiver.NackMode() != kNoNack) ||
+        (_dualReceiver.State() != kPassive))
+    {
+        // We need a Process call more often if we are relying on
+        // retransmissions
+        timeUntilNextProcess = VCM_MIN(timeUntilNextProcess,
+                                       _retransmissionTimer.TimeUntilProcess());
+    }
+    timeUntilNextProcess = VCM_MIN(timeUntilNextProcess,
+                                   _keyRequestTimer.TimeUntilProcess());
+
+    return timeUntilNextProcess;
+}
+
+// Get number of supported codecs
+// Get number of supported codecs (static, delegates to the codec database).
+WebRtc_UWord8
+VideoCodingModule::NumberOfCodecs()
+{
+    return VCMCodecDataBase::NumberOfCodecs();
+}
+
+// Get supported codec with id
+// Get supported codec with list index |listId|; fills |codec|. Returns
+// VCM_PARAMETER_ERROR when |codec| is NULL.
+WebRtc_Word32
+VideoCodingModule::Codec(WebRtc_UWord8 listId, VideoCodec* codec)
+{
+    if (codec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return VCMCodecDataBase::Codec(listId, codec);
+}
+
+// Get supported codec with type
+// Get supported codec with type |codecType|; fills |codec|. Returns
+// VCM_PARAMETER_ERROR when |codec| is NULL.
+WebRtc_Word32
+VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec)
+{
+    if (codec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return VCMCodecDataBase::Codec(codecType, codec);
+}
+
+/*
+*   Sender
+*/
+
+// Reset send side to initial state - all components
+// Reset send side to initial state - all components: clears the encoder,
+// the transport callback and the media-optimization state.
+WebRtc_Word32
+VideoCodingModuleImpl::InitializeSender()
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _codecDataBase.ResetSender();
+    _encoder = NULL;
+    _encodedFrameCallback.SetTransportCallback(NULL);
+    // setting default bitRate and frameRate to 0
+    _mediaOpt.SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0);
+    _mediaOpt.Reset(); // Resetting frame dropper
+    return VCM_OK;
+}
+
+// Register the send codec to be used.
+// Register the send codec to be used. Registers |sendCodec| with the codec
+// database, instantiates the encoder, and configures media optimization
+// with the codec's rate/resolution settings and |maxPayloadSize| as MTU.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterSendCodec(const VideoCodec* sendCodec,
+                                         WebRtc_UWord32 numberOfCores,
+                                         WebRtc_UWord32 maxPayloadSize)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    if (sendCodec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    WebRtc_Word32 ret = _codecDataBase.RegisterSendCodec(sendCodec,
+                                                         numberOfCores,
+                                                         maxPayloadSize);
+    if (ret < 0)
+    {
+        return ret;
+    }
+
+    _encoder = _codecDataBase.SetEncoder(sendCodec, &_encodedFrameCallback);
+    if (_encoder == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to initialize encoder");
+        return VCM_CODEC_ERROR;
+    }
+    _sendCodecType = sendCodec->codecType;
+    // Temporal layers are only meaningful for VP8; other codecs get 1.
+    int numLayers = (_sendCodecType != kVideoCodecVP8) ? 1 :
+                        sendCodec->codecSpecific.VP8.numberOfTemporalLayers;
+
+    _mediaOpt.SetEncodingData(_sendCodecType,
+                              sendCodec->maxBitrate,
+                              sendCodec->maxFramerate,
+                              sendCodec->startBitrate,
+                              sendCodec->width,
+                              sendCodec->height,
+                              numLayers);
+    _mediaOpt.SetMtu(maxPayloadSize);
+
+    return VCM_OK;
+}
+
+// Get current send codec
+// Get current send codec; fills |currentSendCodec|. Returns
+// VCM_PARAMETER_ERROR when the out-parameter is NULL.
+WebRtc_Word32
+VideoCodingModuleImpl::SendCodec(VideoCodec* currentSendCodec) const
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    if (currentSendCodec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return _codecDataBase.SendCodec(currentSendCodec);
+}
+
+// Get the current send codec type
+// Get the current send codec type as stored in the codec database.
+VideoCodecType
+VideoCodingModuleImpl::SendCodec() const
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    return _codecDataBase.SendCodec();
+}
+
+// Register an external encoder object.
+// This can not be used together with external encoder callbacks.
+// Registers (or, when |externalEncoder| is NULL, de-registers) an external
+// encoder for |payloadType|. De-registering the active send codec also
+// clears the cached encoder pointer.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                               WebRtc_UWord8 payloadType,
+                                               bool internalSource /*= false*/)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    if (externalEncoder == NULL)
+    {
+        bool wasSendCodec = false;
+        const WebRtc_Word32 ret = _codecDataBase.DeRegisterExternalEncoder(
+                                                                  payloadType,
+                                                                  wasSendCodec);
+        if (wasSendCodec)
+        {
+            // Make sure the VCM doesn't use the de-registered codec
+            _encoder = NULL;
+        }
+        return ret;
+    }
+    return _codecDataBase.RegisterExternalEncoder(externalEncoder,
+                                                  payloadType,
+                                                  internalSource);
+}
+
+// Get codec config parameters
+// Get codec config parameters from the active encoder into |buffer|
+// (at most |size| bytes). Returns VCM_UNINITIALIZED when no encoder is set.
+WebRtc_Word32
+VideoCodingModuleImpl::CodecConfigParameters(WebRtc_UWord8* buffer,
+                                             WebRtc_Word32 size)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    if (_encoder != NULL)
+    {
+        return _encoder->CodecConfigParameters(buffer, size);
+    }
+    return VCM_UNINITIALIZED;
+}
+
+// Get encode bitrate
+// Get encode bitrate: writes the encoder's configured bit rate to
+// |bitrate|. Returns VCM_UNINITIALIZED when no encoder is registered.
+int VideoCodingModuleImpl::Bitrate(unsigned int* bitrate) const
+{
+  CriticalSectionScoped cs(_sendCritSect);
+  // return the bit rate which the encoder is set to
+  if (!_encoder) {
+    return VCM_UNINITIALIZED;
+  }
+  *bitrate = _encoder->BitRate();
+  return 0;
+}
+
+// Get encode frame rate
+// Get encode frame rate: writes the encoder's input frame rate to
+// |framerate|. Returns VCM_UNINITIALIZED when no encoder is registered.
+int VideoCodingModuleImpl::FrameRate(unsigned int* framerate) const
+{
+  CriticalSectionScoped cs(_sendCritSect);
+  // input frame rate, not compensated
+  if (!_encoder) {
+    return VCM_UNINITIALIZED;
+  }
+  *framerate = _encoder->FrameRate();
+  return 0;
+}
+
+// Set channel parameters
+// Set channel parameters: feeds bandwidth/loss/RTT into media optimization
+// and forwards the resulting target rate to the encoder. Returns
+// VCM_UNINITIALIZED when no encoder is registered, or the encoder's error.
+WebRtc_Word32
+VideoCodingModuleImpl::SetChannelParameters(WebRtc_UWord32 availableBandWidth,
+                                            WebRtc_UWord8 lossRate,
+                                            WebRtc_UWord32 rtt)
+{
+    WebRtc_Word32 ret = 0;
+    {
+        CriticalSectionScoped sendCs(_sendCritSect);
+        WebRtc_UWord32 targetRate = _mediaOpt.SetTargetRates(availableBandWidth,
+                                                             lossRate,
+                                                             rtt);
+        if (_encoder != NULL)
+        {
+            ret = _encoder->SetChannelParameters(lossRate, rtt);
+            if (ret < 0 )
+            {
+                return ret;
+            }
+            ret = (WebRtc_Word32)_encoder->SetRates(targetRate,
+                                                    _mediaOpt.InputFrameRate());
+            if (ret < 0)
+            {
+                return ret;
+            }
+        }
+        else
+        {
+            return VCM_UNINITIALIZED;
+        } // encoder
+    }// send side
+    return VCM_OK;
+}
+
+// Propagates the round-trip time estimate to the receiver (affects NACK
+// and jitter estimation).
+WebRtc_Word32
+VideoCodingModuleImpl::SetReceiveChannelParameters(WebRtc_UWord32 rtt)
+{
+    CriticalSectionScoped receiveCs(_receiveCritSect);
+    _receiver.UpdateRtt(rtt);
+    return 0;
+}
+
+// Register a transport callback which will be called to deliver the encoded
+// buffers
+// Register a transport callback which will be called to deliver the
+// encoded buffers. Also wires media optimization into the encoded-frame
+// callback so per-frame statistics are collected.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterTransportCallback(
+    VCMPacketizationCallback* transport)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _encodedFrameCallback.SetMediaOpt(&_mediaOpt);
+    _encodedFrameCallback.SetTransportCallback(transport);
+    return VCM_OK;
+}
+
+// Register video output information callback which will be called to deliver
+// information about the video stream produced by the encoder, for instance the
+// average frame rate and bit rate.
+// Register video output information callback, invoked periodically from
+// Process() with the sent bit rate and frame rate.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterSendStatisticsCallback(
+    VCMSendStatisticsCallback* sendStats)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _sendStatsCallback = sendStats;
+    return VCM_OK;
+}
+
+// Register a video quality settings callback which will be called when frame
+// rate/dimensions need to be updated for video quality optimization
+// Register a video quality settings callback which will be called when
+// frame rate/dimensions need to be updated for video quality optimization.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterVideoQMCallback(
+    VCMQMSettingsCallback* videoQMSettings)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    return _mediaOpt.RegisterVideoQMCallback(videoQMSettings);
+}
+
+
+// Register a video protection callback which will be called to deliver the
+// requested FEC rate and NACK status (on/off).
+// Register a video protection callback which will be called to deliver
+// the requested FEC rate and NACK status (on/off).
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterProtectionCallback(
+    VCMProtectionCallback* protection)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _mediaOpt.RegisterProtectionCallback(protection);
+    return VCM_OK;
+}
+
+// Enable or disable a video protection method.
+// Enable or disable a video protection method. Depending on the method
+// this touches send-side state (media optimization), receive-side state
+// (NACK mode, key-request mode), or both; each branch takes only the
+// lock(s) it needs. kProtectionNack recursively applies the sender and
+// receiver variants.
+WebRtc_Word32
+VideoCodingModuleImpl::SetVideoProtection(VCMVideoProtection videoProtection,
+                                          bool enable)
+{
+
+    switch (videoProtection)
+    {
+
+    case kProtectionNack:
+        {
+            // Both send-side and receive-side
+            SetVideoProtection(kProtectionNackSender, enable);
+            SetVideoProtection(kProtectionNackReceiver, enable);
+            break;
+        }
+
+    case kProtectionNackSender:
+        {
+            CriticalSectionScoped cs(_sendCritSect);
+            _mediaOpt.EnableProtectionMethod(enable, kNack);
+            break;
+        }
+
+    case kProtectionNackReceiver:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _receiver.SetNackMode(kNackInfinite);
+            }
+            else
+            {
+                _receiver.SetNackMode(kNoNack);
+            }
+            break;
+        }
+
+    case kProtectionDualDecoder:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                // NACK-driven recovery moves to the dual receiver; the
+                // primary receiver stops NACKing.
+                _receiver.SetNackMode(kNoNack);
+                _dualReceiver.SetNackMode(kNackInfinite);
+            }
+            else
+            {
+                _dualReceiver.SetNackMode(kNoNack);
+            }
+            break;
+        }
+
+    case kProtectionKeyOnLoss:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _keyRequestMode = kKeyOnLoss;
+            }
+            else if (_keyRequestMode == kKeyOnLoss)
+            {
+                _keyRequestMode = kKeyOnError; // default mode
+            }
+            else
+            {
+                // Disabling a mode that is not active is an error.
+                return VCM_PARAMETER_ERROR;
+            }
+            break;
+        }
+
+    case kProtectionKeyOnKeyLoss:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _keyRequestMode = kKeyOnKeyLoss;
+            }
+            else if (_keyRequestMode == kKeyOnKeyLoss)
+            {
+                _keyRequestMode = kKeyOnError; // default mode
+            }
+            else
+            {
+                return VCM_PARAMETER_ERROR;
+            }
+            break;
+        }
+
+    case kProtectionNackFEC:
+        {
+            {
+              // Receive side
+                CriticalSectionScoped cs(_receiveCritSect);
+                if (enable)
+                {
+                    _receiver.SetNackMode(kNackHybrid);
+                }
+                else
+                {
+                    _receiver.SetNackMode(kNoNack);
+                }
+            }
+            // Send Side
+            {
+                CriticalSectionScoped cs(_sendCritSect);
+                _mediaOpt.EnableProtectionMethod(enable, kNackFec);
+            }
+            break;
+        }
+
+    case kProtectionFEC:
+        {
+            CriticalSectionScoped cs(_sendCritSect);
+            _mediaOpt.EnableProtectionMethod(enable, kFec);
+            break;
+        }
+
+    case kProtectionPeriodicKeyFrames:
+        {
+            CriticalSectionScoped cs(_sendCritSect);
+            return _codecDataBase.SetPeriodicKeyFrames(enable);
+            // NOTE(review): this break is unreachable after the return
+            // above; consider removing it.
+            break;
+        }
+    }
+    return VCM_OK;
+}
+
+// Add one raw video frame to the encoder, blocking.
+// Add one raw video frame to the encoder, blocking. The frame may be
+// dropped by media optimization (bitrate control); otherwise it is encoded
+// with the currently requested per-stream frame types, which are then reset
+// to delta. Returns VCM_UNINITIALIZED when no encoder is registered.
+WebRtc_Word32
+VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
+                                     const VideoContentMetrics* contentMetrics,
+                                     const CodecSpecificInfo* codecSpecificInfo)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    if (_encoder == NULL)
+    {
+        return VCM_UNINITIALIZED;
+    }
+    // kFrameEmpty on the first stream means no frame is wanted right now.
+    if (_nextFrameType[0] == kFrameEmpty)
+    {
+        return VCM_OK;
+    }
+    _mediaOpt.UpdateIncomingFrameRate();
+
+    if (_mediaOpt.DropFrame())
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Drop frame due to bitrate");
+    }
+    else
+    {
+        _mediaOpt.updateContentData(contentMetrics);
+        WebRtc_Word32 ret = _encoder->Encode(videoFrame,
+                                             codecSpecificInfo,
+                                             _nextFrameType);
+#ifdef DEBUG_ENCODER_INPUT
+        if (_encoderInputFile != NULL)
+        {
+            fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+                   _encoderInputFile);
+        }
+#endif
+        if (ret < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Encode error: %d", ret);
+            return ret;
+        }
+        for (int i = 0; i < kMaxSimulcastStreams; i++)
+        {
+            _nextFrameType[i] = kVideoFrameDelta; // default frame type
+        }
+    }
+    return VCM_OK;
+}
+
+// Next frame encoded should be of the type frameType
+// Good for only one frame
+// Next frame encoded should be of the type frameType
+// Good for only one frame
+// For encoders with an internal source the frame is requested immediately
+// since AddVideoFrame() will never be called for them.
+WebRtc_Word32
+VideoCodingModuleImpl::FrameTypeRequest(FrameType frameType, 
+                                        WebRtc_UWord8 simulcastIdx)
+{
+    assert(simulcastIdx < kMaxSimulcastStreams);
+
+
+    CriticalSectionScoped cs(_sendCritSect);
+    _nextFrameType[simulcastIdx] = frameType;
+    if (_encoder != NULL && _encoder->InternalSource())
+    {
+        // Try to request the frame if we have an external encoder with
+        // internal source since AddVideoFrame never will be called.
+        if (_encoder->RequestFrame(_nextFrameType) == WEBRTC_VIDEO_CODEC_OK)
+        {
+            _nextFrameType[simulcastIdx] = kVideoFrameDelta;
+        }
+    }
+    return VCM_OK;
+}
+
+// Enables/disables the send-side frame dropper in media optimization.
+WebRtc_Word32
+VideoCodingModuleImpl::EnableFrameDropper(bool enable)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _mediaOpt.EnableFrameDropper(enable);
+    return VCM_OK;
+}
+
+
+// Fills |frameCount| with the number of key/delta frames sent so far.
+WebRtc_Word32
+VideoCodingModuleImpl::SentFrameCount(VCMFrameCount &frameCount) const
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    return _mediaOpt.SentFrameCount(frameCount);
+}
+
+// Initialize receiver, resets codec database etc
+// Initialize receiver, resets codec database etc. Both receivers, timing,
+// the decoder pointer and all receive-side callbacks are reset to their
+// defaults; marks the receiver as initialized on success.
+WebRtc_Word32
+VideoCodingModuleImpl::InitializeReceiver()
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    WebRtc_Word32 ret = _receiver.Initialize();
+    if (ret < 0)
+    {
+        return ret;
+    }
+
+    ret = _dualReceiver.Initialize();
+    if (ret < 0)
+    {
+        return ret;
+    }
+    _codecDataBase.ResetReceiver();
+    _timing.Reset();
+
+    _decoder = NULL;
+    _decodedFrameCallback.SetUserReceiveCallback(NULL);
+    _receiverInited = true;
+    _frameTypeCallback = NULL;
+    _frameStorageCallback = NULL;
+    _receiveStatsCallback = NULL;
+    _packetRequestCallback = NULL;
+    _keyRequestMode = kKeyOnError;
+    _scheduleKeyRequest = false;
+
+    return VCM_OK;
+}
+
+// Register a receive callback. Will be called whenever there is a new frame
+// ready for rendering.
+// Register a receive callback. Will be called whenever there is a new
+// frame ready for rendering.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterReceiveCallback(
+    VCMReceiveCallback* receiveCallback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _decodedFrameCallback.SetUserReceiveCallback(receiveCallback);
+    return VCM_OK;
+}
+
+// Registers the callback invoked periodically from Process() with
+// receive-side bit rate and frame rate statistics.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterReceiveStatisticsCallback(
+                                     VCMReceiveStatisticsCallback* receiveStats)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _receiveStatsCallback = receiveStats;
+    return VCM_OK;
+}
+
+// Register an externally defined decoder/render object.
+// Can be a decoder only or a decoder coupled with a renderer.
+// Register an externally defined decoder/render object.
+// Can be a decoder only or a decoder coupled with a renderer.
+// Passing NULL de-registers the decoder for |payloadType|.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                               WebRtc_UWord8 payloadType,
+                                               bool internalRenderTiming)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (externalDecoder == NULL)
+    {
+        // Make sure the VCM updates the decoder next time it decodes.
+        _decoder = NULL;
+        return _codecDataBase.DeRegisterExternalDecoder(payloadType);
+    }
+    else
+    {
+        return _codecDataBase.RegisterExternalDecoder(externalDecoder,
+                                                      payloadType,
+                                                      internalRenderTiming);
+    }
+}
+
+// Register a frame type request callback.
+// Register a frame type request callback (used for key frame and SLI
+// requests).
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterFrameTypeCallback(
+    VCMFrameTypeCallback* frameTypeCallback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _frameTypeCallback = frameTypeCallback;
+    return VCM_OK;
+}
+
+// Registers a callback that receives each complete encoded frame before it
+// is decoded (see Decode()).
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterFrameStorageCallback(
+    VCMFrameStorageCallback* frameStorageCallback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _frameStorageCallback = frameStorageCallback;
+    return VCM_OK;
+}
+
+// Registers the callback used by Process() to request retransmission of
+// missing packets (NACK).
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterPacketRequestCallback(
+    VCMPacketRequestCallback* callback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _packetRequestCallback = callback;
+    return VCM_OK;
+}
+
+// Decode next frame, blocking.
+// Should be called as often as possible to get the most out of the decoder.
+// Decode next frame, blocking up to |maxWaitTimeMs| for a complete frame.
+// Should be called as often as possible to get the most out of the decoder.
+// When the dual receiver transitions into kReceiving during this call, the
+// primary decoder state is copied into a dual decoder so retransmitted
+// frames can be decoded for recovery.
+WebRtc_Word32
+VideoCodingModuleImpl::Decode(WebRtc_UWord16 maxWaitTimeMs)
+{
+    WebRtc_Word64 nextRenderTimeMs;
+    {
+        CriticalSectionScoped cs(_receiveCritSect);
+        if (!_receiverInited)
+        {
+            return VCM_UNINITIALIZED;
+        }
+        if (!_codecDataBase.DecoderRegistered())
+        {
+            return VCM_NO_CODEC_REGISTERED;
+        }
+    }
+
+    // Snapshot the dual-receiver state before possibly changing it below.
+    const bool dualReceiverEnabledNotReceiving =
+        (_dualReceiver.State() != kReceiving &&
+         _dualReceiver.NackMode() == kNackInfinite);
+
+    VCMEncodedFrame* frame = _receiver.FrameForDecoding(
+                                                  maxWaitTimeMs,
+                                                  nextRenderTimeMs,
+                                                  _codecDataBase.RenderTiming(),
+                                                  &_dualReceiver);
+
+    if (dualReceiverEnabledNotReceiving && _dualReceiver.State() == kReceiving)
+    {
+        // Dual receiver is enabled (kNACK enabled), but was not receiving
+        // before the call to FrameForDecoding(). After the call the state
+        // changed to receiving, and therefore we must copy the primary decoder
+        // state to the dual decoder to make it possible for the dual decoder to
+        // start decoding retransmitted frames and recover.
+        CriticalSectionScoped cs(_receiveCritSect);
+        if (_dualDecoder != NULL)
+        {
+            _codecDataBase.ReleaseDecoder(_dualDecoder);
+        }
+        _dualDecoder = _codecDataBase.CreateDecoderCopy();
+        if (_dualDecoder != NULL)
+        {
+            _dualDecoder->RegisterDecodeCompleteCallback(
+                &_dualDecodedFrameCallback);
+        }
+        else
+        {
+            _dualReceiver.Reset();
+        }
+    }
+
+    if (frame == NULL)
+      return VCM_FRAME_NOT_READY;
+    else
+    {
+        CriticalSectionScoped cs(_receiveCritSect);
+
+        // If this frame was too late, we should adjust the delay accordingly
+        _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
+                                   clock_->MillisecondTimestamp());
+
+#ifdef DEBUG_DECODER_BIT_STREAM
+        if (_bitStreamBeforeDecoder != NULL)
+        {
+            // Write bit stream to file for debugging purposes
+            fwrite(frame->Buffer(), 1, frame->Length(),
+                   _bitStreamBeforeDecoder);
+        }
+#endif
+        if (_frameStorageCallback != NULL)
+        {
+            WebRtc_Word32 ret = frame->Store(*_frameStorageCallback);
+            if (ret < 0)
+            {
+                return ret;
+            }
+        }
+
+        // Decode and always release the frame back to the receiver,
+        // regardless of the decode result.
+        const WebRtc_Word32 ret = Decode(*frame);
+        _receiver.ReleaseFrame(frame);
+        frame = NULL;
+        if (ret != VCM_OK)
+        {
+            return ret;
+        }
+    }
+    return VCM_OK;
+}
+
+// Requests a slice loss indication (SLI) for |pictureID| via the registered
+// frame type callback. Returns VCM_MISSING_CALLBACK when none is set.
+WebRtc_Word32
+VideoCodingModuleImpl::RequestSliceLossIndication(
+    const WebRtc_UWord64 pictureID) const
+{
+    if (_frameTypeCallback != NULL)
+    {
+        const WebRtc_Word32 ret =
+            _frameTypeCallback->SliceLossIndicationRequest(pictureID);
+        if (ret < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to request key frame");
+            return ret;
+        }
+    } else
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "No frame type request callback registered");
+        return VCM_MISSING_CALLBACK;
+    }
+    return VCM_OK;
+}
+
+// Ask the sender for a key frame through the registered frame type
+// callback. On success any scheduled key frame request is cleared.
+// Returns VCM_OK, VCM_MISSING_CALLBACK when no callback is registered,
+// or the callback's negative error code.
+WebRtc_Word32
+VideoCodingModuleImpl::RequestKeyFrame() {
+  if (_frameTypeCallback == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceWarning,
+                 webrtc::kTraceVideoCoding,
+                 VCMId(_id),
+                 "No frame type request callback registered");
+    return VCM_MISSING_CALLBACK;
+  }
+  const WebRtc_Word32 requestResult = _frameTypeCallback->RequestKeyFrame();
+  if (requestResult < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError,
+                 webrtc::kTraceVideoCoding,
+                 VCMId(_id),
+                 "Failed to request key frame");
+    return requestResult;
+  }
+  _scheduleKeyRequest = false;
+  return VCM_OK;
+}
+
+// Decode the next frame from the dual receiver's jitter buffer, blocking
+// at most maxWaitTimeMs. Used in dual decoder mode: a second decoder
+// catches up after losses and, once caught up, its state is copied to the
+// primary decoder and the dual decoder is released.
+// Returns the number of frames decoded (0 or 1), VCM_OK (0) when the dual
+// receiver is inactive, or VCM_CODEC_ERROR on decode failure.
+WebRtc_Word32
+VideoCodingModuleImpl::DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (_dualReceiver.State() != kReceiving ||
+        _dualReceiver.NackMode() != kNackInfinite)
+    {
+        // The dual receiver is currently not receiving or
+        // dual decoder mode is disabled.
+        return VCM_OK;
+    }
+    WebRtc_Word64 dummyRenderTime;
+    WebRtc_Word32 decodeCount = 0;
+    VCMEncodedFrame* dualFrame = _dualReceiver.FrameForDecoding(
+                                                            maxWaitTimeMs,
+                                                            dummyRenderTime);
+    if (dualFrame != NULL && _dualDecoder != NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Decoding frame %u with dual decoder",
+                     dualFrame->TimeStamp());
+        // Decode dualFrame and try to catch up
+        WebRtc_Word32 ret = _dualDecoder->Decode(*dualFrame,
+                                                 clock_->MillisecondTimestamp());
+        if (ret != WEBRTC_VIDEO_CODEC_OK)
+        {
+            // Release the frame before the early return so it is not leaked.
+            WEBRTC_TRACE(webrtc::kTraceWarning,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to decode frame with dual decoder");
+            _dualReceiver.ReleaseFrame(dualFrame);
+            return VCM_CODEC_ERROR;
+        }
+        if (_receiver.DualDecoderCaughtUp(dualFrame, _dualReceiver))
+        {
+            // Copy the complete decoder state of the dual decoder
+            // to the primary decoder.
+            WEBRTC_TRACE(webrtc::kTraceStream,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Dual decoder caught up");
+            _codecDataBase.CopyDecoder(*_dualDecoder);
+            _codecDataBase.ReleaseDecoder(_dualDecoder);
+            _dualDecoder = NULL;
+        }
+        decodeCount++;
+    }
+    // Release the frame on the success path too. NOTE(review): this is also
+    // reached when dualFrame is NULL — presumably ReleaseFrame tolerates
+    // NULL; confirm against VCMReceiver.
+    _dualReceiver.ReleaseFrame(dualFrame);
+    return decodeCount;
+}
+
+
+// Must be called from inside the receive side critical section.
+// Decodes one encoded frame with the current decoder (switching decoder
+// if the payload type changed), requests an SLI or key frame when the
+// decoder reports problems, and applies the key request policy when the
+// frame is incomplete or references a missing frame.
+WebRtc_Word32
+VideoCodingModuleImpl::Decode(const VCMEncodedFrame& frame)
+{
+    // Change decoder if payload type has changed
+    const bool renderTimingBefore = _codecDataBase.RenderTiming();
+    _decoder = _codecDataBase.SetDecoder(frame.PayloadType(),
+                                         _decodedFrameCallback);
+    if (renderTimingBefore != _codecDataBase.RenderTiming())
+    {
+        // Make sure we reset the decode time estimate since it will
+        // be zero for codecs without render timing.
+        _timing.ResetDecodeTime();
+    }
+    if (_decoder == NULL)
+    {
+        return VCM_NO_CODEC_REGISTERED;
+    }
+    // Decode a frame
+    WebRtc_Word32 ret = _decoder->Decode(frame, clock_->MillisecondTimestamp());
+
+    // Check for failed decoding, run frame type request callback if needed.
+    if (ret < 0)
+    {
+        if (ret == VCM_ERROR_REQUEST_SLI)
+        {
+            // Decoder asked for a slice loss indication instead of failing.
+            return RequestSliceLossIndication(
+                    _decodedFrameCallback.LastReceivedPictureID() + 1);
+        }
+        else
+        {
+            // Any other decode error: ask for a key frame to recover.
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to decode frame %u, requesting key frame",
+                         frame.TimeStamp());
+            ret = RequestKeyFrame();
+        }
+    }
+    else if (ret == VCM_REQUEST_SLI)
+    {
+        // Decode succeeded but the decoder still wants an SLI sent.
+        ret = RequestSliceLossIndication(
+            _decodedFrameCallback.LastReceivedPictureID() + 1);
+    }
+    if (!frame.Complete() || frame.MissingFrame())
+    {
+        // The frame had losses; depending on the key request policy, flag a
+        // key frame request to be issued later (presumably from Process();
+        // confirm) rather than requesting it inline here.
+        switch (_keyRequestMode)
+        {
+            case kKeyOnKeyLoss:
+            {
+                // Only escalate when the damaged frame is a key frame.
+                if (frame.FrameType() == kVideoFrameKey)
+                {
+                    _scheduleKeyRequest = true;
+                    return VCM_OK;
+                }
+                break;
+            }
+            case kKeyOnLoss:
+            {
+                // Escalate on any damaged frame.
+                _scheduleKeyRequest = true;
+                return VCM_OK;
+            }
+            default:
+                break;
+        }
+    }
+    return ret;
+}
+
+// Decode a previously stored frame (counterpart of the frame storage
+// callback). Extracts the stored data into the scratch frame buffer and
+// feeds it to the regular decode path.
+WebRtc_Word32
+VideoCodingModuleImpl::DecodeFromStorage(
+    const EncodedVideoData& frameFromStorage) {
+  CriticalSectionScoped cs(_receiveCritSect);
+  const WebRtc_Word32 extractResult =
+      _frameFromFile.ExtractFromStorage(frameFromStorage);
+  if (extractResult < 0) {
+    return extractResult;
+  }
+  return Decode(_frameFromFile);
+}
+
+// Reset the decoder state
+// Re-initializes the primary receiver and timing, resets the decoder, and
+// tears down any active dual receiver/decoder. Always returns VCM_OK.
+WebRtc_Word32
+VideoCodingModuleImpl::ResetDecoder()
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (_decoder != NULL)
+    {
+        // Only reset receive-side state when a decoder is registered.
+        _receiver.Initialize();
+        _timing.Reset();
+        _scheduleKeyRequest = false;
+        _decoder->Reset();
+    }
+    if (_dualReceiver.State() != kPassive)
+    {
+        _dualReceiver.Initialize();
+    }
+    if (_dualDecoder != NULL)
+    {
+        // Hand the dual decoder instance back to the codec database.
+        _codecDataBase.ReleaseDecoder(_dualDecoder);
+        _dualDecoder = NULL;
+    }
+    return VCM_OK;
+}
+
+// Register possible receive codecs, can be called multiple times.
+// Rejects a NULL codec with VCM_PARAMETER_ERROR; otherwise delegates to
+// the codec database.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                                WebRtc_Word32 numberOfCores,
+                                                bool requireKeyFrame) {
+  CriticalSectionScoped cs(_receiveCritSect);
+  if (receiveCodec == NULL) {
+    return VCM_PARAMETER_ERROR;
+  }
+  return _codecDataBase.RegisterReceiveCodec(receiveCodec, numberOfCores,
+                                             requireKeyFrame);
+}
+
+// Get current received codec. Fills currentReceiveCodec from the codec
+// database; rejects a NULL pointer with VCM_PARAMETER_ERROR.
+WebRtc_Word32
+VideoCodingModuleImpl::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
+  CriticalSectionScoped cs(_receiveCritSect);
+  if (currentReceiveCodec == NULL) {
+    return VCM_PARAMETER_ERROR;
+  }
+  return _codecDataBase.ReceiveCodec(currentReceiveCodec);
+}
+
+// Get current received codec
+// Returns the codec type only; thread-safe read from the codec database.
+VideoCodecType
+VideoCodingModuleImpl::ReceiveCodec() const
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    return _codecDataBase.ReceiveCodec();
+}
+
+// Incoming packet from network parsed and ready for decode, non blocking.
+// The packet is inserted into the dual receiver (when active) and then
+// into the primary receiver. A flush indication from either jitter buffer
+// triggers a key frame request and a decoder reset; other negative codes
+// are returned to the caller.
+WebRtc_Word32
+VideoCodingModuleImpl::IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                    WebRtc_UWord32 payloadLength,
+                                    const WebRtcRTPHeader& rtpInfo) {
+  const VCMPacket packet(incomingPayload, payloadLength, rtpInfo);
+  if (_dualReceiver.State() != kPassive) {
+    const WebRtc_Word32 dualResult =
+        _dualReceiver.InsertPacket(packet,
+                                   rtpInfo.type.Video.width,
+                                   rtpInfo.type.Video.height);
+    if (dualResult == VCM_FLUSH_INDICATOR) {
+      RequestKeyFrame();
+      ResetDecoder();
+    } else if (dualResult < 0) {
+      return dualResult;
+    }
+  }
+  const WebRtc_Word32 mainResult =
+      _receiver.InsertPacket(packet, rtpInfo.type.Video.width,
+                             rtpInfo.type.Video.height);
+  if (mainResult == VCM_FLUSH_INDICATOR) {
+    RequestKeyFrame();
+    ResetDecoder();
+  } else if (mainResult < 0) {
+    return mainResult;
+  }
+  return VCM_OK;
+}
+
+// Minimum playout delay (used for lip-sync). This is the minimum delay
+// required to sync with audio. Not included in VideoCodingModule::Delay().
+// Defaults to 0 ms. Always returns VCM_OK.
+WebRtc_Word32
+VideoCodingModuleImpl::SetMinimumPlayoutDelay(WebRtc_UWord32 minPlayoutDelayMs)
+{
+    // Hand the lower-bound constraint to the timing module.
+    _timing.SetMinimumTotalDelay(minPlayoutDelayMs);
+    return VCM_OK;
+}
+
+// The estimated delay caused by rendering, defaults to
+// kDefaultRenderDelayMs = 10 ms. Always returns VCM_OK.
+WebRtc_Word32
+VideoCodingModuleImpl::SetRenderDelay(WebRtc_UWord32 timeMS)
+{
+    // Forwarded to the timing module unchanged.
+    _timing.SetRenderDelay(timeMS);
+    return VCM_OK;
+}
+
+// Current video delay
+// Returns the timing module's target video delay in milliseconds.
+WebRtc_Word32
+VideoCodingModuleImpl::Delay() const
+{
+    return _timing.TargetVideoDelay();
+}
+
+// Nack list
+// Fills nackList with sequence numbers to retransmit and updates size.
+// Translates the receiver's NACK status into a module return code, asking
+// for a key frame when a usable list could not be produced.
+WebRtc_Word32
+VideoCodingModuleImpl::NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size)
+{
+    VCMNackStatus nackStatus = kNackOk;
+    // Collect sequence numbers from the default receiver
+    // if in normal nack mode. Otherwise collect them from
+    // the dual receiver if the dual receiver is receiving.
+    if (_receiver.NackMode() != kNoNack)
+    {
+        nackStatus = _receiver.NackList(nackList, size);
+    }
+    else if (_dualReceiver.State() != kPassive)
+    {
+        nackStatus = _dualReceiver.NackList(nackList, size);
+    }
+    else
+    {
+        size = 0;
+    }
+
+    if (nackStatus == kNackNeedMoreMemory)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Out of memory");
+        return VCM_MEMORY;
+    }
+    if (nackStatus == kNackKeyFrameRequest)
+    {
+        CriticalSectionScoped cs(_receiveCritSect);
+        WEBRTC_TRACE(webrtc::kTraceWarning,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to get NACK list, requesting key frame");
+        return RequestKeyFrame();
+    }
+    return VCM_OK;
+}
+
+// Fills frameCount with receive-side frame counters; delegates to the
+// primary receiver and returns its status code.
+WebRtc_Word32
+VideoCodingModuleImpl::ReceivedFrameCount(VCMFrameCount& frameCount) const
+{
+    return _receiver.ReceivedFrameCount(frameCount);
+}
+
+// Number of packets discarded by the primary receiver's jitter buffer.
+WebRtc_UWord32 VideoCodingModuleImpl::DiscardedPackets() const {
+  return _receiver.DiscardedPackets();
+}
+
+// Configure sender-side NACK handling by enabling/disabling the NACK
+// protection method in the media optimization module.
+// Returns VCM_OK, or VCM_NOT_IMPLEMENTED for kNackSelective.
+int VideoCodingModuleImpl::SetSenderNackMode(SenderNackMode mode) {
+  CriticalSectionScoped cs(_sendCritSect);
+
+  switch (mode) {
+    case kNackNone:
+      _mediaOpt.EnableProtectionMethod(false, kNack);
+      break;
+    case kNackAll:
+      _mediaOpt.EnableProtectionMethod(true, kNack);
+      break;
+    case kNackSelective:
+      // Selective NACK is not supported yet.
+      // (Removed an unreachable `break` that followed this return.)
+      return VCM_NOT_IMPLEMENTED;
+  }
+  return VCM_OK;
+}
+
+// Set the sender reference picture selection (RPS) mode.
+// Not implemented: always returns VCM_NOT_IMPLEMENTED; `enable` is ignored.
+int VideoCodingModuleImpl::SetSenderReferenceSelection(bool enable) {
+  return VCM_NOT_IMPLEMENTED;
+}
+
+// Enable or disable sender-side forward error correction (FEC) via the
+// media optimization module. Always returns VCM_OK.
+int VideoCodingModuleImpl::SetSenderFEC(bool enable) {
+  CriticalSectionScoped cs(_sendCritSect);
+  _mediaOpt.EnableProtectionMethod(enable, kFec);
+  return VCM_OK;
+}
+
+// Set the periodic key frame interval.
+// Not implemented: always returns VCM_NOT_IMPLEMENTED; `periodMs` ignored.
+int VideoCodingModuleImpl::SetSenderKeyFramePeriod(int periodMs) {
+  return VCM_NOT_IMPLEMENTED;
+}
+
+// Configure receiver robustness (NACK modes / dual decoder) together with
+// the decode error policy. Returns VCM_PARAMETER_ERROR for invalid
+// mode/error combinations and VCM_NOT_IMPLEMENTED for unfinished modes.
+int VideoCodingModuleImpl::SetReceiverRobustnessMode(
+    ReceiverRobustness robustnessMode,
+    DecodeErrors errorMode) {
+  CriticalSectionScoped cs(_receiveCritSect);
+  switch (robustnessMode) {
+    case kNone:
+      _receiver.SetNackMode(kNoNack);
+      _dualReceiver.SetNackMode(kNoNack);
+      if (errorMode == kNoDecodeErrors) {
+        _keyRequestMode = kKeyOnLoss;
+      } else {
+        _keyRequestMode = kKeyOnError;
+      }
+      break;
+    case kHardNack:
+      // Infinite NACK requires that decode errors are not allowed.
+      if (errorMode == kAllowDecodeErrors) {
+        return VCM_PARAMETER_ERROR;
+      }
+      _receiver.SetNackMode(kNackInfinite);
+      _dualReceiver.SetNackMode(kNoNack);
+      _keyRequestMode = kKeyOnError;  // TODO(hlundin): On long NACK list?
+      break;
+    case kSoftNack:
+      // TODO(hlundin): Not completed. Intended configuration once done:
+      //   _receiver.SetNackMode(kNackHybrid);
+      //   _dualReceiver.SetNackMode(kNoNack);
+      //   _keyRequestMode = kKeyOnError;
+      // (These statements were previously unreachable dead code after the
+      // return below; kept here as documentation of intent.)
+      assert(false);
+      return VCM_NOT_IMPLEMENTED;
+    case kDualDecoder:
+      // Dual decoding only makes sense when decode errors are allowed.
+      if (errorMode == kNoDecodeErrors) {
+        return VCM_PARAMETER_ERROR;
+      }
+      _receiver.SetNackMode(kNoNack);
+      _dualReceiver.SetNackMode(kNackInfinite);
+      _keyRequestMode = kKeyOnError;
+      break;
+    case kReferenceSelection:
+      // TODO(hlundin): Not completed. Intended configuration once done:
+      //   reject kNoDecodeErrors with VCM_PARAMETER_ERROR, then
+      //   _receiver.SetNackMode(kNoNack);
+      //   _dualReceiver.SetNackMode(kNoNack);
+      // (Previously unreachable dead code after the return below.)
+      assert(false);
+      return VCM_NOT_IMPLEMENTED;
+  }
+  return VCM_OK;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/video_coding_impl.h b/trunk/src/modules/video_coding/main/source/video_coding_impl.h
new file mode 100644
index 0000000..490c847
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/video_coding_impl.h
@@ -0,0 +1,311 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
+
+#include "video_coding.h"
+#include "critical_section_wrapper.h"
+#include "frame_buffer.h"
+#include "receiver.h"
+#include "timing.h"
+#include "jitter_buffer.h"
+#include "codec_database.h"
+#include "generic_decoder.h"
+#include "generic_encoder.h"
+#include "media_optimization.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+#include <stdio.h>
+
+namespace webrtc
+{
+
+// Small helper that tracks when a periodic processing task last ran and
+// how long until it should run again, using the supplied clock.
+// NOTE(review): _clock is stored as a raw pointer and appears to be
+// externally owned — confirm lifetime against the caller.
+class VCMProcessTimer
+{
+public:
+    VCMProcessTimer(WebRtc_UWord32 periodMs, TickTimeBase* clock)
+        : _clock(clock),
+          _periodMs(periodMs),
+          _latestMs(_clock->MillisecondTimestamp()) {}
+    // Period between processing runs, in milliseconds.
+    WebRtc_UWord32 Period() const;
+    // Milliseconds remaining until the next run is due.
+    WebRtc_UWord32 TimeUntilProcess() const;
+    // Record that processing has just been performed.
+    void Processed();
+
+private:
+    TickTimeBase*         _clock;
+    WebRtc_UWord32        _periodMs;
+    WebRtc_Word64         _latestMs;  // Timestamp of the last Processed() call.
+};
+
+// Policy for when the receiver requests a key frame from the sender.
+enum VCMKeyRequestMode
+{
+    kKeyOnError,    // Normal mode, request key frames on decoder error
+    kKeyOnKeyLoss,  // Request key frames on decoder error and on packet loss
+                    // in key frames.
+    kKeyOnLoss,     // Request key frames on decoder error and on packet loss
+                    // in any frame
+};
+
+// Implementation of VideoCodingModule. Combines the send side (encoder,
+// media optimization) with the receive side (jitter buffers, decoders,
+// timing), each guarded by its own critical section.
+class VideoCodingModuleImpl : public VideoCodingModule
+{
+public:
+    VideoCodingModuleImpl(const WebRtc_Word32 id,
+                          TickTimeBase* clock,
+                          bool delete_clock_on_destroy);
+
+    virtual ~VideoCodingModuleImpl();
+
+    WebRtc_Word32 Id() const;
+
+    //  Change the unique identifier of this object
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    // Returns the number of milliseconds until the module wants a worker
+    // thread to call Process
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+
+    virtual WebRtc_Word32 Process();
+
+    /*
+    *   Sender
+    */
+
+    // Initialize send codec
+    virtual WebRtc_Word32 InitializeSender();
+
+    // Register the send codec to be used.
+    virtual WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
+                                            WebRtc_UWord32 numberOfCores,
+                                            WebRtc_UWord32 maxPayloadSize);
+
+    // Get current send codec
+    virtual WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const;
+
+    // Get current send codec type
+    virtual VideoCodecType SendCodec() const;
+
+    // Register an external encoder object.
+    virtual WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalSource = false);
+
+    // Get codec config parameters
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer,
+                                                WebRtc_Word32 size);
+
+    // Get encode bitrate
+    virtual int Bitrate(unsigned int* bitrate) const;
+
+    // Get encode frame rate
+    virtual int FrameRate(unsigned int* framerate) const;
+
+    // Set channel parameters
+    virtual WebRtc_Word32 SetChannelParameters(
+        WebRtc_UWord32 availableBandWidth,
+        WebRtc_UWord8 lossRate,
+        WebRtc_UWord32 rtt);
+
+    // Set receive channel parameters
+    virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt);
+
+    // Register a transport callback which will be called to deliver the
+    // encoded buffers
+    virtual WebRtc_Word32 RegisterTransportCallback(
+        VCMPacketizationCallback* transport);
+
+    // Register a send statistics callback which will be called to deliver
+    // information about the video stream produced by the encoder,
+    // for instance the average frame rate and bit rate.
+    virtual WebRtc_Word32 RegisterSendStatisticsCallback(
+        VCMSendStatisticsCallback* sendStats);
+
+    // Register a video quality settings callback which will be called when
+    // frame rate/dimensions need to be updated for video quality optimization
+    virtual WebRtc_Word32 RegisterVideoQMCallback(
+        VCMQMSettingsCallback* videoQMSettings);
+
+    // Register a video protection callback which will be called to deliver
+    // the requested FEC rate and NACK status (on/off).
+    virtual WebRtc_Word32 RegisterProtectionCallback(
+        VCMProtectionCallback* protection);
+
+    // Enable or disable a video protection method.
+   virtual WebRtc_Word32 SetVideoProtection(VCMVideoProtection videoProtection,
+                                            bool enable);
+
+    // Add one raw video frame to the encoder, blocking.
+    virtual WebRtc_Word32 AddVideoFrame(
+        const VideoFrame& videoFrame,
+        const VideoContentMetrics* _contentMetrics = NULL,
+        const CodecSpecificInfo* codecSpecificInfo = NULL);
+
+    // Next frame encoded should be of the type frameType.
+    virtual WebRtc_Word32 FrameTypeRequest(FrameType frameType,
+                                           WebRtc_UWord8 simulcastIdx);
+
+    // Enable frame dropper
+    virtual WebRtc_Word32 EnableFrameDropper(bool enable);
+
+    // Sent frame counters
+    virtual WebRtc_Word32 SentFrameCount(VCMFrameCount& frameCount) const;
+
+    /*
+    *   Receiver
+    */
+
+    // Initialize receiver, resets codec database etc
+    virtual WebRtc_Word32 InitializeReceiver();
+
+    // Register possible receive codecs, can be called multiple times
+    virtual WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                               WebRtc_Word32 numberOfCores,
+                                               bool requireKeyFrame = false);
+
+    // Register an externally defined decoder/render object.
+    // Can be a decoder only or a decoder coupled with a renderer.
+    virtual WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalRenderTiming);
+
+    // Register a receive callback. Will be called whenever there is a new
+    // frame ready for rendering.
+    virtual WebRtc_Word32 RegisterReceiveCallback(
+        VCMReceiveCallback* receiveCallback);
+
+    // Register a receive statistics callback which will be called to deliver
+    // information about the video stream received by the receiving side of the
+    // VCM, for instance the average frame rate and bit rate.
+    virtual WebRtc_Word32 RegisterReceiveStatisticsCallback(
+        VCMReceiveStatisticsCallback* receiveStats);
+
+    // Register a frame type request callback.
+    virtual WebRtc_Word32 RegisterFrameTypeCallback(
+        VCMFrameTypeCallback* frameTypeCallback);
+
+    // Register a frame storage callback.
+    virtual WebRtc_Word32 RegisterFrameStorageCallback(
+        VCMFrameStorageCallback* frameStorageCallback);
+
+    // Nack callback
+    virtual WebRtc_Word32 RegisterPacketRequestCallback(
+        VCMPacketRequestCallback* callback);
+
+    // Decode next frame, blocks for a maximum of maxWaitTimeMs milliseconds.
+    // Should be called as often as possible to get the most out of the decoder.
+    virtual WebRtc_Word32 Decode(WebRtc_UWord16 maxWaitTimeMs = 200);
+
+    // Decode next dual frame, blocks for a maximum of maxWaitTimeMs
+    // milliseconds.
+    virtual WebRtc_Word32 DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs = 200);
+
+    // Reset the decoder state
+    virtual WebRtc_Word32 ResetDecoder();
+
+    // Get current received codec
+    virtual WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const;
+
+    // Get current received codec type
+    virtual VideoCodecType ReceiveCodec() const;
+
+    // Incoming packet from network parsed and ready for decode, non blocking.
+    virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                         WebRtc_UWord32 payloadLength,
+                                         const WebRtcRTPHeader& rtpInfo);
+
+    // A part of an encoded frame to be decoded.
+    // Used in conjunction with VCMFrameStorageCallback.
+    virtual WebRtc_Word32 DecodeFromStorage(
+        const EncodedVideoData& frameFromStorage);
+
+    // Minimum playout delay (Used for lip-sync). This is the minimum delay
+    // required to sync with audio. Not included in VideoCodingModule::Delay()
+    // Defaults to 0 ms.
+    virtual WebRtc_Word32 SetMinimumPlayoutDelay(
+        WebRtc_UWord32 minPlayoutDelayMs);
+
+    // The estimated delay caused by rendering
+    virtual WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 timeMS);
+
+    // Current delay
+    virtual WebRtc_Word32 Delay() const;
+
+    // Received frame counters
+    virtual WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const;
+
+    // Returns the number of packets discarded by the jitter buffer.
+    virtual WebRtc_UWord32 DiscardedPackets() const;
+
+
+    // Robustness APIs
+
+    // Set the sender RTX/NACK mode.
+    virtual int SetSenderNackMode(SenderNackMode mode);
+
+    // Set the sender reference picture selection (RPS) mode.
+    virtual int SetSenderReferenceSelection(bool enable);
+
+    // Set the sender forward error correction (FEC) mode.
+    virtual int SetSenderFEC(bool enable);
+
+    // Set the key frame period, or disable periodic key frames (I-frames).
+    virtual int SetSenderKeyFramePeriod(int periodMs);
+
+    // Set the receiver robustness mode.
+    virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+                                          DecodeErrors errorMode);
+
+protected:
+    WebRtc_Word32 Decode(const webrtc::VCMEncodedFrame& frame);
+    WebRtc_Word32 RequestKeyFrame();
+    WebRtc_Word32 RequestSliceLossIndication(
+        const WebRtc_UWord64 pictureID) const;
+    WebRtc_Word32 NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size);
+
+private:
+    WebRtc_Word32                       _id;
+    TickTimeBase*                       clock_;
+    bool                                delete_clock_on_destroy_;
+    CriticalSectionWrapper*             _receiveCritSect; // Critical section for receive side
+    bool                                _receiverInited;
+    VCMTiming                           _timing;
+    VCMTiming                           _dualTiming;
+    VCMReceiver                         _receiver;
+    VCMReceiver                         _dualReceiver; // Used in dual decoder mode
+    VCMDecodedFrameCallback             _decodedFrameCallback;
+    VCMDecodedFrameCallback             _dualDecodedFrameCallback;
+    VCMFrameTypeCallback*               _frameTypeCallback;
+    VCMFrameStorageCallback*            _frameStorageCallback;
+    VCMReceiveStatisticsCallback*       _receiveStatsCallback;
+    VCMPacketRequestCallback*           _packetRequestCallback;
+    VCMGenericDecoder*                  _decoder;
+    VCMGenericDecoder*                  _dualDecoder; // Non-NULL only while catching up
+    FILE*                               _bitStreamBeforeDecoder; // Debug dump (DEBUG_DECODER_BIT_STREAM)
+    VCMFrameBuffer                      _frameFromFile; // Scratch buffer for DecodeFromStorage
+    VCMKeyRequestMode                   _keyRequestMode;
+    bool                                _scheduleKeyRequest; // A key frame request is pending
+
+    CriticalSectionWrapper*             _sendCritSect; // Critical section for send side
+    VCMGenericEncoder*                  _encoder;
+    VCMEncodedFrameCallback             _encodedFrameCallback;
+    FrameType                           _nextFrameType[kMaxSimulcastStreams];
+    VCMMediaOptimization                _mediaOpt;
+    VideoCodecType                      _sendCodecType;
+    VCMSendStatisticsCallback*          _sendStatsCallback;
+    FILE*                               _encoderInputFile;
+
+    VCMCodecDataBase                    _codecDataBase;
+    VCMProcessTimer                     _receiveStatsTimer;
+    VCMProcessTimer                     _sendStatsTimer;
+    VCMProcessTimer                     _retransmissionTimer;
+    VCMProcessTimer                     _keyRequestTimer;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
diff --git a/trunk/src/modules/video_coding/main/source/video_coding_robustness_unittest.cc b/trunk/src/modules/video_coding/main/source/video_coding_robustness_unittest.cc
new file mode 100644
index 0000000..0ee9657
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/video_coding_robustness_unittest.cc
@@ -0,0 +1,396 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/interface/mock/mock_vcm_callbacks.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+namespace webrtc {
+
+using ::testing::Return;
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::AllOf;
+using ::testing::Args;
+using ::testing::Field;
+using ::testing::Pointee;
+using ::testing::NiceMock;
+using ::testing::Sequence;
+
+class VCMRobustnessTest : public ::testing::Test {
+ protected:
+  static const size_t kPayloadLen = 10;
+
+  virtual void SetUp() {
+    clock_ = new FakeTickTime(0);
+    ASSERT_TRUE(clock_ != NULL);
+    vcm_ = VideoCodingModule::Create(0, clock_);
+    ASSERT_TRUE(vcm_ != NULL);
+    ASSERT_EQ(0, vcm_->InitializeReceiver());
+    ASSERT_EQ(0, vcm_->RegisterFrameTypeCallback(&frame_type_callback_));
+    ASSERT_EQ(0, vcm_->RegisterPacketRequestCallback(&request_callback_));
+    ASSERT_EQ(VCM_OK, vcm_->Codec(kVideoCodecVP8, &video_codec_));
+    ASSERT_EQ(VCM_OK, vcm_->RegisterReceiveCodec(&video_codec_, 1));
+    ASSERT_EQ(VCM_OK, vcm_->RegisterExternalDecoder(&decoder_,
+                                                    video_codec_.plType,
+                                                    true));
+  }
+
+  virtual void TearDown() {
+    VideoCodingModule::Destroy(vcm_);
+    delete clock_;
+  }
+
+  void InsertPacket(uint32_t timestamp,
+                    uint16_t seq_no,
+                    bool first,
+                    bool marker_bit,
+                    FrameType frame_type) {
+    const uint8_t payload[kPayloadLen] = {0};
+    WebRtcRTPHeader rtp_info;
+    memset(&rtp_info, 0, sizeof(rtp_info));
+    rtp_info.frameType = frame_type;
+    rtp_info.header.timestamp = timestamp;
+    rtp_info.header.sequenceNumber = seq_no;
+    rtp_info.header.markerBit = marker_bit;
+    rtp_info.header.payloadType = video_codec_.plType;
+    rtp_info.type.Video.codec = kRTPVideoVP8;
+    rtp_info.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+    rtp_info.type.Video.isFirstPacket = first;
+
+    ASSERT_EQ(VCM_OK, vcm_->IncomingPacket(payload, kPayloadLen, rtp_info));
+  }
+
+  VideoCodingModule* vcm_;
+  VideoCodec video_codec_;
+  MockVCMFrameTypeCallback frame_type_callback_;
+  MockPacketRequestCallback request_callback_;
+  NiceMock<MockVideoDecoder> decoder_;
+  NiceMock<MockVideoDecoder> decoderCopy_;
+  FakeTickTime* clock_;
+};
+
+TEST_F(VCMRobustnessTest, TestHardNack) {
+  Sequence s;
+  EXPECT_CALL(request_callback_, ResendPackets(_, 2))
+      .With(Args<0, 1>(ElementsAre(6, 7)))
+      .Times(1);
+  for (int ts = 0; ts <= 6000; ts += 3000) {
+    EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, ts),
+                                       Field(&EncodedImage::_length,
+                                             kPayloadLen * 3),
+                                       Field(&EncodedImage::_completeFrame,
+                                             true)),
+                                 false, _, _, _))
+        .Times(1)
+        .InSequence(s);
+  }
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kHardNack,
+      VideoCodingModule::kNoDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+
+  ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+  ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+  ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  clock_->IncrementDebugClock(10);
+
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+  clock_->IncrementDebugClock(10);
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  clock_->IncrementDebugClock(10);
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+}
+
+TEST_F(VCMRobustnessTest, TestHardNackNoneDecoded) {
+  EXPECT_CALL(request_callback_, ResendPackets(_, _))
+      .Times(0);
+  EXPECT_CALL(frame_type_callback_, RequestKeyFrame())
+        .Times(1);
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kHardNack,
+      VideoCodingModule::kNoDecodeErrors));
+
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  clock_->IncrementDebugClock(10);
+
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+}
+
+TEST_F(VCMRobustnessTest, TestDualDecoder) {
+  Sequence s1, s2;
+  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+      .With(Args<0, 1>(ElementsAre(4)))
+      .Times(1);
+
+  EXPECT_CALL(decoder_, Copy())
+      .Times(1)
+      .WillOnce(Return(&decoderCopy_));
+  EXPECT_CALL(decoderCopy_, Copy())
+      .Times(1)
+      .WillOnce(Return(&decoder_));
+
+  // Decode operations
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           false)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+
+  EXPECT_CALL(decoderCopy_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s2);
+  EXPECT_CALL(decoderCopy_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s2);
+
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kDualDecoder,
+      VideoCodingModule::kAllowDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 0.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  // Packet 4 missing
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 3000 incomplete.
+                                       // Spawn a decoder copy.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // Expect no dual decoder action.
+
+  clock_->IncrementDebugClock(10);
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Generate NACK list.
+
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 6000 complete.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // Expect no dual decoder action.
+
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+  EXPECT_EQ(1, vcm_->DecodeDualFrame(0));  // Dual decode of timestamp 3000.
+  EXPECT_EQ(1, vcm_->DecodeDualFrame(0));  // Dual decode of timestamp 6000.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // No more frames.
+
+  InsertPacket(9000, 9, true, false, kVideoFrameDelta);
+  InsertPacket(9000, 10, false, false, kVideoFrameDelta);
+  InsertPacket(9000, 11, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 9000 complete.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // Expect no dual decoder action.
+}
+
+TEST_F(VCMRobustnessTest, TestModeNoneWithErrors) {
+  EXPECT_CALL(decoder_, InitDecode(_, _)).Times(1);
+  EXPECT_CALL(decoder_, Release()).Times(1);
+  Sequence s1;
+  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+      .With(Args<0, 1>(ElementsAre(4)))
+      .Times(0);
+
+  EXPECT_CALL(decoder_, Copy())
+      .Times(0);
+  EXPECT_CALL(decoderCopy_, Copy())
+      .Times(0);
+
+  // Decode operations
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           false)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kNone,
+      VideoCodingModule::kAllowDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 0.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  // Packet 4 missing
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 3000 incomplete.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(10);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 6000 complete.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(23);
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+
+  InsertPacket(9000, 9, true, false, kVideoFrameDelta);
+  InsertPacket(9000, 10, false, false, kVideoFrameDelta);
+  InsertPacket(9000, 11, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 9000 complete.
+}
+
+TEST_F(VCMRobustnessTest, TestModeNoneWithoutErrors) {
+  Sequence s1;
+  EXPECT_CALL(decoder_, InitDecode(_, _)).Times(1);
+  EXPECT_CALL(decoder_, Release()).Times(1);
+  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+      .With(Args<0, 1>(ElementsAre(4)))
+      .Times(0);
+
+  EXPECT_CALL(decoder_, Copy())
+      .Times(0);
+  EXPECT_CALL(decoderCopy_, Copy())
+      .Times(0);
+
+  // Decode operations
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           false)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(frame_type_callback_, RequestKeyFrame())
+        .Times(1);
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kNone,
+      VideoCodingModule::kNoDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 0.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  // Packet 4 missing
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 3000 incomplete.
+                                       // Schedule key frame request.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(10);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 6000 complete.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(500);    // Wait for the key request timer to set.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect key frame request.
+}
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/source/video_coding_test.gypi b/trunk/src/modules/video_coding/main/source/video_coding_test.gypi
new file mode 100644
index 0000000..7ab265c
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/source/video_coding_test.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [{
+      'target_name': 'video_coding_test',
+      'type': 'executable',
+      'dependencies': [
+         '<(webrtc_root)/../testing/gtest.gyp:gtest',
+         '<(webrtc_root)/../test/test.gyp:test_support',
+         '<(webrtc_root)/../test/metrics.gyp:metrics',
+         'webrtc_video_coding',
+         'rtp_rtcp',
+         'webrtc_utility',
+         'video_processing',
+         '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+      ],
+      'include_dirs': [
+         '../../../interface',
+         '../../codecs/vp8/main/interface',
+         '../../../../system_wrappers/interface',
+          '../../../../common_video/interface',
+         '../source',
+      ],
+      'sources': [
+        # headers
+        '../test/codec_database_test.h',
+        '../test/generic_codec_test.h',
+        '../test/jitter_estimate_test.h',
+        '../test/media_opt_test.h',
+        '../test/mt_test_common.h',
+        '../test/normal_test.h',
+        '../test/quality_modes_test.h',
+        '../test/receiver_tests.h',
+        '../test/release_test.h',
+        '../test/rtp_player.h',
+        '../test/test_callbacks.h',
+        '../test/test_util.h',
+        '../test/video_source.h',
+
+        # sources
+        '../test/codec_database_test.cc',
+        '../test/decode_from_storage_test.cc',
+        '../test/generic_codec_test.cc',
+        '../test/jitter_buffer_test.cc',
+        '../test/media_opt_test.cc',
+        '../test/mt_test_common.cc',
+        '../test/mt_rx_tx_test.cc',
+        '../test/normal_test.cc',
+        '../test/quality_modes_test.cc',
+        '../test/receiver_timing_tests.cc',
+        '../test/rtp_player.cc',
+        '../test/test_callbacks.cc',
+        '../test/test_util.cc',
+        '../test/tester_main.cc',
+        '../test/video_rtp_play_mt.cc',
+        '../test/video_rtp_play.cc',
+        '../test/video_source.cc',
+      ], # source
+    },
+    {
+      'target_name': 'video_coding_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'webrtc_video_coding',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/../testing/gmock.gyp:gmock',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../../../interface',
+        '../../codecs/interface',
+      ],
+      'sources': [
+        '../interface/mock/mock_vcm_callbacks.h',
+        'decoding_state_unittest.cc',
+        'jitter_buffer_unittest.cc',
+        'session_info_unittest.cc',
+        'video_coding_robustness_unittest.cc',
+        'qm_select_unittest.cc',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_coding/main/test/codec_database_test.cc b/trunk/src/modules/video_coding/main/test/codec_database_test.cc
new file mode 100644
index 0000000..f9ea3e5
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/codec_database_test.cc
@@ -0,0 +1,408 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation of codec data base test
+// testing is done via the VCM module, no specific CodecDataBase module functionality.
+
+#include "codec_database_test.h"
+
+#include <assert.h>
+#include <stdio.h>
+
+#include "../../../../engine_configurations.h"
+#include "../source/event.h"
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "test_util.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "vp8.h" // for external codecs test
+
+
+using namespace webrtc;
+
+int CodecDataBaseTest::RunTest(CmdArgs& args)
+{
+    VideoCodingModule* vcm = VideoCodingModule::Create(1);
+    CodecDataBaseTest* cdbt = new CodecDataBaseTest(vcm);
+    cdbt->Perform(args);
+    VideoCodingModule::Destroy(vcm);
+    delete cdbt;
+    return 0;
+
+}
+
+CodecDataBaseTest::CodecDataBaseTest(VideoCodingModule* vcm):
+_vcm(vcm),
+_width(0),
+_height(0),
+_lengthSourceFrame(0),
+_timeStamp(0)
+{
+    //
+}
+CodecDataBaseTest::~CodecDataBaseTest()
+{
+    //
+}
+void
+CodecDataBaseTest::Setup(CmdArgs& args)
+{
+    _inname= args.inputFile;
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _lengthSourceFrame  = 3*_width*_height/2;
+    if (args.outputFile.compare(""))
+        _outname = test::OutputPath() + "CDBtest_decoded.yuv";
+    else
+        _outname = args.outputFile;
+    _outname = args.outputFile;
+    _encodedName = test::OutputPath() + "CDBtest_encoded.vp8";
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(),  "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    return;
+}
+
+
+
+WebRtc_Word32
+CodecDataBaseTest::Perform(CmdArgs& args)
+{
+#ifndef VIDEOCODEC_VP8
+    assert(false);
+#endif
+    Setup(args);
+    EventWrapper* waitEvent = EventWrapper::Create();
+
+    /**************************/
+    /* General Sanity Checks */
+    /************************/
+    VideoCodec sendCodec, receiveCodec;
+    TEST(VideoCodingModule::NumberOfCodecs() > 0);
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    VCMDecodeCompleteCallback *_decodeCallback = new VCMDecodeCompleteCallback(_decodedFile);
+    VCMEncodeCompleteCallback *_encodeCompleteCallback = new VCMEncodeCompleteCallback(_encodedFile);
+    _vcm->RegisterReceiveCallback(_decodeCallback);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    _encodeCompleteCallback->SetFrameDimensions(_width, _height);
+    // registering the callback - encode and decode with the same vcm (could be later changed)
+    _encodeCompleteCallback->RegisterReceiverVCM(_vcm);
+    // preparing a frame to be encoded
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+    sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+    sourceFrame.SetHeight(_height);
+    sourceFrame.SetWidth(_width);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    // Encoder registration
+    TEST (VideoCodingModule::NumberOfCodecs() > 0);
+    TEST(VideoCodingModule::Codec(-1, &sendCodec) == VCM_PARAMETER_ERROR);
+    TEST(VideoCodingModule::Codec(VideoCodingModule::NumberOfCodecs() + 1, &sendCodec) == VCM_PARAMETER_ERROR);
+    VideoCodingModule::Codec(1, &sendCodec);
+    sendCodec.plType = 0; // random value
+    TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) < 0);
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    _vcm->RegisterReceiveCallback(_decodeCallback);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    printf(" \nNumber of Registered Codecs: %d \n\n", VideoCodingModule::NumberOfCodecs());
+    printf("Registered codec names: ");
+    for (int i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
+    {
+        VideoCodingModule::Codec(i, &sendCodec);
+        printf("%s   ", sendCodec.plName);
+    }
+    printf("\n\nVerify that all requested codecs are used\n \n \n");
+
+    // Testing with VP8.
+    VideoCodingModule::Codec(kVideoCodecVP8, &sendCodec);
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _encodeCompleteCallback->SetCodecType(kRTPVideoVP8);
+    _vcm->InitializeReceiver();
+    TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK );
+    _vcm->InitializeSender();
+    TEST (_vcm->AddVideoFrame(sourceFrame) < 0 );
+
+    // Test changing frame size while keeping the same payload type
+    VideoCodingModule::Codec(0, &sendCodec);
+    sendCodec.width = 352;
+    sendCodec.height = 288;
+    VideoCodec currentSendCodec;
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _vcm->SendCodec(&currentSendCodec);
+    TEST(currentSendCodec.width == sendCodec.width &&
+        currentSendCodec.height == sendCodec.height);
+    sendCodec.width = 352/2;
+    sendCodec.height = 288/2;
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _vcm->SendCodec(&currentSendCodec);
+    TEST(currentSendCodec.width == sendCodec.width &&
+        currentSendCodec.height == sendCodec.height);
+
+    delete _decodeCallback;
+    _decodeCallback = NULL;
+    delete _encodeCompleteCallback;
+    _encodeCompleteCallback = NULL;
+
+    VCMEncodeCompleteCallback *_encodeCallback = new VCMEncodeCompleteCallback(_encodedFile);
+
+    /*************************/
+    /* External codecs       */
+    /*************************/
+
+
+    _vcm->InitializeReceiver();
+    VP8Decoder* decoder = VP8Decoder::Create();
+    VideoCodec vp8DecSettings;
+    VideoCodingModule::Codec(kVideoCodecVP8, &vp8DecSettings);
+    TEST(_vcm->RegisterExternalDecoder(decoder, vp8DecSettings.plType, false) == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&vp8DecSettings, 1, false) == VCM_OK);
+    VP8Encoder* encoder = VP8Encoder::Create();
+    VideoCodec vp8EncSettings;
+    VideoCodingModule::Codec(kVideoCodecVP8, &vp8EncSettings);
+    _vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
+    _encodeCallback->RegisterReceiverVCM(_vcm);
+    _encodeCallback->SetCodecType(kRTPVideoVP8);
+    TEST(_vcm->RegisterExternalEncoder(encoder, vp8EncSettings.plType) == VCM_OK);
+    TEST(_vcm->RegisterSendCodec(&vp8EncSettings, 4, 1440) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+
+    // De-register and try again.
+    TEST(_vcm->RegisterExternalDecoder(NULL, vp8DecSettings.plType, false) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() < 0); // Expect an error since we have de-registered the decoder
+    TEST(_vcm->RegisterExternalEncoder(NULL, vp8DecSettings.plType) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) < 0); // No send codec registered
+
+    delete decoder;
+    decoder = NULL;
+    delete encoder;
+    encoder = NULL;
+
+    /***************************************
+     * Test the "require key frame" setting*
+     ***************************************/
+
+    TEST(_vcm->InitializeSender() == VCM_OK);
+    TEST(_vcm->InitializeReceiver() == VCM_OK);
+    VideoCodingModule::Codec(kVideoCodecVP8, &receiveCodec);
+    receiveCodec.height = _height;
+    receiveCodec.width = _width;
+    TEST(_vcm->RegisterSendCodec(&receiveCodec, 4, 1440) == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&receiveCodec, 1, true) == VCM_OK); // Require key frame
+    _vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
+    _encodeCallback->RegisterReceiverVCM(_vcm);
+    _encodeCallback->SetCodecType(kRTPVideoVP8);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    TEST(_vcm->ResetDecoder() == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    // Try to decode a delta frame. Should get a warning since we have enabled the "require key frame" setting
+    // and because no frame type request callback has been registered.
+    TEST(_vcm->Decode() == VCM_MISSING_CALLBACK);
+    TEST(_vcm->FrameTypeRequest(kVideoFrameKey, 0) == VCM_OK);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+
+    // Make sure we can register another codec with the same
+    // payload type without crash.
+    _vcm->InitializeReceiver();
+    sendCodec.width = _width;
+    sendCodec.height = _height;
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
+    TEST(_vcm->FrameTypeRequest(kVideoFrameKey, 0) == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->FrameTypeRequest(kVideoFrameKey, 0) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    TEST(_vcm->ResetDecoder() == VCM_OK);
+
+    delete _encodeCallback;
+
+    /*************************/
+    /* Send/Receive Control */
+    /***********************/
+    /*
+    1. check available codecs (N)
+    2. register all corresponding decoders
+    3. encode 300/N frames with each encoder, and hope to properly decode
+    4. encode without a matching decoder - expect an error
+    */
+    rewind(_sourceFile);
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    sourceFrame.Free();
+    VCMDecodeCompleteCallback* decodeCallCDT = new VCMDecodeCompleteCallback(_decodedFile);
+    VCMEncodeCompleteCallback* encodeCallCDT = new VCMEncodeCompleteCallback(_encodedFile);
+    _vcm->RegisterReceiveCallback(decodeCallCDT);
+    _vcm->RegisterTransportCallback(encodeCallCDT);
+    encodeCallCDT->RegisterReceiverVCM(_vcm);
+    if (VideoCodingModule::NumberOfCodecs() > 0)
+    {
+        // Register all available decoders.
+        int i, j;
+        //double psnr;
+        sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+        _vcm->RegisterReceiveCallback(decodeCallCDT);
+        for (i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
+        {
+            VideoCodingModule::Codec(i, &receiveCodec);
+            if (strcmp(receiveCodec.plName, "I420") == 0)
+            {
+                receiveCodec.height = _height;
+                receiveCodec.width = _width;
+            }
+            _vcm->RegisterReceiveCodec(&receiveCodec, 1);
+        }
+        // start encoding - iterating over available encoders
+        _vcm->RegisterTransportCallback(encodeCallCDT);
+        encodeCallCDT->RegisterReceiverVCM(_vcm);
+        encodeCallCDT->Initialize();
+        int frameCnt = 0;
+        for (i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
+        {
+            encodeCallCDT->ResetByteCount();
+            VideoCodingModule::Codec(i, &sendCodec);
+            sendCodec.height = _height;
+            sendCodec.width = _width;
+            sendCodec.startBitrate = 1000;
+            sendCodec.maxBitrate = 8000;
+            encodeCallCDT->SetFrameDimensions(_width, _height);
+            encodeCallCDT->SetCodecType(ConvertCodecType(sendCodec.plName));
+            TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) == VCM_OK);
+
+            // We disable the frame dropper to avoid dropping frames due to
+            // bad rate control. This isn't a codec performance test, and the
+            // I420 codec is expected to produce too many bits.
+            _vcm->EnableFrameDropper(false);
+
+            printf("Encoding with %s \n\n", sendCodec.plName);
+            for (j=0; j < int(300/VideoCodingModule::NumberOfCodecs()); j++)// assuming 300 frames, NumberOfCodecs <= 10
+            {
+                frameCnt++;
+                TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+                // building source frame
+                sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+                sourceFrame.SetHeight(_height);
+                sourceFrame.SetWidth(_width);
+                sourceFrame.SetLength(_lengthSourceFrame);
+                _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+                sourceFrame.SetTimeStamp(_timeStamp);
+                // send frame to the encoder
+                TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+                waitEvent->Wait(33); // was 100
+
+                int ret =_vcm->Decode();
+                TEST(ret == 0);
+                if (ret < 0)
+                {
+                    printf("Error #%d in frame number %d \n",ret, frameCnt);
+                }
+                 // verifying matching payload types:
+                _vcm->SendCodec(&sendCodec);
+                _vcm->ReceiveCodec(&receiveCodec);
+                TEST(sendCodec.plType == receiveCodec.plType);
+                if (sendCodec.plType != receiveCodec.plType)
+                {
+                    printf("frame number:%d\n",frameCnt);
+                }
+            } // end for:encode-decode
+           // byte count for codec specific
+
+            printf("Total bytes encoded: %f \n\n",(8.0/1000)*(encodeCallCDT->EncodedBytes()/((int)10/VideoCodingModule::NumberOfCodecs())));
+            // decode what's left in the buffer....
+            _vcm->Decode();
+            _vcm->Decode();
+            // Don't measure PSNR for I420 since it will be perfect.
+            if (sendCodec.codecType != kVideoCodecI420) {
+                webrtc::test::QualityMetricsResult psnr;
+                I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _width,
+                                  _height, &psnr);
+                printf("\n @ %d KBPS:  ", sendCodec.startBitrate);
+                printf("PSNR from encoder-decoder send-receive control test"
+                       "is %f\n\n", psnr.average);
+            }
+        } // end: iterate codecs
+        rewind(_sourceFile);
+        sourceFrame.Free();
+        delete [] tmpBuffer;
+        delete decodeCallCDT;
+        delete encodeCallCDT;
+        // closing and calculating PSNR for prior encoder-decoder test
+        TearDown(); // closing open files
+    } // end of #codecs >1
+
+    delete waitEvent;
+    Print();
+    return 0;
+}
+void
+CodecDataBaseTest::Print()
+{
+    printf("\nVCM Codec DataBase Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+
+void
+CodecDataBaseTest::TearDown()
+{
+    fclose(_sourceFile);
+    fclose(_decodedFile);
+    fclose(_encodedFile);
+    return;
+}
diff --git a/trunk/src/modules/video_coding/main/test/codec_database_test.h b/trunk/src/modules/video_coding/main/test/codec_database_test.h
new file mode 100644
index 0000000..cc33e05
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/codec_database_test.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_CODEC_DATABASE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_CODEC_DATABASE_TEST_H_
+
+#include "video_coding.h"
+#include "test_util.h"
+
+#include <string.h>
+
+/*
+Test consists of:
+1. Sanity checks: Send and Receive side (bad input, etc.)
+2. Send-side control (encoder registration etc.)
+3. Decoder-side control - encode with various encoders, and verify correct decoding
+*/
+
+class CodecDataBaseTest
+{
+public:
+    // vcm is borrowed, not owned; it must outlive this test object.
+    CodecDataBaseTest(webrtc::VideoCodingModule* vcm);
+    ~CodecDataBaseTest();
+    // Static test entry point invoked by the test driver.
+    static int RunTest(CmdArgs& args);
+    // Runs the codec database test pass; returns 0 on completion.
+    WebRtc_Word32 Perform(CmdArgs& args);
+private:
+    void TearDown();            // Closes the files opened by Setup().
+    void Setup(CmdArgs& args);  // Opens I/O files and caches sequence parameters.
+    void Print();               // Prints the TEST macro pass/fail summary.
+    webrtc::VideoCodingModule*       _vcm;          // Not owned.
+    std::string                      _inname;       // Source (input) file path.
+    std::string                      _outname;      // Decoded output file path.
+    std::string                      _encodedName;  // Encoded bitstream file path.
+    FILE*                            _sourceFile;
+    FILE*                            _decodedFile;
+    FILE*                            _encodedFile;
+    WebRtc_UWord16                   _width;        // Frame width in pixels.
+    WebRtc_UWord16                   _height;       // Frame height in pixels.
+    WebRtc_UWord32                   _lengthSourceFrame; // Size of one raw input frame, in bytes.
+    WebRtc_UWord32                   _timeStamp;    // Running timestamp for submitted frames.
+    float                            _frameRate;
+}; // end of codecDBTest class definition
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_CODEC_DATABASE_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/test/decode_from_storage_test.cc b/trunk/src/modules/video_coding/main/test/decode_from_storage_test.cc
new file mode 100644
index 0000000..628d509
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/decode_from_storage_test.cc
@@ -0,0 +1,177 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "../source/event.h"
+#include "rtp_player.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+using namespace webrtc;
+
+// VCMFrameStorageCallback implementation that feeds every stored frame
+// straight into a second ("playback") VCM instance for decoding.
+class FrameStorageCallback : public VCMFrameStorageCallback
+{
+public:
+    // vcm is borrowed, not owned; it must outlive this callback.
+    FrameStorageCallback(VideoCodingModule* vcm) : _vcm(vcm) {}
+
+    // Forwards the frame to DecodeFromStorage() on the playback VCM.
+    // Always reports VCM_OK; the DecodeFromStorage() result is ignored.
+    WebRtc_Word32 StoreReceivedFrame(const EncodedVideoData& frameToStore)
+    {
+        _vcm->DecodeFromStorage(frameToStore);
+        return VCM_OK;
+    }
+
+private:
+    VideoCodingModule* _vcm; // Not owned.
+};
+
+// Plays an RTP dump file into one VCM instance while storing every received
+// frame, and decodes each stored frame through a second VCM instance
+// (vcmPlayback) via FrameStorageCallback. Decoded output is written to
+// args.outputFile (or a default path). Returns 0 on success, -1 on failure.
+// NOTE(review): the early "return -1" paths leak vcm/vcmPlayback, the
+// payloadTypes entries and the Trace singleton -- tolerable for a manual
+// test binary, but worth confirming.
+int DecodeFromStorageTest(CmdArgs& args)
+{
+    // Make sure this test isn't executed without simulated events.
+#if !defined(EVENT_DEBUG)
+    return -1;
+#endif
+    // BEGIN Settings
+
+    bool protectionEnabled = false;
+    VCMVideoProtection protectionMethod = kProtectionNack;
+    WebRtc_UWord32 rttMS = 100;
+    float lossRate = 0.00f;
+    bool reordering = false;
+    WebRtc_UWord32 renderDelayMs = 0;
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+    const WebRtc_Word64 MAX_RUNTIME_MS = -1; // -1 = no runtime limit.
+    std::string rtpFilename = args.inputFile;
+    std::string outFilename = args.outputFile;
+    if (outFilename == "")
+        outFilename = test::OutputPath() + "DecodeFromStorage.yuv";
+
+    FrameReceiveCallback receiveCallback(outFilename.c_str());
+
+    // END Settings
+
+    Trace::CreateTrace();
+    Trace::SetTraceFile(
+        (test::OutputPath() + "decodeFromStorageTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+
+    FakeTickTime clock(0); // Simulated clock starting at t=0.
+    // TODO(hlundin): This test was not verified after changing to FakeTickTime.
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    VideoCodingModule* vcmPlayback = VideoCodingModule::Create(2, &clock);
+    // Frames stored by |vcm| are decoded immediately by |vcmPlayback|.
+    FrameStorageCallback storageCallback(vcmPlayback);
+    RtpDataCallback dataCallback(vcm);
+    WebRtc_Word32 ret = vcm->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+    ret = vcmPlayback->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+    vcm->RegisterFrameStorageCallback(&storageCallback);
+    vcmPlayback->RegisterReceiveCallback(&receiveCallback);
+    RTPPlayer rtpStream(rtpFilename.c_str(), &dataCallback, &clock);
+    PayloadTypeList payloadTypes;
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
+                                                  kVideoCodecVP8));
+
+    // Register receive codecs in VCM
+    for (PayloadTypeList::iterator it = payloadTypes.begin();
+        it != payloadTypes.end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec codec;
+            memset(&codec, 0, sizeof(codec));
+            strncpy(codec.plName, payloadType->name.c_str(), payloadType->name.length());
+            codec.plName[payloadType->name.length()] = '\0';
+            codec.plType = payloadType->payloadType;
+            codec.codecType = payloadType->codecType;
+            // The same codec must be registered in both the receiving and
+            // the playback module.
+            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+            if (vcmPlayback->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+    if (rtpStream.Initialize(&payloadTypes) < 0)
+    {
+        return -1;
+    }
+    // NACK is only meaningful when protection is enabled and NACK-based;
+    // with the settings above this evaluates to false.
+    bool nackEnabled = protectionEnabled && (protectionMethod == kProtectionNack ||
+                                            protectionMethod == kProtectionDualDecoder);
+    rtpStream.SimulatePacketLoss(lossRate, nackEnabled, rttMS);
+    rtpStream.SetReordering(reordering);
+    vcm->SetChannelParameters(0, 0, rttMS);
+    vcm->SetVideoProtection(protectionMethod, protectionEnabled);
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    ret = 0;
+
+    // RTP stream main loop
+    while ((ret = rtpStream.NextPacket(clock.MillisecondTimestamp())) == 0)
+    {
+        // Attempt a decode every 5 simulated milliseconds.
+        if (clock.MillisecondTimestamp() % 5 == 0)
+        {
+            ret = vcm->Decode();
+            if (ret < 0)
+            {
+                return -1;
+            }
+        }
+        if (vcm->TimeUntilNextProcess() <= 0)
+        {
+            vcm->Process();
+        }
+        if (MAX_RUNTIME_MS > -1 && clock.MillisecondTimestamp() >= MAX_RUNTIME_MS)
+        {
+            break;
+        }
+        clock.IncrementDebugClock(1); // Advance simulated time by 1 ms.
+    }
+
+    // ret: 1 = stream finished, -1 = failure, 0 = stopped by MAX_RUNTIME_MS.
+    switch (ret)
+    {
+    case 1:
+        printf("Success\n");
+        break;
+    case -1:
+        printf("Failed\n");
+        break;
+    case 0:
+        printf("Timeout\n");
+        break;
+    }
+
+    rtpStream.Print();
+
+    // Tear down
+    while (!payloadTypes.empty())
+    {
+        delete payloadTypes.front();
+        payloadTypes.pop_front();
+    }
+    VideoCodingModule::Destroy(vcm);
+    vcm = NULL;
+    VideoCodingModule::Destroy(vcmPlayback);
+    vcmPlayback = NULL;
+    Trace::ReturnTrace();
+
+    return 0;
+}
diff --git a/trunk/src/modules/video_coding/main/test/generic_codec_test.cc b/trunk/src/modules/video_coding/main/test/generic_codec_test.cc
new file mode 100644
index 0000000..56b3c86
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/generic_codec_test.cc
@@ -0,0 +1,587 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "generic_codec_test.h"
+#include <cmath>
+#include <stdio.h>
+#include "../source/event.h"
+#include "rtp_rtcp.h"
+#include "module_common_types.h"
+#include "test_macros.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+using namespace webrtc;
+
+enum { kMaxWaitEncTimeMs = 100 };
+
+// Test entry point: creates the VCM and harness, configures tracing, runs
+// Perform() and tears everything down. Returns 0, or -1 when simulated
+// events (EVENT_DEBUG) are unavailable -- the test drives time with
+// FakeTickTime and cannot run against the real clock.
+int GenericCodecTest::RunTest(CmdArgs& args)
+{
+#if !defined(EVENT_DEBUG)
+    printf("\n\nEnable debug events to run this test!\n\n");
+    return -1;
+#endif
+    FakeTickTime clock(0); // Simulated clock starting at t=0.
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    GenericCodecTest* get = new GenericCodecTest(vcm, &clock);
+    Trace::CreateTrace();
+    Trace::SetTraceFile(
+        (test::OutputPath() + "genericCodecTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    get->Perform(args);
+    Trace::ReturnTrace();
+    delete get;
+    VideoCodingModule::Destroy(vcm);
+    return 0;
+}
+
+// Borrows (does not own) the VCM and the fake clock; both must outlive this
+// object. Sequence parameters are zeroed here and filled in by Setup().
+// NOTE(review): _sumEncBytes and the FILE* members are not initialized in
+// this list -- Setup() assigns the file pointers; confirm _sumEncBytes is
+// written before it is read.
+GenericCodecTest::GenericCodecTest(VideoCodingModule* vcm, FakeTickTime* clock):
+_clock(clock),
+_vcm(vcm),
+_width(0),
+_height(0),
+_frameRate(0),
+_lengthSourceFrame(0),
+_timeStamp(0)
+{
+}
+
+// No-op destructor: the VCM and clock are owned and released by the caller
+// (see RunTest()); files opened by Setup() are closed elsewhere.
+GenericCodecTest::~GenericCodecTest()
+{
+}
+
+// Caches the test-sequence parameters from |args| and opens the input,
+// encoded-output and decoded-output files, exiting the process on any
+// open failure. When no output file is specified, a default path under
+// test::OutputPath() is used.
+void
+GenericCodecTest::Setup(CmdArgs& args)
+{
+    _timeStamp = 0;
+
+    /* Test Sequence parameters */
+
+    _inname= args.inputFile;
+    // Fall back to the default decoded-output path only when the caller did
+    // not supply one. (The original condition was inverted: a user-supplied
+    // output file was discarded and an empty one was passed to fopen.)
+    if (args.outputFile.empty())
+        _outname = test::OutputPath() + "GCTest_decoded.yuv";
+    else
+        _outname = args.outputFile;
+    _encodedName = test::OutputPath() + "GCTest_encoded.vp8";
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _lengthSourceFrame  = 3*_width*_height/2; // One I420 frame: 1.5 bytes/pixel.
+
+    /* File settings */
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+    if ((_decodedFile = fopen(_outname.c_str(),  "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    return;
+}
+// Runs the generic codec test pass: input sanity checks, an encode/decode
+// round trip, a key-frame-request-on-loss check, rate-control verification
+// over all registered codecs, encoder pipeline-delay measurement and a
+// temporal-decimation check. Returns 0 on completion; individual check
+// failures are tallied by the TEST macro counters printed via Print().
+// Fix: tmpBuffer is allocated with new[], so it must be released with
+// delete [] (plain delete on an array is undefined behavior).
+WebRtc_Word32
+GenericCodecTest::Perform(CmdArgs& args)
+{
+    WebRtc_Word32 ret;
+    Setup(args);
+    /*
+    1. sanity checks
+    2. encode/decoder individuality
+    3. API testing
+    4. Target bitrate (within a specific timespan)
+    5. Pipeline Delay
+    */
+
+    /*******************************/
+    /* sanity checks on inputs    */
+    /*****************************/
+    VideoCodec sendCodec, receiveCodec;
+    sendCodec.maxBitrate = 8000;
+    TEST(_vcm->NumberOfCodecs() > 0); // This works since we now initialize the list in the constructor
+    TEST(_vcm->Codec(0, &sendCodec)  == VCM_OK);
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+    WebRtc_Word32 NumberOfCodecs = _vcm->NumberOfCodecs();
+    // registration of first codec in the list
+    int i = 0;
+    _vcm->Codec(0, &_sendCodec);
+    TEST(_vcm->RegisterSendCodec(&_sendCodec, 4, 1440) == VCM_OK);
+    // sanity on encoder registration
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    _vcm->InitializeSender();
+    TEST(_vcm->Codec(kVideoCodecVP8, &sendCodec) == 0);
+    TEST(_vcm->RegisterSendCodec(&sendCodec, -1, 1440) < 0); // bad number of cores
+    sendCodec.maxBitrate = 8000;
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _vcm->InitializeSender();
+    _vcm->Codec(kVideoCodecVP8, &sendCodec);
+    sendCodec.height = 0;
+    TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) < 0); // bad height
+    _vcm->Codec(kVideoCodecVP8, &sendCodec);
+    sendCodec.startBitrate = -2;
+    TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) < 0); // bad bit rate
+    _vcm->Codec(kVideoCodecVP8, &sendCodec);
+    _vcm->InitializeSender();
+    TEST(_vcm->SetChannelParameters(100, 0, 0) < 0);// setting rate when encoder uninitialized
+    // register all available decoders -- need to have more for this test
+    for (i=0; i< NumberOfCodecs; i++)
+    {
+        _vcm->Codec(i, &receiveCodec);
+        _vcm->RegisterReceiveCodec(&receiveCodec, 1);
+    }
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+    // building source frame
+    sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+    sourceFrame.SetHeight(_height);
+    sourceFrame.SetWidth(_width);
+    sourceFrame.SetTimeStamp(_timeStamp++);
+    // encode/decode
+    TEST(_vcm->AddVideoFrame(sourceFrame) < 0 ); // encoder uninitialized
+    _vcm->InitializeReceiver();
+    TEST(_vcm->SetChannelParameters(100, 0, 0) < 0);// setting rtt when receiver uninitialized
+
+      /**************************************/
+     /* encoder/decoder individuality test */
+    /**************************************/
+    //Register both encoder and decoder, reset decoder - encode, set up decoder, reset encoder - decode.
+    rewind(_sourceFile);
+    sourceFrame.Free();
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    NumberOfCodecs = _vcm->NumberOfCodecs();
+    // Register VP8
+    _vcm->Codec(kVideoCodecVP8, &_sendCodec);
+    _vcm->RegisterSendCodec(&_sendCodec, 4, 1440);
+    _vcm->SendCodec(&sendCodec);
+    sendCodec.startBitrate = 2000;
+
+    // Set target frame rate to half of the incoming frame rate
+    // to test the frame rate control in the VCM
+    sendCodec.maxFramerate = (WebRtc_UWord8)(_frameRate / 2);
+    sendCodec.width = _width;
+    sendCodec.height = _height;
+    TEST(strncmp(_sendCodec.plName, "VP8", 3) == 0); // was VP8
+
+    _decodeCallback = new VCMDecodeCompleteCallback(_decodedFile);
+    _encodeCompleteCallback = new VCMEncodeCompleteCallback(_encodedFile);
+    _vcm->RegisterReceiveCallback(_decodeCallback);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    _encodeCompleteCallback->RegisterReceiverVCM(_vcm);
+
+    _vcm->RegisterSendCodec(&sendCodec, 4, 1440);
+    _encodeCompleteCallback->SetCodecType(ConvertCodecType(sendCodec.plName));
+
+    _vcm->InitializeReceiver();
+    _vcm->Process();
+
+    //encoding 1 second of video
+    for (i = 0; i < _frameRate; i++)
+    {
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+        IncrementDebugClock(_frameRate);
+        _vcm->Process();
+    }
+    sendCodec.maxFramerate = (WebRtc_UWord8)_frameRate;
+    _vcm->InitializeSender();
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK); // same codec for encode and decode
+    ret = 0;
+    i = 0;
+    while ((i < 25) && (ret == 0) )
+    {
+        ret = _vcm->Decode();
+        TEST(ret == VCM_OK);
+        if (ret < 0)
+        {
+            printf("error in frame # %d \n", i);
+        }
+        IncrementDebugClock(_frameRate);
+        i++;
+    }
+    //TEST((ret == 0) && (i = 50));
+    if (ret == 0)
+    {
+        printf("Encoder/Decoder individuality test complete - View output files \n");
+    }
+    // last frame - not decoded
+    _vcm->InitializeReceiver();
+    TEST(_vcm->Decode() < 0); // frame to be encoded exists, decoder uninitialized
+
+
+    // Test key frame request on packet loss mode.
+    // This a frame as a key frame and fooling the receiver
+    // that the last packet was lost. The decoding will succeed,
+    // but the VCM will see a packet loss and request a new key frame.
+    VCMEncComplete_KeyReqTest keyReqTest_EncCompleteCallback(*_vcm);
+    KeyFrameReqTest frameTypeCallback;
+    _vcm->RegisterTransportCallback(&keyReqTest_EncCompleteCallback);
+    _encodeCompleteCallback->RegisterReceiverVCM(_vcm);
+    _vcm->RegisterSendCodec(&sendCodec, 4, 1440);
+    _encodeCompleteCallback->SetCodecType(ConvertCodecType(sendCodec.plName));
+    TEST(_vcm->SetVideoProtection(kProtectionKeyOnKeyLoss, true) == VCM_OK);
+    TEST(_vcm->RegisterFrameTypeCallback(&frameTypeCallback) == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+    sourceFrame.SetTimeStamp(_timeStamp);
+    // First packet of a subsequent frame required before the jitter buffer
+    // will allow decoding an incomplete frame.
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+
+    printf("API tests complete \n");
+
+     /*******************/
+    /* Bit Rate Tests */
+    /*****************/
+    /* Requirements:
+    * 1. OneSecReq = 15 % above/below target over a time period of 1s (_frameRate number of frames)
+    * 3. FullReq  = 10% for total seq. (for 300 frames/seq. coincides with #1)
+    * 4. Test will go over all registered codecs
+    //NOTE: time requirements are not part of the release tests
+    */
+    double FullReq   =  0.1;
+    //double OneSecReq = 0.15;
+    printf("\n RATE CONTROL TEST\n");
+    // initializing....
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+    rewind(_sourceFile);
+    sourceFrame.Free();
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    const float bitRate[] = {100, 400, 600, 1000, 2000};
+    const float nBitrates = sizeof(bitRate)/sizeof(*bitRate);
+    float _bitRate = 0;
+    int _frameCnt = 0;
+    float totalBytesOneSec;//, totalBytesTenSec;
+    float totalBytes, actualBitrate;
+    VCMFrameCount frameCount; // testing frame type counters
+    // start test
+    NumberOfCodecs = _vcm->NumberOfCodecs();
+    // going over all available codecs
+    _encodeCompleteCallback->SetFrameDimensions(_width, _height);
+    SendStatsTest sendStats;
+    for (int k = 0; k < NumberOfCodecs; k++)
+    //for (int k = NumberOfCodecs - 1; k >=0; k--)
+    {// static list starts from 0
+        //just checking
+        _vcm->InitializeSender();
+        _sendCodec.maxBitrate = 8000;
+        TEST(_vcm->Codec(k, &_sendCodec)== VCM_OK);
+        _vcm->RegisterSendCodec(&_sendCodec, 1, 1440);
+        _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+        _encodeCompleteCallback->SetCodecType(ConvertCodecType(_sendCodec.plName));
+        printf (" \n\n Codec type = %s \n\n",_sendCodec.plName);
+        for (i = 0; i < nBitrates; i++)
+        {
+             _bitRate = static_cast<float>(bitRate[i]);
+            // just testing
+            _vcm->InitializeSender();
+            _sendCodec.startBitrate = (int)_bitRate;
+            _sendCodec.maxBitrate = 8000;
+            _sendCodec.maxFramerate = _frameRate;
+            _vcm->RegisterSendCodec(&_sendCodec, 1, 1440);
+            _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+            // up to here
+            _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 20);
+            _frameCnt = 0;
+            totalBytes = 0;
+            _encodeCompleteCallback->Initialize();
+            sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+            _vcm->RegisterSendStatisticsCallback(&sendStats);
+            while (fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+                _lengthSourceFrame)
+            {
+                _frameCnt++;
+                sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+                sourceFrame.SetHeight(_height);
+                sourceFrame.SetWidth(_width);
+                _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+                sourceFrame.SetTimeStamp(_timeStamp);
+
+                ret = _vcm->AddVideoFrame(sourceFrame);
+                IncrementDebugClock(_frameRate);
+                // The following should be uncommented for timing tests. Release tests only include
+                // compliance with full sequence bit rate.
+
+
+                //totalBytes = WaitForEncodedFrame();
+                //currentTime = VCMTickTime::MillisecondTimestamp();//clock()/(double)CLOCKS_PER_SEC;
+                if (_frameCnt == _frameRate)// @ 1sec
+                {
+                    totalBytesOneSec =  _encodeCompleteCallback->EncodedBytes();//totalBytes;
+                }
+                TEST(_vcm->TimeUntilNextProcess() >= 0);
+            } // video seq. encode done
+            TEST(_vcm->TimeUntilNextProcess() == 0);
+            _vcm->Process(); // Let the module calculate its send bit rate estimate
+            // estimating rates
+            // complete sequence
+            // bit rate assumes input frame rate is as specified
+            totalBytes = _encodeCompleteCallback->EncodedBytes();
+            actualBitrate = (float)(8.0/1000)*(totalBytes / (_frameCnt / _frameRate));
+
+            printf("Complete Seq.: target bitrate: %.0f kbps, actual bitrate: %.1f kbps\n", _bitRate, actualBitrate);
+            TEST((fabs(actualBitrate - _bitRate) < FullReq * _bitRate) ||
+                 (strncmp(_sendCodec.plName, "I420", 4) == 0));
+
+            // 1 Sec.
+            actualBitrate = (float)(8.0/1000)*(totalBytesOneSec);
+            //actualBitrate = (float)(8.0*totalBytesOneSec)/(oneSecTime - startTime);
+            //printf("First 1Sec: target bitrate: %.0f kbps, actual bitrate: %.1f kbps\n", _bitRate, actualBitrate);
+            //TEST(fabs(actualBitrate - _bitRate) < OneSecReq * _bitRate);
+            rewind(_sourceFile);
+
+            //checking key/delta frame count
+            _vcm->SentFrameCount(frameCount);
+            printf("frame count: %d delta, %d key\n", frameCount.numDeltaFrames, frameCount.numKeyFrames);
+        }// end per codec
+
+    } // end rate control test
+    /********************************/
+    /* Encoder Pipeline Delay Test */
+    /******************************/
+    _vcm->InitializeSender();
+    sourceFrame.Free();
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    NumberOfCodecs = _vcm->NumberOfCodecs();
+    bool encodeComplete = false;
+    // going over all available codecs
+    for (int k = 0; k < NumberOfCodecs; k++)
+    {
+        _vcm->Codec(k, &_sendCodec);
+        _vcm->InitializeSender();
+        _sendCodec.maxBitrate = 8000;
+        _vcm->RegisterSendCodec(&_sendCodec, 4, 1440);
+        _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+
+        _frameCnt = 0;
+        encodeComplete = false;
+        while (encodeComplete == false)
+        {
+            TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+            _frameCnt++;
+            sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+            sourceFrame.SetHeight(_height);
+            sourceFrame.SetWidth(_width);
+            _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+            sourceFrame.SetTimeStamp(_timeStamp);
+            _vcm->AddVideoFrame(sourceFrame);
+            encodeComplete = _encodeCompleteCallback->EncodeComplete();
+        } // first frame encoded
+        printf ("\n Codec type = %s \n", _sendCodec.plName);
+        printf(" Encoder pipeline delay = %d frames\n", _frameCnt - 1);
+    } // end for all codecs
+
+    /********************************/
+    /* Encoder Packet Size Test     */
+    /********************************/
+    RtpRtcp& rtpModule = *RtpRtcp::CreateRtpRtcp(1, false);
+    TEST(rtpModule.InitSender() == 0);
+    RTPSendCallback_SizeTest sendCallback;
+    rtpModule.RegisterSendTransport(&sendCallback);
+
+    VCMRTPEncodeCompleteCallback encCompleteCallback(&rtpModule);
+    _vcm->InitializeSender();
+
+    // TEST DISABLED FOR NOW SINCE VP8 DOESN'T HAVE THIS FEATURE
+
+//    sourceFrame.Free();
+//    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+//    NumberOfCodecs = _vcm->NumberOfCodecs();
+//    WebRtc_UWord32 targetPayloadSize = 500;
+//    rtpModule.SetMaxTransferUnit(targetPayloadSize);
+//    // going over all available codecs
+//    for (int k = 0; k < NumberOfCodecs; k++)
+//    {
+//        _vcm->Codec(k, &_sendCodec);
+//        if (strncmp(_sendCodec.plName, "VP8", 3) == 0)
+//        {
+//            // Only test with VP8
+//            continue;
+//        }
+//        rtpModule.RegisterSendPayload(_sendCodec.plName, _sendCodec.plType);
+//        // Make sure we only get one NAL unit per packet
+//        _vcm->InitializeSender();
+//        _vcm->RegisterSendCodec(&_sendCodec, 4, targetPayloadSize);
+//        sendCallback.SetMaxPayloadSize(targetPayloadSize);
+//        _vcm->RegisterTransportCallback(&encCompleteCallback);
+//        sendCallback.Reset();
+//        _frameCnt = 0;
+//        rewind(_sourceFile);
+//        while (!feof(_sourceFile))
+//        {
+//            fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile);
+//            _frameCnt++;
+//            sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+//            sourceFrame.SetHeight(_height);
+//            sourceFrame.SetWidth(_width);
+//            _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+//            sourceFrame.SetTimeStamp(_timeStamp);
+//            ret = _vcm->AddVideoFrame(sourceFrame);
+//        } // first frame encoded
+//        printf ("\n Codec type = %s \n",_sendCodec.plName);
+//        printf(" Average payload size = %f bytes, target = %u bytes\n", sendCallback.AveragePayloadSize(), targetPayloadSize);
+//    } // end for all codecs
+
+
+    // Test temporal decimation settings
+    for (int k = 0; k < NumberOfCodecs; k++)
+    {
+        _vcm->Codec(k, &_sendCodec);
+        if (strncmp(_sendCodec.plName, "I420", 4) == 0)
+        {
+            // Only test with I420
+            break;
+        }
+    }
+    TEST(strncmp(_sendCodec.plName, "I420", 4) == 0);
+    _vcm->InitializeSender();
+    _sendCodec.maxFramerate = static_cast<WebRtc_UWord8>(_frameRate / 2.0 + 0.5f);
+    _vcm->RegisterSendCodec(&_sendCodec, 4, 1440);
+    _vcm->SetChannelParameters(2000, 0, 0);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    // up to here
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 20);
+    _encodeCompleteCallback->Initialize();
+    sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+    _vcm->RegisterSendStatisticsCallback(&sendStats);
+    rewind(_sourceFile);
+    while (fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+        _lengthSourceFrame)
+    {
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        ret = _vcm->AddVideoFrame(sourceFrame);
+        if (_vcm->TimeUntilNextProcess() <= 0)
+        {
+            _vcm->Process();
+        }
+        IncrementDebugClock(_frameRate);
+    } // first frame encoded
+
+    RtpRtcp::DestroyRtpRtcp(&rtpModule);
+    Print();
+    // tmpBuffer was allocated with new[]; must use array delete.
+    delete [] tmpBuffer;
+    delete _decodeCallback;
+    delete _encodeCompleteCallback;
+    return 0;
+}
+
+
+// Prints a summary of the TEST macro counters accumulated during the run:
+// total checks executed, followed by the failure count (or ALL PASSED).
+void
+GenericCodecTest::Print()
+{
+    printf(" \n\n VCM Generic Encoder Test: \n\n%i tests completed\n", vcmMacrosTests);
+    const bool anyFailures = (vcmMacrosErrors > 0);
+    if (!anyFailures)
+    {
+        printf("ALL PASSED\n\n");
+    }
+    else
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+}
+
+// Polls the encode-complete callback and returns the encoded byte count once
+// a frame is finished, or 0 if the (simulated) deadline passes first.
+// NOTE(review): the loop reads the FakeTickTime clock but never advances it,
+// so if EncodeComplete() stays false this busy-waits forever instead of
+// timing out -- confirm whether that is intended (the call sites that used
+// this are currently commented out in Perform()).
+float
+GenericCodecTest::WaitForEncodedFrame() const
+{
+    WebRtc_Word64 startTime = _clock->MillisecondTimestamp();
+    while (_clock->MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs*10)
+    {
+        if (_encodeCompleteCallback->EncodeComplete())
+        {
+            return _encodeCompleteCallback->EncodedBytes();
+        }
+    }
+    return 0;
+}
+
+// Advances the simulated clock by one frame interval (1000/frameRate ms).
+void
+GenericCodecTest::IncrementDebugClock(float frameRate)
+{
+    _clock->IncrementDebugClock(1000/frameRate);
+}
+
+// Transport mock: counts each outgoing packet, accumulates the payload
+// size sum, and asserts that no payload (len minus the 12-byte fixed RTP
+// header) exceeds the configured maximum. Always reports success.
+int
+RTPSendCallback_SizeTest::SendPacket(int channel, const void *data, int len)
+{
+    _nPackets++;
+    _payloadSizeSum += len;
+    // Make sure no payloads (len - header size) are larger than maxPayloadSize
+    TEST(len > 0 && static_cast<WebRtc_UWord32>(len - 12) <= _maxPayloadSize);
+    return 0;
+}
+
+// Sets the payload-size ceiling enforced by SendPacket().
+void
+RTPSendCallback_SizeTest::SetMaxPayloadSize(WebRtc_UWord32 maxPayloadSize)
+{
+    _maxPayloadSize = maxPayloadSize;
+}
+
+// Clears the packet counter and payload-size accumulator so a new
+// measurement pass can start from zero.
+void
+RTPSendCallback_SizeTest::Reset()
+{
+    _payloadSizeSum = 0;
+    _nPackets = 0;
+}
+
+// Mean payload size (bytes) over all packets seen since the last Reset();
+// returns 0 when no packets have been counted.
+float
+RTPSendCallback_SizeTest::AveragePayloadSize() const
+{
+    return (_nPackets > 0)
+        ? _payloadSizeSum / static_cast<float>(_nPackets)
+        : 0;
+}
+
+// Wraps each encoded frame in a single fabricated RTP packet and injects it
+// back into the receiving VCM. The packet is marked as a key frame with
+// isFirstPacket == false, and the sequence number advances by 2 each call,
+// leaving a gap so the receiver perceives a lost packet -- this drives the
+// key-frame-request-on-loss check in GenericCodecTest::Perform().
+WebRtc_Word32
+VCMEncComplete_KeyReqTest::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& /*fragmentationHeader*/,
+        const webrtc::RTPVideoHeader* /*videoHdr*/)
+{
+    WebRtcRTPHeader rtpInfo;
+    rtpInfo.header.markerBit = true; // end of frame
+    rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+    rtpInfo.type.Video.codec = kRTPVideoVP8;
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.sequenceNumber = _seqNo;
+    _seqNo += 2; // Skip one sequence number to simulate a lost packet.
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.timestamp = _timeStamp;
+    _timeStamp += 3000; // Advance the RTP timestamp per injected frame.
+    rtpInfo.type.Video.isFirstPacket = false;
+    rtpInfo.frameType = kVideoFrameKey;
+    return _vcm.IncomingPacket(payloadData, payloadSize, rtpInfo);
+}
diff --git a/trunk/src/modules/video_coding/main/test/generic_codec_test.h b/trunk/src/modules/video_coding/main/test/generic_codec_test.h
new file mode 100644
index 0000000..c88280f
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/generic_codec_test.h
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_GENERIC_CODEC_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_GENERIC_CODEC_TEST_H_
+
+#include "video_coding.h"
+
+#include <string.h>
+#include <fstream>
+
+#include "test_callbacks.h"
+#include "test_util.h"
+/*
+Test consists of:
+1. Sanity checks
+2. Bit rate validation
+3. Encoder control test / General API functionality
+4. Decoder control test / General API functionality
+
+*/
+
+namespace webrtc {
+
+int VCMGenericCodecTest(CmdArgs& args);
+
+class FakeTickTime;
+
+class GenericCodecTest
+{
+public:
+    GenericCodecTest(webrtc::VideoCodingModule* vcm,
+                     webrtc::FakeTickTime* clock);
+    ~GenericCodecTest();
+    static int RunTest(CmdArgs& args);
+    WebRtc_Word32 Perform(CmdArgs& args);
+    float WaitForEncodedFrame() const;
+
+private:
+    void Setup(CmdArgs& args);
+    void Print();
+    WebRtc_Word32 TearDown();
+    void IncrementDebugClock(float frameRate);
+
+    webrtc::FakeTickTime*                _clock;
+    webrtc::VideoCodingModule*           _vcm;
+    webrtc::VideoCodec                   _sendCodec;
+    webrtc::VideoCodec                   _receiveCodec;
+    std::string                          _inname;
+    std::string                          _outname;
+    std::string                          _encodedName;
+    WebRtc_Word32                        _sumEncBytes;
+    FILE*                                _sourceFile;
+    FILE*                                _decodedFile;
+    FILE*                                _encodedFile;
+    WebRtc_UWord16                       _width;
+    WebRtc_UWord16                       _height;
+    float                                _frameRate;
+    WebRtc_UWord32                       _lengthSourceFrame;
+    WebRtc_UWord32                       _timeStamp;
+    VCMDecodeCompleteCallback*           _decodeCallback;
+    VCMEncodeCompleteCallback*           _encodeCompleteCallback;
+
+}; // end of GenericCodecTest class definition
+
+class RTPSendCallback_SizeTest : public webrtc::Transport
+{
+public:
+    // constructor input: (receive side) rtp module to send encoded data to
+    RTPSendCallback_SizeTest() : _maxPayloadSize(0), _payloadSizeSum(0), _nPackets(0) {}
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len) {return 0;}
+    void SetMaxPayloadSize(WebRtc_UWord32 maxPayloadSize);
+    void Reset();
+    float AveragePayloadSize() const;
+private:
+    WebRtc_UWord32         _maxPayloadSize;
+    WebRtc_UWord32         _payloadSizeSum;
+    WebRtc_UWord32         _nPackets;
+};
+
+class VCMEncComplete_KeyReqTest : public webrtc::VCMPacketizationCallback
+{
+public:
+    VCMEncComplete_KeyReqTest(webrtc::VideoCodingModule &vcm) : _vcm(vcm), _seqNo(0), _timeStamp(0) {}
+    WebRtc_Word32 SendData(
+            const webrtc::FrameType frameType,
+            const WebRtc_UWord8 payloadType,
+            WebRtc_UWord32 timeStamp,
+            const WebRtc_UWord8* payloadData,
+            const WebRtc_UWord32 payloadSize,
+            const webrtc::RTPFragmentationHeader& fragmentationHeader,
+            const webrtc::RTPVideoHeader* videoHdr);
+private:
+    webrtc::VideoCodingModule& _vcm;
+    WebRtc_UWord16 _seqNo;
+    WebRtc_UWord32 _timeStamp;
+}; // end of VCMEncComplete_KeyReqTest
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_GENERIC_CODEC_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/test/jitter_buffer_test.cc b/trunk/src/modules/video_coding/main/test/jitter_buffer_test.cc
new file mode 100644
index 0000000..2066983
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/jitter_buffer_test.cc
@@ -0,0 +1,1936 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <stdio.h>
+
+#include "common_types.h"
+#include "../source/event.h"
+#include "frame_buffer.h"
+#include "inter_frame_delay.h"
+#include "jitter_buffer.h"
+#include "jitter_estimate_test.h"
+#include "jitter_estimator.h"
+#include "media_opt_util.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "packet.h"
+#include "test_util.h"
+#include "test_macros.h"
+
+// TODO(holmer): Get rid of this to conform with style guide.
+using namespace webrtc;
+
+// TODO (Mikhal/Stefan): Update as gtest and separate to specific tests.
+
+int CheckOutFrame(VCMEncodedFrame* frameOut, unsigned int size, bool startCode)
+{
+    if (frameOut == 0)
+    {
+        return -1;
+    }
+
+    const WebRtc_UWord8* outData = frameOut->Buffer();
+
+    unsigned int i = 0;
+
+    if(startCode)
+    {
+        if (outData[0] != 0 || outData[1] != 0 || outData[2] != 0 ||
+            outData[3] != 1)
+        {
+            return -2;
+        }
+        i+= 4;
+    }
+
+    // check the frame data
+    int count = 3;
+
+    // check the frame length
+    if (frameOut->Length() != size)
+    {
+        return -3;
+    }
+
+    for(; i < size; i++)
+    {
+        if (outData[i] == 0 && outData[i + 1] == 0 && outData[i + 2] == 0x80)
+        {
+            i += 2;
+        }
+        else if(startCode && outData[i] == 0 && outData[i + 1] == 0)
+        {
+            if (outData[i] != 0 || outData[i + 1] != 0 ||
+                outData[i + 2] != 0 || outData[i + 3] != 1)
+            {
+                return -3;
+            }
+            i += 3;
+        }
+        else
+        {
+            if (outData[i] != count)
+            {
+                return -4;
+            }
+            count++;
+            if(count == 10)
+            {
+                count = 3;
+            }
+        }
+    }
+    return 0;
+}
+
+
+int JitterBufferTest(CmdArgs& args)
+{
+    // Don't run these tests with debug event.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+    TickTimeBase clock;
+
+    // Start test
+    WebRtc_UWord16 seqNum = 1234;
+    WebRtc_UWord32 timeStamp = 0;
+    int size = 1400;
+    WebRtc_UWord8 data[1500];
+    VCMPacket packet(data, size, seqNum, timeStamp, true);
+
+    VCMJitterBuffer jb(&clock);
+
+    seqNum = 1234;
+    timeStamp = 123*90;
+    FrameType incomingFrameType(kVideoFrameKey);
+    VCMEncodedFrame* frameOut=NULL;
+    WebRtc_Word64 renderTimeMs = 0;
+    packet.timestamp = timeStamp;
+    packet.seqNum = seqNum;
+
+    // build a data vector with 0, 0, 0x80, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0x80, 3....
+    data[0] = 0;
+    data[1] = 0;
+    data[2] = 0x80;
+    int count = 3;
+    for (unsigned int i = 3; i < sizeof(data) - 3; ++i)
+    {
+        data[i] = count;
+        count++;
+        if(count == 10)
+        {
+            data[i+1] = 0;
+            data[i+2] = 0;
+            data[i+3] = 0x80;
+            count = 3;
+            i += 3;
+        }
+    }
+
+    // Test out of range inputs
+    TEST(kSizeError == jb.InsertPacket(0, packet));
+    jb.ReleaseFrame(0);
+
+    // Not started
+    TEST(0 == jb.GetFrame(packet));
+    TEST(-1 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(0 == jb.GetCompleteFrameForDecoding(10));
+    TEST(0 == jb.GetFrameForDecoding());
+
+    // Start
+    jb.Start();
+
+    // Get frame to use for this timestamp
+    VCMEncodedFrame* frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // No packets inserted
+    TEST(0 == jb.GetCompleteFrameForDecoding(10));
+
+
+    //
+    // TEST single packet frame
+    //
+    //  --------
+    // |  1234  |
+    //  --------
+
+    // packet.frameType;
+    // packet.dataPtr;
+    // packet.sizeBytes;
+    // packet.timestamp;
+    // packet.seqNum;
+    // packet.isFirstPacket;
+    // packet.markerBit;
+    //
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 1 packet\n");
+
+    //
+    // TEST dual packet frame
+    //
+    //  -----------------
+    // |  1235  |  1236  |
+    //  -----------------
+    //
+
+    seqNum++;
+    timeStamp += 33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 2 packets\n");
+
+
+    //
+    // TEST 100 packets frame Key frame
+    //
+    //  ----------------------------------
+    // |  1237  |  1238  |  .... |  1336  |
+    //  ----------------------------------
+
+    // insert first packet
+    timeStamp += 33*90;
+    seqNum++;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameKey);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    int loop = 0;
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameKey);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE key frame 100 packets\n");
+
+    //
+    // TEST 100 packets frame Delta frame
+    //
+    //  ----------------------------------
+    // |  1337  |  1338  |  .... |  1436  |
+    //  ----------------------------------
+
+    // insert first packet
+    timeStamp += 33*90;
+    seqNum++;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    loop = 0;
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets\n");
+
+    //
+    // TEST packet re-ordering reverse order
+    //
+    //  ----------------------------------
+    // |  1437  |  1438  |  .... |  1536  |
+    //  ----------------------------------
+    //            <----------
+
+    // insert "first" packet last seqnum
+    timeStamp += 33*90;
+    seqNum += 100;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    loop = 0;
+    do
+    {
+        seqNum--;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum--;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets reverse order\n");
+
+    seqNum+= 100;
+
+    //
+    // TEST frame re-ordering 2 frames 2 packets each
+    //
+    //  -----------------     -----------------
+    // |  1539  |  1540  |   |  1537  |  1538  |
+    //  -----------------     -----------------
+
+    seqNum += 2;
+    timeStamp += 2* 33 * 90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // check that we fail to get frame since seqnum is not continuous
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(frameOut == 0);
+
+    seqNum -= 3;
+    timeStamp -= 33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    seqNum += 2;
+    //printf("DONE frame re-ordering 2 frames 2 packets\n");
+
+    // restore
+    packet.dataPtr = data;
+    packet.codec = kVideoCodecUnknown;
+
+    //
+    // TEST duplicate packets
+    //
+    //  -----------------
+    // |  1543  |  1543  |
+    //  -----------------
+    //
+
+   seqNum++;
+    timeStamp += 2*33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kDuplicatePacket == jb.InsertPacket(frameIn, packet));
+
+    seqNum++;
+    packet.seqNum = seqNum;
+
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE test duplicate packets\n");
+
+    //
+    // TEST H.264 insert start code
+    //
+    //  -----------------
+    // |  1544  |  1545  |
+    //  -----------------
+    // insert start code, both packets
+
+    seqNum++;
+    timeStamp += 33 * 90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.insertStartCode = true;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size * 2 + 4 * 2, true) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    // reset
+    packet.insertStartCode = false;
+    //printf("DONE H.264 insert start code test 2 packets\n");
+
+    //
+    // TEST statistics
+    //
+    WebRtc_UWord32 numDeltaFrames = 0;
+    WebRtc_UWord32 numKeyFrames = 0;
+    TEST(jb.GetFrameStatistics(numDeltaFrames, numKeyFrames) == 0);
+
+    TEST(numDeltaFrames == 8);
+    TEST(numKeyFrames == 1);
+
+    WebRtc_UWord32 frameRate;
+    WebRtc_UWord32 bitRate;
+    TEST(jb.GetUpdate(frameRate, bitRate) == 0);
+
+    // These depend on CPU speed; values hold on a T61.
+    TEST(frameRate > 30);
+    TEST(bitRate > 10000000);
+
+
+    jb.Flush();
+
+    //
+    // TEST packet loss. Verify missing packets statistics and not decodable
+    // packets statistics.
+    // Insert 10 frames consisting of 4 packets and remove one from all of them.
+    // The last packet is an empty (non-media) packet
+    //
+
+    // Select a start seqNum which triggers a difficult wrap situation
+    // The JB will only output (incomplete) frames if the next one has started
+    // to arrive. Start by inserting one frame (key).
+    seqNum = 0xffff - 4;
+    seqNum++;
+    timeStamp += 33*90;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.completeNALU = kNaluStart;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    for (int i = 0; i < 11; ++i) {
+      webrtc::FrameType frametype = kVideoFrameDelta;
+      seqNum++;
+      timeStamp += 33*90;
+      packet.frameType = frametype;
+      packet.isFirstPacket = true;
+      packet.markerBit = false;
+      packet.seqNum = seqNum;
+      packet.timestamp = timeStamp;
+      packet.completeNALU = kNaluStart;
+
+      frameIn = jb.GetFrame(packet);
+      TEST(frameIn != 0);
+
+      // Insert a packet into a frame
+      TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+      // Get packet notification
+      TEST(timeStamp - 33 * 90 == jb.GetNextTimeStamp(10, incomingFrameType,
+                                                      renderTimeMs));
+
+      // Check incoming frame type
+      if (i == 0)
+      {
+          TEST(incomingFrameType == kVideoFrameKey);
+      }
+      else
+      {
+          TEST(incomingFrameType == frametype);
+      }
+
+      // Get the frame
+      frameOut = jb.GetCompleteFrameForDecoding(10);
+
+      // Should not be complete
+      TEST(frameOut == 0);
+
+      seqNum += 2;
+      packet.isFirstPacket = false;
+      packet.markerBit = true;
+      packet.seqNum = seqNum;
+      packet.completeNALU = kNaluEnd;
+
+      frameIn = jb.GetFrame(packet);
+      TEST(frameIn != 0);
+
+      // Insert a packet into a frame
+      TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+
+      // Insert an empty (non-media) packet
+      seqNum++;
+      packet.isFirstPacket = false;
+      packet.markerBit = false;
+      packet.seqNum = seqNum;
+      packet.completeNALU = kNaluEnd;
+      packet.frameType = kFrameEmpty;
+
+      frameIn = jb.GetFrame(packet);
+      TEST(frameIn != 0);
+
+      // Insert a packet into a frame
+      TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+      // Get the frame
+      frameOut = jb.GetFrameForDecoding();
+
+      // One of the packets has been discarded by the jitter buffer.
+      // Last frame can't be extracted yet.
+      if (i < 10)
+      {
+          TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+          // check the frame type
+          if (i == 0)
+          {
+              TEST(frameOut->FrameType() == kVideoFrameKey);
+          }
+         else
+         {
+             TEST(frameOut->FrameType() == frametype);
+         }
+          TEST(frameOut->Complete() == false);
+          TEST(frameOut->MissingFrame() == false);
+      }
+
+      // Release frame (when done with decoding)
+      jb.ReleaseFrame(frameOut);
+    }
+
+    TEST(jb.NumNotDecodablePackets() == 10);
+
+    // Insert 3 old packets and verify that we have 3 discarded packets
+    // Match value to actual latest timestamp decoded
+    timeStamp -= 33 * 90;
+    packet.timestamp = timeStamp - 1000;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn == NULL);
+
+    packet.timestamp = timeStamp - 500;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn == NULL);
+
+    packet.timestamp = timeStamp - 100;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn == NULL);
+
+    TEST(jb.DiscardedPackets() == 3);
+
+    jb.Flush();
+
+    // This statistic shouldn't be reset by a flush.
+    TEST(jb.DiscardedPackets() == 3);
+
+    //printf("DONE Statistics\n");
+
+
+    // Temporarily do this to make the rest of the test work:
+    timeStamp += 33*90;
+    seqNum += 4;
+
+
+    //
+    // TEST delta frame 100 packets with seqNum wrap
+    //
+    //  ---------------------------------------
+    // |  65520  |  65521  | ... |  82  |  83  |
+    //  ---------------------------------------
+    //
+
+    jb.Flush();
+
+    // insert first packet
+    timeStamp += 33*90;
+    seqNum = 0xfff0;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    loop = 0;
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+        // get packet notification
+        TEST(timeStamp == jb.GetNextTimeStamp(2, incomingFrameType, renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        // get the frame
+        frameOut = jb.GetCompleteFrameForDecoding(2);
+
+        // it should not be complete
+        TEST(frameOut == 0);
+
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets with wrap in seqNum\n");
+
+    //
+    // TEST packet re-ordering reverse order with neg seqNum warp
+    //
+    //  ----------------------------------------
+    // |  65447  |  65448  | ... |   9   |  10  |
+    //  ----------------------------------------
+    //              <-------------
+
+    // test flush
+    jb.Flush();
+
+    // insert "first" packet last seqnum
+    timeStamp += 33*90;
+    seqNum = 10;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    loop = 0;
+    do
+    {
+        seqNum--;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+        // get packet notification
+        TEST(timeStamp == jb.GetNextTimeStamp(2, incomingFrameType, renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        // get the frame
+        frameOut = jb.GetCompleteFrameForDecoding(2);
+
+        // it should not be complete
+        TEST(frameOut == 0);
+
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum--;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets reverse order with wrap in seqNum \n");
+
+    // test flush
+    jb.Flush();
+
+    //
+    // TEST packet re-ordering with seqNum wrap
+    //
+    //  -----------------------
+    // |   1   | 65535 |   0   |
+    //  -----------------------
+
+    // insert "first" packet last seqnum
+    timeStamp += 33*90;
+    seqNum = 1;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert last packet
+    seqNum -= 2;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*3, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 3 packets re-ordering with wrap in seqNum \n");
+
+    // test flush
+    jb.Flush();
+
+    //
+    // TEST insert old frame
+    //
+    //   -------      -------
+    //  |   2   |    |   1   |
+    //   -------      -------
+    //  t = 3000     t = 2000
+
+    seqNum = 2;
+    timeStamp = 3000;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(3000 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(3000 == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    jb.ReleaseFrame(frameOut);
+
+    seqNum--;
+    timeStamp = 2000;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    // Changed behavior, never insert packets into frames older than the
+    // last decoded frame.
+    TEST(frameIn == 0);
+
+    //printf("DONE insert old frame\n");
+
+    jb.Flush();
+
+   //
+    // TEST insert old frame with wrap in timestamp
+    //
+    //   -------      -------
+    //  |   2   |    |   1   |
+    //   -------      -------
+    //  t = 3000     t = 0xffffff00
+
+    seqNum = 2;
+    timeStamp = 3000;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(timeStamp == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    jb.ReleaseFrame(frameOut);
+
+    seqNum--;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    // This timestamp is old
+    TEST(frameIn == 0);
+
+    jb.Flush();
+
+    //
+    // TEST wrap in timeStamp
+    //
+    //  ---------------     ---------------
+    // |   1   |   2   |   |   3   |   4   |
+    //  ---------------     ---------------
+    //  t = 0xffffff00        t = 33*90
+
+    seqNum = 1;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    jb.ReleaseFrame(frameOut);
+
+    seqNum++;
+    timeStamp += 33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE time stamp wrap 2 frames 2 packets\n");
+
+    jb.Flush();
+
+    //
+    // TEST insert 2 frames with wrap in timeStamp
+    //
+    //   -------          -------
+    //  |   1   |        |   2   |
+    //   -------          -------
+    // t = 0xffffff00    t = 2700
+
+    seqNum = 1;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert first frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(0xffffff00 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Insert next frame
+    seqNum++;
+    timeStamp = 2700;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(0xffffff00 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(0xffffff00 == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Get packet notification
+    TEST(2700 == jb.GetNextTimeStamp(0, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    VCMEncodedFrame* frameOut2 = jb.GetCompleteFrameForDecoding(10);
+    TEST(2700 == frameOut2->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut2, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut2->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+    jb.ReleaseFrame(frameOut2);
+
+    //printf("DONE insert 2 frames (1 packet) with wrap in timestamp\n");
+
+    jb.Flush();
+
+    //
+    // TEST insert 2 frames re-ordered with wrap in timeStamp
+    //
+    //   -------          -------
+    //  |   2   |        |   1   |
+    //   -------          -------
+    //  t = 2700        t = 0xffffff00
+
+    seqNum = 2;
+    timeStamp = 2700;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert first frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(2700 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Insert second frame
+    seqNum--;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(0xffffff00 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(0xffffff00 == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // get packet notification
+    TEST(2700 == jb.GetNextTimeStamp(0, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    frameOut2 = jb.GetCompleteFrameForDecoding(10);
+    TEST(2700 == frameOut2->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut2, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut2->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+    jb.ReleaseFrame(frameOut2);
+
+    //printf("DONE insert 2 frames (1 packet) re-ordered with wrap in timestamp\n");
+
+    //
+    // TEST delta frame with more than max number of packets
+    //
+
+    jb.Start();
+
+    loop = 0;
+    packet.timestamp += 33*90;
+    bool firstPacket = true;
+    // insert kMaxPacketsInJitterBuffer into frame
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert frame
+        if (firstPacket)
+        {
+            TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+            firstPacket = false;
+        }
+        else
+        {
+            TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        }
+
+        // get packet notification
+        TEST(packet.timestamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        loop++;
+    } while (loop < kMaxPacketsInSession);
+
+    // Max number of packets inserted
+
+    // Insert one more packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert the packet -> frame recycled
+    TEST(kSizeError == jb.InsertPacket(frameIn, packet));
+
+    TEST(0 == jb.GetCompleteFrameForDecoding(10));
+
+    //printf("DONE fill frame - packets > max number of packets\n");
+
+    //
+    // TEST fill JB with more than max number of frame (50 delta frames +
+    // 51 key frames) with wrap in seqNum
+    //
+    //  --------------------------------------------------------------
+    // | 65485 | 65486 | 65487 | .... | 65535 | 0 | 1 | 2 | .....| 50 |
+    //  --------------------------------------------------------------
+    // |<-----------delta frames------------->|<------key frames----->|
+
+    jb.Flush();
+
+    loop = 0;
+    seqNum = 65485;
+    WebRtc_UWord32 timeStampStart = timeStamp +  33*90;
+    WebRtc_UWord32 timeStampFirstKey = 0;
+    VCMEncodedFrame* ptrLastDeltaFrame = NULL;
+    VCMEncodedFrame* ptrFirstKeyFrame = NULL;
+    // insert MAX_NUMBER_OF_FRAMES frames
+    do
+    {
+        timeStamp += 33*90;
+        seqNum++;
+        packet.isFirstPacket = true;
+        packet.markerBit = true;
+        packet.seqNum = seqNum;
+        packet.timestamp = timeStamp;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        if (loop == 49)  // last delta
+        {
+            ptrLastDeltaFrame = frameIn;
+        }
+        if (loop == 50)  // first key
+        {
+            ptrFirstKeyFrame = frameIn;
+            packet.frameType = kVideoFrameKey;
+            timeStampFirstKey = packet.timestamp;
+        }
+
+        // Insert frame
+        TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+        // Get packet notification, should be first inserted frame
+        TEST(timeStampStart == jb.GetNextTimeStamp(10, incomingFrameType,
+                                                   renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        loop++;
+    } while (loop < kMaxNumberOfFrames);
+
+    // Max number of frames inserted
+
+    // Insert one more frame
+    timeStamp += 33*90;
+    seqNum++;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    // Now, no free frame - frames will be recycled until first key frame
+    frameIn = jb.GetFrame(packet);
+    // ptr to last inserted delta frame should be returned
+    TEST(frameIn != 0 && frameIn && ptrLastDeltaFrame);
+
+    // Insert frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // First inserted key frame should be oldest in buffer
+    TEST(timeStampFirstKey == jb.GetNextTimeStamp(10, incomingFrameType,
+                                                  renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameKey);
+
+    // get the first key frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(ptrFirstKeyFrame == frameOut);
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameKey);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    jb.Flush();
+
+    // printf("DONE fill JB - nr of delta + key frames (w/ wrap in seqNum) >
+    // max nr of frames\n");
+
+    // Testing that 1 empty packet inserted last will not be set for decoding
+    seqNum = 3;
+    // Insert one empty packet per frame, should never return the last timestamp
+    // inserted. Only return empty frames in the presence of subsequent frames.
+    int maxSize = 1000;
+    for (int i = 0; i < maxSize + 10; i++)
+    {
+        timeStamp += 33 * 90;
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+        packet.timestamp = timeStamp;
+        packet.frameType = kFrameEmpty;
+        VCMEncodedFrame* testFrame = jb.GetFrameForDecoding();
+        // timestamp should never be the last TS inserted
+        if (testFrame != NULL)
+        {
+            TEST(testFrame->TimeStamp() < timeStamp);
+            printf("Not null TS = %d\n",testFrame->TimeStamp());
+        }
+    }
+
+    jb.Flush();
+
+
+    // printf(DONE testing inserting empty packets to the JB)
+
+
+    // H.264 tests
+    // Test incomplete NALU frames
+
+    jb.Flush();
+    jb.SetNackMode(kNoNack, -1, -1);
+    seqNum ++;
+    timeStamp += 33 * 90;
+    int insertedLength = 0;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+
+    frameIn = jb.GetFrame(packet);
+
+     // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    seqNum += 2; // Skip one packet
+    packet.seqNum = seqNum;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluIncomplete;
+    packet.markerBit = false;
+
+     // Insert a packet into a frame
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+    seqNum++;
+    packet.seqNum = seqNum;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluEnd;
+    packet.markerBit = false;
+
+    // Insert a packet into a frame
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+    seqNum++;
+    packet.seqNum = seqNum;
+    packet.completeNALU = kNaluComplete;
+    packet.markerBit = true; // Last packet
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+
+    // The JB will only output (incomplete) frames if a packet belonging to a
+    // subsequent frame was already inserted. Insert one packet of a subsequent
+    // frame. place high timestamp so the JB would always have a next frame
+    // (otherwise, for every inserted frame we need to take care of the next
+    // frame as well).
+    packet.seqNum = 1;
+    packet.timestamp = timeStamp + 33 * 90 * 10;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+    frameIn = jb.GetFrame(packet);
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    frameOut = jb.GetFrameForDecoding();
+
+    // We can decode everything from a NALU until a packet has been lost.
+    // Thus we can decode the first packet of the first NALU and the second NALU
+    // which consists of one packet.
+    TEST(CheckOutFrame(frameOut, packet.sizeBytes * 2, false) == 0);
+    jb.ReleaseFrame(frameOut);
+
+    // Test reordered start frame + 1 lost
+    seqNum += 2; // Reorder 1 frame
+    timeStamp += 33*90;
+    insertedLength = 0;
+
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluEnd;
+    packet.markerBit = false;
+
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+    insertedLength += packet.sizeBytes; // This packet should be decoded
+
+    seqNum--;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    insertedLength += packet.sizeBytes; // This packet should be decoded
+
+    seqNum += 3; // One packet drop
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluComplete;
+    packet.markerBit = false;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    insertedLength += packet.sizeBytes; // This packet should be decoded
+
+    seqNum += 1;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    // This packet should be decoded since it's the beginning of a NAL
+    insertedLength += packet.sizeBytes;
+
+    seqNum += 2;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluEnd;
+    packet.markerBit = true;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    // This packet should not be decoded because it is an incomplete NAL if it
+    // is the last
+
+    frameOut = jb.GetFrameForDecoding();
+    // Only last NALU is complete
+    TEST(CheckOutFrame(frameOut, insertedLength, false) == 0);
+    jb.ReleaseFrame(frameOut);
+
+
+    // Test to insert empty packet
+    seqNum += 1;
+    timeStamp += 33 * 90;
+    VCMPacket emptypacket(data, 0, seqNum, timeStamp, true);
+    emptypacket.seqNum = seqNum;
+    emptypacket.timestamp = timeStamp;
+    emptypacket.frameType = kVideoFrameKey;
+    emptypacket.isFirstPacket = true;
+    emptypacket.completeNALU = kNaluComplete;
+    emptypacket.markerBit = true;
+    TEST(frameIn = jb.GetFrame(emptypacket));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, emptypacket));
+    // This packet should not be decoded because it is an incomplete NAL if it
+    // is the last
+    insertedLength += 0;
+
+    // Will be sent to the decoder, as a packet belonging to a subsequent frame
+    // has arrived.
+    frameOut = jb.GetFrameForDecoding();
+
+
+    // Test that a frame can include an empty packet.
+    seqNum += 1;
+    timeStamp += 33 * 90;
+
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.completeNALU = kNaluComplete;
+    packet.markerBit = false;
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    seqNum += 1;
+    emptypacket.seqNum = seqNum;
+    emptypacket.timestamp = timeStamp;
+    emptypacket.frameType = kVideoFrameKey;
+    emptypacket.isFirstPacket = true;
+    emptypacket.completeNALU = kNaluComplete;
+    emptypacket.markerBit = true;
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, emptypacket));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    // Only last NALU is complete
+    TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0);
+
+    jb.ReleaseFrame(frameOut);
+
+    jb.Flush();
+
+    // Test that a we cannot get incomplete frames from the JB if we haven't
+    // received the marker bit, unless we have received a packet from a later
+    // timestamp.
+
+    packet.seqNum += 2;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = false;
+
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    frameOut = jb.GetFrameForDecoding();
+    TEST(frameOut == NULL);
+
+    packet.seqNum += 2;
+    packet.timestamp += 33 * 90;
+
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    frameOut = jb.GetFrameForDecoding();
+
+    TEST(frameOut != NULL);
+    TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0);
+    jb.ReleaseFrame(frameOut);
+
+    jb.Stop();
+
+    printf("DONE !!!\n");
+
+    printf("\nVCM Jitter Buffer Test: \n\n%i tests completed\n",
+           vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+
+    return 0;
+
+}
diff --git a/trunk/src/modules/video_coding/main/test/jitter_estimate_test.cc b/trunk/src/modules/video_coding/main/test/jitter_estimate_test.cc
new file mode 100644
index 0000000..ac74a8a
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/jitter_estimate_test.cc
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <ctime>
+#include "JitterEstimateTest.h"
+
+using namespace webrtc;
+
// Constructs a frame-sample generator targeting `frameRate` frames per
// second. Defaults: capacity 2000, nominal rate 500 (units not stated
// here — presumably bytes/ms and kbps; confirm against the callers),
// jitter N(5, 0), one key frame per second, delta-frame sizes
// N(10000, 1e6).
JitterEstimateTest::JitterEstimateTest(unsigned int frameRate) :
_frameRate(frameRate),
_capacity(2000),
_rate(500),
_jitter(5, 0),
_keyFrameRate(1.0),
_deltaFrameSize(10000, 1e6),
_counter(0),
_lossrate(0.0)
{
    // Seed the RNG with wall-clock time so each run produces a different
    // (but per-run reproducible, via _seed) sequence.
    _seed = static_cast<unsigned>(std::time(0));
    std::srand(_seed);
    // Start the RTP timestamp at a random point in the full 32-bit range
    // so long runs exercise timestamp wrap-around.
    _prevTimestamp = static_cast<unsigned int>((std::rand() + 1.0)/(RAND_MAX + 1.0)*(pow((float) 2, (long) sizeof(unsigned int)*8)-1));
    _prevWallClock = VCMTickTime::MillisecondTimestamp();
}
+
// Generates the next simulated frame sample: advances the RTP timestamp
// by one frame interval (90 kHz units), draws a random frame size,
// decides via _counter whether this frame is a key frame (key frames get
// ~3x the average delta size added), and computes the arrival wall-clock
// time as the ideal time plus random jitter plus serialization delay
// (frameSize / _capacity). With probability _lossrate the sample is
// flagged as resent.
FrameSample
JitterEstimateTest::GenerateFrameSample()
{
    double increment = 1.0/_frameRate;  // Frame interval in seconds.
    unsigned int frameSize = static_cast<unsigned int>(_deltaFrameSize.RandValue());
    bool keyFrame = false;
    bool resent = false;
    // 90000 RTP ticks per second; round to the nearest tick.
    _prevTimestamp += static_cast<unsigned int>(90000*increment + 0.5);
    double deltaFrameRate = _frameRate - _keyFrameRate;
    double ratio = deltaFrameRate/static_cast<double>(_keyFrameRate);
    if (ratio < 1.0)
    {
        // Key frames requested more often than delta frames.
        // NOTE(review): in this branch every frame EXCEPT the periodic
        // reset becomes a key frame — the counter test looks inverted
        // relative to the else-branch; confirm this is intended.
        ratio = 1.0/ratio;
        if (_counter >= ratio)
            _counter = 0;
        else
        {
            _counter++;
            frameSize += static_cast<unsigned int>(3*_deltaFrameSize.GetAverage());
            keyFrame = true;
        }
    }
    else
    {
        // Roughly every ratio-th frame is promoted to a key frame.
        if (_counter >= ratio)
        {
            frameSize += static_cast<unsigned int>(3*_deltaFrameSize.GetAverage());
            _counter = 0;
            keyFrame = true;
        }
        else
            _counter++;
    }
    // Arrival delay in ms: random jitter plus transmission time.
    WebRtc_Word64 jitter =  static_cast<WebRtc_Word64>(_jitter.RandValue() + 1.0/_capacity * frameSize + 0.5);
    // Ideal (jitter-free) arrival time advances by one frame interval.
    _prevWallClock += static_cast<WebRtc_Word64>(1000*increment + 0.5);
    double rndValue = RandUniform();
    resent = (rndValue < _lossrate);
    //printf("rndValue = %f\n", rndValue);
    return FrameSample(_prevTimestamp, _prevWallClock + jitter, frameSize, keyFrame, resent);
}
+
// Sets the simulated channel capacity used for the per-frame
// transmission-delay term in GenerateFrameSample().
void
JitterEstimateTest::SetCapacity(unsigned int c)
{
    _capacity = c;
}
+
// Sets the nominal send rate. NOTE(review): _rate is stored but never
// read anywhere in this file — verify it is consumed elsewhere.
void
JitterEstimateTest::SetRate(unsigned int r)
{
    _rate = r;
}
+
// Sets mean (m) and variance (v) of the random network-jitter term,
// in milliseconds (it is added directly to the ms wall-clock time).
void
JitterEstimateTest::SetJitter(double m, double v)
{
    _jitter.SetParams(m, v);
}
+
// Sets mean (m) and variance (v) of the delta-frame size distribution.
void
JitterEstimateTest::SetFrameSizeStats(double m, double v)
{
    _deltaFrameSize.SetParams(m, v);
}
+
// Sets the target key-frame rate (frames per second). The int argument
// is widened to the double member _keyFrameRate.
void
JitterEstimateTest::SetKeyFrameRate(int rate)
{
    _keyFrameRate = rate;
}
+
// Sets the probability in [0, 1] that a generated frame is flagged as
// resent (compared against a uniform draw in GenerateFrameSample()).
void
JitterEstimateTest::SetLossRate(double rate)
{
    _lossrate = rate;
}
diff --git a/trunk/src/modules/video_coding/main/test/jitter_estimate_test.h b/trunk/src/modules/video_coding/main/test/jitter_estimate_test.h
new file mode 100644
index 0000000..cd7338a
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/jitter_estimate_test.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_
+
+#include "typedefs.h"
+#include "jitter_buffer.h"
+#include "jitter_estimator.h"
+#include <cstdlib>
+#include <cmath>
+
// Pi, computed portably via atan.
double const pi = 4*std::atan(1.0);

// Normally distributed random-number source, parameterized by mean and
// variance. Draws come from std::rand(), so sequences are reproducible
// given the same srand() seed.
class GaussDist
{
public:
    // m is the mean; v is the variance (stddev is sqrt(v)).
    GaussDist(double m, double v): _mean(m), _stdDev(sqrt(v)) {}

    // Returns one normally distributed value (Box-Muller transform).
    double RandValue()
    {
        // Both uniforms lie in (0, 1], which keeps log() finite.
        double u1 = (std::rand() + 1.0)/(RAND_MAX + 1.0);
        double u2 = (std::rand() + 1.0)/(RAND_MAX + 1.0);
        double radius = std::sqrt(-2*std::log(u1));
        return _mean + _stdDev * radius * std::cos(2*pi*u2);
    }

    // Current mean of the distribution.
    double GetAverage()
    {
        return _mean;
    }

    // Current variance of the distribution.
    double GetVariance()
    {
        return _stdDev*_stdDev;
    }

    // Reconfigures the distribution: m is the new mean, v the new variance.
    void SetParams(double m, double v)
    {
        _mean = m;
        _stdDev = sqrt(v);
    }

private:
    double _mean;
    double _stdDev;
};
+
// Test shim exposing internals of VCMJitterEstimator for inspection.
// NOTE(review): _theta[0] and _varNoise are members of the base class;
// presumably the Kalman slope estimate and the noise-variance estimate —
// confirm against VCMJitterEstimator's declaration.
class JitterEstimateTestWrapper : public webrtc::VCMJitterEstimator
{
public:
    JitterEstimateTestWrapper() : VCMJitterEstimator() {}
    // First component of the estimator's state vector.
    double GetTheta() { return _theta[0]; }
    // Estimator's noise-variance value.
    double GetVarNoise() { return _varNoise; }
};
+
+class FrameSample
+{
+public:
+    FrameSample() {FrameSample(0, 0, 0, false, false);}
+    FrameSample(unsigned int ts, WebRtc_Word64 wallClk, unsigned int fs, bool _keyFrame, bool _resent):
+      timestamp90Khz(ts), wallClockMs(wallClk), frameSize(fs), keyFrame(_keyFrame), resent(_resent) {}
+
+    unsigned int timestamp90Khz;
+    WebRtc_Word64 wallClockMs;
+    unsigned int frameSize;
+    bool keyFrame;
+    bool resent;
+};
+
// Generates a stream of simulated frame samples (timestamps, sizes,
// jittered arrival times) for exercising a jitter estimator under test.
class JitterEstimateTest
{
public:
    // frameRate: target frame rate in frames per second.
    JitterEstimateTest(unsigned int frameRate);
    // Produces the next simulated frame.
    FrameSample GenerateFrameSample();
    // Channel capacity used for the per-frame transmission delay.
    void SetCapacity(unsigned int c);
    // Nominal send rate. NOTE(review): stored but not read in this class.
    void SetRate(unsigned int r);
    // Mean/variance of the random network jitter (milliseconds).
    void SetJitter(double m, double v);
    // Mean/variance of the delta-frame size distribution.
    void SetFrameSizeStats(double m, double v);
    // Target key-frame rate (frames per second).
    void SetKeyFrameRate(int rate);
    // Probability in [0, 1] that a frame is flagged as resent.
    void SetLossRate(double rate);

private:
    // Uniform draw in (0, 1].
    double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
    unsigned int _frameRate;       // Target frame rate (fps).
    unsigned int _capacity;        // Simulated channel capacity.
    unsigned int _rate;            // Nominal send rate (unused here).
    GaussDist _jitter;             // Random jitter source (ms).
    //GaussDist _noResend;
    GaussDist _deltaFrameSize;     // Delta-frame size distribution.
    unsigned int _prevTimestamp;   // Last RTP timestamp (90 kHz).
    WebRtc_Word64 _prevWallClock;  // Last ideal arrival time (ms).
    unsigned int _nextDelay;       // NOTE(review): never referenced in this file.
    double _keyFrameRate;          // Key frames per second.
    unsigned int _counter;         // Frames since the last key frame.
    unsigned int _seed;            // RNG seed (wall-clock time at ctor).
    double _lossrate;              // Resend probability.
};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/test/media_opt_test.cc b/trunk/src/modules/video_coding/main/test/media_opt_test.cc
new file mode 100644
index 0000000..f767919
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/media_opt_test.cc
@@ -0,0 +1,560 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation of Media Optimization Test
+// Testing is done via the VCM module; no media-optimization functionality is exercised directly.
+
+#include "media_opt_test.h"
+
+#include <string.h>
+#include <stdio.h>
+#include <time.h>
+#include <vector>
+
+#include "../source/event.h"
+#include "receiver_tests.h" // receive side callbacks
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "test_util.h" // send side callback
+#include "testsupport/metrics/video_metrics.h"
+#include "video_coding.h"
+
+
+using namespace webrtc;
+
+// Static entry point: builds the VCM and test object, dispatches on the
+// requested mode, and tears everything down afterwards.
+int MediaOptTest::RunTest(int testNum, CmdArgs& args)
+{
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "mediaOptTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    TickTimeBase clock;
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    MediaOptTest* motTest = new MediaOptTest(vcm, &clock);
+    switch (testNum)
+    {
+    case 0:  // regular single run
+        motTest->Setup(0, args);
+        motTest->GeneralSetup();
+        motTest->Perform();
+        motTest->Print(1); // print to screen as well
+        motTest->TearDown();
+        break;
+    case 1:  // release test
+        motTest->Setup(0, args);
+        motTest->RTTest();
+        break;
+    case 2:  // release test, running from script
+        motTest->Setup(1, args);
+        motTest->GeneralSetup();
+        motTest->Perform();
+        motTest->Print(1); // print to screen as well
+        motTest->TearDown();
+        break;
+    }
+
+    VideoCodingModule::Destroy(vcm);
+    delete motTest;
+    Trace::ReturnTrace();
+    return 0;
+}
+
+
+// Construct with an externally owned VCM and clock; the RTP module is
+// created here and destroyed in the destructor.
+MediaOptTest::MediaOptTest(VideoCodingModule* vcm, TickTimeBase* clock):
+_vcm(vcm),
+_clock(clock),
+_width(0),
+_height(0),
+_lengthSourceFrame(0),
+_timeStamp(0),
+_frameRate(30.0f),
+_nackEnabled(false),
+_fecEnabled(false),
+_rttMS(0),
+_bitRate(300.0f),
+_lossRate(0.0f),
+_renderDelayMs(0),
+_frameCnt(0),
+_sumEncBytes(0),
+_numFramesDropped(0),
+_numberOfCores(4)
+{
+    // NOTE(review): _nackFecEnabled, _testType, _testNum, _numParRuns and
+    // the FILE* members are not initialized here; Setup() assigns them
+    // only on the code paths that use them.
+    _rtp = RtpRtcp::CreateRtpRtcp(1, false);
+}
+
+MediaOptTest::~MediaOptTest()
+{
+    // NOTE(review): after RTTest() completes, _rtp has been destroyed and
+    // set to NULL; this assumes DestroyRtpRtcp() tolerates NULL -- confirm.
+    RtpRtcp::DestroyRtpRtcp(_rtp);
+}
+void
+MediaOptTest::Setup(int testType, CmdArgs& args)
+{
+    // Configure one test run from the command-line arguments.
+    // testType 0: one-shot run. testType 1: scripted multi-run mode that
+    // chains the (bit rate, loss rate) state between processes through
+    // the "dat_inp" file.
+    _inname = args.inputFile;
+    if (args.outputFile == "")
+        _outname = test::OutputPath() + "MOTest_out.vp8";
+    else
+        _outname = args.outputFile;
+    // actual source after frame dropping (PSNR reference)
+    _actualSourcename = test::OutputPath() + "MOTestSource.yuv";
+    _codecName = args.codecName;
+    _sendCodecType = args.codecType;
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _bitRate = args.bitRate;
+    _numberOfCores = 4;
+
+    // error resilience
+    _nackEnabled = false;
+    _fecEnabled = true;
+    _nackFecEnabled = false;
+
+    _rttMS = 100;
+    _lossRate = 0.00*255; // no packet loss
+
+    _testType = testType;
+
+    // For multiple runs driven by a script: read the previous run's state
+    // from "dat_inp" and advance the (bit rate, loss rate) pair.
+    if (_testType == 1)
+    {
+        float rateTest,lossTest;
+        int numRuns;
+        _fpinp = fopen("dat_inp","rb");
+        _fpout = fopen("test_runs/dat_out","ab");
+        _fpout2 = fopen("test_runs/dat_out2","ab");
+        // The original dereferenced these without checking; fail early
+        // with a message instead of crashing inside fscanf/fprintf.
+        if (_fpinp == NULL || _fpout == NULL || _fpout2 == NULL)
+        {
+            printf("Cannot open script-mode files (dat_inp, test_runs/dat_out*).\n");
+            exit(1);
+        }
+        TEST(fscanf(_fpinp,"%f %f %d \n",&rateTest,&lossTest,&numRuns) > 0);
+        _bitRate = rateTest;
+        _lossRate = lossTest;
+        _testNum = 0;
+
+        // for bit rates: 500, 1000, 2000, 3000, 4000
+        // for loss rates: 0, 1, 3, 5, 10%
+        _numParRuns = 25;
+
+        _testNum = numRuns + 1;
+        // Step the loss rate only once the final bit rate (4000) has been
+        // reached; the bit rate itself is stepped below.
+        if (rateTest == 0.0) _lossRate = 0.0;
+        else
+        {
+            if (rateTest == 4000)  //final bit rate
+            {
+                if (lossTest == 0.1*255) _lossRate = 0.0;  //wrap back to 0%
+                else
+                    if (lossTest == 0.05*255) _lossRate = 0.1*255;  //final loss rate
+                    else
+                        if (lossTest == 0.0) _lossRate = 0.01*255;
+                        else _lossRate = lossTest + 0.02*255;
+            }
+        }
+
+        if (rateTest == 0.0 || rateTest == 4000) _bitRate = 500; //starting bit rate
+        else
+            if (rateTest == 500) _bitRate = 1000;
+                else _bitRate = rateTest +  1000;
+    }
+
+    _renderDelayMs = 0;
+    /* test settings end*/
+
+    // I420: 1.5 bytes per pixel.
+    _lengthSourceFrame  = 3*_width*_height/2;
+    _log.open((test::OutputPath() + "VCM_MediaOptLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    return;
+}
+
+void
+MediaOptTest::GeneralSetup()
+{
+    // Common set-up for all modes: open the I/O files, initialize the RTP
+    // and VCM modules, and register payloads/codecs on both sides.
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(), "wb")) == NULL)
+    {
+        // Fix: message said "Cannot read" for a write-mode open.
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    if ((_actualSourceFile = fopen(_actualSourcename.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _actualSourcename.c_str());
+        exit(1);
+    }
+
+    if (_rtp->InitReceiver() < 0)
+    {
+        exit(1);
+    }
+    if (_rtp->InitSender() < 0)
+    {
+        exit(1);
+    }
+    if (_vcm->InitializeReceiver() < 0)
+    {
+        exit(1);
+    }
+    if (_vcm->InitializeSender())
+    {
+        exit(1);
+    }
+
+    // Register receive and send payloads with the RTP module.
+    // Zero-initialize first: the original set only plName/plType and
+    // passed the remaining VideoCodec fields uninitialized.
+    VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(videoCodec));
+    strncpy(videoCodec.plName, "VP8", 32);
+    videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
+    _rtp->RegisterReceivePayload(videoCodec);
+    _rtp->RegisterSendPayload(videoCodec);
+
+    strncpy(videoCodec.plName, "ULPFEC", 32);
+    videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+    _rtp->RegisterReceivePayload(videoCodec);
+    _rtp->RegisterSendPayload(videoCodec);
+
+    strncpy(videoCodec.plName, "RED", 32);
+    videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
+    _rtp->RegisterReceivePayload(videoCodec);
+    _rtp->RegisterSendPayload(videoCodec);
+
+    // Enable generic FEC at the RTP level (with or without NACK).
+    if (_nackFecEnabled == 1)
+        _rtp->SetGenericFECStatus(_nackFecEnabled, VCM_RED_PAYLOAD_TYPE,
+                VCM_ULPFEC_PAYLOAD_TYPE);
+    else
+        _rtp->SetGenericFECStatus(_fecEnabled, VCM_RED_PAYLOAD_TYPE,
+                VCM_ULPFEC_PAYLOAD_TYPE);
+
+    // VCM: registering codecs.
+    // NOTE(review): InitializeSender/InitializeReceiver were already
+    // called (and error-checked) above; the repeated calls below look
+    // redundant but are kept to preserve the original behavior.
+    VideoCodec sendCodec;
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+    WebRtc_Word32 numberOfCodecs = _vcm->NumberOfCodecs();
+    if (numberOfCodecs < 1)
+    {
+        exit(1);
+    }
+
+    if (_vcm->Codec(_sendCodecType, &sendCodec) != 0)
+    {
+        printf("Unknown codec\n");
+        exit(1);
+    }
+    // register codec with the run's dimensions/rates
+    sendCodec.startBitrate = (int) _bitRate;
+    sendCodec.height = _height;
+    sendCodec.width = _width;
+    sendCodec.maxFramerate = (WebRtc_UWord8)_frameRate;
+    _vcm->RegisterSendCodec(&sendCodec, _numberOfCores, 1440);
+    _vcm->RegisterReceiveCodec(&sendCodec, _numberOfCores); // same settings for encode and decode
+
+    _vcm->SetRenderDelay(_renderDelayMs);
+    _vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    return;
+}
+// The following test shall be conducted under release tests
+
+
+
+WebRtc_Word32
+MediaOptTest::Perform()
+{
+    // Encode -> RTP -> decode loop over the whole input sequence.
+    // Collects encoded-byte statistics, counts dropped frames, and writes
+    // every frame that actually produced encoded bytes to
+    // _actualSourceFile (the PSNR reference used by Print()).
+    EventWrapper* waitEvent = EventWrapper::Create();
+
+    // callback settings
+    VCMRTPEncodeCompleteCallback* encodeCompleteCallback = new VCMRTPEncodeCompleteCallback(_rtp);
+    _vcm->RegisterTransportCallback(encodeCompleteCallback);
+    encodeCompleteCallback->SetCodecType(ConvertCodecType(_codecName.c_str()));
+    encodeCompleteCallback->SetFrameDimensions(_width, _height);
+    // frame ready to be sent to network
+    RTPSendCompleteCallback* outgoingTransport =
+        new RTPSendCompleteCallback(_rtp, _clock);
+    _rtp->RegisterSendTransport(outgoingTransport);
+    // FrameReceiveCallback
+    VCMDecodeCompleteCallback receiveCallback(_decodedFile);
+    RtpDataCallback dataCallback(_vcm);
+    _rtp->RegisterIncomingDataCallback(&dataCallback);
+
+    VideoProtectionCallback  protectionCallback;
+    protectionCallback.RegisterRtpModule(_rtp);
+    _vcm->RegisterProtectionCallback(&protectionCallback);
+
+    // set error resilience / test parameters:
+    outgoingTransport->SetLossPct(_lossRate);
+    if (_nackFecEnabled == 1)
+        _vcm->SetVideoProtection(kProtectionNackFEC, _nackFecEnabled);
+    else
+    {
+        _vcm->SetVideoProtection(kProtectionNack, _nackEnabled);
+        _vcm->SetVideoProtection(kProtectionFEC, _fecEnabled);
+    }
+
+    // START TEST
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, (WebRtc_UWord8)_lossRate, _rttMS);
+    _vcm->RegisterReceiveCallback(&receiveCallback);
+
+    _frameCnt  = 0;
+    _sumEncBytes = 0.0;
+    _numFramesDropped = 0;
+
+    while (feof(_sourceFile)== 0)
+    {
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+        _frameCnt++;
+
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        // Advance the RTP timestamp by one frame interval (90 kHz clock).
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+
+        WebRtc_Word32 ret = _vcm->Decode();
+        if (ret < 0 )
+        {
+            TEST(ret == 0);
+            printf ("Decode error in frame # %d",_frameCnt);
+        }
+
+        // Zero encoded bytes means the encoder dropped this frame; only
+        // frames that were actually encoded go into the PSNR reference.
+        float encBytes = encodeCompleteCallback->EncodedBytes();
+        if (encBytes == 0)
+        {
+            _numFramesDropped += 1;
+        }
+        else
+        {
+            // write frame to file
+            fwrite(sourceFrame.Buffer(), 1, sourceFrame.Length(), _actualSourceFile);
+        }
+
+        _sumEncBytes += encBytes;
+    }
+
+    // END TEST
+    delete waitEvent;
+    delete encodeCompleteCallback;
+    delete outgoingTransport;
+    // Fix: tmpBuffer was allocated with new[]; the original used
+    // 'delete tmpBuffer', which is undefined behavior for arrays.
+    delete [] tmpBuffer;
+
+    return 0;
+}
+
+void
+MediaOptTest::RTTest()
+{
+    // Release test: sweeps every (bit rate, loss/FEC configuration) pair
+    // over each input sequence, recreating the RTP module and re-running
+    // GeneralSetup()/Perform()/TearDown() for each combination.
+    // Only PSNR is computed; one shared output file is reused by all runs.
+    // SET UP
+    // Set bit rates
+    const float bitRateVec[] = {500, 1000, 2000,3000, 4000};
+    //const float bitRateVec[] = {1000};
+    // Set Packet loss values ([0,255])
+    const double lossPctVec[]     = {0.0*255, 0.0*255, 0.01*255, 0.01*255, 0.03*255, 0.03*255, 0.05*255, 0.05*255, 0.1*255, 0.1*255};
+    const bool  nackEnabledVec[] = {false  , false, false, false, false, false, false, false , false, false};
+    const bool  fecEnabledVec[]  = {false  , true,  false, true , false, true , false, true , false, true};
+    // fec and nack are set according to the packet loss values
+
+    // Element counts of the vectors above. NOTE(review): float works here
+    // because the counts are small integers, but int would be the natural
+    // type for the loop bounds below.
+    const float nBitrates = sizeof(bitRateVec)/sizeof(*bitRateVec);
+    const float nlossPct = sizeof(lossPctVec)/sizeof(*lossPctVec);
+
+    std::vector<const VideoSource*> sources;
+    std::vector<const VideoSource*>::iterator it;
+
+    sources.push_back(new const VideoSource(_inname, _width, _height));
+    int numOfSrc = 1;
+
+    // constant settings (valid for entire run time)
+    _rttMS = 20;
+    _renderDelayMs = 0;
+
+    // same out name for all
+    _outname = test::OutputPath() + "RTMOTest_out.yuv";
+    // actual source after frame dropping
+    _actualSourcename = test::OutputPath() + "RTMOTestSource.yuv";
+
+    _codecName = "VP8";  // for now just this one - later iterate over all codec types
+    _log.open((test::OutputPath() + "/VCM_RTMediaOptLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    _outputRes=fopen((test::OutputPath() + "VCM_MediaOptResults.txt").c_str(),
+                     "ab");
+
+    /* test settings end*/
+
+    // START TEST
+    // iterate over test sequences
+    printf("\n****START TEST OVER ALL RUNS ****\n");
+    int runCnt = 0;
+    for (it = sources.begin() ; it < sources.end(); it++)
+    {
+
+        // test set up: take dimensions/frame rate from the source
+        _inname = (*it)->GetFileName();
+        _width  = (*it)->GetWidth();
+        _height = (*it)->GetHeight();
+        _lengthSourceFrame  = 3*_width*_height/2;
+        _frameRate = (*it)->GetFrameRate();
+
+        // iterate over all bit rates
+        for (int i = 0; i < nBitrates; i++)
+        {
+           _bitRate = static_cast<float>(bitRateVec[i]);
+            // iterate over all packet loss values
+            for (int j = 0; j < nlossPct; j++)
+            {
+                 _lossRate = static_cast<float>(lossPctVec[j]);
+                 _nackEnabled = static_cast<bool>(nackEnabledVec[j]);
+                 _fecEnabled = static_cast<bool>(fecEnabledVec[j]);
+
+                 runCnt++;
+                 printf("run #%d out of %d \n", runCnt,(int)(nlossPct*nBitrates*numOfSrc));
+
+                 // Fresh RTP module per run so no state leaks between
+                 // combinations; GeneralSetup()/TearDown() reopen and
+                 // close the I/O files each time.
+                 if (_rtp != NULL)
+                 {
+                     RtpRtcp::DestroyRtpRtcp(_rtp);
+                 }
+                 _rtp = RtpRtcp::CreateRtpRtcp(1, false);
+                 GeneralSetup();
+                 Perform();
+                 Print(1);
+                 TearDown();
+                 RtpRtcp::DestroyRtpRtcp(_rtp);
+                 // NOTE(review): leaves _rtp NULL; the destructor then
+                 // calls DestroyRtpRtcp(NULL) -- assumed safe, confirm.
+                 _rtp = NULL;
+
+                 printf("\n");
+
+            }// end of packet loss loop
+        }// end of bit rate loop
+        delete *it;
+    }// end of video sequence loop
+    // at end of sequence
+    fclose(_outputRes);
+    printf("\nVCM Media Optimization Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+
+
+void
+MediaOptTest::Print(int mode)
+{
+    // Report the results of the last Perform() run: actual vs. target bit
+    // rate, error-resilience settings, dropped frames and PSNR. Always
+    // writes to the log; testType 1/2 also append to the script-mode
+    // result files; mode == 1 additionally prints to the screen.
+    // bits = 8 * bytes; duration = frames / fps; result in bits/s.
+    double ActualBitRate =  8.0 *( _sumEncBytes / (_frameCnt / _frameRate));
+    double actualBitRate = ActualBitRate / 1000.0; // kbps
+    webrtc::test::QualityMetricsResult psnr;
+    I420PSNRFromFiles(_actualSourcename.c_str(), _outname.c_str(), _width,
+                      _height, &psnr);
+
+    (_log) << "VCM: Media Optimization Test Cycle Completed!" << std::endl;
+    (_log) << "Input file: " << _inname << std::endl;
+    (_log) << "Output file:" << _outname << std::endl;
+    ( _log) << "Actual bitrate: " << actualBitRate<< " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    (_log) << "Error Reslience: NACK:" << _nackEnabled << "; FEC: " << _fecEnabled << std::endl;
+    (_log) << "Packet Loss applied= %f " << _lossRate << std::endl;
+    (_log) << _numFramesDropped << " FRames were dropped" << std::endl;
+     ( _log) << "PSNR: " << psnr.average << std::endl;
+    (_log) << std::endl;
+
+    // Script mode 2: append to the results file opened by RTTest().
+    if (_testType == 2)
+    {
+        fprintf(_outputRes,"************\n");
+        fprintf(_outputRes,"\n\n\n");
+        fprintf(_outputRes,"Actual bitrate: %f kbps\n", actualBitRate);
+        fprintf(_outputRes,"Target bitrate: %f kbps\n", _bitRate);
+        fprintf(_outputRes,"NACK: %s  ",(_nackEnabled)?"true":"false");
+        fprintf(_outputRes,"FEC: %s \n ",(_fecEnabled)?"true":"false");
+        fprintf(_outputRes,"Packet loss applied = %f\n", _lossRate);
+        fprintf(_outputRes,"%d frames were dropped, and total number of frames processed %d  \n",_numFramesDropped,_frameCnt);
+        fprintf(_outputRes,"PSNR: %f \n", psnr.average);
+        fprintf(_outputRes,"************\n");
+    }
+
+    // Script mode 1: append to dat_out/dat_out2, then rewrite dat_inp
+    // with the (bit rate, loss rate, run number) state for the next run.
+    if (_testType == 1)
+    {
+        fprintf(_fpout,"************\n");
+        fprintf(_fpout,"\n\n\n");
+        fprintf(_fpout,"Actual bitrate: %f kbps\n", actualBitRate);
+        fprintf(_fpout,"Target bitrate: %f kbps\n", _bitRate);
+        fprintf(_fpout,"NACK: %s  ",(_nackEnabled)?"true":"false");
+        fprintf(_fpout,"FEC: %s \n ",(_fecEnabled)?"true":"false");
+        fprintf(_fpout,"Packet loss applied = %f\n", _lossRate);
+        fprintf(_fpout,"%d frames were dropped, and total number of frames processed %d  \n",_numFramesDropped,_frameCnt);
+        fprintf(_fpout,"PSNR: %f \n", psnr.average);
+        fprintf(_fpout,"************\n");
+
+        // Decompose the linear run number into (bit-rate index, loss index).
+        int testNum1 = _testNum/(_numParRuns +1) + 1;
+        int testNum2 = _testNum%_numParRuns;
+        if (testNum2 == 0) testNum2 = _numParRuns;
+        fprintf(_fpout2,"%d %d %f %f %f %f \n",testNum1,testNum2,_bitRate,actualBitRate,_lossRate,psnr.average);
+        fclose(_fpinp);
+        _fpinp = fopen("dat_inp","wb");
+        fprintf(_fpinp,"%f %f %d \n",_bitRate,_lossRate,_testNum);
+    }
+
+    if (mode == 1)
+    {
+        // print to screen
+        printf("\n\n\n");
+        printf("Actual bitrate: %f kbps\n", actualBitRate);
+        printf("Target bitrate: %f kbps\n", _bitRate);
+        printf("NACK: %s  ",(_nackEnabled)?"true":"false");
+        printf("FEC: %s \n",(_fecEnabled)?"true":"false");
+        printf("Packet loss applied = %f\n", _lossRate);
+        printf("%d frames were dropped, and total number of frames processed %d  \n",_numFramesDropped,_frameCnt);
+        printf("PSNR: %f \n", psnr.average);
+    }
+    TEST(psnr.average > 10); // low because of possible frame dropping (need to verify that OK for all packet loss values/ rates)
+}
+
+void
+MediaOptTest::TearDown()
+{
+    // Close the log and the per-run files opened by GeneralSetup().
+    // NOTE(review): the script-mode files (_fpinp/_fpout/_fpout2) opened
+    // in Setup(testType == 1) are not closed here.
+    _log.close();
+    fclose(_sourceFile);
+    fclose(_decodedFile);
+    fclose(_actualSourceFile);
+    return;
+}
diff --git a/trunk/src/modules/video_coding/main/test/media_opt_test.h b/trunk/src/modules/video_coding/main/test/media_opt_test.h
new file mode 100644
index 0000000..7d4e226
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/media_opt_test.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// VCM Media Optimization Test
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_MEDIA_OPT_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_MEDIA_OPT_TEST_H_
+
+
+#include <string>
+
+#include "rtp_rtcp.h"
+#include "test_util.h"
+#include "video_coding.h"
+#include "video_source.h"
+
+// media optimization test
+// This test simulates a complete encode-decode cycle via the RTP module.
+// allows error resilience tests, packet loss tests, etc.
+// Does not test the media optimization directly, but via the VCM API only.
+// The test allows two modes:
+// 1 - Standard, basic settings, one run
+// 2 - Release test - iterates over a number of video sequences, bit rates, packet loss values, etc.
+
+class MediaOptTest
+{
+public:
+    MediaOptTest(webrtc::VideoCodingModule* vcm,
+                 webrtc::TickTimeBase* clock);
+    ~MediaOptTest();
+
+    static int RunTest(int testNum, CmdArgs& args);
+    // perform encode-decode of an entire sequence
+    WebRtc_Word32 Perform();
+    // Set up for a single mode test
+    void Setup(int testType, CmdArgs& args);
+    // General set up - applicable for both modes
+    void GeneralSetup();
+    // Run release testing
+    void RTTest();
+    void TearDown();
+    // mode = 1; will print to screen, otherwise only to log file
+    void Print(int mode);
+
+private:
+
+    webrtc::VideoCodingModule*       _vcm;
+    webrtc::RtpRtcp*                 _rtp;
+    webrtc::TickTimeBase*            _clock;
+    std::string                      _inname;
+    std::string                      _outname;
+    std::string                      _actualSourcename;
+    std::fstream                     _log;
+    FILE*                            _sourceFile;
+    FILE*                            _decodedFile;
+    FILE*                            _actualSourceFile;
+    FILE*                            _outputRes;
+    WebRtc_UWord16                   _width;
+    WebRtc_UWord16                   _height;
+    WebRtc_UWord32                   _lengthSourceFrame;
+    WebRtc_UWord32                   _timeStamp;
+    float                            _frameRate;
+    bool                             _nackEnabled;
+    bool                             _fecEnabled;
+    bool                             _nackFecEnabled;
+    WebRtc_UWord8                    _rttMS;
+    float                            _bitRate;
+    double                           _lossRate;
+    WebRtc_UWord32                   _renderDelayMs;
+    WebRtc_Word32                    _frameCnt;
+    float                            _sumEncBytes;
+    WebRtc_Word32                    _numFramesDropped;
+    std::string                      _codecName;
+    webrtc::VideoCodecType           _sendCodecType;
+    WebRtc_Word32                    _numberOfCores;
+
+    //for release test#2
+    FILE*                            _fpinp;
+    FILE*                            _fpout;
+    FILE*                            _fpout2;
+    int                              _testType;
+    int                              _testNum;
+    int                              _numParRuns;
+
+}; // end of MediaOptTest class definition
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_MEDIA_OPT_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/test/mt_rx_tx_test.cc b/trunk/src/modules/video_coding/main/test/mt_rx_tx_test.cc
new file mode 100644
index 0000000..d0a39ad
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/mt_rx_tx_test.cc
@@ -0,0 +1,362 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*************************************************
+ *
+ * Testing multi thread - receive and send sides
+ *
+ **************************************************/
+
+#include <string.h>
+
+#include "../source/event.h"
+#include "media_opt_test.h"
+#include "mt_test_common.h"
+#include "receiver_tests.h" // shared RTP state and receive side threads
+#include "rtp_rtcp.h"
+#include "test_macros.h"
+#include "test_util.h" // send side callback
+#include "thread_wrapper.h"
+#include "video_coding.h"
+
+using namespace webrtc;
+
+// Sender thread body, invoked repeatedly by ThreadWrapper while it
+// returns true. Each invocation reads one I420 frame from the source
+// file, feeds it to the VCM, and paces itself with a 33 ms wait.
+// NOTE(review): an EventWrapper and a frame-sized buffer are allocated
+// and freed on every invocation -- works, but wasteful.
+bool
+MainSenderThread(void* obj)
+{
+    SendSharedState* state = static_cast<SendSharedState*>(obj);
+    EventWrapper& waitEvent = *EventWrapper::Create();
+    // preparing a frame for encoding
+    VideoFrame sourceFrame;
+    WebRtc_Word32 width = state->_args.width;
+    WebRtc_Word32 height = state->_args.height;
+    float frameRate = state->_args.frameRate;
+    WebRtc_Word32 lengthSourceFrame  = 3*width*height/2;  // I420 frame size
+    sourceFrame.VerifyAndAllocate(lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[lengthSourceFrame];
+
+    // Lazily open the source file on the first invocation.
+    if (state->_sourceFile == NULL)
+    {
+        state->_sourceFile = fopen(state->_args.inputFile.c_str(), "rb");
+        if (state->_sourceFile == NULL)
+        {
+            printf ("Error when opening file \n");
+            delete &waitEvent;
+            delete [] tmpBuffer;
+            return false;
+        }
+    }
+    if (feof(state->_sourceFile) == 0)
+    {
+        TEST(fread(tmpBuffer, 1, lengthSourceFrame,state->_sourceFile) > 0 ||
+             feof(state->_sourceFile));
+        state->_frameCnt++;
+        sourceFrame.CopyFrame(lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(height);
+        sourceFrame.SetWidth(width);
+        // Advance the RTP timestamp by one frame interval (90 kHz clock).
+        state->_timestamp += (WebRtc_UWord32)(9e4 / frameRate);
+        sourceFrame.SetTimeStamp(state->_timestamp);
+
+        WebRtc_Word32 ret = state->_vcm.AddVideoFrame(sourceFrame);
+        if (ret < 0)
+        {
+            printf("Add Frame error: %d\n", ret);
+            delete &waitEvent;
+            delete [] tmpBuffer;
+            return false;
+        }
+        waitEvent.Wait(33);
+    }
+
+    delete &waitEvent;
+    delete [] tmpBuffer;
+
+    return true;
+}
+
+// Interference thread body: repeatedly re-applies a fixed set of channel
+// parameters to the shared VCM while the main sender thread runs.
+bool
+IntSenderThread(void* obj)
+{
+    SendSharedState* sharedState = static_cast<SendSharedState*>(obj);
+    // Same constants as before (units per VideoCodingModule::SetChannelParameters).
+    sharedState->_vcm.SetChannelParameters(1000, 30, 0);
+    return true;
+}
+
+
+// Multi-threaded send/receive test: runs sender, interference,
+// processing and decode threads against a shared VCM/RTP pair for
+// 30 seconds, then tears everything down.
+int MTRxTxTest(CmdArgs& args)
+{
+    /* TEST SETTINGS */
+    std::string   inname = args.inputFile;
+    std::string outname;
+    if (args.outputFile == "")
+        outname = test::OutputPath() + "MTRxTxTest_decoded.yuv";
+    else
+        outname = args.outputFile;
+
+    WebRtc_UWord16  width = args.width;
+    WebRtc_UWord16  height = args.height;
+
+    float         frameRate = args.frameRate;
+    float         bitRate = args.bitRate;
+    WebRtc_Word32   numberOfCores = 1;
+
+    // error resilience/network
+    // Nack support is currently not implemented in this test.
+    bool          nackEnabled = false;
+    bool          fecEnabled = false;
+    WebRtc_UWord8   rttMS = 20;
+    float         lossRate = 0.0*255; // no packet loss
+    WebRtc_UWord32  renderDelayMs = 0;
+    WebRtc_UWord32  minPlayoutDelayMs = 0;
+
+    /* TEST SET-UP */
+
+    // Set up trace
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "MTRxTxTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    FILE* sourceFile;
+    FILE* decodedFile;
+
+    if ((sourceFile = fopen(inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", inname.c_str());
+        return -1;
+    }
+
+    if ((decodedFile = fopen(outname.c_str(), "wb")) == NULL)
+    {
+        // Fix: message said "Cannot read" for a write-mode open.
+        printf("Cannot write file %s.\n", outname.c_str());
+        return -1;
+    }
+
+    //RTP
+    RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(1, false);
+    if (rtp->InitReceiver() < 0)
+    {
+        return -1;
+    }
+    if (rtp->InitSender() < 0)
+    {
+        return -1;
+    }
+    // Registering codecs for the RTP module.
+    // Zero-initialize first: the original set only a few fields and
+    // passed the remaining VideoCodec members uninitialized.
+    VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(videoCodec));
+    strncpy(videoCodec.plName, "ULPFEC", 32);
+    videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+    TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
+
+    strncpy(videoCodec.plName, "RED", 32);
+    videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
+    TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
+
+    strncpy(videoCodec.plName, args.codecName.c_str(), 32);
+    videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
+    videoCodec.maxBitrate = 10000;
+    videoCodec.codecType = args.codecType;
+    TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
+    TEST(rtp->RegisterSendPayload(videoCodec) == 0);
+
+    // inform RTP Module of error resilience features
+    TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE, VCM_ULPFEC_PAYLOAD_TYPE) == 0);
+
+    //VCM
+    TickTimeBase clock;
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    if (vcm->InitializeReceiver() < 0)
+    {
+        return -1;
+    }
+    if (vcm->InitializeSender())
+    {
+        return -1;
+    }
+    // registering codecs for the VCM module
+    VideoCodec sendCodec;
+    vcm->InitializeSender();
+    WebRtc_Word32 numberOfCodecs = vcm->NumberOfCodecs();
+    if (numberOfCodecs < 1)
+    {
+        return -1;
+    }
+
+    if (vcm->Codec(args.codecType, &sendCodec) != 0)
+    {
+        // desired codec unavailable
+        printf("Codec not registered\n");
+        return -1;
+    }
+    // register codec with the test's dimensions/rates
+    sendCodec.startBitrate = (int) bitRate;
+    sendCodec.height = height;
+    sendCodec.width = width;
+    sendCodec.maxFramerate = (WebRtc_UWord8)frameRate;
+    vcm->RegisterSendCodec(&sendCodec, numberOfCores, 1440);
+    vcm->RegisterReceiveCodec(&sendCodec, numberOfCores); // same settings for encode and decode
+
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    // Callback Settings
+
+    PacketRequester packetRequester(*rtp);
+    vcm->RegisterPacketRequestCallback(&packetRequester);
+
+    VCMRTPEncodeCompleteCallback* encodeCompleteCallback = new VCMRTPEncodeCompleteCallback(rtp);
+    vcm->RegisterTransportCallback(encodeCompleteCallback);
+    encodeCompleteCallback->SetCodecType(ConvertCodecType(args.codecName.c_str()));
+    encodeCompleteCallback->SetFrameDimensions(width, height);
+    // frame ready to be sent to network
+    RTPSendCompleteCallback* outgoingTransport =
+        new RTPSendCompleteCallback(rtp, &clock, "dump.rtp");
+    rtp->RegisterSendTransport(outgoingTransport);
+    // FrameReceiveCallback
+    VCMDecodeCompleteCallback receiveCallback(decodedFile);
+    RtpDataCallback dataCallback(vcm);
+    rtp->RegisterIncomingDataCallback(&dataCallback);
+    vcm->RegisterReceiveCallback(&receiveCallback);
+
+    VideoProtectionCallback protectionCallback;
+    vcm->RegisterProtectionCallback(&protectionCallback);
+
+    outgoingTransport->SetLossPct(lossRate);
+    // Nack support is currently not implemented in this test
+    assert(nackEnabled == false);
+    vcm->SetVideoProtection(kProtectionNack, nackEnabled);
+    vcm->SetVideoProtection(kProtectionFEC, fecEnabled);
+
+    // inform RTP Module of error resilience features
+    rtp->SetFECCodeRate(protectionCallback.FECKeyRate(),
+                        protectionCallback.FECDeltaRate());
+    rtp->SetNACKStatus(protectionCallback.NACKMethod());
+
+    vcm->SetChannelParameters((WebRtc_UWord32) bitRate,
+                              (WebRtc_UWord8) lossRate, rttMS);
+
+    SharedRTPState mtState(*vcm, *rtp); // receive side
+    SendSharedState mtSendState(*vcm, *rtp, args); // send side
+
+    /*START TEST*/
+
+    // Create and start all threads
+    // send side threads
+    ThreadWrapper* mainSenderThread = ThreadWrapper::CreateThread(MainSenderThread,
+            &mtSendState, kNormalPriority, "MainSenderThread");
+    ThreadWrapper* intSenderThread = ThreadWrapper::CreateThread(IntSenderThread,
+            &mtSendState, kNormalPriority, "IntThread");
+
+    if (mainSenderThread != NULL)
+    {
+        unsigned int tid;
+        mainSenderThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start main sender thread\n");
+        return -1;
+    }
+
+    if (intSenderThread != NULL)
+    {
+        unsigned int tid;
+        intSenderThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start sender interference thread\n");
+        return -1;
+    }
+
+    // Receive side threads
+    ThreadWrapper* processingThread = ThreadWrapper::CreateThread(ProcessingThread,
+            &mtState, kNormalPriority, "ProcessingThread");
+    ThreadWrapper* decodeThread = ThreadWrapper::CreateThread(DecodeThread,
+            &mtState, kNormalPriority, "DecodeThread");
+
+    if (processingThread != NULL)
+    {
+        unsigned int tid;
+        processingThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start processing thread\n");
+        return -1;
+    }
+
+    if (decodeThread != NULL)
+    {
+        unsigned int tid;
+        decodeThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start decode thread\n");
+        return -1;
+    }
+
+    EventWrapper& waitEvent = *EventWrapper::Create();
+
+    // Run for 30 seconds and then tear down and exit.
+    // (The original comment said 10 seconds; the wait is 30000 ms.)
+    waitEvent.Wait(30000);
+
+    // Tear down
+
+    while (!mainSenderThread->Stop())
+    {
+        ;
+    }
+
+    while (!intSenderThread->Stop())
+    {
+        ;
+    }
+
+    while (!processingThread->Stop())
+    {
+        ;
+    }
+
+    while (!decodeThread->Stop())
+    {
+        ;
+    }
+
+    printf("\nVCM MT RX/TX Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+
+    delete &waitEvent;
+    delete mainSenderThread;
+    delete intSenderThread;
+    delete processingThread;
+    delete decodeThread;
+    delete encodeCompleteCallback;
+    delete outgoingTransport;
+    VideoCodingModule::Destroy(vcm);
+    RtpRtcp::DestroyRtpRtcp(rtp);
+    rtp = NULL;
+    vcm = NULL;
+    Trace::ReturnTrace();
+    fclose(sourceFile);   // Fix: was opened above but never closed.
+    fclose(decodedFile);
+    printf("Multi-Thread test Done: View output file \n");
+    return 0;
+}
+
diff --git a/trunk/src/modules/video_coding/main/test/mt_test_common.cc b/trunk/src/modules/video_coding/main/test/mt_test_common.cc
new file mode 100644
index 0000000..e3ebd97
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/mt_test_common.cc
@@ -0,0 +1,139 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "mt_test_common.h"
+
+#include <cmath>
+
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "rtp_dump.h"
+
+namespace webrtc {
+
+TransportCallback::TransportCallback(webrtc::RtpRtcp* rtp,
+                                     TickTimeBase* clock,
+                                     const char* filename):
+RTPSendCompleteCallback(rtp, clock, filename)
+{
+    //
+}
+
+TransportCallback::~TransportCallback()
+{
+    //
+}
+
+int
+TransportCallback::SendPacket(int channel, const void *data, int len)
+{
+    _sendCount++;
+    _totalSentLength += len;
+
+    if (_rtpDump != NULL)
+    {
+        if (_rtpDump->DumpPacket((const WebRtc_UWord8*)data, len) != 0)
+        {
+            return -1;
+        }
+    }
+
+    bool transmitPacket = true;
+    // Off-line tests: never drop the first ~20 packets, so the initial key frame survives.
+    if (_sendCount > 20)
+    {
+        transmitPacket = PacketLoss();
+    }
+
+    TickTimeBase clock;
+    int64_t now = clock.MillisecondTimestamp();
+    // Insert outgoing packet into list
+    if (transmitPacket)
+    {
+        RtpPacket* newPacket = new RtpPacket();
+        memcpy(newPacket->data, data, len);
+        newPacket->length = len;
+        // Simulate receive time = network delay + packet jitter
+        // simulated as a Normal distribution random variable with
+        // mean = networkDelay and variance = jitterVar
+        WebRtc_Word32
+        simulatedDelay = (WebRtc_Word32)NormalDist(_networkDelayMs,
+                                                   sqrt(_jitterVar));
+        newPacket->receiveTime = now + simulatedDelay;
+        _rtpPackets.push_back(newPacket);
+    }
+    return 0;
+}
+
+int
+TransportCallback::TransportPackets()
+{
+    // Are we ready to send packets to the receiver?
+    RtpPacket* packet = NULL;
+    TickTimeBase clock;
+    int64_t now = clock.MillisecondTimestamp();
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in list
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - now;
+        if (timeToReceive > 0)
+        {
+            // No available packets to send
+            break;
+        }
+
+        _rtpPackets.pop_front();
+        // Send to receive side
+        if (_rtp->IncomingPacket((const WebRtc_UWord8*)packet->data,
+                                     packet->length) < 0)
+        {
+            delete packet;
+            packet = NULL;
+            // Will return an error after the first packet that goes wrong
+            return -1;
+        }
+        delete packet;
+        packet = NULL;
+    }
+    return 0; // OK
+}
+
+
+
+bool VCMProcessingThread(void* obj)
+{
+    SharedRTPState* state = static_cast<SharedRTPState*>(obj);
+    if (state->_vcm.TimeUntilNextProcess() <= 0)
+    {
+        if (state->_vcm.Process() < 0)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+
+bool VCMDecodeThread(void* obj)
+{
+    SharedRTPState* state = static_cast<SharedRTPState*>(obj);
+    state->_vcm.Decode();
+    return true;
+}
+
+bool TransportThread(void *obj)
+{
+    SharedTransportState* state = static_cast<SharedTransportState*>(obj);
+    state->_transport.TransportPackets();
+    return true;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/test/mt_test_common.h b/trunk/src/modules/video_coding/main/test/mt_test_common.h
new file mode 100644
index 0000000..438f0be
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/mt_test_common.h
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Common multi-thread functionality across video coding module tests
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_MT_TEST_COMMON_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_MT_TEST_COMMON_H_
+
+#include "rtp_rtcp.h"
+#include "test_callbacks.h"
+#include "test_util.h"
+#include "video_coding.h"
+
+namespace webrtc {
+
+class SendSharedState
+{
+public:
+    SendSharedState(webrtc::VideoCodingModule& vcm, webrtc::RtpRtcp& rtp,
+            CmdArgs args) :
+            _vcm(vcm),
+            _rtp(rtp),
+            _args(args),
+            _sourceFile(NULL),
+            _frameCnt(0),
+            _timestamp(0) {}
+
+    webrtc::VideoCodingModule&  _vcm;
+    webrtc::RtpRtcp&            _rtp;
+    CmdArgs                     _args;
+    FILE*                       _sourceFile;
+    WebRtc_Word32               _frameCnt;
+    WebRtc_Word32               _timestamp;
+};
+
+// MT implementation of the RTPSendCompleteCallback (Transport)
+class TransportCallback:public RTPSendCompleteCallback
+{
+ public:
+    // constructor input: (receive side) rtp module to send encoded data to
+    TransportCallback(webrtc::RtpRtcp* rtp, TickTimeBase* clock,
+                      const char* filename = NULL);
+    virtual ~TransportCallback();
+    // Add packets to list
+    // Incorporate network conditions - delay and packet loss
+    // Actual transmission will occur on a separate thread
+    int SendPacket(int channel, const void *data, int len);
+    // Send to the receiver packets which are ready to be submitted
+    int TransportPackets();
+};
+
+class SharedRTPState
+{
+public:
+    SharedRTPState(webrtc::VideoCodingModule& vcm, webrtc::RtpRtcp& rtp) :
+        _vcm(vcm),
+        _rtp(rtp) {}
+    webrtc::VideoCodingModule&  _vcm;
+    webrtc::RtpRtcp&            _rtp;
+};
+
+
+class SharedTransportState
+{
+public:
+    SharedTransportState(webrtc::RtpRtcp& rtp, TransportCallback& transport):
+        _rtp(rtp),
+        _transport(transport) {}
+    webrtc::RtpRtcp&            _rtp;
+    TransportCallback&          _transport;
+};
+
+bool VCMProcessingThread(void* obj);
+bool VCMDecodeThread(void* obj);
+bool TransportThread(void *obj);
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_TEST_MT_TEST_COMMON_H_
diff --git a/trunk/src/modules/video_coding/main/test/normal_test.cc b/trunk/src/modules/video_coding/main/test/normal_test.cc
new file mode 100644
index 0000000..923d649
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/normal_test.cc
@@ -0,0 +1,402 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_test.h"
+
+#include <assert.h>
+#include <iostream>
+#include <sstream>
+#include <time.h>
+
+#include "../source/event.h"
+#include "common_types.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "test_util.h"
+#include "trace.h"
+#include "testsupport/metrics/video_metrics.h"
+
+using namespace webrtc;
+
+int NormalTest::RunTest(CmdArgs& args)
+{
+#if defined(EVENT_DEBUG)
+    printf("SIMULATION TIME\n");
+    FakeTickTime clock(0);
+#else
+    printf("REAL-TIME\n");
+    TickTimeBase clock;
+#endif
+    Trace::CreateTrace();
+    Trace::SetTraceFile(
+        (test::OutputPath() + "VCMNormalTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    NormalTest VCMNTest(vcm, &clock);
+    VCMNTest.Perform(args);
+    VideoCodingModule::Destroy(vcm);
+    Trace::ReturnTrace();
+    return 0;
+}
+
+////////////////
+// Callback Implementation
+//////////////
+
+VCMNTEncodeCompleteCallback::VCMNTEncodeCompleteCallback(FILE* encodedFile,
+                                                         NormalTest& test):
+    _encodedFile(encodedFile),
+    _encodedBytes(0),
+    _skipCnt(0),
+    _VCMReceiver(NULL),
+    _seqNo(0),
+    _test(test)
+{
+    //
+}
+VCMNTEncodeCompleteCallback::~VCMNTEncodeCompleteCallback()
+{
+}
+
+void
+VCMNTEncodeCompleteCallback::RegisterTransportCallback(VCMPacketizationCallback* transport)
+{
+}
+
+WebRtc_Word32
+VCMNTEncodeCompleteCallback::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& /*fragmentationHeader*/,
+        const webrtc::RTPVideoHeader* videoHdr)
+
+{
+    // will call the VCMReceiver input packet
+    _frameType = frameType;
+    // writing encodedData into file
+    fwrite(payloadData, 1, payloadSize, _encodedFile);
+    WebRtcRTPHeader rtpInfo;
+    rtpInfo.header.markerBit = true;
+    rtpInfo.type.Video.width = 0;
+    rtpInfo.type.Video.height = 0;
+    switch (_test.VideoType())
+    {
+    case kVideoCodecVP8:
+        rtpInfo.type.Video.codec = kRTPVideoVP8;
+        rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+        rtpInfo.type.Video.codecHeader.VP8.nonReference =
+            videoHdr->codecHeader.VP8.nonReference;
+        rtpInfo.type.Video.codecHeader.VP8.pictureId =
+            videoHdr->codecHeader.VP8.pictureId;
+        break;
+    case kVideoCodecI420:
+        rtpInfo.type.Video.codec = kRTPVideoI420;
+        break;
+    default:
+        assert(false);
+        return -1;
+    }
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.timestamp = timeStamp;
+    rtpInfo.frameType = frameType;
+    rtpInfo.type.Video.isFirstPacket = true;
+    // Size should also be received from that table, since the payload type
+    // defines the size.
+
+    _encodedBytes += payloadSize;
+    if (payloadSize < 20)
+    {
+        _skipCnt++;
+    }
+    _VCMReceiver->IncomingPacket(payloadData, payloadSize, rtpInfo);
+    return 0;
+}
+void
+VCMNTEncodeCompleteCallback::RegisterReceiverVCM(VideoCodingModule *vcm)
+{
+    _VCMReceiver = vcm;
+    return;
+}
+ WebRtc_Word32
+VCMNTEncodeCompleteCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+WebRtc_UWord32
+VCMNTEncodeCompleteCallback::SkipCnt()
+{
+    return _skipCnt;
+}
+
+// Decoded Frame Callback Implementation
+VCMNTDecodeCompleCallback::~VCMNTDecodeCompleCallback()
+{
+  if (_decodedFile)
+    fclose(_decodedFile);
+}
+ WebRtc_Word32
+VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame)
+{
+    if (videoFrame.Width() != _currentWidth ||
+        videoFrame.Height() != _currentHeight)
+    {
+        _currentWidth = videoFrame.Width();
+        _currentHeight = videoFrame.Height();
+        if (_decodedFile != NULL)
+        {
+            fclose(_decodedFile);
+            _decodedFile = NULL;
+        }
+        _decodedFile = fopen(_outname.c_str(), "wb");
+    }
+    fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _decodedFile);
+    _decodedBytes+= videoFrame.Length();
+    return VCM_OK;
+}
+
+ WebRtc_Word32
+VCMNTDecodeCompleCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+ //VCM Normal Test Class implementation
+
+NormalTest::NormalTest(VideoCodingModule* vcm, TickTimeBase* clock)
+:
+_clock(clock),
+_vcm(vcm),
+_sumEncBytes(0),
+_timeStamp(0),
+_totalEncodeTime(0),
+_totalDecodeTime(0),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_totalEncodePipeTime(0),
+_totalDecodePipeTime(0),
+_frameCnt(0),
+_encFrameCnt(0),
+_decFrameCnt(0)
+{
+    //
+}
+
+NormalTest::~NormalTest()
+{
+    //
+}
+void
+NormalTest::Setup(CmdArgs& args)
+{
+    _inname = args.inputFile;
+    _encodedName = test::OutputPath() + "encoded_normaltest.yuv";
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _bitRate = args.bitRate;
+    if (args.outputFile == "")
+    {
+        std::ostringstream filename;
+        filename << test::OutputPath() << "NormalTest_" <<
+            _width << "x" << _height << "_" << _frameRate << "Hz_P420.yuv";
+        _outname = filename.str();
+    }
+    else
+    {
+        _outname = args.outputFile;
+    }
+    _lengthSourceFrame  = 3*_width*_height/2;
+    _videoType = args.codecType;
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    _log.open((test::OutputPath() + "TestLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    return;
+}
+
+WebRtc_Word32
+NormalTest::Perform(CmdArgs& args)
+{
+    Setup(args);
+    EventWrapper* waitEvent = EventWrapper::Create();
+    VideoCodec _sendCodec;//, _receiveCodec; // tmp - sendCodecd used as receive codec
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    TEST(VideoCodingModule::Codec(_videoType, &_sendCodec) == VCM_OK);
+    _sendCodec.startBitrate = (int)_bitRate; // should be later on changed via the API
+    _sendCodec.width = static_cast<WebRtc_UWord16>(_width);
+    _sendCodec.height = static_cast<WebRtc_UWord16>(_height);
+    _sendCodec.maxFramerate = _frameRate;
+    TEST(_vcm->RegisterSendCodec(&_sendCodec, 4, 1400) == VCM_OK);// will also set and init the desired codec
+    // register a decoder (same codec for decoder and encoder )
+    TEST(_vcm->RegisterReceiveCodec(&_sendCodec, 1) == VCM_OK);
+    /* Callback Settings */
+    VCMNTDecodeCompleCallback _decodeCallback(_outname);
+    _vcm->RegisterReceiveCallback(&_decodeCallback);
+    VCMNTEncodeCompleteCallback _encodeCompleteCallback(_encodedFile, *this);
+    _vcm->RegisterTransportCallback(&_encodeCompleteCallback);
+    // encode and decode with the same vcm
+    _encodeCompleteCallback.RegisterReceiverVCM(_vcm);
+    ///////////////////////
+    /// Start Test
+    ///////////////////////
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    double startTime = clock()/(double)CLOCKS_PER_SEC;
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0);
+
+    SendStatsTest sendStats;
+    sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+    _vcm->RegisterSendStatisticsCallback(&sendStats);
+
+    while (feof(_sourceFile) == 0)
+    {
+#if !defined(EVENT_DEBUG)
+        WebRtc_Word64 processStartTime = _clock->MillisecondTimestamp();
+#endif
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0 ||
+             feof(_sourceFile));
+        _frameCnt++;
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_sendCodec.maxFramerate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        _encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC;
+        WebRtc_Word32 ret = _vcm->AddVideoFrame(sourceFrame);
+        double encodeTime = clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())];
+        _totalEncodeTime += encodeTime;
+        if (ret < 0)
+        {
+            printf("Error in AddFrame: %d\n", ret);
+            //exit(1);
+        }
+        _decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode
+        ret = _vcm->Decode();
+        _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())];
+        if (ret < 0)
+        {
+            printf("Error in Decode: %d\n", ret);
+            //exit(1);
+        }
+        if (_vcm->TimeUntilNextProcess() <= 0)
+        {
+            _vcm->Process();
+        }
+        WebRtc_UWord32 framePeriod = static_cast<WebRtc_UWord32>(1000.0f/static_cast<float>(_sendCodec.maxFramerate) + 0.5f);
+#if defined(EVENT_DEBUG)
+        static_cast<FakeTickTime*>(_clock)->IncrementDebugClock(framePeriod);
+#else
+        WebRtc_Word64 timeSpent = _clock->MillisecondTimestamp() - processStartTime;
+        if (timeSpent < framePeriod)
+        {
+            waitEvent->Wait(framePeriod - timeSpent);
+        }
+#endif
+    }
+    double endTime = clock()/(double)CLOCKS_PER_SEC;
+    _testTotalTime = endTime - startTime;
+    _sumEncBytes = _encodeCompleteCallback.EncodedBytes();
+
+    delete [] tmpBuffer;
+    delete waitEvent;
+    Teardown();
+    Print();
+    return 0;
+}
+
+void
+NormalTest::FrameEncoded(WebRtc_UWord32 timeStamp)
+{
+    _encodeCompleteTime = clock()/(double)CLOCKS_PER_SEC;
+    _encFrameCnt++;
+    _totalEncodePipeTime += _encodeCompleteTime - _encodeTimes[int(timeStamp)];
+
+}
+
+void
+NormalTest::FrameDecoded(WebRtc_UWord32 timeStamp)
+{
+    _decodeCompleteTime = clock()/(double)CLOCKS_PER_SEC;
+    _decFrameCnt++;
+    _totalDecodePipeTime += _decodeCompleteTime - _decodeTimes[timeStamp];
+}
+
+void
+NormalTest::Print()
+{
+    std::cout << "Normal Test Completed!" << std::endl;
+    (_log) << "Normal Test Completed!" << std::endl;
+    (_log) << "Input file: " << _inname << std::endl;
+    (_log) << "Output file: " << _outname << std::endl;
+    (_log) << "Total run time: " << _testTotalTime << std::endl;
+    printf("Total run time: %f s \n", _testTotalTime);
+    double ActualBitRate =  8.0 *( _sumEncBytes / (_frameCnt / _frameRate));
+    double actualBitRate = ActualBitRate / 1000.0;
+    double avgEncTime = _totalEncodeTime / _frameCnt;
+    double avgDecTime = _totalDecodeTime / _frameCnt;
+    webrtc::test::QualityMetricsResult psnr, ssim;
+    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _width, _height,
+                      &psnr);
+    I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _width, _height,
+                      &ssim);
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Target bitrate: %f kbps\n", _bitRate);
+    ( _log) << "Actual bitrate: " << actualBitRate<< " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    printf("Average encode time: %f s\n", avgEncTime);
+    ( _log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    printf("Average decode time: %f s\n", avgDecTime);
+    ( _log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    printf("PSNR: %f \n", psnr.average);
+    ( _log) << "PSNR: " << psnr.average << std::endl;
+    printf("SSIM: %f \n", ssim.average);
+    ( _log) << "SSIM: " << ssim.average << std::endl;
+    (_log) << std::endl;
+
+    printf("\nVCM Normal Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+void
+NormalTest::Teardown()
+{
+    //_log.close();
+    fclose(_sourceFile);
+    fclose(_encodedFile);
+    return;
+}
+
+
+
+
diff --git a/trunk/src/modules/video_coding/main/test/normal_test.h b/trunk/src/modules/video_coding/main/test/normal_test.h
new file mode 100644
index 0000000..6f75dfb
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/normal_test.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_NORMAL_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_NORMAL_TEST_H_
+
+#include "video_coding.h"
+#include "test_util.h"
+
+#include <map>
+
+class NormalTest;
+
+// Send side - packetization callback; creates and sends a packet to the VCMReceiver.
+class VCMNTEncodeCompleteCallback : public webrtc::VCMPacketizationCallback
+{
+public:
+    // constructor input: file in which encoded data will be written
+    VCMNTEncodeCompleteCallback(FILE* encodedFile, NormalTest& test);
+    virtual ~VCMNTEncodeCompleteCallback();
+    // Register transport callback
+    void RegisterTransportCallback(webrtc::VCMPacketizationCallback* transport);
+    // process encoded data received from the encoder, pass stream to the VCMReceiver module
+    WebRtc_Word32 SendData(const webrtc::FrameType frameType,
+                           const WebRtc_UWord8 payloadType,
+                           const WebRtc_UWord32 timeStamp,
+                           const WebRtc_UWord8* payloadData,
+                           const WebRtc_UWord32 payloadSize,
+                           const webrtc::RTPFragmentationHeader& fragmentationHeader,
+                           const webrtc::RTPVideoHeader* videoHdr);
+
+    // Register existing VCM. Currently - encode and decode with the same vcm module.
+    void RegisterReceiverVCM(webrtc::VideoCodingModule *vcm);
+    // Return sum of encoded data (all frames in the sequence)
+    WebRtc_Word32 EncodedBytes();
+    // return number of encoder-skipped frames
+    WebRtc_UWord32 SkipCnt();;
+    // conversion function for payload type (needed for the callback function)
+//    RTPVideoVideoCodecTypes ConvertPayloadType(WebRtc_UWord8 payloadType);
+
+private:
+    FILE*                       _encodedFile;
+    WebRtc_UWord32              _encodedBytes;
+    WebRtc_UWord32              _skipCnt;
+    webrtc::VideoCodingModule*  _VCMReceiver;
+    webrtc::FrameType           _frameType;
+    WebRtc_UWord8*              _payloadData; // max payload size??
+    WebRtc_UWord16              _seqNo;
+    NormalTest&                 _test;
+}; // end of VCMEncodeCompleteCallback
+
+class VCMNTDecodeCompleCallback: public webrtc::VCMReceiveCallback
+{
+public:
+    VCMNTDecodeCompleCallback(std::string outname): // or should it get a name?
+        _decodedFile(NULL),
+        _outname(outname),
+        _decodedBytes(0),
+        _currentWidth(0),
+        _currentHeight(0) {}
+    virtual ~VCMNTDecodeCompleCallback();
+    void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
+    // will write decoded frame into file
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+    WebRtc_Word32 DecodedBytes();
+private:
+    FILE*             _decodedFile;
+    std::string       _outname;
+    WebRtc_UWord32    _decodedBytes;
+    WebRtc_UWord32    _currentWidth;
+    WebRtc_UWord32    _currentHeight;
+
+}; // end of VCMDecodeCompleCallback class
+
+
+class NormalTest
+{
+public:
+    NormalTest(webrtc::VideoCodingModule* vcm,
+               webrtc::TickTimeBase* clock);
+    ~NormalTest();
+    static int RunTest(CmdArgs& args);
+    WebRtc_Word32    Perform(CmdArgs& args);
+    // option: turn these into private methods and call them from Perform().
+    WebRtc_UWord32   Width() const { return _width; };
+    WebRtc_UWord32   Height() const { return _height; };
+    webrtc::VideoCodecType VideoType() const { return _videoType; };
+
+
+protected:
+    // test setup - open files, general initializations
+    void            Setup(CmdArgs& args);
+   // close open files, delete used memory
+    void            Teardown();
+    // print results to std output and to log file
+    void            Print();
+    // calculating pipeline delay, and encoding time
+    void            FrameEncoded(WebRtc_UWord32 timeStamp);
+    // calculating pipeline delay, and decoding time
+    void            FrameDecoded(WebRtc_UWord32 timeStamp);
+
+    webrtc::TickTimeBase*            _clock;
+    webrtc::VideoCodingModule*       _vcm;
+    webrtc::VideoCodec               _sendCodec;
+    webrtc::VideoCodec               _receiveCodec;
+    std::string                      _inname;
+    std::string                      _outname;
+    std::string                      _encodedName;
+    WebRtc_Word32                    _sumEncBytes;
+    FILE*                            _sourceFile;
+    FILE*                            _decodedFile;
+    FILE*                            _encodedFile;
+    std::fstream                     _log;
+    WebRtc_UWord32                   _width;
+    WebRtc_UWord32                   _height;
+    float                            _frameRate;
+    float                            _bitRate;
+    WebRtc_UWord32                   _lengthSourceFrame;
+    WebRtc_UWord32                   _timeStamp;
+    webrtc::VideoCodecType           _videoType;
+    double                           _totalEncodeTime;
+    double                           _totalDecodeTime;
+    double                           _decodeCompleteTime;
+    double                           _encodeCompleteTime;
+    double                           _totalEncodePipeTime;
+    double                           _totalDecodePipeTime;
+    double                           _testTotalTime;
+    std::map<int, double>            _encodeTimes;
+    std::map<int, double>            _decodeTimes;
+    WebRtc_Word32                    _frameCnt;
+    WebRtc_Word32                    _encFrameCnt;
+    WebRtc_Word32                    _decFrameCnt;
+
+}; // end of VCMNormalTestClass
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_NORMAL_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/test/plotJitterEstimate.m b/trunk/src/modules/video_coding/main/test/plotJitterEstimate.m
new file mode 100644
index 0000000..d6185f5
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/plotJitterEstimate.m
@@ -0,0 +1,52 @@
+function plotJitterEstimate(filename)
+
+[timestamps, framedata, slopes, randJitters, framestats, timetable, filtjitter, rtt, rttStatsVec] = jitterBufferTraceParser(filename);
+
+x = 1:size(framestats, 1);
+%figure(2);
+subfigure(3, 2, 1);
+hold on;
+plot(x, slopes(x, 1).*(framestats(x, 1) - framestats(x, 2)) + 3*sqrt(randJitters(x,2)), 'b'); title('Estimate ms');
+plot(x, filtjitter, 'r');
+plot(x, slopes(x, 1).*(framestats(x, 1) - framestats(x, 2)), 'g');
+subfigure(3, 2, 2);
+%subplot(211);
+plot(x, slopes(x, 1)); title('Line slope');
+%subplot(212);
+%plot(x, slopes(x, 2)); title('Line offset');
+subfigure(3, 2, 3); hold on;
+plot(x, framestats); plot(x, framedata(x, 1)); title('frame size and average frame size');
+subfigure(3, 2, 4);
+plot(x, framedata(x, 2)); title('Delay');
+subfigure(3, 2, 5);
+hold on;
+plot(x, randJitters(x,1),'r');
+plot(x, randJitters(x,2)); title('Random jitter');
+
+subfigure(3, 2, 6);
+delays = framedata(:,2);
+dL = [0; framedata(2:end, 1) - framedata(1:end-1, 1)];
+hold on;
+plot(dL, delays, '.');
+s = [min(dL) max(dL)];
+plot(s, slopes(end, 1)*s + slopes(end, 2), 'g');
+plot(s, slopes(end, 1)*s + slopes(end, 2) + 3*sqrt(randJitters(end,2)), 'r');
+plot(s, slopes(end, 1)*s + slopes(end, 2) - 3*sqrt(randJitters(end,2)), 'r');
+title('theta(1)*x+theta(2), (dT-dTS)/dL');
+if sum(size(rttStatsVec)) > 0
+    figure; hold on; 
+    rttNstdDevsDrift = 3.5;
+    rttNstdDevsJump = 2.5;
+    rttSamples = rttStatsVec(:, 1);
+    rttAvgs = rttStatsVec(:, 2);
+    rttStdDevs = sqrt(rttStatsVec(:, 3));
+    rttMax = rttStatsVec(:, 4);
+    plot(rttSamples, 'ko-');
+    plot(rttAvgs, 'g');
+    plot(rttAvgs + rttNstdDevsDrift*rttStdDevs, 'b--'); 
+    plot(rttAvgs + rttNstdDevsJump*rttStdDevs, 'b'); 
+    plot(rttAvgs - rttNstdDevsJump*rttStdDevs, 'b');
+    plot(rttMax, 'r');
+    %plot(driftRestarts*max(maxRtts), '.');
+    %plot(jumpRestarts*max(maxRtts), '.');
+end
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/main/test/plotReceiveTrace.m b/trunk/src/modules/video_coding/main/test/plotReceiveTrace.m
new file mode 100644
index 0000000..4d262aa
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/plotReceiveTrace.m
@@ -0,0 +1,213 @@
+function [t, TS] = plotReceiveTrace(filename, flat)
+% Parses a WebRTC receive-side trace file and plots packet arrival, frame
+% completion, decode and render timing against the RTP timestamp.
+% Inputs:
+%   filename - path to the trace file (see the example lines below for the
+%              expected format of the VIDEO / VIDEO CODING trace entries).
+%   flat     - optional (default 0); when non-zero, the RTP-timestamp slope
+%              is subtracted from each curve so they plot "flat".
+% Outputs:
+%   t  - local time (ms) at which each complete frame was received.
+%   TS - RTP timestamp of each complete frame.
+fid=fopen(filename);
+%DEBUG     ; ( 8:32:33:375 |    0)        VIDEO:1          ;      5260; First packet of frame 1869537938
+%DEBUG     ; ( 8:32:33:375 |    0) VIDEO CODING:1          ;      5260; Decoding timestamp 1869534934
+%DEBUG     ; ( 8:32:33:375 |    0)        VIDEO:1          ;      5260; Render frame 1869534934 at 20772610
+%DEBUG     ; ( 8:32:33:375 |    0) VIDEO CODING:-1         ;      5260; Frame decoded: timeStamp=1870511259 decTime=0 maxDecTime=0, at 19965
+%DEBUG     ; ( 7:59:42:500 |    0)        VIDEO:-1         ;      2500; Received complete frame timestamp 1870514263 frame type 1 frame size 7862 at time 19965, jitter estimate was 130
+%DEBUG     ; ( 8: 5:51:774 |    0)        VIDEO:-1         ;      3968; ExtrapolateLocalTime(1870967878)=24971 ms
+
+if nargin == 1
+    flat = 0;
+end
+line = fgetl(fid);
+estimatedArrivalTime = [];
+packetTime = [];
+firstPacketTime = [];
+decodeTime = [];
+decodeCompleteTime = [];
+renderTime = [];
+completeTime = [];
+while ischar(line)%line ~= -1
+    if length(line) == 0
+        line = fgetl(fid);
+        continue;
+    end
+    % Parse the trace line header
+    [tempres, count] = sscanf(line, 'DEBUG     ; (%u:%u:%u:%u |%*lu)%13c:');
+    if count < 5
+        line = fgetl(fid);
+        continue;
+    end
+    hr=tempres(1);
+    mn=tempres(2);
+    sec=tempres(3);
+    ms=tempres(4);
+    timeInMs=hr*60*60*1000 + mn*60*1000 + sec*1000 + ms;
+    label = tempres(5:end);
+    I = find(label ~= 32); 
+    label = label(I(1):end); % remove white spaces
+    if ~strncmp(char(label), 'VIDEO', 5) & ~strncmp(char(label), 'VIDEO CODING', 12)
+        line = fgetl(fid);
+        continue;
+    end
+    message = line(72:end);
+    
+    % Parse message; each sscanf pattern below corresponds to one of the
+    % trace entries shown at the top of the file.
+    [p, count] = sscanf(message, 'ExtrapolateLocalTime(%lu)=%lu ms');
+    if count == 2
+        estimatedArrivalTime = [estimatedArrivalTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    [p, count] = sscanf(message, 'Packet seqNo %u of frame %lu at %lu');
+    if count == 3
+        packetTime = [packetTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    [p, count] = sscanf(message, 'First packet of frame %lu at %lu');
+    if count == 2
+        firstPacketTime = [firstPacketTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    [p, count] = sscanf(message, 'Decoding timestamp %lu at %lu');
+    if count == 2
+        decodeTime = [decodeTime; p'];
+        line = fgetl(fid);
+        continue;        
+    end
+    
+    [p, count] = sscanf(message, 'Render frame %lu at %lu. Render delay %lu, required delay %lu, max decode time %lu, min total delay %lu');
+    if count == 6
+        renderTime = [renderTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+
+    [p, count] = sscanf(message, 'Frame decoded: timeStamp=%lu decTime=%d maxDecTime=%lu, at %lu');
+    if count == 4
+        decodeCompleteTime = [decodeCompleteTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+
+    [p, count] = sscanf(message, 'Received complete frame timestamp %lu frame type %u frame size %*u at time %lu, jitter estimate was %lu');
+    if count == 4
+        completeTime = [completeTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    line = fgetl(fid);
+end
+fclose(fid);
+
+t = completeTime(:,3);
+TS = completeTime(:,1);
+
+figure;
+subplot(211);
+hold on;
+slope = 0;
+
+if sum(size(packetTime)) > 0
+    % Plot the time when each packet arrives
+    firstTimeStamp = packetTime(1,2);
+    x = (packetTime(:,2) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    firstTime = packetTime(1,3);
+    plot(x, packetTime(:,3) - firstTime - slope, 'b.');
+else
+    % Plot the time when the first packet of a frame arrives
+    firstTimeStamp = firstPacketTime(1,1);
+    x = (firstPacketTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    firstTime = firstPacketTime(1,2);
+    plot(x, firstPacketTime(:,2) - firstTime - slope, 'b.');
+end
+
+% Plot the frame complete time
+if prod(size(completeTime)) > 0
+    x = (completeTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, completeTime(:,3) - firstTime - slope, 'ks');
+end
+
+% Plot the time the decode starts
+if prod(size(decodeTime)) > 0
+    x = (decodeTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, decodeTime(:,2) - firstTime - slope, 'r.');
+end
+
+% Plot the decode complete time
+if prod(size(decodeCompleteTime)) > 0
+    x = (decodeCompleteTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, decodeCompleteTime(:,4) - firstTime - slope, 'g.');
+end
+
+if prod(size(renderTime)) > 0
+    % Plot the wanted render time in ms
+    x = (renderTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, renderTime(:,2) - firstTime - slope, 'c-');
+    
+    % Plot the render time if there were no render delay or decoding delay.
+    x = (renderTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, renderTime(:,2) - firstTime - slope - renderTime(:, 3) - renderTime(:, 5), 'c--');
+    
+    % Plot the render time if there were no render delay.
+    x = (renderTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    % BUG FIX: this curve previously subtracted both the render delay
+    % (column 3) and the decode delay (column 5), making it identical to the
+    % 'c--' curve above ('Only jitter' in the legend). The 'Must decode'
+    % curve should exclude only the render delay.
+    plot(x, renderTime(:,2) - firstTime - slope - renderTime(:, 3), 'b-');
+end
+
+%plot(x, 90*x, 'r-');
+
+xlabel('RTP timestamp (in ms)');
+ylabel('Time (ms)');
+legend('Packet arrives', 'Frame complete', 'Decode', 'Decode complete', 'Time to render', 'Only jitter', 'Must decode');
+
+% subplot(312);
+% hold on;
+% completeTs = completeTime(:, 1);
+% arrivalTs = estimatedArrivalTime(:, 1);
+% [c, completeIdx, arrivalIdx] = intersect(completeTs, arrivalTs);
+% %plot(completeTs(completeIdx), completeTime(completeIdx, 3) - estimatedArrivalTime(arrivalIdx, 2));
+% timeUntilComplete = completeTime(completeIdx, 3) - estimatedArrivalTime(arrivalIdx, 2);
+% devFromAvgCompleteTime = timeUntilComplete - mean(timeUntilComplete);
+% plot(completeTs(completeIdx) - completeTs(completeIdx(1)), devFromAvgCompleteTime);
+% plot(completeTime(:, 1) - completeTime(1, 1), completeTime(:, 4), 'r');
+% plot(decodeCompleteTime(:, 1) - decodeCompleteTime(1, 1), decodeCompleteTime(:, 2), 'g');
+% plot(decodeCompleteTime(:, 1) - decodeCompleteTime(1, 1), decodeCompleteTime(:, 3), 'k');
+% xlabel('RTP timestamp');
+% ylabel('Time (ms)');
+% legend('Complete time - Estimated arrival time', 'Desired jitter buffer level', 'Actual decode time', 'Max decode time', 0);
+
+if prod(size(renderTime)) > 0
+    subplot(212);
+    hold on;
+    firstTime = renderTime(1, 1);
+    % Columns: 3 = render delay, 4 = jitter (required) delay, 5 = decode
+    % delay, 6 = minimum total delay (see the 'Render frame' trace format).
+    targetDelay = max(renderTime(:, 3) + renderTime(:, 4) + renderTime(:, 5), renderTime(:, 6));
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 3), 'r-');
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 4), 'b-');
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 5), 'g-');
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 6), 'k-');
+    plot(renderTime(:, 1) - firstTime, targetDelay, 'c-');
+    xlabel('RTP timestamp');
+    ylabel('Time (ms)');
+    legend('Render delay', 'Jitter delay', 'Decode delay', 'Extra delay', 'Min total delay');
+end
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/main/test/plotTimingTest.m b/trunk/src/modules/video_coding/main/test/plotTimingTest.m
new file mode 100644
index 0000000..52a6f30
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/plotTimingTest.m
@@ -0,0 +1,62 @@
+function plotTimingTest(filename)
+% Parses the trace output of the VCM timing test ('Stochastic test 1') and
+% plots decode time, max wait time and render-time differences against the
+% (unwrapped) RTP timestamp.
+fid=fopen(filename);
+
+%DEBUG     ; ( 9:53:33:859 |    0)        VIDEO:-1         ;      7132; Stochastic test 1
+%DEBUG     ; ( 9:53:33:859 |    0) VIDEO CODING:-1         ;      7132; Frame decoded: timeStamp=3000 decTime=10 at 10012
+%DEBUG     ; ( 9:53:33:859 |    0)        VIDEO:-1         ;      7132; timeStamp=3000 clock=10037 maxWaitTime=0
+%DEBUG     ; ( 9:53:33:859 |    0)        VIDEO:-1         ;      7132; timeStampMs=33 renderTime=54
+line = fgetl(fid);
+decTime = [];
+waitTime = [];
+renderTime = [];
+foundStart = 0;
+testName = 'Stochastic test 1';
+while ischar(line)
+    if length(line) == 0
+        line = fgetl(fid);
+        continue;
+    end
+    lineOrig = line;
+    % Skip everything until the test start marker; the fixed-width trace
+    % header occupies the first 71 characters of each line.
+    line = line(72:end);
+    if ~foundStart
+        if strncmp(line, testName, length(testName)) 
+            foundStart = 1;
+        end
+        line = fgetl(fid);
+        continue;
+    end
+    [p, count] = sscanf(line, 'Frame decoded: timeStamp=%lu decTime=%d maxDecTime=%d, at %lu');
+    if count == 4
+        decTime = [decTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    [p, count] = sscanf(line, 'timeStamp=%u clock=%u maxWaitTime=%u');
+    if count == 3
+        waitTime = [waitTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    [p, count] = sscanf(line, 'timeStamp=%u renderTime=%u');
+    if count == 2
+        renderTime = [renderTime; p'];
+        line = fgetl(fid);
+        continue;
+    end    
+    line = fgetl(fid);
+end
+fclose(fid);
+
+% Compensate for wrap arounds and start counting from zero.
+% BUG FIX: the original code assumed exactly one wrap — it errored out when
+% no wrap occurred (empty index used with the colon operator) and mishandled
+% multiple wraps. Each wrap adds one full 32-bit cycle (2^32, not 2^32-1).
+timeStamps = waitTime(:, 1);
+wrapIdx = find(diff(timeStamps) < 0);
+for k = 1:length(wrapIdx)
+    timeStamps(wrapIdx(k)+1:end) = timeStamps(wrapIdx(k)+1:end) + 2^32;
+end
+timeStamps = timeStamps - timeStamps(1);
+
+figure;
+hold on;
+plot(timeStamps, decTime(:, 2), 'r');
+plot(timeStamps, waitTime(:, 3), 'g');
+plot(timeStamps(2:end), diff(renderTime(:, 2)), 'b');
+legend('Decode time', 'Max wait time', 'Render time diff');
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/main/test/quality_modes_test.cc b/trunk/src/modules/video_coding/main/test/quality_modes_test.cc
new file mode 100644
index 0000000..e7da691
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/quality_modes_test.cc
@@ -0,0 +1,485 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "quality_modes_test.h"
+
+#include <iostream>
+#include <string>
+#include <time.h>
+
+#include "../source/event.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+using namespace webrtc;
+
+// Entry point for the quality-modes test: creates a VCM instance, runs the
+// QualityModesTest scenario against it, and destroys the instance again.
+// Returns 0 on completion, or -1 when simulated (debug) events are compiled
+// in, since this test must not run with them.
+int qualityModeTest()
+{
+#if defined(EVENT_DEBUG)
+    // Simulated events would invalidate the timing behavior under test.
+    return -1;
+#else
+    TickTimeBase clock;
+    VideoCodingModule* module = VideoCodingModule::Create(1, &clock);
+    QualityModesTest qualityTest(module, &clock);
+    qualityTest.Perform();
+    VideoCodingModule::Destroy(module);
+    return 0;
+#endif
+}
+
+
+// Constructs the test on top of NormalTest. _vpm is value-initialized to
+// NULL here and created later in Perform().
+QualityModesTest::QualityModesTest(VideoCodingModule* vcm,
+                                   TickTimeBase* clock):
+NormalTest(vcm, clock),
+_vpm()
+{
+    //
+}
+
+
+// Nothing to release here; _vpm is destroyed at the end of Perform().
+QualityModesTest::~QualityModesTest()
+{
+    //
+}
+
+// Configures the test: input/output file paths, native (source) and target
+// (encoder) resolution/frame rate, bit rate, and opens all files. Exits the
+// process if any file cannot be opened.
+void
+QualityModesTest::Setup()
+{
+
+
+    _inname= test::ProjectRootPath() + "resources/crew_30f_4CIF.yuv";
+    _outname = test::OutputPath() + "out_qmtest.yuv";
+    _encodedName = test::OutputPath() + "encoded_qmtest.yuv";
+
+    //NATIVE/SOURCE VALUES
+    _nativeWidth = 2*352;
+    _nativeHeight = 2*288;
+    _nativeFrameRate = 30;
+
+
+    //TARGET/ENCODER VALUES
+     _width = 2*352;
+     _height = 2*288;
+    _frameRate = 30;
+    //
+    _bitRate = 400;
+
+    _flagSSIM = false;
+
+    // I420 frame: width * height luma plus two quarter-size chroma planes.
+    _lengthSourceFrame  = 3*_nativeWidth*_nativeHeight/2;
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+     if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+    if ((_decodedFile = fopen(_outname.c_str(),  "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    // Append to the shared test log rather than truncating it.
+    _log.open((test::OutputPath() + "TestLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    return;
+}
+
+
+// Reports the test results (bitrate, encode/decode times, PSNR, optionally
+// SSIM, VPM frame drops) to stdout and to the test log, and prints the
+// TEST-macro pass/fail summary.
+void
+QualityModesTest::Print()
+{
+    std::cout << "Quality Modes Test Completed!" << std::endl;
+    (_log) << "Quality Modes Test Completed!" << std::endl;
+    (_log) << "Input file: " << _inname << std::endl;
+    (_log) << "Output file: " << _outname << std::endl;
+    (_log) << "Total run time: " << _testTotalTime << std::endl;
+    printf("Total run time: %f s \n", _testTotalTime);
+    // Bits per second: total bytes * 8 over the sequence duration in seconds.
+    double ActualBitRate =  8.0 *( _sumEncBytes / (_frameCnt / _nativeFrameRate));
+    double actualBitRate = ActualBitRate / 1000.0;
+    double avgEncTime = _totalEncodeTime / _frameCnt;
+    double avgDecTime = _totalDecodeTime / _frameCnt;
+    webrtc::test::QualityMetricsResult psnr,ssim;
+    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _nativeWidth,
+                      _nativeHeight, &psnr);
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Target bitrate: %f kbps\n", _bitRate);
+    ( _log) << "Actual bitrate: " << actualBitRate<< " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    printf("Average encode time: %f s\n", avgEncTime);
+    ( _log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    printf("Average decode time: %f s\n", avgDecTime);
+    ( _log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    printf("PSNR: %f \n", psnr.average);
+    printf("**Number of frames dropped in VPM***%d \n",_numFramesDroppedVPM);
+    ( _log) << "PSNR: " << psnr.average << std::endl;
+    if (_flagSSIM == 1)
+    {
+        // SSIM is expensive to compute, so it is gated behind _flagSSIM.
+        printf("***computing SSIM***\n");
+        I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _nativeWidth,
+                          _nativeHeight, &ssim);
+        printf("SSIM: %f \n", ssim.average);
+    }
+    (_log) << std::endl;
+
+    // BUG FIX: corrected typo "Qualit" in the user-facing summary line.
+    printf("\nVCM Quality Modes Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+// Releases every resource opened in Setup(): flushes/closes the test log
+// and closes the source, decoded-output and encoded-output files.
+void
+QualityModesTest::Teardown()
+{
+    _log.close();
+    fclose(_encodedFile);
+    fclose(_decodedFile);
+    fclose(_sourceFile);
+}
+
+
+// Runs the quality-modes scenario: configures VP8 send/receive codecs on
+// the VCM, routes every source frame through the VPM (content analysis,
++// resampling, optional temporal decimation) into the encoder, decodes it
+// back, and drives periodic channel-parameter updates that trigger QM
+// selection. Returns 0 on completion.
+WebRtc_Word32
+QualityModesTest::Perform()
+{
+    Setup();
+    // changing bit/frame rate during the test
+    const float bitRateUpdate[] = {1000};
+    const float frameRateUpdate[] = {30};
+    const int updateFrameNum[] = {10000}; // frame numbers at which an update will occur
+
+    WebRtc_UWord32 numChanges = sizeof(updateFrameNum)/sizeof(*updateFrameNum);
+    WebRtc_UWord8 change = 0;// change counter
+
+    _vpm = VideoProcessingModule::Create(1);
+
+    EventWrapper* waitEvent = EventWrapper::Create();
+    VideoCodec codec;//both send and receive
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    WebRtc_Word32 NumberOfCodecs = _vcm->NumberOfCodecs();
+    // Find and configure the VP8 codec.
+    for (int i = 0; i < NumberOfCodecs; i++)
+    {
+        _vcm->Codec(i, &codec);
+        if(strncmp(codec.plName,"VP8" , 5) == 0)
+        {
+             codec.startBitrate = (int)_bitRate;
+             codec.maxFramerate = (WebRtc_UWord8) _frameRate;
+             codec.width = (WebRtc_UWord16)_width;
+             codec.height = (WebRtc_UWord16)_height;
+             TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);// will also set and init the desired codec
+             i = NumberOfCodecs; // exit the loop
+        }
+    }
+
+    // register a decoder (same codec for decoder and encoder )
+    TEST(_vcm->RegisterReceiveCodec(&codec, 2) == VCM_OK);
+    /* Callback Settings */
+    VCMQMDecodeCompleCallback  _decodeCallback(_decodedFile);
+    _vcm->RegisterReceiveCallback(&_decodeCallback);
+    VCMNTEncodeCompleteCallback   _encodeCompleteCallback(_encodedFile, *this);
+    _vcm->RegisterTransportCallback(&_encodeCompleteCallback);
+    // encode and decode with the same vcm
+    _encodeCompleteCallback.RegisterReceiverVCM(_vcm);
+
+    //quality modes callback
+    QMTestVideoSettingsCallback QMCallback;
+    QMCallback.RegisterVCM(_vcm);
+    QMCallback.RegisterVPM(_vpm);
+    _vcm->RegisterVideoQMCallback(&QMCallback);
+
+    ///////////////////////
+    /// Start Test
+    ///////////////////////
+    _vpm->EnableTemporalDecimation(true);
+    _vpm->EnableContentAnalysis(true);
+    _vpm->SetInputFrameResampleMode(kFastRescaling);
+
+    // disabling internal VCM frame dropper
+    _vcm->EnableFrameDropper(false);
+
+    VideoFrame sourceFrame;
+    VideoFrame *decimatedFrame = NULL;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    double startTime = clock()/(double)CLOCKS_PER_SEC;
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0);
+
+    SendStatsTest sendStats;
+    sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+    _vcm->RegisterSendStatisticsCallback(&sendStats);
+
+    VideoContentMetrics* contentMetrics = NULL;
+    // setting user frame rate
+    _vpm->SetMaxFrameRate((WebRtc_UWord32)(_nativeFrameRate+ 0.5f));
+    // for starters: keeping native values:
+    _vpm->SetTargetResolution(_width, _height, (WebRtc_UWord32)(_frameRate+ 0.5f));
+    _decodeCallback.SetOriginalFrameDimensions(_nativeWidth, _nativeHeight);
+
+    //tmp  - disabling VPM frame dropping
+    _vpm->EnableTemporalDecimation(false);
+
+
+    WebRtc_Word32 ret = 0;
+      _numFramesDroppedVPM = 0;
+
+    // NOTE(review): feof() only reports true after a failed read, so the
+    // final iteration's fread may return 0 and trip the TEST macro.
+    while (feof(_sourceFile)== 0)
+    {
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+        _frameCnt++;
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_nativeHeight);
+        sourceFrame.SetWidth(_nativeWidth);
+
+        // Advance the 90 kHz RTP timestamp by one frame interval.
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(codec.maxFramerate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+
+        ret = _vpm->PreprocessFrame(&sourceFrame, &decimatedFrame);
+        if (ret  == 1)
+        {
+            printf("VD: frame drop %d \n",_frameCnt);
+            _numFramesDroppedVPM += 1;
+            continue; // frame drop
+        }
+        else if (ret < 0)
+        {
+            printf("Error in PreprocessFrame: %d\n", ret);
+            //exit(1);
+        }
+        contentMetrics = _vpm->ContentMetrics();
+        if (contentMetrics == NULL)
+        {
+            printf("error: contentMetrics = NULL\n");
+        }
+
+        // counting only encoding time
+        _encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC;
+
+        // BUG FIX: was "WebRtc_Word32 ret = ...", which shadowed the outer
+        // 'ret' declared above; reuse the outer variable instead.
+        ret = _vcm->AddVideoFrame(*decimatedFrame, contentMetrics);
+
+        _totalEncodeTime += clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())];
+
+        if (ret < 0)
+        {
+            printf("Error in AddFrame: %d\n", ret);
+            //exit(1);
+        }
+        _decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode
+        ret = _vcm->Decode();
+        _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())];
+        if (ret < 0)
+        {
+            printf("Error in Decode: %d\n", ret);
+            //exit(1);
+        }
+        if (_vcm->TimeUntilNextProcess() <= 0)
+        {
+            _vcm->Process();
+        }
+        // mimicking setTargetRates - update every 1 sec
+        // this will trigger QMSelect
+        if (_frameCnt%((int)_frameRate) == 0)
+        {
+            _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 1);
+            waitEvent->Wait(33);
+        }
+        waitEvent->Wait(33);
+        // check for bit rate update
+        if (change < numChanges && _frameCnt == updateFrameNum[change])
+        {
+            _bitRate = bitRateUpdate[change];
+            _frameRate = frameRateUpdate[change];
+            codec.startBitrate = (int)_bitRate;
+            codec.maxFramerate = (WebRtc_UWord8) _frameRate;
+            TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);// will also set and init the desired codec
+            change++;
+        }
+    }
+
+    double endTime = clock()/(double)CLOCKS_PER_SEC;
+    _testTotalTime = endTime - startTime;
+    _sumEncBytes = _encodeCompleteCallback.EncodedBytes();
+
+    // BUG FIX: tmpBuffer was allocated with new[], so it must be released
+    // with delete[] (plain delete on a new[] pointer is undefined behavior).
+    delete [] tmpBuffer;
+    delete waitEvent;
+    _vpm->Reset();
+    Teardown();
+    Print();
+    VideoProcessingModule::Destroy(_vpm);
+    return 0;
+}
+
+
+// implementing callback to be called from VCM to update VPM of frame rate and size
+QMTestVideoSettingsCallback::QMTestVideoSettingsCallback():
+_vpm(NULL),
+_vcm(NULL),
+// BUG FIX: _updated was left uninitialized, so Updated() could read an
+// indeterminate value and spuriously report true before any QM update.
+_updated(false)
+{
+    //
+}
+
+// Registers the VideoProcessingModule whose target resolution will be
+// updated from SetVideoQMSettings(). Does not take ownership.
+void
+QMTestVideoSettingsCallback::RegisterVPM(VideoProcessingModule *vpm)
+{
+    _vpm = vpm;
+}
+// Registers the VideoCodingModule whose send codec will be re-registered
+// from SetVideoQMSettings(). Does not take ownership.
+void
+QMTestVideoSettingsCallback::RegisterVCM(VideoCodingModule *vcm)
+{
+    _vcm = vcm;
+}
+
+// Reports whether new QM settings have arrived since the last call,
+// clearing the flag as a side effect (one-shot semantics).
+bool
+QMTestVideoSettingsCallback::Updated()
+{
+    const bool wasUpdated = _updated;
+    _updated = false;
+    return wasUpdated;
+}
+
+// Called by the VCM when quality-mode selection picks a new frame rate or
+// resolution. Forwards the new target to the VPM and, on success,
+// re-registers the send codec with the updated dimensions/frame rate and
+// raises the _updated flag (consumed by Updated()). Returns 0 on success.
+WebRtc_Word32
+QMTestVideoSettingsCallback::SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+                                                const WebRtc_UWord32 width,
+                                                const WebRtc_UWord32 height)
+{
+    WebRtc_Word32 retVal = 0;
+    // BUG FIX: the arguments are unsigned 32-bit values; use %u, not %d.
+    printf("QM updates: W = %u, H = %u, FR = %u, \n", width, height, frameRate);
+    retVal = _vpm->SetTargetResolution(width, height, frameRate);
+    //Initialize codec with new values - is this the best place to do it?
+    if (!retVal)
+    {
+        // first get current settings
+        VideoCodec currentCodec;
+        _vcm->SendCodec(&currentCodec);
+        // now set new values:
+        currentCodec.height = (WebRtc_UWord16)height;
+        currentCodec.width = (WebRtc_UWord16)width;
+        currentCodec.maxFramerate = (WebRtc_UWord8)frameRate;
+
+        // re-register encoder
+        retVal = _vcm->RegisterSendCodec(&currentCodec, 2, 1440);
+        _updated = true;
+    }
+
+    return retVal;
+}
+
+
+// Decoded Frame Callback Implementation.
+// Writes decoded frames to 'decodedFile'; dimensions and the scratch
+// buffer are set up lazily (see SetOriginalFrameDimensions and
+// buildInterpolator). Does not take ownership of the FILE*.
+VCMQMDecodeCompleCallback::VCMQMDecodeCompleCallback(FILE* decodedFile):
+_decodedFile(decodedFile),
+_decodedBytes(0),
+//_test(test),
+_origWidth(0),
+_origHeight(0),
+_decWidth(0),
+_decHeight(0),
+//_interpolator(NULL),
+_decBuffer(NULL),
+_frameCnt(0)
+{
+    //
+}
+
+// Releases the scratch decode buffer. (Deleting a NULL pointer is a no-op,
+// so no guard is required.)
+VCMQMDecodeCompleCallback::~VCMQMDecodeCompleCallback()
+{
+    delete [] _decBuffer;
+    _decBuffer = NULL;
+}
+// Receives each decoded frame from the VCM. If the frame already has the
+// original (target) dimensions it is written to file directly; otherwise a
+// scratch buffer sized for the original dimensions is (re)built and written
+// instead. Always returns VCM_OK.
+WebRtc_Word32
+VCMQMDecodeCompleCallback::FrameToRender(VideoFrame& videoFrame)
+{
+    if ((_origWidth == videoFrame.Width()) && (_origHeight == videoFrame.Height()))
+    {
+        fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _decodedFile);
+        _frameCnt++;
+        //printf("frame dec # %d", _frameCnt);
+        // no need for interpolator and decBuffer
+        if (_decBuffer != NULL)
+        {
+            delete [] _decBuffer;
+            _decBuffer = NULL;
+        }
+//        if (_interpolator != NULL)
+//        {
+//            deleteInterpolator(_interpolator);
+//            _interpolator = NULL;
+//        }
+        _decWidth = 0;
+        _decHeight = 0;
+    }
+    else
+    {
+        // Rebuild the scratch buffer whenever the decoded size changes.
+        if ((_decWidth != videoFrame.Width()) || (_decHeight != videoFrame.Height()))
+        {
+            _decWidth = videoFrame.Width();
+            _decHeight = videoFrame.Height();
+            buildInterpolator();
+        }
+
+        // NOTE(review): the interpolation step is commented out, so
+        // _decBuffer is written to file without ever being filled from
+        // videoFrame — the output contains uninitialized data on this path.
+//        interpolateFrame(_interpolator, videoFrame.Buffer(),_decBuffer);
+        fwrite(_decBuffer, 1, _origWidth*_origHeight*3/2, _decodedFile);
+        _frameCnt++;
+    }
+
+    _decodedBytes += videoFrame.Length();
+    return VCM_OK;
+}
+
+// Returns the total number of decoded bytes accumulated by FrameToRender().
+WebRtc_Word32
+VCMQMDecodeCompleCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+// Records the original (target) frame dimensions that decoded frames are
+// compared against in FrameToRender().
+void
+VCMQMDecodeCompleCallback::SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height)
+{
+    _origWidth = width;
+    _origHeight = height;
+}
+
+// (Re)allocates the scratch buffer used when decoded frames do not match
+// the original dimensions. Sized for an I420 frame at the original
+// dimensions. Returns 0 on success, -1 on allocation failure.
+WebRtc_Word32
+VCMQMDecodeCompleCallback::buildInterpolator()
+{
+    const WebRtc_UWord32 decFrameLength = _origWidth * _origHeight * 3 / 2;
+    delete [] _decBuffer; // deleting NULL is a no-op
+    _decBuffer = new WebRtc_UWord8[decFrameLength];
+    return (_decBuffer == NULL) ? -1 : 0;
+}
diff --git a/trunk/src/modules/video_coding/main/test/quality_modes_test.h b/trunk/src/modules/video_coding/main/test/quality_modes_test.h
new file mode 100644
index 0000000..87fa01f
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/quality_modes_test.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_QUALITY_MODSE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_QUALITY_MODSE_TEST_H_
+
+#include "video_processing.h"
+#include "normal_test.h"
+#include "video_coding_defines.h"
+
+int qualityModeTest();
+
+// Runs the VCM quality-modes scenario on top of NormalTest: encodes a
+// source sequence through the VideoProcessingModule and the VCM, decodes it
+// back, and reports bitrate/timing/PSNR (optionally SSIM) statistics.
+class QualityModesTest : public NormalTest
+{
+public:
+    QualityModesTest(webrtc::VideoCodingModule* vcm,
+                     webrtc::TickTimeBase* clock);
+    virtual ~QualityModesTest();
+    // Runs the whole test; returns 0 on completion.
+    WebRtc_Word32 Perform();
+
+private:
+
+    void Setup();
+    void Print();
+    void Teardown();
+    void SsimComp();
+
+    // Created in Perform(), destroyed there as well.
+    webrtc::VideoProcessingModule*  _vpm;
+
+    // Target (encoder) dimensions/frame rate.
+    WebRtc_UWord32                      _width;
+    WebRtc_UWord32                      _height;
+    float                               _frameRate;
+    // Native (source sequence) dimensions/frame rate.
+    WebRtc_UWord32                      _nativeWidth;
+    WebRtc_UWord32                      _nativeHeight;
+    float                               _nativeFrameRate;
+
+    WebRtc_UWord32                      _numFramesDroppedVPM;
+    // When true, SSIM is computed in addition to PSNR in Print().
+    bool                                _flagSSIM;
+
+}; // end of QualityModesTest class
+
+
+// Receive-side callback that writes each decoded frame to a file; frames
+// whose size differs from the original dimensions go through a scratch
+// buffer (interpolation support is currently disabled in the .cc file).
+class VCMQMDecodeCompleCallback: public webrtc::VCMReceiveCallback
+{
+public:
+    VCMQMDecodeCompleCallback(FILE* decodedFile);
+    virtual ~VCMQMDecodeCompleCallback();
+    void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
+    // will write decoded frame into file
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+    WebRtc_Word32 DecodedBytes();
+    void SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height);
+    WebRtc_Word32 buildInterpolator();
+private:
+    FILE*                _decodedFile;   // not owned
+    WebRtc_UWord32       _decodedBytes;
+   // QualityModesTest&  _test;
+    WebRtc_UWord32       _origWidth;
+    WebRtc_UWord32       _origHeight;
+    WebRtc_UWord32       _decWidth;
+    WebRtc_UWord32       _decHeight;
+//    VideoInterpolator* _interpolator;
+    WebRtc_UWord8*       _decBuffer;     // scratch buffer, owned
+    WebRtc_UWord32       _frameCnt; // debug
+
+}; // end of VCMQMDecodeCompleCallback class
+
+// QM-settings callback: when the VCM selects new quality-mode settings, it
+// forwards the new resolution/frame rate to the registered VPM and
+// re-registers the send codec on the registered VCM.
+class QMTestVideoSettingsCallback : public webrtc::VCMQMSettingsCallback
+{
+public:
+    QMTestVideoSettingsCallback();
+    // update VPM with QM settings
+    WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+                                     const WebRtc_UWord32 width,
+                                     const WebRtc_UWord32 height);
+    // register VPM used by test
+    void RegisterVPM(webrtc::VideoProcessingModule* vpm);
+    void RegisterVCM(webrtc::VideoCodingModule* vcm);
+    // One-shot: returns true once per received update, then clears.
+    bool Updated();
+
+private:
+    webrtc::VideoProcessingModule*         _vpm;   // not owned
+    webrtc::VideoCodingModule*             _vcm;   // not owned
+    bool                                   _updated;
+};
+
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_QUALITY_MODSE_TEST_H_
diff --git a/trunk/src/modules/video_coding/main/test/receiver_tests.h b/trunk/src/modules/video_coding/main/test/receiver_tests.h
new file mode 100644
index 0000000..33d9f5f
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/receiver_tests.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
+
+#include "video_coding.h"
+#include "module_common_types.h"
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "typedefs.h"
+#include "rtp_player.h"
+#include "test_util.h"
+
+#include <string>
+#include <stdio.h>
+
+// Forwards RTP payload data received by the RTP module into the given
+// VideoCodingModule. Does not take ownership of the VCM.
+class RtpDataCallback : public webrtc::RtpData
+{
+public:
+    RtpDataCallback(webrtc::VideoCodingModule* vcm)
+        : _vcm(vcm) {};
+
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                              const WebRtc_UWord16 payloadSize,
+                                              const webrtc::WebRtcRTPHeader* rtpHeader);
+private:
+    webrtc::VideoCodingModule* _vcm;
+};
+
+// Receive callback that writes rendered frames to the given output file;
+// file handles are opened lazily and closed in the destructor.
+class FrameReceiveCallback : public webrtc::VCMReceiveCallback
+{
+public:
+    FrameReceiveCallback(std::string outFilename) :
+        _outFilename(outFilename),
+        _outFile(NULL),
+        _timingFile(NULL) {}
+
+    virtual ~FrameReceiveCallback();
+
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+
+private:
+    std::string     _outFilename;
+    FILE*           _outFile;     // opened on demand
+    FILE*           _timingFile;  // opened on demand
+};
+
+// Plain aggregate passed to the worker-thread functions below so they can
+// share one VCM instance and one RTP player. Owns neither reference.
+class SharedState
+{
+public:
+    SharedState(webrtc::VideoCodingModule& vcm, RTPPlayer& rtpPlayer) :
+        _vcm(vcm),
+        _rtpPlayer(rtpPlayer) {}
+    webrtc::VideoCodingModule&  _vcm;
+    RTPPlayer&              _rtpPlayer;
+};
+
+
+// Receiver-side test entry points; each runs one scenario and returns 0 on
+// success.
+int RtpPlay(CmdArgs& args);
+int RtpPlayMT(CmdArgs& args,
+              int releaseTest = 0,
+              webrtc::VideoCodecType releaseTestVideoType = webrtc::kVideoCodecVP8);
+int ReceiverTimingTests(CmdArgs& args);
+int JitterBufferTest(CmdArgs& args);
+int DecodeFromStorageTest(CmdArgs& args);
+
+// Thread functions: each takes a SharedState* (as void*) and returns true
+// to keep running.
+bool ProcessingThread(void* obj);
+bool RtpReaderThread(void* obj);
+bool DecodeThread(void* obj);
+bool NackThread(void* obj);
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
diff --git a/trunk/src/modules/video_coding/main/test/receiver_timing_tests.cc b/trunk/src/modules/video_coding/main/test/receiver_timing_tests.cc
new file mode 100644
index 0000000..0b09256
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/receiver_timing_tests.cc
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "trace.h"
+#include "../source/event.h"
+#include "../source/internal_defines.h"
+#include "timing.h"
+#include "test_macros.h"
+#include "test_util.h"
+
+#include <cstdio>
+#include <cstdlib>
+#include <cmath>
+
+using namespace webrtc;
+
+// Returns the larger of |a| and |b| (|b| on ties, matching a > b ? a : b).
+float vcmFloatMax(float a, float b)
+{
+    if (a > b)
+    {
+        return a;
+    }
+    return b;
+}
+
+// Returns the smaller of |a| and |b| (|b| on ties, matching a < b ? a : b).
+float vcmFloatMin(float a, float b)
+{
+    if (a < b)
+    {
+        return a;
+    }
+    return b;
+}
+
+// Pi to full double precision via atan(1) = pi/4.
+double const pi = 4*std::atan(1.0);
+
+// Normally distributed pseudo-random numbers via the Box-Muller transform,
+// driven by std::rand() (so results are reproducible given a fixed seed).
+class GaussDist
+{
+public:
+    // Returns one sample with mean |m| and standard deviation |stdDev|.
+    static float RandValue(float m, float stdDev) // returns a single normally distributed number
+    {
+        // +1.0 shifts rand()'s range so r1, r2 land in (0, 1]; this keeps
+        // log(r1) finite below.
+        float r1 = static_cast<float>((std::rand() + 1.0)/(RAND_MAX + 1.0)); // gives equal distribution in (0, 1]
+        float r2 = static_cast<float>((std::rand() + 1.0)/(RAND_MAX + 1.0));
+        return m + stdDev * static_cast<float>(std::sqrt(-2*std::log(r1))*std::cos(2*pi*r2));
+    }
+};
+
+// Deterministic unit test of VCMTiming: drives a simulated clock and a
+// 90 kHz RTP timestamp through jitter-delay changes, decode-time updates,
+// a minimum-total-delay override, a timestamp jump and a timestamp wrap,
+// checking MaxWaitingTime() at each step.  Returns 0; test failures are
+// counted via the TEST() macro.
+// NOTE(review): |args| is currently unused by this test.
+int ReceiverTimingTests(CmdArgs& args)
+{
+    // Make sure this test is never executed with simulated events.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+
+    // Set up trace
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "receiverTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    // A static random seed
+    srand(0);
+
+    TickTimeBase clock;
+    VCMTiming timing(&clock);
+    // Simulated wall clock in ms; every use rounds to integer ms via +0.5.
+    float clockInMs = 0.0;
+    WebRtc_UWord32 waitTime = 0;
+    WebRtc_UWord32 jitterDelayMs = 0;
+    WebRtc_UWord32 maxDecodeTimeMs = 0;
+    WebRtc_UWord32 timeStamp = 0;
+
+    timing.Reset(static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+    timing.UpdateCurrentDelay(timeStamp);
+
+    timing.Reset(static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+    timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    jitterDelayMs = 20;
+    timing.SetRequiredDelay(jitterDelayMs);
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    // First update initializes the render time. Since we have no decode delay
+    // we get waitTime = renderTime - now - renderDelay = jitter
+    TEST(waitTime == jitterDelayMs);
+
+    // Advance one second (90000 ticks of the 90 kHz RTP clock) and raise the
+    // required delay by more than the per-second ramp limit.
+    jitterDelayMs += VCMTiming::kDelayMaxChangeMsPerS + 10;
+    timeStamp += 90000;
+    clockInMs += 1000.0f;
+    timing.SetRequiredDelay(jitterDelayMs);
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    // Since we gradually increase the delay we only get
+    // 100 ms every second.
+    TEST(waitTime == jitterDelayMs - 10);
+
+    timeStamp += 90000;
+    clockInMs += 1000.0;
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(waitTime == jitterDelayMs);
+
+    // 300 incoming frames without jitter, verify that this gives the exact wait time
+    for (int i=0; i < 300; i++)
+    {
+        clockInMs += 1000.0f/30.0f;
+        timeStamp += 3000;
+        timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    }
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(waitTime == jitterDelayMs);
+
+    // Add decode time estimates
+    for (int i=0; i < 10; i++)
+    {
+        WebRtc_Word64 startTimeMs = static_cast<WebRtc_Word64>(clockInMs + 0.5);
+        clockInMs += 10.0f;
+        timing.StopDecodeTimer(timeStamp, startTimeMs, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+        timeStamp += 3000;
+        clockInMs += 1000.0f/30.0f - 10.0f;
+        timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    }
+    maxDecodeTimeMs = 10;
+    timing.SetRequiredDelay(jitterDelayMs);
+    clockInMs += 1000.0f;
+    timeStamp += 90000;
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(waitTime == jitterDelayMs);
+
+    WebRtc_UWord32 totalDelay1 = timing.TargetVideoDelay();
+    WebRtc_UWord32 minTotalDelayMs = 200;
+    timing.SetMinimumTotalDelay(minTotalDelayMs);
+    clockInMs += 5000.0f;
+    timeStamp += 5*90000;
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    WebRtc_UWord32 totalDelay2 = timing.TargetVideoDelay();
+    // We should at least have minTotalDelayMs - decodeTime (10) - renderTime (10) to wait
+    TEST(waitTime == minTotalDelayMs - maxDecodeTimeMs - 10);
+    // The total video delay should not increase with the extra delay,
+    // the extra delay should be independent.
+    TEST(totalDelay1 == totalDelay2);
+
+    // Reset min total delay
+    timing.SetMinimumTotalDelay(0);
+    clockInMs += 5000.0f;
+    timeStamp += 5*90000;
+    timing.UpdateCurrentDelay(timeStamp);
+
+    // A sudden increase in timestamp of 2.1 seconds
+    clockInMs += 1000.0f/30.0f;
+    timeStamp += static_cast<WebRtc_UWord32>(2.1*90000 + 0.5);
+    WebRtc_Word64 ret = timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(ret == -1);
+    timing.Reset();
+
+    // This test produces a trace which can be parsed with plotTimingTest.m. The plot
+    // can be used to see that the timing is reasonable under noise, and that the
+    // gradual transition between delays works as expected.
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,  "Stochastic test 1");
+
+    jitterDelayMs = 60;
+    maxDecodeTimeMs = 10;
+
+    timeStamp = static_cast<WebRtc_UWord32>(-10000); // To produce a wrap
+    clockInMs = 10000.0f;
+    timing.Reset(static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+    float noise = 0.0f;
+    // 1400 frames at ~30 fps with Gaussian timing noise; the jitter delay and
+    // minimum total delay are changed at fixed frame indices below.
+    for (int i=0; i < 1400; i++)
+    {
+        if (i == 400)
+        {
+            jitterDelayMs = 30;
+        }
+        else if (i == 700)
+        {
+            jitterDelayMs = 100;
+        }
+        else if (i == 1000)
+        {
+            minTotalDelayMs = 200;
+            timing.SetMinimumTotalDelay(minTotalDelayMs);
+        }
+        else if (i == 1200)
+        {
+            minTotalDelayMs = 0;
+            timing.SetMinimumTotalDelay(minTotalDelayMs);
+        }
+        WebRtc_Word64 startTimeMs = static_cast<WebRtc_Word64>(clockInMs + 0.5);
+        // Noise is clamped to keep the simulated times sane.
+        noise = vcmFloatMin(vcmFloatMax(GaussDist::RandValue(0, 2), -10.0f), 30.0f);
+        clockInMs += 10.0f;
+        timing.StopDecodeTimer(timeStamp, startTimeMs, static_cast<WebRtc_Word64>(clockInMs + noise + 0.5));
+        timeStamp += 3000;
+        clockInMs += 1000.0f/30.0f - 10.0f;
+        noise = vcmFloatMin(vcmFloatMax(GaussDist::RandValue(0, 8), -15.0f), 15.0f);
+        timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + noise + 0.5));
+        timing.SetRequiredDelay(jitterDelayMs);
+        timing.UpdateCurrentDelay(timeStamp);
+        waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+            static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,  "timeStamp=%u clock=%u maxWaitTime=%u", timeStamp,
+            static_cast<WebRtc_UWord32>(clockInMs + 0.5), waitTime);
+
+        WebRtc_Word64 renderTimeMs = timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,
+                   "timeStamp=%u renderTime=%u",
+                   timeStamp,
+                   MaskWord64ToUWord32(renderTimeMs));
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,  "End Stochastic test 1");
+
+    printf("\nVCM Timing Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/trunk/src/modules/video_coding/main/test/release_test.cc b/trunk/src/modules/video_coding/main/test/release_test.cc
new file mode 100644
index 0000000..8e3a073
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/release_test.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ReleaseTest.h"
+#include "ReceiverTests.h"
+#include "TestMacros.h"
+#include "MediaOptTest.h"
+#include "CodecDataBaseTest.h"
+#include "GenericCodecTest.h"
+
+
+
+
+// Runs the automatic VCM release tests, then the tests whose output must be
+// inspected manually.  Always returns 0; failures are tallied by TEST().
+// NOTE(review): every test here is called without arguments, while
+// receiver_tests.h in this same drop declares e.g.
+// ReceiverTimingTests(CmdArgs&).  This file includes differently-cased
+// headers (ReleaseTest.h etc.) and appears to target an older API -- confirm
+// it is still meant to build.
+int ReleaseTest()
+{
+    printf("VCM RELEASE TESTS \n\n");
+    
+    // Automatic tests
+
+    printf("Testing receive side timing...\n");
+    TEST(ReceiverTimingTests() == 0);
+    
+    printf("Testing jitter buffer...\n");
+    TEST(JitterBufferTest() == 0);
+    
+    printf("Testing Codec Data Base...\n");
+    TEST(CodecDBTest() == 0);
+    
+    printf("Testing Media Optimization....\n");
+    TEST(VCMMediaOptTest(1) == 0); 
+
+    // Tests requiring verification
+    
+    printf("Testing Multi thread send-receive....\n");
+    TEST(MTRxTxTest() == 0);
+    printf("Verify by viewing output file MTRxTx_out.yuv \n");
+    
+    return 0;
+}
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/main/test/release_test.h b/trunk/src/modules/video_coding/main/test/release_test.h
new file mode 100644
index 0000000..2578160
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/release_test.h
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RELEASE_TEST_H
+#define RELEASE_TEST_H
+
+int ReleaseTest();
+int ReleaseTestPart2();
+
+#endif
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/main/test/release_test_pt2.cc b/trunk/src/modules/video_coding/main/test/release_test_pt2.cc
new file mode 100644
index 0000000..5ff48e5
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/release_test_pt2.cc
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ReleaseTest.h"
+#include "ReceiverTests.h"
+#include "TestMacros.h"
+#include "MediaOptTest.h"
+#include "CodecDataBaseTest.h"
+#include "GenericCodecTest.h"
+
+
+
+
+// Second half of the release suite: tests that need manual inspection of the
+// produced YUV output.  Always returns 0; failures are tallied by TEST().
+int ReleaseTestPart2()
+{
+    // NOTE(review): this message has no trailing '\n'; stdout may not flush
+    // it before the tests start -- confirm whether that is acceptable.
+    printf("Verify that TICK_TIME_DEBUG and EVENT_DEBUG are uncommented");
+    // Tests requiring verification
+
+    printf("Testing Generic Codecs...\n");
+    TEST(VCMGenericCodecTest() == 0);
+    printf("Verify by viewing output file GCTest_out.yuv \n");
+    
+    return 0;
+}
\ No newline at end of file
diff --git a/trunk/src/modules/video_coding/main/test/rtp_player.cc b/trunk/src/modules/video_coding/main/test/rtp_player.cc
new file mode 100644
index 0000000..5361e31
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/rtp_player.cc
@@ -0,0 +1,443 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_player.h"
+
+#include <cstdlib>
+#ifdef WIN32
+#include <windows.h>
+#include <Winsock2.h>
+#else
+#include <arpa/inet.h>
+#endif
+
+#include "../source/internal_defines.h"
+#include "gtest/gtest.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "rtp_rtcp.h"
+
+using namespace webrtc;
+
+// Deep-copies |rtp_data| so this packet owns its own buffer (freed in the
+// destructor); resend_time_ms of -1 means "no resend scheduled yet".
+// The original initialized |data| to the caller's pointer and then
+// immediately overwrote it with new[] in the body -- a confusing dead store;
+// allocate directly in the initializer list instead (using rtp_length, since
+// |data| is declared before |length| and is initialized first).
+RawRtpPacket::RawRtpPacket(uint8_t* rtp_data, uint16_t rtp_length)
+    : data(new uint8_t[rtp_length]),
+      length(rtp_length),
+      resend_time_ms(-1) {
+  memcpy(data, rtp_data, length);
+}
+
+// Frees the owned copy of the packet data.
+RawRtpPacket::~RawRtpPacket() {
+  delete [] data;
+}
+
+// Opens a debug log in the current working directory; all logging below is
+// guarded on the fopen having succeeded, so failure to open is non-fatal.
+LostPackets::LostPackets()
+    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+      loss_count_(0),
+      debug_file_(NULL),
+      packets_() {
+  debug_file_ = fopen("PacketLossDebug.txt", "w");
+}
+
+// Closes the debug log and frees any packets never handed back via
+// NextPacketToResend().
+LostPackets::~LostPackets() {
+  if (debug_file_) {
+      fclose(debug_file_);
+  }
+  while (!packets_.empty()) {
+    delete packets_.front();
+    packets_.pop_front();
+  }
+  delete crit_sect_;
+}
+
+// Records a dropped packet for a later, NACK-triggered resend.  Takes
+// ownership of |packet|.
+void LostPackets::AddPacket(RawRtpPacket* packet) {
+  CriticalSectionScoped cs(crit_sect_);
+  packets_.push_back(packet);
+  // RTP sequence number: bytes 2-3 of the header, network byte order.
+  uint16_t seq_num = (packet->data[2] << 8) + packet->data[3];
+  if (debug_file_ != NULL) {
+    fprintf(debug_file_, "%u Lost packet: %u\n", loss_count_, seq_num);
+  }
+  ++loss_count_;
+}
+
+// Schedules the lost packet with |resend_seq_num| for retransmission at
+// |resend_time_ms|.  Asserts if the sequence number is not in the lost list
+// (i.e. a NACK arrived for a packet that was never dropped here).
+void LostPackets::SetResendTime(uint16_t resend_seq_num,
+                                int64_t resend_time_ms,
+                                int64_t now_ms) {
+  CriticalSectionScoped cs(crit_sect_);
+  for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
+    // RTP sequence number: bytes 2-3 of the header, network byte order.
+    const uint16_t seq_num = ((*it)->data[2] << 8) +
+        (*it)->data[3];
+    if (resend_seq_num == seq_num) {
+      // Ignore re-NACKs within 10 ms of the previously scheduled resend,
+      // so back-to-back NACKs don't cause duplicate retransmissions.
+      if ((*it)->resend_time_ms + 10 < now_ms) {
+        if (debug_file_ != NULL) {
+          fprintf(debug_file_, "Resend %u at %u\n", seq_num,
+                  MaskWord64ToUWord32(resend_time_ms));
+        }
+        (*it)->resend_time_ms = resend_time_ms;
+      }
+      return;
+    }
+  }
+  assert(false);
+}
+
+// Pops the first packet whose scheduled resend time (!= -1) has been reached
+// by |timeNow|, or NULL if none is due.  The caller takes ownership of the
+// returned packet.
+RawRtpPacket* LostPackets::NextPacketToResend(int64_t timeNow) {
+  CriticalSectionScoped cs(crit_sect_);
+  for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
+    if (timeNow >= (*it)->resend_time_ms && (*it)->resend_time_ms != -1) {
+      RawRtpPacket* packet = *it;
+      it = packets_.erase(it);
+      return packet;
+    }
+  }
+  return NULL;
+}
+
+// Counts the lost packets that already have a resend scheduled
+// (resend_time_ms >= 0); packets never NACKed are excluded.
+int LostPackets::NumberOfPacketsToResend() const {
+  CriticalSectionScoped cs(crit_sect_);
+  int count = 0;
+  for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
+      ++it) {
+    if ((*it)->resend_time_ms >= 0) {
+        count++;
+    }
+  }
+  return count;
+}
+
+// Logs that |seq_num| was actually retransmitted at |now_ms|; purely a
+// debug-file side effect, no state changes.
+void LostPackets::SetPacketResent(uint16_t seq_num, int64_t now_ms) {
+  CriticalSectionScoped cs(crit_sect_);
+  if (debug_file_ != NULL) {
+    fprintf(debug_file_, "Resent %u at %u\n", seq_num,
+            MaskWord64ToUWord32(now_ms));
+  }
+}
+
+// Dumps loss statistics and the still-lost sequence numbers to stdout.
+void LostPackets::Print() const {
+  CriticalSectionScoped cs(crit_sect_);
+  // loss_count_ and NumberOfPacketsToResend() are signed ints, so print
+  // them with %d; the original used %u, a signed/unsigned format mismatch.
+  printf("Lost packets: %d\n", loss_count_);
+  // NOTE(review): NumberOfPacketsToResend() re-acquires crit_sect_ while we
+  // already hold it; this relies on the wrapper being recursive -- confirm.
+  printf("Packets waiting to be resent: %d\n",
+         NumberOfPacketsToResend());
+  printf("Packets still lost: %u\n",
+         static_cast<unsigned int>(packets_.size()));
+  printf("Sequence numbers:\n");
+  for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
+      ++it) {
+    // RTP sequence number: bytes 2-3 of the header, network byte order.
+    uint16_t seq_num = ((*it)->data[2] << 8) + (*it)->data[3];
+    printf("%u, ", seq_num);
+  }
+  printf("\n");
+}
+
+// Opens the rtpplay capture |filename| and creates the RTP module that will
+// feed packets to |callback|.  A failed fopen is not checked here; it is
+// detected later by ReadHeader() returning -1.  Neither |callback| nor
+// |clock| is owned.
+RTPPlayer::RTPPlayer(const char* filename,
+                     RtpData* callback,
+                     TickTimeBase* clock)
+:
+_clock(clock),
+_rtpModule(*RtpRtcp::CreateRtpRtcp(1, false)),
+_nextRtpTime(0),
+_dataCallback(callback),
+_firstPacket(true),
+_lossRate(0.0f),
+_nackEnabled(false),
+_resendPacketCount(0),
+_noLossStartup(100),
+_endOfFile(false),
+_rttMs(0),
+_firstPacketRtpTime(0),
+_firstPacketTimeMs(0),
+_reorderBuffer(NULL),
+_reordering(false),
+_nextPacket(),
+_nextPacketLength(0),
+_randVec(),
+_randVecPos(0)
+{
+    _rtpFile = fopen(filename, "rb");
+    memset(_nextPacket, 0, sizeof(_nextPacket));
+}
+
+// Destroys the RTP module and releases the capture file and any packet still
+// parked in the reorder buffer.
+RTPPlayer::~RTPPlayer()
+{
+    RtpRtcp::DestroyRtpRtcp(&_rtpModule);
+    if (_rtpFile != NULL)
+    {
+        fclose(_rtpFile);
+    }
+    if (_reorderBuffer != NULL)
+    {
+        delete _reorderBuffer;
+        _reorderBuffer = NULL;
+    }
+}
+
+// Seeds the reproducible loss pattern, configures the RTP module, registers
+// every payload type in |payloadList|, validates the rtpplay file header and
+// pre-reads the first packet.  Returns 0 on success, -1 on any failure.
+WebRtc_Word32 RTPPlayer::Initialize(const PayloadTypeList* payloadList)
+{
+    // Fixed seed: the pre-generated random vector makes the simulated packet
+    // loss identical across runs.
+    std::srand(321);
+    for (int i=0; i < RAND_VEC_LENGTH; i++)
+    {
+        _randVec[i] = rand();
+    }
+    _randVecPos = 0;
+    WebRtc_Word32 ret = _rtpModule.SetNACKStatus(kNackOff);
+    if (ret < 0)
+    {
+        return -1;
+    }
+    ret = _rtpModule.InitReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+
+    _rtpModule.InitSender();
+    _rtpModule.SetRTCPStatus(kRtcpNonCompound);
+    _rtpModule.SetTMMBRStatus(true);
+
+    ret = _rtpModule.RegisterIncomingDataCallback(_dataCallback);
+    if (ret < 0)
+    {
+        return -1;
+    }
+    // Register payload types
+    for (PayloadTypeList::const_iterator it = payloadList->begin();
+        it != payloadList->end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec videoCodec;
+            // Zero-initialize the struct and guarantee NUL termination: the
+            // original strncpy(..., 32) left plName unterminated whenever the
+            // codec name filled all 32 bytes, and passed the remaining
+            // VideoCodec fields uninitialized.
+            memset(&videoCodec, 0, sizeof(videoCodec));
+            strncpy(videoCodec.plName, payloadType->name.c_str(),
+                    sizeof(videoCodec.plName) - 1);
+            videoCodec.plType = payloadType->payloadType;
+            if (_rtpModule.RegisterReceivePayload(videoCodec) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+    if (ReadHeader() < 0)
+    {
+        return -1;
+    }
+    memset(_nextPacket, 0, sizeof(_nextPacket));
+    _nextPacketLength = ReadPacket(_nextPacket, &_nextRtpTime);
+    return 0;
+}
+
+// Validates the capture file's text banner (#!rtpplay1.0 or #!RTPencode1.0)
+// and consumes the fixed binary header.  The start time, source, port and
+// padding fields are byte-swapped but otherwise discarded.  Returns 0 on
+// success, -1 if the file is missing or the format is wrong.
+WebRtc_Word32 RTPPlayer::ReadHeader()
+{
+    char firstline[FIRSTLINELEN];
+    if (_rtpFile == NULL)
+    {
+        return -1;
+    }
+    EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, _rtpFile) != NULL);
+    if(strncmp(firstline,"#!rtpplay",9) == 0) {
+        if(strncmp(firstline,"#!rtpplay1.0",12) != 0){
+            printf("ERROR: wrong rtpplay version, must be 1.0\n");
+            return -1;
+        }
+    }
+    else if (strncmp(firstline,"#!RTPencode",11) == 0) {
+        if(strncmp(firstline,"#!RTPencode1.0",14) != 0){
+            printf("ERROR: wrong RTPencode version, must be 1.0\n");
+            return -1;
+        }
+    }
+    else {
+        printf("ERROR: wrong file format of input file\n");
+        return -1;
+    }
+
+    WebRtc_UWord32 start_sec;
+    WebRtc_UWord32 start_usec;
+    WebRtc_UWord32 source;
+    WebRtc_UWord16 port;
+    WebRtc_UWord16 padding;
+
+    // Fields are stored big-endian in the file; ntoh* converts to host order.
+    EXPECT_GT(fread(&start_sec, 4, 1, _rtpFile), 0u);
+    start_sec=ntohl(start_sec);
+    EXPECT_GT(fread(&start_usec, 4, 1, _rtpFile), 0u);
+    start_usec=ntohl(start_usec);
+    EXPECT_GT(fread(&source, 4, 1, _rtpFile), 0u);
+    source=ntohl(source);
+    EXPECT_GT(fread(&port, 2, 1, _rtpFile), 0u);
+    port=ntohs(port);
+    EXPECT_GT(fread(&padding, 2, 1, _rtpFile), 0u);
+    padding=ntohs(padding);
+    return 0;
+}
+
+// Milliseconds until the next recorded packet is due, comparing the RTP
+// capture timeline against the wall clock, both anchored at the first
+// packet.  Returns 0 when the packet is already (over)due.
+WebRtc_UWord32 RTPPlayer::TimeUntilNextPacket() const
+{
+    const WebRtc_Word64 captureAdvanceMs = _nextRtpTime - _firstPacketRtpTime;
+    const WebRtc_Word64 wallAdvanceMs =
+        _clock->MillisecondTimestamp() - _firstPacketTimeMs;
+    const WebRtc_Word64 remainingMs = captureAdvanceMs - wallAdvanceMs;
+    return (remainingMs < 0) ? 0 : static_cast<WebRtc_UWord32>(remainingMs);
+}
+
+// Pumps the player once: first flushes every NACK-scheduled retransmission
+// that is due at |timeNow|, then (if the next recorded packet is due) sends
+// it, honoring the one-packet reorder buffer.  Returns 1 when the file is
+// exhausted and nothing remains to resend, 0 to keep pumping, or a negative
+// error from SendPacket().
+WebRtc_Word32 RTPPlayer::NextPacket(const WebRtc_Word64 timeNow)
+{
+    // Send any packets ready to be resent,
+    RawRtpPacket* resend_packet = _lostPackets.NextPacketToResend(timeNow);
+    while (resend_packet != NULL) {
+      const uint16_t seqNo = (resend_packet->data[2] << 8) +
+          resend_packet->data[3];
+      printf("Resend: %u\n", seqNo);
+      int ret = SendPacket(resend_packet->data, resend_packet->length);
+      delete resend_packet;
+      // NOTE(review): the counter increments even when SendPacket() fails
+      // (ret < 0) -- confirm that is the intended accounting.
+      _resendPacketCount++;
+      if (ret > 0) {
+        _lostPackets.SetPacketResent(seqNo, _clock->MillisecondTimestamp());
+      } else if (ret < 0) {
+        return ret;
+      }
+      resend_packet = _lostPackets.NextPacketToResend(timeNow);
+    }
+
+    // Send any packets from rtp file
+    if (!_endOfFile && (TimeUntilNextPacket() == 0 || _firstPacket))
+    {
+        _rtpModule.Process();
+        if (_firstPacket)
+        {
+            // Anchor the capture timeline to the wall clock at first packet.
+            _firstPacketRtpTime = static_cast<WebRtc_Word64>(_nextRtpTime);
+            _firstPacketTimeMs = _clock->MillisecondTimestamp();
+        }
+        // Reordering: park the current packet and deliver it right after the
+        // following one, swapping the order of each buffered pair.
+        if (_reordering && _reorderBuffer == NULL)
+        {
+            _reorderBuffer = new RawRtpPacket(reinterpret_cast<WebRtc_UWord8*>(_nextPacket), static_cast<WebRtc_UWord16>(_nextPacketLength));
+            return 0;
+        }
+        WebRtc_Word32 ret = SendPacket(reinterpret_cast<WebRtc_UWord8*>(_nextPacket), static_cast<WebRtc_UWord16>(_nextPacketLength));
+        if (_reordering && _reorderBuffer != NULL)
+        {
+            RawRtpPacket* rtpPacket = _reorderBuffer;
+            _reorderBuffer = NULL;
+            SendPacket(rtpPacket->data, rtpPacket->length);
+            delete rtpPacket;
+        }
+        _firstPacket = false;
+        if (ret < 0)
+        {
+            return ret;
+        }
+        _nextPacketLength = ReadPacket(_nextPacket, &_nextRtpTime);
+        if (_nextPacketLength < 0)
+        {
+            // ReadPacket() returns -1 at end of file.
+            _endOfFile = true;
+            return 0;
+        }
+        else if (_nextPacketLength == 0)
+        {
+            return 0;
+        }
+    }
+    if (_endOfFile && _lostPackets.NumberOfPacketsToResend() == 0)
+    {
+        return 1;
+    }
+    return 0;
+}
+
+// Delivers one packet to the RTP module, simulating loss: when the
+// pre-generated random draw falls below _lossRate (after the no-loss
+// startup window of 100 packets has elapsed), the packet is either stored
+// for NACK resend (returns 0) or, with NACK disabled, silently dropped.
+// Returns 1 on delivery/drop, -1 if the RTP module rejects the packet.
+WebRtc_Word32 RTPPlayer::SendPacket(WebRtc_UWord8* rtpData, WebRtc_UWord16 rtpLen)
+{
+    // Same (0, 1] mapping as the loss draw in GaussDist-style tests; using
+    // the pre-seeded _randVec keeps the loss pattern reproducible.
+    if ((_randVec[(_randVecPos++) % RAND_VEC_LENGTH] + 1.0)/(RAND_MAX + 1.0) < _lossRate &&
+        _noLossStartup < 0)
+    {
+        if (_nackEnabled)
+        {
+            const WebRtc_UWord16 seqNo = (rtpData[2] << 8) + rtpData[3];
+            printf("Throw: %u\n", seqNo);
+            _lostPackets.AddPacket(new RawRtpPacket(rtpData, rtpLen));
+            return 0;
+        }
+    }
+    else if (rtpLen > 0)
+    {
+        WebRtc_Word32 ret = _rtpModule.IncomingPacket(rtpData, rtpLen);
+        if (ret < 0)
+        {
+            return -1;
+        }
+    }
+    if (_noLossStartup >= 0)
+    {
+        _noLossStartup--;
+    }
+    return 1;
+}
+
+// Reads one rtpplay record: 16-bit record length, 16-bit payload length
+// (plen), 32-bit time offset (written to |offset|), then the packet bytes.
+// Returns plen (0 indicates an RTCP packet) or -1 on EOF/short read.
+WebRtc_Word32 RTPPlayer::ReadPacket(WebRtc_Word16* rtpdata, WebRtc_UWord32* offset)
+{
+    WebRtc_UWord16 length, plen;
+
+    if (fread(&length,2,1,_rtpFile)==0)
+        return(-1);
+    length=ntohs(length);
+
+    if (fread(&plen,2,1,_rtpFile)==0)
+        return(-1);
+    plen=ntohs(plen);
+
+    if (fread(offset,4,1,_rtpFile)==0)
+        return(-1);
+    *offset=ntohl(*offset);
+
+    // Use length here because a plen of 0 specifies rtcp
+    // NOTE(review): length < HDR_SIZE would wrap this unsigned subtraction
+    // to a huge value; a corrupt file could overrun |rtpdata| -- confirm
+    // inputs are trusted.
+    length = (WebRtc_UWord16) (length - HDR_SIZE);
+    if (fread((unsigned short *) rtpdata,1,length,_rtpFile) != length)
+        return(-1);
+
+#ifdef JUNK_DATA
+    // destroy the RTP payload with random data
+    if (plen > 12) { // ensure that we have more than just a header
+        for ( int ix = 12; ix < plen; ix=ix+2 ) {
+            rtpdata[ix>>1] = (short) (rtpdata[ix>>1] + (short) rand());
+        }
+    }
+#endif
+    return plen;
+}
+
+// Configures the loss simulation: fraction of packets to drop, whether
+// dropped packets are kept for NACK resend, and the simulated RTT used when
+// scheduling resends.  Always returns 0.
+WebRtc_Word32 RTPPlayer::SimulatePacketLoss(float lossRate, bool enableNack, WebRtc_UWord32 rttMs)
+{
+    _nackEnabled = enableNack;
+    _lossRate = lossRate;
+    _rttMs = rttMs;
+    return 0;
+}
+
+// Enables/disables pairwise packet reordering (see NextPacket()).  Always
+// returns 0.
+WebRtc_Word32 RTPPlayer::SetReordering(bool enabled)
+{
+    _reordering = enabled;
+    return 0;
+}
+
+// VCMPacketRequestCallback implementation: schedules each NACKed sequence
+// number for resend one simulated RTT from now.  Always returns 0.
+WebRtc_Word32 RTPPlayer::ResendPackets(const WebRtc_UWord16* sequenceNumbers, WebRtc_UWord16 length)
+{
+    if (sequenceNumbers == NULL)
+    {
+        return 0;
+    }
+    for (int i=0; i < length; i++)
+    {
+        _lostPackets.SetResendTime(sequenceNumbers[i],
+                                   _clock->MillisecondTimestamp() + _rttMs,
+                                   _clock->MillisecondTimestamp());
+    }
+    return 0;
+}
+
+// Dumps resend count and the lost-packet statistics to stdout.
+void RTPPlayer::Print() const
+{
+    printf("Resent packets: %u\n", _resendPacketCount);
+    _lostPackets.Print();
+}
diff --git a/trunk/src/modules/video_coding/main/test/rtp_player.h b/trunk/src/modules/video_coding/main/test/rtp_player.h
new file mode 100644
index 0000000..eac6ba8
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/rtp_player.h
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp.h"
+#include "critical_section_wrapper.h"
+#include "video_coding_defines.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+#include <stdio.h>
+#include <list>
+#include <string>
+
+#define HDR_SIZE 8 // rtpplay packet header size in bytes
+#define FIRSTLINELEN 40
+#define RAND_VEC_LENGTH 4096
+
+struct PayloadCodecTuple;
+
+// One captured RTP packet held for simulated loss/resend.  The constructor
+// deep-copies the input buffer, so the struct owns |data|.
+struct RawRtpPacket
+{
+public:
+    RawRtpPacket(WebRtc_UWord8* rtp_data, WebRtc_UWord16 rtp_length);
+    ~RawRtpPacket();
+
+    uint8_t* data;           // owned copy of the raw packet bytes
+    uint16_t length;         // size of |data| in bytes
+    int64_t resend_time_ms;  // -1 until a resend has been scheduled
+};
+
+typedef std::list<PayloadCodecTuple*> PayloadTypeList;
+typedef std::list<RawRtpPacket*> RtpPacketList;
+typedef RtpPacketList::iterator RtpPacketIterator;
+typedef RtpPacketList::const_iterator ConstRtpPacketIterator;
+
+// Thread-safe store of "lost" packets awaiting NACK-triggered resend, with
+// optional logging to PacketLossDebug.txt.  Owns the packets it holds until
+// they are handed back via NextPacketToResend().
+class LostPackets {
+ public:
+  LostPackets();
+  ~LostPackets();
+
+  // Takes ownership of |packet|.
+  void AddPacket(RawRtpPacket* packet);
+  void SetResendTime(uint16_t sequenceNumber,
+                     int64_t resendTime,
+                     int64_t nowMs);
+  // Transfers ownership of the returned packet to the caller; NULL if none due.
+  RawRtpPacket* NextPacketToResend(int64_t timeNow);
+  int NumberOfPacketsToResend() const;
+  void SetPacketResent(uint16_t seqNo, int64_t nowMs);
+  void Print() const;
+
+ private:
+  webrtc::CriticalSectionWrapper* crit_sect_;
+  int loss_count_;       // total packets ever dropped
+  FILE* debug_file_;     // may be NULL if fopen failed
+  RtpPacketList packets_;
+};
+
+// Immutable mapping of an RTP payload type number to a codec name and type,
+// used when registering receive payloads in RTPPlayer::Initialize().
+struct PayloadCodecTuple
+{
+    PayloadCodecTuple(WebRtc_UWord8 plType, std::string codecName, webrtc::VideoCodecType type) :
+        name(codecName), payloadType(plType), codecType(type) {};
+    const std::string name;
+    const WebRtc_UWord8 payloadType;
+    const webrtc::VideoCodecType codecType;
+};
+
+// Replays an rtpplay-format RTP capture file into an RTP module at the
+// recorded pace, with optional simulated loss, NACK-driven resend and
+// pairwise reordering.  Implements VCMPacketRequestCallback so the VCM can
+// NACK packets back via ResendPackets().
+class RTPPlayer : public webrtc::VCMPacketRequestCallback
+{
+public:
+    // |callback| and |clock| are not owned and must outlive the player.
+    RTPPlayer(const char* filename,
+              webrtc::RtpData* callback,
+              webrtc::TickTimeBase* clock);
+    virtual ~RTPPlayer();
+
+    WebRtc_Word32 Initialize(const PayloadTypeList* payloadList);
+    // Returns 1 when playback is complete, 0 to keep pumping, <0 on error.
+    WebRtc_Word32 NextPacket(const WebRtc_Word64 timeNow);
+    WebRtc_UWord32 TimeUntilNextPacket() const;
+    WebRtc_Word32 SimulatePacketLoss(float lossRate, bool enableNack = false, WebRtc_UWord32 rttMs = 0);
+    WebRtc_Word32 SetReordering(bool enabled);
+    WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers, WebRtc_UWord16 length);
+    void Print() const;
+
+private:
+    WebRtc_Word32 SendPacket(WebRtc_UWord8* rtpData, WebRtc_UWord16 rtpLen);
+    WebRtc_Word32 ReadPacket(WebRtc_Word16* rtpdata, WebRtc_UWord32* offset);
+    WebRtc_Word32 ReadHeader();
+    webrtc::TickTimeBase* _clock;
+    FILE*              _rtpFile;
+    webrtc::RtpRtcp&   _rtpModule;
+    WebRtc_UWord32     _nextRtpTime;      // capture-time offset of next packet
+    webrtc::RtpData*   _dataCallback;
+    bool               _firstPacket;
+    float              _lossRate;
+    bool               _nackEnabled;
+    LostPackets        _lostPackets;
+    WebRtc_UWord32     _resendPacketCount;
+    WebRtc_Word32      _noLossStartup;    // packets exempted from loss at start
+    bool               _endOfFile;
+    WebRtc_UWord32     _rttMs;
+    WebRtc_Word64      _firstPacketRtpTime;
+    WebRtc_Word64      _firstPacketTimeMs;
+    RawRtpPacket*      _reorderBuffer;    // holds one packet when reordering
+    bool               _reordering;
+    WebRtc_Word16      _nextPacket[8000]; // staging buffer for the next packet
+    WebRtc_Word32      _nextPacketLength;
+    int                _randVec[RAND_VEC_LENGTH]; // pre-seeded loss randomness
+    int                _randVecPos;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
diff --git a/trunk/src/modules/video_coding/main/test/subfigure.m b/trunk/src/modules/video_coding/main/test/subfigure.m
new file mode 100644
index 0000000..eadfcb6
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/subfigure.m
@@ -0,0 +1,30 @@
+function H = subfigure(m, n, p)
+%
+% H = SUBFIGURE(m, n, p)
+%
+% Create a new figure window and adjust position and size such that it will
+% become the p-th tile in an m-by-n matrix of windows. (The interpretation of
+% m, n, and p is the same as for SUBPLOT.)
+%
+% Henrik Lundin, 2009-01-19
+%
+
+
+h = figure;
+
+% Convert linear tile index p to (row i, column j) in the m-by-n grid.
+[j, i] = ind2sub([n m], p);
+scrsz = get(0,'ScreenSize'); % get screen size
+%scrsz = [1, 1, 1600, 1200];
+
+% Pixel sizes of OS decorations to subtract from the usable screen area;
+% NOTE(review): hard-coded for one particular desktop setup -- adjust per OS.
+taskbarSize = 58;
+windowbarSize = 68;
+windowBorder = 4;
+
+% Shrink the usable area by the taskbar height.
+scrsz(2) = scrsz(2) + taskbarSize;
+scrsz(4) = scrsz(4) - taskbarSize;
+
+% Tile (i, j): left/bottom origin plus the tile's fraction of the screen,
+% leaving room for window borders and the title bar.
+set(h, 'position', [(j-1)/n * scrsz(3) + scrsz(1) + windowBorder,...
+        (m-i)/m * scrsz(4) + scrsz(2) + windowBorder, ...
+        scrsz(3)/n - (windowBorder + windowBorder),...
+        scrsz(4)/m - (windowbarSize + windowBorder + windowBorder)]);
+
diff --git a/trunk/src/modules/video_coding/main/test/test_callbacks.cc b/trunk/src/modules/video_coding/main/test/test_callbacks.cc
new file mode 100644
index 0000000..1ec0e59
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/test_callbacks.cc
@@ -0,0 +1,509 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test_callbacks.h"
+
+#include <cmath>
+
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "rtp_dump.h"
+#include "test_macros.h"
+
+namespace webrtc {
+
+/******************************
+ *  VCMEncodeCompleteCallback
+ *****************************/
+// Basic callback implementation
+// passes the encoded frame directly to the encoder
+// Packetization callback implementation
+// Writes every encoded frame to |encodedFile| and loops it straight back
+// into the registered receive-side VCM as a single RTP packet per frame.
+VCMEncodeCompleteCallback::VCMEncodeCompleteCallback(FILE* encodedFile):
+    _encodedFile(encodedFile),
+    _encodedBytes(0),
+    _VCMReceiver(NULL),
+    _seqNo(0),
+    _encodeComplete(false),
+    _width(0),
+    _height(0),
+    _codecType(kRTPVideoNoVideo)
+{
+    //
+}
+VCMEncodeCompleteCallback::~VCMEncodeCompleteCallback()
+{
+}
+
+// Intentionally a no-op: this callback always delivers directly to the
+// receiver VCM, so an external transport is ignored.
+void
+VCMEncodeCompleteCallback::RegisterTransportCallback(
+                                            VCMPacketizationCallback* transport)
+{
+}
+
+// Builds a minimal RTP header around the encoded payload (marker bit always
+// set, i.e. one packet == one frame) and feeds it to _VCMReceiver.
+// Returns the receiver's IncomingPacket() result, or -1 for an unsupported
+// codec type.
+WebRtc_Word32
+VCMEncodeCompleteCallback::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& fragmentationHeader,
+        const RTPVideoHeader* videoHdr)
+{
+    // will call the VCMReceiver input packet
+    _frameType = frameType;
+    // writing encodedData into file
+    fwrite(payloadData, 1, payloadSize, _encodedFile);
+    WebRtcRTPHeader rtpInfo;
+    rtpInfo.header.markerBit = true; // end of frame
+    rtpInfo.type.Video.isFirstPacket = true;
+    rtpInfo.type.Video.codec = _codecType;
+    rtpInfo.type.Video.height = (WebRtc_UWord16)_height;
+    rtpInfo.type.Video.width = (WebRtc_UWord16)_width;
+    switch (_codecType)
+    {
+    case webrtc::kRTPVideoVP8:
+        // Copy over the VP8-specific fields the receive side inspects.
+        rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+        rtpInfo.type.Video.codecHeader.VP8.nonReference =
+            videoHdr->codecHeader.VP8.nonReference;
+        rtpInfo.type.Video.codecHeader.VP8.pictureId =
+            videoHdr->codecHeader.VP8.pictureId;
+        break;
+    case webrtc::kRTPVideoI420:
+        break;
+    default:
+        assert(false);
+        return -1;
+    }
+
+    // NOTE(review): rtpInfo is stack-allocated and only partially filled in;
+    // any header field not assigned here is read uninitialized downstream —
+    // confirm the receiver only touches the fields set above.
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.timestamp = timeStamp;
+    rtpInfo.frameType = frameType;
+    // Size should also be received from that table, since the payload type
+    // defines the size.
+
+    _encodedBytes += payloadSize;
+    // directly to receiver
+    int ret = _VCMReceiver->IncomingPacket(payloadData, payloadSize, rtpInfo);
+    _encodeComplete = true;
+
+    return ret;
+}
+
+// Running byte count of all payloads since the last Initialize() /
+// ResetByteCount(); unlike the RTP variant this does NOT reset on read.
+float
+VCMEncodeCompleteCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+// One-shot flag: returns true once per completed encode, then clears itself.
+bool
+VCMEncodeCompleteCallback::EncodeComplete()
+{
+    if (_encodeComplete)
+    {
+        _encodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+// Reset per-run state: byte count, RTP sequence number, completion flag.
+void
+VCMEncodeCompleteCallback::Initialize()
+{
+    _encodeComplete = false;
+    _encodedBytes = 0;
+    _seqNo = 0;
+    return;
+}
+
+// Zero only the byte counter; sequence numbering continues.
+void
+VCMEncodeCompleteCallback::ResetByteCount()
+{
+    _encodedBytes = 0;
+}
+
+/***********************************/
+/*   VCMRTPEncodeCompleteCallback  */
+/***********************************/
+// Encode Complete callback implementation
+// passes the encoded frame via the RTP module to the decoder
+// Packetization callback implementation
+
+// Forwards the encoded frame to the send-side RTP module, which packetizes
+// and transmits it. Returns SendOutgoingData()'s result.
+WebRtc_Word32
+VCMRTPEncodeCompleteCallback::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& fragmentationHeader,
+        const RTPVideoHeader* videoHdr)
+{
+    _frameType = frameType;
+    _encodedBytes+= payloadSize;
+    _encodeComplete = true;
+    return _RTPModule->SendOutgoingData(frameType,
+                                        payloadType,
+                                        timeStamp,
+                                        payloadData,
+                                        payloadSize,
+                                        &fragmentationHeader,
+                                        videoHdr);
+}
+
+// Byte count since the previous call; reading resets it to zero so callers
+// can detect frame drops between polls.
+float
+VCMRTPEncodeCompleteCallback::EncodedBytes()
+{
+    // only good for one call  - after which will reset value;
+    float tmp = _encodedBytes;
+    _encodedBytes = 0;
+    return tmp;
+ }
+
+// One-shot flag: returns true once per completed encode, then clears itself.
+bool
+VCMRTPEncodeCompleteCallback::EncodeComplete()
+{
+    if (_encodeComplete)
+    {
+        _encodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+// Decoded Frame Callback Implementation
+
+// Write the decoded frame buffer to _decodedFile and add its length to the
+// running byte count. Always reports success.
+WebRtc_Word32
+VCMDecodeCompleteCallback::FrameToRender(VideoFrame& videoFrame)
+{
+    fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _decodedFile);
+    _decodedBytes+= videoFrame.Length();
+    return VCM_OK;
+ }
+
+// Running total of decoded bytes written so far.
+WebRtc_Word32
+VCMDecodeCompleteCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+// Optionally opens an rtpdump trace file (when |filename| is non-NULL);
+// packets are recorded there before the simulated channel is applied.
+RTPSendCompleteCallback::RTPSendCompleteCallback(RtpRtcp* rtp,
+                                                 TickTimeBase* clock,
+                                                 const char* filename):
+    _clock(clock),
+    _sendCount(0),
+    _rtp(rtp),
+    _lossPct(0),
+    _burstLength(0),
+    _networkDelayMs(0),
+    _jitterVar(0),
+    _prevLossState(0),
+    _totalSentLength(0),
+    _rtpPackets(),
+    _rtpDump(NULL)
+{
+    if (filename != NULL)
+    {
+        _rtpDump = RtpDump::CreateRtpDump();
+        _rtpDump->Start(filename);
+    }
+}
+
+// Stops/destroys the dump (if any) and frees every packet still queued in
+// the simulated network.
+RTPSendCompleteCallback::~RTPSendCompleteCallback()
+{
+    if (_rtpDump != NULL)
+    {
+        _rtpDump->Stop();
+        RtpDump::DestroyRtpDump(_rtpDump);
+    }
+    // Delete remaining packets
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in list
+        delete _rtpPackets.front();
+        _rtpPackets.pop_front();
+    }
+}
+
+// Simulates a lossy, delayed, jittery channel: the packet is optionally
+// dropped, otherwise queued with a randomized receive time; then every
+// queued packet whose receive time has passed is delivered to the
+// receive-side RTP module. Returns |len| on success, -1 on a dump or
+// receive failure.
+int
+RTPSendCompleteCallback::SendPacket(int channel, const void *data, int len)
+{
+    _sendCount++;
+    _totalSentLength += len;
+
+    if (_rtpDump != NULL)
+    {
+        if (_rtpDump->DumpPacket((const WebRtc_UWord8*)data, len) != 0)
+        {
+            return -1;
+        }
+    }
+
+    bool transmitPacket = true;
+    transmitPacket = PacketLoss();
+
+    WebRtc_UWord64 now = _clock->MillisecondTimestamp();
+    // Insert outgoing packet into list
+    if (transmitPacket)
+    {
+        // NOTE(review): RtpPacket::data is a fixed 1650-byte buffer
+        // (test_util.h) and len is not checked against it here — an
+        // oversized packet would overflow the copy below.
+        RtpPacket* newPacket = new RtpPacket();
+        memcpy(newPacket->data, data, len);
+        newPacket->length = len;
+        // Simulate receive time = network delay + packet jitter
+        // simulated as a Normal distribution random variable with
+        // mean = networkDelay and variance = jitterVar
+        WebRtc_Word32
+        simulatedDelay = (WebRtc_Word32)NormalDist(_networkDelayMs,
+                                                   sqrt(_jitterVar));
+        newPacket->receiveTime = now + simulatedDelay;
+        _rtpPackets.push_back(newPacket);
+    }
+
+    // Are we ready to send packets to the receiver?
+    RtpPacket* packet = NULL;
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in list
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - now;
+        if (timeToReceive > 0)
+        {
+            // No available packets to send
+            break;
+        }
+
+        _rtpPackets.pop_front();
+        // Send to receive side
+        if (_rtp->IncomingPacket((const WebRtc_UWord8*)packet->data,
+                                 packet->length) < 0)
+        {
+            delete packet;
+            packet = NULL;
+            // Will return an error after the first packet that goes wrong
+            return -1;
+        }
+        delete packet;
+        packet = NULL;
+    }
+    return len; // OK
+}
+
+// RTCP shares the same simulated channel as RTP.
+int
+RTPSendCompleteCallback::SendRTCPPacket(int channel, const void *data, int len)
+{
+    // Incorporate network conditions
+    return SendPacket(channel, data, len);
+}
+
+// Set the average packet loss percentage (0-100).
+void
+RTPSendCompleteCallback::SetLossPct(double lossPct)
+{
+    _lossPct = lossPct;
+    return;
+}
+
+// Set the mean loss-burst length in packets; values <= 1 select plain
+// random (Bernoulli) loss instead of the bursty Gilbert model.
+void
+RTPSendCompleteCallback::SetBurstLength(double burstLength)
+{
+    _burstLength = burstLength;
+    return;
+}
+
+// Decide whether the current packet survives the channel.
+// Returns true to transmit, false to drop.
+bool
+RTPSendCompleteCallback::PacketLoss()
+{
+    bool transmitPacket = true;
+    if (_burstLength <= 1.0)
+    {
+        // Random loss: if _burstLength parameter is not set, or <=1
+        if (UnifomLoss(_lossPct))
+        {
+            // drop
+            transmitPacket = false;
+        }
+    }
+    else
+    {
+        // Simulate bursty channel (Gilbert model)
+        // (1st order) Markov chain model with memory of the previous/last
+        // packet state (loss or received)
+
+        // 0 = received state
+        // 1 = loss state
+
+        // probTrans10: if previous packet is lost, prob. to -> received state
+        // probTrans11: if previous packet is lost, prob. to -> loss state
+
+        // probTrans01: if previous packet is received, prob. to -> loss state
+        // probTrans00: if previous packet is received, prob. to -> received
+
+        // Map the two channel parameters (average loss rate and burst length)
+        // to the transition probabilities:
+        double probTrans10 = 100 * (1.0 / _burstLength);
+        double probTrans11 = (100.0 - probTrans10);
+        double probTrans01 = (probTrans10 * ( _lossPct / (100.0 - _lossPct)));
+
+        // Note: Random loss (Bernoulli) model is a special case where:
+        // burstLength = 100.0 / (100.0 - _lossPct) (i.e., p10 + p01 = 100)
+
+        if (_prevLossState == 0 )
+        {
+            // previous packet was received
+            if (UnifomLoss(probTrans01))
+            {
+                // drop, update previous state to loss
+                _prevLossState = 1;
+                transmitPacket = false;
+            }
+        }
+        else if (_prevLossState == 1)
+        {
+            // Default back to the received state unless we lose again below.
+            _prevLossState = 0;
+            // previous packet was lost
+            if (UnifomLoss(probTrans11))
+            {
+                // drop, update previous state to loss
+                _prevLossState = 1;
+                transmitPacket = false;
+             }
+        }
+    }
+    return transmitPacket;
+}
+
+
+// Uniform (Bernoulli) loss: returns true (drop) with probability
+// lossPct/100. The +1.0 offset keeps the uniform sample in (0,1].
+// (The "Unifom" spelling matches the declaration in test_callbacks.h.)
+bool
+RTPSendCompleteCallback::UnifomLoss(double lossPct)
+{
+    double randVal = (std::rand() + 1.0)/(RAND_MAX + 1.0);
+    return randVal < lossPct/100;
+}
+
+// NACK callback: ask the RTP module to retransmit the listed sequence
+// numbers. Returns SendNACK()'s result.
+WebRtc_Word32
+PacketRequester::ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+                               WebRtc_UWord16 length)
+{
+    return _rtp.SendNACK(sequenceNumbers, length);
+}
+
+// Per-second VCM send statistics: sanity-check against the configured
+// target frame rate and a loose bit rate window, then log the values.
+WebRtc_Word32
+SendStatsTest::SendStatistics(const WebRtc_UWord32 bitRate,
+                              const WebRtc_UWord32 frameRate)
+{
+    TEST(frameRate <= _frameRate);
+    TEST(bitRate > 0 && bitRate < 100000);
+    printf("VCM 1 sec: Bit rate: %u\tFrame rate: %u\n", bitRate, frameRate);
+    return 0;
+}
+
+// Key frame request callback: only logs the request.
+WebRtc_Word32 KeyFrameReqTest::RequestKeyFrame() {
+  printf("Key frame requested\n");
+  return 0;
+}
+
+
+// Starts with FEC rates zero, UEP off and NACK off.
+VideoProtectionCallback::VideoProtectionCallback():
+_deltaFECRate(0),
+_keyFECRate(0),
+_deltaUseUepProtection(0),
+_keyUseUepProtection(0),
+_nack(kNackOff)
+{
+    //
+}
+
+VideoProtectionCallback::~VideoProtectionCallback()
+{
+    //
+}
+
+// Record the protection settings requested by the VCM and push the FEC
+// configuration to the registered RTP module. Returns 0 on success, -1 if
+// the RTP module rejects either setting.
+// NOTE(review): the sent_*_rate_bps out-parameters are never written here —
+// confirm callers do not read them after this request.
+WebRtc_Word32
+VideoProtectionCallback::ProtectionRequest(WebRtc_UWord8 deltaFECRate,
+                                           WebRtc_UWord8 keyFECRate,
+                                           bool deltaUseUepProtection,
+                                           bool keyUseUepProtection,
+                                           bool nack_enabled,
+                                           WebRtc_UWord32* sent_video_rate_bps,
+                                           WebRtc_UWord32* sent_nack_rate_bps,
+                                           WebRtc_UWord32* sent_fec_rate_bps)
+{
+    _deltaFECRate = deltaFECRate;
+    _keyFECRate = keyFECRate;
+    _deltaUseUepProtection = deltaUseUepProtection;
+    _keyUseUepProtection = keyUseUepProtection;
+    if (nack_enabled)
+    {
+        _nack = kNackRtcp;
+    }
+    else
+    {
+        _nack = kNackOff;
+    }
+
+    // Update RTP
+    if (_rtp->SetFECCodeRate(keyFECRate, deltaFECRate) != 0)
+    {
+        printf("Error in Setting FEC rate\n");
+        return -1;
+
+    }
+    if (_rtp->SetFECUepProtection(keyUseUepProtection,
+                                  deltaUseUepProtection) != 0)
+    {
+        printf("Error in Setting FEC UEP protection\n");
+        return -1;
+    }
+    return 0;
+
+}
+// Last NACK mode requested via ProtectionRequest().
+NACKMethod
+VideoProtectionCallback::NACKMethod()
+{
+    return _nack;
+}
+
+// Last requested FEC rate for delta frames.
+WebRtc_UWord8
+VideoProtectionCallback::FECDeltaRate()
+{
+    return _deltaFECRate;
+}
+
+// Last requested FEC rate for key frames.
+WebRtc_UWord8
+VideoProtectionCallback::FECKeyRate()
+{
+    return _keyFECRate;
+}
+
+// Last requested UEP setting for delta frames.
+bool
+VideoProtectionCallback::FECDeltaUepProtection()
+{
+    return _deltaUseUepProtection;
+}
+
+// Last requested UEP setting for key frames.
+bool
+VideoProtectionCallback::FECKeyUepProtection()
+{
+    return _keyUseUepProtection;
+}
+
+// Bandwidth-estimate update from the RTP module, forwarded to the VCM
+// (bitrate converted from bps to kbps).
+// NOTE(review): the RTT is narrowed to 8 bits here, so values above 255 ms
+// wrap — confirm this range is acceptable for the tests using this callback.
+void
+RTPFeedbackCallback::OnNetworkChanged(const WebRtc_Word32 id,
+                                      const WebRtc_UWord32 bitrateBps,
+                                      const WebRtc_UWord8 fractionLost,
+                                      const WebRtc_UWord16 roundTripTimeMs)
+{
+
+    _vcm->SetChannelParameters(bitrateBps / 1000, fractionLost,
+                               (WebRtc_UWord8)roundTripTimeMs);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_coding/main/test/test_callbacks.h b/trunk/src/modules/video_coding/main/test/test_callbacks.h
new file mode 100644
index 0000000..07820bb
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/test_callbacks.h
@@ -0,0 +1,279 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_CALLBACKS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_CALLBACKS_H_
+
+/*
+ * Declaration of general callbacks that are used throughout VCM's offline tests
+ */
+
+
+#include <string.h>
+#include <cstdlib>
+#include <fstream>
+#include <list>
+
+#include "module_common_types.h"
+#include "rtp_rtcp.h"
+#include "test_util.h"
+#include "trace.h"
+#include "video_coding.h"
+
+namespace webrtc
+{
+class RtpDump;
+
+// Send Side - Packetization callback - send an encoded frame to the VCMReceiver
+class VCMEncodeCompleteCallback: public VCMPacketizationCallback
+{
+public:
+    // Constructor input: file in which encoded data will be written
+    VCMEncodeCompleteCallback(FILE* encodedFile);
+    virtual ~VCMEncodeCompleteCallback();
+    // Register transport callback (no-op in this implementation)
+    void RegisterTransportCallback(VCMPacketizationCallback* transport);
+    // Process encoded data received from the encoder, pass stream to the
+    // VCMReceiver module
+    WebRtc_Word32 SendData(const FrameType frameType,
+            const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+            const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize,
+            const RTPFragmentationHeader& fragmentationHeader,
+            const RTPVideoHeader* videoHdr);
+    // Register existing VCM. Currently - encode and decode under same module.
+    void RegisterReceiverVCM(VideoCodingModule *vcm) {_VCMReceiver = vcm;}
+    // Return size of last encoded frame data (all frames in the sequence)
+    // Good for only one call - after which will reset value
+    // (to allow detection of frame drop)
+    float EncodedBytes();
+    // Return encode complete (true/false)
+    bool EncodeComplete();
+    // Inform callback of codec used
+    void SetCodecType(RTPVideoCodecTypes codecType)
+    {_codecType = codecType;}
+    // Inform callback of frame dimensions
+    void SetFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height)
+    {
+        _width = width;
+        _height = height;
+    }
+    // Initialize callback data
+    void Initialize();
+    void ResetByteCount();
+
+    // Conversion function for payload type (needed for the callback function)
+
+private:
+    FILE*              _encodedFile;
+    float              _encodedBytes;
+    VideoCodingModule* _VCMReceiver;
+    FrameType          _frameType;
+    // NOTE(review): _payloadData appears unused in test_callbacks.cc —
+    // candidate for removal; verify before deleting.
+    WebRtc_UWord8*     _payloadData;
+    WebRtc_UWord16     _seqNo;
+    bool               _encodeComplete;
+    WebRtc_Word32      _width;
+    WebRtc_Word32      _height;
+    RTPVideoCodecTypes _codecType;
+
+}; // end of VCMEncodeCompleteCallback
+
+// Send Side - Packetization callback - packetize an encoded frame via the
+// RTP module
+class VCMRTPEncodeCompleteCallback: public VCMPacketizationCallback
+{
+public:
+    VCMRTPEncodeCompleteCallback(RtpRtcp* rtp) :
+        _encodedBytes(0),
+        _seqNo(0),
+        _encodeComplete(false),
+        _RTPModule(rtp) {}
+
+    virtual ~VCMRTPEncodeCompleteCallback() {}
+    // Process encoded data received from the encoder, pass stream to the
+    // RTP module
+    WebRtc_Word32 SendData(const FrameType frameType,
+            const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+            const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize,
+            const RTPFragmentationHeader& fragmentationHeader,
+            const RTPVideoHeader* videoHdr);
+    // Return size of last encoded frame. Value good for one call
+    // (resets to zero after call to inform test of frame drop)
+    float EncodedBytes();
+    // Return encode complete (true/false)
+    bool EncodeComplete();
+    // Inform callback of codec used
+    void SetCodecType(RTPVideoCodecTypes codecType)
+    {_codecType = codecType;}
+
+    // Inform callback of frame dimensions
+    void SetFrameDimensions(WebRtc_Word16 width, WebRtc_Word16 height)
+    {
+        _width = width;
+        _height = height;
+    }
+
+private:
+    float              _encodedBytes;
+    FrameType          _frameType;
+    // NOTE(review): _payloadData and _seqNo appear unused by this class's
+    // implementation in test_callbacks.cc — verify before removing.
+    WebRtc_UWord8*     _payloadData;
+    WebRtc_UWord16     _seqNo;
+    bool               _encodeComplete;
+    RtpRtcp*           _RTPModule;
+    WebRtc_Word16      _width;
+    WebRtc_Word16      _height;
+    RTPVideoCodecTypes _codecType;
+}; // end of VCMRTPEncodeCompleteCallback
+
+// Decode Complete callback
+// Writes the decoded frames to a given file.
+class VCMDecodeCompleteCallback: public VCMReceiveCallback
+{
+public:
+    VCMDecodeCompleteCallback(FILE* decodedFile) :
+        _decodedFile(decodedFile), _decodedBytes(0) {}
+    virtual ~VCMDecodeCompleteCallback() {}
+    // Write decoded frame into file
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+    // Running total of bytes written to the file so far
+    WebRtc_Word32 DecodedBytes();
+private:
+    FILE*               _decodedFile;
+    WebRtc_UWord32      _decodedBytes;
+}; // end of VCMDecodeCompleCallback class
+
+// Transport callback
+// Called by the RTP Sender - simulates sending packets through a network to the
+// RTP receiver. User can set network conditions as: RTT, packet loss,
+// burst length and jitter.
+class RTPSendCompleteCallback: public Transport
+{
+public:
+    // Constructor input: (receive side) rtp module to send encoded data to
+    RTPSendCompleteCallback(RtpRtcp* rtp, TickTimeBase* clock,
+                            const char* filename = NULL);
+    virtual ~RTPSendCompleteCallback();
+    // Send Packet to receive side RTP module
+    virtual int SendPacket(int channel, const void *data, int len);
+    // Send RTCP Packet to receive side RTP module
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+    // Set percentage of channel loss in the network
+    void SetLossPct(double lossPct);
+    // Set average size of burst loss
+    void SetBurstLength(double burstLength);
+    // Set network delay in the network
+    void SetNetworkDelay(WebRtc_UWord32 networkDelayMs)
+                        {_networkDelayMs = networkDelayMs;};
+    // Set Packet jitter delay
+    void SetJitterVar(WebRtc_UWord32 jitterVar)
+                      {_jitterVar = jitterVar;};
+    // Return send count
+    int SendCount() {return _sendCount; }
+    // Return accumulated length in bytes of transmitted packets
+    WebRtc_UWord32 TotalSentLength() {return _totalSentLength;}
+protected:
+    // Randomly decide whether to drop packets, based on the channel model
+    bool PacketLoss();
+    // Random uniform loss model
+    bool UnifomLoss(double lossPct);
+
+    TickTimeBase*           _clock;
+    WebRtc_UWord32          _sendCount;
+    RtpRtcp*                _rtp;
+    double                  _lossPct;
+    double                  _burstLength;
+    WebRtc_UWord32          _networkDelayMs;
+    double                  _jitterVar;
+    // Gilbert-model state: false/0 = received, true/1 = lost (see .cc)
+    bool                    _prevLossState;
+    WebRtc_UWord32          _totalSentLength;
+    std::list<RtpPacket*>   _rtpPackets;
+    RtpDump*                _rtpDump;
+};
+
+// Request re-transmission of packets (NACK)
+class PacketRequester: public VCMPacketRequestCallback
+{
+public:
+    PacketRequester(RtpRtcp& rtp) :
+        _rtp(rtp) {}
+    // Forward the sequence numbers to the RTP module as a NACK
+    WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+            WebRtc_UWord16 length);
+private:
+    webrtc::RtpRtcp& _rtp;
+};
+
+// Key frame request
+class KeyFrameReqTest: public VCMFrameTypeCallback
+{
+public:
+    // Logs the request; always returns 0
+    WebRtc_Word32 RequestKeyFrame();
+};
+
+
+// VCM statistics
+class SendStatsTest: public webrtc::VCMSendStatisticsCallback
+{
+public:
+    SendStatsTest() : _frameRate(15) {}
+    // Checks the reported rates against the target and logs them
+    WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
+            const WebRtc_UWord32 frameRate);
+    void SetTargetFrameRate(WebRtc_UWord32 frameRate) {_frameRate = frameRate;}
+private:
+    WebRtc_UWord32 _frameRate;
+};
+
+// Protection callback - allows the VCM (media optimization) to inform the RTP
+// module of the required protection(FEC rates/settings and NACK mode).
+class VideoProtectionCallback: public VCMProtectionCallback
+{
+public:
+    VideoProtectionCallback();
+    virtual ~VideoProtectionCallback();
+    void RegisterRtpModule(RtpRtcp* rtp) {_rtp = rtp;}
+    // Record the settings and push the FEC configuration to the RTP module
+    WebRtc_Word32 ProtectionRequest(WebRtc_UWord8 deltaFECRate,
+                                    WebRtc_UWord8 keyFECRate,
+                                    bool deltaUseUepProtection,
+                                    bool keyUseUepProtection,
+                                    bool nack_enabled,
+                                    WebRtc_UWord32* sent_video_rate_bps,
+                                    WebRtc_UWord32* sent_nack_rate_bps,
+                                    WebRtc_UWord32* sent_fec_rate_bps)<
+    // Accessors for the most recently requested settings
+    enum NACKMethod   NACKMethod();
+    WebRtc_UWord8     FECDeltaRate();
+    WebRtc_UWord8     FECKeyRate();
+    bool              FECDeltaUepProtection();
+    bool              FECKeyUepProtection();
+private:
+    RtpRtcp*             _rtp;
+    WebRtc_UWord8        _deltaFECRate;
+    WebRtc_UWord8        _keyFECRate;
+    bool                 _deltaUseUepProtection;
+    bool                 _keyUseUepProtection;
+    enum NACKMethod      _nack;
+};
+
+// Feed back from the RTP Module callback
+class RTPFeedbackCallback : public RtpVideoFeedback {
+ public:
+  RTPFeedbackCallback(VideoCodingModule* vcm) {_vcm = vcm;};
+  // Intra-frame requests are ignored by this test callback
+  void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
+                                   const FrameType type,
+                                   const WebRtc_UWord8 streamIdx) {};
+
+   // Forwards the bandwidth estimate to the VCM (see test_callbacks.cc)
+   void OnNetworkChanged(const WebRtc_Word32 id,
+                         const WebRtc_UWord32 bitrateBps,
+                         const WebRtc_UWord8 fractionLost,
+                         const WebRtc_UWord16 roundTripTimeMs);
+
+ private:
+  VideoCodingModule* _vcm;
+};
+
+}  // namespace webrtc
+
+#endif
diff --git a/trunk/src/modules/video_coding/main/test/test_macros.h b/trunk/src/modules/video_coding/main/test/test_macros.h
new file mode 100644
index 0000000..31693b5
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/test_macros.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VCM_TEST_MACROS_H
+#define VCM_TEST_MACROS_H
+
+#include <cstdio>
+#include <cstdlib>
+
+extern int vcmMacrosTests;
+extern int vcmMacrosErrors;
+
+// Print a failure message with its source location to stderr.
+#define PRINT_ERR_MSG(msg)                              \
+    do {                                                \
+        fprintf(stderr, "Error at line %i of %s\n%s",   \
+            __LINE__, __FILE__, msg);                   \
+    } while(0)
+
+// Evaluate expr; on failure report it and bump the global error counter,
+// but keep running.
+#define TEST(expr)                                              \
+    do {                                                        \
+        vcmMacrosTests++;                                       \
+        if (!(expr)) {                                          \
+            PRINT_ERR_MSG("Assertion failed: " #expr "\n\n");   \
+            vcmMacrosErrors++;                                  \
+        }                                                       \
+    } while(0)
+
+// Like TEST(), but terminate the whole binary on failure.
+#define TEST_EXIT_ON_FAIL(expr)                                             \
+    do {                                                                    \
+        vcmMacrosTests++;                                                   \
+        if (!(expr)) {                                                      \
+            PRINT_ERR_MSG("Assertion failed: " #expr "\nExiting...\n\n");   \
+            vcmMacrosErrors++;                                              \
+            exit(EXIT_FAILURE);                                             \
+        }                                                                   \
+    } while(0)
+
+#endif
diff --git a/trunk/src/modules/video_coding/main/test/test_util.cc b/trunk/src/modules/video_coding/main/test/test_util.cc
new file mode 100644
index 0000000..1f89168
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/test_util.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test_util.h"
+#include "test_macros.h"
+#include "rtp_dump.h"
+#include <cmath>
+
+using namespace webrtc;
+
+// Normal Distribution
+#define PI  3.14159265
+// Draw one sample from N(mean, stdDev^2) via the Box-Muller transform on
+// two independent uniforms; the +1.0 offsets keep the uniforms in (0,1]
+// so log() never sees zero.
+double
+NormalDist(double mean, double stdDev)
+{
+    // Creating a Normal distribution variable from two independent uniform
+    // variables based on the Box-Muller transform
+    double uniform1 = (std::rand() + 1.0) / (RAND_MAX + 1.0);
+    double uniform2 = (std::rand() + 1.0) / (RAND_MAX + 1.0);
+    return (mean + stdDev * sqrt(-2 * log(uniform1)) * cos(2 * PI * uniform2));
+}
+
+// Map a payload name to the RTP video codec enum; anything other than
+// "VP8" or "I420" falls back to kRTPVideoNoVideo.
+// Note: length 5 for "I420" includes the terminator (exact match), while
+// "VP8" matches any name with that 3-char prefix.
+RTPVideoCodecTypes
+ConvertCodecType(const char* plname)
+{
+    if (strncmp(plname,"VP8" , 3) == 0)
+    {
+        return kRTPVideoVP8;
+    }
+    else if (strncmp(plname,"I420" , 5) == 0)
+    {
+        return kRTPVideoI420;
+    }
+    else
+    {
+        return kRTPVideoNoVideo; // Default value
+    }
+}
+
diff --git a/trunk/src/modules/video_coding/main/test/test_util.h b/trunk/src/modules/video_coding/main/test/test_util.h
new file mode 100644
index 0000000..d705434
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/test_util.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
+
+/*
+ * General declarations used through out VCM offline tests.
+ */
+
+#include <string.h>
+#include <fstream>
+#include <cstdlib>
+
+#include "module_common_types.h"
+#include "testsupport/fileutils.h"
+
+// Class used for passing command line arguments to tests
+class CmdArgs
+{
+ public:
+  // Defaults: VP8 at CIF (352x288), 500 kbps, 30 fps, lossless zero-RTT
+  // channel, test number 11.
+  CmdArgs()
+      : codecName("VP8"),
+        codecType(webrtc::kVideoCodecVP8),
+        width(352),
+        height(288),
+        bitRate(500),
+        frameRate(30),
+        packetLoss(0),
+        rtt(0),
+        protectionMode(0),
+        camaEnable(0),
+        inputFile(webrtc::test::ProjectRootPath() +
+                  "/resources/foreman_cif.yuv"),
+        outputFile(webrtc::test::OutputPath() +
+                   "video_coding_test_output_352x288.yuv"),
+        testNum(11) {}
+     std::string codecName;
+     webrtc::VideoCodecType codecType;
+     int width;
+     int height;
+     int bitRate;
+     int frameRate;
+     int packetLoss;
+     int rtt;
+     int protectionMode;
+     int camaEnable;
+     std::string inputFile;
+     std::string outputFile;
+     int testNum;
+};
+
+// forward declaration
+int MTRxTxTest(CmdArgs& args);
+double NormalDist(double mean, double stdDev);
+
+// One simulated network packet: raw RTP data plus the absolute time (ms) at
+// which the receiver should see it.
+struct RtpPacket {
+  WebRtc_Word8 data[1650]; // max packet size
+  WebRtc_Word32 length;
+  WebRtc_Word64 receiveTime;
+};
+
+
+// Codec type conversion
+webrtc::RTPVideoCodecTypes
+ConvertCodecType(const char* plname);
+
+#endif
diff --git a/trunk/src/modules/video_coding/main/test/tester_main.cc b/trunk/src/modules/video_coding/main/test/tester_main.cc
new file mode 100644
index 0000000..e5d7cd3
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/tester_main.cc
@@ -0,0 +1,208 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "normal_test.h"
+#include "codec_database_test.h"
+#include "generic_codec_test.h"
+#include "../source/event.h"
+#include "media_opt_test.h"
+#include "quality_modes_test.h"
+#include "test_util.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _WIN32
+//#include "vld.h"
+#endif
+
+using namespace webrtc;
+
+/*
+ * Build with EVENT_DEBUG defined
+ * to build the tests with simulated events.
+ */
+
+int vcmMacrosTests = 0;
+int vcmMacrosErrors = 0;
+
+// Parses the "-x <value>" flag pairs documented in main()'s usage string
+// into |args|.  Returns 0 on success; returns -1 for an argument that
+// does not start with '-', an unknown flag, a flag with no value, or a
+// non-positive value where a positive one is required.
+int ParseArguments(int argc, char **argv, CmdArgs& args)
+{
+    int i = 1;
+    while (i < argc)
+    {
+        if (argv[i][0] != '-')
+        {
+            return -1;
+        }
+        // Every flag consumes a value.  Without this check a trailing
+        // flag would read argv[argc] (the NULL terminator) below and
+        // pass a null pointer to atoi()/strncmp()/std::string.
+        if (i + 1 >= argc)
+        {
+            return -1;
+        }
+        switch (argv[i][1])
+        {
+        case 'w':
+        {
+            int w = atoi(argv[i+1]);
+            if (w < 1)
+                return -1;
+            args.width = w;
+            break;
+        }
+        case 'h':
+        {
+            int h = atoi(argv[i+1]);
+            if (h < 1)
+                return -1;
+            args.height = h;
+            break;
+        }
+        case 'b':
+        {
+            int b = atoi(argv[i+1]);
+            if (b < 1)
+                return -1;
+            args.bitRate = b;
+            break;
+        }
+        case 'f':
+        {
+            int f = atoi(argv[i+1]);
+            if (f < 1)
+                return -1;
+            args.frameRate = f;
+            break;
+        }
+        case 'c':
+        {
+            // TODO(holmer): This should be replaced with a map if more codecs
+            // are added
+            args.codecName = argv[i+1];
+            if (strncmp(argv[i+1], "VP8", 3) == 0)
+            {
+                args.codecType = kVideoCodecVP8;
+            }
+            else if (strncmp(argv[i+1], "I420", 4) == 0)
+            {
+                args.codecType = kVideoCodecI420;
+            }
+            else
+                return -1;
+
+            break;
+        }
+        case 'i':
+        {
+            args.inputFile = argv[i+1];
+            break;
+        }
+        case 'o':
+            args.outputFile = argv[i+1];
+            break;
+        case 'n':
+        {
+            int n = atoi(argv[i+1]);
+            if (n < 1)
+                return -1;
+            args.testNum = n;
+            break;
+        }
+        case 'p':
+        {
+            args.packetLoss = atoi(argv[i+1]);
+            break;
+        }
+        case 'r':
+        {
+            args.rtt = atoi(argv[i+1]);
+            break;
+        }
+        case 'm':
+        {
+            args.protectionMode = atoi(argv[i+1]);
+            break;
+        }
+        case 'e':
+        {
+            args.camaEnable = atoi(argv[i+1]);
+            break;
+        }
+        default:
+            return -1;
+        }
+        i += 2;
+    }
+    return 0;
+}
+
+// Entry point of the VCM offline test binary.  Parses the command line
+// into a CmdArgs struct and dispatches on args.testNum to an individual
+// test; test number 11 runs a small suite.  Returns 0 when the selected
+// test(s) succeed and -1 otherwise.
+int main(int argc, char **argv)
+{
+    CmdArgs args;
+
+    if (ParseArguments(argc, argv, args) != 0)
+    {
+        printf("Unable to parse input arguments\n");
+        printf("args: -n <test #> -w <width> -h <height> -f <fps> -b <bps> "
+               "-c <codec>  -i <input file> -o <output file> -p <packet loss> "
+               "-r <round-trip-time> -e <cama enable> -m <protection mode> \n");
+        return -1;
+    }
+
+    int ret = 0;
+    switch (args.testNum)
+    {
+    case 1:
+        ret = NormalTest::RunTest(args);
+        break;
+    case 2:
+        ret = MTRxTxTest(args);
+        break;
+    case 3:
+        ret = GenericCodecTest::RunTest(args);
+        break;
+    case 4:
+        ret = CodecDataBaseTest::RunTest(args);
+        break;
+    case 5:
+        // 0- normal, 1-Release test(50 runs) 2- from file
+        ret = MediaOptTest::RunTest(0, args);
+        break;
+    case 6:
+        ret = ReceiverTimingTests(args);
+        break;
+    case 7:
+        ret = RtpPlay(args);
+        break;
+    case 8:
+        ret = RtpPlayMT(args);
+        break;
+    case 9:
+        ret = JitterBufferTest(args);
+        break;
+    case 10:
+        ret = DecodeFromStorageTest(args);
+        break;
+    case 11:
+        // Suite: run several tests back to back; any failure makes the
+        // whole run fail.
+        ret = NormalTest::RunTest(args);
+        ret |= CodecDataBaseTest::RunTest(args);
+        ret |= ReceiverTimingTests(args);
+        ret |= JitterBufferTest(args);
+        break;
+    default:
+        ret = -1;
+        break;
+    }
+    if (ret != 0)
+    {
+        printf("Test failed!\n");
+        return -1;
+    }
+    return 0;
+}
+
+
+
diff --git a/trunk/src/modules/video_coding/main/test/video_rtp_play.cc b/trunk/src/modules/video_coding/main/test/video_rtp_play.cc
new file mode 100644
index 0000000..577408f
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/video_rtp_play.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "../source/event.h"
+#include "../source/internal_defines.h"
+#include "test_macros.h"
+#include "rtp_player.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+#include <stdio.h>
+#include <string.h>
+
+using namespace webrtc;
+
+// Data callback for the RTP player: forwards each received payload to
+// the VCM via IncomingPacket().  Returns the VCM's result code.
+WebRtc_Word32
+RtpDataCallback::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadSize,
+                                          const WebRtcRTPHeader* rtpHeader)
+{
+    return _vcm->IncomingPacket(payloadData, payloadSize, *rtpHeader);
+}
+
+// Closes the timing log and the raw output file if FrameToRender()
+// opened them; either may still be NULL if no frame was ever rendered.
+FrameReceiveCallback::~FrameReceiveCallback()
+{
+    if (_timingFile != NULL)
+        fclose(_timingFile);
+    if (_outFile != NULL)
+        fclose(_outFile);
+}
+
+// Receive callback invoked for every frame the VCM hands over for
+// rendering.  Lazily opens the render-timing log and the raw output file
+// (both are closed by the destructor), appends a
+// "<RTP timestamp>, <render time>" line to the log, and dumps the raw
+// frame buffer to the output file.  Returns 0 on success, -1 if either
+// file cannot be opened.
+WebRtc_Word32
+FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame)
+{
+    if (_timingFile == NULL)
+    {
+        _timingFile = fopen((test::OutputPath() + "renderTiming.txt").c_str(),
+                            "w");
+        if (_timingFile == NULL)
+        {
+            return -1;
+        }
+    }
+    if (_outFile == NULL)
+    {
+        _outFile = fopen(_outFilename.c_str(), "wb");
+        if (_outFile == NULL)
+        {
+            return -1;
+        }
+    }
+    // Render time is 64-bit; masked down to 32 bits for the log.
+    fprintf(_timingFile, "%u, %u\n",
+            videoFrame.TimeStamp(),
+            MaskWord64ToUWord32(videoFrame.RenderTimeMs()));
+    fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _outFile);
+    return 0;
+}
+
+// Plays a recorded RTP stream through the VCM single-threaded on a
+// simulated clock; decoded frames and render timing are written out via
+// |receiveCallback|.  Only meaningful in builds with simulated events
+// (EVENT_DEBUG); returns -1 immediately otherwise.
+int RtpPlay(CmdArgs& args)
+{
+    // Make sure this test isn't executed without simulated events.
+#if !defined(EVENT_DEBUG)
+    return -1;
+#endif
+    // BEGIN Settings
+
+    bool protectionEnabled = false;
+    VCMVideoProtection protectionMethod = kProtectionNack;
+    WebRtc_UWord32 rttMS = 0;
+    float lossRate = 0.0f;
+    bool reordering = false;
+    WebRtc_UWord32 renderDelayMs = 0;
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+    const WebRtc_Word64 MAX_RUNTIME_MS = -1;  // -1 == no runtime limit.
+    std::string outFile = args.outputFile;
+    if (outFile == "")
+        outFile = test::OutputPath() + "RtpPlay_decoded.yuv";
+    FrameReceiveCallback receiveCallback(outFile);
+    FakeTickTime clock(0);
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    RtpDataCallback dataCallback(vcm);
+    RTPPlayer rtpStream(args.inputFile.c_str(), &dataCallback, &clock);
+
+
+    PayloadTypeList payloadTypes;
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
+                                                  kVideoCodecVP8));
+
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "receiverTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    // END Settings
+
+    // Set up
+
+    // NOTE(review): the early error returns below leak |vcm| and skip
+    // Trace::ReturnTrace(); tolerable for a test binary.
+    WebRtc_Word32 ret = vcm->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+    vcm->RegisterReceiveCallback(&receiveCallback);
+    vcm->RegisterPacketRequestCallback(&rtpStream);
+
+    // Register receive codecs in VCM
+    for (PayloadTypeList::iterator it = payloadTypes.begin();
+        it != payloadTypes.end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec codec;
+            if (VideoCodingModule::Codec(payloadType->codecType, &codec) < 0)
+            {
+                return -1;
+            }
+            codec.plType = payloadType->payloadType;
+            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+
+    if (rtpStream.Initialize(&payloadTypes) < 0)
+    {
+        return -1;
+    }
+    bool nackEnabled = protectionEnabled &&
+        (protectionMethod == kProtectionNack ||
+         protectionMethod == kProtectionDualDecoder);
+    rtpStream.SimulatePacketLoss(lossRate, nackEnabled, rttMS);
+    rtpStream.SetReordering(reordering);
+    vcm->SetChannelParameters(0, 0, rttMS);
+    vcm->SetVideoProtection(protectionMethod, protectionEnabled);
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    ret = 0;
+
+    // RTP stream main loop: feed packets on the simulated clock,
+    // decoding every 5 simulated ms and driving periodic processing.
+    while ((ret = rtpStream.NextPacket(clock.MillisecondTimestamp())) == 0)
+    {
+        if (clock.MillisecondTimestamp() % 5 == 0)
+        {
+            ret = vcm->Decode();
+            if (ret < 0)
+            {
+                return -1;
+            }
+        }
+        while (vcm->DecodeDualFrame(0) == 1);
+        if (vcm->TimeUntilNextProcess() <= 0)
+        {
+            vcm->Process();
+        }
+        if (MAX_RUNTIME_MS > -1 && clock.MillisecondTimestamp() >=
+            MAX_RUNTIME_MS)
+        {
+            break;
+        }
+        clock.IncrementDebugClock(1);
+    }
+
+    switch (ret)
+    {
+    case 1:
+        printf("Success\n");
+        break;
+    case -1:
+        printf("Failed\n");
+        break;
+    case 0:
+        printf("Timeout\n");
+        break;
+    }
+
+    rtpStream.Print();
+
+    // Tear down
+    while (!payloadTypes.empty())
+    {
+        delete payloadTypes.front();
+        payloadTypes.pop_front();
+    }
+    // Destroy through the factory API to match Create(), consistent with
+    // the teardown in RtpPlayMT() (previously a plain `delete vcm`).
+    VideoCodingModule::Destroy(vcm);
+    vcm = NULL;
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/trunk/src/modules/video_coding/main/test/video_rtp_play_mt.cc b/trunk/src/modules/video_coding/main/test/video_rtp_play_mt.cc
new file mode 100644
index 0000000..21b9f73
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/video_rtp_play_mt.cc
@@ -0,0 +1,261 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "../source/event.h"
+#include "test_macros.h"
+#include "rtp_player.h"
+
+#include <string.h>
+
+using namespace webrtc;
+
+// Thread function driving the VCM's periodic processing.  Returning
+// false stops the thread, which happens only when Process() reports an
+// error.
+bool ProcessingThread(void* obj)
+{
+    SharedState* state = static_cast<SharedState*>(obj);
+    if (state->_vcm.TimeUntilNextProcess() > 0)
+    {
+        return true;
+    }
+    return (state->_vcm.Process() >= 0);
+}
+
+// Thread function feeding packets from the RTP player into its data
+// callback, pacing itself on the player's packet timing.  Returns false
+// (stopping the thread) when NextPacket() reports an error.
+bool RtpReaderThread(void* obj)
+{
+    SharedState* state = static_cast<SharedState*>(obj);
+    // RTP stream main loop
+    TickTimeBase clock;
+    if (state->_rtpPlayer.NextPacket(clock.MillisecondTimestamp()) < 0)
+    {
+        // Check the stream before allocating the wait event: the event
+        // was previously created up front and leaked on this path.
+        return false;
+    }
+    EventWrapper* waitEvent = EventWrapper::Create();
+    waitEvent->Wait(state->_rtpPlayer.TimeUntilNextPacket());
+    delete waitEvent;
+    return true;
+}
+
+// Thread function for decoding: waits in Decode(10000) for an incoming
+// frame, then drains any pending dual-decoder frames.  Always keeps the
+// thread running.
+bool DecodeThread(void* obj)
+{
+    SharedState* state = static_cast<SharedState*>(obj);
+    state->_vcm.Decode(10000);
+    while (state->_vcm.DecodeDualFrame(0) == 1)
+    {
+    }
+    return true;
+}
+
+// Multi-threaded RTP playback through the VCM: separate reader,
+// processing and decode threads run for MAX_RUNTIME_MS (10 s), decoding
+// into |outFilename|.  releaseTestNo > 0 selects one of the predefined
+// release-test configurations for |releaseTestVideoType|.  Not intended
+// for builds with simulated events; returns -1 under EVENT_DEBUG.
+int RtpPlayMT(CmdArgs& args, int releaseTestNo,
+              webrtc::VideoCodecType releaseTestVideoType)
+{
+    // Don't run these tests with debug events.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+
+    // BEGIN Settings
+
+    bool protectionEnabled = true;
+    VCMVideoProtection protection = kProtectionDualDecoder;
+    WebRtc_UWord8 rttMS = 50;
+    float lossRate = 0.05f;
+    WebRtc_UWord32 renderDelayMs = 0;
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+    const WebRtc_Word64 MAX_RUNTIME_MS = 10000;
+    std::string outFilename = args.outputFile;
+    if (outFilename == "")
+        outFilename = test::OutputPath() + "RtpPlayMT_decoded.yuv";
+
+    // NACK is in play for the NACK, dual-decoder and NACK+FEC modes.
+    // (The original expression used the bare constant kProtectionNackFEC
+    // as a boolean operand, which made the disjunction always true.)
+    bool nackEnabled = (protectionEnabled &&
+                (protection == kProtectionDualDecoder ||
+                protection == kProtectionNack ||
+                protection == kProtectionNackFEC));
+    TickTimeBase clock;
+    VideoCodingModule* vcm =
+            VideoCodingModule::Create(1, &clock);
+    RtpDataCallback dataCallback(vcm);
+    std::string rtpFilename;
+    rtpFilename = args.inputFile;
+    if (releaseTestNo > 0)
+    {
+        // Setup a release test
+        switch (releaseTestVideoType)
+        {
+        case webrtc::kVideoCodecVP8:
+            rtpFilename = args.inputFile;
+            outFilename = test::OutputPath() + "MTReceiveTest_VP8";
+            break;
+        default:
+            return -1;
+        }
+        switch (releaseTestNo)
+        {
+        case 1:
+            // Normal execution
+            protectionEnabled = false;
+            nackEnabled = false;
+            rttMS = 0;
+            lossRate = 0.0f;
+            outFilename += "_Normal.yuv";
+            break;
+        case 2:
+            // Packet loss
+            protectionEnabled = false;
+            nackEnabled = false;
+            rttMS = 0;
+            lossRate = 0.05f;
+            outFilename += "_0.05.yuv";
+            break;
+        case 3:
+            // Packet loss and NACK
+            protection = kProtectionNack;
+            nackEnabled = true;
+            protectionEnabled = true;
+            rttMS = 100;
+            lossRate = 0.05f;
+            outFilename += "_0.05_NACK_100ms.yuv";
+            break;
+        case 4:
+            // Packet loss and dual decoder
+            // Not implemented
+            return 0;
+            break;
+        default:
+            return -1;
+        }
+        printf("Watch %s to verify that the output is reasonable\n",
+               outFilename.c_str());
+    }
+    RTPPlayer rtpStream(rtpFilename.c_str(), &dataCallback, &clock);
+    PayloadTypeList payloadTypes;
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
+                                                  kVideoCodecVP8));
+    Trace::CreateTrace();
+    Trace::SetTraceFile("receiverTestTrace.txt");
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    // END Settings
+
+    // Set up
+
+    SharedState mtState(*vcm, rtpStream);
+
+    if (rtpStream.Initialize(&payloadTypes) < 0)
+    {
+        return -1;
+    }
+    rtpStream.SimulatePacketLoss(lossRate, nackEnabled, rttMS);
+
+    WebRtc_Word32 ret = vcm->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+
+    // Create and start all threads
+    ThreadWrapper* processingThread = ThreadWrapper::CreateThread(ProcessingThread,
+            &mtState, kNormalPriority, "ProcessingThread");
+    ThreadWrapper* rtpReaderThread = ThreadWrapper::CreateThread(RtpReaderThread,
+            &mtState, kNormalPriority, "RtpReaderThread");
+    ThreadWrapper* decodeThread = ThreadWrapper::CreateThread(DecodeThread,
+            &mtState, kNormalPriority, "DecodeThread");
+
+    // Register receive codecs in VCM
+    for (PayloadTypeList::iterator it = payloadTypes.begin();
+        it != payloadTypes.end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec codec;
+            VideoCodingModule::Codec(payloadType->codecType, &codec);
+            codec.plType = payloadType->payloadType;
+            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+
+    // NOTE(review): the early returns below leak the thread wrappers and
+    // |vcm|; tolerable for a test binary.
+    if (processingThread != NULL)
+    {
+        unsigned int tid;
+        processingThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start processing thread\n");
+        return -1;
+    }
+    if (rtpReaderThread != NULL)
+    {
+        unsigned int tid;
+        rtpReaderThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start RTP reader thread\n");
+        return -1;
+    }
+    if (decodeThread != NULL)
+    {
+        unsigned int tid;
+        decodeThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start decode thread\n");
+        return -1;
+    }
+
+    FrameReceiveCallback receiveCallback(outFilename);
+    vcm->RegisterReceiveCallback(&receiveCallback);
+    vcm->RegisterPacketRequestCallback(&rtpStream);
+
+    vcm->SetChannelParameters(0, 0, rttMS);
+    vcm->SetVideoProtection(protection, protectionEnabled);
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    EventWrapper& waitEvent = *EventWrapper::Create();
+
+    // Decode for 10 seconds and then tear down and exit.
+    waitEvent.Wait(MAX_RUNTIME_MS);
+
+    // Tear down
+    while (!payloadTypes.empty())
+    {
+        delete payloadTypes.front();
+        payloadTypes.pop_front();
+    }
+    while (!processingThread->Stop())
+    {
+        ;
+    }
+    while (!rtpReaderThread->Stop())
+    {
+        ;
+    }
+    while (!decodeThread->Stop())
+    {
+        ;
+    }
+    VideoCodingModule::Destroy(vcm);
+    vcm = NULL;
+    delete &waitEvent;
+    delete processingThread;
+    delete decodeThread;
+    delete rtpReaderThread;
+    rtpStream.Print();
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/trunk/src/modules/video_coding/main/test/video_source.cc b/trunk/src/modules/video_coding/main/test/video_source.cc
new file mode 100644
index 0000000..d7ba0b9
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/video_source.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_source.h"
+
+#include <cassert>
+
+#include "testsupport/fileutils.h"
+
+// Default source: foreman_cif.yuv from the project resources directory,
+// CIF (352x288) I420 at 30 fps.
+VideoSource::VideoSource()
+:
+_fileName(webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"),
+_width(352),
+_height(288),
+_type(webrtc::kI420),
+_frameRate(30)
+{
+   // Intentionally empty.
+}
+
+// Creates a source from a file and a symbolic VideoSize.  The pixel
+// dimensions are resolved via GetWidthHeight(); note that if |size| is
+// not recognized there (and asserts are compiled out) _width/_height
+// remain 0.
+VideoSource::VideoSource(std::string fileName, VideoSize size,
+    float frameRate, webrtc::VideoType type /*= webrtc::kI420*/)
+:
+_fileName(fileName),
+_width(0),
+_height(0),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(size != kUndefined && size != kNumberOfVideoSizes);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+    GetWidthHeight(size);
+}
+
+// Creates a source from a file with explicit pixel dimensions.  The
+// asserts only guard debug builds; values are stored unvalidated in
+// release builds.
+VideoSource::VideoSource(std::string fileName, WebRtc_UWord16 width, WebRtc_UWord16 height,
+    float frameRate /*= 30*/, webrtc::VideoType type /*= webrtc::kI420*/)
+:
+_fileName(fileName),
+_width(width),
+_height(height),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(width > 0);
+    assert(height > 0);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+}
+
+// Returns the size in bytes of a single frame, as computed by
+// webrtc::CalcBufferSize() for the source's pixel format and dimensions.
+WebRtc_Word32
+VideoSource::GetFrameLength() const
+{
+    return webrtc::CalcBufferSize(_type, _width, _height);
+}
+
+// Returns the file name without its directory components and without the
+// trailing extension.
+std::string
+VideoSource::GetName() const
+{
+    // Strip any leading directory components ('/' or '\').
+    size_t slashPos = _fileName.find_last_of("/\\");
+    if (slashPos == std::string::npos)
+    {
+        slashPos = 0;
+    }
+    else
+    {
+        slashPos++;
+    }
+
+    // Strip the extension but keep the rest of the name intact.  (An
+    // earlier revision also stripped any trailing "_suffix"; that
+    // behavior was removed on purpose.)  If the name contains no '.',
+    // find_last_of() returns npos and substr() clamps the (wrapped)
+    // length to the end of the string, so the whole name is returned.
+    return _fileName.substr(slashPos, _fileName.find_last_of(".") - slashPos);
+
+}
+
+// Maps a symbolic VideoSize to pixel dimensions, storing them in
+// _width/_height.  Returns 0 on success, -1 for an unrecognized size (in
+// which case _width/_height are left unchanged).
+int
+VideoSource::GetWidthHeight( VideoSize size)
+{
+    switch(size)
+    {
+    case kSQCIF:
+        _width = 128;
+        _height = 96;
+        return 0;
+    case kQQVGA:
+        _width = 160;
+        _height = 120;
+        return 0;
+    case kQCIF:
+        _width = 176;
+        _height = 144;
+        return 0;
+    case kCGA:
+        _width = 320;
+        _height = 200;
+        return 0;
+    case kQVGA:
+        _width = 320;
+        _height = 240;
+        return 0;
+    case kSIF:
+        _width = 352;
+        _height = 240;
+        return 0;
+    case kWQVGA:
+        _width = 400;
+        _height = 240;
+        return 0;
+    case kCIF:
+        _width = 352;
+        _height = 288;
+        return 0;
+    case kW288p:
+        _width = 512;
+        _height = 288;
+        return 0;
+    case k448p:
+        _width = 576;
+        _height = 448;
+        return 0;
+    case kVGA:
+        _width = 640;
+        _height = 480;
+        return 0;
+    case k432p:
+        _width = 720;
+        _height = 432;
+        return 0;
+    case kW432p:
+        _width = 768;
+        _height = 432;
+        return 0;
+    case k4SIF:
+        _width = 704;
+        _height = 480;
+        return 0;
+    case kW448p:
+        _width = 768;
+        _height = 448;
+        return 0;
+    case kNTSC:
+        _width = 720;
+        _height = 480;
+        return 0;
+    case kFW448p:
+        _width = 800;
+        _height = 448;
+        return 0;
+    case kWVGA:
+        _width = 800;
+        _height = 480;
+        return 0;
+    case k4CIF:
+        _width = 704;
+        _height = 576;
+        return 0;
+    case kSVGA:
+        _width = 800;
+        _height = 600;
+        return 0;
+    case kW544p:
+        _width = 960;
+        _height = 544;
+        return 0;
+    case kW576p:
+        _width = 1024;
+        _height = 576;
+        return 0;
+    case kHD:
+        _width = 960;
+        _height = 720;
+        return 0;
+    case kXGA:
+        _width = 1024;
+        _height = 768;
+        return 0;
+    case kFullHD:
+        _width = 1440;
+        _height = 1080;
+        return 0;
+    case kWHD:
+        _width = 1280;
+        _height = 720;
+        return 0;
+    case kWFullHD:
+        _width = 1920;
+        _height = 1080;
+        return 0;
+    default:
+        return -1;
+    }
+}
diff --git a/trunk/src/modules/video_coding/main/test/video_source.h b/trunk/src/modules/video_coding/main/test/video_source.h
new file mode 100644
index 0000000..d9ad3d8
--- /dev/null
+++ b/trunk/src/modules/video_coding/main/test/video_source.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "typedefs.h"
+
+#include <string>
+
+// Symbolic video resolutions used by the tests, ordered by increasing
+// pixel count.  Each comment gives width*height = total pixels.
+enum VideoSize
+    {
+        kUndefined,
+        kSQCIF,     // 128*96       = 12 288
+        kQQVGA,     // 160*120      = 19 200
+        kQCIF,      // 176*144      = 25 344
+        kCGA,       // 320*200      = 64 000
+        kQVGA,      // 320*240      = 76 800
+        kSIF,       // 352*240      = 84 480
+        kWQVGA,     // 400*240      = 96 000
+        kCIF,       // 352*288      = 101 376
+        kW288p,     // 512*288      = 147 456 (WCIF)
+        k448p,      // 576*448      = 281 088
+        kVGA,       // 640*480      = 307 200
+        k432p,      // 720*432      = 311 040
+        kW432p,     // 768*432      = 331 776
+        k4SIF,      // 704*480      = 337 920
+        kW448p,     // 768*448      = 344 064
+        kNTSC,      // 720*480      = 345 600
+        kFW448p,    // 800*448      = 358 400
+        kWVGA,      // 800*480      = 384 000
+        k4CIF,      // 704*576      = 405 504
+        kSVGA,      // 800*600      = 480 000
+        kW544p,     // 960*544      = 522 240
+        kW576p,     // 1024*576     = 589 824 (W4CIF)
+        kHD,        // 960*720      = 691 200
+        kXGA,       // 1024*768     = 786 432
+        kWHD,       // 1280*720     = 921 600
+        kFullHD,    // 1440*1080    = 1 555 200
+        kWFullHD,   // 1920*1080    = 2 073 600
+
+        kNumberOfVideoSizes
+    };
+
+
+// Describes a raw video file used as test input: path, pixel dimensions,
+// pixel format and frame rate.
+class VideoSource
+{
+public:
+  VideoSource();
+  VideoSource(std::string fileName, VideoSize size, float frameRate, webrtc::VideoType type = webrtc::kI420);
+  VideoSource(std::string fileName, WebRtc_UWord16 width, WebRtc_UWord16 height,
+      float frameRate = 30, webrtc::VideoType type = webrtc::kI420);
+
+    std::string GetFileName() const { return _fileName; }
+    WebRtc_UWord16  GetWidth() const { return _width; }
+    WebRtc_UWord16 GetHeight() const { return _height; }
+    webrtc::VideoType GetType() const { return _type; }
+    float GetFrameRate() const { return _frameRate; }
+    // Resolves |size| into _width/_height; returns 0 on success, -1 for
+    // an unknown size.
+    int GetWidthHeight( VideoSize size);
+
+    // Returns the filename with the path (including the leading slash) removed.
+    std::string GetName() const;
+
+    // Size in bytes of one frame (see webrtc::CalcBufferSize).
+    WebRtc_Word32 GetFrameLength() const;
+
+private:
+    std::string         _fileName;
+    WebRtc_UWord16      _width;
+    WebRtc_UWord16      _height;
+    webrtc::VideoType   _type;
+    float               _frameRate;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+
diff --git a/trunk/src/modules/video_processing/main/OWNERS b/trunk/src/modules/video_processing/main/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/trunk/src/modules/video_processing/main/interface/video_processing.h b/trunk/src/modules/video_processing/main/interface/video_processing.h
new file mode 100644
index 0000000..512cace
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/interface/video_processing.h
@@ -0,0 +1,382 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_processing.h
+ * This header file contains the API required for the video
+ * processing module class.
+ */
+
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
+#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
+
+#include "module.h"
+#include "module_common_types.h"
+#include "video_processing_defines.h"
+
+/**
+   The module is largely intended to process video streams, except functionality 
+   provided by static functions which operate independently of previous frames. It
+   is recommended, but not required that a unique instance be used for each 
+   concurrently processed stream. Similarly, it is recommended to call Reset()
+   before switching to a new stream, but this is not absolutely required.
+   
+   The module provides basic thread safety by permitting only a single function to
+   execute concurrently.
+*/
+
+namespace webrtc {
+
+class VideoProcessingModule : public Module
+{
+public:
+    /**
+       Structure to hold frame statistics. Populate it with GetFrameStats().
+    */
+    struct FrameStats
+    {
+        FrameStats() :
+            mean(0),
+            sum(0),
+            numPixels(0),
+            subSamplWidth(0),
+            subSamplHeight(0)
+        {
+            memset(hist, 0, sizeof(hist));
+        }
+
+        WebRtc_UWord32 hist[256];      /**< Histogram of frame */
+        WebRtc_UWord32 mean;           /**< Mean value of frame */
+        WebRtc_UWord32 sum;            /**< Sum of frame */
+        WebRtc_UWord32 numPixels;      /**< Number of pixels */
+        WebRtc_UWord8  subSamplWidth;  /**< Subsampling rate of width in powers of 2 */
+        WebRtc_UWord8  subSamplHeight; /**< Subsampling rate of height in powers of 2 */
+    };
+
+    /**
+       Specifies the warning types returned by BrightnessDetection().
+    */
+    enum BrightnessWarning 
+    {
+        kNoWarning,                /**< Frame has acceptable brightness */
+        kDarkWarning,              /**< Frame is too dark */
+        kBrightWarning            /**< Frame is too bright */
+    };
+
+    /*
+       Creates a VPM object.
+      
+       \param[in] id
+           Unique identifier of this object.
+      
+       \return Pointer to a VPM object.
+    */
+    static VideoProcessingModule* Create(WebRtc_Word32 id);
+
+    /**
+       Destroys a VPM object.
+      
+       \param[in] module
+           Pointer to the VPM object to destroy.
+    */
+    static void Destroy(VideoProcessingModule* module);
+
+    /**
+       Not supported.
+    */
+    virtual WebRtc_Word32 TimeUntilNextProcess() { return -1; }
+
+    /**
+       Not supported.
+    */
+    virtual WebRtc_Word32 Process() { return -1; }
+
+    /**
+       Resets all processing components to their initial states. This should be
+       called whenever a new video stream is started.
+    */
+    virtual void Reset() = 0;
+
+    /**
+       Retrieves statistics for the input frame. This function must be used to
+       prepare a FrameStats struct for use in certain VPM functions.
+      
+       \param[out] stats
+           The frame statistics will be stored here on return.
+      
+       \param[in]  frame
+           Pointer to the video frame.
+      
+       \param[in]  width
+           Frame width in pixels.
+      
+       \param[in]  height
+           Frame height in pixels.
+      
+       \return 0 on success, -1 on failure.
+    */
+    static WebRtc_Word32 GetFrameStats(FrameStats& stats,
+                                     const WebRtc_UWord8* frame,
+                                     WebRtc_UWord32 width,
+                                     WebRtc_UWord32 height);
+
+    /**
+       \overload
+    */
+     static WebRtc_Word32 GetFrameStats(FrameStats& stats,
+                                     const VideoFrame& frame);
+
+    /**
+       Checks the validity of a FrameStats struct. Currently, valid implies only
+       that it has changed from its initialized state.
+      
+       \param[in] stats
+           Frame statistics.
+      
+       \return True on valid stats, false on invalid stats.
+    */
+    static bool ValidFrameStats(const FrameStats& stats);
+
+    /**
+       Returns a FrameStats struct to its initialized state.
+      
+       \param[in,out] stats
+           Frame statistics.
+    */
+    static void ClearFrameStats(FrameStats& stats);
+
+    /**
+       Enhances the color of an image through a constant mapping. Only the 
+       chrominance is altered. Has a fixed-point implementation.
+      
+       \param[in,out] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \return 0 on success, -1 on failure.
+    */
+    static WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
+                                        WebRtc_UWord32 width,
+                                        WebRtc_UWord32 height);
+
+    /**
+       \overload
+    */
+    static WebRtc_Word32 ColorEnhancement(VideoFrame& frame);
+
+    /**
+       Increases/decreases the luminance value.
+
+       \param[in,out] frame
+           Pointer to the video frame buffer.
+
+       \param[in]     width
+           Frame width in pixels.
+
+       \param[in]     height
+           Frame height in pixels.
+
+      \param[in] delta
+           The amount to change the chrominance value of every single pixel.
+           Can be < 0 also.
+
+       \return 0 on success, -1 on failure.
+    */
+    static WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
+                                  int width, int height, int delta);
+    /**
+       \overload
+    */
+    static WebRtc_Word32 Brighten(VideoFrame& frame, int delta);
+
+    /**
+       Detects and removes camera flicker from a video stream. Every frame from the
+       stream must be passed in. A frame will only be altered if flicker has been
+       detected. Has a fixed-point implementation.
+      
+       \param[in,out] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \param[in]     timestamp
+           Frame timestamp in 90 kHz format.
+      
+       \param[in,out] stats
+           Frame statistics provided by GetFrameStats(). On return the stats will
+           be reset to zero if the frame was altered. Call GetFrameStats() again
+           if the statistics for the altered frame are required.
+      
+       \return 0 on success, -1 on failure.
+    */
+    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
+                                     WebRtc_UWord32 width,
+                                     WebRtc_UWord32 height,
+                                     WebRtc_UWord32 timestamp,
+                                     FrameStats& stats) = 0;
+    
+    /**
+       \overload
+    */
+    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
+                                     FrameStats& stats) = 0;
+
+    /**
+       Denoises a video frame. Every frame from the stream should be passed in.
+       Has a fixed-point implementation.
+      
+       \param[in,out] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \return The number of modified pixels on success, -1 on failure.
+    */
+    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
+                                  WebRtc_UWord32 width,
+                                  WebRtc_UWord32 height) = 0;
+    
+    /**
+       \overload
+    */
+    virtual WebRtc_Word32 Denoising(VideoFrame& frame) = 0;
+
+    /**
+       Detects if a video frame is excessively bright or dark. Returns a warning if
+       this is the case. Multiple frames should be passed in before expecting a 
+       warning. Has a floating-point implementation.
+      
+       \param[in] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \param[in] stats
+           Frame statistics provided by GetFrameStats().
+      
+       \return A member of BrightnessWarning on success, -1 on error
+    */
+    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
+                                            WebRtc_UWord32 width,
+                                            WebRtc_UWord32 height,
+                                            const FrameStats& stats) = 0;
+
+    /**
+       \overload
+    */
+    virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
+                                            const FrameStats& stats) = 0;
+
+
+    /**
+    The following functions refer to the pre-processor unit within VPM. The pre-processor
+    performs spatial/temporal decimation and content analysis on the frames prior to encoding.
+    */
+	
+    /**
+    Enable/disable temporal decimation
+
+    \param[in] enable when true, temporal decimation is enabled
+    */
+    virtual void EnableTemporalDecimation(bool enable) = 0;
+	
+    /**
+   Set target resolution
+    
+   \param[in] width
+   Target width
+    
+   \param[in] height
+   Target height
+    
+    \param[in] frameRate
+    Target frameRate
+           
+    \return VPM_OK on success, a negative value on error (see error codes)
+
+    */
+    virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate) = 0;
+    
+    /**
+    Set max frame rate
+    \param[in] maxFrameRate: maximum frame rate (limited to native frame rate)
+
+    \return VPM_OK on success, a negative value on error (see error codes)
+    */
+    virtual WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate) = 0;
+
+    /**
+    Get decimated(target) frame rate
+    */
+    virtual WebRtc_UWord32 DecimatedFrameRate() = 0;
+	
+    /**
+    Get decimated(target) frame width
+    */
+    virtual WebRtc_UWord32 DecimatedWidth() const = 0;
+
+    /**
+    Get decimated(target) frame height
+    */
+    virtual WebRtc_UWord32 DecimatedHeight() const = 0 ;
+
+    /**
+    Set the spatial resampling settings of the VPM: The resampler may either be disabled or one of the following:
+    scaling to a dimension close to the target, followed by crop/pad
+
+    \param[in] resamplingMode
+    Set resampling mode (a member of VideoFrameResampling)
+    */
+    virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode) = 0;
+  
+    /**
+    Get Processed (decimated) frame
+	  
+    \param[in] frame pointer to the video frame.
+	  
+	  \param[in] processedFrame pointer (double) to the processed frame
+    
+    \return VPM_OK on success, a negative value on error (see error codes)
+    */
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame) = 0;
+
+    /**
+    Return content metrics for the last processed frame
+    */
+    virtual VideoContentMetrics* ContentMetrics() const = 0 ;
+
+    /**
+    Enable content analysis
+    */
+    virtual void EnableContentAnalysis(bool enable) = 0;
+
+};
+
+} //namespace
+
+#endif
diff --git a/trunk/src/modules/video_processing/main/interface/video_processing_defines.h b/trunk/src/modules/video_processing/main/interface/video_processing_defines.h
new file mode 100644
index 0000000..d9bebd4
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/interface/video_processing_defines.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_processing_defines.h
+ * This header file includes the definitions used in the video processor module
+ */
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
+#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Error codes
+#define VPM_OK                   0
+#define VPM_GENERAL_ERROR       -1
+#define VPM_MEMORY              -2
+#define VPM_PARAMETER_ERROR     -3
+#define VPM_SCALE_ERROR         -4
+#define VPM_UNINITIALIZED       -5
+#define VPM_UNIMPLEMENTED       -6
+
+enum VideoFrameResampling
+{
+  // TODO: Do we still need crop/pad?
+    kNoRescaling,         // disables rescaling
+    kFastRescaling,       // point
+    kBiLinear,            // bi-linear interpolation
+    kBox,                 // Box interpolation
+};
+
+} //namespace
+
+#endif
diff --git a/trunk/src/modules/video_processing/main/source/Android.mk b/trunk/src/modules/video_processing/main/source/Android.mk
new file mode 100644
index 0000000..829fa96
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/Android.mk
@@ -0,0 +1,59 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_video_processing
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    brighten.cc \
+    brightness_detection.cc \
+    color_enhancement.cc \
+    content_analysis.cc \
+    deflickering.cc \
+    denoising.cc \
+    frame_preprocessor.cc \
+    spatial_resampler.cc \
+    video_decimator.cc \
+    video_processing_impl.cc
+
+ifeq ($(TARGET_ARCH),x86)
+LOCAL_SRC_FILES += \
+    content_analysis_sse2.cc
+endif
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../audio_coding/main/interface \
+    $(LOCAL_PATH)/../../../utility/interface \
+    $(LOCAL_PATH)/../../../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/video_processing/main/source/brighten.cc b/trunk/src/modules/video_processing/main/source/brighten.cc
new file mode 100644
index 0000000..51e4b6b
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/brighten.cc
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_processing/main/source/brighten.h"
+
+#include <cstdlib>
+
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+namespace VideoProcessing {
+
+WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
+                       int width, int height, int delta) {
+  if (frame == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                 "Null frame pointer");
+    return VPM_PARAMETER_ERROR;
+  }
+
+  if (width <= 0 || height <= 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                 "Invalid frame size");
+    return VPM_PARAMETER_ERROR;
+  }
+
+  int numPixels = width * height;
+
+  int lookUp[256];
+  for (int i = 0; i < 256; i++) {
+    int val = i + delta;
+    lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
+  }
+
+  WebRtc_UWord8* tempPtr = frame;
+
+  for (int i = 0; i < numPixels; i++) {
+    *tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);
+    tempPtr++;
+  }
+  return VPM_OK;
+}
+
+}  // namespace VideoProcessing
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/source/brighten.h b/trunk/src/modules/video_processing/main/source/brighten.h
new file mode 100644
index 0000000..b7e6fb7
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/brighten.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
+#define MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
+
+#include "typedefs.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+
+namespace webrtc {
+namespace VideoProcessing {
+
+WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
+                       int width, int height, int delta);
+
+}  // namespace VideoProcessing
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
diff --git a/trunk/src/modules/video_processing/main/source/brightness_detection.cc b/trunk/src/modules/video_processing/main/source/brightness_detection.cc
new file mode 100644
index 0000000..6840df2
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/brightness_detection.cc
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_processing.h"
+#include "brightness_detection.h"
+#include "trace.h"
+
+#include <math.h>
+
+namespace webrtc {
+
+VPMBrightnessDetection::VPMBrightnessDetection() :
+    _id(0)
+{
+    Reset();
+}
+
+VPMBrightnessDetection::~VPMBrightnessDetection()
+{
+}
+
+WebRtc_Word32
+VPMBrightnessDetection::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return VPM_OK;
+}
+
+void
+VPMBrightnessDetection::Reset()
+{
+    _frameCntBright = 0;
+    _frameCntDark = 0;
+}
+
+WebRtc_Word32
+VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
+                                     const WebRtc_UWord32 width,
+                                     const WebRtc_UWord32 height,
+                                     const VideoProcessingModule::FrameStats& stats)
+{
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        return VPM_PARAMETER_ERROR;
+    }
+    
+    if (width == 0 || height == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        return VPM_PARAMETER_ERROR;
+    }
+
+    if (!VideoProcessingModule::ValidFrameStats(stats))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        return VPM_PARAMETER_ERROR;
+    }
+
+    const WebRtc_UWord8 frameCntAlarm = 2;
+
+    // Get proportion in lowest bins 
+    WebRtc_UWord8 lowTh = 20;
+    float propLow = 0;
+    for (WebRtc_UWord32 i = 0; i < lowTh; i++)
+    {
+        propLow += stats.hist[i];
+    }
+    propLow /= stats.numPixels;
+
+    // Get proportion in highest bins 
+    unsigned char highTh = 230;
+    float propHigh = 0;
+    for (WebRtc_UWord32 i = highTh; i < 256; i++)
+    {
+        propHigh += stats.hist[i];
+    }
+    propHigh /= stats.numPixels;
+
+    if(propHigh < 0.4)
+    {
+        if (stats.mean < 90 || stats.mean > 170)
+        {
+            // Standard deviation of Y
+            float stdY = 0;
+            for (WebRtc_UWord32 h = 0; h < height; h += (1 << stats.subSamplHeight))
+            {
+                WebRtc_UWord32 row = h*width;
+                for (WebRtc_UWord32 w = 0; w < width; w += (1 << stats.subSamplWidth))
+                {
+                    stdY += (frame[w + row] - stats.mean) * (frame[w + row] - stats.mean);
+                }
+            }           
+            stdY = sqrt(stdY / stats.numPixels);
+
+            // Get percentiles
+            WebRtc_UWord32 sum = 0;
+            WebRtc_UWord32 medianY = 140;
+            WebRtc_UWord32 perc05 = 0;
+            WebRtc_UWord32 perc95 = 255;
+            float posPerc05 = stats.numPixels * 0.05f;
+            float posMedian = stats.numPixels * 0.5f;
+            float posPerc95 = stats.numPixels * 0.95f;
+            for (WebRtc_UWord32 i = 0; i < 256; i++)
+            {
+                sum += stats.hist[i];
+
+                if (sum < posPerc05)
+                {
+                    perc05 = i;     // 5th perc
+                }
+                if (sum < posMedian)
+                {
+                    medianY = i;    // 50th perc
+                }
+                if (sum < posPerc95)
+                {
+                    perc95 = i;     // 95th perc
+                }
+                else
+                {
+                    break;
+                }
+            }
+
+            // Check if image is too dark
+            if ((stdY < 55) && (perc05 < 50))
+            { 
+                if (medianY < 60 || stats.mean < 80 ||  perc95 < 130 || propLow > 0.20)
+                {
+                    _frameCntDark++;
+                }
+                else
+                {
+                    _frameCntDark = 0;
+                }
+            } 
+            else
+            {
+                _frameCntDark = 0;
+            }
+
+            // Check if image is too bright
+            if ((stdY < 52) && (perc95 > 200) && (medianY > 160))
+            {
+                if (medianY > 185 || stats.mean > 185 || perc05 > 140 || propHigh > 0.25)
+                {
+                    _frameCntBright++;  
+                }
+                else 
+                {
+                    _frameCntBright = 0;
+                }
+            } 
+            else
+            {
+                _frameCntBright = 0;
+            }
+
+        } 
+        else
+        {
+            _frameCntDark = 0;
+            _frameCntBright = 0;
+        }
+
+    } 
+    else
+    {
+        _frameCntBright++;
+        _frameCntDark = 0;
+    }
+    
+    if (_frameCntDark > frameCntAlarm)
+    {
+        return VideoProcessingModule::kDarkWarning;
+    }
+    else if (_frameCntBright > frameCntAlarm)
+    {
+        return VideoProcessingModule::kBrightWarning;
+    }
+    else
+    {
+        return VideoProcessingModule::kNoWarning;
+    }
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/brightness_detection.h b/trunk/src/modules/video_processing/main/source/brightness_detection.h
new file mode 100644
index 0000000..7bed556
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/brightness_detection.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * brightness_detection.h
+ */
+#ifndef VPM_BRIGHTNESS_DETECTION_H
+#define VPM_BRIGHTNESS_DETECTION_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+namespace webrtc {
+
+class VPMBrightnessDetection
+{
+public:
+    VPMBrightnessDetection();
+    ~VPMBrightnessDetection();
+
+    WebRtc_Word32 ChangeUniqueId(WebRtc_Word32 id);
+
+    void Reset();
+
+    WebRtc_Word32 ProcessFrame(const WebRtc_UWord8* frame,
+                             WebRtc_UWord32 width,
+                             WebRtc_UWord32 height,
+                             const VideoProcessingModule::FrameStats& stats);
+
+private:
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32 _frameCntBright;
+    WebRtc_UWord32 _frameCntDark;
+};
+
+} //namespace
+
+#endif // VPM_BRIGHTNESS_DETECTION_H
diff --git a/trunk/src/modules/video_processing/main/source/color_enhancement.cc b/trunk/src/modules/video_processing/main/source/color_enhancement.cc
new file mode 100644
index 0000000..426596f
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/color_enhancement.cc
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "color_enhancement.h"
+#include "color_enhancement_private.h"
+#include "trace.h"
+#include <cstdlib>  // NULL
+
+namespace webrtc {
+
+namespace VideoProcessing
+{ 
+    WebRtc_Word32
+    ColorEnhancement(WebRtc_UWord8* frame,
+                     const WebRtc_UWord32 width,
+                     const WebRtc_UWord32 height)
+    {
+        // pointers to U and V color pixels
+        WebRtc_UWord8* ptrU;
+        WebRtc_UWord8* ptrV;
+        WebRtc_UWord8 tempChroma;
+        const WebRtc_UWord32 numPixels = width * height;
+
+
+        if (frame == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+            return VPM_GENERAL_ERROR;
+        }
+
+        if (width == 0 || height == 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+            return VPM_GENERAL_ERROR;
+        }
+        
+        // set pointers to first U and V pixels
+        
+        // stream format:
+        // | numPixels bytes luminance | numPixels/4 bytes chroma U | numPixels/4 bytes chroma V |
+        
+        ptrU = frame + numPixels;       // skip luminance
+        ptrV = ptrU + (numPixels>>2);
+
+        // loop through all chrominance pixels and modify color
+        for (WebRtc_UWord32 ix = 0; ix < (numPixels>>2); ix++)
+        {
+            tempChroma = colorTable[*ptrU][*ptrV];
+            *ptrV = colorTable[*ptrV][*ptrU];
+            *ptrU = tempChroma;
+            
+            // increment pointers
+            ptrU++;
+            ptrV++;
+        }
+        return VPM_OK;
+    }
+
+} //namespace
+
+} //namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/source/color_enhancement.h b/trunk/src/modules/video_processing/main/source/color_enhancement.h
new file mode 100644
index 0000000..87fabc3
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/color_enhancement.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * color_enhancement.h
+ */
+#ifndef VPM_COLOR_ENHANCEMENT_H
+#define VPM_COLOR_ENHANCEMENT_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+namespace webrtc {
+
+namespace VideoProcessing
+{
+    WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
+                                 WebRtc_UWord32 width,
+                                 WebRtc_UWord32 height);
+}
+
+} //namespace
+
+#endif // VPM_COLOR_ENHANCEMENT_H
diff --git a/trunk/src/modules/video_processing/main/source/color_enhancement_private.h b/trunk/src/modules/video_processing/main/source/color_enhancement_private.h
new file mode 100644
index 0000000..b88fc1a
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/color_enhancement_private.h
@@ -0,0 +1,273 @@
+#ifndef VPM_COLOR_ENHANCEMENT_PRIVATE_H
+#define VPM_COLOR_ENHANCEMENT_PRIVATE_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+//Table created with Matlab script createTable.m
+//Usage:
+//    Umod=colorTable[U][V]
+//    Vmod=colorTable[V][U]
+static const WebRtc_UWord8 colorTable[256][256] = {
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
+    {2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
+    {3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3},
+    {4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4},
+    {5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5},
+    {6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6},
+    {7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7},
+    {8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8},
+    {9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
+    {10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10},
+    {11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11},
+    {12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12},
+    {13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13},
+    {14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14},
+    {15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15},
+    {16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16},
+    {17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17},
+    {18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18},
+    {19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19},
+    {20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20},
+    {21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21},
+    {22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22},
+    {23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23},
+    {24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24},
+    {25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25},
+    {26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26},
+    {27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27},
+    {28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28},
+    {29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29},
+    {30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30},
+    {31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31},
+    {32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32},
+    {33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33},
+    {34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34},
+    {35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35},
+    {36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36},
+    {37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37},
+    {38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38},
+    {39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39},
+    {40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40},
+    {41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41},
+    {42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42},
+    {43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43},
+    {44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44},
+    {45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45},
+    {46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46},
+    {47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47},
+    {48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48},
+    {49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49},
+    {50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50},
+    {51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51},
+    {52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52},
+    {53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53},
+    {54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54},
+    {55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55},
+    {56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56},
+    {57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57},
+    {58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58},
+    {59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59},
+    {60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60},
+    {61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61},
+    {62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62},
+    {63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63},
+    {64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64},
+    {65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65},
+    {66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66},
+    {67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67},
+    {68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68},
+    {69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69},
+    {70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70},
+    {71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71},
+    {72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72},
+    {73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73},
+    {74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74},
+    {75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75},
+    {76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76},
+    {77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77},
+    {78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78},
+    {79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79},
+    {80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80},
+    {81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81},
+    {82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82},
+    {83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83},
+    {84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84},
+    {85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85},
+    {86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86},
+    {87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87},
+    {88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88},
+    {89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 81, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89},
+    {90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90},
+    {91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91},
+    {92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92},
+    {93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93},
+    {94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94},
+    {95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95},
+    {96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96},
+    {97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97},
+    {98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98},
+    {99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99},
+    {100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100},
+    {101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101},
+    {102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102},
+    {103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103},
+    {104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104},
+    {105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105},
+    {106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106},
+    {107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107},
+    {108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108},
+    {109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109},
+    {110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110},
+    {111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111},
+    {112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112},
+    {113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113},
+    {114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114},
+    {115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115},
+    {116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116},
+    {117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117},
+    {118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 117, 117, 117, 117, 118, 118, 118, 118, 118, 118, 118, 117, 117, 117, 117, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118},
+    {119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 118, 118, 118, 119, 119, 119, 119, 120, 120, 120, 119, 119, 119, 119, 118, 118, 118, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119},
+    {120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 119, 119, 119, 119, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 119, 119, 119, 119, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120},
+    {121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 121, 121, 122, 122, 122, 122, 123, 123, 123, 122, 122, 122, 122, 121, 121, 120, 120, 120, 120, 120, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121},
+    {122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 122, 122, 122, 123, 123, 123, 124, 124, 124, 124, 124, 123, 123, 123, 122, 122, 122, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122},
+    {123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 123, 123, 123, 124, 124, 124, 124, 125, 125, 125, 125, 125, 124, 124, 124, 124, 123, 123, 123, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123},
+    {124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 124, 124, 124, 124, 124, 125, 125, 125, 125, 125, 126, 126, 126, 125, 125, 125, 125, 125, 124, 124, 124, 124, 124, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124},
+    {125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 125, 125, 125, 125, 125, 125, 125, 125, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 125, 125, 125, 125, 125, 125, 125, 125, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125},
+    {126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 127, 127, 127, 127, 127, 127, 127, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126},
+    {127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127},
+    {128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 127, 127, 127, 127, 127, 127, 127, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128},
+    {129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 129, 129, 129, 129, 129, 129, 129, 129, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 129, 129, 129, 129, 129, 129, 129, 129, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129},
+    {130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 130, 130, 130, 130, 130, 129, 129, 129, 129, 129, 128, 128, 128, 129, 129, 129, 129, 129, 130, 130, 130, 130, 130, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130},
+    {131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 131, 131, 131, 130, 130, 130, 130, 129, 129, 129, 129, 129, 130, 130, 130, 130, 131, 131, 131, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131},
+    {132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 134, 134, 134, 134, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 132, 132, 132, 131, 131, 131, 130, 130, 130, 130, 130, 131, 131, 131, 132, 132, 132, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 134, 134, 134, 134, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132},
+    {133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 134, 134, 134, 134, 134, 133, 133, 132, 132, 132, 132, 131, 131, 131, 132, 132, 132, 132, 133, 133, 134, 134, 134, 134, 134, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133},
+    {134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 135, 135, 135, 135, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 135, 135, 135, 135, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134},
+    {135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 136, 136, 136, 135, 135, 135, 135, 134, 134, 134, 135, 135, 135, 135, 136, 136, 136, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135},
+    {136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 137, 137, 137, 137, 136, 136, 136, 136, 136, 136, 136, 137, 137, 137, 137, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136},
+    {137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137},
+    {138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138},
+    {139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139},
+    {140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140},
+    {141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141},
+    {142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142},
+    {143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143},
+    {144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144},
+    {145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145},
+    {146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146},
+    {147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147},
+    {148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148},
+    {149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149},
+    {150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150},
+    {151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151},
+    {152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152},
+    {153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153},
+    {154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154},
+    {155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155},
+    {156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156},
+    {157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157},
+    {158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158},
+    {159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159},
+    {160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160},
+    {161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161},
+    {162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162},
+    {163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163},
+    {164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164},
+    {165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 173, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165},
+    {166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166},
+    {167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167},
+    {168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168},
+    {169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169},
+    {170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170},
+    {171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171},
+    {172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172},
+    {173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173},
+    {174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174},
+    {175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175},
+    {176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176},
+    {177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177},
+    {178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178},
+    {179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179},
+    {180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180},
+    {181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181},
+    {182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182},
+    {183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183},
+    {184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184},
+    {185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185},
+    {186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186},
+    {187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187},
+    {188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188},
+    {189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189},
+    {190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190},
+    {191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191},
+    {192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192},
+    {193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193},
+    {194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194},
+    {195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195},
+    {196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196},
+    {197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197},
+    {198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198},
+    {199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199},
+    {200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200},
+    {201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201},
+    {202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202},
+    {203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203},
+    {204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204},
+    {205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205},
+    {206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206},
+    {207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207},
+    {208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208},
+    {209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209},
+    {210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210},
+    {211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211},
+    {212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212},
+    {213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213},
+    {214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214},
+    {215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215},
+    {216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216},
+    {217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217},
+    {218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218},
+    {219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219},
+    {220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220},
+    {221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221},
+    {222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222},
+    {223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223},
+    {224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224},
+    {225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225},
+    {226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226},
+    {227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227},
+    {228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228},
+    {229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229},
+    {230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230},
+    {231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231},
+    {232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232},
+    {233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233},
+    {234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234},
+    {235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235},
+    {236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236},
+    {237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237},
+    {238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238},
+    {239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239},
+    {240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240},
+    {241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241},
+    {242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242},
+    {243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243},
+    {244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244},
+    {245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245},
+    {246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246},
+    {247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247},
+    {248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248},
+    {249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249},
+    {250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250},
+    {251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251},
+    {252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252},
+    {253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253},
+    {254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254},
+    {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}
+};
+
+} //namespace
+
+#endif // VPM_COLOR_ENHANCEMENT_PRIVATE_H
diff --git a/trunk/src/modules/video_processing/main/source/content_analysis.cc b/trunk/src/modules/video_processing/main/source/content_analysis.cc
new file mode 100644
index 0000000..8ea319c
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/content_analysis.cc
@@ -0,0 +1,338 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "content_analysis.h"
+#include "tick_util.h"
+#include "system_wrappers/interface/cpu_features_wrapper.h"
+
+#include <math.h>
+#include <stdlib.h>
+
+namespace webrtc {
+
+VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection):
+_origFrame(NULL),
+_prevFrame(NULL),
+_width(0),
+_height(0),
+_skipNum(1),
+_border(8),
+_motionMagnitude(0.0f),
+_spatialPredErr(0.0f),
+_spatialPredErrH(0.0f),
+_spatialPredErrV(0.0f),
+_firstFrame(true),
+_CAInit(false),
+_cMetrics(NULL)
+{
+    ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C;
+    TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C;
+
+    if (runtime_cpu_detection)
+    {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+        if (WebRtc_GetCPUInfo(kSSE2))
+        {
+            ComputeSpatialMetrics =
+                          &VPMContentAnalysis::ComputeSpatialMetrics_SSE2;
+            TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2;
+        }
+#endif
+    }
+
+    Release();
+}
+
+VPMContentAnalysis::~VPMContentAnalysis()
+{
+    Release();
+}
+
+
+VideoContentMetrics*
+VPMContentAnalysis::ComputeContentMetrics(const VideoFrame* inputFrame)
+{
+    if (inputFrame == NULL)
+    {
+        return NULL;
+    }
+
+    // Init if needed (native dimension change)
+    if (_width != inputFrame->Width() || _height != inputFrame->Height())
+    {
+        if (VPM_OK != Initialize((WebRtc_UWord16)inputFrame->Width(),
+                                 (WebRtc_UWord16)inputFrame->Height()))
+        {
+            return NULL;
+        }
+    }
+
+    _origFrame = inputFrame->Buffer();
+
+    // compute spatial metrics: 3 spatial prediction errors
+    (this->*ComputeSpatialMetrics)();
+
+    // compute motion metrics
+    if (_firstFrame == false)
+        ComputeMotionMetrics();
+
+    // saving current frame as previous one: Y only
+    memcpy(_prevFrame, _origFrame, _width * _height);
+
+    _firstFrame =  false;
+    _CAInit = true;
+
+    return ContentMetrics();
+}
+
+WebRtc_Word32
+VPMContentAnalysis::Release()
+{
+    if (_cMetrics != NULL)
+    {
+        delete _cMetrics;
+       _cMetrics = NULL;
+    }
+
+    if (_prevFrame != NULL)
+    {
+        delete [] _prevFrame;
+        _prevFrame = NULL;
+    }
+
+    _width = 0;
+    _height = 0;
+    _firstFrame = true;
+
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VPMContentAnalysis::Initialize(WebRtc_UWord16 width, WebRtc_UWord16 height)
+{
+   _width = width;
+   _height = height;
+   _firstFrame = true;
+
+    // skip parameter: # of skipped rows: for complexity reduction
+    //  temporal also currently uses it for column reduction.
+    _skipNum = 1;
+
+    // use skipNum = 2 for 4CIF, WHD
+    if ( (_height >=  576) && (_width >= 704) )
+    {
+        _skipNum = 2;
+    }
+    // use skipNum = 4 for FULL_HD images
+    if ( (_height >=  1080) && (_width >= 1920) )
+    {
+        _skipNum = 4;
+    }
+
+    if (_cMetrics != NULL)
+    {
+        delete _cMetrics;
+    }
+
+    if (_prevFrame != NULL)
+    {
+        delete [] _prevFrame;
+    }
+
+    // Spatial Metrics don't work on a border of 8.  Minimum processing
+    // block size is 16 pixels.  So make sure the width and height support this.
+    if (_width <= 32 || _height <= 32)
+    {
+        _CAInit = false;
+        return VPM_PARAMETER_ERROR;
+    }
+
+    _cMetrics = new VideoContentMetrics();
+    if (_cMetrics == NULL)
+    {
+        return VPM_MEMORY;
+    }
+
+    _prevFrame = new WebRtc_UWord8[_width * _height] ; // Y only
+    if (_prevFrame == NULL)
+    {
+        return VPM_MEMORY;
+    }
+
+    return VPM_OK;
+}
+
+
+// Compute motion metrics: magnitude over non-zero motion vectors,
+//  and size of zero cluster
+WebRtc_Word32
+VPMContentAnalysis::ComputeMotionMetrics()
+{
+
+    // Motion metrics: only one is derived from normalized
+    //  (MAD) temporal difference
+    (this->*TemporalDiffMetric)();
+
+    return VPM_OK;
+}
+
+// Normalized temporal difference (MAD): used as a motion level metric
+// Normalize MAD by spatial contrast: images with more contrast
+//  (pixel variance) likely have larger temporal difference
+// To reduce complexity, we compute the metric for a reduced set of points.
+WebRtc_Word32
+VPMContentAnalysis::TemporalDiffMetric_C()
+{
+    // size of original frame
+    WebRtc_UWord16 sizei = _height;
+    WebRtc_UWord16 sizej = _width;
+
+    WebRtc_UWord32 tempDiffSum = 0;
+    WebRtc_UWord32 pixelSum = 0;
+    WebRtc_UWord64 pixelSqSum = 0;
+
+    WebRtc_UWord32 numPixels = 0; // counter for # of pixels
+    WebRtc_UWord32 ssn;
+
+    const WebRtc_Word32 width_end = ((_width - 2*_border) & -16) + _border;
+
+    for(WebRtc_UWord16 i = _border; i < sizei - _border; i += _skipNum)
+    {
+        for(WebRtc_UWord16 j = _border; j < width_end; j++)
+        {
+            numPixels += 1;
+            ssn =  i * sizej + j;
+
+            WebRtc_UWord8 currPixel  = _origFrame[ssn];
+            WebRtc_UWord8 prevPixel  = _prevFrame[ssn];
+
+            tempDiffSum += (WebRtc_UWord32)
+                            abs((WebRtc_Word16)(currPixel - prevPixel));
+            pixelSum += (WebRtc_UWord32) currPixel;
+            pixelSqSum += (WebRtc_UWord64) (currPixel * currPixel);
+        }
+    }
+
+    // default
+    _motionMagnitude = 0.0f;
+
+    if (tempDiffSum == 0)
+    {
+        return VPM_OK;
+    }
+
+    // normalize over all pixels
+    float const tempDiffAvg = (float)tempDiffSum / (float)(numPixels);
+    float const pixelSumAvg = (float)pixelSum / (float)(numPixels);
+    float const pixelSqSumAvg = (float)pixelSqSum / (float)(numPixels);
+    float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
+
+    if (contrast > 0.0)
+    {
+        contrast = sqrt(contrast);
+       _motionMagnitude = tempDiffAvg/contrast;
+    }
+
+    return VPM_OK;
+
+}
+
+// Compute spatial metrics:
+// To reduce complexity, we compute the metric for a reduced set of points.
+// The spatial metrics are rough estimates of the prediction error cost for
+//  each QM spatial mode: 2x2,1x2,2x1
+// The metrics are a simple estimate of the up-sampling prediction error,
+// estimated assuming sub-sampling for decimation (no filtering),
+// and up-sampling back up with simple bilinear interpolation.
+WebRtc_Word32
+VPMContentAnalysis::ComputeSpatialMetrics_C()
+{
+    //size of original frame
+    const WebRtc_UWord16 sizei = _height;
+    const WebRtc_UWord16 sizej = _width;
+
+    // pixel mean square average: used to normalize the spatial metrics
+    WebRtc_UWord32 pixelMSA = 0;
+
+    WebRtc_UWord32 spatialErrSum = 0;
+    WebRtc_UWord32 spatialErrVSum = 0;
+    WebRtc_UWord32 spatialErrHSum = 0;
+
+    // make sure work section is a multiple of 16
+    const WebRtc_UWord32 width_end = ((sizej - 2*_border) & -16) + _border;
+
+    for(WebRtc_UWord16 i = _border; i < sizei - _border; i += _skipNum)
+    {
+        for(WebRtc_UWord16 j = _border; j < width_end; j++)
+        {
+            WebRtc_UWord32 ssn1,ssn2,ssn3,ssn4,ssn5;
+
+            ssn1=  i * sizej + j;
+            ssn2 = (i + 1) * sizej + j; // bottom
+            ssn3 = (i - 1) * sizej + j; // top
+            ssn4 = i * sizej + j + 1;   // right
+            ssn5 = i * sizej + j - 1;   // left
+
+            WebRtc_UWord16 refPixel1  = _origFrame[ssn1] << 1;
+            WebRtc_UWord16 refPixel2  = _origFrame[ssn1] << 2;
+
+            WebRtc_UWord8 bottPixel = _origFrame[ssn2];
+            WebRtc_UWord8 topPixel = _origFrame[ssn3];
+            WebRtc_UWord8 rightPixel = _origFrame[ssn4];
+            WebRtc_UWord8 leftPixel = _origFrame[ssn5];
+
+            spatialErrSum  += (WebRtc_UWord32) abs((WebRtc_Word16)(refPixel2
+                            - (WebRtc_UWord16)(bottPixel + topPixel
+                                             + leftPixel + rightPixel)));
+            spatialErrVSum += (WebRtc_UWord32) abs((WebRtc_Word16)(refPixel1
+                            - (WebRtc_UWord16)(bottPixel + topPixel)));
+            spatialErrHSum += (WebRtc_UWord32) abs((WebRtc_Word16)(refPixel1
+                            - (WebRtc_UWord16)(leftPixel + rightPixel)));
+
+            pixelMSA += _origFrame[ssn1];
+        }
+    }
+
+    // normalize over all pixels
+    const float spatialErr  = (float)(spatialErrSum >> 2);
+    const float spatialErrH = (float)(spatialErrHSum >> 1);
+    const float spatialErrV = (float)(spatialErrVSum >> 1);
+    const float norm = (float)pixelMSA;
+
+    // 2X2:
+    _spatialPredErr = spatialErr / norm;
+
+    // 1X2:
+    _spatialPredErrH = spatialErrH / norm;
+
+    // 2X1:
+    _spatialPredErrV = spatialErrV / norm;
+
+    return VPM_OK;
+}
+
+VideoContentMetrics*
+VPMContentAnalysis::ContentMetrics()
+{
+    if (_CAInit == false)
+    {
+        return NULL;
+    }
+
+    _cMetrics->spatial_pred_err = _spatialPredErr;
+    _cMetrics->spatial_pred_err_h = _spatialPredErrH;
+    _cMetrics->spatial_pred_err_v = _spatialPredErrV;
+    // Motion metric: normalized temporal difference (MAD)
+    _cMetrics->motion_magnitude = _motionMagnitude;
+
+    return _cMetrics;
+
+}
+
+} // namespace
diff --git a/trunk/src/modules/video_processing/main/source/content_analysis.h b/trunk/src/modules/video_processing/main/source/content_analysis.h
new file mode 100644
index 0000000..588712a
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/content_analysis.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VPM_CONTENT_ANALYSIS_H
+#define VPM_CONTENT_ANALYSIS_H
+
+#include "typedefs.h"
+#include "module_common_types.h"
+#include "video_processing_defines.h"
+
+namespace webrtc {
+
+class VPMContentAnalysis
+{
+public:
+    // When |runtime_cpu_detection| is true, runtime selection of an optimized
+    // code path is allowed.
+    VPMContentAnalysis(bool runtime_cpu_detection);
+    ~VPMContentAnalysis();
+
+    // Initialize ContentAnalysis - should be called prior to
+    //  extractContentFeature
+    // Inputs:         width, height
+    // Return value:   0 if OK, negative value upon error
+    WebRtc_Word32 Initialize(WebRtc_UWord16 width, WebRtc_UWord16 height);
+
+    // Extract content Feature - main function of ContentAnalysis
+    // Input:           new frame
+    // Return value:    pointer to structure containing content Analysis
+    //                  metrics or NULL value upon error
+    VideoContentMetrics* ComputeContentMetrics(const VideoFrame* inputFrame);
+
+    // Release all allocated memory
+    // Output: 0 if OK, negative value upon error
+    WebRtc_Word32 Release();
+
+private:
+
+    // return motion metrics
+    VideoContentMetrics* ContentMetrics();
+
+    // Normalized temporal difference metric: for motion magnitude
+    typedef WebRtc_Word32 (VPMContentAnalysis::*TemporalDiffMetricFunc)();
+    TemporalDiffMetricFunc TemporalDiffMetric;
+    WebRtc_Word32 TemporalDiffMetric_C();
+
+    // Motion metric method: call 2 metrics (magnitude and size)
+    WebRtc_Word32 ComputeMotionMetrics();
+
+    // Spatial metric method: computes the 3 frame-average spatial
+    //  prediction errors (1x2,2x1,2x2)
+    typedef WebRtc_Word32 (VPMContentAnalysis::*ComputeSpatialMetricsFunc)();
+    ComputeSpatialMetricsFunc ComputeSpatialMetrics;
+    WebRtc_Word32 ComputeSpatialMetrics_C();
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+    WebRtc_Word32 ComputeSpatialMetrics_SSE2();
+    WebRtc_Word32 TemporalDiffMetric_SSE2();
+#endif
+
+    const WebRtc_UWord8*       _origFrame;
+    WebRtc_UWord8*             _prevFrame;
+    WebRtc_UWord16             _width;
+    WebRtc_UWord16             _height;
+    WebRtc_UWord32             _skipNum;
+    WebRtc_Word32              _border;
+
+    // Content Metrics:
+    // stores the local average of the metrics
+    float                  _motionMagnitude;    // motion class
+    float                  _spatialPredErr;     // spatial class
+    float                  _spatialPredErrH;    // spatial class
+    float                  _spatialPredErrV;    // spatial class
+    bool                   _firstFrame;
+    bool                   _CAInit;
+
+    VideoContentMetrics*   _cMetrics;
+
+}; // end of VPMContentAnalysis class definition
+
+} // namespace
+
+#endif
diff --git a/trunk/src/modules/video_processing/main/source/content_analysis_sse2.cc b/trunk/src/modules/video_processing/main/source/content_analysis_sse2.cc
new file mode 100644
index 0000000..f505850
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/content_analysis_sse2.cc
@@ -0,0 +1,300 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "content_analysis.h"
+
+#include <emmintrin.h>
+#include <math.h>
+
+namespace webrtc {
+
+WebRtc_Word32
+VPMContentAnalysis::TemporalDiffMetric_SSE2()
+{
+    WebRtc_UWord32 numPixels = 0;       // counter for # of pixels
+
+    const WebRtc_UWord8* imgBufO = _origFrame + _border*_width + _border;
+    const WebRtc_UWord8* imgBufP = _prevFrame + _border*_width + _border;
+
+    const WebRtc_Word32 width_end = ((_width - 2*_border) & -16) + _border;
+
+    __m128i sad_64   = _mm_setzero_si128();
+    __m128i sum_64   = _mm_setzero_si128();
+    __m128i sqsum_64 = _mm_setzero_si128();
+    const __m128i z  = _mm_setzero_si128();
+
+    for(WebRtc_UWord16 i = 0; i < (_height - 2*_border); i += _skipNum)
+    {
+        __m128i sqsum_32  = _mm_setzero_si128();
+
+        const WebRtc_UWord8 *lineO = imgBufO;
+        const WebRtc_UWord8 *lineP = imgBufP;
+
+        // Work on 16 pixels at a time.  For HD content with a width of 1920
+        // this loop will run ~67 times (depending on border).  Maximum for
+        // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit
+        // results which are then accumulated.  There is no chance of
+        // rollover for these two accumulators.
+        // o*o will have a maximum of 255*255 = 65025.  This will roll over
+        // a 16 bit accumulator as 67*65025 > 65535, but will fit in a
+        // 32 bit accumulator.
+        for(WebRtc_UWord16 j = 0; j < width_end - _border; j += 16)
+        {
+            const __m128i o = _mm_loadu_si128((__m128i*)(lineO));
+            const __m128i p = _mm_loadu_si128((__m128i*)(lineP));
+
+            lineO += 16;
+            lineP += 16;
+
+            // abs pixel difference between frames
+            sad_64 = _mm_add_epi64 (sad_64, _mm_sad_epu8(o, p));
+
+            // sum of all pixels in frame
+            sum_64 = _mm_add_epi64 (sum_64, _mm_sad_epu8(o, z));
+
+            // squared sum of all pixels in frame
+            const __m128i olo = _mm_unpacklo_epi8(o,z);
+            const __m128i ohi = _mm_unpackhi_epi8(o,z);
+
+            const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo);
+            const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi);
+
+            sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo);
+            sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi);
+        }
+
+        // Add to 64 bit running sum as to not roll over.
+        sqsum_64 = _mm_add_epi64(sqsum_64,
+                                _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32,z),
+                                              _mm_unpacklo_epi32(sqsum_32,z)));
+
+        imgBufO += _width * _skipNum;
+        imgBufP += _width * _skipNum;
+        numPixels += (width_end - _border);
+    }
+
+    __m128i sad_final_128;
+    __m128i sum_final_128;
+    __m128i sqsum_final_128;
+
+    // bring sums out of vector registers and into integer register
+    // domain, summing them along the way
+    _mm_store_si128 (&sad_final_128, sad_64);
+    _mm_store_si128 (&sum_final_128, sum_64);
+    _mm_store_si128 (&sqsum_final_128, sqsum_64);
+
+    WebRtc_UWord64 *sad_final_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sad_final_128);
+    WebRtc_UWord64 *sum_final_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sum_final_128);
+    WebRtc_UWord64 *sqsum_final_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sqsum_final_128);
+
+    const WebRtc_UWord32 pixelSum = sum_final_64[0] + sum_final_64[1];
+    const WebRtc_UWord64 pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1];
+    const WebRtc_UWord32 tempDiffSum = sad_final_64[0] + sad_final_64[1];
+
+    // default
+    _motionMagnitude = 0.0f;
+
+    if (tempDiffSum == 0)
+    {
+        return VPM_OK;
+    }
+
+    // normalize over all pixels
+    const float tempDiffAvg = (float)tempDiffSum / (float)(numPixels);
+    const float pixelSumAvg = (float)pixelSum / (float)(numPixels);
+    const float pixelSqSumAvg = (float)pixelSqSum / (float)(numPixels);
+    float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
+
+    if (contrast > 0.0)
+    {
+        contrast = sqrt(contrast);
+       _motionMagnitude = tempDiffAvg/contrast;
+    }
+
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VPMContentAnalysis::ComputeSpatialMetrics_SSE2()
+{
+    const WebRtc_UWord8* imgBuf = _origFrame + _border*_width;
+    const WebRtc_Word32 width_end = ((_width - 2*_border) & -16) + _border;
+
+    __m128i se_32  = _mm_setzero_si128();
+    __m128i sev_32 = _mm_setzero_si128();
+    __m128i seh_32 = _mm_setzero_si128();
+    __m128i msa_32 = _mm_setzero_si128();
+    const __m128i z = _mm_setzero_si128();
+
+    // Error is accumulated as a 32 bit value.  Looking at HD content with a
+    // height of 1080 lines, or about 67 macro blocks.  If the 16 bit row
+    // value is maxed out at 65535 for every row, 65535*1080 = 70777800, which
+    // will not roll over a 32 bit accumulator.
+    // _skipNum is also used to reduce the number of rows
+    for(WebRtc_Word32 i = 0; i < (_height - 2*_border); i += _skipNum)
+    {
+        __m128i se_16  = _mm_setzero_si128();
+        __m128i sev_16 = _mm_setzero_si128();
+        __m128i seh_16 = _mm_setzero_si128();
+        __m128i msa_16 = _mm_setzero_si128();
+
+        // Row error is accumulated as a 16 bit value.  There are 8
+        // accumulators.  Max value of a 16 bit number is 65535.  Looking
+        // at HD content, 1080p, has a width of 1920, or 120 macro blocks.
+        // One macro block is processed at a time.  Absolute max error at
+        // a point would be abs(0-255+255+255+255) which equals 1020.
+        // 120*1020 = 122400.  The probability of hitting this is quite low
+        // on well behaved content.  A specially crafted image could roll over.
+        // _border could also be adjusted to concentrate on just the center of
+        // the images for an HD capture in order to reduce the possibility of
+        // rollover.
+        const WebRtc_UWord8 *lineTop = imgBuf - _width + _border;
+        const WebRtc_UWord8 *lineCen = imgBuf + _border;
+        const WebRtc_UWord8 *lineBot = imgBuf + _width + _border;
+
+        for(WebRtc_Word32 j = 0; j < width_end - _border; j += 16)
+        {
+            const __m128i t = _mm_loadu_si128((__m128i*)(lineTop));
+            const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1));
+            const __m128i c = _mm_loadu_si128((__m128i*)(lineCen));
+            const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1));
+            const __m128i b = _mm_loadu_si128((__m128i*)(lineBot));
+
+            lineTop += 16;
+            lineCen += 16;
+            lineBot += 16;
+
+            // center pixel unpacked
+            __m128i clo = _mm_unpacklo_epi8(c,z);
+            __m128i chi = _mm_unpackhi_epi8(c,z);
+
+            // left right pixels unpacked and added together
+            const __m128i lrlo = _mm_add_epi16(_mm_unpacklo_epi8(l,z),
+                                               _mm_unpacklo_epi8(r,z));
+            const __m128i lrhi = _mm_add_epi16(_mm_unpackhi_epi8(l,z),
+                                               _mm_unpackhi_epi8(r,z));
+
+            // top & bottom pixels unpacked and added together
+            const __m128i tblo = _mm_add_epi16(_mm_unpacklo_epi8(t,z),
+                                               _mm_unpacklo_epi8(b,z));
+            const __m128i tbhi = _mm_add_epi16(_mm_unpackhi_epi8(t,z),
+                                               _mm_unpackhi_epi8(b,z));
+
+            // running sum of all pixels
+            msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo));
+
+            clo = _mm_slli_epi16(clo, 1);
+            chi = _mm_slli_epi16(chi, 1);
+            const __m128i sevtlo = _mm_subs_epi16(clo, tblo);
+            const __m128i sevthi = _mm_subs_epi16(chi, tbhi);
+            const __m128i sehtlo = _mm_subs_epi16(clo, lrlo);
+            const __m128i sehthi = _mm_subs_epi16(chi, lrhi);
+
+            clo = _mm_slli_epi16(clo, 1);
+            chi = _mm_slli_epi16(chi, 1);
+            const __m128i setlo = _mm_subs_epi16(clo,
+                                                 _mm_add_epi16(lrlo, tblo));
+            const __m128i sethi = _mm_subs_epi16(chi,
+                                                 _mm_add_epi16(lrhi, tbhi));
+
+            // Add to 16 bit running sum
+            se_16  = _mm_add_epi16(se_16,
+                                   _mm_max_epi16(setlo,
+                                                 _mm_subs_epi16(z, setlo)));
+            se_16  = _mm_add_epi16(se_16,
+                                   _mm_max_epi16(sethi,
+                                                 _mm_subs_epi16(z, sethi)));
+            sev_16 = _mm_add_epi16(sev_16,
+                                   _mm_max_epi16(sevtlo,
+                                                 _mm_subs_epi16(z, sevtlo)));
+            sev_16 = _mm_add_epi16(sev_16,
+                                   _mm_max_epi16(sevthi,
+                                                 _mm_subs_epi16(z, sevthi)));
+            seh_16 = _mm_add_epi16(seh_16,
+                                   _mm_max_epi16(sehtlo,
+                                                 _mm_subs_epi16(z, sehtlo)));
+            seh_16 = _mm_add_epi16(seh_16,
+                                   _mm_max_epi16(sehthi,
+                                                 _mm_subs_epi16(z, sehthi)));
+        }
+
+        // Add to 32 bit running sum as to not roll over.
+        se_32  = _mm_add_epi32(se_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(se_16,z),
+                                             _mm_unpacklo_epi16(se_16,z)));
+        sev_32 = _mm_add_epi32(sev_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(sev_16,z),
+                                             _mm_unpacklo_epi16(sev_16,z)));
+        seh_32 = _mm_add_epi32(seh_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(seh_16,z),
+                                             _mm_unpacklo_epi16(seh_16,z)));
+        msa_32 = _mm_add_epi32(msa_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(msa_16,z),
+                                             _mm_unpacklo_epi16(msa_16,z)));
+
+        imgBuf += _width * _skipNum;
+    }
+
+    __m128i se_128;
+    __m128i sev_128;
+    __m128i seh_128;
+    __m128i msa_128;
+
+    // bring sums out of vector registers and into integer register
+    // domain, summing them along the way
+    _mm_store_si128 (&se_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(se_32,z),
+                                   _mm_unpacklo_epi32(se_32,z)));
+    _mm_store_si128 (&sev_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(sev_32,z),
+                                   _mm_unpacklo_epi32(sev_32,z)));
+    _mm_store_si128 (&seh_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(seh_32,z),
+                                   _mm_unpacklo_epi32(seh_32,z)));
+    _mm_store_si128 (&msa_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(msa_32,z),
+                                   _mm_unpacklo_epi32(msa_32,z)));
+
+    WebRtc_UWord64 *se_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&se_128);
+    WebRtc_UWord64 *sev_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sev_128);
+    WebRtc_UWord64 *seh_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&seh_128);
+    WebRtc_UWord64 *msa_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&msa_128);
+
+    const WebRtc_UWord32 spatialErrSum  = se_64[0] + se_64[1];
+    const WebRtc_UWord32 spatialErrVSum = sev_64[0] + sev_64[1];
+    const WebRtc_UWord32 spatialErrHSum = seh_64[0] + seh_64[1];
+    const WebRtc_UWord32 pixelMSA = msa_64[0] + msa_64[1];
+
+    // normalize over all pixels
+    const float spatialErr  = (float)(spatialErrSum >> 2);
+    const float spatialErrH = (float)(spatialErrHSum >> 1);
+    const float spatialErrV = (float)(spatialErrVSum >> 1);
+    const float norm = (float)pixelMSA;
+
+    // 2X2:
+    _spatialPredErr = spatialErr / norm;
+
+    // 1X2:
+    _spatialPredErrH = spatialErrH / norm;
+
+    // 2X1:
+    _spatialPredErrV = spatialErrV / norm;
+
+    return VPM_OK;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/source/deflickering.cc b/trunk/src/modules/video_processing/main/source/deflickering.cc
new file mode 100644
index 0000000..d0b8d3b
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/deflickering.cc
@@ -0,0 +1,445 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <math.h>
+#include <stdlib.h>
+
+#include "deflickering.h"
+#include "trace.h"
+#include "signal_processing_library.h"
+#include "sort.h"
+
+namespace webrtc {
+
+// Detection constants
+enum { kFrequencyDeviation = 39 };      // (Q4) Maximum allowed deviation for detection
+enum { kMinFrequencyToDetect = 32 };    // (Q4) Minimum frequency that can be detected
+enum { kNumFlickerBeforeDetect = 2 };   // Number of flickers before we accept detection
+enum { kMeanValueScaling = 4 };         // (Q4) In power of 2
+enum { kZeroCrossingDeadzone = 10 };    // Deadzone region in terms of pixel values
+
+// Deflickering constants
+// Compute the quantiles over 1 / DownsamplingFactor of the image.
+enum { kDownsamplingFactor = 8 };
+enum { kLog2OfDownsamplingFactor = 3 };
+
+// To generate in Matlab:
+// >> probUW16 = round(2^11 * [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]);
+// >> fprintf('%d, ', probUW16)
+// Resolution reduced to avoid overflow when multiplying with the (potentially) large 
+// number of pixels.
+const WebRtc_UWord16 VPMDeflickering::_probUW16[kNumProbs] =
+    {102, 205, 410, 614, 819, 1024, 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11>
+
+// To generate in Matlab:
+// >> numQuants = 14; maxOnlyLength = 5;
+// >> weightUW16 = round(2^15 * [linspace(0.5, 1.0, numQuants - maxOnlyLength)]);
+// >> fprintf('%d, %d,\n ', weightUW16);
+const WebRtc_UWord16 VPMDeflickering::_weightUW16[kNumQuants - kMaxOnlyLength] =
+    {16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15>
+ 
+VPMDeflickering::VPMDeflickering() :
+    _id(0)
+{
+    Reset();
+}
+
+VPMDeflickering::~VPMDeflickering()
+{
+}
+
+WebRtc_Word32
+VPMDeflickering::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+void
+VPMDeflickering::Reset()
+{
+    _meanBufferLength = 0;
+    _detectionState = 0;
+    _frameRate = 0;
+
+    memset(_meanBuffer, 0, sizeof(WebRtc_Word32) * kMeanBufferLength);
+    memset(_timestampBuffer, 0, sizeof(WebRtc_Word32) * kMeanBufferLength);
+
+    // Initialize the history with a uniformly distributed histogram
+    _quantHistUW8[0][0] = 0;
+    _quantHistUW8[0][kNumQuants - 1] = 255;
+    for (WebRtc_Word32 i = 0; i < kNumProbs; i++)
+    {
+        _quantHistUW8[0][i + 1] = static_cast<WebRtc_UWord8>((WEBRTC_SPL_UMUL_16_16(
+            _probUW16[i], 255) + (1 << 10)) >> 11); // Unsigned round. <Q0>
+    }
+    
+    for (WebRtc_Word32 i = 1; i < kFrameHistorySize; i++)
+    {
+        memcpy(_quantHistUW8[i], _quantHistUW8[0], sizeof(WebRtc_UWord8) * kNumQuants);
+    }
+}
+
+WebRtc_Word32
+VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
+                              const WebRtc_UWord32 width,
+                              const WebRtc_UWord32 height,
+                              const WebRtc_UWord32 timestamp,
+                              VideoProcessingModule::FrameStats& stats)
+{
+    WebRtc_UWord32 frameMemory;
+    WebRtc_UWord8 quantUW8[kNumQuants];
+    WebRtc_UWord8 maxQuantUW8[kNumQuants];
+    WebRtc_UWord8 minQuantUW8[kNumQuants];
+    WebRtc_UWord16 targetQuantUW16[kNumQuants];
+    WebRtc_UWord16 incrementUW16;
+    WebRtc_UWord8 mapUW8[256];
+
+    WebRtc_UWord16 tmpUW16;
+    WebRtc_UWord32 tmpUW32;
+
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        return VPM_GENERAL_ERROR;
+    }
+
+    // Stricter height check due to subsampling size calculation below.
+    if (width == 0 || height < 2)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        return VPM_GENERAL_ERROR;
+    }
+
+    if (!VideoProcessingModule::ValidFrameStats(stats))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        return VPM_GENERAL_ERROR;
+    }
+
+    if (PreDetection(timestamp, stats) == -1)
+    {
+        return VPM_GENERAL_ERROR;
+    }
+
+    // Flicker detection
+    WebRtc_Word32 detFlicker = DetectFlicker();
+    if (detFlicker < 0)
+    { // Error
+        return VPM_GENERAL_ERROR;
+    }
+    else if (detFlicker != 1)
+    {
+        return 0;
+    }
+
+    // Size of luminance component
+    const WebRtc_UWord32 ySize = height * width;
+
+    const WebRtc_UWord32 ySubSize = width * (((height - 1) >>
+        kLog2OfDownsamplingFactor) + 1);
+    WebRtc_UWord8* ySorted = new WebRtc_UWord8[ySubSize];
+    WebRtc_UWord32 sortRowIdx = 0;
+    for (WebRtc_UWord32 i = 0; i < height; i += kDownsamplingFactor)
+    {
+        memcpy(ySorted + sortRowIdx * width, frame + i * width, width);
+        sortRowIdx++;
+    }
+    
+    webrtc::Sort(ySorted, ySubSize, webrtc::TYPE_UWord8);
+
+    WebRtc_UWord32 probIdxUW32 = 0;
+    quantUW8[0] = 0;
+    quantUW8[kNumQuants - 1] = 255;
+
+    // Ensure we won't get an overflow below.
+    // In practice, the number of subsampled pixels will not become this large.
+    if (ySubSize > (1 << 21) - 1)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, 
+            "Subsampled number of pixels too large");
+        return -1;
+    }
+
+    for (WebRtc_Word32 i = 0; i < kNumProbs; i++)
+    {
+        probIdxUW32 = WEBRTC_SPL_UMUL_32_16(ySubSize, _probUW16[i]) >> 11; // <Q0>
+        quantUW8[i + 1] = ySorted[probIdxUW32];
+    }
+
+    delete [] ySorted;
+    ySorted = NULL;
+
+    // Shift history for new frame.
+    memmove(_quantHistUW8[1], _quantHistUW8[0], (kFrameHistorySize - 1) * kNumQuants *
+        sizeof(WebRtc_UWord8));
+    // Store current frame in history.
+    memcpy(_quantHistUW8[0], quantUW8, kNumQuants * sizeof(WebRtc_UWord8));
+
+    // We use a frame memory equal to the ceiling of half the frame rate to ensure we
+    // capture an entire period of flicker.
+    frameMemory = (_frameRate + (1 << 5)) >> 5; // Unsigned ceiling. <Q0>
+                                                // _frameRate in Q4.
+    if (frameMemory > kFrameHistorySize)
+    {
+        frameMemory = kFrameHistorySize;
+    }
+
+    // Get maximum and minimum.
+    for (WebRtc_Word32 i = 0; i < kNumQuants; i++)
+    {
+        maxQuantUW8[i] = 0;
+        minQuantUW8[i] = 255;
+        for (WebRtc_UWord32 j = 0; j < frameMemory; j++)
+        {
+            if (_quantHistUW8[j][i] > maxQuantUW8[i])
+            {
+                maxQuantUW8[i] = _quantHistUW8[j][i];
+            }
+
+            if (_quantHistUW8[j][i] < minQuantUW8[i])
+            {
+                minQuantUW8[i] = _quantHistUW8[j][i];
+            }
+        }
+    }
+    
+    // Get target quantiles.
+    for (WebRtc_Word32 i = 0; i < kNumQuants - kMaxOnlyLength; i++)
+    {
+        targetQuantUW16[i] = static_cast<WebRtc_UWord16>((WEBRTC_SPL_UMUL_16_16(
+            _weightUW16[i], maxQuantUW8[i]) + WEBRTC_SPL_UMUL_16_16((1 << 15) -
+            _weightUW16[i], minQuantUW8[i])) >> 8); // <Q7>
+    }
+
+    for (WebRtc_Word32 i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++)
+    {
+        targetQuantUW16[i] = ((WebRtc_UWord16)maxQuantUW8[i]) << 7;
+    }
+
+    // Compute the map from input to output pixels.
+    WebRtc_UWord16 mapUW16; // <Q7>
+    for (WebRtc_Word32 i = 1; i < kNumQuants; i++)
+    {
+        // As quant and targetQuant are limited to UWord8, we're safe to use Q7 here.
+        tmpUW32 = static_cast<WebRtc_UWord32>(targetQuantUW16[i] -
+            targetQuantUW16[i - 1]); // <Q7>
+        tmpUW16 = static_cast<WebRtc_UWord16>(quantUW8[i] - quantUW8[i - 1]); // <Q0>
+
+        if (tmpUW16 > 0)
+        {
+            incrementUW16 = static_cast<WebRtc_UWord16>(WebRtcSpl_DivU32U16(tmpUW32,
+                tmpUW16)); // <Q7>
+         }
+        else
+        {
+            // The value is irrelevant; the loop below will only iterate once.
+            incrementUW16 = 0;
+        }
+
+        mapUW16 = targetQuantUW16[i - 1];
+        for (WebRtc_UWord32 j = quantUW8[i - 1]; j < (WebRtc_UWord32)(quantUW8[i] + 1); j++)
+        {
+            mapUW8[j] = (WebRtc_UWord8)((mapUW16 + (1 << 6)) >> 7); // Unsigned round. <Q0>
+            mapUW16 += incrementUW16;
+        }
+    }
+
+    // Map to the output frame.
+    for (WebRtc_UWord32 i = 0; i < ySize; i++)
+    {
+        frame[i] = mapUW8[frame[i]];
+    }
+
+    // Frame was altered, so reset stats.
+    VideoProcessingModule::ClearFrameStats(stats);
+
+    return 0;
+}
+
+/**
+   Performs some pre-detection operations. Must be called before 
+   DetectFlicker().
+
+   \param[in] timestamp Timestamp of the current frame.
+   \param[in] stats     Statistics of the current frame.
+ 
+   \return 0: Success\n
+           2: Detection not possible due to flickering frequency too close to
+              zero.\n
+          -1: Error
+*/
+WebRtc_Word32
+VPMDeflickering::PreDetection(const WebRtc_UWord32 timestamp,
+                              const VideoProcessingModule::FrameStats& stats)
+{
+    WebRtc_Word32 meanVal; // Mean value of frame (Q4)
+    WebRtc_UWord32 frameRate = 0;
+    WebRtc_Word32 meanBufferLength; // Temp variable
+
+    meanVal = ((stats.sum << kMeanValueScaling) / stats.numPixels);
+    /* Update mean value buffer.
+     * This should be done even though we might end up in an unreliable detection.
+     */
+    memmove(_meanBuffer + 1, _meanBuffer, (kMeanBufferLength - 1) * sizeof(WebRtc_Word32));
+    _meanBuffer[0] = meanVal;
+
+    /* Update timestamp buffer.
+     * This should be done even though we might end up in an unreliable detection.
+     */
+    memmove(_timestampBuffer + 1, _timestampBuffer, (kMeanBufferLength - 1) *
+        sizeof(WebRtc_UWord32));
+    _timestampBuffer[0] = timestamp;
+
+    /* Compute current frame rate (Q4) */
+    if (_timestampBuffer[kMeanBufferLength - 1] != 0)
+    {
+        frameRate = ((90000 << 4) * (kMeanBufferLength - 1));
+        frameRate /= (_timestampBuffer[0] - _timestampBuffer[kMeanBufferLength - 1]);
+    }else if (_timestampBuffer[1] != 0)
+    {
+        frameRate = (90000 << 4) / (_timestampBuffer[0] - _timestampBuffer[1]);
+    }
+
+    /* Determine required size of mean value buffer (_meanBufferLength) */
+    if (frameRate == 0) {
+        meanBufferLength = 1;
+    }
+    else {
+        meanBufferLength = (kNumFlickerBeforeDetect * frameRate) / kMinFrequencyToDetect;
+    }
+    /* Sanity check of buffer length */
+    if (meanBufferLength >= kMeanBufferLength)
+    {
+        /* Too long buffer. The flickering frequency is too close to zero, which
+         * makes the estimation unreliable.
+         */
+        _meanBufferLength = 0;
+        return 2;
+    }
+    _meanBufferLength = meanBufferLength;
+
+    if ((_timestampBuffer[_meanBufferLength - 1] != 0) && (_meanBufferLength != 1))
+    {
+        frameRate = ((90000 << 4) * (_meanBufferLength - 1));
+        frameRate /= (_timestampBuffer[0] - _timestampBuffer[_meanBufferLength - 1]);
+    }else if (_timestampBuffer[1] != 0)
+    {
+        frameRate = (90000 << 4) / (_timestampBuffer[0] - _timestampBuffer[1]);
+    }
+    _frameRate = frameRate;
+
+    return 0;
+}
+
+/**
+   This function detects flicker in the video stream. As a side effect the mean value
+   buffer is updated with the new mean value.
+ 
+   \return 0: No flickering detected\n
+           1: Flickering detected\n
+           2: Detection not possible due to unreliable frequency interval
+          -1: Error
+*/
+WebRtc_Word32 VPMDeflickering::DetectFlicker()
+{
+    /* Local variables */
+    WebRtc_UWord32  i;
+    WebRtc_Word32  freqEst;       // (Q4) Frequency estimate to base detection upon
+    WebRtc_Word32  retVal = -1;
+
+    /* Sanity check for _meanBufferLength */
+    if (_meanBufferLength < 2)
+    {
+        /* Not possible to estimate frequency */
+        return(2);
+    }
+    /* Count zero crossings with a dead zone to be robust against noise.
+     * If the noise std is 2 pixel this corresponds to about 95% confidence interval.
+     */
+    WebRtc_Word32 deadzone = (kZeroCrossingDeadzone << kMeanValueScaling); // Q4
+    WebRtc_Word32 meanOfBuffer = 0; // Mean value of mean value buffer
+    WebRtc_Word32 numZeros     = 0; // Number of zeros that cross the deadzone
+    WebRtc_Word32 cntState     = 0; // State variable for zero crossing regions
+    WebRtc_Word32 cntStateOld  = 0; // Previous state variable for zero crossing regions
+
+    for (i = 0; i < _meanBufferLength; i++)
+    {
+        meanOfBuffer += _meanBuffer[i];
+    }
+    meanOfBuffer += (_meanBufferLength >> 1); // Rounding, not truncation
+    meanOfBuffer /= _meanBufferLength;
+
+    /* Count zero crossings */
+    cntStateOld = (_meanBuffer[0] >= (meanOfBuffer + deadzone));
+    cntStateOld -= (_meanBuffer[0] <= (meanOfBuffer - deadzone));
+    for (i = 1; i < _meanBufferLength; i++)
+    {
+        cntState = (_meanBuffer[i] >= (meanOfBuffer + deadzone));
+        cntState -= (_meanBuffer[i] <= (meanOfBuffer - deadzone));
+        if (cntStateOld == 0)
+        {
+            cntStateOld = -cntState;
+        }
+        if (((cntState + cntStateOld) == 0) && (cntState != 0))
+        {
+            numZeros++;
+            cntStateOld = cntState;
+        }
+    }
+    /* END count zero crossings */
+
+    /* Frequency estimation according to:
+     * freqEst = numZeros * frameRate / 2 / _meanBufferLength;
+     *
+     * Resolution is set to Q4
+     */
+    freqEst = ((numZeros * 90000) << 3);
+    freqEst /= (_timestampBuffer[0] - _timestampBuffer[_meanBufferLength - 1]);
+
+    /* Translate frequency estimate to regions close to 100 and 120 Hz */
+    WebRtc_UWord8 freqState = 0; // Current translation state:
+                               // (0) Not in interval,
+                               // (1) Within valid interval,
+                               // (2) Out of range
+    WebRtc_Word32 freqAlias = freqEst;
+    if (freqEst > kMinFrequencyToDetect)
+    {
+        WebRtc_UWord8 aliasState = 1;
+        while(freqState == 0)
+        {
+            /* Increase frequency */
+            freqAlias += (aliasState * _frameRate);
+            freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
+            /* Compute state */
+            freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
+            freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
+            freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
+            /* Switch alias state */
+            aliasState++;
+            aliasState &= 0x01;
+        }
+    }
+    /* Is frequency estimate within detection region? */
+    if (freqState == 1)
+    {
+        retVal = 1;
+    }else if (freqState == 0)
+    {
+        retVal = 2;
+    }else
+    {
+        retVal = 0;
+    }
+    return retVal;
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/deflickering.h b/trunk/src/modules/video_processing/main/source/deflickering.h
new file mode 100644
index 0000000..ee5f90d
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/deflickering.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * deflickering.h
+ */
+
+#ifndef VPM_DEFLICKERING_H
+#define VPM_DEFLICKERING_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+#include <cstring>  // NULL
+
+namespace webrtc {
+
+class VPMDeflickering
+{
+public:
+    VPMDeflickering();
+    ~VPMDeflickering();
+
+    WebRtc_Word32 ChangeUniqueId(WebRtc_Word32 id);
+
+    void Reset();
+
+    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
+                             WebRtc_UWord32 width,
+                             WebRtc_UWord32 height,
+                             WebRtc_UWord32 timestamp,
+                             VideoProcessingModule::FrameStats& stats);
+private:
+    WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,
+                             const VideoProcessingModule::FrameStats& stats);
+
+    WebRtc_Word32 DetectFlicker();
+
+    enum { kMeanBufferLength = 32 };
+    enum { kFrameHistorySize = 15 };
+    enum { kNumProbs = 12 };
+    enum { kNumQuants = kNumProbs + 2 };
+    enum { kMaxOnlyLength = 5 };
+
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32  _meanBufferLength;
+    WebRtc_UWord8   _detectionState;    // 0: No flickering
+                                      // 1: Flickering detected
+                                      // 2: In flickering
+    WebRtc_Word32    _meanBuffer[kMeanBufferLength];
+    WebRtc_UWord32   _timestampBuffer[kMeanBufferLength];
+    WebRtc_UWord32   _frameRate;
+    static const WebRtc_UWord16 _probUW16[kNumProbs];
+    static const WebRtc_UWord16 _weightUW16[kNumQuants - kMaxOnlyLength];
+    WebRtc_UWord8 _quantHistUW8[kFrameHistorySize][kNumQuants];
+};
+
+} //namespace
+
+#endif // VPM_DEFLICKERING_H
+
diff --git a/trunk/src/modules/video_processing/main/source/denoising.cc b/trunk/src/modules/video_processing/main/source/denoising.cc
new file mode 100644
index 0000000..d8931c9
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/denoising.cc
@@ -0,0 +1,180 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "denoising.h"
+#include "trace.h"
+
+#include <cstring>
+
+namespace webrtc {
+
+enum { kSubsamplingTime = 0 };       // Down-sampling in time (unit: number of frames)
+enum { kSubsamplingWidth = 0 };      // Sub-sampling in width (unit: power of 2)
+enum { kSubsamplingHeight = 0 };     // Sub-sampling in height (unit: power of 2)
+enum { kDenoiseFiltParam = 179 };    // (Q8) De-noising filter parameter
+enum { kDenoiseFiltParamRec = 77 };  // (Q8) 1 - filter parameter
+enum { kDenoiseThreshold = 19200 };  // (Q8) De-noising threshold level
+
+VPMDenoising::VPMDenoising() :
+    _id(0),
+    _moment1(NULL),
+    _moment2(NULL)
+{
+    Reset();
+}
+
+VPMDenoising::~VPMDenoising()
+{
+    if (_moment1)
+    {
+        delete [] _moment1;
+        _moment1 = NULL;
+    }
+    
+    if (_moment2)
+    {
+        delete [] _moment2;
+        _moment2 = NULL;
+    }
+}
+
+WebRtc_Word32
+VPMDenoising::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return VPM_OK;
+}
+
+void
+VPMDenoising::Reset()
+{
+    _frameSize = 0;
+    _denoiseFrameCnt = 0;
+
+    if (_moment1)
+    {
+        delete [] _moment1;
+        _moment1 = NULL;
+    }
+    
+    if (_moment2)
+    {
+        delete [] _moment2;
+        _moment2 = NULL;
+    }
+}
+
+WebRtc_Word32
+VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
+                           const WebRtc_UWord32 width,
+                           const WebRtc_UWord32 height)
+{
+    WebRtc_Word32     thevar;
+    WebRtc_UWord32    k;
+    WebRtc_UWord32    jsub, ksub;
+    WebRtc_Word32     diff0;
+    WebRtc_UWord32    tmpMoment1;
+    WebRtc_UWord32    tmpMoment2;
+    WebRtc_UWord32    tmp;
+    WebRtc_Word32     numPixelsChanged = 0;
+
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        return VPM_GENERAL_ERROR;
+    }
+
+    if (width == 0 || height == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        return VPM_GENERAL_ERROR;
+    }
+
+    /* Size of luminance component */
+    const WebRtc_UWord32 ysize  = height * width;
+
+    /* Initialization */
+    if (ysize != _frameSize)
+    {
+        delete [] _moment1;
+        _moment1 = NULL;
+
+        delete [] _moment2;
+        _moment2 = NULL;
+    }
+    _frameSize = ysize;
+
+    if (!_moment1)
+    {
+        _moment1 = new WebRtc_UWord32[ysize];
+        memset(_moment1, 0, sizeof(WebRtc_UWord32)*ysize);
+    }
+    
+    if (!_moment2)
+    {
+        _moment2 = new WebRtc_UWord32[ysize];
+        memset(_moment2, 0, sizeof(WebRtc_UWord32)*ysize);
+    }
+
+    /* Apply de-noising on each pixel, but update variance sub-sampled */
+    for (WebRtc_UWord32 i = 0; i < height; i++)
+    { // Collect over height
+        k = i * width;
+        ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
+        for (WebRtc_UWord32 j = 0; j < width; j++)
+        { // Collect over width
+            jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);
+            /* Update mean value for every pixel and every frame */
+            tmpMoment1 = _moment1[k + j];
+            tmpMoment1 *= kDenoiseFiltParam; // Q16
+            tmpMoment1 += ((kDenoiseFiltParamRec * ((WebRtc_UWord32)frame[k + j])) << 8);
+            tmpMoment1 >>= 8; // Q8
+            _moment1[k + j] = tmpMoment1;
+
+            tmpMoment2 = _moment2[ksub + jsub];
+            if ((ksub == k) && (jsub == j) && (_denoiseFrameCnt == 0))
+            {
+                tmp = ((WebRtc_UWord32)frame[k + j] * (WebRtc_UWord32)frame[k + j]);
+                tmpMoment2 *= kDenoiseFiltParam; // Q16
+                tmpMoment2 += ((kDenoiseFiltParamRec * tmp)<<8);
+                tmpMoment2 >>= 8; // Q8
+            }
+            _moment2[k + j] = tmpMoment2;
+            /* Current event = deviation from mean value */
+            diff0 = ((WebRtc_Word32)frame[k + j] << 8) - _moment1[k + j];
+            /* Recent events = variance (variations over time) */
+            thevar = _moment2[k + j];
+            thevar -= ((_moment1[k + j] * _moment1[k + j]) >> 8);
+            /***************************************************************************
+             * De-noising criteria, i.e., when should we replace a pixel by its mean
+             *
+             * 1) recent events are minor
+             * 2) current events are minor
+             ***************************************************************************/
+            if ((thevar < kDenoiseThreshold)
+                && ((diff0 * diff0 >> 8) < kDenoiseThreshold))
+            { // Replace with mean
+                frame[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
+                numPixelsChanged++;
+            }
+        }
+    }
+
+    /* Update frame counter */
+    _denoiseFrameCnt++;
+    if (_denoiseFrameCnt > kSubsamplingTime)
+    {
+        _denoiseFrameCnt = 0;
+    }
+
+    return numPixelsChanged;
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/denoising.h b/trunk/src/modules/video_processing/main/source/denoising.h
new file mode 100644
index 0000000..f53157c
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/denoising.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * denoising.h
+ */
+#ifndef VPM_DENOISING_H
+#define VPM_DENOISING_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+namespace webrtc {
+
+class VPMDenoising
+{
+public:
+    VPMDenoising();
+    ~VPMDenoising();
+
+    WebRtc_Word32 ChangeUniqueId(WebRtc_Word32 id);
+
+    void Reset();
+
+    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
+                             WebRtc_UWord32 width,
+                             WebRtc_UWord32 height);
+
+private:
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32*   _moment1;           // (Q8) First order moment (mean)
+    WebRtc_UWord32*   _moment2;           // (Q8) Second order moment
+    WebRtc_UWord32    _frameSize;         // Size (# of pixels) of frame
+    WebRtc_Word32     _denoiseFrameCnt;   // Counter for subsampling in time
+};
+
+} //namespace
+
+#endif // VPM_DENOISING_H
+  
diff --git a/trunk/src/modules/video_processing/main/source/frame_preprocessor.cc b/trunk/src/modules/video_processing/main/source/frame_preprocessor.cc
new file mode 100644
index 0000000..76fdac8
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/frame_preprocessor.cc
@@ -0,0 +1,182 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "frame_preprocessor.h"
+#include "trace.h"
+
+namespace webrtc {
+
+VPMFramePreprocessor::VPMFramePreprocessor():
+_id(0),
+_contentMetrics(NULL),
+_maxFrameRate(0),
+_resampledFrame(),
+_enableCA(false)
+{
+    _spatialResampler = new VPMSimpleSpatialResampler();
+    _ca = new VPMContentAnalysis(true);
+    _vd = new VPMVideoDecimator();
+}
+
+VPMFramePreprocessor::~VPMFramePreprocessor()
+{
+    Reset();
+    delete _spatialResampler;
+    delete _ca;
+    delete _vd;
+    _resampledFrame.Free(); // is this needed?
+}
+
+WebRtc_Word32
+VPMFramePreprocessor::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return VPM_OK;
+}
+
+void 
+VPMFramePreprocessor::Reset()
+{
+    _ca->Release();
+    _vd->Reset();
+    _contentMetrics = NULL;
+    _spatialResampler->Reset();
+    _enableCA = false;
+}
+	
+    
+void 
+VPMFramePreprocessor::EnableTemporalDecimation(bool enable)
+{
+    _vd->EnableTemporalDecimation(enable);
+}
+void
+VPMFramePreprocessor::EnableContentAnalysis(bool enable)
+{
+    _enableCA = enable;
+}
+
+void 
+VPMFramePreprocessor::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
+{
+    _spatialResampler->SetInputFrameResampleMode(resamplingMode);
+}
+
+    
+WebRtc_Word32
+VPMFramePreprocessor::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
+{
+    if (maxFrameRate == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+    //Max allowed frame rate
+    _maxFrameRate = maxFrameRate;
+
+    return _vd->SetMaxFrameRate(maxFrameRate);
+}
+    
+
+WebRtc_Word32
+VPMFramePreprocessor::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
+{
+    if ( (width == 0) || (height == 0) || (frameRate == 0))
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+    WebRtc_Word32 retVal = 0;
+    retVal = _spatialResampler->SetTargetFrameSize(width, height);
+    if (retVal < 0)
+    {
+        return retVal;
+    }
+    retVal = _vd->SetTargetFrameRate(frameRate);
+    if (retVal < 0)
+    {
+        return retVal;
+    }
+
+	  return VPM_OK;
+}
+
+void 
+VPMFramePreprocessor::UpdateIncomingFrameRate()
+{
+    _vd->UpdateIncomingFrameRate();
+}
+
+WebRtc_UWord32
+VPMFramePreprocessor::DecimatedFrameRate()
+{
+    return _vd->DecimatedFrameRate();
+}
+
+
+WebRtc_UWord32
+VPMFramePreprocessor::DecimatedWidth() const
+{
+    return _spatialResampler->TargetWidth();
+}
+
+
+WebRtc_UWord32
+VPMFramePreprocessor::DecimatedHeight() const
+{
+    return _spatialResampler->TargetHeight();
+}
+
+
+WebRtc_Word32
+VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame)
+{
+    if (frame == NULL || frame->Height() == 0 || frame->Width() == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+
+    _vd->UpdateIncomingFrameRate();
+
+    if (_vd->DropFrame())
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id, "Drop frame due to frame rate");
+        return 1;  // drop 1 frame
+    }
+
+    // Resizing incoming frame if needed.
+    // Note that we must make a copy of it.
+    // We are not allowed to resample the input frame.
+    *processedFrame = NULL;
+    if (_spatialResampler->ApplyResample(frame->Width(), frame->Height()))  {
+      WebRtc_Word32 ret = _spatialResampler->ResampleFrame(*frame, _resampledFrame);
+      if (ret != VPM_OK)
+        return ret;
+      *processedFrame = &_resampledFrame;
+    }
+
+    // Perform content analysis on the frame to be encoded
+    if (_enableCA)
+    {
+        if (*processedFrame == NULL)  {
+          _contentMetrics = _ca->ComputeContentMetrics(frame);
+        } else {
+          _contentMetrics = _ca->ComputeContentMetrics(&_resampledFrame);
+        }
+    }
+    return VPM_OK;
+}
+
+
+VideoContentMetrics*
+VPMFramePreprocessor::ContentMetrics() const
+{
+    return _contentMetrics;
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/frame_preprocessor.h b/trunk/src/modules/video_processing/main/source/frame_preprocessor.h
new file mode 100644
index 0000000..3c07a47
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/frame_preprocessor.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * frame_preprocessor.h
+ */
+#ifndef VPM_FRAME_PREPROCESSOR_H
+#define VPM_FRAME_PREPROCESSOR_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+#include "content_analysis.h"
+#include "spatial_resampler.h"
+#include "video_decimator.h"
+
+namespace webrtc {
+
+
+class VPMFramePreprocessor
+{
+public:
+
+    VPMFramePreprocessor();
+    ~VPMFramePreprocessor();
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    void Reset();
+
+    // Enable temporal decimation
+    void EnableTemporalDecimation(bool enable);
+
+    void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
+
+    //Enable content analysis
+    void EnableContentAnalysis(bool enable);
+
+    //Set max frame rate
+    WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
+
+    //Set target resolution: frame rate and dimension
+    WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate);
+
+    //Update incoming frame rate/dimension
+    void UpdateIncomingFrameRate();
+
+    WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width, WebRtc_UWord32 height);
+
+    //Set decimated values: frame rate/dimension
+    WebRtc_UWord32 DecimatedFrameRate();
+    WebRtc_UWord32 DecimatedWidth() const;
+    WebRtc_UWord32 DecimatedHeight() const;
+
+    //Preprocess output:
+    WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame);
+    VideoContentMetrics* ContentMetrics() const;
+
+private:
+
+    WebRtc_Word32              _id;
+    VideoContentMetrics*      _contentMetrics;
+    WebRtc_UWord32             _maxFrameRate;
+    VideoFrame           _resampledFrame;
+    VPMSpatialResampler*     _spatialResampler;
+    VPMContentAnalysis*      _ca;
+    VPMVideoDecimator*       _vd;
+    bool                     _enableCA;
+    
+}; // end of VPMFramePreprocessor class definition
+
+} //namespace
+
+#endif // VPM_FRAME_PREPROCESSOR_H
diff --git a/trunk/src/modules/video_processing/main/source/spatial_resampler.cc b/trunk/src/modules/video_processing/main/source/spatial_resampler.cc
new file mode 100644
index 0000000..b48c322
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/spatial_resampler.cc
@@ -0,0 +1,123 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "spatial_resampler.h"
+
+
+namespace webrtc {
+
+VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
+:
+_resamplingMode(kFastRescaling),
+_targetWidth(0),
+_targetHeight(0),
+_scaler()
+{
+}
+
+VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler()
+{
+  //
+}
+
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::SetTargetFrameSize(WebRtc_Word32 width,
+                                              WebRtc_Word32 height)
+{
+  if (_resamplingMode == kNoRescaling)  {
+    return VPM_OK;
+  }
+
+  if (width < 1 || height < 1)  {
+    return VPM_PARAMETER_ERROR;
+  }
+
+  _targetWidth = width;
+  _targetHeight = height;
+
+  return VPM_OK;
+}
+
+void
+VPMSimpleSpatialResampler::SetInputFrameResampleMode(VideoFrameResampling
+                                                     resamplingMode)
+{
+  _resamplingMode = resamplingMode;
+}
+
+void
+VPMSimpleSpatialResampler::Reset()
+{
+  _resamplingMode = kFastRescaling;
+  _targetWidth = 0;
+  _targetHeight = 0;
+}
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
+                                         VideoFrame& outFrame)
+{
+  if (_resamplingMode == kNoRescaling)
+     return outFrame.CopyFrame(inFrame);
+  // Check if re-sampling is needed
+  if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) &&
+    (inFrame.Height() == (WebRtc_UWord32)_targetHeight))  {
+    return outFrame.CopyFrame(inFrame);
+  }
+
+  // Setting scaler
+  //TODO: Modify scaler types
+  int retVal = 0;
+  retVal = _scaler.Set(inFrame.Width(), inFrame.Height(),
+                       _targetWidth, _targetHeight, kI420, kI420, kScaleBox);
+  if (retVal < 0)
+    return retVal;
+
+
+  // Disabling cut/pad for now - only scaling.
+  int requiredSize = (WebRtc_UWord32)(_targetWidth * _targetHeight * 3 >> 1);
+  outFrame.VerifyAndAllocate(requiredSize);
+  outFrame.SetTimeStamp(inFrame.TimeStamp());
+  outFrame.SetWidth(_targetWidth);
+  outFrame.SetHeight(_targetHeight);
+
+  retVal = _scaler.Scale(inFrame.Buffer(), outFrame.Buffer(), requiredSize);
+  outFrame.SetLength(requiredSize);
+  if (retVal == 0)
+    return VPM_OK;
+  else
+    return VPM_SCALE_ERROR;
+}
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::TargetHeight()
+{
+  return _targetHeight;
+}
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::TargetWidth()
+{
+  return _targetWidth;
+}
+
+bool
+VPMSimpleSpatialResampler::ApplyResample(WebRtc_Word32 width,
+                                         WebRtc_Word32 height)
+{
+  if ((width == _targetWidth && height == _targetHeight) ||
+       _resamplingMode == kNoRescaling)
+    return false;
+  else
+    return true;
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/spatial_resampler.h b/trunk/src/modules/video_processing/main/source/spatial_resampler.h
new file mode 100644
index 0000000..76a63fb
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/spatial_resampler.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * spatial_resampler.h
+ */
+
+#ifndef VPM_SPATIAL_RESAMPLER_H
+#define VPM_SPATIAL_RESAMPLER_H
+
+#include "typedefs.h"
+
+#include "module_common_types.h"
+#include "video_processing_defines.h"
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "common_video/libyuv/include/scaler.h"
+
+namespace webrtc {
+
+class VPMSpatialResampler
+{
+public:
+  virtual ~VPMSpatialResampler() {};
+  virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
+                                           WebRtc_Word32 height) = 0;
+  virtual void SetInputFrameResampleMode(VideoFrameResampling
+                                         resamplingMode) = 0;
+  virtual void Reset() = 0;
+  virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
+                                      VideoFrame& outFrame) = 0;
+  virtual WebRtc_Word32 TargetWidth() = 0;
+  virtual WebRtc_Word32 TargetHeight() = 0;
+  virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0;
+};
+
+class VPMSimpleSpatialResampler : public VPMSpatialResampler
+{
+public:
+  VPMSimpleSpatialResampler();
+  ~VPMSimpleSpatialResampler();
+  virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
+                                           WebRtc_Word32 height);
+  virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
+  virtual void Reset();
+  virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
+                                      VideoFrame& outFrame);
+  virtual WebRtc_Word32 TargetWidth();
+  virtual WebRtc_Word32 TargetHeight();
+  virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height);
+
+private:
+
+  VideoFrameResampling        _resamplingMode;
+  WebRtc_Word32               _targetWidth;
+  WebRtc_Word32               _targetHeight;
+  Scaler                      _scaler;
+};
+
+} //namespace
+
+#endif
diff --git a/trunk/src/modules/video_processing/main/source/video_decimator.cc b/trunk/src/modules/video_processing/main/source/video_decimator.cc
new file mode 100644
index 0000000..43bda08
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/video_decimator.cc
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_decimator.h"
+#include "tick_util.h"
+#include "video_processing.h"
+
+#define VD_MIN(a, b) ((a) < (b)) ? (a) : (b)
+
+namespace webrtc {
+
+VPMVideoDecimator::VPMVideoDecimator()
+:
+_overShootModifier(0),
+_dropCount(0),
+_keepCount(0),
+_targetFrameRate(30),
+_incomingFrameRate(0.0f),
+_maxFrameRate(30),
+_incomingFrameTimes(),
+_enableTemporalDecimation(true)
+{
+    Reset();
+}
+
+VPMVideoDecimator::~VPMVideoDecimator()
+{
+	//
+}
+
+void
+VPMVideoDecimator::Reset() 
+{
+   _overShootModifier = 0;
+    _dropCount = 0;
+    _keepCount = 0;
+    _targetFrameRate = 30;
+    _incomingFrameRate = 0.0f;
+    _maxFrameRate = 30;
+    memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+    _enableTemporalDecimation = true;
+}
+
+void
+VPMVideoDecimator::EnableTemporalDecimation(bool enable)
+{
+    _enableTemporalDecimation = enable;
+}
+WebRtc_Word32
+VPMVideoDecimator::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
+{
+    if (maxFrameRate == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+
+    _maxFrameRate = maxFrameRate;
+    
+    if (_targetFrameRate > _maxFrameRate)
+    {
+        _targetFrameRate = _maxFrameRate;
+
+    }
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VPMVideoDecimator::SetTargetFrameRate(WebRtc_UWord32 frameRate)
+{
+    if (frameRate == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+    if (frameRate > _maxFrameRate)
+    {
+        // Requested rate exceeds the maximum; cap to _maxFrameRate.
+        _targetFrameRate = _maxFrameRate;
+    }
+    else
+    {
+        _targetFrameRate = frameRate;
+    }
+    return VPM_OK;
+}
+
+bool
+VPMVideoDecimator::DropFrame()
+{
+    if (!_enableTemporalDecimation)
+    {
+        return false;
+    }
+
+    if (_incomingFrameRate <= 0)
+    {
+        return false;
+    }
+
+    const WebRtc_UWord32 incomingFrameRate = static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f);
+
+    if (_targetFrameRate == 0)
+    {
+        return true;
+    }
+    
+    bool drop = false; 
+    if (incomingFrameRate > _targetFrameRate)
+    {       
+        WebRtc_Word32 overshoot = _overShootModifier + (incomingFrameRate - _targetFrameRate);
+        if(overshoot < 0)
+        {
+            overshoot = 0;
+            _overShootModifier = 0;
+        }
+        
+        if (overshoot && 2 * overshoot < (WebRtc_Word32) incomingFrameRate)
+        {
+
+            if (_dropCount) // Just got here so drop to be sure.
+            {
+                _dropCount = 0;         
+                return true;
+            }                        
+            const WebRtc_UWord32 dropVar = incomingFrameRate / overshoot;
+
+            if (_keepCount >= dropVar)
+            {
+                drop = true;                           
+                _overShootModifier = -((WebRtc_Word32) incomingFrameRate % overshoot) / 3;
+                _keepCount = 1;
+            }
+            else
+            {                        
+                
+                _keepCount++;
+            }
+        }
+        else
+        {
+            _keepCount = 0;         
+            const WebRtc_UWord32 dropVar = overshoot / _targetFrameRate;
+            if (_dropCount < dropVar)
+            {                
+                drop = true;
+                _dropCount++;                
+            }
+            else
+            {
+                _overShootModifier = overshoot % _targetFrameRate;
+                drop = false;
+                _dropCount = 0;                
+            }
+        }
+    }
+
+    return drop;
+}
+
+
+WebRtc_UWord32
+VPMVideoDecimator::DecimatedFrameRate()
+{
+    ProcessIncomingFrameRate(TickTime::MillisecondTimestamp());
+    if (!_enableTemporalDecimation)
+    {
+        return static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f);
+    }
+    return VD_MIN(_targetFrameRate, static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f));
+}
+
+WebRtc_UWord32
+VPMVideoDecimator::InputFrameRate()
+{
+    ProcessIncomingFrameRate(TickTime::MillisecondTimestamp());
+    return static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f);
+}
+
+void
+VPMVideoDecimator::UpdateIncomingFrameRate()
+{
+   WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+    if(_incomingFrameTimes[0] == 0)
+    {
+        // First frame observed; history is empty, nothing to shift.
+    } else
+    {
+        // shift 
+        for(int i = (kFrameCountHistorySize - 2); i >= 0 ; i--)
+        {
+            _incomingFrameTimes[i+1] = _incomingFrameTimes[i];
+        }
+    }
+    _incomingFrameTimes[0] = now;
+    ProcessIncomingFrameRate(now);
+}
+
+void 
+VPMVideoDecimator::ProcessIncomingFrameRate(WebRtc_Word64 now)
+{
+   WebRtc_Word32 num = 0;
+    WebRtc_Word32 nrOfFrames = 0;
+    for(num = 1; num < (kFrameCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimes[num] <= 0 ||
+            now - _incomingFrameTimes[num] > kFrameHistoryWindowMs) // don't use data older than 2sec
+        {
+            break;
+        } else
+        {
+            nrOfFrames++;
+        }
+    }
+    if (num > 1)
+    {
+        WebRtc_Word64 diff = now - _incomingFrameTimes[num-1];
+        _incomingFrameRate = 1.0;
+        if(diff >0)
+        {
+            _incomingFrameRate = nrOfFrames * 1000.0f / static_cast<float>(diff);
+        }
+    }
+    else
+    {
+        _incomingFrameRate = static_cast<float>(nrOfFrames);
+    }
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/video_decimator.h b/trunk/src/modules/video_processing/main/source/video_decimator.h
new file mode 100644
index 0000000..e152bb9
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/video_decimator.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_decimator.h
+ */
+#ifndef VPM_VIDEO_DECIMATOR_H
+#define VPM_VIDEO_DECIMATOR_H
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+class VPMVideoDecimator
+{
+public:
+    VPMVideoDecimator();
+    ~VPMVideoDecimator();
+    
+    void Reset();
+    
+    void EnableTemporalDecimation(bool enable);
+    
+    WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
+    WebRtc_Word32 SetTargetFrameRate(WebRtc_UWord32 frameRate);
+
+    bool DropFrame();
+    
+    void UpdateIncomingFrameRate();
+
+    // Get Decimated Frame Rate/Dimensions
+    WebRtc_UWord32 DecimatedFrameRate();
+
+    //Get input frame rate
+    WebRtc_UWord32 InputFrameRate();
+
+private:
+    void ProcessIncomingFrameRate(WebRtc_Word64 now);
+
+    enum { kFrameCountHistorySize = 90};
+    enum { kFrameHistoryWindowMs = 2000};
+
+    // Temporal decimation
+    WebRtc_Word32         _overShootModifier;
+    WebRtc_UWord32        _dropCount;
+    WebRtc_UWord32        _keepCount;
+    WebRtc_UWord32        _targetFrameRate;
+    float               _incomingFrameRate;
+    WebRtc_UWord32        _maxFrameRate;
+    WebRtc_Word64         _incomingFrameTimes[kFrameCountHistorySize];
+    bool                _enableTemporalDecimation;
+
+};
+
+} //namespace
+
+#endif
diff --git a/trunk/src/modules/video_processing/main/source/video_processing.gypi b/trunk/src/modules/video_processing/main/source/video_processing.gypi
new file mode 100644
index 0000000..3bc03bc
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/video_processing.gypi
@@ -0,0 +1,88 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_processing',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+        ],
+      },
+      'sources': [
+        '../interface/video_processing.h',
+        '../interface/video_processing_defines.h',
+        'brighten.cc',
+        'brighten.h',
+        'brightness_detection.cc',
+        'brightness_detection.h',
+        'color_enhancement.cc',
+        'color_enhancement.h',
+        'color_enhancement_private.h',
+        'content_analysis.cc',
+        'content_analysis.h',
+        'deflickering.cc',
+        'deflickering.h',
+        'denoising.cc',
+        'denoising.h',
+        'frame_preprocessor.cc',
+        'frame_preprocessor.h',
+        'spatial_resampler.cc',
+        'spatial_resampler.h',
+        'video_decimator.cc',
+        'video_decimator.h',
+        'video_processing_impl.cc',
+        'video_processing_impl.h',
+      ],
+      'conditions': [
+        ['target_arch=="ia32" or target_arch=="x64"', {
+          'dependencies': [ 'video_processing_sse2', ],
+        }],
+      ],
+    },
+  ],
+  'conditions': [
+    ['target_arch=="ia32" or target_arch=="x64"', {
+      'targets': [
+        {
+          'target_name': 'video_processing_sse2',
+          'type': '<(library)',
+          'sources': [
+            'content_analysis_sse2.cc',
+          ],
+          'include_dirs': [
+            '../interface',
+            '../../../interface',
+          ],
+          'conditions': [
+            ['os_posix==1 and OS!="mac"', {
+              'cflags': [ '-msse2', ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_CFLAGS': [ '-msse2', ],
+              },
+            }],
+          ],
+        },
+      ],
+    }],
+  ],
+}
+
diff --git a/trunk/src/modules/video_processing/main/source/video_processing_impl.cc b/trunk/src/modules/video_processing/main/source/video_processing_impl.cc
new file mode 100644
index 0000000..3619996
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/video_processing_impl.cc
@@ -0,0 +1,340 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_processing_impl.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include <cassert>
+
+namespace webrtc {
+
+namespace
+{
+    void
+    SetSubSampling(VideoProcessingModule::FrameStats& stats,
+                   const WebRtc_Word32 width,
+                   const WebRtc_Word32 height)
+    {
+        if (width * height >= 640 * 480)
+        {
+            stats.subSamplWidth = 3; 
+            stats.subSamplHeight = 3;
+        }
+        else if (width * height >= 352 * 288)
+        {
+            stats.subSamplWidth = 2; 
+            stats.subSamplHeight = 2;
+        }
+        else if (width * height >= 176 * 144)
+        {
+            stats.subSamplWidth = 1; 
+            stats.subSamplHeight = 1;
+        }
+        else
+        {
+            stats.subSamplWidth = 0; 
+            stats.subSamplHeight = 0;
+        }
+    }
+}
+
+VideoProcessingModule*
+VideoProcessingModule::Create(const WebRtc_Word32 id)
+{
+
+    return new VideoProcessingModuleImpl(id);
+}
+
+void
+VideoProcessingModule::Destroy(VideoProcessingModule* module)
+{
+    if (module)
+    {
+        delete static_cast<VideoProcessingModuleImpl*>(module);
+    }
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped mutex(_mutex);
+    _id = id;
+    _brightnessDetection.ChangeUniqueId(id);
+    _deflickering.ChangeUniqueId(id);
+    _denoising.ChangeUniqueId(id);
+    _framePreProcessor.ChangeUniqueId(id);
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Id() const
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _id;
+}
+
+VideoProcessingModuleImpl::VideoProcessingModuleImpl(const WebRtc_Word32 id) :
+    _id(id),
+    _mutex(*CriticalSectionWrapper::CreateCriticalSection())
+{
+    _brightnessDetection.ChangeUniqueId(id);
+    _deflickering.ChangeUniqueId(id);
+    _denoising.ChangeUniqueId(id);
+    _framePreProcessor.ChangeUniqueId(id);
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Created");
+}
+
+
+VideoProcessingModuleImpl::~VideoProcessingModuleImpl()
+{
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Destroyed");
+    
+    delete &_mutex;
+}
+
+void
+VideoProcessingModuleImpl::Reset()
+{
+    CriticalSectionScoped mutex(_mutex);
+    _deflickering.Reset();
+    _denoising.Reset();
+    _brightnessDetection.Reset();
+    _framePreProcessor.Reset();
+
+}
+
+WebRtc_Word32
+VideoProcessingModule::GetFrameStats(FrameStats& stats,
+                                         const VideoFrame& frame)
+{
+    return GetFrameStats(stats, frame.Buffer(), frame.Width(), frame.Height());
+}
+
+WebRtc_Word32
+VideoProcessingModule::GetFrameStats(FrameStats& stats,
+                                         const WebRtc_UWord8* frame,
+                                         const WebRtc_UWord32 width,
+                                         const WebRtc_UWord32 height)
+{
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+        return VPM_PARAMETER_ERROR;
+    }
+    
+    if (width == 0 || height == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+        return VPM_PARAMETER_ERROR;
+    }
+
+    ClearFrameStats(stats); // The histogram needs to be zeroed out.
+    SetSubSampling(stats, width, height);
+
+    // Compute histogram and sum of frame
+    for (WebRtc_UWord32 i = 0; i < height; i += (1 << stats.subSamplHeight))
+    {
+        WebRtc_Word32 k = i * width;
+        for (WebRtc_UWord32 j = 0; j < width; j += (1 << stats.subSamplWidth))
+        { 
+            stats.hist[frame[k + j]]++;
+            stats.sum += frame[k + j];
+        }
+    }
+
+    stats.numPixels = (width * height) / ((1 << stats.subSamplWidth) * (1 << stats.subSamplHeight));
+    assert(stats.numPixels > 0);
+
+    // Compute mean value of frame
+    stats.mean = stats.sum / stats.numPixels;
+    
+    return VPM_OK;
+}
+
+bool
+VideoProcessingModule::ValidFrameStats(const FrameStats& stats)
+{
+    if (stats.numPixels == 0)
+    {
+        return false;
+    }
+
+    return true;
+}
+
+void
+VideoProcessingModule::ClearFrameStats(FrameStats& stats)
+{
+    stats.mean = 0;
+    stats.sum = 0;
+    stats.numPixels = 0;
+    stats.subSamplWidth = 0;
+    stats.subSamplHeight = 0;
+    memset(stats.hist, 0, sizeof(stats.hist));
+}
+
+WebRtc_Word32
+VideoProcessingModule::ColorEnhancement(VideoFrame& frame)
+{
+    return ColorEnhancement(frame.Buffer(), frame.Width(), frame.Height());
+}
+
+WebRtc_Word32
+VideoProcessingModule::ColorEnhancement(WebRtc_UWord8* frame,
+                                            const WebRtc_UWord32 width,
+                                            const WebRtc_UWord32 height)
+{
+    return VideoProcessing::ColorEnhancement(frame, width, height);
+}
+
+WebRtc_Word32
+VideoProcessingModule::Brighten(VideoFrame& frame, int delta)
+{
+    return Brighten(frame.Buffer(), frame.Width(), frame.Height(), delta);
+}
+
+WebRtc_Word32
+VideoProcessingModule::Brighten(WebRtc_UWord8* frame,
+                                    int width,
+                                    int height,
+                                    int delta)
+{
+    return VideoProcessing::Brighten(frame, width, height, delta);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Deflickering(VideoFrame& frame,
+                                            FrameStats& stats)
+{
+    return Deflickering(frame.Buffer(), frame.Width(), frame.Height(), 
+        frame.TimeStamp(), stats);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Deflickering(WebRtc_UWord8* frame,
+                                            const WebRtc_UWord32 width,
+                                            const WebRtc_UWord32 height,
+                                            const WebRtc_UWord32 timestamp,
+                                            FrameStats& stats)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _deflickering.ProcessFrame(frame, width, height, timestamp, stats);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Denoising(VideoFrame& frame)
+{
+    return Denoising(frame.Buffer(), frame.Width(), frame.Height());
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Denoising(WebRtc_UWord8* frame,
+                                         const WebRtc_UWord32 width,
+                                         const WebRtc_UWord32 height)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _denoising.ProcessFrame(frame, width, height);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
+                                                   const FrameStats& stats)
+{
+    return BrightnessDetection(frame.Buffer(), frame.Width(), frame.Height(), stats);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::BrightnessDetection(const WebRtc_UWord8* frame,
+                                                   const WebRtc_UWord32 width,
+                                                   const WebRtc_UWord32 height,
+                                                   const FrameStats& stats)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _brightnessDetection.ProcessFrame(frame, width, height, stats);
+}
+
+
+void 
+VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable)
+{
+    CriticalSectionScoped mutex(_mutex);
+    _framePreProcessor.EnableTemporalDecimation(enable);
+}
+
+
+void 
+VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
+{
+    CriticalSectionScoped cs(_mutex);
+    _framePreProcessor.SetInputFrameResampleMode(resamplingMode);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.SetMaxFrameRate(maxFrameRate);
+
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.SetTargetResolution(width, height, frameRate);
+}
+
+
+WebRtc_UWord32
+VideoProcessingModuleImpl::DecimatedFrameRate()
+{
+    CriticalSectionScoped cs(_mutex);
+    return  _framePreProcessor.DecimatedFrameRate();
+}
+
+
+WebRtc_UWord32
+VideoProcessingModuleImpl::DecimatedWidth() const
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.DecimatedWidth();
+}
+
+WebRtc_UWord32
+VideoProcessingModuleImpl::DecimatedHeight() const
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.DecimatedHeight();
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame *frame, VideoFrame **processedFrame)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _framePreProcessor.PreprocessFrame(frame, processedFrame);
+}
+
+VideoContentMetrics*
+VideoProcessingModuleImpl::ContentMetrics() const
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _framePreProcessor.ContentMetrics();
+}
+
+
+void
+VideoProcessingModuleImpl::EnableContentAnalysis(bool enable)
+{
+    CriticalSectionScoped mutex(_mutex);
+    _framePreProcessor.EnableContentAnalysis(enable);
+}
+
+} //namespace
diff --git a/trunk/src/modules/video_processing/main/source/video_processing_impl.h b/trunk/src/modules/video_processing/main/source/video_processing_impl.h
new file mode 100644
index 0000000..3170ab1
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/source/video_processing_impl.h
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
+#define WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
+
+#include "video_processing.h"
+#include "brighten.h"
+#include "brightness_detection.h"
+#include "color_enhancement.h"
+#include "deflickering.h"
+#include "denoising.h"
+#include "frame_preprocessor.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoProcessingModuleImpl : public VideoProcessingModule
+{
+public:
+
+    VideoProcessingModuleImpl(WebRtc_Word32 id);
+
+    virtual ~VideoProcessingModuleImpl();
+
+    WebRtc_Word32 Id() const;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual void Reset();
+
+    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
+                                     WebRtc_UWord32 width,
+                                     WebRtc_UWord32 height,
+                                     WebRtc_UWord32 timestamp,
+                                     FrameStats& stats);
+
+    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
+                                       FrameStats& stats);
+
+    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
+                                    WebRtc_UWord32 width,
+                                    WebRtc_UWord32 height);
+
+    virtual WebRtc_Word32 Denoising(VideoFrame& frame);
+
+    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
+                                              WebRtc_UWord32 width,
+                                              WebRtc_UWord32 height,
+                                              const FrameStats& stats);
+
+    virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
+                                              const FrameStats& stats);
+
+
+    //Frame pre-processor functions
+
+    //Enable temporal decimation
+    virtual void EnableTemporalDecimation(bool enable);
+
+    virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
+
+    //Enable content analysis
+    virtual void EnableContentAnalysis(bool enable);
+
+    //Set max frame rate
+    virtual WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
+
+    // Set Target Resolution: frame rate and dimension
+    virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
+                                              WebRtc_UWord32 height,
+                                              WebRtc_UWord32 frameRate);
+
+
+    // Get decimated values: frame rate/dimension
+    virtual WebRtc_UWord32 DecimatedFrameRate();
+    virtual WebRtc_UWord32 DecimatedWidth() const;
+    virtual WebRtc_UWord32 DecimatedHeight() const;
+
+    // Preprocess:
+    // Pre-process incoming frame: Sample when needed and compute content
+    // metrics when enabled.
+    // If no resampling takes place - processedFrame is set to NULL.
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame,
+                                          VideoFrame** processedFrame);
+    virtual VideoContentMetrics* ContentMetrics() const;
+
+private:
+    WebRtc_Word32              _id;
+    CriticalSectionWrapper&    _mutex;
+
+    VPMDeflickering            _deflickering;
+    VPMDenoising               _denoising;
+    VPMBrightnessDetection     _brightnessDetection;
+    VPMFramePreprocessor       _framePreProcessor;
+};
+
+} // namespace
+
+#endif
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/trunk/src/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
new file mode 100644
index 0000000..6510a5c
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "unit_test.h"
+#include "video_processing.h"
+
+using namespace webrtc;
+
+TEST_F(VideoProcessingModuleTest, BrightnessDetection)
+{
+    WebRtc_UWord32 frameNum = 0;
+    WebRtc_Word32 brightnessWarning = 0;
+    WebRtc_UWord32 warningCount = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    {
+        frameNum++;
+        VideoProcessingModule::FrameStats stats;
+        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        if (brightnessWarning != VideoProcessingModule::kNoWarning)
+        {
+            warningCount++;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Expect few warnings
+    float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+    printf("\nWarning proportions:\n");
+    printf("Stock foreman: %.1f %%\n", warningProportion);
+    EXPECT_LT(warningProportion, 10);
+
+    rewind(_sourceFile);
+    frameNum = 0;
+    warningCount = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
+        frameNum < 300)
+    {
+        frameNum++;
+
+        WebRtc_UWord8* frame = _videoFrame.Buffer();
+        WebRtc_UWord32 yTmp = 0;
+        for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
+        {
+            yTmp = frame[yIdx] << 1;
+            if (yTmp > 255)
+            {
+                yTmp = 255;
+            }
+            frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
+        }
+
+        VideoProcessingModule::FrameStats stats;
+        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
+        if (brightnessWarning == VideoProcessingModule::kBrightWarning)
+        {
+            warningCount++;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Expect many brightness warnings
+    warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+    printf("Bright foreman: %.1f %%\n", warningProportion);
+    EXPECT_GT(warningProportion, 95);
+
+    rewind(_sourceFile);
+    frameNum = 0;
+    warningCount = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
+        frameNum < 300)
+    {
+        frameNum++;
+
+        WebRtc_UWord8* frame = _videoFrame.Buffer();
+        WebRtc_Word32 yTmp = 0;
+        for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
+        {
+            yTmp = frame[yIdx] >> 1;
+            frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
+        }
+
+        VideoProcessingModule::FrameStats stats;
+        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
+        if (brightnessWarning == VideoProcessingModule::kDarkWarning)
+        {
+            warningCount++;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Expect many darkness warnings
+    warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+    printf("Dark foreman: %.1f %%\n\n", warningProportion);
+    EXPECT_GT(warningProportion, 90);
+}
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/color_enhancement_test.cc b/trunk/src/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
new file mode 100644
index 0000000..c494c85
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, ColorEnhancement)
+{
+    TickTime t0;
+    TickTime t1;
+    TickInterval accTicks;
+
+    // Use a shorter version of the Foreman clip for this test.
+    fclose(_sourceFile);
+    const std::string video_file =
+      webrtc::test::ResourcePath("foreman_cif_short", "yuv");
+    _sourceFile  = fopen(video_file.c_str(), "rb");
+    ASSERT_TRUE(_sourceFile != NULL) <<
+        "Cannot read source file: " + video_file + "\n";
+
+    std::string output_file = webrtc::test::OutputPath() +
+        "foremanColorEnhancedVPM_cif_short.yuv";
+    FILE* modFile = fopen(output_file.c_str(), "w+b");
+    ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
+        
+    WebRtc_UWord32 frameNum = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    {
+        frameNum++;
+        t0 = TickTime::Now();
+        ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(_videoFrame));
+        t1 = TickTime::Now();
+        accTicks += t1 - t0;
+        fwrite(_videoFrame.Buffer(), 1, _frameLength, modFile);
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    printf("\nTime per frame: %d us \n", 
+        static_cast<int>(accTicks.Microseconds() / frameNum));
+    rewind(modFile);
+
+    printf("Comparing files...\n\n");
+    std::string reference_filename =
+        webrtc::test::ResourcePath("foremanColorEnhanced_cif_short", "yuv");
+    FILE* refFile = fopen(reference_filename.c_str(), "rb");
+    ASSERT_TRUE(refFile != NULL) << "Cannot open reference file: " <<
+        reference_filename << "\n"
+        "Create the reference by running Matlab script createTable.m.";
+
+    // Get file lengths.
+    ASSERT_EQ(0, fseek(refFile, 0L, SEEK_END));
+    long refLen = ftell(refFile);
+    ASSERT_NE(-1L, refLen);
+    rewind(refFile);
+    ASSERT_EQ(0, fseek(modFile, 0L, SEEK_END));
+    long testLen = ftell(modFile);
+    ASSERT_NE(-1L, testLen);
+    rewind(modFile);
+    ASSERT_EQ(refLen, testLen) << "File lengths differ.";
+	
+    VideoFrame refVideoFrame;
+    refVideoFrame.VerifyAndAllocate(_frameLength);
+    refVideoFrame.SetWidth(_width);
+    refVideoFrame.SetHeight(_height);
+
+    // Compare frame-by-frame.
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, modFile) == _frameLength)
+    {
+        ASSERT_EQ(_frameLength, fread(refVideoFrame.Buffer(), 1, _frameLength, refFile));
+        EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), refVideoFrame.Buffer(), _frameLength));
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Verify that all color pixels are enhanced, that no luminance values are altered,
+    // and that the function does not write outside the vector.
+    WebRtc_UWord32 safeGuard = 1000;
+    WebRtc_UWord32 numPixels = 352*288; // CIF size
+    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
+    WebRtc_UWord8 *refFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
+
+    // use value 128 as probe value, since we know that this will be changed in the enhancement
+    memset(testFrame, 128, safeGuard);
+    memset(&testFrame[safeGuard], 128, numPixels);
+    memset(&testFrame[safeGuard + numPixels], 128, numPixels / 2);
+    memset(&testFrame[safeGuard + numPixels + (numPixels / 2)], 128, safeGuard);
+
+    memcpy(refFrame, testFrame, numPixels + (numPixels / 2) + (2 * safeGuard));
+
+    ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testFrame[safeGuard], 352, 288));
+
+    EXPECT_EQ(0, memcmp(testFrame, refFrame, safeGuard)) <<
+        "Function is writing outside the frame memory.";
+    
+    EXPECT_EQ(0, memcmp(&testFrame[safeGuard + numPixels + (numPixels / 2)], 
+        &refFrame[safeGuard + numPixels + (numPixels / 2)], safeGuard)) <<
+        "Function is writing outside the frame memory.";
+
+    EXPECT_EQ(0, memcmp(&testFrame[safeGuard], &refFrame[safeGuard], numPixels)) <<
+        "Function is modifying the luminance.";
+
+    EXPECT_NE(0, memcmp(&testFrame[safeGuard + numPixels],
+        &refFrame[safeGuard + numPixels], numPixels / 2)) <<
+        "Function is not modifying all chrominance pixels";
+
+    ASSERT_EQ(0, fclose(refFile));
+    ASSERT_EQ(0, fclose(modFile));
+    delete [] testFrame;
+    delete [] refFrame;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/trunk/src/modules/video_processing/main/test/unit_test/content_metrics_test.cc
new file mode 100644
index 0000000..54a1390
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/content_metrics_test.cc
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/source/content_analysis.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, ContentAnalysis)
+{
+    VPMContentAnalysis    _ca_c(false);
+    VPMContentAnalysis    _ca_sse(true);
+    VideoContentMetrics  *_cM_c, *_cM_SSE;
+
+    _ca_c.Initialize(_width,_height);
+    _ca_sse.Initialize(_width,_height);
+
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
+           == _frameLength)
+    {
+        _cM_c   = _ca_c.ComputeContentMetrics(&_videoFrame);
+        _cM_SSE = _ca_sse.ComputeContentMetrics(&_videoFrame);
+
+        ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
+        ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);
+        ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h);
+        ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude);
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/createTable.m b/trunk/src/modules/video_processing/main/test/unit_test/createTable.m
new file mode 100644
index 0000000..2c7fb52
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/createTable.m
@@ -0,0 +1,179 @@
+% Create the color enhancement look-up table and write it to
+% file colorEnhancementTable.h. Copy contents of that file into
+% the source file for the color enhancement function.
+
+clear
+close all
+
+
+% First, define the color enhancement in a normalized domain
+
+% Compander function is defined in three radial zones.
+% 1. From 0 to radius r0, the compander function
+% is a second-order polynomial intersecting the points (0,0)
+% and (r0, r0), and with a slope B in (0,0).
+% 2. From r0 to r1, the compander is a third-order polynomial
+% intersecting the points (r0, r0) and (r1, r1), and with the
+% same slope as the first part in the point (r0, r0) and slope
+% equal to 1 in (r1, r1).
+% 3. For radii larger than r1, the compander function is the
+% unity scale function (no scaling at all).
+
+r0=0.07; % Dead zone radius (must be > 0)
+r1=0.6; % Enhancement zone radius (must be > r0 and < 1)
+B=0.2; % initial slope of compander function (between 0 and 1)
+
+x0=linspace(0,r0).'; % zone 1
+x1=linspace(r0,r1).'; % zone 2
+x2=linspace(r1,1).'; % zone 3
+
+A=(1-B)/r0;
+f0=A*x0.^2+B*x0; % compander function in zone 1
+
+% equation system for finding second zone parameters
+M=[r0^3 r0^2 r0 1; 
+    3*r0^2 2*r0 1 0;
+    3*r1^2 2*r1 1 0;
+    r1^3 r1^2 r1 1];
+m=[A*r0^2+B*r0; 2*A*r0+B; 1; r1];
+% solve equations
+theta=M\m;
+
+% compander function in zone 2
+f1=[x1.^3 x1.^2 x1 ones(size(x1))]*theta;
+
+x=[x0; x1; x2];
+f=[f0; f1; x2];
+
+% plot it
+figure(1)
+plot(x,f,x,x,':')
+xlabel('Normalized radius')
+ylabel('Modified radius')
+
+
+% Now, create the look-up table in the integer color space
+[U,V]=meshgrid(0:255, 0:255); % U-V space
+U0=U;
+V0=V;
+
+% Conversion matrix from normalized YUV to RGB
+T=[1 0 1.13983; 1 -0.39465 -0.58060; 1 2.03211 0];
+Ylum=0.5;
+
+figure(2)
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(121)
+image(Z);
+axis square
+axis off
+set(gcf,'color','k')
+
+R = sqrt((U-127).^2 + (V-127).^2);
+Rnorm = R/127;
+RnormMod = Rnorm;
+RnormMod(RnormMod==0)=1; % avoid division with zero
+
+% find indices to pixels in dead-zone (zone 1)
+ix=find(Rnorm<=r0);
+scaleMatrix = (A*Rnorm(ix).^2 + B*Rnorm(ix))./RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix+127;
+V(ix)=(V(ix)-127).*scaleMatrix+127;
+
+% find indices to pixels in zone 2
+ix=find(Rnorm>r0 & Rnorm<=r1);
+scaleMatrix = (theta(1)*Rnorm(ix).^3 + theta(2)*Rnorm(ix).^2 + ...
+    theta(3)*Rnorm(ix) + theta(4)) ./ RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix + 127;
+V(ix)=(V(ix)-127).*scaleMatrix + 127;
+
+% round to integer values and saturate
+U=round(U);
+V=round(V);
+U=max(min(U,255),0);
+V=max(min(V,255),0);
+
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(122)
+image(Z);
+axis square
+axis off
+
+figure(3)
+subplot(121)
+mesh(U-U0)
+subplot(122)
+mesh(V-V0)
+
+
+
+% Last, write to file
+% Write only one matrix, since U=V'
+
+fid = fopen('../out/Debug/colorEnhancementTable.h','wt');
+if fid==-1
+    error('Cannot open file colorEnhancementTable.cpp');
+end
+
+fprintf(fid,'//colorEnhancementTable.h\n\n');
+fprintf(fid,'//Copy the constant table to the appropriate header file.\n\n');
+
+fprintf(fid,'//Table created with Matlab script createTable.m\n\n');
+fprintf(fid,'//Usage:\n');
+fprintf(fid,'//    Umod=colorTable[U][V]\n');
+fprintf(fid,'//    Vmod=colorTable[V][U]\n');
+
+fprintf(fid,'static unsigned char colorTable[%i][%i] = {\n', size(U,1), size(U,2));
+
+for u=1:size(U,2)
+    fprintf(fid,'    {%i', U(1,u));
+    for v=2:size(U,1)
+        fprintf(fid,', %i', U(v,u));
+    end
+    fprintf(fid,'}');
+    if u<size(U,2)
+        fprintf(fid,',');
+    end
+    fprintf(fid,'\n');
+end
+fprintf(fid,'};\n\n');
+fclose(fid);
+fprintf('done');
+
+
+answ=input('Create test vector (takes some time...)? y/n : ','s');
+if answ ~= 'y'
+    return
+end
+
+% Also, create test vectors
+
+% Read test file foreman.yuv
+fprintf('Reading test file...')
+[y,u,v]=readYUV420file('../out/Debug/testFiles/foreman_cif.yuv',352,288);
+fprintf(' done\n');
+unew=uint8(zeros(size(u)));
+vnew=uint8(zeros(size(v)));
+
+% traverse all frames
+for k=1:size(y,3)
+    fprintf('Frame %i\n', k);
+    for r=1:size(u,1)
+        for c=1:size(u,2)
+            unew(r,c,k) = uint8(U(double(v(r,c,k))+1, double(u(r,c,k))+1));
+            vnew(r,c,k) = uint8(V(double(v(r,c,k))+1, double(u(r,c,k))+1));
+        end
+    end
+end
+      
+fprintf('\nWriting modified test file...')
+writeYUV420file('../out/Debug/foremanColorEnhanced.yuv',y,unew,vnew);
+fprintf(' done\n');
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/deflickering_test.cc b/trunk/src/modules/video_processing/main/test/unit_test/deflickering_test.cc
new file mode 100644
index 0000000..f91dc13
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/deflickering_test.cc
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, Deflickering)
+{
+    enum { NumRuns = 30 };
+    WebRtc_UWord32 frameNum = 0;
+    const WebRtc_UWord32 frameRate = 15;
+
+    WebRtc_Word64 minRuntime = 0;
+    WebRtc_Word64 avgRuntime = 0;
+
+    // Close automatically opened Foreman.
+    fclose(_sourceFile);
+    const std::string input_file =
+        webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
+    _sourceFile  = fopen(input_file.c_str(), "rb");
+    ASSERT_TRUE(_sourceFile != NULL) <<
+        "Cannot read input file: " << input_file << "\n";
+
+    const std::string output_file =
+        webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
+    FILE* deflickerFile = fopen(output_file.c_str(), "wb");
+    ASSERT_TRUE(deflickerFile != NULL) <<
+        "Could not open output file: " << output_file << "\n";
+
+    printf("\nRun time [us / frame]:\n");
+    for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
+    {
+        TickTime t0;
+        TickTime t1;
+        TickInterval accTicks;
+        WebRtc_UWord32 timeStamp = 1;
+
+        frameNum = 0;
+        while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+        {
+            frameNum++;
+            _videoFrame.SetTimeStamp(timeStamp);
+
+            t0 = TickTime::Now();           
+            VideoProcessingModule::FrameStats stats;
+            ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+            ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+            t1 = TickTime::Now();
+            accTicks += t1 - t0;
+            
+            if (runIdx == 0)
+            {
+                fwrite(_videoFrame.Buffer(), 1, _frameLength, deflickerFile);
+            }
+            timeStamp += (90000 / frameRate);
+        }
+        ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+        printf("%u\n", static_cast<int>(accTicks.Microseconds() / frameNum));
+        if (accTicks.Microseconds() < minRuntime || runIdx == 0)
+        {
+            minRuntime = accTicks.Microseconds();
+        }
+        avgRuntime += accTicks.Microseconds();
+
+        rewind(_sourceFile);
+    }
+    ASSERT_EQ(0, fclose(deflickerFile));
+    // TODO(kjellander): Add verification of deflicker output file.
+
+    printf("\nAverage run time = %d us / frame\n", 
+        static_cast<int>(avgRuntime / frameNum / NumRuns));
+    printf("Min run time = %d us / frame\n\n", 
+        static_cast<int>(minRuntime / frameNum));
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/denoising_test.cc b/trunk/src/modules/video_processing/main/test/unit_test/denoising_test.cc
new file mode 100644
index 0000000..173ceb8
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/denoising_test.cc
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, Denoising)
+{
+    enum { NumRuns = 10 };
+    WebRtc_UWord32 frameNum = 0;
+
+    WebRtc_Word64 minRuntime = 0;
+    WebRtc_Word64 avgRuntime = 0;
+
+    const std::string denoise_filename =
+        webrtc::test::OutputPath() + "denoise_testfile.yuv";
+    FILE* denoiseFile = fopen(denoise_filename.c_str(), "wb");
+    ASSERT_TRUE(denoiseFile != NULL) <<
+        "Could not open output file: " << denoise_filename << "\n";
+
+    const std::string noise_filename =
+        webrtc::test::OutputPath() + "noise_testfile.yuv";
+    FILE* noiseFile = fopen(noise_filename.c_str(), "wb");
+    ASSERT_TRUE(noiseFile != NULL) <<
+        "Could not open noisy file: " << noise_filename << "\n";
+
+    printf("\nRun time [us / frame]:\n");
+    for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
+    {
+        TickTime t0;
+        TickTime t1;
+        TickInterval accTicks;
+        WebRtc_Word32 modifiedPixels = 0;
+
+        frameNum = 0;
+        while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+        {
+            frameNum++;
+            WebRtc_UWord8* sourceBuffer = _videoFrame.Buffer();
+
+            // Add noise to a part in video stream
+            // Random noise
+            // TODO: investigate the effectiveness of this test.
+
+            //for(WebRtc_UWord32 ir = 0; ir < _frameLength; ir++)
+            //    sourceBuffer[ir] = 128
+            for (WebRtc_UWord32 ir = 0; ir < _height; ir++)
+            {
+                WebRtc_UWord32 ik = ir * _width;
+                for (WebRtc_UWord32 ic = 0; ic < _width; ic++)
+                {
+                    WebRtc_UWord8 r = rand() % 16;
+                    r -= 8;
+                    if (ir < _height / 4)
+                        r = 0;
+                    if (ir >= 3 * _height / 4)
+                        r = 0;
+                    if (ic < _width / 4)
+                        r = 0;
+                    if (ic >= 3 * _width / 4)
+                        r = 0;
+
+                    /*WebRtc_UWord8 pixelValue = 0;
+                    if (ir >= _height / 2)
+                    { // Region 3 or 4
+                        pixelValue = 170;
+                    }
+                    if (ic >= _width / 2)
+                    { // Region 2 or 4
+                        pixelValue += 85;
+                    }
+                    pixelValue += r;
+                    sourceBuffer[ik + ic] = pixelValue;
+                    */
+                    sourceBuffer[ik + ic] += r;
+                }
+            }
+
+            if (runIdx == 0)
+            {
+                fwrite(_videoFrame.Buffer(), 1, _frameLength, noiseFile);
+            }
+            
+            t0 = TickTime::Now();            
+            ASSERT_GE(modifiedPixels = _vpm->Denoising(_videoFrame), 0);
+            t1 = TickTime::Now();
+            accTicks += t1 - t0;
+            
+            if (runIdx == 0)
+            {
+                fwrite(_videoFrame.Buffer(), 1, _frameLength, denoiseFile);
+            }
+        }
+        ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+        printf("%u\n", static_cast<int>(accTicks.Microseconds() / frameNum));
+        if (accTicks.Microseconds() < minRuntime || runIdx == 0)
+        {
+            minRuntime = accTicks.Microseconds();
+        }
+        avgRuntime += accTicks.Microseconds();
+
+        rewind(_sourceFile);
+    }
+    ASSERT_EQ(0, fclose(denoiseFile));
+    ASSERT_EQ(0, fclose(noiseFile));
+    printf("\nAverage run time = %d us / frame\n", 
+        static_cast<int>(avgRuntime / frameNum / NumRuns));
+    printf("Min run time = %d us / frame\n\n", 
+        static_cast<int>(minRuntime / frameNum));
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/readYUV420file.m b/trunk/src/modules/video_processing/main/test/unit_test/readYUV420file.m
new file mode 100644
index 0000000..03013ef
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/readYUV420file.m
@@ -0,0 +1,45 @@
+function [Y,U,V] = readYUV420file(filename, width, height)
+% [Y,U,V] = readYUV420file(filename, width, height)
+
+fid = fopen(filename,'rb');
+if fid==-1
+    error(['Cannot open file ' filename]);
+end
+
+% Number of pixels per image
+nPx=width*height;
+
+% nPx bytes luminance, nPx/4 bytes U, nPx/4 bytes V
+frameSizeBytes = nPx*1.5; 
+
+% determine file length in bytes
+fseek(fid,0,'eof'); % move to end of file
+fileLen=ftell(fid); % number of bytes
+fseek(fid,0,'bof'); % rewind to start
+
+% calculate number of frames
+numFrames = floor(fileLen/frameSizeBytes);
+
+Y=uint8(zeros(height,width,numFrames));
+U=uint8(zeros(height/2,width/2,numFrames));
+V=uint8(zeros(height/2,width/2,numFrames));
+
+[X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+
+for k=1:numFrames
+    
+    % Store luminance
+    Y(:,:,k)=uint8(reshape(X(1:nPx), width, height).');
+    
+    % Store U channel
+    U(:,:,k)=uint8(reshape(X(nPx + (1:nPx/4)), width/2, height/2).');
+
+    % Store V channel
+    V(:,:,k)=uint8(reshape(X(nPx + nPx/4 + (1:nPx/4)), width/2, height/2).');
+    
+    % Read next frame
+    [X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+end
+
+    
+fclose(fid);
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/unit_test.cc b/trunk/src/modules/video_processing/main/test/unit_test/unit_test.cc
new file mode 100644
index 0000000..1dbe070
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/unit_test.cc
@@ -0,0 +1,343 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+
+#include <string>
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+void TestSize(VideoFrame& sourceFrame,
+              WebRtc_UWord32 targetWidth, WebRtc_UWord32 targetHeight,
+              WebRtc_UWord32 mode, VideoProcessingModule *vpm);
+
+VideoProcessingModuleTest::VideoProcessingModuleTest() :
+  _vpm(NULL),
+  _sourceFile(NULL),
+  _width(352),
+  _height(288),
+  _frameLength(CalcBufferSize(kI420, 352, 288))
+{
+}
+
+void VideoProcessingModuleTest::SetUp()
+{
+  _vpm = VideoProcessingModule::Create(0);
+  ASSERT_TRUE(_vpm != NULL);
+
+  ASSERT_EQ(0, _videoFrame.VerifyAndAllocate(_frameLength));
+  _videoFrame.SetWidth(_width);
+  _videoFrame.SetHeight(_height);
+
+  const std::string video_file =
+      webrtc::test::ResourcePath("foreman_cif", "yuv");
+  _sourceFile  = fopen(video_file.c_str(),"rb");
+  ASSERT_TRUE(_sourceFile != NULL) <<
+      "Cannot read source file: " + video_file + "\n";
+}
+
+void VideoProcessingModuleTest::TearDown()
+{
+  if (_sourceFile != NULL)  {
+    ASSERT_EQ(0, fclose(_sourceFile));
+  }
+  _sourceFile = NULL;
+
+  if (_vpm != NULL)  {
+    VideoProcessingModule::Destroy(_vpm);
+  }
+  _vpm = NULL;
+}
+
+TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
+{
+  VideoProcessingModule::FrameStats stats;
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  // Video frame with unallocated buffer.
+  VideoFrame videoFrame;
+  videoFrame.SetWidth(_width);
+  videoFrame.SetHeight(_height);
+
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, NULL, _width, _height));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, videoFrame));
+
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(NULL, _width, _height));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(videoFrame));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(NULL, _width, _height, 0, stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(videoFrame, stats));
+
+  EXPECT_EQ(-1, _vpm->Denoising(NULL, _width, _height));
+  EXPECT_EQ(-1, _vpm->Denoising(videoFrame));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(NULL, _width, _height, stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(videoFrame, stats));
+
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(NULL, NULL));
+}
+
+TEST_F(VideoProcessingModuleTest, HandleBadStats)
+{
+  VideoProcessingModule::FrameStats stats;
+
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, _height, 0,
+                                   stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width,
+                                          _height, stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
+}
+
+TEST_F(VideoProcessingModuleTest, HandleBadSize)
+{
+  VideoProcessingModule::FrameStats stats;
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+
+  // Bad width
+  _videoFrame.SetWidth(0);
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), 0, _height));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), 0, _height));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), 0, _height, 0,
+                                   stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), 0, _height));
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), 0, _height,
+                                          stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
+
+
+  // Bad height
+  _videoFrame.SetWidth(_width);
+  _videoFrame.SetHeight(0);
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), _width, 0));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), _width, 0));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, 0, 0,
+                                   stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), _width, 0));
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width, 0,
+                                          stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
+
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));
+
+  VideoFrame *outFrame = NULL;
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(&_videoFrame,
+                                                       &outFrame));
+}
+
+TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
+{
+  VideoFrame videoFrame2;
+  VideoProcessingModule::FrameStats stats;
+
+  ASSERT_EQ(0, videoFrame2.VerifyAndAllocate(_frameLength));
+  videoFrame2.SetWidth(_width);
+  videoFrame2.SetHeight(_height);
+
+  // Only testing non-static functions here.
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+  _vpm->Reset();
+  // Retrieve frame stats again in case Deflickering() has zeroed them.
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, videoFrame2));
+  ASSERT_EQ(0, _vpm->Deflickering(videoFrame2, stats));
+  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
+                      _frameLength));
+
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_GE(_vpm->Denoising(_videoFrame), 0);
+  _vpm->Reset();
+  ASSERT_GE(_vpm->Denoising(videoFrame2), 0);
+  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
+                      _frameLength));
+
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
+  _vpm->Reset();
+  ASSERT_EQ(0, _vpm->BrightnessDetection(videoFrame2, stats));
+  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
+                      _frameLength));
+}
+
+TEST_F(VideoProcessingModuleTest, FrameStats)
+{
+  VideoProcessingModule::FrameStats stats;
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+
+  EXPECT_FALSE(_vpm->ValidFrameStats(stats));
+  EXPECT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  EXPECT_TRUE(_vpm->ValidFrameStats(stats));
+
+  printf("\nFrameStats\n");
+  printf("mean: %u\nnumPixels: %u\nsubSamplWidth: "
+         "%u\nsumSamplHeight: %u\nsum: %u\n\n",
+         static_cast<unsigned int>(stats.mean),
+         static_cast<unsigned int>(stats.numPixels),
+         static_cast<unsigned int>(stats.subSamplHeight),
+         static_cast<unsigned int>(stats.subSamplWidth),
+         static_cast<unsigned int>(stats.sum));
+
+  _vpm->ClearFrameStats(stats);
+  EXPECT_FALSE(_vpm->ValidFrameStats(stats));
+}
+
+TEST_F(VideoProcessingModuleTest, PreprocessorLogic)
+{
+  // Disable temporal sampling
+  _vpm->EnableTemporalDecimation(false);
+  ASSERT_EQ(VPM_OK, _vpm->SetMaxFrameRate(30));
+  ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 15));
+  // Revert
+  _vpm->EnableTemporalDecimation(true);
+  ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
+  // Disable spatial sampling
+  _vpm->SetInputFrameResampleMode(kNoRescaling);
+  ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
+  VideoFrame *outFrame = NULL;
+  ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(&_videoFrame, &outFrame));
+  // No rescaling => the output frame pointer remains NULL
+  ASSERT_TRUE(outFrame == NULL);
+}
+
+TEST_F(VideoProcessingModuleTest, Resampler)
+{
+  enum { NumRuns = 1 };
+
+  WebRtc_Word64 minRuntime = 0;
+  WebRtc_Word64 avgRuntime = 0;
+
+  TickTime t0;
+  TickTime t1;
+  TickInterval accTicks;
+  WebRtc_Word32 height = 288;
+  WebRtc_Word32 width = 352;
+  WebRtc_Word32 lengthSourceFrame = width*height*3/2;
+
+  rewind(_sourceFile);
+  ASSERT_TRUE(_sourceFile != NULL) <<
+      "Cannot read input file \n";
+
+  // CA not needed here
+  _vpm->EnableContentAnalysis(false);
+  // no temporal decimation
+  _vpm->EnableTemporalDecimation(false);
+
+  // Reading test frame
+  VideoFrame sourceFrame;
+  ASSERT_EQ(0, sourceFrame.VerifyAndAllocate(lengthSourceFrame));
+  EXPECT_GT(fread(sourceFrame.Buffer(), 1, lengthSourceFrame, _sourceFile), 0u);
+  ASSERT_EQ(0, sourceFrame.SetLength(lengthSourceFrame));
+  sourceFrame.SetHeight(height);
+  sourceFrame.SetWidth(width);
+
+  for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
+  {
+    // initiate test timer
+    t0 = TickTime::Now();
+
+    // kFastRescaling
+    _vpm->SetInputFrameResampleMode(kFastRescaling);
+    // TESTING DIFFERENT SIZES
+    TestSize(sourceFrame, 100, 50, 1, _vpm);  // Cut, decimation 1x, interpolate
+    TestSize(sourceFrame, 352/2, 288/2, 1, _vpm);  // Even decimation
+    TestSize(sourceFrame, 352, 288, 1, _vpm);      // No resampling
+    TestSize(sourceFrame, 2*352, 2*288,1,  _vpm);  // Even upsampling
+    TestSize(sourceFrame, 400, 256, 1, _vpm);      // Upsampling 1.5x and cut
+    TestSize(sourceFrame, 960, 720, 1, _vpm);      // Upsampling 3.5x and cut
+    TestSize(sourceFrame, 1280, 720, 1, _vpm);     // Upsampling 4x and cut
+
+    // kBiLinear
+    _vpm->SetInputFrameResampleMode(kBiLinear);
+    // TESTING DIFFERENT SIZES
+    TestSize(sourceFrame, 352/4, 288/4, 2, _vpm);
+    TestSize(sourceFrame, 352/2, 288/2, 2, _vpm);
+    TestSize(sourceFrame, 2*352, 2*288,2, _vpm);
+    TestSize(sourceFrame, 480, 640, 2, _vpm);
+    TestSize(sourceFrame, 960, 720, 2, _vpm);
+    TestSize(sourceFrame, 1280, 720, 2, _vpm);
+    // stop timer
+    t1 = TickTime::Now();
+    accTicks += t1 - t0;
+
+    if (accTicks.Microseconds() < minRuntime || runIdx == 0)  {
+      minRuntime = accTicks.Microseconds();
+    }
+    avgRuntime += accTicks.Microseconds();
+  }
+
+  sourceFrame.Free();
+
+  printf("\nAverage run time = %d us / frame\n",
+         //static_cast<int>(avgRuntime / frameNum / NumRuns));
+         static_cast<int>(avgRuntime));
+  printf("Min run time = %d us / frame\n\n",
+         //static_cast<int>(minRuntime / frameNum));
+         static_cast<int>(minRuntime));
+}
+
+void TestSize(VideoFrame& sourceFrame, WebRtc_UWord32 targetWidth,
+              WebRtc_UWord32 targetHeight,
+              WebRtc_UWord32 mode, VideoProcessingModule *vpm)
+{
+    VideoFrame *outFrame = NULL;
+  std::ostringstream filename;
+  filename << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" <<
+      targetWidth << "x" << targetHeight << "_30Hz_P420.yuv";
+  // TODO(kjellander): Add automatic verification of these output files:
+  std::cout << "Watch " << filename.str() << " and verify that it is okay."
+            << std::endl;
+  FILE* standAloneFile = fopen(filename.str().c_str(), "wb");
+  ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(targetWidth, targetHeight, 30));
+  ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&sourceFrame, &outFrame));
+  // Length should be updated only if frame was resampled
+  if (targetWidth != sourceFrame.Width() ||
+      targetHeight != sourceFrame.Height())  {
+    ASSERT_EQ((targetWidth * targetHeight * 3 / 2), outFrame->Length());
+    // Write to file for visual inspection
+    fwrite(outFrame->Buffer(), 1, outFrame->Length(), standAloneFile);
+    outFrame->Free();
+  }
+  fclose(standAloneFile);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/unit_test.h b/trunk/src/modules/video_processing/main/test/unit_test/unit_test.h
new file mode 100644
index 0000000..2363e1a
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/unit_test.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VPM_UNIT_TEST_H
+#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VPM_UNIT_TEST_H
+
+#include "gtest/gtest.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "system_wrappers/interface/trace.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+class VideoProcessingModuleTest : public ::testing::Test
+{
+protected:
+    VideoProcessingModuleTest();
+    virtual void SetUp();
+    virtual void TearDown();
+    static void SetUpTestCase()
+    {
+      Trace::CreateTrace();
+      std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt";
+      ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str()));
+    }
+    static void TearDownTestCase()
+    {
+      Trace::ReturnTrace();
+    }
+    VideoProcessingModule* _vpm;
+    FILE* _sourceFile;
+    VideoFrame _videoFrame;
+    const WebRtc_UWord32 _width;
+    const WebRtc_UWord32 _height;
+    const WebRtc_UWord32 _frameLength;
+};
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VPM_UNIT_TEST_H
diff --git a/trunk/src/modules/video_processing/main/test/unit_test/writeYUV420file.m b/trunk/src/modules/video_processing/main/test/unit_test/writeYUV420file.m
new file mode 100644
index 0000000..69a8808
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/unit_test/writeYUV420file.m
@@ -0,0 +1,22 @@
+function writeYUV420file(filename, Y, U, V)
+% writeYUV420file(filename, Y, U, V)
+
+fid = fopen(filename,'wb');
+if fid==-1
+    error(['Cannot open file ' filename]);
+end
+
+numFrames=size(Y,3);
+
+for k=1:numFrames
+   % Write luminance
+   fwrite(fid,uint8(Y(:,:,k).'), 'uchar');
+   
+   % Write U channel
+   fwrite(fid,uint8(U(:,:,k).'), 'uchar');
+   
+   % Write V channel
+   fwrite(fid,uint8(V(:,:,k).'), 'uchar');
+end
+
+fclose(fid);
diff --git a/trunk/src/modules/video_processing/main/test/vpm_tests.gypi b/trunk/src/modules/video_processing/main/test/vpm_tests.gypi
new file mode 100644
index 0000000..e53ac07
--- /dev/null
+++ b/trunk/src/modules/video_processing/main/test/vpm_tests.gypi
@@ -0,0 +1,39 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+     'target_name': 'video_processing_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'video_processing',
+        'webrtc_utility',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+      ],
+      'sources': [
+        # headers
+        'unit_test/unit_test.h',
+        # sources
+        'unit_test/brightness_detection_test.cc',
+        'unit_test/color_enhancement_test.cc',
+        'unit_test/content_metrics_test.cc',
+        'unit_test/deflickering_test.cc',
+        'unit_test/denoising_test.cc',
+        'unit_test/unit_test.cc',
+      ], # sources
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_render/OWNERS b/trunk/src/modules/video_render/OWNERS
new file mode 100644
index 0000000..ac607bd
--- /dev/null
+++ b/trunk/src/modules/video_render/OWNERS
@@ -0,0 +1,4 @@
+mflodman@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
+mallinath@webrtc.org
diff --git a/trunk/src/modules/video_render/main/interface/video_render.h b/trunk/src/modules/video_render/main/interface/video_render.h
new file mode 100644
index 0000000..42fce70
--- /dev/null
+++ b/trunk/src/modules/video_render/main/interface/video_render.h
@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
+
+/*
+ * video_render.h
+ *
+ * This header file together with module.h and module_common_types.h
+ * contains all of the APIs that are needed for using the video render
+ * module class.
+ *
+ */
+
+#include "module.h"
+#include "video_render_defines.h"
+
+namespace webrtc {
+// Class definitions
+class VideoRender: public Module
+{
+public:
+    /*
+     *   Create a video render module object
+     *
+     *   id              - unique identifier of this video render module object
+     *   window          - pointer to the window to render to
+     *   fullscreen      - true if this is a fullscreen renderer
+     *   videoRenderType - type of renderer to create
+     */
+    static VideoRender
+            * CreateVideoRender(
+                                          const WebRtc_Word32 id,
+                                          void* window,
+                                          const bool fullscreen,
+                                          const VideoRenderType videoRenderType =
+                                                  kRenderDefault);
+
+    /*
+     *   Destroy a video render module object
+     *
+     *   module  - object to destroy
+     */
+    static void DestroyVideoRender(VideoRender* module);
+
+    /*
+     *   Change the unique identifier of this object
+     *
+     *   id      - new unique identifier of this video render module object
+     */
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    virtual WebRtc_Word32 TimeUntilNextProcess() = 0;
+    virtual WebRtc_Word32 Process() = 0;
+
+    /**************************************************************************
+     *
+     *   Window functions
+     *
+     ***************************************************************************/
+
+    /*
+     *   Get window for this renderer
+     */
+    virtual void* Window() = 0;
+
+    /*
+     *   Change render window
+     *
+     *   window      - the new render window, assuming same type as originally created.
+     */
+    virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     *
+     *   streamID    - id of the stream to add
+     *   zOrder      - relative render order for the streams, 0 = on top
+     *   left        - position of the stream in the window, [0.0f, 1.0f]
+     *   top         - position of the stream in the window, [0.0f, 1.0f]
+     *   right       - position of the stream in the window, [0.0f, 1.0f]
+     *   bottom      - position of the stream in the window, [0.0f, 1.0f]
+     *
+     *   Return      - callback class to use for delivering new frames to render.
+     */
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom) = 0;
+    /*
+     *   Delete incoming render stream
+     *
+     *   streamID    - id of the stream to add
+     */
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     *
+     *   streamID     - id of the stream the callback is used for
+     *   renderObject - the VideoRenderCallback to use for this stream, NULL to remove
+     *
+     *   Return      - callback class to use for delivering new frames to render.
+     */
+    virtual WebRtc_Word32
+            AddExternalRenderCallback(const WebRtc_UWord32 streamId,
+                                      VideoRenderCallback* renderObject) = 0;
+
+    /*
+     *   Get the properties for an incoming render stream
+     *
+     *   streamID    - [in] id of the stream to get properties for
+     *   zOrder      - [out] relative render order for the streams, 0 = on top
+     *   left        - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   top         - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   right       - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   bottom      - [out] position of the stream in the window, [0.0f, 1.0f]
+     */
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const = 0;
+    /*
+     *   The incoming frame rate to the module, not the rate rendered in the window.
+     */
+    virtual WebRtc_UWord32
+            GetIncomingFrameRate(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const = 0;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool
+            HasIncomingRenderStream(const WebRtc_UWord32 streamId) const = 0;
+
+    /*
+     *   Registers a callback to get raw images in the same time as sent
+     *   to the renderer. To be used for external rendering.
+     */
+    virtual WebRtc_Word32
+            RegisterRawFrameCallback(const WebRtc_UWord32 streamId,
+                                     VideoRenderCallback* callbackObj) = 0;
+
+    /*
+     * This method is useful for getting the last rendered frame for the specified stream
+     */
+    virtual WebRtc_Word32
+            GetLastRenderedFrame(const WebRtc_UWord32 streamId,
+                                 VideoFrame &frame) const = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual WebRtc_Word32 StartRender(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Stops the renderer
+     */
+    virtual WebRtc_Word32 StopRender(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Resets the renderer
+     *   No streams are removed. The state should be as after AddStream was called.
+     */
+    virtual WebRtc_Word32 ResetRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the preferred render video type
+     */
+    virtual RawVideoType PreferredVideoType() const = 0;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen() = 0;
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const = 0;
+
+    /*
+     *   Get the actual render rate for this stream. I.e rendered frame rate,
+     *   not frames delivered to the renderer.
+     */
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    /*
+     * re-configure renderer
+     */
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom) = 0;
+
+    /*
+     * Set a start image. The image is rendered before the first image has been delivered
+     */
+    virtual WebRtc_Word32
+            SetStartImage(const WebRtc_UWord32 streamId,
+                          const VideoFrame& videoFrame) = 0;
+
+    /*
+     * Set a timeout image. The image is rendered if no video frame has been delivered
+     */
+    virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId,
+                                          const VideoFrame& videoFrame,
+                                          const WebRtc_UWord32 timeout)= 0;
+
+    virtual WebRtc_Word32 MirrorRenderStream(const int renderId,
+                                             const bool enable,
+                                             const bool mirrorXAxis,
+                                             const bool mirrorYAxis) = 0;
+
+    static WebRtc_Word32 SetAndroidObjects(void* javaVM);
+};
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
diff --git a/trunk/src/modules/video_render/main/interface/video_render_defines.h b/trunk/src/modules/video_render/main/interface/video_render_defines.h
new file mode 100644
index 0000000..2a87f3f
--- /dev/null
+++ b/trunk/src/modules/video_render/main/interface/video_render_defines.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
+
+// Includes
+#include "common_types.h"
+#include "module_common_types.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+#define NULL    0
+#endif
+
+// Enums
+enum VideoRenderType
+{
+    kRenderExternal = 0, // External
+    kRenderWindows = 1, // Windows
+    kRenderCocoa = 2, // Mac
+    kRenderCarbon = 3,
+    kRenderiPhone = 4, // iPhone
+    kRenderAndroid = 5, // Android
+    kRenderX11 = 6, // Linux
+    kRenderDefault
+};
+
+// Runtime errors
+enum VideoRenderError
+{
+    kRenderShutDown = 0,
+    kRenderPerformanceAlarm = 1
+};
+
+// The object a module user uses to send new frames to the renderer
+// One object is used for each incoming stream
+class VideoRenderCallback
+{
+public:
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame) = 0;
+
+protected:
+    virtual ~VideoRenderCallback()
+    {
+    }
+};
+
+// Feedback class to be implemented by module user
+class VideoRenderFeedback
+{
+public:
+    virtual void OnRenderError(const WebRtc_Word32 streamId,
+                               const VideoRenderError error) = 0;
+
+protected:
+    virtual ~VideoRenderFeedback()
+    {
+    }
+};
+
+// Mobile enums
+enum StretchMode
+{
+    kStretchToInsideEdge = 1,
+    kStretchToOutsideEdge = 2,
+    kStretchMatchWidth = 3,
+    kStretchMatchHeight = 4,
+    kStretchNone = 5
+};
+
+enum Rotation
+{
+    kRotation0 = 0,
+    kRotation90 = 1,
+    kRotation180 = 2,
+    kRotation270 = 3
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
diff --git a/trunk/src/modules/video_render/main/source/Android.mk b/trunk/src/modules/video_render/main/source/Android.mk
new file mode 100644
index 0000000..73eec93
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android.mk
@@ -0,0 +1,53 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := libwebrtc_video_render
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    incoming_video_stream.cc \
+    video_render_frames.cc \
+    video_render_impl.cc \
+    external/video_render_external_impl.cc \
+    Android/video_render_android_impl.cc \
+    Android/video_render_android_native_opengl2.cc \
+    Android/video_render_android_surface_view.cc \
+    Android/video_render_opengles20.cc 
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH) \
+    $(LOCAL_PATH)/Android \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../audio_coding/main/interface \
+    $(LOCAL_PATH)/../../../interface \
+    $(LOCAL_PATH)/../../../utility/interface \
+    $(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface 
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java b/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
new file mode 100644
index 0000000..fc30607
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
@@ -0,0 +1,263 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.app.ActivityManager;
+import android.content.Context;
+import android.content.pm.ConfigurationInfo;
+import android.opengl.GLSurfaceView;
+import android.util.Log;
+
+public class ViEAndroidGLES20 extends GLSurfaceView
+    implements GLSurfaceView.Renderer {
+  // True if onSurfaceCreated has been called.
+  private boolean surfaceCreated = false;
+  private boolean openGLCreated = false;
+  // True if NativeFunctionsRegistered has been called.
+  private boolean nativeFunctionsRegisted = false;
+  private ReentrantLock nativeFunctionLock = new ReentrantLock();
+  // Address of Native object that will do the drawing.
+  private long nativeObject = 0;
+  private int viewWidth = 0;
+  private int viewHeight = 0;
+
+  public static boolean UseOpenGL2(Object renderWindow) {
+    return ViEAndroidGLES20.class.isInstance(renderWindow);
+  }
+
+  public ViEAndroidGLES20(Context context) {
+    super(context);
+
+    // Setup the context factory for 2.0 rendering.
+    // See ContextFactory class definition below
+    setEGLContextFactory(new ContextFactory());
+
+    // We need to choose an EGLConfig that matches the format of
+    // our surface exactly. This is going to be done in our
+    // custom config chooser. See ConfigChooser class definition below
+    // Use RGB 565 without an alpha channel.
+    setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) );
+
+    this.setRenderer(this);
+    this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+  }
+
+  // IsSupported
+  // Return true if this device support Open GL ES 2.0 rendering.
+  public static boolean IsSupported(Context context) {
+    ActivityManager am =
+        (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+    ConfigurationInfo info = am.getDeviceConfigurationInfo();
+    if(info.reqGlEsVersion >= 0x20000) {
+      // Open GL ES 2.0 is supported.
+      return true;
+    }
+    return false;
+  }
+
+  public void onDrawFrame(GL10 gl) {
+    nativeFunctionLock.lock();
+    if(!nativeFunctionsRegisted || !surfaceCreated) {
+      nativeFunctionLock.unlock();
+      return;
+    }
+
+    if(!openGLCreated) {
+      if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
+        nativeFunctionLock.unlock(); return; // BUG FIX: must release lock on this failure path (was leaked, deadlocking later calls)
+      }
+      openGLCreated = true; // Created OpenGL successfully
+    }
+    DrawNative(nativeObject); // Draw the new frame
+    nativeFunctionLock.unlock();
+  }
+
+  public void onSurfaceChanged(GL10 gl, int width, int height) {
+    surfaceCreated = true;
+    viewWidth = width;
+    viewHeight = height;
+
+    nativeFunctionLock.lock();
+    if(nativeFunctionsRegisted) {
+      if(CreateOpenGLNative(nativeObject,width,height) == 0)
+        openGLCreated = true;
+    }
+    nativeFunctionLock.unlock();
+  }
+
+  public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+  }
+
+  public void RegisterNativeObject(long nativeObject) {
+    nativeFunctionLock.lock();
+    this.nativeObject = nativeObject; // BUG FIX: was 'nativeObject = nativeObject;', a parameter self-assignment that left the field at 0
+    nativeFunctionsRegisted = true;
+    nativeFunctionLock.unlock();
+  }
+
+  public void DeRegisterNativeObject() {
+    nativeFunctionLock.lock();
+    nativeFunctionsRegisted = false;
+    openGLCreated = false;
+    nativeObject = 0;
+    nativeFunctionLock.unlock();
+  }
+
+  public void ReDraw() {
+    if(surfaceCreated) {
+      // Request the renderer to redraw using the render thread context.
+      this.requestRender();
+    }
+  }
+
+  // EGL Context factory used for creating EGL 2.0 context
+  // on Android 2.1 (and later,
+  // though there are simpler ways in 2.2)
+  // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
+  private static class ContextFactory
+      implements GLSurfaceView.EGLContextFactory {
+    private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+    public EGLContext createContext(EGL10 egl,
+                                    EGLDisplay display,
+                                    EGLConfig eglConfig) {
+      //checkEglError("Before eglCreateContext", egl);
+      int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+      // Create an Open GL ES 2.0 context
+      EGLContext context = egl.eglCreateContext(display,
+                                                eglConfig,
+                                                EGL10.EGL_NO_CONTEXT,
+                                                attrib_list);
+      checkEglError("ContextFactory eglCreateContext", egl);
+      return context;
+    }
+
+    public void destroyContext(EGL10 egl, EGLDisplay display,
+                               EGLContext context) {
+      egl.eglDestroyContext(display, context);
+    }
+  }
+
+  private static void checkEglError(String prompt, EGL10 egl) {
+    int error;
+    while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
+      Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
+    }
+  }
+
+  // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni
+  private static class ConfigChooser
+      implements GLSurfaceView.EGLConfigChooser {
+
+    public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
+      mRedSize = r;
+      mGreenSize = g;
+      mBlueSize = b;
+      mAlphaSize = a;
+      mDepthSize = depth;
+      mStencilSize = stencil;
+    }
+
+    // This EGL config specification is used to specify 2.0 rendering.
+    // We use a minimum size of 4 bits for red/green/blue, but will
+    // perform actual matching in chooseConfig() below.
+    private static int EGL_OPENGL_ES2_BIT = 4;
+    private static int[] s_configAttribs2 =
+    {
+      EGL10.EGL_RED_SIZE, 4,
+      EGL10.EGL_GREEN_SIZE, 4,
+      EGL10.EGL_BLUE_SIZE, 4,
+      EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+      EGL10.EGL_NONE
+    };
+
+    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+
+      // Get the number of minimally matching EGL configurations
+      int[] num_config = new int[1];
+      egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
+
+      int numConfigs = num_config[0];
+
+      if (numConfigs <= 0) {
+        throw new IllegalArgumentException("No configs match configSpec");
+      }
+
+      // Allocate then read the array of minimally matching EGL configs
+      EGLConfig[] configs = new EGLConfig[numConfigs];
+      egl.eglChooseConfig(display, s_configAttribs2, configs,
+                          numConfigs, num_config);
+
+      // Now return the "best" one
+      return chooseConfig(egl, display, configs);
+    }
+
+    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+                                  EGLConfig[] configs) { // Selects a config with an exact RGBA match that meets the depth/stencil minimums.
+      for(EGLConfig config : configs) {
+        int d = findConfigAttrib(egl, display, config,
+                                 EGL10.EGL_DEPTH_SIZE, 0);
+        int s = findConfigAttrib(egl, display, config,
+                                 EGL10.EGL_STENCIL_SIZE, 0);
+
+        // We need at least mDepthSize and mStencilSize bits
+        if (d < mDepthSize || s < mStencilSize)
+          continue;
+
+        // We want an *exact* match for red/green/blue/alpha
+        int r = findConfigAttrib(egl, display, config,
+                                 EGL10.EGL_RED_SIZE, 0);
+        int g = findConfigAttrib(egl, display, config,
+                                 EGL10.EGL_GREEN_SIZE, 0);
+        int b = findConfigAttrib(egl, display, config,
+                                 EGL10.EGL_BLUE_SIZE, 0);
+        int a = findConfigAttrib(egl, display, config,
+                                 EGL10.EGL_ALPHA_SIZE, 0);
+
+        if (r == mRedSize && g == mGreenSize &&
+            b == mBlueSize && a == mAlphaSize)
+          return config;
+      }
+      return null; // NOTE(review): no exact match -> null; GLSurfaceView treats that as an error -- confirm callers handle it
+    }
+
+    private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+                                 EGLConfig config, int attribute,
+                                 int defaultValue) {
+
+      if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
+        return mValue[0];
+      }
+      return defaultValue;
+    }
+
+    // Subclasses can adjust these values:
+    protected int mRedSize;
+    protected int mGreenSize;
+    protected int mBlueSize;
+    protected int mAlphaSize;
+    protected int mDepthSize;
+    protected int mStencilSize;
+    private int[] mValue = new int[1];
+  }
+
+  private native int CreateOpenGLNative(long nativeObject,
+                                        int width, int height);
+  private native void DrawNative(long nativeObject);
+
+}
diff --git a/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java b/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java
new file mode 100644
index 0000000..56d5261
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import android.content.Context;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+public class ViERenderer {
+
+  // View used for local rendering that Cameras can use for Video Overlay.
+  private static SurfaceHolder g_localRenderer;
+
+  public static SurfaceView CreateRenderer(Context context) {
+    return  CreateRenderer(context,false);
+  }
+
+  public static SurfaceView CreateRenderer(Context context,
+                                           boolean useOpenGLES2) {
+    if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context))
+      return new ViEAndroidGLES20(context);
+    else
+      return new SurfaceView(context);
+  }
+
+  // Creates a SurfaceView to be used by Android Camera
+  // service to display a local preview.
+  // This needs to be used on Android prior to version 2.1
+  // in order to run the camera.
+  // Call this function before ViECapture::StartCapture.
+  // The created view needs to be added to a visible layout
+  // after a camera has been allocated
+  // (with the call ViECapture::AllocateCaptureDevice).
+  // IE.
+  // CreateLocalRenderer
+  // ViECapture::AllocateCaptureDevice
+  // LinearLayout.addview
+  // ViECapture::StartCapture
+  public static SurfaceView CreateLocalRenderer(Context context) {
+    SurfaceView localRender = new SurfaceView(context);
+    g_localRenderer = localRender.getHolder();
+    g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+    return  localRender;
+  }
+
+  public static SurfaceHolder GetLocalRenderer() {
+    return g_localRenderer;
+  }
+
+}
diff --git a/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java b/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
new file mode 100644
index 0000000..3412582
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.nio.ByteBuffer;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.SurfaceHolder.Callback;
+
+public class ViESurfaceRenderer implements Callback {
+
+  // the bitmap used for drawing.
+  private Bitmap bitmap = null;
+  private ByteBuffer byteBuffer;
+  private SurfaceHolder surfaceHolder;
+  // Rect of the source bitmap to draw
+  private Rect srcRect = new Rect();
+  // Rect of the destination canvas to draw to
+  private Rect dstRect = new Rect();
+  private int  dstHeight = 0;
+  private int  dstWidth = 0;
+  private float dstTopScale = 0;
+  private float dstBottomScale = 1;
+  private float dstLeftScale = 0;
+  private float dstRightScale = 1;
+
+  public  ViESurfaceRenderer(SurfaceView view) {
+    surfaceHolder = view.getHolder();
+    if(surfaceHolder == null)
+      return;
+
+    Canvas canvas = surfaceHolder.lockCanvas();
+    if(canvas != null) {
+      Rect dst =surfaceHolder.getSurfaceFrame();
+      if(dst != null) {
+        dstRect = dst;
+        dstHeight =dstRect.bottom-dstRect.top;
+        dstWidth = dstRect.right-dstRect.left;
+      }
+      surfaceHolder.unlockCanvasAndPost(canvas);
+    }
+
+    surfaceHolder.addCallback(this);
+  }
+
+  public void surfaceChanged(SurfaceHolder holder, int format,
+                             int in_width, int in_height) {
+
+    dstHeight = in_height;
+    dstWidth = in_width;
+    dstRect.left = (int)(dstLeftScale*dstWidth);
+    dstRect.top = (int)(dstTopScale*dstHeight);
+    dstRect.bottom = (int)(dstBottomScale*dstHeight);
+    dstRect.right = (int) (dstRightScale*dstWidth);
+  }
+
+  public void surfaceCreated(SurfaceHolder holder) {
+    // TODO(leozwang) Auto-generated method stub
+  }
+
+  public void surfaceDestroyed(SurfaceHolder holder) {
+    // TODO(leozwang) Auto-generated method stub
+  }
+
+  public Bitmap CreateBitmap(int width, int height) {
+    if (bitmap == null) {
+      try {
+        android.os.Process.setThreadPriority(
+            android.os.Process.THREAD_PRIORITY_DISPLAY);
+      }
+      catch (Exception e) {
+        }
+    }
+    bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
+    srcRect.left = 0;
+    srcRect.top = 0;
+    srcRect.bottom = height;
+    srcRect.right = width;
+
+    return bitmap;
+  }
+
+  public ByteBuffer CreateByteBuffer(int width, int height) {
+    if (bitmap == null) {
+      try {
+        android.os.Process
+            .setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY);
+      }
+      catch (Exception e) {
+      }
+    }
+
+    try {
+      bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
+      byteBuffer = ByteBuffer.allocateDirect(width*height*2);
+      srcRect.left = 0;
+      srcRect.top = 0;
+      srcRect.bottom = height;
+      srcRect.right = width;
+    }
+    catch (Exception ex) {
+      Log.e("*WEBRTC*", "Failed to CreateByteBuffer");
+      bitmap = null;
+      byteBuffer = null;
+    }
+
+    return byteBuffer;
+  }
+
+  public void SetCoordinates(float left, float top,
+                             float right, float bottom) {
+    dstLeftScale = left;
+    dstTopScale = top;
+    dstRightScale = right;
+    dstBottomScale = bottom;
+
+    dstRect.left = (int)(dstLeftScale*dstWidth);
+    dstRect.top = (int)(dstTopScale*dstHeight);
+    dstRect.bottom = (int)(dstBottomScale*dstHeight);
+    dstRect.right = (int) (dstRightScale*dstWidth);
+  }
+
+  public void DrawByteBuffer() {
+    if(byteBuffer == null)
+      return;
+    byteBuffer.rewind();
+    bitmap.copyPixelsFromBuffer(byteBuffer);
+    DrawBitmap();
+  }
+
+  public void DrawBitmap() {
+    if(bitmap == null)
+      return;
+
+    Canvas canvas = surfaceHolder.lockCanvas();
+    if(canvas != null) {
+      canvas.drawBitmap(bitmap, srcRect, dstRect, null);
+      surfaceHolder.unlockCanvasAndPost(canvas);
+    }
+  }
+
+}
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_android_impl.cc b/trunk/src/modules/video_render/main/source/Android/video_render_android_impl.cc
new file mode 100644
index 0000000..22fa659
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_android_impl.cc
@@ -0,0 +1,392 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+JavaVM* VideoRenderAndroid::g_jvm = NULL;
+
+WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
+
+    g_jvm = (JavaVM*) javaVM;
+
+    return 0;
+
+}
+
+VideoRenderAndroid::VideoRenderAndroid(
+                                       const WebRtc_Word32 id,
+                                       const VideoRenderType videoRenderType,
+									   void* window,
+									   const bool /*fullscreen*/):
+    _id(id),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderType(videoRenderType),
+    _ptrWindow((jobject)(window)),
+    _streamsMap(),
+    _javaShutDownFlag(false),
+    _javaShutdownEvent(*EventWrapper::Create()),
+    _javaRenderEvent(*EventWrapper::Create()),
+    _lastJavaRenderEvent(0),
+    _javaRenderJniEnv(NULL),
+    _javaRenderThread(NULL)
+{
+}
+
+VideoRenderAndroid::~VideoRenderAndroid()
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                 "VideoRenderAndroid dtor");
+
+    if (_javaRenderThread)
+        StopRender();
+
+    for (MapItem* item = _streamsMap.First(); item != NULL; item
+            = _streamsMap.Next(item))
+    { // Delete streams
+        delete static_cast<AndroidStream*> (item->GetItem());
+    }
+    delete &_javaShutdownEvent;
+    delete &_javaRenderEvent;
+    delete &_critSect;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_critSect);
+    _id = id;
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/)
+{
+    return -1;
+}
+
+VideoRenderCallback*
+VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                            const WebRtc_UWord32 zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    AndroidStream* renderStream = NULL;
+    MapItem* item = _streamsMap.Find(streamId);
+    if (item)
+    {
+        renderStream = (AndroidStream*) (item->GetItem());
+        if (NULL != renderStream)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                         "%s: Render stream already exists", __FUNCTION__);
+            return renderStream;
+        }
+    }
+
+    renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
+                                              right, bottom, *this);
+    if (renderStream)
+    {
+        _streamsMap.Insert(streamId, renderStream);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+        return NULL;
+    }
+    return renderStream;
+}
+
+WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
+                                                             const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    MapItem* item = _streamsMap.Find(streamId);
+    if (item)
+    {
+        delete (AndroidStream*) item->GetItem();
+        _streamsMap.Erase(streamId);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
+                                                                    const WebRtc_UWord32 streamId,
+                                                                    WebRtc_UWord32& zOrder,
+                                                                    float& left,
+                                                                    float& top,
+                                                                    float& right,
+                                                                    float& bottom) const
+{
+
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::StartRender()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (_javaRenderThread)
+    {
+        // StartRender is called when this stream should start render.
+        // However, StopRender is not called when the streams stop rendering. Thus the thread is only deleted when the renderer is removed.
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                     "%s, Render thread already exist", __FUNCTION__);
+        return 0;
+    }
+
+    _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
+                                                    kRealtimePriority,
+                                                    "AndroidRenderThread");
+    if (!_javaRenderThread)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No thread", __FUNCTION__);
+        return -1;
+    }
+
+    unsigned int tId = 0;
+    if (_javaRenderThread->Start(tId))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                     "%s: thread started: %u", __FUNCTION__, tId);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not start send thread", __FUNCTION__);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::StopRender()
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
+    {
+        CriticalSectionScoped cs(_critSect);
+        if (!_javaRenderThread)
+        {
+            return -1;
+        }
+        _javaShutDownFlag = true;
+        _javaRenderEvent.Set();
+    }
+
+    _javaShutdownEvent.Wait(3000);
+    CriticalSectionScoped cs(_critSect);
+    _javaRenderThread->SetNotAlive();
+    if (_javaRenderThread->Stop())
+    {
+        delete _javaRenderThread;
+        _javaRenderThread = NULL;
+    }
+    else
+    {
+        assert(false);
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s: Not able to stop thread, leaking", __FUNCTION__);
+        _javaRenderThread = NULL;
+    }
+    return 0;
+}
+
+void VideoRenderAndroid::ReDraw()
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) // Allow redraw if it was more than 20ms since last.
+    {
+        _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
+        _javaRenderEvent.Set();
+    }
+}
+
+bool VideoRenderAndroid::JavaRenderThreadFun(void* obj)
+{
+    return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
+}
+
+bool VideoRenderAndroid::JavaRenderThreadProcess()
+{
+    _javaRenderEvent.Wait(1000);
+
+    CriticalSectionScoped cs(_critSect);
+    if (!_javaRenderJniEnv)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !_javaRenderJniEnv)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, _javaRenderJniEnv);
+            return false;
+        }
+    }
+
+    for (MapItem* item = _streamsMap.First(); item != NULL; item
+            = _streamsMap.Next(item))
+    {
+        static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
+                                                                    _javaRenderJniEnv);
+    }
+
+    if (_javaShutDownFlag)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                         "%s: Java thread detached", __FUNCTION__);
+        }
+        _javaRenderJniEnv = NULL; // BUG FIX: was 'false' (bool assigned to JNIEnv*); matches the ctor initializer
+        _javaShutDownFlag = false;
+        _javaShutdownEvent.Set();
+        return false; // Do not run this thread again.
+    }
+    return true;
+}
+
+VideoRenderType VideoRenderAndroid::RenderType()
+{
+    return _renderType;
+}
+
+RawVideoType VideoRenderAndroid::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderAndroid::FullScreen()
+{
+    return false;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
+                                                    WebRtc_UWord64& /*totalGraphicsMemory*/,
+                                                    WebRtc_UWord64& /*availableGraphicsMemory*/) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
+                                                      WebRtc_UWord32& /*screenWidth*/,
+                                                      WebRtc_UWord32& /*screenHeight*/) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
+                                                    const WebRtc_UWord32 /*streamId*/,
+                                                    const float /*left*/,
+                                                    const float /*top*/,
+                                                    const float /*right*/,
+                                                    const float /*bottom*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
+                                                    const WebRtc_UWord32 streamId,
+                                                    const unsigned int zOrder,
+                                                    const float left,
+                                                    const float top,
+                                                    const float right,
+                                                    const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetText(
+                                          const WebRtc_UWord8 textId,
+                                          const WebRtc_UWord8* text,
+                                          const WebRtc_Word32 textLength,
+                                          const WebRtc_UWord32 textColorRef,
+                                          const WebRtc_UWord32 backgroundColorRef,
+                                          const float left, const float top,
+                                          const float rigth, const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
+                                            const WebRtc_UWord8 pictureId,
+                                            const void* colorKey,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Android", __FUNCTION__);
+    return -1;
+}
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_android_impl.h b/trunk/src/modules/video_render/main/source/Android/video_render_android_impl.h
new file mode 100644
index 0000000..7058871
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_android_impl.h
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+
+#include <jni.h>
+#include "i_video_render.h"
+#include "map_wrapper.h"
+
+
+namespace webrtc {
+
+//#define ANDROID_LOG
+
+
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+
+// Base class for Android render streams: the object a module user uses to
+// send new frames to the Java renderer. Frames arrive through the inherited
+// VideoRenderCallback interface and are drawn via DeliverFrame().
+
+class AndroidStream: public VideoRenderCallback
+{
+public:
+    /*
+     * DeliverFrame is called from a thread connected to the Java VM.
+     * Used for delivering the buffered frame for rendering.
+     */
+    virtual void DeliverFrame(JNIEnv* jniEnv)=0;
+
+    virtual ~AndroidStream()
+    {
+    };
+};
+
+// Android implementation of IVideoRender. Owns the Java render thread that
+// delivers buffered frames to the platform renderer; concrete subclasses
+// create the per-stream render channels. Overlay operations (SetText,
+// SetBitmap) are not supported on Android and always fail.
+// (Fixed parameter typo in SetText declaration: rigth -> right.)
+class VideoRenderAndroid: IVideoRender
+{
+public:
+    // Stores the Java VM pointer shared by all Android renderer instances.
+    // Must be called before any renderer is created.
+    static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
+
+    VideoRenderAndroid(const WebRtc_Word32 id,
+                       const VideoRenderType videoRenderType, void* window,
+                       const bool fullscreen);
+
+    virtual ~VideoRenderAndroid();
+
+    virtual WebRtc_Word32 Init()=0;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    virtual void ReDraw();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    // NOTE(review): spelling matches the overridden interface method — do
+    // not rename here alone.
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    // Not supported on Android; always returns -1.
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    // Not supported on Android; always returns -1.
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+protected:
+    // Factory for the platform-specific per-stream render channel;
+    // implemented by each concrete renderer subclass.
+    virtual AndroidStream
+            * CreateAndroidRenderChannel(WebRtc_Word32 streamId,
+                                         WebRtc_Word32 zOrder,
+                                         const float left, const float top,
+                                         const float right, const float bottom,
+                                         VideoRenderAndroid& renderer) = 0;
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _critSect;
+    VideoRenderType _renderType;
+    jobject _ptrWindow;
+
+    // Java VM shared by all renderer instances; set via SetAndroidEnvVariables.
+    static JavaVM* g_jvm;
+
+private:
+    static bool JavaRenderThreadFun(void* obj);
+    bool JavaRenderThreadProcess();
+
+    MapWrapper _streamsMap; // Map with streams to render.
+    bool _javaShutDownFlag; // True if the _javaRenderThread thread shall be detached from the JVM.
+    EventWrapper& _javaShutdownEvent;
+    EventWrapper& _javaRenderEvent;
+    WebRtc_Word64 _lastJavaRenderEvent;
+    JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
+    ThreadWrapper* _javaRenderThread;
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc b/trunk/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc
new file mode 100644
index 0000000..a6d7133
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc
@@ -0,0 +1,495 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_native_opengl2.h"
+#include "critical_section_wrapper.h"
+#include "tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+// Binds the renderer to the Java render surface |window|. The global JNI
+// references (_javaRenderObj, _javaRenderClass) are created later in Init().
+AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
+                                                                   const WebRtc_Word32 id,
+                                                                   const VideoRenderType videoRenderType,
+                                                                   void* window,
+                                                                   const bool fullscreen) :
+    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
+    _javaRenderObj(NULL),
+    _javaRenderClass(NULL)
+{
+}
+
+// Asks the Java ViEAndroidGLES20 class whether |window| supports OpenGL ES
+// 2.0 rendering. May temporarily attach the calling thread to the JVM.
+// Fix: the error paths previously returned without detaching an attached
+// thread (JVM attachment leak) and never released the local class reference.
+bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window)
+{
+    if (!g_jvm)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                     "RendererAndroid():UseOpenGL No JVM set.");
+        return false;
+    }
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // The thread is not yet attached to the JVM; attach it to get an env.
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideoRenderer,
+                         -1,
+                         "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
+                         res, env);
+            return false;
+        }
+        isAttached = true;
+    }
+
+    jboolean res = JNI_FALSE;
+    // get the renderer class
+    jclass javaRenderClassLocal =
+            env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+    if (!javaRenderClassLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                     "%s: could not find ViEAndroidRenderer class",
+                     __FUNCTION__);
+    }
+    else
+    {
+        // get the method ID for UseOpenGL
+        jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
+                                                        "UseOpenGL2",
+                                                        "(Ljava/lang/Object;)Z");
+        if (cidUseOpenGL == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                         "%s: could not get UseOpenGL ID", __FUNCTION__);
+        }
+        else
+        {
+            res = env->CallStaticBooleanMethod(javaRenderClassLocal,
+                                               cidUseOpenGL,
+                                               (jobject) window);
+        }
+        // Release the local class reference now that we are done with it.
+        env->DeleteLocalRef(javaRenderClassLocal);
+    }
+
+    // Detach this thread if it was attached above (also on error paths).
+    if (isAttached)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+    return res;
+}
+
+// Releases the global JNI references created in Init(), attaching the
+// current thread to the JVM if necessary.
+// Fix: when AttachCurrentThread failed, env was set to NULL and then
+// dereferenced by DeleteGlobalRef — a guaranteed crash. Guard with if (env).
+AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                 "AndroidNativeOpenGl2Renderer dtor");
+    if (g_jvm)
+    {
+        // get the JNI env for this thread
+        bool isAttached = false;
+        JNIEnv* env = NULL;
+        if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // Not attached yet; attach this thread to the JVM to get an env.
+            jint res = g_jvm->AttachCurrentThread(&env, NULL);
+            if ((res < 0) || !env)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                             "%s: Could not attach thread to JVM (%d, %p)",
+                             __FUNCTION__, res, env);
+                env = NULL;
+            }
+            else
+            {
+                isAttached = true;
+            }
+        }
+
+        // Only touch JNI if we actually have a valid env.
+        if (env)
+        {
+            env->DeleteGlobalRef(_javaRenderObj);
+            env->DeleteGlobalRef(_javaRenderClass);
+        }
+
+        if (isAttached)
+        {
+            if (g_jvm->DetachCurrentThread() < 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                             "%s: Could not detach thread from JVM",
+                             __FUNCTION__);
+            }
+        }
+    }
+}
+
+// Resolves the Java ViEAndroidGLES20 class and creates global JNI references
+// to it and to the render surface (_ptrWindow). Must run before rendering.
+// Returns 0 on success, -1 on any failure.
+WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    if (!g_jvm)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "(%s): Not a valid Java VM pointer.", __FUNCTION__);
+        return -1;
+    }
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "(%s): No window have been provided.", __FUNCTION__);
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // NOTE(review): the early returns below exit while the thread may still
+    // be attached to the JVM (attachment leak) — confirm and fix upstream.
+    // get the ViEAndroidGLES20 class
+    jclass javaRenderClassLocal =
+            env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+    if (!javaRenderClassLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not find ViEAndroidGLES20", __FUNCTION__);
+        return -1;
+    }
+
+    // create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
+    _javaRenderClass
+            = reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
+    if (!_javaRenderClass)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not create Java SurfaceHolder class reference",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local class ref, we only use the global ref
+    env->DeleteLocalRef(javaRenderClassLocal);
+
+    // create a reference to the object (to tell JNI that we are referencing it
+    // after this function has returned)
+    _javaRenderObj = env->NewGlobalRef(_ptrWindow);
+    if (!_javaRenderObj)
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: could not create Java SurfaceRender object reference",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
+                 __FUNCTION__);
+    return 0;
+
+}
+// Creates and initializes an OpenGL ES 2.0 render channel for |streamId|.
+// Returns NULL (after freeing the channel) if initialization fails.
+AndroidStream*
+AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
+                                                             WebRtc_Word32 streamId,
+                                                             WebRtc_Word32 zOrder,
+                                                             const float left,
+                                                             const float top,
+                                                             const float right,
+                                                             const float bottom,
+                                                             VideoRenderAndroid& renderer)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
+                 __FUNCTION__, streamId);
+    AndroidNativeOpenGl2Channel* renderChannel =
+            new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
+                                            _javaRenderObj);
+    if (renderChannel == NULL ||
+        renderChannel->Init(zOrder, left, top, right, bottom) != 0)
+    {
+        // Init failed: release the half-constructed channel.
+        delete renderChannel;
+        return NULL;
+    }
+    return renderChannel;
+}
+
+// Per-stream channel. |javaRenderObj| is a global reference owned by the
+// renderer; the channel uses it but never deletes it. The JNI method IDs
+// are resolved later in Init().
+AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,
+                                                                 JavaVM* jvm,
+                                                                 VideoRenderAndroid& renderer,jobject javaRenderObj):
+    _id(streamId),
+    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
+    _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
+    _openGLRenderer(streamId)
+{
+
+}
+// Frees the critical section and, when possible, calls the Java object's
+// DeRegisterNativeObject so it stops invoking the native callbacks with a
+// stale pointer. Attaches the current thread to the JVM if needed.
+AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                 "AndroidNativeOpenGl2Channel dtor");
+    delete &_renderCritSect;
+    if (_jvm)
+    {
+        // get the JNI env for this thread
+        bool isAttached = false;
+        JNIEnv* env = NULL;
+        if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // try to attach the thread and get the env
+            // Attach this thread to JVM
+            jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+            // Get the JNI env for this thread
+            if ((res < 0) || !env)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                             "%s: Could not attach thread to JVM (%d, %p)",
+                             __FUNCTION__, res, env);
+                env = NULL; // Attach failed: skip the JNI call below.
+            }
+            else
+            {
+                isAttached = true;
+            }
+        }
+        // Only deregister if we have an env and Init() resolved the method ID.
+        if (env && _deRegisterNativeCID)
+        {
+            env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
+        }
+
+        if (isAttached)
+        {
+            if (_jvm->DetachCurrentThread() < 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                             "%s: Could not detach thread from JVM",
+                             __FUNCTION__);
+            }
+        }
+    }
+}
+
+// Resolves the Java-side method IDs (ReDraw, RegisterNativeObject,
+// DeRegisterNativeObject), registers the native callbacks DrawNative and
+// CreateOpenGLNative on the ViEAndroidGLES20 class, hands this channel's
+// pointer to the Java object, and sets the GL viewport coordinates.
+// Returns 0 on success, -1 on any failure.
+WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
+                                                    const float left,
+                                                    const float top,
+                                                    const float right,
+                                                    const float bottom)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
+    if (!_jvm)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Not a valid Java VM pointer", __FUNCTION__);
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // NOTE(review): the early error returns below exit while the thread may
+    // still be attached to the JVM, and the local class ref is never
+    // released — confirm and fix upstream.
+    jclass javaRenderClass =
+            env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+    if (!javaRenderClass)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not find ViESurfaceRenderer", __FUNCTION__);
+        return -1;
+    }
+
+    // get the method ID for the ReDraw function
+    _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
+    if (_redrawCid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get ReDraw ID", __FUNCTION__);
+        return -1;
+    }
+
+    _registerNativeCID = env->GetMethodID(javaRenderClass,
+                                          "RegisterNativeObject", "(J)V");
+    if (_registerNativeCID == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get RegisterNativeObject ID", __FUNCTION__);
+        return -1;
+    }
+
+    _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
+                                            "DeRegisterNativeObject", "()V");
+    if (_deRegisterNativeCID == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get DeRegisterNativeObject ID",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // Aggregate initializer relies on C brace elision: each triple of
+    // (name, signature, fnPtr) fills one JNINativeMethod entry.
+    JNINativeMethod
+            nativeFunctions[2] = {
+                    "DrawNative",
+                    "(J)V",
+                    (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic,
+                    "CreateOpenGLNative",
+                    "(JII)I",
+                    (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic };
+    // RegisterNatives returns 0 (JNI_OK) on success.
+    if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
+                     "%s: Registered native functions", __FUNCTION__);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                     "%s: Failed to register native functions", __FUNCTION__);
+        return -1;
+    }
+
+    // Hand the channel pointer to Java so the callbacks can find us.
+    env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0)
+    {
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
+    return 0;
+}
+
+// VideoRenderCallback: swaps the incoming frame into the channel's buffer
+// under the render lock, then asks the renderer to schedule a redraw. The
+// actual GL drawing happens later on the Java GL thread (DeliverFrame /
+// DrawNative).
+WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(
+    const WebRtc_UWord32 /*streamId*/,
+    VideoFrame& videoFrame)
+{
+    _renderCritSect.Enter();
+    _bufferToRender.SwapFrame(videoFrame);
+    _renderCritSect.Leave();
+    _renderer.ReDraw();
+    return 0;
+}
+
+/* Implements AndroidStream.
+ * Runs on the Java render thread: asks the Java render object to redraw,
+ * which calls back into DrawNativeStatic to render _bufferToRender.
+ */
+void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv)
+{
+    jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
+}
+
+/*
+ * JNI entry point DrawNative(J)V. Called from the Java GL render thread when
+ * a frame should be drawn. |context| is the channel pointer previously
+ * registered through RegisterNativeObject.
+ */
+void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic
+(JNIEnv * env, jobject, jlong context)
+{
+    reinterpret_cast<AndroidNativeOpenGl2Channel*>(context)->DrawNative();
+}
+
+// Renders the most recently buffered frame with the GL ES 2.0 renderer.
+// NOTE(review): _bufferToRender is read here without holding
+// _renderCritSect, while RenderFrame swaps it under the lock — confirm
+// whether this race is acceptable.
+void AndroidNativeOpenGl2Channel::DrawNative()
+{
+    _openGLRenderer.Render(_bufferToRender);
+}
+/*
+ * JNI entry point CreateOpenGLNative(JII)I. Called from the Java GL render
+ * thread once the GLSurfaceView has created a surface of |width| x |height|.
+ * |context| is the channel pointer registered through RegisterNativeObject.
+ */
+jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
+    JNIEnv* env, jobject, jlong context, jint width, jint height)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
+    AndroidNativeOpenGl2Channel* channel =
+            reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
+    return channel->CreateOpenGLNative(width, height);
+}
+
+// Configures the GL ES 2.0 renderer for the new surface dimensions; returns
+// the Setup() result (0 on success).
+jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(int width, int height)
+{
+
+    return _openGLRenderer.Setup(width, height);
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h b/trunk/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h
new file mode 100644
index 0000000..54532a6
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+
+#include <jni.h>
+
+#include "video_render_defines.h"
+
+#include "video_render_android_impl.h"
+#include "video_render_opengles20.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// One render channel per video stream for the OpenGL ES 2.0 Android
+// renderer. Receives frames via VideoRenderCallback, buffers the latest one,
+// and draws it from the Java GL thread through JNI callbacks.
+class AndroidNativeOpenGl2Channel: public AndroidStream
+{
+
+public:
+    AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj);
+    ~AndroidNativeOpenGl2Channel();
+
+    // Resolves JNI method IDs, registers the native callbacks on the Java
+    // class, and sets the GL viewport coordinates. Returns 0 on success.
+    WebRtc_Word32 Init(WebRtc_Word32 zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    //Implement VideoRenderCallback
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
+
+    //Implements AndroidStream
+    virtual void DeliverFrame(JNIEnv* jniEnv);
+
+private:
+    // JNI callback: Java surface created; forwards to CreateOpenGLNative.
+    static jint CreateOpenGLNativeStatic(JNIEnv * env,jobject, jlong context, jint width, jint height);
+    jint CreateOpenGLNative(int width, int height);
+
+    // JNI callback: Java requests a redraw; forwards to DrawNative.
+    static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
+    void DrawNative();
+    WebRtc_UWord32 _id;
+    CriticalSectionWrapper& _renderCritSect; // Guards _bufferToRender in RenderFrame.
+
+    VideoFrame _bufferToRender; // Latest frame, swapped in by RenderFrame.
+    VideoRenderAndroid& _renderer;
+    JavaVM*     _jvm;
+    jobject     _javaRenderObj; // Global ref owned by the renderer.
+
+    jmethodID      _redrawCid;
+    jmethodID      _registerNativeCID;
+    jmethodID      _deRegisterNativeCID;
+    VideoRenderOpenGles20 _openGLRenderer;
+
+};
+
+
+// OpenGL ES 2.0 variant of the Android renderer. Creates
+// AndroidNativeOpenGl2Channel instances for each incoming stream and holds
+// the global JNI references to the Java render object and class.
+class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid
+{
+public:
+    AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
+                                   const VideoRenderType videoRenderType,
+                                   void* window,
+                                   const bool fullscreen);
+
+	~AndroidNativeOpenGl2Renderer();
+	// Asks the Java side whether |window| supports GL ES 2.0 rendering.
+	static bool UseOpenGL2(void* window);
+
+	// Creates the global JNI references; must succeed before rendering.
+	WebRtc_Word32 Init();
+	virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
+                    WebRtc_Word32 zOrder,
+                    const float left,
+                    const float top,
+                    const float right,
+                    const float bottom,
+                    VideoRenderAndroid& renderer);
+
+private:
+    jobject 	_javaRenderObj;   // Global ref to the render surface.
+    jclass		_javaRenderClass; // Global ref to ViEAndroidGLES20.
+        
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_android_surface_view.cc b/trunk/src/modules/video_render/main/source/Android/video_render_android_surface_view.cc
new file mode 100644
index 0000000..253d831
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_android_surface_view.cc
@@ -0,0 +1,470 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_surface_view.h"
+#include "critical_section_wrapper.h"
+#include "common_video/libyuv/include/libyuv.h"
+#include "tick_util.h"
+#ifdef ANDROID_NDK_8_OR_ABOVE
+    #include <android/bitmap.h>
+#endif
+
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
+                                const VideoRenderType videoRenderType,
+                                void* window,
+                                const bool fullscreen)
+:
+    VideoRenderAndroid(id,videoRenderType,window,fullscreen),
+	_javaRenderObj(NULL),
+	_javaRenderClass(NULL)
+{
+}
+
+AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewRenderer dtor");
+    if(g_jvm)
+    {
+        // get the JNI env for this thread
+        bool isAttached = false;
+        JNIEnv* env = NULL;
+        if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // try to attach the thread and get the env
+            // Attach this thread to JVM
+            jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+            // Get the JNI env for this thread
+            if ((res < 0) || !env)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
+                env=NULL;
+            }
+            else
+            {
+                isAttached = true;
+            }
+        }
+        if (env) env->DeleteGlobalRef(_javaRenderObj);
+        if (env) env->DeleteGlobalRef(_javaRenderClass);
+
+        if (isAttached)
+        {
+            if (g_jvm->DetachCurrentThread() < 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
+            }
+        }
+    }
+}
+
+
+WebRtc_Word32 
+AndroidSurfaceViewRenderer::Init()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    if (!g_jvm)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "(%s): Not a valid Java VM pointer.", __FUNCTION__);
+        return -1;
+    }
+    if(!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,  "(%s): No window have been provided.", __FUNCTION__);
+        return -1;
+    }
+    
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the ViESurfaceRender class
+	jclass javaRenderClassLocal = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+	if (!javaRenderClassLocal)
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not find ViESurfaceRenderer", __FUNCTION__);
+		return -1;
+	}
+
+	// create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
+	_javaRenderClass = reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
+	if (!_javaRenderClass)
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not create Java ViESurfaceRenderer class reference", __FUNCTION__);
+		return -1;
+	}
+
+        // Delete local class ref, we only use the global ref
+	env->DeleteLocalRef(javaRenderClassLocal);
+
+	// get the method ID for the constructor
+	jmethodID cid = env->GetMethodID(_javaRenderClass, "<init>", "(Landroid/view/SurfaceView;)V");
+	if (cid == NULL)
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not get constructor ID", __FUNCTION__);
+		return -1; /* exception thrown */
+	}
+
+    // construct the object
+    jobject javaRenderObjLocal = env->NewObject(_javaRenderClass, cid, _ptrWindow);
+    if (!javaRenderObjLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not create Java Render", __FUNCTION__);
+        return -1;
+    }
+
+    // create a reference to the object (to tell JNI that we are referencing it
+    // after this function has returned)
+    _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
+    if (!_javaRenderObj)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not create Java SurfaceRender object reference", __FUNCTION__);
+        return -1;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (g_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
+    return 0;
+
+}
+AndroidStream*
+AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(WebRtc_Word32 streamId,
+                WebRtc_Word32 zOrder,
+                const float left,
+                const float top,
+                const float right,
+                const float bottom,
+                VideoRenderAndroid& renderer)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", __FUNCTION__,streamId);
+    AndroidSurfaceViewChannel* stream=new AndroidSurfaceViewChannel(streamId,g_jvm,renderer,_javaRenderObj);
+    if(stream && stream->Init(zOrder,left,top,right,bottom)==0)
+        return stream;
+    else
+        delete stream;
+    return NULL;
+}
+
+
+
+
+
+
+AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj)
+:
+_id(streamId),
+_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+_renderer(renderer),
+_jvm(jvm),
+_javaRenderObj(javaRenderObj),
+_bitmapWidth(0),
+_bitmapHeight(0)
+{
+
+}
+AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewChannel dtor");
+    delete &_renderCritSect;
+    if(_jvm)
+    {
+        // get the JNI env for this thread
+        bool isAttached = false;
+        JNIEnv* env = NULL;
+        if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
+        {
+            // try to attach the thread and get the env
+            // Attach this thread to JVM
+            jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+            // Get the JNI env for this thread
+            if ((res < 0) || !env)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
+                env=NULL;
+            }
+            else
+            {
+                isAttached = true;
+            }
+        }
+
+#ifdef ANDROID_NDK_8_OR_ABOVE
+        if (env) env->DeleteGlobalRef(_javaBitmapObj);
+#else
+        if (env) env->DeleteGlobalRef(_javaByteBufferObj);
+#endif
+        if (isAttached)
+        {
+            if (_jvm->DetachCurrentThread() < 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
+            }
+        }
+    }
+}
+
+WebRtc_Word32
+AndroidSurfaceViewChannel::Init(WebRtc_Word32 /*zOrder*/,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel", __FUNCTION__);
+    if (!_jvm)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer,_id,  "%s: Not a valid Java VM pointer", __FUNCTION__);
+        return -1;
+    }
+
+    if((top>1 || top<0) || (right>1 || right<0) || (bottom>1 || bottom<0) || (left>1 || left<0))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Wrong coordinates",
+                               __FUNCTION__);
+        return -1;
+    }
+
+
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    jclass javaRenderClass = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+    if (!javaRenderClass)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not find ViESurfaceRenderer", __FUNCTION__);
+        return -1;
+    }
+#ifdef ANDROID_NDK_8_OR_ABOVE
+    // get the method ID for the CreateBitmap
+    _createBitmapCid = env->GetMethodID(javaRenderClass, "CreateBitmap", "(II)Landroid/graphics/Bitmap;");
+    if (_createBitmapCid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not get CreateBitmap ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+    // get the method ID for the DrawBitmap function
+    _drawBitmapCid = env->GetMethodID(javaRenderClass, "DrawBitmap", "()V");
+    if (_drawBitmapCid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not get DrawBitmap ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+#else
+    // get the method ID for the CreateIntArray
+    _createByteBufferCid = env->GetMethodID(javaRenderClass, "CreateByteBuffer", "(II)Ljava/nio/ByteBuffer;");
+    if (_createByteBufferCid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not get CreateByteBuffer ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+
+    // get the method ID for the DrawByteBuffer function
+    _drawByteBufferCid = env->GetMethodID(javaRenderClass, "DrawByteBuffer", "()V");
+    if (_drawByteBufferCid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not get DrawByteBuffer ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+#endif
+
+    // get the method ID for the SetCoordinates function
+    _setCoordinatesCid = env->GetMethodID(javaRenderClass, "SetCoordinates", "(FFFF)V");
+    if (_setCoordinatesCid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not get SetCoordinates ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+
+    env->CallVoidMethod(_javaRenderObj,_setCoordinatesCid,left,top,right,bottom);
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_jvm->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel done", __FUNCTION__);
+    return 0;
+}
+
+
+WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
+{
+ //   WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
+    _renderCritSect.Enter();
+    _bufferToRender.SwapFrame(videoFrame);
+    _renderCritSect.Leave();
+    _renderer.ReDraw();
+    return 0;
+}
+
+
+/*Implements AndroidStream
+ * Calls the Java object and render the buffer in _bufferToRender
+ */
+void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
+  _renderCritSect.Enter();
+
+#ifdef ANDROID_NDK_8_OR_ABOVE
+  if (_bitmapWidth != _bufferToRender.Width() ||
+    _bitmapHeight != _bufferToRender.Height()) {
+    // Create the bitmap to write to
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
+                 "%u", __FUNCTION__, _bufferToRender.Width(),
+                 _bufferToRender.Height());
+    if (_javaBitmapObj) {
+      jniEnv->DeleteGlobalRef(_javaBitmapObj);
+     _javaBitmapObj = NULL;
+    }
+    jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
+                                                  _createBitmapCid,
+                                                  _bufferToRender.Width(),
+                                                  _bufferToRender.Height());
+    _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
+     if (!_javaBitmapObj) {
+       WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
+                    "create Java Bitmap object reference", __FUNCTION__);
+       _renderCritSect.Leave();
+       return;
+    } else {
+      _bitmapWidth=_bufferToRender.Width();
+      _bitmapHeight=_bufferToRender.Height();
+    }
+  }
+  void* pixels;
+  if (_javaBitmapObj &&
+      AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap",
+                 __FUNCTION__);
+    // Convert I420 straight into the Java bitmap.
+    int ret = ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
+                                  (unsigned char* ) pixels,
+                                  _bitmapWidth, _bitmapHeight);
+    if (ret < 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion "
+                   "failed.", __FUNCTION__);
+    }
+
+    AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
+    // Draw the Surface.
+    jniEnv->CallVoidMethod(_javaRenderObj,_drawBitmapCid);
+
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock "
+                 "bitmap", __FUNCTION__);
+  }
+  _renderCritSect.Leave();
+
+#else
+  if (_bitmapWidth != _bufferToRender.Width() ||
+      _bitmapHeight != _bufferToRender.Height()) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
+                 "%d",__FUNCTION__,
+                 _bufferToRender.Width(), _bufferToRender.Height());
+    if (_javaByteBufferObj) {
+        jniEnv->DeleteGlobalRef(_javaByteBufferObj);
+        _javaByteBufferObj = NULL;
+        _directBuffer = NULL;
+    }
+    jobject javaByteBufferObj =
+      jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
+                               _bufferToRender.Width(),
+                               _bufferToRender.Height());
+    _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
+    if (!_javaByteBufferObj) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not "
+                   "create Java ByteBuffer object reference", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    } else {
+      _directBuffer = static_cast<unsigned char*>
+          (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
+      _bitmapWidth = _bufferToRender.Width();
+      _bitmapHeight = _bufferToRender.Height();
+    }
+  }
+
+  if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
+    // Android requires a vertically flipped image compared to std convert.
+    // This is done by giving a negative height input.
+    const int conversionResult =
+      ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
+                          _directBuffer, _bitmapWidth, -_bitmapHeight);
+    if (conversionResult < 0)  {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
+                   " failed.", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    }
+  }
+  _renderCritSect.Leave();
+  // Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
+#endif
+}
+
+}  // namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_android_surface_view.h b/trunk/src/modules/video_render/main/source/Android/video_render_android_surface_view.h
new file mode 100644
index 0000000..f55e60b
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_android_surface_view.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+
+#include <jni.h>
+
+#include "video_render_defines.h"
+
+#include "video_render_android_impl.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+
+class AndroidSurfaceViewChannel: public AndroidStream
+{
+
+public:
+    AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
+                                  JavaVM* jvm,
+                                  VideoRenderAndroid& renderer,
+                                  jobject javaRenderObj);
+    ~AndroidSurfaceViewChannel();
+
+    WebRtc_Word32 Init(WebRtc_Word32 zOrder,
+                       const float left,
+                       const float top,
+                       const float right,
+                       const float bottom);
+
+    //Implement VideoRenderCallback
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    //Implements AndroidStream
+    virtual void DeliverFrame(JNIEnv* jniEnv);
+
+private:
+    WebRtc_UWord32 _id;
+    CriticalSectionWrapper& _renderCritSect;
+
+    VideoFrame _bufferToRender;
+    VideoRenderAndroid& _renderer;
+    JavaVM* _jvm;
+    jobject _javaRenderObj;
+
+#ifdef ANDROID_NDK_8_OR_ABOVE
+    jclass _javaBitmapClass;
+    jmethodID _createBitmapCid;
+    jobject _javaBitmapObj;
+    jmethodID _drawBitmapCid;
+#else
+    jobject _javaByteBufferObj;
+    unsigned char* _directBuffer;
+    jmethodID _createByteBufferCid;
+    jmethodID _drawByteBufferCid;
+#endif
+    jmethodID _setCoordinatesCid;
+    unsigned int _bitmapWidth;
+    unsigned int _bitmapHeight;
+};
+
+class AndroidSurfaceViewRenderer: private VideoRenderAndroid
+{
+public:
+    AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
+                                   const VideoRenderType videoRenderType,
+                                   void* window,
+                                   const bool fullscreen);
+    ~AndroidSurfaceViewRenderer();
+    WebRtc_Word32 Init();
+    virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
+                                                 WebRtc_Word32 zOrder,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom,
+                                                 VideoRenderAndroid& renderer);
+private:
+    jobject _javaRenderObj;
+    jclass _javaRenderClass;
+
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_opengles20.cc b/trunk/src/modules/video_render/main/source/Android/video_render_opengles20.cc
new file mode 100644
index 0000000..8f4e5c5
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_opengles20.cc
@@ -0,0 +1,446 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "video_render_opengles20.h"
+
+//#define ANDROID_LOG
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
+
+const char VideoRenderOpenGles20::g_vertextShader[] = {
+    "attribute vec4 aPosition;\n"
+    "attribute vec2 aTextureCoord;\n"
+    "varying vec2 vTextureCoord;\n"
+    "void main() {\n"
+    "  gl_Position = aPosition;\n"
+    "  vTextureCoord = aTextureCoord;\n"
+    "}\n" };
+
+// The fragment shader.
+// Do YUV to RGB565 conversion.
+const char VideoRenderOpenGles20::g_fragmentShader[] = {
+    "precision mediump float;\n"
+    "uniform sampler2D Ytex;\n"
+    "uniform sampler2D Utex,Vtex;\n"
+    "varying vec2 vTextureCoord;\n"
+    "void main(void) {\n"
+    "  float nx,ny,r,g,b,y,u,v;\n"
+    "  mediump vec4 txl,ux,vx;"
+    "  nx=vTextureCoord[0];\n"
+    "  ny=vTextureCoord[1];\n"
+    "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
+    "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
+    "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"
+
+    //"  y = v;\n"+
+    "  y=1.1643*(y-0.0625);\n"
+    "  u=u-0.5;\n"
+    "  v=v-0.5;\n"
+
+    "  r=y+1.5958*v;\n"
+    "  g=y-0.39173*u-0.81290*v;\n"
+    "  b=y+2.017*u;\n"
+    "  gl_FragColor=vec4(r,g,b,1.0);\n"
+    "}\n" };
+
+VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) :
+    _id(id),
+    _textureWidth(-1),
+    _textureHeight(-1)
+
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
+                 __FUNCTION__, (int) _id);
+
+    const GLfloat vertices[20] = {
+    // X, Y, Z, U, V
+        -1, -1, 0, 0, 1, // Bottom Left
+        1, -1, 0, 1, 1, //Bottom Right
+        1, 1, 0, 1, 0, //Top Right
+        -1, 1, 0, 0, 0 }; //Top Left
+
+    memcpy(_vertices, vertices, sizeof(_vertices));
+}
+
+VideoRenderOpenGles20::~VideoRenderOpenGles20()
+{
+
+}
+
+WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
+                                               WebRtc_Word32 height)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s: width %d, height %d", __FUNCTION__, (int) width,
+                 (int) height);
+
+    printGLString("Version", GL_VERSION);
+    printGLString("Vendor", GL_VENDOR);
+    printGLString("Renderer", GL_RENDERER);
+    printGLString("Extensions", GL_EXTENSIONS);
+
+    int maxTextureImageUnits[2];
+    int maxTextureSize[2];
+    glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s: number of textures %d, size %d", __FUNCTION__,
+                 (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
+
+    _program = createProgram(g_vertextShader, g_fragmentShader);
+    if (!_program)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not create program", __FUNCTION__);
+        return -1;
+    }
+
+    int positionHandle = glGetAttribLocation(_program, "aPosition");
+    checkGlError("glGetAttribLocation aPosition");
+    if (positionHandle == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not get aPosition handle", __FUNCTION__);
+        return -1;
+    }
+    int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
+    checkGlError("glGetAttribLocation aTextureCoord");
+    if (textureHandle == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not get aTextureCoord handle", __FUNCTION__);
+        return -1;
+    }
+
+    // set the vertices array in the shader
+    // _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
+    glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, 5
+            * sizeof(GLfloat), _vertices);
+    checkGlError("glVertexAttribPointer aPosition");
+
+    glEnableVertexAttribArray(positionHandle);
+    checkGlError("glEnableVertexAttribArray positionHandle");
+
+    // set the texture coordinate array in the shader
+    // _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
+    glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
+            * sizeof(GLfloat), &_vertices[3]);
+    checkGlError("glVertexAttribPointer maTextureHandle");
+    glEnableVertexAttribArray(textureHandle);
+    checkGlError("glEnableVertexAttribArray textureHandle");
+
+    glUseProgram(_program);
+    int i = glGetUniformLocation(_program, "Ytex");
+    checkGlError("glGetUniformLocation");
+    glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
+    checkGlError("glUniform1i Ytex");
+
+    i = glGetUniformLocation(_program, "Utex");
+    checkGlError("glGetUniformLocation Utex");
+    glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
+    checkGlError("glUniform1i Utex");
+
+    i = glGetUniformLocation(_program, "Vtex");
+    checkGlError("glGetUniformLocation");
+    glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
+    checkGlError("glUniform1i");
+
+    glViewport(0, 0, width, height);
+    checkGlError("glViewport");
+    return 0;
+
+}
+/*
+ * SetCoordinates
+ * Sets the coordinates where the stream shall be rendered. Values must be between 0 and 1.
+ */
+WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
+                                                        const float left,
+                                                        const float top,
+                                                        const float right,
+                                                        const float bottom)
+{
+    if ((top > 1 || top < 0) || (right > 1 || right < 0) || (bottom > 1
+            || bottom < 0) || (left > 1 || left < 0))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Wrong coordinates", __FUNCTION__);
+        return -1;
+    }
+    /*
+     // X, Y, Z, U, V
+     -1, -1, 0, 0, 1, // Bottom Left
+     1, -1, 0, 1, 1, //Bottom Right
+     1,  1, 0, 1, 0, //Top Right
+     -1,  1, 0, 0, 0 }; //Top Left
+     */
+    // Bottom Left
+    _vertices[0] = (left * 2) - 1;
+    _vertices[1] = -1 * (2 * bottom) + 1;
+    _vertices[2] = zOrder;
+
+    //Bottom Right
+    _vertices[5] = (right * 2) - 1;
+    _vertices[6] = -1 * (2 * bottom) + 1;
+    _vertices[7] = zOrder;
+
+    //Top Right
+    _vertices[10] = (right * 2) - 1;
+    _vertices[11] = -1 * (2 * top) + 1;
+    _vertices[12] = zOrder;
+
+    //Top Left
+    _vertices[15] = (left * 2) - 1;
+    _vertices[16] = -1 * (2 * top) + 1;
+    _vertices[17] = zOrder;
+
+    return 0;
+
+}
+WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender)
+{
+
+    if (frameToRender.Length() == 0)
+    {
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
+                 __FUNCTION__, (int) _id);
+
+    //glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
+    //glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
+
+    glUseProgram(_program);
+    checkGlError("glUseProgram");
+
+    if (_textureWidth != (GLsizei) frameToRender.Width() || _textureHeight
+            != (GLsizei) frameToRender.Height())
+    {
+        SetupTextures(frameToRender);
+    }
+    else
+    {
+        UpdateTextures(frameToRender);
+    }
+
+    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
+    checkGlError("glDrawArrays");
+
+    return 0;
+}
+
+GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
+                                             const char* pSource)
+{
+    GLuint shader = glCreateShader(shaderType);
+    if (shader)
+    {
+        glShaderSource(shader, 1, &pSource, NULL);
+        glCompileShader(shader);
+        GLint compiled = 0;
+        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+        if (!compiled)
+        {
+            GLint infoLen = 0;
+            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+            if (infoLen)
+            {
+                char* buf = (char*) malloc(infoLen);
+                if (buf)
+                {
+                    glGetShaderInfoLog(shader, infoLen, NULL, buf);
+                    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                                 "%s: Could not compile shader %d: %s",
+                                 __FUNCTION__, shaderType, buf);
+                    free(buf);
+                }
+                glDeleteShader(shader);
+                shader = 0;
+            }
+        }
+    }
+    return shader;
+}
+
+GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
+                                                const char* pFragmentSource)
+{
+    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
+    if (!vertexShader)
+    {
+        return 0;
+    }
+
+    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
+    if (!pixelShader)
+    {
+        return 0;
+    }
+
+    GLuint program = glCreateProgram();
+    if (program)
+    {
+        glAttachShader(program, vertexShader);
+        checkGlError("glAttachShader");
+        glAttachShader(program, pixelShader);
+        checkGlError("glAttachShader");
+        glLinkProgram(program);
+        GLint linkStatus = GL_FALSE;
+        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+        if (linkStatus != GL_TRUE)
+        {
+            GLint bufLength = 0;
+            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+            if (bufLength)
+            {
+                char* buf = (char*) malloc(bufLength);
+                if (buf)
+                {
+                    glGetProgramInfoLog(program, bufLength, NULL, buf);
+                    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                                 "%s: Could not link program: %s",
+                                 __FUNCTION__, buf);
+                    free(buf);
+                }
+            }
+            glDeleteProgram(program);
+            program = 0;
+        }
+    }
+    return program;
+}
+
+void VideoRenderOpenGles20::printGLString(const char *name, GLenum s)
+{
+    const char *v = (const char *) glGetString(s);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
+                 name, v);
+}
+
+// Drains the GL error queue after the operation named by 'op', tracing each
+// pending error code. Compiles to a no-op unless ANDROID_LOG is defined.
+void VideoRenderOpenGles20::checkGlError(const char* op)
+{
+#ifdef ANDROID_LOG
+    // glGetError() returns one queued error per call; loop until it
+    // reports GL_NO_ERROR (0).
+    for (GLint error = glGetError(); error; error
+            = glGetError())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "after %s() glError (0x%x)\n", op, error);
+    }
+#else
+    return;
+#endif
+}
+
+// Allocates the three GL_LUMINANCE textures (texture units 0/1/2 for the
+// Y, U and V planes) and uploads the first frame. The plane pointer
+// arithmetic below assumes an I420 buffer: a full-size Y plane followed by
+// quarter-size U and V planes.
+// NOTE(review): glGenTextures is called on every invocation without
+// deleting previously generated ids — confirm callers run this only once
+// per renderer (or per size change), otherwise texture objects leak.
+void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s: width %d, height %d length %u", __FUNCTION__,
+                 frameToRender.Width(), frameToRender.Height(),
+                 frameToRender.Length());
+
+    const GLsizei width = frameToRender.Width();
+    const GLsizei height = frameToRender.Height();
+
+    glGenTextures(3, _textureIds); //Generate  the Y, U and V texture
+    GLuint currentTextureId = _textureIds[0]; // Y
+    glActiveTexture( GL_TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, currentTextureId);
+
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    // Full-resolution Y plane starts at the beginning of the buffer.
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
+                 GL_LUMINANCE, GL_UNSIGNED_BYTE,
+                 (const GLvoid*) frameToRender.Buffer());
+
+    currentTextureId = _textureIds[1]; // U
+    glActiveTexture( GL_TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, currentTextureId);
+
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    // U plane: half width/height, directly after the Y plane.
+    const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
+                 GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
+
+    currentTextureId = _textureIds[2]; // V
+    glActiveTexture( GL_TEXTURE2);
+    glBindTexture(GL_TEXTURE_2D, currentTextureId);
+
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    // V plane: half width/height, after the quarter-size U plane.
+    const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
+                 GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
+    checkGlError("SetupTextures");
+
+    // Remember the allocated size so UpdateTextures can re-upload in place.
+    _textureWidth = width;
+    _textureHeight = height;
+}
+
+// Re-uploads the Y/U/V planes of 'frameToRender' into the textures created
+// by SetupTextures, using glTexSubImage2D (no reallocation). The frame is
+// assumed to match the size the textures were allocated with — presumably
+// the caller re-runs SetupTextures on a size change; TODO confirm.
+void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender)
+{
+    const GLsizei width = frameToRender.Width();
+    const GLsizei height = frameToRender.Height();
+
+    GLuint currentTextureId = _textureIds[0]; // Y
+    glActiveTexture( GL_TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, currentTextureId);
+    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
+                    GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer());
+
+    currentTextureId = _textureIds[1]; // U
+    glActiveTexture( GL_TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, currentTextureId);
+    // I420 layout: U plane follows the full-size Y plane.
+    const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
+    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
+                    GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
+
+    currentTextureId = _textureIds[2]; // V
+    glActiveTexture( GL_TEXTURE2);
+    glBindTexture(GL_TEXTURE_2D, currentTextureId);
+    // V plane follows the quarter-size U plane.
+    const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
+    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
+                    GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
+    checkGlError("UpdateTextures");
+
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/Android/video_render_opengles20.h b/trunk/src/modules/video_render/main/source/Android/video_render_opengles20.h
new file mode 100644
index 0000000..379b1e7
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/Android/video_render_opengles20.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+
+#include "video_render_defines.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace webrtc
+{
+
+// OpenGL ES 2.0 helper used by the Android video-render module: draws I420
+// frames by uploading the Y/U/V planes as three GL_LUMINANCE textures and
+// combining them in a fragment shader.
+class VideoRenderOpenGles20
+{
+public:
+    VideoRenderOpenGles20(WebRtc_Word32 id);
+    ~VideoRenderOpenGles20();
+
+    // Prepares shaders and vertex data for a surface of the given size.
+    // (Parameter-name typo "widht" fixed to "width"; declaration-only, so
+    // neither the out-of-line definition nor any caller is affected.)
+    WebRtc_Word32 Setup(WebRtc_Word32 width, WebRtc_Word32 height);
+    // Draws one frame (uploads/updates the plane textures as needed).
+    WebRtc_Word32 Render(const VideoFrame& frameToRender);
+    // Sets the destination rectangle and z-order for rendering.
+    WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
+                                 const float left,
+                                 const float top,
+                                 const float right,
+                                 const float bottom);
+
+private:
+    void printGLString(const char *name, GLenum s);
+    void checkGlError(const char* op);
+    GLuint loadShader(GLenum shaderType, const char* pSource);
+    GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
+    void SetupTextures(const VideoFrame& frameToRender);
+    void UpdateTextures(const VideoFrame& frameToRender);
+
+    WebRtc_Word32 _id;      // Trace id of the owning renderer.
+    GLuint _textureIds[3]; // Texture id of Y,U and V texture.
+    GLuint _program;       // Linked shader program handle.
+    GLuint _vPositionHandle; // Attribute location of the vertex position.
+    GLsizei _textureWidth;   // Size the plane textures were allocated with.
+    GLsizei _textureHeight;
+
+    // Vertex data (20 floats; presumably 4 vertices of x,y,z,s,t — confirm
+    // against the .cc definition).
+    GLfloat _vertices[20];
+    static const char g_indices[];
+
+    // NOTE(review): "g_vertextShader" misspells "vertex", but the static
+    // member is defined in the .cc under this name, so it is kept as-is.
+    static const char g_vertextShader[];
+    static const char g_fragmentShader[];
+
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
diff --git a/trunk/src/modules/video_render/main/source/external/video_render_external_impl.cc b/trunk/src/modules/video_render/main/source/external/video_render_external_impl.cc
new file mode 100644
index 0000000..7abb09a
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/external/video_render_external_impl.cc
@@ -0,0 +1,205 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_external_impl.h"
+
+namespace webrtc {
+
+// Stores the construction parameters. 'videoRenderType' is accepted for
+// interface symmetry but not retained. The critical section is
+// heap-allocated here and owned by this object (freed in the destructor).
+VideoRenderExternalImpl::VideoRenderExternalImpl(
+                                                 const WebRtc_Word32 id,
+                                                 const VideoRenderType videoRenderType,
+                                                 void* window,
+                                                 const bool fullscreen) :
+    _id(id), _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+            _window(window), _fullscreen(fullscreen)
+{
+}
+
+// Releases the critical section created in the constructor (it is stored
+// as a reference, hence the delete-of-address idiom).
+VideoRenderExternalImpl::~VideoRenderExternalImpl()
+{
+    delete &_critSect;
+}
+
+// No-op: the external renderer needs no initialization; always succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::Init()
+{
+    return 0;
+}
+
+// Updates the id used for tracing; always succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_critSect);
+    _id = id;
+    return 0;
+}
+
+// No-op: the new window handle is ignored (rendering happens externally).
+WebRtc_Word32 VideoRenderExternalImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// Hands this object out as the render callback for any stream; the stream
+// id and placement parameters are ignored.
+VideoRenderCallback*
+VideoRenderExternalImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                                 const WebRtc_UWord32 zOrder,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+    return this;
+}
+
+// No-op: nothing was stored per stream; always reports success.
+WebRtc_Word32 VideoRenderExternalImpl::DeleteIncomingRenderStream(
+                                                                  const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// Reports zeroed placement properties for any stream (no per-stream state
+// is kept); always returns success.
+WebRtc_Word32 VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
+                                                                         const WebRtc_UWord32 streamId,
+                                                                         WebRtc_UWord32& zOrder,
+                                                                         float& left,
+                                                                         float& top,
+                                                                         float& right,
+                                                                         float& bottom) const
+{
+    CriticalSectionScoped cs(_critSect);
+
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    return 0;
+}
+
+// No-op: there is nothing to start; always succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::StartRender()
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// No-op: there is nothing to stop; always succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::StopRender()
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// Identifies this implementation as the external renderer.
+VideoRenderType VideoRenderExternalImpl::RenderType()
+{
+    return kRenderExternal;
+}
+
+// Reports I420 as the preferred input format. ("Perfered" [sic] is the
+// established interface spelling and cannot be changed here alone.)
+RawVideoType VideoRenderExternalImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+// Returns the fullscreen flag captured at construction time.
+bool VideoRenderExternalImpl::FullScreen()
+{
+    CriticalSectionScoped cs(_critSect);
+    return _fullscreen;
+}
+
+// Not supported by the external renderer: zeroes both outputs and returns
+// -1. (No lock taken; the method touches no member state.)
+WebRtc_Word32 VideoRenderExternalImpl::GetGraphicsMemory(
+                                                         WebRtc_UWord64& totalGraphicsMemory,
+                                                         WebRtc_UWord64& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+// Screen size is unknown to the external renderer: reports 0x0, returns
+// success.
+WebRtc_Word32 VideoRenderExternalImpl::GetScreenResolution(
+                                                           WebRtc_UWord32& screenWidth,
+                                                           WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(_critSect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+// No frame-rate accounting is done here; always reports 0 fps.
+WebRtc_UWord32 VideoRenderExternalImpl::RenderFrameRate(
+                                                        const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// No-op: cropping is not applied by the external renderer; always succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::SetStreamCropping(
+                                                         const WebRtc_UWord32 streamId,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// No-op: stream placement is ignored by the external renderer; always
+// succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::ConfigureRenderer(
+                                                         const WebRtc_UWord32 streamId,
+                                                         const unsigned int zOrder,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// No-op: transparency is not handled here; always succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::SetTransparentBackground(
+                                                                const bool enable)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// No-op: text overlays are not rendered by this implementation; always
+// succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::SetText(
+                                               const WebRtc_UWord8 textId,
+                                               const WebRtc_UWord8* text,
+                                               const WebRtc_Word32 textLength,
+                                               const WebRtc_UWord32 textColorRef,
+                                               const WebRtc_UWord32 backgroundColorRef,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// No-op: bitmap overlays are not rendered by this implementation; always
+// succeeds.
+WebRtc_Word32 VideoRenderExternalImpl::SetBitmap(const void* bitMap,
+                                                 const WebRtc_UWord8 pictureId,
+                                                 const void* colorKey,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+    return 0;
+}
+
+// VideoRenderCallback
+// Incoming frames are accepted and discarded; an external component is
+// expected to do the actual rendering.
+WebRtc_Word32 VideoRenderExternalImpl::RenderFrame(
+                                                   const WebRtc_UWord32 streamId,
+                                                   VideoFrame& videoFrame)
+{
+    return 0;
+}
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/external/video_render_external_impl.h b/trunk/src/modules/video_render/main/source/external/video_render_external_impl.h
new file mode 100644
index 0000000..e1374f4
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/external/video_render_external_impl.h
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
+
+#include "i_video_render.h"
+#include "critical_section_wrapper.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+// Class definitions
+// Stub renderer used when rendering is performed outside this module: every
+// IVideoRender operation is a thread-safe no-op (see the .cc file) and the
+// object serves as its own VideoRenderCallback sink, silently discarding
+// frames passed to RenderFrame.
+// NOTE(review): IVideoRender is inherited with default (private) access
+// while VideoRenderCallback is public — confirm the private inheritance is
+// intentional.
+class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderExternalImpl(const WebRtc_Word32 id,
+                            const VideoRenderType videoRenderType,
+                            void* window, const bool fullscreen);
+
+    virtual ~VideoRenderExternalImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    // VideoRenderCallback
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+private:
+    WebRtc_Word32 _id;                 // Trace/module id.
+    CriticalSectionWrapper& _critSect; // Owned: created in ctor, deleted in dtor.
+    void* _window;                     // Stored but otherwise unused here.
+    bool _fullscreen;                  // Reported by FullScreen().
+};
+
+} //namespace webrtc
+
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
diff --git a/trunk/src/modules/video_render/main/source/i_video_render.h b/trunk/src/modules/video_render/main/source/i_video_render.h
new file mode 100644
index 0000000..2799a79
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/i_video_render.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
+
+#include "video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+// Abstract interface implemented by each platform-specific renderer in this
+// module (e.g. VideoRenderExternalImpl); the common video-render module
+// drives renderers through it.
+class IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    virtual ~IVideoRender()
+    {
+    };
+
+    virtual WebRtc_Word32 Init() = 0;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId) = 0;
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left,
+                                              float& top,
+                                              float& right,
+                                              float& bottom) const = 0;
+    // Implemented in common code?
+    //virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const = 0;
+    //virtual bool HasIncomingRenderStream(const WebRtc_UWord16 stramId) const = 0;
+
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender() = 0;
+
+    virtual WebRtc_Word32 StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+    virtual VideoRenderType RenderType() = 0;
+
+    // NOTE(review): "Perfered" misspells "Preferred" but is the established
+    // interface name used by all implementations, so it is left unchanged.
+    virtual RawVideoType PerferedVideoType() = 0;
+
+    virtual bool FullScreen() = 0;
+
+    // TODO: This should be treated in platform specific code only
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const = 0;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const = 0;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId) = 0;
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
+
+    // (Parameter-name typo "rigth" fixed to "right"; declaration-only, so
+    // no implementation or caller is affected.)
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left,
+                                  const float top,
+                                  const float right,
+                                  const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) = 0;
+
+};
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
diff --git a/trunk/src/modules/video_render/main/source/incoming_video_stream.cc b/trunk/src/modules/video_render/main/source/incoming_video_stream.cc
new file mode 100644
index 0000000..418a325
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/incoming_video_stream.cc
@@ -0,0 +1,413 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "incoming_video_stream.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "video_render_frames.h"
+#include "tick_util.h"
+#include "map_wrapper.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+#include <cassert>
+
+// Platform specifics
+#if defined(_WIN32)
+#include <windows.h>
+#elif defined(WEBRTC_LINUX)
+#include <ctime>
+#include <sys/time.h>
+#else
+#include <sys/time.h>
+#endif
+
+namespace webrtc {
+IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 moduleId,
+                                         const WebRtc_UWord32 streamId) :
+    _moduleId(moduleId),
+    _streamId(streamId),
+    _streamCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _threadCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _bufferCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrIncomingRenderThread(),
+    _deliverBufferEvent(*EventWrapper::Create()),
+    _running(false),
+    _ptrExternalCallback(NULL),
+    _ptrRenderCallback(NULL),
+    _renderBuffers(*(new VideoRenderFrames)),
+    _callbackVideoType(kVideoI420),
+    _callbackWidth(0),
+    _callbackHeight(0),
+    _incomingRate(0),
+    _lastRateCalculationTimeMs(0),
+    _numFramesSinceLastCalculation(0),
+    _lastRenderedFrame(),
+    _tempFrame(),
+    _startImage(),
+    _timeoutImage(),
+    _timeoutTime(),
+    _mirrorFramesEnabled(false),
+    _mirroring(),
+    _transformedVideoFrame()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId,
+                 "%s created for stream %d", __FUNCTION__, streamId);
+}
+
+IncomingVideoStream::~IncomingVideoStream()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId,
+                 "%s deleted for stream %d", __FUNCTION__, _streamId);
+
+    Stop();
+
+    // _ptrIncomingRenderThread - Delete in stop
+    delete &_renderBuffers;
+    delete &_streamCritsect;
+    delete &_bufferCritsect;
+    delete &_threadCritsect;
+    delete &_deliverBufferEvent;
+
+}
+
+WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_streamCritsect);
+
+    _moduleId = id;
+    return 0;
+}
+
+VideoRenderCallback*
+IncomingVideoStream::ModuleCallback()
+{
+    CriticalSectionScoped cs(_streamCritsect);
+    return this;
+}
+
+WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 streamId,
+                                               VideoFrame& videoFrame)
+{
+
+    CriticalSectionScoped csS(_streamCritsect);
+    WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
+                 "%s for stream %d, render time: %u", __FUNCTION__, _streamId,
+                 videoFrame.RenderTimeMs());
+
+    if (!_running)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
+                     "%s: Not running", __FUNCTION__);
+        return -1;
+    }
+
+    if (true == _mirrorFramesEnabled)
+    {
+        _transformedVideoFrame.VerifyAndAllocate(videoFrame.Length());
+        if (_mirroring.mirrorXAxis)
+        {
+            MirrorI420UpDown(videoFrame.Buffer(),
+                                     _transformedVideoFrame.Buffer(),
+                                     videoFrame.Width(), videoFrame.Height());
+            _transformedVideoFrame.SetLength(videoFrame.Length());
+            _transformedVideoFrame.SetWidth(videoFrame.Width());
+            _transformedVideoFrame.SetHeight(videoFrame.Height());
+            videoFrame.SwapFrame(_transformedVideoFrame);
+        }
+        if (_mirroring.mirrorYAxis)
+        {
+            MirrorI420LeftRight(videoFrame.Buffer(),
+                                        _transformedVideoFrame.Buffer(),
+                                        videoFrame.Width(), videoFrame.Height());
+            _transformedVideoFrame.SetLength(videoFrame.Length());
+            _transformedVideoFrame.SetWidth(videoFrame.Width());
+            _transformedVideoFrame.SetHeight(videoFrame.Height());
+            videoFrame.SwapFrame(_transformedVideoFrame);
+        }
+    }
+
+    // Rate statistics
+    _numFramesSinceLastCalculation++;
+    WebRtc_Word64 nowMs = TickTime::MillisecondTimestamp();
+    if (nowMs >= _lastRateCalculationTimeMs + KFrameRatePeriodMs)
+    {
+        _incomingRate = (WebRtc_UWord32) (1000 * _numFramesSinceLastCalculation
+                / (nowMs - _lastRateCalculationTimeMs));
+        _numFramesSinceLastCalculation = 0;
+        _lastRateCalculationTimeMs = nowMs;
+    }
+
+    // Insert frame
+    CriticalSectionScoped csB(_bufferCritsect);
+    if (_renderBuffers.AddFrame(&videoFrame) == 1)
+        _deliverBufferEvent.Set();
+
+    return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::SetStartImage(const VideoFrame& videoFrame)
+{
+    CriticalSectionScoped csS(_threadCritsect);
+    return _startImage.CopyFrame(videoFrame);
+}
+
+WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(const VideoFrame& videoFrame,
+                                                   const WebRtc_UWord32 timeout)
+{
+    CriticalSectionScoped csS(_threadCritsect);
+    _timeoutTime = timeout;
+    return _timeoutImage.CopyFrame(videoFrame);
+}
+
+WebRtc_Word32 IncomingVideoStream::SetRenderCallback(VideoRenderCallback* renderCallback)
+{
+    CriticalSectionScoped cs(_streamCritsect);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
+                 "%s(%x) for stream %d", __FUNCTION__, renderCallback,
+                 _streamId);
+    _ptrRenderCallback = renderCallback;
+    return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::EnableMirroring(const bool enable,
+                                                   const bool mirrorXAxis,
+                                                   const bool mirrorYAxis)
+{
+    CriticalSectionScoped cs(_streamCritsect);
+    _mirrorFramesEnabled = enable;
+    _mirroring.mirrorXAxis = mirrorXAxis;
+    _mirroring.mirrorYAxis = mirrorYAxis;
+
+    return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::SetExternalCallback(VideoRenderCallback* externalCallback)
+{
+    CriticalSectionScoped cs(_streamCritsect);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
+                 "%s(%x) for stream %d", __FUNCTION__, externalCallback,
+                 _streamId);
+    _ptrExternalCallback = externalCallback;
+    _callbackVideoType = kVideoI420;
+    _callbackWidth = 0;
+    _callbackHeight = 0;
+    return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::Start()
+{
+    CriticalSectionScoped csS(_streamCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
+                 "%s for stream %d", __FUNCTION__, _streamId);
+    if (_running)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
+                     "%s: Already running", __FUNCTION__);
+        return 0;
+    }
+
+    CriticalSectionScoped csT(_threadCritsect);
+    assert(_ptrIncomingRenderThread == NULL);
+
+    _ptrIncomingRenderThread
+            = ThreadWrapper::CreateThread(IncomingVideoStreamThreadFun, this,
+                                          kRealtimePriority,
+                                          "IncomingVideoStreamThread");
+    if (!_ptrIncomingRenderThread)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
+                     "%s: No thread", __FUNCTION__);
+        return -1;
+    }
+
+    unsigned int tId = 0;
+    if (_ptrIncomingRenderThread->Start(tId))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
+                     "%s: thread started: %u", __FUNCTION__, tId);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
+                     "%s: Could not start send thread", __FUNCTION__);
+        return -1;
+    }
+    _deliverBufferEvent.StartTimer(false, KEventStartupTimeMS);
+
+    _running = true;
+    return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::Stop()
+{
+    CriticalSectionScoped csStream(_streamCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
+                 "%s for stream %d", __FUNCTION__, _streamId);
+
+    if (!_running)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
+                     "%s: Not running", __FUNCTION__);
+        return 0;
+    }
+
+    _threadCritsect.Enter();
+    if (_ptrIncomingRenderThread)
+    {
+        ThreadWrapper* ptrThread = _ptrIncomingRenderThread;
+        _ptrIncomingRenderThread = NULL;
+        ptrThread->SetNotAlive();
+#ifndef _WIN32
+        _deliverBufferEvent.StopTimer();
+#endif
+        _threadCritsect.Leave();
+        if (ptrThread->Stop())
+        {
+            delete ptrThread;
+        }
+        else
+        {
+            assert(false);
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
+                         "%s: Not able to stop thread, leaking", __FUNCTION__);
+        }
+    }
+    else
+    {
+        _threadCritsect.Leave();
+    }
+    _running = false;
+    return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::Reset()
+{
+    CriticalSectionScoped csStream(_streamCritsect);
+    CriticalSectionScoped csBuffer(_bufferCritsect);
+
+    _renderBuffers.ReleaseAllFrames();
+    return 0;
+}
+
+WebRtc_UWord32 IncomingVideoStream::StreamId() const
+{
+    CriticalSectionScoped csStream(_streamCritsect);
+    return _streamId;
+}
+
+WebRtc_UWord32 IncomingVideoStream::IncomingRate() const
+{
+    CriticalSectionScoped cs(_streamCritsect);
+    return _incomingRate;
+}
+
+bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj)
+{
+    return static_cast<IncomingVideoStream*> (obj)->IncomingVideoStreamProcess();
+}
+
+bool IncomingVideoStream::IncomingVideoStreamProcess()
+{
+    if (kEventError != _deliverBufferEvent.Wait(KEventMaxWaitTimeMs))
+    {
+        if (_ptrIncomingRenderThread == NULL)
+        {
+            // Terminating
+            return false;
+        }
+
+        _threadCritsect.Enter();
+
+        VideoFrame* ptrFrameToRender = NULL;
+
+        // Get a new frame to render and the time for the frame after this one.
+        _bufferCritsect.Enter();
+        ptrFrameToRender = _renderBuffers.FrameToRender();
+        WebRtc_UWord32 waitTime = _renderBuffers.TimeToNextFrameRelease();
+        _bufferCritsect.Leave();
+
+        // Set timer for next frame to render
+        if (waitTime > KEventMaxWaitTimeMs)
+        {
+            waitTime = KEventMaxWaitTimeMs;
+        }
+        _deliverBufferEvent.StartTimer(false, waitTime);
+
+        if (!ptrFrameToRender)
+        {
+            if (_ptrRenderCallback)
+            {
+                if (_lastRenderedFrame.RenderTimeMs() == 0
+                        && _startImage.Size()) // And we have not rendered anything and have a start image
+                {
+                    _tempFrame.CopyFrame(_startImage);// Copy the start image in case the renderer modifies the render buffer.
+                    _ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
+                }
+                else if (_timeoutImage.Size()
+                        && _lastRenderedFrame.RenderTimeMs() + _timeoutTime
+                                < TickTime::MillisecondTimestamp()) // We have rendered something a long time ago and have a timeout image
+                {
+                    _tempFrame.CopyFrame(_timeoutImage); // Copy the timeoutImage if the renderer modifies the render buffer.
+                    _ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
+                }
+            }
+
+            // No frame
+            _threadCritsect.Leave();
+            return true;
+        }
+
+        // Send frame for rendering
+        if (_ptrExternalCallback)
+        {
+            WEBRTC_TRACE(kTraceStream,
+                         kTraceVideoRenderer,
+                         _moduleId,
+                         "%s: executing external renderer callback to deliver frame",
+                         __FUNCTION__, ptrFrameToRender->RenderTimeMs());
+            _ptrExternalCallback->RenderFrame(_streamId, *ptrFrameToRender);
+        }
+        else
+        {
+            if (_ptrRenderCallback)
+            {
+                WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
+                             "%s: Render frame, time: ", __FUNCTION__,
+                             ptrFrameToRender->RenderTimeMs());
+                _ptrRenderCallback->RenderFrame(_streamId, *ptrFrameToRender);
+            }
+        }
+
+        // Release critsect before calling the module user
+        _threadCritsect.Leave();
+
+        // We're done with this frame, delete it.
+        if (ptrFrameToRender)
+        {
+            CriticalSectionScoped cs(_bufferCritsect);
+            _lastRenderedFrame.SwapFrame(*ptrFrameToRender);
+            _renderBuffers.ReturnFrame(ptrFrameToRender);
+        }
+    }
+    return true;
+}
+WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(VideoFrame& videoFrame) const
+{
+    CriticalSectionScoped cs(_bufferCritsect);
+    return videoFrame.CopyFrame(_lastRenderedFrame);
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/incoming_video_stream.h b/trunk/src/modules/video_render/main/source/incoming_video_stream.h
new file mode 100644
index 0000000..cd2785d
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/incoming_video_stream.h
@@ -0,0 +1,140 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
+
+#include "video_render.h"
+#include "map_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class VideoRenderCallback;
+class VideoRenderFrames;
+
+struct VideoMirroring
+{
+    bool mirrorXAxis;
+    bool mirrorYAxis;
+    VideoMirroring() :
+        mirrorXAxis(false), mirrorYAxis(false)
+    {
+    }
+};
+
+// Class definitions
+class IncomingVideoStream: public VideoRenderCallback
+{
+public:
+    /*
+     *   VideoRenderer constructor/destructor
+     */
+    IncomingVideoStream(const WebRtc_Word32 moduleId,
+                        const WebRtc_UWord32 streamId);
+    ~IncomingVideoStream();
+
+    WebRtc_Word32 ChangeModuleId(const WebRtc_Word32 id);
+
+    // Get callback to deliver frames to the module
+    VideoRenderCallback* ModuleCallback();
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    // Set callback to the platform dependent code
+    WebRtc_Word32 SetRenderCallback(VideoRenderCallback* renderCallback);
+
+    // Callback for file recording, snapshot, ...
+    WebRtc_Word32 SetExternalCallback(VideoRenderCallback* renderObject);
+
+    /*
+     *   Start/Stop
+     */
+    WebRtc_Word32 Start();
+    WebRtc_Word32 Stop();
+
+    // Clear all buffers
+    WebRtc_Word32 Reset();
+
+    /*
+     *   Properties
+     */
+    WebRtc_UWord32 StreamId() const;
+    WebRtc_UWord32 IncomingRate() const;
+
+    /*
+     *
+     */
+    WebRtc_Word32 GetLastRenderedFrame(VideoFrame& videoFrame) const;
+
+    WebRtc_Word32 SetStartImage(const VideoFrame& videoFrame);
+
+    WebRtc_Word32 SetTimeoutImage(const VideoFrame& videoFrame,
+                                  const WebRtc_UWord32 timeout);
+
+    WebRtc_Word32 EnableMirroring(const bool enable,
+                                  const bool mirrorXAxis,
+                                  const bool mirrorYAxis);
+
+protected:
+    static bool IncomingVideoStreamThreadFun(void* obj);
+    bool IncomingVideoStreamProcess();
+
+private:
+
+    // Enums
+    enum
+    {
+        KEventStartupTimeMS = 10
+    };
+    enum
+    {
+        KEventMaxWaitTimeMs = 100
+    };
+    enum
+    {
+        KFrameRatePeriodMs = 1000
+    };
+
+    WebRtc_Word32 _moduleId;
+    WebRtc_UWord32 _streamId;
+    CriticalSectionWrapper& _streamCritsect; // Critsects listed in the order they must be entered
+    CriticalSectionWrapper& _threadCritsect;
+    CriticalSectionWrapper& _bufferCritsect;
+    ThreadWrapper* _ptrIncomingRenderThread;
+    EventWrapper& _deliverBufferEvent;
+    bool _running;
+
+    VideoRenderCallback* _ptrExternalCallback;
+    VideoRenderCallback* _ptrRenderCallback;
+    VideoRenderFrames& _renderBuffers;
+
+    RawVideoType _callbackVideoType;
+    WebRtc_UWord32 _callbackWidth;
+    WebRtc_UWord32 _callbackHeight;
+
+    WebRtc_UWord32 _incomingRate;
+    WebRtc_Word64 _lastRateCalculationTimeMs;
+    WebRtc_UWord16 _numFramesSinceLastCalculation;
+    VideoFrame _lastRenderedFrame;
+    VideoFrame _tempFrame;
+    VideoFrame _startImage;
+    VideoFrame _timeoutImage;
+    WebRtc_UWord32 _timeoutTime;
+
+    bool _mirrorFramesEnabled;
+    VideoMirroring _mirroring;
+    VideoFrame _transformedVideoFrame;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
diff --git a/trunk/src/modules/video_render/main/source/linux/video_render_linux_impl.cc b/trunk/src/modules/video_render/main/source/linux/video_render_linux_impl.cc
new file mode 100644
index 0000000..a5e311a
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/linux/video_render_linux_impl.cc
@@ -0,0 +1,271 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_linux_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "video_x11_render.h"
+
+#include <X11/Xlib.h>
+
+namespace webrtc {
+
+VideoRenderLinuxImpl::VideoRenderLinuxImpl(
+                                           const WebRtc_Word32 id,
+                                           const VideoRenderType videoRenderType,
+                                           void* window, const bool fullscreen) :
+            _id(id),
+            _renderLinuxCritsect(
+                                 *CriticalSectionWrapper::CreateCriticalSection()),
+            _ptrWindow(window), _fullscreen(fullscreen), _ptrX11Render(NULL),
+            _renderType(videoRenderType)
+{
+}
+
+VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
+{
+    if (_ptrX11Render)
+        delete _ptrX11Render;
+
+    delete &_renderLinuxCritsect;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(_renderLinuxCritsect);
+    _ptrX11Render = new VideoX11Render((Window) _ptrWindow);
+    if (!_ptrX11Render)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s",
+                     "Failed to create instance of VideoX11Render object");
+        return -1;
+    }
+    int retVal = _ptrX11Render->Init();
+    if (retVal == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_renderLinuxCritsect);
+
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(_renderLinuxCritsect);
+    _ptrWindow = window;
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->ChangeWindow((Window) window);
+    }
+
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
+                                                                       const WebRtc_UWord32 streamId,
+                                                                       const WebRtc_UWord32 zOrder,
+                                                                       const float left,
+                                                                       const float top,
+                                                                       const float right,
+                                                                       const float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_renderLinuxCritsect);
+
+    VideoRenderCallback* renderCallback = NULL;
+    if (_ptrX11Render)
+    {
+        VideoX11Channel* renderChannel =
+                _ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
+                                                      top, right, bottom);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "Render channel creation failed for stream id: %d",
+                         streamId);
+            return NULL;
+        }
+        renderCallback = (VideoRenderCallback *) renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "_ptrX11Render is NULL");
+        return NULL;
+    }
+    return renderCallback;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::DeleteIncomingRenderStream(
+                                                               const WebRtc_UWord32 streamId)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->DeleteX11RenderChannel(streamId);
+    }
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
+                                                                      const WebRtc_UWord32 streamId,
+                                                                      WebRtc_UWord32& zOrder,
+                                                                      float& left,
+                                                                      float& top,
+                                                                      float& right,
+                                                                      float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
+                                                          left, top, right,
+                                                          bottom);
+    }
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::StartRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::StopRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+VideoRenderType VideoRenderLinuxImpl::RenderType()
+{
+    return kRenderX11;
+}
+
+RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderLinuxImpl::FullScreen()
+{
+    return false;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::GetGraphicsMemory(
+                                                      WebRtc_UWord64& /*totalGraphicsMemory*/,
+                                                      WebRtc_UWord64& /*availableGraphicsMemory*/) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::GetScreenResolution(
+                                                        WebRtc_UWord32& /*screenWidth*/,
+                                                        WebRtc_UWord32& /*screenHeight*/) const
+{
+    return -1;
+}
+
+WebRtc_UWord32 VideoRenderLinuxImpl::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
+{
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetStreamCropping(
+                                                      const WebRtc_UWord32 /*streamId*/,
+                                                      const float /*left*/,
+                                                      const float /*top*/,
+                                                      const float /*right*/,
+                                                      const float /*bottom*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::ConfigureRenderer(
+                                                      const WebRtc_UWord32 streamId,
+                                                      const unsigned int zOrder,
+                                                      const float left,
+                                                      const float top,
+                                                      const float right,
+                                                      const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetText(
+                                            const WebRtc_UWord8 textId,
+                                            const WebRtc_UWord8* text,
+                                            const WebRtc_Word32 textLength,
+                                            const WebRtc_UWord32 textColorRef,
+                                            const WebRtc_UWord32 backgroundColorRef,
+                                            const float left, const float top,
+                                            const float rigth,
+                                            const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
+                                              const WebRtc_UWord8 pictureId,
+                                              const void* colorKey,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/linux/video_render_linux_impl.h b/trunk/src/modules/video_render/main/source/linux/video_render_linux_impl.h
new file mode 100644
index 0000000..10460ec
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/linux/video_render_linux_impl.h
@@ -0,0 +1,136 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
+
+#include "i_video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Render;
+
+// Class definitions
+class VideoRenderLinuxImpl: IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderLinuxImpl(const WebRtc_Word32 id,
+                         const VideoRenderType videoRenderType,
+                         void* window, const bool fullscreen);
+
+    virtual ~VideoRenderLinuxImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float rigth, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderLinuxCritsect;
+
+    void* _ptrWindow;
+    bool _fullscreen;
+
+    // X11 renderer
+    VideoX11Render* _ptrX11Render;
+
+    VideoRenderType _renderType;
+
+};
+
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
diff --git a/trunk/src/modules/video_render/main/source/linux/video_x11_channel.cc b/trunk/src/modules/video_render/main/source/linux/video_x11_channel.cc
new file mode 100644
index 0000000..5f8bfa1
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/linux/video_x11_channel.cc
@@ -0,0 +1,332 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_x11_channel.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+
+#define DISP_MAX 128
+
+static Display *dispArray[DISP_MAX];
+static int dispCount = 0;
+
+
+// Constructs an idle channel: no display connection, GC or shared-memory
+// image exists until Init() is called. Frame size defaults to CIF (352x288).
+VideoX11Channel::VideoX11Channel(WebRtc_Word32 id) :
+    _crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL),
+          _shminfo(), _image(NULL), _window(0L), _gc(NULL),
+          _width(DEFAULT_RENDER_FRAME_WIDTH),
+          _height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
+          _xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
+          _top(0.0), _left(0.0), _right(0.0), _bottom(0.0),
+          _Id(id)
+{
+}
+
+// Tears down any live X11 resources (SHM image, GC, display connection)
+// before destroying the critical section that guards them.
+VideoX11Channel::~VideoX11Channel()
+{
+    if (_prepared)
+    {
+        _crit.Enter();
+        ReleaseWindow();
+        _crit.Leave();
+    }
+    // Allocated via CreateCriticalSection() in the constructor.
+    delete &_crit;
+}
+
+// VideoRenderCallback entry point: re-creates the local renderer when the
+// incoming frame dimensions change, then blits the frame into the window.
+// Returns 0 on success, -1 on failure.
+// NOTE(review): FrameSizeChange() and DeliverFrame() re-acquire _crit while
+// this scope holds it; assumes CriticalSectionWrapper is recursive — confirm.
+WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId,
+                                               VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(_crit);
+    if (_width != (WebRtc_Word32) videoFrame.Width() || _height
+            != (WebRtc_Word32) videoFrame.Height())
+    {
+        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
+                        videoFrame.TimeStamp());
+}
+
+// Rebuilds the shared-memory image for a new incoming frame size: drops the
+// current renderer (if prepared) and creates one sized width x height.
+// Returns 0 on success, -1 if the renderer could not be (re)created.
+WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
+                                                   WebRtc_Word32 height,
+                                                   WebRtc_Word32 /*numberOfStreams */)
+{
+    CriticalSectionScoped cs(_crit);
+    if (_prepared)
+    {
+        RemoveRenderer();
+    }
+    if (CreateLocalRenderer(width, height) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// Converts one decoded I420 frame to ARGB in the shared-memory XImage and
+// pushes it to the window. Returns 0 on success (or when the renderer is not
+// yet prepared, in which case the frame is silently dropped) and -1 when the
+// display registered for this channel is gone.
+// FIX(review): the third parameter was declared "unsigned WebRtc_Word32",
+// which is ill-formed C++ ("unsigned" cannot qualify a typedef); the value
+// is an unsigned 32-bit 90 kHz timestamp, hence WebRtc_UWord32. The matching
+// declaration in video_x11_channel.h needs the same correction.
+WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
+                                                WebRtc_Word32 bufferSize,
+                                                WebRtc_UWord32 /*timeStamp90kHz*/)
+{
+    CriticalSectionScoped cs(_crit);
+    if (!_prepared)
+    {
+        return 0;
+    }
+
+    if (!dispArray[_dispCount])
+    {
+        return -1;
+    }
+
+    unsigned char *pBuf = buffer;
+    // convert to RGB32, setting stride = width.
+    ConvertFromI420(pBuf, _width, kARGB, 0, _width, _height, _buffer);
+
+    // put image in window
+    XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
+                 _height, True);
+
+    // very important for the image to update properly!
+    XSync(_display, False);
+    return 0;
+
+}
+
+// Reports the dimensions of the most recently configured incoming frame
+// through the two out-parameters. Always succeeds.
+WebRtc_Word32 VideoX11Channel::GetFrameSize(WebRtc_Word32& width,
+                                                WebRtc_Word32& height)
+{
+    height = _height;
+    width = _width;
+    return 0;
+}
+
+// Binds the channel to |window| and connects to the default X display.
+// left/top/right/bottom are fractions in [0,1] of the window that the video
+// should cover. Returns 0 on success, -1 on any failure.
+// NOTE(review): the display is registered in the global dispArray before the
+// coordinate validation below, so a failed Init still consumes one of the
+// DISP_MAX slots and leaves the display open until ReleaseWindow().
+WebRtc_Word32 VideoX11Channel::Init(Window window, float left, float top,
+                                        float right, float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_crit);
+
+    _window = window;
+    _left = left;
+    _right = right;
+    _top = top;
+    _bottom = bottom;
+
+    _display = XOpenDisplay(NULL); // Use default display
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    // Remember which global display slot belongs to this channel;
+    // DeliverFrame() checks it before drawing.
+    if (dispCount < DISP_MAX)
+    {
+        dispArray[dispCount] = _display;
+        _dispCount = dispCount;
+        dispCount++;
+    }
+    else
+    {
+        return -1;
+    }
+
+    // Reject fractional coordinates outside [0,1].
+    if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right
+            < 0) || (1 < bottom || bottom < 0))
+    {
+        return -1;
+    }
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == 0)
+    {
+        return -1;
+    }
+
+    _xPos = (WebRtc_Word32) (winWidth * left);
+    _yPos = (WebRtc_Word32) (winHeight * top);
+    _outWidth = (WebRtc_Word32) (winWidth * (right - left));
+    _outHeight = (WebRtc_Word32) (winHeight * (bottom - top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer want's sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    _gc = XCreateGC(_display, _window, 0, 0);
+    if (!_gc) {
+      // Failed to create the graphics context.
+      assert(false);
+      return -1;
+    }
+
+    if (CreateLocalRenderer(winWidth, winHeight) == -1)
+    {
+        return -1;
+    }
+    return 0;
+
+}
+
+// Moves rendering to a new window: stops the current renderer, recomputes
+// the destination rectangle from the stored fractional coordinates, and
+// prepares a new shared-memory image at the current frame size.
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoX11Channel::ChangeWindow(Window window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_crit);
+
+    // Stop the rendering, if we are rendering...
+    RemoveRenderer();
+    _window = window;
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    // FIX(review): XGetGeometry returns a Status which is zero on failure
+    // (never -1), so the original "== -1" comparison silently ignored
+    // failures. Compare against 0, matching Init().
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == 0)
+    {
+        return -1;
+    }
+    _xPos = (int) (winWidth * _left);
+    _yPos = (int) (winHeight * _top);
+    _outWidth = (int) (winWidth * (_right - _left));
+    _outHeight = (int) (winHeight * (_bottom - _top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer want's sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    // Prepare rendering for the new window at the current frame size.
+    if (CreateLocalRenderer(_width, _height) == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+// Releases every X11 resource owned by the channel: the SHM image (via
+// RemoveRenderer), the graphics context, and the display connection.
+// Safe to call repeatedly; subsequent calls are no-ops.
+// NOTE(review): the dispArray slot claimed in Init() is not cleared here,
+// so the global slot count never shrinks — verify whether this is intended.
+WebRtc_Word32 VideoX11Channel::ReleaseWindow()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_crit);
+
+    RemoveRenderer();
+    if (_gc) {
+      XFreeGC(_display, _gc);
+      _gc = NULL;
+    }
+    if (_display)
+    {
+        XCloseDisplay(_display);
+        _display = NULL;
+    }
+    return 0;
+}
+
+// Allocates the MIT-SHM XImage used as the render target for width x height
+// frames. Requires a valid window and display (i.e. Init() succeeded) and
+// must not be called while already prepared. Returns 0 on success, -1 on
+// any failure.
+WebRtc_Word32 VideoX11Channel::CreateLocalRenderer(WebRtc_Word32 width,
+                                                       WebRtc_Word32 height)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(_crit);
+
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    if (_prepared)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
+                     "Renderer already prepared, exits.");
+        return -1;
+    }
+
+    _width = width;
+    _height = height;
+
+    // create shared memory image
+    _image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
+                             &_shminfo, _width, _height); // this parameter needs to be the same for some reason.
+    // FIX(review): the original dereferenced _image without checking for
+    // NULL, crashing when the server refuses the SHM image.
+    if (!_image)
+    {
+        return -1;
+    }
+    _shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
+            * _image->height), IPC_CREAT | 0777);
+    // FIX(review): shmget returns -1 on failure; passing -1 on to shmat()
+    // was previously unguarded.
+    if (_shminfo.shmid == -1)
+    {
+        return -1;
+    }
+    _shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
+    if (_image->data == reinterpret_cast<char*>(-1))
+    {
+        return -1;
+    }
+    _buffer = (unsigned char*) _image->data;
+    _shminfo.readOnly = False;
+
+    // attach image to display
+    if (!XShmAttach(_display, &_shminfo))
+    {
+        //printf("XShmAttach failed !\n");
+        return -1;
+    }
+    XSync(_display, False);
+
+    _prepared = true;
+    return 0;
+}
+
+// Detaches and destroys the shared-memory XImage and releases its SysV
+// shared-memory segment. No-op when not prepared.
+// NOTE(review): no lock is taken here; all in-file callers
+// (FrameSizeChange, ChangeWindow, ReleaseWindow, ~VideoX11Channel) already
+// hold _crit — keep it that way for any new caller.
+WebRtc_Word32 VideoX11Channel::RemoveRenderer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    if (!_prepared)
+    {
+        return 0;
+    }
+    _prepared = false;
+
+    // Free the memory.
+    XShmDetach(_display, &_shminfo);
+    // XDestroyImage also frees the image structure itself.
+    XDestroyImage( _image );
+    _image = NULL;
+    shmdt(_shminfo.shmaddr);
+    _shminfo.shmaddr = NULL;
+    _buffer = NULL;
+    // Mark the segment for removal once all attachments are gone.
+    shmctl(_shminfo.shmid, IPC_RMID, 0);
+    _shminfo.shmid = 0;
+    return 0;
+}
+
+// Returns the configured placement (fractions of the window) of this stream
+// through the out-parameters. Z-order is not implemented and reads back 0.
+WebRtc_Word32 VideoX11Channel::GetStreamProperties(WebRtc_UWord32& zOrder,
+                                                       float& left, float& top,
+                                                       float& right,
+                                                       float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    zOrder = 0; // z-order is not supported by this renderer
+    bottom = _bottom;
+    right = _right;
+    top = _top;
+    left = _left;
+
+    return 0;
+}
+
+
+} //namespace webrtc
+
+
diff --git a/trunk/src/modules/video_render/main/source/linux/video_x11_channel.h b/trunk/src/modules/video_render/main/source/linux/video_x11_channel.h
new file mode 100644
index 0000000..1fb2dab
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/linux/video_x11_channel.h
@@ -0,0 +1,97 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+
+#include "video_render_defines.h"
+#include "common_video/libyuv/include/libyuv.h"
+#include <sys/shm.h>
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/extensions/XShm.h>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+#define DEFAULT_RENDER_FRAME_WIDTH 352
+#define DEFAULT_RENDER_FRAME_HEIGHT 288
+
+
+// Renders incoming I420 video into an X11 window using the MIT-SHM
+// extension. One instance serves one stream; VideoX11Render owns the
+// channel instances. All public entry points serialize on _crit.
+class VideoX11Channel: public VideoRenderCallback
+{
+public:
+    VideoX11Channel(WebRtc_Word32 id);
+
+    virtual ~VideoX11Channel();
+
+    // VideoRenderCallback: delivers one decoded frame for display.
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height,
+                                  WebRtc_Word32 numberOfStreams);
+    // FIX(review): the timestamp parameter was declared
+    // "unsigned WebRtc_Word32", which is ill-formed C++ ("unsigned" cannot
+    // qualify a typedef); it is a 90 kHz timestamp, hence WebRtc_UWord32.
+    // The definition in video_x11_channel.cc needs the same correction.
+    WebRtc_Word32 DeliverFrame(unsigned char* buffer, WebRtc_Word32 bufferSize,
+                               WebRtc_UWord32 /*timeStamp90kHz*/);
+    WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height);
+    // Binds to |window|; left/top/right/bottom are fractions in [0,1].
+    WebRtc_Word32 Init(Window window, float left, float top, float right,
+                       float bottom);
+    WebRtc_Word32 ChangeWindow(Window window);
+    WebRtc_Word32
+            GetStreamProperties(WebRtc_UWord32& zOrder, float& left,
+                                float& top, float& right, float& bottom) const;
+    WebRtc_Word32 ReleaseWindow();
+
+    bool IsPrepared()
+    {
+        return _prepared;
+    }
+
+private:
+
+    WebRtc_Word32
+            CreateLocalRenderer(WebRtc_Word32 width, WebRtc_Word32 height);
+    WebRtc_Word32 RemoveRenderer();
+
+    //FIXME a better place for this method? the GetWidthHeight no longer
+    // supported by common_video.
+    int GetWidthHeight(VideoType type, int bufferSize, int& width,
+                       int& height);
+
+    CriticalSectionWrapper& _crit;
+
+    Display* _display;
+    XShmSegmentInfo _shminfo;
+    XImage* _image;
+    Window _window;
+    GC _gc;
+    WebRtc_Word32 _width; // incoming frame width
+    WebRtc_Word32 _height; // incoming frame height
+    WebRtc_Word32 _outWidth; // render frame width
+    WebRtc_Word32 _outHeight; // render frame height
+    WebRtc_Word32 _xPos; // position within window
+    WebRtc_Word32 _yPos;
+    bool _prepared; // true if ready to use
+    WebRtc_Word32 _dispCount; // index of this channel's slot in dispArray
+
+    unsigned char* _buffer; // points into the SHM segment; not owned separately
+    float _top;
+    float _left;
+    float _right;
+    float _bottom;
+
+    WebRtc_Word32 _Id;
+
+};
+
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
diff --git a/trunk/src/modules/video_render/main/source/linux/video_x11_render.cc b/trunk/src/modules/video_render/main/source/linux/video_x11_render.cc
new file mode 100644
index 0000000..a3543e3
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/linux/video_x11_render.cc
@@ -0,0 +1,154 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_x11_render.h"
+#include "video_x11_channel.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+
+// Manages one VideoX11Channel per stream id, all rendering into |window|.
+VideoX11Render::VideoX11Render(Window window) :
+    _window(window),
+            _critSect(*CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+// FIX(review): the original destructor only freed the critical section, so
+// any channels still registered in _streamIdToX11ChannelMap leaked (along
+// with their X11 resources). Release and delete them here; channels removed
+// earlier via DeleteX11RenderChannel() are no longer in the map, so there is
+// no double delete.
+VideoX11Render::~VideoX11Render()
+{
+    for (std::map<int, VideoX11Channel*>::iterator iter =
+                 _streamIdToX11ChannelMap.begin();
+         iter != _streamIdToX11ChannelMap.end(); ++iter)
+    {
+        VideoX11Channel* renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ReleaseWindow();
+            delete renderChannel;
+        }
+    }
+    _streamIdToX11ChannelMap.clear();
+    // Allocated via CreateCriticalSection() in the constructor.
+    delete &_critSect;
+}
+
+// Resets the renderer to an empty state. Always succeeds.
+// NOTE(review): clear() drops the channel pointers without deleting them;
+// callers are expected to have removed channels first — confirm.
+WebRtc_Word32 VideoX11Render::Init()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _streamIdToX11ChannelMap.clear();
+
+    return 0;
+}
+
+// Re-targets every existing channel at |window|, then remembers it for
+// channels created afterwards. Always returns 0.
+WebRtc_Word32 VideoX11Render::ChangeWindow(Window window)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    for (std::map<int, VideoX11Channel*>::iterator it =
+                 _streamIdToX11ChannelMap.begin();
+         it != _streamIdToX11ChannelMap.end(); ++it)
+    {
+        VideoX11Channel* channel = it->second;
+        if (channel)
+        {
+            channel->ChangeWindow(window);
+        }
+    }
+
+    _window = window;
+
+    return 0;
+}
+
+// Creates a render channel for |streamId| bound to the current window, or
+// returns the already-registered one. |zOrder| is accepted but unused (no
+// z-order support). Returns NULL on failure.
+VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
+                                                                WebRtc_Word32 streamId,
+                                                                WebRtc_Word32 zOrder,
+                                                                const float left,
+                                                                const float top,
+                                                                const float right,
+                                                                const float bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+
+    if (iter == _streamIdToX11ChannelMap.end())
+    {
+        // FIX(review): the original null-checked the result of `new` (which
+        // throws rather than returning NULL) but ignored the return value of
+        // Init(), storing and handing out a half-initialized channel. Check
+        // Init() instead and fail cleanly.
+        renderChannel = new VideoX11Channel(streamId);
+        if (renderChannel->Init(_window, left, top, right, bottom) == -1)
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideoRenderer,
+                         -1,
+                         "Failed to init VideoX11Channel for streamId : %d",
+                         streamId);
+            delete renderChannel;
+            return NULL;
+        }
+        _streamIdToX11ChannelMap[streamId] = renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                     "Render Channel already exists for streamId: %d", streamId);
+        renderChannel = iter->second;
+    }
+
+    return renderChannel;
+}
+
+// Releases, deletes, and unregisters the channel for |streamId|.
+// Returns 0 on success, -1 when no channel exists for the id.
+// FIX(review): the original fell through after a successful erase, logging
+// "No VideoX11Channel object exists" and returning -1 even on success.
+WebRtc_Word32 VideoX11Render::DeleteX11RenderChannel(WebRtc_Word32 streamId)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ReleaseWindow();
+            delete renderChannel;
+            renderChannel = NULL;
+        }
+        _streamIdToX11ChannelMap.erase(iter);
+        return 0;
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+// Reads back the placement of the channel registered for |streamId|.
+// Returns 0 on success, -1 when no channel exists for the id.
+// FIX(review): the original fell through after a successful lookup, logging
+// an error and returning -1 even when the properties were retrieved.
+WebRtc_Word32 VideoX11Render::GetIncomingStreamProperties(
+                                                              WebRtc_Word32 streamId,
+                                                              WebRtc_UWord32& zOrder,
+                                                              float& left,
+                                                              float& top,
+                                                              float& right,
+                                                              float& bottom)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
+            return 0;
+        }
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/linux/video_x11_render.h b/trunk/src/modules/video_render/main/source/linux/video_x11_render.h
new file mode 100644
index 0000000..9b140ef
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/linux/video_x11_render.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+
+#include "video_render_defines.h"
+
+#include <X11/Xlib.h>
+#include <map>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Channel;
+
+// Owns the per-stream VideoX11Channel instances that render into a single
+// X11 window. All public methods serialize on _critSect.
+class VideoX11Render
+{
+
+public:
+    VideoX11Render(Window window);
+    ~VideoX11Render();
+
+    // Clears the stream-to-channel map.
+    WebRtc_Word32 Init();
+    // Re-targets all existing channels (and future ones) at |window|.
+    WebRtc_Word32 ChangeWindow(Window window);
+
+    // Creates (or returns the existing) channel for |streamId|; coordinates
+    // are fractions in [0,1] of the window. Returns NULL on failure.
+    VideoX11Channel* CreateX11RenderChannel(WebRtc_Word32 streamId,
+                                                WebRtc_Word32 zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom);
+
+    WebRtc_Word32 DeleteX11RenderChannel(WebRtc_Word32 streamId);
+
+    WebRtc_Word32 GetIncomingStreamProperties(WebRtc_Word32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom);
+
+private:
+    Window _window;
+    CriticalSectionWrapper& _critSect;
+    // Maps stream id -> owned channel; entries deleted in
+    // DeleteX11RenderChannel.
+    std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
+
+};
+
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
diff --git a/trunk/src/modules/video_render/main/source/mac/cocoa_full_screen_window.h b/trunk/src/modules/video_render/main/source/mac/cocoa_full_screen_window.h
new file mode 100644
index 0000000..c8e98bb
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/cocoa_full_screen_window.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_full_screen_window.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+
+#import <Cocoa/Cocoa.h>
+//#define GRAB_ALL_SCREENS 1
+
+// Borderless black window that covers the captured main display (or all
+// displays when GRAB_ALL_SCREENS is defined), placed at the shielding
+// window level so it sits above all normal windows.
+@interface CocoaFullScreenWindow : NSObject {
+	NSWindow*			_window; // created in grabFullScreen
+}
+
+-(id)init;
+-(void)grabFullScreen;    // capture the display(s) and show the window
+-(void)releaseFullScreen; // hide the window and release the display(s)
+-(NSWindow*)window;       // accessor for the underlying NSWindow
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
diff --git a/trunk/src/modules/video_render/main/source/mac/cocoa_full_screen_window.mm b/trunk/src/modules/video_render/main/source/mac/cocoa_full_screen_window.mm
new file mode 100644
index 0000000..e86bab1
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/cocoa_full_screen_window.mm
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "cocoa_full_screen_window.h"
+#include "trace.h"
+
+using namespace webrtc;
+
+// Implementation notes: grabFullScreen captures the display via Quartz,
+// then creates a borderless black NSWindow at the shielding window level so
+// nothing else can draw above it. dealloc unconditionally calls
+// releaseFullScreen.
+@implementation CocoaFullScreenWindow
+
+-(id)init{	
+	
+	self = [super init];
+	if(!self){
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__); 
+		return nil;
+	}
+	
+	
+	WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__); 
+	return self;
+}
+
+// Captures the display(s), then builds and shows the shield window.
+// A capture failure is only logged; the window is created regardless.
+-(void)grabFullScreen{
+	
+#ifdef GRAB_ALL_SCREENS
+	if(CGCaptureAllDisplays() != kCGErrorSuccess)
+#else
+	if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__); 
+	}
+	
+	// get the shielding window level
+	int windowLevel = CGShieldingWindowLevel();
+	
+	// get the screen rect of main display
+	NSRect screenRect = [[NSScreen mainScreen]frame];
+	
+	_window = [[NSWindow alloc]initWithContentRect:screenRect 
+										   styleMask:NSBorderlessWindowMask
+											 backing:NSBackingStoreBuffered
+											   defer:NO
+											  screen:[NSScreen mainScreen]];
+	
+	[_window setLevel:windowLevel];
+	[_window setBackgroundColor:[NSColor blackColor]];
+	[_window makeKeyAndOrderFront:nil];
+
+}
+ 
+// Hides the shield window and releases the captured display(s).
+// NOTE(review): called from dealloc even if grabFullScreen never ran, in
+// which case _window is nil (messaging nil is a no-op) and the display
+// release call reports an error — confirm this is acceptable.
+-(void)releaseFullScreen
+{
+	[_window orderOut:self];
+	
+#ifdef GRAB_ALL_SCREENS
+	if(CGReleaseAllDisplays() != kCGErrorSuccess)
+#else
+	if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__); 
+	}		
+}
+
+- (NSWindow*)window
+{
+  return _window;
+}
+
+- (void) dealloc
+{
+	[self releaseFullScreen];
+	[super dealloc];
+}	
+
+
+	
+@end
diff --git a/trunk/src/modules/video_render/main/source/mac/cocoa_render_view.h b/trunk/src/modules/video_render/main/source/mac/cocoa_render_view.h
new file mode 100644
index 0000000..15a8108
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/cocoa_render_view.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_render_view.h
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/gl.h>
+#import <OpenGL/glu.h>
+#import <OpenGL/OpenGL.h>
+
+// NSOpenGLView subclass that caches its NSOpenGLContext for use by the
+// renderer. One of the two init methods must be called before use.
+@interface CocoaRenderView : NSOpenGLView {
+  NSOpenGLContext* _nsOpenGLContext; // cached from [self openGLContext]
+}
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;           // keeps current frame
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt; // sizes to main screen
+-(NSOpenGLContext*)nsOpenGLContext;
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
diff --git a/trunk/src/modules/video_render/main/source/mac/cocoa_render_view.mm b/trunk/src/modules/video_render/main/source/mac/cocoa_render_view.mm
new file mode 100644
index 0000000..567d171
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/cocoa_render_view.mm
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+#import "cocoa_render_view.h"
+#include "trace.h"
+
+using namespace webrtc;
+
+// NOTE(review): both init methods reassign `self` inside a plain instance
+// method (not an Objective-C initializer). If -initWithFrame:pixelFormat:
+// returned a different object, the caller's reference would not see it —
+// verify this pattern is intentional before changing it.
+@implementation CocoaRenderView
+
+// Re-initializes the view with |fmt| at its current frame and caches the
+// resulting OpenGL context. |fmt| is autoreleased here.
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
+	
+	self = [super initWithFrame:[self frame] pixelFormat:[fmt autorelease]];
+	if (self == nil){
+		
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__); 
+	}
+	
+	
+	_nsOpenGLContext = [self openGLContext];
+
+}
+
+-(NSOpenGLContext*)nsOpenGLContext {
+    return _nsOpenGLContext;
+}
+
+// Same as initCocoaRenderView: but sized to the main screen's frame.
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
+	
+	NSRect screenRect = [[NSScreen mainScreen]frame];
+//	[_windowRef setFrame:screenRect];
+//	[_windowRef setBounds:screenRect];
+	self = [super initWithFrame:screenRect	pixelFormat:[fmt autorelease]];
+	if (self == nil){
+		
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__); 
+	}
+	
+	_nsOpenGLContext = [self openGLContext];
+
+}
+
+@end
+
+
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_agl.cc b/trunk/src/modules/video_render/main/source/mac/video_render_agl.cc
new file mode 100644
index 0000000..e968865
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_agl.cc
@@ -0,0 +1,2007 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#include "video_render_agl.h"
+
+//  includes
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+namespace webrtc {
+
+/*
+ *
+ *    VideoChannelAGL
+ *
+ */
+
+#pragma mark VideoChannelAGL constructor
+
+// Constructs a render channel bound to the shared AGL context owned by
+// 'owner'. All size/buffer state starts zeroed; the texture and pixel
+// buffers are created lazily in FrameSizeChange().
+VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
+    _aglContext( aglContext),
+    _id( iId),
+    _owner( owner),
+    _width( 0),
+    _height( 0),
+    _stretchedWidth( 0),
+    _stretchedHeight( 0),
+    _startWidth( 0.0f),
+    _startHeight( 0.0f),
+    _stopWidth( 0.0f),
+    _stopHeight( 0.0f),
+    _xOldWidth( 0),
+    _yOldHeight( 0),
+    _oldStretchedHeight(0),
+    _oldStretchedWidth( 0),
+    _buffer( 0),
+    _bufferSize( 0),
+    _incommingBufferSize(0),
+    _bufferIsUpdated( false),
+    _sizeInitialized( false),
+    _numberOfStreams( 0),
+    _bVideoSizeStartedChanging(false),
+    _pixelFormat( GL_RGBA),
+    _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+    _texture( 0)
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
+}
+
+// Releases the pixel buffer and the GL texture. The AGL context is made
+// current first because glDeleteTextures operates on the current context.
+VideoChannelAGL::~VideoChannelAGL()
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    aglSetCurrentContext(_aglContext);
+
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+// Entry point from the video engine for a new frame. Rebuilds the texture
+// and buffers if the frame size changed, then forwards the pixel data to
+// DeliverFrame(). Returns -1 if the resize fails.
+WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame)
+{
+    _owner->LockAGLCntx();
+    if(_width != videoFrame.Width() ||
+            _height != videoFrame.Height())
+    {
+        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        { //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
+            _owner->UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    _owner->UnlockAGLCntx();
+    // NOTE(review): DeliverFrame() runs after the lock above is released
+    // (it re-acquires the lock itself), so the size could in principle
+    // change in between -- confirm this window is acceptable.
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+}
+
+int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
+{
+    // Intentionally a no-op: actual resize work happens in
+    // FrameSizeChange(). Take and release the lock to stay consistent
+    // with the other channel operations.
+    _owner->LockAGLCntx();
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+    // Record the target (stretched) output dimensions used when the
+    // off-screen buffer is rendered. Note the (height, width) argument
+    // order in this signature.
+    _owner->LockAGLCntx();
+    _stretchedWidth = stretchWidth;
+    _stretchedHeight = stretchHeight;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Re-allocates the CPU-side pixel buffers and the GL texture for a new
+// frame size. Returns 0 on success (or when the size is unchanged), -1 on
+// any context/texture failure.
+int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    //  We'll get a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    // (_buffer is not nulled here because it is reassigned just below.)
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    // Incoming frames are I420; the render buffer holds the ARGB conversion.
+    _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Delete a possible old texture
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+
+    if (glErr != GL_NO_ERROR)
+    {
+        // NOTE(review): the error from glGenTextures is fetched but then
+        // deliberately(?) ignored -- confirm whether it should fail here.
+    }
+
+    // Do the setup for both textures
+    // Note: we setup two textures even if we're not running full screen
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    // Set texture parameters
+    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+    //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+
+    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+    // Hint Apple's driver that the texture memory can be shared with the
+    // client (avoids an extra copy on upload).
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    // Maximum width/height for a texture
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        // Image too big for memory
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up the texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+            0, // level
+            GL_RGBA, // internal format
+            _width, // width
+            _height, // height
+            0, // border 0/1 = off/on
+            _pixelFormat, // format, GL_BGRA
+            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+            _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Called from video engine when a new frame should be rendered.
+int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
+{
+    // Converts the incoming I420 frame to the channel's RGB layout and
+    // uploads it into the GL texture. Returns 0 on success or when there
+    // is no texture yet; -1 on size mismatch, conversion or GL failure.
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        // No texture allocated yet (FrameSizeChange not run); drop frame.
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    if (bufferSize != _incommingBufferSize)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Setting stride = width.
+    // NOTE(review): conversion target is kBGRA while the texture was
+    // declared with _pixelFormat (GL_RGBA) and GL_UNSIGNED_INT_8_8_8_8;
+    // this relies on the packed-int byte ordering -- confirm on the
+    // supported (big-endian PPC vs. Intel) targets.
+    int rgbret = ConvertFromYV12(buffer, _width, kBGRA, 0, _width, _height,
+                                 _buffer);
+    if (rgbret < 0)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    aglSetCurrentContext(_aglContext);
+
+    // Put the new frame into the graphic card texture.
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
+    GLenum glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Copy buffer to texture
+    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+            0, // Level, not use
+            0, // start point x, (low left of pic)
+            0, // start point y,
+            _width, // width
+            _height, // height
+            _pixelFormat, // pictue format for _buffer
+            _pixelDataType, // data type of _buffer
+            (const GLvoid*) _buffer); // the pixel data
+
+    if (glGetError() != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Mark the texture dirty so the render thread redraws this channel.
+    _bufferIsUpdated = true;
+    _owner->UnlockAGLCntx();
+
+    return 0;
+}
+
+// Draws this channel's texture into the back buffer as a textured quad
+// placed according to the normalized stream coordinates set in
+// SetStreamSettings(). No-op (returns 0) when no texture exists.
+int VideoChannelAGL::RenderOffScreenBuffer()
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    // Map the [0,1] stream rectangle into GL clip space [-1,1]; the y axis
+    // is flipped (GL origin is bottom-left, stream origin is top-left).
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    aglSetCurrentContext(_aglContext);
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    // Only touch the viewport when the stretch target actually changed.
+    if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
+    {
+        glViewport(0, 0, _stretchedWidth, _stretchedHeight);
+    }
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    // Now really put the texture into the framebuffer
+    glLoadIdentity();
+
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+
+    // Rectangle textures use pixel (not normalized) texture coordinates,
+    // hence _width/_height below.
+    glBegin(GL_POLYGON);
+    {
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::IsUpdated(bool& isUpdated)
+{
+    // Report whether a new frame has been delivered since the last render
+    // pass (flag is set in DeliverFrame, cleared in RenderOffScreenBuffer).
+    _owner->LockAGLCntx();
+    isUpdated = _bufferIsUpdated;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Stores the normalized placement rectangle for this stream and forces a
+// full buffer/texture rebuild by zeroing the cached size before calling
+// FrameSizeChange() with the previous dimensions.
+int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    // Zeroing the size defeats FrameSizeChange's "unchanged size" early
+    // return, guaranteeing the rebuild actually happens.
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+
+    return retVal;
+}
+
+int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    // Stream cropping is not supported by the AGL renderer.
+    return -1;
+}
+
+#pragma mark VideoRenderAGL WindowRef constructor
+
+// Constructor for rendering directly into a Carbon WindowRef. Creates
+// (but does not start) the screen-update thread and its timer event;
+// Init() starts them.
+VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( 0),
+_windowRef( windowRef),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateThread( 0),
+_screenUpdateEvent( 0),
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false),
+_threadID( )
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
+
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+
+    // Validity of the window is only traced, not enforced.
+    if(!IsValidWindowPtr(_windowRef))
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
+    }
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+}
+
+// this is a static function. It has been registered (in class constructor) to be called on various window redrawing or resizing.
+// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
+#pragma mark WindowRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
+        EventRef theEvent,
+        void* userData)
+{
+    // Static Carbon event callback, registered in the constructor with
+    // 'this' passed as userData. Any window geometry event triggers a
+    // re-clip and re-render of the off-screen buffers.
+    WindowRef windowRef = NULL;
+
+    int eventType = GetEventKind(theEvent);
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    GetEventParameter (theEvent,
+            kEventParamDirectObject,
+            typeWindowRef,
+            NULL,
+            sizeof (WindowRef),
+            NULL,
+            &windowRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+
+    // Only geometry-related events refresh the UI.
+    bool updateUI;
+    switch (eventType)
+    {
+        case kEventWindowBoundsChanged:
+        case kEventWindowBoundsChanging:
+        case kEventWindowZoomed:
+        case kEventWindowExpanding:
+        case kEventWindowExpanded:
+        case kEventWindowClickResizeRgn:
+        case kEventWindowClickDragRgn:
+            updateUI = true;
+            break;
+        default:
+            updateUI = false;
+            break;
+    }
+
+    if (updateUI)
+    {
+        obj->ParentWindowResized(windowRef);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return noErr;
+}
+
+#pragma mark VideoRenderAGL HIViewRef constructor
+
+// Constructor for rendering into a Carbon HIViewRef. Creates (but does
+// not start) the screen-update thread and timer event, and optionally
+// installs resize event handlers on the view and/or its parent window.
+VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( windowRef),
+_windowRef( 0),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateThread( 0),
+_screenUpdateEvent( 0),
+// NOTE(review): _isHIViewRef is initialized to false even though this is
+// the HIViewRef constructor; the HIView code path in ScreenUpdateProcess
+// keys off this flag -- confirm whether this should be true.
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false),
+_threadID( )
+{
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
+    //    _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
+
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
+    // The event handler looks for window resize events and adjusts the offset of the controls.
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
+
+
+    static const EventTypeSpec windowEventTypes[] =
+    {
+        kEventClassWindow, kEventWindowBoundsChanged,
+        kEventClassWindow, kEventWindowBoundsChanging,
+        kEventClassWindow, kEventWindowZoomed,
+        kEventClassWindow, kEventWindowExpanded,
+        kEventClassWindow, kEventWindowClickResizeRgn,
+        kEventClassWindow, kEventWindowClickDragRgn
+    };
+
+    WindowRef parentWindow = HIViewGetWindow(windowRef);
+
+    InstallWindowEventHandler (parentWindow,
+            NewEventHandlerUPP (sHandleWindowResized),
+            GetEventTypeCount(windowEventTypes),
+            windowEventTypes,
+            (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
+            &_windowEventHandlerRef);
+
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER	
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
+
+    static const EventTypeSpec hiviewEventTypes[] =
+    {
+        kEventClassControl, kEventControlBoundsChanged,
+        kEventClassControl, kEventControlDraw
+        //			kEventControlDragLeave
+        //			kEventControlDragReceive
+        //			kEventControlGetFocusPart
+        //			kEventControlApplyBackground
+        //			kEventControlDraw
+        //			kEventControlHit
+
+    };
+
+    HIViewInstallEventHandler(_hiviewRef,
+            NewEventHandlerUPP(sHandleHiViewResized),
+            GetEventTypeCount(hiviewEventTypes),
+            hiviewEventTypes,
+            (void *) this,
+            &_hiviewEventHandlerRef);
+
+#endif
+}
+
+// this is a static function. It has been registered (in constructor) to be called on various window redrawing or resizing.
+// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
+#pragma mark HIViewRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
+{
+    // Static Carbon event callback for the HIView, registered in the
+    // constructor with 'this' passed as userData.
+    HIViewRef hiviewRef = NULL;
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    int eventType = GetEventKind(theEvent);
+    OSStatus status = noErr;
+    status = GetEventParameter (theEvent,
+            kEventParamDirectObject,
+            typeControlRef,
+            NULL,
+            sizeof (ControlRef),
+            NULL,
+            &hiviewRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+    WindowRef parentWindow = HIViewGetWindow(hiviewRef);
+
+    // Only bounds-changed and draw events trigger a refresh.
+    bool updateUI = (kEventControlBoundsChanged == eventType) ||
+                    (kEventControlDraw == eventType);
+
+    if (updateUI)
+    {
+        obj->ParentWindowResized(parentWindow);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return status;
+}
+
+// Tears down the renderer in dependency order: event handlers first (so
+// no callbacks arrive while state is destroyed), then the update thread
+// and its event, the AGL context, the channels/zOrder maps and, finally,
+// the critical section itself.
+VideoRenderAGL::~VideoRenderAGL()
+{
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
+
+    // Single declaration shared by all #ifdef branches below. The original
+    // code declared OSStatus twice (once inside USE_EVENT_HANDLERS and once
+    // unconditionally), which fails to compile when that macro is defined.
+    OSStatus status = noErr;
+    (void) status; // may be unused depending on the #ifdef configuration
+
+#ifdef USE_EVENT_HANDLERS
+    // remove event handlers
+    if(_isHIViewRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+    }
+    else
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+    }
+    if(noErr != status)
+    {
+        if(_isHIViewRef)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+        else
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+
+#endif
+
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if(_windowEventHandlerRef)
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER	
+    if(_hiviewEventHandlerRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+    }
+#endif
+
+    // Signal event to exit thread, then delete it
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    _screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_aglContext != 0)
+    {
+        // The context must be current before it can be destroyed safely.
+        aglSetCurrentContext(_aglContext);
+        aglDestroyContext(_aglContext);
+        _aglContext = 0;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+    while (it!= _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+
+    // The critical section was allocated in the constructors via
+    // CriticalSectionWrapper::CreateCriticalSection(); release it here to
+    // avoid leaking it (the original left this commented out).
+    delete &_renderCritSec;
+}
+
+int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
+{
+    // Query the AGL library for its major/minor version numbers.
+    aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
+    return 0;
+}
+
+// Starts the render thread, arms the ~60 Hz render timer and creates the
+// shared mixing context. Returns 0 on success, -1 on failure.
+int VideoRenderAGL::Init()
+{
+    LockAGLCntx();
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    unsigned int threadId;
+    _screenUpdateThread->Start(threadId);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    // Create mixing textures
+    if (CreateMixingContext() == -1)
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Creates and registers a new rendering channel at the given zOrder and
+// normalized placement. Returns the new channel, or NULL if the channel
+// id already exists or the stream settings are rejected.
+// (The original code contained two stray 'k' tokens -- one appended to an
+// UnlockAGLCntx() call, one on a line of its own -- which could not
+// compile; both are removed here.)
+VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
+
+    if (HasChannel(channel))
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+        // There are already one channel using this zOrder
+        // TODO: Allow multiple channels with same zOrder
+    }
+
+    VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
+
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        // Settings rejected: free the half-constructed channel.
+        delete newAGLChannel;
+        newAGLChannel = NULL;
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+
+    _aglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    UnlockAGLCntx();
+    return newAGLChannel;
+}
+
+int VideoRenderAGL::DeleteAllAGLChannels()
+{
+    // Free every channel object, then drop all map entries.
+    CriticalSectionScoped cs(_renderCritSec);
+
+    for (std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+         it != _aglChannels.end(); ++it)
+    {
+        delete it->second;
+    }
+    _aglChannels.clear();
+    return 0;
+}
+
+int VideoRenderAGL::DeleteAGLChannel(int channel)
+{
+    // Remove and free the channel with the given id, plus its entry in the
+    // zOrder map. Returns -1 when no such channel is registered.
+    CriticalSectionScoped cs(_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator chanIt = _aglChannels.find(channel);
+    if (chanIt == _aglChannels.end())
+    {
+        return -1;
+    }
+    delete chanIt->second;
+    _aglChannels.erase(chanIt);
+
+    // Drop the (zOrder -> channel) mapping, if present.
+    for (std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+         zIt != _zOrderToChannel.end(); ++zIt)
+    {
+        if (zIt->second == channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+// Stops and deletes the screen-update thread and its timer event.
+int VideoRenderAGL::StopThread()
+{
+    CriticalSectionScoped cs(_renderCritSec);
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    // NOTE(review): _screenUpdateThread is left non-NULL here (assignment
+    // commented out below) while the thread object is deleted, and the
+    // destructor also stops/deletes the thread -- this looks like a
+    // dangling-pointer / double-delete risk if StopThread() is called
+    // before destruction; confirm the intended call sequence.
+    //_screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderAGL::IsFullScreen()
+{
+    // Returns the full-screen flag captured at construction time.
+    CriticalSectionScoped cs(_renderCritSec);
+    return _fullScreen;
+}
+
+bool VideoRenderAGL::HasChannels()
+{
+    // True when at least one channel is registered.
+    CriticalSectionScoped cs(_renderCritSec);
+    return !_aglChannels.empty();
+}
+
+bool VideoRenderAGL::HasChannel(int channel)
+{
+    // True when a channel with this id is registered.
+    CriticalSectionScoped cs(_renderCritSec);
+    return _aglChannels.find(channel) != _aglChannels.end();
+}
+
+int VideoRenderAGL::GetChannels(std::list<int>& channelList)
+{
+    // Append every registered channel id to channelList.
+    CriticalSectionScoped cs(_renderCritSec);
+
+    for (std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+         it != _aglChannels.end(); ++it)
+    {
+        channelList.push_back(it->first);
+    }
+
+    return 0;
+}
+
+// Updates the stream placement of an existing channel and, if its zOrder
+// changed, re-homes it in the zOrder map. Returns the channel, or NULL
+// when the channel does not exist or the new settings are rejected.
+// (The inner zOrder iterator was renamed from 'it' to 'zIt': the original
+// shadowed the channel-map iterator of the same name.)
+VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+
+    if (it == _aglChannels.end())
+    {
+        return NULL;
+    }
+
+    VideoChannelAGL* aglChannel = it->second;
+    if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        return NULL;
+    }
+
+    // Move the (zOrder -> channel) mapping if the zOrder changed.
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while (zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == channel)
+        {
+            if (zIt->first != zOrder)
+            {
+                _zOrderToChannel.erase(zIt);
+                _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+            }
+            break;
+        }
+        zIt++;
+    }
+    return aglChannel;
+}
+
+bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
+{
+    // Thread entry point: forward to the owning instance's update loop.
+    VideoRenderAGL* renderer = static_cast<VideoRenderAGL*>(obj);
+    return renderer->ScreenUpdateProcess();
+}
+
+bool VideoRenderAGL::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return false;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == GL_FALSE)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        // We have a new window size, update the context.
+        if (aglUpdateContext(_aglContext) == GL_FALSE)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+
+    // this section will poll to see if the window size has changed
+    // this is causing problem w/invalid windowRef
+    // this code has been modified and exists now in the window event handler
+#ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if (_isHIViewRef)
+    {
+
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        WindowRef window = HIViewGetWindow(_hiviewRef);
+
+        if(FALSE == IsValidWindowPtr(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalide hiviewRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        if (window == NULL)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        if(FALSE == MacIsWindowVisible(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        HIRect viewBounds; // Placement and size for HIView
+        int windowWidth = 0; // Parent window width
+        int windowHeight = 0; // Parent window height
+
+        // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittentaly if the OS decides it needs to push it into the back for a moment.
+        // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seems not to crash
+        Rect contentBounds =
+        {   0, 0, 0, 0}; // The bounds for the parent window
+
+#if		defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif	defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+        Rect globalBounds =
+        {   0, 0, 0, 0}; // The bounds for the parent window
+        globalBounds.top = contentBounds.top;
+        globalBounds.right = contentBounds.right;
+        globalBounds.bottom = contentBounds.bottom;
+        globalBounds.left = contentBounds.left;
+
+        windowHeight = globalBounds.bottom - globalBounds.top;
+        windowWidth = globalBounds.right - globalBounds.left;
+
+        // Get the size of the HIViewRef
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        // Check if this is the first call..
+        if (_lastWindowHeight == -1 &&
+                _lastWindowWidth == -1)
+        {
+            _lastWindowWidth = windowWidth;
+            _lastWindowHeight = windowHeight;
+
+            _lastViewBounds.origin.x = viewBounds.origin.x;
+            _lastViewBounds.origin.y = viewBounds.origin.y;
+            _lastViewBounds.size.width = viewBounds.size.width;
+            _lastViewBounds.size.height = viewBounds.size.height;
+        }
+
+
+        bool resized = false;
+
+        // Check if parent window size has changed
+        if (windowHeight != _lastWindowHeight ||
+                windowWidth != _lastWindowWidth)
+        {
+            resized = true;
+        }
+
+        // Check if the HIView has new size or is moved in the parent window
+        if (_lastViewBounds.origin.x != viewBounds.origin.x ||
+                _lastViewBounds.origin.y != viewBounds.origin.y ||
+                _lastViewBounds.size.width != viewBounds.size.width ||
+                _lastViewBounds.size.height != viewBounds.size.height)
+        {
+            // The HiView is resized or has moved.
+            resized = true;
+        }
+
+        if (resized)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
+
+            // Calculate offset between the windows
+            // {x, y, widht, height}, x,y = lower left corner
+            const GLint offs[4] =
+            {   (int)(0.5f + viewBounds.origin.x),
+                (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+                viewBounds.size.width, viewBounds.size.height};
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds	t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
+            //contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
+
+            aglSetDrawable (_aglContext, GetWindowPort(window));
+            aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+            aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+            // We need to change the viewport too if the HIView size has changed
+            glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+        }
+        _lastWindowWidth = windowWidth;
+        _lastWindowHeight = windowHeight;
+
+        _lastViewBounds.origin.x = viewBounds.origin.x;
+        _lastViewBounds.origin.y = viewBounds.origin.y;
+        _lastViewBounds.size.width = viewBounds.size.width;
+        _lastViewBounds.size.height = viewBounds.size.height;
+
+    }
+#endif
+    if (_fullScreen)
+    {
+        // TODO
+        // We use double buffers, must always update
+        //RenderOffScreenBuffersToBackBuffer();
+    }
+    else
+    {
+        // Check if there are any updated buffers
+        bool updated = false;
+
+        // TODO: check if window size is updated!
+        // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+        while (it != _aglChannels.end())
+        {
+
+            VideoChannelAGL* aglChannel = it->second;
+            aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+            aglChannel->IsUpdated(updated);
+            if (updated)
+            {
+                break;
+            }
+            it++;
+        }
+
+        if (updated)
+        {
+            // At least on buffers is updated, we need to repaint the texture
+            if (RenderOffScreenBuffers() != -1)
+            {
+                // MF
+                //SwapAndDisplayBuffers();
+            }
+            else
+            {
+                // Error updating the mixing texture, don't swap.
+            }
+        }
+    }
+
+    UnlockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
+    return true;
+}
+
+// Called when the HIView's owner window has been resized.
+// Re-reads the parent window bounds, updates the cached globals, then
+// re-applies the AGL buffer rect and GL viewport for the HIView.
+void VideoRenderAGL::ParentWindowResized(WindowRef window)
+{
+    //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
+
+    LockAGLCntx();
+
+    // Reset the flag; it is set again once the new bounds are stored below.
+    _windowHasResized = false;
+
+    // Bug fix: a stray token after LockAGLCntx() was removed, and the NULL
+    // check now happens BEFORE |window| is handed to any Window Manager call.
+    if (window == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "windowRef = NULL");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == HIViewIsValid(_hiviewRef))
+    {
+        //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == IsValidWindowPtr(window))
+    {
+        //WEBRTC_LOG(kTraceError, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == MacIsWindowVisible(window))
+    {
+        //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
+        UnlockAGLCntx();
+        return;
+    }
+
+    Rect contentBounds =
+    {   0, 0, 0, 0};
+
+#if		defined(USE_CONTENT_RGN)
+    GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif	defined(USE_STRUCT_RGN)
+    GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+    //WEBRTC_LOG(kTraceDebug, "%s contentBounds	t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+
+    // update global vars
+    _currentParentWindowBounds.top = contentBounds.top;
+    _currentParentWindowBounds.left = contentBounds.left;
+    _currentParentWindowBounds.bottom = contentBounds.bottom;
+    _currentParentWindowBounds.right = contentBounds.right;
+
+    _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
+    _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
+
+    _windowHasResized = true;
+
+    // ********* update AGL offsets
+    HIRect viewBounds;
+    HIViewGetBounds(_hiviewRef, &viewBounds);
+    HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+    // AGL_BUFFER_RECT is {x, y, width, height} with x,y at the window's
+    // lower-left corner, hence the height flip below.
+    const GLint offs[4] =
+    {   (int)(0.5f + viewBounds.origin.x),
+        (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+        viewBounds.size.width, viewBounds.size.height};
+    //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
+    //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+    aglSetCurrentContext(_aglContext);
+    aglSetDrawable (_aglContext, GetWindowPort(window));
+    aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+    aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+    // We need to change the viewport too if the HIView size has changed
+    glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+    UnlockAGLCntx();
+
+    return;
+}
+
+// Creates the AGL/OpenGL mixing context used to composite all channels.
+// Requests a hardware-accelerated, double-buffered RGBA pixel format,
+// attaches the context to the window (or the HIView's owner window),
+// then configures surface opacity, swap interval, viewport and clipping.
+// Returns 0 on success, -1 on any failure (lock released on all paths).
+int VideoRenderAGL::CreateMixingContext()
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
+
+    // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
+    // a hardware renderer is used and not a software renderer.
+
+    GLint attributes[] =
+    {
+        AGL_DOUBLEBUFFER,
+        AGL_WINDOW,
+        AGL_RGBA,
+        AGL_NO_RECOVERY,
+        AGL_ACCELERATED,
+        AGL_RED_SIZE, 8,
+        AGL_GREEN_SIZE, 8,
+        AGL_BLUE_SIZE, 8,
+        AGL_ALPHA_SIZE, 8,
+        AGL_DEPTH_SIZE, 24,
+        AGL_NONE,
+    };
+
+    AGLPixelFormat aglPixelFormat;
+
+    // ***** Set up the OpenGL Context *****
+
+    // Get a pixel format for the attributes above
+    aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
+    if (NULL == aglPixelFormat)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not create pixel format");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Create an AGL context
+    _aglContext = aglCreateContext(aglPixelFormat, NULL);
+    if (_aglContext == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not create AGL context");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Release the pixel format memory
+    aglDestroyPixelFormat(aglPixelFormat);
+
+    // Set the current AGL context for the rest of the settings
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (_isHIViewRef)
+    {
+        //---------------------------
+        // BEGIN: new test code
+#if 0
+        // Don't use this one!
+        // There seems to be an OS X bug that can't handle
+        // movements and resizing of the parent window
+        // and or the HIView
+        if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
+        {
+            //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
+            UnlockAGLCntx();
+            return -1;
+        }
+#else
+
+        // Instead of aglSetHIViewRef (see above), attach to the owning
+        // window's port and restrict drawing with AGL_BUFFER_RECT.
+
+        // Get the parent window for this control
+        WindowRef window = GetControlOwner(_hiviewRef);
+
+        Rect globalBounds =
+        {   0,0,0,0}; // The bounds for the parent window
+        HIRect viewBounds; // Placement in the parent window and size.
+        int windowHeight = 0;
+
+        //		Rect titleBounds = {0,0,0,0};
+        //		GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
+        //		_titleBarHeight = titleBounds.top - titleBounds.bottom;
+        //		if(0 == _titleBarHeight)
+        //		{
+        //            //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
+        //            //return -1;
+        //		}
+
+
+        // Get the bounds for the parent window
+#if		defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &globalBounds);
+#elif	defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
+#endif
+        windowHeight = globalBounds.bottom - globalBounds.top;
+
+        // Get the bounds for the HIView
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        // AGL_BUFFER_RECT is {x, y, width, height} with the origin at the
+        // window's lower-left corner, hence the windowHeight flip.
+        const GLint offs[4] =
+        {   (int)(0.5f + viewBounds.origin.x),
+            (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+            viewBounds.size.width, viewBounds.size.height};
+
+        //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+
+        aglSetDrawable (_aglContext, GetWindowPort(window));
+        aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+        aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+        GLint surfaceOrder = 1; // 1: above window, -1 below.
+        //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
+        aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
+
+        glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+#endif
+
+    }
+    else
+    {
+        // Full-window rendering: attach the context directly to the window.
+        if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
+        {
+            //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
+            UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    _windowWidth = _windowRect.right - _windowRect.left;
+    _windowHeight = _windowRect.bottom - _windowRect.top;
+
+    // opaque surface
+    int surfaceOpacity = 1;
+    if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Swap interval: 0 = do NOT sync with the vertical retrace (fast),
+    // 1 = sync with the vertical retrace (slower).
+    int swapInterval = 0;
+    if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Update the rect with the current size
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not get window size");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Disable not needed functionality to increase performance
+    glDisable(GL_DITHER);
+    glDisable(GL_ALPHA_TEST);
+    glDisable(GL_STENCIL_TEST);
+    glDisable(GL_FOG);
+    glDisable(GL_TEXTURE_2D);
+    glPixelZoom(1.0, 1.0);
+
+    glDisable(GL_BLEND);
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    GLenum glErr = glGetError();
+
+    if (glErr)
+    {
+        // GL errors are deliberately ignored here (logging was removed);
+        // the context is still usable.
+    }
+
+    UpdateClipping();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Repaints every channel into the back buffer, highest z-order first, then
+// swaps/flushes the result to screen via SwapAndDisplayBuffers().
+// Returns 0 on success, -1 if the window rect or GL context is unavailable.
+int VideoRenderAGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size, it might have changed since last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not get window rect");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+         rIt != _zOrderToChannel.rend();
+         rIt++)
+    {
+        int channelId = rIt->second;
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
+
+        // Bug fix: guard against a stale z-order entry whose channel has
+        // been removed from _aglChannels (find() previously went unchecked).
+        if (it == _aglChannels.end() || it->second == NULL)
+        {
+            continue;
+        }
+
+        it->second->RenderOffScreenBuffer();
+    }
+
+    SwapAndDisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Pushes the rendered frame to the screen. In windowed mode this flushes
+// GL, swaps the AGL buffers and marks the HIView dirty; the full-screen
+// double-buffer path is not implemented yet. Always returns 0.
+int VideoRenderAGL::SwapAndDisplayBuffers()
+{
+    LockAGLCntx();
+
+    if (!_fullScreen)
+    {
+        // Windowed path: flush pending GL commands, swap the AGL buffers
+        // and request a repaint of the HIView.
+        glFlush();
+        aglSwapBuffers(_aglContext);
+        HIViewSetNeedsDisplay(_hiviewRef, true);
+    }
+    // else: TODO — full-screen mode should swap front/back buffers
+    // (aglSwapBuffers) and advance the texture index; nothing happens here
+    // in that mode for now.
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Fills |rect| with the current bounds of the render target (HIView or
+// window). Returns 0 on success, -1 on failure.
+// Bug fixes vs. original: the function was declared int but never returned
+// a value (undefined behavior), and the invalid-HIView branch released the
+// lock yet fell through, using the invalid view and unlocking twice.
+int VideoRenderAGL::GetWindowRect(Rect& rect)
+{
+    LockAGLCntx();
+
+    if (_isHIViewRef)
+    {
+        if (!_hiviewRef)
+        {
+            //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        HIRect HIViewRect1;
+        if (FALSE == HIViewIsValid(_hiviewRef))
+        {
+            rect.top = 0;
+            rect.left = 0;
+            rect.right = 0;
+            rect.bottom = 0;
+            //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        HIViewGetBounds(_hiviewRef, &HIViewRect1);
+        HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
+
+        // Clamp negative components to 0. NOTE: origin.x maps to rect.top
+        // and origin.y to rect.left here, mirroring the original mapping.
+        rect.top = (HIViewRect1.origin.x < 0) ? 0 : HIViewRect1.origin.x;
+        rect.left = (HIViewRect1.origin.y < 0) ? 0 : HIViewRect1.origin.y;
+        rect.right = (HIViewRect1.size.width < 0) ? 0 : HIViewRect1.size.width;
+        rect.bottom = (HIViewRect1.size.height < 0) ? 0 : HIViewRect1.size.height;
+
+        ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    if (_windowRef)
+    {
+        GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    //WEBRTC_LOG(kTraceError, "No WindowRef");
+    UnlockAGLCntx();
+    return -1;
+}
+
+// Recomputes the visible region of the HIView and installs it as the AGL
+// clip region so GL output does not draw over overlapping sibling views.
+// Returns -1 if the HIView is invalid, otherwise `true` (i.e. 1).
+// NOTE(review): returning `true` from an int function yields 1 while the
+// error path returns -1 — callers should not compare against 0.
+int VideoRenderAGL::UpdateClipping()
+{
+    //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
+    LockAGLCntx();
+
+    if(_isHIViewRef)
+    {
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+            //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        RgnHandle visibleRgn = NewRgn();
+        SetEmptyRgn (visibleRgn);
+
+        // Errors from the region calculation are deliberately ignored;
+        // clipping is applied with whatever region was produced.
+        if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
+        {
+        }
+
+        if(GL_FALSE == aglSetCurrentContext(_aglContext))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
+        }
+
+        if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
+        }
+
+        // Hand the computed region to AGL as the clip region.
+        if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
+        }
+
+        DisposeRgn(visibleRgn);
+    }
+    else
+    {
+        // Rendering directly into a window: no per-view clipping needed.
+        //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
+    }
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
+    UnlockAGLCntx();
+    return true;
+}
+
+// Computes the region of |control| that is actually visible in its owner
+// window: seeds |visibleRgn| with the root control's structure region,
+// intersects it with each ancestor control up to the master control, and
+// subtracts the union of overlapping child/sibling control regions.
+// |visibleRgn| must be a valid RgnHandle and receives the result.
+// |clipChildren| also subtracts |control|'s own children when true.
+// Always returns 0; Control Manager errors are skipped, not propagated.
+// NOTE(review): no lock is taken (LockAGLCntx is commented out) — callers
+// are presumably expected to hold the render lock already; confirm.
+// NOTE(review): tempRgn is only disposed inside the IsControlVisible branch,
+// so it leaks when the control is invisible.
+int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
+{
+
+    //	LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
+    OSStatus osStatus = 0;
+    OSErr osErr = 0;
+
+    RgnHandle tempRgn = NewRgn();
+    if (IsControlVisible(control))
+    {
+        RgnHandle childRgn = NewRgn();
+        WindowRef window = GetControlOwner(control);
+        ControlRef rootControl;
+        GetRootControl(window, &rootControl); // 'wvnc'
+        ControlRef masterControl;
+        osStatus = GetSuperControl(rootControl, &masterControl);
+        // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
+
+        if (masterControl != NULL)
+        {
+            CheckValidRegion(visibleRgn);
+            // init visibleRgn with region of 'wvnc'
+            osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
+            // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
+            //GetSuperControl(rootControl, &rootControl);
+            // Walk from |control| up toward the master control, narrowing
+            // visibleRgn at each level.
+            ControlRef tempControl = control, lastControl = 0;
+            while (tempControl != masterControl) // current control != master
+
+            {
+                CheckValidRegion(tempRgn);
+
+                // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
+                ControlRef subControl;
+
+                osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
+                // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
+                CheckValidRegion(tempRgn);
+
+                osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
+                // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
+                CheckValidRegion(tempRgn);
+
+                SectRgn(tempRgn, visibleRgn, visibleRgn);
+                CheckValidRegion(tempRgn);
+                CheckValidRegion(visibleRgn);
+                if (EmptyRgn(visibleRgn)) // if the region is empty, bail
+                break;
+
+                if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function
+
+                {
+                    UInt16 numChildren;
+                    osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
+                    // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
+
+                    // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
+                    // Iterate children from topmost (highest index) down,
+                    // accumulating their regions into childRgn.
+                    for (int i = 0; i < numChildren; i++)
+                    {
+                        osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
+                        // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
+                        if ( subControl == lastControl ) // break because of zorder
+
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
+                            break;
+                        }
+
+                        if (!IsControlVisible(subControl)) // dont' clip invisible controls
+
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
+                            continue;
+                        }
+
+                        if(!subControl) continue;
+
+                        osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg
+                        // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
+                        CheckValidRegion(tempRgn);
+                        if(osStatus != 0)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
+                            continue;
+                        }
+                        if(!tempRgn)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                        osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
+                        CheckValidRegion(tempRgn);
+                        // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
+                        if(osStatus != 0)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
+                            continue;
+                        }
+                        if(!rootControl)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                        UnionRgn(tempRgn, childRgn, childRgn);
+                        CheckValidRegion(tempRgn);
+                        CheckValidRegion(childRgn);
+                        CheckValidRegion(visibleRgn);
+                        if(!childRgn)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                    } // next child control
+                }
+                // Move one level up the control hierarchy.
+                lastControl = tempControl;
+                GetSuperControl(tempControl, &subControl);
+                tempControl = subControl;
+            }
+
+            // Remove everything covered by overlapping children/siblings.
+            DiffRgn(visibleRgn, childRgn, visibleRgn);
+            CheckValidRegion(visibleRgn);
+            CheckValidRegion(childRgn);
+            DisposeRgn(childRgn);
+        }
+        else
+        {
+            // No master control: fall back to the (empty) temp region.
+            CopyRgn(tempRgn, visibleRgn);
+            CheckValidRegion(tempRgn);
+            CheckValidRegion(visibleRgn);
+        }
+        DisposeRgn(tempRgn);
+    }
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
+    //_aglCritPtr->Leave();
+    return 0;
+}
+
+// Debug sanity-check of a QuickDraw region handle: asserts (in debug
+// builds) on an invalid handle or a pending QDError. Always returns true
+// when it returns at all; the size/MemError branches are diagnostics only.
+bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
+{
+    // A zero-sized handle suggests a bad or purged region. The original
+    // diagnostics here were log-only, so we merely query MemError().
+    if (0 == GetHandleSize((Handle)rHandle))
+    {
+        OSErr memErr = MemError();
+        if (noErr != memErr)
+        {
+            // Could not get the handle size; MemError() holds the reason.
+        }
+        else
+        {
+            // Size is zero yet MemError() reports noErr.
+        }
+    }
+
+    if (false == IsValidRgnHandle(rHandle))
+    {
+        assert(false);
+    }
+
+    const int err = QDError();
+    if (-147 == err)
+    {
+        // region too big
+        assert(false);
+    }
+    else if (-149 == err)
+    {
+        // not enough stack
+        assert(false);
+    }
+    else if (0 != err)
+    {
+        // unknown QDError
+        assert(false);
+    }
+
+    return true;
+}
+
+// Switching to a new window at runtime is not supported by the AGL
+// renderer; callers always receive -1.
+int VideoRenderAGL::ChangeWindow(void* newWindowRef)
+{
+    LockAGLCntx();
+    UnlockAGLCntx();
+    return -1;
+}
+// Changing the module id after construction is not supported; always -1.
+WebRtc_Word32 VideoRenderAGL::ChangeUniqueID(WebRtc_Word32 id)
+{
+    LockAGLCntx();
+    UnlockAGLCntx();
+    return -1;
+}
+
+// Starts (or resumes, if previously paused by StopRender) the screen-update
+// thread and its periodic timer. Returns 0 on success, -1 on failure.
+WebRtc_Word32 VideoRenderAGL::StartRender()
+{
+    LockAGLCntx();
+
+    // Drive updates at the assumed monitor refresh rate (60 Hz).
+    const unsigned int MONITOR_FREQ = 60;
+
+    if(TRUE == _renderingIsPaused)
+    {
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
+
+        // we already have the thread. Most likely StopRender() was called and they were paused
+        if(FALSE == _screenUpdateThread->Start(_threadID))
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+        if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        // Bug fix: the original returned here while still holding the lock.
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if (!_screenUpdateThread)
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start(_threadID);
+    _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
+
+    UnlockAGLCntx();
+    return 0;
+
+}
+
+// Pauses rendering: stops the screen-update thread and its timer.
+// Returns 0 when rendering is (now) paused, -1 if stopping failed.
+WebRtc_Word32 VideoRenderAGL::StopRender()
+{
+    LockAGLCntx();
+
+    // No thread/timer was ever created: nothing to stop, mark as paused.
+    if (!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    // Stop the thread first; the timer is only stopped when that succeeds
+    // (short-circuit, matching the original behavior).
+    if (FALSE == _screenUpdateThread->Stop() ||
+        FALSE == _screenUpdateEvent->StopTimer())
+    {
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
+        _renderingIsPaused = FALSE;
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Deletes rendering channels. NOTE(review): despite taking |streamID|,
+// this implementation deletes EVERY channel in _aglChannels and clears the
+// whole map — confirm this is the intended semantics for callers that mean
+// to delete a single stream. Always returns 0.
+WebRtc_Word32 VideoRenderAGL::DeleteAGLChannel(const WebRtc_UWord32 streamID)
+{
+
+    LockAGLCntx();
+
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
+        delete channel;
+        it++;
+    }
+    // Drop the (now dangling) pointers from the map.
+    _aglChannels.clear();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Per-stream layout properties are not tracked by this renderer; the
+// output parameters are left untouched and -1 is returned.
+WebRtc_Word32 VideoRenderAGL::GetChannelProperties(const WebRtc_UWord16 streamId,
+WebRtc_UWord32& zOrder,
+float& left,
+float& top,
+float& right,
+float& bottom)
+{
+    LockAGLCntx();
+    UnlockAGLCntx();
+    return -1;
+}
+
+// Enters the renderer's critical section; every Lock must be paired with
+// an UnlockAGLCntx on all code paths.
+void VideoRenderAGL::LockAGLCntx()
+{
+    _renderCritSec.Enter();
+}
+// Leaves the renderer's critical section.
+void VideoRenderAGL::UnlockAGLCntx()
+{
+    _renderCritSec.Leave();
+}
+
+} //namespace webrtc
+
+#endif   // CARBON_RENDERING
+
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_agl.h b/trunk/src/modules/video_render/main/source/mac/video_render_agl.h
new file mode 100644
index 0000000..bdee619
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_agl.h
@@ -0,0 +1,185 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+
+
+#include "video_render_defines.h"
+
+
+#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
+#define NEW_HIVIEW_EVENT_HANDLER		1
+#define USE_STRUCT_RGN
+
+#include <AGL/agl.h>
+#include <Carbon/Carbon.h>
+#include <OpenGL/OpenGL.h>
+#include <OpenGL/glu.h>
+#include <OpenGL/glext.h>
+#include <list>
+#include <map>
+
+class VideoRenderAGL;
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+// One rendered video stream drawn into a shared AGLContext.  Implements
+// VideoRenderCallback so incoming frames can be delivered directly to it;
+// VideoRenderAGL owns the channel and drives the actual drawing.
+class VideoChannelAGL : public VideoRenderCallback
+{
+public:
+
+	VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
+    virtual ~VideoChannelAGL();
+    // Called when the incoming frame size or stream count changes.
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+    // Receives a raw frame buffer to be uploaded to the GL texture.
+    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);    
+    virtual int UpdateSize(int width, int height);
+    // Normalized [0..1] placement of the stream within the window.
+    int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    // Draws the latest delivered frame into the back buffer.
+    int RenderOffScreenBuffer();
+    int IsUpdated(bool& isUpdated);
+	virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
+	virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
+    
+    
+private:
+	
+	AGLContext      _aglContext;
+	int				_id;
+    VideoRenderAGL* _owner;           // owning renderer (not owned here)
+    int             _width;
+    int             _height;
+	int				_stretchedWidth;
+	int				_stretchedHeight;
+    float           _startHeight;     // normalized placement, see SetStreamSettings
+    float           _startWidth;
+    float           _stopWidth;
+    float           _stopHeight;
+    int				_xOldWidth;
+	int				_yOldHeight;
+	int				_oldStretchedHeight;
+	int				_oldStretchedWidth;
+	unsigned char*  _buffer;          // latest delivered frame data
+    int             _bufferSize;
+    int             _incommingBufferSize;
+    bool            _bufferIsUpdated; // true when a new frame awaits rendering
+	bool			_sizeInitialized;
+    int             _numberOfStreams;
+	bool			_bVideoSizeStartedChanging;
+	GLenum          _pixelFormat;
+    GLenum          _pixelDataType;
+    unsigned int    _texture;	
+};
+
+
+
+
+// Carbon/AGL renderer: owns the AGL mixing context, the per-stream
+// VideoChannelAGL objects, and a screen-update thread that periodically
+// composites and swaps the buffers.  Can draw either into a full WindowRef
+// or into an embedded HIViewRef.
+class VideoRenderAGL
+{
+public:
+    VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
+    VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
+    ~VideoRenderAGL();
+
+    int		Init();
+    // Channel management; channels are owned by this object.
+    VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int		DeleteAGLChannel(int channel);
+    int		DeleteAllAGLChannels();
+    int		StopThread();
+    bool	IsFullScreen();
+    bool	HasChannels();
+    bool	HasChannel(int channel);
+    int		GetChannels(std::list<int>& channelList);
+    // Enter/leave _renderCritSec; must be balanced on every code path.
+    void	LockAGLCntx();    
+    void	UnlockAGLCntx();
+	
+	static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
+	
+	// ********** new module functions ************ //
+	int ChangeWindow(void* newWindowRef);
+	WebRtc_Word32 ChangeUniqueID(WebRtc_Word32 id);
+	WebRtc_Word32 StartRender();
+	WebRtc_Word32 StopRender();
+	WebRtc_Word32 DeleteAGLChannel(const WebRtc_UWord32 streamID);
+	WebRtc_Word32 GetChannelProperties(const WebRtc_UWord16 streamId,
+									 WebRtc_UWord32& zOrder,
+									 float& left,
+									 float& top,
+									 float& right,
+									 float& bottom);
+
+protected:
+    // Thread entry point; forwards to ScreenUpdateProcess() on 'obj'.
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+    int GetWindowRect(Rect& rect);
+	
+private:
+    int		CreateMixingContext();
+    int		RenderOffScreenBuffers();
+    int		SwapAndDisplayBuffers();
+	int		UpdateClipping();
+    int		CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren);
+    bool	CheckValidRegion(RgnHandle rHandle);
+    void	ParentWindowResized(WindowRef window);
+
+    // Carbon GUI event handlers
+    static pascal OSStatus sHandleWindowResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
+    static pascal OSStatus sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
+
+    HIViewRef                       _hiviewRef;
+	WindowRef                       _windowRef;
+    bool                            _fullScreen;
+	int								_id;
+    webrtc::CriticalSectionWrapper&            _renderCritSec;
+    webrtc::ThreadWrapper*                     _screenUpdateThread;
+    webrtc::EventWrapper*                      _screenUpdateEvent;
+	bool                            _isHIViewRef;  // true = HIView mode, false = WindowRef mode
+    AGLContext                      _aglContext;
+    int                             _windowWidth;
+    int                             _windowHeight;
+    int                             _lastWindowWidth;
+    int                             _lastWindowHeight;
+    int                             _lastHiViewWidth;
+    int                             _lastHiViewHeight;
+	int								_currentParentWindowHeight;
+	int								_currentParentWindowWidth;
+	Rect							_currentParentWindowBounds;
+	bool							_windowHasResized;
+	Rect							_lastParentWindowBounds;
+	Rect							_currentHIViewBounds;
+	Rect							_lastHIViewBounds;
+    Rect                            _windowRect;
+    // Channel id -> channel (owned); zOrder -> channel id for draw order.
+    std::map<int, VideoChannelAGL*> _aglChannels;
+    std::multimap<int, int>			_zOrderToChannel;
+	EventHandlerRef					_hiviewEventHandlerRef;
+	EventHandlerRef					_windowEventHandlerRef;
+	HIRect							_currentViewBounds;
+	HIRect							_lastViewBounds;
+	bool							_renderingIsPaused;
+	unsigned int					_threadID;
+	
+	
+	
+    
+};
+
+} //namespace webrtc
+
+
+#endif   // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+
+#endif // CARBON_RENDERING
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.cc b/trunk/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.cc
new file mode 100644
index 0000000..88dc1e6
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.cc
@@ -0,0 +1,297 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(CARBON_RENDERING)
+
+#include "video_render_mac_carbon_impl.h"
+#include "critical_section_wrapper.h"
+#include "video_render_agl.h"
+#include "trace.h"
+#include <AGL/agl.h>
+
+namespace webrtc {
+
+// Constructor: records configuration; the AGL renderer itself is created
+// lazily in Init().  _ptrCarbonRender must start out NULL so Init() and
+// the destructor can tell whether it was ever created (the original left
+// it uninitialized).
+VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const WebRtc_Word32 id,
+        const VideoRenderType videoRenderType,
+        void* window,
+        const bool fullscreen) :
+_id(id),
+_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+_fullScreen(fullscreen),
+_ptrWindow(window),
+_ptrCarbonRender(NULL)
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+
+}
+
+VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
+    // Fixed: the renderer allocated in Init() was leaked.  Free it before
+    // tearing down the critical section it may use.
+    if (_ptrCarbonRender)
+    {
+        delete _ptrCarbonRender;
+        _ptrCarbonRender = NULL;
+    }
+    delete &_renderMacCarbonCritsect;
+}
+
+// Creates the underlying VideoRenderAGL from the window handle supplied
+// at construction.  The handle may be either a WindowRef* or a HIViewRef*;
+// both are probed.  Returns 0 on success, -1 on any failure.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::Init()
+{
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // We don't know if the user passed us a WindowRef or a HIViewRef, so test.
+    bool referenceIsValid = false;
+
+    // Check if it's a valid WindowRef
+    WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
+    if (IsValidWindowPtr(*windowRef))
+    {
+        _ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
+        referenceIsValid = true;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
+    }
+    else
+    {
+        // Not a window; try interpreting the handle as a HIViewRef instead.
+        HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
+        if (HIViewIsValid(*hiviewRef))
+        {
+            _ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
+            referenceIsValid = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, hiviewRef);
+        }
+    }
+
+    if(!referenceIsValid)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    if(!_ptrCarbonRender)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
+        // Fixed: the original logged but fell through and dereferenced the
+        // NULL pointer on the next line.
+        return -1;
+    }
+
+    int retVal = _ptrCarbonRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+// NOTE(review): disabled via the early return below; everything after it
+// is unreachable.  Presumably intentional (feature not supported on the
+// Carbon path yet) -- confirm before removing the dead code.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    return -1;
+
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    _id = id;
+
+    if(_ptrCarbonRender)
+    {
+        _ptrCarbonRender->ChangeUniqueID(_id);
+    }
+
+    return 0;
+}
+
+// NOTE(review): disabled via the early return; the unreachable remainder
+// also assigns _ptrWindow twice and never forwards the change to the
+// renderer.  Confirm whether window changes are meant to be unsupported
+// on Carbon before resurrecting this code.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::ChangeWindow(void* window)
+{
+    return -1;
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+    _ptrWindow = window;
+
+
+    _ptrWindow = window;
+
+    return 0;
+}
+
+// Creates an AGL channel for the stream and returns it as the render
+// callback, or NULL on failure.
+//
+// Fixed: the original dereferenced _ptrCocoaRender and called
+// CreateNSGLChannel -- a member and method of the *Cocoa* implementation
+// that do not exist in this class -- and contained an empty
+// if(!_ptrWindow){} block.
+VideoRenderCallback*
+VideoRenderMacCarbonImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+        const WebRtc_UWord32 zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+
+    if(!_ptrWindow || !_ptrCarbonRender)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d No window or renderer available", __FUNCTION__, __LINE__);
+        return NULL;
+    }
+
+    // left/top/right/bottom map onto the channel's normalized
+    // startWidth/startHeight/stopWidth/stopHeight placement.
+    VideoChannelAGL* AGLChannel =
+        _ptrCarbonRender->CreateAGLChannel(streamId, zOrder, left, top, right, bottom);
+
+    return AGLChannel;
+
+}
+
+// Removes the stream's channel from the underlying AGL renderer.
+// NOTE(review): assumes Init() succeeded; _ptrCarbonRender is dereferenced
+// without a NULL check.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const WebRtc_UWord32 streamId)
+{
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    _ptrCarbonRender->DeleteAGLChannel(streamId);
+
+    return 0;
+}
+
+// NOTE(review): disabled via the early return; the delegation to
+// GetChannelProperties below is unreachable (and that method is itself a
+// stub returning -1 in this revision).  Out-parameters are never written.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+        WebRtc_UWord32& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom) const
+{
+    return -1;
+    return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
+}
+
+// Starts rendering via the underlying AGL renderer.
+// Fixed: guard the pointer -- if Init() failed (or was never called) the
+// original dereferenced an invalid _ptrCarbonRender.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::StartRender()
+{
+    if (!_ptrCarbonRender)
+    {
+        return -1;
+    }
+    return _ptrCarbonRender->StartRender();
+}
+
+// Stops rendering; same NULL guard as StartRender().
+WebRtc_Word32
+VideoRenderMacCarbonImpl::StopRender()
+{
+    if (!_ptrCarbonRender)
+    {
+        return -1;
+    }
+    return _ptrCarbonRender->StopRender();
+}
+
+// Identifies this implementation as the Carbon renderer.
+VideoRenderType
+VideoRenderMacCarbonImpl::RenderType()
+{
+    return kRenderCarbon;
+}
+
+// Preferred incoming pixel format ("Perfered" spelling is part of the
+// IVideoRender interface).
+RawVideoType
+VideoRenderMacCarbonImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+// Always reports windowed mode, regardless of the _fullScreen flag.
+bool
+VideoRenderMacCarbonImpl::FullScreen()
+{
+    return false;
+}
+
+// Graphics-memory query is not implemented on Carbon; reports 0/0 and
+// succeeds so callers have deterministic outputs.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+        WebRtc_UWord64& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return 0;
+}
+
+// Screen-resolution query is not implemented on Carbon (the Cocoa/NSScreen
+// code is kept commented out for reference).  Reports 0x0.
+// Fixed: the original returned 0 without writing either out-parameter,
+// leaving callers with uninitialized values.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::GetScreenResolution(WebRtc_UWord32& screenWidth,
+        WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    screenWidth = 0;
+    screenHeight = 0;
+    //NSScreen* mainScreen = [NSScreen mainScreen];
+
+    //NSRect frame = [mainScreen frame];
+
+    //screenWidth = frame.size.width;
+    //screenHeight = frame.size.height;
+    return 0;
+}
+
+// Frame-rate reporting is not implemented on Carbon; always returns 0.
+WebRtc_UWord32
+VideoRenderMacCarbonImpl::RenderFrameRate(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_renderMacCarbonCritsect);
+    return 0;
+}
+
+// Cropping is not supported on Carbon; stubbed to report success.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::SetStreamCropping(const WebRtc_UWord32 streamId,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+// Re-configuration is not supported on Carbon; stubbed to report success.
+WebRtc_Word32 VideoRenderMacCarbonImpl::ConfigureRenderer(const WebRtc_UWord32 streamId,
+        const unsigned int zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+// Transparent backgrounds are not supported on Carbon; stub.
+WebRtc_Word32
+VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
+{
+    return 0;
+}
+
+// Text overlays are not supported on Carbon; stub.
+WebRtc_Word32 VideoRenderMacCarbonImpl::SetText(const WebRtc_UWord8 textId,
+        const WebRtc_UWord8* text,
+        const WebRtc_Word32 textLength,
+        const WebRtc_UWord32 textColorRef,
+        const WebRtc_UWord32 backgroundColorRef,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+// Bitmap overlays are not supported on Carbon; stub.
+WebRtc_Word32 VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
+        const WebRtc_UWord8 pictureId,
+        const void* colorKey,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+
+} //namespace webrtc
+
+#endif // CARBON_RENDERING
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.h b/trunk/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.h
new file mode 100644
index 0000000..3ff3d26
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(CARBON_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+
+#include "i_video_render.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class VideoRenderAGL;
+
+// Class definitions
+class VideoRenderMacCarbonImpl : IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderMacCarbonImpl(const WebRtc_Word32 id,
+            const VideoRenderType videoRenderType,
+            void* window,
+            const bool fullscreen);
+
+    virtual ~VideoRenderMacCarbonImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+            const WebRtc_UWord32 zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+            WebRtc_UWord32& zOrder,
+            float& left,
+            float& top,
+            float& right,
+            float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+            WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32 GetScreenResolution(WebRtc_UWord32& screenWidth,
+            WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+            const unsigned int zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+            const WebRtc_UWord8* text,
+            const WebRtc_Word32 textLength,
+            const WebRtc_UWord32 textColorRef,
+            const WebRtc_UWord32 backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+            const WebRtc_UWord8 pictureId,
+            const void* colorKey,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable)
+    {
+        // not supported in Carbon at this time
+        return -1;
+    }
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderMacCarbonCritsect;
+    bool _fullScreen;
+    void* _ptrWindow;
+    VideoRenderAGL* _ptrCarbonRender;
+
+};
+
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+#endif // CARBON_RENDERING
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.h b/trunk/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.h
new file mode 100644
index 0000000..f9216bb
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+
+#include "i_video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class VideoRenderNSOpenGL;
+
+// Class definitions
+// Cocoa-backed implementation of IVideoRender: a locked facade over
+// VideoRenderNSOpenGL, which does the actual NSOpenGL drawing.
+// NOTE(review): the base is inherited privately ('class ... : IVideoRender'
+// defaults to private) -- confirm this matches how the factory uses it.
+class VideoRenderMacCocoaImpl : IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderMacCocoaImpl(const WebRtc_Word32 id,
+            const VideoRenderType videoRenderType,
+            void* window,
+            const bool fullscreen);
+
+    virtual ~VideoRenderMacCocoaImpl();
+
+    // Creates the NSOpenGL renderer from the window passed at construction.
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+            const WebRtc_UWord32 zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+            WebRtc_UWord32& zOrder,
+            float& left,
+            float& top,
+            float& right,
+            float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+            WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32 GetScreenResolution(WebRtc_UWord32& screenWidth,
+            WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+            const unsigned int zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+            const WebRtc_UWord8* text,
+            const WebRtc_Word32 textLength,
+            const WebRtc_UWord32 textColorRef,
+            const WebRtc_UWord32 backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+            const WebRtc_UWord8 pictureId,
+            const void* colorKey,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable);
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderMacCocoaCritsect;  // owned; freed in dtor
+    bool _fullScreen;
+    void* _ptrWindow;                  // expected to be a CocoaRenderView*
+    VideoRenderNSOpenGL* _ptrCocoaRender;  // created in Init(), owned
+
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+#endif	// COCOA_RENDERING
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.mm b/trunk/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.mm
new file mode 100644
index 0000000..4698b6e
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.mm
@@ -0,0 +1,269 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#import "cocoa_render_view.h"
+
+#include "video_render_mac_cocoa_impl.h"
+#include "critical_section_wrapper.h"
+#include "video_render_nsopengl.h"
+#include "trace.h"
+
+namespace webrtc {
+
+// Constructor: records configuration; the NSOpenGL renderer is created in
+// Init().  Fixed: _ptrCocoaRender was left uninitialized, so the
+// destructor's 'if (_ptrCocoaRender) delete' was undefined behavior when
+// Init() was never called.
+VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const WebRtc_Word32 id,
+        const VideoRenderType videoRenderType,
+        void* window,
+        const bool fullscreen) :
+_id(id),
+_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+_fullScreen(fullscreen),
+_ptrWindow(window),
+_ptrCocoaRender(NULL)
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+}
+
+// Destructor.  Fixed ordering: free the renderer before destroying the
+// critical section, mirroring reverse order of construction (the original
+// destroyed the critsect first).
+VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
+    if (_ptrCocoaRender)
+    {
+        delete _ptrCocoaRender;
+        _ptrCocoaRender = NULL;
+    }
+    delete &_renderMacCocoaCritsect;
+}
+
+// Creates and initializes the underlying NSOpenGL renderer.
+// Fixed: validate _ptrWindow BEFORE constructing the renderer -- the
+// original allocated VideoRenderNSOpenGL first, handing it a NULL view
+// when the window check then failed.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::Init()
+{
+
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // cast ptrWindow from void* to CocoaRenderer. Void* was once NSOpenGLView, and CocoaRenderer is NSOpenGLView.
+    _ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
+    int retVal = _ptrCocoaRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+// Updates this module's id and forwards it to the NSOpenGL renderer (if
+// one has been created).  Always returns 0.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    _id = id;
+
+    if(_ptrCocoaRender)
+    {
+        _ptrCocoaRender->ChangeUniqueID(_id);
+    }
+
+    return 0;
+}
+
+// Switches rendering to a new window/view.  Returns -1 if 'window' is
+// NULL, otherwise forwards the new view to the NSOpenGL renderer.
+// Fixed: _ptrWindow was assigned twice, and the trace call passed
+// 'window' without a matching format specifier.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::ChangeWindow(void* window)
+{
+
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing window", __FUNCTION__);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+
+    _ptrWindow = window;
+    _ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
+
+    return 0;
+}
+
+// Creates an NSOpenGL channel for the stream and returns it as the render
+// callback (NULL on failure).
+// Cleaned up: the original contained an empty if(!_ptrWindow){} block and
+// an always-true if(!nsOpenGLChannel) test around the creation call.
+VideoRenderCallback*
+VideoRenderMacCocoaImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+        const WebRtc_UWord32 zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+
+    VideoChannelNSOpenGL* nsOpenGLChannel =
+        _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
+
+    return nsOpenGLChannel;
+
+}
+
+// Removes the stream's channel from the NSOpenGL renderer.
+// Fixed: the trace message wrongly said "Constructor".
+WebRtc_Word32
+VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const WebRtc_UWord32 streamId)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    _ptrCocoaRender->DeleteNSGLChannel(streamId);
+
+    return 0;
+}
+
+// Delegates the property query straight to the NSOpenGL renderer.
+// NOTE(review): _ptrCocoaRender is dereferenced without a NULL check;
+// assumes Init() succeeded.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+        WebRtc_UWord32& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom) const
+{
+    return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
+}
+
+// Starts rendering in the underlying NSOpenGL renderer.
+// NOTE(review): assumes Init() succeeded (no NULL check).
+WebRtc_Word32
+VideoRenderMacCocoaImpl::StartRender()
+{
+    return _ptrCocoaRender->StartRender();
+}
+
+// Stops rendering in the underlying NSOpenGL renderer.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::StopRender()
+{
+    return _ptrCocoaRender->StopRender();
+}
+
+// Identifies this implementation as the Cocoa renderer.
+VideoRenderType
+VideoRenderMacCocoaImpl::RenderType()
+{
+    return kRenderCocoa;
+}
+
+// Preferred incoming pixel format ("Perfered" spelling is part of the
+// IVideoRender interface).
+RawVideoType
+VideoRenderMacCocoaImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+// Always reports windowed mode, regardless of the _fullScreen flag.
+bool
+VideoRenderMacCocoaImpl::FullScreen()
+{
+    return false;
+}
+
+// Graphics-memory query is not implemented; reports 0/0 and succeeds.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+        WebRtc_UWord64& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return 0;
+}
+
+// Reports the main screen's size via NSScreen.  Note the CGFloat frame
+// dimensions are implicitly truncated to unsigned integers.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::GetScreenResolution(WebRtc_UWord32& screenWidth,
+        WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    NSScreen* mainScreen = [NSScreen mainScreen];
+
+    NSRect frame = [mainScreen frame];
+
+    screenWidth = frame.size.width;
+    screenHeight = frame.size.height;
+    return 0;
+}
+
+// Frame-rate reporting is not implemented; always returns 0.
+WebRtc_UWord32
+VideoRenderMacCocoaImpl::RenderFrameRate(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_renderMacCocoaCritsect);
+    return 0;
+}
+
+// Cropping is not supported in this revision; stubbed to report success.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::SetStreamCropping(const WebRtc_UWord32 streamId,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+// Re-configuration is not supported in this revision; stub.
+WebRtc_Word32 VideoRenderMacCocoaImpl::ConfigureRenderer(const WebRtc_UWord32 streamId,
+        const unsigned int zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+// Transparent backgrounds are not supported in this revision; stub.
+WebRtc_Word32
+VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
+{
+    return 0;
+}
+
+// Forwards the text-overlay request to the NSOpenGL renderer.
+// NOTE(review): no lock taken and no NULL check on _ptrCocoaRender,
+// unlike the other forwarding methods -- confirm whether intentional.
+WebRtc_Word32 VideoRenderMacCocoaImpl::SetText(const WebRtc_UWord8 textId,
+        const WebRtc_UWord8* text,
+        const WebRtc_Word32 textLength,
+        const WebRtc_UWord32 textColorRef,
+        const WebRtc_UWord32 backgroundColorRef,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
+}
+
+// Bitmap overlays are not supported in this revision; stubbed to succeed.
+WebRtc_Word32 VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
+        const WebRtc_UWord8 pictureId,
+        const void* colorKey,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+// Toggling fullscreen after construction is not supported; returns -1.
+WebRtc_Word32 VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
+{
+    return -1;
+}
+
+} //namespace webrtc
+
+#endif // COCOA_RENDERING
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_nsopengl.h b/trunk/src/modules/video_render/main/source/mac/video_render_nsopengl.h
new file mode 100644
index 0000000..cdd3be7
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_nsopengl.h
@@ -0,0 +1,193 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/OpenGL.h>
+#import <OpenGL/glu.h>
+#import <OpenGL/glext.h>
+#include <QuickTime/QuickTime.h>
+#include <list>
+#include <map>
+
+#include "video_render_defines.h"
+
+#import "cocoa_render_view.h"
+#import "cocoa_full_screen_window.h"
+
+class Trace;
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+class VideoRenderNSOpenGL;
+class CriticalSectionWrapper;
+
+// One rendered video stream. Receives raw frames through the
+// VideoRenderCallback interface, converts them into an OpenGL texture and
+// draws that texture into the region of the owner's context described by the
+// normalized [start/stop]x[Width/Height] rectangle. All public entry points
+// are serialized through the owner's context lock.
+class VideoChannelNSOpenGL : public VideoRenderCallback
+{
+
+public:
+
+    VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
+    virtual ~VideoChannelNSOpenGL();
+
+    // A new frame is delivered.
+    // NOTE(review): "timeStame90kHz" is a typo for "timeStamp90kHz"; left
+    // as-is because it is part of the declaration.
+    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+
+    // Called when the incoming frame size and/or number of streams in mix changes
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    // Record the current frame dimensions (does not reallocate buffers).
+    virtual int UpdateSize(int width, int height);
+
+    // Setup: normalized (0.0 - 1.0) placement within the render window.
+    int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+
+    // Called when it's time to render the last frame for the channel
+    int RenderOffScreenBuffer();
+
+    // Returns true if a new buffer has been delivered to the texture
+    int IsUpdated(bool& isUpdated);
+    // Record the current on-screen (stretched) size of the render window.
+    virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
+
+    // ********** new module functions ************ //
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
+
+    // ********** new module helper functions ***** //
+    int ChangeContext(NSOpenGLContext *nsglContext);
+    WebRtc_Word32 GetChannelProperties(float& left,
+            float& top,
+            float& right,
+            float& bottom);
+
+private:
+
+    NSOpenGLContext* _nsglContext;
+    int _id;
+    VideoRenderNSOpenGL* _owner;
+    WebRtc_Word32 _width;
+    WebRtc_Word32 _height;
+    // Normalized placement rectangle within the window (0.0 - 1.0).
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+    int _stretchedWidth;
+    int _stretchedHeight;
+    int _oldStretchedHeight;
+    int _oldStretchedWidth;
+    int _xOldWidth;
+    int _yOldHeight;
+    // ARGB conversion buffer uploaded to the GL texture.
+    unsigned char* _buffer;
+    int _bufferSize;
+    // Expected size of an incoming I420 frame for the current dimensions.
+    int _incommingBufferSize;
+    bool _bufferIsUpdated;
+    int _numberOfStreams;
+    GLenum _pixelFormat;
+    GLenum _pixelDataType;
+    unsigned int _texture;
+    bool _bVideoSizeStartedChanging;
+};
+
+// Owner/compositor for all VideoChannelNSOpenGL instances: manages the shared
+// NSOpenGLContext (windowed or full screen), the screen-update thread/timer
+// that repaints at ~monitor rate, and the channel and z-order bookkeeping.
+class VideoRenderNSOpenGL
+{
+
+public: // methods
+    VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
+    ~VideoRenderNSOpenGL();
+
+    static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
+
+    // Allocates textures
+    int Init();
+    VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    // NOTE(review): int and WebRtc_UWord32 overloads of DeleteNSGLChannel
+    // exist (the second is declared below); only the UWord32 overload is
+    // defined in the .mm in this drop.
+    int DeleteNSGLChannel(int channel);
+    int DeleteAllNSGLChannels();
+    int StopThread();
+    bool IsFullScreen();
+    bool HasChannels();
+    bool HasChannel(int channel);
+    int GetChannels(std::list<int>& channelList);
+    // Enter/leave the shared context critical section.
+    void LockAGLCntx();
+    void UnlockAGLCntx();
+
+    // ********** new module functions ************ //
+    int ChangeWindow(CocoaRenderView* newWindowRef);
+    WebRtc_Word32 ChangeUniqueID(WebRtc_Word32 id);
+    WebRtc_Word32 StartRender();
+    WebRtc_Word32 StopRender();
+    WebRtc_Word32 DeleteNSGLChannel(const WebRtc_UWord32 streamID);
+    WebRtc_Word32 GetChannelProperties(const WebRtc_UWord16 streamId,
+            WebRtc_UWord32& zOrder,
+            float& left,
+            float& top,
+            float& right,
+            float& bottom);
+
+    WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+            const WebRtc_UWord8* text,
+            const WebRtc_Word32 textLength,
+            const WebRtc_UWord32 textColorRef,
+            const WebRtc_UWord32 backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    // ********** new module helper functions ***** //
+    int configureNSOpenGLEngine();
+    int configureNSOpenGLView();
+    int setRenderTargetWindow();
+    int setRenderTargetFullScreen();
+
+protected: // methods
+    // Thread entry point; trampolines into ScreenUpdateProcess().
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+    int GetWindowRect(Rect& rect);
+
+private: // methods
+
+    int CreateMixingContext();
+    int RenderOffScreenBuffers();
+    int DisplayBuffers();
+
+private: // variables
+
+
+    CocoaRenderView* _windowRef;
+    bool _fullScreen;
+    int _id;
+    // Created on the heap in the constructor and bound to this reference.
+    CriticalSectionWrapper& _nsglContextCritSec;
+    ThreadWrapper* _screenUpdateThread;
+    EventWrapper* _screenUpdateEvent;
+    NSOpenGLContext* _nsglContext;
+    NSOpenGLContext* _nsglFullScreenContext;
+    CocoaFullScreenWindow* _fullScreenWindow;
+    Rect _windowRect; // The size of the window
+    int _windowWidth;
+    int _windowHeight;
+    // Channel id -> channel object.
+    std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
+    // z-order -> channel id (multimap key is the z-order).
+    std::multimap<int, int> _zOrderToChannel;
+    unsigned int _threadID;
+    bool _renderingIsPaused;
+    // Saved so full-screen mode can be unwound in the destructor.
+    NSView* _windowRefSuperView;
+    NSRect _windowRefSuperViewFrame;
+};
+
+} //namespace webrtc
+
+#endif   // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#endif	 // COCOA_RENDERING
+
diff --git a/trunk/src/modules/video_render/main/source/mac/video_render_nsopengl.mm b/trunk/src/modules/video_render/main/source/mac/video_render_nsopengl.mm
new file mode 100644
index 0000000..4f1a6c5
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/mac/video_render_nsopengl.mm
@@ -0,0 +1,1275 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#include "video_render_nsopengl.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+namespace webrtc {
+
+// Construct an idle channel bound to the shared GL context and its owning
+// renderer. No GL resources are allocated here; the texture and conversion
+// buffer are created lazily in FrameSizeChange().
+VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
+_nsglContext( nsglContext),
+_id( iId),
+_owner( owner),
+_width( 0),
+_height( 0),
+_startWidth( 0.0f),
+_startHeight( 0.0f),
+_stopWidth( 0.0f),
+_stopHeight( 0.0f),
+_stretchedWidth( 0),
+_stretchedHeight( 0),
+_oldStretchedHeight( 0),
+_oldStretchedWidth( 0),
+_xOldWidth( 0),
+_yOldHeight( 0),
+_buffer( 0),
+_bufferSize( 0),
+_incommingBufferSize( 0),
+_bufferIsUpdated( false),
+_numberOfStreams( 0),
+_pixelFormat( GL_RGBA),
+_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+_texture( 0),
+_bVideoSizeStartedChanging(false)
+
+{
+
+}
+
+// Release the conversion buffer and, if a texture was created, delete it in
+// the channel's GL context.
+VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
+{
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    if (_texture != 0)
+    {
+        // Texture names are only valid in their context, so make it current.
+        [_nsglContext makeCurrentContext];
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+// Re-bind this channel to a new OpenGL context (used when the render window
+// changes). Serialized through the owner's context lock; always returns 0.
+int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
+{
+    // BUGFIX: this used to call UnlockAGLCntx() twice — the critical section
+    // was never acquired and was released while unheld. Acquire it first.
+    _owner->LockAGLCntx();
+
+    _nsglContext = nsglContext;
+    [_nsglContext makeCurrentContext];
+
+    _owner->UnlockAGLCntx();
+    return 0;
+
+}
+
+// Report this channel's normalized placement rectangle (left/top/right/
+// bottom in 0.0 - 1.0 window coordinates) under the owner's lock.
+WebRtc_Word32 VideoChannelNSOpenGL::GetChannelProperties(float& left,
+        float& top,
+        float& right,
+        float& bottom)
+{
+
+    _owner->LockAGLCntx();
+
+    left = _startWidth;
+    top = _startHeight;
+    right = _stopWidth;
+    bottom = _stopHeight;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// VideoRenderCallback entry point: reallocate texture/buffers if the frame
+// dimensions changed, then hand the frame data to DeliverFrame(). Returns
+// DeliverFrame()'s result, or -1 if the resize fails.
+// NOTE(review): FrameSizeChange() and DeliverFrame() also take the owner's
+// lock while it is held here — this relies on the critical section being
+// recursive; confirm CriticalSectionWrapper is reentrant on this platform.
+WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
+{
+
+    _owner->LockAGLCntx();
+
+    if(_width != (int)videoFrame.Width() ||
+            _height != (int)videoFrame.Height())
+    {
+        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            _owner->UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    int ret = DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+
+    _owner->UnlockAGLCntx();
+    return ret;
+}
+
+// Record new frame dimensions under the owner's lock. Does not touch the
+// texture or buffers (see FrameSizeChange() for that).
+int VideoChannelNSOpenGL::UpdateSize(int width, int height)
+{
+    _owner->LockAGLCntx();
+    _width = width;
+    _height = height;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Record the current on-screen (window) size for this channel under the
+// owner's lock. Called from the screen-update thread each repaint.
+int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Resize handler: (re)allocate the ARGB conversion buffer and the GL
+// rectangle texture for a new width/height. Returns 0 on success (including
+// the no-op case where the size is unchanged), -1 if the texture can't be
+// created at this size.
+int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    //  We got a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    // Incoming frames are I420; the display buffer holds the ARGB conversion.
+    _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    [_nsglContext makeCurrentContext];
+
+    // Drop any previous texture before creating one at the new size.
+    if(glIsTexture(_texture))
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+
+    // NOTE(review): glGenTextures error is checked but silently ignored —
+    // this branch is intentionally empty in this drop.
+    if (glErr != GL_NO_ERROR)
+    {
+
+    }
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    // Refuse sizes beyond the implementation's texture limit.
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up the texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+            0, // level
+            GL_RGBA, // internal format
+            _width, // width
+            _height, // height
+            0, // border 0/1 = off/on
+            _pixelFormat, // format, GL_RGBA
+            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+            _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Convert an incoming I420 frame to BGRA in _buffer and upload it into the
+// channel's rectangle texture. Returns 0 on success (or when no texture
+// exists yet), -1 on a size mismatch, conversion failure, or GL error.
+int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
+{
+
+    _owner->LockAGLCntx();
+
+    // No texture yet (FrameSizeChange has not run) — silently drop the frame.
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    // Reject frames whose size doesn't match the expected I420 size.
+    if (bufferSize != _incommingBufferSize)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    int rgbRet = ConvertFromYV12(buffer, _width,
+                                 kBGRA, 0, _width, _height,
+                                 _buffer);
+    if (rgbRet < 0)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    [_nsglContext makeCurrentContext];
+
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
+    GLenum glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glBindTexture", glErr);
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+            0, // Level, not use
+            0, // start point x, (low left of pic)
+            0, // start point y,
+            _width, // width
+            _height, // height
+            _pixelFormat, // picture format for _buffer
+            _pixelDataType, // data type of _buffer
+            (const GLvoid*) _buffer); // the pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glTexSubImage2d", glErr);
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Mark the texture dirty so the screen-update thread repaints it.
+    _bufferIsUpdated = true;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Draw the channel's texture as a quad at its normalized placement rectangle
+// in the current GL context, then clear the dirty flag. Returns 0 always
+// (silently no-ops when no texture exists yet).
+int VideoChannelNSOpenGL::RenderOffScreenBuffer()
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    //	if(_fullscreen)
+    //	{
+    // NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
+    //		_width = mainDisplayRect.size.width;
+    //		_height = mainDisplayRect.size.height;
+    //		glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
+    //		float newX = mainDisplayRect.size.width/_width;
+    //		float newY = mainDisplayRect.size.height/_height;
+
+    // convert from 0.0 <= size <= 1.0 to
+    // open gl world -1.0 < size < 1.0
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    [_nsglContext makeCurrentContext];
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    glLoadIdentity();
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+    glBegin(GL_POLYGON);
+    {
+        // Rectangle textures use non-normalized texel coordinates, hence
+        // _width/_height rather than 1.0.
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Report (via out-parameter) whether a new frame has been uploaded to the
+// texture since the last repaint. Always returns 0.
+int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
+{
+    _owner->LockAGLCntx();
+
+    isUpdated = _bufferIsUpdated;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Set the channel's normalized placement rectangle, then force a buffer and
+// texture reallocation by zeroing the cached size and re-running
+// FrameSizeChange() with the previous dimensions. Returns FrameSizeChange()'s
+// result.
+// NOTE(review): FrameSizeChange() re-takes the owner's lock while it is held
+// here — relies on the critical section being recursive.
+int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    // Zero the cached size so FrameSizeChange() doesn't early-out.
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+    return retVal;
+}
+
+// Not supported: per-stream cropping always fails for this channel type.
+int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    return -1;
+}
+
+/*
+ *
+ *    VideoRenderNSOpenGL
+ *
+ */
+
+// Construct the renderer: allocates the shared critical section (heap
+// object bound to the reference member), the screen-update thread and its
+// timer event. The thread is created but not started here (see Init()/
+// StartRender()).
+VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
+_windowRef( (CocoaRenderView*)windowRef),
+_fullScreen( fullScreen),
+_id( iId),
+_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateThread( 0),
+_screenUpdateEvent( 0),
+_nsglContext( 0),
+_nsglFullScreenContext( 0),
+_fullScreenWindow( nil),
+_windowRect( ),
+_windowWidth( 0),
+_windowHeight( 0),
+_nsglChannels( ),
+_zOrderToChannel( ),
+_threadID (0),
+_renderingIsPaused (FALSE),
+_windowRefSuperView(NULL),
+_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
+{
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+}
+
+// Point the renderer at a new Cocoa view: rebuild the mixing context and
+// re-bind every existing channel to it. Returns 0 on success, -1 if context
+// creation or any channel re-bind fails.
+int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
+{
+
+    LockAGLCntx();
+
+    _windowRef = newWindowRef;
+
+    if(CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Accumulate per-channel errors; any non-zero result fails the call.
+    int error = 0;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it!= _nsglChannels.end())
+    {
+        error |= (it->second)->ChangeContext(_nsglContext);
+        it++;
+    }
+    if(error != 0)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+/* Check if the thread and event already exist. 
+ * If so then they will simply be restarted
+ * If not then create them and continue
+ */
+// Returns 0 on success, -1 if the thread/timer can't be (re)started or the
+// thread was never created.
+WebRtc_Word32 VideoRenderNSOpenGL::StartRender()
+{
+
+    LockAGLCntx();
+
+    const unsigned int MONITOR_FREQ = 60;
+    if(TRUE == _renderingIsPaused)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");
+
+        // we already have the thread. Most likely StopRender() was called and they were paused
+        if(FALSE == _screenUpdateThread->Start(_threadID) ||
+                FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+
+    // Not paused: the thread must already exist (created in the constructor,
+    // started in Init()); fail if it doesn't.
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+
+    UnlockAGLCntx();
+    return 0;
+}
+// Pause rendering: stop the screen-update thread and its timer and mark the
+// renderer paused (StartRender() restarts them). Returns 0 on success, -1 if
+// the stop fails; if thread/event don't exist, just marks paused and succeeds.
+WebRtc_Word32 VideoRenderNSOpenGL::StopRender()
+{
+
+    LockAGLCntx();
+
+    /* The code below is functional
+     * but it pauses for several seconds
+     */
+
+    // pause the update thread and the event timer
+    if(!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
+    {
+        // Stop failed — leave the renderer marked as running.
+        _renderingIsPaused = FALSE;
+
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Stub: no per-view configuration is needed in this implementation.
+int VideoRenderNSOpenGL::configureNSOpenGLView()
+{
+    return 0;
+
+}
+
+// One-time GL state setup for the mixing context: disable unused pipeline
+// stages, set rectangle-texture sampling parameters, size the viewport to
+// the current window, and enable vsync on buffer swaps.
+int VideoRenderNSOpenGL::configureNSOpenGLEngine()
+{
+
+    LockAGLCntx();
+
+    // Disable not needed functionality to increase performance
+    glDisable(GL_DITHER);
+    glDisable(GL_ALPHA_TEST);
+    glDisable(GL_STENCIL_TEST);
+    glDisable(GL_FOG);
+    glDisable(GL_TEXTURE_2D);
+    glPixelZoom(1.0, 1.0);
+    glDisable(GL_BLEND);
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+
+    // Set texture parameters
+    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        // NOTE(review): returns true (== 1) here while every other error
+        // path in this file returns -1 — looks copied from
+        // ScreenUpdateProcess(); confirm how callers interpret the result.
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+    glViewport(0, 0, _windowWidth, _windowHeight);
+
+    // Synchronize buffer swaps with vertical refresh rate
+    GLint swapInt = 1;
+    [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Windowed-mode target setup: build a hardware-accelerated pixel format,
+// initialize the Cocoa render view with it, adopt its GL context, and clear
+// the screen to black. Returns 0 on success, -1 if no window is attached.
+int VideoRenderNSOpenGL::setRenderTargetWindow()
+{
+    LockAGLCntx();
+
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[NSOpenGLPixelFormat alloc] initWithAttributes: (NSOpenGLPixelFormatAttribute*) attribs];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderView:fmt];
+    }
+    else
+    {
+        // NOTE(review): fmt is not released on this early-exit path.
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [fmt release];
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Full-screen target setup: remember the view's original superview/frame so
+// the destructor can restore them, resize the view to the main screen, move
+// it into a new CocoaFullScreenWindow, then adopt its GL context and clear to
+// black. Returns 0 on success, -1 if no window is attached.
+int VideoRenderNSOpenGL::setRenderTargetFullScreen()
+{
+    LockAGLCntx();
+
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[NSOpenGLPixelFormat alloc] initWithAttributes: (NSOpenGLPixelFormatAttribute*) attribs];
+
+    // Store original superview and frame for use when exiting full screens
+    _windowRefSuperViewFrame = [_windowRef frame];
+    _windowRefSuperView = [_windowRef superview];
+
+
+    // create new fullscreen window
+    NSRect screenRect = [[NSScreen mainScreen]frame];
+    [_windowRef setFrame:screenRect];
+    [_windowRef setBounds:screenRect];
+
+    
+    _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
+    [_fullScreenWindow grabFullScreen];
+    [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderViewFullScreen:fmt];
+    }
+    else
+    {
+        // NOTE(review): fmt is not released on this early-exit path.
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [fmt release];
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Tear down the renderer: unwind full-screen mode (restoring the view to its
+// original superview/frame), stop and delete the screen-update thread and
+// event, drop the GL context reference, delete every channel, and clear the
+// z-order bookkeeping.
+VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
+{
+
+    if(_fullScreen)
+    {
+        if(_fullScreenWindow)
+        {
+            // Detach CocoaRenderView from full screen view back to 
+            // it's original parent.
+            [_windowRef removeFromSuperview];
+            if(_windowRefSuperView) 
+            {
+              [_windowRefSuperView addSubview:_windowRef];
+              [_windowRef setFrame:_windowRefSuperViewFrame];
+            }
+            
+            WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
+            [_fullScreenWindow releaseFullScreen];
+     
+        }
+    }
+
+    // Signal event to exit thread, then delete it
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    _screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_nsglContext != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        _nsglContext = nil;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it!= _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+        it = _nsglChannels.begin();
+    }
+    _nsglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+
+    // BUGFIX: the critical section is heap-allocated in the constructor and
+    // bound to the reference member; it was never freed. Delete it last,
+    // after all locking is finished, to avoid leaking it.
+    delete &_nsglContextCritSec;
+}
+
+/* static */
+// Not implemented: OpenGL version query always fails in this drop.
+int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
+{
+    return -1;
+}
+
+// Start the screen-update thread and its ~60 Hz repaint timer, then create
+// the mixing context. Returns 0 on success, -1 if the thread was never
+// created or context creation fails.
+int VideoRenderNSOpenGL::Init()
+{
+
+    LockAGLCntx();
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start(_threadID);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    if (CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+// Create and register a channel for `channel` at the given z-order and
+// normalized placement. Returns the new channel, or NULL if the id already
+// exists or the initial stream settings are rejected.
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    if (HasChannel(channel))
+    {
+        return NULL;
+    }
+
+    // NOTE(review): duplicate z-orders are detected but deliberately not
+    // treated as an error — the multimap allows them; this branch is empty.
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+
+    }
+
+    VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        if (newAGLChannel)
+        {
+            delete newAGLChannel;
+            newAGLChannel = NULL;
+        }
+
+        return NULL;
+    }
+
+    _nsglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);
+
+    return newAGLChannel;
+}
+
+// Delete every channel object and clear both the channel map and the
+// z-order bookkeeping. Always returns 0.
+int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* channel = it->second;
+        // BUGFIX: this trace used to pass the channel POINTER for "%d"
+        // (undefined behavior); log the integer channel id (map key) instead.
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, it->first);
+        delete channel;
+        it++;
+    }
+    _nsglChannels.clear();
+
+    // BUGFIX: stale z-order entries were left behind, so a later
+    // GetChannelProperties() could find a z-order entry with no matching
+    // channel and dereference an end() iterator. Keep both maps in sync,
+    // matching DeleteNSGLChannel().
+    _zOrderToChannel.clear();
+
+    return 0;
+}
+
+// Delete one channel by id and remove its entry from the z-order map.
+// Returns 0 on success, -1 if the channel id is unknown.
+WebRtc_Word32 VideoRenderNSOpenGL::DeleteNSGLChannel(const WebRtc_UWord32 channel)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.find(channel);
+    if (it != _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+    }
+    else
+    {
+        return -1;
+    }
+
+    // Remove the (single) z-order entry that maps to this channel.
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == (int)channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;
+    }
+
+    return 0;
+}
+
+// Look up the channel registered for streamId and report its z-order and
+// normalized placement rectangle. Returns 0 on success, -1 if the stream is
+// unknown or the property query fails.
+WebRtc_Word32 VideoRenderNSOpenGL::GetChannelProperties(const WebRtc_UWord16 streamId,
+        WebRtc_UWord32& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    bool channelFound = false;
+
+    // _zOrderToChannel maps z-order (key) -> channel id (value); scan it,
+    // highest z-order first, for the requested stream.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+            rIt != _zOrderToChannel.rend();
+            rIt++)
+    {
+        if(streamId == rIt->second)
+        {
+            channelFound = true;
+
+            // BUGFIX: the z-order is the multimap KEY; the old code assigned
+            // rIt->second (the channel id) to zOrder.
+            zOrder = rIt->first;
+
+            // BUGFIX: renamed the inner iterator (it shadowed rIt) and guard
+            // against a stale z-order entry with no matching channel instead
+            // of dereferencing end().
+            std::map<int, VideoChannelNSOpenGL*>::iterator cIt = _nsglChannels.find(streamId);
+            if (cIt == _nsglChannels.end())
+            {
+                return -1;
+            }
+            VideoChannelNSOpenGL* tempChannel = cIt->second;
+
+            if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
+            {
+                return -1;
+            }
+            break;
+        }
+    }
+
+    if(false == channelFound)
+    {
+
+        return -1;
+    }
+
+    return 0;
+}
+
+// Permanently stop and delete the screen-update thread and its event.
+// Always returns 0.
+// NOTE(review): the trace format has no conversion for the extra
+// _screenUpdateThread argument (excess printf args are ignored).
+int VideoRenderNSOpenGL::StopThread()
+{
+
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Stopping thread ", __FUNCTION__, _screenUpdateThread);
+    _screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        // Wake the thread so it can observe the NULL pointer and exit.
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+// Whether this renderer was created in full-screen mode.
+bool VideoRenderNSOpenGL::IsFullScreen()
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+    return _fullScreen;
+}
+
+// Whether at least one channel is registered.
+bool VideoRenderNSOpenGL::HasChannels()
+{
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    if (_nsglChannels.begin() != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+// Whether a channel with the given id is registered.
+bool VideoRenderNSOpenGL::HasChannel(int channel)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+// Append every registered channel id to channelList. Always returns 0.
+int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+// Reconfigure an existing channel's placement and z-order. Returns the
+// channel on success, NULL if the id is unknown or the new stream settings
+// are rejected.
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            // BUGFIX: the old format string had seven %d conversions fed six
+            // values after __FUNCTION__ — including floats passed to %d
+            // (undefined behavior). Match conversions to the actual argument
+            // list and use %f for the float placements.
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
+                    __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+            return NULL;
+        }
+        // BUGFIX: same format/argument mismatch as above.
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
+                __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+
+        // Move the channel's z-order entry if the z-order changed.
+        // (Renamed from "it" to avoid shadowing the channel-map iterator.)
+        std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+        while(zIt != _zOrderToChannel.end())
+        {
+            if (zIt->second == channel)
+            {
+                if (zIt->first != zOrder)
+                {
+                    _zOrderToChannel.erase(zIt);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            zIt++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+
+bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderNSOpenGL::ScreenUpdateProcess()
+{
+
+    _screenUpdateEvent->Wait(10);
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
+        UnlockAGLCntx();
+        return false;
+    }
+
+    [_nsglContext makeCurrentContext];
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+        glViewport(0, 0, _windowWidth, _windowHeight);
+    }
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it != _nsglChannels.end())
+    {
+
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+        aglChannel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+
+    if (updated)
+    {
+
+        // At least one buffer is updated, we need to repaint the texture
+        if (RenderOffScreenBuffers() != -1)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+    }
+    //    }
+    UnlockAGLCntx();
+    return true;
+}
+
+/*
+ *
+ *    Functions for creating mixing buffers and screen settings
+ *
+ */
+
+int VideoRenderNSOpenGL::CreateMixingContext()
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    if(_fullScreen)
+    {
+        if(-1 == setRenderTargetFullScreen())
+        {
+            return -1;
+        }
+    }
+    else
+    {
+
+        if(-1 == setRenderTargetWindow())
+        {
+            return -1;
+        }
+    }
+
+    configureNSOpenGLEngine();
+
+    DisplayBuffers();
+
+    GLenum glErr = glGetError();
+    if (glErr)
+    {
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    Rendering functions
+ *
+ */
+
+int VideoRenderNSOpenGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size, it might have changed since last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [_nsglContext makeCurrentContext];
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+            rIt != _zOrderToChannel.rend();
+            rIt++)
+    {
+        int channelId = rIt->second;
+        std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channelId);
+
+        VideoChannelNSOpenGL* aglChannel = it->second;
+
+        aglChannel->RenderOffScreenBuffer();
+    }
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+/*
+ *
+ * Help functions
+ *
+ * All help functions assumes external protections
+ *
+ */
+
+int VideoRenderNSOpenGL::DisplayBuffers()
+{
+
+    LockAGLCntx();
+
+    glFinish();
+    [_nsglContext flushBuffer];
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+
+    if (_windowRef)
+    {
+        if(_fullScreen)
+        {
+            NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
+            rect.bottom = 0;
+            rect.left = 0;
+            rect.right = mainDisplayRect.size.width;
+            rect.top = mainDisplayRect.size.height;
+        }
+        else
+        {
+            rect.top = [_windowRef frame].origin.y;
+            rect.left = [_windowRef frame].origin.x;
+            rect.bottom = [_windowRef frame].origin.y + [_windowRef frame].size.height;
+            rect.right = [_windowRef frame].origin.x + [_windowRef frame].size.width;
+        }
+
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+WebRtc_Word32 VideoRenderNSOpenGL::ChangeUniqueID(WebRtc_Word32 id)
+{
+
+    CriticalSectionScoped cs(_nsglContextCritSec);
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderNSOpenGL::SetText(const WebRtc_UWord8 /*textId*/,
+        const WebRtc_UWord8* /*text*/,
+        const WebRtc_Word32 /*textLength*/,
+        const WebRtc_UWord32 /*textColorRef*/,
+        const WebRtc_UWord32 /*backgroundColorRef*/,
+        const float /*left*/,
+        const float /*top*/,
+        const float /*right*/,
+        const float /*bottom*/)
+{
+
+    return 0;
+
+}
+
+void VideoRenderNSOpenGL::LockAGLCntx()
+{
+    _nsglContextCritSec.Enter();
+}
+void VideoRenderNSOpenGL::UnlockAGLCntx()
+{
+    _nsglContextCritSec.Leave();
+}
+
+/*
+
+ bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen)
+ {
+ NSRect mainDisplayRect, viewRect;
+
+ // Create a screen-sized window on the display you want to take over
+ // Note, mainDisplayRect has a non-zero origin if the key window is on a secondary display
+ mainDisplayRect = [[NSScreen mainScreen] frame];
+ fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect styleMask:NSBorderlessWindowMask
+ backing:NSBackingStoreBuffered defer:YES];
+
+ // Set the window level to be above the menu bar
+ [fullScreenWindow setLevel:NSMainMenuWindowLevel+1];
+
+ // Perform any other window configuration you desire
+ [fullScreenWindow setOpaque:YES];
+ [fullScreenWindow setHidesOnDeactivate:YES];
+
+ // Create a view with a double-buffered OpenGL context and attach it to the window
+ // By specifying the non-fullscreen context as the shareContext, we automatically inherit the OpenGL objects (textures, etc) it has defined
+ viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width, mainDisplayRect.size.height);
+ fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect shareContext:[openGLView openGLContext]];
+ [fullScreenWindow setContentView:fullScreenView];
+
+ // Show the window
+ [fullScreenWindow makeKeyAndOrderFront:self];
+
+ // Set the scene with the full-screen viewport and viewing transformation
+ [scene setViewportRect:viewRect];
+
+ // Assign the view's MainController to self
+ [fullScreenView setMainController:self];
+
+ if (!isAnimating) {
 // Mark the view as needing drawing to initialize its contents
+ [fullScreenView setNeedsDisplay:YES];
+ }
+ else {
+ // Start playing the animation
+ [fullScreenView startAnimation];
+ }
+
+ }
+
+
+
+ */
+
+
+} //namespace webrtc
+
+#endif // COCOA_RENDERING
diff --git a/trunk/src/modules/video_render/main/source/video_render.gypi b/trunk/src/modules/video_render/main/source/video_render.gypi
new file mode 100644
index 0000000..7a7545e
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/video_render.gypi
@@ -0,0 +1,195 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_render_module',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '.',
+        '../interface',
+        '../../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../../interface',
+        ],
+      },
+      'sources': [
+        # interfaces
+        '../interface/video_render.h',
+        '../interface/video_render_defines.h',
+
+        # headers
+        'incoming_video_stream.h',
+        'video_render_frames.h',
+        'video_render_impl.h',
+        'i_video_render.h',
+        # Linux
+        'linux/video_render_linux_impl.h',
+        'linux/video_x11_channel.h',
+        'linux/video_x11_render.h',
+        # Mac
+        'mac/cocoa_full_screen_window.h',
+        'mac/cocoa_render_view.h',
+        'mac/video_render_agl.h',
+        'mac/video_render_mac_carbon_impl.h',
+        'mac/video_render_mac_cocoa_impl.h',
+        'mac/video_render_nsopengl.h',
+        # Windows
+        'windows/i_video_render_win.h',
+        'windows/video_render_direct3d9.h',
+        'windows/video_render_directdraw.h',
+        'windows/video_render_windows_impl.h',
+        # External
+        'external/video_render_external_impl.h',
+
+        # PLATFORM INDEPENDENT SOURCE FILES
+        'incoming_video_stream.cc',
+        'video_render_frames.cc',
+        'video_render_impl.cc',
+        # PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
+        # Linux
+        'linux/video_render_linux_impl.cc',
+        'linux/video_x11_channel.cc',
+        'linux/video_x11_render.cc',
+        # Mac
+        'mac/video_render_nsopengl.mm',
+        'mac/video_render_mac_cocoa_impl.mm',
+        'mac/video_render_agl.cc',
+        'mac/video_render_mac_carbon_impl.cc',
+        'mac/cocoa_render_view.mm',
+        'mac/cocoa_full_screen_window.mm',
+        # Windows
+        'windows/video_render_direct3d9.cc',
+        'windows/video_render_directdraw.cc',
+        'windows/video_render_windows_impl.cc',
+        # External
+        'external/video_render_external_impl.cc',
+      ],
+      # TODO(andrew): with the proper suffix, these files will be excluded
+      # automatically.
+      'conditions': [
+        ['include_internal_video_render==1', {
+          'defines': [
+            'WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER',
+          ],
+        }],
+        ['OS!="linux" or include_internal_video_render==0', {
+          'sources!': [
+            'linux/video_render_linux_impl.h',
+            'linux/video_x11_channel.h',
+            'linux/video_x11_render.h',
+            'linux/video_render_linux_impl.cc',
+            'linux/video_x11_channel.cc',
+            'linux/video_x11_render.cc',
+          ],
+        }],
+        ['OS!="mac" or include_internal_video_render==0', {
+          'sources!': [
+            'mac/cocoa_full_screen_window.h',
+            'mac/cocoa_render_view.h',
+            'mac/video_render_agl.h',
+            'mac/video_render_mac_carbon_impl.h',
+            'mac/video_render_mac_cocoa_impl.h',
+            'mac/video_render_nsopengl.h',
+            'mac/video_render_nsopengl.mm',
+            'mac/video_render_mac_cocoa_impl.mm',
+            'mac/video_render_agl.cc',
+            'mac/video_render_mac_carbon_impl.cc',
+            'mac/cocoa_render_view.mm',
+            'mac/cocoa_full_screen_window.mm',
+          ],
+        }],
+        ['OS=="mac"', {
+          'direct_dependent_settings': {
+            'include_dirs': [
+              'mac',
+            ],
+          },
+        }],
+        ['OS!="win" or include_internal_video_render==0', {
+          'sources!': [
+            'windows/i_video_render_win.h',
+            'windows/video_render_direct3d9.h',
+            'windows/video_render_directdraw.h',
+            'windows/video_render_windows_impl.h',
+            'windows/video_render_direct3d9.cc',
+            'windows/video_render_directdraw.cc',
+            'windows/video_render_windows_impl.cc',
+          ],
+        }],
+      ] # conditions
+    }, # video_render_module
+  ], # targets
+
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['include_internal_video_render==1', {
+      'defines': [
+        'WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER',
+      ],
+    }],
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'video_render_module_test',
+          'type': 'executable',
+          'dependencies': [
+            'video_render_module',
+            'webrtc_utility',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+          ],
+          'sources': [
+            '../test/testAPI/testAPI.cc',
+            '../test/testAPI/testAPI.h',
+            '../test/testAPI/testAPI_mac.mm',
+          ],
+          'conditions': [
+            ['OS=="mac" or OS=="linux"', {
+              'cflags': [
+                '-Wno-write-strings',
+              ],
+              'ldflags': [
+                '-lpthread -lm',
+              ],
+            }],
+            ['OS=="linux"', {
+              'libraries': [
+                '-lrt',
+                '-lXext',
+                '-lX11',
+              ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_LDFLAGS': [
+                  '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
+                ],
+              },
+            }],
+          ] # conditions
+        }, # video_render_module_test
+      ], # targets
+    }], # build_with_chromium==0
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/modules/video_render/main/source/video_render_frames.cc b/trunk/src/modules/video_render/main/source/video_render_frames.cc
new file mode 100644
index 0000000..28078e3
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/video_render_frames.cc
@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_frames.h"
+#include "module_common_types.h"
+#include "tick_util.h"
+#include "trace.h"
+#include <cassert>
+
+namespace webrtc {
+
+VideoRenderFrames::VideoRenderFrames() :
+    _incomingFrames(), _renderDelayMs(10)
+{
+}
+
+VideoRenderFrames::~VideoRenderFrames()
+{
+    ReleaseAllFrames();
+}
+
+WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* ptrNewFrame)
+{
+    const WebRtc_Word64 timeNow = TickTime::MillisecondTimestamp();
+
+    if (ptrNewFrame->RenderTimeMs() + KOldRenderTimestampMS < timeNow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                     "%s: too old frame.", __FUNCTION__);
+        return -1;
+    }
+    if (ptrNewFrame->RenderTimeMs() > timeNow + KFutureRenderTimestampMS)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                     "%s: frame too long into the future.", __FUNCTION__);
+        return -1;
+    }
+
+    // Get an empty frame
+    VideoFrame* ptrFrameToAdd = NULL;
+    if (!_emptyFrames.Empty())
+    {
+        ListItem* item = _emptyFrames.First();
+        if (item)
+        {
+            ptrFrameToAdd = static_cast<VideoFrame*> (item->GetItem());
+            _emptyFrames.Erase(item);
+        }
+    }
+    if (!ptrFrameToAdd)
+    {
+
+        if (_emptyFrames.GetSize() + _incomingFrames.GetSize()
+                > KMaxNumberOfFrames)
+        {
+            // Already allocated too many frames...
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
+                         -1, "%s: too many frames, limit: %d", __FUNCTION__,
+                         KMaxNumberOfFrames);
+            return -1;
+        }
+
+        // Allocate new memory
+        WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
+                     "%s: allocating buffer %d", __FUNCTION__,
+                     _emptyFrames.GetSize() + _incomingFrames.GetSize());
+
+        ptrFrameToAdd = new VideoFrame();
+        if (!ptrFrameToAdd)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                         "%s: could not create new frame for", __FUNCTION__);
+            return -1;
+        }
+    }
+
+    ptrFrameToAdd->VerifyAndAllocate(ptrNewFrame->Length());
+    ptrFrameToAdd->SwapFrame(const_cast<VideoFrame&> (*ptrNewFrame)); // Remove const-ness; copying would be costly.
+    _incomingFrames.PushBack(ptrFrameToAdd);
+
+    return _incomingFrames.GetSize();
+}
+
+VideoFrame*
+VideoRenderFrames::FrameToRender()
+{
+    VideoFrame* ptrRenderFrame = NULL;
+    while (!_incomingFrames.Empty())
+    {
+        ListItem* item = _incomingFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrOldestFrameInList =
+                    static_cast<VideoFrame*> (item->GetItem());
+            if (ptrOldestFrameInList->RenderTimeMs()
+                    <= TickTime::MillisecondTimestamp() + _renderDelayMs)
+            {
+                // This is the oldest one so far and it's ok to render
+                if (ptrRenderFrame)
+                {
+                    // This one is older than the newly found frame, remove this one.
+                    ptrRenderFrame->SetWidth(0);
+                    ptrRenderFrame->SetHeight(0);
+                    ptrRenderFrame->SetLength(0);
+                    ptrRenderFrame->SetRenderTime(0);
+                    ptrRenderFrame->SetTimeStamp(0);
+                    _emptyFrames.PushFront(ptrRenderFrame);
+                }
+                ptrRenderFrame = ptrOldestFrameInList;
+                _incomingFrames.Erase(item);
+            }
+            else
+            {
+                // We can't release this one yet, we're done here.
+                break;
+            }
+        }
+        else
+        {
+            assert(false);
+        }
+    }
+    return ptrRenderFrame;
+}
+
+WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* ptrOldFrame)
+{
+    ptrOldFrame->SetWidth(0);
+    ptrOldFrame->SetHeight(0);
+    ptrOldFrame->SetRenderTime(0);
+    ptrOldFrame->SetLength(0);
+    _emptyFrames.PushBack(ptrOldFrame);
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames()
+{
+    while (!_incomingFrames.Empty())
+    {
+        ListItem* item = _incomingFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrFrame =
+                    static_cast<VideoFrame*> (item->GetItem());
+            assert(ptrFrame != NULL);
+            ptrFrame->Free();
+            delete ptrFrame;
+        }
+        _incomingFrames.Erase(item);
+    }
+    while (!_emptyFrames.Empty())
+    {
+        ListItem* item = _emptyFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrFrame =
+                    static_cast<VideoFrame*> (item->GetItem());
+            assert(ptrFrame != NULL);
+            ptrFrame->Free();
+            delete ptrFrame;
+        }
+        _emptyFrames.Erase(item);
+    }
+    return 0;
+}
+
+WebRtc_Word32 KEventMaxWaitTimeMs = 200;
+
+WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease()
+{
+    WebRtc_Word64 timeToRelease = 0;
+    ListItem* item = _incomingFrames.First();
+    if (item)
+    {
+        VideoFrame* oldestFrame =
+                static_cast<VideoFrame*> (item->GetItem());
+        timeToRelease = oldestFrame->RenderTimeMs() - _renderDelayMs
+                - TickTime::MillisecondTimestamp();
+        if (timeToRelease < 0)
+        {
+            timeToRelease = 0;
+        }
+    }
+    else
+    {
+        timeToRelease = KEventMaxWaitTimeMs;
+    }
+
+    return (WebRtc_UWord32) timeToRelease;
+}
+
+//
+WebRtc_Word32 VideoRenderFrames::SetRenderDelay(
+                                                const WebRtc_UWord32 renderDelay)
+{
+    _renderDelayMs = renderDelay;
+    return 0;
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/video_render_frames.h b/trunk/src/modules/video_render/main/source/video_render_frames.h
new file mode 100644
index 0000000..84a2a1c
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/video_render_frames.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
+
+#include "list_wrapper.h"
+#include "video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+class VideoRenderFrames
+{
+public:
+    VideoRenderFrames();
+    ~VideoRenderFrames();
+
+    /*
+     *   Add a frame to the render queue
+     */
+    WebRtc_Word32 AddFrame(VideoFrame* ptrNewFrame);
+
+    /*
+     *   Get a frame for rendering, if it's time to render.
+     */
+    VideoFrame* FrameToRender();
+
+    /*
+     *   Return an old frame
+     */
+    WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
+
+    /*
+     *   Releases all frames
+     */
+    WebRtc_Word32 ReleaseAllFrames();
+
+    /*
+     *   Returns the number of ms to next frame to render
+     */
+    WebRtc_UWord32 TimeToNextFrameRelease();
+
+    /*
+     *   Sets estimates delay in renderer
+     */
+    WebRtc_Word32 SetRenderDelay(const WebRtc_UWord32 renderDelay);
+
+private:
+    enum
+    {
+        KMaxNumberOfFrames = 300
+    }; // 10 seconds for 30 fps.
+    enum
+    {
+        KOldRenderTimestampMS = 500
+    }; //Don't render frames with timestamp older than 500ms from now.
+    enum
+    {
+        KFutureRenderTimestampMS = 10000
+    }; //Don't render frames with timestamp more than 10s into the future.
+
+    ListWrapper _incomingFrames; // Sorted oldest video frame first
+    ListWrapper _emptyFrames; // Empty frames
+
+    WebRtc_UWord32 _renderDelayMs; // Set render delay
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
diff --git a/trunk/src/modules/video_render/main/source/video_render_impl.cc b/trunk/src/modules/video_render/main/source/video_render_impl.cc
new file mode 100644
index 0000000..616d03b
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/video_render_impl.cc
@@ -0,0 +1,981 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_impl.h"
+#include "engine_configurations.h"
+#include "critical_section_wrapper.h"
+#include "video_render_defines.h"
+#include "trace.h"
+#include "incoming_video_stream.h"
+#include "i_video_render.h"
+
+#include <cassert>
+
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined (_WIN32)
+#include "windows/video_render_windows_impl.h"
+#define STANDARD_RENDERING kRenderWindows
+
+#elif defined(MAC_IPHONE) // MAC_IPHONE should go before WEBRTC_MAC_INTEL because WEBRTC_MAC_INTEL gets defined if MAC_IPHONE is defined
+#if defined(IPHONE_GLES_RENDERING)
+#define STANDARD_RENDERING kRenderiPhone
+#include "iPhone/video_render_iphone_impl.h"
+#endif
+
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+#if defined(COCOA_RENDERING)
+#define STANDARD_RENDERING kRenderCocoa
+#include "mac/video_render_mac_cocoa_impl.h"
+#elif defined(CARBON_RENDERING)
+#define STANDARD_RENDERING kRenderCarbon
+#include "mac/video_render_mac_carbon_impl.h"
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+#include "Android/video_render_android_impl.h"
+#include "Android/video_render_android_surface_view.h"
+#include "Android/video_render_android_native_opengl2.h"
+#define STANDARD_RENDERING	kRenderAndroid
+
+#elif defined(WEBRTC_LINUX)
+#include "linux/video_render_linux_impl.h"
+#define STANDARD_RENDERING kRenderX11
+
+#else
+//Other platforms
+#endif
+
+#endif  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+// For external rendering
+#include "external/video_render_external_impl.h"
+#ifndef STANDARD_RENDERING
+#define STANDARD_RENDERING kRenderExternal
+#endif  // STANDARD_RENDERING
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const WebRtc_Word32 id,
+                               void* window,
+                               const bool fullscreen,
+                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+
+    VideoRenderType resultVideoRenderType = videoRenderType;
+    if (videoRenderType == kRenderDefault)
+    {
+        resultVideoRenderType = STANDARD_RENDERING;
+    }
+    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+                                     fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(
+                                                         VideoRender* module)
+{
+    if (module)
+    {
+        delete module;
+    }
+}
+
+WebRtc_Word32 VideoRender::SetAndroidObjects(void *javaVM)
+{
+#ifdef WEBRTC_ANDROID
+    return VideoRenderAndroid::SetAndroidEnvVariables(javaVM);
+#else
+    return -1;
+#endif
+}
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+                                             const WebRtc_Word32 id,
+                                             const VideoRenderType videoRenderType,
+                                             void* window,
+                                             const bool fullscreen) :
+    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+            _ptrWindow(window), _renderType(videoRenderType),
+            _fullScreen(fullscreen), _ptrRenderer(NULL),
+            _streamRenderMap(*(new MapWrapper()))
+{
+
+    // Create platform specific renderer
+    switch (videoRenderType)
+    {
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined(_WIN32)
+        case kRenderWindows:
+        {
+            VideoRenderWindowsImpl* ptrRenderer;
+            ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(MAC_IPHONE)
+        case kRenderiPhone:
+        {
+            VideoRenderIPhoneImpl* ptrRenderer = new VideoRenderIPhoneImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+
+#if defined(COCOA_RENDERING)
+        case kRenderCocoa:
+        {
+            VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+
+        break;
+#elif defined(CARBON_RENDERING)
+        case kRenderCarbon:
+        {
+            VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+        case kRenderAndroid:
+        {
+            if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
+            {
+                AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+            else
+            {
+                AndroidSurfaceViewRenderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+
+        }
+        break;
+#elif defined(WEBRTC_LINUX)
+        case kRenderX11:
+        {
+            VideoRenderLinuxImpl* ptrRenderer = NULL;
+            ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
+            if ( ptrRenderer )
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+        break;
+
+#else
+        // Other platforms
+#endif
+
+#endif  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+        case kRenderExternal:
+        {
+            VideoRenderExternalImpl* ptrRenderer(NULL);
+            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+                                                      window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+            break;
+        default:
+            // Error...
+            break;
+    }
+    if (_ptrRenderer)
+    {
+        if (_ptrRenderer->Init() == -1)
+        {
+        }
+    }
+}
+
+// Destructor: tears down all incoming streams, the stream map, the module
+// critical section and finally the platform specific renderer.
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+    // NOTE(review): the critical section is destroyed before the streams
+    // and the renderer are torn down; presumably no other thread may use
+    // this module once the destructor runs -- confirm with callers.
+    delete &_moduleCrit;
+
+    // Delete every incoming stream still registered and empty the map,
+    // which is owned by this object.
+    while (_streamRenderMap.Size() > 0)
+    {
+        MapItem* item = _streamRenderMap.First();
+        IncomingVideoStream* ptrIncomingStream =
+                static_cast<IncomingVideoStream*> (item->GetItem());
+        assert(ptrIncomingStream != NULL);
+        delete ptrIncomingStream;
+        _streamRenderMap.Erase(item);
+    }
+    delete &_streamRenderMap;
+
+    // Delete platform specific renderer
+    if (_ptrRenderer)
+    {
+        // The renderer must be deleted through its concrete type; which
+        // cast applies depends on the render type chosen at construction
+        // and on the platform this module was compiled for.
+        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+        switch (videoRenderType)
+        {
+            case kRenderExternal:
+            {
+                VideoRenderExternalImpl
+                        * ptrRenderer =
+                                reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined(_WIN32)
+            case kRenderWindows:
+            {
+                VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+
+#if defined(COCOA_RENDERING)
+            case kRenderCocoa:
+            {
+                VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(CARBON_RENDERING)
+            case kRenderCarbon:
+            {
+                VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#endif
+
+#elif defined(MAC_IPHONE)
+            // The iPhone renderer is intentionally not deleted here.
+            case kRenderiPhone:
+            break;
+
+#elif defined(WEBRTC_ANDROID)
+            case kRenderAndroid:
+            {
+                VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+
+#elif defined(WEBRTC_LINUX)
+            case kRenderX11:
+            {
+                VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#else
+            //other platforms
+#endif
+
+#endif  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+            default:
+                // Error...
+                break;
+        }
+    }
+}
+
+// Assigns a new unique id to this module and propagates it to the
+// platform specific renderer, if one exists. Always succeeds.
+WebRtc_Word32 ModuleVideoRenderImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    _id = id;
+    if (_ptrRenderer != NULL)
+    {
+        _ptrRenderer->ChangeUniqueId(_id);
+    }
+    return 0;
+}
+
+// Module interface. This module performs no periodic work, so a fixed
+// 50 ms is reported until the next (no-op) Process() call.
+WebRtc_Word32 ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+    return 50;
+}
+// Module interface. Nothing to do; always reports success.
+WebRtc_Word32 ModuleVideoRenderImpl::Process()
+{
+    return 0;
+}
+
+// Returns the render window handle held by this module.
+void*
+ModuleVideoRenderImpl::Window()
+{
+    CriticalSectionScoped lock(_moduleCrit);
+    return _ptrWindow;
+}
+
+// Replaces the render window. On iPhone and Mac the platform renderer is
+// re-created around the new window; on other internal-render platforms the
+// request is forwarded to the existing renderer.
+// Returns 0 on success, -1 on failure (and always -1 when built without
+// WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER).
+WebRtc_Word32 ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+
+    CriticalSectionScoped cs(_moduleCrit);
+
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined(MAC_IPHONE) // MAC_IPHONE must go before WEBRTC_MAC or WEBRTC_MAC_INTEL
+    // Bug fix: delete the old renderer *before* clearing the pointer.
+    // The previous code cleared the pointer first and then deleted NULL,
+    // leaking the old renderer instance.
+    delete _ptrRenderer;
+    _ptrRenderer = NULL;
+
+    VideoRenderIPhoneImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderIPhoneImpl(_id, kRenderiPhone, window, _fullScreen);
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+
+    // Same leak fix as above: delete the old renderer first, then clear.
+    delete _ptrRenderer;
+    _ptrRenderer = NULL;
+
+#if defined(COCOA_RENDERING)
+    VideoRenderMacCocoaImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
+#elif defined(CARBON_RENDERING)
+    VideoRenderMacCarbonImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
+#endif
+    // NOTE(review): plain operator new throws on failure rather than
+    // returning NULL, so this check is defensive only.
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+
+#else
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->ChangeWindow(window);
+
+#endif
+
+#else  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+    return -1;
+#endif
+}
+
+// Returns this module's id.
+WebRtc_Word32 ModuleVideoRenderImpl::Id()
+{
+    CriticalSectionScoped lock(_moduleCrit);
+    return _id;
+}
+
+// Returns the measured incoming frame rate for streamId, or 0 if the
+// stream is unknown (an error is traced in that case).
+WebRtc_UWord32 ModuleVideoRenderImpl::GetIncomingFrameRate(
+                                                           const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return 0;
+    }
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream == NULL)
+    {
+        // A map entry without a stream should be impossible; drop it.
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return stream->IncomingRate();
+}
+
+// Creates a render stream in the platform renderer and wraps it in a
+// platform independent IncomingVideoStream, which is stored in the stream
+// map. Returns the module-side callback that frames for this stream should
+// be delivered to, or NULL on failure (no renderer, duplicate streamId, or
+// stream creation failure).
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                               const WebRtc_UWord32 zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return NULL;
+    }
+
+    if (_streamRenderMap.Find(streamId) != NULL)
+    {
+        // The stream already exists...
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream already exists", __FUNCTION__);
+        return NULL;
+    }
+
+    VideoRenderCallback* ptrRenderCallback =
+            _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+                                                  right, bottom);
+    if (ptrRenderCallback == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream in renderer",
+                     __FUNCTION__);
+        return NULL;
+    }
+
+    // Create platform independent code
+    IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(_id,
+                                                                     streamId);
+    if (ptrIncomingStream == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream", __FUNCTION__);
+        // Bug fix: also remove the stream just created in the renderer so
+        // a failure here does not leak the renderer-side stream.
+        _ptrRenderer->DeleteIncomingRenderStream(streamId);
+        return NULL;
+    }
+
+    if (ptrIncomingStream->SetRenderCallback(ptrRenderCallback) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't set render callback", __FUNCTION__);
+        delete ptrIncomingStream;
+        _ptrRenderer->DeleteIncomingRenderStream(streamId);
+        return NULL;
+    }
+
+    VideoRenderCallback* moduleCallback =
+            ptrIncomingStream->ModuleCallback();
+
+    // Store the stream
+    _streamRenderMap.Insert(streamId, ptrIncomingStream);
+
+    return moduleCallback;
+}
+
+// Removes the incoming stream streamId from both this module and the
+// platform renderer. Returns 0 on success, -1 if no renderer exists or
+// the stream is unknown.
+WebRtc_Word32 ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+                                                                const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    // Tear down the platform independent stream, then the renderer-side
+    // stream, and finally drop the map entry.
+    delete static_cast<IncomingVideoStream*> (item->GetItem());
+    _ptrRenderer->DeleteIncomingRenderStream(streamId);
+    _streamRenderMap.Erase(item);
+
+    return 0;
+}
+
+// Registers an external render callback on streamId, used when frames are
+// rendered by the application instead of this module.
+// Returns -1 if the stream is unknown, otherwise the result of the call.
+WebRtc_Word32 ModuleVideoRenderImpl::AddExternalRenderCallback(
+                                                               const WebRtc_UWord32 streamId,
+                                                               VideoRenderCallback* renderObject)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get stream", __FUNCTION__);
+        return -1;
+    }
+    return stream->SetExternalCallback(renderObject);
+}
+
+// Fetches the placement (zOrder and normalized rectangle) of streamId from
+// the platform renderer. Returns -1 if no renderer exists.
+WebRtc_Word32 ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+                                                                       const WebRtc_UWord32 streamId,
+                                                                       WebRtc_UWord32& zOrder,
+                                                                       float& left,
+                                                                       float& top,
+                                                                       float& right,
+                                                                       float& bottom) const
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+                                                           left, top, right,
+                                                           bottom);
+}
+
+// Returns the number of incoming streams currently registered.
+WebRtc_UWord32 ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+    CriticalSectionScoped lock(_moduleCrit);
+    return static_cast<WebRtc_UWord32>(_streamRenderMap.Size());
+}
+
+// True if streamId has been added to this render module, false otherwise.
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+                                                    const WebRtc_UWord32 streamId) const
+{
+    CriticalSectionScoped lock(_moduleCrit);
+    return _streamRenderMap.Find(streamId) != NULL;
+}
+
+// Not implemented; always fails with -1.
+WebRtc_Word32 ModuleVideoRenderImpl::RegisterRawFrameCallback(
+                                                              const WebRtc_UWord32 streamId,
+                                                              VideoRenderCallback* callbackObj)
+{
+    return -1;
+}
+
+// Starts rendering of streamId: first the platform independent incoming
+// stream, then the underlying hardware renderer.
+// Returns 0 on success, -1 on any failure.
+WebRtc_Word32 ModuleVideoRenderImpl::StartRender(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        return -1;
+    }
+
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream->Start() == -1)
+    {
+        return -1;
+    }
+
+    // Start the hardware renderer as well.
+    return (_ptrRenderer->StartRender() == -1) ? -1 : 0;
+}
+
+// Stops the incoming stream identified by streamId. The hardware renderer
+// is left running (other streams may still be active).
+// Returns 0 on success, -1 on failure.
+WebRtc_Word32 ModuleVideoRenderImpl::StopRender(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%d): No renderer", __FUNCTION__, streamId);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        return -1;
+    }
+
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    return (stream->Stop() == -1) ? -1 : 0;
+}
+
+// Resets (but does not remove) every incoming stream, putting the module
+// back in its start state. Returns 0 if all streams reset, -1 otherwise.
+WebRtc_Word32 ModuleVideoRenderImpl::ResetRender()
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    WebRtc_Word32 result = 0;
+    for (MapItem* item = _streamRenderMap.First(); item != NULL;
+         item = _streamRenderMap.Next(item))
+    {
+        IncomingVideoStream* stream =
+                static_cast<IncomingVideoStream*> (item->GetItem());
+        if (stream->Reset() == -1)
+        {
+            result = -1;
+        }
+    }
+    return result;
+}
+
+// Returns the raw video type the platform renderer prefers to be fed, or
+// kVideoI420 when no renderer has been created.
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+    CriticalSectionScoped lock(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        return kVideoI420;
+    }
+    // (The misspelled method name is part of the IVideoRender interface.)
+    return _ptrRenderer->PerferedVideoType();
+}
+
+// True when the platform renderer runs in fullscreen mode; false when it
+// does not or when no renderer exists (an error is traced in that case).
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+    CriticalSectionScoped lock(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->FullScreen();
+}
+
+// Reports the screen resolution in pixels via the out parameters.
+// Returns 0 on success, -1 if no renderer exists or the query fails.
+WebRtc_Word32 ModuleVideoRenderImpl::GetScreenResolution(
+                                                         WebRtc_UWord32& screenWidth,
+                                                         WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        // Bug fix: was 'return false' (== 0), which reported success to
+        // callers even though no renderer exists.
+        return -1;
+    }
+    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+// Returns the actual rendered frame rate for streamId (frames drawn, not
+// frames delivered), or 0 when no renderer exists.
+WebRtc_UWord32 ModuleVideoRenderImpl::RenderFrameRate(
+                                                      const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        // Type fix: this function returns an unsigned rate, not a bool;
+        // the original 'return false' converted to the same value 0.
+        return 0;
+    }
+    return _ptrRenderer->RenderFrameRate(streamId);
+}
+
+// Sets cropping of the incoming stream streamId to the given normalized
+// rectangle. Returns 0 on success, -1 if no renderer exists or the call
+// fails.
+WebRtc_Word32 ModuleVideoRenderImpl::SetStreamCropping(
+                                                       const WebRtc_UWord32 streamId,
+                                                       const float left,
+                                                       const float top,
+                                                       const float right,
+                                                       const float bottom)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        // Bug fix: was 'return false' (== 0), which reported success to
+        // callers even though no renderer exists.
+        return -1;
+    }
+    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+// Enables or disables a transparent background in the platform renderer.
+// Returns 0 on success, -1 if no renderer exists or the call fails.
+WebRtc_Word32 ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        // Bug fix: was 'return false' (== 0), which reported success to
+        // callers even though no renderer exists.
+        return -1;
+    }
+    return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+// Not implemented; always fails with -1.
+WebRtc_Word32 ModuleVideoRenderImpl::FullScreenRender(void* window,
+                                                      const bool enable)
+{
+    return -1;
+}
+
+// Draws a text overlay (identified by textId) in the given rectangle with
+// the given colors. Forwarded to the platform renderer.
+// Returns -1 if no renderer exists, otherwise the renderer's result.
+WebRtc_Word32 ModuleVideoRenderImpl::SetText(
+                                             const WebRtc_UWord8 textId,
+                                             const WebRtc_UWord8* text,
+                                             const WebRtc_Word32 textLength,
+                                             const WebRtc_UWord32 textColorRef,
+                                             const WebRtc_UWord32 backgroundColorRef,
+                                             const float left, const float top,
+                                             const float right,
+                                             const float bottom)
+{
+    CriticalSectionScoped lock(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+                                 backgroundColorRef, left, top, right, bottom);
+}
+
+// Draws a bitmap overlay (identified by pictureId) in the given rectangle.
+// Forwarded to the platform renderer.
+// Returns -1 if no renderer exists, otherwise the renderer's result.
+WebRtc_Word32 ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+                                               const WebRtc_UWord8 pictureId,
+                                               const void* colorKey,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped lock(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+                                   right, bottom);
+}
+
+// Copies the most recently rendered frame of streamId into |frame|.
+// Returns -1 when no renderer exists; 0 otherwise, including the case
+// where the stream is unknown (which leaves |frame| untouched).
+WebRtc_Word32 ModuleVideoRenderImpl::GetLastRenderedFrame(
+                                                          const WebRtc_UWord32 streamId,
+                                                          VideoFrame &frame) const
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return 0;
+    }
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream == NULL)
+    {
+        // Map entries must always carry a stream; drop the broken entry.
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return stream->GetLastRenderedFrame(frame);
+}
+
+// Re-configures the placement (zOrder and normalized rectangle) of
+// streamId in the platform renderer.
+// Returns 0 on success, -1 if no renderer exists or the call fails.
+WebRtc_Word32 ModuleVideoRenderImpl::ConfigureRenderer(
+                                                       const WebRtc_UWord32 streamId,
+                                                       const unsigned int zOrder,
+                                                       const float left,
+                                                       const float top,
+                                                       const float right,
+                                                       const float bottom)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        // Bug fix: was 'return false' (== 0), which reported success to
+        // callers even though no renderer exists.
+        return -1;
+    }
+    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+                                           bottom);
+}
+
+// Sets the image shown on streamId before the first frame is rendered.
+// Returns -1 if no renderer exists or the stream is unknown, otherwise
+// the result of the stream call.
+WebRtc_Word32 ModuleVideoRenderImpl::SetStartImage(
+                                                   const WebRtc_UWord32 streamId,
+                                                   const VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream == NULL)
+    {
+        // Map entries must always carry a stream; drop the broken entry.
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return stream->SetStartImage(videoFrame);
+}
+
+// Sets the image shown on streamId when no frame has been rendered for
+// |timeout| ms. Returns -1 if no renderer exists or the stream is unknown,
+// otherwise the result of the stream call.
+WebRtc_Word32 ModuleVideoRenderImpl::SetTimeoutImage(
+                                                     const WebRtc_UWord32 streamId,
+                                                     const VideoFrame& videoFrame,
+                                                     const WebRtc_UWord32 timeout)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream == NULL)
+    {
+        // Map entries must always carry a stream; drop the broken entry.
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return stream->SetTimeoutImage(videoFrame, timeout);
+}
+
+// Enables/disables mirroring of the stream renderId around the X and/or Y
+// axis. Returns -1 if no renderer exists, 0 if the stream is unknown,
+// otherwise the result of the stream call.
+WebRtc_Word32 ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
+                                                        const bool enable,
+                                                        const bool mirrorXAxis,
+                                                        const bool mirrorYAxis)
+{
+    CriticalSectionScoped cs(_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* item = _streamRenderMap.Find(renderId);
+    if (item == NULL)
+    {
+        // Unknown stream; note this is reported as success (0).
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return 0;
+    }
+    IncomingVideoStream* stream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (stream == NULL)
+    {
+        // Map entries must always carry a stream; drop the broken entry.
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+
+    return stream->EnableMirroring(enable, mirrorXAxis, mirrorYAxis);
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/video_render_impl.h b/trunk/src/modules/video_render/main/source/video_render_impl.h
new file mode 100644
index 0000000..16f934e
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/video_render_impl.h
@@ -0,0 +1,229 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+
+#include "engine_configurations.h"
+#include "video_render.h"
+#include "map_wrapper.h"
+
+//#include "video_render_defines.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class IncomingVideoStream;
+class IVideoRender;
+class MapWrapper;
+
+// Class definitions
+class ModuleVideoRenderImpl: public VideoRender
+{
+public:
+    /*
+     *   VideoRenderer constructor/destructor
+     */
+    ModuleVideoRenderImpl(const WebRtc_Word32 id,
+                          const VideoRenderType videoRenderType,
+                          void* window, const bool fullscreen);
+
+    virtual ~ModuleVideoRenderImpl();
+
+    /*
+     *   Change the unique identifier of this object
+     */
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    // Module interface; this module performs no periodic processing.
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    /*
+     *   Returns the render window
+     */
+    virtual void* Window();
+
+    /*
+     *   Change render window
+     */
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /*
+     *   Returns module id
+     */
+    WebRtc_Word32 Id();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     */
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+    /*
+     *   Delete incoming render stream
+     */
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     */
+    virtual WebRtc_Word32
+            AddExternalRenderCallback(const WebRtc_UWord32 streamId,
+                                      VideoRenderCallback* renderObject);
+
+    /*
+     *   Get the properties for an incoming render stream
+     */
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+    /*
+     *   Incoming frame rate for the specified stream.
+     */
+    virtual WebRtc_UWord32 GetIncomingFrameRate(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool HasIncomingRenderStream(const WebRtc_UWord32 streamId) const;
+
+    /*
+     *   Not implemented in this module; always returns -1.
+     */
+    virtual WebRtc_Word32
+            RegisterRawFrameCallback(const WebRtc_UWord32 streamId,
+                                     VideoRenderCallback* callbackObj);
+
+    virtual WebRtc_Word32 GetLastRenderedFrame(const WebRtc_UWord32 streamId,
+                                               VideoFrame &frame) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual WebRtc_Word32 StartRender(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Stops the renderer
+     */
+    virtual WebRtc_Word32 StopRender(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Sets the renderer in start state, no streams removed.
+     */
+    virtual WebRtc_Word32 ResetRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the preferred render video type
+     */
+    virtual RawVideoType PreferredVideoType() const;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen();
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    /*
+     *   Get the actual render rate for this stream. I.e rendered frame rate,
+     *   not frames delivered to the renderer.
+     */
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    // Not implemented in this module; always returns -1.
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetStartImage(const WebRtc_UWord32 streamId,
+                                        const VideoFrame& videoFrame);
+
+    virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId,
+                                          const VideoFrame& videoFrame,
+                                          const WebRtc_UWord32 timeout);
+
+    virtual WebRtc_Word32 MirrorRenderStream(const int renderId,
+                                             const bool enable,
+                                             const bool mirrorXAxis,
+                                             const bool mirrorYAxis);
+
+private:
+    // Module id, used in trace output.
+    WebRtc_Word32 _id;
+    // Guards the public methods; deleted by this object's destructor.
+    CriticalSectionWrapper& _moduleCrit;
+    // Render window handle returned by Window().
+    void* _ptrWindow;
+    // Render type requested at construction.
+    VideoRenderType _renderType;
+    bool _fullScreen;
+
+    // Platform specific renderer; checked for NULL throughout.
+    IVideoRender* _ptrRenderer;
+    // Maps streamId -> IncomingVideoStream*; deleted (with its items) by
+    // this object's destructor.
+    MapWrapper& _streamRenderMap;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
diff --git a/trunk/src/modules/video_render/main/source/windows/i_video_render_win.h b/trunk/src/modules/video_render/main/source/windows/i_video_render_win.h
new file mode 100644
index 0000000..a765134
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/i_video_render_win.h
@@ -0,0 +1,118 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+
+#include "video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+class IVideoRenderWin
+{
+public:
+    /**************************************************************************
+     *
+     *   Constructor/destructor
+     *
+     ***************************************************************************/
+    virtual ~IVideoRenderWin()
+    {
+    };
+
+    virtual WebRtc_Word32 Init() = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * CreateChannel(const WebRtc_UWord32 streamId,
+                            const WebRtc_UWord32 zOrder,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId) = 0;
+
+    virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            WebRtc_UWord32& zOrder,
+                                            float& left,
+                                            float& top,
+                                            float& right,
+                                            float& bottom) = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender() = 0;
+
+    virtual WebRtc_Word32 StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen() = 0;
+
+    virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
+                                      const WebRtc_UWord16 streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            const unsigned int zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 colorText,
+                                  const WebRtc_UWord32 colorBg,
+                                  const float left,
+                                  const float top,
+                                  const float right,
+                                  const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) = 0;
+
+    virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                            WebRtc_UWord64& availableMemory) = 0;
+
+};
+
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
diff --git a/trunk/src/modules/video_render/main/source/windows/video_render_direct3d9.cc b/trunk/src/modules/video_render/main/source/windows/video_render_direct3d9.cc
new file mode 100644
index 0000000..00bc41b
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/video_render_direct3d9.cc
@@ -0,0 +1,1192 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Own include file
+#include "video_render_direct3d9.h"
+
+// System include files
+#include <windows.h>
+
+// WebRtc include files
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+namespace webrtc {
+
+// A structure for our custom vertex type
+struct CUSTOMVERTEX
+{
+    FLOAT x, y, z;
+    DWORD color; // The vertex color
+    FLOAT u, v;
+};
+
+// Our custom FVF, which describes our custom vertex structure
+#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ|D3DFVF_DIFFUSE|D3DFVF_TEX1)
+
+/*
+ *
+ *    D3D9Channel
+ *
+ */
+D3D9Channel::D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                                 CriticalSectionWrapper* critSect,
+                                 Trace* trace) :
+    _width(0),
+    _height(0),
+    _pd3dDevice(pd3DDevice),
+    _pTexture(NULL),
+    _bufferIsUpdated(false),
+    _critSect(critSect),
+    _streamId(0),
+    _zOrder(0),
+    _startWidth(0),
+    _startHeight(0),
+    _stopWidth(0),
+    _stopHeight(0)
+{
+
+}
+
+D3D9Channel::~D3D9Channel()
+{
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+}
+
+void D3D9Channel::SetStreamSettings(WebRtc_UWord16 streamId,
+                                        WebRtc_UWord32 zOrder,
+                                        float startWidth,
+                                        float startHeight,
+                                        float stopWidth,
+                                        float stopHeight)
+{
+    _streamId = streamId;
+    _zOrder = zOrder;
+    _startWidth = startWidth;
+    _startHeight = startHeight;
+    _stopWidth = stopWidth;
+    _stopHeight = stopHeight;
+}
+
+int D3D9Channel::GetStreamSettings(WebRtc_UWord16 streamId,
+                                       WebRtc_UWord32& zOrder,
+                                       float& startWidth,
+                                       float& startHeight,
+                                       float& stopWidth,
+                                       float& stopHeight)
+{
+    streamId = _streamId;
+    zOrder = _zOrder;
+    startWidth = _startWidth;
+    startHeight = _startHeight;
+    stopWidth = _stopWidth;
+    stopHeight = _stopHeight;
+    return 0;
+}
+
+int D3D9Channel::GetTextureWidth()
+{
+    return _width;
+}
+
+int D3D9Channel::GetTextureHeight()
+{
+    return _height;
+}
+
+// Called from the video engine when the frame size changes
+int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "FrameSizeChange, width: %d, height: %d, streams: %d", width,
+                 height, numberOfStreams);
+
+    CriticalSectionScoped cs(*_critSect);
+    _width = width;
+    _height = height;
+
+    //clean the previous texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret = E_POINTER;
+
+    if (_pd3dDevice)
+      ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                       D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 D3D9Channel::RenderFrame(const WebRtc_UWord32 streamId,
+                                           VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(*_critSect);
+    if (_width != videoFrame.Width() || _height != videoFrame.Height())
+    {
+        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
+                        videoFrame.TimeStamp());
+}
+
+// Called from video engine when a new frame should be rendered.
+int D3D9Channel::DeliverFrame(unsigned char* buffer,
+                                  int bufferSize,
+                                  unsigned int timeStamp90kHz)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "DeliverFrame to D3D9Channel");
+
+    CriticalSectionScoped cs(*_critSect);
+
+    //FIXME: if _bufferIsUpdated is still true (not yet rendered), do we want to update the texture?
+    //probably not
+    if (_bufferIsUpdated)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                     "Last frame hasn't been rendered yet. Drop this frame.");
+        return -1;
+    }
+
+    if (!_pd3dDevice)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "D3D for rendering not initialized.");
+        return -1;
+    }
+
+    if (!_pTexture)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+
+    if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock a texture in D3D9 Channel.");
+        return -1;
+    }
+    UCHAR* pRect = (UCHAR*) lr.pBits;
+
+    ConvertFromI420(buffer, _width, kARGB, 0, _width, _height, pRect);
+
+    if (FAILED(_pTexture->UnlockRect(0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to unlock a texture in D3D9 Channel.");
+        return -1;
+    }
+
+    _bufferIsUpdated = true;
+
+    return 0;
+}
+
+// Called by d3d channel owner to indicate the frame/texture has been rendered off
+int D3D9Channel::RenderOffFrame()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Frame has been rendered to the screen.");
+    CriticalSectionScoped cs(*_critSect);
+    _bufferIsUpdated = false;
+    return 0;
+}
+
+// Called by d3d channel owner to check if the texture is updated
+int D3D9Channel::IsUpdated(bool& isUpdated)
+{
+    CriticalSectionScoped cs(*_critSect);
+    isUpdated = _bufferIsUpdated;
+    return 0;
+}
+
+// Called by d3d channel owner to get the texture
+LPDIRECT3DTEXTURE9 D3D9Channel::GetTexture()
+{
+    CriticalSectionScoped cs(*_critSect);
+    return _pTexture;
+}
+
+int D3D9Channel::ReleaseTexture()
+{
+    CriticalSectionScoped cs(*_critSect);
+
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+    _pd3dDevice = NULL;
+    return 0;
+}
+
+int D3D9Channel::RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice)
+{
+    CriticalSectionScoped cs(*_critSect);
+
+    _pd3dDevice = pd3DDevice;
+
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret;
+
+    ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                     D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    VideoRenderDirect3D9
+ *
+ */
+VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
+                                                   HWND hWnd,
+                                                   bool fullScreen) :
+    _refD3DCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _trace(trace),
+    _hWnd(hWnd),
+    _fullScreen(fullScreen),
+    _pTextureLogo(NULL),
+    _pVB(NULL),
+    _pd3dDevice(NULL),
+    _pD3D(NULL),
+    _d3dChannels(),
+    _d3dZorder(),
+    _screenUpdateThread(NULL),
+    _screenUpdateEvent(NULL),
+    _logoLeft(0),
+    _logoTop(0),
+    _logoRight(0),
+    _logoBottom(0),
+    _pd3dSurface(NULL),
+    _totalMemory(-1),
+    _availableMemory(-1)
+{
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc,
+                                                      this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+    SetRect(&_originalHwndRect, 0, 0, 0, 0);
+}
+
+VideoRenderDirect3D9::~VideoRenderDirect3D9()
+{
+    //NOTE: we should not enter CriticalSection in here!
+
+    // Signal event to exit thread, then delete it
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    _screenUpdateThread = NULL;
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+    }
+    delete _screenUpdateEvent;
+
+    //close d3d device
+    CloseDevice();
+
+    // Delete all channels
+    std::map<int, D3D9Channel*>::iterator it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        delete it->second;
+        it = _d3dChannels.erase(it);
+    }
+    // Clean the zOrder map
+    _d3dZorder.clear();
+
+    if (_fullScreen)
+    {
+        // restore hwnd to original size and position
+        ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
+                       _originalHwndRect.top, _originalHwndRect.right
+                               - _originalHwndRect.left,
+                       _originalHwndRect.bottom - _originalHwndRect.top,
+                       SWP_FRAMECHANGED);
+        ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                | RDW_ERASE);
+        ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                | RDW_ERASE);
+    }
+
+    delete &_refD3DCritsect;
+}
+
+DWORD VideoRenderDirect3D9::GetVertexProcessingCaps()
+{
+    D3DCAPS9 caps;
+    DWORD dwVertexProcessing = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+    if (SUCCEEDED(_pD3D->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
+                                       &caps)))
+    {
+        if ((caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+                == D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+        {
+            dwVertexProcessing = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+        }
+    }
+    return dwVertexProcessing;
+}
+
+int VideoRenderDirect3D9::InitializeD3D(HWND hWnd,
+                                            D3DPRESENT_PARAMETERS* pd3dpp)
+{
+    // initialize Direct3D
+    if (NULL == (_pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+    {
+        return -1;
+    }
+
+    // determine what type of vertex processing to use based on the device capabilities
+    DWORD dwVertexProcessing = GetVertexProcessingCaps();
+
+    // get the display mode
+    D3DDISPLAYMODE d3ddm;
+    _pD3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3ddm);
+    pd3dpp->BackBufferFormat = d3ddm.Format;
+
+    // create the D3D device
+    if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
+                                   dwVertexProcessing | D3DCREATE_MULTITHREADED
+                                           | D3DCREATE_FPU_PRESERVE, pd3dpp,
+                                   &_pd3dDevice)))
+    {
+        //try the ref device
+        if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_REF,
+                                       hWnd, dwVertexProcessing
+                                               | D3DCREATE_MULTITHREADED
+                                               | D3DCREATE_FPU_PRESERVE,
+                                       pd3dpp, &_pd3dDevice)))
+        {
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::ResetDevice()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::ResetDevice");
+
+    CriticalSectionScoped cs(_refD3DCritsect);
+
+    //release the channel texture
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->ReleaseTexture();
+        }
+        it++;
+    }
+
+    //close d3d device
+    if (CloseDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to CloseDevice");
+        return -1;
+    }
+
+    //reinit d3d device
+    if (InitDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to InitDevice");
+        return -1;
+    }
+
+    //recreate channel texture
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->RecreateTexture(_pd3dDevice);
+        }
+        it++;
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::InitDevice()
+{
+    // Set up the structure used to create the D3DDevice
+    ZeroMemory(&_d3dpp, sizeof(_d3dpp));
+    _d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    _d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
+    if (GetWindowRect(_hWnd, &_originalHwndRect) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice Could not get window size");
+        return -1;
+    }
+    if (!_fullScreen)
+    {
+        _winWidth = _originalHwndRect.right - _originalHwndRect.left;
+        _winHeight = _originalHwndRect.bottom - _originalHwndRect.top;
+        _d3dpp.Windowed = TRUE;
+        _d3dpp.BackBufferHeight = 0;
+        _d3dpp.BackBufferWidth = 0;
+    }
+    else
+    {
+        _winWidth = (LONG) ::GetSystemMetrics(SM_CXSCREEN);
+        _winHeight = (LONG) ::GetSystemMetrics(SM_CYSCREEN);
+        _d3dpp.Windowed = FALSE;
+        _d3dpp.BackBufferWidth = _winWidth;
+        _d3dpp.BackBufferHeight = _winHeight;
+        _d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    }
+
+    if (InitializeD3D(_hWnd, &_d3dpp) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice failed in InitializeD3D");
+        return -1;
+    }
+
+    // Turn off culling, so we see the front and back of the triangle
+    _pd3dDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
+
+    // Turn off D3D lighting, since we are providing our own vertex colors
+    _pd3dDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
+
+    // Settings for alpha blending
+    _pd3dDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
+    _pd3dDevice->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
+    _pd3dDevice->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
+    //_pd3dDevice->SetTextureStageState(0,D3DTSS_ALPHAOP,D3DTOP_SELECTARG1);
+    //_pd3dDevice->SetTextureStageState(0,D3DTSS_ALPHAARG1,D3DTA_TEXTURE);
+
+    // Initialize Vertices
+    CUSTOMVERTEX Vertices[] = {
+            //front
+            { -1.0f, -1.0f, 0.0f, 0xffffffff, 0, 1 }, { -1.0f, 1.0f, 0.0f,
+                    0xffffffff, 0, 0 },
+            { 1.0f, -1.0f, 0.0f, 0xffffffff, 1, 1 }, { 1.0f, 1.0f, 0.0f,
+                    0xffffffff, 1, 0 } };
+
+    // Create the vertex buffer. 
+    if (FAILED(_pd3dDevice->CreateVertexBuffer(sizeof(Vertices), 0,
+                                               D3DFVF_CUSTOMVERTEX,
+                                               D3DPOOL_DEFAULT, &_pVB, NULL )))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to create the vertex buffer.");
+        return -1;
+    }
+
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(_pVB->Lock(0, sizeof(Vertices), (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, Vertices, sizeof(Vertices));
+    _pVB->Unlock();
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::Init");
+
+    CriticalSectionScoped cs(_refD3DCritsect);
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Thread not created");
+        return -1;
+    }
+    unsigned int threadId;
+    _screenUpdateThread->Start(threadId);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    DEVMODE dm;
+    // initialize the DEVMODE structure
+    ZeroMemory(&dm, sizeof(dm));
+    dm.dmSize = sizeof(dm);
+    if (0 != EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &dm))
+    {
+        monitorFreq = dm.dmDisplayFrequency;
+    }
+    _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq);
+
+    return InitDevice();
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return -1;
+}
+
+int VideoRenderDirect3D9::UpdateRenderSurface()
+{
+    CriticalSectionScoped cs(_refD3DCritsect);
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+
+        D3D9Channel* channel = it->second;
+        channel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+    //nothing is updated, continue
+    if (!updated)
+        return -1;
+
+    // Clear the backbuffer to a black color
+    _pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f,
+                       0);
+
+    // Begin the scene
+    if (SUCCEEDED(_pd3dDevice->BeginScene()))
+    {
+        _pd3dDevice->SetStreamSource(0, _pVB, 0, sizeof(CUSTOMVERTEX));
+        _pd3dDevice->SetFVF(D3DFVF_CUSTOMVERTEX);
+
+        D3DXMATRIX matWorld;
+        D3DXMATRIX matWorldTemp;
+
+        //draw all the channels
+        //get texture from the channels
+        LPDIRECT3DTEXTURE9 textureFromChannel = NULL;
+        DWORD textureWidth, textureHeight;
+
+        std::multimap<int, unsigned int>::reverse_iterator it;
+        it = _d3dZorder.rbegin();
+        while (it != _d3dZorder.rend())
+        {
+            // loop through all channels and streams in Z order
+            int channel = it->second & 0x0000ffff;
+
+            std::map<int, D3D9Channel*>::iterator ddIt;
+            ddIt = _d3dChannels.find(channel);
+            if (ddIt != _d3dChannels.end())
+            {
+                // found the channel
+                D3D9Channel* channelObj = ddIt->second;
+                if (channelObj)
+                {
+                    textureFromChannel = channelObj->GetTexture();
+                    textureWidth = channelObj->GetTextureWidth();
+                    textureHeight = channelObj->GetTextureHeight();
+
+                    WebRtc_UWord32 zOrder;
+                    float startWidth, startHeight, stopWidth, stopHeight;
+                    channelObj->GetStreamSettings(0, zOrder, startWidth,
+                                                  startHeight, stopWidth,
+                                                  stopHeight);
+
+                    //draw the video stream
+                    UpdateVerticeBuffer(_pVB, 0, startWidth, startHeight,
+                                        stopWidth, stopHeight);
+                    _pd3dDevice->SetTexture(0, textureFromChannel);
+                    _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+
+                    //Notify the channel that this frame has been rendered
+                    channelObj->RenderOffFrame();
+                }
+            }
+            it++;
+        }
+
+        //draw the logo
+        if (_pTextureLogo)
+        {
+            UpdateVerticeBuffer(_pVB, 0, _logoLeft, _logoTop, _logoRight,
+                                _logoBottom);
+            _pd3dDevice->SetTexture(0, _pTextureLogo);
+            _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+        }
+
+        // End the scene
+        _pd3dDevice->EndScene();
+    }
+
+    // Present the backbuffer contents to the display
+    _pd3dDevice->Present(NULL, NULL, NULL, NULL );
+
+    return 0;
+}
+
+//set the alpha value of pixels matching the color key to 0 (transparent)
+int VideoRenderDirect3D9::SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                                                  DDCOLORKEY* transparentColorKey,
+                                                  DWORD width,
+                                                  DWORD height)
+{
+    D3DLOCKED_RECT lr;
+    if (!pTexture)
+        return -1;
+
+    CriticalSectionScoped cs(_refD3DCritsect);
+    if (SUCCEEDED(pTexture->LockRect(0, &lr, NULL, D3DLOCK_DISCARD)))
+    {
+        for (DWORD y = 0; y < height; y++)
+        {
+            DWORD dwOffset = y * width;
+
+            for (DWORD x = 0; x < width; x)
+            {
+                DWORD a = (DWORD) 0;
+
+                DWORD temp = ((DWORD*) lr.pBits)[dwOffset + x];
+                if ((temp & 0x00FFFFFF)
+                        == transparentColorKey->dwColorSpaceLowValue)
+                {
+                    temp &= 0x00FFFFFF;
+                }
+                else
+                {
+                    temp |= 0xFF000000;
+                }
+                ((DWORD*) lr.pBits)[dwOffset + x] = temp;
+                x++;
+            }
+        }
+        pTexture->UnlockRect(0);
+        return 0;
+    }
+    return -1;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderDirect3D9*> (obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderDirect3D9::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    if (!_screenUpdateThread)
+    {
+        //stop the thread
+        return false;
+    }
+    if (!_pd3dDevice)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "d3dDevice not created.");
+        return true;
+    }
+
+    HRESULT hr = _pd3dDevice->TestCooperativeLevel();
+
+    if (SUCCEEDED(hr))
+    {
+        UpdateRenderSurface();
+    }
+
+    if (hr == D3DERR_DEVICELOST)
+    {
+        //Device is lost and cannot be reset yet
+
+    }
+    else if (hr == D3DERR_DEVICENOTRESET)
+    {
+        //Lost but we can reset it now
+        //Note: the standard way is to call Reset, however for some reason doesn't work here.
+        //so we will release the device and create it again.
+        ResetDevice();
+    }
+
+    return true;
+}
+
+int VideoRenderDirect3D9::CloseDevice()
+{
+    CriticalSectionScoped cs(_refD3DCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::CloseDevice");
+
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+
+    if (_pVB != NULL)
+    {
+        _pVB->Release();
+        _pVB = NULL;
+    }
+
+    if (_pd3dDevice != NULL)
+    {
+        _pd3dDevice->Release();
+        _pd3dDevice = NULL;
+    }
+
+    if (_pD3D != NULL)
+    {
+        _pD3D->Release();
+        _pD3D = NULL;
+    }
+
+    if (_pd3dSurface != NULL)
+        _pd3dSurface->Release();
+    return 0;
+}
+
+D3D9Channel* VideoRenderDirect3D9::GetD3DChannel(int channel)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return NULL;
+    }
+    return ddobj;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::DeleteChannel(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_refD3DCritsect);
+
+
+    std::multimap<int, unsigned int>::iterator it;
+    it = _d3dZorder.begin();
+    while (it != _d3dZorder.end())
+    {
+        if ((streamId & 0x0000ffff) == (it->second & 0x0000ffff))
+        {
+            it = _d3dZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(streamId & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        delete ddIt->second;
+        _d3dChannels.erase(ddIt);        
+        return 0;
+    }
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderDirect3D9::CreateChannel(const WebRtc_UWord32 channel,
+                                                                 const WebRtc_UWord32 zOrder,
+                                                                 const float left,
+                                                                 const float top,
+                                                                 const float right,
+                                                                 const float bottom)
+{
+    CriticalSectionScoped cs(_refD3DCritsect);
+
+    //FIXME this should be done in VideoAPIWindows? stop the frame deliver first
+    //remove the old channel	
+    DeleteChannel(channel);
+
+    D3D9Channel* d3dChannel = new D3D9Channel(_pd3dDevice,
+                                                      &_refD3DCritsect, _trace);
+    d3dChannel->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    // store channel
+    _d3dChannels[channel & 0x0000ffff] = d3dChannel;
+
+    // store Z order
+    // default streamID is 0
+    _d3dZorder.insert(
+                      std::pair<int, unsigned int>(zOrder, channel & 0x0000ffff));
+
+    return d3dChannel;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::GetStreamSettings(const WebRtc_UWord32 channel,
+                                                          const WebRtc_UWord16 streamId,
+                                                          WebRtc_UWord32& zOrder,
+                                                          float& left,
+                                                          float& top,
+                                                          float& right,
+                                                          float& bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel; demuxing is not supported.
+    return ddobj->GetStreamSettings(0, zOrder, left, top, right, bottom);
+    //return ddobj->GetStreamSettings(streamId, zOrder, left, top, right, bottom);    
+}
+
+int VideoRenderDirect3D9::UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB,
+                                                  int offset,
+                                                  float startWidth,
+                                                  float startHeight,
+                                                  float stopWidth,
+                                                  float stopHeight)
+{
+    if (pVB == NULL)
+        return -1;
+
+    float left, right, top, bottom;
+
+    //update the vertice buffer
+    //0,1 => -1,1
+    left = startWidth * 2 - 1;
+    right = stopWidth * 2 - 1;
+
+    //0,1 => 1,-1
+    top = 1 - startHeight * 2;
+    bottom = 1 - stopHeight * 2;
+
+    CUSTOMVERTEX newVertices[] = {
+            //logo
+            { left, bottom, 0.0f, 0xffffffff, 0, 1 }, { left, top, 0.0f,
+                    0xffffffff, 0, 0 },
+            { right, bottom, 0.0f, 0xffffffff, 1, 1 }, { right, top, 0.0f,
+                    0xffffffff, 1, 0 }, };
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(pVB->Lock(sizeof(CUSTOMVERTEX) * offset, sizeof(newVertices),
+                         (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, newVertices, sizeof(newVertices));
+    pVB->Unlock();
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::StartRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::StopRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+bool VideoRenderDirect3D9::IsFullScreen()
+{
+    return _fullScreen;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetCropping(const WebRtc_UWord32 channel,
+                                                    const WebRtc_UWord16 streamId,
+                                                    const float left,
+                                                    const float top,
+                                                    const float right,
+                                                    const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetTransparentBackground(
+                                                                 const bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetText(const WebRtc_UWord8 textId,
+                                                const WebRtc_UWord8* text,
+                                                const WebRtc_Word32 textLength,
+                                                const WebRtc_UWord32 colorText,
+                                                const WebRtc_UWord32 colorBg,
+                                                const float left,
+                                                const float top,
+                                                const float rigth,
+                                                const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetBitmap(const void* bitMap,
+                                                  const WebRtc_UWord8 pictureId,
+                                                  const void* colorKey,
+                                                  const float left,
+                                                  const float top,
+                                                  const float right,
+                                                  const float bottom)
+{
+    if (!bitMap)
+    {
+        if (_pTextureLogo != NULL)
+        {
+            _pTextureLogo->Release();
+            _pTextureLogo = NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "Remove bitmap.");
+        return 0;
+    }
+
+    // sanity
+    if (left > 1.0f || left < 0.0f ||
+        top > 1.0f || top < 0.0f ||
+        right > 1.0f || right < 0.0f ||
+        bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    if ((bottom <= top) || (right <= left))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_refD3DCritsect);
+
+    unsigned char* srcPtr;
+    HGDIOBJ oldhand;
+    BITMAPINFO pbi;
+    BITMAP bmap;
+    HDC hdcNew;
+    hdcNew = CreateCompatibleDC(0);
+    // Fill out the BITMAP structure.
+    GetObject((HBITMAP)bitMap, sizeof(bmap), &bmap);
+    //Select the bitmap handle into the new device context.
+    oldhand = SelectObject(hdcNew, (HGDIOBJ) bitMap);
+    // we are done with this object
+    DeleteObject(oldhand);
+    pbi.bmiHeader.biSize = 40;
+    pbi.bmiHeader.biWidth = bmap.bmWidth;
+    pbi.bmiHeader.biHeight = bmap.bmHeight;
+    pbi.bmiHeader.biPlanes = 1;
+    pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
+    pbi.bmiHeader.biCompression = BI_RGB;
+    pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
+    srcPtr = new unsigned char[bmap.bmWidth * bmap.bmHeight * 4];
+    // the original un-stretched image in RGB24
+    int pixelHeight = GetDIBits(hdcNew, (HBITMAP)bitMap, 0, bmap.bmHeight, srcPtr, &pbi,
+                                DIB_RGB_COLORS);
+    if (pixelHeight == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to GetDIBits in SetBitmap");
+        delete [] srcPtr; DeleteDC(hdcNew); return -1; // free before early exit
+    }
+    DeleteDC(hdcNew);
+    if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to SetBitmap invalid bit depth");
+        delete [] srcPtr; return -1; // srcPtr would otherwise leak
+    }
+
+    HRESULT ret;
+    //release the previous logo texture
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+    ret = _pd3dDevice->CreateTexture(bmap.bmWidth, bmap.bmHeight, 1, 0,
+                                     D3DFMT_A8R8G8B8, D3DPOOL_MANAGED,
+                                     &_pTextureLogo, NULL);
+    if (FAILED(ret))
+    {
+        _pTextureLogo = NULL;
+        delete [] srcPtr; return -1; // srcPtr would otherwise leak
+    }
+    if (!_pTextureLogo)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+    if (FAILED(_pTextureLogo->LockRect(0, &lr, NULL, 0)))
+    {
+        delete [] srcPtr; return -1; // srcPtr would otherwise leak
+    }
+    unsigned char* dstPtr = (UCHAR*) lr.pBits;
+    int pitch = bmap.bmWidth * 4;
+
+    if (pbi.bmiHeader.biBitCount == 24)
+    {       
+        ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight, 0);
+    }
+    else
+    {
+        unsigned char* srcTmp = srcPtr + (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
+        for (int i = 0; i < bmap.bmHeight; ++i)
+        {
+            memcpy(dstPtr, srcTmp, bmap.bmWidth * 4);
+            srcTmp -= bmap.bmWidth * 4;
+            dstPtr += pitch;
+        }
+    }
+
+    delete [] srcPtr; // array delete to match new[] allocation
+    if (FAILED(_pTextureLogo->UnlockRect(0)))
+    {
+        return -1;
+    }
+
+    if (colorKey)
+    {
+        DDCOLORKEY* ddColorKey =
+                static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
+        SetTransparentColor(_pTextureLogo, ddColorKey, bmap.bmWidth,
+                            bmap.bmHeight);
+    }
+
+    //update the vertice buffer
+    //0,1 => -1,1
+    _logoLeft = left;
+    _logoRight = right;
+
+    //0,1 => 1,-1
+    _logoTop = top;
+    _logoBottom = bottom;
+
+    return 0;
+
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                                          WebRtc_UWord64& availableMemory)
+{
+    if (_totalMemory == -1 || _availableMemory == -1)
+    {
+        totalMemory = 0;
+        availableMemory = 0;
+        return -1;
+    }
+    totalMemory = _totalMemory;
+    availableMemory = _availableMemory;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::ConfigureRenderer(const WebRtc_UWord32 channel,
+                                                          const WebRtc_UWord16 streamId,
+                                                          const unsigned int zOrder,
+                                                          const float left,
+                                                          const float top,
+                                                          const float right,
+                                                          const float bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel; demuxing is not supported.
+    ddobj->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    return 0;
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/windows/video_render_direct3d9.h b/trunk/src/modules/video_render/main/source/windows/video_render_direct3d9.h
new file mode 100644
index 0000000..6d6fef3
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/video_render_direct3d9.h
@@ -0,0 +1,267 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+
+// WebRtc includes
+#include "i_video_render_win.h"
+
+#include <d3d9.h>
+#include <d3dx9.h>
+#include "ddraw.h"
+
+#include <map>
+
+// Added
+#include "video_render_defines.h"
+
+#pragma comment(lib, "d3d9.lib")       // located in DirectX SDK
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class Trace;
+class ThreadWrapper;
+
+class D3D9Channel: public VideoRenderCallback
+{
+public:
+    D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                    CriticalSectionWrapper* critSect, Trace* trace);
+
+    virtual ~D3D9Channel();
+
+    // Inherited from VideoRenderCallback, called from VideoAPI class.
+    // Called when the incoming frame size and/or number of streams in mix changes
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    // A new frame is delivered
+    virtual int DeliverFrame(unsigned char* buffer,
+                             int bufferSize,
+                             unsigned int timeStame90kHz);
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    // Called to check if the video frame is updated.
+    int IsUpdated(bool& isUpdated);
+    // Called after the video frame has been rendered to the screen
+    int RenderOffFrame();
+    // Called to get the texture that contains the video frame
+    LPDIRECT3DTEXTURE9 GetTexture();
+    // Called to get the texture(video frame) size
+    int GetTextureWidth();
+    int GetTextureHeight();
+    //
+    void SetStreamSettings(WebRtc_UWord16 streamId,
+                           WebRtc_UWord32 zOrder,
+                           float startWidth,
+                           float startHeight,
+                           float stopWidth,
+                           float stopHeight);
+    int GetStreamSettings(WebRtc_UWord16 streamId,
+                          WebRtc_UWord32& zOrder,
+                          float& startWidth,
+                          float& startHeight,
+                          float& stopWidth,
+                          float& stopHeight);
+
+    int ReleaseTexture();
+    int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
+
+protected:
+
+private:
+    //critical section passed from the owner
+    CriticalSectionWrapper* _critSect;
+    LPDIRECT3DDEVICE9 _pd3dDevice;
+    LPDIRECT3DTEXTURE9 _pTexture;
+
+    bool _bufferIsUpdated;
+    // the frame size
+    int _width;
+    int _height;
+    //stream settings
+    //TODO support multiple streams in one channel
+    WebRtc_UWord16 _streamId;
+    WebRtc_UWord32 _zOrder;
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+};
+
+class VideoRenderDirect3D9: IVideoRenderWin
+{
+public:
+    VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
+    ~VideoRenderDirect3D9();
+
+public:
+    //IVideoRenderWin
+
+    /**************************************************************************
+     *
+     *   Init
+     *
+     ***************************************************************************/
+    virtual WebRtc_Word32 Init();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback
+            * CreateChannel(const WebRtc_UWord32 streamId,
+                            const WebRtc_UWord32 zOrder,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom);
+
+    virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            WebRtc_UWord32& zOrder,
+                                            float& left,
+                                            float& top,
+                                            float& right,
+                                            float& bottom);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen();
+
+    virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
+                                      const WebRtc_UWord16 streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            const unsigned int zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                            WebRtc_UWord64& availableMemory);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 colorText,
+                                  const WebRtc_UWord32 colorBg,
+                                  const float left,
+                                  const float top,
+                                  const float rigth,
+                                  const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom);
+
+public:
+    // Get a channel by channel id
+    D3D9Channel* GetD3DChannel(int channel);
+    int UpdateRenderSurface();
+
+protected:
+    // The thread rendering the screen
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+
+private:
+    // Init/close the d3d device
+    int InitDevice();
+    int CloseDevice();
+
+    // Transparent related functions
+    int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                            DDCOLORKEY* transparentColorKey,
+                            DWORD width,
+                            DWORD height);
+
+    CriticalSectionWrapper& _refD3DCritsect;
+    Trace* _trace;
+    ThreadWrapper* _screenUpdateThread;
+    EventWrapper* _screenUpdateEvent;
+
+    HWND _hWnd;
+    bool _fullScreen;
+    RECT _originalHwndRect;
+    //FIXME we probably don't need this since all the information can be obtained from _d3dChannels
+    int _channel;
+    //Window size
+    UINT _winWidth;
+    UINT _winHeight;
+
+    // Device
+    LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
+    LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
+    LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
+    LPDIRECT3DTEXTURE9 _pTextureLogo;
+
+    std::map<int, D3D9Channel*> _d3dChannels;
+    std::multimap<int, unsigned int> _d3dZorder;
+
+    // The position where the logo will be placed
+    float _logoLeft;
+    float _logoTop;
+    float _logoRight;
+    float _logoBottom;
+
+    typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
+    LPDIRECT3DSURFACE9 _pd3dSurface;
+
+    DWORD GetVertexProcessingCaps();
+    int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
+
+    D3DPRESENT_PARAMETERS _d3dpp;
+    int ResetDevice();
+
+    int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
+                            float startWidth, float startHeight,
+                            float stopWidth, float stopHeight);
+
+    //code for providing graphics settings
+    DWORD _totalMemory;
+    DWORD _availableMemory;
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
diff --git a/trunk/src/modules/video_render/main/source/windows/video_render_directdraw.cc b/trunk/src/modules/video_render/main/source/windows/video_render_directdraw.cc
new file mode 100644
index 0000000..034f710
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/video_render_directdraw.cc
@@ -0,0 +1,4012 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_directdraw.h"
+#include "video_render_windows_impl.h"
+#include "Windows.h"
+#include <ddraw.h>
+#include <assert.h>
+#include <initguid.h>
+#include <MMSystem.h> // timeGetTime
+DEFINE_GUID( IID_IDirectDraw7,0x15e65ec0,0x3b9c,0x11d2,0xb9,0x2f,0x00,0x60,0x97,0x97,0xea,0x5b );
+
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "critical_section_wrapper.h"
+//#include "VideoErrors.h"
+
+// Added
+#include "module_common_types.h"
+
+#pragma warning(disable: 4355) // 'this' : used in base member initializer list
+// picture in picture do we need overlay? answer no we can blit directly
+// conference is easy since we can blt the quadrants separately
+
+// To determine if the driver supports DMA, retrieve the driver capabilities by calling the IDirectDraw::GetCaps method, 
+// then look for DDBLTCAPS_READSYSMEM and/or DDBLTCAPS_WRITESYSMEM. If either of these flags is set, the device supports DMA.
+// Blt with SRCCOPY should do this can we use it?
+// investigate DDLOCK_NOSYSLOCK 
+
+namespace webrtc {
+
+#define EXTRACT_BITS_RL(the_val, bits_start, bits_len) ((the_val >> (bits_start - 1)) & ((1 << bits_len) - 1)) 
+
+WindowsThreadCpuUsage::WindowsThreadCpuUsage() :
+    _lastGetCpuUsageTime(0),
+    _lastCpuUsageTime(0),
+    _hThread(::GetCurrentThread()),
+    _cores(0),
+    _lastCpuUsage(0)
+{
+
+    DWORD_PTR pmask, smask;
+    DWORD access = PROCESS_QUERY_INFORMATION;
+    if (GetProcessAffinityMask(
+                               OpenProcess(access, false, GetCurrentProcessId()),
+                               &pmask, &smask) != 0)
+    {
+
+        for (int i = 1; i < 33; i++)
+        {
+            if (EXTRACT_BITS_RL(pmask,i,1) == 0)
+            {
+                break;
+            }
+            _cores++;
+        }
+        //sanity
+        if (_cores > 32)
+        {
+            _cores = 32;
+        }
+        if (_cores < 1)
+        {
+            _cores = 1;
+        }
+    }
+    else
+    {
+        _cores = 1;
+    }
+    GetCpuUsage();
+}
+
+//in % since last call
+int WindowsThreadCpuUsage::GetCpuUsage()
+{
+    DWORD now = timeGetTime();
+
+    _int64 newTime = 0;
+    FILETIME creationTime;
+    FILETIME exitTime;
+    _int64 kernelTime = 0;
+    _int64 userTime = 0;
+    if (GetThreadTimes(_hThread, (FILETIME*) &creationTime, &exitTime,
+                       (FILETIME*) &kernelTime, (FILETIME*) &userTime) != 0)
+    {
+        newTime = (kernelTime + userTime);
+    }
+    if (newTime == 0)
+    {
+        _lastGetCpuUsageTime = now;
+        return _lastCpuUsage;
+    }
+
+    // calculate the time difference since last call
+    const DWORD diffTime = (now - _lastGetCpuUsageTime);
+    _lastGetCpuUsageTime = now;
+
+    if (newTime < _lastCpuUsageTime)
+    {
+        _lastCpuUsageTime = newTime;
+        return _lastCpuUsage;
+    }
+    const int cpuDiff = (int) (newTime - _lastCpuUsageTime) / 10000;
+    _lastCpuUsageTime = newTime;
+
+    // calculate the CPU usage
+
+    _lastCpuUsage = (int) (float((cpuDiff * 100)) / (diffTime * _cores) + 0.5f);
+
+    if (_lastCpuUsage > 100)
+    {
+        _lastCpuUsage = 100;
+    }
+    return _lastCpuUsage;
+
+}
+
+DirectDrawStreamSettings::DirectDrawStreamSettings() :
+    _startWidth(0.0F),
+    _stopWidth(1.0F),
+    _startHeight(0.0F),
+    _stopHeight(1.0F),
+    _cropStartWidth(0.0F),
+    _cropStopWidth(1.0F),
+    _cropStartHeight(0.0F),
+    _cropStopHeight(1.0F)
+{
+}
+;
+
+DirectDrawBitmapSettings::DirectDrawBitmapSettings() :
+    _transparentBitMap(NULL),
+    _transparentBitmapLeft(0.0f),
+    _transparentBitmapRight(1.0f),
+    _transparentBitmapTop(0.0f),
+    _transparentBitmapBottom(1.0f),
+    _transparentBitmapWidth(0),
+    _transparentBitmapHeight(0),
+    _transparentBitmapColorKey(NULL),
+    _transparentBitmapSurface(NULL)
+{
+}
+;
+
+DirectDrawBitmapSettings::~DirectDrawBitmapSettings()
+{
+    if (_transparentBitmapColorKey)
+    {
+        delete _transparentBitmapColorKey;
+    }
+    if (_transparentBitmapSurface)
+    {
+        _transparentBitmapSurface->Release();
+    }
+    _transparentBitmapColorKey = NULL;
+    _transparentBitmapSurface = NULL;
+}
+;
+
+int DirectDrawBitmapSettings::SetBitmap(Trace* _trace,
+                                            DirectDraw* directDraw)
+{
+    VideoFrame tempVideoBuffer;
+    HGDIOBJ oldhand;
+    BITMAPINFO pbi;
+    BITMAP bmap;
+    HDC hdcNew;
+
+    hdcNew = CreateCompatibleDC(0);
+
+    // Fill out the BITMAP structure.
+    GetObject(_transparentBitMap, sizeof(bmap), &bmap);
+
+    //Select the bitmap handle into the new device context.
+    oldhand = SelectObject(hdcNew, (HGDIOBJ) _transparentBitMap);
+
+    // we are done with this object
+    DeleteObject(oldhand);
+
+    pbi.bmiHeader.biSize = 40;
+    pbi.bmiHeader.biWidth = bmap.bmWidth;
+    pbi.bmiHeader.biHeight = bmap.bmHeight;
+    pbi.bmiHeader.biPlanes = 1;
+    pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
+    pbi.bmiHeader.biCompression = BI_RGB;
+    pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
+
+    tempVideoBuffer.VerifyAndAllocate(bmap.bmWidth * bmap.bmHeight * 4);
+
+    // the original un-stretched image in RGB24
+    // TODO: is there another struct for pbi? Purify reports a read 24 bytes larger than size
+    int pixelHeight = GetDIBits(hdcNew, _transparentBitMap, 0, bmap.bmHeight,
+                                tempVideoBuffer.Buffer(), &pbi, DIB_RGB_COLORS);
+    if (pixelHeight == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to GetDIBits in SetBitmap.");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    DeleteDC(hdcNew);
+
+    if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to SetBitmap invalid bit depth");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    DirectDrawSurfaceDesc ddsd;
+    memset(&ddsd, 0, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT;
+    ddsd.ddsCaps.dwCaps = DDSCAPS_VIDEOMEMORY;
+    ddsd.dwHeight = bmap.bmHeight;
+    ddsd.dwWidth = bmap.bmWidth;
+
+    ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+    ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+
+    _transparentBitmapWidth = bmap.bmWidth;
+    _transparentBitmapHeight = bmap.bmHeight;
+
+    ddsd.ddpfPixelFormat.dwRGBBitCount = 32;
+    ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
+    ddsd.ddpfPixelFormat.dwGBitMask = 0xff00;
+    ddsd.ddpfPixelFormat.dwBBitMask = 0xff;
+    ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+
+    if (_transparentBitmapSurface)
+    {
+        _transparentBitmapSurface->Release();
+        _transparentBitmapSurface = NULL;
+    }
+
+    HRESULT ddrval =
+            directDraw->CreateSurface(&ddsd, &_transparentBitmapSurface, NULL);
+    if (FAILED(ddrval))
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw failed to CreateSurface _transparentBitmapSurface: 0x%x",
+                     ddrval);
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    memset(&ddsd, 0, sizeof(DDSURFACEDESC));
+    ddsd.dwSize = sizeof(DDSURFACEDESC);
+    ddrval = _transparentBitmapSurface->Lock(NULL, &ddsd, DDLOCK_WAIT, NULL);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        ddrval = _transparentBitmapSurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                         "DirectDraw failed to restore lost _transparentBitmapSurface");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restored lost _transparentBitmapSurface");
+
+        ddrval
+                = _transparentBitmapSurface->Lock(NULL, &ddsd, DDLOCK_WAIT,
+                                                  NULL);
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(
+                         kTraceInfo,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw lock error 0x%x _transparentBitmapSurface",
+                         ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+    unsigned char* dstPtr = (unsigned char*) ddsd.lpSurface;
+    unsigned char* srcPtr = (unsigned char*) tempVideoBuffer.Buffer();
+
+    int pitch = bmap.bmWidth * 4;
+    if (ddsd.dwFlags & DDSD_PITCH)
+    {
+        pitch = ddsd.lPitch;
+    }
+
+    if (pbi.bmiHeader.biBitCount == 24)
+    {
+        ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight,
+                                   0);
+    }
+    else
+    {
+        srcPtr += (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
+
+        for (int i = 0; i < bmap.bmHeight; ++i)
+        {
+            memcpy(dstPtr, srcPtr, bmap.bmWidth * 4);
+            srcPtr -= bmap.bmWidth * 4;
+            dstPtr += pitch;
+        }
+    }
+
+    _transparentBitmapSurface->Unlock(NULL);
+    return 0;
+}
+/**
+ *
+ *   DirectDrawTextSettings
+ *
+ */
+DirectDrawTextSettings::DirectDrawTextSettings() :
+    _ptrText(NULL),
+    _textLength(0),
+    _colorRefText(RGB(255, 255, 255)), // white
+    _colorRefBackground(RGB(0, 0, 0)), // black
+    _textLeft(0.0f),
+    _textRight(0.0f),
+    _textTop(0.0f),
+    _textBottom(0.0f),
+    _transparent(true)
+{
+}
+
+DirectDrawTextSettings::~DirectDrawTextSettings()
+{
+    if (_ptrText)
+    {
+        delete[] _ptrText;
+    }
+}
+
+int DirectDrawTextSettings::SetText(const char* text, int textLength,
+                                        COLORREF colorText, COLORREF colorBg,
+                                        float left, float top, float right,
+                                        float bottom)
+{
+    if (_ptrText)
+    {
+        delete[] _ptrText;
+    }
+    _ptrText = new char[textLength];
+    memcpy(_ptrText, text, textLength);
+    _textLength = textLength;
+    _colorRefText = colorText;
+    _colorRefBackground = colorBg;
+    //_transparent = transparent;
+    _textLeft = left;
+    _textRight = right;
+    _textTop = top;
+    _textBottom = bottom;
+    return 0;
+}
+
+/**
+ *
+ *	DirectDrawChannel
+ *
+ *
+ */
+
+// this needs to have a refcount due to multiple-HWND demuxing
+DirectDrawChannel::DirectDrawChannel(DirectDraw* directDraw,
+                                             VideoType blitVideoType,
+                                             VideoType incomingVideoType,
+                                             VideoType screenVideoType,
+                                             VideoRenderDirectDraw* owner) :
+
+    _critSect(CriticalSectionWrapper::CreateCriticalSection()), _refCount(1),
+            _width(0), _height(0), _numberOfStreams(0), _doubleBuffer(false),
+            _directDraw(directDraw), _offScreenSurface(NULL),
+            _offScreenSurfaceNext(NULL), _incomingVideoType(incomingVideoType),
+            _blitVideoType(blitVideoType),
+            _originalBlitVideoType(blitVideoType),
+            _screenVideoType(screenVideoType), _deliverInScreenType(false),
+            _owner(owner)
+{
+    _directDraw->AddRef();
+}
+
+DirectDrawChannel::~DirectDrawChannel()
+{
+    if (_directDraw)
+    {
+        _directDraw->Release();
+    }
+    if (_offScreenSurface)
+    {
+        _offScreenSurface->Release();
+    }
+    if (_offScreenSurfaceNext)
+    {
+        _offScreenSurfaceNext->Release();
+    }
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.begin();
+    while (it != _streamIdToSettings.end())
+    {
+        DirectDrawStreamSettings* streamSettings = it->second;
+        if (streamSettings)
+        {
+            delete streamSettings;
+        }
+        it = _streamIdToSettings.erase(it);
+    }
+    delete _critSect;
+}
+
+void DirectDrawChannel::AddRef()
+{
+    CriticalSectionScoped cs(*_critSect);
+    _refCount++;
+}
+
+void DirectDrawChannel::Release()
+{
+    bool deleteObj = false;
+    _critSect->Enter();
+    _refCount--;
+    if (_refCount == 0)
+    {
+        deleteObj = true;
+    }
+    _critSect->Leave();
+
+    if (deleteObj)
+    {
+        delete this;
+    }
+}
+
+void DirectDrawChannel::SetStreamSettings(VideoRenderDirectDraw* DDobj,
+                                              short streamId, float startWidth,
+                                              float startHeight,
+                                              float stopWidth, float stopHeight)
+{
+    // we can save 5 bits due to 16 byte alignment of the pointer
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDobj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamId;
+
+    CriticalSectionScoped cs(*_critSect);
+
+    DirectDrawStreamSettings* streamSettings = NULL;
+
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        streamSettings = new DirectDrawStreamSettings();
+        _streamIdToSettings[lookupID] = streamSettings;
+    }
+    else
+    {
+        streamSettings = it->second;
+    }
+
+    streamSettings->_startHeight = startHeight;
+    streamSettings->_startWidth = startWidth;
+    streamSettings->_stopWidth = stopWidth;
+    streamSettings->_stopHeight = stopHeight;
+
+    _offScreenSurfaceUpdated = false;
+}
+
+void DirectDrawChannel::SetStreamCropSettings(VideoRenderDirectDraw* DDObj,
+                                                  short streamId,
+                                                  float startWidth,
+                                                  float startHeight,
+                                                  float stopWidth,
+                                                  float stopHeight)
+{
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDObj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamId;
+
+    CriticalSectionScoped cs(*_critSect);
+
+    DirectDrawStreamSettings* streamSettings = NULL;
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        streamSettings = new DirectDrawStreamSettings();
+        _streamIdToSettings[streamId] = streamSettings;
+    }
+    else
+    {
+        streamSettings = it->second;
+    }
+    streamSettings->_cropStartWidth = startWidth;
+    streamSettings->_cropStopWidth = stopWidth;
+    streamSettings->_cropStartHeight = startHeight;
+    streamSettings->_cropStopHeight = stopHeight;
+}
+
+int DirectDrawChannel::GetStreamSettings(VideoRenderDirectDraw* DDObj,
+                                             short streamId, float& startWidth,
+                                             float& startHeight,
+                                             float& stopWidth,
+                                             float& stopHeight)
+{
+    CriticalSectionScoped cs(*_critSect);
+
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDObj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamId;
+
+    DirectDrawStreamSettings* streamSettings = NULL;
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        // Didn't find this stream...
+        return -1;
+    }
+    streamSettings = it->second;
+    startWidth = streamSettings->_startWidth;
+    startHeight = streamSettings->_startHeight;
+    stopWidth = streamSettings->_stopWidth;
+    stopHeight = streamSettings->_stopHeight;
+
+    return 0;
+}
+
+bool DirectDrawChannel::IsOffScreenSurfaceUpdated(VideoRenderDirectDraw* DDobj)
+{
+    CriticalSectionScoped cs(*_critSect);
+    return _offScreenSurfaceUpdated;
+}
+
+void DirectDrawChannel::GetLargestSize(RECT* mixingRect)
+{
+    CriticalSectionScoped cs(*_critSect);
+    if (mixingRect)
+    {
+        if (mixingRect->bottom < _height)
+        {
+            mixingRect->bottom = _height;
+        }
+        if (mixingRect->right < _width)
+        {
+            mixingRect->right = _width;
+        }
+    }
+}
+
+int DirectDrawChannel::ChangeDeliverColorFormat(bool useScreenType)
+{
+    _deliverInScreenType = useScreenType;
+    return FrameSizeChange(0, 0, 0);
+}
+
+WebRtc_Word32 DirectDrawChannel::RenderFrame(const WebRtc_UWord32 streamId,
+                                                 VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(*_critSect);
+    if (_width != videoFrame.Width() || _height != videoFrame.Height())
+    {
+        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
+                        videoFrame.TimeStamp());
+}
+
// (Re)creates this channel's two off-screen DirectDraw surfaces for the
// given frame size and the current blit pixel format.
//
// width, height    - new frame dimensions; 0/0 keeps the current size and
//                    only forces re-creation (used after a format change).
// numberOfStreams  - number of demuxed streams carried in the frame.
//
// Strategy: try video memory with the requested pixel format first; on
// failure fall back to system memory with an ARGB format. Returns 0 on
// success, -1 if no surface could be created.
int DirectDrawChannel::FrameSizeChange(int width, int height,
                                           int numberOfStreams)
{
    CriticalSectionScoped cs(*_critSect);

    if (_directDraw == NULL)
    {
        return -1; // signal that we are not ready for the change
    }
    // Nothing to do when the size is unchanged and both surfaces still exist.
    if (_width == width && _height == height && _offScreenSurface
            && _offScreenSurfaceNext)
    {
        _numberOfStreams = numberOfStreams;
        return 0;
    }
    // Drop the old surfaces; they will be re-created below.
    if (_offScreenSurface)
    {
        _offScreenSurface->Release();
        _offScreenSurface = NULL;
    }
    if (_offScreenSurfaceNext)
    {
        _offScreenSurfaceNext->Release();
        _offScreenSurfaceNext = NULL;
    }
    // A zero width/height keeps the previously stored dimensions.
    if (width && height)
    {
        _width = width;
        _height = height;
        _numberOfStreams = numberOfStreams;
    }

    // create this channels offscreen buffer
    DirectDrawSurfaceDesc ddsd;
    HRESULT ddrval = DD_OK;
    memset(&ddsd, 0, sizeof(ddsd));
    ddsd.dwSize = sizeof(ddsd);
    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT;
    ddsd.ddsCaps.dwCaps = DDSCAPS_VIDEOMEMORY;
    ddsd.dwHeight = _height;
    ddsd.dwWidth = _width;
    /*
     char logStr[256];
     _snprintf(logStr,256, "offscreen H:%d W:%d \n",_height, _width);
     OutputDebugString(logStr);
     */
    // Fix for bad video driver on HP Mini. If it takes too long to deliver a
    // frame - try to blit using the same pixel format as used by the screen.
    if (_deliverInScreenType && _screenVideoType != kUnknown)
    {
        //The HP mini netbook, which this fix for, uses the VIA processor.
        //The measuring shows that this fix will impact systems with Intel processor, including Atom.
        //So let's disable it here. If we really need this for VIA processor, we should have additional logic to detect
        //the processor model.
        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDrawChannel changing to screen video type");
        //_blitVideoType=_screenVideoType;
        // NOTE(review): branch intentionally left empty (fix disabled above).
    }
    else
    {
        WEBRTC_TRACE(
                     kTraceInfo,
                     kTraceVideo,
                     -1,
                     "DirectDrawChannel changing to originial blit video type %d",
                     _originalBlitVideoType);
        _blitVideoType = _originalBlitVideoType;
    }

    WEBRTC_TRACE(
                 kTraceInfo,
                 kTraceVideo,
                 -1,
                 "DirectDrawChannel::FrameSizeChange height %d, width %d, _blitVideoType %d",
                 ddsd.dwHeight, ddsd.dwWidth, _blitVideoType);
    // Fill in the pixel format that matches the chosen blit type. FOURCC
    // formats describe planar/packed YUV; the RGB cases set explicit masks.
    switch (_blitVideoType)
    {
        case kYV12:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('Y', 'V', '1', '2');
        }
            break;
        case kYUY2:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('Y', 'U', 'Y', '2');
        }
            break;
        case kUYVY:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('U', 'Y', 'V', 'Y');
        }
            break;
        case kIYUV:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('I', 'Y', 'U', 'V');
        }
            break;
        case kARGB:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
            ddsd.ddpfPixelFormat.dwRGBBitCount = 32;
            ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
            ddsd.ddpfPixelFormat.dwGBitMask = 0xff00;
            ddsd.ddpfPixelFormat.dwBBitMask = 0xff;
            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
        }
            break;
        case kRGB24:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
            ddsd.ddpfPixelFormat.dwRGBBitCount = 24;
            ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
            ddsd.ddpfPixelFormat.dwGBitMask = 0xff00;
            ddsd.ddpfPixelFormat.dwBBitMask = 0xff;
            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
        }
            break;
        case kRGB565:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
            ddsd.ddpfPixelFormat.dwRGBBitCount = 16;
            ddsd.ddpfPixelFormat.dwRBitMask = 0x0000F800;
            ddsd.ddpfPixelFormat.dwGBitMask = 0x000007e0;
            ddsd.ddpfPixelFormat.dwBBitMask = 0x0000001F;
            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
        }
            break;
        case kARGB4444:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
            ddsd.ddpfPixelFormat.dwRGBBitCount = 16;
            ddsd.ddpfPixelFormat.dwRBitMask = 0x00000f00;
            ddsd.ddpfPixelFormat.dwGBitMask = 0x000000f0;
            ddsd.ddpfPixelFormat.dwBBitMask = 0x0000000f;
            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
            break;
        }
        case kARGB1555:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
            ddsd.ddpfPixelFormat.dwRGBBitCount = 16;
            ddsd.ddpfPixelFormat.dwRBitMask = 0x00007C00;
            ddsd.ddpfPixelFormat.dwGBitMask = 0x3E0;
            ddsd.ddpfPixelFormat.dwBBitMask = 0x1F;
            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
            break;
        }
        case kI420:
        {
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('I', '4', '2', '0');
        }
            break;
        default:
            // Unsupported blit type; skip surface creation below.
            ddrval = S_FALSE;
    }

    if (ddrval == DD_OK)
    {
        // Preferred path: create both surfaces in video memory with the
        // requested pixel format.
        if (!_owner->IsPrimaryOrMixingSurfaceOnSystem())
        {
            ddrval
                    = _directDraw->CreateSurface(&ddsd, &_offScreenSurface,
                                                 NULL);
            if (FAILED(ddrval))
            {
                WEBRTC_TRACE(
                             kTraceInfo,
                             kTraceVideo,
                             -1,
                             "CreateSurface failed for _offScreenSurface on VideoMemory, trying on System Memory");

                // Fallback: system memory, default (ARGB) pixel format.
                memset(&ddsd, 0, sizeof(ddsd));
                ddsd.dwSize = sizeof(ddsd);
                ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;

                ddsd.dwHeight = _height;
                ddsd.dwWidth = _width;

                ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
                _blitVideoType = kARGB;

                ddrval = _directDraw->CreateSurface(&ddsd, &_offScreenSurface,
                                                    NULL);
                if (FAILED(ddrval))
                {
                    WEBRTC_TRACE(
                                 kTraceError,
                                 kTraceVideo,
                                 -1,
                                 "DirectDraw failed to CreateSurface _offScreenSurface using SystemMemory: 0x%x",
                                 ddrval);
                }
                ddrval = _directDraw->CreateSurface(&ddsd,
                                                    &_offScreenSurfaceNext,
                                                    NULL);
                if (FAILED(ddrval))
                {
                    WEBRTC_TRACE(
                                 kTraceError,
                                 kTraceVideo,
                                 -1,
                                 "DirectDraw failed to CreateSurface _offScreenSurfaceNext using SystemMemory: 0x%x",
                                 ddrval);
                }
            }
            else
            {
                // First surface succeeded in video memory; try the second
                // one there too, falling back to system memory only if
                // video memory is exhausted.
                ddrval = _directDraw->CreateSurface(&ddsd,
                                                    &_offScreenSurfaceNext,
                                                    NULL);
                if (ddrval == DDERR_OUTOFVIDEOMEMORY)
                {
                    WEBRTC_TRACE(
                                 kTraceInfo,
                                 kTraceVideo,
                                 -1,
                                 "CreateSurface failed for _offScreenSurfaceNext on VideoMemory, trying on System Memory");

                    memset(&ddsd, 0, sizeof(ddsd));
                    ddsd.dwSize = sizeof(ddsd);
                    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;

                    ddsd.dwHeight = _height;
                    ddsd.dwWidth = _width;

                    ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
                    _blitVideoType = kARGB;

                    ddrval = _directDraw->CreateSurface(&ddsd,
                                                        &_offScreenSurfaceNext,
                                                        NULL);
                    if (FAILED(ddrval))
                    {
                        WEBRTC_TRACE(
                                     kTraceError,
                                     kTraceVideo,
                                     -1,
                                     "DirectDraw failed to CreateSurface _offScreenSurfaceNext using SystemMemory: 0x%x",
                                     ddrval);
                    }
                }
            }
        }
        else
        {
            // The primary/mixing surface already lives in system memory, so
            // create both off-screen surfaces there as well.
            memset(&ddsd, 0, sizeof(ddsd));
            ddsd.dwSize = sizeof(ddsd);
            ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;

            ddsd.dwHeight = _height;
            ddsd.dwWidth = _width;

            ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
            // Keep the FOURCC blit type only if the driver can blit it;
            // otherwise convert to ARGB on delivery.
            if (_owner->CanBltFourCC())
            {
                _blitVideoType = kARGB;
            }
            else
            {
                _blitVideoType = _originalBlitVideoType;
            }

            ddrval
                    = _directDraw->CreateSurface(&ddsd, &_offScreenSurface,
                                                 NULL);
            if (FAILED(ddrval))
            {
                WEBRTC_TRACE(
                             kTraceError,
                             kTraceVideo,
                             -1,
                             "DirectDraw failed to CreateSurface _offScreenSurface using SystemMemory: 0x%x",
                             ddrval);
            }

            ddrval = _directDraw->CreateSurface(&ddsd, &_offScreenSurfaceNext,
                                                NULL);
            if (FAILED(ddrval))
            {
                WEBRTC_TRACE(
                             kTraceError,
                             kTraceVideo,
                             -1,
                             "DirectDraw failed to CreateSurface _offScreenSurfaceNext using SystemMemory: 0x%x",
                             ddrval);
            }
        }
    }

    if (FAILED(ddrval))
    {
        // failed to change size
        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
                     "DirectDraw failed to CreateSurface : 0x%x", ddrval);
        return -1;
    }

    return 0;
}
+
// Copies one incoming frame into an off-screen surface, converting from the
// incoming pixel format (I420 only) to the current blit format.
//
// buffer     - frame data in _incomingVideoType layout.
// bufferSize - must exactly match the expected size for _width x _height.
// Returns 0 on success, -1 on any failure (bad size, lost/unlockable
// surface). Lost surfaces are released so the next frame re-creates them.
int DirectDrawChannel::DeliverFrame(unsigned char* buffer, int bufferSize,
                                        unsigned int /*timeStamp90KHz*/)
{
    CriticalSectionScoped cs(*_critSect);

    if (CalcBufferSize(_incomingVideoType, _width, _height)
            != bufferSize)
    {
        // sanity
        return -1;
    }
    if (!_offScreenSurface || !_offScreenSurfaceNext)
    {
        if (_width && _height && _numberOfStreams)
        {
            // our surface was lost recreate it
            FrameSizeChange(_width, _height, _numberOfStreams);
        }
        // Drop this frame; surfaces (if re-created) are used next time.
        return -1;
    }
    // Restore both surfaces if the driver reports them lost (e.g. after a
    // display-mode switch).
    if (_offScreenSurface->IsLost() == DDERR_SURFACELOST)
    {
        HRESULT ddrval = _offScreenSurface->Restore();
        if (ddrval != DD_OK)
        {
            // failed to restore our surface remove it and it will be re-created in next frame
            _offScreenSurface->Release();
            _offScreenSurface = NULL;
            _offScreenSurfaceNext->Release();
            _offScreenSurfaceNext = NULL;
            return -1;
        }
        ddrval = _offScreenSurfaceNext->Restore();
        if (ddrval != DD_OK)
        {
            // failed to restore our surface remove it and it will be re-created in next frame
            _offScreenSurface->Release();
            _offScreenSurface = NULL;
            _offScreenSurfaceNext->Release();
            _offScreenSurfaceNext = NULL;
            return -1;
        }
    }
    _doubleBuffer = false;

    // check if _offScreenSurfaceUpdated is true
    DirectDrawSurface* offScreenSurface = _offScreenSurface;
    {

        if (_offScreenSurfaceUpdated)
        {
            // this frame is not yet rendered
            // Write into the secondary surface so the pending frame in the
            // primary one is not overwritten before it is blitted.
            offScreenSurface = _offScreenSurfaceNext;
            _doubleBuffer = true;
        }
    }

    // Lock the chosen surface to obtain a CPU-writable pointer.
    DirectDrawSurfaceDesc ddsd;
    memset(&ddsd, 0, sizeof(ddsd));
    ddsd.dwSize = sizeof(ddsd);
    HRESULT ddrval = offScreenSurface->Lock(NULL, &ddsd, DDLOCK_WAIT, NULL);
    if (ddrval == DDERR_SURFACELOST)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
                     "DirectDrawChannel::DeliverFrame offScreenSurface lost");
        ddrval = offScreenSurface->Restore();
        if (ddrval != DD_OK)
        {
            // failed to restore our surface remove it and it will be re-created in next frame
            _offScreenSurface->Release();
            _offScreenSurface = NULL;
            _offScreenSurfaceNext->Release();
            _offScreenSurfaceNext = NULL;
            return -1;
        }
        // Restored but this frame is skipped; treat as success.
        return 0;
    }
    if (ddrval != DD_OK)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
                     "DirectDrawChannel::DeliverFrame failed to lock");
        // failed to lock our surface remove it and it will be re-created in next frame
        _offScreenSurface->Release();
        _offScreenSurface = NULL;
        _offScreenSurfaceNext->Release();
        _offScreenSurfaceNext = NULL;
        return -1;
    }

    unsigned char* ptr = (unsigned char*) ddsd.lpSurface;
    // ddsd.lPitch; distance in bytes


    // Convert from the incoming format into the surface. Cases that cannot
    // write with an arbitrary pitch go via _tempRenderBuffer and copy
    // row-by-row honoring ddsd.lPitch.
    switch (_incomingVideoType)
    {
        case kI420:
        {
            switch (_blitVideoType)
            {
                case kYUY2:
                case kUYVY:
                case kIYUV:  // same as kYV12
                case kYV12:
                    ConvertFromI420(buffer, _width,
                                    _blitVideoType, 0,
                                    _width, _height,
                                    ptr);
                    break;
                case kRGB24:
                {
                    _tempRenderBuffer.VerifyAndAllocate(_width * _height * 3);
                    unsigned char *ptrTempBuffer = _tempRenderBuffer.Buffer();
                    ConvertFromI420(buffer, _width, kRGB24, 0, _width, _height,
                                    ptrTempBuffer);
                    // Copy row-by-row to respect the surface pitch.
                    for (int i = 0; i < _height; i++)
                    {
                        memcpy(ptr, ptrTempBuffer, _width * 3);
                        ptrTempBuffer += _width * 3;
                        ptr += ddsd.lPitch;
                    }
                    break;
                }
                case kARGB:
                  ConvertFromI420(buffer, ddsd.lPitch, kARGB, 0,
                                  _width, _height, ptr);
                    break;
                case kARGB4444:
                    ConvertI420ToARGB4444(buffer, ptr, _width, _height,
                                          (ddsd.lPitch >> 1) - _width);
                    break;
                case kARGB1555:
                    ConvertI420ToARGB1555(buffer, ptr, _width, _height,
                                          (ddsd.lPitch >> 1) - _width);
                    break;
                case kRGB565:
                {
                    _tempRenderBuffer.VerifyAndAllocate(_width * _height * 2);
                    unsigned char *ptrTempBuffer = _tempRenderBuffer.Buffer();
                    ConvertI420ToRGB565(buffer, ptrTempBuffer, _width, _height);
                    // RGB565 output is bottom-up: start at the last row and
                    // walk the surface pointer backwards.
                    ptr += ddsd.lPitch * (_height - 1);
                    for (int i = 0; i < _height; i++)
                    {
                        memcpy(ptr, ptrTempBuffer, _width * 2);
                        ptrTempBuffer += _width * 2;
                        ptr -= ddsd.lPitch;
                    }
                    break;
                }
                default:
                    assert(!"DirectDrawChannel::DeliverFrame unknown blitVideoType");
                    WEBRTC_TRACE(
                                 kTraceError,
                                 kTraceVideo,
                                 -1,
                                 "DirectDrawChannel::DeliverFrame unknown blitVideoType %d",
                                 _blitVideoType);
            }
            break;
        }
        default:
            assert(!"DirectDrawChannel::DeliverFrame wrong incomming video type");
            WEBRTC_TRACE(
                         kTraceError,
                         kTraceVideo,
                         -1,
                         "DirectDrawChannel::DeliverFrame wrong incomming %d",
                         _incomingVideoType);
    }

    // Mark the frame as pending for the blit pass and release the lock.
    _offScreenSurfaceUpdated = true;
    offScreenSurface->Unlock(NULL);
    return 0;
}
+
+int DirectDrawChannel::BlitFromOffscreenBufferToMixingBuffer(
+                                                                 VideoRenderDirectDraw* DDobj,
+                                                                 short streamID,
+                                                                 DirectDrawSurface* mixingSurface,
+                                                                 RECT &hwndRect,
+                                                                 bool demuxing)
+{
+    HRESULT ddrval;
+    RECT srcRect;
+    RECT dstRect;
+    DirectDrawStreamSettings* streamSettings = NULL;
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDobj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamID;
+
+    CriticalSectionScoped cs(*_critSect);
+
+    if (_offScreenSurface == NULL)
+    {
+        // The offscreen surface has been deleted but not restored yet
+        return 0;
+    }
+    if (mixingSurface == NULL)
+    {
+        // Not a valid input argument
+        return 0;
+    }
+
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        // ignore this stream id
+        return 0;
+    }
+    streamSettings = it->second;
+
+    int numberOfStreams = _numberOfStreams;
+    if (!demuxing)
+    {
+        numberOfStreams = 1; // treat as one stream if we only have one config
+    }
+
+    switch (numberOfStreams)
+    {
+        case 0:
+            return 0;
+        case 1:
+        {
+            // no demux
+            if (streamID > 0)
+                return 0;
+
+            ::SetRect(&srcRect, int(_width * streamSettings->_cropStartWidth),
+                      int(_height * streamSettings->_cropStartHeight),
+                      int(_width * streamSettings->_cropStopWidth), int(_height
+                              * streamSettings->_cropStopHeight));
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+        }
+            break;
+        case 2:
+        case 3:
+        case 4:
+            // classic quadrant demux
+        {
+            int width = _width >> 1;
+            int height = _height >> 1;
+            ::SetRect(&srcRect, int(width * streamSettings->_cropStartWidth),
+                      int(height * streamSettings->_cropStartHeight), int(width
+                              * streamSettings->_cropStopWidth), int(height
+                              * streamSettings->_cropStopHeight));
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+
+            // stream id to select quadrant
+            if (streamID == 1)
+            {
+                ::OffsetRect(&srcRect, width, 0);
+            }
+            if (streamID == 2)
+            {
+                ::OffsetRect(&srcRect, 0, height);
+            }
+            if (streamID == 3)
+            {
+                ::OffsetRect(&srcRect, width, height);
+            }
+        }
+            break;
+        case 5:
+        case 6:
+        {
+            const int width = (_width / (3 * 16)) * 16;
+            const int widthMidCol = width + ((_width % (16 * 3)) / 16) * 16;
+            const int height = _height / (2 * 16) * 16;
+            if (streamID == 1 || streamID == 4)
+            {
+                ::SetRect(&srcRect, int(widthMidCol
+                        * streamSettings->_cropStartWidth), int(height
+                        * streamSettings->_cropStartHeight), int(widthMidCol
+                        * streamSettings->_cropStopWidth), int(height
+                        * streamSettings->_cropStopHeight));
+            }
+            else
+            {
+                ::SetRect(&srcRect,
+                          int(width * streamSettings->_cropStartWidth),
+                          int(height * streamSettings->_cropStartHeight),
+                          int(width * streamSettings->_cropStopWidth),
+                          int(height * streamSettings->_cropStopHeight));
+            }
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+
+            // stream id to select quadrant
+            switch (streamID)
+            {
+                case 1:
+                    ::OffsetRect(&srcRect, width, 0);
+                    break;
+                case 2:
+                    ::OffsetRect(&srcRect, width + widthMidCol, 0);
+                    break;
+                case 3:
+                    ::OffsetRect(&srcRect, 0, height);
+                    break;
+                case 4:
+                    ::OffsetRect(&srcRect, width, height);
+                    break;
+                case 5:
+                    ::OffsetRect(&srcRect, width + widthMidCol, height);
+                    break;
+            }
+        }
+            break;
+        case 7:
+        case 8:
+        case 9:
+
+        {
+            const int width = (_width / (3 * 16)) * 16;
+            const int widthMidCol = width + ((_width % (16 * 3)) / 16) * 16;
+            const int height = _height / (3 * 16) * 16;
+            const int heightMidRow = height + ((_height % (16 * 3)) / 16) * 16;
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+
+            switch (streamID)
+            {
+                case 0:
+                    //Size
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    //Position
+                    ::OffsetRect(&srcRect, 0, 0);
+                    break;
+                case 1:
+                    ::SetRect(
+                              &srcRect,
+                              int(widthMidCol * streamSettings->_cropStartWidth),
+                              int(height * streamSettings->_cropStartHeight),
+                              int(widthMidCol * streamSettings->_cropStopWidth),
+                              int(height * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width, 0);
+                    break;
+                case 2:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width + widthMidCol, 0);
+                    break;
+                case 3:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStartHeight),
+                              int(width * streamSettings->_cropStopWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, 0, height);
+                    break;
+                case 4:
+                    ::SetRect(
+                              &srcRect,
+                              int(widthMidCol * streamSettings->_cropStartWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStartHeight),
+                              int(widthMidCol * streamSettings->_cropStopWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width, height);
+
+                    break;
+                case 5:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStartHeight),
+                              int(width * streamSettings->_cropStopWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width + widthMidCol, height);
+                    break;
+                case 6:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, 0, height + heightMidRow);
+                    break;
+                case 7:
+                    ::SetRect(
+                              &srcRect,
+                              int(widthMidCol * streamSettings->_cropStartWidth),
+                              int(height * streamSettings->_cropStartHeight),
+                              int(widthMidCol * streamSettings->_cropStopWidth),
+                              int(height * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width, height + heightMidRow);
+                    break;
+                case 8:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width + widthMidCol, height
+                            + heightMidRow);
+                    break;
+            }
+        }
+            break;
+        case 10:
+        case 11:
+        case 12:
+        case 13:
+        case 14:
+        case 15:
+        case 16:
+        default:
+        {
+            ::SetRect(&srcRect, int(_width * streamSettings->_cropStartWidth),
+                      int(_height * streamSettings->_cropStartHeight),
+                      int(_width * streamSettings->_cropStopWidth), int(_height
+                              * streamSettings->_cropStopHeight));
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+        }
+    }
+
+    if (dstRect.right > hwndRect.right)
+    {
+        srcRect.right -= (int) ((float) (srcRect.right - srcRect.left)
+                * ((float) (dstRect.right - hwndRect.right)
+                        / (float) (dstRect.right - dstRect.left)));
+        dstRect.right = hwndRect.right;
+    }
+    if (dstRect.left < hwndRect.left)
+    {
+        srcRect.left += (int) ((float) (srcRect.right - srcRect.left)
+                * ((float) (hwndRect.left - dstRect.left)
+                        / (float) (dstRect.right - dstRect.left)));
+        dstRect.left = hwndRect.left;
+    }
+    if (dstRect.bottom > hwndRect.bottom)
+    {
+        srcRect.bottom -= (int) ((float) (srcRect.bottom - srcRect.top)
+                * ((float) (dstRect.bottom - hwndRect.bottom)
+                        / (float) (dstRect.bottom - dstRect.top)));
+        dstRect.bottom = hwndRect.bottom;
+    }
+    if (dstRect.top < hwndRect.top)
+    {
+        srcRect.top += (int) ((float) (srcRect.bottom - srcRect.top)
+                * ((float) (hwndRect.top - dstRect.top)
+                        / (float) (dstRect.bottom - dstRect.top)));
+        dstRect.top = hwndRect.top;
+    }
+
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    // wait for the _mixingSurface to be available
+    ddrval = mixingSurface->Blt(&dstRect, _offScreenSurface, &srcRect,
+                                DDBLT_WAIT | DDBLT_DDFX, &ddbltfx);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "mixingSurface->Blt surface lost");
+        ddrval = mixingSurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            // we dont own the surface just report the error
+            return -1;
+        }
+    }
+    else if (ddrval == DDERR_INVALIDRECT)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "mixingSurface->Blt DDERR_INVALIDRECT");
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "dstRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     dstRect.top, dstRect.left, dstRect.bottom, dstRect.right);
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "srcRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     srcRect.top, srcRect.left, srcRect.bottom, srcRect.right);
+
+        // ignore
+    }
+    else if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "mixingSurface->Blt !DD_OK");
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw blt mixingSurface BlitFromOffscreenBufferToMixingBuffer error 0x%x  ",
+                     ddrval);
+
+        //logging the co-ordinates and hwnd
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "dstRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     dstRect.top, dstRect.left, dstRect.bottom, dstRect.right);
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "srcRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     srcRect.top, srcRect.left, srcRect.bottom, srcRect.right);
+
+        /*      char logStr[256];
+         _snprintf(logStr,256, "srcRect T:%d L:%d B:%d R:%d\n",srcRect.top, srcRect.left, srcRect.bottom, srcRect.right);
+         OutputDebugString(logStr);
+         char logStr1[256];
+         _snprintf(logStr1,256, "dstRect T:%d L:%d B:%d R:%d\n",dstRect.top, dstRect.left, dstRect.bottom, dstRect.right);
+         OutputDebugString(logStr1);
+         char logStr2[256];
+         _snprintf(logStr2,256, "error 0x:%x \n",ddrval);
+         OutputDebugString(logStr2);
+         */
+        // we dont own the surface just report the error
+        return -1;
+    }
+    if (_doubleBuffer)
+    {
+        DirectDrawSurface* oldOffScreenSurface = _offScreenSurface;
+        _offScreenSurface = _offScreenSurfaceNext;
+        _offScreenSurfaceNext = oldOffScreenSurface;
+        _doubleBuffer = false;
+    }
+    else
+    {
+        _offScreenSurfaceUpdated = false;
+    }
+    return 0;
+}
+
+/**
+ *
+ *	VideoRenderDirectDraw
+ *
+ *
+ */
+
+// VideoRenderDirectDraw constructor.
+// Puts every member into a safe default state; no DirectDraw objects are
+// created here - that happens in Init(). The screen render thread object
+// is created (but not started), and the current client rect of hWnd is
+// cached in _hwndRect.
+VideoRenderDirectDraw::VideoRenderDirectDraw(Trace* trace,
+                                                     HWND hWnd, bool fullscreen) :
+            _trace(trace),
+            _confCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+            _fullscreen(fullscreen),
+            _demuxing(false),
+            _transparentBackground(false),
+            _supportTransparency(false),
+            _canStretch(false),
+            _canMirrorLeftRight(false),
+            _clearMixingSurface(false),
+            _deliverInScreenType(false),
+            _renderModeWaitForCorrectScanLine(false),
+            _deliverInHalfFrameRate(false),
+            _deliverInQuarterFrameRate(false),
+            _bCanBltFourcc(true),
+            _frameChanged(false),
+            _processCount(0),
+            _hWnd(hWnd),
+            _screenRect(),
+            _mixingRect(),
+
+            // Video formats are unknown until CheckCapabilities() probes
+            // the driver's FourCC support.
+            _incomingVideoType(kUnknown),
+            _blitVideoType(kUnknown),
+            _rgbVideoType(kUnknown),
+
+            _directDraw(NULL),
+            _primarySurface(NULL),
+            _backSurface(NULL),
+            _mixingSurface(NULL),
+            _bitmapSettings(),
+            _textSettings(),
+            _directDrawChannels(),
+            _directDrawZorder(),
+
+            _fullScreenWaitEvent(EventWrapper::Create()),
+            _screenEvent(EventWrapper::Create()),
+            _screenRenderThread(
+                                ThreadWrapper::CreateThread(
+                                                            RemoteRenderingThreadProc,
+                                                            this,
+                                                            kRealtimePriority,
+                                                            "Video_directdraw_thread")),
+            _blit(true), _lastRenderModeCpuUsage(-1), _totalMemory(-1),
+            _availableMemory(-1), _systemCPUUsage(0), _maxAllowedRenderTime(0),
+            _nrOfTooLongRenderTimes(0),
+            _isPrimaryOrMixingSurfaceOnSystem(false)
+{
+    // Zero the rects explicitly; _screenRect is filled in later by
+    // CheckCapabilities() and _originalHwndRect by CreatePrimarySurface()
+    // in fullscreen mode.
+    SetRect(&_screenRect, 0, 0, 0, 0);
+    SetRect(&_mixingRect, 0, 0, 0, 0);
+    SetRect(&_originalHwndRect, 0, 0, 0, 0);
+    ::GetClientRect(_hWnd, &_hwndRect);
+}
+
+// Destructor. Tears down in this order: stop the render thread and its
+// timers, release per-channel objects, release surfaces, free overlay
+// bitmap/text settings, release the DirectDraw object (restoring the
+// original window geometry in fullscreen mode), and finally delete the
+// critical section.
+VideoRenderDirectDraw::~VideoRenderDirectDraw()
+{
+    // Detach the thread pointer first, then stop the thread; _screenEvent
+    // is set (presumably to wake the render loop) so Stop() can complete.
+    ThreadWrapper* temp = _screenRenderThread;
+    _screenRenderThread = NULL;
+    if (temp)
+    {
+        temp->SetNotAlive();
+        _screenEvent->Set();
+        _screenEvent->StopTimer();
+        _fullScreenWaitEvent->StopTimer();
+
+        // If Stop() fails the wrapper is deliberately leaked rather than
+        // deleted while the thread may still be running.
+        if (temp->Stop())
+        {
+            delete temp;
+        }
+    }
+    delete _screenEvent;
+    delete _fullScreenWaitEvent;
+
+    // Release all per-channel DirectDraw objects.
+    // NOTE(review): std::map::erase(iterator) returning an iterator is
+    // C++11 (a pre-C++11 MSVC extension) - confirm the toolchain.
+    std::map<int, DirectDrawChannel*>::iterator it;
+    it = _directDrawChannels.begin();
+    while (it != _directDrawChannels.end())
+    {
+        it->second->Release();
+        it = _directDrawChannels.erase(it);
+    }
+    if (_primarySurface)
+    {
+        _primarySurface->Release();
+    }
+    if (_mixingSurface)
+    {
+        _mixingSurface->Release();
+    }
+    // NOTE(review): _backSurface (fullscreen flip chain) is never released
+    // here - verify it is owned by the primary surface's complex chain.
+
+    // Free overlay bitmap settings.
+    std::map<unsigned char, DirectDrawBitmapSettings*>::iterator bitIt;
+
+    bitIt = _bitmapSettings.begin();
+    while (_bitmapSettings.end() != bitIt)
+    {
+        delete bitIt->second;
+        bitIt = _bitmapSettings.erase(bitIt);
+    }
+
+    // Free overlay text settings.
+    std::map<unsigned char, DirectDrawTextSettings*>::iterator textIt;
+    textIt = _textSettings.begin();
+    while (_textSettings.end() != textIt)
+    {
+        delete textIt->second;
+        textIt = _textSettings.erase(textIt);
+    }
+    if (_directDraw)
+    {
+        _directDraw->Release();
+        if (_fullscreen)
+        {
+            // restore hwnd to original size and position
+            ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
+                           _originalHwndRect.top, _originalHwndRect.right
+                                   - _originalHwndRect.left,
+                           _originalHwndRect.bottom - _originalHwndRect.top,
+                           SWP_FRAMECHANGED);
+            ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                    | RDW_ERASE);
+            // Repaint the whole desktop after leaving exclusive mode.
+            ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                    | RDW_ERASE);
+        }
+    }
+    delete _confCritSect;
+}
+
+// Creates the DirectDraw7 object and verifies the required hardware
+// capabilities. When a window handle was supplied, also creates the
+// primary and mixing surfaces, starts the screen render thread, and
+// starts the periodic timer that paces rendering to the monitor refresh
+// rate. Returns 0 on success, -1 or the failing sub-call's code on error.
+WebRtc_Word32 VideoRenderDirectDraw::Init()
+{
+    int retVal = 0;
+    HRESULT ddrval = DirectDrawCreateEx(NULL, (void**) &_directDraw,
+                                        IID_IDirectDraw7, NULL);
+    if (FAILED(ddrval) || NULL == _directDraw)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to created DirectDraw7 object");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    retVal = CheckCapabilities();
+    if (retVal != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw CheckCapabilities failed");
+        return retVal;
+    }
+    if (_hWnd)
+    {
+        retVal = CreatePrimarySurface();
+        if (retVal != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreatePrimarySurface");
+            return retVal;
+        }
+        retVal = CreateMixingSurface();
+        if (retVal != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreateMixingSurface");
+            return retVal;
+        }
+        if (_screenRenderThread)
+        {
+            unsigned int tid;
+            // NOTE(review): Start()'s return value is ignored; the "started"
+            // log below is emitted even if the thread failed to start.
+            _screenRenderThread->Start(tid);
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Screen Render thread started, thread id: %d", tid);
+        }
+        DWORD freq = 0;
+        _directDraw->GetMonitorFrequency(&freq);
+        if (freq == 0)
+        {
+            // Driver could not report the refresh rate; assume 60 Hz.
+            freq = 60;
+        }
+        // Do this now to not do it in each render process loop
+        // (note: 1000 / freq is integer division, then scaled by 0.8).
+        _maxAllowedRenderTime = (int) (1000 / freq * 0.8F);
+        _nrOfTooLongRenderTimes = 0;
+
+        // Periodic tick at roughly the monitor refresh interval.
+        _screenEvent->StartTimer(true, 1000 / freq);
+
+        _deliverInScreenType = false;
+        _renderModeWaitForCorrectScanLine = false;
+        _deliverInHalfFrameRate = false;
+        _deliverInQuarterFrameRate = false;
+
+        _lastRenderModeCpuUsage = -1;
+        if (_fullscreen)
+        {
+            // 1 ms timer used by the fullscreen wait loop.
+            _fullScreenWaitEvent->StartTimer(true, 1);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Screen freq %d", freq);
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "Created DirectDraw object");
+    return 0;
+}
+
+// Reports the total/free graphics memory figures cached by
+// CheckCapabilities(). Returns -1 (with both out-params zeroed) if the
+// capabilities have not been queried yet, 0 otherwise.
+WebRtc_Word32 VideoRenderDirectDraw::GetGraphicsMemory(
+                                                           WebRtc_UWord64& totalMemory,
+                                                           WebRtc_UWord64& availableMemory)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    const bool memoryKnown = (_totalMemory != -1) && (_availableMemory != -1);
+    if (!memoryKnown)
+    {
+        totalMemory = 0;
+        availableMemory = 0;
+        return -1;
+    }
+
+    totalMemory = _totalMemory;
+    availableMemory = _availableMemory;
+    return 0;
+}
+
+// Returns the width/height of the screen rect cached in _screenRect
+// (filled in by CheckCapabilities()). Always returns 0.
+int VideoRenderDirectDraw::GetScreenResolution(int& screenWidth,
+                                                   int& screenHeight)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    const RECT& screen = _screenRect;
+    screenWidth = screen.right - screen.left;
+    screenHeight = screen.bottom - screen.top;
+    return 0;
+}
+
+// Stores a new system CPU usage sample (percent). Samples outside the
+// 0-100 range are ignored and the previous value is kept. Always
+// returns 0.
+int VideoRenderDirectDraw::UpdateSystemCPUUsage(int systemCPU)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+    const bool validSample = (systemCPU >= 0) && (systemCPU <= 100);
+    if (validSample)
+    {
+        _systemCPUUsage = systemCPU;
+    }
+    return 0;
+}
+
+// Queries the DirectDraw driver for the hardware capabilities this
+// renderer relies on (colorkey transparency, stretch blit, left/right
+// mirroring, FourCC color-conversion blits) and caches the results in
+// member flags. Also records total/free video memory for
+// GetGraphicsMemory() and caches the desktop rect in _screenRect.
+// Returns 0 on success, -1 on failure.
+int VideoRenderDirectDraw::CheckCapabilities()
+{
+    HRESULT ddrval = DD_OK;
+    DDCAPS ddcaps;
+    DDCAPS ddcapsEmul;
+    memset(&ddcaps, 0, sizeof(ddcaps));
+    memset(&ddcapsEmul, 0, sizeof(ddcapsEmul));
+    // DirectDraw requires dwSize to be filled in before GetCaps is called.
+    ddcaps.dwSize = sizeof(ddcaps);
+    ddcapsEmul.dwSize = sizeof(ddcapsEmul);
+    if (_directDraw == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw object not created");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (IsRectEmpty(&_screenRect))
+    {
+        // First call: cache the desktop resolution.
+        ::GetWindowRect(GetDesktopWindow(), &_screenRect);
+    }
+    // Log Screen resolution
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "ScreenRect. Top: %d, left: %d, bottom: %d, right: %d",
+                 _screenRect.top, _screenRect.left, _screenRect.bottom,
+                 _screenRect.right);
+
+    bool fullAccelerationEnabled = false;
+    bool badDriver = false;
+    VideoRenderWindowsImpl::CheckHWDriver(badDriver, fullAccelerationEnabled);
+    if (!fullAccelerationEnabled)
+    {
+
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct draw Hardware acceleration is not enabled.");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_HWACC_NOT_ENABLED;
+
+    }
+
+    // ddcaps supported by the HW
+    // ddcapsEmul supported by the OS emulating the HW
+    ddrval = _directDraw->GetCaps(&ddcaps, &ddcapsEmul);
+    if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw HW: could not get capabilities: %x", ddrval);
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    // Rough lower bound: three full-screen ARGB surfaces.
+    unsigned int minVideoMemory = 3 * 4 * (_screenRect.right
+            * _screenRect.bottom); // assuming ARGB size (4 bytes)
+
+    // Store the memory for possible calls to GetMemory()
+    _totalMemory = ddcaps.dwVidMemTotal;
+    _availableMemory = ddcaps.dwVidMemFree;
+
+    if (ddcaps.dwVidMemFree < minVideoMemory)
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw HW does not have enough memory, freeMem:%d, requiredMem:%d",
+                     ddcaps.dwVidMemFree, minVideoMemory);
+        // If memory is not available on the Video Card...allocate it on RAM
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw video memory, freeMem:%d, totalMem:%d",
+                     ddcaps.dwVidMemFree, ddcaps.dwVidMemTotal);
+    }
+
+    /*
+     DirectDrawCaps       ddsCaps ;
+     ZeroMemory(&ddsCaps, sizeof(ddsCaps)) ;
+     ddsCaps.dwCaps  = DDSCAPS_VIDEOMEMORY;
+     DWORD memTotal=0;
+     DWORD memFree=0;
+     ddrval = _directDraw->GetAvailableVidMem(&ddsCaps, &memTotal, &memFree);
+     if(ddrval == DD_OK)
+     {
+     WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw video memory, freeMem:%d, totalMem:%d", memFree, memTotal);
+     }
+     */
+    // Determine if the hardware supports overlay deinterlacing
+    //	bCanDeinterlace = (ddcaps.dwCaps2 & DDCAPS2_CANFLIPODDEVEN) ? 1 : 0;
+
+    // this fail since we check before we set the mode
+    //	bool bCanFlip =(ddcaps.dwCaps & DDSCAPS_FLIP) ? 1 : 0;
+
+    // Determine if the hardware supports colorkeying
+    _supportTransparency = (ddcaps.dwCaps & DDCAPS_COLORKEY) ? 1 : 0;
+    if (_supportTransparency)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw support colorkey");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                     "DirectDraw don't support colorkey");
+    }
+
+    if (ddcaps.dwCaps2 & DDCAPS2_CANRENDERWINDOWED)
+    {
+        //	required for _directDraw->FlipToGDISurface();
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw support CANRENDERWINDOWED");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw don't support CANRENDERWINDOWED");
+    }
+
+    // Determine if the hardware supports scaling during a blit
+    _canStretch = (ddcaps.dwCaps & DDCAPS_BLTSTRETCH) ? 1 : 0;
+    if (_canStretch)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw blit can stretch");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                     "DirectDraw blit can't stretch");
+    }
+
+    // BUG FIX: DDBLTFX_MIRRORLEFTRIGHT is a DDBLTFX.dwDDFX blit flag, not
+    // a dwFXAlphaCaps bit. Blit mirroring support is reported through
+    // DDCAPS.dwFXCaps as DDFXCAPS_BLTMIRRORLEFTRIGHT.
+    _canMirrorLeftRight = (ddcaps.dwFXCaps & DDFXCAPS_BLTMIRRORLEFTRIGHT) ? 1
+            : 0;
+    if (_canMirrorLeftRight)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw mirroring is supported");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw mirroring is not supported");
+    }
+
+    // Determine if the hardware supports color conversion during a blit
+    _bCanBltFourcc = (ddcaps.dwCaps & DDCAPS_BLTFOURCC) ? 1 : 0;
+    if (_bCanBltFourcc)
+        _bCanBltFourcc = (ddcaps.dwCKeyCaps & DDCKEYCAPS_DESTBLT) ? 1 : 0;
+
+    if (_bCanBltFourcc)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw can blit Fourcc");
+        DWORD i_codes;
+        // First call with a NULL buffer just retrieves the code count.
+        ddrval = _directDraw->GetFourCCCodes(&i_codes, NULL);
+
+        if (i_codes > 0)
+        {
+            DWORD* pi_codes = new DWORD[i_codes];
+
+            ddrval = _directDraw->GetFourCCCodes(&i_codes, pi_codes);
+            if (ddrval != DD_OK)
+            {
+                // BUG FIX: on failure pi_codes is uninitialized; don't
+                // scan an array the driver never filled in.
+                i_codes = 0;
+            }
+            // Pick the best supported blit type; stop early if I420 is
+            // ever enabled and found.
+            for (unsigned int i = 0; i < i_codes && _blitVideoType
+                    != kI420; i++)
+            {
+                DWORD w = pi_codes[i];
+                switch (w)
+                {
+                    case MAKEFOURCC('I', '4', '2', '0'):
+                        //					_blitVideoType = kI420;
+                        // not enabled since its not tested
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support I420");
+                        break;
+                    case MAKEFOURCC('I', 'Y', 'U', 'V'): // same as YV12
+                    //					_blitVideoType = kIYUV;
+                        // not enabled since its not tested
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support IYUV");
+                        break;
+                    case MAKEFOURCC('U', 'Y', 'N', 'V'): // alias of UYVY
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support UYNV");
+                        // not enabled since its not tested
+                        break;
+                    case MAKEFOURCC('Y', '4', '2', '2'): // alias of UYVY
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support Y422");
+                        // not enabled since its not tested
+                        break;
+                    case MAKEFOURCC('Y', 'U', 'N', 'V'): // alias of YUY2
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support YUNV");
+                        // not enabled since its not tested
+                        break;
+                    case MAKEFOURCC('Y', 'V', '1', '2'):
+                        // YV12 is the preferred type; it always wins.
+                        _blitVideoType = kYV12;
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support YV12");
+                        break;
+                    case MAKEFOURCC('Y', 'U', 'Y', '2'):
+                        if (_blitVideoType != kYV12)
+                        {
+                            _blitVideoType = kYUY2;
+                        }
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support YUY2");
+                        break;
+                    case MAKEFOURCC('U', 'Y', 'V', 'Y'):
+                        if (_blitVideoType != kYV12)
+                        {
+                            _blitVideoType = kUYVY;
+                        }
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support UYVY");
+                        break;
+                    default:
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw unknown blit type %x", w);
+                        break;
+                }
+            }
+            delete[] pi_codes;
+        }
+    }
+    return 0;
+}
+
+// Disables blitting. The render thread keeps running but _blit gates
+// its drawing. Always returns 0.
+int VideoRenderDirectDraw::Stop()
+{
+    CriticalSectionScoped cs(*_confCritSect);
+    _blit = false;
+    return 0;
+}
+
+// Returns true when a primary/mixing surface had to be allocated in
+// system memory instead of video memory (see the fallback path in
+// CreatePrimarySurface()).
+bool VideoRenderDirectDraw::IsPrimaryOrMixingSurfaceOnSystem()
+{
+    return _isPrimaryOrMixingSurfaceOnSystem;
+}
+
+int VideoRenderDirectDraw::CreatePrimarySurface()
+{
+    // Create the primary surface
+    DirectDrawSurfaceDesc ddsd;
+    ZeroMemory(&ddsd, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    HRESULT ddrval = DD_OK;
+
+    if (_directDraw == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw object not created");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (_primarySurface)
+    {
+        _primarySurface->Release();
+        _primarySurface = NULL;
+    }
+
+    if (!_fullscreen)
+    {
+        // create a normal window
+        ddrval = _directDraw->SetCooperativeLevel(_hWnd, DDSCL_NORMAL);
+        if (FAILED(ddrval))
+        {
+            //******** Potential workaround for D#4608 *************** Ignore error.
+            WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                         "DirectDraw failed to set SetCooperativeLevel %x, ddrval");
+        }
+        // we cant size the primary surface based on _hwndRect
+        ddsd.dwFlags = DDSD_CAPS;
+        ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_VIDEOMEMORY;
+
+#ifndef NOGRAPHICSCARD_MEMORY
+        ddrval = _directDraw->CreateSurface(&ddsd, &_primarySurface, NULL);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to CreateSurface _primarySurface using VideoMemory: 0x%x",
+                         ddrval);
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+#endif
+            //allocate using System memory
+            ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_SYSTEMMEMORY;
+            ddrval = _directDraw->CreateSurface(&ddsd, &_primarySurface, NULL);
+            if (FAILED(ddrval))
+            {
+                WEBRTC_TRACE(
+                             kTraceError,
+                             kTraceVideo,
+                             -1,
+                             "DirectDraw failed to CreateSurface _primarySurface using SystemMemory: 0x%x",
+                             ddrval);
+                if (ddrval != 0x887600E1)
+                {
+                    _directDraw->Release();
+                    _directDraw = 0;
+                }
+                return -1;
+                //return VIDEO_DIRECT_DRAW_FAILURE;
+            }
+            _isPrimaryOrMixingSurfaceOnSystem = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "DirectDraw _primarySurface on SystemMemory");
+
+#ifndef NOGRAPHICSCARD_MEMORY
+        }
+#endif
+
+        // Create a clipper to ensure that our drawing stays inside our window
+        LPDIRECTDRAWCLIPPER directDrawClipper;
+        ddrval = _directDraw->CreateClipper(0, &directDrawClipper, NULL );
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreateClipper");
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // setting it to our hwnd gives the clipper the coordinates from our window
+        // when using cliplist we run into problem with transparent HWNDs (such as REX)
+        ddrval = directDrawClipper->SetHWnd(0, _hWnd);
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetHWnd");
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // attach the clipper to the primary surface
+        ddrval = _primarySurface->SetClipper(directDrawClipper);
+        directDrawClipper->Release(); // no need to keep the clipper around
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetClipper");
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+    else
+    {
+        /* The cooperative level determines how much control we have over the
+         * screen. This must at least be either DDSCL_EXCLUSIVE or DDSCL_NORMAL
+         *
+         * DDSCL_EXCLUSIVE allows us to change video modes, and requires
+         * the DDSCL_FULLSCREEN flag, which will cause the window to take over
+         * the fullscreen. This is the preferred DirectDraw mode because it allows
+         * us to have control of the whole screen without regard for GDI.
+         *
+         * DDSCL_NORMAL is used to allow the DirectDraw app to run windowed.
+         */
+
+        // Note: debuging in fullscreen mode does not work, thanks MS...
+        ::GetWindowRect(_hWnd, &_originalHwndRect);
+
+        // DDSCL_NOWINDOWCHANGES prevents DD to change the window but it give us trouble too, not using it
+        ddrval = _directDraw->SetCooperativeLevel(_hWnd, DDSCL_EXCLUSIVE
+                | DDSCL_FULLSCREEN | DDSCL_ALLOWREBOOT);
+
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetCooperativeLevel DDSCL_EXCLUSIVE");
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+            _directDraw->Release();
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT;
+        ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP
+                | DDSCAPS_COMPLEX | DDSCAPS_VIDEOMEMORY;
+        ddsd.dwBackBufferCount = 1;
+
+        ddrval = _directDraw->CreateSurface(&ddsd, &_primarySurface, NULL);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to CreateSurface _primarySurface, fullscreen mode: 0x%x",
+                         ddrval);
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+            _directDraw->Release();
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // Get a pointer to the back buffer
+        DirectDrawCaps ddsCaps;
+        ZeroMemory(&ddsCaps, sizeof(ddsCaps));
+        ddsCaps.dwCaps = DDSCAPS_BACKBUFFER | DDSCAPS_VIDEOMEMORY;
+
+        ddrval = _primarySurface->GetAttachedSurface(&ddsCaps, &_backSurface);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to GetAttachedSurface, fullscreen mode ");
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // Get the screen size and save it as a rect
+        ZeroMemory(&ddsd, sizeof(ddsd));
+        ddsd.dwSize = sizeof(ddsd);
+    }
+
+    ZeroMemory(&ddsd, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+
+    // get our prinmary surface description
+    ddrval = _primarySurface->GetSurfaceDesc(&ddsd);
+    if (!(SUCCEEDED(ddrval) && (ddsd.dwFlags & DDSD_WIDTH) && (ddsd.dwFlags
+            & DDSD_HEIGHT)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to GetSurfaceDesc _primarySurface");
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                     _hWnd, _hwndRect.top, _hwndRect.left, _hwndRect.bottom,
+                     _hwndRect.right, ddsd.dwFlags, __LINE__);
+
+        _primarySurface->Release();
+        _directDraw->Release();
+        _primarySurface = 0;
+        _directDraw = 0;
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    // first we need to figure out the size of the primary surface
+
+    // store screen size
+    ::SetRect(&_screenRect, 0, 0, ddsd.dwWidth, ddsd.dwHeight);
+
+    // store RGB type
+    if (ddsd.ddpfPixelFormat.dwFlags & DDPF_RGB)
+    {
+        // RGB surface
+        switch (ddsd.ddpfPixelFormat.dwRGBBitCount)
+        {
+            case 16:
+                switch (ddsd.ddpfPixelFormat.dwGBitMask)
+                {
+                    case 0x00e0:
+                        _rgbVideoType = kARGB4444;
+                        break;
+                    case 0x03e0:
+                        _rgbVideoType = kARGB1555;
+                        break;
+                    case 0x07e0:
+                        _rgbVideoType = kRGB565;
+                        break;
+                }
+                break;
+            case 24:
+                _rgbVideoType = kRGB24;
+                break;
+            case 32:
+                _rgbVideoType = kARGB;
+                break;
+        }
+    }
+    switch (_blitVideoType)
+    {
+        case kI420:
+        case kIYUV:
+        case kYUY2:
+        case kYV12:
+        case kUYVY:
+            _incomingVideoType = kI420;
+            break;
+        case kUnknown:
+            _blitVideoType = _rgbVideoType;
+            _incomingVideoType = kI420;
+            break;
+        default:
+            _blitVideoType = _rgbVideoType;
+            _incomingVideoType = kI420;
+            break;
+    }
+    WEBRTC_TRACE(
+                 kTraceInfo,
+                 kTraceVideo,
+                 -1,
+                 "DirectDraw created _primarySurface, _blitVideoType %d, _rgbvideoType %d",
+                 _blitVideoType, _rgbVideoType);
+    return 0;
+}
+
+// Creates the off-screen "mixing" surface that all channels, bitmaps and
+// text overlays are composited onto before being blitted to the screen.
+// Sized to the full screen in fullscreen mode, otherwise to the client
+// area of _hWnd. Tries video memory first and falls back to system
+// memory. Returns 0 on success, -1 on failure.
+int VideoRenderDirectDraw::CreateMixingSurface()
+{
+    if (_directDraw == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw object not created");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    if (_fullscreen)
+    {
+        // Fullscreen: mixing surface covers the whole screen.
+        ::CopyRect(&_hwndRect, &_screenRect);
+    }
+    else
+    {
+        // update our _hWnd size
+        ::GetClientRect(_hWnd, &_hwndRect);
+    }
+
+    // Drop any previously created mixing surface before re-creating it.
+    if (_mixingSurface)
+    {
+        _mixingSurface->Release();
+        _mixingSurface = NULL;
+    }
+    // create mixing surface
+    DirectDrawSurfaceDesc ddsd;
+    memset(&ddsd, 0, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;
+    ddsd.ddsCaps.dwCaps = DDSCAPS_VIDEOMEMORY;
+    ddsd.dwHeight = _hwndRect.bottom;
+    ddsd.dwWidth = _hwndRect.right;
+
+    /*    char logStr[256];
+     _snprintf(logStr,256, "CreateMixingSurface H:%d W:%d \n",_hwndRect.bottom, _hwndRect.right);
+     OutputDebugString(logStr);
+     */
+
+    // NOTE: the brace structure below is deliberately interleaved with the
+    // preprocessor: when NOGRAPHICSCARD_MEMORY is NOT defined, the system-
+    // memory fallback runs inside the if(FAILED(...)) block whose closing
+    // brace sits in the second #ifndef below; when it IS defined, the
+    // fallback is the only allocation path. Edit with care.
+#ifndef NOGRAPHICSCARD_MEMORY
+    HRESULT ddrval = _directDraw->CreateSurface(&ddsd, &_mixingSurface, NULL);
+    if (FAILED(ddrval))
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw failed to CreateSurface _mixingSurface using VideoMemory: 0x%x",
+                     ddrval);
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d",
+                     _hWnd, _hwndRect.top, _hwndRect.left, _hwndRect.bottom,
+                     _hwndRect.right, ddsd.dwFlags);
+#endif
+
+        // Fallback: retry the allocation in system memory.
+        ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
+        HRESULT ddrval = _directDraw->CreateSurface(&ddsd, &_mixingSurface,
+                                                    NULL);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to CreateSurface _mixingSurface on System Memory: 0x%x",
+                         ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        _isPrimaryOrMixingSurfaceOnSystem = true;
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw CreateSurface _mixingSurface on SystemMemory");
+
+#ifndef NOGRAPHICSCARD_MEMORY        
+    }
+#endif
+
+    // Force a full repaint so remnants of the old surface are cleared.
+    _clearMixingSurface = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "DirectDraw _mixingSurface created");
+    return 0;
+}
+
+// Creates a new rendering channel covering the normalized region
+// [startWidth,startHeight]..[stopWidth,stopHeight] and registers it in the
+// channel and Z-order maps. Returns the channel as a VideoRenderCallback,
+// or NULL when the hardware cannot stretch a non-full-window region.
+VideoRenderCallback* VideoRenderDirectDraw::CreateChannel(WebRtc_UWord32 channel,
+                                                                  WebRtc_UWord32 zOrder,
+                                                                  float startWidth,
+                                                                  float startHeight,
+                                                                  float stopWidth,
+                                                                  float stopHeight)
+{
+    // Without hardware stretch support only a full-window stream is legal.
+    const bool fullWindow = (startWidth == 0.0f && startHeight == 0.0f
+            && stopWidth == 1.0f && stopHeight == 1.0f);
+    if (!_canStretch && !fullWindow)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to CreateChannel HW don't support stretch");
+        return NULL;
+    }
+
+    // Create the channel and apply its initial geometry. Stream id is
+    // always 0 - demuxing is handled outside of DirectDraw.
+    DirectDrawChannel* newChannel =
+            new DirectDrawChannel(_directDraw, _blitVideoType,
+                                      _incomingVideoType, _rgbVideoType, this);
+    newChannel->SetStreamSettings(this, 0, startWidth, startHeight, stopWidth,
+                                  stopHeight);
+
+    const unsigned int maskedChannel = channel & 0x0000ffff;
+    // NOTE(review): unlike the other map mutators this method takes no
+    // _confCritSect lock - confirm callers serialize channel creation.
+    _directDrawChannels[maskedChannel] = newChannel;
+
+    // Register the Z order; the default stream id is 0.
+    _directDrawZorder.insert(ZorderPair(zOrder, maskedChannel));
+    return newChannel;
+}
+
+// Registers an externally created DirectDrawChannel under the given
+// channel id and Z order. The stream id is forced to 0 because demuxing
+// happens outside of this renderer. Always returns 0.
+int VideoRenderDirectDraw::AddDirectDrawChannel(int channel,
+                                                    unsigned char streamID,
+                                                    int zOrder,
+                                                    DirectDrawChannel* ddObj)
+{
+    // Only allow one stream per channel, demuxing is done outside of DirectDraw...
+    streamID = 0;
+    const unsigned int maskedChannel = channel & 0x0000ffff;
+    const unsigned int streamChannel = (streamID << 16) + maskedChannel;
+
+    // Remember the channel object.
+    _directDrawChannels[maskedChannel] = ddObj;
+
+    _demuxing = true; // with this function it's always demux
+
+    // Register the Z order for this stream/channel pair.
+    _directDrawZorder.insert(ZorderPair(zOrder, streamChannel));
+    return 0;
+}
+
+// Returns the DirectDrawChannel registered for the given channel id with
+// an extra reference added (caller owns that reference), or NULL when no
+// such channel exists.
+DirectDrawChannel* VideoRenderDirectDraw::ShareDirectDrawChannel(
+                                                                         int channel)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    std::map<int, DirectDrawChannel*>::iterator found =
+            _directDrawChannels.find(channel & 0x0000ffff);
+    if (found == _directDrawChannels.end())
+    {
+        return NULL;
+    }
+    found->second->AddRef();
+    return found->second;
+}
+
+// Removes a channel: drops its Z-order entry, releases the channel object
+// and schedules a repaint of the mixing surface. Always returns 0 (also
+// when the channel is unknown).
+WebRtc_Word32 VideoRenderDirectDraw::DeleteChannel(const WebRtc_UWord32 channel)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    const unsigned int maskedChannel = channel & 0x0000ffff;
+
+    // Drop the first Z-order entry registered for this channel; the
+    // stream id lives in the upper 16 bits and is ignored here.
+    for (std::multimap<int, unsigned int>::iterator zIt =
+            _directDrawZorder.begin(); zIt != _directDrawZorder.end(); ++zIt)
+    {
+        if ((zIt->second & 0x0000ffff) == maskedChannel)
+        {
+            _directDrawZorder.erase(zIt);
+            break;
+        }
+    }
+
+    // Release the channel object itself and clear the mixing surface so
+    // no stale frame from the deleted channel lingers on screen.
+    std::map<int, DirectDrawChannel*>::iterator chIt =
+            _directDrawChannels.find(maskedChannel);
+    if (chIt != _directDrawChannels.end())
+    {
+        chIt->second->Release();
+        _directDrawChannels.erase(chIt);
+        _clearMixingSurface = true;
+    }
+
+    return 0;
+}
+
+// Reads back the geometry and Z order of a channel's stream. Only one
+// stream per channel is supported (demuxing is done outside DirectDraw),
+// so stream 0 is always queried regardless of streamId. Returns -1 when
+// the channel does not exist or the channel object reports an error;
+// zOrder is left untouched if no Z-order entry is found.
+WebRtc_Word32 VideoRenderDirectDraw::GetStreamSettings(const WebRtc_UWord32 channel,
+                                                           const WebRtc_UWord16 streamId,
+                                                           WebRtc_UWord32& zOrder,
+                                                           float& startWidth,
+                                                           float& startHeight,
+                                                           float& stopWidth,
+                                                           float& stopHeight)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    const unsigned int maskedChannel = channel & 0x0000ffff;
+    std::map<int, DirectDrawChannel*>::iterator found =
+            _directDrawChannels.find(maskedChannel);
+    if (found == _directDrawChannels.end())
+    {
+        // This channel doesn't exist.
+        return -1;
+    }
+
+    if (found->second->GetStreamSettings(this, 0, startWidth, startHeight,
+                                         stopWidth, stopHeight) == -1)
+    {
+        // Error for this stream
+        return -1;
+    }
+
+    // Locate the Z order registered for this channel.
+    for (std::multimap<int, unsigned int>::iterator zIt =
+            _directDrawZorder.begin(); zIt != _directDrawZorder.end(); ++zIt)
+    {
+        if ((zIt->second & 0x0000ffff) == maskedChannel)
+        {
+            zOrder = (unsigned int) (zIt->first);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+// Appends every registered channel id to channelList. The key 0x0000ffff
+// is the masked form of -1 and is translated back before being reported.
+// Always returns 0.
+int VideoRenderDirectDraw::GetChannels(std::list<int>& channelList)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    for (std::map<int, DirectDrawChannel*>::iterator it =
+            _directDrawChannels.begin(); it != _directDrawChannels.end();
+            ++it)
+    {
+        channelList.push_back(it->first == 0x0000ffff ? -1 : it->first);
+    }
+    return 0;
+}
+
+// True when a DirectDrawChannel is registered for this channel id.
+bool VideoRenderDirectDraw::HasChannel(int channel)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    return _directDrawChannels.find(channel & 0x0000ffff)
+            != _directDrawChannels.end();
+}
+
+// True when at least one channel is registered.
+bool VideoRenderDirectDraw::HasChannels()
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    return !_directDrawChannels.empty();
+}
+
+// Whether this renderer was configured for exclusive fullscreen mode.
+bool VideoRenderDirectDraw::IsFullScreen()
+{
+    return _fullscreen;
+}
+
+// Returns the video type this renderer prefers to receive incoming
+// frames in (sic: "Perfered" is the established interface spelling).
+VideoType VideoRenderDirectDraw::GetPerferedVideoFormat()
+{
+    return _incomingVideoType;
+}
+
+// this can be called runtime from another thread
+// Reconfigures an existing channel's geometry (normalized [0,1] window
+// coordinates) and Z order, updates the demuxing flag, and removes the
+// channel entirely when all four coordinates are 0. Returns the channel
+// object, or NULL on failure OR after a successful removal - callers
+// must use the coordinate values to tell those two cases apart.
+DirectDrawChannel* VideoRenderDirectDraw::ConfigureDirectDrawChannel(int channel,
+                                                                             unsigned char streamID,
+                                                                             int zOrder,
+                                                                             float left,
+                                                                             float top,
+                                                                             float right,
+                                                                             float bottom)
+{
+    // Only support one stream per channel, is demuxing done outside if DD.
+    streamID = 0;
+
+    CriticalSectionScoped cs(*_confCritSect);
+
+    // Without hardware stretch support only the full window region is legal.
+    if (!_canStretch)
+    {
+        if (left != 0.0f || top != 0.0f || right != 1.0f || bottom != 1.0f)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to ConfigureDirectDrawChannel HW don't support stretch");
+            return NULL;
+        }
+    }
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    DirectDrawChannel* ddobj = NULL;
+    if (ddIt != _directDrawChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                     "DirectDraw failed to find channel");
+        return NULL;
+    }
+    // Stream id occupies the upper 16 bits of the Z-order map value;
+    // since streamID was forced to 0 this equals the masked channel id.
+    unsigned int streamChannel = (streamID << 16) + (channel & 0x0000ffff);
+    // remove the old z order
+    std::multimap<int, unsigned int>::iterator it;
+    it = _directDrawZorder.begin();
+    while (it != _directDrawZorder.end())
+    {
+        if (streamChannel == it->second)
+        {
+            it = _directDrawZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+    // if this channel already are in the zOrder map it's demux
+    // (i.e. another stream for the same channel id is still registered)
+    it = _directDrawZorder.begin();
+    while (it != _directDrawZorder.end())
+    {
+        if (channel == (it->second & 0x0000ffff))
+        {
+            _demuxing = true;
+            break;
+        }
+        it++;
+    }
+    if (it == _directDrawZorder.end())
+    {
+        _demuxing = false;
+    }
+
+    // Geometry changed - force a full repaint of the mixing surface.
+    _clearMixingSurface = true;
+
+    // All-zero coordinates is the "remove this channel" request.
+    if (left == 0.0f && top == 0.0f && right == 0.0f && bottom == 0.0f)
+    {
+        // remove
+        _directDrawChannels.erase(ddIt);
+        ddobj->Release();
+        return NULL;
+    }
+    ddobj->SetStreamSettings(this, streamID, left, top, right, bottom);
+
+    _directDrawZorder.insert(ZorderPair(zOrder, streamChannel));
+    return ddobj;
+}
+
+// Applies a crop region (normalized [0,1] coordinates) to a channel's
+// stream. Only one stream per channel is supported - demuxing happens
+// outside DirectDraw - so the crop always targets stream 0 regardless of
+// streamID. Returns -1 only when the hardware cannot stretch a cropped
+// region; an unknown channel is silently ignored and returns 0.
+WebRtc_Word32 VideoRenderDirectDraw::SetCropping(const WebRtc_UWord32 channel,
+                                                     const WebRtc_UWord16 streamID,
+                                                     float left, float top,
+                                                     float right, float bottom)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    // Without hardware stretch support only the full region is legal.
+    const bool fullRegion = (left == 0.0f && top == 0.0f && right == 1.0f
+            && bottom == 1.0f);
+    if (!_canStretch && !fullRegion)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                     "DirectDraw failed to SetCropping HW don't support stretch");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    std::map<int, DirectDrawChannel*>::iterator found =
+            _directDrawChannels.find(channel & 0x0000ffff);
+    if (found != _directDrawChannels.end() && found->second)
+    {
+        found->second->SetStreamCropSettings(this, 0, left, top, right,
+                                             bottom);
+    }
+    return 0;
+}
+
+// Public entry for reconfiguring a channel. Delegates to
+// ConfigureDirectDrawChannel, which returns NULL both on failure and
+// after successfully removing a channel (the all-zero coordinate case),
+// so the two NULL outcomes are disambiguated here. Returns 0 on success
+// or removal, -1 on failure.
+WebRtc_Word32 VideoRenderDirectDraw::ConfigureRenderer(const WebRtc_UWord32 channel,
+                                                           const WebRtc_UWord16 streamId,
+                                                           const unsigned int zOrder,
+                                                           const float left,
+                                                           const float top,
+                                                           const float right,
+                                                           const float bottom)
+{
+    DirectDrawChannel* result =
+            ConfigureDirectDrawChannel(channel, (unsigned char) streamId,
+                                       zOrder, left, top, right, bottom);
+    if (result != NULL)
+    {
+        return 0;
+    }
+    const bool removeRequest = (left == 0.0f && top == 0.0f
+            && right == 0.0f && bottom == 0.0f);
+    if (removeRequest)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                     "ConfigureRender, removed channel:%d streamId:%d",
+                     channel, streamId);
+        return 0;
+    }
+    WEBRTC_TRACE(
+                 kTraceError,
+                 kTraceVideoRenderer,
+                 -1,
+                 "DirectDraw failed to ConfigureRenderer for channel: %d",
+                 channel);
+    return -1;
+}
+
+// this can be called runtime from another thread
+// Adds, replaces or removes (text == NULL / textLength == 0) the text
+// overlay identified by textId. colorText/colorBg are COLORREF values;
+// left/top/right/bottom are normalized [0,1] window coordinates.
+// Returns 0 on success, -1 on out-of-range coordinates, or the error
+// code from DirectDrawTextSettings::SetText.
+WebRtc_Word32 VideoRenderDirectDraw::SetText(const WebRtc_UWord8 textId,
+                                                 const WebRtc_UWord8* text,
+                                                 const WebRtc_Word32 textLength,
+                                                 const WebRtc_UWord32 colorText,
+                                                 const WebRtc_UWord32 colorBg,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    DirectDrawTextSettings* textSetting = NULL;
+
+    CriticalSectionScoped cs(*_confCritSect);
+
+    _frameChanged = true;
+
+    // Reuse an existing settings object for this textId, if any.
+    std::map<unsigned char, DirectDrawTextSettings*>::iterator it;
+    it = _textSettings.find(textId);
+    if (it != _textSettings.end())
+    {
+        if (it->second)
+        {
+            textSetting = it->second;
+        }
+    }
+    _clearMixingSurface = true;
+
+    // NULL/empty text is the "remove this overlay" request.
+    if (text == NULL || textLength == 0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw remove text textId:%d", textId);
+        if (textSetting)
+        {
+            delete textSetting;
+            _textSettings.erase(it);
+        }
+        return 0;
+    }
+
+    // sanity
+    if (left > 1.0f || left < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (top > 1.0f || top < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (right > 1.0f || right < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (textSetting == NULL)
+    {
+        textSetting = new DirectDrawTextSettings();
+    }
+    int retVal = textSetting->SetText((const char*) text, textLength,
+                                      (COLORREF) colorText, (COLORREF) colorBg,
+                                      left, top, right, bottom);
+    // On failure drop the settings object and any stale map entry (the
+    // erase is a no-op when the object was newly created above).
+    if (retVal != 0)
+    {
+        delete textSetting;
+        textSetting = NULL;
+        _textSettings.erase(textId);
+        return retVal;
+    }
+    if (textSetting)
+    {
+        _textSettings[textId] = textSetting;
+    }
+    return retVal;
+}
+
+// this can be called runtime from another thread
+// Adds, replaces or removes (bitMap == NULL) the overlay bitmap
+// identified by pictureId. colorKey is an optional DDCOLORKEY* enabling
+// transparency; left/top/right/bottom are normalized [0,1] window
+// coordinates. Returns 0 on success, -1 on invalid arguments or missing
+// HW support, or the error code from DirectDrawBitmapSettings::SetBitmap.
+WebRtc_Word32 VideoRenderDirectDraw::SetBitmap(const void* bitMap,
+                                                   const WebRtc_UWord8 pictureId,
+                                                   const void* colorKey,
+                                                   const float left,
+                                                   const float top,
+                                                   const float right,
+                                                   const float bottom)
+{
+    DirectDrawBitmapSettings* bitmapSetting = NULL;
+
+    CriticalSectionScoped cs(*_confCritSect);
+
+    _frameChanged = true;
+    // Reuse an existing settings object for this pictureId, if any.
+    std::map<unsigned char, DirectDrawBitmapSettings*>::iterator it;
+    it = _bitmapSettings.find(pictureId);
+    if (it != _bitmapSettings.end())
+    {
+        if (it->second)
+        {
+            bitmapSetting = it->second;
+        }
+    }
+    _clearMixingSurface = true;
+
+    // NULL bitmap is the "remove this overlay" request.
+    if (bitMap == NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw remove bitmap pictureId:%d", pictureId);
+        if (bitmapSetting)
+        {
+            delete bitmapSetting;
+            _bitmapSettings.erase(it);
+        }
+        return 0;
+    }
+
+    // sanity: all coordinates must stay inside the normalized window
+    if (left > 1.0f || left < 0.0f ||
+        top > 1.0f || top < 0.0f ||
+        right > 1.0f || right < 0.0f ||
+        bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetBitmap invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (!_canStretch)
+    {
+        if (left != 0.0f || top != 0.0f || right != 1.0f || bottom != 1.0f)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetBitmap HW don't support stretch");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+        }
+    }
+    // Check transparency support BEFORE allocating a new settings object.
+    // The previous code allocated first and returned -1 from inside the
+    // colorKey branch without freeing or storing the new object, leaking
+    // a DirectDrawBitmapSettings on every such call.
+    if (colorKey && !_supportTransparency)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to SetBitmap HW don't support transparency");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (bitmapSetting == NULL)
+    {
+        bitmapSetting = new DirectDrawBitmapSettings();
+    }
+
+    bitmapSetting->_transparentBitMap = (HBITMAP) bitMap;
+    bitmapSetting->_transparentBitmapLeft = left;
+    bitmapSetting->_transparentBitmapRight = right;
+    bitmapSetting->_transparentBitmapTop = top;
+    bitmapSetting->_transparentBitmapBottom = bottom;
+
+    // colorKey == NULL equals no transparency
+    if (colorKey)
+    {
+        // first remove constness
+        DDCOLORKEY* ddColorKey =
+                static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
+        if (bitmapSetting->_transparentBitmapColorKey == NULL)
+        {
+            bitmapSetting->_transparentBitmapColorKey = new DDCOLORKEY();
+        }
+        bitmapSetting->_transparentBitmapColorKey->dwColorSpaceLowValue
+                = ddColorKey->dwColorSpaceLowValue;
+        bitmapSetting->_transparentBitmapColorKey->dwColorSpaceHighValue
+                = ddColorKey->dwColorSpaceHighValue;
+    }
+    int retval = bitmapSetting->SetBitmap(_trace, _directDraw);
+    if (retval != 0)
+    {
+        // Failed to apply: drop the settings object and any stale map
+        // entry (erase is a no-op when the object was newly created).
+        delete bitmapSetting;
+        bitmapSetting = NULL;
+        _bitmapSettings.erase(pictureId);
+        return retval;
+    }
+    _bitmapSettings[pictureId] = bitmapSetting;
+    return retval;
+}
+
+// this can be called runtime from another thread
+// Enables/disables rendering with a transparent background. Fails with
+// -1 when the hardware does not support transparency.
+WebRtc_Word32 VideoRenderDirectDraw::SetTransparentBackground(
+                                                                  const bool enable)
+{
+    CriticalSectionScoped cs(*_confCritSect);
+
+    if (!_supportTransparency)
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw failed to EnableTransparentBackground HW don't support transparency");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+
+    _transparentBackground = enable;
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw enabled TransparentBackground");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw disabled TransparentBackground");
+    }
+    return 0;
+}
+
+// Paints the given rectangle of the given surface solid black via a
+// color-fill blit. Returns -1 on NULL arguments or blit failure, else 0.
+int VideoRenderDirectDraw::FillSurface(DirectDrawSurface *pDDSurface,
+                                           RECT* rect)
+{
+    // sanity checks
+    if (pDDSurface == NULL || rect == NULL)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    DDBLTFX fillFX;
+    ZeroMemory(&fillFX, sizeof(fillFX));
+    fillFX.dwSize = sizeof(fillFX);
+    fillFX.dwFillColor = RGB(0, 0, 0);
+
+    // Color-fill blit over the requested area; DDBLT_WAIT makes Blt
+    // retry until the blitter is available instead of failing.
+    const HRESULT ddrval = pDDSurface->Blt(rect, NULL, NULL,
+                                           DDBLT_COLORFILL | DDBLT_WAIT,
+                                           &fillFX);
+    if (FAILED(ddrval))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to fill surface");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    return 0;
+}
+
+// the real rendering thread
+// Thread entry trampoline: recovers the renderer instance from the
+// opaque thread argument and runs one iteration of its render loop.
+bool VideoRenderDirectDraw::RemoteRenderingThreadProc(void *obj)
+{
+    VideoRenderDirectDraw* renderer =
+            static_cast<VideoRenderDirectDraw*> (obj);
+    return renderer->RemoteRenderingProcess();
+}
+
+// One iteration of the render loop: waits (up to 100 ms) for the screen
+// event, then - if blitting is enabled and a foreground window exists -
+// composites any changed channels/bitmaps/text onto the mixing surface
+// and blits it to the back buffer (flip) or directly to the front
+// buffer. Render duration feeds DecideBestRenderingMode, which may skip
+// frames (half/quarter rate) on slow systems. Always returns true so the
+// owning thread keeps running.
+bool VideoRenderDirectDraw::RemoteRenderingProcess()
+{
+    bool hwndChanged = false;
+    int waitTime = 0;
+
+    _screenEvent->Wait(100);
+
+    _confCritSect->Enter();
+
+    if (_blit == false)
+    {
+        // Blitting disabled - nothing to do this round.
+        _confCritSect->Leave();
+        return true;
+    }
+
+    if (!::GetForegroundWindow())
+    {
+        //no window, i.e the user have clicked CTRL+ALT+DEL, return true and wait
+        _confCritSect->Leave();
+        return true;
+    }
+
+    // Skip to blit if last render to primare surface took too long time.
+    _processCount++;
+    if (_deliverInQuarterFrameRate)
+    {
+        if (_processCount % 4 != 0)
+        {
+            _confCritSect->Leave();
+            return true;
+        }
+    }
+    else if (_deliverInHalfFrameRate)
+    {
+        if (_processCount % 2 != 0)
+        {
+            _confCritSect->Leave();
+            return true;
+        }
+    }
+
+    // Calculate th erender process time
+    unsigned int startProcessTime = timeGetTime();
+
+    hwndChanged = HasHWNDChanged();
+    if (hwndChanged)
+    {
+        _clearMixingSurface = true;
+    }
+
+    // Stop scanning as soon as one channel reports a new frame.
+    std::map<int, DirectDrawChannel*>::iterator it;
+    it = _directDrawChannels.begin();
+    while (it != _directDrawChannels.end() && !_frameChanged)
+    {
+        if (it->second)
+        {
+            int channel = it->first; // NOTE(review): unused local
+            _frameChanged = it->second->IsOffScreenSurfaceUpdated(this);
+        }
+        it++;
+    }
+    if (_backSurface)
+    {
+        // Fullscreen path: compose into the mixing surface only when
+        // something changed, then blit to the back buffer and flip.
+        if (hwndChanged || _frameChanged)
+        {
+            BlitFromOffscreenBuffersToMixingBuffer();
+            BlitFromBitmapBuffersToMixingBuffer();
+            BlitFromTextToMixingBuffer();
+        }
+        BlitFromMixingBufferToBackBuffer();
+        WaitAndFlip(waitTime);
+    }
+    else
+    {
+        // Windowed path: blit the mixing surface straight to the front
+        // buffer.
+        if (hwndChanged || _frameChanged)
+        {
+            BlitFromOffscreenBuffersToMixingBuffer();
+            BlitFromBitmapBuffersToMixingBuffer();
+            BlitFromTextToMixingBuffer();
+        }
+        BlitFromMixingBufferToFrontBuffer(hwndChanged, waitTime);
+
+    }
+    // Check the total time it took processing all rendering. Don't consider waitTime.
+    //const int totalRenderTime=GET_TIME_IN_MS()- startProcessTime-waitTime;            
+    const int totalRenderTime = ::timeGetTime() - startProcessTime - waitTime;
+    DecideBestRenderingMode(hwndChanged, totalRenderTime);
+    _frameChanged = false;
+    _confCritSect->Leave();
+
+    return true;
+}
+// Adapts the rendering strategy to the measured render cost and CPU load.
+// Called once per render pass with the time (ms) the pass took (waiting
+// excluded). Escalates through progressively cheaper rendering modes; state
+// is kept in member flags so the escalation is sticky across passes.
+void VideoRenderDirectDraw::DecideBestRenderingMode(bool hwndChanged,
+                                                        int totalRenderTime)
+{
+    /* Applies various workarounds for bad graphics drivers:
+     1. If CPU usage is too high - try the wait-for-correct-scan-line fix.
+     2. If CPU usage is still too high - render at 1/2 the display update period.
+     3. If the render pass takes too long - reduce the blit period to 1/2 the
+        display update period.
+     4. If it still takes too long - try the color conversion fix (conversion is
+        then done in the channel's DeliverFrame instead of at blit time).
+     5. If it still takes too long - reduce the blit period to 1/4 the display
+        update period and disable the color conversion fix.
+     6. If it still takes too long - keep 1/4 period and enable the color
+        conversion fix again.
+     */
+
+    // Milliseconds since the CPU usage was last sampled.
+    const int timesSinceLastCPUCheck = timeGetTime()
+            - _screenRenderCpuUsage.LastGetCpuTime();
+    int cpu = 0;
+
+    if (hwndChanged) // Render window changed.
+    {
+        cpu = _screenRenderCpuUsage.GetCpuUsage(); // Called just to reset the CPU usage measurement baseline.
+        _nrOfTooLongRenderTimes = 0; // Reset count of too long render times.
+        return; // Nothing more to do since the window has changed.
+    }
+    // Accumulate evidence that render passes are exceeding their budget.
+    if (_maxAllowedRenderTime > 0 && totalRenderTime > _maxAllowedRenderTime)
+    {
+        if (!_deliverInHalfFrameRate || totalRenderTime > 2
+                * _maxAllowedRenderTime)
+        {
+            _nrOfTooLongRenderTimes += totalRenderTime / _maxAllowedRenderTime; // Weighted by how far over budget the pass was.
+        }
+    }
+
+    // Re-evaluate the rendering mode at most once per CPU_CHECK_INTERVAL ms.
+    if (timesSinceLastCPUCheck > WindowsThreadCpuUsage::CPU_CHECK_INTERVAL)
+    {
+        cpu = _screenRenderCpuUsage.GetCpuUsage(); // CPU usage of this render thread since the last sample.
+        WEBRTC_TRACE(
+                     kTraceStream,
+                     kTraceVideo,
+                     -1,
+                     "Screen render thread cpu usage. (Tid %d), cpu usage %d processTime %d, no of too long render times %d",
+                     GetCurrentThreadId(), cpu, totalRenderTime,
+                     _nrOfTooLongRenderTimes);
+
+        // Step 1: at >= 5% thread CPU in windowed mode (no back surface),
+        // switch to waiting for the correct scan line before blitting.
+        if (cpu >= 5 && _renderModeWaitForCorrectScanLine == false
+                && !_backSurface)
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "HIGH screen render thread cpu usage. (Tid %d), cpu usage %d, applying wait for scan line",
+                         GetCurrentThreadId(), cpu);
+            _renderModeWaitForCorrectScanLine = true;
+            _fullScreenWaitEvent->StartTimer(true, 1);
+        }
+        else if (cpu >= 10 && _deliverInHalfFrameRate == false)
+        {
+            // Step 2: still expensive - halve the render rate.
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "HIGH screen render thread cpu usage. (Tid %d), cpu usage %d, Render half rate",
+                         GetCurrentThreadId(), cpu);
+            _deliverInHalfFrameRate = true;
+        }
+        else
+        {
+            // Steps 3-6: escalate only when render passes keep blowing the budget.
+            if (_nrOfTooLongRenderTimes > 15 || totalRenderTime
+                    >= WindowsThreadCpuUsage::CPU_CHECK_INTERVAL)
+            {
+
+                // The rendering is taking too long; pick the next escalation.
+                if (_deliverInHalfFrameRate == false)
+                {
+                    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                                 "Render half rate, tid: %d",
+                                 GetCurrentThreadId());
+                    _deliverInHalfFrameRate = true;
+                }
+                else if (_deliverInScreenType == false
+                        && !_deliverInQuarterFrameRate)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "Applying deliver in screen type format, tid: %d",
+                                 GetCurrentThreadId());
+                    // Try the RGB fix: deliver frames in the screen's format.
+                    std::map<int, DirectDrawChannel*>::iterator it;
+                    it = _directDrawChannels.begin();
+                    while (it != _directDrawChannels.end())
+                    {
+                        it->second->ChangeDeliverColorFormat(true);
+                        it++;
+                    }
+                    _deliverInScreenType = true;
+                }
+                else if (_deliverInQuarterFrameRate == false)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "Render quarter rate and disable deliver in screen type format, tid: %d",
+                                 GetCurrentThreadId());
+                    _deliverInQuarterFrameRate = true;
+                    if (_deliverInScreenType)
+                    {
+                        // Disable the RGB fix for all channels.
+                        std::map<int, DirectDrawChannel*>::iterator it;
+                        it = _directDrawChannels.begin();
+                        while (it != _directDrawChannels.end())
+                        {
+                            it->second->ChangeDeliverColorFormat(false);
+                            it++;
+                        }
+                        _deliverInScreenType = false;
+                    }
+                }
+                else if (_deliverInQuarterFrameRate == true
+                        && !_deliverInScreenType)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "Render quarter rate and enable RGB fix, tid: %d",
+                                 GetCurrentThreadId());
+                    _deliverInQuarterFrameRate = true;
+
+                    // Re-enable the RGB fix for all channels.
+                    std::map<int, DirectDrawChannel*>::iterator it;
+                    it = _directDrawChannels.begin();
+                    while (it != _directDrawChannels.end())
+                    {
+                        it->second->ChangeDeliverColorFormat(true);
+                        it++;
+                    }
+                    _deliverInScreenType = true;
+                }
+            }
+        }
+        _nrOfTooLongRenderTimes = 0; // Reset count of too long render times.
+    }
+}
+
+/*
+ *	Internal helper functions for blitting
+ */
+
+// Detects whether the render window's client rectangle has changed since the
+// last check. When a change is detected the mixing surface is recreated
+// (CreateMixingSurface() deletes the old one).
+// Returns true only when the rect changed AND the new mixing surface was
+// created successfully; in fullscreen mode it always returns false.
+bool VideoRenderDirectDraw::HasHWNDChanged()
+{
+    if (_fullscreen)
+    {
+        // Fullscreen rendering is not tied to a window client rect.
+        return false;
+    }
+
+    RECT clientRect;
+    ::GetClientRect(_hWnd, &clientRect);
+    if (EqualRect(&clientRect, &_hwndRect))
+    {
+        // Same client rect as before - nothing changed.
+        return false;
+    }
+
+    // Rect changed: rebuild the mixing surface (deletes the old one).
+    return CreateMixingSurface() == 0;
+}
+
+// Blits every channel's off-screen video surface onto the shared mixing
+// surface, honoring the configured Z order. Recreates the mixing surface if
+// it is missing. Returns 0 on success, or the error code from
+// CreateMixingSurface() when the mixing surface cannot be (re)created.
+int VideoRenderDirectDraw::BlitFromOffscreenBuffersToMixingBuffer()
+{
+    bool updateAll = false; // used to minimize the number of blt
+
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    if (_mixingSurface == NULL)
+    {
+        int retVal = CreateMixingSurface();
+        if (retVal != 0)
+        {
+            // trace done
+            return retVal;
+        }
+    }
+    RECT mixingRect;
+    ::SetRectEmpty(&mixingRect);
+
+    // The mixing area is the whole screen in fullscreen mode, otherwise the
+    // window client rect clamped to the screen.
+    if (_fullscreen)
+    {
+        ::CopyRect(&mixingRect, &_screenRect);
+    }
+    else
+    {
+        ::CopyRect(&mixingRect, &_hwndRect);
+        // what if largest size is larger than screen
+        if (mixingRect.right > _screenRect.right)
+        {
+            mixingRect.right = _screenRect.right;
+        }
+        if (mixingRect.bottom > _screenRect.bottom)
+        {
+            mixingRect.bottom = _screenRect.bottom;
+        }
+    }
+    if (!EqualRect(&_mixingRect, &mixingRect))
+    {
+        // size changed - clear the surface and redraw everything
+        CopyRect(&_mixingRect, &mixingRect);
+        FillSurface(_mixingSurface, &mixingRect);
+        updateAll = true;
+    }
+
+    if (_clearMixingSurface)
+    {
+        FillSurface(_mixingSurface, &_mixingRect);
+        _clearMixingSurface = false;
+        updateAll = true;
+    }
+
+    std::multimap<int, unsigned int>::reverse_iterator it;
+    it = _directDrawZorder.rbegin();
+    while (it != _directDrawZorder.rend())
+    {
+        // loop through all channels and streams in Z order;
+        // the mapped value packs the stream ID in the high 16 bits and the
+        // channel ID in the low 16 bits
+        short streamID = (it->second >> 16);
+        int channel = it->second & 0x0000ffff;
+
+        std::map<int, DirectDrawChannel*>::iterator ddIt;
+        ddIt = _directDrawChannels.find(channel);
+        if (ddIt != _directDrawChannels.end())
+        {
+            // found the channel
+            DirectDrawChannel* channelObj = ddIt->second;
+            if (channelObj && _mixingSurface)
+            {
+                // Once one surface is redrawn every surface above it in the
+                // Z order must be redrawn too, so updateAll stays sticky.
+                if (updateAll || channelObj->IsOffScreenSurfaceUpdated(this))
+                {
+                    updateAll = true;
+                    if (channelObj->BlitFromOffscreenBufferToMixingBuffer(
+                                                                          this,
+                                                                          streamID,
+                                                                          _mixingSurface,
+                                                                          _mixingRect,
+                                                                          _demuxing)
+                            != 0)
+                    {
+                        // Blit failed: drop the mixing surface so it gets
+                        // recreated on the next render pass.
+                        WEBRTC_TRACE(kTraceError, kTraceVideo,
+                                     -1,
+                                     "DirectDraw error BlitFromOffscreenBufferToMixingBuffer ");
+                        _mixingSurface->Release();
+                        _mixingSurface = NULL;
+                    }
+                }
+            }
+        }
+        it++;
+    }
+    return 0;
+}
+
+// Draws every registered text overlay onto the mixing surface using GDI.
+// Returns 0 on success (or when there is no text to draw); -1 when
+// DirectDraw or the mixing surface is unavailable, or when no device
+// context can be obtained for the surface.
+int VideoRenderDirectDraw::BlitFromTextToMixingBuffer()
+{
+    if (_directDraw == NULL || !_mixingSurface)
+    {
+        return -1;
+    }
+    if (_textSettings.empty())
+    {
+        return 0;
+    }
+
+    // GDI text drawing needs a device context for the surface.
+    HDC hdcDDSurface;
+    if (_mixingSurface->GetDC(&hdcDDSurface) != S_OK)
+    {
+        return -1;
+    }
+
+    // Walk the overlays in reverse key order, skipping empty slots.
+    for (std::map<unsigned char, DirectDrawTextSettings*>::reverse_iterator
+            rit = _textSettings.rbegin(); rit != _textSettings.rend(); ++rit)
+    {
+        DirectDrawTextSettings* text = rit->second;
+        if (text == NULL)
+        {
+            continue;
+        }
+        SetTextColor(hdcDDSurface, text->_colorRefText);
+        SetBkColor(hdcDDSurface, text->_colorRefBackground);
+        // Transparent mode lets the video show through behind the glyphs.
+        SetBkMode(hdcDDSurface, text->_transparent ? TRANSPARENT : OPAQUE);
+
+        // Text position is stored as fractions of the mixing area.
+        RECT textRect;
+        textRect.left = int(_mixingRect.right * text->_textLeft);
+        textRect.right = int(_mixingRect.right * text->_textRight);
+        textRect.top = int(_mixingRect.bottom * text->_textTop);
+        textRect.bottom = int(_mixingRect.bottom * text->_textBottom);
+
+        DrawTextA(hdcDDSurface, text->_ptrText, text->_textLength,
+                  &textRect, DT_LEFT);
+    }
+    _mixingSurface->ReleaseDC(hdcDDSurface);
+    return 0;
+}
+
+// Blits every registered bitmap overlay onto the mixing surface, using
+// color-keyed (transparent) blits where a color key is configured. Handles
+// lost surfaces by dropping and re-creating the bitmap surface. Returns 0 on
+// success (including the CTRL+ALT+DEL / no-foreground-window case) and -1 on
+// unrecoverable blit errors or when DirectDraw is not available.
+int VideoRenderDirectDraw::BlitFromBitmapBuffersToMixingBuffer()
+{
+    HRESULT ddrval;
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    if (_directDraw == NULL)
+    {
+        return -1; // signal that we are not ready for the change
+    }
+
+    std::map<unsigned char, DirectDrawBitmapSettings*>::reverse_iterator it;
+    it = _bitmapSettings.rbegin();
+
+    while (it != _bitmapSettings.rend())
+    {
+        DirectDrawBitmapSettings* settings = it->second;
+        it++;
+        if (settings == NULL)
+        {
+            continue;
+        }
+
+        // Color keying lets you set colors on a surface to be completely transparent.
+        // always blit _transparentBitmapSurface last
+        if (_mixingSurface && settings->_transparentBitmapSurface
+                && settings->_transparentBitmapWidth
+                && settings->_transparentBitmapHeight)
+        {
+            DWORD signal = DDBLT_WAIT | DDBLT_DDFX;
+            // Set transparent color
+            if (settings->_transparentBitmapColorKey)
+            {
+                signal |= DDBLT_KEYSRC;
+                settings->_transparentBitmapSurface->SetColorKey(
+                                                                 DDCKEY_SRCBLT,
+                                                                 settings->_transparentBitmapColorKey);
+            }
+
+            // Now we can blt the transparent surface to another surface
+            RECT srcRect;
+            SetRect(&srcRect, 0, 0, settings->_transparentBitmapWidth,
+                    settings->_transparentBitmapHeight);
+
+            // The destination is given as fractions of the mixing area.
+            RECT dstRect;
+            if (settings->_transparentBitmapLeft
+                    != settings->_transparentBitmapRight
+                    && settings->_transparentBitmapTop
+                            != settings->_transparentBitmapBottom)
+            {
+                CopyRect(&dstRect, &_mixingRect);
+                dstRect.left = (int) (dstRect.right
+                        * settings->_transparentBitmapLeft);
+                dstRect.right = (int) (dstRect.right
+                        * settings->_transparentBitmapRight);
+                dstRect.top = (int) (dstRect.bottom
+                        * settings->_transparentBitmapTop);
+                dstRect.bottom = (int) (dstRect.bottom
+                        * settings->_transparentBitmapBottom);
+            }
+            else
+            {
+
+                // if left, right, top and bottom describe one point, keep the
+                // bitmap's original size anchored at that point
+                CopyRect(&dstRect, &srcRect);
+                POINT startp;
+                startp.x = (int) (_mixingRect.right
+                        * settings->_transparentBitmapLeft);
+                startp.y = (int) (_mixingRect.bottom
+                        * settings->_transparentBitmapTop);
+                OffsetRect(&dstRect, startp.x, startp.y);
+
+                // make sure that we blit inside our surface; clip src and dst
+                // together so the scale is preserved
+                if (dstRect.bottom > _mixingRect.bottom)
+                {
+                    srcRect.bottom -= dstRect.bottom - _mixingRect.bottom;
+                    // sanity
+                    if (srcRect.bottom < 0)
+                    {
+                        srcRect.bottom = 0;
+                    }
+                    dstRect.bottom = _mixingRect.bottom;
+                }
+                if (dstRect.right > _mixingRect.right)
+                {
+                    srcRect.right -= dstRect.right - _mixingRect.right;
+                    // sanity
+                    if (srcRect.right < 0)
+                    {
+                        srcRect.right = 0;
+                    }
+                    dstRect.right = _mixingRect.right;
+                }
+            }
+            // ddbltfx.dwDDFX |= DDBLTFX_MIRRORUPDOWN; //only for test requires hw support
+
+            // wait for the  _mixingSurface to be available
+            ddrval = _mixingSurface->Blt(&dstRect,
+                                         settings->_transparentBitmapSurface,
+                                         &srcRect, signal, &ddbltfx);
+            if (ddrval == DDERR_SURFACELOST)
+            {
+                if (!::GetForegroundWindow())
+                {
+                    // no window, i.e the user has pressed CTRL+ALT+DEL
+                    return 0;
+                }
+                // the surface is always re-created via the SetBitmap call
+                settings->_transparentBitmapSurface->Release();
+                settings->_transparentBitmapSurface = NULL;
+
+                _clearMixingSurface = true;
+
+                if (settings->_transparentBitMap)
+                {
+                    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                                 "DirectDraw re-set transparent bitmap");
+                    settings->SetBitmap(_trace, _directDraw);
+                }
+            }
+            else if (ddrval != DD_OK)
+            {
+                // Unrecoverable blit error: drop the bitmap surface and bail.
+                settings->_transparentBitmapSurface->Release();
+                settings->_transparentBitmapSurface = NULL;
+                WEBRTC_TRACE(
+                             kTraceInfo,
+                             kTraceVideo,
+                             -1,
+                             "DirectDraw blt error 0x%x _transparentBitmapSurface",
+                             ddrval);
+                return -1;
+                //return VIDEO_DIRECT_DRAW_FAILURE;
+            }
+        }
+    }
+    return 0;
+}
+
+/**
+ *	Normal (windowed) blitting: copies the mixing surface onto the region of
+ *	the primary surface covered by the render window. Handles lost surfaces
+ *	by restoring and retrying once. 'waitTime' receives the time (ms) spent
+ *	waiting for the scan line when _renderModeWaitForCorrectScanLine is set.
+ *	Returns 0 on success or when there is nothing to blit; non-zero on error.
+ *	NOTE(review): the 'hwndChanged' parameter is currently unused here.
+ */
+int VideoRenderDirectDraw::BlitFromMixingBufferToFrontBuffer(
+                                                                 bool hwndChanged,
+                                                                 int& waitTime)
+{
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+    RECT rcRectDest;
+
+    // test for changing mode
+    /*    for(int i= 0; i< 6000000; i ++)
+     {
+     rcRectDest.left = i;
+     }
+     */
+
+    if (IsRectEmpty(&_mixingRect))
+    {
+        // no error just nothing to blit
+        return 0;
+    }
+    if (_mixingSurface == NULL)
+    {
+        // The mixing surface has probably been deleted
+        // and we haven't had time to restore it yet. Wait...
+        return 0;
+    }
+    if (_primarySurface == NULL)
+    {
+        int retVal = CreatePrimarySurface();
+        if (retVal != 0)
+        {
+            // tracing done
+            return retVal;
+        }
+    }
+
+    // first we need to figure out where on the primary surface our window lives
+    ::GetWindowRect(_hWnd, &rcRectDest);
+
+    DWORD signal = DDBLT_WAIT | DDBLT_DDFX;
+
+    // Set transparent color so black pixels let the background show through.
+    if (_transparentBackground)
+    {
+        signal |= DDBLT_KEYSRC;
+        DDCOLORKEY ColorKey;
+        ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
+        ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
+        _mixingSurface->SetColorKey(DDCKEY_SRCBLT, &ColorKey);
+    }
+
+    if (_renderModeWaitForCorrectScanLine)
+    {
+        // wait for the previous draw to complete (tearing workaround)
+        DWORD scanLines = 0;
+        DWORD screenLines = _screenRect.bottom - 1; // scanlines start on 0
+        DWORD screenLines90 = (screenLines * 9) / 10; //  % of the screen is rendered
+        // Measure how long we spend waiting so the caller can exclude it.
+        waitTime = ::timeGetTime();
+        HRESULT hr = _directDraw->GetScanLine(&scanLines);
+        while (screenLines90 > scanLines && hr == DD_OK)
+        {
+            // Release the lock while sleeping so other threads can progress.
+            _confCritSect->Leave();
+            _fullScreenWaitEvent->Wait(3);
+            _confCritSect->Enter();
+            if (_directDraw == NULL)
+            {
+                return -1;
+                //return VIDEO_DIRECT_DRAW_FAILURE;
+            }
+            hr = _directDraw->GetScanLine(&scanLines);
+        }
+        waitTime = ::timeGetTime() - waitTime;
+    }
+
+    HRESULT ddrval = _primarySurface->Blt(&rcRectDest, _mixingSurface,
+                                          &_mixingRect, signal, &ddbltfx);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        if (!::GetForegroundWindow())
+        {
+            // no window, i.e the user has pressed CTRL+ALT+DEL
+            return 0;
+        }
+        ddrval = _primarySurface->Restore();
+        if (ddrval == DD_OK) // Try again
+        {
+            ddrval = _primarySurface->Blt(&rcRectDest, _mixingSurface,
+                                          &_mixingRect, signal, &ddbltfx);
+        }
+        if (ddrval != DD_OK) // If restore failed or second time blt failed. Delete the surface. It will be recreated next time.
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to restore lost _primarySurface  0x%x",
+                         ddrval);
+            _primarySurface->Release();
+            _primarySurface = NULL;
+            if (_mixingSurface)
+            {
+                _mixingSurface->Release();
+                _mixingSurface = NULL;
+            }
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restored lost _primarySurface");
+    }
+    else if (ddrval == DDERR_EXCEPTION)
+    {
+        // Driver threw an exception: drop both surfaces for re-creation.
+        _primarySurface->Release();
+        _primarySurface = NULL;
+        if (_mixingSurface)
+        {
+            _mixingSurface->Release();
+            _mixingSurface = NULL;
+        }
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw exception in _primarySurface");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (ddrval != DD_OK)
+    {
+        if (ddrval != 0x80004005) // Undefined error (E_FAIL). Ignore
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw blt error 0x%x _primarySurface", ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+    return 0;
+}
+
+/**
+ *	Fullscreen mode: waits until most of the screen has been scanned out,
+ *	then schedules a page flip of the primary surface (flipping chain).
+ *	'waitTime' receives the time (ms) spent waiting for the scan line.
+ *	Returns 0 on success, -1 when DirectDraw/surfaces are unavailable or the
+ *	flip fails.
+ */
+int VideoRenderDirectDraw::WaitAndFlip(int& waitTime)
+{
+    if (_primarySurface == NULL)
+    {
+        // no trace, too much in the file
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (_directDraw == NULL)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    // wait for the previous draw to complete
+    DWORD scanLines = 0;
+    DWORD screenLines = _screenRect.bottom - 1; // scanlines start on 0
+    DWORD screenLines90 = (screenLines * 9) / 10; //  % of the screen is rendered
+
+    // Measure how long we spend waiting so the caller can exclude it.
+    waitTime = ::timeGetTime();
+    HRESULT hr = _directDraw->GetScanLine(&scanLines);
+    while (screenLines90 > scanLines && hr == DD_OK)
+    {
+        // Release the lock while sleeping so other threads can progress.
+        _confCritSect->Leave();
+        _fullScreenWaitEvent->Wait(3);
+        _confCritSect->Enter();
+        if (_directDraw == NULL)
+        {
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        hr = _directDraw->GetScanLine(&scanLines);
+    }
+    waitTime = ::timeGetTime() - waitTime;
+    if (screenLines > scanLines)
+    {
+        // this function sucks a lot of the CPU... but it's worth it
+        _directDraw->WaitForVerticalBlank(DDWAITVB_BLOCKBEGIN, NULL);
+    }
+
+    // schedule a flip
+    HRESULT ddrval = _primarySurface->Flip(NULL, DDFLIP_WAIT); // schedule flip DDFLIP_WAIT
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        if (!::GetForegroundWindow())
+        {
+            // no window, i.e the user has pressed CTRL+ALT+DEL
+            return 0;
+        }
+        //if(::IsIconic(_hWnd))
+        //{
+        // need to do this before Restore
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw our window is an icon maximize it ");
+        // When the full screen window is switched out by ALT-TAB or ALT-CTRL-DEL-TASKMANAGER,
+        // this call will hang the app. Remove it to fix the problem.
+        // FIXME:
+        // 1) Why we want to active and max the window when it was minimized?
+        // 2) Why this is needed before restore? We didn't do that in non full screen mode.
+        //::ShowWindow(_hWnd, SW_SHOWMAXIMIZED);
+        //}
+        ddrval = _primarySurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to restore _primarySurface, in flip, 0x%x",
+                         ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restore _primarySurface in flip");
+
+    }
+    else if (ddrval != DD_OK)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    return 0;
+}
+
+// Fullscreen mode: blits the mixing surface onto the back buffer of the
+// flipping chain (WaitAndFlip later presents it). Returns 0 on success or
+// when there is nothing to blit; -1 when the back surface is missing or the
+// blit fails unrecoverably.
+int VideoRenderDirectDraw::BlitFromMixingBufferToBackBuffer()
+{
+    if (_backSurface == NULL)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (IsRectEmpty(&_mixingRect))
+    {
+        // nothing to blit
+        return 0;
+    }
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    // wait for the _backSurface to be available
+    HRESULT ddrval = _backSurface->Blt(&_screenRect, _mixingSurface,
+                                       &_mixingRect, DDBLT_WAIT | DDBLT_DDFX,
+                                       &ddbltfx);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        if (!::GetForegroundWindow())
+        {
+            // no window, i.e the user has pressed CTRL+ALT+DEL
+            return 0;
+        }
+        //if(::IsIconic(_hWnd))
+        //{
+        // need to do this before Restore
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw our window is an icon maximize it ");
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw show our window is an icon maximize it ");
+        // When the full screen window is switch out by ALT-TAB or ALT-CTRL-DEL-TASKMANAGER,
+        // this call will hang the app. Remove it to fix the problem.
+        // FIXME:
+        // 1) Why we want to active and max the window when it was minimized?
+        // 2) Why this is needed before restore? We didn't do that in non full screen mode.
+        //::ShowWindow(_hWnd, SW_SHOWMAXIMIZED);
+        //}
+        // NOTE(review): this restores _primarySurface although the blit that
+        // failed was on _backSurface; presumably restoring the primary of a
+        // flipping chain also restores its attached back buffers - confirm.
+        ddrval = _primarySurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                         "DirectDraw failed to restore _primarySurface");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restored _primarySurface");
+
+        // Force a full redraw of the mixing surface on the next pass.
+        _clearMixingSurface = true;
+
+    }
+    else if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw blt error 0x%x _backSurface", ddrval);
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    return 0;
+}
+
+/*
+ Saving the code for using a clip list instead of HWND, problem was that other transparent
+ HWNDs caused us not to update an area or that we painted in other HWNDs area.
+
+ RECT hWndRect;
+ ::GetWindowRect(_hWnd, &hWndRect);
+
+ LPRGNDATA lpClipList = (LPRGNDATA)malloc(sizeof(RGNDATAHEADER) + sizeof(RECT));
+
+ // now fill out all the structure fields
+ memcpy(lpClipList->Buffer, &hWndRect, sizeof(RECT));
+
+ ::CopyRect(&(lpClipList->rdh.rcBound), &hWndRect);
+ lpClipList->rdh.dwSize = sizeof(RGNDATAHEADER);
+ lpClipList->rdh.iType = RDH_RECTANGLES;
+ lpClipList->rdh.nCount = 1;
+ lpClipList->rdh.nRgnSize = sizeof(RECT) * lpClipList->rdh.nCount;
+ ddrval= _directDrawClipper->SetClipList(lpClipList, 0);
+
+ void Visible(HWND hwnd, HRGN &hRgn)
+ {
+ if (!IsWindowVisible(hwnd))      // If the window is not visible
+ {
+ if(CombineRgn(hRgn, hRgn, hRgn, RGN_XOR) == NULLREGION)
+ {
+ return;
+ }
+ }
+ // Gets the topmost window
+ HWND hWnd=GetTopWindow(NULL);
+ while (hWnd != NULL && hWnd != hwnd)  // If the window is above in Z-order
+ {
+ if (IsWindowVisible(hWnd))      // If the window is visible
+ {
+ RECT Rect;
+ // Gets window dimension
+ GetWindowRect(hWnd, &Rect);
+ // Creates a region corresponding to the window
+ if(Rect.left > 0) // test for now
+ {
+ HRGN hrgnWnd = CreateRectRgn(Rect.left, Rect.top, Rect.right, Rect.bottom);
+ //                int err = GetUpdateRgn(hWnd, hrgnWnd, FALSE);
+ // Creates a region corresponding to region not overlapped
+ if(CombineRgn(hRgn, hRgn, hrgnWnd, RGN_DIFF) == COMPLEXREGION)
+ {
+ int a = 0;
+ }
+ DeleteObject(hrgnWnd);
+ }
+ }
+ // Loops through all windows till the specified window
+ hWnd = GetWindow(hWnd, GW_HWNDNEXT);
+ }
+
+ HRGN region;
+ region = CreateRectRgn(0, 0, 500, 500);
+
+ // Get the affected region
+ //    if (GetUpdateRgn(_hWnd, region, FALSE) != ERROR)
+ HDC dc = GetDC(_hWnd);
+ if(GetClipRgn(dc, region) > 0)
+ {
+ int buffsize;
+ UINT x;
+ RGNDATA *buff;
+ POINT TopLeft;
+
+ // Get the top-left point of the client area
+ TopLeft.x = 0;
+ TopLeft.y = 0;
+ if (!ClientToScreen(_hWnd, &TopLeft))
+ {
+ int a = 0;
+ }
+
+
+ // Get the size of buffer required
+ buffsize = GetRegionData(region, 0, 0);
+ if (buffsize != 0)
+ {
+ buff = (RGNDATA *) new BYTE [buffsize];
+ if (buff == NULL)
+ {
+ int a = 0;
+ }
+
+ // Now get the region data
+ if(GetRegionData(region, buffsize, buff))
+ {
+ if(buff->rdh.nCount > 0)
+ {
+ ::OffsetRect(&(buff->rdh.rcBound), TopLeft.x, TopLeft.y);
+ for (x=0; x<(buff->rdh.nCount); x++)
+ {
+ RECT *urect = (RECT *) (((BYTE *) buff) + sizeof(RGNDATAHEADER) + (x * sizeof(RECT)));
+ ::OffsetRect(urect, TopLeft.x, TopLeft.y);
+ char logStr[256];
+ _snprintf(logStr,256, "rect T:%d L:%d B:%d R:%d\n",urect->top, urect->left, urect->bottom, urect->right);
+ OutputDebugString(logStr);
+
+ }
+ OutputDebugString("\n");
+ _directDrawClipper->SetClipList(buff, 0);
+ }
+ LPRGNDATA lpClipList = (LPRGNDATA)malloc(sizeof(RGNDATAHEADER) + sizeof(RECT) * buff->rdh.nCount);
+ if(buff->rdh.nCount > 0)
+ {
+ _directDrawClipper->SetClipList(lpClipList, 0);
+
+ lpClipList->
+ DWORD size = sizeof(RGNDATAHEADER) + sizeof(RECT)* buff->rdh.nCount;
+ lpClipList->rdh.dwSize = sizeof(RGNDATAHEADER);
+ lpClipList->rdh.iType = RDH_RECTANGLES;
+ lpClipList->rdh.nCount = 1;
+
+ HRESULT ddrval1 = _directDrawClipper->GetClipList(NULL, lpClipList, &size);
+ memcpy(lpClipList->Buffer, &rcRectDest, sizeof(RECT));
+ ::CopyRect(&(lpClipList->rdh.rcBound), &rcRectDest);
+ _directDrawClipper->SetClipList(lpClipList, 0);
+ }                    }
+
+ for (x=0; x<(buff->rdh.nCount); x++)
+ {
+ // Obtain the rectangles from the list
+ RECT *urect = (RECT *) (((BYTE *) buff) + sizeof(RGNDATAHEADER) + (x * sizeof(RECT)));
+ int a = 0;
+
+ }
+ delete lpClipList;
+ }
+ delete buff;
+ }
+ }
+ */
+/*
+ void VideoRenderDirectDraw::Wait()
+ {
+ // wait for previus draw to complete
+ int count = 0;
+ DWORD scanLines = 0;
+ DWORD screenLines = _screenRect.bottom -1; // scanlines start on 0
+ DWORD screenLines75 = (screenLines*3)/4; //  % of the screen is rendered
+ HRESULT hr = DD_OK;
+ if(_directDraw == NULL)
+ {
+ return;
+ }
+ hr =_directDraw->GetScanLine(&scanLines);
+ while ( screenLines75 > scanLines && hr == DD_OK)
+ {
+ //   		_confCritSect->Leave();
+ _screenEvent->Wait(10);
+ //      _confCritSect->Enter();
+ if(_directDraw == NULL)
+ {
+ return;
+ }
+ hr = _directDraw->GetScanLine(&scanLines);
+ }
+ }
+ */
+
+// Explicit start is not supported by this implementation; rendering is
+// driven elsewhere. NOTE(review): traces at kTraceError yet returns
+// success (0) - confirm the intended severity/return combination.
+WebRtc_Word32 VideoRenderDirectDraw::StartRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+// Explicit stop is not supported by this implementation.
+// NOTE(review): traces at kTraceError yet returns success (0) - confirm.
+WebRtc_Word32 VideoRenderDirectDraw::StopRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+// Changing the render window after construction is not supported; always
+// fails with -1 (the 'window' argument is ignored).
+WebRtc_Word32 VideoRenderDirectDraw::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return -1;
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/windows/video_render_directdraw.h b/trunk/src/modules/video_render/main/source/windows/video_render_directdraw.h
new file mode 100644
index 0000000..e790661
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/video_render_directdraw.h
@@ -0,0 +1,399 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
+
+#include "typedefs.h"
+#include "i_video_render_win.h"
+#include "common_video/libyuv/include/libyuv.h"
+
+#include "ddraw.h"
+#include <map>
+#include <list>
+
+// Added
+#include "video_render_defines.h"
+
+#pragma comment(lib, "ddraw.lib")       // located in DirectX SDK
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class Trace;
+
+class VideoRenderDirectDraw;
+
+// some typedefs to make it easy to test different versions
+typedef IDirectDraw7 DirectDraw;
+typedef IDirectDrawSurface7 DirectDrawSurface;
+typedef DDSURFACEDESC2 DirectDrawSurfaceDesc;
+typedef DDSCAPS2 DirectDrawCaps;
+typedef std::pair<int, unsigned int> ZorderPair;
+
+class WindowsThreadCpuUsage
+{
+public:
+    WindowsThreadCpuUsage();
+    int GetCpuUsage(); //in % since last call
+    DWORD LastGetCpuTime()
+    {
+        return _lastGetCpuUsageTime;
+    }
+    const enum
+    {
+        CPU_CHECK_INTERVAL = 1000
+    };
+private:
+    __int64 _lastCpuUsageTime;
+    DWORD _lastGetCpuUsageTime;
+    int _lastCpuUsage;
+    HANDLE _hThread;
+    int _cores;
+};
+
+class DirectDrawStreamSettings
+{
+public:
+    DirectDrawStreamSettings();
+
+    float _startWidth;
+    float _stopWidth;
+    float _startHeight;
+    float _stopHeight;
+
+    float _cropStartWidth;
+    float _cropStopWidth;
+    float _cropStartHeight;
+    float _cropStopHeight;
+};
+
+class DirectDrawBitmapSettings
+{
+public:
+    DirectDrawBitmapSettings();
+    ~DirectDrawBitmapSettings();
+
+    int SetBitmap(Trace* trace, DirectDraw* directDraw);
+
+    HBITMAP _transparentBitMap;
+    float _transparentBitmapLeft;
+    float _transparentBitmapRight;
+    float _transparentBitmapTop;
+    float _transparentBitmapBottom;
+    int _transparentBitmapWidth;
+    int _transparentBitmapHeight;
+    DDCOLORKEY* _transparentBitmapColorKey;
+    DirectDrawSurface* _transparentBitmapSurface; // size of bitmap image
+};
+
+class DirectDrawTextSettings
+{
+public:
+    DirectDrawTextSettings();
+    ~DirectDrawTextSettings();
+
+    int SetText(const char* text, int textLength, COLORREF colorText,
+                COLORREF colorBg, float left, float top, float right,
+                float bottom);
+
+    char* _ptrText;
+    WebRtc_UWord32 _textLength;
+    COLORREF _colorRefText;
+    COLORREF _colorRefBackground;
+    float _textLeft;
+    float _textRight;
+    float _textTop;
+    float _textBottom;
+    bool _transparent;
+};
+
+class DirectDrawChannel: public VideoRenderCallback
+{
+public:
+    DirectDrawChannel(DirectDraw* directDraw,
+                          VideoType blitVideoType,
+                          VideoType incomingVideoType,
+                          VideoType screenVideoType,
+                          VideoRenderDirectDraw* owner);
+
+    int FrameSizeChange(int width, int height, int numberOfStreams);
+    int DeliverFrame(unsigned char* buffer, int bufferSize,
+                     unsigned int timeStamp90KHz);
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    int ChangeDeliverColorFormat(bool useScreenType);
+
+    void AddRef();
+    void Release();
+
+    void SetStreamSettings(VideoRenderDirectDraw* DDObj, short streamId,
+                           float startWidth, float startHeight,
+                           float stopWidth, float stopHeight);
+    void SetStreamCropSettings(VideoRenderDirectDraw* DDObj,
+                               short streamId, float startWidth,
+                               float startHeight, float stopWidth,
+                               float stopHeight);
+
+    int GetStreamSettings(VideoRenderDirectDraw* DDObj, short streamId,
+                          float& startWidth, float& startHeight,
+                          float& stopWidth, float& stopHeight);
+
+    void GetLargestSize(RECT* mixingRect);
+    int
+            BlitFromOffscreenBufferToMixingBuffer(
+                                                  VideoRenderDirectDraw* DDObj,
+                                                  short streamID,
+                                                  DirectDrawSurface* mixingSurface,
+                                                  RECT &dstRect, bool demuxing);
+    bool IsOffScreenSurfaceUpdated(VideoRenderDirectDraw* DDobj);
+
+protected:
+    virtual ~DirectDrawChannel();
+
+private:
+    CriticalSectionWrapper* _critSect; // protect members from change while using them
+    int _refCount;
+    int _width;
+    int _height;
+    int _numberOfStreams;
+    bool _deliverInScreenType;
+    bool _doubleBuffer;
+    DirectDraw* _directDraw;
+    DirectDrawSurface* _offScreenSurface; // size of incoming stream
+    DirectDrawSurface* _offScreenSurfaceNext; // size of incoming stream
+    VideoType _blitVideoType;
+    VideoType _originalBlitVideoType;
+    VideoType _incomingVideoType;
+    VideoType _screenVideoType;
+    enum
+    {
+        MAX_FRAMEDELIVER_TIME = 20
+    }; //Maximum time it might take to deliver a frame (process time in DeliverFrame)
+    enum
+    {
+        MAX_NO_OF_LATE_FRAMEDELIVER_TIME = 10
+    }; //No of times we allow DeliverFrame process time to exceed MAX_FRAMEDELIVER_TIME before we take action.
+    VideoFrame _tempRenderBuffer;
+
+    std::map<unsigned long long, DirectDrawStreamSettings*>
+            _streamIdToSettings;
+    bool _offScreenSurfaceUpdated;
+    VideoRenderDirectDraw* _owner;
+};
+
+class VideoRenderDirectDraw: IVideoRenderWin
+{
+public:
+    VideoRenderDirectDraw(Trace* trace, HWND hWnd, bool fullscreen);
+    ~VideoRenderDirectDraw();
+public:
+    //IVideoRenderWin
+
+    /**************************************************************************
+     *
+     *   Init
+     *
+     ***************************************************************************/
+    virtual WebRtc_Word32 Init();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback
+            * CreateChannel(const WebRtc_UWord32 streamId,
+                            const WebRtc_UWord32 zOrder, const float left,
+                            const float top, const float right,
+                            const float bottom);
+
+    virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            WebRtc_UWord32& zOrder,
+                                            float& left, float& top,
+                                            float& right, float& bottom);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen();
+
+    virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
+                                      const WebRtc_UWord16 streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                            WebRtc_UWord64& availableMemory);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 colorText,
+                                  const WebRtc_UWord32 colorBg,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+public:
+
+    // Used for emergency stops...
+    int Stop();
+
+    DirectDrawChannel* ShareDirectDrawChannel(int channel);
+    DirectDrawChannel* ConfigureDirectDrawChannel(int channel,
+                                                      unsigned char streamID,
+                                                      int zOrder, float left,
+                                                      float top, float right,
+                                                      float bottom);
+
+    int AddDirectDrawChannel(int channel, unsigned char streamID, int zOrder,
+                             DirectDrawChannel*);
+
+    VideoType GetPerferedVideoFormat();
+    bool HasChannels();
+    bool HasChannel(int channel);
+    bool DeliverInScreenType();
+    int GetChannels(std::list<int>& channelList);
+
+    // code for getting graphics settings    
+    int GetScreenResolution(int& screenWidth, int& screenHeight);
+    int UpdateSystemCPUUsage(int systemCPU);
+
+    int SetBitmap(HBITMAP bitMap, unsigned char pictureId,
+                  DDCOLORKEY* colorKey, float left, float top, float right,
+                  float bottom);
+
+    bool IsPrimaryOrMixingSurfaceOnSystem();
+    bool CanBltFourCC()
+    {
+        return _bCanBltFourcc;
+    }
+
+protected:
+    static bool RemoteRenderingThreadProc(void* obj);
+    bool RemoteRenderingProcess();
+
+private:
+    int CheckCapabilities();
+    int CreateMixingSurface();
+    int CreatePrimarySurface();
+
+    int FillSurface(DirectDrawSurface *pDDSurface, RECT* rect);
+    int DrawOnSurface(unsigned char* buffer, int bufferSize);
+    int BlitFromOffscreenBuffersToMixingBuffer();
+    int BlitFromBitmapBuffersToMixingBuffer();
+    int BlitFromTextToMixingBuffer();
+
+    bool HasHWNDChanged();
+    void DecideBestRenderingMode(bool hwndChanged, int totalRenderTime);
+
+    // in fullscreen flip mode
+    int WaitAndFlip(int& waitTime);
+    int BlitFromMixingBufferToBackBuffer();
+
+    // in normal window mode
+    int BlitFromMixingBufferToFrontBuffer(bool hwndChanged, int& waitTime);
+
+    // private members
+    Trace* _trace;
+    CriticalSectionWrapper* _confCritSect; // protect members from change while using them
+
+    bool _fullscreen;
+    bool _demuxing;
+    bool _transparentBackground;
+    bool _supportTransparency;
+    bool _canStretch;
+    bool _canMirrorLeftRight;
+    bool _clearMixingSurface;
+    bool _deliverInScreenType;
+    bool _renderModeWaitForCorrectScanLine;
+    bool _deliverInHalfFrameRate;
+    bool _deliverInQuarterFrameRate;
+    bool _bCanBltFourcc;
+    bool _frameChanged; // True if a frame has changed or bitmap or text has changed.
+    int _processCount;
+    HWND _hWnd;
+    RECT _screenRect; // whole screen as a rect
+    RECT _mixingRect;
+    RECT _originalHwndRect;
+    RECT _hwndRect;
+
+    VideoType _incomingVideoType;
+    VideoType _blitVideoType;
+    VideoType _rgbVideoType;
+
+    DirectDraw* _directDraw;
+    DirectDrawSurface* _primarySurface; // size of screen
+    DirectDrawSurface* _backSurface; // size of screen
+    DirectDrawSurface* _mixingSurface; // size of screen
+
+    std::map<unsigned char, DirectDrawBitmapSettings*> _bitmapSettings;
+    std::map<unsigned char, DirectDrawTextSettings*> _textSettings;
+    std::map<int, DirectDrawChannel*> _directDrawChannels;
+    std::multimap<int, unsigned int> _directDrawZorder;
+
+    EventWrapper* _fullScreenWaitEvent;
+    EventWrapper* _screenEvent;
+    ThreadWrapper* _screenRenderThread;
+    WindowsThreadCpuUsage _screenRenderCpuUsage;
+
+    int _lastRenderModeCpuUsage;
+
+    // Used for emergency stop caused by OnDisplayChange
+    bool _blit;
+
+    //code for providing graphics settings
+    DWORD _totalMemory;
+    DWORD _availableMemory;
+    int _systemCPUUsage;
+
+    // Variables used for checking render time
+    int _maxAllowedRenderTime;
+    int _nrOfTooLongRenderTimes;
+    bool _isPrimaryOrMixingSurfaceOnSystem;
+};
+
+} //namespace webrtc
+
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
diff --git a/trunk/src/modules/video_render/main/source/windows/video_render_windows_impl.cc b/trunk/src/modules/video_render/main/source/windows/video_render_windows_impl.cc
new file mode 100644
index 0000000..6f2237f
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/video_render_windows_impl.cc
@@ -0,0 +1,989 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#include "video_render_windows_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#ifdef DIRECTDRAW_RENDERING
+#include "video_render_directdraw.h"
+#endif
+#ifdef DIRECT3D9_RENDERING
+#include "video_render_direct3d9.h"
+#endif
+
+#include <tchar.h>
+
+namespace webrtc {
+
+VideoRenderWindowsImpl::VideoRenderWindowsImpl(
+                                               const WebRtc_Word32 id,
+                                               const VideoRenderType videoRenderType,
+                                               void* window,
+                                               const bool fullscreen) :
+            _id(id),
+            _renderWindowsCritsect(
+                                   *CriticalSectionWrapper::CreateCriticalSection()),
+            _prtWindow(window), _fullscreen(fullscreen), _ptrRendererWin(NULL)
+{
+}
+
+VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
+{
+    delete &_renderWindowsCritsect;
+    if (_ptrRendererWin)
+    {
+        delete _ptrRendererWin;
+        _ptrRendererWin = NULL;
+    }
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::Init()
+{
+    //LogOSAndHardwareDetails();
+    CheckHWAcceleration();
+
+    _renderMethod = kVideoRenderWinD3D9;
+
+    // Create the win renderer
+    switch (_renderMethod)
+    {
+        case kVideoRenderWinDd:
+        {
+#ifdef DIRECTDRAW_RENDERING
+            VideoRenderDirectDraw* ptrRenderer;
+            ptrRenderer = new VideoRenderDirectDraw(NULL, (HWND) _prtWindow, _fullscreen);
+            if (ptrRenderer == NULL)
+            {
+                break;
+            }
+            _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
+#else
+            return -1;
+#endif  //DIRECTDRAW_RENDERING
+        }
+            break;
+        case kVideoRenderWinD3D9:
+        {
+#ifdef DIRECT3D9_RENDERING
+            VideoRenderDirect3D9* ptrRenderer;
+            ptrRenderer = new VideoRenderDirect3D9(NULL, (HWND) _prtWindow, _fullscreen);
+            if (ptrRenderer == NULL)
+            {
+                break;
+            }
+            _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
+#else
+            return -1;
+#endif  //DIRECT3D9_RENDERING
+        }
+            break;
+        default:
+            break;
+    }
+
+    //Init renderer
+    if (_ptrRendererWin)
+        return _ptrRendererWin->Init();
+    else
+        return -1;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    if (!_ptrRendererWin)
+    {
+        return -1;
+    }
+    else
+    {
+        return _ptrRendererWin->ChangeWindow(window);
+    }
+}
+
+VideoRenderCallback*
+VideoRenderWindowsImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                                const WebRtc_UWord32 zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    VideoRenderCallback* renderCallback = NULL;
+
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
+                                                        top, right, bottom);
+    }
+
+    return renderCallback;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::DeleteIncomingRenderStream(
+                                                                 const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->DeleteChannel(streamId);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
+                                                                        const WebRtc_UWord32 streamId,
+                                                                        WebRtc_UWord32& zOrder,
+                                                                        float& left,
+                                                                        float& top,
+                                                                        float& right,
+                                                                        float& bottom) const
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::StartRender()
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->StartRender();
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::StopRender()
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->StopRender();
+    }
+    return error;
+}
+
+VideoRenderType VideoRenderWindowsImpl::RenderType()
+{
+    return kRenderWindows;
+}
+
+RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderWindowsImpl::FullScreen()
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    bool fullscreen = false;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        fullscreen = _ptrRendererWin->IsFullScreen();
+    }
+    return fullscreen;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::GetGraphicsMemory(
+                                                        WebRtc_UWord64& totalGraphicsMemory,
+                                                        WebRtc_UWord64& availableGraphicsMemory) const
+{
+    if (_ptrRendererWin)
+    {
+        return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
+                                                  availableGraphicsMemory);
+    }
+
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::GetScreenResolution(
+                                                          WebRtc_UWord32& screenWidth,
+                                                          WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+WebRtc_UWord32 VideoRenderWindowsImpl::RenderFrameRate(
+                                                       const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetStreamCropping(
+                                                        const WebRtc_UWord32 streamId,
+                                                        const float left,
+                                                        const float top,
+                                                        const float right,
+                                                        const float bottom)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
+                                             bottom);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::ConfigureRenderer(
+                                                        const WebRtc_UWord32 streamId,
+                                                        const unsigned int zOrder,
+                                                        const float left,
+                                                        const float top,
+                                                        const float right,
+                                                        const float bottom)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetTransparentBackground(
+                                                               const bool enable)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetTransparentBackground(enable);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetText(
+                                              const WebRtc_UWord8 textId,
+                                              const WebRtc_UWord8* text,
+                                              const WebRtc_Word32 textLength,
+                                              const WebRtc_UWord32 textColorRef,
+                                              const WebRtc_UWord32 backgroundColorRef,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetText(textId, text, textLength,
+                                         textColorRef, backgroundColorRef,
+                                         left, top, right, bottom);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
+                                                const WebRtc_UWord8 pictureId,
+                                                const void* colorKey,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    CriticalSectionScoped cs(_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
+                                           top, right, bottom);
+    }
+    return error;
+}
+
+void VideoRenderWindowsImpl::LogOSAndHardwareDetails()
+{
+    HRESULT hr;
+    IDxDiagProvider* m_pDxDiagProvider = NULL;
+    IDxDiagContainer* m_pDxDiagRoot = NULL;
+
+    hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+    bool coUninitializeIsRequired = true;
+    if (FAILED(hr))
+    {
+        // Avoid calling CoUninitialize() since CoInitializeEx() failed.
+        coUninitializeIsRequired = false;
+        if (hr == RPC_E_CHANGED_MODE)
+        {
+            // Calling thread has already initialized COM to be used in a single-threaded
+            // apartment (STA). We are then prevented from using STA.
+            // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is set".
+            //
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideoRenderer,
+                         _id,
+                         "VideoRenderWindowsImpl::LogOSAndHardwareDetails() CoInitializeEx(NULL, COINIT_APARTMENTTHREADED) => RPC_E_CHANGED_MODE, error 0x%x",
+                         hr);
+        }
+    }
+
+    hr = CoCreateInstance(CLSID_DxDiagProvider, NULL, CLSCTX_INPROC_SERVER,
+                          IID_IDxDiagProvider, (LPVOID*) &m_pDxDiagProvider);
+
+    if (FAILED(hr) || m_pDxDiagProvider == NULL)
+    {
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    // Fill out a DXDIAG_INIT_PARAMS struct and pass it to IDxDiagContainer::Initialize
+    // Passing in TRUE for bAllowWHQLChecks, allows dxdiag to check if drivers are 
+    // digitally signed as logo'd by WHQL, which may connect via internet to update 
+    // WHQL certificates.    
+    DXDIAG_INIT_PARAMS dxDiagInitParam;
+    ZeroMemory(&dxDiagInitParam, sizeof(DXDIAG_INIT_PARAMS));
+
+    dxDiagInitParam.dwSize = sizeof(DXDIAG_INIT_PARAMS);
+    dxDiagInitParam.dwDxDiagHeaderVersion = DXDIAG_DX9_SDK_VERSION;
+    dxDiagInitParam.bAllowWHQLChecks = TRUE;
+    dxDiagInitParam.pReserved = NULL;
+
+    hr = m_pDxDiagProvider->Initialize(&dxDiagInitParam);
+    if (FAILED(hr))
+    {
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    hr = m_pDxDiagProvider->GetRootContainer(&m_pDxDiagRoot);
+    if (FAILED(hr) || m_pDxDiagRoot == NULL)
+    {
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    IDxDiagContainer* pObject = NULL;
+
+    hr = m_pDxDiagRoot->GetChildContainer(L"DxDiag_SystemInfo", &pObject);
+    if (FAILED(hr) || pObject == NULL)
+    {
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    TCHAR m_szDirectXVersionLongEnglish[100];
+    TCHAR m_szOSLocalized[100];
+    TCHAR m_szProcessorEnglish[200];
+    TCHAR m_szSystemManufacturerEnglish[200];
+
+    ZeroMemory(m_szDirectXVersionLongEnglish, sizeof(TCHAR) * 100);
+    ZeroMemory(m_szOSLocalized, sizeof(TCHAR) * 100);
+    ZeroMemory(m_szProcessorEnglish, sizeof(TCHAR) * 200);
+    ZeroMemory(m_szSystemManufacturerEnglish, sizeof(TCHAR) * 200);
+
+    GetStringValue( pObject, L"szDirectXVersionLongEnglish",
+                   EXPAND(m_szDirectXVersionLongEnglish) );
+    GetStringValue(pObject, L"szOSLocalized", EXPAND(m_szOSLocalized) );
+    GetStringValue(pObject, L"szProcessorEnglish", EXPAND(m_szProcessorEnglish) );
+    GetStringValue( pObject, L"szSystemManufacturerEnglish",
+                   EXPAND(m_szSystemManufacturerEnglish) );
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "System Manufacturer             --- %s",
+                 m_szSystemManufacturerEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Processor                       --- %s", m_szProcessorEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Operating System                --- %s", m_szOSLocalized);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "DirectX Version                 --- %s",
+                 m_szDirectXVersionLongEnglish);
+
+    if (pObject)
+        pObject->Release();
+
+    struct DisplayInfo
+    {
+        TCHAR m_szDescription[200];
+        TCHAR m_szManufacturer[200];
+        TCHAR m_szChipType[100];
+        TCHAR m_szDisplayMemoryEnglish[100];
+        TCHAR m_szDisplayModeEnglish[100];
+        TCHAR m_szDriverName[100];
+        TCHAR m_szDriverVersion[100];
+        TCHAR m_szDDStatusEnglish[100];
+        TCHAR m_szD3DStatusEnglish[100];
+        BOOL m_bDDAccelerationEnabled;
+        BOOL m_bNoHardware;
+        BOOL m_b3DAccelerationExists;
+        BOOL m_b3DAccelerationEnabled;
+    };
+
+    WCHAR wszContainer[256];
+    IDxDiagContainer* pContainer = NULL;
+
+    DWORD nInstanceCount = 0;
+    DWORD nItem = 0;
+    DWORD nCurCount = 0;
+
+    // Get the IDxDiagContainer object called "DxDiag_DisplayDevices".
+    // This call may take some time while dxdiag gathers the info.
+    if (FAILED(hr = m_pDxDiagRoot->GetChildContainer(L"DxDiag_DisplayDevices",
+                                                     &pContainer)))
+    {
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    if (FAILED(hr = pContainer->GetNumberOfChildContainers(&nInstanceCount)))
+    {
+        pContainer->Release();
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    DisplayInfo *pDisplayInfo = new DisplayInfo;
+    if (pDisplayInfo == NULL)
+        return;
+    ZeroMemory(pDisplayInfo, sizeof(DisplayInfo));
+
+    hr = pContainer->EnumChildContainerNames(nItem, wszContainer, 256);
+    if (FAILED(hr))
+    {
+        delete pDisplayInfo;
+        pContainer->Release();
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    hr = pContainer->GetChildContainer(wszContainer, &pObject);
+    if (FAILED(hr) || pObject == NULL)
+    {
+        delete pDisplayInfo;
+        pContainer->Release();
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    GetStringValue( pObject, L"szDescription",
+                   EXPAND(pDisplayInfo->m_szDescription) );
+    GetStringValue( pObject, L"szManufacturer",
+                   EXPAND(pDisplayInfo->m_szManufacturer) );
+    GetStringValue(pObject, L"szChipType", EXPAND(pDisplayInfo->m_szChipType) );
+    GetStringValue( pObject, L"szDisplayMemoryEnglish",
+                   EXPAND(pDisplayInfo->m_szDisplayMemoryEnglish) );
+    GetStringValue( pObject, L"szDisplayModeEnglish",
+                   EXPAND(pDisplayInfo->m_szDisplayModeEnglish) );
+    GetStringValue( pObject, L"szDriverName",
+                   EXPAND(pDisplayInfo->m_szDriverName) );
+    GetStringValue( pObject, L"szDriverVersion",
+                   EXPAND(pDisplayInfo->m_szDriverVersion) );
+    GetBoolValue(pObject, L"bDDAccelerationEnabled",
+                 &pDisplayInfo->m_bDDAccelerationEnabled);
+    GetBoolValue(pObject, L"bNoHardware", &pDisplayInfo->m_bNoHardware);
+    GetBoolValue(pObject, L"bDDAccelerationEnabled",
+                 &pDisplayInfo->m_bDDAccelerationEnabled);
+    GetBoolValue(pObject, L"b3DAccelerationExists",
+                 &pDisplayInfo->m_b3DAccelerationExists);
+    GetBoolValue(pObject, L"b3DAccelerationEnabled",
+                 &pDisplayInfo->m_b3DAccelerationEnabled);
+    GetStringValue( pObject, L"szDDStatusEnglish",
+                   EXPAND(pDisplayInfo->m_szDDStatusEnglish));
+    GetStringValue( pObject, L"szD3DStatusEnglish",
+                   EXPAND(pDisplayInfo->m_szD3DStatusEnglish));
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Name                     --- %s",
+                 pDisplayInfo->m_szDescription);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Manufacturer             --- %s",
+                 pDisplayInfo->m_szManufacturer);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device ChipType                 --- %s",
+                 pDisplayInfo->m_szChipType);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Approx. Total Device Memory     --- %s",
+                 pDisplayInfo->m_szDisplayMemoryEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Current Display Mode            --- %s",
+                 pDisplayInfo->m_szDisplayModeEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Driver Name              --- %s",
+                 pDisplayInfo->m_szDriverName);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Driver Version           --- %s",
+                 pDisplayInfo->m_szDriverVersion);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "DirectDraw Acceleration Enabled --- %s",
+                 pDisplayInfo->m_szDescription ? "Enabled" : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "bNoHardware                     --- %s",
+                 pDisplayInfo->m_bNoHardware ? "Enabled" : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "b3DAccelerationExists Enabled   --- %s",
+                 pDisplayInfo->m_b3DAccelerationExists ? "Enabled" : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "b3DAccelerationEnabled Enabled  --- %s",
+                 pDisplayInfo->m_b3DAccelerationEnabled ? "Enabled"
+                         : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "DDraw Status                    --- %s",
+                 pDisplayInfo->m_szDDStatusEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "D3D Status                      --- %s",
+                 pDisplayInfo->m_szD3DStatusEnglish);
+
+    // Get OS version
+    OSVERSIONINFOEX osvie;
+    osvie.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+    GetVersionEx((LPOSVERSIONINFO) & osvie);
+    /*
+     Operating system	    Version number	dwMajorVersion	dwMinorVersion
+     Windows 7	            6.1	            6	            1
+     Windows Server 2008 R2	6.1	            6	            1
+     Windows Server 2008	    6.0	            6           	0
+     Windows Vista	        6.0	            6	            0
+     Windows Server 2003 R2	5.2	            5	            2
+     Windows Server 2003	    5.2	            5           	2
+     Windows XP	            5.1	            5           	1
+     Windows 2000	        5.0         	5	            0
+     */
+    //RDP problem exists only when XP is involved
+    if (osvie.dwMajorVersion < 6)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVideoRenderer, _id,
+                     "Checking for RDP driver");
+        if (_tcsncmp(pDisplayInfo->m_szDriverName, _T("RDPDD.dll"), 9) == 0)
+        {
+            //
+        }
+    }
+
+    if (pObject)
+    {
+        pObject->Release();
+        pObject = NULL;
+    }
+
+    if (pContainer)
+        pContainer->Release();
+
+    if (m_pDxDiagProvider)
+        m_pDxDiagProvider->Release();
+
+    if (m_pDxDiagRoot)
+        m_pDxDiagRoot->Release();
+
+    if (pDisplayInfo)
+        delete pDisplayInfo;
+
+    if (coUninitializeIsRequired)
+        CoUninitialize();
+
+    return;
+}
+
+//-----------------------------------------------------------------------------
+// Name: GetStringValue()
+// Desc: Get a string value from a IDxDiagContainer object. Copies the
+//       property named wstrName into strValue (at most nStrLen-1 characters)
+//       and always NUL-terminates. Returns the failing HRESULT from
+//       GetProp(), E_INVALIDARG if the property is not a BSTR, else S_OK.
+//-----------------------------------------------------------------------------
+HRESULT VideoRenderWindowsImpl::GetStringValue(IDxDiagContainer* pObject,
+                                               WCHAR* wstrName,
+                                               TCHAR* strValue, int nStrLen)
+{
+    HRESULT hr;
+    VARIANT var;
+    VariantInit(&var);
+
+    if (FAILED(hr = pObject->GetProp(wstrName, &var)))
+        return hr;
+
+    if (var.vt != VT_BSTR)
+    {
+        // Bug fix: GetProp succeeded, so the VARIANT owns data (e.g. a BSTR
+        // of another type); it must be cleared here or that data is leaked.
+        VariantClear(&var);
+        return E_INVALIDARG;
+    }
+
+#ifdef _UNICODE
+    wcsncpy( strValue, var.bstrVal, nStrLen-1 );
+#else
+    wcstombs(strValue, var.bstrVal, nStrLen);
+#endif
+    // wcsncpy/wcstombs do not guarantee termination when the source fills
+    // the buffer, so force a terminating NUL in the last slot.
+    strValue[nStrLen - 1] = TEXT('\0');
+    VariantClear(&var);
+
+    return S_OK;
+}
+
+//-----------------------------------------------------------------------------
+// Name: GetBoolValue()
+// Desc: Get a BOOL value from a IDxDiagContainer object. Stores the property
+//       named wstrName into *pbValue. Returns the failing HRESULT from
+//       GetProp(), E_INVALIDARG if the property is not a VT_BOOL, else S_OK.
+//-----------------------------------------------------------------------------
+HRESULT VideoRenderWindowsImpl::GetBoolValue(IDxDiagContainer* pObject,
+                                             WCHAR* wstrName, BOOL* pbValue)
+{
+    HRESULT hr;
+    VARIANT var;
+    VariantInit(&var);
+
+    if (FAILED(hr = pObject->GetProp(wstrName, &var)))
+        return hr;
+
+    if (var.vt != VT_BOOL)
+    {
+        // Bug fix: clear the VARIANT before the error return so any owned
+        // data (e.g. a BSTR) is released instead of leaked.
+        VariantClear(&var);
+        return E_INVALIDARG;
+    }
+
+    *pbValue = (var.boolVal != 0);
+    VariantClear(&var);
+
+    return S_OK;
+}
+
+// Reads the registry switches that can disable DirectDraw/DCI hardware
+// acceleration. Returns 0 when acceleration looks usable and -1 when either
+// DirectDraw or DCI acceleration is disabled. Direct3D is inspected and
+// logged but deliberately ignored for the return value (see bottom).
+int VideoRenderWindowsImpl::CheckHWAcceleration()
+{
+    HKEY regKey;
+    DWORD value = 0;
+    DWORD valueLength = sizeof(value);
+
+    bool directDraw = true;
+    bool direct3D = true;
+    bool dci = true;
+
+    // DirectDraw: EmulationOnly == 1 means software emulation is forced.
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, _T("SOFTWARE\\Microsoft\\DirectDraw"),
+                     0, KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have the registry key.
+        value = 0;
+        // Bug fix: RegQueryValueEx rewrites valueLength on each call, so it
+        // must be reset before every query.
+        valueLength = sizeof(value);
+        if (RegQueryValueEx(regKey, _T("EmulationOnly"), NULL, NULL,
+                            (BYTE*) &value, &valueLength) == ERROR_SUCCESS)
+        {
+            if (value == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DirectDraw acceleration is disabled");
+                directDraw = false;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DirectDraw acceleration is enabled");
+            }
+        }
+        else
+        {
+            // Could not get the value for this one.
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Could not find EmulationOnly key, DirectDraw acceleration is probably enabled");
+        }
+        RegCloseKey(regKey);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Could not open DirectDraw settings");
+    }
+
+    // Direct3D: SoftwareOnly == 1 means hardware D3D drivers are disabled.
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE,
+                     _T("SOFTWARE\\Microsoft\\Direct3D\\Drivers"), 0,
+                     KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have the registry key.
+        value = 0;
+        valueLength = sizeof(value);
+        if (RegQueryValueEx(regKey, _T("SoftwareOnly"), NULL, NULL,
+                            (BYTE*) &value, &valueLength) == ERROR_SUCCESS)
+        {
+            if (value == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "Direct3D acceleration is disabled");
+                direct3D = false;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "Direct3D acceleration is enabled");
+            }
+        }
+        else
+        {
+            // Could not get the value for this one.
+            // (Typo fix: the message used to say "SoftwarOnly".)
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Could not find SoftwareOnly key, Direct3D acceleration is probably enabled");
+        }
+        RegCloseKey(regKey);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Could not open Direct3D settings");
+    }
+
+    // DCI: Timeout == 0 disables DCI/DirectDraw acceleration; 7 is the
+    // fully-enabled default; anything else is enabled with a short timeout.
+    if (RegOpenKeyEx(
+                     HKEY_LOCAL_MACHINE,
+                     _T(
+                        "SYSTEM\\CurrentControlSet\\Control\\GraphicsDrivers\\DCI"),
+                     0, KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have found the registry key.
+        value = 0;
+        valueLength = sizeof(value);
+        if (RegQueryValueEx(regKey, _T("Timeout"), NULL, NULL, (BYTE*) &value,
+                            &valueLength) == ERROR_SUCCESS)
+        {
+            if (value == 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DCI - DirectDraw acceleration is disabled");
+                dci = false;
+            }
+            else if (value == 7)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DCI is fully enabled");
+            }
+            else
+            {
+                WEBRTC_TRACE(
+                             kTraceWarning,
+                             kTraceVideo,
+                             -1,
+                             "DCI - DirectDraw acceleration is enabled, but short timeout: %d",
+                             value);
+            }
+        }
+        else
+        {
+            // Could not get the value for this one.
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Could not find Timeout key");
+        }
+        RegCloseKey(regKey);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Could not open DCI settings");
+    }
+
+    // We don't care about Direct3D right now...
+    if (dci == false || directDraw == false)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// Looks up the active display driver in the registry and reports whether
+// full graphics acceleration is enabled; if it is not, one attempt is made
+// to switch it to full (Acceleration.Level == 0) and the result is verified.
+// NOTE: badDriver is only ever reset to false here — no code path in this
+// function sets it, matching the original behavior.
+void VideoRenderWindowsImpl::CheckHWDriver(bool& badDriver,
+                                           bool& fullAccelerationEnabled)
+{
+    HKEY regKey;
+
+    // Assume the best.
+    badDriver = false;
+    fullAccelerationEnabled = true;
+
+    // Find the registry path of the currently used display driver.
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, _T("HARDWARE\\DEVICEMAP\\VIDEO"), 0,
+                     KEY_QUERY_VALUE, &regKey) != ERROR_SUCCESS)
+    {
+        return;
+    }
+
+    TCHAR driverPath[512];
+    memset(driverPath, 0, sizeof(driverPath));
+    // Bug fix: RegQueryValueEx takes the buffer size in *bytes*. The old
+    // code passed 512, which is only half the buffer in a _UNICODE build.
+    DWORD driverPathLen = sizeof(driverPath);
+
+    long retVal = RegQueryValueEx(regKey, _T("\\Device\\Video0"), NULL,
+                                  NULL, (BYTE*) driverPath, &driverPathLen);
+
+    // Close the key...
+    RegCloseKey(regKey);
+
+    if (retVal != ERROR_SUCCESS)
+    {
+        return;
+    }
+
+    // We have the path to the currently used video card.
+    // Convert it into a form RegOpenKeyEx accepts:
+    //   - drop the leading "\Registry\Machine\" (18 characters)
+    //   - double every '\' (as the original code did)
+    // Result is something like:
+    // System\\CurrentControlSet\\Control\\Video\\{GUID}\\0000
+    TCHAR trueDriverPath[512];
+    memset(trueDriverPath, 0, sizeof(trueDriverPath));
+    int idx = 0;
+    for (DWORD i = 18; i < (driverPathLen / sizeof(TCHAR)); i++)
+    {
+        // Bug fix: bound the copy — a doubled backslash writes two chars,
+        // and the last slot is reserved for the terminating NUL.
+        if (idx >= 509)
+            break;
+        trueDriverPath[idx++] = driverPath[i];
+        if (driverPath[i] == _T('\\'))
+        {
+            trueDriverPath[idx++] = driverPath[i];
+        }
+    }
+
+    // Open the driver's own key, read-only.
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, trueDriverPath, 0,
+                     KEY_QUERY_VALUE, &regKey) != ERROR_SUCCESS)
+    {
+        return;
+    }
+
+    TCHAR driverName[64];
+    memset(driverName, 0, sizeof(driverName));
+    DWORD driverNameLength = sizeof(driverName); // bytes, see above
+    retVal = RegQueryValueEx(regKey, _T("drv"), NULL, NULL,
+                             (BYTE*) driverName, &driverNameLength);
+    if (retVal == ERROR_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Graphics card driver name: %s", driverName);
+    }
+
+    DWORD accLevel = 0;
+    DWORD accLevelS = sizeof(accLevel);
+    // Don't care if the key is not found. It probably means that
+    // acceleration is enabled (accLevel stays 0).
+    RegQueryValueEx(regKey, _T("Acceleration.Level"), NULL, NULL,
+                    (LPBYTE) &accLevel, &accLevelS);
+    RegCloseKey(regKey);
+
+    if (accLevel == 0)
+    {
+        fullAccelerationEnabled = true;
+        return;
+    }
+
+    // Acceleration is reduced; try setting it back to full (0).
+    // Bug fix: the old code closed the same (already closed / never opened)
+    // handle again after a failed KEY_SET_VALUE open.
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, trueDriverPath, 0,
+                     KEY_SET_VALUE, &regKey) != ERROR_SUCCESS)
+    {
+        fullAccelerationEnabled = false;
+        return;
+    }
+
+    accLevel = 0;
+    if (RegSetValueEx(regKey, _T("Acceleration.Level"), NULL, REG_DWORD,
+                      (PBYTE) &accLevel, sizeof(DWORD)) != ERROR_SUCCESS)
+    {
+        fullAccelerationEnabled = false;
+    }
+    else
+    {
+        // Read back to verify the write actually took effect.
+        accLevelS = sizeof(accLevel);
+        RegQueryValueEx(regKey, _T("Acceleration.Level"), NULL, NULL,
+                        (LPBYTE) &accLevel, &accLevelS);
+        fullAccelerationEnabled = (accLevel == 0);
+    }
+
+    RegCloseKey(regKey);
+}
+
+} //namespace webrtc
+
diff --git a/trunk/src/modules/video_render/main/source/windows/video_render_windows_impl.h b/trunk/src/modules/video_render/main/source/windows/video_render_windows_impl.h
new file mode 100644
index 0000000..bdc0a0a
--- /dev/null
+++ b/trunk/src/modules/video_render/main/source/windows/video_render_windows_impl.h
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+
+#include <Winerror.h>
+#include <dxdiag.h>
+
+#include "i_video_render.h"
+#include "i_video_render_win.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// Expands a TCHAR array into the "buffer, element-count" argument pair
+// expected by GetStringValue().
+#define EXPAND(x)            x, sizeof(x)/sizeof(TCHAR)
+
+#pragma comment(lib, "dxguid.lib")
+
+// The two Windows rendering back ends this module can select between.
+enum VideoRenderWinMethod
+{
+    kVideoRenderWinDd = 0, kVideoRenderWinD3D9 = 1
+};
+
+// Class definitions
+// Windows implementation of IVideoRender. Chooses a rendering back end
+// (VideoRenderWinMethod) and delegates the actual rendering work to an
+// IVideoRenderWin instance (_ptrRendererWin).
+// NOTE(review): "class X: IVideoRender" inherits privately by default —
+// confirm this is intentional rather than a missing "public".
+class VideoRenderWindowsImpl: IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderWindowsImpl(const WebRtc_Word32 id,
+                           const VideoRenderType videoRenderType,
+                           void* window, const bool fullscreen);
+
+    virtual ~VideoRenderWindowsImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    // NOTE: "Perfered" is a historical misspelling of "Preferred", kept
+    // because the name is part of the IVideoRender interface.
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    // Registry-based checks for hardware acceleration; usable without an
+    // instance (no member state is touched).
+    static int CheckHWAcceleration();
+    static void CheckHWDriver(bool& badDriver, bool& fullAccelerationEnabled);
+
+private:
+
+    // DxDiag helpers used for logging hardware details at init time.
+    void LogOSAndHardwareDetails();
+    HRESULT GetBoolValue(IDxDiagContainer* pObject, WCHAR* wstrName,
+                         BOOL* pbValue);
+    HRESULT GetStringValue(IDxDiagContainer* pObject, WCHAR* wstrName,
+                           TCHAR* strValue, int nStrLen);
+
+    WebRtc_Word32 _id;                              // module instance id
+    CriticalSectionWrapper& _renderWindowsCritsect; // guards this object
+
+    void* _prtWindow;  // native window handle supplied by the caller
+    bool _fullscreen;  // render fullscreen instead of into _prtWindow
+
+    VideoRenderWinMethod _renderMethod; // selected back end (DDraw/D3D9)
+    IVideoRenderWin* _ptrRendererWin;   // back-end renderer doing the work
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
diff --git a/trunk/src/modules/video_render/main/test/testAPI/renderStartImage.bmp b/trunk/src/modules/video_render/main/test/testAPI/renderStartImage.bmp
new file mode 100644
index 0000000..c443a58
--- /dev/null
+++ b/trunk/src/modules/video_render/main/test/testAPI/renderStartImage.bmp
Binary files differ
diff --git a/trunk/src/modules/video_render/main/test/testAPI/testAPI.cc b/trunk/src/modules/video_render/main/test/testAPI/testAPI.cc
new file mode 100644
index 0000000..607a866
--- /dev/null
+++ b/trunk/src/modules/video_render/main/test/testAPI/testAPI.cc
@@ -0,0 +1,680 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testAPI.h"
+
+#include <stdio.h>
+
+#if defined(_WIN32)
+#include <tchar.h>
+#include <windows.h>
+#include <cassert>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <windows.h>
+#include <ddraw.h>
+
+#elif defined(WEBRTC_LINUX)
+
+#include <errno.h>
+#include <iostream>
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <sys/time.h>
+
+#endif
+
+#include "common_types.h"
+#include "process_thread.h"
+#include "module_common_types.h"
+#include "video_render_defines.h"
+#include "video_render.h"
+#include "tick_util.h"
+#include "trace.h"
+
+using namespace webrtc;
+
+void GetTestVideoFrame(WebRtc_UWord8* frame,
+                       WebRtc_Word32 width,
+                       WebRtc_Word32 height,
+                       WebRtc_UWord8 startColor);
+int TestSingleStream(VideoRender* renderModule);
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType);
+int TestBitmapText(VideoRender* renderModule);
+int TestMultipleStreams(VideoRender* renderModule);
+int TestExternalRender(VideoRender* renderModule);
+
+#define TEST_FRAME_RATE 30
+#define TEST_TIME_SECOND 5
+#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
+#define TEST_STREAM0_START_COLOR 0
+#define TEST_STREAM1_START_COLOR 64
+#define TEST_STREAM2_START_COLOR 128
+#define TEST_STREAM3_START_COLOR 192
+
+#if defined(_WIN32) && defined(_DEBUG)
+//    #include "vld.h"
+#define SLEEP(x) ::Sleep(x)
+#elif defined(WEBRTC_LINUX)
+
+#define GET_TIME_IN_MS timeGetTime()
+#define SLEEP(x) Sleep(x)
+
+// Millisecond sleep for Linux, mirroring the Win32 Sleep() used elsewhere.
+void Sleep(unsigned long x)
+{
+    timespec t;
+    t.tv_sec = x/1000;
+    t.tv_nsec = (x-(x/1000)*1000)*1000000;
+    // Bug fix: nanosleep() can return early with EINTR when a signal
+    // arrives; retry with the remaining time so the full duration elapses.
+    while (nanosleep(&t, &t) == -1 && errno == EINTR)
+    {
+    }
+}
+
+// Linux stand-in for the Win32 timeGetTime(): current wall-clock time,
+// expressed in milliseconds.
+unsigned long timeGetTime()
+{
+    struct timeval now;
+    struct timezone zone;
+
+    gettimeofday(&now, &zone);
+    return now.tv_sec * 1000 + now.tv_usec / 1000;
+}
+
+#elif defined(WEBRTC_MAC_INTEL)
+
+#include <unistd.h>
+
+#define GET_TIME_IN_MS timeGetTime()
+#define SLEEP(x) usleep(x * 1000)
+
+// Stub for the Mac build: always returns 0, so GET_TIME_IN_MS yields no real
+// timestamps on this platform. (Per the note below, the Mac test code lives
+// in testApi_mac.mm.)
+unsigned long timeGetTime()
+{
+    return 0;
+}
+
+#else
+
+#define GET_TIME_IN_MS ::timeGetTime()
+#define SLEEP(x) ::Sleep(x)
+
+#endif
+
+using namespace std;
+
+#if defined(_WIN32)
+// Minimal window procedure for the test windows. No message gets special
+// treatment — WM_DESTROY and WM_COMMAND were matched but handled as no-ops
+// — so everything is forwarded to the default window procedure.
+LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
+{
+    return DefWindowProc(hWnd,uMsg,wParam,lParam);
+}
+
+// Creates and shows one Win32 test window of the given size, returning its
+// handle through hwndMain. Returns 0 on success, -1 on failure.
+// NOTE: winNum is unused on Windows (it positions windows on Linux).
+int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
+{
+    HINSTANCE hinst = GetModuleHandle(0);
+    WNDCLASSEX wcx;
+    wcx.hInstance = hinst;
+    wcx.lpszClassName = TEXT("VideoRenderTest");
+    wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc;
+    wcx.style = CS_DBLCLKS;
+    wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
+    wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
+    wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
+    wcx.lpszMenuName = NULL;
+    wcx.cbSize = sizeof (WNDCLASSEX);
+    wcx.cbClsExtra = 0;
+    wcx.cbWndExtra = 0;
+    wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
+
+    // Register our window class with the operating system.
+    // If there is an error, exit program.
+    if ( !RegisterClassEx (&wcx) )
+    {
+        MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR );
+        return 0;
+    }
+
+    // Create the main window.
+    hwndMain = CreateWindowEx(
+            0, // no extended styles
+            TEXT("VideoRenderTest"), // class name
+            TEXT("VideoRenderTest Window"), // window name
+            WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
+            800, // horizontal position
+            0, // vertical position
+            width, // width
+            height, // height
+            (HWND) NULL, // no parent or owner window
+            (HMENU) NULL, // class menu used
+            hinst, // instance handle
+            NULL); // no window creation data
+
+    if (!hwndMain)
+    {
+        // Report the Win32 error code so the failure can be diagnosed
+        // (previously it was fetched into a local and never used).
+        printf("CreateWindowEx failed, error %d\n", (int) GetLastError());
+        return -1;
+    }
+
+    // Show the window using the flag specified by the program
+    // that started the application, and send the application
+    // a WM_PAINT message.
+
+    ShowWindow(hwndMain, SW_SHOWDEFAULT);
+    UpdateWindow(hwndMain);
+    return 0;
+}
+
+#elif defined(WEBRTC_LINUX)
+
+// Creates one X11 test window of the given size, maps it, and blocks until
+// the server delivers its MapNotify event. winNum (0 or 1) picks the window
+// position and title. The created Window and Display are returned through
+// the out-params; always returns 0.
+int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int width, int height) // unsigned char* title, int titleLength)
+
+{
+    int screen, xpos = 10, ypos = 10;
+    XEvent evnt;
+    XSetWindowAttributes xswa; // window attribute struct
+    XVisualInfo vinfo; // screen visual info struct
+    unsigned long mask; // attribute mask
+
+    // get connection handle to xserver
+    // NOTE(review): XOpenDisplay can return NULL (no X server); the result
+    // is used unchecked below — confirm test environments always have X.
+    Display* _display = XOpenDisplay( NULL );
+
+    // get screen number
+    screen = DefaultScreen(_display);
+
+    // put desired visual info for the screen in vinfo (24-bit TrueColor)
+    if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 )
+    {
+        //printf( "Screen visual info match!\n" );
+    }
+
+    // set window attributes
+    xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone);
+    xswa.event_mask = StructureNotifyMask | ExposureMask;
+    xswa.background_pixel = 0;
+    xswa.border_pixel = 0;
+
+    // value mask for attributes
+    mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;
+
+    // window 0 and 1 get fixed positions so they do not overlap on screen
+    switch( winNum )
+    {
+        case 0:
+        xpos = 200;
+        ypos = 200;
+        break;
+        case 1:
+        xpos = 300;
+        ypos = 200;
+        break;
+        default:
+        break;
+    }
+
+    // create a subwindow for parent (defroot)
+    Window _window = XCreateWindow(_display, DefaultRootWindow(_display),
+            xpos, ypos,
+            width,
+            height,
+            0, vinfo.depth,
+            InputOutput,
+            vinfo.visual,
+            mask, &xswa);
+
+    // Set window name
+    if( winNum == 0 )
+    {
+        XStoreName(_display, _window, "VE MM Local Window");
+        XSetIconName(_display, _window, "VE MM Local Window");
+    }
+    else if( winNum == 1 )
+    {
+        XStoreName(_display, _window, "VE MM Remote Window");
+        XSetIconName(_display, _window, "VE MM Remote Window");
+    }
+
+    // make x report events for mask
+    XSelectInput(_display, _window, StructureNotifyMask);
+
+    // map the window to the display
+    XMapWindow(_display, _window);
+
+    // wait for map event so the window is guaranteed visible before the
+    // renderer starts drawing into it
+    do
+    {
+        XNextEvent(_display, &evnt);
+    }
+    while (evnt.type != MapNotify || evnt.xmap.event != _window);
+
+    *outWindow = _window;
+    *outDisplay = _display;
+
+    return 0;
+}
+#endif  // LINUX
+
+// Note: Mac code is in testApi_mac.mm.
+
+// External-render callback used by the tests: counts delivered frames and
+// prints a heartbeat line every 100th frame.
+class MyRenderCallback: public VideoRenderCallback
+{
+public:
+    MyRenderCallback() : _cnt(0) {}
+
+    ~MyRenderCallback() {}
+
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame)
+    {
+        ++_cnt;
+        if (_cnt % 100 == 0)
+        {
+            printf("Render callback %d \n",_cnt);
+        }
+        return 0;
+    }
+
+    WebRtc_Word32 _cnt; // frames received so far
+};
+
+// Fills an I420 buffer (Y plane, then U, then V) with one flat value that
+// advances by one per call, so the rendered video shows a slow fade.
+// NOTE(review): 'color' is function-static, so 'startColor' only initializes
+// it on the very first call in the process; every later call — from any
+// stream — continues the same shared counter. Confirm this is the intended
+// behavior for the multi-stream tests.
+void GetTestVideoFrame(WebRtc_UWord8* frame,
+                       WebRtc_Word32 width,
+                       WebRtc_Word32 height,
+                       WebRtc_UWord8 startColor) {
+    // changing color
+    static WebRtc_UWord8 color = startColor;
+
+    // plane layout: Y is width*height bytes, U and V are a quarter each
+    WebRtc_UWord8* destY = frame;
+    WebRtc_UWord8* destU = &frame[width*height];
+    WebRtc_UWord8* destV = &frame[width*height*5/4];
+    //Y
+    for (WebRtc_Word32 y=0; y<(width*height); y++)
+    {
+      destY[y] = color;
+    }
+    //U
+    for (WebRtc_Word32 u=0; u<(width*height/4); u++)
+    {
+      destU[u] = color;
+    }
+    //V
+    for (WebRtc_Word32 v=0; v<(width*height/4); v++)
+    {
+      destV[v] = color;
+    }
+
+    // next call renders a slightly different shade (wraps at 255)
+    color++;
+}
+
+// Renders one full-window stream: adds stream 0, pushes TEST_FRAME_NUM
+// generated CIF (352x288) I420 frames at TEST_FRAME_RATE, then tears the
+// stream down. Returns 0; intermediate failures trip asserts (debug only).
+int TestSingleStream(VideoRender* renderModule) {
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    // (0,0)-(1,1) covers the whole window; zOrder 0
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+#ifndef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+    MyRenderCallback externalRender;
+    renderModule->AddExternalRenderCallback(streamId0, &externalRender);
+#endif
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+
+    // Loop through an I420 file and render each frame
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    // I420 frame size: 1.5 bytes per pixel (Y + quarter-size U and V)
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height);
+
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(videoFrame0.Buffer(), width, height, TEST_STREAM0_START_COLOR);
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        videoFrame0.SetLength(numBytes);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        // pace the frames at roughly TEST_FRAME_RATE fps
+        SLEEP(1000/TEST_FRAME_RATE);
+    }
+
+    videoFrame0.Free();
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+
+    return 0;
+}
+
+// Re-creates |renderModule| in fullscreen mode, runs the single-stream test
+// in it, and then replaces it again with a fresh windowed module. Note that
+// the caller's module pointer is intentionally swapped out on return.
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType) {
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
+
+    // Propagate the result of the embedded run (it was previously
+    // discarded; TestSingleStream currently always returns 0, so callers
+    // see no behavior change).
+    int error = TestSingleStream(renderModule);
+
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
+
+    return error;
+}
+
+// Windows-only: overlays a bitmap and a text string on a rendered stream,
+// then clears both overlays and tears the stream down. On all other
+// platforms the body is compiled out and the function just returns 0.
+int TestBitmapText(VideoRender* renderModule) {
+#if defined(WIN32)
+
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+    printf("Adding Bitmap\n");
+    // Black is used as the DirectDraw transparency color key.
+    DDCOLORKEY ColorKey; // black
+    ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
+    ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
+    // Loaded from the current working directory.
+    // NOTE(review): LoadImage returns NULL on failure and the result is
+    // passed to SetBitmap unchecked.
+    HBITMAP hbm = (HBITMAP)LoadImage(NULL,
+                                     (LPCTSTR)_T("renderStartImage.bmp"),
+                                     IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
+    // Bitmap covers the top-left 30% of the window.
+    renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
+                             0.3f);
+
+    printf("Adding Text\n");
+    // Red text on a black background.
+    renderModule->SetText(1, (WebRtc_UWord8*) "WebRtc Render Demo App", 20,
+                           RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
+                           1.0f);
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    assert(error == 0);
+
+    // Feed generated frames (CIF; I420 is 1.5 bytes per pixel).
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height);
+
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(videoFrame0.Buffer(), width, height, TEST_STREAM0_START_COLOR);
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        videoFrame0.SetLength(numBytes);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SLEEP(1000/TEST_FRAME_RATE);
+    }
+    videoFrame0.Free();
+    // Sleep and let all frames be rendered before closing
+    SLEEP(renderDelayMs*2);
+
+
+    // Shut down
+    printf("Closing...\n");
+    // Clear the overlays (presumably a NULL bitmap/text removes them --
+    // confirm against VideoRender::SetBitmap/SetText) before stopping.
+    ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
+    ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
+    renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
+    renderModule->SetText(1, NULL, 20, RGB(255,255,255),
+                    RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
+
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+#endif
+
+    return 0;
+}
+
+// Renders four generated streams simultaneously, one in each quadrant of
+// the window, to exercise multi-stream layout. Stream s uses stream id s.
+// (Rewritten from four copy-pasted blocks into data-driven loops; call
+// order and console output are unchanged.)
+int TestMultipleStreams(VideoRender* renderModule) {
+    const int kNumStreams = 4;
+    // Normalized {left, top, right, bottom} per stream: the four window
+    // quadrants with a small gap between them.
+    const float rects[kNumStreams][4] = {
+        {0.0f,  0.0f,  0.45f, 0.45f},
+        {0.55f, 0.0f,  1.0f,  0.45f},
+        {0.0f,  0.55f, 0.45f, 1.0f},
+        {0.55f, 0.55f, 1.0f,  1.0f}};
+    const WebRtc_UWord8 startColors[kNumStreams] = {
+        TEST_STREAM0_START_COLOR, TEST_STREAM1_START_COLOR,
+        TEST_STREAM2_START_COLOR, TEST_STREAM3_START_COLOR};
+
+    VideoRenderCallback* callbacks[kNumStreams];
+    for (int s = 0; s < kNumStreams; s++) {
+        printf("Add stream %d\n", s);
+        callbacks[s] = renderModule->AddIncomingRenderStream(
+            s, 0, rects[s][0], rects[s][1], rects[s][2], rects[s][3]);
+        assert(callbacks[s] != NULL);
+    }
+    for (int s = 0; s < kNumStreams; s++) {
+        assert(renderModule->StartRender(s) == 0);
+    }
+
+    // Feed generated frames (CIF; I420 is 1.5 bytes per pixel).
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height);
+
+    VideoFrame videoFrames[kNumStreams];
+    for (int s = 0; s < kNumStreams; s++) {
+        videoFrames[s].VerifyAndAllocate(numBytes);
+    }
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+
+    for (int i = 0; i < TEST_FRAME_NUM; i++) {
+        for (int s = 0; s < kNumStreams; s++) {
+            GetTestVideoFrame(videoFrames[s].Buffer(), width, height,
+                              startColors[s]);
+            // Ask the module to display the frame renderDelayMs from now.
+            videoFrames[s].SetRenderTime(TickTime::MillisecondTimestamp() +
+                                         renderDelayMs);
+            videoFrames[s].SetWidth(width);
+            videoFrames[s].SetHeight(height);
+            videoFrames[s].SetLength(numBytes);
+            callbacks[s]->RenderFrame(s, videoFrames[s]);
+        }
+        SLEEP(1000/TEST_FRAME_RATE);
+    }
+
+    for (int s = 0; s < kNumStreams; s++) {
+        videoFrames[s].Free();
+    }
+
+    // Shut down
+    printf("Closing...\n");
+    for (int s = 0; s < kNumStreams; s++) {
+        assert(renderModule->StopRender(s) == 0);
+        assert(renderModule->DeleteIncomingRenderStream(s) == 0);
+    }
+
+    return 0;
+}
+
+// Registers an external render callback and verifies that every frame fed
+// to the module's incoming stream is forwarded to it.
+int TestExternalRender(VideoRender* renderModule) {
+    // Stack allocation (consistent with TestSingleStream) instead of the
+    // previous new/delete pair; the callback only needs to outlive the
+    // stream, which it does.
+    MyRenderCallback externalRender;
+
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
+                                              1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+    assert(renderModule->AddExternalRenderCallback(streamId0,
+                                                   &externalRender) == 0);
+
+    assert(renderModule->StartRender(streamId0) == 0);
+
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32) (1.5 * width * height);
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+    int frameCount = TEST_FRAME_NUM;
+    for (int i=0; i<frameCount; i++) {
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs);
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SLEEP(33);
+    }
+
+    // Sleep and let all frames be rendered before closing
+    SLEEP(2*renderDelayMs);
+    videoFrame0.Free();
+
+    assert(renderModule->StopRender(streamId0) == 0);
+    assert(renderModule->DeleteIncomingRenderStream(streamId0) == 0);
+    // Every submitted frame must have reached the external callback.
+    assert(frameCount == externalRender._cnt);
+
+    return 0;
+}
+
+// Runs the full battery of render-module tests against |window|. When the
+// internal renderers are compiled out, the window type is forced to
+// external rendering.
+void RunVideoRenderTests(void* window, VideoRenderType windowType) {
+#ifndef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+    windowType = kRenderExternal;
+#endif
+
+    int myId = 12345;
+
+    // Create the render module
+    printf("Create render module\n");
+    VideoRender* renderModule = VideoRender::CreateVideoRender(myId,
+                                                               window,
+                                                               false,
+                                                               windowType);
+    assert(renderModule != NULL);
+
+    printf("#### TestSingleStream ####\n");
+    if (TestSingleStream(renderModule) != 0) {
+        printf ("TestSingleStream failed\n");
+    }
+
+    // Note: replaces renderModule with a freshly created instance.
+    printf("#### TestFullscreenStream ####\n");
+    if (TestFullscreenStream(renderModule, window, windowType) != 0) {
+        printf ("TestFullscreenStream failed\n");
+    }
+
+    printf("#### TestBitmapText ####\n");
+    if (TestBitmapText(renderModule) != 0) {
+        printf ("TestBitmapText failed\n");
+    }
+
+    printf("#### TestMultipleStreams ####\n");
+    if (TestMultipleStreams(renderModule) != 0) {
+        printf ("TestMultipleStreams failed\n");
+    }
+
+    printf("#### TestExternalRender ####\n");
+    if (TestExternalRender(renderModule) != 0) {
+        printf ("TestExternalRender failed\n");
+    }
+
+    // The module came from the factory, so destroy it through the factory
+    // (was previously a plain `delete renderModule`, inconsistent with
+    // every other create/destroy site in this file).
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = NULL;
+
+    printf("VideoRender unit tests passed.\n");
+}
+
+// Note: The Mac main is implemented in testApi_mac.mm.
+// Entry point for Windows (_tmain) and Linux (main). On Mac the whole body
+// is compiled out (!WEBRTC_MAC) and main() comes from testApi_mac.mm.
+#if defined(_WIN32)
+int _tmain(int argc, _TCHAR* argv[])
+#elif defined(WEBRTC_LINUX)
+int main(int argc, char* argv[])
+#endif
+#if !defined(WEBRTC_MAC)
+{
+    // Create a window for testing.
+    void* window = NULL;
+#if defined (_WIN32)
+    HWND testHwnd;
+    WebRtcCreateWindow(testHwnd, 0, 352, 288);
+    window = (void*)testHwnd;
+    VideoRenderType windowType = kRenderWindows;
+#elif defined(WEBRTC_LINUX)
+    Window testWindow;
+    Display* display;
+    WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
+    VideoRenderType windowType = kRenderX11;
+    window = (void*)testWindow;
+#endif // WEBRTC_LINUX
+
+    RunVideoRenderTests(window, windowType);
+    return 0;
+}
+#endif  // !WEBRTC_MAC
diff --git a/trunk/src/modules/video_render/main/test/testAPI/testAPI.h b/trunk/src/modules/video_render/main/test/testAPI/testAPI.h
new file mode 100644
index 0000000..e0e0631
--- /dev/null
+++ b/trunk/src/modules/video_render/main/test/testAPI/testAPI.h
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+
+#include "video_render_defines.h"
+
+void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
diff --git a/trunk/src/modules/video_render/main/test/testAPI/testAPI_mac.mm b/trunk/src/modules/video_render/main/test/testAPI/testAPI_mac.mm
new file mode 100644
index 0000000..2f836ab
--- /dev/null
+++ b/trunk/src/modules/video_render/main/test/testAPI/testAPI_mac.mm
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testAPI.h"
+
+#include <iostream>
+
+#import <Foundation/Foundation.h>
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+#import <QTKit/QTKit.h>
+#include <sys/time.h>
+
+#include "common_types.h"
+#import "modules/video_render/main/source/mac/cocoa_render_view.h"
+#include "module_common_types.h"
+#include "process_thread.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "video_render_defines.h"
+#include "video_render.h"
+
+using namespace webrtc;
+
+// Creates an NSWindow containing a CocoaRenderView and returns the view
+// through |cocoaRenderer|. |winNum| is accepted for signature parity with
+// the other platforms but is not used here. Always returns 0.
+// NOTE(review): the window and the view are alloc'd and never released
+// (pre-ARC) -- tolerable in a test binary, but it is a leak.
+int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
+{
+    // In Cocoa, rendering is not done directly to a window like in Windows and Linux.
+    // It is rendered to a subclass of NSOpenGLView.
+
+    // create cocoa container window, 20px larger than the view so the view
+    // gets a 10px border on each side
+    NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
+    NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame 
+                                                      styleMask:NSTitledWindowMask 
+                                                        backing:NSBackingStoreBuffered 
+                                                          defer:NO];
+    [outWindow orderOut:nil];
+    [outWindow setTitle:@"Cocoa Renderer"];
+    [outWindow setBackgroundColor:[NSColor blueColor]];
+
+    // create renderer and attach to window
+    NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
+    cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
+    [[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
+
+    [outWindow makeKeyAndOrderFront:NSApp];
+
+    return 0;
+}
+
+// Mac test entry point: creates the Cocoa render window and runs the render
+// test battery with the Cocoa renderer. main() implicitly returns 0.
+int main (int argc, const char * argv[]) {
+    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
+    [NSApplication sharedApplication];
+
+    CocoaRenderView* testWindow;
+    WebRtcCreateWindow(testWindow, 0, 352, 288);
+    VideoRenderType windowType = kRenderCocoa;
+    void* window = (void*)testWindow;
+
+    RunVideoRenderTests(window, windowType);
+
+    [pool release];
+}
diff --git a/trunk/src/system_wrappers/OWNERS b/trunk/src/system_wrappers/OWNERS
new file mode 100644
index 0000000..4091a93
--- /dev/null
+++ b/trunk/src/system_wrappers/OWNERS
@@ -0,0 +1,7 @@
+henrike@webrtc.org
+pwestin@webrtc.org
+perkj@webrtc.org
+henrika@webrtc.org
+henrikg@webrtc.org
+mflodman@webrtc.org
+niklas.enbom@webrtc.org
\ No newline at end of file
diff --git a/trunk/src/system_wrappers/interface/aligned_malloc.h b/trunk/src/system_wrappers/interface/aligned_malloc.h
new file mode 100644
index 0000000..c229435
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/aligned_malloc.h
@@ -0,0 +1,25 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
+
+#include <stddef.h>
+
+namespace webrtc
+{
+    // Allocates |size| bytes whose address is a multiple of |alignment|.
+    // Release the block with AlignedFree().
+    // NOTE(review): alignment is presumably required to be a power of two
+    // -- confirm against the implementation.
+    void* AlignedMalloc(
+        size_t size,
+        size_t alignment);
+    // Frees a block previously returned by AlignedMalloc().
+    void AlignedFree(
+        void* memBlock);
+}
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
diff --git a/trunk/src/system_wrappers/interface/atomic32_wrapper.h b/trunk/src/system_wrappers/interface/atomic32_wrapper.h
new file mode 100644
index 0000000..40862fb
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/atomic32_wrapper.h
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Atomic system independant 32-bit integer.
+// Note: uses full memory barriers.
+// Note: assumes 32-bit (or higher) system
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ATOMIC32_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ATOMIC32_WRAPPER_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+class Atomic32Impl;
+// Thread-safe 32-bit integer supporting atomic increment/decrement,
+// assignment, add/subtract and compare-exchange. See the file header for
+// the memory-barrier and platform assumptions.
+class Atomic32Wrapper
+{
+public:
+    Atomic32Wrapper(WebRtc_Word32 initialValue = 0);
+    ~Atomic32Wrapper();
+
+    // Prefix increment/decrement. (Presumably return the new value --
+    // confirm in the implementation.)
+    WebRtc_Word32 operator++();
+    WebRtc_Word32 operator--();
+
+    Atomic32Wrapper& operator=(const Atomic32Wrapper& rhs);
+    Atomic32Wrapper& operator=(WebRtc_Word32 rhs);
+
+    WebRtc_Word32 operator+=(WebRtc_Word32 rhs);
+    WebRtc_Word32 operator-=(WebRtc_Word32 rhs);
+
+    // Sets the value atomically to newValue if the value equals compare value.
+    // The function returns true if the exchange happened.
+    bool CompareExchange(WebRtc_Word32 newValue, WebRtc_Word32 compareValue);
+    WebRtc_Word32 Value() const;
+private:
+    // Disable the + and - operator since it's unclear what these operations
+    // should do.
+    Atomic32Wrapper operator+(const Atomic32Wrapper& rhs);
+    Atomic32Wrapper operator-(const Atomic32Wrapper& rhs);
+
+    // Postfix increment/decrement, declared private (and left undefined)
+    // to disable them.
+    WebRtc_Word32& operator++(int);
+    WebRtc_Word32& operator--(int);
+
+    // Cheshire cat to hide the implementation (faster than
+    // using virtual functions)
+    Atomic32Impl& _impl;
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ATOMIC32_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/condition_variable_wrapper.h b/trunk/src/system_wrappers/interface/condition_variable_wrapper.h
new file mode 100644
index 0000000..c040fbf
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/condition_variable_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONDITION_VARIABLE_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONDITION_VARIABLE_WRAPPER_H_
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class ConditionVariableWrapper
+{
+public:
+    // Factory method, constructor disabled.
+    static ConditionVariableWrapper* CreateConditionVariable();
+
+    virtual ~ConditionVariableWrapper() {}
+
+    // Atomically releases critSect and blocks the calling thread until some
+    // other thread calls Wake() or WakeAll(). (By condition-variable
+    // convention the lock is re-acquired before returning -- confirm in the
+    // platform implementations.)
+    virtual void SleepCS(CriticalSectionWrapper& critSect) = 0;
+
+    // Same as above, but gives up after maxTimeInMS milliseconds.
+    // NOTE(review): the bool presumably distinguishes woken (true) from
+    // timed out (false) -- confirm in the platform implementations.
+    virtual bool SleepCS(CriticalSectionWrapper& critSect,
+                         unsigned long maxTimeInMS) = 0;
+
+    // Wakes one thread calling SleepCS().
+    virtual void Wake() = 0;
+
+    // Wakes all threads calling SleepCS().
+    virtual void WakeAll() = 0;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONDITION_VARIABLE_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/constructor_magic.h b/trunk/src/system_wrappers/interface/constructor_magic.h
new file mode 100644
index 0000000..b2aabc5
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/constructor_magic.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * WebRtc
+ * Copy from third_party/libjingle/source/talk/base/constructormagic.h
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONSTRUCTOR_MAGIC_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONSTRUCTOR_MAGIC_H_
+
+#ifndef DISALLOW_ASSIGN
+// A macro to disallow the assignment operator. Declares it without defining
+// it, so any use fails at link time (or at compile time from outside the
+// class when placed in a private: section).
+#define DISALLOW_ASSIGN(TypeName) \
+  void operator=(const TypeName&)
+#endif
+
+#ifndef DISALLOW_COPY_AND_ASSIGN
+// A macro to disallow the evil copy constructor and operator= functions
+// This should be used in the private: declarations for a class
+#define DISALLOW_COPY_AND_ASSIGN(TypeName)    \
+  TypeName(const TypeName&);                    \
+  DISALLOW_ASSIGN(TypeName)
+#endif
+
+#ifndef DISALLOW_EVIL_CONSTRUCTORS
+// Alternative, less-accurate legacy name.
+#define DISALLOW_EVIL_CONSTRUCTORS(TypeName) \
+  DISALLOW_COPY_AND_ASSIGN(TypeName)
+#endif
+
+#ifndef DISALLOW_IMPLICIT_CONSTRUCTORS
+// A macro to disallow all the implicit constructors, namely the
+// default constructor, copy constructor and operator= functions.
+//
+// This should be used in the private: declarations for a class
+// that wants to prevent anyone from instantiating it. This is
+// especially useful for classes containing only static methods.
+#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \
+  TypeName();                                    \
+  DISALLOW_EVIL_CONSTRUCTORS(TypeName)
+#endif
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONSTRUCTOR_MAGIC_H_
diff --git a/trunk/src/system_wrappers/interface/cpu_features_wrapper.h b/trunk/src/system_wrappers/interface/cpu_features_wrapper.h
new file mode 100644
index 0000000..d949592
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/cpu_features_wrapper.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_FEATURES_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_FEATURES_WRAPPER_H_
+
+#if defined(__cplusplus) || defined(c_plusplus)
+extern "C" {
+#endif
+
+#include <typedefs.h>
+
+// List of features in x86.
+typedef enum {
+  kSSE2,
+  kSSE3
+} CPUFeature;
+
+// List of features in ARM.
+enum {
+  kCPUFeatureARMv7       = (1 << 0),
+  kCPUFeatureVFPv3       = (1 << 1),
+  kCPUFeatureNEON        = (1 << 2),
+  kCPUFeatureLDREXSTREX  = (1 << 3)
+};
+
+typedef int (*WebRtc_CPUInfo)(CPUFeature feature);
+// returns true if the CPU supports the feature.
+extern WebRtc_CPUInfo WebRtc_GetCPUInfo;
+// No CPU feature is available => straight C path.
+extern WebRtc_CPUInfo WebRtc_GetCPUInfoNoASM;
+
+// Return the features in an ARM device.
+// It detects the features in the hardware platform, and returns supported 
+// values in the above enum definition as a bitmask.
+extern uint64_t WebRtc_GetCPUFeaturesARM(void);
+
+#if defined(__cplusplus) || defined(c_plusplus)
+}    // extern "C"
+#endif
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_FEATURES_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/cpu_info.h b/trunk/src/system_wrappers/interface/cpu_info.h
new file mode 100644
index 0000000..a6da29f
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/cpu_info.h
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_INFO_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_INFO_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+// Static-only helper reporting the number of CPU cores on the machine.
+class CpuInfo
+{
+public:
+    // Returns the core count. (Presumably cached in _numberOfCores after
+    // the first call -- confirm in the implementation.)
+    static WebRtc_UWord32 DetectNumberOfCores();
+
+private:
+    CpuInfo() {}  // not instantiable; all members are static
+    static WebRtc_UWord32 _numberOfCores;
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_INFO_H_
diff --git a/trunk/src/system_wrappers/interface/cpu_wrapper.h b/trunk/src/system_wrappers/interface/cpu_wrapper.h
new file mode 100644
index 0000000..d938741
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/cpu_wrapper.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_WRAPPER_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+// Platform-independent interface for sampling CPU load, created through the
+// CreateCpu() factory.
+class CpuWrapper
+{
+public:
+    static CpuWrapper* CreateCpu();
+    virtual ~CpuWrapper() {}
+
+    // Returns the average CPU usage for all processors. The CPU usage can be
+    // between and including 0 to 100 (%)
+    virtual WebRtc_Word32 CpuUsage() = 0;
+    // Usage for a named process / a process id, same 0-100 scale.
+    virtual WebRtc_Word32 CpuUsage(WebRtc_Word8* processName,
+                                   WebRtc_UWord32 length) = 0;
+    virtual WebRtc_Word32 CpuUsage(WebRtc_UWord32  dwProcessID) = 0;
+
+    // The CPU usage per core is returned in cpu_usage. The CPU can be between
+    // and including 0 to 100 (%)
+    // Note that the pointer passed as cpu_usage is redirected to a local member
+    // of the CPU wrapper.
+    // numCores is the number of cores in the cpu_usage array.
+    // The return value is -1 for failure or 0-100, indicating the average
+    // CPU usage across all cores.
+    // Note: on some OSs this class is initialized lazy. This means that it
+    // might not yet be possible to retrieve any CPU metrics. When this happens
+    // the return value will be zero (indicating that there is not a failure),
+    // numCores will be 0 and cpu_usage will be set to NULL (indicating that
+    // no metrics are available yet). Once the initialization is completed,
+    // which can take in the order of seconds, CPU metrics can be retrieved.
+    virtual WebRtc_Word32 CpuUsageMultiCore(WebRtc_UWord32& numCores,
+                                            WebRtc_UWord32*& cpu_usage) = 0;
+
+    virtual void Reset() = 0;
+    virtual void Stop() = 0;
+
+protected:
+    CpuWrapper() {}  // use CreateCpu()
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/critical_section_wrapper.h b/trunk/src/system_wrappers/interface/critical_section_wrapper.h
new file mode 100644
index 0000000..cfec9ae
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/critical_section_wrapper.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CRITICAL_SECTION_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CRITICAL_SECTION_WRAPPER_H_
+
+// If the critical section is heavily contended it may be beneficial to use
+// read/write locks instead.
+
+#include "common_types.h"
+
+namespace webrtc {
+// Platform-independent critical section (mutex), created via the factory.
+class CriticalSectionWrapper
+{
+public:
+    // Factory method, constructor disabled
+    static CriticalSectionWrapper* CreateCriticalSection();
+
+    virtual ~CriticalSectionWrapper() {}
+
+    // Tries to grab lock, beginning of a critical section. Will wait for the
+    // lock to become available if the grab failed.
+    virtual void Enter() = 0;
+
+    // Returns a grabbed lock, end of critical section.
+    virtual void Leave() = 0;
+};
+
+// RAII extension of the critical section. Prevents Enter/Leave mismatches and
+// provides more compact critical section syntax.
+class CriticalSectionScoped
+{
+public:
+    // Deprecated, don't add more users of this constructor.
+    // TODO(mflodman) Remove this version of the constructor when no one is
+    // using it any longer.
+    explicit CriticalSectionScoped(CriticalSectionWrapper& critsec)
+        : _ptrCritSec(&critsec)
+    {
+        _ptrCritSec->Enter();
+    }
+
+    // NOTE(review): unlike the destructor, this constructor does not guard
+    // against a NULL critsec; passing NULL crashes here in Enter().
+    explicit CriticalSectionScoped(CriticalSectionWrapper* critsec)
+        : _ptrCritSec(critsec)
+    {
+      _ptrCritSec->Enter();
+    }
+
+    // Leaves the section unless it was already left (pointer cleared).
+    ~CriticalSectionScoped()
+    {
+        if (_ptrCritSec)
+        {
+            Leave();
+        }
+    }
+
+private:
+    // Leaves the section and clears the pointer so the destructor does not
+    // leave twice.
+    void Leave()
+    {
+        _ptrCritSec->Leave();
+        _ptrCritSec = 0;
+    }
+
+    CriticalSectionWrapper* _ptrCritSec;
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CRITICAL_SECTION_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/data_log.h b/trunk/src/system_wrappers/interface/data_log.h
new file mode 100644
index 0000000..6fc1d64
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/data_log.h
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This singleton can be used for logging data for offline processing. Data
+ * logged with it can conveniently be parsed and processed with e.g. Matlab.
+ *
+ * Following is an example of the log file format, starting with the header
+ * row at line 1, and the data rows following.
+ * col1,col2,col3,multi-value-col4[3],,,col5
+ * 123,10.2,-243,1,2,3,100
+ * 241,12.3,233,1,2,3,200
+ * 13,16.4,-13,1,2,3,300
+ *
+ * As can be seen in the example, a multi-value-column is specified with the
+ * name followed by the number of elements it contains. This is followed by
+ * number of elements - 1 empty columns.
+ *
+ * Without multi-value-columns this format can be read natively by Matlab. With
+ * multi-value-columns a small Matlab script is needed, available at
+ * trunk/tools/matlab/parseLog.m.
+ *
+ * Table names and column names are case sensitive.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_H_
+
+#include <string>
+
+#include "data_log_impl.h"
+
+namespace webrtc {
+
+class DataLog {
+ public:
+  // Creates a log which uses a separate thread (referred to as the file
+  // writer thread) for writing log rows to file.
+  //
+  // Calls to this function after the log object has been created will only
+  // increment the reference counter.
+  static int CreateLog();
+
+  // Decrements the reference counter and deletes the log when the counter
+  // reaches 0. Should be called equal number of times as successful calls to
+  // CreateLog or memory leak will occur.
+  static void ReturnLog();
+
+  // Combines the string table_name and the integer table_id into a new string
+  // table_name + _ + table_id. The new string will be lower-case.
+  static std::string Combine(const std::string& table_name, int table_id);
+
+  // Adds a new table, with the name table_name, and creates the file, with the
+  // name table_name + ".txt", to which the table will be written.
+  // table_name is treated in a case sensitive way.
+  static int AddTable(const std::string& table_name);
+
+  // Adds a new column to a table. The column will be a multi-value-column
+  // if multi_value_length is greater than 1.
+  // table_name and column_name are treated in a case sensitive way.
+  static int AddColumn(const std::string& table_name,
+                       const std::string& column_name,
+                       int multi_value_length);
+
+  // Inserts a single value into a table with name table_name at the column with
+  // name column_name.
+  // Note that the ValueContainer makes use of the copy constructor,
+  // operator= and operator<< of the type T, and that the template type must
+  // implement a deep copy copy constructor and operator=.
+  // Copy constructor and operator= must not be disabled for the type T.
+  // table_name and column_name are treated in a case sensitive way.
+  template<class T>
+  static int InsertCell(const std::string& table_name,
+                        const std::string& column_name,
+                        T value) {
+    DataLogImpl* data_log = DataLogImpl::StaticInstance();
+    if (data_log == NULL)
+      return -1;
+    return data_log->InsertCell(
+             table_name,
+             column_name,
+             new ValueContainer<T>(value));
+  }
+
+  // Inserts an array of values into a table with name table_name at the
+  // column specified by column_name, which must be a multi-value-column.
+  // Note that the MultiValueContainer makes use of the copy constructor,
+  // operator= and operator<< of the type T, and that the template type
+  // must implement a deep copy copy constructor and operator=.
+  // Copy constructor and operator= must not be disabled for the type T.
+  // table_name and column_name are treated in a case sensitive way.
+  template<class T>
+  static int InsertCell(const std::string& table_name,
+                        const std::string& column_name,
+                        const T* array,
+                        int length) {
+    DataLogImpl* data_log = DataLogImpl::StaticInstance();
+    if (data_log == NULL)
+      return -1;
+    return data_log->InsertCell(
+             table_name,
+             column_name,
+             new MultiValueContainer<T>(array, length));
+  }
+
+  // For the table with name table_name: Writes the current row to file.
+  // Starts a new empty row.
+  // table_name is treated in a case-sensitive way.
+  static int NextRow(const std::string& table_name);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_H_
diff --git a/trunk/src/system_wrappers/interface/data_log_c.h b/trunk/src/system_wrappers/interface/data_log_c.h
new file mode 100644
index 0000000..fffbb4f
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/data_log_c.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is a pure C wrapper of the DataLog class. The functions are directly
+ * mapped here except for InsertCell as C does not support templates.
+ * See data_log.h for a description of the functions.
+ */
+
+#ifndef SRC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_C_H_
+#define SRC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_C_H_
+
+#include <stddef.h>  /* size_t */
+
+#include "typedefs.h"  /* NOLINT(build/include) */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * All char* parameters in this file are expected to be null-terminated
+ * character sequences.
+ */
+int WebRtcDataLog_CreateLog();
+void WebRtcDataLog_ReturnLog();
+char* WebRtcDataLog_Combine(char* combined_name, size_t combined_len,
+                            const char* table_name, int table_id);
+int WebRtcDataLog_AddTable(const char* table_name);
+int WebRtcDataLog_AddColumn(const char* table_name, const char* column_name,
+                            int multi_value_length);
+
+int WebRtcDataLog_InsertCell_int(const char* table_name,
+                                 const char* column_name,
+                                 int value);
+int WebRtcDataLog_InsertArray_int(const char* table_name,
+                                  const char* column_name,
+                                  const int* values,
+                                  int length);
+int WebRtcDataLog_InsertCell_float(const char* table_name,
+                                   const char* column_name,
+                                   float value);
+int WebRtcDataLog_InsertArray_float(const char* table_name,
+                                    const char* column_name,
+                                    const float* values,
+                                    int length);
+int WebRtcDataLog_InsertCell_double(const char* table_name,
+                                    const char* column_name,
+                                    double value);
+int WebRtcDataLog_InsertArray_double(const char* table_name,
+                                     const char* column_name,
+                                     const double* values,
+                                     int length);
+int WebRtcDataLog_InsertCell_int32(const char* table_name,
+                                   const char* column_name,
+                                   int32_t value);
+int WebRtcDataLog_InsertArray_int32(const char* table_name,
+                                    const char* column_name,
+                                    const int32_t* values,
+                                    int length);
+int WebRtcDataLog_InsertCell_uint32(const char* table_name,
+                                    const char* column_name,
+                                    uint32_t value);
+int WebRtcDataLog_InsertArray_uint32(const char* table_name,
+                                     const char* column_name,
+                                     const uint32_t* values,
+                                     int length);
+int WebRtcDataLog_InsertCell_int64(const char* table_name,
+                                   const char* column_name,
+                                   int64_t value);
+int WebRtcDataLog_InsertArray_int64(const char* table_name,
+                                    const char* column_name,
+                                    const int64_t* values,
+                                    int length);
+
+int WebRtcDataLog_NextRow(const char* table_name);
+
+#ifdef __cplusplus
+}  /* end of extern "C" */
+#endif
+
+#endif  /* SRC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_C_H_ */ /* NOLINT */
diff --git a/trunk/src/system_wrappers/interface/data_log_impl.h b/trunk/src/system_wrappers/interface/data_log_impl.h
new file mode 100644
index 0000000..cef4964
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/data_log_impl.h
@@ -0,0 +1,157 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the helper classes for the DataLog APIs. See data_log.h
+ * for the APIs.
+ *
+ * These classes are helper classes used for logging data for offline
+ * processing. Data logged with these classes can conveniently be parsed and
+ * processed with e.g. Matlab.
+ */
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_IMPL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_IMPL_H_
+
+#include <map>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "scoped_ptr.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EventWrapper;
+class LogTable;
+class RWLockWrapper;
+class ThreadWrapper;
+
+// All container classes need to implement a ToString-function to be
+// writable to file. Enforce this via the Container interface.
+class Container {
+ public:
+  virtual ~Container() {}
+
+  virtual void ToString(std::string* container_string) const = 0;
+};
+
+template<class T>
+class ValueContainer : public Container {
+ public:
+  explicit ValueContainer(T data) : data_(data) {}
+
+  virtual void ToString(std::string* container_string) const {
+    *container_string = "";
+    std::stringstream ss;
+    ss << data_ << ",";
+    ss >> *container_string;
+  }
+
+ private:
+  T   data_;
+};
+
+template<class T>
+class MultiValueContainer : public Container {
+ public:
+  MultiValueContainer(const T* data, int length)
+    : data_(data, data + length) {
+  }
+
+  virtual void ToString(std::string* container_string) const {
+    *container_string = "";
+    std::stringstream ss;
+    for (size_t i = 0; i < data_.size(); ++i)
+      ss << data_[i] << ",";
+    *container_string += ss.str();
+  }
+
+ private:
+  std::vector<T>  data_;
+};
+
+class DataLogImpl {
+ public:
+  ~DataLogImpl();
+
+  // The implementation of the CreateLog() method declared in data_log.h.
+  // See data_log.h for a description.
+  static int CreateLog();
+
+  // The implementation of the StaticInstance() method declared in data_log.h.
+  // See data_log.h for a description.
+  static DataLogImpl* StaticInstance();
+
+  // The implementation of the ReturnLog() method declared in data_log.h. See
+  // data_log.h for a description.
+  static void ReturnLog();
+
+  // The implementation of the AddTable() method declared in data_log.h. See
+  // data_log.h for a description.
+  int AddTable(const std::string& table_name);
+
+  // The implementation of the AddColumn() method declared in data_log.h. See
+  // data_log.h for a description.
+  int AddColumn(const std::string& table_name,
+                const std::string& column_name,
+                int multi_value_length);
+
+  // Inserts a Container into a table with name table_name at the column
+  // with name column_name.
+  // column_name is treated in a case sensitive way.
+  int InsertCell(const std::string& table_name,
+                 const std::string& column_name,
+                 const Container* value_container);
+
+  // The implementation of the NextRow() method declared in data_log.h. See
+  // data_log.h for a description.
+  int NextRow(const std::string& table_name);
+
+ private:
+  DataLogImpl();
+
+  // Initializes the DataLogImpl object, allocates and starts the
+  // thread file_writer_thread_.
+  int Init();
+
+  // Write all complete rows in every table to file.
+  // This function should only be called by the file_writer_thread_ if that
+  // thread is running to avoid race conditions.
+  void Flush();
+
+  // Run() is called by the thread file_writer_thread_.
+  static bool Run(void* obj);
+
+  // This function writes data to file. Note, it blocks if there is no data
+  // that should be written to file available. Flush is the non-blocking
+  // version of this function.
+  void Process();
+
+  // Stops the continuous calling of Process().
+  void StopThread();
+
+  // Collection of tables indexed by the table name as std::string.
+  typedef std::map<std::string, LogTable*> TableMap;
+  typedef webrtc::scoped_ptr<CriticalSectionWrapper> CritSectScopedPtr;
+
+  static CritSectScopedPtr  crit_sect_;
+  static DataLogImpl*       instance_;
+  int                       counter_;
+  TableMap                  tables_;
+  EventWrapper*             flush_event_;
+  ThreadWrapper*            file_writer_thread_;
+  RWLockWrapper*            tables_lock_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_IMPL_H_
diff --git a/trunk/src/system_wrappers/interface/event_wrapper.h b/trunk/src/system_wrappers/interface/event_wrapper.h
new file mode 100644
index 0000000..0c9a908
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/event_wrapper.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_WRAPPER_H_
+
+namespace webrtc {
+enum EventTypeWrapper
+{
+    kEventSignaled = 1,
+    kEventError = 2,
+    kEventTimeout = 3
+};
+
+#define WEBRTC_EVENT_10_SEC   10000
+#define WEBRTC_EVENT_INFINITE 0xffffffff
+
+class EventWrapper
+{
+public:
+    // Factory method. Constructor disabled.
+    static EventWrapper* Create();
+    virtual ~EventWrapper() {}
+
+    // Releases threads that are calling Wait() and have started waiting. Please
+    // note that a thread calling Wait() will not start waiting immediately.
+    // Assumptions to the contrary are a very common source of issues in
+    // multithreaded programming.
+    // Set is sticky in the sense that it will release at least one thread
+    // either immediately or some time in the future.
+    virtual bool Set() = 0;
+
+    // Prevents future Wait() calls from finishing without a new Set() call.
+    virtual bool Reset() = 0;
+
+    // Puts the calling thread into a wait state. The thread may be released
+    // by a Set() call depending on if other threads are waiting and if so on
+    // timing. The thread that was released will call Reset() before leaving
+    // preventing more threads from being released. If multiple threads
+    // are waiting for the same Set(), only one (random) thread is guaranteed to
+    // be released. It is possible that multiple (random) threads are released
+    // depending on timing.
+    virtual EventTypeWrapper Wait(unsigned long maxTime) = 0;
+
+    // Starts a timer that will call a non-sticky version of Set() either once
+    // or periodically. If the timer is periodic it ensures that there is no
+    // drift over time relative to the system clock.
+    virtual bool StartTimer(bool periodic, unsigned long time) = 0;
+
+    virtual bool StopTimer() = 0;
+
+    // Only implemented on Windows
+    // Returns 1 if a key has been pressed since last call to this function.
+    // -1 indicates failure
+    // 0 indicates no key has been pressed since last call
+    // TODO(hellner) this function does not seem to belong here
+    static int KeyPressed();
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/file_wrapper.h b/trunk/src/system_wrappers/interface/file_wrapper.h
new file mode 100644
index 0000000..4d17438
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/file_wrapper.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
+
+#include <stddef.h>
+
+#include "common_types.h"
+#include "typedefs.h"
+
+// Implementation of an InStream and OutStream that can read (exclusive) or
+// write from/to a file.
+
+namespace webrtc {
+
+class FileWrapper : public InStream, public OutStream
+{
+public:
+    static const size_t kMaxFileNameSize = 1024;
+
+    // Factory method. Constructor disabled.
+    static FileWrapper* Create();
+
+    // Returns true if a file has been opened.
+    virtual bool Open() const = 0;
+
+    // Opens a file in read or write mode, decided by the readOnly parameter.
+    virtual int OpenFile(const char* fileNameUTF8,
+                         bool readOnly,
+                         bool loop = false,
+                         bool text = false) = 0;
+
+    virtual int CloseFile() = 0;
+
+    // Limits the file size to |bytes|. Writing will fail after the cap
+    // is hit. Pass zero to use an unlimited size.
+    virtual int SetMaxFileSize(size_t bytes)  = 0;
+
+    // Flush any pending writes.
+    virtual int Flush() = 0;
+
+    // Returns the opened file's name in |fileNameUTF8|. Provide the size of
+    // the buffer in bytes in |size|. The name will be truncated if |size| is
+    // too small.
+    virtual int FileName(char* fileNameUTF8,
+                         size_t size) const = 0;
+
+    // Write |format| to the opened file. Arguments are taken in the same manner
+    // as printf. That is, supply a format string containing text and
+    // specifiers. Returns the number of characters written or -1 on error.
+    virtual int WriteText(const char* format, ...) = 0;
+
+    // Inherited from InStream.
+    // Reads |length| bytes from file to |buf|. Returns the number of bytes read
+    // or -1 on error.
+    virtual int Read(void* buf, int length) = 0;
+
+    // Inherited from OutStream.
+    // Writes |length| bytes from |buf| to file. The actual writing may happen
+    // some time later. Call Flush() to force a write.
+    virtual bool Write(const void *buf, int length) = 0;
+
+    // Inherited from both InStream and OutStream.
+    // Rewinds the file to the start. Only available when OpenFile() has been
+    // called with |loop| == true or |readOnly| == true.
+    virtual int Rewind() = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/fix_interlocked_exchange_pointer_win.h b/trunk/src/system_wrappers/interface/fix_interlocked_exchange_pointer_win.h
new file mode 100644
index 0000000..d85c724
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/fix_interlocked_exchange_pointer_win.h
@@ -0,0 +1,35 @@
+// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file under third_party_mods/chromium directory of
+// source tree or at
+// http://src.chromium.org/viewvc/chrome/trunk/src/LICENSE
+
+// Various inline functions and macros to fix compilation of 32 bit target
+// on MSVC with /Wp64 flag enabled.
+
+// The original code can be found here:
+// http://src.chromium.org/svn/trunk/src/base/fix_wp64.h
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_FIX_INTERLOCKED_EXCHANGE_POINTER_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_FIX_INTERLOCKED_EXCHANGE_POINTER_WINDOWS_H_
+
+#include <windows.h>
+
+// Platform SDK fixes when building with /Wp64 for a 32 bits target.
+#if !defined(_WIN64) && defined(_Wp64)
+
+#ifdef InterlockedExchangePointer
+#undef InterlockedExchangePointer
+// The problem is that the macro provided for InterlockedExchangePointer() is
+// doing a (LONG) C-style cast that triggers invariably the warning C4312 when
+// building on 32 bits.
+inline void* InterlockedExchangePointer(void* volatile* target, void* value) {
+  return reinterpret_cast<void*>(static_cast<LONG_PTR>(InterlockedExchange(
+      reinterpret_cast<volatile LONG*>(target),
+      static_cast<LONG>(reinterpret_cast<LONG_PTR>(value)))));
+}
+#endif  // #ifdef InterlockedExchangePointer
+
+#endif // #if !defined(_WIN64) && defined(_Wp64)
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_FIX_INTERLOCKED_EXCHANGE_POINTER_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/interface/list_wrapper.h b/trunk/src/system_wrappers/interface/list_wrapper.h
new file mode 100644
index 0000000..3608ada
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/list_wrapper.h
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_
+
+#include "constructor_magic.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class ListItem
+{
+friend class ListWrapper;
+
+public:
+    ListItem(const void* ptr);
+    ListItem(const unsigned int item);
+    virtual ~ListItem();
+    void* GetItem() const;
+    unsigned int GetUnsignedItem() const;
+
+protected:
+    ListItem* next_;
+    ListItem* prev_;
+
+private:
+    const void*         item_ptr_;
+    const unsigned int  item_;
+};
+
+class ListWrapper
+{
+public:
+    ListWrapper();
+    virtual ~ListWrapper();
+
+    // Returns the number of elements stored in the list.
+    unsigned int GetSize() const;
+
+    // Puts a pointer to anything last in the list.
+    int PushBack(const void* ptr);
+    // Puts a pointer to anything first in the list.
+    int PushFront(const void* ptr);
+
+    // Puts a copy of the specified integer last in the list.
+    int PushBack(const unsigned int item_id);
+    // Puts a copy of the specified integer first in the list.
+    int PushFront(const unsigned int item_id);
+
+    // Pops the first ListItem from the list
+    int PopFront();
+
+    // Pops the last ListItem from the list
+    int PopBack();
+
+    // Returns true if the list is empty
+    bool Empty() const;
+
+    // Returns a pointer to the first ListItem in the list.
+    ListItem* First() const;
+
+    // Returns a pointer to the last ListItem in the list.
+    ListItem* Last() const;
+
+    // Returns a pointer to the ListItem stored after item in the list.
+    ListItem* Next(ListItem* item) const;
+
+    // Returns a pointer to the ListItem stored before item in the list.
+    ListItem* Previous(ListItem* item) const;
+
+    // Removes item from the list.
+    int Erase(ListItem* item);
+
+    // Insert list item after existing_previous_item. Please note that new_item
+    // must be created using new ListItem(). The list will take ownership of
+    // new_item following a successful insert. If insert fails new_item will
+    // not be released by the List
+    int Insert(ListItem* existing_previous_item,
+               ListItem* new_item);
+
+    // Insert list item before existing_next_item. Please note that new_item
+    // must be created using new ListItem(). The list will take ownership of
+    // new_item following a successful insert. If insert fails new_item will
+    // not be released by the List
+    int InsertBefore(ListItem* existing_next_item,
+                     ListItem* new_item);
+
+private:
+    void PushBackImpl(ListItem* item);
+    void PushFrontImpl(ListItem* item);
+
+    CriticalSectionWrapper* critical_section_;
+    ListItem* first_;
+    ListItem* last_;
+    unsigned int size_;
+};
+} //namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/map_wrapper.h b/trunk/src/system_wrappers/interface/map_wrapper.h
new file mode 100644
index 0000000..7d4e733
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/map_wrapper.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_MAP_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_MAP_WRAPPER_H_
+
+#include <map>
+
+#include "constructor_magic.h"
+
+namespace webrtc {
+class MapItem
+{
+friend class MapWrapper;
+
+public:
+    MapItem(int id, void* ptr);
+    virtual ~MapItem();
+    void* GetItem();
+    int GetId();
+    unsigned int GetUnsignedId();
+    void SetItem(void* ptr);
+
+private:
+    int   item_id_;
+    void* item_pointer_;
+};
+
+class MapWrapper
+{
+public:
+    MapWrapper();
+    ~MapWrapper();
+
+    // Puts a pointer to anything in the map and associates it with id. Note, id
+    // needs to be unique for all items in the map.
+    int Insert(int id, void* ptr);
+
+    // Removes item from map.
+    int Erase(MapItem* item);
+
+    // Finds the item associated with id and removes it from the map.
+    int Erase(int id);
+
+    // Returns the number of elements stored in the map.
+    int Size() const;
+
+    // Returns a pointer to the first MapItem in the map.
+    MapItem* First() const;
+
+    // Returns a pointer to the last MapItem in the map.
+    MapItem* Last() const;
+
+    // Returns a pointer to the MapItem stored after item in the map.
+    MapItem* Next(MapItem* item) const;
+
+    // Returns a pointer to the MapItem stored before item in the map.
+    MapItem* Previous(MapItem* item) const;
+
+    // Returns a pointer to the MapItem associated with id from the map.
+    MapItem* Find(int id) const;
+
+private:
+    std::map<int, MapItem*>    map_;
+};
+} // namespace webrtc
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_MAP_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/ref_count.h b/trunk/src/system_wrappers/interface/ref_count.h
new file mode 100644
index 0000000..f90b0b3
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/ref_count.h
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SYSTEM_WRAPPERS_INTERFACE_REF_COUNT_H_
+#define SYSTEM_WRAPPERS_INTERFACE_REF_COUNT_H_
+
+#include "system_wrappers/interface/atomic32_wrapper.h"
+
+namespace webrtc {
+
+// This class can be used for instantiating
+// reference counted objects.
+// It implements int32_t AddRef() and int32_t Release().
+// Usage:
+// RefCountImpl<T>* implementation = new RefCountImpl<T>(p);
+//
+// Example:
+// class MyInterface {
+//  public:
+//   virtual void DoSomething() = 0;
+//   virtual int32_t AddRef() = 0;
+//   virtual int32_t Release() = 0;
+//  private:
+//   virtual ~MyInterface(){};
+// }
+// class MyImplementation : public MyInterface {
+//  public:
+//   virtual void DoSomething() { printf("hello"); }
+// };
+// MyImplementation* CreateMyImplementation() {
+//   RefCountImpl<MyImplementation>* implementation =
+//       new RefCountImpl<MyImplementation>();
+//   return implementation;
+// }
+
+template <class T>
+class RefCountImpl : public T {
+ public:
+  RefCountImpl() : ref_count_(0) {}
+
+  template<typename P>
+  explicit RefCountImpl(P p) : T(p), ref_count_(0) {}
+
+  template<typename P1, typename P2>
+  RefCountImpl(P1 p1, P2 p2) : T(p1, p2), ref_count_(0) {}
+
+  template<typename P1, typename P2, typename P3>
+  RefCountImpl(P1 p1, P2 p2, P3 p3) : T(p1, p2, p3), ref_count_(0) {}
+
+  template<typename P1, typename P2, typename P3, typename P4>
+  RefCountImpl(P1 p1, P2 p2, P3 p3, P4 p4) : T(p1, p2, p3, p4), ref_count_(0) {}
+
+  template<typename P1, typename P2, typename P3, typename P4, typename P5>
+  RefCountImpl(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5)
+      : T(p1, p2, p3, p4, p5), ref_count_(0) {}
+
+  virtual int32_t AddRef() {
+    return ++ref_count_;
+  }
+
+  virtual int32_t Release() {
+    int32_t ref_count;
+    ref_count = --ref_count_;
+    if (ref_count == 0)
+      delete this;
+    return ref_count;
+  }
+
+ protected:
+  Atomic32Wrapper ref_count_;
+};
+
+}  // namespace webrtc
+
+#endif  // SYSTEM_WRAPPERS_INTERFACE_REF_COUNT_H_
diff --git a/trunk/src/system_wrappers/interface/rw_lock_wrapper.h b/trunk/src/system_wrappers/interface/rw_lock_wrapper.h
new file mode 100644
index 0000000..f0842ac
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/rw_lock_wrapper.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
+
+// Note: Windows pre-Vista versions of RW locks are not supported natively. For
+// these OSs regular critical sections have been used to approximate RW lock
+// functionality and will therefore have worse performance.
+
+namespace webrtc {
+class RWLockWrapper
+{
+public:
+    static RWLockWrapper* CreateRWLock();
+    virtual ~RWLockWrapper();
+
+    virtual void AcquireLockExclusive() = 0;
+    virtual void ReleaseLockExclusive() = 0;
+
+    virtual void AcquireLockShared() = 0;
+    virtual void ReleaseLockShared() = 0;
+
+protected:
+    virtual int Init() = 0;
+};
+
+// RAII extensions of the RW lock. Prevents Acquire/Release mismatches and
+// provides more compact locking syntax.
+class ReadLockScoped
+{
+public:
+    ReadLockScoped(RWLockWrapper& rwLock)
+        :
+        _rwLock(rwLock)
+    {
+        _rwLock.AcquireLockShared();
+    }
+
+    ~ReadLockScoped()
+    {
+        _rwLock.ReleaseLockShared();
+    }
+
+private:
+    RWLockWrapper& _rwLock;
+};
+
+class WriteLockScoped
+{
+public:
+    WriteLockScoped(RWLockWrapper& rwLock)
+        :
+        _rwLock(rwLock)
+    {
+        _rwLock.AcquireLockExclusive();
+    }
+
+    ~WriteLockScoped()
+    {
+        _rwLock.ReleaseLockExclusive();
+    }
+
+private:
+    RWLockWrapper& _rwLock;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/scoped_ptr.h b/trunk/src/system_wrappers/interface/scoped_ptr.h
new file mode 100644
index 0000000..74b6ad3
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/scoped_ptr.h
@@ -0,0 +1,258 @@
+//  (C) Copyright Greg Colvin and Beman Dawes 1998, 1999.
+//  Copyright (c) 2001, 2002 Peter Dimov
+//
+//  Permission to copy, use, modify, sell and distribute this software
+//  is granted provided this copyright notice appears in all copies.
+//  This software is provided "as is" without express or implied
+//  warranty, and with no claim as to its suitability for any purpose.
+//
+//  See http://www.boost.org/libs/smart_ptr/scoped_ptr.htm for documentation.
+//
+
+//  scoped_ptr mimics a built-in pointer except that it guarantees deletion
+//  of the object pointed to, either on destruction of the scoped_ptr or via
+//  an explicit reset(). scoped_ptr is a simple solution for simple needs;
+//  use shared_ptr or std::auto_ptr if your needs are more complex.
+
+//  scoped_ptr_malloc added in by Google.  When one of
+//  these goes out of scope, instead of doing a delete or delete[], it
+//  calls free().  scoped_ptr_malloc<char> is likely to see much more
+//  use than any other specializations.
+
+//  release() added in by Google. Use this to conditionally
+//  transfer ownership of a heap-allocated object to the caller, usually on
+//  method success.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
+
+#include <assert.h>            // for assert
+#include <stdlib.h>            // for free() decl
+
+#include <cstddef>             // for std::ptrdiff_t
+
+#ifdef _WIN32
+namespace std { using ::ptrdiff_t; };
+#endif // _WIN32
+
+namespace webrtc {
+
+template <typename T>
+class scoped_ptr {
+ private:
+
+  T* ptr;
+
+  scoped_ptr(scoped_ptr const &);
+  scoped_ptr & operator=(scoped_ptr const &);
+
+ public:
+
+  typedef T element_type;
+
+  explicit scoped_ptr(T* p = NULL): ptr(p) {}
+
+  ~scoped_ptr() {
+    typedef char type_must_be_complete[sizeof(T)];
+    delete ptr;
+  }
+
+  void reset(T* p = NULL) {
+    typedef char type_must_be_complete[sizeof(T)];
+
+    if (ptr != p) {
+      T* obj = ptr;
+      ptr = p;
+      // Delete last, in case obj destructor indirectly results in ~scoped_ptr
+      delete obj;
+    }
+  }
+
+  T& operator*() const {
+    assert(ptr != NULL);
+    return *ptr;
+  }
+
+  T* operator->() const  {
+    assert(ptr != NULL);
+    return ptr;
+  }
+
+  T* get() const  {
+    return ptr;
+  }
+
+  void swap(scoped_ptr & b) {
+    T* tmp = b.ptr;
+    b.ptr = ptr;
+    ptr = tmp;
+  }
+
+  T* release() {
+    T* tmp = ptr;
+    ptr = NULL;
+    return tmp;
+  }
+
+  T** accept() {
+    if (ptr) {
+      delete ptr;
+      ptr = NULL;
+    }
+    return &ptr;
+  }
+
+  T** use() {
+    return &ptr;
+  }
+};
+
+template<typename T> inline
+void swap(scoped_ptr<T>& a, scoped_ptr<T>& b) {
+  a.swap(b);
+}
+
+
+
+
+//  scoped_array extends scoped_ptr to arrays. Deletion of the array pointed to
+//  is guaranteed, either on destruction of the scoped_array or via an explicit
+//  reset(). Use shared_array or std::vector if your needs are more complex.
+
+template<typename T>
+class scoped_array {
+ private:
+
+  T* ptr;
+
+  scoped_array(scoped_array const &);
+  scoped_array & operator=(scoped_array const &);
+
+ public:
+
+  typedef T element_type;
+
+  explicit scoped_array(T* p = NULL) : ptr(p) {}
+
+  ~scoped_array() {
+    typedef char type_must_be_complete[sizeof(T)];
+    delete[] ptr;
+  }
+
+  void reset(T* p = NULL) {
+    typedef char type_must_be_complete[sizeof(T)];
+
+    if (ptr != p) {
+      T* arr = ptr;
+      ptr = p;
+      // Delete last, in case arr destructor indirectly results in ~scoped_array
+      delete [] arr;
+    }
+  }
+
+  T& operator[](std::ptrdiff_t i) const {
+    assert(ptr != NULL);
+    assert(i >= 0);
+    return ptr[i];
+  }
+
+  T* get() const {
+    return ptr;
+  }
+
+  void swap(scoped_array & b) {
+    T* tmp = b.ptr;
+    b.ptr = ptr;
+    ptr = tmp;
+  }
+
+  T* release() {
+    T* tmp = ptr;
+    ptr = NULL;
+    return tmp;
+  }
+
+  T** accept() {
+    if (ptr) {
+      delete [] ptr;
+      ptr = NULL;
+    }
+    return &ptr;
+  }
+};
+
+template<class T> inline
+void swap(scoped_array<T>& a, scoped_array<T>& b) {
+  a.swap(b);
+}
+
+// scoped_ptr_malloc<> is similar to scoped_ptr<>, but it accepts a
+// second template argument, the function used to free the object.
+
+template<typename T, void (*FF)(void*) = free> class scoped_ptr_malloc {
+ private:
+
+  T* ptr;
+
+  scoped_ptr_malloc(scoped_ptr_malloc const &);
+  scoped_ptr_malloc & operator=(scoped_ptr_malloc const &);
+
+ public:
+
+  typedef T element_type;
+
+  explicit scoped_ptr_malloc(T* p = 0): ptr(p) {}
+
+  ~scoped_ptr_malloc() {
+    FF(static_cast<void*>(ptr));
+  }
+
+  void reset(T* p = 0) {
+    if (ptr != p) {
+      FF(static_cast<void*>(ptr));
+      ptr = p;
+    }
+  }
+
+  T& operator*() const {
+    assert(ptr != 0);
+    return *ptr;
+  }
+
+  T* operator->() const {
+    assert(ptr != 0);
+    return ptr;
+  }
+
+  T* get() const {
+    return ptr;
+  }
+
+  void swap(scoped_ptr_malloc & b) {
+    T* tmp = b.ptr;
+    b.ptr = ptr;
+    ptr = tmp;
+  }
+
+  T* release() {
+    T* tmp = ptr;
+    ptr = 0;
+    return tmp;
+  }
+
+  T** accept() {
+    if (ptr) {
+      FF(static_cast<void*>(ptr));
+      ptr = 0;
+    }
+    return &ptr;
+  }
+};
+
+template<typename T, void (*FF)(void*)> inline
+void swap(scoped_ptr_malloc<T,FF>& a, scoped_ptr_malloc<T,FF>& b) {
+  a.swap(b);
+}
+
+} // namespace webrtc
+
+#endif  // #ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
diff --git a/trunk/src/system_wrappers/interface/scoped_refptr.h b/trunk/src/system_wrappers/interface/scoped_refptr.h
new file mode 100644
index 0000000..0df15be
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/scoped_refptr.h
@@ -0,0 +1,137 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file under third_party_mods/chromium or at:
+// http://src.chromium.org/svn/trunk/src/LICENSE
+
+#ifndef SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
+#define SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
+
+namespace webrtc {
+
+// Extracted from Chromium's src/base/memory/ref_counted.h.
+
+//
+// A smart pointer class for reference counted objects.  Use this class instead
+// of calling AddRef and Release manually on a reference counted object to
+// avoid common memory leaks caused by forgetting to Release an object
+// reference.  Sample usage:
+//
+//   class MyFoo : public RefCounted<MyFoo> {
+//    ...
+//   };
+//
+//   void some_function() {
+//     scoped_refptr<MyFoo> foo = new MyFoo();
+//     foo->Method(param);
+//     // |foo| is released when this function returns
+//   }
+//
+//   void some_other_function() {
+//     scoped_refptr<MyFoo> foo = new MyFoo();
+//     ...
+//     foo = NULL;  // explicitly releases |foo|
+//     ...
+//     if (foo)
+//       foo->Method(param);
+//   }
+//
+// The above examples show how scoped_refptr<T> acts like a pointer to T.
+// Given two scoped_refptr<T> classes, it is also possible to exchange
+// references between the two objects, like so:
+//
+//   {
+//     scoped_refptr<MyFoo> a = new MyFoo();
+//     scoped_refptr<MyFoo> b;
+//
+//     b.swap(a);
+//     // now, |b| references the MyFoo object, and |a| references NULL.
+//   }
+//
+// To make both |a| and |b| in the above example reference the same MyFoo
+// object, simply use the assignment operator:
+//
+//   {
+//     scoped_refptr<MyFoo> a = new MyFoo();
+//     scoped_refptr<MyFoo> b;
+//
+//     b = a;
+//     // now, |a| and |b| each own a reference to the same MyFoo object.
+//   }
+//
+template <class T>
+class scoped_refptr {
+ public:
+  scoped_refptr() : ptr_(NULL) {
+  }
+
+  scoped_refptr(T* p) : ptr_(p) {
+    if (ptr_)
+      ptr_->AddRef();
+  }
+
+  scoped_refptr(const scoped_refptr<T>& r) : ptr_(r.ptr_) {
+    if (ptr_)
+      ptr_->AddRef();
+  }
+
+  template <typename U>
+  scoped_refptr(const scoped_refptr<U>& r) : ptr_(r.get()) {
+    if (ptr_)
+      ptr_->AddRef();
+  }
+
+  ~scoped_refptr() {
+    if (ptr_)
+      ptr_->Release();
+  }
+
+  T* get() const { return ptr_; }
+  operator T*() const { return ptr_; }
+  T* operator->() const { return ptr_; }
+
+  // Release a pointer.
+  // The return value is the current pointer held by this object.
+  // If this object holds a NULL pointer, the return value is NULL.
+  // After this operation, this object will hold a NULL pointer,
+  // and will not own the object any more.
+  T* release() {
+    T* retVal = ptr_;
+    ptr_ = NULL;
+    return retVal;
+  }
+
+  scoped_refptr<T>& operator=(T* p) {
+    // AddRef first so that self assignment should work
+    if (p)
+      p->AddRef();
+    if (ptr_ )
+      ptr_->Release();
+    ptr_ = p;
+    return *this;
+  }
+
+  scoped_refptr<T>& operator=(const scoped_refptr<T>& r) {
+    return *this = r.ptr_;
+  }
+
+  template <typename U>
+  scoped_refptr<T>& operator=(const scoped_refptr<U>& r) {
+    return *this = r.get();
+  }
+
+  void swap(T** pp) {
+    T* p = ptr_;
+    ptr_ = *pp;
+    *pp = p;
+  }
+
+  void swap(scoped_refptr<T>& r) {
+    swap(&r.ptr_);
+  }
+
+ protected:
+  T* ptr_;
+};
+}  // namespace webrtc
+
+#endif  // SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
diff --git a/trunk/src/system_wrappers/interface/sort.h b/trunk/src/system_wrappers/interface/sort.h
new file mode 100644
index 0000000..fb25ecf
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/sort.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Generic unstable sorting routines.
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SORT_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SORT_H_
+
+#include "typedefs.h"
+#include "common_types.h"
+
+namespace webrtc
+{
+    enum Type
+    {
+        TYPE_Word8,
+        TYPE_UWord8,
+        TYPE_Word16,
+        TYPE_UWord16,
+        TYPE_Word32,
+        TYPE_UWord32,
+        TYPE_Word64,
+        TYPE_UWord64,
+        TYPE_Float32,
+        TYPE_Float64
+    };
+    // Sorts intrinsic data types.
+    //
+    // data          [in/out] A pointer to an array of intrinsic type.
+    //               Upon return it will be sorted in ascending order.
+    // numOfElements The number of elements in the array.
+    // dataType      Enum corresponding to the type of the array.
+    //
+    // returns 0 on success, -1 on failure.
+    WebRtc_Word32 Sort(void* data, WebRtc_UWord32 numOfElements, Type dataType);
+
+    // Sorts arbitrary data types. This requires an array of intrinsically typed
+    // key values which will be used to sort the data array. There must be a
+    // one-to-one correspondence between data elements and key elements, with
+    // corresponding elements sharing the same position in their respective
+    // arrays.
+    //
+    // data          [in/out] A pointer to an array of arbitrary type.
+    //               Upon return it will be sorted in ascending order.
+    // key           [in] A pointer to an array of keys used to sort the
+    //               data array.
+    // numOfElements The number of elements in the arrays.
+    // sizeOfElement The size, in bytes, of the data array.
+    // keyType       Enum corresponding to the type of the key array.
+    //
+    // returns 0 on success, -1 on failure.
+    //
+    WebRtc_Word32 KeySort(void* data, void* key, WebRtc_UWord32 numOfElements,
+                          WebRtc_UWord32 sizeOfElement, Type keyType);
+}
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SORT_H_
diff --git a/trunk/src/system_wrappers/interface/static_instance.h b/trunk/src/system_wrappers/interface/static_instance.h
new file mode 100644
index 0000000..8fe91cc
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/static_instance.h
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STATICINSTANCETEMPLATE_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STATICINSTANCETEMPLATE_H_
+
+#include <assert.h>
+
+#include "critical_section_wrapper.h"
+#ifdef _WIN32
+#include "fix_interlocked_exchange_pointer_win.h"
+#endif
+
+namespace webrtc {
+
+enum CountOperation {
+  kRelease,
+  kAddRef,
+  kAddRefNoCreate
+};
+enum CreateOperation {
+  kInstanceExists,
+  kCreate,
+  kDestroy
+};
+
+template <class T>
+// Construct On First Use idiom. Avoids
+// "static initialization order fiasco".
+static T* GetStaticInstance(CountOperation count_operation) {
+  // TODO (hellner): use atomic wrapper instead.
+  static volatile long instance_count = 0;
+  static T* volatile instance = NULL;
+  CreateOperation state = kInstanceExists;
+#ifndef _WIN32
+  // This memory is staticly allocated once. The application does not try to
+  // free this memory. This approach is taken to avoid issues with
+  // destruction order for statically allocated memory. The memory will be
+  // reclaimed by the OS and memory leak tools will not recognize memory
+  // reachable from statics leaked so no noise is added by doing this.
+  static CriticalSectionWrapper* crit_sect(
+      CriticalSectionWrapper::CreateCriticalSection());
+  CriticalSectionScoped lock(crit_sect);
+
+  if (count_operation ==
+      kAddRefNoCreate && instance_count == 0) {
+    return NULL;
+  }
+  if (count_operation ==
+      kAddRef ||
+      count_operation == kAddRefNoCreate) {
+    instance_count++;
+    if (instance_count == 1) {
+      state = kCreate;
+    }
+  } else {
+    instance_count--;
+    if (instance_count == 0) {
+      state = kDestroy;
+    }
+  }
+  if (state == kCreate) {
+    instance = T::CreateInstance();
+  } else if (state == kDestroy) {
+    T* old_instance = instance;
+    instance = NULL;
+    // The state will not change past this point. Release the critical
+    // section while deleting the object in case it would be blocking on
+    // access back to this object. (This is the case for the tracing class
+    // since the thread owned by the tracing class also traces).
+    // TODO(hellner): this is a bit out of place but here goes, de-couple
+    // thread implementation with trace implementation.
+    crit_sect->Leave();
+    if (old_instance) {
+      delete old_instance;
+    }
+    // Re-acquire the lock since the scoped critical section will release
+    // it.
+    crit_sect->Enter();
+    return NULL;
+  }
+#else  // _WIN32
+  if (count_operation ==
+      kAddRefNoCreate && instance_count == 0) {
+    return NULL;
+  }
+  if (count_operation == kAddRefNoCreate) {
+    if (1 == InterlockedIncrement(&instance_count)) {
+      // The instance has been destroyed by some other thread. Rollback.
+      InterlockedDecrement(&instance_count);
+      assert(false);
+      return NULL;
+    }
+    // Sanity to catch corrupt state.
+    if (instance == NULL) {
+      assert(false);
+      InterlockedDecrement(&instance_count);
+      return NULL;
+    }
+  } else if (count_operation == kAddRef) {
+    if (instance_count == 0) {
+      state = kCreate;
+    } else {
+      if (1 == InterlockedIncrement(&instance_count)) {
+        // InterlockedDecrement because reference count should not be
+        // updated just yet (that's done when the instance is created).
+        InterlockedDecrement(&instance_count);
+        state = kCreate;
+      }
+    }
+  } else {
+    int newValue = InterlockedDecrement(&instance_count);
+    if (newValue == 0) {
+      state = kDestroy;
+    }
+  }
+
+  if (state == kCreate) {
+    // Create instance and let whichever thread finishes first assign its
+    // local copy to the global instance. All other threads reclaim their
+    // local copy.
+    T* new_instance = T::CreateInstance();
+    if (1 == InterlockedIncrement(&instance_count)) {
+      T* old_value = static_cast<T*> (InterlockedExchangePointer(
+          reinterpret_cast<void* volatile*>(&instance), new_instance));
+      assert(old_value == NULL);
+      assert(instance);
+    } else {
+      InterlockedDecrement(&instance_count);
+      if (new_instance) {
+        delete static_cast<T*>(new_instance);
+      }
+    }
+  } else if (state == kDestroy) {
+    T* old_value = static_cast<T*> (InterlockedExchangePointer(
+        reinterpret_cast<void* volatile*>(&instance), NULL));
+    if (old_value) {
+      delete static_cast<T*>(old_value);
+    }
+    return NULL;
+  }
+#endif  // #ifndef _WIN32
+  return instance;
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STATICINSTANCETEMPLATE_H_
diff --git a/trunk/src/system_wrappers/interface/thread_wrapper.h b/trunk/src/system_wrappers/interface/thread_wrapper.h
new file mode 100644
index 0000000..030ac8a
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/thread_wrapper.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// System independent wrapper for spawning threads.
+// Note: the spawned thread will loop over the callback function until stopped.
+// Note: The callback function is expected to return every 2 seconds or more
+// often.
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_THREAD_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_THREAD_WRAPPER_H_
+
+#include "common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+// Object that will be passed by the spawned thread when it enters the callback
+// function.
+#define ThreadObj void*
+
+// Callback function that the spawned thread will enter once spawned.
+// A return value of false is interpreted as that the function has no
+// more work to do and that the thread can be released.
+typedef  bool(*ThreadRunFunction)(ThreadObj);
+
+enum ThreadPriority
+{
+    kLowPriority = 1,
+    kNormalPriority = 2,
+    kHighPriority = 3,
+    kHighestPriority = 4,
+    kRealtimePriority = 5
+};
+
+class ThreadWrapper
+{
+public:
+    enum {kThreadMaxNameLength = 64};
+
+    virtual ~ThreadWrapper() {};
+
+    // Factory method. Constructor disabled.
+    //
+    // func        Pointer to a, by user, specified callback function.
+    // obj         Object associated with the thread. Passed in the callback
+    //             function.
+    // prio        Thread priority. May require root/admin rights.
+    // threadName  NULL terminated thread name, will be visible in the Windows
+    //             debugger.
+    static ThreadWrapper* CreateThread(ThreadRunFunction func = 0,
+                                       ThreadObj obj= 0,
+                                       ThreadPriority prio = kNormalPriority,
+                                       const char* threadName = 0);
+
+    // Get the current thread's kernel thread ID.
+    static uint32_t GetThreadId();
+
+    // Non blocking termination of the spawned thread. Note that it is not safe
+    // to delete this class until the spawned thread has been reclaimed.
+    virtual void SetNotAlive() = 0;
+
+    // Spawns the thread. This will start the triggering of the callback
+    // function.
+    virtual bool Start(unsigned int& id) = 0;
+
+    // Sets the threads CPU affinity. CPUs are listed 0 - (number of CPUs - 1).
+    // The numbers in processorNumbers specify which CPUs are allowed to run the
+    // thread. processorNumbers should not contain any duplicates and elements
+    // should be lower than (number of CPUs - 1). amountOfProcessors should be
+    // equal to the number of processors listed in processorNumbers
+    virtual bool SetAffinity(const int* /*processorNumbers*/,
+                             const unsigned int /*amountOfProcessors*/) {
+      return false;
+    }
+
+    // Stops the spawned thread and waits for it to be reclaimed with a timeout
+    // of two seconds. Will return false if the thread was not reclaimed.
+    // Multiple tries to Stop are allowed (e.g. to wait longer than 2 seconds).
+    // It's ok to call Stop() even if the spawned thread has been reclaimed.
+    virtual bool Stop() = 0;
+
+    // Stops the spawned thread dead in its tracks. Will likely result in a
+    // corrupt state. There should be an extremely good reason for even looking
+    // at this function. Can cause many problems deadlock being one of them.
+    virtual bool Shutdown() {return false;}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_THREAD_WRAPPER_H_
diff --git a/trunk/src/system_wrappers/interface/tick_util.h b/trunk/src/system_wrappers/interface/tick_util.h
new file mode 100644
index 0000000..e78e53d
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/tick_util.h
@@ -0,0 +1,325 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// System independent wrapper for polling elapsed time in ms and us.
+// The implementation works in the tick domain which can be mapped over to the
+// time domain.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TICK_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TICK_UTIL_H_
+
+#if _WIN32
+#include <windows.h>
+#include <mmsystem.h>
+#elif WEBRTC_LINUX
+#include <ctime>
+#else
+#include <sys/time.h>
+#include <time.h>
+#endif
+
+#include "typedefs.h"
+
+namespace webrtc {
+class TickInterval;
+
+class TickTime
+{
+public:
+    // Current time in the tick domain.
+    static TickTime Now();
+
+    // Now in the time domain in ms.
+    static WebRtc_Word64 MillisecondTimestamp();
+
+    // Now in the time domain in us.
+    static WebRtc_Word64 MicrosecondTimestamp();
+
+    WebRtc_Word64 Ticks() const;
+
+    static WebRtc_Word64 MillisecondsToTicks(const WebRtc_Word64 ms);
+
+    static WebRtc_Word64 TicksToMilliseconds(const WebRtc_Word64 ticks);
+
+    // Returns a TickTime that is ticks later than the passed TickTime
+    friend TickTime operator+(const TickTime lhs, const WebRtc_Word64 ticks);
+    TickTime& operator+=(const WebRtc_Word64& rhs);
+
+
+    // Returns a TickInterval that is the difference in ticks between rhs and lhs
+    friend TickInterval operator-(const TickTime& lhs, const TickTime& rhs);
+private:
+    WebRtc_Word64 _ticks;
+};
+
+class TickInterval
+{
+public:
+    TickInterval();
+
+    WebRtc_Word64 Milliseconds() const;
+    WebRtc_Word64 Microseconds() const;
+
+    // Returns the sum of two TickIntervals as a TickInterval
+    friend TickInterval operator+(const TickInterval& lhs,
+                                  const TickInterval& rhs);
+    TickInterval& operator-=(const TickInterval& rhs);
+
+    // Returns a TickInterval corresponding to rhs - lhs
+    friend TickInterval operator-(const TickInterval& lhs,
+                                  const TickInterval& rhs);
+    TickInterval& operator+=(const TickInterval& rhs);
+
+    friend bool operator>(const TickInterval& lhs, const TickInterval& rhs);
+    friend bool operator<=(const TickInterval& lhs, const TickInterval& rhs);
+    friend bool operator<(const TickInterval& lhs, const TickInterval& rhs);
+    friend bool operator>=(const TickInterval& lhs, const TickInterval& rhs);
+
+private:
+    TickInterval(WebRtc_Word64 interval);
+
+    friend class TickTime;
+    friend TickInterval operator-(const TickTime& lhs, const TickTime& rhs);
+
+private:
+    WebRtc_Word64 _interval;
+};
+
+inline TickInterval operator+(const TickInterval& lhs, const TickInterval& rhs)
+{
+    return TickInterval(lhs._interval + rhs._interval);
+}
+
+inline TickInterval operator-(const TickInterval& lhs, const TickInterval& rhs)
+{
+    return TickInterval(lhs._interval - rhs._interval);
+}
+
+inline TickInterval operator-(const TickTime& lhs,const TickTime& rhs)
+{
+    return TickInterval(lhs._ticks - rhs._ticks);
+}
+
+inline TickTime operator+(const TickTime lhs, const WebRtc_Word64 ticks)
+{
+    TickTime time = lhs;
+    time._ticks += ticks;
+    return time;
+}
+inline bool operator>(const TickInterval& lhs, const TickInterval& rhs)
+{
+    return lhs._interval > rhs._interval;
+}
+inline bool operator<=(const TickInterval& lhs, const TickInterval& rhs)
+{
+    return lhs._interval <= rhs._interval;
+}
+// Strict less-than: must use '<', not '<='. The original used '<=' which made
+// operator< behave identically to operator<= (a strict-weak-ordering violation:
+// a < b and b < a could both hold for equal intervals).
+inline bool operator<(const TickInterval& lhs, const TickInterval& rhs)
+{
+    return lhs._interval < rhs._interval;
+}
+inline bool operator>=(const TickInterval& lhs, const TickInterval& rhs)
+{
+    return lhs._interval >= rhs._interval;
+}
+
+inline TickTime TickTime::Now()
+{
+    TickTime result;
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        // QueryPerformanceCounter returns the value from the TSC which is
+        // incremented at the CPU frequency. The algorithm used requires
+        // the CPU frequency to be constant. Technology like speed stepping
+        // which has variable CPU frequency will therefore yield unpredictable,
+        // incorrect time estimations.
+        LARGE_INTEGER qpcnt;
+        QueryPerformanceCounter(&qpcnt);
+        result._ticks = qpcnt.QuadPart;
+    #else
+        static volatile LONG lastTimeGetTime = 0;
+        static volatile WebRtc_Word64 numWrapTimeGetTime = 0;
+        volatile LONG* lastTimeGetTimePtr = &lastTimeGetTime;
+        DWORD now = timeGetTime();
+        // Atomically update the last gotten time
+        DWORD old = InterlockedExchange(lastTimeGetTimePtr, now);
+        if(now < old)
+        {
+            // If now is earlier than old, there may have been a race between
+            // threads.
+            // 0x0fffffff ~3.1 days, the code will not take that long to execute
+            // so it must have been a wrap around.
+            if(old > 0xf0000000 && now < 0x0fffffff) 
+            {
+                numWrapTimeGetTime++;
+            }
+        }
+        result._ticks = now + (numWrapTimeGetTime<<32);
+    #endif
+#elif defined(WEBRTC_LINUX)
+    struct timespec ts;
+    #ifdef WEBRTC_CLOCK_TYPE_REALTIME
+        clock_gettime(CLOCK_REALTIME, &ts);
+    #else
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+    #endif
+    result._ticks = 1000000000LL * static_cast<WebRtc_Word64>(ts.tv_sec) + static_cast<WebRtc_Word64>(ts.tv_nsec);
+#else
+    struct timeval tv;
+    gettimeofday(&tv, NULL);
+    result._ticks = 1000000LL * static_cast<WebRtc_Word64>(tv.tv_sec) + static_cast<WebRtc_Word64>(tv.tv_usec);
+#endif
+    return result;
+}
+
+inline WebRtc_Word64 TickTime::MillisecondTimestamp()
+{
+    TickTime now = TickTime::Now();
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        LARGE_INTEGER qpfreq;
+        QueryPerformanceFrequency(&qpfreq);
+        return (now._ticks * 1000) / qpfreq.QuadPart;
+    #else
+        return now._ticks;
+    #endif
+#elif WEBRTC_LINUX
+    return now._ticks / 1000000LL;
+#else
+    return now._ticks / 1000LL;
+#endif
+}
+
+inline WebRtc_Word64 TickTime::MicrosecondTimestamp()
+{
+    TickTime now = TickTime::Now();
+
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        LARGE_INTEGER qpfreq;
+        QueryPerformanceFrequency(&qpfreq);
+        return (now._ticks * 1000) / (qpfreq.QuadPart/1000);
+    #else
+        return now._ticks *1000LL;
+    #endif
+#elif WEBRTC_LINUX
+    return now._ticks / 1000LL;
+#else
+    return now._ticks;
+#endif
+}
+
+inline WebRtc_Word64 TickTime::Ticks() const
+{
+    return _ticks;
+}
+
+inline WebRtc_Word64 TickTime::MillisecondsToTicks(const WebRtc_Word64 ms)
+{
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        LARGE_INTEGER qpfreq;
+        QueryPerformanceFrequency(&qpfreq);
+        return (qpfreq.QuadPart * ms) / 1000;
+    #else
+        return ms;
+    #endif
+#elif WEBRTC_LINUX
+    return ms * 1000000LL;
+#else
+    return ms * 1000LL;
+#endif
+}
+
+inline WebRtc_Word64 TickTime::TicksToMilliseconds(const WebRtc_Word64 ticks)
+{
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        LARGE_INTEGER qpfreq;
+        QueryPerformanceFrequency(&qpfreq);
+        return (ticks * 1000) / qpfreq.QuadPart;
+    #else
+        return ticks;
+    #endif
+#elif WEBRTC_LINUX
+    return ticks / 1000000LL;
+#else
+    return ticks / 1000LL;
+#endif
+}
+
+inline TickTime& TickTime::operator+=(const WebRtc_Word64& ticks)
+{
+    _ticks += ticks;
+    return *this;
+}
+
+inline TickInterval::TickInterval() : _interval(0)
+{
+}
+
+inline TickInterval::TickInterval(const WebRtc_Word64 interval)
+    : _interval(interval)
+{
+}
+
+inline WebRtc_Word64 TickInterval::Milliseconds() const
+{
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        LARGE_INTEGER qpfreq;
+        QueryPerformanceFrequency(&qpfreq);
+        return (_interval * 1000) / qpfreq.QuadPart;
+    #else
+	// _interval is in ms
+        return _interval;
+    #endif
+#elif WEBRTC_LINUX
+    // _interval is in ns
+    return _interval / 1000000;
+#else
+    // _interval is usecs
+    return _interval / 1000;
+#endif
+}
+
+inline WebRtc_Word64 TickInterval::Microseconds() const
+{
+#if _WIN32
+    #ifdef USE_QUERY_PERFORMANCE_COUNTER
+        LARGE_INTEGER qpfreq;
+        QueryPerformanceFrequency(&qpfreq);
+        return (_interval * 1000000) / qpfreq.QuadPart;
+    #else
+	// _interval is in ms
+        return _interval *1000LL;
+    #endif
+#elif WEBRTC_LINUX
+    // _interval is in ns
+    return _interval / 1000;
+#else
+    // _interval is usecs
+    return _interval;
+#endif
+}
+
+inline TickInterval& TickInterval::operator+=(const TickInterval& rhs)
+{
+    _interval += rhs._interval;
+    return *this;
+}
+
+inline TickInterval& TickInterval::operator-=(const TickInterval& rhs)
+{
+    _interval -= rhs._interval;
+    return *this;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TICK_UTIL_H_
diff --git a/trunk/src/system_wrappers/interface/trace.h b/trunk/src/system_wrappers/interface/trace.h
new file mode 100644
index 0000000..0333e76
--- /dev/null
+++ b/trunk/src/system_wrappers/interface/trace.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// System independent wrapper for logging runtime information to file.
+// Note: All log messages will be written to the same trace file.
+// Note: If too many messages are written to file there will be a build up of
+//       messages. Apply filtering to avoid that.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TRACE_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TRACE_H_
+
+#include "common_types.h"
+#include "typedefs.h"
+
+#define WEBRTC_TRACE Trace::Add
+
+namespace webrtc {
+class Trace
+{
+public:
+
+    // Increments the reference count to the trace.
+    static void CreateTrace();
+    // Decrements the reference count to the trace.
+    static void ReturnTrace();
+    // Note: any instance that writes to the trace file should increment and
+    // decrement the reference count on construction and destruction,
+    // respectively.
+
+    // Specifies what type of messages should be written to the trace file. The
+    // filter parameter is a bitmask where each message type is enumerated by
+    // the TraceLevel enumerator. TODO (hellner) why is the
+    // TraceLevel enumerator not defined in this file?
+    static WebRtc_Word32 SetLevelFilter(const WebRtc_UWord32 filter);
+
+    // Returns what type of messages are written to the trace file.
+    static WebRtc_Word32 LevelFilter(WebRtc_UWord32& filter);
+
+    // Sets the file name. If addFileCounter is false the same file will be
+    // reused when it fills up. If it's true a new file with incremented name
+    // will be used.
+    static WebRtc_Word32 SetTraceFile(const char* fileName,
+                                      const bool addFileCounter = false);
+
+    // Returns the name of the file that the trace is currently writing to.
+    static WebRtc_Word32 TraceFile(char fileName[1024]);
+
+    // Registers callback to receive trace messages. TODO (hellner)
+    // why not use OutStream instead? Why is TraceCallback not defined in this
+    // file?
+    static WebRtc_Word32 SetTraceCallback(TraceCallback* callback);
+
+    // Adds a trace message for writing to file. The message is put in a queue
+    // for writing to file whenever possible, for performance reasons. I.e. if
+    // there is a crash it is possible that the last, vital logs are not
+    // logged yet. level is the type of message to log. If that type of
+    // messages is filtered it will not be written to file. module is an
+    // identifier for what part of the code the message is coming from.
+    // id is an identifier that should be unique for that set of classes that
+    // are associated (e.g. all instances owned by an engine).
+    // msg and the ellipsis are the same as e.g. sprintf.
+    // TODO (hellner) Why is TraceModule not defined in this file?
+    static void Add(const TraceLevel level,
+                    const TraceModule module,
+                    const WebRtc_Word32 id,
+                    const char* msg, ...);
+
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TRACE_H_
diff --git a/trunk/src/system_wrappers/source/Android.mk b/trunk/src/system_wrappers/source/Android.mk
new file mode 100644
index 0000000..aea9b32
--- /dev/null
+++ b/trunk/src/system_wrappers/source/Android.mk
@@ -0,0 +1,60 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Builds the static library libwebrtc_system_wrappers for Android.
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared Android build settings; presumably defines MY_WEBRTC_COMMON_DEFS
+# used below -- verify against android-webrtc.mk.
+include $(LOCAL_PATH)/../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc_system_wrappers
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+# Generic wrappers plus the POSIX-specific implementations (no *_win.cc
+# sources here; this makefile targets Android only).
+LOCAL_SRC_FILES := \
+    map.cc \
+    sort.cc \
+    aligned_malloc.cc \
+    atomic32.cc \
+    condition_variable.cc \
+    cpu_no_op.cc \
+    cpu_features.cc \
+    cpu_features_arm.c \
+    cpu_info.cc \
+    critical_section.cc \
+    event.cc \
+    file_impl.cc \
+    list_no_stl.cc \
+    rw_lock.cc \
+    thread.cc \
+    trace_impl.cc \
+    condition_variable_posix.cc \
+    cpu_linux.cc \
+    critical_section_posix.cc \
+    event_posix.cc \
+    thread_posix.cc \
+    trace_posix.cc \
+    rw_lock_posix.cc 
+
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS)
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../.. \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/spreadsortlib
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+# In-tree (non-NDK) builds pull the STLport configuration from the
+# platform source tree; the NDK provides its own STL setup.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/system_wrappers/source/aligned_malloc.cc b/trunk/src/system_wrappers/source/aligned_malloc.cc
new file mode 100644
index 0000000..bb10c6b
--- /dev/null
+++ b/trunk/src/system_wrappers/source/aligned_malloc.cc
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "aligned_malloc.h"
+
+#include <assert.h>
+#include <memory.h>
+
+#ifdef WEBRTC_ANDROID
+#include <stdlib.h>
+#endif
+
+#if WEBRTC_MAC
+  #include <malloc/malloc.h>
+#else
+  #include <malloc.h>
+#endif
+
+#if _WIN32
+    #include <windows.h>
+#else
+    #include <stdint.h>
+#endif
+
+#include "typedefs.h"
+
+// Ok reference on memory alignment:
+// http://stackoverflow.com/questions/227897/solve-the-memory-alignment-in-c-interview-question-that-stumped-me
+
+namespace webrtc
+{
+// Bookkeeping header stored alongside every aligned allocation so that
+// AlignedFree() can recover the raw malloc() pointer.
+// TODO (hellner) better to create just one memory block and
+//                           interpret the first sizeof(AlignedMemory) bytes as
+//                           an AlignedMemory struct.
+struct AlignedMemory
+{
+  void* alignedBuffer;  // Aligned address handed back to the caller.
+  void* memoryPointer;  // Raw malloc() address; needed for free().
+};
+
+// Allocates |size| bytes whose address is a multiple of |alignment|.
+// |alignment| must be a non-zero power of two; returns NULL on bad
+// arguments or allocation failure. Release with AlignedFree().
+void* AlignedMalloc(size_t size, size_t alignment)
+{
+    if(alignment == 0)
+    {
+        // Don't allow alignment 0 since it's undefined.
+        return NULL;
+    }
+    // Make sure that the alignment is an integer power of two or fail.
+    if(alignment & (alignment - 1))
+    {
+        return NULL;
+    }
+
+    AlignedMemory* returnValue = new AlignedMemory();
+    if(returnValue == NULL)
+    {
+        // NOTE(review): plain operator new throws std::bad_alloc rather than
+        // returning NULL, so this check is likely dead code unless exceptions
+        // are disabled -- confirm the build settings.
+        return NULL;
+    }
+
+    // The memory is aligned towards the lowest address that so only
+    // alignment - 1 bytes needs to be allocated.
+    // A pointer to AlignedMemory must be stored so that it can be retrieved
+    // for deletion, ergo the sizeof(uintptr_t).
+    returnValue->memoryPointer = malloc(size + sizeof(uintptr_t) +
+                                        alignment - 1);
+    if(returnValue->memoryPointer == NULL)
+    {
+        delete returnValue;
+        return NULL;
+    }
+
+    // Aligning after the sizeof(header) bytes will leave room for the header
+    // in the same memory block.
+    uintptr_t alignStartPos = (uintptr_t)returnValue->memoryPointer;
+    alignStartPos += sizeof(uintptr_t);
+
+    // The buffer should be aligned with 'alignment' bytes. The - 1 guarantees
+    // that we align towards the lowest address.
+    uintptr_t alignedPos = (alignStartPos + alignment - 1) & ~(alignment - 1);
+
+    // alignedPos is the address sought for.
+    returnValue->alignedBuffer = (void*)alignedPos;
+
+    // Store the address to the AlignedMemory struct in the header so that
+    // it's possible to reclaim all memory.
+    uintptr_t headerPos = alignedPos;
+    headerPos -= sizeof(uintptr_t);
+    void* headerPtr = (void*) headerPos;
+    uintptr_t headerValue = (uintptr_t)returnValue;
+    memcpy(headerPtr,&headerValue,sizeof(uintptr_t));
+
+    return returnValue->alignedBuffer;
+}
+
+// Releases memory previously returned by AlignedMalloc(). Safe to call
+// with NULL; passing any other pointer is undefined (the header lookup
+// below assumes the AlignedMalloc layout).
+void AlignedFree(void* memBlock)
+{
+    if(memBlock == NULL)
+    {
+        return;
+    }
+    uintptr_t alignedPos = (uintptr_t)memBlock;
+    uintptr_t headerPos = alignedPos - sizeof(uintptr_t);
+
+    // Read out the address of the AlignedMemory struct from the header
+    // that AlignedMalloc() stored immediately before the aligned buffer.
+    uintptr_t* headerPtr = (uintptr_t*)headerPos;
+    AlignedMemory* deleteMemory = (AlignedMemory*) *headerPtr;
+
+    if(deleteMemory->memoryPointer != NULL)
+    {
+        free(deleteMemory->memoryPointer);
+    }
+    delete deleteMemory;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/atomic32.cc b/trunk/src/system_wrappers/source/atomic32.cc
new file mode 100644
index 0000000..588dd3e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/atomic32.cc
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "atomic32_wrapper.h"
+
+#if defined(_WIN32)
+    #include "atomic32_win.h"
+#elif defined(WEBRTC_LINUX)
+    #include "atomic32_linux.h"
+#elif defined(WEBRTC_MAC)
+    #include "atomic32_mac.h"
+#else
+    #error unsupported os!
+#endif
+
+namespace webrtc {
+// All operations delegate to the platform-specific Atomic32Impl selected
+// by the includes above.
+Atomic32Wrapper::Atomic32Wrapper(WebRtc_Word32 initialValue)
+    : _impl(*new Atomic32Impl(initialValue))
+{
+}
+
+Atomic32Wrapper::~Atomic32Wrapper()
+{
+    // _impl is a reference to a heap object created in the constructor;
+    // reclaim it here.
+    delete &_impl;
+}
+
+// Pre-increment; returns the new value.
+WebRtc_Word32 Atomic32Wrapper::operator++()
+{
+    return ++_impl;
+}
+
+// Pre-decrement; returns the new value.
+WebRtc_Word32 Atomic32Wrapper::operator--()
+{
+    return --_impl;
+}
+
+// Read and write to properly aligned variables are atomic operations.
+// Ex reference (for Windows): http://msdn.microsoft.com/en-us/library/ms684122(v=VS.85).aspx
+// TODO (hellner) operator= and Atomic32Wrapper::Value() can be fully
+// implemented here.
+Atomic32Wrapper& Atomic32Wrapper::operator=(const Atomic32Wrapper& rhs)
+{
+    if(this == &rhs)
+    {
+        return *this;
+    }
+    _impl = rhs._impl;
+    return *this;
+}
+
+Atomic32Wrapper& Atomic32Wrapper::operator=(WebRtc_Word32 rhs)
+{
+    _impl = rhs;
+    return *this;
+}
+
+WebRtc_Word32 Atomic32Wrapper::operator+=(WebRtc_Word32 rhs)
+{
+    return _impl += rhs;
+}
+
+WebRtc_Word32 Atomic32Wrapper::operator-=(WebRtc_Word32 rhs)
+{
+    return _impl -= rhs;
+}
+
+// Atomically replaces the value with newValue iff it currently equals
+// compareValue. Returns true if the exchange took place.
+bool Atomic32Wrapper::CompareExchange(WebRtc_Word32 newValue,
+                                      WebRtc_Word32 compareValue)
+{
+    return _impl.CompareExchange(newValue,compareValue);
+}
+
+WebRtc_Word32 Atomic32Wrapper::Value() const
+{
+    return _impl.Value();
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/atomic32_linux.h b/trunk/src/system_wrappers/source/atomic32_linux.h
new file mode 100644
index 0000000..f9f5650
--- /dev/null
+++ b/trunk/src/system_wrappers/source/atomic32_linux.h
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Atomic system independent 32-bit signed integer.
+// Linux implementation.
+// Note: Requires gcc 4.1.2 or later.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_LINUX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_LINUX_H_
+
+#include <inttypes.h>
+#include <malloc.h>
+
+#include "common_types.h"
+
+namespace webrtc {
+class Atomic32Impl
+{
+public:
+    inline Atomic32Impl(WebRtc_Word32 initialValue);
+    inline ~Atomic32Impl();
+
+    // Pre-increment/decrement; both return the new value.
+    inline WebRtc_Word32 operator++();
+    inline WebRtc_Word32 operator--();
+
+    // NOTE(review): assignment is implemented as a plain store (not a
+    // read-modify-write); it relies on aligned word stores being atomic.
+    inline Atomic32Impl& operator=(const Atomic32Impl& rhs);
+    inline Atomic32Impl& operator=(WebRtc_Word32 rhs);
+    inline WebRtc_Word32 operator+=(WebRtc_Word32 rhs);
+    inline WebRtc_Word32 operator-=(WebRtc_Word32 rhs);
+
+    inline bool CompareExchange(WebRtc_Word32 newValue,
+                                WebRtc_Word32 compareValue);
+
+    inline WebRtc_Word32 Value() const;
+private:
+    void*        _ptrMemory;
+    // Volatile ensures full memory barriers.
+    volatile WebRtc_Word32* _value;
+};
+
+// TODO (hellner) use aligned_malloc instead of doing it manually.
+inline Atomic32Impl::Atomic32Impl(WebRtc_Word32 initialValue)
+    : _ptrMemory(NULL),
+      _value(NULL)
+{   // Align the memory associated with _value on a 32-bit boundary. This is a
+    // requirement for the used Linux APIs to be atomic.
+    // Keep _ptrMemory to be able to reclaim memory.
+    // Two words are allocated so that rounding up to the next 4-byte
+    // boundary still leaves a full word available.
+    _ptrMemory = malloc(sizeof(WebRtc_Word32)*2);
+    // NOTE(review): the malloc() result is not checked; a failed allocation
+    // crashes on the store below.
+    _value = (WebRtc_Word32*) (((uintptr_t)_ptrMemory+3)&(~0x3));
+    *_value = initialValue;
+}
+
+inline Atomic32Impl::~Atomic32Impl()
+{
+    if(_ptrMemory != NULL)
+    {
+        free(_ptrMemory);
+    }
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator++()
+{
+    // __sync_fetch_and_add returns the previous value; adjust by one so
+    // the new value is returned.
+    WebRtc_Word32 returnValue = __sync_fetch_and_add(_value,1);
+    returnValue++;
+    return returnValue;
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator--()
+{
+    // __sync_fetch_and_sub returns the previous value; adjust by one so
+    // the new value is returned.
+    WebRtc_Word32 returnValue = __sync_fetch_and_sub(_value,1);
+    returnValue--;
+    return returnValue;
+}
+
+inline Atomic32Impl& Atomic32Impl::operator=(const Atomic32Impl& rhs)
+{
+    *_value = *rhs._value;
+    return *this;
+}
+
+inline Atomic32Impl& Atomic32Impl::operator=(WebRtc_Word32 rhs)
+{
+    *_value = rhs;
+    return *this;
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator+=(WebRtc_Word32 rhs)
+{
+    // Return the new (post-addition) value.
+    WebRtc_Word32 returnValue = __sync_fetch_and_add(_value,rhs);
+    returnValue += rhs;
+    return returnValue;
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator-=(WebRtc_Word32 rhs)
+{
+    // Return the new (post-subtraction) value.
+    WebRtc_Word32 returnValue = __sync_fetch_and_sub(_value,rhs);
+    returnValue -= rhs;
+    return returnValue;
+}
+
+// Returns true iff the value equalled compareValue and was replaced by
+// newValue.
+inline bool Atomic32Impl::CompareExchange(WebRtc_Word32 newValue,
+                                          WebRtc_Word32 compareValue)
+{
+    return __sync_bool_compare_and_swap(_value,compareValue,newValue);
+}
+
+inline WebRtc_Word32 Atomic32Impl::Value() const
+{
+    return *_value;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_LINUX_H_
diff --git a/trunk/src/system_wrappers/source/atomic32_mac.h b/trunk/src/system_wrappers/source/atomic32_mac.h
new file mode 100644
index 0000000..bf8febc
--- /dev/null
+++ b/trunk/src/system_wrappers/source/atomic32_mac.h
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Atomic system independent 32-bit signed integer.
+// Mac implementation.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_MAC_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_MAC_H_
+
+#include <stdlib.h>
+#include <libkern/OSAtomic.h>
+
+#include "common_types.h"
+
+namespace webrtc {
+class Atomic32Impl
+{
+public:
+    inline Atomic32Impl(WebRtc_Word32 initialValue);
+    inline ~Atomic32Impl();
+
+    // Pre-increment/decrement; both return the new value.
+    inline WebRtc_Word32 operator++();
+    inline WebRtc_Word32 operator--();
+
+    // NOTE(review): assignment is implemented as a plain store (not a
+    // read-modify-write); it relies on aligned word stores being atomic.
+    inline Atomic32Impl& operator=(const Atomic32Impl& rhs);
+    inline Atomic32Impl& operator=(WebRtc_Word32 rhs);
+    inline WebRtc_Word32 operator+=(WebRtc_Word32 rhs);
+    inline WebRtc_Word32 operator-=(WebRtc_Word32 rhs);
+
+    inline bool CompareExchange(WebRtc_Word32 newValue,
+                                WebRtc_Word32 compareValue);
+
+    inline WebRtc_Word32 Value() const;
+private:
+    void*        _ptrMemory;
+    // Volatile ensures full memory barriers.
+    volatile WebRtc_Word32* _value;
+};
+
+// TODO (hellner) use aligned_malloc instead of doing it manually.
+inline Atomic32Impl::Atomic32Impl(WebRtc_Word32 initialValue)
+    :
+    _ptrMemory(NULL),
+    _value(NULL)
+{   // Align the memory associated with _value on a 32-bit boundary. This is a
+    // requirement for the used Mac APIs to be atomic.
+    // Keep _ptrMemory to be able to reclaim memory.
+    // Two words are allocated so that rounding up to the next 4-byte
+    // boundary still leaves a full word available.
+    _ptrMemory = malloc(sizeof(WebRtc_Word32)*2);
+    // NOTE(review): the malloc() result is not checked; a failed allocation
+    // crashes on the store below.
+    _value = (WebRtc_Word32*) (((uintptr_t)_ptrMemory+3)&(~0x3));
+    *_value = initialValue;
+}
+
+inline Atomic32Impl::~Atomic32Impl()
+{
+    if(_ptrMemory != NULL)
+    {
+        free(_ptrMemory);
+    }
+}
+
+// OSAtomicIncrement32Barrier returns the incremented (new) value.
+inline WebRtc_Word32 Atomic32Impl::operator++()
+{
+    return OSAtomicIncrement32Barrier(
+               reinterpret_cast<volatile int32_t*>(_value));
+}
+
+// OSAtomicDecrement32Barrier returns the decremented (new) value.
+inline WebRtc_Word32 Atomic32Impl::operator--()
+{
+    return OSAtomicDecrement32Barrier(
+               reinterpret_cast<volatile int32_t*>(_value));
+}
+
+inline Atomic32Impl& Atomic32Impl::operator=(const Atomic32Impl& rhs)
+{
+    *_value = *rhs._value;
+    return *this;
+}
+
+inline Atomic32Impl& Atomic32Impl::operator=(WebRtc_Word32 rhs)
+{
+    *_value = rhs;
+    return *this;
+}
+
+// OSAtomicAdd32Barrier returns the new value after the addition.
+inline WebRtc_Word32 Atomic32Impl::operator+=(WebRtc_Word32 rhs)
+{
+    return OSAtomicAdd32Barrier(rhs,
+                                reinterpret_cast<volatile int32_t*>(_value));
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator-=(WebRtc_Word32 rhs)
+{
+    return OSAtomicAdd32Barrier(-rhs,
+                                reinterpret_cast<volatile int32_t*>(_value));
+}
+
+// Returns true iff the value equalled compareValue and was replaced by
+// newValue.
+inline bool Atomic32Impl::CompareExchange(WebRtc_Word32 newValue,
+                                          WebRtc_Word32 compareValue)
+{
+    return OSAtomicCompareAndSwap32Barrier(
+               compareValue,
+               newValue,
+               reinterpret_cast<volatile int32_t*>(_value));
+}
+
+inline WebRtc_Word32 Atomic32Impl::Value() const
+{
+    return *_value;
+}
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_MAC_H_
diff --git a/trunk/src/system_wrappers/source/atomic32_win.h b/trunk/src/system_wrappers/source/atomic32_win.h
new file mode 100644
index 0000000..c27e48e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/atomic32_win.h
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Atomic system independent 32-bit signed integer.
+// Windows implementation.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_WINDOWS_H_
+
+#include <malloc.h>
+#include <windows.h>
+
+#include "common_types.h"
+
+namespace webrtc {
+class Atomic32Impl
+{
+public:
+    inline Atomic32Impl(WebRtc_Word32 initialValue);
+    inline ~Atomic32Impl();
+
+    // Pre-increment/decrement; both return the new value.
+    inline WebRtc_Word32 operator++();
+    inline WebRtc_Word32 operator--();
+
+    // NOTE(review): assignment is implemented as a plain store (not a
+    // read-modify-write); it relies on aligned word stores being atomic.
+    inline Atomic32Impl& operator=(const Atomic32Impl& rhs);
+    inline Atomic32Impl& operator=(WebRtc_Word32 rhs);
+    inline WebRtc_Word32 operator+=(WebRtc_Word32 rhs);
+    inline WebRtc_Word32 operator-=(WebRtc_Word32 rhs);
+
+    inline bool CompareExchange(WebRtc_Word32 newValue,
+                                WebRtc_Word32 compareValue);
+
+    inline WebRtc_Word32 Value() const;
+private:
+    void* _ptrMemory;
+    // Volatile ensures full memory barriers.
+    volatile LONG* _value;
+};
+
+// TODO (hellner) use aligned_malloc instead of doing it manually.
+inline Atomic32Impl::Atomic32Impl(WebRtc_Word32 initialValue)
+    : _ptrMemory(NULL),
+      _value(NULL)
+{   // Align the memory associated with _value on a 32-bit boundary. This is a
+    // requirement for the used Windows APIs to be atomic.
+    // Keep _ptrMemory to be able to reclaim memory.
+    // Two words are allocated so that rounding up to the next 4-byte
+    // boundary still leaves a full word available.
+    _ptrMemory = malloc(sizeof(WebRtc_Word32)*2);
+    // NOTE(review): the malloc() result is not checked; a failed allocation
+    // crashes on the store below.
+    _value = reinterpret_cast<LONG*> (((uintptr_t)_ptrMemory+3)&(~0x3));
+    *_value = initialValue;
+}
+
+inline Atomic32Impl::~Atomic32Impl()
+{
+    if(_ptrMemory != NULL)
+    {
+         free(_ptrMemory);
+    }
+}
+
+// InterlockedIncrement returns the incremented (new) value.
+inline WebRtc_Word32 Atomic32Impl::operator++()
+{
+    return (WebRtc_Word32)InterlockedIncrement(_value);
+}
+
+// InterlockedDecrement returns the decremented (new) value.
+inline WebRtc_Word32 Atomic32Impl::operator--()
+{
+    return (WebRtc_Word32)InterlockedDecrement(_value);
+}
+
+inline Atomic32Impl& Atomic32Impl::operator=(const Atomic32Impl& rhs)
+{
+    *_value = *rhs._value;
+    return *this;
+}
+
+inline Atomic32Impl& Atomic32Impl::operator=(WebRtc_Word32 rhs)
+{
+    *_value = rhs;
+    return *this;
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator+=(WebRtc_Word32 rhs)
+{
+    // InterlockedExchangeAdd returns the value _value held *before* the
+    // addition; add rhs so that, like the Linux and Mac implementations,
+    // the new value is returned.
+    return InterlockedExchangeAdd(_value,rhs) + rhs;
+}
+
+inline WebRtc_Word32 Atomic32Impl::operator-=(WebRtc_Word32 rhs)
+{
+    // See operator+=: convert the returned pre-subtraction value into the
+    // new value for cross-platform consistency.
+    return InterlockedExchangeAdd(_value,-rhs) - rhs;
+}
+
+inline bool Atomic32Impl::CompareExchange(WebRtc_Word32 newValue,
+                                          WebRtc_Word32 compareValue)
+{
+    const LONG oldValue = InterlockedCompareExchange(_value,newValue,
+                                                     compareValue);
+    // If the old value and the compare value is the same an exchange happened.
+    return (oldValue == compareValue);
+}
+
+inline WebRtc_Word32 Atomic32Impl::Value() const
+{
+    return *_value;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_ATOMIC32_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/source/condition_variable.cc b/trunk/src/system_wrappers/source/condition_variable.cc
new file mode 100644
index 0000000..b37d037
--- /dev/null
+++ b/trunk/src/system_wrappers/source/condition_variable.cc
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(_WIN32)
+   #include <windows.h>
+   #include "condition_variable_wrapper.h"
+   #include "condition_variable_win.h"
+#elif defined(WEBRTC_LINUX)
+   #include <pthread.h>
+   #include "condition_variable_wrapper.h"
+   #include "condition_variable_posix.h"
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+   #include <pthread.h>
+   #include "condition_variable_wrapper.h"
+   #include "condition_variable_posix.h"
+#endif
+
+namespace webrtc {
+// Factory: returns the platform-appropriate condition-variable
+// implementation (Windows native/generic on Win32, pthread-based on
+// Linux/Mac), or NULL on unsupported platforms.
+ConditionVariableWrapper*
+ConditionVariableWrapper::CreateConditionVariable()
+{
+#if defined(_WIN32)
+    return new ConditionVariableWindows;
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+    return ConditionVariablePosix::Create();
+#else
+    return NULL;
+#endif
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/condition_variable_posix.cc b/trunk/src/system_wrappers/source/condition_variable_posix.cc
new file mode 100644
index 0000000..48835ab
--- /dev/null
+++ b/trunk/src/system_wrappers/source/condition_variable_posix.cc
@@ -0,0 +1,151 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "condition_variable_posix.h"
+
+#if defined(WEBRTC_LINUX)
+#include <ctime>
+#else
+#include <sys/time.h>
+#endif
+
+#include <errno.h>
+
+#include "critical_section_posix.h"
+
+namespace webrtc {
+// Two-phase construction: pthread initialization in Construct() can fail,
+// so creation is funneled through this factory, which returns NULL on
+// failure instead of a half-initialized object.
+ConditionVariableWrapper* ConditionVariablePosix::Create()
+{
+    ConditionVariablePosix* ptr = new ConditionVariablePosix;
+    if (!ptr)
+    {
+        // NOTE(review): plain operator new throws rather than returning
+        // NULL, so this check is likely dead code.
+        return NULL;
+    }
+
+    const int error = ptr->Construct();
+    if (error)
+    {
+        delete ptr;
+        return NULL;
+    }
+
+    return ptr;
+}
+
+// Private; use Create(). Real initialization happens in Construct().
+ConditionVariablePosix::ConditionVariablePosix()
+{
+}
+
+// Initializes the pthread condition variable. Unless
+// WEBRTC_CLOCK_TYPE_REALTIME is defined, the condvar is bound to
+// CLOCK_MONOTONIC so timed waits are immune to wall-clock adjustments.
+// Returns 0 on success, -1 on any pthread failure.
+int ConditionVariablePosix::Construct()
+{
+    int result = 0;
+#ifdef WEBRTC_CLOCK_TYPE_REALTIME
+    // NOTE(review): result is assigned here but never checked, so this
+    // branch returns 0 even if pthread_cond_init fails.
+    result = pthread_cond_init(&_cond, NULL);
+#else
+    // NOTE(review): on the error returns below (after condattr_init has
+    // succeeded) the attribute object is never destroyed -- minor leak.
+    pthread_condattr_t condAttr;
+    result = pthread_condattr_init(&condAttr);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_condattr_setclock(&condAttr, CLOCK_MONOTONIC);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_cond_init(&_cond, &condAttr);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_condattr_destroy(&condAttr);
+    if (result != 0)
+    {
+        return -1;
+    }
+#endif
+    return 0;
+}
+
+ConditionVariablePosix::~ConditionVariablePosix()
+{
+    pthread_cond_destroy(&_cond);
+}
+
+// Blocks until Wake()/WakeAll() is called. critSect must be held by the
+// caller and is atomically released while waiting. The cast below assumes
+// critSect is actually a CriticalSectionPosix -- TODO confirm all callers
+// pass the POSIX implementation.
+void ConditionVariablePosix::SleepCS(CriticalSectionWrapper& critSect)
+{
+    CriticalSectionPosix* cs = reinterpret_cast<CriticalSectionPosix*>(
+                                   &critSect);
+    pthread_cond_wait(&_cond, &cs->_mutex);
+}
+
+
+// Waits on the condition variable for at most maxTimeInMS milliseconds;
+// the sentinel value 0xFFFFFFFF means wait indefinitely. critSect must be
+// held on entry and is re-acquired before returning. Returns false only
+// if the wait timed out (true on wake-up, including spurious ones).
+bool
+ConditionVariablePosix::SleepCS(
+    CriticalSectionWrapper& critSect,
+    unsigned long maxTimeInMS)
+{
+    const unsigned long INFINITE =  0xFFFFFFFF;
+
+    const int MILLISECONDS_PER_SECOND      = 1000;
+#ifndef WEBRTC_LINUX
+    const int MICROSECONDS_PER_MILLISECOND = 1000;
+#endif
+    const int NANOSECONDS_PER_SECOND       = 1000000000;
+    const int NANOSECONDS_PER_MILLISECOND  = 1000000;
+
+    CriticalSectionPosix* cs = reinterpret_cast<CriticalSectionPosix*>(
+                                   &critSect);
+
+    if (maxTimeInMS != INFINITE)
+    {
+        // Compute the absolute deadline using the same clock the condvar
+        // was initialized with (see Construct()).
+        timespec ts;
+#ifndef WEBRTC_MAC
+#ifdef WEBRTC_CLOCK_TYPE_REALTIME
+        clock_gettime(CLOCK_REALTIME, &ts);
+#else
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+#endif
+#else
+        // Mac has no clock_gettime here; derive the timespec from
+        // gettimeofday. NOTE(review): the factor converts microseconds to
+        // nanoseconds (x1000) -- numerically correct, but the constant name
+        // MICROSECONDS_PER_MILLISECOND is misleading for this use.
+        struct timeval tv;
+        gettimeofday(&tv, 0);
+        ts.tv_sec  = tv.tv_sec;
+        ts.tv_nsec = tv.tv_usec * MICROSECONDS_PER_MILLISECOND;
+#endif
+
+        ts.tv_sec += maxTimeInMS / MILLISECONDS_PER_SECOND;
+        ts.tv_nsec += (maxTimeInMS - ((maxTimeInMS / MILLISECONDS_PER_SECOND)*
+                      MILLISECONDS_PER_SECOND)) * NANOSECONDS_PER_MILLISECOND;
+
+        // Normalize so tv_nsec stays below one second.
+        if (ts.tv_nsec >= NANOSECONDS_PER_SECOND)
+        {
+            ts.tv_sec += ts.tv_nsec / NANOSECONDS_PER_SECOND;
+            ts.tv_nsec %= NANOSECONDS_PER_SECOND;
+        }
+        const int res = pthread_cond_timedwait(&_cond, &cs->_mutex, &ts);
+        return (res == ETIMEDOUT) ? false : true;
+    }
+    else
+    {
+        pthread_cond_wait(&_cond, &cs->_mutex);
+        return true;
+    }
+}
+
+// Wakes at least one thread blocked in SleepCS().
+void ConditionVariablePosix::Wake()
+{
+    pthread_cond_signal(&_cond);
+}
+
+// Wakes all threads blocked in SleepCS().
+void ConditionVariablePosix::WakeAll()
+{
+    pthread_cond_broadcast(&_cond);
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/condition_variable_posix.h b/trunk/src/system_wrappers/source/condition_variable_posix.h
new file mode 100644
index 0000000..c239a47
--- /dev/null
+++ b/trunk/src/system_wrappers/source/condition_variable_posix.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CONDITION_VARIABLE_POSIX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CONDITION_VARIABLE_POSIX_H_
+
+#include "condition_variable_wrapper.h"
+
+#include <pthread.h>
+
+namespace webrtc {
+// pthread-based condition variable; see the wrapper interface for the
+// contract of each method.
+class ConditionVariablePosix : public ConditionVariableWrapper
+{
+public:
+    // Factory; returns NULL if pthread initialization fails.
+    static ConditionVariableWrapper* Create();
+    ~ConditionVariablePosix();
+
+    void SleepCS(CriticalSectionWrapper& critSect);
+    bool SleepCS(CriticalSectionWrapper& critSect, unsigned long maxTimeInMS);
+    void Wake();
+    void WakeAll();
+
+private:
+    // Private: construction must go through Create() so that Construct()
+    // failures can be reported.
+    ConditionVariablePosix();
+    int Construct();
+
+private:
+    pthread_cond_t _cond;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CONDITION_VARIABLE_POSIX_H_
diff --git a/trunk/src/system_wrappers/source/condition_variable_win.cc b/trunk/src/system_wrappers/source/condition_variable_win.cc
new file mode 100644
index 0000000..f4fae0b
--- /dev/null
+++ b/trunk/src/system_wrappers/source/condition_variable_win.cc
@@ -0,0 +1,224 @@
+/*
+ *  Use of this source code is governed by the ACE copyright license which
+ *  can be found in the LICENSE file in the third_party_mods/ace directory of
+ *  the source tree or at http://www1.cse.wustl.edu/~schmidt/ACE-copying.html.
+ */
+/*
+ *  This source code contain modifications to the original source code
+ *  which can be found here:
+ *  http://www.cs.wustl.edu/~schmidt/win32-cv-1.html (section 3.2).
+ *  Modifications:
+ *  1) Dynamic detection of native support for condition variables.
+ *  2) Use of WebRTC defined types and classes. Renaming of some functions.
+ *  3) Introduction of a second event for wake all functionality. This prevents
+ *     a thread from spinning on the same condition variable, preventing other
+ *     threads from waking up.
+ */
+
+// TODO (hellner): probably nicer to split up native and generic
+// implementation into two different files
+
+#include "condition_variable_win.h"
+
+#include "critical_section_win.h"
+#include "trace.h"
+
+namespace webrtc {
+bool ConditionVariableWindows::_winSupportConditionVariablesPrimitive = false;
+static HMODULE library = NULL;
+
+PInitializeConditionVariable  _PInitializeConditionVariable;
+PSleepConditionVariableCS     _PSleepConditionVariableCS;
+PWakeConditionVariable        _PWakeConditionVariable;
+PWakeAllConditionVariable     _PWakeAllConditionVariable;
+
+typedef void (WINAPI *PInitializeConditionVariable)(PCONDITION_VARIABLE);
+typedef BOOL (WINAPI *PSleepConditionVariableCS)(PCONDITION_VARIABLE,
+                                                 PCRITICAL_SECTION, DWORD);
+typedef void (WINAPI *PWakeConditionVariable)(PCONDITION_VARIABLE);
+typedef void (WINAPI *PWakeAllConditionVariable)(PCONDITION_VARIABLE);
+
+ConditionVariableWindows::ConditionVariableWindows()
+    : _eventID(WAKEALL_0)
+{
+    if (!library)
+    {
+        // Use native implementation if supported (i.e Vista+)
+        library = LoadLibrary(TEXT("Kernel32.dll"));
+        if (library)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                         "Loaded Kernel32.dll");
+
+            _PInitializeConditionVariable =
+                (PInitializeConditionVariable) GetProcAddress(
+                    library,
+                    "InitializeConditionVariable");
+            _PSleepConditionVariableCS =
+                (PSleepConditionVariableCS)GetProcAddress(
+                    library,
+                    "SleepConditionVariableCS");
+            _PWakeConditionVariable =
+                (PWakeConditionVariable)GetProcAddress(
+                    library,
+                     "WakeConditionVariable");
+            _PWakeAllConditionVariable =
+                (PWakeAllConditionVariable)GetProcAddress(
+                    library,
+                    "WakeAllConditionVariable");
+
+            if(_PInitializeConditionVariable &&
+               _PSleepConditionVariableCS &&
+               _PWakeConditionVariable &&
+               _PWakeAllConditionVariable)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                             "Loaded native condition variables");
+                _winSupportConditionVariablesPrimitive = true;
+            }
+        }
+    }
+
+    if (_winSupportConditionVariablesPrimitive)
+    {
+        _PInitializeConditionVariable(&_conditionVariable);
+
+        _events[WAKEALL_0] = NULL;
+        _events[WAKEALL_1] = NULL;
+        _events[WAKE] = NULL;
+
+    } else {
+        memset(&_numWaiters[0],0,sizeof(_numWaiters));
+
+        InitializeCriticalSection(&_numWaitersCritSect);
+
+        _events[WAKEALL_0] = CreateEvent(NULL,  // no security attributes
+                                         TRUE,  // manual-reset, sticky event
+                                         FALSE, // initial state non-signaled
+                                         NULL); // no name for event
+
+        _events[WAKEALL_1] = CreateEvent(NULL,  // no security attributes
+                                         TRUE,  // manual-reset, sticky event
+                                         FALSE, // initial state non-signaled
+                                         NULL); // no name for event
+
+        _events[WAKE] = CreateEvent(NULL,  // no security attributes
+                                    FALSE, // auto-reset, sticky event
+                                    FALSE, // initial state non-signaled
+                                    NULL); // no name for event
+    }
+}
+
+ConditionVariableWindows::~ConditionVariableWindows()
+{
+    if(!_winSupportConditionVariablesPrimitive)
+    {
+        CloseHandle(_events[WAKE]);
+        CloseHandle(_events[WAKEALL_1]);
+        CloseHandle(_events[WAKEALL_0]);
+
+        DeleteCriticalSection(&_numWaitersCritSect);
+    }
+}
+
+void ConditionVariableWindows::SleepCS(CriticalSectionWrapper& critSect)
+{
+    SleepCS(critSect, INFINITE);
+}
+
+bool ConditionVariableWindows::SleepCS(CriticalSectionWrapper& critSect,
+                                       unsigned long maxTimeInMS)
+{
+    CriticalSectionWindows* cs = reinterpret_cast<CriticalSectionWindows*>(
+                                     &critSect);
+
+    if(_winSupportConditionVariablesPrimitive)
+    {
+        BOOL retVal = _PSleepConditionVariableCS(&_conditionVariable,
+                                                 &(cs->crit),maxTimeInMS);
+        return (retVal == 0) ? false : true;
+
+    }else
+    {
+        EnterCriticalSection(&_numWaitersCritSect);
+        // Get the eventID for the event that will be triggered by next
+        // WakeAll() call and start waiting for it.
+        const EventWakeUpType eventID = (WAKEALL_0 == _eventID) ?
+                                            WAKEALL_1 : WAKEALL_0;
+        ++(_numWaiters[eventID]);
+        LeaveCriticalSection(&_numWaitersCritSect);
+
+        LeaveCriticalSection(&cs->crit);
+        HANDLE events[2];
+        events[0] = _events[WAKE];
+        events[1] = _events[eventID];
+        const DWORD result = WaitForMultipleObjects(2, // Wait on 2 events.
+                                                    events,
+                                                    FALSE, // Wait for either.
+                                                    maxTimeInMS);
+
+        const bool retVal = (result != WAIT_TIMEOUT);
+
+        EnterCriticalSection(&_numWaitersCritSect);
+        --(_numWaiters[eventID]);
+        // Last waiter should only be true for WakeAll(). WakeAll() correspond
+        // to position 1 in events[] -> (result == WAIT_OBJECT_0 + 1)
+        const bool lastWaiter = (result == WAIT_OBJECT_0 + 1) &&
+                                (_numWaiters[eventID] == 0);
+        LeaveCriticalSection(&_numWaitersCritSect);
+
+        if (lastWaiter)
+        {
+            // Reset/unset the WakeAll() event since all threads have been
+            // released.
+            ResetEvent(_events[eventID]);
+        }
+
+        EnterCriticalSection(&cs->crit);
+        return retVal;
+    }
+}
+
+void
+ConditionVariableWindows::Wake()
+{
+    if(_winSupportConditionVariablesPrimitive)
+    {
+        _PWakeConditionVariable(&_conditionVariable);
+    }else
+    {
+        EnterCriticalSection(&_numWaitersCritSect);
+        const bool haveWaiters = (_numWaiters[WAKEALL_0] > 0) ||
+                                 (_numWaiters[WAKEALL_1] > 0);
+        LeaveCriticalSection(&_numWaitersCritSect);
+
+        if (haveWaiters)
+        {
+            SetEvent(_events[WAKE]);
+        }
+    }
+}
+
+void
+ConditionVariableWindows::WakeAll()
+{
+    if(_winSupportConditionVariablesPrimitive)
+    {
+        _PWakeAllConditionVariable(&_conditionVariable);
+    }else
+    {
+        EnterCriticalSection(&_numWaitersCritSect);
+        // Update current WakeAll() event
+        _eventID = (WAKEALL_0 == _eventID) ? WAKEALL_1 : WAKEALL_0;
+        // Trigger current event
+        const EventWakeUpType eventID = _eventID;
+        const bool haveWaiters = _numWaiters[eventID] > 0;
+        LeaveCriticalSection(&_numWaitersCritSect);
+
+        if (haveWaiters)
+        {
+            SetEvent(_events[eventID]);
+        }
+    }
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/condition_variable_win.h b/trunk/src/system_wrappers/source/condition_variable_win.h
new file mode 100644
index 0000000..aab2564
--- /dev/null
+++ b/trunk/src/system_wrappers/source/condition_variable_win.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CONDITION_VARIABLE_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CONDITION_VARIABLE_WINDOWS_H_
+
+#include "condition_variable_wrapper.h"
+
+#include <windows.h>
+
+namespace webrtc {
+#if !defined CONDITION_VARIABLE_INIT
+    typedef struct _RTL_CONDITION_VARIABLE
+    {
+        void* Ptr;
+    } RTL_CONDITION_VARIABLE, *PRTL_CONDITION_VARIABLE;
+
+    typedef RTL_CONDITION_VARIABLE CONDITION_VARIABLE, *PCONDITION_VARIABLE;
+#endif
+
+typedef void (WINAPI *PInitializeConditionVariable)(PCONDITION_VARIABLE);
+typedef BOOL (WINAPI *PSleepConditionVariableCS)(PCONDITION_VARIABLE,
+                                                 PCRITICAL_SECTION, DWORD);
+typedef void (WINAPI *PWakeConditionVariable)(PCONDITION_VARIABLE);
+typedef void (WINAPI *PWakeAllConditionVariable)(PCONDITION_VARIABLE);
+
+
+class ConditionVariableWindows : public ConditionVariableWrapper
+{
+public:
+    ConditionVariableWindows();
+    ~ConditionVariableWindows();
+
+    void SleepCS(CriticalSectionWrapper& critSect);
+    bool SleepCS(CriticalSectionWrapper& critSect, unsigned long maxTimeInMS);
+    void Wake();
+    void WakeAll();
+
+private:
+    enum EventWakeUpType
+    {
+        WAKEALL_0   = 0,
+        WAKEALL_1   = 1,
+        WAKE        = 2,
+        EVENT_COUNT = 3
+    };
+
+private:
+    // Native support for Windows Vista+
+    static bool              _winSupportConditionVariablesPrimitive;
+    CONDITION_VARIABLE       _conditionVariable;
+
+    unsigned int     _numWaiters[2];
+    EventWakeUpType  _eventID;
+    CRITICAL_SECTION _numWaitersCritSect;
+    HANDLE           _events[EVENT_COUNT];
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CONDITION_VARIABLE_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/source/cpu.cc b/trunk/src/system_wrappers/source/cpu.cc
new file mode 100644
index 0000000..3df5d18
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu.cc
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "cpu_wrapper.h"
+
+#if defined(_WIN32)
+    #include "cpu_win.h"
+#elif defined(WEBRTC_MAC)
+    #include "cpu_mac.h"
+#elif defined(WEBRTC_MAC_INTEL)
+    #include "cpu_mac.h"
+#elif defined(WEBRTC_ANDROID)
+    // Not implemented yet, might be possible to use Linux implementation
+#else // defined(WEBRTC_LINUX)
+    #include "cpu_linux.h"
+#endif
+
+namespace webrtc {
+CpuWrapper* CpuWrapper::CreateCpu()
+{
+#if defined(_WIN32)
+   return new CpuWindows();
+#elif (defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL))
+    return new CpuWrapperMac();
+#elif defined(WEBRTC_ANDROID)
+    return 0;
+#else
+    return new CpuLinux();
+#endif
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_features.cc b/trunk/src/system_wrappers/source/cpu_features.cc
new file mode 100644
index 0000000..41a86e3
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_features.cc
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Parts of this file derived from Chromium's base/cpu.cc.
+
+#include "cpu_features_wrapper.h"
+
+#include "typedefs.h"
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+#if defined(_MSC_VER)
+#include <intrin.h>
+#endif
+#endif
+
+// No CPU feature is available => straight C path.
+int GetCPUInfoNoASM(CPUFeature feature) {
+  (void)feature;
+  return 0;
+}
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+#ifndef _MSC_VER
+// Intrinsic for "cpuid".
+#if defined(__pic__) && defined(__i386__)
+static inline void __cpuid(int cpu_info[4], int info_type) {
+  __asm__ volatile (
+    "mov %%ebx, %%edi\n"
+    "cpuid\n"
+    "xchg %%edi, %%ebx\n"
+    : "=a"(cpu_info[0]), "=D"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3])
+    : "a"(info_type));
+}
+#else
+static inline void __cpuid(int cpu_info[4], int info_type) {
+  __asm__ volatile (
+    "cpuid\n"
+    : "=a"(cpu_info[0]), "=b"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3])
+    : "a"(info_type));
+}
+#endif
+#endif  // _MSC_VER
+#endif  // WEBRTC_ARCH_X86_FAMILY
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+// Actual feature detection for x86.
+static int GetCPUInfo(CPUFeature feature) {
+  int cpu_info[4];
+  __cpuid(cpu_info, 1);
+  if (feature == kSSE2) {
+    return 0 != (cpu_info[3] & 0x04000000);
+  }
+  if (feature == kSSE3) {
+    return 0 != (cpu_info[2] & 0x00000001);
+  }
+  return 0;
+}
+#else
+// Default to straight C for other platforms.
+static int GetCPUInfo(CPUFeature feature) {
+  (void)feature;
+  return 0;
+}
+#endif
+
+WebRtc_CPUInfo WebRtc_GetCPUInfo = GetCPUInfo;
+WebRtc_CPUInfo WebRtc_GetCPUInfoNoASM = GetCPUInfoNoASM;
diff --git a/trunk/src/system_wrappers/source/cpu_features_arm.c b/trunk/src/system_wrappers/source/cpu_features_arm.c
new file mode 100644
index 0000000..1065118
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_features_arm.c
@@ -0,0 +1,333 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is derived from Android's NDK package r7, located at
+// <ndk>/sources/android/cpufeatures/ (downloadable from
+// http://developer.android.com/sdk/ndk/index.html).
+
+#include "cpu_features_wrapper.h"
+#include <errno.h>
+#include <fcntl.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+// Define CPU family.
+typedef enum {
+  CPU_FAMILY_UNKNOWN = 0,
+  CPU_FAMILY_ARM,
+  CPU_FAMILY_X86,
+  CPU_FAMILY_MAX  // Do not remove.
+} CpuFamily;
+
+static pthread_once_t g_once;
+static CpuFamily g_cpuFamily;
+static uint64_t g_cpuFeatures;
+static int g_cpuCount;
+
+static const int cpufeatures_debug = 0;
+
+#ifdef __arm__
+#  define DEFAULT_CPU_FAMILY  CPU_FAMILY_ARM
+#elif defined __i386__
+#  define DEFAULT_CPU_FAMILY  CPU_FAMILY_X86
+#else
+#  define DEFAULT_CPU_FAMILY  CPU_FAMILY_UNKNOWN
+#endif
+
+#define  D(...) \
+  do { \
+    if (cpufeatures_debug) { \
+      printf(__VA_ARGS__); fflush(stdout); \
+    } \
+  } while (0)
+
+/* Read the content of /proc/cpuinfo into a user-provided buffer.
+ * Return the length of the data, or -1 on error. Does *not*
+ * zero-terminate the content. Will not read more
+ * than 'buffsize' bytes.
+ */
+static int read_file(const char*  pathname, char*  buffer, size_t  buffsize) {
+  int  fd, len;
+
+  fd = open(pathname, O_RDONLY);
+  if (fd < 0)
+    return -1;
+
+  do {
+    len = read(fd, buffer, buffsize);
+  } while (len < 0 && errno == EINTR);
+
+  close(fd);
+
+  return len;
+}
+
+/* Extract the content of the first occurrence of a given field in
+ * the content of /proc/cpuinfo and return it as a heap-allocated
+ * string that must be freed by the caller.
+ *
+ * Return NULL if the field is not found or allocation fails.
+ */
+static char* extract_cpuinfo_field(char* buffer, int buflen, const char* field) {
+  int  fieldlen = strlen(field);
+  char* bufend = buffer + buflen;
+  char* result = NULL;
+  int len, ignore;
+  const char* p, *q;
+
+  /* Look for the first field occurrence, and ensure it starts a line.
+   */
+  p = buffer;
+  bufend = buffer + buflen;
+  for (;;) {
+    p = memmem(p, bufend - p, field, fieldlen);
+    if (p == NULL)
+      goto EXIT;
+
+    if (p == buffer || p[-1] == '\n')
+      break;
+
+    p += fieldlen;
+  }
+
+  /* Skip to the first colon followed by a space */
+  p += fieldlen;
+  p  = memchr(p, ':', bufend - p);
+  if (p == NULL || p[1] != ' ')
+    goto EXIT;
+
+  /* Find the end of the line */
+  p += 2;
+  q = memchr(p, '\n', bufend - p);
+  if (q == NULL)
+    q = bufend;
+
+  /* Copy the line into a heap-allocated buffer */
+  len = q - p;
+  result = malloc(len + 1);
+  if (result == NULL)
+    goto EXIT;
+
+  memcpy(result, p, len);
+  result[len] = '\0';
+
+EXIT:
+  return result;
+}
+
+/* Count the number of occurences of a given field prefix in /proc/cpuinfo.
+ */
+static int count_cpuinfo_field(char* buffer, int buflen, const char* field) {
+  int fieldlen = strlen(field);
+  const char* p = buffer;
+  const char* bufend = buffer + buflen;
+  const char* q;
+  int count = 0;
+
+  for (;;) {
+    const char* q;
+
+    p = memmem(p, bufend - p, field, fieldlen);
+    if (p == NULL)
+      break;
+
+    /* Ensure that the field is at the start of a line */
+    if (p > buffer && p[-1] != '\n') {
+      p += fieldlen;
+      continue;
+    }
+
+
+    /* skip any whitespace */
+    q = p + fieldlen;
+    while (q < bufend && (*q == ' ' || *q == '\t'))
+      q++;
+
+    /* we must have a colon now */
+    if (q < bufend && *q == ':') {
+      count += 1;
+      q ++;
+    }
+    p = q;
+  }
+
+  return count;
+}
+
+/* Like strlen(), but for constant string literals */
+#define STRLEN_CONST(x)  (sizeof(x)-1)
+
+
+/* Checks that a space-separated list of items contains one given 'item'.
+ * Returns 1 if found, 0 otherwise.
+ */
+static int has_list_item(const char* list, const char* item) {
+  const char*  p = list;
+  int itemlen = strlen(item);
+
+  if (list == NULL)
+    return 0;
+
+  while (*p) {
+    const char*  q;
+
+    /* skip spaces */
+    while (*p == ' ' || *p == '\t')
+      p++;
+
+    /* find end of current list item */
+    q = p;
+    while (*q && *q != ' ' && *q != '\t')
+      q++;
+
+    if (itemlen == q - p && !memcmp(p, item, itemlen))
+      return 1;
+
+    /* skip to next item */
+    p = q;
+  }
+  return 0;
+}
+
+
+static void cpuInit(void) {
+  char cpuinfo[4096];
+  int  cpuinfo_len;
+
+  g_cpuFamily   = DEFAULT_CPU_FAMILY;
+  g_cpuFeatures = 0;
+  g_cpuCount    = 1;
+
+  cpuinfo_len = read_file("/proc/cpuinfo", cpuinfo, sizeof cpuinfo);
+  D("cpuinfo_len is (%d):\n%.*s\n", cpuinfo_len,
+    cpuinfo_len >= 0 ? cpuinfo_len : 0, cpuinfo);
+
+  if (cpuinfo_len < 0) { /* should not happen */
+    return;
+  }
+
+  /* Count the CPU cores, the value may be 0 for single-core CPUs */
+  g_cpuCount = count_cpuinfo_field(cpuinfo, cpuinfo_len, "processor");
+  if (g_cpuCount == 0) {
+    g_cpuCount = count_cpuinfo_field(cpuinfo, cpuinfo_len, "Processor");
+    if (g_cpuCount == 0) {
+      g_cpuCount = 1;
+    }
+  }
+
+  D("found cpuCount = %d\n", g_cpuCount);
+
+#ifdef __arm__
+  {
+    char*  features = NULL;
+    char*  architecture = NULL;
+
+    /* Extract architecture from the "CPU Architecture" field.
+     * The list is well-known, unlike the the output of
+     * the 'Processor' field which can vary greatly.
+     *
+     * See the definition of the 'proc_arch' array in
+     * $KERNEL/arch/arm/kernel/setup.c and the 'c_show' function in
+     * same file.
+     */
+    char* cpuArch = extract_cpuinfo_field(cpuinfo, cpuinfo_len,
+                                          "CPU architecture");
+
+    if (cpuArch != NULL) {
+      char*  end;
+      long   archNumber;
+      int    hasARMv7 = 0;
+
+      D("found cpuArch = '%s'\n", cpuArch);
+
+      /* read the initial decimal number, ignore the rest */
+      archNumber = strtol(cpuArch, &end, 10);
+
+      /* Here we assume that ARMv8 will be upwards compatible with v7
+          * in the future. Unfortunately, there is no 'Features' field to
+          * indicate that Thumb-2 is supported.
+          */
+      if (end > cpuArch && archNumber >= 7) {
+        hasARMv7 = 1;
+      }
+
+      /* Unfortunately, it seems that certain ARMv6-based CPUs
+       * report an incorrect architecture number of 7!
+       *
+       * We try to correct this by looking at the 'elf_format'
+       * field reported by the 'Processor' field, which is of the
+       * form of "(v7l)" for an ARMv7-based CPU, and "(v6l)" for
+       * an ARMv6-one.
+       */
+      if (hasARMv7) {
+        char* cpuProc = extract_cpuinfo_field(cpuinfo, cpuinfo_len,
+                                              "Processor");
+        if (cpuProc != NULL) {
+          D("found cpuProc = '%s'\n", cpuProc);
+          if (has_list_item(cpuProc, "(v6l)")) {
+            D("CPU processor and architecture mismatch!!\n");
+            hasARMv7 = 0;
+          }
+          free(cpuProc);
+        }
+      }
+
+      if (hasARMv7) {
+        g_cpuFeatures |= kCPUFeatureARMv7;
+      }
+
+      /* The LDREX / STREX instructions are available from ARMv6 */
+      if (archNumber >= 6) {
+        g_cpuFeatures |= kCPUFeatureLDREXSTREX;
+      }
+
+      free(cpuArch);
+    }
+
+    /* Extract the list of CPU features from 'Features' field */
+    char* cpuFeatures = extract_cpuinfo_field(cpuinfo, cpuinfo_len,
+                                              "Features");
+
+    if (cpuFeatures != NULL) {
+
+      D("found cpuFeatures = '%s'\n", cpuFeatures);
+
+      if (has_list_item(cpuFeatures, "vfpv3"))
+        g_cpuFeatures |= kCPUFeatureVFPv3;
+
+      else if (has_list_item(cpuFeatures, "vfpv3d16"))
+        g_cpuFeatures |= kCPUFeatureVFPv3;
+
+      if (has_list_item(cpuFeatures, "neon")) {
+        /* Note: Certain kernels only report neon but not vfpv3
+            *       in their features list. However, ARM mandates
+            *       that if Neon is implemented, so must be VFPv3
+            *       so always set the flag.
+            */
+        g_cpuFeatures |= kCPUFeatureNEON |
+                         kCPUFeatureVFPv3;
+      }
+      free(cpuFeatures);
+    }
+  }
+#endif  // __arm__
+
+#ifdef __i386__
+  g_cpuFamily = CPU_FAMILY_X86;
+#endif
+}
+
+
+uint64_t WebRtc_GetCPUFeaturesARM(void) {
+  pthread_once(&g_once, cpuInit);
+  return g_cpuFeatures;
+}
diff --git a/trunk/src/system_wrappers/source/cpu_info.cc b/trunk/src/system_wrappers/source/cpu_info.cc
new file mode 100644
index 0000000..e367abf
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_info.cc
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "cpu_info.h"
+
+#if defined(_WIN32)
+#include <Windows.h>
+#elif defined(WEBRTC_MAC)
+#include <sys/types.h>
+#include <sys/sysctl.h>
+#elif defined(WEBRTC_MAC_INTEL)
+// Intentionally empty
+#elif defined(WEBRTC_ANDROID)
+// Not implemented yet, might be possible to use Linux implementation
+#else // defined(WEBRTC_LINUX)
+#include <sys/sysinfo.h>
+#endif
+
+#include "trace.h"
+
+namespace webrtc {
+
+WebRtc_UWord32 CpuInfo::_numberOfCores = 0;
+
+WebRtc_UWord32 CpuInfo::DetectNumberOfCores()
+{
+    if (!_numberOfCores)
+    {
+#if defined(_WIN32)
+        SYSTEM_INFO si;
+        GetSystemInfo(&si);
+        _numberOfCores = static_cast<WebRtc_UWord32>(si.dwNumberOfProcessors);
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                     "Available number of cores:%d", _numberOfCores);
+
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+        _numberOfCores = get_nprocs();
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                     "Available number of cores:%d", _numberOfCores);
+
+#elif (defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL))
+        int name[] = {CTL_HW, HW_AVAILCPU};
+        int ncpu;
+        size_t size = sizeof(ncpu);
+        if(0 == sysctl(name, 2, &ncpu, &size, NULL, 0))
+        {
+            _numberOfCores = static_cast<WebRtc_UWord32>(ncpu);
+            WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                         "Available number of cores:%d", _numberOfCores);
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                         "Failed to get number of cores");
+            _numberOfCores = 1;
+        }
+#else
+        WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1,
+                     "No function to get number of cores");
+        _numberOfCores = 1;
+#endif
+    }
+    return _numberOfCores;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_linux.cc b/trunk/src/system_wrappers/source/cpu_linux.cc
new file mode 100644
index 0000000..8e8ecda
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_linux.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "cpu_linux.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+namespace webrtc {
+CpuLinux::CpuLinux()
+    : m_oldBusyTime(0),
+      m_oldIdleTime(0),
+      m_oldBusyTimeMulti(NULL),
+      m_oldIdleTimeMulti(NULL),
+      m_idleArray(NULL),
+      m_busyArray(NULL),
+      m_resultArray(NULL),
+      m_numCores(0) {
+    const int result = GetNumCores();
+    if (result != -1) {
+      m_numCores = result;
+      m_oldBusyTimeMulti = new long long[m_numCores];
+      memset(m_oldBusyTimeMulti, 0, sizeof(long long) * m_numCores);
+      m_oldIdleTimeMulti = new long long[m_numCores];
+      memset(m_oldIdleTimeMulti, 0, sizeof(long long) * m_numCores);
+      m_idleArray = new long long[m_numCores];
+      memset(m_idleArray, 0, sizeof(long long) * m_numCores);
+      m_busyArray = new long long[m_numCores];
+      memset(m_busyArray, 0, sizeof(long long) * m_numCores);
+      m_resultArray = new WebRtc_UWord32[m_numCores];
+
+      GetData(m_oldBusyTime, m_oldIdleTime, m_busyArray, m_idleArray);
+    }
+}
+
+CpuLinux::~CpuLinux()
+{
+    delete [] m_oldBusyTimeMulti;
+    delete [] m_oldIdleTimeMulti;
+    delete [] m_idleArray;
+    delete [] m_busyArray;
+    delete [] m_resultArray;
+}
+
+WebRtc_Word32 CpuLinux::CpuUsage()
+{
+    WebRtc_UWord32 dummy = 0;
+    WebRtc_UWord32* dummyArray = NULL;
+    return CpuUsageMultiCore(dummy, dummyArray);
+}
+
+WebRtc_Word32 CpuLinux::CpuUsageMultiCore(WebRtc_UWord32& numCores,
+                                          WebRtc_UWord32*& coreArray)
+{
+    coreArray = m_resultArray;
+    numCores = m_numCores;
+    long long busy = 0;
+    long long idle = 0;
+    if (GetData(busy, idle, m_busyArray, m_idleArray) != 0)
+        return -1;
+
+    long long deltaBusy = busy - m_oldBusyTime;
+    long long deltaIdle = idle - m_oldIdleTime;
+    m_oldBusyTime = busy;
+    m_oldIdleTime = idle;
+
+    int retVal = -1;
+    if (deltaBusy + deltaIdle == 0)
+    {
+        retVal = 0;
+    }
+    else
+    {
+        retVal = (int)(100 * (deltaBusy) / (deltaBusy + deltaIdle));
+    }
+
+    if (coreArray == NULL)
+    {
+      return retVal;
+    }
+
+    for (WebRtc_UWord32 i = 0; i < m_numCores; i++)
+    {
+        deltaBusy = m_busyArray[i] - m_oldBusyTimeMulti[i];
+        deltaIdle = m_idleArray[i] - m_oldIdleTimeMulti[i];
+        m_oldBusyTimeMulti[i] = m_busyArray[i];
+        m_oldIdleTimeMulti[i] = m_idleArray[i];
+        if(deltaBusy + deltaIdle == 0)
+        {
+            coreArray[i] = 0;
+        }
+        else
+        {
+            coreArray[i] = (int)(100 * (deltaBusy) / (deltaBusy+deltaIdle));
+        }
+    }
+    return retVal;
+}
+
+
+int CpuLinux::GetData(long long& busy, long long& idle, long long*& busyArray,
+                      long long*& idleArray)
+{
+    FILE* fp = fopen("/proc/stat", "r");
+    if (!fp)
+    {
+        return -1;
+    }
+
+    char line[100];
+    if (fgets(line, 100, fp) == NULL) {
+        fclose(fp);
+        return -1;
+    }
+    char firstWord[100];
+    if (sscanf(line, "%s ", firstWord) != 1) {
+        fclose(fp);
+        return -1;
+    }
+    if (strncmp(firstWord, "cpu", 3) != 0) {
+        fclose(fp);
+        return -1;
+    }
+    char sUser[100];
+    char sNice[100];
+    char sSystem[100];
+    char sIdle[100];
+    if (sscanf(line, "%s %s %s %s %s ",
+               firstWord, sUser, sNice, sSystem, sIdle) != 5) {
+        fclose(fp);
+        return -1;
+    }
+    long long luser = atoll(sUser);
+    long long lnice = atoll(sNice);
+    long long lsystem = atoll(sSystem);
+    long long lidle = atoll (sIdle);
+
+    busy = luser + lnice + lsystem;
+    idle = lidle;
+    for (WebRtc_UWord32 i = 0; i < m_numCores; i++)
+    {
+        if (fgets(line, 100, fp) == NULL) {
+            fclose(fp);
+            return -1;
+        }
+        if (sscanf(line, "%s %s %s %s %s ", firstWord, sUser, sNice, sSystem,
+                   sIdle) != 5) {
+            fclose(fp);
+            return -1;
+        }
+        luser = atoll(sUser);
+        lnice = atoll(sNice);
+        lsystem = atoll(sSystem);
+        lidle = atoll (sIdle);
+        busyArray[i] = luser + lnice + lsystem;
+        idleArray[i] = lidle;
+    }
+    fclose(fp);
+    return 0;
+}
+
+int CpuLinux::GetNumCores()
+{
+    FILE* fp = fopen("/proc/stat", "r");
+    if (!fp)
+    {
+        return -1;
+    }
+    // Skip first line
+    char line[100];
+    if (!fgets(line, 100, fp))
+    {
+        fclose(fp);
+        return -1;
+    }
+    int numCores = -1;
+    char firstWord[100];
+    do
+    {
+        numCores++;
+        if (fgets(line, 100, fp))
+        {
+            if (sscanf(line, "%s ", firstWord) != 1) {
+                firstWord[0] = '\0';
+            }
+        } else {
+            break;
+        }
+    } while (strncmp(firstWord, "cpu", 3) == 0);
+    fclose(fp);
+    return numCores;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_linux.h b/trunk/src/system_wrappers/source/cpu_linux.h
new file mode 100644
index 0000000..9b22e83
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_linux.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_LINUX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_LINUX_H_
+
+#include "cpu_wrapper.h"
+
+namespace webrtc {
+class CpuLinux : public CpuWrapper
+{
+public:
+    CpuLinux();
+    virtual ~CpuLinux();
+
+    virtual WebRtc_Word32 CpuUsage();
+    virtual WebRtc_Word32 CpuUsage(WebRtc_Word8* /*pProcessName*/,
+                                   WebRtc_UWord32 /*length*/) {return 0;}
+    virtual WebRtc_Word32 CpuUsage(WebRtc_UWord32 /*dwProcessID*/) {return 0;}
+
+    virtual WebRtc_Word32 CpuUsageMultiCore(WebRtc_UWord32& numCores,
+                                            WebRtc_UWord32*& array);
+
+    virtual void Reset() {return;}
+    virtual void Stop() {return;}
+private:
+    int GetData(long long& busy, long long& idle, long long*& busyArray,
+                long long*& idleArray);
+    int GetNumCores();
+
+    long long m_oldBusyTime;
+    long long m_oldIdleTime;
+
+    long long* m_oldBusyTimeMulti;
+    long long* m_oldIdleTimeMulti;
+
+    long long* m_idleArray;
+    long long* m_busyArray;
+    WebRtc_UWord32* m_resultArray;
+    WebRtc_UWord32  m_numCores;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_LINUX_H_
diff --git a/trunk/src/system_wrappers/source/cpu_mac.cc b/trunk/src/system_wrappers/source/cpu_mac.cc
new file mode 100644
index 0000000..d82bf07
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_mac.cc
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "cpu_mac.h"
+
+#include <iostream>
+#include <mach/mach.h>
+#include <mach/mach_error.h>
+
+#include "tick_util.h"
+
+namespace webrtc {
+CpuWrapperMac::CpuWrapperMac()
+    : _cpuCount(0),
+      _cpuUsage(NULL), 
+      _totalCpuUsage(0),
+      _lastTickCount(NULL),
+      _lastTime(0)
+{
+    natural_t cpuCount;
+    processor_info_array_t infoArray;
+    mach_msg_type_number_t infoCount;
+
+    kern_return_t error = host_processor_info(mach_host_self(),
+                                              PROCESSOR_CPU_LOAD_INFO,
+                                              &cpuCount,
+                                              &infoArray,
+                                              &infoCount);
+    if (error)
+    {
+        return;
+    }
+
+    _cpuCount = cpuCount;
+    _cpuUsage = new WebRtc_UWord32[cpuCount];
+    _lastTickCount = new WebRtc_Word64[cpuCount];
+    _lastTime = TickTime::MillisecondTimestamp();
+
+    processor_cpu_load_info_data_t* cpuLoadInfo =
+        (processor_cpu_load_info_data_t*) infoArray;
+    for (unsigned int cpu= 0; cpu < cpuCount; cpu++)
+    {
+        WebRtc_Word64 ticks = 0;
+        for (int state = 0; state < 2; state++)
+        {
+            ticks += cpuLoadInfo[cpu].cpu_ticks[state];
+        }
+        _lastTickCount[cpu] = ticks;
+        _cpuUsage[cpu] = 0;
+    }
+    vm_deallocate(mach_task_self(), (vm_address_t)infoArray, infoCount);
+}
+
+CpuWrapperMac::~CpuWrapperMac()
+{
+    delete[] _cpuUsage;
+    delete[] _lastTickCount;
+}
+
+WebRtc_Word32 CpuWrapperMac::CpuUsage()
+{
+    WebRtc_UWord32 numCores;
+    WebRtc_UWord32* array = NULL;
+    return CpuUsageMultiCore(numCores, array);
+}
+
+WebRtc_Word32
+CpuWrapperMac::CpuUsageMultiCore(WebRtc_UWord32& numCores,
+                                 WebRtc_UWord32*& array)
+{
+    // sanity check
+    if(_cpuUsage == NULL)
+    {
+        return -1;
+    }
+    
+    WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+    WebRtc_Word64 timeDiffMS = now - _lastTime;
+    if(timeDiffMS >= 500) 
+    {
+        if(Update(timeDiffMS) != 0) 
+        {
+           return -1;
+        }
+        _lastTime = now;
+    }
+    
+    numCores = _cpuCount;
+    array = _cpuUsage;
+    return _totalCpuUsage / _cpuCount;
+}
+
+WebRtc_Word32 CpuWrapperMac::Update(WebRtc_Word64 timeDiffMS)
+{    
+    natural_t cpuCount;
+    processor_info_array_t infoArray;
+    mach_msg_type_number_t infoCount;
+    
+    kern_return_t error = host_processor_info(mach_host_self(),
+                                              PROCESSOR_CPU_LOAD_INFO,
+                                              &cpuCount,
+                                              &infoArray,
+                                              &infoCount);
+    if (error)
+    {
+        return -1;
+    }
+
+    processor_cpu_load_info_data_t* cpuLoadInfo =
+        (processor_cpu_load_info_data_t*) infoArray;
+
+    _totalCpuUsage = 0;
+    for (unsigned int cpu = 0; cpu < cpuCount; cpu++)
+    {
+        WebRtc_Word64 ticks = 0;
+        for (int state = 0; state < 2; state++)
+        {
+            ticks += cpuLoadInfo[cpu].cpu_ticks[state];
+        }
+        if(timeDiffMS <= 0)
+        {
+            _cpuUsage[cpu] = 0;
+        }else {
+            _cpuUsage[cpu] = (WebRtc_UWord32)((1000 *
+                                              (ticks - _lastTickCount[cpu])) /
+                                              timeDiffMS);
+        }
+        _lastTickCount[cpu] = ticks;
+        _totalCpuUsage += _cpuUsage[cpu];
+    }
+
+    vm_deallocate(mach_task_self(), (vm_address_t)infoArray, infoCount);
+
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_mac.h b/trunk/src/system_wrappers/source/cpu_mac.h
new file mode 100644
index 0000000..f9f8207
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_mac.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_MAC_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_MAC_H_
+
+#include "cpu_wrapper.h"
+
+namespace webrtc {
+class CpuWrapperMac : public CpuWrapper
+{
+public:
+    CpuWrapperMac();
+    virtual ~CpuWrapperMac();
+
+    virtual WebRtc_Word32 CpuUsage();
+    virtual WebRtc_Word32 CpuUsage(WebRtc_Word8* /*pProcessName*/,
+                                   WebRtc_UWord32 /*length*/) {return -1;}
+    virtual WebRtc_Word32 CpuUsage(WebRtc_UWord32  /*dwProcessID*/) {return -1;}
+
+    // Note: the measurement is only refreshed if at least 500 ms have
+    // elapsed since the previous update; otherwise the most recently
+    // computed values are returned.
+    virtual WebRtc_Word32 CpuUsageMultiCore(WebRtc_UWord32& numCores,
+                                            WebRtc_UWord32*& array);
+
+    virtual void Reset() {}
+    virtual void Stop() {}
+
+private:
+    WebRtc_Word32 Update(WebRtc_Word64 timeDiffMS);
+    
+    WebRtc_UWord32  _cpuCount;
+    WebRtc_UWord32* _cpuUsage;
+    WebRtc_Word32   _totalCpuUsage;
+    WebRtc_Word64*  _lastTickCount;
+    WebRtc_Word64   _lastTime;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_MAC_H_
diff --git a/trunk/src/system_wrappers/source/cpu_measurement_harness.cc b/trunk/src/system_wrappers/source/cpu_measurement_harness.cc
new file mode 100644
index 0000000..237e776
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_measurement_harness.cc
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/interface/cpu_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/source/cpu_measurement_harness.h"
+
+const int kCpuCheckPeriodMs = 100;
+
+namespace webrtc {
+
+CpuMeasurementHarness* CpuMeasurementHarness::Create(
+    CpuTarget* target,
+    int work_period_ms,
+    int work_iterations_per_period,
+    int duration_ms) {
+  if (target == NULL) {
+    return NULL;
+  }
+  if (work_period_ms > duration_ms) {
+    return NULL;
+  }
+  if (work_period_ms < 0) {
+    return NULL;
+  }
+  if (duration_ms < 0) {
+    return NULL;
+  }
+  if (work_iterations_per_period < 1) {
+    return NULL;
+  }
+  return new CpuMeasurementHarness(target, work_period_ms,
+                                   work_iterations_per_period, duration_ms);
+}
+
+CpuMeasurementHarness::CpuMeasurementHarness(CpuTarget* target,
+                                             int work_period_ms,
+                                             int work_iterations_per_period,
+                                             int duration_ms)
+    : cpu_target_(target),
+      work_period_ms_(work_period_ms),
+      work_iterations_per_period_(work_iterations_per_period),
+      duration_ms_(duration_ms),
+      cpu_sum_(0),
+      cpu_iterations_(0),
+      cpu_(CpuWrapper::CreateCpu()),
+      event_(EventWrapper::Create()) {
+}
+
+CpuMeasurementHarness::~CpuMeasurementHarness() {
+}
+
+bool CpuMeasurementHarness::Run() {
+  if (!WaitForCpuInit()) {
+    return false;
+  }
+  // No need for precision. Run for approximately the asked for duration.
+  // TODO(hellner): very low prio if at all, the actual duration of the test
+  // will be longer if calling DoWork() is not negligable and/or called many
+  // times. It may make sense to compensate for drift here. This will,
+  // however, only add complexity with minimal gains. Perhaps renaming the
+  // duration_ms_ to something more fuzzy is a better idea. However, the name
+  // would be very convoluted if it is to be self documenting.
+  int elapsed_time_ms = 0;
+  int last_measured_time = 0;
+  while (elapsed_time_ms < duration_ms_) {
+    if (((elapsed_time_ms - last_measured_time) / kCpuCheckPeriodMs) >= 1) {
+      last_measured_time = elapsed_time_ms;
+      Measure();
+    }
+    if (!DoWork()) {
+      return false;
+    }
+    event_->Wait(work_period_ms_);
+    elapsed_time_ms += work_period_ms_;
+  }
+  return true;
+}
+
+int CpuMeasurementHarness::AverageCpu() {
+  if (cpu_iterations_ == 0) {
+    return 0;
+  }
+  assert(cpu_sum_ >= 0);
+  assert(cpu_iterations_ >= 0);
+  return cpu_sum_ / cpu_iterations_;
+}
+
+bool CpuMeasurementHarness::WaitForCpuInit() {
+  bool cpu_usage_available = false;
+  int num_iterations = 0;
+  // Initializing the CPU measurements may take a couple of seconds on Windows.
+  // Since the initialization is lazy we need to wait until it is completed.
+  // Should not take more than 10000 ms.
+  while (!cpu_usage_available && (++num_iterations < 10000)) {
+    event_->Wait(1);
+    cpu_usage_available = cpu_->CpuUsage() != -1;
+  }
+  return cpu_usage_available;
+}
+
+void CpuMeasurementHarness::Measure() {
+  WebRtc_UWord32 num_cores = 0;
+  WebRtc_UWord32* cores = NULL;
+  // Return the average CPU for now.
+  cpu_sum_ = cpu_->CpuUsageMultiCore(num_cores, cores);
+  ++cpu_iterations_;
+}
+
+bool CpuMeasurementHarness::DoWork() {
+  for (int i = 0; i < work_iterations_per_period_; ++i) {
+    if (!cpu_target_->DoWork()) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_measurement_harness.h b/trunk/src/system_wrappers/source/cpu_measurement_harness.h
new file mode 100644
index 0000000..3b87f27
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_measurement_harness.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_SYSTEM_WRAPPERS_SOURCE_CPU_MEASUREMENT_HARNESS_H_
+#define SRC_SYSTEM_WRAPPERS_SOURCE_CPU_MEASUREMENT_HARNESS_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CpuWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+// This abstract class provides an interface that should be passed to
+// CpuMeasurementHarness. CpuMeasurementHarness will call it with the
+// frequency requested and measure the CPU usage for all calls.
+class CpuTarget {
+ public:
+  // Callback function for which the CPU usage should be calculated.
+  virtual bool DoWork() = 0;
+
+ protected:
+  CpuTarget() {}
+  virtual ~CpuTarget() {}
+};
+
+class CpuMeasurementHarness {
+ public:
+  static CpuMeasurementHarness* Create(CpuTarget* target,
+                                       int work_period_ms,
+                                       int work_iterations_per_period,
+                                       int duration_ms);
+  ~CpuMeasurementHarness();
+  bool Run();
+  int AverageCpu();
+
+ protected:
+  CpuMeasurementHarness(CpuTarget* target, int work_period_ms,
+                        int work_iterations_per_period, int duration_ms);
+
+ private:
+  bool WaitForCpuInit();
+  void Measure();
+  bool DoWork();
+
+  CpuTarget* cpu_target_;
+  const int work_period_ms_;
+  const int work_iterations_per_period_;
+  const int duration_ms_;
+  int cpu_sum_;
+  int cpu_iterations_;
+  scoped_ptr<CpuWrapper> cpu_;
+  scoped_ptr<EventWrapper> event_;
+};
+
+}  // namespace webrtc
+
+#endif  // SRC_SYSTEM_WRAPPERS_SOURCE_CPU_MEASUREMENT_HARNESS_H_
diff --git a/trunk/src/system_wrappers/source/cpu_no_op.cc b/trunk/src/system_wrappers/source/cpu_no_op.cc
new file mode 100644
index 0000000..e42ef91
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_no_op.cc
@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+
+#include "cpu_wrapper.h"
+
+namespace webrtc {
+
+CpuWrapper* CpuWrapper::CreateCpu()
+{
+    return NULL;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_win.cc b/trunk/src/system_wrappers/source/cpu_win.cc
new file mode 100644
index 0000000..5792023
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_win.cc
@@ -0,0 +1,534 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "cpu_win.h"
+
+#define _WIN32_DCOM
+
+#include <assert.h>
+#include <iostream>
+#include <Wbemidl.h>
+
+#pragma comment(lib, "wbemuuid.lib")
+
+#include "condition_variable_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+
+namespace webrtc {
+WebRtc_Word32 CpuWindows::CpuUsage()
+{
+    if (!has_initialized_)
+    {
+        return -1;
+    }
+    // Last element is the average
+    return cpu_usage_[number_of_objects_ - 1];
+}
+
+WebRtc_Word32 CpuWindows::CpuUsageMultiCore(WebRtc_UWord32& num_cores,
+                                            WebRtc_UWord32*& cpu_usage)
+{
+    if (has_terminated_) {
+        num_cores = 0;
+        cpu_usage = NULL;
+        return -1;
+    }
+    if (!has_initialized_)
+    {
+        num_cores = 0;
+        cpu_usage = NULL;
+        return -1;
+    }
+    num_cores = number_of_objects_ - 1;
+    cpu_usage = cpu_usage_;
+    return cpu_usage_[number_of_objects_-1];
+}
+
+CpuWindows::CpuWindows()
+    : cpu_polling_thread(NULL),
+      initialize_(true),
+      has_initialized_(false),
+      terminate_(false),
+      has_terminated_(false),
+      cpu_usage_(NULL),
+      wbem_enum_access_(NULL),
+      number_of_objects_(0),
+      cpu_usage_handle_(0),
+      previous_processor_timestamp_(NULL),
+      timestamp_sys_100_ns_handle_(0),
+      previous_100ns_timestamp_(NULL),
+      wbem_service_(NULL),
+      wbem_service_proxy_(NULL),
+      wbem_refresher_(NULL),
+      wbem_enum_(NULL)
+{
+    // All resources are allocated in PollingCpu().
+    if (AllocateComplexDataTypes())
+    {
+        StartPollingCpu();
+    }
+    else
+    {
+        assert(false);
+    }
+}
+
+CpuWindows::~CpuWindows()
+{
+    // All resources are reclaimed in StopPollingCpu().
+    const bool success = StopPollingCpu();
+    assert(success);
+    DeAllocateComplexDataTypes();
+}
+
+bool CpuWindows::AllocateComplexDataTypes()
+{
+    cpu_polling_thread = ThreadWrapper::CreateThread(
+        CpuWindows::Process,
+        reinterpret_cast<void*>(this),
+        kNormalPriority,
+        "CpuWindows");
+    init_crit_ = CriticalSectionWrapper::CreateCriticalSection();
+    init_cond_ = ConditionVariableWrapper::CreateConditionVariable();
+    terminate_crit_ = CriticalSectionWrapper::CreateCriticalSection();
+    terminate_cond_ = ConditionVariableWrapper::CreateConditionVariable();
+    sleep_event = EventWrapper::Create();
+    return (cpu_polling_thread != NULL) && (init_crit_ != NULL) &&
+           (init_cond_ != NULL) && (terminate_crit_ != NULL) &&
+           (terminate_cond_ != NULL) && (sleep_event != NULL);
+}
+
+void CpuWindows::DeAllocateComplexDataTypes()
+{
+    if (sleep_event != NULL)
+    {
+        delete sleep_event;
+        sleep_event = NULL;
+    }
+    if (terminate_cond_ != NULL)
+    {
+        delete terminate_cond_;
+        terminate_cond_ = NULL;
+    }
+    if (terminate_crit_ != NULL)
+    {
+        delete terminate_crit_;
+        terminate_crit_ = NULL;
+    }
+    if (init_cond_ != NULL)
+    {
+        delete init_cond_;
+        init_cond_ = NULL;
+    }
+    if (init_crit_ != NULL)
+    {
+        delete init_crit_;
+        init_crit_ = NULL;
+    }
+    if (cpu_polling_thread != NULL)
+    {
+        delete cpu_polling_thread;
+        cpu_polling_thread = NULL;
+    }
+}
+
+void CpuWindows::StartPollingCpu()
+{
+    unsigned int dummy_id = 0;
+    if (!cpu_polling_thread->Start(dummy_id))
+    {
+        initialize_ = false;
+        has_terminated_ = true;
+        assert(false);
+    }
+}
+
+bool CpuWindows::StopPollingCpu()
+{
+    {
+        // If StopPollingCpu is called immediately after StartPollingCpu() it is
+        // possible that cpu_polling_thread is in the process of initializing.
+        // Let initialization finish to avoid getting into a bad state.
+        CriticalSectionScoped cs(init_crit_);
+        while(initialize_)
+        {
+            init_cond_->SleepCS(*init_crit_);
+        }
+    }
+
+    CriticalSectionScoped cs(terminate_crit_);
+    terminate_ = true;
+    sleep_event->Set();
+    while (!has_terminated_)
+    {
+        terminate_cond_->SleepCS(*terminate_crit_);
+    }
+    cpu_polling_thread->Stop();
+    delete cpu_polling_thread;
+    cpu_polling_thread = NULL;
+    return true;
+}
+
+bool CpuWindows::Process(void* thread_object)
+{
+    return reinterpret_cast<CpuWindows*>(thread_object)->ProcessImpl();
+}
+
+bool CpuWindows::ProcessImpl()
+{
+    {
+        CriticalSectionScoped cs(terminate_crit_);
+        if (terminate_)
+        {
+            const bool success = Terminate();
+            assert(success);
+            terminate_cond_->WakeAll();
+            return false;
+        }
+    }
+    // Initialize on first iteration
+    if (initialize_)
+    {
+        CriticalSectionScoped cs(init_crit_);
+        initialize_ = false;
+        const bool success = Initialize();
+        init_cond_->WakeAll();
+        if (!success || !has_initialized_)
+        {
+            has_initialized_ = false;
+            terminate_ = true;
+            return true;
+        }
+    }
+    // Approximately one second of sleep for each CPU measurement. Precision is
+    // not important. 1 second refresh rate is also used by Performance Monitor
+    // (perfmon).
+    if(kEventTimeout != sleep_event->Wait(1000))
+    {
+        // Terminating. No need to update CPU usage.
+        assert(terminate_);
+        return true;
+    }
+
+    // UpdateCpuUsage() returns false if a single (or more) CPU read(s) failed.
+    // Not a major problem if it happens but make sure it doesn't trigger in
+    // debug.
+    const bool success = UpdateCpuUsage();
+    assert(success);
+    return true;
+}
+
+bool CpuWindows::CreateWmiConnection()
+{
+    IWbemLocator* service_locator = NULL;
+    HRESULT hr = CoCreateInstance(CLSID_WbemLocator, NULL,
+                                  CLSCTX_INPROC_SERVER, IID_IWbemLocator,
+                                  reinterpret_cast<void**> (&service_locator));
+    if (FAILED(hr))
+    {
+        return false;
+    }
+    // To get the WMI service specify the WMI namespace.
+    BSTR wmi_namespace = SysAllocString(L"\\\\.\\root\\cimv2");
+    if (wmi_namespace == NULL)
+    {
+        // This type of failure signifies running out of memory.
+        service_locator->Release();
+        return false;
+    }
+    hr = service_locator->ConnectServer(wmi_namespace, NULL, NULL, NULL, 0L,
+                                        NULL, NULL, &wbem_service_);
+    SysFreeString(wmi_namespace);
+    service_locator->Release();
+    return !FAILED(hr);
+}
+
+// Sets up WMI refresher and enum
+bool CpuWindows::CreatePerfOsRefresher()
+{
+    // Create refresher.
+    HRESULT hr = CoCreateInstance(CLSID_WbemRefresher, NULL,
+                                  CLSCTX_INPROC_SERVER, IID_IWbemRefresher,
+                                  reinterpret_cast<void**> (&wbem_refresher_));
+    if (FAILED(hr))
+    {
+        return false;
+    }
+    // Create PerfOS_Processor enum.
+    IWbemConfigureRefresher* wbem_refresher_config = NULL;
+    hr = wbem_refresher_->QueryInterface(
+        IID_IWbemConfigureRefresher,
+        reinterpret_cast<void**> (&wbem_refresher_config));
+    if (FAILED(hr))
+    {
+        return false;
+    }
+
+    // Create a proxy to the IWbemServices so that a local authentication
+    // can be set up (this is needed to be able to successfully call 
+    // IWbemConfigureRefresher::AddEnum). Setting authentication with
+    // CoInitializeSecurity is process-wide (which is too intrusive).
+    hr = CoCopyProxy(static_cast<IUnknown*> (wbem_service_),
+                     reinterpret_cast<IUnknown**> (&wbem_service_proxy_));
+    if(FAILED(hr))
+    {
+        return false;
+    }
+    // Set local authentication.
+    // RPC_C_AUTHN_WINNT means using NTLM instead of Kerberos which is default.
+    hr = CoSetProxyBlanket(static_cast<IUnknown*> (wbem_service_proxy_),
+                           RPC_C_AUTHN_WINNT, RPC_C_AUTHZ_NONE, NULL,
+                           RPC_C_AUTHN_LEVEL_DEFAULT,
+                           RPC_C_IMP_LEVEL_IMPERSONATE, NULL, EOAC_NONE);
+    if(FAILED(hr))
+    {
+        return false;
+    }
+
+    // Don't care about the particular id for the enum.
+    long enum_id = 0;
+    hr = wbem_refresher_config->AddEnum(wbem_service_proxy_,
+                                        L"Win32_PerfRawData_PerfOS_Processor",
+                                        0, NULL, &wbem_enum_, &enum_id);
+    wbem_refresher_config->Release();
+    wbem_refresher_config = NULL;
+    return !FAILED(hr);
+}
+
+// Have to pull the first round of data to be able to set the handles.
+bool CpuWindows::CreatePerfOsCpuHandles()
+{
+    // Update the refresher so that there is data available in wbem_enum_.
+    wbem_refresher_->Refresh(0L);
+
+    // The number of enumerators is the number of processor + 1 (the total).
+    // This is unknown at this point.
+    DWORD number_returned = 0;
+    HRESULT hr = wbem_enum_->GetObjects(0L, number_of_objects_,
+                                        wbem_enum_access_, &number_returned);
+    // number_returned indicates the number of enumerators that are needed.
+    if (hr == WBEM_E_BUFFER_TOO_SMALL &&
+        number_returned > number_of_objects_)
+    {
+        // Allocate the number of IWbemObjectAccess pointers asked for by the
+        // GetObjects(..) function.
+        wbem_enum_access_ = new IWbemObjectAccess*[number_returned];
+        cpu_usage_ = new WebRtc_UWord32[number_returned];
+        previous_processor_timestamp_ = new unsigned __int64[number_returned];
+        previous_100ns_timestamp_ = new unsigned __int64[number_returned];
+        if ((wbem_enum_access_ == NULL) || (cpu_usage_ == NULL) ||
+            (previous_processor_timestamp_ == NULL) ||
+            (previous_100ns_timestamp_ == NULL))
+        {
+            // Out of memory.
+            return false;
+        }
+
+        SecureZeroMemory(wbem_enum_access_, number_returned *
+                         sizeof(IWbemObjectAccess*));
+        memset(cpu_usage_, 0, sizeof(int) * number_returned);
+        memset(previous_processor_timestamp_, 0, sizeof(unsigned __int64) *
+               number_returned);
+        memset(previous_100ns_timestamp_, 0, sizeof(unsigned __int64) *
+               number_returned);
+
+        number_of_objects_ = number_returned;
+        // Read should be successful now that memory has been allocated.
+        hr = wbem_enum_->GetObjects(0L, number_of_objects_, wbem_enum_access_,
+                                    &number_returned);
+        if (FAILED(hr))
+        {
+            return false;
+        }
+    }
+    else
+    {
+        // Getting zero enumerators should not happen; something has gone wrong.
+        return false;
+    }
+
+    // Get the enumerator handles that are needed for calculating CPU usage.
+    CIMTYPE cpu_usage_type;
+    hr = wbem_enum_access_[0]->GetPropertyHandle(L"PercentProcessorTime",
+                                                 &cpu_usage_type,
+                                                 &cpu_usage_handle_);
+    if (FAILED(hr))
+    {
+        return false;
+    }
+    CIMTYPE timestamp_sys_100_ns_type;
+    hr = wbem_enum_access_[0]->GetPropertyHandle(L"TimeStamp_Sys100NS",
+                                                 &timestamp_sys_100_ns_type,
+                                                 &timestamp_sys_100_ns_handle_);
+    return !FAILED(hr);
+}
+
+bool CpuWindows::Initialize()
+{
+    if (terminate_)
+    {
+        return false;
+    }
+    // Initialize COM library.
+    HRESULT hr = CoInitializeEx(NULL,COINIT_MULTITHREADED);
+    if (FAILED(hr))
+    {
+        return false;
+    }
+    if (FAILED(hr))
+    {
+        return false;
+    }
+
+    if (!CreateWmiConnection())
+    {
+        return false;
+    }
+    if (!CreatePerfOsRefresher())
+    {
+        return false;
+    }
+    if (!CreatePerfOsCpuHandles())
+    {
+        return false;
+    }
+    has_initialized_ = true;
+    return true;
+}
+
+bool CpuWindows::Terminate()
+{
+    if (has_terminated_)
+    {
+        return false;
+    }
+    // Reverse order of Initialize().
+    // Some compilers complain about deleting NULL even though it is well defined.
+    if (previous_100ns_timestamp_ != NULL)
+    {
+        delete[] previous_100ns_timestamp_;
+        previous_100ns_timestamp_ = NULL;
+    }
+    if (previous_processor_timestamp_ != NULL)
+    {
+        delete[] previous_processor_timestamp_;
+        previous_processor_timestamp_ = NULL;
+    }
+    if (cpu_usage_ != NULL)
+    {
+        delete[] cpu_usage_;
+        cpu_usage_ = NULL;
+    }
+    if (wbem_enum_access_ != NULL)
+    {
+        for (DWORD i = 0; i < number_of_objects_; i++)
+        {
+            if(wbem_enum_access_[i] != NULL)
+            {
+                wbem_enum_access_[i]->Release();
+            }
+        }
+        delete[] wbem_enum_access_;
+        wbem_enum_access_ = NULL;
+    }
+    if (wbem_enum_ != NULL)
+    {
+        wbem_enum_->Release();
+        wbem_enum_ = NULL;
+    }    
+    if (wbem_refresher_ != NULL)
+    {
+        wbem_refresher_->Release();
+        wbem_refresher_ = NULL;
+    }
+    if (wbem_service_proxy_ != NULL)
+    {
+        wbem_service_proxy_->Release();
+        wbem_service_proxy_ = NULL;
+    }
+    if (wbem_service_ != NULL)
+    {
+        wbem_service_->Release();
+        wbem_service_ = NULL;
+    }
+    // CoUninitialize should be called once for every CoInitializeEx,
+    // regardless of whether it failed or not.
+    CoUninitialize();
+    has_terminated_ = true;
+    return true;
+}
+
+bool CpuWindows::UpdateCpuUsage()
+{
+    wbem_refresher_->Refresh(0L);
+    DWORD number_returned = 0;
+    HRESULT hr = wbem_enum_->GetObjects(0L, number_of_objects_,
+                                        wbem_enum_access_,&number_returned);
+    if (FAILED(hr))
+    {
+        // wbem_enum_access_ has already been allocated. Unless the number of
+        // CPUs change runtime this should not happen.
+        return false;
+    }
+    unsigned __int64 cpu_usage = 0;
+    unsigned __int64 timestamp_100ns = 0;
+    bool returnValue = true;
+    for (DWORD i = 0; i < number_returned; i++)
+    {
+        hr = wbem_enum_access_[i]->ReadQWORD(cpu_usage_handle_,&cpu_usage);
+        if (FAILED(hr))
+        {
+            returnValue = false;
+        }
+        hr = wbem_enum_access_[i]->ReadQWORD(timestamp_sys_100_ns_handle_,
+                                             &timestamp_100ns);
+        if (FAILED(hr))
+        {
+            returnValue = false;
+        }
+        wbem_enum_access_[i]->Release();
+        wbem_enum_access_[i] = NULL;
+
+        const bool wrapparound =
+            (previous_processor_timestamp_[i] > cpu_usage) ||
+            (previous_100ns_timestamp_[i] > timestamp_100ns);
+        const bool first_time = (previous_processor_timestamp_[i] == 0) ||
+                                (previous_100ns_timestamp_[i] == 0);
+        if (wrapparound || first_time)
+        {
+            previous_processor_timestamp_[i] = cpu_usage;
+            previous_100ns_timestamp_[i] = timestamp_100ns;
+            continue;
+        }
+        const unsigned __int64 processor_timestamp_delta =
+            cpu_usage - previous_processor_timestamp_[i];
+        const unsigned __int64 timestamp_100ns_delta =
+            timestamp_100ns - previous_100ns_timestamp_[i];
+
+        if (processor_timestamp_delta >= timestamp_100ns_delta)
+        {
+            cpu_usage_[i] = 0;
+        } else {
+            // Quotient must be float since the division is guaranteed to yield
+            // a value between 0 and 1 which is 0 in integer division.
+            const float delta_quotient =
+                static_cast<float>(processor_timestamp_delta) /
+                static_cast<float>(timestamp_100ns_delta);
+            cpu_usage_[i] = 100 - static_cast<WebRtc_UWord32>(delta_quotient *
+                                                              100);
+        }
+        previous_processor_timestamp_[i] = cpu_usage;
+        previous_100ns_timestamp_[i] = timestamp_100ns;
+    }
+    return returnValue;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/cpu_win.h b/trunk/src/system_wrappers/source/cpu_win.h
new file mode 100644
index 0000000..d15073c
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_win.h
@@ -0,0 +1,103 @@
+// This file contains a Windows implementation of CpuWrapper.
+// Note: Windows XP, Windows Server 2003 are the minimum requirements.
+//       The requirements are due to the implementation being based on
+//       WMI.
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_WINDOWS_NO_CPOL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_WINDOWS_NO_CPOL_H_
+
+#include "cpu_wrapper.h"
+
+#include <Wbemidl.h>
+
+namespace webrtc {
+class ConditionVariableWrapper;
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+class CpuWindows : public CpuWrapper
+{
+public:
+    virtual WebRtc_Word32 CpuUsage();
+    virtual WebRtc_Word32 CpuUsage(WebRtc_Word8* /*pProcessName*/,
+                                   WebRtc_UWord32 /*length*/) {return -1;}
+    virtual WebRtc_Word32 CpuUsage(WebRtc_UWord32  /*dwProcessID*/) {return -1;}
+
+    virtual WebRtc_Word32 CpuUsageMultiCore(WebRtc_UWord32& num_cores,
+                                            WebRtc_UWord32*& cpu_usage);
+
+    virtual void Reset() {}
+    virtual void Stop() {}
+
+    CpuWindows();
+    virtual ~CpuWindows();
+private:
+    bool AllocateComplexDataTypes();
+    void DeAllocateComplexDataTypes();
+
+    void StartPollingCpu();
+    bool StopPollingCpu();
+
+    static bool Process(void* thread_object);
+    bool ProcessImpl();
+
+    bool CreateWmiConnection();
+    bool CreatePerfOsRefresher();
+    bool CreatePerfOsCpuHandles();
+    bool Initialize();
+    bool Terminate();
+
+    bool UpdateCpuUsage();
+
+    ThreadWrapper* cpu_polling_thread;
+
+    bool initialize_;
+    bool has_initialized_;
+    CriticalSectionWrapper* init_crit_;
+    ConditionVariableWrapper* init_cond_;
+
+    bool terminate_;
+    bool has_terminated_;
+    CriticalSectionWrapper* terminate_crit_;
+    ConditionVariableWrapper* terminate_cond_;
+
+    // For sleep with wake-up functionality.
+    EventWrapper* sleep_event;
+
+    // Will be an array. Just care about CPU 0 for now.
+    WebRtc_UWord32* cpu_usage_;
+
+    // One IWbemObjectAccess for each processor and one for the total.
+    // 0-n-1 is the individual processors.
+    // n is the total.
+    IWbemObjectAccess** wbem_enum_access_;
+    DWORD number_of_objects_;
+
+    // Cpu timestamp
+    long cpu_usage_handle_;
+    unsigned __int64* previous_processor_timestamp_;
+
+    // Timestamp
+    long timestamp_sys_100_ns_handle_;
+    unsigned __int64* previous_100ns_timestamp_;
+
+    IWbemServices* wbem_service_;
+    IWbemServices* wbem_service_proxy_;
+
+    IWbemRefresher* wbem_refresher_;
+
+    IWbemHiPerfEnum* wbem_enum_;
+
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CPU_WINDOWS_NO_CPOL_H_
diff --git a/trunk/src/system_wrappers/source/cpu_wrapper_unittest.cc b/trunk/src/system_wrappers/source/cpu_wrapper_unittest.cc
new file mode 100644
index 0000000..cd149dc
--- /dev/null
+++ b/trunk/src/system_wrappers/source/cpu_wrapper_unittest.cc
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/interface/cpu_wrapper.h"
+
+#include "gtest/gtest.h"
+#include "system_wrappers/interface/cpu_info.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/trace.h"
+#include "testsupport/fileutils.h"
+
+using webrtc::CpuInfo;
+using webrtc::CpuWrapper;
+using webrtc::EventWrapper;
+using webrtc::scoped_ptr;
+using webrtc::Trace;
+
+// This test is flaky on Windows/Release.
+// http://code.google.com/p/webrtc/issues/detail?id=290
+#ifdef _WIN32
+#define MAYBE_Usage DISABLED_Usage
+#else
+#define MAYBE_Usage Usage
+#endif
+TEST(CpuWrapperTest, MAYBE_Usage) {
+  Trace::CreateTrace();
+  std::string trace_file = webrtc::test::OutputPath() +
+      "cpu_wrapper_unittest.txt";
+  Trace::SetTraceFile(trace_file.c_str());
+  Trace::SetLevelFilter(webrtc::kTraceAll);
+  printf("Number of cores detected:%u\n", CpuInfo::DetectNumberOfCores());
+  scoped_ptr<CpuWrapper> cpu(CpuWrapper::CreateCpu());
+  ASSERT_TRUE(cpu.get() != NULL);
+  scoped_ptr<EventWrapper> sleep_event(EventWrapper::Create());
+  ASSERT_TRUE(sleep_event.get() != NULL);
+
+  int num_iterations = 0;
+  WebRtc_UWord32 num_cores = 0;
+  WebRtc_UWord32* cores = NULL;
+  bool cpu_usage_available = cpu->CpuUsageMultiCore(num_cores, cores) != -1;
+  // Initializing the CPU measurements may take a couple of seconds on Windows.
+  // Since the initialization is lazy we need to wait until it is completed.
+  // Should not take more than 10000 ms.
+  while (!cpu_usage_available && (++num_iterations < 10000)) {
+    if (cores != NULL) {
+      ASSERT_GT(num_cores, 0u);
+      break;
+    }
+    sleep_event->Wait(1);
+    cpu_usage_available = cpu->CpuUsageMultiCore(num_cores, cores) != -1;
+  }
+  ASSERT_TRUE(cpu_usage_available);
+
+  const WebRtc_Word32 average = cpu->CpuUsageMultiCore(num_cores, cores);
+  ASSERT_TRUE(cores != NULL);
+  EXPECT_GT(num_cores, 0u);
+  EXPECT_GE(average, 0);
+  EXPECT_LE(average, 100);
+
+  printf("\nNumber of cores:%d\n", num_cores);
+  printf("Average cpu:%d\n", average);
+  for (WebRtc_UWord32 i = 0; i < num_cores; i++) {
+    printf("Core:%u CPU:%u \n", i, cores[i]);
+    EXPECT_GE(cores[i], 0u);
+    EXPECT_LE(cores[i], 100u);
+  }
+
+  Trace::ReturnTrace();
+};
diff --git a/trunk/src/system_wrappers/source/critical_section.cc b/trunk/src/system_wrappers/source/critical_section.cc
new file mode 100644
index 0000000..d3f3f01
--- /dev/null
+++ b/trunk/src/system_wrappers/source/critical_section.cc
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(_WIN32)
+    #include <windows.h>
+    #include "critical_section_win.h"
+#else
+    #include "critical_section_posix.h"
+#endif
+
+namespace webrtc {
+CriticalSectionWrapper* CriticalSectionWrapper::CreateCriticalSection()
+{
+#ifdef _WIN32
+    return new CriticalSectionWindows();
+#else
+    return new CriticalSectionPosix();
+#endif
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/critical_section_posix.cc b/trunk/src/system_wrappers/source/critical_section_posix.cc
new file mode 100644
index 0000000..70f85f9
--- /dev/null
+++ b/trunk/src/system_wrappers/source/critical_section_posix.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// General note: return values for the various pthread synchronization APIs
+// are explicitly ignored here. In Chromium, the same thing is done for release.
+// However, in debugging, failure in these APIs are logged. There is currently
+// no equivalent to DCHECK_EQ in WebRTC code so this is the best we can do here.
+// TODO(henrike): add logging when pthread synchronization APIs are failing.
+
+#include "critical_section_posix.h"
+
+namespace webrtc {
+
+CriticalSectionPosix::CriticalSectionPosix()
+{
+    pthread_mutexattr_t attr;
+    (void) pthread_mutexattr_init(&attr);
+    (void) pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+    (void) pthread_mutex_init(&_mutex, &attr);
+}
+
+CriticalSectionPosix::~CriticalSectionPosix()
+{
+    (void) pthread_mutex_destroy(&_mutex);
+}
+
+void
+CriticalSectionPosix::Enter()
+{
+    (void) pthread_mutex_lock(&_mutex);
+}
+
+void
+CriticalSectionPosix::Leave()
+{
+    (void) pthread_mutex_unlock(&_mutex);
+}
+
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/critical_section_posix.h b/trunk/src/system_wrappers/source/critical_section_posix.h
new file mode 100644
index 0000000..40b7dc9
--- /dev/null
+++ b/trunk/src/system_wrappers/source/critical_section_posix.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CRITICAL_SECTION_POSIX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CRITICAL_SECTION_POSIX_H_
+
+#include "critical_section_wrapper.h"
+
+#include <pthread.h>
+
+namespace webrtc {
+class CriticalSectionPosix : public CriticalSectionWrapper
+{
+public:
+    CriticalSectionPosix();
+
+    virtual ~CriticalSectionPosix();
+
+    virtual void Enter();
+    virtual void Leave();
+
+private:
+    pthread_mutex_t _mutex;
+    friend class ConditionVariablePosix;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CRITICAL_SECTION_POSIX_H_
diff --git a/trunk/src/system_wrappers/source/critical_section_win.cc b/trunk/src/system_wrappers/source/critical_section_win.cc
new file mode 100644
index 0000000..bbc66e5
--- /dev/null
+++ b/trunk/src/system_wrappers/source/critical_section_win.cc
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "critical_section_win.h"
+
+namespace webrtc {
+CriticalSectionWindows::CriticalSectionWindows()
+{
+    InitializeCriticalSection(&crit);
+}
+
+CriticalSectionWindows::~CriticalSectionWindows()
+{
+    DeleteCriticalSection(&crit);
+}
+
+void
+CriticalSectionWindows::Enter()
+{
+    EnterCriticalSection(&crit);
+}
+
+void
+CriticalSectionWindows::Leave()
+{
+    LeaveCriticalSection(&crit);
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/critical_section_win.h b/trunk/src/system_wrappers/source/critical_section_win.h
new file mode 100644
index 0000000..9556fa9
--- /dev/null
+++ b/trunk/src/system_wrappers/source/critical_section_win.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_CRITICAL_SECTION_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_CRITICAL_SECTION_WINDOWS_H_
+
+#include "typedefs.h"
+#include "critical_section_wrapper.h"
+#include <windows.h>
+
+namespace webrtc {
+class CriticalSectionWindows : public CriticalSectionWrapper
+{
+public:
+    CriticalSectionWindows();
+
+    virtual ~CriticalSectionWindows();
+
+    virtual void Enter();
+    virtual void Leave();
+
+private:
+    CRITICAL_SECTION crit;
+
+    friend class ConditionVariableWindows;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_CRITICAL_SECTION_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/source/data_log.cc b/trunk/src/system_wrappers/source/data_log.cc
new file mode 100644
index 0000000..f123896
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log.cc
@@ -0,0 +1,455 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "data_log.h"
+
+#include <assert.h>
+
+#include <algorithm>
+#include <list>
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "file_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "thread_wrapper.h"
+
+namespace webrtc {
+
+DataLogImpl::CritSectScopedPtr DataLogImpl::crit_sect_(
+  CriticalSectionWrapper::CreateCriticalSection());
+
+DataLogImpl* DataLogImpl::instance_ = NULL;
+
+// A Row contains cells, which are indexed by the column names as std::string.
+// The string index is treated in a case sensitive way.
+class Row {
+ public:
+  Row();
+  ~Row();
+
+  // Inserts a Container into the cell of the column specified with
+  // column_name.
+  // column_name is treated in a case sensitive way.
+  int InsertCell(const std::string& column_name,
+                 const Container* value_container);
+
+  // Converts the value at the column specified by column_name to a string
+  // stored in value_string.
+  // column_name is treated in a case sensitive way.
+  void ToString(const std::string& column_name, std::string* value_string);
+
+ private:
+  // Collection of containers indexed by column name as std::string
+  typedef std::map<std::string, const Container*> CellMap;
+
+  CellMap                   cells_;
+  CriticalSectionWrapper*   cells_lock_;
+};
+
+// A LogTable contains multiple rows, where only the latest row is active for
+// editing. The rows are defined by the ColumnMap, which contains the name of
+// each column and the length of the column (1 for one-value-columns and greater
+// than 1 for multi-value-columns).
+class LogTable {
+ public:
+  LogTable();
+  ~LogTable();
+
+  // Adds the column with name column_name to the table. The column will be a
+  // multi-value-column if multi_value_length is greater than 1.
+  // column_name is treated in a case sensitive way.
+  int AddColumn(const std::string& column_name, int multi_value_length);
+
+  // Buffers the current row while it is waiting to be written to file,
+  // which is done by a call to Flush(). A new row is available when the
+  // function returns.
+  void NextRow();
+
+  // Inserts a Container into the cell of the column specified with
+  // column_name.
+  // column_name is treated in a case sensitive way.
+  int InsertCell(const std::string& column_name,
+                 const Container* value_container);
+
+  // Creates a log file, named as specified in the string file_name, to
+  // where the table will be written when calling Flush().
+  int CreateLogFile(const std::string& file_name);
+
+  // Write all complete rows to file.
+  // May not be called by two threads simultaneously (doing so may result in
+  // a race condition). Will be called by the file_writer_thread_ when that
+  // thread is running.
+  void Flush();
+
+ private:
+  // Collection of multi_value_lengths indexed by column name as std::string
+  typedef std::map<std::string, int> ColumnMap;
+  typedef std::list<Row*> RowList;
+
+  ColumnMap               columns_;
+  RowList                 rows_[2];
+  RowList*                rows_history_;
+  RowList*                rows_flush_;
+  Row*                    current_row_;
+  FileWrapper*            file_;
+  bool                    write_header_;
+  CriticalSectionWrapper* table_lock_;
+};
+
+Row::Row()
+  : cells_(),
+    cells_lock_(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+Row::~Row() {
+  for (CellMap::iterator it = cells_.begin(); it != cells_.end();) {
+    delete it->second;
+    // For maps all iterators (except the erased) are valid after an erase
+    cells_.erase(it++);
+  }
+  delete cells_lock_;
+}
+
+int Row::InsertCell(const std::string& column_name,
+                    const Container* value_container) {
+  CriticalSectionScoped synchronize(cells_lock_);
+  assert(cells_.count(column_name) == 0);
+  if (cells_.count(column_name) > 0)
+    return -1;
+  cells_[column_name] = value_container;
+  return 0;
+}
+
+void Row::ToString(const std::string& column_name,
+                   std::string* value_string) {
+  CriticalSectionScoped synchronize(cells_lock_);
+  const Container* container = cells_[column_name];
+  if (container == NULL) {
+    *value_string = "NaN,";
+    return;
+  }
+  container->ToString(value_string);
+}
+
+LogTable::LogTable()
+  : columns_(),
+    rows_(),
+    rows_history_(&rows_[0]),
+    rows_flush_(&rows_[1]),
+    current_row_(new Row),
+    file_(FileWrapper::Create()),
+    write_header_(true),
+    table_lock_(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+LogTable::~LogTable() {
+  for (RowList::iterator row_it = rows_history_->begin();
+       row_it != rows_history_->end();) {
+    delete *row_it;
+    row_it = rows_history_->erase(row_it);
+  }
+  for (ColumnMap::iterator col_it = columns_.begin();
+       col_it != columns_.end();) {
+    // For maps all iterators (except the erased) are valid after an erase
+    columns_.erase(col_it++);
+  }
+  if (file_ != NULL) {
+    file_->Flush();
+    file_->CloseFile();
+    delete file_;
+  }
+  delete current_row_;
+  delete table_lock_;
+}
+
+int LogTable::AddColumn(const std::string& column_name,
+                        int multi_value_length) {
+  assert(multi_value_length > 0);
+  if (!write_header_) {
+    // It's not allowed to add new columns after the header
+    // has been written.
+    assert(false);
+    return -1;
+  } else {
+    CriticalSectionScoped synchronize(table_lock_);
+    if (write_header_)
+      columns_[column_name] = multi_value_length;
+    else
+      return -1;
+  }
+  return 0;
+}
+
+void LogTable::NextRow() {
+  CriticalSectionScoped sync_rows(table_lock_);
+  rows_history_->push_back(current_row_);
+  current_row_ = new Row;
+}
+
+int LogTable::InsertCell(const std::string& column_name,
+                         const Container* value_container) {
+  CriticalSectionScoped synchronize(table_lock_);
+  assert(columns_.count(column_name) > 0);
+  if (columns_.count(column_name) == 0)
+    return -1;
+  return current_row_->InsertCell(column_name, value_container);
+}
+
+int LogTable::CreateLogFile(const std::string& file_name) {
+  if (file_name.length() == 0)
+    return -1;
+  if (file_->Open())
+    return -1;
+  // Check OpenFile()'s result; the old "file_ == NULL" test was dead code.
+  if (file_->OpenFile(file_name.c_str(),
+                      false,  // Open with read/write permissions
+                      false,  // Don't wraparound and write at the
+                              // beginning when the file is full
+                      true) != 0)  // Open as a text file
+    return -1;
+  return 0;
+}
+
+void LogTable::Flush() {
+  ColumnMap::iterator column_it;
+  bool commit_header = false;
+  if (write_header_) {
+    CriticalSectionScoped synchronize(table_lock_);
+    if (write_header_) {
+      commit_header = true;
+      write_header_ = false;
+    }
+  }
+  if (commit_header) {
+    for (column_it = columns_.begin();
+         column_it != columns_.end(); ++column_it) {
+      if (column_it->second > 1) {
+        file_->WriteText("%s[%u],", column_it->first.c_str(),
+                         column_it->second);
+        for (int i = 1; i < column_it->second; ++i)
+          file_->WriteText(",");
+      } else {
+        file_->WriteText("%s,", column_it->first.c_str());
+      }
+    }
+    if (columns_.size() > 0)
+      file_->WriteText("\n");
+  }
+
+  // Swap the list used for flushing with the list containing the row history
+  // and clear the history. We also create a local pointer to the new
+  // list used for flushing to avoid race conditions if another thread
+  // calls this function while we are writing.
+  // We don't want to block the list while we're writing to file.
+  {
+    CriticalSectionScoped synchronize(table_lock_);
+    RowList* tmp = rows_flush_;
+    rows_flush_ = rows_history_;
+    rows_history_ = tmp;
+    rows_history_->clear();
+  }
+
+  // Write all complete rows to file and delete them
+  for (RowList::iterator row_it = rows_flush_->begin();
+       row_it != rows_flush_->end();) {
+    for (column_it = columns_.begin();
+         column_it != columns_.end(); ++column_it) {
+      std::string row_string;
+      (*row_it)->ToString(column_it->first, &row_string);
+      file_->WriteText("%s", row_string.c_str());
+    }
+    if (columns_.size() > 0)
+      file_->WriteText("\n");
+    delete *row_it;
+    row_it = rows_flush_->erase(row_it);
+  }
+}
+
+int DataLog::CreateLog() {
+  return DataLogImpl::CreateLog();
+}
+
+void DataLog::ReturnLog() {
+  return DataLogImpl::ReturnLog();
+}
+
+std::string DataLog::Combine(const std::string& table_name, int table_id) {
+  // Returns "<table_name>_<table_id>" folded to lower case, e.g.
+  // Combine("Table", 7) -> "table_7".
+  std::stringstream ss;
+  ss << table_name << "_" << table_id;
+  std::string combined_id = ss.str();
+  // Lower-case in place so derived table/file ids compare consistently.
+  std::transform(combined_id.begin(), combined_id.end(), combined_id.begin(),
+                 ::tolower);
+  return combined_id;
+}
+
+int DataLog::AddTable(const std::string& table_name) {
+  DataLogImpl* data_log = DataLogImpl::StaticInstance();
+  if (data_log == NULL)
+    return -1;
+  return data_log->AddTable(table_name);
+}
+
+int DataLog::AddColumn(const std::string& table_name,
+                       const std::string& column_name,
+                       int multi_value_length) {
+  DataLogImpl* data_log = DataLogImpl::StaticInstance();
+  if (data_log == NULL)
+    return -1;
+  // Call through the pointer checked above. The old code re-fetched
+  // StaticInstance(), which can be NULL if ReturnLog() ran after the check.
+  return data_log->AddColumn(table_name, column_name, multi_value_length);
+}
+
+int DataLog::NextRow(const std::string& table_name) {
+  DataLogImpl* data_log = DataLogImpl::StaticInstance();
+  if (data_log == NULL)
+    return -1;
+  // Use the checked pointer; re-fetching StaticInstance() defeats the check.
+  return data_log->NextRow(table_name);
+}
+
+DataLogImpl::DataLogImpl()
+  : counter_(1),
+    tables_(),
+    flush_event_(EventWrapper::Create()),
+    file_writer_thread_(NULL),
+    tables_lock_(RWLockWrapper::CreateRWLock()) {
+}
+
+DataLogImpl::~DataLogImpl() {
+  StopThread();
+  Flush();  // Write any remaining rows
+  delete file_writer_thread_;
+  delete flush_event_;
+  for (TableMap::iterator it = tables_.begin(); it != tables_.end();) {
+    delete static_cast<LogTable*>(it->second);
+    // For maps all iterators (except the erased) are valid after an erase
+    tables_.erase(it++);
+  }
+  delete tables_lock_;
+}
+
+int DataLogImpl::CreateLog() {
+  CriticalSectionScoped synchronize(crit_sect_.get());
+  if (instance_ == NULL) {
+    instance_ = new DataLogImpl();
+    return instance_->Init();
+  } else {
+    ++instance_->counter_;
+  }
+  return 0;
+}
+
+int DataLogImpl::Init() {
+  file_writer_thread_ = ThreadWrapper::CreateThread(
+                          DataLogImpl::Run,
+                          instance_,
+                          kHighestPriority,
+                          "DataLog");
+  if (file_writer_thread_ == NULL)
+    return -1;
+  unsigned int thread_id = 0;
+  bool success = file_writer_thread_->Start(thread_id);
+  if (!success)
+    return -1;
+  return 0;
+}
+
+DataLogImpl* DataLogImpl::StaticInstance() {
+  return instance_;
+}
+
+void DataLogImpl::ReturnLog() {
+  CriticalSectionScoped synchronize(crit_sect_.get());
+  if (instance_ && instance_->counter_ > 1) {
+    --instance_->counter_;
+    return;
+  }
+  delete instance_;
+  instance_ = NULL;
+}
+
+int DataLogImpl::AddTable(const std::string& table_name) {
+  WriteLockScoped synchronize(*tables_lock_);
+  // Make sure we don't add a table which already exists
+  if (tables_.count(table_name) > 0)
+    return -1;
+  tables_[table_name] = new LogTable();
+  if (tables_[table_name]->CreateLogFile(table_name + ".txt") == -1)
+    return -1;
+  return 0;
+}
+
+int DataLogImpl::AddColumn(const std::string& table_name,
+                           const std::string& column_name,
+                           int multi_value_length) {
+  ReadLockScoped synchronize(*tables_lock_);
+  if (tables_.count(table_name) == 0)
+    return -1;
+  return tables_[table_name]->AddColumn(column_name, multi_value_length);
+}
+
+int DataLogImpl::InsertCell(const std::string& table_name,
+                            const std::string& column_name,
+                            const Container* value_container) {
+  ReadLockScoped synchronize(*tables_lock_);
+  assert(tables_.count(table_name) > 0);
+  if (tables_.count(table_name) == 0)
+    return -1;
+  return tables_[table_name]->InsertCell(column_name, value_container);
+}
+
+int DataLogImpl::NextRow(const std::string& table_name) {
+  ReadLockScoped synchronize(*tables_lock_);
+  if (tables_.count(table_name) == 0)
+    return -1;
+  tables_[table_name]->NextRow();
+  if (file_writer_thread_ == NULL) {
+    // Write every row to file as they get complete.
+    tables_[table_name]->Flush();
+  } else {
+    // Signal a complete row
+    flush_event_->Set();
+  }
+  return 0;
+}
+
+void DataLogImpl::Flush() {
+  ReadLockScoped synchronize(*tables_lock_);
+  for (TableMap::iterator it = tables_.begin(); it != tables_.end(); ++it) {
+    it->second->Flush();
+  }
+}
+
+bool DataLogImpl::Run(void* obj) {
+  static_cast<DataLogImpl*>(obj)->Process();
+  return true;
+}
+
+void DataLogImpl::Process() {
+  // Wait for a row to be complete
+  flush_event_->Wait(WEBRTC_EVENT_INFINITE);
+  Flush();
+}
+
+void DataLogImpl::StopThread() {
+  if (file_writer_thread_ != NULL) {
+    file_writer_thread_->SetNotAlive();
+    flush_event_->Set();
+    // Call Stop() repeatedly, waiting for the Flush() call in Process() to
+    // finish.
+    while (!file_writer_thread_->Stop()) continue;
+  }
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/data_log_c.cc b/trunk/src/system_wrappers/source/data_log_c.cc
new file mode 100644
index 0000000..f8d7efd
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_c.cc
@@ -0,0 +1,145 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the pure C wrapper of the DataLog class.
+ */
+
+#include "system_wrappers/interface/data_log_c.h"
+
+#include <string>
+
+#include "system_wrappers/interface/data_log.h"
+
+extern "C" int WebRtcDataLog_CreateLog() {
+  return webrtc::DataLog::CreateLog();
+}
+
+extern "C" void WebRtcDataLog_ReturnLog() {
+  return webrtc::DataLog::ReturnLog();
+}
+
+extern "C" char* WebRtcDataLog_Combine(char* combined_name, size_t combined_len,
+                                       const char* table_name, int table_id) {
+  if (!table_name) return NULL;
+  std::string combined = webrtc::DataLog::Combine(table_name, table_id);
+  if (combined.size() >= combined_len) return NULL;
+  std::copy(combined.begin(), combined.end(), combined_name);
+  combined_name[combined.size()] = '\0';
+  return combined_name;
+}
+
+extern "C" int WebRtcDataLog_AddTable(const char* table_name) {
+  if (!table_name) return -1;
+  return webrtc::DataLog::AddTable(table_name);
+}
+
+extern "C" int WebRtcDataLog_AddColumn(const char* table_name,
+                                       const char* column_name,
+                                       int multi_value_length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::AddColumn(table_name, column_name,
+                                    multi_value_length);
+}
+
+extern "C" int WebRtcDataLog_InsertCell_int(const char* table_name,
+                                            const char* column_name,
+                                            int value) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, value);
+}
+
+extern "C" int WebRtcDataLog_InsertArray_int(const char* table_name,
+                                             const char* column_name,
+                                             const int* values,
+                                             int length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, values, length);
+}
+
+extern "C" int WebRtcDataLog_InsertCell_float(const char* table_name,
+                                              const char* column_name,
+                                              float value) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, value);
+}
+
+extern "C" int WebRtcDataLog_InsertArray_float(const char* table_name,
+                                               const char* column_name,
+                                               const float* values,
+                                               int length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, values, length);
+}
+
+extern "C" int WebRtcDataLog_InsertCell_double(const char* table_name,
+                                               const char* column_name,
+                                               double value) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, value);
+}
+
+extern "C" int WebRtcDataLog_InsertArray_double(const char* table_name,
+                                                const char* column_name,
+                                                const double* values,
+                                                int length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, values, length);
+}
+
+extern "C" int WebRtcDataLog_InsertCell_int32(const char* table_name,
+                                              const char* column_name,
+                                              int32_t value) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, value);
+}
+
+extern "C" int WebRtcDataLog_InsertArray_int32(const char* table_name,
+                                               const char* column_name,
+                                               const int32_t* values,
+                                               int length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, values, length);
+}
+
+extern "C" int WebRtcDataLog_InsertCell_uint32(const char* table_name,
+                                               const char* column_name,
+                                               uint32_t value) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, value);
+}
+
+extern "C" int WebRtcDataLog_InsertArray_uint32(const char* table_name,
+                                                const char* column_name,
+                                                const uint32_t* values,
+                                                int length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, values, length);
+}
+
+extern "C" int WebRtcDataLog_InsertCell_int64(const char* table_name,
+                                              const char* column_name,
+                                              int64_t value) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, value);
+}
+
+extern "C" int WebRtcDataLog_InsertArray_int64(const char* table_name,
+                                               const char* column_name,
+                                               const int64_t* values,
+                                               int length) {
+  if (!table_name || !column_name) return -1;
+  return webrtc::DataLog::InsertCell(table_name, column_name, values, length);
+}
+
+extern "C" int WebRtcDataLog_NextRow(const char* table_name) {
+  if (!table_name) return -1;
+  return webrtc::DataLog::NextRow(table_name);
+}
diff --git a/trunk/src/system_wrappers/source/data_log_c_helpers_unittest.c b/trunk/src/system_wrappers/source/data_log_c_helpers_unittest.c
new file mode 100644
index 0000000..e78a0e3
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_c_helpers_unittest.c
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/source/data_log_c_helpers_unittest.h"
+
+#include <assert.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "system_wrappers/interface/data_log_c.h"
+
+enum { kTestArrayLen = 4 };
+static const char kTableName[] = "c_wrapper_table";
+static const char kColumnName1[] = "Scalar";
+static const char kColumnName2[] = "Vector";
+
+int WebRtcDataLogCHelper_TestCreateLog() {
+  return WebRtcDataLog_CreateLog();  /* Forwards the wrapper's return code. */
+}
+
+int WebRtcDataLogCHelper_TestReturnLog() {
+  WebRtcDataLog_ReturnLog();
+  return 0;  /* ReturnLog is void, so always report success. */
+}
+
+int WebRtcDataLogCHelper_TestCombine() {
+  const int kOutLen = strlen(kTableName) + 4;  /* Room for "_17" + '\0' */
+  char* combined_name = malloc(kOutLen * sizeof(char));
+  /* Only call Combine with a valid buffer; malloc may return NULL. */
+  char* out_ptr = combined_name ?
+      WebRtcDataLog_Combine(combined_name, kOutLen, kTableName, 17) : NULL;
+  int return_code = 0;
+  if (!out_ptr) {
+    return_code = -1;
+  } else if (strcmp(combined_name, "c_wrapper_table_17") != 0) {
+    return_code = -2;  /* Compare the buffer only after Combine succeeded. */
+  }
+  free(combined_name);  /* free(NULL) is a safe no-op. */
+  return return_code;
+}
+
+int WebRtcDataLogCHelper_TestAddTable() {
+  return WebRtcDataLog_AddTable(kTableName);  /* Forward the wrapper's code. */
+}
+
+int WebRtcDataLogCHelper_TestAddColumn() {
+  if (WebRtcDataLog_AddColumn(kTableName, kColumnName1, 1) != 0) {
+    return -1;  /* Failed to add the single-value ("Scalar") column. */
+  }
+  if (WebRtcDataLog_AddColumn(kTableName, kColumnName2, kTestArrayLen) != 0) {
+    return -2;  /* Failed to add the multi-value ("Vector") column. */
+  }
+  return 0;
+}
+
+int WebRtcDataLogCHelper_TestNextRow() {
+  return WebRtcDataLog_NextRow(kTableName);  /* Forward the wrapper's code. */
+}
+
+int WebRtcDataLogCHelper_TestInsertCell_int() {
+  return WebRtcDataLog_InsertCell_int(kTableName, kColumnName1, 17);
+}
+
+int WebRtcDataLogCHelper_TestInsertArray_int() {
+  int values[kTestArrayLen] = {1, 2, 3, 4};
+  return WebRtcDataLog_InsertArray_int(kTableName, kColumnName2, values,
+                                       kTestArrayLen);
+}
+
+int WebRtcDataLogCHelper_TestInsertCell_float() {
+  return WebRtcDataLog_InsertCell_float(kTableName, kColumnName1, 17.0f);
+}
+
+int WebRtcDataLogCHelper_TestInsertArray_float() {
+  float values[kTestArrayLen] = {1.0f, 2.0f, 3.0f, 4.0f};
+  return WebRtcDataLog_InsertArray_float(kTableName, kColumnName2, values,
+                                         kTestArrayLen);
+}
+
+int WebRtcDataLogCHelper_TestInsertCell_double() {
+  /* Exercise the double wrapper (was mistakenly calling InsertCell_int). */
+  return WebRtcDataLog_InsertCell_double(kTableName, kColumnName1, 17.0);
+}
+
+int WebRtcDataLogCHelper_TestInsertArray_double() {
+  double values[kTestArrayLen] = {1.0, 2.0, 3.0, 4.0};
+  return WebRtcDataLog_InsertArray_double(kTableName, kColumnName2, values,
+                                          kTestArrayLen);
+}
+
+int WebRtcDataLogCHelper_TestInsertCell_int32() {
+  return WebRtcDataLog_InsertCell_int32(kTableName, kColumnName1, 17);
+}
+
+int WebRtcDataLogCHelper_TestInsertArray_int32() {
+  int32_t values[kTestArrayLen] = {1, 2, 3, 4};
+  return WebRtcDataLog_InsertArray_int32(kTableName, kColumnName2, values,
+                                         kTestArrayLen);
+}
+
+int WebRtcDataLogCHelper_TestInsertCell_uint32() {
+  return WebRtcDataLog_InsertCell_uint32(kTableName, kColumnName1, 17);
+}
+
+int WebRtcDataLogCHelper_TestInsertArray_uint32() {
+  uint32_t values[kTestArrayLen] = {1, 2, 3, 4};
+  return WebRtcDataLog_InsertArray_uint32(kTableName, kColumnName2, values,
+                                          kTestArrayLen);
+}
+
+int WebRtcDataLogCHelper_TestInsertCell_int64() {
+  return WebRtcDataLog_InsertCell_int64(kTableName, kColumnName1, 17);
+}
+
+int WebRtcDataLogCHelper_TestInsertArray_int64() {
+  int64_t values[kTestArrayLen] = {1, 2, 3, 4};
+  return WebRtcDataLog_InsertArray_int64(kTableName, kColumnName2, values,
+                                         kTestArrayLen);
+}
diff --git a/trunk/src/system_wrappers/source/data_log_c_helpers_unittest.h b/trunk/src/system_wrappers/source/data_log_c_helpers_unittest.h
new file mode 100644
index 0000000..ef86eae
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_c_helpers_unittest.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_SYSTEM_WRAPPERS_SOURCE_DATA_LOG_C_HELPERS_UNITTEST_H_
+#define SRC_SYSTEM_WRAPPERS_SOURCE_DATA_LOG_C_HELPERS_UNITTEST_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int WebRtcDataLogCHelper_TestCreateLog();
+
+int WebRtcDataLogCHelper_TestReturnLog();
+
+int WebRtcDataLogCHelper_TestCombine();
+
+int WebRtcDataLogCHelper_TestAddTable();
+
+int WebRtcDataLogCHelper_TestAddColumn();
+
+int WebRtcDataLogCHelper_TestNextRow();
+
+int WebRtcDataLogCHelper_TestInsertCell_int();
+
+int WebRtcDataLogCHelper_TestInsertArray_int();
+
+int WebRtcDataLogCHelper_TestInsertCell_float();
+
+int WebRtcDataLogCHelper_TestInsertArray_float();
+
+int WebRtcDataLogCHelper_TestInsertCell_double();
+
+int WebRtcDataLogCHelper_TestInsertArray_double();
+
+int WebRtcDataLogCHelper_TestInsertCell_int32();
+
+int WebRtcDataLogCHelper_TestInsertArray_int32();
+
+int WebRtcDataLogCHelper_TestInsertCell_uint32();
+
+int WebRtcDataLogCHelper_TestInsertArray_uint32();
+
+int WebRtcDataLogCHelper_TestInsertCell_int64();
+
+int WebRtcDataLogCHelper_TestInsertArray_int64();
+
+#ifdef __cplusplus
+}  // end of extern "C"
+#endif
+
+#endif  // SRC_SYSTEM_WRAPPERS_SOURCE_DATA_LOG_C_HELPERS_UNITTEST_H_
diff --git a/trunk/src/system_wrappers/source/data_log_helpers_unittest.cc b/trunk/src/system_wrappers/source/data_log_helpers_unittest.cc
new file mode 100644
index 0000000..94b4d6e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_helpers_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "data_log.h"
+#include "gtest/gtest.h"
+
+using ::webrtc::DataLog;
+
+TEST(TestDataLog, IntContainers) {
+  int c = 5;
+  webrtc::ValueContainer<int> v1(c);
+  c = 10;
+  webrtc::ValueContainer<int> v2(c);
+  std::string s1, s2;
+  v1.ToString(&s1);
+  v2.ToString(&s2);
+  ASSERT_EQ(s1, "5,");
+  ASSERT_EQ(s2, "10,");
+  v1 = v2;
+  v1.ToString(&s1);
+  ASSERT_EQ(s1, s2);
+}
+
+TEST(TestDataLog, DoubleContainers) {
+  double c = 3.5;
+  webrtc::ValueContainer<double> v1(c);
+  c = 10.3;
+  webrtc::ValueContainer<double> v2(c);
+  std::string s1, s2;
+  v1.ToString(&s1);
+  v2.ToString(&s2);
+  ASSERT_EQ(s1, "3.5,");
+  ASSERT_EQ(s2, "10.3,");
+  v1 = v2;
+  v1.ToString(&s1);
+  ASSERT_EQ(s1, s2);
+}
+
+TEST(TestDataLog, MultiValueContainers) {
+  int a[3] = {1, 2, 3};
+  int b[3] = {4, 5, 6};
+  webrtc::MultiValueContainer<int> m1(a, 3);
+  webrtc::MultiValueContainer<int> m2(b, 3);
+  webrtc::MultiValueContainer<int> m3(a, 3);
+  std::string s1, s2, s3;
+  m1.ToString(&s1);
+  m2.ToString(&s2);
+  ASSERT_EQ(s1, "1,2,3,");
+  ASSERT_EQ(s2, "4,5,6,");
+  m1 = m2;
+  m1.ToString(&s1);
+  ASSERT_EQ(s1, s2);
+  m3.ToString(&s3);
+  ASSERT_EQ(s3, "1,2,3,");
+}
diff --git a/trunk/src/system_wrappers/source/data_log_no_op.cc b/trunk/src/system_wrappers/source/data_log_no_op.cc
new file mode 100644
index 0000000..bedc82a
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_no_op.cc
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "data_log.h"
+
+#include <string>
+
+namespace webrtc {
+
+int DataLog::CreateLog() {
+  return 0;
+}
+
+void DataLog::ReturnLog() {
+}
+
+std::string DataLog::Combine(const std::string& table_name, int table_id) {
+  return std::string();  // No-op build: ignores args, yields empty string.
+}
+
+int DataLog::AddTable(const std::string& /*table_name*/) {
+  return 0;
+}
+
+int DataLog::AddColumn(const std::string& /*table_name*/,
+                       const std::string& /*column_name*/,
+                       int /*multi_value_length*/) {
+  return 0;
+}
+
+int DataLog::NextRow(const std::string& /*table_name*/) {
+  return 0;
+}
+
+DataLogImpl::DataLogImpl() {
+}
+
+DataLogImpl::~DataLogImpl() {
+}
+
+DataLogImpl* DataLogImpl::StaticInstance() {
+  return NULL;
+}
+
+void DataLogImpl::ReturnLog() {
+}
+
+int DataLogImpl::AddTable(const std::string& /*table_name*/) {
+  return 0;
+}
+
+int DataLogImpl::AddColumn(const std::string& /*table_name*/,
+                           const std::string& /*column_name*/,
+                           int /*multi_value_length*/) {
+  return 0;
+}
+
+int DataLogImpl::InsertCell(const std::string& /*table_name*/,
+                            const std::string& /*column_name*/,
+                            const Container* /*value_container*/) {
+  return 0;
+}
+
+int DataLogImpl::NextRow(const std::string& /*table_name*/) {
+  return 0;
+}
+
+void DataLogImpl::Flush() {
+}
+
+bool DataLogImpl::Run(void* /*obj*/) {
+  return true;
+}
+
+void DataLogImpl::Process() {
+}
+
+void DataLogImpl::StopThread() {
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/data_log_unittest.cc b/trunk/src/system_wrappers/source/data_log_unittest.cc
new file mode 100644
index 0000000..c64ed94
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_unittest.cc
@@ -0,0 +1,310 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <string>
+
+#include "system_wrappers/interface/data_log.h"
+#include "system_wrappers/interface/data_log_c.h"
+#include "system_wrappers/source/data_log_c_helpers_unittest.h"
+#include "gtest/gtest.h"
+
+using ::webrtc::DataLog;
+
+// A class for storing the values expected from a log table column when
+// verifying a log table file.
+struct ExpectedValues {
+ public:
+  ExpectedValues()
+    : values(NULL),
+      multi_value_length(1) {
+  }
+
+  ExpectedValues(std::vector<std::string> expected_values,
+                 int expected_multi_value_length)
+    : values(expected_values),
+      multi_value_length(expected_multi_value_length) {
+  }
+
+  std::vector<std::string> values;
+  int multi_value_length;
+};
+
+typedef std::map<std::string, ExpectedValues> ExpectedValuesMap;
+
+// A static class used for parsing and verifying data log files.
+class DataLogParser {
+ public:
+  // Verifies that the log table stored in the file "log_file" corresponds to
+  // the cells and columns specified in "columns".
+  static int VerifyTable(FILE* log_file, const ExpectedValuesMap& columns) {
+    int row = 0;
+    char line_buffer[kMaxLineLength];
+    char* ret = fgets(line_buffer, kMaxLineLength, log_file);
+    EXPECT_FALSE(ret == NULL);
+    if (ret == NULL)
+      return -1;
+
+    std::string line(line_buffer);  // C-string ctor: stop at fgets's '\0'.
+    VerifyHeader(line, columns);
+    while (fgets(line_buffer, kMaxLineLength, log_file) != NULL) {
+      line = std::string(line_buffer);  // Ditto: don't copy past the '\0'.
+      size_t line_position = 0;
+
+      for (ExpectedValuesMap::const_iterator it = columns.begin();
+           it != columns.end(); ++it) {
+        std::string str = ParseElement(line, &line_position,
+                                       it->second.multi_value_length);
+        EXPECT_EQ(str, it->second.values[row]);
+        if (str != it->second.values[row])
+          return -1;
+      }
+      ++row;
+    }
+    return 0;
+  }
+
+  // Verifies the table header stored in "line" to correspond with the header
+  // specified in "columns".
+  static int VerifyHeader(const std::string& line,
+                          const ExpectedValuesMap& columns) {
+    size_t line_position = 0;
+    for (ExpectedValuesMap::const_iterator it = columns.begin();
+         it != columns.end(); ++it) {
+      std::string str = ParseElement(line, &line_position,
+                                     it->second.multi_value_length);
+      EXPECT_EQ(str, it->first);
+      if (str != it->first)
+        return -1;
+    }
+    return 0;
+  }
+
+  // Parses out and returns one element from the string "line", which contains
+  // one line read from a log table file. An element can either be a column
+  // header or a cell of a row.
+  static std::string ParseElement(const std::string& line,
+                                  size_t* line_position,
+                                  int multi_value_length) {
+    std::string parsed_cell;
+    parsed_cell = "";
+    for (int i = 0; i < multi_value_length; ++i) {
+      size_t next_separator = line.find(',', *line_position);
+      EXPECT_NE(next_separator, std::string::npos);
+      if (next_separator == std::string::npos)
+        break;
+      parsed_cell += line.substr(*line_position,
+                                 next_separator - *line_position + 1);
+      *line_position = next_separator + 1;
+    }
+    return parsed_cell;
+  }
+
+  // This constant defines the maximum line length the DataLogParser can
+  // parse.
+  enum { kMaxLineLength = 100 };
+};
+
+TEST(TestDataLog, CreateReturnTest) {
+  for (int i = 0; i < 10; ++i)
+    ASSERT_EQ(DataLog::CreateLog(), 0);
+  ASSERT_EQ(DataLog::AddTable(DataLog::Combine("a proper table", 1)), 0);
+  for (int i = 0; i < 10; ++i)
+    DataLog::ReturnLog();
+  ASSERT_LT(DataLog::AddTable(DataLog::Combine("table failure", 1)), 0);
+}
+
+TEST(TestDataLog, VerifyCombineMethod) {
+  EXPECT_EQ(std::string("a proper table_1"),
+            DataLog::Combine("a proper table", 1));
+}
+
+TEST(TestDataLog, VerifySingleTable) {
+  DataLog::CreateLog();
+  DataLog::AddTable(DataLog::Combine("table", 1));
+  DataLog::AddColumn(DataLog::Combine("table", 1), "arrival", 1);
+  DataLog::AddColumn(DataLog::Combine("table", 1), "timestamp", 1);
+  DataLog::AddColumn(DataLog::Combine("table", 1), "size", 5);
+  WebRtc_UWord32 sizes[5] = {1400, 1500, 1600, 1700, 1800};
+  for (int i = 0; i < 10; ++i) {
+    DataLog::InsertCell(DataLog::Combine("table", 1), "arrival",
+                        static_cast<double>(i));
+    DataLog::InsertCell(DataLog::Combine("table", 1), "timestamp",
+                        static_cast<WebRtc_Word64>(4354 + i));
+    DataLog::InsertCell(DataLog::Combine("table", 1), "size", sizes, 5);
+    DataLog::NextRow(DataLog::Combine("table", 1));
+  }
+  DataLog::ReturnLog();
+  // Verify file
+  FILE* table = fopen("table_1.txt", "r");
+  ASSERT_FALSE(table == NULL);
+  // Read the column names and verify with the expected columns.
+  // Note that the columns are written to file in alphabetical order.
+  // Data expected from parsing the file
+  const int kNumberOfRows = 10;
+  std::string string_arrival[kNumberOfRows] = {
+    "0,", "1,", "2,", "3,", "4,",
+    "5,", "6,", "7,", "8,", "9,"
+  };
+  std::string string_timestamp[kNumberOfRows] = {
+    "4354,", "4355,", "4356,", "4357,",
+    "4358,", "4359,", "4360,", "4361,",
+    "4362,", "4363,"
+  };
+  std::string string_sizes = "1400,1500,1600,1700,1800,";
+  ExpectedValuesMap expected;
+  expected["arrival,"] = ExpectedValues(
+                           std::vector<std::string>(string_arrival,
+                                                    string_arrival +
+                                                    kNumberOfRows),
+                           1);
+  expected["size[5],,,,,"] = ExpectedValues(
+                               std::vector<std::string>(10, string_sizes), 5);
+  expected["timestamp,"] = ExpectedValues(
+                             std::vector<std::string>(string_timestamp,
+                                                      string_timestamp +
+                                                      kNumberOfRows),
+                             1);
+  ASSERT_EQ(DataLogParser::VerifyTable(table, expected), 0);
+  fclose(table);
+}
+
+TEST(TestDataLog, VerifyMultipleTables) {
+  DataLog::CreateLog();
+  DataLog::AddTable(DataLog::Combine("table", 2));
+  DataLog::AddTable(DataLog::Combine("table", 3));
+  DataLog::AddColumn(DataLog::Combine("table", 2), "arrival", 1);
+  DataLog::AddColumn(DataLog::Combine("table", 2), "timestamp", 1);
+  DataLog::AddColumn(DataLog::Combine("table", 2), "size", 1);
+  DataLog::AddTable(DataLog::Combine("table", 4));
+  DataLog::AddColumn(DataLog::Combine("table", 3), "timestamp", 1);
+  DataLog::AddColumn(DataLog::Combine("table", 3), "arrival", 1);
+  DataLog::AddColumn(DataLog::Combine("table", 4), "size", 1);
+  for (WebRtc_Word32 i = 0; i < 10; ++i) {
+    DataLog::InsertCell(DataLog::Combine("table", 2), "arrival",
+                        static_cast<WebRtc_Word32>(i));
+    DataLog::InsertCell(DataLog::Combine("table", 2), "timestamp",
+                        static_cast<WebRtc_Word32>(4354 + i));
+    DataLog::InsertCell(DataLog::Combine("table", 2), "size",
+                        static_cast<WebRtc_Word32>(1200 + 10 * i));
+    DataLog::InsertCell(DataLog::Combine("table", 3), "timestamp",
+                        static_cast<WebRtc_Word32>(4354 + i));
+    DataLog::InsertCell(DataLog::Combine("table", 3), "arrival",
+                        static_cast<WebRtc_Word32>(i));
+    DataLog::InsertCell(DataLog::Combine("table", 4), "size",
+                        static_cast<WebRtc_Word32>(1200 + 10 * i));
+    DataLog::NextRow(DataLog::Combine("table", 4));
+    DataLog::NextRow(DataLog::Combine("table", 2));
+    DataLog::NextRow(DataLog::Combine("table", 3));
+  }
+  DataLog::ReturnLog();
+
+  // Data expected from parsing the file
+  const int kNumberOfRows = 10;
+  std::string string_arrival[kNumberOfRows] = {
+    "0,", "1,", "2,", "3,", "4,",
+    "5,", "6,", "7,", "8,", "9,"
+  };
+  std::string string_timestamp[kNumberOfRows] = {
+    "4354,", "4355,", "4356,", "4357,",
+    "4358,", "4359,", "4360,", "4361,",
+    "4362,", "4363,"
+  };
+  std::string string_size[kNumberOfRows] = {
+    "1200,", "1210,", "1220,", "1230,",
+    "1240,", "1250,", "1260,", "1270,",
+    "1280,", "1290,"
+  };
+
+  // Verify table 2
+  {
+    FILE* table = fopen("table_2.txt", "r");
+    ASSERT_FALSE(table == NULL);
+    ExpectedValuesMap expected;
+    expected["arrival,"] = ExpectedValues(
+                             std::vector<std::string>(string_arrival,
+                                                      string_arrival +
+                                                      kNumberOfRows),
+                             1);
+    expected["size,"] = ExpectedValues(
+                          std::vector<std::string>(string_size,
+                                                   string_size + kNumberOfRows),
+                          1);
+    expected["timestamp,"] = ExpectedValues(
+                               std::vector<std::string>(string_timestamp,
+                                                        string_timestamp +
+                                                        kNumberOfRows),
+                               1);
+    ASSERT_EQ(DataLogParser::VerifyTable(table, expected), 0);
+    fclose(table);
+  }
+
+  // Verify table 3
+  {
+    FILE* table = fopen("table_3.txt", "r");
+    ASSERT_FALSE(table == NULL);
+    ExpectedValuesMap expected;
+    expected["arrival,"] = ExpectedValues(
+                             std::vector<std::string>(string_arrival,
+                                                      string_arrival +
+                                                      kNumberOfRows),
+                             1);
+    expected["timestamp,"] = ExpectedValues(
+                             std::vector<std::string>(string_timestamp,
+                                                      string_timestamp +
+                                                      kNumberOfRows),
+                               1);
+    ASSERT_EQ(DataLogParser::VerifyTable(table, expected), 0);
+    fclose(table);
+  }
+
+  // Verify table 4
+  {
+    FILE* table = fopen("table_4.txt", "r");
+    ASSERT_FALSE(table == NULL);
+    ExpectedValuesMap expected;
+    expected["size,"] = ExpectedValues(
+                          std::vector<std::string>(string_size,
+                                                   string_size +
+                                                   kNumberOfRows),
+                          1);
+    ASSERT_EQ(DataLogParser::VerifyTable(table, expected), 0);
+    fclose(table);
+  }
+}
+
+TEST(TestDataLogCWrapper, VerifyCWrapper) {
+  // Simply call all C wrapper log functions through the C helper unittests.
+  // Main purpose is to make sure that the linkage is correct.
+
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestCreateLog());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestCombine());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestAddTable());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestAddColumn());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertCell_int());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertArray_int());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestNextRow());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertCell_float());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertArray_float());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestNextRow());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertCell_double());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertArray_double());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestNextRow());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertCell_int32());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertArray_int32());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestNextRow());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertCell_uint32());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertArray_uint32());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestNextRow());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertCell_int64());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestInsertArray_int64());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestNextRow());
+  EXPECT_EQ(0, WebRtcDataLogCHelper_TestReturnLog());
+}
diff --git a/trunk/src/system_wrappers/source/data_log_unittest_disabled.cc b/trunk/src/system_wrappers/source/data_log_unittest_disabled.cc
new file mode 100644
index 0000000..9d630b6
--- /dev/null
+++ b/trunk/src/system_wrappers/source/data_log_unittest_disabled.cc
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/interface/data_log.h"
+
+#include <cstdio>
+
+#include "gtest/gtest.h"
+
+using ::webrtc::DataLog;
+
+const char* kDataLogFileName = "table_1.txt";
+
+void PerformLogging(std::string table_name) {
+  // Simulate normal DataTable logging behavior using this table name.
+  ASSERT_EQ(0, DataLog::AddTable(table_name));
+  ASSERT_EQ(0, DataLog::AddColumn(table_name, "test", 1));
+  for (int i = 0; i < 10; ++i) {
+    // TODO(kjellander): Check InsertCell result when the DataLog dummy is
+    // fixed.
+    DataLog::InsertCell(table_name, "test", static_cast<double>(i));
+    ASSERT_EQ(0, DataLog::NextRow(table_name));
+  }
+}
+
+// Simple test to verify DataLog is still working when the GYP variable
+// enable_data_logging==0 (the default case).
+TEST(TestDataLogDisabled, VerifyLoggingWorks) {
+  ASSERT_EQ(0, DataLog::CreateLog());
+  // Generate a table_name name and assure it's an empty string
+  // (dummy behavior).
+  std::string table_name = DataLog::Combine("table", 1);
+  ASSERT_EQ("", table_name);
+  PerformLogging(table_name);
+  DataLog::ReturnLog();
+}
+
+TEST(TestDataLogDisabled, EnsureNoFileIsWritten) {
+  // Remove any previous data files on disk:
+  std::remove(kDataLogFileName);
+  ASSERT_EQ(0, DataLog::CreateLog());
+  // Don't use the table name we would get from Combine on a disabled DataLog.
+  // Use "table_1" instead (which is what an enabled DataLog would give us).
+  PerformLogging("table_1");
+  DataLog::ReturnLog();
+  // Verify no data log file have been written:
+  ASSERT_EQ(NULL, fopen(kDataLogFileName, "r"));
+}
diff --git a/trunk/src/system_wrappers/source/event.cc b/trunk/src/system_wrappers/source/event.cc
new file mode 100644
index 0000000..608cd53
--- /dev/null
+++ b/trunk/src/system_wrappers/source/event.cc
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "event_wrapper.h"
+
+#if defined(_WIN32)
+    #include <windows.h>
+    #include "event_win.h"
+#elif defined(WEBRTC_MAC_INTEL)
+    #include <ApplicationServices/ApplicationServices.h>
+    #include <pthread.h>
+    #include "event_posix.h"
+#else
+    #include <pthread.h>
+    #include "event_posix.h"
+#endif
+
+namespace webrtc {
+EventWrapper* EventWrapper::Create()
+{
+#if defined(_WIN32)
+    return new EventWindows();
+#else
+    return EventPosix::Create();
+#endif
+}
+
+int EventWrapper::KeyPressed()
+{
+#if defined(_WIN32)
+    int keyDown = 0;
+    for(int key = 0x20; key < 0x90; key++)
+    {
+        short res = GetAsyncKeyState(key);
+        keyDown |= res%2;  // Get the LSB
+    }
+    if(keyDown)
+    {
+        return 1;
+    }
+    else
+    {
+        return 0;
+    }
+#elif defined(WEBRTC_MAC_INTEL)
+    bool keyDown = false;
+    // loop through all Mac virtual key constant values
+    for(int keyIndex = 0; keyIndex <= 0x5C; keyIndex++) 
+    {
+        keyDown |= CGEventSourceKeyState(kCGEventSourceStateHIDSystemState, keyIndex);
+    }
+    if(keyDown)
+    {
+        return 1;
+    }
+    else
+    {
+        return 0;
+    } 
+#else
+    return -1;
+#endif
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/event_posix.cc b/trunk/src/system_wrappers/source/event_posix.cc
new file mode 100644
index 0000000..b77b902
--- /dev/null
+++ b/trunk/src/system_wrappers/source/event_posix.cc
@@ -0,0 +1,324 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "event_posix.h"
+
+#include <errno.h>
+#include <pthread.h>
+#include <signal.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/time.h>
+#include <unistd.h>
+
+namespace webrtc {
+const long int E6 = 1000000;
+const long int E9 = 1000 * E6;
+
+EventWrapper* EventPosix::Create()
+{
+    EventPosix* ptr = new EventPosix;
+    if (!ptr)
+    {
+        return NULL;
+    }
+
+    const int error = ptr->Construct();
+    if (error)
+    {
+        delete ptr;
+        return NULL;
+    }
+    return ptr;
+}
+
+
+EventPosix::EventPosix()
+    : _timerThread(0),
+      _timerEvent(0),
+      _periodic(false),
+      _time(0),
+      _count(0),
+      _state(kDown)
+{
+}
+
+int EventPosix::Construct()
+{
+    // Set start time to zero
+    memset(&_tCreate, 0, sizeof(_tCreate));
+
+    int result = pthread_mutex_init(&mutex, 0);
+    if (result != 0)
+    {
+        return -1;
+    }
+#ifdef WEBRTC_CLOCK_TYPE_REALTIME
+    result = pthread_cond_init(&cond, 0);
+    if (result != 0)
+    {
+        return -1;
+    }
+#else
+    pthread_condattr_t condAttr;
+    result = pthread_condattr_init(&condAttr);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_condattr_setclock(&condAttr, CLOCK_MONOTONIC);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_cond_init(&cond, &condAttr);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_condattr_destroy(&condAttr);
+    if (result != 0)
+    {
+        return -1;
+    }
+#endif
+    return 0;
+}
+
+EventPosix::~EventPosix()
+{
+    StopTimer();
+    pthread_cond_destroy(&cond);
+    pthread_mutex_destroy(&mutex);
+}
+
+bool EventPosix::Reset()
+{
+    if (0 != pthread_mutex_lock(&mutex))
+    {
+        return false;
+    }
+    _state = kDown;
+    pthread_mutex_unlock(&mutex);
+    return true;
+}
+
+bool EventPosix::Set()
+{
+    if (0 != pthread_mutex_lock(&mutex))
+    {
+        return false;
+    }
+    _state = kUp;
+     // Release all waiting threads
+    pthread_cond_broadcast(&cond);
+    pthread_mutex_unlock(&mutex);
+    return true;
+}
+
+EventTypeWrapper EventPosix::Wait(unsigned long timeout)
+{
+    int retVal = 0;
+    if (0 != pthread_mutex_lock(&mutex))
+    {
+        return kEventError;
+    }
+
+    if (kDown == _state)
+    {
+        if (WEBRTC_EVENT_INFINITE != timeout)
+        {
+            timespec tEnd;
+#ifndef WEBRTC_MAC
+#ifdef WEBRTC_CLOCK_TYPE_REALTIME
+            clock_gettime(CLOCK_REALTIME, &tEnd);
+#else
+            clock_gettime(CLOCK_MONOTONIC, &tEnd);
+#endif
+#else
+            timeval tVal;
+            struct timezone tZone;
+            tZone.tz_minuteswest = 0;
+            tZone.tz_dsttime = 0;
+            gettimeofday(&tVal,&tZone);
+            TIMEVAL_TO_TIMESPEC(&tVal,&tEnd);
+#endif
+            tEnd.tv_sec  += timeout / 1000;
+            tEnd.tv_nsec += (timeout - (timeout / 1000) * 1000) * E6;
+
+            if (tEnd.tv_nsec >= E9)
+            {
+                tEnd.tv_sec++;
+                tEnd.tv_nsec -= E9;
+            }
+            retVal = pthread_cond_timedwait(&cond, &mutex, &tEnd);
+        } else {
+            retVal = pthread_cond_wait(&cond, &mutex);
+        }
+    }
+
+    _state = kDown;
+    pthread_mutex_unlock(&mutex);
+
+    switch(retVal)
+    {
+    case 0:
+        return kEventSignaled;
+    case ETIMEDOUT:
+        return kEventTimeout;
+    default:
+        return kEventError;
+    }
+}
+
+EventTypeWrapper EventPosix::Wait(timespec& tPulse)
+{
+    int retVal = 0;
+    if (0 != pthread_mutex_lock(&mutex))
+    {
+        return kEventError;
+    }
+
+    if (kUp != _state)
+    {
+        retVal = pthread_cond_timedwait(&cond, &mutex, &tPulse);
+    }
+    _state = kDown;
+
+    pthread_mutex_unlock(&mutex);
+
+    switch(retVal)
+    {
+    case 0:
+        return kEventSignaled;
+    case ETIMEDOUT:
+        return kEventTimeout;
+    default:
+        return kEventError;
+    }
+}
+
+bool EventPosix::StartTimer(bool periodic, unsigned long time)
+{
+    if (_timerThread)
+    {
+        if(_periodic)
+        {
+            // Timer already started.
+            return false;
+        } else  {
+            // New one shot timer
+            _time = time;
+            _tCreate.tv_sec = 0;
+            _timerEvent->Set();
+            return true;
+        }
+    }
+
+    // Start the timer thread
+    _timerEvent = static_cast<EventPosix*>(EventWrapper::Create());
+    const char* threadName = "WebRtc_event_timer_thread";
+    _timerThread = ThreadWrapper::CreateThread(Run, this, kRealtimePriority,
+                                               threadName);
+    _periodic = periodic;
+    _time = time;
+    unsigned int id = 0;
+    if (_timerThread && _timerThread->Start(id))  // CreateThread may fail.
+    {
+        return true;
+    }
+    return false;
+}
+
+bool EventPosix::Run(ThreadObj obj)
+{
+    return static_cast<EventPosix*>(obj)->Process();
+}
+
+bool EventPosix::Process()
+{
+    if (_tCreate.tv_sec == 0)
+    {
+#ifndef WEBRTC_MAC
+#ifdef WEBRTC_CLOCK_TYPE_REALTIME
+        clock_gettime(CLOCK_REALTIME, &_tCreate);
+#else
+        clock_gettime(CLOCK_MONOTONIC, &_tCreate);
+#endif
+#else
+        timeval tVal;
+        struct timezone tZone;
+        tZone.tz_minuteswest = 0;
+        tZone.tz_dsttime = 0;
+        gettimeofday(&tVal,&tZone);
+        TIMEVAL_TO_TIMESPEC(&tVal,&_tCreate);
+#endif
+        _count=0;
+    }
+
+    timespec tEnd;
+    unsigned long long time = _time * ++_count;
+    tEnd.tv_sec  = _tCreate.tv_sec + time/1000;
+    tEnd.tv_nsec = _tCreate.tv_nsec + (time - (time/1000)*1000)*E6;
+
+    if ( tEnd.tv_nsec >= E9 )
+    {
+        tEnd.tv_sec++;
+        tEnd.tv_nsec -= E9;
+    }
+
+    switch(_timerEvent->Wait(tEnd))
+    {
+    case kEventSignaled:
+        return true;
+    case kEventError:
+        return false;
+    case kEventTimeout:
+        break;
+    }
+    if(_periodic || _count==1)
+    {
+        Set();
+    }
+    return true;
+}
+
+bool EventPosix::StopTimer()
+{
+    if(_timerThread)
+    {
+        _timerThread->SetNotAlive();
+    }
+    if (_timerEvent)
+    {
+        _timerEvent->Set();
+    }
+    if (_timerThread)
+    {
+        if(!_timerThread->Stop())
+        {
+            return false;
+        }
+
+        delete _timerThread;
+        _timerThread = 0;
+    }
+    if (_timerEvent)
+    {
+        delete _timerEvent;
+        _timerEvent = 0;
+    }
+
+    // Set time to zero to force new reference time for the timer.
+    memset(&_tCreate, 0, sizeof(_tCreate));
+    _count=0;
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/event_posix.h b/trunk/src/system_wrappers/source/event_posix.h
new file mode 100644
index 0000000..0e5893b
--- /dev/null
+++ b/trunk/src/system_wrappers/source/event_posix.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_EVENT_POSIX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_EVENT_POSIX_H_
+
+#include "event_wrapper.h"
+
+#include <pthread.h>
+#include <time.h>
+
+#include "thread_wrapper.h"
+
+namespace webrtc {
+// Internal up/down state used by the event implementation.
+enum State
+{
+    kUp = 1,
+    kDown = 2
+};
+
+// POSIX (pthread condition variable) implementation of EventWrapper.
+// Timer support runs a dedicated thread (Run/Process) that waits on a
+// second, private EventPosix (_timerEvent) until each tick deadline.
+class EventPosix : public EventWrapper
+{
+public:
+    static EventWrapper* Create();
+
+    virtual ~EventPosix();
+
+    // Waits up to maxTime milliseconds for the event to become signaled.
+    virtual EventTypeWrapper Wait(unsigned long maxTime);
+    virtual bool Set();
+    virtual bool Reset();
+
+    // Signals this event every `time` ms (periodic) or once after `time` ms.
+    virtual bool StartTimer(bool periodic, unsigned long time);
+    virtual bool StopTimer();
+
+private:
+    EventPosix();
+    int Construct();
+
+    // Timer-thread entry point and its per-iteration body.
+    static bool Run(ThreadObj obj);
+    bool Process();
+    // Waits until the absolute deadline tPulse (overload of the public Wait).
+    EventTypeWrapper Wait(timespec& tPulse);
+
+
+private:
+    pthread_cond_t  cond;
+    pthread_mutex_t mutex;
+
+    ThreadWrapper* _timerThread; // Tick thread; 0 when no timer is running.
+    EventPosix*    _timerEvent;  // Private event the tick thread waits on.
+    timespec       _tCreate;     // Timer reference time; zeroed to reset.
+
+    bool          _periodic;
+    unsigned long _time;  // In ms
+    unsigned long _count; // Ticks elapsed since _tCreate.
+    State         _state;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_EVENT_POSIX_H_
diff --git a/trunk/src/system_wrappers/source/event_win.cc b/trunk/src/system_wrappers/source/event_win.cc
new file mode 100644
index 0000000..efcb5af
--- /dev/null
+++ b/trunk/src/system_wrappers/source/event_win.cc
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "event_win.h"
+
+#include "Mmsystem.h"
+
+namespace webrtc {
+// Creates an unnamed auto-reset event (a successful wait consumes the
+// signaled state), initially non-signaled, with no timer attached.
+EventWindows::EventWindows()
+    : _event(::CreateEvent(NULL  /* security attributes */,
+                           FALSE /* manual reset */,
+                           FALSE /* initial state */,
+                           NULL  /* name of event */)),
+      _timerID(NULL)
+{
+}
+
+EventWindows::~EventWindows()
+{
+    // NOTE(review): a still-running multimedia timer is not killed here;
+    // callers appear to be expected to call StopTimer() first — confirm.
+    CloseHandle(_event);
+}
+
+bool EventWindows::Set()
+{
+    // Note: setting an event that is already set has no effect.
+    return SetEvent(_event) == 1 ? true : false;
+}
+
+bool EventWindows::Reset()
+{
+    return ResetEvent(_event) == 1 ? true : false;
+}
+
+// Blocks up to maxTime milliseconds (or WAIT_INFINITE) for the event.
+EventTypeWrapper EventWindows::Wait(unsigned long maxTime)
+{
+    unsigned long res = WaitForSingleObject(_event, maxTime);
+    switch(res)
+    {
+    case WAIT_OBJECT_0:
+        return kEventSignaled;
+    case WAIT_TIMEOUT:
+        return kEventTimeout;
+    default:
+        return kEventError;
+    }
+}
+
+// Arms a multimedia timer that drives _event: a periodic timer pulses the
+// event every `time` ms; a one-shot timer sets it once after `time` ms.
+// Any previously armed timer is killed first.
+bool EventWindows::StartTimer(bool periodic, unsigned long time)
+{
+    if (_timerID != NULL)
+    {
+        timeKillEvent(_timerID);
+        _timerID=NULL;
+    }
+    if (periodic)
+    {
+        _timerID=timeSetEvent(time, 0,(LPTIMECALLBACK)HANDLE(_event),0,
+                              TIME_PERIODIC|TIME_CALLBACK_EVENT_PULSE);
+    } else {
+        _timerID=timeSetEvent(time, 0,(LPTIMECALLBACK)HANDLE(_event),0,
+                              TIME_ONESHOT|TIME_CALLBACK_EVENT_SET);
+    }
+
+    // timeSetEvent returns NULL on failure.
+    if (_timerID == NULL)
+    {
+        return false;
+    }
+    return true;
+}
+
+bool EventWindows::StopTimer()
+{
+    // NOTE(review): called unconditionally, so with no active timer this
+    // passes NULL to timeKillEvent — presumably a harmless failed call,
+    // but a _timerID != NULL guard would be cleaner; confirm.
+    timeKillEvent(_timerID);
+    _timerID = NULL;
+    return true;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/event_win.h b/trunk/src/system_wrappers/source/event_win.h
new file mode 100644
index 0000000..8ca1360
--- /dev/null
+++ b/trunk/src/system_wrappers/source/event_win.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_EVENT_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_EVENT_WINDOWS_H_
+
+#include <windows.h>
+
+#include "event_wrapper.h"
+
+#include "typedefs.h"
+
+namespace webrtc {
+// Windows implementation of EventWrapper, backed by an auto-reset Win32
+// event handle. Timers are implemented with the multimedia timer API
+// (timeSetEvent), which pulses or sets the same event handle.
+class EventWindows : public EventWrapper
+{
+public:
+    EventWindows();
+    virtual ~EventWindows();
+
+    // Waits up to maxTime milliseconds for the event to become signaled.
+    virtual EventTypeWrapper Wait(unsigned long maxTime);
+    virtual bool Set();
+    virtual bool Reset();
+
+    // Signals this event every `time` ms (periodic) or once after `time` ms.
+    virtual bool StartTimer(bool periodic, unsigned long time);
+    virtual bool StopTimer();
+
+private:
+    HANDLE  _event;
+    WebRtc_UWord32 _timerID; // Multimedia timer id; NULL when no timer runs.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_EVENT_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/source/file_impl.cc b/trunk/src/system_wrappers/source/file_impl.cc
new file mode 100644
index 0000000..4d06c54
--- /dev/null
+++ b/trunk/src/system_wrappers/source/file_impl.cc
@@ -0,0 +1,265 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "file_impl.h"
+
+#include <assert.h>
+
+#ifdef _WIN32
+#include <Windows.h>
+#else
+#include <stdarg.h>
+#include <string.h>
+#endif
+
+namespace webrtc {
+
+// Factory: returns the concrete FILE*-based implementation.
+FileWrapper* FileWrapper::Create()
+{
+    return new FileWrapperImpl();
+}
+
+FileWrapperImpl::FileWrapperImpl()
+    : _id(NULL),
+      _open(false),
+      _looping(false),
+      _readOnly(false),
+      _maxSizeInBytes(0),
+      _sizeInBytes(0)
+{
+    memset(_fileNameUTF8, 0, kMaxFileNameSize);
+}
+
+FileWrapperImpl::~FileWrapperImpl()
+{
+    if (_id != NULL)
+    {
+        fclose(_id);
+    }
+}
+
+// Closes the underlying FILE* (if any) and clears the stored name and the
+// open flag. Always reports success.
+int FileWrapperImpl::CloseFile()
+{
+    if (_id != NULL)
+    {
+        fclose(_id);
+        _id = NULL;
+    }
+    memset(_fileNameUTF8, 0, kMaxFileNameSize);
+    _open = false;
+    return 0;
+}
+
+// Seeks back to the start of the file. Only permitted for looping readers
+// or writable files; also resets the byte count used by the size cap.
+int FileWrapperImpl::Rewind()
+{
+    if(_looping || !_readOnly)
+    {
+        if (_id != NULL)
+        {
+            _sizeInBytes = 0;
+            return fseek(_id, 0, SEEK_SET);
+        }
+    }
+    return -1;
+}
+
+// Sets a soft cap on the number of bytes Write() will emit; 0 disables it.
+int FileWrapperImpl::SetMaxFileSize(size_t bytes)
+{
+    _maxSizeInBytes = bytes;
+    return 0;
+}
+
+int FileWrapperImpl::Flush()
+{
+    if (_id != NULL)
+    {
+        return fflush(_id);
+    }
+    return -1;
+}
+
+// Copies the stored UTF-8 file name into fileNameUTF8 (truncating to fit)
+// and NUL-terminates it. Returns -1 if no file name is stored.
+int FileWrapperImpl::FileName(char* fileNameUTF8,
+                              size_t size) const
+{
+    size_t length = strlen(_fileNameUTF8);
+    // _fileNameUTF8 is always NUL-terminated, so this check is defensive.
+    if(length > kMaxFileNameSize)
+    {
+        assert(false);
+        return -1;
+    }
+    if(length < 1)
+    {
+        return -1;
+    }
+
+    // Make sure to NULL terminate
+    // NOTE(review): when size == length the terminator below is written to
+    // fileNameUTF8[length], one byte past a buffer of exactly `size` bytes;
+    // this looks like it should be `size <= length`. A size of 0 would also
+    // underflow `size - 1`. Confirm all callers pass size > length.
+    if(size < length)
+    {
+        length = size - 1;
+    }
+    memcpy(fileNameUTF8, _fileNameUTF8, length);
+    fileNameUTF8[length] = 0;
+    return 0;
+}
+
+// Returns whether a file is currently open.
+bool FileWrapperImpl::Open() const
+{
+    return _open;
+}
+
+// Opens fileNameUTF8 for reading or writing, optionally in text mode and
+// (for readers) looping mode. On success any previously open file is
+// closed and replaced. Returns 0 on success, -1 on failure.
+int FileWrapperImpl::OpenFile(const char *fileNameUTF8, bool readOnly,
+                              bool loop, bool text)
+{
+    // Reject names that would not fit (including the terminator) in the
+    // fixed-size _fileNameUTF8 buffer.
+    size_t length = strlen(fileNameUTF8);
+    if (length > kMaxFileNameSize - 1)
+    {
+        return -1;
+    }
+
+    _readOnly = readOnly;
+
+    FILE *tmpId = NULL;
+#if defined _WIN32
+    // Windows: convert the UTF-8 name to UTF-16 and use _wfopen so
+    // non-ASCII paths work.
+    wchar_t wideFileName[kMaxFileNameSize];
+    wideFileName[0] = 0;
+
+    MultiByteToWideChar(CP_UTF8,
+                        0 /*UTF8 flag*/,
+                        fileNameUTF8,
+                        -1 /*Null terminated string*/,
+                        wideFileName,
+                        kMaxFileNameSize);
+    if(text)
+    {
+        if(readOnly)
+        {
+            tmpId = _wfopen(wideFileName, L"rt");
+        } else {
+            tmpId = _wfopen(wideFileName, L"wt");
+        }
+    } else {
+        if(readOnly)
+        {
+            tmpId = fopen(fileNameUTF8, "rb");
+        } else {
+            tmpId = _wfopen(wideFileName, L"wb");
+        }
+    }
+#else
+    // POSIX: fopen takes the UTF-8 name directly; the "t" suffix is
+    // accepted but has no effect outside Windows.
+    if(text)
+    {
+        if(readOnly)
+        {
+            tmpId = fopen(fileNameUTF8, "rt");
+        } else {
+            tmpId = fopen(fileNameUTF8, "wt");
+        }
+    } else {
+        if(readOnly)
+        {
+            tmpId = fopen(fileNameUTF8, "rb");
+        } else {
+            tmpId = fopen(fileNameUTF8, "wb");
+        }
+    }
+#endif
+
+    if (tmpId != NULL)
+    {
+        // +1 comes from copying the NULL termination character.
+        memcpy(_fileNameUTF8, fileNameUTF8, length + 1);
+        if (_id != NULL)
+        {
+            fclose(_id);
+        }
+        _id = tmpId;
+        _looping = loop;
+        _open = true;
+        return 0;
+    }
+    return -1;
+}
+
+// Reads up to `length` bytes into buf and returns the count actually read.
+// A short read on a non-looping file closes it (end of file reached).
+int FileWrapperImpl::Read(void* buf, int length)
+{
+    if (length < 0)
+        return -1;
+
+    if (_id == NULL)
+        return -1;
+
+    int bytes_read = static_cast<int>(fread(buf, 1, length, _id));
+    if (bytes_read != length && !_looping)
+    {
+        CloseFile();
+    }
+    return bytes_read;
+}
+
+// printf-style formatted write. Returns the number of characters written,
+// or -1 on failure (in which case the file is closed). Not allowed on
+// read-only files.
+int FileWrapperImpl::WriteText(const char* format, ...)
+{
+    if (format == NULL)
+        return -1;
+
+    if (_readOnly)
+        return -1;
+
+    if (_id == NULL)
+        return -1;
+
+    va_list args;
+    va_start(args, format);
+    int num_chars = vfprintf(_id, format, args);
+    va_end(args);
+
+    if (num_chars >= 0)
+    {
+        return num_chars;
+    }
+    else
+    {
+        CloseFile();
+        return -1;
+    }
+}
+
+// Writes `length` bytes from buf. Refuses (after flushing) once the write
+// would exceed the configured size cap; closes the file if fwrite writes
+// nothing.
+bool FileWrapperImpl::Write(const void* buf, int length)
+{
+    if (buf == NULL)
+        return false;
+
+    if (length < 0)
+        return false;
+
+    if (_readOnly)
+        return false;
+
+    if (_id == NULL)
+        return false;
+
+    // Check if it's time to stop writing.
+    if (_maxSizeInBytes > 0 && (_sizeInBytes + length) > _maxSizeInBytes)
+    {
+        Flush();
+        return false;
+    }
+
+    // NOTE(review): a partial write (0 < num_bytes < length) still returns
+    // true; only the bytes actually written are counted toward the cap.
+    size_t num_bytes = fwrite(buf, 1, length, _id);
+    if (num_bytes > 0)
+    {
+        _sizeInBytes += num_bytes;
+        return true;
+    }
+
+    CloseFile();
+    return false;
+}
+
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/file_impl.h b/trunk/src/system_wrappers/source/file_impl.h
new file mode 100644
index 0000000..31ab31e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/file_impl.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_FILE_IMPL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_FILE_IMPL_H_
+
+#include "file_wrapper.h"
+
+#include <stdio.h>
+
+namespace webrtc {
+
+class FileWrapperImpl : public FileWrapper
+{
+public:
+    FileWrapperImpl();
+    virtual ~FileWrapperImpl();
+
+    virtual int FileName(char* fileNameUTF8,
+                         size_t size) const;
+
+    virtual bool Open() const;
+
+    virtual int OpenFile(const char* fileNameUTF8,
+                         bool readOnly,
+                         bool loop = false,
+                         bool text = false);
+
+    virtual int CloseFile();
+    virtual int SetMaxFileSize(size_t bytes);
+    virtual int Flush();
+
+    virtual int Read(void* buf, int length);
+    virtual bool Write(const void *buf, int length);
+    virtual int WriteText(const char* format, ...);
+    virtual int Rewind();
+
+private:
+    FILE* _id;
+    bool _open;
+    bool _looping;
+    bool _readOnly;
+    size_t _maxSizeInBytes; // -1 indicates file size limitation is off
+    size_t _sizeInBytes;
+    char _fileNameUTF8[kMaxFileNameSize];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_FILE_IMPL_H_
diff --git a/trunk/src/system_wrappers/source/list_no_stl.cc b/trunk/src/system_wrappers/source/list_no_stl.cc
new file mode 100644
index 0000000..dbba571
--- /dev/null
+++ b/trunk/src/system_wrappers/source/list_no_stl.cc
@@ -0,0 +1,289 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "list_wrapper.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+// A list node holding either an opaque pointer (caller-owned memory) or an
+// unsigned integer payload; exactly one of the two constructors is used.
+ListItem::ListItem(const void* item)
+    : next_(0),
+      prev_(0),
+      item_ptr_(item),
+      item_(0)
+{
+}
+
+ListItem::ListItem(const unsigned int item)
+    : next_(0),
+      prev_(0),
+      item_ptr_(0),
+      item_(item)
+{
+}
+
+ListItem::~ListItem()
+{
+}
+
+// Returns the stored pointer payload (0 for integer items).
+void* ListItem::GetItem() const
+{
+    return const_cast<void*>(item_ptr_);
+}
+
+// Returns the stored integer payload (0 for pointer items).
+unsigned int ListItem::GetUnsignedItem() const
+{
+    return item_;
+}
+
+// Hand-rolled doubly-linked list. Push operations are serialized with a
+// critical section; traversal and Erase are not locked — callers must
+// provide their own synchronization for those.
+ListWrapper::ListWrapper()
+    : critical_section_(CriticalSectionWrapper::CreateCriticalSection()),
+      first_(0),
+      last_(0),
+      size_(0)
+{
+}
+
+ListWrapper::~ListWrapper()
+{
+    if (!Empty())
+    {
+        // TODO (hellner) I'm not sure this logging is useful.
+        WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1,
+                   "Potential memory leak in ListWrapper");
+        // Remove all remaining list items.
+        while (Erase(First()) == 0)
+        {}
+    }
+    delete critical_section_;
+}
+
+bool ListWrapper::Empty() const
+{
+    return !first_ && !last_;
+}
+
+unsigned int ListWrapper::GetSize() const
+{
+    return size_;
+}
+
+// Appends a caller-owned pointer; the list takes ownership of the node but
+// not of the pointed-to memory.
+int ListWrapper::PushBack(const void* ptr)
+{
+    ListItem* item = new ListItem(ptr);
+    CriticalSectionScoped lock(critical_section_);
+    PushBackImpl(item);
+    return 0;
+}
+
+// Appends an integer item; node memory is managed by the list.
+int ListWrapper::PushBack(const unsigned int item_id)
+{
+    ListItem* item = new ListItem(item_id);
+    CriticalSectionScoped lock(critical_section_);
+    PushBackImpl(item);
+    return 0;
+}
+
+int ListWrapper::PushFront(const unsigned int item_id)
+{
+    ListItem* item = new ListItem(item_id);
+    CriticalSectionScoped lock(critical_section_);
+    PushFrontImpl(item);
+    return 0;
+}
+
+int ListWrapper::PushFront(const void* ptr)
+{
+    ListItem* item = new ListItem(ptr);
+    CriticalSectionScoped lock(critical_section_);
+    PushFrontImpl(item);
+    return 0;
+}
+
+// Removes and deletes the first node (Erase handles an empty list).
+int ListWrapper::PopFront()
+{
+    return Erase(first_);
+}
+
+int ListWrapper::PopBack()
+{
+    return Erase(last_);
+}
+
+ListItem* ListWrapper::First() const
+{
+    return first_;
+}
+
+ListItem* ListWrapper::Last() const
+{
+    return last_;
+}
+
+// Returns the successor of item, or 0 at the end (or for a 0 item).
+ListItem* ListWrapper::Next(ListItem* item) const
+{
+    if(!item)
+    {
+        return 0;
+    }
+    return item->next_;
+}
+
+// Returns the predecessor of item, or 0 at the front (or for a 0 item).
+ListItem* ListWrapper::Previous(ListItem* item) const
+{
+    if (!item)
+    {
+        return 0;
+    }
+    return item->prev_;
+}
+
+// Links new_item immediately after existing_previous_item. The list takes
+// ownership of new_item on success.
+int ListWrapper::Insert(ListItem* existing_previous_item, ListItem* new_item)
+{
+    if (!new_item)
+    {
+        return -1;
+    }
+    // Allow existing_previous_item to be NULL if the list is empty.
+    // TODO (hellner) why allow this? Keep it as is for now to avoid
+    // breaking API contract.
+    if (!existing_previous_item && !Empty())
+    {
+        return -1;
+    }
+    CriticalSectionScoped lock(critical_section_);
+    if (!existing_previous_item)
+    {
+        PushBackImpl(new_item);
+        return 0;
+    }
+    ListItem* next_item = existing_previous_item->next_;
+    new_item->next_ = existing_previous_item->next_;
+    new_item->prev_ = existing_previous_item;
+    existing_previous_item->next_ = new_item;
+    if (next_item)
+    {
+        next_item->prev_ = new_item;
+    }
+    else
+    {
+        // Inserted after the old tail: update the tail pointer.
+        last_ = new_item;
+    }
+    size_++;
+    return 0;
+}
+
+// Links new_item immediately before existing_next_item. The list takes
+// ownership of new_item on success.
+int ListWrapper::InsertBefore(ListItem* existing_next_item,
+                              ListItem* new_item)
+{
+    if (!new_item)
+    {
+        return -1;
+    }
+    // Allow existing_next_item to be NULL if the list is empty.
+    // Todo: why allow this? Keep it as is for now to avoid breaking API
+    // contract.
+    if (!existing_next_item && !Empty())
+    {
+        return -1;
+    }
+    CriticalSectionScoped lock(critical_section_);
+    if (!existing_next_item)
+    {
+        PushBackImpl(new_item);
+        return 0;
+    }
+
+    ListItem* previous_item = existing_next_item->prev_;
+    new_item->next_ = existing_next_item;
+    new_item->prev_ = previous_item;
+    existing_next_item->prev_ = new_item;
+    if (previous_item)
+    {
+        previous_item->next_ = new_item;
+    }
+    else
+    {
+        // Inserted before the old head: update the head pointer.
+        first_ = new_item;
+    }
+    size_++;
+    return 0;
+}
+
+// Unlinks item from the list and deletes the node (the pointer payload, if
+// any, remains caller-owned). Returns -1 for a null item.
+// NOTE(review): not guarded by critical_section_, unlike the push methods.
+int ListWrapper::Erase(ListItem* item)
+{
+    if (!item)
+    {
+        return -1;
+    }
+    size_--;
+    ListItem* previous_item = item->prev_;
+    ListItem* next_item = item->next_;
+    if (!previous_item)
+    {
+        // Erasing the head.
+        if(next_item)
+        {
+            next_item->prev_ = 0;
+        }
+        first_ = next_item;
+    }
+    else
+    {
+        previous_item->next_ = next_item;
+    }
+    if (!next_item)
+    {
+        // Erasing the tail.
+        if(previous_item)
+        {
+            previous_item->next_ = 0;
+        }
+        last_ = previous_item;
+    }
+    else
+    {
+        next_item->prev_ = previous_item;
+    }
+    delete item;
+    return 0;
+}
+
+// Appends an already-allocated node. Caller holds the lock.
+void ListWrapper::PushBackImpl(ListItem* item)
+{
+    if (Empty())
+    {
+        first_ = item;
+        last_ = item;
+        size_++;
+        return;
+    }
+
+    item->prev_ = last_;
+    last_->next_ = item;
+    last_ = item;
+    size_++;
+}
+
+// Prepends an already-allocated node. Caller holds the lock.
+void ListWrapper::PushFrontImpl(ListItem* item)
+{
+    if (Empty())
+    {
+        first_ = item;
+        last_ = item;
+        size_++;
+        return;
+    }
+
+    item->next_ = first_;
+    first_->prev_ = item;
+    first_ = item;
+    size_++;
+}
+} //namespace webrtc
diff --git a/trunk/src/system_wrappers/source/list_no_stl.h b/trunk/src/system_wrappers/source/list_no_stl.h
new file mode 100644
index 0000000..26d844c
--- /dev/null
+++ b/trunk/src/system_wrappers/source/list_no_stl.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_
+
+#include "constructor_magic.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// Node of the hand-rolled (non-STL) list; holds either an opaque pointer
+// or an unsigned integer payload.
+// NOTE(review): this header declares ListNoStlItem/ListNoStl, but the
+// accompanying list_no_stl.cc in this drop defines ListItem/ListWrapper
+// (via list_wrapper.h) — confirm which naming is the one actually built.
+class ListNoStlItem
+{
+public:
+    ListNoStlItem(const void* ptr);
+    ListNoStlItem(const unsigned int item);
+    virtual ~ListNoStlItem();
+    void* GetItem() const;
+    unsigned int GetUnsignedItem() const;
+
+protected:
+    ListNoStlItem* next_;
+    ListNoStlItem* prev_;
+
+private:
+    friend class ListNoStl;
+
+    const void*         item_ptr_;
+    const unsigned int  item_;
+    DISALLOW_COPY_AND_ASSIGN(ListNoStlItem);
+};
+
+
+// Doubly-linked list with the same interface as the STL-backed wrapper;
+// push operations are serialized with a critical section.
+class ListNoStl
+{
+public:
+    ListNoStl();
+    virtual ~ListNoStl();
+
+    // ListWrapper functions
+    unsigned int GetSize() const;
+    int PushBack(const void* ptr);
+    int PushBack(const unsigned int item_id);
+    int PushFront(const void* ptr);
+    int PushFront(const unsigned int item_id);
+    int PopFront();
+    int PopBack();
+    bool Empty() const;
+    ListNoStlItem* First() const;
+    ListNoStlItem* Last() const;
+    ListNoStlItem* Next(ListNoStlItem* item) const;
+    ListNoStlItem* Previous(ListNoStlItem* item) const;
+    int Erase(ListNoStlItem* item);
+    int Insert(ListNoStlItem* existing_previous_item,
+               ListNoStlItem* new_item);
+
+    int InsertBefore(ListNoStlItem* existing_next_item,
+                     ListNoStlItem* new_item);
+
+private:
+    // Lock-free internal helpers used by the public push methods.
+    void PushBack(ListNoStlItem* item);
+    void PushFront(ListNoStlItem* item);
+
+    CriticalSectionWrapper* critical_section_;
+    ListNoStlItem* first_;
+    ListNoStlItem* last_;
+    unsigned int size_;
+    DISALLOW_COPY_AND_ASSIGN(ListNoStl);
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_
diff --git a/trunk/src/system_wrappers/source/list_stl.cc b/trunk/src/system_wrappers/source/list_stl.cc
new file mode 100644
index 0000000..dcc63c3
--- /dev/null
+++ b/trunk/src/system_wrappers/source/list_stl.cc
@@ -0,0 +1,244 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "list_wrapper.h"
+
+#include "trace.h"
+
+namespace webrtc {
+// STL-backed node: the payload plus a cached std::list iterator
+// (this_iter_) that First/Last/Next/Previous refresh before returning.
+ListItem::ListItem(const void* item)
+    : this_iter_(),
+      item_ptr_(item),
+      item_(0)
+{
+}
+
+ListItem::ListItem(const unsigned int item)
+    : this_iter_(),
+      item_ptr_(0),
+      item_(item)
+{
+}
+
+ListItem::~ListItem()
+{
+}
+
+// Returns the stored pointer payload (0 for integer items).
+void* ListItem::GetItem() const
+{
+    return const_cast<void*>(item_ptr_);
+}
+
+// Returns the stored integer payload (0 for pointer items).
+unsigned int ListItem::GetUnsignedItem() const
+{
+    return item_;
+}
+
+// std::list-backed implementation of ListWrapper. Unlike the non-STL
+// variant, no locking is performed anywhere.
+ListWrapper::ListWrapper() : list_()
+{
+}
+
+ListWrapper::~ListWrapper()
+{
+    if (!Empty())
+    {
+        // TODO (hellner) I'm not sure this logging is useful.
+        WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1,
+                   "Potential memory leak in ListWrapper");
+        // Remove all remaining list items.
+        while (Erase(First()) == 0)
+        {}
+    }
+}
+
+bool ListWrapper::Empty() const
+{
+    return list_.empty();
+}
+
+unsigned int ListWrapper::GetSize() const
+{
+    return list_.size();
+}
+
+// Appends a caller-owned pointer wrapped in a heap-allocated ListItem.
+int ListWrapper::PushBack(const void* ptr)
+{
+    ListItem* item = new ListItem(ptr);
+    list_.push_back(item);
+    return 0;
+}
+
+int ListWrapper::PushBack(const unsigned int item_id)
+{
+    ListItem* item = new ListItem(item_id);
+    list_.push_back(item);
+    return 0;
+}
+
+int ListWrapper::PushFront(const unsigned int item_id)
+{
+    ListItem* item = new ListItem(item_id);
+    list_.push_front(item);
+    return 0;
+}
+
+int ListWrapper::PushFront(const void* ptr)
+{
+    ListItem* item = new ListItem(ptr);
+    list_.push_front(item);
+    return 0;
+}
+
+// NOTE(review): pops the pointer but never deletes the ListItem allocated
+// in PushFront/PushBack — this leaks the node, unlike the non-STL variant
+// whose PopFront goes through Erase (which deletes). Confirm intent.
+int ListWrapper::PopFront()
+{
+    if(list_.empty())
+    {
+        return -1;
+    }
+    list_.pop_front();
+    return 0;
+}
+
+// NOTE(review): same node leak as PopFront.
+int ListWrapper::PopBack()
+{
+    if(list_.empty())
+    {
+        return -1;
+    }
+    list_.pop_back();
+    return 0;
+}
+
+// Returns the first item, caching its iterator in the item so that
+// Next/Previous/Erase can locate it later.
+ListItem* ListWrapper::First() const
+{
+    if(list_.empty())
+    {
+        return NULL;
+    }
+    std::list<ListItem*>::iterator item_iter = list_.begin();
+    ListItem* return_item = (*item_iter);
+    return_item->this_iter_ = item_iter;
+    return return_item;
+}
+
+// Returns the last item (iterator cached, as in First()).
+ListItem* ListWrapper::Last() const
+{
+    if(list_.empty())
+    {
+        return NULL;
+    }
+    // std::list::end() addresses the last item + 1. Decrement so that the
+    // actual last is accessed.
+    std::list<ListItem*>::iterator item_iter = list_.end();
+    --item_iter;
+    ListItem* return_item = (*item_iter);
+    return_item->this_iter_ = item_iter;
+    return return_item;
+}
+
+// Returns the successor of item (iterator cached in the returned item),
+// or NULL at the end of the list. Relies on item->this_iter_ being valid,
+// i.e. item was obtained via First/Last/Next/Previous.
+ListItem* ListWrapper::Next(ListItem* item) const
+{
+    if(item == NULL)
+    {
+        return NULL;
+    }
+    std::list<ListItem*>::iterator item_iter = item->this_iter_;
+    ++item_iter;
+    if (item_iter == list_.end())
+    {
+        return NULL;
+    }
+    ListItem* return_item = (*item_iter);
+    return_item->this_iter_ = item_iter;
+    return return_item;
+}
+
+// Returns the predecessor of item, or NULL at the front of the list.
+ListItem* ListWrapper::Previous(ListItem* item) const
+{
+    if(item == NULL)
+    {
+        return NULL;
+    }
+    std::list<ListItem*>::iterator item_iter = item->this_iter_;
+    if (item_iter == list_.begin())
+    {
+      return NULL;
+    }
+    --item_iter;
+    ListItem* return_item = (*item_iter);
+    return_item->this_iter_ = item_iter;
+    return return_item;
+}
+
+// Inserts new_item immediately after existing_previous_item (or as the
+// only element when the list is empty and NULL is passed).
+int ListWrapper::Insert(ListItem* existing_previous_item,
+                        ListItem* new_item)
+{
+    // Allow existingPreviousItem to be NULL if the list is empty.
+    // TODO (hellner) why allow this? Keep it as is for now to avoid
+    // breaking API contract.
+    if (!existing_previous_item && !Empty())
+    {
+        return -1;
+    }
+
+    if (!new_item)
+    {
+        return -1;
+    }
+
+    std::list<ListItem*>::iterator insert_location = list_.begin();
+    if (!Empty())
+    {
+        // std::list::insert places before the iterator, so advance one past
+        // the anchor to insert after it.
+        insert_location = existing_previous_item->this_iter_;
+        if(insert_location != list_.end())
+        {
+            ++insert_location;
+        }
+    }
+
+    list_.insert(insert_location,new_item);
+    return 0;
+}
+
+// Inserts new_item immediately before existing_next_item (or as the only
+// element when the list is empty and NULL is passed).
+int ListWrapper::InsertBefore(ListItem* existing_next_item,
+                           ListItem* new_item)
+{
+    // Allow existing_next_item to be NULL if the list is empty.
+    // Todo: why allow this? Keep it as is for now to avoid breaking API
+    // contract.
+    if (!existing_next_item && !Empty())
+    {
+        return -1;
+    }
+    if (!new_item)
+    {
+        return -1;
+    }
+
+    std::list<ListItem*>::iterator insert_location = list_.begin();
+    if (!Empty())
+    {
+        insert_location = existing_next_item->this_iter_;
+    }
+
+    list_.insert(insert_location,new_item);
+    return 0;
+}
+
+// Unlinks item via its cached iterator.
+// NOTE(review): unlike the non-STL Erase, the node is NOT deleted here —
+// combined with PopFront/PopBack this leaks ListItems; confirm ownership.
+int ListWrapper::Erase(ListItem* item)
+{
+    if(item == NULL)
+    {
+        return -1;
+    }
+    list_.erase(item->this_iter_);
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/list_stl.h b/trunk/src/system_wrappers/source/list_stl.h
new file mode 100644
index 0000000..b83a664
--- /dev/null
+++ b/trunk/src/system_wrappers/source/list_stl.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_
+
+#include <list>
+
+#include "constructor_magic.h"
+
+namespace webrtc {
+// Node of the std::list-backed ListWrapper: a pointer or unsigned integer
+// payload plus a cached iterator that the wrapper refreshes on traversal.
+class ListItem
+{
+friend class ListWrapper;
+
+public:
+    ListItem(const void* ptr);
+    ListItem(const unsigned int item);
+    virtual ~ListItem();
+    void* GetItem() const;
+    unsigned int GetUnsignedItem() const;
+
+private:
+    // Position of this node within ListWrapper::list_; mutable so const
+    // traversal methods (First/Last/Next/Previous) can refresh it.
+    mutable std::list<ListItem*>::iterator this_iter_;
+    const void*         item_ptr_;
+    const unsigned int  item_;
+    DISALLOW_COPY_AND_ASSIGN(ListItem);
+};
+
+// Doubly-linked list wrapper around std::list<ListItem*>. Pointer payloads
+// remain caller-owned; traversal mutates cached iterators, hence the
+// mutable members. Not thread-safe.
+class ListWrapper
+{
+public:
+    ListWrapper();
+    ~ListWrapper();
+
+    // ListWrapper functions
+    unsigned int GetSize() const;
+    int PushBack(const void* ptr);
+    int PushBack(const unsigned int item_id);
+    int PushFront(const void* ptr);
+    int PushFront(const unsigned int item_id);
+    int PopFront();
+    int PopBack();
+    bool Empty() const;
+    ListItem* First() const;
+    ListItem* Last() const;
+    ListItem* Next(ListItem* item) const;
+    ListItem* Previous(ListItem* item) const;
+    int Erase(ListItem* item);
+    int Insert(ListItem* existing_previous_item, ListItem* new_item);
+    int InsertBefore(ListItem* existing_next_item, ListItem* new_item);
+
+private:
+    mutable std::list<ListItem*> list_;
+    DISALLOW_COPY_AND_ASSIGN(ListWrapper);
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_
diff --git a/trunk/src/system_wrappers/source/list_unittest.cc b/trunk/src/system_wrappers/source/list_unittest.cc
new file mode 100644
index 0000000..4d32f59
--- /dev/null
+++ b/trunk/src/system_wrappers/source/list_unittest.cc
@@ -0,0 +1,479 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+#include "system_wrappers/interface/list_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+using ::webrtc::ListWrapper;
+using ::webrtc::ListItem;
+using ::webrtc::scoped_ptr;
+
+// Note: kNumberOfElements needs to be even.
+const unsigned int kNumberOfElements = 10;
+
+// An opaque implementation of dynamic or statically allocated unsigned ints.
+// This class makes it possible to use the exact same code for testing of both
+// the dynamic and static implementation of ListWrapper.
+// Clarification: ListWrapper has two versions of PushBack(..). It takes an
+// unsigned integer or a void pointer. The integer implementation takes care
+// of memory management. The void pointer version expect the caller to manage
+// the memory associated with the void pointer.
+// This class works like the integer version but can be implemented on top of
+// either the integer version or void pointer version of ListWrapper.
+// Note: the non-virtual functions behave the same for both versions.
+// Test shim over ListWrapper exposing a uniform unsigned-int interface.
+// Concrete subclasses map it onto either the integer-storing ("static") or
+// pointer-storing ("dynamic") side of ListWrapper.
+class ListWrapperSimple {
+public:
+    static ListWrapperSimple* Create(bool static_allocation);
+    virtual ~ListWrapperSimple() {}
+
+    // These three functions should be used for manipulating ListItems so that
+    // they are the type corresponding to the underlying implementation.
+    virtual unsigned int GetUnsignedItem(
+        const ListItem* item) const = 0;
+    virtual ListItem* CreateListItem(unsigned int item_id) = 0;
+    unsigned int GetSize() const {
+        return list_.GetSize();
+    }
+    virtual int PushBack(const unsigned int item_id) = 0;
+    virtual int PushFront(const unsigned int item_id) = 0;
+    virtual int PopFront() = 0;
+    virtual int PopBack() = 0;
+    bool Empty() const {
+        return list_.Empty();
+    }
+    ListItem* First() const {
+        return list_.First();
+    }
+    ListItem* Last() const {
+        return list_.Last();
+    }
+    ListItem* Next(ListItem* item) const {
+        return list_.Next(item);
+    }
+    ListItem* Previous(ListItem* item) const {
+        return list_.Previous(item);
+    }
+    virtual int Erase(ListItem* item) = 0;
+    // Insert/InsertBefore take ownership of new_item: on failure the item
+    // (and, for the dynamic version, its payload) is destroyed here.
+    int Insert(ListItem* existing_previous_item,
+               ListItem* new_item) {
+        const int retval = list_.Insert(existing_previous_item, new_item);
+        if (retval != 0) {
+            EXPECT_TRUE(DestroyListItem(new_item));
+        }
+        return retval;
+    }
+
+    int InsertBefore(ListItem* existing_next_item,
+                     ListItem* new_item) {
+        const int retval = list_.InsertBefore(existing_next_item, new_item);
+        if (retval != 0) {
+            EXPECT_TRUE(DestroyListItem(new_item));
+        }
+        return retval;
+    }
+protected:
+    ListWrapperSimple() {}
+
+    // Frees whatever payload the implementation attached to item.
+    virtual bool DestroyListItemContent(ListItem* item) = 0;
+    // Frees the payload and then the ListItem node itself.
+    bool DestroyListItem(ListItem* item) {
+        const bool retval = DestroyListItemContent(item);
+        delete item;
+        return retval;
+    }
+
+    ListWrapper list_;
+};
+
+// Erases every element of |list_wrapper| via its Erase() implementation,
+// expecting each erase to succeed. Safe to call with NULL.
+void ClearList(ListWrapperSimple* list_wrapper) {
+  if (list_wrapper == NULL) {
+      return;
+  }
+  for (ListItem* item = list_wrapper->First(); item != NULL;
+       item = list_wrapper->First()) {
+    EXPECT_EQ(list_wrapper->Erase(item), 0);
+  }
+}
+
+// ListWrapperSimple backed by the integer-storing side of ListWrapper:
+// the list owns the values, so there is no payload memory to manage.
+class ListWrapperStatic : public ListWrapperSimple {
+public:
+    ListWrapperStatic() {}
+    virtual ~ListWrapperStatic() {
+        ClearList(this);
+    }
+
+    virtual unsigned int GetUnsignedItem(const ListItem* item) const {
+        return item->GetUnsignedItem();
+    }
+    virtual ListItem* CreateListItem(unsigned int item_id) {
+        return new ListItem(item_id);
+    }
+    // Nothing to free: the value lives inside the ListItem.
+    virtual bool DestroyListItemContent(ListItem* item) {
+        return true;
+    }
+    virtual int PushBack(const unsigned int item_id) {
+        return list_.PushBack(item_id);
+    }
+    virtual int PushFront(const unsigned int item_id) {
+        return list_.PushFront(item_id);
+    }
+    virtual int PopFront() {
+        return list_.PopFront();
+    }
+    virtual int PopBack() {
+        return list_.PopBack();
+    }
+    virtual int Erase(ListItem* item) {
+        return list_.Erase(item);
+    }
+};
+
+// ListWrapperSimple backed by the pointer-storing side of ListWrapper:
+// each element is a heap-allocated unsigned int whose lifetime this class
+// manages (allocated on push, freed on erase/pop).
+class ListWrapperDynamic : public ListWrapperSimple {
+public:
+    ListWrapperDynamic() {}
+    virtual ~ListWrapperDynamic() {
+        ClearList(this);
+    }
+
+    virtual unsigned int GetUnsignedItem(const ListItem* item) const {
+        const unsigned int* return_value_pointer =
+            reinterpret_cast<unsigned int*> (item->GetItem());
+        if (return_value_pointer == NULL) {
+            // NOTE(review): -1 converts to UINT_MAX in this unsigned return
+            // type — used as an error sentinel; confirm callers expect that.
+            return -1;
+        }
+        return *return_value_pointer;
+    }
+    // Allocates the payload and wraps it; frees the payload again if the
+    // ListItem itself could not be created.
+    virtual ListItem* CreateListItem(unsigned int item_id) {
+        unsigned int* item_id_pointer = new unsigned int;
+        if (item_id_pointer == NULL) {
+            return NULL;
+        }
+        *item_id_pointer = item_id;
+        ListItem* return_value = new ListItem(
+            reinterpret_cast<void*>(item_id_pointer));
+        if (return_value == NULL) {
+            delete item_id_pointer;
+            return NULL;
+        }
+        return return_value;
+    }
+    // Frees the heap-allocated payload; false if item or payload is NULL.
+    virtual bool DestroyListItemContent(ListItem* item) {
+        if (item == NULL) {
+            return false;
+        }
+        bool return_value = false;
+        unsigned int* item_id_ptr = reinterpret_cast<unsigned int*>(
+            item->GetItem());
+        if (item_id_ptr != NULL) {
+            return_value = true;
+            delete item_id_ptr;
+        }
+        return return_value;
+    }
+    virtual int PushBack(const unsigned int item_id) {
+        unsigned int* item_id_ptr = new unsigned int;
+        if (item_id_ptr == NULL) {
+            return -1;
+        }
+        *item_id_ptr = item_id;
+        const int return_value = list_.PushBack(
+            reinterpret_cast<void*>(item_id_ptr));
+        if (return_value != 0) {
+            // Push failed: reclaim the payload we just allocated.
+            delete item_id_ptr;
+        }
+        return return_value;
+    }
+    virtual int PushFront(const unsigned int item_id) {
+        unsigned int* item_id_ptr = new unsigned int;
+        if (item_id_ptr == NULL) {
+            return -1;
+        }
+        *item_id_ptr = item_id;
+        const int return_value = list_.PushFront(
+            reinterpret_cast<void*>(item_id_ptr));
+        if (return_value != 0) {
+            delete item_id_ptr;
+        }
+        return return_value;
+    }
+    virtual int PopFront() {
+        return Erase(list_.First());
+    }
+    virtual int PopBack() {
+        return Erase(list_.Last());
+    }
+    // Frees the payload, then removes the node from the list.
+    virtual int Erase(ListItem* item) {
+        if (item == NULL) {
+            return -1;
+        }
+        int retval = 0;
+        if (!DestroyListItemContent(item)) {
+            retval = -1;
+            ADD_FAILURE();
+        }
+        if (list_.Erase(item) != 0) {
+            retval = -1;
+        }
+        return retval;
+    }
+};
+
+// Factory: selects the integer-based ("static") or pointer-based
+// ("dynamic") test shim.
+ListWrapperSimple* ListWrapperSimple::Create(bool static_allocation) {
+    return static_allocation
+        ? static_cast<ListWrapperSimple*>(new ListWrapperStatic())
+        : static_cast<ListWrapperSimple*>(new ListWrapperDynamic());
+}
+
+// Builds [0, 1, ..., kNumberOfElements-1]. Returns NULL (after cleaning up)
+// if creation or any push fails; caller owns the returned list.
+ListWrapperSimple* CreateAscendingList(bool static_allocation) {
+    ListWrapperSimple* return_value = ListWrapperSimple::Create(
+        static_allocation);
+    if (return_value == NULL) {
+        return NULL;
+    }
+    for (unsigned int i = 0; i < kNumberOfElements; ++i) {
+        if (return_value->PushBack(i) == -1) {
+            ClearList(return_value);
+            delete return_value;
+            return NULL;
+        }
+    }
+    return return_value;
+}
+
+// Builds [kNumberOfElements-1, ..., 1, 0]. Same failure contract as
+// CreateAscendingList().
+ListWrapperSimple* CreateDescendingList(bool static_allocation) {
+    ListWrapperSimple* return_value = ListWrapperSimple::Create(
+        static_allocation);
+    if (return_value == NULL) {
+        return NULL;
+    }
+    for (unsigned int i = 0; i < kNumberOfElements; ++i) {
+        if (return_value->PushBack(kNumberOfElements - i - 1) == -1) {
+            ClearList(return_value);
+            delete return_value;
+            return NULL;
+        }
+    }
+    return return_value;
+}
+
+// Builds [0, kNumberOfElements-1, 1, kNumberOfElements-2, ...] — low and
+// high values alternating (this is why kNumberOfElements needs to be even).
+// Same failure contract as CreateAscendingList().
+ListWrapperSimple* CreateInterleavedList(bool static_allocation) {
+    ListWrapperSimple* return_value = ListWrapperSimple::Create(
+        static_allocation);
+    if (return_value == NULL) {
+        return NULL;
+    }
+    unsigned int uneven_count = 0;
+    unsigned int even_count = 0;
+    for (unsigned int i = 0; i < kNumberOfElements; i++) {
+        unsigned int push_value = 0;
+        if ((i % 2) == 0) {
+            // Even positions count up from 0.
+            push_value = even_count;
+            even_count++;
+        } else {
+            // Odd positions count down from kNumberOfElements - 1.
+            push_value = kNumberOfElements - uneven_count - 1;
+            uneven_count++;
+        }
+        if (return_value->PushBack(push_value) == -1) {
+            ClearList(return_value);
+            delete return_value;
+            return NULL;
+        }
+    }
+    return return_value;
+}
+
+// Prints the integer contents of |list| on one line, e.g. "[  0  1  2]".
+void PrintList(const ListWrapperSimple* list) {
+    printf("[");
+    for (ListItem* item = list->First(); item != NULL;
+         item = list->Next(item)) {
+        printf("%3u", list->GetUnsignedItem(item));
+    }
+    printf("]\n");
+}
+
+// True iff both lists have the same length and element-wise equal values.
+bool CompareLists(const ListWrapperSimple* lhs, const ListWrapperSimple* rhs) {
+    const unsigned int list_size = lhs->GetSize();
+    if (lhs->GetSize() != rhs->GetSize()) {
+        return false;
+    }
+    // Redundant with the loop below (zero iterations), kept as an explicit
+    // fast path for two empty lists.
+    if (lhs->Empty()) {
+        return rhs->Empty();
+    }
+    unsigned int i = 0;
+    ListItem* lhs_item = lhs->First();
+    ListItem* rhs_item = rhs->First();
+    while (i < list_size) {
+        // NULL here would mean the list lied about its size.
+        if (lhs_item == NULL) {
+            return false;
+        }
+        if (rhs_item == NULL) {
+            return false;
+        }
+        if (lhs->GetUnsignedItem(lhs_item) != rhs->GetUnsignedItem(rhs_item)) {
+            return false;
+        }
+        i++;
+        lhs_item = lhs->Next(lhs_item);
+        rhs_item = rhs->Next(rhs_item);
+    }
+    return true;
+}
+
+// Reverses an ascending list twice (via PushBack/Previous, then
+// PushFront/Next) and checks each stage against a reference list.
+TEST(ListWrapperTest,ReverseNewIntList) {
+    // Create a new temporary list with elements reversed those of
+    // new_int_list_
+    const scoped_ptr<ListWrapperSimple> descending_list(
+        CreateDescendingList(rand()%2));
+    ASSERT_FALSE(descending_list.get() == NULL);
+    ASSERT_FALSE(descending_list->Empty());
+    ASSERT_EQ(kNumberOfElements,descending_list->GetSize());
+
+    const scoped_ptr<ListWrapperSimple> ascending_list(
+        CreateAscendingList(rand()%2));
+    ASSERT_FALSE(ascending_list.get() == NULL);
+    ASSERT_FALSE(ascending_list->Empty());
+    ASSERT_EQ(kNumberOfElements,ascending_list->GetSize());
+
+    scoped_ptr<ListWrapperSimple> list_to_reverse(
+        ListWrapperSimple::Create(rand()%2));
+    // Guard against allocation failure before dereferencing, matching the
+    // check done for list_to_un_reverse below.
+    ASSERT_FALSE(list_to_reverse.get() == NULL);
+
+    // Reverse the list using PushBack and Previous.
+    for (ListItem* item = ascending_list->Last(); item != NULL;
+         item = ascending_list->Previous(item)) {
+         list_to_reverse->PushBack(ascending_list->GetUnsignedItem(item));
+    }
+
+    ASSERT_TRUE(CompareLists(descending_list.get(), list_to_reverse.get()));
+
+    scoped_ptr<ListWrapperSimple> list_to_un_reverse(
+        ListWrapperSimple::Create(rand()%2));
+    ASSERT_FALSE(list_to_un_reverse.get() == NULL);
+    // Reverse the reversed list using PushFront and Next.
+    for (ListItem* item = list_to_reverse->First(); item != NULL;
+         item = list_to_reverse->Next(item)) {
+         list_to_un_reverse->PushFront(list_to_reverse->GetUnsignedItem(item));
+    }
+    ASSERT_TRUE(CompareLists(ascending_list.get(), list_to_un_reverse.get()));
+}
+
+// Verifies that PopFront/PopBack remove exactly the first/last element.
+TEST(ListWrapperTest,PopTest) {
+    scoped_ptr<ListWrapperSimple> ascending_list(CreateAscendingList(rand()%2));
+    ASSERT_FALSE(ascending_list.get() == NULL);
+    ASSERT_FALSE(ascending_list->Empty());
+    EXPECT_EQ(0, ascending_list->PopFront());
+    // After popping the 0, the first element is 1.
+    EXPECT_EQ(1U, ascending_list->GetUnsignedItem(ascending_list->First()));
+
+    EXPECT_EQ(0, ascending_list->PopBack());
+    EXPECT_EQ(kNumberOfElements - 2, ascending_list->GetUnsignedItem(
+              ascending_list->Last()));
+    EXPECT_EQ(kNumberOfElements - 2, ascending_list->GetSize());
+}
+
+// Use Insert to interleave two lists.
+// Halves an ascending and a descending list, then Insert()s the descending
+// half (taken from its tail) into the ascending half at decreasing
+// positions; the result must equal CreateInterleavedList().
+TEST(ListWrapperTest,InterLeaveTest) {
+    scoped_ptr<ListWrapperSimple> interleave_list(
+        CreateAscendingList(rand()%2));
+    ASSERT_FALSE(interleave_list.get() == NULL);
+    ASSERT_FALSE(interleave_list->Empty());
+
+    scoped_ptr<ListWrapperSimple> descending_list(
+        CreateDescendingList(rand()%2));
+    ASSERT_FALSE(descending_list.get() == NULL);
+
+    // Drop the back half of both lists.
+    for (unsigned int i = 0; i < kNumberOfElements/2; ++i) {
+        ASSERT_EQ(0, interleave_list->PopBack());
+        ASSERT_EQ(0, descending_list->PopBack());
+    }
+    ASSERT_EQ(kNumberOfElements/2, interleave_list->GetSize());
+    ASSERT_EQ(kNumberOfElements/2, descending_list->GetSize());
+
+    unsigned int insert_position = kNumberOfElements/2;
+    ASSERT_EQ(insert_position * 2, kNumberOfElements);
+    while (!descending_list->Empty())
+    {
+        // Move the tail value of descending_list...
+        ListItem* item = descending_list->Last();
+        ASSERT_FALSE(item == NULL);
+
+        const unsigned int item_id = descending_list->GetUnsignedItem(item);
+        ASSERT_EQ(0, descending_list->Erase(item));
+
+        // ...and Insert() it after the (insert_position-1)-th element of
+        // interleave_list.
+        ListItem* insert_item = interleave_list->CreateListItem(item_id);
+        ASSERT_FALSE(insert_item == NULL);
+        item = interleave_list->First();
+        ASSERT_FALSE(item == NULL);
+        for (unsigned int j = 0; j < insert_position - 1; ++j) {
+            item = interleave_list->Next(item);
+            ASSERT_FALSE(item == NULL);
+        }
+        EXPECT_EQ(0, interleave_list->Insert(item, insert_item));
+        --insert_position;
+    }
+
+    scoped_ptr<ListWrapperSimple> interleaved_list(
+        CreateInterleavedList(rand()%2));
+    ASSERT_FALSE(interleaved_list.get() == NULL);
+    ASSERT_FALSE(interleaved_list->Empty());
+    ASSERT_TRUE(CompareLists(interleaved_list.get(), interleave_list.get()));
+}
+
+// Use InsertBefore to interleave two lists.
+// Mirror image of InterLeaveTest: starts from a descending list and weaves
+// in ascending values with InsertBefore(); result must again equal
+// CreateInterleavedList().
+TEST(ListWrapperTest,InterLeaveTestII) {
+    scoped_ptr<ListWrapperSimple> interleave_list(
+        CreateDescendingList(rand()%2));
+    ASSERT_FALSE(interleave_list.get() == NULL);
+    ASSERT_FALSE(interleave_list->Empty());
+
+    scoped_ptr<ListWrapperSimple> ascending_list(CreateAscendingList(rand()%2));
+    ASSERT_FALSE(ascending_list.get() == NULL);
+
+    // Drop the back half of both lists.
+    for (unsigned int i = 0; i < kNumberOfElements/2; ++i) {
+        ASSERT_EQ(0, interleave_list->PopBack());
+        ASSERT_EQ(0, ascending_list->PopBack());
+    }
+    ASSERT_EQ(kNumberOfElements/2, interleave_list->GetSize());
+    ASSERT_EQ(kNumberOfElements/2, ascending_list->GetSize());
+
+    unsigned int insert_position = kNumberOfElements/2;
+    ASSERT_EQ(insert_position * 2, kNumberOfElements);
+    while (!ascending_list->Empty())
+    {
+        // Move the tail value of ascending_list...
+        ListItem* item = ascending_list->Last();
+        ASSERT_FALSE(item == NULL);
+
+        const unsigned int item_id = ascending_list->GetUnsignedItem(item);
+        ASSERT_EQ(0,ascending_list->Erase(item));
+
+        // ...and InsertBefore() the (insert_position-1)-th element of
+        // interleave_list.
+        ListItem* insert_item = interleave_list->CreateListItem(item_id);
+        ASSERT_FALSE(insert_item == NULL);
+        item = interleave_list->First();
+        ASSERT_FALSE(item == NULL);
+        for (unsigned int j = 0; j < insert_position - 1; ++j) {
+            item = interleave_list->Next(item);
+            ASSERT_FALSE(item == NULL);
+        }
+        EXPECT_EQ(interleave_list->InsertBefore(item, insert_item), 0);
+        --insert_position;
+    }
+
+    scoped_ptr<ListWrapperSimple> interleaved_list(
+        CreateInterleavedList(rand()%2));
+    ASSERT_FALSE(interleaved_list.get() == NULL);
+    ASSERT_FALSE(interleaved_list->Empty());
+
+    ASSERT_TRUE(CompareLists(interleaved_list.get(), interleave_list.get()));
+}
diff --git a/trunk/src/system_wrappers/source/map.cc b/trunk/src/system_wrappers/source/map.cc
new file mode 100644
index 0000000..331da32
--- /dev/null
+++ b/trunk/src/system_wrappers/source/map.cc
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "map_wrapper.h"
+
+#include "trace.h"
+
+namespace webrtc {
+// A key/value node of MapWrapper: an int id plus a caller-owned pointer.
+MapItem::MapItem(int id, void* item) : item_id_(id), item_pointer_(item)
+{
+}
+
+MapItem::~MapItem()
+{
+}
+
+// Returns the stored pointer; ownership stays with the caller.
+void* MapItem::GetItem()
+{
+    return item_pointer_;
+}
+
+int MapItem::GetId()
+{
+    return item_id_;
+}
+
+// The id reinterpreted as unsigned (negative ids wrap around).
+unsigned int MapItem::GetUnsignedId()
+{
+    return static_cast<unsigned int>(item_id_);
+}
+
+// Replaces the stored pointer; the previous pointer is NOT freed here.
+void MapItem::SetItem(void* ptr)
+{
+    item_pointer_ = ptr;
+}
+
+MapWrapper::MapWrapper() : map_()
+{
+}
+
+// Frees any remaining MapItems (but not the user payloads they point to)
+// and logs, since a non-empty map at destruction suggests a leak upstream.
+MapWrapper::~MapWrapper()
+{
+    if (!map_.empty())
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1,
+                   "Potential memory leak in MapWrapper");
+        // Remove all map items. Please note that std::map::clear() can't be
+        // used because each item has some dynamically allocated memory
+        // associated with it (i.e. using std::map::clear would introduce a
+        // memory leak).
+        while (Erase(First()) == 0)
+        {}
+    }
+}
+
+// Number of items currently stored.
+int MapWrapper::Size() const
+{
+    return (int)map_.size();
+}
+
+// Inserts (or replaces) the entry for |id|. Always returns 0.
+// If |id| is already present the old MapItem is freed first — the previous
+// code's map_[id] = new MapItem(...) overwrote the pointer and leaked it.
+int MapWrapper::Insert(int id, void* ptr)
+{
+    std::map<int, MapItem*>::iterator it = map_.find(id);
+    if (it != map_.end())
+    {
+        delete it->second;
+        it->second = new MapItem(id, ptr);
+        return 0;
+    }
+    map_[id] = new MapItem(id, ptr);
+    return 0;
+}
+
+// Item with the smallest id, or 0 if the map is empty.
+MapItem* MapWrapper::First() const
+{
+    std::map<int, MapItem*>::const_iterator it = map_.begin();
+    if (it != map_.end())
+    {
+        return it->second;
+    }
+    return 0;
+}
+
+// Item with the largest id, or 0 if the map is empty.
+MapItem* MapWrapper::Last() const
+{
+    std::map<int, MapItem*>::const_reverse_iterator it = map_.rbegin();
+    if (it != map_.rend())
+    {
+        return it->second;
+    }
+    return 0;
+}
+
+// Item with the next-larger id than |item|, or 0 at the end / if |item| is
+// 0 or no longer in the map. Re-finds the item by id, so O(log n).
+MapItem* MapWrapper::Next(MapItem* item) const
+{
+    if (item == 0)
+    {
+        return 0;
+    }
+    std::map<int, MapItem*>::const_iterator it = map_.find(item->item_id_);
+    if (it != map_.end())
+    {
+        it++;
+        if (it != map_.end())
+        {
+            return it->second;
+        }
+    }
+    return 0;
+}
+
+// Item with the next-smaller id than |item|, or 0 at the beginning / if
+// |item| is 0 or no longer in the map.
+MapItem* MapWrapper::Previous(MapItem* item) const
+{
+    if (item == 0)
+    {
+        return 0;
+    }
+
+    std::map<int, MapItem*>::const_iterator it = map_.find(item->item_id_);
+    if ((it != map_.end()) &&
+       (it != map_.begin()))
+    {
+        --it;
+        return it->second;
+    }
+    return 0;
+}
+
+// Item stored under |id|, or 0 if absent.
+MapItem* MapWrapper::Find(int id) const
+{
+    std::map<int, MapItem*>::const_iterator it = map_.find(id);
+    if (it != map_.end())
+    {
+        return it->second;
+    }
+    return 0;
+}
+
+// Removes |item| (looked up by its id) and frees the MapItem node — not
+// the user payload. Returns 0 on success, -1 if item is 0 or not found.
+int MapWrapper::Erase(MapItem* item)
+{
+    if (item == 0)
+    {
+        return -1;
+    }
+    std::map<int, MapItem*>::iterator it = map_.find(item->item_id_);
+    if (it != map_.end())
+    {
+        delete it->second;
+        map_.erase(it);
+        return 0;
+    }
+    return -1;
+}
+
+// Same as above, keyed directly by id.
+int MapWrapper::Erase(const int id)
+{
+    std::map<int, MapItem*>::iterator it = map_.find(id);
+    if (it != map_.end())
+    {
+        delete it->second;
+        map_.erase(it);
+        return 0;
+    }
+    return -1;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/map_no_stl.cc b/trunk/src/system_wrappers/source/map_no_stl.cc
new file mode 100644
index 0000000..ef93a1f
--- /dev/null
+++ b/trunk/src/system_wrappers/source/map_no_stl.cc
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "map_no_stl.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+// Doubly-linked list node for MapNoStl: int id plus caller-owned pointer.
+MapNoStlItem::MapNoStlItem(int id, void* item)
+    : next_(0),
+      prev_(0),
+      item_id_(id),
+      item_ptr_(item)
+{
+}
+
+MapNoStlItem::~MapNoStlItem()
+{
+}
+
+// Returns the stored pointer; ownership stays with the caller.
+void* MapNoStlItem::GetItem()
+{
+    return item_ptr_;
+}
+
+int MapNoStlItem::GetId()
+{
+    return item_id_;
+}
+
+// The id reinterpreted as unsigned (negative ids wrap around).
+unsigned int MapNoStlItem::GetUnsignedId()
+{
+    return static_cast<unsigned int>(item_id_);
+}
+
+// Replaces the stored pointer; the previous pointer is NOT freed here.
+void MapNoStlItem::SetItem(void* ptr)
+{
+    item_ptr_ = ptr;
+}
+
+// Sorted (ascending id) doubly-linked list, guarded by a critical section.
+MapNoStl::MapNoStl()
+    : critical_section_(CriticalSectionWrapper::CreateCriticalSection()),
+      first_(0),
+      last_(0),
+      size_(0)
+{
+}
+
+// Frees any remaining nodes (logging a potential upstream leak) and the
+// critical section.
+MapNoStl::~MapNoStl()
+{
+    if (First())
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1,
+                   "Potential memory leak in MapNoStl");
+        while (Erase(First()) == 0)
+        {}
+    }
+    delete critical_section_;
+}
+
+// Number of items currently stored. Note: read without taking the lock.
+int MapNoStl::Size() const
+{
+    return size_;
+}
+
+// Inserts |ptr| under |id|, keeping the list sorted by ascending id.
+// Always returns 0. Fixes two defects in the previous version:
+//  - item->prev_ was overwritten with new_item BEFORE the old predecessor
+//    was saved, so a mid-list insert set new_item->prev_ = new_item and
+//    corrupted the chain;
+//  - the loop condition (item->next_) skipped the id comparison for the
+//    last node, so values belonging just before the tail were appended
+//    after it, breaking the sort order.
+int MapNoStl::Insert(int id, void* ptr)
+{
+    MapNoStlItem* new_item = new MapNoStlItem(id, ptr);
+
+    CriticalSectionScoped lock(critical_section_);
+    size_++;
+    if (!first_)
+    {
+        // Empty map: the new node is both head and tail.
+        first_ = new_item;
+        last_ = new_item;
+        return 0;
+    }
+    // Find the first node with a larger id and link new_item before it.
+    MapNoStlItem* item = first_;
+    while (item)
+    {
+        if (item->GetId() > id)
+        {
+            new_item->next_ = item;
+            new_item->prev_ = item->prev_;
+            if (item->prev_)
+            {
+                item->prev_->next_ = new_item;
+            }
+            else
+            {
+                first_ = new_item;
+            }
+            item->prev_ = new_item;
+            return 0;
+        }
+        item = item->next_;
+    }
+    // No larger id found: append at the tail.
+    new_item->prev_ = last_;
+    last_->next_ = new_item;
+    last_ = new_item;
+    return 0;
+}
+
+// NOTE(review): unlike Find()/Erase(), the four traversal accessors below
+// take no lock — confirm callers serialize access around iteration.
+
+// Head of the list (smallest id), or 0 if empty.
+MapNoStlItem* MapNoStl::First() const
+{
+    return first_;
+}
+
+// Tail of the list (largest id), or 0 if empty.
+MapNoStlItem* MapNoStl::Last() const
+{
+    return last_;
+}
+
+MapNoStlItem* MapNoStl::Next(MapNoStlItem* item) const
+{
+    if (!item)
+    {
+        return 0;
+    }
+    return item->next_;
+}
+
+MapNoStlItem* MapNoStl::Previous(MapNoStlItem* item) const
+{
+    if (!item)
+    {
+        return 0;
+    }
+    return item->prev_;
+}
+
+// Linear lookup by id under the lock; 0 if absent.
+MapNoStlItem* MapNoStl::Find(int id) const
+{
+    CriticalSectionScoped lock(critical_section_);
+    MapNoStlItem* item = Locate(id);
+    return item;
+}
+
+// Unlinks and frees |item| under the lock. -1 if item is 0.
+int MapNoStl::Erase(MapNoStlItem* item)
+{
+    if(!item)
+    {
+        return -1;
+    }
+    CriticalSectionScoped lock(critical_section_);
+    return Remove(item);
+}
+
+// Looks up |id| and removes it under the lock. -1 if absent.
+int MapNoStl::Erase(const int id)
+{
+    CriticalSectionScoped lock(critical_section_);
+    MapNoStlItem* item = Locate(id);
+    if(!item)
+    {
+        return -1;
+    }
+    return Remove(item);
+}
+
+// Linear scan for |id|; caller must hold the lock. Returns 0 if absent.
+MapNoStlItem* MapNoStl::Locate(int id) const
+{
+    MapNoStlItem* item = first_;
+    while(item)
+    {
+        if (item->GetId() == id)
+        {
+            return item;
+        }
+        item = item->next_;
+    }
+    return 0;
+}
+
+// Unlinks |item| from the list and deletes it; caller must hold the lock.
+// Fixes a null-pointer dereference in the previous version: when removing
+// the only element, both prev_ and next_ are NULL, yet the old code
+// unconditionally executed next_item->prev_ = 0 in the !previous_item
+// branch.
+int MapNoStl::Remove(MapNoStlItem* item)
+{
+    if (!item)
+    {
+        return -1;
+    }
+    size_--;
+    MapNoStlItem* previous_item = item->prev_;
+    MapNoStlItem* next_item = item->next_;
+    if (previous_item)
+    {
+        previous_item->next_ = next_item;
+    }
+    else
+    {
+        // |item| was the head; next_item is NULL if it was the only node.
+        first_ = next_item;
+    }
+    if (next_item)
+    {
+        next_item->prev_ = previous_item;
+    }
+    else
+    {
+        // |item| was the tail; previous_item is NULL if it was the only node.
+        last_ = previous_item;
+    }
+    delete item;
+    return 0;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/map_no_stl.h b/trunk/src/system_wrappers/source/map_no_stl.h
new file mode 100644
index 0000000..51bc011
--- /dev/null
+++ b/trunk/src/system_wrappers/source/map_no_stl.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_MAP_NO_STL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_MAP_NO_STL_H_
+
+#include "constructor_magic.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+// Node of MapNoStl's sorted doubly-linked list: an int id plus a
+// caller-owned pointer.
+class MapNoStlItem
+{
+// MapNoStl relinks the protected next_/prev_ members directly, so it must
+// be the friend. The previous declaration said "friend class Map;", which
+// names a class that does not exist and left MapNoStl without access.
+friend class MapNoStl;
+
+public:
+    MapNoStlItem(int id, void* ptr);
+    virtual ~MapNoStlItem();
+    // Returns the stored pointer; ownership stays with the caller.
+    void* GetItem();
+    int GetId();
+    // The id reinterpreted as unsigned (negative ids wrap around).
+    unsigned int GetUnsignedId();
+    // Replaces the stored pointer; the old pointer is not freed.
+    void SetItem(void* ptr);
+
+protected:
+    MapNoStlItem* next_;
+    MapNoStlItem* prev_;
+
+private:
+    int item_id_;
+    void* item_ptr_;
+    DISALLOW_COPY_AND_ASSIGN(MapNoStlItem);
+};
+
+// Map implemented as a sorted doubly-linked list (no STL), guarded by a
+// CriticalSectionWrapper. Same interface as MapWrapper.
+class MapNoStl
+{
+public:
+    MapNoStl();
+    virtual ~MapNoStl();
+
+    // MapWrapper functions.
+    int Insert(int id, void* ptr);
+    int Erase(MapNoStlItem* item);
+    int Erase(int id);
+    int Size() const;
+    MapNoStlItem* First() const;
+    MapNoStlItem* Last() const;
+    MapNoStlItem* Next(MapNoStlItem* item) const;
+    MapNoStlItem* Previous(MapNoStlItem* item) const;
+    MapNoStlItem* Find(int id) const;
+
+private:
+    MapNoStlItem* Locate(int id) const;
+    int Remove(MapNoStlItem* item);
+
+    // Was declared as the undeclared type "CriticalSection*"; the .cc file
+    // assigns CriticalSectionWrapper::CreateCriticalSection() and only
+    // CriticalSectionWrapper is forward-declared above.
+    CriticalSectionWrapper* critical_section_;
+    MapNoStlItem* first_;
+    MapNoStlItem* last_;
+    int size_;
+    DISALLOW_COPY_AND_ASSIGN(MapNoStl);
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_MAP_NO_STL_H_
diff --git a/trunk/src/system_wrappers/source/map_unittest.cc b/trunk/src/system_wrappers/source/map_unittest.cc
new file mode 100644
index 0000000..1c85a92
--- /dev/null
+++ b/trunk/src/system_wrappers/source/map_unittest.cc
@@ -0,0 +1,231 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+#include "map_wrapper.h"
+
+using ::webrtc::MapWrapper;
+using ::webrtc::MapItem;
+
+const int kNumberOfElements = 10;
+
+// Extracts the int* payload stored in |item|; NULL if |item| is NULL.
+int* ItemPointer(MapItem* item) {
+    if (item == NULL) {
+        return NULL;
+    }
+    return reinterpret_cast<int*>(item->GetItem());
+}
+
+// Frees the int payload of |item|. Returns false only for a NULL item;
+// deleting a NULL payload is a no-op and still counts as success.
+bool DeleteItemContent(MapItem* item) {
+    if(item == NULL) {
+        return false;
+    }
+    int* value_ptr = ItemPointer(item);
+    delete value_ptr;
+    return true;
+}
+
+// Dereferences the payload; -1 if the item or its payload is NULL.
+int ItemValue(MapItem* item) {
+    if (item == NULL) {
+        return -1;
+    }
+    const int* value_ptr = ItemPointer(item);
+    if (value_ptr == 0) {
+        return -1;
+    }
+    return *value_ptr;
+}
+
+// Writes |message| to stdout unless |supress| is true.
+void PrintToConsole(const char* message, bool supress) {
+    if (supress) {
+        return;
+    }
+    printf("%s", message);
+}
+
+// Fills |ascending_map| with ids 0..kNumberOfElements-1, each mapped to a
+// heap-allocated int of the same value. False on any failure (entries
+// inserted so far are left for the caller/fixture to clean up).
+bool CreateAscendingMap(MapWrapper* ascending_map) {
+    int* insert_value = NULL;
+    for (int i = 0; i < kNumberOfElements; ++i) {
+        insert_value = new int;
+        if (insert_value == NULL) {
+            return false;
+        }
+        *insert_value = i;
+        if (0 != ascending_map->Insert(
+                     i,
+                     reinterpret_cast<void*>(insert_value))) {
+            return false;
+        }
+    }
+    return true;
+}
+
+// Frees every payload and erases every entry. False if the map structure
+// itself misbehaves; payload-free items only clear the success flag.
+bool ClearMap(MapWrapper* clear_map) {
+    bool success = true;
+    while (clear_map->Size() != 0) {
+        MapItem* remove_item = clear_map->First();
+        if (remove_item == NULL) {
+            return false;
+        }
+        if (!DeleteItemContent(remove_item)) {
+            success = false;
+        }
+        if (clear_map->Erase(remove_item) != 0) {
+            return false;
+        }
+    }
+    return success;
+}
+
+// Prints one item as "(id, value)"; honours |supress|.
+void PrintMapItem(MapItem* item, bool supress) {
+    const int id = item->GetId();
+    const int value = ItemValue(item);
+    char print_buffer[16];
+    // snprintf bounds the write: the unchecked sprintf would overflow this
+    // 16-byte buffer if id/value ever exceeded three digits.
+    snprintf(print_buffer, sizeof(print_buffer), "(%3i,%3i) ", id, value);
+    PrintToConsole(print_buffer, supress);
+}
+
+// Succeeds only if all the items were printed.
+// Walks First()/Next() and checks the traversal visited Size() items.
+bool PrintMap(const MapWrapper& print_map, bool supress) {
+    const int elements_to_print = print_map.Size();
+    int elements_printed = 0;
+    MapItem* item = print_map.First();
+    PrintToConsole("[", supress);
+    while (item != NULL) {
+        PrintMapItem(item, supress);
+        ++elements_printed;
+        item = print_map.Next(item);
+    }
+    PrintToConsole("]\n", supress);
+    return elements_printed == elements_to_print;
+}
+
+// Succeeds only if all the items were printed.
+// Same as PrintMap but walks Last()/Previous().
+bool ReversePrintMap(const MapWrapper& print_map, bool supress) {
+    const int elements_to_print = print_map.Size();
+    int elements_printed = 0;
+    MapItem* item = print_map.Last();
+    PrintToConsole("[", supress);
+    while (item != NULL) {
+        PrintMapItem(item, supress);
+        ++elements_printed;
+        item = print_map.Previous(item);
+    }
+    PrintToConsole("]\n", supress);
+    return elements_printed == elements_to_print;
+}
+
+// Returns true if the map items contain the same item.
+// Note: compares the payload POINTERS for identity, not the pointed-to
+// values — two maps with equal values but distinct allocations differ.
+bool CompareItems(MapItem* lhs_item, MapItem* rhs_item) {
+    if ((lhs_item == NULL) || (rhs_item == NULL)) {
+        return false;
+    }
+    if (lhs_item->GetId() != rhs_item->GetId()) {
+        return false;
+    }
+    return lhs_item->GetItem() == rhs_item->GetItem();
+}
+
+// Returns true if the map contains the same items.
+// Same size, same ids, and identical payload pointers (see CompareItems).
+bool CompareMaps(const MapWrapper& lhs, const MapWrapper& rhs) {
+    const int map_size = lhs.Size();
+    if (map_size != rhs.Size()) {
+        return false;
+    }
+    int item_count = 0;
+    MapItem* lhs_item = lhs.First();
+    while (lhs_item != NULL) {
+        MapItem* rhs_item = rhs.Find(lhs_item->GetId());
+        if (rhs_item == NULL) {
+            return false;
+        }
+        if (!CompareItems(lhs_item, rhs_item)) {
+            return false;
+        }
+        ++item_count;
+        lhs_item = lhs.Next(lhs_item);
+    }
+    return item_count == map_size;
+} 
+
+// Fixture providing a fresh ascending map (ids and values 0..9) per test;
+// TearDown frees the payloads and verifies the map clears cleanly.
+class MapWrapperTest : public ::testing::Test {
+protected:
+    virtual void SetUp() {
+        ASSERT_TRUE(CreateAscendingMap(&ascending_map_));
+    }
+
+    virtual void TearDown() {
+        EXPECT_TRUE(ClearMap(&ascending_map_));
+    }
+    MapWrapper ascending_map_;
+};
+
+// Removes the first element by id and the last by value-derived id, and
+// checks both disappear and the size shrinks accordingly.
+TEST_F(MapWrapperTest,RemoveTest) {
+    // Erase using int id
+    { // Create local scope to avoid accidental re-use
+        MapItem* item_first = ascending_map_.First();
+        ASSERT_FALSE(item_first == NULL);
+        const int first_value_id = item_first->GetId();
+        const int first_value = ItemValue(item_first);
+        EXPECT_TRUE(first_value == 0);
+        EXPECT_EQ(first_value_id,first_value);
+        EXPECT_FALSE(NULL == ascending_map_.Find(first_value_id));
+        // Payload must be freed before Erase invalidates the MapItem.
+        EXPECT_TRUE(DeleteItemContent(item_first));
+        // NOTE(review): Erase's return value is not checked here.
+        ascending_map_.Erase(first_value_id);
+        EXPECT_TRUE(NULL == ascending_map_.Find(first_value_id));
+        EXPECT_EQ(kNumberOfElements-1,ascending_map_.Size());
+    }
+    
+    // Erase using MapItem* item
+    MapItem* item_last = ascending_map_.Last();
+    ASSERT_FALSE(item_last == NULL);
+    const int last_value_id = item_last->GetId();
+    const int last_value = ItemValue(item_last);
+    EXPECT_TRUE(last_value == kNumberOfElements - 1);    
+    EXPECT_EQ(last_value_id, last_value);
+    EXPECT_FALSE(NULL == ascending_map_.Find(last_value_id));
+    EXPECT_TRUE(DeleteItemContent(item_last));
+    ascending_map_.Erase(last_value_id);
+    EXPECT_TRUE(NULL == ascending_map_.Find(last_value_id));
+    EXPECT_EQ(kNumberOfElements-2,ascending_map_.Size());
+}
+
+// Checks forward and reverse traversal each visit every element.
+TEST_F(MapWrapperTest, PrintTest) {
+    const bool supress = true; // Don't spam the console
+
+    EXPECT_TRUE(PrintMap(ascending_map_, supress));
+    EXPECT_TRUE(ReversePrintMap(ascending_map_, supress));
+}
+
+// Makes compare_map share ascending_map_'s payload pointers, after which
+// CompareMaps (pointer identity) must succeed.
+TEST_F(MapWrapperTest, CopyTest) {
+    MapWrapper compare_map;
+    ASSERT_TRUE(CreateAscendingMap(&compare_map));
+    const int map_size = compare_map.Size();
+    ASSERT_EQ(ascending_map_.Size(), map_size);
+    // CompareMaps compare the pointers not value of the pointers.
+    // (the values are the same since both are ascending maps).
+    EXPECT_FALSE(CompareMaps(compare_map,ascending_map_));
+
+    int copy_count = 0;
+    MapItem* ascend_item = ascending_map_.First();
+    while (ascend_item != NULL) {
+        MapItem* compare_item = compare_map.Find(ascend_item->GetId());
+        ASSERT_FALSE(compare_item == NULL);
+        // Free compare_map's own payload before aliasing ascending_map_'s,
+        // otherwise it would leak.
+        DeleteItemContent(compare_item);
+        compare_item->SetItem(ascend_item->GetItem());
+        ascend_item = ascending_map_.Next(ascend_item);
+        ++copy_count;
+    }
+    EXPECT_TRUE(CompareMaps(compare_map,ascending_map_));
+    // Erase without DeleteItemContent: the payloads are now owned (and
+    // later freed) via ascending_map_'s TearDown.
+    while (compare_map.Erase(compare_map.First()) == 0) {
+    }
+    EXPECT_EQ(map_size, copy_count);
+}
diff --git a/trunk/src/system_wrappers/source/rw_lock.cc b/trunk/src/system_wrappers/source/rw_lock.cc
new file mode 100644
index 0000000..16da0e3
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock.cc
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rw_lock_wrapper.h"
+
+#include <assert.h>
+
+#if defined(_WIN32)
+    #include "rw_lock_win.h"
+#else
+    #include "rw_lock_posix.h"
+#endif
+
+namespace webrtc {
+// Factory: builds the platform-specific reader/writer lock and runs its
+// two-phase Init(). Returns NULL (after asserting in debug builds) when
+// initialization fails.
+RWLockWrapper* RWLockWrapper::CreateRWLock()
+{
+#ifdef _WIN32
+    RWLockWrapper* lock =  new RWLockWindows();
+#else
+    RWLockWrapper* lock =  new RWLockPosix();
+#endif
+    // Init() is kept separate from the constructor so failure can be
+    // reported to the caller.
+    if(lock->Init() != 0)
+    {
+        delete lock;
+        assert(false);
+        return NULL;
+    }
+    return lock;
+}
+
+RWLockWrapper::~RWLockWrapper()
+{
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/rw_lock_generic.cc b/trunk/src/system_wrappers/source/rw_lock_generic.cc
new file mode 100644
index 0000000..a468ef3
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock_generic.cc
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rw_lock_generic.h"
+
+#include "condition_variable_wrapper.h"
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+// Generic writer-preferring reader/writer lock built from one critical
+// section and two condition variables.
+RWLockWrapperGeneric::RWLockWrapperGeneric()
+    : _readersActive(0),
+      _writerActive(false),
+      _readersWaiting(0),
+      _writersWaiting(0)
+{
+    _critSectPtr  = CriticalSectionWrapper::CreateCriticalSection();
+    _readCondPtr  = ConditionVariableWrapper::CreateConditionVariable();
+    _writeCondPtr = ConditionVariableWrapper::CreateConditionVariable();
+}
+
+RWLockWrapperGeneric::~RWLockWrapperGeneric()
+{
+    delete _writeCondPtr;
+    delete _readCondPtr;
+    delete _critSectPtr;
+}
+
+// Nothing extra to initialize; all setup happens in the constructor.
+int RWLockWrapperGeneric::Init()
+{
+    return 0;
+}
+
+// Blocks until no reader and no writer holds the lock, then marks this
+// writer active.
+void RWLockWrapperGeneric::AcquireLockExclusive()
+{
+    _critSectPtr->Enter();
+
+    if (_writerActive || _readersActive > 0)
+    {
+        ++_writersWaiting;
+
+        while (_writerActive || _readersActive > 0)
+        {
+            _writeCondPtr->SleepCS(*_critSectPtr);
+        }
+
+        --_writersWaiting;
+    }
+    _writerActive = true;
+    _critSectPtr->Leave();
+}
+
+void RWLockWrapperGeneric::ReleaseLockExclusive()
+{
+    _critSectPtr->Enter();
+
+    _writerActive = false;
+
+    // Writer priority: wake one waiting writer before any waiting readers.
+    if (_writersWaiting > 0)
+    {
+        _writeCondPtr->Wake();
+
+    }else if (_readersWaiting > 0)
+    {
+        _readCondPtr->WakeAll();
+    }
+    _critSectPtr->Leave();
+}
+
+// New readers yield to any active or waiting writer; under a continuous
+// stream of writers incoming readers can wait indefinitely.
+void RWLockWrapperGeneric::AcquireLockShared()
+{
+    _critSectPtr->Enter();
+
+    if (_writerActive || _writersWaiting > 0)
+    {
+        ++_readersWaiting;
+
+        while (_writerActive || _writersWaiting > 0)
+        {
+            _readCondPtr->SleepCS(*_critSectPtr);
+        }
+        --_readersWaiting;
+    }
+    ++_readersActive;
+    _critSectPtr->Leave();
+}
+
+void RWLockWrapperGeneric::ReleaseLockShared()
+{
+    _critSectPtr->Enter();
+
+    --_readersActive;
+
+    // The last reader out wakes a single waiting writer, if any.
+    if (_readersActive == 0 && _writersWaiting > 0)
+    {
+        _writeCondPtr->Wake();
+    }
+    _critSectPtr->Leave();
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/rw_lock_generic.h b/trunk/src/system_wrappers/source/rw_lock_generic.h
new file mode 100644
index 0000000..fff5e5d
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock_generic.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_GENERIC_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_GENERIC_H_
+
+#include "rw_lock_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class ConditionVariableWrapper;
+
+// Portable reader/writer lock fallback, implemented in rw_lock_generic.cc
+// with a critical section and two condition variables.
+class RWLockWrapperGeneric : public RWLockWrapper
+{
+public:
+    RWLockWrapperGeneric();
+    virtual ~RWLockWrapperGeneric();
+
+    virtual void AcquireLockExclusive();
+    virtual void ReleaseLockExclusive();
+
+    virtual void AcquireLockShared();
+    virtual void ReleaseLockShared();
+
+protected:
+    // Always returns 0; construction does all the work.
+    virtual int Init();
+
+private:
+    CriticalSectionWrapper*   _critSectPtr;
+    ConditionVariableWrapper* _readCondPtr;  // Waiting readers sleep here.
+    ConditionVariableWrapper* _writeCondPtr; // Waiting writers sleep here.
+
+    int  _readersActive;  // Readers currently holding the lock.
+    bool _writerActive;   // True while a writer holds the lock.
+    int  _readersWaiting; // Readers blocked behind active/waiting writers.
+    int  _writersWaiting; // Writers blocked behind readers or a writer.
+};
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_GENERIC_H_
diff --git a/trunk/src/system_wrappers/source/rw_lock_posix.cc b/trunk/src/system_wrappers/source/rw_lock_posix.cc
new file mode 100644
index 0000000..81a161e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock_posix.cc
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rw_lock_posix.h"
+
+namespace webrtc {
+// Thin wrapper around pthread_rwlock_t. The lock is actually initialized in
+// Init(); the constructor only value-initializes the storage.
+RWLockPosix::RWLockPosix() : _lock()
+{
+}
+
+RWLockPosix::~RWLockPosix()
+{
+    pthread_rwlock_destroy(&_lock);
+}
+
+// Returns the pthread_rwlock_init() result (0 on success).
+int RWLockPosix::Init()
+{
+    return pthread_rwlock_init(&_lock, 0);
+}
+
+// Note: return values of the lock/unlock calls below are ignored.
+void RWLockPosix::AcquireLockExclusive()
+{
+    pthread_rwlock_wrlock(&_lock);
+}
+
+void RWLockPosix::ReleaseLockExclusive()
+{
+    pthread_rwlock_unlock(&_lock);
+}
+
+void RWLockPosix::AcquireLockShared()
+{
+    pthread_rwlock_rdlock(&_lock);
+}
+
+void RWLockPosix::ReleaseLockShared()
+{
+    pthread_rwlock_unlock(&_lock);
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/rw_lock_posix.h b/trunk/src/system_wrappers/source/rw_lock_posix.h
new file mode 100644
index 0000000..929bbb8
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock_posix.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_POSIX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_POSIX_H_
+
+#include "rw_lock_wrapper.h"
+
+#include <pthread.h>
+
+namespace webrtc {
+// Reader/writer lock backed by pthread_rwlock_t (see rw_lock_posix.cc).
+class RWLockPosix : public RWLockWrapper
+{
+public:
+    RWLockPosix();
+    virtual ~RWLockPosix();
+
+    virtual void AcquireLockExclusive();
+    virtual void ReleaseLockExclusive();
+
+    virtual void AcquireLockShared();
+    virtual void ReleaseLockShared();
+
+protected:
+    // Runs pthread_rwlock_init(); returns its result (0 on success).
+    virtual int Init();
+
+private:
+    pthread_rwlock_t _lock;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_POSIX_H_
diff --git a/trunk/src/system_wrappers/source/rw_lock_win.cc b/trunk/src/system_wrappers/source/rw_lock_win.cc
new file mode 100644
index 0000000..82cd0ac
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock_win.cc
@@ -0,0 +1,186 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rw_lock_win.h"
+
+#include "critical_section_wrapper.h"
+#include "condition_variable_wrapper.h"
+#include "trace.h"
+
+// TODO (hellner) why not just use the rw_lock_generic.cc solution if
+//                           native is not supported? Unnecessary redundancy!
+
+namespace webrtc {
+// Set once Init() has successfully resolved all five SRW entry points.
+bool RWLockWindows::_winSupportRWLockPrimitive = false;
+// Handle to Kernel32.dll; loaded lazily by the first Init() call and never
+// freed in this file.
+static HMODULE library = NULL;
+
+// Pointers to the native SRW lock API, resolved at runtime via
+// GetProcAddress because the symbols only exist on Vista and later.
+PInitializeSRWLock       _PInitializeSRWLock;
+PAcquireSRWLockExclusive _PAcquireSRWLockExclusive;
+PAcquireSRWLockShared    _PAcquireSRWLockShared;
+PReleaseSRWLockShared    _PReleaseSRWLockShared;
+PReleaseSRWLockExclusive _PReleaseSRWLockExclusive;
+
+// Members of the generic fallback start NULL/zero; Init() decides whether
+// the native SRW lock or the fallback machinery is used.
+RWLockWindows::RWLockWindows()
+    : _critSectPtr(NULL),
+      _readCondPtr(NULL),
+      _writeCondPtr(NULL),
+      _readersActive(0),
+      _writerActive(false),
+      _readersWaiting(0),
+      _writersWaiting(0)
+{
+}
+
+// Deleting NULL pointers is safe, so this works for the native path too.
+RWLockWindows::~RWLockWindows()
+{
+    delete _writeCondPtr;
+    delete _readCondPtr;
+    delete _critSectPtr;
+}
+
+// Two-phase init: lazily resolves the native slim reader/writer (SRW) lock
+// entry points from Kernel32.dll the first time any RWLockWindows is
+// initialized (the API exists only on Vista and later). Falls back to the
+// generic critical-section + condition-variable implementation otherwise.
+// Always returns 0.
+// NOTE(review): the lazy setup of the static |library| and
+// |_winSupportRWLockPrimitive| is not synchronized; presumably the first
+// lock is created before multithreading starts - confirm with callers.
+int RWLockWindows::Init()
+{
+    if(!library)
+    {
+        // Use native implementation if supported (i.e Vista+)
+        library = LoadLibrary(TEXT("Kernel32.dll"));
+        if(library)
+        {
+            // Fixed trace text: the module loaded above is Kernel32.dll.
+            WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                         "Loaded Kernel32.dll");
+
+            _PInitializeSRWLock =
+                (PInitializeSRWLock)GetProcAddress(
+                    library,
+                    "InitializeSRWLock");
+
+            _PAcquireSRWLockExclusive =
+               (PAcquireSRWLockExclusive)GetProcAddress(
+                   library,
+                   "AcquireSRWLockExclusive");
+            _PReleaseSRWLockExclusive =
+                (PReleaseSRWLockExclusive)GetProcAddress(
+                    library,
+                    "ReleaseSRWLockExclusive");
+            _PAcquireSRWLockShared =
+                (PAcquireSRWLockShared)GetProcAddress(
+                    library,
+                    "AcquireSRWLockShared");
+            _PReleaseSRWLockShared =
+                (PReleaseSRWLockShared)GetProcAddress(
+                    library,
+                    "ReleaseSRWLockShared");
+
+            // Only use the native path if every entry point resolved.
+            if( _PInitializeSRWLock &&
+                _PAcquireSRWLockExclusive &&
+                _PReleaseSRWLockExclusive &&
+                _PAcquireSRWLockShared &&
+                _PReleaseSRWLockShared )
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                            "Loaded Simple RW Lock");
+                _winSupportRWLockPrimitive = true;
+            }
+        }
+    }
+    if(_winSupportRWLockPrimitive)
+    {
+        _PInitializeSRWLock(&_lock);
+    } else {
+        _critSectPtr  = CriticalSectionWrapper::CreateCriticalSection();
+        _readCondPtr  = ConditionVariableWrapper::CreateConditionVariable();
+        _writeCondPtr = ConditionVariableWrapper::CreateConditionVariable();
+    }
+    return 0;
+}
+
+// Each method dispatches to the native SRW API when available; otherwise it
+// runs the same writer-preferring algorithm as RWLockWrapperGeneric.
+void RWLockWindows::AcquireLockExclusive()
+{
+    if (_winSupportRWLockPrimitive)
+    {
+        _PAcquireSRWLockExclusive(&_lock);
+    } else {
+        _critSectPtr->Enter();
+
+        // Wait until no reader and no writer holds the lock.
+        if (_writerActive || _readersActive > 0)
+        {
+            ++_writersWaiting;
+            while (_writerActive || _readersActive > 0)
+            {
+                _writeCondPtr->SleepCS(*_critSectPtr);
+            }
+            --_writersWaiting;
+        }
+        _writerActive = true;
+        _critSectPtr->Leave();
+    }
+}
+
+void RWLockWindows::ReleaseLockExclusive()
+{
+    if(_winSupportRWLockPrimitive)
+    {
+        _PReleaseSRWLockExclusive(&_lock);
+    } else {
+        _critSectPtr->Enter();
+        _writerActive = false;
+        // Writer priority: wake one waiting writer before waiting readers.
+        if (_writersWaiting > 0)
+        {
+            _writeCondPtr->Wake();
+
+        }else if (_readersWaiting > 0) {
+            _readCondPtr->WakeAll();
+        }
+        _critSectPtr->Leave();
+    }
+}
+
+void RWLockWindows::AcquireLockShared()
+{
+    if(_winSupportRWLockPrimitive)
+    {
+        _PAcquireSRWLockShared(&_lock);
+    } else
+    {
+        _critSectPtr->Enter();
+        // New readers yield to any active or waiting writer.
+        if (_writerActive || _writersWaiting > 0)
+        {
+            ++_readersWaiting;
+
+            while (_writerActive || _writersWaiting > 0)
+            {
+                _readCondPtr->SleepCS(*_critSectPtr);
+            }
+            --_readersWaiting;
+        }
+        ++_readersActive;
+        _critSectPtr->Leave();
+    }
+}
+
+void RWLockWindows::ReleaseLockShared()
+{
+    if(_winSupportRWLockPrimitive)
+    {
+        _PReleaseSRWLockShared(&_lock);
+    } else
+    {
+        _critSectPtr->Enter();
+
+        --_readersActive;
+
+        // The last reader out wakes a single waiting writer, if any.
+        if (_readersActive == 0 && _writersWaiting > 0)
+        {
+            _writeCondPtr->Wake();
+        }
+        _critSectPtr->Leave();
+    }
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/rw_lock_win.h b/trunk/src/system_wrappers/source/rw_lock_win.h
new file mode 100644
index 0000000..dc5355e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/rw_lock_win.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_WINDOWS_H_
+
+#include "rw_lock_wrapper.h"
+
+#include <Windows.h>
+
+// Provide the SRWLOCK layout when building against an SDK that predates it
+// (SDKs shipping the real type also define RTL_SRWLOCK_INIT).
+#if !defined(RTL_SRWLOCK_INIT)
+    typedef struct _RTL_SRWLOCK
+    {
+        void* Ptr;
+    } RTL_SRWLOCK, *PRTL_SRWLOCK;
+    typedef RTL_SRWLOCK SRWLOCK, *PSRWLOCK;
+#endif
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class ConditionVariableWrapper;
+
+// Signatures of the Vista+ slim reader/writer lock API; the actual entry
+// points are resolved at runtime with GetProcAddress in rw_lock_win.cc.
+typedef void (WINAPI *PInitializeSRWLock)(PSRWLOCK);
+
+typedef void (WINAPI *PAcquireSRWLockExclusive)(PSRWLOCK);
+typedef void (WINAPI *PReleaseSRWLockExclusive)(PSRWLOCK);
+
+typedef void (WINAPI *PAcquireSRWLockShared)(PSRWLOCK);
+typedef void (WINAPI *PReleaseSRWLockShared)(PSRWLOCK);
+
+
+// Windows reader/writer lock: uses the native SRW lock when the running OS
+// provides it, otherwise a built-in condition-variable fallback.
+class RWLockWindows :public RWLockWrapper
+{
+public:
+    RWLockWindows();
+    virtual ~RWLockWindows();
+
+    virtual void AcquireLockExclusive();
+    virtual void ReleaseLockExclusive();
+
+    virtual void AcquireLockShared();
+    virtual void ReleaseLockShared();
+
+protected:
+    // Resolves the SRW entry points on first use; always returns 0.
+    virtual int Init();
+
+private:
+    // For native implementation.
+    static bool _winSupportRWLockPrimitive;
+    SRWLOCK     _lock;
+
+    // Generic implementation, fallback if native is not supported.
+    CriticalSectionWrapper*   _critSectPtr;
+    ConditionVariableWrapper* _readCondPtr;
+    ConditionVariableWrapper* _writeCondPtr;
+
+    int  _readersActive;  // Readers currently holding the lock.
+    bool _writerActive;   // True while a writer holds the lock.
+    int  _readersWaiting; // Readers blocked behind active/waiting writers.
+    int  _writersWaiting; // Writers blocked behind readers or a writer.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_RW_LOCK_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/source/set_thread_name_win.h b/trunk/src/system_wrappers/source/set_thread_name_win.h
new file mode 100644
index 0000000..a46f4d6
--- /dev/null
+++ b/trunk/src/system_wrappers/source/set_thread_name_win.h
@@ -0,0 +1,43 @@
+/*
+ *  Use of this source code is governed by the MICROSOFT LIMITED PUBLIC LICENSE
+ *  copyright license which can be found in the LICENSE file in the
+ *  third_party_mods/mslpl directory of the source tree or at
+ *  http://msdn.microsoft.com/en-us/cc300389.aspx#P.
+ */
+/*
+ *  The original code can be found here:
+ *  http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WINDOWS_SET_NAME_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WINDOWS_SET_NAME_H_
+
+namespace webrtc {
+
+// Payload handed to the debugger via RaiseException to name a thread.
+struct THREADNAME_INFO
+{
+   DWORD dwType;     // must be 0x1000
+   LPCSTR szName;    // pointer to name (in user addr space)
+   DWORD dwThreadID; // thread ID (-1 = caller thread)
+   DWORD dwFlags;    // reserved for future use, must be zero
+};
+
+// Names a thread in the Visual Studio debugger using the well-known
+// 0x406D1388 exception protocol; the __except filter lets execution
+// continue so the call is harmless when no debugger is attached.
+// NOTE(review): this function is defined (not just declared) in a header
+// without `inline`; including it from more than one translation unit would
+// break the one-definition rule - confirm it is included only once.
+void SetThreadName(DWORD dwThreadID, LPCSTR szThreadName)
+{
+    THREADNAME_INFO info;
+    info.dwType = 0x1000;
+    info.szName = szThreadName;
+    info.dwThreadID = dwThreadID;
+    info.dwFlags = 0;
+
+    __try
+    {
+        RaiseException(0x406D1388, 0, sizeof(info) / sizeof(DWORD),
+                       (ULONG_PTR*)&info);
+    }
+    __except (EXCEPTION_CONTINUE_EXECUTION)
+    {
+    }
+}
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WINDOWS_SET_NAME_H_
diff --git a/trunk/src/system_wrappers/source/sort.cc b/trunk/src/system_wrappers/source/sort.cc
new file mode 100644
index 0000000..34aa437
--- /dev/null
+++ b/trunk/src/system_wrappers/source/sort.cc
@@ -0,0 +1,549 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// When the platform supports STL, the functions are implemented using a
+// templated spreadsort algorithm (http://sourceforge.net/projects/spreadsort/),
+// part of the Boost C++ library collection. Otherwise, the C standard library's
+// qsort() will be used.
+
+#include "sort.h"
+
+#include <cassert>
+#include <cstring>  // memcpy
+#include <new>      // nothrow new
+
+#ifdef NO_STL
+#include <cstdlib>      // qsort
+#else
+#include <algorithm>    // std::sort
+#include <vector>
+#include "spreadsort.hpp" // TODO (ajm) upgrade to spreadsortv2.
+#endif
+
+#ifdef NO_STL
+// Three-way comparison on two already-dereferenced values; expands to the
+// return statements of a qsort() comparator.
+#define COMPARE_DEREFERENCED(XT, YT)        \
+    do                                      \
+    {                                       \
+        if ((XT) > (YT))                    \
+        {                                   \
+            return 1;                       \
+        }                                   \
+        else if ((XT) < (YT))               \
+        {                                   \
+            return -1;                      \
+        }                                   \
+                                            \
+        return 0;                           \
+    }                                       \
+    while(0)
+
+// qsort() comparator body for a plain array of TYPE.
+#define COMPARE_FOR_QSORT(X, Y, TYPE)                               \
+    do                                                              \
+    {                                                               \
+        TYPE xT = static_cast<TYPE>(*static_cast<const TYPE*>(X));  \
+        TYPE yT = static_cast<TYPE>(*static_cast<const TYPE*>(Y));  \
+        COMPARE_DEREFERENCED(xT, yT);                               \
+    }                                                               \
+    while(0)
+
+// qsort() comparator body for an array of SortKey whose key points at TYPE.
+#define COMPARE_KEY_FOR_QSORT(SORT_KEY_X, SORT_KEY_Y, TYPE)         \
+    do                                                              \
+    {                                                               \
+        TYPE xT = static_cast<TYPE>(*static_cast<TYPE*>             \
+            (static_cast<const SortKey*>(SORT_KEY_X)->key));        \
+        TYPE yT = static_cast<TYPE>(*static_cast<TYPE*>             \
+            (static_cast<const SortKey*>(SORT_KEY_Y)->key));        \
+        COMPARE_DEREFERENCED(xT, yT);                               \
+    }                                                               \
+    while(0)
+
+// Fills SORT_KEY[0..NUM_OF_ELEMENTS) with pointers into KEY, then sorts it.
+// Fix: the body previously referenced the caller's variables `key` and
+// `ptrSortKey` instead of the KEY/SORT_KEY parameters, so the macro only
+// worked when callers happened to use those exact names.
+#define KEY_QSORT(SORT_KEY, KEY, NUM_OF_ELEMENTS, KEY_TYPE, COMPARE_FUNC)     \
+    do                                                                        \
+    {                                                                         \
+        KEY_TYPE* keyT = (KEY_TYPE*)(KEY);                                    \
+        for (WebRtc_UWord32 i = 0; i < (NUM_OF_ELEMENTS); i++)                \
+        {                                                                     \
+            (SORT_KEY)[i].key = &keyT[i];                                     \
+            (SORT_KEY)[i].index = i;                                          \
+        }                                                                     \
+                                                                              \
+        qsort((SORT_KEY), (NUM_OF_ELEMENTS), sizeof(SortKey), (COMPARE_FUNC));\
+    }                                                                         \
+    while(0)
+#endif
+
+namespace webrtc
+{
+// Pairs a sort key with the element's original index so the data array can
+// be permuted after the keys are sorted.
+#ifdef NO_STL
+    struct SortKey
+    {
+        void* key;            // Points into the caller's key array.
+        WebRtc_UWord32 index; // Original position of the element.
+    };
+#else
+    template<typename KeyType>
+    struct SortKey
+    {
+        KeyType key;          // Copy of the element's key.
+        WebRtc_UWord32 index; // Original position of the element.
+    };
+#endif
+
+    namespace // Unnamed namespace provides internal linkage.
+    {
+#ifdef NO_STL
+        // qsort() comparators for each supported element type. The
+        // Compare* family dereferences raw elements; the CompareKey*
+        // family dereferences SortKey::key.
+        int CompareWord8(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_Word8);
+        }
+
+        int CompareUWord8(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_UWord8);
+        }
+
+        int CompareWord16(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_Word16);
+        }
+
+        int CompareUWord16(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_UWord16);
+        }
+
+        int CompareWord32(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_Word32);
+        }
+
+        int CompareUWord32(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_UWord32);
+        }
+
+        int CompareWord64(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_Word64);
+        }
+
+        int CompareUWord64(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, WebRtc_UWord64);
+        }
+
+        int CompareFloat32(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, float);
+        }
+
+        int CompareFloat64(const void* x, const void* y)
+        {
+            COMPARE_FOR_QSORT(x, y, double);
+        }
+
+        int CompareKeyWord8(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_Word8);
+        }
+
+        int CompareKeyUWord8(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_UWord8);
+        }
+
+        int CompareKeyWord16(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_Word16);
+        }
+
+        int CompareKeyUWord16(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_UWord16);
+        }
+
+        int CompareKeyWord32(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_Word32);
+        }
+
+        int CompareKeyUWord32(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_UWord32);
+        }
+
+        int CompareKeyWord64(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_Word64);
+        }
+
+        int CompareKeyUWord64(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, WebRtc_UWord64);
+        }
+
+        int CompareKeyFloat32(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, float);
+        }
+
+        int CompareKeyFloat64(const void* sortKeyX, const void* sortKeyY)
+        {
+            COMPARE_KEY_FOR_QSORT(sortKeyX, sortKeyY, double);
+        }
+#else
+        // Ordering functor for SortKey, used by std::sort and spreadsort.
+        template <typename KeyType>
+        struct KeyLessThan
+        {
+            bool operator()(const SortKey<KeyType>& sortKeyX,
+                const SortKey<KeyType>& sortKeyY) const
+            {
+                return sortKeyX.key < sortKeyY.key;
+            }
+        };
+
+        // Radix-shift functor required by boost::integer_sort.
+        template <typename KeyType>
+        struct KeyRightShift
+        {
+            KeyType operator()(const SortKey<KeyType>& sortKey,
+                const unsigned offset) const
+            {
+                return sortKey.key >> offset;
+            }
+        };
+
+        // In-place spreadsort of an integer array.
+        template <typename DataType>
+        inline void IntegerSort(void* data, WebRtc_UWord32 numOfElements)
+        {
+            DataType* dataT = static_cast<DataType*>(data);
+            boost::integer_sort(dataT, dataT + numOfElements);
+        }
+
+        // Spreadsort of a float array via an integer cast type. Not called
+        // by Sort() below (floats fall back to StdSort there); presumably
+        // kept for KeySort or future use - confirm before removing.
+        template <typename DataType, typename IntegerType>
+        inline void FloatSort(void* data, WebRtc_UWord32 numOfElements)
+        {
+            DataType* dataT = static_cast<DataType*>(data);
+            IntegerType cVal = 0;
+            boost::float_sort_cast(dataT, dataT + numOfElements, cVal);
+        }
+
+        // std::sort fallback for types spreadsort handles poorly.
+        template <typename DataType>
+        inline void StdSort(void* data, WebRtc_UWord32 numOfElements)
+        {
+            DataType* dataT = static_cast<DataType*>(data);
+            std::sort(dataT, dataT + numOfElements);
+        }
+
+        // Allocates ptrSortKey and fills it with (key value, original index)
+        // pairs. Returns -1 on allocation failure, else 0. On success the
+        // caller owns ptrSortKey.
+        template<typename KeyType>
+        inline WebRtc_Word32 SetupKeySort(void* key,
+                                          SortKey<KeyType>*& ptrSortKey,
+                                          WebRtc_UWord32 numOfElements)
+        {
+            ptrSortKey = new(std::nothrow) SortKey<KeyType>[numOfElements];
+            if (ptrSortKey == NULL)
+            {
+                return -1;
+            }
+
+            KeyType* keyT = static_cast<KeyType*>(key);
+            for (WebRtc_UWord32 i = 0; i < numOfElements; i++)
+            {
+                ptrSortKey[i].key = keyT[i];
+                ptrSortKey[i].index = i;
+            }
+
+            return 0;
+        }
+
+        // Permutes |data| into the order given by the sorted ptrSortKey and
+        // frees ptrSortKey (on all paths). Returns -1 on allocation failure.
+        template<typename KeyType>
+        inline WebRtc_Word32 TeardownKeySort(void* data,
+                                             SortKey<KeyType>* ptrSortKey,
+            WebRtc_UWord32 numOfElements, WebRtc_UWord32 sizeOfElement)
+        {
+            WebRtc_UWord8* ptrData = static_cast<WebRtc_UWord8*>(data);
+            WebRtc_UWord8* ptrDataSorted = new(std::nothrow) WebRtc_UWord8
+                [numOfElements * sizeOfElement];
+            if (ptrDataSorted == NULL)
+            {
+                // Fix: ptrSortKey was previously leaked on this error path
+                // (callers just propagate -1 without freeing it).
+                delete[] ptrSortKey;
+                return -1;
+            }
+
+            for (WebRtc_UWord32 i = 0; i < numOfElements; i++)
+            {
+                memcpy(ptrDataSorted + i * sizeOfElement, ptrData +
+                       ptrSortKey[i].index * sizeOfElement, sizeOfElement);
+            }
+            memcpy(ptrData, ptrDataSorted, numOfElements * sizeOfElement);
+            delete[] ptrSortKey;
+            delete[] ptrDataSorted;
+            return 0;
+        }
+
+        // Key-sort using spreadsort for integer key types.
+        template<typename KeyType>
+        inline WebRtc_Word32 IntegerKeySort(void* data, void* key,
+                                            WebRtc_UWord32 numOfElements,
+                                            WebRtc_UWord32 sizeOfElement)
+        {
+            SortKey<KeyType>* ptrSortKey;
+            if (SetupKeySort<KeyType>(key, ptrSortKey, numOfElements) != 0)
+            {
+                return -1;
+            }
+
+            boost::integer_sort(ptrSortKey, ptrSortKey + numOfElements,
+                KeyRightShift<KeyType>(), KeyLessThan<KeyType>());
+
+            if (TeardownKeySort<KeyType>(data, ptrSortKey, numOfElements,
+                    sizeOfElement) != 0)
+            {
+                return -1;
+            }
+
+            return 0;
+        }
+
+        // Key-sort using std::sort for the remaining key types.
+        template<typename KeyType>
+        inline WebRtc_Word32 StdKeySort(void* data, void* key,
+                                        WebRtc_UWord32 numOfElements,
+                                        WebRtc_UWord32 sizeOfElement)
+        {
+            SortKey<KeyType>* ptrSortKey;
+            if (SetupKeySort<KeyType>(key, ptrSortKey, numOfElements) != 0)
+            {
+                return -1;
+            }
+
+            std::sort(ptrSortKey, ptrSortKey + numOfElements,
+                KeyLessThan<KeyType>());
+
+            if (TeardownKeySort<KeyType>(data, ptrSortKey, numOfElements,
+                    sizeOfElement) != 0)
+            {
+                return -1;
+            }
+
+            return 0;
+        }
+#endif
+    }
+
+    // Sorts |data| in place; |type| selects the element type. Returns 0 on
+    // success, -1 on error (NULL data or unrecognized type). Fix: the STL
+    // switch previously had no default case and silently returned 0 for an
+    // unrecognized type, unlike the NO_STL path which returned -1.
+    WebRtc_Word32 Sort(void* data, WebRtc_UWord32 numOfElements, Type type)
+    {
+        if (data == NULL)
+        {
+            return -1;
+        }
+
+#ifdef NO_STL
+        switch (type)
+        {
+        case TYPE_Word8:
+            qsort(data, numOfElements, sizeof(WebRtc_Word8), CompareWord8);
+            break;
+        case TYPE_UWord8:
+            qsort(data, numOfElements, sizeof(WebRtc_UWord8), CompareUWord8);
+            break;
+        case TYPE_Word16:
+            qsort(data, numOfElements, sizeof(WebRtc_Word16), CompareWord16);
+            break;
+        case TYPE_UWord16:
+            qsort(data, numOfElements, sizeof(WebRtc_UWord16), CompareUWord16);
+            break;
+        case TYPE_Word32:
+            qsort(data, numOfElements, sizeof(WebRtc_Word32), CompareWord32);
+            break;
+        case TYPE_UWord32:
+            qsort(data, numOfElements, sizeof(WebRtc_UWord32), CompareUWord32);
+            break;
+        case TYPE_Word64:
+            qsort(data, numOfElements, sizeof(WebRtc_Word64), CompareWord64);
+            break;
+        case TYPE_UWord64:
+            qsort(data, numOfElements, sizeof(WebRtc_UWord64), CompareUWord64);
+            break;
+        case TYPE_Float32:
+            qsort(data, numOfElements, sizeof(float), CompareFloat32);
+            break;
+        case TYPE_Float64:
+            qsort(data, numOfElements, sizeof(double), CompareFloat64);
+            break;
+        default:
+            return -1;
+        }
+#else
+        // Fall back to std::sort for 64-bit types and floats due to compiler
+        // warnings and VS 2003 build crashes respectively with spreadsort.
+        switch (type)
+        {
+        case TYPE_Word8:
+            IntegerSort<WebRtc_Word8>(data, numOfElements);
+            break;
+        case TYPE_UWord8:
+            IntegerSort<WebRtc_UWord8>(data, numOfElements);
+            break;
+        case TYPE_Word16:
+            IntegerSort<WebRtc_Word16>(data, numOfElements);
+            break;
+        case TYPE_UWord16:
+            IntegerSort<WebRtc_UWord16>(data, numOfElements);
+            break;
+        case TYPE_Word32:
+            IntegerSort<WebRtc_Word32>(data, numOfElements);
+            break;
+        case TYPE_UWord32:
+            IntegerSort<WebRtc_UWord32>(data, numOfElements);
+            break;
+        case TYPE_Word64:
+            StdSort<WebRtc_Word64>(data, numOfElements);
+            break;
+        case TYPE_UWord64:
+            StdSort<WebRtc_UWord64>(data, numOfElements);
+            break;
+        case TYPE_Float32:
+            StdSort<float>(data, numOfElements);
+            break;
+        case TYPE_Float64:
+            StdSort<double>(data, numOfElements);
+            break;
+        default:
+            return -1;
+        }
+#endif
+        return 0;
+    }
+
+    // Sorts the elements of |data| according to the parallel |key| array.
+    // Returns 0 on success, -1 on bad arguments, unsupported key type, or
+    // allocation failure. |data| is reordered in place; |key| is also sorted.
+    WebRtc_Word32 KeySort(void* data, void* key, WebRtc_UWord32 numOfElements,
+                          WebRtc_UWord32 sizeOfElement, Type keyType)
+    {
+        if (data == NULL)
+        {
+            return -1;
+        }
+
+        if (key == NULL)
+        {
+            return -1;
+        }
+
+        if ((WebRtc_UWord64)numOfElements * sizeOfElement > 0xffffffff)  // Byte count must fit in 32 bits.
+        {
+            return -1;
+        }
+
+#ifdef NO_STL
+        SortKey* ptrSortKey = new(std::nothrow) SortKey[numOfElements];  // Index map: one SortKey per element.
+        if (ptrSortKey == NULL)
+        {
+            return -1;
+        }
+
+        switch (keyType)
+        {
+        case TYPE_Word8:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_Word8,
+                CompareKeyWord8);
+            break;
+        case TYPE_UWord8:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_UWord8,
+                CompareKeyUWord8);
+            break;
+        case TYPE_Word16:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_Word16,
+                CompareKeyWord16);
+            break;
+        case TYPE_UWord16:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_UWord16,
+                CompareKeyUWord16);
+            break;
+        case TYPE_Word32:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_Word32,
+                CompareKeyWord32);
+            break;
+        case TYPE_UWord32:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_UWord32,
+                CompareKeyUWord32);
+            break;
+        case TYPE_Word64:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_Word64,
+                CompareKeyWord64);
+            break;
+        case TYPE_UWord64:
+            KEY_QSORT(ptrSortKey, key, numOfElements, WebRtc_UWord64,
+                CompareKeyUWord64);
+            break;
+        case TYPE_Float32:
+            KEY_QSORT(ptrSortKey, key, numOfElements, float,
+                CompareKeyFloat32);
+            break;
+        case TYPE_Float64:
+            KEY_QSORT(ptrSortKey, key, numOfElements, double,
+                CompareKeyFloat64);
+            break;
+        default:
+            delete[] ptrSortKey; return -1;  // Don't leak the key array on an unsupported type.
+        }
+
+        // Shuffle into sorted position based on index map.
+        WebRtc_UWord8* ptrData = static_cast<WebRtc_UWord8*>(data);
+        WebRtc_UWord8* ptrDataSorted = new(std::nothrow) WebRtc_UWord8
+            [numOfElements * sizeOfElement];
+        if (ptrDataSorted == NULL)
+        {
+            delete[] ptrSortKey; return -1;  // Don't leak the key array on allocation failure.
+        }
+
+        for (WebRtc_UWord32 i = 0; i < numOfElements; i++)
+        {
+            memcpy(ptrDataSorted + i * sizeOfElement, ptrData +
+                ptrSortKey[i].index * sizeOfElement, sizeOfElement);
+        }
+        memcpy(ptrData, ptrDataSorted, numOfElements * sizeOfElement);
+
+        delete[] ptrSortKey;
+        delete[] ptrDataSorted;
+
+        return 0;
+#else
+        // Fall back to std::sort for 64-bit types and floats due to compiler
+        // warnings and errors respectively with spreadsort.
+        switch (keyType)
+        {
+        case TYPE_Word8:
+            return IntegerKeySort<WebRtc_Word8>(data, key, numOfElements,
+                                                sizeOfElement);
+        case TYPE_UWord8:
+            return IntegerKeySort<WebRtc_UWord8>(data, key, numOfElements,
+                                                 sizeOfElement);
+        case TYPE_Word16:
+            return IntegerKeySort<WebRtc_Word16>(data, key, numOfElements,
+                                                 sizeOfElement);
+        case TYPE_UWord16:
+            return IntegerKeySort<WebRtc_UWord16>(data, key, numOfElements,
+                                                  sizeOfElement);
+        case TYPE_Word32:
+            return IntegerKeySort<WebRtc_Word32>(data, key, numOfElements,
+                                                 sizeOfElement);
+        case TYPE_UWord32:
+            return IntegerKeySort<WebRtc_UWord32>(data, key, numOfElements,
+                                                  sizeOfElement);
+        case TYPE_Word64:
+            return StdKeySort<WebRtc_Word64>(data, key, numOfElements,
+                                             sizeOfElement);
+        case TYPE_UWord64:
+            return StdKeySort<WebRtc_UWord64>(data, key, numOfElements,
+                                              sizeOfElement);
+        case TYPE_Float32:
+            return StdKeySort<float>(data, key, numOfElements, sizeOfElement);
+        case TYPE_Float64:
+            return StdKeySort<double>(data, key, numOfElements, sizeOfElement);
+        }
+        assert(false);
+        return -1;
+#endif
+    }
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/spreadsortlib/constants.hpp b/trunk/src/system_wrappers/source/spreadsortlib/constants.hpp
new file mode 100644
index 0000000..fa81ece
--- /dev/null
+++ b/trunk/src/system_wrappers/source/spreadsortlib/constants.hpp
@@ -0,0 +1,42 @@
+/*Boost Software License - Version 1.0 - August 17th, 2003

+

+Permission is hereby granted, free of charge, to any person or organization

+obtaining a copy of the software and accompanying documentation covered by

+this license (the "Software") to use, reproduce, display, distribute,

+execute, and transmit the Software, and to prepare derivative works of the

+Software, and to permit third-parties to whom the Software is furnished to

+do so, all subject to the following:

+

+The copyright notices in the Software and this entire statement, including

+the above license grant, this restriction and the following disclaimer,

+must be included in all copies of the Software, in whole or in part, and

+all derivative works of the Software, unless such copies or derivative

+works are solely in the form of machine-executable object code generated by

+a source language processor.

+

+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR

+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,

+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT

+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE

+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,

+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER

+DEALINGS IN THE SOFTWARE.*/

+#ifndef BOOST_SPREADSORT_CONSTANTS

+#define BOOST_SPREADSORT_CONSTANTS

+namespace boost {

+namespace detail {

+//Tuning constants

+//Sets the minimum number of items per bin.

+static const unsigned LOG_MEAN_BIN_SIZE = 2;

+//This should be tuned to your processor cache; if you go too large you get cache misses on bins

+//The smaller this number, the less worst-case memory usage.  If too small, too many recursions slow down spreadsort

+static const unsigned MAX_SPLITS = 10;

+//Used to force a comparison-based sorting for small bins, if it's faster.  Minimum value 0

+static const unsigned LOG_MIN_SPLIT_COUNT = 5;

+//There is a minimum size below which it is not worth using spreadsort

+static const long MIN_SORT_SIZE = 1000;

+//This is the constant on the log base n of m calculation; make this larger the faster std::sort is relative to spreadsort

+static const unsigned LOG_CONST = 2;

+}

+}

+#endif

diff --git a/trunk/src/system_wrappers/source/spreadsortlib/spreadsort.hpp b/trunk/src/system_wrappers/source/spreadsortlib/spreadsort.hpp
new file mode 100644
index 0000000..2d1529a
--- /dev/null
+++ b/trunk/src/system_wrappers/source/spreadsortlib/spreadsort.hpp
@@ -0,0 +1,1688 @@
+//Templated spread_sort library

+

+//          Copyright Steven J. Ross 2001 - 2009.

+// Distributed under the Boost Software License, Version 1.0.

+//    (See accompanying file LICENSE_1_0.txt or copy at

+//          http://www.boost.org/LICENSE_1_0.txt)

+

+//  See http://www.boost.org/ for updates, documentation, and revision history.

+		  

+/*

+Some improvements suggested by:

+Phil Endecott and Frank Gennari

+Cygwin fix provided by:

+Scott McMurray

+*/

+

+#ifndef BOOST_SPREAD_SORT_H

+#define BOOST_SPREAD_SORT_H

+#include <algorithm>

+#include <vector>

+#include "constants.hpp"

+#include <cstring>

+

+namespace boost {

+  namespace detail {

+  	//This only works on unsigned data types

+  	template <typename T>

+  	inline unsigned 

+  	rough_log_2_size(const T& input) 

+  	{

+  		unsigned result = 0;

+  		//The && is necessary on some compilers to avoid infinite loops; it doesn't significantly impair performance

+  		while((input >> result) && (result < (8*sizeof(T)))) ++result;

+  		return result;

+  	}

+

+  	//Gets the maximum size which we'll call spread_sort on to control worst-case performance

+  	//Maintains both a minimum size to recurse and a check of distribution size versus count

+  	//This is called for a set of bins, instead of bin-by-bin, to avoid performance overhead

+  	inline size_t

+  	get_max_count(unsigned log_range, size_t count)

+  	{

+  		unsigned divisor = rough_log_2_size(count);

+  		//Making sure the divisor is positive

+  		if(divisor > LOG_MEAN_BIN_SIZE)

+  			divisor -= LOG_MEAN_BIN_SIZE;

+  		else

+  			divisor = 1;

+  		unsigned relative_width = (LOG_CONST * log_range)/((divisor > MAX_SPLITS) ? MAX_SPLITS : divisor);

+  		//Don't try to bitshift more than the size of an element

+  		if((8*sizeof(size_t)) <= relative_width)

+  			relative_width = (8*sizeof(size_t)) - 1;

+  		return (size_t)1 << ((relative_width < (LOG_MEAN_BIN_SIZE + LOG_MIN_SPLIT_COUNT)) ? 

+  			(LOG_MEAN_BIN_SIZE + LOG_MIN_SPLIT_COUNT) :  relative_width);

+  	}

+

+  	//Find the minimum and maximum using <

+  	template <class RandomAccessIter>

+  	inline void 

+  	find_extremes(RandomAccessIter current, RandomAccessIter last, RandomAccessIter & max, RandomAccessIter & min)

+  	{

+  		min = max = current;

+  		//Start from the second item, as max and min are initialized to the first

+  		while(++current < last) {

+  			if(*max < *current)

+  				max = current;

+  			else if(*current < *min)

+  				min = current;

+  		}

+  	}

+

+  	//Uses a user-defined comparison operator to find minimum and maximum

+  	template <class RandomAccessIter, class compare>

+  	inline void 

+  	find_extremes(RandomAccessIter current, RandomAccessIter last, RandomAccessIter & max, RandomAccessIter & min, compare comp)

+  	{

+  		min = max = current;

+  		while(++current < last) {

+  			if(comp(*max, *current))

+  				max = current;

+  			else if(comp(*current, *min))

+  				min = current;

+  		}

+  	}

+

+  	//Gets a non-negative right bit shift to operate as a logarithmic divisor

+  	inline int

+  	get_log_divisor(size_t count, unsigned log_range)

+  	{

+  		int log_divisor;

+  		//If we can finish in one iteration without exceeding either (2 to the MAX_SPLITS) or n bins, do so

+  		if((log_divisor = log_range - rough_log_2_size(count)) <= 0 && log_range < MAX_SPLITS)

+  			log_divisor = 0;

+  		else {

+  			//otherwise divide the data into an optimized number of pieces

+  			log_divisor += LOG_MEAN_BIN_SIZE;

+  			if(log_divisor < 0)

+  				log_divisor = 0;

+  			//Cannot exceed MAX_SPLITS or cache misses slow down bin lookups dramatically

+  			if((log_range - log_divisor) > MAX_SPLITS)

+  				log_divisor = log_range - MAX_SPLITS;

+  		}

+  		return log_divisor;

+  	}

+

+  	template <class RandomAccessIter>

+  	inline RandomAccessIter * 

+  	size_bins(std::vector<size_t> &bin_sizes, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset, unsigned &cache_end, unsigned bin_count)

+  	{

+  		//Assure space for the size of each bin, followed by initializing sizes

+  		if(bin_count > bin_sizes.size())

+  			bin_sizes.resize(bin_count);

+  		for(size_t u = 0; u < bin_count; u++)

+  			bin_sizes[u] = 0;

+  		//Make sure there is space for the bins

+  		cache_end = cache_offset + bin_count;

+  		if(cache_end > bin_cache.size())

+  			bin_cache.resize(cache_end);

+  		return &(bin_cache[cache_offset]);

+  	}

+

+  	//Implementation for recursive integer sorting

+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void 

+  	spread_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  				  , std::vector<size_t> &bin_sizes)

+  	{

+  		//This step is roughly 10% of runtime, but it helps avoid worst-case behavior and improve behavior with real data

+  		//If you know the maximum and minimum ahead of time, you can pass those values in and skip this step for the first iteration

+  		RandomAccessIter max, min;

+  		find_extremes(first, last, max, min);

+  		//max and min will be the same (the first item) iff all values are equivalent

+  		if(max == min)

+  			return;

+  		RandomAccessIter * target_bin;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(*max >> 0) - (*min >> 0)));

+  		div_type div_min = *min >> log_divisor;

+  		div_type div_max = *max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  	

+  		//Calculating the size of each bin; this takes roughly 10% of runtime

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[(*(current++) >> log_divisor) - div_min]++;

+  		//Assign the bin positions

+  		bins[0] = first;

+  		for(unsigned u = 0; u < bin_count - 1; u++)

+  			bins[u + 1] = bins[u] + bin_sizes[u];

+  

+  		//Swap into place

+  		//This dominates runtime, mostly in the swap and bin lookups

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned u = 0; u < bin_count - 1; ++u) {

+  			RandomAccessIter * local_bin = bins + u;

+  			nextbinstart += bin_sizes[u];

+  			//Iterating over each element in this bin

+  			for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  				//Swapping elements in current into place until the correct element has been swapped in

+  				for(target_bin = (bins + ((*current >> log_divisor) - div_min));  target_bin != local_bin; 

+  					target_bin = bins + ((*current >> log_divisor) - div_min)) {

+  					//3-way swap; this is about 1% faster than a 2-way swap with integers

+  					//The main advantage is less copies are involved per item put in the correct place

+  					data_type tmp;

+  					RandomAccessIter b = (*target_bin)++;

+  					RandomAccessIter * b_bin = bins + ((*b >> log_divisor) - div_min);

+  					if (b_bin != local_bin) {

+  						RandomAccessIter c = (*b_bin)++;

+  						tmp = *c;

+  						*c = *b;

+  					} 

+  					else

+  						tmp = *b;

+  					*b = *current;

+  					*current = tmp;

+  				}

+  			}

+  			*local_bin = nextbinstart;

+  		}

+  		bins[bin_count - 1] = last;

+  

+  		//If we've bucketsorted, the array is sorted and we should skip recursion

+  		if(!log_divisor)

+  			return;

+  

+  		//Recursing; log_divisor is the remaining range

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(unsigned u = cache_offset; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			//don't sort unless there are at least two items to compare

+  			if(count < 2)

+  				continue;

+  			//using std::sort if its worst-case is better

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u]);

+  			else

+  				spread_sort_rec<RandomAccessIter, div_type, data_type>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes);

+  		}

+  	}

+

+  	//Generic bitshift-based 3-way swapping code

+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void inner_swap_loop(RandomAccessIter * bins, const RandomAccessIter & nextbinstart, unsigned ii, right_shift &shift

+  		, const unsigned log_divisor, const div_type div_min) 

+  	{

+  		RandomAccessIter * local_bin = bins + ii;

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			for(RandomAccessIter * target_bin = (bins + (shift(*current, log_divisor) - div_min));  target_bin != local_bin; 

+  				target_bin = bins + (shift(*current, log_divisor) - div_min)) {

+  				data_type tmp;

+  				RandomAccessIter b = (*target_bin)++;

+  				RandomAccessIter * b_bin = bins + (shift(*b, log_divisor) - div_min);

+  				//Three-way swap; if the item to be swapped doesn't belong in the current bin, swap it to where it belongs

+  				if (b_bin != local_bin) {

+  					RandomAccessIter c = (*b_bin)++;

+  					tmp = *c;

+  					*c = *b;

+  				} 

+  				//Note: we could increment current once the swap is done in this case, but that seems to impair performance

+  				else

+  					tmp = *b;

+  				*b = *current;

+  				*current = tmp;

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  	}

+

+  	//Standard swapping wrapper for ascending values

+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void swap_loop(RandomAccessIter * bins, RandomAccessIter & nextbinstart, unsigned ii, right_shift &shift

+  		, const std::vector<size_t> &bin_sizes, const unsigned log_divisor, const div_type div_min) 

+  	{

+  		nextbinstart += bin_sizes[ii];

+  		inner_swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, ii, shift, log_divisor, div_min);

+  	}

+

+  	//Functor implementation for recursive sorting

+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift, class compare>

+  	inline void 

+  	spread_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes, right_shift shift, compare comp)

+  	{

+  		RandomAccessIter max, min;

+  		find_extremes(first, last, max, min, comp);

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(shift(*max, 0)) - (shift(*min, 0))));

+  		div_type div_min = shift(*min, log_divisor);

+  		div_type div_max = shift(*max, log_divisor);

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[shift(*(current++), log_divisor) - div_min]++;

+  		bins[0] = first;

+  		for(unsigned u = 0; u < bin_count - 1; u++)

+  			bins[u + 1] = bins[u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned u = 0; u < bin_count - 1; ++u)

+  			swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, u, shift, bin_sizes, log_divisor, div_min);

+  		bins[bin_count - 1] = last;

+  		

+  		//If we've bucketsorted, the array is sorted and we should skip recursion

+  		if(!log_divisor)

+  			return;

+  		

+  		//Recursing

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(unsigned u = cache_offset; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u], comp);

+  			else

+  				spread_sort_rec<RandomAccessIter, div_type, data_type, right_shift, compare>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes, shift, comp);

+  		}

+  	}

+

+  	//Functor implementation for recursive sorting with only Shift overridden

+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void 

+  	spread_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes, right_shift shift)

+  	{

+  		RandomAccessIter max, min;

+  		find_extremes(first, last, max, min);

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(shift(*max, 0)) - (shift(*min, 0))));

+  		div_type div_min = shift(*min, log_divisor);

+  		div_type div_max = shift(*max, log_divisor);

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[shift(*(current++), log_divisor) - div_min]++;

+  		bins[0] = first;

+  		for(unsigned u = 0; u < bin_count - 1; u++)

+  			bins[u + 1] = bins[u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned ii = 0; ii < bin_count - 1; ++ii)

+  			swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, ii, shift, bin_sizes, log_divisor, div_min);

+  		bins[bin_count - 1] = last;

+  		

+  		//If we've bucketsorted, the array is sorted and we should skip recursion

+  		if(!log_divisor)

+  			return;

+  		

+  		//Recursing

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(unsigned u = cache_offset; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u]);

+  			else

+  				spread_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes, shift);

+  		}

+  	}

+

+  	//Holds the bin vector and makes the initial recursive call

+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void 

+  	spread_sort(RandomAccessIter first, RandomAccessIter last, div_type, data_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		spread_sort_rec<RandomAccessIter, div_type, data_type>(first, last, bin_cache, 0, bin_sizes);

+  	}

+

+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift, class compare>

+  	inline void 

+  	spread_sort(RandomAccessIter first, RandomAccessIter last, div_type, data_type, right_shift shift, compare comp)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		spread_sort_rec<RandomAccessIter, div_type, data_type, right_shift, compare>(first, last, bin_cache, 0, bin_sizes, shift, comp);

+  	}

+

+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void 

+  	spread_sort(RandomAccessIter first, RandomAccessIter last, div_type, data_type, right_shift shift)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		spread_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(first, last, bin_cache, 0, bin_sizes, shift);

+  	}

+  }

+

+  //Top-level sorting call for integers

+  template <class RandomAccessIter>

+  inline void integer_sort(RandomAccessIter first, RandomAccessIter last) 

+  {

+  	//Don't sort if it's too small to optimize

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last);

+  	else

+  		detail::spread_sort(first, last, *first >> 0, *first);

+  }

+

+  //integer_sort with functors

+  template <class RandomAccessIter, class right_shift, class compare>

+  inline void integer_sort(RandomAccessIter first, RandomAccessIter last,

+  						right_shift shift, compare comp) {

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last, comp);

+  	else

+  		detail::spread_sort(first, last, shift(*first, 0), *first, shift, comp);

+  }

+

+  //integer_sort with right_shift functor

+  template <class RandomAccessIter, class right_shift>

+  inline void integer_sort(RandomAccessIter first, RandomAccessIter last,

+  						right_shift shift) {

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last);

+  	else

+  		detail::spread_sort(first, last, shift(*first, 0), *first, shift);

+  }

+

+  //------------------------------------------------------ float_sort source --------------------------------------

+  //Casts a RandomAccessIter to the specified data type

+  template<class cast_type, class RandomAccessIter>

+  inline cast_type

+  cast_float_iter(const RandomAccessIter & floatiter)

+  {

+  	cast_type result;

+  	std::memcpy(&result, &(*floatiter), sizeof(cast_type));

+  	return result;

+  }

+

+  //Casts a data element to the specified data type

+  template<class data_type, class cast_type>

+  inline cast_type

+  mem_cast(const data_type & data)

+  {

+  	cast_type result;

+  	std::memcpy(&result, &data, sizeof(cast_type));

+  	return result;

+  }

+

+  namespace detail {

+  	template <class RandomAccessIter, class div_type, class right_shift>

+  	inline void 

+  	find_extremes(RandomAccessIter current, RandomAccessIter last, div_type & max, div_type & min, right_shift shift)

+  	{

+  		min = max = shift(*current, 0);

+  		while(++current < last) {

+  			div_type value = shift(*current, 0);

+  			if(max < value)

+  				max = value;

+  			else if(value < min)

+  				min = value;

+  		}

+  	}

+

+  	//Specialized swap loops for floating-point casting

+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void inner_float_swap_loop(RandomAccessIter * bins, const RandomAccessIter & nextbinstart, unsigned ii

+  		, const unsigned log_divisor, const div_type div_min) 

+  	{

+  		RandomAccessIter * local_bin = bins + ii;

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			for(RandomAccessIter * target_bin = (bins + ((cast_float_iter<div_type, RandomAccessIter>(current) >> log_divisor) - div_min));  target_bin != local_bin; 

+  				target_bin = bins + ((cast_float_iter<div_type, RandomAccessIter>(current) >> log_divisor) - div_min)) {

+  				data_type tmp;

+  				RandomAccessIter b = (*target_bin)++;

+  				RandomAccessIter * b_bin = bins + ((cast_float_iter<div_type, RandomAccessIter>(b) >> log_divisor) - div_min);

+  				//Three-way swap; if the item to be swapped doesn't belong in the current bin, swap it to where it belongs

+  				if (b_bin != local_bin) {

+  					RandomAccessIter c = (*b_bin)++;

+  					tmp = *c;

+  					*c = *b;

+  				} 

+  				else

+  					tmp = *b;

+  				*b = *current;

+  				*current = tmp;

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  	}

+

+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void float_swap_loop(RandomAccessIter * bins, RandomAccessIter & nextbinstart, unsigned ii

+  		, const std::vector<size_t> &bin_sizes, const unsigned log_divisor, const div_type div_min) 

+  	{

+  		nextbinstart += bin_sizes[ii];

+  		inner_float_swap_loop<RandomAccessIter, div_type, data_type>(bins, nextbinstart, ii, log_divisor, div_min);

+  	}

+

+  	template <class RandomAccessIter, class cast_type>

+  	inline void 

+  	find_extremes(RandomAccessIter current, RandomAccessIter last, cast_type & max, cast_type & min)

+  	{

+  		min = max = cast_float_iter<cast_type, RandomAccessIter>(current);

+  		while(++current < last) {

+  			cast_type value = cast_float_iter<cast_type, RandomAccessIter>(current);

+  			if(max < value)

+  				max = value;

+  			else if(value < min)

+  				min = value;

+  		}

+  	}

+

+  	//Special-case sorting of positive floats with casting instead of a right_shift

+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void 

+  	positive_float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min);

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[(cast_float_iter<div_type, RandomAccessIter>(current++) >> log_divisor) - div_min]++;

+  		bins[0] = first;

+  		for(unsigned u = 0; u < bin_count - 1; u++)

+  			bins[u + 1] = bins[u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned u = 0; u < bin_count - 1; ++u)

+  			float_swap_loop<RandomAccessIter, div_type, data_type>(bins, nextbinstart, u, bin_sizes, log_divisor, div_min);

+  		bins[bin_count - 1] = last;

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Recursing

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(unsigned u = cache_offset; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u]);

+  			else

+  				positive_float_sort_rec<RandomAccessIter, div_type, data_type>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes);

+  		}

+  	}

+

+  	//Sorting negative floats

+  	//Note that bins are iterated in reverse order because max_neg_float = min_neg_int

+  	//Recursive bucketing pass over a range known to hold only negative IEEE floats,
+  	//each key read as a signed integer via cast_float_iter.  bin_cache/bin_sizes are
+  	//scratch buffers shared across recursion levels; cache_offset is this level's
+  	//first slot in bin_cache.
+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void 

+  	negative_float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min);

+  		//All keys identical at this granularity: nothing to do

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[(cast_float_iter<div_type, RandomAccessIter>(current++) >> log_divisor) - div_min]++;

+  		//Bin start positions are assigned right-to-left so the highest integer key
+  		//(most-negative float) lands first, yielding ascending float order
+  		bins[bin_count - 1] = first;

+  		for(int ii = bin_count - 2; ii >= 0; --ii)

+  			bins[ii] = bins[ii + 1] + bin_sizes[ii + 1];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		//The last bin will always have the correct elements in it

+  		for(int ii = bin_count - 1; ii > 0; --ii)

+  			float_swap_loop<RandomAccessIter, div_type, data_type>(bins, nextbinstart, ii, bin_sizes, log_divisor, div_min);

+  		//Since we don't process the last bin, we need to update its end position

+  		bin_cache[cache_offset] = last;

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Recursing

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		//Small bins fall back to std::sort; large ones recurse another radix level

+  		for(int ii = cache_end - 1; ii >= (int)cache_offset; lastPos = bin_cache[ii], --ii) {

+  			size_t count = bin_cache[ii] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[ii]);

+  			else

+  				negative_float_sort_rec<RandomAccessIter, div_type, data_type>(lastPos, bin_cache[ii], bin_cache, cache_end, bin_sizes);

+  		}

+  	}

+

+  	//Sorting negative floats

+  	//Note that bins are iterated in reverse order because max_neg_float = min_neg_int

+  	//Functor variant: keys are extracted with the user-supplied right_shift functor
+  	//instead of a bitwise cast; otherwise identical to the cast version above.
+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void 

+  	negative_float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes, right_shift shift)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min, shift);

+  		//All keys identical at this granularity: nothing to do

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[shift(*(current++), log_divisor) - div_min]++;

+  		//Bin start positions assigned right-to-left (see reverse-order note above)

+  		bins[bin_count - 1] = first;

+  		for(int ii = bin_count - 2; ii >= 0; --ii)

+  			bins[ii] = bins[ii + 1] + bin_sizes[ii + 1];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		//The last bin will always have the correct elements in it

+  		for(int ii = bin_count - 1; ii > 0; --ii)

+  			swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, ii, shift, bin_sizes, log_divisor, div_min);

+  		//Since we don't process the last bin, we need to update its end position

+  		bin_cache[cache_offset] = last;

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Recursing

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		//Small bins fall back to std::sort; large ones recurse another radix level

+  		for(int ii = cache_end - 1; ii >= (int)cache_offset; lastPos = bin_cache[ii], --ii) {

+  			size_t count = bin_cache[ii] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[ii]);

+  			else

+  				negative_float_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(lastPos, bin_cache[ii], bin_cache, cache_end, bin_sizes, shift);

+  		}

+  	}

+

+  	//Sorting negative floats: right_shift + user-supplied compare variant.
+  	//comp is used only for the std::sort fallback on small bins; the radix pass
+  	//itself orders by the shift-extracted key.
+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift, class compare>

+  	inline void 

+  	negative_float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes, right_shift shift, compare comp)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min, shift);

+  		//All keys identical at this granularity: nothing to do

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[shift(*(current++), log_divisor) - div_min]++;

+  		//Bin start positions assigned right-to-left because max_neg_float = min_neg_int

+  		bins[bin_count - 1] = first;

+  		for(int ii = bin_count - 2; ii >= 0; --ii)

+  			bins[ii] = bins[ii + 1] + bin_sizes[ii + 1];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		//The last bin will always have the correct elements in it

+  		for(int ii = bin_count - 1; ii > 0; --ii)

+  			swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, ii, shift, bin_sizes, log_divisor, div_min);

+  		//Since we don't process the last bin, we need to update its end position

+  		bin_cache[cache_offset] = last;

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Recursing

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		//Small bins use std::sort with the user comparison; large ones recurse

+  		for(int ii = cache_end - 1; ii >= (int)cache_offset; lastPos = bin_cache[ii], --ii) {

+  			size_t count = bin_cache[ii] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[ii], comp);

+  			else

+  				negative_float_sort_rec<RandomAccessIter, div_type, data_type, right_shift, compare>(lastPos, bin_cache[ii], bin_cache, cache_end, bin_sizes, shift, comp);

+  		}

+  	}

+

+  	//Casting special-case for floating-point sorting

+  	//Top-level recursive pass over a mixed negative/positive range.  Negative bins
+  	//are laid out in reversed order (negative IEEE floats sort inversely to their
+  	//integer bit patterns), then negatives recurse via negative_float_sort_rec and
+  	//positives via positive_float_sort_rec.
+  	template <class RandomAccessIter, class div_type, class data_type>

+  	inline void 

+  	float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min);

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[(cast_float_iter<div_type, RandomAccessIter>(current++) >> log_divisor) - div_min]++;

+  		//The index of the first positive bin

+  		div_type first_positive = (div_min < 0) ? -div_min : 0;

+  		//Resetting if all bins are negative

+  		if(cache_offset + first_positive > cache_end)

+  			first_positive = cache_end - cache_offset;

+  		//Reversing the order of the negative bins

+  		//Note that because of the negative/positive ordering direction flip

+  		//We can not depend upon bin order and positions matching up

+  		//so bin_sizes must be reused to contain the end of the bin

+  		if(first_positive > 0) {

+  			bins[first_positive - 1] = first;

+  			for(int ii = first_positive - 2; ii >= 0; --ii) {

+  				bins[ii] = first + bin_sizes[ii + 1];

+  				bin_sizes[ii] += bin_sizes[ii + 1];

+  			}

+  			//Handling positives following negatives

+  			if((unsigned)first_positive < bin_count) {

+  				bins[first_positive] = first + bin_sizes[0];

+  				bin_sizes[first_positive] += bin_sizes[0];

+  			}

+  		}

+  		else

+  			bins[0] = first;

+  		//From here on bin_sizes[u] holds the (cumulative) END offset of bin u

+  		for(unsigned u = first_positive; u < bin_count - 1; u++) {

+  			bins[u + 1] = first + bin_sizes[u];

+  			bin_sizes[u + 1] += bin_sizes[u];

+  		}

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned u = 0; u < bin_count; ++u) {

+  			nextbinstart = first + bin_sizes[u];

+  			inner_float_swap_loop<RandomAccessIter, div_type, data_type>(bins, nextbinstart, u, log_divisor, div_min);

+  		}

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Handling negative values first

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(int ii = cache_offset + first_positive - 1; ii >= (int)cache_offset ; lastPos = bin_cache[ii--]) {

+  			size_t count = bin_cache[ii] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[ii]);

+  			//sort negative values using reversed-bin spread_sort

+  			else

+  				negative_float_sort_rec<RandomAccessIter, div_type, data_type>(lastPos, bin_cache[ii], bin_cache, cache_end, bin_sizes);

+  		}

+  		

+  		for(unsigned u = cache_offset + first_positive; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u]);

+  			//sort positive values using normal spread_sort

+  			else

+  				positive_float_sort_rec<RandomAccessIter, div_type, data_type>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes);

+  		}

+  	}

+

+  	//Functor implementation for recursive sorting

+  	//Same mixed negative/positive layout as the cast version above, but keys come
+  	//from the user-supplied right_shift functor; positives recurse through the
+  	//generic spread_sort_rec.
+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void 

+  	float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes, right_shift shift)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min, shift);

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[shift(*(current++), log_divisor) - div_min]++;

+  		//The index of the first positive bin

+  		div_type first_positive = (div_min < 0) ? -div_min : 0;

+  		//Resetting if all bins are negative

+  		if(cache_offset + first_positive > cache_end)

+  			first_positive = cache_end - cache_offset;

+  		//Reversing the order of the negative bins

+  		//Note that because of the negative/positive ordering direction flip

+  		//We can not depend upon bin order and positions matching up

+  		//so bin_sizes must be reused to contain the end of the bin

+  		if(first_positive > 0) {

+  			bins[first_positive - 1] = first;

+  			for(int ii = first_positive - 2; ii >= 0; --ii) {

+  				bins[ii] = first + bin_sizes[ii + 1];

+  				bin_sizes[ii] += bin_sizes[ii + 1];

+  			}

+  			//Handling positives following negatives

+  			if((unsigned)first_positive < bin_count) {

+  				bins[first_positive] = first + bin_sizes[0];

+  				bin_sizes[first_positive] += bin_sizes[0];

+  			}

+  		}

+  		else

+  			bins[0] = first;

+  		//From here on bin_sizes[u] holds the (cumulative) END offset of bin u

+  		for(unsigned u = first_positive; u < bin_count - 1; u++) {

+  			bins[u + 1] = first + bin_sizes[u];

+  			bin_sizes[u + 1] += bin_sizes[u];

+  		}

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned u = 0; u < bin_count; ++u) {

+  			nextbinstart = first + bin_sizes[u];

+  			inner_swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, u, shift, log_divisor, div_min);

+  		}

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Handling negative values first

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(int ii = cache_offset + first_positive - 1; ii >= (int)cache_offset ; lastPos = bin_cache[ii--]) {

+  			size_t count = bin_cache[ii] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[ii]);

+  			//sort negative values using reversed-bin spread_sort

+  			else

+  				negative_float_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(lastPos, bin_cache[ii], bin_cache, cache_end, bin_sizes, shift);

+  		}

+  		

+  		for(unsigned u = cache_offset + first_positive; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u]);

+  			//sort positive values using normal spread_sort

+  			else

+  				spread_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes, shift);

+  		}

+  	}

+

+  	//right_shift + compare variant of the recursive float sort; comp is used only
+  	//in the std::sort fallback on small bins (the compare template argument of the
+  	//recursive calls is left to be deduced from comp).
+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift, class compare>

+  	inline void 

+  	float_sort_rec(RandomAccessIter first, RandomAccessIter last, std::vector<RandomAccessIter> &bin_cache, unsigned cache_offset

+  					, std::vector<size_t> &bin_sizes, right_shift shift, compare comp)

+  	{

+  		div_type max, min;

+  		find_extremes(first, last, max, min, shift);

+  		if(max == min)

+  			return;

+  		unsigned log_divisor = get_log_divisor(last - first, rough_log_2_size((size_t)(max) - min));

+  		div_type div_min = min >> log_divisor;

+  		div_type div_max = max >> log_divisor;

+  		unsigned bin_count = div_max - div_min + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, bin_count);

+  			

+  		//Calculating the size of each bin

+  		for (RandomAccessIter current = first; current != last;)

+  			bin_sizes[shift(*(current++), log_divisor) - div_min]++;

+  		//The index of the first positive bin

+  		div_type first_positive = (div_min < 0) ? -div_min : 0;

+  		//Resetting if all bins are negative

+  		if(cache_offset + first_positive > cache_end)

+  			first_positive = cache_end - cache_offset;

+  		//Reversing the order of the negative bins

+  		//Note that because of the negative/positive ordering direction flip

+  		//We can not depend upon bin order and positions matching up

+  		//so bin_sizes must be reused to contain the end of the bin

+  		if(first_positive > 0) {

+  			bins[first_positive - 1] = first;

+  			for(int ii = first_positive - 2; ii >= 0; --ii) {

+  				bins[ii] = first + bin_sizes[ii + 1];

+  				bin_sizes[ii] += bin_sizes[ii + 1];

+  			}

+  			//Handling positives following negatives

+  			if((unsigned)first_positive < bin_count) {

+  				bins[first_positive] = first + bin_sizes[0];

+  				bin_sizes[first_positive] += bin_sizes[0];

+  			}

+  		}

+  		else

+  			bins[0] = first;

+  		//From here on bin_sizes[u] holds the (cumulative) END offset of bin u

+  		for(unsigned u = first_positive; u < bin_count - 1; u++) {

+  			bins[u + 1] = first + bin_sizes[u];

+  			bin_sizes[u + 1] += bin_sizes[u];

+  		}

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		for(unsigned u = 0; u < bin_count; ++u) {

+  			nextbinstart = first + bin_sizes[u];

+  			inner_swap_loop<RandomAccessIter, div_type, data_type, right_shift>(bins, nextbinstart, u, shift, log_divisor, div_min);

+  		}

+  		

+  		//Return if we've completed bucketsorting

+  		if(!log_divisor)

+  			return;

+  		

+  		//Handling negative values first

+  		size_t max_count = get_max_count(log_divisor, last - first);

+  		RandomAccessIter lastPos = first;

+  		for(int ii = cache_offset + first_positive - 1; ii >= (int)cache_offset ; lastPos = bin_cache[ii--]) {

+  			size_t count = bin_cache[ii] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[ii]);

+  			//sort negative values using reversed-bin spread_sort

+  			else

+  				negative_float_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(lastPos, bin_cache[ii], bin_cache, cache_end, bin_sizes, shift, comp);

+  		}

+  		

+  		for(unsigned u = cache_offset + first_positive; u < cache_end; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			if(count < 2)

+  				continue;

+  			if(count < max_count)

+  				std::sort(lastPos, bin_cache[u]);

+  			//sort positive values using normal spread_sort

+  			else

+  				spread_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(lastPos, bin_cache[u], bin_cache, cache_end, bin_sizes, shift, comp);

+  		}

+  	}

+

+  	//Entry helper: allocates the shared scratch vectors and starts the cast-based
+  	//recursion.  The unnamed cast_type/data_type arguments are type tags used only
+  	//for template deduction.
+  	template <class RandomAccessIter, class cast_type, class data_type>

+  	inline void 

+  	float_Sort(RandomAccessIter first, RandomAccessIter last, cast_type, data_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		float_sort_rec<RandomAccessIter, cast_type, data_type>(first, last, bin_cache, 0, bin_sizes);

+  	}

+

+  	//Entry helper for the right_shift-functor variant; div_type/data_type are
+  	//unnamed type tags used only for template deduction.
+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift>

+  	inline void 

+  	float_Sort(RandomAccessIter first, RandomAccessIter last, div_type, data_type, right_shift shift)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		float_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(first, last, bin_cache, 0, bin_sizes, shift);

+  	}

+

+  	//Entry helper for the right_shift + compare variant; the compare template
+  	//argument of float_sort_rec is deduced from comp.
+  	template <class RandomAccessIter, class div_type, class data_type, class right_shift, class compare>

+  	inline void 

+  	float_Sort(RandomAccessIter first, RandomAccessIter last, div_type, data_type, right_shift shift, compare comp)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		float_sort_rec<RandomAccessIter, div_type, data_type, right_shift>(first, last, bin_cache, 0, bin_sizes, shift, comp);

+  	}

+  }

+

+  //float_sort with casting

+  //The cast_type must be equal in size to the data type, and must be a signed integer

+  //Public entry point.  Ranges below MIN_SORT_SIZE go straight to std::sort, where
+  //radix bucketing has no payoff.  cVal is only a deduction tag for cast_type.
+  template <class RandomAccessIter, class cast_type>

+  inline void float_sort_cast(RandomAccessIter first, RandomAccessIter last, cast_type cVal) 

+  {

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last);

+  	else

+  		detail::float_Sort(first, last, cVal, *first);

+  }

+

+  //float_sort with casting to an int

+  //Only use this with IEEE floating-point numbers

+  //Convenience wrapper: sorts floats by reinterpreting their bits as int
+  //(assumes sizeof(float) == sizeof(int) on the target — typical IEEE single).
+  template <class RandomAccessIter>

+  inline void float_sort_cast_to_int(RandomAccessIter first, RandomAccessIter last) 

+  {

+  	int cVal = 0;

+  	float_sort_cast(first, last, cVal);

+  }

+

+  //float_sort with functors

+  //Public entry point taking a right_shift key-extraction functor.
+  //shift(*first, 0) is evaluated once solely to deduce the integer key type.
+  template <class RandomAccessIter, class right_shift>

+  inline void float_sort(RandomAccessIter first, RandomAccessIter last, right_shift shift) 

+  {

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last);

+  	else

+  		detail::float_Sort(first, last, shift(*first, 0), *first, shift);

+  }

+

+  //float_sort with a right_shift functor plus a user comparison (used for the
+  //small-range std::sort path and the comparison fallbacks inside the recursion).
+  template <class RandomAccessIter, class right_shift, class compare>

+  inline void float_sort(RandomAccessIter first, RandomAccessIter last, right_shift shift, compare comp) 

+  {

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last, comp);

+  	else

+  		detail::float_Sort(first, last, shift(*first, 0), *first, shift, comp);

+  }

+

+  //------------------------------------------------- string_sort source ---------------------------------------------

+  namespace detail {

+  	//Offsetting on identical characters.  This function works a character at a time for optimal worst-case performance.

+  	//Advances char_offset past any prefix shared by every non-empty string in
+  	//[first, finish), using *first as the reference.
+  	//NOTE(review): callers strip leading/trailing empties first, so *first is
+  	//assumed to have size() > char_offset here.
+  	template<class RandomAccessIter>

+  	inline void

+  	update_offset(RandomAccessIter first, RandomAccessIter finish, unsigned &char_offset)

+  	{

+  		unsigned nextOffset = char_offset;

+  		bool done = false;

+  		while(!done) {

+  			RandomAccessIter curr = first;

+  			do {

+  				//ignore empties, but if the nextOffset would exceed the length or not match, exit; we've found the last matching character

+  				if((*curr).size() > char_offset && ((*curr).size() <= (nextOffset + 1) || (*curr)[nextOffset] != (*first)[nextOffset])) {

+  					done = true;

+  					break;

+  				}

+  			} while(++curr != finish);

+  			if(!done)

+  				++nextOffset;

+  		} 

+  		char_offset = nextOffset;

+  	}

+

+  	//Offsetting on identical characters.  This function works a character at a time for optimal worst-case performance.

+  	//Functor variant: string access goes through the user-supplied getchar/length
+  	//functors instead of operator[]/size().  Same shared-prefix advance as above.
+  	template<class RandomAccessIter, class get_char, class get_length>

+  	inline void

+  	update_offset(RandomAccessIter first, RandomAccessIter finish, unsigned &char_offset, get_char getchar, get_length length)

+  	{

+  		unsigned nextOffset = char_offset;

+  		bool done = false;

+  		while(!done) {

+  			RandomAccessIter curr = first;

+  			do {

+  				//ignore empties, but if the nextOffset would exceed the length or not match, exit; we've found the last matching character

+  				if(length(*curr) > char_offset && (length(*curr) <= (nextOffset + 1) || getchar((*curr), nextOffset) != getchar((*first), nextOffset))) {

+  					done = true;

+  					break;

+  				}

+  			} while(++curr != finish);

+  			if(!done)

+  				++nextOffset;

+  		} 

+  		char_offset = nextOffset;

+  	}

+

+  	//A comparison functor for strings that assumes they are identical up to char_offset

+  	//Less-than on the suffix starting at fchar_offset, comparing characters as
+  	//unsignedchar_type; shorter string wins ties (standard lexicographic order).
+  	template<class data_type, class unsignedchar_type>

+  	struct offset_lessthan {

+  		offset_lessthan(unsigned char_offset) : fchar_offset(char_offset){}

+  		inline bool operator()(const data_type &x, const data_type &y) const 

+  		{

+  			unsigned minSize = std::min(x.size(), y.size());

+  			for(unsigned u = fchar_offset; u < minSize; ++u) {

+  				if(static_cast<unsignedchar_type>(x[u]) < static_cast<unsignedchar_type>(y[u]))

+  					return true;

+  				else if(static_cast<unsignedchar_type>(y[u]) < static_cast<unsignedchar_type>(x[u]))

+  					return false;

+  			}

+  			return x.size() < y.size();

+  		}

+  		//First character position that may differ between compared strings

+  		unsigned fchar_offset;

+  	};

+

+  	//A comparison functor for strings that assumes they are identical up to char_offset

+  	//Greater-than counterpart of offset_lessthan, used by the reverse string sort;
+  	//longer string wins ties.
+  	template<class data_type, class unsignedchar_type>

+  	struct offset_greaterthan {

+  		offset_greaterthan(unsigned char_offset) : fchar_offset(char_offset){}

+  		inline bool operator()(const data_type &x, const data_type &y) const 

+  		{

+  			unsigned minSize = std::min(x.size(), y.size());

+  			for(unsigned u = fchar_offset; u < minSize; ++u) {

+  				if(static_cast<unsignedchar_type>(x[u]) > static_cast<unsignedchar_type>(y[u]))

+  					return true;

+  				else if(static_cast<unsignedchar_type>(y[u]) > static_cast<unsignedchar_type>(x[u]))

+  					return false;

+  			}

+  			return x.size() > y.size();

+  		}

+  		//First character position that may differ between compared strings

+  		unsigned fchar_offset;

+  	};

+

+  	//A comparison functor for strings that assumes they are identical up to char_offset

+  	//Functor-access variant of offset_lessthan: characters/lengths are obtained via
+  	//getchar/length members, which are default-constructed — get_char and
+  	//get_length are therefore assumed stateless and default-constructible.
+  	template<class data_type, class get_char, class get_length>

+  	struct offset_char_lessthan {

+  		offset_char_lessthan(unsigned char_offset) : fchar_offset(char_offset){}

+  		inline bool operator()(const data_type &x, const data_type &y) const 

+  		{

+  			unsigned minSize = std::min(length(x), length(y));

+  			for(unsigned u = fchar_offset; u < minSize; ++u) {

+  				if(getchar(x, u) < getchar(y, u))

+  					return true;

+  				else if(getchar(y, u) < getchar(x, u))

+  					return false;

+  			}

+  			return length(x) < length(y);

+  		}

+  		//First character position that may differ between compared strings

+  		unsigned fchar_offset;

+  		get_char getchar;

+  		get_length length;

+  	};

+

+  	//String sorting recursive implementation

+  	//One radix pass on the character at char_offset: 256 character bins plus a
+  	//leading bin (index 0) for strings that end before char_offset, which are
+  	//already fully sorted and are placed first.
+  	template <class RandomAccessIter, class data_type, class unsignedchar_type>

+  	inline void 

+  	string_sort_rec(RandomAccessIter first, RandomAccessIter last, unsigned char_offset, std::vector<RandomAccessIter> &bin_cache

+  		, unsigned cache_offset, std::vector<size_t> &bin_sizes)

+  	{

+  		//This section is not strictly necessary, but makes handling of long identical substrings much faster, with a mild average performance impact.

+  		//Iterate to the end of the empties.  If all empty, return

+  		while((*first).size() <= char_offset) {

+  			if(++first == last)

+  				return;

+  		}

+  		RandomAccessIter finish = last - 1;

+  		//Getting the last non-empty

+  		for(;(*finish).size() <= char_offset; --finish) { }

+  		++finish;

+  		//Offsetting on identical characters.  This section works a character at a time for optimal worst-case performance.

+  		update_offset(first, finish, char_offset);

+  		

+  		const unsigned bin_count = (1 << (sizeof(unsignedchar_type)*8));

+  		//Equal worst-case between radix and comparison-based is when bin_count = n*log(n).

+  		const unsigned max_size = bin_count;

+  		const unsigned membin_count = bin_count + 1;

+  		unsigned cache_end;

+  		//bins is offset by one so bins[c] is the bin for character value c;
+  		//bin_cache[cache_offset] itself is the bin of too-short ("empty") strings

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, membin_count) + 1;

+  			

+  		//Calculating the size of each bin; this takes roughly 10% of runtime

+  		for (RandomAccessIter current = first; current != last; ++current) {

+  			if((*current).size() <= char_offset) {

+  				bin_sizes[0]++;

+  			}

+  			else

+  				bin_sizes[static_cast<unsignedchar_type>((*current)[char_offset]) + 1]++;

+  		}

+  		//Assign the bin positions

+  		bin_cache[cache_offset] = first;

+  		for(unsigned u = 0; u < membin_count - 1; u++)

+  			bin_cache[cache_offset + u + 1] = bin_cache[cache_offset + u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		//handling empty bins

+  		RandomAccessIter * local_bin = &(bin_cache[cache_offset]);

+  		nextbinstart +=	bin_sizes[0];

+  		RandomAccessIter * target_bin;

+  		//Iterating over each element in the bin of empties

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			//empties belong in this bin

+  			while((*current).size() > char_offset) {

+  				target_bin = bins + static_cast<unsignedchar_type>((*current)[char_offset]);

+  				iter_swap(current, (*target_bin)++);

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  		//iterate backwards to find the last bin with elements in it; this saves iterations in multiple loops

+  		unsigned last_bin = bin_count - 1;

+  		for(; last_bin && !bin_sizes[last_bin + 1]; --last_bin) { }

+  		//This dominates runtime, mostly in the swap and bin lookups

+  		for(unsigned u = 0; u < last_bin; ++u) {

+  			local_bin = bins + u;

+  			nextbinstart += bin_sizes[u + 1];

+  			//Iterating over each element in this bin

+  			for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  				//Swapping elements in current into place until the correct element has been swapped in

+  				for(target_bin = bins + static_cast<unsignedchar_type>((*current)[char_offset]);  target_bin != local_bin; 

+  					target_bin = bins + static_cast<unsignedchar_type>((*current)[char_offset]))

+  					iter_swap(current, (*target_bin)++);

+  			}

+  			*local_bin = nextbinstart;

+  		}

+  		bins[last_bin] = last;

+  		//Recursing

+  		RandomAccessIter lastPos = bin_cache[cache_offset];

+  		//Skip this loop for empties

+  		for(unsigned u = cache_offset + 1; u < cache_offset + last_bin + 2; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			//don't sort unless there are at least two items to compare

+  			if(count < 2)

+  				continue;

+  			//using std::sort if its worst-case is better

+  			if(count < max_size)

+  				std::sort(lastPos, bin_cache[u], offset_lessthan<data_type, unsignedchar_type>(char_offset + 1));

+  			else

+  				string_sort_rec<RandomAccessIter, data_type, unsignedchar_type>(lastPos, bin_cache[u], char_offset + 1, bin_cache, cache_end, bin_sizes);

+  		}

+  	}

+

+  	//Sorts strings in reverse order, with empties at the end

+  	//Mirror image of string_sort_rec: character bins are filled in descending
+  	//character order (target bin computed as end_bin - character), and the bin of
+  	//too-short strings sits at the END of the range instead of the front.
+  	template <class RandomAccessIter, class data_type, class unsignedchar_type>

+  	inline void 

+  	reverse_string_sort_rec(RandomAccessIter first, RandomAccessIter last, unsigned char_offset, std::vector<RandomAccessIter> &bin_cache

+  		, unsigned cache_offset, std::vector<size_t> &bin_sizes)

+  	{

+  		//This section is not strictly necessary, but makes handling of long identical substrings much faster, with a mild average performance impact.

+  		RandomAccessIter curr = first;

+  		//Iterate to the end of the empties.  If all empty, return

+  		while((*curr).size() <= char_offset) {

+  			if(++curr == last)

+  				return;

+  		}

+  		//Getting the last non-empty

+  		while((*(--last)).size() <= char_offset) { }

+  		++last;

+  		//Offsetting on identical characters.  This section works a character at a time for optimal worst-case performance.

+  		update_offset(curr, last, char_offset);

+  		RandomAccessIter * target_bin;

+  		

+  		const unsigned bin_count = (1 << (sizeof(unsignedchar_type)*8));

+  		//Equal worst-case between radix and comparison-based is when bin_count = n*log(n).

+  		const unsigned max_size = bin_count;

+  		const unsigned membin_count = bin_count + 1;

+  		const unsigned max_bin = bin_count - 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, membin_count);

+  		//end_bin anchors the reversed mapping: bin for character c is end_bin - c

+  		RandomAccessIter * end_bin = &(bin_cache[cache_offset + max_bin]);

+  			

+  		//Calculating the size of each bin; this takes roughly 10% of runtime

+  		for (RandomAccessIter current = first; current != last; ++current) {

+  			if((*current).size() <= char_offset) {

+  				bin_sizes[bin_count]++;

+  			}

+  			else

+  				bin_sizes[max_bin - static_cast<unsignedchar_type>((*current)[char_offset])]++;

+  		}

+  		//Assign the bin positions

+  		bin_cache[cache_offset] = first;

+  		for(unsigned u = 0; u < membin_count - 1; u++)

+  			bin_cache[cache_offset + u + 1] = bin_cache[cache_offset + u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = last;

+  		//handling empty bins

+  		RandomAccessIter * local_bin = &(bin_cache[cache_offset + bin_count]);

+  		RandomAccessIter lastFull = *local_bin;

+  		//Iterating over each element in the bin of empties

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			//empties belong in this bin

+  			while((*current).size() > char_offset) {

+  				target_bin = end_bin - static_cast<unsignedchar_type>((*current)[char_offset]);

+  				iter_swap(current, (*target_bin)++);

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  		nextbinstart = first;

+  		//iterate backwards to find the last bin with elements in it; this saves iterations in multiple loops

+  		unsigned last_bin = max_bin;

+  		for(; last_bin && !bin_sizes[last_bin]; --last_bin) { }

+  		//This dominates runtime, mostly in the swap and bin lookups

+  		for(unsigned u = 0; u < last_bin; ++u) {

+  			local_bin = bins + u;

+  			nextbinstart += bin_sizes[u];

+  			//Iterating over each element in this bin

+  			for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  				//Swapping elements in current into place until the correct element has been swapped in

+  				for(target_bin = end_bin - static_cast<unsignedchar_type>((*current)[char_offset]);  target_bin != local_bin; 

+  					target_bin = end_bin - static_cast<unsignedchar_type>((*current)[char_offset]))

+  					iter_swap(current, (*target_bin)++);

+  			}

+  			*local_bin = nextbinstart;

+  		}

+  		bins[last_bin] = lastFull;

+  		//Recursing

+  		RandomAccessIter lastPos = first;

+  		//Skip this loop for empties

+  		for(unsigned u = cache_offset; u <= cache_offset + last_bin; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			//don't sort unless there are at least two items to compare

+  			if(count < 2)

+  				continue;

+  			//using std::sort if its worst-case is better

+  			if(count < max_size)

+  				std::sort(lastPos, bin_cache[u], offset_greaterthan<data_type, unsignedchar_type>(char_offset + 1));

+  			else

+  				reverse_string_sort_rec<RandomAccessIter, data_type, unsignedchar_type>(lastPos, bin_cache[u], char_offset + 1, bin_cache, cache_end, bin_sizes);

+  		}

+  	}

+

+  	//String sorting recursive implementation

+  	template <class RandomAccessIter, class data_type, class unsignedchar_type, class get_char, class get_length>

+  	inline void 

+  	string_sort_rec(RandomAccessIter first, RandomAccessIter last, unsigned char_offset, std::vector<RandomAccessIter> &bin_cache

+  		, unsigned cache_offset, std::vector<size_t> &bin_sizes, get_char getchar, get_length length)

+  	{

+  		//This section is not strictly necessary, but makes handling of long identical substrings much faster, with a mild average performance impact.

+  		//Iterate to the end of the empties.  If all empty, return

+  		while(length(*first) <= char_offset) {

+  			if(++first == last)

+  				return;

+  		}

+  		RandomAccessIter finish = last - 1;

+  		//Getting the last non-empty

+  		for(;length(*finish) <= char_offset; --finish) { }

+  		++finish;

+  		update_offset(first, finish, char_offset, getchar, length);

+  		

+  		const unsigned bin_count = (1 << (sizeof(unsignedchar_type)*8));

+  		//Equal worst-case between radix and comparison-based is when bin_count = n*log(n).

+  		const unsigned max_size = bin_count;

+  		const unsigned membin_count = bin_count + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, membin_count) + 1;

+  			

+  		//Calculating the size of each bin; this takes roughly 10% of runtime

+  		for (RandomAccessIter current = first; current != last; ++current) {

+  			if(length(*current) <= char_offset) {

+  				bin_sizes[0]++;

+  			}

+  			else

+  				bin_sizes[getchar((*current), char_offset) + 1]++;

+  		}

+  		//Assign the bin positions

+  		bin_cache[cache_offset] = first;

+  		for(unsigned u = 0; u < membin_count - 1; u++)

+  			bin_cache[cache_offset + u + 1] = bin_cache[cache_offset + u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		//handling empty bins

+  		RandomAccessIter * local_bin = &(bin_cache[cache_offset]);

+  		nextbinstart +=	bin_sizes[0];

+  		RandomAccessIter * target_bin;

+  		//Iterating over each element in the bin of empties

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			//empties belong in this bin

+  			while(length(*current) > char_offset) {

+  				target_bin = bins + getchar((*current), char_offset);

+  				iter_swap(current, (*target_bin)++);

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  		//iterate backwards to find the last bin with elements in it; this saves iterations in multiple loops

+  		unsigned last_bin = bin_count - 1;

+  		for(; last_bin && !bin_sizes[last_bin + 1]; --last_bin) { }

+  		//This dominates runtime, mostly in the swap and bin lookups

+  		for(unsigned ii = 0; ii < last_bin; ++ii) {

+  			local_bin = bins + ii;

+  			nextbinstart += bin_sizes[ii + 1];

+  			//Iterating over each element in this bin

+  			for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  				//Swapping elements in current into place until the correct element has been swapped in

+  				for(target_bin = bins + getchar((*current), char_offset);  target_bin != local_bin; 

+  					target_bin = bins + getchar((*current), char_offset))

+  					iter_swap(current, (*target_bin)++);

+  			}

+  			*local_bin = nextbinstart;

+  		}

+  		bins[last_bin] = last;

+  		

+  		//Recursing

+  		RandomAccessIter lastPos = bin_cache[cache_offset];

+  		//Skip this loop for empties

+  		for(unsigned u = cache_offset + 1; u < cache_offset + last_bin + 2; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			//don't sort unless there are at least two items to compare

+  			if(count < 2)

+  				continue;

+  			//using std::sort if its worst-case is better

+  			if(count < max_size)

+  				std::sort(lastPos, bin_cache[u], offset_char_lessthan<data_type, get_char, get_length>(char_offset + 1));

+  			else

+  				string_sort_rec<RandomAccessIter, data_type, unsignedchar_type, get_char, get_length>(lastPos, bin_cache[u], char_offset + 1, bin_cache, cache_end, bin_sizes, getchar, length);

+  		}

+  	}

+

+  	//String sorting recursive implementation

+  	template <class RandomAccessIter, class data_type, class unsignedchar_type, class get_char, class get_length, class compare>

+  	inline void 

+  	string_sort_rec(RandomAccessIter first, RandomAccessIter last, unsigned char_offset, std::vector<RandomAccessIter> &bin_cache

+  		, unsigned cache_offset, std::vector<size_t> &bin_sizes, get_char getchar, get_length length, compare comp)

+  	{

+  		//This section is not strictly necessary, but makes handling of long identical substrings much faster, with a mild average performance impact.

+  		//Iterate to the end of the empties.  If all empty, return

+  		while(length(*first) <= char_offset) {

+  			if(++first == last)

+  				return;

+  		}

+  		RandomAccessIter finish = last - 1;

+  		//Getting the last non-empty

+  		for(;length(*finish) <= char_offset; --finish) { }

+  		++finish;

+  		update_offset(first, finish, char_offset, getchar, length);

+  		

+  		const unsigned bin_count = (1 << (sizeof(unsignedchar_type)*8));

+  		//Equal worst-case between radix and comparison-based is when bin_count = n*log(n).

+  		const unsigned max_size = bin_count;

+  		const unsigned membin_count = bin_count + 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, membin_count) + 1;

+  			

+  		//Calculating the size of each bin; this takes roughly 10% of runtime

+  		for (RandomAccessIter current = first; current != last; ++current) {

+  			if(length(*current) <= char_offset) {

+  				bin_sizes[0]++;

+  			}

+  			else

+  				bin_sizes[getchar((*current), char_offset) + 1]++;

+  		}

+  		//Assign the bin positions

+  		bin_cache[cache_offset] = first;

+  		for(unsigned u = 0; u < membin_count - 1; u++)

+  			bin_cache[cache_offset + u + 1] = bin_cache[cache_offset + u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = first;

+  		//handling empty bins

+  		RandomAccessIter * local_bin = &(bin_cache[cache_offset]);

+  		nextbinstart +=	bin_sizes[0];

+  		RandomAccessIter * target_bin;

+  		//Iterating over each element in the bin of empties

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			//empties belong in this bin

+  			while(length(*current) > char_offset) {

+  				target_bin = bins + getchar((*current), char_offset);

+  				iter_swap(current, (*target_bin)++);

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  		//iterate backwards to find the last bin with elements in it; this saves iterations in multiple loops

+  		unsigned last_bin = bin_count - 1;

+  		for(; last_bin && !bin_sizes[last_bin + 1]; --last_bin) { }

+  		//This dominates runtime, mostly in the swap and bin lookups

+  		for(unsigned u = 0; u < last_bin; ++u) {

+  			local_bin = bins + u;

+  			nextbinstart += bin_sizes[u + 1];

+  			//Iterating over each element in this bin

+  			for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  				//Swapping elements in current into place until the correct element has been swapped in

+  				for(target_bin = bins + getchar((*current), char_offset);  target_bin != local_bin; 

+  					target_bin = bins + getchar((*current), char_offset))

+  					iter_swap(current, (*target_bin)++);

+  			}

+  			*local_bin = nextbinstart;

+  		}

+  		bins[last_bin] = last;

+  		

+  		//Recursing

+  		RandomAccessIter lastPos = bin_cache[cache_offset];

+  		//Skip this loop for empties

+  		for(unsigned u = cache_offset + 1; u < cache_offset + last_bin + 2; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			//don't sort unless there are at least two items to compare

+  			if(count < 2)

+  				continue;

+  			//using std::sort if its worst-case is better

+  			if(count < max_size)

+  				std::sort(lastPos, bin_cache[u], comp);

+  			else

+  				string_sort_rec<RandomAccessIter, data_type, unsignedchar_type, get_char, get_length, compare>(lastPos

+  					, bin_cache[u], char_offset + 1, bin_cache, cache_end, bin_sizes, getchar, length, comp);

+  		}

+  	}

+

+  	//Sorts strings in reverse order, with empties at the end

+  	template <class RandomAccessIter, class data_type, class unsignedchar_type, class get_char, class get_length, class compare>

+  	inline void 

+  	reverse_string_sort_rec(RandomAccessIter first, RandomAccessIter last, unsigned char_offset, std::vector<RandomAccessIter> &bin_cache

+  		, unsigned cache_offset, std::vector<size_t> &bin_sizes, get_char getchar, get_length length, compare comp)

+  	{

+  		//This section is not strictly necessary, but makes handling of long identical substrings much faster, with a mild average performance impact.

+  		RandomAccessIter curr = first;

+  		//Iterate to the end of the empties.  If all empty, return

+  		while(length(*curr) <= char_offset) {

+  			if(++curr == last)

+  				return;

+  		}

+  		//Getting the last non-empty

+  		while(length(*(--last)) <= char_offset) { }

+  		++last;

+  		//Offsetting on identical characters.  This section works a character at a time for optimal worst-case performance.

+  		update_offset(first, last, char_offset, getchar, length);

+  		

+  		const unsigned bin_count = (1 << (sizeof(unsignedchar_type)*8));

+  		//Equal worst-case between radix and comparison-based is when bin_count = n*log(n).

+  		const unsigned max_size = bin_count;

+  		const unsigned membin_count = bin_count + 1;

+  		const unsigned max_bin = bin_count - 1;

+  		unsigned cache_end;

+  		RandomAccessIter * bins = size_bins(bin_sizes, bin_cache, cache_offset, cache_end, membin_count);

+  		RandomAccessIter *end_bin = &(bin_cache[cache_offset + max_bin]);

+  			

+  		//Calculating the size of each bin; this takes roughly 10% of runtime

+  		for (RandomAccessIter current = first; current != last; ++current) {

+  			if(length(*current) <= char_offset) {

+  				bin_sizes[bin_count]++;

+  			}

+  			else

+  				bin_sizes[max_bin - getchar((*current), char_offset)]++;

+  		}

+  		//Assign the bin positions

+  		bin_cache[cache_offset] = first;

+  		for(unsigned u = 0; u < membin_count - 1; u++)

+  			bin_cache[cache_offset + u + 1] = bin_cache[cache_offset + u] + bin_sizes[u];

+  		

+  		//Swap into place

+  		RandomAccessIter nextbinstart = last;

+  		//handling empty bins

+  		RandomAccessIter * local_bin = &(bin_cache[cache_offset + bin_count]);

+  		RandomAccessIter lastFull = *local_bin;

+  		RandomAccessIter * target_bin;

+  		//Iterating over each element in the bin of empties

+  		for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  			//empties belong in this bin

+  			while(length(*current) > char_offset) {

+  				target_bin = end_bin - getchar((*current), char_offset);

+  				iter_swap(current, (*target_bin)++);

+  			}

+  		}

+  		*local_bin = nextbinstart;

+  		nextbinstart = first;

+  		//iterate backwards to find the last bin with elements in it; this saves iterations in multiple loops

+  		unsigned last_bin = max_bin;

+  		for(; last_bin && !bin_sizes[last_bin]; --last_bin) { }

+  		//This dominates runtime, mostly in the swap and bin lookups

+  		for(unsigned u = 0; u < last_bin; ++u) {

+  			local_bin = bins + u;

+  			nextbinstart += bin_sizes[u];

+  			//Iterating over each element in this bin

+  			for(RandomAccessIter current = *local_bin; current < nextbinstart; ++current) {

+  				//Swapping elements in current into place until the correct element has been swapped in

+  				for(target_bin = end_bin - getchar((*current), char_offset);  target_bin != local_bin; 

+  					target_bin = end_bin - getchar((*current), char_offset))

+  					iter_swap(current, (*target_bin)++);

+  			}

+  			*local_bin = nextbinstart;

+  		}

+  		bins[last_bin] = lastFull;

+  		//Recursing

+  		RandomAccessIter lastPos = first;

+  		//Skip this loop for empties

+  		for(unsigned u = cache_offset; u <= cache_offset + last_bin; lastPos = bin_cache[u], ++u) {

+  			size_t count = bin_cache[u] - lastPos;

+  			//don't sort unless there are at least two items to compare

+  			if(count < 2)

+  				continue;

+  			//using std::sort if its worst-case is better

+  			if(count < max_size)

+  				std::sort(lastPos, bin_cache[u], comp);

+  			else

+  				reverse_string_sort_rec<RandomAccessIter, data_type, unsignedchar_type, get_char, get_length, compare>(lastPos

+  					, bin_cache[u], char_offset + 1, bin_cache, cache_end, bin_sizes, getchar, length, comp);

+  		}

+  	}

+

+  	//Holds the bin vector and makes the initial recursive call

+  	template <class RandomAccessIter, class data_type, class unsignedchar_type>

+  	inline void 

+  	string_sort(RandomAccessIter first, RandomAccessIter last, data_type, unsignedchar_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		string_sort_rec<RandomAccessIter, data_type, unsignedchar_type>(first, last, 0, bin_cache, 0, bin_sizes);

+  	}

+

+  	//Holds the bin vector and makes the initial recursive call

+  	template <class RandomAccessIter, class data_type, class unsignedchar_type>

+  	inline void 

+  	reverse_string_sort(RandomAccessIter first, RandomAccessIter last, data_type, unsignedchar_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		reverse_string_sort_rec<RandomAccessIter, data_type, unsignedchar_type>(first, last, 0, bin_cache, 0, bin_sizes);

+  	}

+

+  	//Holds the bin vector and makes the initial recursive call

+  	template <class RandomAccessIter, class get_char, class get_length, class data_type, class unsignedchar_type>

+  	inline void 

+  	string_sort(RandomAccessIter first, RandomAccessIter last, get_char getchar, get_length length, data_type, unsignedchar_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		string_sort_rec<RandomAccessIter, data_type, unsignedchar_type, get_char, get_length>(first, last, 0, bin_cache, 0, bin_sizes, getchar, length);

+  	}

+

+  	//Holds the bin vector and makes the initial recursive call

+  	template <class RandomAccessIter, class get_char, class get_length, class compare, class data_type, class unsignedchar_type>

+  	inline void 

+  	string_sort(RandomAccessIter first, RandomAccessIter last, get_char getchar, get_length length, compare comp, data_type, unsignedchar_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		string_sort_rec<RandomAccessIter, data_type, unsignedchar_type, get_char, get_length, compare>(first, last, 0, bin_cache, 0, bin_sizes, getchar, length, comp);

+  	}

+

+  	//Holds the bin vector and makes the initial recursive call

+  	template <class RandomAccessIter, class get_char, class get_length, class compare, class data_type, class unsignedchar_type>

+  	inline void 

+  	reverse_string_sort(RandomAccessIter first, RandomAccessIter last, get_char getchar, get_length length, compare comp, data_type, unsignedchar_type)

+  	{

+  		std::vector<size_t> bin_sizes;

+  		std::vector<RandomAccessIter> bin_cache;

+  		reverse_string_sort_rec<RandomAccessIter, data_type, unsignedchar_type, get_char, get_length, compare>(first, last, 0, bin_cache, 0, bin_sizes, getchar, length, comp);

+  	}

+  }

+

+  //Allows character-type overloads

+  template <class RandomAccessIter, class unsignedchar_type>

+  inline void string_sort(RandomAccessIter first, RandomAccessIter last, unsignedchar_type unused) 

+  {

+  	//Don't sort if it's too small to optimize

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last);

+  	else

+  		detail::string_sort(first, last, *first, unused);

+  }

+

+  //Top-level sorting call; wraps using default of unsigned char

+  template <class RandomAccessIter>

+  inline void string_sort(RandomAccessIter first, RandomAccessIter last) 

+  {

+  	unsigned char unused = '\0';

+  	string_sort(first, last, unused);

+  }

+

+  //Allows character-type overloads

+  template <class RandomAccessIter, class compare, class unsignedchar_type>

+  inline void reverse_string_sort(RandomAccessIter first, RandomAccessIter last, compare comp, unsignedchar_type unused) 

+  {

+  	//Don't sort if it's too small to optimize

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last, comp);

+  	else

+  		detail::reverse_string_sort(first, last, *first, unused);

+  }

+

+  //Top-level sorting call; wraps using default of unsigned char

+  template <class RandomAccessIter, class compare>

+  inline void reverse_string_sort(RandomAccessIter first, RandomAccessIter last, compare comp) 

+  {

+  	unsigned char unused = '\0';

+  	reverse_string_sort(first, last, comp, unused);

+  }

+

+  template <class RandomAccessIter, class get_char, class get_length>

+  inline void string_sort(RandomAccessIter first, RandomAccessIter last, get_char getchar, get_length length) 

+  {

+  	//Don't sort if it's too small to optimize

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last);

+  	else {

+  		//skipping past empties at the beginning, which allows us to get the character type 

+  		//.empty() is not used so as not to require a user declaration of it

+  		while(!length(*first)) {

+  			if(++first == last)

+  				return;

+  		}

+  		detail::string_sort(first, last, getchar, length, *first, getchar((*first), 0));

+  	}

+  }

+

+  template <class RandomAccessIter, class get_char, class get_length, class compare>

+  inline void string_sort(RandomAccessIter first, RandomAccessIter last, get_char getchar, get_length length, compare comp) 

+  {

+  	//Don't sort if it's too small to optimize

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last, comp);

+  	else {

+  		//skipping past empties at the beginning, which allows us to get the character type 

+  		//.empty() is not used so as not to require a user declaration of it

+  		while(!length(*first)) {

+  			if(++first == last)

+  				return;

+  		}

+  		detail::string_sort(first, last, getchar, length, comp, *first, getchar((*first), 0));

+  	}

+  }

+

+  template <class RandomAccessIter, class get_char, class get_length, class compare>

+  inline void reverse_string_sort(RandomAccessIter first, RandomAccessIter last, get_char getchar, get_length length, compare comp) 

+  {

+  	//Don't sort if it's too small to optimize

+  	if(last - first < detail::MIN_SORT_SIZE)

+  		std::sort(first, last, comp);

+  	else {

+  		//skipping past empties at the beginning, which allows us to get the character type 

+  		//.empty() is not used so as not to require a user declaration of it

+  		while(!length(*(--last))) {

+  			//Note: if there is just one non-empty, and it's at the beginning, then it's already in sorted order

+  			if(first == last)

+  				return;

+  		}

+  		//making last just after the end of the non-empty part of the array

+  		++last;

+  		detail::reverse_string_sort(first, last, getchar, length, comp, *first, getchar((*first), 0));

+  	}

+  }

+}

+

+#endif

diff --git a/trunk/src/system_wrappers/source/system_wrappers.gyp b/trunk/src/system_wrappers/source/system_wrappers.gyp
new file mode 100644
index 0000000..79820e9
--- /dev/null
+++ b/trunk/src/system_wrappers/source/system_wrappers.gyp
@@ -0,0 +1,190 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ '../../build/common.gypi', ],
+  'targets': [
+    {
+      'target_name': 'system_wrappers',
+      'type': '<(library)',
+      'include_dirs': [
+        'spreadsortlib',
+        '../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+        ],
+      },
+      'sources': [
+        '../interface/aligned_malloc.h',
+        '../interface/atomic32_wrapper.h',
+        '../interface/condition_variable_wrapper.h',
+        '../interface/cpu_info.h',
+        '../interface/cpu_wrapper.h',
+        '../interface/cpu_features_wrapper.h',
+        '../interface/critical_section_wrapper.h',
+        '../interface/data_log.h',
+        '../interface/data_log_c.h',
+        '../interface/data_log_impl.h',
+        '../interface/event_wrapper.h',
+        '../interface/file_wrapper.h',
+        '../interface/fix_interlocked_exchange_pointer_win.h',
+        '../interface/list_wrapper.h',
+        '../interface/map_wrapper.h',
+        '../interface/ref_count.h',
+        '../interface/rw_lock_wrapper.h',
+        '../interface/scoped_ptr.h',
+        '../interface/scoped_refptr.h',
+        '../interface/sort.h',
+        '../interface/static_instance.h',
+        '../interface/thread_wrapper.h',
+        '../interface/tick_util.h',
+        '../interface/trace.h',
+        'aligned_malloc.cc',
+        'atomic32.cc',
+        'atomic32_linux.h',
+        'atomic32_mac.h',
+        'atomic32_win.h',
+        'condition_variable.cc',
+        'condition_variable_posix.cc',
+        'condition_variable_posix.h',
+        'condition_variable_win.cc',
+        'condition_variable_win.h',
+        'cpu.cc',
+        'cpu_no_op.cc',
+        'cpu_info.cc',
+        'cpu_linux.cc',
+        'cpu_linux.h',
+        'cpu_mac.cc',
+        'cpu_mac.h',
+        'cpu_win.cc',
+        'cpu_win.h',
+        'cpu_features.cc',
+        'critical_section.cc',
+        'critical_section_posix.cc',
+        'critical_section_posix.h',
+        'critical_section_win.cc',
+        'critical_section_win.h',
+        'data_log.cc',
+        'data_log_c.cc',
+        'data_log_no_op.cc',
+        'event.cc',
+        'event_posix.cc',
+        'event_posix.h',
+        'event_win.cc',
+        'event_win.h',
+        'file_impl.cc',
+        'file_impl.h',
+        'list_no_stl.cc',
+        'map.cc',
+        'rw_lock.cc',
+        'rw_lock_posix.cc',
+        'rw_lock_posix.h',
+        'rw_lock_win.cc',
+        'rw_lock_win.h',
+        'sort.cc',
+        'thread.cc',
+        'thread_posix.cc',
+        'thread_posix.h',
+        'thread_win.cc',
+        'thread_win.h',
+        'set_thread_name_win.h',
+        'trace_impl.cc',
+        'trace_impl.h',
+        'trace_impl_no_op.cc',
+        'trace_posix.cc',
+        'trace_posix.h',
+        'trace_win.cc',
+        'trace_win.h',
+      ],
+      'conditions': [
+        ['enable_data_logging==1', {
+          'sources!': [ 'data_log_no_op.cc', ],
+        },{
+          'sources!': [ 'data_log.cc', ],
+        },],
+        ['OS=="linux"', {
+          'link_settings': {
+            'libraries': [ '-lrt', ],
+          },
+        }],
+        ['OS=="mac"', {
+          'link_settings': {
+            'libraries': [ '$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework', ],
+          },
+        }],
+        ['OS=="win"', {
+          'link_settings': {
+            'libraries': [ '-lwinmm.lib', ],
+          },
+        }],
+        ['build_with_chromium==1', {
+          'sources!': [
+            'cpu.cc',
+            'cpu_linux.h',
+            'cpu_mac.h',
+            'cpu_win.h',
+            'trace_impl.cc',
+            'trace_impl.h',
+            'trace_posix.cc',
+            'trace_posix.h',
+            'trace_win.cc',
+            'trace_win.h',
+          ],
+        }, {
+          'sources!': [
+            'cpu_no_op.cc',
+            'trace_impl_no_op.cc',
+          ],
+        }]
+      ] # conditions
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'system_wrappers_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'system_wrappers',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'cpu_wrapper_unittest.cc',
+            'cpu_measurement_harness.h',
+            'cpu_measurement_harness.cc',
+            'list_unittest.cc',
+            'map_unittest.cc',
+            'data_log_unittest.cc',
+            'data_log_unittest_disabled.cc',
+            'data_log_helpers_unittest.cc',
+            'data_log_c_helpers_unittest.c',
+            'data_log_c_helpers_unittest.h',
+            'trace_unittest.cc',
+          ],
+          'conditions': [
+            ['enable_data_logging==1', {
+              'sources!': [ 'data_log_unittest_disabled.cc', ],
+            }, {
+              'sources!': [ 'data_log_unittest.cc', ],
+            }],
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/system_wrappers/source/thread.cc b/trunk/src/system_wrappers/source/thread.cc
new file mode 100644
index 0000000..32dcc63
--- /dev/null
+++ b/trunk/src/system_wrappers/source/thread.cc
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "thread_wrapper.h"
+
+#if defined(_WIN32)
+    #include "thread_win.h"
+#else
+    #include "thread_posix.h"
+#endif
+
+namespace webrtc {
+ThreadWrapper* ThreadWrapper::CreateThread(ThreadRunFunction func,
+                                           ThreadObj obj, ThreadPriority prio,
+                                           const char* threadName)
+{
+#if defined(_WIN32)
+    return new ThreadWindows(func, obj, prio, threadName);
+#else
+    return ThreadPosix::Create(func, obj, prio, threadName);
+#endif
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/thread_posix.cc b/trunk/src/system_wrappers/source/thread_posix.cc
new file mode 100644
index 0000000..05e7943
--- /dev/null
+++ b/trunk/src/system_wrappers/source/thread_posix.cc
@@ -0,0 +1,340 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "thread_posix.h"
+
+#include <errno.h>
+#include <string.h> // strncpy
+#include <time.h>   // nanosleep
+#include <unistd.h>
+#ifdef WEBRTC_LINUX
+#include <sys/types.h>
+#include <sched.h>
+#include <sys/syscall.h>
+#include <linux/unistd.h>
+#include <sys/prctl.h>
+#endif
+
+#if defined(WEBRTC_MAC)
+#include <mach/mach.h>
+#endif
+
+#include "event_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+extern "C"
+{
+    static void* StartThread(void* lpParameter)
+    {
+        static_cast<ThreadPosix*>(lpParameter)->Run();
+        return 0;
+    }
+}
+
+ThreadWrapper* ThreadPosix::Create(ThreadRunFunction func, ThreadObj obj,
+                                   ThreadPriority prio, const char* threadName)
+{
+    ThreadPosix* ptr = new ThreadPosix(func, obj, prio, threadName);
+    if (!ptr)
+    {
+        return NULL;
+    }
+    const int error = ptr->Construct();
+    if (error)
+    {
+        delete ptr;
+        return NULL;
+    }
+    return ptr;
+}
+
+ThreadPosix::ThreadPosix(ThreadRunFunction func, ThreadObj obj,
+                         ThreadPriority prio, const char* threadName)
+    : _runFunction(func),
+      _obj(obj),
+      _alive(false),
+      _dead(true),
+      _prio(prio),
+      _event(EventWrapper::Create()),
+      _name(),
+      _setThreadName(false),
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID))
+      _pid(-1),
+#endif
+      _attr(),
+      _thread(0)
+{
+    if (threadName != NULL)
+    {
+        _setThreadName = true;
+        strncpy(_name, threadName, kThreadMaxNameLength);
+        _name[kThreadMaxNameLength - 1] = '\0';
+    }
+}
+
+uint32_t ThreadWrapper::GetThreadId() {
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_LINUX)
+  return static_cast<uint32_t>(syscall(__NR_gettid));
+#elif defined(WEBRTC_MAC)
+  return static_cast<uint32_t>(mach_thread_self());
+#else
+  return reinterpret_cast<uint32_t>(pthread_self());
+#endif
+}
+
+int ThreadPosix::Construct()
+{
+    int result = 0;
+#if !defined(WEBRTC_ANDROID)
+    // Enable immediate cancellation if requested, see Shutdown()
+    result = pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
+    if (result != 0)
+    {
+        return -1;
+    }
+    result = pthread_setcanceltype(PTHREAD_CANCEL_ASYNCHRONOUS, NULL);
+    if (result != 0)
+    {
+        return -1;
+    }
+#endif
+    result = pthread_attr_init(&_attr);
+    if (result != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+ThreadPosix::~ThreadPosix()
+{
+    pthread_attr_destroy(&_attr);
+    delete _event;
+}
+
+#define HAS_THREAD_ID !defined(MAC_IPHONE) && !defined(MAC_IPHONE_SIM)  &&  \
+                      !defined(WEBRTC_MAC) && !defined(WEBRTC_MAC_INTEL) && \
+                      !defined(MAC_DYLIB)  && !defined(MAC_INTEL_DYLIB)
+#if HAS_THREAD_ID
+bool ThreadPosix::Start(unsigned int& threadID)
+#else
+bool ThreadPosix::Start(unsigned int& /*threadID*/)
+#endif
+{
+    if (!_runFunction)
+    {
+        return false;
+    }
+    int result = pthread_attr_setdetachstate(&_attr, PTHREAD_CREATE_DETACHED);
+    // Set the stack size to 1M.
+    result |= pthread_attr_setstacksize(&_attr, 1024*1024);
+#ifdef WEBRTC_THREAD_RR
+    const int policy = SCHED_RR;
+#else
+    const int policy = SCHED_FIFO;
+#endif
+    _event->Reset();
+    result |= pthread_create(&_thread, &_attr, &StartThread, this);
+    if (result != 0)
+    {
+        return false;
+    }
+
+    // Wait up to 10 seconds for the OS to call the callback function. Prevents a
+    // race condition if Stop() is called too quickly after Start().
+    if (kEventSignaled != _event->Wait(WEBRTC_EVENT_10_SEC))
+    {
+        // Timed out. Something went wrong.
+        _runFunction = NULL;
+        return false;
+    }
+
+#if HAS_THREAD_ID
+    threadID = static_cast<unsigned int>(_thread);
+#endif
+    sched_param param;
+
+    const int minPrio = sched_get_priority_min(policy);
+    const int maxPrio = sched_get_priority_max(policy);
+    if ((minPrio == EINVAL) || (maxPrio == EINVAL))
+    {
+        return false;
+    }
+
+    switch (_prio)
+    {
+    case kLowPriority:
+        param.sched_priority = minPrio + 1;
+        break;
+    case kNormalPriority:
+        param.sched_priority = (minPrio + maxPrio) / 2;
+        break;
+    case kHighPriority:
+        param.sched_priority = maxPrio - 3;
+        break;
+    case kHighestPriority:
+        param.sched_priority = maxPrio - 2;
+        break;
+    case kRealtimePriority:
+        param.sched_priority = maxPrio - 1;
+        break;
+    }
+    result = pthread_setschedparam(_thread, policy, &param);
+    if (result == EINVAL)
+    {
+        return false;
+    }
+    return true;
+}
+
+// CPU_ZERO and CPU_SET are not available in NDK r7, so disable
+// SetAffinity on Android for now.
+#if (defined(WEBRTC_LINUX) && (!defined(WEBRTC_ANDROID)))
+bool ThreadPosix::SetAffinity(const int* processorNumbers,
+                              const unsigned int amountOfProcessors) {
+  if (!processorNumbers || (amountOfProcessors == 0)) {
+    return false;
+  }
+  cpu_set_t mask;
+  CPU_ZERO(&mask);
+
+  for (unsigned int processor = 0;
+      processor < amountOfProcessors;
+      processor++) {
+    CPU_SET(processorNumbers[processor], &mask);
+  }
+#if defined(WEBRTC_ANDROID)
+  // Android.
+  const int result = syscall(__NR_sched_setaffinity,
+                             _pid,
+                             sizeof(mask),
+                             &mask);
+#else
+  // "Normal" Linux.
+  const int result = sched_setaffinity(_pid,
+                                       sizeof(mask),
+                                       &mask);
+#endif
+  if (result != 0) {
+    return false;
+  }
+  return true;
+}
+
+#else
+// NOTE: On Mac OS X, use the Thread affinity API in
+// /usr/include/mach/thread_policy.h: thread_policy_set and mach_thread_self()
+// instead of the Linux gettid() syscall.
+bool ThreadPosix::SetAffinity(const int* , const unsigned int)
+{
+    return false;
+}
+#endif
+
+void ThreadPosix::SetNotAlive()
+{
+    _alive = false;
+}
+
+bool ThreadPosix::Shutdown()
+{
+#if !defined(WEBRTC_ANDROID)
+    if (_thread && (0 != pthread_cancel(_thread)))
+    {
+        return false;
+    }
+
+    return true;
+#else
+    return false;
+#endif
+}
+
+bool ThreadPosix::Stop()
+{
+    _alive = false;
+
+    // TODO (hellner) why not use an event here?
+    // Wait up to 10 seconds for the thread to terminate
+    for (int i = 0; i < 1000 && !_dead; i++)
+    {
+        timespec t;
+        t.tv_sec = 0;
+        t.tv_nsec = 10*1000*1000;
+        nanosleep(&t, NULL);
+    }
+    if (_dead)
+    {
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
+void ThreadPosix::Run()
+{
+    _alive = true;
+    _dead  = false;
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID))
+    _pid = GetThreadId();
+#endif
+    // The event the Start() is waiting for.
+    _event->Set();
+
+    if (_setThreadName)
+    {
+#ifdef WEBRTC_LINUX
+        prctl(PR_SET_NAME, (unsigned long)_name, 0, 0, 0);
+#endif
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility,-1,
+                     "Thread with name:%s started ", _name);
+    } else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
+                     "Thread without name started");
+    }
+    do
+    {
+        if (_runFunction)
+        {
+            if (!_runFunction(_obj))
+            {
+                _alive = false;
+            }
+        }
+        else
+        {
+            _alive = false;
+        }
+    }
+    while (_alive);
+
+    if (_setThreadName)
+    {
+        // Don't set the name for the trace thread because it may cause a
+        // deadlock. TODO (hellner) there should be a better solution than
+        // coupling the thread and the trace class like this.
+        if (strcmp(_name, "Trace"))
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceUtility,-1,
+                         "Thread with name:%s stopped", _name);
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility,-1,
+                     "Thread without name stopped");
+    }
+    _dead = true;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/thread_posix.h b/trunk/src/system_wrappers/source/thread_posix.h
new file mode 100644
index 0000000..fc5757d
--- /dev/null
+++ b/trunk/src/system_wrappers/source/thread_posix.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_POSIX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_POSIX_H_
+
+#include "thread_wrapper.h"
+#include <pthread.h>
+
+namespace webrtc {
+class EventWrapper;
+
+// POSIX (pthread) implementation of the ThreadWrapper interface.
+class ThreadPosix : public ThreadWrapper
+{
+public:
+    // Factory used on POSIX platforms; returns NULL on failure.
+    static ThreadWrapper* Create(ThreadRunFunction func, ThreadObj obj,
+                                 ThreadPriority prio, const char* threadName);
+
+    ThreadPosix(ThreadRunFunction func, ThreadObj obj, ThreadPriority prio,
+                const char* threadName);
+    ~ThreadPosix();
+
+    // From ThreadWrapper
+    virtual void SetNotAlive();
+    virtual bool Start(unsigned int& id);
+    // Not implemented on Mac
+    virtual bool SetAffinity(const int* processorNumbers,
+                             unsigned int amountOfProcessors);
+    virtual bool Stop();
+    virtual bool Shutdown();
+
+    // Thread entry point; loops over the run-function until stopped.
+    void Run();
+
+private:
+    int Construct();
+
+private:
+    // processing function
+    ThreadRunFunction   _runFunction;
+    ThreadObj           _obj;
+
+    // internal state
+    bool                    _alive;
+    bool                    _dead;
+    ThreadPriority          _prio;
+    EventWrapper*           _event;
+
+    // zero-terminated thread name string
+    char                    _name[kThreadMaxNameLength];
+    bool                    _setThreadName;
+
+    // handle to thread
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID))
+    pid_t                   _pid;
+#endif
+    pthread_attr_t          _attr;
+    pthread_t               _thread;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_POSIX_H_
diff --git a/trunk/src/system_wrappers/source/thread_win.cc b/trunk/src/system_wrappers/source/thread_win.cc
new file mode 100644
index 0000000..4599bed
--- /dev/null
+++ b/trunk/src/system_wrappers/source/thread_win.cc
@@ -0,0 +1,238 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "thread_win.h"
+
+#include <assert.h>
+#include <process.h>
+#include <stdio.h>
+#include <windows.h>
+
+#include "set_thread_name_win.h"
+#include "trace.h"
+
+#if defined(_WIN32)
+// VS 2005: Disable warnings for default initialized arrays.
+#pragma warning(disable:4351)
+#endif
+
+namespace webrtc {
+// Constructs the wrapper; the OS thread itself is not created until Start().
+// threadName, when non-NULL, is copied and later shown in the VS debugger.
+ThreadWindows::ThreadWindows(ThreadRunFunction func, ThreadObj obj,
+                             ThreadPriority prio, const char* threadName)
+    : ThreadWrapper(),
+      _runFunction(func),
+      _obj(obj),
+      _alive(false),
+      _dead(true),
+      _doNotCloseHandle(false),
+      _prio(prio),
+      _event(NULL),
+      _thread(NULL),
+      _id(0),
+      _name(),
+      _setThreadName(false)
+{
+    _event = EventWrapper::Create();
+    _critsectStop = CriticalSectionWrapper::CreateCriticalSection();
+    if (threadName != NULL)
+    {
+        // Set the thread name to appear in the VS debugger.
+        _setThreadName = true;
+        strncpy(_name, threadName, kThreadMaxNameLength);
+        // strncpy does not NULL-terminate when threadName fills the buffer;
+        // terminate explicitly so later "%s" formatting of _name is safe.
+        _name[kThreadMaxNameLength - 1] = '\0';
+    }
+}
+
+// Releases the thread handle and the owned event/critical-section objects.
+ThreadWindows::~ThreadWindows()
+{
+#ifdef _DEBUG
+    // The thread is expected to have been stopped before destruction.
+    assert(!_alive);
+#endif
+    if (_thread)
+    {
+        CloseHandle(_thread);
+    }
+    if(_event)
+    {
+        delete _event;
+    }
+    if(_critsectStop)
+    {
+        delete _critsectStop;
+    }
+}
+
+// Returns the OS thread id of the calling thread (not of this object).
+uint32_t ThreadWrapper::GetThreadId() {
+  return GetCurrentThreadId();
+}
+
+// _beginthreadex trampoline: forwards execution to the instance's Run().
+unsigned int WINAPI ThreadWindows::StartThread(LPVOID lpParameter)
+{
+    static_cast<ThreadWindows*>(lpParameter)->Run();
+    return 0;
+}
+
+// Creates and starts the OS thread, blocks until Run() signals that the
+// thread is up, then applies the requested priority. threadID receives the
+// OS thread id. Returns false if thread creation fails.
+bool ThreadWindows::Start(unsigned int& threadID)
+{
+    _doNotCloseHandle = false;
+
+    // Set stack size to 1M
+    _thread=(HANDLE)_beginthreadex(NULL, 1024*1024, StartThread, (void*)this, 0,
+                                   &threadID);
+    if(_thread == NULL)
+    {
+        return false;
+    }
+    _id = threadID;
+    // Wait for Run() to set _alive/_dead before adjusting priority.
+    _event->Wait(INFINITE);
+
+    switch(_prio)
+    {
+    case kLowPriority:
+        SetThreadPriority(_thread, THREAD_PRIORITY_BELOW_NORMAL);
+        break;
+    case kNormalPriority:
+        SetThreadPriority(_thread, THREAD_PRIORITY_NORMAL);
+        break;
+    case kHighPriority:
+        SetThreadPriority(_thread, THREAD_PRIORITY_ABOVE_NORMAL);
+        break;
+    case kHighestPriority:
+        SetThreadPriority(_thread, THREAD_PRIORITY_HIGHEST);
+        break;
+    case kRealtimePriority:
+        SetThreadPriority(_thread, THREAD_PRIORITY_TIME_CRITICAL);
+        break;
+    };
+    return true;
+}
+
+// Restricts the thread to the given set of processors.
+// processorNumbers: array of zero-based processor indices.
+// amountOfProcessors: number of entries in processorNumbers.
+// Returns true if SetThreadAffinityMask succeeded.
+bool ThreadWindows::SetAffinity(const int* processorNumbers,
+                                const unsigned int amountOfProcessors)
+{
+    DWORD_PTR processorBitMask = 0;
+    for(unsigned int processorIndex = 0;
+        processorIndex < amountOfProcessors;
+        processorIndex++)
+    {
+        // Convert from an array with processor numbers to a bitmask.
+        // Processor numbers start at zero. OR each processor's bit into the
+        // mask; the previous plain '=' kept only the last entry.
+        processorBitMask |=
+            static_cast<DWORD_PTR>(1) << processorNumbers[processorIndex];
+    }
+    return SetThreadAffinityMask(_thread,processorBitMask) != 0;
+}
+
+// Requests the loop in Run() to exit after its current iteration.
+void ThreadWindows::SetNotAlive()
+{
+    _alive = false;
+}
+
+// Forcibly terminates the thread with TerminateThread. NOTE: this gives the
+// thread no chance to clean up; prefer Stop() for orderly shutdown.
+bool ThreadWindows::Shutdown()
+{
+    DWORD exitCode = 0;
+    BOOL ret = TRUE;
+    if (_thread)
+    {
+        ret = TerminateThread(_thread, exitCode);
+        _alive = false;
+        _dead = true;
+        // NOTE(review): the handle is dropped without CloseHandle here —
+        // looks like a handle leak; confirm before relying on Shutdown().
+        _thread = NULL;
+    }
+    return ret == TRUE;
+}
+
+// Asks the thread to exit and waits up to 2 seconds for it to finish.
+// Takes over closing the thread handle from Run(). Returns true if the
+// thread is known to have terminated.
+bool ThreadWindows::Stop()
+{
+    _critsectStop->Enter();
+    // Prevents the handle from being closed in ThreadWindows::Run()
+    _doNotCloseHandle = true;
+    _alive = false;
+    bool signaled = false;
+    if (_thread && !_dead)
+    {
+        // Drop the lock while waiting so Run() can finish and set _dead.
+        _critsectStop->Leave();
+        // Wait up to 2 seconds for the thread to complete.
+        if( WAIT_OBJECT_0 == WaitForSingleObject(_thread, 2000))
+        {
+            signaled = true;
+        }
+        _critsectStop->Enter();
+    }
+    if (_thread)
+    {
+        CloseHandle(_thread);
+        _thread = NULL;
+    }
+    _critsectStop->Leave();
+
+    if (_dead || signaled)
+    {
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
+// Thread body: signals Start(), optionally names the thread for the VS
+// debugger, then loops over the run-function until it returns false or
+// _alive is cleared. Closes the thread handle itself unless Stop() has
+// taken ownership via _doNotCloseHandle.
+void ThreadWindows::Run()
+{
+    _alive = true;
+    _dead = false;
+    _event->Set();
+
+    // All tracing must be after _event->Set to avoid deadlock in Trace.
+    if (_setThreadName)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, _id,
+                     "Thread with name:%s started ", _name);
+        SetThreadName(-1, _name); // -1, set thread name for the calling thread.
+    }else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, _id,
+                     "Thread without name started");
+    }
+
+    do
+    {
+        if (_runFunction)
+        {
+            // The run-function returning false means "stop this thread".
+            if (!_runFunction(_obj))
+            {
+                _alive = false;
+            }
+        } else {
+            _alive = false;
+        }
+    } while(_alive);
+
+    if (_setThreadName)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, _id,
+                     "Thread with name:%s stopped", _name);
+    } else {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceUtility,_id,
+                     "Thread without name stopped");
+    }
+
+    _critsectStop->Enter();
+
+    // Close the handle here unless Stop() has claimed responsibility for it.
+    if (_thread && !_doNotCloseHandle)
+    {
+        HANDLE thread = _thread;
+        _thread = NULL;
+        CloseHandle(thread);
+    }
+    // Must be set under the lock so Stop() sees a consistent state.
+    _dead = true;
+
+    _critsectStop->Leave();
+};
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/thread_win.h b/trunk/src/system_wrappers/source/thread_win.h
new file mode 100644
index 0000000..4fd7523
--- /dev/null
+++ b/trunk/src/system_wrappers/source/thread_win.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WINDOWS_H_
+
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+#include "critical_section_wrapper.h"
+
+#include <windows.h>
+
+namespace webrtc {
+
+// Win32 implementation of the ThreadWrapper interface.
+class ThreadWindows : public ThreadWrapper
+{
+public:
+    ThreadWindows(ThreadRunFunction func, ThreadObj obj, ThreadPriority prio,
+                  const char* threadName);
+    virtual ~ThreadWindows();
+
+    virtual bool Start(unsigned int& id);
+    bool SetAffinity(const int* processorNumbers,
+                     const unsigned int amountOfProcessors);
+    virtual bool Stop();
+    virtual void SetNotAlive();
+
+    // _beginthreadex entry point; forwards to Run().
+    static unsigned int WINAPI StartThread(LPVOID lpParameter);
+
+    virtual bool Shutdown();
+
+protected:
+    // Thread body; loops over the run-function until stopped.
+    virtual void Run();
+
+private:
+    ThreadRunFunction    _runFunction;
+    ThreadObj            _obj;
+
+    bool                    _alive;
+    bool                    _dead;
+
+    // TODO (hellner)
+    // _doNotCloseHandle member seem pretty redundant. Should be able to remove
+    // it. Basically it should be fine to reclaim the handle when calling stop
+    // and in the destructor.
+    bool                    _doNotCloseHandle;
+    ThreadPriority          _prio;
+    EventWrapper*           _event;
+    CriticalSectionWrapper* _critsectStop;
+
+    HANDLE                  _thread;
+    unsigned int            _id;
+    // zero-terminated thread name string
+    char                    _name[kThreadMaxNameLength];
+    bool                    _setThreadName;
+
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/source/trace_impl.cc b/trunk/src/system_wrappers/source/trace_impl.cc
new file mode 100644
index 0000000..ad908a3
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_impl.cc
@@ -0,0 +1,829 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace_impl.h"
+
+#include <cassert>
+#include <string.h> // memset
+
+#ifdef _WIN32
+#include "trace_win.h"
+#else
+#include <stdio.h>
+#include <time.h>
+#include <stdarg.h>
+#include "trace_posix.h"
+#endif // _WIN32
+
+#define KEY_LEN_CHARS 31
+
+#ifdef _WIN32
+    #pragma warning(disable:4355)
+// VS 2005: Disable warnings for default initialized arrays.
+    #pragma warning(disable:4351)
+#endif // _WIN32
+
+namespace webrtc {
+// Process-wide trace level filter; messages whose level does not pass the
+// filter are dropped before the singleton lock is taken.
+static WebRtc_UWord32 levelFilter = kTraceDefault;
+
+// Construct On First Use idiom. Avoids "static initialization order fiasco".
+TraceImpl* TraceImpl::StaticInstance(CountOperation count_operation,
+                                     const TraceLevel level)
+{
+    // Sanities to avoid taking lock unless absolutely necessary (for
+    // performance reasons).
+    // count_operation == kAddRefNoCreate implies that a message will be
+    // written to file.
+    if((level != kTraceAll) && (count_operation == kAddRefNoCreate))
+    {
+        if(!(level & levelFilter))
+        {
+            // Filtered-out level: skip the singleton lookup entirely.
+            return NULL;
+        }
+    }
+    TraceImpl* impl =
+        GetStaticInstance<TraceImpl>(count_operation);
+    return impl;
+}
+
+// Returns the trace singleton if |level| passes the filter and an instance
+// already exists; may return NULL.
+TraceImpl* TraceImpl::GetTrace(const TraceLevel level)
+{
+    return StaticInstance(kAddRefNoCreate, level);
+}
+
+// Allocates the platform-specific trace implementation.
+TraceImpl* TraceImpl::CreateInstance()
+{
+#if defined(_WIN32)
+    return new TraceWindows();
+#else
+    return new TracePosix();
+#endif
+}
+
+// Creates the trace singleton: allocates the double-buffered message queues
+// and starts the worker thread that flushes them to file/callback.
+TraceImpl::TraceImpl()
+    : _critsectInterface(CriticalSectionWrapper::CreateCriticalSection()),
+      _callback(NULL),
+      _rowCountText(0),
+      _fileCountText(0),
+      _traceFile(*FileWrapper::Create()),
+      _thread(*ThreadWrapper::CreateThread(TraceImpl::Run, this,
+                                           kHighestPriority, "Trace")),
+      _event(*EventWrapper::Create()),
+      _critsectArray(CriticalSectionWrapper::CreateCriticalSection()),
+      _nextFreeIdx(),
+      _level(),
+      _length(),
+      _messageQueue(),
+      _activeQueue(0)
+{
+    _nextFreeIdx[0] = 0;
+    _nextFreeIdx[1] = 0;
+
+    // Start the worker thread created in the initializer list above.
+    unsigned int tid = 0;
+    _thread.Start(tid);
+
+    // Pre-allocate message storage for both queues.
+    for(int m = 0; m < WEBRTC_TRACE_NUM_ARRAY; m++)
+    {
+        for(int n = 0; n < WEBRTC_TRACE_MAX_QUEUE; n++)
+        {
+            _messageQueue[m][n] = new
+                char[WEBRTC_TRACE_MAX_MESSAGE_SIZE];
+        }
+    }
+}
+
+// Stops the worker thread after letting it flush pending messages, then
+// flushes and closes the trace file. Returns whether the thread stopped.
+bool TraceImpl::StopThread()
+{
+    // Release the worker thread so that it can flush any lingering messages.
+    _event.Set();
+
+    // Allow 10 ms for pending messages to be flushed out.
+    // TODO (hellner): why not use condition variables to do this? Or let the
+    //                 worker thread die and let this thread flush remaining
+    //                 messages?
+#ifdef _WIN32
+    Sleep(10);
+#else
+    timespec t;
+    t.tv_sec = 0;
+    t.tv_nsec = 10*1000000;
+    nanosleep(&t,NULL);
+#endif
+
+    _thread.SetNotAlive();
+    // Make sure the thread finishes as quickly as possible (instead of having
+    // to wait for the timeout).
+    _event.Set();
+    bool stopped = _thread.Stop();
+
+    CriticalSectionScoped lock(_critsectInterface);
+    _traceFile.Flush();
+    _traceFile.CloseFile();
+    return stopped;
+}
+
+// Stops the worker thread and releases all owned resources. The reference
+// members were allocated with Create() in the constructor, hence delete &x.
+TraceImpl::~TraceImpl()
+{
+    StopThread();
+    delete &_event;
+    delete &_traceFile;
+    delete &_thread;
+    delete _critsectInterface;
+    delete _critsectArray;
+
+    for(int m = 0; m < WEBRTC_TRACE_NUM_ARRAY; m++)
+    {
+        for(int n = 0; n < WEBRTC_TRACE_MAX_QUEUE; n++)
+        {
+            delete [] _messageQueue[m][n];
+        }
+    }
+}
+
+// Writes the calling thread's id into |traceMessage| and returns the number
+// of characters written.
+WebRtc_Word32 TraceImpl::AddThreadId(char* traceMessage) const {
+  WebRtc_UWord32 threadId = ThreadWrapper::GetThreadId();
+  // Message is 12 characters ("%10u; ").
+  return sprintf(traceMessage, "%10u; ", threadId);
+}
+
+// Writes a fixed-width (12 character) textual label for |level| into
+// |szMessage|. Returns the number of characters written, or 0 for an
+// unrecognized level (asserts in debug builds).
+WebRtc_Word32 TraceImpl::AddLevel(char* szMessage, const TraceLevel level) const
+{
+    switch (level)
+    {
+        case kTraceStateInfo:
+            sprintf (szMessage, "STATEINFO ; ");
+            break;
+        case kTraceWarning:
+            sprintf (szMessage, "WARNING   ; ");
+            break;
+        case kTraceError:
+            sprintf (szMessage, "ERROR     ; ");
+            break;
+        case kTraceCritical:
+            sprintf (szMessage, "CRITICAL  ; ");
+            break;
+        case kTraceInfo:
+            sprintf (szMessage, "DEBUGINFO ; ");
+            break;
+        case kTraceModuleCall:
+            sprintf (szMessage, "MODULECALL; ");
+            break;
+        case kTraceMemory:
+            sprintf (szMessage, "MEMORY    ; ");
+            break;
+        case kTraceTimer:
+            sprintf (szMessage, "TIMER     ; ");
+            break;
+        case kTraceStream:
+            sprintf (szMessage, "STREAM    ; ");
+            break;
+        case kTraceApiCall:
+            sprintf (szMessage, "APICALL   ; ");
+            break;
+        case kTraceDebug:
+            sprintf (szMessage, "DEBUG     ; ");
+            break;
+        default:
+            assert(false);
+            return 0;
+    }
+    // All messages are 12 characters.
+    return 12;
+}
+
+// Writes a fixed-width (25 character) module/id prefix into |traceMessage|.
+// An id of -1 is printed as a single field; any other id is split into an
+// engine part (high 16 bits) and a channel part (low 16 bits).
+WebRtc_Word32 TraceImpl::AddModuleAndId(char* traceMessage,
+                                        const TraceModule module,
+                                        const WebRtc_Word32 id) const
+{
+    // Use long int to prevent problems with different definitions of
+    // WebRtc_Word32.
+    // TODO (hellner): is this actually a problem? If so, it should be better to
+    //                 clean up WebRtc_Word32
+    const long int idl = id;
+    if(idl != -1)
+    {
+        const unsigned long int idEngine = id>>16;
+        const unsigned long int idChannel = id & 0xffff;
+
+        switch (module)
+        {
+            case kTraceVoice:
+                sprintf(traceMessage, "       VOICE:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceVideo:
+                sprintf(traceMessage, "       VIDEO:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceUtility:
+                sprintf(traceMessage, "     UTILITY:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceRtpRtcp:
+                sprintf(traceMessage, "    RTP/RTCP:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceTransport:
+                sprintf(traceMessage, "   TRANSPORT:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceAudioCoding:
+                sprintf(traceMessage, "AUDIO CODING:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceSrtp:
+                sprintf(traceMessage, "        SRTP:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceAudioMixerServer:
+                sprintf(traceMessage, " AUDIO MIX/S:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceAudioMixerClient:
+                sprintf(traceMessage, " AUDIO MIX/C:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceVideoCoding:
+                sprintf(traceMessage, "VIDEO CODING:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceVideoMixer:
+                // Print sleep time and API call
+                sprintf(traceMessage, "   VIDEO MIX:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceFile:
+                sprintf(traceMessage, "        FILE:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceAudioProcessing:
+                sprintf(traceMessage, "  AUDIO PROC:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceAudioDevice:
+                sprintf(traceMessage, "AUDIO DEVICE:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceVideoRenderer:
+                sprintf(traceMessage, "VIDEO RENDER:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceVideoCapture:
+                sprintf(traceMessage, "VIDEO CAPTUR:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+            case kTraceVideoPreocessing:
+                sprintf(traceMessage, "  VIDEO PROC:%5ld %5ld;", idEngine,
+                        idChannel);
+                break;
+        }
+    } else {
+        // No id supplied: print -1 in a single 11-character field.
+        switch (module)
+        {
+            case kTraceVoice:
+                sprintf (traceMessage, "       VOICE:%11ld;", idl);
+                break;
+            case kTraceVideo:
+                sprintf (traceMessage, "       VIDEO:%11ld;", idl);
+                break;
+            case kTraceUtility:
+                sprintf (traceMessage, "     UTILITY:%11ld;", idl);
+                break;
+            case kTraceRtpRtcp:
+                sprintf (traceMessage, "    RTP/RTCP:%11ld;", idl);
+                break;
+            case kTraceTransport:
+                sprintf (traceMessage, "   TRANSPORT:%11ld;", idl);
+                break;
+            case kTraceAudioCoding:
+                sprintf (traceMessage, "AUDIO CODING:%11ld;", idl);
+                break;
+            case kTraceSrtp:
+                sprintf (traceMessage, "        SRTP:%11ld;", idl);
+                break;
+            case kTraceAudioMixerServer:
+                sprintf (traceMessage, " AUDIO MIX/S:%11ld;", idl);
+                break;
+            case kTraceAudioMixerClient:
+                sprintf (traceMessage, " AUDIO MIX/C:%11ld;", idl);
+                break;
+            case kTraceVideoCoding:
+                sprintf (traceMessage, "VIDEO CODING:%11ld;", idl);
+                break;
+            case kTraceVideoMixer:
+                sprintf (traceMessage, "   VIDEO MIX:%11ld;", idl);
+                break;
+            case kTraceFile:
+                sprintf (traceMessage, "        FILE:%11ld;", idl);
+                break;
+            case kTraceAudioProcessing:
+                sprintf (traceMessage, "  AUDIO PROC:%11ld;", idl);
+                break;
+            case kTraceAudioDevice:
+                sprintf (traceMessage, "AUDIO DEVICE:%11ld;", idl);
+                break;
+            case kTraceVideoRenderer:
+                sprintf (traceMessage, "VIDEO RENDER:%11ld;", idl);
+                break;
+            case kTraceVideoCapture:
+                sprintf (traceMessage, "VIDEO CAPTUR:%11ld;", idl);
+                break;
+            case kTraceVideoPreocessing:
+                sprintf (traceMessage, "  VIDEO PROC:%11ld;", idl);
+                break;
+        }
+    }
+    // All messages are 25 characters.
+    return 25;
+}
+
+// Closes any current trace file and opens |fileNameUTF8| for writing.
+// When addFileCounter is set, a counter is embedded in the file name so
+// that wrapped logs rotate into new files. A NULL name just closes the
+// current file. Returns 0 on success, -1 on failure to open.
+WebRtc_Word32 TraceImpl::SetTraceFileImpl(const char* fileNameUTF8,
+                                          const bool addFileCounter)
+{
+    CriticalSectionScoped lock(_critsectInterface);
+
+    _traceFile.Flush();
+    _traceFile.CloseFile();
+
+    if(fileNameUTF8)
+    {
+        if(addFileCounter)
+        {
+            _fileCountText = 1;
+
+            char fileNameWithCounterUTF8[FileWrapper::kMaxFileNameSize];
+            CreateFileName(fileNameUTF8, fileNameWithCounterUTF8,
+                           _fileCountText);
+            if(_traceFile.OpenFile(fileNameWithCounterUTF8, false, false,
+                                   true) == -1)
+            {
+                return -1;
+            }
+        }else {
+            _fileCountText = 0;
+            if(_traceFile.OpenFile(fileNameUTF8, false, false, true) == -1)
+            {
+                return -1;
+            }
+        }
+    }
+    // Restart the row count so the header block is written to the new file.
+    _rowCountText = 0;
+    return 0;
+}
+
+// Copies the current trace file name into |fileNameUTF8|.
+WebRtc_Word32 TraceImpl::TraceFileImpl(
+    char fileNameUTF8[FileWrapper::kMaxFileNameSize])
+{
+    CriticalSectionScoped lock(_critsectInterface);
+    return _traceFile.FileName(fileNameUTF8, FileWrapper::kMaxFileNameSize);
+}
+
+// Installs (or clears, with NULL) the callback that receives trace lines.
+WebRtc_Word32 TraceImpl::SetTraceCallbackImpl(TraceCallback* callback)
+{
+    CriticalSectionScoped lock(_critsectInterface);
+    _callback = callback;
+    return 0;
+}
+
+// Appends |msg| to |traceMessage|, truncating so the total line stays within
+// WEBRTC_TRACE_MAX_MESSAGE_SIZE (leaving room for a trailing newline and
+// NULL terminator). Returns the appended length including the NULL
+// terminator, or -1 when there is no room left.
+WebRtc_Word32 TraceImpl::AddMessage(
+    char* traceMessage,
+    const char msg[WEBRTC_TRACE_MAX_MESSAGE_SIZE],
+    const WebRtc_UWord16 writtenSoFar) const
+
+{
+    int length = 0;
+    if(writtenSoFar >= WEBRTC_TRACE_MAX_MESSAGE_SIZE)
+    {
+        return -1;
+    }
+    // - 2 to leave room for newline and NULL termination
+#ifdef _WIN32
+    // _snprintf returns a negative value on truncation and may not
+    // NULL-terminate; cap and terminate manually in that case.
+    length = _snprintf(traceMessage,
+                       WEBRTC_TRACE_MAX_MESSAGE_SIZE - writtenSoFar - 2,
+                       "%s",msg);
+    if(length < 0)
+    {
+        length = WEBRTC_TRACE_MAX_MESSAGE_SIZE - writtenSoFar - 2;
+        traceMessage[length] = 0;
+    }
+#else
+    // snprintf returns the length that would have been written; clamp it
+    // to the actual buffer size on truncation.
+    length = snprintf(traceMessage,
+                      WEBRTC_TRACE_MAX_MESSAGE_SIZE-writtenSoFar-2, "%s",msg);
+    if(length < 0 || length > WEBRTC_TRACE_MAX_MESSAGE_SIZE-writtenSoFar - 2)
+    {
+        length = WEBRTC_TRACE_MAX_MESSAGE_SIZE - writtenSoFar - 2;
+        traceMessage[length] = 0;
+    }
+#endif
+    // Length with NULL termination.
+    return length+1;
+}
+
+// Appends a formatted trace line to the active queue. When the queue is
+// full: if no output (file or callback) is attached, the oldest 3/4 of the
+// messages are discarded; otherwise new messages are dropped. When the
+// queue is nearly full, a synthetic warning message is queued as well.
+void TraceImpl::AddMessageToList(
+    const char traceMessage[WEBRTC_TRACE_MAX_MESSAGE_SIZE],
+    const WebRtc_UWord16 length,
+    const TraceLevel level)
+{
+    CriticalSectionScoped lock(_critsectArray);
+
+    if(_nextFreeIdx[_activeQueue] >= WEBRTC_TRACE_MAX_QUEUE)
+    {
+        if( ! _traceFile.Open() &&
+            !_callback)
+        {
+            // Keep at least the last 1/4 of old messages when not logging.
+            // TODO (hellner): isn't this redundant. The user will make it known
+            //                 when to start logging. Why keep messages before
+            //                 that?
+            for(int n = 0; n < WEBRTC_TRACE_MAX_QUEUE/4; n++)
+            {
+                const int lastQuarterOffset = (3*WEBRTC_TRACE_MAX_QUEUE/4);
+                memcpy(_messageQueue[_activeQueue][n],
+                       _messageQueue[_activeQueue][n + lastQuarterOffset],
+                       WEBRTC_TRACE_MAX_MESSAGE_SIZE);
+            }
+            _nextFreeIdx[_activeQueue] = WEBRTC_TRACE_MAX_QUEUE/4;
+        } else {
+            // More messages are being written than there is room for in the
+            // buffer. Drop any new messages.
+            // TODO (hellner): its probably better to drop old messages instead
+            //                 of new ones. One step further: if this happens
+            //                 it's due to writing faster than what can be
+            //                 processed. Maybe modify the filter at this point.
+            //                 E.g. turn off STREAM.
+            return;
+        }
+    }
+
+    WebRtc_UWord16 idx = _nextFreeIdx[_activeQueue];
+    _nextFreeIdx[_activeQueue]++;
+
+    _level[_activeQueue][idx] = level;
+    _length[_activeQueue][idx] = length;
+    memcpy(_messageQueue[_activeQueue][idx], traceMessage, length);
+
+    if(_nextFreeIdx[_activeQueue] == WEBRTC_TRACE_MAX_QUEUE-1)
+    {
+        // Logging more messages than can be worked off. Log a warning.
+        const char warning_msg[] = "WARNING MISSING TRACE MESSAGES\n";
+        const WebRtc_UWord16 warning_idx = _nextFreeIdx[_activeQueue];
+        _level[_activeQueue][warning_idx] = kTraceWarning;
+        _length[_activeQueue][warning_idx] = strlen(warning_msg);
+        // Bug fix: copy the warning message's own length. The original code
+        // passed _length[_activeQueue][idx] (the previous message's length),
+        // copying the wrong number of bytes from warning_msg.
+        memcpy(_messageQueue[_activeQueue][warning_idx], warning_msg,
+               _length[_activeQueue][warning_idx]);
+        _nextFreeIdx[_activeQueue]++;
+    }
+}
+
+// ThreadWrapper entry point; dispatches to the instance's Process().
+bool TraceImpl::Run(void* obj)
+{
+    return static_cast<TraceImpl*>(obj)->Process();
+}
+
+// Worker-thread loop body: waits up to 1 s for new messages. On signal,
+// drains the queues to the file and/or callback; on timeout, flushes the
+// file buffer. Always returns true so the thread keeps running.
+bool TraceImpl::Process()
+{
+    if(_event.Wait(1000) == kEventSignaled)
+    {
+        if(_traceFile.Open() || _callback)
+        {
+            // An output (file or callback) is attached; drain the queues.
+            WriteToFile();
+        }
+    } else {
+        _traceFile.Flush();
+    }
+    return true;
+}
+
+// Drains the active message queue to the callback and/or trace file.
+// Swaps the double buffer under the array lock so producers can keep
+// queuing while this thread writes; handles file wrapping/rotation and
+// writes a date/build header at the top of each (new) file.
+void TraceImpl::WriteToFile()
+{
+    WebRtc_UWord8 localQueueActive = 0;
+    WebRtc_UWord16 localNextFreeIdx = 0;
+
+    // There are two buffer. One for reading (for writing to file) and one for
+    // writing (for storing new messages). Let new messages be posted to the
+    // unused buffer so that the current buffer can be flushed safely.
+    {
+        CriticalSectionScoped lock(_critsectArray);
+        localNextFreeIdx = _nextFreeIdx[_activeQueue];
+        _nextFreeIdx[_activeQueue] = 0;
+        localQueueActive = _activeQueue;
+        if(_activeQueue == 0)
+        {
+            _activeQueue = 1;
+        } else
+        {
+            _activeQueue = 0;
+        }
+    }
+    if(localNextFreeIdx == 0)
+    {
+        // Nothing queued.
+        return;
+    }
+
+    CriticalSectionScoped lock(_critsectInterface);
+
+    for(WebRtc_UWord16 idx = 0; idx <localNextFreeIdx; idx++)
+    {
+        TraceLevel localLevel = _level[localQueueActive][idx];
+        if(_callback)
+        {
+            _callback->Print(localLevel, _messageQueue[localQueueActive][idx],
+                             _length[localQueueActive][idx]);
+        }
+        if(_traceFile.Open())
+        {
+            if(_rowCountText > WEBRTC_TRACE_MAX_FILE_SIZE)
+            {
+                // wrap file
+                _rowCountText = 0;
+                _traceFile.Flush();
+
+                if(_fileCountText == 0)
+                {
+                    // No file counter: overwrite from the start of the file.
+                    _traceFile.Rewind();
+                } else
+                {
+                    // File counter in use: rotate to a new numbered file.
+                    char oldFileName[FileWrapper::kMaxFileNameSize];
+                    char newFileName[FileWrapper::kMaxFileNameSize];
+
+                    // get current name
+                    _traceFile.FileName(oldFileName,
+                                        FileWrapper::kMaxFileNameSize);
+                    _traceFile.CloseFile();
+
+                    _fileCountText++;
+
+                    UpdateFileName(oldFileName, newFileName, _fileCountText);
+
+                    if(_traceFile.OpenFile(newFileName, false, false,
+                                           true) == -1)
+                    {
+                        return;
+                    }
+                }
+            }
+            if(_rowCountText ==  0)
+            {
+                // Top of a fresh file: write the date/time and build info.
+                char message[WEBRTC_TRACE_MAX_MESSAGE_SIZE + 1];
+                WebRtc_Word32 length = AddDateTimeInfo(message);
+                if(length != -1)
+                {
+                    message[length] = 0;
+                    message[length-1] = '\n';
+                    _traceFile.Write(message, length);
+                    _rowCountText++;
+                }
+                length = AddBuildInfo(message);
+                if(length != -1)
+                {
+                    message[length+1] = 0;
+                    message[length] = '\n';
+                    message[length-1] = '\n';
+                    _traceFile.Write(message, length+1);
+                    _rowCountText++;
+                    _rowCountText++;
+                }
+            }
+            // Replace the NULL terminator with a newline before writing.
+            WebRtc_UWord16 length = _length[localQueueActive][idx];
+            _messageQueue[localQueueActive][idx][length] = 0;
+            _messageQueue[localQueueActive][idx][length-1] = '\n';
+            _traceFile.Write(_messageQueue[localQueueActive][idx], length);
+            _rowCountText++;
+        }
+    }
+}
+
+// Formats a complete trace line (level, time, module/id, thread id, message)
+// into a stack buffer and queues it for the worker thread. Does nothing when
+// |level| is filtered out.
+void TraceImpl::AddImpl(const TraceLevel level, const TraceModule module,
+                        const WebRtc_Word32 id,
+                        const char msg[WEBRTC_TRACE_MAX_MESSAGE_SIZE])
+{
+    if (!TraceCheck(level))
+    {
+        return;
+    }
+
+    char traceMessage[WEBRTC_TRACE_MAX_MESSAGE_SIZE];
+    char* writePos = traceMessage;
+    WebRtc_Word32 totalLength = 0;
+
+    // Each Add* helper appends one fixed-width field and returns its length
+    // (negative on failure, in which case the whole line is abandoned).
+    WebRtc_Word32 fieldLength = AddLevel(writePos, level);
+    if(fieldLength == -1)
+    {
+        return;
+    }
+    writePos += fieldLength;
+    totalLength += fieldLength;
+
+    fieldLength = AddTime(writePos, level);
+    if(fieldLength == -1)
+    {
+        return;
+    }
+    writePos += fieldLength;
+    totalLength += fieldLength;
+
+    fieldLength = AddModuleAndId(writePos, module, id);
+    if(fieldLength == -1)
+    {
+        return;
+    }
+    writePos += fieldLength;
+    totalLength += fieldLength;
+
+    fieldLength = AddThreadId(writePos);
+    if(fieldLength < 0)
+    {
+        return;
+    }
+    writePos += fieldLength;
+    totalLength += fieldLength;
+
+    fieldLength = AddMessage(writePos, msg, (WebRtc_UWord16)totalLength);
+    if(fieldLength == -1)
+    {
+        return;
+    }
+    totalLength += fieldLength;
+    AddMessageToList(traceMessage, (WebRtc_UWord16)totalLength, level);
+
+    // Make sure that messages are written as soon as possible.
+    _event.Set();
+}
+
+bool TraceImpl::TraceCheck(const TraceLevel level) const
+{
+    return (level & levelFilter)? true:false;
+}
+
+bool TraceImpl::UpdateFileName(
+    const char fileNameUTF8[FileWrapper::kMaxFileNameSize],
+    char fileNameWithCounterUTF8[FileWrapper::kMaxFileNameSize],
+    const WebRtc_UWord32 newCount) const
+{
+    WebRtc_Word32 length = (WebRtc_Word32)strlen(fileNameUTF8);
+    if(length < 0)
+    {
+        return false;
+    }
+
+    WebRtc_Word32 lengthWithoutFileEnding = length-1;
+    while(lengthWithoutFileEnding > 0)
+    {
+        if(fileNameUTF8[lengthWithoutFileEnding] == '.')
+        {
+            break;
+        } else {
+            lengthWithoutFileEnding--;
+        }
+    }
+    if(lengthWithoutFileEnding == 0)
+    {
+        lengthWithoutFileEnding = length;
+    }
+    WebRtc_Word32 lengthTo_ = lengthWithoutFileEnding - 1;
+    while(lengthTo_ > 0)
+    {
+        if(fileNameUTF8[lengthTo_] == '_')
+        {
+            break;
+        } else {
+            lengthTo_--;
+        }
+    }
+
+    memcpy(fileNameWithCounterUTF8, fileNameUTF8, lengthTo_);
+    sprintf(fileNameWithCounterUTF8+lengthTo_, "_%lu%s",
+            static_cast<long unsigned int> (newCount),
+            fileNameUTF8+lengthWithoutFileEnding);
+    return true;
+}
+
+bool TraceImpl::CreateFileName(
+    const char fileNameUTF8[FileWrapper::kMaxFileNameSize],
+    char fileNameWithCounterUTF8[FileWrapper::kMaxFileNameSize],
+    const WebRtc_UWord32 newCount) const
+{
+    WebRtc_Word32 length = (WebRtc_Word32)strlen(fileNameUTF8);
+    if(length < 0)
+    {
+        return false;
+    }
+
+    WebRtc_Word32 lengthWithoutFileEnding = length-1;
+    while(lengthWithoutFileEnding > 0)
+    {
+        if(fileNameUTF8[lengthWithoutFileEnding] == '.')
+        {
+            break;
+        }else
+        {
+            lengthWithoutFileEnding--;
+        }
+    }
+    if(lengthWithoutFileEnding == 0)
+    {
+        lengthWithoutFileEnding = length;
+    }
+    memcpy(fileNameWithCounterUTF8, fileNameUTF8, lengthWithoutFileEnding);
+    sprintf(fileNameWithCounterUTF8+lengthWithoutFileEnding, "_%lu%s",
+            static_cast<long unsigned int> (newCount),
+            fileNameUTF8+lengthWithoutFileEnding);
+    return true;
+}
+
+void Trace::CreateTrace()
+{
+    TraceImpl::StaticInstance(kAddRef);
+}
+
+void Trace::ReturnTrace()
+{
+    TraceImpl::StaticInstance(kRelease);
+}
+
+WebRtc_Word32 Trace::SetLevelFilter(WebRtc_UWord32 filter)
+{
+    levelFilter = filter;
+    return 0;
+}
+
+WebRtc_Word32 Trace::LevelFilter(WebRtc_UWord32& filter)
+{
+    filter = levelFilter;
+    return 0;
+}
+
+WebRtc_Word32 Trace::TraceFile(char fileName[FileWrapper::kMaxFileNameSize])
+{
+    TraceImpl* trace = TraceImpl::GetTrace();
+    if(trace)
+    {
+        int retVal = trace->TraceFileImpl(fileName);
+        ReturnTrace();
+        return retVal;
+    }
+    return -1;
+}
+
+WebRtc_Word32 Trace::SetTraceFile(const char* fileName,
+                                  const bool addFileCounter)
+{
+    TraceImpl* trace = TraceImpl::GetTrace();
+    if(trace)
+    {
+        int retVal = trace->SetTraceFileImpl(fileName, addFileCounter);
+        ReturnTrace();
+        return retVal;
+    }
+    return -1;
+}
+
+WebRtc_Word32 Trace::SetTraceCallback(TraceCallback* callback)
+{
+    TraceImpl* trace = TraceImpl::GetTrace();
+    if(trace)
+    {
+        int retVal = trace->SetTraceCallbackImpl(callback);
+        ReturnTrace();
+        return retVal;
+    }
+    return -1;
+}
+
+void Trace::Add(const TraceLevel level, const TraceModule module,
+                const WebRtc_Word32 id, const char* msg, ...)
+
+{
+    TraceImpl* trace = TraceImpl::GetTrace(level);
+    if(trace)
+    {
+        if(trace->TraceCheck(level))
+        {
+            char tempBuff[WEBRTC_TRACE_MAX_MESSAGE_SIZE];
+            char* buff = 0;
+            if(msg)
+            {
+                va_list args;
+                va_start(args, msg);
+#ifdef _WIN32
+                _vsnprintf(tempBuff,WEBRTC_TRACE_MAX_MESSAGE_SIZE-1,msg,args);
+#else
+                vsnprintf(tempBuff,WEBRTC_TRACE_MAX_MESSAGE_SIZE-1,msg,args);
+#endif
+                va_end(args);
+                buff = tempBuff;
+            }
+            trace->AddImpl(level, module, id, buff);
+        }
+        ReturnTrace();
+    }
+}
+
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/trace_impl.h b/trunk/src/system_wrappers/source/trace_impl.h
new file mode 100644
index 0000000..2b85813
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_impl.h
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_IMPL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_IMPL_H_
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/file_wrapper.h"
+#include "system_wrappers/interface/static_instance.h"
+#include "system_wrappers/interface/trace.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+
+namespace webrtc {
+
+// TODO (pwestin) WEBRTC_TRACE_MAX_QUEUE needs to be tweaked
+// TODO (hellner) the buffer should be close to how much the system can write to
+//                file. Increasing the buffer will not solve anything. Sooner or
+//                later the buffer is going to fill up anyways.
+#if defined(MAC_IPHONE)
+    #define WEBRTC_TRACE_MAX_QUEUE  2000
+#else
+    #define WEBRTC_TRACE_MAX_QUEUE  8000
+#endif
+#define WEBRTC_TRACE_NUM_ARRAY 2
+#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 256
+// Total buffer size is WEBRTC_TRACE_NUM_ARRAY (number of buffer partitions) *
+// WEBRTC_TRACE_MAX_QUEUE (number of lines per buffer partition) *
+// WEBRTC_TRACE_MAX_MESSAGE_SIZE (number of 1 byte characters per line) =
+// 1 or 4 Mbyte
+
+#define WEBRTC_TRACE_MAX_FILE_SIZE 100*1000
+// Number of rows that may be written to file. On average 110 bytes per row (max
+// 256 bytes per row). So on average 110*100*1000 = 11 Mbyte, max 256*100*1000 =
+// 25.6 Mbyte
+
+class TraceImpl : public Trace
+{
+public:
+    virtual ~TraceImpl();
+
+    static TraceImpl* CreateInstance();
+    static TraceImpl* GetTrace(const TraceLevel level = kTraceAll);
+
+    WebRtc_Word32 SetTraceFileImpl(const char* fileName,
+                                   const bool addFileCounter);
+    WebRtc_Word32 TraceFileImpl(
+        char fileName[FileWrapper::kMaxFileNameSize]);
+
+    WebRtc_Word32 SetTraceCallbackImpl(TraceCallback* callback);
+
+    void AddImpl(const TraceLevel level, const TraceModule module,
+                 const WebRtc_Word32 id, const char* msg);
+
+    bool StopThread();
+
+    bool TraceCheck(const TraceLevel level) const;
+
+protected:
+    TraceImpl();
+
+    static TraceImpl* StaticInstance(CountOperation count_operation,
+        const TraceLevel level = kTraceAll);
+
+    WebRtc_Word32 AddThreadId(char* traceMessage) const;
+
+    // OS specific implementations
+    virtual WebRtc_Word32 AddTime(char* traceMessage,
+                                  const TraceLevel level) const = 0;
+
+    virtual WebRtc_Word32 AddBuildInfo(char* traceMessage) const = 0;
+    virtual WebRtc_Word32 AddDateTimeInfo(char* traceMessage) const = 0;
+
+    static bool Run(void* obj);
+    bool Process();
+
+private:
+    friend class Trace;
+
+    WebRtc_Word32 AddLevel(char* szMessage, const TraceLevel level) const;
+
+    WebRtc_Word32 AddModuleAndId(char* traceMessage, const TraceModule module,
+                                 const WebRtc_Word32 id) const;
+
+    WebRtc_Word32 AddMessage(char* traceMessage,
+                             const char msg[WEBRTC_TRACE_MAX_MESSAGE_SIZE],
+                             const WebRtc_UWord16 writtenSoFar) const;
+
+    void AddMessageToList(
+        const char traceMessage[WEBRTC_TRACE_MAX_MESSAGE_SIZE],
+        const WebRtc_UWord16 length,
+        const TraceLevel level);
+
+    bool UpdateFileName(
+        const char fileNameUTF8[FileWrapper::kMaxFileNameSize],
+        char fileNameWithCounterUTF8[FileWrapper::kMaxFileNameSize],
+        const WebRtc_UWord32 newCount) const;
+
+    bool CreateFileName(
+        const char fileNameUTF8[FileWrapper::kMaxFileNameSize],
+        char fileNameWithCounterUTF8[FileWrapper::kMaxFileNameSize],
+        const WebRtc_UWord32 newCount) const;
+
+    void WriteToFile();
+
+    CriticalSectionWrapper* _critsectInterface;
+    TraceCallback* _callback;
+    WebRtc_UWord32 _rowCountText;
+    WebRtc_UWord32 _fileCountText;
+
+    FileWrapper& _traceFile;
+    ThreadWrapper& _thread;
+    EventWrapper& _event;
+
+    // _critsectArray protects _activeQueue
+    CriticalSectionWrapper* _critsectArray;
+    WebRtc_UWord16 _nextFreeIdx[WEBRTC_TRACE_NUM_ARRAY];
+    TraceLevel _level[WEBRTC_TRACE_NUM_ARRAY][WEBRTC_TRACE_MAX_QUEUE];
+    WebRtc_UWord16 _length[WEBRTC_TRACE_NUM_ARRAY][WEBRTC_TRACE_MAX_QUEUE];
+    char* _messageQueue[WEBRTC_TRACE_NUM_ARRAY][WEBRTC_TRACE_MAX_QUEUE];
+    WebRtc_UWord8 _activeQueue;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_IMPL_H_
diff --git a/trunk/src/system_wrappers/source/trace_impl_no_op.cc b/trunk/src/system_wrappers/source/trace_impl_no_op.cc
new file mode 100644
index 0000000..1752871
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_impl_no_op.cc
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+
+namespace webrtc {
+
+void Trace::CreateTrace()
+{
+}
+
+void Trace::ReturnTrace()
+{
+}
+
+WebRtc_Word32 Trace::SetLevelFilter(WebRtc_UWord32 /*filter*/)
+{
+    return 0;
+}
+
+WebRtc_Word32 Trace::LevelFilter(WebRtc_UWord32& /*filter*/)
+{
+    return 0;
+}
+
+WebRtc_Word32 Trace::TraceFile(
+    WebRtc_Word8 /*fileName*/[1024])
+{
+    return -1;
+}
+
+WebRtc_Word32 Trace::SetTraceFile(const WebRtc_Word8* /*fileName*/,
+                                  const bool /*addFileCounter*/)
+{
+    return -1;
+}
+
+WebRtc_Word32 Trace::SetTraceCallback(TraceCallback* /*callback*/)
+{
+    return -1;
+}
+
+void Trace::Add(const TraceLevel /*level*/, const TraceModule /*module*/,
+                const WebRtc_Word32 /*id*/, const char* /*msg*/, ...)
+
+{
+}
+
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/trace_posix.cc b/trunk/src/system_wrappers/source/trace_posix.cc
new file mode 100644
index 0000000..caf1128
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_posix.cc
@@ -0,0 +1,118 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace_posix.h"
+
+#include <cassert>
+#include <stdarg.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/time.h>
+#include <time.h>
+#ifdef WEBRTC_ANDROID
+    #include <pthread.h>
+#else
+    #include <iostream>
+#endif
+
+#if defined(_DEBUG)
+    #define BUILDMODE "d"
+#elif defined(DEBUG)
+    #define BUILDMODE "d"
+#elif defined(NDEBUG)
+    #define BUILDMODE "r"
+#else
+    #define BUILDMODE "?"
+#endif
+#define BUILDTIME __TIME__
+#define BUILDDATE __DATE__
+// example: "Oct 10 2002 12:05:30 r"
+#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+namespace webrtc {
+TracePosix::TracePosix()
+{
+    struct timeval systemTimeHighRes;
+    gettimeofday(&systemTimeHighRes, 0);
+    _prevAPITickCount = _prevTickCount = systemTimeHighRes.tv_sec;
+}
+
+TracePosix::~TracePosix()
+{
+    StopThread();
+}
+
+WebRtc_Word32 TracePosix::AddTime(char* traceMessage,
+                                  const TraceLevel level) const
+{
+    struct timeval systemTimeHighRes;
+    if (gettimeofday(&systemTimeHighRes, 0) == -1)
+    {
+        return -1;
+    }
+    const struct tm* systemTime =
+        localtime(&systemTimeHighRes.tv_sec);
+
+    const WebRtc_UWord32 ms_time = systemTimeHighRes.tv_usec / 1000;
+    WebRtc_UWord32 prevTickCount = 0;
+    if (level == kTraceApiCall)
+    {
+        prevTickCount = _prevTickCount;
+        _prevTickCount = ms_time;
+    } else {
+        prevTickCount = _prevAPITickCount;
+        _prevAPITickCount = ms_time;
+    }
+    WebRtc_UWord32 dwDeltaTime = ms_time - prevTickCount;
+    if (prevTickCount == 0)
+    {
+        dwDeltaTime = 0;
+    }
+    if (dwDeltaTime > 0x0fffffff)
+    {
+        // Either wraparound or data race.
+        dwDeltaTime = 0;
+    }
+    if(dwDeltaTime > 99999)
+    {
+        dwDeltaTime = 99999;
+    }
+
+    sprintf(traceMessage, "(%2u:%2u:%2u:%3u |%5lu) ", systemTime->tm_hour,
+            systemTime->tm_min, systemTime->tm_sec, ms_time,
+            static_cast<unsigned long>(dwDeltaTime));
+    // Messages are 22 characters.
+    return 22;
+}
+
+WebRtc_Word32 TracePosix::AddBuildInfo(char* traceMessage) const
+{
+    sprintf(traceMessage, "Build info: %s", BUILDINFO);
+    // Include NULL termination (hence + 1).
+    return strlen(traceMessage) + 1;
+}
+
+WebRtc_Word32 TracePosix::AddDateTimeInfo(char* traceMessage) const
+{
+    time_t t;
+    time(&t);
+    sprintf(traceMessage, "Local Date: %s", ctime(&t));
+    WebRtc_Word32 len = static_cast<WebRtc_Word32>(strlen(traceMessage));
+
+    if ('\n' == traceMessage[len - 1])
+    {
+        traceMessage[len - 1] = '\0';
+        --len;
+    }
+
+    // Return length including the NULL termination (hence + 1).
+    return len + 1;
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/trace_posix.h b/trunk/src/system_wrappers/source/trace_posix.h
new file mode 100644
index 0000000..8c37cd2
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_posix.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_POSIX_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_POSIX_H_
+
+#include "critical_section_wrapper.h"
+#include "trace_impl.h"
+
+namespace webrtc {
+class TracePosix : public TraceImpl
+{
+public:
+    TracePosix();
+    virtual ~TracePosix();
+
+    virtual WebRtc_Word32 AddTime(char* traceMessage,
+                                  const TraceLevel level) const;
+
+    virtual WebRtc_Word32 AddBuildInfo(char* traceMessage) const;
+    virtual WebRtc_Word32 AddDateTimeInfo(char* traceMessage) const;
+
+private:
+    volatile mutable WebRtc_UWord32  _prevAPITickCount;
+    volatile mutable WebRtc_UWord32  _prevTickCount;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_POSIX_H_
diff --git a/trunk/src/system_wrappers/source/trace_unittest.cc b/trunk/src/system_wrappers/source/trace_unittest.cc
new file mode 100644
index 0000000..982e715
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_unittest.cc
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/interface/trace.h"
+
+#include "gtest/gtest.h"
+#include "system_wrappers/source/cpu_measurement_harness.h"
+#include "testsupport/fileutils.h"
+
+using webrtc::CpuMeasurementHarness;
+using webrtc::Trace;
+using webrtc::kTraceWarning;
+using webrtc::kTraceUtility;
+
+class Logger : public webrtc::CpuTarget {
+ public:
+  Logger() {
+    Trace::CreateTrace();
+    std::string trace_file = webrtc::test::OutputPath() +
+        "trace_unittest.txt";
+    Trace::SetTraceFile(trace_file.c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+  }
+  virtual ~Logger() {
+    Trace::ReturnTrace();
+  }
+
+  virtual bool DoWork() {
+    // Use input parameters to WEBRTC_TRACE that are not likely to be removed
+    // in future code. E.g. warnings will likely be kept and this file is in
+    // utility so it should use kTraceUtility.
+    WEBRTC_TRACE(kTraceWarning, kTraceUtility, 0, "Log line");
+    return true;
+  }
+};
+
+// This test is disabled because it measures CPU usage. This is flaky because
+// the CPU usage for a machine may spike due to OS or other application.
+TEST(TraceTest, DISABLED_CpuUsage) {
+  Logger logger;
+  const int periodicity_ms = 1;
+  const int iterations_per_period = 10;
+  const int duration_ms = 1000;
+  CpuMeasurementHarness* cpu_harness =
+      CpuMeasurementHarness::Create(&logger, periodicity_ms,
+                                    iterations_per_period, duration_ms);
+  cpu_harness->Run();
+  const int average_cpu = cpu_harness->AverageCpu();
+  EXPECT_GE(5, average_cpu);
+}
diff --git a/trunk/src/system_wrappers/source/trace_win.cc b/trunk/src/system_wrappers/source/trace_win.cc
new file mode 100644
index 0000000..db3da76
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_win.cc
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace_win.h"
+
+#include <cassert>
+#include <stdarg.h>
+
+#include "Mmsystem.h"
+
+#if defined(_DEBUG)
+    #define BUILDMODE "d"
+#elif defined(DEBUG)
+    #define BUILDMODE "d"
+#elif defined(NDEBUG)
+    #define BUILDMODE "r"
+#else
+    #define BUILDMODE "?"
+#endif
+#define BUILDTIME __TIME__
+#define BUILDDATE __DATE__
+// Example: "Oct 10 2002 12:05:30 r"
+#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+namespace webrtc {
+TraceWindows::TraceWindows()
+    : _prevAPITickCount(0),
+      _prevTickCount(0)
+{
+}
+
+TraceWindows::~TraceWindows()
+{
+    StopThread();
+}
+
+WebRtc_Word32 TraceWindows::AddTime(char* traceMessage,
+                                    const TraceLevel level) const
+{
+    WebRtc_UWord32 dwCurrentTime = timeGetTime();
+    SYSTEMTIME systemTime;
+    GetSystemTime(&systemTime);
+
+    if(level == kTraceApiCall)
+    {
+        WebRtc_UWord32 dwDeltaTime = dwCurrentTime- _prevTickCount;
+        _prevTickCount = dwCurrentTime;
+
+        if(_prevTickCount == 0)
+        {
+            dwDeltaTime = 0;
+        }
+        if(dwDeltaTime > 0x0fffffff)
+        {
+            // Either wraparound or data race.
+            dwDeltaTime = 0;
+        }
+        if(dwDeltaTime > 99999)
+        {
+            dwDeltaTime = 99999;
+        }
+
+        sprintf (traceMessage, "(%2u:%2u:%2u:%3u |%5lu) ", systemTime.wHour,
+                 systemTime.wMinute, systemTime.wSecond,
+                 systemTime.wMilliseconds, dwDeltaTime);
+    } else {
+        WebRtc_UWord32 dwDeltaTime = dwCurrentTime - _prevAPITickCount;
+        _prevAPITickCount = dwCurrentTime;
+
+        if(_prevAPITickCount == 0)
+        {
+            dwDeltaTime = 0;
+        }
+        if(dwDeltaTime > 0x0fffffff)
+        {
+            // Either wraparound or data race.
+            dwDeltaTime = 0;
+        }
+        if(dwDeltaTime > 99999)
+        {
+            dwDeltaTime = 99999;
+        }
+        sprintf (traceMessage, "(%2u:%2u:%2u:%3u |%5lu) ", systemTime.wHour,
+                 systemTime.wMinute, systemTime.wSecond,
+                 systemTime.wMilliseconds, dwDeltaTime);
+    }
+    // Messages are 22 characters.
+    return 22;
+}
+
+WebRtc_Word32 TraceWindows::AddBuildInfo(char* traceMessage) const
+{
+    // Write build date and time to the text file.
+    sprintf(traceMessage, "Build info: %s", BUILDINFO);
+    // Include NULL termination (hence + 1).
+    return static_cast<WebRtc_Word32>(strlen(traceMessage)+1);
+}
+
+WebRtc_Word32 TraceWindows::AddDateTimeInfo(char* traceMessage) const
+{
+    _prevAPITickCount = timeGetTime();
+    _prevTickCount = _prevAPITickCount;
+
+    SYSTEMTIME sysTime;
+    GetLocalTime (&sysTime);
+
+    TCHAR szDateStr[20];
+    TCHAR szTimeStr[20];
+    TCHAR *pSzDateStr = szDateStr;
+    TCHAR *pSzTimeStr = szTimeStr;
+
+    // Create date string (e.g. Apr 04 2002)
+    GetDateFormat(LOCALE_SYSTEM_DEFAULT, 0, &sysTime, TEXT("MMM dd yyyy"),
+                  szDateStr, 20);
+
+    // Create time string (e.g. 15:32:08)
+    GetTimeFormat(LOCALE_SYSTEM_DEFAULT, 0, &sysTime, TEXT("HH':'mm':'ss"),
+                  szTimeStr, 20);
+
+    sprintf(traceMessage, "Local Date: %s Local Time: %s", szDateStr,
+            szTimeStr);
+
+    // Include NULL termination (hence + 1).
+    return static_cast<WebRtc_Word32>(strlen(traceMessage)+ 1);
+}
+} // namespace webrtc
diff --git a/trunk/src/system_wrappers/source/trace_win.h b/trunk/src/system_wrappers/source/trace_win.h
new file mode 100644
index 0000000..803198e
--- /dev/null
+++ b/trunk/src/system_wrappers/source/trace_win.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_WINDOWS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_WINDOWS_H_
+
+#include "trace_impl.h"
+#include <stdio.h>
+#include <windows.h>
+
+namespace webrtc {
+class TraceWindows : public TraceImpl
+{
+public:
+    TraceWindows();
+    virtual ~TraceWindows();
+
+    virtual WebRtc_Word32 AddTime(char* traceMessage,
+                                  const TraceLevel level) const;
+
+    virtual WebRtc_Word32 AddBuildInfo(char* traceMessage) const;
+    virtual WebRtc_Word32 AddDateTimeInfo(char* traceMessage) const;
+private:
+    volatile mutable WebRtc_UWord32    _prevAPITickCount;
+    volatile mutable WebRtc_UWord32   _prevTickCount;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_WINDOWS_H_
diff --git a/trunk/src/system_wrappers/test/TestSort/TestSort.cc b/trunk/src/system_wrappers/test/TestSort/TestSort.cc
new file mode 100644
index 0000000..6846a71
--- /dev/null
+++ b/trunk/src/system_wrappers/test/TestSort/TestSort.cc
@@ -0,0 +1,265 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <algorithm>
+#include <cstring>
+
+#include "sort.h"
+#include "tick_util.h"
+
+// Excellent work polluting the global namespace Visual Studio...
+#undef max
+#undef min
+#include <limits>
+
+template<typename KeyType>
+struct LotsOfData
+{
+    KeyType key;
+    char data[64];
+};
+
+template<typename DataType>
+int Compare(const void* dataX, const void* dataY)
+{
+    // Locals must not reuse the parameter names (redeclaration error).
+    DataType valueX = (DataType)*(const DataType*)dataX;
+    DataType valueY = (DataType)*(const DataType*)dataY;
+    if (valueX > valueY)
+    {
+        return 1;
+    }
+    else if (valueX < valueY)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+template<typename DataType, typename KeyType>
+int CompareKey(const void* dataX, const void* dataY)
+{
+    KeyType keyX = ((const DataType*)dataX)->key;
+    KeyType keyY = ((const DataType*)dataY)->key;
+    if (keyX > keyY)
+    {
+        return 1;
+    }
+    else if (keyX < keyY)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+template<typename DataType>
+struct KeyLessThan
+{
+    bool operator()(const DataType &dataX, const DataType &dataY) const
+    {
+        return dataX.key < dataY.key;
+    }
+};
+
+const char* TypeEnumToString(webrtc::Type type)
+{
+    switch (type)
+    {
+        using namespace webrtc;
+        case TYPE_Word8:
+            return "Word8";
+        case TYPE_UWord8:
+            return "UWord8";
+        case TYPE_Word16:
+            return "Word16";
+        case TYPE_UWord16:
+            return "UWord16";
+        case TYPE_Word32:
+            return "Word32";
+        case TYPE_UWord32:
+            return "UWord32";
+        case TYPE_Word64:
+            return "Word64";
+        case TYPE_UWord64:
+            return "UWord64";
+        case TYPE_Float32:
+            return "Float32";
+        case TYPE_Float64:
+            return "Float64";
+        default:
+            return "Unrecognized";
+    }
+}
+
+template<typename Type>
+Type TypedRand()
+{
+    if (std::numeric_limits<Type>::is_integer)
+    {
+        double floatRand = static_cast<double>(rand()) / RAND_MAX;
+        if (std::numeric_limits<Type>::is_signed)
+        {
+            floatRand -= 0.5;
+        }
+
+        // Uniform [-max()/2, max()/2] for signed
+        //         [0, max()] for unsigned
+        return static_cast<Type>(floatRand * std::numeric_limits<Type>::max());
+    }
+    else // Floating point
+    {
+        // Uniform [-0.5, 0.5]
+        // The outer cast is to remove template warnings.
+        return static_cast<Type>((static_cast<Type>(rand()) / RAND_MAX) - 0.5);
+    }
+}
+
+template<typename KeyType>
+void RunSortTest(webrtc::Type sortType, bool keySort)
+{
+    enum { DataLength = 1000 };
+    enum { NumOfTests = 10000 };
+    KeyType key[DataLength];
+    KeyType keyRef[DataLength];
+    LotsOfData<KeyType> data[DataLength];
+    LotsOfData<KeyType> dataRef[DataLength];
+    WebRtc_Word32 retVal = 0;
+
+    if (keySort)
+    {
+        printf("Running %s KeySort() tests...\n", TypeEnumToString(sortType));
+    }
+    else
+    {
+        printf("Running %s Sort() tests...\n", TypeEnumToString(sortType));
+    }
+
+    TickInterval accTicks;
+    for (int i = 0; i < NumOfTests; i++)
+    {
+        for (int j = 0; j < DataLength; j++)
+        {
+            key[j] = TypedRand<KeyType>();
+            data[j].key = key[j];
+            // Write index to payload. We use this later for verification.
+            sprintf(data[j].data, "%d", j);
+        }
+
+        memcpy(dataRef, data, sizeof(data));
+        memcpy(keyRef, key, sizeof(key));
+
+        retVal = 0;
+        TickTime t0 = TickTime::Now();
+        if (keySort)
+        {
+            retVal = webrtc::KeySort(data, key, DataLength, sizeof(LotsOfData<KeyType>),
+                sortType);
+
+            //std::sort(data, data + DataLength, KeyLessThan<KeyType>());
+            //qsort(data, DataLength, sizeof(LotsOfData<KeyType>),
+            //    CompareKey<LotsOfData<KeyType>, KeyType>);
+        }
+        else
+        {
+            retVal = webrtc::Sort(key, DataLength, sortType);
+
+            //std::sort(key, key + DataLength);
+            //qsort(key, DataLength, sizeof(KeyType), Compare<KeyType>);
+        }
+        TickTime t1 = TickTime::Now();
+        accTicks += (t1 - t0);
+
+        if (retVal != 0)
+        {
+            printf("Test failed at iteration %d:\n", i);
+            printf("Sort returned an error. ");
+            printf("It likely does not support the requested type\nExiting...\n");
+            exit(0);
+        }
+
+        // Reference sort.
+        if (!keySort)
+        {
+            std::sort(keyRef, keyRef + DataLength);
+        }
+
+        if (keySort)
+        {
+            for (int j = 0; j < DataLength - 1; j++)
+            {
+                if (data[j].key > data[j + 1].key)
+                {
+                    printf("Test failed at iteration %d:\n", i);
+                    printf("Keys are not monotonically increasing\nExiting...\n");
+                    exit(0);
+                }
+
+                int index = atoi(data[j].data);
+                if (index < 0 || index >= DataLength || data[j].key != dataRef[index].key)
+                {
+                    printf("Test failed at iteration %d:\n", i);
+                    printf("Payload data is corrupt\nExiting...\n");
+                    exit(0);
+                }
+            }
+        }
+        else
+        {
+            for (int j = 0; j < DataLength - 1; j++)
+            {
+                if (key[j] > key[j + 1])
+                {
+                    printf("Test failed at iteration %d:\n", i);
+                    printf("Data is not monotonically increasing\nExiting...\n");
+                    exit(0);
+                }
+            }
+
+            if (memcmp(key, keyRef, sizeof(key)) != 0)
+            {
+                printf("Test failed at iteration %d:\n", i);
+                printf("Sort data differs from std::sort reference\nExiting...\n");
+                exit(0);
+            }
+        }
+    }
+
+    printf("Compliance test passed over %d iterations\n", NumOfTests);
+
+    WebRtc_Word64 executeTime = accTicks.Milliseconds();
+    printf("Execute time: %.2f s\n\n", (float)executeTime / 1000);
+}
+
+int main()
+{
+    // Seed rand().
+    srand(42);
+    bool keySort = false;
+    for (int i = 0; i < 2; i++) {
+        RunSortTest<WebRtc_Word8>(webrtc::TYPE_Word8, keySort);
+        RunSortTest<WebRtc_UWord8>(webrtc::TYPE_UWord8, keySort);
+        RunSortTest<WebRtc_Word16>(webrtc::TYPE_Word16, keySort);
+        RunSortTest<WebRtc_UWord16>(webrtc::TYPE_UWord16, keySort);
+        RunSortTest<WebRtc_Word32>(webrtc::TYPE_Word32, keySort);
+        RunSortTest<WebRtc_UWord32>(webrtc::TYPE_UWord32, keySort);
+        RunSortTest<WebRtc_Word64>(webrtc::TYPE_Word64, keySort);
+        RunSortTest<WebRtc_UWord64>(webrtc::TYPE_UWord64, keySort);
+        RunSortTest<float>(webrtc::TYPE_Float32, keySort);
+        RunSortTest<double>(webrtc::TYPE_Float64, keySort);
+
+        keySort = !keySort;
+    }
+
+    printf("All tests passed\n");
+
+    return 0;
+}
diff --git a/trunk/src/system_wrappers/test/list/list.cc b/trunk/src/system_wrappers/test/list/list.cc
new file mode 100644
index 0000000..5c4f0b9
--- /dev/null
+++ b/trunk/src/system_wrappers/test/list/list.cc
@@ -0,0 +1,174 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "list_wrapper.h"
+
+const int kNumberOfElements = 10;
+
// Aborts the whole test run when |failed| is true; otherwise a no-op.
//
// Prints a failure notice, waits for the user to press enter (so the message
// stays visible when run from a console window), then terminates the process.
// Exits with a non-zero status so that scripts and build bots can detect the
// failure; the previous code called exit(0), which made failed runs
// indistinguishable from successful ones.
void FailTest(bool failed)
{
    if (failed)
    {
        printf("Test failed!\n");
        printf("Press enter to continue:");
        getchar();
        exit(EXIT_FAILURE);
    }
}
+
+int GetStoredIntegerValue(ListItem* list_item)
+{
+    void* list_item_pointer = list_item->GetItem();
+    if (list_item_pointer != NULL)
+    {
+        return *(reinterpret_cast<int*>(list_item_pointer));
+    }
+    return static_cast<int>(list_item->GetUnsignedItem());
+}
+
+void PrintList(ListWrapper& list)
+{
+    ListItem* list_item = list.First();
+    printf("List: ");
+    while (list_item != NULL)
+    {
+        int item_value = GetStoredIntegerValue(list_item);
+        FailTest(item_value < 0);
+        printf(" %d",item_value);
+        list_item = list.Next(list_item);
+    }
+    printf("\n");
+}
+
+// The list should always be in ascending order
+void ListSanity(ListWrapper& list)
+{
+    if(list.Empty())
+    {
+      return;
+    }
+    ListItem* item_iter = list.First();
+    // Fake a previous value for the first iteration
+    int previous_value = GetStoredIntegerValue(item_iter) - 1;
+    while (item_iter != NULL)
+    {
+        const int value = GetStoredIntegerValue(item_iter);
+        FailTest(value != previous_value + 1);
+        previous_value = value;
+        item_iter = list.Next(item_iter);
+    }
+}
+
// Exercises the ListWrapper API end to end. Elements 0..9 are pushed in a
// mix of pointer items (middle elements) and unsigned items (the two at each
// end), so both storage forms of ListItem get covered.
int main(int /*argc*/, char* /*argv*/[])
{
    printf("List Test:\n");
    int element_array[kNumberOfElements];
    for (int i = 0; i < kNumberOfElements; i++)
    {
        element_array[i] = i;
    }
    // Test PushBack 1: elements 2..kNumberOfElements-3 stored as pointers.
    ListWrapper test_list;
    for (int i = 2; i < kNumberOfElements - 2; i++)
    {
        FailTest(test_list.PushBack((void*)&element_array[i]) != 0);
    }
    // Test PushBack 2: the last two elements stored as unsigned values.
    FailTest(test_list.PushBack(element_array[kNumberOfElements - 2]) != 0);
    FailTest(test_list.PushBack(element_array[kNumberOfElements - 1]) != 0);
    // Test PushFront 2: element 1 stored as an unsigned value.
    FailTest(test_list.PushFront(element_array[1]) != 0);
    // Test PushFront 1: element 0 stored as a pointer.
    FailTest(test_list.PushFront((void*)&element_array[0]) != 0);
    // Test GetSize: all kNumberOfElements elements are now in the list.
    FailTest(test_list.GetSize() != kNumberOfElements);
    PrintList(test_list);
    // Test PopFront (removes element 0).
    FailTest(test_list.PopFront() != 0);
    // Test PopBack (removes element kNumberOfElements-1).
    FailTest(test_list.PopBack() != 0);
    // Test GetSize
    FailTest(test_list.GetSize() != kNumberOfElements - 2);
    // Test Empty: list still has elements, so Empty() must be false.
    FailTest(test_list.Empty());
    // Test First
    ListItem* first_item = test_list.First();
    FailTest(first_item == NULL);
    // Test Last
    ListItem* last_item = test_list.Last();
    FailTest(last_item == NULL);
    // Test Next: NULL and the last item must both yield no successor.
    ListItem* second_item = test_list.Next(first_item);
    FailTest(second_item == NULL);
    FailTest(test_list.Next(last_item) != NULL);
    FailTest(test_list.Next(NULL) != NULL);
    // Test Previous: NULL and the first item must both yield no predecessor.
    ListItem* second_to_last_item = test_list.Previous(last_item);
    FailTest(second_to_last_item == NULL);
    FailTest(test_list.Previous(first_item) != NULL);
    FailTest(test_list.Previous(NULL) != NULL);
    // Test GetUnsignedItem: the current last element was pushed as an
    // unsigned value, so GetItem() must be NULL for it.
    FailTest(last_item->GetUnsignedItem() !=
             kNumberOfElements - 2);
    FailTest(last_item->GetItem() !=
             NULL);
    // Test GetItem: the second-to-last element was pushed as a pointer, so
    // its unsigned payload must be 0.
    FailTest(GetStoredIntegerValue(second_to_last_item) !=
             kNumberOfElements - 3);
    FailTest(second_to_last_item->GetUnsignedItem() != 0);
    // Pop last and first since they are pushed as unsigned items.
    FailTest(test_list.PopFront() != 0);
    FailTest(test_list.PopBack() != 0);
    // Test Insert. Please note that old iterators are no longer valid at
    // this point. Insert/InsertBefore with a NULL argument while the list is
    // non-empty is expected to fail.
    // NOTE(review): items whose insertion is expected to fail are never
    // deleted; this leaks a few ListItems, which is tolerated in this test.
    ListItem* insert_item_last = new ListItem(reinterpret_cast<void*>(&element_array[kNumberOfElements - 2]));
    FailTest(test_list.Insert(test_list.Last(),insert_item_last) != 0);
    FailTest(test_list.Insert(NULL,insert_item_last) == 0);
    ListItem* insert_item_last2 = new ListItem(reinterpret_cast<void*>(&element_array[kNumberOfElements - 2]));
    FailTest(test_list.Insert(insert_item_last2,NULL) == 0);
    // Test InsertBefore
    ListItem* insert_item_first = new ListItem(reinterpret_cast<void*>(&element_array[1]));
    FailTest(test_list.InsertBefore(test_list.First(),insert_item_first) != 0);
    FailTest(test_list.InsertBefore(NULL,insert_item_first) == 0);
    ListItem* insert_item_first2 = new ListItem(reinterpret_cast<void*>(&element_array[1]));
    FailTest(test_list.InsertBefore(insert_item_first2,NULL) == 0);
    PrintList(test_list);
    ListSanity(test_list);
    // Erase the whole list; the counter guards against an infinite loop if
    // PopFront never starts failing.
    int counter = 0;
    while (test_list.PopFront() == 0)
    {
        FailTest(counter++ > kNumberOfElements);
    }
    PrintList(test_list);
    // Test APIs when list is empty: accessors return NULL, pops return -1.
    FailTest(test_list.GetSize() != 0);
    FailTest(test_list.PopFront() != -1);
    FailTest(test_list.PopBack() != -1);
    FailTest(!test_list.Empty());
    FailTest(test_list.First() != NULL);
    FailTest(test_list.Last() != NULL);
    FailTest(test_list.Next(NULL) != NULL);
    FailTest(test_list.Previous(NULL) != NULL);
    FailTest(test_list.Erase(NULL) != -1);
    // Test Insert APIs when list is empty: inserting with a NULL position
    // must succeed and make the list non-empty.
    ListItem* new_item = new ListItem(reinterpret_cast<void*>(&element_array[0]));
    FailTest(test_list.Insert(NULL,new_item) != 0);
    FailTest(test_list.Empty());
    FailTest(test_list.PopFront() != 0);
    ListItem* new_item2 = new ListItem(reinterpret_cast<void*>(&element_array[0]));
    FailTest(test_list.InsertBefore(NULL,new_item2) != 0);
    FailTest(test_list.Empty());

    printf("Tests passed successfully!\n");
}
diff --git a/trunk/src/system_wrappers/test/map/map.cc b/trunk/src/system_wrappers/test/map/map.cc
new file mode 100644
index 0000000..8a0d3e3
--- /dev/null
+++ b/trunk/src/system_wrappers/test/map/map.cc
@@ -0,0 +1,112 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "map_wrapper.h"
+
+const int kNumberOfElements = 10;
+
// Aborts the whole test run when |failed| is true; otherwise a no-op.
//
// Prints a failure notice, waits for the user to press enter (so the message
// stays visible when run from a console window), then terminates the process.
// Exits with a non-zero status so that scripts and build bots can detect the
// failure; the previous code called exit(0), which made failed runs
// indistinguishable from successful ones.
void FailTest(bool failed)
{
    if (failed)
    {
        printf("Test failed!\n");
        printf("Press enter to continue:");
        getchar();
        exit(EXIT_FAILURE);
    }
}
+
+int GetStoredIntegerValue(MapItem* map_item)
+{
+    void* map_item_pointer = map_item->GetItem();
+    if (map_item_pointer != NULL)
+    {
+        return *(reinterpret_cast<int*>(map_item_pointer));
+    }
+    return static_cast<int>(map_item->GetUnsignedId());
+}
+
+void PrintMap(MapWrapper& map)
+{
+    MapItem* map_item = map.First();
+    printf("Map: ");
+    while (map_item != NULL)
+    {
+      int item_value = GetStoredIntegerValue(map_item);
+        FailTest(item_value < 0);
+        printf(" %d",item_value);
+        map_item = map.Next(map_item);
+    }
+    printf("\n");
+}
+
// Exercises the MapWrapper API. Ids 0..9 are inserted mapping to pointers at
// matching int values, then ids 1 and 2 are erased, so the remaining ids are
// {0, 3, 4, ..., 9}; the expected values below follow from that.
int main(int /*argc*/, char* /*argv*/[])
{
    int element_array[kNumberOfElements];
    for (int i = 0; i < kNumberOfElements; i++)
    {
        element_array[i] = i;
    }
    // Test insert
    // NOTE(review): Insert's return value is not checked here, unlike every
    // other call in this test — presumably an oversight; confirm.
    MapWrapper test_map;
    for (int i = 0; i < kNumberOfElements; i++)
    {
        test_map.Insert(i,(void*)&element_array[i]);
    }
    // Test Erase1: erase by item pointer; erasing NULL must fail with -1.
    MapItem* remove_item = test_map.Find(2);
    FailTest(remove_item == NULL);
    FailTest(test_map.Erase(remove_item) != 0);
    FailTest(test_map.Find(2) != NULL);
    remove_item = NULL;
    FailTest(test_map.Erase(remove_item) != -1);
    // Test Erase2: erase by id; erasing an already-absent id must fail.
    FailTest(test_map.Erase(1) != 0);
    FailTest(test_map.Find(1) != NULL);
    FailTest(test_map.Erase(1) != -1);
    // Test Size: two of the ten entries have been erased.
    FailTest(test_map.Size() != kNumberOfElements - 2);
    PrintMap(test_map);
    // Test First: smallest remaining id is 0.
    MapItem* first_item = test_map.First();
    FailTest(first_item == NULL);
    FailTest(GetStoredIntegerValue(first_item) != 0);
    // Test Last: largest remaining id is 9.
    MapItem* last_item = test_map.Last();
    FailTest(last_item == NULL);
    FailTest(GetStoredIntegerValue(last_item) != 9);
    // Test Next: after id 0 comes id 3 (ids 1 and 2 were erased); the last
    // item has no successor.
    MapItem* second_item = test_map.Next(first_item);
    FailTest(second_item == NULL);
    FailTest(GetStoredIntegerValue(second_item) != 3);
    FailTest(test_map.Next(last_item) != NULL);
    // Test Previous: before id 9 comes id 8; the first item has no
    // predecessor.
    MapItem* second_to_last_item = test_map.Previous(last_item);
    FailTest(second_to_last_item == NULL);
    FailTest(GetStoredIntegerValue(second_to_last_item) != 8);
    FailTest(test_map.Previous(first_item) != NULL);
    // Test Find (only improper usage untested): a never-inserted id must not
    // be found.
    FailTest(test_map.Find(kNumberOfElements + 2) != NULL);
    // Test GetId: every entry maps id i to a pointer at value i, so the
    // stored value and the id must agree; the unsigned id mirrors the id.
    FailTest(*(reinterpret_cast<int*>(second_to_last_item->GetItem())) !=
         second_to_last_item->GetId());
    FailTest(second_to_last_item->GetUnsignedId() !=
             static_cast<unsigned int>(second_to_last_item->GetId()));
    // Test SetItem: repointing the last item must change the value read back.
    int swapped_item = kNumberOfElements;
    last_item->SetItem(reinterpret_cast<void*>(&swapped_item));
    FailTest(GetStoredIntegerValue(last_item) !=
             swapped_item);

    printf("Tests passed successfully!\n");
}
diff --git a/trunk/src/typedefs.h b/trunk/src/typedefs.h
new file mode 100644
index 0000000..ba87309
--- /dev/null
+++ b/trunk/src/typedefs.h
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains platform-specific typedefs and defines.
+
#ifndef WEBRTC_TYPEDEFS_H_
#define WEBRTC_TYPEDEFS_H_

// Reserved words definitions
// TODO(andrew): Look at removing these.
#define WEBRTC_EXTERN extern
#define G_CONST const
#define WEBRTC_INLINE extern __inline

// Define WebRTC preprocessor identifiers based on the current build platform.
// TODO(andrew): Clean these up. We can probably remove everything in this
// block.
//   - TARGET_MAC_INTEL and TARGET_MAC aren't used anywhere.
//   - In the few places where TARGET_PC is used, it should be replaced by
//     something more specific.
//   - Do we really support PowerPC? Probably not. Remove WEBRTC_MAC_INTEL
//     from build/common.gypi as well.
#if defined(WIN32)
    // Windows & Windows Mobile.
    #if !defined(WEBRTC_TARGET_PC)
        #define WEBRTC_TARGET_PC
    #endif
#elif defined(__APPLE__)
    // Mac OS X.
    #if defined(__LITTLE_ENDIAN__ )
        #if !defined(WEBRTC_TARGET_MAC_INTEL)
            #define WEBRTC_TARGET_MAC_INTEL
        #endif
    #else
        // Big-endian Apple targets (PowerPC-era).
        #if !defined(WEBRTC_TARGET_MAC)
            #define WEBRTC_TARGET_MAC
        #endif
    #endif
#else
    // Linux etc.
    #if !defined(WEBRTC_TARGET_PC)
        #define WEBRTC_TARGET_PC
    #endif
#endif

// Derived from Chromium's build/build_config.h
// Processor architecture detection.  For more info on what's defined, see:
//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
//   http://www.agner.org/optimize/calling_conventions.pdf
//   or with gcc, run: "echo | gcc -E -dM -"
// TODO(andrew): replace WEBRTC_LITTLE_ENDIAN with WEBRTC_ARCH_LITTLE_ENDIAN?
#if defined(_M_X64) || defined(__x86_64__)
#define WEBRTC_ARCH_X86_FAMILY
#define WEBRTC_ARCH_X86_64
#define WEBRTC_ARCH_64_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(_M_IX86) || defined(__i386__)
#define WEBRTC_ARCH_X86_FAMILY
#define WEBRTC_ARCH_X86
#define WEBRTC_ARCH_32_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(__ARMEL__)
// TODO(andrew): We'd prefer to control platform defines here, but this is
// currently provided by the Android makefiles. Commented to avoid duplicate
// definition warnings.
//#define WEBRTC_ARCH_ARM
// TODO(andrew): Chromium uses the following two defines. Should we switch?
//#define WEBRTC_ARCH_ARM_FAMILY
//#define WEBRTC_ARCH_ARMEL
#define WEBRTC_ARCH_32_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#else
#error Please add support for your architecture in typedefs.h
#endif

// NOTE(review): this treats every MSVC build as SSE2-capable. That does not
// hold for non-x86 MSVC targets (e.g. ARM) — confirm before relying on it.
#if defined(__SSE2__) || defined(_MSC_VER)
#define WEBRTC_USE_SSE2
#endif

#if defined(WEBRTC_TARGET_PC)

#if !defined(_MSC_VER)
  #include <stdint.h>
#else
    // Define C99 equivalent types.
    // Since MSVC doesn't include these headers, we have to write our own
    // version to provide a compatibility layer between MSVC and the WebRTC
    // headers.
    typedef signed char         int8_t;
    typedef signed short        int16_t;
    typedef signed int          int32_t;
    typedef signed long long    int64_t;
    typedef unsigned char       uint8_t;
    typedef unsigned short      uint16_t;
    typedef unsigned int        uint32_t;
    typedef unsigned long long  uint64_t;
#endif

#if defined(WIN32)
    typedef __int64             WebRtc_Word64;
    typedef unsigned __int64    WebRtc_UWord64;
#else
    typedef int64_t             WebRtc_Word64;
    typedef uint64_t            WebRtc_UWord64;
#endif
    typedef int32_t             WebRtc_Word32;
    typedef uint32_t            WebRtc_UWord32;
    typedef int16_t             WebRtc_Word16;
    typedef uint16_t            WebRtc_UWord16;
    // NOTE(review): WebRtc_Word8 is plain char, whose signedness is
    // implementation-defined; int8_t would make the intent explicit. Confirm
    // that callers do not rely on char-specific behavior before changing it.
    typedef char                WebRtc_Word8;
    typedef uint8_t             WebRtc_UWord8;

    // Define endian for the platform
    #define WEBRTC_LITTLE_ENDIAN

#elif defined(WEBRTC_TARGET_MAC_INTEL)
    #include <stdint.h>

    typedef int64_t             WebRtc_Word64;
    typedef uint64_t            WebRtc_UWord64;
    typedef int32_t             WebRtc_Word32;
    typedef uint32_t            WebRtc_UWord32;
    typedef int16_t             WebRtc_Word16;
    // NOTE(review): same implementation-defined signedness caveat as above.
    typedef char                WebRtc_Word8;
    typedef uint16_t            WebRtc_UWord16;
    typedef uint8_t             WebRtc_UWord8;

    // Define endian for the platform
    #define WEBRTC_LITTLE_ENDIAN

#else
    #error "No platform defined for WebRTC type definitions (typedefs.h)"
#endif

#endif  // WEBRTC_TYPEDEFS_H_
diff --git a/trunk/src/video_engine/Android.mk b/trunk/src/video_engine/Android.mk
new file mode 100644
index 0000000..164c289
--- /dev/null
+++ b/trunk/src/video_engine/Android.mk
@@ -0,0 +1,94 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# Shared WebRTC Android build settings (provides MY_WEBRTC_COMMON_DEFS).
include $(LOCAL_PATH)/../../android-webrtc.mk

LOCAL_ARM_MODE := arm
LOCAL_MODULE := libwebrtc_vie_core
LOCAL_MODULE_TAGS := optional
LOCAL_CPP_EXTENSION := .cc
LOCAL_SRC_FILES := \
    vie_base_impl.cc \
    vie_capture_impl.cc \
    vie_codec_impl.cc \
    vie_encryption_impl.cc \
    vie_external_codec_impl.cc \
    vie_file_impl.cc \
    vie_image_process_impl.cc \
    vie_impl.cc \
    vie_network_impl.cc \
    vie_ref_count.cc \
    vie_render_impl.cc \
    vie_rtp_rtcp_impl.cc \
    vie_shared_data.cc \
    vie_capturer.cc \
    vie_channel.cc \
    vie_channel_manager.cc \
    vie_encoder.cc \
    vie_file_image.cc \
    vie_file_player.cc \
    vie_file_recorder.cc \
    vie_frame_provider_base.cc \
    vie_input_manager.cc \
    vie_manager_base.cc \
    vie_performance_monitor.cc \
    vie_receiver.cc \
    vie_remb.cc \
    vie_renderer.cc \
    vie_render_manager.cc \
    vie_sender.cc \
    vie_sync_module.cc

# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
    $(MY_WEBRTC_COMMON_DEFS)

LOCAL_C_INCLUDES := \
    $(LOCAL_PATH)/include \
    $(LOCAL_PATH)/.. \
    $(LOCAL_PATH)/../common_video/interface \
    $(LOCAL_PATH)/../common_video/jpeg/main/interface \
    $(LOCAL_PATH)/../common_video/vplib/main/interface \
    $(LOCAL_PATH)/../modules/interface \
    $(LOCAL_PATH)/../modules/audio_coding/main/interface \
    $(LOCAL_PATH)/../modules/media_file/interface \
    $(LOCAL_PATH)/../modules/rtp_rtcp/interface \
    $(LOCAL_PATH)/../modules/udp_transport/interface \
    $(LOCAL_PATH)/../modules/utility/interface \
    $(LOCAL_PATH)/../modules/video_capture/main/interface \
    $(LOCAL_PATH)/../modules/video_capture/main/source \
    $(LOCAL_PATH)/../modules/video_capture/main/source/Android \
    $(LOCAL_PATH)/../modules/video_coding/codecs/interface \
    $(LOCAL_PATH)/../modules/video_coding/main/interface \
    $(LOCAL_PATH)/../modules/video_mixer/main/interface \
    $(LOCAL_PATH)/../modules/video_processing/main/interface \
    $(LOCAL_PATH)/../modules/video_render/main/interface \
    $(LOCAL_PATH)/../system_wrappers/interface \
    $(LOCAL_PATH)/../voice_engine/main/interface

# libdl is listed once here; a previous conditional appended it a second time
# for non-simulator targets, which was redundant (the final library set is
# unchanged with the duplicate removed).
LOCAL_SHARED_LIBRARIES := \
    libcutils \
    libdl \
    libstlport

# Host-side simulator builds on Linux link directly against the host's
# libdl/libpthread.
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
LOCAL_LDLIBS += -ldl -lpthread
endif

# Full-source Android builds pull in STLport this way; NDK builds configure
# STLport through the application's APP_STL instead.
ifndef NDK_ROOT
include external/stlport/libstlport.mk
endif
include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/video_engine/OWNERS b/trunk/src/video_engine/OWNERS
new file mode 100644
index 0000000..ac607bd
--- /dev/null
+++ b/trunk/src/video_engine/OWNERS
@@ -0,0 +1,4 @@
+mflodman@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
+mallinath@webrtc.org
diff --git a/trunk/src/video_engine/include/vie_base.h b/trunk/src/video_engine/include/vie_base.h
new file mode 100644
index 0000000..2757a78
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_base.h
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Creating and deleting VideoEngine instances.
+//  - Creating and deleting channels.
+//  - Connect a video channel with a corresponding voice channel for audio/video
+//    synchronization.
+//  - Start and stop sending and receiving.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
// Class used for all callbacks from ViEBase.
class WEBRTC_DLLEXPORT ViEBaseObserver {
 public:
  // This method will be called periodically if the average system CPU usage
  // exceeds 75%.
  virtual void PerformanceAlarm(const unsigned int cpu_load) = 0;

 protected:
  // Protected destructor: observers are owned by the VideoEngine user and
  // are never deleted through this interface.
  virtual ~ViEBaseObserver() {}
};
+
// Entry point of the VideoEngine API. All members are static; instances are
// obtained via Create() and released via Delete().
class WEBRTC_DLLEXPORT VideoEngine {
 public:
  // Creates a VideoEngine object, which can then be used to acquire sub-APIs.
  static VideoEngine* Create();

  // Deletes a VideoEngine instance. Takes the pointer by reference so the
  // caller's pointer can be cleared.
  static bool Delete(VideoEngine*& video_engine);

  // Specifies the amount and type of trace information, which will be created
  // by the VideoEngine.
  static int SetTraceFilter(const unsigned int filter);

  // Sets the name of the trace file and enables non-encrypted trace messages.
  static int SetTraceFile(const char* file_nameUTF8,
                          const bool add_file_counter = false);

  // Installs the TraceCallback implementation to ensure that the VideoEngine
  // user receives callbacks for generated trace messages.
  static int SetTraceCallback(TraceCallback* callback);

  // Android specific.
  // Provides VideoEngine with pointers to objects supplied by the Java
  // applications JNI interface.
  static int SetAndroidObjects(void* java_vm, void* java_context);

 protected:
  // Construction and destruction only through Create()/Delete().
  VideoEngine() {}
  virtual ~VideoEngine() {}
};
+
// Base sub-API: channel lifetime, audio/video pairing and send/receive
// control.
class WEBRTC_DLLEXPORT ViEBase {
 public:
  // Factory for the ViEBase sub-API and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViEBase* GetInterface(VideoEngine* video_engine);

  // Releases the ViEBase sub-API and decreases an internal reference counter.
  // Returns the new reference count. This value should be zero
  // for all sub-APIs before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;

  // Initiates all common parts of the VideoEngine.
  virtual int Init() = 0;

  // Connects a VideoEngine instance to a VoiceEngine instance for audio video
  // synchronization.
  virtual int SetVoiceEngine(VoiceEngine* voice_engine) = 0;

  // Creates a new channel, either with a new encoder instance or by sharing
  // encoder instance with an already created channel. The new channel id is
  // returned through |video_channel|.
  virtual int CreateChannel(int& video_channel) = 0;
  virtual int CreateChannel(int& video_channel, int original_channel) = 0;

  // Deletes an existing channel and releases the utilized resources.
  virtual int DeleteChannel(const int video_channel) = 0;

  // Specifies the VoiceEngine and VideoEngine channel pair to use for
  // audio/video synchronization.
  virtual int ConnectAudioChannel(const int video_channel,
                                  const int audio_channel) = 0;

  // Disconnects a previously paired VideoEngine and VoiceEngine channel pair.
  virtual int DisconnectAudioChannel(const int video_channel) = 0;

  // Starts sending packets to an already specified IP address and port number
  // for a specified channel.
  virtual int StartSend(const int video_channel) = 0;

  // Stops packets from being sent for a specified channel.
  virtual int StopSend(const int video_channel) = 0;

  // Prepares VideoEngine for receiving packets on the specified channel.
  virtual int StartReceive(const int video_channel) = 0;

  // Stops receiving incoming RTP and RTCP packets on the specified channel.
  virtual int StopReceive(const int video_channel) = 0;

  // Registers an instance of a user implementation of the ViEBase
  // observer. The observer must outlive its registration.
  virtual int RegisterObserver(ViEBaseObserver& observer) = 0;

  // Removes an already registered instance of ViEBaseObserver.
  virtual int DeregisterObserver() = 0;

  // Retrieves the version information for VideoEngine and its components.
  // |version| is a caller-provided buffer of at least 1024 bytes.
  virtual int GetVersion(char version[1024]) = 0;

  // Returns the last VideoEngine error code.
  virtual int LastError() = 0;

 protected:
  // Lifetime is managed through GetInterface()/Release().
  ViEBase() {}
  virtual ~ViEBase() {}
};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
diff --git a/trunk/src/video_engine/include/vie_capture.h b/trunk/src/video_engine/include/vie_capture.h
new file mode 100644
index 0000000..45d62e5
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_capture.h
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Allocating capture devices.
+//  - Connect a capture device with one or more channels.
+//  - Start and stop capture devices.
+//  - Getting capture device capabilities.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+class VideoCaptureModule;
+
+// This structure describes one set of the supported capabilities for a capture
+// device.
+struct CaptureCapability {
+  unsigned int width;
+  unsigned int height;
+  unsigned int maxFPS;
+  RawVideoType rawType;
+  VideoCodecType codecType;
+  unsigned int expectedCaptureDelay;
+  bool interlaced;
+  CaptureCapability() {
+    width = 0;
+    height = 0;
+    maxFPS = 0;
+    rawType = kVideoI420;
+    codecType = kVideoCodecUnknown;
+    expectedCaptureDelay = 0;
+    interlaced = false;
+  }
+};
+
// This enumerator tells the current brightness alarm mode.
enum Brightness {
  Normal = 0,  // Image brightness is within normal range.
  Bright = 1,  // Image is too bright.
  Dark = 2     // Image is too dark.
};

// This enumerator describes the capture alarm mode.
enum CaptureAlarm {
  AlarmRaised = 0,   // Capture device stopped delivering frames.
  AlarmCleared = 1   // Frame delivery has resumed.
};

// Clockwise rotation, in degrees, applied to captured frames; values match
// the rotation angle numerically.
enum RotateCapturedFrame {
  RotateCapturedFrame_0 = 0,
  RotateCapturedFrame_90 = 90,
  RotateCapturedFrame_180 = 180,
  RotateCapturedFrame_270 = 270
};
+
// Plain-data description of an externally owned I420 frame. All members are
// zero/NULL after default construction; the caller fills them in before
// passing the struct to IncomingFrameI420().
struct ViEVideoFrameI420 {
  ViEVideoFrameI420()
      : y_plane(NULL),
        u_plane(NULL),
        v_plane(NULL),
        y_pitch(0),
        u_pitch(0),
        v_pitch(0),
        width(0),
        height(0) {
  }

  // Pointers to the three I420 planes; not owned by this struct.
  unsigned char* y_plane;
  unsigned char* u_plane;
  unsigned char* v_plane;

  // Per-plane stride in bytes.
  int y_pitch;
  int u_pitch;
  int v_pitch;

  // Frame dimensions in pixels.
  unsigned short width;
  unsigned short height;
};
+
// This class declares an abstract interface to be used when implementing
// a user-defined capture device. This interface is not meant to be
// implemented by the user. Instead, the user should call AllocateCaptureDevice
// in the ViECapture interface, which will create a suitable implementation.
// The user should then call IncomingFrame in this interface to deliver
// captured frames to the system.
class WEBRTC_DLLEXPORT ViEExternalCapture {
 public:
  ViEExternalCapture() {}
  virtual ~ViEExternalCapture() {}

  // This method is called by the user to deliver a new captured frame to
  // VideoEngine. |capture_time| of 0 means "now".
  // NOTE(review): return convention (presumably 0 on success, -1 on error)
  // is not stated here — confirm against the implementation.
  virtual int IncomingFrame(unsigned char* video_frame,
                            unsigned int video_frame_length,
                            unsigned short width,
                            unsigned short height,
                            RawVideoType video_type,
                            unsigned long long capture_time = 0) = 0;

  // This method is specifically for delivering a new captured I420 frame to
  // VideoEngine.
  virtual int IncomingFrameI420(
      const ViEVideoFrameI420& video_frame,
      unsigned long long capture_time = 0) = 0;
};
+
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViECaptureObserver {
 public:
  // This method is called if a bright or dark captured image is detected.
  virtual void BrightnessAlarm(const int capture_id,
                               const Brightness brightness) = 0;

  // This method is called periodically telling the capture device frame rate.
  virtual void CapturedFrameRate(const int capture_id,
                                 const unsigned char frame_rate) = 0;

  // This method is called if the capture device stops delivering images to
  // VideoEngine.
  virtual void NoPictureAlarm(const int capture_id,
                              const CaptureAlarm alarm) = 0;

 protected:
  // Protected destructor: observers are owned by the user, never deleted
  // through this interface.
  virtual ~ViECaptureObserver() {}
};
+
// Capture sub-API: enumerating, allocating, configuring and running capture
// devices, and connecting them to channels.
// NOTE(review): parameter naming mixes camelCase (unique_idUTF8) and
// snake_case (unique_id_utf8) — presumably mid-rename; harmless but
// inconsistent.
class WEBRTC_DLLEXPORT ViECapture {
 public:
  // Factory for the ViECapture sub-API and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViECapture* GetInterface(VideoEngine* video_engine);

  // Releases the ViECapture sub-API and decreases an internal reference
  // counter.
  // Returns the new reference count. This value should be zero
  // for all sub-APIs before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;

  // Gets the number of available capture devices.
  virtual int NumberOfCaptureDevices() = 0;

  // Gets the name and unique id of a capture device, written into the
  // caller-provided buffers of the given lengths.
  virtual int GetCaptureDevice(unsigned int list_number,
                               char* device_nameUTF8,
                               const unsigned int device_nameUTF8Length,
                               char* unique_idUTF8,
                               const unsigned int unique_idUTF8Length) = 0;

  // Allocates a capture device to be used in VideoEngine; the allocated id
  // is returned through |capture_id|.
  virtual int AllocateCaptureDevice(const char* unique_idUTF8,
                                    const unsigned int unique_idUTF8Length,
                                    int& capture_id) = 0;

  // Registers an external capture device to be used in VideoEngine.
  virtual int AllocateExternalCaptureDevice(
      int& capture_id,
      ViEExternalCapture *&external_capture) = 0;

  // Use capture device using external capture module.
  virtual int AllocateCaptureDevice(VideoCaptureModule& capture_module,
                                    int& capture_id) = 0;

  // Releases a capture device and makes it available for other applications.
  virtual int ReleaseCaptureDevice(const int capture_id) = 0;

  // This function connects a capture device with a channel. Multiple channels
  // can be connected to the same capture device.
  virtual int ConnectCaptureDevice(const int capture_id,
                                   const int video_channel) = 0;

  // Disconnects a capture device as input for a specified channel.
  virtual int DisconnectCaptureDevice(const int video_channel) = 0;

  // Makes a capture device start capturing video frames.
  virtual int StartCapture(
      const int capture_id,
      const CaptureCapability& capture_capability = CaptureCapability()) = 0;

  // Stops a started capture device from capturing video frames.
  virtual int StopCapture(const int capture_id) = 0;

  // Rotates captured frames before encoding and sending.
  // Used on mobile devices with rotated cameras.
  virtual int SetRotateCapturedFrames(const int capture_id,
                                      const RotateCapturedFrame rotation) = 0;

  // This function sets the expected delay from when a video frame is captured
  // to when that frame is delivered to VideoEngine.
  virtual int SetCaptureDelay(const int capture_id,
                              const unsigned int capture_delay_ms) = 0;

  // Returns the number of sets of capture capabilities the capture device
  // supports.
  virtual int NumberOfCapabilities(
      const char* unique_id_utf8,
      const unsigned int unique_id_utf8_length) = 0;

  // Gets a set of capture capabilities for a specified capture device.
  virtual int GetCaptureCapability(const char* unique_id_utf8,
                                   const unsigned int unique_id_utf8_length,
                                   const unsigned int capability_number,
                                   CaptureCapability& capability) = 0;

  // Displays the capture device property dialog box for the specified capture
  // device. Windows only.
  virtual int ShowCaptureSettingsDialogBox(
      const char* unique_idUTF8,
      const unsigned int unique_id_utf8_length,
      const char* dialog_title,
      void* parent_window = NULL,
      const unsigned int x = 200,
      const unsigned int y = 200) = 0;

  // Gets the clockwise angle the frames from the camera must be rotated in
  // order to display the frames correctly if the display is rotated in its
  // natural orientation.
  virtual int GetOrientation(const char* unique_id_utf8,
                             RotateCapturedFrame& orientation) = 0;

  // Enables brightness alarm detection and the brightness alarm callback.
  virtual int EnableBrightnessAlarm(const int capture_id,
                                    const bool enable) = 0;

  // Registers an instance of a user implementation of the ViECaptureObserver.
  // The observer must outlive its registration.
  virtual int RegisterObserver(const int capture_id,
                               ViECaptureObserver& observer) = 0;

  // Removes an already registered instance of ViECaptureObserver.
  virtual int DeregisterObserver(const int capture_id) = 0;

 protected:
  // Lifetime is managed through GetInterface()/Release().
  ViECapture() {}
  virtual ~ViECapture() {}
};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
diff --git a/trunk/src/video_engine/include/vie_codec.h b/trunk/src/video_engine/include/vie_codec.h
new file mode 100644
index 0000000..0535038
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_codec.h
@@ -0,0 +1,176 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Setting send and receive codecs.
+//  - Codec specific settings.
+//  - Key frame signaling.
+//  - Stream management settings.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+struct VideoCodec;
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterEncoderObserver()
+// and deregistered using DeregisterEncoderObserver().
+class WEBRTC_DLLEXPORT ViEEncoderObserver {
+ public:
+  // This method is called once per second with the current encoded frame rate
+  // and bit rate.
+  virtual void OutgoingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) = 0;
+ protected:
+  virtual ~ViEEncoderObserver() {}
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterDecoderObserver()
+// and deregistered using DeregisterDecoderObserver().
+class WEBRTC_DLLEXPORT ViEDecoderObserver {
+ public:
+  // This method is called when a new incoming stream is detected, normally
+  // triggered by a new incoming SSRC or payload type.
+  virtual void IncomingCodecChanged(const int video_channel,
+                                    const VideoCodec& video_codec) = 0;
+
+  // This method is called once per second containing the frame rate and bit
+  // rate for the incoming stream
+  virtual void IncomingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) = 0;
+
+  // This method is called when the decoder needs a new key frame from encoder
+  // on the sender.
+  virtual void RequestNewKeyFrame(const int video_channel) = 0;
+
+ protected:
+  virtual ~ViEDecoderObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViECodec {
+ public:
+  // Factory for the ViECodec sub-API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViECodec* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViECodec sub-API and decreases an internal reference
+  // counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Gets the number of available codecs for the VideoEngine build.
+  virtual int NumberOfCodecs() const = 0;
+
+  // Gets a VideoCodec struct for a codec containing the default configuration
+  // for that codec type.
+  virtual int GetCodec(const unsigned char list_number,
+                       VideoCodec& video_codec) const = 0;
+
+  // Sets the send codec to use for a specified channel.
+  virtual int SetSendCodec(const int video_channel,
+                           const VideoCodec& video_codec) = 0;
+
+  // Gets the current send codec settings.
+  virtual int GetSendCodec(const int video_channel,
+                           VideoCodec& video_codec) const = 0;
+
+  // Prepares VideoEngine to receive a certain codec type and setting for a
+  // specified payload type.
+  virtual int SetReceiveCodec(const int video_channel,
+                              const VideoCodec& video_codec) = 0;
+
+  // Gets the current receive codec.
+  virtual int GetReceiveCodec(const int video_channel,
+                              VideoCodec& video_codec) const = 0;
+
+  // This function is used to get codec configuration parameters to be
+  // signaled from the encoder to the decoder in the call setup.
+  virtual int GetCodecConfigParameters(
+      const int video_channel,
+      unsigned char config_parameters[kConfigParameterSize],
+      unsigned char& config_parameters_size) const = 0;
+
+  // Enables advanced scaling of the captured video stream if the stream
+  // differs from the send codec settings.
+  virtual int SetImageScaleStatus(const int video_channel,
+                                  const bool enable) = 0;
+
+  // Gets the number of sent key frames and number of sent delta frames.
+  virtual int GetSendCodecStastistics(const int video_channel,
+                                      unsigned int& key_frames,
+                                      unsigned int& delta_frames) const = 0;
+
+  // Gets the number of decoded key frames and number of decoded delta frames.
+  virtual int GetReceiveCodecStastistics(const int video_channel,
+                                         unsigned int& key_frames,
+                                         unsigned int& delta_frames) const = 0;
+
+  // Gets the bitrate targeted by the video codec rate control in kbit/s.
+  virtual int GetCodecTargetBitrate(const int video_channel,
+                                    unsigned int* bitrate) const = 0;
+
+  // Gets the number of packets discarded by the jitter buffer because they
+  // arrived too late.
+  virtual unsigned int GetDiscardedPackets(const int video_channel) const = 0;
+
+  // Enables key frame request callback in ViEDecoderObserver.
+  virtual int SetKeyFrameRequestCallbackStatus(const int video_channel,
+                                               const bool enable) = 0;
+
+  // Enables key frame requests for detected lost packets.
+  virtual int SetSignalKeyPacketLossStatus(
+      const int video_channel,
+      const bool enable,
+      const bool only_key_frames = false) = 0;
+
+  // Registers an instance of a user implementation of the ViEEncoderObserver.
+  virtual int RegisterEncoderObserver(const int video_channel,
+                                      ViEEncoderObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEEncoderObserver.
+  virtual int DeregisterEncoderObserver(const int video_channel) = 0;
+
+  // Registers an instance of a user implementation of the ViEDecoderObserver.
+  virtual int RegisterDecoderObserver(const int video_channel,
+                                      ViEDecoderObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEDecoderObserver.
+  virtual int DeregisterDecoderObserver(const int video_channel) = 0;
+
+  // This function forces the next encoded frame to be a key frame. This is
+  // normally used when the remote endpoint only supports out-of-band key
+  // frame requests.
+  virtual int SendKeyFrame(const int video_channel) = 0;
+
+  // This function makes the decoder wait for a key frame before starting to
+  // decode the incoming video stream.
+  virtual int WaitForFirstKeyFrame(const int video_channel,
+                                   const bool wait) = 0;
+
+ protected:
+  ViECodec() {}
+  virtual ~ViECodec() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
diff --git a/trunk/src/video_engine/include/vie_encryption.h b/trunk/src/video_engine/include/vie_encryption.h
new file mode 100644
index 0000000..8e7c955
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_encryption.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - External encryption and decryption.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+class VideoEngine;
+
+class WEBRTC_DLLEXPORT ViEEncryption {
+ public:
+  // Factory for the ViEEncryption sub-API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViEEncryption* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEEncryption sub-API and decreases an internal reference
+  // counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // This function registers an encryption derived instance and enables
+  // external encryption for the specified channel.
+  virtual int RegisterExternalEncryption(const int video_channel,
+                                         Encryption& encryption) = 0;
+
+  // This function deregisters a registered encryption derived instance
+  // and disables external encryption.
+  virtual int DeregisterExternalEncryption(const int video_channel) = 0;
+
+ protected:
+  ViEEncryption() {}
+  virtual ~ViEEncryption() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
diff --git a/trunk/src/video_engine/include/vie_errors.h b/trunk/src/video_engine/include/vie_errors.h
new file mode 100644
index 0000000..63100b5
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_errors.h
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
+
+enum ViEErrors {
+  // ViEBase.
+  kViENotInitialized = 12000,        // Init has not been called successfully.
+  kViEBaseVoEFailure,                // SetVoiceEngine. ViE failed to use VE instance. Check VE instance pointer. ConnectAudioChannel failed to set voice channel. Has SetVoiceEngine been called? Is the voice channel correct?
+  kViEBaseChannelCreationFailed,     // CreateChannel.
+  kViEBaseInvalidChannelId,          // The channel does not exist.
+  kViEAPIDoesNotExist,               // Release called on Interface that has not been created.
+  kViEBaseInvalidArgument,
+  kViEBaseAlreadySending,            // StartSend called on channel that is already sending.
+  kViEBaseNotSending,                // StopSend called on channel that is not sending.
+  kViEBaseAlreadyReceiving,          // StartReceive called on channel that is already receiving.
+  kViEBaseObserverAlreadyRegistered,  // RegisterObserver- an observer has already been set.
+  kViEBaseObserverNotRegistered,     // DeregisterObserver - no observer has been registered.
+  kViEBaseUnknownError,              // An unknown error has occurred. Check the log file.
+
+  // ViECodec.
+  kViECodecInvalidArgument  = 12100,    // Wrong input parameter to function.
+  kViECodecObserverAlreadyRegistered,   // RegisterEncoderObserver, RegisterDecoderObserver.
+  kViECodecObserverNotRegistered,       // DeregisterEncoderObserver, DeregisterDecoderObserver.
+  kViECodecInvalidCodec,                // SetSendCodec,SetReceiveCodec- The codec structure is invalid.
+  kViECodecInvalidChannelId,            // The channel does not exist.
+  kViECodecInUse,                       // SetSendCodec- Can't change codec size or type when multiple channels use the same encoder.
+  kViECodecUnknownError,                // An unknown error has occurred. Check the log file.
+
+  // ViERender.
+  kViERenderInvalidRenderId = 12200,  // No renderer with the ID exist. In AddRenderer - The render ID is invalid. No capture device, channel or file is allocated with that id.
+  kViERenderAlreadyExists,            // AddRenderer: the renderer already exists.
+  kViERenderInvalidFrameFormat,       // AddRender (external renderer). The user has requested a frame format that we don't support.
+  kViERenderUnknownError,             // An unknown error has occurred. Check the log file.
+
+  // ViECapture.
+  kViECaptureDeviceAlreadyConnected = 12300,  // ConnectCaptureDevice - A capture device has already been connected to this video channel.
+  kViECaptureDeviceDoesNotExist,              // No capture device exist with the provided capture id or unique name.
+  kViECaptureDeviceInvalidChannelId,          // ConnectCaptureDevice, DisconnectCaptureDevice- No Channel exist with the provided channel id.
+  kViECaptureDeviceNotConnected,              // DisconnectCaptureDevice- No capture device is connected to the channel.
+  kViECaptureDeviceNotStarted,                // Stop- The capture device is not started.
+  kViECaptureDeviceAlreadyStarted,            // Start- The capture device is already started.
+  kViECaptureDeviceAlreadyAllocated,          // AllocateCaptureDevice The device is already allocated.
+  kViECaptureDeviceMaxNoDevicesAllocated,     // AllocateCaptureDevice Max number of devices already allocated.
+  kViECaptureObserverAlreadyRegistered,       // RegisterObserver- An observer is already registered. Need to deregister first.
+  kViECaptureDeviceObserverNotRegistered,     // DeregisterObserver- No observer is registered.
+  kViECaptureDeviceUnknownError,              // An unknown error has occurred. Check the log file.
+  kViECaptureDeviceMacQtkitNotSupported,      // QTKit handles the capture devices automatically. Thus querying capture capabilities is not supported.
+
+  // ViEFile.
+  kViEFileInvalidChannelId  = 12400,  // No Channel exist with the provided channel id.
+  kViEFileInvalidArgument,            // Incorrect input argument
+  kViEFileAlreadyRecording,           // StartRecordOutgoingVideo - already recording channel
+  kViEFileVoENotSet,                  // StartRecordOutgoingVideo. Failed to access voice engine. Has SetVoiceEngine been called?
+  kViEFileNotRecording,               // StopRecordOutgoingVideo
+  kViEFileMaxNoOfFilesOpened,         // StartPlayFile
+  kViEFileNotPlaying,                 // StopPlayFile. The file with the provided id is not playing.
+  kViEFileObserverAlreadyRegistered,  // RegisterObserver
+  kViEFileObserverNotRegistered,      // DeregisterObserver
+  kViEFileInputAlreadyConnected,      // SendFileOnChannel- the video channel already have a connected input.
+  kViEFileNotConnected,               // StopSendFileOnChannel- No file is being sent on the channel.
+  kViEFileVoEFailure,                 // SendFileOnChannel,StartPlayAudioLocally - failed to play audio stream
+  kViEFileInvalidRenderId,            // SetRenderTimeoutImage and SetRenderStartImage: Renderer with the provided render id does not exist.
+  kViEFileInvalidFile,                // Can't open the file with provided filename. Is the path and file format correct?
+  kViEFileInvalidCapture,             // Can't use ViEPicture. Is the object correct?
+  kViEFileSetRenderTimeoutError,      // SetRenderTimeoutImage- Please see log file.
+  kViEFileInvalidCaptureId,           // SetCaptureDeviceImage capture id does not exist.
+  kViEFileSetCaptureImageError,       // SetCaptureDeviceImage error. Please see log file.
+  kViEFileSetStartImageError,         // SetRenderStartImage error. Please see log file.
+  kViEFileUnknownError,               // An unknown error has occurred. Check the log file.
+
+  // ViENetwork.
+  kViENetworkInvalidChannelId = 12500,   // No Channel exist with the provided channel id.
+  kViENetworkAlreadyReceiving,           // SetLocalReceiver: Can not change ports while receiving.
+  kViENetworkLocalReceiverNotSet,        // GetLocalReceiver: SetLocalReceiver not called.
+  kViENetworkAlreadySending,             // SetSendDestination
+  kViENetworkDestinationNotSet,          // GetSendDestination
+  kViENetworkInvalidArgument,            // GetLocalIP- Check function  arguments.
+  kViENetworkSendCodecNotSet,            // SetSendGQoS- Need to set the send codec first.
+  kViENetworkServiceTypeNotSupported,    // SetSendGQoS
+  kViENetworkNotSupported,               // SetSendGQoS Not supported on this OS.
+  kViENetworkObserverAlreadyRegistered,  // RegisterObserver
+  kViENetworkObserverNotRegistered,      // SetPeriodicDeadOrAliveStatus - Need to call RegisterObserver first, DeregisterObserver if no observer is registered.
+  kViENetworkUnknownError,               // An unknown error has occurred. Check the log file.
+
+  // ViERTP_RTCP.
+  kViERtpRtcpInvalidChannelId = 12600,   // No Channel exist with the provided channel id.
+  kViERtpRtcpAlreadySending,             // The channel is already sending. Need to stop send before calling this API.
+  kViERtpRtcpNotSending,                 // The channel needs to be sending in order for this function to work.
+  kViERtpRtcpRtcpDisabled,               // Functions failed because RTCP is disabled.
+  kViERtpRtcpObserverAlreadyRegistered,  // An observer is already registered. Need to deregister the old first.
+  kViERtpRtcpObserverNotRegistered,      // No observer registered.
+  kViERtpRtcpUnknownError,               // An unknown error has occurred. Check the log file.
+
+  // ViEEncryption.
+  kViEEncryptionInvalidChannelId = 12700,  // Channel id does not exist.
+  kViEEncryptionInvalidSrtpParameter,      // EnableSRTPSend, EnableSRTPReceive-  Check the SRTP parameters.
+  kViEEncryptionSrtpNotSupported,          // This build does not support SRTP.
+  kViEEncryptionUnknownError,              // An unknown error has occurred. Check the log file.
+
+  // ViEImageProcess.
+  kViEImageProcessInvalidChannelId  = 12800,  // No Channel exist with the provided channel id.
+  kViEImageProcessInvalidCaptureId,          // No capture device exist with the provided capture id.
+  kViEImageProcessFilterExists,              // RegisterCaptureEffectFilter,RegisterSendEffectFilter,RegisterRenderEffectFilter - Effect filter already registered.
+  kViEImageProcessFilterDoesNotExist,        // DeRegisterCaptureEffectFilter,DeRegisterSendEffectFilter,DeRegisterRenderEffectFilter - Effect filter not registered.
+  kViEImageProcessAlreadyEnabled,            // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already enabled.
+  kViEImageProcessAlreadyDisabled,           // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already disabled.
+  kViEImageProcessUnknownError               // An unknown error has occurred. Check the log file.
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
diff --git a/trunk/src/video_engine/include/vie_external_codec.h b/trunk/src/video_engine/include/vie_external_codec.h
new file mode 100644
index 0000000..a2e686a
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_external_codec.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoDecoder;
+class VideoEncoder;
+class VideoEngine;
+
+class WEBRTC_DLLEXPORT ViEExternalCodec {
+ public:
+  static ViEExternalCodec* GetInterface(VideoEngine* video_engine);
+
+  virtual int Release() = 0;
+
+  virtual int RegisterExternalSendCodec(const int video_channel,
+                                        const unsigned char pl_type,
+                                        VideoEncoder* encoder) = 0;
+
+  virtual int DeRegisterExternalSendCodec(const int video_channel,
+                                          const unsigned char pl_type) = 0;
+
+  virtual int RegisterExternalReceiveCodec(const int video_channel,
+                                           const unsigned int pl_type,
+                                           VideoDecoder* decoder,
+                                           bool decoder_render = false,
+                                           int render_delay = 0) = 0;
+
+  virtual int DeRegisterExternalReceiveCodec(const int video_channel,
+                                             const unsigned char pl_type) = 0;
+
+ protected:
+  ViEExternalCodec() {}
+  virtual ~ViEExternalCodec() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
diff --git a/trunk/src/video_engine/include/vie_file.h b/trunk/src/video_engine/include/vie_file.h
new file mode 100644
index 0000000..fbc3d2b
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_file.h
@@ -0,0 +1,221 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - File recording and playing.
+//  - Snapshots.
+//  - Background images.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+struct VideoCodec;
+
+// This structure contains picture data and describes the picture type.
+struct ViEPicture {
+  unsigned char* data;
+  unsigned int size;
+  unsigned int width;
+  unsigned int height;
+  RawVideoType type;
+
+  ViEPicture() {
+    data = NULL;
+    size = 0;
+    width = 0;
+    height = 0;
+    type = kVideoI420;
+  }
+
+  // Call FreePicture to free data.
+  ~ViEPicture() {
+    data = NULL;
+    size = 0;
+    width = 0;
+    height = 0;
+    type = kVideoUnknown;
+  }
+};
+
+// This enumerator tells which audio source to use for media files.
+enum AudioSource {
+  NO_AUDIO,
+  MICROPHONE,
+  PLAYOUT,
+  VOICECALL
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterObserver() and
+// deregistered using DeregisterObserver().
+class WEBRTC_DLLEXPORT ViEFileObserver {
+ public:
+  // This method is called when the end of a played file is reached.
+  virtual void PlayFileEnded(const WebRtc_Word32 file_id) = 0;
+
+ protected:
+  virtual ~ViEFileObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViEFile {
+ public:
+  // Factory for the ViEFile sub-API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViEFile* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEFile sub-API and decreases an internal reference counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Starts playing a video file.
+  virtual int StartPlayFile(
+      const char* file_name_utf8,
+      int& file_id,
+      const bool loop = false,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // Stops a file from being played.
+  virtual int StopPlayFile(const int file_id) = 0;
+
+  // Registers an instance of a user implementation of the ViEFileObserver.
+  virtual int RegisterObserver(int file_id, ViEFileObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEFileObserver.
+  virtual int DeregisterObserver(int file_id, ViEFileObserver& observer) = 0;
+
+  // This function tells which channel, if any, the file should be sent on.
+  virtual int SendFileOnChannel(const int file_id, const int video_channel) = 0;
+
+  // Stops a file from being sent on a channel.
+  virtual int StopSendFileOnChannel(const int video_channel) = 0;
+
+  // Starts playing the file audio as microphone input for the specified voice
+  // channel.
+  virtual int StartPlayFileAsMicrophone(const int file_id,
+                                        const int audio_channel,
+                                        bool mix_microphone = false,
+                                        float volume_scaling = 1) = 0;
+
+  // This function stops the audio from being played on a VoiceEngine channel.
+  virtual int StopPlayFileAsMicrophone(const int file_id,
+                                       const int audio_channel) = 0;
+
+  // The function plays and mixes the file audio with the local speaker signal
+  // for playout.
+  virtual int StartPlayAudioLocally(const int file_id, const int audio_channel,
+                                    float volume_scaling = 1) = 0;
+
+  // Stops the audio from a file from being played locally.
+  virtual int StopPlayAudioLocally(const int file_id,
+                                   const int audio_channel) = 0;
+
+  // This function starts recording the video transmitted to another endpoint.
+  virtual int StartRecordOutgoingVideo(
+      const int video_channel,
+      const char* file_name_utf8,
+      AudioSource audio_source,
+      const CodecInst& audio_codec,
+      const VideoCodec& video_codec,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // This function starts recording the incoming video stream on a channel.
+  virtual int StartRecordIncomingVideo(
+      const int video_channel,
+      const char* file_name_utf8,
+      AudioSource audio_source,
+      const CodecInst& audio_codec,
+      const VideoCodec& video_codec,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // Stops the file recording of the outgoing stream.
+  virtual int StopRecordOutgoingVideo(const int video_channel) = 0;
+
+  // Stops the file recording of the incoming stream.
+  virtual int StopRecordIncomingVideo(const int video_channel) = 0;
+
+  // Gets the audio codec, video codec and file format of a recorded file.
+  virtual int GetFileInformation(
+      const char* file_name,
+      VideoCodec& video_codec,
+      CodecInst& audio_codec,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // The function takes a snapshot of the last rendered image for a video
+  // channel.
+  virtual int GetRenderSnapshot(const int video_channel,
+                                const char* file_name_utf8) = 0;
+
+  // The function takes a snapshot of the last rendered image for a video
+  // channel
+  virtual int GetRenderSnapshot(const int video_channel,
+                                ViEPicture& picture) = 0;
+
+  // The function takes a snapshot of the last captured image by a specified
+  // capture device.
+  virtual int GetCaptureDeviceSnapshot(const int capture_id,
+                                       const char* file_name_utf8) = 0;
+
+  // The function takes a snapshot of the last captured image by a specified
+  // capture device.
+  virtual int GetCaptureDeviceSnapshot(const int capture_id,
+                                       ViEPicture& picture) = 0;
+
+  // This function sets a jpg image to show before the first frame is captured
+  // by the capture device. This frame will be encoded and transmitted to a
+  // possible receiver
+  virtual int SetCaptureDeviceImage(const int capture_id,
+                                    const char* file_name_utf8) = 0;
+
+  // This function sets an image to show before the first frame is captured by
+  // the capture device. This frame will be encoded and transmitted to a
+  // possible receiver
+  virtual int SetCaptureDeviceImage(const int capture_id,
+                                    const ViEPicture& picture) = 0;
+
+  virtual int FreePicture(ViEPicture& picture) = 0;
+
+  // This function sets a jpg image to render before the first received video
+  // frame is decoded for a specified channel.
+  virtual int SetRenderStartImage(const int video_channel,
+                                  const char* file_name_utf8) = 0;
+
+  // This function sets an image to render before the first received video
+  // frame is decoded for a specified channel.
+  virtual int SetRenderStartImage(const int video_channel,
+                                  const ViEPicture& picture) = 0;
+
+  // This function sets a jpg image to render if no frame is decoded for a
+  // specified time interval.
+  virtual int SetRenderTimeoutImage(const int video_channel,
+                                    const char* file_name_utf8,
+                                    const unsigned int timeout_ms = 1000) = 0;
+
+  // This function sets an image to render if no frame is decoded for a
+  // specified time interval.
+  virtual int SetRenderTimeoutImage(const int video_channel,
+                                    const ViEPicture& picture,
+                                    const unsigned int timeout_ms) = 0;
+
+ protected:
+  ViEFile() {}
+  virtual ~ViEFile() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
diff --git a/trunk/src/video_engine/include/vie_image_process.h b/trunk/src/video_engine/include/vie_image_process.h
new file mode 100644
index 0000000..dfad08d
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_image_process.h
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Effect filters
+//  - Deflickering
+//  - Denoising
+//  - Color enhancement
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+
+// This class declares an abstract interface for a user defined effect filter.
+// The effect filter is registered using RegisterCaptureEffectFilter(),
+// RegisterSendEffectFilter() or RegisterRenderEffectFilter() and deregistered
+// with the corresponding deregister function.
+class WEBRTC_DLLEXPORT ViEEffectFilter {
+ public:
+  // This method is called with an I420 video frame allowing the user to
+  // modify the video frame.
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        unsigned int time_stamp90KHz,
+                        unsigned int width,
+                        unsigned int height) = 0;
+ protected:
+  ViEEffectFilter() {}
+  virtual ~ViEEffectFilter() {}
+};
+
+class WEBRTC_DLLEXPORT ViEImageProcess {
+ public:
+  // Factory for the ViEImageProcess sub-API and increases an internal
+  // reference counter if successful. Returns NULL if the API is not supported
+  // or if construction fails.
+  static ViEImageProcess* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEImageProcess sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // This function registers a EffectFilter to use for a specified capture
+  // device.
+  virtual int RegisterCaptureEffectFilter(const int capture_id,
+                                          ViEEffectFilter& capture_filter) = 0;
+
+  // This function deregisters a EffectFilter for a specified capture device.
+  virtual int DeregisterCaptureEffectFilter(const int capture_id) = 0;
+
+  // This function registers an EffectFilter to use for a specified channel.
+  virtual int RegisterSendEffectFilter(const int video_channel,
+                                       ViEEffectFilter& send_filter) = 0;
+
+  // This function deregisters a send effect filter for a specified channel.
+  virtual int DeregisterSendEffectFilter(const int video_channel) = 0;
+
+  // This function registers a EffectFilter to use for the rendered video
+  // stream on an incoming channel.
+  virtual int RegisterRenderEffectFilter(const int video_channel,
+                                         ViEEffectFilter& render_filter) = 0;
+
+  // This function deregisters a render effect filter for a specified channel.
+  virtual int DeregisterRenderEffectFilter(const int video_channel) = 0;
+
+  // All cameras run the risk of getting in almost perfect sync with
+  // fluorescent lamps, which will result in a very annoying flickering of the
+  // image. Most cameras have some type of filter to protect against this but
+  // not all of them succeed. Enabling this function will remove the flicker.
+  virtual int EnableDeflickering(const int capture_id, const bool enable) = 0;
+
+  // Some cameras produce very noisy captured images, especially in low-light
+  // conditions. This functionality will reduce the camera noise.
+  virtual int EnableDenoising(const int capture_id, const bool enable) = 0;
+
+  // This function enhances the colors on the decoded video stream, enabled by
+  // default.
+  virtual int EnableColorEnhancement(const int video_channel,
+                                     const bool enable) = 0;
+
+ protected:
+  ViEImageProcess() {}
+  virtual ~ViEImageProcess() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
diff --git a/trunk/src/video_engine/include/vie_network.h b/trunk/src/video_engine/include/vie_network.h
new file mode 100644
index 0000000..9752008
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_network.h
@@ -0,0 +1,215 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
+
+// This sub-API supports the following functionalities:
+//  - Configuring send and receive addresses.
+//  - External transport support.
+//  - Port and address filters.
+//  - Windows GQoS functions and ToS functions.
+//  - Packet timeout notification.
+//  - Dead-or-Alive connection observations.
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class Transport;
+class VideoEngine;
+
+// This enumerator describes VideoEngine packet timeout states.
+enum ViEPacketTimeout {
+  NoPacket = 0,
+  PacketReceived = 1
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterObserver() and
+// deregistered using DeregisterObserver().
+class WEBRTC_DLLEXPORT ViENetworkObserver {
+ public:
+  // This method will be called periodically delivering a dead-or-alive
+  // decision for a specified channel.
+  virtual void OnPeriodicDeadOrAlive(const int video_channel,
+                                     const bool alive) = 0;
+
+  // This method is called once if a packet timeout occurred.
+  virtual void PacketTimeout(const int video_channel,
+                             const ViEPacketTimeout timeout) = 0;
+ protected:
+  virtual ~ViENetworkObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViENetwork {
+ public:
+  // Default values.
+  enum { KDefaultSampleTimeSeconds = 2 };
+
+  // Factory for the ViENetwork sub-API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViENetwork* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViENetwork sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Specifies the ports to receive RTP packets on. It is also possible to set
+  // port for RTCP and local IP address.
+  virtual int SetLocalReceiver(const int video_channel,
+                               const unsigned short rtp_port,
+                               const unsigned short rtcp_port = 0,
+                               const char* ip_address = NULL) = 0;
+
+  // Gets the local receiver ports and address for a specified channel.
+  virtual int GetLocalReceiver(const int video_channel,
+                               unsigned short& rtp_port,
+                               unsigned short& rtcp_port, char* ip_address) = 0;
+
+  // Specifies the destination port and IP address for a specified channel.
+  virtual int SetSendDestination(const int video_channel,
+                                 const char* ip_address,
+                                 const unsigned short rtp_port,
+                                 const unsigned short rtcp_port = 0,
+                                 const unsigned short source_rtp_port = 0,
+                                 const unsigned short source_rtcp_port = 0) = 0;
+
+  // Get the destination port and address for a specified channel.
+  virtual int GetSendDestination(const int video_channel,
+                                 char* ip_address,
+                                 unsigned short& rtp_port,
+                                 unsigned short& rtcp_port,
+                                 unsigned short& source_rtp_port,
+                                 unsigned short& source_rtcp_port) = 0;
+
+  // This function registers a user implementation of Transport to use for
+  // sending RTP and RTCP packets on this channel.
+  virtual int RegisterSendTransport(const int video_channel,
+                                    Transport& transport) = 0;
+
+  // This function deregisters a used Transport for a specified channel.
+  virtual int DeregisterSendTransport(const int video_channel) = 0;
+
+  // When using external transport for a channel, received RTP packets should
+  // be passed to VideoEngine using this function. The input should contain
+  // the RTP header and payload.
+  virtual int ReceivedRTPPacket(const int video_channel,
+                                const void* data,
+                                const int length) = 0;
+
+  // When using external transport for a channel, received RTCP packets should
+  // be passed to VideoEngine using this function.
+  virtual int ReceivedRTCPPacket(const int video_channel,
+                                 const void* data,
+                                 const int length) = 0;
+
+  // Gets the source ports and IP address of the incoming stream for a
+  // specified channel.
+  virtual int GetSourceInfo(const int video_channel,
+                            unsigned short& rtp_port,
+                            unsigned short& rtcp_port,
+                            char* ip_address,
+                            unsigned int ip_address_length) = 0;
+
+  // Gets the local IP address, in string format.
+  virtual int GetLocalIP(char ip_address[64], bool ipv6 = false) = 0;
+
+  // Enables IPv6, instead of IPv4, for a specified channel.
+  virtual int EnableIPv6(int video_channel) = 0;
+
+  // The function returns true if IPv6 is enabled, false otherwise.
+  virtual bool IsIPv6Enabled(int video_channel) = 0;
+
+  // Enables a port and IP address filtering for incoming packets on a
+  // specific channel.
+  virtual int SetSourceFilter(const int video_channel,
+                              const unsigned short rtp_port,
+                              const unsigned short rtcp_port = 0,
+                              const char* ip_address = NULL) = 0;
+
+  // Gets current port and IP address filter for a specified channel.
+  virtual int GetSourceFilter(const int video_channel,
+                              unsigned short& rtp_port,
+                              unsigned short& rtcp_port,
+                              char* ip_address) = 0;
+
+  // This function sets the six-bit Differentiated Services Code Point (DSCP)
+  // in the IP header of the outgoing stream for a specific channel.
+  // Windows and Linux only.
+  virtual int SetSendToS(const int video_channel,
+                         const int DSCP,
+                         const bool use_set_sockOpt = false) = 0;
+
+  // Retrieves the six-bit Differentiated Services Code Point (DSCP) in the IP
+  // header of the outgoing stream for a specific channel.
+  virtual int GetSendToS(const int video_channel,
+                         int& DSCP,
+                         bool& use_set_sockOpt) = 0;
+
+  // This function sets the Generic Quality of Service (GQoS) service level.
+  // The Windows operating system then maps to a Differentiated Services Code
+  // Point (DSCP) and to an 802.1p setting. Windows only.
+  virtual int SetSendGQoS(const int video_channel, const bool enable,
+                          const int service_type,
+                          const int overrideDSCP = 0) = 0;
+
+  // This function retrieves the currently set GQoS service level for a
+  // specific channel.
+  virtual int GetSendGQoS(const int video_channel,
+                          bool& enabled,
+                          int& service_type,
+                          int& overrideDSCP) = 0;
+
+  // This function sets the Maximum Transition Unit (MTU) for a channel. The
+  // RTP packet will be packetized based on this MTU to optimize performance
+  // over the network.
+  virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
+
+  // This function enables or disables warning reports if packets have not
+  // been received for a specified time interval.
+  virtual int SetPacketTimeoutNotification(const int video_channel,
+                                           bool enable,
+                                           int timeout_seconds) = 0;
+
+  // Registers an instance of a user implementation of the ViENetwork
+  // observer.
+  virtual int RegisterObserver(const int video_channel,
+                               ViENetworkObserver& observer) = 0;
+
+  // Removes a registered instance of ViENetworkObserver.
+  virtual int DeregisterObserver(const int video_channel) = 0;
+
+  // This function enables or disables the periodic dead-or-alive callback
+  // functionality for a specified channel.
+  virtual int SetPeriodicDeadOrAliveStatus(
+      const int video_channel,
+      const bool enable,
+      const unsigned int sample_time_seconds = KDefaultSampleTimeSeconds) = 0;
+
+  // This function handles sending a raw UDP data packet over an existing RTP
+  // or RTCP socket.
+  virtual int SendUDPPacket(const int video_channel,
+                            const void* data,
+                            const unsigned int length,
+                            int& transmitted_bytes,
+                            bool use_rtcp_socket = false) = 0;
+
+ protected:
+  ViENetwork() {}
+  virtual ~ViENetwork() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
diff --git a/trunk/src/video_engine/include/vie_render.h b/trunk/src/video_engine/include/vie_render.h
new file mode 100644
index 0000000..0b8328e
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_render.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Specify render destinations for incoming video streams, capture devices
+//    and files.
+//  - Configuring render streams.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+class VideoRender;
+
+// This class declares an abstract interface to be used for external renderers.
+// The user implemented derived class is registered using AddRenderer().
+class WEBRTC_DLLEXPORT ExternalRenderer {
+ public:
+  // This method will be called when the stream to be rendered changes in
+  // resolution or number of streams mixed in the image.
+  virtual int FrameSizeChange(unsigned int width,
+                              unsigned int height,
+                              unsigned int number_of_streams) = 0;
+
+  // This method is called when a new frame should be rendered.
+  virtual int DeliverFrame(unsigned char* buffer,
+                           int buffer_size,
+                           // RTP timestamp in 90kHz.
+                           uint32_t time_stamp,
+                           // Wallclock render time in milliseconds
+                           int64_t render_time) = 0;
+
+ protected:
+  virtual ~ExternalRenderer() {}
+};
+
+class WEBRTC_DLLEXPORT ViERender {
+ public:
+  // Factory for the ViERender sub-API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViERender* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViERender sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Registers render module.
+  virtual int RegisterVideoRenderModule(VideoRender& render_module) = 0;
+
+  // Deregisters render module.
+  virtual int DeRegisterVideoRenderModule(VideoRender& render_module) = 0;
+
+  // Sets the render destination for a given render ID.
+  virtual int AddRenderer(const int render_id,
+                          void* window,
+                          const unsigned int z_order,
+                          const float left,
+                          const float top,
+                          const float right,
+                          const float bottom) = 0;
+
+  // Removes the renderer for a stream.
+  virtual int RemoveRenderer(const int render_id) = 0;
+
+  // Starts rendering a render stream.
+  virtual int StartRender(const int render_id) = 0;
+
+  // Stops rendering a render stream.
+  virtual int StopRender(const int render_id) = 0;
+
+  // Configures an already added render stream.
+  virtual int ConfigureRender(int render_id,
+                              const unsigned int z_order,
+                              const float left,
+                              const float top,
+                              const float right,
+                              const float bottom) = 0;
+
+  // This function mirrors the rendered stream left and right or up and down.
+  virtual int MirrorRenderStream(const int render_id,
+                                 const bool enable,
+                                 const bool mirror_xaxis,
+                                 const bool mirror_yaxis) = 0;
+
+  // External render.
+  virtual int AddRenderer(const int render_id,
+                          RawVideoType video_input_format,
+                          ExternalRenderer* renderer) = 0;
+
+ protected:
+  ViERender() {}
+  virtual ~ViERender() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
diff --git a/trunk/src/video_engine/include/vie_rtp_rtcp.h b/trunk/src/video_engine/include/vie_rtp_rtcp.h
new file mode 100644
index 0000000..1397222
--- /dev/null
+++ b/trunk/src/video_engine/include/vie_rtp_rtcp.h
@@ -0,0 +1,311 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Callbacks for RTP and RTCP events such as modified SSRC or CSRC.
+//  - SSRC handling.
+//  - Transmission of RTCP reports.
+//  - Obtaining RTCP data from incoming RTCP sender reports.
+//  - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
+//  - Forward Error Correction (FEC).
+//  - RTP Keep-alive for maintaining the NAT mappings associated to RTP flows.
+//  - Writing RTP and RTCP packets to binary files for off-line analysis of the
+//    call quality.
+//  - Inserting extra RTP packets into active audio stream.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+
+// This enumerator sets the RTCP mode.
+enum ViERTCPMode {
+  kRtcpNone = 0,
+  kRtcpCompound_RFC4585 = 1,
+  kRtcpNonCompound_RFC5506 = 2
+};
+
+// This enumerator describes the key frame request mode.
+enum ViEKeyFrameRequestMethod {
+  kViEKeyFrameRequestNone = 0,
+  kViEKeyFrameRequestPliRtcp = 1,
+  kViEKeyFrameRequestFirRtp = 2,
+  kViEKeyFrameRequestFirRtcp = 3
+};
+
+enum StreamType {
+  kViEStreamTypeNormal = 0,  // Normal media stream
+  kViEStreamTypeRtx = 1  // Retransmission media stream
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterRTPObserver() and
+// deregistered using DeregisterRTPObserver().
+class WEBRTC_DLLEXPORT ViERTPObserver {
+ public:
+  // This method is called if SSRC of the incoming stream is changed.
+  virtual void IncomingSSRCChanged(const int video_channel,
+                                   const unsigned int SSRC) = 0;
+
+  // This method is called if a field in CSRC changes or if the number of
+  // CSRCs changes.
+  virtual void IncomingCSRCChanged(const int video_channel,
+                                   const unsigned int CSRC,
+                                   const bool added) = 0;
+ protected:
+  virtual ~ViERTPObserver() {}
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterRTCPObserver() and
+// deregistered using DeregisterRTCPObserver().
+
+class WEBRTC_DLLEXPORT ViERTCPObserver {
+ public:
+  // This method is called if an application-defined RTCP packet has been
+  // received.
+  virtual void OnApplicationDataReceived(
+      const int video_channel,
+      const unsigned char sub_type,
+      const unsigned int name,
+      const char* data,
+      const unsigned short data_length_in_bytes) = 0;
+ protected:
+  virtual ~ViERTCPObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViERTP_RTCP {
+ public:
+  enum { KDefaultDeltaTransmitTimeSeconds = 15 };
+  enum { KMaxRTCPCNameLength = 256 };
+
+  // Factory for the ViERTP_RTCP sub-API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViERTP_RTCP* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViERTP_RTCP sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // This function enables you to specify the RTP synchronization source
+  // identifier (SSRC) explicitly.
+  virtual int SetLocalSSRC(const int video_channel,
+                           const unsigned int SSRC,
+                           const StreamType usage = kViEStreamTypeNormal,
+                           const unsigned char simulcast_idx = 0) = 0;
+
+  // This function gets the SSRC for the outgoing RTP stream for the specified
+  // channel.
+  virtual int GetLocalSSRC(const int video_channel,
+                           unsigned int& SSRC) const = 0;
+
+  // This function maps an incoming SSRC to a StreamType so that the engine
+  // can know which is the normal stream and which is the RTX
+  virtual int SetRemoteSSRCType(const int video_channel,
+                                const StreamType usage,
+                                const unsigned int SSRC) const = 0;
+
+  // This function gets the SSRC for the incoming RTP stream for the specified
+  // channel.
+  virtual int GetRemoteSSRC(const int video_channel,
+                            unsigned int& SSRC) const = 0;
+
+  // This function returns the CSRCs of the incoming RTP packets.
+  virtual int GetRemoteCSRCs(const int video_channel,
+                             unsigned int CSRCs[kRtpCsrcSize]) const = 0;
+
+  // This function enables manual initialization of the sequence number. The
+  // start sequence number is normally a random number.
+  virtual int SetStartSequenceNumber(const int video_channel,
+                                     unsigned short sequence_number) = 0;
+
+  // This function sets the RTCP status for the specified channel.
+  // Default mode is kRtcpCompound_RFC4585.
+  virtual int SetRTCPStatus(const int video_channel,
+                            const ViERTCPMode rtcp_mode) = 0;
+
+  // This function gets the RTCP status for the specified channel.
+  virtual int GetRTCPStatus(const int video_channel,
+                            ViERTCPMode& rtcp_mode) const = 0;
+
+  // This function sets the RTCP canonical name (CNAME) for the RTCP reports
+  // on a specific channel.
+  virtual int SetRTCPCName(const int video_channel,
+                           const char rtcp_cname[KMaxRTCPCNameLength]) = 0;
+
+  // This function gets the RTCP canonical name (CNAME) for the RTCP reports
+  // sent on the specified channel.
+  virtual int GetRTCPCName(const int video_channel,
+                           char rtcp_cname[KMaxRTCPCNameLength]) const = 0;
+
+  // This function gets the RTCP canonical name (CNAME) for the RTCP reports
+  // received on the specified channel.
+  virtual int GetRemoteRTCPCName(
+      const int video_channel,
+      char rtcp_cname[KMaxRTCPCNameLength]) const = 0;
+
+  // This function sends an RTCP APP packet on a specific channel.
+  virtual int SendApplicationDefinedRTCPPacket(
+      const int video_channel,
+      const unsigned char sub_type,
+      unsigned int name,
+      const char* data,
+      unsigned short data_length_in_bytes) = 0;
+
+  // This function enables Negative Acknowledgment (NACK) using RTCP,
+  // implemented based on RFC 4585. NACK retransmits RTP packets if lost on
+  // the network. This creates a lossless transport at the expense of delay.
+  // If using NACK, NACK should be enabled on both endpoints in a call.
+  virtual int SetNACKStatus(const int video_channel, const bool enable) = 0;
+
+  // This function enables Forward Error Correction (FEC) using RTCP,
+  // implemented based on RFC 5109, to improve packet loss robustness. Extra
+  // FEC packets are sent together with the usual media packets, hence
+  // part of the bitrate will be used for FEC packets.
+  virtual int SetFECStatus(const int video_channel,
+                           const bool enable,
+                           const unsigned char payload_typeRED,
+                           const unsigned char payload_typeFEC) = 0;
+
+  // This function enables hybrid Negative Acknowledgment using RTCP
+  // and Forward Error Correction (FEC) implemented based on RFC 5109,
+  // to improve packet loss robustness. Extra
+  // FEC packets are sent together with the usual media packets, hence
+  // part of the bitrate will be used for FEC packets.
+  // The hybrid mode will choose between nack only, fec only and both based on
+  // network conditions. When both are applied, only packets that were not
+  // recovered by the FEC will be nacked.
+  virtual int SetHybridNACKFECStatus(const int video_channel,
+                                     const bool enable,
+                                     const unsigned char payload_typeRED,
+                                     const unsigned char payload_typeFEC) = 0;
+
+  // This function enables RTCP key frame requests.
+  virtual int SetKeyFrameRequestMethod(
+    const int video_channel, const ViEKeyFrameRequestMethod method) = 0;
+
+  // This function enables signaling of temporary bitrate constraints using
+  // RTCP, implemented based on RFC4585.
+  virtual int SetTMMBRStatus(const int video_channel, const bool enable) = 0;
+
+  // Enables and disables REMB packets for this channel. |sender| indicates
+  // this channel is encoding, |receiver| tells the bitrate estimate for
+  // this channel should be included in the REMB packet.
+  virtual int SetRembStatus(int video_channel,
+                            bool sender,
+                            bool receiver) = 0;
+
+  // This function returns our locally created statistics of the received RTP
+  // stream.
+  virtual int GetReceivedRTCPStatistics(
+      const int video_channel,
+      unsigned short& fraction_lost,
+      unsigned int& cumulative_lost,
+      unsigned int& extended_max,
+      unsigned int& jitter,
+      int& rtt_ms) const = 0;
+
+  // This function returns statistics reported by the remote client in a RTCP
+  // packet.
+  virtual int GetSentRTCPStatistics(const int video_channel,
+                                    unsigned short& fraction_lost,
+                                    unsigned int& cumulative_lost,
+                                    unsigned int& extended_max,
+                                    unsigned int& jitter,
+                                    int& rtt_ms) const = 0;
+
+  // The function gets statistics from the sent and received RTP streams.
+  virtual int GetRTPStatistics(const int video_channel,
+                               unsigned int& bytes_sent,
+                               unsigned int& packets_sent,
+                               unsigned int& bytes_received,
+                               unsigned int& packets_received) const = 0;
+
+  // The function gets bandwidth usage statistics from the sent RTP streams in
+  // bits/s.
+  virtual int GetBandwidthUsage(const int video_channel,
+                                unsigned int& total_bitrate_sent,
+                                unsigned int& video_bitrate_sent,
+                                unsigned int& fec_bitrate_sent,
+                                unsigned int& nackBitrateSent) const = 0;
+
+  // This function gets the send-side estimated bandwidth available for video,
+  // including overhead, in bits/s.
+  virtual int GetEstimatedSendBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const = 0;
+
+  // This function gets the receive-side estimated bandwidth available for
+  // video, including overhead, in bits/s.
+  // Returns -1 when no valid estimate is available.
+  virtual int GetEstimatedReceiveBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const = 0;
+
+  // This function enables or disables an RTP keep-alive mechanism which can
+  // be used to maintain an existing Network Address Translator (NAT) mapping
+  // while regular RTP is no longer transmitted.
+  virtual int SetRTPKeepAliveStatus(
+      const int video_channel,
+      bool enable,
+      const char unknown_payload_type,
+      const unsigned int delta_transmit_time_seconds =
+          KDefaultDeltaTransmitTimeSeconds) = 0;
+
+  // This function gets the RTP keep-alive status.
+  virtual int GetRTPKeepAliveStatus(
+      const int video_channel,
+      bool& enabled,
+      char& unkown_payload_type,
+      unsigned int& delta_transmit_time_seconds) const = 0;
+
+  // This function enables capturing of RTP packets to a binary file on a
+  // specific channel and for a given direction. The file can later be
+  // replayed using e.g. RTP Tools rtpplay since the binary file format is
+  // compatible with the rtpdump format.
+  virtual int StartRTPDump(const int video_channel,
+                           const char file_nameUTF8[1024],
+                           RTPDirections direction) = 0;
+
+  // This function disables capturing of RTP packets to a binary file on a
+  // specific channel and for a given direction.
+  virtual int StopRTPDump(const int video_channel,
+                          RTPDirections direction) = 0;
+
+  // Registers an instance of a user implementation of the ViERTPObserver.
+  virtual int RegisterRTPObserver(const int video_channel,
+                                  ViERTPObserver& observer) = 0;
+
+  // Removes a registered instance of ViERTPObserver.
+  virtual int DeregisterRTPObserver(const int video_channel) = 0;
+
+  // Registers an instance of a user implementation of the ViERTCPObserver.
+  virtual int RegisterRTCPObserver(const int video_channel,
+                                   ViERTCPObserver& observer) = 0;
+
+  // Removes a registered instance of ViERTCPObserver.
+  virtual int DeregisterRTCPObserver(const int video_channel) = 0;
+
+ protected:
+  ViERTP_RTCP() {}
+  virtual ~ViERTP_RTCP() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
diff --git a/trunk/src/video_engine/main/test/SimpleCocoaGUI/GUI_Defines.h b/trunk/src/video_engine/main/test/SimpleCocoaGUI/GUI_Defines.h
new file mode 100644
index 0000000..8382844
--- /dev/null
+++ b/trunk/src/video_engine/main/test/SimpleCocoaGUI/GUI_Defines.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  GUI_Defines.h
+ *
+ */
+
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_SIMPLECOCOAGUI_GUI_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_SIMPLECOCOAGUI_GUI_DEFINES_H_
+
+#define		ViE_TEST(x) if(-1 == x){ \
+int errNum = _ptrViEBase->LastError();	\
+NSLog(@"ERROR: %d at %s:%d", errNum, __FUNCTION__, __LINE__); \
+} 
+
+
+// Video Engine Related
+#define	V_CAPTURE_DEVICE_INDEX		0
+#define V_VIE_CAPTURE_ID			747
+#define V_DEVICE_NAME_LENGTH		256
+#define V_CODEC_INDEX		2
+#define V_IP_ADDRESS		"127.0.0.1"
+#define V_RTP_PORT			8000
+
+
+
+#endif	// WEBRTC_VIDEO_ENGINE_MAIN_TEST_SIMPLECOCOAGUI_GUI_DEFINES_H_
diff --git a/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI-Info.plist b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI-Info.plist
new file mode 100644
index 0000000..d0d3a18
--- /dev/null
+++ b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI-Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">

+<plist version="1.0">

+<dict>

+	<key>CFBundleDevelopmentRegion</key>

+	<string>English</string>

+	<key>CFBundleExecutable</key>

+	<string>${EXECUTABLE_NAME}</string>

+	<key>CFBundleIconFile</key>

+	<string></string>

+	<key>CFBundleIdentifier</key>

+	<string>com.yourcompany.${PRODUCT_NAME:rfc1034identifier}</string>

+	<key>CFBundleInfoDictionaryVersion</key>

+	<string>6.0</string>

+	<key>CFBundleName</key>

+	<string>${PRODUCT_NAME}</string>

+	<key>CFBundlePackageType</key>

+	<string>APPL</string>

+	<key>CFBundleShortVersionString</key>

+	<string>1.0</string>

+	<key>CFBundleSignature</key>

+	<string>????</string>

+	<key>CFBundleVersion</key>

+	<string>1</string>

+	<key>LSMinimumSystemVersion</key>

+	<string>${MACOSX_DEPLOYMENT_TARGET}</string>

+	<key>NSMainNibFile</key>

+	<string>SimpleCocoaGUI</string>

+	<key>NSPrincipalClass</key>

+	<string>NSApplication</string>

+</dict>

+</plist>

diff --git a/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.h b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.h
new file mode 100644
index 0000000..10d52fc
--- /dev/null
+++ b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  SimpleCocoaGUIAppDelegate.h
+//
+
+#import <Cocoa/Cocoa.h>
+#include <iostream>
+using namespace std;
+
+@class ViECocoaRenderView;
+
+#include "GUI_Defines.h"
+
+#include "common_types.h"
+#include "voe_base.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_file.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_errors.h"
+
+
+
+@interface SimpleCocoaGUIAppDelegate : NSObject <NSApplicationDelegate> {
+    NSWindow*						_window;
+	IBOutlet NSOpenGLView*			_vieCocoaRenderView1;
+	IBOutlet NSOpenGLView*			_vieCocoaRenderView2;
+	IBOutlet NSButton*				_butRestartLoopback;
+	VideoEngine*				_ptrViE;
+	ViEBase*					_ptrViEBase;
+	ViECapture*					_ptrViECapture;
+	ViERender*					_ptrViERender;
+	ViECodec*					_ptrViECodec;
+	ViENetwork*					_ptrViENetwork;
+	
+	bool							_fullScreen;
+	int								_videoChannel;
+	
+	int _captureId;
+	
+	VideoEngine* ptrViE;
+	ViEBase* ptrViEBase;
+	ViECapture* ptrViECapture;
+	ViERTP_RTCP* ptrViERtpRtcp;
+	ViERender* ptrViERender;
+	ViECodec* ptrViECodec;
+	ViENetwork* ptrViENetwork;
+}
+
+@property (assign) IBOutlet NSWindow* window;
+-(void)createUI:(bool)fullScreen;
+-(void)initViECocoaTest;
+-(void)initializeVariables;
+-(void)NSLogVideoCodecs;
+-(void)startViECocoaTest;
+-(int)initLoopback;
+-(int)ioLooback;
+-(int)startLoopback;
+-(int)stopLooback;
+
+-(IBAction)handleRestart:(id)sender;
+
+
+@end
diff --git a/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.mm b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.mm
new file mode 100644
index 0000000..d594cfe
--- /dev/null
+++ b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.mm
@@ -0,0 +1,1075 @@
+//
+//  SimpleCocoaGUIAppDelegate.m
+//
+
+#import "SimpleCocoaGUIAppDelegate.h"
+
+@implementation SimpleCocoaGUIAppDelegate
+
+@synthesize window = _window;
+
+- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
+
+//	[self initializeVariables];
+	[self createUI];
+//	[self initViECocoaTest];
+//	[self NSLogVideoCodecs];
+//	[self startViECocoaTest];
+	
+//	[self startLoopback];
+	
+	[self ioLooback];
+}
+
+-(void)createUI{
+	
+	NSRect outWindow1Frame = NSMakeRect(200, 200, 200, 200);
+	NSWindow* outWindow1 = [[NSWindow alloc] initWithContentRect:outWindow1Frame styleMask:NSTitledWindowMask backing:NSBackingStoreBuffered defer:NO];
+	[outWindow1 orderOut:nil];
+	NSRect vieAutotestCocoaRenderView1Frame = NSMakeRect(0, 0, 200, 200);
+	_vieCocoaRenderView1 = [[ViECocoaRenderView alloc] initWithFrame:vieAutotestCocoaRenderView1Frame];
+	[[outWindow1 contentView] addSubview:_vieCocoaRenderView1];
+	[outWindow1 setTitle:[NSString stringWithFormat:@"window1"]];
+	[outWindow1 makeKeyAndOrderFront:NSApp];	
+
+	
+	NSRect outWindow2Frame = NSMakeRect(400, 200, 200, 200);
+	NSWindow* outWindow2 = [[NSWindow alloc] initWithContentRect:outWindow2Frame styleMask:NSTitledWindowMask backing:NSBackingStoreBuffered defer:NO];
+	[outWindow2 orderOut:nil];
+	NSRect vieAutotestCocoaRenderView2Frame = NSMakeRect(0, 0, 200, 200);
+	_vieCocoaRenderView2 = [[ViECocoaRenderView alloc] initWithFrame:vieAutotestCocoaRenderView2Frame];
+	[[outWindow2 contentView] addSubview:_vieCocoaRenderView2];
+	[outWindow2 setTitle:[NSString stringWithFormat:@"window2"]];
+	[outWindow2 makeKeyAndOrderFront:NSApp];	
+	
+	
+	
+
+
+
+
+}
+
+-(void)initViECocoaTest{
+	
+	int _error = 0;
+    _ptrViE = VideoEngine::Create();
+	_ptrViEBase = ViEBase::GetInterface(_ptrViE);
+	_error = _ptrViEBase->Init();
+		
+	_ptrViECapture = ViECapture::GetInterface(_ptrViE);
+	_ptrViERender = ViERender::GetInterface(_ptrViE);
+	_ptrViECodec = ViECodec::GetInterface(_ptrViE);	
+	_ptrViENetwork = ViENetwork::GetInterface(_ptrViE);
+	
+
+	_error = _ptrViE->SetTraceFile("ViEBaseStandardTest.txt");
+    _error = _ptrViE->SetEncryptedTraceFile("ViEBaseStandardTestEncrypted.txt");
+
+	
+}
+
+
+-(void)initializeVariables{
+	_fullScreen = YES;
+}
+
+-(void)NSLogVideoCodecs{
+	NSLog(@"Searching for video codecs.....");
+
+	VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(VideoCodec));
+    for(int index = 0; index < _ptrViECodec->NumberOfCodecs(); index++)
+    {
+        ViE_TEST(_ptrViECodec->GetCodec(index, videoCodec));
+		NSLog(@"Video codec found: %s", videoCodec.plName);
+    }	
+	
+}
+-(void)startViECocoaTest{
+
+
+
+
+    int error=0;
+
+    char deviceName[128];
+    char deviceUniqueName[512];
+    int captureId = 0;
+    int dummy = 0;
+
+	//ViE_TEST(_ptrViEBase->CreateChannel(_videoChannel));
+    //ViE_TEST(_ptrViECapture->GetCaptureDevice(0,deviceName,sizeof(deviceName),deviceUniqueName,sizeof(deviceUniqueName)));
+    //ViE_TEST(_ptrViECapture->AllocateCaptureDevice(deviceUniqueName,sizeof(deviceUniqueName),captureId));
+    //ViE_TEST(_ptrViECapture->AllocateCaptureDevice("dummydevicethatdoesnotexist",sizeof(deviceUniqueName),dummy));
+
+    char	captureDeviceName[V_DEVICE_NAME_LENGTH] = "";
+    char	captureDeviceUniqueId[V_DEVICE_NAME_LENGTH] = "";
+	int		captureDeviceId = 0;
+	
+	
+	
+	ViE_TEST(_ptrViE->SetTraceFilter(webrtc::TR_ALL));
+    ViE_TEST(_ptrViE->SetTraceFile("ViECocoaTrace.txt"));
+    ViE_TEST(_ptrViE->SetEncryptedTraceFile("ViECocoaEncryptedTrace.txt"));
+
+	
+	
+	
+	// base
+    ViE_TEST(_ptrViEBase->CreateChannel(_videoChannel));
+    
+	// capture device
+    ViE_TEST(_ptrViECapture->GetCaptureDevice(V_CAPTURE_DEVICE_INDEX, captureDeviceName, V_DEVICE_NAME_LENGTH, captureDeviceUniqueId, V_DEVICE_NAME_LENGTH));    
+	ViE_TEST(_ptrViECapture->AllocateCaptureDevice(captureDeviceUniqueId, V_DEVICE_NAME_LENGTH, captureDeviceId));
+    ViE_TEST(_ptrViECapture->ConnectCaptureDevice(captureDeviceId, _videoChannel));
+    ViE_TEST(_ptrViECapture->StartCapture(captureDeviceId));
+	
+	// renderer
+    ViE_TEST(_ptrViERender->AddRenderer(captureDeviceId,  (void*)_vieCocoaRenderView1, 0, 0.0, 0.0, 1.0, 1.0));
+    ViE_TEST(_ptrViERender->StartRender(captureDeviceId));
+//	usleep(3 * 1000);
+//	ViE_TEST(_ptrViERender->RemoveRenderer(captureDeviceId));
+	//exit(0);
+
+	
+//	// codec
+//	[self NSLogVideoCodecs];
+//	VideoCodec videoCodec;
+//    memset(&videoCodec, 0, sizeof(VideoCodec));
+//	ViE_TEST(_ptrViECodec->GetCodec(V_CODEC_INDEX, videoCodec));
+//	ViE_TEST(_ptrViECodec->SetReceiveCodec(_videoChannel, videoCodec));
+//	ViE_TEST(_ptrViECodec->SetSendCodec(_videoChannel, videoCodec));
+//	
+//	// network + base
+//	ViE_TEST(_ptrViENetwork->SetLocalReceiver(_videoChannel, V_RTP_PORT)); 
+//	ViE_TEST(_ptrViEBase->StartReceive(_videoChannel));
+//    ViE_TEST(_ptrViENetwork->SetSendDestination(_videoChannel, V_IP_ADDRESS, V_RTP_PORT));	
+//    ViE_TEST(_ptrViEBase->StartSend(_videoChannel));
+//	ViE_TEST(_ptrViERender->MirrorRenderStream(captureDeviceId, true, false, true));
+	
+	
+}
+
+-(int)initLoopback
+{
+	
+}
+-(int)startLoopback
+{
+	//********************************************************
+    //  Begin create/initialize  Video Engine for testing
+    //********************************************************	
+	
+    int error = 0;
+    bool succeeded = true;
+    int numberOfErrors = 0;
+    std::string str;
+    
+	//
+    // Create a  VideoEngine instance
+    //
+//    VideoEngine* ptrViE = NULL;
+    ptrViE = VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+	
+	error = ptrViE->SetTraceFilter(webrtc::TR_ALL);
+	if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+	
+	
+    error = ptrViE->SetTraceFile("ViETrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+	
+    error = ptrViE->SetEncryptedTraceFile("ViEEncryptedTrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetEncryptedTraceFile\n");
+        return -1;
+    }
+	
+    //
+    // Init  VideoEngine and create a channel
+    //
+    ptrViEBase = ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+	
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+	
+    //
+    // List available capture devices, allocate and connect.
+    //
+	ptrViECapture = ViECapture::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+	
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    std::cout << std::endl;
+    std::cout << "Available capture devices:" << std::endl;
+    unsigned int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+		
+        error = ptrViECapture->GetCaptureDevice(captureIdx,
+														deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        std::cout << "   " << captureIdx+1 << ". " << deviceName
+		<< std::endl;
+    }
+    std::cout << std::endl;
+    std::cout << "Choose capture devices: ";
+//    std::getline(std::cin, str);
+//    captureIdx = atoi(str.c_str()) - 1;
+	captureIdx = 0;
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+													KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+	
+    _captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId,
+														 KMaxUniqueIdLength, _captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ConnectCaptureDevice(_captureId,
+														_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StartCapture(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+	
+    //
+    // RTP/RTCP settings
+    //
+	ptrViERtpRtcp = ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetRTCPStatus(_videoChannel,
+												 kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(_videoChannel,
+															kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetTMMBRStatus(_videoChannel, true);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
+        return -1;
+    }
+	
+    //
+    // Set up rendering
+    //
+    ptrViERender = ViERender::GetInterface(ptrViE);
+	if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViERender->AddRenderer(_captureId, _vieCocoaRenderView1,
+											  0, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->AddRenderer(_videoChannel, _vieCocoaRenderView2,
+											  1, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    //
+    // Setup codecs
+    //
+    ptrViECodec = ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }    
+	
+    std::cout << std::endl;
+    std::cout << "Available codecs:" << std::endl;
+	
+    // Check available codecs and prepare receive codecs
+    VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(VideoCodec));
+    unsigned int codecIdx = 0;
+    for (codecIdx = 0;
+         codecIdx < ptrViECodec->NumberOfCodecs();
+         codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+		
+        error = ptrViECodec->SetReceiveCodec(_videoChannel,
+													 videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != kVideoCodecRED &&
+            videoCodec.codecType != kVideoCodecULPFEC)
+        {
+            std::cout << "   " << codecIdx+1 << ". " << videoCodec.plName
+			<< std::endl;
+        }
+    }
+//    std::cout << std::endl;
+//    std::cout << "Choose codec: ";
+//    std::getline(std::cin, str);
+//    codecIdx = atoi(str.c_str()) - 1;
+	codecIdx = 0;
+	
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+	
+    error = ptrViECodec->SetSendCodec(_videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+	
+    //
+    // Address settings
+    //
+    ptrViENetwork = ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+	
+    const char* ipAddress = "127.0.0.1";
+    const unsigned short rtpPort = 6000;
+    error = ptrViENetwork->SetLocalReceiver(_videoChannel, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetLocalReceiver\n");
+        return -1;
+    }
+    
+    error = ptrViEBase->StartReceive(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartReceive\n");
+        return -1;
+    }
+	
+    error = ptrViENetwork->SetSendDestination(_videoChannel,
+													  ipAddress, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetSendDestination\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StartSend(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartSend\n");
+        return -1;
+    }
+	
+	
+    //********************************************************
+    //  Engine started
+    //********************************************************
+	
+	
+    // Call started
+    std::cout << std::endl;
+    std::cout << "Loopback call started" << std::endl;
+//    std::cout << std::endl << std::endl;
+//    std::cout << "Press enter to stop...";
+//    std::getline(std::cin, str);
+}
+
+-(int)stopLooback
+{
+	int error = 0;
+	
+	
+	
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+	
+    error = ptrViEBase->StopReceive(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StopSend(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StopRender(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1; 
+    }
+	
+    error = ptrViERender->StopRender(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StopCapture(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1; 
+    }
+	
+    error = ptrViECapture->DisconnectCaptureDevice(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ReleaseCaptureDevice(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->DeleteChannel(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+	
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+	
+    bool deleted = VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+	
+    return 0;
+	
+	// ===================================================================
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+	
+	
+}
+
+-(int)ioLooback
+{
+    //********************************************************
+    //  Begin create/initialize  Video Engine for testing
+    //********************************************************	
+	
+    int error = 0;
+    bool succeeded = true;
+    int numberOfErrors = 0;
+    std::string str;
+    
+	//
+    // Create a  VideoEngine instance
+    //
+    VideoEngine* ptrViE = NULL;
+    ptrViE = VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+	
+	error = ptrViE->SetTraceFilter(webrtc::TR_ALL);
+	if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+	
+	
+    error = ptrViE->SetTraceFile("ViETrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+	
+    error = ptrViE->SetEncryptedTraceFile("ViEEncryptedTrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetEncryptedTraceFile\n");
+        return -1;
+    }
+	
+    //
+    // Init  VideoEngine and create a channel
+    //
+    ViEBase* ptrViEBase = ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+	
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+	
+    //
+    // List available capture devices, allocate and connect.
+    //
+    ViECapture* ptrViECapture = 
+	ViECapture::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+	
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+	
+    std::cout << std::endl;
+    std::cout << "Available capture devices:" << std::endl;
+    unsigned int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+		
+        error = ptrViECapture->GetCaptureDevice(captureIdx,
+														deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        std::cout << "   " << captureIdx+1 << ". " << deviceName
+		<< std::endl;
+    }
+    std::cout << std::endl;
+    std::cout << "Choose capture devices: ";
+//    std::getline(std::cin, str);
+//    captureIdx = atoi(str.c_str()) - 1;
+	captureIdx = 0;
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+													KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+	
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId,
+														 KMaxUniqueIdLength, captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ConnectCaptureDevice(captureId,
+														videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StartCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+	
+    //
+    // RTP/RTCP settings
+    //
+    ViERTP_RTCP* ptrViERtpRtcp =
+	ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+												 kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(videoChannel,
+															kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
+        return -1;
+    }
+	
+    //
+    // Set up rendering
+    //
+    ViERender* ptrViERender =
+	ViERender::GetInterface(ptrViE);
+	if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+	
+//    error = ptrViERender->EnableFullScreenRender(_vieCocoaRenderView1);
+//    if (error == -1)
+//    {
+//        printf("ERROR in ViERender::AddRenderer\n");
+//        return -1;
+//    }	
+	
+	
+    error = ptrViERender->AddRenderer(captureId, _vieCocoaRenderView1,
+											0, 0.5, 0.5, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->AddRenderer(videoChannel, _vieCocoaRenderView2,
+											  1, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    //
+    // Setup codecs
+    //
+    ViECodec* ptrViECodec = ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }    
+	
+    std::cout << std::endl;
+    std::cout << "Available codecs:" << std::endl;
+	
+    // Check available codecs and prepare receive codecs
+    VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(VideoCodec));
+    unsigned int codecIdx = 0;
+    for (codecIdx = 0;
+         codecIdx < ptrViECodec->NumberOfCodecs();
+         codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+		
+        error = ptrViECodec->SetReceiveCodec(videoChannel,
+													 videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != kVideoCodecRED &&
+            videoCodec.codecType != kVideoCodecULPFEC)
+        {
+            std::cout << "   " << codecIdx+1 << ". " << videoCodec.plName
+			<< std::endl;
+        }
+    }
+    std::cout << std::endl;
+    std::cout << "Choose codec: ";
+//    std::getline(std::cin, str);
+//    codecIdx = atoi(str.c_str()) - 1;
+	
+	
+	error = ptrViECapture->ShowCaptureSettingsDialogBox("unique",10, "mytitle");
+	codecIdx = 0;
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+	
+    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+	
+    //
+    // Address settings
+    //
+    ViENetwork* ptrViENetwork =
+	ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+	
+    const char* ipAddress = "127.0.0.1";
+    const unsigned short rtpPort = 6000;
+    error = ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetLocalReceiver\n");
+        return -1;
+    }
+    
+    error = ptrViEBase->StartReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartReceive\n");
+        return -1;
+    }
+	
+    error = ptrViENetwork->SetSendDestination(videoChannel,
+													  ipAddress, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetSendDestination\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StartSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartSend\n");
+        return -1;
+    }
+	
+	
+    //********************************************************
+    //  Engine started
+    //********************************************************
+	
+	
+    // Call started
+    std::cout << std::endl;
+    std::cout << "Loopback call started" << std::endl;
+    std::cout << std::endl << std::endl;
+    std::cout << "Press enter to stop...";
+//	[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
+//    std::getline(std::cin, str);
+	usleep(5 * 1000 * 1000);
+	
+	//int i = 0;
+//	while(1)
+//	{
+//		NSLog(@"app iteration %d", i);
+//		i++;
+//		[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
+//		std::getline(std::cin, str);
+//		if(i > 3)
+//		{
+//			break;
+//		}
+//	}
+	
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+	
+    error = ptrViEBase->StopReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StopSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StopRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1; 
+    }
+	
+    error = ptrViERender->StopRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StopCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1; 
+    }
+	
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+	
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+	
+    bool deleted = VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+	
+	NSLog(@"Finished function");
+    return 0;
+	
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+}
+
+
+
+
+-(IBAction)handleRestart:(id)sender
+{
+//	[self stopLooback];
+//	[self startLoopback];
+	[self ioLooback];
+}
+@end
diff --git a/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI_Prefix.pch b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI_Prefix.pch
new file mode 100644
index 0000000..72b5870
--- /dev/null
+++ b/trunk/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI_Prefix.pch
@@ -0,0 +1,7 @@
+//

+// Prefix header for all source files of the 'SimpleCocoaGUI' target in the 'SimpleCocoaGUI' project

+//

+

+#ifdef __OBJC__

+    #import <Cocoa/Cocoa.h>

+#endif

diff --git a/trunk/src/video_engine/main/test/SimpleCocoaGUI/main.m b/trunk/src/video_engine/main/test/SimpleCocoaGUI/main.m
new file mode 100644
index 0000000..9d52a1c
--- /dev/null
+++ b/trunk/src/video_engine/main/test/SimpleCocoaGUI/main.m
@@ -0,0 +1,12 @@
+//
+//  main.m
+//  SimpleCocoaGUI
+//
+//
+
+#import <Cocoa/Cocoa.h>
+
+int main(int argc, char *argv[])
+{
+    return NSApplicationMain(argc,  (const char **) argv);
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/Capture.rc b/trunk/src/video_engine/main/test/WindowsTest/Capture.rc
new file mode 100644
index 0000000..962256c
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/Capture.rc
@@ -0,0 +1,255 @@
+// Microsoft Visual C++ generated resource script.

+//

+#include "resource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// Korean resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_KOR)

+#ifdef _WIN32

+LANGUAGE LANG_KOREAN, SUBLANG_DEFAULT

+#pragma code_page(949)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE 

+BEGIN

+    "resource.h\0"

+END

+

+2 TEXTINCLUDE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE 

+BEGIN

+    "#define _AFX_NO_SPLITTER_RESOURCES\r\n"

+    "#define _AFX_NO_OLE_RESOURCES\r\n"

+    "#define _AFX_NO_TRACKER_RESOURCES\r\n"

+    "#define _AFX_NO_PROPERTY_RESOURCES\r\n"

+    "\r\n"

+    "#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_KOR)\r\n"

+    "#ifdef _WIN32\r\n"

+    "LANGUAGE 18, 1\r\n"

+    "#pragma code_page(949)\r\n"

+    "#endif //_WIN32\r\n"

+    "#include ""res\\Capture.rc2""  // non-Microsoft Visual C++ edited resources\r\n"

+    "#include ""l.kor\\afxres.rc""          // Standard components\r\n"

+    "#endif\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Version

+//

+

+VS_VERSION_INFO VERSIONINFO

+ FILEVERSION 1,0,0,1

+ PRODUCTVERSION 1,0,0,1

+ FILEFLAGSMASK 0x3fL

+#ifdef _DEBUG

+ FILEFLAGS 0x1L

+#else

+ FILEFLAGS 0x0L

+#endif

+ FILEOS 0x4L

+ FILETYPE 0x1L

+ FILESUBTYPE 0x0L

+BEGIN

+END

+

+#endif    // Korean resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Swedish resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+#ifdef _WIN32

+LANGUAGE LANG_SWEDISH, SUBLANG_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_SLAVE_CHANNEL DIALOGEX 0, 0, 677, 358

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Slave channel"

+FONT 8, "MS Shell Dlg", 400, 0, 0x1

+BEGIN

+    CONTROL         "",IDC_IPADDRESS1,"SysIPAddress32",WS_TABSTOP,485,18,105,15

+    EDITTEXT        IDC_LOCAL_PORT1,631,18,36,16,ES_AUTOHSCROLL

+    LTEXT           "IP-address",IDC_STATIC,495,7,42,9

+    LTEXT           "Local Port",IDC_STATIC,633,7,37,9

+    EDITTEXT        IDC_REMOTE_PORT1,593,18,36,16,ES_AUTOHSCROLL

+    LTEXT           "Port",IDC_STATIC,595,7,17,9

+    CONTROL         "Ext",IDC_EXTTRANSPORT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,489,41,29,12

+    LTEXT           "delay",IDC_STATIC,589,41,21,9

+    COMBOBOX        IDC_PACKETLOSS,535,40,45,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+    COMBOBOX        IDC_DELAY,611,40,45,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_SLAVE_CHANNEL, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 670

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 351

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+#endif    // Swedish resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Neutral (Sys. Default) resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_NEUSD)

+#ifdef _WIN32

+LANGUAGE LANG_NEUTRAL, SUBLANG_SYS_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_DXQUALITY_DIALOG DIALOGEX 0, 0, 699, 385

+STYLE DS_ABSALIGN | DS_SETFONT | DS_MODALFRAME | DS_3DLOOK | WS_MINIMIZEBOX | WS_POPUP | WS_VISIBLE | WS_CAPTION | WS_SYSMENU

+EXSTYLE WS_EX_WINDOWEDGE | WS_EX_STATICEDGE | WS_EX_APPWINDOW | WS_EX_NOINHERITLAYOUT

+CAPTION "webrtc ViE test program"

+FONT 9, "Arial", 400, 0, 0x0

+BEGIN

+    PUSHBUTTON      "Start Send",IDC_STARTSEND,589,270,50,19

+    PUSHBUTTON      "Stop Send",IDC_STOPSend,639,270,50,19

+    PUSHBUTTON      "Start Listen",IDC_STARTLISTEN,589,291,50,19

+    PUSHBUTTON      "StopListen",IDC_STOPLISTEN,639,291,50,19

+    CONTROL         "",IDC_LIVEVIDEO,"Static",SS_BITMAP | SS_CENTERIMAGE | SS_SUNKEN,450,179,139,101

+    COMBOBOX        IDC_DEVICE,487,14,185,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    CTEXT           "Select Capture Device",IDC_STATIC,485,7,78,8

+    COMBOBOX        IDC_CODEC_LIST,490,90,58,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Codec",IDC_STATIC,490,82,21,8

+    COMBOBOX        IDC_CODEC_SIZE,627,90,62,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Codec Size",IDC_STATIC,611,82,36,8

+    CONTROL         "",IDC_IPADDRESS1,"SysIPAddress32",WS_TABSTOP,490,46,90,13

+    EDITTEXT        IDC_LOCAL_PORT1,615,46,31,14,ES_AUTOHSCROLL

+    LTEXT           "IP-address",IDC_STATIC,498,37,36,8

+    LTEXT           "Local Port",IDC_STATIC,616,36,32,8

+    LTEXT           "Start Bitrate",IDC_STATIC,553,80,37,8

+    COMBOBOX        IDC_BITRATE,558,90,49,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    EDITTEXT        IDC_REMOTE_PORT1,582,46,31,14,ES_AUTOHSCROLL

+    LTEXT           "Port",IDC_STATIC,584,37,14,8

+    GROUPBOX        "Remote client 1",IDC_STATIC,487,27,203,50

+    LTEXT           "Max FrameRate",IDC_STATIC,488,106,50,8

+    COMBOBOX        IDC_MIN_FRAME_RATE,488,115,48,82,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    CONTROL         "",IDC_CAPTURE,"Static",SS_BITMAP | SS_CENTERIMAGE | SS_REALSIZEIMAGE,7,7,418,276

+    CONTROL         "TMMBR",IDC_TMMBR,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,486,138,40,10

+    GROUPBOX        "Standard Protection",IDC_STATIC,607,138,72,55

+    CONTROL         "None",IDC_PROT_NONE,"Button",BS_AUTORADIOBUTTON | WS_GROUP,615,146,33,10

+    CONTROL         "NACK",IDC_PROT_NACK,"Button",BS_AUTORADIOBUTTON,615,165,35,10

+    CONTROL         "FEC",IDC_PROT_FEC,"Button",BS_AUTORADIOBUTTON,615,155,30,10

+    CONTROL         "NACK & FEC",IDC_PROT_NACKFEC,"Button",BS_AUTORADIOBUTTON,615,174,52,10

+    COMBOBOX        IDC_RTCPMODE,571,119,80,57,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "RTCP Mode",IDC_STATIC,571,110,39,8

+    LISTBOX         IDC_INFORMATION,476,309,214,63,LBS_SORT | LBS_NOINTEGRALHEIGHT | WS_VSCROLL | WS_TABSTOP

+    COMBOBOX        IDC_PACKETBURST,653,118,36,57,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Packet Burst",IDC_STATIC,649,109,40,8

+    CONTROL         "Stop Log",IDC_FREEZELOG,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,465,292,44,10

+    PUSHBUTTON      "Version",IDC_VERSION,530,291,55,16

+    CONTROL         "Ext",IDC_EXTTRANSPORT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,493,66,25,10

+    LTEXT           "loss",IDC_STATIC,519,66,15,8

+    LTEXT           "delay",IDC_STATIC,578,66,18,8

+    COMBOBOX        IDC_PACKETLOSS,533,65,38,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+    COMBOBOX        IDC_DELAY,598,65,38,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+    PUSHBUTTON      "Record Incoming",IDC_BTN_RECORD_INCOMING,587,198,69,14

+    PUSHBUTTON      "Record outgoing",IDC_BTN_RECORD_OUTGOING,587,212,69,14

+    PUSHBUTTON      "Create Slave",IDC_BTN_CREATE_SLAVE,586,231,50,14

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_DXQUALITY_DIALOG, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 690

+        VERTGUIDE, 321

+        VERTGUIDE, 372

+        VERTGUIDE, 425

+        VERTGUIDE, 465

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 372

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+#endif    // Neutral (Sys. Default) resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+#define _AFX_NO_SPLITTER_RESOURCES

+#define _AFX_NO_OLE_RESOURCES

+#define _AFX_NO_TRACKER_RESOURCES

+#define _AFX_NO_PROPERTY_RESOURCES

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_KOR)

+#ifdef _WIN32

+LANGUAGE 18, 1

+#pragma code_page(949)

+#endif //_WIN32

+#include "res\Capture.rc2"  // non-Microsoft Visual C++ edited resources

+#include "l.kor\afxres.rc"          // Standard components

+#endif

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/trunk/src/video_engine/main/test/WindowsTest/CaptureDevicePool.cc b/trunk/src/video_engine/main/test/WindowsTest/CaptureDevicePool.cc
new file mode 100644
index 0000000..8f7c9a2
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/CaptureDevicePool.cc
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "CaptureDevicePool.h"
+#include "map_wrapper.h"
+#include <string.h>
+#include <assert.h>
+#include "critical_section_wrapper.h"
+#include "vie_file.h"
+
+CaptureDevicePool::CaptureDevicePool(VideoEngine* videoEngine):
+_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+_vieCapture(ViECapture::GetInterface(videoEngine)),
+_vieFile(ViEFile::GetInterface(videoEngine))
+{
+}
+
+CaptureDevicePool::~CaptureDevicePool(void)
+{
+    assert(_deviceMap.Size()==0);
+    _vieCapture->Release();
+    _vieFile->Release();
+    delete &_critSect;
+}
+
+WebRtc_Word32 CaptureDevicePool::GetCaptureDevice(int& captureId, const char* uniqeDeviceName)
+{
+    CriticalSectionScoped cs(_critSect);
+    DeviceItem* device=NULL;
+    
+    for(MapItem* item=_deviceMap.First();
+        item!=NULL;
+        item=_deviceMap.Next(item))
+    {
+        //Found the device?
+        if(strcmp(uniqeDeviceName,(static_cast<DeviceItem*>( item->GetItem()))->uniqeDeviceName)==0)
+        {
+            device=static_cast<DeviceItem*>( item->GetItem());
+            device->refCount++;
+            captureId=device->captureId;
+            return 0;
+        }
+    }
+    device = new DeviceItem;
+    strncpy(device->uniqeDeviceName,uniqeDeviceName,255);
+
+
+    // Device does not exist. Create it.
+    WebRtc_Word32 result=_vieCapture->AllocateCaptureDevice(device->uniqeDeviceName,strlen(device->uniqeDeviceName),device->captureId);
+    if(result==0)
+    {
+        //CaptureCapability cap;
+        /*cap.height=1080;
+        cap.width=1920;
+        cap.maxFPS=25;    
+        cap.interlaced=true;*/
+     //   result=_vieCapture->StartCapture(device->captureId,cap);
+        result=_vieFile->SetCaptureDeviceImage(device->captureId,"captureDeviceImage.jpg");
+    }
+    captureId=device->captureId;
+    _deviceMap.Insert(captureId,device);
+    device->refCount++;
+    
+    return result;
+
+
+}
+WebRtc_Word32 CaptureDevicePool::ReturnCaptureDevice(int captureId)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    MapItem* mapItem=_deviceMap.Find(captureId);
+    if(!mapItem)
+        return -1;
+
+    DeviceItem* item=static_cast<DeviceItem*> (mapItem->GetItem());
+    if(!item)
+        return 0;
+    item->refCount--;
+    WebRtc_Word32 result=0;
+
+    if(item->refCount==0)
+    {
+        result=_vieCapture->ReleaseCaptureDevice(captureId);
+        
+        _deviceMap.Erase(mapItem);
+        delete item;
+
+    }
+    return result;
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/CaptureDevicePool.h b/trunk/src/video_engine/main/test/WindowsTest/CaptureDevicePool.h
new file mode 100644
index 0000000..104b84f
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/CaptureDevicePool.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+
+#include "common_types.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_file.h"
+#include "map_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+}
+using namespace webrtc;
+class CaptureDevicePool
+{
+public:
+    CaptureDevicePool(VideoEngine* videoEngine);
+    ~CaptureDevicePool(void);
+    WebRtc_Word32 GetCaptureDevice(int& captureId, const char uniqeDeviceName[256]);
+    WebRtc_Word32 ReturnCaptureDevice(int captureId);
+
+    private: 
+        struct DeviceItem
+        {
+            int captureId;
+            WebRtc_Word32 refCount;
+            char uniqeDeviceName[256];
+            DeviceItem()
+            {
+                captureId=-1;
+                refCount=0;
+            }
+        };
+        CriticalSectionWrapper& _critSect;
+        ViECapture* _vieCapture;
+        ViEFile*    _vieFile;
+        MapWrapper _deviceMap;
+
+};
diff --git a/trunk/src/video_engine/main/test/WindowsTest/ChannelDlg.cc b/trunk/src/video_engine/main/test/WindowsTest/ChannelDlg.cc
new file mode 100644
index 0000000..cccfa8e
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/ChannelDlg.cc
@@ -0,0 +1,1297 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ChannelDlg.h"
+#include "VideoSize.h"
+#include "CaptureDevicePool.h"
+#include "ChannelPool.h"
+
+#include <Mmsystem.h>
+#include <dbt.h>
+
+
+#include "assert.h"
+
+
+#include <process.h> // threads.
+
+#if defined _WIN32
+    #define SLEEP_10_SEC ::Sleep(10000)
+    #define GET_TIME_IN_MS timeGetTime
+#endif
+
+// Hack to convert char to TCHAR, using two buffers to be able to
+// call twice in the same statement
+TCHAR convertTemp1[256] = {0};
+TCHAR convertTemp2[256] = {0};
+bool convertBufferSwitch(false);
+TCHAR* CharToTchar(const char* str, int len)
+{
+#ifdef _UNICODE
+  TCHAR* temp = convertBufferSwitch ? convertTemp1 : convertTemp2;
+  convertBufferSwitch = !convertBufferSwitch;
+  memset(temp, 0, sizeof(convertTemp1));
+  MultiByteToWideChar(CP_UTF8, 0, str, len, temp, 256);
+  return temp;
+#else
+  return str;
+#endif
+}
+
+// Hack to convert TCHAR to char
+char convertTemp3[256] = {0};
+char* TcharToChar(TCHAR* str, int len)
+{
+#ifdef _UNICODE
+  memset(convertTemp3, 0, sizeof(convertTemp3));
+  WideCharToMultiByte(CP_UTF8, 0, str, len, convertTemp3, 256, 0, 0);
+  return convertTemp3;
+#else
+  return str;
+#endif
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXChannelDlg dialog
+
+CDXChannelDlg::CDXChannelDlg(VideoEngine* videoEngine,
+                             CaptureDevicePool& captureDevicePool,
+                             ChannelPool& channelPool,
+    void* voiceEngine
+    ,CWnd* pParent,CDXChannelDlgObserver* observer,
+    int parentChannel/*=-1*/)
+	: CDialog(CDXChannelDlg::IDD, pParent),
+    _canAddLog(true),
+    _dialogObserver(observer),
+    _videoEngine(videoEngine),
+    _captureDevicePool(captureDevicePool),
+    _channelPool(channelPool),
+    _parentChannel(parentChannel),
+#ifndef NO_VOICE_ENGINE
+    _voiceEngine((VoiceEngine*) voiceEngine),
+#endif
+    _callbackEvent(::CreateEvent( NULL, FALSE, FALSE, NULL)),
+    _externalTransport(NULL)
+{
+    strcpy(_logMsg,"");
+	_channelId = -1;
+    _audioChannel=-1;
+    _captureId=-1;
+
+	//_transport = NULL;
+
+
+	//{{AFX_DATA_INIT(CDXChannelDlg)
+	//}}AFX_DATA_INIT
+	// Note that LoadIcon does not require a subsequent DestroyIcon in Win32	
+
+    InitializeCriticalSection(&_critCallback);
+    unsigned int threadID;
+    _callbackThread=(HANDLE)_beginthreadex(NULL,1024*1024,CallbackThread,(void*)this,0, &threadID);
+
+
+}
+
+void CDXChannelDlg::DoDataExchange(CDataExchange* pDX)
+{
+	CDialog::DoDataExchange(pDX);
+	//{{AFX_DATA_MAP(CDXChannelDlg)
+	DDX_Control(pDX, IDC_DEVICE, m_ctrlDevice);
+	DDX_Control(pDX, IDC_CODEC_LIST, m_ctrlCodec);
+	DDX_Control(pDX, IDC_CAPTURE, m_ctrlLiveRemoteVideo);
+	DDX_Control(pDX, IDC_LIVEVIDEO, m_ctrlLiveVideo);
+	DDX_Control(pDX, IDC_LOCAL_PORT1, m_localPort1);
+	DDX_Control(pDX, IDC_REMOTE_PORT1, m_remotePort1);
+	DDX_Control(pDX, IDC_IPADDRESS1, m_remoteIp1);
+    DDX_Control(pDX, IDC_CODEC_SIZE, m_ctrlCodecSize);
+    DDX_Control(pDX, IDC_RTCPMODE, m_ctrlRtcpMode);
+    DDX_Control(pDX, IDC_PACKETBURST, m_ctrlPacketBurst);
+	DDX_Control(pDX, IDC_BITRATE, m_ctrlBitrate);
+	DDX_Control(pDX, IDC_MIN_FRAME_RATE, m_ctrlMinFrameRate);
+    DDX_Control(pDX, IDC_TMMBR,m_cbTmmbr);
+    DDX_Control(pDX, IDC_EXTTRANSPORT,m_cbExternalTransport);
+    DDX_Control(pDX, IDC_PACKETLOSS,m_ctrlPacketLoss);
+    DDX_Control(pDX, IDC_DELAY,m_ctrlDelay);
+    DDX_Control(pDX, IDC_FREEZELOG,m_cbFreezeLog);
+    DDX_Control(pDX,IDC_INFORMATION,m_ctrlInfo);
+	//}}AFX_DATA_MAP
+}
+
+// ON_WM_SYSKEYDOWN			ALT+key
+
+BEGIN_MESSAGE_MAP(CDXChannelDlg, CDialog)
+	//{{AFX_MSG_MAP(CDXChannelDlg)
+	ON_WM_SYSCOMMAND()
+	ON_WM_RBUTTONUP()
+	//ON_WM_DEVICECHANGE()
+	ON_WM_PAINT()
+	ON_WM_QUERYDRAGICON()
+    ON_BN_CLICKED(IDC_STARTSEND, OnStartSend)
+	ON_BN_CLICKED(IDC_STOPSend, OnStopSend)
+	//ON_WM_TIMER()
+	ON_WM_DESTROY()
+	//}}AFX_MSG_MAP
+	ON_CBN_SELCHANGE(IDC_CODEC_LIST, OnCbnSelchangeCodecList)
+	ON_CBN_SELCHANGE(IDC_DEVICE, OnCbnSelchangeDevice)
+	ON_CBN_SELCHANGE(IDC_CODEC_SIZE, OnCbnSelchangeSize)
+	ON_CBN_SELCHANGE(IDC_BITRATE, OnCbnSelchangeBitrate)
+	//ON_MESSAGE(WM_DISPLAYCHANGE, OnDisplayChange)
+	ON_CBN_SELCHANGE(IDC_MIN_FRAME_RATE, OnCbnSelchangeMinFrameRate)	
+    ON_BN_CLICKED(IDC_STARTLISTEN, OnBnClickedStartlisten)
+    ON_BN_CLICKED(IDC_STOPLISTEN, OnBnClickedStoplisten)
+    ON_BN_CLICKED(IDC_TMMBR, &CDXChannelDlg::OnBnClickedTmmbr)
+    ON_CBN_SELCHANGE(IDC_RTCPMODE, &CDXChannelDlg::OnCbnSelchangeRtcpmode)
+    ON_BN_CLICKED(IDC_PROT_NACK, &CDXChannelDlg::OnBnClickedProtNack)
+    ON_BN_CLICKED(IDC_PROT_NONE, &CDXChannelDlg::OnBnClickedProtNone)
+    ON_BN_CLICKED(IDC_PROT_FEC, &CDXChannelDlg::OnBnClickedProtFec)    
+    ON_BN_CLICKED(IDC_FREEZELOG, &CDXChannelDlg::OnBnClickedFreezelog)
+    ON_BN_CLICKED(IDC_VERSION, &CDXChannelDlg::OnBnClickedVersion)
+    ON_BN_CLICKED(IDC_EXTTRANSPORT, &CDXChannelDlg::OnBnClickedExttransport)
+    ON_CBN_SELCHANGE(IDC_PACKETLOSS, &CDXChannelDlg::OnCbnSelchangePacketloss)
+    ON_CBN_SELCHANGE(IDC_DELAY, &CDXChannelDlg::OnCbnSelchangeDelay)
+    ON_BN_CLICKED(IDC_BTN_RECORD_INCOMING, &CDXChannelDlg::OnBnClickedBtnRecordIncoming)
+    ON_BN_CLICKED(IDC_BTN_RECORD_OUTGOING, &CDXChannelDlg::OnBnClickedBtnRecordOutgoing)
+    ON_BN_CLICKED(IDC_BTN_CREATE_SLAVE, &CDXChannelDlg::OnBnClickedBtnCreateSlave)
+    ON_BN_CLICKED(IDC_PROT_NACKFEC, &CDXChannelDlg::OnBnClickedProtNackFec)
+END_MESSAGE_MAP()
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXChannelDlg message handlers
+
+
+BOOL CDXChannelDlg::OnInitDialog()
+{
+	CDialog::OnInitDialog();
+
+	// Set the icon for this dialog.  The framework does this automatically
+	//  when the application's main window is not a dialog
+	SetIcon(m_hIcon, TRUE);			// Set big icon
+	SetIcon(m_hIcon, FALSE);		// Set small icon
+
+
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("5"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("6"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("7"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("8"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("9"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("10"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("11"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("12"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("13"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("14"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("15"));
+    ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("16"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("17"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("18"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("19"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("20"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("21"));
+    ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("22"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("23"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("24"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("25"));
+	::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("26"));
+    ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("27"));
+    ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("28"));
+    ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("29"));
+    ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("30"));
+    m_ctrlMinFrameRate.SetCurSel(25);
+
+    // Codec sizes
+    for(VideoSize i=UNDEFINED;i<NUMBER_OF_VIDEO_SIZE;i=VideoSize(i+1))
+    {
+        char sizeStr[64];
+        int width=0;
+        int height=0;
+        GetWidthHeight(i,width,height);
+        sprintf(sizeStr,"%d x %d",width,height);
+        ::SendMessage(m_ctrlCodecSize.m_hWnd, CB_ADDSTRING, 0,(LPARAM) CharToTchar(sizeStr,-1));
+    }
+    m_ctrlCodecSize.SetCurSel(8);
+
+    // RTCP mode
+    /*
+    kRtcpNone     = 0,
+    kRtcpCompound_RFC4585     = 1,
+    kRtcpNonCompound_RFC5506 = 2 */
+    ::SendMessage(m_ctrlRtcpMode.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("RTCP_NONE"));
+    ::SendMessage(m_ctrlRtcpMode.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("RTCP_COMPOUND_RFC4585"));
+    ::SendMessage(m_ctrlRtcpMode.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("RTCP_NON_COMPOUND_RFC5506"));
+    m_ctrlRtcpMode.SetCurSel(2);
+
+
+    //Packet Burst
+    ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("0"));
+    ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("10"));
+    ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("20"));
+    ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("30"));
+    m_ctrlPacketBurst.SetCurSel(0);
+
+
+    //Send Bitrate
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("50"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("100"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("200"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("300"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("500"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("1000"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("2000"));
+    ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("3000"));
+	::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("4000"));
+    ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("5000"));
+    ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("6000"));
+    ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("7000"));
+
+    m_ctrlBitrate.SetCurSel(3);
+
+    // External transport packet loss
+    ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("0"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("2"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("4"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("6"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("8"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("10"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("12"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("14"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("16"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("18"));
+	::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("20"));
+    m_ctrlPacketLoss.SetCurSel(0);
+
+    // External transport delay
+    ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("0"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("30"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("60"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("90"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("120"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("150"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("180"));
+	::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("210"));
+    m_ctrlDelay.SetCurSel(0);
+
+
+    _vieBase=ViEBase::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieBase==0,-5);
+
+    _vieCapture=ViECapture::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieCapture==0,-5);
+
+    _vieRTPRTCP=ViERTP_RTCP::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieRTPRTCP==0,-5);
+
+    _vieRender=ViERender::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieRender==0,-5);
+
+    _vieCodec=ViECodec::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieCodec==0,-5);
+    _vieNetwork=ViENetwork::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieNetwork==0,-5);
+
+    _vieFile=ViEFile::GetInterface(_videoEngine);
+    TEST_MUSTPASS(_vieFile==0,-5);
+
+#ifndef NO_VOICE_ENGINE
+
+    _veBase = VoEBase::GetInterface(_voiceEngine);
+    _veNetwork = VoENetwork::GetInterface(_voiceEngine);
+    _veCodec = VoECodec::GetInterface(_voiceEngine);
+    _veRTCP = VoERTP_RTCP::GetInterface(_voiceEngine);
+    TEST_MUSTPASS(_vieBase->SetVoiceEngine(_voiceEngine),-5);
+#endif
+
+	int err = 0;
+
+	char str[64];
+	bool found = false;
+
+
+	int captureIdx = 0;
+	while (-1 !=_vieCapture->GetCaptureDevice(captureIdx,str,sizeof(str),NULL,0))
+	{
+		char* tmp = strstr(str,"(VFW)");
+		if (!tmp)
+		{
+            ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)CharToTchar(str,-1));
+			found = true;
+		}
+		captureIdx++;
+		memset(str, 0, 64);
+	}
+    WIN32_FIND_DATA FindFileData;
+    HANDLE hFind;
+    //char fileSearch[256];
+    //strcpy(fileSearch,_T("*.avi"));
+    hFind = FindFirstFile(_T("*.avi"), &FindFileData);
+    if (hFind != INVALID_HANDLE_VALUE)
+    {
+
+      ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)(FindFileData.cFileName));
+      while(FindNextFile(hFind,&FindFileData))
+      {
+        ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)(FindFileData.cFileName));
+      }
+      FindClose(hFind);
+    }
+
+
+
+    ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("Conference"));
+    ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("None"));
+
+	if (!found)
+	{
+		strncpy(str,"N/A",64);
+		::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)CharToTchar(str,-1));
+	}
+    m_ctrlDevice.SetCurSel(0);
+
+    //Codecs
+    int numOfCodecs = _vieCodec->NumberOfCodecs();
+	for(int i=0; i<numOfCodecs;++i)
+	{
+		VideoCodec codec;
+        if(-1 !=_vieCodec->GetCodec(i,codec))
+        {
+            ::SendMessage(m_ctrlCodec.m_hWnd, CB_ADDSTRING, 0,(LPARAM)CharToTchar(codec.plName,-1));
+        }
+	}
+    m_ctrlCodec.SetCurSel(0);
+
+#ifndef NO_VOICE_ENGINE
+	CodecInst voiceCodec;
+    int numOfVeCodecs = _veCodec->NumOfCodecs();
+	for(int i=0; i<numOfVeCodecs;++i)
+	{
+        if(_veCodec->GetCodec(i,voiceCodec)!=-1)
+        {
+            if(strncmp(voiceCodec.plname,"ISAC",4)==0)
+            break;
+
+
+        }
+	}
+
+	_audioChannel = _veBase->CreateChannel();
+
+
+    TEST_MUSTPASS(_veRTCP->SetRTCPStatus(_audioChannel, true),-5);
+    TEST_MUSTPASS(_veCodec->SetSendCodec(_audioChannel, voiceCodec),-5);
+    TEST_MUSTPASS(_veBase->StartPlayout(_audioChannel),-5);
+
+#endif  //NO_VOICE_ENGINE
+
+	if(_parentChannel==-1)
+    {
+        TEST_MUSTPASS(_vieBase->CreateChannel(_channelId),-5);
+    }
+    else // This is a slave channel
+    {
+        TEST_MUSTPASS(_vieBase->CreateChannel(_channelId,_parentChannel),-5);
+    }
+#ifndef NO_VOICE_ENGINE
+    TEST_MUSTPASS(_vieBase->ConnectAudioChannel(_channelId,_audioChannel),-5);
+#endif
+
+    _channelPool.AddChannel(_channelId);	
+
+    //Set Receive codec
+    {
+    VideoCodec codec;
+    int numOfCodecs = _vieCodec->NumberOfCodecs();;
+	for(int i=0; i<numOfCodecs;++i)
+	{
+		if(-1 !=_vieCodec->GetCodec(i,codec))
+        {
+				if(codec.codecType == webrtc::kVideoCodecVP8)
+				{
+					codec.codecSpecific.VP8.feedbackModeOn = true;
+					codec.codecSpecific.VP8.pictureLossIndicationOn = true;
+				}
+            TEST_MUSTPASS(_vieCodec->SetReceiveCodec(_channelId,codec),-5);
+        }
+	}
+    }
+
+    //TMMBR
+    m_cbTmmbr.SetCheck(BST_CHECKED);
+    OnBnClickedTmmbr();
+
+    //Packet Burst
+    m_ctrlPacketBurst.SetCurSel(0);
+
+
+    //Protection method none
+    CButton *opProtection = (CButton *) GetDlgItem(IDC_PROT_NONE);
+    opProtection->SetCheck(BST_CHECKED);
+    OnBnClickedProtNone();
+
+
+    // Configure the renderer
+    ConfigureRender();
+
+    TEST_MUSTPASS(_vieCodec->RegisterEncoderObserver(_channelId,*this),kViECodecObserverAlreadyRegistered);
+    TEST_MUSTPASS(_vieCodec->RegisterDecoderObserver(_channelId,*this),-5);
+
+    TEST_MUSTPASS(_vieBase->RegisterObserver(*this),kViEBaseObserverAlreadyRegistered);
+
+
+
+
+    //Set captions based on channel id
+    m_remoteIp1.SetAddress(127,0,0,1);
+    CString port;	
+	port.AppendFormat(_T("%d"),11111+_channelId*4);       
+	m_remotePort1.SetWindowText(port);
+	m_localPort1.SetWindowText(port);
+
+    CString title;
+    this->GetWindowText(title);
+    if(_parentChannel==-1)
+    {        
+        title.AppendFormat(_T("%s - channel %d"),title,_channelId);
+    }
+    else
+    {
+        title.AppendFormat(_T("%s - slave channel %d - parent %d"),title,_channelId,_parentChannel);
+    }
+    this->SetWindowText(title);
+
+    if(_parentChannel!=-1)
+       m_ctrlDevice.EnableWindow(FALSE); //Prevent the capture device from being changed
+
+	return TRUE;  // return TRUE  unless you set the focus to a control
+}
+
+
+
+void CDXChannelDlg::OnTimer(UINT nIDEvent)
+{
+	CDialog::OnTimer(nIDEvent);
+}
+
+void CDXChannelDlg::SetSendCodec()
+{
+    // Get the codec structure
+    int codecSel= m_ctrlCodec.GetCurSel();
+    VideoCodec codec;
+    TEST_MUSTPASS(_vieCodec->GetCodec(codecSel,codec),-5);
+
+
+    // Set Codec Size
+    VideoSize sizeSel=VideoSize(m_ctrlCodecSize.GetCurSel());
+    int width, height;
+    GetWidthHeight(sizeSel, width, height);
+    codec.width=width;
+    codec.height=height;
+
+    //Set the codec bitrate
+    CString bitrateStr;
+	m_ctrlBitrate.GetLBText(m_ctrlBitrate.GetCurSel(), bitrateStr);
+    int bitrate = _ttoi(bitrateStr.GetBuffer(0));
+    if(codec.codecType!=kVideoCodecI420)
+    {
+        codec.startBitrate=bitrate;
+        codec.maxBitrate=bitrate*4;
+    }
+
+
+    //Set the codec frame rate
+    codec.maxFramerate = m_ctrlMinFrameRate.GetCurSel() +5;
+
+    if(strncmp(codec.plName, "VP8", 5) == 0)
+    {
+		codec.codecSpecific.VP8.feedbackModeOn = true;
+		codec.codecSpecific.VP8.pictureLossIndicationOn = true;
+        TEST_MUSTPASS(_vieRTPRTCP->SetKeyFrameRequestMethod(_channelId, kViEKeyFrameRequestPliRtcp),-5);
+    }else
+    {
+        TEST_MUSTPASS(_vieRTPRTCP->SetKeyFrameRequestMethod(_channelId, kViEKeyFrameRequestPliRtcp),-5);
+    }
+    TEST_MUSTPASS(_vieCodec->SetSendCodec(_channelId, codec),-5);
+
+    if (codec.codecType == webrtc::kVideoCodecI420)
+    {        // Need to set the receive codec size
+        _vieCodec->SetReceiveCodec(_channelId, codec);
+    }
+}
+
+void CDXChannelDlg::SetSendDestination()
+{
+    if(_externalTransport)
+        return;
+
+    BYTE part1, part2, part3, part4;
+	char sendIP1[16];
+	m_remoteIp1.GetAddress(part1, part2, part3, part4);
+	sprintf(sendIP1,"%d.%d.%d.%d",part1,part2,part3,part4);
+
+    CString strPort;
+    m_remotePort1.GetWindowText(strPort);
+	int remotePort1 = _ttoi(strPort.GetString());
+
+#ifdef IPV6
+	char* recIP = "::0";
+#else
+	char* recIP = "0.0.0.0";
+#endif //IPV6
+
+    TEST_MUSTPASS(_vieNetwork->SetSendDestination(_channelId,sendIP1,remotePort1),kViENetworkAlreadySending);
+
+    #ifndef NO_VOICE_ENGINE
+	    m_localPort1.GetWindowText(strPort);
+	    int localPort1 = _ttoi(strPort.GetString());
+        int res=_veBase->SetLocalReceiver(_audioChannel,localPort1+2);
+
+        TEST_MUSTPASS(_veBase->SetSendDestination(_audioChannel, remotePort1+2, sendIP1),-5)
+    #endif
+
+}
+
+void CDXChannelDlg::SetLocalReceiver()
+{
+    if(_externalTransport)
+        return;
+
+    CString strPort;
+	m_localPort1.GetWindowText(strPort);
+	int localPort1 = _ttoi(strPort.GetString());
+
+
+
+    // May fail because we are sending
+    TEST_MUSTPASS(_vieNetwork->SetLocalReceiver(_channelId, localPort1),-5);
+
+    #ifndef NO_VOICE_ENGINE
+        int res=_veBase->SetLocalReceiver(_audioChannel,localPort1+2);
+    #endif
+
+
+
+}
+
+void CDXChannelDlg::SetCaptureDevice()
+{
+    if(_parentChannel!=-1) // don't accept changing input on slave channels.
+        return;
+
+    int camSel=-1;
+    camSel=m_ctrlDevice.GetCurSel();
+
+	CString captureStr;
+    //captureStr.Compare
+	m_ctrlDevice.GetLBText(camSel, captureStr);
+	if(captureStr!=_T("N/A") != 0)
+	{
+
+        TEST_MUSTPASS(_vieFile->StopPlayFile(_captureId),kViEFileNotPlaying);
+        TEST_MUSTPASS(_vieCapture->DisconnectCaptureDevice(_channelId),kViECaptureDeviceNotConnected);
+        TEST_MUSTPASS(_vieRender->RemoveRenderer(_captureId),kViERenderInvalidRenderId);
+
+        if(_captureId>=0x1001 && _captureId<0x10FF)// ID is a capture device
+        {
+            TEST_MUSTPASS(_captureDevicePool.ReturnCaptureDevice(_captureId),-5);
+        }
+
+        if(captureStr!=_T("None")==0)
+        {
+            _captureId=-1;
+        }
+        else if(_tcsstr(captureStr,_T(".avi"))!=NULL ) // Selected an AVI file
+        {
+            TEST_MUSTPASS(_vieFile->StartPlayFile(TcharToChar(captureStr.GetBuffer(),-1),_captureId,false,webrtc::kFileFormatAviFile),-5);
+            TEST_MUSTPASS(_vieRender->AddRenderer(_captureId,m_ctrlLiveVideo.m_hWnd, 0, 0.0f, 0.0f,1.0f,1.0f),-5);
+            TEST_MUSTPASS(_vieRender->StartRender(_captureId),-5);
+            TEST_MUSTPASS(_vieFile->SendFileOnChannel(_captureId,_channelId),-5);
+            TEST_MUSTPASS(_vieFile->StartPlayFileAsMicrophone(_captureId,_channelId,true),-5);
+            //TEST_MUSTPASS(_vieFile->StartPlayAudioLocally(_captureId,_channelId),-5);
+        }
+        else
+        {
+
+            char captureName[256];
+            char uniqueCaptureName[256];
+
+            TEST_MUSTPASS(_vieCapture->GetCaptureDevice(camSel,captureName,256,uniqueCaptureName,256),-5);
+
+            TEST_MUSTPASS(_captureDevicePool.GetCaptureDevice(_captureId,uniqueCaptureName),-5);
+            TEST_MUSTPASS(_vieCapture->StartCapture(_captureId),kViECaptureDeviceAlreadyStarted);
+            TEST_MUSTPASS(_vieCapture->RegisterObserver(_captureId,*this),kViECaptureObserverAlreadyRegistered);
+
+            TEST_MUSTPASS(_vieRender->AddRenderer(_captureId,m_ctrlLiveVideo.m_hWnd, 0, 0.0f, 0.0f,1.0f,1.0f),-5);
+            TEST_MUSTPASS(_vieCapture->ConnectCaptureDevice(_captureId,_channelId),-5);
+            TEST_MUSTPASS(_vieRender->StartRender(_captureId),-5);            
+        }
+    }
+
+}
+
+
+
+void CDXChannelDlg::OnBnClickedStartlisten()
+{
+
+
+    // Configure the local ports
+    SetLocalReceiver();
+
+    //Configure the remote destination - needed in order to be able to respond to RTCP messages
+    SetSendDestination();
+
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StartReceive(_audioChannel),-5);
+    #endif
+    TEST_MUSTPASS(_vieBase->StartReceive(_channelId),-5);
+
+
+}
+
+void CDXChannelDlg::OnStartSend()
+{
+
+    // Set the send destination
+    SetSendDestination();
+
+    // Configure the local ports (needed to be able to receive RTCP)
+    //SetLocalReceiver();
+
+
+    // Set the send codec
+    SetSendCodec();
+
+    if(_captureId==-1) // If no capture device has been set.
+	    SetCaptureDevice(); //Set the capture device
+
+
+
+    //TEST_MUSTPASS(_vieRTPRTCP->SetStartSequenceNumber(_channelId,1),-5);
+
+    // Start sending
+    TEST_MUSTPASS(_vieBase->StartSend(_channelId),-5);
+
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StartSend(_audioChannel),-5);
+    #endif
+
+
+}
+
+void CDXChannelDlg::ConfigureRender()
+{
+    TEST_MUSTPASS(_vieRender->AddRenderer(_channelId,m_ctrlLiveRemoteVideo.m_hWnd, 0, 0.0f, 0.0f,1.0f,1.0f),-5);
+
+    TEST_MUSTPASS(_vieFile->SetRenderStartImage(_channelId,"renderStartImage.jpg"),-5);
+    TEST_MUSTPASS(_vieRender->StartRender(_channelId),-5);
+    TEST_MUSTPASS(_vieFile->SetRenderTimeoutImage(_channelId,"renderTimeoutImage.jpg"),-5);
+
+
+}
+
+
+void CDXChannelDlg::OnStopSend()
+{
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StopSend(_audioChannel),-5);
+    #endif
+
+
+    TEST_MUSTPASS(_vieBase->StopSend(_channelId),kViEBaseNotSending);   // Accept error Not sending
+
+
+}
+void CDXChannelDlg::OnBnClickedStoplisten()
+{
+
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StopReceive(_audioChannel),-5);
+    #endif
+    TEST_MUSTPASS(_vieBase->StopReceive(_channelId),-5);
+}
+
+
+void CDXChannelDlg::OnDestroy()
+{
+
+    OnStopSend();
+    OnBnClickedStoplisten();
+
+    if(_vieCapture && _parentChannel==-1)
+    {
+        _vieCapture->DisconnectCaptureDevice(_channelId);
+        _captureDevicePool.ReturnCaptureDevice(_captureId);
+    }
+    if(_vieFile && _parentChannel!=-1)
+    {
+        TEST_MUSTPASS(_vieFile->StopPlayFile(_captureId),kViEFileNotPlaying);
+    }
+
+
+
+
+    if(_videoEngine)
+	{
+        if(_parentChannel==-1)
+        {
+            _vieCodec->DeregisterEncoderObserver(_channelId);
+        }
+        _vieBase->DeleteChannel(_channelId);
+        _channelPool.RemoveChannel(_channelId);
+	}
+
+	_videoEngine = NULL;
+#ifndef NO_VOICE_ENGINE
+
+    if (_voiceEngine)
+	{
+        _veBase->DeleteChannel(_audioChannel);
+        _veBase->Release();
+        _veNetwork->Release();
+        _veCodec->Release();
+        _veRTCP->Release();
+	}
+#endif
+
+
+    strcpy(_logMsg,"");
+    SetEvent(_callbackEvent);
+    MSG msg; // Wait until the callback thread exits. Need to handle messages since the callback thread can call SendMessage when updating UI
+    while(WaitForSingleObject(_callbackThread,10)==WAIT_TIMEOUT)
+    {
+        DWORD ret = PeekMessage( &msg, NULL, 0, 0,PM_REMOVE );
+        if (ret >0)
+        {
+            TranslateMessage(&msg);
+            DispatchMessage(&msg);
+        }
+    }
+
+    CloseHandle(_callbackThread);
+    CloseHandle(_callbackEvent);
+    DeleteCriticalSection(&_critCallback);
+
+    TEST_MUSTPASS(_vieCapture->Release()<0,-5);
+    TEST_MUSTPASS(_vieRTPRTCP->Release()<0,-5);
+    TEST_MUSTPASS(_vieRender->Release()<0,-5);
+    TEST_MUSTPASS(_vieCodec->Release()<0,-5);
+    TEST_MUSTPASS(_vieNetwork->Release()<0,-5);
+    TEST_MUSTPASS(_vieFile->Release()<0,-5);
+    TEST_MUSTPASS(_vieBase->Release()<0,-5);
+
+
+
+#ifdef TEST_EXTERNAL_TRANSPORT
+	if(_transport)
+		delete _transport;
+	_transport = NULL;
+#endif
+
+    delete _externalTransport;
+
+	CDialog::OnDestroy();
+    if(_dialogObserver)
+    {
+        _dialogObserver->ChannelDialogEnded(this);
+    }
+}
+
+void CDXChannelDlg::OnCancel()
+{
+    DestroyWindow();
+}
+// If you add a minimize button to your dialog, you will need the code below
+//  to draw the icon.  For MFC applications using the document/view model,
+//  this is automatically done for you by the framework.
+
+void CDXChannelDlg::OnPaint()
+{
+    if (IsIconic())
+	{
+		CPaintDC dc(this); // device context for painting
+
+		SendMessage(WM_ICONERASEBKGND, (WPARAM) dc.GetSafeHdc(), 0);
+
+		// Center icon in client rectangle
+		int cxIcon = GetSystemMetrics(SM_CXICON);
+		int cyIcon = GetSystemMetrics(SM_CYICON);
+		CRect rect;
+		GetClientRect(&rect);
+		int x = (rect.Width() - cxIcon + 1) / 2;
+		int y = (rect.Height() - cyIcon + 1) / 2;
+
+		// Draw the icon
+		dc.DrawIcon(x, y, m_hIcon);
+	}
+	else
+	{
+		CDialog::OnPaint();
+	}
+}
+
+BOOL CDXChannelDlg::OnDeviceChange( UINT nID, DWORD lParam)
+{
+	if(nID ==  DBT_DEVNODES_CHANGED)
+	{
+	  //  SetCaptureDevice();
+	}
+	return CDialog::OnDeviceChange(nID, lParam);
+}
+
+
+void CDXChannelDlg::OnSysCommand(UINT nID, LPARAM lParam)
+{
+	if(SC_MAXIMIZE == nID)
+	{}
+	CDialog::OnSysCommand(nID, lParam);
+}
+
+
+static bool fullScreen = false;
+void CDXChannelDlg::OnRButtonUp( UINT nFlags, CPoint point)
+{
+	CDialog::OnRButtonUp( nFlags,  point);
+}
+
+// The system calls this to obtain the cursor to display while the user drags
+//  the minimized window.
+HCURSOR CDXChannelDlg::OnQueryDragIcon()
+{
+	return (HCURSOR) m_hIcon;
+}
+
+void CDXChannelDlg::OnCbnSelchangeCodecList()
+{
+    SetSendCodec();
+}
+
+
+void CDXChannelDlg::OnCbnSelchangeSize()
+{
+    SetSendCodec();
+}
+
+void CDXChannelDlg::OnCbnSelchangeDevice()
+{
+
+
+    SetCaptureDevice();
+
+}
+
+
+void CDXChannelDlg::OnCbnSelchangeBitrate()
+{
+
+    SetSendCodec();
+
+}
+
+void CDXChannelDlg::OnCbnSelchangeMinFrameRate()
+{
+
+    SetSendCodec();
+
+}
+
+
+void CDXChannelDlg::OnBnClickedTmmbr()
+{
+
+    TEST_MUSTPASS(_vieRTPRTCP->SetTMMBRStatus(_channelId,m_cbTmmbr.GetCheck()==BST_CHECKED),-5);
+
+}
+
+void CDXChannelDlg::OnCbnSelchangeRtcpmode()
+{
+
+ /*
+ kRtcpNone     = 0,
+ kRtcpCompound_RFC4585     = 1,
+ kRtcpNonCompound_RFC5506 = 2 */
+    ViERTCPMode mode=ViERTCPMode(m_ctrlRtcpMode.GetCurSel());
+    TEST_MUSTPASS(_vieRTPRTCP->SetRTCPStatus(_channelId,mode),-5);
+
+}
+
+void CDXChannelDlg::OnBnClickedFreezelog()
+{
+    _canAddLog=m_cbFreezeLog.GetCheck()!=BST_CHECKED;
+}
+
+void CDXChannelDlg::OnBnClickedProtNack()
+{
+
+    TEST_MUSTPASS(_vieRTPRTCP->SetNACKStatus(_channelId,true),-5);
+
+}
+
+void CDXChannelDlg::OnBnClickedProtNone()
+{
+
+    TEST_MUSTPASS(_vieRTPRTCP->SetNACKStatus(_channelId,false),-5);
+    TEST_MUSTPASS(_vieRTPRTCP->SetFECStatus(_channelId,false,0,0),-5);
+    TEST_MUSTPASS(_vieRTPRTCP->SetHybridNACKFECStatus(_channelId,false,0,0),-5);
+}
+
+void CDXChannelDlg::OnBnClickedProtFec()
+{
+    int noCodec=_vieCodec->NumberOfCodecs();
+    int redPayloadType=0;
+    int fecPayloadType=0;
+    for(unsigned char i=0;i<noCodec;++i)
+    {
+        VideoCodec codec;
+        _vieCodec->GetCodec(i,codec);
+        if(codec.codecType==webrtc::kVideoCodecRED)
+        {
+            redPayloadType=codec.plType;
+        }
+        if(codec.codecType==webrtc::kVideoCodecULPFEC)
+        {
+            fecPayloadType=codec.plType;
+        }
+    }
+    TEST_MUSTPASS(_vieRTPRTCP->SetFECStatus(_channelId,true,redPayloadType,fecPayloadType),-5);
+}
+
+void CDXChannelDlg::OnBnClickedProtNackFec()
+{
+    int noCodec=_vieCodec->NumberOfCodecs();
+    int redPayloadType=0;
+    int fecPayloadType=0;
+    for(unsigned char i=0;i<noCodec;++i)
+    {
+        VideoCodec codec;
+        _vieCodec->GetCodec(i,codec);
+        if(codec.codecType==webrtc::kVideoCodecRED)
+        {
+            redPayloadType=codec.plType;
+        }
+        if(codec.codecType==webrtc::kVideoCodecULPFEC)
+        {
+            fecPayloadType=codec.plType;
+        }
+    }
+    TEST_MUSTPASS(_vieRTPRTCP->SetHybridNACKFECStatus(_channelId,true,
+                                                      redPayloadType,
+                                                      fecPayloadType),-5);
+
+}
+
+void CDXChannelDlg::OnBnClickedVersion()
+{
+    char version[1024];
+    _vieBase->GetVersion(version);
+    MessageBox(CharToTchar(version,-1));
+    int p=strlen(version);
+#ifndef NO_VOICE_ENGINE
+    _veBase->GetVersion(version);
+    MessageBox(CharToTchar(version,-1));
+#endif
+}
+
+unsigned int WINAPI CDXChannelDlg::CallbackThread(LPVOID lpParameter)
+{
+    static_cast<CDXChannelDlg*>(lpParameter)->CallbackThreadProcess();
+    return 0;
+}
+
+void CDXChannelDlg::CallbackThreadProcess()
+{
+    while(1)
+    {
+        if(WAIT_OBJECT_0==WaitForSingleObject(_callbackEvent,INFINITE))
+        {
+            char smsg[512];
+            EnterCriticalSection(&_critCallback);
+            strncpy(smsg,_logMsg,strlen(_logMsg)+1);
+            strcpy(_logMsg,"");
+
+
+            LeaveCriticalSection(&_critCallback);
+            if(strstr(smsg,"Send")!=NULL)
+            {
+                unsigned short fractionLost=0;
+                unsigned int cumulativeLost=0;
+                unsigned int extendedMax=0;
+                unsigned int jitter=0;
+                int rttMs=0;
+
+
+
+                _vieRTPRTCP->GetReceivedRTCPStatistics(_channelId,
+                                                  fractionLost,
+                                                  cumulativeLost,
+                                                  extendedMax,
+                                                  jitter,
+                                                  rttMs);
+
+                //int bw=0;
+                //if(_vieCodec->GetAvailableBandwidth(_channelId,bw)==0)
+                //{
+                //    sprintf(smsg,"%s, rtt %d, loss %d,bw %d", smsg,rttMs,fractionLost,bw);
+                //}
+                //else
+                //{
+                //    _vieBase->LastError(); // Reset last error.
+                //}
+
+
+
+            }
+            if(strlen(smsg))
+            {
+                m_ctrlInfo.InsertString(0,(LPCTSTR) CharToTchar(smsg,-1));
+                while(m_ctrlInfo.GetCount()==151)
+                    m_ctrlInfo.DeleteString(150);
+            }
+            else
+            {
+                break; // End the callback thread
+            }
+        }
+    }
+
+}
+void CDXChannelDlg::AddToInfo(const char* msg)
+{
+    if(!_canAddLog)
+        return;
+    EnterCriticalSection(&_critCallback);
+
+    SYSTEMTIME systemTime;
+    GetSystemTime(&systemTime);
+
+    if(strlen(_logMsg)==0)
+    {
+        SetEvent(_callbackEvent); // Notify of new
+    }
+
+    sprintf (_logMsg, "(%2u:%2u:%2u:%3u) %s", systemTime.wHour,
+                                                           systemTime.wMinute,
+                                                           systemTime.wSecond,
+                                                           systemTime.wMilliseconds,
+                                                           msg
+                                                           );
+
+
+
+    LeaveCriticalSection(&_critCallback);
+
+
+}
+
+void CDXChannelDlg::IncomingRate(const int videoChannel,
+                              unsigned int framerate,
+                              unsigned int bitrate)
+{
+	char str[64];
+	sprintf(str,"Incoming Fr:%d br %d\n", framerate, bitrate);
+    AddToInfo(str);
+}
+
+void CDXChannelDlg::RequestNewKeyFrame(int channel)
+{
+    assert(!"(RequestNewKeyFrame why is it called");
+}
+void CDXChannelDlg::PerformanceAlarm(unsigned int cpuLoad)
+{
+    char str[64];
+    sprintf(str,"Performance alarm %d",cpuLoad);    
+    AddToInfo(str);
+}
+void CDXChannelDlg::OutgoingRate(const int videoChannel,
+                              unsigned int framerate,
+                              unsigned int bitrate)
+	{
+		char str[64];
+        sprintf(str,"Send Fr:%d br %d", framerate, bitrate);
+        AddToInfo(str);
+	}
+void CDXChannelDlg::IncomingCodecChanged(const int  videoChannel,
+                                      const VideoCodec& videoCodec)
+	{
+		char str[128];
+        sprintf(str,"Incoming codec channel:%d pltype:%d width:%d height:%d\n", videoChannel, videoCodec.plType, videoCodec.width,videoCodec.height);        
+        AddToInfo(str);
+	}
+void CDXChannelDlg::BrightnessAlarm(const int captureId,
+                                 const Brightness brightness)
+{
+
+    switch(brightness)
+    {
+    case Normal:        
+        AddToInfo("BrightnessAlarm - image ok.\n");
+        break;
+    case Bright:        
+        AddToInfo("BrightnessAlarm - light image.\n");
+        break;
+    case Dark:        
+        AddToInfo("BrightnessAlarm - dark image.\n");
+        break;
+    }
+}
+
+void CDXChannelDlg::CapturedFrameRate(const int captureId,
+                                   const unsigned char frameRate)
+{
+   char str[64];
+   sprintf(str,"Local Camera Frame rate:%d \n", frameRate);
+   AddToInfo(str);
+}
+
+void CDXChannelDlg::NoPictureAlarm(const int captureId,
+                                const CaptureAlarm alarm)
+{
+   char str[64];
+   sprintf(str,"No Picture alarm\n");   
+   AddToInfo(str);
+
+}
+
+
+void CDXChannelDlg::OnBnClickedExttransport()
+{
+    if(m_cbExternalTransport.GetCheck()==BST_CHECKED)
+    {
+        m_localPort1.EnableWindow(FALSE);
+        m_remotePort1.EnableWindow(FALSE);
+        m_remoteIp1.EnableWindow(FALSE);
+        m_ctrlPacketLoss.EnableWindow(TRUE);
+        m_ctrlDelay.EnableWindow(TRUE);
+        _externalTransport= new TbExternalTransport(*_vieNetwork);
+        _vieNetwork->RegisterSendTransport(_channelId,*_externalTransport);
+    }
+    else
+    {
+        _vieNetwork->DeregisterSendTransport(_channelId);
+
+        delete _externalTransport;
+        _externalTransport=NULL;
+        m_localPort1.EnableWindow(TRUE);
+        m_remotePort1.EnableWindow(TRUE);
+        m_remoteIp1.EnableWindow(TRUE);
+        m_ctrlPacketLoss.EnableWindow(FALSE);
+        m_ctrlDelay.EnableWindow(FALSE);
+    }
+}
+
+
+void CDXChannelDlg::OnCbnSelchangePacketloss()
+{
+    if(_externalTransport)
+    {
+        _externalTransport->SetPacketLoss(m_ctrlPacketLoss.GetCurSel()*2);
+    }
+}
+
+
+void CDXChannelDlg::OnCbnSelchangeDelay()
+{
+    if(_externalTransport)
+    {
+        _externalTransport->SetNetworkDelay(m_ctrlDelay.GetCurSel()*30);
+    }
+
+}
+
+void CDXChannelDlg::OnBnClickedBtnRecordIncoming()
+{
+
+    CButton *recordBtn = (CButton *) GetDlgItem(IDC_BTN_RECORD_INCOMING);
+    
+    CString text;
+    recordBtn->GetWindowText(text);
+    if(text!=_T("Stop Rec Inc")!=0)
+    {
+        recordBtn->SetWindowText(_T("Stop Rec Inc"));
+        SYSTEMTIME time;
+        GetSystemTime(&time);
+        sprintf(_fileName,"IncomingChannel%d_%4d%2d%2d%2d%2d.avi",_channelId,time.wYear,time.wMonth,time.wDay,time.wHour,time.wMinute);
+
+        AudioSource audioSource=PLAYOUT;
+        webrtc::CodecInst audioCodec;
+        strcpy(audioCodec.plname,"L16");
+        audioCodec.rate     = 256000;
+        audioCodec.plfreq   = 16000;
+        audioCodec.pacsize  = 160;
+
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec,0,sizeof(videoCodec));
+
+        strcpy(videoCodec.plName,"VP8");
+        videoCodec.maxBitrate=1000;
+        videoCodec.startBitrate=1000;
+        videoCodec.width=352;
+        videoCodec.height=288;
+        videoCodec.codecType=webrtc::kVideoCodecVP8;
+        videoCodec.maxFramerate=30;        
+        TEST_MUSTPASS(_vieFile->StartRecordIncomingVideo(_channelId,_fileName,audioSource,audioCodec, videoCodec),-5);		
+    }
+    else
+    {
+        recordBtn->SetWindowText(_T("Record Incoming"));
+        TEST_MUSTPASS(_vieFile->StopRecordIncomingVideo(_channelId),-5);
+        CString msg;
+        msg.AppendFormat(_T("Recorded file %s"),_fileName);
+        MessageBox(msg);
+    }
+}
+
+void CDXChannelDlg::OnBnClickedBtnRecordOutgoing()
+{
+
+    CButton *recordBtn = (CButton *) GetDlgItem(IDC_BTN_RECORD_OUTGOING);
+    CString text;
+    recordBtn->GetWindowText(text);
+    if(text!=_T("Stop Rec Out"))
+    {
+        recordBtn->SetWindowText(_T("Stop Rec Out"));
+        SYSTEMTIME time;
+        GetSystemTime(&time);
+        sprintf(_fileName,"OutgoingChannel%d_%4d%2d%2d%2d%2d.avi",_channelId,time.wYear,time.wMonth,time.wDay,time.wHour,time.wMinute);
+
+        AudioSource audioSource=MICROPHONE;
+        webrtc::CodecInst audioCodec;
+        strcpy(audioCodec.plname,"L16");
+        audioCodec.rate     = 256000;
+        audioCodec.plfreq   = 16000;
+        audioCodec.pacsize  = 160;
+
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec,0,sizeof(videoCodec));
+
+        strcpy(videoCodec.plName,"VP8");
+        videoCodec.maxBitrate=1000;
+        videoCodec.startBitrate=1000;
+        videoCodec.width=352;
+        videoCodec.height=288;
+        videoCodec.codecType=webrtc::kVideoCodecVP8;
+        videoCodec.maxFramerate=30;        
+        TEST_MUSTPASS(_vieFile->StartRecordOutgoingVideo(_channelId,_fileName,audioSource,audioCodec,videoCodec),-5);		
+    }
+    else
+    {
+        recordBtn->SetWindowText(_T("Record Outgoing"));
+        TEST_MUSTPASS(_vieFile->StopRecordOutgoingVideo(_channelId),-5);
+        CString msg;
+        msg.AppendFormat(_T("Recorded file %s"),_fileName);
+        MessageBox(msg);
+    }
+}
+
+void CDXChannelDlg::OnBnClickedBtnCreateSlave()
+{
+    CDXChannelDlg* newSlave =new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,_dialogObserver,_channelId);
+    newSlave->Create(CDXChannelDlg::IDD,NULL);
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/ChannelDlg.h b/trunk/src/video_engine/main/test/WindowsTest/ChannelDlg.h
new file mode 100644
index 0000000..43aeb09
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/ChannelDlg.h
@@ -0,0 +1,273 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_CHANNELDLG_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_CHANNELDLG_H_
+
+#include "StdAfx.h"
+//#define NO_VOICE_ENGINE
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXChannelDlg dialog
+// Include ViE headers
+
+#include "common_types.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_errors.h"
+#include "vie_file.h"
+#include "tbExternalTransport.h"
+
+#include "resource.h"		// main symbols
+
+
+#ifndef NO_VOICE_ENGINE
+
+#include "voe_base.h"
+#include "voe_errors.h"
+#include "voe_base.h"
+#include "voe_network.h"
+#include "voe_codec.h"
+#include "voe_rtp_rtcp.h"
+#endif
+
+using namespace webrtc;
+class CDXChannelDlg;
+class CaptureDevicePool;
+class ChannelPool;
+
+#define TEST_MUSTPASS(expr,oklasterror)                                         \
+    {                                                               \
+        if ((expr))                                                 \
+        {                                                           \
+            CString r_msg;                                        \
+            int r_lastError=_vieBase->LastError();    \
+            CString exp;    \
+            exp=#expr;\
+            r_msg.Format(_T("\nError at line:%i, %s \nError code: %i\n"),__LINE__, exp,r_lastError);      \
+            if(r_lastError!=oklasterror) \
+            ::MessageBox (NULL, (LPCTSTR)r_msg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);                                   \
+        }                                                           \
+    }
+
+class CDXChannelDlgObserver
+{
+public:
+    virtual void ChannelDialogEnded(CDXChannelDlg* context)=0;
+
+protected:
+    virtual ~CDXChannelDlgObserver(){};
+
+};
+
+class CDXChannelDlg : public CDialog , public ViEEncoderObserver, public ViEDecoderObserver, public ViEBaseObserver, public ViECaptureObserver
+{
+// Construction
+public:
+	CDXChannelDlg(VideoEngine* videoEngine,
+        CaptureDevicePool& captureDevicePool,
+        ChannelPool& channelPool,
+        void* voiceEngine=NULL
+    ,CWnd* pParent = NULL,CDXChannelDlgObserver* observer=NULL,int parentChannel=-1);	// standard constructor
+
+// Dialog Data
+	//{{AFX_DATA(CDXChannelDlg)
+	enum { IDD = IDD_DXQUALITY_DIALOG };
+	CComboBox	m_ctrlDevice;
+	CComboBox	m_ctrlCodec;	
+	CComboBox	m_ctrlBitrate;
+	CComboBox	m_ctrlCodecSize;
+    CComboBox	m_ctrlRtcpMode;    
+    CComboBox	m_ctrlPacketBurst;    
+	CComboBox	m_ctrlMinFrameRate;	
+    
+    CListBox 	m_ctrlInfo;
+	
+	CStatic		m_ctrlLiveRemoteVideo;
+	CStatic		m_ctrlLiveVideo;
+	CEdit		m_localPort1;
+	CEdit		m_remotePort1;	
+	CIPAddressCtrl	m_remoteIp1;
+    CButton     m_cbTmmbr;
+    CButton     m_cbExternalTransport;
+    CButton     m_cbFreezeLog;
+    CButton     m_cbDefaultSendChannel;
+    CComboBox   m_ctrlPacketLoss;
+    CComboBox   m_ctrlDelay;
+    
+	
+	//}}AFX_DATA
+
+	// ClassWizard generated virtual function overrides
+	//{{AFX_VIRTUAL(CDXChannelDlg)
+	protected:
+	virtual void DoDataExchange(CDataExchange* pDX);	// DDX/DDV support
+	//}}AFX_VIRTUAL
+
+
+
+public : 
+    // Callback
+
+    //Capture observer
+    virtual void BrightnessAlarm(const int captureId,
+                                 const Brightness brightness);
+
+    virtual void CapturedFrameRate(const int captureId,
+                                   const unsigned char frameRate);
+
+    virtual void NoPictureAlarm(const int captureId,
+                                const CaptureAlarm alarm);
+
+
+    // The same callback method is used both to raise and to clear the alarm.
+    // true - raise, false - clear
+    // virtual void NoPictureAlarm(bool active = true);
+
+    // Encoder observer
+    virtual void OutgoingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate) ;
+
+    //Decoder observer
+    virtual void IncomingCodecChanged(const int  videoChannel,
+                                      const VideoCodec& videoCodec);
+
+    virtual void IncomingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate);
+    
+    virtual void RequestNewKeyFrame(const int videoChannel);
+
+    // Base observer
+    virtual void PerformanceAlarm(const unsigned int cpuLoad);
+
+    
+    //virtual void IncomingCSRCChanged(int channel, unsigned int csrc, bool added);    
+    
+
+
+// Implementation
+protected:
+	HICON m_hIcon;
+    int _channelId;
+    int _parentChannel;
+    int _audioChannel;
+	bool _canAddLog;
+
+    // Thread and function for callbacks
+    CRITICAL_SECTION _critCallback;
+    HANDLE _callbackThread;
+    HANDLE _callbackEvent;
+    char _logMsg[512];
+    static  unsigned int WINAPI CallbackThread(LPVOID lpParameter);    
+    void CallbackThreadProcess();
+
+
+
+	//void GetSize(int sizeSel, int &width, int &height);
+	virtual void ConfigureRender();
+
+    virtual void SetCaptureDevice();
+    virtual void SetLocalReceiver();
+    virtual void SetSendDestination();
+    virtual void SetSendCodec();
+
+    
+    void AddToInfo(const char* msg);
+
+	//	afx_msg void Command(UINT nID, LPARAM lParam);
+
+	// Generated message map functions
+	//{{AFX_MSG(CDXChannelDlg)
+	virtual BOOL OnInitDialog();
+	afx_msg void OnSysCommand(UINT nID, LPARAM lParam);
+	afx_msg void OnRButtonUp( UINT nFlags, CPoint point);	
+	afx_msg BOOL OnDeviceChange( UINT, DWORD );
+	afx_msg void OnPaint();
+	//afx_msg LRESULT OnDisplayChange(WPARAM, LPARAM);
+	afx_msg HCURSOR OnQueryDragIcon();
+	virtual afx_msg void OnStartSend();	
+	virtual afx_msg void OnDestroy();
+	virtual afx_msg void OnStopSend();    
+    virtual afx_msg void OnCancel();
+	afx_msg void OnTimer(UINT nIDEvent);
+
+	//}}AFX_MSG
+	DECLARE_MESSAGE_MAP()
+
+private:
+	CDXChannelDlgObserver*  _dialogObserver;
+	
+	VideoEngine* _videoEngine;
+    ViEBase*     _vieBase;
+    ViECapture*  _vieCapture;
+    ViERTP_RTCP* _vieRTPRTCP;
+    ViERender*   _vieRender;
+    ViECodec*    _vieCodec;
+    ViENetwork*  _vieNetwork;
+    ViEFile*      _vieFile;
+    TbExternalTransport* _externalTransport;
+    char             _fileName[256];
+
+
+#ifndef NO_VOICE_ENGINE
+    VoiceEngine*		_voiceEngine;
+    VoEBase*             _veBase;
+    VoENetwork*          _veNetwork;
+    VoECodec*            _veCodec;
+    VoERTP_RTCP*         _veRTCP;
+#else
+    void*                   _voiceEngine;
+
+#endif
+
+    VideoCodec     _sendCodec;
+    int _captureId;
+    CaptureDevicePool& _captureDevicePool;
+    ChannelPool& _channelPool;
+
+
+	afx_msg void OnCbnSelchangeCodecList();
+	afx_msg void OnCbnSelchangeDevice();
+	afx_msg void OnCbnSelchangeSize();
+	afx_msg void OnCbnSelchangeBitrate();    
+	afx_msg void OnCbnSelchangeWindowSize();	
+	afx_msg void OnBnClickedversion();
+	afx_msg void OnCbnSelchangeMinFrameRate();	
+    afx_msg void OnBnClickedStartlisten();
+    afx_msg void OnBnClickedStoplisten();
+    afx_msg void OnBnClickedStopsend();
+    afx_msg void OnBnClickedTmmbr();
+    afx_msg void OnCbnSelchangeRtcpmode();
+    afx_msg void OnBnClickedProtNack();
+    afx_msg void OnBnClickedProtNone();
+    afx_msg void OnBnClickedProtFec();
+    afx_msg void OnBnClickedProtNackFec();  
+    afx_msg void OnBnClickedFreezelog();
+public:
+    afx_msg void OnBnClickedExttransport();    
+    afx_msg void OnCbnSelchangePacketloss();
+    afx_msg void OnCbnSelchangeDelay();
+    afx_msg void OnBnClickedBtnRecordIncoming();
+    afx_msg void OnBnClickedBtnRecordOutgoing();
+    afx_msg void OnBnClickedBtnCreateSlave();
+    afx_msg void OnBnClickedVersion();
+};
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_CHANNELDLG_H_
diff --git a/trunk/src/video_engine/main/test/WindowsTest/ChannelPool.cc b/trunk/src/video_engine/main/test/WindowsTest/ChannelPool.cc
new file mode 100644
index 0000000..dbd1644
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/ChannelPool.cc
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ChannelPool.h"
+#include "map_wrapper.h"
+#include <string.h>
+#include <assert.h>
+#include "critical_section_wrapper.h"
+
+ChannelPool::ChannelPool():
+_critSect(*webrtc::CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+ChannelPool::~ChannelPool(void)
+{
+    assert(_channelMap.Size()==0);    
+    delete &_critSect;
+}
+
+WebRtc_Word32 ChannelPool::AddChannel(int channel)
+{
+    return _channelMap.Insert(channel,(void*) channel);
+}
+WebRtc_Word32 ChannelPool::RemoveChannel(int channel)
+{
+    return _channelMap.Erase(channel);
+}
+
+webrtc::MapWrapper& ChannelPool::ChannelMap()
+{
+    return _channelMap;
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/ChannelPool.h b/trunk/src/video_engine/main/test/WindowsTest/ChannelPool.h
new file mode 100644
index 0000000..374c676
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/ChannelPool.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+#include "StdAfx.h"
+#include "common_types.h"
+
+#include "vie_base.h"
+#include "map_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+}
+
+class ChannelPool
+{
+public:
+    ChannelPool();
+    ~ChannelPool(void);
+    WebRtc_Word32 AddChannel(int channel);
+    WebRtc_Word32 RemoveChannel(int channel);    
+
+    webrtc::MapWrapper& ChannelMap();
+
+    private:     
+        webrtc::CriticalSectionWrapper& _critSect;        
+        webrtc::MapWrapper _channelMap;
+
+};
diff --git a/trunk/src/video_engine/main/test/WindowsTest/StdAfx.h b/trunk/src/video_engine/main/test/WindowsTest/StdAfx.h
new file mode 100644
index 0000000..78b1fbd
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/StdAfx.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.h : include file for standard system include files,
+//  or project specific include files that are used frequently, but
+//      are changed infrequently
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_STDAFX_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_STDAFX_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif // _MSC_VER > 1000
+
+#define VC_EXTRALEAN		// Exclude rarely-used stuff from Windows headers
+
+#include <afxwin.h>         // MFC core and standard components
+#include <afxext.h>         // MFC extensions
+#include <afxdisp.h>        // MFC Automation classes
+#include <afxdtctl.h>		// MFC support for Internet Explorer 4 Common Controls
+#ifndef _AFX_NO_AFXCMN_SUPPORT
+#include <afxcmn.h>			// MFC support for Windows Common Controls
+#endif // _AFX_NO_AFXCMN_SUPPORT
+
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_STDAFX_H_
diff --git a/trunk/src/video_engine/main/test/WindowsTest/VideoSize.h b/trunk/src/video_engine/main/test/WindowsTest/VideoSize.h
new file mode 100644
index 0000000..60e2bdd
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/VideoSize.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_VIDEOSIZE_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_VIDEOSIZE_H_
+#include "StdAfx.h"
+enum VideoSize
+	{
+		UNDEFINED, 
+		SQCIF,     // 128*96       = 12 288
+		QQVGA,     // 160*120      = 19 200
+		QCIF,      // 176*144      = 25 344
+        CGA,       // 320*200      = 64 000
+		QVGA,      // 320*240      = 76 800
+        SIF,       // 352*240      = 84 480
+		WQVGA,     // 400*240      = 96 000
+		CIF,       // 352*288      = 101 376
+        W288P,     // 512*288      = 147 456 (WCIF)
+        W368P,     // 640*368      = 235 520
+        S_448P,      // 576*448      = 281 088
+		VGA,       // 640*480      = 307 200
+        S_432P,      // 720*432      = 311 040
+        W432P,     // 768*432      = 331 776 (a.k.a WVGA 16:9)
+        S_4SIF,      // 704*480      = 337 920
+        W448P,     // 768*448      = 344 064
+		NTSC,		// 720*480      = 345 600
+        FW448P,    // 800*448      = 358 400
+        S_768x480P,  // 768*480      = 368 640 (a.k.a WVGA 16:10)
+		WVGA,      // 800*480      = 384 000
+		S_4CIF,      // 704*576      = 405 504
+		SVGA,      // 800*600      = 480 000
+        W544P,     // 960*544      = 522 240
+        W576P,     // 1024*576     = 589 824 (W4CIF)
+		HD,        // 960*720      = 691 200
+		XGA,       // 1024*768     = 786 432
+		WHD,       // 1280*720     = 921 600
+		FULL_HD,   // 1440*1080    = 1 555 200
+        UXGA,      // 1600*1200    = 1 920 000
+		WFULL_HD,  // 1920*1080    = 2 073 600
+		NUMBER_OF_VIDEO_SIZE
+	};
+
+int GetWidthHeight(VideoSize size, int& width, int& height);
+
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_VIDEOSIZE_H_
diff --git a/trunk/src/video_engine/main/test/WindowsTest/WindowsTest.cc b/trunk/src/video_engine/main/test/WindowsTest/WindowsTest.cc
new file mode 100644
index 0000000..ceda8a9
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/WindowsTest.cc
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "WindowsTest.h"
+#include "ChannelDlg.h"
+#include "WindowsTestMainDlg.h"
+#include "engine_configurations.h"
+
+#ifdef _DEBUG
+#define new DEBUG_NEW
+#undef THIS_FILE
+static char THIS_FILE[] = __FILE__;
+#endif
+
+// Check for memory leaks if running in debug
+#if (defined(_DEBUG) && defined(_WIN32))
+//    #include "vld.h"
+#endif
+/////////////////////////////////////////////////////////////////////////////
+// CDXWindowsTestApp
+
+BEGIN_MESSAGE_MAP(CDXWindowsTestApp, CWinApp)
+	//{{AFX_MSG_MAP(CDXWindowsTestApp)
+		// NOTE - the ClassWizard will add and remove mapping macros here.
+		//    DO NOT EDIT what you see in these blocks of generated code!
+	//}}AFX_MSG
+	ON_COMMAND(ID_HELP, CWinApp::OnHelp)
+END_MESSAGE_MAP()
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXWindowsTestApp construction
+
+CDXWindowsTestApp::CDXWindowsTestApp()
+{
+    
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// The one and only object
+
+CDXWindowsTestApp theApp;
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXWindowsTestApp initialization
+
+BOOL CDXWindowsTestApp::InitInstance()
+{
+    int result=0;
+    #ifndef NO_VOICE_ENGINE
+        _voiceEngine = VoiceEngine::Create();
+        _veBase = VoEBase::GetInterface(_voiceEngine);
+         result+=_veBase->Init();
+     #else
+        _voiceEngine=NULL;
+    #endif
+
+    _videoEngine = VideoEngine::Create();
+
+    _videoEngine->SetTraceFilter(webrtc::kTraceDefault);//webrtc::kTraceDebug | webrtc::kTraceError | webrtc::kTraceApiCall | webrtc::kTraceWarning | webrtc::kTraceCritical | webrtc::kTraceStateInfo | webrtc::kTraceInfo | webrtc::kTraceStream);
+    _videoEngine->SetTraceFile("trace.txt");
+    
+    ViEBase* vieBase=ViEBase::GetInterface(_videoEngine);
+    result+=vieBase->Init();
+    if(result!=0)
+    {
+        ::MessageBox (NULL, (LPCTSTR)("failed to init VideoEngine"), TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);                
+    }
+    
+    {
+        WindowsTestMainDlg dlg(_videoEngine,_voiceEngine);
+
+	    m_pMainWnd = &dlg;
+	    int nResponse = dlg.DoModal();
+    }
+    
+    vieBase->Release();
+
+    if(!VideoEngine::Delete(_videoEngine))
+    {
+        char errorMsg[255];
+        sprintf(errorMsg,"All VideoEngine interfaces are not released properly!");
+        ::MessageBox (NULL, (LPCTSTR)errorMsg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);
+    }
+
+  #ifndef NO_VOICE_ENGINE
+    
+    _veBase->Terminate();
+    if(_veBase->Release()!=0)        
+    {
+        // ensure that no interface is still referenced
+        char errorMsg[256];
+        sprintf(errorMsg,"All VoiceEngine interfaces are not released properly!");
+        ::MessageBox (NULL, (LPCTSTR)errorMsg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);
+    }
+
+    if (false == VoiceEngine::Delete(_voiceEngine))
+    {
+        char errorMsg[256];
+        sprintf(errorMsg,"VoiceEngine::Delete() failed!");
+        ::MessageBox (NULL, (LPCTSTR)errorMsg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);
+    }
+   #endif
+
+	// Since the dialog has been closed, return FALSE so that we exit the
+	//  application, rather than start the application's message pump.
+	return FALSE;
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/WindowsTest.h b/trunk/src/video_engine/main/test/WindowsTest/WindowsTest.h
new file mode 100644
index 0000000..dc3ee9d
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/WindowsTest.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_WINDOWSTEST_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_WINDOWSTEST_H_
+
+
+#include "StdAfx.h"
+#include "resource.h"		// main symbols
+
+
+
+/////////////////////////////////////////////////////////////////////////////
+
+//Forward declarations
+namespace webrtc {
+    class VoiceEngine;
+    class VoEBase;
+    class VideoEngine;
+}
+using namespace webrtc;
+
+class CDXWindowsTestApp : public CWinApp
+{
+public:
+	CDXWindowsTestApp();
+
+// Overrides
+	// ClassWizard generated virtual function overrides
+	//{{AFX_VIRTUAL(CDXWindowsTestApp)
+	public:
+	virtual BOOL InitInstance();
+	//}}AFX_VIRTUAL
+
+// Implementation
+
+	//{{AFX_MSG(CDXWindowsTestApp)
+		// NOTE - the ClassWizard will add and remove member functions here.
+		//    DO NOT EDIT what you see in these blocks of generated code !
+	//}}AFX_MSG
+	DECLARE_MESSAGE_MAP()
+
+	VideoEngine*  _videoEngine;
+    VoiceEngine*  _voiceEngine;
+    VoEBase*       _veBase;
+};
+
+
+/////////////////////////////////////////////////////////////////////////////
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_WINDOWSTEST_H_
diff --git a/trunk/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.cc b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.cc
new file mode 100644
index 0000000..fcc490d
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.cc
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// WindowsTestMainDlg.cpp : implementation file
+//
+#include "WindowsTestMainDlg.h"
+#include "WindowsTest.h"
+#include "ChannelDlg.h"
+
+#include "voe_base.h"
+
+// WindowsTestMainDlg dialog
+
+IMPLEMENT_DYNAMIC(WindowsTestMainDlg, CDialog)
+
+WindowsTestMainDlg::WindowsTestMainDlg(VideoEngine* videoEngine,void* voiceEngine,CWnd* pParent /*=NULL*/)
+	: CDialog(WindowsTestMainDlg::IDD, pParent),
+        _videoEngine(videoEngine),
+        _voiceEngine((VoiceEngine*) voiceEngine),
+        _testDlg1(NULL),
+        _testDlg2(NULL),
+        _testDlg3(NULL),
+        _testDlg4(NULL),    
+        _externalInWidth(0),   
+        _externalInHeight(0),    
+        _externalInVideoType(0),
+        _captureDevicePool(videoEngine)
+{
+    
+}
+
+WindowsTestMainDlg::~WindowsTestMainDlg()
+{        
+}
+
+void WindowsTestMainDlg::DoDataExchange(CDataExchange* pDX)
+{
+	CDialog::DoDataExchange(pDX);
+}
+
+
+BEGIN_MESSAGE_MAP(WindowsTestMainDlg, CDialog)
+        ON_BN_CLICKED(IDC_CHANNEL1, &WindowsTestMainDlg::OnBnClickedChannel1)
+        ON_BN_CLICKED(IDC_CHANNEL2, &WindowsTestMainDlg::OnBnClickedChannel2)
+        ON_BN_CLICKED(IDC_CHANNEL3, &WindowsTestMainDlg::OnBnClickedChannel3)
+        ON_BN_CLICKED(IDC_CHANNEL4, &WindowsTestMainDlg::OnBnClickedChannel4)
+END_MESSAGE_MAP()
+
+
+
+void WindowsTestMainDlg::OnBnClickedChannel1()
+{
+    if(!_testDlg1)
+    {
+        _testDlg1=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg1->Create(CDXChannelDlg::IDD,this);
+    }
+    else
+    {
+        _testDlg1->SetActiveWindow();
+    }    
+}
+
+void WindowsTestMainDlg::OnBnClickedChannel2()
+{
+    if(!_testDlg2)
+    {
+        _testDlg2=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg2->Create(CDXChannelDlg::IDD,this);
+
+    }
+    else
+    {
+        _testDlg2->SetActiveWindow();
+    }    
+}
+
+void WindowsTestMainDlg::ChannelDialogEnded(CDXChannelDlg* context)
+{
+    if(context==_testDlg4)
+    {
+        delete _testDlg4;
+        _testDlg4=NULL;
+    }
+    else if(context==_testDlg3)
+    {
+        delete _testDlg3;
+        _testDlg3=NULL;
+    }
+    else if(context==_testDlg2)
+    {
+        delete _testDlg2;
+        _testDlg2=NULL;
+    }
+    else if(context==_testDlg1)
+    {
+        delete _testDlg1;
+        _testDlg1=NULL;
+    }
+    else // Slave channel
+    {
+        delete context;
+    }
+
+}
+
+
+
+void WindowsTestMainDlg::OnBnClickedChannel3()
+{
+    if(!_testDlg3)
+    {
+        _testDlg3=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg3->Create(CDXChannelDlg::IDD,this);
+
+    }
+    else
+    {
+        _testDlg3->SetActiveWindow();
+    }    
+}
+
+void WindowsTestMainDlg::OnBnClickedChannel4()
+{
+    if(!_testDlg4)
+    {
+        _testDlg4=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg4->Create(CDXChannelDlg::IDD,this);
+
+    }
+    else
+    {
+        _testDlg4->SetActiveWindow();
+    }                
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.h b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.h
new file mode 100644
index 0000000..8aae99a
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+#include "StdAfx.h"
+#include "WindowsTestResource.h"
+
+#include "ChannelDlg.h"
+#include "CaptureDevicePool.h"
+#include "ChannelPool.h"
+
+//Forward declarations
+namespace webrtc {
+    class VideoEngine;
+    class VoiceEngine;
+}
+using namespace webrtc;
+class CDXCaptureDlg;
+
+
+class WindowsTestMainDlg : public CDialog, private CDXChannelDlgObserver
+{
+	DECLARE_DYNAMIC(WindowsTestMainDlg)
+
+public:
+	WindowsTestMainDlg(VideoEngine* videoEngine,void* voiceEngine=NULL,CWnd* pParent = NULL);   // standard constructor
+	virtual ~WindowsTestMainDlg();
+
+// Dialog Data
+	enum { IDD = IDD_WINDOWSTEST_MAIN };
+
+protected:
+	virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+
+	DECLARE_MESSAGE_MAP()
+public:
+     afx_msg void OnBnClickedChannel1();
+     afx_msg void OnBnClickedChannel2();
+     afx_msg void OnBnClickedChannel3();
+     afx_msg void OnBnClickedChannel4();
+
+
+     VideoEngine* _videoEngine;
+    VoiceEngine*		_voiceEngine;
+    VoEBase* _veBase;
+
+    CDXChannelDlg* _testDlg1;
+    CDXChannelDlg* _testDlg2;
+    CDXChannelDlg* _testDlg3;
+    CDXChannelDlg* _testDlg4;
+
+    int _externalInWidth;   
+    int _externalInHeight;
+    int _externalInVideoType;
+
+    CaptureDevicePool _captureDevicePool;
+    ChannelPool       _channelPool;
+
+
+private:
+    virtual void ChannelDialogEnded(CDXChannelDlg* context);
+
+public:
+
+};
diff --git a/trunk/src/video_engine/main/test/WindowsTest/WindowsTestResouce.rc b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestResouce.rc
new file mode 100644
index 0000000..5e866ad
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestResouce.rc
@@ -0,0 +1,101 @@
+// Microsoft Visual C++ generated resource script.

+//

+#include "WindowsTestResource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// Swedish resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+#ifdef _WIN32

+LANGUAGE LANG_SWEDISH, SUBLANG_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE 

+BEGIN

+    "WindowsTestResource.h\0"

+END

+

+2 TEXTINCLUDE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE 

+BEGIN

+    "\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_WINDOWSTEST_MAIN DIALOGEX 0, 0, 186, 156

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Windows ViE Test"

+FONT 8, "MS Shell Dlg", 400, 0, 0x1

+BEGIN

+    DEFPUSHBUTTON   "OK",IDOK,129,7,50,14

+    PUSHBUTTON      "Cancel",IDCANCEL,129,24,50,14

+    PUSHBUTTON      "Channel 1",IDC_CHANNEL1,129,45,50,14

+    PUSHBUTTON      "Channel 2",IDC_CHANNEL2,129,62,50,14

+    PUSHBUTTON      "Channel 3",IDC_CHANNEL3,129,79,50,14

+    PUSHBUTTON      "Channel 4",IDC_CHANNEL4,129,96,50,14

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_WINDOWSTEST_MAIN, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 179

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 149

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+#endif    // Swedish resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/trunk/src/video_engine/main/test/WindowsTest/WindowsTestResource.h b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestResource.h
new file mode 100644
index 0000000..2d49c28
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/WindowsTestResource.h
@@ -0,0 +1,28 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by WindowsTestResouce.rc
+//
+#define IDD_WINDOWSTEST_MAIN            101
+#define IDC_CHANNEL1                    1001
+#define IDC_CHANNEL2                    1002
+#define IDC_CHANNEL3                    1004
+#define IDC_CHANNEL4                    1005
+#define IDC_POSITION                    1009
+#define IDC_INFORMATION                 1050
+#define IDC_CHECK_CHANNEL1              1070
+#define IDC_CHECK_CHANNEL2              1071
+#define IDC_CHECK_CHANNEL3              1072
+#define IDC_CHECK_CHANNEL4              1073
+#define IDC_COMBO1                      1074
+#define IDC_BTN_CREATE2                 1076
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        106
+#define _APS_NEXT_COMMAND_VALUE         40001
+#define _APS_NEXT_CONTROL_VALUE         1076
+#define _APS_NEXT_SYMED_VALUE           107
+#endif
+#endif
diff --git a/trunk/src/video_engine/main/test/WindowsTest/captureDeviceImage.jpg b/trunk/src/video_engine/main/test/WindowsTest/captureDeviceImage.jpg
new file mode 100644
index 0000000..3bb3ba4
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/captureDeviceImage.jpg
Binary files differ
diff --git a/trunk/src/video_engine/main/test/WindowsTest/renderStartImage.jpg b/trunk/src/video_engine/main/test/WindowsTest/renderStartImage.jpg
new file mode 100644
index 0000000..b10a842
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/renderStartImage.jpg
Binary files differ
diff --git a/trunk/src/video_engine/main/test/WindowsTest/renderTimeoutImage.jpg b/trunk/src/video_engine/main/test/WindowsTest/renderTimeoutImage.jpg
new file mode 100644
index 0000000..cb34d67
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/renderTimeoutImage.jpg
Binary files differ
diff --git a/trunk/src/video_engine/main/test/WindowsTest/res/Capture.rc2 b/trunk/src/video_engine/main/test/WindowsTest/res/Capture.rc2
new file mode 100644
index 0000000..d9acfd2
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/res/Capture.rc2
@@ -0,0 +1,13 @@
+//

+// DXCAPTURE.RC2 - resources Microsoft Visual C++ does not edit directly

+//

+

+#ifdef APSTUDIO_INVOKED

+	#error this file is not editable by Microsoft Visual C++

+#endif //APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Add manually edited resources here...

+

+/////////////////////////////////////////////////////////////////////////////

diff --git a/trunk/src/video_engine/main/test/WindowsTest/resource.h b/trunk/src/video_engine/main/test/WindowsTest/resource.h
new file mode 100644
index 0000000..34e90eb
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/resource.h
@@ -0,0 +1,58 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by Capture.rc
+//
+#define IDM_ABOUTBOX                    0x0010
+#define IDD_DXQUALITY_DIALOG            102
+#define IDI_ICON1                       130
+#define IDD_SLAVE_CHANNEL               132
+#define IDC_LIVEVIDEO                   1000
+#define IDC_CAPTURE                     1001
+#define IDC_DEVICE                      1003
+#define IDC_STARTSEND                   1004
+#define IDC_STARTLISTEN                 1006
+#define IDC_STOPLISTEN                  1007
+#define IDC_STOPSend                    1008
+#define IDC_CODEC_LIST                  1010
+#define IDC_CODEC_SIZE                  1011
+#define IDC_IPADDRESS1                  1012
+#define IDC_LOCAL_PORT1                 1013
+#define IDC_CHANGE_SIZE                 1017
+#define IDC_QUALITY                     1018
+#define IDC_BITRATE                     1019
+#define IDC_WINDOW_SIZE                 1022
+#define IDC_REMOTE_PORT1                1025
+#define IDC_START_REC                   1030
+#define IDC_STOP_REC                    1031
+#define IDC_CAM_PROPERTY                1033
+#define IDC_ONMODE                      1034
+#define IDC_CAPTURECAP                  1038
+#define IDC_RADIO1                      1039
+#define IDC_MIN_FRAME_RATE              1040
+#define IDC_RTCPMODE                    1042
+#define IDC_TMMBR                       1043
+#define IDC_PACKETBURST                 1044
+#define IDC_PROT_NONE                   1045
+#define IDC_PROT_NACK                   1046
+#define IDC_PROT_FEC                    1047
+#define IDC_PROT_NACKFEC                1048
+#define IDC_INFORMATION                 1050
+#define IDC_PACKETLOSS                  1051
+#define IDC_FREEZELOG                   1052
+#define IDC_VERSION                     1053
+#define IDC_EXTTRANSPORT                1054
+#define IDC_DELAY                       1055
+#define IDC_BTN_RECORD_INCOMING         1056
+#define IDC_BTN_RECORD_OUTGOING         1057
+#define IDC_BTN_CREATE_SLAVE            1058
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        133
+#define _APS_NEXT_COMMAND_VALUE         32771
+#define _APS_NEXT_CONTROL_VALUE         1059
+#define _APS_NEXT_SYMED_VALUE           101
+#endif
+#endif
diff --git a/trunk/src/video_engine/main/test/WindowsTest/tbExternalTransport.cc b/trunk/src/video_engine/main/test/WindowsTest/tbExternalTransport.cc
new file mode 100644
index 0000000..f0910ee
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/tbExternalTransport.cc
@@ -0,0 +1,317 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// tbExternalTransport.cpp
+//
+
+#include "tbExternalTransport.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "vie_network.h"
+#include "tick_util.h"
+
+using namespace webrtc;
+
+TbExternalTransport::TbExternalTransport(ViENetwork& vieNetwork)
+    :
+    _vieNetwork(vieNetwork),
+    _thread(*ThreadWrapper::CreateThread(ViEExternalTransportRun, this, kHighPriority, "AutotestTransport")), 
+    _event(*EventWrapper::Create()),
+    _crit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _statCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _lossRate(0),
+    _networkDelayMs(0),
+    _rtpCount(0),
+    _dropCount(0),
+    _rtcpCount(0),
+    _rtpPackets(),
+    _rtcpPackets(),
+    _checkSSRC(false),
+    _lastSSRC(0),
+    _checkSequenceNumber(0),
+    _firstSequenceNumber(0),
+    _lastSeq(0)
+{
+    srand((int)TickTime::MicrosecondTimestamp());
+    unsigned int tId = 0;
+    _thread.Start(tId);
+}
+
+
+TbExternalTransport::~TbExternalTransport()
+{
+    // TODO: stop thread
+    _thread.SetNotAlive();
+    _event.Set();
+    if (_thread.Stop())
+    {
+        delete &_thread;
+        delete &_event;
+    }
+    delete &_crit;
+    delete &_statCrit;
+}
+
+
+
+
+    
+int TbExternalTransport::SendPacket(int channel, const void *data, int len)
+{
+    _statCrit.Enter();
+    _rtpCount++;
+    _statCrit.Leave();
+
+
+    unsigned short sequenceNumber =  (((unsigned char*) data)[2]) << 8;
+    sequenceNumber +=  (((unsigned char*) data)[3]);
+
+            
+    int marker=((unsigned char*)data)[1] & 0x80;
+    unsigned int timestamp=((((unsigned char*)data)[4]) << 24) + ((((unsigned char*)data)[5])<<16) +((((unsigned char*)data)[6])<<8)+(((unsigned char*)data)[7]);
+
+    
+        // Packet loss
+    int dropThis = rand() % 100;
+    bool nacked=false;
+    if(sequenceNumber<_lastSeq)
+    {
+        nacked=true;
+    }
+    else
+    {
+        _lastSeq=sequenceNumber;
+    }
+
+    if (dropThis < _lossRate)
+    {
+        _statCrit.Enter();
+        _dropCount++;
+        _statCrit.Leave();
+
+      
+      /*  char str[256];
+        sprintf(str,"Dropping seq %d length %d m %d, ts %u\n", sequenceNumber,len,marker,timestamp) ;
+        OutputDebugString(str);*/
+        
+        return len;
+    }
+    else
+    {
+        if(nacked)
+        {
+            /*char str[256];
+            sprintf(str,"Resending seq %d length %d m %d, ts %u\n", sequenceNumber,len,marker,timestamp) ;
+            OutputDebugString(str);*/
+        }    
+        else
+        {
+            /*char str[256];
+            sprintf(str,"Sending seq %d length %d m %d, ts %u\n", sequenceNumber,len,marker,timestamp) ;
+            OutputDebugString(str);*/
+         
+        }
+    }    
+    
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtpPackets.push(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+int TbExternalTransport::SendRTCPPacket(int channel, const void *data, int len)
+{
+    _statCrit.Enter();
+    _rtcpCount++;
+    _statCrit.Leave();
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtcpPackets.push(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+WebRtc_Word32 TbExternalTransport::SetPacketLoss(WebRtc_Word32 lossRate)
+{
+    CriticalSectionScoped cs(_statCrit);
+    _lossRate = lossRate;
+    return 0;
+}
+
+void TbExternalTransport::SetNetworkDelay(WebRtc_Word64 delayMs)
+{
+    CriticalSectionScoped cs(_crit);
+    _networkDelayMs = delayMs;
+    return;
+}
+
+void TbExternalTransport::ClearStats()
+{
+    CriticalSectionScoped cs(_statCrit);
+    _rtpCount = 0;
+    _dropCount = 0;
+    _rtcpCount = 0;
+    return;
+}
+
+void TbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets, WebRtc_Word32& numDroppedPackets, WebRtc_Word32& numRtcpPackets)
+{
+    CriticalSectionScoped cs(_statCrit);
+    numRtpPackets = _rtpCount;
+    numDroppedPackets = _dropCount;
+    numRtcpPackets = _rtcpCount;
+    return;
+}
+
+void TbExternalTransport::EnableSSRCCheck()
+{
+    CriticalSectionScoped cs(_statCrit);
+    _checkSSRC = true;
+}
+unsigned int TbExternalTransport::ReceivedSSRC()
+{
+    CriticalSectionScoped cs(_statCrit);
+    return _lastSSRC;
+}
+
+void TbExternalTransport::EnableSequenceNumberCheck()
+{
+    CriticalSectionScoped cs(_statCrit);
+    _checkSequenceNumber = true;
+}
+
+unsigned short TbExternalTransport::GetFirstSequenceNumber()
+{
+    CriticalSectionScoped cs(_statCrit);
+    return _firstSequenceNumber;
+}
+
+
+bool TbExternalTransport::ViEExternalTransportRun(void* object)
+{
+    return static_cast<TbExternalTransport*>(object)->ViEExternalTransportProcess();
+}
+bool TbExternalTransport::ViEExternalTransportProcess()
+{
+    unsigned int waitTime = KMaxWaitTimeMs;
+
+    VideoPacket* packet = NULL;
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - NowMs();
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime &&
+                timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        _rtpPackets.pop();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            {
+                CriticalSectionScoped cs(_statCrit);
+                if (_checkSSRC)
+                {
+                    _lastSSRC  = ((packet->packetBuffer[8]) << 24);
+                    _lastSSRC += (packet->packetBuffer[9] << 16);
+                    _lastSSRC += (packet->packetBuffer[10] << 8);
+                    _lastSSRC += packet->packetBuffer[11];
+                    _checkSSRC = false;
+                }
+                if (_checkSequenceNumber)
+                {
+                    _firstSequenceNumber = (unsigned char)packet->packetBuffer[2] << 8;
+                    _firstSequenceNumber += (unsigned char)packet->packetBuffer[3];
+                    _checkSequenceNumber = false;
+                }
+            }
+            /*
+            unsigned short sequenceNumber =  (unsigned char)packet->packetBuffer[2] << 8;
+            sequenceNumber +=  (unsigned char)packet->packetBuffer[3];
+            
+            int marker=packet->packetBuffer[1] & 0x80;
+            unsigned int timestamp=((((unsigned char*)packet->packetBuffer)[4]) << 24) + ((((unsigned char*)packet->packetBuffer)[5])<<16) +((((unsigned char*)packet->packetBuffer)[6])<<8)+(((unsigned char*)packet->packetBuffer)[7]);
+            char str[256];
+            sprintf(str,"Receiving seq %u length %d m %d, ts %u\n", sequenceNumber,packet->length,marker,timestamp) ;
+            OutputDebugString(str);*/
+
+            _vieNetwork.ReceivedRTPPacket(packet->channel, packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    while (!_rtcpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtcpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - NowMs();
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime &&
+                timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        packet = _rtcpPackets.front();
+        _rtcpPackets.pop();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            _vieNetwork.ReceivedRTCPPacket(packet->channel, packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    _event.Wait(waitTime + 1); // Add 1 ms to avoid waking up too early...
+    return true;
+}
+
+WebRtc_Word64 TbExternalTransport::NowMs()
+{
+    return TickTime::MillisecondTimestamp();
+}
diff --git a/trunk/src/video_engine/main/test/WindowsTest/tbExternalTransport.h b/trunk/src/video_engine/main/test/WindowsTest/tbExternalTransport.h
new file mode 100644
index 0000000..53226c6
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/tbExternalTransport.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// tbExternalTransport.h
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_TBEXTERNALTRANSPORT_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_TBEXTERNALTRANSPORT_H_
+
+#include "common_types.h"
+#include <queue>
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class ViENetwork;
+}
+
+class TbExternalTransport :  public webrtc::Transport
+{
+public:
+    TbExternalTransport(webrtc::ViENetwork& vieNetwork);
+    ~TbExternalTransport(void);
+
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+
+    WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate);  // Rate in %
+    void SetNetworkDelay(WebRtc_Word64 delayMs);
+
+    void ClearStats();
+    void GetStats(WebRtc_Word32& numRtpPackets, WebRtc_Word32& numDroppedPackets, WebRtc_Word32& numRtcpPackets);
+
+    void EnableSSRCCheck();
+    unsigned int ReceivedSSRC();
+
+    void EnableSequenceNumberCheck();
+    unsigned short GetFirstSequenceNumber();
+
+    
+protected:
+    static bool ViEExternalTransportRun(void* object);
+    bool ViEExternalTransportProcess();
+private:
+    WebRtc_Word64 NowMs();
+
+    enum { KMaxPacketSize = 1650};
+    enum { KMaxWaitTimeMs = 100};
+    typedef struct
+    {
+        WebRtc_Word8  packetBuffer[KMaxPacketSize];
+        WebRtc_Word32 length;
+        WebRtc_Word32 channel;
+        WebRtc_Word64 receiveTime;
+    } VideoPacket;
+
+    typedef std::queue<VideoPacket*>  VideoPacketQueue;
+
+
+    webrtc::ViENetwork&      _vieNetwork;
+    webrtc::ThreadWrapper&   _thread;
+    webrtc::EventWrapper&           _event;
+    webrtc::CriticalSectionWrapper& _crit;
+    webrtc::CriticalSectionWrapper& _statCrit;
+
+    WebRtc_Word32          _lossRate;
+    WebRtc_Word64          _networkDelayMs;
+    WebRtc_Word32          _rtpCount;
+    WebRtc_Word32          _rtcpCount;
+    WebRtc_Word32          _dropCount;
+
+    VideoPacketQueue     _rtpPackets;
+    VideoPacketQueue     _rtcpPackets;
+
+    bool                 _checkSSRC;
+    WebRtc_UWord32         _lastSSRC;
+    bool                 _checkSequenceNumber;
+    WebRtc_UWord16         _firstSequenceNumber;
+    WebRtc_Word32          _lastSeq;
+
+    //int& numberOfErrors;
+
+    //int _bits;
+    //int _lastTicks;            
+    //int _dropCnt;
+    //int _sentCount;
+    //int _frameCount;
+    //int _packetLoss;
+
+    //VideoEngine* _video;
+
+    //ReceiveBufferQueue _videoBufferQueue;
+    //ReceiveBufferQueue _rtcpBufferQueue;
+};
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_TBEXTERNALTRANSPORT_H_
diff --git a/trunk/src/video_engine/main/test/WindowsTest/videosize.cc b/trunk/src/video_engine/main/test/WindowsTest/videosize.cc
new file mode 100644
index 0000000..a675ec5
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/videosize.cc
@@ -0,0 +1,140 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "VideoSize.h"
+int GetWidthHeight( VideoSize size, int& width, int& height)
+{
+	switch(size)
+	{
+	case SQCIF:
+		width = 128;
+		height = 96;
+		return 0;
+	case QQVGA:
+		width = 160;
+		height = 120;
+		return 0;
+	case QCIF:
+		width = 176;
+		height = 144;
+		return 0;
+	case CGA:
+		width = 320;
+		height = 200;
+		return 0;
+	case QVGA:
+		width = 320;
+		height = 240;
+		return 0;
+	case SIF:
+		width = 352;
+		height = 240;
+		return 0;
+    case WQVGA:
+		width = 400;
+		height = 240;
+		return 0;
+	case CIF:
+		width = 352;
+		height = 288;
+		return 0;
+	case W288P:
+		width = 512;
+		height = 288;
+		return 0;
+    case W368P:
+        width = 640;
+        height = 368;
+        return 0;
+	case S_448P:
+		width = 576;
+		height = 448;
+		return 0;
+	case VGA:
+		width = 640;
+		height = 480;
+		return 0;
+	case S_432P:
+		width = 720;
+		height = 432;
+		return 0;
+	case W432P:
+		width = 768;
+		height = 432;
+		return 0;
+	case S_4SIF:
+		width = 704;
+		height = 480;
+		return 0;
+	case W448P:
+		width = 768;
+		height = 448;
+		return 0;
+	case NTSC:
+		width = 720;
+		height = 480;
+		return 0;
+    case FW448P:
+        width = 800;
+        height = 448;
+        return 0;
+    case S_768x480P:
+        width = 768;
+        height = 480;
+        return 0;
+    case WVGA:
+		width = 800;
+		height = 480;
+		return 0;
+	case S_4CIF:
+		width = 704;
+		height = 576;
+		return 0;
+	case SVGA:
+		width = 800;
+		height = 600; 
+		return 0;
+    case W544P:
+        width = 960;
+        height = 544;
+        return 0;
+	case W576P:
+		width = 1024;
+		height = 576;
+		return 0;
+	case HD:
+		width = 960;
+		height = 720;
+		return 0;
+	case XGA:
+		width = 1024;
+		height = 768;
+		return 0;
+	case FULL_HD:
+		width = 1440;
+		height = 1080;
+		return 0;	
+	case WHD:
+		width = 1280;
+		height = 720;
+		return 0;
+    case UXGA:
+        width = 1600;
+        height = 1200;
+        return 0;
+	case WFULL_HD:
+		width = 1920;
+		height = 1080;
+		return 0;
+	default:
+		return -1;
+	}
+	return -1;
+}
\ No newline at end of file
diff --git a/trunk/src/video_engine/main/test/WindowsTest/windowstest.gypi b/trunk/src/video_engine/main/test/WindowsTest/windowstest.gypi
new file mode 100644
index 0000000..0bebd98
--- /dev/null
+++ b/trunk/src/video_engine/main/test/WindowsTest/windowstest.gypi
@@ -0,0 +1,84 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'conditions': [
+    ['OS=="win"', {
+      'targets': [
+        # WinTest - GUI test for Windows
+        {
+          'target_name': 'vie_win_test',
+          'type': 'executable',
+          'dependencies': [
+            '<(webrtc_root)/modules/modules.gyp:video_render_module',
+            '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            ## VoiceEngine
+            '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+            ## VideoEngine
+            'video_engine_core',            
+          ],
+          'include_dirs': [
+            './interface',            
+            '../../../../', # common_types.h and typedefs.h
+            '../commonTestClasses/'
+          ],
+          'sources': [
+            'Capture.rc',
+            'captureDeviceImage.jpg',
+            'ChannelDlg.cc',
+            'ChannelDlg.h',
+            'ChannelPool.cc',
+            'ChannelPool.h',            
+            'renderStartImage.jpg',
+            'renderTimeoutImage.jpg',
+            'res\Capture.rc2',
+            'resource.h',
+            'StdAfx.h',
+            'videosize.cc',
+            'VideoSize.h',
+            'WindowsTest.cc',
+            'WindowsTest.h',
+            'WindowsTestMainDlg.cc',
+            'WindowsTestMainDlg.h',
+            'WindowsTestResouce.rc',
+            'WindowsTestResource.h',
+            'tbExternalTransport.cc',
+            'CaptureDevicePool.cc',
+            'tbExternalTransport.h',
+            'CaptureDevicePool.h',
+            
+          ],
+           'configurations': {
+            'Common_Base': {
+              'msvs_configuration_attributes': {
+                'conditions': [
+                  ['component=="shared_library"', {
+                    'UseOfMFC': '2',  # Shared DLL
+                  },{
+                    'UseOfMFC': '1',  # Static
+                  }],
+                ],
+              },
+            },
+          },
+          'msvs_settings': {
+            'VCLinkerTool': {
+              'SubSystem': '2',   # Windows
+            },
+          },
+        },
+      ],
+    }],
+  ],
+}
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/video_engine/main/test/android_test/.classpath b/trunk/src/video_engine/main/test/android_test/.classpath
new file mode 100644
index 0000000..f2adf55
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/.classpath
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="lib" path="libs/VideoEngine_android_java.jar"/>

+	<classpathentry kind="lib" path="libs/VoiceEngine_android_java.jar"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/trunk/src/video_engine/main/test/android_test/Android.mk b/trunk/src/video_engine/main/test/android_test/Android.mk
new file mode 100644
index 0000000..3ee8627
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/Android.mk
@@ -0,0 +1,41 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+MY_CAPTURE_FOLDER := ../../../../modules/video_capture/main/source
+MY_CAPTURE_JAVA_FOLDER := Android/java/org/webrtc/videoengine
+MY_CAPTURE_PATH := $(MY_CAPTURE_FOLDER)/$(MY_CAPTURE_JAVA_FOLDER)
+
+MY_RENDER_FOLDER := ../../../../modules/video_render/main/source
+MY_RENDER_JAVA_FOLDER := Android/java/org/webrtc/videoengine
+MY_RENDER_PATH := $(MY_RENDER_FOLDER)/$(MY_RENDER_JAVA_FOLDER)
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+    src/org/webrtc/videoengineapp/ViEAndroidDemo.java \
+    src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java \
+    src/org/webrtc/videoengineapp/IViEAndroidCallback.java \
+    $(MY_CAPTURE_PATH)/CaptureCapabilityAndroid.java \
+    $(MY_CAPTURE_PATH)/VideoCaptureAndroid.java \
+    $(MY_CAPTURE_PATH)/VideoCaptureDeviceInfoAndroid.java \
+    $(MY_RENDER_PATH)/ViEAndroidGLES20.java \
+    $(MY_RENDER_PATH)/ViERenderer.java \
+    $(MY_RENDER_PATH)/ViESurfaceRenderer.java 
+
+LOCAL_PACKAGE_NAME := webrtc-video-demo
+LOCAL_CERTIFICATE := platform
+
+LOCAL_JNI_SHARED_LIBRARIES := libwebrtc-video-demo-jni
+
+include $(BUILD_PACKAGE)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/trunk/src/video_engine/main/test/android_test/AndroidManifest.xml b/trunk/src/video_engine/main/test/android_test/AndroidManifest.xml
new file mode 100644
index 0000000..0714aed
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/AndroidManifest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+      android:versionCode="1" package="org.webrtc.videoengineapp" android:versionName="1.07">
+    <application android:icon="@drawable/logo"
+		 android:label="@string/app_name"
+		 android:debuggable="true">
+        <activity android:name=".ViEAndroidDemo"
+                  android:label="@string/app_name" 
+                  android:configChanges="keyboardHidden|orientation"                  
+                  >
+                  <!--android:configChanges="keyboardHidden|orientation"  -->
+                  <!-- android:screenOrientation="landscape" -->
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+    <uses-sdk android:minSdkVersion="7" />
+    <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+    <uses-feature android:name="android.hardware.camera" />
+    <uses-feature android:name="android.hardware.camera.autofocus" />
+    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.INTERNET" />
+    <uses-permission android:name="android.permission.WAKE_LOCK" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+</manifest> 
diff --git a/trunk/src/video_engine/main/test/android_test/default.properties b/trunk/src/video_engine/main/test/android_test/default.properties
new file mode 100644
index 0000000..c206f58
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/default.properties
@@ -0,0 +1,13 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Indicates whether an apk should be generated for each density.

+split.density=false

+# Project target.

+target=android-9

diff --git a/trunk/src/video_engine/main/test/android_test/gen/org/webrtc/androidapp/R.java b/trunk/src/video_engine/main/test/android_test/gen/org/webrtc/androidapp/R.java
new file mode 100644
index 0000000..c35047b
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/gen/org/webrtc/androidapp/R.java
@@ -0,0 +1,68 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.videoengineapp;

+

+public final class R {

+    public static final class array {

+        public static final int codecSize=0x7f040001;

+        public static final int codectype=0x7f040000;

+        public static final int voiceCodecType=0x7f040002;

+    }

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int bar=0x7f020000;

+        public static final int bg=0x7f020001;

+        public static final int logo=0x7f020002;

+        public static final int robot=0x7f020003;

+        public static final int video=0x7f020004;

+    }

+    public static final class id {

+        public static final int LinearLayout01=0x7f060010;

+        public static final int LinearLayout02=0x7f060006;

+        public static final int TextView01=0x7f060005;

+        public static final int TextView02=0x7f06000b;

+        public static final int TextView03=0x7f060004;

+        public static final int btStartBoth=0x7f060013;

+        public static final int btStartListen=0x7f060011;

+        public static final int btStartSend=0x7f060012;

+        public static final int cbLoopback=0x7f06000e;

+        public static final int cbVoice=0x7f06000d;

+        public static final int etRemoteIp=0x7f06000c;

+        public static final int ivPreview=0x7f060014;

+        public static final int ivTopBar=0x7f060002;

+        public static final int rlSurfaces=0x7f060000;

+        public static final int spCodecSize=0x7f06000a;

+        public static final int spCodecType=0x7f060007;

+        public static final int spVoiceCodecType=0x7f060008;

+        public static final int svLocal=0x7f060001;

+        public static final int tvCodecSize=0x7f060009;

+        public static final int tvLocalIp=0x7f06000f;

+        public static final int tvTitle=0x7f060003;

+    }

+    public static final class layout {

+        public static final int both=0x7f030000;

+        public static final int main=0x7f030001;

+        public static final int send=0x7f030002;

+    }

+    public static final class string {

+        public static final int app_name=0x7f050001;

+        public static final int codecSize=0x7f050007;

+        public static final int codecType=0x7f050006;

+        public static final int codectype_prompt=0x7f050004;

+        public static final int demoTitle=0x7f050005;

+        public static final int enableVoice=0x7f05000d;

+        public static final int error=0x7f050002;

+        public static final int errorCamera=0x7f050003;        

+        public static final int loopback=0x7f050009;

+        public static final int remoteIp=0x7f050008;

+        public static final int startBoth=0x7f05000c;

+        public static final int startListen=0x7f05000a;

+        public static final int startSend=0x7f05000b;

+    }

+}

diff --git a/trunk/src/video_engine/main/test/android_test/jni/Android.mk b/trunk/src/video_engine/main/test/android_test/jni/Android.mk
new file mode 100644
index 0000000..4121cc6
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/jni/Android.mk
@@ -0,0 +1,40 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.

+#

+#  Use of this source code is governed by a BSD-style license

+#  that can be found in the LICENSE file in the root of the source

+#  tree. An additional intellectual property rights grant can be found

+#  in the file PATENTS.  All contributing project authors may

+#  be found in the AUTHORS file in the root of the source tree.

+

+LOCAL_PATH := $(call my-dir)

+

+INTERFACES_PATH := $(LOCAL_PATH)/../../../../../../../build/interface

+LIBS_PATH := $(LOCAL_PATH)/../../../../../../../build/libraries

+

+include $(CLEAR_VARS)

+

+LOCAL_MODULE_TAGS := tests

+LOCAL_MODULE := libwebrtc-video-demo-jni

+LOCAL_CPP_EXTENSION := .cc

+LOCAL_SRC_FILES := vie_android_java_api.cc

+LOCAL_CFLAGS := \

+    '-DWEBRTC_TARGET_PC' \

+    '-DWEBRTC_ANDROID'

+

+LOCAL_C_INCLUDES := \

+    external/gtest/include \

+    $(LOCAL_PATH)/../../../../.. \

+    $(LOCAL_PATH)/../../../../include \

+    $(LOCAL_PATH)/../../../../../voice_engine/main/interface 

+

+LOCAL_PRELINK_MODULE := false

+

+LOCAL_SHARED_LIBRARIES := \

+    libutils \

+    libstlport \

+    libandroid \

+    libwebrtc \

+    libGLESv2

+LOCAL_LDLIBS := $(LIBS_PATH)/VideoEngine_android_gcc.a -llog -lgcc 

+

+include $(BUILD_SHARED_LIBRARY)

diff --git a/trunk/src/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h b/trunk/src/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h
new file mode 100644
index 0000000..37a9c43
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h
@@ -0,0 +1,494 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_videoengineapp_ViEAndroidJavaAPI */
+
+#ifndef _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
+#define _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    NativeInit
+ * Signature: (Landroid/content/Context;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit
+(JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetVideoEngine
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine
+(JNIEnv *, jobject);
+
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Init
+ * Signature: (IIIZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init
+(JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate
+(JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopRender
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopSend
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend
+(JNIEnv *, jobject,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartReceive
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive
+(JNIEnv *, jobject,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopReceive
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive
+(JNIEnv *, jobject,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    CreateChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel
+(JNIEnv *, jobject,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver
+(JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendDestination
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination
+(JNIEnv *, jobject, jint, jint, jbyteArray);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetReceiveCodec
+ * Signature: (IIIIIIZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec
+(JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendCodec
+ * Signature: (IIIIIIZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec
+(JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    AddRemoteRenderer
+ * Signature: (ILandroid/view/SurfaceView;)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer
+(JNIEnv *, jobject, jint, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    RemoveRemoteRenderer
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer
+(JNIEnv *, jobject,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartRender
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartCamera
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera
+(JNIEnv *, jobject,jint channel,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopCamera
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera
+(JNIEnv *, jobject,jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetCameraOrientation
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetRotation
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation
+(JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    EnableNACK
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK
+(JNIEnv *, jobject, jint, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    EnablePLI
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI
+(JNIEnv *, jobject, jint, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartSendNative
+ * Signature: (III[BIIIIIILandroid/view/SurfaceView;III)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSendNative
+(JNIEnv *, jobject, jint, jint, jint, jbyteArray, jint, jint,
+jint, jint, jint, jint, jobject, jint, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartListenNative
+ * Signature: (III[BIIIIIIILandroid/view/SurfaceView;)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartListenNative
+(JNIEnv *, jobject, jint, jint, jint, jbyteArray, jint, jint,
+jint, jint, jint, jint, jint, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopAllNative
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopAllNative
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetCallback
+ * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback
+(JNIEnv *, jobject, jint, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_Create
+ * Signature: (Landroid/app/Activity;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Create
+(JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_Delete
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Delete
+(JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_Init
+ * Signature: (IIIZZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Init
+(JNIEnv *, jobject, jint, jint, jint, jboolean, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Terminate
+(JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_CreateChannel
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_CreateChannel
+(JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_DeleteChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_DeleteChannel
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetLocalReceiver
+(JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetSendDestination
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetSendDestination
+(JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StartListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartListen
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StartPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartPlayout
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartSend
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StopListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopListen
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StopPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopPlayout
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopSend
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetSpeakerVolume
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetSpeakerVolume
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetLoudspeakerStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetLoudspeakerStatus
+(JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StartPlayingFileLocally
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartPlayingFileLocally
+(JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StopPlayingFileLocally
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopPlayingFileLocally
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StartPlayingFileAsMicrophone
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartPlayingFileAsMicrophone
+(JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_StopPlayingFileAsMicrophone
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopPlayingFileAsMicrophone
+(JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_NumOfCodecs
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_NumOfCodecs
+(JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetSendCodec
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetSendCodec
+(JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetECStatus
+ * Signature: (ZIII)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetECStatus
+(JNIEnv *, jobject, jboolean, jint, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetNSStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetNSStatus
+(JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VE_SetAGCStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetAGCStatus
+(JNIEnv *, jobject, jboolean, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/trunk/src/video_engine/main/test/android_test/jni/vie_android_java_api.cc b/trunk/src/video_engine/main/test/android_test/jni/vie_android_java_api.cc
new file mode 100644
index 0000000..112667f
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/jni/vie_android_java_api.cc
@@ -0,0 +1,1563 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <android/log.h>
+
+#include "org_webrtc_videoengineapp_vie_android_java_api.h"
+
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_file.h"
+#include "voe_network.h"
+#include "voe_audio_processing.h"
+#include "voe_volume_control.h"
+#include "voe_hardware.h"
+
+#include "vie_base.h"
+#include "vie_codec.h"
+#include "vie_capture.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+
+#include "common_types.h"
+
+// Android log tag shared by every native log line in this file.
+#define WEBRTC_LOG_TAG "*WEBRTCN*"
+// Guard macros: each verifies that the corresponding VoiceEngine sub-API
+// pointer in the global veData struct has been acquired, logging an error
+// and returning -1 from the enclosing function otherwise. They expand to
+// plain if-statements, so they are only usable at the top of functions
+// that return an integer type.
+#define VALIDATE_BASE_POINTER                                           \
+  if (!veData.base)                                                     \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Base pointer doesn't exist");                  \
+    return -1;                                                          \
+  }
+#define VALIDATE_CODEC_POINTER                                          \
+  if (!veData.codec)                                                    \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Codec pointer doesn't exist");                 \
+    return -1;                                                          \
+  }
+#define VALIDATE_FILE_POINTER                                           \
+  if (!veData.file)                                                     \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "File pointer doesn't exist");                  \
+    return -1;                                                          \
+  }
+// BUG FIX: this macro previously tested veData.codec, so a missing
+// audio-processing interface went undetected while a missing codec
+// interface wrongly failed APM calls.
+#define VALIDATE_APM_POINTER                                            \
+  if (!veData.apm)                                                      \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Apm pointer doesn't exist");                   \
+    return -1;                                                          \
+  }
+#define VALIDATE_HARDWARE_POINTER                                       \
+  if (!veData.hardware)                                                 \
+  {                                                                     \
+    __android_log_write(                                                \
+        ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,                              \
+        "Hardware pointer doesn't exist");                              \
+    return -1;                                                          \
+  }
+#define VALIDATE_VOLUME_POINTER                                         \
+  if (!veData.volume)                                                   \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Volume pointer doesn't exist");                \
+    return -1;                                                          \
+  }
+
+using namespace webrtc;
+
+//Forward declaration.
+class VideoCallbackAndroid;
+
+// VoiceEngine data struct
+// Bundles the VoiceEngine instance with every sub-API interface this file
+// uses, so the JNI wrapper functions can share one global handle set.
+typedef struct
+{
+  // VoiceEngine
+  VoiceEngine* ve;
+  // Sub-APIs
+  VoEBase* base;             // Channel create/start/stop.
+  VoECodec* codec;           // Codec enumeration and selection.
+  VoEFile* file;             // File playout / file-as-microphone.
+  VoENetwork* netw;          // Local receiver / send destination.
+  VoEAudioProcessing* apm;   // EC/NS/AGC control (see VE_Set*Status).
+  VoEVolumeControl* volume;  // Speaker volume.
+  VoEHardware* hardware;     // Loudspeaker routing.
+  JavaVM* jvm;               // Cached VM for thread attachment.
+} VoiceEngineData;
+
+class AndroidVideoRenderCallback;
+// VideoEngine data struct
+// Same idea for the video side; `callback` is the decoder/encoder
+// observer that forwards statistics to Java.
+typedef struct
+{
+  VideoEngine* vie;
+  ViEBase* base;
+  ViECodec* codec;
+  ViENetwork* netw;
+  ViERTP_RTCP* rtp;
+  ViERender* render;
+  ViECapture* capture;
+  VideoCallbackAndroid* callback;
+
+} VideoEngineData;
+
+// Global variables
+JavaVM* webrtcGlobalVM;  // Set once in JNI_OnLoad.
+
+// Global variables visible in this file
+static VoiceEngineData veData;
+static VideoEngineData vieData;
+
+// "Local" functions (i.e. not Java accessible)
+#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
+static bool VE_GetSubApis();
+static bool VE_ReleaseSubApis();
+
+// Logs a non-zero API return code and breaks out of the enclosing
+// loop/switch; only usable inside one.
+#define CHECK_API_RETURN(ret)                                           \
+  if (ret!=0)                                                           \
+  {                                                                     \
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Return error %d",ret);                         \
+    break;                                                              \
+  }
+
+// Observer for a video channel's decoder and encoder. WebRTC invokes the
+// rate callbacks from its own native threads; IncomingRate forwards the
+// combined receive/send statistics to the Java IViEAndroidCallback.
+class VideoCallbackAndroid: public ViEDecoderObserver,
+                            public ViEEncoderObserver
+{
+
+    // Implements ViEDecoderObserver
+    virtual void IncomingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate)
+    {
+        // Let's print out the network statistics from this call back as well
+        unsigned short fraction_lost;
+        unsigned int dummy;
+        int intdummy;
+        _vieData.rtp->GetReceivedRTCPStatistics(videoChannel, fraction_lost,
+                                                dummy, dummy, dummy, intdummy);
+        unsigned short packetLossRate = 0;
+        if (fraction_lost > 0)
+        {
+            // Change from frac to %
+            // (fraction_lost is in 1/256 units, per RTCP reports).
+            packetLossRate = (fraction_lost * 100) >> 8;
+        }
+
+        // Attach this native thread to the JVM to obtain a JNIEnv; the
+        // cached _env belongs to the constructing thread and must not be
+        // used here.
+        JNIEnv* threadEnv = NULL;
+        int ret = webrtcGlobalVM->AttachCurrentThread(&threadEnv, NULL);
+        // Get the JNI env for this thread
+        if ((ret < 0) || !threadEnv)
+        {
+            __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "Could not attach thread to JVM (%d, %p)", ret,
+                                threadEnv);
+            return;
+        }
+        // Report decode-side stats plus the cached encode-side stats in
+        // one UpdateStats call.
+        threadEnv->CallIntMethod(_callbackObj, _callbackId, framerate, bitrate,
+                                 packetLossRate, _frameRateO, _bitRateO);
+        // NOTE(review): detaches unconditionally — assumes this callback
+        // never fires on an already-attached Java thread; confirm.
+        webrtcGlobalVM->DetachCurrentThread();
+    }
+    ;
+
+    // No-op: codec-change notifications are not surfaced to Java.
+    virtual void IncomingCodecChanged(const int videoChannel,
+                                      const webrtc::VideoCodec& videoCodec)
+    {
+    }
+    ;
+
+    // No-op.
+    virtual void RequestNewKeyFrame(const int videoChannel)
+    {
+    }
+    ;
+
+    // Implements ViEEncoderObserver: cache the outgoing rates so the next
+    // IncomingRate callback can report both directions together.
+    virtual void OutgoingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate)
+    {
+        _frameRateO = framerate;
+        _bitRateO = bitrate;
+        //__android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+        // "SendRate frameRate %d bitrate %d\n",frameRate,bitrate);
+    }
+    ;
+
+public:
+  VideoEngineData& _vieData;   // Engine handles; rtp is used above.
+  JNIEnv * _env;               // Env of the constructing thread only.
+  jobject _callbackObj;        // Global ref to the Java callback object.
+  jclass _callbackCls;
+  jmethodID _callbackId;       // IViEAndroidCallback.UpdateStats(IIIII)I
+  int _frameRateO, _bitRateO;  // Last observed outgoing rates.
+  // Resolves the Java UpdateStats method and pins the callback object
+  // with a global reference so it stays valid across JNI calls.
+  // NOTE(review): the global ref is never deleted in this file.
+  VideoCallbackAndroid(VideoEngineData& vieData, JNIEnv * env,
+                       jobject callback) :
+      _vieData(vieData), _env(env), _callbackObj(callback),
+      _frameRateO(0), _bitRateO(0) {
+    _callbackCls = _env->GetObjectClass(_callbackObj);
+    _callbackId
+        = _env->GetMethodID(_callbackCls, "UpdateStats", "(IIIII)I");
+    if (_callbackId == NULL) {
+      // Lookup failed; subsequent CallIntMethod use would be invalid.
+      __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to get jid");
+    }
+    _callbackObj = _env->NewGlobalRef(_callbackObj);
+  }
+};
+
+// JNI_OnLoad — library entry point invoked by the Java VM when
+// System.loadLibrary() runs. Caches the VM pointer for later
+// AttachCurrentThread calls, verifies JNI 1.4 support, and
+// zero-initializes the global voice/video engine state.
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
+  webrtcGlobalVM = vm;
+  if (webrtcGlobalVM == NULL)
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad did not receive a valid VM pointer");
+    return -1;
+  }
+
+  // Confirm the VM supports at least JNI 1.4 before claiming it.
+  JNIEnv* jniEnv = NULL;
+  if (vm->GetEnv(reinterpret_cast<void**> (&jniEnv),
+                 JNI_VERSION_1_4) != JNI_OK) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad could not get JNI env");
+    return -1;
+  }
+
+  // Reset both engine structs and remember the VM for callback threads.
+  memset(&veData, 0, sizeof(veData));
+  memset(&vieData, 0, sizeof(vieData));
+  veData.jvm = vm;
+
+  return JNI_VERSION_1_4;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    NativeInit
+// Signature: (Landroid/content/Context;)Z
+//
+// No native initialization is currently required; always reports success.
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit(JNIEnv * env,
+                                                            jobject,
+                                                            jobject context)
+{
+  return true;  // Nothing to set up on the native side.
+}
+
+// VideoEngine API wrapper functions
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    GetVideoEngine
+// Signature: ()I
+// Creates the process-wide VideoEngine and acquires all sub-API
+// interfaces into the global vieData. Returns 0 on success, -1 if an
+// engine already exists or any acquisition step fails.
+// NOTE(review): on a mid-sequence failure the engine and the interfaces
+// acquired so far are left allocated; only a later Terminate() call
+// releases them.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine(
+    JNIEnv *,
+    jobject context) {
+
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "GetVideoEngine");
+
+  // Hand the cached JVM and the Java context to the engine.
+  VideoEngine::SetAndroidObjects(webrtcGlobalVM, context);
+
+  // Check if already got
+  if (vieData.vie) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "ViE already got");
+    return -1;
+  }
+
+  // Create
+  vieData.vie = VideoEngine::Create();
+  if (!vieData.vie) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, "Get ViE failed");
+    return -1;
+  }
+  vieData.base = ViEBase::GetInterface(vieData.vie);
+  if (!vieData.base) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get base sub-API failed");
+    return -1;
+  }
+
+  vieData.codec = ViECodec::GetInterface(vieData.vie);
+  if (!vieData.codec) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get codec sub-API failed");
+    return -1;
+  }
+
+  vieData.netw = ViENetwork::GetInterface(vieData.vie);
+  if (!vieData.netw) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get network sub-API failed");
+    return -1;
+  }
+
+  vieData.rtp = ViERTP_RTCP::GetInterface(vieData.vie);
+  if (!vieData.rtp) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get RTP sub-API failed");
+    return -1;
+  }
+
+  vieData.render = ViERender::GetInterface(vieData.vie);
+  if (!vieData.render) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get Render sub-API failed");
+    return -1;
+  }
+
+  vieData.capture = ViECapture::GetInterface(vieData.vie);
+  if (!vieData.capture) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get Capture sub-API failed");
+    return -1;
+  }
+
+  return 0;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    Init
+// Signature: (Z)I
+// Initializes the video engine, configures tracing (file trace to
+// /sdcard/trace.txt when enableTrace is set, otherwise filtered off),
+// and wires the voice engine in if one was created. Returns
+// ViEBase::Init()'s result, or -1 when no engine exists.
+// NOTE(review): vieData.base is dereferenced without a NULL check —
+// assumes GetVideoEngine completed successfully.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init(
+    JNIEnv *,
+    jobject,
+    jboolean enableTrace)
+{
+    if (vieData.vie) {
+      __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Init");
+
+      int ret = vieData.base->Init();
+      __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "Init return %d", ret);
+        if (enableTrace)
+        {
+            __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "SetTraceFile");
+            // Trace setup failures are logged but not treated as fatal.
+            if (0 != vieData.vie->SetTraceFile(("/sdcard/trace.txt"), false))
+            {
+                __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                    "Video Engine could not enable trace");
+            }
+
+            __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "SetTraceFilter");
+            if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceDefault))
+            {
+                __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
+                                    "Could not set trace filter");
+            }
+        }
+        else
+        {
+            // Tracing explicitly disabled.
+            if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceNone))
+            {
+                __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
+                                    "Could not set trace filter");
+            }
+        }
+        if (veData.ve) // VoiceEngine is enabled
+        {
+            __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "SetVoiceEngine");
+            if (0 != vieData.base->SetVoiceEngine(veData.ve))
+            {
+                __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                    "SetVoiceEngine failed");
+            }
+        }
+        return ret;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Terminate
+ * Signature: ()I
+ *
+ * Releases every acquired ViE sub-API interface, deletes the engine and
+ * clears the cached Android objects. Each `!pointer || Release()` test
+ * logs an error both when the interface was never acquired and when
+ * releasing it fails, but termination continues regardless so the engine
+ * itself can still be deleted. Returns 0 on success, -1 when no engine
+ * exists or deleting it fails.
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate(
+    JNIEnv *,
+    jobject)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Terminate");
+
+  if (vieData.vie) {
+    if (!vieData.rtp || vieData.rtp->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release RTP sub-API");
+    }
+
+    if (!vieData.netw || vieData.netw->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Network sub-API");
+    }
+
+    if (!vieData.codec || vieData.codec->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Failed to release Codec sub-API");
+    }
+
+    // Render/Capture use the int return directly as a truth value
+    // (non-zero == failure) — same semantics as the `!= 0` checks above.
+    if (!vieData.render || vieData.render->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Render sub-API");
+    }
+
+    if (!vieData.capture || vieData.capture->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Capture sub-API");
+    }
+
+    if (!vieData.base || vieData.base->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Base sub-API");
+    }
+
+    // Delete Vie
+    if (!VideoEngine::Delete(vieData.vie)) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to delete ViE ");
+      return -1;
+    }
+    // Reset all cached pointers and drop the Android object references.
+    memset(&vieData, 0, sizeof(vieData));
+    VideoEngine::SetAndroidObjects(NULL, NULL);
+    return 0;
+  }
+  else {
+    return -1;
+  }
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StartSend
+// Signature: (I)I
+// Starts transmission on `channel`; -1 when the base sub-API is missing.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend");
+
+  if (!vieData.base)
+    return -1;
+  return vieData.base->StartSend(channel);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StopRender
+// Signature: (I)I
+// Halts rendering of the remote stream on `channel`; -1 when the render
+// sub-API is missing.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopRender");
+
+    if (!vieData.render)
+        return -1;
+    return vieData.render->StopRender(channel);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StopSend
+// Signature: (I)I
+// Stops transmission on `channel`; -1 when the base sub-API is missing.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopSend");
+
+  if (!vieData.base)
+    return -1;
+  return vieData.base->StopSend(channel);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StartReceive
+// Signature: (I)I
+// Starts reception on `channel`; -1 when the base sub-API is missing.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartReceive");
+
+  if (!vieData.base)
+    return -1;
+  return vieData.base->StartReceive(channel);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StopReceive
+// Signature: (I)I
+// Stops reception on `channel`; -1 when the base sub-API is missing.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopReceive");
+  if (!vieData.base)
+    return -1;
+  return vieData.base->StopReceive(channel);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    CreateChannel
+// Signature: (I)I
+// (Comment fixed: was ()I, but the method takes a voice-channel int.)
+// Creates a new video channel and, when voiceChannel >= 0, connects it
+// to that audio channel. Returns the new channel id, or -1 on failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel(
+    JNIEnv *,
+    jobject,
+    jint voiceChannel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "CreateChannel");
+
+  // BUG FIX: guard vieData.base (the pointer actually dereferenced below),
+  // not only vieData.vie; base is NULL if GetVideoEngine failed part-way.
+  if (vieData.vie && vieData.base) {
+    int channel = 0;
+    if (vieData.base->CreateChannel(channel) != 0) {
+      return -1;
+    }
+    // A negative voiceChannel means "no audio pairing". The return value
+    // of ConnectAudioChannel is deliberately ignored, as before.
+    if (voiceChannel >= 0) {
+      vieData.base->ConnectAudioChannel(channel, voiceChannel);
+    }
+
+    return channel;
+  }
+  else {
+    return -1;
+  }
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    SetLocalReceiver
+// Signature: (II)I
+// Binds video channel `channel` to local `port` for reception. Returns
+// the network sub-API result, or -1 when the engine is unavailable.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint port)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver");
+
+  // BUG FIX: guard vieData.netw (the pointer actually dereferenced below),
+  // not only vieData.vie; netw is NULL if GetVideoEngine failed part-way.
+  if (vieData.vie && vieData.netw) {
+    int ret = vieData.netw->SetLocalReceiver(channel, port);
+    return ret;
+  }
+  else {
+    return -1;
+  }
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    SetSendDestination
+// Signature: (II[B)I
+// Sets the remote ip/port for `channel`. The address arrives as a Java
+// byte array holding an ASCII string; it is copied into a NUL-terminated
+// local buffer (1..63 bytes accepted).
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jint port,
+    jbyteArray ipadr)
+{
+
+  // BUG FIX: also require the network sub-API — it is the pointer that is
+  // actually dereferenced at the bottom of this function.
+  if (NULL == vieData.vie || NULL == vieData.netw)
+    return -1;
+
+  // Reject empty addresses and anything that cannot fit with the NUL.
+  char ip[64];
+  jsize len = env->GetArrayLength(ipadr);
+  if ((len >= 64) || (len == 0))
+    return -1;
+  env->GetByteArrayRegion(ipadr, 0, len, (jbyte*) ip);
+  ip[len] = '\0';
+
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "SetSendDestination: channel=%d, port=%d, ip=%s\n",
+                      channel, port, ip);
+
+  return vieData.netw->SetSendDestination(channel, ip, port);
+}
+
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    SetReceiveCodec
+// Signature: (IIIIII)I
+// Applies receive codec number `codecNum` to `channel`. The bitrate,
+// width, height and frameRate parameters are accepted for symmetry with
+// SetSendCodec but are ignored here.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint codecNum,
+    jint intbitRate,
+    jint width,
+    jint height,
+    jint frameRate)
+{
+  if (NULL == vieData.codec)
+    return -1;
+
+  //Create codec
+  webrtc::VideoCodec codec;
+  vieData.codec->GetCodec(codecNum, codec);
+
+  // BUG FIX: the log previously passed codec.codecSpecific (a union, not
+  // an integer — see common_types.h) to a "%d" conversion, which is
+  // undefined behavior in a varargs call; the field was dropped.
+  __android_log_print(
+      ANDROID_LOG_DEBUG,
+      WEBRTC_LOG_TAG,
+      "SetReceiveCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
+      " width=%d, height=%d, frameRate=%d \n",
+      codec.plName, codec.plType, codec.startBitrate,
+      codec.maxBitrate, codec.width, codec.height,
+      codec.maxFramerate);
+  int ret = vieData.codec->SetReceiveCodec(channel, codec);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "SetReceiveCodec return %d", ret);
+  return ret;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    SetSendCodec
+// Signature: (IIIIII)I
+// Configures the outgoing codec for `channel`: looks up codec number
+// `codecNum`, overrides its bitrate/resolution/framerate with the
+// supplied values, then applies it. Returns SetSendCodec's result, or
+// -1 when the codec sub-API is unavailable.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint codecNum,
+    jint intbitRate,
+    jint width,
+    jint height,
+    jint frameRate)
+{
+  if (NULL == vieData.codec)
+    return -1;
+
+  //Create codec
+  // NOTE(review): GetCodec's return value is ignored; an invalid
+  // codecNum would leave `codec` partially filled — confirm callers
+  // always pass a valid index.
+  webrtc::VideoCodec codec;
+  vieData.codec->GetCodec(codecNum, codec);
+  codec.startBitrate = intbitRate;
+  // Hard-coded cap of 600 (presumably kbit/s — confirm against the
+  // VideoCodec definition).
+  codec.maxBitrate = 600;
+  codec.width = width;
+  codec.height = height;
+  codec.maxFramerate = frameRate;
+
+  // Debug aid: dump every available codec. NumberOfCodecs() is
+  // re-evaluated each iteration.
+  for (int i = 0; i < vieData.codec->NumberOfCodecs(); ++i) {
+    webrtc::VideoCodec codecToList;
+    vieData.codec->GetCodec(i, codecToList);
+    __android_log_print(
+        ANDROID_LOG_DEBUG,
+        WEBRTC_LOG_TAG,
+        "Codec list %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
+        " width=%d, height=%d, frameRate=%d\n",
+        codecToList.plName, codecToList.plType,
+        codecToList.startBitrate, codecToList.maxBitrate,
+        codecToList.width, codecToList.height,
+        codecToList.maxFramerate);
+  }
+  __android_log_print(
+      ANDROID_LOG_DEBUG,
+      WEBRTC_LOG_TAG,
+      "SetSendCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d, "
+      "width=%d, height=%d, frameRate=%d\n",
+      codec.plName, codec.plType, codec.startBitrate,
+      codec.maxBitrate, codec.width, codec.height,
+      codec.maxFramerate);
+
+  return vieData.codec->SetSendCodec(channel, codec);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    AddRemoteRenderer
+// Signature: (ILandroid/view/SurfaceView;)I
+// Attaches `glSurface` as the renderer for the remote stream on
+// `channel`. The trailing 0,0,0,1,1 arguments presumably select z-order
+// and a full-surface rectangle — confirm against ViERender::AddRenderer.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jobject glSurface)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "AddRemoteRenderer");
+  // BUG FIX: guard vieData.render (the pointer actually dereferenced
+  // below), not only vieData.vie.
+  if (vieData.vie && vieData.render) {
+    return vieData.render->AddRenderer(channel, glSurface, 0, 0, 0, 1, 1);
+  }
+  else {
+    return -1;
+  }
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    RemoveRemoteRenderer
+// Signature: (I)I
+// Detaches the renderer from `channel`; -1 when unavailable.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "RemoveRemoteRenderer");
+
+  // BUG FIX: guard vieData.render (the pointer actually dereferenced
+  // below), not only vieData.vie. Also removed the unreachable
+  // `return 0;` that followed an if/else where both branches returned.
+  if (vieData.vie && vieData.render) {
+    return vieData.render->RemoveRenderer(channel);
+  }
+  return -1;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StartRender
+// Signature: (I)I
+// Begins rendering the remote stream on `channel`; -1 when the render
+// sub-API is missing.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartRender");
+
+  if (!vieData.render)
+    return -1;
+  return vieData.render->StartRender(channel);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StartCamera
+// Signature: (II)I
+// Allocates capture device number `cameraNum`, connects it to `channel`
+// and starts capturing. Returns the allocated capture id (>= 0) on
+// success, -1 on failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jint cameraNum)
+{
+  // BUG FIX: also require the capture sub-API, which is the pointer that
+  // is actually dereferenced throughout this function.
+  if (NULL == vieData.vie || NULL == vieData.capture)
+    return -1;
+
+  // Debug aid: enumerate all capture devices until GetCaptureDevice fails.
+  int i = 0;
+  char deviceName[64];
+  char deviceUniqueName[64];
+  int re;
+  do {
+      re = vieData.capture->GetCaptureDevice(i, deviceName,
+                                             sizeof(deviceName),
+                                             deviceUniqueName,
+                                             sizeof(deviceUniqueName));
+      __android_log_print(
+          ANDROID_LOG_DEBUG,
+          WEBRTC_LOG_TAG,
+          "GetCaptureDevice ret %d devicenum %d deviceUniqueName %s",
+          re, i, deviceUniqueName);
+      i++;
+  } while (re == 0);
+
+  int ret;
+  // BUG FIX: cameraId was previously uninitialized; when
+  // AllocateCaptureDevice failed without writing it, an indeterminate
+  // value was tested and returned. Failures now return -1 explicitly.
+  int cameraId = -1;
+  if (vieData.capture->GetCaptureDevice(cameraNum, deviceName,
+                                        sizeof(deviceName), deviceUniqueName,
+                                        sizeof(deviceUniqueName)) != 0) {
+    return -1;
+  }
+  if (vieData.capture->AllocateCaptureDevice(deviceUniqueName,
+                                             sizeof(deviceUniqueName),
+                                             cameraId) != 0) {
+    return -1;
+  }
+
+  if (cameraId >= 0) { // Connect the device to the channel and start it.
+    ret = vieData.capture->ConnectCaptureDevice(cameraId, channel);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "ConnectCaptureDevice ret %d ", ret);
+
+    ret = vieData.capture->StartCapture(cameraId);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "StartCapture ret %d ", ret);
+  }
+
+  return cameraId;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    StopCamera
+// Signature: (I)I
+// Stops capture on `cameraId` and releases the device; returns the
+// ReleaseCaptureDevice result, or -1 when the capture sub-API is missing.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera(
+    JNIEnv *,
+    jobject,
+    jint cameraId)
+{
+  if (NULL == vieData.capture)
+    return -1;
+
+  int status = vieData.capture->StopCapture(cameraId);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "StopCapture  ret %d ", status);
+  // The device is released even if StopCapture reported a failure.
+  status = vieData.capture->ReleaseCaptureDevice(cameraId);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "ReleaseCaptureDevice  ret %d ", status);
+
+  return status;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    GetCameraOrientation
+// Signature: (I)I
+// Looks up capture device `cameraNum` and returns its mount orientation
+// as the raw RotateCapturedFrame enum value, or -1 on failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation(
+    JNIEnv *,
+    jobject,
+    jint cameraNum)
+{
+  // BUG FIX: vieData.capture was dereferenced without a NULL check,
+  // unlike every other capture wrapper in this file.
+  if (NULL == vieData.capture)
+    return -1;
+
+  char deviceName[64];
+  char deviceUniqueName[64];
+  int ret;
+
+  ret = vieData.capture->GetCaptureDevice(cameraNum, deviceName,
+                                          sizeof(deviceName),
+                                          deviceUniqueName,
+                                          sizeof(deviceUniqueName));
+  if (ret != 0) {
+    return -1;
+  }
+
+  // NOTE(review): GetOrientation's return value is logged but not
+  // treated as an error, matching the original behavior.
+  RotateCapturedFrame orientation;
+  ret = vieData.capture->GetOrientation(deviceUniqueName, orientation);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "GetOrientation  ret %d orientation %d", ret,
+                      orientation);
+
+  return (jint) orientation;
+
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    SetRotation
+// Signature: (II)I
+//
+// Maps |degrees| (90/180/270; anything else falls back to 0) onto the
+// RotateCapturedFrame enum and applies it to |captureId|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation(
+    JNIEnv *,
+    jobject,
+    jint captureId,
+    jint degrees)
+{
+  if (vieData.capture == NULL)
+    return -1;
+
+  RotateCapturedFrame rotation;
+  switch (degrees) {
+    case 90:
+      rotation = RotateCapturedFrame_90;
+      break;
+    case 180:
+      rotation = RotateCapturedFrame_180;
+      break;
+    case 270:
+      rotation = RotateCapturedFrame_270;
+      break;
+    default:
+      rotation = RotateCapturedFrame_0;
+      break;
+  }
+
+  return vieData.capture->SetRotateCapturedFrames(captureId, rotation);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    EnableNACK
+// Signature: (IZ)I
+//
+// Enables or disables NACK on |channel| via the RTP sub-API.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jboolean enable)
+{
+  if (NULL == vieData.rtp)
+    return -1;
+
+  // Log the requested state, then forward it unchanged.
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      enable ? "EnableNACK enable" : "EnableNACK disable");
+
+  return vieData.rtp->SetNACKStatus(channel, enable);
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    EnablePLI
+// Signature: (IZ)I
+//
+// Selects PLI (RTCP Picture Loss Indication) as the key frame request
+// method for |channel|.
+// NOTE(review): |enable| only affects the log message below --
+// SetKeyFrameRequestMethod() is always called with
+// kViEKeyFrameRequestPliRtcp, so enable=false does NOT turn PLI off.
+// Confirm whether a disable path is needed.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jboolean enable)
+{
+  if (NULL == vieData.rtp)
+    return -1;
+
+  if (enable)
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "EnablePLI enable");
+  else
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "EnablePLI disable");
+
+  int ret = vieData.rtp->SetKeyFrameRequestMethod(channel,
+                                                  kViEKeyFrameRequestPliRtcp);
+  return ret;
+}
+
+// Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+// Method:    SetCallback
+// Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
+//
+// Installs |callback| as both decoder and encoder observer on |channel|.
+// The native VideoCallbackAndroid wrapper is created once on the first
+// call and reused afterwards; subsequent calls deregister the previous
+// observers before re-registering.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jobject callback)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetCallback");
+
+  if (NULL == vieData.codec)
+    return -1;
+  // First call: wrap the Java callback object; the wrapper is kept for
+  // the lifetime of vieData and |callback| is not re-wrapped later.
+  if (vieData.callback == NULL) {
+    vieData.callback = new VideoCallbackAndroid(vieData, env, callback);
+  }
+  else if (vieData.codec) {
+    vieData.codec->DeregisterDecoderObserver(channel); // Wrong channel?
+    vieData.codec->DeregisterEncoderObserver(channel);
+  }
+
+  vieData.codec->RegisterDecoderObserver(channel, *vieData.callback);
+  vieData.codec->RegisterEncoderObserver(channel, *vieData.callback);
+
+  return 0;
+}
+
+//
+// VoiceEngine API wrapper functions
+//
+
+// Create VoiceEngine instance
+//
+// Creates the global VoiceEngine (veData.ve) and acquires all sub-API
+// interfaces.  Returns true on success; on any failure the partially
+// acquired sub-APIs and the engine are torn down again.  Fails if an
+// engine already exists.
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Create(
+    JNIEnv *env,
+    jobject,
+    jobject context)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create");
+
+  // Check if already created
+  if (veData.ve) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "VoE already created");
+    return false;
+  }
+
+  // Init Android Object
+  // (JVM/context handed to the engine before it is created)
+  VoiceEngine::SetAndroidObjects(veData.jvm, env, context);
+  // Create
+  veData.ve = VoiceEngine::Create();
+  if (!veData.ve) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Create VoE failed");
+    return false;
+  }
+
+  // Get sub-APIs
+  if (!VE_GetSubApis()) {
+    // If not OK, release all sub-APIs and delete VoE
+    VE_ReleaseSubApis();
+    if (!VoiceEngine::Delete(veData.ve)) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Delete VoE failed");
+    }
+    return false;
+  }
+
+  return true;
+}
+
+// Delete VoiceEngine instance
+//
+// Releases all sub-APIs, deletes the engine, and clears the Android
+// objects previously set in VE_Create.  Returns false if no engine
+// exists or deletion fails.
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Delete(
+    JNIEnv *,
+    jobject)
+{
+  // Check if exists
+  if (!veData.ve) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "VoE does not exist");
+    return false;
+  }
+
+  // Release sub-APIs
+  VE_ReleaseSubApis();
+
+  // Delete
+  if (!VoiceEngine::Delete(veData.ve)) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Delete VoE failed");
+    return false;
+  }
+
+  veData.ve = NULL;
+
+  // Clear instance independent Java objects
+  VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
+
+  return true;
+}
+
+// [Base] Initialize VoiceEngine
+//
+// |enableTrace| is currently unused.  |useExtTrans| (external transport)
+// is not implemented and makes the call fail with -1.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Init(
+    JNIEnv *,
+    jobject,
+    jboolean enableTrace,
+    jboolean useExtTrans)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "VE_Init");
+
+  VALIDATE_BASE_POINTER;
+
+  // External transport is not implemented.
+  if (useExtTrans)
+    return -1;
+
+  return veData.base->Init();
+}
+
+// [Base] Terminate VoiceEngine
+JNIEXPORT jint
+JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_Terminate(
+    JNIEnv *,
+    jobject)
+{
+  VALIDATE_BASE_POINTER;
+  // Thin forward to the base sub-API.
+  return veData.base->Terminate();
+}
+
+// [Base] Create channel
+//
+// Creates a new VoE channel and returns its id.  As a debugging aid the
+// supported voice codecs are dumped to the log first.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_CreateChannel(
+    JNIEnv *,
+    jobject)
+{
+  VALIDATE_BASE_POINTER;
+
+  // Bug fix: the codec enumeration is debug output only, but the
+  // original dereferenced veData.codec without a NULL check (only the
+  // base pointer is validated above).  Skip the dump if codec is absent.
+  if (veData.codec) {
+    webrtc::CodecInst voiceCodec;
+    int numOfVeCodecs = veData.codec->NumOfCodecs();
+
+    //enum all the supported codec
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "Supported Voice Codec:\n");
+    for (int i = 0; i < numOfVeCodecs; ++i) {
+      if (veData.codec->GetCodec(i, voiceCodec) != -1) {
+        __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "num: %d name: %s\n", i, voiceCodec.plname);
+      }
+    }
+  }
+
+  jint channel = veData.base->CreateChannel();
+
+  return channel;
+}
+
+// [Base] Delete channel
+// Deletes VoE |channel|; forwards the sub-API's return code.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_DeleteChannel(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return veData.base->DeleteChannel(channel);
+}
+
+// [Base] SetLocalReceiver
+// Binds |channel| to receive on local |port|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetLocalReceiver(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint port)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver");
+  VALIDATE_BASE_POINTER;
+  return veData.base->SetLocalReceiver(channel, port);
+}
+
+// [Base] SetSendDestination
+// Points |channel|'s outgoing audio at |ipaddr|:|port|.  Returns -1 if
+// the Java string cannot be converted.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetSendDestination(
+    JNIEnv *env,
+    jobject,
+    jint channel,
+    jint port,
+    jstring ipaddr)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendDestination");
+  VALIDATE_BASE_POINTER;
+
+  // GetStringUTFChars must be paired with ReleaseStringUTFChars below.
+  const char* ipaddrNative = env->GetStringUTFChars(ipaddr, NULL);
+  if (!ipaddrNative) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+  jint retVal = veData.base->SetSendDestination(channel, port, ipaddrNative);
+  env->ReleaseStringUTFChars(ipaddr, ipaddrNative);
+  return retVal;
+}
+
+// [Base] StartListen
+// "Listen" maps to VoEBase::StartReceive for |channel|.
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartListen(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartListen");
+  VALIDATE_BASE_POINTER;
+  return veData.base->StartReceive(channel);
+}
+
+// [Base] Start playout
+// Forwards to VoEBase::StartPlayout for |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartPlayout(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartPlayout");
+  VALIDATE_BASE_POINTER;
+  return veData.base->StartPlayout(channel);
+}
+
+// [Base] Start send
+// Forwards to VoEBase::StartSend for |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend");
+  VALIDATE_BASE_POINTER;
+  return veData.base->StartSend(channel);
+}
+
+// [Base] Stop listen
+// Stops reception on |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopListen(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  // Bug fix: this previously called StartReceive(), so "stop listen"
+  // actually (re)started reception.  StopReceive() is the matching
+  // counterpart to the StartReceive() used by VE_StartListen above.
+  return veData.base->StopReceive(channel);
+}
+
+// [Base] Stop playout
+// Forwards to VoEBase::StopPlayout for |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopPlayout(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return veData.base->StopPlayout(channel);
+}
+
+// [Base] Stop send
+// Forwards to VoEBase::StopSend for |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return veData.base->StopSend(channel);
+}
+
+// [codec] Number of codecs
+// Returns the codec count from the codec sub-API.  VALIDATE_CODEC_POINTER
+// presumably bails out with an error when veData.codec is NULL -- macro
+// defined earlier in this file.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_NumOfCodecs(
+    JNIEnv *,
+    jobject)
+{
+  VALIDATE_CODEC_POINTER;
+  return veData.codec->NumOfCodecs();
+}
+
+// [codec] Set send codec
+//
+// Sets the send codec for |channel| to the codec at list position
+// |index|.  The full codec list is logged first as a debugging aid.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetSendCodec(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint index)
+{
+  VALIDATE_CODEC_POINTER;
+
+  // Dump every supported codec to the log.
+  const int numCodecs = veData.codec->NumOfCodecs();
+  for (int idx = 0; idx < numCodecs; ++idx) {
+    webrtc::CodecInst info;
+    veData.codec->GetCodec(idx, info);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "VE Codec list %s, pltype=%d\n",
+                        info.plname, info.pltype);
+  }
+
+  // Look up the requested codec, then apply it.
+  webrtc::CodecInst codec;
+  if (veData.codec->GetCodec(index, codec) != 0) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Failed to get codec");
+    return -1;
+  }
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendCodec %s\n",
+                      codec.plname);
+
+  return veData.codec->SetSendCodec(channel, codec);
+}
+
+// [audioprocessing] SetNSStatus
+// Not implemented: parameters are ignored and -1 is always returned.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetNSStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable,
+    jint mode)
+{
+  //TODO implement
+  return -1;
+}
+
+// [audioprocessing] SetAGCStatus
+// Not implemented: parameters are ignored and -1 is always returned.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetAGCStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable,
+    jint mode)
+{
+  //TODO implement
+  return -1;
+}
+
+// [audioprocessing] SetECStatus
+// Not implemented: parameters are ignored and -1 is always returned.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetECStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable,
+    jint mode,
+    jint AESmode,
+    jint AESattenuation)
+{
+  //TODO implement
+  return -1;
+}
+
+// [File] Start play file locally
+//
+// Starts local playout of |fileName| on |channel|, optionally looping.
+// Returns the file sub-API's result, or -1 if the path string cannot be
+// converted.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartPlayingFileLocally(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jstring fileName,
+    jboolean loop)
+{
+  VALIDATE_FILE_POINTER;
+
+  // Convert the Java string to a native UTF-8 path.
+  const char* path = env->GetStringUTFChars(fileName, NULL);
+  if (path == NULL) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+
+  jint result = veData.file->StartPlayingFileLocally(channel, path, loop);
+
+  // Always release the UTF chars obtained above.
+  env->ReleaseStringUTFChars(fileName, path);
+
+  return result;
+}
+
+// [File] Stop play file locally
+// Stops local file playout on |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopPlayingFileLocally(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_FILE_POINTER;
+  return veData.file->StopPlayingFileLocally(channel);
+}
+
+// [File] Start playing file as microphone
+// Feeds |fileName| into |channel| as if it were microphone input,
+// optionally looping.  Returns -1 if the path string cannot be converted.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StartPlayingFileAsMicrophone(
+    JNIEnv *env,
+    jobject,
+    jint channel,
+    jstring fileName,
+    jboolean loop)
+{
+  VALIDATE_FILE_POINTER;
+
+  // GetStringUTFChars must be paired with ReleaseStringUTFChars below.
+  const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+  if (!fileNameNative) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+
+  jint retVal = veData.file->StartPlayingFileAsMicrophone(channel,
+                                                          fileNameNative,
+                                                          loop);
+
+  env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+  return retVal;
+}
+
+// [File] Stop playing file as microphone
+// Stops the file-as-microphone feed on |channel|.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_StopPlayingFileAsMicrophone(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_FILE_POINTER;
+  return veData.file->StopPlayingFileAsMicrophone(channel);
+}
+
+// [Volume] Set speaker volume
+//
+// Sets the speaker volume to |level| and reads it back to verify the
+// value was applied.  Returns 0 on success, -1 on any failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetSpeakerVolume(
+    JNIEnv *,
+    jobject,
+    jint level)
+{
+  VALIDATE_VOLUME_POINTER;
+
+  // Reject negative levels up front; the read-back below is unsigned.
+  if (level < 0) {
+    return -1;
+  }
+
+  if (veData.volume->SetSpeakerVolume(level) != 0) {
+    return -1;
+  }
+
+  // Read the volume back to confirm the value actually took effect.
+  unsigned int storedVolume = 0;
+  if (veData.volume->GetSpeakerVolume(storedVolume) != 0) {
+    return -1;
+  }
+
+  // Cast fixes the original signed/unsigned comparison warning;
+  // |level| is known non-negative here.
+  if (storedVolume != static_cast<unsigned int>(level)) {
+    return -1;
+  }
+
+  return 0;
+}
+
+// [Hardware] Set speaker volume
+// Routes audio to/from the loudspeaker.  Returns 0 on success, -1 on
+// failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VE_SetLoudspeakerStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable)
+{
+  VALIDATE_HARDWARE_POINTER;
+
+  // Normalize the sub-API result to the 0 / -1 convention used here.
+  return (veData.hardware->SetLoudspeakerStatus(enable) == 0) ? 0 : -1;
+}
+
+//
+// local function
+//
+
+// Get all sub-APIs
+//
+// Acquires every VoE sub-API interface into veData.  Attempts all of
+// them even after a failure (so the caller gets a full error log) and
+// returns false if any acquisition failed.  Failed pointers stay NULL;
+// the caller is expected to run VE_ReleaseSubApis() on failure.
+bool VE_GetSubApis() {
+  bool getOK = true;
+
+  // Base
+  veData.base = VoEBase::GetInterface(veData.ve);
+  if (!veData.base) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get base sub-API failed");
+    getOK = false;
+  }
+
+  // Codec
+  veData.codec = VoECodec::GetInterface(veData.ve);
+  if (!veData.codec) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get codec sub-API failed");
+    getOK = false;
+  }
+
+  // File
+  veData.file = VoEFile::GetInterface(veData.ve);
+  if (!veData.file) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get file sub-API failed");
+    getOK = false;
+  }
+
+  // Network
+  veData.netw = VoENetwork::GetInterface(veData.ve);
+  if (!veData.netw) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get network sub-API failed");
+    getOK = false;
+  }
+
+  // audioprocessing
+  veData.apm = VoEAudioProcessing::GetInterface(veData.ve);
+  if (!veData.apm) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get VoEAudioProcessing sub-API failed");
+    getOK = false;
+  }
+
+  // Volume
+  veData.volume = VoEVolumeControl::GetInterface(veData.ve);
+  if (!veData.volume) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get volume sub-API failed");
+    getOK = false;
+  }
+
+  // Hardware
+  veData.hardware = VoEHardware::GetInterface(veData.ve);
+  if (!veData.hardware) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get hardware sub-API failed");
+    getOK = false;
+  }
+
+  return getOK;
+}
+
+// Release all sub-APIs
+//
+// Releases every sub-API interface held in veData, NULLing each pointer
+// only when its Release() succeeds.  Continues past individual failures
+// and returns false if any release failed.
+bool VE_ReleaseSubApis() {
+  bool releaseOK = true;
+
+  // Base
+  if (veData.base) {
+    if (0 != veData.base->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release base sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.base = NULL;
+    }
+  }
+
+  // Codec
+  if (veData.codec) {
+    if (0 != veData.codec->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release codec sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.codec = NULL;
+    }
+  }
+
+  // File
+  if (veData.file) {
+    if (0 != veData.file->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release file sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.file = NULL;
+    }
+  }
+
+  // Network
+  if (veData.netw) {
+    if (0 != veData.netw->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release network sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.netw = NULL;
+    }
+  }
+
+  // apm
+  if (veData.apm) {
+    if (0 != veData.apm->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release apm sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.apm = NULL;
+    }
+  }
+
+  // Volume
+  if (veData.volume) {
+    if (0 != veData.volume->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release volume sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.volume = NULL;
+    }
+  }
+
+  // Hardware
+  if (veData.hardware) {
+    if (0 != veData.hardware->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release hardware sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      veData.hardware = NULL;
+    }
+  }
+
+  return releaseOK;
+}
diff --git a/trunk/src/video_engine/main/test/android_test/res/drawable/logo.png b/trunk/src/video_engine/main/test/android_test/res/drawable/logo.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/drawable/logo.png
Binary files differ
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/aconfig.xml b/trunk/src/video_engine/main/test/android_test/res/layout/aconfig.xml
new file mode 100644
index 0000000..e88b7c6
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/aconfig.xml
@@ -0,0 +1,81 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+	android:layout_height="fill_parent" android:orientation="vertical" 
+	xmlns:android="http://schemas.android.com/apk/res/android">
+	<TextView android:layout_width="wrap_content" 
+	android:layout_height="wrap_content"
+	android:textStyle="bold" 
+	android:textSize="24dip" 
+	android:text="Audio Settings"></TextView>
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>
+	<TextView android:id="@+id/TextView01"
+		  android:layout_height="wrap_content"
+		  android:layout_gravity="bottom"
+		  android:layout_width="wrap_content"
+		  android:text="@string/codecType">
+	</TextView>
+	<Spinner android:layout_height="wrap_content"
+		 android:layout_width="fill_parent"
+		 android:id="@+id/spVoiceCodecType">
+	</Spinner>
+		
+	<LinearLayout android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">	
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/aTxPort">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:id="@+id/etATxPort">
+	</EditText>
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/aRxPort">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:id="@+id/etARxPort" >
+	</EditText>		
+	</LinearLayout>		
+	
+	<LinearLayout android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbSpeaker"
+		  android:text="@string/speaker">
+	</CheckBox>
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbAutoGainControl"
+		  android:text="@string/AutoGainControl">
+	</CheckBox>	
+	</LinearLayout>
+	
+	<LinearLayout android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbAECM"
+		  android:text="@string/AECM">
+	</CheckBox>
+	</LinearLayout>
+	
+	<LinearLayout android:id="@+id/LinearLayout03"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbNoiseSuppression"
+		  android:text="@string/NoiseSuppression">
+	</CheckBox>
+	</LinearLayout>	
+	
+</LinearLayout>
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/both.xml b/trunk/src/video_engine/main/test/android_test/res/layout/both.xml
new file mode 100644
index 0000000..d29d906
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/both.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout 
+    	xmlns:android="http://schemas.android.com/apk/res/android"    		    		
+    		android:orientation="horizontal"
+            android:layout_width="fill_parent"
+            android:layout_height="fill_parent">
+          <LinearLayout 
+                android:orientation="vertical"
+                android:layout_width="120dip"
+                android:layout_height="fill_parent">
+           <LinearLayout android:id="@+id/llLocalView" 
+            	android:layout_width="fill_parent" 
+            	android:layout_height="80dip">
+            </LinearLayout>
+            <TextView
+                android:layout_width="fill_parent"
+                android:layout_height="fill_parent"
+                android:layout_weight="1"
+                android:text="" />
+            <Button android:id="@+id/btSwitchCamera"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content"
+                    android:text="@string/frontCamera"
+                    android:layout_gravity="bottom"/>
+            <Button android:id="@+id/btStartStopCall"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content"
+                    android:text="@string/startCall"
+                    android:layout_gravity="bottom"/>
+			<Button android:id="@+id/btExit"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content"
+                    android:layout_gravity="bottom"
+                    android:text="@string/exit"/>
+        </LinearLayout>
+        <LinearLayout 
+            android:id="@+id/llRemoteView"
+            android:layout_width="fill_parent"
+            android:layout_height="fill_parent"
+            android:layout_weight="1">
+        </LinearLayout>
+    </LinearLayout >   
+
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/main.xml b/trunk/src/video_engine/main/test/android_test/res/layout/main.xml
new file mode 100644
index 0000000..3b16cb7
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/main.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+	android:layout_height="fill_parent" android:layout_gravity="right"
+	android:orientation="vertical"
+	      xmlns:android="http://schemas.android.com/apk/res/android">
+
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/tvTitle"
+		  android:textStyle="bold"
+		  android:textSize="24dip"
+		  android:text="@string/gSettings">
+	</TextView>
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>
+	<LinearLayout android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbVideoReceive"
+		  android:text="@string/enableVideoReceive">
+	</CheckBox>
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbVideoSend"
+		  android:text="@string/enableVideoSend">
+	</CheckBox>
+	</LinearLayout>
+	
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbVoice"
+		  android:text="@string/enableVoice">
+	</CheckBox>
+		
+	<TextView android:id="@+id/TextView02"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/remoteIp">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/etRemoteIp" >
+	</EditText>
+	
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbLoopback"
+		  android:text="@string/loopback">
+	</CheckBox>
+</LinearLayout>
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/send.xml b/trunk/src/video_engine/main/test/android_test/res/layout/send.xml
new file mode 100644
index 0000000..ee230f5
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/send.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+
+
+  <SurfaceView android:id="@+id/svLocal"
+	       android:layout_width="wrap_content"
+	       android:layout_height="wrap_content">
+  </SurfaceView>
+  <ImageView android:id="@+id/ivPreview"
+	     android:layout_height="fill_parent"
+	     android:layout_width="fill_parent">
+  </ImageView>
+
+</LinearLayout>
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/stats.xml b/trunk/src/video_engine/main/test/android_test/res/layout/stats.xml
new file mode 100644
index 0000000..1612ec3
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/stats.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+	android:layout_height="fill_parent" android:orientation="vertical" 
+	xmlns:android="http://schemas.android.com/apk/res/android">
+	<TextView android:layout_width="wrap_content" 
+	android:layout_height="wrap_content"
+	android:textStyle="bold" 
+	android:textSize="24dip" 
+	android:text="Stats">
+	</TextView>
+	
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>
+	
+	<TextView android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/tvLocalIp"
+		  android:textStyle="bold">
+	</TextView>
+	
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>
+	
+	<TextView android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/tvFrameRateI"
+		  android:textStyle="bold">
+	</TextView>
+	<TextView android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/tvBitRateI"
+		  android:textStyle="bold">
+	</TextView>
+	<TextView android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/tvPacketLoss"
+		  android:textStyle="bold">
+	</TextView>
+	
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>
+	
+	<TextView android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/tvFrameRateO"
+		  android:textStyle="bold">
+	</TextView>
+	<TextView android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/tvBitRateO"
+		  android:textStyle="bold">
+	</TextView>
+</LinearLayout>
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/tabhost.xml b/trunk/src/video_engine/main/test/android_test/res/layout/tabhost.xml
new file mode 100644
index 0000000..9997113
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/tabhost.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<TabHost xmlns:android="http://schemas.android.com/apk/res/android"
+	 android:id="@android:id/tabhost"
+	 android:layout_width="fill_parent"
+	 android:layout_height="fill_parent">
+  <LinearLayout
+     android:orientation="vertical"
+     android:layout_width="fill_parent"
+     android:layout_height="fill_parent">
+    <TabWidget
+       android:id="@android:id/tabs"
+       android:layout_width="fill_parent"
+       android:layout_height="wrap_content" />
+    <FrameLayout
+       android:id="@android:id/tabcontent"
+       android:layout_width="fill_parent"
+       android:layout_height="fill_parent">            
+      <include android:id="@+id/tab_video" layout="@layout/both" />
+      <include android:id="@+id/tab_config" layout="@layout/main" />
+      <include android:id="@+id/tab_vconfig" layout="@layout/vconfig" />
+      <include android:id="@+id/tab_aconfig" layout="@layout/aconfig" />
+      <include android:id="@+id/tab_stats" layout="@layout/stats" />		  
+    </FrameLayout>
+  </LinearLayout>
+</TabHost>
diff --git a/trunk/src/video_engine/main/test/android_test/res/layout/vconfig.xml b/trunk/src/video_engine/main/test/android_test/res/layout/vconfig.xml
new file mode 100644
index 0000000..5270c10
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/layout/vconfig.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+	android:layout_height="fill_parent" android:orientation="vertical" 
+	xmlns:android="http://schemas.android.com/apk/res/android">
+	<TextView android:layout_width="wrap_content" 
+	android:layout_height="wrap_content"
+	android:textStyle="bold" 
+	android:textSize="24dip" 
+	android:text="@string/vSettings">
+	</TextView>
+	
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>	
+	
+	<TextView android:id="@+id/TextView01"
+		  android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:text="@string/codecType">
+	</TextView>
+	<Spinner android:layout_height="wrap_content"
+		 android:layout_width="fill_parent"
+		 android:id="@+id/spCodecType">
+	</Spinner>
+	
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/tvCodecSize"
+		  android:text="@string/codecSize">
+	</TextView>
+	<Spinner android:layout_height="wrap_content"
+		 android:layout_width="fill_parent"
+		 android:id="@+id/spCodecSize">
+	</Spinner>
+	
+	<LinearLayout android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/vTxPort">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:id="@+id/etVTxPort" >
+	</EditText>
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/vRxPort">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:id="@+id/etVRxPort" >
+	</EditText>
+	</LinearLayout>
+	
+	<LinearLayout android:id="@+id/LinearLayout03"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbNack"
+		  android:text="@string/nack">
+	</CheckBox>	
+	</LinearLayout>
+</LinearLayout>
diff --git a/trunk/src/video_engine/main/test/android_test/res/values/arrays.xml b/trunk/src/video_engine/main/test/android_test/res/values/arrays.xml
new file mode 100644
index 0000000..77dd017
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/values/arrays.xml
@@ -0,0 +1,22 @@
+<resources>
+
+    <string-array name="codectype">
+        <item>vp8</item>
+    </string-array>
+    
+<string-array name="codecSize"><item>176x144</item><item>320x240</item>
+<item>352x288</item><item>640x480</item>
+
+</string-array>
+
+<string-array name="voiceCodecType">
+<item>iPCM-wb</item>
+<item>iSAC</item>
+<item>iSAC-LC</item>
+<item>EG711U</item>
+<item>EG711A</item>
+<item>PCMU</item>
+<item>PCMA</item>
+<item>iLBC</item>
+</string-array>
+</resources>
diff --git a/trunk/src/video_engine/main/test/android_test/res/values/strings.xml b/trunk/src/video_engine/main/test/android_test/res/values/strings.xml
new file mode 100644
index 0000000..6597bec
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/res/values/strings.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>   
+    <string name="app_name">ViEDemo</string>
+    <string name="error">Error</string>
+    <string name="errorCamera">Camera Error</string>
+<string name="codectype_prompt">Choose a codec type</string>
+<string name="demoTitle">Video Engine Android Demo</string>
+<string name="codecType">Codec Type</string>
+<string name="codecSize">Codec Size</string>
+<string name="remoteIp">Remote IP address</string>
+<string name="loopback">Loopback</string>
+<string name="startListen">Start Listen</string>
+<string name="startSend">Start Send</string>
+<string name="startBoth">Start Both</string>
+<string name="enableVoice">Voice</string>
+<string name="enableVideoReceive">Video Receive</string>
+<string name="enableVideoSend">Video Send</string>
+<string name="gSettings">Global Settings</string>
+<string name="vSettings">Video Settings</string>
+<string name="vTxPort">Video Tx Port</string>
+<string name="vRxPort">Video Rx Port</string>
+<string name="aTxPort">Audio Tx Port</string>
+<string name="aRxPort">Audio Rx Port</string>
+<string name="AutoGainControl">Auto Gain Control</string>
+<string name="VoiceActivityDetection">Voice Activity Detection</string>
+<string name="AECM">Acoustic Echo Control</string>
+<string name="NoiseSuppression">Noise Suppression</string>
+<string name="nack">NACK</string>
+<string name="frontCamera">SwitchToFront</string>
+<string name="backCamera">SwitchToBack</string>
+<string name="startCall">StartCall</string>
+<string name="stopCall">StopCall</string>
+<string name="exit">Exit</string>
+<string name="speaker">Speaker</string>
+<string name="h263DecSw">H263 SW Decoder</string>
+</resources>
diff --git a/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java b/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
new file mode 100644
index 0000000..7a00d14
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengineapp;
+
+public interface IViEAndroidCallback {
+  public int UpdateStats(int frameRateI, int bitRateI,
+                         int packetLoss, int frameRateO,
+                         int bitRateO);
+}
diff --git a/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java b/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java
new file mode 100644
index 0000000..9301948
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java
@@ -0,0 +1,803 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengineapp;
+
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.Enumeration;
+
+import org.webrtc.videoengine.ViERenderer;
+
+
+import android.app.TabActivity;
+import android.content.Context;
+import android.content.res.Configuration;
+import android.hardware.SensorManager;
+import android.media.AudioManager;
+import android.os.Bundle;
+import android.os.PowerManager;
+import android.os.PowerManager.WakeLock;
+
+import android.util.Log;
+import android.view.KeyEvent;
+import android.view.Surface;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.Display;
+import android.view.Window;
+import android.view.WindowManager;
+
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.CheckBox;
+
+import android.widget.EditText;
+import android.widget.LinearLayout;
+import android.widget.Spinner;
+import android.widget.TabHost;
+import android.widget.TextView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.TabHost.TabSpec;
+import android.view.OrientationEventListener;
+
+public class ViEAndroidDemo extends TabActivity implements IViEAndroidCallback,
+                                                View.OnClickListener,
+                                                OnItemSelectedListener {
+  private ViEAndroidJavaAPI ViEAndroidAPI = null;
+
+  // remote renderer
+  private SurfaceView remoteSurfaceView = null;
+
+  // local renderer and camera
+  private SurfaceView svLocal = null;
+
+  // channel number
+  private int channel;
+  private int cameraId;
+  private int voiceChannel = -1;
+
+  // flags
+  private boolean viERunning = false;
+  private boolean voERunning = false;
+
+  // debug
+  private boolean enableTrace = false;
+
+  // Constant
+  private static final String LOG_TAG = "*WEBRTCJ*";
+  private static final int RECEIVE_CODEC_FRAMERATE = 30;
+  private static final int SEND_CODEC_FRAMERATE = 15;
+  private static final int INIT_BITRATE = 400;
+
+  private static final int EXPIRARY_YEAR = 2010;
+  private static final int EXPIRARY_MONTH = 10;
+  private static final int EXPIRARY_DAY = 22;
+
+  private int volumeLevel = 204;
+
+  private TabHost mTabHost = null;
+
+  private TabSpec mTabSpecConfig;
+  private TabSpec mTabSpecVideo;
+
+  private LinearLayout mLlRemoteSurface = null;
+  private LinearLayout mLlLocalSurface = null;
+
+  private Button btStartStopCall;
+  private Button btSwitchCamera;
+
+  //Global Settings
+  private CheckBox cbVideoSend;
+  private boolean enableVideoSend = true;
+  private CheckBox cbVideoReceive;
+  private boolean enableVideoReceive = true;
+  private boolean enableVideo = true;
+  private CheckBox cbVoice;
+  private boolean enableVoice = false;
+  private EditText etRemoteIp;
+  private String remoteIp = "10.1.100.68";
+  private CheckBox cbLoopback;
+  private boolean loopbackMode = true;
+
+  //Video settings
+  private Spinner spCodecType;
+  private int codecType = 0;
+  private Spinner spCodecSize;
+  private int codecSizeWidth = 352;
+  private int codecSizeHeight = 288;
+  private TextView etVRxPort;
+  private int receivePortVideo = 11111;
+  private TextView etVTxPort;
+  private int destinationPortVideo = 11111;
+  private CheckBox cbEnableNack;
+  private boolean enableNack = false;
+
+  //Audio settings
+  private Spinner spVoiceCodecType;
+  private int voiceCodecType = 5; //PCMU = 5
+  private TextView etARxPort;
+  private int receivePortVoice = 11113;
+  private TextView etATxPort;
+  private int destinationPortVoice = 11113;
+  private CheckBox cbEnableSpeaker;
+  private boolean enableSpeaker = false;
+  private CheckBox cbEnableAGC;
+  private boolean enableAGC = false;
+  private CheckBox cbEnableAECM;
+  private boolean enableAECM = false;
+  private CheckBox cbEnableNS;
+  private boolean enableNS = false;
+
+  //Stats
+  private TextView tvFrameRateI;
+  private TextView tvBitRateI;
+  private TextView tvPacketLoss;
+  private TextView tvFrameRateO;
+  private TextView tvBitRateO;
+  private int frameRateI;
+  private int bitRateI;
+  private int packetLoss;
+  private int frameRateO;
+  private int bitRateO;
+
+  private WakeLock wakeLock;
+
+  private boolean usingFrontCamera = false;
+
+  private OrientationEventListener orientationListener;
+  int currentOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
+  int currentCameraOrientation = 0;
+
+  //Convert current display orientation to how much the camera should be rotated.
+  public int GetCameraOrientation(int cameraOrientation) {
+    Display display = this.getWindowManager().getDefaultDisplay();
+    int displatyRotation = display.getRotation();
+    int degrees = 0;
+    switch (displatyRotation) {
+      case Surface.ROTATION_0: degrees = 0; break;
+      case Surface.ROTATION_90: degrees = 90; break;
+      case Surface.ROTATION_180: degrees = 180; break;
+      case Surface.ROTATION_270: degrees = 270; break;
+    }
+    int result=0;
+    if(cameraOrientation>180) {
+      result=(cameraOrientation + degrees) % 360;
+    }
+    else {
+      result=(cameraOrientation - degrees+360) % 360;
+    }
+
+    return result;
+  }
+
+  public void onConfigurationChanged(Configuration newConfig) {
+    super.onConfigurationChanged(newConfig);
+    int newRotation = GetCameraOrientation(currentCameraOrientation);
+    if (viERunning){
+      ViEAndroidAPI.SetRotation(cameraId,newRotation);
+    }
+  }
+
+  // Called when the activity is first created.
+  @Override
+  public void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+    requestWindowFeature(Window.FEATURE_NO_TITLE);
+    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
+                         WindowManager.LayoutParams.FLAG_FULLSCREEN);
+
+    PowerManager pm = (PowerManager)this.getSystemService(
+        Context.POWER_SERVICE);
+    wakeLock = pm.newWakeLock(
+        PowerManager.SCREEN_DIM_WAKE_LOCK, LOG_TAG);
+
+    setContentView(R.layout.tabhost);
+    mTabHost = getTabHost();
+
+    //Video tab
+    mTabSpecVideo = mTabHost.newTabSpec("tab_video");
+    mTabSpecVideo.setIndicator("Video");
+    mTabSpecVideo.setContent(R.id.tab_video);
+    mTabHost.addTab(mTabSpecVideo);
+
+    //Shared config tab
+    mTabHost = getTabHost();
+    mTabSpecConfig = mTabHost.newTabSpec("tab_config");
+    mTabSpecConfig.setIndicator("Config");
+    mTabSpecConfig.setContent(R.id.tab_config);
+    mTabHost.addTab(mTabSpecConfig);
+
+    TabSpec mTabv;
+    mTabv = mTabHost.newTabSpec("tab_vconfig");
+    mTabv.setIndicator("V. Config");
+    mTabv.setContent(R.id.tab_vconfig);
+    mTabHost.addTab(mTabv);
+    TabSpec mTaba;
+    mTaba = mTabHost.newTabSpec("tab_aconfig");
+    mTaba.setIndicator("A. Config");
+    mTaba.setContent(R.id.tab_aconfig);
+    mTabHost.addTab(mTaba);
+    TabSpec mTabs;
+    mTabs = mTabHost.newTabSpec("tab_stats");
+    mTabs.setIndicator("Stats");
+    mTabs.setContent(R.id.tab_stats);
+    mTabHost.addTab(mTabs);
+
+    int childCount = mTabHost.getTabWidget().getChildCount();
+    for (int i=0; i<childCount; i++)
+      mTabHost.getTabWidget().getChildAt(i).getLayoutParams().height = 50;
+
+    orientationListener =
+        new OrientationEventListener(this,SensorManager.SENSOR_DELAY_UI) {
+        public void onOrientationChanged (int orientation) {
+          if (orientation != ORIENTATION_UNKNOWN) {
+            currentOrientation = orientation;
+          }
+        }
+      };
+    orientationListener.enable ();
+
+    StartMain();
+    return;
+  }
+
+  private String GetLocalIpAddress() {
+    String localIPs = "";
+    try {
+      for (Enumeration<NetworkInterface> en = NetworkInterface
+               .getNetworkInterfaces(); en.hasMoreElements();) {
+        NetworkInterface intf = en.nextElement();
+        for (Enumeration<InetAddress> enumIpAddr = intf
+                 .getInetAddresses(); enumIpAddr.hasMoreElements();) {
+          InetAddress inetAddress = enumIpAddr.nextElement();
+          if (!inetAddress.isLoopbackAddress()) {
+            localIPs += inetAddress.getHostAddress().toString() + " ";
+            //set the remote ip address the same as
+            // the local ip address of the last netif
+            remoteIp = inetAddress.getHostAddress().toString();
+          }
+        }
+      }
+    } catch (SocketException ex) {
+      Log.e(LOG_TAG, ex.toString());
+    }
+    return localIPs;
+  }
+
+  @Override
+  public boolean onKeyDown(int keyCode, KeyEvent event) {
+    if (keyCode == KeyEvent.KEYCODE_BACK) {
+      if (viERunning) {
+        StopAll();
+        StartMain();
+      }
+      finish();
+      return true;
+    }
+    return super.onKeyDown(keyCode, event);
+  }
+
+  private void StopAll() {
+    if (ViEAndroidAPI != null) {
+      if (voERunning) {
+        voERunning = false;
+        StopVoiceEngine();
+      }
+
+      if (viERunning) {
+        viERunning = false;
+        ViEAndroidAPI.StopRender(channel);
+        ViEAndroidAPI.StopReceive(channel);
+        ViEAndroidAPI.StopSend(channel);
+        ViEAndroidAPI.RemoveRemoteRenderer(channel);
+        // stop the camera
+        ViEAndroidAPI.StopCamera(cameraId);
+        ViEAndroidAPI.Terminate();
+        mLlRemoteSurface.removeView(remoteSurfaceView);
+        mLlLocalSurface.removeView(svLocal);
+        remoteSurfaceView = null;
+
+        svLocal = null;
+      }
+    }
+  }
+
+  private void StartMain() {
+    mTabHost.setCurrentTab(0);
+
+    mLlRemoteSurface = (LinearLayout) findViewById(R.id.llRemoteView);
+    mLlLocalSurface = (LinearLayout) findViewById(R.id.llLocalView);
+
+    if (null == ViEAndroidAPI)
+      ViEAndroidAPI = new ViEAndroidJavaAPI(this);
+
+    //setContentView(R.layout.main);
+    btSwitchCamera = (Button)findViewById(R.id.btSwitchCamera);
+    btSwitchCamera.setOnClickListener(this);
+    btStartStopCall = (Button)findViewById(R.id.btStartStopCall);
+    btStartStopCall.setOnClickListener(this);
+    findViewById(R.id.btExit).setOnClickListener(this);
+
+    // cleaning
+    remoteSurfaceView = null;
+    svLocal = null;
+
+    // init UI
+    ArrayAdapter<?> adapter;
+
+    int resource = android.R.layout.simple_spinner_item;
+    int dropdownRes = android.R.layout.simple_spinner_dropdown_item;
+
+    // video codec
+    spCodecType = (Spinner) findViewById(R.id.spCodecType);
+    adapter = ArrayAdapter.createFromResource(this,
+                                              R.array.codectype,
+                                              resource);
+    adapter.setDropDownViewResource(dropdownRes);
+    spCodecType.setAdapter(adapter);
+    spCodecType.setSelection(codecType);
+    spCodecType.setOnItemSelectedListener(this);
+
+    // voice codec
+    spVoiceCodecType = (Spinner) findViewById(R.id.spVoiceCodecType);
+    adapter = ArrayAdapter.createFromResource(this, R.array.voiceCodecType,
+                                              resource);
+    adapter.setDropDownViewResource(dropdownRes);
+    spVoiceCodecType.setAdapter(adapter);
+    spVoiceCodecType.setSelection(voiceCodecType);
+    spVoiceCodecType.setOnItemSelectedListener(this);
+
+    spCodecSize = (Spinner) findViewById(R.id.spCodecSize);
+    adapter = ArrayAdapter.createFromResource(this, R.array.codecSize,
+                                              resource);
+    adapter.setDropDownViewResource(dropdownRes);
+    spCodecSize.setAdapter(adapter);
+    spCodecSize.setOnItemSelectedListener(this);
+
+    String ip = GetLocalIpAddress();
+    TextView tvLocalIp = (TextView) findViewById(R.id.tvLocalIp);
+    tvLocalIp.setText("Local IP address - " + ip);
+
+    etRemoteIp = (EditText) findViewById(R.id.etRemoteIp);
+    etRemoteIp.setText(remoteIp);
+
+    cbLoopback = (CheckBox) findViewById(R.id.cbLoopback);
+    cbLoopback.setChecked(loopbackMode);
+
+    cbVoice = (CheckBox) findViewById(R.id.cbVoice);
+    cbVoice.setChecked(enableVoice);
+
+    cbVideoSend = (CheckBox) findViewById(R.id.cbVideoSend);
+    cbVideoSend.setChecked(enableVideoSend);
+    cbVideoReceive = (CheckBox) findViewById(R.id.cbVideoReceive);
+    cbVideoReceive.setChecked(enableVideoReceive);
+
+    etVTxPort = (EditText) findViewById(R.id.etVTxPort);
+    etVTxPort.setText(Integer.toString(destinationPortVideo));
+
+    etVRxPort = (EditText) findViewById(R.id.etVRxPort);
+    etVRxPort.setText(Integer.toString(receivePortVideo));
+
+    etATxPort = (EditText) findViewById(R.id.etATxPort);
+    etATxPort.setText(Integer.toString(destinationPortVoice));
+
+    etARxPort = (EditText) findViewById(R.id.etARxPort);
+    etARxPort.setText(Integer.toString(receivePortVoice));
+
+    cbEnableNack = (CheckBox) findViewById(R.id.cbNack);
+    cbEnableNack.setChecked(enableNack);
+
+    cbEnableSpeaker = (CheckBox) findViewById(R.id.cbSpeaker);
+    cbEnableSpeaker.setChecked(enableSpeaker);
+    cbEnableAGC = (CheckBox) findViewById(R.id.cbAutoGainControl);
+    cbEnableAGC.setChecked(enableAGC);
+    cbEnableAECM = (CheckBox) findViewById(R.id.cbAECM);
+    cbEnableAECM.setChecked(enableAECM);
+    cbEnableNS = (CheckBox) findViewById(R.id.cbNoiseSuppression);
+    cbEnableNS.setChecked(enableNS);
+
+    cbEnableNack.setOnClickListener(this);
+    cbEnableSpeaker.setOnClickListener(this);
+    cbEnableAECM.setOnClickListener(this);
+
+    cbEnableAGC.setOnClickListener(this);
+    cbEnableNS.setOnClickListener(this);
+
+    tvFrameRateI = (TextView) findViewById(R.id.tvFrameRateI);
+    tvBitRateI = (TextView) findViewById(R.id.tvBitRateI);
+    tvPacketLoss = (TextView) findViewById(R.id.tvPacketLoss);
+    tvFrameRateO = (TextView) findViewById(R.id.tvFrameRateO);
+    tvBitRateO = (TextView) findViewById(R.id.tvBitRateO);
+
+  }
+
+  @Override
+  protected void onPause() {
+    super.onPause();
+    // if (remoteSurfaceView != null)
+    // glSurfaceView.onPause();
+  }
+
+  @Override
+  protected void onResume() {
+    super.onResume();
+    // if (glSurfaceView != null)
+    // glSurfaceView.onResume();
+  }
+
+  private void StartCall() {
+    int ret = 0;
+
+    if (enableVoice) {
+      SetupVoE();
+      StartVoiceEngine();
+    }
+
+    if (enableVideo) {
+      if (enableVideoSend) {
+        // camera and preview surface
+        svLocal = ViERenderer.CreateLocalRenderer(this);
+      }
+
+      ret = ViEAndroidAPI.GetVideoEngine();
+      ret = ViEAndroidAPI.Init(enableTrace);
+      channel = ViEAndroidAPI.CreateChannel(voiceChannel);
+      ret = ViEAndroidAPI.SetLocalReceiver(channel,
+                                           receivePortVideo);
+      ret = ViEAndroidAPI.SetSendDestination(channel,
+                                             destinationPortVideo,
+                                             remoteIp.getBytes());
+
+      if (enableVideoReceive) {
+        if(android.os.Build.MANUFACTURER.equals("samsung")) {
+          // Create an Open GL renderer
+          remoteSurfaceView = ViERenderer.CreateRenderer(this, true);
+          ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+        }
+        else {
+          remoteSurfaceView = ViERenderer.CreateRenderer(this, false);
+          ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+        }
+
+        ret = ViEAndroidAPI.SetReceiveCodec(channel,
+                                            codecType,
+                                            INIT_BITRATE,
+                                            codecSizeWidth,
+                                            codecSizeHeight,
+                                            RECEIVE_CODEC_FRAMERATE);
+        ret = ViEAndroidAPI.StartRender(channel);
+        ret = ViEAndroidAPI.StartReceive(channel);
+      }
+
+      if (enableVideoSend) {
+        currentCameraOrientation =
+            ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
+        ret = ViEAndroidAPI.SetSendCodec(channel,
+                                         codecType,
+                                         INIT_BITRATE,
+                                         codecSizeWidth,
+                                         codecSizeHeight,
+                                         SEND_CODEC_FRAMERATE);
+        int cameraId = ViEAndroidAPI.StartCamera(channel, usingFrontCamera?1:0);
+
+        if(cameraId>0) {
+          cameraId = cameraId;
+          int neededRotation = GetCameraOrientation(currentCameraOrientation);
+          ViEAndroidAPI.SetRotation(cameraId,neededRotation);
+        }
+        else {
+          ret=cameraId;
+        }
+        ret = ViEAndroidAPI.StartSend(channel);
+      }
+
+      ret = ViEAndroidAPI.SetCallback(channel, this);
+
+      if (enableVideoSend) {
+        if (mLlLocalSurface != null)
+          mLlLocalSurface.addView(svLocal);
+      }
+
+      if (enableVideoReceive) {
+        if (mLlRemoteSurface != null)
+          mLlRemoteSurface.addView(remoteSurfaceView);
+      }
+
+      viERunning = true;
+    }
+
+  }
+
+  private void DemoLog(String msg) {
+    Log.d("*WEBRTC*", msg);
+  }
+
+  private void StopVoiceEngine() {
+    // Stop send
+    if (0 != ViEAndroidAPI.VoE_StopSend(voiceChannel)) {
+      DemoLog("VoE stop send failed");
+    }
+
+    // Stop listen
+    if (0 != ViEAndroidAPI.VoE_StopListen(voiceChannel)) {
+      DemoLog("VoE stop listen failed");
+    }
+
+    // Stop playout
+    if (0 != ViEAndroidAPI.VoE_StopPlayout(voiceChannel)) {
+      DemoLog("VoE stop playout failed");
+    }
+
+    if (0 != ViEAndroidAPI.VoE_DeleteChannel(voiceChannel)) {
+      DemoLog("VoE delete channel failed");
+    }
+    voiceChannel=-1;
+
+    // Terminate
+    if (0 != ViEAndroidAPI.VoE_Terminate()) {
+      DemoLog("VoE terminate failed");
+    }
+  }
+
+  private void SetupVoE() {
+    // Create VoiceEngine
+    // Error logging is done in native API wrapper
+    ViEAndroidAPI.VoE_Create(this);
+
+    // Initialize
+    if (0 != ViEAndroidAPI.VoE_Init(enableTrace)) {
+      DemoLog("VoE init failed");
+    }
+
+    // Create channel
+    voiceChannel = ViEAndroidAPI.VoE_CreateChannel();
+    if (0 != voiceChannel) {
+      DemoLog("VoE create channel failed");
+    }
+
+    // Suggest using the voice-call audio stream for hardware volume controls.
+    setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+  }
+
+  private int StartVoiceEngine() {
+    // Set local receiver
+    if (0 != ViEAndroidAPI.VoE_SetLocalReceiver(voiceChannel,
+                                                receivePortVoice)) {
+      DemoLog("VoE set local receiver failed");
+    }
+
+    if (0 != ViEAndroidAPI.VoE_StartListen(voiceChannel)) {
+      DemoLog("VoE start listen failed");
+    }
+
+    // Route audio
+    RouteAudio(enableSpeaker);
+
+    // set volume to default value
+    if (0 != ViEAndroidAPI.VoE_SetSpeakerVolume(volumeLevel)) {
+      DemoLog("VoE set speaker volume failed");
+    }
+
+    // Start playout
+    if (0 != ViEAndroidAPI.VoE_StartPlayout(voiceChannel)) {
+      DemoLog("VoE start playout failed");
+    }
+
+    if (0 != ViEAndroidAPI.VoE_SetSendDestination(voiceChannel,
+                                                  destinationPortVoice,
+                                                  remoteIp)) {
+      DemoLog("VoE set send  destination failed");
+    }
+
+    // 0 = iPCM-wb, 5 = PCMU
+    if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
+      DemoLog("VoE set send codec failed");
+    }
+
+    if (0 != ViEAndroidAPI.VoE_SetECStatus(enableAECM, 5, 0, 28)){
+      DemoLog("VoE set EC Status failed");
+    }
+
+    if (0 != ViEAndroidAPI.VoE_StartSend(voiceChannel)) {
+      DemoLog("VoE start send failed");
+    }
+
+    voERunning = true;
+    return 0;
+  }
+
+  private void RouteAudio(boolean enableSpeaker) {
+    int sdkVersion = Integer.parseInt(android.os.Build.VERSION.SDK);
+    if (sdkVersion >= 5) {
+      AudioManager am =
+          (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
+      am.setSpeakerphoneOn(enableSpeaker);
+    }
+    else {
+      if (0 != ViEAndroidAPI.VoE_SetLoudspeakerStatus(enableSpeaker)) {
+        DemoLog("VoE set louspeaker status failed");
+      }
+    }
+  }
+
+  public void onClick(View arg0) {
+    switch (arg0.getId()) {
+      case R.id.btSwitchCamera:
+        if (usingFrontCamera ){
+          btSwitchCamera.setText(R.string.frontCamera);
+        }
+        else {
+          btSwitchCamera.setText(R.string.backCamera);
+        }
+        usingFrontCamera = !usingFrontCamera;
+
+        if (viERunning) {
+          currentCameraOrientation =
+              ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
+          ViEAndroidAPI.StopCamera(cameraId);
+          mLlLocalSurface.removeView(svLocal);
+
+          ViEAndroidAPI.StartCamera(channel,usingFrontCamera?1:0);
+          mLlLocalSurface.addView(svLocal);
+          int neededRotation = GetCameraOrientation(currentCameraOrientation);
+          ViEAndroidAPI.SetRotation(cameraId, neededRotation);
+        }
+        break;
+      case R.id.btStartStopCall:
+        ReadSettings();
+        if (viERunning || voERunning) {
+          StopAll();
+          wakeLock.release();//release the wake lock
+          btStartStopCall.setText(R.string.startCall);
+        }
+        else if (enableVoice || enableVideo){
+          StartCall();
+          wakeLock.acquire();// keep the screen on during the call
+          btStartStopCall.setText(R.string.stopCall);
+        }
+        break;
+      case R.id.btExit:
+        StopAll();
+        finish();
+        break;
+      case R.id.cbNack:
+        enableNack  = cbEnableNack.isChecked();
+        if (viERunning) {
+          ViEAndroidAPI.EnableNACK(channel, enableNack);
+        }
+        break;
+      case R.id.cbSpeaker:
+        enableSpeaker = cbEnableSpeaker.isChecked();
+        if (voERunning){
+          RouteAudio(enableSpeaker);
+        }
+        break;
+      case R.id.cbAutoGainControl:
+        enableAGC=cbEnableAGC.isChecked();
+        if(voERunning) {
+          //Enable AGC default mode.
+          ViEAndroidAPI.VoE_SetAGCStatus(enableAGC,1);
+        }
+        break;
+      case R.id.cbNoiseSuppression:
+        enableNS=cbEnableNS.isChecked();
+        if(voERunning) {
+          //Enable NS default mode.
+          ViEAndroidAPI.VoE_SetNSStatus(enableNS, 1);
+        }
+        break;
+      case R.id.cbAECM:
+        enableAECM = cbEnableAECM.isChecked();
+        if (voERunning) {
+          //EC_AECM=5
+          //AECM_DEFAULT=0
+          ViEAndroidAPI.VoE_SetECStatus(enableAECM, 5, 0, 28);
+        }
+        break;
+    }
+  }
+
+  private void ReadSettings() {
+    codecType = spCodecType.getSelectedItemPosition();
+    voiceCodecType = spVoiceCodecType.getSelectedItemPosition();
+
+    String sCodecSize = spCodecSize.getSelectedItem().toString();
+    String[] aCodecSize = sCodecSize.split("x");
+    codecSizeWidth = Integer.parseInt(aCodecSize[0]);
+    codecSizeHeight = Integer.parseInt(aCodecSize[1]);
+
+    loopbackMode  = cbLoopback.isChecked();
+    enableVoice  = cbVoice.isChecked();
+    enableVideoSend = cbVideoSend.isChecked();
+    enableVideoReceive = cbVideoReceive.isChecked();
+    enableVideo = enableVideoSend || enableVideoReceive;
+
+    destinationPortVideo =
+        Integer.parseInt(etVTxPort.getText().toString());
+    receivePortVideo =
+        Integer.parseInt(etVRxPort.getText().toString());
+    destinationPortVoice =
+        Integer.parseInt(etATxPort.getText().toString());
+    receivePortVoice =
+        Integer.parseInt(etARxPort.getText().toString());
+
+    enableNack  = cbEnableNack.isChecked();
+    enableSpeaker  = cbEnableSpeaker.isChecked();
+    enableAGC  = cbEnableAGC.isChecked();
+    enableAECM  = cbEnableAECM.isChecked();
+    enableNS  = cbEnableNS.isChecked();
+
+    if (loopbackMode)
+      remoteIp = "127.0.0.1";
+    else
+      remoteIp = etRemoteIp.getText().toString();
+  }
+
+  public void onItemSelected(AdapterView<?> adapterView, View view,
+                             int position, long id) {
+    if ((adapterView==spCodecType || adapterView==spCodecSize) &&
+        viERunning) {
+      ReadSettings();
+      //change the codectype
+      if (enableVideoReceive) {
+        if (0 !=ViEAndroidAPI.SetReceiveCodec(channel, codecType,
+                                              INIT_BITRATE, codecSizeWidth,
+                                              codecSizeHeight,
+                                              RECEIVE_CODEC_FRAMERATE))
+          DemoLog("ViE set receive codec failed");
+      }
+      if (enableVideoSend) {
+        if (0!=ViEAndroidAPI.SetSendCodec(channel, codecType, INIT_BITRATE,
+                                          codecSizeWidth,
+                                          codecSizeHeight,
+                                          SEND_CODEC_FRAMERATE))
+          DemoLog("ViE set send codec failed");
+      }
+    }
+    else if ((adapterView==spVoiceCodecType) && voERunning) {
+      //change voice engine codec
+      ReadSettings();
+      if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
+        DemoLog("VoE set send codec failed");
+      }
+    }
+  }
+
+  public void onNothingSelected(AdapterView<?> arg0) {
+    DemoLog("No setting selected");
+  }
+
+  public int UpdateStats(int in_frameRateI, int in_bitRateI, int in_packetLoss,
+                         int in_frameRateO, int in_bitRateO) {
+    frameRateI = in_frameRateI;
+    bitRateI = in_bitRateI;
+    packetLoss = in_packetLoss;
+    frameRateO = in_frameRateO;
+    bitRateO = in_bitRateO;
+    runOnUiThread(new Runnable() {
+        public void run() {
+          tvFrameRateI.setText("Incoming FrameRate - " +
+                               Integer.toString(frameRateI));
+          tvBitRateI.setText("Incoming BitRate - " +
+                             Integer.toString(bitRateI));
+          tvPacketLoss.setText("Incoming Packet Loss - " +
+                               Integer.toString(packetLoss));
+          tvFrameRateO.setText("Send FrameRate - " +
+                               Integer.toString(frameRateO));
+          tvBitRateO.setText("Send BitRate - " +
+                             Integer.toString(bitRateO));
+        }
+      });
+    return 0;
+  }
+}
diff --git a/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java b/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
new file mode 100644
index 0000000..701f8bd
--- /dev/null
+++ b/trunk/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
@@ -0,0 +1,138 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengineapp;
+
+import android.app.Activity;
+import android.content.Context;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+public class ViEAndroidJavaAPI {
+
+  public ViEAndroidJavaAPI(Context context) {
+    Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI...");
+    System.loadLibrary("webrtc-video-demo-jni");
+
+    Log.d("*WEBRTCJ*", "Calling native init...");
+    if (!NativeInit(context)) {
+      Log.e("*WEBRTCJ*", "Native init failed");
+      throw new RuntimeException("Native init failed");
+    }
+    else {
+      Log.d("*WEBRTCJ*", "Native init successful");
+    }
+    String a = "";
+    a.getBytes();
+  }
+
+  // API Native
+  private native boolean NativeInit(Context context);
+
+  // Video Engine API
+  // Initialization and Termination functions
+  public native int GetVideoEngine();
+  public native int Init(boolean enableTrace);
+  public native int Terminate();
+
+
+  public native int StartSend(int channel);
+  public native int StopRender(int channel);
+  public native int StopSend(int channel);
+  public native int StartReceive(int channel);
+  public native int StopReceive(int channel);
+  // Channel functions
+  public native int CreateChannel(int voiceChannel);
+  // Receiver & Destination functions
+  public native int SetLocalReceiver(int channel, int port);
+  public native int SetSendDestination(int channel, int port, byte ipadr[]);
+  // Codec
+  public native int SetReceiveCodec(int channel, int codecNum,
+                                    int intbitRate, int width,
+                                    int height, int frameRate);
+  public native int SetSendCodec(int channel, int codecNum,
+                                 int intbitRate, int width,
+                                 int height, int frameRate);
+  // Rendering
+  public native int AddRemoteRenderer(int channel, Object glSurface);
+  public native int RemoveRemoteRenderer(int channel);
+  public native int StartRender(int channel);
+
+  // Capture
+  public native int StartCamera(int channel, int cameraNum);
+  public native int StopCamera(int cameraId);
+  public native int GetCameraOrientation(int cameraNum);
+  public native int SetRotation(int cameraId,int degrees);
+
+  // NACK
+  public native int EnableNACK(int channel, boolean enable);
+
+  //PLI for H.264
+  public native int EnablePLI(int channel, boolean enable);
+
+  // Enable stats callback
+  public native int SetCallback(int channel, IViEAndroidCallback callback);
+
+  // Voice Engine API
+  // Create and Delete functions
+  public native boolean VoE_Create(Activity context);
+  public native boolean VoE_Delete();
+
+  // Initialization and Termination functions
+  public native int VoE_Authenticate(String key);
+  public native int VoE_Init(boolean enableTrace);
+  public native int VoE_Terminate();
+
+  // Channel functions
+  public native int VoE_CreateChannel();
+  public native int VoE_DeleteChannel(int channel);
+
+  // Receiver & Destination functions
+  public native int VoE_SetLocalReceiver(int channel, int port);
+  public native int VoE_SetSendDestination(int channel, int port,
+                                           String ipaddr);
+
+  // Media functions
+  public native int VoE_StartListen(int channel);
+  public native int VoE_StartPlayout(int channel);
+  public native int VoE_StartSend(int channel);
+  public native int VoE_StopListen(int channel);
+  public native int VoE_StopPlayout(int channel);
+  public native int VoE_StopSend(int channel);
+
+  // Volume
+  public native int VoE_SetSpeakerVolume(int volume);
+
+  // Hardware
+  public native int VoE_SetLoudspeakerStatus(boolean enable);
+
+  // Playout file locally
+  public native int VoE_StartPlayingFileLocally(int channel,
+                                                String fileName,
+                                                boolean loop);
+  public native int VoE_StopPlayingFileLocally(int channel);
+
+  // Play file as microphone
+  public native int VoE_StartPlayingFileAsMicrophone(int channel,
+                                                     String fileName,
+                                                     boolean loop);
+  public native int VoE_StopPlayingFileAsMicrophone(int channel);
+
+  // Codec-setting functions
+  public native int VoE_NumOfCodecs();
+  public native int VoE_SetSendCodec(int channel, int index);
+
+  // VE functions
+  public native int VoE_SetECStatus(boolean enable, int mode,
+                                    int AESmode, int AESattenuation);
+  public native int VoE_SetAGCStatus(boolean enable, int mode);
+  public native int VoE_SetNSStatus(boolean enable, int mode);
+}
diff --git a/trunk/src/video_engine/test/auto_test/android/.classpath b/trunk/src/video_engine/test/auto_test/android/.classpath
new file mode 100644
index 0000000..f2adf55
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/.classpath
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="lib" path="libs/VideoEngine_android_java.jar"/>

+	<classpathentry kind="lib" path="libs/VoiceEngine_android_java.jar"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/trunk/src/video_engine/test/auto_test/android/Android.mk b/trunk/src/video_engine/test/auto_test/android/Android.mk
new file mode 100644
index 0000000..2b20e80
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/Android.mk
@@ -0,0 +1,39 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+MY_CAPTURE_FOLDER := ../../../../modules/video_capture/main/source
+MY_CAPTURE_JAVA_FOLDER := Android/java/org/webrtc/videoengine
+MY_CAPTURE_PATH := $(MY_CAPTURE_FOLDER)/$(MY_CAPTURE_JAVA_FOLDER)
+
+MY_RENDER_FOLDER := ../../../../modules/video_render/main/source
+MY_RENDER_JAVA_FOLDER := Android/java/org/webrtc/videoengine
+MY_RENDER_PATH := $(MY_RENDER_FOLDER)/$(MY_RENDER_JAVA_FOLDER)
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+    src/org/webrtc/vieautotest/ViEAutotest.java \
+    $(MY_CAPTURE_PATH)/CaptureCapabilityAndroid.java \
+    $(MY_CAPTURE_PATH)/VideoCaptureAndroid.java \
+    $(MY_CAPTURE_PATH)/VideoCaptureDeviceInfoAndroid.java \
+    $(MY_RENDER_PATH)/ViEAndroidGLES20.java \
+    $(MY_RENDER_PATH)/ViERenderer.java \
+    $(MY_RENDER_PATH)/ViESurfaceRenderer.java 
+
+LOCAL_PACKAGE_NAME := webrtc-video-autotest
+LOCAL_CERTIFICATE := platform
+
+LOCAL_JNI_SHARED_LIBRARIES := libwebrtc-video-autotest-jni
+
+include $(BUILD_PACKAGE)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/trunk/src/video_engine/test/auto_test/android/AndroidManifest.xml b/trunk/src/video_engine/test/auto_test/android/AndroidManifest.xml
new file mode 100644
index 0000000..11b3e27
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/AndroidManifest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  android:versionCode="1"
+	  android:versionName="1.0" package="org.webrtc.vieautotest">
+  <application android:label="@string/app_name"
+	       android:debuggable="true"
+	       android:icon="@drawable/logo">
+    <activity android:label="@string/app_name"
+	      android:name="ViEAutotest">
+      <intent-filter>
+        <action android:name="android.intent.action.MAIN" />
+        <category android:name="android.intent.category.LAUNCHER" />
+      </intent-filter>
+    </activity>
+
+  </application>
+  <uses-sdk android:minSdkVersion="3" android:targetSdkVersion="8" />
+  <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+  <uses-feature android:name="android.hardware.camera" />
+  <uses-feature android:name="android.hardware.camera.autofocus" />
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+  <uses-permission android:name="android.permission.RECORD_AUDIO" />
+  <uses-permission android:name="android.permission.INTERNET" />
+  <uses-permission android:name="android.permission.WAKE_LOCK" />
+  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+</manifest> 
diff --git a/trunk/src/video_engine/test/auto_test/android/default.properties b/trunk/src/video_engine/test/auto_test/android/default.properties
new file mode 100644
index 0000000..2ad44a4
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target.

+target=android-9

diff --git a/trunk/src/video_engine/test/auto_test/android/gen/org/webrtc/vieautotest/R.java b/trunk/src/video_engine/test/auto_test/android/gen/org/webrtc/vieautotest/R.java
new file mode 100644
index 0000000..4b46020
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/gen/org/webrtc/vieautotest/R.java
@@ -0,0 +1,37 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.vieautotest;

+

+public final class R {

+    public static final class array {

+        public static final int subtest_array=0x7f050001;

+        public static final int test_array=0x7f050000;

+    }

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int logo=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f060004;

+        public static final int LocalView=0x7f060001;

+        public static final int RemoteView=0x7f060000;

+        public static final int subtestSpinner=0x7f060003;

+        public static final int testSpinner=0x7f060002;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int SpinnerSubtest=0x7f040004;

+        public static final int SpinnerTitle=0x7f040003;

+        public static final int TitleName=0x7f040001;

+        public static final int app_name=0x7f040000;

+        public static final int run_button=0x7f040002;

+    }

+}

diff --git a/trunk/src/video_engine/test/auto_test/android/jni/Android.mk b/trunk/src/video_engine/test/auto_test/android/jni/Android.mk
new file mode 100644
index 0000000..ac43975
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/jni/Android.mk
@@ -0,0 +1,77 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.

+#

+#  Use of this source code is governed by a BSD-style license

+#  that can be found in the LICENSE file in the root of the source

+#  tree. An additional intellectual property rights grant can be found

+#  in the file PATENTS.  All contributing project authors may

+#  be found in the AUTHORS file in the root of the source tree.

+

+LOCAL_PATH := $(call my-dir)

+

+# the follow two lines are for NDK build

+INTERFACES_PATH := $(LOCAL_PATH)/../../../../../../build/interface

+LIBS_PATH := $(LOCAL_PATH)/../../../../../../build/libraries

+

+include $(CLEAR_VARS)

+

+LOCAL_MODULE_TAGS := tests

+LOCAL_MODULE := libwebrtc-video-autotest-jni

+LOCAL_CPP_EXTENSION := .cc

+LOCAL_SRC_FILES := \

+    vie_autotest_jni.cc \

+    ../../source/vie_autotest_android.cc \

+    ../../source/vie_autotest.cc \

+    ../../source/vie_autotest_base.cc \

+    ../../source/vie_autotest_capture.cc \

+    ../../source/vie_autotest_codec.cc \

+    ../../source/vie_autotest_encryption.cc \

+    ../../source/vie_autotest_file.cc \

+    ../../source/vie_autotest_image_process.cc \

+    ../../source/vie_autotest_loopback.cc \

+    ../../source/vie_autotest_network.cc \

+    ../../source/vie_autotest_render.cc \

+    ../../source/vie_autotest_rtp_rtcp.cc \

+    ../../source/tb_I420_codec.cc \

+    ../../source/tb_capture_device.cc \

+    ../../source/tb_external_transport.cc \

+    ../../source/tb_interfaces.cc \

+    ../../source/tb_video_channel.cc 

+

+LOCAL_CFLAGS := \

+    '-DWEBRTC_TARGET_PC' \

+    '-DWEBRTC_ANDROID' \

+    '-DWEBRTC_ANDROID_OPENSLES'

+

+LOCAL_C_INCLUDES := \

+    external/gtest/include \

+    $(LOCAL_PATH)/../interface \

+    $(LOCAL_PATH)/../../interface \

+    $(LOCAL_PATH)/../../../interface \

+    $(LOCAL_PATH)/../../.. \

+    $(LOCAL_PATH)/../../../../.. \

+    $(LOCAL_PATH)/../../../../../common_video/interface \

+    $(LOCAL_PATH)/../../../../../common_video/vplib/main/interface \

+    $(LOCAL_PATH)/../../../../../modules/interface \

+    $(LOCAL_PATH)/../../../../../modules/video_capture/main/interface \

+    $(LOCAL_PATH)/../../../../../modules/video_capture/main/source \

+    $(LOCAL_PATH)/../../../../../modules/video_coding/codecs/interface \

+    $(LOCAL_PATH)/../../../../../modules/video_render/main/interface \

+    $(LOCAL_PATH)/../../../../../voice_engine/main/interface \

+    $(LOCAL_PATH)/../../../../../system_wrappers/interface 

+

+LOCAL_PRELINK_MODULE := false

+

+LOCAL_SHARED_LIBRARIES := \

+    libutils \

+    libstlport \

+    libandroid \

+    libwebrtc \

+    libGLESv2

+

+# the following line is for NDK build

+LOCAL_LDLIBS     := $(LIBS_PATH)/VideoEngine_android_gcc.a -llog -lgcc 

+

+ifndef NDK_ROOT

+include external/stlport/libstlport.mk

+endif

+include $(BUILD_SHARED_LIBRARY)

diff --git a/trunk/src/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h b/trunk/src/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h
new file mode 100644
index 0000000..68ec601
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_vieautotest_ViEAutotest */
+
+#ifndef _Included_org_webrtc_vieautotest_ViEAutotest
+#define _Included_org_webrtc_vieautotest_ViEAutotest
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/*
+ * Class:     org_webrtc_vieautotest_ViEAutotest
+ * Method:    RunTest
+ * Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2
+(JNIEnv *, jobject, jint, jint, jobject, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/trunk/src/video_engine/test/auto_test/android/jni/vie_autotest_jni.cc b/trunk/src/video_engine/test/auto_test/android/jni/vie_autotest_jni.cc
new file mode 100644
index 0000000..2e55283
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/jni/vie_autotest_jni.cc
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <android/log.h>
+#include <pthread.h>
+#include <unistd.h>
+
+#include "org_webrtc_vieautotest_vie_autotest.h"
+
+#include "vie_autotest_android.h"
+
+#define WEBRTC_LOG_TAG "*WEBRTCN*"
+
+// VideoEngine data struct
+typedef struct
+{
+    JavaVM* jvm;
+} VideoEngineData;
+
+// Global variables
+JavaVM* webrtcGlobalVM;
+
+// Global variables visible in this file
+static VideoEngineData vieData;
+
+// "Local" functions (i.e. not Java accessible)
+#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
+
+static bool GetSubAPIs(VideoEngineData& vieData);
+static bool ReleaseSubAPIs(VideoEngineData& vieData);
+
+//
+// General functions
+//
+
+// JNI_OnLoad
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
+  if (!vm) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad did not receive a valid VM pointer");
+    return -1;
+  }
+
+  JNIEnv* env;
+  if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                           JNI_VERSION_1_4)) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad could not get JNI env");
+    return -1;
+  }
+
+  // Init ViE data
+  vieData.jvm = vm;
+
+  return JNI_VERSION_1_4;
+}
+
+// Class:     org_webrtc_vieautotest_ViEAutotest
+// Method:    RunTest
+// Signature: (IILandroid/opengl/GLSurfaceView;Landroid/opengl/GLSurfaceView;)I
+JNIEXPORT jint JNICALL
+Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_opengl_GLSurfaceView_2Landroid_opengl_GLSurfaceView_2(
+    JNIEnv* env,
+    jobject context,
+    jint testType,
+    jint subtestType,
+    jobject glView1,
+    jobject glView2)
+{
+  int numErrors = -1;
+  numErrors = ViEAutoTestAndroid::RunAutotest(testType, subtestType, glView1,
+                                              glView2, vieData.jvm, env,
+                                              context);
+  return numErrors;
+}
+
+// Class:     org_webrtc_vieautotest_ViEAutotest
+// Method:    RunTest
+// Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
+JNIEXPORT jint JNICALL
+Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2(
+    JNIEnv* env,
+    jobject context,
+    jint testType,
+    jint subtestType,
+    jobject surfaceHolder1,
+    jobject surfaceHolder2)
+{
+  int numErrors = -1;
+  numErrors = ViEAutoTestAndroid::RunAutotest(testType, subtestType,
+                                              surfaceHolder1, surfaceHolder2,
+                                              vieData.jvm, env, context);
+  return numErrors;
+}
+
+//
+// Local functions
+//
+
+bool GetSubAPIs(VideoEngineData& vieData) {
+  bool retVal = true;
+  //vieData.base = ViEBase::GetInterface(vieData.vie);
+  //if (vieData.base == NULL)
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get Base API");
+    retVal = false;
+  }
+  return retVal;
+}
+
+bool ReleaseSubAPIs(VideoEngineData& vieData) {
+  bool releaseOk = true;
+  //if (vieData.base)
+  {
+    //if (vieData.base->Release() != 0)
+    if (false) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release base sub-API failed");
+      releaseOk = false;
+    }
+    else {
+      //vieData.base = NULL;
+    }
+  }
+
+  return releaseOk;
+}
diff --git a/trunk/src/video_engine/test/auto_test/android/res/drawable/logo.png b/trunk/src/video_engine/test/auto_test/android/res/drawable/logo.png
new file mode 100644
index 0000000..c3e0a12
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/res/drawable/logo.png
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/android/res/layout/main.xml b/trunk/src/video_engine/test/auto_test/android/res/layout/main.xml
new file mode 100644
index 0000000..1f2aaf9
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/res/layout/main.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	     android:layout_width="fill_parent"
+	     android:layout_height="fill_parent">
+  <RelativeLayout
+     android:layout_width="fill_parent"
+     android:layout_height="fill_parent">
+    <LinearLayout 
+       android:id="@+id/RemoteView"
+       android:layout_width="fill_parent"
+       android:layout_height="fill_parent"
+       android:layout_weight="1">
+      <!-- log instead of video
+           <ImageView
+              android:layout_width="fill_parent"
+              android:layout_height="fill_parent" 
+              android:scaleType="fitXY"
+              android:src="@drawable/logo" /> -->
+
+    </LinearLayout>
+    <LinearLayout 
+       android:id="@+id/LocalView"
+       android:layout_width="120dip"
+       android:layout_height="120dip"
+       android:layout_weight="1">
+      <!-- <ImageView
+       	      android:layout_width="fill_parent"
+       	      android:layout_height="fill_parent" 
+       	      android:scaleType="fitXY"
+       	      android:src="@drawable/logo" /> -->
+    </LinearLayout>
+    <LinearLayout
+       android:orientation="horizontal"
+       android:layout_width="fill_parent"
+       android:layout_height="wrap_content"
+       android:layout_alignParentBottom="true">
+      <LinearLayout
+	 android:orientation="vertical"
+	 android:layout_width="fill_parent"
+	 android:layout_height="wrap_content"
+	 android:layout_alignParentBottom="true">
+	<Spinner
+	   android:id="@+id/testSpinner"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content"
+	   android:prompt="@string/SpinnerTitle"
+	   />
+	<Spinner
+	   android:id="@+id/subtestSpinner"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content"
+	   android:prompt="@string/SpinnerSubtest"
+	   />
+	<Button
+	   android:text="@string/run_button"
+	   android:id="@+id/Button01"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content">
+	</Button>
+      </LinearLayout>
+
+    </LinearLayout>
+  </RelativeLayout>
+</FrameLayout>
diff --git a/trunk/src/video_engine/test/auto_test/android/res/values/strings.xml b/trunk/src/video_engine/test/auto_test/android/res/values/strings.xml
new file mode 100644
index 0000000..ba59c5e
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/res/values/strings.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    
+<string name="app_name">ViEAutotest</string>
+<string name="TitleName">ViEAutotest</string>
+<string name="run_button">Run Test</string>
+<string name="SpinnerTitle">Test type...</string>
+<string-array name="test_array">
+	<item>Standard</item>
+	<item>API</item>
+	<item>Extended</item>
+	<item>Loopback</item>
+	<item>Custom</item>
+</string-array>
+<string name="SpinnerSubtest">Run...</string>
+<string-array name="subtest_array">
+	<item>All</item>
+	<item>Base</item>
+	<item>Capture</item>
+	<item>Codec</item>
+	<item>Mix</item>
+	<item>Encryption</item>
+	<item>External Codec</item>
+	<item>File</item>
+	<item>Image Process</item>
+	<item>Network</item>
+	<item>Render</item>
+	<item>RTP/RTCP</item>
+</string-array>
+
+</resources>
diff --git a/trunk/src/video_engine/test/auto_test/android/src/org/webrtc/vieautotest/ViEAutotest.java b/trunk/src/video_engine/test/auto_test/android/src/org/webrtc/vieautotest/ViEAutotest.java
new file mode 100644
index 0000000..de228a8
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/android/src/org/webrtc/vieautotest/ViEAutotest.java
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.vieautotest;
+
+import org.webrtc.vieautotest.R;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+import android.widget.Button;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.SurfaceHolder;
+import android.widget.LinearLayout;
+import android.opengl.GLSurfaceView;
+import android.widget.Spinner;
+import android.widget.ArrayAdapter;
+import android.widget.AdapterView;
+
+public class ViEAutotest extends Activity
+    implements
+      AdapterView.OnItemSelectedListener,
+      View.OnClickListener {
+
+  private Thread testThread;
+  private Spinner testSpinner;
+  private Spinner subtestSpinner;
+  private int testSelection;
+  private int subTestSelection;
+
+  // View for remote video
+  private LinearLayout remoteSurface = null;
+  private GLSurfaceView glSurfaceView = null;
+  private SurfaceView surfaceView = null;
+
+  private LinearLayout localSurface = null;
+  private GLSurfaceView glLocalSurfaceView = null;
+  private SurfaceView localSurfaceView = null;
+
+  /** Called when the activity is first created. */
+  @Override
+  public void onCreate(Bundle savedInstanceState) {
+
+    Log.d("*WEBRTC*", "onCreate called");
+
+    super.onCreate(savedInstanceState);
+    setContentView(R.layout.main);
+
+    // Set the Start button action
+    final Button buttonStart = (Button) findViewById(R.id.Button01);
+    buttonStart.setOnClickListener(this);
+
+    // Set test spinner
+    testSpinner = (Spinner) findViewById(R.id.testSpinner);
+    ArrayAdapter<CharSequence> adapter =
+        ArrayAdapter.createFromResource(this, R.array.test_array,
+                                        android.R.layout.simple_spinner_item);
+
+    int resource = android.R.layout.simple_spinner_dropdown_item;
+    adapter.setDropDownViewResource(resource);
+    testSpinner.setAdapter(adapter);
+    testSpinner.setOnItemSelectedListener(this);
+
+    // Set sub test spinner
+    subtestSpinner = (Spinner) findViewById(R.id.subtestSpinner);
+    ArrayAdapter<CharSequence> subtestAdapter =
+        ArrayAdapter.createFromResource(this, R.array.subtest_array,
+                                        android.R.layout.simple_spinner_item);
+
+    subtestAdapter.setDropDownViewResource(resource);
+    subtestSpinner.setAdapter(subtestAdapter);
+    subtestSpinner.setOnItemSelectedListener(this);
+
+    remoteSurface = (LinearLayout) findViewById(R.id.RemoteView);
+    surfaceView = new SurfaceView(this);
+    remoteSurface.addView(surfaceView);
+
+    localSurface = (LinearLayout) findViewById(R.id.LocalView);
+    localSurfaceView = new SurfaceView(this);
+    localSurfaceView.setZOrderMediaOverlay(true);
+    localSurface.addView(localSurfaceView);
+
+    // Set members
+    testSelection = 0;
+    subTestSelection = 0;
+  }
+
+  public void onClick(View v) {
+    Log.d("*WEBRTC*", "Button clicked...");
+    switch (v.getId()) {
+      case R.id.Button01:
+        new Thread(new Runnable() {
+            public void run() {
+              Log.d("*WEBRTC*", "Calling RunTest...");
+              RunTest(testSelection, subTestSelection,
+                      localSurfaceView, surfaceView);
+              Log.d("*WEBRTC*", "RunTest done");
+            }
+          }).start();
+    }
+  }
+
+  public void onItemSelected(AdapterView<?> parent, View v,
+                             int position, long id) {
+
+    if (parent == (Spinner) findViewById(R.id.testSpinner)) {
+      testSelection = position;
+    } else {
+      subTestSelection = position;
+    }
+  }
+
+  public void onNothingSelected(AdapterView<?> parent) {
+  }
+
+  @Override
+  protected void onStart() {
+    super.onStart();
+  }
+
+  @Override
+  protected void onResume() {
+    super.onResume();
+  }
+
+  @Override
+  protected void onPause() {
+    super.onPause();
+  }
+
+  @Override
+  protected void onStop() {
+    super.onStop();
+  }
+
+  @Override
+  protected void onDestroy() {
+
+    super.onDestroy();
+  }
+
+  // C++ function performing the chosen test
+  // private native int RunTest(int testSelection, int subtestSelection,
+  // GLSurfaceView window1, GLSurfaceView window2);
+  private native int RunTest(int testSelection, int subtestSelection,
+                             SurfaceView window1, SurfaceView window2);
+
+  // this is used to load the 'ViEAutotestJNIAPI' library on application
+  // startup.
+  static {
+    Log.d("*WEBRTC*", "Loading ViEAutotest...");
+    System.loadLibrary("webrtc-video-autotest-jni");
+  }
+}
diff --git a/trunk/src/video_engine/test/auto_test/automated/legacy_fixture.cc b/trunk/src/video_engine/test/auto_test/automated/legacy_fixture.cc
new file mode 100644
index 0000000..591a567
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/legacy_fixture.cc
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/automated/legacy_fixture.h"
+
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+
+void LegacyFixture::SetUpTestCase() {
+  TwoWindowsFixture::SetUpTestCase();
+
+  // Create the test cases
+  tests_ = new ViEAutoTest(window_1_, window_2_);
+}
+
+void LegacyFixture::TearDownTestCase() {
+  delete tests_;
+
+  TwoWindowsFixture::TearDownTestCase();
+}
+
+ViEAutoTest* LegacyFixture::tests_ = NULL;
diff --git a/trunk/src/video_engine/test/auto_test/automated/legacy_fixture.h b/trunk/src/video_engine/test/auto_test/automated/legacy_fixture.h
new file mode 100644
index 0000000..b452766
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/legacy_fixture.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
+
+#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
+
+// Inherited by old-style standard integration tests based on ViEAutoTest.
+class LegacyFixture : public TwoWindowsFixture {
+ public:
+  // Initializes ViEAutoTest in addition to the work done by ViEIntegrationTest.
+  static void SetUpTestCase();
+
+  // Releases anything allocated by SetUpTestCase.
+  static void TearDownTestCase();
+
+ protected:
+  static ViEAutoTest* tests_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
diff --git a/trunk/src/video_engine/test/auto_test/automated/two_windows_fixture.cc b/trunk/src/video_engine/test/auto_test/automated/two_windows_fixture.cc
new file mode 100644
index 0000000..d181aa5
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/two_windows_fixture.cc
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
+
+#include "video_engine/test/auto_test/helpers/vie_window_creator.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
+
+void TwoWindowsFixture::SetUpTestCase() {
+  window_creator_ = new ViEWindowCreator();
+
+  ViEAutoTestWindowManagerInterface* window_manager =
+      window_creator_->CreateTwoWindows();
+
+  window_1_ = window_manager->GetWindow1();
+  window_2_ = window_manager->GetWindow2();
+}
+
+void TwoWindowsFixture::TearDownTestCase() {
+  window_creator_->TerminateWindows();
+  delete window_creator_;
+}
+
+ViEWindowCreator* TwoWindowsFixture::window_creator_ = NULL;
+void* TwoWindowsFixture::window_1_ = NULL;
+void* TwoWindowsFixture::window_2_ = NULL;
diff --git a/trunk/src/video_engine/test/auto_test/automated/two_windows_fixture.h b/trunk/src/video_engine/test/auto_test/automated/two_windows_fixture.h
new file mode 100644
index 0000000..175a42d
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/two_windows_fixture.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
+
+#include "gtest/gtest.h"
+
+class ViEWindowCreator;
+class ViEAutoTest;
+
+// Meant to be inherited by all standard tests which require two windows.
+class TwoWindowsFixture : public testing::Test {
+ public:
+  // Launches two windows in a platform-dependent manner and stores the handles
+  // in the window_1_ and window_2_ fields.
+  static void SetUpTestCase();
+
+  // Releases anything allocated by SetUpTestCase.
+  static void TearDownTestCase();
+
+ protected:
+  static void* window_1_;
+  static void* window_2_;
+  static ViEWindowCreator* window_creator_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
diff --git a/trunk/src/video_engine/test/auto_test/automated/vie_api_integration_test.cc b/trunk/src/video_engine/test/auto_test/automated/vie_api_integration_test.cc
new file mode 100644
index 0000000..a2c65a2
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/vie_api_integration_test.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/**
+ * Runs API integration tests.
+ */
+
+#include "gtest/gtest.h"
+#include "legacy_fixture.h"
+#include "vie_autotest.h"
+
+namespace {
+
+class ViEApiIntegrationTest : public LegacyFixture {
+};
+
+TEST_F(ViEApiIntegrationTest, RunsBaseTestWithoutErrors) {
+  tests_->ViEBaseAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsCaptureTestWithoutErrors) {
+  tests_->ViECaptureAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsCodecTestWithoutErrors) {
+  tests_->ViECodecAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsEncryptionTestWithoutErrors) {
+  tests_->ViEEncryptionAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsFileTestWithoutErrors) {
+  tests_->ViEFileAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsImageProcessTestWithoutErrors) {
+  tests_->ViEImageProcessAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsNetworkTestWithoutErrors) {
+  tests_->ViENetworkAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsRenderTestWithoutErrors) {
+  tests_->ViERenderAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsRtpRtcpTestWithoutErrors) {
+  tests_->ViERtpRtcpAPITest();
+}
+
+} // namespace
diff --git a/trunk/src/video_engine/test/auto_test/automated/vie_extended_integration_test.cc b/trunk/src/video_engine/test/auto_test/automated/vie_extended_integration_test.cc
new file mode 100644
index 0000000..facb659
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/vie_extended_integration_test.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/**
+ * Runs "extended" integration tests.
+ */
+
+#include "gtest/gtest.h"
+#include "legacy_fixture.h"
+#include "vie_autotest.h"
+
+namespace {
+
+class ViEExtendedIntegrationTest : public LegacyFixture {
+};
+
+TEST_F(ViEExtendedIntegrationTest, RunsBaseTestWithoutErrors) {
+  tests_->ViEBaseExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsCaptureTestWithoutErrors) {
+  tests_->ViECaptureExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsCodecTestWithoutErrors) {
+  tests_->ViECodecExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsEncryptionTestWithoutErrors) {
+  tests_->ViEEncryptionExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsFileTestWithoutErrors) {
+  tests_->ViEFileExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsImageProcessTestWithoutErrors) {
+  tests_->ViEImageProcessExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsNetworkTestWithoutErrors) {
+  tests_->ViENetworkExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsRenderTestWithoutErrors) {
+  tests_->ViERenderExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsRtpRtcpTestWithoutErrors) {
+  tests_->ViERtpRtcpExtendedTest();
+}
+
+} // namespace
diff --git a/trunk/src/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc b/trunk/src/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
new file mode 100644
index 0000000..f8740be
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <ctime>
+
+#include "gtest/gtest.h"
+#include "gflags/gflags.h"
+#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
+#include "video_engine/test/auto_test/helpers/bit_flip_encryption.h"
+#include "video_engine/test/auto_test/helpers/random_encryption.h"
+#include "video_engine/test/auto_test/helpers/vie_window_creator.h"
+#include "video_engine/test/auto_test/interface/tb_capture_device.h"
+#include "video_engine/test/auto_test/interface/tb_interfaces.h"
+#include "video_engine/test/auto_test/interface/tb_video_channel.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+
+namespace {
+
+DEFINE_int32(rtp_fuzz_test_rand_seed, 0, "The rand seed to use for "
+    "the RTP fuzz test. Defaults to time(). 0 cannot be specified.");
+
+class ViERtpFuzzTest : public TwoWindowsFixture {
+ protected:
+  TbVideoChannel* video_channel_;
+  TbInterfaces* video_engine_;
+  TbCaptureDevice* capture_device_;
+
+  void SetUp() {
+    video_engine_ = new TbInterfaces(
+        "ViERtpTryInjectingRandomPacketsIntoRtpStream");
+    video_channel_ = new TbVideoChannel(
+        *video_engine_, webrtc::kVideoCodecVP8);
+    capture_device_ = new TbCaptureDevice(*video_engine_);
+
+    capture_device_->ConnectTo(video_channel_->videoChannel);
+
+    // Enable PLI RTCP, which will allow the video engine to recover better.
+    video_engine_->rtp_rtcp->SetKeyFrameRequestMethod(
+        video_channel_->videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+
+    video_channel_->StartReceive();
+    video_channel_->StartSend();
+
+    RenderInWindow(
+        video_engine_->render, capture_device_->captureId, window_1_, 0);
+    RenderInWindow(
+        video_engine_->render, video_channel_->videoChannel, window_2_, 1);
+  }
+
+  void TearDown() {
+    delete capture_device_;
+    delete video_channel_;
+    delete video_engine_;
+  }
+
+  unsigned int FetchRandSeed() {
+    if (FLAGS_rtp_fuzz_test_rand_seed != 0) {
+      return FLAGS_rtp_fuzz_test_rand_seed;
+    }
+    return std::time(NULL);
+  }
+
+  // Pass in a number [0, 1] which will be the bit flip probability per byte.
+  void BitFlipFuzzTest(float flip_probability) {
+    unsigned int rand_seed = FetchRandSeed();
+    ViETest::Log("Running test with rand seed %d.", rand_seed);
+
+    ViETest::Log("Running as usual. You should see video output.");
+    AutoTestSleep(2000);
+
+    ViETest::Log("Starting to flip bits in packets (%f%% chance per byte).",
+                 flip_probability * 100);
+    BitFlipEncryption bit_flip_encryption(rand_seed, flip_probability);
+    video_engine_->encryption->RegisterExternalEncryption(
+        video_channel_->videoChannel, bit_flip_encryption);
+
+    AutoTestSleep(5000);
+
+    ViETest::Log("Back to normal. Flipped %d bits.",
+                 bit_flip_encryption.flip_count());
+    video_engine_->encryption->DeregisterExternalEncryption(
+        video_channel_->videoChannel);
+
+    AutoTestSleep(5000);
+  }
+};
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithASmallNumberOfTamperedPackets) {
+  // Try 0.005% bit flip chance per byte.
+  BitFlipFuzzTest(0.00005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithAMediumNumberOfTamperedPackets) {
+  // Try 0.05% bit flip chance per byte.
+  BitFlipFuzzTest(0.0005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithALargeNumberOfTamperedPackets) {
+  // Try 0.5% bit flip chance per byte.
+  BitFlipFuzzTest(0.005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithAVeryLargeNumberOfTamperedPackets) {
+  // Try 5% bit flip chance per byte.
+  BitFlipFuzzTest(0.05f);
+}
+
+TEST_F(ViERtpFuzzTest,
+       VideoEngineDealsWithAExtremelyLargeNumberOfTamperedPackets) {
+  // Try 25% bit flip chance per byte (madness!)
+  BitFlipFuzzTest(0.25f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithSeveralPeriodsOfTamperedPackets) {
+  // Try 0.05% bit flip chance per byte.
+  BitFlipFuzzTest(0.0005f);
+  BitFlipFuzzTest(0.0005f);
+  BitFlipFuzzTest(0.0005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineRecoversAfterSomeCompletelyRandomPackets) {
+  unsigned int rand_seed = FetchRandSeed();
+  ViETest::Log("Running test with rand seed %d.", rand_seed);
+
+  ViETest::Log("Running as usual. You should see video output.");
+  AutoTestSleep(2000);
+
+  ViETest::Log("Injecting completely random packets...");
+  RandomEncryption random_encryption(rand_seed);
+  video_engine_->encryption->RegisterExternalEncryption(
+      video_channel_->videoChannel, random_encryption);
+
+  AutoTestSleep(5000);
+
+  ViETest::Log("Back to normal.");
+  video_engine_->encryption->DeregisterExternalEncryption(
+      video_channel_->videoChannel);
+
+  AutoTestSleep(5000);
+}
+
+}
diff --git a/trunk/src/video_engine/test/auto_test/automated/vie_standard_integration_test.cc b/trunk/src/video_engine/test/auto_test/automated/vie_standard_integration_test.cc
new file mode 100644
index 0000000..e85b6e4
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/vie_standard_integration_test.cc
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the "standard" suite of integration tests, implemented
+ * as a GUnit test. This file is a part of the effort to try to automate all
+ * tests in this section of the code. Currently, this code makes no attempt
+ * to verify any video output - it only checks for direct errors.
+ */
+
+#include <cstdio>
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+#include "legacy_fixture.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "vie_autotest.h"
+#include "vie_autotest_window_manager_interface.h"
+#include "vie_to_file_renderer.h"
+#include "vie_window_creator.h"
+#include "testsupport/metrics/video_metrics.h"
+
+namespace {
+
+class ViEStandardIntegrationTest : public LegacyFixture {
+};
+
+TEST_F(ViEStandardIntegrationTest, RunsBaseTestWithoutErrors)  {
+  tests_->ViEBaseStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsCodecTestWithoutErrors)  {
+  tests_->ViECodecStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsCaptureTestWithoutErrors)  {
+  tests_->ViECaptureStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsEncryptionTestWithoutErrors)  {
+  tests_->ViEEncryptionStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsFileTestWithoutErrors)  {
+  tests_->ViEFileStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsImageProcessTestWithoutErrors)  {
+  tests_->ViEImageProcessStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsNetworkTestWithoutErrors)  {
+  tests_->ViENetworkStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsRenderTestWithoutErrors)  {
+  tests_->ViERenderStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsRtpRtcpTestWithoutErrors)  {
+  tests_->ViERtpRtcpStandardTest();
+}
+
+} // namespace
diff --git a/trunk/src/video_engine/test/auto_test/automated/vie_video_verification_test.cc b/trunk/src/video_engine/test/auto_test/automated/vie_video_verification_test.cc
new file mode 100644
index 0000000..861af8a
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/automated/vie_video_verification_test.cc
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
+#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+
+namespace {
+
+// The input file must be QCIF since I420 gets scaled to that in the tests
+// (it is so bandwidth-heavy we have no choice). Our comparison algorithms
+// wouldn't like scaling, so this will work when we compare with the original.
+const int kInputWidth = 176;
+const int kInputHeight = 144;
+const int kVerifyingTestMaxNumAttempts = 3;
+
+class ViEVideoVerificationTest : public testing::Test {
+ protected:
+  void SetUp() {
+    input_file_ = webrtc::test::ResourcePath("paris_qcif", "yuv");
+  }
+
+  void TearDown() {
+    TearDownFileRenderer(local_file_renderer_);
+    TearDownFileRenderer(remote_file_renderer_);
+  }
+
+  void InitializeFileRenderers() {
+    local_file_renderer_ = new ViEToFileRenderer();
+    remote_file_renderer_ = new ViEToFileRenderer();
+    SetUpLocalFileRenderer(local_file_renderer_);
+    SetUpRemoteFileRenderer(remote_file_renderer_);
+  }
+
+  void SetUpLocalFileRenderer(ViEToFileRenderer* file_renderer) {
+    SetUpFileRenderer(file_renderer, "-local-preview.yuv");
+  }
+
+  void SetUpRemoteFileRenderer(ViEToFileRenderer* file_renderer) {
+    SetUpFileRenderer(file_renderer, "-remote.yuv");
+  }
+
+  // Must be called manually inside the tests.
+  void StopRenderers() {
+    local_file_renderer_->StopRendering();
+    remote_file_renderer_->StopRendering();
+  }
+
+  void TearDownFileRenderer(ViEToFileRenderer* file_renderer) {
+    assert(file_renderer);
+    bool test_failed = ::testing::UnitTest::GetInstance()->
+        current_test_info()->result()->Failed();
+    if (test_failed) {
+      // Leave the files for analysis if the test failed.
+      file_renderer->SaveOutputFile("failed-");
+    } else {
+      // No reason to keep the files if we succeeded.
+      file_renderer->DeleteOutputFile();
+    }
+    delete file_renderer;
+  }
+
+  void CompareFiles(const std::string& reference_file,
+                    const std::string& test_file,
+                    double* psnr_result, double *ssim_result) {
+    webrtc::test::QualityMetricsResult psnr;
+    int error = I420PSNRFromFiles(reference_file.c_str(), test_file.c_str(),
+                                  kInputWidth, kInputHeight, &psnr);
+
+    EXPECT_EQ(0, error) << "PSNR routine failed - output files missing?";
+    *psnr_result = psnr.average;
+
+    webrtc::test::QualityMetricsResult ssim;
+    error = I420SSIMFromFiles(reference_file.c_str(), test_file.c_str(),
+                              kInputWidth, kInputHeight, &ssim);
+    EXPECT_EQ(0, error) << "SSIM routine failed - output files missing?";
+    *ssim_result = ssim.average;
+
+    ViETest::Log("Results: PSNR is %f (dB), SSIM is %f (1 is perfect)",
+                 psnr.average, ssim.average);
+  }
+
+  std::string input_file_;
+  ViEToFileRenderer* local_file_renderer_;
+  ViEToFileRenderer* remote_file_renderer_;
+  ViEFileBasedComparisonTests tests_;
+
+ private:
+  void SetUpFileRenderer(ViEToFileRenderer* file_renderer,
+                         const std::string& suffix) {
+    std::string test_case_name =
+        ::testing::UnitTest::GetInstance()->current_test_info()->name();
+
+    std::string output_path = ViETest::GetResultOutputPath();
+    std::string filename = test_case_name + suffix;
+
+    if (!file_renderer->PrepareForRendering(output_path, filename)) {
+      FAIL() << "Could not open output file " << filename <<
+          " for writing.";
+    }
+  }
+};
+
+TEST_F(ViEVideoVerificationTest, RunsBaseStandardTestWithoutErrors)  {
+  // The I420 test should give pretty good values since it's a lossless codec
+  // running on the default bitrate. It should average about 30 dB but there
+  // may be cases where it dips as low as 26 under adverse conditions. That's
+  // why we have a retrying mechanism in place for this test.
+  const double kExpectedMinimumPSNR = 30;
+  const double kExpectedMinimumSSIM = 0.95;
+
+  for (int attempt = 0; attempt < kVerifyingTestMaxNumAttempts; attempt++) {
+    InitializeFileRenderers();
+    ASSERT_TRUE(tests_.TestCallSetup(input_file_, kInputWidth, kInputHeight,
+                                     local_file_renderer_,
+                                     remote_file_renderer_));
+    std::string output_file = remote_file_renderer_->GetFullOutputPath();
+    StopRenderers();
+
+    double actual_psnr = 0;
+    double actual_ssim = 0;
+    CompareFiles(input_file_, output_file, &actual_psnr, &actual_ssim);
+
+    if (actual_psnr >= kExpectedMinimumPSNR &&
+        actual_ssim >= kExpectedMinimumSSIM) {
+      // Test succeeded!
+      return;
+    }
+  }
+
+  ADD_FAILURE() << "Failed to achieve PSNR " << kExpectedMinimumPSNR <<
+      " and SSIM " << kExpectedMinimumSSIM << " after " <<
+      kVerifyingTestMaxNumAttempts << " attempts.";
+}
+
+TEST_F(ViEVideoVerificationTest, RunsCodecTestWithoutErrors)  {
+  // We compare the local and remote here instead of with the original.
+  // The reason is that it is hard to say when the three consecutive tests
+  // switch over into each other, at which point we would have to restart the
+  // original to get a fair comparison.
+  //
+  // The PSNR and SSIM values are quite low here, and they have to be since
+  // the codec switches will lead to lag in the output. This is considered
+  // acceptable, but it probably shouldn't get worse than this.
+  const double kExpectedMinimumPSNR = 20;
+  const double kExpectedMinimumSSIM = 0.7;
+
+  for (int attempt = 0; attempt < kVerifyingTestMaxNumAttempts; attempt++) {
+    InitializeFileRenderers();
+    ASSERT_TRUE(tests_.TestCodecs(input_file_, kInputWidth, kInputHeight,
+                                  local_file_renderer_,
+                                  remote_file_renderer_));
+    std::string reference_file = local_file_renderer_->GetFullOutputPath();
+    std::string output_file = remote_file_renderer_->GetFullOutputPath();
+    StopRenderers();
+
+    double actual_psnr = 0;
+    double actual_ssim = 0;
+    CompareFiles(reference_file, output_file, &actual_psnr, &actual_ssim);
+
+    if (actual_psnr >= kExpectedMinimumPSNR &&
+        actual_ssim >= kExpectedMinimumSSIM) {
+      // Test succeeded!
+      return;
+    }
+  }
+}
+
+// Runs a whole stack processing with tracking of which frames are dropped
+// in the encoder. The local and remote file will not be of equal size because
+// of unknown reasons. Tests show that they start at the same frame, which is
+// the important thing when doing frame-to-frame comparison with PSNR/SSIM.
+TEST_F(ViEVideoVerificationTest, RunsFullStackWithoutErrors)  {
+  FrameDropDetector detector;
+  local_file_renderer_ = new ViEToFileRenderer();
+  remote_file_renderer_ = new FrameDropMonitoringRemoteFileRenderer(&detector);
+  SetUpLocalFileRenderer(local_file_renderer_);
+  SetUpRemoteFileRenderer(remote_file_renderer_);
+
+  // Set a low bit rate so the encoder budget will be tight, causing it to drop
+  // frames every now and then.
+  const int kBitRateKbps = 50;
+  const int kPacketLossPercent = 10;
+  const int kNetworkDelayMs = 100;
+  ViETest::Log("Bit rate     : %5d kbps", kBitRateKbps);
+  ViETest::Log("Packet loss  : %5d %%", kPacketLossPercent);
+  ViETest::Log("Network delay: %5d ms", kNetworkDelayMs);
+  tests_.TestFullStack(input_file_, kInputWidth, kInputHeight, kBitRateKbps,
+                       kPacketLossPercent, kNetworkDelayMs,
+                       local_file_renderer_, remote_file_renderer_, &detector);
+  const std::string reference_file = local_file_renderer_->GetFullOutputPath();
+  const std::string output_file = remote_file_renderer_->GetFullOutputPath();
+  StopRenderers();
+
+  detector.CalculateResults();
+  detector.PrintReport();
+
+  if (detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kRendered) !=
+      detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kDecoded)) {
+    detector.PrintDebugDump();
+  }
+
+  ASSERT_EQ(detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kRendered),
+      detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kDecoded))
+      << "The number of dropped frames on the decode and render steps are not "
+      "equal. This may be because we have a major problem in the buffers of "
+      "the ViEToFileRenderer?";
+
+  // We may have dropped frames during the processing, which means the output
+  // file does not contain all the frames that are present in the input file.
+  // To make the quality measurement correct, we must adjust the output file to
+  // that by copying the last successful frame into the place where the dropped
+  // frame would be, for all dropped frames.
+  const int frame_length_in_bytes = 3 * kInputHeight * kInputWidth / 2;
+  ViETest::Log("Frame length: %d bytes", frame_length_in_bytes);
+  std::vector<Frame*> all_frames = detector.GetAllFrames();
+  FixOutputFileForComparison(output_file, frame_length_in_bytes, all_frames);
+
+  // Verify all sent frames are present in the output file.
+  size_t output_file_size = webrtc::test::GetFileSize(output_file);
+  EXPECT_EQ(all_frames.size(), output_file_size / frame_length_in_bytes)
+      << "The output file size is incorrect. It should be equal to the number "
+      "of frames multiplied by the frame size. This will likely affect "
+      "PSNR/SSIM calculations in a bad way.";
+
+  // We are running on a lower bitrate here so we need to settle for somewhat
+  // lower PSNR and SSIM values.
+  double actual_psnr = 0;
+  double actual_ssim = 0;
+  CompareFiles(reference_file, output_file, &actual_psnr, &actual_ssim);
+
+  const double kExpectedMinimumPSNR = 24;
+  const double kExpectedMinimumSSIM = 0.7;
+
+  EXPECT_GE(actual_psnr, kExpectedMinimumPSNR);
+  EXPECT_GE(actual_ssim, kExpectedMinimumSSIM);
+}
+
+}  // namespace
diff --git a/trunk/src/video_engine/test/auto_test/helpers/bit_flip_encryption.cc b/trunk/src/video_engine/test/auto_test/helpers/bit_flip_encryption.cc
new file mode 100644
index 0000000..423ad5a
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/bit_flip_encryption.cc
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/helpers/bit_flip_encryption.h"
+
+#include <cstdlib>
+
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+
+float NormalizedRand() {
+  return static_cast<float>(rand()) /
+         static_cast<float>(RAND_MAX);
+}
+
+BitFlipEncryption::BitFlipEncryption(unsigned int rand_seed,
+                                     float flip_probability)
+    : flip_probability_(flip_probability),
+      flip_count_(0) {
+  srand(rand_seed);
+}
+
+void BitFlipEncryption::FlipSomeBitsInData(const unsigned char* in_data,
+                                           unsigned char* out_data,
+                                           int bytes_in, int* bytes_out) {
+  for (int i = 0; i < bytes_in; i++) {
+    out_data[i] = in_data[i];
+
+    if (NormalizedRand() < flip_probability_) {
+      int bit_to_flip = rand() % 8;
+      out_data[i] ^= 1 << bit_to_flip;
+      flip_count_++;
+    }
+  }
+  *bytes_out = bytes_in;
+}
diff --git a/trunk/src/video_engine/test/auto_test/helpers/bit_flip_encryption.h b/trunk/src/video_engine/test/auto_test/helpers/bit_flip_encryption.h
new file mode 100644
index 0000000..ccc6974
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/bit_flip_encryption.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_BIT_FLIP_ENCRYPTION_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_BIT_FLIP_ENCRYPTION_H_
+
+#include "video_engine/include/vie_encryption.h"
+
+// This encryption scheme will randomly flip bits every now and then in the
+// input data.
+class BitFlipEncryption : public webrtc::Encryption {
+ public:
+  // Args:
+  //   rand_seed: the seed to initialize the test's random generator with.
+  //   flip_probability: A number [0, 1] which is the percentage chance a bit
+  //       gets flipped in a particular byte.
+  BitFlipEncryption(unsigned int rand_seed, float flip_probability);
+
+  virtual void encrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  virtual void encrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  int64_t flip_count() const { return flip_count_; }
+
+ private:
+  // The flip probability ([0, 1]).
+  float flip_probability_;
+  // The number of bits we've flipped so far.
+  int64_t flip_count_;
+
+  // Flips some bits in the data at random.
+  void FlipSomeBitsInData(const unsigned char *in_data, unsigned char* out_data,
+                          int bytes_in, int* bytes_out);
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_BIT_FLIP_ENCRYPTION_H_
diff --git a/trunk/src/video_engine/test/auto_test/helpers/random_encryption.cc b/trunk/src/video_engine/test/auto_test/helpers/random_encryption.cc
new file mode 100644
index 0000000..04904b5
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/random_encryption.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/helpers/random_encryption.h"
+
+#include <algorithm>
+#include <cstdlib>
+#include <cmath>
+
+#include "video_engine/vie_defines.h"
+
+static int Saturate(int value, int min, int max) {
+  return std::min(std::max(value, min), max);
+}
+
+RandomEncryption::RandomEncryption(unsigned int rand_seed) {
+  srand(rand_seed);
+}
+
+// Generates some completely random data with roughly the right length.
+void RandomEncryption::GenerateRandomData(unsigned char* out_data, int bytes_in,
+                                          int* bytes_out) {
+  int out_length = MakeUpSimilarLength(bytes_in);
+  for (int i = 0; i < out_length; i++) {
+    // The modulo will skew the random distribution a bit, but I think it
+    // will be random enough.
+    out_data[i] = static_cast<unsigned char>(rand() % 256);
+  }
+  *bytes_out = out_length;
+}
+
+// Makes up a length within +- 50 of the original length, without
+// overstepping the contract for encrypt / decrypt.
+int RandomEncryption::MakeUpSimilarLength(int original_length) {
+  int sign = rand() - RAND_MAX / 2;
+  int length = original_length + sign * rand() % 50;
+
+  return Saturate(length, 0, static_cast<int>(webrtc::kViEMaxMtu));
+}
diff --git a/trunk/src/video_engine/test/auto_test/helpers/random_encryption.h b/trunk/src/video_engine/test/auto_test/helpers/random_encryption.h
new file mode 100644
index 0000000..cbca844
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/random_encryption.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_RANDOM_ENCRYPTION_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_RANDOM_ENCRYPTION_H_
+
+#include "video_engine/include/vie_encryption.h"
+
+// These algorithms attempt to create an uncrackable encryption
+// scheme by completely disregarding the input data.
+class RandomEncryption : public webrtc::Encryption {
+ public:
+  // The seed is passed to srand(); note that the PRNG state is
+  // process-global, not per-instance.
+  explicit RandomEncryption(unsigned int rand_seed);
+
+  // All four Encryption callbacks below ignore in_data entirely and fill
+  // out_data with random bytes of roughly the input's length.
+  virtual void encrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+  virtual void encrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+ private:
+  // Generates some completely random data with roughly the right length.
+  void GenerateRandomData(unsigned char* out_data, int bytes_in,
+                          int* bytes_out);
+
+  // Makes up a length within +- 50 of the original length, without
+  // overstepping the contract for encrypt / decrypt.
+  int MakeUpSimilarLength(int original_length);
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_RANDOM_ENCRYPTION_H_
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_fake_camera.cc b/trunk/src/video_engine/test/auto_test/helpers/vie_fake_camera.cc
new file mode 100644
index 0000000..f7dae2f
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_fake_camera.cc
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "vie_fake_camera.h"
+
+#include <assert.h>
+
+#include "vie_capture.h"
+#include "vie_file_capture_device.h"
+#include "thread_wrapper.h"
+
+// This callback runs the camera thread:
+bool StreamVideoFileRepeatedlyIntoCaptureDevice(void* data) {
+  ViEFileCaptureDevice* file_capture_device =
+      reinterpret_cast<ViEFileCaptureDevice*>(data);
+
+  // We want to interrupt the camera feeding thread every now and then in order
+  // to follow the contract for the system_wrappers thread library. 1.5 seconds
+  // seems about right here.
+  uint64_t time_slice_ms = 1500;
+  uint32_t max_fps = 30;
+
+  file_capture_device->ReadFileFor(time_slice_ms, max_fps);
+
+  return true;
+}
+
+// Allocates nothing up front; the capture device and the camera thread
+// are created in StartCameraInNewThread().
+ViEFakeCamera::ViEFakeCamera(webrtc::ViECapture* capture_interface)
+    : capture_interface_(capture_interface),
+      capture_id_(-1),
+      camera_thread_(NULL),
+      file_capture_device_(NULL) {
+}
+
+// NOTE(review): the destructor does not stop the camera; callers must
+// call StopCamera() first or the thread/device objects leak.
+ViEFakeCamera::~ViEFakeCamera() {
+}
+
+bool ViEFakeCamera::StartCameraInNewThread(
+    const std::string& i420_test_video_path, int width, int height) {
+
+  assert(file_capture_device_ == NULL && camera_thread_ == NULL);
+
+  webrtc::ViEExternalCapture* externalCapture;
+  int result = capture_interface_->
+      AllocateExternalCaptureDevice(capture_id_, externalCapture);
+  if (result != 0) {
+    return false;
+  }
+
+  file_capture_device_ = new ViEFileCaptureDevice(externalCapture);
+  if (!file_capture_device_->OpenI420File(i420_test_video_path,
+                                          width,
+                                          height)) {
+    return false;
+  }
+
+  // Set up a thread which runs the fake camera. The capturer object is
+  // thread-safe.
+  camera_thread_ = webrtc::ThreadWrapper::CreateThread(
+      StreamVideoFileRepeatedlyIntoCaptureDevice, file_capture_device_);
+  unsigned int id;
+  camera_thread_->Start(id);
+
+  return true;
+}
+
+bool ViEFakeCamera::StopCamera() {
+  assert(file_capture_device_ != NULL && camera_thread_ != NULL);
+
+  camera_thread_->Stop();
+  file_capture_device_->CloseFile();
+
+  int result = capture_interface_->ReleaseCaptureDevice(capture_id_);
+
+  delete camera_thread_;
+  delete file_capture_device_;
+  camera_thread_ = NULL;
+  file_capture_device_ = NULL;
+
+  return result == 0;
+}
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_fake_camera.h b/trunk/src/video_engine/test/auto_test/helpers/vie_fake_camera.h
new file mode 100644
index 0000000..0b9c2e7
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_fake_camera.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FAKE_CAMERA_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FAKE_CAMERA_H_
+
+#include <string>
+
+namespace webrtc {
+class ViECapture;
+class ThreadWrapper;
+}
+
+class ViEFileCaptureDevice;
+
+// Registers an external capture device with the provided capture interface
+// and starts running a fake camera by reading frames from a file. The frame-
+// reading code runs in a separate thread which makes it possible to run tests
+// while the fake camera feeds data into the system. This class is not thread-
+// safe in itself (but handles its own thread in a safe manner).
+class ViEFakeCamera {
+ public:
+  // The argument is the capture interface to register with.
+  explicit ViEFakeCamera(webrtc::ViECapture* capture_interface);
+  virtual ~ViEFakeCamera();
+
+  // Runs the scenario in the class comments. Returns false if the
+  // capture device could not be allocated or the file could not open.
+  bool StartCameraInNewThread(const std::string& i420_test_video_path,
+                              int width,
+                              int height);
+  // Stops the camera and cleans up everything allocated by the start method.
+  // May only be called after a successful StartCameraInNewThread().
+  bool StopCamera();
+
+  // The capture device id allocated by the start method (-1 before that).
+  int capture_id() const { return capture_id_; }
+
+ private:
+  webrtc::ViECapture* capture_interface_;  // Not owned (never deleted here).
+
+  int capture_id_;
+  webrtc::ThreadWrapper* camera_thread_;       // Owned; NULL while stopped.
+  ViEFileCaptureDevice* file_capture_device_;  // Owned; NULL while stopped.
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FAKE_CAMERA_H_
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_file_capture_device.cc b/trunk/src/video_engine/test/auto_test/helpers/vie_file_capture_device.cc
new file mode 100644
index 0000000..e4caa2a
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_file_capture_device.cc
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "vie_file_capture_device.h"
+
+#include <assert.h>
+
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "module_common_types.h"
+#include "vie_autotest_defines.h"
+#include "vie_capture.h"
+#include "tick_util.h"
+
+// This class ensures we are not exceeding the max FPS.
+class FramePacemaker {
+ public:
+  explicit FramePacemaker(uint32_t max_fps)
+      : time_per_frame_ms_(1000 / max_fps) {
+    frame_start_ = webrtc::TickTime::MillisecondTimestamp();
+  }
+
+  void SleepIfNecessary(webrtc::EventWrapper* sleeper) {
+    uint64_t now = webrtc::TickTime::MillisecondTimestamp();
+    if (now - frame_start_ < time_per_frame_ms_) {
+      sleeper->Wait(time_per_frame_ms_ - (now - frame_start_));
+    }
+  }
+
+ private:
+  uint64_t frame_start_;
+  uint64_t time_per_frame_ms_;
+};
+
+ViEFileCaptureDevice::ViEFileCaptureDevice(
+    webrtc::ViEExternalCapture* input_sink)
+    : input_sink_(input_sink),
+      input_file_(NULL) {
+  mutex_ = webrtc::CriticalSectionWrapper::CreateCriticalSection();
+}
+
+// NOTE(review): if a caller forgets CloseFile(), input_file_ leaks here.
+// ViEFakeCamera::StopCamera always closes it, so this only matters for
+// other callers — confirm before adding a close here (CloseFile leaves
+// the pointer dangling, so an unconditional fclose would double-close).
+ViEFileCaptureDevice::~ViEFileCaptureDevice() {
+  delete mutex_;
+}
+
+bool ViEFileCaptureDevice::OpenI420File(const std::string& path,
+                                        int width,
+                                        int height) {
+  webrtc::CriticalSectionScoped cs(*mutex_);
+  assert(input_file_ == NULL);
+
+  input_file_ = std::fopen(path.c_str(), "rb");
+  if (input_file_ == NULL) {
+    return false;
+  }
+
+  frame_length_ = 3 * width * height / 2;
+  width_  = width;
+  height_ = height;
+  return true;
+}
+
+void ViEFileCaptureDevice::ReadFileFor(uint64_t time_slice_ms,
+                                       uint32_t max_fps) {
+  webrtc::CriticalSectionScoped cs(*mutex_);
+  assert(input_file_ != NULL);
+
+  unsigned char* frame_buffer = new unsigned char[frame_length_];
+
+  webrtc::EventWrapper* sleeper = webrtc::EventWrapper::Create();
+
+  uint64_t start_time_ms = webrtc::TickTime::MillisecondTimestamp();
+  uint64_t elapsed_ms = 0;
+
+  while (elapsed_ms < time_slice_ms) {
+    FramePacemaker pacemaker(max_fps);
+    int read = std::fread(frame_buffer, 1, frame_length_, input_file_);
+
+    if (std::feof(input_file_)) {
+      std::rewind(input_file_);
+    }
+    input_sink_->IncomingFrame(frame_buffer, read, width_, height_,
+                               webrtc::kVideoI420,
+                               webrtc::TickTime::MillisecondTimestamp());
+
+    pacemaker.SleepIfNecessary(sleeper);
+    elapsed_ms = webrtc::TickTime::MillisecondTimestamp() - start_time_ms;
+  }
+
+  delete sleeper;
+  delete[] frame_buffer;
+}
+
+void ViEFileCaptureDevice::CloseFile() {
+  webrtc::CriticalSectionScoped cs(*mutex_);
+  assert(input_file_ != NULL);
+
+  std::fclose(input_file_);
+}
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_file_capture_device.h b/trunk/src/video_engine/test/auto_test/helpers/vie_file_capture_device.h
new file mode 100644
index 0000000..5e62c48
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_file_capture_device.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_
+
+#include <cstdio>
+
+#include <string>
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ViEExternalCapture;
+}
+
+// This class opens a i420 file and feeds it into a ExternalCapture instance,
+// thereby acting as a faked capture device with deterministic input.
+class ViEFileCaptureDevice {
+ public:
+  // The input sink is where to send the I420 video frames.
+  explicit ViEFileCaptureDevice(webrtc::ViEExternalCapture* input_sink);
+  virtual ~ViEFileCaptureDevice();
+
+  // Opens the provided I420 file and interprets it according to the provided
+  // width and height. Returns false if the file doesn't exist.
+  bool OpenI420File(const std::string& path, int width, int height);
+
+  // Reads the previously opened file for at most time_slice_ms milliseconds,
+  // after which it will return. It will make sure to sleep accordingly so we
+  // do not send more than max_fps cap (we may send less, though).
+  // Rewinds and loops when the end of the file is reached.
+  void ReadFileFor(uint64_t time_slice_ms, uint32_t max_fps);
+
+  // Closes the opened input file.
+  void CloseFile();
+
+ private:
+  webrtc::ViEExternalCapture* input_sink_;  // Not owned.
+
+  std::FILE* input_file_;  // NULL until OpenI420File succeeds.
+  webrtc::CriticalSectionWrapper* mutex_;  // Guards all file access.
+
+  WebRtc_UWord32 frame_length_;  // Bytes per frame (width * height * 3 / 2).
+  // NOTE(review): frame_buffer_ appears unused — ReadFileFor allocates a
+  // local buffer instead. Confirm before removing.
+  WebRtc_UWord8* frame_buffer_;
+  WebRtc_UWord32 width_;
+  WebRtc_UWord32 height_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_to_file_renderer.cc b/trunk/src/video_engine/test/auto_test/helpers/vie_to_file_renderer.cc
new file mode 100644
index 0000000..d9c8da3
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_to_file_renderer.cc
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_to_file_renderer.h"
+
+#include <assert.h>
+
+ViEToFileRenderer::ViEToFileRenderer()
+    : output_file_(NULL) {
+}
+
+ViEToFileRenderer::~ViEToFileRenderer() {
+}
+
+bool ViEToFileRenderer::PrepareForRendering(
+    const std::string& output_path,
+    const std::string& output_filename) {
+
+  assert(output_file_ == NULL);
+
+  output_file_ = std::fopen((output_path + output_filename).c_str(), "wb");
+  if (output_file_ == NULL) {
+    return false;
+  }
+
+  output_filename_ = output_filename;
+  output_path_ = output_path;
+  return true;
+}
+
+// Closes the output file opened by PrepareForRendering and resets the
+// handle so the renderer can be prepared again.
+void ViEToFileRenderer::StopRendering() {
+  assert(output_file_ != NULL);
+  std::fclose(output_file_);
+  output_file_ = NULL;
+}
+
+bool ViEToFileRenderer::SaveOutputFile(const std::string& prefix) {
+  assert(output_file_ == NULL && output_filename_ != "");
+  if (std::rename((output_path_ + output_filename_).c_str(),
+                  (output_path_ + prefix + output_filename_).c_str()) != 0) {
+    std::perror("Failed to rename output file");
+    return false;
+  }
+  ForgetOutputFile();
+  return true;
+}
+
+bool ViEToFileRenderer::DeleteOutputFile() {
+  assert(output_file_ == NULL && output_filename_ != "");
+  if (std::remove((output_path_ + output_filename_).c_str()) != 0) {
+    std::perror("Failed to delete output file");
+    return false;
+  }
+  ForgetOutputFile();
+  return true;
+}
+
+// Returns the concatenated directory + filename of the current output
+// file (empty after ForgetOutputFile).
+const std::string ViEToFileRenderer::GetFullOutputPath() const {
+  return output_path_ + output_filename_;
+}
+
+void ViEToFileRenderer::ForgetOutputFile() {
+  output_filename_ = "";
+  output_path_ = "";
+}
+
+int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
+                                    int buffer_size,
+                                    uint32_t time_stamp,
+                                    int64_t render_time) {
+  assert(output_file_ != NULL);
+
+  int written = std::fwrite(buffer, sizeof(unsigned char),
+                            buffer_size, output_file_);
+
+  if (written == buffer_size) {
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+// ExternalRenderer callback. The raw-file writer does not need to react
+// to resolution changes, so this is a no-op that reports success.
+int ViEToFileRenderer::FrameSizeChange(unsigned int width,
+                                       unsigned int height,
+                                       unsigned int number_of_streams) {
+  return 0;
+}
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_to_file_renderer.h b/trunk/src/video_engine/test/auto_test/helpers/vie_to_file_renderer.h
new file mode 100644
index 0000000..08559af
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_to_file_renderer.h
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_VIE_TO_FILE_RENDERER_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_VIE_TO_FILE_RENDERER_H_
+
+#include <cstdio>
+#include <string>
+
+#include "video_engine/include/vie_render.h"
+
+// External renderer that writes every delivered frame verbatim to a
+// file, so test output can later be kept, renamed or deleted.
+class ViEToFileRenderer: public webrtc::ExternalRenderer {
+ public:
+  ViEToFileRenderer();
+  virtual ~ViEToFileRenderer();
+
+  // Returns false if we fail opening the output filename for writing.
+  bool PrepareForRendering(const std::string& output_path,
+                           const std::string& output_filename);
+
+  // Closes the output file.
+  void StopRendering();
+
+  // Deletes the closed output file from the file system. This is one option
+  // after calling StopRendering, the other being SaveOutputFile. This file
+  // renderer will forget about the file after this call and can be used again.
+  bool DeleteOutputFile();
+
+  // Renames the closed output file to its previous name with the provided
+  // prefix prepended. This file renderer will forget about the file after this
+  // call and can be used again.
+  bool SaveOutputFile(const std::string& prefix);
+
+  // Implementation of ExternalRenderer:
+  int FrameSizeChange(unsigned int width, unsigned int height,
+                      unsigned int number_of_streams);
+
+  int DeliverFrame(unsigned char* buffer, int buffer_size,
+                   uint32_t time_stamp,
+                   int64_t render_time);
+
+  // Returns output_path + output_filename for the current file.
+  const std::string GetFullOutputPath() const;
+
+ private:
+  // Clears the remembered path/filename.
+  void ForgetOutputFile();
+
+  std::FILE* output_file_;      // NULL while no file is open.
+  std::string output_path_;
+  std::string output_filename_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_VIE_TO_FILE_RENDERER_H_
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_window_creator.cc b/trunk/src/video_engine/test/auto_test/helpers/vie_window_creator.cc
new file mode 100644
index 0000000..40ec0bb
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_window_creator.cc
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_window_creator.h"
+
+#include "vie_autotest_main.h"
+#include "vie_codec.h"
+#include "voe_codec.h"
+#include "vie_window_manager_factory.h"
+#include "vie_autotest_window_manager_interface.h"
+
+#if defined(WIN32)
+#include <tchar.h>
+#endif
+
+// Creates the platform-specific window manager immediately.
+ViEWindowCreator::ViEWindowCreator() {
+  window_manager_ =
+      ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform();
+}
+
+// Deletes the window manager, invalidating any pointer handed out by
+// CreateTwoWindows().
+ViEWindowCreator::~ViEWindowCreator() {
+  delete window_manager_;
+}
+
+// Opens the two fixed-title autotest render windows via the window
+// manager and raises them above other windows. The returned pointer is
+// still owned by this object.
+ViEAutoTestWindowManagerInterface*
+  ViEWindowCreator::CreateTwoWindows() {
+#if defined(WIN32)
+  TCHAR window1Title[1024] = _T("ViE Autotest Window 1");
+  TCHAR window2Title[1024] = _T("ViE Autotest Window 2");
+#else
+  char window1Title[1024] = "ViE Autotest Window 1";
+  char window2Title[1024] = "ViE Autotest Window 2";
+#endif
+
+  // Presumably (width, height, x, y) given the 352x288 CIF size —
+  // confirm against the AutoTestRect definition.
+  AutoTestRect window1Size(352, 288, 600, 100);
+  AutoTestRect window2Size(352, 288, 1000, 100);
+  window_manager_->CreateWindows(window1Size, window2Size, window1Title,
+                                 window2Title);
+  window_manager_->SetTopmostWindow();
+
+  return window_manager_;
+}
+
+// Closes the windows opened by CreateTwoWindows; the window manager
+// itself lives on until this object is destroyed.
+void ViEWindowCreator::TerminateWindows() {
+  window_manager_->TerminateWindows();
+}
+
diff --git a/trunk/src/video_engine/test/auto_test/helpers/vie_window_creator.h b/trunk/src/video_engine/test/auto_test/helpers/vie_window_creator.h
new file mode 100644
index 0000000..25c23a3
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/helpers/vie_window_creator.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
+
+class ViEAutoTestWindowManagerInterface;
+
+// Owns a platform-specific window manager and uses it to open and close
+// the two render windows the autotests display in.
+class ViEWindowCreator {
+ public:
+  ViEWindowCreator();
+  virtual ~ViEWindowCreator();
+
+  // The pointer returned here will still be owned by this object.
+  // Only use it to retrieve the created windows.
+  ViEAutoTestWindowManagerInterface* CreateTwoWindows();
+
+  // Terminates windows opened by CreateTwoWindows, which must
+  // have been called before this method.
+  void TerminateWindows();
+ private:
+  ViEAutoTestWindowManagerInterface* window_manager_;  // Owned.
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/tb_I420_codec.h b/trunk/src/video_engine/test/auto_test/interface/tb_I420_codec.h
new file mode 100644
index 0000000..d6be4b2
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/tb_I420_codec.h
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * tb_I420_codec.h
+ *
+ * This file contains the interface to I420 "codec"
+ * This is a dummy wrapper to allow VCM deal with raw I420 sequences
+ *
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
+
+#include "video_codec_interface.h"
+
+// Dummy I420 "encoder" (see the file header comment): lets the VCM deal
+// with raw I420 sequences without a real codec, while recording how
+// often each API method is invoked (see GetFunctionCalls).
+class TbI420Encoder: public webrtc::VideoEncoder
+{
+public:
+    TbI420Encoder();
+    virtual ~TbI420Encoder();
+
+    static WebRtc_Word32 VersionStatic(WebRtc_Word8* version,
+                                       WebRtc_Word32 length);
+    virtual WebRtc_Word32  Version(WebRtc_Word8 *version,
+                                   WebRtc_Word32 length) const;
+
+    virtual WebRtc_Word32 InitEncode(const webrtc::VideoCodec* codecSettings,
+                                     WebRtc_Word32 numberOfCores,
+                                     WebRtc_UWord32 maxPayloadSize);
+
+    virtual WebRtc_Word32 Encode(
+        const webrtc::RawImage& inputImage,
+        const webrtc::CodecSpecificInfo* codecSpecificInfo,
+        const webrtc::VideoFrameType* frameType);
+
+    virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
+        webrtc::EncodedImageCallback* callback);
+
+    virtual WebRtc_Word32 Release();
+
+    virtual WebRtc_Word32 Reset();
+
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                               int rtt);
+
+    virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
+
+    virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate,
+                                   WebRtc_UWord32 frameRate);
+
+    virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
+
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
+                                                WebRtc_Word32 /*size*/);
+
+    // Per-method call counters — presumably incremented by the .cc
+    // implementation; confirm there.
+    struct FunctionCalls
+    {
+        WebRtc_Word32 InitEncode;
+        WebRtc_Word32 Encode;
+        WebRtc_Word32 RegisterEncodeCompleteCallback;
+        WebRtc_Word32 Release;
+        WebRtc_Word32 Reset;
+        WebRtc_Word32 SetRates;
+        WebRtc_Word32 SetPacketLoss;
+        WebRtc_Word32 SetPeriodicKeyFrames;
+        WebRtc_Word32 CodecConfigParameters;
+
+    };
+
+    // Returns the current call counters by value.
+    FunctionCalls GetFunctionCalls();
+private:
+    bool _inited;
+    webrtc::EncodedImage _encodedImage;
+    FunctionCalls _functionCalls;
+    webrtc::EncodedImageCallback* _encodedCompleteCallback;
+
+}; // end of tbI420Encoder class
+
+
+/***************************/
+/* tbI420Decoder class */
+/***************************/
+
+// Dummy I420 "decoder" counterpart of TbI420Encoder (see the file header
+// comment), also recording how often each API method is invoked.
+class TbI420Decoder: public webrtc::VideoDecoder
+{
+public:
+    TbI420Decoder();
+    virtual ~TbI420Decoder();
+
+    virtual WebRtc_Word32 InitDecode(const webrtc::VideoCodec* inst,
+                                     WebRtc_Word32 numberOfCores);
+    virtual WebRtc_Word32 Decode(
+        const webrtc::EncodedImage& inputImage,
+        bool missingFrames,
+        const webrtc::RTPFragmentationHeader* fragmentation,
+        const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
+        WebRtc_Word64 renderTimeMs = -1);
+
+    virtual WebRtc_Word32
+        RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback);
+    virtual WebRtc_Word32 Release();
+    virtual WebRtc_Word32 Reset();
+
+    // Per-method call counters — presumably incremented by the .cc
+    // implementation; confirm there.
+    struct FunctionCalls
+    {
+        WebRtc_Word32 InitDecode;
+        WebRtc_Word32 Decode;
+        WebRtc_Word32 RegisterDecodeCompleteCallback;
+        WebRtc_Word32 Release;
+        WebRtc_Word32 Reset;
+    };
+
+    // Returns the current call counters by value.
+    FunctionCalls GetFunctionCalls();
+
+private:
+
+    webrtc::RawImage _decodedImage;
+    WebRtc_Word32 _width;
+    WebRtc_Word32 _height;
+    bool _inited;
+    FunctionCalls _functionCalls;
+    webrtc::DecodedImageCallback* _decodeCompleteCallback;
+
+}; // end of tbI420Decoder class
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/tb_capture_device.h b/trunk/src/video_engine/test/auto_test/interface/tb_capture_device.h
new file mode 100644
index 0000000..3ade1e0
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/tb_capture_device.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
+
+#include "tb_interfaces.h"
+#include "video_capture_factory.h"
+
+// Test helper wrapping a video capture device; construction takes the
+// engine interface bundle it operates through. NOTE(review): setup and
+// teardown presumably happen in the constructor/destructor (matching the
+// TbInterfaces pattern) — confirm in the .cc.
+class TbCaptureDevice
+{
+public:
+    TbCaptureDevice(TbInterfaces& Engine);
+    ~TbCaptureDevice(void);
+
+    // Id of the capture device within the engine.
+    int captureId;
+    // Connects / disconnects the capture device to/from a video channel.
+    void ConnectTo(int videoChannel);
+    void Disconnect(int videoChannel);
+private:
+    TbInterfaces& ViE;
+    webrtc::VideoCaptureModule* vcpm_;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/tb_external_transport.h b/trunk/src/video_engine/test/auto_test/interface/tb_external_transport.h
new file mode 100644
index 0000000..6578811
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/tb_external_transport.h
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// tb_external_transport.h
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
+
+#include <list>
+
+#include "common_types.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class ViENetwork;
+}
+
+// Allows to subscribe for callback when a frame is started being sent.
+class SendFrameCallback
+{
+public:
+    // Called once per frame (when a new RTP timestamp is detected) when the
+    // first data packet of the frame is being sent using the
+    // TbExternalTransport.SendPacket method.
+    virtual void FrameSent(unsigned int rtp_timestamp) = 0;
+protected:
+    // Interface class: only constructible/destructible by subclasses.
+    SendFrameCallback() {}
+    virtual ~SendFrameCallback() {}
+};
+
+// Allows to subscribe for callback when the first packet of a frame is
+// received.
+class ReceiveFrameCallback
+{
+public:
+    // Called once per frame (when a new RTP timestamp is detected)
+    // during the processing of the RTP packet queue in
+    // TbExternalTransport::ViEExternalTransportProcess.
+    virtual void FrameReceived(unsigned int rtp_timestamp) = 0;
+protected:
+    // Interface class: only constructible/destructible by subclasses.
+    ReceiveFrameCallback() {}
+    virtual ~ReceiveFrameCallback() {}
+};
+
+// External transport implementation for testing purposes.
+// A packet loss probability must be set in order to drop packets from the data
+// being sent to this class.
+// Will never drop packets from the first frame of a video sequence.
+class TbExternalTransport : public webrtc::Transport
+{
+public:
+    TbExternalTransport(webrtc::ViENetwork& vieNetwork);
+    ~TbExternalTransport(void);
+
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+
+    // Should only be called before/after traffic is being processed.
+    // Only one observer can be set (multiple calls will overwrite each other).
+    virtual void RegisterSendFrameCallback(SendFrameCallback* callback);
+
+    // Should only be called before/after traffic is being processed.
+    // Only one observer can be set (multiple calls will overwrite each other).
+    virtual void RegisterReceiveFrameCallback(ReceiveFrameCallback* callback);
+
+    // The probability of a packet of being dropped. Packets belonging to the
+    // first frame (same RTP timestamp) will never be dropped.
+    WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate);  // Rate in %
+    void SetNetworkDelay(WebRtc_Word64 delayMs);
+    void SetSSRCFilter(WebRtc_UWord32 SSRC);
+
+    void ClearStats();
+    void GetStats(WebRtc_Word32& numRtpPackets,
+                  WebRtc_Word32& numDroppedPackets,
+                  WebRtc_Word32& numRtcpPackets);
+
+    void SetTemporalToggle(unsigned char layers);
+    void EnableSSRCCheck();
+    unsigned int ReceivedSSRC();
+
+    void EnableSequenceNumberCheck();
+    unsigned short GetFirstSequenceNumber();
+
+protected:
+    static bool ViEExternalTransportRun(void* object);
+    bool ViEExternalTransportProcess();
+private:
+    WebRtc_Word64 NowMs();
+
+    // Maximum payload bytes stored per queued packet.
+    enum
+    {
+        KMaxPacketSize = 1650
+    };
+    // Presumably the poll interval of the delivery thread — confirm in
+    // the .cc.
+    enum
+    {
+        KMaxWaitTimeMs = 100
+    };
+    // One queued packet plus the time at which it should be delivered.
+    typedef struct
+    {
+        WebRtc_Word8 packetBuffer[KMaxPacketSize];
+        WebRtc_Word32 length;
+        WebRtc_Word32 channel;
+        WebRtc_Word64 receiveTime;
+    } VideoPacket;
+
+    webrtc::ViENetwork& _vieNetwork;
+    webrtc::ThreadWrapper& _thread;
+    webrtc::EventWrapper& _event;
+    webrtc::CriticalSectionWrapper& _crit;
+    webrtc::CriticalSectionWrapper& _statCrit;
+
+    WebRtc_Word32 _lossRate;
+    WebRtc_Word64 _networkDelayMs;
+    WebRtc_Word32 _rtpCount;
+    WebRtc_Word32 _rtcpCount;
+    WebRtc_Word32 _dropCount;
+
+    // Queues of packets awaiting (delayed) delivery.
+    std::list<VideoPacket*> _rtpPackets;
+    std::list<VideoPacket*> _rtcpPackets;
+
+    SendFrameCallback* _send_frame_callback;
+    ReceiveFrameCallback* _receive_frame_callback;
+
+    unsigned char _temporalLayers;
+    unsigned short _seqNum;
+    unsigned short _sendPID;
+    unsigned char _receivedPID;
+    bool _switchLayer;
+    unsigned char _currentRelayLayer;
+    unsigned int _lastTimeMs;
+
+    bool _checkSSRC;
+    WebRtc_UWord32 _lastSSRC;
+    bool _filterSSRC;
+    WebRtc_UWord32 _SSRC;
+    bool _checkSequenceNumber;
+    WebRtc_UWord16 _firstSequenceNumber;
+
+    // Keep track of the first RTP timestamp so we don't do packet loss on
+    // the first frame.
+    WebRtc_UWord32 _firstRTPTimestamp;
+    // Track RTP timestamps so we invoke callbacks properly (if registered).
+    WebRtc_UWord32 _lastSendRTPTimestamp;
+    WebRtc_UWord32 _lastReceiveRTPTimestamp;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/tb_interfaces.h b/trunk/src/video_engine/test/auto_test/interface/tb_interfaces.h
new file mode 100644
index 0000000..63e78a3
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/tb_interfaces.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
+
+#include "vie_autotest_defines.h"
+
+#include "common_types.h"
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_image_process.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_encryption.h"
+#include "vie_defines.h"
+
+// This class deals with all the tedium of setting up video engine interfaces.
+// It does its work in constructor and destructor, so keeping it in scope is
+// enough.
+class TbInterfaces
+{
+public:
+    TbInterfaces(const char* test_name);
+    ~TbInterfaces(void);
+
+    webrtc::VideoEngine* video_engine;
+    webrtc::ViEBase* base;
+    webrtc::ViECapture* capture;
+    webrtc::ViERender* render;
+    webrtc::ViERTP_RTCP* rtp_rtcp;
+    webrtc::ViECodec* codec;
+    webrtc::ViENetwork* network;
+    webrtc::ViEImageProcess* image_process;
+    webrtc::ViEEncryption* encryption;
+
+    int LastError() {
+        return base->LastError();
+    }
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/tb_video_channel.h b/trunk/src/video_engine/test/auto_test/interface/tb_video_channel.h
new file mode 100644
index 0000000..5961e62
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/tb_video_channel.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
+
+#include "tb_interfaces.h"
+class TbVideoChannel
+{
+public:
+    TbVideoChannel(TbInterfaces& Engine,
+                   webrtc::VideoCodecType sendCodec = webrtc::kVideoCodecVP8,
+                   int width = 352, int height = 288, int frameRate = 30,
+                   int startBitrate = 300);
+
+    ~TbVideoChannel(void);
+
+    void SetFrameSettings(int width, int height, int frameRate);
+
+    void StartSend(const unsigned short rtpPort = 11000,
+                   const char* ipAddress = "127.0.0.1");
+
+    void StopSend();
+
+    void StartReceive(const unsigned short rtpPort = 11000);
+
+    void StopReceive();
+
+    int videoChannel;
+private:
+    TbInterfaces& ViE;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest.h
new file mode 100644
index 0000000..a508cd4
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest.h
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest.h
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
+
+#include "common_types.h"
+
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_hardware.h"
+#include "voe_audio_processing.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_file.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_defines.h"
+#include "vie_errors.h"
+#include "video_render_defines.h"
+
+#include "vie_autotest_defines.h"
+
+#ifndef WEBRTC_ANDROID
+#include <string>
+#endif
+
+class TbCaptureDevice;
+class TbInterfaces;
+class TbVideoChannel;
+class ViEToFileRenderer;
+
+// This class provides a bunch of methods, implemented across several .cc
+// files, which runs tests on the video engine. All methods will report
+// errors using standard googletest macros, except when marked otherwise.
+class ViEAutoTest
+{
+public:
+    ViEAutoTest(void* window1, void* window2);
+    ~ViEAutoTest();
+
+    // These three are special and should not be run in a googletest harness.
+    // They keep track of their errors by themselves and return the number
+    // of errors.
+    int ViELoopbackCall();
+    int ViESimulcastCall();
+    int ViECustomCall();
+
+    // All functions except the three above are meant to run in a
+    // googletest harness.
+    void ViEStandardTest();
+    void ViEExtendedTest();
+    void ViEAPITest();
+
+    // vie_autotest_base.cc
+    void ViEBaseStandardTest();
+    void ViEBaseExtendedTest();
+    void ViEBaseAPITest();
+
+    // vie_autotest_capture.cc
+    void ViECaptureStandardTest();
+    void ViECaptureExtendedTest();
+    void ViECaptureAPITest();
+    void ViECaptureExternalCaptureTest();
+
+    // vie_autotest_codec.cc
+    void ViECodecStandardTest();
+    void ViECodecExtendedTest();
+    void ViECodecExternalCodecTest();
+    void ViECodecAPITest();
+
+    // vie_autotest_encryption.cc
+    void ViEEncryptionStandardTest();
+    void ViEEncryptionExtendedTest();
+    void ViEEncryptionAPITest();
+
+    // vie_autotest_file.cc
+    void ViEFileStandardTest();
+    void ViEFileExtendedTest();
+    void ViEFileAPITest();
+
+    // vie_autotest_image_process.cc
+    void ViEImageProcessStandardTest();
+    void ViEImageProcessExtendedTest();
+    void ViEImageProcessAPITest();
+
+    // vie_autotest_network.cc
+    void ViENetworkStandardTest();
+    void ViENetworkExtendedTest();
+    void ViENetworkAPITest();
+
+    // vie_autotest_render.cc
+    void ViERenderStandardTest();
+    void ViERenderExtendedTest();
+    void ViERenderAPITest();
+
+    // vie_autotest_rtp_rtcp.cc
+    void ViERtpRtcpStandardTest();
+    void ViERtpRtcpExtendedTest();
+    void ViERtpRtcpAPITest();
+
+    // vie_autotest_rtp_fuzz.cc
+    void ViERtpTryInjectingRandomPacketsIntoRtpStream(long rand_seed);
+
+private:
+    void PrintAudioCodec(const webrtc::CodecInst audioCodec);
+    void PrintVideoCodec(const webrtc::VideoCodec videoCodec);
+
+    // Sets up rendering so the capture device output goes to window 1 and
+    // the video engine output goes to window 2.
+    void RenderCaptureDeviceAndOutputStream(TbInterfaces* video_engine,
+                                            TbVideoChannel* video_channel,
+                                            TbCaptureDevice* capture_device);
+
+    void* _window1;
+    void* _window2;
+
+    webrtc::VideoRenderType _renderType;
+    webrtc::VideoRender* _vrm1;
+    webrtc::VideoRender* _vrm2;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_android.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_android.h
new file mode 100644
index 0000000..53b8cc3
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_android.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
+
+class ViEAutoTestAndroid
+{
+public:
+	static int RunAutotest(int testSelection,
+						   int subTestSelection,
+						   void* window1,
+						   void* window2,
+						   void* javaVM,
+						   void* env,
+						   void* context);
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_defines.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_defines.h
new file mode 100644
index 0000000..34d489b
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_defines.h
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_defines.h
+//
+
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
+
+#include <cassert>
+#include <stdarg.h>
+#include <stdio.h>
+#include <string>
+
+#include "engine_configurations.h"
+#include "gtest/gtest.h"
+
+#if defined(_WIN32)
+#include <windows.h>
+#elif defined (WEBRTC_ANDROID)
+#include <android/log.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <string.h>
+#include <time.h>
+#include <sys/time.h>
+#include <stdlib.h>
+#endif
+
+// Choose how to log
+//#define VIE_LOG_TO_FILE
+#define VIE_LOG_TO_STDOUT
+
+// Choose one way to test error
+#define VIE_ASSERT_ERROR
+
+#define VIE_LOG_FILE_NAME "ViEAutotestLog.txt"
+
+#undef RGB
+#define RGB(r,g,b) (((r)) | ((g) << 8) | ((b) << 16))
+
+enum {
+  KAutoTestSleepTimeMs = 5000
+};
+
+struct AutoTestSize {
+  unsigned int width;
+  unsigned int height;
+  AutoTestSize() :
+    width(0), height(0) {
+  }
+  AutoTestSize(unsigned int iWidth, unsigned int iHeight) :
+    width(iWidth), height(iHeight) {
+  }
+};
+
+struct AutoTestOrigin {
+  unsigned int x;
+  unsigned int y;
+  AutoTestOrigin() :
+    x(0), y(0) {
+  }
+  AutoTestOrigin(unsigned int iX, unsigned int iY) :
+    x(iX), y(iY) {
+  }
+};
+
+struct AutoTestRect {
+  AutoTestSize size;
+  AutoTestOrigin origin;
+  AutoTestRect() :
+    size(), origin() {
+  }
+
+  AutoTestRect(unsigned int iWidth, unsigned int iHeight, unsigned int iX, unsigned int iY) :
+    size(iWidth, iHeight), origin(iX, iY) {
+  }
+
+  void Copy(AutoTestRect iRect) {
+    origin.x = iRect.origin.x;
+    origin.y = iRect.origin.y;
+    size.width = iRect.size.width;
+    size.height = iRect.size.height;
+  }
+};
+
+// ============================================
+
+class ViETest {
+ public:
+  static int Init() {
+#ifdef VIE_LOG_TO_FILE
+    log_file_ = fopen(VIE_LOG_FILE_NAME, "w+t");
+#else
+    log_file_ = NULL;
+#endif
+    log_str_ = new char[kMaxLogSize];
+    memset(log_str_, 0, kMaxLogSize);
+    return 0;
+  }
+
+  static int Terminate() {
+    if (log_file_) {
+      fclose(log_file_);
+      log_file_ = NULL;
+    }
+    if (log_str_) {
+      delete[] log_str_;
+      log_str_ = NULL;
+    }
+    return 0;
+  }
+
+  static void Log(const char* fmt, ...) {
+    va_list va;
+    va_start(va, fmt);
+    memset(log_str_, 0, kMaxLogSize);
+    vsnprintf(log_str_, kMaxLogSize, fmt, va);
+    va_end(va);
+
+    WriteToSuitableOutput(log_str_);
+  }
+
+  // Writes to a suitable output, depending on platform and log mode.
+  static void WriteToSuitableOutput(const char* message) {
+#ifdef VIE_LOG_TO_FILE
+    if (log_file_)
+    {
+      fwrite(message, 1, strlen(message), log_file_);
+      fwrite("\n", 1, 1, log_file_);
+      fflush(log_file_);
+    }
+#endif
+#ifdef VIE_LOG_TO_STDOUT
+#ifdef WEBRTC_ANDROID
+    __android_log_write(ANDROID_LOG_DEBUG, "*WebRTCN*", message);
+#else
+    printf("%s\n", message);
+#endif
+#endif
+  }
+
+  // Deprecated(phoglund): Prefer to use googletest macros in all cases
+  // except the custom call case.
+  static int TestError(bool expr, const char* fmt, ...) {
+    if (!expr) {
+      va_list va;
+      va_start(va, fmt);
+      memset(log_str_, 0, kMaxLogSize);
+      vsnprintf(log_str_, kMaxLogSize, fmt, va);
+#ifdef WEBRTC_ANDROID
+      __android_log_write(ANDROID_LOG_ERROR, "*WebRTCN*", log_str_);
+#endif
+      WriteToSuitableOutput(log_str_);
+      va_end(va);
+
+      AssertError(log_str_);
+      return 1;
+    }
+    return 0;
+  }
+
+  // Returns a suitable path to write trace and result files to.
+  // You should always use this when you want to write output files.
+  // The returned path is guaranteed to end with a path separator.
+  // This function may be run at any time during the program's execution.
+  // Implemented in vie_autotest.cc
+  static std::string GetResultOutputPath();
+
+private:
+  static void AssertError(const char* message) {
+#ifdef VIE_ASSERT_ERROR
+    assert(false);
+#endif
+  }
+
+  static FILE* log_file_;
+  enum {
+    kMaxLogSize = 512
+  };
+  static char* log_str_;
+};
+
+// milliseconds
+#if defined(_WIN32)
+#define AutoTestSleep ::Sleep
+#elif defined(WEBRTC_MAC_INTEL)
+#define AutoTestSleep(x) usleep((x) * 1000)
+#elif defined(WEBRTC_LINUX)
+namespace {
+  void Sleep(unsigned long x) {
+    timespec t;
+    t.tv_sec = x/1000;
+    t.tv_nsec = (x-(x/1000)*1000)*1000000;
+    nanosleep(&t,NULL);
+  }
+}
+#define AutoTestSleep ::Sleep
+#endif
+
+#ifdef WEBRTC_ANDROID
+namespace {
+  void Sleep(unsigned long x) {
+    timespec t;
+    t.tv_sec = x/1000;
+    t.tv_nsec = (x-(x/1000)*1000)*1000000;
+    nanosleep(&t,NULL);
+  }
+}
+
+#define AutoTestSleep ::Sleep
+#endif
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_linux.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_linux.h
new file mode 100644
index 0000000..fb1d319
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_linux.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
+
+#include "vie_autotest_window_manager_interface.h"
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+
+// Forward declaration
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
+{
+public:
+    ViEAutoTestWindowManager();
+    virtual ~ViEAutoTestWindowManager();
+    virtual void* GetWindow1();
+    virtual void* GetWindow2();
+    virtual int TerminateWindows();
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, void* window1Title,
+                              void* window2Title);
+    virtual bool SetTopmostWindow();
+
+private:
+    int ViECreateWindow(Window *outWindow, Display **outDisplay, int xpos,
+                        int ypos, int width, int height, char* title);
+    int ViEDestroyWindow(Window *window, Display *display);
+
+    Window _hwnd1;
+    Window _hwnd2;
+    Display* _hdsp1;
+    Display* _hdsp2;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_mac_carbon.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_mac_carbon.h
new file mode 100644
index 0000000..6c35baa
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_mac_carbon.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "EngineConfigurations.h"
+
+#if defined(CARBON_RENDERING)
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_CARBON_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_CARBON_H_
+
+#include "vie_autotest_window_manager_interface.h"
+
+// #define HIVIEWREF_MODE 1
+
+#include <Carbon/Carbon.h>
+#import <Cocoa/Cocoa.h>
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
+{
+public:
+    ViEAutoTestWindowManager();
+    virtual ~ViEAutoTestWindowManager();
+    virtual void* GetWindow1();
+    virtual void* GetWindow2();
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, char* window1Title,
+                              char* window2Title);
+    virtual int TerminateWindows();
+    virtual bool SetTopmostWindow();
+
+    // event handler static methods
+static pascal OSStatus HandleWindowEvent (EventHandlerCallRef nextHandler,
+    EventRef theEvent, void* userData);
+static pascal OSStatus HandleHIViewEvent (EventHandlerCallRef nextHandler,
+    EventRef theEvent, void* userData);
+private:
+    WindowRef* _carbonWindow1;
+    WindowRef* _carbonWindow2;
+    HIViewRef* _hiView1;
+    HIViewRef* _hiView2;
+
+    EventHandlerRef _carbonWindow1EventHandlerRef;
+    EventHandlerRef _carbonWindow2EventHandlerRef;
+    EventHandlerRef _carbonHIView1EventHandlerRef;
+    EventHandlerRef _carbonHIView2EventHandlerRef;
+
+};
+
+@interface AutoTestClass : NSObject
+{
+}
+
+-(void)autoTestWithArg:(NSString*)answerFile;
+@end
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_CARBON_H_
+#endif  // CARBON_RENDERING
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h
new file mode 100644
index 0000000..745431d
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
+
+#include "vie_autotest_window_manager_interface.h"
+#define MAC_COCOA_USE_NSRUNLOOP 1
+
+@class CocoaRenderView;
+
+#import <Cocoa/Cocoa.h>
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface {
+ public:
+  ViEAutoTestWindowManager();
+  virtual ~ViEAutoTestWindowManager() {}
+  virtual void* GetWindow1();
+  virtual void* GetWindow2();
+  virtual int CreateWindows(AutoTestRect window1Size,
+                            AutoTestRect window2Size,
+                            void* window1Title,
+                            void* window2Title);
+  virtual int TerminateWindows();
+  virtual bool SetTopmostWindow();
+
+ private:
+  CocoaRenderView* _cocoaRenderView1;
+  CocoaRenderView* _cocoaRenderView2;
+  NSWindow* outWindow1_;
+  NSWindow* outWindow2_;
+};
+
+@interface AutoTestClass : NSObject {
+  int    argc_;
+  char** argv_;
+  int    result_;
+}
+
+-(void)setArgc:(int)argc argv:(char**)argv;
+-(int) result;
+-(void)autoTestWithArg:(NSObject*)ignored;
+
+@end
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
+#endif  // COCOA_RENDERING
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_main.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_main.h
new file mode 100644
index 0000000..d24de5c
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_main.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
+
+#include <string>
+#include <map>
+
+class ViEAutoTestMain {
+ public:
+  ViEAutoTestMain();
+
+  // Runs the test according to the specified arguments.
+  // Pass in --automated to run in automated mode; interactive
+  // mode is default. All usual googletest flags also apply.
+  int RunTests(int argc, char** argv);
+
+ private:
+  std::map<int, std::string> index_to_test_method_map_;
+
+  static const int kInvalidChoice = -1;
+
+  // Starts interactive mode.
+  bool RunInteractiveMode();
+  // Prompts the user for a specific test method in the provided test case.
+  // Returns 0 on success, nonzero otherwise.
+  int RunSpecificTestCaseIn(const std::string test_case_name);
+  // Asks the user for a particular test case to run.
+  int AskUserForTestCase();
+  // Retrieves a number from the user in the interval
+  // [min_allowed, max_allowed]. Returns kInvalidChoice on failure.
+  int AskUserForNumber(int min_allowed, int max_allowed);
+  // Runs all tests matching the provided filter. * are wildcards.
+  // Returns the test runner result (0 == OK).
+  int RunTestMatching(const std::string test_case,
+                      const std::string test_method);
+  // Runs a non-gtest test case. Choice must be [7,9]. Returns 0 on success.
+  int RunSpecialTestCase(int choice);
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h
new file mode 100644
index 0000000..6dd043e
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  vie_autotest_window_manager_interface.h
+ */
+
+#include "vie_autotest_defines.h"
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
+
+class ViEAutoTestWindowManagerInterface
+{
+public:
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, void* window1Title,
+                              void* window2Title) = 0;
+    virtual int TerminateWindows() = 0;
+    virtual void* GetWindow1() = 0;
+    virtual void* GetWindow2() = 0;
+    virtual bool SetTopmostWindow() = 0;
+    virtual ~ViEAutoTestWindowManagerInterface() {}
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_autotest_windows.h b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_windows.h
new file mode 100644
index 0000000..314a121
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_autotest_windows.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
+
+#include "vie_autotest_window_manager_interface.h"
+#include "engine_configurations.h"
+
+#include <windows.h>
+#define TITLE_LENGTH 1024
+
+// Forward declaration
+namespace webrtc {
+class ThreadWrapper;
+class CriticalSectionWrapper;
+}
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
+{
+public:
+    ViEAutoTestWindowManager();
+    virtual ~ViEAutoTestWindowManager();
+    virtual void* GetWindow1();
+    virtual void* GetWindow2();
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, void* window1Title,
+                              void* window2Title);
+    virtual int TerminateWindows();
+    virtual bool SetTopmostWindow();
+protected:
+    static bool EventProcess(void* obj);
+    bool EventLoop();
+
+private:
+    int ViECreateWindow(HWND &hwndMain, int xPos, int yPos, int width,
+                        int height, TCHAR* className);
+    int ViEDestroyWindow(HWND& hwnd);
+
+    void* _window1;
+    void* _window2;
+
+    bool _terminate;
+    webrtc::ThreadWrapper& _eventThread;
+    webrtc::CriticalSectionWrapper& _crit;
+    HWND _hwndMain;
+    HWND _hwnd1;
+    HWND _hwnd2;
+
+    AutoTestRect _hwnd1Size;
+    AutoTestRect _hwnd2Size;
+    TCHAR _hwnd1Title[TITLE_LENGTH];
+    TCHAR _hwnd2Title[TITLE_LENGTH];
+
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h b/trunk/src/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h
new file mode 100644
index 0000000..a9560bc
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
+
+#include <string>
+
+class FrameDropDetector;
+class ViEToFileRenderer;
+
+// This class contains comparison tests, which will exercise video engine
+// functionality and then run comparison tests on the result using PSNR and
+// SSIM algorithms. These tests are intended mostly as sanity checks so that
+// we know we are outputting roughly the right thing and not random noise or
+// black screens.
+//
+// We will set up a fake ExternalCapture device which will pose as a webcam
+// and read the input from the provided raw YUV file. Output will be written
+// as a local preview in the local file renderer; the remote side output gets
+// written to the provided remote file renderer.
+//
+// The local preview is a straight, unaltered copy of the input. This can be
+// useful for comparisons if the test method contains several stages where the
+// input is restarted between stages.
+class ViEFileBasedComparisonTests {
+ public:
+  // Test a typical simple call setup. Returns false if the input file
+  // could not be opened; reports errors using googletest macros otherwise.
+  bool TestCallSetup(
+      const std::string& i420_test_video_path,
+      int width,
+      int height,
+      ViEToFileRenderer* local_file_renderer,
+      ViEToFileRenderer* remote_file_renderer);
+
+  // Tries testing the I420 and VP8 codecs in turn. Returns false if the
+  // input file could not be opened; reports errors using googletest macros
+  // otherwise.
+  bool TestCodecs(
+      const std::string& i420_video_file,
+      int width,
+      int height,
+      ViEToFileRenderer* local_file_renderer,
+      ViEToFileRenderer* remote_file_renderer);
+
+  // Runs a full stack test using the VP8 codec. Tests the full stack and uses
+  // RTP timestamps to sync frames between the endpoints.
+  void TestFullStack(
+      const std::string& i420_video_file,
+      int width,
+      int height,
+      int bit_rate_kbps,
+      int packet_loss_percent,
+      int network_delay_ms,
+      ViEToFileRenderer* local_file_renderer,
+      ViEToFileRenderer* remote_file_renderer,
+      FrameDropDetector* frame_drop_detector);
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
diff --git a/trunk/src/video_engine/test/auto_test/interface/vie_window_manager_factory.h b/trunk/src/video_engine/test/auto_test/interface/vie_window_manager_factory.h
new file mode 100644
index 0000000..a85280d
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/interface/vie_window_manager_factory.h
@@ -0,0 +1,25 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
+
+class ViEAutoTestWindowManagerInterface;
+
+class ViEWindowManagerFactory {
+ public:
+  // This method is implemented in different files depending on platform.
+  // The caller is responsible for freeing the resulting object using
+  // the delete operator.
+  static ViEAutoTestWindowManagerInterface*
+  CreateWindowManagerForCurrentPlatform();
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
diff --git a/trunk/src/video_engine/test/auto_test/media/captureDeviceImage.bmp b/trunk/src/video_engine/test/auto_test/media/captureDeviceImage.bmp
new file mode 100644
index 0000000..6cd34ba
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/media/captureDeviceImage.bmp
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/media/captureDeviceImage.jpg b/trunk/src/video_engine/test/auto_test/media/captureDeviceImage.jpg
new file mode 100644
index 0000000..3bb3ba4
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/media/captureDeviceImage.jpg
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/media/renderStartImage.bmp b/trunk/src/video_engine/test/auto_test/media/renderStartImage.bmp
new file mode 100644
index 0000000..c443a58
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/media/renderStartImage.bmp
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/media/renderStartImage.jpg b/trunk/src/video_engine/test/auto_test/media/renderStartImage.jpg
new file mode 100644
index 0000000..b10a842
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/media/renderStartImage.jpg
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/media/renderTimeoutImage.bmp b/trunk/src/video_engine/test/auto_test/media/renderTimeoutImage.bmp
new file mode 100644
index 0000000..8159bad
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/media/renderTimeoutImage.bmp
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/media/renderTimeoutImage.jpg b/trunk/src/video_engine/test/auto_test/media/renderTimeoutImage.jpg
new file mode 100644
index 0000000..cb34d67
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/media/renderTimeoutImage.jpg
Binary files differ
diff --git a/trunk/src/video_engine/test/auto_test/primitives/base_primitives.cc b/trunk/src/video_engine/test/auto_test/primitives/base_primitives.cc
new file mode 100644
index 0000000..b8c827e
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/base_primitives.cc
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "base_primitives.h"
+
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+#include "video_capture_factory.h"
+
+void TestI420CallSetup(webrtc::ViECodec* codec_interface,
+                       webrtc::VideoEngine* video_engine,
+                       webrtc::ViEBase* base_interface,
+                       webrtc::ViENetwork* network_interface,
+                       int video_channel,
+                       const unsigned char *device_name) {
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+
+  // Set up the codec interface with all known receive codecs and with
+  // I420 as the send codec.
+  for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(i, video_codec));
+
+    // Try to keep the test frame size small when I420.
+    if (video_codec.codecType == webrtc::kVideoCodecI420) {
+      video_codec.width = 176;
+      video_codec.height = 144;
+      EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, video_codec));
+    }
+
+    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
+  }
+
+  // Verify that we really found the I420 codec.
+  EXPECT_EQ(0, codec_interface->GetSendCodec(video_channel, video_codec));
+  EXPECT_EQ(webrtc::kVideoCodecI420, video_codec.codecType);
+
+  // Set up senders and receivers.
+  char version[1024] = "";
+  EXPECT_EQ(0, base_interface->GetVersion(version));
+  ViETest::Log("\nUsing WebRTC Video Engine version: %s", version);
+
+  const char *ipAddress = "127.0.0.1";
+  WebRtc_UWord16 rtpPortListen = 6100;
+  WebRtc_UWord16 rtpPortSend = 6100;
+  EXPECT_EQ(0, network_interface->SetLocalReceiver(video_channel,
+                                                   rtpPortListen));
+  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
+  EXPECT_EQ(0, network_interface->SetSendDestination(video_channel, ipAddress,
+                                                     rtpPortSend));
+  EXPECT_EQ(0, base_interface->StartSend(video_channel));
+
+  // Call started.
+  ViETest::Log("Call started");
+
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  // Done.
+  EXPECT_EQ(0, base_interface->StopSend(video_channel));
+}
diff --git a/trunk/src/video_engine/test/auto_test/primitives/base_primitives.h b/trunk/src/video_engine/test/auto_test/primitives/base_primitives.h
new file mode 100644
index 0000000..19b6f23
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/base_primitives.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
+
+namespace webrtc {
+class VideoEngine;
+class ViEBase;
+class ViECodec;
+class ViENetwork;
+}
+
+// Tests an I420-to-I420 call. This test exercises the most basic WebRTC
+// functionality by training the codec interface to recognize the most common
+// codecs, and then initiating an I420 call. A video channel with a capture
+// device must be set up prior to this call.
+void TestI420CallSetup(webrtc::ViECodec* codec_interface,
+                       webrtc::VideoEngine* video_engine,
+                       webrtc::ViEBase* base_interface,
+                       webrtc::ViENetwork* network_interface,
+                       int video_channel,
+                       const unsigned char *device_name);
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
diff --git a/trunk/src/video_engine/test/auto_test/primitives/codec_primitives.cc b/trunk/src/video_engine/test/auto_test/primitives/codec_primitives.cc
new file mode 100644
index 0000000..2d43440
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/codec_primitives.cc
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "codec_primitives.h"
+
+#include "general_primitives.h"
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+#include "vie_to_file_renderer.h"
+#include "video_capture_factory.h"
+#include "tb_interfaces.h"
+
+// Helper functions.
+
+void TestCodecImageProcess(webrtc::VideoCodec video_codec,
+                           webrtc::ViECodec* codec_interface,
+                           int video_channel,
+                           webrtc::ViEImageProcess* image_process) {
+
+  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, video_codec));
+  FrameCounterEffectFilter frame_counter;
+  EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
+                                                         frame_counter));
+  AutoTestSleep (KAutoTestSleepTimeMs);
+
+  int max_number_of_rendered_frames = video_codec.maxFramerate *
+      KAutoTestSleepTimeMs / 1000;
+
+  if (video_codec.codecType == webrtc::kVideoCodecI420) {
+    // Because I420 needs huge bandwidth, rate control can set the
+    // frame rate very low. This happens since we use the same channel
+    // as the one we just tested with VP8.
+    EXPECT_GT(frame_counter.numFrames, 0);
+  } else {
+#ifdef WEBRTC_ANDROID
+    // Special case to get the autotest to pass on some slow devices
+    EXPECT_GT(frame_counter.numFrames, max_number_of_rendered_frames / 6);
+#else
+    EXPECT_GT(frame_counter.numFrames, max_number_of_rendered_frames / 4);
+#endif
+  }
+  EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(video_channel));
+}
+
+// Test switching from I420 to VP8 as send codec and make sure that
+// the codec observer gets called after the switch.
+void TestCodecCallbacks(webrtc::ViEBase *& base_interface,
+                        webrtc::ViECodec *codec_interface,
+                        int video_channel,
+                        int forced_codec_width,
+                        int forced_codec_height) {
+
+  // Set I420 as send codec so we don't make any assumptions about what
+  // we currently have as send codec:
+  SetSendCodec(webrtc::kVideoCodecI420, codec_interface, video_channel,
+               forced_codec_width, forced_codec_height);
+
+  // Register the observer:
+  ViEAutotestCodecObserver codec_observer;
+  EXPECT_EQ(0, codec_interface->RegisterEncoderObserver(video_channel,
+                                                        codec_observer));
+  EXPECT_EQ(0, codec_interface->RegisterDecoderObserver(video_channel,
+                                                   codec_observer));
+
+  // Make the switch.
+  ViETest::Log("Testing codec callbacks...");
+
+  SetSendCodec(webrtc::kVideoCodecVP8, codec_interface, video_channel,
+               forced_codec_width, forced_codec_height);
+
+  AutoTestSleep (KAutoTestSleepTimeMs);
+
+  // Verify that we got the right codec.
+  EXPECT_EQ(webrtc::kVideoCodecVP8, codec_observer.incomingCodec.codecType);
+
+  // Clean up.
+  EXPECT_EQ(0, codec_interface->DeregisterEncoderObserver(video_channel));
+  EXPECT_EQ(0, codec_interface->DeregisterDecoderObserver(video_channel));
+
+  // Verify results.
+  EXPECT_GT(codec_observer.incomingCodecCalled, 0);
+  EXPECT_GT(codec_observer.incomingRatecalled, 0);
+  EXPECT_GT(codec_observer.outgoingRatecalled, 0);
+}
+
+void TestCodecs(const TbInterfaces& interfaces,
+                int capture_id,
+                int video_channel,
+                int forced_codec_width,
+                int forced_codec_height) {
+  webrtc::VideoEngine *video_engine_interface = interfaces.video_engine;
+  webrtc::ViEBase *base_interface = interfaces.base;
+  webrtc::ViECapture *capture_interface = interfaces.capture;
+  webrtc::ViERender *render_interface = interfaces.render;
+  webrtc::ViECodec *codec_interface = interfaces.codec;
+  webrtc::ViENetwork *network_interface = interfaces.network;
+
+  // ***************************************************************
+  // Engine ready. Begin testing class
+  // ***************************************************************
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof (webrtc::VideoCodec));
+
+  // Set up all receive codecs. This basically trains the codec interface
+  // to be able to recognize all receive codecs based on payload type.
+  for (int idx = 0; idx < codec_interface->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(idx, video_codec));
+    SetSuitableResolution(&video_codec,
+                          forced_codec_width,
+                          forced_codec_height);
+
+    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
+  }
+  const char *ip_address = "127.0.0.1";
+  const unsigned short rtp_port = 6000;
+  EXPECT_EQ(0, network_interface->SetLocalReceiver(video_channel, rtp_port));
+  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
+  EXPECT_EQ(0, network_interface->SetSendDestination(video_channel, ip_address,
+                                                     rtp_port));
+  EXPECT_EQ(0, base_interface->StartSend(video_channel));
+
+  // Run all found codecs
+  webrtc::ViEImageProcess *image_process =
+      webrtc::ViEImageProcess::GetInterface(video_engine_interface);
+  EXPECT_TRUE(image_process != NULL);
+
+  ViETest::Log("Loop through all codecs for %d seconds",
+               KAutoTestSleepTimeMs / 1000);
+  for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(i, video_codec));
+
+    if (video_codec.codecType == webrtc::kVideoCodecRED ||
+        video_codec.codecType == webrtc::kVideoCodecULPFEC) {
+      ViETest::Log("\t %d. %s not tested", i, video_codec.plName);
+    } else {
+      ViETest::Log("\t %d. %s", i, video_codec.plName);
+      SetSuitableResolution(&video_codec, forced_codec_width,
+                            forced_codec_height);
+      TestCodecImageProcess(video_codec, codec_interface, video_channel,
+                            image_process);
+    }
+  }
+  image_process->Release();
+
+  TestCodecCallbacks(base_interface, codec_interface, video_channel,
+                     forced_codec_width, forced_codec_height);
+
+  ViETest::Log("Done!");
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_EQ(0, base_interface->StopSend(video_channel));
+  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
+  EXPECT_EQ(0, render_interface->StopRender(capture_id));
+  EXPECT_EQ(0, render_interface->StopRender(video_channel));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
+  EXPECT_EQ(0, capture_interface->DisconnectCaptureDevice(video_channel));
+  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
+}
+
+void SetSendCodec(webrtc::VideoCodecType of_type,
+                  webrtc::ViECodec* codec_interface,
+                  int video_channel,
+                  int forced_codec_width,
+                  int forced_codec_height) {
+  webrtc::VideoCodec codec;
+  bool ok;
+  EXPECT_TRUE(ok = FindSpecificCodec(of_type, codec_interface, &codec));
+  if (!ok) {
+    return;
+  }
+
+  SetSuitableResolution(&codec, forced_codec_width, forced_codec_height);
+  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, codec));
+}
diff --git a/trunk/src/video_engine/test/auto_test/primitives/codec_primitives.h b/trunk/src/video_engine/test/auto_test/primitives/codec_primitives.h
new file mode 100644
index 0000000..e778044
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/codec_primitives.h
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CODEC_PRIMITIVES_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CODEC_PRIMITIVES_H_
+
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+
+class TbInterfaces;
+
+// Tests that a codec actually renders frames by registering a basic
+// render effect filter on the codec and then running it. This test is
+// quite lenient on the number of frames that get rendered, so it should not
+// be seen as an end-user-visible quality measure - it is more a sanity check
+// that the codec at least gets some frames through.
+
+// The codec resolution can be forced by specifying the forced* variables
+// (pass in kDoNotForceResolution if you don't care).
+void TestCodecs(const TbInterfaces& interfaces,
+                int capture_id,
+                int video_channel,
+                int forced_codec_width,
+                int forced_codec_height);
+
+// This helper function will set the send codec in the codec interface to a
+// codec of the specified type. It will generate a test failure if we do not
+// support the provided codec type.
+
+// The codec resolution can be forced by specifying the forced* variables
+// (pass in kDoNotForceResolution if you don't care).
+void SetSendCodec(webrtc::VideoCodecType of_type,
+                  webrtc::ViECodec* codec_interface,
+                  int video_channel,
+                  int forced_codec_width,
+                  int forced_codec_height);
+
+class ViEAutotestCodecObserver: public webrtc::ViEEncoderObserver,
+                                public webrtc::ViEDecoderObserver {
+ public:
+  int incomingCodecCalled;
+  int incomingRatecalled;
+  int outgoingRatecalled;
+
+  unsigned char lastPayloadType;
+  unsigned short lastWidth;
+  unsigned short lastHeight;
+
+  unsigned int lastOutgoingFramerate;
+  unsigned int lastOutgoingBitrate;
+  unsigned int lastIncomingFramerate;
+  unsigned int lastIncomingBitrate;
+
+  webrtc::VideoCodec incomingCodec;
+
+  ViEAutotestCodecObserver() {
+    incomingCodecCalled = 0;
+    incomingRatecalled = 0;
+    outgoingRatecalled = 0;
+    lastPayloadType = 0;
+    lastWidth = 0;
+    lastHeight = 0;
+    lastOutgoingFramerate = 0;
+    lastOutgoingBitrate = 0;
+    lastIncomingFramerate = 0;
+    lastIncomingBitrate = 0;
+    memset(&incomingCodec, 0, sizeof(incomingCodec));
+  }
+  virtual void IncomingCodecChanged(const int videoChannel,
+                                    const webrtc::VideoCodec& videoCodec) {
+    incomingCodecCalled++;
+    lastPayloadType = videoCodec.plType;
+    lastWidth = videoCodec.width;
+    lastHeight = videoCodec.height;
+
+    memcpy(&incomingCodec, &videoCodec, sizeof(videoCodec));
+  }
+
+  virtual void IncomingRate(const int videoChannel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) {
+    incomingRatecalled++;
+    lastIncomingFramerate += framerate;
+    lastIncomingBitrate += bitrate;
+  }
+
+  virtual void OutgoingRate(const int videoChannel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) {
+    outgoingRatecalled++;
+    lastOutgoingFramerate += framerate;
+    lastOutgoingBitrate += bitrate;
+  }
+
+  virtual void RequestNewKeyFrame(const int videoChannel) {
+  }
+};
+
+class FrameCounterEffectFilter : public webrtc::ViEEffectFilter
+{
+ public:
+  int numFrames;
+  FrameCounterEffectFilter() {
+    numFrames = 0;
+  }
+  virtual ~FrameCounterEffectFilter() {
+  }
+
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    numFrames++;
+    return 0;
+  }
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CODEC_PRIMITIVES_H_
diff --git a/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives.cc b/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives.cc
new file mode 100644
index 0000000..6767cc6
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives.cc
@@ -0,0 +1,513 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <string>
+
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
+#include "video_engine/test/auto_test/interface/tb_interfaces.h"
+#include "video_engine/test/auto_test/interface/tb_external_transport.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+
+// Tracks which frames are created on the local side and reports them to the
+// FrameDropDetector class.
+class CreatedTimestampEffectFilter : public webrtc::ViEEffectFilter {
+ public:
+  explicit CreatedTimestampEffectFilter(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~CreatedTimestampEffectFilter() {}
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kCreated,
+                                           timeStamp90KHz);
+    return 0;
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+// Tracks which frames are sent in external transport on the local side
+// and reports them to the FrameDropDetector class.
+class FrameSentCallback : public SendFrameCallback {
+ public:
+  explicit FrameSentCallback(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~FrameSentCallback() {}
+  virtual void FrameSent(unsigned int rtp_timestamp) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kSent,
+                                           rtp_timestamp);
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+// Tracks which frames are received in external transport on the remote side
+// and reports them to the FrameDropDetector class.
+class FrameReceivedCallback : public ReceiveFrameCallback {
+ public:
+  explicit FrameReceivedCallback(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~FrameReceivedCallback() {}
+  virtual void FrameReceived(unsigned int rtp_timestamp) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kReceived,
+                                           rtp_timestamp);
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+// Tracks when frames are decoded on the remote side (received from the
+// jitter buffer) and reports them to the FrameDropDetector class.
+class DecodedTimestampEffectFilter : public webrtc::ViEEffectFilter {
+ public:
+  explicit DecodedTimestampEffectFilter(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~DecodedTimestampEffectFilter() {}
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kDecoded,
+                                           timeStamp90KHz);
+    return 0;
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+void TestFullStack(const TbInterfaces& interfaces,
+                   int capture_id,
+                   int video_channel,
+                   int width,
+                   int height,
+                   int bit_rate_kbps,
+                   int packet_loss_percent,
+                   int network_delay_ms,
+                   FrameDropDetector* frame_drop_detector) {
+  webrtc::VideoEngine *video_engine_interface = interfaces.video_engine;
+  webrtc::ViEBase *base_interface = interfaces.base;
+  webrtc::ViECapture *capture_interface = interfaces.capture;
+  webrtc::ViERender *render_interface = interfaces.render;
+  webrtc::ViECodec *codec_interface = interfaces.codec;
+  webrtc::ViENetwork *network_interface = interfaces.network;
+
+  // ***************************************************************
+  // Engine ready. Begin testing class
+  // ***************************************************************
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof (webrtc::VideoCodec));
+
+  // Set up all receive codecs. This basically sets up the codec interface
+  // to be able to recognize all receive codecs based on payload type.
+  for (int idx = 0; idx < codec_interface->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(idx, video_codec));
+    SetSuitableResolution(&video_codec, width, height);
+
+    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
+  }
+
+  // Configure External transport to simulate network interference:
+  TbExternalTransport external_transport(*interfaces.network);
+  external_transport.SetPacketLoss(packet_loss_percent);
+  external_transport.SetNetworkDelay(network_delay_ms);
+
+  FrameSentCallback frame_sent_callback(frame_drop_detector);
+  FrameReceivedCallback frame_received_callback(frame_drop_detector);
+  external_transport.RegisterSendFrameCallback(&frame_sent_callback);
+  external_transport.RegisterReceiveFrameCallback(&frame_received_callback);
+  EXPECT_EQ(0, network_interface->RegisterSendTransport(video_channel,
+                                                        external_transport));
+  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
+
+  // Setup only the VP8 codec, which is what we'll use.
+  webrtc::VideoCodec codec;
+  EXPECT_TRUE(FindSpecificCodec(webrtc::kVideoCodecVP8, codec_interface,
+                                &codec));
+  codec.startBitrate = bit_rate_kbps;
+  codec.maxBitrate = bit_rate_kbps;
+  codec.width = width;
+  codec.height = height;
+  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, codec));
+
+  webrtc::ViEImageProcess *image_process =
+      webrtc::ViEImageProcess::GetInterface(video_engine_interface);
+  EXPECT_TRUE(image_process);
+
+  // Setup the effect filters
+  CreatedTimestampEffectFilter create_filter(frame_drop_detector);
+  EXPECT_EQ(0, image_process->RegisterSendEffectFilter(video_channel,
+                                                       create_filter));
+  DecodedTimestampEffectFilter decode_filter(frame_drop_detector);
+  EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
+                                                         decode_filter));
+  // Send video.
+  EXPECT_EQ(0, base_interface->StartSend(video_channel));
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  // Cleanup.
+  EXPECT_EQ(0, image_process->DeregisterSendEffectFilter(video_channel));
+  EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(video_channel));
+  image_process->Release();
+  ViETest::Log("Done!");
+
+  WebRtc_Word32 num_rtp_packets = 0;
+  WebRtc_Word32 num_dropped_packets = 0;
+  WebRtc_Word32 num_rtcp_packets = 0;
+  external_transport.GetStats(num_rtp_packets, num_dropped_packets,
+                              num_rtcp_packets);
+  ViETest::Log("RTP packets    : %5d", num_rtp_packets);
+  ViETest::Log("Dropped packets: %5d", num_dropped_packets);
+  ViETest::Log("RTCP packets   : %5d", num_rtcp_packets);
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_EQ(0, base_interface->StopSend(video_channel));
+  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
+  EXPECT_EQ(0, network_interface->DeregisterSendTransport(video_channel));
+  EXPECT_EQ(0, render_interface->StopRender(capture_id));
+  EXPECT_EQ(0, render_interface->StopRender(video_channel));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
+  EXPECT_EQ(0, capture_interface->DisconnectCaptureDevice(video_channel));
+  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
+}
+
+void FixOutputFileForComparison(const std::string& output_file,
+                                int frame_length_in_bytes,
+                                const std::vector<Frame*>& frames) {
+  webrtc::test::FrameReaderImpl frame_reader(output_file,
+                                             frame_length_in_bytes);
+  const std::string temp_file = output_file + ".fixed";
+  webrtc::test::FrameWriterImpl frame_writer(temp_file, frame_length_in_bytes);
+  frame_reader.Init();
+  frame_writer.Init();
+
+  ASSERT_FALSE(frames.front()->dropped_at_render) << "It should not be "
+      "possible to drop the first frame. Both because we don't have anything "
+      "useful to fill that gap with and it is impossible to detect it without "
+      "any previous timestamps to compare with.";
+
+  WebRtc_UWord8* last_frame_data = new WebRtc_UWord8[frame_length_in_bytes];
+
+  // Process the file and write frame duplicates for all dropped frames.
+  for (std::vector<Frame*>::const_iterator it = frames.begin();
+       it != frames.end(); ++it) {
+    if ((*it)->dropped_at_render) {
+      // Write the previous frame to the output file:
+      EXPECT_TRUE(frame_writer.WriteFrame(last_frame_data));
+    } else {
+      EXPECT_TRUE(frame_reader.ReadFrame(last_frame_data));
+      EXPECT_TRUE(frame_writer.WriteFrame(last_frame_data));
+    }
+  }
+  delete[] last_frame_data;
+  frame_reader.Close();
+  frame_writer.Close();
+  ASSERT_EQ(0, std::remove(output_file.c_str()));
+  ASSERT_EQ(0, std::rename(temp_file.c_str(), output_file.c_str()));
+}
+
+// Records that a frame has reached |state|, stamped with the current
+// wall-clock time (microseconds). For kCreated a new heap-allocated Frame is
+// stored (keyed by its creation timestamp); for all later states only the
+// arrival time is recorded, keyed by the converted timestamp, to be matched
+// against the created frames later in CalculateResults().
+void FrameDropDetector::ReportFrameState(State state, unsigned int timestamp) {
+  // Any new report invalidates previously calculated results until
+  // CalculateResults() is run again.
+  dirty_ = true;
+  switch (state) {
+    case kCreated: {
+      // Frame numbers are assigned sequentially, so number_ equals the
+      // frame's index in created_frames_vector_.
+      int number = created_frames_vector_.size();
+      Frame* frame = new Frame(number, timestamp);
+      frame->created_timestamp_in_us_ =
+          webrtc::TickTime::MicrosecondTimestamp();
+      created_frames_vector_.push_back(frame);
+      created_frames_[timestamp] = frame;
+      num_created_frames_++;
+      break;
+    }
+    case kSent:
+      sent_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      if (timestamp_diff_ == 0) {
+        // When the first created frame arrives we calculate the fixed
+        // difference between the timestamps of the frames entering and leaving
+        // the encoder. This diff is used to identify the frames from the
+        // created_frames_ map.
+        timestamp_diff_ =
+            timestamp - created_frames_vector_.front()->frame_timestamp_;
+      }
+      num_sent_frames_++;
+      break;
+    case kReceived:
+      received_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      num_received_frames_++;
+      break;
+    case kDecoded:
+      decoded_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      num_decoded_frames_++;
+      break;
+    case kRendered:
+      rendered_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      num_rendered_frames_++;
+      break;
+  }
+}
+
+// Matches the per-state arrival-time maps against the created frames, filling
+// in the timestamp fields of every Frame object, then flags each frame as
+// dropped at any state whose map has no entry for it. Frames are identified
+// by converting arrival timestamps back to creation timestamps using the
+// constant timestamp_diff_ established in ReportFrameState(kSent, ...).
+void FrameDropDetector::CalculateResults() {
+  // Fill in all fields of the Frame objects in the created_frames_ map.
+  // Iterate over the maps from converted timestamps to the arrival timestamps.
+  // Use find() rather than operator[]: operator[] would default-insert a NULL
+  // Frame* (and crash on dereference) for any arrival timestamp that does not
+  // map back to a created frame. The key type is unsigned int, so keep the
+  // converted timestamp unsigned as well.
+  std::map<unsigned int, int64_t>::const_iterator it;
+  std::map<unsigned int, Frame*>::iterator frame_it;
+  for (it = sent_frames_.begin(); it != sent_frames_.end(); ++it) {
+    unsigned int created_timestamp = it->first - timestamp_diff_;
+    frame_it = created_frames_.find(created_timestamp);
+    if (frame_it != created_frames_.end()) {
+      frame_it->second->sent_timestamp_in_us_ = it->second;
+    }
+  }
+  for (it = received_frames_.begin(); it != received_frames_.end(); ++it) {
+    unsigned int created_timestamp = it->first - timestamp_diff_;
+    frame_it = created_frames_.find(created_timestamp);
+    if (frame_it != created_frames_.end()) {
+      frame_it->second->received_timestamp_in_us_ = it->second;
+    }
+  }
+  for (it = decoded_frames_.begin(); it != decoded_frames_.end(); ++it) {
+    unsigned int created_timestamp = it->first - timestamp_diff_;
+    frame_it = created_frames_.find(created_timestamp);
+    if (frame_it != created_frames_.end()) {
+      frame_it->second->decoded_timestamp_in_us_ = it->second;
+    }
+  }
+  for (it = rendered_frames_.begin(); it != rendered_frames_.end(); ++it) {
+    unsigned int created_timestamp = it->first - timestamp_diff_;
+    frame_it = created_frames_.find(created_timestamp);
+    if (frame_it != created_frames_.end()) {
+      frame_it->second->rendered_timestamp_in_us_ = it->second;
+    }
+  }
+  // Find out where the frames were not present in the different states.
+  dropped_frames_at_send_ = 0;
+  dropped_frames_at_receive_ = 0;
+  dropped_frames_at_decode_ = 0;
+  dropped_frames_at_render_ = 0;
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    int encoded_timestamp = (*it)->frame_timestamp_ + timestamp_diff_;
+    if (sent_frames_.find(encoded_timestamp) == sent_frames_.end()) {
+      (*it)->dropped_at_send = true;
+      dropped_frames_at_send_++;
+    }
+    if (received_frames_.find(encoded_timestamp) == received_frames_.end()) {
+      (*it)->dropped_at_receive = true;
+      dropped_frames_at_receive_++;
+    }
+    if (decoded_frames_.find(encoded_timestamp) == decoded_frames_.end()) {
+      (*it)->dropped_at_decode = true;
+      dropped_frames_at_decode_++;
+    }
+    if (rendered_frames_.find(encoded_timestamp) == rendered_frames_.end()) {
+      (*it)->dropped_at_render = true;
+      dropped_frames_at_render_++;
+    }
+  }
+  // Results are now consistent with all reported timestamps.
+  dirty_ = false;
+}
+
+// Prints a human-readable report of all frames: per-state inter-frame deltas
+// with drop markers, per-frame latencies between states, and drop totals.
+// CalculateResults() must have been called first (enforced by the assert).
+void FrameDropDetector::PrintReport() {
+  assert(!dirty_);
+  ViETest::Log("Frame Drop Detector report:");
+  // NOTE(review): "%ld" is paired with std::map::size() (size_t); on ILP32
+  // platforms size_t is not long — consider casting to long. TODO confirm.
+  ViETest::Log("  Created  frames: %ld", created_frames_.size());
+  ViETest::Log("  Sent     frames: %ld", sent_frames_.size());
+  ViETest::Log("  Received frames: %ld", received_frames_.size());
+  ViETest::Log("  Decoded  frames: %ld", decoded_frames_.size());
+  ViETest::Log("  Rendered frames: %ld", rendered_frames_.size());
+
+  // Display all frames and stats for them:
+  // The last_* variables track the most recent non-dropped timestamp per
+  // state so each delta is measured against the previous frame that actually
+  // reached that state.
+  long last_created = 0;
+  long last_sent = 0;
+  long last_received = 0;
+  long last_decoded = 0;
+  long last_rendered = 0;
+  ViETest::Log("\nDeltas between sent frames and drop status:");
+  ViETest::Log("Unit: Microseconds");
+  ViETest::Log("Frame  Created    Sent    Received Decoded Rendered "
+      "Dropped at  Dropped at  Dropped at  Dropped at");
+  ViETest::Log(" nbr    delta     delta    delta    delta   delta   "
+      " Send?       Receive?    Decode?     Render?");
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    // A delta of -1 means N/A: the frame was dropped before reaching that
+    // state (or it is the very first frame, handled below).
+    int created_delta =
+        static_cast<int>((*it)->created_timestamp_in_us_ - last_created);
+    int sent_delta = (*it)->dropped_at_send ? -1 :
+        static_cast<int>((*it)->sent_timestamp_in_us_ - last_sent);
+    int received_delta = (*it)->dropped_at_receive ? -1 :
+        static_cast<int>((*it)->received_timestamp_in_us_ - last_received);
+    int decoded_delta = (*it)->dropped_at_decode ? -1 :
+        static_cast<int>((*it)->decoded_timestamp_in_us_ - last_decoded);
+    int rendered_delta = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ - last_rendered);
+
+    // Set values to -1 for the first frame:
+    if ((*it)->number_ == 0) {
+      created_delta = -1;
+      sent_delta = -1;
+      received_delta = -1;
+      decoded_delta = -1;
+      rendered_delta = -1;
+    }
+    ViETest::Log("%5d %8d %8d %8d %8d %8d %10s %10s %10s %10s",
+                 (*it)->number_,
+                 created_delta,
+                 sent_delta,
+                 received_delta,
+                 decoded_delta,
+                 rendered_delta,
+                 (*it)->dropped_at_send ? "DROPPED" : "      ",
+                 (*it)->dropped_at_receive ? "DROPPED" : "      ",
+                 (*it)->dropped_at_decode ? "DROPPED" : "      ",
+                 (*it)->dropped_at_render ? "DROPPED" : "      ");
+    // Only advance the per-state "last seen" markers for states the frame
+    // actually reached.
+    last_created = (*it)->created_timestamp_in_us_;
+    if (!(*it)->dropped_at_send) {
+      last_sent = (*it)->sent_timestamp_in_us_;
+    }
+    // NOTE(review): stray extra leading space on the next line (style only).
+     if (!(*it)->dropped_at_receive) {
+      last_received = (*it)->received_timestamp_in_us_;
+    }
+    if (!(*it)->dropped_at_decode) {
+      last_decoded = (*it)->decoded_timestamp_in_us_;
+    }
+    if (!(*it)->dropped_at_render) {
+      last_rendered = (*it)->rendered_timestamp_in_us_;
+    }
+  }
+  ViETest::Log("\nLatency between states (-1 means N/A because of drop):");
+  ViETest::Log("Unit: Microseconds");
+  ViETest::Log("Frame  Created    Sent      Received   Decoded      Total    "
+      "   Total");
+  ViETest::Log(" nbr   ->Sent  ->Received  ->Decoded ->Rendered    latency   "
+      "  latency");
+  ViETest::Log("                                               (incl network)"
+      "(excl network)");
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    int created_to_sent = (*it)->dropped_at_send ? -1 :
+        static_cast<int>((*it)->sent_timestamp_in_us_ -
+                         (*it)->created_timestamp_in_us_);
+    int sent_to_received = (*it)->dropped_at_receive ? -1 :
+        static_cast<int>((*it)->received_timestamp_in_us_ -
+                         (*it)->sent_timestamp_in_us_);
+    int received_to_decoded = (*it)->dropped_at_decode ? -1 :
+        static_cast<int>((*it)->decoded_timestamp_in_us_ -
+                         (*it)->received_timestamp_in_us_);
+    int decoded_to_render = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ -
+                         (*it)->decoded_timestamp_in_us_);
+    int total_latency_incl_network = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ -
+                         (*it)->created_timestamp_in_us_);
+    // Excluding network = total minus the sent->received hop. NOTE(review):
+    // if the frame reached render but was dropped at receive this subtracts
+    // sent_to_received == -1; CalculateResults marks such frames dropped at
+    // render too, so the case should not occur — TODO confirm.
+    int total_latency_excl_network = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ -
+                         (*it)->created_timestamp_in_us_ - sent_to_received);
+    ViETest::Log("%5d %9d %9d %9d %9d %12d %12d",
+                 (*it)->number_,
+                 created_to_sent,
+                 sent_to_received,
+                 received_to_decoded,
+                 decoded_to_render,
+                 total_latency_incl_network,
+                 total_latency_excl_network);
+  }
+  // Find and print the dropped frames.
+  ViETest::Log("\nTotal # dropped frames at:");
+  ViETest::Log("  Send   : %d", dropped_frames_at_send_);
+  ViETest::Log("  Receive: %d", dropped_frames_at_receive_);
+  ViETest::Log("  Decode : %d", dropped_frames_at_decode_);
+  ViETest::Log("  Render : %d", dropped_frames_at_render_);
+}
+
+// Dumps all raw per-frame timestamps, any frames whose decode/render drop
+// flags disagree, and the raw ReportFrameState() invocation counters.
+// Intended for debugging missing timestamps; requires CalculateResults().
+void FrameDropDetector::PrintDebugDump() {
+  assert(!dirty_);
+  ViETest::Log("\nPrintDebugDump: Frame objects:");
+  ViETest::Log("Frame FrTimeStamp Created       Sent      Received    Decoded"
+      "    Rendered ");
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    ViETest::Log("%5d %11d %11d %11d %11d %11d %11d",
+                 (*it)->number_,
+                 (*it)->frame_timestamp_,
+                 (*it)->created_timestamp_in_us_,
+                 (*it)->sent_timestamp_in_us_,
+                 (*it)->received_timestamp_in_us_,
+                 (*it)->decoded_timestamp_in_us_,
+                 (*it)->rendered_timestamp_in_us_);
+  }
+  // Collect frames that were decoded but not rendered (or vice versa) —
+  // these indicate timestamps lost between the last two pipeline stages.
+  std::vector<int> mismatch_frame_num_list;
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    if ((*it)->dropped_at_render != (*it)->dropped_at_decode) {
+      mismatch_frame_num_list.push_back((*it)->number_);
+    }
+  }
+  if (mismatch_frame_num_list.size() > 0) {
+    ViETest::Log("\nDecoded/Rendered mismatches:");
+    ViETest::Log("Frame FrTimeStamp    Created       Sent      Received    "
+        "Decoded    Rendered ");
+    for (std::vector<int>::const_iterator it = mismatch_frame_num_list.begin();
+         it != mismatch_frame_num_list.end(); ++it) {
+      // Frame number == index in created_frames_vector_ (numbers are assigned
+      // from the vector size in ReportFrameState), so direct indexing is safe.
+      Frame* frame = created_frames_vector_[*it];
+      ViETest::Log("%5d %11d %11d %11d %11d %11d %11d",
+                 frame->number_,
+                 frame->frame_timestamp_,
+                 frame->created_timestamp_in_us_,
+                 frame->sent_timestamp_in_us_,
+                 frame->received_timestamp_in_us_,
+                 frame->decoded_timestamp_in_us_,
+                 frame->rendered_timestamp_in_us_);
+    }
+  }
+
+  ViETest::Log("\nReportFrameState method invocations:");
+  ViETest::Log("  Created : %d", num_created_frames_);
+  ViETest::Log("  Send    : %d", num_sent_frames_);
+  ViETest::Log("  Received: %d", num_received_frames_);
+  ViETest::Log("  Decoded : %d", num_decoded_frames_);
+  ViETest::Log("  Rendered: %d", num_rendered_frames_);
+}
+
+// Returns all frames in creation order. Valid only after CalculateResults()
+// has filled in the timestamp and drop fields (enforced by the assert).
+const std::vector<Frame*>& FrameDropDetector::GetAllFrames() {
+  assert(!dirty_);
+  return created_frames_vector_;
+}
+
+// Returns the number of frames registered as dropped at |state|.
+// Requires CalculateResults() to have been called (enforced by the assert).
+int FrameDropDetector::GetNumberOfFramesDroppedAt(State state) {
+  assert(!dirty_);
+  switch (state) {
+    case kSent:
+      return dropped_frames_at_send_;
+    case kReceived:
+      return dropped_frames_at_receive_;
+    case kDecoded:
+      return dropped_frames_at_decode_;
+    case kRendered:
+      return dropped_frames_at_render_;
+    default:
+      // kCreated (a frame cannot be "dropped at creation").
+      return 0;
+  }
+}
+
+// Reports the frame as rendered to the frame drop detector, then delegates
+// the actual file writing to the base class renderer.
+int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
+    unsigned char *buffer, int buffer_size, uint32_t time_stamp,
+    int64_t render_time) {
+  // Register that this frame has been rendered:
+  frame_drop_detector_->ReportFrameState(FrameDropDetector::kRendered,
+                                         time_stamp);
+  return ViEToFileRenderer::DeliverFrame(buffer, buffer_size,
+                                         time_stamp, render_time);
+}
+
+// Pure pass-through to the base class; no frame-drop bookkeeping is needed
+// for size changes.
+int FrameDropMonitoringRemoteFileRenderer::FrameSizeChange(
+    unsigned int width, unsigned int height, unsigned int number_of_streams) {
+  return ViEToFileRenderer::FrameSizeChange(width, height, number_of_streams);
+}
diff --git a/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives.h b/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives.h
new file mode 100644
index 0000000..80575c3
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives.h
@@ -0,0 +1,229 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
+
+#include <map>
+#include <vector>
+
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
+
+class FrameDropDetector;
+class TbInterfaces;
+
+// Initializes the Video engine and its components, runs video playback
+// for KAutoTestSleepTimeMs milliseconds, then shuts down everything.
+// The bit rate and packet loss parameters should be configured so that
+// frames are dropped, in order to test the frame drop detection that is
+// performed by the FrameDropDetector class.
+void TestFullStack(const TbInterfaces& interfaces,
+                   int capture_id,
+                   int video_channel,
+                   int width,
+                   int height,
+                   int bit_rate_kbps,
+                   int packet_loss_percent,
+                   int network_delay_ms,
+                   FrameDropDetector* frame_drop_detector);
+
+// A frame in a video file. The five different points in the stack where the
+// frame state is registered are (in time order): created, sent, received,
+// decoded, rendered.
+class Frame {
+ public:
+  Frame(int number, unsigned int timestamp)
+    : number_(number),
+      frame_timestamp_(timestamp),
+      created_timestamp_in_us_(-1),
+      sent_timestamp_in_us_(-1),
+      received_timestamp_in_us_(-1),
+      decoded_timestamp_in_us_(-1),
+      rendered_timestamp_in_us_(-1),
+      dropped_at_send(false),
+      dropped_at_receive(false),
+      dropped_at_decode(false),
+      dropped_at_render(false) {}
+
+  // Frame number, starting at 0.
+  int number_;
+
+  // Frame timestamp, that is used by Video Engine and RTP headers and set when
+  // the frame is sent into the stack.
+  unsigned int frame_timestamp_;
+
+  // Timestamps for our measurements of when the frame is in different states.
+  // Initialized to -1, meaning "not observed in that state (yet)"; filled in
+  // by FrameDropDetector::CalculateResults().
+  int64_t created_timestamp_in_us_;
+  int64_t sent_timestamp_in_us_;
+  int64_t received_timestamp_in_us_;
+  int64_t decoded_timestamp_in_us_;
+  int64_t rendered_timestamp_in_us_;
+
+  // Where the frame was dropped (more than one may be true).
+  // Set by FrameDropDetector::CalculateResults().
+  bool dropped_at_send;
+  bool dropped_at_receive;
+  bool dropped_at_decode;
+  bool dropped_at_render;
+};
+
+// Fixes the output file by copying the last successful frame into the place
+// where the dropped frame would be, for all dropped frames (if any).
+// This method will not be able to fix data for the first frame if that is
+// dropped, since there is no previous frame to copy. In practice this case
+// should never happen, since at the very least the encoder will not drop the
+// first frame.
+// Parameters:
+//    output_file            The output file to modify (pad with frame copies
+//                           for all dropped frames)
+//    frame_length_in_bytes  Byte length of each frame.
+//    frames                 A vector of all Frame objects. Must be sorted by
+//                           frame number. If empty this method will do nothing.
+void FixOutputFileForComparison(const std::string& output_file,
+                                int frame_length_in_bytes,
+                                const std::vector<Frame*>& frames);
+
+// Handles statistics about dropped frames. Frames travel through the stack
+// with different timestamps. The frames created and sent to the encoder have
+// one timestamp on the sending side while the decoded/rendered frames have
+// another timestamp on the receiving side. The difference between these
+// timestamps is fixed, which we can use to identify the frames when they
+// arrive, since the FrameDropDetector class gets data reported from both sides.
+// The five different points in the stack when this class examines the frame
+// states are (in time order): created, sent, received, decoded, rendered.
+//
+// The flow can be visualized like this:
+//
+//         Created        Sent        Received               Decoded   Rendered
+// +-------+  |  +-------+ | +---------+ | +------+  +-------+  |  +--------+
+// |Capture|  |  |Encoder| | |  Ext.   | | |Jitter|  |Decoder|  |  |  Ext.  |
+// | device|---->|       |-->|transport|-->|buffer|->|       |---->|renderer|
+// +-------+     +-------+   +---------+   +------+  +-------+     +--------+
+//
+// This class has no intention of being thread-safe.
+class FrameDropDetector {
+ public:
+  enum State {
+    // A frame being created, i.e. sent to the encoder; the first step of
+    // a frame's life cycle. This timestamp becomes the frame timestamp in the
+    // Frame objects.
+    kCreated,
+    // A frame being sent in external transport (to the simulated network). This
+    // timestamp differs from the one in the Created state by a constant diff.
+    kSent,
+    // A frame being received in external transport (from the simulated
+    // network). This timestamp differs from the one in the Created state by a
+    // constant diff.
+    kReceived,
+    // A frame that has been decoded in the decoder. This timestamp differs
+    // from the one in the Created state by a constant diff.
+    kDecoded,
+    // A frame that has been rendered; the last step of a frame's life cycle.
+    // This timestamp differs from the one in the Created state by a constant
+    // diff.
+    kRendered
+  };
+
+  FrameDropDetector()
+      : dirty_(true),
+        dropped_frames_at_send_(0),
+        dropped_frames_at_receive_(0),
+        dropped_frames_at_decode_(0),
+        dropped_frames_at_render_(0),
+        num_created_frames_(0),
+        num_sent_frames_(0),
+        num_received_frames_(0),
+        num_decoded_frames_(0),
+        num_rendered_frames_(0),
+        timestamp_diff_(0) {}
+
+  // Reports a frame has reached a state in the frame life cycle.
+  void ReportFrameState(State state, unsigned int timestamp);
+
+  // Uses all the gathered timestamp information to calculate which frames have
+  // been dropped during the test and where they were dropped. Not until
+  // this method has been executed, the Frame objects will have all fields
+  // filled with the proper timestamp information.
+  void CalculateResults();
+
+  // Returns the number of frames that have been registered as dropped at the
+  // specified state of the frame life cycle.
+  // CalculateResults() must be called before calling this method.
+  int GetNumberOfFramesDroppedAt(State state);
+
+  // Gets a vector of all the created frames.
+  // CalculateResults() must be called before calling this method to have all
+  // fields of the Frame objects to represent the current state.
+  const std::vector<Frame*>& GetAllFrames();
+
+  // Prints a detailed report about all the different frame states and which
+  // ones are detected as dropped, using ViETest::Log.
+  // CalculateResults() must be called before calling this method.
+  void PrintReport();
+
+  // Prints all the timestamp maps. Mainly used for debugging purposes to find
+  // missing timestamps.
+  void PrintDebugDump();
+ private:
+  // True whenever timestamps have been reported via ReportFrameState() but
+  // CalculateResults() has not yet processed them; starts out true and is
+  // reset to false by CalculateResults().
+  bool dirty_;
+
+  // Map of frame creation timestamps to all Frame objects.
+  // NOTE(review): the Frame objects allocated in ReportFrameState() are never
+  // deleted by this class (no destructor) — acceptable for test code, but
+  // worth confirming ownership expectations.
+  std::map<unsigned int, Frame*> created_frames_;
+
+  // Maps converted frame timestamps (differ from creation timestamp) to the
+  // time they arrived in the different states of the frame's life cycle.
+  std::map<unsigned int, int64_t> sent_frames_;
+  std::map<unsigned int, int64_t> received_frames_;
+  std::map<unsigned int, int64_t> decoded_frames_;
+  std::map<unsigned int, int64_t> rendered_frames_;
+
+  // A vector with the frames sorted in their created order.
+  std::vector<Frame*> created_frames_vector_;
+
+  // Statistics, computed by CalculateResults().
+  int dropped_frames_at_send_;
+  int dropped_frames_at_receive_;
+  int dropped_frames_at_decode_;
+  int dropped_frames_at_render_;
+
+  // Raw counts of ReportFrameState() invocations per state.
+  int num_created_frames_;
+  int num_sent_frames_;
+  int num_received_frames_;
+  int num_decoded_frames_;
+  int num_rendered_frames_;
+
+  // The constant diff between the created and transmitted frames, since their
+  // timestamps are converted. Established on the first kSent report.
+  unsigned int timestamp_diff_;
+};
+
+// Tracks which frames are received on the remote side and reports back to the
+// FrameDropDetector class when they are rendered.
+class FrameDropMonitoringRemoteFileRenderer : public ViEToFileRenderer {
+ public:
+  // Does not take ownership of |frame_drop_detector|, which must outlive
+  // this renderer.
+  explicit FrameDropMonitoringRemoteFileRenderer(
+      FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~FrameDropMonitoringRemoteFileRenderer() {}
+
+  // Implementation of ExternalRenderer:
+  int FrameSizeChange(unsigned int width, unsigned int height,
+                      unsigned int number_of_streams);
+  // Reports the frame as kRendered to the detector before delegating to the
+  // base class.
+  int DeliverFrame(unsigned char* buffer, int buffer_size,
+                   uint32_t time_stamp,
+                   int64_t render_time);
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
diff --git a/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives_unittest.cc b/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives_unittest.cc
new file mode 100644
index 0000000..f4cc390
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/framedrop_primitives_unittest.cc
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "framedrop_primitives.h"
+
+#include <cstdio>
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+
+namespace webrtc {
+
+const std::string kOutputFilename = "temp_outputfile.tmp";
+const int kFrameLength = 1000;
+
+// Test fixture that guarantees the temporary output file does not exist
+// before each test and is removed afterwards.
+class FrameDropPrimitivesTest: public testing::Test {
+ protected:
+  FrameDropPrimitivesTest() {}
+  virtual ~FrameDropPrimitivesTest() {}
+  void SetUp() {
+    // Cleanup any previous output file.
+    std::remove(kOutputFilename.c_str());
+  }
+  void TearDown() {
+    // Cleanup the temporary file.
+    std::remove(kOutputFilename.c_str());
+  }
+};
+
+// Verifies that FixOutputFileForComparison pads the output file with a copy
+// of the previous frame for every frame marked as dropped at render.
+TEST_F(FrameDropPrimitivesTest, FixOutputFileForComparison) {
+  // Create test frame objects, where the second and fourth frames are marked
+  // as dropped at rendering. Frame numbers must be sequential since
+  // FixOutputFileForComparison requires the vector sorted by frame number
+  // (previously all four frames were incorrectly constructed with number 0).
+  std::vector<Frame*> frames;
+  Frame first_frame(0, kFrameLength);
+  Frame second_frame(1, kFrameLength);
+  Frame third_frame(2, kFrameLength);
+  Frame fourth_frame(3, kFrameLength);
+
+  second_frame.dropped_at_render = true;
+  fourth_frame.dropped_at_render = true;
+
+  frames.push_back(&first_frame);
+  frames.push_back(&second_frame);
+  frames.push_back(&third_frame);
+  frames.push_back(&fourth_frame);
+
+  // Prepare data for the first and third frames:
+  WebRtc_UWord8 first_frame_data[kFrameLength];
+  memset(first_frame_data, 5, kFrameLength);  // Fill it with 5's to identify.
+  WebRtc_UWord8 third_frame_data[kFrameLength];
+  memset(third_frame_data, 7, kFrameLength);  // Fill it with 7's to identify.
+
+  // Write the first and third frames to the temporary file. This means the fix
+  // method should add two frames of data by filling the file with data from
+  // the first and third frames after executing.
+  webrtc::test::FrameWriterImpl frame_writer(kOutputFilename, kFrameLength);
+  EXPECT_TRUE(frame_writer.Init());
+  EXPECT_TRUE(frame_writer.WriteFrame(first_frame_data));
+  EXPECT_TRUE(frame_writer.WriteFrame(third_frame_data));
+  frame_writer.Close();
+  EXPECT_EQ(2 * kFrameLength,
+            static_cast<int>(webrtc::test::GetFileSize(kOutputFilename)));
+
+  FixOutputFileForComparison(kOutputFilename, kFrameLength, frames);
+
+  // Verify that the output file has correct size.
+  EXPECT_EQ(4 * kFrameLength,
+            static_cast<int>(webrtc::test::GetFileSize(kOutputFilename)));
+
+  // Each dropped frame must have been replaced by a copy of its predecessor:
+  // expected layout is frame1, frame1, frame3, frame3.
+  webrtc::test::FrameReaderImpl frame_reader(kOutputFilename, kFrameLength);
+  frame_reader.Init();
+  WebRtc_UWord8 read_buffer[kFrameLength];
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
+
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, third_frame_data, kFrameLength));
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, third_frame_data, kFrameLength));
+
+  frame_reader.Close();
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/test/auto_test/primitives/general_primitives.cc b/trunk/src/video_engine/test/auto_test/primitives/general_primitives.cc
new file mode 100644
index 0000000..6198a64
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/general_primitives.cc
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "general_primitives.h"
+
+#include "video_capture_factory.h"
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+#include "vie_to_file_renderer.h"
+
+// Iterates over all capture devices reported by the VideoCaptureFactory and
+// allocates the first one that AllocateCaptureDevice() accepts. On success
+// the device name, id, and module are returned through the out parameters;
+// on failure a nonfatal gtest failure is reported.
+void FindCaptureDeviceOnSystem(webrtc::ViECapture* capture,
+                               unsigned char* device_name,
+                               unsigned int device_name_length,
+                               int* device_id,
+                               webrtc::VideoCaptureModule** device_video) {
+
+  bool capture_device_set = false;
+  webrtc::VideoCaptureModule::DeviceInfo *dev_info =
+      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+
+  const unsigned int kMaxUniqueIdLength = 256;
+  WebRtc_UWord8 unique_id[kMaxUniqueIdLength];
+  memset(unique_id, 0, kMaxUniqueIdLength);
+
+  for (unsigned int i = 0; i < dev_info->NumberOfDevices(); i++) {
+    EXPECT_EQ(0, dev_info->GetDeviceName(i, device_name, device_name_length,
+                                         unique_id, kMaxUniqueIdLength));
+
+    // 4571 is an arbitrary module id for the capture module instance.
+    *device_video = webrtc::VideoCaptureFactory::Create(4571, unique_id);
+    EXPECT_TRUE(*device_video != NULL);
+
+    // The reference is held until the caller releases the module; on
+    // allocation failure it is released again below.
+    (*device_video)->AddRef();
+
+    int error = capture->AllocateCaptureDevice(**device_video, *device_id);
+    if (error == 0) {
+      ViETest::Log("Using capture device: %s, captureId: %d.",
+                   device_name, *device_id);
+      capture_device_set = true;
+      break;
+    } else {
+      (*device_video)->Release();
+      (*device_video) = NULL;
+    }
+  }
+  delete dev_info;
+  EXPECT_TRUE(capture_device_set) << "Found no suitable camera on your system.";
+}
+
+// Adds a renderer for |frame_provider_id| covering the full window
+// (normalized coordinates 0.0,0.0 to 1.0,1.0) and starts rendering.
+void RenderInWindow(webrtc::ViERender* video_render_interface,
+                    int frame_provider_id,
+                    void* os_window,
+                    float z_index) {
+  EXPECT_EQ(0,
+            video_render_interface->AddRenderer(frame_provider_id, os_window,
+                                                z_index, 0.0, 0.0, 1.0, 1.0));
+  EXPECT_EQ(0, video_render_interface->StartRender(frame_provider_id));
+}
+
+// Registers |to_file_renderer| as an external I420 renderer for
+// |frame_provider_id| and starts rendering to it.
+void RenderToFile(webrtc::ViERender* renderer_interface,
+                  int frame_provider_id,
+                  ViEToFileRenderer *to_file_renderer) {
+  EXPECT_EQ(0, renderer_interface->AddRenderer(
+      frame_provider_id, webrtc::kVideoI420, to_file_renderer));
+  EXPECT_EQ(0, renderer_interface->StartRender(frame_provider_id));
+}
+
+// Stops rendering on the channel and removes the renderers for both the
+// channel and the capture device.
+// NOTE(review): |base_interface| is unused in this implementation; removing
+// it would change the public signature, so it is kept for compatibility.
+void StopAndRemoveRenderers(webrtc::ViEBase* base_interface,
+                            webrtc::ViERender* render_interface,
+                            int channel_id,
+                            int capture_id) {
+  EXPECT_EQ(0, render_interface->StopRender(channel_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(channel_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
+}
+
+// Enables compound RTCP (RFC 4585), PLI-based key frame requests and TMMBR
+// on the given video channel.
+void ConfigureRtpRtcp(webrtc::ViERTP_RTCP* rtcp_interface,
+                      int video_channel) {
+  EXPECT_EQ(0, rtcp_interface->SetRTCPStatus(video_channel,
+                                             webrtc::kRtcpCompound_RFC4585));
+  EXPECT_EQ(0, rtcp_interface->SetKeyFrameRequestMethod(
+      video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
+  EXPECT_EQ(0, rtcp_interface->SetTMMBRStatus(video_channel, true));
+}
+
+// Scans the codec interface's codec list for the first codec of |of_type|.
+// On success copies it into |result| and returns true; returns false if no
+// such codec exists or if querying any codec fails. |result| is always
+// zeroed first.
+bool FindSpecificCodec(webrtc::VideoCodecType of_type,
+                       webrtc::ViECodec* codec_interface,
+                       webrtc::VideoCodec* result) {
+
+  memset(result, 0, sizeof(webrtc::VideoCodec));
+
+  for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
+    webrtc::VideoCodec codec;
+    memset(&codec, 0, sizeof(webrtc::VideoCodec));
+    if (codec_interface->GetCodec(i, codec) != 0) {
+      return false;
+    }
+    if (codec.codecType == of_type) {
+      // Done
+      *result = codec;
+      return true;
+    }
+  }
+  // Didn't find it
+  return false;
+}
+
+// Sets the codec resolution: forced values win if both are given (i.e. both
+// differ from kDoNotForceResolution); otherwise QCIF (176x144) for the
+// bandwidth-heavy I420 codec and VGA (640x480) for everything else.
+void SetSuitableResolution(webrtc::VideoCodec* video_codec,
+                           int forced_codec_width,
+                           int forced_codec_height) {
+  if (forced_codec_width != kDoNotForceResolution &&
+      forced_codec_height != kDoNotForceResolution) {
+    video_codec->width = forced_codec_width;
+    video_codec->height = forced_codec_height;
+  } else if (video_codec->codecType == webrtc::kVideoCodecI420) {
+    // I420 is very bandwidth heavy, so limit it here.
+    video_codec->width = 176;
+    video_codec->height = 144;
+  } else {
+    // Otherwise go with 640x480.
+    video_codec->width = 640;
+    video_codec->height = 480;
+  }
+}
diff --git a/trunk/src/video_engine/test/auto_test/primitives/general_primitives.h b/trunk/src/video_engine/test/auto_test/primitives/general_primitives.h
new file mode 100644
index 0000000..07eca0d
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/primitives/general_primitives.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
+
+class ViEToFileRenderer;
+
+#include "common_types.h"
+
+namespace webrtc {
+class VideoCaptureModule;
+class ViEBase;
+class ViECapture;
+class ViECodec;
+class ViERender;
+class ViERTP_RTCP;
+struct VideoCodec;
+}
+
+// This constant can be used as input to various functions to not force the
+// codec resolution.
+const int kDoNotForceResolution = 0;
+
+// Finds a suitable capture device (e.g. camera) on the current system
+// and allocates it. Details about the found device are filled into the out
+// parameters. If this operation fails, device_id is assigned a negative value
+// and number_of_errors is incremented.
+void FindCaptureDeviceOnSystem(webrtc::ViECapture* capture,
+                               unsigned char* device_name,
+                               const unsigned int kDeviceNameLength,
+                               int* device_id,
+                               webrtc::VideoCaptureModule** device_video);
+
+// Sets up rendering in a window previously created using a Window Manager
+// (See vie_window_manager_factory.h for more details on how to make one of
+// those). The frame provider id is a source of video frames, for instance
+// a capture device or a video channel.
+void RenderInWindow(webrtc::ViERender* video_render_interface,
+                    int  frame_provider_id,
+                    void* os_window,
+                    float z_index);
+
+// Similar in function to RenderInWindow, this function instead renders to
+// a file using a to-file-renderer. The frame provider id is a source of
+// video frames, for instance a capture device or a video channel.
+void RenderToFile(webrtc::ViERender* renderer_interface,
+                  int frame_provider_id,
+                  ViEToFileRenderer* to_file_renderer);
+
+// Stops all rendering given the normal case that we have a capture device
+// and a video channel set up for rendering.
+void StopAndRemoveRenderers(webrtc::ViEBase* base_interface,
+                            webrtc::ViERender* render_interface,
+                            int channel_id,
+                            int capture_id);
+
+// Configures RTP-RTCP.
+void ConfigureRtpRtcp(webrtc::ViERTP_RTCP* rtcp_interface,
+                      int video_channel);
+
+// Finds a codec in the codec list. Returns true on success, false otherwise.
+// The resulting codec is filled into result on success but is zeroed out
+// on failure.
+bool FindSpecificCodec(webrtc::VideoCodecType of_type,
+                       webrtc::ViECodec* codec_interface,
+                       webrtc::VideoCodec* result);
+
+// Sets up the provided codec with a resolution that takes individual codec
+// quirks into account (except if the forced* variables are
+// != kDoNotForceResolution)
+void SetSuitableResolution(webrtc::VideoCodec* video_codec,
+                           int forced_codec_width,
+                           int forced_codec_height);
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
diff --git a/trunk/src/video_engine/test/auto_test/source/Android.mk b/trunk/src/video_engine/test/auto_test/source/Android.mk
new file mode 100644
index 0000000..9e19418
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/Android.mk
@@ -0,0 +1,65 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH:= $(call my-dir)
+
+# video engine auto test app (builds the webrtc_video_test executable)
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES:= \
+    vie_autotest.cc \
+    vie_autotest_android.cc \
+    vie_autotest_base.cc \
+    vie_autotest_capture.cc \
+    vie_autotest_codec.cc \
+    vie_autotest_encryption.cc \
+    vie_autotest_file.cc \
+    vie_autotest_image_process.cc \
+    vie_autotest_loopback.cc \
+    vie_autotest_network.cc \
+    vie_autotest_render.cc \
+    vie_autotest_rtp_rtcp.cc \
+    vie_comparison_tests.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    '-DWEBRTC_TARGET_PC' \
+    '-DWEBRTC_ANDROID' \
+    '-DWEBRTC_ANDROID_OPENSLES'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../helpers \
+    $(LOCAL_PATH)/../primitives \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../include \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../../modules/video_coding/codecs/interface \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface \
+    $(LOCAL_PATH)/../../../../modules/video_render/main/interface \
+    $(LOCAL_PATH)/../../../../modules/interface \
+    $(LOCAL_PATH)/../../../../modules/video_capture/main/interface \
+    $(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
+    $(LOCAL_PATH)/../../../../voice_engine/main/interface
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils \
+    libstlport \
+    libwebrtc
+
+LOCAL_MODULE:= webrtc_video_test
+
+# Outside the NDK, pull in stlport build settings from the platform tree.
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_EXECUTABLE)
diff --git a/trunk/src/video_engine/test/auto_test/source/tb_I420_codec.cc b/trunk/src/video_engine/test/auto_test/source/tb_I420_codec.cc
new file mode 100644
index 0000000..df837c7
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/tb_I420_codec.cc
@@ -0,0 +1,324 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * tb_I420_codec.cc
+ *
+ */
+
+#include "tb_I420_codec.h"
+#include <string.h>
+#include <stdio.h>
+#include <assert.h>
+
+// Constructs an uninitialized fake encoder with no encoded buffer allocated.
+TbI420Encoder::TbI420Encoder() :
+    _inited(false), _encodedImage(), _encodedCompleteCallback(NULL)
+{
+    // Zero the per-method call counters that tests inspect later.
+    memset(&_functionCalls, 0, sizeof(_functionCalls));
+}
+
+// Frees the encoded-image buffer if it is still allocated.
+TbI420Encoder::~TbI420Encoder()
+{
+    _inited = false;
+    if (_encodedImage._buffer != NULL)
+    {
+        delete[] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+    }
+}
+
+// Copies the static version string into |version|. Returns the string length
+// on success, or WEBRTC_VIDEO_CODEC_ERR_PARAMETER when |length| cannot hold
+// the string plus its terminating NUL.
+WebRtc_Word32 TbI420Encoder::VersionStatic(WebRtc_Word8* version,
+                                           WebRtc_Word32 length)
+{
+    const WebRtc_Word8* str = "I420 version 1.0.0\n";
+    WebRtc_Word32 verLen = (WebRtc_Word32) strlen(str);
+    // Use >= here: with the previous '>' check, a buffer of exactly verLen
+    // bytes was accepted but strncpy then filled it completely, leaving the
+    // copy without a terminating NUL.
+    if (verLen >= length)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    strncpy(version, str, length);
+    return verLen;
+}
+
+// Instance wrapper; delegates to VersionStatic.
+WebRtc_Word32 TbI420Encoder::Version(WebRtc_Word8 *version,
+                                     WebRtc_Word32 length) const
+{
+    return VersionStatic(version, length);
+}
+
+// Releases the encoded buffer and marks the encoder uninitialized.
+WebRtc_Word32 TbI420Encoder::Release()
+{
+    _functionCalls.Release++;
+    // should allocate an encoded frame and then release it here, for that we
+    // actually need an init flag
+    if (_encodedImage._buffer != NULL)
+    {
+        delete[] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+    }
+    _inited = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Counts the call; fails if InitEncode has not succeeded yet, otherwise a
+// no-op for this fake encoder.
+WebRtc_Word32 TbI420Encoder::Reset()
+{
+    _functionCalls.Reset++;
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    return WEBRTC_VIDEO_CODEC_OK;
+
+}
+
+// No-op in this fake encoder; always succeeds. Note this method is not
+// tracked in _functionCalls, unlike the other overrides.
+WebRtc_Word32 TbI420Encoder::SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                                  int rtt) {
+  return 0;
+}
+
+// Validates |inst| and (re)allocates the encoded buffer for one raw I420
+// frame (3/2 * width * height bytes). Marks the encoder initialized on
+// success.
+WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
+                                        WebRtc_Word32 /*numberOfCores*/,
+                                        WebRtc_UWord32 /*maxPayloadSize */)
+{
+    _functionCalls.InitEncode++;
+    if (inst == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (inst->width < 1 || inst->height < 1)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+
+    // Drop any previous buffer before allocating for the new dimensions.
+    if (_encodedImage._buffer != NULL)
+    {
+        delete[] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+        _encodedImage._size = 0;
+    }
+    const WebRtc_UWord32 newSize = (3 * inst->width * inst->height) >> 1;
+    WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+    if (newBuffer == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    _encodedImage._size = newSize;
+    _encodedImage._buffer = newBuffer;
+
+    // If no memory allocation succeeded there is no point in being "inited".
+    _inited = true;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// "Encodes" by copying the raw frame verbatim into the encoded image and
+// delivering it synchronously via the registered callback. Every frame is
+// tagged as a key frame since no actual coding happens.
+WebRtc_Word32 TbI420Encoder::Encode(
+    const webrtc::RawImage& inputImage,
+    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
+    const webrtc::VideoFrameType* /*frameType*/)
+{
+    _functionCalls.Encode++;
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    if (_encodedCompleteCallback == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+
+    _encodedImage._frameType = webrtc::kKeyFrame; // no coding
+    _encodedImage._timeStamp = inputImage._timeStamp;
+    _encodedImage._encodedHeight = inputImage._height;
+    _encodedImage._encodedWidth = inputImage._width;
+    if (inputImage._length > _encodedImage._size)
+    {
+
+        // Incoming frame is larger than our buffer: reallocate based on the
+        // new dimensions (I420 needs 3/2 * width * height bytes).
+        if (_encodedImage._buffer != NULL)
+        {
+            delete[] _encodedImage._buffer;
+            _encodedImage._buffer = NULL;
+            _encodedImage._size = 0;
+        }
+        const WebRtc_UWord32 newSize = (3 * _encodedImage._encodedWidth
+            * _encodedImage._encodedHeight) >> 1;
+        WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+        if (newBuffer == NULL)
+        {
+            return WEBRTC_VIDEO_CODEC_MEMORY;
+        }
+        _encodedImage._size = newSize;
+        _encodedImage._buffer = newBuffer;
+    }
+    // NOTE(review): assumes _length never exceeds 3/2 * width * height,
+    // i.e. the input really is I420 — otherwise this assert fires.
+    assert(_encodedImage._size >= inputImage._length);
+    memcpy(_encodedImage._buffer, inputImage._buffer, inputImage._length);
+    _encodedImage._length = inputImage._length;
+    _encodedCompleteCallback->Encoded(_encodedImage);
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores the callback that Encode() invokes for each produced frame.
+WebRtc_Word32 TbI420Encoder::RegisterEncodeCompleteCallback(
+    webrtc::EncodedImageCallback* callback)
+{
+    _functionCalls.RegisterEncodeCompleteCallback++;
+    _encodedCompleteCallback = callback;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Counts the call; the loss rate itself is ignored by this fake encoder.
+WebRtc_Word32 TbI420Encoder::SetPacketLoss(WebRtc_UWord32 packetLoss)
+{
+    _functionCalls.SetPacketLoss++;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Counts the call; rate control is not implemented by this fake encoder.
+WebRtc_Word32 TbI420Encoder::SetRates(WebRtc_UWord32 newBitRate,
+                                      WebRtc_UWord32 frameRate)
+{
+    _functionCalls.SetRates++;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Unsupported here: counts the call and reports an error.
+WebRtc_Word32 TbI420Encoder::SetPeriodicKeyFrames(bool enable)
+{
+    _functionCalls.SetPeriodicKeyFrames++;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+}
+
+// Unsupported here: counts the call and reports an error.
+WebRtc_Word32 TbI420Encoder::CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
+                                                   WebRtc_Word32 /*size*/)
+{
+    _functionCalls.CodecConfigParameters++;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+}
+// Returns a copy of the per-method call counters for test assertions.
+TbI420Encoder::FunctionCalls TbI420Encoder::GetFunctionCalls()
+{
+    return _functionCalls;
+}
+
+// Constructs an uninitialized fake decoder with zeroed call counters.
+TbI420Decoder::TbI420Decoder():
+    _decodedImage(), _width(0), _height(0), _inited(false),
+        _decodeCompleteCallback(NULL)
+{
+    memset(&_functionCalls, 0, sizeof(_functionCalls));
+}
+
+// Release() frees the decoded buffer and clears the init flag.
+TbI420Decoder::~TbI420Decoder()
+{
+    Release();
+}
+
+// Counts the call; nothing to reset in this fake decoder.
+WebRtc_Word32 TbI420Decoder::Reset()
+{
+    _functionCalls.Reset++;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Validates |inst| and records the frame dimensions used to size the
+// decoded-image buffer in Decode().
+WebRtc_Word32 TbI420Decoder::InitDecode(const webrtc::VideoCodec* inst,
+                                        WebRtc_Word32 /*numberOfCores */)
+{
+    _functionCalls.InitDecode++;
+    if (inst == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    else if (inst->width < 1 || inst->height < 1)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    _width = inst->width;
+    _height = inst->height;
+    _inited = true;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// "Decodes" by copying the encoded bytes verbatim into a freshly allocated
+// decoded image sized from the InitDecode() dimensions, then delivers it
+// synchronously via the registered callback.
+WebRtc_Word32 TbI420Decoder::Decode(
+    const webrtc::EncodedImage& inputImage,
+    bool /*missingFrames*/,
+    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
+    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
+    WebRtc_Word64 /*renderTimeMs*/)
+{
+    _functionCalls.Decode++;
+    if (inputImage._buffer == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    if (inputImage._length <= 0)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+
+    // Reallocate the decoded image for every call: free the old buffer
+    // first, then allocate 3/2 * width * height bytes (I420 frame size).
+
+    if (_decodedImage._buffer != NULL)
+    {
+        delete[] _decodedImage._buffer;
+        _decodedImage._buffer = NULL;
+        _decodedImage._size = 0;
+    }
+    if (_decodedImage._buffer == NULL)
+    {
+        const WebRtc_UWord32 newSize = (3 * _width * _height) >> 1;
+        WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+        if (newBuffer == NULL)
+        {
+            return WEBRTC_VIDEO_CODEC_MEMORY;
+        }
+        _decodedImage._size = newSize;
+        _decodedImage._buffer = newBuffer;
+    }
+
+    // Set decoded image parameters
+    _decodedImage._height = _height;
+    _decodedImage._width = _width;
+    _decodedImage._timeStamp = inputImage._timeStamp;
+    // NOTE(review): assumes the payload fits in an I420 frame of the
+    // InitDecode() dimensions — otherwise this assert fires.
+    assert(_decodedImage._size >= inputImage._length);
+    memcpy(_decodedImage._buffer, inputImage._buffer, inputImage._length);
+    _decodedImage._length = inputImage._length;
+    //_decodedImage._buffer = inputImage._buffer;
+
+    _decodeCompleteCallback->Decoded(_decodedImage);
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores the callback that Decode() invokes for each produced frame.
+WebRtc_Word32 TbI420Decoder::RegisterDecodeCompleteCallback(
+    webrtc::DecodedImageCallback* callback)
+{
+    _functionCalls.RegisterDecodeCompleteCallback++;
+    _decodeCompleteCallback = callback;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Frees the decoded buffer and marks the decoder uninitialized.
+WebRtc_Word32 TbI420Decoder::Release()
+{
+    _functionCalls.Release++;
+    if (_decodedImage._buffer != NULL)
+    {
+        delete[] _decodedImage._buffer;
+        _decodedImage._buffer = NULL;
+    }
+    _inited = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Returns a copy of the per-method call counters for test assertions.
+TbI420Decoder::FunctionCalls TbI420Decoder::GetFunctionCalls()
+{
+    return _functionCalls;
+}
+
diff --git a/trunk/src/video_engine/test/auto_test/source/tb_capture_device.cc b/trunk/src/video_engine/test/auto_test/source/tb_capture_device.cc
new file mode 100644
index 0000000..0d4e46a
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/tb_capture_device.cc
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tb_capture_device.h"
+
+// Enumerates the system's capture devices, allocates the first one that the
+// engine accepts and starts capturing from it. On total failure, captureId
+// stays -1 and a gtest expectation failure is raised.
+TbCaptureDevice::TbCaptureDevice(TbInterfaces& Engine) :
+    captureId(-1),
+    ViE(Engine),
+    vcpm_(NULL)
+{
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    WebRtc_UWord8 deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    WebRtc_UWord8 uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    bool captureDeviceSet = false;
+
+    webrtc::VideoCaptureModule::DeviceInfo* devInfo =
+        webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+    for (size_t captureIdx = 0;
+        captureIdx < devInfo->NumberOfDevices();
+        captureIdx++)
+    {
+        EXPECT_EQ(0, devInfo->GetDeviceName(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength));
+
+        vcpm_ = webrtc::VideoCaptureFactory::Create(
+            captureIdx, uniqueId);
+        if (vcpm_ == NULL)  // Failed to open this device. Try next.
+        {
+            continue;
+        }
+        vcpm_->AddRef();
+
+        // NOTE(review): if AllocateCaptureDevice fails here, the reference
+        // taken on vcpm_ is never released before the next iteration
+        // overwrites it — confirm whether this leak matters for the tests.
+        int error = ViE.capture->AllocateCaptureDevice(*vcpm_, captureId);
+        if (error == 0)
+        {
+            ViETest::Log("Using capture device: %s, captureId: %d", deviceName,
+                         captureId);
+            captureDeviceSet = true;
+            break;
+        }
+    }
+    delete devInfo;
+    EXPECT_TRUE(captureDeviceSet);
+    if (!captureDeviceSet) {
+        return;
+    }
+
+    ViETest::Log("Starting capture device %s with captureId %d\n", deviceName,
+                 captureId);
+    EXPECT_EQ(0, ViE.capture->StartCapture(captureId));
+}
+
+// Stops and releases the capture device.
+// NOTE(review): if the constructor found no device, vcpm_ is NULL and this
+// dereference would crash — verify the tests never reach that state.
+TbCaptureDevice::~TbCaptureDevice(void)
+{
+    ViETest::Log("Stopping capture device with id %d\n", captureId);
+    EXPECT_EQ(0, ViE.capture->StopCapture(captureId));
+    EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(captureId));
+    vcpm_->Release();
+}
+
+// Feeds this capture device's frames into |videoChannel|.
+void TbCaptureDevice::ConnectTo(int videoChannel)
+{
+    EXPECT_EQ(0, ViE.capture->ConnectCaptureDevice(captureId, videoChannel));
+}
+
+// Detaches whatever capture device is connected to |videoChannel|.
+void TbCaptureDevice::Disconnect(int videoChannel)
+{
+    EXPECT_EQ(0, ViE.capture->DisconnectCaptureDevice(videoChannel));
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/tb_external_transport.cc b/trunk/src/video_engine/test/auto_test/source/tb_external_transport.cc
new file mode 100644
index 0000000..8670c40
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/tb_external_transport.cc
@@ -0,0 +1,445 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tb_external_transport.h"
+
+#include <stdio.h> // printf
+#include <stdlib.h> // rand
+#include <cassert>
+
+#if defined(WEBRTC_LINUX) || defined(__linux__)
+#include <string.h>
+#endif
+#if defined(WEBRTC_MAC)
+#include <cstring>
+#endif
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "vie_network.h"
+
+#if defined(_WIN32)
+#pragma warning(disable: 4355) // 'this' : used in base member initializer list
+#endif
+
+// Builds the fake transport: creates the delivery thread, its wake-up event
+// and the two locks (_crit for the packet queues, _statCrit for counters),
+// seeds rand() for packet-loss simulation, then starts the thread.
+TbExternalTransport::TbExternalTransport(webrtc::ViENetwork& vieNetwork) :
+        _vieNetwork(vieNetwork),
+        _thread(*webrtc::ThreadWrapper::CreateThread(
+            ViEExternalTransportRun, this, webrtc::kHighPriority,
+            "AutotestTransport")),
+        _event(*webrtc::EventWrapper::Create()),
+        _crit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
+        _statCrit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
+        _lossRate(0),
+        _networkDelayMs(0),
+        _rtpCount(0),
+        _rtcpCount(0),
+        _dropCount(0),
+        _rtpPackets(),
+        _rtcpPackets(),
+        _send_frame_callback(NULL),
+        _receive_frame_callback(NULL),
+        _temporalLayers(0),
+        _seqNum(0),
+        _sendPID(0),
+        _receivedPID(0),
+        _switchLayer(false),
+        _currentRelayLayer(0),
+        _lastTimeMs(webrtc::TickTime::MillisecondTimestamp()),
+        _checkSSRC(false),
+        _lastSSRC(0),
+        _filterSSRC(false),
+        _SSRC(0),
+        _checkSequenceNumber(0),
+        _firstSequenceNumber(0),
+        _firstRTPTimestamp(0),
+        _lastSendRTPTimestamp(0),
+        _lastReceiveRTPTimestamp(0)
+{
+    srand((int) webrtc::TickTime::MicrosecondTimestamp());
+    unsigned int tId = 0;
+    _thread.Start(tId);
+}
+
+// Wakes and stops the delivery thread, then frees the wrappers. The thread
+// and event are intentionally leaked if Stop() fails, since deleting them
+// while the thread runs would be worse.
+TbExternalTransport::~TbExternalTransport()
+{
+    _thread.SetNotAlive();
+    _event.Set();
+    if (_thread.Stop())
+    {
+        delete &_thread;
+        delete &_event;
+    }
+    delete &_crit;
+    delete &_statCrit;
+}
+
+// Intercepts an outgoing RTP packet: notifies the send-frame callback on a
+// new RTP timestamp, optionally filters by SSRC, optionally relays only a
+// subset of VP8 temporal layers (rewriting sequence numbers and picture
+// IDs), applies simulated random loss, and finally queues the packet for
+// delayed delivery by the transport thread. Returns |len| (or len on a
+// filtered packet, 0 on a dropped one).
+int TbExternalTransport::SendPacket(int channel, const void *data, int len)
+{
+  // Parse timestamp from RTP header according to RFC 3550, section 5.1.
+    WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
+    WebRtc_UWord32 rtp_timestamp = ptr[4] << 24;
+    rtp_timestamp += ptr[5] << 16;
+    rtp_timestamp += ptr[6] << 8;
+    rtp_timestamp += ptr[7];
+    _crit.Enter();
+    // Remember the very first timestamp: packets of that frame are never
+    // dropped below so at least one frame always gets through.
+    if (_firstRTPTimestamp == 0) {
+      _firstRTPTimestamp = rtp_timestamp;
+    }
+    _crit.Leave();
+    // A timestamp change marks the first packet of a new frame.
+    if (_send_frame_callback != NULL &&
+        _lastSendRTPTimestamp != rtp_timestamp) {
+      _send_frame_callback->FrameSent(rtp_timestamp);
+    }
+    _lastSendRTPTimestamp = rtp_timestamp;
+
+    if (_filterSSRC)
+    {
+        // SSRC occupies bytes 8-11 of the RTP header (RFC 3550).
+        WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
+        WebRtc_UWord32 ssrc = ptr[8] << 24;
+        ssrc += ptr[9] << 16;
+        ssrc += ptr[10] << 8;
+        ssrc += ptr[11];
+        if (ssrc != _SSRC)
+        {  
+            return len; // return len to avoid error in trace file
+        }
+    }
+    if (_temporalLayers) {
+        // parse out vp8 temporal layers
+        // 12 bytes RTP
+        WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
+
+        if (ptr[12] & 0x80 &&  // X-bit
+            ptr[13] & 0x20)  // T-bit
+        {
+            // Compute the offset to the byte carrying the TID, skipping the
+            // optional PictureID (1 or 2 bytes) and TL0PICIDX fields.
+            int offset = 1;
+            if (ptr[13] & 0x80) // PID-bit
+            {
+                offset++;
+                if (ptr[14] & 0x80) // 2 byte PID
+                {
+                    offset++;
+                }
+            }
+            if (ptr[13] & 0x40)
+            {
+                offset++;
+            }
+            unsigned char TID = (ptr[13 + offset] >> 5);
+            unsigned int timeMs = NowMs();
+
+            // Every 5 second switch layer
+            if (_lastTimeMs + 5000 < timeMs)
+            {
+                _lastTimeMs = timeMs;
+                _switchLayer = true;
+            }
+            // Switch at the non ref frame
+            if (_switchLayer && (ptr[12] & 0x20))
+            {   // N-bit
+              _currentRelayLayer++;
+                if (_currentRelayLayer >= _temporalLayers)
+                  _currentRelayLayer = 0;
+
+                _switchLayer = false;
+                printf("\t Switching to layer:%d\n", _currentRelayLayer);
+            }
+            // Drop packets above the currently relayed temporal layer.
+            if (_currentRelayLayer < TID)
+            {
+                return len; // return len to avoid error in trace file
+            }
+            // Track picture-ID changes so relayed packets get a contiguous
+            // outgoing PID sequence (_sendPID) after dropped layers.
+            if (ptr[14] & 0x80) // 2 byte PID
+            {
+                if(_receivedPID != ptr[15])
+                {
+                    _sendPID++;
+                    _receivedPID = ptr[15];
+                }
+            } else
+            {
+              if(_receivedPID != ptr[14])
+              {
+                _sendPID++;
+                _receivedPID = ptr[14];
+              }
+            }
+        }
+    }
+    _statCrit.Enter();
+    _rtpCount++;
+    _statCrit.Leave();
+
+    // Packet loss. Never drop packets from the first RTP timestamp, i.e. the
+    // first frame being transmitted.
+    int dropThis = rand() % 100;
+    if (dropThis < _lossRate && _firstRTPTimestamp != rtp_timestamp)
+    {
+        _statCrit.Enter();
+        _dropCount++;
+        _statCrit.Leave();
+        return 0;
+    }
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+
+    if (_temporalLayers)
+    {
+        // rewrite seqNum
+        newPacket->packetBuffer[2] = _seqNum >> 8;
+        newPacket->packetBuffer[3] = _seqNum;
+        _seqNum++;
+
+        // rewrite PID
+        if (newPacket->packetBuffer[14] & 0x80) // 2 byte PID
+        {
+            newPacket->packetBuffer[14] = (_sendPID >> 8) | 0x80;
+            newPacket->packetBuffer[15] = _sendPID;
+        } else
+        {
+            newPacket->packetBuffer[14] = (_sendPID & 0x7f);
+        }
+    }
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    // Stamp the simulated arrival time and wake the delivery thread.
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtpPackets.push_back(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+// Registers the callback fired once per outgoing frame (new RTP timestamp).
+void TbExternalTransport::RegisterSendFrameCallback(
+    SendFrameCallback* callback) {
+  _send_frame_callback = callback;
+}
+
+// Registers the callback fired once per delivered frame (new RTP timestamp).
+void TbExternalTransport::RegisterReceiveFrameCallback(
+    ReceiveFrameCallback* callback) {
+  _receive_frame_callback = callback;
+}
+
+// Enables VP8 temporal-layer relaying in SendPacket with |layers| layers.
+// Set to 0 to disable.
+void TbExternalTransport::SetTemporalToggle(unsigned char layers)
+{
+    _temporalLayers = layers;
+}
+
+// Intercepts an outgoing RTCP packet: counts it and queues it for delayed
+// delivery by the transport thread. RTCP is never dropped or filtered.
+int TbExternalTransport::SendRTCPPacket(int channel, const void *data, int len)
+{
+    _statCrit.Enter();
+    _rtcpCount++;
+    _statCrit.Leave();
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtcpPackets.push_back(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+// Sets the simulated RTP loss rate in percent (0-100).
+// NOTE(review): guarded by _statCrit here, but SendPacket reads _lossRate
+// without holding it — confirm this benign-race is acceptable for the tests.
+WebRtc_Word32 TbExternalTransport::SetPacketLoss(WebRtc_Word32 lossRate)
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _lossRate = lossRate;
+    return 0;
+}
+
+// Sets the simulated one-way network delay applied to queued packets.
+void TbExternalTransport::SetNetworkDelay(WebRtc_Word64 delayMs)
+{
+    webrtc::CriticalSectionScoped cs(_crit);
+    _networkDelayMs = delayMs;
+}
+
+// From now on, only RTP packets carrying |ssrc| are relayed by SendPacket.
+void TbExternalTransport::SetSSRCFilter(WebRtc_UWord32 ssrc)
+{
+    webrtc::CriticalSectionScoped cs(_crit);
+    _filterSSRC = true;
+    _SSRC = ssrc;
+}
+
+// Resets the RTP/RTCP/drop counters reported by GetStats().
+void TbExternalTransport::ClearStats()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _rtpCount = 0;
+    _dropCount = 0;
+    _rtcpCount = 0;
+}
+
+// Copies the RTP, dropped and RTCP packet counters into the out-parameters.
+void TbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets,
+                                   WebRtc_Word32& numDroppedPackets,
+                                   WebRtc_Word32& numRtcpPackets)
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    numRtpPackets = _rtpCount;
+    numDroppedPackets = _dropCount;
+    numRtcpPackets = _rtcpCount;
+}
+
+// Arms a one-shot capture of the next delivered packet's SSRC.
+void TbExternalTransport::EnableSSRCCheck()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _checkSSRC = true;
+}
+
+// Returns the SSRC captured after EnableSSRCCheck() (0 if none captured).
+unsigned int TbExternalTransport::ReceivedSSRC()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    return _lastSSRC;
+}
+
+// Arms a one-shot capture of the next delivered packet's sequence number.
+void TbExternalTransport::EnableSequenceNumberCheck()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _checkSequenceNumber = true;
+}
+
+// Returns the sequence number captured after EnableSequenceNumberCheck().
+unsigned short TbExternalTransport::GetFirstSequenceNumber()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    return _firstSequenceNumber;
+}
+
+// Static thread entry point: forwards to the instance's process loop.
+bool TbExternalTransport::ViEExternalTransportRun(void* object)
+{
+    return static_cast<TbExternalTransport*>
+        (object)->ViEExternalTransportProcess();
+}
+// Thread body: drains the RTP then the RTCP queue, delivering every packet
+// whose simulated receive time has passed to ViENetwork, then sleeps until
+// the earliest pending receive time (or KMaxWaitTimeMs). Returns true so the
+// thread wrapper keeps calling it.
+bool TbExternalTransport::ViEExternalTransportProcess()
+{
+    unsigned int waitTime = KMaxWaitTimeMs;
+
+    VideoPacket* packet = NULL;
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = 0;
+        if (packet)
+        {
+          timeToReceive = packet->receiveTime - NowMs();
+        }
+        else
+        {
+          // There should never be any empty packets in the list.
+          assert(false);
+        }
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime && timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        _rtpPackets.pop_front();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            {
+                webrtc::CriticalSectionScoped cs(_statCrit);
+                // One-shot captures armed by EnableSSRCCheck /
+                // EnableSequenceNumberCheck (RFC 3550 header offsets).
+                if (_checkSSRC)
+                {
+                    _lastSSRC = ((packet->packetBuffer[8]) << 24);
+                    _lastSSRC += (packet->packetBuffer[9] << 16);
+                    _lastSSRC += (packet->packetBuffer[10] << 8);
+                    _lastSSRC += packet->packetBuffer[11];
+                    _checkSSRC = false;
+                }
+                if (_checkSequenceNumber)
+                {
+                    _firstSequenceNumber
+                        = (unsigned char) packet->packetBuffer[2] << 8;
+                    _firstSequenceNumber
+                        += (unsigned char) packet->packetBuffer[3];
+                    _checkSequenceNumber = false;
+                }
+            }
+            // Signal received packet of frame
+            WebRtc_UWord8* ptr = (WebRtc_UWord8*)packet->packetBuffer;
+            WebRtc_UWord32 rtp_timestamp = ptr[4] << 24;
+            rtp_timestamp += ptr[5] << 16;
+            rtp_timestamp += ptr[6] << 8;
+            rtp_timestamp += ptr[7];
+            if (_receive_frame_callback != NULL &&
+                _lastReceiveRTPTimestamp != rtp_timestamp) {
+              _receive_frame_callback->FrameReceived(rtp_timestamp);
+            }
+            _lastReceiveRTPTimestamp = rtp_timestamp;
+
+            _vieNetwork.ReceivedRTPPacket(packet->channel,
+                                          packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    while (!_rtcpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtcpPackets.front();
+        WebRtc_Word64 timeToReceive = 0;
+        if (packet)
+        {
+          timeToReceive = packet->receiveTime - NowMs();
+        }
+        else
+        {
+            // There should never be any empty packets in the list.
+            assert(false);
+        }
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime && timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        _rtcpPackets.pop_front();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            _vieNetwork.ReceivedRTCPPacket(
+                 packet->channel,
+                 packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    _event.Wait(waitTime + 1); // Add 1 ms to not call to early...
+    return true;
+}
+
+// Current wall-clock time in milliseconds (TickTime based).
+WebRtc_Word64 TbExternalTransport::NowMs()
+{
+    return webrtc::TickTime::MillisecondTimestamp();
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/tb_interfaces.cc b/trunk/src/video_engine/test/auto_test/source/tb_interfaces.cc
new file mode 100644
index 0000000..b6b13a2
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/tb_interfaces.cc
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tb_interfaces.h"
+
+#include "gtest/gtest.h"
+
+// Creates a VideoEngine, enables full tracing to
+// <result-path>/<test_name>_trace.txt, runs Init() and acquires one
+// reference to every ViE sub-API. Every step is checked with gtest EXPECTs.
+TbInterfaces::TbInterfaces(const char* test_name) {
+    std::string trace_file_path =
+        (ViETest::GetResultOutputPath() + test_name) + "_trace.txt";
+
+    ViETest::Log("Creating ViE Interfaces for test %s\n", test_name);
+
+    video_engine = webrtc::VideoEngine::Create();
+    EXPECT_TRUE(video_engine != NULL);
+
+    // Tracing is configured before Init() so initialization itself is traced.
+    EXPECT_EQ(0, video_engine->SetTraceFile(trace_file_path.c_str()));
+    EXPECT_EQ(0, video_engine->SetTraceFilter(webrtc::kTraceAll));
+
+    base = webrtc::ViEBase::GetInterface(video_engine);
+    EXPECT_TRUE(base != NULL);
+
+    EXPECT_EQ(0, base->Init());
+
+    // Each GetInterface() call takes a reference that the destructor
+    // releases again.
+    capture = webrtc::ViECapture::GetInterface(video_engine);
+    EXPECT_TRUE(capture != NULL);
+
+    rtp_rtcp = webrtc::ViERTP_RTCP::GetInterface(video_engine);
+    EXPECT_TRUE(rtp_rtcp != NULL);
+
+    render = webrtc::ViERender::GetInterface(video_engine);
+    EXPECT_TRUE(render != NULL);
+
+    codec = webrtc::ViECodec::GetInterface(video_engine);
+    EXPECT_TRUE(codec != NULL);
+
+    network = webrtc::ViENetwork::GetInterface(video_engine);
+    EXPECT_TRUE(network != NULL);
+
+    image_process = webrtc::ViEImageProcess::GetInterface(video_engine);
+    EXPECT_TRUE(image_process != NULL);
+
+    encryption = webrtc::ViEEncryption::GetInterface(video_engine);
+    EXPECT_TRUE(encryption != NULL);
+}
+
+TbInterfaces::~TbInterfaces() {
+  // Drop our reference to every sub-API acquired in the constructor.
+  // Release() returns the number of references still outstanding, which
+  // must be zero for each interface before the engine can be deleted.
+  EXPECT_EQ(0, encryption->Release());
+  EXPECT_EQ(0, image_process->Release());
+  EXPECT_EQ(0, codec->Release());
+  EXPECT_EQ(0, capture->Release());
+  EXPECT_EQ(0, render->Release());
+  EXPECT_EQ(0, rtp_rtcp->Release());
+  EXPECT_EQ(0, network->Release());
+  EXPECT_EQ(0, base->Release());
+  EXPECT_TRUE(webrtc::VideoEngine::Delete(video_engine)) <<
+      "Since we have released all interfaces at this point, deletion "
+      "should be successful.";
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/tb_video_channel.cc b/trunk/src/video_engine/test/auto_test/source/tb_video_channel.cc
new file mode 100644
index 0000000..1e62a6b
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/tb_video_channel.cc
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tb_video_channel.h"
+
+// Creates a channel on |Engine|, installs |sendCodec| (at the requested
+// geometry, frame rate and bitrate) as the send codec, and registers every
+// codec the engine knows as a receive codec. Fails the current gtest test
+// if no codec of type |sendCodec| was found.
+TbVideoChannel::TbVideoChannel(TbInterfaces& Engine,
+                               webrtc::VideoCodecType sendCodec, int width,
+                               int height, int frameRate, int startBitrate) :
+    videoChannel(-1),  ViE(Engine)
+{
+    EXPECT_EQ(0, ViE.base->CreateChannel(videoChannel));
+
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+    bool sendCodecSet = false;
+    // Hoisted out of the loop condition: the codec count cannot change
+    // while we iterate, so there is no need to query the engine per step.
+    const int numberOfCodecs = ViE.codec->NumberOfCodecs();
+    for (int idx = 0; idx < numberOfCodecs; idx++)
+    {
+        EXPECT_EQ(0, ViE.codec->GetCodec(idx, videoCodec));
+        videoCodec.width = width;
+        videoCodec.height = height;
+        videoCodec.maxFramerate = frameRate;
+
+        if (videoCodec.codecType == sendCodec && sendCodecSet == false)
+        {
+            // I420 is uncompressed; bitrate settings only apply to
+            // encoded formats.
+            if (videoCodec.codecType != webrtc::kVideoCodecI420)
+            {
+                videoCodec.startBitrate = startBitrate;
+                videoCodec.maxBitrate = startBitrate * 3;
+            }
+            EXPECT_EQ(0, ViE.codec->SetSendCodec(videoChannel, videoCodec));
+            sendCodecSet = true;
+        }
+        // Receive side always uses a fixed CIF resolution for VP8.
+        if (videoCodec.codecType == webrtc::kVideoCodecVP8)
+        {
+            videoCodec.width = 352;
+            videoCodec.height = 288;
+        }
+        EXPECT_EQ(0, ViE.codec->SetReceiveCodec(videoChannel, videoCodec));
+    }
+    EXPECT_TRUE(sendCodecSet);
+}
+
+TbVideoChannel::~TbVideoChannel(void)
+{
+    // Tear down the channel created in the constructor.
+    EXPECT_EQ(0, ViE.base->DeleteChannel(videoChannel));
+}
+
+void TbVideoChannel::StartSend(const unsigned short rtpPort /*= 11000*/,
+                               const char* ipAddress /*= "127.0.0.1"*/) {
+  // The destination must be configured before the sender is started.
+  EXPECT_EQ(0,
+            ViE.network->SetSendDestination(videoChannel, ipAddress, rtpPort));
+  EXPECT_EQ(0, ViE.base->StartSend(videoChannel));
+}
+
+void TbVideoChannel::SetFrameSettings(int width, int height, int frameRate) {
+  // Re-negotiate frame geometry and rate, keeping all other codec settings
+  // as currently configured, and apply it to both directions.
+  webrtc::VideoCodec codecSettings;
+  EXPECT_EQ(0, ViE.codec->GetSendCodec(videoChannel, codecSettings));
+
+  codecSettings.width = width;
+  codecSettings.height = height;
+  codecSettings.maxFramerate = frameRate;
+
+  EXPECT_EQ(0, ViE.codec->SetSendCodec(videoChannel, codecSettings));
+  EXPECT_EQ(0, ViE.codec->SetReceiveCodec(videoChannel, codecSettings));
+}
+
+void TbVideoChannel::StopSend()
+{
+    // Stop sending on the channel; the channel itself stays alive.
+    EXPECT_EQ(0, ViE.base->StopSend(videoChannel));
+}
+
+void TbVideoChannel::StartReceive(const unsigned short rtpPort /*= 11000*/) {
+  // Bind the local receive port, then start receiving on the channel.
+  EXPECT_EQ(0, ViE.network->SetLocalReceiver(videoChannel, rtpPort));
+  EXPECT_EQ(0, ViE.base->StartReceive(videoChannel));
+}
+
+void TbVideoChannel::StopReceive()
+{
+    // Stop receiving on the channel; the channel itself stays alive.
+    EXPECT_EQ(0, ViE.base->StopReceive(videoChannel));
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest.cc
new file mode 100644
index 0000000..8281ea2
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest.cc
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest.cc
+//
+
+#include "vie_autotest.h"
+
+#include <stdio.h>
+
+#include "engine_configurations.h"
+#include "general_primitives.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "tb_capture_device.h"
+#include "testsupport/fileutils.h"
+#include "video_render.h"
+#include "vie_autotest_defines.h"
+
+// ViETest implementation
+// Out-of-class definitions of ViETest's static logging members; both start
+// out unset and are presumably managed by ViETest's own log helpers.
+FILE* ViETest::log_file_ = NULL;
+char* ViETest::log_str_ = NULL;
+
+std::string ViETest::GetResultOutputPath() {
+    // Directory (used as a path prefix) where test output such as trace
+    // files is written.
+#ifdef WEBRTC_ANDROID
+    // Android builds write straight to the sdcard root.
+    return "/sdcard/";
+#else
+    return webrtc::test::OutputPath();
+#endif
+}
+
+// ViEAutoTest implementation
+ViEAutoTest::ViEAutoTest(void* window1, void* window2) :
+    _window1(window1),
+    _window2(window2),
+    _renderType(webrtc::kRenderDefault),
+    // NOTE(review): the _vrm1/_vrm2 initializers read _renderType. Member
+    // initialization follows declaration order in the class, not the order
+    // written here -- confirm _renderType is declared before _vrm1/_vrm2 in
+    // the header, otherwise this reads an uninitialized value.
+    _vrm1(webrtc::VideoRender::CreateVideoRender(
+        4561, window1, false, _renderType)),
+    _vrm2(webrtc::VideoRender::CreateVideoRender(
+        4562, window2, false, _renderType))
+{
+    // Essentially every test renders; fail hard if the platform renderer
+    // could not be created.
+    assert(_vrm1);
+    assert(_vrm2);
+}
+
+ViEAutoTest::~ViEAutoTest()
+{
+    // Destroy the two render modules created in the constructor.
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    webrtc::VideoRender::DestroyVideoRender(_vrm2);
+    _vrm2 = NULL;
+}
+
+// Runs the standard (happy-path) test of every ViE sub-API. Failures are
+// reported through the gtest EXPECTs inside each sub test.
+void ViEAutoTest::ViEStandardTest()
+{
+    ViEBaseStandardTest();
+    ViECaptureStandardTest();
+    ViECodecStandardTest();
+    ViEEncryptionStandardTest();
+    ViEFileStandardTest();
+    ViEImageProcessStandardTest();
+    ViENetworkStandardTest();
+    ViERenderStandardTest();
+    ViERtpRtcpStandardTest();
+}
+
+// Runs the extended test of every ViE sub-API. Failures are reported
+// through the gtest EXPECTs inside each sub test.
+void ViEAutoTest::ViEExtendedTest()
+{
+    ViEBaseExtendedTest();
+    ViECaptureExtendedTest();
+    ViECodecExtendedTest();
+    ViEEncryptionExtendedTest();
+    ViEFileExtendedTest();
+    ViEImageProcessExtendedTest();
+    ViENetworkExtendedTest();
+    ViERenderExtendedTest();
+    ViERtpRtcpExtendedTest();
+}
+
+// Runs the API (argument-validation) test of every ViE sub-API. Failures
+// are reported through the gtest EXPECTs inside each sub test.
+void ViEAutoTest::ViEAPITest()
+{
+    ViEBaseAPITest();
+    ViECaptureAPITest();
+    ViECodecAPITest();
+    ViEEncryptionAPITest();
+    ViEFileAPITest();
+    ViEImageProcessAPITest();
+    ViENetworkAPITest();
+    ViERenderAPITest();
+    ViERtpRtcpAPITest();
+}
+
+// Logs every relevant field of |videoCodec| to the test log.
+void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
+{
+    ViETest::Log("Video Codec Information:");
+
+    switch (videoCodec.codecType)
+    {
+        case webrtc::kVideoCodecVP8:
+            ViETest::Log("\tcodecType: VP8");
+            break;
+        case webrtc::kVideoCodecI420:
+            ViETest::Log("\tcodecType: I420");
+            break;
+        case webrtc::kVideoCodecRED:
+            ViETest::Log("\tcodecType: RED");
+            break;
+        case webrtc::kVideoCodecULPFEC:
+            ViETest::Log("\tcodecType: ULPFEC");
+            break;
+        case webrtc::kVideoCodecUnknown:
+            ViETest::Log("\tcodecType: ????");
+            break;
+        // TODO(sh): keep or remove MPEG4?
+        default:
+            // Catches any codec type not listed above (e.g. MPEG4, or types
+            // added later), which was previously skipped silently.
+            ViETest::Log("\tcodecType: %d", videoCodec.codecType);
+            break;
+    }
+
+    ViETest::Log("\theight: %u", videoCodec.height);
+    ViETest::Log("\tmaxBitrate: %u", videoCodec.maxBitrate);
+    ViETest::Log("\tmaxFramerate: %u", videoCodec.maxFramerate);
+    ViETest::Log("\tminBitrate: %u", videoCodec.minBitrate);
+    ViETest::Log("\tplName: %s", videoCodec.plName);
+    ViETest::Log("\tplType: %u", videoCodec.plType);
+    ViETest::Log("\tstartBitrate: %u", videoCodec.startBitrate);
+    ViETest::Log("\twidth: %u", videoCodec.width);
+    ViETest::Log("");
+}
+
+// Logs every relevant field of |audioCodec| to the test log.
+void ViEAutoTest::PrintAudioCodec(const webrtc::CodecInst audioCodec)
+{
+    ViETest::Log("Audio Codec Information:");
+    ViETest::Log("\tchannels: %u", audioCodec.channels);
+    // The field labels below were missing ("\t: %u") in the original
+    // copy-pasted log lines; restored so the output is readable.
+    ViETest::Log("\tpacsize: %u", audioCodec.pacsize);
+    ViETest::Log("\tplfreq: %u", audioCodec.plfreq);
+    ViETest::Log("\tplname: %s", audioCodec.plname);
+    ViETest::Log("\tpltype: %u", audioCodec.pltype);
+    ViETest::Log("\trate: %u", audioCodec.rate);
+    ViETest::Log("");
+}
+
+// Renders both the local capture device and the channel's decoded output
+// into the same window (_window1); the trailing 0/1 argument presumably
+// sets z-order, i.e. the stream draws on top of the preview.
+// NOTE(review): _window2 is unused here -- confirm the single-window
+// layout is intentional.
+void ViEAutoTest::RenderCaptureDeviceAndOutputStream(
+    TbInterfaces* video_engine,
+    TbVideoChannel* video_channel,
+    TbCaptureDevice* capture_device) {
+  RenderInWindow(
+      video_engine->render, capture_device->captureId, _window1, 0);
+  RenderInWindow(
+      video_engine->render, video_channel->videoChannel, _window1, 1);
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_android.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_android.cc
new file mode 100644
index 0000000..81af69b
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_android.cc
@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../interface/vie_autotest_android.h"
+
+#include <android/log.h>
+#include <stdio.h>
+
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+
+// Runs the ViE test selected by |testSelection|/|subTestSelection| on
+// Android. |subTestSelection| == 0 means "run the whole suite chosen by
+// testSelection"; any other value picks one specific sub test. Returns the
+// accumulated error count (see note below).
+int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
+                                    void* window1, void* window2,
+                                    void* javaVM, void* env, void* context) {
+  ViEAutoTest vieAutoTest(window1, window2);
+  ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
+  webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
+#ifndef WEBRTC_ANDROID_OPENSLES
+  // voice engine calls into ADM directly
+  webrtc::VoiceEngine::SetAndroidAudioDeviceObjects(javaVM, env, context);
+#endif
+  int testErrors = 0;
+
+  // NOTE(review): the ViE*Test() suite entry points are defined as void and
+  // report failures via gtest EXPECTs (see vie_autotest.cc), so their
+  // results can no longer be summed into testErrors; the old
+  // "testErrors += vieAutoTest.ViEStandardTest()" form did not compile.
+  // Only ViELoopbackCall() still returns an error count -- confirm against
+  // vie_autotest.h.
+  if (subTestSelection == 0) {
+    // Run the complete suite chosen by testSelection.
+    switch (testSelection) {
+      case 0:
+        vieAutoTest.ViEStandardTest();
+        ViETest::Log("Standard/delivery test done.");
+        break;
+      case 1:
+        vieAutoTest.ViEAPITest();
+        break;
+      case 2:
+        vieAutoTest.ViEExtendedTest();
+        break;
+      case 3:
+        testErrors += vieAutoTest.ViELoopbackCall();
+        break;
+      default:
+        break;
+    }
+  } else {
+    // A specific sub test was requested. This dispatch used to run even
+    // when subTestSelection == 0, which made testSelection == 3 execute
+    // ViELoopbackCall() twice; it is now the else-branch of the check
+    // above.
+    switch (testSelection) {
+      case 0:  // Specific standard test.
+        switch (subTestSelection) {
+          case 1:  // base
+            vieAutoTest.ViEBaseStandardTest();
+            break;
+          case 2:  // capture
+            vieAutoTest.ViECaptureStandardTest();
+            break;
+          case 3:  // codec
+            vieAutoTest.ViECodecStandardTest();
+            break;
+          case 5:  // encryption
+            vieAutoTest.ViEEncryptionStandardTest();
+            break;
+          case 6:  // file
+            vieAutoTest.ViEFileStandardTest();
+            break;
+          case 7:  // image process
+            vieAutoTest.ViEImageProcessStandardTest();
+            break;
+          case 8:  // network
+            vieAutoTest.ViENetworkStandardTest();
+            break;
+          case 9:  // render
+            vieAutoTest.ViERenderStandardTest();
+            break;
+          case 10:  // RTP/RTCP
+            vieAutoTest.ViERtpRtcpStandardTest();
+            break;
+          default:
+            break;
+        }
+        break;
+
+      case 1:  // Specific API test.
+        switch (subTestSelection) {
+          case 1:  // base
+            vieAutoTest.ViEBaseAPITest();
+            break;
+          case 2:  // capture
+            vieAutoTest.ViECaptureAPITest();
+            break;
+          case 3:  // codec
+            vieAutoTest.ViECodecAPITest();
+            break;
+          case 5:  // encryption
+            vieAutoTest.ViEEncryptionAPITest();
+            break;
+          case 6:  // file
+            vieAutoTest.ViEFileAPITest();
+            break;
+          case 7:  // image process
+            vieAutoTest.ViEImageProcessAPITest();
+            break;
+          case 8:  // network
+            vieAutoTest.ViENetworkAPITest();
+            break;
+          case 9:  // render
+            vieAutoTest.ViERenderAPITest();
+            break;
+          case 10:  // RTP/RTCP
+            vieAutoTest.ViERtpRtcpAPITest();
+            break;
+          case 11:
+            break;
+          default:
+            break;
+        }
+        break;
+
+      case 2:  // Specific extended test.
+        switch (subTestSelection) {
+          case 1:  // base
+            vieAutoTest.ViEBaseExtendedTest();
+            break;
+          case 2:  // capture
+            vieAutoTest.ViECaptureExtendedTest();
+            break;
+          case 3:  // codec
+            vieAutoTest.ViECodecExtendedTest();
+            break;
+          case 5:  // encryption
+            vieAutoTest.ViEEncryptionExtendedTest();
+            break;
+          case 6:  // file
+            vieAutoTest.ViEFileExtendedTest();
+            break;
+          case 7:  // image process
+            vieAutoTest.ViEImageProcessExtendedTest();
+            break;
+          case 8:  // network
+            vieAutoTest.ViENetworkExtendedTest();
+            break;
+          case 9:  // render
+            vieAutoTest.ViERenderExtendedTest();
+            break;
+          case 10:  // RTP/RTCP
+            vieAutoTest.ViERtpRtcpExtendedTest();
+            break;
+          case 11:
+            break;
+          default:
+            break;
+        }
+        break;
+
+      case 3:
+        testErrors += vieAutoTest.ViELoopbackCall();
+        break;
+
+      default:
+        break;
+    }
+  }
+
+  if (testErrors) {
+    ViETest::Log("Test done with %d errors!\n", testErrors);
+  }
+  else {
+    ViETest::Log("Test passed!\n");
+  }
+  return testErrors;
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_base.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_base.cc
new file mode 100644
index 0000000..d2a29b2
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_base.cc
@@ -0,0 +1,208 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest.h"
+
+#include "base_primitives.h"
+#include "general_primitives.h"
+#include "tb_interfaces.h"
+#include "vie_autotest_defines.h"
+#include "video_capture_factory.h"
+
+// Records the most recent CPU-load value reported through
+// ViEBaseObserver::PerformanceAlarm(), so a test can assert the alarm fired.
+class BaseObserver : public webrtc::ViEBaseObserver {
+ public:
+  BaseObserver()
+      : cpu_load_(0) {}
+
+  // Invoked by the engine; stores the reported load for later inspection.
+  virtual void PerformanceAlarm(const unsigned int cpu_load) {
+    cpu_load_ = cpu_load;
+  }
+  // Last reported load; stays 0 until the first alarm.
+  unsigned int cpu_load_;
+};
+
+// End-to-end smoke test of the ViEBase API: creates a channel, connects a
+// real capture device, renders local preview and remote video, runs an
+// I420 call, then tears everything down in reverse order.
+void ViEAutoTest::ViEBaseStandardTest() {
+  // ***************************************************************
+  // Begin create/initialize WebRTC Video Engine for testing
+  // ***************************************************************
+
+  TbInterfaces interfaces("ViEBaseStandardTest");
+
+  // ***************************************************************
+  // Engine ready. Set up the test case:
+  // ***************************************************************
+  int video_channel = -1;
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+
+  webrtc::VideoCaptureModule* video_capture_module(NULL);
+  const unsigned int kMaxDeviceNameLength = 128;
+  WebRtc_UWord8 device_name[kMaxDeviceNameLength];
+  memset(device_name, 0, kMaxDeviceNameLength);
+  int capture_id;
+
+  webrtc::ViEBase *base_interface = interfaces.base;
+  webrtc::ViERender *render_interface = interfaces.render;
+  webrtc::ViECapture *capture_interface = interfaces.capture;
+
+  // Locates a capture device on the system and fills in device_name,
+  // capture_id and video_capture_module (helper from general_primitives).
+  FindCaptureDeviceOnSystem(capture_interface,
+                            device_name,
+                            kMaxDeviceNameLength,
+                            &capture_id,
+                            &video_capture_module);
+
+  EXPECT_EQ(0, capture_interface->ConnectCaptureDevice(capture_id,
+                                                       video_channel));
+  EXPECT_EQ(0, capture_interface->StartCapture(capture_id));
+
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+
+  // Local preview goes to window 1, the decoded channel output to window 2.
+  EXPECT_EQ(0, render_interface->RegisterVideoRenderModule(*_vrm1));
+  EXPECT_EQ(0, render_interface->RegisterVideoRenderModule(*_vrm2));
+
+  RenderInWindow(render_interface, capture_id, _window1, 0);
+  RenderInWindow(render_interface, video_channel, _window2, 1);
+
+  // ***************************************************************
+  // Run the actual test:
+  // ***************************************************************
+  ViETest::Log("You should shortly see a local preview from camera %s"
+      " in window 1 and the remote video in window 2.", device_name);
+  ::TestI420CallSetup(interfaces.codec, interfaces.video_engine,
+                      base_interface, interfaces.network, video_channel,
+                      device_name);
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_EQ(0, capture_interface->StopCapture(capture_id));
+  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
+
+  StopAndRemoveRenderers(base_interface, render_interface, video_channel,
+                         capture_id);
+
+  EXPECT_EQ(0, render_interface->DeRegisterVideoRenderModule(*_vrm1));
+  EXPECT_EQ(0, render_interface->DeRegisterVideoRenderModule(*_vrm2));
+
+  EXPECT_EQ(0, capture_interface->ReleaseCaptureDevice(capture_id));
+
+  // Drop the module reference (presumably taken when the device was found
+  // by FindCaptureDeviceOnSystem -- confirm in general_primitives).
+  video_capture_module->Release();
+  video_capture_module = NULL;
+
+  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
+}
+
+void ViEAutoTest::ViEBaseExtendedTest() {
+  // The extended run currently consists of the API and standard tests.
+  ViEBaseAPITest();
+  ViEBaseStandardTest();
+
+  // TODO(mflodman) Add a test for BaseObserver. It needs CPU load above
+  // 75% so that PerformanceAlarm() fires, roughly:
+  //   BaseObserver base_observer;
+  //   EXPECT_EQ(ptrViEBase->RegisterObserver(base_observer), 0);
+  //   AutoTestSleep(KAutoTestSleepTimeMs);
+  //   EXPECT_EQ(ptrViEBase->DeregisterObserver(), 0);
+  //   EXPECT_GT(base_observer.cpu_load, 0);
+}
+
+// Argument-validation test for the ViEBase API: interface acquisition with
+// bad/good engine pointers, channel creation before and after Init(),
+// VoiceEngine hookup, and interface reference counting on shutdown.
+void ViEAutoTest::ViEBaseAPITest() {
+  // ***************************************************************
+  // Begin create/initialize WebRTC Video Engine for testing
+  // ***************************************************************
+  // Get the ViEBase API
+  webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(NULL);
+  EXPECT_EQ(NULL, ptrViEBase) << "Should return null for a bad ViE pointer";
+
+  webrtc::VideoEngine* ptrViE = webrtc::VideoEngine::Create();
+  EXPECT_TRUE(NULL != ptrViE);
+
+  std::string trace_file_path =
+      ViETest::GetResultOutputPath() + "ViEBaseAPI_trace.txt";
+  EXPECT_EQ(0, ptrViE->SetTraceFile(trace_file_path.c_str()));
+
+  ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+  EXPECT_TRUE(NULL != ptrViEBase);
+
+  // ***************************************************************
+  // Engine ready. Begin testing class
+  // ***************************************************************
+  char version[1024] = "";
+  EXPECT_EQ(0, ptrViEBase->GetVersion(version));
+  EXPECT_EQ(0, ptrViEBase->LastError());
+
+  // Create without init
+  int videoChannel = -1;
+  EXPECT_NE(0, ptrViEBase->CreateChannel(videoChannel)) <<
+      "Should fail since Init has not been called yet";
+  EXPECT_EQ(0, ptrViEBase->Init());
+  EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel));
+
+  int videoChannel2 = -1;
+  EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel2));
+  EXPECT_NE(videoChannel, videoChannel2) <<
+      "Should allocate new number for independent channel";
+
+  EXPECT_EQ(0, ptrViEBase->DeleteChannel(videoChannel2));
+
+  // The two-argument CreateChannel shares state with an existing channel,
+  // so the second (original) channel must exist.
+  EXPECT_EQ(-1, ptrViEBase->CreateChannel(videoChannel2, videoChannel + 1)) <<
+      "Should fail since neither channel exists (the second must)";
+
+  EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel2, videoChannel));
+
+  // Test Voice Engine integration with Video Engine.
+  webrtc::VoiceEngine* ptrVoE = NULL;
+  webrtc::VoEBase* ptrVoEBase = NULL;
+  int audioChannel = -1;
+
+  ptrVoE = webrtc::VoiceEngine::Create();
+  EXPECT_TRUE(NULL != ptrVoE);
+
+  ptrVoEBase = webrtc::VoEBase::GetInterface(ptrVoE);
+  EXPECT_TRUE(NULL != ptrVoEBase);
+  EXPECT_EQ(0, ptrVoEBase->Init());
+
+  audioChannel = ptrVoEBase->CreateChannel();
+  EXPECT_NE(-1, audioChannel);
+
+  // Connect before setting VoE.
+  EXPECT_NE(0, ptrViEBase->ConnectAudioChannel(videoChannel, audioChannel)) <<
+      "Should fail since Voice Engine is not set yet.";
+
+  // Then do it right.
+  EXPECT_EQ(0, ptrViEBase->SetVoiceEngine(ptrVoE));
+  EXPECT_EQ(0, ptrViEBase->ConnectAudioChannel(videoChannel, audioChannel));
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_NE(0, ptrViEBase->DisconnectAudioChannel(videoChannel + 5)) <<
+      "Should fail: disconnecting bogus channel";
+
+  EXPECT_EQ(0, ptrViEBase->DisconnectAudioChannel(videoChannel));
+
+  // Clean up voice engine
+  EXPECT_EQ(0, ptrViEBase->SetVoiceEngine(NULL));
+  EXPECT_EQ(0, ptrVoEBase->Release());
+  EXPECT_TRUE(webrtc::VoiceEngine::Delete(ptrVoE));
+
+  // Exercise interface reference counting: Delete must fail while any
+  // interface reference is still held.
+  webrtc::ViEBase* ptrViEBase2 = webrtc::ViEBase::GetInterface(ptrViE);
+  EXPECT_TRUE(NULL != ptrViEBase2);
+
+  EXPECT_EQ(1, ptrViEBase->Release()) << "There should be one interface left.";
+
+  EXPECT_FALSE(webrtc::VideoEngine::Delete(ptrViE)) <<
+      "Should fail since there are interfaces left.";
+
+  EXPECT_EQ(0, ptrViEBase->Release());
+  EXPECT_TRUE(webrtc::VideoEngine::Delete(ptrViE));
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_capture.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_capture.cc
new file mode 100644
index 0000000..cbb8b03
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_capture.cc
@@ -0,0 +1,559 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest.h"
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "gflags/gflags.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "tick_util.h"
+#include "vie_autotest_defines.h"
+#include "video_capture_factory.h"
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "voe_base.h"
+
+DEFINE_bool(capture_test_ensure_resolution_alignment_in_capture_device, true,
+            "If true, we will give resolutions slightly below a reasonable "
+            "value to test the camera's ability to choose a good resolution. "
+            "If false, we will provide reasonable resolutions instead.");
+
+// Test observer that records the most recent brightness alarm, no-picture
+// alarm state and reported capture frame rate, so tests can assert on the
+// values after feeding frames to the capture module.
+class CaptureObserver: public webrtc::ViECaptureObserver
+{
+public:
+    CaptureObserver() :
+        _brightness(webrtc::Normal),
+        _alarm(webrtc::AlarmCleared),
+        _frameRate(0) {}
+
+    // Stores the reported brightness category and logs it.
+    virtual void BrightnessAlarm(const int captureId,
+                                 const webrtc::Brightness brightness)
+    {
+        _brightness = brightness;
+        switch (brightness)
+        {
+            case webrtc::Normal:
+                ViETest::Log("  BrightnessAlarm Normal");
+                break;
+            case webrtc::Bright:
+                ViETest::Log("  BrightnessAlarm Bright");
+                break;
+            case webrtc::Dark:
+                ViETest::Log("  BrightnessAlarm Dark");
+                break;
+        }
+    }
+
+    // Stores the most recently reported capture frame rate (frames/sec).
+    virtual void CapturedFrameRate(const int captureId,
+                                   const unsigned char frameRate)
+    {
+        ViETest::Log("  CapturedFrameRate %u", frameRate);
+        _frameRate = frameRate;
+    }
+
+    // Stores the no-picture alarm state (raised when frames stop arriving,
+    // cleared when they resume).
+    virtual void NoPictureAlarm(const int captureId,
+                                const webrtc::CaptureAlarm alarm)
+    {
+        _alarm = alarm;
+        if (alarm == webrtc::AlarmRaised)
+        {
+            ViETest::Log("NoPictureAlarm CARaised.");
+        }
+        else
+        {
+            ViETest::Log("NoPictureAlarm CACleared.");
+        }
+    }
+
+    // Last values seen by the callbacks above; read directly by the tests.
+    webrtc::Brightness _brightness;
+    webrtc::CaptureAlarm _alarm;
+    unsigned char _frameRate;
+};
+
+// Effect filter used as a frame sink: counts delivered frames and verifies
+// that every frame has the expected width and height.
+class CaptureEffectFilter: public webrtc::ViEEffectFilter {
+ public:
+  CaptureEffectFilter(unsigned int expected_width, unsigned int expected_height)
+    : number_of_captured_frames_(0),
+      expected_width_(expected_width),
+      expected_height_(expected_height) {
+  }
+
+  // Implements ViEEffectFilter
+  // Called once per captured frame; checks the frame dimensions against the
+  // expected values and increments the frame counter. Always returns 0
+  // (the frame itself is not modified).
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    EXPECT_TRUE(frameBuffer != NULL);
+    EXPECT_EQ(expected_width_, width);
+    EXPECT_EQ(expected_height_, height);
+    ++number_of_captured_frames_;
+    return 0;
+  }
+
+  // Number of frames delivered so far; read directly by the tests.
+  int number_of_captured_frames_;
+
+ protected:
+  unsigned int expected_width_;
+  unsigned int expected_height_;
+ };
+
+// Enumerates all available capture devices and verifies their reported
+// capabilities, then allocates each device in turn, starts capturing with
+// its first capability and checks that frames are actually delivered
+// through a registered effect filter.
+// Capability enumeration is unsupported on WEBRTC_MAC_INTEL, and Android
+// only allows one allocated camera at a time.
+void ViEAutoTest::ViECaptureStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+    TbInterfaces ViE("ViECaptureStandardTest");
+
+    webrtc::VideoCaptureModule::DeviceInfo* devInfo =
+        webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+
+    int numberOfCaptureDevices = devInfo->NumberOfDevices();
+    ViETest::Log("Number of capture devices %d", numberOfCaptureDevices);
+    EXPECT_GT(numberOfCaptureDevices, 0);
+
+    int captureDeviceId[10];
+    memset(captureDeviceId, 0, sizeof(captureDeviceId));
+    webrtc::VideoCaptureModule* vcpms[10];
+    memset(vcpms, 0, sizeof(vcpms));
+
+    // Check capabilities
+    for (int deviceIndex = 0;
+         deviceIndex < numberOfCaptureDevices;
+         ++deviceIndex)
+    {
+        WebRtc_UWord8 deviceName[128];
+        WebRtc_UWord8 deviceUniqueName[512];
+
+        EXPECT_EQ(0, devInfo->GetDeviceName(deviceIndex,
+                                            deviceName,
+                                            sizeof(deviceName),
+                                            deviceUniqueName,
+                                            sizeof(deviceUniqueName)));
+        ViETest::Log("Found capture device %s\nUnique name %s", deviceName,
+                     deviceUniqueName);
+
+#if !defined(WEBRTC_MAC_INTEL)  // these functions will return -1
+        int numberOfCapabilities =
+            devInfo->NumberOfCapabilities(deviceUniqueName);
+        EXPECT_GT(numberOfCapabilities, 0);
+
+        for (int capIndex = 0; capIndex < numberOfCapabilities; ++capIndex)
+        {
+            webrtc::VideoCaptureCapability capability;
+            EXPECT_EQ(0, devInfo->GetCapability(deviceUniqueName, capIndex,
+                                                capability));
+            // Bug fix: numberOfCapabilities is an int, so log it with %d
+            // (was %u); also fixed the "witdh" typo below.
+            ViETest::Log("Capture capability %d (of %d)", capIndex + 1,
+                         numberOfCapabilities);
+            ViETest::Log("width %d, height %d, frame rate %d",
+                         capability.width, capability.height, capability.maxFPS);
+            ViETest::Log("expected delay %d, color type %d, encoding %d",
+                         capability.expectedCaptureDelay, capability.rawType,
+                         capability.codecType);
+
+            EXPECT_GT(capability.width, 0);
+            EXPECT_GT(capability.height, 0);
+            EXPECT_GT(capability.maxFPS, -1);  // >= 0
+            EXPECT_GT(capability.expectedCaptureDelay, 0);
+        }
+#endif
+    }
+    // Capture Capability Functions are not supported on WEBRTC_MAC_INTEL.
+#if !defined(WEBRTC_MAC_INTEL)
+
+    // Check allocation. Try to allocate them all after each other.
+    for (int deviceIndex = 0;
+         deviceIndex < numberOfCaptureDevices;
+         ++deviceIndex)
+    {
+        WebRtc_UWord8 deviceName[128];
+        WebRtc_UWord8 deviceUniqueName[512];
+
+        EXPECT_EQ(0, devInfo->GetDeviceName(deviceIndex,
+                                            deviceName,
+                                            sizeof(deviceName),
+                                            deviceUniqueName,
+                                            sizeof(deviceUniqueName)));
+
+        webrtc::VideoCaptureModule* vcpm =
+            webrtc::VideoCaptureFactory::Create(
+                deviceIndex, deviceUniqueName);
+        EXPECT_TRUE(vcpm != NULL);
+        vcpm->AddRef();
+        vcpms[deviceIndex] = vcpm;
+
+        EXPECT_EQ(0, ViE.capture->AllocateCaptureDevice(
+            *vcpm, captureDeviceId[deviceIndex]));
+
+        webrtc::VideoCaptureCapability capability;
+        EXPECT_EQ(0, devInfo->GetCapability(deviceUniqueName, 0, capability));
+
+        // Test that the camera select the closest capability to the selected
+        // width and height.
+        CaptureEffectFilter filter(capability.width, capability.height);
+        EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+            captureDeviceId[deviceIndex], filter));
+
+        ViETest::Log("Testing Device %s capability width %d  height %d",
+                     deviceUniqueName, capability.width, capability.height);
+
+        if (FLAGS_capture_test_ensure_resolution_alignment_in_capture_device) {
+          // This tests that the capture device properly aligns to a
+          // multiple of 16 (or at least 8).
+          capability.height = capability.height - 2;
+          capability.width  = capability.width  - 2;
+        }
+
+        webrtc::CaptureCapability vieCapability;
+        vieCapability.width = capability.width;
+        vieCapability.height = capability.height;
+        vieCapability.codecType = capability.codecType;
+        vieCapability.maxFPS = capability.maxFPS;
+        vieCapability.rawType = capability.rawType;
+
+        EXPECT_EQ(0, ViE.capture->StartCapture(captureDeviceId[deviceIndex],
+                                               vieCapability));
+        webrtc::TickTime startTime = webrtc::TickTime::Now();
+
+        // Wait up to 10 seconds for at least 10 frames to be delivered.
+        while (filter.number_of_captured_frames_ < 10
+               && (webrtc::TickTime::Now() - startTime).Milliseconds() < 10000)
+        {
+            AutoTestSleep(100);
+        }
+
+        EXPECT_GT(filter.number_of_captured_frames_, 9) <<
+            "Should capture at least some frames";
+
+        EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+            captureDeviceId[deviceIndex]));
+
+#ifdef WEBRTC_ANDROID  // Can only allocate one camera at the time on Android.
+        EXPECT_EQ(0, ViE.capture->StopCapture(captureDeviceId[deviceIndex]));
+        EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(
+            captureDeviceId[deviceIndex]));
+#endif
+    }
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+
+
+    // Stop all started capture devices.
+    for (int deviceIndex = 0;
+        deviceIndex < numberOfCaptureDevices;
+        ++deviceIndex) {
+#if !defined(WEBRTC_ANDROID)
+      // Don't stop on Android since we can only allocate one camera.
+      EXPECT_EQ(0, ViE.capture->StopCapture(
+          captureDeviceId[deviceIndex]));
+      EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(
+          captureDeviceId[deviceIndex]));
+#endif  // !WEBRTC_ANDROID
+      vcpms[deviceIndex]->Release();
+    }
+#endif  // !WEBRTC_MAC_INTEL
+
+    // Bug fix: release the device enumerator; it was leaked before.
+    // (ViECaptureAPITest deletes its enumerator the same way.)
+    delete devInfo;
+}
+
+// Extended test: runs the full capture suite (standard device test, API
+// error-code test, external-capture test) back to back.
+void ViEAutoTest::ViECaptureExtendedTest() {
+    ViECaptureStandardTest();
+    ViECaptureAPITest();
+    ViECaptureExternalCaptureTest();
+}
+
+// Exercises the ViECapture API surface on the first real capture device:
+// start/stop, connect/disconnect to a channel, rotation and release,
+// asserting the specific error codes returned for invalid capture ids,
+// invalid channel ids and repeated operations. The call order below is
+// deliberate — each negative test depends on the state set up before it.
+void ViEAutoTest::ViECaptureAPITest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+    TbInterfaces ViE("ViECaptureAPITest");
+
+    ViE.capture->NumberOfCaptureDevices();
+
+    WebRtc_UWord8 deviceName[128];
+    WebRtc_UWord8 deviceUniqueName[512];
+    int captureId = 0;
+
+    webrtc::VideoCaptureModule::DeviceInfo* devInfo =
+        webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+    EXPECT_TRUE(devInfo != NULL);
+
+    // Get the first capture device
+    EXPECT_EQ(0, devInfo->GetDeviceName(0, deviceName,
+                                        sizeof(deviceName),
+                                        deviceUniqueName,
+                                        sizeof(deviceUniqueName)));
+
+    webrtc::VideoCaptureModule* vcpm =
+        webrtc::VideoCaptureFactory::Create(0, deviceUniqueName);
+    vcpm->AddRef();
+    EXPECT_TRUE(vcpm != NULL);
+
+    // Allocate capture device.
+    EXPECT_EQ(0, ViE.capture->AllocateCaptureDevice(*vcpm, captureId));
+
+    // Start the capture device.
+    EXPECT_EQ(0, ViE.capture->StartCapture(captureId));
+
+    // Start again. Should fail.
+    EXPECT_NE(0, ViE.capture->StartCapture(captureId));
+    EXPECT_EQ(kViECaptureDeviceAlreadyStarted, ViE.LastError());
+
+    // Start invalid capture device.
+    EXPECT_NE(0, ViE.capture->StartCapture(captureId + 1));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+
+    // Stop invalid capture device.
+    EXPECT_NE(0, ViE.capture->StopCapture(captureId + 1));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+
+    // Stop the capture device.
+    EXPECT_EQ(0, ViE.capture->StopCapture(captureId));
+
+    // Stop the capture device again.
+    EXPECT_NE(0, ViE.capture->StopCapture(captureId));
+    EXPECT_EQ(kViECaptureDeviceNotStarted, ViE.LastError());
+
+    // Connect to invalid channel (no channel has been created yet).
+    EXPECT_NE(0, ViE.capture->ConnectCaptureDevice(captureId, 0));
+    EXPECT_EQ(kViECaptureDeviceInvalidChannelId, ViE.LastError());
+
+    TbVideoChannel channel(ViE);
+
+    // Connect invalid captureId.
+    EXPECT_NE(0, ViE.capture->ConnectCaptureDevice(captureId + 1,
+                                                   channel.videoChannel));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+
+    // Connect the capture device to the channel.
+    EXPECT_EQ(0, ViE.capture->ConnectCaptureDevice(captureId,
+                                                   channel.videoChannel));
+
+    // Connect the channel again.
+    EXPECT_NE(0, ViE.capture->ConnectCaptureDevice(captureId,
+                                                   channel.videoChannel));
+    EXPECT_EQ(kViECaptureDeviceAlreadyConnected, ViE.LastError());
+
+    // Start the capture device.
+    EXPECT_EQ(0, ViE.capture->StartCapture(captureId));
+
+    // Release invalid capture device.
+    EXPECT_NE(0, ViE.capture->ReleaseCaptureDevice(captureId + 1));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+
+    // Release the capture device.
+    EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(captureId));
+
+    // Release the capture device again.
+    EXPECT_NE(0, ViE.capture->ReleaseCaptureDevice(captureId));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+
+    // Test GetOrientation (expected to fail for a bogus device name).
+    webrtc::VideoCaptureRotation orientation;
+    WebRtc_UWord8 dummy_name[5];
+    EXPECT_NE(0, devInfo->GetOrientation(dummy_name, orientation));
+
+    // Test SetRotation (fails here because the device was just released).
+    EXPECT_NE(0, ViE.capture->SetRotateCapturedFrames(
+        captureId, webrtc::RotateCapturedFrame_90));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+
+    // Allocate capture device.
+    EXPECT_EQ(0, ViE.capture->AllocateCaptureDevice(*vcpm, captureId));
+
+    // All four rotations should be accepted on an allocated device.
+    EXPECT_EQ(0, ViE.capture->SetRotateCapturedFrames(
+        captureId, webrtc::RotateCapturedFrame_0));
+    EXPECT_EQ(0, ViE.capture->SetRotateCapturedFrames(
+        captureId, webrtc::RotateCapturedFrame_90));
+    EXPECT_EQ(0, ViE.capture->SetRotateCapturedFrames(
+        captureId, webrtc::RotateCapturedFrame_180));
+    EXPECT_EQ(0, ViE.capture->SetRotateCapturedFrames(
+        captureId, webrtc::RotateCapturedFrame_270));
+
+    // Release the capture device
+    EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(captureId));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+    delete devInfo;
+    vcpm->Release();
+}
+
+// Tests external (application-injected) capture: feeds synthetic I420
+// frames through VideoCaptureExternal::IncomingFrame and verifies frame
+// delivery, the frame-rate callback, and the brightness and no-picture
+// alarms (bright image, dark image, and a >1s gap in frames).
+void ViEAutoTest::ViECaptureExternalCaptureTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    TbInterfaces ViE("ViECaptureExternalCaptureTest");
+    TbVideoChannel channel(ViE);
+    channel.StartReceive();
+    channel.StartSend();
+
+    // Bug fix: initialize to NULL so the EXPECT_TRUE below actually
+    // verifies that Create() set the out-pointer (it was uninitialized).
+    webrtc::VideoCaptureExternal* externalCapture = NULL;
+    int captureId = 0;
+
+    // Allocate the external capture device.
+    webrtc::VideoCaptureModule* vcpm = webrtc::VideoCaptureFactory::Create(
+        0, externalCapture);
+    EXPECT_TRUE(vcpm != NULL);
+    EXPECT_TRUE(externalCapture != NULL);
+    vcpm->AddRef();
+
+    EXPECT_EQ(0, ViE.capture->AllocateCaptureDevice(*vcpm, captureId));
+
+    // Connect the capture device to the channel.
+    EXPECT_EQ(0, ViE.capture->ConnectCaptureDevice(captureId,
+                                                   channel.videoChannel));
+
+    // Render the local capture.
+    EXPECT_EQ(0, ViE.render->AddRenderer(captureId, _window1, 1, 0.0, 0.0,
+                                         1.0, 1.0));
+
+    // Render the remote capture.
+    EXPECT_EQ(0, ViE.render->AddRenderer(channel.videoChannel, _window2, 1,
+                                         0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(captureId));
+    EXPECT_EQ(0, ViE.render->StartRender(channel.videoChannel));
+
+    // Register observer.
+    CaptureObserver observer;
+    EXPECT_EQ(0, ViE.capture->RegisterObserver(captureId, observer));
+
+    // Enable brightness alarm.
+    EXPECT_EQ(0, ViE.capture->EnableBrightnessAlarm(captureId, true));
+
+    CaptureEffectFilter effectFilter(176, 144);
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(captureId,
+                                                                effectFilter));
+
+    // Call started.
+    ViETest::Log("You should see local preview from external capture\n"
+                 "in window 1 and the remote video in window 2.\n");
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+    // QCIF I420 frame: 176*144 luma bytes plus half that for chroma.
+    const unsigned int videoFrameLength = (176 * 144 * 3) / 2;
+    unsigned char* videoFrame = new unsigned char[videoFrameLength];
+    // Mid-gray luma plane; chroma bytes are left uninitialized by design
+    // of the original test (only brightness of Y is examined).
+    memset(videoFrame, 128, 176 * 144);
+
+    int frameCount = 0;
+    webrtc::VideoCaptureCapability capability;
+    capability.width = 176;
+    capability.height = 144;
+    capability.rawType = webrtc::kVideoI420;
+
+    ViETest::Log("Testing external capturing and frame rate callbacks.");
+    // TODO: Change when using a real file!
+    // while (fread(videoFrame, videoFrameLength, 1, foreman) == 1)
+    while (frameCount < 120)
+    {
+        externalCapture->IncomingFrame(
+            videoFrame, videoFrameLength, capability,
+            webrtc::TickTime::Now().MillisecondTimestamp());
+        AutoTestSleep(33);
+
+        if (effectFilter.number_of_captured_frames_ > 2)
+        {
+            EXPECT_EQ(webrtc::Normal, observer._brightness) <<
+                "Brightness or picture alarm should not have been called yet.";
+            EXPECT_EQ(webrtc::AlarmCleared, observer._alarm) <<
+                "Brightness or picture alarm should not have been called yet.";
+        }
+        frameCount++;
+    }
+
+    // Test brightness alarm
+    // Test bright image: push every luma sample up (saturating at 255).
+    for (int i = 0; i < 176 * 144; ++i)
+    {
+        if (videoFrame[i] <= 155)
+            videoFrame[i] = videoFrame[i] + 100;
+        else
+            videoFrame[i] = 255;
+    }
+    ViETest::Log("Testing Brightness alarm");
+    for (int frame = 0; frame < 30; ++frame)
+    {
+        externalCapture->IncomingFrame(
+            videoFrame, videoFrameLength, capability,
+            webrtc::TickTime::Now().MillisecondTimestamp());
+        AutoTestSleep(33);
+    }
+    EXPECT_EQ(webrtc::Bright, observer._brightness) <<
+        "Should be bright at this point since we are using a bright image.";
+
+    // Test Dark image: push every luma sample down (clamping at 0).
+    for (int i = 0; i < 176 * 144; ++i)
+    {
+        videoFrame[i] = videoFrame[i] > 200 ? videoFrame[i] - 200 : 0;
+    }
+    for (int frame = 0; frame < 30; ++frame)
+    {
+        externalCapture->IncomingFrame(
+            videoFrame, videoFrameLength, capability,
+            webrtc::TickTime::Now().MillisecondTimestamp());
+        AutoTestSleep(33);
+    }
+    EXPECT_EQ(webrtc::Dark, observer._brightness) <<
+        "Should be dark at this point since we are using a dark image.";
+    EXPECT_GT(effectFilter.number_of_captured_frames_, 150) <<
+        "Frames should have been played.";
+
+    EXPECT_GE(observer._frameRate, 29) <<
+        "Frame rate callback should be approximately correct.";
+    EXPECT_LE(observer._frameRate, 30) <<
+        "Frame rate callback should be approximately correct.";
+
+    // Test no picture alarm: stop feeding frames for just over a second.
+    ViETest::Log("Testing NoPictureAlarm.");
+    AutoTestSleep(1050);
+
+    EXPECT_EQ(webrtc::AlarmRaised, observer._alarm) <<
+        "No picture alarm should be raised.";
+    for (int frame = 0; frame < 10; ++frame)
+    {
+        externalCapture->IncomingFrame(
+            videoFrame, videoFrameLength, capability,
+            webrtc::TickTime::Now().MillisecondTimestamp());
+        AutoTestSleep(33);
+    }
+    EXPECT_EQ(webrtc::AlarmCleared, observer._alarm) <<
+        "Alarm should be cleared since we just got some data.";
+
+    // Bug fix: the buffer was allocated with new[], so it must be freed
+    // with delete[] (plain delete is undefined behavior here).
+    delete [] videoFrame;
+
+    // Release the capture device
+    EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(captureId));
+
+    // Release the capture device again
+    EXPECT_NE(0, ViE.capture->ReleaseCaptureDevice(captureId));
+    EXPECT_EQ(kViECaptureDeviceDoesNotExist, ViE.LastError());
+    vcpm->Release();
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_carbon_mac.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_carbon_mac.cc
new file mode 100644
index 0000000..38d9c4e
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_carbon_mac.cc
@@ -0,0 +1,355 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+#include "vie_autotest_mac_carbon.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_autotest_main.h"
+
+// Constructs the window manager. The Carbon windows and HIViews are
+// heap-allocated placeholders filled in by CreateWindows().
+ViEAutoTestWindowManager::ViEAutoTestWindowManager() :
+    _carbonWindow1(new WindowRef()),
+    _carbonWindow2(new WindowRef()),
+    _hiView1(new HIViewRef()),
+    _hiView2(new HIViewRef())
+{
+    // Bug fix: the destructor calls RemoveEventHandler() on these refs if
+    // non-null, but they were never initialized — if CreateWindows() was
+    // not called first, the destructor read uninitialized memory.
+    _carbonWindow1EventHandlerRef = NULL;
+    _carbonWindow2EventHandlerRef = NULL;
+    _carbonHIView1EventHandlerRef = NULL;
+    _carbonHIView2EventHandlerRef = NULL;
+}
+
+// Removes any installed Carbon event handlers and frees the window/view
+// placeholder objects.
+// NOTE(review): the handler refs are only assigned in CreateWindows();
+// verify they are null-initialized before this destructor can run,
+// otherwise these guards read uninitialized memory.
+ViEAutoTestWindowManager::~ViEAutoTestWindowManager()
+{
+    if (_carbonWindow1EventHandlerRef)
+        RemoveEventHandler(_carbonWindow1EventHandlerRef);
+
+    if (_carbonWindow2EventHandlerRef)
+        RemoveEventHandler(_carbonWindow2EventHandlerRef);
+
+    if (_carbonHIView1EventHandlerRef)
+        RemoveEventHandler(_carbonHIView1EventHandlerRef);
+
+    if (_carbonHIView2EventHandlerRef)
+        RemoveEventHandler(_carbonHIView2EventHandlerRef);
+
+    delete _carbonWindow1;
+    delete _carbonWindow2;
+    delete _hiView1;
+    delete _hiView2;
+}
+
+// Creates the two Carbon document windows (and, in HIVIEWREF_MODE, one
+// custom HIView inside each), installs window/control event handlers and
+// shows the windows. Always returns 0. Note: the window1Title/window2Title
+// arguments are ignored; fixed CFSTR titles are used instead.
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            char* window1Title,
+                                            char* window2Title)
+{
+
+    WindowAttributes windowAttributes = kWindowStandardDocumentAttributes
+        | kWindowStandardHandlerAttribute | kWindowCompositingAttribute;
+    Rect windowContentRect;
+    // Class/kind pairs for the window events we want HandleWindowEvent to
+    // receive (bounds changes, zoom, expand, resize/drag region clicks).
+    static const EventTypeSpec
+        windowEventTypes[] = { kEventClassWindow, kEventWindowBoundsChanged,
+            kEventClassWindow, kEventWindowBoundsChanging, kEventClassWindow,
+            kEventWindowZoomed, kEventClassWindow, kEventWindowExpanded,
+            kEventClassWindow, kEventWindowClickResizeRgn, kEventClassWindow,
+            kEventWindowClickDragRgn };
+
+    // ************* Window 1 and Event Handler ***********************
+
+    SetRect(&windowContentRect, window1Size.origin.x, window1Size.origin.y,
+            window1Size.origin.x + window1Size.size.width, window1Size.origin.y
+                + window1Size.size.height);
+
+    CreateNewWindow(kDocumentWindowClass, windowAttributes, &windowContentRect,
+                    _carbonWindow1);
+    SetWindowTitleWithCFString(*_carbonWindow1, CFSTR("Carbon Window 1"));
+    ShowWindow(*_carbonWindow1);
+    InitCursor();
+    InstallWindowEventHandler(*_carbonWindow1,
+                              NewEventHandlerUPP(HandleWindowEvent),
+                              GetEventTypeCount(windowEventTypes),
+                              windowEventTypes, (void*) this,
+                              &_carbonWindow1EventHandlerRef);
+
+    // ************* Window 2 and Event Handler ***********************
+
+    SetRect(&windowContentRect, window2Size.origin.x, window2Size.origin.y,
+            window2Size.origin.x + window2Size.size.width, window2Size.origin.y
+                + window2Size.size.height);
+
+    CreateNewWindow(kDocumentWindowClass, windowAttributes, &windowContentRect,
+                    _carbonWindow2);
+    SetWindowTitleWithCFString(*_carbonWindow2, CFSTR("Carbon Window 2"));
+    ShowWindow(*_carbonWindow2);
+    InitCursor();
+    InstallWindowEventHandler(*_carbonWindow2,
+                              NewEventHandlerUPP(HandleWindowEvent),
+                              GetEventTypeCount(windowEventTypes),
+                              windowEventTypes, (void*) this,
+                              &_carbonWindow2EventHandlerRef);
+
+#if defined(HIVIEWREF_MODE)
+    // NOTE(review): _hiView1/_hiView2 are HIViewRef* members, so &_hiView1
+    // here is a HIViewRef**, and &_carbonWindow1 is a WindowRef** — verify
+    // this section actually compiles when HIVIEWREF_MODE is defined.
+    OSStatus status;
+    static const EventTypeSpec hiviewEventTypes[] = { kEventClassControl,
+        kEventControlBoundsChanged, kEventClassControl, kEventControlDraw };
+
+    HIRect hiView1Rect = { 10, 10, 200, 200 };
+    status = HICreateCustomView(&hiView1Rect, &_hiView1);
+    status = HIViewAddSubview(&_carbonWindow1, _hiView1);
+    HIViewSetZOrder(_hiView1, kHIViewZOrderAbove, NULL);
+    HIViewSetVisible(_hiView1, true);
+
+    HIViewInstallEventHandler(_hiView1, NewEventHandlerUPP(HandleHIViewEvent),
+                              GetEventTypeCount(hiviewEventTypes),
+                              hiviewEventTypes, (void *) this,
+                              &_carbonHIView1EventHandlerRef);
+
+    HIRect hiView2Rect = { 10, 10, 200, 200 };
+    status = HICreateCustomView(&hiView2Rect, &_hiView2);
+    status = HIViewAddSubview(&_carbonWindow2, _hiView2);
+    HIViewSetZOrder(_hiView2, kHIViewZOrderAbove, NULL);
+    HIViewSetVisible(_hiView2, true);
+
+    HIViewInstallEventHandler(_hiView2, NewEventHandlerUPP(HandleHIViewEvent),
+                              GetEventTypeCount(hiviewEventTypes),
+                              hiviewEventTypes, (void *) this,
+                              &_carbonHIView2EventHandlerRef);
+#endif
+
+    return 0;
+}
+
+// Carbon window event callback. Currently a stub dispatcher: it extracts
+// the target window and event kind but every branch is empty; it always
+// returns noErr. Kept as a template for per-event handling.
+pascal OSStatus ViEAutoTestWindowManager::HandleWindowEvent(
+    EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
+{
+
+    WindowRef windowRef = NULL;
+
+    int eventType = GetEventKind(theEvent);
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/
+    // i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    GetEventParameter(theEvent, kEventParamDirectObject, typeWindowRef, NULL,
+                      sizeof(WindowRef), NULL, &windowRef);
+
+    // userData is the ViEAutoTestWindowManager passed to
+    // InstallWindowEventHandler in CreateWindows().
+    ViEAutoTestWindowManager* obj = (ViEAutoTestWindowManager*) (userData);
+
+    if (windowRef == obj->GetWindow1())
+    {
+        // event was triggered on window 1
+    }
+    else if (windowRef == obj->GetWindow2())
+    {
+        // event was triggered on window 2
+    }
+
+    // NOTE(review): kEventWindowExpanding is tested below but is not in the
+    // registered windowEventTypes list, so that branch never fires — confirm
+    // whether it should be registered.
+    if (kEventWindowBoundsChanged == eventType)
+    {
+    }
+    else if (kEventWindowBoundsChanging == eventType)
+    {
+    }
+    else if (kEventWindowZoomed == eventType)
+    {
+    }
+    else if (kEventWindowExpanding == eventType)
+    {
+    }
+    else if (kEventWindowExpanded == eventType)
+    {
+    }
+    else if (kEventWindowClickResizeRgn == eventType)
+    {
+    }
+    else if (kEventWindowClickDragRgn == eventType)
+    {
+    }
+    else
+    {
+    }
+
+    return noErr;
+}
+
+// Carbon control event callback for the custom HIViews (installed only in
+// HIVIEWREF_MODE). On kEventControlDraw it fills view 1 red and view 2
+// green; all other events are passed through with the GetEventParameter
+// status.
+pascal OSStatus ViEAutoTestWindowManager::HandleHIViewEvent(
+    EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
+{
+    HIViewRef hiviewRef = NULL;
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/
+    // i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    int eventType = GetEventKind(theEvent);
+    OSStatus status = noErr;
+    status = GetEventParameter(theEvent, kEventParamDirectObject,
+                               typeControlRef, NULL, sizeof(ControlRef), NULL,
+                               &hiviewRef);
+
+    if (GetEventClass(theEvent) == kEventClassControl)
+    {
+        if (GetEventKind(theEvent) == kEventControlDraw)
+        {
+            ViEAutoTestWindowManager* obj =
+                (ViEAutoTestWindowManager*) (userData);
+
+            CGContextRef context;
+            status = GetEventParameter(theEvent, kEventParamCGContextRef,
+                                       typeCGContextRef, NULL, sizeof(context),
+                                       NULL, &context);
+            HIRect viewBounds;
+
+            // In HIVIEWREF_MODE GetWindow1()/GetWindow2() return the
+            // HIViewRef members.
+            HIViewRef* ptrHIViewRef =
+                static_cast<HIViewRef*> (obj->GetWindow1());
+            if (hiviewRef == *ptrHIViewRef)
+            {
+                // color hiview1
+                CGContextSetRGBFillColor(context, 1, 0, 0, 1);
+                HIViewGetBounds(hiviewRef, &viewBounds);
+                CGContextFillRect(context, viewBounds);
+            }
+
+            // Bug fix: this must compare against view 2 (was GetWindow1()
+            // again — a copy-paste error that made this branch duplicate
+            // the one above and left hiview2 never filled green).
+            ptrHIViewRef = static_cast<HIViewRef*> (obj->GetWindow2());
+            if (hiviewRef == *ptrHIViewRef)
+            {
+                // color hiview2
+                CGContextSetRGBFillColor(context, 0, 1, 0, 1);
+                HIViewGetBounds(hiviewRef, &viewBounds);
+                CGContextFillRect(context, viewBounds);
+            }
+
+        }
+    }
+
+    /*
+
+
+     VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+     WindowRef parentWindow = HIViewGetWindow(hiviewRef);
+     bool updateUI = true;
+
+     if(kEventControlBoundsChanged == eventType){
+     }
+     else if(kEventControlDraw == eventType){
+     }
+     else{
+     updateUI = false;
+     }
+
+     if(true == updateUI){
+     obj->ParentWindowResized(parentWindow);
+     obj->UpdateClipping();
+     obj->RenderOffScreenBuffers();
+     }
+     */
+
+    return status;
+}
+
+// No-op: the Carbon windows are not explicitly torn down here (cleanup
+// happens in the destructor). Always returns 0.
+int ViEAutoTestWindowManager::TerminateWindows()
+{
+    return 0;
+}
+
+// Returns render target 1 as an opaque pointer: the HIViewRef member in
+// HIVIEWREF_MODE, otherwise the WindowRef member. Callers must cast back
+// to the matching type for the active mode.
+void* ViEAutoTestWindowManager::GetWindow1()
+{
+#if defined(HIVIEWREF_MODE)
+    return (void*)_hiView1;
+#else
+    return (void*) _carbonWindow1;
+#endif
+
+}
+// Returns render target 2 as an opaque pointer: the HIViewRef member in
+// HIVIEWREF_MODE, otherwise the WindowRef member. Callers must cast back
+// to the matching type for the active mode.
+void* ViEAutoTestWindowManager::GetWindow2()
+{
+#if defined(HIVIEWREF_MODE)
+    return (void*)_hiView2;
+#else
+    return (void*) _carbonWindow2;
+#endif
+
+}
+
+// No-op on Carbon: reports success without raising any window.
+bool ViEAutoTestWindowManager::SetTopmostWindow()
+{
+    return true;
+}
+
+/*
+
+ int main (int argc, const char * argv[])
+ {
+ ViEAutoTestMain autoTest;
+
+ if(argc > 1){
+ autoTest.UseAnswerFile(argv[1]);
+ }
+
+ int success = autoTest.BeginOSIndependentTesting();
+
+ }
+
+ */
+
+// Entry point: spawns the auto test on a secondary NSThread (passing
+// argv[1] as the answer file when given) and blocks the main thread in
+// the run loop so OS/GUI events are processed.
+int main(int argc, const char * argv[])
+{
+    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+
+    [NSApplication sharedApplication];
+
+    // We have to run the test in a secondary thread because we need to run a
+    // runloop, which blocks.
+    if (argc > 1)
+    {
+        AutoTestClass* autoTestClass = [[AutoTestClass alloc] init];
+        [NSThread detachNewThreadSelector:@selector(autoTestWithArg:)
+                                 toTarget:autoTestClass
+                               withObject:[NSString stringWithFormat:@"%s",
+                                           argv[1]]];
+    }
+    else
+    {
+        AutoTestClass* autoTestClass = [[AutoTestClass alloc] init];
+        [NSThread detachNewThreadSelector:@selector(autoTestWithArg:)
+                                 toTarget:autoTestClass
+                               withObject:nil];
+    }
+
+    // Process OS events. Blocking call.
+    [[NSRunLoop currentRunLoop] run];
+    [pool release];
+    // Fix: main declared int but fell off the end; return success
+    // explicitly (also repaired the mangled indentation above).
+    return 0;
+}
+
+@implementation AutoTestClass
+
+// Runs the auto test on a secondary thread. When |answerFile| is a
+// non-empty string it names a file of prerecorded answers for the
+// interactive prompts; a nil argument skips the compare and runs
+// interactively.
+-(void)autoTestWithArg:(NSString*)answerFile;
+{
+    // TODO(phoglund): Rewrite this file to work with the new way of running
+    // vie_auto_test. The file doesn't seem to be used at the moment though.
+    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+
+    ViEAutoTestMain autoTest;
+
+    if(NSOrderedSame != [answerFile compare:@""])
+    {
+        char answerFileUTF8[1024] = "";
+        // Bug fix: the original copied from the uninitialized destination
+        // buffer itself via a nonexistent -UTF8 selector
+        // ([answerFileUTF8 UTF8]); read the NSString argument instead,
+        // bounded to the buffer size.
+        strncpy(answerFileUTF8, [answerFile UTF8String],
+                sizeof(answerFileUTF8) - 1);
+        autoTest.UseAnswerFile(answerFileUTF8);
+    }
+
+    int success = autoTest.BeginOSIndependentTesting();
+
+    [pool release];
+    return;
+}
+// TODO: move window creation to Obj-c class so GUI commands can be run on the
+// main NSThread
+// -(void)createWindow1:(AutoTestRect)window1Size
+// AndWindow2:(AutoTestRect)window2Size WithTitle1:(char*)window1Title
+// AndTitle2:(char*)window2Title{
+
+@end
+
+#endif
+
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_cocoa_mac.mm b/trunk/src/video_engine/test/auto_test/source/vie_autotest_cocoa_mac.mm
new file mode 100644
index 0000000..3f9ef6b
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_cocoa_mac.mm
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(COCOA_RENDERING)
+#import "cocoa_render_view.h"
+#include "vie_autotest_mac_cocoa.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_autotest_main.h"
+
+// Render views start out nil; the windows and views are created lazily
+// in CreateWindows().
+ViEAutoTestWindowManager::ViEAutoTestWindowManager()
+    : _cocoaRenderView1(nil), _cocoaRenderView2(nil) {
+}
+
+// Creates one titled window positioned and sized per |size|, embeds
+// |renderView| in its content view, sets |title| (a C string passed as
+// void*) and brings the window to the front. Returns the new window.
+static NSWindow* CreateRenderWindow(AutoTestRect size, void* title,
+                                    NSView* renderView) {
+    NSRect frame = NSMakeRect(size.origin.x, size.origin.y,
+                              size.size.width, size.size.height);
+    NSWindow* window = [[NSWindow alloc] initWithContentRect:frame
+                                         styleMask:NSTitledWindowMask
+                                         backing:NSBackingStoreBuffered
+                                         defer:NO];
+    [window orderOut:nil];
+    [[window contentView] addSubview:renderView];
+    [window setTitle:[NSString stringWithFormat:@"%s", title]];
+    [window makeKeyAndOrderFront:NSApp];
+    return window;
+}
+
+// Creates the two render windows, each hosting one CocoaRenderView that
+// fills the window's content area. The original duplicated the window
+// setup for both windows; it is factored into CreateRenderWindow().
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            void* window1Title,
+                                            void* window2Title) {
+    NSRect view1Frame = NSMakeRect(0, 0, window1Size.size.width,
+                                   window1Size.size.height);
+    _cocoaRenderView1 = [[CocoaRenderView alloc] initWithFrame:view1Frame];
+    outWindow1_ = CreateRenderWindow(window1Size, window1Title,
+                                     (NSView*)_cocoaRenderView1);
+
+    NSRect view2Frame = NSMakeRect(0, 0, window2Size.size.width,
+                                   window2Size.size.height);
+    _cocoaRenderView2 = [[CocoaRenderView alloc] initWithFrame:view2Frame];
+    outWindow2_ = CreateRenderWindow(window2Size, window2Title,
+                                     (NSView*)_cocoaRenderView2);
+
+    return 0;
+}
+
+// Closes both render windows. Always returns 0.
+int ViEAutoTestWindowManager::TerminateWindows() {
+    [outWindow1_ close];
+    [outWindow2_ close];
+    return 0;
+}
+
+// Returns the first render view (the CocoaRenderView, not the NSWindow)
+// as an opaque handle for the platform-independent test code.
+void* ViEAutoTestWindowManager::GetWindow1() {
+    return _cocoaRenderView1;
+}
+
+// Returns the second render view as an opaque handle.
+void* ViEAutoTestWindowManager::GetWindow2() {
+    return _cocoaRenderView2;
+}
+
+// No-op on Mac; the windows are already ordered front when created.
+bool ViEAutoTestWindowManager::SetTopmostWindow() {
+    return true;
+}
+
+// Test entry point. With MAC_COCOA_USE_NSRUNLOOP defined, the tests run
+// on a detached secondary thread while the main thread services the
+// runloop (needed for Cocoa rendering); otherwise the tests run inline.
+int main(int argc, char * argv[]) {
+    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+
+    [NSApplication sharedApplication];
+
+    int result = 0;
+#if defined(MAC_COCOA_USE_NSRUNLOOP)
+    AutoTestClass* tests = [[AutoTestClass alloc] init];
+
+    [tests setArgc:argc argv:argv];
+    [NSThread detachNewThreadSelector:@selector(autoTestWithArg:)
+      toTarget:tests withObject:nil];
+    // Process OS events. Blocking call. NOTE(review): -run returns only
+    // once no input sources/timers remain; [tests result] is read after
+    // that — confirm the runloop actually exits when the tests finish.
+    [[NSRunLoop mainRunLoop]run];
+
+    result = [tests result];
+
+#else
+    ViEAutoTestMain autoTest;
+    result = autoTest.RunTests(argc, argv);
+
+#endif
+    [pool release];
+    return result;
+}
+
+@implementation AutoTestClass
+
+// Stores the command line so the detached test thread can forward it to
+// ViEAutoTestMain::RunTests().
+- (void)setArgc:(int)argc argv:(char**)argv {
+  argc_ = argc;
+  argv_ = argv;
+}
+
+// Thread entry point; |ignored| is unused. Runs all tests with the
+// previously stored argc/argv and stores the exit code in result_.
+- (void)autoTestWithArg:(NSObject*)ignored {
+    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+
+    ViEAutoTestMain auto_test;
+
+    result_ = auto_test.RunTests(argc_, argv_);
+
+    [pool release];
+    return;
+}
+
+// Exit code from the test run; valid after autoTestWithArg: completes.
+- (int)result {
+  return result_;
+}
+
+@end
+
+#endif
+
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_codec.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_codec.cc
new file mode 100644
index 0000000..ed8f20b
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_codec.cc
@@ -0,0 +1,455 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest.h"
+
+#include "codec_primitives.h"
+#include "common_types.h"
+#include "general_primitives.h"
+#include "tb_capture_device.h"
+#include "tb_I420_codec.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "vie_autotest_defines.h"
+
+// Effect filter that records the dimensions of the last frame it sees;
+// presumably used to verify the rendered resolution — frames themselves
+// are passed through unmodified.
+class RenderFilter : public webrtc::ViEEffectFilter {
+ public:
+  RenderFilter()
+      : last_render_width_(0),
+        last_render_height_(0) {}
+
+  ~RenderFilter() {}
+
+  // Called per frame; captures width/height only, never touches
+  // frame_buffer, and always reports success.
+  virtual int Transform(int size, unsigned char* frame_buffer,
+                        unsigned int time_stamp, unsigned int width,
+                        unsigned int height) {
+    last_render_width_ = width;
+    last_render_height_ = height;
+    return 0;
+  }
+  // Dimensions of the most recent frame passed through Transform().
+  unsigned int last_render_width_;
+  unsigned int last_render_height_;
+};
+
+// Standard codec test: wires a capture device to a channel, configures
+// RTP/RTCP via the ConfigureRtpRtcp helper, renders both streams, and
+// runs TestCodecs over the supported codecs without forcing a resolution.
+void ViEAutoTest::ViECodecStandardTest()
+{
+    TbInterfaces interfaces = TbInterfaces("ViECodecStandardTest");
+    TbCaptureDevice capture_device = TbCaptureDevice(interfaces);
+
+    int video_channel = -1;
+
+    EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+    EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+        capture_device.captureId, video_channel));
+
+    ConfigureRtpRtcp(interfaces.rtp_rtcp,
+                     video_channel);
+
+    // Local preview in window 1, decoded channel output in window 2.
+    RenderInWindow(interfaces.render, capture_device.captureId, _window1, 0);
+    RenderInWindow(interfaces.render, video_channel, _window2, 1);
+
+    TestCodecs(interfaces, capture_device.captureId, video_channel,
+               kDoNotForceResolution, kDoNotForceResolution);
+}
+
+// Extended codec test. First runs the other codec tests, then exercises
+// codec/RTCP configuration on a localhost send/receive channel, and
+// finally verifies "default channel" behavior: a second channel created
+// based on the first must be distinct and must not be able to StartSend.
+// Fix in this revision: the garbled assertion message "should be seop"
+// is repaired; no behavioral change otherwise.
+void ViEAutoTest::ViECodecExtendedTest()
+{
+    {
+        ViECodecAPITest();
+        ViECodecStandardTest();
+        ViECodecExternalCodecTest();
+
+        TbInterfaces interfaces = TbInterfaces("ViECodecExtendedTest");
+        webrtc::ViEBase* ptrViEBase = interfaces.base;
+        webrtc::ViECapture* ptrViECapture = interfaces.capture;
+        webrtc::ViERender* ptrViERender = interfaces.render;
+        webrtc::ViECodec* ptrViECodec = interfaces.codec;
+        webrtc::ViERTP_RTCP* ptrViERtpRtcp = interfaces.rtp_rtcp;
+        webrtc::ViENetwork* ptrViENetwork = interfaces.network;
+
+        TbCaptureDevice captureDevice = TbCaptureDevice(interfaces);
+        int captureId = captureDevice.captureId;
+
+        int videoChannel = -1;
+        EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel));
+        EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
+            captureId, videoChannel));
+
+        EXPECT_EQ(0, ptrViERtpRtcp->SetRTCPStatus(
+            videoChannel, webrtc::kRtcpCompound_RFC4585));
+        EXPECT_EQ(0, ptrViERtpRtcp->SetKeyFrameRequestMethod(
+            videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true));
+        EXPECT_EQ(0, ptrViERender->AddRenderer(
+            captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ptrViERender->AddRenderer(
+            videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ptrViERender->StartRender(captureId));
+        EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+
+        // Register every supported codec as a receive codec; all except
+        // I420 are forced to VGA.
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+
+            if (videoCodec.codecType != webrtc::kVideoCodecI420)
+            {
+                videoCodec.width = 640;
+                videoCodec.height = 480;
+            }
+            EXPECT_EQ(0, ptrViECodec->SetReceiveCodec(
+                videoChannel, videoCodec));
+        }
+
+        // Loopback send/receive on localhost.
+        const char* ipAddress = "127.0.0.1";
+        const unsigned short rtpPort = 6000;
+        EXPECT_EQ(0, ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort));
+        EXPECT_EQ(0, ptrViEBase->StartReceive(videoChannel));
+        EXPECT_EQ(0, ptrViENetwork->SetSendDestination(
+            videoChannel, ipAddress, rtpPort));
+        EXPECT_EQ(0, ptrViEBase->StartSend(videoChannel));
+
+        //
+        // Codec specific tests
+        //
+        memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+        EXPECT_EQ(0, ptrViEBase->StopSend(videoChannel));
+        ViEAutotestCodecObserver codecObserver;
+        EXPECT_EQ(0, ptrViECodec->RegisterEncoderObserver(
+            videoChannel, codecObserver));
+        EXPECT_EQ(0, ptrViECodec->RegisterDecoderObserver(
+            videoChannel, codecObserver));
+
+        EXPECT_EQ(0, ptrViEBase->StopReceive(videoChannel));
+        EXPECT_NE(0, ptrViEBase->StopSend(videoChannel));  // Already stopped
+
+        EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+        EXPECT_EQ(0, ptrViERender->RemoveRenderer(captureId));
+        EXPECT_EQ(0, ptrViERender->RemoveRenderer(videoChannel));
+        EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
+        EXPECT_EQ(0, ptrViEBase->DeleteChannel(videoChannel));
+    }
+
+    //
+    // Default channel
+    //
+    {
+        // Create VIE
+        TbInterfaces ViE("ViECodecExtendedTest2");
+        // Create a capture device
+        TbCaptureDevice tbCapture(ViE);
+
+        // Create channel 1
+        int videoChannel1 = -1;
+        EXPECT_EQ(0, ViE.base->CreateChannel(videoChannel1));
+
+        unsigned short rtpPort1 = 12000;
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            videoChannel1, rtpPort1));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            videoChannel1, "127.0.0.1", rtpPort1));
+        tbCapture.ConnectTo(videoChannel1);
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            videoChannel1, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ViE.render->AddRenderer(
+            videoChannel1, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ViE.render->StartRender(videoChannel1));
+
+        ViEAutotestCodecObserver codecObserver1;
+        EXPECT_EQ(0, ViE.codec->RegisterEncoderObserver(
+            videoChannel1, codecObserver1));
+        EXPECT_EQ(0, ViE.codec->RegisterDecoderObserver(
+            videoChannel1, codecObserver1));
+
+        // Set send codec: pick VP8 at QCIF with 200-300 kbps.
+        unsigned short codecWidth = 176;
+        unsigned short codecHeight = 144;
+        bool codecSet = false;
+        webrtc::VideoCodec videoCodec;
+        for (int idx = 0; idx < ViE.codec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ViE.codec->GetCodec(idx, videoCodec));
+            EXPECT_EQ(0, ViE.codec->SetReceiveCodec(videoChannel1, videoCodec));
+            if (videoCodec.codecType == webrtc::kVideoCodecVP8)
+            {
+                videoCodec.width = codecWidth;
+                videoCodec.height = codecHeight;
+                videoCodec.startBitrate = 200;
+                videoCodec.maxBitrate = 300;
+                EXPECT_EQ(0, ViE.codec->SetSendCodec(
+                    videoChannel1, videoCodec));
+                codecSet = true;
+                break;
+            }
+        }
+        EXPECT_TRUE(codecSet);
+        webrtc::VideoCodec send_codec;
+        memcpy(&send_codec, &videoCodec, sizeof(videoCodec));
+
+        EXPECT_EQ(0, ViE.base->StartSend(videoChannel1));
+        EXPECT_EQ(0, ViE.base->StartReceive(videoChannel1));
+
+        // Create channel 2, based on channel 1
+        int videoChannel2 = -1;
+        EXPECT_EQ(0, ViE.base->CreateChannel(videoChannel2, videoChannel1));
+        EXPECT_NE(videoChannel1, videoChannel2) <<
+            "Channel 2 should be a separate channel";
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            videoChannel2, webrtc::kViEKeyFrameRequestPliRtcp));
+
+        // Prepare receive codecs
+        for (int idx = 0; idx < ViE.codec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ViE.codec->GetCodec(idx, videoCodec));
+            EXPECT_EQ(0, ViE.codec->SetReceiveCodec(videoChannel2, videoCodec));
+        }
+
+        ViEAutotestCodecObserver codecObserver2;
+        EXPECT_EQ(0, ViE.codec->RegisterDecoderObserver(
+            videoChannel2, codecObserver2));
+        EXPECT_EQ(0, ViE.render->AddRenderer(
+            videoChannel2, _window2, 0, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ViE.render->StartRender(videoChannel2));
+
+        unsigned short rtpPort2 = 13000;
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(videoChannel2, rtpPort2));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            videoChannel2, "127.0.0.1", rtpPort2));
+
+        EXPECT_EQ(0, ViE.base->StartReceive(videoChannel2));
+        // NOTE(review): a channel created based on another is apparently
+        // not allowed to start its own send — confirm against ViEBase docs.
+        EXPECT_EQ(-1, ViE.base->StartSend(videoChannel2));
+
+        EXPECT_EQ(0, ViE.base->DeleteChannel(videoChannel1));
+        EXPECT_EQ(0, ViE.base->DeleteChannel(videoChannel2));
+
+    }
+}
+
+// Codec API test: creates a bare engine + channel (no capture/render),
+// verifies SetSendCodec/GetSendCodec round-trips for VP8 and I420, and
+// that the reported target bitrate equals the configured start bitrate.
+void ViEAutoTest::ViECodecAPITest()
+{
+    // ***************************************************************
+    // Begin create/initialize WebRTC Video Engine for testing
+    // ***************************************************************
+    webrtc::VideoEngine* ptrViE = NULL;
+    ptrViE = webrtc::VideoEngine::Create();
+    EXPECT_TRUE(ptrViE != NULL);
+
+    webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+    EXPECT_TRUE(ptrViEBase != NULL);
+
+    EXPECT_EQ(0, ptrViEBase->Init());
+
+    int videoChannel = -1;
+    EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel));
+
+    webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+    EXPECT_TRUE(ptrViECodec != NULL);
+
+    // ***************************************************************
+    // Engine ready. Begin testing class
+    // ***************************************************************
+
+    //
+    // SendCodec
+    //
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+
+    const int numberOfCodecs = ptrViECodec->NumberOfCodecs();
+    EXPECT_GT(numberOfCodecs, 0);
+
+    SetSendCodec(webrtc::kVideoCodecVP8, ptrViECodec, videoChannel,
+                 kDoNotForceResolution, kDoNotForceResolution);
+
+    memset(&videoCodec, 0, sizeof(videoCodec));
+    EXPECT_EQ(0, ptrViECodec->GetSendCodec(videoChannel, videoCodec));
+    EXPECT_EQ(webrtc::kVideoCodecVP8, videoCodec.codecType);
+    // Verify that the target bit rate is equal to the start bitrate.
+    unsigned int target_bitrate = 0;
+    EXPECT_EQ(0, ptrViECodec->GetCodecTargetBitrate(videoChannel,
+                                                    &target_bitrate));
+    EXPECT_EQ(videoCodec.startBitrate, target_bitrate);
+
+    // Switching to I420 must also round-trip through GetSendCodec().
+    SetSendCodec(webrtc::kVideoCodecI420, ptrViECodec, videoChannel,
+                 kDoNotForceResolution, kDoNotForceResolution);
+    memset(&videoCodec, 0, sizeof(videoCodec));
+    EXPECT_EQ(0, ptrViECodec->GetSendCodec(videoChannel, videoCodec));
+    EXPECT_EQ(webrtc::kVideoCodecI420, videoCodec.codecType);
+
+    // ***************************************************************
+    // Testing finished. Tear down Video Engine
+    // ***************************************************************
+
+    EXPECT_EQ(0, ptrViEBase->DeleteChannel(videoChannel));
+
+    EXPECT_EQ(0, ptrViECodec->Release());
+    EXPECT_EQ(0, ptrViEBase->Release());
+    EXPECT_TRUE(webrtc::VideoEngine::Delete(ptrViE));
+}
+
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+#include "vie_external_codec.h"
+#endif
+// External codec test. Runs an I420 loopback channel, then swaps in the
+// TbI420Encoder/TbI420Decoder fakes as external codecs (including
+// negative tests for wrong channel / wrong payload type) and verifies,
+// via the fakes' call counters, that both were actually exercised —
+// once at the original payload type, once after changing it. The exact
+// register/deregister ordering is what is under test; keep it intact.
+void ViEAutoTest::ViECodecExternalCodecTest()
+{
+    // ***************************************************************
+    // Begin create/initialize WebRTC Video Engine for testing
+    // ***************************************************************
+
+
+    // ***************************************************************
+    // Engine ready. Begin testing class
+    // ***************************************************************
+
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+    {
+        TbInterfaces ViE("ViEExternalCodec");
+        TbCaptureDevice captureDevice(ViE);
+        // CIF @ 30 fps; bitrate derived from the raw I420 frame size.
+        TbVideoChannel channel(
+            ViE, webrtc::kVideoCodecI420, 352,288,30,(352*288*3*8*30)/(2*1000));
+
+        captureDevice.ConnectTo(channel.videoChannel);
+
+        EXPECT_EQ(0, ViE.render->AddRenderer(
+            channel.videoChannel, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ViE.render->StartRender(channel.videoChannel));
+
+        channel.StartReceive();
+        channel.StartSend();
+
+        ViETest::Log("Using internal I420 codec");
+        AutoTestSleep(KAutoTestSleepTimeMs/2);
+
+        webrtc::ViEExternalCodec* ptrViEExtCodec =
+            webrtc::ViEExternalCodec::GetInterface(ViE.video_engine);
+        EXPECT_TRUE(ptrViEExtCodec != NULL);
+
+        webrtc::VideoCodec codecStruct;
+
+        EXPECT_EQ(0, ViE.codec->GetSendCodec(
+            channel.videoChannel, codecStruct));
+
+        // Use external encoder instead
+        {
+            TbI420Encoder extEncoder;
+
+            // Test to register on wrong channel
+            EXPECT_NE(0, ptrViEExtCodec->RegisterExternalSendCodec(
+                channel.videoChannel+5,codecStruct.plType,&extEncoder));
+            EXPECT_EQ(kViECodecInvalidArgument, ViE.LastError());
+
+            EXPECT_EQ(0, ptrViEExtCodec->RegisterExternalSendCodec(
+                channel.videoChannel,codecStruct.plType,&extEncoder));
+
+            // Use new external encoder
+            EXPECT_EQ(0, ViE.codec->SetSendCodec(
+                channel.videoChannel, codecStruct));
+
+            TbI420Decoder extDecoder;
+            EXPECT_EQ(0, ptrViEExtCodec->RegisterExternalReceiveCodec(
+                channel.videoChannel,codecStruct.plType,&extDecoder));
+
+            EXPECT_EQ(0, ViE.codec->SetReceiveCodec(
+                channel.videoChannel, codecStruct));
+
+            ViETest::Log("Using external I420 codec");
+            AutoTestSleep(KAutoTestSleepTimeMs);
+
+            // Test to deregister on wrong channel
+            EXPECT_NE(0, ptrViEExtCodec->DeRegisterExternalSendCodec(
+                channel.videoChannel+5,codecStruct.plType));
+            EXPECT_EQ(kViECodecInvalidArgument, ViE.LastError());
+
+            // Test to deregister wrong payload type.
+            EXPECT_NE(0, ptrViEExtCodec->DeRegisterExternalSendCodec(
+                channel.videoChannel,codecStruct.plType-1));
+
+            // Deregister external send codec
+            EXPECT_EQ(0, ptrViEExtCodec->DeRegisterExternalSendCodec(
+                channel.videoChannel,codecStruct.plType));
+
+            EXPECT_EQ(0, ptrViEExtCodec->DeRegisterExternalReceiveCodec(
+                channel.videoChannel,codecStruct.plType));
+
+            // Verify that the encoder and decoder has been used
+            TbI420Encoder::FunctionCalls encodeCalls =
+                extEncoder.GetFunctionCalls();
+            EXPECT_EQ(1, encodeCalls.InitEncode);
+            EXPECT_EQ(1, encodeCalls.Release);
+            EXPECT_EQ(1, encodeCalls.RegisterEncodeCompleteCallback);
+            EXPECT_GT(encodeCalls.Encode, 30);
+            EXPECT_GT(encodeCalls.SetRates, 1);
+            EXPECT_GT(encodeCalls.SetPacketLoss, 1);
+
+            TbI420Decoder::FunctionCalls decodeCalls =
+                extDecoder.GetFunctionCalls();
+            EXPECT_EQ(1, decodeCalls.InitDecode);
+            EXPECT_EQ(1, decodeCalls.Release);
+            EXPECT_EQ(1, decodeCalls.RegisterDecodeCompleteCallback);
+            EXPECT_GT(decodeCalls.Decode, 30);
+
+            ViETest::Log("Changing payload type Using external I420 codec");
+
+            // Re-register both codecs under a different payload type.
+            codecStruct.plType = codecStruct.plType - 1;
+            EXPECT_EQ(0, ptrViEExtCodec->RegisterExternalReceiveCodec(
+                channel.videoChannel, codecStruct.plType, &extDecoder));
+
+            EXPECT_EQ(0, ViE.codec->SetReceiveCodec(
+                channel.videoChannel, codecStruct));
+
+            EXPECT_EQ(0, ptrViEExtCodec->RegisterExternalSendCodec(
+                channel.videoChannel, codecStruct.plType, &extEncoder));
+
+            // Use new external encoder
+            EXPECT_EQ(0, ViE.codec->SetSendCodec(
+                channel.videoChannel, codecStruct));
+
+            AutoTestSleep(KAutoTestSleepTimeMs/2);
+
+            // ***************************************************************
+            // Testing finished. Tear down Video Engine
+            // ***************************************************************
+
+            EXPECT_EQ(0, ptrViEExtCodec->DeRegisterExternalSendCodec(
+                channel.videoChannel,codecStruct.plType));
+            EXPECT_EQ(0, ptrViEExtCodec->DeRegisterExternalReceiveCodec(
+                channel.videoChannel,codecStruct.plType));
+
+            // Verify that the encoder and decoder has been used
+            encodeCalls = extEncoder.GetFunctionCalls();
+            EXPECT_EQ(2, encodeCalls.InitEncode);
+            EXPECT_EQ(2, encodeCalls.Release);
+            EXPECT_EQ(2, encodeCalls.RegisterEncodeCompleteCallback);
+            EXPECT_GT(encodeCalls.Encode, 30);
+            EXPECT_GT(encodeCalls.SetRates, 1);
+            EXPECT_GT(encodeCalls.SetPacketLoss, 1);
+
+            decodeCalls = extDecoder.GetFunctionCalls();
+
+            EXPECT_EQ(2, decodeCalls.InitDecode);
+            EXPECT_EQ(2, decodeCalls.Release);
+            EXPECT_EQ(2, decodeCalls.RegisterDecodeCompleteCallback);
+            EXPECT_GT(decodeCalls.Decode, 30);
+
+            EXPECT_EQ(0, ptrViEExtCodec->Release());
+        }  // tbI420Encoder and extDecoder goes out of scope
+
+        // Back on the internal codec after the externals deregistered.
+        ViETest::Log("Using internal I420 codec");
+        AutoTestSleep(KAutoTestSleepTimeMs/2);
+
+    }
+
+#else
+    ViETest::Log(" ViEExternalCodec not enabled\n");
+#endif
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_custom_call.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_custom_call.cc
new file mode 100644
index 0000000..fa8bf76
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_custom_call.cc
@@ -0,0 +1,2058 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  vie_autotest_custom_call.cc
+ *
+ */
+
+#include <iostream>
+
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+
+#define VCM_RED_PAYLOAD_TYPE                            96
+#define VCM_ULPFEC_PAYLOAD_TYPE                         97
+#define DEFAULT_SEND_IP                                 "127.0.0.1"
+#define DEFAULT_VIDEO_PORT                              11111
+#define DEFAULT_VIDEO_CODEC                             "vp8"
+#define DEFAULT_VIDEO_CODEC_WIDTH                       640
+#define DEFAULT_VIDEO_CODEC_HEIGHT                      480
+#define DEFAULT_VIDEO_CODEC_BITRATE                     300
+#define DEFAULT_VIDEO_CODEC_MIN_BITRATE                 100
+#define DEFAULT_VIDEO_CODEC_MAX_BITRATE                 1000
+#define DEFAULT_AUDIO_PORT                              11113
+#define DEFAULT_AUDIO_CODEC                             "ISAC"
+#define DEFAULT_INCOMING_FILE_NAME                      "IncomingFile.avi"
+#define DEFAULT_OUTGOING_FILE_NAME                      "OutgoingFile.avi"
+#define DEFAULT_VIDEO_CODEC_MAX_FRAMERATE               30
+#define DEFAULT_VIDEO_PROTECTION_METHOD                 0
+#define DEFAULT_TEMPORAL_LAYER                          0
+
+// Selects which direction of statistics the Print* helpers report.
+enum StatisticsType {
+  kSendStatistic,
+  kReceivedStatistic
+};
+
+// File observer that just logs when a played file reaches its end.
+class ViEAutotestFileObserver: public webrtc::ViEFileObserver {
+ public:
+  ViEAutotestFileObserver() {};
+  ~ViEAutotestFileObserver() {};
+
+  void PlayFileEnded(const WebRtc_Word32 fileId) {
+    ViETest::Log("PlayFile ended");
+  }
+};
+
+// Encoder observer that prints the outgoing frame rate and bitrate to
+// stdout each time the engine reports them.
+class ViEAutotestEncoderObserver: public webrtc::ViEEncoderObserver {
+ public:
+  ViEAutotestEncoderObserver() {};
+  ~ViEAutotestEncoderObserver() {};
+
+  void OutgoingRate(const int videoChannel,
+                    const unsigned int framerate,
+                    const unsigned int bitrate) {
+    std::cout << "Send FR: " << framerate
+              << " BR: " << bitrate << std::endl;
+  }
+};
+
+// Decoder observer: prints the incoming frame rate/bitrate and notes
+// key-frame requests; ignores incoming codec changes.
+class ViEAutotestDecoderObserver: public webrtc::ViEDecoderObserver {
+ public:
+  ViEAutotestDecoderObserver() {};
+  ~ViEAutotestDecoderObserver() {};
+
+  void IncomingRate(const int videoChannel,
+                    const unsigned int framerate,
+                    const unsigned int bitrate) {
+    std::cout << "Received FR: " << framerate
+              << " BR: " << bitrate << std::endl;
+  }
+  // Codec switches are expected during the tests; nothing to do here.
+  void IncomingCodecChanged(const int videoChannel,
+                            const webrtc::VideoCodec& codec) {}
+  void RequestNewKeyFrame(const int videoChannel) {
+    std::cout << "Decoder requesting a new key frame." << std::endl;
+  }
+};
+
+// The following are general helper functions.
+bool GetVideoDevice(webrtc::ViEBase* ptrViEBase,
+                    webrtc::ViECapture* ptrViECapture,
+                    char* captureDeviceName, char* captureDeviceUniqueId);
+bool GetIPAddress(char* IP);
+#ifndef WEBRTC_ANDROID
+bool ValidateIP(std::string iStr);
+#endif
+// The following are Print to stdout functions.
+void PrintCallInformation(char* IP, char* videoCaptureDeviceName,
+                          char* videoCaptureUniqueId,
+                          webrtc::VideoCodec videoCodec, int videoTxPort,
+                          int videoRxPort, char* audioCaptureDeviceName,
+                          char* audioPlaybackDeviceName,
+                          webrtc::CodecInst audioCodec, int audioTxPort,
+                          int audioRxPort, int protectionMethod);
+void PrintRTCCPStatistics(webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                          int videoChannel, StatisticsType statType);
+void PrintRTPStatistics(webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                        int videoChannel);
+void PrintBandwidthUsage(webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                         int videoChannel);
+void PrintCodecStatistics(webrtc::ViECodec* ptrViECodec, int videoChannel,
+                          StatisticsType statType);
+void PrintGetDiscardedPackets(webrtc::ViECodec* ptrViECodec, int videoChannel);
+void PrintVideoStreamInformation(webrtc::ViECodec* ptrViECodec,
+                                 int videoChannel);
+void PrintVideoCodec(webrtc::VideoCodec videoCodec);
+
+// The following are video functions.
+// TODO(amyfong): change to pointers as input arguments 
+// instead of references
+bool SetVideoPorts(int* txPort, int* rxPort);
+bool SetVideoCodecType(webrtc::ViECodec* ptrViECodec,
+                       webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecResolution(webrtc::ViECodec* ptrViECodec,
+                             webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecSize(webrtc::ViECodec* ptrViECodec,
+                       webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecBitrate(webrtc::ViECodec* ptrViECodec,
+                          webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecMinBitrate(webrtc::ViECodec* ptrViECodec,
+                             webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecMaxBitrate(webrtc::ViECodec* ptrViECodec,
+                             webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecMaxFramerate(webrtc::ViECodec* ptrViECodec,
+                               webrtc::VideoCodec& videoCodec);
+bool SetVideoCodecTemporalLayer(webrtc::VideoCodec& videoCodec);
+int GetVideoProtection();
+bool SetVideoProtection(webrtc::ViECodec* ptrViECodec,
+                        webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                        int videoChannel, int protectionMethod);
+bool GetBitrateSignaling();
+
+// The following are audio helper functions.
+bool GetAudioDevices(webrtc::VoEBase* ptrVEBase,
+                     webrtc::VoEHardware* ptrVEHardware,
+                     char* recordingDeviceName, int& recordingDeviceIndex,
+                     char* playbackDeviceName, int& playbackDeviceIndex);
+bool GetAudioDevices(webrtc::VoEBase* ptrVEBase,
+                     webrtc::VoEHardware* ptrVEHardware,
+                     int& recordingDeviceIndex, int& playbackDeviceIndex);
+bool GetAudioPorts(int* txPort, int* rxPort);
+bool GetAudioCodec(webrtc::VoECodec* ptrVeCodec,
+                   webrtc::CodecInst& audioCodec);
+
+int ViEAutoTest::ViECustomCall()
+{
+  ViETest::Log(" ");
+  ViETest::Log("========================================");
+  ViETest::Log(" Enter values to use custom settings\n");
+
+  int error = 0;
+  int numberOfErrors = 0;
+  std::string str;
+
+  // Create the VoE and get the VoE interfaces.
+  webrtc::VoiceEngine* ptrVE = webrtc::VoiceEngine::Create();
+  numberOfErrors += ViETest::TestError(ptrVE != NULL, "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+
+  webrtc::VoEBase* ptrVEBase = webrtc::VoEBase::GetInterface(ptrVE);
+  numberOfErrors += ViETest::TestError(ptrVEBase != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  error = ptrVEBase->Init();
+  numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+
+  webrtc::VoECodec* ptrVECodec = webrtc::VoECodec::GetInterface(ptrVE);
+  numberOfErrors += ViETest::TestError(ptrVECodec != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::VoEHardware* ptrVEHardware =
+      webrtc::VoEHardware::GetInterface(ptrVE);
+  numberOfErrors += ViETest::TestError(ptrVEHardware != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::VoEAudioProcessing* ptrVEAPM =
+      webrtc::VoEAudioProcessing::GetInterface(ptrVE);
+  numberOfErrors += ViETest::TestError(ptrVEAPM != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  // Create the ViE and get the ViE Interfaces.
+  webrtc::VideoEngine* ptrViE = webrtc::VideoEngine::Create();
+  numberOfErrors += ViETest::TestError(ptrViE != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+  numberOfErrors += ViETest::TestError(ptrViEBase != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  error = ptrViEBase->Init();
+  numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+
+  webrtc::ViECapture* ptrViECapture =
+    webrtc::ViECapture::GetInterface(ptrViE);
+  numberOfErrors += ViETest::TestError(ptrViECapture != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
+  numberOfErrors += ViETest::TestError(ptrViERender != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+  numberOfErrors += ViETest::TestError(ptrViECodec != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::ViENetwork* ptrViENetwork =
+    webrtc::ViENetwork::GetInterface(ptrViE);
+  numberOfErrors += ViETest::TestError(ptrViENetwork != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  webrtc::ViEFile* ptrViEFile = webrtc::ViEFile::GetInterface(ptrViE);
+  numberOfErrors += ViETest::TestError(ptrViEFile != NULL,
+                                       "ERROR: %s at line %d", __FUNCTION__,
+                                       __LINE__);
+
+  bool startCall = false;
+  const unsigned int kMaxIPLength = 16;
+  char ipAddress[kMaxIPLength] = "";
+  const unsigned int KMaxUniqueIdLength = 256;
+  char uniqueId[KMaxUniqueIdLength] = "";
+  char deviceName[KMaxUniqueIdLength] = "";
+  int videoTxPort = 0;
+  int videoRxPort = 0;
+  int videoChannel = -1;
+  webrtc::VideoCodec videoSendCodec;
+  char audioCaptureDeviceName[KMaxUniqueIdLength] = "";
+  char audioPlaybackDeviceName[KMaxUniqueIdLength] = "";
+  int audioCaptureDeviceIndex = -1;
+  int audioPlaybackDeviceIndex = -1;
+  int audioTxPort = 0;
+  int audioRxPort = 0;
+  webrtc::CodecInst audioCodec;
+  int audioChannel = -1;
+  bool isImageScaleEnabled = false;
+  int protectionMethod = DEFAULT_VIDEO_PROTECTION_METHOD;
+  bool remb = true;
+
+  while (!startCall) {
+    // Get the IP address to use from call.
+    memset(ipAddress, 0, kMaxIPLength);
+    GetIPAddress(ipAddress);
+
+    // Get the video device to use for call.
+    memset(deviceName, 0, KMaxUniqueIdLength);
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+    GetVideoDevice(ptrViEBase, ptrViECapture, deviceName, uniqueId);
+
+    // Get and set the video ports for the call.
+    videoTxPort = 0;
+    videoRxPort = 0;
+    SetVideoPorts(&videoTxPort, &videoRxPort);
+
+    // Get and set the video codec parameters for the call.
+    memset((void*)&videoSendCodec, 0, sizeof(webrtc::VideoCodec));
+    SetVideoCodecType(ptrViECodec, videoSendCodec);
+    SetVideoCodecSize(ptrViECodec, videoSendCodec);
+    SetVideoCodecBitrate(ptrViECodec, videoSendCodec);
+    SetVideoCodecMinBitrate(ptrViECodec, videoSendCodec);
+    SetVideoCodecMaxBitrate(ptrViECodec, videoSendCodec);
+    SetVideoCodecMaxFramerate(ptrViECodec, videoSendCodec);
+    SetVideoCodecTemporalLayer(videoSendCodec);
+    remb = GetBitrateSignaling();
+
+    // Get the video protection method for the call.
+    protectionMethod = GetVideoProtection();
+
+    // Get the audio device for the call.
+    memset(audioCaptureDeviceName, 0, KMaxUniqueIdLength);
+    memset(audioPlaybackDeviceName, 0, KMaxUniqueIdLength);
+    GetAudioDevices(ptrVEBase, ptrVEHardware, audioCaptureDeviceName,
+                    audioCaptureDeviceIndex, audioPlaybackDeviceName,
+                    audioPlaybackDeviceIndex);
+
+    // Get the audio port for the call.
+    audioTxPort = 0;
+    audioRxPort = 0;
+    GetAudioPorts(&audioTxPort, &audioRxPort);
+
+    // Get the audio codec for the call.
+    memset((void*)&audioCodec, 0, sizeof(audioCodec));
+    GetAudioCodec(ptrVECodec, audioCodec);
+
+    // Now ready to start the call.  Check user wants to continue.
+    PrintCallInformation(ipAddress, deviceName, uniqueId, videoSendCodec,
+                         videoTxPort, videoRxPort, audioCaptureDeviceName,
+                         audioPlaybackDeviceName, audioCodec, audioTxPort,
+                         audioRxPort, protectionMethod);
+
+    std::cout << std::endl;
+    std::cout << "1. Start the call" << std::endl;
+    std::cout << "2. Reconfigure call settings" << std::endl;
+    std::cout << "What do you want to do? Press enter for default "
+              << "(Start the call): ";
+
+    std::getline(std::cin, str);
+    int selection = 0;
+    selection = atoi(str.c_str());
+
+    switch (selection) {
+      case 0:
+        startCall = true;
+        break;
+      case 1:
+        startCall = true;
+        break;
+      case 2:
+        startCall = false;
+        break;
+      default:
+        // Invalid selection gets an error message.
+        std::cout << "ERROR: Code=" << error
+                  << " Invalid selection" << std::endl;
+        continue;
+    }
+  }
+  //***************************************************************
+  // Begin create/initialize WebRTC Video Engine for testing.
+  //***************************************************************
+  if (startCall == true) {
+    // Configure audio channel first.
+    audioChannel = ptrVEBase->CreateChannel();
+    error = ptrVEBase->SetSendDestination(audioChannel, audioTxPort,
+                                          ipAddress);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEBase->SetLocalReceiver(audioChannel, audioRxPort);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEHardware->SetRecordingDevice(audioCaptureDeviceIndex);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEHardware->SetPlayoutDevice(audioPlaybackDeviceIndex);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVECodec->SetSendCodec(audioChannel, audioCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEAPM->SetAgcStatus(true, webrtc::kAgcDefault);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEAPM->SetNsStatus(true, webrtc::kNsHighSuppression);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    // Now configure the video channel.
+    error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    std::string trace_file =
+        ViETest::GetResultOutputPath() + "ViECustomCall_trace.txt";
+    error = ptrViE->SetTraceFile(trace_file.c_str());
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViEBase->SetVoiceEngine(ptrVE);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViEBase->CreateChannel(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViEBase->ConnectAudioChannel(videoChannel, audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId,
+                                                 KMaxUniqueIdLength,
+                                                 captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECapture->StartCapture(captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    webrtc::ViERTP_RTCP* ptrViERtpRtcp =
+    webrtc::ViERTP_RTCP::GetInterface(ptrViE);
+    numberOfErrors += ViETest::TestError(ptrViE != NULL,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+                                         webrtc::kRtcpCompound_RFC4585);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(videoChannel,
+        webrtc::kViEKeyFrameRequestPliRtcp);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    if (remb) {
+      error = ptrViERtpRtcp->SetRembStatus(videoChannel, true, true);
+      numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    } else  {
+      error = ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true);
+      numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    }
+
+    error = ptrViERender->AddRenderer(captureId, _window1, 0, 0.0, 0.0,
+                                      1.0, 1.0);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERender->AddRenderer(videoChannel, _window2, 1, 0.0, 0.0,
+                                      1.0, 1.0);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    error = ptrViENetwork->SetSendDestination(videoChannel, ipAddress,
+                                              videoTxPort);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViENetwork->SetLocalReceiver(videoChannel, videoRxPort);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECodec->SetSendCodec(videoChannel, videoSendCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECodec->SetReceiveCodec(videoChannel, videoSendCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    // Set the Video Protection before start send and receive.
+    SetVideoProtection(ptrViECodec, ptrViERtpRtcp,
+                       videoChannel, protectionMethod);
+
+    // Start Voice Playout and Receive.
+    error = ptrVEBase->StartReceive(audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEBase->StartPlayout(audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEBase->StartSend(audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    // Now start the Video Send & Receive.
+    error = ptrViEBase->StartSend(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViEBase->StartReceive(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERender->StartRender(captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERender->StartRender(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    ViEAutotestFileObserver fileObserver;
+    int fileId;
+
+    ViEAutotestEncoderObserver* codecEncoderObserver = NULL;
+    ViEAutotestDecoderObserver* codecDecoderObserver = NULL;
+
+    //***************************************************************
+    //  Engine ready. Wait for input
+    //***************************************************************
+
+    // Call started.
+    std::cout << std::endl;
+    std::cout << "Custom call started" << std::endl;
+    std::cout << std::endl << std::endl;
+
+    // Modify call or stop call.
+
+    std::cout << "Custom call in progress, would you like do?" << std::endl;
+    std::cout << "  0. Stop the call" << std::endl;
+    std::cout << "  1. Modify the call" << std::endl;
+    std::cout << "What do you want to do? "
+              << "Press enter for default (Stop the call): ";
+
+    std::getline(std::cin, str);
+    int selection = 0;
+    selection = atoi(str.c_str());
+
+    // Keep on modifying the call until user selects finish modify call.
+    bool modify_call = false;
+
+    while (selection == 1) {
+      std::cout << "Modify Custom Call" << std::endl;
+      std::cout << "  0. Finished modifying custom call" << std::endl;
+      std::cout << "  1. Change Video Send Codec" << std::endl;
+      std::cout << "  2. Change Video Send Size by Common Resolutions"
+                << std::endl;
+      std::cout << "  3. Change Video Send Size by Width & Height" << std::endl;
+      std::cout << "  4. Change Video Capture Device" << std::endl;
+      std::cout << "  5. Record Incoming Call" << std::endl;
+      std::cout << "  6. Record Outgoing Call" << std::endl;
+      std::cout << "  7. Play File on Video Channel"
+                << "(Assumes you recorded incoming & outgoing call)"
+                << std::endl;
+      std::cout << "  8. Change Video Protection Method" << std::endl;
+      std::cout << "  9. Toggle Encoder Observer" << std::endl;
+      std::cout << " 10. Toggle Decoder Observer" << std::endl;
+      std::cout << " 11. Print Call Information" << std::endl;
+      std::cout << " 12. Print Call Statistics" << std::endl;
+      std::cout << " 13. Toggle Image Scaling "
+                << "(Warning high CPU usage when enabled)"
+                << std::endl;
+      std::cout << "What do you want to do? ";
+      std::cout << "Press enter for default "
+                << "(Finished modifying custom call): ";
+
+      std::getline(std::cin, str);
+      int modify_selection = 0;
+      int file_selection = 0;
+
+      modify_selection = atoi(str.c_str());
+
+      switch (modify_selection) {
+        case 0:
+          std::cout << "Finished modifying custom call." << std::endl;
+          modify_call = false;
+          break;
+        case 1:
+          // Change video Codec
+          SetVideoCodecType(ptrViECodec, videoSendCodec);
+          SetVideoCodecSize(ptrViECodec, videoSendCodec);
+          SetVideoCodecBitrate(ptrViECodec, videoSendCodec);
+          SetVideoCodecMinBitrate(ptrViECodec, videoSendCodec);
+          SetVideoCodecMaxBitrate(ptrViECodec, videoSendCodec);
+          SetVideoCodecMaxFramerate(ptrViECodec, videoSendCodec);
+          SetVideoCodecTemporalLayer(videoSendCodec);
+          PrintCallInformation(ipAddress, deviceName,
+                               uniqueId, videoSendCodec,
+                               videoTxPort, videoRxPort,
+                               audioCaptureDeviceName,
+                               audioPlaybackDeviceName, audioCodec,
+                               audioTxPort, audioRxPort, protectionMethod);
+          error = ptrViECodec->SetSendCodec(videoChannel, videoSendCodec);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECodec->SetReceiveCodec(videoChannel, videoSendCodec);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 2:
+          // Change Video codec size by common resolution
+          SetVideoCodecResolution(ptrViECodec, videoSendCodec);
+          PrintCallInformation(ipAddress, deviceName,
+                               uniqueId, videoSendCodec,
+                               videoTxPort, videoRxPort,
+                               audioCaptureDeviceName,
+                               audioPlaybackDeviceName, audioCodec,
+                               audioTxPort, audioRxPort, protectionMethod);
+          error = ptrViECodec->SetSendCodec(videoChannel, videoSendCodec);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECodec->SetReceiveCodec(videoChannel, videoSendCodec);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 3:
+          // Change Video codec by size height and width
+          SetVideoCodecSize(ptrViECodec, videoSendCodec);
+          PrintCallInformation(ipAddress, deviceName,
+                               uniqueId, videoSendCodec,
+                               videoTxPort, videoRxPort,
+                               audioCaptureDeviceName,
+                               audioPlaybackDeviceName, audioCodec,
+                               audioTxPort, audioRxPort, protectionMethod);
+          error = ptrViECodec->SetSendCodec(videoChannel, videoSendCodec);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECodec->SetReceiveCodec(videoChannel, videoSendCodec);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 4:
+          error = ptrViERender->StopRender(captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViERender->RemoveRenderer(captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECapture->StopCapture(captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECapture->ReleaseCaptureDevice(captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          memset(deviceName, 0, KMaxUniqueIdLength);
+          memset(uniqueId, 0, KMaxUniqueIdLength);
+          GetVideoDevice(ptrViEBase, ptrViECapture, deviceName, uniqueId);
+          captureId = 0;
+          error = ptrViECapture->AllocateCaptureDevice(uniqueId,
+                                                       KMaxUniqueIdLength,
+                                                       captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECapture->ConnectCaptureDevice(captureId,
+                                                      videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECapture->StartCapture(captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViERender->AddRenderer(
+              captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          error = ptrViERender->StartRender(captureId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR: %s at line %d",
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 5:
+          // Record the incoming call
+          std::cout << "Start Recording Incoming Video "
+                    << DEFAULT_INCOMING_FILE_NAME <<  std::endl;
+          error = ptrViEFile->StartRecordIncomingVideo(
+              videoChannel, DEFAULT_INCOMING_FILE_NAME,
+              webrtc::NO_AUDIO, audioCodec, videoSendCodec);
+          std::cout << "Press enter to stop...";
+          std::getline(std::cin, str);
+          error = ptrViEFile->StopRecordIncomingVideo(videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 6:
+          // Record the outgoing call
+          std::cout << "Start Recording Outgoing Video "
+                    << DEFAULT_OUTGOING_FILE_NAME <<  std::endl;
+          error = ptrViEFile->StartRecordOutgoingVideo(
+              videoChannel, DEFAULT_OUTGOING_FILE_NAME,
+              webrtc::NO_AUDIO, audioCodec, videoSendCodec);
+          std::cout << "Press enter to stop...";
+          std::getline(std::cin, str);
+          error = ptrViEFile->StopRecordOutgoingVideo(videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 7:
+          // Send the file on the videoChannel
+          file_selection = 0;
+          std::cout << "Available files to play" << std::endl;
+          std::cout << "  0. " << DEFAULT_INCOMING_FILE_NAME <<  std::endl;
+          std::cout << "  1. " << DEFAULT_OUTGOING_FILE_NAME <<  std::endl;
+          std::cout << "Press enter for default ("
+                    << DEFAULT_INCOMING_FILE_NAME << "): ";
+          std::getline(std::cin, str);
+          file_selection = atoi(str.c_str());
+          // Disconnect the camera first
+          error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          if (file_selection == 1)
+            error = ptrViEFile->StartPlayFile(DEFAULT_OUTGOING_FILE_NAME,
+                                              fileId, true);
+          else
+            error = ptrViEFile->StartPlayFile(DEFAULT_INCOMING_FILE_NAME,
+                                              fileId, true);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                              __FUNCTION__, __LINE__);
+          ViETest::Log("Registering file observer");
+          error = ptrViEFile->RegisterObserver(fileId, fileObserver);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          std::cout << std::endl;
+          std::cout << "Start sending the file that is played in a loop "
+                    << std::endl;
+          error = ptrViEFile->SendFileOnChannel(fileId, videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          std::cout << "Press enter to stop...";
+          std::getline(std::cin, str);
+          ViETest::Log("Stopped sending video on channel");
+          error = ptrViEFile->StopSendFileOnChannel(videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          ViETest::Log("Stop playing the file.");
+          error = ptrViEFile->StopPlayFile(fileId);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          error = ptrViECapture->ConnectCaptureDevice(captureId,
+                                                      videoChannel);
+          numberOfErrors += ViETest::TestError(error == 0,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          error = ptrViEFile->DeregisterObserver(fileId, fileObserver);
+          numberOfErrors += ViETest::TestError(error == -1,
+                                               "ERROR:%d %s at line %d",
+                                               ptrViEBase->LastError(),
+                                               __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 8:
+          // Change the Video Protection
+          protectionMethod = GetVideoProtection();
+          SetVideoProtection(ptrViECodec, ptrViERtpRtcp,
+                             videoChannel, protectionMethod);
+          modify_call = true;
+          break;
+        case 9:
+          // Toggle Encoder Observer
+          if (!codecEncoderObserver) {
+            std::cout << "Registering Encoder Observer" << std::endl;
+            codecEncoderObserver = new ViEAutotestEncoderObserver();
+            error = ptrViECodec->RegisterEncoderObserver(videoChannel,
+                *codecEncoderObserver);
+            numberOfErrors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          } else {
+            std::cout << "Deregistering Encoder Observer" << std::endl;
+            error = ptrViECodec->DeregisterEncoderObserver(videoChannel);
+            delete codecEncoderObserver;
+            codecEncoderObserver = NULL;
+            numberOfErrors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          }
+          modify_call = true;
+          break;
+        case 10:
+          // Toggle Decoder Observer
+          if (!codecDecoderObserver) {
+            std::cout << "Registering Decoder Observer" << std::endl;
+            codecDecoderObserver = new ViEAutotestDecoderObserver();
+            error = ptrViECodec->RegisterDecoderObserver(videoChannel,
+                *codecDecoderObserver);
+            numberOfErrors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          } else {
+            std::cout << "Deregistering Decoder Observer" << std::endl;
+            error = ptrViECodec->DeregisterDecoderObserver(videoChannel);
+            delete codecDecoderObserver;
+            codecDecoderObserver = NULL;
+            numberOfErrors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          }
+          modify_call = true;
+          break;
+        case 11:
+          // Print Call information
+          PrintCallInformation(ipAddress, deviceName,
+                               uniqueId, videoSendCodec,
+                               videoTxPort, videoRxPort,
+                               audioCaptureDeviceName,
+                               audioPlaybackDeviceName,
+                               audioCodec, audioTxPort,
+                               audioRxPort, protectionMethod);
+          PrintVideoStreamInformation(ptrViECodec,
+                                      videoChannel);
+          modify_call = true;
+          break;
+        case 12:
+          // Print Call statistics
+          PrintRTCCPStatistics(ptrViERtpRtcp, videoChannel,
+                               kSendStatistic);
+          PrintRTCCPStatistics(ptrViERtpRtcp, videoChannel,
+                               kReceivedStatistic);
+          PrintRTPStatistics(ptrViERtpRtcp, videoChannel);
+          PrintBandwidthUsage(ptrViERtpRtcp, videoChannel);
+          PrintCodecStatistics(ptrViECodec, videoChannel,
+                               kSendStatistic);
+          PrintCodecStatistics(ptrViECodec, videoChannel,
+                               kReceivedStatistic);
+          PrintGetDiscardedPackets(ptrViECodec, videoChannel);
+          modify_call = true;
+          break;
+        case 13:
+          isImageScaleEnabled = !isImageScaleEnabled;
+          ptrViECodec->SetImageScaleStatus(videoChannel, isImageScaleEnabled);
+          if (isImageScaleEnabled) {
+            std::cout << "Image Scale is now enabled" << std::endl;
+          } else {
+            std::cout << "Image Scale is now disabled" << std::endl;
+          }
+          modify_call = true;
+          break;
+        default:
+          // invalid selection, shows options menu again
+          std::cout << "Invalid selection. Select Again." << std::endl;
+          break;
+      }
+      // modify_call is false if user does not select one of the
+      // modify options
+      if (modify_call == false) {
+        selection = 0;
+      }
+    }
+    // Stop the Call
+    std::cout << "Press enter to stop...";
+    std::getline(std::cin, str);
+    //***************************************************************
+    //  Testing finished. Tear down Voice and Video Engine.
+    //***************************************************************
+    // Tear down the VE first.
+    error = ptrVEBase->StopReceive(audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEBase->StopPlayout(audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrVEBase->DeleteChannel(audioChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    // Now tear down the ViE engine.
+    error = ptrViEBase->DisconnectAudioChannel(videoChannel);
+
+    // If Encoder/Decoder Observer is running, delete them.
+    if (codecEncoderObserver) {
+      error = ptrViECodec->DeregisterEncoderObserver(videoChannel);
+      delete codecEncoderObserver;
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                            __FUNCTION__, __LINE__);
+    }
+    if (codecDecoderObserver) {
+      error = ptrViECodec->DeregisterDecoderObserver(videoChannel);
+      delete codecDecoderObserver;
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                            __FUNCTION__, __LINE__);
+    }
+
+    error = ptrViEBase->StopReceive(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViEBase->StopSend(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERender->StopRender(captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+    error = ptrViERender->StopRender(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERender->RemoveRenderer(captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECapture->StopCapture(captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViEFile->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    remainingInterfaces = ptrViECodec->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    remainingInterfaces = ptrViECapture->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    remainingInterfaces = ptrViERtpRtcp->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    remainingInterfaces = ptrViERender->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    remainingInterfaces = ptrViENetwork->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    remainingInterfaces = ptrViEBase->Release();
+    numberOfErrors += ViETest::TestError(remainingInterfaces == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    bool deleted = webrtc::VideoEngine::Delete(ptrViE);
+    numberOfErrors += ViETest::TestError(deleted == true,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Custom Call Started");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+  }
+  return numberOfErrors;
+}
+
// Interactively selects a video capture device.
// Lists every device known to ViECapture on stdout, reads a 1-based
// selection from stdin, and copies the chosen device's name and unique
// id into the caller-supplied buffers. Pressing enter (or any
// non-numeric input, since atoi() then yields 0) selects the default
// (first) device. Loops until a valid selection is made; returns true.
// NOTE(review): the strcpy calls below are unbounded -- assumes the
// caller's buffers hold at least 128 (name) and 256 (unique id) bytes;
// confirm at call sites.
bool GetVideoDevice(webrtc::ViEBase* ptrViEBase,
                    webrtc::ViECapture* ptrViECapture,
                    char* captureDeviceName,
                    char* captureDeviceUniqueId) {
  int error = 0;
  // Accumulated locally but never returned; errors are only logged.
  int numberOfErrors = 0;
  int captureDeviceIndex = 0;
  std::string str;

  // Scratch buffers sized to match the GetCaptureDevice() API limits.
  const unsigned int KMaxDeviceNameLength = 128;
  const unsigned int KMaxUniqueIdLength = 256;
  char deviceName[KMaxDeviceNameLength];
  char uniqueId[KMaxUniqueIdLength];

  while (1) {
    memset(deviceName, 0, KMaxDeviceNameLength);
    memset(uniqueId, 0, KMaxUniqueIdLength);

    std::cout << std::endl;
    std::cout << "Available video capture devices:" << std::endl;
    int captureIdx = 0;
    // Print a 1-based menu of all capture devices.
    for (captureIdx = 0;
         captureIdx < ptrViECapture->NumberOfCaptureDevices();
         captureIdx++) {
      memset(deviceName, 0, KMaxDeviceNameLength);
      memset(uniqueId, 0, KMaxUniqueIdLength);

      error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
                                              KMaxDeviceNameLength,
                                              uniqueId,
                                              KMaxUniqueIdLength);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      std::cout << "   " << captureIdx+1 << ". " << deviceName
                << "/" << uniqueId
                << std::endl;
    }
    //  Get the devName of the default (or first) camera for display.
    error = ptrViECapture->GetCaptureDevice(0, deviceName,
                                            KMaxDeviceNameLength,
                                            uniqueId,
                                            KMaxUniqueIdLength);
    numberOfErrors += ViETest::TestError(error == 0,
                                         "ERROR: %s at line %d",
                                         __FUNCTION__, __LINE__);

    std::cout << "Choose a video capture device. Press enter for default ("
              << deviceName << "/" << uniqueId << "): ";
    std::getline(std::cin, str);
    // Empty/non-numeric input -> 0 -> default device.
    captureDeviceIndex = atoi(str.c_str());

    if (captureDeviceIndex == 0) {
      // Use the default (or first) camera.
      error = ptrViECapture->GetCaptureDevice(0, deviceName,
                                              KMaxDeviceNameLength,
                                              uniqueId,
                                              KMaxUniqueIdLength);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      strcpy(captureDeviceUniqueId, uniqueId);
      strcpy(captureDeviceName, deviceName);
      return true;
    } else if (captureDeviceIndex < 0
               || (captureDeviceIndex >
               (int)ptrViECapture->NumberOfCaptureDevices())) {
      // invalid selection -- show the menu again
      continue;
    } else {
      // Menu is 1-based; the API index is 0-based.
      error = ptrViECapture->GetCaptureDevice(captureDeviceIndex - 1,
                                              deviceName,
                                              KMaxDeviceNameLength,
                                              uniqueId,
                                              KMaxUniqueIdLength);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      strcpy(captureDeviceUniqueId, uniqueId);
      strcpy(captureDeviceName, deviceName);
      return true;
    }
  }
}
+
// Interactively selects the audio recording and playout devices.
// First prompts for a capture (recording) device, then for a playback
// device, printing a 1-based menu for each. Pressing enter (atoi() ==
// 0) picks the default (first) device. On return, the chosen names are
// written into the caller's buffers and the 0-based device indices
// into recordingDeviceIndex / playbackDeviceIndex. Returns true.
// NOTE(review): assumes the caller's name buffers hold at least 128
// bytes (GetRecordingDeviceName/GetPlayoutDeviceName write into them
// directly) -- confirm at call sites.
bool GetAudioDevices(webrtc::VoEBase* ptrVEBase,
                     webrtc::VoEHardware* ptrVEHardware,
                     char* recordingDeviceName,
                     int& recordingDeviceIndex,
                     char* playbackDeviceName,
                     int& playbackDeviceIndex) {
  int error = 0;
  // Accumulated locally but never returned; errors are only logged.
  int numberOfErrors = 0;
  std::string str;

  // Both constants are 128, so the mixed use of KMaxDeviceNameLength
  // in the memsets below is harmless (but worth keeping in sync).
  const unsigned int KMaxDeviceNameLength = 128;
  const unsigned int KMaxUniqueIdLength = 128;
  char recordingDeviceUniqueName[KMaxDeviceNameLength];
  char playbackDeviceUniqueName[KMaxUniqueIdLength];

  int numberOfRecordingDevices = -1;
  error = ptrVEHardware->GetNumOfRecordingDevices(numberOfRecordingDevices);
  numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                       __FUNCTION__, __LINE__);

  // --- Recording (capture) device selection -------------------------
  while (1) {
    recordingDeviceIndex = -1;
    std::cout << std::endl;
    std::cout << "Available audio capture devices:" << std::endl;
    int captureIdx = 0;

    for (captureIdx = 0; captureIdx < numberOfRecordingDevices;
         captureIdx++) {
      memset(recordingDeviceName, 0, KMaxDeviceNameLength);
      memset(recordingDeviceUniqueName, 0, KMaxDeviceNameLength);
      error = ptrVEHardware->GetRecordingDeviceName(
          captureIdx, recordingDeviceName, recordingDeviceUniqueName);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      std::cout << "   " << captureIdx+1 << ". " << recordingDeviceName
                << std::endl;
    }

    // The default shown is the last device printed by the loop above.
    std::cout << "Choose an audio capture device. Press enter for default("
              << recordingDeviceName << "): ";
    std::getline(std::cin, str);
    int captureDeviceIndex = atoi(str.c_str());

    if (captureDeviceIndex == 0) {
      // Use the default (or first) recording device.
      recordingDeviceIndex = 0;
      error = ptrVEHardware->GetRecordingDeviceName(
          recordingDeviceIndex, recordingDeviceName,
          recordingDeviceUniqueName);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      break;
    } else if (captureDeviceIndex < 0
               || captureDeviceIndex > numberOfRecordingDevices) {
      // invalid selection -- show the menu again
      continue;
    } else {
      // Menu is 1-based; the API index is 0-based.
      recordingDeviceIndex = captureDeviceIndex - 1;
      error = ptrVEHardware->GetRecordingDeviceName(
          recordingDeviceIndex, recordingDeviceName,
          recordingDeviceUniqueName);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      break;
    }
  }

  int numberOfPlaybackDevices = -1;
  error = ptrVEHardware->GetNumOfPlayoutDevices(numberOfPlaybackDevices);
  numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                       __FUNCTION__, __LINE__);

  // --- Playback (playout) device selection --------------------------
  while (1) {
    playbackDeviceIndex = -1;
    std::cout << std::endl;
    std::cout << "Available audio playout devices:" << std::endl;
    int captureIdx = 0;

    for (captureIdx = 0; captureIdx < numberOfPlaybackDevices;
         captureIdx++) {
      memset(playbackDeviceName, 0, KMaxDeviceNameLength);
      memset(playbackDeviceUniqueName, 0, KMaxDeviceNameLength);
      error = ptrVEHardware->GetPlayoutDeviceName(
          captureIdx, playbackDeviceName, playbackDeviceUniqueName);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      std::cout << "   " << captureIdx+1 << ". " << playbackDeviceName
                << std::endl;
    }

    std::cout <<
        "Choose an audio playback device. Press enter for default ("
        << playbackDeviceName << "): ";
    std::getline(std::cin, str);
    int captureDeviceIndex = atoi(str.c_str());

    if (captureDeviceIndex == 0) {
      // Use the default (or first) playout device.
      playbackDeviceIndex = 0;
      error = ptrVEHardware->GetPlayoutDeviceName(
          playbackDeviceIndex, playbackDeviceName,
          playbackDeviceUniqueName);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      return true;
    } else if (captureDeviceIndex < 0
               || captureDeviceIndex > numberOfPlaybackDevices) {
      // invalid selection -- show the menu again
      continue;
    } else {
      // Menu is 1-based; the API index is 0-based.
      playbackDeviceIndex = captureDeviceIndex - 1;
      error = ptrVEHardware->GetPlayoutDeviceName(
          playbackDeviceIndex, playbackDeviceName,
          playbackDeviceUniqueName);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      return true;
    }
  }
}
+
+// General helper functions.
+
+bool GetIPAddress(char* iIP) {
+  char oIP[16] = DEFAULT_SEND_IP;
+  std::string str;
+
+  while (1) {
+    std::cout << std::endl;
+    std::cout << "Enter destination IP. Press enter for default ("
+              << oIP << "): ";
+    std::getline(std::cin, str);
+
+    if (str.compare("") == 0) {
+      // use default value;
+      strcpy(iIP, oIP);
+      return true;
+    }
+    if (ValidateIP(str) == false) {
+      std::cout << "Invalid entry. Try again." << std::endl;
+      continue;
+    }
+    // done. Copy std::string to c_string and return
+    strcpy(iIP, str.c_str());
+    return true;
+  }
+  assert(false);
+  return false;
+}
+
// Accepts a candidate destination address; rejects only the empty
// string.
// NOTE(review): despite the name, no syntactic IP checking is done --
// hostnames and malformed strings all pass.
bool ValidateIP(std::string iStr) {
  return !iStr.empty();
}
+
+// Video settings functions.
+
+bool SetVideoPorts(int* txPort, int* rxPort) {
+  std::string str;
+  int port = 0;
+
+  // set to default values
+  *txPort = DEFAULT_VIDEO_PORT;
+  *rxPort = DEFAULT_VIDEO_PORT;
+
+  while (1) {
+    std::cout << "Enter video send port. Press enter for default ("
+              << *txPort << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // default value
+      break;
+    } else {
+      // user selection
+      if (port <= 0 || port > 63556) {
+        // invalid selection
+        continue;
+      } else {
+        *txPort = port;
+        break; // move on to rxport
+      }
+    }
+  }
+
+  while (1) {
+    std::cout << "Enter video receive port. Press enter for default ("
+              << *rxPort << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // default value
+      return true;
+    } else {
+      // user selection
+      if (port <= 0 || port > 63556) {
+        // invalid selection
+        continue;
+      } else {
+        *rxPort = port;
+        return true;
+      }
+    }
+  }
+  assert(false);
+  return false;
+}
+
+// Audio settings functions.
+
+bool GetAudioPorts(int* txPort, int* rxPort) {
+  int port = 0;
+  std::string str;
+
+  // set to default values
+  *txPort = DEFAULT_AUDIO_PORT;
+  *rxPort = DEFAULT_AUDIO_PORT;
+
+  while (1) {
+    std::cout << "Enter audio send port. Press enter for default ("
+              << *txPort << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // default value
+      break;
+    } else {
+       // user selection
+       if (port <= 0 || port > 63556) {
+         // invalid selection
+         continue;
+       } else {
+         *txPort = port;
+         break; // move on to rxport
+       }
+     }
+  }
+
+  while (1) {
+    std::cout << "Enter audio receive port. Press enter for default ("
+              << *rxPort << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // default value
+      return true;
+    } else {
+      // user selection
+      if (port <= 0 || port > 63556) {
+        // invalid selection
+        continue;
+      } else {
+        *rxPort = port;
+        return true;
+      }
+    }
+  }
+  assert(false);
+  return false;
+}
+
// Interactively selects an audio codec.
// Prints a 1-based menu of all codecs known to VoECodec, reads a
// selection from stdin, and fills in audioCodec with the chosen
// codec's settings. Pressing enter (atoi() == 0) selects the codec
// whose payload name matches DEFAULT_AUDIO_CODEC (index 0 if no name
// matches). Loops until GetCodec() accepts the selection; returns
// true.
bool GetAudioCodec(webrtc::VoECodec* ptrVeCodec,
                   webrtc::CodecInst& audioCodec) {
  int error = 0;
  // Accumulated locally but never returned; errors are only logged.
  int numberOfErrors = 0;
  int codecSelection = 0;
  std::string str;
  memset(&audioCodec, 0, sizeof(webrtc::CodecInst));

  while (1) {
    std::cout << std::endl;
    std::cout << "Available audio codecs:" << std::endl;
    int codecIdx = 0;
    int defaultCodecIdx = 0;
    // Print the menu; audioCodec is reused as scratch for each entry.
    for (codecIdx = 0; codecIdx < ptrVeCodec->NumOfCodecs(); codecIdx++) {
      error = ptrVeCodec->GetCodec(codecIdx, audioCodec);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);

      // test for default codec index
      if (strcmp(audioCodec.plname, DEFAULT_AUDIO_CODEC) == 0) {
        defaultCodecIdx = codecIdx;
      }
      std::cout << "   " << codecIdx+1 << ". " << audioCodec.plname
                << std::endl;
    }
    std::cout << std::endl;
    std::cout << "Choose audio codec. Press enter for default ("
              << DEFAULT_AUDIO_CODEC << "):  ";
    std::getline(std::cin, str);
    codecSelection = atoi(str.c_str());

    if (codecSelection == 0) {
      // use default
      error = ptrVeCodec->GetCodec(defaultCodecIdx, audioCodec);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      return true;
    } else {
      // user selection (menu is 1-based; the API index is 0-based)
      codecSelection = atoi(str.c_str())-1;
      error = ptrVeCodec->GetCodec(codecSelection, audioCodec);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
      if (error != 0) {
        // Out-of-range selection: report and show the menu again.
        std::cout << "ERROR: Code = " << error << " Invalid selection"
                  << std::endl;
        continue;
      }
      return true;
    }
  }
  // Unreachable: the loop only exits via return.
  assert(false);
  return false;
}
+
// Prints a human-readable summary of the current call configuration:
// destination IP, video capture device (name + unique id), video codec
// (via PrintVideoCodec), video/audio ports, protection method, audio
// devices, and audio codec. Output goes to stdout; no state is
// modified.
void PrintCallInformation(char* IP, char* videoCaptureDeviceName,
                          char* videoCaptureUniqueId,
                          webrtc::VideoCodec videoCodec,
                          int videoTxPort, int videoRxPort,
                          char* audioCaptureDeviceName,
                          char* audioPlaybackDeviceName,
                          webrtc::CodecInst audioCodec,
                          int audioTxPort, int audioRxPort,
                          int protectionMethod) {
  std::string str;

  std::cout << "************************************************"
            << std::endl;
  std::cout << "The call has the following settings: " << std::endl;
  std::cout << "\tIP: " << IP << std::endl;
  std::cout << "\tVideo Capture Device: " << videoCaptureDeviceName
            << std::endl;
  std::cout << "\t\tName: " << videoCaptureDeviceName << std::endl;
  std::cout << "\t\tUniqueId: " << videoCaptureUniqueId << std::endl;
  // Codec details (name, size, bitrates, ...) are printed by a helper.
  PrintVideoCodec(videoCodec);
  std::cout << "\t Video Tx Port: " << videoTxPort << std::endl;
  std::cout << "\t Video Rx Port: " << videoRxPort << std::endl;
  // protectionMethod is the numeric code used by SetVideoProtection.
  std::cout << "\t Video Protection Method: " << protectionMethod
            << std::endl;
  std::cout << "\tAudio Capture Device: " << audioCaptureDeviceName
            << std::endl;
  std::cout << "\tAudio Playback Device: " << audioPlaybackDeviceName
            << std::endl;
  std::cout << "\tAudio Codec: " << std::endl;
  std::cout << "\t\tplname: " << audioCodec.plname << std::endl;
  std::cout << "\t\tpltype: " << (int)audioCodec.pltype << std::endl;
  std::cout << "\t Audio Tx Port: " << audioTxPort << std::endl;
  std::cout << "\t Audio Rx Port: " << audioRxPort << std::endl;
  std::cout << "************************************************"
            << std::endl;
}
+
// Interactively selects a video codec type.
// Prints a 1-based menu of all codecs known to ViECodec, reads a
// selection from stdin, and fills in videoCodec with the chosen
// codec's settings. Pressing enter (atoi() == 0) selects the codec
// whose payload name matches DEFAULT_VIDEO_CODEC (index 0 if no name
// matches). Loops until GetCodec() accepts the selection. I420 gets a
// fixed QCIF size since it is uncompressed. Returns true.
bool SetVideoCodecType(webrtc::ViECodec* ptrViECodec,
                       webrtc::VideoCodec& videoCodec) {
  int error = 0;
  // Accumulated locally but never returned; errors are only logged.
  int numberOfErrors = 0;
  int codecSelection = 0;
  std::string str;
  memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));

  bool exitLoop=false;
  while (!exitLoop) {
    std::cout << std::endl;
    std::cout << "Available video codecs:" << std::endl;
    int codecIdx = 0;
    int defaultCodecIdx = 0;
    // Print out all the codecs available to set Codec to.
    for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++) {
      error = ptrViECodec->GetCodec(codecIdx, videoCodec);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                            __FUNCTION__, __LINE__);
      // test for default codec index
      if (strcmp(videoCodec.plName, DEFAULT_VIDEO_CODEC) == 0) {
        defaultCodecIdx = codecIdx;
      }
      std::cout << "   " << codecIdx+1 << ". " << videoCodec.plName
                << std::endl;
    }
    std::cout << std::endl;
    std::cout << "Choose video codec. Press enter for default ("
              << DEFAULT_VIDEO_CODEC << "):  ";
    std::getline(std::cin, str);
    codecSelection = atoi(str.c_str());
    if (codecSelection == 0) {
      // use default
      error = ptrViECodec->GetCodec(defaultCodecIdx, videoCodec);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                            __FUNCTION__, __LINE__);
      exitLoop=true;
    } else {
      // user selection (menu is 1-based; the API index is 0-based)
      codecSelection = atoi(str.c_str())-1;
      error = ptrViECodec->GetCodec(codecSelection, videoCodec);
      numberOfErrors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                            __FUNCTION__, __LINE__);
      if (error != 0) {
        // Out-of-range selection: report and show the menu again.
        std::cout << "ERROR: Code=" << error << " Invalid selection"
                  << std::endl;
        continue;
      }
      exitLoop=true;
    }
  }
  // I420 is raw video; force QCIF to keep the bandwidth manageable.
  if (videoCodec.codecType == webrtc::kVideoCodecI420) {
    videoCodec.width = 176;
    videoCodec.height = 144;
  }
  return true;
}
+
+bool SetVideoCodecResolution(webrtc::ViECodec* ptrViECodec,
+                             webrtc::VideoCodec& videoCodec) {
+  std::string str;
+  int sizeOption = 5;
+
+  if (videoCodec.codecType == webrtc::kVideoCodecVP8) {
+    std::cout << std::endl;
+    std::cout << "Available Common Resolutions : " << std::endl;
+    std::cout << "  1. SQCIF (128X96) " << std::endl;
+    std::cout << "  2. QQVGA (160X120) " << std::endl;
+    std::cout << "  3. QCIF (176X144) " << std::endl;
+    std::cout << "  4. CIF  (352X288) " << std::endl;
+    std::cout << "  5. VGA  (640X480) " << std::endl;
+    std::cout << "  6. WVGA (800x480) " << std::endl;
+    std::cout << "  7. 4CIF (704X576) " << std::endl;
+    std::cout << "  8. SVGA (800X600) " << std::endl;
+    std::cout << "  9. HD   (1280X720) " << std::endl;
+    std::cout << " 10. XGA  (1024x768) " << std::endl;
+    std::cout << "Enter frame size option: " << std::endl;
+
+    std::getline(std::cin, str);
+    sizeOption = atoi(str.c_str());
+
+    switch (sizeOption) {
+      case 1:
+        videoCodec.width = 128;
+        videoCodec.height = 96;
+        break;
+      case 2:
+        videoCodec.width = 160;
+        videoCodec.height = 120;
+        break;
+      case 3:
+        videoCodec.width = 176;
+        videoCodec.height = 144;
+        break;
+      case 4:
+        videoCodec.width = 352;
+        videoCodec.height = 288;
+        break;
+      case 5:
+        videoCodec.width = 640;
+        videoCodec.height = 480;
+        break;
+      case 6:
+        videoCodec.width = 800;
+        videoCodec.height = 480;
+        break;
+      case 7:
+        videoCodec.width = 704;
+        videoCodec.height = 576;
+        break;
+      case 8:
+        videoCodec.width = 800;
+        videoCodec.height = 600;
+        break;
+      case 9:
+        videoCodec.width = 1280;
+        videoCodec.height = 720;
+        break;
+      case 10:
+        videoCodec.width = 1024;
+        videoCodec.height = 768;
+        break;
+    }
+  } else {
+    std::cout << "Can Only change codec size if it's VP8" << std::endl;
+  }
+  return true;
+}
+
+bool SetVideoCodecSize(webrtc::ViECodec* ptrViECodec,
+                       webrtc::VideoCodec& videoCodec) {
+  if (videoCodec.codecType == webrtc::kVideoCodecVP8) {
+    std::string str;
+    videoCodec.width = DEFAULT_VIDEO_CODEC_WIDTH;
+    videoCodec.height = DEFAULT_VIDEO_CODEC_HEIGHT;
+    std::cout << "Choose video width. Press enter for default ("
+              << DEFAULT_VIDEO_CODEC_WIDTH << "):  ";
+    std::getline(std::cin, str);
+    int sizeSelection = atoi(str.c_str());
+    if (sizeSelection!=0) {
+      videoCodec.width=sizeSelection;
+    }
+    std::cout << "Choose video height. Press enter for default ("
+              << DEFAULT_VIDEO_CODEC_HEIGHT << "):  ";
+    std::getline(std::cin, str);
+    sizeSelection = atoi(str.c_str());
+    if (sizeSelection!=0) {
+      videoCodec.height=sizeSelection;
+    }
+  } else {
+    std::cout << "Can Only change codec size if it's VP8" << std::endl;
+  }
+  return true;
+}
+
+bool SetVideoCodecBitrate(webrtc::ViECodec* ptrViECodec,
+                          webrtc::VideoCodec& videoCodec) {
+  std::string str;
+  std::cout << std::endl;
+  std::cout << "Choose start rate (in kbps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_BITRATE << "):  ";
+  std::getline(std::cin, str);
+  int startRate = atoi(str.c_str());
+  videoCodec.startBitrate = DEFAULT_VIDEO_CODEC_BITRATE;
+  if (startRate != 0) {
+    videoCodec.startBitrate = startRate;
+  }
+  return true;
+}
+
+// Interactively sets the maximum bitrate (kbps). An empty or
+// non-numeric entry keeps DEFAULT_VIDEO_CODEC_MAX_BITRATE.
+// Always returns true.
+bool SetVideoCodecMaxBitrate(webrtc::ViECodec* ptrViECodec,
+                             webrtc::VideoCodec& videoCodec) {
+  std::cout << std::endl;
+  std::cout << "Choose max bitrate (in kbps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_MAX_BITRATE << "):  ";
+  std::string input;
+  std::getline(std::cin, input);
+  videoCodec.maxBitrate = DEFAULT_VIDEO_CODEC_MAX_BITRATE;
+  const int chosenRate = atoi(input.c_str());
+  if (chosenRate != 0) {
+    videoCodec.maxBitrate = chosenRate;
+  }
+  return true;
+}
+
+// Interactively sets the minimum bitrate (kbps). An empty or
+// non-numeric entry keeps DEFAULT_VIDEO_CODEC_MIN_BITRATE.
+// Always returns true.
+bool SetVideoCodecMinBitrate(webrtc::ViECodec* ptrViECodec,
+                             webrtc::VideoCodec& videoCodec) {
+  std::string str;
+  std::cout << std::endl;
+  // Bug fix: the prompt previously said "in fps" for a bitrate value.
+  std::cout << "Choose min bitrate (in kbps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_MIN_BITRATE << "):  ";
+  std::getline(std::cin, str);
+  // Bug fix: the value was stored in a char, which silently truncated
+  // any entry above 127 (e.g. 300 kbps became 44).
+  int minBitRate = atoi(str.c_str());
+  videoCodec.minBitrate = DEFAULT_VIDEO_CODEC_MIN_BITRATE;
+  if (minBitRate != 0) {
+    videoCodec.minBitrate = minBitRate;
+  }
+  return true;
+}
+
+// Interactively sets the maximum framerate (fps). An empty or
+// non-numeric entry keeps DEFAULT_VIDEO_CODEC_MAX_FRAMERATE.
+// Always returns true.
+bool SetVideoCodecMaxFramerate(webrtc::ViECodec* ptrViECodec,
+                               webrtc::VideoCodec& videoCodec) {
+  std::string str;
+  std::cout << std::endl;
+  std::cout << "Choose max framerate (in fps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_MAX_FRAMERATE << "):  ";
+  std::getline(std::cin, str);
+  // Bug fix: the value was stored in a char, which truncated entries
+  // above 127 before the zero check ran.
+  int maxFrameRate = atoi(str.c_str());
+  videoCodec.maxFramerate = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
+  if (maxFrameRate != 0) {
+    videoCodec.maxFramerate = maxFrameRate;
+  }
+  return true;
+}
+
+// Interactively sets the number of VP8 temporal layers (1 to 4). An
+// empty or out-of-range entry keeps DEFAULT_TEMPORAL_LAYER. No-op for
+// non-VP8 codecs. Always returns true.
+bool SetVideoCodecTemporalLayer(webrtc::VideoCodec& videoCodec) {
+  if (videoCodec.codecType == webrtc::kVideoCodecVP8) {
+    std::string str;
+    std::cout << std::endl;
+    std::cout << "Choose number of temporal layers (1 to 4). "
+              << "Press enter for default ("
+              << DEFAULT_TEMPORAL_LAYER << "):  ";
+    std::getline(std::cin, str);
+    // Bug fix: the value was read into a char (truncation risk), and
+    // any non-zero entry was accepted even though the prompt advertises
+    // a 1-4 range. Enforce the range; anything else keeps the default.
+    int numTemporalLayers = atoi(str.c_str());
+    videoCodec.codecSpecific.VP8.numberOfTemporalLayers
+        = DEFAULT_TEMPORAL_LAYER;
+    if (numTemporalLayers >= 1 && numTemporalLayers <= 4) {
+      videoCodec.codecSpecific.VP8.numberOfTemporalLayers
+          = numTemporalLayers;
+    }
+  }
+  return true;
+}
+// GetVideoProtection only prints the prompt to get a number
+// that SetVideoProtection method uses
+// 0 = None
+// 1 = FEC
+// 2 = NACK
+// 3 = NACK + FEC (aka Hybrid)
+// Default = DEFAULT_VIDEO_PROTECTION_METHOD
+int GetVideoProtection() {
+  int protectionMethod = DEFAULT_VIDEO_PROTECTION_METHOD;
+
+  std::cout << "Available Video Protection Method." << std::endl;
+  std::cout << "  0. None" << std::endl;
+  std::cout << "  1. FEC" << std::endl;
+  std::cout << "  2. NACK" << std::endl;
+  std::cout << "  3. NACK+FEC" << std::endl;
+  std::cout << "Enter Video Protection Method. "
+            << "Press enter for default (" << protectionMethod << "):"
+            << std::endl;
+  std::string method;
+  std::getline(std::cin, method);
+  if (method.empty()) {
+    // Bug fix: atoi("") returns 0, so pressing enter used to select
+    // "None" instead of the advertised default.
+    return protectionMethod;
+  }
+  int selection = atoi(method.c_str());
+  if (selection < 0 || selection > 3) {
+    // Out-of-range entries also fall back to the default.
+    return protectionMethod;
+  }
+  return selection;
+}
+
+// Configures the channel's video protection and, for the FEC modes,
+// registers the RED and ULPFEC receive codecs.
+// protectionMethod: 0 = none, 1 = FEC, 2 = NACK, 3 = hybrid NACK+FEC
+// (see GetVideoProtection). Any other value leaves all protection off.
+// Always returns true; API failures are only reported via
+// ViETest::TestError.
+bool SetVideoProtection(webrtc::ViECodec* ptrViECodec,
+                        webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                        int videoChannel, int protectionMethod) {
+  int error = 0;
+  int numberOfErrors = 0;
+  webrtc::VideoCodec videoCodec;
+
+  memset((void*)&videoCodec, 0, sizeof(webrtc::VideoCodec));
+
+  // Set all video protection to false initially
+  error = ptrViERtpRtcp->SetHybridNACKFECStatus(videoChannel, false,
+                                                VCM_RED_PAYLOAD_TYPE,
+                                                VCM_ULPFEC_PAYLOAD_TYPE);
+  numberOfErrors += ViETest::TestError(error == 0,
+                                       "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+  error = ptrViERtpRtcp->SetFECStatus(videoChannel, false,
+                                      VCM_RED_PAYLOAD_TYPE,
+                                      VCM_ULPFEC_PAYLOAD_TYPE);
+  numberOfErrors += ViETest::TestError(error == 0,
+                                       "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+  error = ptrViERtpRtcp->SetNACKStatus(videoChannel, false);
+  numberOfErrors += ViETest::TestError(error == 0,
+                                       "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+  // Set video protection for FEC, NACK or Hybrid
+  switch (protectionMethod) {
+    case 0: // None
+      // No protection selected, all protection already at false
+      std::cout << "Call using None protection Method"
+                << std::endl;
+      break;
+    case 1: // FEC only
+      std::cout << "Call using FEC protection Method"
+                << std::endl;
+      error = ptrViERtpRtcp->SetFECStatus(videoChannel, true,
+                                          VCM_RED_PAYLOAD_TYPE,
+                                          VCM_ULPFEC_PAYLOAD_TYPE);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                          "ERROR: %s at line %d",
+                                          __FUNCTION__, __LINE__);
+      break;
+    case 2: // NACK only
+      std::cout << "Call using NACK protection Method"
+                << std::endl;
+      error = ptrViERtpRtcp->SetNACKStatus(videoChannel, true);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+      break;
+    case 3: // Hybrid NACK and FEC
+      std::cout << "Call using Hybrid NACK and FEC protection Method"
+                << std::endl;
+      error = ptrViERtpRtcp->SetHybridNACKFECStatus(videoChannel, true,
+                                                    VCM_RED_PAYLOAD_TYPE,
+                                                    VCM_ULPFEC_PAYLOAD_TYPE);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+      break;
+  }
+
+  // Set receive codecs for FEC and hybrid NACK/FEC
+  if (protectionMethod == 1 || protectionMethod == 3) {
+    // RED
+    // NOTE(review): assumes RED is the second-to-last entry in the
+    // codec list returned by NumberOfCodecs() - confirm this ordering.
+    error = ptrViECodec->GetCodec(ptrViECodec->NumberOfCodecs() - 2,
+                                  videoCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
+    error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    std::cout << "RED Codec Information:" << std::endl;
+    PrintVideoCodec(videoCodec);
+    // ULPFEC
+    // NOTE(review): likewise assumes ULPFEC is the last codec entry.
+    error = ptrViECodec->GetCodec(ptrViECodec->NumberOfCodecs() - 1,
+                                  videoCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+    error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+    numberOfErrors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+    std::cout << "ULPFEC Codec Information:" << std::endl;
+    PrintVideoCodec(videoCodec);
+  }
+
+
+
+  return true;
+}
+
+// Asks the user which bitrate signaling method to use.
+// Returns true for REMB (the default, also chosen on empty or
+// non-numeric input) and false for TMMBR (entry "1").
+bool GetBitrateSignaling() {
+  std::cout << std::endl;
+  std::cout << "Available bitrate signaling methods." << std::endl;
+  std::cout << "  0. REMB" << std::endl;
+  std::cout << "  1. TMMBR" << std::endl;
+  std::cout << "Enter bitrate signaling methods. "
+            << "Press enter for default (REMB): " << std::endl;
+  std::string choice;
+  std::getline(std::cin, choice);
+  return atoi(choice.c_str()) != 1;
+}
+
+// Fetches and prints RTCP statistics for the channel: fraction lost,
+// cumulative loss, extended max sequence number, jitter and RTT.
+// statType selects received-report vs sent-report counters.
+// If the API call fails, the zero-initialized values are printed anyway.
+void PrintRTCCPStatistics(webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                          int videoChannel, StatisticsType statType) {
+  int error = 0;
+  int numberOfErrors =0;
+  unsigned short fractionLost = 0;
+  unsigned int cumulativeLost = 0;
+  unsigned int extendedMax = 0;
+  unsigned int jitter = 0;
+  int rttMS = 0;
+
+  switch (statType) {
+    case kReceivedStatistic:
+      std::cout << "RTCP Received statistics"
+                << std::endl;
+      // Get and print the Received RTCP Statistics
+      error = ptrViERtpRtcp->GetReceivedRTCPStatistics(videoChannel,
+                                                       fractionLost,
+                                                       cumulativeLost,
+                                                       extendedMax,
+                                                       jitter, rttMS);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+      break;
+    case kSendStatistic:
+      std::cout << "RTCP Sent statistics"
+                << std::endl;
+      // Get and print the Sent RTCP Statistics
+      error = ptrViERtpRtcp->GetSentRTCPStatistics(videoChannel, fractionLost,
+                                                   cumulativeLost, extendedMax,
+                                                   jitter, rttMS);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+      break;
+  }
+  std::cout << "\tRTCP fraction of lost packets: "
+            << fractionLost << std::endl;
+  std::cout << "\tRTCP cumulative number of lost packets: "
+            << cumulativeLost << std::endl;
+  std::cout << "\tRTCP max received sequence number "
+            << extendedMax << std::endl;
+  std::cout << "\tRTCP jitter: "
+            << jitter << std::endl;
+  std::cout << "\tRTCP round trip (ms): "
+            << rttMS<< std::endl;
+}
+
+// Fetches and prints the RTP traffic counters (bytes/packets in each
+// direction) for the given channel. Failures are reported through
+// ViETest::TestError; the zero-initialized counters are printed anyway.
+void PrintRTPStatistics(webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                        int videoChannel) {
+  unsigned int sentBytes = 0;
+  unsigned int sentPackets = 0;
+  unsigned int receivedBytes = 0;
+  unsigned int receivedPackets = 0;
+  int numberOfErrors = 0;
+
+  std::cout << "RTP statistics" << std::endl;
+
+  int error = ptrViERtpRtcp->GetRTPStatistics(videoChannel, sentBytes,
+                                              sentPackets, receivedBytes,
+                                              receivedPackets);
+  numberOfErrors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+  std::cout << "\tRTP bytes sent: " << sentBytes << std::endl;
+  std::cout << "\tRTP packets sent: " << sentPackets << std::endl;
+  std::cout << "\tRTP bytes received: " << receivedBytes << std::endl;
+  std::cout << "\tRTP packets received: " << receivedPackets << std::endl;
+}
+
+// Fetches and prints the send-side bandwidth usage for the channel:
+// total/video/FEC/NACK bitrates plus the FEC and NACK shares of the
+// total (in percent).
+void PrintBandwidthUsage(webrtc::ViERTP_RTCP* ptrViERtpRtcp,
+                         int videoChannel) {
+  int error = 0;
+  int numberOfErrors = 0;
+  unsigned int totalBitrateSent = 0;
+  unsigned int videoBitrateSent = 0;
+  unsigned int fecBitrateSent = 0;
+  unsigned int nackBitrateSent = 0;
+  double percentageFEC = 0;
+  double percentageNACK = 0;
+
+  std::cout << "Bandwidth Usage"
+            << std::endl;
+
+  // Get and print Bandwidth usage
+  error = ptrViERtpRtcp->GetBandwidthUsage(videoChannel, totalBitrateSent,
+                                           videoBitrateSent, fecBitrateSent,
+                                           nackBitrateSent);
+  numberOfErrors += ViETest::TestError(error == 0,
+                                       "ERROR: %s at line %d",
+                                       __FUNCTION__, __LINE__);
+  std::cout << "\tTotal bitrate sent (Kbit/s): "
+            << totalBitrateSent << std::endl;
+  std::cout << "\tVideo bitrate sent (Kbit/s): "
+            << videoBitrateSent << std::endl;
+  std::cout << "\tFEC bitrate sent (Kbit/s): "
+            << fecBitrateSent << std::endl;
+  // Bug fix: guard against division by zero when nothing has been sent
+  // yet; report 0% instead of printing NaN.
+  if (totalBitrateSent > 0) {
+    percentageFEC = ((double)fecBitrateSent/(double)totalBitrateSent) * 100;
+    percentageNACK = ((double)nackBitrateSent/(double)totalBitrateSent) * 100;
+  }
+  std::cout << "\tPercentage FEC bitrate sent from total bitrate: "
+            << percentageFEC << std::endl;
+  std::cout << "\tNACK bitrate sent (Kbit/s): "
+            << nackBitrateSent << std::endl;
+  std::cout << "\tPercentage NACK bitrate sent from total bitrate: "
+            << percentageNACK << std::endl;
+}
+
+// Fetches and prints codec frame counters (encoded key frames and delta
+// frames) for the channel. statType selects the receive-side or
+// send-side counters. If the call fails, the zeroed values are printed.
+// Note: "Stastistics" below is the actual (misspelled) project API name.
+void PrintCodecStatistics(webrtc::ViECodec* ptrViECodec, int videoChannel,
+                          StatisticsType statType) {
+  int error = 0;
+  int numberOfErrors = 0;
+  unsigned int keyFrames = 0;
+  unsigned int deltaFrames = 0;
+  switch(statType) {
+    case kReceivedStatistic:
+      std::cout << "Codec Receive statistics"
+                << std::endl;
+      // Get and print the Receive Codec Statistics
+      error = ptrViECodec->GetReceiveCodecStastistics(videoChannel, keyFrames,
+                                                     deltaFrames);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+      break;
+    case kSendStatistic:
+      std::cout << "Codec Send statistics"
+                << std::endl;
+      // Get and print the Send Codec Statistics
+      error = ptrViECodec->GetSendCodecStastistics(videoChannel, keyFrames,
+                                                  deltaFrames);
+      numberOfErrors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+      break;
+  }
+  std::cout << "\tNumber of encoded key frames: "
+            << keyFrames << std::endl;
+  std::cout << "\tNumber of encoded delta frames: "
+            << deltaFrames << std::endl;
+}
+
+// Prints the number of packets the decoder discarded on this channel.
+void PrintGetDiscardedPackets(webrtc::ViECodec* ptrViECodec, int videoChannel) {
+  std::cout << "Discarded Packets" << std::endl;
+  const int discardedPackets =
+      ptrViECodec->GetDiscardedPackets(videoChannel);
+  std::cout << "\tNumber of discarded packets: " << discardedPackets
+            << std::endl;
+}
+
+// Prints the send and receive codec configurations for a channel,
+// framed by separator lines. Return codes from GetSendCodec /
+// GetReceiveCodec are ignored here; on failure the zero-initialized
+// structs are printed instead.
+void PrintVideoStreamInformation(webrtc::ViECodec* ptrViECodec,
+                                 int videoChannel) {
+  webrtc::VideoCodec outgoingCodec;
+  webrtc::VideoCodec incomingCodec;
+
+  memset((void*)&outgoingCodec, 0, sizeof(webrtc::VideoCodec));
+  memset((void*)&incomingCodec, 0, sizeof(webrtc::VideoCodec));
+
+  ptrViECodec->GetSendCodec(videoChannel, outgoingCodec);
+  ptrViECodec->GetReceiveCodec(videoChannel, incomingCodec);
+
+  std::cout << "************************************************"
+            << std::endl;
+  std::cout << "ChannelId: " << videoChannel << std::endl;
+  std::cout << "Outgoing Stream information:" <<std::endl;
+  PrintVideoCodec(outgoingCodec);
+  std::cout << "Incoming Stream information:" <<std::endl;
+  PrintVideoCodec(incomingCodec);
+  std::cout << "************************************************"
+            << std::endl;
+}
+
+// Dumps the fields of a VideoCodec, one per line: payload name/type,
+// resolution, the three bitrate bounds and the max framerate. For VP8
+// the configured number of temporal layers is printed as well.
+void PrintVideoCodec(webrtc::VideoCodec videoCodec) {
+  std::cout << "\t\tplName: " << videoCodec.plName << std::endl;
+  // Cast to int: plType is a small integer type that would otherwise
+  // print as a character.
+  std::cout << "\t\tplType: " << (int)videoCodec.plType << std::endl;
+  std::cout << "\t\twidth: " << videoCodec.width << std::endl;
+  std::cout << "\t\theight: " << videoCodec.height << std::endl;
+  std::cout << "\t\tstartBitrate: " << videoCodec.startBitrate
+            << std::endl;
+  std::cout << "\t\tminBitrate: " << videoCodec.minBitrate
+            << std::endl;
+  std::cout << "\t\tmaxBitrate: " << videoCodec.maxBitrate
+            << std::endl;
+  std::cout << "\t\tmaxFramerate: " << (int)videoCodec.maxFramerate
+            << std::endl;
+  if (videoCodec.codecType == webrtc::kVideoCodecVP8) {
+    std::cout << "\t\tVP8 Temporal Layer: "
+              << (int)videoCodec.codecSpecific.VP8.numberOfTemporalLayers
+              << std::endl;
+  }
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_encryption.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_encryption.cc
new file mode 100644
index 0000000..0c06f32
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_encryption.cc
@@ -0,0 +1,567 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_encryption.cc
+//
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "tb_capture_device.h"
+#include "tb_external_transport.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+
+// Test-only "encryption" used to exercise the external-encryption
+// hooks: each payload byte is bitwise-inverted and two padding bytes
+// are appended on the encrypt side; the decrypt side inverts back and
+// strips the padding. Not real cryptography - it only verifies that the
+// hooks are invoked on both the RTP and RTCP paths.
+class ViEAutotestEncryption: public webrtc::Encryption
+{
+public:
+    ViEAutotestEncryption()
+    {
+    }
+    ~ViEAutotestEncryption()
+    {
+    }
+
+    // Inverts bytes_in payload bytes and reports two extra padding bytes.
+    virtual void encrypt(int channel_no, unsigned char * in_data,
+                         unsigned char * out_data, int bytes_in, int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        // Bug fix: the two padding bytes were counted in *bytes_out but
+        // never written, so uninitialized memory was sent on the wire.
+        out_data[bytes_in] = 0;
+        out_data[bytes_in + 1] = 0;
+        *bytes_out = bytes_in + 2;
+    }
+
+    // Inverts the payload back and drops the two trailing padding bytes.
+    virtual void decrypt(int channel_no, unsigned char * in_data,
+                         unsigned char * out_data, int bytes_in, int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in - 2; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        *bytes_out = bytes_in - 2;
+    }
+
+    // RTCP variant of encrypt(); same invert-and-pad scheme.
+    virtual void encrypt_rtcp(int channel_no, unsigned char * in_data,
+                              unsigned char * out_data, int bytes_in,
+                              int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        // Bug fix: initialize the padding bytes (see encrypt()).
+        out_data[bytes_in] = 0;
+        out_data[bytes_in + 1] = 0;
+        *bytes_out = bytes_in + 2;
+    }
+
+    // RTCP variant of decrypt(); inverts back and strips the padding.
+    virtual void decrypt_rtcp(int channel_no, unsigned char * in_data,
+                              unsigned char * out_data, int bytes_in,
+                              int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in - 2; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        *bytes_out = bytes_in - 2;
+    }
+};
+
+// Standard encryption test on a loopback VP8 channel: when built with
+// WEBRTC_SRTP, runs SRTP in encryption-only, authentication-only and
+// full-protection modes; then registers/deregisters an external
+// encryption implementation. Visual verification - the operator should
+// keep seeing video throughout.
+void ViEAutoTest::ViEEncryptionStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViEEncryptionStandardTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    tbChannel.StartReceive();
+
+    tbChannel.StartSend();
+
+    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+#ifdef WEBRTC_SRTP
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+    //
+    // SRTP
+    //
+    // 30-byte master key shared by send and receive sides.
+    unsigned char srtpKey1[30] =
+    {   0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3,
+        4, 5, 6, 7, 8, 9};
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+    ViETest::Log("SRTP encryption only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+
+    ViETest::Log("SRTP authentication only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+
+    ViETest::Log("SRTP full protection");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+#endif  // WEBRTC_SRTP
+
+    //
+    // External encryption
+    //
+    ViEAutotestEncryption testEncryption;
+    // Note(qhogpat): StartSend fails, not sure if this is intentional.
+    // NOTE(review): presumably fails because the channel is already
+    // sending (StartSend was called above) - confirm against the API.
+    EXPECT_NE(0, ViE.base->StartSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    ViETest::Log(
+        "External encryption/decryption added, you should still see video");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+// Extended encryption test on a loopback VP8 channel. When built with
+// WEBRTC_SRTP it additionally covers NULL cipher/auth, a deliberate
+// receive/send key mismatch (remote video should disappear), re-keying
+// both sides, and disabling receive while send stays on. Finishes with
+// the external encryption register/deregister cycle. Results are
+// verified visually by the operator.
+void ViEAutoTest::ViEEncryptionExtendedTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViEEncryptionExtendedTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    tbChannel.StartReceive();
+    tbChannel.StartSend();
+
+    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+#ifdef WEBRTC_SRTP
+
+    //
+    // SRTP
+    //
+    // Two distinct 30-byte master keys; key 2 is used to provoke a
+    // mismatch and then to re-key both directions.
+    unsigned char srtpKey1[30] =
+    {   0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3,
+        4, 5, 6, 7, 8, 9};
+    unsigned char srtpKey2[30] =
+    {   9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 9, 8, 7, 6,
+        5, 4, 3, 2, 1, 0};
+    // NULL
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey1));
+
+    ViETest::Log("SRTP NULL encryption/authentication");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+
+    ViETest::Log("SRTP encryption only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+
+    ViETest::Log("SRTP authentication only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+
+    ViETest::Log("SRTP full protection");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Change receive key, but not send key...
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey2));
+
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+
+    ViETest::Log(
+        "\nSRTP receive key changed, you should not see any remote images");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    // Change send key too
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey2));
+
+    ViETest::Log("\nSRTP send key changed too, you should see remote video "
+                 "again with some decoding artefacts at start");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Disable receive, keep send
+    ViETest::Log("SRTP receive disabled , you shouldn't see any video");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+#endif //WEBRTC_SRTP
+    //
+    // External encryption
+    //
+    ViEAutotestEncryption testEncryption;
+    EXPECT_EQ(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    ViETest::Log(
+        "External encryption/decryption added, you should still see video");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViEEncryptionAPITest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViEEncryptionAPITest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    // Connect to channel
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+#ifdef WEBRTC_SRTP
+    unsigned char srtpKey[30] =
+    {   0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3,
+        4, 5, 6, 7, 8, 9};
+
+    //
+    // EnableSRTPSend and DisableSRTPSend
+    //
+
+    // Incorrect input argument, complete protection not enabled
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kNoProtection, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryption, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kAuthentication, srtpKey));
+
+    // Incorrect cipher key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 15,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 257,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 15, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 257, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+
+    // Incorrect auth key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode,
+        30, webrtc::kAuthHmacSha1, 21, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 257, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 21, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 20, 13, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+
+    // NULL input
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        NULL));
+
+    // Double enable and disable
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+
+    // Note(qhogpat): the second check is likely incorrect.
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // No protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 4, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 20, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 1, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 16,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    //
+    // EnableSRTPReceive and DisableSRTPReceive
+    //
+
+    // Incorrect input argument, complete protection not enabled
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kNoProtection, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryption, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kAuthentication, srtpKey));
+
+    // Incorrect cipher key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 15,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 257,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 15, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 257, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+
+    // Incorrect auth key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 21, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 257, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 21, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 20, 13, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+
+    // NULL input
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        NULL));
+
+    // Double enable and disable. NOTE(review): this block sits in the EnableSRTPReceive section but calls EnableSRTPSend/DisableSRTPSend -- it likely should exercise EnableSRTPReceive/DisableSRTPReceive instead.
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // No protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Authentication only. NOTE(review): unlike the Send-side sequence above, this starts with a bare DisableSRTPReceive and has no initial EnableSRTPReceive(..., 20, 4, ...) call -- verify the first enable was not dropped.
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 4, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0,
+        webrtc::kAuthHmacSha1, 20, 20, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 1, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 16,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+#endif //WEBRTC_SRTP
+    //
+    // External encryption
+    //
+
+    ViEAutotestEncryption testEncryption;
+    EXPECT_EQ(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    EXPECT_NE(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_file.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_file.cc
new file mode 100644
index 0000000..83ace52
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_file.cc
@@ -0,0 +1,493 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "testsupport/fileutils.h"
+#include "tb_interfaces.h"
+#include "tb_capture_device.h"
+
+#include "voe_codec.h"
+
+class ViEAutotestFileObserver: public webrtc::ViEFileObserver
+{
+public:
+    ViEAutotestFileObserver() {};
+    ~ViEAutotestFileObserver() {};
+
+    void PlayFileEnded(const WebRtc_Word32 fileId)
+    {
+        ViETest::Log("PlayFile ended");
+    }
+};
+
+void ViEAutoTest::ViEFileStandardTest()
+{
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    {
+        ViETest::Log("Starting a loopback call...");
+
+        TbInterfaces interfaces = TbInterfaces("ViEFileStandardTest");
+
+        webrtc::VideoEngine* ptrViE = interfaces.video_engine;
+        webrtc::ViEBase* ptrViEBase = interfaces.base;
+        webrtc::ViECapture* ptrViECapture = interfaces.capture;
+        webrtc::ViERender* ptrViERender = interfaces.render;
+        webrtc::ViECodec* ptrViECodec = interfaces.codec;
+        webrtc::ViERTP_RTCP* ptrViERtpRtcp = interfaces.rtp_rtcp;
+        webrtc::ViENetwork* ptrViENetwork = interfaces.network;
+
+        TbCaptureDevice captureDevice = TbCaptureDevice(interfaces);
+        int captureId = captureDevice.captureId;
+
+        int videoChannel = -1;
+        EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel));
+        EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
+            captureId, videoChannel));
+
+        EXPECT_EQ(0, ptrViERtpRtcp->SetRTCPStatus(
+            videoChannel, webrtc::kRtcpCompound_RFC4585));
+        EXPECT_EQ(0, ptrViERtpRtcp->SetKeyFrameRequestMethod(
+            videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true));
+
+        EXPECT_EQ(0, ptrViERender->AddRenderer(
+            captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ptrViERender->AddRenderer(
+            videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ptrViERender->StartRender(captureId));
+        EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+            EXPECT_EQ(0, ptrViECodec->SetReceiveCodec(videoChannel,
+                                                      videoCodec));
+        }
+
+        // Find the codec used for encoding the channel
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+            if (videoCodec.codecType == webrtc::kVideoCodecVP8)
+            {
+                EXPECT_EQ(0, ptrViECodec->SetSendCodec(videoChannel, videoCodec));
+                break;
+            }
+        }
+        // Find the codec used for recording.
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+            if (videoCodec.codecType == webrtc::kVideoCodecI420)
+            {
+                break;
+            }
+        }
+
+
+        const char* ipAddress = "127.0.0.1";
+        const unsigned short rtpPort = 6000;
+        EXPECT_EQ(0, ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort));
+        EXPECT_EQ(0, ptrViEBase->StartReceive(videoChannel));
+        EXPECT_EQ(0, ptrViENetwork->SetSendDestination(
+            videoChannel, ipAddress, rtpPort));
+        EXPECT_EQ(0, ptrViEBase->StartSend(videoChannel));
+        webrtc::ViEFile* ptrViEFile = webrtc::ViEFile::GetInterface(ptrViE);
+        EXPECT_TRUE(ptrViEFile != NULL);
+
+        webrtc::VoiceEngine* ptrVEEngine = webrtc::VoiceEngine::Create();
+        webrtc::VoEBase* ptrVEBase = webrtc::VoEBase::GetInterface(ptrVEEngine);
+        ptrVEBase->Init();
+
+        int audioChannel = ptrVEBase->CreateChannel();
+        ptrViEBase->SetVoiceEngine(ptrVEEngine);
+        ptrViEBase->ConnectAudioChannel(videoChannel, audioChannel);
+
+        webrtc::CodecInst audioCodec;
+        webrtc::VoECodec* ptrVECodec =
+            webrtc::VoECodec::GetInterface(ptrVEEngine);
+        for (int index = 0; index < ptrVECodec->NumOfCodecs(); index++)
+        {
+            ptrVECodec->GetCodec(index, audioCodec);
+            if (0 == strcmp(audioCodec.plname, "PCMU") || 0
+                == strcmp(audioCodec.plname, "PCMA"))
+            {
+                break; // These two types are allowed as AVI recording formats.
+            }
+        }
+
+        webrtc::CodecInst audioCodec2;
+
+        //***************************************************************
+        //	Engine ready. Begin testing class
+        //***************************************************************
+
+        // Call started
+        ViETest::Log("Call started\nYou should see local preview from camera\n"
+                     "in window 1 and the remote video in window 2.");
+        AutoTestSleep(2000);
+
+        const int RENDER_TIMEOUT = 1000;
+        const int TEST_SPACING = 1000;
+        const int VIDEO_LENGTH = 5000;
+
+        const std::string root = webrtc::test::ProjectRootPath() +
+            "src/video_engine/test/auto_test/media/";
+        const std::string renderStartImage = root + "renderStartImage.jpg";
+        const std::string captureDeviceImage = root + "captureDeviceImage.jpg";
+        const std::string renderTimeoutFile = root + "renderTimeoutImage.jpg";
+
+        const std::string output = webrtc::test::OutputPath();
+        const std::string snapshotCaptureDeviceFileName =
+            output + "snapshotCaptureDevice.jpg";
+        const std::string incomingVideo = output + "incomingVideo.avi";
+        const std::string outgoingVideo = output + "outgoingVideo.avi";
+        const std::string snapshotRenderFileName =
+            output + "snapshotRenderer.jpg";
+
+        webrtc::ViEPicture capturePicture;
+        webrtc::ViEPicture renderPicture;
+        webrtc::ViEPicture renderTimeoutPicture; // TODO: initialize with an image
+
+        ViEAutotestFileObserver fileObserver;
+        int fileId;
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing StartRecordIncomingVideo and StopRecordIncomingVideo
+        {
+            ViETest::Log("Recording incoming video (currently no audio) for %d "
+                         "seconds", VIDEO_LENGTH);
+
+            EXPECT_EQ(0, ptrViEFile->StartRecordIncomingVideo(
+                videoChannel, incomingVideo.c_str(), webrtc::NO_AUDIO,
+                audioCodec2, videoCodec));
+
+            AutoTestSleep(VIDEO_LENGTH);
+            ViETest::Log("Stop recording incoming video");
+
+            EXPECT_EQ(0, ptrViEFile->StopRecordIncomingVideo(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing GetFileInformation
+        {
+            webrtc::VideoCodec fileVideoCodec;
+            webrtc::CodecInst fileAudioCodec;
+            ViETest::Log("Reading video file information");
+
+            EXPECT_EQ(0, ptrViEFile->GetFileInformation(
+                incomingVideo.c_str(), fileVideoCodec, fileAudioCodec));
+            PrintAudioCodec(fileAudioCodec);
+            PrintVideoCodec(fileVideoCodec);
+        }
+
+        // testing StartPlayFile and RegisterObserver
+        {
+            ViETest::Log("Start playing file: %s with observer",
+                         incomingVideo.c_str());
+            EXPECT_EQ(0, ptrViEFile->StartPlayFile(incomingVideo.c_str(),
+                                                   fileId));
+
+            ViETest::Log("Registering file observer");
+            EXPECT_EQ(0, ptrViEFile->RegisterObserver(fileId, fileObserver));
+            ViETest::Log("Done\n");
+        }
+
+        // testing SendFileOnChannel and StopSendFileOnChannel
+        {
+            ViETest::Log("Sending video on channel");
+            // This should fail since the channel is still sending from the capture device.
+            EXPECT_NE(0, ptrViEFile->SendFileOnChannel(fileId, videoChannel));
+
+            // Disconnect the camera
+            EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
+
+            // And try playing the file again.
+            EXPECT_EQ(0, ptrViEFile->SendFileOnChannel(fileId, videoChannel));
+
+            AutoTestSleep(VIDEO_LENGTH);
+            ViETest::Log("Stopped sending video on channel");
+            EXPECT_EQ(0, ptrViEFile->StopSendFileOnChannel(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // stop playing the file
+        {
+            ViETest::Log("Stop playing the file.");
+            EXPECT_EQ(0, ptrViEFile->StopPlayFile(fileId));
+            ViETest::Log("Done\n");
+        }
+
+        // testing StartRecordOutgoingVideo and StopRecordOutgoingVideo
+        {
+            // connect the camera to the output.
+            EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
+              captureId, videoChannel));
+
+            ViETest::Log("Recording outgoing video (currently no audio) for %d "
+                         "seconds", VIDEO_LENGTH);
+            EXPECT_EQ(0, ptrViEFile->StartRecordOutgoingVideo(
+                videoChannel, outgoingVideo.c_str(), webrtc::NO_AUDIO,
+                audioCodec2, videoCodec));
+
+            AutoTestSleep(VIDEO_LENGTH);
+            ViETest::Log("Stop recording outgoing video");
+            EXPECT_EQ(0, ptrViEFile->StopRecordOutgoingVideo(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        // again testing GetFileInformation
+        {
+            EXPECT_EQ(0, ptrViEFile->GetFileInformation(
+                incomingVideo.c_str(), videoCodec, audioCodec2));
+            PrintAudioCodec(audioCodec2);
+            PrintVideoCodec(videoCodec);
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // GetCaptureDeviceSnapshot
+        {
+            ViETest::Log("Testing GetCaptureDeviceSnapshot(int, ViEPicture)");
+            ViETest::Log("Taking a picture to use for displaying ViEPictures "
+                         "for the rest of file test");
+            ViETest::Log("Hold an object to the camera. Ready?...");
+            AutoTestSleep(1000);
+            ViETest::Log("3");
+            AutoTestSleep(1000);
+            ViETest::Log("...2");
+            AutoTestSleep(1000);
+            ViETest::Log("...1");
+            AutoTestSleep(1000);
+            ViETest::Log("...Taking picture!");
+            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
+                captureId, capturePicture));
+            ViETest::Log("Picture has been taken.");
+            AutoTestSleep(TEST_SPACING);
+
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // GetRenderSnapshot
+        {
+            ViETest::Log("Testing GetRenderSnapshot(int, char*)");
+
+            ViETest::Log("Taking snapshot of videoChannel %d", captureId);
+            EXPECT_EQ(0, ptrViEFile->GetRenderSnapshot(
+                captureId, snapshotRenderFileName.c_str()));
+            ViETest::Log("Wrote image to file %s",
+                         snapshotRenderFileName.c_str());
+            ViETest::Log("Done\n");
+            AutoTestSleep(TEST_SPACING);
+        }
+
+        // GetRenderSnapshot
+        {
+            ViETest::Log("Testing GetRenderSnapshot(int, ViEPicture)");
+            EXPECT_EQ(0, ptrViEFile->GetRenderSnapshot(
+                captureId, renderPicture));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // GetCaptureDeviceSnapshot
+        {
+            ViETest::Log("Testing GetCaptureDeviceSnapshot(int, char*)");
+            ViETest::Log("Taking snapshot from capture device %d", captureId);
+            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
+                captureId, snapshotCaptureDeviceFileName.c_str()));
+            ViETest::Log("Wrote image to file %s",
+                         snapshotCaptureDeviceFileName.c_str());
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Testing: SetCaptureDeviceImage
+        {
+            ViETest::Log("Testing SetCaptureDeviceImage(int, char*)");
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViEFile->SetCaptureDeviceImage(
+                captureId, captureDeviceImage.c_str()));
+
+            ViETest::Log("you should see the capture device image now");
+            AutoTestSleep(2 * RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Testing: SetCaptureDeviceImage
+        {
+            ViETest::Log("Testing SetCaptureDeviceImage(int, ViEPicture)");
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViEFile->SetCaptureDeviceImage(
+                captureId, capturePicture));
+
+            ViETest::Log("you should see the capture device image now");
+            AutoTestSleep(2 * RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing SetRenderStartImage(videoChannel, renderStartImage);
+        {
+            ViETest::Log("Testing SetRenderStartImage(int, char*)");
+            // set render image, then stop capture and stop render to display it
+            ViETest::Log("Stoping renderer, setting start image, then "
+                         "restarting");
+            EXPECT_EQ(0, ptrViEFile->SetRenderStartImage(
+                videoChannel, renderStartImage.c_str()));
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+
+            ViETest::Log("Render start image should be displayed.");
+            AutoTestSleep(RENDER_TIMEOUT);
+
+            // restarting capture and render
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing SetRenderStartImage(videoChannel, renderStartImage);
+        {
+            ViETest::Log("Testing SetRenderStartImage(int, ViEPicture)");
+            // set render image, then stop capture and stop render to display it
+            ViETest::Log("Stoping renderer, setting start image, then "
+                         "restarting");
+            EXPECT_EQ(0, ptrViEFile->SetRenderStartImage(
+                videoChannel, capturePicture));
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+
+            ViETest::Log("Render start image should be displayed.");
+            AutoTestSleep(RENDER_TIMEOUT);
+
+            // restarting capture and render
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
+        // RENDER_TIMEOUT);
+        {
+            ViETest::Log("Testing SetRenderTimeoutImage(int, char*)");
+            ViETest::Log("Stopping capture device to induce timeout of %d ms",
+                         RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViEFile->SetRenderTimeoutImage(
+                videoChannel, renderTimeoutFile.c_str(), RENDER_TIMEOUT));
+
+            // now stop sending frames to the remote renderer and wait for
+            // timeout
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            AutoTestSleep(RENDER_TIMEOUT);
+            ViETest::Log("Timeout image should be displayed now for %d ms",
+                         RENDER_TIMEOUT * 2);
+            AutoTestSleep(RENDER_TIMEOUT * 2);
+
+            // restart the capture device to undo the timeout
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Restarting capture device");
+            AutoTestSleep(RENDER_TIMEOUT);
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Need to create a ViEPicture object to pass into this function.
+        // SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
+        // RENDER_TIMEOUT);
+        {
+            ViETest::Log("Testing SetRenderTimeoutImage(int, ViEPicture)");
+            ViETest::Log("Stopping capture device to induce timeout of %d",
+                         RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViEFile->SetRenderTimeoutImage(
+                videoChannel, capturePicture, RENDER_TIMEOUT));
+
+            // now stop sending frames to the remote renderer and wait for
+            // timeout
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            AutoTestSleep(RENDER_TIMEOUT);
+            ViETest::Log("Timeout image should be displayed now for %d",
+                         RENDER_TIMEOUT * 2);
+            AutoTestSleep(RENDER_TIMEOUT * 2);
+
+            // restart the capture device to undo the timeout
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Restarting capture device");
+            ViETest::Log("Done\n");
+        }
+
+        // testing DeregisterObserver
+        {
+            ViETest::Log("Deregistering file observer");
+            // Should fail since we don't observe this file.
+            EXPECT_NE(0, ptrViEFile->DeregisterObserver(fileId, fileObserver));
+        }
+
+        //***************************************************************
+        //	Testing finished. Tear down Video Engine
+        //***************************************************************
+
+        EXPECT_EQ(0, ptrViEBase->DisconnectAudioChannel(videoChannel));
+        EXPECT_EQ(0, ptrViEBase->SetVoiceEngine(NULL));
+        EXPECT_EQ(0, ptrVEBase->DeleteChannel(audioChannel));
+        EXPECT_EQ(0, ptrVEBase->Release());
+        EXPECT_EQ(0, ptrVECodec->Release());
+        EXPECT_TRUE(webrtc::VoiceEngine::Delete(ptrVEEngine));
+
+        EXPECT_EQ(0, ptrViEBase->StopReceive(videoChannel));
+        EXPECT_EQ(0, ptrViEBase->StopSend(videoChannel));
+        EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+        EXPECT_EQ(0, ptrViERender->RemoveRenderer(captureId));
+        EXPECT_EQ(0, ptrViERender->RemoveRenderer(videoChannel));
+        EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
+        EXPECT_EQ(0, ptrViEFile->FreePicture(capturePicture));
+        EXPECT_EQ(0, ptrViEFile->FreePicture(renderPicture));
+        EXPECT_EQ(0, ptrViEFile->FreePicture(renderTimeoutPicture));
+        EXPECT_EQ(0, ptrViEBase->DeleteChannel(videoChannel));
+
+        EXPECT_EQ(0, ptrViEFile->Release());
+    }
+#endif
+}
+
+void ViEAutoTest::ViEFileExtendedTest()
+{
+}
+
+void ViEAutoTest::ViEFileAPITest()
+{
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_image_process.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_image_process.cc
new file mode 100644
index 0000000..887fe6e
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_image_process.cc
@@ -0,0 +1,240 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_image_process.cc
+//
+
+// Settings
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "tb_capture_device.h"
+
+class MyEffectFilter: public webrtc::ViEEffectFilter
+{
+public:
+    MyEffectFilter() {}
+
+    ~MyEffectFilter() {}
+
+    virtual int Transform(int size, unsigned char* frameBuffer,
+                          unsigned int timeStamp90KHz, unsigned int width,
+                          unsigned int height)
+    {
+        // Black and white
+        memset(frameBuffer + (2 * size) / 3, 0x7f, size / 3);
+        return 0;
+    }
+};
+
+void ViEAutoTest::ViEImageProcessStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    int rtpPort = 6000;
+    // Create VIE
+    TbInterfaces ViE("ViEImageProcessAPITest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+    tbChannel.StartReceive(rtpPort);
+    tbChannel.StartSend(rtpPort);
+
+    MyEffectFilter effectFilter;
+
+    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+    ViETest::Log("Capture device is rendered in Window 1");
+    ViETest::Log("Remote stream is rendered in Window 2");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    ViETest::Log("Black and white filter registered for capture device, "
+                 "affects both windows");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+
+    ViETest::Log("Remove capture effect filter, adding filter for incoming "
+                 "stream");
+    ViETest::Log("Only Window 2 should be black and white");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    int rtpPort2 = rtpPort + 100;
+    // Create a video channel
+    TbVideoChannel tbChannel2(ViE, webrtc::kVideoCodecVP8);
+
+    tbCapture.ConnectTo(tbChannel2.videoChannel);
+    tbChannel2.StartReceive(rtpPort2);
+    tbChannel2.StartSend(rtpPort2);
+
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel2.videoChannel, _window1, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel2.videoChannel));
+    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
+        tbChannel.videoChannel));
+
+    ViETest::Log("Local renderer removed, added new channel and rendering in "
+                 "Window1.");
+
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    ViETest::Log("Black and white filter registered for capture device, "
+                 "affects both windows");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.image_process->RegisterSendEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+
+    ViETest::Log("Capture filter removed.");
+    ViETest::Log("Black and white filter registered for one channel, Window2 "
+                 "should be black and white");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViEImageProcessExtendedTest()
+{
+    ViEImageProcessStandardTest();
+}
+
+void ViEAutoTest::ViEImageProcessAPITest()
+{
+    TbInterfaces ViE("ViEImageProcessAPITest");
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    TbCaptureDevice tbCapture(ViE);
+
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    MyEffectFilter effectFilter;
+
+    //
+    // Capture effect filter
+    //
+    // Add effect filter
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+    // Add again -> error
+    EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+
+    // Double deregister
+    EXPECT_NE(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+    // Non-existing capture device
+    EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+
+    //
+    // Render effect filter
+    //
+    EXPECT_EQ(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_NE(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
+        tbChannel.videoChannel));
+    EXPECT_NE(0, ViE.image_process->DeregisterRenderEffectFilter(
+        tbChannel.videoChannel));
+
+    // Non-existing channel id
+    EXPECT_NE(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    //
+    // Send effect filter
+    //
+    EXPECT_EQ(0, ViE.image_process->RegisterSendEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
+        tbChannel.videoChannel));
+    EXPECT_NE(0, ViE.image_process->DeregisterSendEffectFilter(
+        tbChannel.videoChannel));
+    EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    //
+    // Denoising
+    //
+    EXPECT_EQ(0, ViE.image_process->EnableDenoising(tbCapture.captureId, true));
+    EXPECT_NE(0, ViE.image_process->EnableDenoising(tbCapture.captureId, true));
+    EXPECT_EQ(0, ViE.image_process->EnableDenoising(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDenoising(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDenoising(
+        tbChannel.videoChannel, true));
+
+    //
+    // Deflickering
+    //
+    EXPECT_EQ(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, true));
+    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, true));
+    EXPECT_EQ(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
+        tbChannel.videoChannel, true));
+
+    //
+    // Color enhancement
+    //
+    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, false));
+    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, true));
+    EXPECT_NE(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, true));
+    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, false));
+    EXPECT_NE(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, false));
+    EXPECT_NE(0, ViE.image_process->EnableColorEnhancement(
+        tbCapture.captureId, true));
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_linux.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_linux.cc
new file mode 100644
index 0000000..aca96bd
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_linux.cc
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_linux.cc
+//
+#include "vie_autotest_linux.h"
+
+#include <string>
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest_main.h"
+#include "engine_configurations.h"
+#include "critical_section_wrapper.h"
+#include "thread_wrapper.h"
+
+ViEAutoTestWindowManager::ViEAutoTestWindowManager()
+    : _hdsp1(NULL),
+      _hdsp2(NULL) {
+}
+
+ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
+  TerminateWindows();
+}
+
+void* ViEAutoTestWindowManager::GetWindow1() {
+  return reinterpret_cast<void*>(_hwnd1);
+}
+
+void* ViEAutoTestWindowManager::GetWindow2() {
+  return reinterpret_cast<void*>(_hwnd2);
+}
+
+int ViEAutoTestWindowManager::TerminateWindows() {
+  if (_hdsp1) {
+    ViEDestroyWindow(&_hwnd1, _hdsp1);
+    _hdsp1 = NULL;
+  }
+  if (_hdsp2) {
+    ViEDestroyWindow(&_hwnd2, _hdsp2);
+    _hdsp2 = NULL;
+  }
+  return 0;
+}
+
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            void* window1Title,
+                                            void* window2Title) {
+  ViECreateWindow(&_hwnd1, &_hdsp1, window1Size.origin.x,
+                  window1Size.origin.y, window1Size.size.width,
+                  window1Size.size.height,
+                  reinterpret_cast<char*>(window1Title));
+  ViECreateWindow(&_hwnd2, &_hdsp2, window2Size.origin.x,
+                  window2Size.origin.y, window2Size.size.width,
+                  window2Size.size.height,
+                  reinterpret_cast<char*>(window2Title));
+
+  return 0;
+}
+
+int ViEAutoTestWindowManager::ViECreateWindow(Window *out_window,
+                                              Display **out_display, int x_pos,
+                                              int y_pos, int width, int height,
+                                              char* title) {
+  Display* display = XOpenDisplay(NULL);
+  if (display == NULL) {
+    // There's no point to continue if this happens: nothing will work anyway.
+    printf("Failed to connect to X server: X environment likely broken\n");
+    exit(-1);
+  }
+
+  int screen = DefaultScreen(display);
+
+  // Try to establish a 24-bit TrueColor display
+  // (our environment must allow this).
+  XVisualInfo visual_info;
+  if (XMatchVisualInfo(display, screen, 24, TrueColor, &visual_info) == 0) {
+    printf("Failed to establish 24-bit TrueColor in X environment.\n");
+    exit(-1);
+  }
+
+  // Create suitable window attributes.
+  XSetWindowAttributes window_attributes;
+  window_attributes.colormap = XCreateColormap(
+      display, DefaultRootWindow(display), visual_info.visual, AllocNone);
+  window_attributes.event_mask = StructureNotifyMask | ExposureMask;
+  window_attributes.background_pixel = 0;
+  window_attributes.border_pixel = 0;
+
+  unsigned long attribute_mask = CWBackPixel | CWBorderPixel | CWColormap |
+                                 CWEventMask;
+
+  Window _window = XCreateWindow(display, DefaultRootWindow(display), x_pos,
+                                 y_pos, width, height, 0, visual_info.depth,
+                                 InputOutput, visual_info.visual,
+                                 attribute_mask, &window_attributes);
+
+  // Set window name.
+  XStoreName(display, _window, title);
+  XSetIconName(display, _window, title);
+
+  // Make x report events for mask.
+  XSelectInput(display, _window, StructureNotifyMask);
+
+  // Map the window to the display.
+  XMapWindow(display, _window);
+
+  // Wait for map event.
+  XEvent event;
+  do {
+    XNextEvent(display, &event);
+  } while (event.type != MapNotify || event.xmap.event != _window);
+
+  *out_window = _window;
+  *out_display = display;
+  return 0;
+}
+
+int ViEAutoTestWindowManager::ViEDestroyWindow(Window *window,
+                                               Display *display) {
+  XUnmapWindow(display, *window);
+  XDestroyWindow(display, *window);
+  XSync(display, false);
+  return 0;
+}
+
+bool ViEAutoTestWindowManager::SetTopmostWindow() {
+  return 0;
+}
+
+int main(int argc, char** argv) {
+  ViEAutoTestMain auto_test;
+  return auto_test.RunTests(argc, argv);
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_loopback.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_loopback.cc
new file mode 100644
index 0000000..d660745
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_loopback.cc
@@ -0,0 +1,663 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_loopback.cc
+//
+// This code is also used as sample code for ViE 3.0
+//
+
+// ===================================================================
+//
+// BEGIN: VideoEngine 3.0 Sample Code
+//
+
+#include <iostream>
+
+#include "common_types.h"
+#include "tb_external_transport.h"
+#include "voe_base.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+
+#define VCM_RED_PAYLOAD_TYPE        96
+#define VCM_ULPFEC_PAYLOAD_TYPE     97
+
+int VideoEngineSampleCode(void* window1, void* window2)
+{
+    //********************************************************
+    //  Begin create/initialize Video Engine for testing
+    //********************************************************
+
+    int error = 0;
+
+    //
+    // Create a VideoEngine instance
+    //
+    webrtc::VideoEngine* ptrViE = NULL;
+    ptrViE = webrtc::VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+
+    error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+
+    std::string trace_file =
+        ViETest::GetResultOutputPath() + "ViELoopbackCall_trace.txt";
+    error = ptrViE->SetTraceFile(trace_file.c_str());
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+
+    //
+    // Init VideoEngine and create a channel
+    //
+    webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+
+    //
+    // List available capture devices, allocate and connect.
+    //
+    webrtc::ViECapture* ptrViECapture =
+        webrtc::ViECapture::GetInterface(ptrViE);
+    if (ptrViECapture == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    printf("Available capture devices:\n");
+    int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+
+        error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                                KMaxDeviceNameLength, uniqueId,
+                                                KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        printf("\t %d. %s\n", captureIdx + 1, deviceName);
+    }
+    printf("\nChoose capture device: ");
+#ifdef WEBRTC_ANDROID
+    captureIdx = 0;
+    printf("0\n");
+#else
+    if (scanf("%d", &captureIdx) != 1)
+    {
+        printf("Error in scanf()\n");
+        return -1;
+    }
+    getchar();
+    captureIdx = captureIdx - 1; // Compensate for idx start at 1.
+#endif
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
+                                                 captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StartCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+
+    //
+    // RTP/RTCP settings
+    //
+    webrtc::ViERTP_RTCP* ptrViERtpRtcp =
+        webrtc::ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+                                         webrtc::kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
+        videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetRembStatus(videoChannel, true, true);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRembStatus\n");
+        return -1;
+    }
+
+    //
+    // Set up rendering
+    //
+    webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
+    if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+
+    error
+        = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
+                                      1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    //
+    // Setup codecs
+    //
+    webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }
+
+    // Check available codecs and prepare receive codecs
+    printf("\nAvailable codecs:\n");
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+    int codecIdx = 0;
+    for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+
+        // try to keep the test frame size small when I420
+        if (videoCodec.codecType == webrtc::kVideoCodecI420)
+        {
+            videoCodec.width = 176;
+            videoCodec.height = 144;
+        }
+
+        error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != webrtc::kVideoCodecRED
+            && videoCodec.codecType != webrtc::kVideoCodecULPFEC)
+        {
+            printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
+        }
+    }
+    printf("Choose codec: ");
+#ifdef WEBRTC_ANDROID
+    codecIdx = 0;
+    printf("0\n");
+#else
+    if (scanf("%d", &codecIdx) != 1)
+    {
+        printf("Error in scanf()\n");
+        return -1;
+    }
+    getchar();
+    codecIdx = codecIdx - 1; // Compensate for idx start at 1.
+#endif
+
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+
+    // Set spatial resolution option
+    std::string str;
+    std::cout << std::endl;
+    std::cout << "Enter frame size option (default is CIF):" << std::endl;
+    std::cout << "1. QCIF (176X144) " << std::endl;
+    std::cout << "2. CIF  (352X288) " << std::endl;
+    std::cout << "3. VGA  (640X480) " << std::endl;
+    std::cout << "4. 4CIF (704X576) " << std::endl;
+    std::cout << "5. WHD  (1280X720) " << std::endl;
+    std::getline(std::cin, str);
+    int resolnOption = atoi(str.c_str());
+    // Try to keep the test frame size small when I420
+    if (videoCodec.codecType == webrtc::kVideoCodecI420)
+    {
+       resolnOption = 1;
+    }
+    switch (resolnOption)
+    {
+        case 1:
+            videoCodec.width = 176;
+            videoCodec.height = 144;
+            break;
+        case 2:
+            videoCodec.width = 352;
+            videoCodec.height = 288;
+            break;
+        case 3:
+            videoCodec.width = 640;
+            videoCodec.height = 480;
+            break;
+        case 4:
+            videoCodec.width = 704;
+            videoCodec.height = 576;
+            break;
+        case 5:
+            videoCodec.width = 1280;
+            videoCodec.height = 720;
+            break;
+    }
+
+    // Set number of temporal layers.
+    std::cout << std::endl;
+    std::cout << "Choose number of temporal layers (1 to 4).";
+    std::cout << "Press enter for default: \n";
+    std::getline(std::cin, str);
+    int numTemporalLayers = atoi(str.c_str());
+    if(numTemporalLayers != 0)
+    {
+        videoCodec.codecSpecific.VP8.numberOfTemporalLayers = numTemporalLayers;
+    }
+
+    // Set start bit rate
+    std::cout << std::endl;
+    std::cout << "Choose start rate (in kbps). Press enter for default:  ";
+    std::getline(std::cin, str);
+    int startRate = atoi(str.c_str());
+    if(startRate != 0)
+    {
+        videoCodec.startBitrate=startRate;
+    }
+
+    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+
+    //
+    // Choose Protection Mode
+    //
+    std::cout << std::endl;
+    std::cout << "Enter Protection Method:" << std::endl;
+    std::cout << "0. None" << std::endl;
+    std::cout << "1. FEC" << std::endl;
+    std::cout << "2. NACK" << std::endl;
+    std::cout << "3. NACK+FEC" << std::endl;
+    std::getline(std::cin, str);
+    int protectionMethod = atoi(str.c_str());
+    error = 0;
+    bool temporalToggling = true;
+    switch (protectionMethod)
+    {
+        case 0: // None: default is no protection
+            break;
+
+        case 1: // FEC only
+            error = ptrViERtpRtcp->SetFECStatus(videoChannel,
+                                                true,
+                                                VCM_RED_PAYLOAD_TYPE,
+                                                VCM_ULPFEC_PAYLOAD_TYPE);
+            temporalToggling = false;
+            break;
+
+        case 2: // Nack only
+            error = ptrViERtpRtcp->SetNACKStatus(videoChannel, true);
+
+            break;
+
+        case 3: // Hybrid NAck and FEC
+            error = ptrViERtpRtcp->SetHybridNACKFECStatus(
+                videoChannel,
+                true,
+                VCM_RED_PAYLOAD_TYPE,
+                VCM_ULPFEC_PAYLOAD_TYPE);
+            temporalToggling = false;
+            break;
+     }
+
+    if (error < 0)
+    {
+        printf("ERROR in ViERTP_RTCP::SetProtectionStatus\n");
+    }
+
+
+    //
+    // Address settings
+    //
+    webrtc::ViENetwork* ptrViENetwork =
+        webrtc::ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+
+    // Setting External transport
+    TbExternalTransport extTransport(*(ptrViENetwork));
+
+    int testMode = 0;
+    std::cout << std::endl;
+    std::cout << "Enter 1 for testing packet loss and delay with "
+        "external transport: ";
+    std::string test_str;
+    std::getline(std::cin, test_str);
+    testMode = atoi(test_str.c_str());
+    if (testMode == 1)
+    {
+        // Avoid changing SSRC due to collision.
+        error = ptrViERtpRtcp->SetLocalSSRC(videoChannel, 1);
+
+        error = ptrViENetwork->RegisterSendTransport(videoChannel,
+                                                     extTransport);
+        if (error == -1)
+        {
+            printf("ERROR in ViENetwork::RegisterSendTransport \n");
+            return -1;
+        }
+
+        // Set up packet loss value
+        std::cout << "Enter Packet Loss Percentage" << std::endl;
+        std::string rate_str;
+        std::getline(std::cin, rate_str);
+        int rate = atoi(rate_str.c_str());
+        extTransport.SetPacketLoss(rate);
+        if (rate) {
+          temporalToggling = false;
+        }
+
+        // Set network delay value
+        std::cout << "Enter network delay value [mS]" << std::endl;
+        std::string delay_str;
+        std::getline(std::cin, delay_str);
+        int delayMs = atoi(delay_str.c_str());
+        extTransport.SetNetworkDelay(delayMs);
+
+        if (numTemporalLayers > 1 && temporalToggling) {
+          extTransport.SetTemporalToggle(numTemporalLayers);
+        } else {
+          // Disabled
+          extTransport.SetTemporalToggle(0);
+        }
+    }
+    else
+    {
+        const char* ipAddress = "127.0.0.1";
+        const unsigned short rtpPort = 6000;
+        std::cout << std::endl;
+        std::cout << "Using rtp port: " << rtpPort << std::endl;
+        std::cout << std::endl;
+        error = ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort);
+        if (error == -1)
+        {
+            printf("ERROR in ViENetwork::SetLocalReceiver\n");
+            return -1;
+        }
+        error = ptrViENetwork->SetSendDestination(videoChannel,
+                                                  ipAddress, rtpPort);
+        if (error == -1)
+        {
+            printf("ERROR in ViENetwork::SetSendDestination\n");
+            return -1;
+        }
+    }
+
+    error = ptrViEBase->StartReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StartReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StartSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StartSend\n");
+        return -1;
+    }
+
+    //********************************************************
+    //  Engine started
+    //********************************************************
+
+
+    // Call started
+    printf("\nLoopback call started\n\n");
+    printf("Press enter to stop...");
+    while ((getchar()) != '\n')
+        ;
+
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+
+    error = ptrViEBase->StopReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StopSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StopCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1;
+    }
+
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+
+    bool deleted = webrtc::VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+
+    return 0;
+
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+}
+
+int ViEAutoTest::ViELoopbackCall()
+{
+    ViETest::Log(" ");
+    ViETest::Log("========================================");
+    ViETest::Log(" ViE Autotest Loopback Call\n");
+
+    if (VideoEngineSampleCode(_window1, _window2) == 0)
+    {
+        ViETest::Log(" ");
+        ViETest::Log(" ViE Autotest Loopback Call Done");
+        ViETest::Log("========================================");
+        ViETest::Log(" ");
+
+        return 0;
+    }
+
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Loopback Call Failed");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+    return 1;
+
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_main.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_main.cc
new file mode 100644
index 0000000..b9f6958
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_main.cc
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest_main.h"
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+#include "vie_autotest.h"
+#include "vie_autotest_window_manager_interface.h"
+#include "vie_window_creator.h"
+
+// --automated switches from the interactive menu to a plain gtest run.
+DEFINE_bool(automated, false, "Run Video engine tests in noninteractive mode.");
+
+// gtest test-case (fixture) names used to build --gtest_filter expressions.
+static const std::string kStandardTest = "ViEStandardIntegrationTest";
+static const std::string kExtendedTest = "ViEExtendedIntegrationTest";
+static const std::string kApiTest = "ViEApiIntegrationTest";
+
+// Maps interactive-menu indices (1..9) to gtest test method names.
+// AskUserForTestCase() iterates this map to print the menu and relies on
+// std::map's sorted key order to find the highest valid choice.
+ViEAutoTestMain::ViEAutoTestMain() {
+  index_to_test_method_map_[1] = "RunsBaseTestWithoutErrors";
+  index_to_test_method_map_[2] = "RunsCaptureTestWithoutErrors";
+  index_to_test_method_map_[3] = "RunsCodecTestWithoutErrors";
+  index_to_test_method_map_[4] = "RunsEncryptionTestWithoutErrors";
+  index_to_test_method_map_[5] = "RunsFileTestWithoutErrors";
+  index_to_test_method_map_[6] = "RunsImageProcessTestWithoutErrors";
+  index_to_test_method_map_[7] = "RunsNetworkTestWithoutErrors";
+  index_to_test_method_map_[8] = "RunsRenderTestWithoutErrors";
+  index_to_test_method_map_[9] = "RunsRtpRtcpTestWithoutErrors";
+}
+
+// Entry point for the autotest binary. Initializes ViE logging and the
+// gtest framework, parses the remaining gflags, then either runs all gtest
+// cases (--automated) or drops into the interactive menu. Returns the
+// result of the chosen run mode (0 = success for the gtest path).
+int ViEAutoTestMain::RunTests(int argc, char** argv) {
+  // Initialize logging.
+  ViETest::Init();
+  // Initialize the testing framework.
+  testing::InitGoogleTest(&argc, argv);
+  // Parse remaining flags:
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  int result;
+  if (FLAGS_automated) {
+    // Run in automated mode.
+    result = RUN_ALL_TESTS();
+  } else {
+    // Run in interactive mode.
+    result = RunInteractiveMode();
+  }
+
+  ViETest::Terminate();
+  return result;
+}
+
+// Prints the numbered list of specific test methods and blocks until the
+// user enters a valid number. Returns 0 ("go back to previous menu") or a
+// key of index_to_test_method_map_.
+int ViEAutoTestMain::AskUserForTestCase() {
+  int choice;
+  std::string answer;
+
+  do {
+    ViETest::Log("\nSpecific tests:");
+    ViETest::Log("\t 0. Go back to previous menu.");
+
+    // Print all test method choices. Assumes that map sorts on its key.
+    int last_valid_choice = 0;
+    std::map<int, std::string>::const_iterator iterator;
+    for (iterator = index_to_test_method_map_.begin();
+        iterator != index_to_test_method_map_.end();
+        ++iterator) {
+      ViETest::Log("\t %d. %s", iterator->first, iterator->second.c_str());
+      last_valid_choice = iterator->first;
+    }
+
+    ViETest::Log("Choose specific test:");
+    choice = AskUserForNumber(0, last_valid_choice);
+  } while (choice == kInvalidChoice);
+
+  return choice;
+}
+
+// Reads one integer from stdin. Returns the value if it lies within
+// [min_allowed, max_allowed]; otherwise logs a hint and returns
+// kInvalidChoice so the caller can re-prompt.
+int ViEAutoTestMain::AskUserForNumber(int min_allowed, int max_allowed) {
+  int result;
+  if (scanf("%d", &result) <= 0) {
+    ViETest::Log("\nPlease enter a number instead, then hit enter.");
+    // Discard the non-numeric input so the next scanf doesn't loop on it.
+    getchar();
+    return kInvalidChoice;
+  }
+  getchar();  // Consume enter key.
+
+  if (result < min_allowed || result > max_allowed) {
+    ViETest::Log("%d-%d are valid choices. Please try again.", min_allowed,
+                 max_allowed);
+    return kInvalidChoice;
+  }
+
+  return result;
+}
+
+// Runs exactly the gtest "test_case.test_method" (test_method may be "*")
+// by setting the gtest filter. Returns the RUN_ALL_TESTS() result
+// (0 = all matched tests passed).
+int ViEAutoTestMain::RunTestMatching(const std::string test_case,
+                                     const std::string test_method) {
+  testing::FLAGS_gtest_filter = test_case + "." + test_method;
+  return RUN_ALL_TESTS();
+}
+
+// Asks the user which method of |test_case_name| to run and runs it via
+// the gtest filter. A choice of 0 means "run nothing" and returns 0.
+int ViEAutoTestMain::RunSpecificTestCaseIn(const std::string test_case_name)
+{
+  // If user says 0, it means don't run anything.
+  int specific_choice = AskUserForTestCase();
+  if (specific_choice != 0){
+    return RunTestMatching(test_case_name,
+                           index_to_test_method_map_[specific_choice]);
+  }
+  return 0;
+}
+
+// Runs one of the non-gtest demos (7 = loopback call, 8 = custom call,
+// 9 = simulcast). These set up their own windows and return an error count.
+int ViEAutoTestMain::RunSpecialTestCase(int choice) {
+  // 7-9 don't run in GTest and need to initialize by themselves.
+  assert(choice >= 7 && choice <= 9);
+
+  // Create the windows
+  ViEWindowCreator windowCreator;
+  ViEAutoTestWindowManagerInterface* windowManager =
+      windowCreator.CreateTwoWindows();
+
+  // Create the test cases
+  ViEAutoTest vieAutoTest(windowManager->GetWindow1(),
+                          windowManager->GetWindow2());
+
+  int errors = 0;
+  switch (choice) {
+    case 7: errors = vieAutoTest.ViELoopbackCall();  break;
+    case 8: errors = vieAutoTest.ViECustomCall();    break;
+    case 9: errors = vieAutoTest.ViESimulcastCall(); break;
+  }
+
+  // Tear the windows down even when the test reported errors.
+  windowCreator.TerminateWindows();
+  return errors;
+}
+
+// Interactive menu loop: repeatedly prompts for a test type and runs it
+// until the user chooses 0 (quit).
+// NOTE(review): |errors| keeps only the result of the *last* test run, and
+// the function returns true unconditionally — confirm that callers (e.g.
+// RunTests) do not rely on this return value for pass/fail reporting.
+bool ViEAutoTestMain::RunInteractiveMode() {
+  ViETest::Log(" ============================== ");
+  ViETest::Log("    WebRTC ViE 3.x Autotest     ");
+  ViETest::Log(" ============================== \n");
+
+  int choice = 0;
+  int errors = 0;
+  do {
+    ViETest::Log("Test types: ");
+    ViETest::Log("\t 0. Quit");
+    ViETest::Log("\t 1. All standard tests (delivery test)");
+    ViETest::Log("\t 2. All API tests");
+    ViETest::Log("\t 3. All extended test");
+    ViETest::Log("\t 4. Specific standard test");
+    ViETest::Log("\t 5. Specific API test");
+    ViETest::Log("\t 6. Specific extended test");
+    ViETest::Log("\t 7. Simple loopback call");
+    ViETest::Log("\t 8. Custom configure a call");
+    ViETest::Log("\t 9. Simulcast in loopback");
+    ViETest::Log("Select type of test:");
+
+    choice = AskUserForNumber(0, 9);
+    if (choice == kInvalidChoice) {
+      // Bad input: re-print the menu and ask again.
+      continue;
+    }
+    switch (choice) {
+      case 0:                                                 break;
+      case 1:  errors = RunTestMatching(kStandardTest, "*");  break;
+      case 2:  errors = RunTestMatching(kApiTest,      "*");  break;
+      case 3:  errors = RunTestMatching(kExtendedTest, "*");  break;
+      case 4:  errors = RunSpecificTestCaseIn(kStandardTest); break;
+      case 5:  errors = RunSpecificTestCaseIn(kApiTest);      break;
+      case 6:  errors = RunSpecificTestCaseIn(kExtendedTest); break;
+      default: errors = RunSpecialTestCase(choice);           break;
+    }
+  } while (choice != 0);
+
+  if (errors) {
+    ViETest::Log("Test done with errors, see ViEAutotestLog.txt for test "
+        "result.\n");
+  } else {
+    ViETest::Log("Test done without errors, see ViEAutotestLog.txt for "
+        "test result.\n");
+  }
+  return true;
+}
+
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_network.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_network.cc
new file mode 100644
index 0000000..2a84644
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_network.cc
@@ -0,0 +1,563 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_network.cc
+//
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "tb_capture_device.h"
+#include "tb_external_transport.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+
+#if defined(_WIN32)
+#include <qos.h>
+#elif defined(WEBRTC_MAC_INTEL)
+
+#endif
+
+// No-op implementation of webrtc::ViENetworkObserver. The API test only
+// needs an observer instance to exercise Register/DeregisterObserver; the
+// callbacks are intentionally empty.
+class ViEAutoTestNetworkObserver: public webrtc::ViENetworkObserver
+{
+public:
+    ViEAutoTestNetworkObserver()
+    {
+    }
+    virtual ~ViEAutoTestNetworkObserver()
+    {
+    }
+    // Periodic dead-or-alive status callback for a channel; ignored here.
+    virtual void OnPeriodicDeadOrAlive(const int videoChannel, const bool alive)
+    {
+    }
+    // Packet timeout notification for a channel; ignored here.
+    virtual void PacketTimeout(const int videoChannel,
+                               const webrtc::ViEPacketTimeout timeout)
+    {
+    }
+};
+
+// Functional test: streams captured video first over an external transport,
+// then over the built-in UDP socket transport looped back on 127.0.0.1,
+// and finally verifies source filtering by port and by IP address. A human
+// tester confirms the video in the render windows.
+void ViEAutoTest::ViENetworkStandardTest()
+{
+    TbInterfaces ViE("ViENetworkStandardTest"); // Create VIE
+    TbCaptureDevice tbCapture(ViE);
+    {
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+        tbCapture.ConnectTo(tbChannel.videoChannel);
+
+        RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+        // ***************************************************************
+        // Engine ready. Begin testing class
+        // ***************************************************************
+
+        //
+        // Transport
+        //
+        TbExternalTransport testTransport(*ViE.network);
+        EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+            tbChannel.videoChannel, testTransport));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+
+        ViETest::Log("Call started using external transport, video should "
+            "see video in both windows\n");
+        AutoTestSleep(KAutoTestSleepTimeMs);
+
+        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));
+
+        // Switch to the built-in UDP transport, looping back to ourselves.
+        char myIpAddress[64];
+        memset(myIpAddress, 0, 64);
+        unsigned short rtpPort = 1234;
+        memcpy(myIpAddress, "127.0.0.1", sizeof("127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, myIpAddress, rtpPort,
+            rtpPort + 1, rtpPort));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        ViETest::Log("Changed to WebRTC SocketTransport, you should still see "
+                     "video in both windows\n");
+        AutoTestSleep(KAutoTestSleepTimeMs);
+
+        // A filter on the wrong ports must block the incoming stream.
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, rtpPort + 10, rtpPort + 11, myIpAddress));
+        ViETest::Log("Added UDP port filter for incorrect ports, you should "
+                     "not see video in Window2");
+        AutoTestSleep(2000);
+        // A filter on the wrong IP address must also block the stream.
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, "123.1.1.0"));
+        ViETest::Log("Added IP filter for incorrect IP address, you should not "
+                     "see video in Window2");
+        AutoTestSleep(2000);
+        // Filtering on our own address/ports restores the stream.
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
+        ViETest::Log("Added IP filter for this computer, you should see video "
+                     "in Window2 again\n");
+        AutoTestSleep(KAutoTestSleepTimeMs);
+
+        tbCapture.Disconnect(tbChannel.videoChannel);
+    }
+}
+
+// Extended functional test: sets up a send channel towards a fixed remote
+// IP and cycles through ToS/DSCP values (2, 63, 0), first via the default
+// path and then forcing setsockopt. The tester verifies the DSCP bits on
+// the wire with a packet sniffer (e.g. Wireshark). SetSendToS may require
+// admin rights on Windows.
+void ViEAutoTest::ViENetworkExtendedTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    TbInterfaces ViE("ViENetworkExtendedTest"); // Create VIE
+    TbCaptureDevice tbCapture(ViE);
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+
+    {
+        //
+        // ToS
+        //
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+        tbCapture.ConnectTo(tbChannel.videoChannel);
+        const char* remoteIp = "192.168.200.1";
+        int DSCP = 0;
+        bool useSetSockOpt = false;
+
+        // Lower the frame rate so the sniffer capture stays manageable.
+        webrtc::VideoCodec videoCodec;
+        EXPECT_EQ(0, ViE.codec->GetSendCodec(
+            tbChannel.videoChannel, videoCodec));
+        videoCodec.maxFramerate = 5;
+        EXPECT_EQ(0, ViE.codec->SetSendCodec(
+            tbChannel.videoChannel, videoCodec));
+
+        //***************************************************************
+        //	Engine ready. Begin testing class
+        //***************************************************************
+
+        char myIpAddress[64];
+        memset(myIpAddress, 0, 64);
+        unsigned short rtpPort = 9000;
+        EXPECT_EQ(0, ViE.network->GetLocalIP(myIpAddress, false));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, remoteIp, rtpPort, rtpPort + 1, rtpPort));
+
+        // ToS
+        int tos_result = ViE.network->SetSendToS(tbChannel.videoChannel, 2);
+        EXPECT_EQ(0, tos_result);
+        if (tos_result != 0)
+        {
+            ViETest::Log("ViESetSendToS error!.");
+            ViETest::Log("You must be admin to run these tests.");
+            ViETest::Log("On Win7 and late Vista, you need to right click the "
+                         "exe and choose");
+            ViETest::Log("\"Run as administrator\"\n");
+            getchar();
+        }
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // ToS set to 2
+
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        ViETest::Log("Use Wireshark to capture the outgoing video stream and "
+                     "verify ToS settings\n");
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 63));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // ToS set to 63
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        // Reset, then set ToS = 2 forcing the setsockopt code path.
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0));
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 2, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // ToS = 2, sockopt
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 63, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // ToS = 63, sockopt
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        tbCapture.Disconnect(tbChannel.videoChannel);
+    }
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+// API test: exercises ViENetwork argument validation and state checks —
+// external transport registration, local receiver setup, send destination,
+// source filters, ToS/DSCP via GQoS and setsockopt, Windows GQoS service
+// types, MTU limits, and the network observer / packet-timeout API.
+void ViEAutoTest::ViENetworkAPITest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    TbInterfaces ViE("ViENetworkAPITest"); // Create VIE
+    {
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+        //***************************************************************
+        //	Engine ready. Begin testing class
+        //***************************************************************
+
+        //
+        // External transport
+        //
+        TbExternalTransport testTransport(*ViE.network);
+        EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+            tbChannel.videoChannel, testTransport));
+        // Registering twice must fail.
+        EXPECT_NE(0, ViE.network->RegisterSendTransport(
+            tbChannel.videoChannel, testTransport));
+
+        // Minimal fake RTP header so the packet parses as RTP.
+        unsigned char packet[1500];
+        packet[0] = 0x80; // V=2, P=0, X=0, CC=0
+        packet[1] = 0x78; // M=0, PT = 120 (VP8)
+        // Injecting packets must fail until StartReceive is called.
+        EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 1500));
+        EXPECT_NE(0, ViE.network->ReceivedRTCPPacket(
+            tbChannel.videoChannel, packet, 1500));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 1500));
+        EXPECT_EQ(0, ViE.network->ReceivedRTCPPacket(
+            tbChannel.videoChannel, packet, 1500));
+        // Too short to be a valid RTP packet.
+        EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 11));
+        EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 11));
+        // NOTE(review): length 3000 exceeds the 1500-byte |packet| buffer;
+        // confirm ReceivedRTPPacket only validates the length argument and
+        // does not read past the buffer here.
+        EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 3000));
+        EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 3000));
+        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));  // Sending
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));  // Already deregistered
+
+        //
+        // Local receiver
+        //
+        // TODO (perkj) change when B 4239431 is fixed.
+        /*error = ViE.ptrViENetwork->SetLocalReceiver(tbChannel.videoChannel,
+                                                    1234, 1234, "127.0.0.1");
+        numberOfErrors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);*/
+        // Re-binding while not receiving is allowed, including new ports.
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1236, 1237, "127.0.0.1"));
+
+        unsigned short rtpPort = 0;
+        unsigned short rtcpPort = 0;
+        char ipAddress[64];
+        memset(ipAddress, 0, 64);
+        EXPECT_EQ(0, ViE.network->GetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        // Re-binding while receiving must fail; reading back still works.
+        EXPECT_NE(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->GetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+
+        //
+        // Send destination
+        //
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1236, 1237, 1234, 1235));
+
+        unsigned short sourceRtpPort = 0;
+        unsigned short sourceRtcpPort = 0;
+        EXPECT_EQ(0, ViE.network->GetSendDestination(
+            tbChannel.videoChannel, ipAddress, rtpPort, rtcpPort,
+            sourceRtpPort, sourceRtcpPort));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        // Not allowed while sending
+        EXPECT_NE(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
+        EXPECT_EQ(kViENetworkAlreadySending, ViE.base->LastError());
+
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->GetSendDestination(
+            tbChannel.videoChannel, ipAddress, rtpPort, rtcpPort,
+            sourceRtpPort, sourceRtcpPort));
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+        //
+        // Address information
+        //
+
+        // GetSourceInfo: Tested in functional test
+        EXPECT_EQ(0, ViE.network->GetLocalIP(ipAddress, false));
+
+        // TODO(unknown): IPv6
+
+        //
+        // Filter
+        //
+        // No filter set yet, so reading it back must fail.
+        EXPECT_NE(0, ViE.network->GetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, 1234, 1235, "10.10.10.10"));
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, 1236, 1237, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->GetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        // Zeros/NULL clear the filter; reading back fails again.
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, 0, 0, NULL));
+        EXPECT_NE(0, ViE.network->GetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+    }
+    {
+        TbVideoChannel tbChannel(ViE);  // Create a video channel
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234));
+
+        int DSCP = 0;
+        bool useSetSockOpt = false;
+        // SetSockOpt should work without a locally bound socket
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        EXPECT_EQ(0, DSCP);
+
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetSendToS(tbChannel.videoChannel, -1, true));
+
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetSendToS(tbChannel.videoChannel, 64, true));
+
+        // Valid
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 20, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+
+        EXPECT_EQ(20, DSCP);
+        EXPECT_TRUE(useSetSockOpt);
+
+        // Disable
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+        EXPECT_EQ(0, DSCP);
+
+        char myIpAddress[64];
+        memset(myIpAddress, 0, 64);
+        // Get local ip to be able to set ToS without setSockOpt
+        EXPECT_EQ(0, ViE.network->GetLocalIP(myIpAddress, false));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, myIpAddress));
+
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetSendToS(
+            tbChannel.videoChannel, -1, false));
+        EXPECT_NE(0, ViE.network->SetSendToS(
+            tbChannel.videoChannel, 64, false));  // Invalid input
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        EXPECT_EQ(0, DSCP);
+        int tos_result = ViE.network->SetSendToS(
+            tbChannel.videoChannel, 20, false);  // Valid
+        EXPECT_EQ(0, tos_result);
+        if (tos_result != 0)
+        {
+            ViETest::Log("ViESetSendToS error!.");
+            ViETest::Log("You must be admin to run these tests.");
+            ViETest::Log("On Win7 and late Vista, you need to right click the "
+                         "exe and choose");
+            ViETest::Log("\"Run as administrator\"\n");
+            getchar();
+        }
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+        EXPECT_EQ(20, DSCP);
+#ifdef _WIN32
+        EXPECT_FALSE(useSetSockOpt);
+#else // useSetSockOpt is true on Linux and Mac
+        EXPECT_TRUE(useSetSockOpt);
+#endif
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0, false));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+        EXPECT_EQ(0, DSCP);
+    }
+    {
+        // From qos.h. (*) -> supported by ViE
+        //
+        //  #define SERVICETYPE_NOTRAFFIC               0x00000000
+        //  #define SERVICETYPE_BESTEFFORT              0x00000001 (*)
+        //  #define SERVICETYPE_CONTROLLEDLOAD          0x00000002 (*)
+        //  #define SERVICETYPE_GUARANTEED              0x00000003 (*)
+        //  #define SERVICETYPE_NETWORK_UNAVAILABLE     0x00000004
+        //  #define SERVICETYPE_GENERAL_INFORMATION     0x00000005
+        //  #define SERVICETYPE_NOCHANGE                0x00000006
+        //  #define SERVICETYPE_NONCONFORMING           0x00000009
+        //  #define SERVICETYPE_NETWORK_CONTROL         0x0000000A
+        //  #define SERVICETYPE_QUALITATIVE             0x0000000D (*)
+        //
+        //  #define SERVICE_BESTEFFORT                  0x80010000
+        //  #define SERVICE_CONTROLLEDLOAD              0x80020000
+        //  #define SERVICE_GUARANTEED                  0x80040000
+        //  #define SERVICE_QUALITATIVE                 0x80200000
+
+        TbVideoChannel tbChannel(ViE);  // Create a video channel
+
+
+#if defined(_WIN32)
+        // No socket
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
+
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234));
+
+        // Sender not initialized
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 12345));
+
+        // Try to set all non-supported service types
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NOTRAFFIC));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NETWORK_UNAVAILABLE));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_GENERAL_INFORMATION));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NOCHANGE));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NONCONFORMING));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NOTRAFFIC));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NETWORK_CONTROL));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_BESTEFFORT));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_CONTROLLEDLOAD));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_GUARANTEED));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_QUALITATIVE));
+
+        // Loop through valid service settings
+        bool enabled = false;
+        int serviceType = 0;
+        int overrideDSCP = 0;
+
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_FALSE(enabled);
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_BESTEFFORT, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_CONTROLLEDLOAD));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_CONTROLLEDLOAD, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_GUARANTEED));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_GUARANTEED, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_QUALITATIVE));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_QUALITATIVE, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        // Disabling GQoS must be reflected by the getter.
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, false, SERVICETYPE_QUALITATIVE));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_FALSE(enabled);
+#endif
+    }
+    {
+        //
+        // MTU and packet burst
+        //
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE);
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetMTU(tbChannel.videoChannel, 1600));
+        // Valid input
+        EXPECT_EQ(0, ViE.network->SetMTU(tbChannel.videoChannel, 800));
+
+        //
+        // Observer and timeout
+        //
+        ViEAutoTestNetworkObserver vieTestObserver;
+        EXPECT_EQ(0, ViE.network->RegisterObserver(
+            tbChannel.videoChannel, vieTestObserver));
+        // Registering a second observer must fail.
+        EXPECT_NE(0, ViE.network->RegisterObserver(
+            tbChannel.videoChannel, vieTestObserver));
+        EXPECT_EQ(0, ViE.network->SetPeriodicDeadOrAliveStatus(
+            tbChannel.videoChannel, true)); // Observer registered
+        EXPECT_EQ(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
+
+        EXPECT_NE(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->SetPeriodicDeadOrAliveStatus(
+            tbChannel.videoChannel, true)); // No observer
+
+        // Packet timeout notification
+        EXPECT_EQ(0, ViE.network->SetPacketTimeoutNotification(
+            tbChannel.videoChannel, true, 10));
+    }
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_render.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_render.cc
new file mode 100644
index 0000000..2a73a6c
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_render.cc
@@ -0,0 +1,296 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_render.cc
+//
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "video_render.h"
+
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "tb_capture_device.h"
+
+#if defined(WIN32)
+#include <windows.h>
+#include <ddraw.h>
+#include <tchar.h>
+#elif defined(WEBRTC_LINUX)
+    //From wingdi.h
+    #undef RGB
+    #define RGB(r,g,b)          ((unsigned long)(((unsigned char)(r)|((unsigned short)((unsigned char)(g))<<8))|(((unsigned long)(unsigned char)(b))<<16)))
+    //From ddraw.h
+/*    typedef struct _DDCOLORKEY
+ {
+ DWORD       dwColorSpaceLowValue;   // low boundary of color space that is to
+ DWORD       dwColorSpaceHighValue;  // high boundary of color space that is
+ } DDCOLORKEY;*/
+#elif defined(WEBRTC_MAC)
+#endif
+
+class ViEAutoTestExternalRenderer: public webrtc::ExternalRenderer
+{
+public:
+    ViEAutoTestExternalRenderer() :
+        _width(0),
+        _height(0)
+    {
+    }
+    virtual int FrameSizeChange(unsigned int width, unsigned int height,
+                                unsigned int numberOfStreams)
+    {
+        _width = width;
+        _height = height;
+        return 0;
+    }
+
+    virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
+                             uint32_t time_stamp,
+                             int64_t render_time)
+    {
+        if (bufferSize != _width * _height * 3 / 2)
+        {
+            ViETest::Log("incorrect render buffer received, of length = %d\n",
+                         bufferSize);
+            return 0;
+        }
+        ViETest::Log("callback DeliverFrame is good\n");
+        return 0;
+    }
+
+public:
+    virtual ~ViEAutoTestExternalRenderer()
+    {
+    }
+private:
+    int _width, _height;
+};
+
+void ViEAutoTest::ViERenderStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    int rtpPort = 6000;
+
+    TbInterfaces ViE("ViERenderStandardTest");
+
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    TbCaptureDevice tbCapture(ViE); // Create a capture device
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+    tbChannel.StartReceive(rtpPort);
+    tbChannel.StartSend(rtpPort);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm2));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
+
+    ViETest::Log("\nCapture device is renderered in Window 1");
+    ViETest::Log("Remote stream is renderered in Window 2");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    // PIP and full screen rendering is not supported on Android
+#ifndef WEBRTC_ANDROID
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window2, 0, 0.75, 0.75, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+
+    ViETest::Log("\nCapture device is now rendered in Window 2, PiP.");
+    ViETest::Log("Switching to full screen rendering in %d seconds.\n",
+                 KAutoTestSleepTimeMs / 1000);
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm2));
+
+    // Destroy render module and create new in full screen mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4563, _window1, true, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.75f, 0.75f, 1.0f, 1.0f));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel.videoChannel, _window1, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+
+    // Destroy full screen render module and create new in normal mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4561, _window1, false, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+#endif
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+    tbCapture.Disconnect(tbChannel.videoChannel);
+}
+
+void ViEAutoTest::ViERenderExtendedTest()
+{
+    int rtpPort = 6000;
+
+    TbInterfaces ViE("ViERenderExtendedTest");
+
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    TbCaptureDevice tbCapture(ViE); // Create a capture device
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+    tbChannel.StartReceive(rtpPort);
+    tbChannel.StartSend(rtpPort);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm2));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
+
+    ViETest::Log("\nCapture device is renderered in Window 1");
+    ViETest::Log("Remote stream is renderered in Window 2");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+#ifdef _WIN32
+    ViETest::Log("\nConfiguring Window2");
+    ViETest::Log("you will see video only in first quadrant");
+    EXPECT_EQ(0, ViE.render->ConfigureRender(
+        tbChannel.videoChannel, 0, 0.0f, 0.0f, 0.5f, 0.5f));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("you will see video only in fourth quadrant");
+    EXPECT_EQ(0, ViE.render->ConfigureRender(
+        tbChannel.videoChannel, 0, 0.5f, 0.5f, 1.0f, 1.0f));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("normal video on Window2");
+    EXPECT_EQ(0, ViE.render->ConfigureRender(
+        tbChannel.videoChannel, 0, 0.0f, 0.0f, 1.0f, 1.0f));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+#endif
+
+    ViETest::Log("Mirroring Local Preview (Window1) Left-Right");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbCapture.captureId, true, false, true));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nMirroring Local Preview (Window1) Left-Right and Up-Down");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbCapture.captureId, true, true, true));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nMirroring Remote Window(Window2) Up-Down");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbChannel.videoChannel, true, true, false));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("Disabling Mirroing on Window1 and Window2");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbCapture.captureId, false, false, false));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbChannel.videoChannel, false, false, false));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nEnabling Full Screen render in 5 sec");
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm2));
+
+    // Destroy render module and create new in full screen mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4563, _window1, true, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0f, 0.0f, 1.0f, 1.0f));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nStop renderer");
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    ViETest::Log("\nRemove renderer");
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+
+    // Destroy full screen render module and create new for external rendering
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(4564, NULL, false,
+                                                   _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+
+    ViETest::Log("\nExternal Render Test");
+    ViEAutoTestExternalRenderer externalRenderObj;
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, webrtc::kVideoI420, &externalRenderObj));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+
+    // Destroy render module for external rendering and create new in normal
+    // mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4561, _window1, false, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+    tbCapture.Disconnect(tbChannel.videoChannel);
+}
+
+void ViEAutoTest::ViERenderAPITest()
+{
+    // TODO(unknown): add the real test cases
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
new file mode 100644
index 0000000..4ea8383
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
@@ -0,0 +1,682 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_rtp_rtcp.cc
+//
+#include <iostream>
+
+#include "engine_configurations.h"
+#include "tb_capture_device.h"
+#include "tb_external_transport.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "testsupport/fileutils.h"
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+
+class ViERtpObserver: public webrtc::ViERTPObserver
+{
+public:
+    ViERtpObserver()
+    {
+    }
+    virtual ~ViERtpObserver()
+    {
+    }
+
+    virtual void IncomingSSRCChanged(const int videoChannel,
+                                     const unsigned int SSRC)
+    {
+    }
+    virtual void IncomingCSRCChanged(const int videoChannel,
+                                     const unsigned int CSRC, const bool added)
+    {
+    }
+};
+
+class ViERtcpObserver: public webrtc::ViERTCPObserver
+{
+public:
+    int _channel;
+    unsigned char _subType;
+    unsigned int _name;
+    char* _data;
+    unsigned short _dataLength;
+
+    ViERtcpObserver() :
+        _channel(-1),
+        _subType(0),
+        _name(-1),
+        _data(NULL),
+        _dataLength(0)
+    {
+    }
+    ~ViERtcpObserver()
+    {
+        if (_data)
+        {
+            delete[] _data;
+        }
+    }
+    virtual void OnApplicationDataReceived(
+        const int videoChannel, const unsigned char subType,
+        const unsigned int name, const char* data,
+        const unsigned short dataLengthInBytes)
+    {
+        _channel = videoChannel;
+        _subType = subType;
+        _name = name;
+        if (dataLengthInBytes > _dataLength)
+        {
+            delete[] _data;
+            _data = NULL;
+        }
+        if (_data == NULL)
+        {
+            _data = new char[dataLengthInBytes];
+        }
+        memcpy(_data, data, dataLengthInBytes);
+        _dataLength = dataLengthInBytes;
+    }
+};
+
+void ViEAutoTest::ViERtpRtcpStandardTest()
+{
+    // ***************************************************************
+    // Begin create/initialize WebRTC Video Engine for testing
+    // ***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViERtpRtcpStandardTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    ViETest::Log("\n");
+    TbExternalTransport myTransport(*(ViE.network));
+
+    EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+        tbChannel.videoChannel, myTransport));
+
+    // ***************************************************************
+    // Engine ready. Begin testing class
+    // ***************************************************************
+
+    unsigned short startSequenceNumber = 12345;
+    ViETest::Log("Set start sequence number: %u", startSequenceNumber);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, startSequenceNumber));
+
+    myTransport.EnableSequenceNumberCheck();
+
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(2000);
+
+    unsigned short receivedSequenceNumber =
+        myTransport.GetFirstSequenceNumber();
+    ViETest::Log("First received sequence number: %u\n",
+                 receivedSequenceNumber);
+    EXPECT_EQ(startSequenceNumber, receivedSequenceNumber);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    //
+    // RTCP CName
+    //
+    ViETest::Log("Testing CName\n");
+    const char* sendCName = "ViEAutoTestCName\0";
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPCName(tbChannel.videoChannel, sendCName));
+
+    char returnCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
+    memset(returnCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPCName(
+        tbChannel.videoChannel, returnCName));
+    EXPECT_STRCASEEQ(sendCName, returnCName);
+
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(1000);
+
+    char remoteCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
+    memset(remoteCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteRTCPCName(
+        tbChannel.videoChannel, remoteCName));
+    EXPECT_STRCASEEQ(sendCName, remoteCName);
+
+    //
+    //  Statistics
+    //
+    // Stop and restart to clear stats
+    ViETest::Log("Testing statistics\n");
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    myTransport.ClearStats();
+    int rate = 20;
+    myTransport.SetPacketLoss(rate);
+
+    // Start send to verify sending stats
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, startSequenceNumber));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    unsigned short sentFractionsLost = 0;
+    unsigned int sentCumulativeLost = 0;
+    unsigned int sentExtendedMax = 0;
+    unsigned int sentJitter = 0;
+    int sentRttMs = 0;
+    unsigned short recFractionsLost = 0;
+    unsigned int recCumulativeLost = 0;
+    unsigned int recExtendedMax = 0;
+    unsigned int recJitter = 0;
+    int recRttMs = 0;
+
+    unsigned int sentTotalBitrate = 0;
+    unsigned int sentVideoBitrate = 0;
+    unsigned int sentFecBitrate = 0;
+    unsigned int sentNackBitrate = 0;
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
+        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
+        sentFecBitrate, sentNackBitrate));
+
+    EXPECT_GT(sentTotalBitrate, 0u);
+    EXPECT_EQ(sentFecBitrate, 0u);
+    EXPECT_EQ(sentNackBitrate, 0u);
+
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+
+    AutoTestSleep(2000);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetSentRTCPStatistics(
+        tbChannel.videoChannel, sentFractionsLost, sentCumulativeLost,
+        sentExtendedMax, sentJitter, sentRttMs));
+    EXPECT_GT(sentCumulativeLost, 0u);
+    EXPECT_GT(sentExtendedMax, startSequenceNumber);
+    EXPECT_GT(sentJitter, 0u);
+    EXPECT_GT(sentRttMs, 0);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetReceivedRTCPStatistics(
+        tbChannel.videoChannel, recFractionsLost, recCumulativeLost,
+        recExtendedMax, recJitter, recRttMs));
+
+    EXPECT_GT(recCumulativeLost, 0u);
+    EXPECT_GT(recExtendedMax, startSequenceNumber);
+    EXPECT_GT(recJitter, 0u);
+    EXPECT_GT(recRttMs, 0);
+
+    unsigned int estimated_bandwidth = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedSendBandwidth(
+        tbChannel.videoChannel,
+        &estimated_bandwidth));
+    EXPECT_GT(estimated_bandwidth, 0u);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedReceiveBandwidth(
+        tbChannel.videoChannel,
+        &estimated_bandwidth));
+    EXPECT_GT(estimated_bandwidth, 0u);
+
+    // Check that rec stats extended max is greater than what we've sent.
+    EXPECT_GE(recExtendedMax, sentExtendedMax);
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    //
+    // Test bandwidth statistics with NACK and FEC separately
+    //
+
+    myTransport.ClearStats();
+    myTransport.SetPacketLoss(rate);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetFECStatus(
+        tbChannel.videoChannel, true, 96, 97));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
+        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
+         sentFecBitrate, sentNackBitrate));
+
+    EXPECT_GT(sentTotalBitrate, 0u);
+    EXPECT_GE(sentFecBitrate, 10u);
+    EXPECT_EQ(sentNackBitrate, 0u);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetFECStatus(
+        tbChannel.videoChannel, false, 96, 97));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
+        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
+        sentFecBitrate, sentNackBitrate));
+
+    // TODO(holmer): Write a non-flaky verification of this API.
+    // numberOfErrors += ViETest::TestError(sentTotalBitrate > 0 &&
+    //                                      sentFecBitrate == 0 &&
+    //                                      sentNackBitrate > 0,
+    //                                      "ERROR: %s at line %d",
+    //                                      __FUNCTION__, __LINE__);
+
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, false));
+
+    //
+    // Keepalive
+    //
+    ViETest::Log("Testing RTP keep alive...\n");
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+
+    myTransport.SetPacketLoss(0);
+    myTransport.ClearStats();
+
+    const char keepAlivePT = 109;
+    unsigned int deltaTimeSeconds = 2;
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+        tbChannel.videoChannel, true, keepAlivePT, deltaTimeSeconds));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+        tbChannel.videoChannel, false, keepAlivePT, deltaTimeSeconds));
+
+    WebRtc_Word32 numRtpPackets = 0;
+    WebRtc_Word32 numDroppedPackets = 0;
+    WebRtc_Word32 numRtcpPackets = 0;
+    myTransport.GetStats(numRtpPackets, numDroppedPackets, numRtcpPackets);
+    WebRtc_Word32 expectedPackets = KAutoTestSleepTimeMs / (1000 *
+        static_cast<WebRtc_Word32>(deltaTimeSeconds));
+    EXPECT_EQ(expectedPackets, numRtpPackets);
+
+    // Test to set SSRC
+    unsigned int setSSRC = 0x01234567;
+    ViETest::Log("Set SSRC %u", setSSRC);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(tbChannel.videoChannel, setSSRC));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    myTransport.EnableSSRCCheck();
+
+    AutoTestSleep(2000);
+    unsigned int receivedSSRC = myTransport.ReceivedSSRC();
+    ViETest::Log("Received SSRC %u\n", receivedSSRC);
+    EXPECT_EQ(setSSRC, receivedSSRC);
+
+    unsigned int localSSRC = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(tbChannel.videoChannel, localSSRC));
+    EXPECT_EQ(setSSRC, localSSRC);
+
+    unsigned int remoteSSRC = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteSSRC(
+        tbChannel.videoChannel, remoteSSRC));
+    EXPECT_EQ(setSSRC, remoteSSRC);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    ViETest::Log("Testing RTP dump...\n");
+
+    std::string inDumpName =
+        ViETest::GetResultOutputPath() + "IncomingRTPDump.rtp";
+    std::string outDumpName =
+        ViETest::GetResultOutputPath() + "OutgoingRTPDump.rtp";
+    EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+        tbChannel.videoChannel, inDumpName.c_str(), webrtc::kRtpIncoming));
+    EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+        tbChannel.videoChannel, outDumpName.c_str(), webrtc::kRtpOutgoing));
+
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    AutoTestSleep(1000);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+        tbChannel.videoChannel, webrtc::kRtpIncoming));
+    EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+        tbChannel.videoChannel, webrtc::kRtpOutgoing));
+
+    // Make sure data was actually saved to the file and we stored the same
+    // amount of data in both files
+    FILE* inDump = fopen(inDumpName.c_str(), "r");
+    fseek(inDump, 0L, SEEK_END);
+    long inEndPos = ftell(inDump);
+    fclose(inDump);
+    FILE* outDump = fopen(outDumpName.c_str(), "r");
+    fseek(outDump, 0L, SEEK_END);
+    long outEndPos = ftell(outDump);
+    fclose(outDump);
+
+    EXPECT_GT(inEndPos, 0);
+    EXPECT_LT(inEndPos, outEndPos + 100);
+
+    // Deregister external transport
+    EXPECT_EQ(0, ViE.network->DeregisterSendTransport(tbChannel.videoChannel));
+
+    //***************************************************************
+    //  Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViERtpRtcpExtendedTest()
+{
+    //***************************************************************
+    //  Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    ViERtpRtcpStandardTest();
+
+    // Create VIE
+    TbInterfaces ViE("ViERtpRtcpExtendedTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    //tbChannel.StartReceive(rtpPort);
+    //tbChannel.StartSend(rtpPort);
+    TbExternalTransport myTransport(*(ViE.network));
+
+    EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+        tbChannel.videoChannel, myTransport));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    //***************************************************************
+    //  Engine ready. Begin testing class
+    //***************************************************************
+
+    //
+    // Application specific RTCP
+    //
+    //
+
+    ViERtcpObserver rtcpObserver;
+    EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTCPObserver(
+        tbChannel.videoChannel, rtcpObserver));
+
+    unsigned char subType = 3;
+    unsigned int name = static_cast<unsigned int> (0x41424344); // 'ABCD';
+    const char* data = "ViEAutoTest Data of length 32 -\0";
+    const unsigned short numBytes = 32;
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+        tbChannel.videoChannel, subType, name, data, numBytes));
+
+    ViETest::Log("Sending RTCP application data...\n");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(subType, rtcpObserver._subType);
+    EXPECT_STRCASEEQ(data, rtcpObserver._data);
+    EXPECT_EQ(name, rtcpObserver._name);
+    EXPECT_EQ(numBytes, rtcpObserver._dataLength);
+
+    ViETest::Log("\t RTCP application data received\n");
+
+    //***************************************************************
+    //  Testing finished. Tear down Video Engine
+    //***************************************************************
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    EXPECT_EQ(0, ViE.network->DeregisterSendTransport(tbChannel.videoChannel));
+}
+
+void ViEAutoTest::ViERtpRtcpAPITest()
+{
+    //***************************************************************
+    //  Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    // Create VIE
+    TbInterfaces ViE("ViERtpRtcpAPITest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    //***************************************************************
+    //  Engine ready. Begin testing class
+    //***************************************************************
+
+    //
+    // Check different RTCP modes
+    //
+    webrtc::ViERTCPMode rtcpMode = webrtc::kRtcpNone;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpCompound_RFC4585, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpCompound_RFC4585, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpNonCompound_RFC5506));
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpNonCompound_RFC5506, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpNone));
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpNone, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
+
+    //
+    // CName is tested in SimpleTest
+    // Start sequence number is tested in SimpleTest
+    //
+    const char* testCName = "ViEAutotestCName";
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPCName(
+        tbChannel.videoChannel, testCName));
+
+    char returnCName[256];
+    memset(returnCName, 0, 256);
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPCName(
+        tbChannel.videoChannel, returnCName));
+    EXPECT_STRCASEEQ(testCName, returnCName);
+
+    //
+    // SSRC
+    //
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(
+        tbChannel.videoChannel, 0x01234567));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(
+        tbChannel.videoChannel, 0x76543210));
+
+    unsigned int ssrc = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(tbChannel.videoChannel, ssrc));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 1000));
+    tbChannel.StartSend();
+    EXPECT_NE(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 12345));
+    tbChannel.StopSend();
+
+    //
+    // Start sequence number
+    //
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 12345));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 1000));
+    tbChannel.StartSend();
+    EXPECT_NE(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 12345));
+    tbChannel.StopSend();
+
+    //
+    // Application specific RTCP
+    //
+    {
+        unsigned char subType = 3;
+        unsigned int name = static_cast<unsigned int> (0x41424344); // 'ABCD';
+        const char* data = "ViEAutoTest Data of length 32 --";
+        const unsigned short numBytes = 32;
+
+        tbChannel.StartSend();
+        EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes));
+        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, NULL, numBytes)) <<
+                "Should fail on NULL input.";
+        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes - 1)) <<
+                "Should fail on incorrect length.";
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+            tbChannel.videoChannel, rtcpMode));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+            tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
+        tbChannel.StopSend();
+        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes));
+    }
+
+    //
+    // Statistics
+    //
+    // Tested in SimpleTest(), we'll get errors if we haven't received an RTCP
+    // packet.
+
+    //
+    // RTP Keepalive
+    //
+    {
+        char setPT = 123;
+        unsigned int setDeltaTime = 10;
+        bool enabled = false;
+        char getPT = 0;
+        unsigned int getDeltaTime = 0;
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, true, 119));
+        EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, true, setPT, setDeltaTime));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, false, setPT, setDeltaTime));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, true, setPT, setDeltaTime));
+        EXPECT_EQ(0, ViE.rtp_rtcp->GetRTPKeepAliveStatus(
+            tbChannel.videoChannel, enabled, getPT, getDeltaTime));
+
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(setPT, getPT);
+        EXPECT_EQ(setDeltaTime, getDeltaTime);
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+                    tbChannel.videoChannel, false, setPT, setDeltaTime));
+
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+                    tbChannel.videoChannel, true, setPT, setDeltaTime));
+
+        EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, true, setPT, setDeltaTime));
+
+        tbChannel.StopSend();
+        EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, enabled, getPT, 0));
+        EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
+            tbChannel.videoChannel, enabled, getPT, 61));
+    }
+    //
+    // RTP Dump
+    //
+    {
+#ifdef WEBRTC_ANDROID
+        const char* dumpName = "/sdcard/DumpFileName.rtp";
+#else
+        std::string output_file = webrtc::test::OutputPath() +
+            "DumpFileName.rtp";
+        const char* dumpName = output_file.c_str();
+#endif
+        EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+            tbChannel.videoChannel, dumpName, webrtc::kRtpIncoming));
+        EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpIncoming));
+        EXPECT_NE(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpIncoming));
+        EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+            tbChannel.videoChannel, dumpName, webrtc::kRtpOutgoing));
+        EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpOutgoing));
+        EXPECT_NE(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpOutgoing));
+        EXPECT_NE(0, ViE.rtp_rtcp->StartRTPDump(
+            tbChannel.videoChannel, dumpName, (webrtc::RTPDirections) 3));
+    }
+    //
+    // RTP/RTCP Observers
+    //
+    {
+        ViERtpObserver rtpObserver;
+        EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTPObserver(
+            tbChannel.videoChannel, rtpObserver));
+        EXPECT_NE(0, ViE.rtp_rtcp->RegisterRTPObserver(
+            tbChannel.videoChannel, rtpObserver));
+        EXPECT_EQ(0, ViE.rtp_rtcp->DeregisterRTPObserver(
+            tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.rtp_rtcp->DeregisterRTPObserver(
+            tbChannel.videoChannel));
+
+        ViERtcpObserver rtcpObserver;
+        EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTCPObserver(
+            tbChannel.videoChannel, rtcpObserver));
+        EXPECT_NE(0, ViE.rtp_rtcp->RegisterRTCPObserver(
+            tbChannel.videoChannel, rtcpObserver));
+        EXPECT_EQ(0, ViE.rtp_rtcp->DeregisterRTCPObserver(
+            tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.rtp_rtcp->DeregisterRTCPObserver(
+            tbChannel.videoChannel));
+    }
+    //
+    // PLI
+    //
+    {
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestNone));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestNone));
+    }
+    //
+    // NACK
+    //
+    {
+      EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
+    }
+
+    //***************************************************************
+    //  Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_simulcast.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_simulcast.cc
new file mode 100644
index 0000000..6d77115
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_simulcast.cc
@@ -0,0 +1,531 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <iostream>
+
+#include "common_types.h"
+#include "tb_external_transport.h"
+#include "voe_base.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+
+#define VCM_RED_PAYLOAD_TYPE        96
+#define VCM_ULPFEC_PAYLOAD_TYPE     97
+
+int VideoEngineSimulcastTest(void* window1, void* window2)
+{
+    //********************************************************
+    //  Begin create/initialize Video Engine for testing
+    //********************************************************
+
+    int error = 0;
+
+    //
+    // Create a VideoEngine instance
+    //
+    webrtc::VideoEngine* ptrViE = NULL;
+    ptrViE = webrtc::VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+
+    error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFilter\n");
+        return -1;
+    }
+
+
+    std::string trace_file =
+        ViETest::GetResultOutputPath() + "ViESimulcast_trace.txt";
+    error = ptrViE->SetTraceFile(trace_file.c_str());
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+
+    //
+    // Init VideoEngine and create a channel
+    //
+    webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+
+    //
+    // List available capture devices, allocate and connect.
+    //
+    webrtc::ViECapture* ptrViECapture =
+        webrtc::ViECapture::GetInterface(ptrViE);
+    if (ptrViECapture == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    printf("Available capture devices:\n");
+    int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+
+        error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                                KMaxDeviceNameLength, uniqueId,
+                                                KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        printf("\t %d. %s\n", captureIdx + 1, deviceName);
+    }
+    printf("\nChoose capture device: ");
+#ifdef WEBRTC_ANDROID
+    captureIdx = 0;
+    printf("0\n");
+#else
+    if (scanf("%d", &captureIdx) != 1)
+    {
+        printf("Error in scanf()\n");
+        return -1;
+    }
+    getchar();
+    captureIdx = captureIdx - 1; // Compensate for idx start at 1.
+#endif
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
+                                                 captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StartCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+
+    //
+    // RTP/RTCP settings
+    //
+    webrtc::ViERTP_RTCP* ptrViERtpRtcp =
+        webrtc::ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+                                         webrtc::kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
+        videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+
+    //
+    // Set up rendering
+    //
+    webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
+    if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+
+    error
+        = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
+                                      1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    //
+    // Setup codecs
+    //
+    webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }
+
+    // Check available codecs and prepare receive codecs
+    printf("\nAvailable codecs:\n");
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+    int codecIdx = 0;
+    for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+        // try to keep the test frame size small when I420
+        if (videoCodec.codecType != webrtc::kVideoCodecVP8)
+        {
+            continue;
+        }
+        error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != webrtc::kVideoCodecRED
+            && videoCodec.codecType != webrtc::kVideoCodecULPFEC)
+        {
+            printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
+        }
+        break;
+    }
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+
+    // Set spatial resolution option
+    videoCodec.width = 1280;
+    videoCodec.height = 720;
+
+    // simulcast settings
+    videoCodec.numberOfSimulcastStreams = 3;
+    videoCodec.simulcastStream[0].width = 320;
+    videoCodec.simulcastStream[0].height = 180;
+    videoCodec.simulcastStream[0].numberOfTemporalLayers = 0;
+    videoCodec.simulcastStream[0].maxBitrate = 100;
+    videoCodec.simulcastStream[0].qpMax = videoCodec.qpMax;
+
+    videoCodec.simulcastStream[1].width = 640;
+    videoCodec.simulcastStream[1].height = 360;
+    videoCodec.simulcastStream[1].numberOfTemporalLayers = 0;
+    videoCodec.simulcastStream[1].maxBitrate = 500;
+    videoCodec.simulcastStream[1].qpMax = videoCodec.qpMax;
+
+    videoCodec.simulcastStream[2].width = 1280;
+    videoCodec.simulcastStream[2].height = 720;
+    videoCodec.simulcastStream[2].numberOfTemporalLayers = 0;
+    videoCodec.simulcastStream[2].maxBitrate = 1200;
+    videoCodec.simulcastStream[2].qpMax = videoCodec.qpMax;
+
+    // Set start bit rate
+    std::string str;
+    std::cout << std::endl;
+    std::cout << "Choose start rate (in kbps). Press enter for default:  ";
+    std::getline(std::cin, str);
+    int startRate = atoi(str.c_str());
+    if(startRate != 0)
+    {
+        videoCodec.startBitrate=startRate;
+    }
+
+    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+    //
+    // Address settings
+    //
+    webrtc::ViENetwork* ptrViENetwork =
+        webrtc::ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+
+    // Setting External transport
+    TbExternalTransport extTransport(*(ptrViENetwork));
+
+    error = ptrViENetwork->RegisterSendTransport(videoChannel,
+                                                 extTransport);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::RegisterSendTransport\n");
+        return -1;
+    }
+
+    extTransport.SetPacketLoss(0);
+
+    // Set network delay value
+    extTransport.SetNetworkDelay(10);
+
+    extTransport.SetSSRCFilter(3);
+
+    for (int idx = 0; idx < 3; idx++)
+    {
+        error = ptrViERtpRtcp->SetLocalSSRC(videoChannel,
+                                            idx+1, // SSRC
+                                            webrtc::kViEStreamTypeNormal,
+                                            idx);
+        if (error == -1)
+        {
+            printf("ERROR in ViERTP_RTCP::SetLocalSSRC(idx:%d)\n", idx);
+            return -1;
+        }
+    }
+
+    error = ptrViEBase->StartReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StartReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StartSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StartSend\n");
+        return -1;
+    }
+
+    //********************************************************
+    //  Engine started
+    //********************************************************
+
+    printf("\nSimulcast call started\n\n");
+    do
+    {
+        printf("Enter new SSRC filter 1,2 or 3\n");
+        printf("Press enter to stop...");
+        str.clear();
+        std::getline(std::cin, str);
+        if (!str.empty())
+        {
+            int ssrc = atoi(str.c_str());
+            if (ssrc > 0 && ssrc < 4)
+            {
+                extTransport.SetSSRCFilter(ssrc);
+            } else
+            {
+                printf("Invalid SSRC\n");
+            }
+        } else
+        {
+            break;
+        }
+    } while (true);
+
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+
+    error = ptrViEBase->StopReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StopSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StopCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1;
+    }
+
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+
+    bool deleted = webrtc::VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+    return 0;
+
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+}
+
+int ViEAutoTest::ViESimulcastCall()
+{
+    ViETest::Log(" ");
+    ViETest::Log("========================================");
+    ViETest::Log(" ViE Autotest Simulcast Call\n");
+
+    if (VideoEngineSimulcastTest(_window1, _window2) == 0)
+    {
+        ViETest::Log(" ");
+        ViETest::Log(" ViE Autotest Simulcast Call Done");
+        ViETest::Log("========================================");
+        ViETest::Log(" ");
+
+        return 0;
+    }
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Simulcast Call Failed");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+    return 1;
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_autotest_win.cc b/trunk/src/video_engine/test/auto_test/source/vie_autotest_win.cc
new file mode 100755
index 0000000..7887767
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_autotest_win.cc
@@ -0,0 +1,214 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_windows.cc
+//
+
+#include "vie_autotest_windows.h"
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest_main.h"
+
+#include "engine_configurations.h"
+#include "critical_section_wrapper.h"
+#include "thread_wrapper.h"
+
+#include <windows.h>
+
+#ifdef _DEBUG
+//#include "vld.h"
+#endif
+
+// Disable Visual studio warnings
+// 'this' : used in base member initializer list
+#pragma warning(disable: 4355)
+// new behavior: elements of array 'XXX' will be default initialized
+#pragma warning(disable: 4351)
+
+LRESULT CALLBACK ViEAutoTestWinProc(HWND hWnd, UINT uMsg, WPARAM wParam,
+                                    LPARAM lParam) {
+  switch (uMsg) {
+    case WM_DESTROY:
+      PostQuitMessage( WM_QUIT);
+      break;
+    case WM_COMMAND:
+      break;
+  }
+  return DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
+
+ViEAutoTestWindowManager::ViEAutoTestWindowManager()
+    : _window1(NULL),
+      _window2(NULL),
+      _terminate(false),
+      _eventThread(*webrtc::ThreadWrapper::CreateThread(
+          EventProcess, this, webrtc::kNormalPriority,
+          "ViEAutotestEventThread")),
+      _crit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
+      _hwnd1(NULL),
+      _hwnd2(NULL),
+      _hwnd1Size(),
+      _hwnd2Size(),
+      _hwnd1Title(),
+      _hwnd2Title() {
+}
+
+ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
+  if (_hwnd1) {
+    ViEDestroyWindow(_hwnd1);
+  }
+  if (_hwnd2) {
+    ViEDestroyWindow(_hwnd2);
+  }
+  delete &_crit;
+}
+
+void* ViEAutoTestWindowManager::GetWindow1() {
+  return _window1;
+}
+
+void* ViEAutoTestWindowManager::GetWindow2() {
+  return _window2;
+}
+
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            void* window1Title,
+                                            void* window2Title) {
+  _hwnd1Size.Copy(window1Size);
+  _hwnd2Size.Copy(window2Size);
+  memcpy(_hwnd1Title, window1Title, TITLE_LENGTH);
+  memcpy(_hwnd2Title, window2Title, TITLE_LENGTH);
+
+  unsigned int tId = 0;
+  _eventThread.Start(tId);
+
+  do {
+    _crit.Enter();
+    if (_window1 != NULL) {
+      break;
+    }
+    _crit.Leave();
+    AutoTestSleep(10);
+  } while (true);
+  _crit.Leave();
+  return 0;
+}
+
+int ViEAutoTestWindowManager::TerminateWindows() {
+  _eventThread.SetNotAlive();
+
+  _terminate = true;
+  if (_eventThread.Stop()) {
+    _crit.Enter();
+    delete &_eventThread;
+    _crit.Leave();
+  }
+
+  return 0;
+}
+
+bool ViEAutoTestWindowManager::EventProcess(void* obj) {
+  return static_cast<ViEAutoTestWindowManager*> (obj)->EventLoop();
+}
+
+bool ViEAutoTestWindowManager::EventLoop() {
+  _crit.Enter();
+
+  ViECreateWindow(_hwnd1, _hwnd1Size.origin.x, _hwnd1Size.origin.y,
+                  _hwnd1Size.size.width, _hwnd1Size.size.height, _hwnd1Title);
+  ViECreateWindow(_hwnd2, _hwnd2Size.origin.x, _hwnd2Size.origin.y,
+                  _hwnd2Size.size.width, _hwnd2Size.size.height, _hwnd2Title);
+
+  _window1 = (void*) _hwnd1;
+  _window2 = (void*) _hwnd2;
+  MSG msg;
+  while (!_terminate) {
+    if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
+      TranslateMessage(&msg);
+      DispatchMessage(&msg);
+    }
+    _crit.Leave();
+    AutoTestSleep(10);
+    _crit.Enter();
+  }
+  ViEDestroyWindow(_hwnd1);
+  ViEDestroyWindow(_hwnd2);
+  _crit.Leave();
+
+  return false;
+}
+
+int ViEAutoTestWindowManager::ViECreateWindow(HWND &hwndMain, int xPos,
+                                              int yPos, int width, int height,
+                                              TCHAR* className) {
+  HINSTANCE hinst = GetModuleHandle(0);
+  WNDCLASSEX wcx;
+  wcx.hInstance = hinst;
+  wcx.lpszClassName = className;
+  wcx.lpfnWndProc = (WNDPROC) ViEAutoTestWinProc;
+  wcx.style = CS_DBLCLKS;
+  wcx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
+  wcx.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
+  wcx.hCursor = LoadCursor(NULL, IDC_ARROW);
+  wcx.lpszMenuName = NULL;
+  wcx.cbSize = sizeof(WNDCLASSEX);
+  wcx.cbClsExtra = 0;
+  wcx.cbWndExtra = 0;
+  wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
+
+  RegisterClassEx(&wcx);
+
+  // Create the main window.
+  hwndMain = CreateWindowEx(0,          // no extended styles
+                            className,  // class name
+                            className,  // window name
+                            WS_OVERLAPPED | WS_THICKFRAME,  // overlapped window
+                            xPos,    // horizontal position
+                            yPos,    // vertical position
+                            width,   // width
+                            height,  // height
+                            (HWND) NULL,   // no parent or owner window
+                            (HMENU) NULL,  // class menu used
+                            hinst,  // instance handle
+                            NULL);  // no window creation data
+
+  if (!hwndMain) {
+    int error = GetLastError();
+    return -1;
+  }
+
+  // Show the window using the flag specified by the program
+  // that started the application, and send the application
+  // a WM_PAINT message.
+  ShowWindow(hwndMain, SW_SHOWDEFAULT);
+  UpdateWindow(hwndMain);
+
+  ::SetWindowPos(hwndMain, HWND_TOP, xPos, yPos, width, height,
+                 SWP_FRAMECHANGED);
+
+  return 0;
+}
+
+int ViEAutoTestWindowManager::ViEDestroyWindow(HWND& hwnd) {
+  ::DestroyWindow(hwnd);
+  return 0;
+}
+
+bool ViEAutoTestWindowManager::SetTopmostWindow() {
+  // Meant to put terminal window on top
+  return true;
+}
+
+int main(int argc, char* argv[]) {
+  ViEAutoTestMain auto_test;
+  return auto_test.RunTests(argc, argv);
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc b/trunk/src/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
new file mode 100644
index 0000000..aff5118
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
@@ -0,0 +1,160 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
+
+#include "video_engine/test/auto_test/interface/tb_interfaces.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/helpers/vie_fake_camera.h"
+#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
+#include "video_engine/test/auto_test/primitives/base_primitives.h"
+#include "video_engine/test/auto_test/primitives/codec_primitives.h"
+#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+
+bool ViEFileBasedComparisonTests::TestCallSetup(
+    const std::string& i420_video_file,
+    int width,
+    int height,
+    ViEToFileRenderer* local_file_renderer,
+    ViEToFileRenderer* remote_file_renderer) {
+
+  TbInterfaces interfaces("TestCallSetup");
+
+  int video_channel = -1;
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+
+  ViEFakeCamera fake_camera(interfaces.capture);
+  if (!fake_camera.StartCameraInNewThread(i420_video_file,
+                                          width,
+                                          height)) {
+    // No point in continuing if we have no proper video source
+    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
+        ": aborting test...";
+    return false;
+  }
+  int capture_id = fake_camera.capture_id();
+
+  // Apparently, we need to connect external capture devices, but we should
+  // not start them since the external device is not a proper device.
+  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+      capture_id, video_channel));
+
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+
+  webrtc::ViERender *render_interface = interfaces.render;
+
+  RenderToFile(render_interface, capture_id, local_file_renderer);
+  RenderToFile(render_interface, video_channel, remote_file_renderer);
+
+  // Run the test itself:
+  const WebRtc_UWord8* device_name =
+      reinterpret_cast<const WebRtc_UWord8*>("Fake Capture Device");
+
+  ::TestI420CallSetup(interfaces.codec, interfaces.video_engine,
+                      interfaces.base, interfaces.network, video_channel,
+                      device_name);
+
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  EXPECT_EQ(0, interfaces.base->StopReceive(video_channel));
+
+  StopAndRemoveRenderers(interfaces.base, render_interface, video_channel,
+                         capture_id);
+
+  interfaces.capture->DisconnectCaptureDevice(video_channel);
+
+  // Stop sending data, clean up the camera thread and release the capture
+  // device. Note that this all happens after StopEverything, so this
+  // tests that the system doesn't mind that the external capture device sends
+  // data after rendering has been stopped.
+  fake_camera.StopCamera();
+
+  EXPECT_EQ(0, interfaces.base->DeleteChannel(video_channel));
+  return true;
+}
+
+bool ViEFileBasedComparisonTests::TestCodecs(
+    const std::string& i420_video_file,
+    int width,
+    int height,
+    ViEToFileRenderer* local_file_renderer,
+    ViEToFileRenderer* remote_file_renderer) {
+
+  TbInterfaces interfaces = TbInterfaces("TestCodecs");
+
+  ViEFakeCamera fake_camera(interfaces.capture);
+  if (!fake_camera.StartCameraInNewThread(i420_video_file, width, height)) {
+    // No point in continuing if we have no proper video source
+    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
+        ": aborting test...";
+    return false;
+  }
+
+  int video_channel = -1;
+  int capture_id = fake_camera.capture_id();
+
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+      capture_id, video_channel));
+
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+
+  RenderToFile(interfaces.render, capture_id, local_file_renderer);
+  RenderToFile(interfaces.render, video_channel, remote_file_renderer);
+
+  // Force the codec resolution to what our input video is so we can make
+  // comparisons later. Our comparison algorithms wouldn't like scaling.
+  ::TestCodecs(interfaces, capture_id, video_channel, width, height);
+
+  fake_camera.StopCamera();
+  return true;
+}
+
+void ViEFileBasedComparisonTests::TestFullStack(
+    const std::string& i420_video_file,
+    int width,
+    int height,
+    int bit_rate_kbps,
+    int packet_loss_percent,
+    int network_delay_ms,
+    ViEToFileRenderer* local_file_renderer,
+    ViEToFileRenderer* remote_file_renderer,
+    FrameDropDetector* frame_drop_detector) {
+  TbInterfaces interfaces = TbInterfaces("TestFullStack");
+
+  // Setup camera capturing from file.
+  ViEFakeCamera fake_camera(interfaces.capture);
+  if (!fake_camera.StartCameraInNewThread(i420_video_file, width, height)) {
+    // No point in continuing if we have no proper video source
+    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
+        ": aborting test...";
+    return;
+  }
+  int video_channel = -1;
+  int capture_id = fake_camera.capture_id();
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+
+  // Must set SSRC to avoid SSRC collision detection since we're sending and
+  // receiving from the same machine (that would cause frames being discarded
+  // and decoder reset).
+  EXPECT_EQ(0, interfaces.rtp_rtcp->SetLocalSSRC(video_channel, 12345));
+
+  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+      capture_id, video_channel));
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+  RenderToFile(interfaces.render, capture_id, local_file_renderer);
+  RenderToFile(interfaces.render, video_channel, remote_file_renderer);
+
+  ::TestFullStack(interfaces, capture_id, video_channel, width, height,
+                  bit_rate_kbps, packet_loss_percent, network_delay_ms,
+                  frame_drop_detector);
+  EXPECT_TRUE(fake_camera.StopCamera());
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_linux.cc b/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_linux.cc
new file mode 100644
index 0000000..1b16878
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_linux.cc
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_window_manager_factory.h"
+
+#include "vie_autotest_linux.h"
+
+ViEAutoTestWindowManagerInterface*
+ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
+  return new ViEAutoTestWindowManager();
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_mac.mm b/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_mac.mm
new file mode 100644
index 0000000..806d10f
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_mac.mm
@@ -0,0 +1,23 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_window_manager_factory.h"
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+#include "vie_autotest_mac_cocoa.h"
+#elif defined(CARBON_RENDERING)
+#include "vie_autotest_mac_carbon.h"
+#endif
+
+ViEAutoTestWindowManagerInterface*
+ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
+  return new ViEAutoTestWindowManager();
+}
diff --git a/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_win.cc b/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_win.cc
new file mode 100644
index 0000000..11114fd
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/source/vie_window_manager_factory_win.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "vie_window_manager_factory.h"
+
+#include "vie_autotest_windows.h"
+
+ViEAutoTestWindowManagerInterface*
+ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
+  return new ViEAutoTestWindowManager();
+}
diff --git a/trunk/src/video_engine/test/auto_test/vie_auto_test.gypi b/trunk/src/video_engine/test/auto_test/vie_auto_test.gypi
new file mode 100644
index 0000000..cf403b7
--- /dev/null
+++ b/trunk/src/video_engine/test/auto_test/vie_auto_test.gypi
@@ -0,0 +1,151 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'vie_auto_test',
+      'type': 'executable',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/modules/modules.gyp:video_render_module',
+        '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/../third_party/google-gflags/google-gflags.gyp:google-gflags',
+        '<(webrtc_root)/../test/metrics.gyp:metrics',
+        '<(webrtc_root)/../test/test.gyp:test_support',
+        'video_engine_core',
+      ],
+      'include_dirs': [
+        'interface/',
+        'helpers/',
+        'primitives',
+        '../../include',
+        '../..',
+        '../../../modules/video_coding/codecs/interface',
+        '../../../common_video/interface',
+      ],
+      'sources': [
+        'interface/tb_capture_device.h',
+        'interface/tb_external_transport.h',
+        'interface/tb_I420_codec.h',
+        'interface/tb_interfaces.h',
+        'interface/tb_video_channel.h',
+        'interface/vie_autotest.h',
+        'interface/vie_autotest_defines.h',
+        'interface/vie_autotest_linux.h',
+        'interface/vie_autotest_mac_carbon.h',
+        'interface/vie_autotest_mac_cocoa.h',
+        'interface/vie_autotest_main.h',
+        'interface/vie_autotest_window_manager_interface.h',
+        'interface/vie_autotest_windows.h',
+        'interface/vie_file_based_comparison_tests.h',
+        'interface/vie_window_manager_factory.h',
+
+        # Helper classes
+        'helpers/bit_flip_encryption.cc',
+        'helpers/bit_flip_encryption.h',
+        'helpers/random_encryption.cc',
+        'helpers/random_encryption.h',
+        'helpers/vie_fake_camera.cc',
+        'helpers/vie_fake_camera.h',
+        'helpers/vie_file_capture_device.cc',
+        'helpers/vie_file_capture_device.h',
+        'helpers/vie_to_file_renderer.cc',
+        'helpers/vie_to_file_renderer.h',
+        'helpers/vie_window_creator.cc',
+        'helpers/vie_window_creator.h',
+
+        # New, fully automated tests
+        'automated/legacy_fixture.cc',
+        'automated/two_windows_fixture.cc',
+        'automated/vie_api_integration_test.cc',
+        'automated/vie_extended_integration_test.cc',
+        'automated/vie_rtp_fuzz_test.cc',
+        'automated/vie_standard_integration_test.cc',
+        'automated/vie_video_verification_test.cc',
+
+        # Test primitives
+        'primitives/base_primitives.cc',
+        'primitives/base_primitives.h',
+        'primitives/codec_primitives.cc',
+        'primitives/codec_primitives.h',
+        'primitives/framedrop_primitives.h',
+        'primitives/framedrop_primitives.cc',
+        'primitives/framedrop_primitives_unittest.cc',
+        'primitives/general_primitives.cc',
+        'primitives/general_primitives.h',
+
+        # Platform independent
+        'source/tb_capture_device.cc',
+        'source/tb_external_transport.cc',
+        'source/tb_I420_codec.cc',
+        'source/tb_interfaces.cc',
+        'source/tb_video_channel.cc',
+        'source/vie_autotest.cc',
+        'source/vie_autotest_base.cc',
+        'source/vie_autotest_capture.cc',
+        'source/vie_autotest_codec.cc',
+        'source/vie_autotest_encryption.cc',
+        'source/vie_autotest_file.cc',
+        'source/vie_autotest_image_process.cc',
+        'source/vie_autotest_loopback.cc',
+        'source/vie_autotest_main.cc',
+        'source/vie_autotest_network.cc',
+        'source/vie_autotest_render.cc',
+        'source/vie_autotest_rtp_rtcp.cc',
+        'source/vie_autotest_custom_call.cc',
+        'source/vie_autotest_simulcast.cc',
+        'source/vie_file_based_comparison_tests.cc',
+
+        # Platform dependent
+        # Linux
+        'source/vie_autotest_linux.cc',
+        'source/vie_window_manager_factory_linux.cc',
+        # Mac
+        'source/vie_autotest_cocoa_mac.mm',
+        'source/vie_autotest_carbon_mac.cc',
+        'source/vie_window_manager_factory_mac.mm',
+        # Windows
+        'source/vie_autotest_win.cc',
+        'source/vie_window_manager_factory_win.cc',
+      ],
+      'conditions': [
+        # TODO(andrew): this likely isn't an actual dependency. It should be
+        # included in webrtc.gyp or video_engine.gyp instead.
+        ['OS=="win"', {
+          'dependencies': [
+            'vie_win_test',
+          ],
+        }],
+        ['OS=="linux"', {
+          # TODO(andrew): these should be provided directly by the projects
+          #   which require them instead.
+          'libraries': [
+            '-lXext',
+            '-lX11',
+          ],
+        }],
+        ['OS=="mac"', {
+          'xcode_settings': {
+            'OTHER_LDFLAGS': [
+              '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL -framework CoreVideo -framework CoreAudio -framework AudioToolbox',
+            ],
+          },
+        }],
+      ], # conditions
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/video_engine/video_engine.gyp b/trunk/src/video_engine/video_engine.gyp
new file mode 100644
index 0000000..261292c
--- /dev/null
+++ b/trunk/src/video_engine/video_engine.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    './video_engine_core.gypi',
+  ],
+
+  # Test targets, excluded when building with Chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'includes': [
+        'test/auto_test/vie_auto_test.gypi',
+        'main/test/WindowsTest/windowstest.gypi',
+      ],
+    }],
+  ],
+}
+
diff --git a/trunk/src/video_engine/video_engine_core.gypi b/trunk/src/video_engine/video_engine_core.gypi
new file mode 100644
index 0000000..38015af
--- /dev/null
+++ b/trunk/src/video_engine/video_engine_core.gypi
@@ -0,0 +1,160 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_engine_core',
+      'type': '<(library)',
+      'dependencies': [
+
+        # common_video
+       '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+       '<(webrtc_root)/common_video/common_video.gyp:webrtc_jpeg',
+
+        # ModulesShared
+        '<(webrtc_root)/modules/modules.gyp:media_file',
+        '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+        '<(webrtc_root)/modules/modules.gyp:udp_transport',
+        '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
+
+        # ModulesVideo
+        '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+        '<(webrtc_root)/modules/modules.gyp:webrtc_video_coding',
+        '<(webrtc_root)/modules/modules.gyp:video_processing',
+        '<(webrtc_root)/modules/modules.gyp:video_render_module',
+
+        # VoiceEngine
+        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+
+        # system_wrappers
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        'include',
+        '../common_video/interface',
+        '../modules/video_capture/main/interface',
+        '../modules/video_render/main/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        # interface
+        'include/vie_base.h',
+        'include/vie_capture.h',
+        'include/vie_codec.h',
+        'include/vie_encryption.h',
+        'include/vie_errors.h',
+        'include/vie_external_codec.h',
+        'include/vie_file.h',
+        'include/vie_image_process.h',
+        'include/vie_network.h',
+        'include/vie_render.h',
+        'include/vie_rtp_rtcp.h',
+
+        # headers
+        'vie_base_impl.h',
+        'vie_capture_impl.h',
+        'vie_codec_impl.h',
+        'vie_defines.h',
+        'vie_encryption_impl.h',
+        'vie_external_codec_impl.h',
+        'vie_file_impl.h',
+        'vie_image_process_impl.h',
+        'vie_impl.h',
+        'vie_network_impl.h',
+        'vie_ref_count.h',
+        'vie_remb.h',
+        'vie_render_impl.h',
+        'vie_rtp_rtcp_impl.h',
+        'vie_shared_data.h',
+        'vie_capturer.h',
+        'vie_channel.h',
+        'vie_channel_manager.h',
+        'vie_encoder.h',
+        'vie_file_image.h',
+        'vie_file_player.h',
+        'vie_file_recorder.h',
+        'vie_frame_provider_base.h',
+        'vie_input_manager.h',
+        'vie_manager_base.h',
+        'vie_performance_monitor.h',
+        'vie_receiver.h',
+        'vie_renderer.h',
+        'vie_render_manager.h',
+        'vie_sender.h',
+        'vie_sync_module.h',
+
+        # ViE
+        'vie_base_impl.cc',
+        'vie_capture_impl.cc',
+        'vie_codec_impl.cc',
+        'vie_encryption_impl.cc',
+        'vie_external_codec_impl.cc',
+        'vie_file_impl.cc',
+        'vie_image_process_impl.cc',
+        'vie_impl.cc',
+        'vie_network_impl.cc',
+        'vie_ref_count.cc',
+        'vie_render_impl.cc',
+        'vie_rtp_rtcp_impl.cc',
+        'vie_shared_data.cc',
+        'vie_capturer.cc',
+        'vie_channel.cc',
+        'vie_channel_manager.cc',
+        'vie_encoder.cc',
+        'vie_file_image.cc',
+        'vie_file_player.cc',
+        'vie_file_recorder.cc',
+        'vie_frame_provider_base.cc',
+        'vie_input_manager.cc',
+        'vie_manager_base.cc',
+        'vie_performance_monitor.cc',
+        'vie_receiver.cc',
+        'vie_remb.cc',
+        'vie_renderer.cc',
+        'vie_render_manager.cc',
+        'vie_sender.cc',
+        'vie_sync_module.cc',
+      ], # source
+    },
+  ], # targets
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'video_engine_core_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'video_engine_core',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/../testing/gmock.gyp:gmock',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+          ],
+          'include_dirs': [
+            '..',
+            '../modules/interface',
+            '../modules/rtp_rtcp/interface',
+          ],
+          'sources': [
+            'vie_remb_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/video_engine/vie_base_impl.cc b/trunk/src/video_engine/vie_base_impl.cc
new file mode 100644
index 0000000..4f0eae2
--- /dev/null
+++ b/trunk/src/video_engine/vie_base_impl.cc
@@ -0,0 +1,494 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "critical_section_wrapper.h"
+#include "engine_configurations.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "video_coding.h"
+#include "video_processing.h"
+#include "video_render.h"
+#include "vie_base_impl.h"
+#include "vie_channel.h"
+#include "vie_channel_manager.h"
+#include "vie_defines.h"
+#include "vie_encoder.h"
+#include "vie_errors.h"
+#include "vie_impl.h"
+#include "vie_input_manager.h"
+#include "vie_performance_monitor.h"
+#include "vie_shared_data.h"
+
+namespace webrtc {
+
+ViEBase* ViEBase::GetInterface(VideoEngine* video_engine) {
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEBaseImpl* vie_base_impl = vie_impl;
+  (*vie_base_impl)++;  // Increase ref count.
+
+  return vie_base_impl;
+}
+
+int ViEBaseImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
+               "ViEBase::Release()");
+  (*this)--;  // Decrease ref count.
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
+                 "ViEBase release too many times");
+    shared_data_.SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_.instance_id(),
+               "ViEBase reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEBaseImpl::ViEBaseImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
+               "ViEBaseImpl::ViEBaseImpl() Ctor");
+}
+
+ViEBaseImpl::~ViEBaseImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
+               "ViEBaseImpl::~ViEBaseImpl() Dtor");
+}
+
+int ViEBaseImpl::Init() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
+               "Init");
+  if (shared_data_.Initialized()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
+                 "Init called twice");
+    return 0;
+  }
+
+  shared_data_.SetInitialized();
+  return 0;
+}
+
+int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->SetVoiceEngine(voice_engine) != 0) {
+    shared_data_.SetLastError(kViEBaseVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::CreateChannel(int& video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->CreateChannel(video_channel) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: Could not create channel", __FUNCTION__);
+    video_channel = -1;
+    shared_data_.SetLastError(kViEBaseChannelCreationFailed);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s: channel created: %d", __FUNCTION__, video_channel);
+  return 0;
+}
+
+int ViEBaseImpl::CreateChannel(int& video_channel, int original_channel) {
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  if (!cs.Channel(original_channel)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - original_channel %d does not exist.", __FUNCTION__,
+                 original_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->CreateChannel(video_channel,
+                                     original_channel) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: Could not create channel", __FUNCTION__);
+    video_channel = -1;
+    shared_data_.SetLastError(kViEBaseChannelCreationFailed);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s: channel created: %d", __FUNCTION__, video_channel);
+  return 0;
+}
+
+int ViEBaseImpl::DeleteChannel(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s(%d)", __FUNCTION__, video_channel);
+
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  {
+    ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+    ViEChannel* vie_channel = cs.Channel(video_channel);
+    if (!vie_channel) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_.instance_id()),
+                   "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
+      shared_data_.SetLastError(kViEBaseInvalidChannelId);
+      return -1;
+    }
+
+    // Deregister the ViEEncoder if no other channel is using it.
+    ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+    if (cs.ChannelUsingViEEncoder(video_channel) == false) {
+      ViEInputManagerScoped is(*(shared_data_.input_manager()));
+      ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+      if (provider) {
+        provider->DeregisterFrameCallback(vie_encoder);
+      }
+    }
+  }
+
+  if (shared_data_.channel_manager()->DeleteChannel(video_channel) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: Could not delete channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s: channel deleted: %d", __FUNCTION__, video_channel);
+  return 0;
+}
+
+int ViEBaseImpl::ConnectAudioChannel(const int video_channel,
+                                     const int audio_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s(%d)", __FUNCTION__, video_channel);
+
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  if (!cs.Channel(video_channel)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->ConnectVoiceChannel(video_channel,
+                                                          audio_channel) != 0) {
+    shared_data_.SetLastError(kViEBaseVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::DisconnectAudioChannel(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s(%d)", __FUNCTION__, video_channel);
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  if (!cs.Channel(video_channel)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->DisconnectVoiceChannel(
+      video_channel) != 0) {
+    shared_data_.SetLastError(kViEBaseVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::StartSend(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  // Verify no other channel using the same encoder is sending.
+  ChannelList channels;
+  cs.ChannelsUsingViEEncoder(video_channel, &channels);
+  for (ChannelList::iterator it = channels.begin(); it != channels.end();
+       ++it) {
+    if ((*it)->Sending()) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_.instance_id(), video_channel),
+                   "A channel using this encoder is already sending");
+      shared_data_.SetLastError(kViEBaseAlreadySending);
+      return -1;
+    }
+  }
+
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    assert(false);
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Could not find encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    return -1;
+  }
+
+  // Pause and trigger a key frame.
+  vie_encoder->Pause();
+  WebRtc_Word32 error = vie_channel->StartSend();
+  if (error != 0) {
+    vie_encoder->Restart();
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Could not start sending on channel %d", __FUNCTION__,
+                 video_channel);
+    // Keep the specific error code; do not overwrite it with a generic one.
+    shared_data_.SetLastError(error == kViEBaseAlreadySending ?
+                                  kViEBaseAlreadySending :
+                                  kViEBaseUnknownError);
+    return -1;
+  }
+  vie_encoder->SendKeyFrame();
+  vie_encoder->Restart();
+  return 0;
+}
+
+int ViEBaseImpl::StopSend(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  WebRtc_Word32 error = vie_channel->StopSend();
+  if (error != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Could not stop sending on channel %d", __FUNCTION__,
+                 video_channel);
+    if (error == kViEBaseNotSending) {
+      shared_data_.SetLastError(kViEBaseNotSending);
+    } else {
+      shared_data_.SetLastError(kViEBaseUnknownError);
+    }
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::StartReceive(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Receiving()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d already receiving.", __FUNCTION__,
+                 video_channel);
+    shared_data_.SetLastError(kViEBaseAlreadyReceiving);
+    return -1;
+  }
+  if (vie_channel->StartReceive() != 0) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::StopReceive(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->StopReceive() != 0) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::RegisterObserver(ViEBaseObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+  if (shared_data_.vie_performance_monitor()->ViEBaseObserverRegistered()) {
+    shared_data_.SetLastError(kViEBaseObserverAlreadyRegistered);
+    return -1;
+  }
+  return shared_data_.vie_performance_monitor()->Init(&observer);
+}
+
+int ViEBaseImpl::DeregisterObserver() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_.vie_performance_monitor()->ViEBaseObserverRegistered()) {
+    shared_data_.SetLastError(kViEBaseObserverNotRegistered);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_.instance_id(),
+                 "%s No observer registered.", __FUNCTION__);
+    return -1;
+  }
+  shared_data_.vie_performance_monitor()->Terminate();
+  return 0;
+}
+
+int ViEBaseImpl::GetVersion(char version[1024]) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "GetVersion(version=?)");
+  assert(kViEVersionMaxMessageSize == 1024);
+
+  if (!version) {
+    shared_data_.SetLastError(kViEBaseInvalidArgument);
+    return -1;
+  }
+
+  char version_buf[kViEVersionMaxMessageSize];
+  char* version_ptr = version_buf;
+
+  WebRtc_Word32 len = 0;  // Does not include NULL termination.
+  WebRtc_Word32 acc_len = 0;
+
+  len = AddViEVersion(version_ptr);
+  if (len == -1) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  version_ptr += len;
+  acc_len += len;
+  assert(acc_len < kViEVersionMaxMessageSize);
+
+  len = AddBuildInfo(version_ptr);
+  if (len == -1) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  version_ptr += len;
+  acc_len += len;
+  assert(acc_len < kViEVersionMaxMessageSize);
+
+  len = AddExternalTransportBuild(version_ptr);
+  if (len == -1) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  version_ptr += len;
+  acc_len += len;
+  assert(acc_len < kViEVersionMaxMessageSize);
+
+  memcpy(version, version_buf, acc_len);
+  version[acc_len] = '\0';
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
+               ViEId(shared_data_.instance_id()), "GetVersion() => %s",
+               version);
+  return 0;
+}
+
+int ViEBaseImpl::LastError() {
+  return shared_data_.LastErrorInternal();
+}
+
+WebRtc_Word32 ViEBaseImpl::AddBuildInfo(char* str) const {
+  return sprintf(str, "Build: %s\n", BUILDINFO);
+}
+
+WebRtc_Word32 ViEBaseImpl::AddViEVersion(char* str) const {
+  return sprintf(str, "VideoEngine 3.2.0\n");
+}
+
+WebRtc_Word32 ViEBaseImpl::AddExternalTransportBuild(char* str) const {
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  return sprintf(str, "External transport build\n");
+#else
+  return 0;
+#endif
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_base_impl.h b/trunk/src/video_engine/vie_base_impl.h
new file mode 100644
index 0000000..a950300
--- /dev/null
+++ b/trunk/src/video_engine/vie_base_impl.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_BASE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_BASE_IMPL_H_
+
+#include "vie_base.h"
+#include "vie_defines.h"
+#include "vie_ref_count.h"
+#include "vie_shared_data.h"
+
+namespace webrtc {
+
+class Module;
+class VoiceEngine;
+
+// Implementation of the ViEBase interface. Owns the ViESharedData instance
+// that all other interface implementations use, and mixes in ViERefCount for
+// interface reference counting.
+class ViEBaseImpl
+    : public ViEBase,
+      public ViERefCount {
+ public:
+  // Decrements the interface reference count; see ViERefCount.
+  virtual int Release();
+
+  // Initializes VideoEngine and must be called before any other API is called.
+  virtual int Init();
+
+  // Connects ViE to a VoE instance. Pass in NULL to forget about a previously
+  // set voice engine and release all resources we allocated from it.
+  virtual int SetVoiceEngine(VoiceEngine* voice_engine);
+
+  // Creates a new ViE channel.
+  virtual int CreateChannel(int& video_channel);
+
+  // Creates a new ViE channel that will use the same capture device and encoder
+  // as |original_channel|.
+  virtual int CreateChannel(int& video_channel, int original_channel);
+
+  // Deletes a ViE channel.
+  virtual int DeleteChannel(const int video_channel);
+
+  // Connects a ViE channel with a VoE channel.
+  virtual int ConnectAudioChannel(const int video_channel,
+                                  const int audio_channel);
+
+  // Disconnects a video/voice channel pair.
+  virtual int DisconnectAudioChannel(const int video_channel);
+
+  // Starts sending on video_channel and also starts the encoder.
+  virtual int StartSend(const int video_channel);
+
+  // Stops sending on the specified channel.
+  virtual int StopSend(const int video_channel);
+
+  // Starts receiving on the channel and also start decoding.
+  virtual int StartReceive(const int video_channel);
+
+  // Stops receiving on the specified channel.
+  virtual int StopReceive(const int video_channel);
+
+  // Registers a customer implemented observer.
+  virtual int RegisterObserver(ViEBaseObserver& observer);
+
+  // Deregisters the observer.
+  virtual int DeregisterObserver();
+
+  // Prints version information into |version|.
+  virtual int GetVersion(char version[1024]);
+
+  // Returns the error code for the last registered error.
+  virtual int LastError();
+
+ protected:
+  ViEBaseImpl();
+  virtual ~ViEBaseImpl();
+
+  // Accessor used by derived interface implementations.
+  ViESharedData* shared_data() { return &shared_data_; }
+
+ private:
+  // Version functions. Each appends text at the given pointer and returns the
+  // number of characters written (excluding NUL), or -1 on failure.
+  WebRtc_Word32 AddViEVersion(char* str) const;
+  WebRtc_Word32 AddBuildInfo(char* str) const;
+  WebRtc_Word32 AddExternalTransportBuild(char* str) const;
+
+  // ViEBaseImpl owns ViESharedData used by all interface implementations.
+  ViESharedData shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_BASE_IMPL_H_
diff --git a/trunk/src/video_engine/vie_capture_impl.cc b/trunk/src/video_engine/vie_capture_impl.cc
new file mode 100644
index 0000000..a94c29c
--- /dev/null
+++ b/trunk/src/video_engine/vie_capture_impl.cc
@@ -0,0 +1,590 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_capture_impl.h"
+
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Returns the ViECapture sub-interface of |video_engine|, incrementing its
+// reference count. Returns NULL if |video_engine| is NULL or if capture API
+// support (WEBRTC_VIDEO_ENGINE_CAPTURE_API) is compiled out.
+ViECapture* ViECapture::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+  if (!video_engine) {
+    return NULL;
+  }
+  // VideoEngineImpl derives from the interface implementations, so the
+  // implicit conversion below selects the ViECaptureImpl base subobject.
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViECaptureImpl* vie_capture_impl = vie_impl;
+  // Increase ref count.
+  (*vie_capture_impl)++;
+  return vie_capture_impl;
+#else
+  return NULL;
+#endif
+}
+
+// Decrements this interface's reference count and returns the remaining
+// count, or -1 (with kViEAPIDoesNotExist set) if released more times than it
+// was acquired.
+int ViECaptureImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViECapture::Release()");
+  // Decrease ref count
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViECapture release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViECapture reference count: %d", ref_count);
+  return ref_count;
+}
+
+// Stores the shared-data pointer owned by ViEBaseImpl; no other setup.
+ViECaptureImpl::ViECaptureImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECaptureImpl::ViECaptureImpl() Ctor");
+}
+
+// Trace-only destructor; shared_data_ is not owned and is not freed here.
+ViECaptureImpl::~ViECaptureImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECaptureImpl::~ViECaptureImpl() Dtor");
+}
+
+// Returns the number of available capture devices, or -1 (with
+// kViENotInitialized set) if Init() has not been called.
+int ViECaptureImpl::NumberOfCaptureDevices() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  return  shared_data_->input_manager()->NumberOfCaptureDevices();
+}
+
+
+// Fills |device_nameUTF8| and |unique_idUTF8| for the device at |list_number|
+// in the enumeration. Returns the input manager's result, or -1 (with
+// kViENotInitialized set) if the engine is not initialized.
+int ViECaptureImpl::GetCaptureDevice(unsigned int list_number,
+                                     char* device_nameUTF8,
+                                     unsigned int device_nameUTF8Length,
+                                     char* unique_idUTF8,
+                                     unsigned int unique_idUTF8Length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(list_number: %d)", __FUNCTION__, list_number);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  // The input manager works on WebRtc_UWord8 buffers; the casts only
+  // reinterpret char* as unsigned char*.
+  return shared_data_->input_manager()->GetDeviceName(
+      list_number,
+      reinterpret_cast<WebRtc_UWord8*>(device_nameUTF8), device_nameUTF8Length,
+      reinterpret_cast<WebRtc_UWord8*>(unique_idUTF8), unique_idUTF8Length);
+}
+
+// Allocates the capture device identified by |unique_idUTF8| and returns its
+// id in |capture_id|. Returns 0 on success; on failure sets the input
+// manager's error code as last error and returns -1.
+int ViECaptureImpl::AllocateCaptureDevice(
+  const char* unique_idUTF8,
+  const unsigned int unique_idUTF8Length,
+  int& capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(unique_idUTF8: %s)", __FUNCTION__, unique_idUTF8);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  const WebRtc_Word32 result =
+      shared_data_->input_manager()->CreateCaptureDevice(
+          reinterpret_cast<const WebRtc_UWord8*>(unique_idUTF8),
+      static_cast<const WebRtc_UWord32>(unique_idUTF8Length), capture_id);
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+// Allocates an externally-fed capture device. On success returns 0 and sets
+// |capture_id| and |external_capture| (the interface the application pushes
+// frames into); on failure sets last error and returns -1.
+int ViECaptureImpl::AllocateExternalCaptureDevice(
+  int& capture_id, ViEExternalCapture*& external_capture) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  const WebRtc_Word32 result =
+      shared_data_->input_manager()->CreateExternalCaptureDevice(
+          external_capture, capture_id);
+
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+// Wraps an application-supplied VideoCaptureModule as a capture device and
+// returns its id in |capture_id|. Returns 0 on success; on failure sets the
+// input manager's error code as last error and returns -1.
+int ViECaptureImpl::AllocateCaptureDevice(VideoCaptureModule& capture_module,
+                                          int& capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  const WebRtc_Word32 result =
+      shared_data_->input_manager()->CreateCaptureDevice(capture_module,
+                                                         capture_id);
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+
+// Releases a previously allocated capture device. Returns -1 (with
+// kViECaptureDeviceDoesNotExist set) if |capture_id| is unknown.
+int ViECaptureImpl::ReleaseCaptureDevice(const int capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+  {
+    // The scoped lock is released before destruction below, since
+    // DestroyCaptureDevice takes ownership of locking itself.
+    ViEInputManagerScoped is((*(shared_data_->input_manager())));
+    ViECapturer* vie_capture = is.Capture(capture_id);
+    if (!vie_capture) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: Capture device %d doesn't exist", __FUNCTION__,
+                   capture_id);
+      shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+      return -1;
+    }
+  }
+
+  // Destroy the capture device.
+  return shared_data_->input_manager()->DestroyCaptureDevice(capture_id);
+}
+
+// Connects capture device |capture_id| as the frame source for
+// |video_channel|'s encoder. Prefers a pre-encode (hardware encoder) path
+// when the capturer supports it for the channel's codec; otherwise registers
+// the encoder for raw-frame callbacks. Fails if either id is unknown or the
+// channel already has a frame provider.
+int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
+                                         const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(capture_id: %d, video_channel: %d)", __FUNCTION__,
+               capture_id, video_channel);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
+    return -1;
+  }
+  //  Check if the encoder already has a connected frame provider
+  if (is.FrameProvider(vie_encoder) != NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already connected to a capture device.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceAlreadyConnected);
+    return -1;
+  }
+  VideoCodec codec;
+  bool use_hardware_encoder = false;
+  if (vie_encoder->GetEncoder(codec) == 0) {
+    // Try to provide the encoder with pre-encoded frames if possible.
+    if (vie_capture->PreEncodeToViEEncoder(codec, *vie_encoder,
+                                           video_channel) == 0) {
+      use_hardware_encoder = true;
+    }
+  }
+  // If we don't use the camera as hardware encoder, we register the vie_encoder
+  // for callbacks.
+  if (!use_hardware_encoder &&
+      vie_capture->RegisterFrameCallback(video_channel, vie_encoder) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+
+// Disconnects the capture device currently feeding |video_channel|'s encoder.
+// Fails if the channel does not exist, has no frame provider, or the provider
+// is not a capture device.
+int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
+  if (!frame_provider) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id()),
+                 "%s: No capture device connected to channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceNotConnected);
+    return -1;
+  }
+  // NOTE(review): ids outside [kViECaptureIdBase, kViECaptureIdMax] are
+  // presumably other provider types (e.g. file players) — confirm against
+  // vie_defines.h id ranges.
+  if (frame_provider->Id() < kViECaptureIdBase ||
+      frame_provider->Id() > kViECaptureIdMax) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id()),
+                 "%s: No capture device connected to channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceNotConnected);
+    return -1;
+  }
+
+  if (frame_provider->DeregisterFrameCallback(vie_encoder) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+
+  return 0;
+}
+
+// Starts capturing on |capture_id| with the requested capability. Fails if
+// the device does not exist or is already started.
+int ViECaptureImpl::StartCapture(const int capture_id,
+                                 const CaptureCapability& capture_capability) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->Started()) {
+    shared_data_->SetLastError(kViECaptureDeviceAlreadyStarted);
+    return -1;
+  }
+  if (vie_capture->Start(capture_capability) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Stops capturing on |capture_id|. Fails if the device does not exist or is
+// not currently started.
+int ViECaptureImpl::StopCapture(const int capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (!vie_capture->Started()) {
+    shared_data_->SetLastError(kViECaptureDeviceNotStarted);
+    return -1;
+  }
+  if (vie_capture->Stop() != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+
+  return 0;
+}
+
+// Sets the rotation to apply to frames from |capture_id|. The switch below
+// only converts the enum to degrees for the trace log; the enum itself is
+// passed on to the capturer.
+int ViECaptureImpl::SetRotateCapturedFrames(
+    const int capture_id,
+    const RotateCapturedFrame rotation) {
+  int i_rotation = -1;
+  switch (rotation) {
+    case RotateCapturedFrame_0:
+      i_rotation = 0;
+      break;
+    case RotateCapturedFrame_90:
+      i_rotation = 90;
+      break;
+    case RotateCapturedFrame_180:
+      i_rotation = 180;
+      break;
+    case RotateCapturedFrame_270:
+      i_rotation = 270;
+      break;
+  }
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(rotation: %d)", __FUNCTION__, i_rotation);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->SetRotateCapturedFrames(rotation) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Informs the engine of the capture device's inherent delay in milliseconds.
+// Fails if |capture_id| is unknown or the capturer rejects the value.
+int ViECaptureImpl::SetCaptureDelay(const int capture_id,
+                                    const unsigned int capture_delay_ms) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d, capture_delay_ms %u)", __FUNCTION__,
+               capture_id, capture_delay_ms);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+
+  if (vie_capture->SetCaptureDelay(capture_delay_ms) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Returns the number of capabilities for the named device. On Mac (Intel)
+// builds the function unconditionally fails early, so the initialization
+// check below is compiled in but unreachable there.
+int ViECaptureImpl::NumberOfCapabilities(
+    const char* unique_idUTF8,
+    const unsigned int unique_idUTF8Length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
+
+#if defined(WEBRTC_MAC_INTEL)
+  // TODO(mflodman) Move to capture module!
+  // QTKit framework handles all capabilities and capture settings
+  // automatically (mandatory).
+  // Thus this function cannot be supported on the Mac platform.
+  shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
+  // NOTE(review): format string has one %s but two trailing arguments; the
+  // extra instance_id() argument is ignored by printf-style formatting.
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s This API is not supported on Mac OS", __FUNCTION__,
+               shared_data_->instance_id());
+  return -1;
+#endif
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  return shared_data_->input_manager()->NumberOfCaptureCapabilities(
+      reinterpret_cast<const WebRtc_UWord8*>(unique_idUTF8));
+}
+
+
+// Fills |capability| with capability |capability_number| of the named device.
+// Unsupported on Mac (Intel) builds, where it fails unconditionally.
+int ViECaptureImpl::GetCaptureCapability(const char* unique_idUTF8,
+                                         const unsigned int unique_idUTF8Length,
+                                         const unsigned int capability_number,
+                                         CaptureCapability& capability) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
+
+#if defined(WEBRTC_MAC_INTEL)
+  // TODO(mflodman) Move to capture module!
+  // QTKit framework handles all capabilities and capture settings
+  // automatically (mandatory).
+  // Thus this function cannot be supported on the Mac platform.
+  shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
+  // NOTE(review): format string has one %s but two trailing arguments; the
+  // extra instance_id() argument is ignored by printf-style formatting.
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s This API is not supported on Mac OS", __FUNCTION__,
+               shared_data_->instance_id());
+  return -1;
+#endif
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  if (shared_data_->input_manager()->GetCaptureCapability(
+      reinterpret_cast<const WebRtc_UWord8*>(unique_idUTF8),
+      capability_number, capability) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Shows the platform capture-settings dialog for the named device at (x, y)
+// in |parent_window|. Unsupported on Mac (Intel) builds. Note that
+// |unique_idUTF8Length| is accepted but not used here, and the API-call
+// trace only executes on non-Mac builds (it follows the early return).
+int ViECaptureImpl::ShowCaptureSettingsDialogBox(
+    const char* unique_idUTF8,
+    const unsigned int unique_idUTF8Length,
+    const char* dialog_title,
+    void* parent_window,
+    const unsigned int x,
+    const unsigned int y) {
+#if defined(WEBRTC_MAC_INTEL)
+  // TODO(mflodman) Move to capture module
+  // QTKit framework handles all capabilities and capture settings
+  // automatically (mandatory).
+  // Thus this function cannot be supported on the Mac platform.
+  shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s This API is not supported on Mac OS", __FUNCTION__,
+               shared_data_->instance_id());
+  return -1;
+#endif
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s capture_id (capture_device_name: %s)", __FUNCTION__,
+               unique_idUTF8);
+
+  return shared_data_->input_manager()->DisplayCaptureSettingsDialogBox(
+           reinterpret_cast<const WebRtc_UWord8*>(unique_idUTF8),
+           reinterpret_cast<const WebRtc_UWord8*>(dialog_title),
+           parent_window, x, y);
+}
+
+// Retrieves the physical orientation of the named capture device into
+// |orientation|. Fails if the engine is not initialized or the input manager
+// reports an error.
+int ViECaptureImpl::GetOrientation(const char* unique_idUTF8,
+                                   RotateCapturedFrame& orientation) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  if (shared_data_->input_manager()->GetOrientation(
+      reinterpret_cast<const WebRtc_UWord8*>(unique_idUTF8),
+      orientation) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+
+// Enables or disables the brightness alarm on |capture_id|. Fails if the
+// device does not exist or the capturer rejects the request.
+int ViECaptureImpl::EnableBrightnessAlarm(const int capture_id,
+                                          const bool enable) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->EnableBrightnessAlarm(enable) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Registers |observer| for events from capture device |capture_id|. Only one
+// observer may be registered per device at a time.
+int ViECaptureImpl::RegisterObserver(const int capture_id,
+                                     ViECaptureObserver& observer) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->IsObserverRegistered()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Observer already registered", __FUNCTION__);
+    shared_data_->SetLastError(kViECaptureObserverAlreadyRegistered);
+    return -1;
+  }
+  if (vie_capture->RegisterObserver(observer) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes the observer registered on capture device |capture_id|. Fails if
+// the device does not exist or no observer is registered.
+int ViECaptureImpl::DeregisterObserver(const int capture_id) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (!vie_capture->IsObserverRegistered()) {
+    shared_data_->SetLastError(kViECaptureDeviceObserverNotRegistered);
+    return -1;
+  }
+
+  if (vie_capture->DeRegisterObserver() != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_capture_impl.h b/trunk/src/video_engine/vie_capture_impl.h
new file mode 100644
index 0000000..ca88a06
--- /dev/null
+++ b/trunk/src/video_engine/vie_capture_impl.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURE_IMPL_H_
+
+#include "typedefs.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+// Implementation of the ViECapture interface. Holds a non-owning pointer to
+// the engine's ViESharedData and mixes in ViERefCount for interface
+// reference counting.
+class ViECaptureImpl
+    : public ViECapture,
+      public ViERefCount {
+ public:
+  // Implements ViECapture.
+  virtual int Release();
+  virtual int NumberOfCaptureDevices();
+  virtual int GetCaptureDevice(unsigned int list_number, char* device_nameUTF8,
+                               const unsigned int device_nameUTF8Length,
+                               char* unique_idUTF8,
+                               const unsigned int unique_idUTF8Length);
+  virtual int AllocateCaptureDevice(const char* unique_idUTF8,
+                                    const unsigned int unique_idUTF8Length,
+                                    int& capture_id);
+  virtual int AllocateCaptureDevice(VideoCaptureModule& capture_module,
+                                    int& capture_id);
+  virtual int AllocateExternalCaptureDevice(
+      int& capture_id, ViEExternalCapture *&external_capture);
+  virtual int ReleaseCaptureDevice(const int capture_id);
+
+  virtual int ConnectCaptureDevice(const int capture_id,
+                                   const int video_channel);
+  virtual int DisconnectCaptureDevice(const int video_channel);
+  virtual int StartCapture(
+      const int capture_id,
+      const CaptureCapability& capture_capability = CaptureCapability());
+  virtual int StopCapture(const int capture_id);
+  virtual int SetRotateCapturedFrames(const int capture_id,
+                                      const RotateCapturedFrame rotation);
+  virtual int SetCaptureDelay(const int capture_id,
+                              const unsigned int capture_delay_ms);
+  virtual int NumberOfCapabilities(const char* unique_idUTF8,
+                                   const unsigned int unique_idUTF8Length);
+  virtual int GetCaptureCapability(const char* unique_idUTF8,
+                                   const unsigned int unique_idUTF8Length,
+                                   const unsigned int capability_number,
+                                   CaptureCapability& capability);
+  virtual int ShowCaptureSettingsDialogBox(
+    const char* unique_idUTF8, const unsigned int unique_idUTF8Length,
+    const char* dialog_title, void* parent_window = NULL,
+    const unsigned int x = 200, const unsigned int y = 200);
+  virtual int GetOrientation(const char* unique_idUTF8,
+                             RotateCapturedFrame& orientation);
+  virtual int EnableBrightnessAlarm(const int capture_id, const bool enable);
+  virtual int RegisterObserver(const int capture_id,
+                               ViECaptureObserver& observer);
+  virtual int DeregisterObserver(const int capture_id);
+
+ protected:
+  // Constructed by VideoEngineImpl with the engine-wide shared data.
+  ViECaptureImpl(ViESharedData* shared_data);
+  virtual ~ViECaptureImpl();
+
+ private:
+  // Not owned; owned by ViEBaseImpl and outlives this object.
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CAPTURE_IMPL_H_
diff --git a/trunk/src/video_engine/vie_capturer.cc b/trunk/src/video_engine/vie_capturer.cc
new file mode 100644
index 0000000..cd9bd0c
--- /dev/null
+++ b/trunk/src/video_engine/vie_capturer.cc
@@ -0,0 +1,900 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_capturer.h"
+
+#include "modules/interface/module_common_types.h"
+#include "modules/utility/interface/process_thread.h"
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+
+namespace webrtc {
+
+// Milliseconds the capture thread waits for a new-frame event per loop
+// iteration (see ViECaptureProcess()).
+const int kThreadWaitTimeMs = 100;
+// Max milliseconds OnIncomingCapturedFrame() blocks waiting for the previous
+// encoded frame to be delivered before overwriting it.
+const int kMaxDeliverWaitTime = 500;
+
+// Constructs a capturer bound to |capture_id|/|engine_id| and immediately
+// starts its delivery thread.  The thread wrapper and both events are
+// heap-allocated here and referenced as members; they are deleted in the
+// destructor only if the thread stops cleanly (see ~ViECapturer()).
+ViECapturer::ViECapturer(int capture_id,
+                         int engine_id,
+                         ProcessThread& module_process_thread)
+    : ViEFrameProviderBase(capture_id, engine_id),
+      capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      capture_module_(NULL),
+      external_capture_module_(NULL),
+      module_process_thread_(module_process_thread),
+      capture_id_(capture_id),
+      capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
+                                                   this, kHighPriority,
+                                                   "ViECaptureThread")),
+      capture_event_(*EventWrapper::Create()),
+      deliver_event_(*EventWrapper::Create()),
+      effect_filter_(NULL),
+      image_proc_module_(NULL),
+      image_proc_module_ref_counter_(0),
+      deflicker_frame_stats_(NULL),
+      brightness_frame_stats_(NULL),
+      current_brightness_level_(Normal),
+      reported_brightness_level_(Normal),
+      denoising_enabled_(false),
+      observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      observer_(NULL),
+      encoding_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      capture_encoder_(NULL),
+      encode_complete_callback_(NULL),
+      vie_encoder_(NULL),
+      vcm_(NULL),
+      decoder_initialized_(false) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
+               "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
+               capture_id, engine_id);
+  // Start the delivery thread right away; a start failure leaves the object
+  // unusable, hence the assert.
+  unsigned int t_id = 0;
+  if (capture_thread_.Start(t_id)) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
+                 "%s: thread started: %u", __FUNCTION__, t_id);
+  } else {
+    assert(false);
+  }
+}
+
+// Tears down in strict order: (1) signal the delivery thread to exit while
+// holding both frame locks, (2) detach from any ViEEncoder, (3) release the
+// capture module, (4) join the thread, (5) free image-processing state.
+ViECapturer::~ViECapturer() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
+               capture_id_, engine_id_);
+
+  // Stop the thread.  Both locks are taken so SetNotAlive()/Set() cannot
+  // interleave with a frame being queued or delivered.
+  deliver_cs_->Enter();
+  capture_cs_->Enter();
+  capture_thread_.SetNotAlive();
+  capture_event_.Set();
+  capture_cs_->Leave();
+  deliver_cs_->Leave();
+
+  provider_cs_->Enter();
+  if (vie_encoder_) {
+    vie_encoder_->DeRegisterExternalEncoder(codec_.plType);
+  }
+  provider_cs_->Leave();
+
+  // Stop the camera input.  The module is ref-counted; Release() drops the
+  // reference taken in Init().
+  if (capture_module_) {
+    module_process_thread_.DeRegisterModule(capture_module_);
+    capture_module_->DeRegisterCaptureDataCallback();
+    capture_module_->Release();
+    capture_module_ = NULL;
+  }
+  if (capture_thread_.Stop()) {
+    // Thread stopped cleanly; safe to free the thread and event objects
+    // allocated in the constructor.
+    delete &capture_thread_;
+    delete &capture_event_;
+    delete &deliver_event_;
+  } else {
+    // Deliberate leak: freeing these while the thread may still run would
+    // be a use-after-free, so leaking is the lesser evil.
+    assert(false);
+    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
+                 ViEId(engine_id_, capture_id_),
+                 "%s: Not able to stop capture thread for device %d, leaking",
+                 __FUNCTION__, capture_id_);
+  }
+
+  if (image_proc_module_) {
+    VideoProcessingModule::Destroy(image_proc_module_);
+  }
+  if (deflicker_frame_stats_) {
+    delete deflicker_frame_stats_;
+    deflicker_frame_stats_ = NULL;
+  }
+  // delete on NULL is a no-op, so no guard is needed here.
+  delete brightness_frame_stats_;
+  if (vcm_) {
+    delete vcm_;
+  }
+}
+
+// Factory wrapping an already-created VideoCaptureModule.  Returns NULL on
+// initialization failure, in which case the half-built capturer is freed.
+ViECapturer* ViECapturer::CreateViECapture(
+    int capture_id,
+    int engine_id,
+    VideoCaptureModule& capture_module,
+    ProcessThread& module_process_thread) {
+  ViECapturer* capturer = new ViECapturer(capture_id, engine_id,
+                                          module_process_thread);
+  if (capturer && capturer->Init(capture_module) == 0) {
+    return capturer;
+  }
+  delete capturer;
+  return NULL;
+}
+
+// Binds an externally supplied capture module: takes a reference, registers
+// for frame callbacks, and adds the module to the process thread.
+// Returns 0 on success, -1 if process-thread registration fails (the module
+// reference is still held and released by the destructor).
+WebRtc_Word32 ViECapturer::Init(VideoCaptureModule& capture_module) {
+  assert(capture_module_ == NULL);
+  capture_module_ = &capture_module;
+  capture_module_->RegisterCaptureDataCallback(*this);
+  capture_module_->AddRef();
+  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
+    return -1;
+  }
+
+  return 0;
+}
+
+// Factory creating the capture module from a device id (NULL id means an
+// external-capture device).  Returns NULL on failure; the partially built
+// capturer is freed in that case.
+ViECapturer* ViECapturer::CreateViECapture(
+    int capture_id,
+    int engine_id,
+    const WebRtc_UWord8* device_unique_idUTF8,
+    const WebRtc_UWord32 device_unique_idUTF8Length,
+    ProcessThread& module_process_thread) {
+  ViECapturer* capturer = new ViECapturer(capture_id, engine_id,
+                                          module_process_thread);
+  if (capturer &&
+      capturer->Init(device_unique_idUTF8, device_unique_idUTF8Length) == 0) {
+    return capturer;
+  }
+  delete capturer;
+  return NULL;
+}
+
+// Creates and binds the capture module.  A NULL |device_unique_idUTF8|
+// selects the external-capture path, which also hands back the
+// ViEExternalCapture interface via |external_capture_module_|.
+// Returns 0 on success, -1 on factory or process-thread failure.
+WebRtc_Word32 ViECapturer::Init(
+    const WebRtc_UWord8* device_unique_idUTF8,
+    const WebRtc_UWord32 device_unique_idUTF8Length) {
+  assert(capture_module_ == NULL);
+  if (device_unique_idUTF8 == NULL) {
+    capture_module_  = VideoCaptureFactory::Create(
+        ViEModuleId(engine_id_, capture_id_), external_capture_module_);
+  } else {
+    capture_module_ = VideoCaptureFactory::Create(
+        ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
+  }
+  if (!capture_module_) {
+    return -1;
+  }
+  capture_module_->AddRef();
+  capture_module_->RegisterCaptureDataCallback(*this);
+  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
+    return -1;
+  }
+
+  return 0;
+}
+
+// Called when the set of frame consumers changes.  If the camera is running,
+// is not feeding an encoder directly, and its capability was not pinned by
+// the caller, restart it with the best format the current observers want.
+int ViECapturer::FrameCallbackChanged() {
+  if (Started() && !EncoderActive() && !CaptureCapabilityFixed()) {
+    // Reconfigure the camera if a new size is required and the capture device
+    // does not provide encoded frames.
+    // NOTE(review): best_width/height/frame_rate are only read after
+    // GetBestFormat(); assumes GetBestFormat always writes all three
+    // outputs — confirm in ViEFrameProviderBase.
+    int best_width;
+    int best_height;
+    int best_frame_rate;
+    VideoCaptureCapability capture_settings;
+    capture_module_->CaptureSettings(capture_settings);
+    GetBestFormat(best_width, best_height, best_frame_rate);
+    if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
+      if (best_width != capture_settings.width ||
+          best_height != capture_settings.height ||
+          best_frame_rate != capture_settings.maxFPS ||
+          capture_settings.codecType != kVideoCodecUnknown) {
+        // Restart with the previously requested capability; Start() will
+        // re-derive the format from the observers.
+        Stop();
+        Start(requested_capability_);
+      }
+    }
+  }
+  return 0;
+}
+
+// Starts capturing.  The effective capability is chosen in priority order:
+// (1) the codec settings when a ViEEncoder uses this device for pre-encode,
+// (2) the best format requested by registered observers when the caller
+// passed no fixed capability, (3) the caller-supplied capability.
+WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
+               __FUNCTION__);
+  int width;
+  int height;
+  int frame_rate;
+  VideoCaptureCapability capability;
+  requested_capability_ = capture_capability;
+  if (EncoderActive()) {
+    // Device encodes directly: mirror the negotiated codec settings.
+    CriticalSectionScoped cs(encoding_cs_.get());
+    capability.width = codec_.width;
+    capability.height = codec_.height;
+    capability.maxFPS = codec_.maxFramerate;
+    capability.codecType = codec_.codecType;
+    capability.rawType = kVideoI420;
+
+  } else if (!CaptureCapabilityFixed()) {
+    // Ask the observers for best size.
+    GetBestFormat(width, height, frame_rate);
+    if (width == 0) {
+      width = kViECaptureDefaultWidth;
+    }
+    if (height == 0) {
+      height = kViECaptureDefaultHeight;
+    }
+    if (frame_rate == 0) {
+      frame_rate = kViECaptureDefaultFramerate;
+    }
+    capability.height = height;
+    capability.width = width;
+    capability.maxFPS = frame_rate;
+    capability.rawType = kVideoI420;
+    capability.codecType = kVideoCodecUnknown;
+  } else {
+    // Width, height and type specified with call to Start, not set by
+    // observers.
+    capability.width = requested_capability_.width;
+    capability.height = requested_capability_.height;
+    capability.maxFPS = requested_capability_.maxFPS;
+    capability.rawType = requested_capability_.rawType;
+    capability.interlaced = requested_capability_.interlaced;
+  }
+  return capture_module_->StartCapture(capability);
+}
+
+// Stops capturing and clears the caller-requested capability so that a later
+// Start() falls back to observer-driven format selection.
+WebRtc_Word32 ViECapturer::Stop() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
+               __FUNCTION__);
+  requested_capability_ = CaptureCapability();
+  return capture_module_->StopCapture();
+}
+
+// Returns true while the underlying capture module is actively capturing.
+bool ViECapturer::Started() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
+               __FUNCTION__);
+  return capture_module_->CaptureStarted();
+}
+
+// Returns the capture module's current device name (UTF-8, owned by the
+// module).
+const WebRtc_UWord8* ViECapturer::CurrentDeviceName() const {
+  return capture_module_->CurrentDeviceName();
+}
+
+// Overrides the capture module's reported capture delay (milliseconds).
+WebRtc_Word32 ViECapturer::SetCaptureDelay(WebRtc_Word32 delay_ms) {
+  return capture_module_->SetCaptureDelay(delay_ms);
+}
+
+// Maps the public RotateCapturedFrame enum onto the capture module's
+// VideoCaptureRotation and forwards the request.  Any value outside the
+// enum falls through to the 0-degree rotation, matching the original
+// switch's default initialization.
+WebRtc_Word32 ViECapturer::SetRotateCapturedFrames(
+  const RotateCapturedFrame rotation) {
+  switch (rotation) {
+    case RotateCapturedFrame_90:
+      return capture_module_->SetCaptureRotation(kCameraRotate90);
+    case RotateCapturedFrame_180:
+      return capture_module_->SetCaptureRotation(kCameraRotate180);
+    case RotateCapturedFrame_270:
+      return capture_module_->SetCaptureRotation(kCameraRotate270);
+    case RotateCapturedFrame_0:
+    default:
+      return capture_module_->SetCaptureRotation(kCameraRotate0);
+  }
+}
+
+// ViEExternalCapture implementation: accepts a raw frame injected by the
+// application and forwards it to the external capture module.
+// Returns -1 when this capturer was not created as an external device.
+int ViECapturer::IncomingFrame(unsigned char* video_frame,
+                               unsigned int video_frame_length,
+                               unsigned short width,
+                               unsigned short height,
+                               RawVideoType video_type,
+                               unsigned long long capture_time) {
+  // BUGFIX: capture_time is unsigned long long; "%u" mismatched the vararg
+  // width (undefined behavior / corrupted trace on 64-bit) — use "%llu".
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "ExternalCapture::IncomingFrame width %d, height %d, "
+               "capture_time %llu", width, height, capture_time);
+
+  if (!external_capture_module_) {
+    // Only capturers created with a NULL device id accept injected frames.
+    return -1;
+  }
+  VideoCaptureCapability capability;
+  capability.width = width;
+  capability.height = height;
+  capability.rawType = video_type;
+  return external_capture_module_->IncomingFrame(video_frame,
+                                                 video_frame_length,
+                                                 capability, capture_time);
+}
+
+// ViEExternalCapture implementation: accepts a planar I420 frame injected by
+// the application and forwards it to the external capture module.
+// Returns -1 when this capturer was not created as an external device.
+int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
+                                   unsigned long long capture_time) {
+  // BUGFIX: capture_time is unsigned long long; "%u" mismatched the vararg
+  // width (undefined behavior / corrupted trace on 64-bit) — use "%llu".
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "ExternalCapture::IncomingFrame width %d, height %d, "
+               " capture_time %llu", video_frame.width, video_frame.height,
+               capture_time);
+
+  if (!external_capture_module_) {
+    return -1;
+  }
+
+  // Repackage the public frame struct into the capture module's type;
+  // plane pointers are shared, not copied.
+  VideoFrameI420 frame;
+  frame.width = video_frame.width;
+  frame.height = video_frame.height;
+  frame.y_plane = video_frame.y_plane;
+  frame.u_plane = video_frame.u_plane;
+  frame.v_plane = video_frame.v_plane;
+  frame.y_pitch = video_frame.y_pitch;
+  frame.u_pitch = video_frame.u_pitch;
+  frame.v_pitch = video_frame.v_pitch;
+
+  return external_capture_module_->IncomingFrameI420(frame, capture_time);
+}
+
+// VideoCaptureDataCallback: called on the capture module's thread with a new
+// frame.  Raw frames are swapped into captured_frame_; device-encoded frames
+// go to encoded_frame_, waiting (bounded) for the previous one to drain so
+// encoded data is never silently dropped.  The delivery thread is then woken.
+void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id,
+                                          VideoFrame& video_frame,
+                                          VideoCodecType codec_type) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  CriticalSectionScoped cs(capture_cs_.get());
+  if (codec_type != kVideoCodecUnknown) {
+    if (encoded_frame_.Length() != 0) {
+      // The last encoded frame has not been sent yet. Need to wait.
+      deliver_event_.Reset();
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s(capture_id: %d) Last encoded frame not yet delivered.",
+                   __FUNCTION__, capture_id);
+      // Drop the lock so the delivery thread can consume encoded_frame_,
+      // then re-take it after the (time-bounded) wait.
+      capture_cs_->Leave();
+      // Wait for the coded frame to be sent before unblocking this.
+      deliver_event_.Wait(kMaxDeliverWaitTime);
+      assert(encoded_frame_.Length() == 0);
+      capture_cs_->Enter();
+    }
+    encoded_frame_.SwapFrame(video_frame);
+  } else {
+    captured_frame_.SwapFrame(video_frame);
+  }
+  // Wake ViECaptureProcess() to deliver the queued frame.
+  capture_event_.Set();
+  return;
+}
+
+// VideoCaptureDataCallback: the capture module reports a new capture delay.
+// The delay is fanned out to all frame consumers and, if present, to the
+// ViEEncoder using this device.
+void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                        const WebRtc_Word32 delay) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
+               delay);
+
+  // Deliver the network delay to all registered callbacks.
+  ViEFrameProviderBase::SetFrameDelay(delay);
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (vie_encoder_) {
+    vie_encoder_->DelayChanged(id, delay);
+  }
+}
+
+// Registers (non-NULL) or deregisters (NULL) the effect filter applied to
+// every delivered I420 frame.  Registering when one is already set, or
+// deregistering when none is set, fails with -1.  Held under the deliver
+// lock because the filter is used on the delivery thread.
+WebRtc_Word32 ViECapturer::RegisterEffectFilter(
+    ViEEffectFilter* effect_filter) {
+  CriticalSectionScoped cs(deliver_cs_.get());
+
+  if (!effect_filter && !effect_filter_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: no effect filter added for capture device %d",
+                 __FUNCTION__, capture_id_);
+    return -1;
+  }
+  if (effect_filter && effect_filter_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: effect filter already added for capture device %d",
+                 __FUNCTION__, capture_id_);
+    return -1;
+  }
+  if (effect_filter) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: register effect filter for device %d", __FUNCTION__,
+                 capture_id_);
+  } else {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: deregister effect filter for device %d", __FUNCTION__,
+                 capture_id_);
+  }
+  effect_filter_ = effect_filter;
+  return 0;
+}
+
+// Takes one reference on the shared VideoProcessingModule, creating it
+// lazily on the first user (denoising / deflickering / brightness alarm).
+// Returns 0 on success, -1 if the module could not be created.
+WebRtc_Word32 ViECapturer::IncImageProcRefCount() {
+  if (image_proc_module_) {
+    image_proc_module_ref_counter_++;
+    return 0;
+  }
+  assert(image_proc_module_ref_counter_ == 0);
+  image_proc_module_ = VideoProcessingModule::Create(
+      ViEModuleId(engine_id_, capture_id_));
+  if (!image_proc_module_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: could not create video processing module",
+                 __FUNCTION__);
+    return -1;
+  }
+  image_proc_module_ref_counter_++;
+  return 0;
+}
+
+// Drops one reference on the shared VideoProcessingModule, destroying it
+// when the last user goes away.  Callers must balance with a successful
+// IncImageProcRefCount().
+WebRtc_Word32 ViECapturer::DecImageProcRefCount() {
+  if (--image_proc_module_ref_counter_ == 0) {
+    // Last user gone: destroy the module.
+    VideoProcessingModule::Destroy(image_proc_module_);
+    image_proc_module_ = NULL;
+  }
+  return 0;
+}
+
+// Enables or disables denoising of delivered frames.  Enabling twice or
+// disabling when not enabled fails with -1.  Guarded by the deliver lock
+// since the flag is read on the delivery thread.
+WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
+               capture_id_, enable);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (enable) {
+    if (denoising_enabled_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: denoising already enabled", __FUNCTION__);
+      return -1;
+    }
+    // Flag first, then take a module reference (matches original ordering:
+    // the flag stays set even if the module could not be created).
+    denoising_enabled_ = true;
+    if (IncImageProcRefCount() != 0) {
+      return -1;
+    }
+    return 0;
+  }
+  if (!denoising_enabled_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: denoising not enabled", __FUNCTION__);
+    return -1;
+  }
+  denoising_enabled_ = false;
+  DecImageProcRefCount();
+  return 0;
+}
+
+// Enables or disables deflickering of delivered frames.  The allocated
+// FrameStats doubles as the "enabled" flag.  Enabling twice or disabling
+// when not enabled fails with -1.
+WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
+               capture_id_, enable);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (enable) {
+    if (deflicker_frame_stats_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: deflickering already enabled", __FUNCTION__);
+      return -1;
+    }
+    if (IncImageProcRefCount() != 0) {
+      return -1;
+    }
+    deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
+    return 0;
+  }
+  if (deflicker_frame_stats_ == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: deflickering not enabled", __FUNCTION__);
+    return -1;
+  }
+  DecImageProcRefCount();
+  delete deflicker_frame_stats_;
+  deflicker_frame_stats_ = NULL;
+  return 0;
+}
+
+// Enables or disables the brightness alarm on delivered frames.  The
+// allocated FrameStats doubles as the "enabled" flag.  Enabling twice or
+// disabling when not enabled fails with -1.
+WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
+               capture_id_, enable);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (enable) {
+    if (brightness_frame_stats_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: BrightnessAlarm already enabled", __FUNCTION__);
+      return -1;
+    }
+    if (IncImageProcRefCount() != 0) {
+      return -1;
+    }
+    brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
+  } else {
+    // BUGFIX: check that the alarm is actually enabled BEFORE dropping the
+    // image-processing reference.  The original decremented first, so a
+    // disable without a prior enable underflowed the ref count and could
+    // destroy a module other features were still using.  Also reports the
+    // correct feature name (message was copy/pasted from deflickering).
+    if (brightness_frame_stats_ == NULL) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: BrightnessAlarm not enabled", __FUNCTION__);
+      return -1;
+    }
+    DecImageProcRefCount();
+    delete brightness_frame_stats_;
+    brightness_frame_stats_ = NULL;
+  }
+  return 0;
+}
+
+// Thread entry trampoline: |obj| is the ViECapturer passed to CreateThread.
+// Returning true keeps the thread loop running.
+bool ViECapturer::ViECaptureThreadFunction(void* obj) {
+  return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
+}
+
+// Delivery-thread loop body.  Waits (bounded) for a new-frame signal, then
+// under deliver_cs_ swaps any pending raw/encoded frame out of the shared
+// slots (capture_cs_ held only for the swap, so the capture callback is
+// blocked as briefly as possible) and delivers it.  Finally forwards any
+// brightness-level change to the registered observer.
+bool ViECapturer::ViECaptureProcess() {
+  if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
+    deliver_cs_->Enter();
+    if (captured_frame_.Length() > 0) {
+      // New I420 frame.
+      capture_cs_->Enter();
+      deliver_frame_.SwapFrame(captured_frame_);
+      captured_frame_.SetLength(0);
+      capture_cs_->Leave();
+      DeliverI420Frame(deliver_frame_);
+    }
+    if (encoded_frame_.Length() > 0) {
+      capture_cs_->Enter();
+      deliver_frame_.SwapFrame(encoded_frame_);
+      encoded_frame_.SetLength(0);
+      // Unblock OnIncomingCapturedFrame(), which may be waiting for the
+      // encoded slot to drain.
+      deliver_event_.Set();
+      capture_cs_->Leave();
+      DeliverCodedFrame(deliver_frame_);
+    }
+    deliver_cs_->Leave();
+    if (current_brightness_level_ != reported_brightness_level_) {
+      CriticalSectionScoped cs(observer_cs_.get());
+      if (observer_) {
+        observer_->BrightnessAlarm(id_, current_brightness_level_);
+        reported_brightness_level_ = current_brightness_level_;
+      }
+    }
+  }
+  // Keep the thread loop alive.
+  return true;
+}
+
+// Runs the enabled image-processing stages (deflickering, denoising,
+// brightness detection), applies the effect filter, then fans the frame out
+// to all registered consumers.  image_proc_module_ is guaranteed non-NULL
+// whenever any of the stats pointers / denoising flag is set, because the
+// Enable* methods only set them after IncImageProcRefCount() succeeds.
+void ViECapturer::DeliverI420Frame(VideoFrame& video_frame) {
+  // Apply image enhancement and effect filter.
+  if (deflicker_frame_stats_) {
+    if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
+                                          video_frame) == 0) {
+      image_proc_module_->Deflickering(video_frame, *deflicker_frame_stats_);
+    } else {
+      WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: could not get frame stats for captured frame",
+                   __FUNCTION__);
+    }
+  }
+  if (denoising_enabled_) {
+    image_proc_module_->Denoising(video_frame);
+  }
+  if (brightness_frame_stats_) {
+    if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
+                                          video_frame) == 0) {
+      WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
+          video_frame, *brightness_frame_stats_);
+
+      // The level change is picked up (and reported) by ViECaptureProcess().
+      switch (brightness) {
+      case VideoProcessingModule::kNoWarning:
+        current_brightness_level_ = Normal;
+        break;
+      case VideoProcessingModule::kDarkWarning:
+        current_brightness_level_ = Dark;
+        break;
+      case VideoProcessingModule::kBrightWarning:
+        current_brightness_level_ = Bright;
+        break;
+      default:
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                     "%s: Brightness detection failed", __FUNCTION__);
+      }
+    }
+  }
+  if (effect_filter_) {
+    // The filter may modify the frame buffer in place.
+    effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(),
+                              video_frame.TimeStamp(), video_frame.Width(),
+                              video_frame.Height());
+  }
+  // Deliver the captured frame to all observers (channels, renderer or file).
+  ViEFrameProviderBase::DeliverFrame(video_frame);
+}
+
+// Delivers a device-encoded frame: hands the bitstream to the encode
+// callback (the RTP path) and, if anyone also needs raw frames, pushes it
+// through the local VCM decoder.  The 90 factor converts the render time in
+// milliseconds to the 90 kHz RTP timestamp clock.
+void ViECapturer::DeliverCodedFrame(VideoFrame& video_frame) {
+  if (encode_complete_callback_) {
+    EncodedImage encoded_image(video_frame.Buffer(), video_frame.Length(),
+                               video_frame.Size());
+    encoded_image._timeStamp = 90 * (WebRtc_UWord32) video_frame.RenderTimeMs();
+    encode_complete_callback_->Encoded(encoded_image);
+  }
+
+  if (NumberOfRegisteredFrameCallbacks() > 0 && decoder_initialized_) {
+    // Swap the bitstream into the decode buffer (no copy) and decode it so
+    // raw-frame consumers still get frames; FrameToRender() delivers them.
+    video_frame.Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
+                     decode_buffer_.payloadSize);
+    decode_buffer_.encodedHeight = video_frame.Height();
+    decode_buffer_.encodedWidth = video_frame.Width();
+    decode_buffer_.renderTimeMs = video_frame.RenderTimeMs();
+    decode_buffer_.timeStamp = 90 * (WebRtc_UWord32) video_frame.RenderTimeMs();
+    decode_buffer_.payloadType = codec_.plType;
+    vcm_->DecodeFromStorage(decode_buffer_);
+  }
+}
+
+// Removes a frame consumer.  The ViEEncoder doing pre-encode is tracked
+// separately from regular callbacks, so it gets special handling; the
+// provider lock is released before deregistering the external encoder to
+// avoid a lock-order deadlock with the VCM (see comment below).
+int ViECapturer::DeregisterFrameCallback(
+    const ViEFrameCallback* callbackObject) {
+  provider_cs_->Enter();
+  if (callbackObject == vie_encoder_) {
+    // Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
+    ViEEncoder* vie_encoder = NULL;
+    vie_encoder = vie_encoder_;
+    vie_encoder_ = NULL;
+    provider_cs_->Leave();
+
+    // Need to take this here in order to avoid deadlock with VCM. The reason is
+    // that VCM will call ::Release and a deadlock can occur.
+    deliver_cs_->Enter();
+    vie_encoder->DeRegisterExternalEncoder(codec_.plType);
+    deliver_cs_->Leave();
+    return 0;
+  }
+  provider_cs_->Leave();
+  return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
+}
+
+// True if |callbackObject| currently receives frames from this capturer,
+// either as the pre-encode ViEEncoder or as a regular registered callback.
+bool ViECapturer::IsFrameCallbackRegistered(
+    const ViEFrameCallback* callbackObject) {
+  CriticalSectionScoped cs(provider_cs_.get());
+  return callbackObject == vie_encoder_ ||
+         ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
+}
+
+// Wires this capture device up as the external (hardware) encoder for
+// |vie_encoder|: the device encodes frames itself and this object forwards
+// the bitstream.  Fails if another encoder already uses this device or the
+// device cannot encode |codec|.  Returns 0 on success, -1 on failure.
+WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
+                                                 ViEEncoder& vie_encoder,
+                                                 WebRtc_Word32 vie_encoder_id) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+  if (vie_encoder_ && &vie_encoder != vie_encoder_) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s(capture_device_id: %d Capture device already encoding)",
+                 __FUNCTION__, capture_id_);
+    return -1;
+  }
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder =
+    capture_module_->GetEncodeInterface(codec);
+  if (!capture_encoder) {
+    // Encoding not supported?
+    return -1;
+  }
+  capture_encoder_ = capture_encoder;
+
+  // Create VCM module used for decoding frames if needed.
+  if (!vcm_) {
+    vcm_ = VideoCodingModule::Create(capture_id_);
+  }
+
+  // Register this object as the external encoder; roll back on SetEncoder
+  // failure so the ViEEncoder is left unchanged.
+  if (vie_encoder.RegisterExternalEncoder(this, codec.plType) != 0) {
+    return -1;
+  }
+  if (vie_encoder.SetEncoder(codec) != 0) {
+    vie_encoder.DeRegisterExternalEncoder(codec.plType);
+    return -1;
+  }
+
+  // Make sure the encoder is not an I420 observer.
+  ViEFrameProviderBase::DeregisterFrameCallback(&vie_encoder);
+  // Store the vie_encoder using this capture device.
+  vie_encoder_ = &vie_encoder;
+  vie_encoder_id_ = vie_encoder_id;
+  memcpy(&codec_, &codec, sizeof(VideoCodec));
+  return 0;
+}
+
+// True while a ViEEncoder uses this device for pre-encoding (set by
+// PreEncodeToViEEncoder, cleared by Release/DeregisterFrameCallback).
+bool ViECapturer::EncoderActive() {
+  return vie_encoder_ != NULL;
+}
+
+// True if the caller pinned the capture format via Start(capability), i.e.
+// width, height and max FPS were all explicitly specified.
+bool ViECapturer::CaptureCapabilityFixed() {
+  return requested_capability_.width != 0 &&
+      requested_capability_.height != 0 &&
+      requested_capability_.maxFPS != 0;
+}
+
+// VideoEncoder interface stub: version reporting is intentionally a no-op
+// for capture-device encoders; |version| is left untouched.
+WebRtc_Word32 ViECapturer::Version(WebRtc_Word8* version,
+                                   WebRtc_Word32 length) const {
+  return 0;
+}
+
+// VideoEncoder::InitEncode for devices that encode in hardware: configures
+// the capture device's encoder and, best-effort, a local VCM decoder so
+// stored frames can be decoded for local consumers.
+WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
+                                      WebRtc_Word32 number_of_cores,
+                                      WebRtc_UWord32 max_payload_size) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (!capture_encoder_ || !codec_settings) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Decoder setup is non-fatal: on any failure decoder_initialized_ simply
+  // stays false, exactly as in the original success-only nesting.
+  if (vcm_ && vcm_->InitializeReceiver() == 0 &&
+      vcm_->RegisterReceiveCallback(this) == 0 &&
+      vcm_->RegisterReceiveCodec(codec_settings, number_of_cores,
+                                 false) == 0) {
+    decoder_initialized_ = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s(capture_device_id: %d) VCM Decoder initialized",
+                 __FUNCTION__, capture_id_);
+  }
+  return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
+}
+
+// VideoEncoder::Encode for devices encoding in hardware: the raw image is
+// ignored; only key-frame and skip-frame requests are forwarded to the
+// capture device's encoder.  Delta frames are produced by the device itself.
+WebRtc_Word32 ViECapturer::Encode(const RawImage& input_image,
+                                  const CodecSpecificInfo* codec_specific_info,
+                                  const VideoFrameType* frame_types) {
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (!capture_encoder_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  // BUGFIX: guard before dereferencing; the original crashed on a NULL
+  // frame-type array (InitEncode already guards its pointer argument).
+  if (!frame_types) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (*frame_types == kKeyFrame) {
+    return capture_encoder_->EncodeFrameType(kVideoFrameKey);
+  }
+  if (*frame_types == kSkipFrame) {
+    return capture_encoder_->EncodeFrameType(kFrameEmpty);
+  }
+  return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+}
+
+// Stores the callback that receives device-encoded bitstream from
+// DeliverCodedFrame().  Held under the deliver lock since the callback is
+// invoked on the delivery thread.
+WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (!capture_encoder_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  encode_complete_callback_ = callback;
+  return 0;
+}
+
+// VideoEncoder::Release: detaches this device from its ViEEncoder, resets
+// the camera to raw I420 output and re-registers the encoder as an ordinary
+// I420 frame consumer.
+WebRtc_Word32 ViECapturer::Release() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+  {
+    CriticalSectionScoped cs(deliver_cs_.get());
+    encode_complete_callback_ = NULL;
+  }
+
+  {
+    CriticalSectionScoped cs(encoding_cs_.get());
+
+    decoder_initialized_ = false;
+    codec_.codecType = kVideoCodecUnknown;
+    // Reset the camera to output I420.
+    // NOTE(review): capture_encoder_ is dereferenced unguarded — assumes
+    // Release() is only called after a successful PreEncodeToViEEncoder()/
+    // InitEncode(); confirm the VideoEncoder contract.
+    capture_encoder_->ConfigureEncoder(codec_, 0);
+
+    if (vie_encoder_) {
+      // Need to add the encoder as an observer of I420.
+      ViEFrameProviderBase::RegisterFrameCallback(vie_encoder_id_,
+                                                  vie_encoder_);
+    }
+    vie_encoder_ = NULL;
+  }
+  return 0;
+}
+
+// Should reset the capture device to the state it was in after the InitEncode
+// function. Current implementation does nothing.
+WebRtc_Word32 ViECapturer::Reset() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+  return 0;
+}
+
+// Forwards packet-loss / round-trip-time updates to the capture device's
+// encoder; fails if no device encoder is configured.
+WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packet_loss,
+                                                int rtt) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (capture_encoder_) {
+    return capture_encoder_->SetChannelParameters(packet_loss, rtt);
+  }
+  return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+}
+
+// Forwards target bitrate / frame-rate updates to the capture device's
+// encoder; fails if no device encoder is configured.
+WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
+                                    WebRtc_UWord32 frame_rate) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (capture_encoder_) {
+    return capture_encoder_->SetRates(new_bit_rate, frame_rate);
+  }
+  return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+}
+
+// VCMReceiveCallback: a frame decoded from storage is delivered exactly like
+// a captured I420 frame (filters applied, then fanned out to consumers).
+WebRtc_Word32 ViECapturer::FrameToRender(VideoFrame& video_frame) {
+  CriticalSectionScoped cs(deliver_cs_.get());
+  DeliverI420Frame(video_frame);
+  return 0;
+}
+
+// Registers |observer| for frame-rate, no-picture and brightness callbacks.
+// Fails with -1 if an observer is already registered or the capture module
+// rejects the callback registration.
+WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver& observer) {
+  // BUGFIX: take the observer lock, mirroring DeRegisterObserver() and
+  // IsObserverRegistered(); the original read and wrote observer_ without
+  // synchronization, racing with the callback thread and deregistration.
+  CriticalSectionScoped cs(observer_cs_.get());
+  if (observer_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s Observer already registered", __FUNCTION__, capture_id_);
+    return -1;
+  }
+  if (capture_module_->RegisterCaptureCallback(*this) != 0) {
+    return -1;
+  }
+  capture_module_->EnableFrameRateCallback(true);
+  capture_module_->EnableNoPictureAlarm(true);
+  observer_ = &observer;
+  return 0;
+}
+
+// Removes the registered observer and turns off the capture module's
+// feedback callbacks.  Fails with -1 if no observer is registered.
+WebRtc_Word32 ViECapturer::DeRegisterObserver() {
+  CriticalSectionScoped cs(observer_cs_.get());
+  if (!observer_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s No observer registered", __FUNCTION__, capture_id_);
+    return -1;
+  }
+  capture_module_->EnableFrameRateCallback(false);
+  capture_module_->EnableNoPictureAlarm(false);
+  capture_module_->DeRegisterCaptureCallback();
+  observer_ = NULL;
+  return 0;
+}
+
+// True while a ViECaptureObserver is registered (under the observer lock).
+bool ViECapturer::IsObserverRegistered() {
+  CriticalSectionScoped cs(observer_cs_.get());
+  return observer_ != NULL;
+}
+
+// VideoCaptureFeedBack: periodic frame-rate report from the capture module,
+// forwarded to the registered observer.
+void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 frame_rate) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "OnCaptureFrameRate %d", frame_rate);
+
+  CriticalSectionScoped cs(observer_cs_.get());
+  // BUGFIX: a callback already in flight can reach this point after
+  // DeRegisterObserver() cleared observer_ while we waited on the lock;
+  // guard before dereferencing (ViECaptureProcess() already does).
+  if (observer_) {
+    observer_->CapturedFrameRate(id_, (WebRtc_UWord8) frame_rate);
+  }
+}
+
+// VideoCaptureFeedBack: raised/cleared picture alarm from the capture
+// module, translated to the public CaptureAlarm enum and forwarded to the
+// registered observer.
+void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
+                                   const VideoCaptureAlarm alarm) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "OnNoPictureAlarm %d", alarm);
+
+  CriticalSectionScoped cs(observer_cs_.get());
+  // BUGFIX: observer_ may have been cleared by DeRegisterObserver() while
+  // this callback waited on the lock; guard before dereferencing.
+  if (observer_) {
+    CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
+    // NOTE(review): this passes the module |id| whereas OnCaptureFrameRate
+    // passes id_ — confirm which identifier observers expect.
+    observer_->NoPictureAlarm(id, vie_alarm);
+  }
+}
+
+// Tells the capture module to repeatedly send |capture_device_image| instead
+// of live frames.
+// NOTE(review): the constant 10 is presumably the send frame rate in fps —
+// confirm against VideoCaptureModule::StartSendImage and name it.
+WebRtc_Word32 ViECapturer::SetCaptureDeviceImage(
+    const VideoFrame& capture_device_image) {
+  return capture_module_->StartSendImage(capture_device_image, 10);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_capturer.h b/trunk/src/video_engine/vie_capturer.h
new file mode 100644
index 0000000..da68278
--- /dev/null
+++ b/trunk/src/video_engine/vie_capturer.h
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "modules/video_capture/main/interface/video_capture.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EventWrapper;
+class ProcessThread;
+class ThreadWrapper;
+class ViEEffectFilter;
+class ViEEncoder;
+struct ViEPicture;
+
+// ViECapturer owns one video capture source (a VideoCaptureModule or an
+// external capture implementation), receives its frames via the
+// VideoCaptureDataCallback interface, optionally applies image processing
+// (deflickering, denoising, brightness analysis) and an effect filter, and
+// delivers the result to registered frame callbacks from an internal thread.
+// It can also act as a VideoEncoder front-end for encoding-capable cameras.
+class ViECapturer
+    : public ViEFrameProviderBase,
+      public ViEExternalCapture,
+      protected VCMReceiveCallback,
+      protected VideoCaptureDataCallback,
+      protected VideoCaptureFeedBack,
+      protected VideoEncoder {
+ public:
+  // Factory wrapping an already-created capture module.
+  static ViECapturer* CreateViECapture(int capture_id,
+                                       int engine_id,
+                                       VideoCaptureModule& capture_module,
+                                       ProcessThread& module_process_thread);
+
+  // Factory creating the capture module from a device unique id (UTF-8).
+  static ViECapturer* CreateViECapture(
+      int capture_id,
+      int engine_id,
+      const WebRtc_UWord8* device_unique_idUTF8,
+      WebRtc_UWord32 device_unique_idUTF8Length,
+      ProcessThread& module_process_thread);
+
+  ~ViECapturer();
+
+  // Implements ViEFrameProviderBase.
+  int FrameCallbackChanged();
+  virtual int DeregisterFrameCallback(const ViEFrameCallback* callbackObject);
+  bool IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject);
+
+  // Implements ExternalCapture.
+  virtual int IncomingFrame(unsigned char* video_frame,
+                            unsigned int video_frame_length,
+                            unsigned short width, unsigned short height,
+                            RawVideoType video_type,
+                            unsigned long long capture_time = 0);
+
+  virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
+                                unsigned long long capture_time = 0);
+
+  // Use this capture device as encoder.
+  // Returns 0 if the codec is supported by this capture device.
+  virtual WebRtc_Word32 PreEncodeToViEEncoder(const VideoCodec& codec,
+                                              ViEEncoder& vie_encoder,
+                                              WebRtc_Word32 vie_encoder_id);
+
+  // Start/Stop.
+  WebRtc_Word32 Start(
+      const CaptureCapability& capture_capability = CaptureCapability());
+  WebRtc_Word32 Stop();
+  bool Started();
+
+  // Overrides the capture delay.
+  WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delay_ms);
+
+  // Sets rotation of the incoming captured frame.
+  WebRtc_Word32 SetRotateCapturedFrames(const RotateCapturedFrame rotation);
+
+  // Effect filter.
+  WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
+  WebRtc_Word32 EnableDenoising(bool enable);
+  WebRtc_Word32 EnableDeflickering(bool enable);
+  WebRtc_Word32 EnableBrightnessAlarm(bool enable);
+
+  // Statistics observer (frame rate and no-picture alarm callbacks).
+  WebRtc_Word32 RegisterObserver(ViECaptureObserver& observer);
+  WebRtc_Word32 DeRegisterObserver();
+  bool IsObserverRegistered();
+
+  // Information.
+  const WebRtc_UWord8* CurrentDeviceName() const;
+
+  // Set device image.
+  WebRtc_Word32 SetCaptureDeviceImage(const VideoFrame& capture_device_image);
+
+ protected:
+  ViECapturer(int capture_id,
+              int engine_id,
+              ProcessThread& module_process_thread);
+
+  // Two-phase init matching the two factories above.
+  WebRtc_Word32 Init(VideoCaptureModule& capture_module);
+  WebRtc_Word32 Init(const WebRtc_UWord8* device_unique_idUTF8,
+                     const WebRtc_UWord32 device_unique_idUTF8Length);
+
+  // Implements VideoCaptureDataCallback.
+  virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
+                                       VideoFrame& video_frame,
+                                       VideoCodecType codec_type);
+  virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                     const WebRtc_Word32 delay);
+
+  bool EncoderActive();
+
+  // Returns true if the capture capability has been set in |StartCapture|
+  // function and may not be changed.
+  bool CaptureCapabilityFixed();
+
+  // Help function used for keeping track of VideoImageProcesingModule.
+  // Creates the module if it is needed, returns 0 on success and guarantees
+  // that the image proc module exist.
+  WebRtc_Word32 IncImageProcRefCount();
+  WebRtc_Word32 DecImageProcRefCount();
+
+  // Implements VideoEncoder.
+  virtual WebRtc_Word32 Version(WebRtc_Word8* version,
+                                WebRtc_Word32 length) const;
+  virtual WebRtc_Word32 InitEncode(const VideoCodec* codec_settings,
+                                   WebRtc_Word32 number_of_cores,
+                                   WebRtc_UWord32 max_payload_size);
+  virtual WebRtc_Word32 Encode(const RawImage& input_image,
+                               const CodecSpecificInfo* codec_specific_info,
+                               const VideoFrameType* frame_types);
+  virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
+      EncodedImageCallback* callback);
+  virtual WebRtc_Word32 Release();
+  virtual WebRtc_Word32 Reset();
+  virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packet_loss,
+                                             int rtt);
+  virtual WebRtc_Word32 SetRates(WebRtc_UWord32 new_bit_rate,
+                                 WebRtc_UWord32 frame_rate);
+
+  // Implements  VCMReceiveCallback.
+  virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame);
+
+  // Implements VideoCaptureFeedBack
+  virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 frame_rate);
+  virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
+                                const VideoCaptureAlarm alarm);
+
+  // Thread functions for delivering captured frames to receivers.
+  static bool ViECaptureThreadFunction(void* obj);
+  bool ViECaptureProcess();
+
+  void DeliverI420Frame(VideoFrame& video_frame);
+  void DeliverCodedFrame(VideoFrame& video_frame);
+
+ private:
+  // Never take capture_cs_ before deliver_cs_!
+  scoped_ptr<CriticalSectionWrapper> capture_cs_;
+  scoped_ptr<CriticalSectionWrapper> deliver_cs_;
+  VideoCaptureModule* capture_module_;
+  VideoCaptureExternal* external_capture_module_;
+  ProcessThread& module_process_thread_;
+  const int capture_id_;
+
+  // Capture thread.
+  ThreadWrapper& capture_thread_;
+  EventWrapper& capture_event_;
+  EventWrapper& deliver_event_;
+
+  // Frame buffers. NOTE(review): captured_frame_ appears to be written by
+  // the capture callback and handed over to deliver_frame_ for the deliver
+  // thread — confirm against vie_capturer.cc.
+  VideoFrame captured_frame_;
+  VideoFrame deliver_frame_;
+  VideoFrame encoded_frame_;
+
+  // Image processing.
+  ViEEffectFilter* effect_filter_;
+  VideoProcessingModule* image_proc_module_;
+  int image_proc_module_ref_counter_;
+  VideoProcessingModule::FrameStats* deflicker_frame_stats_;
+  VideoProcessingModule::FrameStats* brightness_frame_stats_;
+  Brightness current_brightness_level_;
+  Brightness reported_brightness_level_;
+  bool denoising_enabled_;
+
+  // Statistics observer.
+  scoped_ptr<CriticalSectionWrapper> observer_cs_;
+  ViECaptureObserver* observer_;
+
+  // Encoding using encoding capable cameras.
+  scoped_ptr<CriticalSectionWrapper> encoding_cs_;
+  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder_;
+  EncodedImageCallback* encode_complete_callback_;
+  VideoCodec codec_;
+  // The ViEEncoder we are encoding for.
+  ViEEncoder* vie_encoder_;
+  // ViEEncoder id we are encoding for.
+  WebRtc_Word32 vie_encoder_id_;
+  // Used for decoding preencoded frames.
+  VideoCodingModule* vcm_;
+  EncodedVideoData decode_buffer_;
+  bool decoder_initialized_;
+  CaptureCapability requested_capability_;
+
+  VideoFrame capture_device_image_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
diff --git a/trunk/src/video_engine/vie_channel.cc b/trunk/src/video_engine/vie_channel.cc
new file mode 100644
index 0000000..e6f7a0a
--- /dev/null
+++ b/trunk/src/video_engine/vie_channel.cc
@@ -0,0 +1,2529 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_channel.h"
+
+#include <algorithm>
+#include <vector>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/udp_transport/interface/udp_transport.h"
+#include "modules/utility/interface/process_thread.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_receiver.h"
+#include "video_engine/vie_sender.h"
+#include "video_engine/vie_sync_module.h"
+
+namespace webrtc {
+
+const int kMaxDecodeWaitTimeMs = 50;
+
+// Constructor: allocates the modules this channel owns (RTP/RTCP, optional
+// UDP transport, VCM, receiver, sender, sync module) and initializes all
+// flags/pointers. Wiring of callbacks and transports happens in Init().
+ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
+                       WebRtc_Word32 engine_id,
+                       WebRtc_UWord32 number_of_cores,
+                       ProcessThread& module_process_thread)
+    : ViEFrameProviderBase(channel_id, engine_id),
+      channel_id_(channel_id),
+      engine_id_(engine_id),
+      number_of_cores_(number_of_cores),
+      num_socket_threads_(kViESocketThreads),
+      callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      // Owned by this channel; destroyed via DestroyRtpRtcp() in the dtor.
+      rtp_rtcp_(*RtpRtcp::CreateRtpRtcp(ViEModuleId(engine_id, channel_id),
+                                        false)),
+      default_rtp_rtcp_(NULL),
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+      socket_transport_(*UdpTransport::Create(
+          ViEModuleId(engine_id, channel_id), num_socket_threads_)),
+#endif
+      vcm_(*VideoCodingModule::Create(ViEModuleId(engine_id, channel_id))),
+      // Heap-allocated and held by reference; deleted via &member in dtor.
+      vie_receiver_(*(new ViEReceiver(engine_id, channel_id, rtp_rtcp_, vcm_))),
+      vie_sender_(*(new ViESender(engine_id, channel_id))),
+      vie_sync_(*(new ViESyncModule(ViEId(engine_id, channel_id), vcm_,
+                                    rtp_rtcp_))),
+      module_process_thread_(module_process_thread),
+      codec_observer_(NULL),
+      do_key_frame_callbackRequest_(false),
+      rtp_observer_(NULL),
+      rtcp_observer_(NULL),
+      networkObserver_(NULL),
+      rtp_packet_timeout_(false),
+      using_packet_spread_(false),
+      external_transport_(NULL),
+      decoder_reset_(true),
+      wait_for_key_frame_(false),
+      decode_thread_(NULL),
+      external_encryption_(NULL),
+      effect_filter_(NULL),
+      color_enhancement_(true),
+      vcm_rttreported_(TickTime::Now()),
+      file_recorder_(channel_id) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, channel_id),
+               "ViEChannel::ViEChannel(channel_id: %d, engine_id: %d)",
+               channel_id, engine_id);
+}
+
+// Wires the channel together: registers this channel's callbacks and
+// transports with the RTP/RTCP module, initializes the VCM for receiving,
+// and (when VP8 is compiled in) installs VP8 as the default send/receive
+// codec. Returns 0 on success, -1 on fatal failure. Some non-fatal
+// registrations only log a warning and continue.
+// Fix: the two incoming-callback registrations below previously logged at
+// kTraceWarning even though they return -1; raised to kTraceError for
+// consistency with the other fatal paths.
+WebRtc_Word32 ViEChannel::Init() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: channel_id: %d, engine_id: %d)", __FUNCTION__, channel_id_,
+               engine_id_);
+  // RTP/RTCP initialization.
+  if (rtp_rtcp_.InitSender() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::InitSender failure", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_.SetSendingMediaStatus(false) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::SetSendingMediaStatus failure", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_.InitReceiver() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::InitReceiver failure", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_.RegisterIncomingDataCallback(
+      static_cast<RtpData*>(&vie_receiver_)) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::RegisterIncomingDataCallback failure", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_.RegisterSendTransport(
+      static_cast<Transport*>(&vie_sender_)) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::RegisterSendTransport failure", __FUNCTION__);
+    return -1;
+  }
+  if (module_process_thread_.RegisterModule(&rtp_rtcp_) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::RegisterModule failure", __FUNCTION__);
+    return -1;
+  }
+  // The next two failures are tolerated: only a warning is logged.
+  if (rtp_rtcp_.SetKeyFrameRequestMethod(kKeyFrameReqFirRtp) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::SetKeyFrameRequestMethod failure", __FUNCTION__);
+  }
+  if (rtp_rtcp_.SetRTCPStatus(kRtcpCompound) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::SetRTCPStatus failure", __FUNCTION__);
+  }
+  if (rtp_rtcp_.RegisterIncomingRTPCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::RegisterIncomingRTPCallback failure", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_.RegisterIncomingRTCPCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::RegisterIncomingRTCPCallback failure", __FUNCTION__);
+    return -1;
+  }
+
+  // VCM initialization
+  if (vcm_.InitializeReceiver() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: VCM::InitializeReceiver failure", __FUNCTION__);
+    return -1;
+  }
+  if (vcm_.RegisterReceiveCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterReceiveCallback failure", __FUNCTION__);
+    return -1;
+  }
+  if (vcm_.RegisterFrameTypeCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterFrameTypeCallback failure", __FUNCTION__);
+  }
+  if (vcm_.RegisterReceiveStatisticsCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterReceiveStatisticsCallback failure",
+                 __FUNCTION__);
+  }
+  if (vcm_.SetRenderDelay(kViEDefaultRenderDelayMs) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::SetRenderDelay failure", __FUNCTION__);
+  }
+  if (module_process_thread_.RegisterModule(&vcm_) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterModule(vcm) failure", __FUNCTION__);
+    return -1;
+  }
+#ifdef VIDEOCODEC_VP8
+  // Install VP8 with default settings as both send and receive codec.
+  VideoCodec video_codec;
+  if (vcm_.Codec(kVideoCodecVP8, &video_codec) == VCM_OK) {
+    rtp_rtcp_.RegisterSendPayload(video_codec);
+    rtp_rtcp_.RegisterReceivePayload(video_codec);
+    vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_);
+    vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                           rtp_rtcp_.MaxDataPayloadLength());
+  } else {
+    assert(false);
+  }
+#endif
+
+  return 0;
+}
+
+// Destructor: tears down in dependency order — first silence callbacks and
+// transports so no module calls back into a half-destroyed channel, then
+// deregister from the process thread, destroy simulcast modules, stop the
+// decode thread, and finally release the owned modules.
+ViEChannel::~ViEChannel() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "ViEChannel Destructor, channel_id: %d, engine_id: %d",
+               channel_id_, engine_id_);
+
+  // Make sure we don't get more callbacks from the RTP module.
+  rtp_rtcp_.RegisterIncomingRTPCallback(NULL);
+  rtp_rtcp_.RegisterSendTransport(NULL);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  socket_transport_.StopReceiving();
+#endif
+  module_process_thread_.DeRegisterModule(&rtp_rtcp_);
+  module_process_thread_.DeRegisterModule(&vcm_);
+  module_process_thread_.DeRegisterModule(&vie_sync_);
+  // Detach and destroy each simulcast RTP module.
+  while (simulcast_rtp_rtcp_.size() > 0) {
+    std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->RegisterIncomingRTCPCallback(NULL);
+    rtp_rtcp->RegisterSendTransport(NULL);
+    module_process_thread_.DeRegisterModule(rtp_rtcp);
+    RtpRtcp::DestroyRtpRtcp(rtp_rtcp);
+    simulcast_rtp_rtcp_.erase(it);
+  }
+  if (decode_thread_) {
+    StopDecodeThread();
+  }
+
+  // These were heap-allocated in the constructor and held by reference.
+  delete &vie_receiver_;
+  delete &vie_sender_;
+  delete &vie_sync_;
+
+  // Release modules.
+  RtpRtcp::DestroyRtpRtcp(&rtp_rtcp_);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  UdpTransport::Destroy(&socket_transport_);
+#endif
+  VideoCodingModule::Destroy(&vcm_);
+}
+
+// Configures the send codec on this channel's RTP module(s). For simulcast,
+// one extra RtpRtcp module per additional stream is created or removed as
+// needed, and each stream gets a slice of the start bitrate capped by its
+// configured max. If |new_stream| and we are currently sending, the RTP
+// module is stopped and restarted to trigger a new SSRC (unless one was set
+// explicitly). Returns 0 on success, -1 on failure.
+WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
+                                       bool new_stream) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: codec_type: %d", __FUNCTION__, video_codec.codecType);
+
+  // RED/ULPFEC are protection payloads, not send codecs.
+  if (video_codec.codecType == kVideoCodecRED ||
+      video_codec.codecType == kVideoCodecULPFEC) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: codec_type: %d is not a valid send codec.", __FUNCTION__,
+                 video_codec.codecType);
+    return -1;
+  }
+  if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Too many simulcast streams", __FUNCTION__);
+    return -1;
+  }
+  // Update the RTP module with the settings.
+  // Stop and Start the RTP module -> trigger new SSRC, if an SSRC hasn't been
+  // set explicitly.
+  bool restart_rtp = false;
+  if (rtp_rtcp_.Sending() && new_stream) {
+    restart_rtp = true;
+    rtp_rtcp_.SetSendingStatus(false);
+  }
+  if (video_codec.numberOfSimulcastStreams > 0) {
+    WebRtc_UWord32 start_bitrate = video_codec.startBitrate * 1000;
+    WebRtc_UWord32 stream_bitrate =
+        std::min(start_bitrate, video_codec.simulcastStream[0].maxBitrate);
+    start_bitrate -= stream_bitrate;
+    // Set correct bitrate to base layer.
+    rtp_rtcp_.SetSendBitrate(stream_bitrate, video_codec.minBitrate,
+                             video_codec.simulcastStream[0].maxBitrate);
+    // Create our simulcast RTP modules.
+    for (int i = simulcast_rtp_rtcp_.size();
+         i < video_codec.numberOfSimulcastStreams - 1;
+         i++) {
+      RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(
+          ViEModuleId(engine_id_, channel_id_), false);
+      if (rtp_rtcp->RegisterDefaultModule(default_rtp_rtcp_)) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: could not register default module", __FUNCTION__);
+        return -1;
+      }
+      simulcast_rtp_rtcp_.push_back(rtp_rtcp);
+    }
+    // Remove last in list if we have too many.
+    for (int j = simulcast_rtp_rtcp_.size();
+         j > (video_codec.numberOfSimulcastStreams - 1);
+         j--) {
+      RtpRtcp* rtp_rtcp = simulcast_rtp_rtcp_.back();
+      rtp_rtcp->RegisterIncomingRTCPCallback(NULL);
+      rtp_rtcp->RegisterSendTransport(NULL);
+      module_process_thread_.DeRegisterModule(rtp_rtcp);
+      RtpRtcp::DestroyRtpRtcp(rtp_rtcp);
+      simulcast_rtp_rtcp_.pop_back();
+    }
+    // NOTE(review): this local declaration SHADOWS the |video_codec|
+    // parameter for the rest of this scope. From here on, |video_codec| is
+    // the VCM's *default* VP8 codec, not the caller's codec — the loop below
+    // reads plType AND simulcastStream[idx] from the default codec. Confirm
+    // whether only plType was meant to come from the default codec and the
+    // simulcast/bitrate settings from the parameter.
+    VideoCodec video_codec;
+    if (vcm_.Codec(kVideoCodecVP8, &video_codec) != VCM_OK) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: VCM: failure geting default VP8 pl_type", __FUNCTION__);
+      return -1;
+    }
+    WebRtc_UWord8 idx = 0;
+    // Configure all simulcast modules.
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end();
+         it++) {
+      // Pre-incremented: the first simulcast module maps to
+      // simulcastStream[1]; [0] is the base layer handled above.
+      idx++;
+      RtpRtcp* rtp_rtcp = *it;
+      if (rtp_rtcp->InitSender() != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: RTP::InitSender failure", __FUNCTION__);
+        return -1;
+      }
+      if (rtp_rtcp->InitReceiver() != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: RTP::InitReceiver failure", __FUNCTION__);
+        return -1;
+      }
+      if (rtp_rtcp->RegisterSendTransport(
+          static_cast<Transport*>(&vie_sender_)) != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: RTP::RegisterSendTransport failure", __FUNCTION__);
+        return -1;
+      }
+      // Silently ignore error.
+      module_process_thread_.RegisterModule(rtp_rtcp);
+      if (rtp_rtcp->SetRTCPStatus(rtp_rtcp_.RTCP()) != 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: RTP::SetRTCPStatus failure", __FUNCTION__);
+      }
+      rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
+      if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: could not register payload type", __FUNCTION__);
+        return -1;
+      }
+      if (restart_rtp) {
+        rtp_rtcp->SetSendingStatus(true);
+      }
+      // Configure all simulcast streams min and max bitrates
+      // NOTE(review): due to the shadowing above this reads the default VP8
+      // codec's simulcastStream[], not the caller's.
+      const WebRtc_UWord32 stream_bitrate =
+          std::min(start_bitrate, video_codec.simulcastStream[idx].maxBitrate);
+      start_bitrate -= stream_bitrate;
+      rtp_rtcp->SetSendBitrate(stream_bitrate, video_codec.minBitrate,
+                               video_codec.simulcastStream[idx].maxBitrate);
+    }
+    vie_receiver_.RegisterSimulcastRtpRtcpModules(simulcast_rtp_rtcp_);
+  } else {
+    if (!simulcast_rtp_rtcp_.empty()) {
+      // Delete all simulcast rtp modules.
+      while (!simulcast_rtp_rtcp_.empty()) {
+        RtpRtcp* rtp_rtcp = simulcast_rtp_rtcp_.back();
+        rtp_rtcp->RegisterIncomingRTCPCallback(NULL);
+        rtp_rtcp->RegisterSendTransport(NULL);
+        module_process_thread_.DeRegisterModule(rtp_rtcp);
+        RtpRtcp::DestroyRtpRtcp(rtp_rtcp);
+        simulcast_rtp_rtcp_.pop_back();
+      }
+    }
+    // Clear any previous modules.
+    vie_receiver_.RegisterSimulcastRtpRtcpModules(simulcast_rtp_rtcp_);
+
+    rtp_rtcp_.SetSendBitrate(video_codec.startBitrate * 1000,
+                             video_codec.minBitrate,
+                             video_codec.maxBitrate);
+  }
+  // Enable this if H264 is available.
+  // This sets the wanted packetization mode.
+  // if (video_codec.plType == kVideoCodecH264) {
+  //   if (video_codec.codecSpecific.H264.packetization ==  kH264SingleMode) {
+  //     rtp_rtcp_.SetH264PacketizationMode(H264_SINGLE_NAL_MODE);
+  //   } else {
+  //     rtp_rtcp_.SetH264PacketizationMode(H264_NON_INTERLEAVED_MODE);
+  //   }
+  //   if (video_codec.codecSpecific.H264.configParametersSize > 0) {
+  //     rtp_rtcp_.SetH264SendModeNALU_PPS_SPS(true);
+  //   }
+  // }
+
+  // Don't log this error, no way to check in advance if this pl_type is
+  // registered or not...
+  rtp_rtcp_.DeRegisterSendPayload(video_codec.plType);
+  if (rtp_rtcp_.RegisterSendPayload(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not register payload type", __FUNCTION__);
+    return -1;
+  }
+  if (restart_rtp) {
+    rtp_rtcp_.SetSendingStatus(true);
+  }
+  return 0;
+}
+
+// Registers |video_codec| as a receive payload with the RTP module and, for
+// real video codecs (not RED/ULPFEC), as a decoder with the VCM.
+WebRtc_Word32 ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
+  // We will not receive simulcast streams, so no need to handle that use case.
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  // Drop any payload type previously mapped to this codec.
+  WebRtc_Word8 old_pltype = -1;
+  const bool had_old_payload =
+      (rtp_rtcp_.ReceivePayloadType(video_codec, &old_pltype) != -1);
+  if (had_old_payload) {
+    rtp_rtcp_.DeRegisterReceivePayload(old_pltype);
+  }
+
+  if (rtp_rtcp_.RegisterReceivePayload(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not register receive payload type", __FUNCTION__);
+    return -1;
+  }
+
+  // RED/ULPFEC are RTP-level protection payloads; only actual video codecs
+  // are registered as decoders with the VCM.
+  const bool is_protection_payload =
+      (video_codec.codecType == kVideoCodecRED ||
+       video_codec.codecType == kVideoCodecULPFEC);
+  if (!is_protection_payload &&
+      vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_,
+                                wait_for_key_frame_) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not register decoder", __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+// Fetches the currently configured receive codec from the VCM into
+// |video_codec|. Returns 0 on success, -1 on failure.
+WebRtc_Word32 ViEChannel::GetReceiveCodec(VideoCodec& video_codec) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  const WebRtc_Word32 vcm_result = vcm_.ReceiveCodec(&video_codec);
+  if (vcm_result != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get receive codec", __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+// Sets (observer != NULL) or clears (observer == NULL) the decoder observer.
+// Fails with -1 if an observer is already set, respectively if none is set.
+WebRtc_Word32 ViEChannel::RegisterCodecObserver(ViEDecoderObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer != NULL) {
+    // Register path: refuse to overwrite an existing observer.
+    if (codec_observer_ != NULL) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: already added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    codec_observer_ = observer;
+    return 0;
+  }
+  // Deregister path: there must be something to remove.
+  if (codec_observer_ == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: no observer added", __FUNCTION__);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: observer removed", __FUNCTION__);
+  codec_observer_ = NULL;
+  return 0;
+}
+
+// Hands an externally implemented decoder for |pl_type| to the VCM. When the
+// decoder also renders, the VCM render delay is set so frames are delivered
+// |render_delay| ms ahead of their render time.
+WebRtc_Word32 ViEChannel::RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
+                                                  VideoDecoder* decoder,
+                                                  bool decoder_render,
+                                                  WebRtc_Word32 render_delay) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  const WebRtc_Word32 register_result =
+      vcm_.RegisterExternalDecoder(decoder, pl_type, decoder_render);
+  if (register_result != 0 || !decoder_render) {
+    return register_result;
+  }
+  // Let VCM know how long before the actual render time the decoder needs
+  // to get a frame for decoding.
+  return vcm_.SetRenderDelay(render_delay);
+}
+
+// Removes the external decoder for |pl_type|. If that payload type is the
+// current receive codec, it is re-registered so the VCM falls back to the
+// built-in decoder. Returns 0 on success, -1 on failure.
+WebRtc_Word32 ViEChannel::DeRegisterExternalDecoder(
+    const WebRtc_UWord8 pl_type) {
+  // Fix: pl_type was passed as a trace argument without a matching
+  // conversion specifier; "%d" added to the format string.
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s pl_type: %d", __FUNCTION__, pl_type);
+
+  // Remember the current receive codec before unregistering.
+  VideoCodec current_receive_codec;
+  WebRtc_Word32 result = 0;
+  result = vcm_.ReceiveCodec(&current_receive_codec);
+  if (vcm_.RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
+    return -1;
+  }
+
+  // Restore the built-in decoder if the removed one was in use.
+  if (result == 0 && current_receive_codec.plType == pl_type) {
+    result = vcm_.RegisterReceiveCodec(&current_receive_codec, number_of_cores_,
+                                       wait_for_key_frame_);
+  }
+  return result;
+}
+
+// Reports how many key and delta frames the VCM has received, via the two
+// output reference parameters. Returns 0 on success, -1 on failure.
+WebRtc_Word32 ViEChannel::ReceiveCodecStatistics(
+    WebRtc_UWord32& num_key_frames, WebRtc_UWord32& num_delta_frames) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  VCMFrameCount frame_count;
+  if (vcm_.ReceivedFrameCount(frame_count) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get received frame information", __FUNCTION__);
+    return -1;
+  }
+  num_key_frames = frame_count.numKeyFrames;
+  num_delta_frames = frame_count.numDeltaFrames;
+  return 0;
+}
+
+// Returns the VCM's running count of discarded incoming packets.
+WebRtc_UWord32 ViEChannel::DiscardedPackets() const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  return vcm_.DiscardedPackets();
+}
+
+// Stores the wait-for-key-frame flag; it is used as the |wait_for_key_frame|
+// argument whenever a receive codec is (re-)registered with the VCM.
+WebRtc_Word32 ViEChannel::WaitForKeyFrame(bool wait) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(wait: %d)", __FUNCTION__, wait);
+  wait_for_key_frame_ = wait;
+  return 0;
+}
+
+// Controls the VCM's key-frame-request-on-loss protection. With
+// |only_key_frames| set, requests are issued only when a key frame is lost;
+// otherwise on any loss. Disabling turns both modes off.
+WebRtc_Word32 ViEChannel::SetSignalPacketLossStatus(bool enable,
+                                                    bool only_key_frames) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+  if (!enable) {
+    // Disable both protection modes.
+    vcm_.SetVideoProtection(kProtectionKeyOnLoss, false);
+    vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, false);
+    return 0;
+  }
+  if (only_key_frames) {
+    // Key-loss-only mode: switch off the any-loss variant first.
+    vcm_.SetVideoProtection(kProtectionKeyOnLoss, false);
+    if (vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, true) != VCM_OK) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s failed %d", __FUNCTION__, enable);
+      return -1;
+    }
+    return 0;
+  }
+  // Any-loss mode: switch off the key-loss-only variant first.
+  vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, false);
+  if (vcm_.SetVideoProtection(kProtectionKeyOnLoss, true) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s failed %d", __FUNCTION__, enable);
+    return -1;
+  }
+  return 0;
+}
+
+// Applies the RTCP mode to every simulcast RTP module and then to the main
+// module; returns the main module's result.
+WebRtc_Word32 ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, rtcp_mode);
+
+  std::list<RtpRtcp*>::iterator module = simulcast_rtp_rtcp_.begin();
+  for (; module != simulcast_rtp_rtcp_.end(); ++module) {
+    (*module)->SetRTCPStatus(rtcp_mode);
+  }
+  return rtp_rtcp_.SetRTCPStatus(rtcp_mode);
+}
+
+// Returns the main RTP module's current RTCP mode via |rtcp_mode|.
+WebRtc_Word32 ViEChannel::GetRTCPMode(RTCPMethod& rtcp_mode) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  rtcp_mode = rtp_rtcp_.RTCP();
+  return 0;
+}
+
+// Enables/disables NACK-based protection. NACK and FEC are mutually
+// exclusive here: enabling NACK also disables any active FEC. Returns the
+// result of ProcessNACKRequest(), or -1 if the VCM update fails.
+// Fix: the identical vcm_.SetVideoProtection(kProtectionNack, enable) guard
+// appeared twice verbatim (before and after the FEC update); the redundant
+// second copy is removed.
+WebRtc_Word32 ViEChannel::SetNACKStatus(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+
+  // Update the decoding VCM.
+  if (vcm_.SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+  if (enable) {
+    // Disable possible FEC.
+    SetFECStatus(false, 0, 0);
+  }
+  return ProcessNACKRequest(enable);
+}
+
+WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+
+  if (enable) {
+    // Turn on NACK.
+    NACKMethod nackMethod = kNackRtcp;
+    if (rtp_rtcp_.RTCP() == kRtcpOff) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not enable NACK, RTPC not on ", __FUNCTION__);
+      return -1;
+    }
+    if (rtp_rtcp_.SetNACKStatus(nackMethod) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not set NACK method %d", __FUNCTION__,
+                   nackMethod);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Using NACK method %d", __FUNCTION__, nackMethod);
+    rtp_rtcp_.SetStorePacketsStatus(true, kNackHistorySize);
+
+    vcm_.RegisterPacketRequestCallback(this);
+
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end();
+         it++) {
+      RtpRtcp* rtp_rtcp = *it;
+      rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
+    }
+  } else {
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end();
+         it++) {
+      RtpRtcp* rtp_rtcp = *it;
+      rtp_rtcp->SetStorePacketsStatus(false);
+    }
+    rtp_rtcp_.SetStorePacketsStatus(false);
+    vcm_.RegisterPacketRequestCallback(NULL);
+    if (rtp_rtcp_.SetNACKStatus(kNackOff) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not turn off NACK", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetFECStatus(const bool enable,
+                                       const unsigned char payload_typeRED,
+                                       const unsigned char payload_typeFEC) {
+  // Disable possible NACK.
+  if (enable) {
+    SetNACKStatus(false);
+  }
+
+  return ProcessFECRequest(enable, payload_typeRED, payload_typeFEC);
+}
+
+WebRtc_Word32 ViEChannel::ProcessFECRequest(
+    const bool enable,
+    const unsigned char payload_typeRED,
+    const unsigned char payload_typeFEC) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d, payload_typeRED: %u, payload_typeFEC: %u)",
+               __FUNCTION__, enable, payload_typeRED, payload_typeFEC);
+
+  if (rtp_rtcp_.SetGenericFECStatus(enable, payload_typeRED,
+                                    payload_typeFEC) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not change FEC status to %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetGenericFECStatus(enable, payload_typeRED, payload_typeFEC);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetHybridNACKFECStatus(
+    const bool enable,
+    const unsigned char payload_typeRED,
+    const unsigned char payload_typeFEC) {
+  // Update the decoding VCM with hybrid mode.
+  if (vcm_.SetVideoProtection(kProtectionNackFEC, enable) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+
+  WebRtc_Word32 ret_val = 0;
+  ret_val = ProcessNACKRequest(enable);
+  if (ret_val < 0) {
+    return ret_val;
+  }
+  return ProcessFECRequest(enable, payload_typeRED, payload_typeFEC);
+}
+
+WebRtc_Word32 ViEChannel::SetKeyFrameRequestMethod(
+    const KeyFrameRequestMethod method) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, method);
+  return rtp_rtcp_.SetKeyFrameRequestMethod(method);
+}
+
+bool ViEChannel::EnableRemb(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "ViEChannel::EnableRemb: %d", enable);
+  if (rtp_rtcp_.SetREMBStatus(enable) != 0)
+    return false;
+  return true;
+}
+
+WebRtc_Word32 ViEChannel::EnableTMMBR(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, enable);
+  return rtp_rtcp_.SetTMMBRStatus(enable);
+}
+
+WebRtc_Word32 ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, enable);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (enable && !codec_observer_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: No ViECodecObserver set", __FUNCTION__, enable);
+    return -1;
+  }
+  do_key_frame_callbackRequest_ = enable;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetSSRC(const WebRtc_UWord32 SSRC,
+                                  const StreamType usage,
+                                  const uint8_t simulcast_idx) {
+  WEBRTC_TRACE(webrtc::kTraceInfo,
+               webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(usage:%d, SSRC: 0x%x, idx:%u)",
+               __FUNCTION__, usage, SSRC, simulcast_idx);
+  if (simulcast_idx == 0) {
+    return rtp_rtcp_.SetSSRC(SSRC);
+  }
+  std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+  for (int i = 1; i < simulcast_idx; ++i, ++it) {
+    if (it ==  simulcast_rtp_rtcp_.end()) {
+      return -1;
+    }
+  }
+  RtpRtcp* rtp_rtcp = *it;
+  if (usage == kViEStreamTypeRtx) {
+    return rtp_rtcp->SetRTXSendStatus(true, true, SSRC);
+  }
+  return rtp_rtcp->SetSSRC(SSRC);
+}
+
+WebRtc_Word32 ViEChannel::SetRemoteSSRCType(const StreamType usage,
+                                            const uint32_t SSRC) const {
+  WEBRTC_TRACE(webrtc::kTraceInfo,
+               webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(usage:%d, SSRC: 0x%x)",
+               __FUNCTION__, usage, SSRC);
+
+  return rtp_rtcp_.SetRTXReceiveStatus(true, SSRC);
+}
+
+WebRtc_Word32 ViEChannel::GetLocalSSRC(WebRtc_UWord32& SSRC) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  SSRC = rtp_rtcp_.SSRC();
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetRemoteSSRC(WebRtc_UWord32& SSRC) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  SSRC = rtp_rtcp_.RemoteSSRC();
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetRemoteCSRC(unsigned int CSRCs[kRtpCsrcSize]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  WebRtc_UWord32 arrayCSRC[kRtpCsrcSize];
+  memset(arrayCSRC, 0, sizeof(arrayCSRC));
+
+  WebRtc_Word32 num_csrcs = rtp_rtcp_.RemoteCSRCs(arrayCSRC);
+  if (num_csrcs > 0) {
+    memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(WebRtc_UWord32));
+    for (int idx = 0; idx < num_csrcs; idx++) {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "\tCSRC[%d] = %lu", idx, CSRCs[idx]);
+    }
+  } else {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: CSRC list is empty", __FUNCTION__);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetStartSequenceNumber(
+    WebRtc_UWord16 sequence_number) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (rtp_rtcp_.Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: already sending", __FUNCTION__);
+    return -1;
+  }
+  return rtp_rtcp_.SetSequenceNumber(sequence_number);
+}
+
+WebRtc_Word32 ViEChannel::SetRTCPCName(const WebRtc_Word8 rtcp_cname[]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  if (rtp_rtcp_.Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: already sending", __FUNCTION__);
+    return -1;
+  }
+  return rtp_rtcp_.SetCNAME(rtcp_cname);
+}
+
+WebRtc_Word32 ViEChannel::GetRTCPCName(WebRtc_Word8 rtcp_cname[]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  return rtp_rtcp_.CNAME(rtcp_cname);
+}
+
+WebRtc_Word32 ViEChannel::GetRemoteRTCPCName(WebRtc_Word8 rtcp_cname[]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  WebRtc_UWord32 remoteSSRC = rtp_rtcp_.RemoteSSRC();
+  return rtp_rtcp_.RemoteCNAME(remoteSSRC, rtcp_cname);
+}
+
+WebRtc_Word32 ViEChannel::RegisterRtpObserver(ViERTPObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (rtp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: observer alread added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    rtp_observer_ = observer;
+  } else {
+    if (!rtp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    rtp_observer_ = NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterRtcpObserver(ViERTCPObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (rtcp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: observer alread added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    rtcp_observer_ = observer;
+  } else {
+    if (!rtcp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    rtcp_observer_ = NULL;
+  }
+  return 0;
+}
+
// Sends an application-defined (APP) RTCP packet. Preconditions, checked in
// order: the channel must be sending, |data| must be non-NULL,
// |data_length_in_bytes| must be a multiple of 4, and RTCP must be enabled.
// Returns 0 on success, -1 on any failed precondition or send error.
WebRtc_Word32 ViEChannel::SendApplicationDefinedRTCPPacket(
    const WebRtc_UWord8 sub_type,
    WebRtc_UWord32 name,
    const WebRtc_UWord8* data,
    WebRtc_UWord16 data_length_in_bytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  if (!rtp_rtcp_.Sending()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: not sending", __FUNCTION__);
    return -1;
  }
  if (!data) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: no input argument", __FUNCTION__);
    return -1;
  }
  // RTCP payload length must be a whole number of 32-bit words.
  if (data_length_in_bytes % 4 != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: input length error", __FUNCTION__);
    return -1;
  }
  RTCPMethod rtcp_method = rtp_rtcp_.RTCP();
  if (rtcp_method == kRtcpOff) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: RTCP not enabled", __FUNCTION__);
    return -1;
  }
  // Create and send packet.
  if (rtp_rtcp_.SetRTCPApplicationSpecificData(sub_type, name, data,
                                               data_length_in_bytes) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Could not send RTCP application data", __FUNCTION__);
    return -1;
  }
  return 0;
}
+
// Retrieves send-side statistics as reported back by the remote receiver in
// RTCP receiver reports: loss figures, highest extended sequence number,
// jitter, and round-trip time. Prefers the report block whose SSRC matches
// the known remote SSRC, falling back to the first received block.
// Returns -1 if no report blocks exist or the RTT lookup fails.
WebRtc_Word32 ViEChannel::GetSendRtcpStatistics(WebRtc_UWord16& fraction_lost,
                                                WebRtc_UWord32& cumulative_lost,
                                                WebRtc_UWord32& extended_max,
                                                WebRtc_UWord32& jitter_samples,
                                                WebRtc_Word32& rtt_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // TODO(pwestin) how do we do this for simulcast ? average for all
  // except cumulative_lost that is the sum ?
  // for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
  //      it != simulcast_rtp_rtcp_.end();
  //      it++) {
  //   RtpRtcp* rtp_rtcp = *it;
  // }
  WebRtc_UWord32 remoteSSRC = rtp_rtcp_.RemoteSSRC();

  // Get all RTCP receiver report blocks that have been received on this
  // channel. If we receive RTP packets from a remote source we know the
  // remote SSRC and use the report block from him.
  // Otherwise use the first report block.
  std::vector<RTCPReportBlock> remote_stats;
  if (rtp_rtcp_.RemoteRTCPStat(&remote_stats) != 0 || remote_stats.empty()) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Could not get remote stats", __FUNCTION__);
    return -1;
  }
  // Search for the block reported by the known remote SSRC.
  std::vector<RTCPReportBlock>::const_iterator statistics =
      remote_stats.begin();
  for (; statistics != remote_stats.end(); ++statistics) {
    if (statistics->remoteSSRC == remoteSSRC)
      break;
  }

  if (statistics == remote_stats.end()) {
    // If we have not received any RTCP packets from this SSRC it probably means
    // we have not received any RTP packets.
    // Use the first received report block instead.
    statistics = remote_stats.begin();
    remoteSSRC = statistics->remoteSSRC;
  }

  fraction_lost = statistics->fractionLost;
  cumulative_lost = statistics->cumulativeLost;
  extended_max = statistics->extendedHighSeqNum;
  jitter_samples = statistics->jitter;

  // Only the RTT value is wanted; the other RTT outputs are discarded.
  WebRtc_UWord16 dummy;
  WebRtc_UWord16 rtt = 0;
  if (rtp_rtcp_.RTT(remoteSSRC, &rtt, &dummy, &dummy, &dummy) != 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Could not get RTT", __FUNCTION__);
    return -1;
  }
  rtt_ms = rtt;
  return 0;
}
+
+WebRtc_Word32 ViEChannel::GetReceivedRtcpStatistics(
+    WebRtc_UWord16& fraction_lost,
+    WebRtc_UWord32& cumulative_lost,
+    WebRtc_UWord32& extended_max,
+    WebRtc_UWord32& jitter_samples,
+    WebRtc_Word32& rtt_ms) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  WebRtc_UWord8 frac_lost = 0;
+  if (rtp_rtcp_.StatisticsRTP(&frac_lost, &cumulative_lost, &extended_max,
+                              &jitter_samples) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get received RTP statistics", __FUNCTION__);
+    return -1;
+  }
+  fraction_lost = frac_lost;
+
+  WebRtc_UWord32 remoteSSRC = rtp_rtcp_.RemoteSSRC();
+  WebRtc_UWord16 dummy = 0;
+  WebRtc_UWord16 rtt = 0;
+  if (rtp_rtcp_.RTT(remoteSSRC, &rtt, &dummy, &dummy, &dummy) != 0) {
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get RTT", __FUNCTION__);
+  }
+  rtt_ms = rtt;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetRtpStatistics(
+    WebRtc_UWord32& bytes_sent,
+    WebRtc_UWord32& packets_sent,
+    WebRtc_UWord32& bytes_received,
+    WebRtc_UWord32& packets_received) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (rtp_rtcp_.DataCountersRTP(&bytes_sent,
+                                &packets_sent,
+                                &bytes_received,
+                                &packets_received) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get counters", __FUNCTION__);
+    return -1;
+  }
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    WebRtc_UWord32 bytes_sent_temp = 0;
+    WebRtc_UWord32 packets_sent_temp = 0;
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp, NULL, NULL);
+    bytes_sent += bytes_sent_temp;
+    packets_sent += packets_sent_temp;
+  }
+  return 0;
+}
+
+void ViEChannel::GetBandwidthUsage(WebRtc_UWord32& total_bitrate_sent,
+                                   WebRtc_UWord32& video_bitrate_sent,
+                                   WebRtc_UWord32& fec_bitrate_sent,
+                                   WebRtc_UWord32& nackBitrateSent) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  rtp_rtcp_.BitrateSent(&total_bitrate_sent,
+                        &video_bitrate_sent,
+                        &fec_bitrate_sent,
+                        &nackBitrateSent);
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end(); it++) {
+    WebRtc_UWord32 stream_rate = 0;
+    WebRtc_UWord32 video_rate = 0;
+    WebRtc_UWord32 fec_rate = 0;
+    WebRtc_UWord32 nackRate = 0;
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->BitrateSent(&stream_rate, &video_rate, &fec_rate, &nackRate);
+    total_bitrate_sent += stream_rate;
+    fec_bitrate_sent += fec_rate;
+    nackBitrateSent += nackRate;
+  }
+}
+
// Writes the receive-side bandwidth estimate from the default RTP/RTCP
// module into |estimated_bandwidth| and forwards the module's return code.
int ViEChannel::GetEstimatedReceiveBandwidth(
    WebRtc_UWord32* estimated_bandwidth) const {
  return rtp_rtcp_.EstimatedReceiveBandwidth(estimated_bandwidth);
}
+
// Enables or disables RTP keepalive packets. |unknown_payload_type| is the
// payload type used for the keepalive packets and |delta_transmit_timeMS|
// the interval between them. Enabling fails if keepalive is already on;
// disabling fails if it is already off. Enabling also turns sending on (so
// sender reports are produced); disabling turns sending off again when no
// media is being sent. Returns 0 on success, -1 on any failure.
WebRtc_Word32 ViEChannel::SetKeepAliveStatus(
    const bool enable,
    const WebRtc_Word8 unknown_payload_type,
    const WebRtc_UWord16 delta_transmit_timeMS) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s", __FUNCTION__);

  // Reject no-op transitions.
  if (enable && rtp_rtcp_.RTPKeepalive()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: RTP keepalive already enabled", __FUNCTION__);
    return -1;
  } else if (!enable && !rtp_rtcp_.RTPKeepalive()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: RTP keepalive already disabled", __FUNCTION__);
    return -1;
  }

  if (rtp_rtcp_.SetRTPKeepaliveStatus(enable, unknown_payload_type,
                                      delta_transmit_timeMS) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Could not set RTP keepalive status %d", __FUNCTION__,
                 enable);
    // NOTE(review): even on failure, disabling still resets counters and
    // stops sending when no default module is registered — presumably an
    // intentional best-effort cleanup; confirm against the RtpRtcp module.
    if (enable == false && !rtp_rtcp_.DefaultModuleRegistered()) {
      // Not sending media and we try to disable keep alive
      rtp_rtcp_.ResetSendDataCountersRTP();
      rtp_rtcp_.SetSendingStatus(false);
    }
    return -1;
  }

  if (enable && !rtp_rtcp_.Sending()) {
    // Enable sending to start sending Sender reports instead of receive
    // reports.
    if (rtp_rtcp_.SetSendingStatus(true) != 0) {
      // Roll back the keepalive setting if sending could not be started.
      rtp_rtcp_.SetRTPKeepaliveStatus(false, 0, 0);
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: Could not start sending", __FUNCTION__);
      return -1;
    }
  } else if (!enable && !rtp_rtcp_.SendingMedia()) {
    // Not sending media and we're disabling keep alive.
    rtp_rtcp_.ResetSendDataCountersRTP();
    if (rtp_rtcp_.SetSendingStatus(false) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: Could not stop sending", __FUNCTION__);
      return -1;
    }
  }
  return 0;
}
+
+WebRtc_Word32 ViEChannel::GetKeepAliveStatus(
+    bool& enabled,
+    WebRtc_Word8& unknown_payload_type,
+    WebRtc_UWord16& delta_transmit_time_ms) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (rtp_rtcp_.RTPKeepaliveStatus(&enabled, &unknown_payload_type,
+                                   &delta_transmit_time_ms) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get RTP keepalive status", __FUNCTION__);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: enabled = %d, unknown_payload_type = %d, "
+               "delta_transmit_time_ms = %ul",
+               __FUNCTION__, enabled, (WebRtc_Word32) unknown_payload_type,
+    delta_transmit_time_ms);
+
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
+                                       RTPDirections direction) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (direction != kRtpIncoming && direction != kRtpOutgoing) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: invalid input", __FUNCTION__);
+    return -1;
+  }
+
+  if (direction == kRtpIncoming) {
+    return vie_receiver_.StartRTPDump(file_nameUTF8);
+  } else {
+    return vie_sender_.StartRTPDump(file_nameUTF8);
+  }
+}
+
+WebRtc_Word32 ViEChannel::StopRTPDump(RTPDirections direction) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  if (direction != kRtpIncoming && direction != kRtpOutgoing) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: invalid input", __FUNCTION__);
+    return -1;
+  }
+
+  if (direction == kRtpIncoming) {
+    return vie_receiver_.StopRTPDump();
+  } else {
+    return vie_sender_.StopRTPDump();
+  }
+}
+
// Binds the built-in UDP transport's receive sockets to |rtp_port| /
// |rtcp_port| on |ip_address|. Fails if an external transport is registered,
// if already receiving, or (when compiled for external transport only)
// unconditionally. Returns 0 on success, -1 on failure.
WebRtc_Word32 ViEChannel::SetLocalReceiver(const WebRtc_UWord16 rtp_port,
                                           const WebRtc_UWord16 rtcp_port,
                                           const WebRtc_Word8* ip_address) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // Only |external_transport_| is protected; the lock is released before
  // touching the socket transport.
  callback_cs_->Enter();
  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: external transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.Receiving()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: already receiving", __FUNCTION__);
    return -1;
  }

  // No multicast support here.
  const WebRtc_Word8* multicast_ip_address = NULL;
  if (socket_transport_.InitializeReceiveSockets(&vie_receiver_, rtp_port,
                                                 ip_address,
                                                 multicast_ip_address,
                                                 rtcp_port) != 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: could not initialize receive sockets. Socket error: %d",
                 __FUNCTION__, socket_error);
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Reads back the local receive socket configuration (ports and IP address).
// Fails if an external transport is registered, if receive sockets were
// never initialized, or when compiled for external transport only.
WebRtc_Word32 ViEChannel::GetLocalReceiver(WebRtc_UWord16& rtp_port,
                                           WebRtc_UWord16& rtcp_port,
                                           WebRtc_Word8* ip_address) const {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // Only |external_transport_| is protected by the lock.
  callback_cs_->Enter();
  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: external transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.ReceiveSocketsInitialized() == false) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: receive sockets not initialized", __FUNCTION__);
    return -1;
  }

  // The multicast address is required by the API but discarded here.
  WebRtc_Word8 multicast_ip_address[UdpTransport::kIpAddressVersion6Length];
  if (socket_transport_.ReceiveSocketInformation(ip_address, rtp_port,
                                                 rtcp_port,
                                                 multicast_ip_address) != 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
      "%s: could not get receive socket information. Socket error: %d",
      __FUNCTION__, socket_error);
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Configures the built-in UDP transport's send destination: validates the
// IP address, initializes send sockets, optionally sets explicit source
// ports, and registers the transport with the sender. If the destination is
// the local host (IPv4 or IPv6 loopback), the SSRC is forced to 0xFFFFFFFF
// to avoid SSRC-collision detection in loopback tests.
// Fails if an external transport is registered or this build only supports
// external transport.
WebRtc_Word32 ViEChannel::SetSendDestination(
    const WebRtc_Word8* ip_address,
    const WebRtc_UWord16 rtp_port,
    const WebRtc_UWord16 rtcp_port,
    const WebRtc_UWord16 source_rtp_port,
    const WebRtc_UWord16 source_rtcp_port) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // Only |external_transport_| is protected by the lock.
  callback_cs_->Enter();
  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: external transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  const bool is_ipv6 = socket_transport_.IpV6Enabled();
  if (UdpTransport::IsIpAddressValid(ip_address, is_ipv6) == false) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Not a valid IP address: %s", __FUNCTION__, ip_address);
    return -1;
  }
  if (socket_transport_.InitializeSendSockets(ip_address, rtp_port,
                                              rtcp_port)!= 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: could not initialize send socket. Socket error: %d",
                 __FUNCTION__, socket_error);
    return -1;
  }

  if (source_rtp_port != 0) {
    WebRtc_UWord16 receive_rtp_port = 0;
    WebRtc_UWord16 receive_rtcp_port = 0;
    if (socket_transport_.ReceiveSocketInformation(NULL, receive_rtp_port,
                                                   receive_rtcp_port,
                                                   NULL) != 0) {
      WebRtc_Word32 socket_error = socket_transport_.LastError();
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
        "%s: could not get receive port information. Socket error: %d",
        __FUNCTION__, socket_error);
      return -1;
    }
    // Initialize an extra socket only if send port differs from receive
    // port.
    if (source_rtp_port != receive_rtp_port) {
      if (socket_transport_.InitializeSourcePorts(source_rtp_port,
                                                  source_rtcp_port) != 0) {
        WebRtc_Word32 socket_error = socket_transport_.LastError();
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                     "%s: could not set source ports. Socket error: %d",
                     __FUNCTION__, socket_error);
        return -1;
      }
    }
  }
  vie_sender_.RegisterSendTransport(&socket_transport_);

  // Workaround to avoid SSRC collision detection in loopback tests.
  if (!is_ipv6) {
    // IPv4: compare against the local host address and the literal
    // 127.0.0.1 loopback address.
    WebRtc_UWord32 local_host_address = 0;
    const WebRtc_UWord32 current_ip_address =
        UdpTransport::InetAddrIPV4(ip_address);

    if ((UdpTransport::LocalHostAddress(local_host_address) == 0 &&
        local_host_address == current_ip_address) ||
        strncmp("127.0.0.1", ip_address, 9) == 0) {
      rtp_rtcp_.SetSSRC(0xFFFFFFFF);
      WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "Running in loopback. Forcing fixed SSRC");
    }
  } else {
    // IPv6: compare the 16 address bytes against the local host address and
    // the ::1 loopback address.
    char local_host_address[16];
    char current_ip_address[16];

    // NOTE(review): 23 is presumably the IPv6 address-family constant this
    // transport expects (AF_INET6 on Windows) — confirm against UdpTransport.
    WebRtc_Word32 conv_result =
      UdpTransport::LocalHostAddressIPV6(local_host_address);
    conv_result += socket_transport_.InetPresentationToNumeric(
        23, ip_address, current_ip_address);
    if (conv_result == 0) {
      bool local_host = true;
      for (WebRtc_Word32 i = 0; i < 16; i++) {
        if (local_host_address[i] != current_ip_address[i]) {
          local_host = false;
          break;
        }
      }
      if (!local_host) {
        // Check for ::1 — all bytes zero except the last, which must be 1.
        local_host = true;
        for (WebRtc_Word32 i = 0; i < 15; i++) {
          if (current_ip_address[i] != 0) {
            local_host = false;
            break;
          }
        }
        if (local_host == true && current_ip_address[15] != 1) {
          local_host = false;
        }
      }
      if (local_host) {
        rtp_rtcp_.SetSSRC(0xFFFFFFFF);
        WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
                     ViEId(engine_id_, channel_id_),
                     "Running in loopback. Forcing fixed SSRC");
      }
    }
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
               ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Reads back the configured send destination (IP, RTP/RTCP ports) and, if
// explicit source ports were initialized, the source ports (otherwise 0).
// Fails if an external transport is registered, send sockets were never
// initialized, or this build only supports external transport.
WebRtc_Word32 ViEChannel::GetSendDestination(
    WebRtc_Word8* ip_address,
    WebRtc_UWord16& rtp_port,
    WebRtc_UWord16& rtcp_port,
    WebRtc_UWord16& source_rtp_port,
    WebRtc_UWord16& source_rtcp_port) const {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // Only |external_transport_| is protected by the lock.
  callback_cs_->Enter();
  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: external transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.SendSocketsInitialized() == false) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: send sockets not initialized", __FUNCTION__);
    return -1;
  }
  if (socket_transport_.SendSocketInformation(ip_address, rtp_port, rtcp_port)
      != 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
      "%s: could not get send socket information. Socket error: %d",
      __FUNCTION__, socket_error);
    return -1;
  }
  // Source ports are optional; report 0 when they were never set.
  source_rtp_port = 0;
  source_rtcp_port = 0;
  if (socket_transport_.SourcePortsInitialized()) {
    socket_transport_.SourcePorts(source_rtp_port, source_rtcp_port);
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
      "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
+WebRtc_Word32 ViEChannel::StartSend() {
+  CriticalSectionScoped cs(callback_cs_.get());
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (!external_transport_) {
+    if (socket_transport_.SendSocketsInitialized() == false) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: send sockets not initialized", __FUNCTION__);
+      return -1;
+    }
+  }
+#endif
+  rtp_rtcp_.SetSendingMediaStatus(true);
+
+  if (rtp_rtcp_.Sending() && !rtp_rtcp_.RTPKeepalive()) {
+    if (rtp_rtcp_.RTPKeepalive()) {
+      // Sending Keep alive, don't trigger an error.
+      return 0;
+    }
+    // Already sending.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Already sending", __FUNCTION__);
+    return kViEBaseAlreadySending;
+  }
+  if (rtp_rtcp_.SetSendingStatus(true) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not start sending RTP", __FUNCTION__);
+    return -1;
+  }
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetSendingMediaStatus(true);
+    rtp_rtcp->SetSendingStatus(true);
+  }
+  return 0;
+}
+
// Stops RTP sending. Media status is always cleared, but RTP transmission is
// kept alive when RTP keep-alive is enabled. Returns kViEBaseNotSending when
// the channel was not sending.
WebRtc_Word32 ViEChannel::StopSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // Media status is cleared unconditionally, even on the early-return paths
  // below.
  rtp_rtcp_.SetSendingMediaStatus(false);
  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
       it != simulcast_rtp_rtcp_.end();
       it++) {
    RtpRtcp* rtp_rtcp = *it;
    rtp_rtcp->SetSendingMediaStatus(false);
  }
  if (rtp_rtcp_.RTPKeepalive()) {
    // Don't turn off sending since we'll send keep alive packets.
    return 0;
  }
  if (!rtp_rtcp_.Sending()) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Not sending", __FUNCTION__);
    return kViEBaseNotSending;
  }

  // Reset.
  rtp_rtcp_.ResetSendDataCountersRTP();
  if (rtp_rtcp_.SetSendingStatus(false) != 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: could not stop RTP sending", __FUNCTION__);
    return -1;
  }
  // Stop the simulcast modules too; their return values are ignored.
  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
       it != simulcast_rtp_rtcp_.end();
       it++) {
    RtpRtcp* rtp_rtcp = *it;
    rtp_rtcp->ResetSendDataCountersRTP();
    rtp_rtcp->SetSendingStatus(false);
  }
  return 0;
}
+
+bool ViEChannel::Sending() {
+  return rtp_rtcp_.Sending();
+}
+
// Starts packet reception: brings up the socket transport (unless an
// external transport feeds packets in), then the decode thread, then the
// receiver. On decode-thread failure the socket transport and receiver are
// torn down again.
WebRtc_Word32 ViEChannel::StartReceive() {
  CriticalSectionScoped cs(callback_cs_.get());
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (!external_transport_) {
    if (socket_transport_.Receiving()) {
      // Warning, don't return error.
      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: already receiving", __FUNCTION__);
      return 0;
    }
    if (socket_transport_.ReceiveSocketsInitialized() == false) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: receive sockets not initialized", __FUNCTION__);
      return -1;
    }
    if (socket_transport_.StartReceiving(kViENumReceiveSocketBuffers) != 0) {
      WebRtc_Word32 socket_error = socket_transport_.LastError();
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
        "%s: could not get receive socket information. Socket error:%d",
        __FUNCTION__, socket_error);
      return -1;
    }
  }
#endif
  if (StartDecodeThread() != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: could not start decoder thread", __FUNCTION__);

    // Unwind: stop the socket transport started above and make sure the
    // receiver is not left running.
#ifndef WEBRTC_EXTERNAL_TRANSPORT
    socket_transport_.StopReceiving();
#endif
    vie_receiver_.StopReceive();
    return -1;
  }
  vie_receiver_.StartReceive();

  return 0;
}
+
// Stops packet reception and decoding. With an external transport there is
// no socket transport to stop, so the function returns early after the
// receiver/decoder are shut down.
WebRtc_Word32 ViEChannel::StopReceive() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  // Receiver and decoder are torn down regardless of transport type.
  vie_receiver_.StopReceive();
  StopDecodeThread();
  vcm_.ResetDecoder();
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      return 0;
    }
  }

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.Receiving() == false) {
    // Warning, don't return error
    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
                 ViEId(engine_id_, channel_id_), "%s: not receiving",
                 __FUNCTION__);
    return 0;
  }
  if (socket_transport_.StopReceiving() != 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Socket error: %d", __FUNCTION__, socket_error);
    return -1;
  }
#endif

  return 0;
}
+
// Reports whether the built-in socket transport is receiving. Always false
// in WEBRTC_EXTERNAL_TRANSPORT builds (packets are pushed in externally, so
// there is no socket state to query).
bool ViEChannel::Receiving() {
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  return socket_transport_.Receiving();
#else
  return false;
#endif
}
+
// Returns the remote (source) IP and ports of the peer we are receiving
// from. The caller-provided buffer must be large enough for the address
// family in use (IPv6 needs the larger length). Not available with an
// external transport.
WebRtc_Word32 ViEChannel::GetSourceInfo(WebRtc_UWord16& rtp_port,
                                        WebRtc_UWord16& rtcp_port,
                                        WebRtc_Word8* ip_address,
                                        WebRtc_UWord32 ip_address_length) {
  {
    CriticalSectionScoped cs(callback_cs_.get());
    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
                 __FUNCTION__);
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: external transport registered", __FUNCTION__);
      return -1;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  // Validate the buffer against the address family currently enabled.
  if (socket_transport_.IpV6Enabled() &&
      ip_address_length < UdpTransport::kIpAddressVersion6Length) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: IP address length is too small for IPv6", __FUNCTION__);
    return -1;
  } else if (ip_address_length < UdpTransport::kIpAddressVersion4Length) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: IP address length is too small for IPv4", __FUNCTION__);
    return -1;
  }

  if (socket_transport_.RemoteSocketInformation(ip_address, rtp_port, rtcp_port)
      != 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Error getting source ports. Socket error: %d",
                 __FUNCTION__, socket_error);
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+WebRtc_Word32 ViEChannel::RegisterSendTransport(Transport& transport) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.SendSocketsInitialized() ||
+      socket_transport_.ReceiveSocketsInitialized()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s:  socket transport already initialized", __FUNCTION__);
+    return -1;
+  }
+#endif
+  if (rtp_rtcp_.Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Sending", __FUNCTION__);
+    return -1;
+  }
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (external_transport_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: transport already registered", __FUNCTION__);
+    return -1;
+  }
+  external_transport_ = &transport;
+  vie_sender_.RegisterSendTransport(&transport);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: Transport registered: 0x%p", __FUNCTION__,
+               &external_transport_);
+
+  return 0;
+}
+
// Removes a previously registered external send transport. Fails if none is
// registered or if the channel is still sending (the transport must not be
// pulled out from under an active sender).
WebRtc_Word32 ViEChannel::DeregisterSendTransport() {
  CriticalSectionScoped cs(callback_cs_.get());
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  if (!external_transport_) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: no transport registered", __FUNCTION__);
    return -1;
  }
  if (rtp_rtcp_.Sending()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Sending", __FUNCTION__);
    return -1;
  }
  external_transport_ = NULL;
  vie_sender_.DeregisterSendTransport();
  return 0;
}
+
+WebRtc_Word32 ViEChannel::ReceivedRTPPacket(
+    const void* rtp_packet, const WebRtc_Word32 rtp_packet_length) {
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (!external_transport_) {
+      return -1;
+    }
+  }
+  return vie_receiver_.ReceivedRTPPacket(rtp_packet, rtp_packet_length);
+}
+
+WebRtc_Word32 ViEChannel::ReceivedRTCPPacket(
+  const void* rtcp_packet, const WebRtc_Word32 rtcp_packet_length) {
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (!external_transport_) {
+      return -1;
+    }
+  }
+  return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
+}
+
// Switches the socket transport to IPv6. Fails if an external transport is
// registered, if IPv6 is already enabled, or on a socket error. Not
// available in WEBRTC_EXTERNAL_TRANSPORT builds.
WebRtc_Word32 ViEChannel::EnableIPv6() {
  // Lock only around the external_transport_ read.
  callback_cs_->Enter();
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s", __FUNCTION__);

  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(engine_id_, channel_id_),
                 "%s: External transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.IpV6Enabled()) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: IPv6 already enabled", __FUNCTION__);
    return -1;
  }

  if (socket_transport_.EnableIpV6() != 0) {
    WebRtc_Word32 socket_error = socket_transport_.LastError();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: could not enable IPv6. Socket error: %d", __FUNCTION__,
                 socket_error);
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Reports whether the socket transport runs over IPv6. Returns false (with
// an error trace) when an external transport is registered, since the
// question does not apply to it.
bool ViEChannel::IsIPv6Enabled() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: External transport registered", __FUNCTION__);
      return false;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  return socket_transport_.IpV6Enabled();
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return false;
#endif
}
+
// Restricts reception to packets from the given source IP and ports (an
// incoming-packet filter on the socket transport). Not available with an
// external transport.
WebRtc_Word32 ViEChannel::SetSourceFilter(const WebRtc_UWord16 rtp_port,
                                          const WebRtc_UWord16 rtcp_port,
                                          const WebRtc_Word8* ip_address) {
  // Lock only around the external_transport_ read.
  callback_cs_->Enter();
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: External transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.SetFilterIP(ip_address) != 0) {
    // Logging done in module.
    return -1;
  }
  if (socket_transport_.SetFilterPorts(rtp_port, rtcp_port) != 0) {
    // Logging done.
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Reads back the source filter configured via SetSourceFilter. Not
// available with an external transport.
WebRtc_Word32 ViEChannel::GetSourceFilter(WebRtc_UWord16& rtp_port,
                                          WebRtc_UWord16& rtcp_port,
                                          WebRtc_Word8* ip_address) const {
  // Lock only around the external_transport_ read.
  callback_cs_->Enter();
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);

  if (external_transport_) {
    callback_cs_->Leave();
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: External transport registered", __FUNCTION__);
    return -1;
  }
  callback_cs_->Leave();

#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.FilterIP(ip_address) != 0) {
    // Logging done in module.
    return -1;
  }
  if (socket_transport_.FilterPorts(rtp_port, rtcp_port) != 0) {
    // Logging done in module.
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Sets the DSCP/ToS value on the socket transport, optionally via
// setsockopt. Not available with an external transport.
WebRtc_Word32 ViEChannel::SetToS(const WebRtc_Word32 DSCP,
                                 const bool use_set_sockOpt) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: External transport registered", __FUNCTION__);
      return -1;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.SetToS(DSCP, use_set_sockOpt) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Socket error: %d", __FUNCTION__,
                 socket_transport_.LastError());
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Reads back the DSCP/ToS setting configured via SetToS. Not available with
// an external transport.
WebRtc_Word32 ViEChannel::GetToS(WebRtc_Word32& DSCP,
                                 bool& use_set_sockOpt) const {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: External transport registered", __FUNCTION__);
      return -1;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.ToS(DSCP, use_set_sockOpt) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Socket error: %d", __FUNCTION__,
                 socket_transport_.LastError());
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Enables/disables GQoS on the socket transport for the send side. Not
// available with an external transport.
WebRtc_Word32 ViEChannel::SetSendGQoS(const bool enable,
                                      const WebRtc_Word32 service_type,
                                      const WebRtc_UWord32 max_bitrate,
                                      const WebRtc_Word32 overrideDSCP) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: External transport registered", __FUNCTION__);
      return -1;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  // The final `false` argument's meaning is defined by UdpTransport::SetQoS;
  // it is passed through unchanged here.
  if (socket_transport_.SetQoS(enable, service_type, max_bitrate, overrideDSCP,
                               false) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Socket error: %d", __FUNCTION__,
                 socket_transport_.LastError());
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
// Reads back the GQoS configuration set via SetSendGQoS. Not available with
// an external transport.
WebRtc_Word32 ViEChannel::GetSendGQoS(bool& enabled,
                                      WebRtc_Word32& service_type,
                                      WebRtc_Word32& overrideDSCP) const {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: External transport registered", __FUNCTION__);
      return -1;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  if (socket_transport_.QoS(enabled, service_type, overrideDSCP) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: Socket error: %d", __FUNCTION__,
                 socket_transport_.LastError());
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
+WebRtc_Word32 ViEChannel::SetMTU(WebRtc_UWord16 mtu) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (rtp_rtcp_.SetMaxTransferUnit(mtu) != 0) {
+    // Logging done.
+    return -1;
+  }
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetMaxTransferUnit(mtu);
+  }
+  return 0;
+}
+
+WebRtc_UWord16 ViEChannel::MaxDataPayloadLength() const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  return rtp_rtcp_.MaxDataPayloadLength();
+}
+
+WebRtc_Word32 ViEChannel::SetPacketTimeoutNotification(
+    bool enable, WebRtc_UWord32 timeout_seconds) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (enable) {
+    WebRtc_UWord32 timeout_ms = 1000 * timeout_seconds;
+    if (rtp_rtcp_.SetPacketTimeout(timeout_ms, 0) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s", __FUNCTION__);
+      return -1;
+    }
+  } else {
+    if (rtp_rtcp_.SetPacketTimeout(0, 0) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterNetworkObserver(
+    ViENetworkObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (networkObserver_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: observer alread added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    networkObserver_ = observer;
+  } else {
+    if (!networkObserver_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    networkObserver_ = NULL;
+  }
+  return 0;
+}
+
+bool ViEChannel::NetworkObserverRegistered() {
+  CriticalSectionScoped cs(callback_cs_.get());
+  return networkObserver_ != NULL;
+}
+
+WebRtc_Word32 ViEChannel::SetPeriodicDeadOrAliveStatus(
+  const bool enable, const WebRtc_UWord32 sample_time_seconds) {
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (!networkObserver_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: no observer added", __FUNCTION__);
+    return -1;
+  }
+
+  bool enabled = false;
+  WebRtc_UWord8 current_sampletime_seconds = 0;
+
+  // Get old settings.
+  rtp_rtcp_.PeriodicDeadOrAliveStatus(enabled, current_sampletime_seconds);
+  // Set new settings.
+  if (rtp_rtcp_.SetPeriodicDeadOrAliveStatus(
+        enable, static_cast<WebRtc_UWord8>(sample_time_seconds)) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set periodic dead-or-alive status",
+                 __FUNCTION__);
+    return -1;
+  }
+  if (!enable) {
+    // Restore last utilized sample time.
+    // Without this trick, the sample time would always be reset to default
+    // (2 sec), each time dead-or-alive was disabled without sample-time
+    // parameter.
+    rtp_rtcp_.SetPeriodicDeadOrAliveStatus(enable, current_sampletime_seconds);
+  }
+  return 0;
+}
+
// Sends a raw UDP packet over the channel's RTP (or RTCP) socket, reporting
// the number of bytes transmitted. Not available with an external transport.
WebRtc_Word32 ViEChannel::SendUDPPacket(const WebRtc_Word8* data,
                                        const WebRtc_UWord32 length,
                                        WebRtc_Word32& transmitted_bytes,
                                        bool use_rtcp_socket) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
               __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (external_transport_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
                   "%s: External transport registered", __FUNCTION__);
      return -1;
    }
  }
#ifndef WEBRTC_EXTERNAL_TRANSPORT
  // SendRaw returns -1 on failure; that value is also left in
  // transmitted_bytes for the caller.
  transmitted_bytes = socket_transport_.SendRaw(data, length, use_rtcp_socket);
  if (transmitted_bytes == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
                 __FUNCTION__);
    return -1;
  }
  return 0;
#else
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s: not available for external transport", __FUNCTION__);
  return -1;
#endif
}
+
+WebRtc_Word32 ViEChannel::EnableColorEnhancement(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (enable && color_enhancement_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Already enabled", __FUNCTION__);
+    return -1;
+  } else if (!enable && !color_enhancement_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: not enabled", __FUNCTION__);
+    return -1;
+  }
+  color_enhancement_ = enable;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterSendRtpRtcpModule(
+    RtpRtcp& send_rtp_rtcp_module) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  WebRtc_Word32 ret_val = rtp_rtcp_.RegisterDefaultModule(
+      &send_rtp_rtcp_module);
+  if (ret_val == 0) {
+    // We need to store this for the SetSendCodec call.
+    default_rtp_rtcp_ = &send_rtp_rtcp_module;
+  }
+  return ret_val;
+}
+
+WebRtc_Word32 ViEChannel::DeregisterSendRtpRtcpModule() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  default_rtp_rtcp_ = NULL;
+
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->DeRegisterDefaultModule();
+  }
+  return rtp_rtcp_.DeRegisterDefaultModule();
+}
+
// Accessor for the channel's default RTP/RTCP module (owned by the channel;
// the caller must not delete it).
RtpRtcp* ViEChannel::rtp_rtcp() {
  return &rtp_rtcp_;
}
+
+
// VCM render callback: notifies the codec observer on decoder resets,
// applies the effect filter and color enhancement, records the frame, and
// delivers it downstream together with the contributing sources.
WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) {
  CriticalSectionScoped cs(callback_cs_.get());

  if (decoder_reset_) {
    // Trigger a callback to the user if the incoming codec has changed.
    if (codec_observer_) {
      VideoCodec decoder;
      memset(&decoder, 0, sizeof(decoder));
      if (vcm_.ReceiveCodec(&decoder) == VCM_OK) {
        // VCM::ReceiveCodec returns the codec set by
        // RegisterReceiveCodec, which might not be the size we're
        // actually decoding.
        decoder.width = static_cast<unsigned short>(video_frame.Width());
        decoder.height = static_cast<unsigned short>(video_frame.Height());
        codec_observer_->IncomingCodecChanged(channel_id_, decoder);
      } else {
        assert(false);
        WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
                     "%s: Could not get receive codec", __FUNCTION__);
      }
    }
    decoder_reset_ = false;
  }
  // Optional in-place frame transformation registered by the user.
  if (effect_filter_) {
    effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(),
                              video_frame.TimeStamp(), video_frame.Width(),
                              video_frame.Height());
  }
  if (color_enhancement_) {
    VideoProcessingModule::ColorEnhancement(video_frame);
  }

  // Record videoframe.
  file_recorder_.RecordVideoFrame(video_frame);

  // Deliver with the remote CSRC list; fall back to the remote SSRC as the
  // single contributing source when no CSRCs are reported.
  WebRtc_UWord32 arr_ofCSRC[kRtpCsrcSize];
  WebRtc_Word32 no_of_csrcs = rtp_rtcp_.RemoteCSRCs(arr_ofCSRC);
  if (no_of_csrcs <= 0) {
    arr_ofCSRC[0] = rtp_rtcp_.RemoteSSRC();
    no_of_csrcs = 1;
  }
  DeliverFrame(video_frame, no_of_csrcs, arr_ofCSRC);
  return 0;
}
+
// VCM callback: forwards a decoded-reference-frame notification as an RTCP
// reference picture selection (RPSI) message.
WebRtc_Word32 ViEChannel::ReceivedDecodedReferenceFrame(
  const WebRtc_UWord64 picture_id) {
  return rtp_rtcp_.SendRTCPReferencePictureSelection(picture_id);
}
+
// VCM callback. Intentionally a no-op: received encoded frames are not
// persisted by this channel; frame_to_store is ignored.
WebRtc_Word32 ViEChannel::StoreReceivedFrame(
  const EncodedVideoData& frame_to_store) {
  return 0;
}
+
+WebRtc_Word32 ViEChannel::ReceiveStatistics(const WebRtc_UWord32 bit_rate,
+                                            const WebRtc_UWord32 frame_rate) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (codec_observer_) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: bitrate %u, framerate %u", __FUNCTION__, bit_rate,
+                 frame_rate);
+    codec_observer_->IncomingRate(channel_id_, frame_rate, bit_rate);
+  }
+  return 0;
+}
+
// Requests a key frame from the sender. If the user asked for key-frame
// callbacks, the observer is notified instead of relying solely on RTCP;
// the RTCP request is sent in both cases.
WebRtc_Word32 ViEChannel::RequestKeyFrame() {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
               "%s", __FUNCTION__);
  {
    CriticalSectionScoped cs(callback_cs_.get());
    if (codec_observer_ && do_key_frame_callbackRequest_) {
      codec_observer_->RequestNewKeyFrame(channel_id_);
    }
  }
  return rtp_rtcp_.RequestKeyFrame();
}
+
// Sends an RTCP Slice Loss Indication for the given picture.
// NOTE(review): the 64-bit picture_id is truncated to 8 bits here —
// presumably matching SendRTCPSliceLossIndication's parameter type, but
// verify that the truncation is intended for large picture ids.
WebRtc_Word32 ViEChannel::SliceLossIndicationRequest(
  const WebRtc_UWord64 picture_id) {
  return rtp_rtcp_.SendRTCPSliceLossIndication((WebRtc_UWord8) picture_id);
}
+
+WebRtc_Word32 ViEChannel::ResendPackets(const WebRtc_UWord16* sequence_numbers,
+                                        WebRtc_UWord16 length) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(length: %d)", __FUNCTION__, length);
+  return rtp_rtcp_.SendNACK(sequence_numbers, length);
+}
+
+bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
+  return static_cast<ViEChannel*>(obj)->ChannelDecodeProcess();
+}
+
// Decode-thread body: decodes one frame (blocking up to kMaxDecodeWaitTimeMs)
// and, at most once per second, feeds the measured RTT to the VCM. Returns
// true so the thread keeps running.
bool ViEChannel::ChannelDecodeProcess() {
  // Decode is blocking, but sleep some time anyway to not get a spin.
  vcm_.Decode(kMaxDecodeWaitTimeMs);

  // Refresh the VCM's RTT estimate at most once per second.
  if ((TickTime::Now() - vcm_rttreported_).Milliseconds() > 1000) {
    WebRtc_UWord16 RTT;
    WebRtc_UWord16 avgRTT;
    WebRtc_UWord16 minRTT;
    WebRtc_UWord16 maxRTT;

    if (rtp_rtcp_.RTT(rtp_rtcp_.RemoteSSRC(), &RTT, &avgRTT, &minRTT, &maxRTT)
        == 0) {
      vcm_.SetReceiveChannelParameters(RTT);
      vcm_rttreported_ = TickTime::Now();
    } else if (!rtp_rtcp_.Sending() &&
               (TickTime::Now() - vcm_rttreported_).Milliseconds() > 5000) {
      // Wait at least 5 seconds before faking a 200 ms RTT. This is to
      // make sure we have a chance to start sending before we decide to fake.
      vcm_.SetReceiveChannelParameters(200);
      vcm_rttreported_ = TickTime::Now();
    }
  }
  return true;
}
+
+WebRtc_Word32 ViEChannel::StartDecodeThread() {
+  // Start the decode thread
+  if (decode_thread_) {
+    // Already started.
+    return 0;
+  }
+  decode_thread_ = ThreadWrapper::CreateThread(ChannelDecodeThreadFunction,
+                                                   this, kHighestPriority,
+                                                   "DecodingThread");
+  if (!decode_thread_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not create decode thread", __FUNCTION__);
+    return -1;
+  }
+
+  unsigned int thread_id;
+  if (decode_thread_->Start(thread_id) == false) {
+    delete decode_thread_;
+    decode_thread_ = NULL;
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not start decode thread", __FUNCTION__);
+    return -1;
+  }
+
+  // Used to make sure that we don't give the VCM a faked RTT
+  // too early.
+  vcm_rttreported_ = TickTime::Now();
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: decode thread with id %u started", __FUNCTION__);
+  return 0;
+}
+
// Stops and destroys the decode thread. If the thread refuses to stop, the
// ThreadWrapper object is deliberately leaked (deleting a still-running
// thread would crash); the pointer is cleared in either case.
WebRtc_Word32 ViEChannel::StopDecodeThread() {
  if (!decode_thread_) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: decode thread not running", __FUNCTION__);
    return 0;
  }

  decode_thread_->SetNotAlive();
  if (decode_thread_->Stop()) {
    delete decode_thread_;
  } else {
    // Couldn't stop the thread, leak instead of crash.
    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
                 "%s: could not stop decode thread", __FUNCTION__);
    assert(!"could not stop decode thread");
  }
  decode_thread_ = NULL;
  return 0;
}
+
+WebRtc_Word32 ViEChannel::RegisterExternalEncryption(Encryption* encryption) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (external_encryption_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external encryption already registered", __FUNCTION__);
+    return -1;
+  }
+
+  external_encryption_ = encryption;
+
+  vie_receiver_.RegisterExternalDecryption(encryption);
+  vie_sender_.RegisterExternalEncryption(encryption);
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", "external encryption object registerd with channel=%d",
+               channel_id_);
+  return 0;
+}
+
+// Deregisters a previously registered external encryption object from the
+// channel, the receiver and the sender. Returns 0 on success, -1 if no
+// object was registered.
+WebRtc_Word32 ViEChannel::DeRegisterExternalEncryption() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (!external_encryption_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external encryption is not registered", __FUNCTION__);
+    return -1;
+  }
+
+  // Bug fix: this previously cleared external_transport_ instead of
+  // external_encryption_, leaving a dangling encryption pointer and silently
+  // dropping any registered external transport.
+  external_encryption_ = NULL;
+  vie_receiver_.DeregisterExternalDecryption();
+  vie_sender_.DeregisterExternalEncryption();
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s external encryption object de-registerd with channel=%d",
+               __FUNCTION__, channel_id_);
+  return 0;
+}
+
+// Associates this video channel with a voice channel for lip sync. Passing a
+// NULL sync interface disables syncing. Returns the sync module's result.
+WebRtc_Word32 ViEChannel::SetVoiceChannel(WebRtc_Word32 ve_channel_id,
+                                          VoEVideoSync* ve_sync_interface) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s, audio channel %d, video channel %d", __FUNCTION__,
+               ve_channel_id, channel_id_);
+
+  // A non-NULL interface enables lip sync processing; NULL disables it.
+  if (!ve_sync_interface) {
+    module_process_thread_.DeRegisterModule(&vie_sync_);
+  } else {
+    module_process_thread_.RegisterModule(&vie_sync_);
+  }
+  return vie_sync_.SetVoiceChannel(ve_channel_id, ve_sync_interface);
+}
+
+// Returns the voice channel id currently associated via SetVoiceChannel().
+WebRtc_Word32 ViEChannel::VoiceChannel() {
+  return vie_sync_.VoiceChannel();
+}
+
+// Registers (non-NULL argument) or deregisters (NULL argument) an effect
+// filter for decoded frames. Returns -1 when registering over an existing
+// filter or deregistering when none is set; 0 otherwise.
+WebRtc_Word32 ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (effect_filter) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: register effect filter for device %d", __FUNCTION__,
+                 channel_id_);
+    if (effect_filter_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: effect filter already added for channel %d",
+                   __FUNCTION__, channel_id_);
+      return -1;
+    }
+  } else {
+    if (!effect_filter_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no effect filter added for channel %d",
+                   __FUNCTION__, channel_id_);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: deregister effect filter for device %d", __FUNCTION__,
+                 channel_id_);
+  }
+  effect_filter_ = effect_filter;
+  return 0;
+}
+
+// Returns the incoming-stream file recorder. As a side effect, registers this
+// channel as the VCM frame-storage callback so each frame is delivered to
+// StoreReceivedFrame() before decoding.
+ViEFileRecorder& ViEChannel::GetIncomingFileRecorder() {
+  // Start getting callback of all frames before they are decoded.
+  vcm_.RegisterFrameStorageCallback(this);
+  return file_recorder_;
+}
+
+// Undoes GetIncomingFileRecorder(): stops pre-decode frame callbacks.
+void ViEChannel::ReleaseIncomingFileRecorder() {
+  // Stop getting callback of all frames before they are decoded.
+  vcm_.RegisterFrameStorageCallback(NULL);
+}
+
+// RtcpFeedback callback: forwards the reported audio/video offset to the
+// sync module. Ignores callbacks that belong to another channel.
+void ViEChannel::OnLipSyncUpdate(const WebRtc_Word32 id,
+                                 const WebRtc_Word32 audio_video_offset) {
+  if (channel_id_ != ChannelId(id)) {
+    // Bug fix: |id| was passed as a trace argument but the format string had
+    // no conversion for it, so the offending id was never printed.
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id %d", __FUNCTION__, id);
+    return;
+  }
+  vie_sync_.SetNetworkDelay(audio_video_offset);
+}
+
+// RtcpFeedback callback: forwards application-defined RTCP (APP) packet data
+// to the registered RTCP observer, if any. Ignores other channels' callbacks.
+void ViEChannel::OnApplicationDataReceived(const WebRtc_Word32 id,
+                                           const WebRtc_UWord8 sub_type,
+                                           const WebRtc_UWord32 name,
+                                           const WebRtc_UWord16 length,
+                                           const WebRtc_UWord8* data) {
+  if (ChannelId(id) != channel_id_) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id", __FUNCTION__, id);
+    return;
+  }
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (rtcp_observer_) {
+    rtcp_observer_->OnApplicationDataReceived(
+        channel_id_, sub_type, name, reinterpret_cast<const char*>(data),
+        length);
+  }
+}
+
+// RtpFeedback callback: a new payload type was seen on the incoming stream,
+// so reset the decoder and flag the reset for the decode thread. Always
+// returns 0.
+WebRtc_Word32 ViEChannel::OnInitializeDecoder(
+    const WebRtc_Word32 id,
+    const WebRtc_Word8 payload_type,
+    const WebRtc_Word8 payload_name[RTP_PAYLOAD_NAME_SIZE],
+    const int frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: payload_type %d, payload_name %s", __FUNCTION__,
+               payload_type, payload_name);
+  vcm_.ResetDecoder();
+
+  // Use the scoped-lock helper instead of manual Enter()/Leave(), matching
+  // how every other method in this file takes callback_cs_. The original
+  // Leave() was immediately before the return, so this is equivalent.
+  CriticalSectionScoped cs(callback_cs_.get());
+  decoder_reset_ = true;
+  return 0;
+}
+
+// RtpFeedback callback: no RTP packet arrived within the configured timeout.
+// Notifies the registered network observer and remembers the timeout so
+// OnReceivedPacket() can later report recovery.
+void ViEChannel::OnPacketTimeout(const WebRtc_Word32 id) {
+  assert(ChannelId(id) == channel_id_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (networkObserver_) {
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    // With the built-in UDP transport, only report the timeout while we are
+    // actually receiving or an external transport is registered.
+    if (socket_transport_.Receiving() || external_transport_) {
+#else
+    if (external_transport_) {
+#endif
+      networkObserver_->PacketTimeout(channel_id_, NoPacket);
+      // Marks the timeout so a later packet triggers a PacketReceived event.
+      rtp_packet_timeout_ = true;
+    }
+  }
+}
+
+// RtpFeedback callback: invoked for received packets. If an RTP packet
+// timeout was previously reported, signal to the network observer that
+// packets are flowing again.
+void ViEChannel::OnReceivedPacket(const WebRtc_Word32 id,
+                                  const RtpRtcpPacketType packet_type) {
+  assert(ChannelId(id) == channel_id_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (rtp_packet_timeout_ && packet_type == kPacketRtp) {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (networkObserver_) {
+      networkObserver_->PacketTimeout(channel_id_, PacketReceived);
+    }
+
+    // Reset even if no observer set, might have been removed during timeout.
+    rtp_packet_timeout_ = false;
+  }
+}
+
+// RtpFeedback callback: periodic dead-or-alive report for the incoming
+// stream, forwarded to the network observer as a boolean.
+void ViEChannel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                       const RTPAliveType alive) {
+  assert(ChannelId(id) == channel_id_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(id=%d, alive=%d)", __FUNCTION__, id, alive);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (networkObserver_) {
+    // Collapse the alive type into a simple flag for the observer.
+    const bool is_alive = (alive != kRtpDead);
+    networkObserver_->OnPeriodicDeadOrAlive(channel_id_, is_alive);
+  }
+}
+
+// RtpFeedback callback: the SSRC of the incoming stream changed; forward the
+// new SSRC to the registered RTP observer, if any.
+void ViEChannel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 SSRC) {
+  if (channel_id_ != ChannelId(id)) {
+    assert(false);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id", __FUNCTION__, id);
+    return;
+  }
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %u", __FUNCTION__, SSRC);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (rtp_observer_) {
+    rtp_observer_->IncomingSSRCChanged(channel_id_, SSRC);
+  }
+}
+
+// RtpFeedback callback: a CSRC was added to or removed from the incoming
+// stream; forward the change to the registered RTP observer, if any.
+void ViEChannel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 CSRC,
+                                       const bool added) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %u added: %d", __FUNCTION__, CSRC, added);
+
+  if (channel_id_ != ChannelId(id)) {
+    assert(false);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id", __FUNCTION__, id);
+    return;
+  }
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %u", __FUNCTION__, CSRC);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (rtp_observer_) {
+    rtp_observer_->IncomingCSRCChanged(channel_id_, CSRC, added);
+  }
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_channel.h b/trunk/src/video_engine/vie_channel.h
new file mode 100644
index 0000000..dac268f
--- /dev/null
+++ b/trunk/src/video_engine/vie_channel.h
@@ -0,0 +1,404 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// vie_channel.h
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
+
+#include <list>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "modules/udp_transport/interface/udp_transport.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_file_recorder.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class Encryption;
+class ProcessThread;
+class RtpRtcp;
+class ThreadWrapper;
+class VideoCodingModule;
+class VideoDecoder;
+class VideoRenderCallback;
+class ViEDecoderObserver;
+class ViEEffectFilter;
+class ViENetworkObserver;
+class ViEReceiver;
+class ViERTCPObserver;
+class ViERTPObserver;
+class ViESender;
+class ViESyncModule;
+class VoEVideoSync;
+
+// ViEChannel ties together everything one video channel needs: RTP/RTCP
+// send/receive, video decoding, A/V sync, transport and the user-registered
+// observers/callbacks. It implements the VCM and RTP/RTCP callback
+// interfaces listed below so the owned modules can report back into it.
+class ViEChannel
+    : public VCMFrameTypeCallback,
+      public VCMReceiveCallback,
+      public VCMReceiveStatisticsCallback,
+      public VCMPacketRequestCallback,
+      public VCMFrameStorageCallback,
+      public RtcpFeedback,
+      public RtpFeedback,
+      public ViEFrameProviderBase {
+ public:
+  ViEChannel(WebRtc_Word32 channel_id,
+             WebRtc_Word32 engine_id,
+             WebRtc_UWord32 number_of_cores,
+             ProcessThread& module_process_thread);
+  ~ViEChannel();
+
+  WebRtc_Word32 Init();
+
+  // Sets the encoder to use for the channel. |new_stream| indicates the encoder
+  // type has changed and we should start a new RTP stream.
+  WebRtc_Word32 SetSendCodec(const VideoCodec& video_codec,
+                             bool new_stream = true);
+  WebRtc_Word32 SetReceiveCodec(const VideoCodec& video_codec);
+  WebRtc_Word32 GetReceiveCodec(VideoCodec& video_codec);
+  WebRtc_Word32 RegisterCodecObserver(ViEDecoderObserver* observer);
+  // Registers an external decoder. |decoder_render| is set to true if the
+  // decoder will do the rendering. If |decoder_render| is set, |render_delay|
+  // indicates the time needed to decode and render a frame.
+  WebRtc_Word32 RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
+                                        VideoDecoder* decoder,
+                                        bool decoder_render,
+                                        WebRtc_Word32 render_delay);
+  WebRtc_Word32 DeRegisterExternalDecoder(const WebRtc_UWord8 pl_type);
+  WebRtc_Word32 ReceiveCodecStatistics(WebRtc_UWord32& num_key_frames,
+                                       WebRtc_UWord32& num_delta_frames);
+  WebRtc_UWord32 DiscardedPackets() const;
+
+  // Only affects calls to SetReceiveCodec done after this call.
+  WebRtc_Word32 WaitForKeyFrame(bool wait);
+
+  // If enabled, a key frame request will be sent as soon as there are lost
+  // packets. If |only_key_frames| are set, requests are only sent for loss in
+  // key frames.
+  WebRtc_Word32 SetSignalPacketLossStatus(bool enable, bool only_key_frames);
+
+  // RTCP mode and loss-protection configuration (NACK, FEC, hybrid).
+  WebRtc_Word32 SetRTCPMode(const RTCPMethod rtcp_mode);
+  WebRtc_Word32 GetRTCPMode(RTCPMethod& rtcp_mode);
+  WebRtc_Word32 SetNACKStatus(const bool enable);
+  WebRtc_Word32 SetFECStatus(const bool enable,
+                             const unsigned char payload_typeRED,
+                             const unsigned char payload_typeFEC);
+  WebRtc_Word32 SetHybridNACKFECStatus(const bool enable,
+                                       const unsigned char payload_typeRED,
+                                       const unsigned char payload_typeFEC);
+  WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
+  bool EnableRemb(bool enable);
+  WebRtc_Word32 EnableTMMBR(const bool enable);
+  WebRtc_Word32 EnableKeyFrameRequestCallback(const bool enable);
+
+  // Sets SSRC for outgoing stream.
+  WebRtc_Word32 SetSSRC(const WebRtc_UWord32 SSRC,
+                        const StreamType usage,
+                        const unsigned char simulcast_idx);
+
+  // Gets SSRC for outgoing stream.
+  WebRtc_Word32 GetLocalSSRC(WebRtc_UWord32& SSRC);
+
+  // Gets SSRC for the incoming stream.
+  WebRtc_Word32 GetRemoteSSRC(WebRtc_UWord32& SSRC);
+
+  // Gets the CSRC for the incoming stream.
+  WebRtc_Word32 GetRemoteCSRC(unsigned int CSRCs[kRtpCsrcSize]);
+
+  // Sets the starting sequence number, must be called before StartSend.
+  WebRtc_Word32 SetStartSequenceNumber(WebRtc_UWord16 sequence_number);
+
+  // Sets the CName for the outgoing stream on the channel.
+  WebRtc_Word32 SetRTCPCName(const WebRtc_Word8 rtcp_cname[]);
+
+  // Gets the CName for the outgoing stream on the channel.
+  WebRtc_Word32 GetRTCPCName(WebRtc_Word8 rtcp_cname[]);
+
+  // Gets the CName of the incoming stream.
+  WebRtc_Word32 GetRemoteRTCPCName(WebRtc_Word8 rtcp_cname[]);
+  WebRtc_Word32 RegisterRtpObserver(ViERTPObserver* observer);
+  WebRtc_Word32 RegisterRtcpObserver(ViERTCPObserver* observer);
+  WebRtc_Word32 SendApplicationDefinedRTCPPacket(
+      const WebRtc_UWord8 sub_type,
+      WebRtc_UWord32 name,
+      const WebRtc_UWord8* data,
+      WebRtc_UWord16 data_length_in_bytes);
+
+  // Returns statistics reported by the remote client in an RTCP packet.
+  WebRtc_Word32 GetSendRtcpStatistics(WebRtc_UWord16& fraction_lost,
+                                      WebRtc_UWord32& cumulative_lost,
+                                      WebRtc_UWord32& extended_max,
+                                      WebRtc_UWord32& jitter_samples,
+                                      WebRtc_Word32& rtt_ms);
+
+  // Returns our locally created statistics of the received RTP stream.
+  WebRtc_Word32 GetReceivedRtcpStatistics(WebRtc_UWord16& fraction_lost,
+                                          WebRtc_UWord32& cumulative_lost,
+                                          WebRtc_UWord32& extended_max,
+                                          WebRtc_UWord32& jitter_samples,
+                                          WebRtc_Word32& rtt_ms);
+
+  // Gets sent/received packets statistics.
+  WebRtc_Word32 GetRtpStatistics(WebRtc_UWord32& bytes_sent,
+                                 WebRtc_UWord32& packets_sent,
+                                 WebRtc_UWord32& bytes_received,
+                                 WebRtc_UWord32& packets_received) const;
+  void GetBandwidthUsage(WebRtc_UWord32& total_bitrate_sent,
+                         WebRtc_UWord32& video_bitrate_sent,
+                         WebRtc_UWord32& fec_bitrate_sent,
+                         WebRtc_UWord32& nackBitrateSent) const;
+  int GetEstimatedReceiveBandwidth(WebRtc_UWord32* estimated_bandwidth) const;
+  WebRtc_Word32 SetKeepAliveStatus(const bool enable,
+                                   const WebRtc_Word8 unknown_payload_type,
+                                   const WebRtc_UWord16 delta_transmit_timeMS);
+  WebRtc_Word32 GetKeepAliveStatus(bool& enable,
+                                   WebRtc_Word8& unknown_payload_type,
+                                   WebRtc_UWord16& delta_transmit_timeMS);
+  WebRtc_Word32 StartRTPDump(const char file_nameUTF8[1024],
+                             RTPDirections direction);
+  WebRtc_Word32 StopRTPDump(RTPDirections direction);
+
+  // Implements RtcpFeedback.
+  virtual void OnLipSyncUpdate(const WebRtc_Word32 id,
+                               const WebRtc_Word32 audio_video_offset);
+  virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
+                                         const WebRtc_UWord8 sub_type,
+                                         const WebRtc_UWord32 name,
+                                         const WebRtc_UWord16 length,
+                                         const WebRtc_UWord8* data);
+
+  // Implements RtpFeedback.
+  virtual WebRtc_Word32 OnInitializeDecoder(
+      const WebRtc_Word32 id,
+      const WebRtc_Word8 payload_type,
+      const WebRtc_Word8 payload_name[RTP_PAYLOAD_NAME_SIZE],
+      const int frequency,
+      const WebRtc_UWord8 channels,
+      const WebRtc_UWord32 rate);
+  virtual void OnPacketTimeout(const WebRtc_Word32 id);
+  virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                const RtpRtcpPacketType packet_type);
+  virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                     const RTPAliveType alive);
+  virtual void OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 SSRC);
+  virtual void OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 CSRC,
+                                     const bool added);
+
+  // Local and remote transport (UDP) configuration.
+  WebRtc_Word32 SetLocalReceiver(const WebRtc_UWord16 rtp_port,
+                                 const WebRtc_UWord16 rtcp_port,
+                                 const WebRtc_Word8* ip_address);
+  WebRtc_Word32 GetLocalReceiver(WebRtc_UWord16& rtp_port,
+                                 WebRtc_UWord16& rtcp_port,
+                                 WebRtc_Word8* ip_address) const;
+  WebRtc_Word32 SetSendDestination(const WebRtc_Word8* ip_address,
+                                   const WebRtc_UWord16 rtp_port,
+                                   const WebRtc_UWord16 rtcp_port,
+                                   const WebRtc_UWord16 source_rtp_port,
+                                   const WebRtc_UWord16 source_rtcp_port);
+  WebRtc_Word32 GetSendDestination(WebRtc_Word8* ip_address,
+                                   WebRtc_UWord16& rtp_port,
+                                   WebRtc_UWord16& rtcp_port,
+                                   WebRtc_UWord16& source_rtp_port,
+                                   WebRtc_UWord16& source_rtcp_port) const;
+  WebRtc_Word32 GetSourceInfo(WebRtc_UWord16& rtp_port,
+                              WebRtc_UWord16& rtcp_port,
+                              WebRtc_Word8* ip_address,
+                              WebRtc_UWord32 ip_address_length);
+
+  WebRtc_Word32 SetRemoteSSRCType(const StreamType usage,
+                                  const uint32_t SSRC) const;
+
+  WebRtc_Word32 StartSend();
+  WebRtc_Word32 StopSend();
+  bool Sending();
+  WebRtc_Word32 StartReceive();
+  WebRtc_Word32 StopReceive();
+  bool Receiving();
+
+  WebRtc_Word32 RegisterSendTransport(Transport& transport);
+  WebRtc_Word32 DeregisterSendTransport();
+
+  // Incoming packet from external transport.
+  WebRtc_Word32 ReceivedRTPPacket(const void* rtp_packet,
+                                  const WebRtc_Word32 rtp_packet_length);
+
+  // Incoming packet from external transport.
+  WebRtc_Word32 ReceivedRTCPPacket(const void* rtcp_packet,
+                                   const WebRtc_Word32 rtcp_packet_length);
+
+  WebRtc_Word32 EnableIPv6();
+  bool IsIPv6Enabled();
+  WebRtc_Word32 SetSourceFilter(const WebRtc_UWord16 rtp_port,
+                                const WebRtc_UWord16 rtcp_port,
+                                const WebRtc_Word8* ip_address);
+  WebRtc_Word32 GetSourceFilter(WebRtc_UWord16& rtp_port,
+                                WebRtc_UWord16& rtcp_port,
+                                WebRtc_Word8* ip_address) const;
+
+  // Quality-of-service settings for the transport.
+  WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP, const bool use_set_sockOpt);
+  WebRtc_Word32 GetToS(WebRtc_Word32& DSCP, bool& use_set_sockOpt) const;
+  WebRtc_Word32 SetSendGQoS(const bool enable,
+                            const WebRtc_Word32 service_type,
+                            const WebRtc_UWord32 max_bitrate,
+                            const WebRtc_Word32 overrideDSCP);
+  WebRtc_Word32 GetSendGQoS(bool& enabled, WebRtc_Word32& service_type,
+                            WebRtc_Word32& overrideDSCP) const;
+
+  // Sets the maximum transfer unit size for the network link, i.e. including
+  // IP, UDP and RTP headers.
+  WebRtc_Word32 SetMTU(WebRtc_UWord16 mtu);
+
+  // Returns maximum allowed payload size, i.e. the maximum allowed size of
+  // encoded data in each packet.
+  WebRtc_UWord16 MaxDataPayloadLength() const;
+  WebRtc_Word32 SetMaxPacketBurstSize(WebRtc_UWord16 max_number_of_packets);
+  WebRtc_Word32 SetPacketBurstSpreadState(bool enable,
+                                          const WebRtc_UWord16 frame_periodMS);
+
+  WebRtc_Word32 SetPacketTimeoutNotification(bool enable,
+                                             WebRtc_UWord32 timeout_seconds);
+  WebRtc_Word32 RegisterNetworkObserver(ViENetworkObserver* observer);
+  bool NetworkObserverRegistered();
+  WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
+      const bool enable, const WebRtc_UWord32 sample_time_seconds);
+
+  WebRtc_Word32 SendUDPPacket(const WebRtc_Word8* data,
+                              const WebRtc_UWord32 length,
+                              WebRtc_Word32& transmitted_bytes,
+                              bool use_rtcp_socket);
+
+  WebRtc_Word32 EnableColorEnhancement(bool enable);
+
+  // Register send RTP RTCP module, which will deliver encoded frames to the
+  // channel RTP module.
+  WebRtc_Word32 RegisterSendRtpRtcpModule(RtpRtcp& send_rtp_rtcp_module);
+
+  // Deregisters the send RTP RTCP module, which will stop the encoder input to
+  // the channel.
+  WebRtc_Word32 DeregisterSendRtpRtcpModule();
+
+  // Gets the modules used by the channel.
+  RtpRtcp* rtp_rtcp();
+
+  // Implements VCMReceiveCallback.
+  virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame);
+
+  // Implements VCMReceiveCallback.
+  virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
+      const WebRtc_UWord64 picture_id);
+
+  // Implements VCMFrameStorageCallback.
+  virtual WebRtc_Word32 StoreReceivedFrame(
+      const EncodedVideoData& frame_to_store);
+
+  // Implements VCMReceiveStatisticsCallback.
+  virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bit_rate,
+                                          const WebRtc_UWord32 frame_rate);
+
+  // Implements VCMFrameTypeCallback.
+  virtual WebRtc_Word32 RequestKeyFrame();
+
+  // Implements VCMFrameTypeCallback.
+  virtual WebRtc_Word32 SliceLossIndicationRequest(
+      const WebRtc_UWord64 picture_id);
+
+  // Implements VCMPacketRequestCallback.
+  virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequence_numbers,
+                                      WebRtc_UWord16 length);
+
+  WebRtc_Word32 RegisterExternalEncryption(Encryption* encryption);
+  WebRtc_Word32 DeRegisterExternalEncryption();
+
+  WebRtc_Word32 SetVoiceChannel(WebRtc_Word32 ve_channel_id,
+                                VoEVideoSync* ve_sync_interface);
+  WebRtc_Word32 VoiceChannel();
+
+  // Implements ViEFrameProviderBase.
+  virtual int FrameCallbackChanged() {return -1;}
+
+  WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
+
+  ViEFileRecorder& GetIncomingFileRecorder();
+  void ReleaseIncomingFileRecorder();
+
+ protected:
+  // Entry point and body of the decode thread (see StartDecodeThread()).
+  static bool ChannelDecodeThreadFunction(void* obj);
+  bool ChannelDecodeProcess();
+
+ private:
+  // Assumed to be protected.
+  WebRtc_Word32 StartDecodeThread();
+  WebRtc_Word32 StopDecodeThread();
+
+  WebRtc_Word32 ProcessNACKRequest(const bool enable);
+  WebRtc_Word32 ProcessFECRequest(const bool enable,
+                                  const unsigned char payload_typeRED,
+                                  const unsigned char payload_typeFEC);
+
+  WebRtc_Word32 channel_id_;
+  WebRtc_Word32 engine_id_;
+  WebRtc_UWord32 number_of_cores_;
+  WebRtc_UWord8 num_socket_threads_;
+
+  // Used for all registered callbacks except rendering.
+  scoped_ptr<CriticalSectionWrapper> callback_cs_;
+
+  // Owned modules/classes.
+  RtpRtcp& rtp_rtcp_;
+  RtpRtcp* default_rtp_rtcp_;
+  std::list<RtpRtcp*> simulcast_rtp_rtcp_;
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  UdpTransport& socket_transport_;
+#endif
+  VideoCodingModule& vcm_;
+  ViEReceiver& vie_receiver_;
+  ViESender& vie_sender_;
+  ViESyncModule& vie_sync_;
+
+  // Not owned.
+  ProcessThread& module_process_thread_;
+  ViEDecoderObserver* codec_observer_;
+  bool do_key_frame_callbackRequest_;
+  ViERTPObserver* rtp_observer_;
+  ViERTCPObserver* rtcp_observer_;
+  ViENetworkObserver* networkObserver_;
+  bool rtp_packet_timeout_;
+  bool using_packet_spread_;
+
+  Transport* external_transport_;
+
+  bool decoder_reset_;
+  bool wait_for_key_frame_;
+  ThreadWrapper* decode_thread_;
+
+  Encryption* external_encryption_;
+
+  ViEEffectFilter* effect_filter_;
+  bool color_enhancement_;
+
+  // Time when RTT time was last reported to VCM JB.
+  TickTime vcm_rttreported_;
+
+  ViEFileRecorder file_recorder_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
diff --git a/trunk/src/video_engine/vie_channel_manager.cc b/trunk/src/video_engine/vie_channel_manager.cc
new file mode 100644
index 0000000..268bc18
--- /dev/null
+++ b/trunk/src/video_engine/vie_channel_manager.cc
@@ -0,0 +1,484 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_channel_manager.h"
+
+#include "engine_configurations.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/utility/interface/process_thread.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_remb.h"
+#include "voice_engine/main/interface/voe_video_sync.h"
+
+namespace webrtc {
+
+// Creates the manager for |engine_id|. All members that need later
+// initialization (voice sync, process thread) start out NULL; the pool of
+// channel ids is marked fully available.
+ViEChannelManager::ViEChannelManager(
+    int engine_id,
+    int number_of_cores,
+    ViEPerformanceMonitor& vie_performance_monitor)
+    : channel_id_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      engine_id_(engine_id),
+      number_of_cores_(number_of_cores),
+      vie_performance_monitor_(vie_performance_monitor),
+      free_channel_ids_(new bool[kViEMaxNumberOfChannels]),
+      free_channel_ids_size_(kViEMaxNumberOfChannels),
+      voice_sync_interface_(NULL),
+      remb_(new VieRemb(engine_id)),
+      voice_engine_(NULL),
+      module_process_thread_(NULL) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id),
+               "ViEChannelManager::ViEChannelManager(engine_id: %d)",
+               engine_id);
+  // Mark every channel id slot as free; GetFreeChannelId() scans this array.
+  for (int idx = 0; idx < free_channel_ids_size_; idx++) {
+    free_channel_ids_[idx] = true;
+  }
+}
+
+// Tears down all remaining channels, releases the voice sync interface and
+// frees the id pool and critical section.
+ViEChannelManager::~ViEChannelManager() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_),
+               "ViEChannelManager Destructor, engine_id: %d", engine_id_);
+
+  // BUG FIX: |module_process_thread_| is NULL until SetModuleProcessThread()
+  // is called (see constructor). Guard the deregistration so destroying a
+  // manager that was never fully initialized does not dereference NULL.
+  if (module_process_thread_) {
+    module_process_thread_->DeRegisterModule(remb_.get());
+  }
+  while (channel_map_.size() > 0) {
+    ChannelMap::iterator it = channel_map_.begin();
+    // DeleteChannel will erase this channel from the map and invalidate |it|.
+    DeleteChannel(it->first);
+  }
+
+  if (voice_sync_interface_) {
+    voice_sync_interface_->Release();
+  }
+  if (channel_id_critsect_) {
+    delete channel_id_critsect_;
+    channel_id_critsect_ = NULL;
+  }
+  if (free_channel_ids_) {
+    delete[] free_channel_ids_;
+    free_channel_ids_ = NULL;
+    free_channel_ids_size_ = 0;
+  }
+}
+
+// One-time injection of the process thread that drives the shared REMB
+// module. Must be called exactly once, before channels are created.
+void ViEChannelManager::SetModuleProcessThread(
+    ProcessThread& module_process_thread) {
+  assert(!module_process_thread_);
+  module_process_thread_ = &module_process_thread;
+  module_process_thread_->RegisterModule(remb_.get());
+}
+
+// Allocates a free channel id, creates a ViEChannel plus a dedicated
+// ViEEncoder for it and registers the encoder's RTP/RTCP module with the
+// channel. On success |channel_id| holds the new id and 0 is returned; on any
+// failure the id is returned to the pool and -1 is returned.
+int ViEChannelManager::CreateChannel(int& channel_id) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+
+  // Get a free id for the new channel.
+  if (!GetFreeChannelId(channel_id)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "Max number of channels reached: %d", channel_map_.size());
+    return -1;
+  }
+
+  ViEChannel* vie_channel = new ViEChannel(channel_id, engine_id_,
+                                           number_of_cores_,
+                                           *module_process_thread_);
+  if (!vie_channel) {
+    ReturnChannelId(channel_id);
+    return -1;
+  }
+  if (vie_channel->Init() != 0) {
+    // BUG FIX: |channel_id| was passed as a trace argument but the format
+    // string had no matching %d conversion.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not init channel %d", __FUNCTION__, channel_id);
+    ReturnChannelId(channel_id);
+    delete vie_channel;
+    vie_channel = NULL;
+    return -1;
+  }
+
+  // There is no ViEEncoder for this channel, create one with default settings.
+  ViEEncoder* vie_encoder = new ViEEncoder(engine_id_, channel_id,
+                                           number_of_cores_,
+                                           *module_process_thread_);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "%s(video_channel_id: %d) - Could not create a new encoder",
+                 __FUNCTION__, channel_id);
+    delete vie_channel;
+    return -1;
+  }
+
+  vie_encoder_map_[channel_id] = vie_encoder;
+  channel_map_[channel_id] = vie_channel;
+
+  // Register the channel at the encoder.
+  RtpRtcp* send_rtp_rtcp_module = vie_encoder->SendRtpRtcpModule();
+  if (vie_channel->RegisterSendRtpRtcpModule(*send_rtp_rtcp_module) != 0) {
+    assert(false);
+    // Roll back both map insertions and the id allocation.
+    vie_encoder_map_.erase(channel_id);
+    channel_map_.erase(channel_id);
+    ReturnChannelId(channel_id);
+    delete vie_channel;
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
+                 "%s: Could not register rtp module %d", __FUNCTION__,
+                 channel_id);
+    return -1;
+  }
+  return 0;
+}
+
+// Creates a new channel that shares the ViEEncoder of |original_channel|.
+// The new channel inherits the master channel's send codec so all RTP
+// children agree on codec type. Returns 0 on success, -1 on failure.
+int ViEChannelManager::CreateChannel(int& channel_id, int original_channel) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+
+  // Check that original_channel already exists.
+  ViEEncoder* vie_encoder = ViEEncoderPtr(original_channel);
+  if (!vie_encoder) {
+    // BUG FIX: |original_channel| was passed as a trace argument but the
+    // format string had no matching %d conversion.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "%s: Original channel %d doesn't exist", __FUNCTION__,
+                 original_channel);
+    return -1;
+  }
+  // (Removed an unused VideoCodec fetch here; the codec is read again below
+  // where it is actually applied.)
+
+  // Get a free id for the new channel.
+  if (GetFreeChannelId(channel_id) == false) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "Max number of channels reached: %d", channel_map_.size());
+    return -1;
+  }
+  ViEChannel* vie_channel = new ViEChannel(channel_id, engine_id_,
+                                           number_of_cores_,
+                                           *module_process_thread_);
+  if (!vie_channel) {
+    ReturnChannelId(channel_id);
+    return -1;
+  }
+  if (vie_channel->Init() != 0) {
+    // BUG FIX: format string gained the %d matching the |channel_id| arg.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not init channel %d", __FUNCTION__, channel_id);
+    ReturnChannelId(channel_id);
+    delete vie_channel;
+    vie_channel = NULL;
+    return -1;
+  }
+  vie_encoder_map_[channel_id] = vie_encoder;
+
+  // Set the same encoder settings for the channel as used by the master
+  // channel. Do this before attaching rtp module to ensure all rtp children has
+  // the same codec type.
+  VideoCodec encoder;
+  if (vie_encoder->GetEncoder(encoder) == 0) {
+    vie_channel->SetSendCodec(encoder);
+  }
+  channel_map_[channel_id] = vie_channel;
+
+  // Register the channel at the encoder.
+  RtpRtcp* send_rtp_rtcp_module = vie_encoder->SendRtpRtcpModule();
+  if (vie_channel->RegisterSendRtpRtcpModule(*send_rtp_rtcp_module) != 0) {
+    assert(false);
+    vie_encoder_map_.erase(channel_id);
+    channel_map_.erase(channel_id);
+    ReturnChannelId(channel_id);
+    delete vie_channel;
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
+                 "%s: Could not register rtp module %d", __FUNCTION__,
+                 channel_id);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes |channel_id| from both maps, detaches it from REMB and from its
+// ViEEncoder, and destroys the channel (and the encoder, when no other
+// channel shares it). Returns 0 on success, -1 if the channel is unknown.
+int ViEChannelManager::DeleteChannel(int channel_id) {
+  ViEChannel* vie_channel = NULL;
+  ViEEncoder* vie_encoder = NULL;
+  {
+    // Write lock to make sure no one is using the channel.
+    ViEManagerWriteScoped wl(*this);
+
+    // Protect the map.
+    CriticalSectionScoped cs(*channel_id_critsect_);
+    ChannelMap::iterator c_it = channel_map_.find(channel_id);
+    if (c_it == channel_map_.end()) {
+      // No such channel.
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                   "%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
+      return -1;
+    }
+    vie_channel = c_it->second;
+    channel_map_.erase(c_it);
+
+    // Deregister possible remb modules.
+    RtpRtcp* rtp_module = vie_channel->rtp_rtcp();
+    remb_->RemoveRembSender(rtp_module);
+    remb_->RemoveReceiveChannel(rtp_module);
+
+    // Deregister the channel from the ViEEncoder to stop the media flow.
+    vie_channel->DeregisterSendRtpRtcpModule();
+    ReturnChannelId(channel_id);
+
+    // Find the encoder object.
+    EncoderMap::iterator e_it = vie_encoder_map_.find(channel_id);
+    assert(e_it != vie_encoder_map_.end());
+    vie_encoder = e_it->second;
+
+    remb_->RemoveSendChannel(vie_encoder->SendRtpRtcpModule());
+
+    // Check if other channels are using the same encoder.
+    if (ChannelUsingViEEncoder(channel_id)) {
+      // Don't delete the ViEEncoder, at least one other channel is using it.
+      // Clearing the local pointer skips the delete below.
+      WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
+        "%s ViEEncoder removed from map for channel %d, not deleted",
+        __FUNCTION__, channel_id);
+      vie_encoder = NULL;
+    } else {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
+                   "%s ViEEncoder deleted for channel %d", __FUNCTION__,
+                   channel_id);
+      // Delete later when we've released the critsect.
+    }
+
+    // We can't erase the item before we've checked for other channels using
+    // same ViEEncoder.
+    vie_encoder_map_.erase(e_it);
+  }
+
+  // Leave the write critsect before deleting the objects.
+  // Deleting a channel can cause other objects, such as renderers, to be
+  // deleted, which might take time.
+  if (vie_encoder) {
+    delete vie_encoder;
+  }
+  delete vie_channel;
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
+               "%s Channel %d deleted", __FUNCTION__, channel_id);
+  return 0;
+}
+
+// Switches every existing channel to the sync interface of |voice_engine|.
+// Passing NULL disconnects audio/video sync on all channels. The previously
+// held sync interface is released only after all channels moved over.
+int ViEChannelManager::SetVoiceEngine(VoiceEngine* voice_engine) {
+  // Write lock to make sure no one is using the channel.
+  ViEManagerWriteScoped wl(*this);
+
+  CriticalSectionScoped cs(*channel_id_critsect_);
+
+  VoEVideoSync* sync_interface = NULL;
+  if (voice_engine) {
+    // Get new sync interface.
+    sync_interface = VoEVideoSync::GetInterface(voice_engine);
+    if (!sync_interface) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                   "%s Can't get audio sync interface from VoiceEngine.",
+                   __FUNCTION__);
+      return -1;
+    }
+  }
+
+  // -1 deselects any previously connected audio channel.
+  for (ChannelMap::iterator it = channel_map_.begin(); it != channel_map_.end();
+       ++it) {
+    it->second->SetVoiceChannel(-1, sync_interface);
+  }
+  // Release the old interface only after every channel uses the new one.
+  if (voice_sync_interface_) {
+    voice_sync_interface_->Release();
+  }
+  voice_engine_ = voice_engine;
+  voice_sync_interface_ = sync_interface;
+  return 0;
+}
+
+// Connects |audio_channel_id| to |channel_id| for lip sync. Requires that a
+// VoiceEngine has been set via SetVoiceEngine().
+int ViEChannelManager::ConnectVoiceChannel(int channel_id,
+                                           int audio_channel_id) {
+  CriticalSectionScoped lock(*channel_id_critsect_);
+  if (!voice_sync_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
+                 "No VoE set");
+    return -1;
+  }
+  ViEChannel* const channel = ViEChannelPtr(channel_id);
+  return channel ?
+      channel->SetVoiceChannel(audio_channel_id, voice_sync_interface_) : -1;
+}
+
+// Disconnects any audio channel from |channel_id| (disables lip sync).
+int ViEChannelManager::DisconnectVoiceChannel(int channel_id) {
+  CriticalSectionScoped lock(*channel_id_critsect_);
+  ViEChannel* const channel = ViEChannelPtr(channel_id);
+  if (!channel) {
+    return -1;
+  }
+  // -1 deselects the audio channel; NULL drops the sync interface.
+  channel->SetVoiceChannel(-1, NULL);
+  return 0;
+}
+
+// Returns the VoiceEngine set by SetVoiceEngine(), or NULL if none was set.
+VoiceEngine* ViEChannelManager::GetVoiceEngine() {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  return voice_engine_;
+}
+
+// Adds/removes the channel's RTP modules to/from the shared REMB module,
+// as sender and/or receive channel. Returns false on failure.
+bool ViEChannelManager::SetRembStatus(int channel_id, bool sender,
+                                      bool receiver) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  ViEChannel* channel = ViEChannelPtr(channel_id);
+  if (!channel) {
+    return false;
+  }
+
+  ViEEncoder* encoder = ViEEncoderPtr(channel_id);
+  // BUG FIX: |encoder| was dereferenced below without a NULL check. The maps
+  // should stay in sync, but a missing entry would have crashed here.
+  if (!encoder) {
+    return false;
+  }
+
+  if (sender || receiver) {
+    if (!channel->EnableRemb(true)) {
+      return false;
+    }
+  } else {
+    channel->EnableRemb(false);
+  }
+  RtpRtcp* rtp_module = channel->rtp_rtcp();
+  if (sender) {
+    remb_->AddRembSender(rtp_module);
+    remb_->AddSendChannel(encoder->SendRtpRtcpModule());
+  } else {
+    remb_->RemoveRembSender(rtp_module);
+    remb_->RemoveSendChannel(encoder->SendRtpRtcpModule());
+  }
+  if (receiver) {
+    remb_->AddReceiveChannel(rtp_module);
+  } else {
+    remb_->RemoveReceiveChannel(rtp_module);
+  }
+  // Only observe remote bitrate while REMB is active in either direction.
+  if (sender || receiver) {
+    rtp_module->SetRemoteBitrateObserver(remb_.get());
+  } else {
+    rtp_module->SetRemoteBitrateObserver(NULL);
+  }
+  return true;
+}
+
+// Looks up the channel with id |channel_id|; returns NULL (and logs an
+// error) when it does not exist. Callers must hold a scoped manager lock.
+ViEChannel* ViEChannelManager::ViEChannelPtr(int channel_id) const {
+  CriticalSectionScoped lock(*channel_id_critsect_);
+  ChannelMap::const_iterator found = channel_map_.find(channel_id);
+  if (found != channel_map_.end()) {
+    return found->second;
+  }
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+               "%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
+  return NULL;
+}
+
+// Copies all existing channels into |channel_map| (channel id -> ViEChannel*).
+void ViEChannelManager::GetViEChannels(MapWrapper& channel_map) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  // BUG FIX: the early-out used to test the size of the *output* parameter
+  // |channel_map|, which made the function a no-op whenever the caller passed
+  // in an empty map. The intent is to skip the loop when there are no
+  // channels to report.
+  if (channel_map_.empty()) {
+    return;
+  }
+
+  // Add all items to 'channelMap'.
+  for (ChannelMap::iterator it = channel_map_.begin(); it != channel_map_.end();
+       ++it) {
+    channel_map.Insert(it->first, it->second);
+  }
+}
+
+// Returns the ViEEncoder feeding |video_channel_id|, or NULL when the
+// channel has no encoder entry.
+ViEEncoder* ViEChannelManager::ViEEncoderPtr(int video_channel_id) const {
+  CriticalSectionScoped lock(*channel_id_critsect_);
+  EncoderMap::const_iterator found = vie_encoder_map_.find(video_channel_id);
+  return (found == vie_encoder_map_.end()) ? NULL : found->second;
+}
+
+// Claims the lowest available channel id. On success |free_channel_id| is the
+// allocated id (offset by kViEChannelIdBase) and true is returned; otherwise
+// |free_channel_id| is set to -1 and false is returned.
+bool ViEChannelManager::GetFreeChannelId(int& free_channel_id) {
+  CriticalSectionScoped lock(*channel_id_critsect_);
+  for (int i = 0; i < free_channel_ids_size_; ++i) {
+    if (free_channel_ids_[i]) {
+      // Mark the slot as taken and hand out its id.
+      free_channel_ids_[i] = false;
+      free_channel_id = i + kViEChannelIdBase;
+      return true;
+    }
+  }
+  // Every id is already in use.
+  free_channel_id = -1;
+  return false;
+}
+
+// Returns a previously allocated id to the free pool. |channel_id| must be
+// one handed out by GetFreeChannelId().
+void ViEChannelManager::ReturnChannelId(int channel_id) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  assert(channel_id < kViEMaxNumberOfChannels + kViEChannelIdBase &&
+         channel_id >= kViEChannelIdBase);
+  // Mark the slot as available again.
+  free_channel_ids_[channel_id - kViEChannelIdBase] = true;
+}
+
+// Returns true when at least one *other* channel is mapped to the same
+// ViEEncoder as |channel_id|.
+bool ViEChannelManager::ChannelUsingViEEncoder(int channel_id) const {
+  CriticalSectionScoped lock(*channel_id_critsect_);
+  EncoderMap::const_iterator own_entry = vie_encoder_map_.find(channel_id);
+  if (own_entry == vie_encoder_map_.end()) {
+    // This channel has no ViEEncoder registered.
+    return false;
+  }
+  ViEEncoder* const shared_encoder = own_entry->second;
+
+  // Scan all entries for a different channel pointing at the same encoder.
+  for (EncoderMap::const_iterator it = vie_encoder_map_.begin();
+       it != vie_encoder_map_.end(); ++it) {
+    if (it->first != channel_id && it->second == shared_encoder) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Appends to |channels| every channel that shares the ViEEncoder of
+// |channel_id|, including the channel with that id itself. Leaves |channels|
+// untouched when |channel_id| has no encoder entry.
+void ViEChannelManager::ChannelsUsingViEEncoder(int channel_id,
+                                                ChannelList* channels) const {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  EncoderMap::const_iterator orig_it = vie_encoder_map_.find(channel_id);
+  // BUG FIX: |orig_it| was dereferenced below without checking the find()
+  // result; an unknown |channel_id| caused undefined behavior.
+  if (orig_it == vie_encoder_map_.end()) {
+    return;
+  }
+
+  for (ChannelMap::const_iterator c_it = channel_map_.begin();
+       c_it != channel_map_.end(); ++c_it) {
+    EncoderMap::const_iterator comp_it = vie_encoder_map_.find(c_it->first);
+    assert(comp_it != vie_encoder_map_.end());
+    if (comp_it->second == orig_it->second) {
+      channels->push_back(c_it->second);
+    }
+  }
+}
+
+// Scoped read access to the channel manager; the base class takes the
+// manager's read lock for the lifetime of this object.
+ViEChannelManagerScoped::ViEChannelManagerScoped(
+    const ViEChannelManager& vie_channel_manager)
+    : ViEManagerScopedBase(vie_channel_manager) {
+}
+
+// Returns the channel with id |vie_channel_id|, or NULL if it doesn't exist.
+ViEChannel* ViEChannelManagerScoped::Channel(int vie_channel_id) const {
+  const ViEChannelManager* manager =
+      static_cast<const ViEChannelManager*>(vie_manager_);
+  return manager->ViEChannelPtr(vie_channel_id);
+}
+// Returns the encoder used by channel |vie_channel_id|, or NULL.
+ViEEncoder* ViEChannelManagerScoped::Encoder(int vie_channel_id) const {
+  const ViEChannelManager* manager =
+      static_cast<const ViEChannelManager*>(vie_manager_);
+  return manager->ViEEncoderPtr(vie_channel_id);
+}
+
+// True when another channel shares |channel_id|'s encoder.
+bool ViEChannelManagerScoped::ChannelUsingViEEncoder(int channel_id) const {
+  const ViEChannelManager* manager =
+      static_cast<const ViEChannelManager*>(vie_manager_);
+  return manager->ChannelUsingViEEncoder(channel_id);
+}
+
+// Collects all channels sharing |channel_id|'s encoder into |channels|.
+void ViEChannelManagerScoped::ChannelsUsingViEEncoder(
+    int channel_id, ChannelList* channels) const {
+  const ViEChannelManager* manager =
+      static_cast<const ViEChannelManager*>(vie_manager_);
+  manager->ChannelsUsingViEEncoder(channel_id, channels);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_channel_manager.h b/trunk/src/video_engine/vie_channel_manager.h
new file mode 100644
index 0000000..f0fa348
--- /dev/null
+++ b/trunk/src/video_engine/vie_channel_manager.h
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_MANAGER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_MANAGER_H_
+
+#include <list>
+#include <map>
+
+#include "engine_configurations.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class MapWrapper;
+class ProcessThread;
+class ViEChannel;
+class ViEEncoder;
+class ViEPerformanceMonitor;
+class VieRemb;
+class VoEVideoSync;
+class VoiceEngine;
+
+typedef std::list<ViEChannel*> ChannelList;
+typedef std::map<int, ViEChannel*> ChannelMap;
+typedef std::map<int, ViEEncoder*> EncoderMap;
+
+// Owns all video channels (ViEChannel) and their encoders (ViEEncoder),
+// mapping channel ids to both. Also manages the pool of free channel ids,
+// membership in the shared REMB module and the VoiceEngine sync interface
+// used for audio/video sync. Private inheritance from ViEManagerBase forces
+// external users to go through ViEChannelManagerScoped.
+class ViEChannelManager: private ViEManagerBase {
+  friend class ViEChannelManagerScoped;
+ public:
+  ViEChannelManager(int engine_id,
+                    int number_of_cores,
+                    ViEPerformanceMonitor& vie_performance_monitor);
+  ~ViEChannelManager();
+
+  // One-time injection of the thread driving the REMB module.
+  void SetModuleProcessThread(ProcessThread& module_process_thread);
+
+  // Creates a new channel. 'channelId' will be the id of the created channel.
+  int CreateChannel(int& channel_id);
+
+  // Creates a channel and attaches to an already existing ViEEncoder.
+  int CreateChannel(int& channel_id, int original_channel);
+
+  // Deletes a channel.
+  int DeleteChannel(int channel_id);
+
+  // Set the voice engine instance to be used by all video channels.
+  int SetVoiceEngine(VoiceEngine* voice_engine);
+
+  // Enables lip sync of the channel.
+  int ConnectVoiceChannel(int channel_id, int audio_channel_id);
+
+  // Disables lip sync of the channel.
+  int DisconnectVoiceChannel(int channel_id);
+
+  // Returns the engine set by SetVoiceEngine(), or NULL.
+  VoiceEngine* GetVoiceEngine();
+
+  // Adds a channel to include when sending REMB.
+  bool SetRembStatus(int channel_id, bool sender, bool receiver);
+
+ private:
+  // Used by ViEChannelScoped, forcing a manager user to use scoped.
+  // Returns a pointer to the channel with id 'channelId'.
+  ViEChannel* ViEChannelPtr(int channel_id) const;
+
+  // Adds all channels to channel_map.
+  void GetViEChannels(MapWrapper& channel_map);
+
+  // Methods used by ViECaptureScoped and ViEEncoderScoped.
+  // Gets the ViEEncoder used as input for video_channel_id
+  ViEEncoder* ViEEncoderPtr(int video_channel_id) const;
+
+  // Returns true if we found a new channel id, free_channel_id, false
+  // otherwise.
+  bool GetFreeChannelId(int& free_channel_id);
+
+  // Returns a previously allocated channel id.
+  void ReturnChannelId(int channel_id);
+
+  // Returns true if at least one other channels uses the same ViEEncoder as
+  // channel_id.
+  bool ChannelUsingViEEncoder(int channel_id) const;
+  void ChannelsUsingViEEncoder(int channel_id, ChannelList* channels) const;
+
+  // Protects channel_map_ and free_channel_ids_.
+  CriticalSectionWrapper* channel_id_critsect_;
+  int engine_id_;
+  int number_of_cores_;
+  ViEPerformanceMonitor& vie_performance_monitor_;
+  ChannelMap channel_map_;
+  // Allocation bitmap for channel ids; index i maps to id
+  // i + kViEChannelIdBase.
+  bool* free_channel_ids_;
+  int free_channel_ids_size_;
+
+  // Maps Channel id -> ViEEncoder.
+  EncoderMap vie_encoder_map_;
+  VoEVideoSync* voice_sync_interface_;
+  scoped_ptr<VieRemb> remb_;
+  VoiceEngine* voice_engine_;
+  // Not owned; set once via SetModuleProcessThread().
+  ProcessThread* module_process_thread_;
+};
+
+// Read-locked accessor for ViEChannelManager. Holding an instance keeps the
+// manager's channels alive for the duration of the scope.
+class ViEChannelManagerScoped: private ViEManagerScopedBase {
+ public:
+  explicit ViEChannelManagerScoped(
+      const ViEChannelManager& vie_channel_manager);
+  ViEChannel* Channel(int vie_channel_id) const;
+  ViEEncoder* Encoder(int vie_channel_id) const;
+
+  // Returns true if at least one other channels uses the same ViEEncoder as
+  // channel_id.
+  bool ChannelUsingViEEncoder(int channel_id) const;
+
+  // Returns a list with pointers to all channels using the same encoder as the
+  // channel with |channel_id|, including the one with the specified id.
+  void ChannelsUsingViEEncoder(int channel_id, ChannelList* channels) const;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_MANAGER_H_
diff --git a/trunk/src/video_engine/vie_codec_impl.cc b/trunk/src/video_engine/vie_codec_impl.cc
new file mode 100644
index 0000000..f995117
--- /dev/null
+++ b/trunk/src/video_engine/vie_codec_impl.cc
@@ -0,0 +1,748 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_codec_impl.h"
+
+#include "engine_configurations.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Returns the codec sub-API of |video_engine| with its reference count
+// increased, or NULL when the codec API is compiled out or the engine is
+// NULL. Caller must balance with Release().
+ViECodec* ViECodec::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+  if (!video_engine) {
+    return NULL;
+  }
+  // VideoEngineImpl derives from ViECodecImpl, so the cast plus implicit
+  // upcast yields the codec interface of this engine instance.
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViECodecImpl* vie_codec_impl = vie_impl;
+  // Increase ref count.
+  (*vie_codec_impl)++;
+  return vie_codec_impl;
+#else
+  return NULL;
+#endif
+}
+
+// Decreases the interface reference count taken by GetInterface(). Returns
+// the remaining count, or -1 (with last error kViEAPIDoesNotExist) when
+// released more times than acquired.
+int ViECodecImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViECodecImpl::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViECodec released too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViECodec reference count: %d", ref_count);
+  return ref_count;
+}
+
+// |shared_data| is owned by the enclosing VideoEngineImpl; only the pointer
+// is stored here.
+ViECodecImpl::ViECodecImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECodecImpl::ViECodecImpl() Ctor");
+}
+
+// Nothing owned to release; only traces destruction.
+ViECodecImpl::~ViECodecImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECodecImpl::~ViECodecImpl() Dtor");
+}
+
+// Number of codecs available via GetCodec(): the VCM codec list plus the two
+// FEC pseudo codecs (RED and ULPFEC).
+int ViECodecImpl::NumberOfCodecs() const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+  const int num_vcm_codecs =
+      static_cast<int>(VideoCodingModule::NumberOfCodecs());
+  return num_vcm_codecs + 2;
+}
+
+// Fills |video_codec| with entry |list_number| of the codec list. The two
+// entries beyond the VCM codecs describe the FEC pseudo codecs RED and
+// ULPFEC. Returns 0 on success, -1 for an invalid |list_number|.
+int ViECodecImpl::GetCodec(const unsigned char list_number,
+                           VideoCodec& video_codec) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(list_number: %d, codec_type: %d)", __FUNCTION__,
+               list_number, video_codec.codecType);
+  if (list_number == VideoCodingModule::NumberOfCodecs()) {
+    memset(&video_codec, 0, sizeof(VideoCodec));
+    strcpy(video_codec.plName, "red");
+    video_codec.codecType = kVideoCodecRED;
+    video_codec.plType = VCM_RED_PAYLOAD_TYPE;
+  } else if (list_number == VideoCodingModule::NumberOfCodecs() + 1) {
+    memset(&video_codec, 0, sizeof(VideoCodec));
+    strcpy(video_codec.plName, "ulpfec");
+    video_codec.codecType = kVideoCodecULPFEC;
+    video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+  } else if (VideoCodingModule::Codec(list_number, &video_codec) != VCM_OK) {
+    // CONSISTENCY FIX: this failure path traced at kTraceApiCall; every other
+    // error path in this file uses kTraceError.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Could not get codec for list_number: %u", __FUNCTION__,
+                 list_number);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  return 0;
+}
+
+// Applies |video_codec| as send codec for |video_channel| and every channel
+// sharing its encoder: validates the codec, fills in a default max bitrate,
+// reconfigures the encoder (or a capture device able to pre-encode), updates
+// protection settings and restarts the media flow. Returns 0 on success.
+int ViECodecImpl::SetSendCodec(const int video_channel,
+                               const VideoCodec& video_codec) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel, video_codec.codecType);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d"
+               "maxBr: %d, min_br: %d, frame_rate: %d, qpMax: %u,"
+               "numberOfSimulcastStreams: %u )", __FUNCTION__,
+               video_codec.codecType, video_codec.plType, video_codec.width,
+               video_codec.height, video_codec.startBitrate,
+               video_codec.maxBitrate, video_codec.minBitrate,
+               video_codec.maxFramerate, video_codec.qpMax,
+               video_codec.numberOfSimulcastStreams);
+  if (video_codec.codecType == kVideoCodecVP8) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "pictureLossIndicationOn: %d, feedbackModeOn: %d, "
+                 "complexity: %d, resilience: %d, numberOfTemporalLayers: %u",
+                 video_codec.codecSpecific.VP8.pictureLossIndicationOn,
+                 video_codec.codecSpecific.VP8.feedbackModeOn,
+                 video_codec.codecSpecific.VP8.complexity,
+                 video_codec.codecSpecific.VP8.resilience,
+                 video_codec.codecSpecific.VP8.numberOfTemporalLayers);
+  }
+  if (!CodecValid(video_codec)) {
+    // Error logged.
+    shared_data_->SetLastError(kViECodecInvalidCodec);
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  // Set a max_bitrate if the user hasn't set one.
+  VideoCodec video_codec_internal;
+  memcpy(&video_codec_internal, &video_codec, sizeof(VideoCodec));
+  if (video_codec_internal.maxBitrate == 0) {
+    // Max is one bit per pixel.
+    video_codec_internal.maxBitrate = (video_codec_internal.width *
+                                       video_codec_internal.height *
+                                       video_codec_internal.maxFramerate)
+                                       / 1000;
+    if (video_codec_internal.startBitrate > video_codec_internal.maxBitrate) {
+      // Don't limit the set start bitrate.
+      video_codec_internal.maxBitrate = video_codec_internal.startBitrate;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: New max bitrate set to %d kbps", __FUNCTION__,
+                 video_codec_internal.maxBitrate);
+  }
+
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    assert(false);
+    // BUG FIX: the format string contains %d but |video_channel| was missing
+    // from the argument list, so the trace read garbage varargs.
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder found for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  VideoCodec encoder;
+  vie_encoder->GetEncoder(encoder);
+  if (encoder.codecType != video_codec_internal.codecType &&
+      cs.ChannelUsingViEEncoder(video_channel)) {
+      // We don't allow changing codec type when several channels share encoder.
+      WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Settings differs from other channels using encoder",
+                   __FUNCTION__);
+      shared_data_->SetLastError(kViECodecInUse);
+      return -1;
+  }
+  // Make sure to generate a new SSRC if the codec type and/or resolution has
+  // changed. This won't have any effect if the user has set an SSRC.
+  bool new_rtp_stream = false;
+  if (encoder.codecType != video_codec_internal.codecType ||
+      encoder.width != video_codec_internal.width ||
+      encoder.height != video_codec_internal.height) {
+    new_rtp_stream = true;
+  }
+  if (video_codec_internal.numberOfSimulcastStreams > 1) {
+    if (cs.ChannelUsingViEEncoder(video_channel)) {
+      // We don't allow simulcast channels to share encoder.
+      WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Can't share simulcast encoder",
+                   __FUNCTION__);
+      shared_data_->SetLastError(kViECodecInUse);
+      return -1;
+    }
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFrameProviderBase* frame_provider = NULL;
+
+  // Stop the media flow while reconfiguring.
+  vie_encoder->Pause();
+
+  // Check if we have a frame provider that is a camera and can provide this
+  // codec for us.
+  bool use_capture_device_as_encoder = false;
+  frame_provider = is.FrameProvider(vie_encoder);
+  if (frame_provider) {
+    if (frame_provider->Id() >= kViECaptureIdBase &&
+        frame_provider->Id() <= kViECaptureIdMax) {
+      ViECapturer* vie_capture = static_cast<ViECapturer*>(frame_provider);
+      // Try to get preencoded. Nothing to do if it is not supported.
+      if (vie_capture && vie_capture->PreEncodeToViEEncoder(
+          video_codec_internal,
+          *vie_encoder,
+          video_channel) == 0) {
+        use_capture_device_as_encoder = true;
+      }
+    }
+  }
+
+  // Update the encoder settings if we are not using a capture device capable
+  // of this codec.
+  if (!use_capture_device_as_encoder &&
+      vie_encoder->SetEncoder(video_codec_internal) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not change encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecUnknownError);
+    // NOTE(review): the encoder is left paused on this error path (Pause()
+    // above without a matching Restart()) -- confirm whether intentional.
+    return -1;
+  }
+
+  // Give the channel(s) the new information.
+  ChannelList channels;
+  cs.ChannelsUsingViEEncoder(video_channel, &channels);
+  for (ChannelList::iterator it = channels.begin(); it != channels.end();
+       ++it) {
+    // (Simplified: the original set a |ret| flag and tested it immediately;
+    // behavior is identical.)
+    if ((*it)->SetSendCodec(video_codec_internal, new_rtp_stream) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Could not set send codec for channel %d", __FUNCTION__,
+                   video_channel);
+      shared_data_->SetLastError(kViECodecUnknownError);
+      return -1;
+    }
+  }
+
+  // Update the protection mode, we might be switching NACK/FEC.
+  vie_encoder->UpdateProtectionMethod();
+
+  // Get new best format for frame provider.
+  if (frame_provider) {
+    frame_provider->FrameCallbackChanged();
+  }
+  // Restart the media flow
+  if (new_rtp_stream) {
+    // Stream settings changed, make sure we get a key frame.
+    vie_encoder->SendKeyFrame();
+  }
+  vie_encoder->Restart();
+  return 0;
+}
+
+// Retrieves, into |video_codec|, the send codec currently configured on the
+// encoder associated with |video_channel|. Sets kViECodecInvalidChannelId
+// and returns -1 if the channel has no encoder; otherwise returns the result
+// of ViEEncoder::GetEncoder().
+int ViECodecImpl::GetSendCodec(const int video_channel,
+                               VideoCodec& video_codec) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  return vie_encoder->GetEncoder(video_codec);
+}
+
+// Configures |video_codec| as a receive codec on |video_channel|. The codec
+// is first validated with CodecValid(); on validation failure the last error
+// is set to kViECodecInvalidCodec and -1 is returned. Otherwise the call is
+// forwarded to ViEChannel::SetReceiveCodec().
+int ViECodecImpl::SetReceiveCodec(const int video_channel,
+                                  const VideoCodec& video_codec) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel, video_codec.codecType);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d,"
+               "maxBr: %d, min_br: %d, frame_rate: %d", __FUNCTION__,
+               video_codec.codecType, video_codec.plType, video_codec.width,
+               video_codec.height, video_codec.startBitrate,
+               video_codec.maxBitrate, video_codec.minBitrate,
+               video_codec.maxFramerate);
+
+  if (CodecValid(video_codec) == false) {
+    // Error logged.
+    shared_data_->SetLastError(kViECodecInvalidCodec);
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->SetReceiveCodec(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not set receive codec for channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves, into |video_codec|, the receive codec currently set on
+// |video_channel|. Returns -1 with kViECodecInvalidChannelId when the
+// channel does not exist, or kViECodecUnknownError when the channel call
+// fails.
+// NOTE(review): the entry trace prints video_codec.codecType, i.e. the value
+// of the caller-supplied out-parameter before it is filled in.
+int ViECodecImpl::GetReceiveCodec(const int video_channel,
+                                  VideoCodec& video_codec) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel, video_codec.codecType);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->GetReceiveCodec(video_codec) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Fetches codec-specific configuration bytes from the channel's encoder into
+// |config_parameters| (at most kConfigParameterSize bytes); the number of
+// bytes written is returned through |config_parameters_size|. Returns -1 with
+// kViECodecInvalidChannelId if the channel has no encoder, or
+// kViECodecUnknownError if the encoder call fails.
+int ViECodecImpl::GetCodecConfigParameters(
+  const int video_channel,
+  unsigned char config_parameters[kConfigParameterSize],
+  unsigned char& config_parameters_size) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->GetCodecConfigParameters(config_parameters,
+                                            config_parameters_size) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Enables or disables scaling of the input image in the channel's encoder
+// (forwards to ViEEncoder::ScaleInputImage). Returns -1 with
+// kViECodecInvalidChannelId if the channel has no encoder, or
+// kViECodecUnknownError if the encoder rejects the request.
+int ViECodecImpl::SetImageScaleStatus(const int video_channel,
+                                      const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->ScaleInputImage(enable) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves encoder-side frame counters (number of key frames and delta
+// frames sent) for |video_channel|.
+// NOTE(review): "Stastistics" is a typo, but it is part of the public
+// ViECodec interface (see vie_codec_impl.h) and cannot be renamed here.
+int ViECodecImpl::GetSendCodecStastistics(const int video_channel,
+                                          unsigned int& key_frames,
+                                          unsigned int& delta_frames) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No send codec for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->SendCodecStatistics(key_frames, delta_frames) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves decoder-side frame counters (number of key frames and delta
+// frames received) for |video_channel|.
+// Fix: the original entry trace used "%s(video_channel: %d, codec_type: %d)"
+// but passed only __FUNCTION__ and video_channel, leaving the second %d with
+// no argument — undefined behavior in printf-style formatting. The stray
+// specifier is removed.
+int ViECodecImpl::GetReceiveCodecStastistics(const int video_channel,
+                                             unsigned int& key_frames,
+                                             unsigned int& delta_frames) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__,
+               video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->ReceiveCodecStatistics(key_frames, delta_frames) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the encoder's current target bitrate for |video_channel| through
+// |bitrate|. Returns -1 with kViECodecInvalidChannelId when the channel has
+// no encoder; otherwise forwards the encoder's return value.
+// Fix: the original entry trace used "%s(video_channel: %d, codec_type: %d)"
+// with no argument for the second %d — undefined behavior in printf-style
+// formatting. The stray specifier is removed.
+int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
+                                        unsigned int* bitrate) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__,
+               video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No send codec for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  return vie_encoder->CodecTargetBitrate(static_cast<WebRtc_UWord32*>(bitrate));
+}
+
+// Returns the number of packets discarded by the jitter buffer on
+// |video_channel|. On an invalid channel id the error code is set and the
+// -1 sentinel is returned; since the return type is unsigned, that wraps to
+// UINT_MAX — made explicit with a cast so the intent is visible to readers
+// and compilers. (The sentinel value itself is unchanged for callers.)
+// Fix: the original entry trace used "%s(video_channel: %d, codec_type: %d)"
+// with no argument for the second %d — undefined behavior in printf-style
+// formatting. The stray specifier is removed.
+unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__,
+               video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return static_cast<unsigned int>(-1);
+  }
+  return vie_channel->DiscardedPackets();
+}
+
+// Enables or disables the key-frame-request callback on |video_channel|
+// (forwards to ViEChannel::EnableKeyFrameRequestCallback). Returns -1 with
+// kViECodecInvalidChannelId for an unknown channel, or kViECodecUnknownError
+// if the channel call fails.
+int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int video_channel,
+                                                   const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->EnableKeyFrameRequestCallback(enable) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Enables or disables signaling of packet loss for key frames (optionally
+// only for key frames) on |video_channel|; forwards to
+// ViEChannel::SetSignalPacketLossStatus.
+// Fix: the original entry trace format contained three %d specifiers but
+// passed only video_channel and enable — the missing |only_key_frames|
+// argument is undefined behavior in printf-style formatting. The argument
+// is now supplied.
+int ViECodecImpl::SetSignalKeyPacketLossStatus(const int video_channel,
+                                               const bool enable,
+                                               const bool only_key_frames) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, enable: %d, only_key_frames: %d)",
+               __FUNCTION__, video_channel, enable, only_key_frames);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetSignalPacketLossStatus(enable, only_key_frames) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Registers |observer| to receive encoder events for |video_channel|.
+// Fails with kViECodecInvalidChannelId if the channel has no encoder, or
+// kViECodecObserverAlreadyRegistered if an observer is already installed.
+int ViECodecImpl::RegisterEncoderObserver(const int video_channel,
+                                          ViEEncoderObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->RegisterCodecObserver(&observer) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not register codec observer at channel",
+                 __FUNCTION__),
+    shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes any encoder observer from |video_channel| by registering NULL.
+// Fails with kViECodecInvalidChannelId for an unknown channel, or
+// kViECodecObserverNotRegistered if no observer was installed.
+int ViECodecImpl::DeregisterEncoderObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->RegisterCodecObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViECodecObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+// Registers |observer| to receive decoder events for |video_channel|.
+// Fails with kViECodecInvalidChannelId for an unknown channel, or
+// kViECodecObserverAlreadyRegistered if an observer is already installed.
+int ViECodecImpl::RegisterDecoderObserver(const int video_channel,
+                                          ViEDecoderObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterCodecObserver(&observer) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not register codec observer at channel",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes any decoder observer from |video_channel| by registering NULL.
+// Fails with kViECodecInvalidChannelId for an unknown channel, or
+// kViECodecObserverNotRegistered if no observer was installed.
+int ViECodecImpl::DeregisterDecoderObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id()), "%s",
+               __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterCodecObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViECodecObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+// Forces the encoder on |video_channel| to produce a key frame. Fails with
+// kViECodecInvalidChannelId when the channel has no encoder, or
+// kViECodecUnknownError if the encoder call fails.
+int ViECodecImpl::SendKeyFrame(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->SendKeyFrame() != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Controls whether decoding on |video_channel| should wait for the first key
+// frame before rendering (forwards to ViEChannel::WaitForKeyFrame). Fails
+// with kViECodecInvalidChannelId for an unknown channel, or
+// kViECodecUnknownError if the channel call fails.
+int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
+                                       const bool wait) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d, wait: %d)", __FUNCTION__, video_channel,
+               wait);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->WaitForKeyFrame(wait) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Sanity-checks |video_codec| before it is applied to a channel: the payload
+// name must match codecType, and the payload type, frame dimensions and
+// bitrates must be within the limits defined in vie_defines.h. Returns true
+// when the codec is acceptable.
+bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
+  // Check pl_name matches codec_type.
+  if (video_codec.codecType == kVideoCodecRED) {
+#if defined(WIN32)
+    if (_strnicmp(video_codec.plName, "red", 3) == 0) {
+#else
+    if (strncasecmp(video_codec.plName, "red", 3) == 0) {
+#endif
+      // We only care about the type and name for red.
+      return true;
+    }
+    // Fix: plType was passed with no matching conversion specifier, so it
+    // was never printed; a %d is added to the format string.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Codec type doesn't match pl_name: %d", video_codec.plType);
+    return false;
+  } else if (video_codec.codecType == kVideoCodecULPFEC) {
+#if defined(WIN32)
+    if (_strnicmp(video_codec.plName, "ULPFEC", 6) == 0) {
+#else
+    if (strncasecmp(video_codec.plName, "ULPFEC", 6) == 0) {
+#endif
+      // We only care about the type and name for ULPFEC.
+      return true;
+    }
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Codec type doesn't match pl_name: %d", video_codec.plType);
+    return false;
+  } else if ((video_codec.codecType == kVideoCodecVP8 &&
+                  strncmp(video_codec.plName, "VP8", 4) == 0) ||
+              (video_codec.codecType == kVideoCodecI420 &&
+                  strncmp(video_codec.plName, "I420", 4) == 0)) {
+    // OK. (Comparing 4 bytes includes the terminating NUL, so "VP8" must be
+    // an exact match, not merely a prefix.)
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Codec type doesn't match pl_name: %d", video_codec.plType);
+    return false;
+  }
+
+  // Fix: the original condition used &&, which can never be true (a value
+  // cannot be both zero and greater than 127), so invalid payload types
+  // were silently accepted. Valid payload types here are 1..127.
+  if (video_codec.plType == 0 || video_codec.plType > 127) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Invalid codec payload type: %d", video_codec.plType);
+    return false;
+  }
+
+  if (video_codec.width > kViEMaxCodecWidth ||
+      video_codec.height > kViEMaxCodecHeight) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid codec size: %u x %u",
+                 video_codec.width, video_codec.height);
+    return false;
+  }
+
+  if (video_codec.startBitrate < kViEMinCodecBitrate) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid start_bitrate: %u",
+                 video_codec.startBitrate);
+    return false;
+  }
+  if (video_codec.minBitrate < kViEMinCodecBitrate) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid min_bitrate: %u",
+                 video_codec.minBitrate);
+    return false;
+  }
+  // Exactly one simulcast stream is contradictory: use 0 for "no simulcast"
+  // or >= 2 for real simulcast.
+  if (video_codec.numberOfSimulcastStreams == 1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Number of Simulcast streams can not be 1");
+    return false;
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_codec_impl.h b/trunk/src/video_engine/vie_codec_impl.h
new file mode 100644
index 0000000..e0e5e94
--- /dev/null
+++ b/trunk/src/video_engine/vie_codec_impl.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CODEC_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CODEC_IMPL_H_
+
+#include "typedefs.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+// Implementation of the public ViECodec sub-API. Each method resolves the
+// channel/encoder for the given channel id via ViESharedData's managers,
+// forwards the call, and reports failures through
+// ViESharedData::SetLastError(). Instances are reference-counted through
+// ViERefCount and released with Release().
+class ViECodecImpl
+    : public ViECodec,
+      public ViERefCount {
+ public:
+  virtual int Release();
+
+  // Implements ViECodec.
+  virtual int NumberOfCodecs() const;
+  virtual int GetCodec(const unsigned char list_number,
+                       VideoCodec& video_codec) const;
+  virtual int SetSendCodec(const int video_channel,
+                           const VideoCodec& video_codec);
+  virtual int GetSendCodec(const int video_channel,
+                           VideoCodec& video_codec) const;
+  virtual int SetReceiveCodec(const int video_channel,
+                              const VideoCodec& video_codec);
+  virtual int GetReceiveCodec(const int video_channel,
+                              VideoCodec& video_codec) const;
+  virtual int GetCodecConfigParameters(
+    const int video_channel,
+    unsigned char config_parameters[kConfigParameterSize],
+    unsigned char& config_parameters_size) const;
+  virtual int SetImageScaleStatus(const int video_channel, const bool enable);
+  virtual int GetSendCodecStastistics(const int video_channel,
+                                      unsigned int& key_frames,
+                                      unsigned int& delta_frames) const;
+  virtual int GetReceiveCodecStastistics(const int video_channel,
+                                         unsigned int& key_frames,
+                                         unsigned int& delta_frames) const;
+  virtual int GetCodecTargetBitrate(const int video_channel,
+                                    unsigned int* bitrate) const;
+  virtual unsigned int GetDiscardedPackets(const int video_channel) const;
+  virtual int SetKeyFrameRequestCallbackStatus(const int video_channel,
+                                               const bool enable);
+  virtual int SetSignalKeyPacketLossStatus(const int video_channel,
+                                           const bool enable,
+                                           const bool only_key_frames = false);
+  virtual int RegisterEncoderObserver(const int video_channel,
+                                      ViEEncoderObserver& observer);
+  virtual int DeregisterEncoderObserver(const int video_channel);
+  virtual int RegisterDecoderObserver(const int video_channel,
+                                      ViEDecoderObserver& observer);
+  virtual int DeregisterDecoderObserver(const int video_channel);
+  virtual int SendKeyFrame(const int video_channel);
+  virtual int WaitForFirstKeyFrame(const int video_channel, const bool wait);
+
+ protected:
+  // Constructed by the engine implementation; |shared_data| is borrowed, not
+  // owned.
+  ViECodecImpl(ViESharedData* shared_data);
+  virtual ~ViECodecImpl();
+
+ private:
+  // Validates a VideoCodec (payload name/type, size, bitrates) before use.
+  bool CodecValid(const VideoCodec& video_codec);
+
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CODEC_IMPL_H_
diff --git a/trunk/src/video_engine/vie_defines.h b/trunk/src/video_engine/vie_defines.h
new file mode 100644
index 0000000..73c9008
--- /dev/null
+++ b/trunk/src/video_engine/vie_defines.h
@@ -0,0 +1,261 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * vie_defines.h
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
+
+#include "engine_configurations.h"
+
+#ifdef WEBRTC_MAC_INTEL
+#include <stdio.h>
+#include <unistd.h>
+#endif
+
+#ifdef WEBRTC_ANDROID
+#include <pthread.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+#include <linux/net.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <sys/time.h>
+#endif
+
+namespace webrtc
+{
+// ===================================================
+// ViE Defines
+// ===================================================
+
+// General
+enum { kViEMinKeyRequestIntervalMs = 300};
+
+// ViEBase
+enum { kViEMaxNumberOfChannels = 32};
+enum { kViEVersionMaxMessageSize = 1024 };
+enum { kViEMaxModuleVersionSize = 960 };
+
+// ViECapture
+enum { kViEMaxCaptureDevices=10};
+// Width used if no send codec has been set when a capture device is started
+enum { kViECaptureDefaultWidth = 352};
+// Height used if no send codec has been set when a capture device is started
+enum { kViECaptureDefaultHeight = 288};
+enum { kViECaptureDefaultFramerate = 30};
+enum { kViECaptureMaxSnapshotWaitTimeMs = 500 };
+
+// ViECodec
+enum { kViEMaxCodecWidth = 1920};
+enum { kViEMaxCodecHeight = 1200};
+enum { kViEMaxCodecFramerate = 60};
+enum { kViEMinCodecBitrate = 30};
+
+// ViEEncryption
+enum { kViEMaxSrtpKeyLength = 30};
+enum { kViEMinSrtpEncryptLength = 16};
+enum { kViEMaxSrtpEncryptLength = 256};
+enum { kViEMaxSrtpAuthSh1Length = 20};
+enum { kViEMaxSrtpTagAuthNullLength = 12};
+enum { kViEMaxSrtpKeyAuthNullLength = 256};
+
+// ViEExternalCodec
+
+// ViEFile
+enum { kViEMaxFilePlayers = 3};
+
+// ViEImageProcess
+
+// ViENetwork
+enum { kViEMaxMtu = 1500};
+enum { kViESocketThreads = 1};
+enum { kViENumReceiveSocketBuffers = 500};
+
+// ViERender
+// Max valid time set in SetRenderTimeoutImage
+enum { kViEMaxRenderTimeoutTimeMs  = 10000};
+// Min valid time set in SetRenderTimeoutImage
+enum { kViEMinRenderTimeoutTimeMs = 33};
+enum { kViEDefaultRenderDelayMs = 10};
+
+// ViERTP_RTCP
+enum { kNackHistorySize = 400};
+
+// Id definitions
+enum {
+    kViEChannelIdBase=0x0,
+    kViEChannelIdMax=0xFF,
+    kViECaptureIdBase=0x1001,
+    kViECaptureIdMax=0x10FF,
+    kViEFileIdBase=0x2000,
+    kViEFileIdMax=0x200F,
+    kViEDummyChannelId=0xFFFF
+};
+
+// Module id
+// Create a unique id based on the ViE instance id and the
+// channel id. ViE id > 0 and 0 <= channel id <= 255
+
+// Packs a ViE instance id and a channel id into one int: the instance id in
+// the upper 16 bits, the channel id in the lower 16. With the default
+// channelId of -1 the reserved kViEDummyChannelId is used instead.
+inline int ViEId(const int vieId, const int channelId = -1)
+{
+    if (channelId == -1)
+    {
+        return (int) ((vieId << 16) + kViEDummyChannelId);
+    }
+    return (int) ((vieId << 16) + channelId);
+}
+
+// Builds a module id from a ViE instance id and channel id.
+// NOTE(review): this is byte-for-byte identical to ViEId() above; the two
+// names appear to exist only for call-site readability.
+inline int ViEModuleId(const int vieId, const int channelId = -1)
+{
+    if (channelId == -1)
+    {
+        return (int) ((vieId << 16) + kViEDummyChannelId);
+    }
+    return (int) ((vieId << 16) + channelId);
+}
+
+// Extracts the channel id (lower 16 bits) from a packed module id produced
+// by ViEId()/ViEModuleId().
+inline int ChannelId(const int moduleId)
+{
+    return (int) (moduleId & 0xffff);
+}
+
+
+// ============================================================================
+// Platform specifics
+// ============================================================================
+
+//-------------------------------------
+// Windows
+//-------------------------------------
+
+#if defined(_WIN32)
+//  Build information macros
+    #if defined(_DEBUG)
+    #define BUILDMODE TEXT("d")
+    #elif defined(DEBUG)
+    #define BUILDMODE TEXT("d")
+    #elif defined(NDEBUG)
+    #define BUILDMODE TEXT("r")
+    #else
+    #define BUILDMODE TEXT("?")
+    #endif
+
+    #define BUILDTIME TEXT(__TIME__)
+    #define BUILDDATE TEXT(__DATE__)
+
+    // example: "Oct 10 2002 12:05:30 r"
+    #define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
+
+
+    #define RENDER_MODULE_TYPE kRenderWindows
+    // Warning pragmas
+    // new behavior: elements of array 'XXX' will be default initialized
+    #pragma warning(disable: 4351)
+    // 'this' : used in base member initializer list
+    #pragma warning(disable: 4355)
+    // frame pointer register 'ebp' modified by inline assembly code
+    #pragma warning(disable: 4731)
+
+    // Include libraries
+    #pragma comment( lib, "winmm.lib" )
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    #pragma comment( lib, "ws2_32.lib" )
+    #pragma comment( lib, "Iphlpapi.lib" )   // _GetAdaptersAddresses
+#endif
+#endif
+
+
+//-------------------------------------
+// Mac
+//-------------------------------------
+
+#ifdef WEBRTC_MAC_INTEL
+	#define SLEEP(x) usleep(x * 1000)
+
+	//  Build information macros
+	#define TEXT(x) x
+	#if defined(_DEBUG)
+    #define BUILDMODE TEXT("d")
+	#elif defined(DEBUG)
+		#define BUILDMODE TEXT("d")
+	#elif defined(NDEBUG)
+		#define BUILDMODE TEXT("r")
+	#else
+		#define BUILDMODE TEXT("?")
+	#endif
+
+	#define BUILDTIME TEXT(__TIME__)
+	#define BUILDDATE TEXT(__DATE__)
+
+	// example: "Oct 10 2002 12:05:30 r"
+	#define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
+
+	#define RENDER_MODULE_TYPE kRenderWindows
+#endif
+
+//-------------------------------------
+// Linux
+//-------------------------------------
+
+#ifndef WEBRTC_ANDROID
+#ifdef WEBRTC_LINUX
+
+//  Build information macros
+#if defined(_DEBUG)
+    #define BUILDMODE "d"
+#elif defined(DEBUG)
+    #define BUILDMODE "d"
+#elif defined(NDEBUG)
+    #define BUILDMODE "r"
+#else
+    #define BUILDMODE "?"
+#endif
+
+#define BUILDTIME __TIME__
+#define BUILDDATE __DATE__
+
+// example: "Oct 10 2002 12:05:30 r"
+#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+#endif  // ifdef WEBRTC_LINUX
+#endif  // ifndef WEBRTC_ANDROID
+
+#ifdef WEBRTC_ANDROID
+
+    #define FAR
+    #define __cdecl
+
+    #if defined(_DEBUG)
+        #define BUILDMODE "d"
+    #elif defined(DEBUG)
+        #define BUILDMODE "d"
+    #elif defined(NDEBUG)
+        #define BUILDMODE "r"
+    #else
+        #define BUILDMODE "?"
+    #endif
+
+    #define BUILDTIME __TIME__
+    #define BUILDDATE __DATE__
+
+    // example: "Oct 10 2002 12:05:30 r"
+    #define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+#endif  // #ifdef WEBRTC_ANDROID
+
+} //namespace webrtc
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
diff --git a/trunk/src/video_engine/vie_encoder.cc b/trunk/src/video_engine/vie_encoder.cc
new file mode 100644
index 0000000..cf3587b
--- /dev/null
+++ b/trunk/src/video_engine/vie_encoder.cc
@@ -0,0 +1,887 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_encoder.h"
+
+#include <cassert>
+
+#include "critical_section_wrapper.h"
+#include "process_thread.h"
+#include "rtp_rtcp.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "video_codec_interface.h"
+#include "video_coding.h"
+#include "video_coding_defines.h"
+#include "vie_codec.h"
+#include "vie_defines.h"
+#include "vie_image_process.h"
+
+namespace webrtc {
+
+class QMVideoSettingsCallback : public VCMQMSettingsCallback {
+ public:
+  QMVideoSettingsCallback(WebRtc_Word32 engine_id,
+                          WebRtc_Word32 channel_id,
+                          VideoProcessingModule* vpm,
+                          VideoCodingModule* vcm,
+                          WebRtc_Word32 num_of_cores,
+                          WebRtc_Word32 max_payload_length);
+  ~QMVideoSettingsCallback();
+
+  // Update VPM with QM (quality modes: frame size & frame rate) settings.
+  WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frame_rate,
+                                   const WebRtc_UWord32 width,
+                                   const WebRtc_UWord32 height);
+
+  void SetMaxPayloadLength(WebRtc_Word32 max_payload_length);
+
+ private:
+  WebRtc_Word32 engine_id_;
+  WebRtc_Word32 channel_id_;
+  VideoProcessingModule* vpm_;
+  VideoCodingModule* vcm_;
+  WebRtc_Word32 num_cores_;
+  WebRtc_Word32 max_payload_length_;
+};
+
+
+ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id, WebRtc_Word32 channel_id,
+                       WebRtc_UWord32 number_of_cores,
+                       ProcessThread& module_process_thread)
+  : engine_id_(engine_id),
+    channel_id_(channel_id),
+    number_of_cores_(number_of_cores),
+    vcm_(*webrtc::VideoCodingModule::Create(ViEModuleId(engine_id,
+                                                        channel_id))),
+    vpm_(*webrtc::VideoProcessingModule::Create(ViEModuleId(engine_id,
+                                                            channel_id))),
+    default_rtp_rtcp_(*RtpRtcp::CreateRtpRtcp(
+        ViEModuleId(engine_id, channel_id), false)),
+    callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+    data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+    paused_(false),
+    channels_dropping_delta_frames_(0),
+    drop_next_frame_(false),
+    fec_enabled_(false),
+    nack_enabled_(false),
+    codec_observer_(NULL),
+    effect_filter_(NULL),
+    module_process_thread_(module_process_thread),
+    has_received_sli_(false),
+    picture_id_sli_(0),
+    has_received_rpsi_(false),
+    picture_id_rpsi_(0),
+    file_recorder_(channel_id) {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
+               ViEId(engine_id, channel_id),
+               "%s(engine_id: %d) 0x%p - Constructor", __FUNCTION__, engine_id,
+               this);
+  for (int i = 0; i < kMaxSimulcastStreams; i++) {
+    time_last_intra_request_ms_[i] = 0;
+  }
+  vcm_.InitializeSender();
+  vpm_.EnableTemporalDecimation(true);
+
+  // Enable/disable content analysis: off by default for now.
+  vpm_.EnableContentAnalysis(false);
+
+  module_process_thread_.RegisterModule(&vcm_);
+  if (default_rtp_rtcp_.InitSender() != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "ViEEncoder: RTP::InitSender failure");
+    assert(false);
+  }
+  default_rtp_rtcp_.RegisterIncomingVideoCallback(this);
+  default_rtp_rtcp_.RegisterIncomingRTCPCallback(this);
+  module_process_thread_.RegisterModule(&default_rtp_rtcp_);
+
+  qm_callback_ = new QMVideoSettingsCallback(
+      engine_id_,
+      channel_id_,
+      &vpm_,
+      &vcm_,
+      number_of_cores,
+      default_rtp_rtcp_.MaxDataPayloadLength());
+
+#ifdef VIDEOCODEC_VP8
+  VideoCodec video_codec;
+  if (vcm_.Codec(webrtc::kVideoCodecVP8, &video_codec) == VCM_OK) {
+    vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                           default_rtp_rtcp_.MaxDataPayloadLength());
+    default_rtp_rtcp_.RegisterSendPayload(video_codec);
+  } else {
+    assert(false);
+  }
+#else
+  VideoCodec video_codec;
+  if (vcm_.Codec(webrtc::kVideoCodecI420, &video_codec) == VCM_OK) {
+    vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                           default_rtp_rtcp_.MaxDataPayloadLength());
+    default_rtp_rtcp_.RegisterSendPayload(video_codec);
+  } else {
+    assert(false);
+  }
+#endif
+
+  if (vcm_.RegisterTransportCallback(this) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "ViEEncoder: VCM::RegisterTransportCallback failure");
+  }
+  if (vcm_.RegisterSendStatisticsCallback(this) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "ViEEncoder: VCM::RegisterSendStatisticsCallback failure");
+  }
+
+  if (vcm_.RegisterVideoQMCallback(qm_callback_) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "VCM::RegisterQMCallback failure");
+  }
+}
+
+ViEEncoder::~ViEEncoder() {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "ViEEncoder Destructor 0x%p, engine_id: %d", this, engine_id_);
+
+  if (default_rtp_rtcp_.NumberChildModules() > 0) {
+    assert(false);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Channels still attached %d, leaking memory",
+                 default_rtp_rtcp_.NumberChildModules());
+    return;
+  }
+  module_process_thread_.DeRegisterModule(&vcm_);
+  module_process_thread_.DeRegisterModule(&vpm_);
+  module_process_thread_.DeRegisterModule(&default_rtp_rtcp_);
+  delete &vcm_;
+  delete &vpm_;
+  delete &default_rtp_rtcp_;
+  delete qm_callback_;
+}
+
+void ViEEncoder::Pause() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  CriticalSectionScoped cs(data_cs_.get());
+  paused_ = true;
+}
+
+void ViEEncoder::Restart() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  CriticalSectionScoped cs(data_cs_.get());
+  paused_ = false;
+}
+
+WebRtc_Word32 ViEEncoder::DropDeltaAfterKey(bool enable) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(%d)", __FUNCTION__, enable);
+  CriticalSectionScoped cs(data_cs_.get());
+
+  if (enable) {
+    channels_dropping_delta_frames_++;
+  } else {
+    channels_dropping_delta_frames_--;
+    if (channels_dropping_delta_frames_ < 0) {
+      channels_dropping_delta_frames_ = 0;
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Called too many times", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_UWord8 ViEEncoder::NumberOfCodecs() {
+  return vcm_.NumberOfCodecs();
+}
+
+WebRtc_Word32 ViEEncoder::GetCodec(WebRtc_UWord8 list_index,
+                                   webrtc::VideoCodec& video_codec) {
+  if (vcm_.Codec(list_index, &video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: Could not get codec",
+                 __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
+                                                  WebRtc_UWord8 pl_type) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s: pltype %u", __FUNCTION__,
+               pl_type);
+
+  if (encoder == NULL)
+    return -1;
+
+  if (vcm_.RegisterExternalEncoder(encoder, pl_type) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not register external encoder");
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 pl_type) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s: pltype %u", __FUNCTION__, pl_type);
+
+  webrtc::VideoCodec current_send_codec;
+  if (vcm_.SendCodec(&current_send_codec) == VCM_OK) {
+    if (vcm_.Bitrate(&current_send_codec.startBitrate) != 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Failed to get the current encoder target bitrate.");
+    }
+  }
+
+  if (vcm_.RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not deregister external encoder");
+    return -1;
+  }
+
+  // If the external encoder is the current send codec, use vcm internal
+  // encoder.
+  if (current_send_codec.plType == pl_type) {
+    WebRtc_UWord16 max_data_payload_length =
+        default_rtp_rtcp_.MaxDataPayloadLength();
+    if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_,
+                               max_data_payload_length) != VCM_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Could not use internal encoder");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
+               video_codec.codecType, video_codec.width, video_codec.height);
+
+  // Convert from kbps to bps.
+  default_rtp_rtcp_.SetSendBitrate(video_codec.startBitrate * 1000,
+                                   video_codec.minBitrate,
+                                   video_codec.maxBitrate);
+
+  // Setting target width and height for VPM.
+  if (vpm_.SetTargetResolution(video_codec.width, video_codec.height,
+                               video_codec.maxFramerate) != VPM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not set VPM target dimensions");
+    return -1;
+  }
+
+  if (default_rtp_rtcp_.RegisterSendPayload(video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could register RTP module video payload");
+    return -1;
+  }
+
+  WebRtc_UWord16 max_data_payload_length =
+      default_rtp_rtcp_.MaxDataPayloadLength();
+
+  qm_callback_->SetMaxPayloadLength(max_data_payload_length);
+
+  if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                             max_data_payload_length) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not register send codec");
+    return -1;
+  }
+
+  data_cs_->Enter();
+  memcpy(&send_codec_, &video_codec, sizeof(send_codec_));
+  data_cs_->Leave();
+
+  // Set this module as sending right away, let the slave module in the channel
+  // start and stop sending.
+  if (default_rtp_rtcp_.Sending() == false) {
+    if (default_rtp_rtcp_.SetSendingStatus(true) != 0) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Could start RTP module sending");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::GetEncoder(webrtc::VideoCodec& video_codec) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  if (vcm_.SendCodec(&video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not get VCM send codec");
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::GetCodecConfigParameters(
+    unsigned char config_parameters[kConfigParameterSize],
+    unsigned char& config_parameters_size) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  WebRtc_Word32 num_parameters =
+      vcm_.CodecConfigParameters(config_parameters, kConfigParameterSize);
+  if (num_parameters <= 0) {
+    config_parameters_size = 0;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not get config parameters");
+    return -1;
+  }
+  config_parameters_size = static_cast<unsigned char>(num_parameters);
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::ScaleInputImage(bool enable) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s(enable %d)", __FUNCTION__,
+               enable);
+
+  VideoFrameResampling resampling_mode = kFastRescaling;
+  if (enable == true) {
+    // kInterpolation is currently not supported.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s not supported",
+                 __FUNCTION__, enable);
+    return -1;
+  }
+  vpm_.SetInputFrameResampleMode(resampling_mode);
+
+  return 0;
+}
+
+RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  return &default_rtp_rtcp_;
+}
+
+void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
+                              int num_csrcs,
+                              const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s: %llu", __FUNCTION__,
+               video_frame.TimeStamp());
+
+  {
+    CriticalSectionScoped cs(data_cs_.get());
+    if (paused_ || default_rtp_rtcp_.SendingMedia() == false) {
+      // We've paused or we have no channels attached, don't encode.
+      return;
+    }
+    if (drop_next_frame_) {
+      // Drop this frame.
+      WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Dropping frame %llu after a key fame", __FUNCTION__,
+                   video_frame.TimeStamp());
+      drop_next_frame_ = false;
+      return;
+    }
+  }
+
+  // Convert render time, in ms, to RTP timestamp.
+  const WebRtc_UWord32 time_stamp =
+      90 * static_cast<WebRtc_UWord32>(video_frame.RenderTimeMs());
+  video_frame.SetTimeStamp(time_stamp);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (effect_filter_) {
+      effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(),
+                                video_frame.TimeStamp(),
+                                video_frame.Width(), video_frame.Height());
+    }
+  }
+  // Record raw frame.
+  file_recorder_.RecordVideoFrame(video_frame);
+
+  // Make sure the CSRC list is correct.
+  if (num_csrcs > 0) {
+    WebRtc_UWord32 tempCSRC[kRtpCsrcSize];
+    for (int i = 0; i < num_csrcs; i++) {
+      if (CSRC[i] == 1) {
+        tempCSRC[i] = default_rtp_rtcp_.SSRC();
+      } else {
+        tempCSRC[i] = CSRC[i];
+      }
+    }
+    default_rtp_rtcp_.SetCSRCs(tempCSRC, (WebRtc_UWord8) num_csrcs);
+  }
+
+#ifdef VIDEOCODEC_VP8
+  if (vcm_.SendCodec() == webrtc::kVideoCodecVP8) {
+    webrtc::CodecSpecificInfo codec_specific_info;
+    codec_specific_info.codecType = webrtc::kVideoCodecVP8;
+    if (has_received_sli_ || has_received_rpsi_) {
+      {
+        codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
+          has_received_rpsi_;
+        codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
+          has_received_sli_;
+        codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
+          picture_id_rpsi_;
+        codec_specific_info.codecSpecific.VP8.pictureIdSLI  =
+          picture_id_sli_;
+      }
+      has_received_sli_ = false;
+      has_received_rpsi_ = false;
+    }
+    VideoFrame* decimated_frame = NULL;
+    const int ret = vpm_.PreprocessFrame(&video_frame, &decimated_frame);
+    if (ret == 1) {
+      // Drop this frame.
+      return;
+    } else if (ret != VPM_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Error preprocessing frame %u", __FUNCTION__,
+                   video_frame.TimeStamp());
+      return;
+    }
+
+    VideoContentMetrics* content_metrics = NULL;
+    content_metrics = vpm_.ContentMetrics();
+
+    // Frame was not re-sampled => use original.
+    if (decimated_frame == NULL)  {
+      decimated_frame = &video_frame;
+    }
+
+    if (vcm_.AddVideoFrame(*decimated_frame, content_metrics,
+                           &codec_specific_info) != VCM_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Error encoding frame %u", __FUNCTION__,
+                   video_frame.TimeStamp());
+    }
+    return;
+  }
+#endif
+  // TODO(mflodman) Rewrite this to use code common to VP8 case.
+  // Pass frame via preprocessor.
+  VideoFrame* decimated_frame = NULL;
+  const int ret = vpm_.PreprocessFrame(&video_frame, &decimated_frame);
+  if (ret == 1) {
+    // Drop this frame.
+    return;
+  } else if (ret != VPM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Error preprocessing frame %u", __FUNCTION__,
+                 video_frame.TimeStamp());
+    return;
+  }
+
+  // Frame was not sampled => use original.
+  if (decimated_frame == NULL)  {
+    decimated_frame = &video_frame;
+  }
+  if (vcm_.AddVideoFrame(*decimated_frame) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: Error encoding frame %u",
+                 __FUNCTION__, video_frame.TimeStamp());
+  }
+}
+
+void ViEEncoder::DelayChanged(int id, int frame_delay) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s: %u", __FUNCTION__,
+               frame_delay);
+
+  default_rtp_rtcp_.SetCameraDelay(frame_delay);
+  file_recorder_.SetFrameDelay(frame_delay);
+}
+
+int ViEEncoder::GetPreferedFrameSettings(int& width,
+                                         int& height,
+                                         int& frame_rate) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(video_codec));
+  if (vcm_.SendCodec(&video_codec) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not get VCM send codec");
+    return -1;
+  }
+
+  width = video_codec.width;
+  height = video_codec.height;
+  frame_rate = video_codec.maxFramerate;
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SendKeyFrame() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+  return vcm_.FrameTypeRequest(kVideoFrameKey, 0);  // Simulcast idx = 0.
+}
+
+WebRtc_Word32 ViEEncoder::SendCodecStatistics(
+    WebRtc_UWord32& num_key_frames, WebRtc_UWord32& num_delta_frames) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  webrtc::VCMFrameCount sent_frames;
+  if (vcm_.SentFrameCount(sent_frames) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Could not get sent frame information", __FUNCTION__);
+    return -1;
+  }
+  num_key_frames = sent_frames.numKeyFrames;
+  num_delta_frames = sent_frames.numDeltaFrames;
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::EstimatedSendBandwidth(
+    WebRtc_UWord32* available_bandwidth) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  return default_rtp_rtcp_.EstimatedSendBandwidth(available_bandwidth);
+}
+
+int ViEEncoder::CodecTargetBitrate(WebRtc_UWord32* bitrate) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (vcm_.Bitrate(bitrate) != 0)
+    return -1;
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::UpdateProtectionMethod() {
+  bool fec_enabled = false;
+  WebRtc_UWord8 dummy_ptype_red = 0;
+  WebRtc_UWord8 dummy_ptypeFEC = 0;
+
+  // Updated protection method to VCM to get correct packetization sizes.
+  // FEC has larger overhead than NACK -> set FEC if used.
+  WebRtc_Word32 error = default_rtp_rtcp_.GenericFECStatus(fec_enabled,
+                                                           dummy_ptype_red,
+                                                           dummy_ptypeFEC);
+  if (error) {
+    return -1;
+  }
+
+  bool nack_enabled = (default_rtp_rtcp_.NACK() == kNackOff) ? false : true;
+  if (fec_enabled_ == fec_enabled && nack_enabled_ == nack_enabled) {
+    // No change needed, we're already in correct state.
+    return 0;
+  }
+  fec_enabled_ = fec_enabled;
+  nack_enabled_ = nack_enabled;
+
+  // Set Video Protection for VCM.
+  if (fec_enabled && nack_enabled) {
+    vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, true);
+  } else {
+    vcm_.SetVideoProtection(webrtc::kProtectionFEC, fec_enabled_);
+    vcm_.SetVideoProtection(webrtc::kProtectionNack, nack_enabled_);
+    vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, false);
+  }
+
+  if (fec_enabled || nack_enabled) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: FEC status ",
+                 __FUNCTION__, fec_enabled);
+    vcm_.RegisterProtectionCallback(this);
+    // The send codec must be registered to set correct MTU.
+    webrtc::VideoCodec codec;
+    if (vcm_.SendCodec(&codec) == 0) {
+      WebRtc_UWord16 max_pay_load = default_rtp_rtcp_.MaxDataPayloadLength();
+      if (vcm_.Bitrate(&codec.startBitrate) != 0) {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "Failed to get the current encoder target bitrate.");
+      }
+      if (vcm_.RegisterSendCodec(&codec, number_of_cores_, max_pay_load) != 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "%s: Failed to update Sendcodec when enabling FEC",
+                     __FUNCTION__, fec_enabled);
+        return -1;
+      }
+    }
+    return 0;
+  } else {
+    // FEC and NACK are disabled.
+    vcm_.RegisterProtectionCallback(NULL);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SendData(
+    const FrameType frame_type,
+    const WebRtc_UWord8 payload_type,
+    const WebRtc_UWord32 time_stamp,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord32 payload_size,
+    const webrtc::RTPFragmentationHeader& fragmentation_header,
+    const RTPVideoHeader* rtp_video_hdr) {
+  {
+    CriticalSectionScoped cs(data_cs_.get());
+    if (paused_) {
+      // Paused, don't send this packet.
+      return 0;
+    }
+    if (channels_dropping_delta_frames_ &&
+        frame_type == webrtc::kVideoFrameKey) {
+      WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Sending key frame, drop next frame", __FUNCTION__);
+      drop_next_frame_ = true;
+    }
+  }
+
+  // New encoded data, hand over to the rtp module.
+  return default_rtp_rtcp_.SendOutgoingData(frame_type, payload_type,
+                                            time_stamp, payload_data,
+                                            payload_size, &fragmentation_header,
+                                            rtp_video_hdr);
+}
+
+WebRtc_Word32 ViEEncoder::ProtectionRequest(
+    WebRtc_UWord8 delta_fecrate,
+    WebRtc_UWord8 key_fecrate,
+    bool delta_use_uep_protection,
+    bool key_use_uep_protection,
+    bool nack_enabled,
+    WebRtc_UWord32* sent_video_rate_bps,
+    WebRtc_UWord32* sent_nack_rate_bps,
+    WebRtc_UWord32* sent_fec_rate_bps) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s, deltaFECRate: %u, key_fecrate: %u, "
+               "delta_use_uep_protection: %d, key_use_uep_protection: %d, ",
+               __FUNCTION__, delta_fecrate, key_fecrate,
+               delta_use_uep_protection, key_use_uep_protection);
+
+  if (default_rtp_rtcp_.SetFECCodeRate(key_fecrate, delta_fecrate) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Could not update FEC code rate", __FUNCTION__);
+  }
+  if (default_rtp_rtcp_.SetFECUepProtection(key_use_uep_protection,
+                                            delta_use_uep_protection) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Could not update FEC-UEP protection", __FUNCTION__);
+  }
+  default_rtp_rtcp_.BitrateSent(NULL,
+                                sent_video_rate_bps,
+                                sent_fec_rate_bps,
+                                sent_nack_rate_bps);
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SendStatistics(const WebRtc_UWord32 bit_rate,
+                                         const WebRtc_UWord32 frame_rate) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (codec_observer_) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: bitrate %u, framerate %u",
+                 __FUNCTION__, bit_rate, frame_rate);
+    codec_observer_->OutgoingRate(channel_id_, frame_rate, bit_rate);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: observer added",
+                 __FUNCTION__);
+    if (codec_observer_) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_), "%s: observer already set.",
+                   __FUNCTION__);
+      return -1;
+    }
+    codec_observer_ = observer;
+  } else {
+    if (codec_observer_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: observer does not exist.", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: observer removed",
+                 __FUNCTION__);
+    codec_observer_ = NULL;
+  }
+  return 0;
+}
+
+void ViEEncoder::OnSLIReceived(const WebRtc_Word32 id,
+                               const WebRtc_UWord8 picture_id) {
+  picture_id_sli_ = picture_id;
+  has_received_sli_ = true;
+}
+
+void ViEEncoder::OnRPSIReceived(const WebRtc_Word32 id,
+                                const WebRtc_UWord64 picture_id) {
+  picture_id_rpsi_ = picture_id;
+  has_received_rpsi_ = true;
+}
+
+void ViEEncoder::OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
+                                             const FrameType type,
+                                             const WebRtc_UWord8 stream_idx) {
+  assert(stream_idx < kMaxSimulcastStreams);
+
+  // Key frame request from remote side, signal to VCM.
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+  if (time_last_intra_request_ms_[stream_idx] + kViEMinKeyRequestIntervalMs >
+      now) {
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Not not encoding new intra due to timing", __FUNCTION__);
+    return;
+  }
+  vcm_.FrameTypeRequest(type, stream_idx);
+  time_last_intra_request_ms_[stream_idx] = now;
+}
+
+void ViEEncoder::OnNetworkChanged(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 bitrate_bps,
+                                  const WebRtc_UWord8 fraction_lost,
+                                  const WebRtc_UWord16 round_trip_time_ms) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(bitrate_bps: %u, fraction_lost: %u, rtt_ms: %u",
+               __FUNCTION__, bitrate_bps, fraction_lost, round_trip_time_ms);
+
+  vcm_.SetChannelParameters(bitrate_bps / 1000, fraction_lost,
+                            round_trip_time_ms);
+}
+
+WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (effect_filter == NULL) {
+    if (effect_filter_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_), "%s: no effect filter added",
+                   __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: deregister effect filter",
+                 __FUNCTION__);
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: register effect",
+                 __FUNCTION__);
+    if (effect_filter_) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: effect filter already added ", __FUNCTION__);
+      return -1;
+    }
+  }
+  effect_filter_ = effect_filter;
+  return 0;
+}
+
+ViEFileRecorder& ViEEncoder::GetOutgoingFileRecorder() {
+  return file_recorder_;
+}
+
+QMVideoSettingsCallback::QMVideoSettingsCallback(
+    WebRtc_Word32 engine_id,
+    WebRtc_Word32 channel_id,
+    VideoProcessingModule* vpm,
+    VideoCodingModule* vcm,
+    WebRtc_Word32 num_cores,
+    WebRtc_Word32 max_payload_length)
+    : engine_id_(engine_id),
+      channel_id_(channel_id),
+      vpm_(vpm),
+      vcm_(vcm),
+      num_cores_(num_cores),
+      max_payload_length_(max_payload_length) {
+}
+
+QMVideoSettingsCallback::~QMVideoSettingsCallback() {
+}
+
+WebRtc_Word32 QMVideoSettingsCallback::SetVideoQMSettings(
+    const WebRtc_UWord32 frame_rate,
+    const WebRtc_UWord32 width,
+    const WebRtc_UWord32 height) {
+  WebRtc_Word32 ret_val = 0;
+  ret_val = vpm_->SetTargetResolution(width, height, frame_rate);
+
+  if (!ret_val) {
+    // Get current settings.
+    VideoCodec current_codec;
+    vcm_->SendCodec(&current_codec);
+    WebRtc_UWord32 current_bit_rate;
+    if (vcm_->Bitrate(&current_bit_rate) != 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Failed to get the current encoder target bitrate.");
+    }
+
+    // Set the new values.
+    current_codec.height = static_cast<WebRtc_UWord16>(height);
+    current_codec.width = static_cast<WebRtc_UWord16>(width);
+    current_codec.maxFramerate = static_cast<WebRtc_UWord8>(frame_rate);
+    current_codec.startBitrate = current_bit_rate;
+
+    // Re-register encoder with the updated settings.
+    ret_val = vcm_->RegisterSendCodec(&current_codec, num_cores_,
+                                      max_payload_length_);
+  }
+  return ret_val;
+}
+
+void QMVideoSettingsCallback::SetMaxPayloadLength(
+    WebRtc_Word32 max_payload_length) {
+  max_payload_length_ = max_payload_length;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_encoder.h b/trunk/src/video_engine/vie_encoder.h
new file mode 100644
index 0000000..1b20491
--- /dev/null
+++ b/trunk/src/video_engine/vie_encoder.h
@@ -0,0 +1,182 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
+
+#include "common_types.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+#include "video_coding_defines.h"
+#include "video_processing.h"
+#include "vie_defines.h"
+#include "vie_file_recorder.h"
+#include "vie_frame_provider_base.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class QMVideoSettingsCallback;
+class RtpRtcp;
+class VideoCodingModule;
+class ViEEffectFilter;
+class ViEEncoderObserver;
+
+// Send-side encoder for one video channel: receives captured frames via
+// ViEFrameCallback, encodes them through the VCM, and feeds packetized data
+// and protection/statistics feedback into the RTP module.
+class ViEEncoder
+    : public RtpVideoFeedback,
+      public RtcpFeedback,
+      public VCMPacketizationCallback,
+      public VCMProtectionCallback,
+      public VCMSendStatisticsCallback,
+      public ViEFrameCallback {
+ public:
+  ViEEncoder(WebRtc_Word32 engine_id,
+             WebRtc_Word32 channel_id,
+             WebRtc_UWord32 number_of_cores,
+             ProcessThread& module_process_thread);
+  ~ViEEncoder();
+
+  // Drops incoming packets before they get to the encoder.
+  void Pause();
+  void Restart();
+
+  WebRtc_Word32 DropDeltaAfterKey(bool enable);
+
+  // Codec settings.
+  WebRtc_UWord8 NumberOfCodecs();
+  WebRtc_Word32 GetCodec(WebRtc_UWord8 list_index, VideoCodec& video_codec);
+  WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* encoder,
+                                        WebRtc_UWord8 pl_type);
+  WebRtc_Word32 DeRegisterExternalEncoder(WebRtc_UWord8 pl_type);
+  WebRtc_Word32 SetEncoder(const VideoCodec& video_codec);
+  WebRtc_Word32 GetEncoder(VideoCodec& video_codec);
+
+  WebRtc_Word32 GetCodecConfigParameters(
+    unsigned char config_parameters[kConfigParameterSize],
+    unsigned char& config_parameters_size);
+
+  // Scale or crop/pad image.
+  WebRtc_Word32 ScaleInputImage(bool enable);
+
+  // RTP settings.
+  RtpRtcp* SendRtpRtcpModule();
+
+  // Implementing ViEFrameCallback.
+  virtual void DeliverFrame(int id,
+                            VideoFrame& video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  virtual void DelayChanged(int id, int frame_delay);
+  virtual int GetPreferedFrameSettings(int& width,
+                                       int& height,
+                                       int& frame_rate);
+
+  // No cleanup needed: the encoder does not hold provider state.
+  virtual void ProviderDestroyed(int id) {
+    return;
+  }
+
+  WebRtc_Word32 EncodeFrame(VideoFrame& video_frame);
+  WebRtc_Word32 SendKeyFrame();
+  WebRtc_Word32 SendCodecStatistics(WebRtc_UWord32& num_key_frames,
+                                    WebRtc_UWord32& num_delta_frames);
+  WebRtc_Word32 EstimatedSendBandwidth(
+      WebRtc_UWord32* available_bandwidth) const;
+  int CodecTargetBitrate(WebRtc_UWord32* bitrate) const;
+  // Loss protection.
+  WebRtc_Word32 UpdateProtectionMethod();
+
+  // Implements VCMPacketizationCallback.
+  virtual WebRtc_Word32 SendData(
+    const FrameType frame_type,
+    const WebRtc_UWord8 payload_type,
+    const WebRtc_UWord32 time_stamp,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord32 payload_size,
+    const RTPFragmentationHeader& fragmentation_header,
+    const RTPVideoHeader* rtp_video_hdr);
+
+  // Implements VCMProtectionCallback.
+  virtual WebRtc_Word32 ProtectionRequest(
+      WebRtc_UWord8 delta_fecrate,
+      WebRtc_UWord8 key_fecrate,
+      bool delta_use_uep_protection,
+      bool key_use_uep_protection,
+      bool nack_enabled,
+      WebRtc_UWord32* sent_video_rate_bps,
+      WebRtc_UWord32* sent_nack_rate_bps,
+      WebRtc_UWord32* sent_fec_rate_bps);
+
+  // Implements VCMSendStatisticsCallback.
+  virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bit_rate,
+                                       const WebRtc_UWord32 frame_rate);
+  WebRtc_Word32 RegisterCodecObserver(ViEEncoderObserver* observer);
+
+  // Implements RtcpFeedback.
+  virtual void OnSLIReceived(const WebRtc_Word32 id,
+                             const WebRtc_UWord8 picture_id);
+  virtual void OnRPSIReceived(const WebRtc_Word32 id,
+                              const WebRtc_UWord64 picture_id);
+
+  // Implements RtpVideoFeedback.
+  virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
+                                           const FrameType type,
+                                           const WebRtc_UWord8 stream_idx);
+
+  virtual void OnNetworkChanged(const WebRtc_Word32 id,
+                                const WebRtc_UWord32 bitrate_bps,
+                                const WebRtc_UWord8 fraction_lost,
+                                const WebRtc_UWord16 round_trip_time_ms);
+
+  // Effect filter.
+  WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
+
+  // Recording.
+  ViEFileRecorder& GetOutgoingFileRecorder();
+
+ private:
+  WebRtc_Word32 engine_id_;
+  WebRtc_Word32 channel_id_;
+  const WebRtc_UWord32 number_of_cores_;
+
+  VideoCodingModule& vcm_;
+  VideoProcessingModule& vpm_;
+  RtpRtcp& default_rtp_rtcp_;
+  scoped_ptr<CriticalSectionWrapper> callback_cs_;
+  scoped_ptr<CriticalSectionWrapper> data_cs_;
+  VideoCodec send_codec_;
+
+  // Frame-dropping / pacing state.
+  bool paused_;
+  WebRtc_Word64 time_last_intra_request_ms_[kMaxSimulcastStreams];
+  WebRtc_Word32 channels_dropping_delta_frames_;
+  bool drop_next_frame_;
+
+  // Loss-protection configuration.
+  bool fec_enabled_;
+  bool nack_enabled_;
+
+  ViEEncoderObserver* codec_observer_;
+  ViEEffectFilter* effect_filter_;
+  ProcessThread& module_process_thread_;
+
+  // Latest SLI/RPSI feedback received via RtcpFeedback.
+  bool has_received_sli_;
+  WebRtc_UWord8 picture_id_sli_;
+  bool has_received_rpsi_;
+  WebRtc_UWord64 picture_id_rpsi_;
+
+  ViEFileRecorder file_recorder_;
+
+  // Quality modes callback
+  QMVideoSettingsCallback* qm_callback_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
diff --git a/trunk/src/video_engine/vie_encryption_impl.cc b/trunk/src/video_engine/vie_encryption_impl.cc
new file mode 100644
index 0000000..adbda42
--- /dev/null
+++ b/trunk/src/video_engine/vie_encryption_impl.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_encryption_impl.h"
+
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Returns the encryption sub-API for |video_engine|, bumping its reference
+// count; returns NULL if the engine is NULL or the API is compiled out.
+ViEEncryption* ViEEncryption::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+  if (video_engine == NULL) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEEncryptionImpl* vie_encryption_impl = vie_impl;
+  // Increase ref count.
+  (*vie_encryption_impl)++;
+  return vie_encryption_impl;
+#else
+  return NULL;
+#endif
+}
+
+// Drops one reference to this sub-API. Returns the remaining count, or -1
+// (with last-error set) if Release() was called more times than GetInterface.
+int ViEEncryptionImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  const WebRtc_Word32 remaining = GetCount();
+  if (remaining >= 0) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+                 "ViEEncryptionImpl reference count: %d", remaining);
+    return remaining;
+  }
+  WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl release too many times");
+  shared_data_->SetLastError(kViEAPIDoesNotExist);
+  return -1;
+}
+
+// Keeps a non-owning pointer to the engine's shared state.
+ViEEncryptionImpl::ViEEncryptionImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl::ViEEncryptionImpl() Ctor");
+}
+
+ViEEncryptionImpl::~ViEEncryptionImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl::~ViEEncryptionImpl() Dtor");
+}
+
+// Installs |encryption| as the external encryption/decryption hook on
+// |video_channel|. Returns 0 on success, -1 with last-error set otherwise.
+int ViEEncryptionImpl::RegisterExternalEncryption(const int video_channel,
+                                                  Encryption& encryption) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "RegisterExternalEncryption(video_channel=%d)", video_channel);
+
+  // Resolve the channel under the manager's scoped lock.
+  ViEChannelManagerScoped channel_scope(*(shared_data_->channel_manager()));
+  ViEChannel* channel = channel_scope.Channel(video_channel);
+  if (!channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEEncryptionInvalidChannelId);
+    return -1;
+  }
+  if (channel->RegisterExternalEncryption(&encryption) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViEEncryptionUnknownError);
+  return -1;
+}
+
+// Removes any external encryption hook from |video_channel|.
+// Returns 0 on success, -1 with last-error set otherwise.
+int ViEEncryptionImpl::DeregisterExternalEncryption(const int video_channel) {
+  // Fixed API trace: it previously logged "RegisterExternalEncryption".
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "DeregisterExternalEncryption(video_channel=%d)", video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (vie_channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEEncryptionInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->DeRegisterExternalEncryption() != 0) {
+    shared_data_->SetLastError(kViEEncryptionUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_encryption_impl.h b/trunk/src/video_engine/vie_encryption_impl.h
new file mode 100644
index 0000000..f398461
--- /dev/null
+++ b/trunk/src/video_engine/vie_encryption_impl.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
+
+#include "typedefs.h"
+#include "video_engine/include/vie_encryption.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+// Reference-counted implementation of the ViEEncryption sub-API; forwards
+// register/deregister calls to the target channel.
+class ViEEncryptionImpl
+    : public ViEEncryption,
+      public ViERefCount {
+ public:
+  virtual int Release();
+
+  // Implements ViEEncryption.
+  virtual int RegisterExternalEncryption(const int video_channel,
+                                         Encryption& encryption);
+  virtual int DeregisterExternalEncryption(const int video_channel);
+
+ protected:
+  // Constructed only by the engine implementation; |shared_data| must
+  // outlive this object.
+  ViEEncryptionImpl(ViESharedData* shared_data);
+  virtual ~ViEEncryptionImpl();
+
+ private:
+  ViESharedData* shared_data_;  // Not owned.
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
diff --git a/trunk/src/video_engine/vie_external_codec_impl.cc b/trunk/src/video_engine/vie_external_codec_impl.cc
new file mode 100644
index 0000000..92e7dd6
--- /dev/null
+++ b/trunk/src/video_engine/vie_external_codec_impl.cc
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_external_codec_impl.h"
+
+#include "engine_configurations.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Returns the external-codec sub-API for |video_engine|, bumping its
+// reference count; NULL if the engine is NULL or the API is compiled out.
+ViEExternalCodec* ViEExternalCodec::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+  if (video_engine == NULL) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEExternalCodecImpl* vie_external_codec_impl = vie_impl;
+  // Increase ref count.
+  (*vie_external_codec_impl)++;
+  return vie_external_codec_impl;
+#else
+  return NULL;
+#endif
+}
+
+// Drops one reference; returns the remaining count, or -1 (with last-error
+// set) if released more times than acquired.
+int ViEExternalCodecImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodec::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEExternalCodec release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodec reference count: %d", ref_count);
+  return ref_count;
+}
+
+// Keeps a non-owning pointer to the engine's shared state.
+ViEExternalCodecImpl::ViEExternalCodecImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodecImpl::ViEExternalCodecImpl() Ctor");
+}
+
+ViEExternalCodecImpl::~ViEExternalCodecImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodecImpl::~ViEExternalCodecImpl() Dtor");
+}
+
+// Registers |encoder| as the external send codec for payload type |pl_type|
+// on |video_channel|. Returns 0 on success, -1 with last-error set otherwise.
+int ViEExternalCodecImpl::RegisterExternalSendCodec(const int video_channel,
+                                                    const unsigned char pl_type,
+                                                    VideoEncoder* encoder) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %d encoder 0x%x", __FUNCTION__,
+               video_channel, pl_type, encoder);
+
+  // Both a valid channel encoder and a non-NULL user encoder are required.
+  ViEChannelManagerScoped manager_scope(*(shared_data_->channel_manager()));
+  ViEEncoder* channel_encoder = manager_scope.Encoder(video_channel);
+  if (!channel_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  if (!encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument Encoder 0x%x.", __FUNCTION__, encoder);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+
+  if (channel_encoder->RegisterExternalEncoder(encoder, pl_type) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViECodecUnknownError);
+  return -1;
+}
+
+// Removes the external send codec registered for |pl_type| on
+// |video_channel|. Returns 0 on success, -1 with last-error set otherwise.
+int ViEExternalCodecImpl::DeRegisterExternalSendCodec(
+  const int video_channel, const unsigned char pl_type) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %d", __FUNCTION__, video_channel,
+               pl_type);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+
+  if (vie_encoder->DeRegisterExternalEncoder(pl_type) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Registers |decoder| as the external receive codec for |pl_type| on
+// |video_channel|. |decoder_render| and |render_delay| are forwarded to the
+// channel unchanged. Returns 0 on success, -1 with last-error set otherwise.
+int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
+    const int video_channel,
+    const unsigned int pl_type,
+    VideoDecoder* decoder,
+    bool decoder_render,
+    int render_delay) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %d decoder 0x%x, decoder_render %d, "
+               "renderDelay %d", __FUNCTION__, video_channel, pl_type, decoder,
+               decoder_render, render_delay);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  if (!decoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument decoder 0x%x.", __FUNCTION__, decoder);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+
+  if (vie_channel->RegisterExternalDecoder(pl_type, decoder, decoder_render,
+                                           render_delay) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes the external receive codec registered for |pl_type| on
+// |video_channel|. Returns 0 on success, -1 with last-error set otherwise.
+// (Continuation-line indentation fixed to match the rest of the file.)
+int ViEExternalCodecImpl::DeRegisterExternalReceiveCodec(
+    const int video_channel, const unsigned char pl_type) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %u", __FUNCTION__, video_channel,
+               pl_type);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  if (vie_channel->DeRegisterExternalDecoder(pl_type) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_external_codec_impl.h b/trunk/src/video_engine/vie_external_codec_impl.h
new file mode 100644
index 0000000..67d6a78
--- /dev/null
+++ b/trunk/src/video_engine/vie_external_codec_impl.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_EXTERNAL_CODEC_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_EXTERNAL_CODEC_IMPL_H_
+
+#include "video_engine/include/vie_external_codec.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+// Reference-counted implementation of the ViEExternalCodec sub-API;
+// forwards external encoder/decoder registration to the target channel.
+class ViEExternalCodecImpl
+    : public ViEExternalCodec,
+      public ViERefCount {
+ public:
+  // Implements ViEExternalCodec.
+  virtual int Release();
+  virtual int RegisterExternalSendCodec(const int video_channel,
+                                        const unsigned char pl_type,
+                                        VideoEncoder* encoder);
+  virtual int DeRegisterExternalSendCodec(const int video_channel,
+                                          const unsigned char pl_type);
+  virtual int RegisterExternalReceiveCodec(const int video_channel,
+                                           const unsigned int pl_type,
+                                           VideoDecoder* decoder,
+                                           bool decoder_render = false,
+                                           int render_delay = 0);
+  virtual int DeRegisterExternalReceiveCodec(const int video_channel,
+                                             const unsigned char pl_type);
+
+ protected:
+  // Constructed only by the engine implementation; |shared_data| must
+  // outlive this object.
+  ViEExternalCodecImpl(ViESharedData* shared_data);
+  virtual ~ViEExternalCodecImpl();
+
+ private:
+  ViESharedData* shared_data_;  // Not owned.
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_EXTERNAL_CODEC_IMPL_H_
diff --git a/trunk/src/video_engine/vie_file_image.cc b/trunk/src/video_engine/vie_file_image.cc
new file mode 100644
index 0000000..2472a3a
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_image.cc
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Placed first to get WEBRTC_VIDEO_ENGINE_FILE_API.
+#include "engine_configurations.h"
+
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+
+#include "video_engine/vie_file_image.h"
+
+#include <stdio.h>
+
+#include "common_video/interface/video_image.h"
+#include "common_video/jpeg/include/jpeg.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// Reads the JPEG file at |file_nameUTF8| into memory, decodes it to I420,
+// and moves the result into |video_frame|. Returns 0 on success, -1 on any
+// file or decode error.
+int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
+                                          const char* file_nameUTF8,
+                                          VideoFrame& video_frame) {
+  // Read jpeg file into temporary buffer.
+  EncodedImage image_buffer;
+
+  FILE* image_file = fopen(file_nameUTF8, "rb");
+  if (!image_file) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not open file %s", __FUNCTION__, file_nameUTF8);
+    return -1;
+  }
+  if (fseek(image_file, 0, SEEK_END) != 0) {
+    fclose(image_file);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "ConvertJPEGToVideoFrame fseek SEEK_END error for file %s",
+                 file_nameUTF8);
+    return -1;
+  }
+  int buffer_size = ftell(image_file);
+  if (buffer_size == -1) {
+    fclose(image_file);
+    // Fixed log text: previously read "could tell file size".
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "ConvertJPEGToVideoFrame could not tell file size for file %s",
+                 file_nameUTF8);
+    return -1;
+  }
+  image_buffer._size = buffer_size;
+  if (fseek(image_file, 0, SEEK_SET) != 0) {
+    fclose(image_file);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "ConvertJPEGToVideoFrame fseek SEEK_SET error for file %s",
+                 file_nameUTF8);
+    return -1;
+  }
+  image_buffer._buffer = new WebRtc_UWord8[image_buffer._size + 1];
+  if (image_buffer._size != fread(image_buffer._buffer, sizeof(WebRtc_UWord8),
+                                  image_buffer._size, image_file)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not read file %s", __FUNCTION__, file_nameUTF8);
+    fclose(image_file);
+    delete [] image_buffer._buffer;
+    return -1;
+  }
+  fclose(image_file);
+
+  JpegDecoder decoder;
+  RawImage decoded_image;
+  int ret = decoder.Decode(image_buffer, decoded_image);
+
+  delete [] image_buffer._buffer;
+  image_buffer._buffer = NULL;
+
+  if (ret == -1) {
+    // Fixed log text: previously read "could decode file".
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not decode file %s from jpeg format", __FUNCTION__,
+                 file_nameUTF8);
+    return -1;
+  } else if (ret == -3) {
+    // NOTE(review): on i420-conversion failure the original logs but still
+    // continues with the decoded buffer; behavior kept — confirm intended.
+    // (Dropped a stray trace argument that had no matching %s.)
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not convert jpeg's data to i420 format",
+                 __FUNCTION__);
+  }
+
+  // I420 frame size: width * height * 3/2 (Y plane plus quarter-size U, V).
+  WebRtc_UWord32 image_length = (WebRtc_UWord32)(decoded_image._width *
+                                                 decoded_image._height * 1.5);
+  if (-1 == video_frame.Swap(decoded_image._buffer, image_length,
+                             image_length)) {
+    WEBRTC_TRACE(kTraceDebug, kTraceVideo, engine_id,
+                 "%s could not copy frame image_decoded_buffer to video_frame ",
+                 __FUNCTION__);
+    // This buffer was leaked on the failure path before; release it.
+    delete [] decoded_image._buffer;
+    decoded_image._buffer = NULL;
+    return -1;
+  }
+
+  if (decoded_image._buffer) {
+    delete [] decoded_image._buffer;
+    decoded_image._buffer = NULL;
+  }
+
+  video_frame.SetWidth(decoded_image._width);
+  video_frame.SetHeight(decoded_image._height);
+  return 0;
+}
+
+// Copies an I420 |picture| into |video_frame|. The length is computed as
+// width * height * 3/2 (I420 layout). Always returns 0.
+int ViEFileImage::ConvertPictureToVideoFrame(int engine_id,
+                                             const ViEPicture& picture,
+                                             VideoFrame& video_frame) {
+  WebRtc_UWord32 picture_length = (WebRtc_UWord32)(picture.width *
+                                                   picture.height * 1.5);
+  video_frame.CopyFrame(picture_length, picture.data);
+  video_frame.SetWidth(picture.width);
+  video_frame.SetHeight(picture.height);
+  video_frame.SetLength(picture_length);
+  return 0;
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_FILE_API
diff --git a/trunk/src/video_engine/vie_file_image.h b/trunk/src/video_engine/vie_file_image.h
new file mode 100644
index 0000000..e79b115
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_image.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
+
+#include "modules/interface/module_common_types.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_file.h"
+
+namespace webrtc {
+
+// Stateless helpers for loading still images into VideoFrame objects.
+class ViEFileImage {
+ public:
+  // Decodes the JPEG file at |file_nameUTF8| into |video_frame| (I420).
+  static int ConvertJPEGToVideoFrame(int engine_id,
+                                     const char* file_nameUTF8,
+                                     VideoFrame& video_frame);
+  // Copies a raw I420 |picture| into |video_frame|.
+  static int ConvertPictureToVideoFrame(int engine_id,
+                                        const ViEPicture& picture,
+                                        VideoFrame& video_frame);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
diff --git a/trunk/src/video_engine/vie_file_impl.cc b/trunk/src/video_engine/vie_file_impl.cc
new file mode 100644
index 0000000..28f9854
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_impl.cc
@@ -0,0 +1,1016 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#include "video_engine/vie_file_impl.h"
+
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+#include "common_video/jpeg/include/jpeg.h"
+#include "system_wrappers/interface/condition_variable_wrapper.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_file_image.h"
+#include "video_engine/vie_file_player.h"
+#include "video_engine/vie_file_recorder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_render_manager.h"
+#include "video_engine/vie_renderer.h"
+#endif
+
+namespace webrtc {
+
+// Returns the ViEFile sub-API of |video_engine|, or NULL if |video_engine|
+// is NULL or the file API was compiled out (WEBRTC_VIDEO_ENGINE_FILE_API
+// not defined). Each successful call bumps the interface reference count;
+// callers must balance it with Release().
+ViEFile* ViEFile::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+  if (!video_engine) {
+    return NULL;
+  }
+  // VideoEngineImpl derives from all sub-API impl classes, so the engine
+  // pointer can be converted to the ViEFileImpl sub-object.
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEFileImpl* vie_file_impl = vie_impl;
+  // Increase ref count.
+  (*vie_file_impl)++;
+  return vie_file_impl;
+#else
+  return NULL;
+#endif
+}
+
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+
+// Releases one reference taken by GetInterface(). Returns the remaining
+// reference count, or -1 if Release() was called more times than
+// GetInterface().
+int ViEFileImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEFile::Release()");
+  // Decrease ref count.
+  (*this)--;
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEFile release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEFile reference count: %d", ref_count);
+  return ref_count;
+}
+
+// Constructor: stores the shared engine state; does not own |shared_data|.
+ViEFileImpl::ViEFileImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEFileImpl::ViEFileImpl() Ctor");
+}
+
+// Destructor: only traces; shared state is owned elsewhere.
+ViEFileImpl::~ViEFileImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEFileImpl::~ViEFileImpl() Dtor");
+}
+
+// Starts playback of |file_nameUTF8| and returns the new player's id in
+// |file_id| (out parameter). |loop| repeats the file; |file_format| selects
+// the container format. Returns 0 on success, -1 on failure (last error is
+// set via SetLastError).
+int ViEFileImpl::StartPlayFile(const char* file_nameUTF8,
+                               int& file_id,
+                               const bool loop,
+                               const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  // The voice engine may be NULL; CreateFilePlayer receives it for
+  // audio playout of the file.
+  VoiceEngine* voice = shared_data_->channel_manager()->GetVoiceEngine();
+  const WebRtc_Word32 result = shared_data_->input_manager()->CreateFilePlayer(
+      file_nameUTF8, loop, file_format, voice, file_id);
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+// Stops playback of |file_id| and destroys the associated file player.
+// Returns 0 on success, -1 if no such file is playing.
+int ViEFileImpl::StopPlayFile(const int file_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+  {
+    // Scope the input-manager lock: it must be released before
+    // DestroyFilePlayer is called below.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+    if (!vie_file_player) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: File with id %d is not playing.", __FUNCTION__,
+                   file_id);
+      shared_data_->SetLastError(kViEFileNotPlaying);
+      return -1;
+    }
+  }
+  // Destroy the capture device.
+  return shared_data_->input_manager()->DestroyFilePlayer(file_id);
+}
+
+// Registers |observer| for notifications from the playing file |file_id|.
+// Fails if the file is not playing or an observer is already registered.
+// Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::RegisterObserver(int file_id, ViEFileObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->IsObserverRegistered()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: Observer already registered", __FUNCTION__);
+    shared_data_->SetLastError(kViEFileObserverAlreadyRegistered);
+    return -1;
+  }
+  if (vie_file_player->RegisterObserver(observer) != 0) {
+    // Bug fix: |file_id| was previously passed to WEBRTC_TRACE without a
+    // matching conversion specifier in the format string.
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: Failed to register observer", __FUNCTION__);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Deregisters the observer from the playing file |file_id|. The |observer|
+// argument is unused beyond matching the public API signature.
+// Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::DeregisterObserver(int file_id, ViEFileObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (!vie_file_player->IsObserverRegistered()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: No Observer registered", __FUNCTION__);
+    shared_data_->SetLastError(kViEFileObserverNotRegistered);
+    return -1;
+  }
+  if (vie_file_player->DeRegisterObserver() != 0) {
+    // Bug fix: |file_id| was previously passed to WEBRTC_TRACE without a
+    // matching conversion specifier in the format string.
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: Failed to deregister observer", __FUNCTION__);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Connects the playing file |file_id| as the video source of
+// |video_channel|. Fails if the channel does not exist, the channel is
+// already fed by a capture device or another file, or the file is not
+// playing. Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::SendFileOnChannel(const int file_id, const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  if (is.FrameProvider(vie_encoder) != NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already connected to a capture device or "
+                 "file.", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInputAlreadyConnected);
+    return -1;
+  }
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+
+  if (vie_file_player->RegisterFrameCallback(video_channel, vie_encoder)
+      != 0) {
+    // Bug fix: |file_id| was passed without a matching conversion specifier
+    // in the format string; the message now includes it explicitly.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Failed to register frame callback for file %d.",
+                 __FUNCTION__, file_id);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Disconnects the file currently feeding |video_channel|. Fails if the
+// channel does not exist or its frame provider is not a file player (file
+// player ids lie in [kViEFileIdBase, kViEFileIdMax]). Returns 0 on success,
+// -1 on failure (last error is set).
+int ViEFileImpl::StopSendFileOnChannel(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
+  if (!frame_provider ||
+      frame_provider->Id() < kViEFileIdBase ||
+      frame_provider->Id() > kViEFileIdMax) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No file connected to Channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileNotConnected);
+    return -1;
+  }
+  if (frame_provider->DeregisterFrameCallback(vie_encoder) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to deregister file from channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    // Bug fix: this failure path set the last error but still returned 0
+    // (success), unlike every other method in this file.
+    return -1;
+  }
+  return 0;
+}
+
+// Routes the audio of the playing file |file_id| into |audio_channel| as if
+// it came from the microphone. |mix_microphone| mixes it with real
+// microphone input; |volume_scaling| scales the file audio. Returns 0 on
+// success, -1 on failure (last error is set).
+int ViEFileImpl::StartPlayFileAsMicrophone(const int file_id,
+                                           const int audio_channel,
+                                           bool mix_microphone,
+                                           float volume_scaling) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  // Formatting fix only: the continuation argument is now aligned per the
+  // file's style; behavior is unchanged.
+  if (vie_file_player->SendAudioOnChannel(audio_channel, mix_microphone,
+                                          volume_scaling) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+// Stops routing the audio of file |file_id| into |audio_channel|.
+// Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::StopPlayFileAsMicrophone(const int file_id,
+                                          const int audio_channel) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+
+  if (vie_file_player->StopSendAudioOnChannel(audio_channel) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+// Plays the audio of file |file_id| locally on |audio_channel|, scaled by
+// |volume_scaling|. Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::StartPlayAudioLocally(const int file_id,
+                                       const int audio_channel,
+                                       float volume_scaling) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->PlayAudioLocally(audio_channel, volume_scaling) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+// Stops local audio playout of file |file_id| on |audio_channel|.
+// Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::StopPlayAudioLocally(const int file_id,
+                                      const int audio_channel) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->StopPlayAudioLocally(audio_channel) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+// Starts recording the video sent on |video_channel| to |file_nameUTF8|,
+// optionally with audio (|audio_source| != NO_AUDIO requires a voice engine
+// to have been set). Fails if the channel does not exist or a recording is
+// already in progress. Returns 0 on success, -1 on failure.
+int ViEFileImpl::StartRecordOutgoingVideo(const int video_channel,
+                                          const char* file_nameUTF8,
+                                          AudioSource audio_source,
+                                          const CodecInst& audio_codec,
+                                          const VideoCodec& video_codec,
+                                          const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_encoder->GetOutgoingFileRecorder();
+  if (file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Already recording outgoing video on channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileAlreadyRecording);
+    return -1;
+  }
+
+  // Resolve the voice channel/engine only when audio should be recorded.
+  WebRtc_Word32 ve_channel_id = -1;
+  VoiceEngine* ve_ptr = NULL;
+  if (audio_source != NO_AUDIO) {
+    ViEChannel* vie_channel = cs.Channel(video_channel);
+    if (!vie_channel) {
+      // Channel should exists since we have a ViEEncoder above.
+      assert(false);
+      return -1;
+    }
+    ve_channel_id = vie_channel->VoiceChannel();
+    ve_ptr = shared_data_->channel_manager()->GetVoiceEngine();
+    if (!ve_ptr) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Can't access voice engine. Have SetVoiceEngine "
+                   "been called?", __FUNCTION__);
+      shared_data_->SetLastError(kViEFileVoENotSet);
+      return -1;
+    }
+  }
+  if (file_recorder.StartRecording(file_nameUTF8, video_codec, audio_source,
+                                   ve_channel_id, audio_codec, ve_ptr,
+                                   file_format) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to start recording. Check arguments.",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+
+  return 0;
+}
+
+// Stops an ongoing outgoing-video recording on |video_channel|.
+// Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::StopRecordOutgoingVideo(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_encoder->GetOutgoingFileRecorder();
+  if (!file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d is not recording.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileNotRecording);
+    return -1;
+  }
+  if (file_recorder.StopRecording() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to stop recording of channel %d.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Stops an ongoing incoming-video recording on |video_channel|.
+// ReleaseIncomingFileRecorder() is called on every exit path after the
+// recorder has been fetched, so the channel always learns recording ended.
+// Returns 0 on success, -1 on failure (last error is set).
+int ViEFileImpl::StopRecordIncomingVideo(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_channel->GetIncomingFileRecorder();
+  if (!file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d is not recording.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileNotRecording);
+    vie_channel->ReleaseIncomingFileRecorder();
+    return -1;
+  }
+  if (file_recorder.StopRecording() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to stop recording of channel %d.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    vie_channel->ReleaseIncomingFileRecorder();
+    return -1;
+  }
+  // Let the channel know we are no longer recording.
+  vie_channel->ReleaseIncomingFileRecorder();
+  return 0;
+}
+
+// Starts recording the video received on |video_channel| to |file_nameUTF8|,
+// optionally with audio (|audio_source| != NO_AUDIO requires a voice engine
+// to have been set). Fails if the channel does not exist or a recording is
+// already in progress. Returns 0 on success, -1 on failure.
+int ViEFileImpl::StartRecordIncomingVideo(const int video_channel,
+                                          const char* file_nameUTF8,
+                                          AudioSource audio_source,
+                                          const CodecInst& audio_codec,
+                                          const VideoCodec& video_codec,
+                                          const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_channel->GetIncomingFileRecorder();
+  if (file_recorder.RecordingStarted()) {
+    // Bug fix: the message said "outgoing" — copy-paste from
+    // StartRecordOutgoingVideo; this method records incoming video.
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Already recording incoming video on channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileAlreadyRecording);
+    return -1;
+  }
+
+  // Resolve the voice channel/engine only when audio should be recorded.
+  WebRtc_Word32 ve_channel_id = -1;
+  VoiceEngine* ve_ptr = NULL;
+  if (audio_source != NO_AUDIO) {
+    ve_channel_id = vie_channel->VoiceChannel();
+    ve_ptr = shared_data_->channel_manager()->GetVoiceEngine();
+
+    if (!ve_ptr) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Can't access voice engine. Have SetVoiceEngine "
+                   "been called?", __FUNCTION__);
+      shared_data_->SetLastError(kViEFileVoENotSet);
+      return -1;
+    }
+  }
+  if (file_recorder.StartRecording(file_nameUTF8, video_codec, audio_source,
+                                   ve_channel_id, audio_codec, ve_ptr,
+                                   file_format) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to start recording. Check arguments.",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the video and audio codec settings stored in |file_name|
+// (out parameters |video_codec| and |audio_codec|) by delegating to
+// ViEFilePlayer::GetFileInformation.
+int ViEFileImpl::GetFileInformation(const char* file_name,
+                                    VideoCodec& video_codec,
+                                    CodecInst& audio_codec,
+                                    const FileFormats file_format) {
+  return ViEFilePlayer::GetFileInformation(
+           shared_data_->instance_id(),
+           static_cast<const WebRtc_Word8*>(file_name),
+           video_codec, audio_codec, file_format);
+}
+
+// Saves the last frame rendered on |video_channel| as a JPEG file at
+// |file_nameUTF8|. Returns 0 on success, -1 on failure.
+int ViEFileImpl::GetRenderSnapshot(const int video_channel,
+                                   const char* file_nameUTF8) {
+  // Gain access to the renderer for the specified channel and get its
+  // current frame.
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    return -1;
+  }
+
+  VideoFrame video_frame;
+  if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) {
+    return -1;
+  }
+
+  // JpegEncoder writes the JPEG file itself and does not return an encoded
+  // buffer, so the file is produced by Encode() below rather than by an
+  // explicit write here.
+  JpegEncoder jpeg_encoder;
+  RawImage input_image;
+  if (jpeg_encoder.SetFileName(file_nameUTF8) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not open output file '%s' for writing!",
+                 file_nameUTF8);
+    return -1;
+  }
+
+  input_image._width = video_frame.Width();
+  input_image._height = video_frame.Height();
+  // Swap transfers the frame's buffer into |input_image|; this function now
+  // owns it and must delete[] it before returning.
+  video_frame.Swap(input_image._buffer, input_image._length,
+                   input_image._size);
+
+  if (jpeg_encoder.Encode(input_image) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not encode i420 -> jpeg file '%s' for writing!",
+                 file_nameUTF8);
+    if (input_image._buffer) {
+      delete [] input_image._buffer;
+      input_image._buffer = NULL;
+    }
+    return -1;
+  }
+  delete [] input_image._buffer;
+  input_image._buffer = NULL;
+  return 0;
+}
+
+// Copies the last frame rendered on |video_channel| into |picture| as raw
+// I420. The caller owns picture.data and must release it with FreePicture().
+// Returns 0 on success, -1 on failure.
+int ViEFileImpl::GetRenderSnapshot(const int video_channel,
+                                   ViEPicture& picture) {
+  // Gain access to the renderer for the specified channel and get its
+  // current frame.
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    return -1;
+  }
+
+  VideoFrame video_frame;
+  if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) {
+    return -1;
+  }
+
+  // Copy from VideoFrame class to ViEPicture struct. I420 uses 1.5 bytes
+  // per pixel.
+  int buffer_length =
+      static_cast<int>(video_frame.Width() * video_frame.Height() * 1.5);
+  picture.data = static_cast<WebRtc_UWord8*>(malloc(
+      buffer_length * sizeof(WebRtc_UWord8)));
+  if (!picture.data) {
+    // Bug fix: a failed malloc previously flowed into memcpy with a NULL
+    // destination.
+    return -1;
+  }
+  memcpy(picture.data, video_frame.Buffer(), buffer_length);
+  picture.size = buffer_length;
+  picture.width = video_frame.Width();
+  picture.height = video_frame.Height();
+  picture.type = kVideoI420;
+  return 0;
+}
+
+// Saves the next frame captured by device |capture_id| as a JPEG file at
+// |file_nameUTF8|. Returns 0 on success, -1 on failure.
+int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
+                                          const char* file_nameUTF8) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    return -1;
+  }
+
+  VideoFrame video_frame;
+  if (GetNextCapturedFrame(capture_id, video_frame) == -1) {
+    // Bug fix: the old format string ended in "%s:%d" with no matching
+    // arguments, so WEBRTC_TRACE read garbage off the stack; the "acces"
+    // typo is fixed in the same message.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "Could not gain access to capture device %d video frame "
+                 "in %s", capture_id, __FUNCTION__);
+    return -1;
+  }
+
+  // JpegEncoder writes the JPEG file itself and does not return an encoded
+  // buffer, so the file is produced by Encode() below rather than by an
+  // explicit write here.
+  JpegEncoder jpeg_encoder;
+  RawImage input_image;
+  input_image._width = video_frame.Width();
+  input_image._height = video_frame.Height();
+  // Swap transfers the frame's buffer into |input_image|; this function now
+  // owns it and must delete[] it before returning.
+  video_frame.Swap(input_image._buffer, input_image._length,
+                   input_image._size);
+
+  if (jpeg_encoder.SetFileName(file_nameUTF8) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not open output file '%s' for writing!",
+                 file_nameUTF8);
+
+    if (input_image._buffer) {
+      delete [] input_image._buffer;
+      input_image._buffer = NULL;
+    }
+    return -1;
+  }
+  if (jpeg_encoder.Encode(input_image) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not encode i420 -> jpeg file '%s' for "
+                 "writing!", file_nameUTF8);
+    if (input_image._buffer) {
+      delete [] input_image._buffer;
+      input_image._buffer = NULL;
+    }
+    return -1;
+  }
+  delete [] input_image._buffer;
+  input_image._buffer = NULL;
+  return 0;
+}
+
+// Copies the next frame captured by device |capture_id| into |picture| as
+// raw I420. The caller owns picture.data and must release it with
+// FreePicture(). Returns 0 on success, -1 on failure.
+int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
+                                          ViEPicture& picture) {
+  VideoFrame video_frame;
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    return -1;
+  }
+  if (GetNextCapturedFrame(capture_id, video_frame) == -1) {
+    // Bug fix: the old format string ended in "%s:%d" with no matching
+    // arguments, so WEBRTC_TRACE read garbage off the stack; the "acces"
+    // typo is fixed in the same message.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "Could not gain access to capture device %d video frame "
+                 "in %s", capture_id, __FUNCTION__);
+    return -1;
+  }
+
+  // Copy from VideoFrame class to ViEPicture struct. I420 uses 1.5 bytes
+  // per pixel.
+  int buffer_length =
+      static_cast<int>(video_frame.Width() * video_frame.Height() * 1.5);
+  picture.data = static_cast<WebRtc_UWord8*>(malloc(
+      buffer_length * sizeof(WebRtc_UWord8)));
+  if (!picture.data) {
+    // Bug fix: a failed malloc previously flowed into memcpy with a NULL
+    // destination.
+    return -1;
+  }
+  memcpy(picture.data, video_frame.Buffer(), buffer_length);
+  picture.size = buffer_length;
+  picture.width = video_frame.Width();
+  picture.height = video_frame.Height();
+  picture.type = kVideoI420;
+  return 0;
+}
+
+// Frees picture.data (allocated by the snapshot methods above with malloc)
+// and resets all fields. Safe to call on an already-freed picture since
+// data is NULL-checked and then cleared. Always returns 0.
+int ViEFileImpl::FreePicture(ViEPicture& picture) {
+  if (picture.data) {
+    free(picture.data);
+  }
+
+  picture.data = NULL;
+  picture.size = 0;
+  picture.width = 0;
+  picture.height = 0;
+  picture.type = kVideoUnknown;
+  return 0;
+}
+// Loads the JPEG at |file_nameUTF8| and sets it as the image delivered by
+// capture device |capture_id|. Returns 0 on success, -1 on failure (last
+// error is set).
+int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
+                                       const char* file_nameUTF8) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    shared_data_->SetLastError(kViEFileInvalidCaptureId);
+    return -1;
+  }
+
+  VideoFrame capture_image;
+  if (ViEFileImage::ConvertJPEGToVideoFrame(
+          ViEId(shared_data_->instance_id(), capture_id), file_nameUTF8,
+          capture_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s(capture_id: %d) Failed to open file.", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  if (capturer->SetCaptureDeviceImage(capture_image)) {
+    shared_data_->SetLastError(kViEFileSetCaptureImageError);
+    return -1;
+  }
+  return 0;
+}
+
+// Sets the raw I420 |picture| as the image delivered by capture device
+// |capture_id|. Only kVideoI420 pictures are accepted. Returns 0 on
+// success, -1 on failure (last error is set).
+// (Formatting fix only: parameter alignment and line wrapping now follow
+// the file's style; behavior is unchanged.)
+int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
+                                       const ViEPicture& picture) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  if (picture.type != kVideoI420) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s(capture_id: %d) Not a valid picture type.",
+                 __FUNCTION__, capture_id);
+    shared_data_->SetLastError(kViEFileInvalidArgument);
+    return -1;
+  }
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    shared_data_->SetLastError(kViEFileSetCaptureImageError);
+    return -1;
+  }
+
+  VideoFrame capture_image;
+  if (ViEFileImage::ConvertPictureToVideoFrame(
+          ViEId(shared_data_->instance_id(), capture_id), picture,
+          capture_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s(capture_id: %d) Failed to use picture.", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  if (capturer->SetCaptureDeviceImage(capture_image)) {
+    shared_data_->SetLastError(kViEFileInvalidCapture);
+    return -1;
+  }
+  return 0;
+}
+
+// Loads the JPEG at |file_nameUTF8| and sets it as the image shown on
+// |video_channel| before the first frame is rendered. Returns 0 on success,
+// -1 on failure (last error is set).
+// (Formatting fix only: parameter alignment and line wrapping now follow
+// the file's style; behavior is unchanged.)
+int ViEFileImpl::SetRenderStartImage(const int video_channel,
+                                     const char* file_nameUTF8) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileInvalidRenderId);
+    return -1;
+  }
+
+  VideoFrame start_image;
+  if (ViEFileImage::ConvertJPEGToVideoFrame(
+          ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
+          start_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to open file.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  if (renderer->SetRenderStartImage(start_image) != 0) {
+    shared_data_->SetLastError(kViEFileSetStartImageError);
+    return -1;
+  }
+  return 0;
+}
+
+// Sets the raw I420 |picture| as the image shown on |video_channel| before
+// the first frame is rendered. Only kVideoI420 pictures are accepted.
+// Returns 0 on success, -1 on failure (last error is set).
+// (Formatting fix only: line wrapping now follows the file's style;
+// behavior is unchanged.)
+int ViEFileImpl::SetRenderStartImage(const int video_channel,
+                                     const ViEPicture& picture) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+  if (picture.type != kVideoI420) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Not a valid picture type.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidArgument);
+    return -1;
+  }
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileInvalidRenderId);
+    return -1;
+  }
+
+  VideoFrame start_image;
+  if (ViEFileImage::ConvertPictureToVideoFrame(
+          ViEId(shared_data_->instance_id(), video_channel), picture,
+          start_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to use picture.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidCapture);
+    return -1;
+  }
+  if (renderer->SetRenderStartImage(start_image) != 0) {
+    shared_data_->SetLastError(kViEFileSetStartImageError);
+    return -1;
+  }
+  return 0;
+}
+int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
+                                       const char* file_nameUTF8,
+                                       const unsigned int timeout_ms) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileInvalidRenderId);
+    return -1;
+  }
+  VideoFrame timeout_image;
+  if (ViEFileImage::ConvertJPEGToVideoFrame(
+          ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
+          timeout_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to open file.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  WebRtc_Word32 timeout_time = timeout_ms;
+  if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMinRenderTimeoutTimeMs);
+    timeout_time = kViEMinRenderTimeoutTimeMs;
+  }
+  if (timeout_ms > kViEMaxRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMaxRenderTimeoutTimeMs);
+    timeout_time = kViEMaxRenderTimeoutTimeMs;
+  }
+  if (renderer->SetTimeoutImage(timeout_image, timeout_time) != 0) {
+    shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
+                                       const ViEPicture& picture,
+const unsigned int timeout_ms) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  if (picture.type != kVideoI420) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Not a valid picture type.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidArgument);
+    return -1;
+  }
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
+    return -1;
+  }
+  VideoFrame timeout_image;
+  if (ViEFileImage::ConvertPictureToVideoFrame(
+          ViEId(shared_data_->instance_id(), video_channel), picture,
+          timeout_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to use picture.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidCapture);
+    return -1;
+  }
+  WebRtc_Word32 timeout_time = timeout_ms;
+  if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMinRenderTimeoutTimeMs);
+    timeout_time = kViEMinRenderTimeoutTimeMs;
+  }
+  if (timeout_ms > kViEMaxRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMaxRenderTimeoutTimeMs);
+    timeout_time = kViEMaxRenderTimeoutTimeMs;
+  }
+  if (renderer->SetTimeoutImage(timeout_image, timeout_time) != 0) {
+    shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEFileImpl::GetNextCapturedFrame(WebRtc_Word32 capture_id,
+VideoFrame& video_frame) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    return -1;
+  }
+
+  ViECaptureSnapshot* snap_shot = new ViECaptureSnapshot();
+  capturer->RegisterFrameCallback(-1, snap_shot);
+  bool snapshot_taken = snap_shot->GetSnapshot(
+      video_frame, kViECaptureMaxSnapshotWaitTimeMs);
+
+  // Check once again if it has been destroyed.
+  capturer->DeregisterFrameCallback(snap_shot);
+  delete snap_shot;
+  snap_shot = NULL;
+
+  if (snapshot_taken) {
+    return 0;
+  }
+  return -1;
+}
+
// Sets up the lock and condition variable used to hand exactly one captured
// frame from DeliverFrame() (capture thread) to GetSnapshot() (caller
// thread). |video_frame_| stays NULL until GetSnapshot() allocates it.
ViECaptureSnapshot::ViECaptureSnapshot()
    : crit_(CriticalSectionWrapper::CreateCriticalSection()),
      condition_varaible_(ConditionVariableWrapper::CreateConditionVariable()),
      video_frame_(NULL) {
}
+
+ViECaptureSnapshot::~ViECaptureSnapshot() {
+  if (video_frame_) {
+    delete video_frame_;
+    video_frame_ = NULL;
+  }
+}
+
+bool ViECaptureSnapshot::GetSnapshot(VideoFrame& video_frame,
+                                     unsigned int max_wait_time) {
+  crit_->Enter();
+  video_frame_ = new VideoFrame();
+  if (condition_varaible_->SleepCS(*(crit_.get()), max_wait_time)) {
+    // Snapshot taken.
+    video_frame.SwapFrame(*video_frame_);
+    delete video_frame_;
+    video_frame_ = NULL;
+    crit_->Leave();
+    return true;
+  }
+  crit_->Leave();
+  return false;
+}
+
+void ViECaptureSnapshot::DeliverFrame(int id, VideoFrame& video_frame,
+                                      int num_csrcs,
+const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+  CriticalSectionScoped cs(crit_.get());
+  if (!video_frame_) {
+    return;
+  }
+  video_frame_->SwapFrame(video_frame);
+  condition_varaible_->WakeAll();
+  return;
+}
+
+#endif
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_file_impl.h b/trunk/src/video_engine/vie_file_impl.h
new file mode 100644
index 0000000..63e2d4a
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_impl.h
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_file.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_ref_count.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+class ConditionVariableWrapper;
+class CriticalSectionWrapper;
+class ViESharedData;
+
// Helper used by ViEFileImpl::GetNextCapturedFrame(): registers as a frame
// callback on a capture device and hands exactly one delivered frame over
// to a blocked GetSnapshot() caller via a condition variable.
class ViECaptureSnapshot : public ViEFrameCallback {
 public:
  ViECaptureSnapshot();
  ~ViECaptureSnapshot();

  // Blocks up to |max_wait_time| ms for a frame from DeliverFrame();
  // returns true and fills |video_frame| if one arrived.
  bool GetSnapshot(VideoFrame& video_frame, unsigned int max_wait_time);

  // Implements ViEFrameCallback.
  virtual void DeliverFrame(int id, VideoFrame& video_frame, int num_csrcs = 0,
                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
  virtual void DelayChanged(int id, int frame_delay) {}
  // No frame-size preference; -1 tells the provider to use its own.
  virtual int GetPreferedFrameSettings(int& width, int& height,
                                       int& frame_rate) {
    return -1;
  }
  virtual void ProviderDestroyed(int id) {}

 private:
  // Protects |video_frame_| between the capture and snapshot threads.
  scoped_ptr<CriticalSectionWrapper> crit_;
  // NOTE(review): member name is a long-standing typo ("varaible"); renaming
  // would require touching the .cc as well.
  scoped_ptr<ConditionVariableWrapper> condition_varaible_;
  // Staging frame: non-NULL only while GetSnapshot() is waiting.
  VideoFrame* video_frame_;
};
+
// Implementation of the public ViEFile sub-API: file playback/recording,
// snapshots, and start/timeout images. All state is reached through the
// shared ViESharedData instance; errors are reported via SetLastError().
class ViEFileImpl
    : public ViEFile,
      public ViERefCount {
 public:
  // Implements ViEFile.
  virtual int Release();
  // File playback into the engine (video, and optionally audio).
  virtual int StartPlayFile(const char* file_nameUTF8, int& file_id,
                            const bool loop = false,
                            const FileFormats file_format = kFileFormatAviFile);
  virtual int StopPlayFile(const int file_id);
  virtual int RegisterObserver(int file_id, ViEFileObserver& observer);
  virtual int DeregisterObserver(int file_id, ViEFileObserver& observer);
  virtual int SendFileOnChannel(const int file_id, const int video_channel);
  virtual int StopSendFileOnChannel(const int video_channel);
  // Routing of the file's audio track through VoiceEngine channels.
  virtual int StartPlayFileAsMicrophone(const int file_id,
                                        const int audio_channel,
                                        bool mix_microphone = false,
                                        float volume_scaling = 1);
  virtual int StopPlayFileAsMicrophone(const int file_id,
                                       const int audio_channel);
  virtual int StartPlayAudioLocally(const int file_id, const int audio_channel,
                                    float volume_scaling = 1);
  virtual int StopPlayAudioLocally(const int file_id, const int audio_channel);
  // Recording of outgoing/incoming channel video (plus audio) to file.
  virtual int StartRecordOutgoingVideo(
      const int video_channel,
      const char* file_nameUTF8,
      AudioSource audio_source,
      const CodecInst& audio_codec,
      const VideoCodec& video_codec,
      const FileFormats file_format = kFileFormatAviFile);
  virtual int StartRecordIncomingVideo(
      const int video_channel,
      const char* file_nameUTF8,
      AudioSource audio_source,
      const CodecInst& audio_codec,
      const VideoCodec& video_codec,
      const FileFormats file_format = kFileFormatAviFile);
  virtual int StopRecordOutgoingVideo(const int video_channel);
  virtual int StopRecordIncomingVideo(const int video_channel);
  virtual int GetFileInformation(
      const char* file_name,
      VideoCodec& video_codec,
      CodecInst& audio_codec,
      const FileFormats file_format = kFileFormatAviFile);
  // Snapshots of the currently rendered / captured frame, to JPEG file or
  // to a caller-owned ViEPicture (free with FreePicture()).
  virtual int GetRenderSnapshot(const int video_channel,
                                const char* file_nameUTF8);
  virtual int GetRenderSnapshot(const int video_channel, ViEPicture& picture);
  virtual int FreePicture(ViEPicture& picture);
  virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                       const char* file_nameUTF8);
  virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                       ViEPicture& picture);
  // Static images shown instead of live video (capture side) or before the
  // first frame / on delivery timeout (render side).
  virtual int SetCaptureDeviceImage(const int capture_id,
                                    const char* file_nameUTF8);
  virtual int SetCaptureDeviceImage(const int capture_id,
                                    const ViEPicture& picture);
  virtual int SetRenderStartImage(const int video_channel,
                                  const char* file_nameUTF8);
  virtual int SetRenderStartImage(const int video_channel,
                                  const ViEPicture& picture);
  virtual int SetRenderTimeoutImage(const int video_channel,
                                    const char* file_nameUTF8,
                                    const unsigned int timeout_ms);
  virtual int SetRenderTimeoutImage(const int video_channel,
                                    const ViEPicture& picture,
                                    const unsigned int timeout_ms);

 protected:
  // Constructed/destroyed by the owning video engine, not by clients.
  ViEFileImpl(ViESharedData* shared_data);
  virtual ~ViEFileImpl();

 private:
  // Blocks until one frame is captured from |capture_id| or the snapshot
  // wait time expires; used by the snapshot methods above.
  WebRtc_Word32 GetNextCapturedFrame(WebRtc_Word32 capture_id,
                                     VideoFrame& video_frame);

  // Not owned; shared engine state (managers, error reporting).
  ViESharedData* shared_data_;
};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
diff --git a/trunk/src/video_engine/vie_file_player.cc b/trunk/src/video_engine/vie_file_player.cc
new file mode 100644
index 0000000..e40f337
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_player.cc
@@ -0,0 +1,508 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_file_player.h"
+
+#include "modules/utility/interface/file_player.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_file.h"
+#include "video_engine/vie_input_manager.h"
+#include "voice_engine/main/interface/voe_base.h"
+#include "voice_engine/main/interface/voe_file.h"
+#include "voice_engine/main/interface/voe_video_sync.h"
+
+namespace webrtc {
+
// Maximum time (ms) the decode thread blocks waiting for the 10 ms timer
// event before looping again (keeps the thread responsive to shutdown).
const int kThreadWaitTimeMs = 100;
+
+ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(
+    int file_id,
+    int engine_id,
+    const char* file_nameUTF8,
+    const bool loop,
+    const FileFormats file_format,
+    ViEInputManager& input_manager,
+    VoiceEngine* voe_ptr) {
+  ViEFilePlayer* self = new ViEFilePlayer(file_id, engine_id, input_manager);
+  if (!self || self->Init(file_nameUTF8, loop, file_format, voe_ptr) != 0) {
+    delete self;
+    self = NULL;
+  }
+  return self;
+}
+
// Member-initializing constructor only; all resource acquisition (locks,
// events, threads, the FilePlayer module) happens in Init().
ViEFilePlayer::ViEFilePlayer(int Id,
                             int engine_id,
                             ViEInputManager& input_manager)
    : ViEFrameProviderBase(Id, engine_id),
      play_back_started_(false),
      input_manager_(input_manager),
      feedback_cs_(NULL),
      audio_cs_(NULL),
      file_player_(NULL),
      audio_stream_(false),
      video_clients_(0),
      audio_clients_(0),
      local_audio_channel_(-1),
      observer_(NULL),
      voe_file_interface_(NULL),
      voe_video_sync_(NULL),
      decode_thread_(NULL),
      decode_event_(NULL),
      decoded_audio_length_(0) {
  // Zero the fixed-size name and audio buffers so partial reads are safe.
  memset(file_name_, 0, FileWrapper::kMaxFileNameSize);
  memset(decoded_audio_, 0, kMaxDecodedAudioLength);
}
+
// Tears down playback before freeing synchronization primitives.
// StopPlay() stops and deletes decode_thread_, releases the VoE interfaces
// and destroys file_player_, so only the event and locks remain here.
ViEFilePlayer::~ViEFilePlayer() {
  // StopPlay deletes decode_thread_.
  StopPlay();
  delete decode_event_;
  delete audio_cs_;
  delete feedback_cs_;
}
+
+int ViEFilePlayer::Init(const char* file_nameUTF8,
+                        const bool loop,
+                        const FileFormats file_format,
+                        VoiceEngine* voice_engine) {
+  feedback_cs_ = CriticalSectionWrapper::CreateCriticalSection();
+  if (!feedback_cs_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
+    return -1;
+  }
+
+  audio_cs_ = CriticalSectionWrapper::CreateCriticalSection();
+  if (!audio_cs_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
+    return -1;
+  }
+
+  decode_event_ = EventWrapper::Create();
+  if (!decode_event_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to allocate event");
+    return -1;
+  }
+  if (strlen(file_nameUTF8) > FileWrapper::kMaxFileNameSize) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() Too long filename");
+    return -1;
+  }
+  strncpy(file_name_, file_nameUTF8, strlen(file_nameUTF8) + 1);
+
+  file_player_ = FilePlayer::CreateFilePlayer(ViEId(engine_id_, id_),
+                                              file_format);
+  if (!file_player_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to create file player");
+    return -1;
+  }
+  if (file_player_->RegisterModuleFileCallback(this) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to "
+                 "RegisterModuleFileCallback");
+    file_player_ = NULL;
+    return -1;
+  }
+  decode_thread_ = ThreadWrapper::CreateThread(FilePlayDecodeThreadFunction,
+                                               this, kHighestPriority,
+                                               "ViEFilePlayThread");
+  if (!decode_thread_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to start decode thread.");
+    file_player_ = NULL;
+    return -1;
+  }
+
+  // Always try to open with Audio since we don't know on what channels the
+  // audio should be played on.
+  WebRtc_Word32 error = file_player_->StartPlayingVideoFile(file_name_, loop,
+                                                            false);
+  if (error) {
+    // Failed to open the file with audio, try without.
+    error = file_player_->StartPlayingVideoFile(file_name_, loop, true);
+    audio_stream_ = false;
+    if (error) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                   "ViEFilePlayer::StartPlay() failed to Start play video "
+                   "file");
+      return -1;
+    }
+
+  } else {
+    audio_stream_ = true;
+  }
+
+  if (audio_stream_) {
+    if (voice_engine) {
+      // A VoiceEngine has been provided and we want to play audio on local
+      // a channel.
+      voe_file_interface_ = VoEFile::GetInterface(voice_engine);
+      if (!voe_file_interface_) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::StartPlay() failed to get VEFile "
+                     "interface");
+        return -1;
+      }
+      voe_video_sync_ = VoEVideoSync::GetInterface(voice_engine);
+      if (!voe_video_sync_) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo,
+                     ViEId(engine_id_, id_),
+                     "ViEFilePlayer::StartPlay() failed to get "
+                     "VoEVideoSync interface");
+        return -1;
+      }
+    }
+  }
+
+  // Read audio /(or just video) every 10ms.
+  decode_event_->StartTimer(true, 10);
+  return 0;
+}
+
+int ViEFilePlayer::FrameCallbackChanged() {
+  // Starts the decode thread when someone cares.
+  if (ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() >
+      video_clients_) {
+    if (!play_back_started_) {
+      play_back_started_ = true;
+      unsigned int thread_id;
+      if (decode_thread_->Start(thread_id)) {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::FrameCallbackChanged() Started file decode"
+                     " thread %u", thread_id);
+      } else {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::FrameCallbackChanged() Failed to start "
+                     "file decode thread.");
+      }
+    } else if (!file_player_->IsPlayingFile()) {
+      if (file_player_->StartPlayingVideoFile(file_name_, false,
+                                              !audio_stream_) != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::FrameCallbackChanged(), Failed to restart "
+                     "the file player.");
+      }
+    }
+  }
+  video_clients_ = ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks();
+  return 0;
+}
+
+bool ViEFilePlayer::FilePlayDecodeThreadFunction(void* obj) {
+  return static_cast<ViEFilePlayer*>(obj)->FilePlayDecodeProcess();
+}
+
+bool ViEFilePlayer::FilePlayDecodeProcess() {
+  if (decode_event_->Wait(kThreadWaitTimeMs) == kEventSignaled) {
+    if (audio_stream_ && audio_clients_ == 0) {
+      // There is audio but no one cares, read the audio here.
+      Read(NULL, 0);
+    }
+    if (file_player_->TimeUntilNextVideoFrame() < 10) {
+      // Less than 10ms to next videoframe.
+      if (file_player_->GetVideoFromFile(decoded_video_) != 0) {
+      }
+    }
+    if (decoded_video_.Length() > 0) {
+      if (local_audio_channel_ != -1 && voe_video_sync_) {
+        // We are playing audio locally.
+        int audio_delay = 0;
+        if (voe_video_sync_->GetPlayoutBufferSize(audio_delay) == 0) {
+          decoded_video_.SetRenderTime(decoded_video_.RenderTimeMs() +
+                                       audio_delay);
+        }
+      }
+      DeliverFrame(decoded_video_);
+      decoded_video_.SetLength(0);
+    }
+  }
+  return true;
+}
+
// Full teardown in dependency order: decode thread first (it uses the
// event, player and VoE interfaces), then the timer, audio routing, VoE
// interfaces, and finally the FilePlayer module. Always returns 0.
int ViEFilePlayer::StopPlay() {
  // Only called from destructor.
  if (decode_thread_) {
    // SetNotAlive() makes the thread function's next return terminate it.
    decode_thread_->SetNotAlive();
    if (decode_thread_->Stop()) {
      delete decode_thread_;
    } else {
      // Deliberately leak the wrapper rather than delete an object a
      // still-running thread may touch.
      assert(!"ViEFilePlayer::StopPlay() Failed to stop decode thread");
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                   "ViEFilePlayer::StartPlay() Failed to stop file decode "
                   "thread.");
    }
  }
  decode_thread_ = NULL;
  if (decode_event_) {
    decode_event_->StopTimer();
  }
  // Detach the file's audio from all sending channels and local playout.
  StopPlayAudio();

  if (voe_file_interface_) {
    voe_file_interface_->Release();
    voe_file_interface_ = NULL;
  }
  if (voe_video_sync_) {
    voe_video_sync_->Release();
    voe_video_sync_ = NULL;
  }

  if (file_player_) {
    file_player_->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(file_player_);
    file_player_ = NULL;
  }
  return 0;
}
+
+int ViEFilePlayer::StopPlayAudio() {
+  // Stop sending audio.
+
+  std::set<int>::iterator it = audio_channels_sending_.begin();
+  while (it != audio_channels_sending_.end()) {
+    StopSendAudioOnChannel(*it);
+    // StopSendAudioOnChannel erases the item from the map.
+    it = audio_channels_sending_.begin();
+  }
+
+  // Stop local audio playback.
+  if (local_audio_channel_ != -1) {
+    StopPlayAudioLocally(local_audio_channel_);
+  }
+  local_audio_channel_ = -1;
+  audio_channel_buffers_.clear();
+  audio_clients_ = 0;
+  return 0;
+}
+
// InStream callback used by VoiceEngine to pull 10 ms of file audio.
// Several channels may share the same decoded block: a fresh block is read
// from the file only when |buf| has already consumed the current one (or no
// block exists yet); otherwise the cached block is replayed into |buf|.
// |buf| == NULL drains audio without a consumer (see FilePlayDecodeProcess).
// Returns the number of bytes copied (0 when the file has no more data).
// NOTE(review): |len| is ignored — presumably callers always pass a buffer
// of at least kMaxDecodedAudioLength bytes; verify against VoE callers.
int ViEFilePlayer::Read(void* buf, int len) {
  // Protect from simultaneous reading from multiple channels.
  CriticalSectionScoped lock(*audio_cs_);
  if (NeedsAudioFromFile(buf)) {
    // We will run the VoE in 16KHz.
    if (file_player_->Get10msAudioFromFile(decoded_audio_,
                                           decoded_audio_length_, 16000) != 0) {
      // No data.
      decoded_audio_length_ = 0;
      return 0;
    }
    // 2 bytes per sample.
    decoded_audio_length_ *= 2;
    if (buf) {
      // Record that |buf| has now consumed the current block.
      audio_channel_buffers_.push_back(buf);
    }
  } else {
    // No need for new audiobuffer from file, ie the buffer read from file has
    // not been played on this channel.
  }
  if (buf) {
    memcpy(buf, decoded_audio_, decoded_audio_length_);
  }
  return decoded_audio_length_;
}
+
+bool ViEFilePlayer::NeedsAudioFromFile(void* buf) {
+  bool needs_new_audio = false;
+  if (audio_channel_buffers_.size() == 0) {
+    return true;
+  }
+
+  // Check if we the buf already have read the current audio.
+  for (std::list<void*>::iterator it = audio_channel_buffers_.begin();
+       it != audio_channel_buffers_.end(); ++it) {
+    if (*it == buf) {
+      needs_new_audio = true;
+      audio_channel_buffers_.erase(it);
+      break;
+    }
+  }
+  return needs_new_audio;
+}
+
+void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id),
+               "%s: file_id %d", __FUNCTION__, id_);
+  file_player_->StopPlayingFile();
+
+  CriticalSectionScoped lock(*feedback_cs_);
+  if (observer_) {
+    observer_->PlayFileEnded(id_);
+  }
+}
+
+bool ViEFilePlayer::IsObserverRegistered() {
+  CriticalSectionScoped lock(*feedback_cs_);
+  return observer_ != NULL;
+}
+
+int ViEFilePlayer::RegisterObserver(ViEFileObserver& observer) {
+  CriticalSectionScoped lock(*feedback_cs_);
+  if (observer_) {
+    return -1;
+  }
+  observer_ = &observer;
+  return 0;
+}
+
+int ViEFilePlayer::DeRegisterObserver() {
+  CriticalSectionScoped lock(*feedback_cs_);
+  observer_ = NULL;
+  return 0;
+}
+
+int ViEFilePlayer::SendAudioOnChannel(const int audio_channel,
+                                      bool mix_microphone,
+                                      float volume_scaling) {
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s No VEFile interface.", __FUNCTION__);
+    return -1;
+  }
+  if (voe_file_interface_->StartPlayingFileAsMicrophone(audio_channel, this,
+                                                       mix_microphone,
+                                                       kFileFormatPcm16kHzFile,
+                                                       volume_scaling) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::SendAudioOnChannel() "
+                 "VE_StartPlayingFileAsMicrophone failed. audio_channel %d, "
+                 " mix_microphone %d, volume_scaling %.2f",
+                 audio_channel, mix_microphone, volume_scaling);
+    return -1;
+  }
+  audio_channels_sending_.insert(audio_channel);
+
+  CriticalSectionScoped lock(*audio_cs_);
+  audio_clients_++;
+  return 0;
+}
+
+int ViEFilePlayer::StopSendAudioOnChannel(const int audio_channel) {
+  int result = 0;
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StopSendAudioOnChannel() - no VoE interface");
+    return -1;
+  }
+  std::set<int>::iterator it = audio_channels_sending_.find(audio_channel);
+  if (it == audio_channels_sending_.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StopSendAudioOnChannel AudioChannel %d not "
+                 "sending", audio_channel);
+    return -1;
+  }
+  result = voe_file_interface_->StopPlayingFileAsMicrophone(audio_channel);
+  if (result != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StopSendAudioOnChannel() "
+                 "VE_StopPlayingFileAsMicrophone failed. audio_channel %d",
+                 audio_channel);
+  }
+  audio_channels_sending_.erase(audio_channel);
+  CriticalSectionScoped lock(*audio_cs_);
+  audio_clients_--;
+  assert(audio_clients_ >= 0);
+  return 0;
+}
+
+int ViEFilePlayer::PlayAudioLocally(const int audio_channel,
+                                    float volume_scaling) {
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s No VEFile interface.", __FUNCTION__);
+    return -1;
+  }
+  if (voe_file_interface_->StartPlayingFileLocally(audio_channel, this,
+                                                   kFileFormatPcm16kHzFile,
+                                                   volume_scaling) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s  VE_StartPlayingFileAsMicrophone failed. audio_channel %d,"
+                 " mix_microphone %d, volume_scaling %.2f",
+                 __FUNCTION__, audio_channel, volume_scaling);
+    return -1;
+  }
+
+  CriticalSectionScoped lock(*audio_cs_);
+  local_audio_channel_ = audio_channel;
+  audio_clients_++;
+  return 0;
+}
+
+int ViEFilePlayer::StopPlayAudioLocally(const int audio_channel) {
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s No VEFile interface.", __FUNCTION__);
+    return -1;
+  }
+  if (voe_file_interface_->StopPlayingFileLocally(audio_channel) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s VE_StopPlayingFileLocally failed. audio_channel %d.",
+                 __FUNCTION__, audio_channel);
+    return -1;
+  }
+
+  CriticalSectionScoped lock(*audio_cs_);
+  local_audio_channel_ = -1;
+  audio_clients_--;
+  return 0;
+}
+
+int ViEFilePlayer::GetFileInformation(int engine_id,
+                                      const char* file_name,
+                                      VideoCodec& video_codec,
+                                      CodecInst& audio_codec,
+                                      const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, engine_id, "%s ", __FUNCTION__);
+
+  FilePlayer* file_player = FilePlayer::CreateFilePlayer(engine_id,
+                                                         file_format);
+  if (!file_player) {
+    return -1;
+  }
+
+  bool video_only = false;
+
+  memset(&video_codec, 0, sizeof(video_codec));
+  memset(&audio_codec, 0, sizeof(audio_codec));
+
+  if (file_player->StartPlayingVideoFile(file_name, false, false) != 0) {
+    video_only = true;
+    if (file_player->StartPlayingVideoFile(file_name, false, true) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                   "%s Failed to open file.", __FUNCTION__);
+      FilePlayer::DestroyFilePlayer(file_player);
+      return -1;
+    }
+  }
+
+  if (!video_only && file_player->AudioCodec(audio_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s Failed to get audio codec.", __FUNCTION__);
+    FilePlayer::DestroyFilePlayer(file_player);
+    return -1;
+  }
+  if (file_player->video_codec_info(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s Failed to get video codec.", __FUNCTION__);
+    FilePlayer::DestroyFilePlayer(file_player);
+    return -1;
+  }
+  FilePlayer::DestroyFilePlayer(file_player);
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_file_player.h b/trunk/src/video_engine/vie_file_player.h
new file mode 100644
index 0000000..640b37c
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_player.h
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
+
+#include <list>
+#include <set>
+
+#include "common_types.h"
+#include "modules/media_file/interface/media_file_defines.h"
+#include "system_wrappers/interface/file_wrapper.h"
+#include "typedefs.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class EventWrapper;
+class FilePlayer;
+class ThreadWrapper;
+class ViEFileObserver;
+class ViEInputManager;
+class VoEFile;
+class VoEVideoSync;
+class VoiceEngine;
+
+class ViEFilePlayer
+    : public ViEFrameProviderBase,
+      protected FileCallback,
+      protected InStream {
+ public:
+  static ViEFilePlayer* CreateViEFilePlayer(int file_id,
+                                            int engine_id,
+                                            const char* file_nameUTF8,
+                                            const bool loop,
+                                            const FileFormats file_format,
+                                            ViEInputManager& input_manager,
+                                            VoiceEngine* voe_ptr);
+
+  static int GetFileInformation(const int engine_id,
+                                const char* file_name,
+                                VideoCodec& video_codec,
+                                CodecInst& audio_codec,
+                                const FileFormats file_format);
+  ~ViEFilePlayer();
+
+  bool IsObserverRegistered();
+  int RegisterObserver(ViEFileObserver& observer);
+  int DeRegisterObserver();
+  int SendAudioOnChannel(const int audio_channel,
+                         bool mix_microphone,
+                         float volume_scaling);
+  int StopSendAudioOnChannel(const int audio_channel);
+  int PlayAudioLocally(const int audio_channel, float volume_scaling);
+  int StopPlayAudioLocally(const int audio_channel);
+
+  // Implements ViEFrameProviderBase.
+  virtual int FrameCallbackChanged();
+
+ protected:
+  ViEFilePlayer(int Id, int engine_id, ViEInputManager& input_manager);
+  int Init(const WebRtc_Word8* file_nameUTF8,
+           const bool loop,
+           const FileFormats file_format,
+           VoiceEngine* voe_ptr);
+  int StopPlay();
+  int StopPlayAudio();
+
+  // File play decode function.
+  static bool FilePlayDecodeThreadFunction(void* obj);
+  bool FilePlayDecodeProcess();
+  bool NeedsAudioFromFile(void* buf);
+
+  // Implements webrtc::InStream.
+  virtual int Read(void* buf, int len);
+  virtual int Rewind() {
+    return 0;
+  }
+
+  // Implements FileCallback.
+  virtual void PlayNotification(const WebRtc_Word32 /*id*/,
+                                const WebRtc_UWord32 /*notification_ms*/) {}
+  virtual void RecordNotification(const WebRtc_Word32 /*id*/,
+                                  const WebRtc_UWord32 /*notification_ms*/) {}
+  virtual void PlayFileEnded(const WebRtc_Word32 id);
+  virtual void RecordFileEnded(const WebRtc_Word32 /*id*/) {}
+
+ private:
+  static const int kMaxDecodedAudioLength = 320;
+  bool play_back_started_;
+  ViEInputManager& input_manager_;
+
+  CriticalSectionWrapper* feedback_cs_;
+  CriticalSectionWrapper* audio_cs_;
+
+  FilePlayer* file_player_;
+  bool audio_stream_;
+
+  // Number of active video clients.
+  int video_clients_;
+
+  // Number of audio channels sending this audio.
+  int audio_clients_;
+
+  // Local audio channel playing this video. Sync video against this.
+  int local_audio_channel_;
+
+  ViEFileObserver* observer_;
+  WebRtc_Word8 file_name_[FileWrapper::kMaxFileNameSize];
+
+  // VoE Interface.
+  VoEFile* voe_file_interface_;
+  VoEVideoSync* voe_video_sync_;
+
+  // Thread for decoding video (and audio if no audio clients connected).
+  ThreadWrapper* decode_thread_;
+  EventWrapper* decode_event_;
+  WebRtc_Word16 decoded_audio_[kMaxDecodedAudioLength];
+  WebRtc_UWord32 decoded_audio_length_;
+
+  // Trick - list containing VoE buffer reading this file. Used if multiple
+  // audio channels are sending.
+  std::list<void*> audio_channel_buffers_;
+
+  // AudioChannels sending audio from this file.
+  std::set<int> audio_channels_sending_;
+
+  // Frame receiving decoded video from file.
+  VideoFrame decoded_video_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
diff --git a/trunk/src/video_engine/vie_file_recorder.cc b/trunk/src/video_engine/vie_file_recorder.cc
new file mode 100644
index 0000000..c2f8fa0
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_recorder.cc
@@ -0,0 +1,239 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_file_recorder.h"
+
+#include "modules/utility/interface/file_player.h"
+#include "modules/utility/interface/file_recorder.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+ViEFileRecorder::ViEFileRecorder(int instanceID)
+    : recorder_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      file_recorder_(NULL),
+      is_first_frame_recorded_(false),
+      is_out_stream_started_(false),
+      instance_id_(instanceID),
+      frame_delay_(0),
+      audio_channel_(-1),
+      audio_source_(NO_AUDIO),
+      voe_file_interface_(NULL) {
+}
+
+ViEFileRecorder::~ViEFileRecorder() {
+  StopRecording();
+  delete recorder_cs_;
+}
+
+int ViEFileRecorder::StartRecording(const char* file_nameUTF8,
+                                    const VideoCodec& codec_inst,
+                                    AudioSource audio_source,
+                                    int audio_channel,
+                                    const CodecInst& audio_codec_inst,
+                                    VoiceEngine* voe_ptr,
+                                    const FileFormats file_format) {
+  CriticalSectionScoped lock(*recorder_cs_);
+
+  if (file_recorder_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                 "ViEFileRecorder::StartRecording() - already recording.");
+    return -1;
+  }
+  file_recorder_ = FileRecorder::CreateFileRecorder(instance_id_, file_format);
+  if (!file_recorder_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                 "ViEFileRecorder::StartRecording() failed to create recoder.");
+    return -1;
+  }
+
+  int error = file_recorder_->StartRecordingVideoFile(file_nameUTF8,
+                                                      audio_codec_inst,
+                                                      codec_inst,
+                                                      AMRFileStorage,
+                                                      audio_source == NO_AUDIO);
+  if (error) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                 "ViEFileRecorder::StartRecording() failed to "
+                 "StartRecordingVideoFile.");
+    FileRecorder::DestroyFileRecorder(file_recorder_);
+    file_recorder_ = NULL;
+    return -1;
+  }
+
+  audio_source_ = audio_source;
+  if (voe_ptr && audio_source != NO_AUDIO) {
+    // VoE interface has been provided and we want to record audio.
+    voe_file_interface_ = VoEFile::GetInterface(voe_ptr);
+    if (!voe_file_interface_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                   "ViEFileRecorder::StartRecording() failed to get VEFile "
+                   "interface");
+      return -1;
+    }
+
+    // Always L16.
+    CodecInst engine_audio_codec_inst = {96, "L16", audio_codec_inst.plfreq,
+                                         audio_codec_inst.plfreq / 100, 1,
+                                         audio_codec_inst.plfreq * 16 };
+
+    switch (audio_source) {
+      // case NO_AUDIO is checked above.
+      case MICROPHONE:
+        error = voe_file_interface_->StartRecordingMicrophone(
+            this, &engine_audio_codec_inst);
+        break;
+      case PLAYOUT:
+        error = voe_file_interface_->StartRecordingPlayout(
+            audio_channel, this, &engine_audio_codec_inst);
+        break;
+      default:
+        assert(!"Unknown audio_source");
+    }
+    if (error != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                   "ViEFileRecorder::StartRecording() failed to start recording"
+                   " audio");
+      FileRecorder::DestroyFileRecorder(file_recorder_);
+      file_recorder_ = NULL;
+      return -1;
+    }
+    is_out_stream_started_ = true;
+    audio_channel_ = audio_channel;
+  }
+  is_first_frame_recorded_ = false;
+  return 0;
+}
+
+int ViEFileRecorder::StopRecording() {
+  int error = 0;
+  // We cannot hold recorder_cs_ while accessing VoE functions. It might cause
+  // deadlock in Write.
+  if (voe_file_interface_) {
+    switch (audio_source_) {
+      case MICROPHONE:
+        error = voe_file_interface_->StopRecordingMicrophone();
+        break;
+      case PLAYOUT:
+        error = voe_file_interface_->StopRecordingPlayout(audio_channel_);
+        break;
+      case NO_AUDIO:
+        break;
+      default:
+        assert(!"Unknown audio_source");
+    }
+    if (error != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                   "ViEFileRecorder::StopRecording() failed to stop recording "
+                   "audio");
+    }
+  }
+  CriticalSectionScoped lock(*recorder_cs_);
+  if (voe_file_interface_) {
+    voe_file_interface_->Release();
+    voe_file_interface_ = NULL;
+  }
+
+  if (file_recorder_) {
+    if (file_recorder_->IsRecording()) {
+      int error = file_recorder_->StopRecording();
+      if (error) {
+        return -1;
+      }
+    }
+    FileRecorder::DestroyFileRecorder(file_recorder_);
+    file_recorder_ = NULL;
+  }
+  is_first_frame_recorded_ = false;
+  is_out_stream_started_ = false;
+  return 0;
+}
+
+void ViEFileRecorder::SetFrameDelay(int frame_delay) {
+  CriticalSectionScoped lock(*recorder_cs_);
+  frame_delay_ = frame_delay;
+}
+
+bool ViEFileRecorder::RecordingStarted() {
+  CriticalSectionScoped lock(*recorder_cs_);
+  return file_recorder_ && file_recorder_->IsRecording();
+}
+
+bool ViEFileRecorder::FirstFrameRecorded() {
+  CriticalSectionScoped lock(*recorder_cs_);
+  return is_first_frame_recorded_;
+}
+
+bool ViEFileRecorder::IsRecordingFileFormat(const FileFormats file_format) {
+  CriticalSectionScoped lock(*recorder_cs_);
+  return (file_recorder_->RecordingFileFormat() == file_format) ? true : false;
+}
+
+void ViEFileRecorder::RecordVideoFrame(const VideoFrame& video_frame) {
+  CriticalSectionScoped lock(*recorder_cs_);
+
+  if (file_recorder_ && file_recorder_->IsRecording()) {
+    if (!IsRecordingFileFormat(kFileFormatAviFile))
+      return;
+
+    // Compensate for frame delay in order to get audio/video sync when
+    // recording local video.
+    const WebRtc_UWord32 time_stamp = video_frame.TimeStamp();
+    const WebRtc_Word64 render_time_stamp = video_frame.RenderTimeMs();
+    VideoFrame& unconst_video_frame = const_cast<VideoFrame&>(video_frame);
+    unconst_video_frame.SetTimeStamp(time_stamp - 90 * frame_delay_);
+    unconst_video_frame.SetRenderTime(render_time_stamp - frame_delay_);
+
+    file_recorder_->RecordVideoToFile(unconst_video_frame);
+
+    unconst_video_frame.SetRenderTime(render_time_stamp);
+    unconst_video_frame.SetTimeStamp(time_stamp);
+  }
+}
+
+bool ViEFileRecorder::Write(const void* buf, int len) {
+  if (!is_out_stream_started_)
+    return true;
+
+  // Always 10 ms L16 from VoE.
+  if (len % (2 * 80)) {
+    // Not 2 bytes 80 samples.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, audio_channel_,
+                 "Audio length not supported: %d.", len);
+    return true;
+  }
+
+  AudioFrame audio_frame;
+  WebRtc_UWord16 length_in_samples = len / 2;
+  audio_frame.UpdateFrame(audio_channel_, 0,
+                          static_cast<const WebRtc_Word16*>(buf),
+                          length_in_samples, length_in_samples * 100,
+                          AudioFrame::kUndefined,
+                          AudioFrame::kVadUnknown);
+
+  CriticalSectionScoped lock(*recorder_cs_);
+  if (file_recorder_ && file_recorder_->IsRecording()) {
+    TickTime tick_time = TickTime::Now();
+    file_recorder_->RecordAudioToFile(audio_frame, &tick_time);
+  }
+
+  // Always return true to continue recording.
+  return true;
+}
+
+int ViEFileRecorder::Rewind() {
+  // Not supported!
+  return -1;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_file_recorder.h b/trunk/src/video_engine/vie_file_recorder.h
new file mode 100644
index 0000000..1d34ad3
--- /dev/null
+++ b/trunk/src/video_engine/vie_file_recorder.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
+
+#include "modules/utility/interface/file_recorder.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_file.h"
+#include "voice_engine/main/interface/voe_file.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class ViEFileRecorder : protected OutStream {
+ public:
+  explicit ViEFileRecorder(int channel_id);
+  ~ViEFileRecorder();
+
+  int StartRecording(const char* file_nameUTF8,
+                     const VideoCodec& codec_inst,
+                     AudioSource audio_source, int audio_channel,
+                     const CodecInst& audio_codec_inst,
+                     VoiceEngine* voe_ptr,
+                     const FileFormats file_format = kFileFormatAviFile);
+  int StopRecording();
+
+  void SetFrameDelay(int frame_delay);
+  bool RecordingStarted();
+
+  // Records incoming decoded video frame to file.
+  void RecordVideoFrame(const VideoFrame& video_frame);
+
+ protected:
+  bool FirstFrameRecorded();
+  bool IsRecordingFileFormat(const FileFormats file_format);
+
+  // Implements OutStream.
+  bool Write(const void* buf, int len);
+  int Rewind();
+
+ private:
+  CriticalSectionWrapper* recorder_cs_;
+
+  FileRecorder* file_recorder_;
+  bool is_first_frame_recorded_;
+  bool is_out_stream_started_;
+  int instance_id_;
+  int frame_delay_;
+  int audio_channel_;
+  AudioSource audio_source_;
+  VoEFile* voe_file_interface_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
diff --git a/trunk/src/video_engine/vie_frame_provider_base.cc b/trunk/src/video_engine/vie_frame_provider_base.cc
new file mode 100644
index 0000000..8b7bf1f
--- /dev/null
+++ b/trunk/src/video_engine/vie_frame_provider_base.cc
@@ -0,0 +1,197 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_frame_provider_base.h"
+
+#include <algorithm>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engine_id)
+    : id_(Id),
+      engine_id_(engine_id),
+      provider_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      frame_delay_(0) {
+}
+
+ViEFrameProviderBase::~ViEFrameProviderBase() {
+  if (frame_callbacks_.size() > 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                 "FrameCallbacks still exist when Provider deleted %d",
+                 frame_callbacks_.size());
+  }
+
+  for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+       it != frame_callbacks_.end(); ++it) {
+    (*it)->ProviderDestroyed(id_);
+  }
+  frame_callbacks_.clear();
+}
+
+int ViEFrameProviderBase::Id() {
+  return id_;
+}
+
+void ViEFrameProviderBase::DeliverFrame(
+    VideoFrame& video_frame,
+    int num_csrcs,
+    const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+#ifdef DEBUG_
+  const TickTime start_process_time = TickTime::Now();
+#endif
+  CriticalSectionScoped cs(provider_cs_.get());
+
+  // Deliver the frame to all registered callbacks.
+  if (frame_callbacks_.size() > 0) {
+    if (frame_callbacks_.size() == 1) {
+      // We don't have to copy the frame.
+      frame_callbacks_.front()->DeliverFrame(id_, video_frame, num_csrcs, CSRC);
+    } else {
+      // Make a copy of the frame for all registered callbacks.
+      for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+           it != frame_callbacks_.end(); ++it) {
+        if (!extra_frame_.get()) {
+          extra_frame_.reset(new VideoFrame());
+        }
+        extra_frame_->CopyFrame(video_frame);
+        (*it)->DeliverFrame(id_, *(extra_frame_.get()), num_csrcs, CSRC);
+      }
+    }
+  }
+#ifdef DEBUG_
+  const int process_time =
+      static_cast<int>((TickTime::Now() - start_process_time).Milliseconds());
+  if (process_time > 25) {
+    // Warn if the delivery time is too long.
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s Too long time: %ums", __FUNCTION__, process_time);
+  }
+#endif
+}
+
+void ViEFrameProviderBase::SetFrameDelay(int frame_delay) {
+  CriticalSectionScoped cs(provider_cs_.get());
+  frame_delay_ = frame_delay;
+
+  for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+       it != frame_callbacks_.end(); ++it) {
+    (*it)->DelayChanged(id_, frame_delay);
+  }
+}
+
+int ViEFrameProviderBase::FrameDelay() {
+  return frame_delay_;
+}
+
+int ViEFrameProviderBase::GetBestFormat(int& best_width,
+                                        int& best_height,
+                                        int& best_frame_rate) {
+  int largest_width = 0;
+  int largest_height = 0;
+  int highest_frame_rate = 0;
+
+  CriticalSectionScoped cs(provider_cs_.get());
+  for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+       it != frame_callbacks_.end(); ++it) {
+    int prefered_width = 0;
+    int prefered_height = 0;
+    int prefered_frame_rate = 0;
+    if ((*it)->GetPreferedFrameSettings(prefered_width, prefered_height,
+                                        prefered_frame_rate) == 0) {
+      if (prefered_width > largest_width) {
+        largest_width = prefered_width;
+      }
+      if (prefered_height > largest_height) {
+        largest_height = prefered_height;
+      }
+      if (prefered_frame_rate > highest_frame_rate) {
+        highest_frame_rate = prefered_frame_rate;
+      }
+    }
+  }
+  best_width = largest_width;
+  best_height = largest_height;
+  best_frame_rate = highest_frame_rate;
+  return 0;
+}
+
+int ViEFrameProviderBase::RegisterFrameCallback(
+    int observer_id, ViEFrameCallback* callback_object) {
+  assert(callback_object);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
+               __FUNCTION__, callback_object);
+  {
+    CriticalSectionScoped cs(provider_cs_.get());
+    if (std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
+                  callback_object) != frame_callbacks_.end()) {
+      // This object is already registered.
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                   "%s 0x%p already registered", __FUNCTION__,
+                   callback_object);
+      assert(!"frameObserver already registered");
+      return -1;
+    }
+    frame_callbacks_.push_back(callback_object);
+  }
+  // Report current capture delay.
+  callback_object->DelayChanged(id_, frame_delay_);
+
+  // Notify implementer of this class that the callback list has changed.
+  FrameCallbackChanged();
+  return 0;
+}
+
+int ViEFrameProviderBase::DeregisterFrameCallback(
+    const ViEFrameCallback* callback_object) {
+  assert(callback_object);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
+               __FUNCTION__, callback_object);
+  CriticalSectionScoped cs(provider_cs_.get());
+
+  FrameCallbacks::iterator it = std::find(frame_callbacks_.begin(),
+                                          frame_callbacks_.end(),
+                                          callback_object);
+  if (it == frame_callbacks_.end()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s 0x%p not found", __FUNCTION__, callback_object);
+    return -1;
+  }
+  frame_callbacks_.erase(it);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
+               "%s 0x%p deregistered", __FUNCTION__, callback_object);
+
+  // Notify implementer of this class that the callback list has changed.
+  FrameCallbackChanged();
+  return 0;
+}
+
+bool ViEFrameProviderBase::IsFrameCallbackRegistered(
+    const ViEFrameCallback* callback_object) {
+  assert(callback_object);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
+               "%s(0x%p)", __FUNCTION__, callback_object);
+
+  CriticalSectionScoped cs(provider_cs_.get());
+  return std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
+                   callback_object) != frame_callbacks_.end();
+}
+
+int ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() {
+  CriticalSectionScoped cs(provider_cs_.get());
+  return frame_callbacks_.size();
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_frame_provider_base.h b/trunk/src/video_engine/vie_frame_provider_base.h
new file mode 100644
index 0000000..466e12e
--- /dev/null
+++ b/trunk/src/video_engine/vie_frame_provider_base.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
+
+#include <vector>
+
+#include "common_types.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class VideoEncoder;
+class VideoFrame;
+
+// ViEFrameCallback shall be implemented by all classes receiving frames from a
+// frame provider.
+class ViEFrameCallback {
+ public:
+  virtual void DeliverFrame(int id,
+                            VideoFrame& video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
+
+  // The capture delay has changed from the provider. |frame_delay| is given in
+  // ms.
+  virtual void DelayChanged(int id, int frame_delay) = 0;
+
+  // Get the width, height and frame rate preferred by this observer.
+  virtual int GetPreferedFrameSettings(int& width,
+                                       int& height,
+                                       int& frame_rate) = 0;
+
+  // ProviderDestroyed is called when the frame provider is about to be
+  // destroyed. There must not be any more calls to the provider after this.
+  virtual void ProviderDestroyed(int id) = 0;
+
+  virtual ~ViEFrameCallback() {}
+};
+
+// ViEFrameProviderBase is a base class that will deliver frames to all
+// registered ViEFrameCallbacks.
+class ViEFrameProviderBase {
+ public:
+  ViEFrameProviderBase(int Id, int engine_id);
+  virtual ~ViEFrameProviderBase();
+
+  // Returns the frame provider id.
+  int Id();
+
+  // Register frame callbacks, i.e. a receiver of the captured frame.
+  virtual int RegisterFrameCallback(int observer_id,
+                                    ViEFrameCallback* callback_object);
+
+  virtual int DeregisterFrameCallback(const ViEFrameCallback* callback_object);
+
+  virtual bool IsFrameCallbackRegistered(
+      const ViEFrameCallback* callback_object);
+
+  int NumberOfRegisteredFrameCallbacks();
+
+  // FrameCallbackChanged
+  // Inherited classes should check for new frame_settings and reconfigure
+  // output if possible.
+  virtual int FrameCallbackChanged() = 0;
+
+ protected:
+  void DeliverFrame(VideoFrame& video_frame,
+                    int num_csrcs = 0,
+                    const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  void SetFrameDelay(int frame_delay);
+  int FrameDelay();
+  int GetBestFormat(int& best_width,
+                    int& best_height,
+                    int& best_frame_rate);
+
+  int id_;
+  int engine_id_;
+
+  // Frame callbacks.
+  typedef std::vector<ViEFrameCallback*> FrameCallbacks;
+  FrameCallbacks frame_callbacks_;
+  scoped_ptr<CriticalSectionWrapper> provider_cs_;
+
+ private:
+  scoped_ptr<VideoFrame> extra_frame_;
+  int frame_delay_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
diff --git a/trunk/src/video_engine/vie_image_process_impl.cc b/trunk/src/video_engine/vie_image_process_impl.cc
new file mode 100644
index 0000000..d9dfcd4
--- /dev/null
+++ b/trunk/src/video_engine/vie_image_process_impl.cc
@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_image_process_impl.h"
+
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViEImageProcess* ViEImageProcess::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEImageProcessImpl* vie_image_process_impl = vie_impl;
+  // Increase ref count.
+  (*vie_image_process_impl)++;
+  return vie_image_process_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViEImageProcessImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcess::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEImageProcess release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcess reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEImageProcessImpl::ViEImageProcessImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcessImpl::ViEImageProcessImpl() Ctor");
+}
+
+ViEImageProcessImpl::~ViEImageProcessImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcessImpl::~ViEImageProcessImpl() Dtor");
+}
+
+int ViEImageProcessImpl::RegisterCaptureEffectFilter(
+  const int capture_id,
+  ViEEffectFilter& capture_filter) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+  if (vie_capture->RegisterEffectFilter(&capture_filter) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterExists);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::DeregisterCaptureEffectFilter(const int capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+  if (vie_capture->RegisterEffectFilter(NULL) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterDoesNotExist);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::RegisterSendEffectFilter(
+    const int video_channel,
+    ViEEffectFilter& send_filter) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (vie_encoder == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->RegisterEffectFilter(&send_filter) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterExists);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes any effect filter from the encoder of |video_channel| by
+// registering a NULL filter. Returns 0 on success, -1 on failure.
+int ViEImageProcessImpl::DeregisterSendEffectFilter(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped scoped_channel_manager(
+      *(shared_data_->channel_manager()));
+  ViEEncoder* encoder = scoped_channel_manager.Encoder(video_channel);
+  if (!encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  // Registering NULL deregisters the currently installed filter, if any.
+  if (encoder->RegisterEffectFilter(NULL) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterDoesNotExist);
+    return -1;
+  }
+  return 0;
+}
+
+// Registers |render_filter| on |video_channel|. Returns 0 on success; on
+// failure sets the last error and returns -1.
+// Fixed: the parameter continuation lines used a 2-space indent, unlike
+// every sibling method in this file (4 spaces).
+int ViEImageProcessImpl::RegisterRenderEffectFilter(
+    const int video_channel,
+    ViEEffectFilter& render_filter) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  // A non-zero result here is reported as "filter already registered".
+  if (vie_channel->RegisterEffectFilter(&render_filter) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterExists);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes any effect filter from |video_channel| by registering a NULL
+// filter. Returns 0 on success, -1 on failure.
+int ViEImageProcessImpl::DeregisterRenderEffectFilter(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped scoped_channel_manager(
+      *(shared_data_->channel_manager()));
+  ViEChannel* channel = scoped_channel_manager.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  // Registering NULL deregisters the currently installed filter, if any.
+  if (channel->RegisterEffectFilter(NULL) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterDoesNotExist);
+    return -1;
+  }
+  return 0;
+}
+
+// Toggles the capture module's deflickering filter for |capture_id|.
+// Returns 0 on success; on failure sets the last error and returns -1.
+int ViEImageProcessImpl::EnableDeflickering(const int capture_id,
+                                            const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d, enable: %d)", __FUNCTION__, capture_id,
+               enable);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    // Fixed: this lookup failure concerns a capture id, not a channel id.
+    // Previously reported kViEImageProcessInvalidChannelId; now matches the
+    // other capture entry points (e.g. EnableDenoising).
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+
+  if (vie_capture->EnableDeflickering(enable) != 0) {
+    if (enable) {
+      shared_data_->SetLastError(kViEImageProcessAlreadyEnabled);
+    } else {
+      shared_data_->SetLastError(kViEImageProcessAlreadyDisabled);
+    }
+    return -1;
+  }
+  return 0;
+}
+
+// Toggles the capture module's denoising filter for |capture_id|.
+// Returns 0 on success; on failure sets the last error and returns -1.
+int ViEImageProcessImpl::EnableDenoising(const int capture_id,
+                                         const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d, enable: %d)", __FUNCTION__, capture_id,
+               enable);
+
+  ViEInputManagerScoped scoped_input_manager(*(shared_data_->input_manager()));
+  ViECapturer* capturer = scoped_input_manager.Capture(capture_id);
+  if (capturer == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+
+  if (capturer->EnableDenoising(enable) != 0) {
+    // Map the failure onto the already-enabled/already-disabled error codes
+    // that the public API defines for this call.
+    shared_data_->SetLastError(enable ? kViEImageProcessAlreadyEnabled
+                                      : kViEImageProcessAlreadyDisabled);
+    return -1;
+  }
+  return 0;
+}
+
+// Toggles color enhancement on |video_channel|.
+// Returns 0 on success; on failure sets the last error and returns -1.
+int ViEImageProcessImpl::EnableColorEnhancement(const int video_channel,
+                                                const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+
+  ViEChannelManagerScoped scoped_channel_manager(
+      *(shared_data_->channel_manager()));
+  ViEChannel* channel = scoped_channel_manager.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  if (channel->EnableColorEnhancement(enable) != 0) {
+    // Map the failure onto the already-enabled/already-disabled error codes
+    // that the public API defines for this call.
+    shared_data_->SetLastError(enable ? kViEImageProcessAlreadyEnabled
+                                      : kViEImageProcessAlreadyDisabled);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_image_process_impl.h b/trunk/src/video_engine/vie_image_process_impl.h
new file mode 100644
index 0000000..169c9e7
--- /dev/null
+++ b/trunk/src/video_engine/vie_image_process_impl.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_IMAGE_PROCESS_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_IMAGE_PROCESS_IMPL_H_
+
+#include "typedefs.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+// Implementation of the ViEImageProcess sub-API: effect-filter registration
+// for capture devices, send channels and render channels, plus the
+// deflickering / denoising / color-enhancement toggles. Reference counted
+// through ViERefCount.
+class ViEImageProcessImpl
+    : public ViEImageProcess,
+      public ViERefCount {
+ public:
+  // Implements ViEImageProcess.
+  virtual int Release();
+  virtual int RegisterCaptureEffectFilter(const int capture_id,
+                                          ViEEffectFilter& capture_filter);
+  virtual int DeregisterCaptureEffectFilter(const int capture_id);
+  virtual int RegisterSendEffectFilter(const int video_channel,
+                                       ViEEffectFilter& send_filter);
+  virtual int DeregisterSendEffectFilter(const int video_channel);
+  virtual int RegisterRenderEffectFilter(const int video_channel,
+                                         ViEEffectFilter& render_filter);
+  virtual int DeregisterRenderEffectFilter(const int video_channel);
+  virtual int EnableDeflickering(const int capture_id, const bool enable);
+  virtual int EnableDenoising(const int capture_id, const bool enable);
+  virtual int EnableColorEnhancement(const int video_channel,
+                                     const bool enable);
+
+ protected:
+  // Constructed/destroyed only by subclasses (see VideoEngineImpl).
+  ViEImageProcessImpl(ViESharedData* shared_data);
+  virtual ~ViEImageProcessImpl();
+
+ private:
+  // Engine-wide shared state. NOTE(review): pointer is stored, not owned
+  // here as far as this header shows — confirm lifetime against the creator.
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_IMAGE_PROCESS_IMPL_H_
diff --git a/trunk/src/video_engine/vie_impl.cc b/trunk/src/video_engine/vie_impl.cc
new file mode 100644
index 0000000..d8a421b
--- /dev/null
+++ b/trunk/src/video_engine/vie_impl.cc
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_impl.h"
+
+#if (defined(WIN32_) || defined(WIN64_))
+#include <Windows.h>  // For LoadLibrary.
+#include <tchar.h>    // For T_.
+#endif
+
+#include "system_wrappers/interface/trace.h"
+
+#ifdef WEBRTC_ANDROID
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "modules/video_render/main/interface/video_render.h"
+#endif
+
+// Global counter to get an id for each new ViE instance.
+static WebRtc_Word32 g_vie_active_instance_counter = 0;
+
+namespace webrtc {
+
+// extern "C" ensures that GetProcAddress() can find the function address.
+extern "C" {
+  // Allocates a new engine instance and bumps the global instance counter.
+  VideoEngine* GetVideoEngine() {
+    VideoEngineImpl* engine_impl = new VideoEngineImpl();
+    if (engine_impl == NULL) {
+      // NOTE(review): plain operator new throws rather than returning NULL
+      // unless exceptions are disabled — this guard presumably targets
+      // -fno-exceptions builds; confirm.
+      return NULL;
+    }
+    g_vie_active_instance_counter++;
+    return reinterpret_cast<VideoEngine*>(engine_impl);
+  }
+}
+
+// Factory for VideoEngine instances. On Windows it first tries to load a
+// testing DLL and use its GetVideoEngine export; otherwise it falls back to
+// the local GetVideoEngine().
+VideoEngine* VideoEngine::Create() {
+#if (defined(WIN32_) || defined(WIN64_))
+  // NOTE(review): WIN32_/WIN64_ look like misspellings of _WIN32/_WIN64, so
+  // this block is likely never compiled — confirm which macros the build
+  // actually defines.
+  // Load a debug dll, if there is one.
+  HMODULE hmod_ = LoadLibrary(TEXT("VideoEngineTestingDLL.dll"));
+  if (hmod_) {
+    typedef VideoEngine* (*PFNGetVideoEngineLib)(void);
+    PFNGetVideoEngineLib pfn =
+      (PFNGetVideoEngineLib)GetProcAddress(hmod_, "GetVideoEngine");
+    if (pfn) {
+      // Delegate creation to the test DLL's factory.
+      VideoEngine* self = pfn();
+      return self;
+    } else {
+      assert(!"Failed to open test dll VideoEngineTestingDLL.dll");
+      return NULL;
+    }
+  }
+#endif
+
+  return GetVideoEngine();
+}
+
+// Destroys a VideoEngine instance obtained from Create(). Refuses to delete
+// (returning false) while any compiled-in sub-API still holds references,
+// since tearing the engine down would leave those interfaces dangling.
+// On success |video_engine| is reset to NULL and true is returned.
+bool VideoEngine::Delete(VideoEngine*& video_engine) {
+  if (!video_engine) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "VideoEngine::Delete - No argument");
+    return false;
+  }
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "VideoEngine::Delete(vie = 0x%p)", video_engine);
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+
+  // Check all reference counters.
+  ViEBaseImpl* vie_base = vie_impl;
+  if (vie_base->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEBase ref count: %d", vie_base->GetCount());
+    return false;
+  }
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+  ViECaptureImpl* vie_capture = vie_impl;
+  if (vie_capture->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViECapture ref count: %d", vie_capture->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+  ViECodecImpl* vie_codec = vie_impl;
+  if (vie_codec->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViECodec ref count: %d", vie_codec->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+  ViEEncryptionImpl* vie_encryption = vie_impl;
+  if (vie_encryption->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEEncryption ref count: %d", vie_encryption->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+  ViEExternalCodecImpl* vie_external_codec = vie_impl;
+  if (vie_external_codec->GetCount() > 0) {
+    // Fixed copy-paste bug: this trace previously reported "ViEEncryption"
+    // and read vie_encryption->GetCount(), which is also a compile error
+    // when WEBRTC_VIDEO_ENGINE_ENCRYPTION_API is not defined.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEExternalCodec ref count: %d",
+                 vie_external_codec->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+  ViEFileImpl* vie_file = vie_impl;
+  if (vie_file->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEFile ref count: %d", vie_file->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+  ViEImageProcessImpl* vie_image_process = vie_impl;
+  if (vie_image_process->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEImageProcess ref count: %d",
+                 vie_image_process->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+  ViENetworkImpl* vie_network = vie_impl;
+  if (vie_network->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViENetwork ref count: %d", vie_network->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+  ViERenderImpl* vie_render = vie_impl;
+  if (vie_render->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViERender ref count: %d", vie_render->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+  ViERTP_RTCPImpl* vie_rtp_rtcp = vie_impl;
+  if (vie_rtp_rtcp->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViERTP_RTCP ref count: %d", vie_rtp_rtcp->GetCount());
+    return false;
+  }
+#endif
+
+  delete vie_impl;
+  vie_impl = NULL;
+  video_engine = NULL;
+
+  // Decrease the number of instances.
+  g_vie_active_instance_counter--;
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, g_vie_active_instance_counter,
+               "%s: instance deleted. Remaining instances: %d", __FUNCTION__,
+               g_vie_active_instance_counter);
+  return true;
+}
+
+// Directs trace output to |file_nameUTF8|; |add_file_counter| is forwarded
+// to the trace module (presumably appending a counter to the file name —
+// see Trace::SetTraceFile). Returns 0 on success, -1 on a NULL name or
+// trace-module failure.
+int VideoEngine::SetTraceFile(const char* file_nameUTF8,
+                              const bool add_file_counter) {
+  if (!file_nameUTF8) {
+    return -1;
+  }
+  if (Trace::SetTraceFile(file_nameUTF8, add_file_counter) == -1) {
+    return -1;
+  }
+  // Fixed: the log format string was missing its closing parenthesis.
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetTraceFileName(file_nameUTF8 = %s, add_file_counter = %d)",
+               file_nameUTF8, add_file_counter);
+  return 0;
+}
+
+// Updates the global trace level filter. Returns 0 on success, -1 if the
+// trace module rejects the new filter.
+int VideoEngine::SetTraceFilter(const unsigned int filter) {
+  WebRtc_UWord32 current_filter = 0;
+  Trace::LevelFilter(current_filter);
+
+  if (filter == kTraceNone && current_filter != kTraceNone) {
+    // Tracing is about to be switched off entirely; log the API call while
+    // the old filter still lets it through.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+                 "SetTraceFilter(filter = 0x%x)", filter);
+  }
+
+  WebRtc_Word32 set_result = Trace::SetLevelFilter(filter);
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetTraceFilter(filter = 0x%x)", filter);
+  if (set_result != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "SetTraceFilter error: %d", set_result);
+    return -1;
+  }
+  return 0;
+}
+
+// Installs |callback| as the receiver of trace output; thin wrapper around
+// Trace::SetTraceCallback, whose return value is passed through.
+int VideoEngine::SetTraceCallback(TraceCallback* callback) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetTraceCallback(TraceCallback = 0x%p)", callback);
+  return Trace::SetTraceCallback(callback);
+}
+
+// Hands the JavaVM and application context to the capture and render
+// modules. Only meaningful on Android; on other platforms this logs an
+// error and returns -1. Returns 0 on success.
+int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetAndroidObjects()");
+
+#ifdef WEBRTC_ANDROID
+  if (VideoCaptureFactory::SetAndroidObjects(javaVM, javaContext) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "Could not set capture module Android objects");
+    return -1;
+  }
+  // Note: the render module only needs the VM, not the context.
+  if (VideoRender::SetAndroidObjects(javaVM) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "Could not set render module Android objects");
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+               "WEBRTC_ANDROID not defined for VideoEngine::SetAndroidObjects");
+  return -1;
+#endif
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_impl.h b/trunk/src/video_engine/vie_impl.h
new file mode 100644
index 0000000..3141868
--- /dev/null
+++ b/trunk/src/video_engine/vie_impl.h
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_IMPL_H_
+
+#include "engine_configurations.h"
+#include "video_engine/vie_defines.h"
+
+#include "video_engine/vie_base_impl.h"
+
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+#include "video_engine/vie_capture_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+#include "video_engine/vie_codec_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+#include "video_engine/vie_encryption_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+#include "video_engine/vie_file_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+#include "video_engine/vie_image_process_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+#include "video_engine/vie_network_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+#include "video_engine/vie_render_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+#include "video_engine/vie_rtp_rtcp_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+#include "video_engine/vie_external_codec_impl.h"
+#endif
+
+namespace webrtc {
+
+// Aggregates every compiled-in ViE sub-API implementation into one object
+// via multiple inheritance. VideoEngine* handles handed out by
+// GetVideoEngine() are reinterpret_cast from this type, and Delete() casts
+// back to walk the per-API reference counters.
+class VideoEngineImpl
+    : public ViEBaseImpl
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+      , public ViECodecImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+      , public ViECaptureImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+      , public ViEEncryptionImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+      , public ViEFileImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+      , public ViEImageProcessImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+      , public ViENetworkImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+      , public ViERenderImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+      , public ViERTP_RTCPImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+      , public ViEExternalCodecImpl
+#endif
+{  // NOLINT
+ public:
+  // Every sub-API is constructed with the single ViESharedData instance
+  // owned by ViEBaseImpl.
+  // NOTE(review): the initializer list chains commas across #ifdef blocks;
+  // it only parses if WEBRTC_VIDEO_ENGINE_CODEC_API is defined whenever any
+  // later API is — confirm the build configurations guarantee this.
+  VideoEngineImpl()
+      :
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+        ViECodecImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+        , ViECaptureImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+        , ViEEncryptionImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+        , ViEFileImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+        , ViEImageProcessImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+        , ViENetworkImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+        , ViERenderImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+        , ViERTP_RTCPImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+        , ViEExternalCodecImpl(ViEBaseImpl::shared_data())
+#endif
+  {}
+  virtual ~VideoEngineImpl() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_IMPL_H_
diff --git a/trunk/src/video_engine/vie_input_manager.cc b/trunk/src/video_engine/vie_input_manager.cc
new file mode 100644
index 0000000..98c5535
--- /dev/null
+++ b/trunk/src/video_engine/vie_input_manager.cc
@@ -0,0 +1,600 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_input_manager.h"
+
+#include <cassert>
+
+#include "common_types.h"
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/rw_lock_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_file_player.h"
+
+namespace webrtc {
+
+// Constructs the input manager for one engine: marks every capture-device
+// and file-player id slot as free and creates the capture DeviceInfo used
+// for device enumeration.
+ViEInputManager::ViEInputManager(const int engine_id)
+    : engine_id_(engine_id),
+      map_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      vie_frame_provider_map_(),
+      capture_device_info_(NULL),
+      module_process_thread_(NULL) {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+
+  for (int i = 0; i < kViEMaxCaptureDevices; ++i) {
+    free_capture_device_id_[i] = true;
+  }
+  for (int i = 0; i < kViEMaxFilePlayers; ++i) {
+    free_file_id_[i] = true;
+  }
+  capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
+      ViEModuleId(engine_id_));
+}
+
+// Tears down all remaining frame providers still registered in the map,
+// then the device-info helper.
+ViEInputManager::~ViEInputManager() {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+  MapItem* item = vie_frame_provider_map_.First();
+  while (item != NULL) {
+    ViEFrameProviderBase* provider =
+        static_cast<ViEFrameProviderBase*>(item->GetItem());
+    vie_frame_provider_map_.Erase(item);
+    delete provider;
+    item = vie_frame_provider_map_.First();
+  }
+
+  // delete of NULL is a no-op, so no guard is needed.
+  delete capture_device_info_;
+  capture_device_info_ = NULL;
+}
+// Stores the process thread later handed to created capture devices.
+// May only be called once per instance (asserted below).
+void ViEInputManager::SetModuleProcessThread(
+    ProcessThread& module_process_thread) {
+  assert(!module_process_thread_);  // Must not already be set.
+  module_process_thread_ = &module_process_thread;
+}
+
+// Returns the capture module's current device count.
+int ViEInputManager::NumberOfCaptureDevices() {
+  assert(capture_device_info_);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  return capture_device_info_->NumberOfDevices();
+}
+
+// Fetches the display name and unique id of capture device |device_number|.
+// Thin wrapper around the capture module's DeviceInfo; its return value is
+// passed through unchanged.
+int ViEInputManager::GetDeviceName(WebRtc_UWord32 device_number,
+                                   WebRtc_UWord8* device_nameUTF8,
+                                   WebRtc_UWord32 device_name_length,
+                                   WebRtc_UWord8* device_unique_idUTF8,
+                                   WebRtc_UWord32 device_unique_idUTF8Length) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_number: %d)", __FUNCTION__, device_number);
+  assert(capture_device_info_);
+  return capture_device_info_->GetDeviceName(
+      device_number, device_nameUTF8, device_name_length,
+      device_unique_idUTF8, device_unique_idUTF8Length);
+}
+
+// Returns the number of capture capabilities reported for the given device.
+// Fixed: the parameter continuation line used a 2-space indent, unlike the
+// 4-space style of the surrounding functions.
+int ViEInputManager::NumberOfCaptureCapabilities(
+    const WebRtc_UWord8* device_unique_idUTF8) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  assert(capture_device_info_);
+  return capture_device_info_->NumberOfCapabilities(device_unique_idUTF8);
+}
+
+// Retrieves capability |device_capability_number| for the given device and
+// converts it from the capture-module struct to the public API struct.
+// Returns the capture module's result code (0 on success).
+int ViEInputManager::GetCaptureCapability(
+    const WebRtc_UWord8* device_unique_idUTF8,
+    const WebRtc_UWord32 device_capability_number,
+    CaptureCapability& capability) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_idUTF8: %s, device_capability_number: %d)",
+               __FUNCTION__, device_unique_idUTF8, device_capability_number);
+  assert(capture_device_info_);
+  VideoCaptureCapability module_cap;
+  const int ret = capture_device_info_->GetCapability(
+      device_unique_idUTF8, device_capability_number, module_cap);
+  if (ret != 0) {
+    return ret;
+  }
+
+  // Copy from module type to public type, field by field.
+  capability.width = module_cap.width;
+  capability.height = module_cap.height;
+  capability.maxFPS = module_cap.maxFPS;
+  capability.interlaced = module_cap.interlaced;
+  capability.expectedCaptureDelay = module_cap.expectedCaptureDelay;
+  capability.rawType = module_cap.rawType;
+  capability.codecType = module_cap.codecType;
+  return ret;
+}
+
+// Queries the camera orientation for the given device and translates it to
+// the public RotateCapturedFrame enum. Returns the capture module's result
+// code; on failure |orientation| is left untouched.
+// Fixed: |module_orientation| was previously switched on even when
+// GetOrientation() failed and never wrote it — a read of an uninitialized
+// variable (undefined behavior).
+int ViEInputManager::GetOrientation(const WebRtc_UWord8* device_unique_idUTF8,
+                                    RotateCapturedFrame& orientation) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_idUTF8: %s,)", __FUNCTION__,
+               device_unique_idUTF8);
+  assert(capture_device_info_);
+  VideoCaptureRotation module_orientation;
+  int result = capture_device_info_->GetOrientation(device_unique_idUTF8,
+                                                    module_orientation);
+  if (result != 0) {
+    return result;
+  }
+  // Copy from module type to public type.
+  switch (module_orientation) {
+    case kCameraRotate0:
+      orientation = RotateCapturedFrame_0;
+      break;
+    case kCameraRotate90:
+      orientation = RotateCapturedFrame_90;
+      break;
+    case kCameraRotate180:
+      orientation = RotateCapturedFrame_180;
+      break;
+    case kCameraRotate270:
+      orientation = RotateCapturedFrame_270;
+      break;
+  }
+  return result;
+}
+
+// Opens the platform-specific capture-settings dialog for the given device
+// at the requested position. Delegates directly to the capture module.
+int ViEInputManager::DisplayCaptureSettingsDialogBox(
+    const WebRtc_UWord8* device_unique_idUTF8,
+    const WebRtc_UWord8* dialog_titleUTF8,
+    void* parent_window,
+    WebRtc_UWord32 positionX,
+    WebRtc_UWord32 positionY) {
+  assert(capture_device_info_);
+  return capture_device_info_->DisplayCaptureSettingsDialogBox(
+      device_unique_idUTF8, dialog_titleUTF8, parent_window, positionX,
+      positionY);
+}
+
+// Allocates the capture device identified by |device_unique_idUTF8| and
+// returns its new id in |capture_id|. Fails if the device is already
+// allocated, does not exist, or all capture ids are in use. Returns 0 on
+// success or a kViECaptureDevice* error code (-1 for an over-long id).
+// Fixed: two error paths had misleadingly indented statements.
+int ViEInputManager::CreateCaptureDevice(
+    const WebRtc_UWord8* device_unique_idUTF8,
+    const WebRtc_UWord32 device_unique_idUTF8Length,
+    int& capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_id: %s)", __FUNCTION__, device_unique_idUTF8);
+  CriticalSectionScoped cs(map_cs_.get());
+
+  // Make sure the device is not already allocated.
+  for (MapItem* item = vie_frame_provider_map_.First(); item != NULL;
+       item = vie_frame_provider_map_.Next(item)) {
+    // Make sure this is a capture device.
+    if (item->GetId() >= kViECaptureIdBase &&
+        item->GetId() <= kViECaptureIdMax) {
+      ViECapturer* vie_capture = static_cast<ViECapturer*>(item->GetItem());
+      assert(vie_capture);
+      // TODO(mflodman) Can we change input to avoid this cast?
+      const char* device_name =
+          reinterpret_cast<const char*>(vie_capture->CurrentDeviceName());
+      if (strncmp(device_name,
+                  reinterpret_cast<const char*>(device_unique_idUTF8),
+                  strlen(device_name)) == 0) {
+        return kViECaptureDeviceAlreadyAllocated;
+      }
+    }
+  }
+
+  // Make sure the device name is valid.
+  bool found_device = false;
+  for (WebRtc_UWord32 device_index = 0;
+       device_index < capture_device_info_->NumberOfDevices(); ++device_index) {
+    if (device_unique_idUTF8Length > kVideoCaptureUniqueNameLength) {
+      // User's string length is longer than the max.
+      return -1;
+    }
+
+    WebRtc_UWord8 found_name[kVideoCaptureDeviceNameLength] = "";
+    WebRtc_UWord8 found_unique_name[kVideoCaptureUniqueNameLength] = "";
+    capture_device_info_->GetDeviceName(device_index, found_name,
+                                        kVideoCaptureDeviceNameLength,
+                                        found_unique_name,
+                                        kVideoCaptureUniqueNameLength);
+
+    // TODO(mflodman) Can we change input to avoid this cast?
+    const char* cast_id = reinterpret_cast<const char*>(device_unique_idUTF8);
+    if (strncmp(cast_id, reinterpret_cast<const char*>(found_unique_name),
+                strlen(cast_id)) == 0) {
+      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s:%d Capture device was found by unique ID: %s. Returning",
+                   __FUNCTION__, __LINE__, device_unique_idUTF8);
+      found_device = true;
+      break;
+    }
+  }
+  if (!found_device) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s:%d Capture device NOT found by unique ID: %s. Returning",
+                 __FUNCTION__, __LINE__, device_unique_idUTF8);
+    return kViECaptureDeviceDoesNotExist;
+  }
+
+  int newcapture_id = 0;
+  if (GetFreeCaptureId(newcapture_id) == false) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of capture devices already in "
+                 "use", __FUNCTION__);
+    return kViECaptureDeviceMaxNoDevicesAllocated;
+  }
+  ViECapturer* vie_capture = ViECapturer::CreateViECapture(
+      newcapture_id, engine_id_, device_unique_idUTF8,
+      device_unique_idUTF8Length, *module_process_thread_);
+  if (!vie_capture) {
+    // Creation failed: release the reserved id before bailing out.
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not create capture module for %s", __FUNCTION__,
+                 device_unique_idUTF8);
+    return kViECaptureDeviceUnknownError;
+  }
+
+  if (vie_frame_provider_map_.Insert(newcapture_id, vie_capture) != 0) {
+    // Insertion failed: release the reserved id before bailing out.
+    // NOTE(review): |vie_capture| appears to be leaked on this path —
+    // confirm whether it should be destroyed here.
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert capture module for %s", __FUNCTION__,
+                 device_unique_idUTF8);
+    return kViECaptureDeviceUnknownError;
+  }
+  capture_id = newcapture_id;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_id: %s, capture_id: %d)", __FUNCTION__,
+               device_unique_idUTF8, capture_id);
+  return 0;
+}
+
+// Wraps an externally created |capture_module| as a ViE capture device and
+// returns its new id in |capture_id|. Returns 0 on success or a
+// kViECaptureDevice* error code.
+// Fixed: the ReturnCaptureId call in the first error path was misleadingly
+// indented as if it were outside the block.
+int ViEInputManager::CreateCaptureDevice(VideoCaptureModule& capture_module,
+                                         int& capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(map_cs_.get());
+  int newcapture_id = 0;
+  if (!GetFreeCaptureId(newcapture_id)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of capture devices already in "
+                 "use", __FUNCTION__);
+    return kViECaptureDeviceMaxNoDevicesAllocated;
+  }
+
+  ViECapturer* vie_capture = ViECapturer::CreateViECapture(
+      newcapture_id, engine_id_, capture_module, *module_process_thread_);
+  if (!vie_capture) {
+    // Creation failed: release the reserved id before bailing out.
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could attach capture module.", __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+  if (vie_frame_provider_map_.Insert(newcapture_id, vie_capture) != 0) {
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert capture module", __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+  capture_id = newcapture_id;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s, capture_id: %d", __FUNCTION__, capture_id);
+  return 0;
+}
+
+// Removes the capture device with id |capture_id| from the provider map,
+// frees its id slot and deletes it. Returns 0 on success, -1 if no such
+// device exists.
+int ViEInputManager::DestroyCaptureDevice(const int capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+  ViECapturer* capturer = NULL;
+  {
+    // We need exclusive access to the object to delete it.
+    // Take this write lock first since the read lock is taken before map_cs_.
+    ViEManagerWriteScoped write_lock(*this);
+    CriticalSectionScoped cs(map_cs_.get());
+
+    capturer = ViECapturePtr(capture_id);
+    if (capturer == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s(capture_id: %d) - No such capture device id",
+                   __FUNCTION__, capture_id);
+      return -1;
+    }
+    WebRtc_UWord32 callback_count =
+        capturer->NumberOfRegisteredFrameCallbacks();
+    if (callback_count > 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_), "%s(capture_id: %d) - %u registered "
+                   "callbacks when destroying capture device",
+                   __FUNCTION__, capture_id, callback_count);
+    }
+    vie_frame_provider_map_.Erase(capture_id);
+    ReturnCaptureId(capture_id);
+    // Leave both locks before deleting the capture object, since deleting it
+    // might cause deletions of renderers and we prefer not to hold a lock
+    // at that time.
+  }
+  delete capturer;
+  return 0;
+}
+
+// Creates a capturer fed by the application (no physical device), allocates
+// a capture id and registers it in the frame provider map.
+// On success returns 0 and sets both |external_capture| and |capture_id|.
+int ViEInputManager::CreateExternalCaptureDevice(
+    ViEExternalCapture*& external_capture,
+    int& capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  CriticalSectionScoped cs(map_cs_.get());
+
+  int newcapture_id = 0;
+  if (GetFreeCaptureId(newcapture_id) == false) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of capture devices already in "
+                 "use", __FUNCTION__);
+    return kViECaptureDeviceMaxNoDevicesAllocated;
+  }
+
+  // NULL/0 here presumably select the external-capture code path in
+  // ViECapturer — TODO(review): confirm against ViECapturer::CreateViECapture.
+  ViECapturer* vie_capture = ViECapturer::CreateViECapture(
+      newcapture_id, engine_id_, NULL, 0, *module_process_thread_);
+  if (!vie_capture) {
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not create capture module for external capture.",
+                 __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+
+  if (vie_frame_provider_map_.Insert(newcapture_id, vie_capture) != 0) {
+    // Insertion failed: release the id AND delete the capturer so it is not
+    // leaked (matches the error path in CreateFilePlayer).
+    ReturnCaptureId(newcapture_id);
+    delete vie_capture;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert capture module for external capture.",
+                 __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+  capture_id = newcapture_id;
+  external_capture = vie_capture;
+  // Note: the stray ')' in the original trace format string is removed.
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s, capture_id: %d", __FUNCTION__, capture_id);
+  return 0;
+}
+
+// Opens |file_nameUTF8| as a video frame source, allocates a file id and
+// registers the player in the frame provider map.
+// Returns 0 and sets |file_id| on success, a kViEFile* error code otherwise.
+int ViEInputManager::CreateFilePlayer(const WebRtc_Word8* file_nameUTF8,
+                                      const bool loop,
+                                      const webrtc::FileFormats file_format,
+                                      VoiceEngine* voe_ptr, int& file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_id: %s)", __FUNCTION__, file_nameUTF8);
+
+  CriticalSectionScoped cs(map_cs_.get());
+  int new_file_id = 0;
+  if (GetFreeFileId(new_file_id) == false) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of file players already in use",
+                 __FUNCTION__);
+    return kViEFileMaxNoOfFilesOpened;
+  }
+
+  ViEFilePlayer* vie_file_player = ViEFilePlayer::CreateViEFilePlayer(
+      new_file_id, engine_id_, file_nameUTF8, loop, file_format, *this,
+      voe_ptr);
+  if (!vie_file_player) {
+    ReturnFileId(new_file_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not open file %s for playback", __FUNCTION__,
+                 file_nameUTF8);
+    return kViEFileUnknownError;
+  }
+
+  if (vie_frame_provider_map_.Insert(new_file_id, vie_file_player) != 0) {
+    // BUG FIX: the id allocated above is a *file* id; it must be returned to
+    // the file-id pool, not the capture-id pool (was ReturnCaptureId).
+    ReturnFileId(new_file_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert file player for %s", __FUNCTION__,
+                 file_nameUTF8);
+    delete vie_file_player;
+    return kViEFileUnknownError;
+  }
+
+  file_id = new_file_id;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(filename: %s, file_id: %d)", __FUNCTION__, file_nameUTF8,
+               new_file_id);
+  return 0;
+}
+
+// Removes the file player identified by |file_id| from the provider map,
+// frees its id and deletes the player object.
+// Returns 0 on success, -1 if no such file player exists.
+int ViEInputManager::DestroyFilePlayer(int file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEFilePlayer* vie_file_player = NULL;
+  {
+    // We need exclusive access to the object to delete it.
+    // Take this write lock first since the read lock is taken before map_cs_.
+    ViEManagerWriteScoped wl(*this);
+
+    CriticalSectionScoped cs(map_cs_.get());
+    vie_file_player = ViEFilePlayerPtr(file_id);
+    if (!vie_file_player) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s(file_id: %d) - No such file player", __FUNCTION__,
+                   file_id);
+      return -1;
+    }
+    // Use an unsigned count so the "%u" format specifier below matches its
+    // argument (and to mirror DestroyCaptureDevice).
+    WebRtc_UWord32 num_callbacks =
+        vie_file_player->NumberOfRegisteredFrameCallbacks();
+    if (num_callbacks > 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_), "%s(file_id: %d) - %u registered "
+                   "callbacks when destroying file player", __FUNCTION__,
+                   file_id, num_callbacks);
+    }
+    vie_frame_provider_map_.Erase(file_id);
+    ReturnFileId(file_id);
+    // Leave cs before deleting the file object. This is because deleting the
+    // object might cause deletions of renderers so we prefer to not have a lock
+    // at that time.
+  }
+  delete vie_file_player;
+  return 0;
+}
+
+// Finds and reserves the first free capture device id. On success writes the
+// public id (slot index + kViECaptureIdBase) to |freecapture_id| and returns
+// true; returns false when every slot is taken. Caller must hold map_cs_.
+bool ViEInputManager::GetFreeCaptureId(int& freecapture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  for (int slot = 0; slot < kViEMaxCaptureDevices; ++slot) {
+    if (!free_capture_device_id_[slot])
+      continue;
+    // Claim the slot and translate it into the public id range.
+    free_capture_device_id_[slot] = false;
+    freecapture_id = slot + kViECaptureIdBase;
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: new id: %d", __FUNCTION__, freecapture_id);
+    return true;
+  }
+  // Every capture id is currently allocated.
+  return false;
+}
+
+// Marks |capture_id| as available again. Ids outside the capture range are
+// silently ignored.
+void ViEInputManager::ReturnCaptureId(int capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(%d)", __FUNCTION__, capture_id);
+  CriticalSectionScoped cs(map_cs_.get());
+  const bool valid = capture_id >= kViECaptureIdBase &&
+                     capture_id < kViEMaxCaptureDevices + kViECaptureIdBase;
+  if (valid) {
+    free_capture_device_id_[capture_id - kViECaptureIdBase] = true;
+  }
+}
+
+// Finds and reserves a free file player id. On success writes the public id
+// (slot index + kViEFileIdBase) to |free_file_id| and returns true; returns
+// false when all kViEMaxFilePlayers slots are taken. Caller holds map_cs_.
+bool ViEInputManager::GetFreeFileId(int& free_file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+
+  for (int id = 0; id < kViEMaxFilePlayers; id++) {
+    if (free_file_id_[id]) {
+      // We found a free file player id.
+      free_file_id_[id] = false;
+      free_file_id = id + kViEFileIdBase;
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: new id: %d", __FUNCTION__, free_file_id);
+      return true;
+    }
+  }
+  return false;
+}
+
+// Marks |file_id| as available again. Ids outside the file player range are
+// silently ignored.
+void ViEInputManager::ReturnFileId(int file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(%d)", __FUNCTION__, file_id);
+
+  CriticalSectionScoped cs(map_cs_.get());
+  const bool valid = file_id >= kViEFileIdBase &&
+                     file_id < kViEMaxFilePlayers + kViEFileIdBase;
+  if (valid) {
+    free_file_id_[file_id - kViEFileIdBase] = true;
+  }
+}
+
+// Returns the frame provider that has |capture_observer| registered as a
+// frame callback, or NULL when no provider serves this observer.
+ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(
+    const ViEFrameCallback* capture_observer) const {
+  assert(capture_observer);
+  CriticalSectionScoped cs(map_cs_.get());
+
+  // Linear scan over all registered providers.
+  MapItem* item = vie_frame_provider_map_.First();
+  while (item != NULL) {
+    ViEFrameProviderBase* provider =
+        static_cast<ViEFrameProviderBase*>(item->GetItem());
+    assert(provider != NULL);
+    if (provider->IsFrameCallbackRegistered(capture_observer)) {
+      // We found it.
+      return provider;
+    }
+    item = vie_frame_provider_map_.Next(item);
+  }
+  // No capture device set for this channel.
+  return NULL;
+}
+
+// Looks up the frame provider registered under |provider_id|; returns NULL
+// when the id is unknown.
+ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(int provider_id) const {
+  CriticalSectionScoped cs(map_cs_.get());
+  MapItem* map_item = vie_frame_provider_map_.Find(provider_id);
+  if (map_item == NULL) {
+    return NULL;
+  }
+  return static_cast<ViEFrameProviderBase*>(map_item->GetItem());
+}
+
+// Looks up the capturer registered under |capture_id|; returns NULL for ids
+// outside the capture range or not present in the map.
+ViECapturer* ViEInputManager::ViECapturePtr(int capture_id) const {
+  // Valid ids are [kViECaptureIdBase, kViECaptureIdBase +
+  // kViEMaxCaptureDevices). The original used '<=', accepting one id past the
+  // range; use '<' to match the bound checked by ReturnCaptureId.
+  if (!(capture_id >= kViECaptureIdBase &&
+        capture_id < kViECaptureIdBase + kViEMaxCaptureDevices))
+    return NULL;
+
+  CriticalSectionScoped cs(map_cs_.get());
+  MapItem* map_item = vie_frame_provider_map_.Find(capture_id);
+  if (!map_item) {
+    return NULL;
+  }
+  ViECapturer* vie_capture = static_cast<ViECapturer*>(map_item->GetItem());
+  return vie_capture;
+}
+
+// Copies every (id, provider) pair from the internal frame provider map into
+// the caller-supplied |vie_capture_map|.
+void ViEInputManager::GetViECaptures(MapWrapper& vie_capture_map) {
+  CriticalSectionScoped cs(map_cs_.get());
+
+  // Nothing to copy when no providers are registered.
+  if (vie_frame_provider_map_.Size() == 0) {
+    return;
+  }
+  MapItem* item = vie_frame_provider_map_.First();
+  while (item != NULL) {
+    vie_capture_map.Insert(item->GetId(), item->GetItem());
+    item = vie_frame_provider_map_.Next(item);
+  }
+}
+
+// Looks up the file player registered under |file_id|; returns NULL for ids
+// outside [kViEFileIdBase, kViEFileIdMax] or not present in the map.
+ViEFilePlayer* ViEInputManager::ViEFilePlayerPtr(int file_id) const {
+  if (file_id < kViEFileIdBase || file_id > kViEFileIdMax) {
+    return NULL;
+  }
+  CriticalSectionScoped cs(map_cs_.get());
+  MapItem* map_item = vie_frame_provider_map_.Find(file_id);
+  if (map_item == NULL) {
+    return NULL;
+  }
+  return static_cast<ViEFilePlayer*>(map_item->GetItem());
+}
+
+// Scoped accessor: holds a read lock on the input manager (via
+// ViEManagerScopedBase) for its lifetime.
+ViEInputManagerScoped::ViEInputManagerScoped(
+    const ViEInputManager& vie_input_manager)
+    : ViEManagerScopedBase(vie_input_manager) {
+}
+
+// Forwards to ViEInputManager::ViECapturePtr under the held read lock.
+ViECapturer* ViEInputManagerScoped::Capture(int capture_id) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViECapturePtr(
+      capture_id);
+}
+
+// Forwards to the observer-based ViEFrameProvider lookup.
+ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(
+    const ViEFrameCallback* capture_observer) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFrameProvider(
+      capture_observer);
+}
+
+// Forwards to the id-based ViEFrameProvider lookup.
+ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(
+    int provider_id) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFrameProvider(
+      provider_id);
+}
+
+// Forwards to ViEInputManager::ViEFilePlayerPtr under the held read lock.
+ViEFilePlayer* ViEInputManagerScoped::FilePlayer(int file_id) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFilePlayerPtr(
+      file_id);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_input_manager.h b/trunk/src/video_engine/vie_input_manager.h
new file mode 100644
index 0000000..c70a2ee
--- /dev/null
+++ b/trunk/src/video_engine/vie_input_manager.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
+
+#include "modules/video_capture/main/interface/video_capture.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class RWLockWrapper;
+class ViECapturer;
+class ViEExternalCapture;
+class ViEFilePlayer;
+class VoiceEngine;
+
+// Owns and tracks every video input source of one engine instance: physical
+// capture devices, external (application-fed) capture and file players.
+// Hands out ids from fixed-size pools and maps each id to its frame provider.
+class ViEInputManager : private ViEManagerBase {
+  friend class ViEInputManagerScoped;
+ public:
+  explicit ViEInputManager(int engine_id);
+  ~ViEInputManager();
+
+  void SetModuleProcessThread(ProcessThread& module_process_thread);
+
+  // Returns number of capture devices.
+  int NumberOfCaptureDevices();
+
+  // Gets name and id for a capture device.
+  int GetDeviceName(WebRtc_UWord32 device_number,
+                    WebRtc_UWord8* device_nameUTF8,
+                    WebRtc_UWord32 device_name_length,
+                    WebRtc_UWord8* device_unique_idUTF8,
+                    WebRtc_UWord32 device_unique_idUTF8Length);
+
+  // Returns the number of capture capabilities for a specified device.
+  int NumberOfCaptureCapabilities(const WebRtc_UWord8* device_unique_idUTF8);
+
+  // Gets a specific capability for a capture device.
+  int GetCaptureCapability(const WebRtc_UWord8* device_unique_idUTF8,
+                           const WebRtc_UWord32 device_capability_number,
+                           CaptureCapability& capability);
+
+  // Show OS specific Capture settings.
+  int DisplayCaptureSettingsDialogBox(const WebRtc_UWord8* device_unique_idUTF8,
+                                      const WebRtc_UWord8* dialog_titleUTF8,
+                                      void* parent_window,
+                                      WebRtc_UWord32 positionX,
+                                      WebRtc_UWord32 positionY);
+  int GetOrientation(const WebRtc_UWord8* device_unique_idUTF8,
+                     RotateCapturedFrame& orientation);
+
+  // Creates a capture module for the specified capture device and assigns
+  // a capture device id for the device.
+  // Return zero on success, ViEError on failure.
+  int CreateCaptureDevice(const WebRtc_UWord8* device_unique_idUTF8,
+                          const WebRtc_UWord32 device_unique_idUTF8Length,
+                          int& capture_id);
+  int CreateCaptureDevice(VideoCaptureModule& capture_module,
+                          int& capture_id);
+  int CreateExternalCaptureDevice(ViEExternalCapture*& external_capture,
+                                  int& capture_id);
+  int DestroyCaptureDevice(int capture_id);
+
+  int CreateFilePlayer(const WebRtc_Word8* file_nameUTF8, const bool loop,
+                       const FileFormats file_format,
+                       VoiceEngine* voe_ptr,
+                       int& file_id);
+  int DestroyFilePlayer(int file_id);
+
+ private:
+  // Gets and allocates a free capture device id. Assumed protected by caller.
+  bool GetFreeCaptureId(int& freecapture_id);
+
+  // Frees a capture id assigned in GetFreeCaptureId.
+  void ReturnCaptureId(int capture_id);
+
+  // Gets and allocates a free file id. Assumed protected by caller.
+  bool GetFreeFileId(int& free_file_id);
+
+  // Frees a file id assigned in GetFreeFileId.
+  void ReturnFileId(int file_id);
+
+  // Gets the ViEFrameProvider for this capture observer.
+  ViEFrameProviderBase* ViEFrameProvider(
+      const ViEFrameCallback* capture_observer) const;
+
+  // Gets the ViEFrameProvider for this capture observer.
+  ViEFrameProviderBase* ViEFrameProvider(int provider_id) const;
+
+  // Gets the ViECapturer for the capture device id.
+  ViECapturer* ViECapturePtr(int capture_id) const;
+
+  // Copies the entire id -> frame provider map into |vie_capture_map|.
+  void GetViECaptures(MapWrapper& vie_capture_map);
+
+  // Gets the ViEFilePlayer for this file_id.
+  ViEFilePlayer* ViEFilePlayerPtr(int file_id) const;
+
+  int engine_id_;
+  // Protects vie_frame_provider_map_ and the id pools below.
+  scoped_ptr<CriticalSectionWrapper> map_cs_;
+  MapWrapper vie_frame_provider_map_;
+
+  // Capture devices. Each entry is used as a boolean flag: non-zero means the
+  // corresponding id (offset by kViECaptureIdBase) is free.
+  VideoCaptureModule::DeviceInfo* capture_device_info_;
+  int free_capture_device_id_[kViEMaxCaptureDevices];
+
+  // File Players. Same free-flag convention, offset by kViEFileIdBase.
+  int free_file_id_[kViEMaxFilePlayers];
+
+  ProcessThread* module_process_thread_;
+};
+
+// Provides protected (read-locked) access to ViEInputManager. All lookups go
+// through the base class's read lock, so returned pointers stay valid for the
+// lifetime of the scope object.
+class ViEInputManagerScoped: private ViEManagerScopedBase {
+ public:
+  explicit ViEInputManagerScoped(const ViEInputManager& vie_input_manager);
+
+  ViECapturer* Capture(int capture_id) const;
+  ViEFilePlayer* FilePlayer(int file_id) const;
+  ViEFrameProviderBase* FrameProvider(int provider_id) const;
+  ViEFrameProviderBase* FrameProvider(const ViEFrameCallback*
+                                      capture_observer) const;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
diff --git a/trunk/src/video_engine/vie_manager_base.cc b/trunk/src/video_engine/vie_manager_base.cc
new file mode 100644
index 0000000..7e6695c
--- /dev/null
+++ b/trunk/src/video_engine/vie_manager_base.cc
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "assert.h"
+
+#include "system_wrappers/interface/rw_lock_wrapper.h"
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+// The RW lock is heap-allocated by the factory and stored as a reference,
+// hence the 'delete &...' in the destructor.
+ViEManagerBase::ViEManagerBase()
+    : instance_rwlock_(*RWLockWrapper::CreateRWLock()) {
+}
+
+ViEManagerBase::~ViEManagerBase() {
+  delete &instance_rwlock_;
+}
+
+// Shared (read) lock; used by ViEManagerScopedBase.
+void ViEManagerBase::ReadLockManager() const {
+  instance_rwlock_.AcquireLockShared();
+}
+
+void ViEManagerBase::ReleaseLockManager() const {
+  instance_rwlock_.ReleaseLockShared();
+}
+
+// Exclusive (write) lock; used by ViEManagerWriteScoped.
+void ViEManagerBase::WriteLockManager() {
+  instance_rwlock_.AcquireLockExclusive();
+}
+
+void ViEManagerBase::ReleaseWriteLockManager() {
+  instance_rwlock_.ReleaseLockExclusive();
+}
+
+// Holds a shared (read) lock on the manager for the scope's lifetime.
+ViEManagerScopedBase::ViEManagerScopedBase(const ViEManagerBase& ViEManagerBase)
+    : vie_manager_(&ViEManagerBase),
+      ref_count_(0) {
+  vie_manager_->ReadLockManager();
+}
+
+ViEManagerScopedBase::~ViEManagerScopedBase() {
+  // No ViEManagedItemScopedBase may outlive the scope it references.
+  assert(ref_count_ == 0);
+  vie_manager_->ReleaseLockManager();
+}
+
+// Holds the exclusive (write) lock on the manager for the scope's lifetime.
+ViEManagerWriteScoped::ViEManagerWriteScoped(ViEManagerBase& vie_manager)
+    : vie_manager_(&vie_manager) {
+  vie_manager_->WriteLockManager();
+}
+
+ViEManagerWriteScoped::~ViEManagerWriteScoped() {
+  vie_manager_->ReleaseWriteLockManager();
+}
+
+// Tracks an item borrowed from a scoped manager: bumps the scope's ref count
+// so the scope can assert all items were released before it unlocks.
+ViEManagedItemScopedBase::ViEManagedItemScopedBase(
+    ViEManagerScopedBase& vie_scoped_manager)
+    : vie_scoped_manager_(vie_scoped_manager) {
+  vie_scoped_manager_.ref_count_++;
+}
+
+ViEManagedItemScopedBase::~ViEManagedItemScopedBase() {
+  vie_scoped_manager_.ref_count_--;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_manager_base.h b/trunk/src/video_engine/vie_manager_base.h
new file mode 100644
index 0000000..44cf40f
--- /dev/null
+++ b/trunk/src/video_engine/vie_manager_base.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
+
+namespace webrtc {
+
+class RWLockWrapper;
+
+// Base class giving derived managers a readers/writer lock. Locking is only
+// reachable through the friend scope classes, which enforce RAII usage.
+class ViEManagerBase {
+  friend class ViEManagerScopedBase;
+  friend class ViEManagedItemScopedBase;
+  friend class ViEManagerWriteScoped;
+ public:
+  ViEManagerBase();
+  ~ViEManagerBase();
+
+ private:
+  // Exclusive lock, used by ViEManagerWriteScoped.
+  void WriteLockManager();
+
+  // Releases exclusive lock, used by ViEManagerWriteScoped.
+  void ReleaseWriteLockManager();
+
+  // Increases lock count, used by ViEManagerScopedBase.
+  void ReadLockManager() const;
+
+  // Releases the lock count, used by ViEManagerScopedBase.
+  void ReleaseLockManager() const;
+
+  // Owned; created in the constructor, deleted in the destructor.
+  RWLockWrapper& instance_rwlock_;
+};
+
+// RAII holder of a manager's exclusive (write) lock.
+class ViEManagerWriteScoped {
+ public:
+  explicit ViEManagerWriteScoped(ViEManagerBase& vie_manager);
+  ~ViEManagerWriteScoped();
+
+ private:
+  ViEManagerBase* vie_manager_;
+};
+
+// RAII holder of a manager's shared (read) lock. ref_count_ counts live
+// ViEManagedItemScopedBase objects borrowing items through this scope; the
+// destructor asserts it has returned to zero.
+class ViEManagerScopedBase {
+  friend class ViEManagedItemScopedBase;
+ public:
+  explicit ViEManagerScopedBase(const ViEManagerBase& vie_manager);
+  ~ViEManagerScopedBase();
+
+ protected:
+  const ViEManagerBase* vie_manager_;
+
+ private:
+  int ref_count_;
+};
+
+// RAII token for an item borrowed via a ViEManagerScopedBase; increments the
+// scope's reference count on construction, decrements on destruction.
+class ViEManagedItemScopedBase {
+ public:
+  explicit ViEManagedItemScopedBase(ViEManagerScopedBase& vie_scoped_manager);
+  ~ViEManagedItemScopedBase();
+ protected:
+  ViEManagerScopedBase& vie_scoped_manager_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
diff --git a/trunk/src/video_engine/vie_network_impl.cc b/trunk/src/video_engine/vie_network_impl.cc
new file mode 100644
index 0000000..b4bb0e9
--- /dev/null
+++ b/trunk/src/video_engine/vie_network_impl.cc
@@ -0,0 +1,798 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_network_impl.h"
+
+#include <stdio.h>
+// NOTE(review): the original tested WIN32_/WIN64_, macros no compiler
+// defines, so <qos.h> was never included. The standard predefined macros are
+// _WIN32/_WIN64 — confirm other guards in this file agree before landing.
+#if (defined(_WIN32) || defined(_WIN64))
+#include <qos.h>
+#endif
+
+#include "engine_configurations.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Returns the ViENetwork sub-API of |video_engine| and increases its
+// reference count; the caller must balance with Release(). Returns NULL when
+// the network API is compiled out or |video_engine| is NULL.
+ViENetwork* ViENetwork::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViENetworkImpl* vie_networkImpl = vie_impl;
+  // Increase ref count.
+  (*vie_networkImpl)++;
+  return vie_networkImpl;
+#else
+  return NULL;
+#endif
+}
+
+// Decreases the interface reference count taken in GetInterface.
+// Returns the remaining count, or -1 (with kViEAPIDoesNotExist set) when
+// Release is called more times than GetInterface.
+int ViENetworkImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViENetwork::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViENetwork release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViENetwork reference count: %d", ref_count);
+  return ref_count;
+}
+
+// Keeps a non-owning pointer to the engine-wide shared data; lifetime is
+// managed by the enclosing VideoEngineImpl.
+ViENetworkImpl::ViENetworkImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViENetworkImpl::ViENetworkImpl() Ctor");
+}
+
+ViENetworkImpl::~ViENetworkImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViENetworkImpl::~ViENetworkImpl() Dtor");
+}
+
+// Configures the local RTP/RTCP receive ports (and optional bind address) for
+// |video_channel|. Fails if the engine is uninitialized, the channel is
+// unknown, or the channel is already receiving. Returns 0 on success, -1 on
+// failure with the specific error stored via SetLastError.
+int ViENetworkImpl::SetLocalReceiver(const int video_channel,
+                                     const unsigned short rtp_port,
+                                     const unsigned short rtcp_port,
+                                     const char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, rtp_port: %u, rtcp_port: %u, ip_address: %s)",
+               __FUNCTION__, video_channel, rtp_port, rtcp_port, ip_address);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    // The channel doesn't exist.
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+
+  // The receiver cannot be reconfigured while the channel is receiving.
+  if (vie_channel->Receiving()) {
+    shared_data_->SetLastError(kViENetworkAlreadyReceiving);
+    return -1;
+  }
+  if (vie_channel->SetLocalReceiver(rtp_port, rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the configured local RTP/RTCP ports and bind address for
+// |video_channel|. Returns 0 on success; -1 when the channel is unknown or no
+// local receiver has been set (error code stored via SetLastError).
+int ViENetworkImpl::GetLocalReceiver(const int video_channel,
+                                     unsigned short& rtp_port,
+                                     unsigned short& rtcp_port,
+                                     char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel_ptr = cs.Channel(video_channel);
+  if (channel_ptr == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  const int result =
+      channel_ptr->GetLocalReceiver(rtp_port, rtcp_port, ip_address);
+  if (result != 0) {
+    shared_data_->SetLastError(kViENetworkLocalReceiverNotSet);
+    return -1;
+  }
+  return 0;
+}
+
+// Sets the remote RTP/RTCP destination (and local source ports) for
+// |video_channel|. Fails if the engine is uninitialized, the channel is
+// unknown, or the channel is already sending. Returns 0 on success, -1 on
+// failure with the error code stored via SetLastError.
+int ViENetworkImpl::SetSendDestination(const int video_channel,
+                                       const char* ip_address,
+                                       const unsigned short rtp_port,
+                                       const unsigned short rtcp_port,
+                                       const unsigned short source_rtp_port,
+                                       const unsigned short source_rtcp_port) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, ip_address: %s, rtp_port: %u, rtcp_port: %u, "
+               "sourceRtpPort: %u, source_rtcp_port: %u)",
+               __FUNCTION__, video_channel, ip_address, rtp_port, rtcp_port,
+               source_rtp_port, source_rtcp_port);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel doesn't exist", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  // The destination cannot change while the channel is transmitting.
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel already sending.", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkAlreadySending);
+    return -1;
+  }
+  if (vie_channel->SetSendDestination(ip_address, rtp_port, rtcp_port,
+                                          source_rtp_port,
+                                          source_rtcp_port) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the configured send destination of |video_channel|. Returns 0 on
+// success; -1 when the channel is unknown or no destination has been set
+// (error code stored via SetLastError).
+int ViENetworkImpl::GetSendDestination(const int video_channel,
+                                       char* ip_address,
+                                       unsigned short& rtp_port,
+                                       unsigned short& rtcp_port,
+                                       unsigned short& source_rtp_port,
+                                       unsigned short& source_rtcp_port) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSendDestination(ip_address, rtp_port, rtcp_port,
+                                          source_rtp_port,
+                                          source_rtcp_port) != 0) {
+    shared_data_->SetLastError(kViENetworkDestinationNotSet);
+    return -1;
+  }
+  return 0;
+}
+
+// Registers a user-provided Transport for outgoing packets on
+// |video_channel|, replacing the built-in socket transport. Fails if the
+// engine is uninitialized, the channel is unknown, or the channel is already
+// sending. Returns 0 on success, -1 on failure (error via SetLastError).
+int ViENetworkImpl::RegisterSendTransport(const int video_channel,
+                                          Transport& transport) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel doesn't exist", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  // The transport cannot be swapped while the channel is transmitting.
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel already sending.", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkAlreadySending);
+    return -1;
+  }
+  if (vie_channel->RegisterSendTransport(transport) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Removes a previously registered external Transport from |video_channel|.
+// Fails if the channel is unknown or currently sending. Returns 0 on success,
+// -1 on failure (error code stored via SetLastError).
+int ViENetworkImpl::DeregisterSendTransport(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel_ptr = cs.Channel(video_channel);
+  if (channel_ptr == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel doesn't exist", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  // A transport cannot be removed while the channel is transmitting.
+  if (channel_ptr->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel already sending", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkAlreadySending);
+    return -1;
+  }
+  if (channel_ptr->DeregisterSendTransport() != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::ReceivedRTPPacket(const int video_channel, const void* data,
+                                      const int length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, data: -, length: %d)", __FUNCTION__,
+               video_channel, length);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    // The channel doesn't exists
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  return vie_channel->ReceivedRTPPacket(data, length);
+}
+
+int ViENetworkImpl::ReceivedRTCPPacket(const int video_channel,
+                                       const void* data, const int length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, data: -, length: %d)", __FUNCTION__,
+               video_channel, length);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  return vie_channel->ReceivedRTCPPacket(data, length);
+}
+
+int ViENetworkImpl::GetSourceInfo(const int video_channel,
+                                  unsigned short& rtp_port,
+                                  unsigned short& rtcp_port, char* ip_address,
+                                  unsigned int ip_address_length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSourceInfo(rtp_port, rtcp_port, ip_address,
+                                 ip_address_length) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetLocalIP(char ip_address[64], bool ipv6) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s( ip_address, ipV6: %d)", __FUNCTION__, ipv6);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  if (!ip_address) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: No argument", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidArgument);
+    return -1;
+  }
+
+  WebRtc_UWord8 num_socket_threads = 1;
+  UdpTransport* socket_transport = UdpTransport::Create(
+      ViEModuleId(shared_data_->instance_id(), -1), num_socket_threads);
+
+  if (!socket_transport) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Could not create socket module", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+
+  char local_ip_address[64];
+  if (ipv6) {
+    char local_ip[16];
+    if (socket_transport->LocalHostAddressIPV6(local_ip) != 0) {
+      UdpTransport::Destroy(socket_transport);
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: Could not get local IP", __FUNCTION__);
+      shared_data_->SetLastError(kViENetworkUnknownError);
+      return -1;
+    }
+    // Convert 128-bit address to character string (a:b:c:d:e:f:g:h).
+    sprintf(local_ip_address,
+            "%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:"
+            "%.2x%.2x",
+            local_ip[0], local_ip[1], local_ip[2], local_ip[3], local_ip[4],
+            local_ip[5], local_ip[6], local_ip[7], local_ip[8], local_ip[9],
+            local_ip[10], local_ip[11], local_ip[12], local_ip[13],
+            local_ip[14], local_ip[15]);
+  } else {
+    WebRtc_UWord32 local_ip = 0;
+    if (socket_transport->LocalHostAddress(local_ip) != 0) {
+      UdpTransport::Destroy(socket_transport);
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: Could not get local IP", __FUNCTION__);
+      shared_data_->SetLastError(kViENetworkUnknownError);
+      return -1;
+    }
+    // Convert 32-bit address to character string (x.y.z.w).
+    sprintf(local_ip_address, "%d.%d.%d.%d",
+            static_cast<int>((local_ip >> 24) & 0x0ff),
+            static_cast<int>((local_ip >> 16) & 0x0ff),
+            static_cast<int>((local_ip >> 8) & 0x0ff),
+            static_cast<int>(local_ip & 0x0ff));
+  }
+  strcpy(ip_address, local_ip_address);
+  UdpTransport::Destroy(socket_transport);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s: local ip = %s", __FUNCTION__, local_ip_address);
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s: not available for external transport", __FUNCTION__);
+
+  return -1;
+#endif
+}
+
+int ViENetworkImpl::EnableIPv6(int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->EnableIPv6() != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+bool ViENetworkImpl::IsIPv6Enabled(int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return false;
+  }
+  return vie_channel->IsIPv6Enabled();
+}
+
+int ViENetworkImpl::SetSourceFilter(const int video_channel,
+                                    const unsigned short rtp_port,
+                                    const unsigned short rtcp_port,
+                                    const char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, rtp_port: %u, rtcp_port: %u, ip_address: %s)",
+               __FUNCTION__, video_channel, rtp_port, rtcp_port, ip_address);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetSourceFilter(rtp_port, rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetSourceFilter(const int video_channel,
+                                    unsigned short& rtp_port,
+                                    unsigned short& rtcp_port,
+                                    char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSourceFilter(rtp_port, rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetSendToS(const int video_channel, const int DSCP,
+                               const bool use_set_sockOpt = false) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, DSCP: %d, use_set_sockOpt: %d)", __FUNCTION__,
+               video_channel, DSCP, use_set_sockOpt);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "   force use_set_sockopt=true since there is no alternative"
+               " implementation");
+  if (vie_channel->SetToS(DSCP, true) != 0) {
+#else
+  if (vie_channel->SetToS(DSCP, use_set_sockOpt) != 0) {
+#endif
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetSendToS(const int video_channel, int& DSCP,
+                               bool& use_set_sockOpt) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetToS((WebRtc_Word32&) DSCP, use_set_sockOpt) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetSendGQoS(const int video_channel, const bool enable,
+                                const int service_type,
+                                const int overrideDSCP) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, service_type: %d, "
+               "overrideDSCP: %d)", __FUNCTION__, video_channel, enable,
+               service_type, overrideDSCP);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+#if (defined(WIN32_) || defined(WIN64_))
+  // Sanity check. We might crash if testing and relying on an OS socket error.
+  if (enable &&
+      (service_type != SERVICETYPE_BESTEFFORT) &&
+      (service_type != SERVICETYPE_CONTROLLEDLOAD) &&
+      (service_type != SERVICETYPE_GUARANTEED) &&
+      (service_type != SERVICETYPE_QUALITATIVE)) {
+    WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: service type %d not supported", __FUNCTION__,
+                 video_channel, service_type);
+    shared_data_->SetLastError(kViENetworkServiceTypeNotSupported);
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  VideoCodec video_codec;
+  if (vie_encoder->GetEncoder(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get max bitrate for the channel",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkSendCodecNotSet);
+    return -1;
+  }
+  if (vie_channel->SetSendGQoS(enable, service_type, video_codec.maxBitrate,
+                               overrideDSCP) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s: Not supported", __FUNCTION__);
+  shared_data_->SetLastError(kViENetworkNotSupported);
+  return -1;
+#endif
+}
+
+int ViENetworkImpl::GetSendGQoS(const int video_channel, bool& enabled,
+                                int& service_type, int& overrideDSCP) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSendGQoS(enabled, service_type, overrideDSCP) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, mtu: %u)", __FUNCTION__, video_channel, mtu);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetMTU(mtu) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetPacketTimeoutNotification(const int video_channel,
+                                                 bool enable,
+                                                 int timeout_seconds) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, timeout_seconds: %u)",
+               __FUNCTION__, video_channel, enable, timeout_seconds);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetPacketTimeoutNotification(enable,
+                                                timeout_seconds) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::RegisterObserver(const int video_channel,
+                                     ViENetworkObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterNetworkObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViENetworkObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::DeregisterObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (!vie_channel->NetworkObserverRegistered()) {
+    shared_data_->SetLastError(kViENetworkObserverNotRegistered);
+    return -1;
+  }
+  return vie_channel->RegisterNetworkObserver(NULL);
+}
+
+int ViENetworkImpl::SetPeriodicDeadOrAliveStatus(
+    const int video_channel,
+    bool enable,
+    unsigned int sample_time_seconds) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, sample_time_seconds: %ul)",
+               __FUNCTION__, video_channel, enable, sample_time_seconds);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (!vie_channel->NetworkObserverRegistered()) {
+    shared_data_->SetLastError(kViENetworkObserverNotRegistered);
+    return -1;
+  }
+  if (vie_channel->SetPeriodicDeadOrAliveStatus(enable, sample_time_seconds)
+      != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SendUDPPacket(const int video_channel, const void* data,
+                                  const unsigned int length,
+                                  int& transmitted_bytes,
+                                  bool use_rtcp_socket = false) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, data: -, length: %d, transmitter_bytes: -, "
+               "useRtcpSocket: %d)", __FUNCTION__, video_channel, length,
+               use_rtcp_socket);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SendUDPPacket((const WebRtc_Word8*) data, length,
+                                     (WebRtc_Word32&) transmitted_bytes,
+                                     use_rtcp_socket) < 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_network_impl.h b/trunk/src/video_engine/vie_network_impl.h
new file mode 100644
index 0000000..8eff523
--- /dev/null
+++ b/trunk/src/video_engine/vie_network_impl.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_NETWORK_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_NETWORK_IMPL_H_
+
+#include "typedefs.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
// Implementation of the public ViENetwork sub-API. Every method resolves
// |video_channel| through the channel manager held by |shared_data_| and
// delegates to the matching ViEChannel; errors are reported both via the
// -1 return convention and ViESharedData::SetLastError().
class ViENetworkImpl
    : public ViENetwork,
      public ViERefCount {
 public:
  // Implements ViENetwork.
  virtual int Release();
  // Local/remote address configuration.
  virtual int SetLocalReceiver(const int video_channel,
                               const unsigned short rtp_port,
                               const unsigned short rtcp_port,
                               const char* ip_address);
  virtual int GetLocalReceiver(const int video_channel,
                               unsigned short& rtp_port,
                               unsigned short& rtcp_port,
                               char* ip_address);
  virtual int SetSendDestination(const int video_channel,
                                 const char* ip_address,
                                 const unsigned short rtp_port,
                                 const unsigned short rtcp_port,
                                 const unsigned short source_rtp_port,
                                 const unsigned short source_rtcp_port);
  virtual int GetSendDestination(const int video_channel,
                                 char* ip_address,
                                 unsigned short& rtp_port,
                                 unsigned short& rtcp_port,
                                 unsigned short& source_rtp_port,
                                 unsigned short& source_rtcp_port);
  // External transport: user-supplied Transport replaces the built-in UDP
  // sockets; incoming packets are then injected via ReceivedRTPPacket /
  // ReceivedRTCPPacket.
  virtual int RegisterSendTransport(const int video_channel,
                                    Transport& transport);
  virtual int DeregisterSendTransport(const int video_channel);
  virtual int ReceivedRTPPacket(const int video_channel,
                                const void* data,
                                const int length);
  virtual int ReceivedRTCPPacket(const int video_channel,
                                 const void* data,
                                 const int length);
  virtual int GetSourceInfo(const int video_channel,
                            unsigned short& rtp_port,
                            unsigned short& rtcp_port,
                            char* ip_address,
                            unsigned int ip_address_length);
  virtual int GetLocalIP(char ip_address[64], bool ipv6);
  virtual int EnableIPv6(int video_channel);
  virtual bool IsIPv6Enabled(int video_channel);
  // Source filtering: drop packets not originating from the given
  // ports/address.
  virtual int SetSourceFilter(const int video_channel,
                              const unsigned short rtp_port,
                              const unsigned short rtcp_port,
                              const char* ip_address);
  virtual int GetSourceFilter(const int video_channel,
                              unsigned short& rtp_port,
                              unsigned short& rtcp_port,
                              char* ip_address);
  // Quality-of-service knobs (ToS/DSCP; GQoS is Windows-only).
  virtual int SetSendToS(const int video_channel,
                         const int DSCP,
                         const bool use_set_sockOpt);
  virtual int GetSendToS(const int video_channel,
                         int& DSCP,
                         bool& use_set_sockOpt);
  virtual int SetSendGQoS(const int video_channel,
                          const bool enable,
                          const int service_type,
                          const int overrideDSCP);
  virtual int GetSendGQoS(const int video_channel,
                          bool& enabled,
                          int& service_type,
                          int& overrideDSCP);
  virtual int SetMTU(int video_channel, unsigned int mtu);
  // Connectivity monitoring callbacks (require a registered observer).
  virtual int SetPacketTimeoutNotification(const int video_channel,
                                           bool enable,
                                           int timeout_seconds);
  virtual int RegisterObserver(const int video_channel,
                               ViENetworkObserver& observer);
  virtual int DeregisterObserver(const int video_channel);
  virtual int SetPeriodicDeadOrAliveStatus(
      const int video_channel,
      const bool enable,
      const unsigned int sample_time_seconds);
  virtual int SendUDPPacket(const int video_channel,
                            const void* data,
                            const unsigned int length,
                            int& transmitted_bytes,
                            bool use_rtcp_socket);

 protected:
  // Constructed/destroyed by the owning video engine only.
  ViENetworkImpl(ViESharedData* shared_data);
  virtual ~ViENetworkImpl();

 private:
  // Not owned; shared engine state (channel manager, last-error slot).
  ViESharedData* shared_data_;
};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_NETWORK_IMPL_H_
diff --git a/trunk/src/video_engine/vie_performance_monitor.cc b/trunk/src/video_engine/vie_performance_monitor.cc
new file mode 100644
index 0000000..43ae056
--- /dev/null
+++ b/trunk/src/video_engine/vie_performance_monitor.cc
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_performance_monitor.h"
+
+#include "cpu_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "vie_base.h"
+#include "vie_defines.h"
+
+namespace webrtc {
+
+enum { kVieMonitorPeriodMs = 975 };
+enum { kVieCpuStartValue = 75 };
+
// Constructs an idle monitor; no thread is started and no CPU sampler is
// created until Init() is called. Both CPU averages start at
// kVieCpuStartValue (75).
ViEPerformanceMonitor::ViEPerformanceMonitor(int engine_id)
    : engine_id_(engine_id),
      pointer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      monitor_thread_(NULL),
      monitor_event_(*EventWrapper::Create()),
      average_application_cpu_(kVieCpuStartValue),
      average_system_cpu_(kVieCpuStartValue),
      cpu_(NULL),
      vie_base_observer_(NULL) {
}
+
// Stops the monitor thread (via Terminate) and releases the wrapper objects
// created in the constructor. |monitor_event_| is held by reference, hence
// the delete of its address.
ViEPerformanceMonitor::~ViEPerformanceMonitor() {
  Terminate();
  delete pointer_cs_;
  delete &monitor_event_;
}
+
+int ViEPerformanceMonitor::Init(ViEBaseObserver* vie_base_observer) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+
+  CriticalSectionScoped cs(pointer_cs_);
+  if (!vie_base_observer || vie_base_observer_) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Bad input argument or observer already set",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  cpu_ = CpuWrapper::CreateCpu();
+  if (cpu_ == NULL) {
+    // Performance monitoring not supported
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                 ViEId(engine_id_), "%s: Not supported", __FUNCTION__);
+    return 0;
+  }
+
+  if (monitor_thread_ == NULL) {
+    monitor_event_.StartTimer(true, kVieMonitorPeriodMs);
+    monitor_thread_ = ThreadWrapper::CreateThread(ViEMonitorThreadFunction,
+                                                  this, kNormalPriority,
+                                                  "ViEPerformanceMonitor");
+    unsigned int t_id = 0;
+    if (monitor_thread_->Start(t_id)) {
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: Performance monitor thread started %u",
+                   __FUNCTION__, t_id);
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: Could not start performance monitor", __FUNCTION__);
+      monitor_event_.StopTimer();
+      return -1;
+    }
+  }
+  vie_base_observer_ = vie_base_observer;
+  return 0;
+}
+
// Unregisters the observer and shuts down the monitor thread. The critical
// section is released around ThreadWrapper::Stop() because the monitor
// thread itself takes pointer_cs_ in ViEMonitorProcess(); holding the lock
// across Stop() would deadlock. Safe to call multiple times: a second call
// returns early because vie_base_observer_ is already NULL.
void ViEPerformanceMonitor::Terminate() {
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
               "%s", __FUNCTION__);

  pointer_cs_->Enter();
  if (!vie_base_observer_) {
    // Never initialized, or already terminated.
    pointer_cs_->Leave();
    return;
  }

  vie_base_observer_ = NULL;
  monitor_event_.StopTimer();
  if (monitor_thread_) {
    // Clear monitor_thread_ first and wake the thread so ViEMonitorProcess()
    // sees NULL and returns false, letting Stop() succeed.
    ThreadWrapper* tmp_thread = monitor_thread_;
    monitor_thread_ = NULL;
    monitor_event_.Set();
    pointer_cs_->Leave();
    if (tmp_thread->Stop()) {
      pointer_cs_->Enter();
      delete tmp_thread;
      tmp_thread = NULL;
      delete cpu_;
    }
    // NOTE(review): if Stop() fails, tmp_thread and cpu_ are never deleted
    // (leak), and the Leave() below unbalances the Enter() only in the
    // success branch — confirm Stop() cannot fail here, or rework.
    cpu_ = NULL;
  }
  pointer_cs_->Leave();
}
+
+bool ViEPerformanceMonitor::ViEBaseObserverRegistered() {
+  CriticalSectionScoped cs(pointer_cs_);
+  return vie_base_observer_ != NULL;
+}
+
+bool ViEPerformanceMonitor::ViEMonitorThreadFunction(void* obj) {
+  return static_cast<ViEPerformanceMonitor*>(obj)->ViEMonitorProcess();
+}
+
// Thread body; returning false makes ThreadWrapper stop calling it.
bool ViEPerformanceMonitor::ViEMonitorProcess() {
  // Periodically triggered with time kVieMonitorPeriodMs.
  monitor_event_.Wait(kVieMonitorPeriodMs);
  if (monitor_thread_ == NULL) {
    // Thread removed (Terminate() cleared the pointer), exit.
    return false;
  }

  CriticalSectionScoped cs(pointer_cs_);
  if (cpu_) {
    int cpu_load = cpu_->CpuUsage();
    // NOTE(review): the 75% alarm threshold is hard-coded here and happens
    // to equal kVieCpuStartValue; confirm whether they are meant to be the
    // same constant.
    if (cpu_load > 75) {
      if (vie_base_observer_) {
        vie_base_observer_->PerformanceAlarm(cpu_load);
      }
    }
  }
  return true;
}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_performance_monitor.h b/trunk/src/video_engine/vie_performance_monitor.h
new file mode 100644
index 0000000..c3ea931
--- /dev/null
+++ b/trunk/src/video_engine/vie_performance_monitor.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViEPerformanceMonitor is used to check the current CPU usage and triggers a
+// callback when getting over a specified threshold.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_PERFORMANCE_MONITOR_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_PERFORMANCE_MONITOR_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "vie_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class CpuWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class ViEBaseObserver;
+
+class ViEPerformanceMonitor {
+ public:
+  explicit ViEPerformanceMonitor(int engine_id);
+  ~ViEPerformanceMonitor();
+
+  int Init(ViEBaseObserver* vie_base_observer);
+  void Terminate();
+  bool ViEBaseObserverRegistered();
+
+ protected:
+  static bool ViEMonitorThreadFunction(void* obj);
+  bool ViEMonitorProcess();
+
+ private:
+  const int engine_id_;
+  // TODO(mflodman) Make this one scoped_ptr.
+  CriticalSectionWrapper* pointer_cs_;
+  ThreadWrapper* monitor_thread_;
+  EventWrapper& monitor_event_;
+  int average_application_cpu_;
+  int average_system_cpu_;
+  CpuWrapper* cpu_;
+  ViEBaseObserver* vie_base_observer_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_PERFORMANCE_MONITOR_H_
diff --git a/trunk/src/video_engine/vie_receiver.cc b/trunk/src/video_engine/vie_receiver.cc
new file mode 100644
index 0000000..af0dc75
--- /dev/null
+++ b/trunk/src/video_engine/vie_receiver.cc
@@ -0,0 +1,267 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_receiver.h"
+
+#include "critical_section_wrapper.h"
+#include "rtp_dump.h"
+#include "rtp_rtcp.h"
+#include "video_coding.h"
+#include "trace.h"
+
+namespace webrtc {
+
+ViEReceiver::ViEReceiver(int engine_id, int channel_id,
+                         RtpRtcp& rtp_rtcp,
+                         VideoCodingModule& module_vcm)
+    : receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      engine_id_(engine_id),
+      channel_id_(channel_id),
+      rtp_rtcp_(rtp_rtcp),
+      vcm_(module_vcm),
+      external_decryption_(NULL),
+      decryption_buffer_(NULL),
+      rtp_dump_(NULL),
+      receiving_(false) {
+}
+
+ViEReceiver::~ViEReceiver() {
+  if (decryption_buffer_) {
+    delete[] decryption_buffer_;
+    decryption_buffer_ = NULL;
+  }
+  if (rtp_dump_) {
+    rtp_dump_->Stop();
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  }
+}
+
+int ViEReceiver::RegisterExternalDecryption(Encryption* decryption) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (external_decryption_) {
+    return -1;
+  }
+  decryption_buffer_ = new WebRtc_UWord8[kViEMaxMtu];
+  if (decryption_buffer_ == NULL) {
+    return -1;
+  }
+  external_decryption_ = decryption;
+  return 0;
+}
+
+int ViEReceiver::DeregisterExternalDecryption() {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (external_decryption_ == NULL) {
+    return -1;
+  }
+  external_decryption_ = NULL;
+  return 0;
+}
+
+void ViEReceiver::RegisterSimulcastRtpRtcpModules(
+    const std::list<RtpRtcp*>& rtp_modules) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  rtp_rtcp_simulcast_.clear();
+
+  if (!rtp_modules.empty()) {
+    rtp_rtcp_simulcast_.insert(rtp_rtcp_simulcast_.begin(),
+                               rtp_modules.begin(),
+                               rtp_modules.end());
+  }
+}
+
+void ViEReceiver::IncomingRTPPacket(const WebRtc_Word8* rtp_packet,
+                                    const WebRtc_Word32 rtp_packet_length,
+                                    const WebRtc_Word8* from_ip,
+                                    const WebRtc_UWord16 from_port) {
+  InsertRTPPacket(rtp_packet, rtp_packet_length);
+}
+
+void ViEReceiver::IncomingRTCPPacket(const WebRtc_Word8* rtcp_packet,
+                                     const WebRtc_Word32 rtcp_packet_length,
+                                     const WebRtc_Word8* from_ip,
+                                     const WebRtc_UWord16 from_port) {
+  InsertRTCPPacket(rtcp_packet, rtcp_packet_length);
+}
+
+int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
+                                   int rtp_packet_length) {
+  if (!receiving_) {
+    return -1;
+  }
+  return InsertRTPPacket((const WebRtc_Word8*) rtp_packet, rtp_packet_length);
+}
+
+int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
+                                    int rtcp_packet_length) {
+  if (!receiving_) {
+    return -1;
+  }
+  return InsertRTCPPacket((const WebRtc_Word8*) rtcp_packet,
+                          rtcp_packet_length);
+}
+
+WebRtc_Word32 ViEReceiver::OnReceivedPayloadData(
+    const WebRtc_UWord8* payload_data, const WebRtc_UWord16 payload_size,
+    const WebRtcRTPHeader* rtp_header) {
+  if (rtp_header == NULL) {
+    return 0;
+  }
+
+  if (vcm_.IncomingPacket(payload_data, payload_size, *rtp_header) != 0) {
+    // TODO: confirm error handling — VCM rejected the packet; propagate failure.
+    return -1;
+  }
+  return 0;
+}
+
+int ViEReceiver::InsertRTPPacket(const WebRtc_Word8* rtp_packet,
+                                 int rtp_packet_length) {
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  WebRtc_Word8* tmp_ptr = const_cast<WebRtc_Word8*>(rtp_packet);
+  unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
+  int received_packet_length = rtp_packet_length;
+
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+
+    if (external_decryption_) {
+      int decrypted_length = 0;
+      external_decryption_->decrypt(channel_id_, received_packet,
+                                    decryption_buffer_, received_packet_length,
+                                    &decrypted_length);
+      if (decrypted_length <= 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_), "RTP decryption failed");
+        return -1;
+      } else if (decrypted_length > kViEMaxMtu) {
+        WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "InsertRTPPacket: %d bytes is allocated as RTP decrytption"
+                     " output, external decryption used %d bytes. => memory is "
+                     " now corrupted", kViEMaxMtu, decrypted_length);
+        return -1;
+      }
+      received_packet = decryption_buffer_;
+      received_packet_length = decrypted_length;
+    }
+
+    if (rtp_dump_) {
+      rtp_dump_->DumpPacket(received_packet,
+                           static_cast<WebRtc_UWord16>(received_packet_length));
+    }
+  }
+  return rtp_rtcp_.IncomingPacket(received_packet, received_packet_length);
+}
+
+int ViEReceiver::InsertRTCPPacket(const WebRtc_Word8* rtcp_packet,
+                                  int rtcp_packet_length) {
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+    WebRtc_Word8* tmp_ptr = const_cast<WebRtc_Word8*>(rtcp_packet);
+    unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
+  int received_packet_length = rtcp_packet_length;
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+
+    if (external_decryption_) {
+      int decrypted_length = 0;
+      external_decryption_->decrypt_rtcp(channel_id_, received_packet,
+                                         decryption_buffer_,
+                                         received_packet_length,
+                                         &decrypted_length);
+      if (decrypted_length <= 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_), "RTP decryption failed");
+        return -1;
+      } else if (decrypted_length > kViEMaxMtu) {
+        WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "InsertRTCPPacket: %d bytes is allocated as RTP "
+                     " decrytption output, external decryption used %d bytes. "
+                     " => memory is now corrupted",
+                     kViEMaxMtu, decrypted_length);
+        return -1;
+      }
+      received_packet = decryption_buffer_;
+      received_packet_length = decrypted_length;
+    }
+
+    if (rtp_dump_) {
+      rtp_dump_->DumpPacket(
+          received_packet, static_cast<WebRtc_UWord16>(received_packet_length));
+    }
+  }
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+    std::list<RtpRtcp*>::iterator it = rtp_rtcp_simulcast_.begin();
+    while (it != rtp_rtcp_simulcast_.end()) {
+      RtpRtcp* rtp_rtcp = *it++;
+      rtp_rtcp->IncomingPacket(received_packet, received_packet_length);
+    }
+  }
+  return rtp_rtcp_.IncomingPacket(received_packet, received_packet_length);
+}
+
+void ViEReceiver::StartReceive() {
+  receiving_ = true;
+}
+
+void ViEReceiver::StopReceive() {
+  receiving_ = false;
+}
+
+int ViEReceiver::StartRTPDump(const char file_nameUTF8[1024]) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (rtp_dump_) {
+    // Restart it if it already exists and is started
+    rtp_dump_->Stop();
+  } else {
+    rtp_dump_ = RtpDump::CreateRtpDump();
+    if (rtp_dump_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "StartRTPDump: Failed to create RTP dump");
+      return -1;
+    }
+  }
+  if (rtp_dump_->Start(file_nameUTF8) != 0) {
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "StartRTPDump: Failed to start RTP dump");
+    return -1;
+  }
+  return 0;
+}
+
+int ViEReceiver::StopRTPDump() {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (rtp_dump_) {
+    if (rtp_dump_->IsActive()) {
+      rtp_dump_->Stop();
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "StopRTPDump: Dump not active");
+    }
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "StopRTPDump: RTP dump not started");
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_receiver.h b/trunk/src/video_engine/vie_receiver.h
new file mode 100644
index 0000000..a99d388
--- /dev/null
+++ b/trunk/src/video_engine/vie_receiver.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
+
+#include <list>
+
+#include "engine_configurations.h"
+#include "rtp_rtcp_defines.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "udp_transport.h"
+#include "vie_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class Encryption;
+class RtpDump;
+class RtpRtcp;
+class VideoCodingModule;
+
+class ViEReceiver : public UdpTransportData, public RtpData {
+ public:
+  ViEReceiver(int engine_id, int channel_id, RtpRtcp& rtp_rtcp,
+              VideoCodingModule& module_vcm);
+  ~ViEReceiver();
+
+  int RegisterExternalDecryption(Encryption* decryption);
+  int DeregisterExternalDecryption();
+
+  void RegisterSimulcastRtpRtcpModules(const std::list<RtpRtcp*>& rtp_modules);
+
+  void StartReceive();
+  void StopReceive();
+
+  int StartRTPDump(const char file_nameUTF8[1024]);
+  int StopRTPDump();
+
+  // Implements UdpTransportData.
+  virtual void IncomingRTPPacket(const WebRtc_Word8* rtp_packet,
+                                 const WebRtc_Word32 rtp_packet_length,
+                                 const WebRtc_Word8* from_ip,
+                                 const WebRtc_UWord16 from_port);
+  virtual void IncomingRTCPPacket(const WebRtc_Word8* rtcp_packet,
+                                  const WebRtc_Word32 rtcp_packet_length,
+                                  const WebRtc_Word8* from_ip,
+                                  const WebRtc_UWord16 from_port);
+
+  // Receives packets from external transport.
+  int ReceivedRTPPacket(const void* rtp_packet, int rtp_packet_length);
+  int ReceivedRTCPPacket(const void* rtcp_packet, int rtcp_packet_length);
+
+  // Implements RtpData.
+  virtual WebRtc_Word32 OnReceivedPayloadData(
+      const WebRtc_UWord8* payload_data,
+      const WebRtc_UWord16 payload_size,
+      const WebRtcRTPHeader* rtp_header);
+
+ private:
+  int InsertRTPPacket(const WebRtc_Word8* rtp_packet, int rtp_packet_length);
+  int InsertRTCPPacket(const WebRtc_Word8* rtcp_packet, int rtcp_packet_length);
+
+  scoped_ptr<CriticalSectionWrapper> receive_cs_;
+  int engine_id_;
+  int channel_id_;
+  RtpRtcp& rtp_rtcp_;
+  std::list<RtpRtcp*> rtp_rtcp_simulcast_;
+  VideoCodingModule& vcm_;
+
+  Encryption* external_decryption_;
+  WebRtc_UWord8* decryption_buffer_;
+  RtpDump* rtp_dump_;
+  bool receiving_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
diff --git a/trunk/src/video_engine/vie_ref_count.cc b/trunk/src/video_engine/vie_ref_count.cc
new file mode 100644
index 0000000..2ab285c
--- /dev/null
+++ b/trunk/src/video_engine/vie_ref_count.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_ref_count.h"
+
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+
+ViERefCount::ViERefCount()
+    : count_(0),
+      crit_(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+ViERefCount::~ViERefCount() {
+}
+
+ViERefCount& ViERefCount::operator++(int) {
+  CriticalSectionScoped lock(crit_.get());
+  count_++;
+  return *this;
+}
+
+ViERefCount& ViERefCount::operator--(int) {
+  CriticalSectionScoped lock(crit_.get());
+  count_--;
+  return *this;
+}
+
+void ViERefCount::Reset() {
+  CriticalSectionScoped lock(crit_.get());
+  count_ = 0;
+}
+
+int ViERefCount::GetCount() const {
+  return count_;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_ref_count.h b/trunk/src/video_engine/vie_ref_count.h
new file mode 100644
index 0000000..65efe36
--- /dev/null
+++ b/trunk/src/video_engine/vie_ref_count.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(mflodman) Use ref count class in system_wrappers.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_REF_COUNT_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_REF_COUNT_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class ViERefCount {
+ public:
+  ViERefCount();
+  ~ViERefCount();
+
+  ViERefCount& operator++(int);
+  ViERefCount& operator--(int);
+
+  void Reset();
+  int GetCount() const;
+
+ private:
+  volatile int count_;
+  scoped_ptr<CriticalSectionWrapper> crit_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_REF_COUNT_H_
diff --git a/trunk/src/video_engine/vie_remb.cc b/trunk/src/video_engine/vie_remb.cc
new file mode 100644
index 0000000..7732408
--- /dev/null
+++ b/trunk/src/video_engine/vie_remb.cc
@@ -0,0 +1,215 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_remb.h"
+
+#include <algorithm>
+#include <cassert>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+const int kRembSendIntervallMs = 1000;
+
+// % threshold for if we should send a new REMB asap.
+const int kSendThresholdPercent = 97;
+
+VieRemb::VieRemb(int engine_id)
+    : engine_id_(engine_id),
+      list_crit_(CriticalSectionWrapper::CreateCriticalSection()),
+      last_remb_time_(TickTime::MillisecondTimestamp()),
+      last_send_bitrate_(0) {
+}
+
+VieRemb::~VieRemb() {
+}
+
+void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, engine_id_,
+               "VieRemb::AddReceiveChannel(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
+      receive_modules_.end())
+    return;
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, engine_id_, "AddRembChannel");
+  // The module probably doesn't have a remote SSRC yet, so don't add it to the
+  // map.
+  receive_modules_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, engine_id_,
+               "VieRemb::RemoveReceiveChannel(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  unsigned int ssrc = rtp_rtcp->RemoteSSRC();
+  for (RtpModules::iterator it = receive_modules_.begin();
+       it != receive_modules_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      receive_modules_.erase(it);
+      break;
+    }
+  }
+  bitrates_.erase(ssrc);
+}
+
+void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, engine_id_,
+               "VieRemb::AddRembSender(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+
+  // Verify this module hasn't been added earlier.
+  if (std::find(rtcp_sender_.begin(), rtcp_sender_.end(), rtp_rtcp) !=
+      rtcp_sender_.end())
+    return;
+  rtcp_sender_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, engine_id_,
+               "VieRemb::RemoveRembSender(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  for (RtpModules::iterator it = rtcp_sender_.begin();
+       it != rtcp_sender_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      rtcp_sender_.erase(it);
+      return;
+    }
+  }
+}
+
+void VieRemb::AddSendChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, engine_id_,
+               "VieRemb::AddSendChannel(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+
+  // Verify this module hasn't been added earlier.
+  if (std::find(send_modules_.begin(), send_modules_.end(), rtp_rtcp) !=
+      send_modules_.end())
+    return;
+  send_modules_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveSendChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, engine_id_,
+               "VieRemb::RemoveSendChannel(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  for (RtpModules::iterator it = send_modules_.begin();
+      it != send_modules_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      send_modules_.erase(it);
+      return;
+    }
+  }
+}
+
+void VieRemb::OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, engine_id_,
+               "VieRemb::UpdateBitrateEstimate(ssrc: %u, bitrate: %u)",
+               ssrc, bitrate);
+  CriticalSectionScoped cs(list_crit_.get());
+
+  // Check if this is a new ssrc and add it to the map if it is.
+  if (bitrates_.find(ssrc) == bitrates_.end()) {
+    bitrates_[ssrc] = bitrate;
+  }
+
+  int new_remb_bitrate = last_send_bitrate_ - bitrates_[ssrc] + bitrate;
+  if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
+    // The new bitrate estimate is less than kSendThresholdPercent % of the last
+    // report. Send a REMB asap.
+    last_remb_time_ = TickTime::MillisecondTimestamp() - kRembSendIntervallMs;
+  }
+  bitrates_[ssrc] = bitrate;
+}
+
+void VieRemb::OnReceivedRemb(unsigned int bitrate) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, engine_id_,
+               "VieRemb::OnReceivedRemb(bitrate: %u)", bitrate);
+  // TODO(mflodman) Should be extended to allow different split of bitrate.
+  // TODO(mflodman) Do we want to call |SetMaximumBitrateEstimate| from
+  // |Process| instead?
+
+  // Split the bitrate estimate between all sending channels.
+  CriticalSectionScoped cs(list_crit_.get());
+  for (RtpModules::iterator it = send_modules_.begin();
+       it != send_modules_.end(); ++it) {
+    (*it)->SetMaximumBitrateEstimate(bitrate / send_modules_.size());
+  }
+}
+
+WebRtc_Word32 VieRemb::ChangeUniqueId(const WebRtc_Word32 id) {
+  return 0;
+}
+
+WebRtc_Word32 VieRemb::TimeUntilNextProcess() {
+  return kRembSendIntervallMs -
+      (TickTime::MillisecondTimestamp() - last_remb_time_);
+}
+
+WebRtc_Word32 VieRemb::Process() {
+  int64_t now = TickTime::MillisecondTimestamp();
+  if (now - last_remb_time_ < kRembSendIntervallMs)
+    return 0;
+
+  last_remb_time_ = now;
+
+  // Calculate total receive bitrate estimate.
+  list_crit_->Enter();
+  int total_bitrate = 0;
+  int num_bitrates = bitrates_.size();
+
+  if (num_bitrates == 0) {
+    list_crit_->Leave();
+    return 0;
+  }
+
+  // TODO(mflodman) Use std::vector and change RTP module API.
+  unsigned int* ssrcs = new unsigned int[num_bitrates];
+
+  int idx = 0;
+  for (SsrcBitrate::iterator it = bitrates_.begin(); it != bitrates_.end();
+       ++it, ++idx) {
+    total_bitrate += it->second;
+    ssrcs[idx] = it->first;
+  }
+
+  // Send a REMB packet.
+  RtpRtcp* sender = NULL;
+  if (!rtcp_sender_.empty()) {
+    sender = rtcp_sender_.front();
+  }
+  last_send_bitrate_ = total_bitrate;
+  list_crit_->Leave();
+
+  if (sender) {
+    sender->SetREMBData(total_bitrate, num_bitrates, ssrcs);
+  }
+  delete [] ssrcs;
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_remb.h b/trunk/src/video_engine/vie_remb.h
new file mode 100644
index 0000000..32d58a7
--- /dev/null
+++ b/trunk/src/video_engine/vie_remb.h
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// 1. Register a RtpRtcp module to include in the REMB packet.
+// 2. When UpdateBitrateEstimate is called for the first time for a SSRC, add it
+//    to the map.
+// 3. Send a new REMB every kRembSendIntervallMs or if a lower bitrate estimate
+//    is received for a specified SSRC.
+
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REMB_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REMB_H_
+
+#include <list>
+#include <map>
+
+#include "modules/interface/module.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpRtcp;
+
+class VieRemb : public RtpRemoteBitrateObserver, public Module {
+ public:
+  explicit VieRemb(int engine_id);
+  ~VieRemb();
+
+  // Called to add a receive channel to include in the REMB packet.
+  void AddReceiveChannel(RtpRtcp* rtp_rtcp);
+
+  // Removes the specified channel from REMB estimate.
+  void RemoveReceiveChannel(RtpRtcp* rtp_rtcp);
+
+  // Called to add a module that can generate and send REMB RTCP.
+  void AddRembSender(RtpRtcp* rtp_rtcp);
+
+  // Removes a REMB RTCP sender.
+  void RemoveRembSender(RtpRtcp* rtp_rtcp);
+
+  // Called to add a send channel encoding and sending data, affected by
+  // received  REMB packets.
+  void AddSendChannel(RtpRtcp* rtp_rtcp);
+
+  // Removes the specified channel from receiving REMB packet estimates.
+  void RemoveSendChannel(RtpRtcp* rtp_rtcp);
+
+  // Called every time there is a new bitrate estimate for the received stream
+  // with given SSRC. This call will trigger a new RTCP REMB packet if the
+  // bitrate estimate has decreased or if no RTCP REMB packet has been sent for
+  // a certain time interval.
+  // Implements RtpRemoteBitrateObserver.
+  virtual void OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate);
+
+  // Called for every new received REMB packet; distributes the estimate
+  // between all sending modules.
+  virtual void OnReceivedRemb(unsigned int bitrate);
+
+  // Implements Module.
+  virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+  virtual WebRtc_Word32 TimeUntilNextProcess();
+  virtual WebRtc_Word32 Process();
+
+ private:
+  typedef std::list<RtpRtcp*> RtpModules;
+  typedef std::map<unsigned int, unsigned int> SsrcBitrate;
+
+  int engine_id_;
+  scoped_ptr<CriticalSectionWrapper> list_crit_;
+
+  // The last time a REMB was sent.
+  int64_t last_remb_time_;
+  int last_send_bitrate_;
+
+  // All RtpRtcp modules to include in the REMB packet.
+  RtpModules receive_modules_;
+
+  // All modules encoding and sending data.
+  RtpModules send_modules_;
+
+  // All modules that can send REMB RTCP.
+  RtpModules rtcp_sender_;
+
+  // The last bitrate update for each SSRC.
+  SsrcBitrate bitrates_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REMB_H_
diff --git a/trunk/src/video_engine/vie_remb_unittest.cc b/trunk/src/video_engine/vie_remb_unittest.cc
new file mode 100644
index 0000000..a842103
--- /dev/null
+++ b/trunk/src/video_engine/vie_remb_unittest.cc
@@ -0,0 +1,335 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for ViERemb.
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#if defined(_WIN32)
+#include <windows.h>
+#endif
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "video_engine/vie_remb.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::Return;
+
+namespace webrtc {
+
+class ViERembTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    vie_remb_.reset(new VieRemb(1234));
+  }
+  scoped_ptr<VieRemb> vie_remb_;
+
+  void TestSleep(unsigned int time_ms) {
+#if defined(_WIN32)
+    ::Sleep(time_ms);
+#else
+    usleep(time_ms * 1000);
+#endif
+  }
+};
+
+TEST_F(ViERembTest, OneModuleTestForSendingRemb)
+{
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddSendChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+
+  const unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // TODO(mflodman) Add fake clock and remove the lowered bitrate below.
+  TestSleep(1010);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+      .Times(1);
+  vie_remb_->Process();
+
+  // Lower bitrate to send another REMB packet.
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate - 100);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate - 100, 1, _))
+        .Times(1);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveSendChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, LowerEstimateToSendRemb)
+{
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddSendChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // Lower the estimate with more than 3% to trigger a call to SetREMBData right
+  // away.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  vie_remb_->Process();
+}
+
+TEST_F(ViERembTest, VerifyCombinedBitrateEstimate)
+{
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddSendChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  EXPECT_CALL(rtp_0, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1] + 100);
+  EXPECT_CALL(rtp_1, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[1]));
+
+  // Lower the estimate to trigger a callback.
+  int total_bitrate = bitrate_estimate[0] + bitrate_estimate[1];
+  EXPECT_CALL(rtp_0, SetREMBData(total_bitrate, 2, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveSendChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+// Add two senders, and insert a received REMB estimate. Both sending channels
+// should get half of the received value.
+TEST_F(ViERembTest, IncomingRemb)
+{
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddSendChannel(&rtp_0);
+  vie_remb_->AddSendChannel(&rtp_1);
+
+  const unsigned int bitrate_estimate = 1200;
+
+  // Fake received REMB and verify both modules get half of the bitrate.
+  EXPECT_CALL(rtp_0, SetMaximumBitrateEstimate(bitrate_estimate/2))
+      .Times(1);
+  EXPECT_CALL(rtp_1, SetMaximumBitrateEstimate(bitrate_estimate/2))
+      .Times(1);
+  vie_remb_->OnReceivedRemb(bitrate_estimate);
+
+  // Remove one of the modules and verify the other module gets the entire
+  // bitrate.
+  vie_remb_->RemoveSendChannel(&rtp_0);
+  EXPECT_CALL(rtp_0, SetMaximumBitrateEstimate(_))
+      .Times(0);
+  EXPECT_CALL(rtp_1, SetMaximumBitrateEstimate(bitrate_estimate))
+      .Times(1);
+  vie_remb_->OnReceivedRemb(bitrate_estimate);
+
+  vie_remb_->RemoveSendChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, NoRembForIncreasedBitrate)
+{
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddSendChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  EXPECT_CALL(rtp_0, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  EXPECT_CALL(rtp_1, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[1]));
+
+  // Trigger a first call to have a running state.
+  // TODO(mflodman) Add fake clock.
+  TestSleep(1010);
+  EXPECT_CALL(rtp_0,
+              SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2, _))
+      .Times(1);
+  vie_remb_->Process();
+
+  // Increased estimate shouldn't trigger a callback right away.
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0] + 1);
+  EXPECT_CALL(rtp_0, SetREMBData(_, _, _))
+      .Times(0);
+
+  // Decreasing the estimate by less than 3% shouldn't trigger a new callback.
+  int lower_estimate = bitrate_estimate[0] * 98 / 100;
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], lower_estimate);
+   EXPECT_CALL(rtp_0, SetREMBData(_, _, _))
+       .Times(0);
+
+  vie_remb_->Process();
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveSendChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+}
+
+TEST_F(ViERembTest, ChangeSendRtpModule)
+{
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddSendChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  EXPECT_CALL(rtp_0, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  EXPECT_CALL(rtp_1, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[1]));
+
+  // Decrease estimate to trigger a REMB.
+  bitrate_estimate[0] = bitrate_estimate[0] - 100;
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2,
+                                 _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  vie_remb_->Process();
+
+  // Remove the sending module, add it again -> should get remb on the second
+  // module.
+  vie_remb_->RemoveSendChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+  vie_remb_->AddSendChannel(&rtp_1);
+  vie_remb_->AddRembSender(&rtp_1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+
+  bitrate_estimate[1] = bitrate_estimate[1] - 100;
+  EXPECT_CALL(rtp_1, SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2,
+                                 _))
+        .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+  vie_remb_->RemoveSendChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, OnlyOneRembForDoubleProcess)
+{
+  MockRtpRtcp rtp;
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddSendChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // Lower the estimate, should trigger a call to SetREMBData right away.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  vie_remb_->Process();
+
+  // Call Process again, this should not trigger a new callback.
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(0);
+  vie_remb_->Process();
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveSendChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, NoOnReceivedBitrateChangedCall)
+{
+  MockRtpRtcp rtp;
+  EXPECT_CALL(rtp, RemoteSSRC())
+        .WillRepeatedly(Return(1234));
+
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddSendChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+  // TODO(mflodman) Add fake clock.
+  TestSleep(1010);
+  // No bitrate estimate given, no callback expected.
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(0);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveSendChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, NoSendingRtpModule)
+{
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // Lower the estimate. This should normally trigger a callback, but not now
+  // since we have no sending module.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(0);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  vie_remb_->Process();
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_render_impl.cc b/trunk/src/video_engine/vie_render_impl.cc
new file mode 100644
index 0000000..e37e5d4
--- /dev/null
+++ b/trunk/src/video_engine/vie_render_impl.cc
@@ -0,0 +1,398 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_render_impl.h"
+
+#include "engine_configurations.h"
+#include "modules/video_render/main/interface/video_render.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_render_manager.h"
+#include "video_engine/vie_renderer.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViERender* ViERender::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViERenderImpl* vie_render_impl = vie_impl;
+  // Increase ref count.
+  (*vie_render_impl)++;
+  return vie_render_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViERenderImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViERender::Release()");
+  // Decrease ref count
+  (*this)--;
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViERender release too many times");
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViERender reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViERenderImpl::ViERenderImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERenderImpl::ViERenderImpl() Ctor");
+}
+
+ViERenderImpl::~ViERenderImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERenderImpl::~ViERenderImpl() Dtor");
+}
+
+int ViERenderImpl::RegisterVideoRenderModule(
+  VideoRender& render_module) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (&render_module: %p)", __FUNCTION__, &render_module);
+  if (shared_data_->render_manager()->RegisterVideoRenderModule(
+      render_module) != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::DeRegisterVideoRenderModule(
+  VideoRender& render_module) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (&render_module: %p)", __FUNCTION__, &render_module);
+  if (shared_data_->render_manager()->DeRegisterVideoRenderModule(
+      render_module) != 0) {
+    // Error logging is done in ViERenderManager::DeRegisterVideoRenderModule.
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::AddRenderer(const int render_id, void* window,
+                               const unsigned int z_order, const float left,
+                               const float top, const float right,
+                               const float bottom) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (render_id: %d,  window: 0x%p, z_order: %u, left: %f, "
+               "top: %f, right: %f, bottom: %f)",
+               __FUNCTION__, render_id, window, z_order, left, top, right,
+               bottom);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  {
+    ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+    if (rs.Renderer(render_id)) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s - Renderer already exist %d.", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderAlreadyExists);
+      return -1;
+    }
+  }
+  if (render_id >= kViEChannelIdBase && render_id <= kViEChannelIdMax) {
+    // This is a channel.
+    ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
+    ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, window, z_order, left, top, right, bottom);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  } else {
+    // Camera or file.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFrameProviderBase* frame_provider = is.FrameProvider(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, window, z_order, left, top, right, bottom);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  }
+}
+
+int ViERenderImpl::RemoveRenderer(const int render_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(render_id: %d)", __FUNCTION__, render_id);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViERenderer* renderer = NULL;
+  {
+    ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+    renderer = rs.Renderer(render_id);
+    if (!renderer) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                   ViEId(shared_data_->instance_id()),
+                   "%s No render exist with render_id: %d", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    // Leave the scope lock since we don't want to lock two managers
+    // simultaneously.
+  }
+  if (render_id >= kViEChannelIdBase && render_id <= kViEChannelIdMax) {
+    // This is a channel.
+    ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
+    ViEChannel* channel = cm.Channel(render_id);
+    if (!channel) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                   ViEId(shared_data_->instance_id()),
+                   "%s: no channel with id %d exists ", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    channel->DeregisterFrameCallback(renderer);
+  } else {
+    // Provider owned by the input manager, i.e. a file or capture device.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFrameProviderBase* provider = is.FrameProvider(render_id);
+    if (!provider) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                   ViEId(shared_data_->instance_id()),
+                   "%s: no provider with id %d exists ", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    provider->DeregisterFrameCallback(renderer);
+  }
+  if (shared_data_->render_manager()->RemoveRenderStream(render_id) != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::StartRender(const int render_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), render_id),
+               "%s(channel: %d)", __FUNCTION__, render_id);
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render Id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+  if (renderer->StartRender() != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::StopRender(const int render_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), render_id),
+               "%s(channel: %d)", __FUNCTION__, render_id);
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render_id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+  if (renderer->StopRender() != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::ConfigureRender(int render_id, const unsigned int z_order,
+                                   const float left, const float top,
+                                   const float right, const float bottom) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), render_id),
+               "%s(channel: %d)", __FUNCTION__, render_id);
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render_id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+
+  if (renderer->ConfigureRenderer(z_order, left, top, right, bottom) != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::MirrorRenderStream(const int render_id, const bool enable,
+                                      const bool mirror_xaxis,
+                                      const bool mirror_yaxis) {
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render_id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+  if (renderer->EnableMirroring(render_id, enable, mirror_xaxis, mirror_yaxis)
+      != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::AddRenderer(const int render_id,
+                               RawVideoType video_input_format,
+                               ExternalRenderer* external_renderer) {
+  // Check if the client requested a format that we can convert the frames to.
+  if (video_input_format != kVideoI420 &&
+      video_input_format != kVideoYV12 &&
+      video_input_format != kVideoYUY2 &&
+      video_input_format != kVideoUYVY &&
+      video_input_format != kVideoARGB &&
+      video_input_format != kVideoRGB24 &&
+      video_input_format != kVideoRGB565 &&
+      video_input_format != kVideoARGB4444 &&
+      video_input_format != kVideoARGB1555) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: Unsupported video frame format requested",
+                 __FUNCTION__, render_id);
+    shared_data_->SetLastError(kViERenderInvalidFrameFormat);
+    return -1;
+  }
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  {
+    // Verify the renderer doesn't exist.
+    ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+    if (rs.Renderer(render_id)) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s - Renderer already exist %d.", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderAlreadyExists);
+      return -1;
+    }
+  }
+  if (render_id >= kViEChannelIdBase && render_id <= kViEChannelIdMax) {
+    // This is a channel.
+    ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
+    ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, NULL, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    if (renderer->SetExternalRenderer(render_id, video_input_format,
+                                      external_renderer) == -1) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  } else {
+    // Camera or file.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFrameProviderBase* frame_provider = is.FrameProvider(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, NULL, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    if (renderer->SetExternalRenderer(render_id, video_input_format,
+                                      external_renderer) == -1) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  }
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_render_impl.h b/trunk/src/video_engine/vie_render_impl.h
new file mode 100644
index 0000000..0c74b3d
--- /dev/null
+++ b/trunk/src/video_engine/vie_render_impl.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDER_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RENDER_IMPL_H_
+
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViERenderImpl
+    : public ViERender,
+      public ViERefCount {
+ public:
+  // Implements ViERender
+  virtual int Release();
+  virtual int RegisterVideoRenderModule(VideoRender& render_module);
+  virtual int DeRegisterVideoRenderModule(VideoRender& render_module);
+  virtual int AddRenderer(const int render_id, void* window,
+                          const unsigned int z_order, const float left,
+                          const float top, const float right,
+                          const float bottom);
+  virtual int RemoveRenderer(const int render_id);
+  virtual int StartRender(const int render_id);
+  virtual int StopRender(const int render_id);
+  virtual int ConfigureRender(int render_id, const unsigned int z_order,
+                              const float left, const float top,
+                              const float right, const float bottom);
+  virtual int MirrorRenderStream(const int render_id, const bool enable,
+                                 const bool mirror_xaxis,
+                                 const bool mirror_yaxis);
+  virtual int AddRenderer(const int render_id, RawVideoType video_input_format,
+                          ExternalRenderer* renderer);
+
+ protected:
+  ViERenderImpl(ViESharedData* shared_data);
+  virtual ~ViERenderImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RENDER_IMPL_H_
diff --git a/trunk/src/video_engine/vie_render_manager.cc b/trunk/src/video_engine/vie_render_manager.cc
new file mode 100644
index 0000000..810c079
--- /dev/null
+++ b/trunk/src/video_engine/vie_render_manager.cc
@@ -0,0 +1,230 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_render_manager.h"
+
+#include "engine_configurations.h"
+#include "modules/video_render/main/interface/video_render.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/rw_lock_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_renderer.h"
+
+namespace webrtc {
+
+// Scoped accessor: the base class presumably acquires shared (read) access
+// to the manager for this object's lifetime -- see ViEManagerScopedBase.
+ViERenderManagerScoped::ViERenderManagerScoped(
+    const ViERenderManager& vie_render_manager)
+    : ViEManagerScopedBase(vie_render_manager) {
+}
+
+// Returns the renderer registered for |render_id|, or NULL if none exists
+// (delegates to ViERenderManager::ViERenderPtr).
+ViERenderer* ViERenderManagerScoped::Renderer(WebRtc_Word32 render_id) const {
+  return static_cast<const ViERenderManager*>(vie_manager_)->ViERenderPtr(
+           render_id);
+}
+
+// Creates an empty render manager for engine |engine_id|: no render modules
+// or streams registered, internal (non-external) module mode.
+ViERenderManager::ViERenderManager(WebRtc_Word32 engine_id)
+    : list_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      engine_id_(engine_id),
+      use_external_render_module_(false) {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id),
+               "ViERenderManager::ViERenderManager(engine_id: %d) - "
+               "Constructor", engine_id);
+}
+
+// Tears down every remaining render stream. RemoveRenderStream also
+// destroys render modules that this manager created internally.
+ViERenderManager::~ViERenderManager() {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
+               "ViERenderManager Destructor, engine_id: %d", engine_id_);
+
+  while (stream_to_vie_renderer_.Size() != 0) {
+    MapItem* item = stream_to_vie_renderer_.First();
+    assert(item);
+    const WebRtc_Word32 render_id = item->GetId();
+    // The renderer is deleted in RemoveRenderStream, which also erases the
+    // map entry; |item| is invalid after that call.
+    item = NULL;
+    RemoveRenderStream(render_id);
+  }
+}
+
+// Registers an externally owned render module. Fails (-1) if another module
+// is already associated with the same window. On success the manager stops
+// creating/destroying modules itself (|use_external_render_module_|).
+WebRtc_Word32 ViERenderManager::RegisterVideoRenderModule(
+    VideoRender& render_module) {
+  // See if there is already a render module registered for the window that
+  // the registrant render module is associated with.
+  VideoRender* current_module = FindRenderModule(render_module.Window());
+  if (current_module) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "A module is already registered for this window (window=%p, "
+                 "current module=%p, registrant module=%p.",
+                 render_module.Window(), current_module, &render_module);
+    return -1;
+  }
+
+  // Register module.
+  render_list_.PushBack(static_cast<void*>(&render_module));
+  use_external_render_module_ = true;
+  return 0;
+}
+
+// Removes a previously registered external render module from the list.
+// Fails (-1) if the module still has active streams or was never
+// registered. The caller keeps ownership of the module.
+// NOTE(review): |use_external_render_module_| is not reset here even when
+// the last external module is removed -- confirm whether that is intended.
+WebRtc_Word32 ViERenderManager::DeRegisterVideoRenderModule(
+    VideoRender& render_module) {
+  // Check if there are streams in the module.
+  WebRtc_UWord32 n_streams = render_module.GetNumIncomingRenderStreams();
+  if (n_streams != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "There are still %d streams in this module, cannot "
+                 "de-register", n_streams);
+    return -1;
+  }
+
+  // Erase the render module from the map.
+  ListItem* list_item = NULL;
+  bool found = false;
+  for (list_item = render_list_.First(); list_item != NULL;
+       list_item = render_list_.Next(list_item)) {
+    if (&render_module == static_cast<VideoRender*>(list_item->GetItem())) {
+      // We've found our renderer.
+      render_list_.Erase(list_item);
+      found = true;
+      break;
+    }
+  }
+  if (!found) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "Module not registered");
+    return -1;
+  }
+  return 0;
+}
+
+// Creates a new render stream for |render_id| drawn in |window|, creating a
+// VideoRender module for the window if one does not exist yet. Returns the
+// new renderer (owned by this manager, keyed by |render_id|) or NULL on
+// failure (duplicate id, module creation failure, or stream creation
+// failure).
+ViERenderer* ViERenderManager::AddRenderStream(const WebRtc_Word32 render_id,
+                                               void* window,
+                                               const WebRtc_UWord32 z_order,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom) {
+  CriticalSectionScoped cs(list_cs_.get());
+
+  if (stream_to_vie_renderer_.Find(render_id) != NULL) {
+    // This stream is already added to a renderer, not allowed!
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "Render stream already exists");
+    return NULL;
+  }
+
+  // Get the render module for this window.
+  VideoRender* render_module = FindRenderModule(window);
+  if (render_module == NULL) {
+    // No render module for this window, create a new one.
+    render_module = VideoRender::CreateVideoRender(ViEModuleId(engine_id_, -1),
+                                                  window, false);
+    if (!render_module) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "Could not create new render module");
+      return NULL;
+    }
+    render_list_.PushBack(static_cast<void*>(render_module));
+  }
+
+  ViERenderer* vie_renderer = ViERenderer::CreateViERenderer(render_id,
+                                                             engine_id_,
+                                                             *render_module,
+                                                             *this, z_order,
+                                                             left, top, right,
+                                                             bottom);
+  if (!vie_renderer) {
+    // NOTE(review): a module created just above stays in |render_list_| on
+    // this failure path; it is only reclaimed when a later stream succeeds
+    // or at destruction -- confirm this is acceptable.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, render_id),
+                 "Could not create new render stream");
+    return NULL;
+  }
+  stream_to_vie_renderer_.Insert(render_id, vie_renderer);
+  return vie_renderer;
+}
+
+// Removes and deletes the render stream for |render_id|. If the stream's
+// render module was created internally (not externally registered) and has
+// no other streams left, the module is destroyed as well. Returns 0 also
+// when no such stream exists (logged as a warning).
+WebRtc_Word32 ViERenderManager::RemoveRenderStream(
+    const WebRtc_Word32 render_id) {
+  // We need exclusive right to the items in the render manager to delete a
+  // stream.
+  // BUG FIX: the original statement |ViEManagerWriteScoped(*this);|
+  // constructed an *unnamed temporary* that was destroyed at the end of the
+  // full-expression, so the write lock was released immediately and never
+  // protected the code below. Naming the guard keeps it held until return.
+  ViEManagerWriteScoped wl(*this);
+
+  CriticalSectionScoped cs(list_cs_.get());
+  MapItem* map_item = stream_to_vie_renderer_.Find(render_id);
+  if (!map_item) {
+    // No such stream; treated as success.
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "No renderer for this stream found, channel_id");
+    return 0;
+  }
+
+  ViERenderer* vie_renderer = static_cast<ViERenderer*>(map_item->GetItem());
+  assert(vie_renderer);
+
+  // Get the render module pointer for this vie_render object. Must be read
+  // before deleting |vie_renderer|, which owns the reference.
+  VideoRender& renderer = vie_renderer->RenderModule();
+
+  // Delete the vie_render.
+  // This deletes the stream in the render module.
+  delete vie_renderer;
+
+  // Remove from the stream map.
+  stream_to_vie_renderer_.Erase(map_item);
+
+  // Check if there are other streams in the module. Externally registered
+  // modules are never destroyed here -- their owner de-registers them.
+  if (!use_external_render_module_ &&
+      renderer.GetNumIncomingRenderStreams() == 0) {
+    // Erase the render module from the map.
+    ListItem* list_item = NULL;
+    for (list_item = render_list_.First(); list_item != NULL;
+         list_item = render_list_.Next(list_item)) {
+      if (&renderer == static_cast<VideoRender*>(list_item->GetItem())) {
+        // We've found our renderer.
+        render_list_.Erase(list_item);
+        break;
+      }
+    }
+    // Destroy the module.
+    VideoRender::DestroyVideoRender(&renderer);
+  }
+  return 0;
+}
+
+// Returns the render module associated with |window|, or NULL if none is
+// registered. A NULL item in the list also terminates the search with NULL.
+// Assumed to be called with |list_cs_| held (see header comment).
+VideoRender* ViERenderManager::FindRenderModule(void* window) {
+  VideoRender* renderer = NULL;
+  ListItem* list_item = NULL;
+  for (list_item = render_list_.First(); list_item != NULL;
+       list_item = render_list_.Next(list_item)) {
+    renderer = static_cast<VideoRender*>(list_item->GetItem());
+    if (renderer == NULL) {
+      break;
+    }
+    if (renderer->Window() == window) {
+      // We've found the render module.
+      break;
+    }
+    renderer = NULL;  // Not a match; clear so a failed search returns NULL.
+  }
+  return renderer;
+}
+
+// Looks up the renderer for |render_id| in the stream map; NULL if absent.
+// Used by ViERenderManagerScoped, which holds scoped access while calling.
+ViERenderer* ViERenderManager::ViERenderPtr(WebRtc_Word32 render_id) const {
+  ViERenderer* renderer = NULL;
+  MapItem* map_item = stream_to_vie_renderer_.Find(render_id);
+  if (!map_item) {
+    // No such stream in any renderer.
+    return NULL;
+  }
+  renderer = static_cast<ViERenderer*>(map_item->GetItem());
+
+  return renderer;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_render_manager.h b/trunk/src/video_engine/vie_render_manager.h
new file mode 100644
index 0000000..242b63a
--- /dev/null
+++ b/trunk/src/video_engine/vie_render_manager.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
+
+#include "system_wrappers/interface/list_wrapper.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RWLockWrapper;
+class VideoRender;
+class VideoRenderCallback;
+class ViERenderer;
+
+// Owns the mapping from render stream ids to ViERenderer objects and the
+// list of VideoRender modules (internally created or externally
+// registered). Scoped access is provided via ViERenderManagerScoped.
+class ViERenderManager : private ViEManagerBase {
+  friend class ViERenderManagerScoped;
+ public:
+  explicit ViERenderManager(WebRtc_Word32 engine_id);
+  ~ViERenderManager();
+
+  // Adds/removes an externally owned render module.
+  WebRtc_Word32 RegisterVideoRenderModule(VideoRender& render_module);
+  WebRtc_Word32 DeRegisterVideoRenderModule(VideoRender& render_module);
+
+  // Creates a render stream in |window|; returns NULL on failure. The
+  // returned renderer is owned by this manager.
+  ViERenderer* AddRenderStream(const WebRtc_Word32 render_id,
+                               void* window,
+                               const WebRtc_UWord32 z_order,
+                               const float left,
+                               const float top,
+                               const float right,
+                               const float bottom);
+
+  // Deletes the stream and, for internally created modules, the module
+  // itself when it has no streams left.
+  WebRtc_Word32 RemoveRenderStream(WebRtc_Word32 render_id);
+
+ private:
+  // Returns a pointer to the render module if it exists in the render list.
+  // Assumed protected.
+  VideoRender* FindRenderModule(void* window);
+
+  // Methods used by ViERenderScoped.
+  ViERenderer* ViERenderPtr(WebRtc_Word32 render_id) const;
+
+  scoped_ptr<CriticalSectionWrapper> list_cs_;  // Guards the containers.
+  WebRtc_Word32 engine_id_;
+  MapWrapper stream_to_vie_renderer_;  // Protected by ViEManagerBase.
+  ListWrapper render_list_;  // VideoRender* entries.
+  bool use_external_render_module_;
+};
+
+// RAII helper granting scoped access to a ViERenderManager; see
+// ViEManagerScopedBase for the locking semantics.
+class ViERenderManagerScoped: private ViEManagerScopedBase {
+ public:
+  explicit ViERenderManagerScoped(const ViERenderManager& vie_render_manager);
+
+  // Returns a pointer to the ViERender object, or NULL if the id is unknown.
+  ViERenderer* Renderer(WebRtc_Word32 render_id) const;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
diff --git a/trunk/src/video_engine/vie_renderer.cc b/trunk/src/video_engine/vie_renderer.cc
new file mode 100644
index 0000000..7e56b7a
--- /dev/null
+++ b/trunk/src/video_engine/vie_renderer.cc
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_renderer.h"
+
+#include "common_video/libyuv/include/libyuv.h"
+#include "modules/video_render/main/interface/video_render.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "video_engine/vie_render_manager.h"
+
+namespace webrtc {
+
+// Factory: constructs a renderer and registers its incoming render stream
+// with |render_module| (via Init). Returns NULL if Init fails; the
+// half-constructed object is deleted in that case.
+ViERenderer* ViERenderer::CreateViERenderer(const WebRtc_Word32 render_id,
+                                            const WebRtc_Word32 engine_id,
+                                            VideoRender& render_module,
+                                            ViERenderManager& render_manager,
+                                            const WebRtc_UWord32 z_order,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) {
+  ViERenderer* self = new ViERenderer(render_id, engine_id, render_module,
+                                      render_manager);
+  // The |!self| check is defensive; standard operator new throws rather
+  // than returning NULL.
+  if (!self || self->Init(z_order, left, top, right, bottom) != 0) {
+    delete self;
+    self = NULL;
+  }
+  return self;
+}
+
+// Unregisters the incoming render stream (only if Init succeeded, i.e.
+// |render_callback_| was set) and frees the external-render callback.
+ViERenderer::~ViERenderer(void) {
+  if (render_callback_)
+    render_module_.DeleteIncomingRenderStream(render_id_);
+
+  // The NULL check is redundant (delete NULL is a no-op) but harmless.
+  if (incoming_external_callback_)
+    delete incoming_external_callback_;
+}
+
+// Starts rendering of this stream in the underlying render module.
+WebRtc_Word32 ViERenderer::StartRender() {
+  return render_module_.StartRender(render_id_);
+}
+// Stops rendering of this stream in the underlying render module.
+WebRtc_Word32 ViERenderer::StopRender() {
+  return render_module_.StopRender(render_id_);
+}
+
+// Copies the most recently rendered frame for |renderID| into
+// |video_frame| (forwarded to the render module).
+WebRtc_Word32 ViERenderer::GetLastRenderedFrame(const WebRtc_Word32 renderID,
+                                                VideoFrame& video_frame) {
+  return render_module_.GetLastRenderedFrame(renderID, video_frame);
+}
+
+// Repositions this stream within its window (z-order and rectangle;
+// semantics defined by the render module).
+WebRtc_Word32 ViERenderer::ConfigureRenderer(const unsigned int z_order,
+                                             const float left,
+                                             const float top,
+                                             const float right,
+                                             const float bottom) {
+  return render_module_.ConfigureRenderer(render_id_, z_order, left, top, right,
+                                          bottom);
+}
+
+// Exposes the render module this stream belongs to (used by the manager
+// when tearing the stream down).
+VideoRender& ViERenderer::RenderModule() {
+  return render_module_;
+}
+
+// Enables/disables mirroring of the stream around the x and/or y axis.
+// Note: uses the |render_id| argument, not the member |render_id_|.
+WebRtc_Word32 ViERenderer::EnableMirroring(const WebRtc_Word32 render_id,
+                                           const bool enable,
+                                           const bool mirror_xaxis,
+                                           const bool mirror_yaxis) {
+  return render_module_.MirrorRenderStream(render_id, enable, mirror_xaxis,
+                                           mirror_yaxis);
+}
+
+// Sets the image shown when no frame arrives within |timeout_value|
+// (units defined by the render module; presumably milliseconds -- confirm).
+WebRtc_Word32 ViERenderer::SetTimeoutImage(const VideoFrame& timeout_image,
+                                           const WebRtc_Word32 timeout_value) {
+  return render_module_.SetTimeoutImage(render_id_, timeout_image,
+                                        timeout_value);
+}
+
+// Sets the image shown until the first frame is delivered.
+WebRtc_Word32  ViERenderer::SetRenderStartImage(const VideoFrame& start_image) {
+  return render_module_.SetStartImage(render_id_, start_image);
+}
+
+// Routes rendered frames of |render_id| to |external_renderer|, converting
+// them to |video_input_format| first. Fails (-1) if the internal callback
+// was never allocated.
+WebRtc_Word32 ViERenderer::SetExternalRenderer(
+    const WebRtc_Word32 render_id,
+    RawVideoType video_input_format,
+    ExternalRenderer* external_renderer) {
+  if (!incoming_external_callback_)
+    return -1;
+
+  // Return value ignored; SetViEExternalRenderer currently always returns 0.
+  incoming_external_callback_->SetViEExternalRenderer(external_renderer,
+                                                      video_input_format);
+  return render_module_.AddExternalRenderCallback(render_id,
+                                                  incoming_external_callback_);
+}
+
+// Private constructor; use CreateViERenderer, which also calls Init.
+// |render_callback_| stays NULL until Init succeeds; the external-render
+// callback object is always allocated up front.
+ViERenderer::ViERenderer(const WebRtc_Word32 render_id,
+                         const WebRtc_Word32 engine_id,
+                         VideoRender& render_module,
+                         ViERenderManager& render_manager)
+    : render_id_(render_id),
+      engine_id_(engine_id),
+      render_module_(render_module),
+      render_manager_(render_manager),
+      render_callback_(NULL),
+      incoming_external_callback_(new ViEExternalRendererImpl()) {
+}
+
+// Registers this stream with the render module and stores the returned
+// per-stream callback. Returns -1 on failure (module logs the reason).
+WebRtc_Word32 ViERenderer::Init(const WebRtc_UWord32 z_order,
+                                const float left,
+                                const float top,
+                                const float right,
+                                const float bottom) {
+  render_callback_ =
+      static_cast<VideoRenderCallback*>(render_module_.AddIncomingRenderStream(
+          render_id_, z_order, left, top, right, bottom));
+  if (!render_callback_) {
+    // Logging done.
+    return -1;
+  }
+  return 0;
+}
+
+// ViEFrameCallback: hands an incoming frame to the render module's
+// per-stream callback. |id|, |num_csrcs| and |CSRC| are ignored here.
+// Assumes Init succeeded (|render_callback_| non-NULL).
+void ViERenderer::DeliverFrame(int id,
+                               VideoFrame& video_frame,
+                               int num_csrcs,
+                               const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+  render_callback_->RenderFrame(render_id_, video_frame);
+}
+
+// ViEFrameCallback: delay changes are intentionally ignored by a renderer.
+void ViERenderer::DelayChanged(int id, int frame_delay) {}
+
+// ViEFrameCallback: a renderer has no preferred frame settings; always -1.
+int ViERenderer::GetPreferedFrameSettings(int& width,
+                                          int& height,
+                                          int& frame_rate) {
+    return -1;
+}
+
+// ViEFrameCallback: the frame provider is gone, so tear this stream down.
+// Note: RemoveRenderStream deletes |this|.
+void ViERenderer::ProviderDestroyed(int id) {
+  // Remove the render stream since the provider is destroyed.
+  render_manager_.RemoveRenderStream(render_id_);
+}
+
+// Starts with no external renderer attached and an unknown output format;
+// width/height of 0 force a FrameSizeChange on the first delivered frame.
+ViEExternalRendererImpl::ViEExternalRendererImpl()
+    : external_renderer_(NULL),
+      external_renderer_format_(kVideoUnknown),
+      external_renderer_width_(0),
+      external_renderer_height_(0),
+      converted_frame_(new VideoFrame()){
+}
+
+// Attaches the application's renderer and the raw format frames should be
+// converted to before delivery. Always returns 0.
+int ViEExternalRendererImpl::SetViEExternalRenderer(
+    ExternalRenderer* external_renderer,
+    RawVideoType video_input_format) {
+  external_renderer_ = external_renderer;
+  external_renderer_format_ = video_input_format;
+  return 0;
+}
+
+// VideoRenderCallback: converts the incoming I420 frame to the format
+// requested via SetViEExternalRenderer, notifies the external renderer of
+// size changes, and delivers the (possibly converted) buffer. Assumes
+// SetViEExternalRenderer was called first (|external_renderer_| non-NULL).
+WebRtc_Word32 ViEExternalRendererImpl::RenderFrame(
+    const WebRtc_UWord32 stream_id,
+    VideoFrame&   video_frame) {
+  VideoFrame* out_frame = converted_frame_.get();
+
+  // Convert to requested format.
+  VideoType type =
+      RawVideoTypeToCommonVideoVideoType(external_renderer_format_);
+  int buffer_size = CalcBufferSize(type, video_frame.Width(),
+                                   video_frame.Height());
+  if (buffer_size <= 0) {
+    // Unsupported video format.
+    assert(false);
+    return -1;
+  }
+  converted_frame_->VerifyAndAllocate(buffer_size);
+
+  switch (external_renderer_format_) {
+    case kVideoI420:
+      // Already in the requested format; deliver the incoming frame as-is.
+      out_frame = &video_frame;
+      break;
+    case kVideoYV12:
+    case kVideoYUY2:
+    case kVideoUYVY:
+    case kVideoARGB:
+    case kVideoRGB24:
+      {
+        ConvertFromI420(video_frame.Buffer(), video_frame.Width(), type, 0,
+                        video_frame.Width(), video_frame.Height(),
+                        converted_frame_->Buffer());
+      }
+      break;
+    case kVideoIYUV:
+      // No conversion available.
+      // BUG FIX: previously |out_frame| was left pointing at
+      // |converted_frame_|, whose buffer is never written for this format,
+      // so uninitialized memory was delivered to the external renderer.
+      // Deliver nothing instead.
+      out_frame = NULL;
+      break;
+    case kVideoRGB565:
+      ConvertI420ToRGB565(video_frame.Buffer(), converted_frame_->Buffer(),
+                          video_frame.Width(), video_frame.Height());
+      break;
+    case kVideoARGB4444:
+      ConvertI420ToARGB4444(video_frame.Buffer(), converted_frame_->Buffer(),
+                            video_frame.Width(), video_frame.Height(), 0);
+      break;
+    case kVideoARGB1555 :
+      ConvertI420ToARGB1555(video_frame.Buffer(), converted_frame_->Buffer(),
+                            video_frame.Width(), video_frame.Height(), 0);
+      break;
+    default:
+      assert(false);
+      out_frame = NULL;
+      break;
+  }
+
+  // Report size changes before delivering the frame so the receiver can
+  // reallocate its buffers first.
+  if (external_renderer_width_ != video_frame.Width() ||
+      external_renderer_height_ != video_frame.Height()) {
+    external_renderer_width_ = video_frame.Width();
+    external_renderer_height_ = video_frame.Height();
+    external_renderer_->FrameSizeChange(external_renderer_width_,
+                                        external_renderer_height_, stream_id);
+  }
+
+  if (out_frame) {
+    external_renderer_->DeliverFrame(out_frame->Buffer(),
+                                     out_frame->Length(),
+                                     video_frame.TimeStamp(),
+                                     video_frame.RenderTimeMs());
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_renderer.h b/trunk/src/video_engine/vie_renderer.h
new file mode 100644
index 0000000..028d72b
--- /dev/null
+++ b/trunk/src/video_engine/vie_renderer.h
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDERER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RENDERER_H_
+
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class VideoRender;
+class VideoRenderCallback;
+class ViERenderManager;
+
+// Adapter from the render module's VideoRenderCallback to an
+// application-supplied ExternalRenderer, converting each frame to the
+// requested raw format before delivery.
+class ViEExternalRendererImpl : public VideoRenderCallback {
+ public:
+  ViEExternalRendererImpl();
+  virtual ~ViEExternalRendererImpl() {}
+
+  // Must be called before frames arrive; sets the target renderer and the
+  // output pixel format. Always returns 0.
+  int SetViEExternalRenderer(ExternalRenderer* external_renderer,
+                             RawVideoType video_input_format);
+
+  // Implements VideoRenderCallback.
+  virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id,
+                                    VideoFrame& video_frame);
+
+ private:
+  ExternalRenderer* external_renderer_;  // Not owned.
+  RawVideoType external_renderer_format_;
+  // Last size reported via FrameSizeChange; 0 until the first frame.
+  WebRtc_UWord32 external_renderer_width_;
+  WebRtc_UWord32 external_renderer_height_;
+  scoped_ptr<VideoFrame> converted_frame_;  // Scratch buffer for conversion.
+};
+
+// One render stream: receives frames as a ViEFrameCallback and forwards
+// them into a VideoRender module (or, via ViEExternalRendererImpl, to an
+// application renderer). Created only through CreateViERenderer.
+class ViERenderer: public ViEFrameCallback {
+ public:
+  // Factory; returns NULL if stream registration (Init) fails.
+  static ViERenderer* CreateViERenderer(const WebRtc_Word32 render_id,
+                                        const WebRtc_Word32 engine_id,
+                                        VideoRender& render_module,
+                                        ViERenderManager& render_manager,
+                                        const WebRtc_UWord32 z_order,
+                                        const float left,
+                                        const float top,
+                                        const float right,
+                                        const float bottom);
+  ~ViERenderer(void);
+
+  WebRtc_Word32 StartRender();
+  WebRtc_Word32 StopRender();
+
+  WebRtc_Word32 GetLastRenderedFrame(const WebRtc_Word32 renderID,
+                                     VideoFrame& video_frame);
+
+  WebRtc_Word32 ConfigureRenderer(const unsigned int z_order,
+                                  const float left,
+                                  const float top,
+                                  const float right,
+                                  const float bottom);
+
+  VideoRender& RenderModule();
+
+  WebRtc_Word32 EnableMirroring(const WebRtc_Word32 render_id,
+                                const bool enable,
+                                const bool mirror_xaxis,
+                                const bool mirror_yaxis);
+
+  WebRtc_Word32 SetTimeoutImage(const VideoFrame& timeout_image,
+                                const WebRtc_Word32 timeout_value);
+  WebRtc_Word32 SetRenderStartImage(const VideoFrame& start_image);
+  WebRtc_Word32 SetExternalRenderer(const WebRtc_Word32 render_id,
+                                    RawVideoType video_input_format,
+                                    ExternalRenderer* external_renderer);
+
+ private:
+  ViERenderer(const WebRtc_Word32 render_id, const WebRtc_Word32 engine_id,
+                VideoRender& render_module,
+                ViERenderManager& render_manager);
+
+  // Registers the incoming render stream; -1 on failure.
+  WebRtc_Word32 Init(const WebRtc_UWord32 z_order,
+                     const float left,
+                     const float top,
+                     const float right,
+                     const float bottom);
+
+  // Implement ViEFrameCallback
+  virtual void DeliverFrame(int id,
+                            VideoFrame& video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  virtual void DelayChanged(int id, int frame_delay);
+  virtual int GetPreferedFrameSettings(int& width,
+                                       int& height,
+                                       int& frame_rate);
+  virtual void ProviderDestroyed(int id);
+
+  // NOTE(review): unsigned here, but the constructor takes a signed
+  // WebRtc_Word32 -- confirm the intended id type.
+  WebRtc_UWord32 render_id_;
+  WebRtc_Word32 engine_id_;
+  VideoRender& render_module_;
+  ViERenderManager& render_manager_;
+  VideoRenderCallback* render_callback_;  // Set by Init; NULL on failure.
+  ViEExternalRendererImpl* incoming_external_callback_;  // Owned.
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RENDERER_H_
diff --git a/trunk/src/video_engine/vie_rtp_rtcp_impl.cc b/trunk/src/video_engine/vie_rtp_rtcp_impl.cc
new file mode 100644
index 0000000..497481c
--- /dev/null
+++ b/trunk/src/video_engine/vie_rtp_rtcp_impl.cc
@@ -0,0 +1,957 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_rtp_rtcp_impl.h"
+
+#include "engine_configurations.h"
+#include "system_wrappers/interface/file_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Helper methods for converting between module format and ViE API format.
+
+// Maps the public ViERTCPMode API enum onto the RTP/RTCP module's internal
+// RTCPMethod enum. Unknown values assert in debug builds and fall back to
+// kRtcpOff.
+static RTCPMethod ViERTCPModeToRTCPMethod(ViERTCPMode api_mode) {
+  if (api_mode == kRtcpNone)
+    return kRtcpOff;
+  if (api_mode == kRtcpCompound_RFC4585)
+    return kRtcpCompound;
+  if (api_mode == kRtcpNonCompound_RFC5506)
+    return kRtcpNonCompound;
+  // Unreachable for valid enum values.
+  assert(false);
+  return kRtcpOff;
+}
+
+// Inverse of ViERTCPModeToRTCPMethod(): maps the module's RTCPMethod enum
+// back to the public ViERTCPMode API enum. Unknown values assert in debug
+// builds and fall back to kRtcpNone.
+static ViERTCPMode RTCPMethodToViERTCPMode(RTCPMethod module_method) {
+  if (module_method == kRtcpOff)
+    return kRtcpNone;
+  if (module_method == kRtcpCompound)
+    return kRtcpCompound_RFC4585;
+  if (module_method == kRtcpNonCompound)
+    return kRtcpNonCompound_RFC5506;
+  // Unreachable for valid enum values.
+  assert(false);
+  return kRtcpNone;
+}
+
+// Maps the public ViEKeyFrameRequestMethod API enum onto the RTP/RTCP
+// module's internal KeyFrameRequestMethod enum.
+// NOTE(review): kViEKeyFrameRequestNone maps to kKeyFrameReqFirRtp rather
+// than a dedicated "off" value -- presumably the module has no "none"
+// method and callers gate on the API value before reaching this point;
+// confirm against the call sites.
+static KeyFrameRequestMethod APIRequestToModuleRequest(
+  ViEKeyFrameRequestMethod api_method) {
+  switch (api_method) {
+    case kViEKeyFrameRequestNone:
+      return kKeyFrameReqFirRtp;
+
+    case kViEKeyFrameRequestPliRtcp:
+      return kKeyFrameReqPliRtcp;
+
+    case kViEKeyFrameRequestFirRtp:
+      return kKeyFrameReqFirRtp;
+
+    case kViEKeyFrameRequestFirRtcp:
+      return kKeyFrameReqFirRtcp;
+  }
+  // Unreachable for valid enum values.
+  assert(false);
+  return kKeyFrameReqFirRtp;
+}
+
+// Returns the ViERTP_RTCP sub-API for |video_engine| and increases its
+// reference count; the caller must balance every successful call with
+// Release(). Returns NULL if |video_engine| is NULL or the sub-API was
+// compiled out (WEBRTC_VIDEO_ENGINE_RTP_RTCP_API undefined).
+ViERTP_RTCP* ViERTP_RTCP::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+  if (!video_engine) {
+    return NULL;
+  }
+  // VideoEngineImpl derives from the sub-API impl classes, so the cast plus
+  // implicit upcast yields the ViERTP_RTCPImpl interface of this engine.
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViERTP_RTCPImpl* vie_rtpimpl = vie_impl;
+  // Increase ref count.
+  (*vie_rtpimpl)++;
+  return vie_rtpimpl;
+#else
+  return NULL;
+#endif
+}
+
+// Releases one reference taken by GetInterface(). Returns the remaining
+// reference count, or -1 (with kViEAPIDoesNotExist set) if Release() has
+// been called more times than GetInterface().
+int ViERTP_RTCPImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCP::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViERTP_RTCP release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCP reference count: %d", ref_count);
+  return ref_count;
+}
+
+// Constructor: stores the engine-wide shared data; does not take ownership.
+ViERTP_RTCPImpl::ViERTP_RTCPImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCPImpl::ViERTP_RTCPImpl() Ctor");
+}
+
+// Destructor: trace only; shared_data_ is owned elsewhere.
+ViERTP_RTCPImpl::~ViERTP_RTCPImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCPImpl::~ViERTP_RTCPImpl() Dtor");
+}
+
+// Sets the SSRC for the outgoing RTP stream on |video_channel|. |usage|
+// selects the stream type and |simulcast_idx| the simulcast layer.
+// Returns 0 on success, -1 on failure (last error set on shared_data_).
+int ViERTP_RTCPImpl::SetLocalSSRC(const int video_channel,
+                                  const unsigned int SSRC,
+                                  const StreamType usage,
+                                  const unsigned char simulcast_idx) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, SSRC: %d)", __FUNCTION__, video_channel, SSRC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    // No channel with this id exists.
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->SetSSRC(SSRC, usage, simulcast_idx) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Registers the expected |SSRC| of a given |usage| (stream type) on the
+// incoming stream of |videoChannel|. Returns 0 on success, -1 on failure
+// (last error set on shared_data_).
+int ViERTP_RTCPImpl::SetRemoteSSRCType(const int videoChannel,
+                                       const StreamType usage,
+                                       const unsigned int SSRC) const {
+  // Fix: the trace arguments were passed as (usage, videoChannel), i.e.
+  // swapped relative to the "%s(channel: %d, usage:%d SSRC: 0x%x)" format.
+  WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
+               ViEId(shared_data_->instance_id(), videoChannel),
+               "%s(channel: %d, usage:%d SSRC: 0x%x)",
+               __FUNCTION__, videoChannel, usage, SSRC);
+
+  // Get the channel
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
+  if (ptrViEChannel == NULL) {
+    // The channel doesn't exist.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(shared_data_->instance_id(), videoChannel),
+                 "%s: Channel %d doesn't exist",
+                 __FUNCTION__, videoChannel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (ptrViEChannel->SetRemoteSSRCType(usage, SSRC) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the SSRC currently used for the outgoing RTP stream into |SSRC|.
+// Returns 0 on success, -1 on failure (last error set on shared_data_).
+int ViERTP_RTCPImpl::GetLocalSSRC(const int video_channel,
+                                  unsigned int& SSRC) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, SSRC: %d)", __FUNCTION__, video_channel, SSRC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->GetLocalSSRC((WebRtc_UWord32&) SSRC) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Retrieves the SSRC observed on the incoming RTP stream into |SSRC|.
+// Returns 0 on success, -1 on failure (last error set on shared_data_).
+int ViERTP_RTCPImpl::GetRemoteSSRC(const int video_channel,
+                                   unsigned int& SSRC) const {
+  // Fix: |SSRC| was passed as an extra trace argument although the
+  // "%s(channel: %d)" format has no specifier for it (and its value is
+  // meaningless before the call anyway).
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRemoteSSRC(SSRC) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Copies the contributing sources (CSRCs) reported on the incoming RTP
+// stream into the |CSRCs| array. Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::GetRemoteCSRCs(const int video_channel,
+                                    unsigned int CSRCs[kRtpCsrcSize]) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->GetRemoteCSRC(CSRCs) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Sets the first sequence number for the outgoing RTP stream. Only allowed
+// while the channel is not sending. Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::SetStartSequenceNumber(const int video_channel,
+                                            unsigned short sequence_number) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, sequence_number: %u)", __FUNCTION__,
+               video_channel, sequence_number);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // The sequence number cannot be changed mid-stream.
+  if (channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already sending.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpAlreadySending);
+    return -1;
+  }
+  if (channel->SetStartSequenceNumber(sequence_number) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Configures the RTCP mode (off, compound, or non-compound) used by
+// |video_channel|. Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::SetRTCPStatus(const int video_channel,
+                                   const ViERTCPMode rtcp_mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, mode: %d)", __FUNCTION__, video_channel,
+               rtcp_mode);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // Translate the API enum to the module enum before applying it.
+  if (channel->SetRTCPMode(ViERTCPModeToRTCPMethod(rtcp_mode)) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Retrieves the channel's current RTCP mode into |rtcp_mode|.
+// Returns 0 on success, -1 on failure (last error set on shared_data_).
+int ViERTP_RTCPImpl::GetRTCPStatus(const int video_channel,
+                                   ViERTCPMode& rtcp_mode) const {
+  // Fix: |rtcp_mode| was passed as an extra trace argument although the
+  // "%s(channel: %d)" format has no specifier for it (and it is an output
+  // parameter with no meaningful value here).
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  RTCPMethod module_mode = kRtcpOff;
+  if (vie_channel->GetRTCPMode(module_mode) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: could not get current RTCP mode", __FUNCTION__);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  // Translate the module enum back to the API enum.
+  rtcp_mode = RTCPMethodToViERTCPMode(module_mode);
+  return 0;
+}
+
+// Sets the RTCP canonical name (CNAME) for the outgoing stream. Only
+// allowed while the channel is not sending. Returns 0 on success, -1 on
+// failure.
+int ViERTP_RTCPImpl::SetRTCPCName(const int video_channel,
+                                  const char rtcp_cname[KMaxRTCPCNameLength]) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, name: %s)", __FUNCTION__, video_channel,
+               rtcp_cname);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // The CNAME cannot be changed mid-stream.
+  if (channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already sending.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpAlreadySending);
+    return -1;
+  }
+  if (channel->SetRTCPCName(rtcp_cname) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Retrieves the locally configured RTCP CNAME into |rtcp_cname|.
+// Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::GetRTCPCName(const int video_channel,
+                                  char rtcp_cname[KMaxRTCPCNameLength]) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->GetRTCPCName(rtcp_cname) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Retrieves the RTCP CNAME reported by the remote side into |rtcp_cname|.
+// Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::GetRemoteRTCPCName(
+    const int video_channel,
+    char rtcp_cname[KMaxRTCPCNameLength]) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->GetRemoteRTCPCName(rtcp_cname) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Sends an application-defined RTCP (APP) packet on |video_channel|.
+// Preconditions, checked in order: the channel exists, it is currently
+// sending, and RTCP is enabled. |data| must contain
+// |data_length_in_bytes| bytes; RFC 3550 requires APP payload lengths to be
+// a multiple of 4 -- presumably enforced by the RTP/RTCP module, not here
+// (TODO confirm). Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
+  const int video_channel,
+  const unsigned char sub_type,
+  unsigned int name,
+  const char* data,
+  unsigned short data_length_in_bytes) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, sub_type: %c, name: %d, data: x, length: %u)",
+               __FUNCTION__, video_channel, sub_type, name,
+               data_length_in_bytes);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // APP packets can only be sent on an active stream.
+  if (!vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d not sending", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpNotSending);
+    return -1;
+  }
+  // APP packets are RTCP, so RTCP must be enabled on the channel.
+  RTCPMethod method;
+  if (vie_channel->GetRTCPMode(method) != 0 || method == kRtcpOff) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: RTCP disabled on channel %d.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpRtcpDisabled);
+    return -1;
+  }
+  if (vie_channel->SendApplicationDefinedRTCPPacket(
+        sub_type, name, reinterpret_cast<const WebRtc_UWord8*>(data),
+        data_length_in_bytes) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Enables or disables NACK-based retransmission on |video_channel| and
+// refreshes the encoder's protection settings to match. Returns 0 on
+// success, -1 on failure.
+int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->SetNACKStatus(enable) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: failed for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+
+  // The protection method affects encoder settings, so the encoder must be
+  // told the configuration changed.
+  ViEEncoder* encoder = cs.Encoder(video_channel);
+  if (encoder == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  encoder->UpdateProtectionMethod();
+  return 0;
+}
+
+// Enables or disables RED/FEC protection on |video_channel| using the given
+// RED and FEC payload types, then refreshes the encoder's protection
+// settings. Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::SetFECStatus(const int video_channel, const bool enable,
+                                  const unsigned char payload_typeRED,
+                                  const unsigned char payload_typeFEC) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, payload_typeRED: %u, "
+               "payloadTypeFEC: %u)",
+               __FUNCTION__, video_channel, enable, payload_typeRED,
+               payload_typeFEC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->SetFECStatus(enable, payload_typeRED, payload_typeFEC) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: failed for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  // FEC changes the encoder's protection configuration.
+  ViEEncoder* encoder = cs.Encoder(video_channel);
+  if (encoder == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  encoder->UpdateProtectionMethod();
+  return 0;
+}
+
+// Enables or disables combined NACK + FEC protection on |video_channel|
+// using the given RED and FEC payload types, then refreshes the encoder's
+// protection settings. Returns 0 on success, -1 on failure.
+int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
+    const int video_channel,
+    const bool enable,
+    const unsigned char payload_typeRED,
+    const unsigned char payload_typeFEC) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, payload_typeRED: %u, "
+               "payloadTypeFEC: %u)",
+               __FUNCTION__, video_channel, enable, payload_typeRED,
+               payload_typeFEC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+
+  // Update the channel status with hybrid NACK FEC mode.
+  if (channel->SetHybridNACKFECStatus(enable, payload_typeRED,
+                                      payload_typeFEC) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: failed for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+
+  // Hybrid protection changes the encoder's protection configuration.
+  ViEEncoder* encoder = cs.Encoder(video_channel);
+  if (encoder == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  encoder->UpdateProtectionMethod();
+  return 0;
+}
+
+// Selects how key frames are requested from the remote side (PLI, or FIR
+// over RTP or RTCP) for |video_channel|. Returns 0 on success, -1 on
+// failure.
+int ViERTP_RTCPImpl::SetKeyFrameRequestMethod(
+  const int video_channel,
+  const ViEKeyFrameRequestMethod method) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, method: %d)", __FUNCTION__, video_channel,
+               method);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // Translate the API enum to the module enum before applying it.
+  if (channel->SetKeyFrameRequestMethod(APIRequestToModuleRequest(method)) ==
+      0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Enables or disables handling of TMMBR (temporary maximum media stream
+// bit rate request) messages for |video_channel|. Returns 0 on success,
+// -1 on failure.
+int ViERTP_RTCPImpl::SetTMMBRStatus(const int video_channel,
+                                    const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* channel = cs.Channel(video_channel);
+  if (channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (channel->EnableTMMBR(enable) == 0) {
+    return 0;
+  }
+  shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  return -1;
+}
+
+// Enables or disables REMB (receiver estimated max bitrate) participation
+// for |video_channel| as sender and/or receiver. Returns 0 on success,
+// -1 on failure. Note: no last error is set here; the channel manager
+// handles the lookup itself.
+int ViERTP_RTCPImpl::SetRembStatus(int video_channel, bool sender,
+                                   bool receiver) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "ViERTP_RTCPImpl::SetRembStatus(%d, %d, %d)", video_channel,
+               sender, receiver);
+  bool ok = shared_data_->channel_manager()->SetRembStatus(video_channel,
+                                                           sender, receiver);
+  return ok ? 0 : -1;
+}
+
+// Retrieves the receive-side RTCP report statistics (fraction/cumulative
+// loss, extended highest sequence number, jitter, round-trip time) for
+// |video_channel|. Returns 0 on success, -1 on failure.
+// NOTE(review): the static_cast<T&> conversions below only compile because
+// the WebRtc_* typedefs are exactly the parameters' built-in types, making
+// them no-ops; if the typedefs ever diverge this will fail to compile.
+int ViERTP_RTCPImpl::GetReceivedRTCPStatistics(const int video_channel,
+                                               unsigned short& fraction_lost,
+                                               unsigned int& cumulative_lost,
+                                               unsigned int& extended_max,
+                                               unsigned int& jitter,
+                                               int& rtt_ms) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetReceivedRtcpStatistics(
+      static_cast<WebRtc_UWord16&>(fraction_lost),
+      static_cast<WebRtc_UWord32&>(cumulative_lost),
+      static_cast<WebRtc_UWord32&>(extended_max),
+      static_cast<WebRtc_UWord32&>(jitter),
+      static_cast<WebRtc_Word32&>(rtt_ms)) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the statistics the remote side reported about our sent stream
+// (mirror of GetReceivedRTCPStatistics). Returns 0 on success, -1 on
+// failure. The static_cast<T&> conversions are no-ops; see the note on
+// the WebRtc_* typedefs in this file.
+int ViERTP_RTCPImpl::GetSentRTCPStatistics(const int video_channel,
+                                           unsigned short& fraction_lost,
+                                           unsigned int& cumulative_lost,
+                                           unsigned int& extended_max,
+                                           unsigned int& jitter,
+                                           int& rtt_ms) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->GetSendRtcpStatistics(
+      static_cast<WebRtc_UWord16&>(fraction_lost),
+      static_cast<WebRtc_UWord32&>(cumulative_lost),
+      static_cast<WebRtc_UWord32&>(extended_max),
+      static_cast<WebRtc_UWord32&>(jitter),
+      static_cast<WebRtc_Word32&>(rtt_ms)) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves cumulative RTP byte/packet counters for both directions on
+// |video_channel|. Returns 0 on success, -1 on failure. The
+// static_cast<T&> conversions are no-ops; see the note on the WebRtc_*
+// typedefs in this file.
+int ViERTP_RTCPImpl::GetRTPStatistics(const int video_channel,
+                                      unsigned int& bytes_sent,
+                                      unsigned int& packets_sent,
+                                      unsigned int& bytes_received,
+                                      unsigned int& packets_received) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRtpStatistics(
+      static_cast<WebRtc_UWord32&>(bytes_sent),
+      static_cast<WebRtc_UWord32&>(packets_sent),
+      static_cast<WebRtc_UWord32&>(bytes_received),
+      static_cast<WebRtc_UWord32&>(packets_received)) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+// Retrieves the current send-side bandwidth usage split into total, video,
+// FEC, and NACK bitrates. Once the channel is found this always succeeds:
+// GetBandwidthUsage() has no failure path checked here.
+int ViERTP_RTCPImpl::GetBandwidthUsage(const int video_channel,
+                                       unsigned int& total_bitrate_sent,
+                                       unsigned int& video_bitrate_sent,
+                                       unsigned int& fec_bitrate_sent,
+                                       unsigned int& nackBitrateSent) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // The casts are no-ops; see the note on the WebRtc_* typedefs in this file.
+  vie_channel->GetBandwidthUsage(
+      static_cast<WebRtc_UWord32&>(total_bitrate_sent),
+      static_cast<WebRtc_UWord32&>(video_bitrate_sent),
+      static_cast<WebRtc_UWord32&>(fec_bitrate_sent),
+      static_cast<WebRtc_UWord32&>(nackBitrateSent));
+  return 0;
+}
+
+// Returns the send-side bandwidth estimate via |estimated_bandwidth|.
+// NOTE(review): unlike most methods in this file, the encoder's return
+// code is propagated directly and no last error is set on failure --
+// confirm callers do not rely on LastError() after this call.
+int ViERTP_RTCPImpl::GetEstimatedSendBandwidth(
+    const int video_channel,
+    unsigned int* estimated_bandwidth) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // Pointer cast is a no-op; see the note on the WebRtc_* typedefs.
+  return vie_encoder->EstimatedSendBandwidth(
+      static_cast<WebRtc_UWord32*>(estimated_bandwidth));
+}
+
+// Returns the receive-side bandwidth estimate via |estimated_bandwidth|.
+// NOTE(review): as with GetEstimatedSendBandwidth(), the channel's return
+// code is propagated directly and no last error is set on failure.
+int ViERTP_RTCPImpl::GetEstimatedReceiveBandwidth(
+    const int video_channel,
+    unsigned int* estimated_bandwidth) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // Pointer cast is a no-op; see the note on the WebRtc_* typedefs.
+  return vie_channel->GetEstimatedReceiveBandwidth(
+      static_cast<WebRtc_UWord32*>(estimated_bandwidth));
+}
+
+// Enables or disables RTP keep-alive packets carrying |unknown_payload_type|
+// every |delta_transmit_time_seconds| seconds. Returns 0 on success, -1 on
+// failure.
+// NOTE(review): the seconds-to-ms product is stored in a 16-bit value, so
+// any value of delta_transmit_time_seconds > 65 silently truncates --
+// confirm the valid range against ViEChannel::SetKeepAliveStatus().
+// NOTE(review): the "%ul" in the trace format prints an unsigned followed
+// by a literal 'l'; "%u" was probably intended.
+int ViERTP_RTCPImpl::SetRTPKeepAliveStatus(
+    const int video_channel,
+    bool enable,
+    const char unknown_payload_type,
+    const unsigned int delta_transmit_time_seconds) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, unknown_payload_type: %d, "
+               "deltaTransmitTimeMS: %ul)",
+               __FUNCTION__, video_channel, enable,
+               static_cast<int>(unknown_payload_type),
+               delta_transmit_time_seconds);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  // The module works in milliseconds; see the truncation note above.
+  WebRtc_UWord16 delta_transmit_time_ms = 1000 * delta_transmit_time_seconds;
+  if (vie_channel->SetKeepAliveStatus(enable, unknown_payload_type,
+                                      delta_transmit_time_ms) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRTPKeepAliveStatus(
+    const int video_channel,
+    bool& enabled,
+    char& unknown_payload_type,
+    unsigned int& delta_transmit_time_seconds) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+
+  WebRtc_UWord16 delta_time_ms = 0;
+  int ret_val = vie_channel->GetKeepAliveStatus(enabled, unknown_payload_type,
+                                                delta_time_ms);
+  delta_transmit_time_seconds = delta_time_ms / 1000;
+  if (ret_val != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+  }
+  return ret_val;
+}
+
+int ViERTP_RTCPImpl::StartRTPDump(const int video_channel,
+                                  const char file_nameUTF8[1024],
+                                  RTPDirections direction) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, file_name: %s, direction: %d)", __FUNCTION__,
+               video_channel, file_nameUTF8, direction);
+  assert(FileWrapper::kMaxFileNameSize == 1024);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->StartRTPDump(file_nameUTF8, direction) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::StopRTPDump(const int video_channel,
+                                 RTPDirections direction) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, direction: %d)", __FUNCTION__, video_channel,
+               direction);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->StopRTPDump(direction) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::RegisterRTPObserver(const int video_channel,
+                                         ViERTPObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtpObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::DeregisterRTPObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtpObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::RegisterRTCPObserver(const int video_channel,
+                                          ViERTCPObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtcpObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::DeregisterRTCPObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtcpObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_rtp_rtcp_impl.h b/trunk/src/video_engine/vie_rtp_rtcp_impl.h
new file mode 100644
index 0000000..72e8131
--- /dev/null
+++ b/trunk/src/video_engine/vie_rtp_rtcp_impl.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RTP_RTCP_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RTP_RTCP_IMPL_H_
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "typedefs.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViERTP_RTCPImpl
+    : public ViERTP_RTCP,
+      public ViERefCount {
+ public:
+  // Implements ViERTP_RTCP.
+  virtual int Release();
+  virtual int SetLocalSSRC(const int video_channel,
+                           const unsigned int SSRC,
+                           const StreamType usage,
+                           const unsigned char simulcast_idx);
+  virtual int GetLocalSSRC(const int video_channel, unsigned int& SSRC) const;
+  virtual int SetRemoteSSRCType(const int video_channel,
+                                const StreamType usage,
+                                const unsigned int SSRC) const;
+  virtual int GetRemoteSSRC(const int video_channel, unsigned int& SSRC) const;
+  virtual int GetRemoteCSRCs(const int video_channel,
+                             unsigned int CSRCs[kRtpCsrcSize]) const;
+  virtual int SetStartSequenceNumber(const int video_channel,
+                                     unsigned short sequence_number);
+  virtual int SetRTCPStatus(const int video_channel,
+                            const ViERTCPMode rtcp_mode);
+  virtual int GetRTCPStatus(const int video_channel,
+                            ViERTCPMode& rtcp_mode) const;
+  virtual int SetRTCPCName(const int video_channel,
+                           const char rtcp_cname[KMaxRTCPCNameLength]);
+  virtual int GetRTCPCName(const int video_channel,
+                           char rtcp_cname[KMaxRTCPCNameLength]) const;
+  virtual int GetRemoteRTCPCName(const int video_channel,
+                                 char rtcp_cname[KMaxRTCPCNameLength]) const;
+  virtual int SendApplicationDefinedRTCPPacket(
+      const int video_channel,
+      const unsigned char sub_type,
+      unsigned int name,
+      const char* data,
+      unsigned short data_length_in_bytes);
+  virtual int SetNACKStatus(const int video_channel, const bool enable);
+  virtual int SetFECStatus(const int video_channel, const bool enable,
+                           const unsigned char payload_typeRED,
+                           const unsigned char payload_typeFEC);
+  virtual int SetHybridNACKFECStatus(const int video_channel, const bool enable,
+                                     const unsigned char payload_typeRED,
+                                     const unsigned char payload_typeFEC);
+  virtual int SetKeyFrameRequestMethod(const int video_channel,
+                                       const ViEKeyFrameRequestMethod method);
+  virtual int SetTMMBRStatus(const int video_channel, const bool enable);
+  virtual int SetRembStatus(int video_channel, bool sender, bool receiver);
+  virtual int GetReceivedRTCPStatistics(const int video_channel,
+                                        unsigned short& fraction_lost,
+                                        unsigned int& cumulative_lost,
+                                        unsigned int& extended_max,
+                                        unsigned int& jitter,
+                                        int& rtt_ms) const;
+  virtual int GetSentRTCPStatistics(const int video_channel,
+                                    unsigned short& fraction_lost,
+                                    unsigned int& cumulative_lost,
+                                    unsigned int& extended_max,
+                                    unsigned int& jitter, int& rtt_ms) const;
+  virtual int GetRTPStatistics(const int video_channel,
+                               unsigned int& bytes_sent,
+                               unsigned int& packets_sent,
+                               unsigned int& bytes_received,
+                               unsigned int& packets_received) const;
+  virtual int GetBandwidthUsage(const int video_channel,
+                                unsigned int& total_bitrate_sent,
+                                unsigned int& video_bitrate_sent,
+                                unsigned int& fec_bitrate_sent,
+                                unsigned int& nackBitrateSent) const;
+  virtual int GetEstimatedSendBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const;
+  virtual int GetEstimatedReceiveBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const;
+  virtual int SetRTPKeepAliveStatus(
+      const int video_channel,
+      bool enable,
+      const char unknown_payload_type,
+      const unsigned int delta_transmit_time_seconds);
+  virtual int GetRTPKeepAliveStatus(
+      const int video_channel,
+      bool& enabled,
+      char& unknown_payload_type,
+      unsigned int& delta_transmit_time_seconds) const;
+  virtual int StartRTPDump(const int video_channel,
+                           const char file_nameUTF8[1024],
+                           RTPDirections direction);
+  virtual int StopRTPDump(const int video_channel, RTPDirections direction);
+  virtual int RegisterRTPObserver(const int video_channel,
+                                  ViERTPObserver& observer);
+  virtual int DeregisterRTPObserver(const int video_channel);
+  virtual int RegisterRTCPObserver(const int video_channel,
+                                   ViERTCPObserver& observer);
+  virtual int DeregisterRTCPObserver(const int video_channel);
+
+ protected:
+  ViERTP_RTCPImpl(ViESharedData* shared_data);
+  virtual ~ViERTP_RTCPImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RTP_RTCP_IMPL_H_
diff --git a/trunk/src/video_engine/vie_sender.cc b/trunk/src/video_engine/vie_sender.cc
new file mode 100644
index 0000000..e0fe29b
--- /dev/null
+++ b/trunk/src/video_engine/vie_sender.cc
@@ -0,0 +1,205 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "critical_section_wrapper.h"
+#include "rtp_dump.h"
+#include "vie_sender.h"
+#include "trace.h"
+
+namespace webrtc {
+
+ViESender::ViESender(int engine_id, int channel_id)
+    : engine_id_(engine_id),
+      channel_id_(channel_id),
+      critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      external_encryption_(NULL),
+      encryption_buffer_(NULL),
+      transport_(NULL),
+      rtp_dump_(NULL) {
+}
+
+ViESender::~ViESender() {
+  if (encryption_buffer_) {
+    delete[] encryption_buffer_;
+    encryption_buffer_ = NULL;
+  }
+
+  if (rtp_dump_) {
+    rtp_dump_->Stop();
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  }
+}
+
+int ViESender::RegisterExternalEncryption(Encryption* encryption) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (external_encryption_) {
+    return -1;
+  }
+  encryption_buffer_ = new WebRtc_UWord8[kViEMaxMtu];
+  if (encryption_buffer_ == NULL) {
+    return -1;
+  }
+  external_encryption_ = encryption;
+  return 0;
+}
+
+int ViESender::DeregisterExternalEncryption() {
+  CriticalSectionScoped cs(critsect_.get());
+  if (external_encryption_ == NULL) {
+    return -1;
+  }
+  if (encryption_buffer_) {
+    delete[] encryption_buffer_;
+    encryption_buffer_ = NULL;
+  }
+  external_encryption_ = NULL;
+  return 0;
+}
+
+int ViESender::RegisterSendTransport(Transport* transport) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (transport_) {
+    return -1;
+  }
+  transport_ = transport;
+  return 0;
+}
+
+int ViESender::DeregisterSendTransport() {
+  CriticalSectionScoped cs(critsect_.get());
+  if (transport_ == NULL) {
+    return -1;
+  }
+  transport_ = NULL;
+  return 0;
+}
+
+int ViESender::StartRTPDump(const char file_nameUTF8[1024]) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (rtp_dump_) {
+    // Packet dump is already started, restart it.
+    rtp_dump_->Stop();
+  } else {
+    rtp_dump_ = RtpDump::CreateRtpDump();
+    if (rtp_dump_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "StartRTPDump: Failed to create RTP dump");
+      return -1;
+    }
+  }
+  if (rtp_dump_->Start(file_nameUTF8) != 0) {
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "StartRTPDump: Failed to start RTP dump");
+    return -1;
+  }
+  return 0;
+}
+
+int ViESender::StopRTPDump() {
+  CriticalSectionScoped cs(critsect_.get());
+  if (rtp_dump_) {
+    if (rtp_dump_->IsActive()) {
+      rtp_dump_->Stop();
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "StopRTPDump: Dump not active");
+    }
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "StopRTPDump: RTP dump not started");
+    return -1;
+  }
+  return 0;
+}
+
+int ViESender::SendPacket(int vie_id, const void* data, int len) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (!transport_) {
+    // No transport
+    return -1;
+  }
+
+  assert(ChannelId(vie_id) == channel_id_);
+
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  void* tmp_ptr = const_cast<void*>(data);
+  unsigned char* send_packet = static_cast<unsigned char*>(tmp_ptr);
+  int send_packet_length = len;
+
+  if (rtp_dump_) {
+    rtp_dump_->DumpPacket(send_packet, send_packet_length);
+  }
+
+  if (external_encryption_) {
+    external_encryption_->encrypt(channel_id_, send_packet,
+                                  encryption_buffer_, send_packet_length,
+                                  static_cast<int*>(&send_packet_length));
+    send_packet = encryption_buffer_;
+  }
+
+  const int bytes_sent = transport_->SendPacket(channel_id_, send_packet,
+                                                send_packet_length);
+  if (bytes_sent != send_packet_length) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "ViESender::SendPacket - Transport failed to send RTP packet");
+  }
+  return bytes_sent;
+}
+
+int ViESender::SendRTCPPacket(int vie_id, const void* data, int len) {
+  CriticalSectionScoped cs(critsect_.get());
+
+  if (!transport_) {
+    return -1;
+  }
+
+  assert(ChannelId(vie_id) == channel_id_);
+
+  // Prepare for possible encryption and sending.
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  void* tmp_ptr = const_cast<void*>(data);
+  unsigned char* send_packet = static_cast<unsigned char*>(tmp_ptr);
+  int send_packet_length = len;
+
+  if (rtp_dump_) {
+    rtp_dump_->DumpPacket(send_packet, send_packet_length);
+  }
+
+  if (external_encryption_) {
+    external_encryption_->encrypt_rtcp(
+        channel_id_, send_packet, encryption_buffer_, send_packet_length,
+        static_cast<int*>(&send_packet_length));
+    send_packet = encryption_buffer_;
+  }
+
+  const int bytes_sent = transport_->SendRTCPPacket(channel_id_, send_packet,
+                                                    send_packet_length);
+  if (bytes_sent != send_packet_length) {
+    WEBRTC_TRACE(
+        webrtc::kTraceWarning, webrtc::kTraceVideo,
+        ViEId(engine_id_, channel_id_),
+        "ViESender::SendRTCPPacket - Transport failed to send RTCP packet");
+  }
+  return bytes_sent;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_sender.h b/trunk/src/video_engine/vie_sender.h
new file mode 100644
index 0000000..a60446d
--- /dev/null
+++ b/trunk/src/video_engine/vie_sender.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESender is responsible for encrypting, if enabled, packets and send to
+// network.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+#include "vie_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpDump;
+class Transport;
+class VideoCodingModule;
+
+class ViESender: public Transport {
+ public:
+  ViESender(int engine_id, int channel_id);
+  ~ViESender();
+
+  // Registers an encryption class to use before sending packets.
+  int RegisterExternalEncryption(Encryption* encryption);
+  int DeregisterExternalEncryption();
+
+  // Registers transport to use for sending RTP and RTCP.
+  int RegisterSendTransport(Transport* transport);
+  int DeregisterSendTransport();
+
+  // Stores all incoming packets to file.
+  int StartRTPDump(const char file_nameUTF8[1024]);
+  int StopRTPDump();
+
+  // Implements Transport.
+  virtual int SendPacket(int vie_id, const void* data, int len);
+  virtual int SendRTCPPacket(int vie_id, const void* data, int len);
+
+ private:
+  int engine_id_;
+  int channel_id_;
+
+  scoped_ptr<CriticalSectionWrapper> critsect_;
+
+  Encryption* external_encryption_;
+  WebRtc_UWord8* encryption_buffer_;
+  Transport* transport_;
+  RtpDump* rtp_dump_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
diff --git a/trunk/src/video_engine/vie_shared_data.cc b/trunk/src/video_engine/vie_shared_data.cc
new file mode 100644
index 0000000..915bcfd
--- /dev/null
+++ b/trunk/src/video_engine/vie_shared_data.cc
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "cpu_info.h"
+#include "process_thread.h"
+#include "trace.h"
+#include "vie_channel_manager.h"
+#include "vie_defines.h"
+#include "vie_input_manager.h"
+#include "vie_render_manager.h"
+#include "vie_shared_data.h"
+
+namespace webrtc {
+
+// Active instance counter
+int ViESharedData::instance_counter_ = 0;
+
+ViESharedData::ViESharedData()
+    : instance_id_(++instance_counter_),
+      initialized_(false),
+      number_cores_(CpuInfo::DetectNumberOfCores()),
+      vie_performance_monitor_(ViEPerformanceMonitor(instance_id_)),
+      channel_manager_(*new ViEChannelManager(instance_id_, number_cores_,
+                                              vie_performance_monitor_)),
+      input_manager_(*new ViEInputManager(instance_id_)),
+      render_manager_(*new ViERenderManager(instance_id_)),
+      module_process_thread_(ProcessThread::CreateProcessThread()),
+      last_error_(0) {
+  Trace::CreateTrace();
+  channel_manager_.SetModuleProcessThread(*module_process_thread_);
+  input_manager_.SetModuleProcessThread(*module_process_thread_);
+  module_process_thread_->Start();
+}
+
+ViESharedData::~ViESharedData() {
+  delete &input_manager_;
+  delete &channel_manager_;
+  delete &render_manager_;
+
+  module_process_thread_->Stop();
+  ProcessThread::DestroyProcessThread(module_process_thread_);
+  Trace::ReturnTrace();
+}
+
+bool ViESharedData::Initialized() const {
+  return initialized_;
+}
+
+int ViESharedData::SetInitialized() {
+  initialized_ = true;
+  return 0;
+}
+
+int ViESharedData::SetUnInitialized() {
+  initialized_ = false;
+  return 0;
+}
+
+void ViESharedData::SetLastError(const int error) const {
+  last_error_ = error;
+}
+
+int ViESharedData::LastErrorInternal() const {
+  int error = last_error_;
+  last_error_ = 0;
+  return error;
+}
+
+int ViESharedData::NumberOfCores() const {
+  return number_cores_;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_shared_data.h b/trunk/src/video_engine/vie_shared_data.h
new file mode 100644
index 0000000..7f755fa
--- /dev/null
+++ b/trunk/src/video_engine/vie_shared_data.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESharedData contains data and instances common to all interface
+// implementations.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
+
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_performance_monitor.h"
+
+namespace webrtc {
+
+class ProcessThread;
+class ViEChannelManager;
+class ViEInputManager;
+class ViERenderManager;
+
+class ViESharedData {
+ public:
+  ViESharedData();
+  ~ViESharedData();
+
+  bool Initialized() const;
+  int SetInitialized();
+  int SetUnInitialized();
+  void SetLastError(const int error) const;
+  int LastErrorInternal() const;
+
+  int NumberOfCores() const;
+
+  int instance_id() { return instance_id_;}
+  ViEPerformanceMonitor* vie_performance_monitor() {
+    return &vie_performance_monitor_; }
+  ViEChannelManager* channel_manager() { return &channel_manager_; }
+  ViEInputManager* input_manager() { return &input_manager_; }
+  ViERenderManager* render_manager() { return &render_manager_; }
+
+ private:
+  static int instance_counter_;
+  const int instance_id_;
+  bool initialized_;
+  const int number_cores_;
+
+  ViEPerformanceMonitor vie_performance_monitor_;
+  ViEChannelManager& channel_manager_;
+  ViEInputManager& input_manager_;
+  ViERenderManager& render_manager_;
+  ProcessThread* module_process_thread_;
+  mutable int last_error_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
diff --git a/trunk/src/video_engine/vie_sync_module.cc b/trunk/src/video_engine/vie_sync_module.cc
new file mode 100644
index 0000000..d9b60ac
--- /dev/null
+++ b/trunk/src/video_engine/vie_sync_module.cc
@@ -0,0 +1,297 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_sync_module.h"
+
+#include "critical_section_wrapper.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "video_coding.h"
+#include "voe_video_sync.h"
+
+namespace webrtc {
+
+enum { kSyncInterval = 1000};
+enum { kMaxVideoDiffMs = 80 };
+enum { kMaxAudioDiffMs = 80 };
+enum { kMaxDelay = 1500 };
+
+ViESyncModule::ViESyncModule(int id, VideoCodingModule& vcm,
+                             RtpRtcp& rtcp_module)
+    : data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      id_(id),
+      vcm_(vcm),
+      rtcp_module_(rtcp_module),
+      voe_channel_id_(-1),
+      voe_sync_interface_(NULL),
+      last_sync_time_(TickTime::Now()) {
+}
+
+ViESyncModule::~ViESyncModule() {
+}
+
+int ViESyncModule::SetVoiceChannel(int voe_channel_id,
+                                   VoEVideoSync* voe_sync_interface) {
+  CriticalSectionScoped cs(data_cs_.get());
+  voe_channel_id_ = voe_channel_id;
+  voe_sync_interface_ = voe_sync_interface;
+  rtcp_module_.DeRegisterSyncModule();
+
+  if (!voe_sync_interface) {
+    voe_channel_id_ = -1;
+    if (voe_channel_id >= 0) {
+      // Trying to set a voice channel but no interface exists.
+      return -1;
+    }
+    return 0;
+  }
+  RtpRtcp* voe_rtp_rtcp = NULL;
+  voe_sync_interface->GetRtpRtcp(voe_channel_id_, voe_rtp_rtcp);
+  return rtcp_module_.RegisterSyncModule(voe_rtp_rtcp);
+}
+
+int ViESyncModule::VoiceChannel() {
+  return voe_channel_id_;
+}
+
+void ViESyncModule::SetNetworkDelay(int network_delay) {
+  channel_delay_.network_delay = network_delay;
+}
+
+WebRtc_Word32 ViESyncModule::Version(WebRtc_Word8* version,
+                                     WebRtc_UWord32& remaining_buffer_in_bytes,
+                                     WebRtc_UWord32& position) const {
+  if (version == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, -1,
+                 "Invalid in argument to ViESyncModule Version()");
+    return -1;
+  }
+  WebRtc_Word8 our_version[] = "ViESyncModule 1.1.0";
+  WebRtc_UWord32 our_length = (WebRtc_UWord32) strlen(our_version);
+  if (remaining_buffer_in_bytes < our_length + 1) {
+    return -1;
+  }
+  memcpy(version, our_version, our_length);
+  version[our_length] = '\0';
+  remaining_buffer_in_bytes -= (our_length + 1);
+  position += (our_length + 1);
+  return 0;
+}
+
+WebRtc_Word32 ViESyncModule::ChangeUniqueId(const WebRtc_Word32 id) {
+  id_ = id;
+  return 0;
+}
+
+WebRtc_Word32 ViESyncModule::TimeUntilNextProcess() {
+  return (WebRtc_Word32)(kSyncInterval -
+                         (TickTime::Now() - last_sync_time_).Milliseconds());
+}
+
+WebRtc_Word32 ViESyncModule::Process() {
+  CriticalSectionScoped cs(data_cs_.get());
+  last_sync_time_ = TickTime::Now();
+
+  int total_video_delay_target_ms = vcm_.Delay();
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+               "Video delay (JB + decoder) is %d ms",
+               total_video_delay_target_ms);
+
+  if (voe_channel_id_ == -1) {
+    return 0;
+  }
+
+  int current_audio_delay_ms = 0;
+  if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
+                                            current_audio_delay_ms) != 0) {
+    // Could not get VoE delay value, probably not a valid channel Id.
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, id_,
+                 "%s: VE_GetDelayEstimate error for voice_channel %d",
+                 __FUNCTION__, total_video_delay_target_ms, voe_channel_id_);
+    return 0;
+  }
+
+  int current_diff_ms = 0;
+  // Total video delay.
+  int video_delay_ms = 0;
+  // VoiceEngine reports delay estimates even when not started, ignore if the
+  // reported value is lower than 40 ms.
+  if (current_audio_delay_ms < 40) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+                 "A/V Sync: Audio delay < 40, skipping.");
+    return 0;
+  }
+
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+               "Audio delay is: %d for voice channel: %d",
+               current_audio_delay_ms, voe_channel_id_);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+               "Network delay diff is: %d for voice channel: %d",
+               channel_delay_.network_delay, voe_channel_id_);
+  // Calculate the difference between the lowest possible video delay and
+  // the current audio delay.
+  current_diff_ms = total_video_delay_target_ms - current_audio_delay_ms +
+      channel_delay_.network_delay;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+               "Current diff is: %d for audio channel: %d",
+               current_diff_ms, voe_channel_id_);
+
+  if (current_diff_ms > 0) {
+    // The minimum video delay is longer than the current audio delay.
+    // We need to decrease extra video delay, if we have added extra delay
+    // earlier, or add extra audio delay.
+    if (channel_delay_.extra_video_delay_ms > 0) {
+      // We have extra delay added to ViE. Reduce this delay before adding
+      // extra delay to VoE.
+
+      // This is the desired delay, we can't reduce more than this.
+      video_delay_ms = total_video_delay_target_ms;
+
+      // Check that we don't reduce the delay more than what is allowed.
+      if (video_delay_ms <
+          channel_delay_.last_video_delay_ms - kMaxVideoDiffMs) {
+        video_delay_ms =
+            channel_delay_.last_video_delay_ms - kMaxVideoDiffMs;
+        channel_delay_.extra_video_delay_ms =
+            video_delay_ms - total_video_delay_target_ms;
+      } else {
+        channel_delay_.extra_video_delay_ms = 0;
+      }
+      channel_delay_.last_video_delay_ms = video_delay_ms;
+      channel_delay_.last_sync_delay = -1;
+      channel_delay_.extra_audio_delay_ms = 0;
+    } else {  // channel_delay_.extra_video_delay_ms > 0
+      // We have no extra video delay to remove, increase the audio delay.
+      if (channel_delay_.last_sync_delay >= 0) {
+        // We have increased the audio delay earlier, increase it even more.
+        int audio_diff_ms = current_diff_ms / 2;
+        if (audio_diff_ms > kMaxAudioDiffMs) {
+          // We only allow a maximum change of kMaxAudioDiffMs for audio
+          // due to NetEQ maximum changes.
+          audio_diff_ms = kMaxAudioDiffMs;
+        }
+        // Increase the audio delay
+        channel_delay_.extra_audio_delay_ms += audio_diff_ms;
+
+        // Don't set a too high delay.
+        if (channel_delay_.extra_audio_delay_ms > kMaxDelay) {
+          channel_delay_.extra_audio_delay_ms = kMaxDelay;
+        }
+
+        // Don't add any extra video delay.
+        video_delay_ms = total_video_delay_target_ms;
+        channel_delay_.extra_video_delay_ms = 0;
+        channel_delay_.last_video_delay_ms = video_delay_ms;
+        channel_delay_.last_sync_delay = 1;
+      } else {  // channel_delay_.last_sync_delay >= 0
+        // First time after a delay change, don't add any extra delay.
+        // This is to not toggle back and forth too much.
+        channel_delay_.extra_audio_delay_ms = 0;
+        // Set minimum video delay
+        video_delay_ms = total_video_delay_target_ms;
+        channel_delay_.extra_video_delay_ms = 0;
+        channel_delay_.last_video_delay_ms = video_delay_ms;
+        channel_delay_.last_sync_delay = 0;
+      }
+    }
+  } else {  // if (current_diff_ms > 0)
+    // The minimum video delay is lower than the current audio delay.
+    // We need to decrease possible extra audio delay, or
+    // add extra video delay.
+
+    if (channel_delay_.extra_audio_delay_ms > 0) {
+      // We have extra delay in VoiceEngine
+      // Start with decreasing the voice delay
+      int audio_diff_ms = current_diff_ms / 2;
+      if (audio_diff_ms < -1 * kMaxAudioDiffMs) {
+        // Don't change the delay too much at once.
+        audio_diff_ms = -1 * kMaxAudioDiffMs;
+      }
+      // Add the negative difference.
+      channel_delay_.extra_audio_delay_ms += audio_diff_ms;
+
+      if (channel_delay_.extra_audio_delay_ms < 0) {
+        // Negative values not allowed.
+        channel_delay_.extra_audio_delay_ms = 0;
+        channel_delay_.last_sync_delay = 0;
+      } else {
+        // There is more audio delay to use for the next round.
+        channel_delay_.last_sync_delay = 1;
+      }
+
+      // Keep the video delay at the minimum values.
+      video_delay_ms = total_video_delay_target_ms;
+      channel_delay_.extra_video_delay_ms = 0;
+      channel_delay_.last_video_delay_ms = video_delay_ms;
+    } else {  // channel_delay_.extra_audio_delay_ms > 0
+      // We have no extra delay in VoiceEngine, increase the video delay.
+      channel_delay_.extra_audio_delay_ms = 0;
+
+      // Make the difference positive.
+      int video_diff_ms = -1 * current_diff_ms;
+
+      // This is the desired delay.
+      video_delay_ms = total_video_delay_target_ms + video_diff_ms;
+      if (video_delay_ms > channel_delay_.last_video_delay_ms) {
+        if (video_delay_ms >
+            channel_delay_.last_video_delay_ms + kMaxVideoDiffMs) {
+          // Don't increase the delay too much at once
+          video_delay_ms =
+              channel_delay_.last_video_delay_ms + kMaxVideoDiffMs;
+        }
+        // Verify we don't go above the maximum allowed delay
+        if (video_delay_ms > kMaxDelay) {
+          video_delay_ms = kMaxDelay;
+        }
+      } else {
+        if (video_delay_ms <
+            channel_delay_.last_video_delay_ms - kMaxVideoDiffMs) {
+          // Don't decrease the delay too much at once
+          video_delay_ms =
+              channel_delay_.last_video_delay_ms - kMaxVideoDiffMs;
+        }
+        // Verify we don't go below the minimum delay
+        if (video_delay_ms < total_video_delay_target_ms) {
+          video_delay_ms = total_video_delay_target_ms;
+        }
+      }
+      // Store the values
+      channel_delay_.extra_video_delay_ms =
+          video_delay_ms - total_video_delay_target_ms;
+      channel_delay_.last_video_delay_ms = video_delay_ms;
+      channel_delay_.last_sync_delay = -1;
+    }
+  }
+
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+      "Sync video delay %d ms for video channel and audio delay %d for audio "
+      "channel %d",
+      video_delay_ms, channel_delay_.extra_audio_delay_ms, voe_channel_id_);
+
+  // Set the extra audio delay.
+  if (voe_sync_interface_->SetMinimumPlayoutDelay(
+      voe_channel_id_, channel_delay_.extra_audio_delay_ms) == -1) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, id_,
+                 "Error setting voice delay");
+  }
+
+  if (video_delay_ms < 0) {
+    video_delay_ms = 0;
+  }
+  total_video_delay_target_ms =
+      (total_video_delay_target_ms  >  video_delay_ms) ?
+      total_video_delay_target_ms : video_delay_ms;
+  vcm_.SetMinimumPlayoutDelay(total_video_delay_target_ms);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, id_,
+               "New Video delay target is: %d", total_video_delay_target_ms);
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/video_engine/vie_sync_module.h b/trunk/src/video_engine/vie_sync_module.h
new file mode 100644
index 0000000..01373a9
--- /dev/null
+++ b/trunk/src/video_engine/vie_sync_module.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESyncModule is responsible for synchronizing audio and video for a given
+// VoE and ViE channel couple.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
+
+#include "module.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "tick_util.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpRtcp;
+class VideoCodingModule;
+class VoEVideoSync;
+
+class ViESyncModule : public Module {
+ public:
+  ViESyncModule(int id, VideoCodingModule& vcm, RtpRtcp& rtcp_module);
+  ~ViESyncModule();
+
+  int SetVoiceChannel(int voe_channel_id, VoEVideoSync* voe_sync_interface);
+  int VoiceChannel();
+
+  // Set how long, in ms, voice is ahead of video when received on the
+  // network. Positive value means audio is ahead of video.
+  void SetNetworkDelay(int network_delay);
+
+  // Implements Module.
+  virtual WebRtc_Word32 Version(WebRtc_Word8* version,
+                                WebRtc_UWord32& remaining_buffer_in_bytes,
+                                WebRtc_UWord32& position) const;
+  virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+  virtual WebRtc_Word32 TimeUntilNextProcess();
+  virtual WebRtc_Word32 Process();
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> data_cs_;
+  int id_;
+  VideoCodingModule& vcm_;
+  RtpRtcp& rtcp_module_;
+  int voe_channel_id_;
+  VoEVideoSync* voe_sync_interface_;
+  TickTime last_sync_time_;
+
+  struct ViESyncDelay {
+    ViESyncDelay() {
+      extra_video_delay_ms = 0;
+      last_video_delay_ms = 0;
+      extra_audio_delay_ms = 0;
+      last_sync_delay = 0;
+      network_delay = 120;
+    }
+    int extra_video_delay_ms;
+    int last_video_delay_ms;
+    int extra_audio_delay_ms;
+    int last_sync_delay;
+    int network_delay;
+  };
+  ViESyncDelay channel_delay_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
diff --git a/trunk/src/voice_engine/OWNERS b/trunk/src/voice_engine/OWNERS
new file mode 100644
index 0000000..a07ced3
--- /dev/null
+++ b/trunk/src/voice_engine/OWNERS
@@ -0,0 +1,4 @@
+henrikg@webrtc.org
+henrika@webrtc.org
+niklas.enbom@webrtc.org
+xians@webrtc.org
diff --git a/trunk/src/voice_engine/main/interface/mock/mock_voe_connection_observer.h b/trunk/src/voice_engine/main/interface/mock/mock_voe_connection_observer.h
new file mode 100644
index 0000000..9eb9271
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/mock/mock_voe_connection_observer.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MOCK_VOE_CONNECTION_OBSERVER_H_
+#define MOCK_VOE_CONNECTION_OBSERVER_H_
+
+#include "voe_network.h"
+
+namespace webrtc {
+
+class MockVoeConnectionObserver : public VoEConnectionObserver {
+ public:
+  MOCK_METHOD2(OnPeriodicDeadOrAlive, void(const int channel,
+                                           const bool alive));
+};
+
+}
+
+#endif  // MOCK_VOE_CONNECTION_OBSERVER_H_
diff --git a/trunk/src/voice_engine/main/interface/mock/mock_voe_observer.h b/trunk/src/voice_engine/main/interface/mock/mock_voe_observer.h
new file mode 100644
index 0000000..70d1c13
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/mock/mock_voe_observer.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_MOCK_VOE_OBSERVER_H_
+#define WEBRTC_VOICE_ENGINE_MOCK_VOE_OBSERVER_H_
+
+#include "voe_base.h"
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockVoEObserver: public VoiceEngineObserver {
+ public:
+  MockVoEObserver() {}
+  virtual ~MockVoEObserver() {}
+
+  MOCK_METHOD2(CallbackOnError, void(const int channel, const int error_code));
+};
+
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_MOCK_VOE_OBSERVER_H_
diff --git a/trunk/src/voice_engine/main/interface/voe_audio_processing.h b/trunk/src/voice_engine/main/interface/voe_audio_processing.h
new file mode 100644
index 0000000..541807b
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_audio_processing.h
@@ -0,0 +1,189 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Noise Suppression (NS).
+//  - Automatic Gain Control (AGC).
+//  - Echo Control (EC).
+//  - Receiving side VAD, NS and AGC.
+//  - Measurements of instantaneous speech, noise and echo levels.
+//  - Generation of AP debug recordings.
+//  - Detection of keyboard typing which can disrupt a voice conversation.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface();
+//  VoEAudioProcessing* ap = VoEAudioProcessing::GetInterface(voe);
+//  base->Init();
+//  ap->SetEcStatus(true, kAgcAdaptiveAnalog);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  ap->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_H
+#define WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoERxVadCallback
+class WEBRTC_DLLEXPORT VoERxVadCallback
+{
+public:
+    virtual void OnRxVad(int channel, int vadDecision) = 0;
+
+protected:
+    virtual ~VoERxVadCallback() {}
+};
+
+// VoEAudioProcessing
+class WEBRTC_DLLEXPORT VoEAudioProcessing
+{
+public:
+    // Factory for the VoEAudioProcessing sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEAudioProcessing* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEAudioProcessing sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Sets Noise Suppression (NS) status and mode.
+    // The NS reduces noise in the microphone signal.
+    virtual int SetNsStatus(bool enable, NsModes mode = kNsUnchanged) = 0;
+
+    // Gets the NS status and mode.
+    virtual int GetNsStatus(bool& enabled, NsModes& mode) = 0;
+
+    // Sets the Automatic Gain Control (AGC) status and mode.
+    // The AGC adjusts the microphone signal to an appropriate level.
+    virtual int SetAgcStatus(bool enable, AgcModes mode = kAgcUnchanged) = 0;
+
+    // Gets the AGC status and mode.
+    virtual int GetAgcStatus(bool& enabled, AgcModes& mode) = 0;
+
+    // Sets the AGC configuration.
+    // Should only be used in situations where the working environment
+    // is well known.
+    virtual int SetAgcConfig(const AgcConfig config) = 0;
+
+    // Gets the AGC configuration.
+    virtual int GetAgcConfig(AgcConfig& config) = 0;
+
+    // Sets the Echo Control (EC) status and mode.
+    // The EC mitigates acoustic echo where a user can hear their own
+    // speech repeated back due to an acoustic coupling between the
+    // speaker and the microphone at the remote end.
+    virtual int SetEcStatus(bool enable, EcModes mode = kEcUnchanged) = 0;
+
+    // Gets the EC status and mode.
+    virtual int GetEcStatus(bool& enabled, EcModes& mode) = 0;
+
+    // Modifies settings for the AEC designed for mobile devices (AECM).
+    virtual int SetAecmMode(AecmModes mode = kAecmSpeakerphone,
+                            bool enableCNG = true) = 0;
+
+    // Gets settings for the AECM.
+    virtual int GetAecmMode(AecmModes& mode, bool& enabledCNG) = 0;
+
+    // Sets status and mode of the receiving-side (Rx) NS.
+    // The Rx NS reduces noise in the received signal for the specified
+    // |channel|. Intended for advanced usage only.
+    virtual int SetRxNsStatus(int channel,
+                              bool enable,
+                              NsModes mode = kNsUnchanged) = 0;
+
+    // Gets status and mode of the receiving-side NS.
+    virtual int GetRxNsStatus(int channel,
+                              bool& enabled,
+                              NsModes& mode) = 0;
+
+    // Sets status and mode of the receiving-side (Rx) AGC.
+    // The Rx AGC adjusts the received signal to an appropriate level
+    // for the specified |channel|. Intended for advanced usage only.
+    virtual int SetRxAgcStatus(int channel,
+                               bool enable,
+                               AgcModes mode = kAgcUnchanged) = 0;
+
+    // Gets status and mode of the receiving-side AGC.
+    virtual int GetRxAgcStatus(int channel,
+                               bool& enabled,
+                               AgcModes& mode) = 0;
+
+    // Modifies the AGC configuration on the receiving side for the
+    // specified |channel|.
+    virtual int SetRxAgcConfig(int channel, const AgcConfig config) = 0;
+
+    // Gets the AGC configuration on the receiving side.
+    virtual int GetRxAgcConfig(int channel, AgcConfig& config) = 0;
+
+    // Registers a VoERxVadCallback |observer| instance and enables Rx VAD
+    // notifications for the specified |channel|.
+    virtual int RegisterRxVadObserver(int channel,
+                                      VoERxVadCallback &observer) = 0;
+
+    // Deregisters the VoERxVadCallback |observer| and disables Rx VAD
+    // notifications for the specified |channel|.
+    virtual int DeRegisterRxVadObserver(int channel) = 0;
+
+    // Gets the VAD/DTX activity for the specified |channel|.
+    // The returned value is 1 if frames of audio contains speech
+    // and 0 if silence. The output is always 1 if VAD is disabled.
+    virtual int VoiceActivityIndicator(int channel) = 0;
+
+    // Enables or disables the possibility to retrieve echo metrics and delay
+    // logging values during an active call. The metrics are only supported in
+    // AEC.
+    virtual int SetEcMetricsStatus(bool enable) = 0;
+
+    // Gets the current EC metric status.
+    virtual int GetEcMetricsStatus(bool& enabled) = 0;
+
+    // Gets the instantaneous echo level metrics.
+    virtual int GetEchoMetrics(int& ERL, int& ERLE, int& RERL, int& A_NLP) = 0;
+
+    // Gets the EC internal |delay_median| and |delay_std| in ms between
+    // near-end and far-end. The values are calculated over the time period
+    // since the last GetEcDelayMetrics() call.
+    virtual int GetEcDelayMetrics(int& delay_median, int& delay_std) = 0;
+
+    // Enables recording of Audio Processing (AP) debugging information.
+    // The file can later be used for off-line analysis of the AP performance.
+    virtual int StartDebugRecording(const char* fileNameUTF8) = 0;
+
+    // Disables recording of AP debugging information.
+    virtual int StopDebugRecording() = 0;
+
+    // Enables or disables detection of disturbing keyboard typing.
+    // An error notification will be given as a callback upon detection.
+    virtual int SetTypingDetectionStatus(bool enable) = 0;
+
+    // Gets the current typing detection status.
+    virtual int GetTypingDetectionStatus(bool& enabled) = 0;
+
+protected:
+    VoEAudioProcessing() {}
+    virtual ~VoEAudioProcessing() {}
+};
+
+}  //  namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_H
diff --git a/trunk/src/voice_engine/main/interface/voe_base.h b/trunk/src/voice_engine/main/interface/voe_base.h
new file mode 100644
index 0000000..be6849c
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_base.h
@@ -0,0 +1,213 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Enables full duplex VoIP sessions via RTP using G.711 (mu-Law or A-Law).
+//  - Initialization and termination.
+//  - Trace information on text files or via callbacks.
+//  - Multi-channel support (mixing, sending to multiple destinations etc.).
+//  - Call setup (port and address) for receiving and sending sides.
+//
+// To support other codecs than G.711, the VoECodec sub-API must be utilized.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  base->StartPlayout(ch);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_BASE_H
+#define WEBRTC_VOICE_ENGINE_VOE_BASE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+
+const int kVoEDefault = -1;
+
+// VoiceEngineObserver
+class WEBRTC_DLLEXPORT VoiceEngineObserver
+{
+public:
+    // This method will be called after the occurrence of any runtime error
+    // code, or warning notification, when the observer interface has been
+    // installed using VoEBase::RegisterVoiceEngineObserver().
+    virtual void CallbackOnError(const int channel, const int errCode) = 0;
+
+protected:
+    virtual ~VoiceEngineObserver() {}
+};
+
+// VoiceEngine
+class WEBRTC_DLLEXPORT VoiceEngine
+{
+public:
+    // Creates a VoiceEngine object, which can then be used to acquire
+    // sub-APIs. Returns NULL on failure.
+    static VoiceEngine* Create();
+
+    // Deletes a created VoiceEngine object and releases the utilized resources.
+    // If |ignoreRefCounters| is set to false, all reference counters must be
+    // zero to enable a valid release of the allocated resources. When set to
+    // true, a release of all resources allocated by the VoE is performed
+    // without checking the reference counter state.
+    static bool Delete(VoiceEngine*& voiceEngine,
+                       bool ignoreRefCounters = false);
+
+    // Specifies the amount and type of trace information which will be
+    // created by the VoiceEngine.
+    static int SetTraceFilter(const unsigned int filter);
+
+    // Sets the name of the trace file and enables non-encrypted trace messages.
+    static int SetTraceFile(const char* fileNameUTF8,
+                            const bool addFileCounter = false);
+
+    // Installs the TraceCallback implementation to ensure that the user
+    // receives callbacks for generated trace messages.
+    static int SetTraceCallback(TraceCallback* callback);
+
+    static int SetAndroidObjects(void* javaVM, void* env, void* context);
+
+protected:
+    VoiceEngine() {}
+    virtual ~VoiceEngine() {}
+};
+
+// VoEBase
+class WEBRTC_DLLEXPORT VoEBase
+{
+public:
+    // Factory for the VoEBase sub-API. Increases an internal reference
+    // counter if successful. Returns NULL if the API is not supported or if
+    // construction fails.
+    static VoEBase* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEBase sub-API and decreases an internal reference
+    // counter. Returns the new reference count. This value should be zero
+    // for all sub-API:s before the VoiceEngine object can be safely deleted.
+    virtual int Release() = 0;
+
+    // Installs the observer class to enable runtime error control and
+    // warning notifications.
+    virtual int RegisterVoiceEngineObserver(VoiceEngineObserver& observer) = 0;
+
+    // Removes and disables the observer class for runtime error control
+    // and warning notifications.
+    virtual int DeRegisterVoiceEngineObserver() = 0;
+
+    // Initiates all common parts of the VoiceEngine; e.g. all
+    // encoders/decoders, the sound card and core receiving components.
+    // This method also makes it possible to install a user-defined
+    // external Audio Device Module (ADM) which implements all the audio
+    // layer functionality in a separate (reference counted) module.
+    virtual int Init(AudioDeviceModule* external_adm = NULL) = 0;
+
+    // Terminates all VoiceEngine functions and releases allocated resources.
+    virtual int Terminate() = 0;
+
+    // Retrieves the maximum number of channels that can be created.
+    virtual int MaxNumOfChannels() = 0;
+
+    // Creates a new channel and allocates the required resources for it.
+    virtual int CreateChannel() = 0;
+
+    // Deletes an existing channel and releases the utilized resources.
+    virtual int DeleteChannel(int channel) = 0;
+
+    // Sets the local receiver port and address for a specified
+    // |channel| number.
+    virtual int SetLocalReceiver(int channel, int port,
+                                 int RTCPport = kVoEDefault,
+                                 const char ipAddr[64] = NULL,
+                                 const char multiCastAddr[64] = NULL) = 0;
+
+    // Gets the local receiver port and address for a specified
+    // |channel| number.
+    virtual int GetLocalReceiver(int channel, int& port, int& RTCPport,
+                                 char ipAddr[64]) = 0;
+
+    // Sets the destination port and address for a specified |channel| number.
+    virtual int SetSendDestination(int channel, int port,
+                                   const char ipAddr[64],
+                                   int sourcePort = kVoEDefault,
+                                   int RTCPport = kVoEDefault) = 0;
+
+    // Gets the destination port and address for a specified |channel| number.
+    virtual int GetSendDestination(int channel, int& port, char ipAddr[64],
+                                   int& sourcePort, int& RTCPport) = 0;
+
+    // Prepares and initiates the VoiceEngine for reception of
+    // incoming RTP/RTCP packets on the specified |channel|.
+    virtual int StartReceive(int channel) = 0;
+
+    // Stops receiving incoming RTP/RTCP packets on the specified |channel|.
+    virtual int StopReceive(int channel) = 0;
+
+    // Starts forwarding the packets to the mixer/soundcard for a
+    // specified |channel|.
+    virtual int StartPlayout(int channel) = 0;
+
+    // Stops forwarding the packets to the mixer/soundcard for a
+    // specified |channel|.
+    virtual int StopPlayout(int channel) = 0;
+
+    // Starts sending packets to an already specified IP address and
+    // port number for a specified |channel|.
+    virtual int StartSend(int channel) = 0;
+
+    // Stops sending packets from a specified |channel|.
+    virtual int StopSend(int channel) = 0;
+
+    // Gets the version information for VoiceEngine and its components.
+    virtual int GetVersion(char version[1024]) = 0;
+
+    // Gets the last VoiceEngine error code.
+    virtual int LastError() = 0;
+
+
+    // Stops or resumes playout and transmission on a temporary basis.
+    virtual int SetOnHoldStatus(int channel, bool enable,
+                                OnHoldModes mode = kHoldSendAndPlay) = 0;
+
+    // Gets the current playout and transmission status.
+    virtual int GetOnHoldStatus(int channel, bool& enabled,
+                                OnHoldModes& mode) = 0;
+
+    // Sets the NetEQ playout mode for a specified |channel| number.
+    virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode) = 0;
+
+    // Gets the NetEQ playout mode for a specified |channel| number.
+    virtual int GetNetEQPlayoutMode(int channel, NetEqModes& mode) = 0;
+
+    // Sets the NetEQ background noise mode for a specified |channel| number.
+    virtual int SetNetEQBGNMode(int channel, NetEqBgnModes mode) = 0;
+
+    // Gets the NetEQ background noise mode for a specified |channel| number.
+    virtual int GetNetEQBGNMode(int channel, NetEqBgnModes& mode) = 0;
+
+protected:
+    VoEBase() {}
+    virtual ~VoEBase() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_BASE_H
diff --git a/trunk/src/voice_engine/main/interface/voe_call_report.h b/trunk/src/voice_engine/main/interface/voe_call_report.h
new file mode 100644
index 0000000..c4d3abd
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_call_report.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Long-term speech and noise level metrics.
+//  - Long-term echo metric statistics.
+//  - Round Trip Time (RTT) statistics.
+//  - Dead-or-Alive connection summary.
+//  - Generation of call reports to text files.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoECallReport report = VoECallReport::GetInterface(voe);
+//  base->Init();
+//  LevelStatistics stats;
+//  report->GetSpeechAndNoiseSummary(stats);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  report->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
+#define WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoECallReport
+class WEBRTC_DLLEXPORT VoECallReport
+{
+public:
+    // Factory for the VoECallReport sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoECallReport* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoECallReport sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Performs a combined reset of all components involved in generating
+    // the call report for a specified |channel|. Pass in -1 to reset
+    // all channels.
+    virtual int ResetCallReportStatistics(int channel) = 0;
+
+    // Gets minimum, maximum and average levels for long-term echo metrics.
+    virtual int GetEchoMetricSummary(EchoStatistics& stats) = 0;
+
+    // Gets minimum, maximum and average levels for Round Trip Time (RTT)
+    // measurements.
+    virtual int GetRoundTripTimeSummary(int channel,
+                                        StatVal& delaysMs) = 0;
+
+    // Gets the total amount of dead and alive connection detections
+    // during a VoIP session.
+    virtual int GetDeadOrAliveSummary(int channel, int& numOfDeadDetections,
+                                      int& numOfAliveDetections) = 0;
+
+    // Creates a text file in ASCII format, which contains a summary
+    // of all the statistics that can be obtained by the call report sub-API.
+    virtual int WriteReportToFile(const char* fileNameUTF8) = 0;
+
+protected:
+    VoECallReport() { }
+    virtual ~VoECallReport() { }
+};
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
diff --git a/trunk/src/voice_engine/main/interface/voe_codec.h b/trunk/src/voice_engine/main/interface/voe_codec.h
new file mode 100644
index 0000000..413bc90
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_codec.h
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Support of non-default codecs (e.g. iLBC, iSAC, etc.).
+//  - Voice Activity Detection (VAD) on a per channel basis.
+//  - Possibility to specify how to map received payload types to codecs.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoECodec* codec = VoECodec::GetInterface(voe);
+//  base->Init();
+//  int num_of_codecs = codec->NumOfCodecs()
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  codec->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CODEC_H
+#define WEBRTC_VOICE_ENGINE_VOE_CODEC_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoECodec
+{
+public:
+    // Factory for the VoECodec sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoECodec* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoECodec sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Gets the number of supported codecs.
+    virtual int NumOfCodecs() = 0;
+
+    // Get the |codec| information for a specified list |index|.
+    virtual int GetCodec(int index, CodecInst& codec) = 0;
+
+    // Sets the |codec| for the |channel| to be used for sending.
+    virtual int SetSendCodec(int channel, const CodecInst& codec) = 0;
+
+    // Gets the |codec| parameters for the sending codec on a specified
+    // |channel|.
+    virtual int GetSendCodec(int channel, CodecInst& codec) = 0;
+
+    // Gets the currently received |codec| for a specific |channel|.
+    virtual int GetRecCodec(int channel, CodecInst& codec) = 0;
+
+    // Sets the initial values of target rate and frame size for iSAC
+    // for a specified |channel|. This API is only valid if iSAC is setup
+    // to run in channel-adaptive mode.
+    virtual int SetISACInitTargetRate(int channel, int rateBps,
+                                      bool useFixedFrameSize = false) = 0;
+
+    // Sets the maximum allowed iSAC rate which the codec may not exceed
+    // for a single packet for the specified |channel|. The maximum rate is
+    // defined as payload size per frame size in bits per second.
+    virtual int SetISACMaxRate(int channel, int rateBps) = 0;
+
+    // Sets the maximum allowed iSAC payload size for a specified |channel|.
+    // The maximum value is set independently of the frame size, i.e.
+    // 30 ms and 60 ms packets have the same limit.
+    virtual int SetISACMaxPayloadSize(int channel, int sizeBytes) = 0;
+
+    // Sets the dynamic payload type number for a particular |codec| or
+    // disables (ignores) a codec for receiving. For instance, when receiving
+    // an invite from a SIP-based client, this function can be used to change
+    // the dynamic payload type number to match that in the INVITE SDP-
+    // message. The utilized parameters in the |codec| structure are:
+    // plname, plfreq, pltype and channels.
+    virtual int SetRecPayloadType(int channel, const CodecInst& codec) = 0;
+
+    // Gets the actual payload type that is set for receiving a |codec| on a
+    // |channel|. The value it retrieves will either be the default payload
+    // type, or a value earlier set with SetRecPayloadType().
+    virtual int GetRecPayloadType(int channel, CodecInst& codec) = 0;
+
+    // Sets the payload |type| for the sending of SID-frames with background
+    // noise estimation during silence periods detected by the VAD.
+    virtual int SetSendCNPayloadType(
+        int channel, int type, PayloadFrequencies frequency = kFreq16000Hz) = 0;
+
+
+    // Sets the VAD/DTX (silence suppression) status and |mode| for a
+    // specified |channel|.
+    virtual int SetVADStatus(int channel, bool enable,
+                             VadModes mode = kVadConventional,
+                             bool disableDTX = false) = 0;
+
+    // Gets the VAD/DTX status and |mode| for a specified |channel|.
+    virtual int GetVADStatus(int channel, bool& enabled, VadModes& mode,
+                             bool& disabledDTX) = 0;
+
+    // Not supported
+    virtual int SetAMREncFormat(int channel, AmrMode mode) = 0;
+
+    // Not supported
+    virtual int SetAMRDecFormat(int channel, AmrMode mode) = 0;
+
+    // Not supported
+    virtual int SetAMRWbEncFormat(int channel, AmrMode mode) = 0;
+
+    // Not supported
+    virtual int SetAMRWbDecFormat(int channel, AmrMode mode) = 0;
+
+protected:
+    VoECodec() {}
+    virtual ~VoECodec() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_CODEC_H
diff --git a/trunk/src/voice_engine/main/interface/voe_dtmf.h b/trunk/src/voice_engine/main/interface/voe_dtmf.h
new file mode 100644
index 0000000..3ed1749
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_dtmf.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Telephone event transmission.
+//  - DTMF tone generation.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEDtmf* dtmf  = VoEDtmf::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  dtmf->SendTelephoneEvent(ch, 7);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  dtmf->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_DTMF_H
+#define WEBRTC_VOICE_ENGINE_VOE_DTMF_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoETelephoneEventObserver
+class WEBRTC_DLLEXPORT VoETelephoneEventObserver
+{
+public:
+    // This method will be called after the detection of an inband
+    // telephone event. The event code is given as output in the
+    // |eventCode| parameter.
+    virtual void OnReceivedTelephoneEventInband(int channel,
+                                                int eventCode,
+                                                bool endOfEvent) = 0;
+
+    // This method will be called after the detection of an out-of-band
+    // telephone event. The event code is given as output in the
+    // |eventCode| parameter.
+    virtual void OnReceivedTelephoneEventOutOfBand(
+        int channel,
+        int eventCode,
+        bool endOfEvent) = 0;
+
+protected:
+    virtual ~VoETelephoneEventObserver() {}
+};
+
+// VoEDtmf
+class WEBRTC_DLLEXPORT VoEDtmf
+{
+public:
+    
+    // Factory for the VoEDtmf sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEDtmf* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEDtmf sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Sends telephone events either in-band or out-of-band.
+    virtual int SendTelephoneEvent(int channel, int eventCode,
+                                   bool outOfBand = true, int lengthMs = 160,
+                                   int attenuationDb = 10) = 0;
+
+   
+    // Sets the dynamic payload |type| that should be used for telephone
+    // events.
+    virtual int SetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char type) = 0;
+
+  
+    // Gets the currently set dynamic payload |type| for telephone events.
+    virtual int GetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char& type) = 0;
+
+    // Enables or disables local tone playout for received DTMF events
+    // out-of-band.
+    virtual int SetDtmfPlayoutStatus(int channel, bool enable) = 0;
+
+    // Gets the DTMF playout status.
+    virtual int GetDtmfPlayoutStatus(int channel, bool& enabled) = 0;
+
+    // Toggles DTMF feedback state: when a DTMF tone is sent, the same tone
+    // is played out on the speaker.
+    virtual int SetDtmfFeedbackStatus(bool enable,
+                                      bool directFeedback = false) = 0;
+
+    // Gets the DTMF feedback status.
+    virtual int GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback) = 0;
+
+    // Plays a DTMF feedback tone (only locally).
+    virtual int PlayDtmfTone(int eventCode, int lengthMs = 200,
+                             int attenuationDb = 10) = 0;
+
+    // Starts playing out a DTMF feedback tone locally.
+    // The tone will be played out until the corresponding stop function
+    // is called.
+    virtual int StartPlayingDtmfTone(int eventCode,
+                                     int attenuationDb = 10) = 0;
+
+    // Stops playing out a DTMF feedback tone locally.
+    virtual int StopPlayingDtmfTone() = 0;
+
+    // Installs an instance of a VoETelephoneEventObserver derived class and
+    // activates detection of telephone events for the specified |channel|.
+    virtual int RegisterTelephoneEventDetection(
+        int channel, TelephoneEventDetectionMethods detectionMethod,
+        VoETelephoneEventObserver& observer) = 0;
+
+    // Removes an instance of a VoETelephoneEventObserver derived class and
+    // disables detection of telephone events for the specified |channel|.
+    virtual int DeRegisterTelephoneEventDetection(int channel) = 0;
+
+    // Gets the current telephone-event detection status for a specified
+    // |channel|.
+    virtual int GetTelephoneEventDetectionStatus(
+        int channel, bool& enabled,
+        TelephoneEventDetectionMethods& detectionMethod) = 0;
+
+protected:
+    VoEDtmf() {}
+    virtual ~VoEDtmf() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_DTMF_H
diff --git a/trunk/src/voice_engine/main/interface/voe_encryption.h b/trunk/src/voice_engine/main/interface/voe_encryption.h
new file mode 100644
index 0000000..ae3f373
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_encryption.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - External encryption and decryption.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEEncryption* encrypt  = VoEEncryption::GetInterface(voe);
+//  ...
+//  encrypt->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
+#define WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEEncryption
+{
+public:
+    // Factory for the VoEEncryption sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEEncryption* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEEncryption sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Installs an Encryption instance and enables external encryption
+    // for the selected |channel|.
+    virtual int RegisterExternalEncryption(
+        int channel, Encryption& encryption) = 0;
+
+    // Removes an Encryption instance and disables external encryption
+    // for the selected |channel|.
+    virtual int DeRegisterExternalEncryption(int channel) = 0;
+
+    // Not supported
+    virtual int EnableSRTPSend(int channel, CipherTypes cipherType,
+        int cipherKeyLength, AuthenticationTypes authType, int authKeyLength,
+        int authTagLength, SecurityLevels level, const unsigned char key[30],
+        bool useForRTCP = false) = 0;
+
+    // Not supported
+    virtual int DisableSRTPSend(int channel) = 0;
+
+    // Not supported
+    virtual int EnableSRTPReceive(int channel, CipherTypes cipherType,
+        int cipherKeyLength, AuthenticationTypes authType, int authKeyLength,
+        int authTagLength, SecurityLevels level, const unsigned char key[30],
+        bool useForRTCP = false) = 0;
+
+    // Not supported
+    virtual int DisableSRTPReceive(int channel) = 0;
+
+protected:
+    VoEEncryption() {}
+    virtual ~VoEEncryption() {}
+};
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
diff --git a/trunk/src/voice_engine/main/interface/voe_errors.h b/trunk/src/voice_engine/main/interface/voe_errors.h
new file mode 100644
index 0000000..cc05970
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_errors.h
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_ERRORS_H
+#define WEBRTC_VOICE_ENGINE_VOE_ERRORS_H
+
+// Warnings
+#define VE_PORT_NOT_DEFINED 8001
+#define VE_CHANNEL_NOT_VALID 8002
+#define VE_FUNC_NOT_SUPPORTED 8003
+#define VE_INVALID_LISTNR 8004
+#define VE_INVALID_ARGUMENT 8005
+#define VE_INVALID_PORT_NMBR 8006
+#define VE_INVALID_PLNAME 8007
+#define VE_INVALID_PLFREQ 8008
+#define VE_INVALID_PLTYPE 8009
+#define VE_INVALID_PACSIZE 8010
+#define VE_NOT_SUPPORTED 8011
+#define VE_ALREADY_LISTENING 8012
+#define VE_CHANNEL_NOT_CREATED 8013
+#define VE_MAX_ACTIVE_CHANNELS_REACHED 8014
+#define VE_REC_CANNOT_PREPARE_HEADER 8015
+#define VE_REC_CANNOT_ADD_BUFFER 8016
+#define VE_PLAY_CANNOT_PREPARE_HEADER 8017
+#define VE_ALREADY_SENDING 8018
+#define VE_INVALID_IP_ADDRESS 8019
+#define VE_ALREADY_PLAYING 8020
+#define VE_NOT_ALL_VERSION_INFO 8021
+#define VE_DTMF_OUTOF_RANGE 8022
+#define VE_INVALID_CHANNELS 8023
+#define VE_SET_PLTYPE_FAILED 8024
+#define VE_ENCRYPT_NOT_INITED 8025
+#define VE_NOT_INITED 8026
+#define VE_NOT_SENDING 8027
+#define VE_EXT_TRANSPORT_NOT_SUPPORTED 8028
+#define VE_EXTERNAL_TRANSPORT_ENABLED 8029
+#define VE_STOP_RECORDING_FAILED 8030
+#define VE_INVALID_RATE 8031
+#define VE_INVALID_PACKET 8032
+#define VE_NO_GQOS 8033
+#define VE_INVALID_TIMESTAMP 8034
+#define VE_RECEIVE_PACKET_TIMEOUT 8035
+#define VE_STILL_PLAYING_PREV_DTMF 8036
+#define VE_INIT_FAILED_WRONG_EXPIRY 8037
+#define VE_SENDING 8038
+#define VE_ENABLE_IPV6_FAILED 8039
+#define VE_FUNC_NO_STEREO 8040
+// Range 8041-8080 is not used
+#define VE_FW_TRAVERSAL_ALREADY_INITIALIZED 8081
+#define VE_PACKET_RECEIPT_RESTARTED 8082
+#define VE_NOT_ALL_INFO 8083
+#define VE_CANNOT_SET_SEND_CODEC 8084
+#define VE_CODEC_ERROR 8085
+#define VE_NETEQ_ERROR 8086
+#define VE_RTCP_ERROR 8087
+#define VE_INVALID_OPERATION 8088
+#define VE_CPU_INFO_ERROR 8089
+#define VE_SOUNDCARD_ERROR 8090
+#define VE_SPEECH_LEVEL_ERROR 8091
+#define VE_SEND_ERROR 8092
+#define VE_CANNOT_REMOVE_CONF_CHANNEL 8093
+#define VE_PLTYPE_ERROR 8094
+#define VE_SET_FEC_FAILED 8095
+#define VE_CANNOT_GET_PLAY_DATA 8096
+#define VE_APM_ERROR 8097
+#define VE_RUNTIME_PLAY_WARNING 8098
+#define VE_RUNTIME_REC_WARNING 8099
+#define VE_NOT_PLAYING 8100
+#define VE_SOCKETS_NOT_INITED 8101
+#define VE_CANNOT_GET_SOCKET_INFO 8102
+#define VE_INVALID_MULTICAST_ADDRESS 8103
+#define VE_DESTINATION_NOT_INITED 8104
+#define VE_RECEIVE_SOCKETS_CONFLICT 8105
+#define VE_SEND_SOCKETS_CONFLICT 8106
+#define VE_TYPING_NOISE_WARNING 8107
+#define VE_SATURATION_WARNING 8108
+#define VE_NOISE_WARNING 8109
+#define VE_CANNOT_GET_SEND_CODEC 8110
+#define VE_CANNOT_GET_REC_CODEC 8111
+#define VE_ALREADY_INITED 8112
+
+// Errors causing limited functionality
+#define VE_RTCP_SOCKET_ERROR 9001
+#define VE_MIC_VOL_ERROR 9002
+#define VE_SPEAKER_VOL_ERROR 9003
+#define VE_CANNOT_ACCESS_MIC_VOL 9004
+#define VE_CANNOT_ACCESS_SPEAKER_VOL 9005
+#define VE_GET_MIC_VOL_ERROR 9006
+#define VE_GET_SPEAKER_VOL_ERROR 9007
+#define VE_THREAD_RTCP_ERROR 9008
+#define VE_CANNOT_INIT_APM 9009
+#define VE_SEND_SOCKET_TOS_ERROR 9010
+#define VE_CANNOT_RETRIEVE_DEVICE_NAME 9013
+#define VE_SRTP_ERROR 9014
+// 9015 is not used
+#define VE_INTERFACE_NOT_FOUND 9016
+#define VE_TOS_GQOS_CONFLICT 9017
+#define VE_CANNOT_ADD_CONF_CHANNEL 9018
+#define VE_BUFFER_TOO_SMALL 9019
+#define VE_CANNOT_EXECUTE_SETTING 9020
+#define VE_CANNOT_RETRIEVE_SETTING 9021
+// 9022 is not used
+#define VE_RTP_KEEPALIVE_FAILED 9023
+#define VE_SEND_DTMF_FAILED 9024
+#define VE_CANNOT_RETRIEVE_CNAME 9025
+#define VE_DECRYPTION_FAILED 9026
+#define VE_ENCRYPTION_FAILED 9027
+#define VE_CANNOT_RETRIEVE_RTP_STAT 9028
+#define VE_GQOS_ERROR 9029
+#define VE_BINDING_SOCKET_TO_LOCAL_ADDRESS_FAILED 9030
+#define VE_TOS_INVALID 9031
+#define VE_TOS_ERROR 9032
+#define VE_CANNOT_RETRIEVE_VALUE 9033
+
+// Critical errors that stops voice functionality
+#define VE_PLAY_UNDEFINED_SC_ERR 10001
+#define VE_REC_CANNOT_OPEN_SC 10002
+#define VE_SOCKET_ERROR 10003
+#define VE_MMSYSERR_INVALHANDLE 10004
+#define VE_MMSYSERR_NODRIVER 10005
+#define VE_MMSYSERR_NOMEM 10006
+#define VE_WAVERR_UNPREPARED 10007
+#define VE_WAVERR_STILLPLAYING 10008
+#define VE_UNDEFINED_SC_ERR 10009
+#define VE_UNDEFINED_SC_REC_ERR 10010
+#define VE_THREAD_ERROR 10011
+#define VE_CANNOT_START_RECORDING 10012
+#define VE_PLAY_CANNOT_OPEN_SC 10013
+#define VE_NO_WINSOCK_2 10014
+#define VE_SEND_SOCKET_ERROR 10015
+#define VE_BAD_FILE 10016
+#define VE_EXPIRED_COPY 10017
+#define VE_NOT_AUTHORISED 10018
+#define VE_RUNTIME_PLAY_ERROR 10019
+#define VE_RUNTIME_REC_ERROR 10020
+#define VE_BAD_ARGUMENT 10021
+#define VE_LINUX_API_ONLY 10022
+#define VE_REC_DEVICE_REMOVED 10023
+#define VE_NO_MEMORY 10024
+#define VE_BAD_HANDLE 10025
+#define VE_RTP_RTCP_MODULE_ERROR 10026
+#define VE_AUDIO_CODING_MODULE_ERROR 10027
+#define VE_AUDIO_DEVICE_MODULE_ERROR 10028
+#define VE_CANNOT_START_PLAYOUT 10029
+#define VE_CANNOT_STOP_RECORDING 10030
+#define VE_CANNOT_STOP_PLAYOUT 10031
+#define VE_CANNOT_INIT_CHANNEL 10032
+#define VE_RECV_SOCKET_ERROR 10033
+#define VE_SOCKET_TRANSPORT_MODULE_ERROR 10034
+#define VE_AUDIO_CONF_MIX_MODULE_ERROR 10035
+
+// Warnings for other platforms (reserved range 8061-8080)
+#define VE_IGNORED_FUNCTION 8061
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_ERRORS_H
diff --git a/trunk/src/voice_engine/main/interface/voe_external_media.h b/trunk/src/voice_engine/main/interface/voe_external_media.h
new file mode 100644
index 0000000..50d2d38
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_external_media.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// In some cases it is desirable to use an audio source or sink which may
+// not be available to the VoiceEngine, such as a DV camera. This sub-API
+// contains functions that allow for the use of such external recording
+// sources and playout sinks. It also describes how recorded data, or data
+// to be played out, can be modified outside the VoiceEngine.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEMediaProcess media = VoEMediaProcess::GetInterface(voe);
+//  base->Init();
+//  ...
+//  media->SetExternalRecordingStatus(true);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  media->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
+#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEMediaProcess
+{
+public:
+    // The VoiceEngine user should override the Process() method in a
+    // derived class. Process() will be called when audio is ready to
+    // be processed. The audio can be accessed in several different modes
+    // given by the |type| parameter. The function should modify the
+    // original data and ensure that it is copied back to the |audio10ms|
+    // array. The number of samples in the frame cannot be changed.
+    // The sampling frequency will depend upon the codec used. 
+    // If |isStereo| is true, audio10ms will contain 16-bit PCM data
+    // samples in interleaved stereo format (L0,R0,L1,R1,…):
+    virtual void Process(const int channel, const ProcessingTypes type,
+                         WebRtc_Word16 audio10ms[], const int length,
+                         const int samplingFreq, const bool isStereo) = 0;
+
+protected:
+    virtual ~VoEMediaProcess() {}
+};
+
+class WEBRTC_DLLEXPORT VoEExternalMedia
+{
+public:
+    // Factory for the VoEExternalMedia sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEExternalMedia* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEExternalMedia sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Installs a VoEMediaProcess derived instance and activates external
+    // media for the specified |channel| and |type|.
+    virtual int RegisterExternalMediaProcessing(
+        int channel, ProcessingTypes type, VoEMediaProcess& processObject) = 0;
+
+    // Removes the VoEMediaProcess derived instance and deactivates external
+    // media for the specified |channel| and |type|.
+    virtual int DeRegisterExternalMediaProcessing(
+        int channel, ProcessingTypes type) = 0;
+
+    // Toggles state of external recording.
+    virtual int SetExternalRecordingStatus(bool enable) = 0;
+
+    // Toogles state of external playout.
+    virtual int SetExternalPlayoutStatus(bool enable) = 0;
+
+    // This function accepts externally recorded audio. During transmission,
+    // this method should be called at as regular an interval as possible
+    // with frames of corresponding size.
+    virtual int ExternalRecordingInsertData(
+        const WebRtc_Word16 speechData10ms[], int lengthSamples,
+        int samplingFreqHz, int current_delay_ms) = 0;
+
+    // This function gets audio for an external playout sink.
+    // During transmission, this function should be called every ~10 ms
+    // to obtain a new 10 ms frame of audio. The length of the block will
+    // be 160, 320, 440 or 480 samples (for 16, 32, 44 or 48 kHz sampling
+    // rates respectively).
+    virtual int ExternalPlayoutGetData(
+        WebRtc_Word16 speechData10ms[], int samplingFreqHz,
+        int current_delay_ms, int& lengthSamples) = 0;
+
+protected:
+    VoEExternalMedia() {}
+    virtual ~VoEExternalMedia() {}
+};
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
diff --git a/trunk/src/voice_engine/main/interface/voe_file.h b/trunk/src/voice_engine/main/interface/voe_file.h
new file mode 100644
index 0000000..d968dcf
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_file.h
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - File playback.
+//  - File recording.
+//  - File conversion.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEFile* file  = VoEFile::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  base->StartPlayout(ch);
+//  file->StartPlayingFileAsMicrophone(ch, "data_file_16kHz.pcm", true);
+//  ...
+//  file->StopPlayingFileAsMicrophone(ch);
+//  base->StopPlayout(ch);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  file->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_FILE_H
+#define WEBRTC_VOICE_ENGINE_VOE_FILE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEFile
+{
+public:
+    // Factory for the VoEFile sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEFile* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEFile sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Starts playing and mixing files with the local speaker signal for
+    // playout.
+    virtual int StartPlayingFileLocally(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0,
+        int stopPointMs = 0) = 0;
+
+    // Starts playing and mixing streams with the local speaker signal for
+    // playout.
+    virtual int StartPlayingFileLocally(
+        int channel,
+        InStream* stream,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0, int stopPointMs = 0) = 0;
+
+    // Stops playback of a file on a specific |channel|.
+    virtual int StopPlayingFileLocally(int channel) = 0;
+
+    // Returns the current file playing state for a specific |channel|.
+    virtual int IsPlayingFileLocally(int channel) = 0;
+
+    // Sets the volume scaling for a speaker file that is already playing.
+    virtual int ScaleLocalFilePlayout(int channel, float scale) = 0;
+
+    // Starts reading data from a file and transmits the data either
+    // mixed with or instead of the microphone signal.
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false ,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0) = 0;
+
+    // Starts reading data from a stream and transmits the data either
+    // mixed with or instead of the microphone signal.
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        InStream* stream,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0) = 0;
+
+    // Stops playing of a file as microphone signal for a specific |channel|.
+    virtual int StopPlayingFileAsMicrophone(int channel) = 0;
+
+    // Returns whether the |channel| is currently playing a file as microphone.
+    virtual int IsPlayingFileAsMicrophone(int channel) = 0;
+
+    // Sets the volume scaling for a microphone file that is already playing.
+    virtual int ScaleFileAsMicrophonePlayout(int channel, float scale) = 0;
+
+    // Starts recording the mixed playout audio.
+    virtual int StartRecordingPlayout(int channel,
+                                      const char* fileNameUTF8,
+                                      CodecInst* compression = NULL,
+                                      int maxSizeBytes = -1) = 0;
+
+    // Stops recording the mixed playout audio.
+    virtual int StopRecordingPlayout(int channel) = 0;
+
+    virtual int StartRecordingPlayout(int channel,
+                                      OutStream* stream,
+                                      CodecInst* compression = NULL) = 0;
+
+    // Starts recording the microphone signal to a file.
+    virtual int StartRecordingMicrophone(const char* fileNameUTF8,
+                                         CodecInst* compression = NULL,
+                                         int maxSizeBytes = -1) = 0;
+
+    // Starts recording the microphone signal to a stream.
+    virtual int StartRecordingMicrophone(OutStream* stream,
+                                         CodecInst* compression = NULL) = 0;
+
+    // Stops recording the microphone signal.
+    virtual int StopRecordingMicrophone() = 0;
+
+
+    // Gets the duration of a file.
+    virtual int GetFileDuration(const char* fileNameUTF8, int& durationMs,
+        FileFormats format = kFileFormatPcm16kHzFile) = 0;
+
+    // Gets the current played position of a file on a specific |channel|.
+    virtual int GetPlaybackPosition(int channel, int& positionMs) = 0;
+
+    virtual int ConvertPCMToWAV(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8) = 0;
+
+    virtual int ConvertPCMToWAV(InStream* streamIn,
+                                OutStream* streamOut) = 0;
+
+    virtual int ConvertWAVToPCM(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8) = 0;
+
+    virtual int ConvertWAVToPCM(InStream* streamIn,
+                                OutStream* streamOut) = 0;
+
+    virtual int ConvertPCMToCompressed(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8,
+                                       CodecInst* compression) = 0;
+
+    virtual int ConvertPCMToCompressed(InStream* streamIn,
+                                       OutStream* streamOut,
+                                       CodecInst* compression) = 0;
+
+    virtual int ConvertCompressedToPCM(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8) = 0;
+
+    virtual int ConvertCompressedToPCM(InStream* streamIn,
+                                       OutStream* streamOut) = 0;
+
+protected:
+    VoEFile() {}
+    virtual ~VoEFile() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_FILE_H
diff --git a/trunk/src/voice_engine/main/interface/voe_hardware.h b/trunk/src/voice_engine/main/interface/voe_hardware.h
new file mode 100644
index 0000000..24ed1ff
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_hardware.h
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Audio device handling.
+//  - Device information.
+//  - CPU load monitoring.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEHardware* hardware  = VoEHardware::GetInterface(voe);
+//  base->Init();
+//  ...
+//  int n_devices = hardware->GetNumOfPlayoutDevices();
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  hardware->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_HARDWARE_H
+#define WEBRTC_VOICE_ENGINE_VOE_HARDWARE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEHardware
+{
+public:
+    // Factory for the VoEHardware sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEHardware* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEHardware sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Gets the number of audio devices available for recording.
+    virtual int GetNumOfRecordingDevices(int& devices) = 0;
+
+    // Gets the number of audio devices available for playout.
+    virtual int GetNumOfPlayoutDevices(int& devices) = 0;
+
+    // Gets the name of a specific recording device given by an |index|.
+    // On Windows Vista/7, it also retrieves an additional unique ID
+    // (GUID) for the recording device.
+    virtual int GetRecordingDeviceName(int index, char strNameUTF8[128],
+                                       char strGuidUTF8[128]) = 0;
+
+    // Gets the name of a specific playout device given by an |index|.
+    // On Windows Vista/7, it also retrieves an additional unique ID
+    // (GUID) for the playout device.
+    virtual int GetPlayoutDeviceName(int index, char strNameUTF8[128],
+                                     char strGuidUTF8[128]) = 0;
+
+    // Checks if the sound card is available to be opened for recording.
+    virtual int GetRecordingDeviceStatus(bool& isAvailable) = 0;
+
+    // Checks if the sound card is available to be opened for playout.
+    virtual int GetPlayoutDeviceStatus(bool& isAvailable) = 0;
+
+    // Sets the audio device used for recording.
+    virtual int SetRecordingDevice(
+        int index, StereoChannel recordingChannel = kStereoBoth) = 0;
+
+    // Sets the audio device used for playout.
+    virtual int SetPlayoutDevice(int index) = 0;
+
+    // Sets the type of audio device layer to use.
+    virtual int SetAudioDeviceLayer(AudioLayers audioLayer) = 0;
+
+    // Gets the currently used (active) audio device layer.
+    virtual int GetAudioDeviceLayer(AudioLayers& audioLayer) = 0;
+
+    // Gets the VoiceEngine's current CPU consumption in terms of the percent
+    // of total CPU availability. [Windows only]
+    virtual int GetCPULoad(int& loadPercent) = 0;
+
+    // Gets the computer's current CPU consumption in terms of the percent
+    // of the total CPU availability. This method may fail a few times on
+    // Windows because it needs a certain warm-up time before reporting the
+    // result. You should check the return value and either try again or
+    // give up when it fails.
+    virtual int GetSystemCPULoad(int& loadPercent) = 0;
+
+    // Not supported
+    virtual int ResetAudioDevice() = 0;
+
+    // Not supported
+    virtual int AudioDeviceControl(
+        unsigned int par1, unsigned int par2, unsigned int par3) = 0;
+
+    // Not supported
+    virtual int SetLoudspeakerStatus(bool enable) = 0;
+
+    // Not supported
+    virtual int GetLoudspeakerStatus(bool& enabled) = 0;
+
+    // *Experimental - not recommended for use.*
+    // Enables the Windows Core Audio built-in AEC. Fails on other platforms.
+    //
+    // Currently incompatible with the standard VoE AEC and AGC; don't attempt
+    // to enable them while this is active.
+    //
+    // Must be called before VoEBase::StartSend(). When enabled:
+    // 1. VoEBase::StartPlayout() must be called before VoEBase::StartSend().
+    // 2. VoEBase::StopSend() should be called before VoEBase::StopPlayout().
+    //    The reverse order may cause garbage audio to be rendered or the
+    //    capture side to halt until StopSend() is called.
+    //
+    //    As a consequence, SetPlayoutDevice() should be used with caution
+    //    during a call. It will function, but may cause the above issues for
+    //    the duration it takes to complete. (In practice, it should complete
+    //    fast enough to avoid audible degradation).
+    virtual int EnableBuiltInAEC(bool enable) = 0;
+    virtual bool BuiltInAECIsEnabled() const = 0;
+
+protected:
+    VoEHardware() {}
+    virtual ~VoEHardware() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_HARDWARE_H
diff --git a/trunk/src/voice_engine/main/interface/voe_neteq_stats.h b/trunk/src/voice_engine/main/interface/voe_neteq_stats.h
new file mode 100644
index 0000000..4940bed
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_neteq_stats.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoENetEqStats
+{
+public:
+    // Factory for the VoENetEqStats sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoENetEqStats* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoENetEqStats sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Get the "in-call" statistics from NetEQ.
+    // The statistics are reset after the query.
+    virtual int GetNetworkStatistics(int channel, NetworkStatistics& stats) = 0;
+
+protected:
+    VoENetEqStats() {}
+    virtual ~VoENetEqStats() {}
+};
+
+}   // namespace webrtc
+
+#endif    // #ifndef WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_H
diff --git a/trunk/src/voice_engine/main/interface/voe_network.h b/trunk/src/voice_engine/main/interface/voe_network.h
new file mode 100644
index 0000000..10acf1c
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_network.h
@@ -0,0 +1,177 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - External protocol support.
+//  - Extended port and address APIs.
+//  - Port and address filters.
+//  - Windows GQoS functions.
+//  - Packet timeout notification.
+//  - Dead-or-Alive connection observations.
+//  - Transmission of raw RTP/RTCP packets into existing channels.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoENetwork* netw  = VoENetwork::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  netw->SetPeriodicDeadOrAliveStatus(ch, true);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  netw->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETWORK_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETWORK_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoEConnectionObserver
+class WEBRTC_DLLEXPORT VoEConnectionObserver
+{
+public:
+    // This method will be called periodically and deliver dead-or-alive
+    // notifications for a specified |channel| when the observer interface
+    // has been installed and activated.
+    virtual void OnPeriodicDeadOrAlive(const int channel, const bool alive) = 0;
+
+protected:
+    virtual ~VoEConnectionObserver() {}
+};
+
+// VoENetwork
+class WEBRTC_DLLEXPORT VoENetwork
+{
+public:
+    // Factory for the VoENetwork sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoENetwork* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoENetwork sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Installs and enables a user-defined external transport protocol for a
+    // specified |channel|.
+    virtual int RegisterExternalTransport(
+        int channel, Transport& transport) = 0;
+
+    // Removes and disables a user-defined external transport protocol for a
+    // specified |channel|.
+    virtual int DeRegisterExternalTransport(int channel) = 0;
+
+    // The packets received from the network should be passed to this
+    // function when external transport is enabled. Note that the data
+    // including the RTP-header must also be given to the VoiceEngine.
+    virtual int ReceivedRTPPacket(
+        int channel, const void* data, unsigned int length) = 0;
+
+    // The packets received from the network should be passed to this
+    // function when external transport is enabled. Note that the data
+    // including the RTCP-header must also be given to the VoiceEngine.
+    virtual int ReceivedRTCPPacket(
+        int channel, const void* data, unsigned int length) = 0;
+
+    // Gets the source ports and IP address of incoming packets on a
+    // specific |channel|.
+    virtual int GetSourceInfo(
+        int channel, int& rtpPort, int& rtcpPort, char ipAddr[64]) = 0;
+
+    // Gets the local (host) IP address.
+    virtual int GetLocalIP(char ipAddr[64], bool ipv6 = false) = 0;
+
+    // Enables IPv6 for a specified |channel|.
+    virtual int EnableIPv6(int channel) = 0;
+
+    // Gets the current IPv6 status for a specified |channel|.
+    virtual bool IPv6IsEnabled(int channel) = 0;
+
+    // Enables a port and IP address filter for incoming packets on a
+    // specific |channel|.
+    virtual int SetSourceFilter(int channel,
+        int rtpPort, int rtcpPort = 0, const char ipAddr[64] = 0) = 0;
+
+    // Gets the current port and IP-address filter for a specified |channel|.
+    virtual int GetSourceFilter(
+        int channel, int& rtpPort, int& rtcpPort, char ipAddr[64]) = 0;
+
+    // Sets the six-bit Differentiated Services Code Point (DSCP) in the
+    // IP header of the outgoing stream for a specific |channel|.
+    virtual int SetSendTOS(int channel,
+        int DSCP, int priority = -1, bool useSetSockopt = false) = 0;
+
+    // Gets the six-bit DSCP in the IP header of the outgoing stream for
+    // a specific channel.
+    virtual int GetSendTOS(
+        int channel, int& DSCP, int& priority, bool& useSetSockopt) = 0;
+
+    // Sets the Generic Quality of Service (GQoS) service level.
+    // The Windows operating system then maps to a Differentiated Services
+    // Code Point (DSCP) and to an 802.1p setting. [Windows only]
+    virtual int SetSendGQoS(
+        int channel, bool enable, int serviceType, int overrideDSCP = 0) = 0;
+
+    // Gets the Generic Quality of Service (GQoS) service level.
+    virtual int GetSendGQoS(
+        int channel, bool& enabled, int& serviceType, int& overrideDSCP) = 0;
+
+    // Enables or disables warnings that report if packets have not been
+    // received in |timeoutSeconds| seconds for a specific |channel|.
+    virtual int SetPacketTimeoutNotification(
+        int channel, bool enable, int timeoutSeconds = 2) = 0;
+
+    // Gets the current time-out notification status.
+    virtual int GetPacketTimeoutNotification(
+        int channel, bool& enabled, int& timeoutSeconds) = 0;
+
+    // Installs the observer class implementation for a specified |channel|.
+    virtual int RegisterDeadOrAliveObserver(
+        int channel, VoEConnectionObserver& observer) = 0;
+
+    // Removes the observer class implementation for a specified |channel|.
+    virtual int DeRegisterDeadOrAliveObserver(int channel) = 0;
+
+    // Enables or disables the periodic dead-or-alive callback functionality
+    // for a specified |channel|.
+    virtual int SetPeriodicDeadOrAliveStatus(
+        int channel, bool enable, int sampleTimeSeconds = 2) = 0;
+
+    // Gets the current dead-or-alive notification status.
+    virtual int GetPeriodicDeadOrAliveStatus(
+        int channel, bool& enabled, int& sampleTimeSeconds) = 0;
+
+    // Handles sending a raw UDP data packet over an existing RTP or RTCP
+    // socket.
+    virtual int SendUDPPacket(
+        int channel, const void* data, unsigned int length,
+        int& transmittedBytes, bool useRtcpSocket = false) = 0;
+
+protected:
+    VoENetwork() {}
+    virtual ~VoENetwork() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_NETWORK_H
diff --git a/trunk/src/voice_engine/main/interface/voe_rtp_rtcp.h b/trunk/src/voice_engine/main/interface/voe_rtp_rtcp.h
new file mode 100644
index 0000000..e26d85f
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_rtp_rtcp.h
@@ -0,0 +1,234 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Callbacks for RTP and RTCP events such as modified SSRC or CSRC.
+//  - SSRC handling.
+//  - Transmission of RTCP sender reports.
+//  - Obtaining RTCP data from incoming RTCP sender reports.
+//  - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
+//  - Forward Error Correction (FEC).
+//  - RTP Keepalive for maintaining the NAT mappings associated to RTP flows.
+//  - Writing RTP and RTCP packets to binary files for off-line analysis of
+//    the call quality.
+//  - Inserting extra RTP packets into active audio stream.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoERTP_RTCP* rtp_rtcp  = VoERTP_RTCP::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  rtp_rtcp->SetLocalSSRC(ch, 12345);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  rtp_rtcp->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_H
+#define WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoERTPObserver
+class WEBRTC_DLLEXPORT VoERTPObserver
+{
+public:
+    virtual void OnIncomingCSRCChanged(
+        const int channel, const unsigned int CSRC, const bool added) = 0;
+
+    virtual void OnIncomingSSRCChanged(
+        const int channel, const unsigned int SSRC) = 0;
+
+protected:
+    virtual ~VoERTPObserver() {}
+};
+
+// VoERTCPObserver
+class WEBRTC_DLLEXPORT VoERTCPObserver
+{
+public:
+    virtual void OnApplicationDataReceived(
+        const int channel, const unsigned char subType,
+        const unsigned int name, const unsigned char* data,
+        const unsigned short dataLengthInBytes) = 0;
+
+protected:
+    virtual ~VoERTCPObserver() {}
+};
+
+// CallStatistics
+struct CallStatistics
+{
+    unsigned short fractionLost;
+    unsigned int cumulativeLost;
+    unsigned int extendedMax;
+    unsigned int jitterSamples;
+    int rttMs;
+    int bytesSent;
+    int packetsSent;
+    int bytesReceived;
+    int packetsReceived;
+};
+
+// VoERTP_RTCP
+class WEBRTC_DLLEXPORT VoERTP_RTCP
+{
+public:
+
+    // Factory for the VoERTP_RTCP sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoERTP_RTCP* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoERTP_RTCP sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Registers an instance of a VoERTPObserver derived class for a specified
+    // |channel|. It will allow the user to observe callbacks related to the
+    // RTP protocol such as changes in the incoming SSRC.
+    virtual int RegisterRTPObserver(int channel, VoERTPObserver& observer) = 0;
+
+    // Deregisters an instance of a VoERTPObserver derived class for a
+    // specified |channel|.
+    virtual int DeRegisterRTPObserver(int channel) = 0;
+
+    // Registers an instance of a VoERTCPObserver derived class for a specified
+    // |channel|.
+    virtual int RegisterRTCPObserver(
+        int channel, VoERTCPObserver& observer) = 0;
+
+    // Deregisters an instance of a VoERTCPObserver derived class for a
+    // specified |channel|.
+    virtual int DeRegisterRTCPObserver(int channel) = 0;
+
+    // Sets the local RTP synchronization source identifier (SSRC) explicitly.
+    virtual int SetLocalSSRC(int channel, unsigned int ssrc) = 0;
+
+    // Gets the local RTP SSRC of a specified |channel|.
+    virtual int GetLocalSSRC(int channel, unsigned int& ssrc) = 0;
+
+    // Gets the SSRC of the incoming RTP packets.
+    virtual int GetRemoteSSRC(int channel, unsigned int& ssrc) = 0;
+
+    // Sets the status of rtp-audio-level-indication on a specific |channel|.
+    virtual int SetRTPAudioLevelIndicationStatus(
+        int channel, bool enable, unsigned char ID = 1) = 0;
+
+    // Gets the status of rtp-audio-level-indication on a specific |channel|.
+    virtual int GetRTPAudioLevelIndicationStatus(
+        int channel, bool& enabled, unsigned char& ID) = 0;
+
+    // Gets the CSRCs of the incoming RTP packets.
+    virtual int GetRemoteCSRCs(int channel, unsigned int arrCSRC[15]) = 0;
+
+    // Sets the RTCP status on a specific |channel|.
+    virtual int SetRTCPStatus(int channel, bool enable) = 0;
+
+    // Gets the RTCP status on a specific |channel|.
+    virtual int GetRTCPStatus(int channel, bool& enabled) = 0;
+
+    // Sets the canonical name (CNAME) parameter for RTCP reports on a
+    // specific |channel|.
+    virtual int SetRTCP_CNAME(int channel, const char cName[256]) = 0;
+
+    // Gets the canonical name (CNAME) parameter for RTCP reports on a
+    // specific |channel|.
+    virtual int GetRTCP_CNAME(int channel, char cName[256]) = 0;
+
+    // Gets the canonical name (CNAME) parameter for incoming RTCP reports
+    // on a specific channel.
+    virtual int GetRemoteRTCP_CNAME(int channel, char cName[256]) = 0;
+
+    // Gets RTCP data from incoming RTCP Sender Reports.
+    virtual int GetRemoteRTCPData(
+        int channel, unsigned int& NTPHigh, unsigned int& NTPLow,
+        unsigned int& timestamp, unsigned int& playoutTimestamp,
+        unsigned int* jitter = NULL, unsigned short* fractionLost = NULL) = 0;
+
+    // Gets RTP statistics for a specific |channel|.
+    virtual int GetRTPStatistics(
+        int channel, unsigned int& averageJitterMs, unsigned int& maxJitterMs,
+        unsigned int& discardedPackets) = 0;
+
+    // Gets RTCP statistics for a specific |channel|.
+    virtual int GetRTCPStatistics(int channel, CallStatistics& stats) = 0;
+
+    // Sends an RTCP APP packet on a specific |channel|.
+    virtual int SendApplicationDefinedRTCPPacket(
+        int channel, const unsigned char subType, unsigned int name,
+        const char* data, unsigned short dataLengthInBytes) = 0;
+
+    // Sets the Forward Error Correction (FEC) status on a specific |channel|.
+    virtual int SetFECStatus(
+        int channel, bool enable, int redPayloadtype = -1) = 0;
+
+    // Gets the FEC status on a specific |channel|.
+    virtual int GetFECStatus(
+        int channel, bool& enabled, int& redPayloadtype) = 0;
+
+    // Sets the RTP keepalive mechanism status.
+    // This functionality can maintain an existing Network Address Translator
+    // (NAT) mapping while regular RTP is no longer transmitted.
+    virtual int SetRTPKeepaliveStatus(
+        int channel, bool enable, unsigned char unknownPayloadType,
+        int deltaTransmitTimeSeconds = 15) = 0;
+
+    // Gets the RTP keepalive mechanism status.
+    virtual int GetRTPKeepaliveStatus(
+        int channel, bool& enabled, unsigned char& unknownPayloadType,
+        int& deltaTransmitTimeSeconds) = 0;
+
+    // Enables capturing of RTP packets to a binary file on a specific
+    // |channel| and for a given |direction|. The file can later be replayed
+    // using e.g. RTP Tools' rtpplay since the binary file format is
+    // compatible with the rtpdump format.
+    virtual int StartRTPDump(
+        int channel, const char fileNameUTF8[1024],
+        RTPDirections direction = kRtpIncoming) = 0;
+
+    // Disables capturing of RTP packets to a binary file on a specific
+    // |channel| and for a given |direction|.
+    virtual int StopRTPDump(
+        int channel, RTPDirections direction = kRtpIncoming) = 0;
+
+    // Gets the current RTP capturing state for the specified
+    // |channel| and |direction|.
+    virtual int RTPDumpIsActive(
+        int channel, RTPDirections direction = kRtpIncoming) = 0;
+
+    // Sends an extra RTP packet using an existing/active RTP session.
+    // It is possible to set the payload type, marker bit and payload
+    // of the extra RTP packet.
+    virtual int InsertExtraRTPPacket(
+        int channel, unsigned char payloadType, bool markerBit,
+        const char* payloadData, unsigned short payloadSize) = 0;
+
+protected:
+    VoERTP_RTCP() {}
+    virtual ~VoERTP_RTCP() {}
+};
+
+}  // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_H
diff --git a/trunk/src/voice_engine/main/interface/voe_video_sync.h b/trunk/src/voice_engine/main/interface/voe_video_sync.h
new file mode 100644
index 0000000..ac3b84a
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_video_sync.h
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - RTP header modification (time stamp and sequence number fields).
+//  - Playout delay tuning to synchronize the voice with video.
+//  - Playout delay monitoring.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEVideoSync* vsync  = VoEVideoSync::GetInterface(voe);
+//  base->Init();
+//  ...
+//  int buffer_ms(0);
+//  vsync->GetPlayoutBufferSize(buffer_ms);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  vsync->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_H
+#define WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class RtpRtcp;
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEVideoSync
+{
+public:
+    // Factory for the VoEVideoSync sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEVideoSync* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEVideoSync sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Gets the current sound card buffer size (playout delay).
+    virtual int GetPlayoutBufferSize(int& bufferMs) = 0;
+
+    // Sets an additional delay for the playout jitter buffer.
+    virtual int SetMinimumPlayoutDelay(int channel, int delayMs) = 0;
+
+    // Gets the sum of the algorithmic delay, jitter buffer delay, and the
+    // playout buffer delay for a specified |channel|.
+    virtual int GetDelayEstimate(int channel, int& delayMs) = 0;
+
+    // Manual initialization of the RTP timestamp.
+    virtual int SetInitTimestamp(int channel, unsigned int timestamp) = 0;
+
+    // Manual initialization of the RTP sequence number.
+    virtual int SetInitSequenceNumber(int channel, short sequenceNumber) = 0;
+
+    // Get the received RTP timestamp
+    virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp) = 0;
+
+    virtual int GetRtpRtcp (int channel, RtpRtcp* &rtpRtcpModule) = 0;
+
+protected:
+    VoEVideoSync() { }
+    virtual ~VoEVideoSync() { }
+};
+
+}   // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_H
diff --git a/trunk/src/voice_engine/main/interface/voe_volume_control.h b/trunk/src/voice_engine/main/interface/voe_volume_control.h
new file mode 100644
index 0000000..6d64e96
--- /dev/null
+++ b/trunk/src/voice_engine/main/interface/voe_volume_control.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Speaker volume controls.
+//  - Microphone volume control.
+//  - Non-linear speech level control.
+//  - Mute functions.
+//  - Additional stereo scaling methods.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEVolumeControl* volume  = VoEVolumeControl::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  volume->SetInputMute(ch, true);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  volume->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
+#define WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEVolumeControl
+{
+public:
+    // Factory for the VoEVolumeControl sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEVolumeControl* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEVolumeControl sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Sets the speaker |volume| level. Valid range is [0,255].
+    virtual int SetSpeakerVolume(unsigned int volume) = 0;
+
+    // Gets the speaker |volume| level.
+    virtual int GetSpeakerVolume(unsigned int& volume) = 0;
+
+    // Mutes the speaker device completely in the operating system.
+    virtual int SetSystemOutputMute(bool enable) = 0;
+
+    // Gets the output device mute state in the operating system.
+    virtual int GetSystemOutputMute(bool &enabled) = 0;
+
+    // Sets the microphone volume level. Valid range is [0,255].
+    virtual int SetMicVolume(unsigned int volume) = 0;
+
+    // Gets the microphone volume level.
+    virtual int GetMicVolume(unsigned int& volume) = 0;
+
+    // Mutes the microphone input signal completely without affecting
+    // the audio device volume.
+    virtual int SetInputMute(int channel, bool enable) = 0;
+
+    // Gets the current microphone input mute state.
+    virtual int GetInputMute(int channel, bool& enabled) = 0;
+
+    // Mutes the microphone device completely in the operating system.
+    virtual int SetSystemInputMute(bool enable) = 0;
+
+    // Gets the mute state of the input device in the operating system.
+    virtual int GetSystemInputMute(bool& enabled) = 0;
+
+    // Gets the microphone speech |level|, mapped non-linearly to the range
+    // [0,9].
+    virtual int GetSpeechInputLevel(unsigned int& level) = 0;
+
+    // Gets the speaker speech |level|, mapped non-linearly to the range
+    // [0,9].
+    virtual int GetSpeechOutputLevel(int channel, unsigned int& level) = 0;
+
+    // Gets the microphone speech |level|, mapped linearly to the range
+    // [0,32768].
+    virtual int GetSpeechInputLevelFullRange(unsigned int& level) = 0;
+
+    // Gets the speaker speech |level|, mapped linearly to the range [0,32768].
+    virtual int GetSpeechOutputLevelFullRange(
+        int channel, unsigned int& level) = 0;
+
+    // Sets a volume |scaling| applied to the outgoing signal of a specific
+    // channel. Valid scale range is [0.0, 10.0].
+    virtual int SetChannelOutputVolumeScaling(int channel, float scaling) = 0;
+
+    // Gets the current volume scaling for a specified |channel|.
+    virtual int GetChannelOutputVolumeScaling(int channel, float& scaling) = 0;
+
+    // Scales volume of the |left| and |right| channels independently.
+    // Valid scale range is [0.0, 1.0].
+    virtual int SetOutputVolumePan(int channel, float left, float right) = 0;
+
+    // Gets the current left and right scaling factors.
+    virtual int GetOutputVolumePan(int channel, float& left, float& right) = 0;
+
+protected:
+    VoEVolumeControl() {};
+    virtual ~VoEVolumeControl() {};
+};
+
+}  // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
diff --git a/trunk/src/voice_engine/main/source/Android.mk b/trunk/src/voice_engine/main/source/Android.mk
new file mode 100644
index 0000000..5040f17
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/Android.mk
@@ -0,0 +1,88 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings (defines MY_WEBRTC_COMMON_DEFS).
+include $(LOCAL_PATH)/../../../../android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc_voe_core
+LOCAL_MODULE_TAGS := optional
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := \
+    audio_frame_operations.cc \
+    channel.cc \
+    channel_manager.cc \
+    channel_manager_base.cc \
+    dtmf_inband.cc \
+    dtmf_inband_queue.cc \
+    level_indicator.cc \
+    monitor_module.cc \
+    output_mixer.cc \
+    ref_count.cc \
+    shared_data.cc \
+    statistics.cc \
+    transmit_mixer.cc \
+    utility.cc \
+    voe_audio_processing_impl.cc \
+    voe_base_impl.cc \
+    voe_call_report_impl.cc \
+    voe_codec_impl.cc \
+    voe_dtmf_impl.cc \
+    voe_encryption_impl.cc \
+    voe_external_media_impl.cc \
+    voe_file_impl.cc \
+    voe_hardware_impl.cc \
+    voe_neteq_stats_impl.cc \
+    voe_network_impl.cc \
+    voe_rtp_rtcp_impl.cc \
+    voe_video_sync_impl.cc \
+    voe_volume_control_impl.cc \
+    voice_engine_impl.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_ANDROID_OPENSLES'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../interface \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../common_audio/resampler/include \
+    $(LOCAL_PATH)/../../../common_audio/signal_processing/include \
+    $(LOCAL_PATH)/../../../modules/interface \
+    $(LOCAL_PATH)/../../../modules/audio_coding/main/interface \
+    $(LOCAL_PATH)/../../../modules/audio_conference_mixer/interface \
+    $(LOCAL_PATH)/../../../modules/audio_device/main/interface \
+    $(LOCAL_PATH)/../../../modules/audio_device/main/source \
+    $(LOCAL_PATH)/../../../modules/audio_processing/include \
+    $(LOCAL_PATH)/../../../modules/media_file/interface \
+    $(LOCAL_PATH)/../../../modules/rtp_rtcp/interface \
+    $(LOCAL_PATH)/../../../modules/udp_transport/interface \
+    $(LOCAL_PATH)/../../../modules/utility/interface \
+    $(LOCAL_PATH)/../../../system_wrappers/interface
+
+# Note: libdl is listed unconditionally here, which already covers the
+# non-simulator case; no conditional re-append is needed.
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
+LOCAL_LDLIBS += -ldl -lpthread
+endif
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_STATIC_LIBRARY)
diff --git a/trunk/src/voice_engine/main/source/audio_frame_operations.cc b/trunk/src/voice_engine/main/source/audio_frame_operations.cc
new file mode 100644
index 0000000..e08d0a2
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/audio_frame_operations.cc
@@ -0,0 +1,129 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_frame_operations.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+namespace voe {
+
+// Expands a mono frame to interleaved stereo in place by duplicating
+// each sample into the left and right channel slots.
+// Returns -1 if the frame is not mono or if the doubled payload would
+// not fit in the frame buffer; returns 0 on success.
+WebRtc_Word32
+AudioFrameOperations::MonoToStereo(AudioFrame& audioFrame)
+{
+    if (audioFrame._audioChannel != 1)
+    {
+        return -1;
+    }
+    if ((audioFrame._payloadDataLengthInSamples << 1) >=
+        AudioFrame::kMaxAudioFrameSizeSamples)
+    {
+        // Not enough memory to expand from mono to stereo.
+        return -1;
+    }
+
+    // Work backwards so the expansion can be done in place, avoiding the
+    // temporary heap copy of the payload: sample i is always read before
+    // positions 2*i and 2*i+1 (both >= i) are written.
+    for (int i = audioFrame._payloadDataLengthInSamples - 1; i >= 0; i--)
+    {
+        const WebRtc_Word16 sample = audioFrame._payloadData[i];
+        audioFrame._payloadData[2*i]   = sample;
+        audioFrame._payloadData[2*i+1] = sample;
+    }
+
+    audioFrame._audioChannel = 2;
+
+    return 0;
+}
+
+// Downmixes an interleaved stereo frame to mono in place. Each channel
+// is halved before summing, so the 16-bit result cannot overflow.
+// Returns -1 if the frame is not stereo; 0 on success.
+WebRtc_Word32
+AudioFrameOperations::StereoToMono(AudioFrame& audioFrame)
+{
+    if (audioFrame._audioChannel != 2)
+    {
+        return -1;
+    }
+
+    WebRtc_Word16* data = audioFrame._payloadData;
+    for (int n = 0; n < audioFrame._payloadDataLengthInSamples; n++)
+    {
+        data[n] = (data[2*n] >> 1) + (data[2*n+1] >> 1);
+    }
+
+    audioFrame._audioChannel = 1;
+
+    return 0;
+}
+
+// Silences the frame: zeroes the entire payload (all channels) and
+// resets the cached energy value. Always returns 0.
+WebRtc_Word32
+AudioFrameOperations::Mute(AudioFrame& audioFrame)
+{
+    const int numSamples =
+        audioFrame._payloadDataLengthInSamples * audioFrame._audioChannel;
+    memset(audioFrame._payloadData, 0, numSamples * sizeof(WebRtc_Word16));
+    audioFrame._energy = 0;
+    return 0;
+}
+
+// Applies independent |left| and |right| gains to an interleaved stereo
+// frame in place. Mono input is a caller error (asserts, returns -1).
+// Returns 0 on success. Results are truncated, not saturated.
+WebRtc_Word32
+AudioFrameOperations::Scale(const float left,
+                            const float right,
+                            AudioFrame& audioFrame)
+{
+    if (audioFrame._audioChannel == 1)
+    {
+        assert(false);
+        return -1;
+    }
+
+    WebRtc_Word16* samples = audioFrame._payloadData;
+    for (int n = 0; n < audioFrame._payloadDataLengthInSamples; n++)
+    {
+        samples[2*n]   = (WebRtc_Word16)(left * samples[2*n]);
+        samples[2*n+1] = (WebRtc_Word16)(right * samples[2*n+1]);
+    }
+    return 0;
+}
+
+// Applies |scale| to every sample in the frame (all channels) and
+// saturates the result to the 16-bit range [-32768, 32767].
+// Always returns 0.
+WebRtc_Word32
+AudioFrameOperations::ScaleWithSat(const float scale, AudioFrame& audioFrame)
+{
+    // Hoist the loop-invariant sample count out of the loop condition;
+    // the original recomputed samples * channels on every iteration.
+    const int length =
+        audioFrame._payloadDataLengthInSamples * audioFrame._audioChannel;
+
+    for (int i = 0; i < length; i++)
+    {
+        const WebRtc_Word32 tmp =
+            static_cast<WebRtc_Word32> (scale * audioFrame._payloadData[i]);
+        if (tmp < -32768)
+        {
+            audioFrame._payloadData[i] = -32768;
+        }
+        else if (tmp > 32767)
+        {
+            audioFrame._payloadData[i] = 32767;
+        }
+        else
+        {
+            audioFrame._payloadData[i] = static_cast<WebRtc_Word16> (tmp);
+        }
+    }
+    return 0;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
+
diff --git a/trunk/src/voice_engine/main/source/audio_frame_operations.h b/trunk/src/voice_engine/main/source/audio_frame_operations.h
new file mode 100644
index 0000000..368850b
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/audio_frame_operations.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H
+#define WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+class AudioFrame;
+
+namespace voe {
+
+// Stateless helpers that operate in place on an AudioFrame's 16-bit
+// PCM payload (channel layout is interleaved where stereo is involved).
+class AudioFrameOperations
+{
+public:
+    // Duplicates each mono sample into two interleaved stereo samples.
+    // Fails (-1) if the frame is not mono or the result would not fit.
+    static WebRtc_Word32 MonoToStereo(AudioFrame& audioFrame);
+
+    // Downmixes interleaved stereo to mono by averaging the channels
+    // (each halved before summing). Fails (-1) if not stereo.
+    static WebRtc_Word32 StereoToMono(AudioFrame& audioFrame);
+
+    // Zeroes the payload and the frame's energy field. Returns 0.
+    static WebRtc_Word32 Mute(AudioFrame& audioFrame);
+
+    // Applies independent left/right gains to a stereo frame
+    // (truncating, no saturation). Fails (-1) on mono input.
+    static WebRtc_Word32 Scale(const float left,
+                               const float right,
+                               AudioFrame& audioFrame);
+
+    // Applies a single gain to all samples, saturating the result to
+    // [-32768, 32767]. Returns 0.
+    static WebRtc_Word32 ScaleWithSat(const float scale,
+                                      AudioFrame& audioFrame);
+};
+
+}  //  namespace voe
+
+}  //  namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H
diff --git a/trunk/src/voice_engine/main/source/channel.cc b/trunk/src/voice_engine/main/source/channel.cc
new file mode 100644
index 0000000..4fc1c8e
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel.cc
@@ -0,0 +1,6684 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel.h"
+
+#include "audio_device.h"
+#include "audio_frame_operations.h"
+#include "audio_processing.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "process_thread.h"
+#include "rtp_dump.h"
+#include "statistics.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "utility.h"
+#include "voe_base.h"
+#include "voe_external_media.h"
+#include "voe_rtp_rtcp.h" 
+
+#if defined(_WIN32)
+#include <Qos.h>
+#endif
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+// AudioPacketizationCallback implementation: invoked by the ACM with an
+// encoded audio frame. Optionally records the current audio level for
+// the RTP audio-level header extension, then hands the frame to the
+// RTP/RTCP module for packetization and transmission (which in turn
+// triggers Transport::SendPacket()). Returns 0 on success, -1 on error.
+WebRtc_Word32
+Channel::SendData(FrameType frameType,
+                  WebRtc_UWord8   payloadType,
+                  WebRtc_UWord32  timeStamp,
+                  const WebRtc_UWord8*  payloadData,
+                  WebRtc_UWord16  payloadSize,
+                  const RTPFragmentationHeader* fragmentation)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
+                 " payloadSize=%u, fragmentation=0x%x)",
+                 frameType, payloadType, timeStamp, payloadSize, fragmentation);
+
+    if (_includeAudioLevelIndication)
+    {
+        assert(_rtpAudioProc.get() != NULL);
+        // Store current audio level in the RTP/RTCP module.
+        // The level will be used in combination with voice-activity state
+        // (frameType) to add an RTP header extension
+        _rtpRtcpModule.SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
+    }
+
+    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
+    // packetization.
+    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
+    if (_rtpRtcpModule.SendOutgoingData((FrameType&)frameType,
+                                        payloadType,
+                                        timeStamp,
+                                        payloadData,
+                                        payloadSize,
+                                        fragmentation) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
+            "Channel::SendData() failed to send data to RTP/RTCP module");
+        return -1;
+    }
+
+    // Remember the last outgoing timestamp/payload type; read elsewhere
+    // (e.g. for the InsertExtraRTPPacket path) — see SendPacket().
+    _lastLocalTimeStamp = timeStamp;
+    _lastPayloadType = payloadType;
+
+    return 0;
+}
+
+// ACM callback reporting the type of the frame being encoded. Records,
+// under the callback lock, whether the frame contained speech
+// (frameType == 1). Always returns 0.
+WebRtc_Word32
+Channel::InFrameType(WebRtc_Word16 frameType)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::InFrameType(frameType=%d)", frameType);
+
+    const bool isSpeech = (frameType == 1);
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    _sendFrameType = isSpeech ? 1 : 0;
+    return 0;
+}
+
+#ifdef WEBRTC_DTMF_DETECTION
+// In-band DTMF detector callback: a digit was detected in the received
+// audio. Forwards the digit and the end-of-event flag to the registered
+// telephone-event observer under the callback lock.
+// NOTE(review): 999 appears to be a "no digit" sentinel from the
+// detector and is deliberately not forwarded — confirm against the
+// detector implementation.
+int
+Channel::IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool end)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::IncomingDtmf(digitDtmf=%u, end=%d)",
+               digitDtmf, end);
+
+    if (digitDtmf != 999)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        if (_telephoneEventDetectionPtr)
+        {
+            _telephoneEventDetectionPtr->OnReceivedTelephoneEventInband(
+                _channelId, digitDtmf, end);
+        }
+    }
+
+    return 0;
+}
+#endif
+
+// Receive-side VAD callback: forwards the VAD decision for this channel
+// to the registered observer (if any) under the callback lock.
+// Always returns 0.
+WebRtc_Word32
+Channel::OnRxVadDetected(const int vadDecision)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    if (_rxVadObserverPtr != NULL)
+    {
+        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
+    }
+
+    return 0;
+}
+
+// Transport callback from the RTP/RTCP module: transmits one RTP packet.
+// Pipeline: optional one-shot header patch (InsertExtraRTPPacket API),
+// optional RTP dump to file, optional encryption (SRTP or external),
+// then transmission via either the WebRtc socket transport or a
+// user-registered external transport. Returns the byte count reported
+// by the transport, or -1 on failure.
+int
+Channel::SendPacket(int channel, const void *data, int len)
+{
+    channel = VoEChannelId(channel);
+    assert(channel == _channelId);
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);
+
+    if (_transportPtr == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() failed to send RTP packet due to"
+                     " invalid transport object");
+        return -1;
+    }
+
+    // One-shot header patch requested via the InsertExtraRTPPacket API:
+    // overwrite the second RTP header byte (marker bit | payload type)
+    // in the outgoing packet, then clear the flag.
+    if (_insertExtraRTPPacket)
+    {
+        WebRtc_UWord8* rtpHdr = (WebRtc_UWord8*)data;
+        WebRtc_UWord8 M_PT(0);
+        if (_extraMarkerBit)
+        {
+            M_PT = 0x80;            // set the M-bit
+        }
+        M_PT += _extraPayloadType;  // set the payload type
+        *(++rtpHdr) = M_PT;     // modify the M|PT-byte within the RTP header
+        _insertExtraRTPPacket = false;  // insert one packet only
+    }
+
+    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
+    WebRtc_Word32 bufferLength = len;
+
+    // Dump the RTP packet to a file (if RTP dump is enabled). A dump
+    // failure is logged but does not abort the transmission.
+    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() RTP dump to output file failed");
+    }
+
+    // SRTP or External encryption
+    if (_encrypting)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_encryptionRTPBufferPtr)
+            {
+                // Allocate memory for encryption buffer one time only
+                _encryptionRTPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform encryption (SRTP or external)
+            WebRtc_Word32 encryptedBufferLength = 0;
+            _encryptionPtr->encrypt(_channelId,
+                                    bufferToSendPtr,
+                                    _encryptionRTPBufferPtr,
+                                    bufferLength,
+                                    (int*)&encryptedBufferLength);
+            if (encryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_ENCRYPTION_FAILED,
+                    kTraceError, "Channel::SendPacket() encryption failed");
+                return -1;
+            }
+
+            // Replace default data buffer with encrypted buffer
+            bufferToSendPtr = _encryptionRTPBufferPtr;
+            bufferLength = encryptedBufferLength;
+        }
+    }
+
+    // Packet transmission using WebRtc socket transport
+    if (!_externalTransport)
+    {
+        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
+                                          bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendPacket() RTP transmission using WebRtc"
+                         " sockets failed");
+            return -1;
+        }
+        return n;
+    }
+
+    // Packet transmission using external transport. NOTE(review): the
+    // external-transport send is serialized with the callback lock while
+    // the socket-transport path above is not — presumably because the
+    // external transport pointer can be re-registered; confirm.
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        int n = _transportPtr->SendPacket(channel,
+                                          bufferToSendPtr,
+                                          bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendPacket() RTP transmission using external"
+                         " transport failed");
+            return -1;
+        }
+        return n;
+    }
+}
+
+// Transport callback from the RTP/RTCP module: transmits one RTCP
+// packet. Pipeline mirrors SendPacket(): optional dump to file,
+// optional encryption (SRTP or external), then transmission via the
+// WebRtc socket transport or a user-registered external transport.
+// Returns the byte count reported by the transport, or -1 on failure.
+int
+Channel::SendRTCPPacket(int channel, const void *data, int len)
+{
+    channel = VoEChannelId(channel);
+    assert(channel == _channelId);
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
+
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        if (_transportPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendRTCPPacket() failed to send RTCP packet"
+                         " due to invalid transport object");
+            return -1;
+        }
+    }
+
+    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
+    WebRtc_Word32 bufferLength = len;
+
+    // Dump the RTCP packet to a file (if RTP dump is enabled). A dump
+    // failure is logged but does not abort the transmission.
+    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::SendRTCPPacket() RTCP dump to output file"
+                     " failed");
+    }
+
+    // SRTP or External encryption
+    if (_encrypting)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_encryptionRTCPBufferPtr)
+            {
+                // Allocate memory for encryption buffer one time only
+                _encryptionRTCPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform encryption (SRTP or external).
+            WebRtc_Word32 encryptedBufferLength = 0;
+            _encryptionPtr->encrypt_rtcp(_channelId,
+                                         bufferToSendPtr,
+                                         _encryptionRTCPBufferPtr,
+                                         bufferLength,
+                                         (int*)&encryptedBufferLength);
+            if (encryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_ENCRYPTION_FAILED, kTraceError,
+                    "Channel::SendRTCPPacket() encryption failed");
+                return -1;
+            }
+
+            // Replace default data buffer with encrypted buffer
+            bufferToSendPtr = _encryptionRTCPBufferPtr;
+            bufferLength = encryptedBufferLength;
+        }
+    }
+
+    // Packet transmission using WebRtc socket transport
+    if (!_externalTransport)
+    {
+        int n = _transportPtr->SendRTCPPacket(channel,
+                                              bufferToSendPtr,
+                                              bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendRTCPPacket() transmission using WebRtc"
+                         " sockets failed");
+            return -1;
+        }
+        return n;
+    }
+
+    // Packet transmission using external transport. Both branches above
+    // return, so this is the final path (the original's trailing
+    // "return len;" was unreachable and has been removed).
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        int n = _transportPtr->SendRTCPPacket(channel,
+                                              bufferToSendPtr,
+                                              bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendRTCPPacket() transmission using external"
+                         " transport failed");
+            return -1;
+        }
+        return n;
+    }
+}
+
+// Socket-transport callback: one RTP packet arrived for this channel.
+// Records the current playout timestamp for delay estimation, applies
+// optional decryption (SRTP or external), dumps the packet to file if
+// RTP dump is enabled, and hands the packet to the RTP/RTCP module for
+// parsing (the payload is delivered back via OnReceivedPayloadData).
+void
+Channel::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                           const WebRtc_Word32 rtpPacketLength,
+                           const WebRtc_Word8* fromIP,
+                           const WebRtc_UWord16 fromPort)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IncomingRTPPacket(rtpPacketLength=%d,"
+                 " fromIP=%s, fromPort=%u)",
+                 rtpPacketLength, fromIP, fromPort);
+
+    // Store playout timestamp for the received RTP packet
+    // to be used for upcoming delay estimations
+    WebRtc_UWord32 playoutTimestamp(0);
+    if (GetPlayoutTimeStamp(playoutTimestamp) == 0)
+    {
+        _playoutTimeStampRTP = playoutTimestamp;
+    }
+
+    WebRtc_UWord8* rtpBufferPtr = (WebRtc_UWord8*)incomingRtpPacket;
+    WebRtc_Word32 rtpBufferLength = rtpPacketLength;
+
+    // SRTP or External decryption
+    if (_decrypting)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_decryptionRTPBufferPtr)
+            {
+                // Allocate memory for decryption buffer one time only
+                _decryptionRTPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform decryption (SRTP or external)
+            WebRtc_Word32 decryptedBufferLength = 0;
+            _encryptionPtr->decrypt(_channelId,
+                                    rtpBufferPtr,
+                                    _decryptionRTPBufferPtr,
+                                    rtpBufferLength,
+                                    (int*)&decryptedBufferLength);
+            if (decryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_DECRYPTION_FAILED, kTraceError,
+                    "Channel::IncomingRTPPacket() decryption failed");
+                return;
+            }
+
+            // Replace default data buffer with decrypted buffer
+            rtpBufferPtr = _decryptionRTPBufferPtr;
+            rtpBufferLength = decryptedBufferLength;
+        }
+    }
+
+    // Dump the RTP packet to a file (if RTP dump is enabled). The log
+    // message previously (and misleadingly) named SendPacket().
+    if (_rtpDumpIn.DumpPacket(rtpBufferPtr,
+                              (WebRtc_UWord16)rtpBufferLength) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::IncomingRTPPacket() RTP dump to input file"
+                     " failed");
+    }
+
+    // Deliver RTP packet to RTP/RTCP module for parsing
+    // The packet will be pushed back to the channel thru the
+    // OnReceivedPayloadData callback so we don't push it to the ACM here
+    if (_rtpRtcpModule.IncomingPacket((const WebRtc_UWord8*)rtpBufferPtr,
+                                      (WebRtc_UWord16)rtpBufferLength) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
+            "Channel::IncomingRTPPacket() RTP packet is invalid");
+        return;
+    }
+}
+
+// Socket-transport callback: one RTCP packet arrived for this channel.
+// Records the current playout timestamp (read by GetRemoteRTCPData),
+// applies optional decryption (SRTP or external), dumps the packet to
+// file if RTP dump is enabled, and hands it to the RTP/RTCP module.
+void
+Channel::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                            const WebRtc_Word32 rtcpPacketLength,
+                            const WebRtc_Word8* fromIP,
+                            const WebRtc_UWord16 fromPort)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IncomingRTCPPacket(rtcpPacketLength=%d, fromIP=%s,"
+                 " fromPort=%u)",
+                 rtcpPacketLength, fromIP, fromPort);
+
+    // Temporary buffer pointer and size for decryption
+    WebRtc_UWord8* rtcpBufferPtr = (WebRtc_UWord8*)incomingRtcpPacket;
+    WebRtc_Word32 rtcpBufferLength = rtcpPacketLength;
+
+    // Store playout timestamp for the received RTCP packet
+    // which will be read by the GetRemoteRTCPData API
+    WebRtc_UWord32 playoutTimestamp(0);
+    if (GetPlayoutTimeStamp(playoutTimestamp) == 0)
+    {
+        _playoutTimeStampRTCP = playoutTimestamp;
+    }
+
+    // SRTP or External decryption
+    if (_decrypting)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_decryptionRTCPBufferPtr)
+            {
+                // Allocate memory for decryption buffer one time only
+                _decryptionRTCPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform decryption (SRTP or external).
+            WebRtc_Word32 decryptedBufferLength = 0;
+            _encryptionPtr->decrypt_rtcp(_channelId,
+                                         rtcpBufferPtr,
+                                         _decryptionRTCPBufferPtr,
+                                         rtcpBufferLength,
+                                         (int*)&decryptedBufferLength);
+            if (decryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_DECRYPTION_FAILED, kTraceError,
+                    "Channel::IncomingRTCPPacket() decryption failed");
+                return;
+            }
+
+            // Replace default data buffer with decrypted buffer
+            rtcpBufferPtr = _decryptionRTCPBufferPtr;
+            rtcpBufferLength = decryptedBufferLength;
+        }
+    }
+
+    // Dump the RTCP packet to a file (if RTP dump is enabled). The log
+    // messages below previously named the wrong functions (SendPacket /
+    // IncomingRTPPacket); corrected to aid log triage.
+    if (_rtpDumpIn.DumpPacket(rtcpBufferPtr,
+                              (WebRtc_UWord16)rtcpBufferLength) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::IncomingRTCPPacket() RTCP dump to input file"
+                     " failed");
+    }
+
+    // Deliver RTCP packet to RTP/RTCP module for parsing
+    if (_rtpRtcpModule.IncomingPacket((const WebRtc_UWord8*)rtcpBufferPtr,
+                                      (WebRtc_UWord16)rtcpBufferLength) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
+            "Channel::IncomingRTCPPacket() RTCP packet is invalid");
+        return;
+    }
+}
+
+// RTP/RTCP module callback: an out-of-band telephone event was received
+// (presumably RFC 2833-style DTMF — confirm against the RTP module).
+// Forwards it to the registered telephone-event observer when
+// out-of-band detection is enabled; a no-op otherwise, and compiled out
+// entirely without WEBRTC_DTMF_DETECTION.
+void
+Channel::OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                  const WebRtc_UWord8 event,
+                                  const bool endOfEvent)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedTelephoneEvent(id=%d, event=%u,"
+                 " endOfEvent=%d)", id, event, endOfEvent);
+
+#ifdef WEBRTC_DTMF_DETECTION
+    // NOTE(review): member name is misspelled ("Detecion"); declared
+    // elsewhere, so it is left unchanged here.
+    if (_outOfBandTelephoneEventDetecion)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        if (_telephoneEventDetectionPtr)
+        {
+            _telephoneEventDetectionPtr->OnReceivedTelephoneEventOutOfBand(
+                _channelId, event, endOfEvent);
+        }
+    }
+#endif
+}
+
+// RTP/RTCP module callback: a telephone event is being played out on
+// the send side. If out-of-band DTMF feedback is enabled and the event
+// is a DTMF tone (0-15), plays the tone through the output mixer.
+void
+Channel::OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                              const WebRtc_UWord8 event,
+                              const WebRtc_UWord16 lengthMs,
+                              const WebRtc_UWord8 volume)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
+                 " volume=%u)", id, event, lengthMs, volume);
+
+    if (!_playOutbandDtmfEvent || (event > 15))
+    {
+        // Ignore callback since feedback is disabled or event is not a
+        // Dtmf tone event.
+        return;
+    }
+
+    assert(_outputMixerPtr != NULL);
+
+    // Start playing out the Dtmf tone (if playout is enabled).
+    // Reduce length of tone with 80ms to the reduce risk of echo.
+    // Clamp at zero so a tone shorter than 80 ms cannot produce a
+    // negative duration (the original computed lengthMs - 80 unguarded).
+    const WebRtc_UWord16 durationMs =
+        (lengthMs > 80) ? (WebRtc_UWord16)(lengthMs - 80) : 0;
+    _outputMixerPtr->PlayDtmfTone(event, durationMs, volume);
+}
+
+// RTP/RTCP module callback: the remote SSRC changed, i.e. a new
+// incoming RTP stream was detected. Resets the receive counters and
+// statistics, then notifies the registered RTP observer (if any).
+void
+Channel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                               const WebRtc_UWord32 SSRC)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
+                 id, SSRC);
+
+    const WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    // Start the RTP receive counters and statistics over from zero for
+    // the new stream.
+    _rtpRtcpModule.ResetReceiveDataCountersRTP();
+    _rtpRtcpModule.ResetStatisticsRTP();
+
+    if (!_rtpObserver)
+    {
+        return;
+    }
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    if (_rtpObserverPtr)
+    {
+        // Send new SSRC to registered observer using callback
+        _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
+    }
+}
+
+// RTP/RTCP module callback: a contributing source (CSRC) was added to
+// or removed from the incoming stream. Relays the change to the
+// registered RTP observer (if any).
+void Channel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 CSRC,
+                                    const bool added)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
+                 id, CSRC, added);
+
+    const WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    if (!_rtpObserver)
+    {
+        return;
+    }
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    if (_rtpObserverPtr)
+    {
+        _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
+    }
+}
+
+// RTP/RTCP module callback: application-defined RTCP (APP) data was
+// received. Relays it to the registered RTCP observer (if any).
+void
+Channel::OnApplicationDataReceived(const WebRtc_Word32 id,
+                                   const WebRtc_UWord8 subType,
+                                   const WebRtc_UWord32 name,
+                                   const WebRtc_UWord16 length,
+                                   const WebRtc_UWord8* data)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
+                 " name=%u, length=%u)",
+                 id, subType, name, length);
+
+    const WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    if (!_rtcpObserver)
+    {
+        return;
+    }
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    if (_rtcpObserverPtr)
+    {
+        _rtcpObserverPtr->OnApplicationDataReceived(
+            channel, subType, name, data, length);
+    }
+}
+
+// RTP/RTCP module callback: the receive side encountered a payload type
+// that needs a decoder. Builds a CodecInst from the announced format
+// and registers it as a receive codec with the ACM.
+// Returns 0 on success, -1 if the ACM rejects the codec.
+WebRtc_Word32
+Channel::OnInitializeDecoder(
+    const WebRtc_Word32 id,
+    const WebRtc_Word8 payloadType,
+    const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const int frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
+                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
+                 id, payloadType, payloadName, frequency, channels, rate);
+
+    assert(VoEChannelId(id) == _channelId);
+
+    // Zero-initialization guarantees plname stays null-terminated after
+    // the size-limited strncpy below.
+    CodecInst receiveCodec = {0};
+    CodecInst dummyCodec = {0};
+
+    receiveCodec.pltype = payloadType;
+    receiveCodec.plfreq = frequency;
+    receiveCodec.channels = channels;
+    receiveCodec.rate = rate;
+    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+    
+    // Look up the codec in the ACM database to fill in the packet size.
+    // NOTE(review): the lookup's return value is unchecked — on failure
+    // pacsize stays 0 from the zero-init; confirm that is acceptable.
+    _audioCodingModule.Codec(payloadName, dummyCodec, frequency);
+    receiveCodec.pacsize = dummyCodec.pacsize;
+
+    // Register the new codec to the ACM
+    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "Channel::OnInitializeDecoder() invalid codec ("
+                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
+        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
+        return -1;
+    }
+
+    return 0;
+}
+
+// RTP module callback: no RTP packet has arrived within the configured
+// time-out period. Reports VE_RECEIVE_PACKET_TIMEOUT to a registered
+// VoiceEngine observer (only while receiving, or when an external transport
+// is in use) and arms the packet-receipt-restarted detection.
+void
+Channel::OnPacketTimeout(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnPacketTimeout(id=%d)", id);
+
+    CriticalSectionScoped cs(*_callbackCritSectPtr);
+    if (_voiceEngineObserverPtr == NULL)
+    {
+        return;
+    }
+    if (!_receiving && !_externalTransport)
+    {
+        return;
+    }
+
+    const WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    // Ensure that next OnReceivedPacket() callback will trigger
+    // a VE_PACKET_RECEIPT_RESTARTED callback.
+    _rtpPacketTimedOut = true;
+
+    // Deliver callback to the observer
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "Channel::OnPacketTimeout() => "
+                 "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
+    _voiceEngineObserverPtr->CallbackOnError(channel,
+                                             VE_RECEIVE_PACKET_TIMEOUT);
+}
+
+// RTP module callback: a packet has been received. Only interesting when an
+// RTP packet arrives after a time-out, i.e. when the RTP session has been
+// restarted; in that case VE_PACKET_RECEIPT_RESTARTED is reported once to a
+// registered VoiceEngine observer and the time-out state is disarmed.
+void
+Channel::OnReceivedPacket(const WebRtc_Word32 id,
+                          const RtpRtcpPacketType packetType)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
+                 id, packetType);
+
+    assert(VoEChannelId(id) == _channelId);
+
+    // Notify only for the case when we have restarted an RTP session.
+    if (!_rtpPacketTimedOut || (kPacketRtp != packetType))
+    {
+        return;
+    }
+
+    CriticalSectionScoped cs(*_callbackCritSectPtr);
+    if (_voiceEngineObserverPtr == NULL)
+    {
+        return;
+    }
+
+    const WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    // Reset timeout mechanism so the restart is reported only once.
+    _rtpPacketTimedOut = false;
+
+    // Deliver callback to the observer. Fix: the trace previously named
+    // OnPacketTimeout() although it is emitted from OnReceivedPacket().
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedPacket() =>"
+                 " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
+    _voiceEngineObserverPtr->CallbackOnError(
+        channel,
+        VE_PACKET_RECEIPT_RESTARTED);
+}
+
+// RTP module callback: periodic dead-or-alive verdict for the remote side.
+// Translates the module's RTPAliveType into a boolean, updates the local
+// dead/alive counters, and forwards the verdict to a registered connection
+// observer.
+void
+Channel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                               const RTPAliveType alive)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
+
+    if (!_connectionObserver)
+        return;
+
+    WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    // Default to Alive to limit the risk of false Dead detections.
+    bool isAlive = true;
+
+    switch (alive)
+    {
+        case kRtpDead:
+            // Always mark the connection as Dead when the module reports
+            // kRtpDead.
+            isAlive = false;
+            break;
+        case kRtpNoRtp:
+            // It is possible that the connection is alive even if no RTP
+            // packet has been received for a long time since the other side
+            // might use VAD/DTX and a low SID-packet update rate. While
+            // playing, detect Alive for all NetEQ states except PLC_CNG
+            // (background noise only due to long expand or error). Note
+            // that the case where the other side stops sending during CNG
+            // state is still detected as Alive here; Dead is not set until
+            // RTCP packets have been missing for at least twelve seconds
+            // (handled internally by the RTP/RTCP module).
+            if (_playing)
+            {
+                isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
+            }
+            break;
+        default:
+            // kRtpAlive (or any other value): keep the Alive default.
+            break;
+    }
+
+    UpdateDeadOrAliveCounters(isAlive);
+
+    // Send callback to the registered observer (re-checked under the lock
+    // since the observer may be de-registered concurrently).
+    if (_connectionObserver)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        if (_connectionObserverPtr)
+        {
+            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
+        }
+    }
+}
+
+// RTP module callback: a parsed RTP payload is ready for decoding. Feeds it
+// to the ACM (NetEQ) unless playout is inactive, and refreshes the packet
+// delay estimate. Returns 0 on success (including a deliberate discard),
+// -1 if the ACM rejects the packet.
+WebRtc_Word32
+Channel::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                               const WebRtc_UWord16 payloadSize,
+                               const WebRtcRTPHeader* rtpHeader)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
+                 " payloadType=%u, audioChannel=%u)",
+                 payloadSize,
+                 rtpHeader->header.payloadType,
+                 rtpHeader->type.Audio.channel);
+
+    // Avoid inserting into NetEQ when we are not playing; count the packet
+    // as discarded instead.
+    if (!_playing)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "received packet is discarded since playing is not"
+                     " activated");
+        _numberOfDiscardedPackets++;
+        return 0;
+    }
+
+    // Push the incoming payload (parsed and ready for decoding) into the ACM
+    const WebRtc_Word32 acmResult =
+        _audioCodingModule.IncomingPacket((const WebRtc_Word8*) payloadData,
+                                          payloadSize,
+                                          *rtpHeader);
+    if (acmResult != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
+            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
+        return -1;
+    }
+
+    // Update the packet delay estimate from the RTP timing fields.
+    UpdatePacketDelay(rtpHeader->header.timestamp,
+                      rtpHeader->header.sequenceNumber);
+
+    return 0;
+}
+
+// Output mixer callback (id is the calling module's ID). Pulls one 10 ms
+// frame of decoded PCM from the ACM into |audioFrame| and runs the
+// per-channel output chain on it, strictly in this order: RxVAD, far-end
+// APM, gain scaling, panning, file mixing, on-hold muting, external media
+// processing, file recording, and level measurement. Returns 0 on success,
+// -1 if the ACM could not deliver audio (frame must not be mixed).
+WebRtc_Word32 Channel::GetAudioFrame(const WebRtc_Word32 id,
+                                     AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetAudioFrame(id=%d)", id);
+
+    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
+    if (_audioCodingModule.PlayoutData10Ms(audioFrame._frequencyInHz,
+                                           audioFrame) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
+        // In all likelihood, the audio in this frame is garbage. We return an
+        // error so that the audio mixer module doesn't add it to the mix. As
+        // a result, it won't be played out and the actions skipped here are
+        // irrelevant.
+        return -1;
+    }
+
+    // Run receive-side VAD on the decoded frame if enabled.
+    if (_RxVadDetection)
+    {
+        UpdateRxVadDetection(audioFrame);
+    }
+
+    // Convert module ID to internal VoE channel ID
+    audioFrame._id = VoEChannelId(audioFrame._id);
+    // Store speech type for dead-or-alive detection
+    _outputSpeechType = audioFrame._speechType;
+
+    // Perform far-end AudioProcessing module processing on the received signal
+    if (_rxApmIsEnabled)
+    {
+        ApmProcessRx(audioFrame);
+    }
+
+    // Output volume scaling; skipped when the gain is within ~1% of unity
+    // to avoid needless work on the common default (1.0).
+    if (_outputGain < 0.99f || _outputGain > 1.01f)
+    {
+        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
+    }
+
+    // Scale left and/or right channel(s) if stereo and master balance is
+    // active
+
+    if (_panLeft != 1.0f || _panRight != 1.0f)
+    {
+        if (audioFrame._audioChannel == 1)
+        {
+            // Emulate stereo mode since panning is active.
+            // The mono signal is copied to both left and right channels here.
+            AudioFrameOperations::MonoToStereo(audioFrame);
+        }
+        // For true stereo mode (when we are receiving a stereo signal), no
+        // action is needed.
+
+        // Do the panning operation (the audio frame contains stereo at this
+        // stage)
+        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
+    }
+
+    // Mix decoded PCM output with file if file mixing is enabled
+    if (_outputFilePlaying)
+    {
+        MixAudioWithFile(audioFrame, audioFrame._frequencyInHz);
+    }
+
+    // Place channel in on-hold state (~muted) if on-hold is activated
+    if (_outputIsOnHold)
+    {
+        AudioFrameOperations::Mute(audioFrame);
+    }
+
+    // External media: hand the frame to a registered playback-per-channel
+    // processing callback (guarded, since it can be de-registered
+    // concurrently).
+    if (_outputExternalMedia)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        const bool isStereo = (audioFrame._audioChannel == 2);
+        if (_outputExternalMediaCallbackPtr)
+        {
+            _outputExternalMediaCallbackPtr->Process(
+                _channelId,
+                kPlaybackPerChannel,
+                (WebRtc_Word16*)audioFrame._payloadData,
+                audioFrame._payloadDataLengthInSamples,
+                audioFrame._frequencyInHz,
+                isStereo);
+        }
+    }
+
+    // Record playout if enabled. Recording is mono: stereo frames are
+    // down-mixed into a local copy first so |audioFrame| is left untouched.
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+
+        if (_outputFileRecording && _outputFileRecorderPtr)
+        {
+            if(audioFrame._audioChannel == 2)
+            {
+                AudioFrame temp =  audioFrame;
+                AudioFrameOperations::StereoToMono (temp);
+                _outputFileRecorderPtr->RecordAudioToFile(temp);
+            }
+            else if(audioFrame._audioChannel == 1)
+            {
+                _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
+            }
+            else
+            {
+                // Frames are expected to be mono or stereo only.
+                assert(false);
+            }
+        }
+    }
+
+    // Measure audio level (0-9)
+    _outputAudioLevel.ComputeLevel(audioFrame);
+
+    return 0;
+}
+
+// Reports the highest sample rate (Hz) this channel's output path can
+// require: the larger of the ACM's playout and receive frequencies, raised
+// further if an output file with a higher rate is currently being mixed in.
+// Not needed on the sending side, since the codec limits the spectrum anyway.
+WebRtc_Word32
+Channel::NeededFrequency(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::NeededFrequency(id=%d)", id);
+
+    // The bigger of playout and receive frequency in the ACM.
+    const WebRtc_Word32 receiveFrequency =
+        _audioCodingModule.ReceiveFrequency();
+    const WebRtc_Word32 playoutFrequency =
+        _audioCodingModule.PlayoutFrequency();
+    int highestNeeded = (playoutFrequency > receiveFrequency) ?
+        playoutFrequency : receiveFrequency;
+
+    // Special case: when a file is played on the playout side its frequency
+    // is taken into consideration as well.
+    if (_outputFilePlaying)
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+        // Re-check the flag under the lock; the player may have stopped.
+        if (_outputFilePlayerPtr && _outputFilePlaying)
+        {
+            const int fileFrequency = _outputFilePlayerPtr->Frequency();
+            if (fileFrequency > highestNeeded)
+            {
+                highestNeeded = fileFrequency;
+            }
+        }
+    }
+
+    return (highestNeeded);
+}
+
+// Static factory: allocates a new Channel and hands ownership to the caller
+// through |channel|. Returns 0 on success, -1 if allocation failed.
+WebRtc_Word32
+Channel::CreateChannel(Channel*& channel,
+                       const WebRtc_Word32 channelId,
+                       const WebRtc_UWord32 instanceId)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
+                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
+        channelId, instanceId);
+
+    channel = new Channel(channelId, instanceId);
+    if (NULL == channel)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
+                     VoEId(instanceId,channelId),
+                     "Channel::CreateChannel() unable to allocate memory for"
+                     " channel");
+        return -1;
+    }
+    return 0;
+}
+
+// FilePlayer module callback: periodic play-progress notification.
+// Intentionally a no-op — not implemented yet; only traced.
+void
+Channel::PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::PlayNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+}
+
+// FileRecorder module callback: periodic record-progress notification.
+// Intentionally a no-op — not implemented yet; only traced.
+void
+Channel::RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RecordNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+}
+
+// FilePlayer module callback: a file player reached the end of its file.
+// Clears the matching "playing" flag (input- or output-side player,
+// identified by |id|) under the file lock.
+void
+Channel::PlayFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::PlayFileEnded(id=%d)", id);
+
+    if (id == _inputFilePlayerId)
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+        _inputFilePlaying = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::PlayFileEnded() => input file player module is"
+                     " shutdown");
+        return;
+    }
+
+    if (id == _outputFilePlayerId)
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+        _outputFilePlaying = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::PlayFileEnded() => output file player module is"
+                     " shutdown");
+    }
+}
+
+// FileRecorder module callback: the output file recorder has stopped.
+// Only the playout-side recorder exists per channel, hence the assert.
+void
+Channel::RecordFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RecordFileEnded(id=%d)", id);
+
+    assert(id == _outputFileRecorderId);
+
+    CriticalSectionScoped cs(_fileCritSect);
+    _outputFileRecording = false;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "Channel::RecordFileEnded() => output file recorder module is"
+                 " shutdown");
+}
+
+// Channel constructor. Performs only member initialization; all fallible
+// setup (process-thread registration, callback wiring, codec registration)
+// is deferred to Init(). Modules created here via Create*() are stored as
+// references and destroyed in ~Channel().
+// NOTE(review): initializer order must match member declaration order in the
+// header (not visible here) — do not reorder.
+Channel::Channel(const WebRtc_Word32 channelId,
+                 const WebRtc_UWord32 instanceId) :
+    // Owned critical sections (deleted via their addresses in the dtor).
+    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _transmitCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _instanceId(instanceId),
+    _channelId(channelId),
+    // Owned modules, keyed by the combined VoE module ID.
+    _rtpRtcpModule(*RtpRtcp::CreateRtpRtcp(VoEModuleId(
+        instanceId, channelId), true)),
+    _audioCodingModule(*AudioCodingModule::Create(
+        VoEModuleId(instanceId, channelId))),
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
     _numSocketThreads(KNumSocketThreads),
+    _socketTransportModule(*UdpTransport::Create(
+        VoEModuleId(instanceId, channelId), _numSocketThreads)),
+#endif
+#ifdef WEBRTC_SRTP
+    _srtpModule(*SrtpModule::CreateSrtpModule(VoEModuleId(instanceId,
+                                                          channelId))),
+#endif
+    _rtpDumpIn(*RtpDump::CreateRtpDump()),
+    _rtpDumpOut(*RtpDump::CreateRtpDump()),
+    _outputAudioLevel(),
+    _externalTransport(false),
+    _inputFilePlayerPtr(NULL),
+    _outputFilePlayerPtr(NULL),
+    _outputFileRecorderPtr(NULL),
+    // Avoid conflict with other channels by adding 1024 - 1026,
+    // won't use as much as 1024 channels.
+    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
+    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
+    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
+    _inputFilePlaying(false),
+    _outputFilePlaying(false),
+    _outputFileRecording(false),
+    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
+    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
+    _inputExternalMedia(false),
+    _outputExternalMedia(false),
+    _inputExternalMediaCallbackPtr(NULL),
+    _outputExternalMediaCallbackPtr(NULL),
+    _encryptionRTPBufferPtr(NULL),
+    _decryptionRTPBufferPtr(NULL),
+    _encryptionRTCPBufferPtr(NULL),
+    _decryptionRTCPBufferPtr(NULL),
+    _timeStamp(0), // This is just an offset, RTP module will add it's own random offset
+    // Default telephone-event payload type; presumably chosen from the
+    // dynamic RTP PT range and changeable via the public API — TODO confirm.
+    _sendTelephoneEventPayloadType(106),
+    _playoutTimeStampRTP(0),
+    _playoutTimeStampRTCP(0),
+    _numberOfDiscardedPackets(0),
+    // The pointers below are injected later (engine wiring); Init() checks
+    // the mandatory ones before use.
+    _engineStatisticsPtr(NULL),
+    _outputMixerPtr(NULL),
+    _transmitMixerPtr(NULL),
+    _moduleProcessThreadPtr(NULL),
+    _audioDeviceModulePtr(NULL),
+    _voiceEngineObserverPtr(NULL),
+    _callbackCritSectPtr(NULL),
+    _transportPtr(NULL),
+    _encryptionPtr(NULL),
+    _rtpAudioProc(NULL),
+    _rxAudioProcessingModulePtr(NULL),
+#ifdef WEBRTC_DTMF_DETECTION
+    _telephoneEventDetectionPtr(NULL),
+#endif
+    _rxVadObserverPtr(NULL),
+    _oldVadDecision(-1),
+    _sendFrameType(0),
+    _rtpObserverPtr(NULL),
+    _rtcpObserverPtr(NULL),
+    _outputIsOnHold(false),
+    _externalPlayout(false),
+    _inputIsOnHold(false),
+    _playing(false),
+    _sending(false),
+    _receiving(false),
+    _mixFileWithMicrophone(false),
+    _rtpObserver(false),
+    _rtcpObserver(false),
+    _mute(false),
+    // Unity panning/gain by default (no scaling applied).
+    _panLeft(1.0f),
+    _panRight(1.0f),
+    _outputGain(1.0f),
+    _encrypting(false),
+    _decrypting(false),
+    _playOutbandDtmfEvent(false),
+    _playInbandDtmfEvent(false),
+    _inbandTelephoneEventDetection(false),
+    _outOfBandTelephoneEventDetecion(false),
+    _extraPayloadType(0),
+    _insertExtraRTPPacket(false),
+    _extraMarkerBit(false),
+    _lastLocalTimeStamp(0),
+    _lastPayloadType(0),
+    _includeAudioLevelIndication(false),
+    _rtpPacketTimedOut(false),
+    _rtpPacketTimeOutIsEnabled(false),
+    _rtpTimeOutSeconds(0),
+    _connectionObserver(false),
+    _connectionObserverPtr(NULL),
+    _countAliveDetections(0),
+    _countDeadDetections(0),
+    _outputSpeechType(AudioFrame::kNormalSpeech),
+    _averageDelayMs(0),
+    _previousSequenceNumber(0),
+    _previousTimestamp(0),
+    // Initial packet-delay estimate in ms — presumably matches a common
+    // 20 ms ptime; TODO confirm against UpdatePacketDelay().
+    _recPacketDelayMs(20),
+    _RxVadDetection(false),
+    _rxApmIsEnabled(false),
+    _rxAgcIsEnabled(false),
+    _rxNsIsEnabled(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::Channel() - ctor");
+    _inbandDtmfQueue.ResetDtmf();
+    _inbandDtmfGenerator.Init();
+    _outputAudioLevel.Clear();
+
+    // Create far end AudioProcessing Module
+    _rxAudioProcessingModulePtr = AudioProcessing::Create(
+        VoEModuleId(instanceId, channelId));
+}
+
+// Channel destructor. Tears down in a deliberate sequence: stop activity
+// (send/receive/playout), de-register every module callback, de-register
+// modules from the process thread, destroy owned modules, then free buffers
+// and critical sections. De-registration failures are logged but not fatal.
+Channel::~Channel()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::~Channel() - dtor");
+
+    if (_outputExternalMedia)
+    {
+        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
+    }
+    if (_inputExternalMedia)
+    {
+        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
+    }
+    StopSend();
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    StopReceiving();
+    // De-register packet callback to ensure we're not in a callback when
+    // deleting channel state, avoids race condition and deadlock.
+    if (_socketTransportModule.InitializeReceiveSockets(NULL, 0, NULL, NULL, 0)
+            != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "~Channel() failed to de-register receive callback");
+    }
+#endif
+    StopPlayout();
+
+    // Shut down and destroy any active file players/recorder under the
+    // file lock.
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+        if (_inputFilePlayerPtr)
+        {
+            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+            _inputFilePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+            _inputFilePlayerPtr = NULL;
+        }
+        if (_outputFilePlayerPtr)
+        {
+            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+            _outputFilePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+            _outputFilePlayerPtr = NULL;
+        }
+        if (_outputFileRecorderPtr)
+        {
+            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+            _outputFileRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+            _outputFileRecorderPtr = NULL;
+        }
+    }
+
+    // The order to safely shutdown modules in a channel is:
+    // 1. De-register callbacks in modules
+    // 2. De-register modules in process thread
+    // 3. Destroy modules
+
+    // De-register all RTP module callbacks to ensure geting no callbacks
+    // (Receive socket callback was de-registered above)
+    if (_rtpRtcpModule.RegisterIncomingDataCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register incoming data callback"
+                     " (RTP module)");
+    }
+    if (_rtpRtcpModule.RegisterSendTransport(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register send transport "
+                     "(RTP module)");
+    }
+    if (_rtpRtcpModule.RegisterIncomingRTPCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register incoming RTP"
+                     " callback (RTP module)");
+    }
+    if (_rtpRtcpModule.RegisterIncomingRTCPCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register incoming RTCP "
+                     "callback (RTP module)");
+    }
+    if (_rtpRtcpModule.RegisterAudioCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register audio callback "
+                     "(RTP module)");
+    }
+    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register transport callback"
+                     " (Audio coding module)");
+    }
+    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register VAD callback"
+                     " (Audio coding module)");
+    }
+#ifdef WEBRTC_DTMF_DETECTION
+    if (_audioCodingModule.RegisterIncomingMessagesCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register incoming messages "
+                     "callback (Audio coding module)");
+    }
+#endif
+    // De-register modules in process thread
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (_moduleProcessThreadPtr->DeRegisterModule(&_socketTransportModule)
+            == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to deregister socket module");
+    }
+#endif
+    if (_moduleProcessThreadPtr->DeRegisterModule(&_rtpRtcpModule) == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to deregister RTP/RTCP module");
+    }
+
+    // Destroy modules
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    UdpTransport::Destroy(
+        &_socketTransportModule);
+#endif
+    RtpRtcp::DestroyRtpRtcp(&_rtpRtcpModule);
+    AudioCodingModule::Destroy(&_audioCodingModule);
+#ifdef WEBRTC_SRTP
+    SrtpModule::DestroySrtpModule(&_srtpModule);
+#endif
+    if (_rxAudioProcessingModulePtr != NULL)
+    {
+        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
+        _rxAudioProcessingModulePtr = NULL;
+    }
+
+    // End of modules shutdown
+
+    // Delete other objects
+    RtpDump::DestroyRtpDump(&_rtpDumpIn);
+    RtpDump::DestroyRtpDump(&_rtpDumpOut);
+    delete [] _encryptionRTPBufferPtr;
+    delete [] _decryptionRTPBufferPtr;
+    delete [] _encryptionRTCPBufferPtr;
+    delete [] _decryptionRTCPBufferPtr;
+    // The critical sections are stored as references to objects created in
+    // the ctor; delete them through their addresses.
+    delete &_callbackCritSect;
+    delete &_transmitCritSect;
+    delete &_fileCritSect;
+}
+
+// Second-stage initialization (the ctor only sets members). Must be called
+// after SetEngineInformation(). Registers the channel's modules with the
+// process thread, initializes the ACM and RTP/RTCP module, wires all
+// permanent callbacks (this channel implements the callback interfaces),
+// registers every supported receive codec, and configures the far-end
+// (receive-side) AudioProcessing module with engine defaults.
+// Returns 0 on success, -1 on any fatal failure (APM knob failures are
+// only recorded as warnings).
+WebRtc_Word32
+Channel::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::Init()");
+
+    // --- Initial sanity
+
+    if ((_engineStatisticsPtr == NULL) ||
+        (_moduleProcessThreadPtr == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::Init() must call SetEngineInformation() first");
+        return -1;
+    }
+
+    // --- Add modules to process thread (for periodic schedulation)
+
+    // The socket transport module is only registered when the built-in
+    // transport is compiled in.
+    const bool processThreadFail =
+        ((_moduleProcessThreadPtr->RegisterModule(&_rtpRtcpModule) != 0) ||
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+        (_moduleProcessThreadPtr->RegisterModule(
+                &_socketTransportModule) != 0));
+#else
+        false);
+#endif
+    if (processThreadFail)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_INIT_CHANNEL, kTraceError,
+            "Channel::Init() modules not registered");
+        return -1;
+    }
+    // --- ACM initialization
+
+    if ((_audioCodingModule.InitializeReceiver() == -1) ||
+#ifdef WEBRTC_CODEC_AVT
+        // out-of-band Dtmf tones are played out by default
+        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
+#endif
+        (_audioCodingModule.InitializeSender() == -1))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "Channel::Init() unable to initialize the ACM - 1");
+        return -1;
+    }
+
+    // --- RTP/RTCP module initialization
+
+    // Ensure that RTCP is enabled by default for the created channel.
+    // Note that, the module will keep generating RTCP until it is explicitly
+    // disabled by the user.
+    // After StopListen (when no sockets exists), RTCP packets will no longer
+    // be transmitted since the Transport object will then be invalid.
+
+    const bool rtpRtcpFail =
+        ((_rtpRtcpModule.InitReceiver() == -1) ||
+        (_rtpRtcpModule.InitSender() == -1) ||
+        (_rtpRtcpModule.SetTelephoneEventStatus(false, true, true) == -1) ||
+        // RTCP is enabled by default
+        (_rtpRtcpModule.SetRTCPStatus(kRtcpCompound) == -1));
+    if (rtpRtcpFail)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "Channel::Init() RTP/RTCP module not initialized");
+        return -1;
+    }
+
     // --- Register all permanent callbacks
+
+    // This channel object implements the RTP/RTCP and ACM callback
+    // interfaces; register itself for incoming data/RTP/RTCP, send
+    // transport, audio, ACM transport and VAD notifications.
+    const bool fail =
+        (_rtpRtcpModule.RegisterIncomingDataCallback(this) == -1) ||
+        (_rtpRtcpModule.RegisterIncomingRTPCallback(this) == -1) ||
+        (_rtpRtcpModule.RegisterIncomingRTCPCallback(this) == -1) ||
+        (_rtpRtcpModule.RegisterSendTransport(this) == -1) ||
+        (_rtpRtcpModule.RegisterAudioCallback(this) == -1) ||
+        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
+        (_audioCodingModule.RegisterVADCallback(this) == -1);
+
+    if (fail)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_INIT_CHANNEL, kTraceError,
+            "Channel::Init() callbacks not registered");
+        return -1;
+    }
+
+    // --- Register all supported codecs to the receiving side of the
+    // RTP/RTCP module
+
+    CodecInst codec;
+    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+
+    for (int idx = 0; idx < nSupportedCodecs; idx++)
+    {
+        // Open up the RTP/RTCP receiver for all supported codecs
+        if ((_audioCodingModule.Codec(idx, codec) == -1) ||
+            (_rtpRtcpModule.RegisterReceivePayload(codec) == -1))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
+                         "to RTP/RTCP receiver",
+                         codec.plname, codec.pltype, codec.plfreq,
+                         codec.channels, codec.rate);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
+                         "the RTP/RTCP receiver",
+                         codec.plname, codec.pltype, codec.plfreq,
+                         codec.channels, codec.rate);
+        }
+
+        // Ensure that PCMU is used as default codec on the sending side
+        if (!STR_CASE_CMP(codec.plname, "PCMU"))
+        {
+            SetSendCodec(codec);
+        }
+
+        // Register default PT for outband 'telephone-event'
+        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
+        {
+            if ((_rtpRtcpModule.RegisterSendPayload(codec) == -1) ||
+                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId,_channelId),
+                             "Channel::Init() failed to register outband "
+                             "'telephone-event' (%d/%d) correctly",
+                             codec.pltype, codec.plfreq);
+            }
+        }
+
+        // Comfort noise must be usable in both directions by default.
+        if (!STR_CASE_CMP(codec.plname, "CN"))
+        {
+            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
+                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
+                (_rtpRtcpModule.RegisterSendPayload(codec) == -1))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId,_channelId),
+                             "Channel::Init() failed to register CN (%d/%d) "
+                             "correctly - 1",
+                             codec.pltype, codec.plfreq);
+            }
+        }
+#ifdef WEBRTC_CODEC_RED
+        // Register RED to the receiving side of the ACM.
+        // We will not receive an OnInitializeDecoder() callback for RED.
+        if (!STR_CASE_CMP(codec.plname, "RED"))
+        {
+            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId,_channelId),
+                             "Channel::Init() failed to register RED (%d/%d) "
+                             "correctly",
+                             codec.pltype, codec.plfreq);
+            }
+        }
+#endif
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    // Ensure that the WebRtcSocketTransport implementation is used as
+    // Transport on the sending side
+    {
+        // A lock is needed here since users can call
+        // RegisterExternalTransport() at the same time.
+        CriticalSectionScoped cs(_callbackCritSect);
+        _transportPtr = &_socketTransportModule;
+    }
+#endif
+
+    // Initialize the far end AP module
+    // Using 8 kHz as initial Fs, the same as in transmission. Might be
+    // changed at the first receiving audio.
+    if (_rxAudioProcessingModulePtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NO_MEMORY, kTraceCritical,
+            "Channel::Init() failed to create the far-end AudioProcessing"
+            " module");
+        return -1;
+    }
+
+    // The APM knob failures below are non-fatal: a warning is recorded and
+    // initialization continues with the module's own defaults.
+    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Channel::Init() failed to set the sample rate to 8K for"
+            " far-end AP module");
+    }
+
+    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOUNDCARD_ERROR, kTraceWarning,
+            "Init() failed to set channels for the primary audio stream");
+    }
+
+    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
+        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Channel::Init() failed to set the high-pass filter for"
+            " far-end AP module");
+    }
+
+    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
+        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set noise reduction level for far-end"
+            " AP module");
+    }
+    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
+        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set noise reduction state for far-end"
+            " AP module");
+    }
+
+    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
+        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set AGC mode for far-end AP module");
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
+        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set AGC state for far-end AP module");
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetEngineInformation(Statistics& engineStatistics,
+                              OutputMixer& outputMixer,
+                              voe::TransmitMixer& transmitMixer,
+                              ProcessThread& moduleProcessThread,
+                              AudioDeviceModule& audioDeviceModule,
+                              VoiceEngineObserver* voiceEngineObserver,
+                              CriticalSectionWrapper* callbackCritSect)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetEngineInformation()");
+    _engineStatisticsPtr = &engineStatistics;
+    _outputMixerPtr = &outputMixer;
+    _transmitMixerPtr = &transmitMixer,
+    _moduleProcessThreadPtr = &moduleProcessThread;
+    _audioDeviceModulePtr = &audioDeviceModule;
+    _voiceEngineObserverPtr = voiceEngineObserver;
+    _callbackCritSectPtr = callbackCritSect;
+    return 0;
+}
+
// Advances the local RTP timestamp counter by the payload length (in
// samples) of the current audio frame. Always returns 0.
WebRtc_Word32
Channel::UpdateLocalTimeStamp()
{

    _timeStamp += _audioFrame._payloadDataLengthInSamples;
    return 0;
}
+
+WebRtc_Word32
+Channel::StartPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartPlayout()");
+    if (_playing)
+    {
+        return 0;
+    }
+    // Add participant as candidates for mixing.
+    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
+            "StartPlayout() failed to add participant to mixer");
+        return -1;
+    }
+
+    _playing = true;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StopPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopPlayout()");
+    if (!_playing)
+    {
+        return 0;
+    }
+    // Remove participant as candidates for mixing
+    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
+            "StartPlayout() failed to remove participant from mixer");
+        return -1;
+    }
+
+    _playing = false;
+    _outputAudioLevel.Clear();
+
+    return 0;
+}
+
// Starts sending on this channel.
// Sets |_sending| under the callback lock first (making concurrent calls
// no-ops), then starts the RTP/RTCP module outside the lock; on failure
// the flag is rolled back under the lock and -1 is returned.
WebRtc_Word32
Channel::StartSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(_callbackCritSect);

        if (_sending)
        {
            return 0;
        }
        _sending = true;
    }

    // Note: SetSendingStatus() is deliberately called outside the lock
    // scope above; the flag set optimistically is reverted on failure.
    if (_rtpRtcpModule.SetSendingStatus(true) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "StartSend() RTP/RTCP failed to start sending");
        CriticalSectionScoped cs(_callbackCritSect);
        _sending = false;
        return -1;
    }

    return 0;
}
+
+WebRtc_Word32
+Channel::StopSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopSend()");
+    {
+        // A lock is needed because |_sending| can be accessed or modified by
+        // another thread at the same time.
+        CriticalSectionScoped cs(_callbackCritSect);
+
+        if (!_sending)
+        {
+            return 0;
+        }
+        _sending = false;
+    }
+
+    // Reset sending SSRC and sequence number and triggers direct transmission
+    // of RTCP BYE
+    if (_rtpRtcpModule.SetSendingStatus(false) == -1 ||
+        _rtpRtcpModule.ResetSendDataCountersRTP() == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
+            "StartSend() RTP/RTCP failed to stop sending");
+    }
+
+    return 0;
+}
+
// Starts receiving on this channel.
// With the internal socket transport, the receive sockets must already
// have been set up via SetLocalReceiver(); with external transport only
// the bookkeeping flags are updated.
WebRtc_Word32
Channel::StartReceiving()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartReceiving()");
    if (_receiving)
    {
        return 0;
    }
    // If external transport is used, we will only initialize/set the variables
    // after this section, since we are not using the WebRtc transport but
    // still need to keep track of e.g. if we are receiving.
#ifndef WEBRTC_EXTERNAL_TRANSPORT
    if (!_externalTransport)
    {
        // SetLocalReceiver() must have been called first for the internal
        // socket transport to have initialized receive sockets.
        if (!_socketTransportModule.ReceiveSocketsInitialized())
        {
            _engineStatisticsPtr->SetLastError(
                VE_SOCKETS_NOT_INITED, kTraceError,
                "StartReceive() must set local receiver first");
            return -1;
        }
        if (_socketTransportModule.StartReceiving(KNumberOfSocketBuffers) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
                "StartReceiving() failed to start receiving");
            return -1;
        }
    }
#endif
    _receiving = true;
    // Start counting discarded packets from zero for this receive session.
    _numberOfDiscardedPackets = 0;
    return 0;
}
+
// Stops receiving on this channel.
// Stops the internal socket transport (if in use), then re-initializes
// the RTP receiver while preserving the telephone-event (DTMF) detection
// state and re-registering the receive codecs.
WebRtc_Word32
Channel::StopReceiving()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopReceiving()");
    if (!_receiving)
    {
        return 0;
    }

#ifndef WEBRTC_EXTERNAL_TRANSPORT
    // Only the internal socket transport needs an explicit stop; with an
    // external transport the caller owns packet delivery.
    if (!_externalTransport &&
        _socketTransportModule.ReceiveSocketsInitialized())
    {
        if (_socketTransportModule.StopReceiving() != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
                "StopReceiving() failed to stop receiving.");
            return -1;
        }
    }
#endif
    // Save the telephone-event detection state before InitReceiver(),
    // so it can be restored afterwards.
    bool dtmfDetection = _rtpRtcpModule.TelephoneEvent();
    WebRtc_Word32 ret = _rtpRtcpModule.InitReceiver();
    if (ret != 0) {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "StopReceiving() failed to reinitialize the RTP receiver.");
        return -1;
    }
    // Recover DTMF detection status.
    ret = _rtpRtcpModule.SetTelephoneEventStatus(dtmfDetection, true, true);
    if (ret != 0) {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "StopReceiving() failed to restore telephone-event status.");
    }
    // Re-register receive codecs with the RTP module (presumably cleared
    // by InitReceiver() above — TODO confirm against the RTP module).
    RegisterReceiveCodecsToRTPModule();
    _receiving = false;
    return 0;
}
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
// Configures the local RTP/RTCP receive sockets (internal transport only).
// Fails if external transport is enabled or if the channel is already
// sending or receiving. On socket-module failure, the module's error code
// is translated to the corresponding VoE error. Returns 0 on success.
WebRtc_Word32
Channel::SetLocalReceiver(const WebRtc_UWord16 rtpPort,
                          const WebRtc_UWord16 rtcpPort,
                          const WebRtc_Word8 ipAddr[64],
                          const WebRtc_Word8 multicastIpAddr[64])
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetLocalReceiver()");

    if (_externalTransport)
    {
        _engineStatisticsPtr->SetLastError(
            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
            "SetLocalReceiver() conflict with external transport");
        return -1;
    }

    // Receive sockets cannot be re-bound while streaming is in progress.
    if (_sending)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_SENDING, kTraceError,
            "SetLocalReceiver() already sending");
        return -1;
    }
    if (_receiving)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_LISTENING, kTraceError,
            "SetLocalReceiver() already receiving");
        return -1;
    }

    if (_socketTransportModule.InitializeReceiveSockets(this,
                                                        rtpPort,
                                                        ipAddr,
                                                        multicastIpAddr,
                                                        rtcpPort) != 0)
    {
        // Map the socket module's last error to a VoE error code.
        UdpTransport::ErrorCode lastSockError(
            _socketTransportModule.LastError());
        switch (lastSockError)
        {
        case UdpTransport::kIpAddressInvalid:
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_IP_ADDRESS, kTraceError,
                "SetLocalReceiver() invalid IP address");
            break;
        case UdpTransport::kSocketInvalid:
            _engineStatisticsPtr->SetLastError(
                VE_SOCKET_ERROR, kTraceError,
                "SetLocalReceiver() invalid socket");
            break;
        case UdpTransport::kPortInvalid:
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_PORT_NMBR, kTraceError,
                "SetLocalReceiver() invalid port");
            break;
        case UdpTransport::kFailedToBindPort:
            _engineStatisticsPtr->SetLastError(
                VE_BINDING_SOCKET_TO_LOCAL_ADDRESS_FAILED, kTraceError,
                "SetLocalReceiver() binding failed");
            break;
        default:
            _engineStatisticsPtr->SetLastError(
                VE_SOCKET_ERROR, kTraceError,
                "SetLocalReceiver() undefined socket error");
            break;
        }
        return -1;
    }
    return 0;
}
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::GetLocalReceiver(int& port, int& RTCPport, char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetLocalReceiver()");
+
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetLocalReceiver() conflict with external transport");
+        return -1;
+    }
+
+    WebRtc_Word8 ipAddrTmp[UdpTransport::
+                           kIpAddressVersion6Length] = {0};
+    WebRtc_UWord16 rtpPort(0);
+    WebRtc_UWord16 rtcpPort(0);
+    WebRtc_Word8 multicastIpAddr[UdpTransport::
+                                 kIpAddressVersion6Length] = {0};
+
+    // Acquire socket information from the socket module
+    if (_socketTransportModule.ReceiveSocketInformation(ipAddrTmp,
+                                                        rtpPort,
+                                                        rtcpPort,
+                                                        multicastIpAddr) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_GET_SOCKET_INFO, kTraceError,
+            "GetLocalReceiver() unable to retrieve socket information");
+        return -1;
+    }
+
+    // Deliver valid results to the user
+    port = static_cast<int> (rtpPort);
+    RTCPport = static_cast<int> (rtcpPort);
+    if (ipAddr != NULL)
+    {
+        strcpy(ipAddr, ipAddrTmp);
+    }
+    return 0;
+}
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
// Configures the remote (destination) RTP/RTCP address and ports for the
// internal socket transport, and optionally creates an extra local send
// socket when |sourcePort| differs from the local receive port.
// Fails if external transport is enabled. Returns 0 on success.
WebRtc_Word32
Channel::SetSendDestination(const WebRtc_UWord16 rtpPort,
                            const WebRtc_Word8 ipAddr[64],
                            const int sourcePort,
                            const WebRtc_UWord16 rtcpPort)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetSendDestination()");

    if (_externalTransport)
    {
        _engineStatisticsPtr->SetLastError(
            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
            "SetSendDestination() conflict with external transport");
        return -1;
    }

    // Initialize ports and IP address for the remote (destination) side.
    // By default, the sockets used for receiving are used for transmission as
    // well, hence the source ports for outgoing packets are the same as the
    // receiving ports specified in SetLocalReceiver.
    // If an extra send socket has been created, it will be utilized until a
    // new source port is specified or until the channel has been deleted and
    // recreated. If no socket exists, sockets will be created when the first
    // RTP and RTCP packets shall be transmitted (see e.g.
    // UdpTransportImpl::SendPacket()).
    //
    // NOTE: this function does not require that sockets exists; all it does is
    // to build send structures to be used with the sockets when they exist.
    // It is therefore possible to call this method before SetLocalReceiver.
    // However, sockets must exist if a multi-cast address is given as input.

    // Build send structures and enable QoS (if enabled and supported)
    if (_socketTransportModule.InitializeSendSockets(
        ipAddr, rtpPort, rtcpPort) != UdpTransport::kNoSocketError)
    {
        // Map the socket module's last error to a VoE error code.
        UdpTransport::ErrorCode lastSockError(
            _socketTransportModule.LastError());
        switch (lastSockError)
        {
        case UdpTransport::kIpAddressInvalid:
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_IP_ADDRESS, kTraceError,
                "SetSendDestination() invalid IP address 1");
            break;
        case UdpTransport::kSocketInvalid:
            _engineStatisticsPtr->SetLastError(
                VE_SOCKET_ERROR, kTraceError,
                "SetSendDestination() invalid socket 1");
            break;
        case UdpTransport::kQosError:
            _engineStatisticsPtr->SetLastError(
                VE_GQOS_ERROR, kTraceError,
                "SetSendDestination() failed to set QoS");
            break;
        case UdpTransport::kMulticastAddressInvalid:
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_MULTICAST_ADDRESS, kTraceError,
                "SetSendDestination() invalid multicast address");
            break;
        default:
            _engineStatisticsPtr->SetLastError(
                VE_SOCKET_ERROR, kTraceError,
                "SetSendDestination() undefined socket error 1");
            break;
        }
        return -1;
    }

    // Check if the user has specified a non-default source port different from
    // the local receive port.
    // If so, an extra local socket will be created unless the source port is
    // not unique.
    if (sourcePort != kVoEDefault)
    {
        WebRtc_UWord16 receiverRtpPort(0);
        WebRtc_UWord16 rtcpNA(0);
        if (_socketTransportModule.ReceiveSocketInformation(NULL,
                                                            receiverRtpPort,
                                                            rtcpNA,
                                                            NULL) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_CANNOT_GET_SOCKET_INFO, kTraceError,
                "SetSendDestination() failed to retrieve socket information");
            return -1;
        }

        WebRtc_UWord16 sourcePortUW16 =
                static_cast<WebRtc_UWord16> (sourcePort);

        // An extra socket will only be created if the specified source port
        // differs from the local receive port.
        if (sourcePortUW16 != receiverRtpPort)
        {
            // Initialize extra local socket to get a different source port
            // than the local
            // receiver port. Always use default source for RTCP.
            // Note that, this calls UdpTransport::CloseSendSockets().
            // RTCP uses the next port number (sourcePortUW16 + 1).
            if (_socketTransportModule.InitializeSourcePorts(
                sourcePortUW16,
                sourcePortUW16+1) != 0)
            {
                UdpTransport::ErrorCode lastSockError(
                    _socketTransportModule.LastError());
                switch (lastSockError)
                {
                case UdpTransport::kIpAddressInvalid:
                    _engineStatisticsPtr->SetLastError(
                        VE_INVALID_IP_ADDRESS, kTraceError,
                        "SetSendDestination() invalid IP address 2");
                    break;
                case UdpTransport::kSocketInvalid:
                    _engineStatisticsPtr->SetLastError(
                        VE_SOCKET_ERROR, kTraceError,
                        "SetSendDestination() invalid socket 2");
                    break;
                default:
                    _engineStatisticsPtr->SetLastError(
                        VE_SOCKET_ERROR, kTraceError,
                        "SetSendDestination() undefined socket error 2");
                    break;
                }
                return -1;
            }
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "SetSendDestination() extra local socket is created"
                         " to facilitate unique source port");
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "SetSendDestination() sourcePort equals the local"
                         " receive port => no extra socket is created");
        }
    }

    return 0;
}
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::GetSendDestination(int& port,
+                            char ipAddr[64],
+                            int& sourcePort,
+                            int& RTCPport)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendDestination()");
+
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSendDestination() conflict with external transport");
+        return -1;
+    }
+
+    WebRtc_Word8 ipAddrTmp[UdpTransport::kIpAddressVersion6Length] = {0};
+    WebRtc_UWord16 rtpPort(0);
+    WebRtc_UWord16 rtcpPort(0);
+    WebRtc_UWord16 rtpSourcePort(0);
+    WebRtc_UWord16 rtcpSourcePort(0);
+
+    // Acquire sending socket information from the socket module
+    _socketTransportModule.SendSocketInformation(ipAddrTmp, rtpPort, rtcpPort);
+    _socketTransportModule.SourcePorts(rtpSourcePort, rtcpSourcePort);
+
+    // Deliver valid results to the user
+    port = static_cast<int> (rtpPort);
+    RTCPport = static_cast<int> (rtcpPort);
+    sourcePort = static_cast<int> (rtpSourcePort);
+    if (ipAddr != NULL)
+    {
+        strcpy(ipAddr, ipAddrTmp);
+    }
+
+    return 0;
+}
+#endif
+
+
+WebRtc_Word32
+Channel::SetNetEQPlayoutMode(NetEqModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetNetEQPlayoutMode()");
+    AudioPlayoutMode playoutMode(voice);
+    switch (mode)
+    {
+        case kNetEqDefault:
+            playoutMode = voice;
+            break;
+        case kNetEqStreaming:
+            playoutMode = streaming;
+            break;
+        case kNetEqFax:
+            playoutMode = fax;
+            break;
+    }
+    if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetNetEQPlayoutMode() failed to set playout mode");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetNetEQPlayoutMode(NetEqModes& mode)
+{
+    const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
+    switch (playoutMode)
+    {
+        case voice:
+            mode = kNetEqDefault;
+            break;
+        case streaming:
+            mode = kNetEqStreaming;
+            break;
+        case fax:
+            mode = kNetEqFax;
+            break;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetNetEQBGNMode(NetEqBgnModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetNetEQPlayoutMode()");
+    ACMBackgroundNoiseMode noiseMode(On);
+    switch (mode)
+    {
+        case kBgnOn:
+            noiseMode = On;
+            break;
+        case kBgnFade:
+            noiseMode = Fade;
+            break;
+        case kBgnOff:
+            noiseMode = Off;
+            break;
+    }
+    if (_audioCodingModule.SetBackgroundNoiseMode(noiseMode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetBackgroundNoiseMode() failed to set noise mode");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetOnHoldStatus()");
+    if (mode == kHoldSendAndPlay)
+    {
+        _outputIsOnHold = enable;
+        _inputIsOnHold = enable;
+    }
+    else if (mode == kHoldPlayOnly)
+    {
+        _outputIsOnHold = enable;
+    }
+    if (mode == kHoldSendOnly)
+    {
+        _inputIsOnHold = enable;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetOnHoldStatus()");
+    enabled = (_outputIsOnHold || _inputIsOnHold);
+    if (_outputIsOnHold && _inputIsOnHold)
+    {
+        mode = kHoldSendAndPlay;
+    }
+    else if (_outputIsOnHold && !_inputIsOnHold)
+    {
+        mode = kHoldPlayOnly;
+    }
+    else if (!_outputIsOnHold && _inputIsOnHold)
+    {
+        mode = kHoldSendOnly;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
+                 enabled, mode);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_voiceEngineObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterVoiceEngineObserver() observer already enabled");
+        return -1;
+    }
+    _voiceEngineObserverPtr = &observer;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::DeRegisterVoiceEngineObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (!_voiceEngineObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterVoiceEngineObserver() observer already disabled");
+        return 0;
+    }
+    _voiceEngineObserverPtr = NULL;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetNetEQBGNMode(NetEqBgnModes& mode)
+{
+  ACMBackgroundNoiseMode noiseMode(On);
+    _audioCodingModule.BackgroundNoiseMode(noiseMode);
+    switch (noiseMode)
+    {
+        case On:
+            mode = kBgnOn;
+            break;
+        case Fade:
+            mode = kBgnFade;
+            break;
+        case Off:
+            mode = kBgnOff;
+            break;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetNetEQBGNMode() => mode=%u", mode);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetSendCodec(CodecInst& codec)
+{
+    return (_audioCodingModule.SendCodec(codec));
+}
+
+WebRtc_Word32
+Channel::GetRecCodec(CodecInst& codec)
+{
+    return (_audioCodingModule.ReceiveCodec(codec));
+}
+
+WebRtc_Word32
+Channel::SetSendCodec(const CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSendCodec()");
+
+    if (_audioCodingModule.RegisterSendCodec(codec) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "SetSendCodec() failed to register codec to ACM");
+        return -1;
+    }
+
+    if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
+    {
+        _rtpRtcpModule.DeRegisterSendPayload(codec.pltype);
+        if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
+        {
+            WEBRTC_TRACE(
+                    kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                    "SetSendCodec() failed to register codec to"
+                    " RTP/RTCP module");
+            return -1;
+        }
+    }
+
+    if (_rtpRtcpModule.SetAudioPacketSize(codec.pacsize) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "SetSendCodec() failed to set audio packet size");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetVADStatus(mode=%d)", mode);
+    // To disable VAD, DTX must be disabled too
+    disableDTX = ((enableVAD == false) ? true : disableDTX);
+    if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetVADStatus() failed to set VAD");
+        return -1;
+    }
+    return 0;
+}
+
// Retrieves the current VAD/DTX status from the ACM.
// Note: the ACM reports "DTX enabled" while this API reports
// "DTX disabled"; the flag is inverted below before returning.
WebRtc_Word32
Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetVADStatus");
    if (_audioCodingModule.VAD(disabledDTX, enabledVAD, mode) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "GetVADStatus() failed to get VAD status");
        return -1;
    }
    // Convert the ACM's "enabled" convention to this API's "disabled".
    disabledDTX = !disabledDTX;
    return 0;
}
+
// Sets (or, when codec.pltype == -1, removes) the receive payload type
// for |codec| in both the RTP/RTCP module and the ACM.
// Not allowed while the channel is playing or receiving. Registration
// failures are retried once after clearing the conflicting mapping.
WebRtc_Word32
Channel::SetRecPayloadType(const CodecInst& codec)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetRecPayloadType()");

    if (_playing)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceError,
            "SetRecPayloadType() unable to set PT while playing");
        return -1;
    }
    if (_receiving)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_LISTENING, kTraceError,
            "SetRecPayloadType() unable to set PT while listening");
        return -1;
    }

    if (codec.pltype == -1)
    {
        // De-register the selected codec (RTP/RTCP module and ACM)

        WebRtc_Word8 pltype(-1);
        CodecInst rxCodec = codec;

        // Get payload type for the given codec
        _rtpRtcpModule.ReceivePayloadType(rxCodec, &pltype);
        rxCodec.pltype = pltype;

        if (_rtpRtcpModule.DeRegisterReceivePayload(pltype) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                    VE_RTP_RTCP_MODULE_ERROR,
                    kTraceError,
                    "SetRecPayloadType() RTP/RTCP-module deregistration "
                    "failed");
            return -1;
        }
        if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
                "SetRecPayloadType() ACM deregistration failed - 1");
            return -1;
        }
        return 0;
    }

    if (_rtpRtcpModule.RegisterReceivePayload(codec) != 0)
    {
        // First attempt to register failed => de-register and try again
        _rtpRtcpModule.DeRegisterReceivePayload(codec.pltype);
        if (_rtpRtcpModule.RegisterReceivePayload(codec) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_RTP_RTCP_MODULE_ERROR, kTraceError,
                "SetRecPayloadType() RTP/RTCP-module registration failed");
            return -1;
        }
    }
    // Same retry pattern for the ACM registration.
    if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
    {
        _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
        if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
                "SetRecPayloadType() ACM registration failed - 1");
            return -1;
        }
    }
    return 0;
}
+
+WebRtc_Word32
+Channel::GetRecPayloadType(CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRecPayloadType()");
+    WebRtc_Word8 payloadType(-1);
+    if (_rtpRtcpModule.ReceivePayloadType(codec, &payloadType) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "GetRecPayloadType() failed to retrieve RX payload type");
+        return -1;
+    }
+    codec.pltype = payloadType;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetAMREncFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMREncFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+}
+
+WebRtc_Word32
+Channel::SetAMRDecFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMRDecFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+}
+
+WebRtc_Word32
+Channel::SetAMRWbEncFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMRWbEncFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+
+}
+
+WebRtc_Word32
+Channel::SetAMRWbDecFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMRWbDecFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+}
+
WebRtc_Word32
Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetSendCNPayloadType()");

    // Assigns payload type |type| to the Comfort Noise (CN) send codec for
    // the band selected by |frequency|. Registration order matters: the CN
    // codec is registered first in the ACM and then in the RTP/RTCP module.
    CodecInst codec;
    // -1 is passed to the ACM lookup when |frequency| is neither 16 nor
    // 32 kHz — presumably selecting the narrowband default; TODO confirm
    // against the ACM's Codec() semantics.
    WebRtc_Word32 samplingFreqHz(-1);
    if (frequency == kFreq32000Hz)
        samplingFreqHz = 32000;
    else if (frequency == kFreq16000Hz)
        samplingFreqHz = 16000;

    if (_audioCodingModule.Codec("CN", codec, samplingFreqHz) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "SetSendCNPayloadType() failed to retrieve default CN codec "
            "settings");
        return -1;
    }

    // Modify the payload type (must be set to dynamic range)
    codec.pltype = type;

    if (_audioCodingModule.RegisterSendCodec(codec) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "SetSendCNPayloadType() failed to register CN to ACM");
        return -1;
    }

    if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
    {
        // A previous registration for this payload type may block the new
        // one; remove it and retry once before giving up.
        _rtpRtcpModule.DeRegisterSendPayload(codec.pltype);
        if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_RTP_RTCP_MODULE_ERROR, kTraceError,
                "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
                "module");
            return -1;
        }
    }
    return 0;
}
+
WebRtc_Word32
Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetISACInitTargetRate()");

    // Sets the initial target rate for the iSAC bandwidth estimator. Only
    // valid while iSAC is the current send codec; the allowed rate range
    // depends on wideband (16 kHz) vs super-wideband (32 kHz) operation.
    CodecInst sendCodec;
    if (_audioCodingModule.SendCodec(sendCodec) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CODEC_ERROR, kTraceError,
            "SetISACInitTargetRate() failed to retrieve send codec");
        return -1;
    }
    if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
    {
        // This API is only valid if iSAC is setup to run in channel-adaptive
        // mode.
        // We do not validate the adaptive mode here. It is done later in the
        // ConfigISACBandwidthEstimator() API.
        _engineStatisticsPtr->SetLastError(
            VE_CODEC_ERROR, kTraceError,
            "SetISACInitTargetRate() send codec is not iSAC");
        return -1;
    }

    WebRtc_UWord8 initFrameSizeMsec(0);
    if (16000 == sendCodec.plfreq)
    {
        // Note that 0 is a valid rate and corresponds to "use default".
        if ((rateBps != 0 &&
            rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
            (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
        {
             _engineStatisticsPtr->SetLastError(
                VE_INVALID_ARGUMENT, kTraceError,
                "SetISACInitTargetRate() invalid target rate - 1");
            return -1;
        }
        // 30 or 60ms
        initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 16);
    }
    else if (32000 == sendCodec.plfreq)
    {
        if ((rateBps != 0 &&
            rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
            (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
        {
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_ARGUMENT, kTraceError,
                "SetISACInitTargetRate() invalid target rate - 2");
            return -1;
        }
        initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 32); // 30ms
    }
    // NOTE(review): for any other plfreq, initFrameSizeMsec stays 0 and is
    // passed through below — presumably unreachable once the send codec is
    // known to be iSAC; confirm.

    if (_audioCodingModule.ConfigISACBandwidthEstimator(
        initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "SetISACInitTargetRate() iSAC BWE config failed");
        return -1;
    }

    return 0;
}
+
WebRtc_Word32
Channel::SetISACMaxRate(int rateBps)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetISACMaxRate()");

    // Caps the maximum instantaneous bitrate of the iSAC send codec. The
    // valid range depends on the codec's sampling frequency, and the call
    // is rejected while the channel is actively sending.
    CodecInst sendCodec;
    if (_audioCodingModule.SendCodec(sendCodec) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CODEC_ERROR, kTraceError,
            "SetISACMaxRate() failed to retrieve send codec");
        return -1;
    }
    if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
    {
        // This API is only valid if iSAC is selected as sending codec.
        _engineStatisticsPtr->SetLastError(
            VE_CODEC_ERROR, kTraceError,
            "SetISACMaxRate() send codec is not iSAC");
        return -1;
    }
    if (16000 == sendCodec.plfreq)
    {
        // Wideband (16 kHz) rate limits.
        if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
            (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
        {
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_ARGUMENT, kTraceError,
                "SetISACMaxRate() invalid max rate - 1");
            return -1;
        }
    }
    else if (32000 == sendCodec.plfreq)
    {
        // Super-wideband (32 kHz) rate limits.
        if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
            (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
        {
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_ARGUMENT, kTraceError,
                "SetISACMaxRate() invalid max rate - 2");
            return -1;
        }
    }
    if (_sending)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SENDING, kTraceError,
            "SetISACMaxRate() unable to set max rate while sending");
        return -1;
    }

    // Set the maximum instantaneous rate of iSAC (works for both adaptive
    // and non-adaptive mode)
    if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "SetISACMaxRate() failed to set max rate");
        return -1;
    }

    return 0;
}
+
+WebRtc_Word32
+Channel::SetISACMaxPayloadSize(int sizeBytes)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetISACMaxPayloadSize()");
+    CodecInst sendCodec;
+    if (_audioCodingModule.SendCodec(sendCodec) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACMaxPayloadSize() failed to retrieve send codec");
+        return -1;
+    }
+    if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACMaxPayloadSize() send codec is not iSAC");
+        return -1;
+    }
+    if (16000 == sendCodec.plfreq)
+    {
+        if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
+            (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACMaxPayloadSize() invalid max payload - 1");
+            return -1;
+        }
+    }
+    else if (32000 == sendCodec.plfreq)
+    {
+        if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
+            (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACMaxPayloadSize() invalid max payload - 2");
+            return -1;
+        }
+    }
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError,
+            "SetISACMaxPayloadSize() unable to set max rate while sending");
+        return -1;
+    }
+
+    if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetISACMaxPayloadSize() failed to set max payload size");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 Channel::RegisterExternalTransport(Transport& transport)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::RegisterExternalTransport()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    // Sanity checks for default (non external transport) to avoid conflict with
+    // WebRtc sockets.
+    if (_socketTransportModule.SendSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(VE_SEND_SOCKETS_CONFLICT,
+                                           kTraceError,
+                "RegisterExternalTransport() send sockets already initialized");
+        return -1;
+    }
+    if (_socketTransportModule.ReceiveSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(VE_RECEIVE_SOCKETS_CONFLICT,
+                                           kTraceError,
+             "RegisterExternalTransport() receive sockets already initialized");
+        return -1;
+    }
+#endif
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
+                                           kTraceError,
+              "RegisterExternalTransport() external transport already enabled");
+       return -1;
+    }
+    _externalTransport = true;
+    _transportPtr = &transport;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::DeRegisterExternalTransport()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterExternalTransport()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (!_transportPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterExternalTransport() external transport already "
+            "disabled");
+        return 0;
+    }
+    _externalTransport = false;
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+    _transportPtr = NULL;
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "DeRegisterExternalTransport() all transport is disabled");
+#else
+    _transportPtr = &_socketTransportModule;
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "DeRegisterExternalTransport() internal Transport is enabled");
+#endif
+    return 0;
+}
+
+WebRtc_Word32
+Channel::ReceivedRTPPacket(const WebRtc_Word8* data, WebRtc_Word32 length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ReceivedRTPPacket()");
+    const WebRtc_Word8 dummyIP[] = "127.0.0.1";
+    IncomingRTPPacket(data, length, dummyIP, 0);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::ReceivedRTCPPacket(const WebRtc_Word8* data, WebRtc_Word32 length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ReceivedRTCPPacket()");
+    const WebRtc_Word8 dummyIP[] = "127.0.0.1";
+    IncomingRTCPPacket(data, length, dummyIP, 0);
+    return 0;
+}
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::GetSourceInfo(int& rtpPort, int& rtcpPort, char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSourceInfo()");
+
+    WebRtc_UWord16 rtpPortModule;
+    WebRtc_UWord16 rtcpPortModule;
+    WebRtc_Word8 ipaddr[UdpTransport::kIpAddressVersion6Length] = {0};
+
+    if (_socketTransportModule.RemoteSocketInformation(ipaddr,
+                                                       rtpPortModule,
+                                                       rtcpPortModule) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetSourceInfo() failed to retrieve remote socket information");
+        return -1;
+    }
+    strcpy(ipAddr, ipaddr);
+    rtpPort = rtpPortModule;
+    rtcpPort = rtcpPortModule;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+        "GetSourceInfo() => rtpPort=%d, rtcpPort=%d, ipAddr=%s",
+        rtpPort, rtcpPort, ipAddr);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::EnableIPv6()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::EnableIPv6()");
+    if (_socketTransportModule.ReceiveSocketsInitialized() ||
+        _socketTransportModule.SendSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "EnableIPv6() socket layer is already initialized");
+        return -1;
+    }
+    if (_socketTransportModule.EnableIpV6() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_ERROR, kTraceError,
+            "EnableIPv6() failed to enable IPv6");
+        const UdpTransport::ErrorCode lastError =
+            _socketTransportModule.LastError();
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport::LastError() => %d", lastError);
+        return -1;
+    }
+    return 0;
+}
+
+bool
+Channel::IPv6IsEnabled() const
+{
+    bool isEnabled = _socketTransportModule.IpV6Enabled();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "IPv6IsEnabled() => %d", isEnabled);
+    return isEnabled;
+}
+
+WebRtc_Word32
+Channel::SetSourceFilter(int rtpPort, int rtcpPort, const char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSourceFilter()");
+    if (_socketTransportModule.SetFilterPorts(
+        static_cast<WebRtc_UWord16>(rtpPort),
+        static_cast<WebRtc_UWord16>(rtcpPort)) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "SetSourceFilter() failed to set filter ports");
+        const UdpTransport::ErrorCode lastError =
+            _socketTransportModule.LastError();
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport::LastError() => %d",
+                     lastError);
+        return -1;
+    }
+    const WebRtc_Word8* filterIpAddress =
+        static_cast<const WebRtc_Word8*> (ipAddr);
+    if (_socketTransportModule.SetFilterIP(filterIpAddress) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_IP_ADDRESS, kTraceError,
+            "SetSourceFilter() failed to set filter IP address");
+        const UdpTransport::ErrorCode lastError =
+           _socketTransportModule.LastError();
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport::LastError() => %d", lastError);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetSourceFilter(int& rtpPort, int& rtcpPort, char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSourceFilter()");
+    WebRtc_UWord16 rtpFilterPort(0);
+    WebRtc_UWord16 rtcpFilterPort(0);
+    if (_socketTransportModule.FilterPorts(rtpFilterPort, rtcpFilterPort) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
+            "GetSourceFilter() failed to retrieve filter ports");
+    }
+    WebRtc_Word8 ipAddrTmp[UdpTransport::kIpAddressVersion6Length] = {0};
+    if (_socketTransportModule.FilterIP(ipAddrTmp) != 0)
+    {
+        // no filter has been configured (not seen as an error)
+        memset(ipAddrTmp,
+               0, UdpTransport::kIpAddressVersion6Length);
+    }
+    rtpPort = static_cast<int> (rtpFilterPort);
+    rtcpPort = static_cast<int> (rtcpFilterPort);
+    strcpy(ipAddr, ipAddrTmp);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+        "GetSourceFilter() => rtpPort=%d, rtcpPort=%d, ipAddr=%s",
+        rtpPort, rtcpPort, ipAddr);
+    return 0;
+}
+
WebRtc_Word32
Channel::SetSendTOS(int DSCP, int priority, bool useSetSockopt)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetSendTOS(DSCP=%d, useSetSockopt=%d)",
                 DSCP, (int)useSetSockopt);

    // Applies the IP DSCP (TOS) value, and optionally an 802.1p priority
    // (PCP), to the channel's send sockets. Each failure is mapped from the
    // transport module's error code to a VoiceEngine error.

    // Set TOS value and possibly try to force usage of setsockopt()
    if (_socketTransportModule.SetToS(DSCP, useSetSockopt) != 0)
    {
        UdpTransport::ErrorCode lastSockError(
            _socketTransportModule.LastError());
        // Translate the transport-level error to the engine error space.
        switch (lastSockError)
        {
        case UdpTransport::kTosError:
            _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
                                               "SetSendTOS() TOS error");
            break;
        case UdpTransport::kQosError:
            _engineStatisticsPtr->SetLastError(
                    VE_TOS_GQOS_CONFLICT, kTraceError,
                    "SetSendTOS() GQOS error");
            break;
        case UdpTransport::kTosInvalid:
            // can't switch SetSockOpt method without disabling TOS first, or
            // SetSockopt() call failed
            _engineStatisticsPtr->SetLastError(VE_TOS_INVALID, kTraceError,
                                               "SetSendTOS() invalid TOS");
            break;
        case UdpTransport::kSocketInvalid:
            _engineStatisticsPtr->SetLastError(VE_SOCKET_ERROR, kTraceError,
                                               "SetSendTOS() invalid Socket");
            break;
        default:
            // Unrecognized transport errors fall back to the generic code.
            _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
                                               "SetSendTOS() TOS error");
            break;
        }
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "UdpTransport =>  lastError = %d",
                     lastSockError);
        return -1;
    }

    // Set priority (PCP) value, -1 means don't change
    if (-1 != priority)
    {
        if (_socketTransportModule.SetPCP(priority) != 0)
        {
            UdpTransport::ErrorCode lastSockError(
                _socketTransportModule.LastError());
            switch (lastSockError)
            {
            case UdpTransport::kPcpError:
                _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
                                                   "SetSendTOS() PCP error");
                break;
            case UdpTransport::kQosError:
                _engineStatisticsPtr->SetLastError(
                        VE_TOS_GQOS_CONFLICT, kTraceError,
                        "SetSendTOS() GQOS conflict");
                break;
            case UdpTransport::kSocketInvalid:
                _engineStatisticsPtr->SetLastError(
                        VE_SOCKET_ERROR, kTraceError,
                        "SetSendTOS() invalid Socket");
                break;
            default:
                _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
                                                   "SetSendTOS() PCP error");
                break;
            }
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "UdpTransport =>  lastError = %d",
                         lastSockError);
            return -1;
        }
    }

    return 0;
}
+
+WebRtc_Word32
+Channel::GetSendTOS(int &DSCP, int& priority, bool &useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendTOS(DSCP=?, useSetSockopt=?)");
+    WebRtc_Word32 dscp(0), prio(0);
+    bool setSockopt(false);
+    if (_socketTransportModule.ToS(dscp, setSockopt) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetSendTOS() failed to get TOS info");
+        return -1;
+    }
+    if (_socketTransportModule.PCP(prio) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetSendTOS() failed to get PCP info");
+        return -1;
+    }
+    DSCP = static_cast<int> (dscp);
+    priority = static_cast<int> (prio);
+    useSetSockopt = setSockopt;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSendTOS() => DSCP=%d, priority=%d, useSetSockopt=%d",
+        DSCP, priority, (int)useSetSockopt);
+    return 0;
+}
+
#if defined(_WIN32)
WebRtc_Word32
Channel::SetSendGQoS(bool enable, int serviceType, int overrideDSCP)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetSendGQoS(enable=%d, serviceType=%d, "
                 "overrideDSCP=%d)",
                 (int)enable, serviceType, overrideDSCP);
    // Windows-only GQoS configuration. Both socket directions must already
    // be initialized, and the arguments are validated before touching the
    // transport module.
    if(!_socketTransportModule.ReceiveSocketsInitialized())
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOCKETS_NOT_INITED, kTraceError,
            "SetSendGQoS() GQoS state must be set after sockets are created");
        return -1;
    }
    if(!_socketTransportModule.SendSocketsInitialized())
    {
        _engineStatisticsPtr->SetLastError(
            VE_DESTINATION_NOT_INITED, kTraceError,
            "SetSendGQoS() GQoS state must be set after sending side is "
            "initialized");
        return -1;
    }
    // Only the four Winsock GQoS service types are accepted when enabling.
    if (enable &&
       (serviceType != SERVICETYPE_BESTEFFORT) &&
       (serviceType != SERVICETYPE_CONTROLLEDLOAD) &&
       (serviceType != SERVICETYPE_GUARANTEED) &&
       (serviceType != SERVICETYPE_QUALITATIVE))
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "SetSendGQoS() Invalid service type");
        return -1;
    }
    // DSCP is a 6-bit field, hence the 0..63 range.
    if (enable && ((overrideDSCP <  0) || (overrideDSCP > 63)))
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "SetSendGQoS() Invalid overrideDSCP value");
        return -1;
    }

    // Avoid GQoS/ToS conflict when user wants to override the default DSCP
    // mapping
    bool QoS(false);
    WebRtc_Word32 sType(0);
    WebRtc_Word32 ovrDSCP(0);
    if (_socketTransportModule.QoS(QoS, sType, ovrDSCP))
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
            "SetSendGQoS() failed to get QOS info");
        return -1;
    }
    // NOTE(review): the message below says "overrideDSCP differs", but the
    // condition only rejects the case ovrDSCP == 0 && overrideDSCP != 0 —
    // confirm this narrower check is intended.
    if (QoS && ovrDSCP == 0 && overrideDSCP != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_TOS_GQOS_CONFLICT, kTraceError,
            "SetSendGQoS() QOS is already enabled and overrideDSCP differs,"
            " not allowed");
        return -1;
    }
    // maxBitrate of 0 is passed straight through to the transport module.
    const WebRtc_Word32 maxBitrate(0);
    if (_socketTransportModule.SetQoS(enable,
                                      static_cast<WebRtc_Word32>(serviceType),
                                      maxBitrate,
                                      static_cast<WebRtc_Word32>(overrideDSCP),
                                      true))
    {
        UdpTransport::ErrorCode lastSockError(
            _socketTransportModule.LastError());
        // Map transport errors onto VoiceEngine error codes.
        switch (lastSockError)
        {
        case UdpTransport::kQosError:
            _engineStatisticsPtr->SetLastError(VE_GQOS_ERROR, kTraceError,
                                               "SetSendGQoS() QOS error");
            break;
        default:
            _engineStatisticsPtr->SetLastError(VE_SOCKET_ERROR, kTraceError,
                                               "SetSendGQoS() Socket error");
            break;
        }
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "UdpTransport() => lastError = %d",
                     lastSockError);
        return -1;
    }
    return 0;
}
#endif
+
#if defined(_WIN32)
WebRtc_Word32
Channel::GetSendGQoS(bool &enabled, int &serviceType, int &overrideDSCP)
{
    // Reads back the GQoS state from the socket transport module (Windows
    // only). The module call's return value is ignored; the outputs default
    // to disabled/zero.
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetSendGQoS(enable=?, serviceType=?, "
                 "overrideDSCP=?)");

    bool qosEnabled(false);
    WebRtc_Word32 serviceTypeValue(0);
    WebRtc_Word32 overrideDSCPValue(0);
    _socketTransportModule.QoS(qosEnabled, serviceTypeValue,
                               overrideDSCPValue);

    enabled = qosEnabled;
    serviceType = static_cast<int>(serviceTypeValue);
    overrideDSCP = static_cast<int>(overrideDSCPValue);

    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "GetSendGQoS() => enabled=%d, serviceType=%d, overrideDSCP=%d",
                 (int)enabled, serviceType, overrideDSCP);
    return 0;
}
#endif
+#endif
+
+WebRtc_Word32
+Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetPacketTimeoutNotification()");
+    if (enable)
+    {
+        const WebRtc_UWord32 RTPtimeoutMS = 1000*timeoutSeconds;
+        const WebRtc_UWord32 RTCPtimeoutMS = 0;
+        _rtpRtcpModule.SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
+        _rtpPacketTimeOutIsEnabled = true;
+        _rtpTimeOutSeconds = timeoutSeconds;
+    }
+    else
+    {
+        _rtpRtcpModule.SetPacketTimeout(0, 0);
+        _rtpPacketTimeOutIsEnabled = false;
+        _rtpTimeOutSeconds = 0;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetPacketTimeoutNotification()");
+    enabled = _rtpPacketTimeOutIsEnabled;
+    if (enabled)
+    {
+        timeoutSeconds = _rtpTimeOutSeconds;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetPacketTimeoutNotification() => enabled=%d,"
+                 " timeoutSeconds=%d",
+                 enabled, timeoutSeconds);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterDeadOrAliveObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_connectionObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "RegisterDeadOrAliveObserver() observer already enabled");
+        return -1;
+    }
+
+    _connectionObserverPtr = &observer;
+    _connectionObserver = true;
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::DeRegisterDeadOrAliveObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterDeadOrAliveObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (!_connectionObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterDeadOrAliveObserver() observer already disabled");
+        return 0;
+    }
+
+    _connectionObserver = false;
+    _connectionObserverPtr = NULL;
+
+    return 0;
+}
+
WebRtc_Word32
Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetPeriodicDeadOrAliveStatus()");
    // Turns periodic dead-or-alive monitoring on or off in the RTP/RTCP
    // module. The previous sample time is captured first so it can be
    // restored when disabling (see below) — the order of these calls
    // matters.
    if (!_connectionObserverPtr)
    {
        // Monitoring without an observer still works, but no callbacks will
        // be delivered; warn rather than fail.
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "SetPeriodicDeadOrAliveStatus() connection observer has"
                     " not been registered");
    }
    if (enable)
    {
        ResetDeadOrAliveCounters();
    }
    bool enabled(false);
    WebRtc_UWord8 currentSampleTimeSec(0);
    // Store last state (will be used later if dead-or-alive is disabled).
    _rtpRtcpModule.PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
    // Update the dead-or-alive state.
    if (_rtpRtcpModule.SetPeriodicDeadOrAliveStatus(
        enable, (WebRtc_UWord8)sampleTimeSeconds) != 0)
    {
        _engineStatisticsPtr->SetLastError(
                VE_RTP_RTCP_MODULE_ERROR,
                kTraceError,
                "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
                "status");
        return -1;
    }
    if (!enable)
    {
        // Restore last utilized sample time.
        // Without this, the sample time would always be reset to default
        // (2 sec), each time dead-or-alived was disabled without sample-time
        // parameter.
        _rtpRtcpModule.SetPeriodicDeadOrAliveStatus(enable,
                                                    currentSampleTimeSec);
    }
    return 0;
}
+
+WebRtc_Word32
+Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
+{
+    _rtpRtcpModule.PeriodicDeadOrAliveStatus(
+        enabled,
+        (WebRtc_UWord8&)sampleTimeSeconds);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
+                 " sampleTimeSeconds=%d",
+                 enabled, sampleTimeSeconds);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SendUDPPacket(const void* data,
+                       unsigned int length,
+                       int& transmittedBytes,
+                       bool useRtcpSocket)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendUDPPacket()");
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SendUDPPacket() external transport is enabled");
+        return -1;
+    }
+    if (useRtcpSocket && !_rtpRtcpModule.RTCP())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTCP_ERROR, kTraceError,
+            "SendUDPPacket() RTCP is disabled");
+        return -1;
+    }
+    if (!_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NOT_SENDING, kTraceError,
+            "SendUDPPacket() not sending");
+        return -1;
+    }
+
+    char* dataC = new char[length];
+    if (NULL == dataC)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NO_MEMORY, kTraceError,
+            "SendUDPPacket() memory allocation failed");
+        return -1;
+    }
+    memcpy(dataC, data, length);
+
+    transmittedBytes = SendPacketRaw(dataC, length, useRtcpSocket);
+
+    delete [] dataC;
+    dataC = NULL;
+
+    if (transmittedBytes <= 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+                VE_SEND_ERROR, kTraceError,
+                "SendUDPPacket() transmission failed");
+        transmittedBytes = 0;
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "SendUDPPacket() => transmittedBytes=%d", transmittedBytes);
+    return 0;
+}
+
+
// Starts local playout of a file on this channel: the file is decoded by a
// FilePlayer instance and mixed into the channel output via the output mixer
// as an "anonymous" participant (i.e. heard locally, not sent to the far end).
// Returns 0 on success, -1 on failure (last error is stored via
// _engineStatisticsPtr).
int Channel::StartPlayingFileLocally(const char* fileName,
                                     const bool loop,
                                     const FileFormats format,
                                     const int startPosition,
                                     const float volumeScaling,
                                     const int stopPosition,
                                     const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
                 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
                 "stopPosition=%d)", fileName, loop, format, volumeScaling,
                 startPosition, stopPosition);

    // NOTE(review): _outputFilePlaying is read here before taking
    // _fileCritSect - confirm this unlocked read is intended.
    if (_outputFilePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceError,
            "StartPlayingFileLocally() is already playing");
        return -1;
    }

    {
        CriticalSectionScoped cs(_fileCritSect);

        // Destroy any previous player instance before creating a new one.
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }

        _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
            _outputFilePlayerId, (const FileFormats)format);

        // A NULL player indicates the requested file format is unsupported.
        if (_outputFilePlayerPtr == NULL)
        {
            _engineStatisticsPtr->SetLastError(
                VE_INVALID_ARGUMENT, kTraceError,
                "StartPlayingFileLocally() filePlayer format is not correct");
            return -1;
        }

        // 0 => no periodic playout-position notifications are requested.
        const WebRtc_UWord32 notificationTime(0);

        if (_outputFilePlayerPtr->StartPlayingFile(
                fileName,
                loop,
                startPosition,
                volumeScaling,
                notificationTime,
                stopPosition,
                (const CodecInst*)codecInst) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_BAD_FILE, kTraceError,
                "StartPlayingFile() failed to start file playout");
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
            return -1;
        }
        _outputFilePlayerPtr->RegisterModuleFileCallback(this);
        _outputFilePlaying = true;
    }
    // _fileCritSect cannot be taken while calling
    // SetAnonymousMixabilityStatus() since as soon as the participant is added
    // frames can be pulled by the mixer. Since the frames are generated from
    // the file, _fileCritSect will be taken. This would result in a deadlock.
    if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
    {
        // Roll back: tear down the player that was just started.
        CriticalSectionScoped cs(_fileCritSect);
        _outputFilePlaying = false;
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
            "StartPlayingFile() failed to add participant as file to mixer");
        _outputFilePlayerPtr->StopPlayingFile();
        FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
        _outputFilePlayerPtr = NULL;
        return -1;
    }

    return 0;
}
+
// Stream-based overload of StartPlayingFileLocally(): plays audio pulled
// from an InStream instead of a named file. Same mixer/locking protocol as
// the filename overload. Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(InStream* stream,
                                     const FileFormats format,
                                     const int startPosition,
                                     const float volumeScaling,
                                     const int stopPosition,
                                     const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayingFileLocally(format=%d,"
                 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
                 format, volumeScaling, startPosition, stopPosition);

    if(stream == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartPlayingFileLocally() NULL as input stream");
        return -1;
    }


    // NOTE(review): unlocked read of _outputFilePlaying, as in the
    // filename overload - confirm intended.
    if (_outputFilePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceError,
            "StartPlayingFileLocally() is already playing");
        return -1;
    }

    {
      CriticalSectionScoped cs(_fileCritSect);

      // Destroy the old instance
      if (_outputFilePlayerPtr)
      {
          _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
          FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
          _outputFilePlayerPtr = NULL;
      }

      // Create the instance
      _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
          _outputFilePlayerId,
          (const FileFormats)format);

      // A NULL player indicates the requested file format is unsupported.
      if (_outputFilePlayerPtr == NULL)
      {
          _engineStatisticsPtr->SetLastError(
              VE_INVALID_ARGUMENT, kTraceError,
              "StartPlayingFileLocally() filePlayer format isnot correct");
          return -1;
      }

      // 0 => no periodic playout-position notifications are requested.
      const WebRtc_UWord32 notificationTime(0);

      if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                                 volumeScaling,
                                                 notificationTime,
                                                 stopPosition, codecInst) != 0)
      {
          _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                             "StartPlayingFile() failed to "
                                             "start file playout");
          _outputFilePlayerPtr->StopPlayingFile();
          FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
          _outputFilePlayerPtr = NULL;
          return -1;
      }
      _outputFilePlayerPtr->RegisterModuleFileCallback(this);
      _outputFilePlaying = true;
    }
    // _fileCritSect cannot be taken while calling
    // SetAnonymousMixibilityStatus. Refer to comments in
    // StartPlayingFileLocally(const char* ...) for more details.
    if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
    {
        // Roll back: tear down the player that was just started.
        CriticalSectionScoped cs(_fileCritSect);
        _outputFilePlaying = false;
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
            "StartPlayingFile() failed to add participant as file to mixer");
        _outputFilePlayerPtr->StopPlayingFile();
        FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
        _outputFilePlayerPtr = NULL;
        return -1;
    }

    return 0;
}
+
// Stops local file playout and removes this channel's anonymous participant
// from the output mixer. Returns 0 if nothing was playing (warning only);
// -1 on failure.
int Channel::StopPlayingFileLocally()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopPlayingFileLocally()");

    if (!_outputFilePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "StopPlayingFileLocally() isnot playing");
        return 0;
    }

    {
        CriticalSectionScoped cs(_fileCritSect);

        // NOTE(review): VE_STOP_RECORDING_FAILED is reported for a playout
        // stop failure - confirm this error code is intended.
        if (_outputFilePlayerPtr->StopPlayingFile() != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_STOP_RECORDING_FAILED, kTraceError,
                "StopPlayingFile() could not stop playing");
            return -1;
        }
        _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
        FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
        _outputFilePlayerPtr = NULL;
        _outputFilePlaying = false;
    }
    // _fileCritSect cannot be taken while calling
    // SetAnonymousMixibilityStatus. Refer to comments in
    // StartPlayingFileLocally(const char* ...) for more details.
    if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
            "StopPlayingFile() failed to stop participant from playing as"
            "file in the mixer");
        return -1;
    }

    return 0;
}
+
+int Channel::IsPlayingFileLocally() const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IsPlayingFileLocally()");
+
+    return (WebRtc_Word32)_outputFilePlaying;
+}
+
+int Channel::ScaleLocalFilePlayout(const float scale)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
+
+    CriticalSectionScoped cs(_fileCritSect);
+
+    if (!_outputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ScaleLocalFilePlayout() isnot playing");
+        return -1;
+    }
+    if ((_outputFilePlayerPtr == NULL) ||
+        (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "SetAudioScaling() failed to scale the playout");
+        return -1;
+    }
+
+    return 0;
+}
+
+int Channel::GetLocalPlayoutPosition(int& positionMs)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetLocalPlayoutPosition(position=?)");
+
+    WebRtc_UWord32 position;
+
+    CriticalSectionScoped cs(_fileCritSect);
+
+    if (_outputFilePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "GetLocalPlayoutPosition() filePlayer instance doesnot exist");
+        return -1;
+    }
+
+    if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "GetLocalPlayoutPosition() failed");
+        return -1;
+    }
+    positionMs = position;
+
+    return 0;
+}
+
// Starts playout of a file (by name) as the channel's microphone input:
// decoded audio from the FilePlayer replaces (or is mixed with) the
// microphone signal. Returns 0 on success or if already playing (warning);
// -1 on failure.
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
                                          const bool loop,
                                          const FileFormats format,
                                          const int startPosition,
                                          const float volumeScaling,
                                          const int stopPosition,
                                          const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
                 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
                 "stopPosition=%d)", fileName, loop, format, volumeScaling,
                 startPosition, stopPosition);

    if (_inputFilePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceWarning,
            "StartPlayingFileAsMicrophone() filePlayer is playing");
        return 0;
    }

    CriticalSectionScoped cs(_fileCritSect);

    // Destroy the old instance
    if (_inputFilePlayerPtr)
    {
        _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
        _inputFilePlayerPtr = NULL;
    }

    // Create the instance
    _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _inputFilePlayerId, (const FileFormats)format);

    // A NULL player indicates the requested file format is unsupported.
    if (_inputFilePlayerPtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
        return -1;
    }

    // 0 => no periodic playout-position notifications are requested.
    const WebRtc_UWord32 notificationTime(0);

    if (_inputFilePlayerPtr->StartPlayingFile(
        fileName,
        loop,
        startPosition,
        volumeScaling,
        notificationTime,
        stopPosition,
        (const CodecInst*)codecInst) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartPlayingFile() failed to start file playout");
        _inputFilePlayerPtr->StopPlayingFile();
        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
        _inputFilePlayerPtr = NULL;
        return -1;
    }
    _inputFilePlayerPtr->RegisterModuleFileCallback(this);
    _inputFilePlaying = true;

    return 0;
}
+
// Stream-based overload: plays audio pulled from an InStream as the
// channel's microphone input. Returns 0 on success or if already playing
// (warning); -1 on failure.
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          const FileFormats format,
                                          const int startPosition,
                                          const float volumeScaling,
                                          const int stopPosition,
                                          const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayingFileAsMicrophone(format=%d, "
                 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
                 format, volumeScaling, startPosition, stopPosition);

    if(stream == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartPlayingFileAsMicrophone NULL as input stream");
        return -1;
    }

    if (_inputFilePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceWarning,
            "StartPlayingFileAsMicrophone() is playing");
        return 0;
    }

    CriticalSectionScoped cs(_fileCritSect);

    // Destroy the old instance
    if (_inputFilePlayerPtr)
    {
        _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
        _inputFilePlayerPtr = NULL;
    }

    // Create the instance
    _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _inputFilePlayerId, (const FileFormats)format);

    // A NULL player indicates the requested file format is unsupported.
    if (_inputFilePlayerPtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "StartPlayingInputFile() filePlayer format isnot correct");
        return -1;
    }

    // 0 => no periodic playout-position notifications are requested.
    const WebRtc_UWord32 notificationTime(0);

    if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                              volumeScaling, notificationTime,
                                              stopPosition, codecInst) != 0)
    {
        _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                           "StartPlayingFile() failed to start "
                                           "file playout");
        _inputFilePlayerPtr->StopPlayingFile();
        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
        _inputFilePlayerPtr = NULL;
        return -1;
    }

    _inputFilePlayerPtr->RegisterModuleFileCallback(this);
    _inputFilePlaying = true;

    return 0;
}
+
// Stops file-as-microphone playout and destroys the input FilePlayer.
// Returns 0 if nothing was playing (warning only); -1 on failure.
int Channel::StopPlayingFileAsMicrophone()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopPlayingFileAsMicrophone()");

    if (!_inputFilePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "StopPlayingFileAsMicrophone() isnot playing");
        return 0;
    }

    CriticalSectionScoped cs(_fileCritSect);
    // NOTE(review): VE_STOP_RECORDING_FAILED is reported for a playout stop
    // failure - confirm this error code is intended.
    if (_inputFilePlayerPtr->StopPlayingFile() != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_STOP_RECORDING_FAILED, kTraceError,
            "StopPlayingFile() could not stop playing");
        return -1;
    }
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    _inputFilePlaying = false;

    return 0;
}
+
+int Channel::IsPlayingFileAsMicrophone() const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IsPlayingFileAsMicrophone()");
+
+    return _inputFilePlaying;
+}
+
+int Channel::ScaleFileAsMicrophonePlayout(const float scale)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
+
+    CriticalSectionScoped cs(_fileCritSect);
+
+    if (!_inputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ScaleFileAsMicrophonePlayout() isnot playing");
+        return -1;
+    }
+
+    if ((_inputFilePlayerPtr == NULL) ||
+        (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "SetAudioScaling() failed to scale playout");
+        return -1;
+    }
+
+    return 0;
+}
+
// Starts recording the channel's playout signal to a named file.
// codecInst == NULL selects a 16 kHz L16 default; L16/PCMU/PCMA are written
// as WAV, anything else as a compressed file. Only mono (channels == 1) is
// accepted. Returns 0 on success or if already recording (warning);
// -1 on failure.
int Channel::StartRecordingPlayout(const WebRtc_Word8* fileName,
                                   const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartRecordingPlayout(fileName=%s)", fileName);

    if (_outputFileRecording)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
                     "StartRecordingPlayout() is already recording");
        return 0;
    }

    FileFormats format;
    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
    // Fallback codec used when the caller passes codecInst == NULL:
    // payload type 100, L16, 16 kHz, 320-sample frames, mono, 320 kbps.
    CodecInst dummyCodec={100,"L16",16000,320,1,320000};

    // Stereo recording is not supported.
    if (codecInst != NULL && codecInst->channels != 1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_ARGUMENT, kTraceError,
            "StartRecordingPlayout() invalid compression");
        return(-1);
    }
    if(codecInst == NULL)
    {
        format = kFileFormatPcm16kHzFile;
        codecInst=&dummyCodec;
    }
    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
    {
        // Uncompressed/G.711 payloads are stored in a WAV container.
        format = kFileFormatWavFile;
    }
    else
    {
        format = kFileFormatCompressedFile;
    }

    CriticalSectionScoped cs(_fileCritSect);

    // Destroy the old instance
    if (_outputFileRecorderPtr)
    {
        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
        _outputFileRecorderPtr = NULL;
    }

    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
        _outputFileRecorderId, (const FileFormats)format);
    if (_outputFileRecorderPtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "StartRecordingPlayout() fileRecorder format isnot correct");
        return -1;
    }

    if (_outputFileRecorderPtr->StartRecordingAudioFile(
        fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartRecordingAudioFile() failed to start file recording");
        _outputFileRecorderPtr->StopRecording();
        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
        _outputFileRecorderPtr = NULL;
        return -1;
    }
    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
    _outputFileRecording = true;

    return 0;
}
+
// Stream-based overload: records the channel's playout signal to an
// OutStream. Codec/format selection matches the filename overload.
// Returns 0 on success or if already recording (warning); -1 on failure.
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartRecordingPlayout()");

    if (_outputFileRecording)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
                     "StartRecordingPlayout() is already recording");
        return 0;
    }

    FileFormats format;
    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
    // Fallback codec used when the caller passes codecInst == NULL:
    // payload type 100, L16, 16 kHz, 320-sample frames, mono, 320 kbps.
    CodecInst dummyCodec={100,"L16",16000,320,1,320000};

    // Stereo recording is not supported.
    if (codecInst != NULL && codecInst->channels != 1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_ARGUMENT, kTraceError,
            "StartRecordingPlayout() invalid compression");
        return(-1);
    }
    if(codecInst == NULL)
    {
        format = kFileFormatPcm16kHzFile;
        codecInst=&dummyCodec;
    }
    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
    {
        // Uncompressed/G.711 payloads are stored in a WAV container.
        format = kFileFormatWavFile;
    }
    else
    {
        format = kFileFormatCompressedFile;
    }

    CriticalSectionScoped cs(_fileCritSect);

    // Destroy the old instance
    if (_outputFileRecorderPtr)
    {
        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
        _outputFileRecorderPtr = NULL;
    }

    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
        _outputFileRecorderId, (const FileFormats)format);
    if (_outputFileRecorderPtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "StartRecordingPlayout() fileRecorder format isnot correct");
        return -1;
    }

    if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
                                                        notificationTime) != 0)
    {
        _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                           "StartRecordingPlayout() failed to "
                                           "start file recording");
        _outputFileRecorderPtr->StopRecording();
        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
        _outputFileRecorderPtr = NULL;
        return -1;
    }

    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
    _outputFileRecording = true;

    return 0;
}
+
// Stops playout recording and destroys the FileRecorder instance.
// Returns -1 if nothing was recording or stopping fails; 0 on success.
int Channel::StopRecordingPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
                 "Channel::StopRecordingPlayout()");

    // NOTE(review): this early-exit path returns -1 without calling
    // SetLastError(), unlike the sibling Stop*() methods - confirm intended.
    if (!_outputFileRecording)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
                     "StopRecordingPlayout() isnot recording");
        return -1;
    }


    CriticalSectionScoped cs(_fileCritSect);

    if (_outputFileRecorderPtr->StopRecording() != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_STOP_RECORDING_FAILED, kTraceError,
            "StopRecording() could not stop recording");
        return(-1);
    }
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    _outputFileRecording = false;

    return 0;
}
+
+void
+Channel::SetMixWithMicStatus(bool mix)
+{
+    _mixFileWithMicrophone=mix;
+}
+
+int
+Channel::GetSpeechOutputLevel(WebRtc_UWord32& level) const
+{
+    WebRtc_Word8 currentLevel = _outputAudioLevel.Level();
+    level = static_cast<WebRtc_Word32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetSpeechOutputLevel() => level=%u", level);
+    return 0;
+}
+
+int
+Channel::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const
+{
+    WebRtc_Word16 currentLevel = _outputAudioLevel.LevelFullRange();
+    level = static_cast<WebRtc_Word32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetSpeechOutputLevelFullRange() => level=%u", level);
+    return 0;
+}
+
+int
+Channel::SetMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetMute(enable=%d)", enable);
+    _mute = enable;
+    return 0;
+}
+
+bool
+Channel::Mute() const
+{
+    return _mute;
+}
+
+int
+Channel::SetOutputVolumePan(float left, float right)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetOutputVolumePan()");
+    _panLeft = left;
+    _panRight = right;
+    return 0;
+}
+
+int
+Channel::GetOutputVolumePan(float& left, float& right) const
+{
+    left = _panLeft;
+    right = _panRight;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
+    return 0;
+}
+
+int
+Channel::SetChannelOutputVolumeScaling(float scaling)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetChannelOutputVolumeScaling()");
+    _outputGain = scaling;
+    return 0;
+}
+
+int
+Channel::GetChannelOutputVolumeScaling(float& scaling) const
+{
+    scaling = _outputGain;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
+    return 0;
+}
+
+#ifdef WEBRTC_SRTP
+
// Enables SRTP protection of outgoing RTP (and RTCP unless useForRTCP)
// via the built-in SRTP module. Validates key/auth lengths against the
// engine's per-cipher/per-auth limits before enabling. Returns 0 on
// success, -1 on failure.
int
Channel::EnableSRTPSend(
    CipherTypes cipherType,
    int cipherKeyLength,
    AuthenticationTypes authType,
    int authKeyLength,
    int authTagLength,
    SecurityLevels level,
    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
    bool useForRTCP)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
               "Channel::EnableSRTPSend()");

    CriticalSectionScoped cs(_callbackCritSect);

    if (_encrypting)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "EnableSRTPSend() encryption already enabled");
        return -1;
    }

    if (key == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceWarning,
            "EnableSRTPSend() invalid key string");
        return -1;
    }

    // Reject out-of-range lengths: cipher key bounds apply when encryption
    // is requested; auth key/tag bounds depend on the auth algorithm
    // (HMAC-SHA1 vs. null auth) when authentication is requested.
    if (((kEncryption == level ||
            kEncryptionAndAuthentication == level) &&
            (cipherKeyLength < kVoiceEngineMinSrtpEncryptLength ||
            cipherKeyLength > kVoiceEngineMaxSrtpEncryptLength)) ||
        ((kAuthentication == level ||
            kEncryptionAndAuthentication == level) &&
            kAuthHmacSha1 == authType &&
            (authKeyLength > kVoiceEngineMaxSrtpAuthSha1Length ||
            authTagLength > kVoiceEngineMaxSrtpAuthSha1Length)) ||
        ((kAuthentication == level ||
            kEncryptionAndAuthentication == level) &&
            kAuthNull == authType &&
            (authKeyLength > kVoiceEngineMaxSrtpKeyAuthNullLength ||
            authTagLength > kVoiceEngineMaxSrtpTagAuthNullLength)))
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "EnableSRTPSend() invalid key length(s)");
        return -1;
    }


    // First argument selects whether RTCP is also protected.
    if (_srtpModule.EnableSRTPEncrypt(
        !useForRTCP,
        (SrtpModule::CipherTypes)cipherType,
        cipherKeyLength,
        (SrtpModule::AuthenticationTypes)authType,
        authKeyLength, authTagLength,
        (SrtpModule::SecurityLevels)level,
        key) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SRTP_ERROR, kTraceError,
            "EnableSRTPSend() failed to enable SRTP encryption");
        return -1;
    }

    // Route packets through the SRTP module unless an external encryption
    // interface is already installed.
    if (_encryptionPtr == NULL)
    {
        _encryptionPtr = &_srtpModule;
    }
    _encrypting = true;

    return 0;
}
+
// Disables SRTP protection of outgoing packets. Returns 0 if already
// disabled (warning) or on success; -1 if the SRTP module fails.
int
Channel::DisableSRTPSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
               "Channel::DisableSRTPSend()");

    CriticalSectionScoped cs(_callbackCritSect);

    if (!_encrypting)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "DisableSRTPSend() SRTP encryption already disabled");
        return 0;
    }

    // Clear the flag before asking the module, so the channel no longer
    // considers itself encrypting even if the module call fails below.
    _encrypting = false;

    if (_srtpModule.DisableSRTPEncrypt() == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SRTP_ERROR, kTraceError,
            "DisableSRTPSend() failed to disable SRTP encryption");
        return -1;
    }

    if (!_srtpModule.SRTPDecrypt() && !_srtpModule.SRTPEncrypt())
    {
        // Both directions are disabled
        _encryptionPtr = NULL;
    }

    return 0;
}
+
// Enables SRTP protection of incoming RTP (and RTCP unless useForRTCP)
// via the built-in SRTP module. Mirrors EnableSRTPSend(): validates
// key/auth lengths, then enables decryption in the module. Returns 0 on
// success, -1 on failure.
int
Channel::EnableSRTPReceive(
    CipherTypes  cipherType,
    int cipherKeyLength,
    AuthenticationTypes authType,
    int authKeyLength,
    int authTagLength,
    SecurityLevels level,
    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
    bool useForRTCP)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
               "Channel::EnableSRTPReceive()");

    CriticalSectionScoped cs(_callbackCritSect);

    if (_decrypting)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "EnableSRTPReceive() SRTP decryption already enabled");
        return -1;
    }

    if (key == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceWarning,
            "EnableSRTPReceive() invalid key string");
        return -1;
    }

    // Same length validation as EnableSRTPSend(): cipher key bounds when
    // encryption is requested; auth key/tag bounds per auth algorithm
    // (HMAC-SHA1 vs. null auth) when authentication is requested.
    if ((((kEncryption == level) ||
            (kEncryptionAndAuthentication == level)) &&
            ((cipherKeyLength < kVoiceEngineMinSrtpEncryptLength) ||
            (cipherKeyLength > kVoiceEngineMaxSrtpEncryptLength))) ||
        (((kAuthentication == level) ||
            (kEncryptionAndAuthentication == level)) &&
            (kAuthHmacSha1 == authType) &&
            ((authKeyLength > kVoiceEngineMaxSrtpAuthSha1Length) ||
            (authTagLength > kVoiceEngineMaxSrtpAuthSha1Length))) ||
        (((kAuthentication == level) ||
            (kEncryptionAndAuthentication == level)) &&
            (kAuthNull == authType) &&
            ((authKeyLength > kVoiceEngineMaxSrtpKeyAuthNullLength) ||
            (authTagLength > kVoiceEngineMaxSrtpTagAuthNullLength))))
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "EnableSRTPReceive() invalid key length(s)");
        return -1;
    }

    // First argument selects whether RTCP is also protected.
    if (_srtpModule.EnableSRTPDecrypt(
        !useForRTCP,
        (SrtpModule::CipherTypes)cipherType,
        cipherKeyLength,
        (SrtpModule::AuthenticationTypes)authType,
        authKeyLength,
        authTagLength,
        (SrtpModule::SecurityLevels)level,
        key) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SRTP_ERROR, kTraceError,
            "EnableSRTPReceive() failed to enable SRTP decryption");
        return -1;
    }

    // Route packets through the SRTP module unless an external encryption
    // interface is already installed.
    if (_encryptionPtr == NULL)
    {
        _encryptionPtr = &_srtpModule;
    }

    _decrypting = true;

    return 0;
}
+
// Disables SRTP protection of incoming packets. Returns 0 if already
// disabled (warning) or on success; -1 if the SRTP module fails.
int
Channel::DisableSRTPReceive()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
               "Channel::DisableSRTPReceive()");

    CriticalSectionScoped cs(_callbackCritSect);

    if (!_decrypting)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "DisableSRTPReceive() SRTP decryption already disabled");
        return 0;
    }

    // Clear the flag before asking the module, so the channel no longer
    // considers itself decrypting even if the module call fails below.
    _decrypting = false;

    if (_srtpModule.DisableSRTPDecrypt() == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SRTP_ERROR, kTraceError,
            "DisableSRTPReceive() failed to disable SRTP decryption");
        return -1;
    }

    // Detach the SRTP module once both directions are disabled.
    if (!_srtpModule.SRTPDecrypt() && !_srtpModule.SRTPEncrypt())
    {
        _encryptionPtr = NULL;
    }

    return 0;
}
+
+#endif
+
+// Installs an external Encryption implementation and activates encryption
+// of outgoing and decryption of incoming packets. Fails if an encryption
+// interface (external or internal SRTP) is already installed.
+int
+Channel::RegisterExternalEncryption(Encryption& encryption)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::RegisterExternalEncryption()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    // Refuse to overwrite an already installed encryption interface.
+    if (_encryptionPtr != NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterExternalEncryption() encryption already enabled");
+        return -1;
+    }
+
+    // Install the external interface and enable both directions.
+    _encryptionPtr = &encryption;
+    _encrypting = true;
+    _decrypting = true;
+
+    return 0;
+}
+
+// Removes a previously registered external Encryption implementation and
+// deactivates both directions. Calling it while no encryption is installed
+// is reported as a warning but still succeeds.
+int
+Channel::DeRegisterExternalEncryption()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::DeRegisterExternalEncryption()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_encryptionPtr == NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterExternalEncryption() encryption already disabled");
+        return 0;
+    }
+
+    // Disable both directions, then drop the interface pointer.
+    _encrypting = false;
+    _decrypting = false;
+    _encryptionPtr = NULL;
+
+    return 0;
+}
+
+// Sends an out-of-band (RTP telephone-event) DTMF tone via the RTP/RTCP
+// module. playDtmfEvent controls whether the tone is also rendered locally.
+int Channel::SendTelephoneEventOutband(unsigned char eventCode,
+                                          int lengthMs, int attenuationDb,
+                                          bool playDtmfEvent)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
+               playDtmfEvent);
+
+    // Remember whether the event should also be played out locally.
+    _playOutbandDtmfEvent = playDtmfEvent;
+
+    const int sendResult = _rtpRtcpModule.SendTelephoneEventOutband(
+        eventCode, lengthMs, attenuationDb);
+    if (sendResult != 0) {
+        _engineStatisticsPtr->SetLastError(
+            VE_SEND_DTMF_FAILED,
+            kTraceWarning,
+            "SendTelephoneEventOutband() failed to send event");
+        return -1;
+    }
+    return 0;
+}
+
+// Queues an in-band DTMF event; the tone is injected into the encoded audio
+// by the send path later. Always returns 0.
+int Channel::SendTelephoneEventInband(unsigned char eventCode,
+                                         int lengthMs,
+                                         int attenuationDb,
+                                         bool playDtmfEvent)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
+               playDtmfEvent);
+
+    // Remember whether the tone should also be rendered locally.
+    _playInbandDtmfEvent = playDtmfEvent;
+    _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
+
+    return 0;
+}
+
+// Enables or disables local playout of DTMF tones in the ACM.
+// Returns 0 on success, -1 if the ACM rejects the setting.
+int
+Channel::SetDtmfPlayoutStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetDtmfPlayoutStatus()");
+    if (_audioCodingModule.SetDtmfPlayoutStatus(enable) == 0)
+    {
+        return 0;
+    }
+    _engineStatisticsPtr->SetLastError(
+        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
+        "SetDtmfPlayoutStatus() failed to set Dtmf playout");
+    return -1;
+}
+
+// Returns the ACM's current DTMF playout setting
+// (see SetDtmfPlayoutStatus()).
+bool
+Channel::DtmfPlayoutStatus() const
+{
+    return _audioCodingModule.DtmfPlayoutStatus();
+}
+
+// Sets the dynamic RTP payload type used when sending telephone events
+// (RFC 4733 style DTMF). Valid payload types are 0..127. Returns -1 on an
+// invalid type or if the RTP/RTCP module rejects the registration.
+int
+Channel::SetSendTelephoneEventPayloadType(unsigned char type)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetSendTelephoneEventPayloadType()");
+    if (type > 127)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTelephoneEventPayloadType() invalid type");
+        return -1;
+    }
+    CodecInst codec;
+    codec.plfreq = 8000;  // telephone-event is defined with an 8 kHz clock
+    codec.pltype = type;
+    // 16 bytes = "telephone-event" (15 chars) plus the terminating NUL.
+    memcpy(codec.plname, "telephone-event", 16);
+    if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
+    {
+        // Fixed: the two adjacent literals previously concatenated to
+        // "...register sendpayload type" (missing space).
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetSendTelephoneEventPayloadType() failed to register send "
+            "payload type");
+        return -1;
+    }
+    _sendTelephoneEventPayloadType = type;
+    return 0;
+}
+
+// Reads back the payload type previously set via
+// SetSendTelephoneEventPayloadType(). Always returns 0.
+int
+Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendTelephoneEventPayloadType()");
+    type = _sendTelephoneEventPayloadType;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetSendTelephoneEventPayloadType() => type=%u", type);
+    return 0;
+}
+
+#ifdef WEBRTC_DTMF_DETECTION
+
+// Registers an observer for detected telephone events and selects the
+// detection method (in-band via the ACM, out-of-band via the RTP/RTCP
+// module, or both). Returns -1 if an observer is already registered or the
+// detection method is invalid.
+WebRtc_Word32
+Channel::RegisterTelephoneEventDetection(
+    TelephoneEventDetectionMethods detectionMethod,
+    VoETelephoneEventObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterTelephoneEventDetection()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_telephoneEventDetectionPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterTelephoneEventDetection() detection already enabled");
+        return -1;
+    }
+
+    _telephoneEventDetectionPtr = &observer;
+
+    switch (detectionMethod)
+    {
+        case kInBand:
+            _inbandTelephoneEventDetection = true;
+            _outOfBandTelephoneEventDetecion = false;
+            break;
+        case kOutOfBand:
+            _inbandTelephoneEventDetection = false;
+            _outOfBandTelephoneEventDetecion = true;
+            break;
+        case kInAndOutOfBand:
+            _inbandTelephoneEventDetection = true;
+            _outOfBandTelephoneEventDetecion = true;
+            break;
+        default:
+            // Fixed: roll back the observer registration above so a failed
+            // call does not leave the channel looking like detection is on.
+            _telephoneEventDetectionPtr = NULL;
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "RegisterTelephoneEventDetection() invalid detection method");
+            return -1;
+    }
+
+    if (_inbandTelephoneEventDetection)
+    {
+        // Enable in-band Dtmf detection in the ACM.
+        // NOTE(review): a failure here only records an error; the function
+        // still returns 0 (best-effort, preserved from the original).
+        if (_audioCodingModule.RegisterIncomingMessagesCallback(this) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+                "RegisterTelephoneEventDetection() failed to enable Dtmf "
+                "detection");
+        }
+    }
+
+    // Enable/disable out-of-band detection of received telephone-events.
+    // When enabled, RtpAudioFeedback::OnReceivedTelephoneEvent() will be
+    // called two times by the RTP/RTCP module (start & end).
+    const bool forwardToDecoder =
+        _rtpRtcpModule.TelephoneEventForwardToDecoder();
+    const bool detectEndOfTone = true;
+    _rtpRtcpModule.SetTelephoneEventStatus(_outOfBandTelephoneEventDetecion,
+                                           forwardToDecoder,
+                                           detectEndOfTone);
+
+    return 0;
+}
+
+// Removes the telephone-event observer and disables both in-band and
+// out-of-band detection. Returns 0 even when detection was already off.
+int
+Channel::DeRegisterTelephoneEventDetection()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::DeRegisterTelephoneEventDetection()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (!_telephoneEventDetectionPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION,
+            kTraceWarning,
+            "DeRegisterTelephoneEventDetection() detection already disabled");
+        return 0;
+    }
+
+    // Disable out-of-band event detection
+    // (keeps the module's forward-to-decoder setting unchanged).
+    const bool forwardToDecoder =
+        _rtpRtcpModule.TelephoneEventForwardToDecoder();
+    _rtpRtcpModule.SetTelephoneEventStatus(false, forwardToDecoder);
+
+    // Disable in-band Dtmf detection
+    _audioCodingModule.RegisterIncomingMessagesCallback(NULL);
+
+    _inbandTelephoneEventDetection = false;
+    _outOfBandTelephoneEventDetecion = false;
+    _telephoneEventDetectionPtr = NULL;
+
+    return 0;
+}
+
+// Reports whether telephone-event detection is active and, if so, which
+// method (in-band / out-of-band / both) is configured.
+int
+Channel::GetTelephoneEventDetectionStatus(
+    bool& enabled,
+    TelephoneEventDetectionMethods& detectionMethod)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::GetTelephoneEventDetectionStatus()");
+
+    {
+        // The observer pointer is guarded by the callback lock; the lock is
+        // released before the flags below are read.
+        CriticalSectionScoped cs(_callbackCritSect);
+        enabled = (_telephoneEventDetectionPtr != NULL);
+    }
+
+    // NOTE(review): the two detection flags are read without holding the
+    // lock; a concurrent (de)registration could race — confirm intended.
+    if (enabled)
+    {
+        if (_inbandTelephoneEventDetection && !_outOfBandTelephoneEventDetecion)
+            detectionMethod = kInBand;
+        else if (!_inbandTelephoneEventDetection
+            && _outOfBandTelephoneEventDetecion)
+            detectionMethod = kOutOfBand;
+        else if (_inbandTelephoneEventDetection
+            && _outOfBandTelephoneEventDetecion)
+            detectionMethod = kInAndOutOfBand;
+        else
+        {
+            // Registered observer but neither flag set: inconsistent state.
+            assert(false);
+            return -1;
+        }
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId, _channelId),
+               "GetTelephoneEventDetectionStatus() => enabled=%d,"
+               "detectionMethod=%d", enabled, detectionMethod);
+    return 0;
+}
+
+#endif  // #ifdef WEBRTC_DTMF_DETECTION
+
+// Derives a binary VAD decision from the frame's activity flag and notifies
+// the registered RX-VAD observer on transitions. Always returns 0.
+int
+Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::UpdateRxVadDetection()");
+
+    // 1 = active speech, 0 = inactive. (The original initialized a local to
+    // 1 and immediately overwrote it — dead store removed.)
+    const int vadDecision =
+        (audioFrame._vadActivity == AudioFrame::kVadActive) ? 1 : 0;
+
+    // Only report transitions, and only while an observer is installed;
+    // _oldVadDecision is deliberately updated only on notification.
+    if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
+    {
+        OnRxVadDetected(vadDecision);
+        _oldVadDecision = vadDecision;
+    }
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::UpdateRxVadDetection() => vadDecision=%d",
+                 vadDecision);
+    return 0;
+}
+
+// Installs an observer for receive-side VAD decisions. Fails if one is
+// already installed.
+int
+Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterRxVadObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_rxVadObserverPtr != NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterRxVadObserver() observer already enabled");
+        return -1;
+    }
+
+    // Install the observer and enable RX VAD reporting.
+    _RxVadDetection = true;
+    _rxVadObserverPtr = &observer;
+    return 0;
+}
+
+// Removes the receive-side VAD observer. Deregistering when none is
+// installed is reported as a warning but still succeeds.
+int
+Channel::DeRegisterRxVadObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterRxVadObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_rxVadObserverPtr == NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterRxVadObserver() observer already disabled");
+        return 0;
+    }
+
+    // Disable reporting and drop the observer pointer.
+    _RxVadDetection = false;
+    _rxVadObserverPtr = NULL;
+    return 0;
+}
+
+// Reports the most recent send-side frame type as a voice-activity
+// indicator. Always returns 0.
+int
+Channel::VoiceActivityIndicator(int &activity)
+{
+    activity = _sendFrameType;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::VoiceActivityIndicator(indicator=%d)", activity);
+    return 0;
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+
+// Enables/disables receive-side AGC and selects its mode on the RX audio
+// processing module. Returns -1 on an invalid mode or an APM failure.
+int
+Channel::SetRxAgcStatus(const bool enable, const AgcModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
+                 (int)enable, (int)mode);
+
+    // Map the VoE mode to the APM GainControl mode.
+    GainControl::Mode agcMode(GainControl::kFixedDigital);
+    switch (mode)
+    {
+        case kAgcDefault:
+            agcMode = GainControl::kAdaptiveDigital;
+            break;
+        case kAgcUnchanged:
+            // Keep whatever mode the APM currently uses.
+            agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
+            break;
+        case kAgcFixedDigital:
+            agcMode = GainControl::kFixedDigital;
+            break;
+        case kAgcAdaptiveDigital:
+            agcMode =GainControl::kAdaptiveDigital;
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetRxAgcStatus() invalid Agc mode");
+            return -1;
+    }
+
+    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcStatus() failed to set Agc mode");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcStatus() failed to set Agc state");
+        return -1;
+    }
+
+    _rxAgcIsEnabled = enable;
+
+    // RX APM processing is needed as long as either AGC or NS is active.
+    _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
+
+    return 0;
+}
+
+// Reads the receive-side AGC enabled state and mode from the RX APM.
+// Returns -1 if the APM reports an unrecognized mode.
+int
+Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetRxAgcStatus(enable=?, mode=?)");
+
+    // Query the APM directly; no cached state is consulted here.
+    enabled = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
+
+    switch (_rxAudioProcessingModulePtr->gain_control()->mode())
+    {
+        case GainControl::kFixedDigital:
+            mode = kAgcFixedDigital;
+            break;
+        case GainControl::kAdaptiveDigital:
+            mode = kAgcAdaptiveDigital;
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_APM_ERROR, kTraceError,
+                "GetRxAgcStatus() invalid Agc mode");
+            return -1;
+    }
+
+    return 0;
+}
+
+// Applies an AGC configuration (target level, compression gain, limiter) to
+// the receive-side APM. Stops at, and reports, the first failing setting.
+int
+Channel::SetRxAgcConfig(const AgcConfig config)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRxAgcConfig()");
+
+    if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
+        config.targetLeveldBOv) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcConfig() failed to set target peak |level|"
+            "(or envelope) of the Agc");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
+        config.digitalCompressionGaindB) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcConfig() failed to set the range in |gain| the"
+            " digital compression stage may apply");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
+        config.limiterEnable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcConfig() failed to set hard limiter to the signal");
+        return -1;
+    }
+
+    return 0;
+}
+
+// Reads the current receive-side AGC configuration back from the APM.
+// Always returns 0.
+int
+Channel::GetRxAgcConfig(AgcConfig& config)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRxAgcConfig(config=%?)");
+
+    config.targetLeveldBOv =
+        _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
+    config.digitalCompressionGaindB =
+        _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
+    config.limiterEnable =
+        _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
+                   "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
+                   " limiterEnable=%d",
+                   config.targetLeveldBOv,
+                   config.digitalCompressionGaindB,
+                   config.limiterEnable);
+
+    return 0;
+}
+
+#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
+
+#ifdef WEBRTC_VOICE_ENGINE_NR
+
+// Enables/disables receive-side noise suppression and selects its level on
+// the RX audio processing module. Returns -1 on an APM failure.
+int
+Channel::SetRxNsStatus(const bool enable, const NsModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
+                 (int)enable, (int)mode);
+
+    // Map the VoE NS mode to an APM suppression level; an unrecognized mode
+    // falls through with the compile-time default (no default case).
+    NoiseSuppression::Level nsLevel(
+        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
+    switch (mode)
+    {
+        case kNsDefault:
+            nsLevel = (NoiseSuppression::Level)
+                WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
+            break;
+        case kNsUnchanged:
+            // Keep whatever level the APM currently uses.
+            nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
+            break;
+        case kNsConference:
+            nsLevel = NoiseSuppression::kHigh;
+            break;
+        case kNsLowSuppression:
+            nsLevel = NoiseSuppression::kLow;
+            break;
+        case kNsModerateSuppression:
+            nsLevel = NoiseSuppression::kModerate;
+            break;
+        case kNsHighSuppression:
+            nsLevel = NoiseSuppression::kHigh;
+            break;
+        case kNsVeryHighSuppression:
+            nsLevel = NoiseSuppression::kVeryHigh;
+            break;
+    }
+
+    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
+        != 0)
+    {
+        // Fixed: the error messages previously named SetRxAgcStatus()/Agc.
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxNsStatus() failed to set Ns level");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxNsStatus() failed to set Ns state");
+        return -1;
+    }
+
+    _rxNsIsEnabled = enable;
+    // RX APM processing is needed as long as either AGC or NS is active.
+    _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
+
+    return 0;
+}
+
+// Reads the receive-side noise-suppression state and level from the RX APM.
+// Always returns 0; an unrecognized level leaves |mode| untouched.
+int
+Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRxNsStatus(enable=?, mode=?)");
+
+    // Query the APM directly; no cached state is consulted here.
+    enabled = _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
+
+    switch (_rxAudioProcessingModulePtr->noise_suppression()->level())
+    {
+        case NoiseSuppression::kLow:
+            mode = kNsLowSuppression;
+            break;
+        case NoiseSuppression::kModerate:
+            mode = kNsModerateSuppression;
+            break;
+        case NoiseSuppression::kHigh:
+            mode = kNsHighSuppression;
+            break;
+        case NoiseSuppression::kVeryHigh:
+            mode = kNsVeryHighSuppression;
+            break;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
+    return 0;
+}
+
+#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
+
+// Installs an RTP observer for this channel. Fails if one is already set.
+int
+Channel::RegisterRTPObserver(VoERTPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::RegisterRTPObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_rtpObserverPtr != NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterRTPObserver() observer already enabled");
+        return -1;
+    }
+
+    // Enable observation and install the callback target.
+    _rtpObserver = true;
+    _rtpObserverPtr = &observer;
+
+    return 0;
+}
+
+// Removes the RTP observer. Deregistering when none is installed is
+// reported as a warning but still succeeds.
+int
+Channel::DeRegisterRTPObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterRTPObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_rtpObserverPtr == NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterRTPObserver() observer already disabled");
+        return 0;
+    }
+
+    // Disable observation and drop the callback target.
+    _rtpObserverPtr = NULL;
+    _rtpObserver = false;
+
+    return 0;
+}
+
+// Installs an RTCP observer for this channel. Fails if one is already set.
+int
+Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterRTCPObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_rtcpObserverPtr != NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterRTCPObserver() observer already enabled");
+        return -1;
+    }
+
+    // Enable observation and install the callback target.
+    _rtcpObserver = true;
+    _rtcpObserverPtr = &observer;
+
+    return 0;
+}
+
+// Removes the RTCP observer. Deregistering when none is installed is
+// reported as a warning but still succeeds.
+int
+Channel::DeRegisterRTCPObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::DeRegisterRTCPObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_rtcpObserverPtr == NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterRTCPObserver() observer already disabled");
+        return 0;
+    }
+
+    // Disable observation and drop the callback target.
+    _rtcpObserverPtr = NULL;
+    _rtcpObserver = false;
+
+    return 0;
+}
+
+// Sets the local RTP SSRC. Rejected while the channel is actively sending,
+// since the SSRC must stay constant within an RTP session.
+int
+Channel::SetLocalSSRC(unsigned int ssrc)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetLocalSSRC()");
+    if (_sending) {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_SENDING, kTraceError,
+            "SetLocalSSRC() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule.SetSSRC(ssrc) == 0) {
+        return 0;
+    }
+    _engineStatisticsPtr->SetLastError(
+        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+        "SetLocalSSRC() failed to set SSRC");
+    return -1;
+}
+
+// Reads the local RTP SSRC from the RTP/RTCP module. Always returns 0.
+int
+Channel::GetLocalSSRC(unsigned int& ssrc)
+{
+    ssrc = _rtpRtcpModule.SSRC();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetLocalSSRC() => ssrc=%lu", ssrc);
+    return 0;
+}
+
+// Reads the remote sender's SSRC from the RTP/RTCP module. Always returns 0.
+int
+Channel::GetRemoteSSRC(unsigned int& ssrc)
+{
+    ssrc = _rtpRtcpModule.RemoteSSRC();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetRemoteSSRC() => ssrc=%lu", ssrc);
+    return 0;
+}
+
+// Copies the CSRC list received from the remote side into |arrCSRC|.
+// Note: unlike most methods here, the return value is the number of CSRCs
+// (or -1 on an invalid argument), not a plain 0/-1 status.
+int
+Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
+{
+    if (arrCSRC == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetRemoteCSRCs() invalid array argument");
+        return -1;
+    }
+    WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize];
+    WebRtc_Word32 CSRCs(0);
+    CSRCs = _rtpRtcpModule.CSRCs(arrOfCSRC);
+    if (CSRCs > 0)
+    {
+        // Assumes kRtpCsrcSize <= 15 so the copy fits the caller's buffer —
+        // TODO(review): confirm against the constant's definition.
+        memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(WebRtc_UWord32));
+        for (int i = 0; i < (int) CSRCs; i++)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
+        }
+    } else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                   VoEId(_instanceId, _channelId),
+                   "GetRemoteCSRCs() => list is empty!");
+    }
+    return CSRCs;
+}
+
+// Enables/disables the RTP audio-level header extension (with extension
+// |ID|), lazily creating a dedicated AudioProcessing instance whose level
+// estimator supplies the values.
+int
+Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
+{
+    // Lazily create the per-channel APM used only for level estimation.
+    if (_rtpAudioProc.get() == NULL)
+    {
+        _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
+                                                                _channelId)));
+        if (_rtpAudioProc.get() == NULL)
+        {
+            _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
+                "Failed to create AudioProcessing");
+            return -1;
+        }
+    }
+
+    // Best-effort: a level-estimator failure is logged as a warning but
+    // does not abort enabling the header extension.
+    if (_rtpAudioProc->level_estimator()->Enable(enable) !=
+        AudioProcessing::kNoError)
+    {
+        _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
+            "Failed to enable AudioProcessing::level_estimator()");
+    }
+
+    _includeAudioLevelIndication = enable;
+    return _rtpRtcpModule.SetRTPAudioLevelIndicationStatus(enable, ID);
+}
+// Reads back the RTP audio-level header extension state and ID from the
+// RTP/RTCP module.
+int
+Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
+{
+    // Fixed: query the module BEFORE tracing — the original logged the
+    // caller's uninitialized |enabled|/|ID| values.
+    const int ret =
+        _rtpRtcpModule.GetRTPAudioLevelIndicationStatus(enabled, ID);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
+                 enabled, ID);
+    return ret;
+}
+
+// Turns RTCP on (compound packets) or off for this channel.
+int
+Channel::SetRTCPStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRTCPStatus()");
+    const RTCPMethod method = enable ? kRtcpCompound : kRtcpOff;
+    if (_rtpRtcpModule.SetRTCPStatus(method) != 0) {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetRTCPStatus() failed to set RTCP status");
+        return -1;
+    }
+    return 0;
+}
+
+// Reports whether RTCP is enabled (any mode other than kRtcpOff).
+int
+Channel::GetRTCPStatus(bool& enabled)
+{
+    enabled = (_rtpRtcpModule.RTCP() != kRtcpOff);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetRTCPStatus() => enabled=%d", enabled);
+    return 0;
+}
+
+// Sets the local RTCP CNAME via the RTP/RTCP module.
+int
+Channel::SetRTCP_CNAME(const char cName[256])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetRTCP_CNAME()");
+    if (_rtpRtcpModule.SetCNAME(cName) == 0)
+    {
+        return 0;
+    }
+    _engineStatisticsPtr->SetLastError(
+        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+        "SetRTCP_CNAME() failed to set RTCP CNAME");
+    return -1;
+}
+
+// Reads the locally configured RTCP CNAME back from the RTP/RTCP module.
+int
+Channel::GetRTCP_CNAME(char cName[256])
+{
+    if (_rtpRtcpModule.CNAME(cName) == 0)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "GetRTCP_CNAME() => cName=%s", cName);
+        return 0;
+    }
+    _engineStatisticsPtr->SetLastError(
+        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+        "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
+    return -1;
+}
+
+// Retrieves the remote side's RTCP CNAME into the caller's 256-byte buffer.
+int
+Channel::GetRemoteRTCP_CNAME(char cName[256])
+{
+    if (cName == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
+        return -1;
+    }
+    WebRtc_Word8 cname[RTCP_CNAME_SIZE];
+    const WebRtc_UWord32 remoteSSRC = _rtpRtcpModule.RemoteSSRC();
+    if (_rtpRtcpModule.RemoteCNAME(remoteSSRC, cname) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_CNAME, kTraceError,
+            "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
+        return -1;
+    }
+    // Fixed: bounded copy instead of strcpy — never overrun the caller's
+    // 256-byte buffer even if the module-provided string is longer or not
+    // NUL-terminated within RTCP_CNAME_SIZE.
+    strncpy(cName, cname, 255);
+    cName[255] = '\0';
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRemoteRTCP_CNAME() => cName=%s", cName);
+    return 0;
+}
+
+// Collects remote RTCP information: NTP time and RTP timestamp from the last
+// received Sender Report, the locally derived playout timestamp, and —
+// optionally, when the pointers are non-NULL — jitter and fraction-lost from
+// the matching receiver report block.
+int
+Channel::GetRemoteRTCPData(
+    unsigned int& NTPHigh,
+    unsigned int& NTPLow,
+    unsigned int& timestamp,
+    unsigned int& playoutTimestamp,
+    unsigned int* jitter,
+    unsigned short* fractionLost)
+{
+    // --- Information from sender info in received Sender Reports
+
+    RTCPSenderInfo senderInfo;
+    if (_rtpRtcpModule.RemoteRTCPStat(&senderInfo) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "GetRemoteRTCPData() failed to retrieve sender info for remote "
+            "side");
+        return -1;
+    }
+
+    // We only utilize 12 out of 20 bytes in the sender info (ignores packet
+    // and octet count)
+    NTPHigh = senderInfo.NTPseconds;
+    NTPLow = senderInfo.NTPfraction;
+    timestamp = senderInfo.RTPtimeStamp;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
+                 "timestamp=%lu",
+                 NTPHigh, NTPLow, timestamp);
+
+    // --- Locally derived information
+
+    // This value is updated on each incoming RTCP packet (0 when no packet
+    // has been received)
+    playoutTimestamp = _playoutTimeStampRTCP;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRemoteRTCPData() => playoutTimestamp=%lu",
+                 _playoutTimeStampRTCP);
+
+    if (NULL != jitter || NULL != fractionLost)
+    {
+        // Get all RTCP receiver report blocks that have been received on this
+        // channel. If we receive RTP packets from a remote source we know the
+        // remote SSRC and use the report block from him.
+        // Otherwise use the first report block.
+        std::vector<RTCPReportBlock> remote_stats;
+        if (_rtpRtcpModule.RemoteRTCPStat(&remote_stats) != 0 ||
+            remote_stats.empty()) {
+          WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteRTCPData() failed to measure statistics due"
+                       " to lack of received RTP and/or RTCP packets");
+          return -1;
+        }
+
+        // Prefer the report block belonging to the known remote SSRC.
+        WebRtc_UWord32 remoteSSRC = _rtpRtcpModule.RemoteSSRC();
+        std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
+        for (; it != remote_stats.end(); ++it) {
+          if (it->remoteSSRC == remoteSSRC)
+            break;
+        }
+
+        if (it == remote_stats.end()) {
+          // If we have not received any RTCP packets from this SSRC it probably
+          // means that we have not received any RTP packets.
+          // Use the first received report block instead.
+          it = remote_stats.begin();
+          remoteSSRC = it->remoteSSRC;
+        }
+
+        if (jitter) {
+          *jitter = it->jitter;
+          WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteRTCPData() => jitter = %lu", *jitter);
+        }
+
+        if (fractionLost) {
+          *fractionLost = it->fractionLost;
+          WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteRTCPData() => fractionLost = %lu",
+                       *fractionLost);
+        }
+    }
+    return 0;
+}
+
+// Schedules an application-defined (APP) RTCP packet for transmission.
+// Preconditions: channel is sending, |data| is non-NULL, the length is a
+// multiple of 4 bytes (RTCP 32-bit alignment), and RTCP is enabled.
+int
+Channel::SendApplicationDefinedRTCPPacket(const unsigned char subType,
+                                             unsigned int name,
+                                             const char* data,
+                                             unsigned short dataLengthInBytes)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SendApplicationDefinedRTCPPacket()");
+    if (!_sending) {
+        _engineStatisticsPtr->SetLastError(
+            VE_NOT_SENDING, kTraceError,
+            "SendApplicationDefinedRTCPPacket() not sending");
+        return -1;
+    }
+    if (data == NULL) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SendApplicationDefinedRTCPPacket() invalid data value");
+        return -1;
+    }
+    if (dataLengthInBytes % 4 != 0) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SendApplicationDefinedRTCPPacket() invalid length value");
+        return -1;
+    }
+    const RTCPMethod rtcpMethod = _rtpRtcpModule.RTCP();
+    if (rtcpMethod == kRtcpOff) {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTCP_ERROR, kTraceError,
+            "SendApplicationDefinedRTCPPacket() RTCP is disabled");
+        return -1;
+    }
+
+    // Create and schedule the RTCP APP packet for transmission
+    if (_rtpRtcpModule.SetRTCPApplicationSpecificData(
+        subType,
+        name,
+        (const unsigned char*) data,
+        dataLengthInBytes) != 0) {
+        _engineStatisticsPtr->SetLastError(
+            VE_SEND_ERROR, kTraceError,
+            "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
+        return -1;
+    }
+    return 0;
+}
+
+// Reports jitter statistics (in ms, scaled by the current playout
+// frequency) and the number of discarded packets. Always returns 0;
+// a statistics-read failure is recorded as a warning only.
+int
+Channel::GetRTPStatistics(
+        unsigned int& averageJitterMs,
+        unsigned int& maxJitterMs,
+        unsigned int& discardedPackets)
+{
+    WebRtc_UWord8 fraction_lost(0);
+    WebRtc_UWord32 cum_lost(0);
+    WebRtc_UWord32 ext_max(0);
+    WebRtc_UWord32 jitter(0);
+    WebRtc_UWord32 max_jitter(0);
+
+    // Fixed: define the output parameters up front — they were previously
+    // left uninitialized when the playout frequency was unavailable.
+    averageJitterMs = 0;
+    maxJitterMs = 0;
+
+    // The jitter statistics is updated for each received RTP packet and is
+    // based on received packets.
+    if (_rtpRtcpModule.StatisticsRTP(&fraction_lost,
+                                     &cum_lost,
+                                     &ext_max,
+                                     &jitter,
+                                     &max_jitter) != 0)
+    {
+        // Best-effort: continue with zeroed statistics on failure.
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
+            "GetRTPStatistics() failed to read RTP statistics from the "
+            "RTP/RTCP module");
+    }
+
+    const WebRtc_Word32 playoutFrequency =
+        _audioCodingModule.PlayoutFrequency();
+    if (playoutFrequency > 0)
+    {
+        // Scale RTP statistics given the current playout frequency
+        maxJitterMs = max_jitter / (playoutFrequency / 1000);
+        averageJitterMs = jitter / (playoutFrequency / 1000);
+    }
+
+    discardedPackets = _numberOfDiscardedPackets;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId, _channelId),
+               "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
+               " discardedPackets = %lu)",
+               averageJitterMs, maxJitterMs, discardedPackets);
+    return 0;
+}
+
// Fills |stats| with call statistics in three sequential steps:
//   1) receive-side RTP statistics (loss, extended max, jitter),
//   2) round-trip time (requires RTCP enabled and a known remote SSRC),
//   3) RTP data counters (bytes/packets sent and received).
// Each step that fails only logs a warning; 0 is always returned and the
// affected fields keep the default values assigned below.
int
Channel::GetRTPStatistics(CallStatistics& stats)
{
    WebRtc_UWord8 fraction_lost(0);
    WebRtc_UWord32 cum_lost(0);
    WebRtc_UWord32 ext_max(0);
    WebRtc_UWord32 jitter(0);
    WebRtc_UWord32 max_jitter(0);

    // --- Part one of the final structure (four values)

    // The jitter statistics is updated for each received RTP packet and is
    // based on received packets.
    if (_rtpRtcpModule.StatisticsRTP(&fraction_lost,
                                     &cum_lost,
                                     &ext_max,
                                     &jitter,
                                     &max_jitter) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
            "GetRTPStatistics() failed to read RTP statistics from the "
            "RTP/RTCP module");
    }

    stats.fractionLost = fraction_lost;
    stats.cumulativeLost = cum_lost;
    stats.extendedMax = ext_max;
    stats.jitterSamples = jitter;

    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
                 " extendedMax=%lu, jitterSamples=%li)",
                 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
                 stats.jitterSamples);

    // --- Part two of the final structure (one value)

    // RTT remains 0 unless RTCP is on, a remote SSRC is known, and the
    // module delivers a measurement.
    WebRtc_UWord16 RTT(0);
    RTCPMethod method = _rtpRtcpModule.RTCP();
    if (method == kRtcpOff)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "GetRTPStatistics() RTCP is disabled => valid RTT "
                     "measurements cannot be retrieved");
    } else
    {
        // The remote SSRC will be zero if no RTP packet has been received.
        WebRtc_UWord32 remoteSSRC = _rtpRtcpModule.RemoteSSRC();
        if (remoteSSRC > 0)
        {
            WebRtc_UWord16 avgRTT(0);
            WebRtc_UWord16 maxRTT(0);
            WebRtc_UWord16 minRTT(0);

            // Only RTT (the most recent value) is propagated to |stats|;
            // avg/min/max are discarded here.
            if (_rtpRtcpModule.RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
                != 0)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId, _channelId),
                             "GetRTPStatistics() failed to retrieve RTT from "
                             "the RTP/RTCP module");
            }
        } else
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "GetRTPStatistics() failed to measure RTT since no "
                         "RTP packets have been received yet");
        }
    }

    stats.rttMs = static_cast<int> (RTT);

    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() => rttMs=%d", stats.rttMs);

    // --- Part three of the final structure (four values)

    WebRtc_UWord32 bytesSent(0);
    WebRtc_UWord32 packetsSent(0);
    WebRtc_UWord32 bytesReceived(0);
    WebRtc_UWord32 packetsReceived(0);

    if (_rtpRtcpModule.DataCountersRTP(&bytesSent,
                                       &packetsSent,
                                       &bytesReceived,
                                       &packetsReceived) != 0)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                     " output will not be complete");
    }

    stats.bytesSent = bytesSent;
    stats.packetsSent = packetsSent;
    stats.bytesReceived = bytesReceived;
    stats.packetsReceived = packetsReceived;

    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
                 " bytesReceived=%d, packetsReceived=%d)",
                 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
                 stats.packetsReceived);

    return 0;
}
+
+int
+Channel::SetFECStatus(bool enable, int redPayloadtype)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetFECStatus()");
+
+    CodecInst codec;
+
+    // Get default RED settings from the ACM database
+    bool foundRED(false);
+    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+    for (int idx = 0; (!foundRED && idx < nSupportedCodecs); idx++)
+    {
+        _audioCodingModule.Codec(idx, codec);
+        if (!STR_CASE_CMP(codec.plname, "RED"))
+        {
+            foundRED = true;
+        }
+    }
+    if (!foundRED)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetFECStatus() RED is not supported");
+        return -1;
+    }
+
+    if (redPayloadtype != -1)
+    {
+        codec.pltype = redPayloadtype;
+    }
+
+    if (_audioCodingModule.RegisterSendCodec(codec) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetFECStatus() RED registration in ACM module failed");
+        return -1;
+    }
+    if (_rtpRtcpModule.SetSendREDPayloadType(codec.pltype) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetFECStatus() RED registration in RTP/RTCP module failed");
+        return -1;
+    }
+    if (_audioCodingModule.SetFECStatus(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetFECStatus() failed to set FEC state in the ACM");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
+{
+    enabled = _audioCodingModule.FECStatus();
+    if (enabled)
+    {
+        WebRtc_Word8 payloadType(0);
+        if (_rtpRtcpModule.SendREDPayloadType(payloadType) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+                "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
+                "module");
+            return -1;
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                   VoEId(_instanceId, _channelId),
+                   "GetFECStatus() => enabled=%d, redPayloadtype=%d",
+                   enabled, redPayloadtype);
+        return 0;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetFECStatus() => enabled=%d", enabled);
+    return 0;
+}
+
+int
+Channel::SetRTPKeepaliveStatus(bool enable,
+                               unsigned char unknownPayloadType,
+                               int deltaTransmitTimeSeconds)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetRTPKeepaliveStatus()");
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_SENDING, kTraceError,
+            "SetRTPKeepaliveStatus() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule.SetRTPKeepaliveStatus(
+        enable,
+        unknownPayloadType,
+        1000 * deltaTransmitTimeSeconds) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetRTPKeepaliveStatus() failed to set RTP keepalive status");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetRTPKeepaliveStatus(bool& enabled,
+                               unsigned char& unknownPayloadType,
+                               int& deltaTransmitTimeSeconds)
+{
+    bool onOff(false);
+    WebRtc_Word8 payloadType(0);
+    WebRtc_UWord16 deltaTransmitTimeMS(0);
+    if (_rtpRtcpModule.RTPKeepaliveStatus(&onOff, &payloadType,
+                                          &deltaTransmitTimeMS) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "GetRTPKeepaliveStatus() failed to retrieve RTP keepalive status");
+        return -1;
+    }
+    enabled = onOff;
+    unknownPayloadType = payloadType;
+    deltaTransmitTimeSeconds = static_cast<int> (deltaTransmitTimeMS / 1000);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRTPKeepaliveStatus() => enabled=%d, "
+                 "unknownPayloadType=%u, deltaTransmitTimeSeconds=%d",
+                 enabled, unknownPayloadType, deltaTransmitTimeSeconds);
+    return 0;
+}
+
+int
+Channel::StartRTPDump(const char fileNameUTF8[1024],
+                      RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::StartRTPDump()");
+    if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRTPDump() invalid RTP direction");
+        return -1;
+    }
+    RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
+        &_rtpDumpIn : &_rtpDumpOut;
+    if (rtpDumpPtr == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    if (rtpDumpPtr->IsActive())
+    {
+        rtpDumpPtr->Stop();
+    }
+    if (rtpDumpPtr->Start(fileNameUTF8) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRTPDump() failed to create file");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::StopRTPDump(RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::StopRTPDump()");
+    if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StopRTPDump() invalid RTP direction");
+        return -1;
+    }
+    RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
+        &_rtpDumpIn : &_rtpDumpOut;
+    if (rtpDumpPtr == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    if (!rtpDumpPtr->IsActive())
+    {
+        return 0;
+    }
+    return rtpDumpPtr->Stop();
+}
+
+bool
+Channel::RTPDumpIsActive(RTPDirections direction)
+{
+    if ((direction != kRtpIncoming) &&
+        (direction != kRtpOutgoing))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "RTPDumpIsActive() invalid RTP direction");
+        return false;
+    }
+    RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
+        &_rtpDumpIn : &_rtpDumpOut;
+    return rtpDumpPtr->IsActive();
+}
+
// Injects one extra RTP packet with the given payload type, marker bit and
// payload into the outgoing stream. Requires an active send stream, a
// non-NULL payload no larger than the module's max payload length, and a
// payload type in [0, 127]. Returns 0 on success, -1 on any failure.
int
Channel::InsertExtraRTPPacket(unsigned char payloadType,
                              bool markerBit,
                              const char* payloadData,
                              unsigned short payloadSize)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InsertExtraRTPPacket()");
    if (payloadType > 127)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_PLTYPE, kTraceError,
            "InsertExtraRTPPacket() invalid payload type");
        return -1;
    }
    if (payloadData == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "InsertExtraRTPPacket() invalid payload data");
        return -1;
    }
    if (payloadSize > _rtpRtcpModule.MaxDataPayloadLength())
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "InsertExtraRTPPacket() invalid payload size");
        return -1;
    }
    if (!_sending)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NOT_SENDING, kTraceError,
            "InsertExtraRTPPacket() not sending");
        return -1;
    }

    // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
    // Transport::SendPacket() will be called by the module when the RTP packet
    // is created.
    // The call to SendOutgoingData() does *not* modify the timestamp and
    // payloadtype to ensure that the RTP module generates a valid RTP packet
    // (user might utilize a non-registered payload type).
    // The marker bit and payload type will be replaced just before the actual
    // transmission, i.e., the actual modification is done *after* the RTP
    // module has delivered its RTP packet back to the VoE.
    // We will use the stored values above when the packet is modified
    // (see Channel::SendPacket()).

    // NOTE: this state must be set *before* SendOutgoingData(), which can
    // synchronously call back into Channel::SendPacket().
    _extraPayloadType = payloadType;
    _extraMarkerBit = markerBit;
    _insertExtraRTPPacket = true;

    if (_rtpRtcpModule.SendOutgoingData(kAudioFrameSpeech,
                                        _lastPayloadType,
                                        _lastLocalTimeStamp,
                                        (const WebRtc_UWord8*) payloadData,
                                        payloadSize) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "InsertExtraRTPPacket() failed to send extra RTP packet");
        return -1;
    }

    return 0;
}
+
// Copies the shared input frame into this channel's private frame and tags
// it with the channel id. The copy must happen first: it overwrites _id.
WebRtc_UWord32
Channel::Demultiplex(const AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Demultiplex()");
    _audioFrame = audioFrame;
    _audioFrame._id = _channelId;
    return 0;
}
+
+WebRtc_UWord32
+Channel::PrepareEncodeAndSend(int mixingFrequency)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::PrepareEncodeAndSend()");
+
+    if (_audioFrame._payloadDataLengthInSamples == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::PrepareEncodeAndSend() invalid audio frame");
+        return -1;
+    }
+
+    if (_inputFilePlaying)
+    {
+        MixOrReplaceAudioWithFile(mixingFrequency);
+    }
+
+    if (_mute)
+    {
+        AudioFrameOperations::Mute(_audioFrame);
+    }
+
+    if (_inputExternalMedia)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        const bool isStereo = (_audioFrame._audioChannel == 2);
+        if (_inputExternalMediaCallbackPtr)
+        {
+            _inputExternalMediaCallbackPtr->Process(
+                _channelId,
+                kRecordingPerChannel,
+               (WebRtc_Word16*)_audioFrame._payloadData,
+                _audioFrame._payloadDataLengthInSamples,
+                _audioFrame._frequencyInHz,
+                isStereo);
+        }
+    }
+
+    InsertInbandDtmfTone();
+
+    if (_includeAudioLevelIndication)
+    {
+        assert(_rtpAudioProc.get() != NULL);
+
+        // Check if settings need to be updated.
+        if (_rtpAudioProc->sample_rate_hz() != _audioFrame._frequencyInHz)
+        {
+            if (_rtpAudioProc->set_sample_rate_hz(_audioFrame._frequencyInHz) !=
+                AudioProcessing::kNoError)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "Error setting AudioProcessing sample rate");
+                return -1;
+            }
+        }
+
+        if (_rtpAudioProc->num_input_channels() != _audioFrame._audioChannel)
+        {
+            if (_rtpAudioProc->set_num_channels(_audioFrame._audioChannel,
+                                                _audioFrame._audioChannel)
+                != AudioProcessing::kNoError)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "Error setting AudioProcessing channels");
+                return -1;
+            }
+        }
+
+        // Performs level analysis only; does not affect the signal.
+        _rtpAudioProc->ProcessStream(&_audioFrame);
+    }
+
+    return 0;
+}
+
// Feeds 10 ms of PCM from _audioFrame into the ACM and triggers encoding.
// Advancing _timeStamp must happen after Add10MsData() consumes the frame
// stamped with the current value. Returns the ACM Process() result.
// NOTE(review): -1 is returned through an unsigned return type (wraps to
// 0xFFFFFFFF) — confirm callers test for non-zero.
WebRtc_UWord32
Channel::EncodeAndSend()
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::EncodeAndSend()");

    assert(_audioFrame._audioChannel <= 2);
    if (_audioFrame._payloadDataLengthInSamples == 0)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::EncodeAndSend() invalid audio frame");
        return -1;
    }

    _audioFrame._id = _channelId;

    // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.

    // The ACM resamples internally.
    _audioFrame._timeStamp = _timeStamp;
    if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::EncodeAndSend() ACM encoding failed");
        return -1;
    }

    // Advance the send timestamp by the number of samples just delivered.
    _timeStamp += _audioFrame._payloadDataLengthInSamples;

    // --- Encode if complete frame is ready

    // This call will trigger AudioPacketizationCallback::SendData if encoding
    // is done and payload is ready for packetization and transmission.
    return _audioCodingModule.Process();
}
+
+int Channel::RegisterExternalMediaProcessing(
+    ProcessingTypes type,
+    VoEMediaProcess& processObject)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (kPlaybackPerChannel == type)
+    {
+        if (_outputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceError,
+                "Channel::RegisterExternalMediaProcessing() "
+                "output external media already enabled");
+            return -1;
+        }
+        _outputExternalMediaCallbackPtr = &processObject;
+        _outputExternalMedia = true;
+    }
+    else if (kRecordingPerChannel == type)
+    {
+        if (_inputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceError,
+                "Channel::RegisterExternalMediaProcessing() "
+                "output external media already enabled");
+            return -1;
+        }
+        _inputExternalMediaCallbackPtr = &processObject;
+        _inputExternalMedia = true;
+    }
+    return 0;
+}
+
+int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (kPlaybackPerChannel == type)
+    {
+        if (!_outputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceWarning,
+                "Channel::DeRegisterExternalMediaProcessing() "
+                "output external media already disabled");
+            return 0;
+        }
+        _outputExternalMedia = false;
+        _outputExternalMediaCallbackPtr = NULL;
+    }
+    else if (kRecordingPerChannel == type)
+    {
+        if (!_inputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceWarning,
+                "Channel::DeRegisterExternalMediaProcessing() "
+                "input external media already disabled");
+            return 0;
+        }
+        _inputExternalMedia = false;
+        _inputExternalMediaCallbackPtr = NULL;
+    }
+
+    return 0;
+}
+
+int
+Channel::ResetRTCPStatistics()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ResetRTCPStatistics()");
+    WebRtc_UWord32 remoteSSRC(0);
+    remoteSSRC = _rtpRtcpModule.RemoteSSRC();
+    return _rtpRtcpModule.ResetRTT(remoteSSRC);
+}
+
+int
+Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRoundTripTimeSummary()");
+    // Override default module outputs for the case when RTCP is disabled.
+    // This is done to ensure that we are backward compatible with the
+    // VoiceEngine where we did not use RTP/RTCP module.
+    if (!_rtpRtcpModule.RTCP())
+    {
+        delaysMs.min = -1;
+        delaysMs.max = -1;
+        delaysMs.average = -1;
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
+                     " valid RTT measurements cannot be retrieved");
+        return 0;
+    }
+
+    WebRtc_UWord32 remoteSSRC;
+    WebRtc_UWord16 RTT;
+    WebRtc_UWord16 avgRTT;
+    WebRtc_UWord16 maxRTT;
+    WebRtc_UWord16 minRTT;
+    // The remote SSRC will be zero if no RTP packet has been received.
+    remoteSSRC = _rtpRtcpModule.RemoteSSRC();
+    if (remoteSSRC == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetRoundTripTimeSummary() unable to measure RTT"
+                     " since no RTP packet has been received yet");
+    }
+
+    // Retrieve RTT statistics from the RTP/RTCP module for the specified
+    // channel and SSRC. The SSRC is required to parse out the correct source
+    // in conference scenarios.
+    if (_rtpRtcpModule.RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "GetRoundTripTimeSummary unable to retrieve RTT values"
+                     " from the RTCP layer");
+        delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
+    }
+    else
+    {
+        delaysMs.min = minRTT;
+        delaysMs.max = maxRTT;
+        delaysMs.average = avgRTT;
+    }
+    return 0;
+}
+
// Forwards the jitter-buffer statistics request to the ACM.
// NOTE(review): the C-style cast assumes NetworkStatistics and
// ACMNetworkStatistics are layout-compatible — confirm the two structs
// stay in sync.
int
Channel::GetNetworkStatistics(NetworkStatistics& stats)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetNetworkStatistics()");
    return _audioCodingModule.NetworkStatistics(
        (ACMNetworkStatistics &)stats);
}
+
+int
+Channel::GetDelayEstimate(int& delayMs) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetDelayEstimate()");
+    delayMs = (_averageDelayMs + 5) / 10 + _recPacketDelayMs;
+    return 0;
+}
+
+int
+Channel::SetMinimumPlayoutDelay(int delayMs)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetMinimumPlayoutDelay()");
+    if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
+        (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetMinimumPlayoutDelay() invalid min delay");
+        return -1;
+    }
+    if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetMinimumPlayoutDelay() failed to set min playout delay");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetPlayoutTimestamp(unsigned int& timestamp)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetPlayoutTimestamp()");
+    WebRtc_UWord32 playoutTimestamp(0);
+    if (GetPlayoutTimeStamp(playoutTimestamp) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_VALUE, kTraceError,
+            "GetPlayoutTimestamp() failed to retrieve timestamp");
+        return -1;
+    }
+    timestamp = playoutTimestamp;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
+    return 0;
+}
+
+int
+Channel::SetInitTimestamp(unsigned int timestamp)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetInitTimestamp()");
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule.SetStartTimestamp(timestamp) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetInitTimestamp() failed to set timestamp");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::SetInitSequenceNumber(short sequenceNumber)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetInitSequenceNumber()");
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError,
+            "SetInitSequenceNumber() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule.SetSequenceNumber(sequenceNumber) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetInitSequenceNumber() failed to set sequence number");
+        return -1;
+    }
+    return 0;
+}
+
// Exposes the channel's RTP/RTCP module to the caller. Always succeeds.
// NOTE(review): a const method hands out a mutable pointer to the internal
// module — callers can bypass this class; _rtpRtcpModule is presumably a
// reference member for this to compile.
int
Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetRtpRtcp()");
    rtpRtcpModule = &_rtpRtcpModule;
    return 0;
}
+
// Pulls 10 ms of audio from the input file player and either mixes it with
// the microphone signal in _audioFrame (with saturation) or replaces the
// frame entirely, depending on _mixFileWithMicrophone.
// Returns 0 on success or end-of-file, -1 on failure.
WebRtc_Word32
Channel::MixOrReplaceAudioWithFile(const int mixingFrequency)
{
    // 320 samples = 10 ms of mono audio at up to 32 kHz — presumably the
    // maximum the file player delivers here (TODO confirm).
    WebRtc_Word16 fileBuffer[320];
    WebRtc_UWord32 fileSamples(0);

    {
        // The file player is only accessed under the file lock; the lock is
        // released before the frame itself is modified below.
        CriticalSectionScoped cs(_fileCritSect);

        if (_inputFilePlayerPtr == NULL)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() fileplayer"
                             " doesnt exist");
            return -1;
        }

        if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer,
                                                      fileSamples,
                                                      mixingFrequency) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file mixing "
                         "failed");
            return -1;
        }
        // Zero samples means the file has been played to the end; this is
        // reported as success without touching the frame.
        if (fileSamples == 0)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file is ended");
            return 0;
        }
    }

    // Frame and file segment are expected to have matching lengths.
    assert(_audioFrame._payloadDataLengthInSamples == fileSamples);

    if (_mixFileWithMicrophone)
    {
        Utility::MixWithSat(_audioFrame._payloadData,
                            fileBuffer,
                            (WebRtc_UWord16)fileSamples);
    }
    else
    {
        // replace ACM audio with file
        _audioFrame.UpdateFrame(_channelId,
                                -1,
                                fileBuffer,
                                (WebRtc_UWord16)fileSamples,
                                mixingFrequency,
                                AudioFrame::kNormalSpeech,
                                AudioFrame::kVadUnknown,
                                1);

    }
    return 0;
}
+
+WebRtc_Word32
+Channel::MixAudioWithFile(AudioFrame& audioFrame,
+                          const int mixingFrequency)
+{
+    assert(mixingFrequency <= 32000);
+
+    WebRtc_Word16 fileBuffer[640];
+    WebRtc_UWord32 fileSamples(0);
+
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+
+        if (_outputFilePlayerPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixAudioWithFile() file mixing failed");
+            return -1;
+        }
+
+        // We should get the frequency we ask for.
+        if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer,
+                                                       fileSamples,
+                                                       mixingFrequency) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixAudioWithFile() file mixing failed");
+            return -1;
+        }
+    }
+
+    if (audioFrame._payloadDataLengthInSamples == fileSamples)
+    {
+        // In case the incoming stream is stereo and file stream is mono,
+        // turn the file stream into stereo.
+        // TODO(xians): remove the code when FilePlayer supports real stereo.
+        if (audioFrame._audioChannel == 2)
+        {
+            // The mono file stream is copied to be stereo.
+            WebRtc_Word16* FileBufferCopy = new WebRtc_Word16[fileSamples];
+            memcpy(FileBufferCopy, fileBuffer,
+                   sizeof(WebRtc_Word16) * fileSamples);
+            for (unsigned int i = 0; i < fileSamples; i++)
+            {
+                fileBuffer[2*i]   = FileBufferCopy[i];
+                fileBuffer[2*i+1] = FileBufferCopy[i];
+            }
+            fileSamples = 2*fileSamples;
+            delete [] FileBufferCopy;
+        }
+
+        // Mix the incoming stream and file stream.
+        Utility::MixWithSat(audioFrame._payloadData,
+                            fileBuffer,
+                            (WebRtc_UWord16)fileSamples);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+            "Channel::MixAudioWithFile() _payloadDataLengthInSamples(%d) != "
+            "fileSamples(%d)",
+            audioFrame._payloadDataLengthInSamples, fileSamples);
+        return -1;
+    }
+
+    return 0;
+}
+
+// Inserts a 10 ms in-band DTMF segment into _audioFrame, replacing the mixed
+// audio, whenever a tone is pending or currently being generated.
+// Returns 0 on success, -1 if fetching a tone segment from the generator
+// fails.
+int
+Channel::InsertInbandDtmfTone()
+{
+    // Check if we should start a new tone.
+    if (_inbandDtmfQueue.PendingDtmf() &&
+        !_inbandDtmfGenerator.IsAddingTone() &&
+        _inbandDtmfGenerator.DelaySinceLastTone() >
+        kMinTelephoneEventSeparationMs)
+    {
+        WebRtc_Word8 eventCode(0);
+        WebRtc_UWord16 lengthMs(0);
+        WebRtc_UWord8 attenuationDb(0);
+
+        eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
+        _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
+        if (_playInbandDtmfEvent)
+        {
+            // Add tone to output mixer using a reduced length to minimize
+            // risk of echo. Guard against producing a negative length when
+            // the requested tone is shorter than the 80 ms reduction.
+            const WebRtc_UWord16 playLengthMs =
+                (lengthMs > 80) ? (WebRtc_UWord16)(lengthMs - 80) : lengthMs;
+            _outputMixerPtr->PlayDtmfTone(eventCode, playLengthMs,
+                                          attenuationDb);
+        }
+    }
+
+    if (_inbandDtmfGenerator.IsAddingTone())
+    {
+        WebRtc_UWord16 frequency(0);
+        _inbandDtmfGenerator.GetSampleRate(frequency);
+
+        if (frequency != _audioFrame._frequencyInHz)
+        {
+            // Update sample rate of Dtmf tone since the mixing frequency
+            // has changed.
+            _inbandDtmfGenerator.SetSampleRate(
+                (WebRtc_UWord16) (_audioFrame._frequencyInHz));
+            // Reset the tone to be added taking the new sample rate into
+            // account.
+            _inbandDtmfGenerator.ResetTone();
+        }
+
+        // 320 samples covers one 10 ms segment at up to 32 kHz.
+        WebRtc_Word16 toneBuffer[320];
+        WebRtc_UWord16 toneSamples(0);
+        // Get 10ms tone segment and set time since last tone to zero
+        if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "Channel::EncodeAndSend() inserting Dtmf failed");
+            return -1;
+        }
+
+        // Replace mixed audio with DTMF tone; the mono tone sample is
+        // duplicated on every channel of the frame.
+        for (int sample = 0;
+            sample < _audioFrame._payloadDataLengthInSamples;
+            sample++)
+        {
+            for (int channel = 0;
+                channel < _audioFrame._audioChannel;
+                channel++)
+            {
+                _audioFrame._payloadData
+                    [sample * _audioFrame._audioChannel + channel] =
+                        toneBuffer[sample];
+            }
+        }
+
+        assert(_audioFrame._payloadDataLengthInSamples == toneSamples);
+    } else
+    {
+        // Add 10ms to "delay-since-last-tone" counter
+        _inbandDtmfGenerator.UpdateDelaySinceLastTone();
+    }
+    return 0;
+}
+
+// Estimates the current playout timestamp (in RTP clock ticks) by taking the
+// last timestamp played out by the ACM and subtracting the audio device's
+// playout delay converted to timestamp units.
+// Returns 0 on success, -1 if the ACM timestamp or the ADM delay cannot be
+// read.
+WebRtc_Word32
+Channel::GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp)
+{
+    WebRtc_UWord32 timestamp(0);
+    CodecInst currRecCodec;
+
+    if (_audioCodingModule.PlayoutTimestamp(timestamp) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetPlayoutTimeStamp() failed to read playout"
+                     " timestamp from the ACM");
+        return -1;
+    }
+
+    WebRtc_UWord16 delayMS(0);
+    if (_audioDeviceModulePtr->PlayoutDelay(&delayMS) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetPlayoutTimeStamp() failed to read playout"
+                     " delay from the ADM");
+        return -1;
+    }
+
+    WebRtc_Word32 playoutFrequency = _audioCodingModule.PlayoutFrequency();
+    if (_audioCodingModule.ReceiveCodec(currRecCodec) == 0)
+    {
+        // The RTP clock rate for G.722 is 8 kHz even though the codec runs
+        // at 16 kHz (see RFC 3551; kept for backward compatibility).
+        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0)
+        {
+            playoutFrequency = 8000;
+        }
+    }
+    // Convert the delay from milliseconds to RTP timestamp units.
+    timestamp -= (delayMS * (playoutFrequency/1000));
+
+    playoutTimestamp = timestamp;
+
+    // %u matches the 32-bit unsigned argument (%lu expects unsigned long and
+    // is undefined behavior on LP64 platforms).
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetPlayoutTimeStamp() => playoutTimestamp = %u",
+                 playoutTimestamp);
+    return 0;
+}
+
+// Clears both periodic dead-or-alive detection counters back to zero.
+void
+Channel::ResetDeadOrAliveCounters()
+{
+    _countAliveDetections = 0;
+    _countDeadDetections = 0;
+}
+
+// Bumps the counter matching the latest periodic dead-or-alive verdict.
+void
+Channel::UpdateDeadOrAliveCounters(bool alive)
+{
+    if (alive)
+    {
+        ++_countAliveDetections;
+    }
+    else
+    {
+        ++_countDeadDetections;
+    }
+}
+
+// Reports the accumulated dead and alive detections.
+// Fails (-1) when periodic dead-or-alive monitoring is not enabled in the
+// RTP/RTCP module; the output parameters are left untouched in that case.
+int
+Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
+{
+    bool monitoringEnabled;
+    WebRtc_UWord8 sampleTimeSec;
+
+    _rtpRtcpModule.PeriodicDeadOrAliveStatus(monitoringEnabled,
+                                             sampleTimeSec);
+    if (!monitoringEnabled)
+    {
+        return -1;
+    }
+
+    countDead = static_cast<int>(_countDeadDetections);
+    countAlive = static_cast<int>(_countAliveDetections);
+    return 0;
+}
+
+// Forwards an RTP or RTCP packet to the registered transport.
+// Returns the transport's own result, or -1 when no transport is registered.
+WebRtc_Word32
+Channel::SendPacketRaw(const void *data, int len, bool RTCP)
+{
+    if (_transportPtr == NULL)
+    {
+        return -1;
+    }
+    return RTCP ?
+        _transportPtr->SendRTCPPacket(_channelId, data, len) :
+        _transportPtr->SendPacket(_channelId, data, len);
+}
+
+// Updates receive-side delay statistics from a newly received RTP packet:
+// _averageDelayMs (an exponentially smoothed delay estimate, stored as 10x
+// its millisecond value and compensated in GetDelayEstimate()) and
+// _recPacketDelayMs (spacing between consecutive packets).
+// Returns 0 on success, -1 when the RTP clock rate is unsupported.
+WebRtc_Word32
+Channel::UpdatePacketDelay(const WebRtc_UWord32 timestamp,
+                           const WebRtc_UWord16 sequenceNumber)
+{
+    // NOTE(review): %lu expects unsigned long but timestamp is 32-bit;
+    // %u would be the matching specifier on LP64 platforms.
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
+                 timestamp, sequenceNumber);
+
+    WebRtc_Word32 rtpReceiveFrequency(0);
+
+    // Get frequency of last received payload
+    rtpReceiveFrequency = _audioCodingModule.ReceiveFrequency();
+
+    CodecInst currRecCodec;
+    if (_audioCodingModule.ReceiveCodec(currRecCodec) == 0)
+    {
+        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0)
+        {
+            // Even though the actual sampling rate for G.722 audio is
+            // 16,000 Hz, the RTP clock rate for the G722 payload format is
+            // 8,000 Hz because that value was erroneously assigned in
+            // RFC 1890 and must remain unchanged for backward compatibility.
+            rtpReceiveFrequency = 8000;
+        }
+    }
+
+    // Ticks elapsed since _playoutTimeStampRTP. Unsigned arithmetic: a
+    // timestamp behind the stored one wraps to a huge value, which the
+    // > 5000 ms check below discards.
+    const WebRtc_UWord32 timeStampDiff = timestamp - _playoutTimeStampRTP;
+    WebRtc_UWord32 timeStampDiffMs(0);
+
+    // timeStampDiff is unsigned, so "> 0" only excludes an exact-zero diff.
+    if (timeStampDiff > 0)
+    {
+        // Convert RTP ticks to ms; each shift divides by (clock rate / 1000).
+        switch (rtpReceiveFrequency)
+        {
+            case 8000:
+                timeStampDiffMs = timeStampDiff >> 3;
+                break;
+            case 16000:
+                timeStampDiffMs = timeStampDiff >> 4;
+                break;
+            case 32000:
+                timeStampDiffMs = timeStampDiff >> 5;
+                break;
+            default:
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "Channel::UpdatePacketDelay() invalid sample "
+                             "rate");
+                // NOTE(review): this assignment is dead - the function
+                // returns immediately below.
+                timeStampDiffMs = 0;
+                return -1;
+        }
+        // Treat implausibly large diffs (> 5 s) as zero, e.g. after a
+        // timestamp jump.
+        if (timeStampDiffMs > 5000)
+        {
+            timeStampDiffMs = 0;
+        }
+
+        if (_averageDelayMs == 0)
+        {
+            // First measurement: seed the filter directly.
+            _averageDelayMs = timeStampDiffMs;
+        }
+        else
+        {
+            // Filter average delay value using exponential filter (alpha is
+            // 7/8). We derive 10*_averageDelayMs here (reduces risk of
+            // rounding error) and compensate for it in GetDelayEstimate()
+            // later. Adding 4/8 results in correct rounding.
+            _averageDelayMs = ((_averageDelayMs*7 + 10*timeStampDiffMs + 4)>>3);
+        }
+
+        // Packet arrived directly after the previous one: measure the
+        // inter-packet timestamp spacing.
+        if (sequenceNumber - _previousSequenceNumber == 1)
+        {
+            WebRtc_UWord16 packetDelayMs = 0;
+            // NOTE(review): no default case here - an unsupported clock rate
+            // leaves packetDelayMs at 0, which the range check below rejects.
+            switch (rtpReceiveFrequency)
+            {
+            case 8000:
+                packetDelayMs = (WebRtc_UWord16)(
+                    (timestamp - _previousTimestamp) >> 3);
+                break;
+            case 16000:
+                packetDelayMs = (WebRtc_UWord16)(
+                    (timestamp - _previousTimestamp) >> 4);
+                break;
+            case 32000:
+                packetDelayMs = (WebRtc_UWord16)(
+                    (timestamp - _previousTimestamp) >> 5);
+                break;
+            }
+
+            // Only accept plausible packetization intervals (10-60 ms).
+            if (packetDelayMs >= 10 && packetDelayMs <= 60)
+                _recPacketDelayMs = packetDelayMs;
+        }
+    }
+
+    _previousSequenceNumber = sequenceNumber;
+    _previousTimestamp = timestamp;
+
+    return 0;
+}
+
+// Registers every codec known to the ACM as a receivable payload in the
+// RTP/RTCP module, logging the outcome for each codec individually.
+void
+Channel::RegisterReceiveCodecsToRTPModule()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterReceiveCodecsToRTPModule()");
+
+
+    CodecInst codecInst;
+    const WebRtc_UWord8 numCodecs = AudioCodingModule::NumberOfCodecs();
+
+    for (int i = 0; i < numCodecs; i++)
+    {
+        // Open up the RTP/RTCP receiver for all supported codecs. The
+        // registration call is skipped if the codec lookup itself fails.
+        const bool registered =
+            (_audioCodingModule.Codec(i, codecInst) != -1) &&
+            (_rtpRtcpModule.RegisterReceivePayload(codecInst) != -1);
+        if (registered)
+        {
+            WEBRTC_TRACE(
+                         kTraceInfo,
+                         kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::RegisterReceiveCodecsToRTPModule() %s "
+                         "(%d/%d/%d/%d) has been added to the RTP/RTCP "
+                         "receiver",
+                         codecInst.plname, codecInst.pltype, codecInst.plfreq,
+                         codecInst.channels, codecInst.rate);
+        }
+        else
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::RegisterReceiveCodecsToRTPModule() unable"
+                         " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
+                         codecInst.plname, codecInst.pltype, codecInst.plfreq,
+                         codecInst.channels, codecInst.rate);
+        }
+    }
+}
+
+// Runs the receive-side (rx) audio processing module on |audioFrame|,
+// re-configuring the APM sample rate first if the frame's frequency changed.
+// Always returns 0; processing errors are only traced.
+int
+Channel::ApmProcessRx(AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ApmProcessRx()");
+
+    // Reset the APM frequency if the frequency has changed
+    if (_rxAudioProcessingModulePtr->sample_rate_hz() !=
+        audioFrame._frequencyInHz)
+    {
+        if (_rxAudioProcessingModulePtr->set_sample_rate_hz(
+            audioFrame._frequencyInHz) != 0)
+        {
+            // Report the frequency of the frame being processed here
+            // (|audioFrame|), not the unrelated member _audioFrame, and use
+            // %d to match the signed argument.
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                         "AudioProcessingModule::set_sample_rate_hz("
+                         "_frequencyInHz=%d) => error",
+                         audioFrame._frequencyInHz);
+        }
+    }
+
+    if (_rxAudioProcessingModulePtr->ProcessStream(&audioFrame) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                   "AudioProcessingModule::ProcessStream() => error");
+    }
+
+    return 0;
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/channel.h b/trunk/src/voice_engine/main/source/channel.h
new file mode 100644
index 0000000..c9d5b82
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel.h
@@ -0,0 +1,659 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_H
+#define WEBRTC_VOICE_ENGINE_CHANNEL_H
+
+#include "audio_coding_module.h"
+#include "audio_conference_mixer_defines.h"
+#include "common_types.h"
+#include "dtmf_inband.h"
+#include "dtmf_inband_queue.h"
+#include "file_player.h"
+#include "file_recorder.h"
+#include "level_indicator.h"
+#include "resampler.h"
+#include "rtp_rtcp.h"
+#include "scoped_ptr.h"
+#include "shared_data.h"
+#include "voe_audio_processing.h"
+#include "voe_network.h"
+#include "voice_engine_defines.h"
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+#include "udp_transport.h"
+#endif
+#ifdef WEBRTC_SRTP
+#include "SrtpModule.h"
+#endif
+#ifdef WEBRTC_DTMF_DETECTION
+#include "voe_dtmf.h" // TelephoneEventDetectionMethods, TelephoneEventObserver
+#endif
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class ProcessThread;
+class AudioDeviceModule;
+class RtpRtcp;
+class FileWrapper;
+class RtpDump;
+class VoiceEngineObserver;
+class VoEMediaProcess;
+class VoERTPObserver;
+class VoERTCPObserver;
+
+struct CallStatistics;
+
+namespace voe
+{
+class Statistics;
+class TransmitMixer;
+class OutputMixer;
+
+
+class Channel:
+    public RtpData,
+    public RtpFeedback,
+    public RtcpFeedback,
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    public UdpTransportData, // receiving packet from sockets
+#endif
+    public FileCallback, // receiving notification from file player & recorder
+    public Transport,
+    public RtpAudioFeedback,
+    public AudioPacketizationCallback, // receive encoded packets from the ACM
+    public ACMVADCallback, // receive voice activity from the ACM
+#ifdef WEBRTC_DTMF_DETECTION
+    public AudioCodingFeedback, // inband Dtmf detection in the ACM
+#endif
+    public MixerParticipant // supplies output mixer with audio frames
+{
+public:
+    enum {KNumSocketThreads = 1};
+    enum {KNumberOfSocketBuffers = 8};
+public:
+    virtual ~Channel();
+    static WebRtc_Word32 CreateChannel(Channel*& channel,
+                                       const WebRtc_Word32 channelId,
+                                       const WebRtc_UWord32 instanceId);
+    Channel(const WebRtc_Word32 channelId, const WebRtc_UWord32 instanceId);
+    WebRtc_Word32 Init();
+    WebRtc_Word32 SetEngineInformation(
+        Statistics& engineStatistics,
+        OutputMixer& outputMixer,
+        TransmitMixer& transmitMixer,
+        ProcessThread& moduleProcessThread,
+        AudioDeviceModule& audioDeviceModule,
+        VoiceEngineObserver* voiceEngineObserver,
+        CriticalSectionWrapper* callbackCritSect);
+    WebRtc_Word32 UpdateLocalTimeStamp();
+
+public:
+    // API methods
+
+    // VoEBase
+    WebRtc_Word32 StartPlayout();
+    WebRtc_Word32 StopPlayout();
+    WebRtc_Word32 StartSend();
+    WebRtc_Word32 StopSend();
+    WebRtc_Word32 StartReceiving();
+    WebRtc_Word32 StopReceiving();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_Word32 SetLocalReceiver(const WebRtc_UWord16 rtpPort,
+                                   const WebRtc_UWord16 rtcpPort,
+                                   const WebRtc_Word8 ipAddr[64],
+                                   const WebRtc_Word8 multicastIpAddr[64]);
+    WebRtc_Word32 GetLocalReceiver(int& port, int& RTCPport, char ipAddr[]);
+    WebRtc_Word32 SetSendDestination(const WebRtc_UWord16 rtpPort,
+                                     const WebRtc_Word8 ipAddr[64],
+                                     const int sourcePort,
+                                     const WebRtc_UWord16 rtcpPort);
+    WebRtc_Word32 GetSendDestination(int& port, char ipAddr[64],
+                                     int& sourcePort, int& RTCPport);
+#endif
+    WebRtc_Word32 SetNetEQPlayoutMode(NetEqModes mode);
+    WebRtc_Word32 GetNetEQPlayoutMode(NetEqModes& mode);
+    WebRtc_Word32 SetNetEQBGNMode(NetEqBgnModes mode);
+    WebRtc_Word32 GetNetEQBGNMode(NetEqBgnModes& mode);
+    WebRtc_Word32 SetOnHoldStatus(bool enable, OnHoldModes mode);
+    WebRtc_Word32 GetOnHoldStatus(bool& enabled, OnHoldModes& mode);
+    WebRtc_Word32 RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
+    WebRtc_Word32 DeRegisterVoiceEngineObserver();
+
+    // VoECodec
+    WebRtc_Word32 GetSendCodec(CodecInst& codec);
+    WebRtc_Word32 GetRecCodec(CodecInst& codec);
+    WebRtc_Word32 SetSendCodec(const CodecInst& codec);
+    WebRtc_Word32 SetVADStatus(bool enableVAD, ACMVADMode mode,
+                               bool disableDTX);
+    WebRtc_Word32 GetVADStatus(bool& enabledVAD, ACMVADMode& mode,
+                               bool& disabledDTX);
+    WebRtc_Word32 SetRecPayloadType(const CodecInst& codec);
+    WebRtc_Word32 GetRecPayloadType(CodecInst& codec);
+    WebRtc_Word32 SetAMREncFormat(AmrMode mode);
+    WebRtc_Word32 SetAMRDecFormat(AmrMode mode);
+    WebRtc_Word32 SetAMRWbEncFormat(AmrMode mode);
+    WebRtc_Word32 SetAMRWbDecFormat(AmrMode mode);
+    WebRtc_Word32 SetSendCNPayloadType(int type, PayloadFrequencies frequency);
+    WebRtc_Word32 SetISACInitTargetRate(int rateBps, bool useFixedFrameSize);
+    WebRtc_Word32 SetISACMaxRate(int rateBps);
+    WebRtc_Word32 SetISACMaxPayloadSize(int sizeBytes);
+
+    // VoENetwork
+    WebRtc_Word32 RegisterExternalTransport(Transport& transport);
+    WebRtc_Word32 DeRegisterExternalTransport();
+    WebRtc_Word32 ReceivedRTPPacket(const WebRtc_Word8* data,
+                                    WebRtc_Word32 length);
+    WebRtc_Word32 ReceivedRTCPPacket(const WebRtc_Word8* data,
+                                     WebRtc_Word32 length);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_Word32 GetSourceInfo(int& rtpPort, int& rtcpPort, char ipAddr[64]);
+    WebRtc_Word32 EnableIPv6();
+    bool IPv6IsEnabled() const;
+    WebRtc_Word32 SetSourceFilter(int rtpPort, int rtcpPort,
+                                  const char ipAddr[64]);
+    WebRtc_Word32 GetSourceFilter(int& rtpPort, int& rtcpPort, char ipAddr[64]);
+    WebRtc_Word32 SetSendTOS(int DSCP, int priority, bool useSetSockopt);
+    WebRtc_Word32 GetSendTOS(int &DSCP, int& priority, bool &useSetSockopt);
+#if defined(_WIN32)
+    WebRtc_Word32 SetSendGQoS(bool enable, int serviceType, int overrideDSCP);
+    WebRtc_Word32 GetSendGQoS(bool &enabled, int &serviceType,
+                              int &overrideDSCP);
+#endif
+#endif
+    WebRtc_Word32 SetPacketTimeoutNotification(bool enable, int timeoutSeconds);
+    WebRtc_Word32 GetPacketTimeoutNotification(bool& enabled,
+                                               int& timeoutSeconds);
+    WebRtc_Word32 RegisterDeadOrAliveObserver(VoEConnectionObserver& observer);
+    WebRtc_Word32 DeRegisterDeadOrAliveObserver();
+    WebRtc_Word32 SetPeriodicDeadOrAliveStatus(bool enable,
+                                               int sampleTimeSeconds);
+    WebRtc_Word32 GetPeriodicDeadOrAliveStatus(bool& enabled,
+                                               int& sampleTimeSeconds);
+    WebRtc_Word32 SendUDPPacket(const void* data, unsigned int length,
+                                int& transmittedBytes, bool useRtcpSocket);
+
+    // VoEFile
+    int StartPlayingFileLocally(const char* fileName, const bool loop,
+                                const FileFormats format,
+                                const int startPosition,
+                                const float volumeScaling,
+                                const int stopPosition,
+                                const CodecInst* codecInst);
+    int StartPlayingFileLocally(InStream* stream, const FileFormats format,
+                                const int startPosition,
+                                const float volumeScaling,
+                                const int stopPosition,
+                                const CodecInst* codecInst);
+    int StopPlayingFileLocally();
+    int IsPlayingFileLocally() const;
+    int ScaleLocalFilePlayout(const float scale);
+    int GetLocalPlayoutPosition(int& positionMs);
+    int StartPlayingFileAsMicrophone(const char* fileName, const bool loop,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+    int StartPlayingFileAsMicrophone(InStream* stream,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+    int StopPlayingFileAsMicrophone();
+    int IsPlayingFileAsMicrophone() const;
+    int ScaleFileAsMicrophonePlayout(const float scale);
+    int StartRecordingPlayout(const char* fileName, const CodecInst* codecInst);
+    int StartRecordingPlayout(OutStream* stream, const CodecInst* codecInst);
+    int StopRecordingPlayout();
+
+    void SetMixWithMicStatus(bool mix);
+
+    // VoEExternalMediaProcessing
+    int RegisterExternalMediaProcessing(ProcessingTypes type,
+                                        VoEMediaProcess& processObject);
+    int DeRegisterExternalMediaProcessing(ProcessingTypes type);
+
+    // VoEVolumeControl
+    int GetSpeechOutputLevel(WebRtc_UWord32& level) const;
+    int GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const;
+    int SetMute(const bool enable);
+    bool Mute() const;
+    int SetOutputVolumePan(float left, float right);
+    int GetOutputVolumePan(float& left, float& right) const;
+    int SetChannelOutputVolumeScaling(float scaling);
+    int GetChannelOutputVolumeScaling(float& scaling) const;
+
+    // VoECallReport
+    void ResetDeadOrAliveCounters();
+    int ResetRTCPStatistics();
+    int GetRoundTripTimeSummary(StatVal& delaysMs) const;
+    int GetDeadOrAliveCounters(int& countDead, int& countAlive) const;
+
+    // VoENetEqStats
+    int GetNetworkStatistics(NetworkStatistics& stats);
+
+    // VoEVideoSync
+    int GetDelayEstimate(int& delayMs) const;
+    int SetMinimumPlayoutDelay(int delayMs);
+    int GetPlayoutTimestamp(unsigned int& timestamp);
+    int SetInitTimestamp(unsigned int timestamp);
+    int SetInitSequenceNumber(short sequenceNumber);
+
+    // VoEVideoSyncExtended
+    int GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const;
+
+    // VoEEncryption
+#ifdef WEBRTC_SRTP
+    int EnableSRTPSend(
+            CipherTypes cipherType,
+            int cipherKeyLength,
+            AuthenticationTypes authType,
+            int authKeyLength,
+            int authTagLength,
+            SecurityLevels level,
+            const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+            bool useForRTCP);
+    int DisableSRTPSend();
+    int EnableSRTPReceive(
+            CipherTypes cipherType,
+            int cipherKeyLength,
+            AuthenticationTypes authType,
+            int authKeyLength,
+            int authTagLength,
+            SecurityLevels level,
+            const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+            bool useForRTCP);
+    int DisableSRTPReceive();
+#endif
+    int RegisterExternalEncryption(Encryption& encryption);
+    int DeRegisterExternalEncryption();
+
+    // VoEDtmf
+    int SendTelephoneEventOutband(unsigned char eventCode, int lengthMs,
+                                  int attenuationDb, bool playDtmfEvent);
+    int SendTelephoneEventInband(unsigned char eventCode, int lengthMs,
+                                 int attenuationDb, bool playDtmfEvent);
+    int SetDtmfPlayoutStatus(bool enable);
+    bool DtmfPlayoutStatus() const;
+    int SetSendTelephoneEventPayloadType(unsigned char type);
+    int GetSendTelephoneEventPayloadType(unsigned char& type);
+#ifdef WEBRTC_DTMF_DETECTION
+    int RegisterTelephoneEventDetection(
+            TelephoneEventDetectionMethods detectionMethod,
+            VoETelephoneEventObserver& observer);
+    int DeRegisterTelephoneEventDetection();
+    int GetTelephoneEventDetectionStatus(
+            bool& enabled,
+            TelephoneEventDetectionMethods& detectionMethod);
+#endif
+
+    // VoEAudioProcessingImpl
+    int UpdateRxVadDetection(AudioFrame& audioFrame);
+    int RegisterRxVadObserver(VoERxVadCallback &observer);
+    int DeRegisterRxVadObserver();
+    int VoiceActivityIndicator(int &activity);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    int SetRxAgcStatus(const bool enable, const AgcModes mode);
+    int GetRxAgcStatus(bool& enabled, AgcModes& mode);
+    int SetRxAgcConfig(const AgcConfig config);
+    int GetRxAgcConfig(AgcConfig& config);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NR
+    int SetRxNsStatus(const bool enable, const NsModes mode);
+    int GetRxNsStatus(bool& enabled, NsModes& mode);
+#endif
+
+    // VoERTP_RTCP
+    int RegisterRTPObserver(VoERTPObserver& observer);
+    int DeRegisterRTPObserver();
+    int RegisterRTCPObserver(VoERTCPObserver& observer);
+    int DeRegisterRTCPObserver();
+    int SetLocalSSRC(unsigned int ssrc);
+    int GetLocalSSRC(unsigned int& ssrc);
+    int GetRemoteSSRC(unsigned int& ssrc);
+    int GetRemoteCSRCs(unsigned int arrCSRC[15]);
+    int SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID);
+    int GetRTPAudioLevelIndicationStatus(bool& enable, unsigned char& ID);
+    int SetRTCPStatus(bool enable);
+    int GetRTCPStatus(bool& enabled);
+    int SetRTCP_CNAME(const char cName[256]);
+    int GetRTCP_CNAME(char cName[256]);
+    int GetRemoteRTCP_CNAME(char cName[256]);
+    int GetRemoteRTCPData(unsigned int& NTPHigh, unsigned int& NTPLow,
+                          unsigned int& timestamp,
+                          unsigned int& playoutTimestamp, unsigned int* jitter,
+                          unsigned short* fractionLost);
+    int SendApplicationDefinedRTCPPacket(const unsigned char subType,
+                                         unsigned int name, const char* data,
+                                         unsigned short dataLengthInBytes);
+    int GetRTPStatistics(unsigned int& averageJitterMs,
+                         unsigned int& maxJitterMs,
+                         unsigned int& discardedPackets);
+    int GetRTPStatistics(CallStatistics& stats);
+    int SetFECStatus(bool enable, int redPayloadtype);
+    int GetFECStatus(bool& enabled, int& redPayloadtype);
+    int SetRTPKeepaliveStatus(bool enable, unsigned char unknownPayloadType,
+                              int deltaTransmitTimeSeconds);
+    int GetRTPKeepaliveStatus(bool& enabled, unsigned char& unknownPayloadType,
+                              int& deltaTransmitTimeSeconds);
+    int StartRTPDump(const char fileNameUTF8[1024], RTPDirections direction);
+    int StopRTPDump(RTPDirections direction);
+    bool RTPDumpIsActive(RTPDirections direction);
+    int InsertExtraRTPPacket(unsigned char payloadType, bool markerBit,
+                             const char* payloadData,
+                             unsigned short payloadSize);
+
+public:
+    // From AudioPacketizationCallback in the ACM
+    WebRtc_Word32 SendData(FrameType frameType,
+                           WebRtc_UWord8 payloadType,
+                           WebRtc_UWord32 timeStamp,
+                           const WebRtc_UWord8* payloadData,
+                           WebRtc_UWord16 payloadSize,
+                           const RTPFragmentationHeader* fragmentation);
+    // From ACMVADCallback in the ACM
+    WebRtc_Word32 InFrameType(WebRtc_Word16 frameType);
+
+#ifdef WEBRTC_DTMF_DETECTION
+public: // From AudioCodingFeedback in the ACM
+    int IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool end);
+#endif
+
+public:
+    WebRtc_Word32 OnRxVadDetected(const int vadDecision);
+
+public:
+    // From RtpData in the RTP/RTCP module
+    WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                        const WebRtc_UWord16 payloadSize,
+                                        const WebRtcRTPHeader* rtpHeader);
+
+public:
+    // From RtpFeedback in the RTP/RTCP module
+    WebRtc_Word32 OnInitializeDecoder(
+            const WebRtc_Word32 id,
+            const WebRtc_Word8 payloadType,
+            const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
+            const int frequency,
+            const WebRtc_UWord8 channels,
+            const WebRtc_UWord32 rate);
+
+    void OnPacketTimeout(const WebRtc_Word32 id);
+
+    void OnReceivedPacket(const WebRtc_Word32 id,
+                          const RtpRtcpPacketType packetType);
+
+    void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                               const RTPAliveType alive);
+
+    void OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                               const WebRtc_UWord32 SSRC);
+
+    void OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                               const WebRtc_UWord32 CSRC, const bool added);
+
+public:
+    // From RtcpFeedback in the RTP/RTCP module
+    void OnApplicationDataReceived(const WebRtc_Word32 id,
+                                   const WebRtc_UWord8 subType,
+                                   const WebRtc_UWord32 name,
+                                   const WebRtc_UWord16 length,
+                                   const WebRtc_UWord8* data);
+
+public:
+    // From RtpAudioFeedback in the RTP/RTCP module
+    void OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                  const WebRtc_UWord8 event,
+                                  const bool endOfEvent);
+
+    void OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                              const WebRtc_UWord8 event,
+                              const WebRtc_UWord16 lengthMs,
+                              const WebRtc_UWord8 volume);
+
+public:
+    // From UdpTransportData in the Socket Transport module
+    void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                           const WebRtc_Word32 rtpPacketLength,
+                           const WebRtc_Word8* fromIP,
+                           const WebRtc_UWord16 fromPort);
+
+    void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                            const WebRtc_Word32 rtcpPacketLength,
+                            const WebRtc_Word8* fromIP,
+                            const WebRtc_UWord16 fromPort);
+
+public:
+    // From Transport (called by the RTP/RTCP module)
+    int SendPacket(int /*channel*/, const void *data, int len);
+    int SendRTCPPacket(int /*channel*/, const void *data, int len);
+
+public:
+    // From MixerParticipant
+    WebRtc_Word32 GetAudioFrame(const WebRtc_Word32 id,
+                                AudioFrame& audioFrame);
+    WebRtc_Word32 NeededFrequency(const WebRtc_Word32 id);
+
+public:
+    // From MonitorObserver
+    void OnPeriodicProcess();
+
+public:
+    // From FileCallback
+    void PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs);
+    void RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs);
+    void PlayFileEnded(const WebRtc_Word32 id);
+    void RecordFileEnded(const WebRtc_Word32 id);
+
+public:
+    // Trivial state accessors. Except for Sending(), these read a single
+    // member without locking.
+    WebRtc_UWord32 InstanceId() const
+    {
+        return _instanceId;
+    }
+    WebRtc_Word32 ChannelId() const
+    {
+        return _channelId;
+    }
+    bool Playing() const
+    {
+        return _playing;
+    }
+    bool Sending() const
+    {
+        // A lock is needed because |_sending| is accessed by both
+        // TransmitMixer::PrepareDemux() and StartSend()/StopSend(), which
+        // are called by different threads.
+        CriticalSectionScoped cs(_callbackCritSect);
+        return _sending;
+    }
+    bool Receiving() const
+    {
+        return _receiving;
+    }
+    bool ExternalTransport() const
+    {
+        return _externalTransport;
+    }
+    bool OutputIsOnHold() const
+    {
+        return _outputIsOnHold;
+    }
+    bool InputIsOnHold() const
+    {
+        return _inputIsOnHold;
+    }
+    RtpRtcp* RtpRtcpModulePtr() const
+    {
+        // Returns the address of the referenced module; ownership stays
+        // with the channel.
+        return &_rtpRtcpModule;
+    }
+    WebRtc_Word8 OutputEnergyLevel() const
+    {
+        return _outputAudioLevel.Level();
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    bool SendSocketsInitialized() const
+    {
+        return _socketTransportModule.SendSocketsInitialized();
+    }
+    bool ReceiveSocketsInitialized() const
+    {
+        return _socketTransportModule.ReceiveSocketsInitialized();
+    }
+#endif
+    // Audio pipeline entry points (definitions outside this header).
+    WebRtc_UWord32 Demultiplex(const AudioFrame& audioFrame);
+    WebRtc_UWord32 PrepareEncodeAndSend(int mixingFrequency);
+    WebRtc_UWord32 EncodeAndSend();
+
+private:
+    // Internal helpers for file mixing, DTMF insertion, playout timing
+    // and receive-side processing.
+    int InsertInbandDtmfTone();
+    WebRtc_Word32
+            MixOrReplaceAudioWithFile(const int mixingFrequency);
+    WebRtc_Word32 MixAudioWithFile(AudioFrame& audioFrame,
+                                   const int mixingFrequency);
+    WebRtc_Word32 GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp);
+    void UpdateDeadOrAliveCounters(bool alive);
+    WebRtc_Word32 SendPacketRaw(const void *data, int len, bool RTCP);
+    WebRtc_Word32 UpdatePacketDelay(const WebRtc_UWord32 timestamp,
+                                    const WebRtc_UWord16 sequenceNumber);
+    void RegisterReceiveCodecsToRTPModule();
+    int ApmProcessRx(AudioFrame& audioFrame);
+
+private:
+    // Synchronization primitives held by reference; creation/ownership is
+    // outside this view — TODO confirm against the constructor.
+    CriticalSectionWrapper& _fileCritSect;
+    CriticalSectionWrapper& _callbackCritSect;
+    CriticalSectionWrapper& _transmitCritSect;
+    WebRtc_UWord32 _instanceId;
+    WebRtc_Word32 _channelId;
+
+private:
+    // Per-channel modules held by reference.
+    RtpRtcp& _rtpRtcpModule;
+    AudioCodingModule& _audioCodingModule;
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_UWord8 _numSocketThreads;
+    UdpTransport& _socketTransportModule;
+#endif
+#ifdef WEBRTC_SRTP
+    SrtpModule& _srtpModule;
+#endif
+    RtpDump& _rtpDumpIn;
+    RtpDump& _rtpDumpOut;
+private:
+    // File playout/recording, external media and packet-buffer state.
+    AudioLevel _outputAudioLevel;
+    bool _externalTransport;
+    AudioFrame _audioFrame;
+    WebRtc_UWord8 _audioLevel_dBov;
+    FilePlayer* _inputFilePlayerPtr;
+    FilePlayer* _outputFilePlayerPtr;
+    FileRecorder* _outputFileRecorderPtr;
+    int _inputFilePlayerId;
+    int _outputFilePlayerId;
+    int _outputFileRecorderId;
+    bool _inputFilePlaying;
+    bool _outputFilePlaying;
+    bool _outputFileRecording;
+    DtmfInbandQueue _inbandDtmfQueue;
+    DtmfInband _inbandDtmfGenerator;
+    bool _inputExternalMedia;
+    bool _outputExternalMedia;
+    VoEMediaProcess* _inputExternalMediaCallbackPtr;
+    VoEMediaProcess* _outputExternalMediaCallbackPtr;
+    WebRtc_UWord8* _encryptionRTPBufferPtr;
+    WebRtc_UWord8* _decryptionRTPBufferPtr;
+    WebRtc_UWord8* _encryptionRTCPBufferPtr;
+    WebRtc_UWord8* _decryptionRTCPBufferPtr;
+    WebRtc_UWord32 _timeStamp;
+    WebRtc_UWord8 _sendTelephoneEventPayloadType;
+    WebRtc_UWord32 _playoutTimeStampRTP;
+    WebRtc_UWord32 _playoutTimeStampRTCP;
+    WebRtc_UWord32 _numberOfDiscardedPackets;
+private:
+    // "Uses" relationships: shared engine components, not owned by the
+    // channel unless noted otherwise.
+    Statistics* _engineStatisticsPtr;
+    OutputMixer* _outputMixerPtr;
+    TransmitMixer* _transmitMixerPtr;
+    ProcessThread* _moduleProcessThreadPtr;
+    AudioDeviceModule* _audioDeviceModulePtr;
+    VoiceEngineObserver* _voiceEngineObserverPtr; // owned by base
+    CriticalSectionWrapper* _callbackCritSectPtr; // owned by base
+    Transport* _transportPtr; // WebRtc socket or external transport
+    Encryption* _encryptionPtr; // WebRtc SRTP or external encryption
+    scoped_ptr<AudioProcessing> _rtpAudioProc;
+    AudioProcessing* _rxAudioProcessingModulePtr; // far end AudioProcessing
+#ifdef WEBRTC_DTMF_DETECTION
+    VoETelephoneEventObserver* _telephoneEventDetectionPtr;
+#endif
+    VoERxVadCallback* _rxVadObserverPtr;
+    WebRtc_Word32 _oldVadDecision;
+    WebRtc_Word32 _sendFrameType; // Send data is voice, 1-voice, 0-otherwise
+    VoERTPObserver* _rtpObserverPtr;
+    VoERTCPObserver* _rtcpObserverPtr;
+private:
+    // State flags grouped by the VoE sub-API that drives them.
+    // VoEBase
+    bool _outputIsOnHold;
+    bool _externalPlayout;
+    bool _inputIsOnHold;
+    bool _playing;
+    bool _sending;
+    bool _receiving;
+    bool _mixFileWithMicrophone;
+    bool _rtpObserver;
+    bool _rtcpObserver;
+    // VoEVolumeControl
+    bool _mute;
+    float _panLeft;
+    float _panRight;
+    float _outputGain;
+    // VoEEncryption
+    bool _encrypting;
+    bool _decrypting;
+    // VoEDtmf
+    bool _playOutbandDtmfEvent;
+    bool _playInbandDtmfEvent;
+    bool _inbandTelephoneEventDetection;
+    bool _outOfBandTelephoneEventDetecion;
+    // VoeRTP_RTCP
+    WebRtc_UWord8 _extraPayloadType;
+    bool _insertExtraRTPPacket;
+    bool _extraMarkerBit;
+    WebRtc_UWord32 _lastLocalTimeStamp;
+    WebRtc_Word8 _lastPayloadType;
+    bool _includeAudioLevelIndication;
+    // VoENetwork
+    bool _rtpPacketTimedOut;
+    bool _rtpPacketTimeOutIsEnabled;
+    WebRtc_UWord32 _rtpTimeOutSeconds;
+    bool _connectionObserver;
+    VoEConnectionObserver* _connectionObserverPtr;
+    WebRtc_UWord32 _countAliveDetections;
+    WebRtc_UWord32 _countDeadDetections;
+    AudioFrame::SpeechType _outputSpeechType;
+    // VoEVideoSync
+    WebRtc_UWord32 _averageDelayMs;
+    WebRtc_UWord16 _previousSequenceNumber;
+    WebRtc_UWord32 _previousTimestamp;
+    WebRtc_UWord16 _recPacketDelayMs;
+    // VoEAudioProcessing
+    bool _RxVadDetection;
+    bool _rxApmIsEnabled;
+    bool _rxAgcIsEnabled;
+    bool _rxNsIsEnabled;
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_CHANNEL_H
diff --git a/trunk/src/voice_engine/main/source/channel_manager.cc b/trunk/src/voice_engine/main/source/channel_manager.cc
new file mode 100644
index 0000000..47cec4a
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel_manager.cc
@@ -0,0 +1,161 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel.h"
+#include "channel_manager.h"
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+// Creates the channel manager for one VoiceEngine instance; all item ids
+// start out free (see the ChannelManagerBase constructor).
+ChannelManager::ChannelManager(const WebRtc_UWord32 instanceId) :
+    ChannelManagerBase(),
+    _instanceId(instanceId)
+{
+}
+
+// Destroys every remaining channel via the DeleteItem() callback.
+ChannelManager::~ChannelManager()
+{
+    ChannelManagerBase::DestroyAllItems();
+}
+
+// Allocates a new channel and returns its id through |channelId|.
+// Returns false when no free id exists or channel creation fails.
+bool ChannelManager::CreateChannel(WebRtc_Word32& channelId)
+{
+    return ChannelManagerBase::CreateItem(channelId);
+}
+
+// Removes and deletes the channel with the given id.
+// Returns -1 when the id is unknown, 0 on success.
+WebRtc_Word32 ChannelManager::DestroyChannel(const WebRtc_Word32 channelId)
+{
+    Channel* deleteChannel =
+        static_cast<Channel*> (ChannelManagerBase::RemoveItem(channelId));
+    if (!deleteChannel)
+    {
+        return -1;
+    }
+    delete deleteChannel;
+    return 0;
+}
+
+WebRtc_Word32 ChannelManager::NumOfChannels() const
+{
+    return ChannelManagerBase::NumOfItems();
+}
+
+WebRtc_Word32 ChannelManager::MaxNumOfChannels() const
+{
+    return ChannelManagerBase::MaxNumOfItems();
+}
+
+// Factory callback invoked by ChannelManagerBase::CreateItem();
+// returns NULL when Channel::CreateChannel() fails.
+void* ChannelManager::NewItem(WebRtc_Word32 itemID)
+{
+    Channel* channel;
+    if (Channel::CreateChannel(channel, itemID, _instanceId) == -1)
+    {
+        return NULL;
+    }
+    return static_cast<void*> (channel);
+}
+
+// Deletion callback invoked by ChannelManagerBase::DestroyAllItems().
+void ChannelManager::DeleteItem(void* item)
+{
+    Channel* deleteItem = static_cast<Channel*> (item);
+    delete deleteItem;
+}
+
+// Looks up a channel; on success the shared item lock is left held and
+// must be balanced with ReleaseChannel() (see ScopedChannel).
+Channel* ChannelManager::GetChannel(const WebRtc_Word32 channelId) const
+{
+    return static_cast<Channel*> (ChannelManagerBase::GetItem(channelId));
+}
+
+// Releases the shared item lock taken by GetChannel()/GetChannels().
+void ChannelManager::ReleaseChannel()
+{
+    ChannelManagerBase::ReleaseItem();
+}
+
+// Copies up to |numOfChannels| channel ids into |channelsArray| and
+// updates |numOfChannels| with the count actually written.
+void ChannelManager::GetChannelIds(WebRtc_Word32* channelsArray,
+                                   WebRtc_Word32& numOfChannels) const
+{
+    ChannelManagerBase::GetItemIds(channelsArray, numOfChannels);
+}
+
+// Snapshots all current channels into |channels|; a non-empty snapshot
+// leaves the shared item lock held (released by ScopedChannel's dtor).
+void ChannelManager::GetChannels(MapWrapper& channels) const
+{
+    ChannelManagerBase::GetChannels(channels);
+}
+
+// Iterator constructor: snapshots all existing channels into a local
+// map. ChannelPtr() is not usable after this constructor; iterate with
+// GetFirstChannel()/GetNextChannel() instead.
+ScopedChannel::ScopedChannel(ChannelManager& chManager) :
+    _chManager(chManager),
+    _channelPtr(NULL)
+{
+    // Copy all existing channels to the local map.
+    // It is not possible to utilize the ChannelPtr() API after
+    // this constructor. The intention is that this constructor
+    // is used in combination with the scoped iterator.
+    _chManager.GetChannels(_channels);
+}
+
+// Single-channel constructor: looks up |channelId| and, on success,
+// keeps the shared item lock held for this object's lifetime.
+// ChannelPtr() is NULL when the id is unknown.
+ScopedChannel::ScopedChannel(ChannelManager& chManager,
+                             WebRtc_Word32 channelId) :
+    _chManager(chManager),
+    _channelPtr(NULL)
+{
+    _channelPtr = _chManager.GetChannel(channelId);
+}
+
+ScopedChannel::~ScopedChannel()
+{
+    // Release the shared item lock only if a constructor actually
+    // acquired it (successful lookup or non-empty snapshot).
+    if (_channelPtr != NULL || _channels.Size() != 0)
+    {
+        _chManager.ReleaseChannel();
+    }
+
+    // Delete the map
+    while (_channels.Erase(_channels.First()) == 0)
+        ;
+}
+
+Channel* ScopedChannel::ChannelPtr()
+{
+    return _channelPtr;
+}
+
+// Starts iterating the snapshot; |iterator| is an opaque cursor to pass
+// to GetNextChannel(). Returns NULL for an empty snapshot.
+Channel* ScopedChannel::GetFirstChannel(void*& iterator) const
+{
+    MapItem* it = _channels.First();
+    iterator = (void*) it;
+    if (!it)
+    {
+        return NULL;
+    }
+    return static_cast<Channel*> (it->GetItem());
+}
+
+// Advances the cursor; returns NULL once the snapshot is exhausted.
+Channel* ScopedChannel::GetNextChannel(void*& iterator) const
+{
+    MapItem* it = (MapItem*) iterator;
+    if (!it)
+    {
+        iterator = NULL;
+        return NULL;
+    }
+    it = _channels.Next(it);
+    iterator = (void*) it;
+    if (!it)
+    {
+        return NULL;
+    }
+    return static_cast<Channel*> (it->GetItem());
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/channel_manager.h b/trunk/src/voice_engine/main/source/channel_manager.h
new file mode 100644
index 0000000..6c40ef1
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel_manager.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_H
+#define WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_H
+
+#include "channel_manager_base.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+class ScopedChannel;
+class Channel;
+
+// Manages the pool of voice Channel objects for one VoiceEngine
+// instance. The Channel-typed accessors are private and exposed only to
+// ScopedChannel, which enforces the lock/release protocol.
+class ChannelManager: private ChannelManagerBase
+{
+    friend class ScopedChannel;
+
+public:
+    // Allocates a channel; returns false when the pool is exhausted or
+    // creation fails. The new id is returned through |channelId|.
+    bool CreateChannel(WebRtc_Word32& channelId);
+
+    // Removes and deletes a channel; returns -1 when the id is unknown.
+    WebRtc_Word32 DestroyChannel(const WebRtc_Word32 channelId);
+
+    WebRtc_Word32 MaxNumOfChannels() const;
+
+    WebRtc_Word32 NumOfChannels() const;
+
+    // Copies up to |numOfChannels| channel ids into |channelsArray| and
+    // updates |numOfChannels| with the count actually written.
+    void GetChannelIds(WebRtc_Word32* channelsArray,
+                       WebRtc_Word32& numOfChannels) const;
+
+    ChannelManager(const WebRtc_UWord32 instanceId);
+
+    ~ChannelManager();
+
+private:
+    // Non-copyable (declared, not defined).
+    ChannelManager(const ChannelManager&);
+
+    ChannelManager& operator=(const ChannelManager&);
+
+    // Lookup/snapshot helpers for ScopedChannel; a successful call
+    // leaves the shared item lock held until ReleaseChannel().
+    Channel* GetChannel(const WebRtc_Word32 channelId) const;
+
+    void GetChannels(MapWrapper& channels) const;
+
+    void ReleaseChannel();
+
+    // ChannelManagerBase factory callbacks.
+    virtual void* NewItem(WebRtc_Word32 itemID);
+
+    virtual void DeleteItem(void* item);
+
+    WebRtc_UWord32 _instanceId;
+};
+
+// RAII helper that pins channels while in use: either a single channel
+// (two-argument constructor) or a snapshot of all channels for
+// iteration (one-argument constructor). The destructor releases the
+// shared item lock held by the underlying manager.
+class ScopedChannel
+{
+public:
+    // Iterator form: snapshots all channels; use GetFirstChannel()/
+    // GetNextChannel(), not ChannelPtr().
+    ScopedChannel(ChannelManager& chManager);
+
+    // Single-channel form; ChannelPtr() is NULL if |channelId| is
+    // unknown.
+    ScopedChannel(ChannelManager& chManager, WebRtc_Word32 channelId);
+
+    Channel* ChannelPtr();
+
+    Channel* GetFirstChannel(void*& iterator) const;
+
+    Channel* GetNextChannel(void*& iterator) const;
+
+    ~ScopedChannel();
+private:
+    ChannelManager& _chManager;
+    Channel* _channelPtr;
+    MapWrapper _channels;
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_H
diff --git a/trunk/src/voice_engine/main/source/channel_manager_base.cc b/trunk/src/voice_engine/main/source/channel_manager_base.cc
new file mode 100644
index 0000000..ea9938f
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel_manager_base.cc
@@ -0,0 +1,227 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel_manager_base.h"
+
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include <cassert>
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+// Creates the lock objects and marks every item id as free.
+ChannelManagerBase::ChannelManagerBase() :
+    _itemsCritSectPtr(CriticalSectionWrapper::CreateCriticalSection()),
+    _itemsRWLockPtr(RWLockWrapper::CreateRWLock())
+{
+    for (int i = 0; i < KMaxNumberOfItems; i++)
+    {
+        _freeItemIds[i] = true;
+    }
+}
+
+// Frees the lock objects; items themselves are destroyed by the
+// subclass (see DestroyAllItems()).
+ChannelManagerBase::~ChannelManagerBase()
+{
+    if (_itemsRWLockPtr)
+    {
+        delete _itemsRWLockPtr;
+        _itemsRWLockPtr = NULL;
+    }
+    if (_itemsCritSectPtr)
+    {
+        delete _itemsCritSectPtr;
+        _itemsCritSectPtr = NULL;
+    }
+}
+
+// Claims the first available slot in the free-id table and writes its
+// index to |itemId|. Returns false when every slot is already taken.
+bool ChannelManagerBase::GetFreeItemId(WebRtc_Word32& itemId)
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    for (WebRtc_Word32 candidate = 0; candidate < KMaxNumberOfItems;
+         candidate++)
+    {
+        if (_freeItemIds[candidate])
+        {
+            _freeItemIds[candidate] = false;
+            itemId = candidate;
+            return true;
+        }
+    }
+    return false;
+}
+
+// Returns |itemId| to the free pool. Callers (RemoveItem(), CreateItem()
+// error path) already hold the items lock, hence no locking here.
+void ChannelManagerBase::AddFreeItemId(WebRtc_Word32 itemId)
+{
+    assert(itemId < KMaxNumberOfItems);
+    _freeItemIds[itemId] = true;
+}
+
+// Marks every id as taken; used during teardown so no new ids can be
+// handed out (see DestroyAllItems()).
+void ChannelManagerBase::RemoveFreeItemIds()
+{
+    for (int i = 0; i < KMaxNumberOfItems; i++)
+    {
+        _freeItemIds[i] = false;
+    }
+}
+
+// Reserves a free item id, constructs the item via the subclass factory
+// NewItem(), and inserts it into the item map. Returns false when all
+// ids are in use or construction fails; the id is returned through
+// |itemId| on success.
+bool ChannelManagerBase::CreateItem(WebRtc_Word32& itemId)
+{
+    _itemsCritSectPtr->Enter();
+    void* itemPtr;
+    itemId = -1;
+    const bool success = GetFreeItemId(itemId);
+    if (!success)
+    {
+        _itemsCritSectPtr->Leave();
+        return false;
+    }
+    itemPtr = NewItem(itemId);
+    if (!itemPtr)
+    {
+        // Fix: return the reserved id to the free pool. Previously a
+        // failed NewItem() leaked the id, permanently shrinking the
+        // channel pool. The items lock is still held here, as
+        // AddFreeItemId() requires.
+        AddFreeItemId(itemId);
+        _itemsCritSectPtr->Leave();
+        return false;
+    }
+    _itemsCritSectPtr->Leave();
+    InsertItem(itemId, itemPtr);
+
+    return true;
+}
+
+// Stores |item| in the map under |itemId|; the id must not already be
+// present (it was reserved by GetFreeItemId()).
+void ChannelManagerBase::InsertItem(WebRtc_Word32 itemId, void* item)
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    assert(!_items.Find(itemId));
+    _items.Insert(itemId, item);
+}
+
+// Removes the item with |itemId| from the map and returns it, or 0 when
+// not found. Takes the write lock, so it blocks until all readers that
+// pinned items via GetItem()/GetChannels() have called ReleaseItem().
+void*
+ChannelManagerBase::RemoveItem(WebRtc_Word32 itemId)
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    WriteLockScoped wlock(*_itemsRWLockPtr);
+    MapItem* it = _items.Find(itemId);
+    if (!it)
+    {
+        return 0;
+    }
+    void* returnItem = it->GetItem();
+    _items.Erase(it);
+    AddFreeItemId(itemId);
+
+    return returnItem;
+}
+
+// Deletes every item through the subclass' DeleteItem() and then marks
+// all ids as taken.
+// NOTE(review): unlike RemoveItem(), no write lock is taken here, so
+// callers must guarantee no item is still pinned — confirm.
+void ChannelManagerBase::DestroyAllItems()
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    MapItem* it = _items.First();
+    while (it)
+    {
+        DeleteItem(it->GetItem());
+        _items.Erase(it);
+        it = _items.First();
+    }
+    RemoveFreeItemIds();
+}
+
+// NOTE(review): reads |_items| without the items lock; racy if called
+// concurrently with CreateItem()/RemoveItem() — confirm intended.
+WebRtc_Word32 ChannelManagerBase::NumOfItems() const
+{
+    return _items.Size();
+}
+
+// Compile-time pool capacity.
+WebRtc_Word32 ChannelManagerBase::MaxNumOfItems() const
+{
+    return static_cast<WebRtc_Word32> (KMaxNumberOfItems);
+}
+
+// Looks up an item; on success the shared read lock is acquired and
+// left held — the caller must balance with ReleaseItem(). Returns 0
+// (and takes no read lock) when the id is unknown.
+void*
+ChannelManagerBase::GetItem(WebRtc_Word32 itemId) const
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    MapItem* it = _items.Find(itemId);
+    if (!it)
+    {
+        return 0;
+    }
+    _itemsRWLockPtr->AcquireLockShared();
+    return it->GetItem();
+}
+
+// Starts iteration over the item map; |iterator| is an opaque cursor
+// for GetNextItem(). Note: no read lock is taken here, unlike GetItem().
+void*
+ChannelManagerBase::GetFirstItem(void*& iterator) const
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    MapItem* it = _items.First();
+    iterator = (void*) it;
+    if (!it)
+    {
+        return 0;
+    }
+    return it->GetItem();
+}
+
+// Advances the cursor; returns 0 (and clears |iterator|) at the end.
+void*
+ChannelManagerBase::GetNextItem(void*& iterator) const
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    MapItem* it = (MapItem*) iterator;
+    if (!it)
+    {
+        iterator = 0;
+        return 0;
+    }
+    it = _items.Next(it);
+    iterator = (void*) it;
+    if (!it)
+    {
+        return 0;
+    }
+    return it->GetItem();
+}
+
+// Releases the shared read lock taken by GetItem()/GetChannels().
+void ChannelManagerBase::ReleaseItem()
+{
+    _itemsRWLockPtr->ReleaseLockShared();
+}
+
+// Copies up to |numOfChannels| existing item ids into |channelsArray|
+// and updates |numOfChannels| with the number actually copied.
+void ChannelManagerBase::GetItemIds(WebRtc_Word32* channelsArray,
+                                    WebRtc_Word32& numOfChannels) const
+{
+    // Fix: take the items lock, as every other accessor of |_items|
+    // does (the member is documented as protected by this critical
+    // section); without it a concurrent CreateItem()/RemoveItem() could
+    // mutate the map while it is being iterated here.
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    MapItem* it = _items.First();
+    numOfChannels = (numOfChannels <= _items.Size()) ?
+        numOfChannels : _items.Size();
+    for (int i = 0; i < numOfChannels && it != NULL; i++)
+    {
+        channelsArray[i] = it->GetId();
+        it = _items.Next(it);
+    }
+}
+
+// Copies all items into |channels|. When at least one item exists the
+// shared read lock is acquired and left held; the caller releases it
+// via ReleaseItem() (see ScopedChannel's destructor).
+void ChannelManagerBase::GetChannels(MapWrapper& channels) const
+{
+    CriticalSectionScoped cs(*_itemsCritSectPtr);
+    if (_items.Size() == 0)
+    {
+        return;
+    }
+    _itemsRWLockPtr->AcquireLockShared();
+    for (MapItem* it = _items.First(); it != NULL; it = _items.Next(it))
+    {
+        channels.Insert(it->GetId(), it->GetItem());
+    }
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/channel_manager_base.h b/trunk/src/voice_engine/main/source/channel_manager_base.h
new file mode 100644
index 0000000..0831e43
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel_manager_base.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_BASE_H
+#define WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_BASE_H
+
+#include "typedefs.h"
+#include "map_wrapper.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class RWLockWrapper;
+
+namespace voe
+{
+
+class ScopedChannel;
+class Channel;
+
+// Generic, lock-protected pool of opaque items indexed by small integer
+// ids. Subclasses supply construction and destruction through the pure
+// virtual NewItem()/DeleteItem() factory methods.
+class ChannelManagerBase
+{
+public:
+    enum {KMaxNumberOfItems = kVoiceEngineMaxNumOfChannels};
+
+protected:
+    // Reserves an id, builds the item via NewItem() and stores it.
+    bool CreateItem(WebRtc_Word32& itemId);
+
+    void InsertItem(WebRtc_Word32 itemId, void* item);
+
+    // Blocks until no reader pins the item, then removes and returns it.
+    void* RemoveItem(WebRtc_Word32 itemId);
+
+    // On success the shared read lock is left held; pair with
+    // ReleaseItem().
+    void* GetItem(WebRtc_Word32 itemId) const;
+
+    void* GetFirstItem(void*& iterator) const ;
+
+    void* GetNextItem(void*& iterator) const;
+
+    void ReleaseItem();
+
+    // Free-id bookkeeping; AddFreeItemId() expects the items lock held.
+    void AddFreeItemId(WebRtc_Word32 itemId);
+
+    bool GetFreeItemId(WebRtc_Word32& itemId);
+
+    void RemoveFreeItemIds();
+
+    void DestroyAllItems();
+
+    WebRtc_Word32 NumOfItems() const;
+
+    WebRtc_Word32 MaxNumOfItems() const;
+
+    void GetItemIds(WebRtc_Word32* channelsArray,
+                    WebRtc_Word32& numOfChannels) const;
+
+    void GetChannels(MapWrapper& channels) const;
+
+    // Factory callbacks implemented by the subclass.
+    virtual void* NewItem(WebRtc_Word32 itemId) = 0;
+
+    virtual void DeleteItem(void* item) = 0;
+
+    ChannelManagerBase();
+
+    virtual ~ChannelManagerBase();
+
+private:
+    // Protects _items and _freeItemIds
+    CriticalSectionWrapper* _itemsCritSectPtr;
+
+    MapWrapper _items;
+
+    bool _freeItemIds[KMaxNumberOfItems];
+
+    // Protects channels from being destroyed while being used
+    RWLockWrapper* _itemsRWLockPtr;
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_BASE_H
diff --git a/trunk/src/voice_engine/main/source/channel_unittest.cc b/trunk/src/voice_engine/main/source/channel_unittest.cc
new file mode 100644
index 0000000..fc78679
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/channel_unittest.cc
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel.h"
+#include "gtest/gtest.h"
+
+// Placeholder test so the channel target registers with the coverage
+// tooling; no channel behavior is exercised yet.
+TEST(ChannelTest, EmptyTestToGetCodeCoverage) {}
diff --git a/trunk/src/voice_engine/main/source/dtmf_inband.cc b/trunk/src/voice_engine/main/source/dtmf_inband.cc
new file mode 100644
index 0000000..473af10
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/dtmf_inband.cc
@@ -0,0 +1,389 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dtmf_inband.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include <cassert>
+
+namespace webrtc {
+
+// Oscillator coefficient tables, one entry per DTMF frequency: four
+// low-group (row) entries followed by four high-group (column) entries.
+// The first set of tables holds a*2 = 2*cos(2*pi*f/fs) in Q14, used in
+// the recursion y[n] = a*2*y[n-1] - y[n-2] (see DtmfFix_generateSignal).
+const WebRtc_Word16 Dtmf_a_times2Tab8Khz[8]=
+{
+	27978, 26956, 25701, 24219,
+	19073, 16325, 13085, 9314
+};
+
+const WebRtc_Word16 Dtmf_a_times2Tab16Khz[8]=
+{
+	31548, 31281, 30951, 30556,
+	29144, 28361, 27409, 26258
+};
+
+const WebRtc_Word16 Dtmf_a_times2Tab32Khz[8]=
+{
+	32462,32394, 32311, 32210, 31849, 31647, 31400, 31098
+};
+
+// Second table is sin(2*pi*f/fs) in Q14; it seeds the oscillator state
+// when a tone (re)starts (see DtmfFix_generate()).
+
+const WebRtc_Word16 Dtmf_ym2Tab8Khz[8]=
+{
+	8527, 9315, 10163, 11036,
+	13322, 14206, 15021, 15708
+};
+
+const WebRtc_Word16 Dtmf_ym2Tab16Khz[8]=
+{
+	4429, 4879, 5380, 5918,
+	7490, 8207, 8979, 9801
+};
+
+const WebRtc_Word16 Dtmf_ym2Tab32Khz[8]=
+{
+	2235, 2468, 2728, 3010, 3853, 4249, 4685, 5164
+};
+
+// Q14 amplitude scale factors for 0..36 dB of attenuation, indexed by
+// the |volume| argument of DtmfFix_generate().
+const WebRtc_Word16 Dtmf_dBm0kHz[37]=
+{
+       16141,      14386,      12821,      11427,      10184,       9077,
+        8090,       7210,       6426,       5727,       5104,       4549,
+        4054,       3614,       3221,       2870,       2558,       2280,
+        2032,       1811,       1614,       1439,       1282,       1143,
+        1018,        908,        809,        721,        643,        573,
+         510,        455,        405,        361,        322,        287,
+		 256
+};
+
+
+// Constructs an in-band DTMF generator for trace id |id| with an 8 kHz
+// default output rate and cleared oscillator state.
+DtmfInband::DtmfInband(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _outputFrequencyHz(8000),
+    _frameLengthSamples(0),
+    _remainingSamples(0),
+    _eventCode(0),
+    _attenuationDb(0),
+    _lengthMs(0),
+    _reinit(true),
+    _playing(false),
+    _delaySinceLastToneMS(1000)
+{
+    memset(_oldOutputLow, 0, sizeof(_oldOutputLow));
+    memset(_oldOutputHigh, 0, sizeof(_oldOutputHigh));
+}
+
+// Frees the critical section created in the constructor (held by
+// reference, hence delete-by-address).
+DtmfInband::~DtmfInband()
+{
+	delete &_critSect;
+}
+
+// Sets the tone sample rate; only 8, 16 and 32 kHz are supported
+// (returns -1 and asserts in debug builds otherwise).
+int
+DtmfInband::SetSampleRate(const WebRtc_UWord16 frequency)
+{
+    if (frequency != 8000 &&
+            frequency != 16000 &&
+            frequency != 32000)
+    {
+        // invalid sample rate
+        assert(false);
+        return -1;
+    }
+    _outputFrequencyHz = frequency;
+    return 0;
+}
+
+// Reports the current tone sample rate; always succeeds.
+int
+DtmfInband::GetSampleRate(WebRtc_UWord16& frequency)
+{
+    frequency = _outputFrequencyHz;
+    return 0;
+}
+
+// Resets all tone state back to construction-time defaults; the sample
+// rate is kept.
+void 
+DtmfInband::Init()
+{
+    _remainingSamples = 0;
+    _frameLengthSamples = 0;
+    _eventCode = 0;
+    _attenuationDb = 0;
+    _lengthMs = 0;
+    _reinit = true;
+    _oldOutputLow[0] = 0;
+    _oldOutputLow[1] = 0;
+    _oldOutputHigh[0] = 0;
+    _oldOutputHigh[1] = 0;
+    _delaySinceLastToneMS = 1000;
+}
+
+// Queues a fixed-length tone of |lengthMs| ms for event |eventCode|
+// (0-15) attenuated by |attenuationDb| dB (0-36). A new tone interrupts
+// any ongoing one (with a trace warning).
+// NOTE(review): IsAddingTone() re-locks |_critSect| while this method
+// already holds it — relies on the wrapper being recursive; confirm.
+int
+DtmfInband::AddTone(const WebRtc_UWord8 eventCode,
+                    WebRtc_Word32 lengthMs,
+                    WebRtc_Word32 attenuationDb)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (attenuationDb > 36 || eventCode > 15)
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (IsAddingTone())
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_id,-1),
+                   "DtmfInband::AddTone() new tone interrupts ongoing tone");
+    }
+
+    ReInit();
+
+    _frameLengthSamples = static_cast<WebRtc_Word16> (_outputFrequencyHz / 100);
+    _eventCode = static_cast<WebRtc_Word16> (eventCode);
+    _attenuationDb = static_cast<WebRtc_Word16> (attenuationDb);
+    _remainingSamples = static_cast<WebRtc_Word32>
+        (lengthMs * (_outputFrequencyHz / 1000));
+    _lengthMs = lengthMs;
+
+    return 0;
+}
+
+// Restarts the previously configured tone from the beginning, reusing
+// the stored |_lengthMs|.
+int
+DtmfInband::ResetTone()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    ReInit();
+
+    _frameLengthSamples = static_cast<WebRtc_Word16> (_outputFrequencyHz / 100);
+    _remainingSamples = static_cast<WebRtc_Word32>
+        (_lengthMs * (_outputFrequencyHz / 1000));
+
+    return 0;
+}
+
+// Starts an open-ended tone (terminated by StopTone()); fails when a
+// tone is already active or the arguments are out of range.
+int
+DtmfInband::StartTone(const WebRtc_UWord8 eventCode,
+                      WebRtc_Word32 attenuationDb)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (attenuationDb > 36 || eventCode > 15)
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (IsAddingTone())
+    {
+            return -1;
+    }
+
+    ReInit();
+
+    _frameLengthSamples = static_cast<WebRtc_Word16> (_outputFrequencyHz / 100);
+    _eventCode = static_cast<WebRtc_Word16> (eventCode);
+    _attenuationDb = static_cast<WebRtc_Word16> (attenuationDb);
+    _playing = true;
+
+    return 0;
+}
+
+// Ends an open-ended tone; a no-op (success) when nothing is playing.
+int
+DtmfInband::StopTone()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    if (!_playing)
+    {
+        return 0;
+    }
+
+    _playing = false;
+
+    return 0;
+}
+
+// Shall be called between tones; flags the oscillator state for
+// re-seeding on the next DtmfFix_generate() call.
+void 
+DtmfInband::ReInit()
+{
+    _reinit = true;
+}
+
+// True while a queued tone still has samples left or an open-ended tone
+// is playing.
+bool 
+DtmfInband::IsAddingTone()
+{
+    CriticalSectionScoped lock(_critSect);
+    return (_remainingSamples > 0 || _playing);
+}
+
+// Renders the next 10 ms of the active tone into |output| and reports
+// the number of samples written through |outputSizeInSamples|.
+// NOTE(review): does not itself check that samples remain; callers
+// appear to gate on IsAddingTone() — confirm.
+int
+DtmfInband::Get10msTone(WebRtc_Word16 output[320],
+                        WebRtc_UWord16& outputSizeInSamples)
+{
+    CriticalSectionScoped lock(_critSect);
+    if (DtmfFix_generate(output,
+                         _eventCode,
+                         _attenuationDb,
+                         _frameLengthSamples,
+                         _outputFrequencyHz) == -1)
+    {
+        return -1;
+    }
+    _remainingSamples -= _frameLengthSamples;
+    outputSizeInSamples = _frameLengthSamples;
+    _delaySinceLastToneMS = 0;
+    return 0;
+}
+
+// Advances the "time since last tone" clock by one 10 ms frame.
+void
+DtmfInband::UpdateDelaySinceLastTone()
+{
+    _delaySinceLastToneMS += kDtmfFrameSizeMs;
+    // avoid wraparound
+    if (_delaySinceLastToneMS > (1<<30))
+    {
+        _delaySinceLastToneMS = 1000;
+    }
+}
+
+// Milliseconds elapsed since the last tone sample was produced.
+WebRtc_UWord32
+DtmfInband::DelaySinceLastTone() const
+{
+    return _delaySinceLastToneMS;
+}
+
+// Renders |frameLen| samples of the dual-tone signal for DTMF event
+// |value| (0-15) into |decoded|. Selects the low-group (row) and
+// high-group (column) oscillator coefficients for sample rate |fs|,
+// seeding the recursion state from the sin tables on the first frame of
+// a tone (|_reinit|). |volume| indexes Dtmf_dBm0kHz (0-36 dB).
+// Returns -1 for an unsupported sample rate, otherwise the result of
+// DtmfFix_generateSignal().
+WebRtc_Word16
+DtmfInband::DtmfFix_generate(WebRtc_Word16 *decoded,
+                             const WebRtc_Word16 value,
+                             const WebRtc_Word16 volume,
+                             const WebRtc_Word16 frameLen,
+                             const WebRtc_Word16 fs)
+{
+    const WebRtc_Word16 *a_times2Tbl;
+    const WebRtc_Word16 *y2_Table;
+    WebRtc_Word16 a1_times2 = 0, a2_times2 = 0;
+
+    // Pick the coefficient tables matching the sample rate.
+    if (fs==8000) {
+        a_times2Tbl=Dtmf_a_times2Tab8Khz;
+        y2_Table=Dtmf_ym2Tab8Khz;
+    } else if (fs==16000) {
+        a_times2Tbl=Dtmf_a_times2Tab16Khz;
+        y2_Table=Dtmf_ym2Tab16Khz;
+    } else if (fs==32000) {
+        a_times2Tbl=Dtmf_a_times2Tab32Khz;
+        y2_Table=Dtmf_ym2Tab32Khz;
+    } else {
+        return(-1);
+    }
+
+    // Low-group (row) coefficient: events on the same keypad row share
+    // a table index 0-3.
+    if ((value==1)||(value==2)||(value==3)||(value==12)) {
+        a1_times2=a_times2Tbl[0];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[0];
+            _oldOutputLow[1]=0;
+        }
+    } else if ((value==4)||(value==5)||(value==6)||(value==13)) {
+        a1_times2=a_times2Tbl[1];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[1];
+            _oldOutputLow[1]=0;
+        }
+    } else if ((value==7)||(value==8)||(value==9)||(value==14)) {
+        a1_times2=a_times2Tbl[2];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[2];
+            _oldOutputLow[1]=0;
+        }
+    } else if ((value==10)||(value==0)||(value==11)||(value==15)) {
+        a1_times2=a_times2Tbl[3];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[3];
+            _oldOutputLow[1]=0;
+        }
+    }
+    // High-group (column) coefficient, table index 4-7; the |_reinit|
+    // flag is consumed here once both oscillators are seeded.
+    if ((value==1)||(value==4)||(value==7)||(value==10)) {
+        a2_times2=a_times2Tbl[4];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[4];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    } else if ((value==2)||(value==5)||(value==8)||(value==0)) {
+        a2_times2=a_times2Tbl[5];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[5];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    } else if ((value==3)||(value==6)||(value==9)||(value==11)) {
+        a2_times2=a_times2Tbl[6];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[6];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    } else if ((value==12)||(value==13)||(value==14)||(value==15)) {
+        a2_times2=a_times2Tbl[7];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[7];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    }
+
+    return (DtmfFix_generateSignal(a1_times2,
+                                   a2_times2,
+                                   volume,
+                                   decoded,
+                                   frameLen));
+}
+
+// Runs the two second-order digital oscillators (state in
+// _oldOutputLow/_oldOutputHigh), mixes the low and high components with
+// the low band 3 dB weaker (kDtmfAmpLow vs kDtmfAmpHigh, see header),
+// normalizes to Q14 and scales by the dBm0 table entry for |volume|.
+// Always returns 0.
+WebRtc_Word16
+DtmfInband::DtmfFix_generateSignal(const WebRtc_Word16 a1_times2,
+                                   const WebRtc_Word16 a2_times2,
+                                   const WebRtc_Word16 volume,
+                                   WebRtc_Word16 *signal,
+                                   const WebRtc_Word16 length)
+{
+    int i;
+
+    /* Generate Signal */
+    for (i=0;i<length;i++) {
+        WebRtc_Word32 tempVal;
+        WebRtc_Word16 tempValLow, tempValHigh;
+
+        /* Use recursion formula y[n] = a*2*y[n-1] - y[n-2] */
+        tempValLow  = (WebRtc_Word16)(((( (WebRtc_Word32)(a1_times2 *
+            _oldOutputLow[1])) + 8192) >> 14) - _oldOutputLow[0]);
+        tempValHigh = (WebRtc_Word16)(((( (WebRtc_Word32)(a2_times2 *
+            _oldOutputHigh[1])) + 8192) >> 14) - _oldOutputHigh[0]);
+
+        /* Update memory */
+        _oldOutputLow[0]=_oldOutputLow[1];
+        _oldOutputLow[1]=tempValLow;
+        _oldOutputHigh[0]=_oldOutputHigh[1];
+        _oldOutputHigh[1]=tempValHigh;
+
+        tempVal = (WebRtc_Word32)(kDtmfAmpLow * tempValLow) +
+            (WebRtc_Word32)(kDtmfAmpHigh * tempValHigh);
+
+        /* Norm the signal to Q14 */
+        tempVal=(tempVal+16384)>>15;
+
+        /* Scale the signal to correct dbM0 value */
+        signal[i]=(WebRtc_Word16)((tempVal*Dtmf_dBm0kHz[volume]+8192)>>14);
+    }
+
+    return(0);
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/dtmf_inband.h b/trunk/src/voice_engine/main/source/dtmf_inband.h
new file mode 100644
index 0000000..806fff0
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/dtmf_inband.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_H
+#define WEBRTC_VOICE_ENGINE_DTMF_INBAND_H
+
+#if _MSC_VER > 1000
+#pragma once
+#endif
+
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class DtmfInband
+{
+public:
+    DtmfInband(const WebRtc_Word32 id);
+
+    virtual ~DtmfInband();
+
+    void Init();
+
+    int SetSampleRate(const WebRtc_UWord16 frequency);
+
+    int GetSampleRate(WebRtc_UWord16& frequency);
+
+    int AddTone(const WebRtc_UWord8 eventCode,
+                WebRtc_Word32 lengthMs,
+                WebRtc_Word32 attenuationDb);
+
+    int ResetTone();
+    int StartTone(const WebRtc_UWord8 eventCode,
+                  WebRtc_Word32 attenuationDb);
+
+    int StopTone();
+
+    bool IsAddingTone();
+
+    int Get10msTone(WebRtc_Word16 output[320],
+                    WebRtc_UWord16& outputSizeInSamples);
+
+    WebRtc_UWord32 DelaySinceLastTone() const;
+
+    void UpdateDelaySinceLastTone();
+
+private:
+    void ReInit();
+    WebRtc_Word16 DtmfFix_generate(WebRtc_Word16* decoded,
+                                   const WebRtc_Word16 value,
+                                   const WebRtc_Word16 volume,
+                                   const WebRtc_Word16 frameLen,
+                                   const WebRtc_Word16 fs);
+
+private:
+    enum {kDtmfFrameSizeMs = 10};
+    enum {kDtmfAmpHigh = 32768};
+    enum {kDtmfAmpLow  = 23171};	// 3 dB lower than the high frequency
+
+    WebRtc_Word16 DtmfFix_generateSignal(const WebRtc_Word16 a1_times2,
+                                         const WebRtc_Word16 a2_times2,
+                                         const WebRtc_Word16 volume,
+                                         WebRtc_Word16* signal,
+                                         const WebRtc_Word16 length);
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    WebRtc_UWord16 _outputFrequencyHz;  // {8000, 16000, 32000}
+    WebRtc_Word16 _oldOutputLow[2];     // Data needed for oscillator model
+    WebRtc_Word16 _oldOutputHigh[2];    // Data needed for oscillator model
+    WebRtc_Word16 _frameLengthSamples;  // {80, 160, 320}
+    WebRtc_Word32 _remainingSamples;
+    WebRtc_Word16 _eventCode;           // [0, 15]
+    WebRtc_Word16 _attenuationDb;       // [0, 36]
+    WebRtc_Word32 _lengthMs;
+    bool _reinit;  // 'true' if the oscillator should be reinit for next event
+    bool _playing;
+    WebRtc_UWord32 _delaySinceLastToneMS; // time since last generated tone [ms]
+};
+
+}   // namespace webrtc
+
+#endif // #ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_H
diff --git a/trunk/src/voice_engine/main/source/dtmf_inband_queue.cc b/trunk/src/voice_engine/main/source/dtmf_inband_queue.cc
new file mode 100644
index 0000000..080ef3e
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/dtmf_inband_queue.cc
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dtmf_inband_queue.h"
+#include "trace.h"
+
+namespace webrtc {
+
+DtmfInbandQueue::DtmfInbandQueue(const WebRtc_Word32 id):
+    _id(id),
+    _DtmfCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _nextEmptyIndex(0)
+{
+    memset(_DtmfKey,0, sizeof(_DtmfKey));
+    memset(_DtmfLen,0, sizeof(_DtmfLen));
+    memset(_DtmfLevel,0, sizeof(_DtmfLevel));
+}
+
+DtmfInbandQueue::~DtmfInbandQueue()
+{
+    delete &_DtmfCritsect;
+}
+
+int
+DtmfInbandQueue::AddDtmf(WebRtc_UWord8 key,
+                         WebRtc_UWord16 len,
+                         WebRtc_UWord8 level)
+{
+    CriticalSectionScoped lock(_DtmfCritsect);
+
+    if (_nextEmptyIndex >= kDtmfInbandMax)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_id,-1),
+                   "DtmfInbandQueue::AddDtmf() unable to add Dtmf tone");
+        return -1;
+    }
+    WebRtc_Word32 index = _nextEmptyIndex;
+    _DtmfKey[index] = key;
+    _DtmfLen[index] = len;
+    _DtmfLevel[index] = level;
+    _nextEmptyIndex++;
+    return 0;
+}
+
+WebRtc_Word8
+DtmfInbandQueue::NextDtmf(WebRtc_UWord16* len, WebRtc_UWord8* level)
+{
+    CriticalSectionScoped lock(_DtmfCritsect);
+
+    if(!PendingDtmf())
+    {
+        return -1;
+    }
+    WebRtc_Word8 nextDtmf = _DtmfKey[0];
+    *len=_DtmfLen[0];
+    *level=_DtmfLevel[0];
+
+    memmove(&(_DtmfKey[0]), &(_DtmfKey[1]),
+            _nextEmptyIndex*sizeof(WebRtc_UWord8));
+    memmove(&(_DtmfLen[0]), &(_DtmfLen[1]),
+            _nextEmptyIndex*sizeof(WebRtc_UWord16));
+    memmove(&(_DtmfLevel[0]), &(_DtmfLevel[1]),
+            _nextEmptyIndex*sizeof(WebRtc_UWord8));
+
+    _nextEmptyIndex--;
+    return nextDtmf;
+}
+
+bool 
+DtmfInbandQueue::PendingDtmf()
+{
+    return(_nextEmptyIndex>0);        
+}
+
+void 
+DtmfInbandQueue::ResetDtmf()
+{
+    _nextEmptyIndex = 0;
+}
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/dtmf_inband_queue.h b/trunk/src/voice_engine/main/source/dtmf_inband_queue.h
new file mode 100644
index 0000000..b3bd39e
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/dtmf_inband_queue.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
+#define WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
+
+#include "critical_section_wrapper.h"
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+
+namespace webrtc {
+
+class DtmfInbandQueue
+{
+public:
+
+    DtmfInbandQueue(const WebRtc_Word32 id);
+
+    virtual ~DtmfInbandQueue();
+
+    int AddDtmf(WebRtc_UWord8 DtmfKey,
+                WebRtc_UWord16 len,
+                WebRtc_UWord8 level);
+
+    WebRtc_Word8 NextDtmf(WebRtc_UWord16* len, WebRtc_UWord8* level);
+
+    bool PendingDtmf();
+
+    void ResetDtmf();
+
+private:
+    enum {kDtmfInbandMax = 20};
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _DtmfCritsect;
+    WebRtc_UWord8 _nextEmptyIndex;
+    WebRtc_UWord8 _DtmfKey[kDtmfInbandMax];
+    WebRtc_UWord16 _DtmfLen[kDtmfInbandMax];
+    WebRtc_UWord8 _DtmfLevel[kDtmfInbandMax];
+};
+
+}   // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
diff --git a/trunk/src/voice_engine/main/source/level_indicator.cc b/trunk/src/voice_engine/main/source/level_indicator.cc
new file mode 100644
index 0000000..89004a5
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/level_indicator.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "level_indicator.h"
+#include "module_common_types.h"
+#include "signal_processing_library.h"
+
+namespace webrtc {
+
+namespace voe {
+
+
+// Number of bars on the indicator.
+// Note that the number of elements is specified because we are indexing it
+// in the range of 0-32
+const WebRtc_Word8 permutation[33] =
+    {0,1,2,3,4,4,5,5,5,5,6,6,6,6,6,7,7,7,7,8,8,8,9,9,9,9,9,9,9,9,9,9,9};
+
+
+AudioLevel::AudioLevel() :
+    _absMax(0),
+    _count(0),
+    _currentLevel(0),
+    _currentLevelFullRange(0)
+{
+}
+
+AudioLevel::~AudioLevel()
+{
+}
+
+void
+AudioLevel::Clear()
+{
+    _absMax = 0;
+    _count = 0;
+    _currentLevel = 0;
+    _currentLevelFullRange = 0;
+}
+
+void
+AudioLevel::ComputeLevel(const AudioFrame& audioFrame)
+{
+    WebRtc_Word16 absValue(0);
+
+    // Check speech level (works for 2 channels as well)
+    absValue = WebRtcSpl_MaxAbsValueW16(
+        audioFrame._payloadData,
+        audioFrame._payloadDataLengthInSamples*audioFrame._audioChannel);
+    if (absValue > _absMax)
+    _absMax = absValue;
+
+    // Update level approximately 10 times per second
+    if (_count++ == kUpdateFrequency)
+    {
+        _currentLevelFullRange = _absMax;
+
+        _count = 0;
+
+        // Highest value for a WebRtc_Word16 is 0x7fff = 32767
+        // Divide with 1000 to get in the range of 0-32 which is the range of
+        // the permutation vector
+        WebRtc_Word32 position = _absMax/1000;
+
+        // Make it less likely that the bar stays at position 0. I.e. only if
+        // it's in the range 0-250 (instead of 0-1000)
+        if ((position == 0) && (_absMax > 250))
+        {
+            position = 1;
+        }
+        _currentLevel = permutation[position];
+
+        // Decay the absolute maximum (divide by 4)
+        _absMax >>= 2;
+    }
+}
+
+WebRtc_Word8
+AudioLevel::Level() const
+{
+    return _currentLevel;
+}
+
+WebRtc_Word16
+AudioLevel::LevelFullRange() const
+{
+    return _currentLevelFullRange;
+}
+
+}  // namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/level_indicator.h b/trunk/src/voice_engine/main/source/level_indicator.h
new file mode 100644
index 0000000..564b068
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/level_indicator.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
+#define WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
+
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc {
+
+class AudioFrame;
+namespace voe {
+
+class AudioLevel
+{
+public:
+    AudioLevel();
+    virtual ~AudioLevel();
+
+    void ComputeLevel(const AudioFrame& audioFrame);
+
+    WebRtc_Word8 Level() const;
+
+    WebRtc_Word16 LevelFullRange() const;
+
+    void Clear();
+
+private:
+    enum { kUpdateFrequency = 10};
+
+    WebRtc_Word16 _absMax;
+    WebRtc_Word16 _count;
+    WebRtc_Word8 _currentLevel;
+    WebRtc_Word16 _currentLevelFullRange;
+};
+
+}  // namespace voe
+
+}  // namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
diff --git a/trunk/src/voice_engine/main/source/monitor_module.cc b/trunk/src/voice_engine/main/source/monitor_module.cc
new file mode 100644
index 0000000..6314f38
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/monitor_module.cc
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "critical_section_wrapper.h"
+#include "monitor_module.h"
+
+namespace webrtc  {
+
+namespace voe  {
+
+MonitorModule::MonitorModule() :
+    _observerPtr(NULL),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _lastProcessTime(GET_TIME_IN_MS())
+{
+}
+
+MonitorModule::~MonitorModule()
+{
+    delete &_callbackCritSect;
+}
+
+WebRtc_Word32 
+MonitorModule::RegisterObserver(MonitorObserver& observer)
+{
+    CriticalSectionScoped lock(_callbackCritSect);
+    if (_observerPtr)
+    {
+        return -1;
+    }
+    _observerPtr = &observer;
+    return 0;
+}
+
+WebRtc_Word32 
+MonitorModule::DeRegisterObserver()
+{
+    CriticalSectionScoped lock(_callbackCritSect);
+    if (!_observerPtr)
+    {
+        return 0;
+    }
+    _observerPtr = NULL;
+    return 0;
+}
+
+WebRtc_Word32 
+MonitorModule::Version(WebRtc_Word8* version,
+                       WebRtc_UWord32& remainingBufferInBytes,
+                       WebRtc_UWord32& position) const
+{
+    return 0;
+}
+   
+WebRtc_Word32 
+MonitorModule::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    return 0;
+}
+
+WebRtc_Word32 
+MonitorModule::TimeUntilNextProcess()
+{
+    WebRtc_UWord32 now = GET_TIME_IN_MS();
+    WebRtc_Word32 timeToNext =
+        kAverageProcessUpdateTimeMs - (now - _lastProcessTime);
+    return (timeToNext); 
+}
+
+WebRtc_Word32 
+MonitorModule::Process()
+{
+    _lastProcessTime = GET_TIME_IN_MS();
+    if (_observerPtr)
+    {
+        CriticalSectionScoped lock(_callbackCritSect);
+        _observerPtr->OnPeriodicProcess();
+    }
+    return 0;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/monitor_module.h b/trunk/src/voice_engine/main/source/monitor_module.h
new file mode 100644
index 0000000..45cf228
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/monitor_module.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
+#define WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
+
+#include "module.h"
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+class MonitorObserver
+{
+public:
+    virtual void OnPeriodicProcess() = 0;
+protected:
+    virtual ~MonitorObserver() {}
+};
+
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class MonitorModule : public Module
+{
+public:
+    WebRtc_Word32 RegisterObserver(MonitorObserver& observer);
+
+    WebRtc_Word32 DeRegisterObserver();
+
+    MonitorModule();
+
+    virtual ~MonitorModule();
+public:	// module
+    WebRtc_Word32 Version(WebRtc_Word8* version,
+                          WebRtc_UWord32& remainingBufferInBytes,
+                          WebRtc_UWord32& position) const;
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    WebRtc_Word32 TimeUntilNextProcess();
+
+    WebRtc_Word32 Process();
+private:
+    enum { kAverageProcessUpdateTimeMs = 1000 };
+    MonitorObserver* _observerPtr;
+    CriticalSectionWrapper&	_callbackCritSect;
+    WebRtc_Word32 _lastProcessTime;
+};
+
+}  //  namespace voe
+
+}  //  namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
diff --git a/trunk/src/voice_engine/main/source/output_mixer.cc b/trunk/src/voice_engine/main/source/output_mixer.cc
new file mode 100644
index 0000000..9dfe0ad
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/output_mixer.cc
@@ -0,0 +1,754 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "output_mixer.h"
+
+#include "audio_processing.h"
+#include "audio_frame_operations.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "trace.h"
+#include "statistics.h"
+#include "voe_external_media.h"
+
+namespace webrtc {
+
+namespace voe {
+
+void
+OutputMixer::NewMixedAudio(const WebRtc_Word32 id,
+                           const AudioFrame& generalAudioFrame,
+                           const AudioFrame** uniqueAudioFrames,
+                           const WebRtc_UWord32 size)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::NewMixedAudio(id=%d, size=%u)", id, size);
+
+    _audioFrame = generalAudioFrame;
+    _audioFrame._id = id;
+}
+
+void OutputMixer::MixedParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::MixedParticipants(id=%d, size=%u)", id, size);
+}
+
+void OutputMixer::VADPositiveParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::VADPositiveParticipants(id=%d, size=%u)",
+                 id, size);
+}
+
+void OutputMixer::MixedAudioLevel(const WebRtc_Word32  id,
+                                  const WebRtc_UWord32 level)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::MixedAudioLevel(id=%d, level=%u)", id, level);
+}
+
+void OutputMixer::PlayNotification(const WebRtc_Word32 id,
+                                   const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::PlayNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+    // Not implemented yet
+}
+
+void OutputMixer::RecordNotification(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::RecordNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implemented yet
+}
+
+void OutputMixer::PlayFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::PlayFileEnded(id=%d)", id);
+
+    // not needed
+}
+
+void OutputMixer::RecordFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::RecordFileEnded(id=%d)", id);
+    assert(id == _instanceId);
+
+    CriticalSectionScoped cs(_fileCritSect);
+    _outputFileRecording = false;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::RecordFileEnded() =>"
+                 "output file recorder module is shutdown");
+}
+
+WebRtc_Word32
+OutputMixer::Create(OutputMixer*& mixer, const WebRtc_UWord32 instanceId)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId,
+                 "OutputMixer::Create(instanceId=%d)", instanceId);
+    mixer = new OutputMixer(instanceId);
+    if (mixer == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId,
+                     "OutputMixer::Create() unable to allocate memory for"
+                     "mixer");
+        return -1;
+    }
+    return 0;
+}
+
+OutputMixer::OutputMixer(const WebRtc_UWord32 instanceId) :
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _mixerModule(*AudioConferenceMixer::Create(instanceId)),
+    _audioLevel(),
+    _dtmfGenerator(instanceId),
+    _instanceId(instanceId),
+    _externalMediaCallbackPtr(NULL),
+    _externalMedia(false),
+    _panLeft(1.0f),
+    _panRight(1.0f),
+    _mixingFrequencyHz(8000),
+    _outputFileRecorderPtr(NULL),
+    _outputFileRecording(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::OutputMixer() - ctor");
+	
+    if ((_mixerModule.RegisterMixedStreamCallback(*this) == -1) ||
+        (_mixerModule.RegisterMixerStatusCallback(*this, 100) == -1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "OutputMixer::OutputMixer() failed to register mixer"
+                     "callbacks");
+    }
+	
+    _dtmfGenerator.Init();
+}
+
+void
+OutputMixer::Destroy(OutputMixer*& mixer)
+{
+    if (mixer)
+    {
+        delete mixer;
+        mixer = NULL;
+    }
+}
+	
+OutputMixer::~OutputMixer()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::~OutputMixer() - dtor");
+    if (_externalMedia)
+    {
+        DeRegisterExternalMediaProcessing();
+    }
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+        if (_outputFileRecorderPtr)
+        {
+            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+            _outputFileRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+            _outputFileRecorderPtr = NULL;
+        }
+    }
+    _mixerModule.UnRegisterMixerStatusCallback();
+    _mixerModule.UnRegisterMixedStreamCallback();
+    delete &_mixerModule;
+    delete &_callbackCritSect;
+    delete &_fileCritSect;
+}
+
+WebRtc_Word32
+OutputMixer::SetEngineInformation(voe::Statistics& engineStatistics)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::SetEngineInformation()");
+    _engineStatisticsPtr = &engineStatistics;
+    return 0;
+}
+
+WebRtc_Word32 
+OutputMixer::SetAudioProcessingModule(
+    AudioProcessing* audioProcessingModule)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::SetAudioProcessingModule("
+                 "audioProcessingModule=0x%x)", audioProcessingModule);
+    _audioProcessingModulePtr = audioProcessingModule;
+    return 0;
+}
+
+int OutputMixer::RegisterExternalMediaProcessing(
+    VoEMediaProcess& proccess_object)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "OutputMixer::RegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    _externalMediaCallbackPtr = &proccess_object;
+    _externalMedia = true;
+
+    return 0;
+}
+
+int OutputMixer::DeRegisterExternalMediaProcessing()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::DeRegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    _externalMedia = false;
+    _externalMediaCallbackPtr = NULL;
+
+    return 0;
+}
+
+int OutputMixer::PlayDtmfTone(WebRtc_UWord8 eventCode, int lengthMs,
+                              int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "OutputMixer::PlayDtmfTone()");
+    if (_dtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(VE_STILL_PLAYING_PREV_DTMF,
+                                           kTraceError,
+                                           "OutputMixer::PlayDtmfTone()");
+        return -1;
+    }
+    return 0;
+}
+
+int OutputMixer::StartPlayingDtmfTone(WebRtc_UWord8 eventCode,
+                                      int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "OutputMixer::StartPlayingDtmfTone()");
+    if (_dtmfGenerator.StartTone(eventCode, attenuationDb) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STILL_PLAYING_PREV_DTMF,
+            kTraceError,
+            "OutputMixer::StartPlayingDtmfTone())");
+        return -1;
+    }
+    return 0;
+}
+
+int OutputMixer::StopPlayingDtmfTone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "OutputMixer::StopPlayingDtmfTone()");
+    return (_dtmfGenerator.StopTone());
+}
+
+WebRtc_Word32
+OutputMixer::SetMixabilityStatus(MixerParticipant& participant,
+                                 const bool mixable)
+{
+    return _mixerModule.SetMixabilityStatus(participant, mixable);
+}
+
+WebRtc_Word32
+OutputMixer::SetAnonymousMixabilityStatus(MixerParticipant& participant,
+                                          const bool mixable)
+{
+    return _mixerModule.SetAnonymousMixabilityStatus(participant,mixable);
+}
+
+WebRtc_Word32
+OutputMixer::MixActiveChannels()
+{
+    return _mixerModule.Process();
+}
+
+int
+OutputMixer::GetSpeechOutputLevel(WebRtc_UWord32& level)
+{
+    WebRtc_Word8 currentLevel = _audioLevel.Level();
+    level = static_cast<WebRtc_UWord32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSpeechOutputLevel() => level=%u", level);
+    return 0;
+}
+
+int
+OutputMixer::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level)
+{
+    WebRtc_Word16 currentLevel = _audioLevel.LevelFullRange();
+    level = static_cast<WebRtc_UWord32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSpeechOutputLevelFullRange() => level=%u", level);
+    return 0;
+}
+
+int
+OutputMixer::SetOutputVolumePan(float left, float right)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::SetOutputVolumePan()");
+    _panLeft = left;
+    _panRight = right;
+    return 0;
+}
+
+int
+OutputMixer::GetOutputVolumePan(float& left, float& right)
+{
+    left = _panLeft;
+    right = _panRight;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetOutputVolumePan() => left=%2.1f, right=%2.1f",
+                 left, right);
+    return 0;
+}
+
+int OutputMixer::StartRecordingPlayout(const char* fileName,
+                                       const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::StartRecordingPlayout(fileName=%s)", fileName);
+
+    if (_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingPlayout() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0);
+    CodecInst dummyCodec={100,"L16",16000,320,1,320000};
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() invalid compression");
+        return(-1);
+    }
+    if(codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst=&dummyCodec;
+    }
+    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    }
+    else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(_fileCritSect);
+    
+    // Destroy the old instance
+    if (_outputFileRecorderPtr)
+    {
+        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+    }
+
+    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
+        _instanceId,
+        (const FileFormats)format);
+    if (_outputFileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_outputFileRecorderPtr->StartRecordingAudioFile(
+        fileName,
+        (const CodecInst&)*codecInst,
+        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _outputFileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+        return -1;
+    }
+    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
+    _outputFileRecording = true;
+
+    return 0;
+}
+
+int OutputMixer::StartRecordingPlayout(OutStream* stream,
+                                       const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::StartRecordingPlayout()");
+
+    if (_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingPlayout() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0);
+    CodecInst dummyCodec={100,"L16",16000,320,1,320000};
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() invalid compression");
+        return(-1);
+    }
+    if(codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst=&dummyCodec;
+    }
+    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    }
+    else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(_fileCritSect);
+
+    // Destroy the old instance
+    if (_outputFileRecorderPtr)
+    {
+        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+    }
+
+    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
+        _instanceId,
+        (const FileFormats)format);
+    if (_outputFileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream,
+                                                        *codecInst,
+                                                        notificationTime) != 0)
+    {
+       _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+	    "StartRecordingAudioFile() failed to start file recording");
+        _outputFileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+        return -1;
+    }
+    
+    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
+    _outputFileRecording = true;
+
+    return 0;
+}
+
+int OutputMixer::StopRecordingPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::StopRecordingPlayout()");
+
+    if (!_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StopRecordingPlayout() file isnot recording");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_fileCritSect);
+
+    if (_outputFileRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording(), could not stop recording");
+        return -1;
+    }
+    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+    _outputFileRecorderPtr = NULL;
+    _outputFileRecording = false;
+
+    return 0;
+}
+
+WebRtc_Word32 
+OutputMixer::GetMixedAudio(const WebRtc_Word32 desiredFreqHz,
+                           const WebRtc_UWord8 channels,
+                           AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::GetMixedAudio(desiredFreqHz=%d, channels=%d)",
+                 desiredFreqHz, channels);
+
+    audioFrame = _audioFrame;
+
+    // --- Record playout if enabled
+    {
+        CriticalSectionScoped cs(_fileCritSect);
+        if (_outputFileRecording)
+        {
+            assert(audioFrame._audioChannel == 1);
+        
+            if (_outputFileRecorderPtr)
+            {
+                _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
+            }
+        }
+    }
+
+    int outLen(0);
+
+    if (audioFrame._audioChannel == 1)
+    {
+        if (_resampler.ResetIfNeeded(audioFrame._frequencyInHz,
+                                     desiredFreqHz,
+                                     kResamplerSynchronous) != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "OutputMixer::GetMixedAudio() unable to resample - 1");
+            return -1;
+        }
+    }
+    else
+    {
+        if (_resampler.ResetIfNeeded(audioFrame._frequencyInHz,
+                                     desiredFreqHz,
+                                     kResamplerSynchronousStereo) != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "OutputMixer::GetMixedAudio() unable to resample - 2");
+            return -1;
+        }
+    }
+    if (_resampler.Push(
+        _audioFrame._payloadData,
+        _audioFrame._payloadDataLengthInSamples*_audioFrame._audioChannel,
+        audioFrame._payloadData,
+        AudioFrame::kMaxAudioFrameSizeSamples,
+        outLen) == 0)
+    {
+        // Ensure that output from resampler matches the audio-frame format.
+        // Example: 10ms stereo output at 48kHz => outLen = 960 =>
+        // convert _payloadDataLengthInSamples to 480
+        audioFrame._payloadDataLengthInSamples =
+            (outLen / _audioFrame._audioChannel);
+        audioFrame._frequencyInHz = desiredFreqHz;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "OutputMixer::GetMixedAudio() resampling failed");
+        return -1;
+    }
+
+    if ((channels == 2) && (audioFrame._audioChannel == 1))
+    {
+        AudioFrameOperations::MonoToStereo(audioFrame);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 
+OutputMixer::DoOperationsOnCombinedSignal()
+{
+    if (_audioFrame._frequencyInHz != _mixingFrequencyHz)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                     "OutputMixer::DoOperationsOnCombinedSignal() => "
+                     "mixing frequency = %d", _audioFrame._frequencyInHz);
+        _mixingFrequencyHz = _audioFrame._frequencyInHz;
+    }
+
+    // --- Insert inband Dtmf tone
+    if (_dtmfGenerator.IsAddingTone())
+    {
+        InsertInbandDtmfTone();
+    }
+
+    // Scale left and/or right channel(s) if balance is active
+    if (_panLeft != 1.0 || _panRight != 1.0)
+    {
+        if (_audioFrame._audioChannel == 1)
+        {
+            AudioFrameOperations::MonoToStereo(_audioFrame);
+        }
+        else
+        {
+            // Pure stereo mode (we are receiving a stereo signal).
+        }
+
+        assert(_audioFrame._audioChannel == 2);
+        AudioFrameOperations::Scale(_panLeft, _panRight, _audioFrame);
+    }
+
+    // --- Far-end Voice Quality Enhancement (AudioProcessing Module)
+
+    APMAnalyzeReverseStream();
+
+    // --- External media processing
+
+    if (_externalMedia)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        const bool isStereo = (_audioFrame._audioChannel == 2);
+        if (_externalMediaCallbackPtr)
+        {
+            _externalMediaCallbackPtr->Process(
+                -1,
+                kPlaybackAllChannelsMixed, 
+                (WebRtc_Word16*)_audioFrame._payloadData,
+                _audioFrame._payloadDataLengthInSamples,
+                _audioFrame._frequencyInHz,
+                isStereo);
+        }
+    }
+
+    // --- Measure audio level (0-9) for the combined signal
+    _audioLevel.ComputeLevel(_audioFrame);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//	                             Private methods
+// ----------------------------------------------------------------------------
+
+int 
+OutputMixer::APMAnalyzeReverseStream()
+{
+    int outLen(0);
+    AudioFrame audioFrame = _audioFrame;
+
+    // Convert from mixing frequency to APM frequency.
+    // Sending side determines APM frequency.
+
+    if (audioFrame._audioChannel == 1)
+    {
+        _apmResampler.ResetIfNeeded(_audioFrame._frequencyInHz,
+                                    _audioProcessingModulePtr->sample_rate_hz(),
+                                    kResamplerSynchronous);
+    }
+    else
+    {
+        _apmResampler.ResetIfNeeded(_audioFrame._frequencyInHz,
+                                    _audioProcessingModulePtr->sample_rate_hz(),
+                                    kResamplerSynchronousStereo);
+    }
+    if (_apmResampler.Push(
+        _audioFrame._payloadData,
+        _audioFrame._payloadDataLengthInSamples*_audioFrame._audioChannel,
+        audioFrame._payloadData,
+        AudioFrame::kMaxAudioFrameSizeSamples,
+        outLen) == 0)
+    {
+        audioFrame._payloadDataLengthInSamples =
+            (outLen / _audioFrame._audioChannel);
+        audioFrame._frequencyInHz = _audioProcessingModulePtr->sample_rate_hz();
+    }
+
+    if (audioFrame._audioChannel == 2)
+    {
+        AudioFrameOperations::StereoToMono(audioFrame);
+    }
+
+    // Perform far-end APM analyze
+
+    if (_audioProcessingModulePtr->AnalyzeReverseStream(&audioFrame) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "AudioProcessingModule::AnalyzeReverseStream() => error");
+    }
+
+    return 0;
+}
+
+int
+OutputMixer::InsertInbandDtmfTone()
+{
+    WebRtc_UWord16 sampleRate(0);
+    _dtmfGenerator.GetSampleRate(sampleRate);
+    if (sampleRate != _audioFrame._frequencyInHz)
+    {
+        // Update sample rate of Dtmf tone since the mixing frequency changed.
+        _dtmfGenerator.SetSampleRate(
+            (WebRtc_UWord16)(_audioFrame._frequencyInHz));
+        // Reset the tone to be added taking the new sample rate into account.
+        _dtmfGenerator.ResetTone();
+    }
+
+    WebRtc_Word16 toneBuffer[320];
+    WebRtc_UWord16 toneSamples(0);
+    if (_dtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "OutputMixer::InsertInbandDtmfTone() inserting Dtmf"
+                     "tone failed");
+        return -1;
+    }
+
+    // replace mixed audio with Dtmf tone
+    if (_audioFrame._audioChannel == 1)
+    {
+        // mono
+        memcpy(_audioFrame._payloadData, toneBuffer, sizeof(WebRtc_Word16)
+            * toneSamples);
+    } else
+    {
+        // stereo
+        for (int i = 0; i < _audioFrame._payloadDataLengthInSamples; i++)
+        {
+            _audioFrame._payloadData[2 * i] = toneBuffer[i];
+            _audioFrame._payloadData[2 * i + 1] = 0;
+        }
+    }
+    assert(_audioFrame._payloadDataLengthInSamples == toneSamples);
+
+    return 0;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/output_mixer.h b/trunk/src/voice_engine/main/source/output_mixer.h
new file mode 100644
index 0000000..a5df185
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/output_mixer.h
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H
+#define WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H
+
+#include "audio_conference_mixer.h"
+#include "audio_conference_mixer_defines.h"
+#include "common_types.h"
+#include "dtmf_inband.h"
+#include "file_recorder.h"
+#include "level_indicator.h"
+#include "resampler.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc {
+
+class AudioProcessing;
+class CriticalSectionWrapper;
+class FileWrapper;
+class VoEMediaProcess;
+
+namespace voe {
+
+class Statistics;
+
+class OutputMixer : public AudioMixerOutputReceiver,
+                    public AudioMixerStatusReceiver,
+                    public FileCallback
+{
+public:
+    static WebRtc_Word32 Create(OutputMixer*& mixer,
+                                const WebRtc_UWord32 instanceId);
+
+    static void Destroy(OutputMixer*& mixer);
+
+    WebRtc_Word32 SetEngineInformation(Statistics& engineStatistics);
+
+    WebRtc_Word32 SetAudioProcessingModule(
+        AudioProcessing* audioProcessingModule);
+
+    // VoEExternalMedia
+    int RegisterExternalMediaProcessing(
+        VoEMediaProcess& proccess_object);
+
+    int DeRegisterExternalMediaProcessing();
+
+    // VoEDtmf
+    int PlayDtmfTone(WebRtc_UWord8 eventCode,
+                     int lengthMs,
+                     int attenuationDb);
+
+    int StartPlayingDtmfTone(WebRtc_UWord8 eventCode,
+                             int attenuationDb);
+
+    int StopPlayingDtmfTone();
+
+    WebRtc_Word32 MixActiveChannels();
+
+    WebRtc_Word32 DoOperationsOnCombinedSignal();
+
+    WebRtc_Word32 SetMixabilityStatus(MixerParticipant& participant,
+                                      const bool mixable);
+
+    WebRtc_Word32 SetAnonymousMixabilityStatus(MixerParticipant& participant,
+                                               const bool mixable);
+
+    WebRtc_Word32 GetMixedAudio(const WebRtc_Word32 desiredFreqHz,
+                                const WebRtc_UWord8 channels,
+                                AudioFrame& audioFrame);
+
+    // VoEVolumeControl
+    int GetSpeechOutputLevel(WebRtc_UWord32& level);
+
+    int GetSpeechOutputLevelFullRange(WebRtc_UWord32& level);
+
+    int SetOutputVolumePan(float left, float right);
+
+    int GetOutputVolumePan(float& left, float& right);
+
+    // VoEFile
+    int StartRecordingPlayout(const char* fileName,
+                              const CodecInst* codecInst);
+
+    int StartRecordingPlayout(OutStream* stream,
+                              const CodecInst* codecInst);
+    int StopRecordingPlayout();
+
+    virtual ~OutputMixer();
+
+public:	// from AudioMixerOutputReceiver
+    virtual void NewMixedAudio(
+        const WebRtc_Word32 id,
+        const AudioFrame& generalAudioFrame,
+        const AudioFrame** uniqueAudioFrames,
+        const WebRtc_UWord32 size);
+
+public:  // from AudioMixerStatusReceiver
+    virtual void MixedParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    virtual void VADPositiveParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    virtual void MixedAudioLevel(const WebRtc_Word32  id,
+                                 const WebRtc_UWord32 level);
+
+public: // For file recording
+    void PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs);
+
+    void RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs);
+
+    void PlayFileEnded(const WebRtc_Word32 id);
+    void RecordFileEnded(const WebRtc_Word32 id);
+
+private:
+    OutputMixer(const WebRtc_UWord32 instanceId);
+    int APMAnalyzeReverseStream();
+    int InsertInbandDtmfTone();
+
+private:  // uses
+    Statistics* _engineStatisticsPtr;
+    AudioProcessing* _audioProcessingModulePtr;
+
+private:  // owns
+    CriticalSectionWrapper& _callbackCritSect;
+    // protect the _outputFileRecorderPtr and _outputFileRecording
+    CriticalSectionWrapper& _fileCritSect;
+    AudioConferenceMixer& _mixerModule;
+    AudioFrame _audioFrame;
+    Resampler _resampler;        // converts mixed audio to fit ADM format
+    Resampler _apmResampler;    // converts mixed audio to fit APM rate
+    AudioLevel _audioLevel;    // measures audio level for the combined signal
+    DtmfInband _dtmfGenerator;
+    int _instanceId;
+    VoEMediaProcess* _externalMediaCallbackPtr;
+    bool _externalMedia;
+    float _panLeft;
+    float _panRight;
+    int _mixingFrequencyHz;
+    FileRecorder* _outputFileRecorderPtr;
+    bool _outputFileRecording;
+};
+
+}  //  namespace voe
+
+}  //  namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H
diff --git a/trunk/src/voice_engine/main/source/ref_count.cc b/trunk/src/voice_engine/main/source/ref_count.cc
new file mode 100644
index 0000000..f1ed0be
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/ref_count.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "critical_section_wrapper.h"
+#include "ref_count.h"
+
+namespace webrtc {
+
+namespace voe {
+
+RefCount::RefCount() :
+    _count(0),
+    _crit(*CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+RefCount::~RefCount()
+{
+    delete &_crit;
+}
+
+RefCount&
+RefCount::operator++(int)
+{
+    CriticalSectionScoped lock(_crit);
+    _count++;
+    return *this;
+}
+    
+RefCount&
+RefCount::operator--(int)
+{
+    CriticalSectionScoped lock(_crit);
+    _count--;
+    return *this;
+}
+  
+void 
+RefCount::Reset()
+{
+    CriticalSectionScoped lock(_crit);
+    _count = 0;
+}
+
+int 
+RefCount::GetCount() const
+{
+    return _count;
+}
+
+}  // namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/ref_count.h b/trunk/src/voice_engine/main/source/ref_count.h
new file mode 100644
index 0000000..e8c0a81
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/ref_count.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_REF_COUNT_H
+#define WEBRTC_VOICE_ENGINE_REF_COUNT_H
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class RefCount
+{
+public:
+    RefCount();
+    ~RefCount();
+    RefCount& operator++(int);
+    RefCount& operator--(int);
+    void Reset();
+    int GetCount() const;
+private:
+    volatile int _count;
+    CriticalSectionWrapper& _crit;
+};
+
+}  // namespace voe
+
+}  // namespace webrtc
+#endif    // #ifndef WEBRTC_VOICE_ENGINE_REF_COUNT_H
diff --git a/trunk/src/voice_engine/main/source/shared_data.cc b/trunk/src/voice_engine/main/source/shared_data.cc
new file mode 100644
index 0000000..2362da5
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/shared_data.cc
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "shared_data.h"
+
+#include "audio_processing.h"
+#include "critical_section_wrapper.h"
+#include "channel.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+
+namespace webrtc {
+
+namespace voe {
+
+static WebRtc_Word32 _gInstanceCounter = 0;
+
+SharedData::SharedData() :
+    _instanceId(++_gInstanceCounter),
+    _apiCritPtr(CriticalSectionWrapper::CreateCriticalSection()),
+    _channelManager(_gInstanceCounter),
+    _engineStatistics(_gInstanceCounter),
+    _audioDevicePtr(NULL),
+    _audioProcessingModulePtr(NULL),
+    _moduleProcessThreadPtr(ProcessThread::CreateProcessThread()),
+    _externalRecording(false),
+    _externalPlayout(false)
+{
+    Trace::CreateTrace();
+    Trace::SetLevelFilter(WEBRTC_VOICE_ENGINE_DEFAULT_TRACE_FILTER);
+    if (OutputMixer::Create(_outputMixerPtr, _gInstanceCounter) == 0)
+    {
+        _outputMixerPtr->SetEngineInformation(_engineStatistics);
+    }
+    if (TransmitMixer::Create(_transmitMixerPtr, _gInstanceCounter) == 0)
+    {
+        _transmitMixerPtr->SetEngineInformation(*_moduleProcessThreadPtr,
+                                                _engineStatistics,
+                                                _channelManager);
+    }
+    _audioDeviceLayer = AudioDeviceModule::kPlatformDefaultAudio;
+}
+
+SharedData::~SharedData()
+{
+    OutputMixer::Destroy(_outputMixerPtr);
+    TransmitMixer::Destroy(_transmitMixerPtr);
+    if (_audioDevicePtr) {
+        _audioDevicePtr->Release();
+    }
+    AudioProcessing::Destroy(_audioProcessingModulePtr);
+    delete _apiCritPtr;
+    ProcessThread::DestroyProcessThread(_moduleProcessThreadPtr);
+    Trace::ReturnTrace();
+}
+
+WebRtc_UWord16
+SharedData::NumOfSendingChannels()
+{
+    WebRtc_Word32 numOfChannels = _channelManager.NumOfChannels();
+    if (numOfChannels <= 0)
+    {
+        return 0;
+    }
+	
+    WebRtc_UWord16 nChannelsSending(0);
+    WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+
+    _channelManager.GetChannelIds(channelsArray, numOfChannels);
+    for (int i = 0; i < numOfChannels; i++)
+    {
+        voe::ScopedChannel sc(_channelManager, channelsArray[i]);
+        Channel* chPtr = sc.ChannelPtr();
+        if (chPtr)
+        {
+            if (chPtr->Sending())
+            {
+                nChannelsSending++;
+            }
+        }
+    }
+    delete [] channelsArray;
+    return nChannelsSending;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/shared_data.h b/trunk/src/voice_engine/main/source/shared_data.h
new file mode 100644
index 0000000..afab37b
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/shared_data.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_SHARED_DATA_H
+#define WEBRTC_VOICE_ENGINE_SHARED_DATA_H
+
+#include "voice_engine_defines.h"
+
+#include "channel_manager.h"
+#include "statistics.h"
+#include "process_thread.h"
+
+#include "audio_device.h"
+#include "audio_processing.h"
+
+class ProcessThread;
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class TransmitMixer;
+class OutputMixer;
+
+class SharedData
+{
+public:
+    // Public accessors.
+    WebRtc_UWord32 instance_id() const { return _instanceId; }
+    Statistics& statistics() { return _engineStatistics; }
+    ChannelManager& channel_manager() { return _channelManager; }
+    AudioDeviceModule* audio_device() { return _audioDevicePtr; }
+
+protected:
+    WebRtc_UWord16 NumOfSendingChannels();
+
+protected:
+    const WebRtc_UWord32 _instanceId;
+    CriticalSectionWrapper* _apiCritPtr;
+    ChannelManager _channelManager;
+    Statistics _engineStatistics;
+    AudioDeviceModule* _audioDevicePtr;
+    OutputMixer* _outputMixerPtr;
+    TransmitMixer* _transmitMixerPtr;
+    AudioProcessing* _audioProcessingModulePtr;
+    ProcessThread* _moduleProcessThreadPtr;
+
+protected:
+    bool _externalRecording;
+    bool _externalPlayout;
+
+    AudioDeviceModule::AudioLayer _audioDeviceLayer;
+
+protected:
+    SharedData();
+    virtual ~SharedData();
+};
+
+} //  namespace voe
+
+} //  namespace webrtc
+#endif // WEBRTC_VOICE_ENGINE_SHARED_DATA_H
diff --git a/trunk/src/voice_engine/main/source/statistics.cc b/trunk/src/voice_engine/main/source/statistics.cc
new file mode 100644
index 0000000..a534030
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/statistics.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <stdio.h>
+
+#include "statistics.h"
+
+#include "trace.h"
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+
+namespace voe {
+
+Statistics::Statistics(const WebRtc_UWord32 instanceId) :
+    _critPtr(CriticalSectionWrapper::CreateCriticalSection()),
+    _instanceId(instanceId),
+    _lastError(0),
+    _isInitialized(false)
+{
+}
+	
+Statistics::~Statistics()
+{
+    if (_critPtr)
+    {
+        delete _critPtr;
+        _critPtr = NULL;
+    }
+}
+
+WebRtc_Word32 Statistics::SetInitialized()
+{
+    _isInitialized = true;
+    return 0;
+}
+
+WebRtc_Word32 Statistics::SetUnInitialized()
+{
+    _isInitialized = false;
+    return 0;
+}
+
+bool Statistics::Initialized() const
+{
+    return _isInitialized;
+}
+
+WebRtc_Word32 Statistics::SetLastError(const WebRtc_Word32 error) const
+{
+    CriticalSectionScoped cs(*_critPtr);
+    _lastError = error;
+    return 0;
+}
+
+WebRtc_Word32 Statistics::SetLastError(const WebRtc_Word32 error,
+                                       const TraceLevel level) const
+{
+    CriticalSectionScoped cs(*_critPtr);
+    _lastError = error;
+    WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1),
+                 "error code is set to %d",
+                 _lastError);
+    return 0;
+}
+
+WebRtc_Word32 Statistics::SetLastError(
+    const WebRtc_Word32 error,
+    const TraceLevel level, const char* msg) const
+{
+    CriticalSectionScoped cs(*_critPtr);
+    char traceMessage[KTraceMaxMessageSize];
+    assert(strlen(msg) < KTraceMaxMessageSize);
+    _lastError = error;
+    sprintf(traceMessage, "%s (error=%d)", msg, error);
+    WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1), "%s",
+                 traceMessage);
+    return 0;
+}
+
+WebRtc_Word32 Statistics::LastError() const
+{
+    CriticalSectionScoped cs(*_critPtr);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "LastError() => %d", _lastError);
+    return _lastError;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/statistics.h b/trunk/src/voice_engine/main/source/statistics.h
new file mode 100644
index 0000000..fc0bf8c
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/statistics.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_STATISTICS_H
+#define WEBRTC_VOICE_ENGINE_STATISTICS_H
+
+#include "common_types.h"
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+#include "voe_errors.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class Statistics
+{
+ public:
+    enum {KTraceMaxMessageSize = 256};
+ public:
+    Statistics(const WebRtc_UWord32 instanceId);
+    ~Statistics();
+
+    WebRtc_Word32 SetInitialized();
+    WebRtc_Word32 SetUnInitialized();
+    bool Initialized() const;
+    WebRtc_Word32 SetLastError(const WebRtc_Word32 error) const;
+    WebRtc_Word32 SetLastError(const WebRtc_Word32 error,
+                               const TraceLevel level) const;
+    WebRtc_Word32 SetLastError(const WebRtc_Word32 error,
+                               const TraceLevel level,
+                               const char* msg) const;
+    WebRtc_Word32 LastError() const;
+
+ private:
+    CriticalSectionWrapper* _critPtr;
+    const WebRtc_UWord32 _instanceId;
+    mutable WebRtc_Word32 _lastError;
+    bool _isInitialized;
+};
+
+}  // namespace voe
+
+}  //  namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_STATISTICS_H
diff --git a/trunk/src/voice_engine/main/source/transmit_mixer.cc b/trunk/src/voice_engine/main/source/transmit_mixer.cc
new file mode 100644
index 0000000..d131a85
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/transmit_mixer.cc
@@ -0,0 +1,1416 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "transmit_mixer.h"
+
+#include "audio_frame_operations.h"
+#include "channel.h"
+#include "channel_manager.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "statistics.h"
+#include "trace.h"
+#include "utility.h"
+#include "voe_base_impl.h"
+#include "voe_external_media.h"
+
+#define WEBRTC_ABS(a)	   (((a) < 0) ? -(a) : (a))
+
+namespace webrtc {
+
+namespace voe {
+
+void 
+TransmitMixer::OnPeriodicProcess()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::OnPeriodicProcess()");
+
+#if defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
+    if (_typingNoiseWarning > 0)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        if (_voiceEngineObserverPtr)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::OnPeriodicProcess() => "
+                         "CallbackOnError(VE_TYPING_NOISE_WARNING)");
+            _voiceEngineObserverPtr->CallbackOnError(-1,
+                                                     VE_TYPING_NOISE_WARNING);
+        }
+        _typingNoiseWarning = 0;
+    }
+#endif
+
+    if (_saturationWarning > 0)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        if (_voiceEngineObserverPtr)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::OnPeriodicProcess() =>"
+                         " CallbackOnError(VE_SATURATION_WARNING)");
+            _voiceEngineObserverPtr->CallbackOnError(-1, VE_SATURATION_WARNING);
+        }
+        _saturationWarning = 0;
+    }
+
+    if (_noiseWarning > 0)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        if (_voiceEngineObserverPtr)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::OnPeriodicProcess() =>"
+                         "CallbackOnError(VE_NOISE_WARNING)");
+            _voiceEngineObserverPtr->CallbackOnError(-1, VE_NOISE_WARNING);
+        }
+        _noiseWarning = 0;
+    }
+}
+
+
+void TransmitMixer::PlayNotification(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PlayNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implemented yet.
+}
+	
+void TransmitMixer::RecordNotification(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "TransmitMixer::RecordNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implemented yet.
+}
+
+void TransmitMixer::PlayFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PlayFileEnded(id=%d)", id);
+
+    assert(id == _filePlayerId);
+
+    CriticalSectionScoped cs(_critSect);
+
+    _filePlaying = false;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PlayFileEnded() =>"
+                 "file player module is shutdown");
+}
+
+void 
+TransmitMixer::RecordFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::RecordFileEnded(id=%d)", id);
+
+    if (id == _fileRecorderId)
+    {
+        CriticalSectionScoped cs(_critSect);
+        _fileRecording = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordFileEnded() => fileRecorder module"
+                     "is shutdown");
+    } else if (id == _fileCallRecorderId)
+    {
+        CriticalSectionScoped cs(_critSect);
+        _fileCallRecording = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordFileEnded() => fileCallRecorder"
+                     "module is shutdown");
+    }
+}
+
+WebRtc_Word32
+TransmitMixer::Create(TransmitMixer*& mixer, const WebRtc_UWord32 instanceId)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, -1),
+                 "TransmitMixer::Create(instanceId=%d)", instanceId);
+    mixer = new TransmitMixer(instanceId);
+    if (mixer == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, -1),
+                     "TransmitMixer::Create() unable to allocate memory"
+                     "for mixer");
+        return -1;
+    }
+    return 0;
+}
+
+void
+TransmitMixer::Destroy(TransmitMixer*& mixer)
+{
+    if (mixer)
+    {
+        delete mixer;
+        mixer = NULL;
+    }
+}
+
+TransmitMixer::TransmitMixer(const WebRtc_UWord32 instanceId) :
+    _engineStatisticsPtr(NULL),
+    _channelManagerPtr(NULL),
+    _audioProcessingModulePtr(NULL),
+    _voiceEngineObserverPtr(NULL),
+    _processThreadPtr(NULL),
+    _filePlayerPtr(NULL),
+    _fileRecorderPtr(NULL),
+    _fileCallRecorderPtr(NULL),
+    // Offset the file player/recorder IDs by 1024 - 1026 so they cannot
+    // collide with channel IDs (fewer than 1024 channels are ever used).
+    _filePlayerId(instanceId + 1024),
+    _fileRecorderId(instanceId + 1025),
+    _fileCallRecorderId(instanceId + 1026),
+    _filePlaying(false),
+    _fileRecording(false),
+    _fileCallRecording(false),
+    _audioLevel(),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    _timeActive(0),
+    _penaltyCounter(0),
+    _typingNoiseWarning(0),
+#endif
+    _saturationWarning(0),
+    _noiseWarning(0),
+    _instanceId(instanceId),
+    _mixFileWithMicrophone(false),
+    _captureLevel(0),
+    _externalMedia(false),
+    _externalMediaCallbackPtr(NULL),
+    _mute(false),
+    _remainingMuteMicTimeMs(0),
+    _mixingFrequency(0),
+    _includeAudioLevelIndication(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::TransmitMixer() - ctor");
+}
+	
+TransmitMixer::~TransmitMixer()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::~TransmitMixer() - dtor");
+    _monitorModule.DeRegisterObserver();
+    if (_processThreadPtr)
+    {
+        _processThreadPtr->DeRegisterModule(&_monitorModule);
+    }
+    if (_externalMedia)
+    {
+        DeRegisterExternalMediaProcessing();
+    }
+    {
+        CriticalSectionScoped cs(_critSect);
+        if (_fileRecorderPtr)
+        {
+            _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+            _fileRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+            _fileRecorderPtr = NULL;
+        }
+        if (_fileCallRecorderPtr)
+        {
+            _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+            _fileCallRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+            _fileCallRecorderPtr = NULL;
+        }
+        if (_filePlayerPtr)
+        {
+            _filePlayerPtr->RegisterModuleFileCallback(NULL);
+            _filePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+            _filePlayerPtr = NULL;
+        }
+    }
+    delete &_critSect;
+    delete &_callbackCritSect;
+}
+
+WebRtc_Word32
+TransmitMixer::SetEngineInformation(ProcessThread& processThread,
+                                    Statistics& engineStatistics,
+                                    ChannelManager& channelManager)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::SetEngineInformation()");
+
+    _processThreadPtr = &processThread;
+    _engineStatisticsPtr = &engineStatistics;
+    _channelManagerPtr = &channelManager;
+
+    if (_processThreadPtr->RegisterModule(&_monitorModule) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::SetEngineInformation() failed to"
+                     "register the monitor module");
+    } else
+    {
+        _monitorModule.RegisterObserver(*this);
+    }
+
+    return 0;
+}
+	
+WebRtc_Word32 
+TransmitMixer::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::RegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+
+    if (_voiceEngineObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterVoiceEngineObserver() observer already enabled");
+        return -1;
+    }
+    _voiceEngineObserverPtr = &observer;
+    return 0;
+}
+
+WebRtc_Word32 
+TransmitMixer::SetAudioProcessingModule(AudioProcessing* audioProcessingModule)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::SetAudioProcessingModule("
+                 "audioProcessingModule=0x%x)",
+                 audioProcessingModule);
+    _audioProcessingModulePtr = audioProcessingModule;
+    return 0;
+}
+
+WebRtc_Word32 
+TransmitMixer::PrepareDemux(const WebRtc_Word8* audioSamples,
+                            const WebRtc_UWord32 nSamples,
+                            const WebRtc_UWord8 nChannels,
+                            const WebRtc_UWord32 samplesPerSec,
+                            const WebRtc_UWord16 totalDelayMS,
+                            const WebRtc_Word32 clockDrift,
+                            const WebRtc_UWord16 currentMicLevel)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PrepareDemux(nSamples=%u, nChannels=%u,"
+                 "samplesPerSec=%u, totalDelayMS=%u, clockDrift=%u,"
+                 "currentMicLevel=%u)", nSamples, nChannels, samplesPerSec,
+                 totalDelayMS, clockDrift, currentMicLevel);
+
+
+    const int mixingFrequency = _mixingFrequency;
+
+    ScopedChannel sc(*_channelManagerPtr);
+    void* iterator(NULL);
+    Channel* channelPtr = sc.GetFirstChannel(iterator);
+    _mixingFrequency = 8000;
+    while (channelPtr != NULL)
+    {
+        if (channelPtr->Sending())
+        {
+            CodecInst tmpCdc;
+            channelPtr->GetSendCodec(tmpCdc);
+            if (tmpCdc.plfreq > _mixingFrequency)
+                _mixingFrequency = tmpCdc.plfreq;
+        }
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+
+
+    // --- Resample input audio and create/store the initial audio frame
+
+    if (GenerateAudioFrame((const WebRtc_Word16*) audioSamples,
+                           nSamples,
+                           nChannels,
+                           samplesPerSec,
+                           _mixingFrequency) == -1)
+    {
+        return -1;
+    }
+
+    // --- Near-end Voice Quality Enhancement (APM) processing
+
+    APMProcessStream(totalDelayMS, clockDrift, currentMicLevel);
+
+    // --- Annoying typing detection (utilizes the APM/VAD decision)
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    TypingDetection();
+#endif
+
+    // --- Mute during DTMF tone if direct feedback is enabled
+
+    if (_remainingMuteMicTimeMs > 0)
+    {
+        AudioFrameOperations::Mute(_audioFrame);
+        _remainingMuteMicTimeMs -= 10;
+        if (_remainingMuteMicTimeMs < 0)
+        {
+            _remainingMuteMicTimeMs = 0;
+        }
+    }
+
+    // --- Mute signal
+
+    if (_mute)
+    {
+        AudioFrameOperations::Mute(_audioFrame);
+    }
+
+    // --- Measure audio level of speech after APM processing
+
+    _audioLevel.ComputeLevel(_audioFrame);
+
+    // --- Mix with file (does not affect the mixing frequency)
+
+    if (_filePlaying)
+    {
+        MixOrReplaceAudioWithFile(_mixingFrequency);
+    }
+
+    // --- Record to file
+
+    if (_fileRecording)
+    {
+        RecordAudioToFile(_mixingFrequency);
+    }
+
+    // --- External media processing
+
+    if (_externalMedia)
+    {
+        CriticalSectionScoped cs(_callbackCritSect);
+        const bool isStereo = (_audioFrame._audioChannel == 2);
+        if (_externalMediaCallbackPtr)
+        {
+            _externalMediaCallbackPtr->Process(
+                -1,
+                kRecordingAllChannelsMixed,
+                (WebRtc_Word16*) _audioFrame._payloadData,
+                _audioFrame._payloadDataLengthInSamples,
+                _audioFrame._frequencyInHz,
+                isStereo);
+        }
+    }
+
+    if (_mixingFrequency != mixingFrequency)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::TransmitMixer::PrepareDemux() => "
+                     "mixing frequency = %d",
+                     _mixingFrequency);
+    }
+
+    return 0;
+}
+
+
+	
+WebRtc_Word32
+TransmitMixer::DemuxAndMix()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::DemuxAndMix()");
+
+    ScopedChannel sc(*_channelManagerPtr);
+    void* iterator(NULL);
+    Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        if (channelPtr->InputIsOnHold())
+        {
+            channelPtr->UpdateLocalTimeStamp();
+        } else if (channelPtr->Sending())
+        {
+            // Give each sending channel a scratch copy of the current
+            AudioFrame tmpAudioFrame = _audioFrame;
+
+            channelPtr->Demultiplex(tmpAudioFrame);
+            channelPtr->PrepareEncodeAndSend(_mixingFrequency);
+        }
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+
+    return 0;
+}
+	
+WebRtc_Word32 
+TransmitMixer::EncodeAndSend()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::EncodeAndSend()");
+
+    ScopedChannel sc(*_channelManagerPtr);
+    void* iterator(NULL);
+    Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        if (channelPtr->Sending() && !channelPtr->InputIsOnHold())
+        {
+            channelPtr->EncodeAndSend();
+        }
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+    return 0;
+}
+
+WebRtc_UWord32 TransmitMixer::CaptureLevel() const
+{
+    return _captureLevel;
+}
+
+void
+TransmitMixer::UpdateMuteMicrophoneTime(const WebRtc_UWord32 lengthMs)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::UpdateMuteMicrophoneTime(lengthMs=%d)",
+               lengthMs);
+    _remainingMuteMicTimeMs = lengthMs;
+}
+
+WebRtc_Word32 
+TransmitMixer::StopSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::StopSend()");
+    _audioLevel.Clear();
+    return 0;
+}
+
+int TransmitMixer::StartPlayingFileAsMicrophone(const char* fileName,
+                                                const bool loop,
+                                                const FileFormats format,
+                                                const int startPosition,
+                                                const float volumeScaling,
+                                                const int stopPosition,
+                                                const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartPlayingFileAsMicrophone("
+                 "fileNameUTF8[]=%s,loop=%d, format=%d, volumeScaling=%5.3f,"
+                 " startPosition=%d, stopPosition=%d)", fileName, loop,
+                 format, volumeScaling, startPosition, stopPosition);
+
+    if (_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceWarning,
+            "StartPlayingFileAsMicrophone() is already playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    // Destroy the old instance
+    if (_filePlayerPtr)
+    {
+        _filePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+    }
+
+    // Dynamically create the instance
+    _filePlayerPtr
+        = FilePlayer::CreateFilePlayer(_filePlayerId,
+                                       (const FileFormats) format);
+
+    if (_filePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
+        return -1;
+    }
+
+    const WebRtc_UWord32 notificationTime(0);
+
+    if (_filePlayerPtr->StartPlayingFile(
+        fileName,
+        loop,
+        startPosition,
+        volumeScaling,
+        notificationTime,
+        stopPosition,
+        (const CodecInst*) codecInst) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFile() failed to start file playout");
+        _filePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+        return -1;
+    }
+
+    _filePlayerPtr->RegisterModuleFileCallback(this);
+    _filePlaying = true;
+
+    return 0;
+}
+
+int TransmitMixer::StartPlayingFileAsMicrophone(InStream* stream,
+                                                const FileFormats format,
+                                                const int startPosition,
+                                                const float volumeScaling,
+                                                const int stopPosition,
+                                                const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "TransmitMixer::StartPlayingFileAsMicrophone(format=%d,"
+                 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
+                 format, volumeScaling, startPosition, stopPosition);
+    
+    if (stream == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFileAsMicrophone() NULL as input stream");
+        return -1;
+    }
+
+    if (_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceWarning,
+            "StartPlayingFileAsMicrophone() is already playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    // Destroy the old instance
+    if (_filePlayerPtr)
+    {
+        _filePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+    }
+
+    // Dynamically create the instance
+    _filePlayerPtr
+        = FilePlayer::CreateFilePlayer(_filePlayerId,
+                                       (const FileFormats) format);
+
+    if (_filePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceWarning,
+            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
+        return -1;
+    }
+
+    const WebRtc_UWord32 notificationTime(0);
+
+    if (_filePlayerPtr->StartPlayingFile(
+        (InStream&) *stream,
+        startPosition,
+        volumeScaling,
+        notificationTime,
+        stopPosition,
+        (const CodecInst*) codecInst) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFile() failed to start file playout");
+        _filePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+        return -1;
+    }
+    _filePlayerPtr->RegisterModuleFileCallback(this);
+    _filePlaying = true;
+
+    return 0;
+}
+
+int TransmitMixer::StopPlayingFileAsMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "TransmitMixer::StopPlayingFileAsMicrophone()");
+
+    if (!_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "StopPlayingFileAsMicrophone() isnot playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    if (_filePlayerPtr->StopPlayingFile() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_STOP_PLAYOUT, kTraceError,
+            "StopPlayingFile() couldnot stop playing file");
+        return -1;
+    }
+
+    _filePlayerPtr->RegisterModuleFileCallback(NULL);
+    FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+    _filePlayerPtr = NULL;
+    _filePlaying = false;
+
+    return 0;
+}
+
+int TransmitMixer::IsPlayingFileAsMicrophone() const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::IsPlayingFileAsMicrophone()");
+    return _filePlaying;
+}
+
+int TransmitMixer::ScaleFileAsMicrophonePlayout(const float scale)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::ScaleFileAsMicrophonePlayout(scale=%5.3f)",
+                 scale);
+
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ScaleFileAsMicrophonePlayout() isnot playing file");
+        return -1;
+    }
+
+    if ((_filePlayerPtr == NULL) ||
+        (_filePlayerPtr->SetAudioScaling(scale) != 0))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "SetAudioScaling() failed to scale playout");
+        return -1;
+    }
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingMicrophone(const WebRtc_Word8* fileName,
+                                            const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartRecordingMicrophone(fileName=%s)",
+                 fileName);
+
+    if (_fileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartRecordingMicrophone() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL &&
+      (codecInst->channels < 1 || codecInst->channels > 2))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    // Destroy the old instance
+    if (_fileRecorderPtr)
+    {
+        _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+    }
+
+    _fileRecorderPtr =
+        FileRecorder::CreateFileRecorder(_fileRecorderId,
+                                         (const FileFormats) format);
+    if (_fileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileRecorderPtr->StartRecordingAudioFile(
+        fileName,
+        (const CodecInst&) *codecInst,
+        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _fileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+        return -1;
+    }
+    _fileRecorderPtr->RegisterModuleFileCallback(this);
+    _fileRecording = true;
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingMicrophone(OutStream* stream,
+                                            const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::StartRecordingMicrophone()");
+
+    if (_fileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "StartRecordingMicrophone() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    // Destroy the old instance
+    if (_fileRecorderPtr)
+    {
+        _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+    }
+
+    _fileRecorderPtr =
+        FileRecorder::CreateFileRecorder(_fileRecorderId,
+                                         (const FileFormats) format);
+    if (_fileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileRecorderPtr->StartRecordingAudioFile(*stream,
+                                                  *codecInst,
+                                                  notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _fileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+        return -1;
+    }
+
+    _fileRecorderPtr->RegisterModuleFileCallback(this);
+    _fileRecording = true;
+
+    return 0;
+}
+
+
+int TransmitMixer::StopRecordingMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StopRecordingMicrophone()");
+
+    if (!_fileRecording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                   "StopRecordingMicrophone() isnot recording");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    if (_fileRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording(), could not stop recording");
+        return -1;
+    }
+    _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+    _fileRecorderPtr = NULL;
+    _fileRecording = false;
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingCall(const WebRtc_Word8* fileName,
+                                      const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartRecordingCall(fileName=%s)", fileName);
+
+    if (_fileCallRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartRecordingCall() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingCall() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    // Destroy the old instance
+    if (_fileCallRecorderPtr)
+    {
+        _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+    }
+
+    _fileCallRecorderPtr
+        = FileRecorder::CreateFileRecorder(_fileCallRecorderId,
+                                           (const FileFormats) format);
+    if (_fileCallRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingCall() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileCallRecorderPtr->StartRecordingAudioFile(
+        fileName,
+        (const CodecInst&) *codecInst,
+        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _fileCallRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+        return -1;
+    }
+    _fileCallRecorderPtr->RegisterModuleFileCallback(this);
+    _fileCallRecording = true;
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingCall(OutStream* stream,
+                                      const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartRecordingCall()");
+
+    if (_fileCallRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartRecordingCall() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingCall() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    // Destroy the old instance
+    if (_fileCallRecorderPtr)
+    {
+        _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+    }
+
+    _fileCallRecorderPtr =
+        FileRecorder::CreateFileRecorder(_fileCallRecorderId,
+                                         (const FileFormats) format);
+    if (_fileCallRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingCall() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileCallRecorderPtr->StartRecordingAudioFile(*stream,
+                                                      *codecInst,
+                                                      notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _fileCallRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+        return -1;
+    }
+
+    _fileCallRecorderPtr->RegisterModuleFileCallback(this);
+    _fileCallRecording = true;
+
+    return 0;
+}
+
+// Stops an active call recording and destroys the file recorder.
+// Returns 0 on success, -1 if no recording is active or stopping fails.
+int TransmitMixer::StopRecordingCall()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StopRecordingCall()");
+
+    // NOTE(review): _fileCallRecording is read before _critSect is taken —
+    // confirm the flag is only toggled from the same API thread.
+    if (!_fileCallRecording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "StopRecordingCall() file isnot recording");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_critSect);
+
+    if (_fileCallRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording(), could not stop recording");
+        return -1;
+    }
+
+    // Detach the module callback and release the recorder instance.
+    _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+    _fileCallRecorderPtr = NULL;
+    _fileCallRecording = false;
+
+    return 0;
+}
+
+// Selects whether file playback is mixed with (true) or replaces (false)
+// the microphone signal; consumed by MixOrReplaceAudioWithFile().
+void 
+TransmitMixer::SetMixWithMicStatus(bool mix)
+{
+    _mixFileWithMicrophone = mix;
+}
+
+// Registers an external media-processing callback object and enables the
+// external-media path. Always returns 0.
+int TransmitMixer::RegisterExternalMediaProcessing(
+    VoEMediaProcess& proccess_object)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::RegisterExternalMediaProcessing()");
+
+    // Callback pointer is guarded by the callback lock, not _critSect.
+    CriticalSectionScoped cs(_callbackCritSect);
+    _externalMediaCallbackPtr = &proccess_object;
+    _externalMedia = true;
+
+    return 0;
+}
+
+// Removes a previously registered external media-processing callback and
+// disables the external-media path. Always returns 0.
+int TransmitMixer::DeRegisterExternalMediaProcessing()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::DeRegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(_callbackCritSect);
+    _externalMedia = false;
+    _externalMediaCallbackPtr = NULL;
+
+    return 0;
+}
+
+// Enables or disables muting of the transmitted audio. Always returns 0.
+int
+TransmitMixer::SetMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::SetMute(enable=%d)", enable);
+    _mute = enable;
+    return 0;
+}
+
+// Returns true if transmitted audio is currently muted.
+bool
+TransmitMixer::Mute() const
+{
+    return _mute;
+}
+
+// Returns the current input (speech + file) level on a [0,9] scale.
+WebRtc_Word8 TransmitMixer::AudioLevel() const
+{
+    // Speech + file level [0,9]
+    return _audioLevel.Level();
+}
+
+// Returns the current input (speech + file) level on a [0,32767] scale.
+WebRtc_Word16 TransmitMixer::AudioLevelFullRange() const
+{
+    // Speech + file level [0,32767]
+    return _audioLevel.LevelFullRange();
+}
+
+// Returns true while the mixed call audio is being recorded.
+bool TransmitMixer::IsRecordingCall()
+{
+    return _fileCallRecording;
+}
+
+// Returns true while the microphone audio is being recorded.
+bool TransmitMixer::IsRecordingMic()
+{
+
+    return _fileRecording;
+}
+
+// Resamples the captured audio (|audioSamples|: |nSamples| per channel,
+// |nChannels| channels, at |samplesPerSec|) into _audioFrame at
+// |mixingFrequency|, then fills in the frame meta data.
+// Returns 0 on success, -1 on resampler failure.
+WebRtc_Word32 
+TransmitMixer::GenerateAudioFrame(const WebRtc_Word16 audioSamples[],
+                                  const WebRtc_UWord32 nSamples,
+                                  const WebRtc_UWord8 nChannels,
+                                  const WebRtc_UWord32 samplesPerSec,
+                                  const int mixingFrequency)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::GenerateAudioFrame(nSamples=%u,"
+                 "samplesPerSec=%u, mixingFrequency=%u)",
+                 nSamples, samplesPerSec, mixingFrequency);
+
+    // Stereo input needs the synchronous-stereo resampler variant.
+    ResamplerType resampType = (nChannels == 1) ? 
+            kResamplerSynchronous : kResamplerSynchronousStereo;
+    
+
+    // Reconfigure the resampler only when rates/type actually changed.
+    if (_audioResampler.ResetIfNeeded(samplesPerSec,
+                                        mixingFrequency,
+                                        resampType) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::GenerateAudioFrame() unable to resample");
+        return -1;
+    }
+    if (_audioResampler.Push(
+        (WebRtc_Word16*) audioSamples,
+        nSamples * nChannels,
+        _audioFrame._payloadData,
+        AudioFrame::kMaxAudioFrameSizeSamples,
+        (int&) _audioFrame._payloadDataLengthInSamples) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::GenerateAudioFrame() resampling failed");
+        return -1;
+    }
+
+    // Push() reports total samples; store the per-channel count instead.
+    _audioFrame._payloadDataLengthInSamples /= nChannels;
+    _audioFrame._id = _instanceId;
+    // -1: no timestamp is associated with locally captured audio here.
+    _audioFrame._timeStamp = -1;
+    _audioFrame._frequencyInHz = mixingFrequency;
+    _audioFrame._speechType = AudioFrame::kNormalSpeech;
+    _audioFrame._vadActivity = AudioFrame::kVadUnknown;
+    _audioFrame._audioChannel = nChannels;
+
+    return 0;
+}
+
+// Passes the current _audioFrame to the microphone file recorder.
+// Returns 0 on success, -1 if no recorder exists or recording fails.
+// NOTE(review): |mixingFrequency| is currently unused in the body.
+WebRtc_Word32 TransmitMixer::RecordAudioToFile(
+    const WebRtc_UWord32 mixingFrequency)
+{
+    // Protects _fileRecorderPtr against concurrent Start/Stop calls.
+    CriticalSectionScoped cs(_critSect);
+    if (_fileRecorderPtr == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordAudioToFile() filerecorder doesnot"
+                     "exist");
+        return -1;
+    }
+
+    if (_fileRecorderPtr->RecordAudioToFile(_audioFrame) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordAudioToFile() file recording"
+                     "failed");
+        return -1;
+    }
+
+    return 0;
+}
+
+// Reads 10 ms of audio from the "play file as microphone" player and either
+// mixes it into the microphone signal (_mixFileWithMicrophone == true) or
+// replaces _audioFrame with it.
+// Returns 0 on success, -1 if no file player exists or reading fails.
+WebRtc_Word32 TransmitMixer::MixOrReplaceAudioWithFile(
+    const int mixingFrequency)
+{
+    // 320 samples covers 10 ms of mono audio at up to 32 kHz.
+    WebRtc_Word16 fileBuffer[320];
+
+    WebRtc_UWord32 fileSamples(0);
+
+    {
+        // Lock only around file-player access; mixing below needs no lock.
+        CriticalSectionScoped cs(_critSect);
+        if (_filePlayerPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, -1),
+                         "TransmitMixer::MixOrReplaceAudioWithFile()"
+                         "fileplayer doesnot exist");
+            return -1;
+        }
+
+        if (_filePlayerPtr->Get10msAudioFromFile(fileBuffer,
+                                                 fileSamples,
+                                                 mixingFrequency) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::MixOrReplaceAudioWithFile() file"
+                         " mixing failed");
+            return -1;
+        }
+    }
+
+    if (_mixFileWithMicrophone)
+    {
+        // Saturating add of the file audio onto the microphone samples.
+        Utility::MixWithSat(_audioFrame._payloadData,
+                             fileBuffer,
+                             (WebRtc_UWord16) fileSamples);
+        assert(_audioFrame._payloadDataLengthInSamples == fileSamples);
+    } else
+    {
+        // replace ACM audio with file
+        _audioFrame.UpdateFrame(-1,
+                                -1,
+                                fileBuffer,
+                                (WebRtc_UWord16) fileSamples, mixingFrequency,
+                                AudioFrame::kNormalSpeech,
+                                AudioFrame::kVadUnknown,
+                                1);
+
+    }
+    return 0;
+}
+
+// Runs the audio processing module (APM) on _audioFrame:
+//  - adapts APM channel count / sample rate if the frame changed,
+//  - feeds render-capture delay, clock drift and analog mic level to APM,
+//  - processes the frame and stores the (possibly AGC-adjusted) level in
+//    _captureLevel,
+//  - raises _saturationWarning for the monitor thread when AGC reports
+//    saturation.
+// APM setup failures are traced as warnings only; always returns 0.
+WebRtc_Word32 TransmitMixer::APMProcessStream(
+    const WebRtc_UWord16 totalDelayMS,
+    const WebRtc_Word32 clockDrift,
+    const WebRtc_UWord16 currentMicLevel)
+{
+    WebRtc_UWord16 captureLevel(currentMicLevel);
+
+    // Check if the number of input channels has changed. Retain the number
+    // of output channels.
+    if (_audioFrame._audioChannel !=
+        _audioProcessingModulePtr->num_input_channels())
+    {
+        if (_audioProcessingModulePtr->set_num_channels(
+                _audioFrame._audioChannel,
+                _audioProcessingModulePtr->num_output_channels()))
+        {
+            // Log the channel count that failed to apply (was incorrectly
+            // logging _frequencyInHz).
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                         "AudioProcessing::set_num_channels(%d, %d) => error",
+                         _audioFrame._audioChannel,
+                         _audioProcessingModulePtr->num_output_channels());
+        }
+    }
+
+    // If the frequency has changed we need to change APM settings
+    // Sending side is "master"
+    if (_audioProcessingModulePtr->sample_rate_hz() !=
+        _audioFrame._frequencyInHz)
+    {
+        if (_audioProcessingModulePtr->set_sample_rate_hz(
+                _audioFrame._frequencyInHz))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                         "AudioProcessing::set_sample_rate_hz(%u) => error",
+                         _audioFrame._frequencyInHz);
+        }
+    }
+
+    if (_audioProcessingModulePtr->set_stream_delay_ms(totalDelayMS) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "AudioProcessing::set_stream_delay_ms(%u) => error",
+                     totalDelayMS);
+    }
+    if (_audioProcessingModulePtr->gain_control()->set_stream_analog_level(
+            captureLevel) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "AudioProcessing::set_stream_analog_level(%u) => error",
+                     captureLevel);
+    }
+    // Drift samples are only meaningful when AEC drift compensation is on.
+    if (_audioProcessingModulePtr->echo_cancellation()->
+            is_drift_compensation_enabled())
+    {
+        if (_audioProcessingModulePtr->echo_cancellation()->
+                set_stream_drift_samples(clockDrift) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                "AudioProcessing::set_stream_drift_samples(%u) => error",
+                clockDrift);
+        }
+    }
+    if (_audioProcessingModulePtr->ProcessStream(&_audioFrame) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "AudioProcessing::ProcessStream() => error");
+    }
+    captureLevel =
+        _audioProcessingModulePtr->gain_control()->stream_analog_level();
+
+    // Store new capture level (only updated when analog AGC is enabled)
+    _captureLevel = captureLevel;
+
+    // Log notifications
+    if (_audioProcessingModulePtr->gain_control()->stream_is_saturated())
+    {
+        if (_saturationWarning == 1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                       "TransmitMixer::APMProcessStream() pending "
+                       "saturation warning exists");
+        }
+        _saturationWarning = 1; // triggers callback from moduleprocess thread
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "TransmitMixer::APMProcessStream() VE_SATURATION_WARNING "
+                   "message has been posted for callback");
+    }
+
+    return 0;
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+// Heuristic keyboard-typing detector: when a key press coincides with the
+// onset of VAD-active audio (fewer than 10 consecutive active frames), a
+// penalty of 100 is accumulated; once the penalty exceeds 300 a typing-
+// noise warning is flagged for the monitor thread. The penalty decays by
+// one per call. Returns 0, or -1 if the key-press state cannot be read.
+int TransmitMixer::TypingDetection()
+{
+    // We let the VAD determine if we're using this feature or not.
+    if (_audioFrame._vadActivity == AudioFrame::kVadUnknown)
+    {
+        return (0);
+    }
+
+    int keyPressed = EventWrapper::KeyPressed();
+
+    if (keyPressed < 0)
+    {
+        return (-1);
+    }
+
+    // Track the length of the current run of VAD-active frames.
+    if (_audioFrame._vadActivity == AudioFrame::kVadActive)
+        _timeActive++;
+    else
+        _timeActive = 0;
+
+    // Key press during early speech activity: likely typing, add penalty.
+    if (keyPressed && (_audioFrame._vadActivity == AudioFrame::kVadActive)
+        && (_timeActive < 10))
+    {
+        _penaltyCounter += 100;
+        if (_penaltyCounter > 300)
+        {
+            if (_typingNoiseWarning == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                           VoEId(_instanceId, -1),
+                           "TransmitMixer::TypingDetection() pending "
+                               "noise-saturation warning exists");
+            }
+            // triggers callback from the module process thread
+            _typingNoiseWarning = 1;
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                       "TransmitMixer::TypingDetection() "
+                       "VE_TYPING_NOISE_WARNING message has been posted for"
+                       "callback");
+        }
+    }
+
+    // Penalty decays by one on every call.
+    if (_penaltyCounter > 0)
+        _penaltyCounter--;
+
+    return (0);
+}
+#endif
+
+// Returns the current mixing frequency in Hz; must have been set (non-zero).
+int TransmitMixer::GetMixingFrequency()
+{
+    assert(_mixingFrequency!=0);
+    return (_mixingFrequency);
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/transmit_mixer.h b/trunk/src/voice_engine/main/source/transmit_mixer.h
new file mode 100644
index 0000000..469b288
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/transmit_mixer.h
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
+#define WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
+
+#include "common_types.h"
+#include "voe_base.h"
+#include "file_player.h"
+#include "file_recorder.h"
+#include "level_indicator.h"
+#include "module_common_types.h"
+#include "monitor_module.h"
+#include "resampler.h"
+#include "voice_engine_defines.h"
+
+
+namespace webrtc {
+
+class AudioProcessing;
+class ProcessThread;
+class VoEExternalMedia;
+class VoEMediaProcess;
+
+namespace voe {
+
+class ChannelManager;
+class MixedAudio;
+class Statistics;
+
+// TransmitMixer handles the capture-side audio path of VoiceEngine: it
+// owns the captured AudioFrame, resamples input to the mixing frequency,
+// supports file play-as-microphone and call/mic recording, and drives the
+// audio processing module (APM) on the transmit direction.
+class TransmitMixer : public MonitorObserver,
+                      public FileCallback
+
+{
+public:
+    static WebRtc_Word32 Create(TransmitMixer*& mixer,
+                                const WebRtc_UWord32 instanceId);
+
+    static void Destroy(TransmitMixer*& mixer);
+
+    WebRtc_Word32 SetEngineInformation(ProcessThread& processThread,
+                                       Statistics& engineStatistics,
+                                       ChannelManager& channelManager);
+
+    WebRtc_Word32 SetAudioProcessingModule(
+        AudioProcessing* audioProcessingModule);
+
+    WebRtc_Word32 PrepareDemux(const WebRtc_Word8* audioSamples,
+                               const WebRtc_UWord32 nSamples,
+                               const WebRtc_UWord8  nChannels,
+                               const WebRtc_UWord32 samplesPerSec,
+                               const WebRtc_UWord16 totalDelayMS,
+                               const WebRtc_Word32  clockDrift,
+                               const WebRtc_UWord16 currentMicLevel);
+
+
+    WebRtc_Word32 DemuxAndMix();
+
+    WebRtc_Word32 EncodeAndSend();
+
+    WebRtc_UWord32 CaptureLevel() const;
+
+    WebRtc_Word32 StopSend();
+
+    // VoEDtmf
+    void UpdateMuteMicrophoneTime(const WebRtc_UWord32 lengthMs);
+
+    // VoEExternalMedia
+    int RegisterExternalMediaProcessing(VoEMediaProcess& proccess_object);
+
+    int DeRegisterExternalMediaProcessing();
+
+    int GetMixingFrequency();
+
+    // VoEVolumeControl
+    int SetMute(const bool enable);
+
+    bool Mute() const;
+
+    WebRtc_Word8 AudioLevel() const;
+
+    WebRtc_Word16 AudioLevelFullRange() const;
+
+    bool IsRecordingCall();
+
+    bool IsRecordingMic();
+
+    int StartPlayingFileAsMicrophone(const char* fileName,
+                                     const bool loop,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+
+    int StartPlayingFileAsMicrophone(InStream* stream,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+
+    int StopPlayingFileAsMicrophone();
+
+    int IsPlayingFileAsMicrophone() const;
+
+    int ScaleFileAsMicrophonePlayout(const float scale);
+
+    int StartRecordingMicrophone(const char* fileName,
+                                 const CodecInst* codecInst);
+
+    int StartRecordingMicrophone(OutStream* stream,
+                                 const CodecInst* codecInst);
+
+    int StopRecordingMicrophone();
+
+    int StartRecordingCall(const char* fileName, const CodecInst* codecInst);
+
+    int StartRecordingCall(OutStream* stream, const CodecInst* codecInst);
+
+    int StopRecordingCall();
+
+    void SetMixWithMicStatus(bool mix);
+
+    WebRtc_Word32 RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
+
+    virtual ~TransmitMixer();
+
+public:	// MonitorObserver
+    void OnPeriodicProcess();
+
+
+public: // FileCallback
+    void PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs);
+
+    void RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs);
+
+    void PlayFileEnded(const WebRtc_Word32 id);
+
+    void RecordFileEnded(const WebRtc_Word32 id);
+
+private:
+    TransmitMixer(const WebRtc_UWord32 instanceId);
+
+private:
+    WebRtc_Word32 GenerateAudioFrame(const WebRtc_Word16 audioSamples[],
+                                     const WebRtc_UWord32 nSamples,
+                                     const WebRtc_UWord8 nChannels,
+                                     const WebRtc_UWord32 samplesPerSec,
+                                     const int mixingFrequency);
+    WebRtc_Word32 RecordAudioToFile(const WebRtc_UWord32 mixingFrequency);
+
+    WebRtc_Word32 MixOrReplaceAudioWithFile(
+        const int mixingFrequency);
+
+    WebRtc_Word32 APMProcessStream(const WebRtc_UWord16 totalDelayMS,
+                                   const WebRtc_Word32 clockDrift,
+                                   const WebRtc_UWord16 currentMicLevel);
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    int TypingDetection();
+#endif
+
+private:  // uses
+    Statistics* _engineStatisticsPtr;
+    ChannelManager* _channelManagerPtr;
+    AudioProcessing* _audioProcessingModulePtr;
+    VoiceEngineObserver* _voiceEngineObserverPtr;
+    ProcessThread* _processThreadPtr;
+
+private:  // owns
+    MonitorModule _monitorModule;
+    AudioFrame _audioFrame;
+    Resampler _audioResampler;		// ADM sample rate -> mixing rate
+    FilePlayer*	_filePlayerPtr;
+    FileRecorder* _fileRecorderPtr;
+    FileRecorder* _fileCallRecorderPtr;
+    int _filePlayerId;
+    int _fileRecorderId;
+    int _fileCallRecorderId;
+    bool _filePlaying;
+    bool _fileRecording;
+    bool _fileCallRecording;
+    voe::AudioLevel _audioLevel;
+    // protect file instances and their variables in MixedParticipants()
+    CriticalSectionWrapper& _critSect;
+    CriticalSectionWrapper& _callbackCritSect;
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    WebRtc_Word32 _timeActive;
+    WebRtc_Word32 _penaltyCounter;
+    WebRtc_UWord32 _typingNoiseWarning;
+#endif
+    WebRtc_UWord32 _saturationWarning;
+    WebRtc_UWord32 _noiseWarning;
+
+private:
+    int _instanceId;
+    bool _mixFileWithMicrophone;
+    WebRtc_UWord32 _captureLevel;
+    bool _externalMedia;
+    VoEMediaProcess* _externalMediaCallbackPtr;
+    bool _mute;
+    WebRtc_Word32 _remainingMuteMicTimeMs;
+    int _mixingFrequency;
+    bool _includeAudioLevelIndication;
+};
+
+}  //  namespace voe
+
+}  // namespace webrtc
+
+// The include guard must close after the namespace braces; closing it
+// earlier would leave stray '}' tokens on a second inclusion.
+#endif // WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
diff --git a/trunk/src/voice_engine/main/source/utility.cc b/trunk/src/voice_engine/main/source/utility.cc
new file mode 100644
index 0000000..6e70156
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/utility.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "utility.h"
+
+#include "module.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+// Adds source[i] into target[i] element-wise, saturating each result to
+// the signed 16-bit range [-32768, 32767]. |len| is the element count.
+void Utility::MixWithSat(WebRtc_Word16 target[],
+                         const WebRtc_Word16 source[],
+                         WebRtc_UWord16 len)
+{
+    // Accumulate in 32 bits so the sum cannot overflow before clamping.
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        temp = source[i] + target[i];
+        if (temp > 32767)
+            target[i] = 32767;
+        else if (temp < -32768)
+            target[i] = -32768;
+        else
+            target[i] = (WebRtc_Word16) temp;
+    }
+}
+
+// Subtracts source[i] from target[i] element-wise, saturating each result
+// to the signed 16-bit range [-32768, 32767]. |len| is the element count.
+void Utility::MixSubtractWithSat(WebRtc_Word16 target[],
+                                 const WebRtc_Word16 source[],
+                                 WebRtc_UWord16 len)
+{
+    // Accumulate in 32 bits so the difference cannot overflow before clamping.
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        temp = target[i] - source[i];
+        if (temp > 32767)
+            target[i] = 32767;
+        else if (temp < -32768)
+            target[i] = -32768;
+        else
+            target[i] = (WebRtc_Word16) temp;
+    }
+}
+
+// Adds scale * source[i] into target[i] element-wise, saturating each
+// result to the signed 16-bit range [-32768, 32767].
+void Utility::MixAndScaleWithSat(WebRtc_Word16 target[],
+                                 const WebRtc_Word16 source[], float scale,
+                                 WebRtc_UWord16 len)
+{
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        // The float product is truncated toward zero by the int cast.
+        temp = (WebRtc_Word32) (target[i] + scale * source[i]);
+        if (temp > 32767)
+            target[i] = 32767;
+        else if (temp < -32768)
+            target[i] = -32768;
+        else
+            target[i] = (WebRtc_Word16) temp;
+    }
+}
+
+// Multiplies each element by |scale| in place WITHOUT saturation; the
+// float result is truncated straight into 16 bits (caller must ensure the
+// scaled values fit). Use ScaleWithSat() when clamping is needed.
+void Utility::Scale(WebRtc_Word16 vector[], float scale, WebRtc_UWord16 len)
+{
+    for (int i = 0; i < len; i++)
+    {
+        vector[i] = (WebRtc_Word16) (scale * vector[i]);
+    }
+}
+
+// Multiplies each element by |scale| in place, saturating each result to
+// the signed 16-bit range [-32768, 32767].
+void Utility::ScaleWithSat(WebRtc_Word16 vector[], float scale,
+                           WebRtc_UWord16 len)
+{
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        temp = (WebRtc_Word32) (scale * vector[i]);
+        if (temp > 32767)
+            vector[i] = 32767;
+        else if (temp < -32768)
+            vector[i] = -32768;
+        else
+            vector[i] = (WebRtc_Word16) temp;
+    }
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/utility.h b/trunk/src/voice_engine/main/source/utility.h
new file mode 100644
index 0000000..084ddf4
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/utility.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Contains functions often used by different parts of VoiceEngine.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_UTILITY_H
+#define WEBRTC_VOICE_ENGINE_UTILITY_H
+
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc
+{
+
+class Module;
+
+namespace voe
+{
+
+// Stateless helper routines shared by VoiceEngine components: mixing and
+// scaling of 16-bit PCM sample buffers, with or without saturation.
+class Utility
+{
+public:
+    // target[i] += source[i], clamped to [-32768, 32767].
+    static void MixWithSat(WebRtc_Word16 target[],
+                           const WebRtc_Word16 source[],
+                           WebRtc_UWord16 len);
+
+    // target[i] -= source[i], clamped to [-32768, 32767].
+    static void MixSubtractWithSat(WebRtc_Word16 target[],
+                                   const WebRtc_Word16 source[],
+                                   WebRtc_UWord16 len);
+
+    // target[i] += scale * source[i], clamped to [-32768, 32767].
+    static void MixAndScaleWithSat(WebRtc_Word16 target[],
+                                   const WebRtc_Word16 source[],
+                                   float scale,
+                                   WebRtc_UWord16 len);
+
+    // vector[i] *= scale, no clamping (caller must ensure range).
+    static void Scale(WebRtc_Word16 vector[], float scale, WebRtc_UWord16 len);
+
+    // vector[i] *= scale, clamped to [-32768, 32767].
+    static void ScaleWithSat(WebRtc_Word16 vector[],
+                             float scale,
+                             WebRtc_UWord16 len);
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_UTILITY_H
diff --git a/trunk/src/voice_engine/main/source/voe_audio_processing_impl.cc b/trunk/src/voice_engine/main/source/voe_audio_processing_impl.cc
new file mode 100644
index 0000000..95800f4
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_audio_processing_impl.cc
@@ -0,0 +1,1176 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_audio_processing_impl.h"
+
+#include "audio_processing.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEAudioProcessing* VoEAudioProcessing::GetInterface(
+    VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEAudioProcessingImpl* d = s;
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+VoEAudioProcessingImpl::VoEAudioProcessingImpl():
+    _isAecMode(WEBRTC_VOICE_ENGINE_EC_DEFAULT_MODE == EcAec?
+        true : false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEAudioProcessingImpl::VoEAudioProcessingImpl() - ctor");
+}
+
+VoEAudioProcessingImpl::~VoEAudioProcessingImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEAudioProcessingImpl::~VoEAudioProcessingImpl() - dtor");
+}
+
+int VoEAudioProcessingImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEAudioProcessing::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();  // reset reference counter to zero => OK to delete VE
+        _engineStatistics.SetLastError(
+            VE_INTERFACE_NOT_FOUND, kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEAudioProcessing reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoEAudioProcessingImpl::SetNsStatus(bool enable, NsModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetNsStatus(enable=%d, mode=%d)", enable, mode);
+#ifdef WEBRTC_VOICE_ENGINE_NR
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    NoiseSuppression::Level nsLevel(
+        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE);
+    switch (mode)
+    {
+    case kNsDefault:
+        nsLevel = (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE;
+        break;
+    case kNsUnchanged:
+        nsLevel = _audioProcessingModulePtr->noise_suppression()->level();
+        break;
+    case kNsConference:
+        nsLevel = NoiseSuppression::kHigh;
+        break;
+    case kNsLowSuppression:
+        nsLevel = NoiseSuppression::kLow;
+        break;
+    case kNsModerateSuppression:
+        nsLevel = NoiseSuppression::kModerate;
+        break;
+    case kNsHighSuppression:
+        nsLevel = NoiseSuppression::kHigh;
+        break;
+    case kNsVeryHighSuppression:
+        nsLevel = NoiseSuppression::kVeryHigh;
+        break;
+    }
+
+    if (_audioProcessingModulePtr->noise_suppression()->set_level(nsLevel) != 0)
+    {
+        _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                                       "SetNsStatus() failed to set Ns mode");
+        return -1;
+    }
+    if (_audioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetNsStatus() failed to set Ns state");
+        return -1;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetNsStatus() Ns is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetNsStatus(bool& enabled, NsModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetNsStatus(enabled=?, mode=?)");
+#ifdef WEBRTC_VOICE_ENGINE_NR
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool enable(false);
+    NoiseSuppression::Level nsLevel(
+        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE);
+
+    enable = _audioProcessingModulePtr->noise_suppression()->is_enabled();
+    nsLevel = _audioProcessingModulePtr->noise_suppression()->level();
+
+    enabled = enable;
+
+    switch (nsLevel)
+    {
+        case NoiseSuppression::kLow:
+            mode = kNsLowSuppression;
+            break;
+        case NoiseSuppression::kModerate:
+            mode = kNsModerateSuppression;
+            break;
+        case NoiseSuppression::kHigh:
+            mode = kNsHighSuppression;
+            break;
+        case NoiseSuppression::kVeryHigh:
+        mode = kNsVeryHighSuppression;
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetNsStatus() => enabled=%d, mode=%d", enabled, mode);
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetNsStatus() Ns is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetAgcStatus(bool enable, AgcModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetAgcStatus(enable=%d, mode=%d)", enable, mode);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+#if defined(MAC_IPHONE) || defined(ATA) || defined(WEBRTC_ANDROID)
+    if (mode == kAgcAdaptiveAnalog)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetAgcStatus() invalid Agc mode for mobile device");
+        return -1;
+    }
+#endif
+
+    GainControl::Mode agcMode(
+        (GainControl::Mode)WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE);
+    switch (mode)
+    {
+    case kAgcDefault:
+        agcMode = (GainControl::Mode)WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE;
+        break;
+    case kAgcUnchanged:
+        agcMode = _audioProcessingModulePtr->gain_control()->mode();
+        break;
+    case kAgcFixedDigital:
+        agcMode = GainControl::kFixedDigital;
+        break;
+    case kAgcAdaptiveAnalog:
+        agcMode = GainControl::kAdaptiveAnalog;
+        break;
+    case kAgcAdaptiveDigital:
+        agcMode = GainControl::kAdaptiveDigital;
+        break;
+    }
+
+    if (_audioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAgcStatus() failed to set Agc mode");
+        return -1;
+    }
+    if (_audioProcessingModulePtr->gain_control()->Enable(enable) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAgcStatus() failed to set Agc state");
+        return -1;
+    }
+
+    if (agcMode != GainControl::kFixedDigital)
+    {
+        // Set Agc state in the ADM when adaptive Agc mode has been selected.
+        // Note that we also enable the ADM Agc when Adaptive Digital mode is
+        // used since we want to be able to provide the APM with updated mic
+        // levels when the user modifies the mic level manually.
+        if (_audioDevicePtr->SetAGC(enable) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+                "SetAgcStatus() failed to set Agc mode");
+        }
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAgcStatus() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetAgcStatus(bool& enabled, AgcModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetAgcStatus(enabled=?, mode=?)");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool enable(false);
+    GainControl::Mode agcMode(
+        (GainControl::Mode)WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE);
+
+    enable = _audioProcessingModulePtr->gain_control()->is_enabled();
+    agcMode = _audioProcessingModulePtr->gain_control()->mode();
+
+    enabled = enable;
+
+    switch (agcMode)
+    {
+        case GainControl::kFixedDigital:
+            mode = kAgcFixedDigital;
+            break;
+        case GainControl::kAdaptiveAnalog:
+            mode = kAgcAdaptiveAnalog;
+            break;
+        case GainControl::kAdaptiveDigital:
+            mode = kAgcAdaptiveDigital;
+            break;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetAgcStatus() => enabled=%d, mode=%d", enabled, mode);
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetAgcStatus() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetAgcConfig(const AgcConfig config)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetAgcConfig()");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_audioProcessingModulePtr->gain_control()->set_target_level_dbfs(
+                    config.targetLeveldBOv) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAgcConfig() failed to set target peak |level|"
+            " (or envelope) of the Agc");
+        return -1;
+    }
+    if (_audioProcessingModulePtr->gain_control()->set_compression_gain_db(
+        config.digitalCompressionGaindB) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAgcConfig() failed to set the range in |gain|"
+            "the digital compression stage may apply");
+        return -1;
+    }
+    if (_audioProcessingModulePtr->gain_control()->enable_limiter(
+        config.limiterEnable) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAgcConfig() failed to set hard limiter to the signal");
+        return -1;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAgcConfig() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig &config)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetAgcConfig(config=?)");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    config.targetLeveldBOv =
+        _audioProcessingModulePtr->gain_control()->target_level_dbfs();
+    config.digitalCompressionGaindB =
+        _audioProcessingModulePtr->gain_control()->compression_gain_db();
+    config.limiterEnable =
+        _audioProcessingModulePtr->gain_control()->is_limiter_enabled();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetAgcConfig() => targetLeveldBOv=%u, "
+               "digitalCompressionGaindB=%u, limiterEnable=%d",
+               config.targetLeveldBOv,
+               config.digitalCompressionGaindB,
+               config.limiterEnable);
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetAgcConfig() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
+                                          bool enable,
+                                          NsModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRxNsStatus(channel=%d, enable=%d, mode=%d)",
+                 channel, (int)enable, (int)mode);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRxNsStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRxNsStatus(enable, mode);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetRxNsStatus() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
+                                          bool& enabled,
+                                          NsModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRxNsStatus(channel=%d, enable=?, mode=?)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRxNsStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRxNsStatus(enabled, mode);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetRxNsStatus() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetRxAgcStatus(int channel,
+                                           bool enable,
+                                           AgcModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRxAgcStatus(channel=%d, enable=%d, mode=%d)",
+                 channel, (int)enable, (int)mode);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRxAgcStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRxAgcStatus(enable, mode);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetRxAgcStatus() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetRxAgcStatus(int channel,
+                                           bool& enabled,
+                                           AgcModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRxAgcStatus(channel=%d, enable=?, mode=?)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRxAgcStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRxAgcStatus(enabled, mode);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetRxAgcStatus() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetRxAgcConfig(int channel, const AgcConfig config)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRxAgcConfig(channel=%d)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRxAgcConfig() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRxAgcConfig(config);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetRxAgcConfig() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRxAgcConfig(channel=%d)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRxAgcConfig() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRxAgcConfig(config);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetRxAgcConfig() Agc is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetEcStatus(bool enable, EcModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetEcStatus(enable=%d, mode=%d)", enable, mode);
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // AEC mode
+    if ((mode == kEcDefault) ||
+        (mode == kEcConference) ||
+        (mode == kEcAec) ||
+        ((mode == kEcUnchanged) &&
+        (_isAecMode == true)))
+    {
+        if (enable)
+        {
+            // Disable the AECM before enable the AEC
+            if (_audioProcessingModulePtr->echo_control_mobile()->is_enabled())
+            {
+                _engineStatistics.SetLastError(
+                    VE_APM_ERROR, kTraceWarning,
+                    "SetEcStatus() disable AECM before enabling AEC");
+                if (_audioProcessingModulePtr->echo_control_mobile()->
+                    Enable(false) != 0)
+                {
+                    _engineStatistics.SetLastError(
+                        VE_APM_ERROR, kTraceError,
+                        "SetEcStatus() failed to disable AECM");
+                    return -1;
+                }
+            }
+        }
+        if (_audioProcessingModulePtr->echo_cancellation()->Enable(enable) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_APM_ERROR, kTraceError,
+                "SetEcStatus() failed to set AEC state");
+            return -1;
+        }
+#ifdef CLOCK_SKEW_COMP
+        if (_audioProcessingModulePtr->echo_cancellation()->
+            enable_drift_compensation(true) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_APM_ERROR, kTraceError,
+                "SetEcStatus() failed to enable drift compensation");
+            return -1;
+        }
+#else
+        if (_audioProcessingModulePtr->echo_cancellation()->
+            enable_drift_compensation(false) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_APM_ERROR, kTraceError,
+                "SetEcStatus() failed to disable drift compensation");
+            return -1;
+        }
+#endif
+        if (mode == kEcConference)
+        {
+            if (_audioProcessingModulePtr->echo_cancellation()->
+                set_suppression_level(EchoCancellation::kHighSuppression) != 0)
+            {
+                _engineStatistics.SetLastError(
+                    VE_APM_ERROR, kTraceError,
+                    "SetEcStatus() failed to set aggressiveness to high");
+                return -1;
+            }
+        }
+        else
+        {
+           if (_audioProcessingModulePtr->echo_cancellation()->
+               set_suppression_level(
+                   EchoCancellation::kModerateSuppression) != 0)
+           {
+                _engineStatistics.SetLastError(
+                    VE_APM_ERROR, kTraceError,
+                    "SetEcStatus() failed to set aggressiveness to moderate");
+                return -1;
+           }
+        }
+
+        _isAecMode = true;
+    }
+    else if ((mode == kEcAecm) ||
+            ((mode == kEcUnchanged) &&
+            (_isAecMode == false)))
+    {
+        if (enable)
+        {
+            // Disable the AEC before enable the AECM
+            if (_audioProcessingModulePtr->echo_cancellation()->is_enabled())
+            {
+                _engineStatistics.SetLastError(
+                    VE_APM_ERROR, kTraceWarning,
+                    "SetEcStatus() disable AEC before enabling AECM");
+                if (_audioProcessingModulePtr->echo_cancellation()->
+                    Enable(false) != 0)
+                {
+                    _engineStatistics.SetLastError(
+                        VE_APM_ERROR, kTraceError,
+                        "SetEcStatus() failed to disable AEC");
+                    return -1;
+                }
+            }
+        }
+        if (_audioProcessingModulePtr->echo_control_mobile()->
+            Enable(enable) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_APM_ERROR, kTraceError,
+                "SetEcStatus() failed to set AECM state");
+            return -1;
+        }
+        _isAecMode = false;
+    }
+    else
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetEcStatus() invalid EC mode");
+        return -1;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetEcStatus() EC is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEcStatus(bool& enabled, EcModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetEcStatus()");
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_isAecMode == true)
+    {
+        mode = kEcAec;
+        enabled = _audioProcessingModulePtr->echo_cancellation()->is_enabled();
+    }
+    else
+    {
+        mode = kEcAecm;
+        enabled = _audioProcessingModulePtr->echo_control_mobile()->
+            is_enabled();
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetEcStatus() => enabled=%i, mode=%i",
+                 enabled, (int)mode);
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetEcStatus() EC is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetAecmMode(AecmModes mode, bool enableCNG)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetAECMMode(mode = %d)", mode);
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    EchoControlMobile::RoutingMode aecmMode(
+        EchoControlMobile::kQuietEarpieceOrHeadset);
+
+    switch (mode)
+    {
+        case kAecmQuietEarpieceOrHeadset:
+            aecmMode = EchoControlMobile::kQuietEarpieceOrHeadset;
+            break;
+        case kAecmEarpiece:
+            aecmMode = EchoControlMobile::kEarpiece;
+            break;
+        case kAecmLoudEarpiece:
+            aecmMode = EchoControlMobile::kLoudEarpiece;
+            break;
+        case kAecmSpeakerphone:
+            aecmMode = EchoControlMobile::kSpeakerphone;
+            break;
+        case kAecmLoudSpeakerphone:
+            aecmMode = EchoControlMobile::kLoudSpeakerphone;
+            break;
+    }
+
+
+    if (_audioProcessingModulePtr->echo_control_mobile()->
+        set_routing_mode(aecmMode) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAECMMode() failed to set AECM routing mode");
+        return -1;
+    }
+    if (_audioProcessingModulePtr->echo_control_mobile()->
+        enable_comfort_noise(enableCNG) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetAECMMode() failed to set comfort noise state for AECM");
+        return -1;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAECMMode() EC is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetAecmMode(AecmModes& mode, bool& enabledCNG)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetAECMMode(mode=?)");
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    enabledCNG = false;
+
+    EchoControlMobile::RoutingMode aecmMode =
+        _audioProcessingModulePtr->echo_control_mobile()->routing_mode();
+    enabledCNG = _audioProcessingModulePtr->echo_control_mobile()->
+        is_comfort_noise_enabled();
+
+    switch (aecmMode)
+    {
+        case EchoControlMobile::kQuietEarpieceOrHeadset:
+            mode = kAecmQuietEarpieceOrHeadset;
+            break;
+        case EchoControlMobile::kEarpiece:
+            mode = kAecmEarpiece;
+            break;
+        case EchoControlMobile::kLoudEarpiece:
+            mode = kAecmLoudEarpiece;
+            break;
+        case EchoControlMobile::kSpeakerphone:
+            mode = kAecmSpeakerphone;
+            break;
+        case EchoControlMobile::kLoudSpeakerphone:
+            mode = kAecmLoudSpeakerphone;
+            break;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetAECMMode() EC is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::RegisterRxVadObserver(
+    int channel,
+    VoERxVadCallback &observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RegisterRxVadObserver()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterRxVadObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterRxVadObserver(observer);
+}
+
+int VoEAudioProcessingImpl::DeRegisterRxVadObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "DeRegisterRxVadObserver()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterRxVadObserver() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->DeRegisterRxVadObserver();
+}
+
+int VoEAudioProcessingImpl::VoiceActivityIndicator(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoiceActivityIndicator(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "VoiceActivityIndicator() failed to locate channel");
+        return -1;
+    }
+    int activity(-1);
+    channelPtr->VoiceActivityIndicator(activity);
+
+    return activity;
+}
+
+int VoEAudioProcessingImpl::SetEcMetricsStatus(bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+               "SetEcMetricsStatus(enable=%d)", enable);
+  ANDROID_NOT_SUPPORTED(_engineStatistics);
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_engineStatistics.Initialized()) {
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  if ((_audioProcessingModulePtr->echo_cancellation()->enable_metrics(enable)
+      != 0) ||
+      (_audioProcessingModulePtr->echo_cancellation()->enable_delay_logging(
+          enable) != 0)) {
+    _engineStatistics.SetLastError(
+        VE_APM_ERROR, kTraceError,
+        "SetEcMetricsStatus() unable to set EC metrics mode");
+    return -1;
+  }
+  return 0;
+#else
+  _engineStatistics.SetLastError(
+      VE_FUNC_NOT_SUPPORTED, kTraceError,
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+               "GetEcMetricsStatus(enabled=?)");
+  ANDROID_NOT_SUPPORTED(_engineStatistics);
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_engineStatistics.Initialized()) {
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  bool echo_mode =
+    _audioProcessingModulePtr->echo_cancellation()->are_metrics_enabled();
+  bool delay_mode =
+    _audioProcessingModulePtr->echo_cancellation()->is_delay_logging_enabled();
+
+  if (echo_mode != delay_mode) {
+    _engineStatistics.SetLastError(
+        VE_APM_ERROR, kTraceError,
+        "GetEcMetricsStatus() delay logging and echo mode are not the same");
+    return -1;
+  }
+
+  enabled = echo_mode;
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "GetEcMetricsStatus() => enabled=%d", enabled);
+  return 0;
+#else
+  _engineStatistics.SetLastError(
+      VE_FUNC_NOT_SUPPORTED, kTraceError,
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL,
+                                           int& ERLE,
+                                           int& RERL,
+                                           int& A_NLP) {  // Fetches instant echo-quality metrics from the APM AEC.
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+               "GetEchoMetrics(ERL=?, ERLE=?, RERL=?, A_NLP=?)");
+  ANDROID_NOT_SUPPORTED(_engineStatistics);
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_engineStatistics.Initialized()) {
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  if (!_audioProcessingModulePtr->echo_cancellation()->is_enabled()) {  // Metrics require an active AEC.
+    _engineStatistics.SetLastError(
+        VE_APM_ERROR, kTraceWarning,
+        "GetEchoMetrics() AudioProcessingModule AEC is not enabled");
+    return -1;
+  }
+
+  // Get Echo Metrics from Audio Processing Module.
+  EchoCancellation::Metrics echoMetrics;
+  if (_audioProcessingModulePtr->echo_cancellation()->GetMetrics(&echoMetrics))
+  {
+    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetEchoMetrics(), AudioProcessingModule metrics error");
+    return -1;
+  }
+
+  // Echo quality metrics (instantaneous values only).
+  ERL = echoMetrics.echo_return_loss.instant;
+  ERLE = echoMetrics.echo_return_loss_enhancement.instant;
+  RERL = echoMetrics.residual_echo_return_loss.instant;
+  A_NLP = echoMetrics.a_nlp.instant;
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetEchoMetrics() => ERL=%d, ERLE=%d, RERL=%d, A_NLP=%d",
+               ERL, ERLE, RERL, A_NLP);
+  return 0;
+#else
+  _engineStatistics.SetLastError(  // Fixed copy-pasted message that named SetEcStatus().
+      VE_FUNC_NOT_SUPPORTED, kTraceError, "GetEchoMetrics() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median,
+                                              int& delay_std) {  // Fetches AEC delay-logging statistics (median, std dev).
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+               "GetEcDelayMetrics(median=?, std=?)");
+  ANDROID_NOT_SUPPORTED(_engineStatistics);
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_engineStatistics.Initialized()) {
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  if (!_audioProcessingModulePtr->echo_cancellation()->is_enabled()) {  // Delay logging requires an active AEC.
+    _engineStatistics.SetLastError(
+        VE_APM_ERROR, kTraceWarning,
+        "GetEcDelayMetrics() AudioProcessingModule AEC is not enabled");
+    return -1;
+  }
+
+  int median = 0;
+  int std = 0;
+  // Get delay-logging values from Audio Processing Module.
+  if (_audioProcessingModulePtr->echo_cancellation()->GetDelayMetrics(
+      &median, &std)) {
+    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetEcDelayMetrics(), AudioProcessingModule delay-logging "
+                 "error");
+    return -1;
+  }
+
+  // EC delay-logging metrics: copy out only after a successful query.
+  delay_median = median;
+  delay_std = std;
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetEcDelayMetrics() => delay_median=%d, delay_std=%d",
+               delay_median, delay_std);
+  return 0;
+#else
+  _engineStatistics.SetLastError(  // Fixed copy-pasted message that named SetEcStatus().
+      VE_FUNC_NOT_SUPPORTED, kTraceError, "GetEcDelayMetrics() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::StartDebugRecording(const char* fileNameUTF8)  // Starts an APM debug dump to the given UTF-8 file path.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "StartDebugRecording()");
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    return _audioProcessingModulePtr->StartDebugRecording(fileNameUTF8);  // Delegates directly to the APM; returns its result code.
+
+}
+
+int VoEAudioProcessingImpl::StopDebugRecording()  // Stops a previously started APM debug dump.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopDebugRecording()");
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    return _audioProcessingModulePtr->StopDebugRecording();  // Delegates directly to the APM; returns its result code.
+}
+
+int VoEAudioProcessingImpl::SetTypingDetectionStatus(bool enable)  // Enables/disables typing detection by driving the APM VAD.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetTypingDetectionStatus()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Just use the VAD state to determine if we should enable typing detection
+    // or not
+
+    if (_audioProcessingModulePtr->voice_detection()->Enable(enable))
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "SetTypingDetectionStatus() failed to set VAD state");
+        return -1;
+    }
+    if (_audioProcessingModulePtr->voice_detection()->set_likelihood(  // Low likelihood => VAD triggers easily, favoring detection.
+        VoiceDetection::kVeryLowLikelihood))
+    {
+        _engineStatistics.SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "SetTypingDetectionStatus() failed to set VAD likelihood to low");
+        return -1;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetTypingDetectionStatus is not supported");
+    return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled)  // Reports typing-detection state via the APM VAD flag.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetTypingDetectionStatus()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Just use the VAD state to determine if we should enable typing
+    // detection or not
+
+    enabled = _audioProcessingModulePtr->voice_detection()->is_enabled();
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetTypingDetectionStatus is not supported");
+    return -1;
+#endif
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+
+}   // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_audio_processing_impl.h b/trunk/src/voice_engine/main/source/voe_audio_processing_impl.h
new file mode 100644
index 0000000..277731e
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_audio_processing_impl.h
@@ -0,0 +1,100 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_IMPL_H
+
+#include "voe_audio_processing.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+
+namespace webrtc {
+
+class VoEAudioProcessingImpl : public virtual voe::SharedData,  // Implements the VoEAudioProcessing sub-API (NS/AGC/EC/VAD control).
+                               public VoEAudioProcessing,
+                               public voe::RefCount
+{
+public:
+    virtual int Release();  // Decrements the interface reference count.
+
+    virtual int SetNsStatus(bool enable, NsModes mode = kNsUnchanged);  // Noise suppression (send side).
+
+    virtual int GetNsStatus(bool& enabled, NsModes& mode);
+
+    virtual int SetAgcStatus(bool enable, AgcModes mode = kAgcUnchanged);  // Automatic gain control (send side).
+
+    virtual int GetAgcStatus(bool& enabled, AgcModes& mode);
+
+    virtual int SetAgcConfig(const AgcConfig config);
+
+    virtual int GetAgcConfig(AgcConfig& config);
+
+    virtual int SetRxNsStatus(int channel,  // Per-channel receive-side NS.
+                              bool enable,
+                              NsModes mode = kNsUnchanged);
+
+    virtual int GetRxNsStatus(int channel, bool& enabled, NsModes& mode);
+
+    virtual int SetRxAgcStatus(int channel,  // Per-channel receive-side AGC.
+                               bool enable,
+                               AgcModes mode = kAgcUnchanged);
+
+    virtual int GetRxAgcStatus(int channel, bool& enabled, AgcModes& mode);
+
+    virtual int SetRxAgcConfig(int channel, const AgcConfig config);
+
+    virtual int GetRxAgcConfig(int channel, AgcConfig& config);
+
+    virtual int SetEcStatus(bool enable, EcModes mode = kEcUnchanged);  // Echo cancellation control.
+
+    virtual int GetEcStatus(bool& enabled, EcModes& mode);
+
+    virtual int SetAecmMode(AecmModes mode = kAecmSpeakerphone,  // Mobile AEC (AECM) configuration.
+                            bool enableCNG = true);
+
+    virtual int GetAecmMode(AecmModes& mode, bool& enabledCNG);
+
+    virtual int RegisterRxVadObserver(int channel,  // Receive-side VAD observation callbacks.
+                                      VoERxVadCallback& observer);
+
+    virtual int DeRegisterRxVadObserver(int channel);
+
+    virtual int VoiceActivityIndicator(int channel);
+
+    virtual int SetEcMetricsStatus(bool enable);  // Toggles EC metrics and delay logging together.
+
+    virtual int GetEcMetricsStatus(bool& enabled);
+
+    virtual int GetEchoMetrics(int& ERL, int& ERLE, int& RERL, int& A_NLP);
+
+    virtual int GetEcDelayMetrics(int& delay_median, int& delay_std);
+
+    virtual int StartDebugRecording(const char* fileNameUTF8);  // APM debug dump to file.
+
+    virtual int StopDebugRecording();
+
+    virtual int SetTypingDetectionStatus(bool enable);
+
+    virtual int GetTypingDetectionStatus(bool& enabled);
+
+protected:
+    VoEAudioProcessingImpl();  // Construction only via the owning VoiceEngineImpl.
+    virtual ~VoEAudioProcessingImpl();
+
+private:
+    bool _isAecMode;  // NOTE(review): presumably tracks whether full AEC (vs AECM) is active -- confirm in .cc.
+};
+
+}   //  namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_IMPL_H
+
diff --git a/trunk/src/voice_engine/main/source/voe_base_impl.cc b/trunk/src/voice_engine/main/source/voe_base_impl.cc
new file mode 100644
index 0000000..cdc0ee3
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_base_impl.cc
@@ -0,0 +1,1679 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_base_impl.h"
+
+#include "audio_coding_module.h"
+#include "audio_device_impl.h"
+#include "audio_processing.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "output_mixer.h"
+#include "signal_processing_library.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "utility.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+#if (defined(_WIN32) && defined(_DLL) && (_MSC_VER == 1400))
+// Fix for VS 2005 MD/MDd link problem
+#include <stdio.h>
+extern "C"
+    { FILE _iob[3] = {   __iob_func()[0], __iob_func()[1], __iob_func()[2]}; }
+#endif
+
+namespace webrtc
+{
+
+VoEBase* VoEBase::GetInterface(VoiceEngine* voiceEngine)  // Returns the VoEBase sub-API of |voiceEngine|; NULL if input is NULL.
+{
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEBaseImpl* d = s;
+    (*d)++;  // Bump the interface reference count; caller must Release().
+    return (d);
+}
+
+VoEBaseImpl::VoEBaseImpl() :  // Ctor: allocates the callback lock; observer starts unregistered.
+    _voiceEngineObserverPtr(NULL),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _voiceEngineObserver(false), _oldVoEMicLevel(0), _oldMicLevel(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl() - ctor");
+}
+
+VoEBaseImpl::~VoEBaseImpl()  // Dtor: tears down engine state, then frees the lock created in the ctor.
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "~VoEBaseImpl() - dtor");
+
+    TerminateInternal();
+
+    delete &_callbackCritSect;  // Owned by-reference member; allocated in the ctor init list.
+}
+
+int VoEBaseImpl::Release()  // Decrements the ref count; returns remaining count, or -1 on underflow.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)  // More Release() calls than GetInterface() calls.
+    {
+        Reset();  // Reset the reference counter to zero.
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND, kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl reference counter = %d", refCount);
+    return (refCount);
+}
+
+void VoEBaseImpl::OnErrorIsReported(const ErrorCode error)  // ADM callback: map device errors to VoE codes and notify observer.
+{
+    CriticalSectionScoped cs(_callbackCritSect);  // Guard observer registration state.
+    if (_voiceEngineObserver)
+    {
+        if (_voiceEngineObserverPtr)
+        {
+            int errCode(0);  // Unrecognized error codes are forwarded as 0.
+            if (error == AudioDeviceObserver::kRecordingError)
+            {
+                errCode = VE_RUNTIME_REC_ERROR;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                             "VoEBaseImpl::OnErrorIsReported() => "
+                                 "VE_RUNTIME_REC_ERROR");
+            }
+            else if (error == AudioDeviceObserver::kPlayoutError)
+            {
+                errCode = VE_RUNTIME_PLAY_ERROR;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                             "VoEBaseImpl::OnErrorIsReported() => "
+                                 "VE_RUNTIME_PLAY_ERROR");
+            }
+            // Deliver callback (-1 <=> no channel dependency)
+            _voiceEngineObserverPtr->CallbackOnError(-1, errCode);
+        }
+    }
+}
+
+void VoEBaseImpl::OnWarningIsReported(const WarningCode warning)  // ADM callback: map device warnings to VoE codes and notify observer.
+{
+    CriticalSectionScoped cs(_callbackCritSect);  // Guard observer registration state.
+    if (_voiceEngineObserver)
+    {
+        if (_voiceEngineObserverPtr)
+        {
+            int warningCode(0);  // Unrecognized warning codes are forwarded as 0.
+            if (warning == AudioDeviceObserver::kRecordingWarning)
+            {
+                warningCode = VE_RUNTIME_REC_WARNING;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                             "VoEBaseImpl::OnWarningIsReported() => "
+                                 "VE_RUNTIME_REC_WARNING");
+            }
+            else if (warning == AudioDeviceObserver::kPlayoutWarning)
+            {
+                warningCode = VE_RUNTIME_PLAY_WARNING;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                             "VoEBaseImpl::OnWarningIsReported() => "
+                                 "VE_RUNTIME_PLAY_WARNING");
+            }
+            // Deliver callback (-1 <=> no channel dependency)
+            _voiceEngineObserverPtr->CallbackOnError(-1, warningCode);
+        }
+    }
+}
+
+WebRtc_Word32 VoEBaseImpl::RecordedDataIsAvailable(
+        const WebRtc_Word8* audioSamples,
+        const WebRtc_UWord32 nSamples,
+        const WebRtc_UWord8 nBytesPerSample,
+        const WebRtc_UWord8 nChannels,
+        const WebRtc_UWord32 samplesPerSec,
+        const WebRtc_UWord32 totalDelayMS,
+        const WebRtc_Word32 clockDrift,
+        const WebRtc_UWord32 currentMicLevel,
+        WebRtc_UWord32& newMicLevel)  // ADM capture callback: feed samples to the transmit path; returns new mic level (0 = unchanged).
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::RecordedDataIsAvailable(nSamples=%u, "
+                     "nBytesPerSample=%u, nChannels=%u, samplesPerSec=%u, "
+                     "totalDelayMS=%u, clockDrift=%d, currentMicLevel=%u)",
+                 nSamples, nBytesPerSample, nChannels, samplesPerSec,
+                 totalDelayMS, clockDrift, currentMicLevel);
+
+    assert(_transmitMixerPtr != NULL);
+    assert(_audioDevicePtr != NULL);
+
+    bool isAnalogAGC(false);
+    WebRtc_UWord32 maxVolume(0);
+    WebRtc_UWord16 currentVoEMicLevel(0);  // Mic level rescaled to VoE's [0, kMaxVolumeLevel] range.
+    WebRtc_UWord32 newVoEMicLevel(0);
+
+    if (_audioProcessingModulePtr
+            && (_audioProcessingModulePtr->gain_control()->mode()
+                    == GainControl::kAdaptiveAnalog))
+    {
+        isAnalogAGC = true;
+    }
+
+    // Will only deal with the volume in adaptive analog mode
+    if (isAnalogAGC)
+    {
+        // Scale from ADM to VoE level range
+        if (_audioDevicePtr->MaxMicrophoneVolume(&maxVolume) == 0)
+        {
+            if (0 != maxVolume)
+            {
+                currentVoEMicLevel = (WebRtc_UWord16) ((currentMicLevel  // +maxVolume/2 rounds to nearest.
+                        * kMaxVolumeLevel + (int) (maxVolume / 2))
+                        / (maxVolume));
+            }
+        }
+        // We learned that on certain systems (e.g Linux) the currentVoEMicLevel
+        // can be greater than the maxVolumeLevel therefore
+        // we are going to cap the currentVoEMicLevel to the maxVolumeLevel
+        // and change the maxVolume to currentMicLevel if it turns out that
+        // the currentVoEMicLevel is indeed greater than the maxVolumeLevel.
+        if (currentVoEMicLevel > kMaxVolumeLevel)
+        {
+            currentVoEMicLevel = kMaxVolumeLevel;
+            maxVolume = currentMicLevel;
+        }
+    }
+
+    // Keep track if the MicLevel has been changed by the AGC, if not,
+    // use the old value AGC returns to let AGC continue its trend,
+    // so eventually the AGC is able to change the mic level. This handles
+    // issues with truncation introduced by the scaling.
+    if (_oldMicLevel == currentMicLevel)
+    {
+        currentVoEMicLevel = (WebRtc_UWord16) _oldVoEMicLevel;
+    }
+
+    // Perform channel-independent operations
+    // (APM, mix with file, record to file, mute, etc.)
+    _transmitMixerPtr->PrepareDemux(audioSamples, nSamples, nChannels,
+                                    samplesPerSec,
+                                    (WebRtc_UWord16) totalDelayMS, clockDrift,
+                                    currentVoEMicLevel);
+
+    // Copy the audio frame to each sending channel and perform
+    // channel-dependent operations (file mixing, mute, etc.) to prepare
+    // for encoding.
+    _transmitMixerPtr->DemuxAndMix();
+    // Do the encoding and packetize+transmit the RTP packet when encoding
+    // is done.
+    _transmitMixerPtr->EncodeAndSend();
+
+    // Will only deal with the volume in adaptive analog mode
+    if (isAnalogAGC)
+    {
+        // Scale from VoE to ADM level range
+        newVoEMicLevel = _transmitMixerPtr->CaptureLevel();
+        if (newVoEMicLevel != currentVoEMicLevel)
+        {
+            // Add (kMaxVolumeLevel/2) to round the value
+            newMicLevel = (WebRtc_UWord32) ((newVoEMicLevel * maxVolume
+                    + (int) (kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
+        }
+        else
+        {
+            // Pass zero if the level is unchanged
+            newMicLevel = 0;
+        }
+
+        // Keep track of the value AGC returns
+        _oldVoEMicLevel = newVoEMicLevel;
+        _oldMicLevel = currentMicLevel;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::NeedMorePlayData(
+        const WebRtc_UWord32 nSamples,
+        const WebRtc_UWord8 nBytesPerSample,
+        const WebRtc_UWord8 nChannels,
+        const WebRtc_UWord32 samplesPerSec,
+        WebRtc_Word8* audioSamples,
+        WebRtc_UWord32& nSamplesOut)  // ADM playout callback: mix channels and copy PCM into |audioSamples|.
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::NeedMorePlayData(nSamples=%u, "
+                     "nBytesPerSample=%d, nChannels=%d, samplesPerSec=%u)",
+                 nSamples, nBytesPerSample, nChannels, samplesPerSec);
+
+    assert(_outputMixerPtr != NULL);
+
+    // Perform mixing of all active participants (channel-based mixing)
+    _outputMixerPtr->MixActiveChannels();
+
+    // Additional operations on the combined signal
+    _outputMixerPtr->DoOperationsOnCombinedSignal();
+
+    // Retrieve the final output mix (resampled to match the ADM)
+    _outputMixerPtr->GetMixedAudio(samplesPerSec, nChannels, _audioFrame);
+
+    assert(nSamples == _audioFrame._payloadDataLengthInSamples);  // Mixer output must match what the ADM asked for.
+    assert(samplesPerSec ==
+        static_cast<WebRtc_UWord32>(_audioFrame._frequencyInHz));
+
+    // Deliver audio (PCM) samples to the ADM
+    memcpy(
+           (WebRtc_Word16*) audioSamples,
+           (const WebRtc_Word16*) _audioFrame._payloadData,
+           sizeof(WebRtc_Word16) * (_audioFrame._payloadDataLengthInSamples
+                   * _audioFrame._audioChannel));
+
+    nSamplesOut = _audioFrame._payloadDataLengthInSamples;
+
+    return 0;
+}
+
+int VoEBaseImpl::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)  // Installs the single engine-wide observer; fails if one exists.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "RegisterVoiceEngineObserver(observer=0x%p)", &observer);
+    CriticalSectionScoped cs(_callbackCritSect);
+    if (_voiceEngineObserverPtr)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_OPERATION, kTraceError,
+                                       "RegisterVoiceEngineObserver() observer"
+                                       " already enabled");
+        return -1;
+    }
+
+    // Register the observer in all active channels
+    voe::ScopedChannel sc(_channelManager);
+    void* iterator(NULL);
+    voe::Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        channelPtr->RegisterVoiceEngineObserver(observer);
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+    _transmitMixerPtr->RegisterVoiceEngineObserver(observer);
+
+    _voiceEngineObserverPtr = &observer;
+    _voiceEngineObserver = true;
+
+    return 0;
+}
+
+int VoEBaseImpl::DeRegisterVoiceEngineObserver()  // Removes the engine-wide observer from the engine and all channels.
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "DeRegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(_callbackCritSect);
+    if (!_voiceEngineObserverPtr)  // Already disabled: flagged as an error but still returns success.
+    {
+        _engineStatistics.SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "DeRegisterVoiceEngineObserver() observer already disabled");
+        return 0;
+    }
+
+    _voiceEngineObserver = false;
+    _voiceEngineObserverPtr = NULL;
+
+    // Deregister the observer in all active channels
+    voe::ScopedChannel sc(_channelManager);
+    void* iterator(NULL);
+    voe::Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        channelPtr->DeRegisterVoiceEngineObserver();
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+
+    return 0;
+}
+
+int VoEBaseImpl::Init(AudioDeviceModule* external_adm)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1), 
+        "Init(external_adm=0x%p)", external_adm);
+    CriticalSectionScoped cs(*_apiCritPtr);
+
+    if (_engineStatistics.Initialized())
+    {
+        return 0;
+    }
+
+    if (_moduleProcessThreadPtr)
+    {
+        if (_moduleProcessThreadPtr->Start() != 0)
+        {
+            _engineStatistics.SetLastError(VE_THREAD_ERROR, kTraceError,
+                "Init() failed to start module process thread");
+            return -1;
+        }
+    }
+
+    // Create an internal ADM if the user has not added an external
+    // ADM implementation as input to Init().
+    if (external_adm == NULL)
+    {
+        // Create the internal ADM implementation.
+        _audioDevicePtr = AudioDeviceModuleImpl::Create(
+            VoEId(_instanceId, -1), _audioDeviceLayer);
+
+        if (_audioDevicePtr == NULL)
+        {
+            _engineStatistics.SetLastError(VE_NO_MEMORY, kTraceCritical,
+                                           "Init() failed to create the ADM");
+            return -1;
+        }
+    }
+    else
+    {
+        // Use the already existing external ADM implementation.
+        _audioDevicePtr = external_adm;
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+            "An external ADM implementation will be used in VoiceEngine");
+    }
+
+    // Increase the reference counter for both external and internal usage.
+    _audioDevicePtr->AddRef();
+
+    // Register the ADM to the process thread, which will drive the error
+    // callback mechanism
+    if (_moduleProcessThreadPtr &&
+        _moduleProcessThreadPtr->RegisterModule(_audioDevicePtr) != 0)
+    {
+        _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                       kTraceError,
+                                       "Init() failed to register the ADM");
+        return -1;
+    }
+
+    bool available(false);
+
+    // --------------------
+    // Reinitialize the ADM
+
+    // Register the AudioObserver implementation
+    if (_audioDevicePtr->RegisterEventObserver(this) != 0) {
+      _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                     kTraceWarning,
+                                     "Init() failed to register event observer "
+                                     "for the ADM");
+    }
+
+    // Register the AudioTransport implementation
+    if (_audioDevicePtr->RegisterAudioCallback(this) != 0) {
+      _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                     kTraceWarning,
+                                     "Init() failed to register audio callback "
+                                     "for the ADM");
+    }
+
+    // ADM initialization
+    if (_audioDevicePtr->Init() != 0)
+    {
+        _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                       kTraceError,
+                                       "Init() failed to initialize the ADM");
+        return -1;
+    }
+
+    // Initialize the default speaker
+    if (_audioDevicePtr->SetPlayoutDevice(WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE)
+            != 0)
+    {
+        _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceInfo,
+            "Init() failed to set the default output device");
+    }
+    if (_audioDevicePtr->SpeakerIsAvailable(&available) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
+            "Init() failed to check speaker availability, trying to "
+            "initialize speaker anyway");
+    }
+    else if (!available)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
+            "Init() speaker not available, trying to initialize speaker "
+            "anyway");
+    }
+    if (_audioDevicePtr->InitSpeaker() != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
+            "Init() failed to initialize the speaker");
+    }
+
+    // Initialize the default microphone
+    if (_audioDevicePtr->SetRecordingDevice(WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE)
+            != 0)
+    {
+        _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceInfo,
+            "Init() failed to set the default input device");
+    }
+    if (_audioDevicePtr->MicrophoneIsAvailable(&available) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
+            "Init() failed to check microphone availability, trying to "
+            "initialize microphone anyway");
+    }
+    else if (!available)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
+            "Init() microphone not available, trying to initialize "
+            "microphone anyway");
+    }
+    if (_audioDevicePtr->InitMicrophone() != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
+            "Init() failed to initialize the microphone");
+    }
+
+    // Set number of channels
+    if (_audioDevicePtr->StereoPlayoutIsAvailable(&available) != 0) {
+      _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+                                     "Init() failed to query stereo playout "
+                                     "mode");
+    }
+    if (_audioDevicePtr->SetStereoPlayout(available) != 0)
+    {
+        _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+            "Init() failed to set mono/stereo playout mode");
+    }
+
+    // TODO(andrew): These functions don't tell us whether stereo recording
+    // is truly available. We simply set the AudioProcessing input to stereo
+    // here, because we have to wait until receiving the first frame to
+    // determine the actual number of channels anyway.
+    //
+    // These functions may be changed; tracked here:
+    // http://code.google.com/p/webrtc/issues/detail?id=204
+    _audioDevicePtr->StereoRecordingIsAvailable(&available);
+    if (_audioDevicePtr->SetStereoRecording(available) != 0)
+    {
+        _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+            "Init() failed to set mono/stereo recording mode");
+    }
+
+    // APM initialization done after sound card since we need
+    // to know if we support stereo recording or not.
+
+    // Create the AudioProcessing Module if it does not exist.
+
+    if (_audioProcessingModulePtr == NULL)
+    {
+        _audioProcessingModulePtr = AudioProcessing::Create(
+                VoEId(_instanceId, -1));
+        if (_audioProcessingModulePtr == NULL)
+        {
+            _engineStatistics.SetLastError(VE_NO_MEMORY, kTraceCritical,
+                "Init() failed to create the AP module");
+            return -1;
+        }
+        // Ensure that mixers in both directions has access to the created APM
+        _transmitMixerPtr->SetAudioProcessingModule(_audioProcessingModulePtr);
+        _outputMixerPtr->SetAudioProcessingModule(_audioProcessingModulePtr);
+
+        if (_audioProcessingModulePtr->echo_cancellation()->
+                set_device_sample_rate_hz(
+                        kVoiceEngineAudioProcessingDeviceSampleRateHz))
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set the device sample rate to 48K for AP "
+                " module");
+            return -1;
+        }
+        // Using 8 kHz as inital Fs. Might be changed already at first call.
+        if (_audioProcessingModulePtr->set_sample_rate_hz(8000))
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set the sample rate to 8K for AP module");
+            return -1;
+        }
+
+        // Assume mono output until a send codec is set, and stereo input until
+        // we receive the first captured frame. We set stereo input here to
+        // avoid triggering a possible error in SetSendCodec when a stereo
+        // codec is selected.
+        if (_audioProcessingModulePtr->set_num_channels(2, 1) != 0)
+        {
+            _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+                "Init() failed to set channels for the primary audio stream");
+            return -1;
+        }
+
+        if (_audioProcessingModulePtr->set_num_reverse_channels(1) != 0)
+        {
+            _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+                "Init() failed to set channels for the primary audio stream");
+            return -1;
+        }
+        // high-pass filter
+        if (_audioProcessingModulePtr->high_pass_filter()->Enable(
+                WEBRTC_VOICE_ENGINE_HP_DEFAULT_STATE) != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set the high-pass filter for AP module");
+            return -1;
+        }
+        // Echo Cancellation
+        if (_audioProcessingModulePtr->echo_cancellation()->
+                enable_drift_compensation(false) != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set drift compensation for AP module");
+            return -1;
+        }
+        if (_audioProcessingModulePtr->echo_cancellation()->Enable(
+                WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE))
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set echo cancellation state for AP module");
+            return -1;
+        }
+        // Noise Reduction
+        if (_audioProcessingModulePtr->noise_suppression()->set_level(
+                (NoiseSuppression::Level) WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE)
+                != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set noise reduction level for AP module");
+            return -1;
+        }
+        if (_audioProcessingModulePtr->noise_suppression()->Enable(
+                WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE) != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set noise reduction state for AP module");
+            return -1;
+        }
+        // Automatic Gain control
+        if (_audioProcessingModulePtr->gain_control()->set_analog_level_limits(
+                kMinVolumeLevel,kMaxVolumeLevel) != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set AGC analog level for AP module");
+            return -1;
+        }
+        if (_audioProcessingModulePtr->gain_control()->set_mode(
+                (GainControl::Mode) WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE)
+                != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set AGC mode for AP module");
+            return -1;
+        }
+        if (_audioProcessingModulePtr->gain_control()->Enable(
+                WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE)
+                != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set AGC state for AP module");
+            return -1;
+        }
+        // VAD
+        if (_audioProcessingModulePtr->voice_detection()->Enable(
+                WEBRTC_VOICE_ENGINE_VAD_DEFAULT_STATE)
+                != 0)
+        {
+            _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set VAD state for AP module");
+            return -1;
+        }
+    }
+
+  // Set default AGC mode for the ADM
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    bool enable(false);
+    if (_audioProcessingModulePtr->gain_control()->mode()
+            != GainControl::kFixedDigital)
+    {
+        enable = _audioProcessingModulePtr->gain_control()->is_enabled();
+        // Only set the AGC mode for the ADM when Adaptive AGC mode is selected
+        if (_audioDevicePtr->SetAGC(enable) != 0)
+        {
+            _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                kTraceError, "Init() failed to set default AGC mode in ADM 0");
+        }
+    }
+#endif
+
+    return _engineStatistics.SetInitialized();
+}
+
+// Shuts the engine down. All teardown work is delegated to
+// TerminateInternal(); this wrapper only traces and serializes the call.
+int VoEBaseImpl::Terminate()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "Terminate()");
+    // Hold the API lock for the duration of the teardown.
+    CriticalSectionScoped apiScope(*_apiCritPtr);
+    const int result = TerminateInternal();
+    return result;
+}
+
+// Returns the maximum number of channels the channel manager supports.
+int VoEBaseImpl::MaxNumOfChannels()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "MaxNumOfChannels()");
+    const WebRtc_Word32 limit = _channelManager.MaxNumOfChannels();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "MaxNumOfChannels() => %d", limit);
+    return limit;
+}
+
+// Allocates and initializes a new voice channel. Returns the new channel
+// id (>= 0) on success, or -1 on failure; a partially constructed channel
+// is destroyed before returning.
+int VoEBaseImpl::CreateChannel()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "CreateChannel()");
+    CriticalSectionScoped cs(*_apiCritPtr);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Filled in by the channel manager on success.
+    WebRtc_Word32 channelId = -1;
+
+    if (!_channelManager.CreateChannel(channelId))
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                                       "CreateChannel() failed to allocate "
+                                           "memory for channel");
+        return -1;
+    }
+
+    // Set inside the scope below; the ScopedChannel reference must be
+    // released before DestroyChannel() can be called, hence the flag.
+    bool destroyChannel(false);
+    {
+        voe::ScopedChannel sc(_channelManager, channelId);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                                           "CreateChannel() failed to allocate"
+                                           " memory for channel");
+            return -1;
+        }
+        // Wire the new channel to the engine-wide components (mixers,
+        // process thread, audio device, observer, callback lock) ...
+        else if (channelPtr->SetEngineInformation(_engineStatistics,
+                                                  *_outputMixerPtr,
+                                                  *_transmitMixerPtr,
+                                                  *_moduleProcessThreadPtr,
+                                                  *_audioDevicePtr,
+                                                  _voiceEngineObserverPtr,
+                                                  &_callbackCritSect) != 0)
+        {
+            destroyChannel = true;
+            _engineStatistics.SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                                           "CreateChannel() failed to "
+                                           "associate engine and channel."
+                                           " Destroying channel.");
+        }
+        // ... then run the channel's own initialization.
+        else if (channelPtr->Init() != 0)
+        {
+            destroyChannel = true;
+            _engineStatistics.SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                                           "CreateChannel() failed to "
+                                           "initialize channel. Destroying"
+                                           " channel.");
+        }
+    }
+    // Safe to destroy here: the scoped reference above has been released.
+    if (destroyChannel)
+    {
+        _channelManager.DestroyChannel(channelId);
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "CreateChannel() => %d", channelId);
+    return channelId;
+}
+
+// Destroys |channel| and, if no channel still needs them, stops the shared
+// device send/playout streams. Returns 0 on success, -1 on failure.
+int VoEBaseImpl::DeleteChannel(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "DeleteChannel(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Verify the channel exists first. The scoped reference must be
+    // released (end of this block) before DestroyChannel() can run.
+    {
+        voe::ScopedChannel scoped(_channelManager, channel);
+        if (scoped.ChannelPtr() == NULL)
+        {
+            _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                           "DeleteChannel() failed to locate "
+                                           "channel");
+            return -1;
+        }
+    }
+
+    if (_channelManager.DestroyChannel(channel) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "DeleteChannel() failed to destroy "
+                                       "channel");
+        return -1;
+    }
+
+    // StopSend()/StopPlayout() only stop the audio device when no other
+    // channel is still sending/playing.
+    if (StopSend() != 0 || StopPlayout() != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// Initializes the local RTP/RTCP receive sockets for |channel|. Existing
+// sockets are always closed and re-created; by default they are also used
+// for transmission (unless a source port is given in SetSendDestination).
+// If SetSendDestination/StartSend are used without calling this first, the
+// sockets are created at the first packet transmission instead.
+int VoEBaseImpl::SetLocalReceiver(int channel, int port, int RTCPport,
+                                  const char ipAddr[64],
+                                  const char multiCastAddr[64])
+{
+    CriticalSectionScoped cs(*_apiCritPtr);
+
+    // Trace exactly the arguments that were actually supplied.
+    const bool haveIp = (ipAddr != NULL);
+    const bool haveMulticast = (multiCastAddr != NULL);
+    if (!haveIp && !haveMulticast)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                     "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d)",
+                     channel, port, RTCPport);
+    }
+    else if (haveIp && !haveMulticast)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                     "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d, "
+                     "ipAddr=%s)", channel, port, RTCPport, ipAddr);
+    }
+    else if (!haveIp && haveMulticast)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                     "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d, "
+                     "multiCastAddr=%s)", channel, port, RTCPport,
+                     multiCastAddr);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                     "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d, "
+                     "ipAddr=%s, multiCastAddr=%s)", channel, port,
+                     RTCPport, ipAddr, multiCastAddr);
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (port < 0 || port > 65535)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+                                       "SetLocalReceiver() invalid RTP port");
+        return -1;
+    }
+    // kVoEDefault means "derive the RTCP port from the RTP port".
+    if (RTCPport != kVoEDefault && (RTCPport < 0 || RTCPport > 65535))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+                                       "SetLocalReceiver() invalid RTCP port");
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetLocalReceiver() failed to locate "
+                                           "channel");
+        return -1;
+    }
+
+    // In the RTP module an RTCP port of 0 means RTP port + 1, which is the
+    // default behavior.
+    WebRtc_UWord16 rtcpPort = 0;
+    if (RTCPport != kVoEDefault)
+    {
+        rtcpPort = static_cast<WebRtc_UWord16>(RTCPport);
+    }
+
+    return target->SetLocalReceiver(port, rtcpPort, ipAddr, multiCastAddr);
+#else
+    _engineStatistics.SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED,
+            kTraceWarning,
+            "SetLocalReceiver() VoE is built for "
+            "external transport");
+    return -1;
+#endif
+}
+
+// Retrieves the local receiver configuration (RTP port, RTCP port and,
+// when |ipAddr| is non-NULL, the bound IP address) for |channel|.
+int VoEBaseImpl::GetLocalReceiver(int channel, int& port, int& RTCPport,
+                                  char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetLocalReceiver(channel=%d, ipAddr[]=?)", channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        // Fixed: error text previously said "SetLocalReceiver()" --
+        // a copy-paste slip that misattributed the failing API.
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetLocalReceiver() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    WebRtc_Word32 ret = channelPtr->GetLocalReceiver(port, RTCPport, ipAddr);
+    if (ipAddr != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "GetLocalReceiver() => port=%d, RTCPport=%d, ipAddr=%s",
+                     port, RTCPport, ipAddr);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "GetLocalReceiver() => port=%d, RTCPport=%d", port,
+                     RTCPport);
+    }
+    return ret;
+#else
+    // Fixed: this message also wrongly said "SetLocalReceiver()".
+    _engineStatistics.SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED,
+                                   kTraceWarning,
+                                   "GetLocalReceiver() VoE is built for "
+                                   "external transport");
+    return -1;
+#endif
+}
+
+// Configures the RTP/RTCP send destination for |channel|. kVoEDefault for
+// sourcePort/RTCPport selects the RTP module defaults.
+int VoEBaseImpl::SetSendDestination(int channel, int port, const char* ipaddr,
+                                    int sourcePort, int RTCPport)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetSendDestination(channel=%d, port=%d, ipaddr=%s,"
+                 "sourcePort=%d, RTCPport=%d)",
+                 channel, port, ipaddr, sourcePort, RTCPport);
+    CriticalSectionScoped cs(*_apiCritPtr);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetSendDestination() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    if (port < 0 || port > 65535)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+                                       "SetSendDestination() invalid RTP port");
+        return -1;
+    }
+    if (RTCPport != kVoEDefault && (RTCPport < 0 || RTCPport > 65535))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+                                       "SetSendDestination() invalid RTCP "
+                                       "port");
+        return -1;
+    }
+    if (sourcePort != kVoEDefault && (sourcePort < 0 || sourcePort > 65535))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+                                       "SetSendDestination() invalid source "
+                                       "port");
+        return -1;
+    }
+
+    // In the RTP module an RTCP port of 0 means RTP port + 1, which is the
+    // default behavior.
+    WebRtc_UWord16 rtcpPort = 0;
+    if (RTCPport != kVoEDefault)
+    {
+        rtcpPort = static_cast<WebRtc_UWord16>(RTCPport);
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, channel),
+                     "SetSendDestination() non default RTCP port %u will be "
+                     "utilized",
+                     rtcpPort);
+    }
+
+    return target->SetSendDestination(port, ipaddr, sourcePort, rtcpPort);
+#else
+    _engineStatistics.SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED,
+                                   kTraceWarning,
+                                   "SetSendDestination() VoE is built for "
+                                   "external transport");
+    return -1;
+#endif
+}
+
+// Retrieves the configured send destination (RTP port, IP address, source
+// port and RTCP port) for |channel|.
+int VoEBaseImpl::GetSendDestination(int channel, int& port, char ipAddr[64],
+                                    int& sourcePort, int& RTCPport)
+{
+    WEBRTC_TRACE(
+                 kTraceApiCall,
+                 kTraceVoice,
+                 VoEId(_instanceId, -1),
+                 "GetSendDestination(channel=%d, ipAddr[]=?, sourcePort=?,"
+                 "RTCPport=?)",
+                 channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetSendDestination() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    WebRtc_Word32 ret = channelPtr->GetSendDestination(port, ipAddr,
+                                                       sourcePort, RTCPport);
+    // Fixed: the result traces previously printed "RTCPport=%d" twice with
+    // the same value (duplicated field in the format string).
+    if (ipAddr != NULL)
+    {
+        WEBRTC_TRACE(
+                     kTraceStateInfo,
+                     kTraceVoice,
+                     VoEId(_instanceId, -1),
+                     "GetSendDestination() => port=%d, RTCPport=%d, "
+                     "ipAddr=%s, sourcePort=%d",
+                     port, RTCPport, ipAddr, sourcePort);
+    }
+    else
+    {
+        WEBRTC_TRACE(
+                     kTraceStateInfo,
+                     kTraceVoice,
+                     VoEId(_instanceId, -1),
+                     "GetSendDestination() => port=%d, RTCPport=%d, "
+                     "sourcePort=%d",
+                     port, RTCPport, sourcePort);
+    }
+    return ret;
+#else
+    _engineStatistics.SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED,
+                                   kTraceWarning,
+                                   "GetSendDestination() VoE is built for "
+                                   "external transport");
+    return -1;
+#endif
+}
+
+// Starts receiving RTP/RTCP packets on |channel|.
+int VoEBaseImpl::StartReceive(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "StartReceive(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "StartReceive() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return target->StartReceiving();
+}
+
+// Stops receiving RTP/RTCP packets on |channel|.
+int VoEBaseImpl::StopReceive(int channel)
+{
+    // Fixed: the trace previously said "StopListen" and the error message
+    // said "SetLocalReceiver()" -- both copy-paste slips that misreported
+    // the failing API in logs.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "StopReceive(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "StopReceive() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->StopReceiving();
+}
+
+// Starts playout on |channel|, bringing up device-level playout first if
+// it is not already running.
+int VoEBaseImpl::StartPlayout(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "StartPlayout(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "StartPlayout() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    if (target->Playing())
+    {
+        return 0;  // already playing; nothing to do
+    }
+    // Start the shared audio-device playout (no-op if already running).
+    if (StartPlayout() != 0)
+    {
+        _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                       kTraceError,
+                                       "StartPlayout() failed to start "
+                                       "playout");
+        return -1;
+    }
+    return target->StartPlayout();
+}
+
+// Stops playout on |channel|; device playout is stopped too when no other
+// channel is still playing.
+int VoEBaseImpl::StopPlayout(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "StopPlayout(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "StopPlayout() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    // A channel-level failure is only a warning; still try to stop the
+    // shared device playout below.
+    if (target->StopPlayout() != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StopPlayout() failed to stop playout for channel %d",
+                     channel);
+    }
+    return StopPlayout();
+}
+
+// Starts sending on |channel|, bringing up device-level recording first if
+// it is not already running.
+int VoEBaseImpl::StartSend(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "StartSend(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "StartSend() failed to locate channel");
+        return -1;
+    }
+    if (target->Sending())
+    {
+        return 0;  // already sending; nothing to do
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    // With the built-in transport a send destination must be configured
+    // before sending can start.
+    if (!target->ExternalTransport() && !target->SendSocketsInitialized())
+    {
+        _engineStatistics.SetLastError(VE_DESTINATION_NOT_INITED, kTraceError,
+                                       "StartSend() must set send destination "
+                                       "first");
+        return -1;
+    }
+#endif
+    // Start the shared device recording (no-op if already running).
+    if (StartSend() != 0)
+    {
+        _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                       kTraceError,
+                                       "StartSend() failed to start recording");
+        return -1;
+    }
+    return target->StartSend();
+}
+
+// Stops sending on |channel|; device recording is stopped too when no
+// other channel is still sending.
+int VoEBaseImpl::StopSend(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "StopSend(channel=%d)", channel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "StopSend() failed to locate channel");
+        return -1;
+    }
+    // A channel-level failure is only a warning; still try to stop the
+    // shared device recording below.
+    if (target->StopSend() != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StopSend() failed to stop sending for channel %d",
+                     channel);
+    }
+    return StopSend();
+}
+
+// Writes the engine version string (VoE version, build info and optional
+// build-flag markers) into |version| (at least 1024 bytes) and traces it
+// in chunks of at most ~180 characters. Returns 0 on success, -1 on error.
+int VoEBaseImpl::GetVersion(char version[1024])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetVersion(version=?)");
+    assert(kVoiceEngineVersionMaxMessageSize == 1024);
+
+    if (version == NULL)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError);
+        return (-1);
+    }
+
+    char versionBuf[kVoiceEngineVersionMaxMessageSize];
+    char* versionPtr = versionBuf;
+
+    WebRtc_Word32 len = 0;
+    WebRtc_Word32 accLen = 0;
+
+    len = AddVoEVersion(versionPtr);
+    if (len == -1)
+    {
+        return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+
+    len = AddBuildInfo(versionPtr);
+    if (len == -1)
+    {
+        return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+    len = AddExternalTransportBuild(versionPtr);
+    if (len == -1)
+    {
+         return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+#endif
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    len = AddExternalRecAndPlayoutBuild(versionPtr);
+    if (len == -1)
+    {
+        return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+#endif
+
+    memcpy(version, versionBuf, accLen);
+    version[accLen] = '\0';
+
+    // To avoid truncation in the trace, split the string into parts,
+    // breaking at newline boundaries. Fixed relative to the original:
+    // the scan is clamped to accLen so we never index past the written
+    // data (the old code could read version[partStart + 180] beyond the
+    // buffer), and the loop always makes forward progress (the old code
+    // could spin when the only newline in the window was at partStart).
+    char partOfVersion[256];
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetVersion() =>");
+    for (int partStart = 0; partStart < accLen;)
+    {
+        memset(partOfVersion, 0, sizeof(partOfVersion));
+        int partEnd = partStart + 180;
+        if (partEnd > accLen)
+        {
+            partEnd = accLen;  // version[accLen] is the '\0' terminator
+        }
+        while (partEnd > partStart && version[partEnd] != '\n'
+               && version[partEnd] != '\0')
+        {
+            partEnd--;
+        }
+        if (partEnd == partStart)
+        {
+            // No line break in this window: emit a fixed-size chunk so the
+            // loop still advances.
+            partEnd = partStart + 180;
+            if (partEnd > accLen)
+            {
+                partEnd = accLen;
+            }
+        }
+        memcpy(partOfVersion, &version[partStart], partEnd - partStart);
+        // Skip a trailing newline so the next part starts after it.
+        partStart = (version[partEnd] == '\n') ? (partEnd + 1) : partEnd;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "%s", partOfVersion);
+    }
+
+    return 0;
+}
+
+// Appends the build-information line to |str|; returns characters written.
+WebRtc_Word32 VoEBaseImpl::AddBuildInfo(char* str) const
+{
+    const int written = sprintf(str, "Build: %s\n", BUILDINFO);
+    return written;
+}
+
+// Appends the VoiceEngine version line to |str|; returns chars written.
+WebRtc_Word32 VoEBaseImpl::AddVoEVersion(char* str) const
+{
+    const char* kVersionLine = "VoiceEngine 4.1.0\n";
+    return sprintf(str, "%s", kVersionLine);
+}
+
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+// Appends the marker noting this build uses external transport.
+WebRtc_Word32 VoEBaseImpl::AddExternalTransportBuild(char* str) const
+{
+    const char* kLine = "External transport build\n";
+    return sprintf(str, "%s", kLine);
+}
+#endif
+
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+// Appends the marker noting this build uses external rec/playout.
+WebRtc_Word32 VoEBaseImpl::AddExternalRecAndPlayoutBuild(char* str) const
+{
+    const char* kLine = "External recording and playout build\n";
+    return sprintf(str, "%s", kLine);
+}
+#endif
+
+int VoEBaseImpl::LastError()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "LastError()");
+    return (_engineStatistics.LastError());
+}
+
+
+// Sets the NetEQ playout mode for |channel|.
+// NOTE(review): unlike most API entry points in this file, this takes no
+// _apiCritPtr lock -- confirm this is intentional.
+int VoEBaseImpl::SetNetEQPlayoutMode(int channel, NetEqModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetNetEQPlayoutMode(channel=%i, mode=%i)", channel, mode);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetNetEQPlayoutMode() failed to locate"
+                                       " channel");
+        return -1;
+    }
+    return target->SetNetEQPlayoutMode(mode);
+}
+
+// Retrieves the NetEQ playout mode for |channel| into |mode|.
+int VoEBaseImpl::GetNetEQPlayoutMode(int channel, NetEqModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetNetEQPlayoutMode(channel=%i, mode=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetNetEQPlayoutMode() failed to locate"
+                                       " channel");
+        return -1;
+    }
+    return target->GetNetEQPlayoutMode(mode);
+}
+
+// Sets the NetEQ background-noise generation mode for |channel|.
+int VoEBaseImpl::SetNetEQBGNMode(int channel, NetEqBgnModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetNetEQBGNMode(channel=%i, mode=%i)", channel, mode);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetNetEQBGNMode() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return target->SetNetEQBGNMode(mode);
+}
+
+// Retrieves the NetEQ background-noise generation mode for |channel|.
+int VoEBaseImpl::GetNetEQBGNMode(int channel, NetEqBgnModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetNetEQBGNMode(channel=%i, mode=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetNetEQBGNMode() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return target->GetNetEQBGNMode(mode);
+}
+
+// Puts |channel| on or off hold using the given hold mode.
+int VoEBaseImpl::SetOnHoldStatus(int channel, bool enable, OnHoldModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetOnHoldStatus(channel=%d, enable=%d, mode=%d)", channel,
+                 enable, mode);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetOnHoldStatus() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return target->SetOnHoldStatus(enable, mode);
+}
+
+// Retrieves the hold state and hold mode for |channel|.
+int VoEBaseImpl::GetOnHoldStatus(int channel, bool& enabled, OnHoldModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetOnHoldStatus(channel=%d, enabled=?, mode=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* target = scoped.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetOnHoldStatus() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return target->GetOnHoldStatus(enabled, mode);
+}
+
+// Internal helper: ensures the audio device is playing out. A no-op when
+// the device is already playing or when playout is handled externally.
+WebRtc_Word32 VoEBaseImpl::StartPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::StartPlayout()");
+    if (_audioDevicePtr->Playing())
+    {
+        return 0;  // device already running
+    }
+    if (_externalPlayout)
+    {
+        return 0;  // playout handled outside the audio device
+    }
+    if (_audioDevicePtr->InitPlayout() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartPlayout() failed to initialize playout");
+        return -1;
+    }
+    if (_audioDevicePtr->StartPlayout() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartPlayout() failed to start playout");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::StopPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::StopPlayout()");
+
+    WebRtc_Word32 numOfChannels = _channelManager.NumOfChannels();
+    if (numOfChannels <= 0)
+    {
+        return 0;
+    }
+
+    // Determine whether ANY channel is still playing out. Only a yes/no
+    // answer is needed, so stop scanning as soon as one playing channel is
+    // found (the original code counted every playing channel and only
+    // compared the count against zero).
+    bool channelStillPlaying = false;
+    WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+    _channelManager.GetChannelIds(channelsArray, numOfChannels);
+    for (int i = 0; (i < numOfChannels) && !channelStillPlaying; i++)
+    {
+        voe::ScopedChannel sc(_channelManager, channelsArray[i]);
+        voe::Channel* chPtr = sc.ChannelPtr();
+        if (chPtr && chPtr->Playing())
+        {
+            channelStillPlaying = true;
+        }
+    }
+    delete[] channelsArray;
+
+    // Stop audio-device playout only when no channel is playing out.
+    if (!channelStillPlaying)
+    {
+        if (_audioDevicePtr->StopPlayout() != 0)
+        {
+            _engineStatistics.SetLastError(VE_CANNOT_STOP_PLAYOUT, kTraceError,
+                                           "StopPlayout() failed to stop "
+                                           "playout");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::StartSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::StartSend()");
+    // Already capturing: nothing more to do.
+    if (_audioDevicePtr->Recording())
+    {
+        return 0;
+    }
+    // In external-recording mode the client feeds audio in itself, so the
+    // physical device is not initialized or started here.
+    if (_externalRecording)
+    {
+        return 0;
+    }
+    if (_audioDevicePtr->InitRecording() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartSend() failed to initialize recording");
+        return -1;
+    }
+    if (_audioDevicePtr->StartRecording() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartSend() failed to start recording");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::StopSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::StopSend()");
+
+    // The device may only be stopped when no channel is sending AND the
+    // transmit mixer is not recording the microphone.
+    if ((NumOfSendingChannels() == 0) && !_transmitMixerPtr->IsRecordingMic())
+    {
+        // Stop audio-device recording if no channel is recording
+        if (_audioDevicePtr->StopRecording() != 0)
+        {
+            _engineStatistics.SetLastError(VE_CANNOT_STOP_RECORDING,
+                                           kTraceError,
+                                           "StopSend() failed to stop "
+                                           "recording");
+            return -1;
+        }
+        // Stop the transmit mixer only after the device has stopped, so no
+        // further capture callbacks reach a stopped mixer.
+        _transmitMixerPtr->StopSend();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::TerminateInternal()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEBaseImpl::TerminateInternal()");
+
+    // Best-effort teardown: failures below are recorded via SetLastError but
+    // shutdown always continues so every resource gets a release attempt.
+
+    // Delete any remaining channel objects
+    WebRtc_Word32 numOfChannels = _channelManager.NumOfChannels();
+    if (numOfChannels > 0)
+    {
+        WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+        _channelManager.GetChannelIds(channelsArray, numOfChannels);
+        for (int i = 0; i < numOfChannels; i++)
+        {
+            DeleteChannel(channelsArray[i]);
+        }
+        delete[] channelsArray;
+    }
+
+    if (_moduleProcessThreadPtr)
+    {
+        // De-register the ADM from the process thread before stopping the
+        // thread, so the thread never ticks a module that is being torn down.
+        if (_audioDevicePtr)
+        {
+            if (_moduleProcessThreadPtr->DeRegisterModule(_audioDevicePtr) != 0)
+            {
+                _engineStatistics.SetLastError(VE_THREAD_ERROR, kTraceError,
+                                               "TerminateInternal() failed to "
+                                               "deregister ADM");
+            }
+        }
+        if (_moduleProcessThreadPtr->Stop() != 0)
+        {
+            _engineStatistics.SetLastError(VE_THREAD_ERROR, kTraceError,
+                                           "TerminateInternal() failed to stop "
+                                           "module process thread");
+        }
+    }
+
+    // Audio Device Module
+
+    // Ordering: stop playout/recording first, then detach our observer and
+    // audio callback, then Terminate() the module, and finally drop our
+    // reference (the ADM is reference-counted via Release()).
+    if (_audioDevicePtr != NULL)
+    {
+        if (_audioDevicePtr->StopPlayout() != 0)
+        {
+            _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+                                           "TerminateInternal() failed to stop "
+                                           "playout");
+        }
+        if (_audioDevicePtr->StopRecording() != 0)
+        {
+            _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+                                           "TerminateInternal() failed to stop "
+                                           "recording");
+        }
+        if (_audioDevicePtr->RegisterEventObserver(NULL) != 0) {
+          _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                         kTraceWarning,
+                                         "TerminateInternal() failed to de-"
+                                         "register event observer for the ADM");
+        }
+        if (_audioDevicePtr->RegisterAudioCallback(NULL) != 0) {
+          _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                         kTraceWarning,
+                                         "TerminateInternal() failed to de-"
+                                         "register audio callback for the ADM");
+        }
+        if (_audioDevicePtr->Terminate() != 0)
+        {
+            _engineStatistics.SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                                           kTraceError,
+                                           "TerminateInternal() failed to "
+                                           "terminate the ADM");
+        }
+
+        _audioDevicePtr->Release();
+        _audioDevicePtr = NULL;
+    }
+
+    // AP module
+
+    // Detach the APM from the transmit mixer before destroying it so the
+    // mixer never dereferences a dead pointer.
+    if (_audioProcessingModulePtr != NULL)
+    {
+        _transmitMixerPtr->SetAudioProcessingModule(NULL);
+        AudioProcessing::Destroy(_audioProcessingModulePtr);
+        _audioProcessingModulePtr = NULL;
+    }
+
+    return _engineStatistics.SetUnInitialized();
+}
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_base_impl.h b/trunk/src/voice_engine/main/source/voe_base_impl.h
new file mode 100644
index 0000000..6d470a0
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_base_impl.h
@@ -0,0 +1,152 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_BASE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_BASE_IMPL_H
+
+#include "voe_base.h"
+
+#include "module_common_types.h"
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc
+{
+
+class ProcessThread;
+
+// Implementation of the VoEBase sub-API. Also implements the AudioTransport
+// and AudioDeviceObserver callback interfaces so the audio device module can
+// deliver recorded data, request playout data and report errors/warnings
+// directly to the voice engine. State shared with the other sub-API
+// implementations lives in the virtual voe::SharedData base.
+class VoEBaseImpl: public virtual voe::SharedData,
+                   public VoEBase,
+                   public voe::RefCount,
+                   public AudioTransport,
+                   public AudioDeviceObserver
+{
+public:
+    // Decrements the interface reference counter; see voe::RefCount.
+    virtual int Release();
+
+    virtual int RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
+
+    virtual int DeRegisterVoiceEngineObserver();
+
+    // Initializes the engine; an externally created ADM may be injected.
+    virtual int Init(AudioDeviceModule* external_adm = NULL);
+
+    virtual int Terminate();
+
+    virtual int MaxNumOfChannels();
+
+    // Channel lifetime management.
+    virtual int CreateChannel();
+
+    virtual int DeleteChannel(int channel);
+
+    // RTP/RTCP transport configuration (per channel).
+    virtual int SetLocalReceiver(int channel, int port,
+                                 int RTCPport = kVoEDefault,
+                                 const char ipAddr[64] = NULL,
+                                 const char multiCastAddr[64] = NULL);
+
+    virtual int GetLocalReceiver(int channel, int& port, int& RTCPport,
+                                 char ipAddr[64]);
+
+    virtual int SetSendDestination(int channel, int port,
+                                   const char ipAddr[64],
+                                   int sourcePort = kVoEDefault,
+                                   int RTCPport = kVoEDefault);
+
+    virtual int GetSendDestination(int channel,
+                                   int& port,
+                                   char ipAddr[64],
+                                   int& sourcePort,
+                                   int& RTCPport);
+
+    // Per-channel media control.
+    virtual int StartReceive(int channel);
+
+    virtual int StartPlayout(int channel);
+
+    virtual int StartSend(int channel);
+
+    virtual int StopReceive(int channel);
+
+    virtual int StopPlayout(int channel);
+
+    virtual int StopSend(int channel);
+
+    // NetEQ (jitter buffer) configuration.
+    virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode);
+
+    virtual int GetNetEQPlayoutMode(int channel, NetEqModes& mode);
+
+    virtual int SetNetEQBGNMode(int channel, NetEqBgnModes mode);
+
+    virtual int GetNetEQBGNMode(int channel, NetEqBgnModes& mode);
+
+
+    virtual int SetOnHoldStatus(int channel,
+                                bool enable,
+                                OnHoldModes mode = kHoldSendAndPlay);
+
+    virtual int GetOnHoldStatus(int channel, bool& enabled, OnHoldModes& mode);
+
+    virtual int GetVersion(char version[1024]);
+
+    virtual int LastError();
+
+    // AudioTransport
+    virtual WebRtc_Word32
+        RecordedDataIsAvailable(const WebRtc_Word8* audioSamples,
+                                const WebRtc_UWord32 nSamples,
+                                const WebRtc_UWord8 nBytesPerSample,
+                                const WebRtc_UWord8 nChannels,
+                                const WebRtc_UWord32 samplesPerSec,
+                                const WebRtc_UWord32 totalDelayMS,
+                                const WebRtc_Word32 clockDrift,
+                                const WebRtc_UWord32 currentMicLevel,
+                                WebRtc_UWord32& newMicLevel);
+
+    virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
+                                           const WebRtc_UWord8 nBytesPerSample,
+                                           const WebRtc_UWord8 nChannels,
+                                           const WebRtc_UWord32 samplesPerSec,
+                                           WebRtc_Word8* audioSamples,
+                                           WebRtc_UWord32& nSamplesOut);
+
+    // AudioDeviceObserver
+    virtual void OnErrorIsReported(const ErrorCode error);
+    virtual void OnWarningIsReported(const WarningCode warning);
+
+protected:
+    // Construction only via VoiceEngineImpl / GetInterface().
+    VoEBaseImpl();
+    virtual ~VoEBaseImpl();
+
+private:
+    // Device-level start/stop helpers shared by the per-channel variants.
+    WebRtc_Word32 StartPlayout();
+    WebRtc_Word32 StopPlayout();
+    WebRtc_Word32 StartSend();
+    WebRtc_Word32 StopSend();
+    WebRtc_Word32 TerminateInternal();
+
+    // Helpers that append build/version info to the GetVersion() string.
+    WebRtc_Word32 AddBuildInfo(char* str) const;
+    WebRtc_Word32 AddVoEVersion(char* str) const;
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_Word32 AddExternalTransportBuild(char* str) const;
+#endif
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    WebRtc_Word32 AddExternalRecAndPlayoutBuild(char* str) const;
+#endif
+    VoiceEngineObserver* _voiceEngineObserverPtr;
+    CriticalSectionWrapper& _callbackCritSect;
+
+    // True while a VoiceEngineObserver is registered.
+    bool _voiceEngineObserver;
+    WebRtc_UWord32 _oldVoEMicLevel;
+    WebRtc_UWord32 _oldMicLevel;
+    AudioFrame _audioFrame;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_BASE_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_call_report_impl.cc b/trunk/src/voice_engine/main/source/voe_call_report_impl.cc
new file mode 100644
index 0000000..8be8e01
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_call_report_impl.cc
@@ -0,0 +1,432 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_call_report_impl.h"
+
+#include "audio_processing.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+VoECallReport* VoECallReport::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+    // Sub-API compiled out: no interface is available.
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    // VoiceEngineImpl multiply inherits every enabled sub-API implementation,
+    // so the engine pointer can be narrowed to this sub-API via assignment.
+    VoiceEngineImpl* s =
+            reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoECallReportImpl* d = s;
+    // Bump the interface reference counter; callers must balance with
+    // Release().
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+
+VoECallReportImpl::VoECallReportImpl() :
+    _file(*FileWrapper::Create())  // owned; deleted in the dtor
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoECallReportImpl() - ctor");
+}
+
+VoECallReportImpl::~VoECallReportImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "~VoECallReportImpl() - dtor");
+    // Matches FileWrapper::Create() in the ctor (stored as a reference).
+    delete &_file;
+}
+
+int VoECallReportImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoECallReportImpl::Release()");
+    // Decrement the interface reference counter (voe::RefCount operator).
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        // More Release() calls than GetInterface() calls: reset the counter
+        // and report the misuse.
+        Reset();
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                       kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoECallReportImpl reference counter = %d", refCount);
+    // Returns the number of references still outstanding.
+    return (refCount);
+}
+
+int VoECallReportImpl::ResetCallReportStatistics(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "ResetCallReportStatistics(channel=%d)", channel);
+    // Platform guards: presumably expand to early error returns on
+    // unsupported platforms.
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    assert(_audioProcessingModulePtr != NULL);
+
+    // Remember the current metrics state so it can be restored below.
+    bool echoMode =
+        _audioProcessingModulePtr->echo_cancellation()->are_metrics_enabled();
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "  current AudioProcessingModule echo metric state %d)",
+                 echoMode);
+    // Reset the APM statistics
+    // NOTE(review): enable_metrics(true) appears to be used for its side
+    // effect of resetting the echo metrics — confirm against the
+    // AudioProcessing API.
+    if (_audioProcessingModulePtr->echo_cancellation()->enable_metrics(true)
+        != 0)
+    {
+        _engineStatistics.SetLastError(VE_APM_ERROR, kTraceError,
+                                       "ResetCallReportStatistics() unable to "
+                                       "set the AudioProcessingModule echo "
+                                       "metrics state");
+        return -1;
+    }
+    // Restore metric states
+    _audioProcessingModulePtr->echo_cancellation()->enable_metrics(echoMode);
+
+    // Reset channel dependent statistics
+    // channel == -1 means "all existing channels".
+    if (channel != -1)
+    {
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                           "ResetCallReportStatistics() failed "
+                                           "to locate channel");
+            return -1;
+        }
+        channelPtr->ResetDeadOrAliveCounters();
+        channelPtr->ResetRTCPStatistics();
+    }
+    else
+    {
+        WebRtc_Word32 numOfChannels = _channelManager.NumOfChannels();
+        if (numOfChannels <= 0)
+        {
+            return 0;
+        }
+        WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+        _channelManager.GetChannelIds(channelsArray, numOfChannels);
+        for (int i = 0; i < numOfChannels; i++)
+        {
+            voe::ScopedChannel sc(_channelManager, channelsArray[i]);
+            voe::Channel* channelPtr = sc.ChannelPtr();
+            if (channelPtr)
+            {
+                channelPtr->ResetDeadOrAliveCounters();
+                channelPtr->ResetRTCPStatistics();
+            }
+        }
+        delete[] channelsArray;
+    }
+
+    return 0;
+}
+
+int VoECallReportImpl::GetEchoMetricSummary(EchoStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetEchoMetricSummary()");
+    // Platform guards: presumably expand to early error returns on
+    // unsupported platforms.
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    assert(_audioProcessingModulePtr != NULL);
+
+    // All real work happens in the internal helper.
+    return (GetEchoMetricSummaryInternal(stats));
+}
+
+int VoECallReportImpl::GetEchoMetricSummaryInternal(EchoStatistics& stats)
+{
+    // Retrieve echo metrics from the AudioProcessingModule
+    int ret(0);
+    bool mode(false);
+    EchoCancellation::Metrics metrics;
+
+    // Ensure that echo metrics is enabled
+
+    mode =
+        _audioProcessingModulePtr->echo_cancellation()->are_metrics_enabled();
+    if (mode != false)
+    {
+        ret =
+          _audioProcessingModulePtr->echo_cancellation()->GetMetrics(&metrics);
+        if (ret != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                       "  AudioProcessingModule GetMetrics() => error");
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "  AudioProcessingModule echo metrics is not enabled");
+    }
+
+    // Metrics disabled or retrieval failed: fill the whole struct with the
+    // sentinel -100 dB and still return success (caller sees invalid marks).
+    if ((ret != 0) || (mode == false))
+    {
+        // Mark complete struct as invalid (-100 dB)
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "  unable to retrieve echo metrics from the "
+                   "AudioProcessingModule");
+        stats.erl.min = -100;
+        stats.erl.max = -100;
+        stats.erl.average = -100;
+        stats.erle.min = -100;
+        stats.erle.max = -100;
+        stats.erle.average = -100;
+        stats.rerl.min = -100;
+        stats.rerl.max = -100;
+        stats.rerl.average = -100;
+        stats.a_nlp.min = -100;
+        stats.a_nlp.max = -100;
+        stats.a_nlp.average = -100;
+    }
+    else
+    {
+
+        // Deliver output results to user
+        // (APM min/max/average map directly onto the StatVal fields.)
+        stats.erl.min = metrics.echo_return_loss.minimum;
+        stats.erl.max = metrics.echo_return_loss.maximum;
+        stats.erl.average = metrics.echo_return_loss.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                   "  erl: min=%d, max=%d, avg=%d", stats.erl.min,
+                   stats.erl.max, stats.erl.average);
+
+        stats.erle.min = metrics.echo_return_loss_enhancement.minimum;
+        stats.erle.max = metrics.echo_return_loss_enhancement.maximum;
+        stats.erle.average = metrics.echo_return_loss_enhancement.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                   "  erle: min=%d, max=%d, avg=%d", stats.erle.min,
+                   stats.erle.max, stats.erle.average);
+
+        stats.rerl.min = metrics.residual_echo_return_loss.minimum;
+        stats.rerl.max = metrics.residual_echo_return_loss.maximum;
+        stats.rerl.average = metrics.residual_echo_return_loss.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                   "  rerl: min=%d, max=%d, avg=%d", stats.rerl.min,
+                   stats.rerl.max, stats.rerl.average);
+
+        stats.a_nlp.min = metrics.a_nlp.minimum;
+        stats.a_nlp.max = metrics.a_nlp.maximum;
+        stats.a_nlp.average = metrics.a_nlp.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                   "  a_nlp: min=%d, max=%d, avg=%d", stats.a_nlp.min,
+                   stats.a_nlp.max, stats.a_nlp.average);
+    }
+    return 0;
+}
+
+int VoECallReportImpl::GetRoundTripTimeSummary(int channel, StatVal& delaysMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetRoundTripTimeSummary()");
+    // Platform guards: presumably expand to early error returns on
+    // unsupported platforms.
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetRoundTripTimeSummary() failed to "
+                                       "locate channel");
+        return -1;
+    }
+
+    // RTT statistics are tracked per channel; delegate to it.
+    return channelPtr->GetRoundTripTimeSummary(delaysMs);
+}
+
+int VoECallReportImpl::GetDeadOrAliveSummary(int channel,
+                                             int& numOfDeadDetections,
+                                             int& numOfAliveDetections)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetDeadOrAliveSummary(channel=%d)", channel);
+    // Platform guards: presumably expand to early error returns on
+    // unsupported platforms.
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Channel lookup and counter retrieval happen in the internal helper.
+    return (GetDeadOrAliveSummaryInternal(channel, numOfDeadDetections,
+                                          numOfAliveDetections));
+}
+
+// Resolves |channel| and returns its accumulated dead/alive detection
+// counters through the two output parameters. Returns -1 (with last-error
+// set) if the engine is uninitialized or the channel does not exist.
+int VoECallReportImpl::GetDeadOrAliveSummaryInternal(int channel,
+                                                     int& numOfDeadDetections,
+                                                     int& numOfAliveDetections)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetDeadOrAliveSummary(channel=%d)", channel);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        // BUG FIX: the error text previously named GetRoundTripTimeSummary()
+        // (copy/paste from that function); report the correct API name.
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetDeadOrAliveSummary() failed to "
+                                       "locate channel");
+        return -1;
+    }
+
+    // The channel object maintains the dead/alive counters.
+    return channelPtr->GetDeadOrAliveCounters(numOfDeadDetections,
+                                              numOfAliveDetections);
+}
+
+// Writes a plain-text call report (RTT per channel, dead/alive detections
+// per channel, echo metrics) to |fileNameUTF8|. Returns 0 on success, -1
+// (with last-error set) on bad arguments or file errors.
+int VoECallReportImpl::WriteReportToFile(const char* fileNameUTF8)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "WriteReportToFile(fileNameUTF8=%s)", fileNameUTF8);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (NULL == fileNameUTF8)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "WriteReportToFile() invalid filename");
+        return -1;
+    }
+
+    // Close any report file left open by a previous call.
+    if (_file.Open())
+    {
+        _file.CloseFile();
+    }
+
+    // Open text file in write mode
+    if (_file.OpenFile(fileNameUTF8, false, false, true) != 0)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+                                       "WriteReportToFile() unable to open the "
+                                       "file");
+        return -1;
+    }
+
+    // Summarize information and add it to the open file
+    //
+    _file.WriteText("WebRtc VoiceEngine Call Report\n");
+    _file.WriteText("==============================\n");
+    _file.WriteText("\nNetwork Packet Round Trip Time (RTT)\n");
+    _file.WriteText("------------------------------------\n\n");
+
+    // BUG FIX: the original code returned early when no channels existed,
+    // leaving the file open and the report truncated. Now the complete
+    // report is always written and the file is always closed.
+    WebRtc_Word32 numOfChannels = _channelManager.NumOfChannels();
+    WebRtc_Word32* channelsArray = NULL;
+    if (numOfChannels > 0)
+    {
+        channelsArray = new WebRtc_Word32[numOfChannels];
+        _channelManager.GetChannelIds(channelsArray, numOfChannels);
+    }
+
+    for (int ch = 0; ch < numOfChannels; ch++)
+    {
+        voe::ScopedChannel sc(_channelManager, channelsArray[ch]);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr)
+        {
+            StatVal delaysMs;
+            // BUG FIX: label the section with the actual channel id, not the
+            // loop index (ids need not be contiguous or start at zero).
+            _file.WriteText("channel %d:\n", channelsArray[ch]);
+            channelPtr->GetRoundTripTimeSummary(delaysMs);
+            _file.WriteText("  min:%5d [ms]\n", delaysMs.min);
+            _file.WriteText("  max:%5d [ms]\n", delaysMs.max);
+            _file.WriteText("  avg:%5d [ms]\n", delaysMs.average);
+        }
+    }
+
+    _file.WriteText("\nDead-or-Alive Connection Detections\n");
+    _file.WriteText("------------------------------------\n\n");
+
+    for (int ch = 0; ch < numOfChannels; ch++)
+    {
+        voe::ScopedChannel sc(_channelManager, channelsArray[ch]);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr)
+        {
+            int nDead(0);
+            int nAlive(0);
+            _file.WriteText("channel %d:\n", channelsArray[ch]);
+            // BUG FIX: query by channel id; the original passed the loop
+            // index, which addresses the wrong channel (or fails) whenever
+            // channel ids are not exactly 0..N-1.
+            GetDeadOrAliveSummary(channelsArray[ch], nDead, nAlive);
+            _file.WriteText("  #dead :%6d\n", nDead);
+            _file.WriteText("  #alive:%6d\n", nAlive);
+        }
+    }
+
+    delete[] channelsArray;  // deleting NULL is a no-op
+
+    EchoStatistics echo;
+    GetEchoMetricSummary(echo);
+
+    _file.WriteText("\nEcho Metrics\n");
+    _file.WriteText("------------\n\n");
+
+    _file.WriteText("erl:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.erl.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.erl.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.erl.average);
+    _file.WriteText("\nerle:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.erle.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.erle.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.erle.average);
+    _file.WriteText("rerl:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.rerl.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.rerl.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.rerl.average);
+    _file.WriteText("a_nlp:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.a_nlp.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.a_nlp.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.a_nlp.average);
+
+    _file.WriteText("\n<END>");
+
+    _file.Flush();
+    _file.CloseFile();
+
+    return 0;
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_call_report_impl.h b/trunk/src/voice_engine/main/source/voe_call_report_impl.h
new file mode 100644
index 0000000..f5852b4
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_call_report_impl.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
+
+#include "voe_call_report.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+
+namespace webrtc
+{
+class FileWrapper;
+
+// Implementation of the VoECallReport sub-API: aggregates echo metrics,
+// round-trip times and dead/alive detections, and can dump them to a text
+// file via WriteReportToFile().
+class VoECallReportImpl: public virtual voe::SharedData,
+                         public VoECallReport,
+                         public voe::RefCount
+{
+public:
+    // Decrements the interface reference counter; see voe::RefCount.
+    virtual int Release();
+
+    // Resets statistics for one channel, or for all channels when
+    // channel == -1.
+    virtual int ResetCallReportStatistics(int channel);
+
+    virtual int GetEchoMetricSummary(EchoStatistics& stats);
+
+    virtual int GetRoundTripTimeSummary(int channel,
+                                        StatVal& delaysMs);
+
+    virtual int GetDeadOrAliveSummary(int channel, int& numOfDeadDetections,
+                                      int& numOfAliveDetections);
+
+    virtual int WriteReportToFile(const char* fileNameUTF8);
+
+protected:
+    // Construction only via VoiceEngineImpl / GetInterface().
+    VoECallReportImpl();
+    virtual ~VoECallReportImpl();
+
+private:
+    int GetDeadOrAliveSummaryInternal(int channel,
+                                      int& numOfDeadDetections,
+                                      int& numOfAliveDetections);
+
+    int GetEchoMetricSummaryInternal(EchoStatistics& stats);
+
+    // NOTE(review): declared but no definition is visible in the .cc file —
+    // confirm whether this is dead code that should be removed.
+    int GetSpeechAndNoiseSummaryInternal(LevelStatistics& stats);
+
+    // Report output file; created in the ctor, deleted in the dtor.
+    FileWrapper& _file;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_codec_impl.cc b/trunk/src/voice_engine/main/source/voe_codec_impl.cc
new file mode 100644
index 0000000..6b308cc
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_codec_impl.cc
@@ -0,0 +1,755 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_codec_impl.h"
+
+#include "audio_coding_module.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+// Returns the VoECodec sub-interface of |voiceEngine|, or NULL when the
+// codec API is compiled out or |voiceEngine| is NULL. Increments the
+// interface reference count; callers must balance with Release().
+VoECodec* VoECodec::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_CODEC_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s =
+            reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    // Implicit upcast: VoiceEngineImpl derives from VoECodecImpl, so |d|
+    // is the codec-facing view of the same engine object.
+    VoECodecImpl* d = s;
+    (*d)++;  // RefCount mixin: bump this interface's counter
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+
+// Constructor: trace-only; all real state lives in the SharedData base.
+VoECodecImpl::VoECodecImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoECodecImpl() - ctor");
+}
+
+// Destructor: trace-only.
+VoECodecImpl::~VoECodecImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "~VoECodecImpl() - dtor");
+}
+
+// Decrements the interface reference count taken by GetInterface().
+// Returns the remaining count, or -1 when released more times than
+// acquired (counter is then reset so the engine may be deleted).
+int VoECodecImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoECodecImpl::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();  // reset reference counter to zero => OK to delete VE
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                       kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoECodecImpl reference counter = %d", refCount);
+    return (refCount);
+}
+
+// Returns the number of codecs supported by the Audio Coding Module.
+int VoECodecImpl::NumOfCodecs()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "NumOfCodecs()");
+
+    // Number of supported codecs in the ACM
+    WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "NumOfCodecs() => %u", nSupportedCodecs);
+    return (nSupportedCodecs);
+}
+
+// Fills |codec| with the ACM codec at |index| (0..NumOfCodecs()-1),
+// converted to the external representation (SILK pacsize remapping).
+// Returns 0 on success, -1 for an invalid index.
+int VoECodecImpl::GetCodec(int index, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetCodec(index=%d, codec=?)", index);
+    CodecInst acmCodec;
+    if (AudioCodingModule::Codec(index, (CodecInst&) acmCodec)
+            == -1)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_LISTNR, kTraceError,
+                                       "GetCodec() invalid index");
+        return -1;
+    }
+    ACMToExternalCodecRepresentation(codec, acmCodec);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetCodec() => plname=%s, pacsize=%d, plfreq=%d, pltype=%d, "
+                 "channels=%d, rate=%d", codec.plname, codec.pacsize,
+                   codec.plfreq, codec.pltype, codec.channels, codec.rate);
+    return 0;
+}
+
+// Configures the send codec for |channel|.
+// Performs engine-level sanity checks (L16 packet size, reserved payload
+// names CN/TELEPHONE-EVENT/RED, 1-2 channels) before handing the codec to
+// the channel, then re-checks all channels and adjusts the APM output
+// channel count if any channel is now configured for stereo.
+// Returns 0 on success, -1 on failure (last error is set).
+// Fix vs. original: the channel-lookup error message wrongly named
+// GetSendCodec(); it now names SetSendCodec().
+int VoECodecImpl::SetSendCodec(int channel, const CodecInst& codec)
+{
+    // Translate the external SILK packet-size convention into the ACM's.
+    CodecInst copyCodec;
+    ExternalToACMCodecRepresentation(copyCodec, codec);
+
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetSendCodec(channel=%d, codec)", channel);
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "codec: plname=%s, pacsize=%d, plfreq=%d, pltype=%d, "
+                 "channels=%d, rate=%d", codec.plname, codec.pacsize,
+                 codec.plfreq, codec.pltype, codec.channels, codec.rate);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // External sanity checks performed outside the ACM
+    if ((STR_CASE_CMP(copyCodec.plname, "L16") == 0) &&
+            (copyCodec.pacsize >= 960))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendCodec() invalid L16 packet "
+                                       "size");
+        return -1;
+    }
+    // CN, telephone-event and RED are configured through dedicated APIs,
+    // never as the primary send codec.
+    if (!STR_CASE_CMP(copyCodec.plname, "CN")
+            || !STR_CASE_CMP(copyCodec.plname, "TELEPHONE-EVENT")
+            || !STR_CASE_CMP(copyCodec.plname, "RED"))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendCodec() invalid codec name");
+        return -1;
+    }
+    if ((copyCodec.channels != 1) && (copyCodec.channels != 2))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendCodec() invalid number of "
+                                       "channels");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetSendCodec() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    if (!AudioCodingModule::IsCodecValid(
+            (CodecInst&) copyCodec))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendCodec() invalid codec");
+        return -1;
+    }
+    if (channelPtr->SetSendCodec(copyCodec) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_SET_SEND_CODEC,
+                                       kTraceError,
+                                       "SetSendCodec() failed to set send "
+                                       "codec");
+        return -1;
+    }
+
+    // Need to check if we should change APM settings for mono/stereo.
+    // We'll check all channels (sending or not), so we don't have to
+    // check this again when starting/stopping sending.
+
+    voe::ScopedChannel sc2(_channelManager);
+    void* iterator(NULL);
+    channelPtr = sc2.GetFirstChannel(iterator);
+    int maxNumChannels = 1;
+    while (channelPtr != NULL)
+    {
+        CodecInst tmpCdc;
+        channelPtr->GetSendCodec(tmpCdc);
+        if (tmpCdc.channels > maxNumChannels)
+            maxNumChannels = tmpCdc.channels;
+
+        channelPtr = sc2.GetNextChannel(iterator);
+    }
+
+    // Reuse the currently set number of capture channels. We need to wait
+    // until receiving a frame to determine the true number.
+    //
+    // TODO(andrew): AudioProcessing will return an error if there are more
+    // output than input channels (it doesn't want to produce fake channels).
+    // This will happen with a stereo codec and a device which doesn't support
+    // stereo. AudioCoding should probably do the faking; look into how to
+    // handle this case properly.
+    //
+    // Check if the number of channels has changed to avoid an unnecessary
+    // reset.
+    // TODO(andrew): look at handling this logic in AudioProcessing.
+    if (_audioProcessingModulePtr->num_output_channels() != maxNumChannels)
+    {
+        if (_audioProcessingModulePtr->set_num_channels(
+                _audioProcessingModulePtr->num_input_channels(),
+                maxNumChannels) != 0)
+        {
+            _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+                "Init() failed to set APM channels for the send audio stream");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// Retrieves the currently configured send codec for |channel| into
+// |codec| (external representation). Returns 0 on success, -1 on error.
+int VoECodecImpl::GetSendCodec(int channel, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSendCodec(channel=%d, codec=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetSendCodec() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    CodecInst acmCodec;
+    if (channelPtr->GetSendCodec(acmCodec) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_GET_SEND_CODEC, kTraceError,
+                                       "GetSendCodec() failed to get send "
+                                       "codec");
+        return -1;
+    }
+    // Map ACM SILK packet sizes back to the external convention.
+    ACMToExternalCodecRepresentation(codec, acmCodec);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSendCodec() => plname=%s, pacsize=%d, plfreq=%d, "
+                 "channels=%d, rate=%d", codec.plname, codec.pacsize,
+                 codec.plfreq, codec.channels, codec.rate);
+    return 0;
+}
+
+// Retrieves the codec of the last received audio on |channel| into
+// |codec| (external representation). Returns 0 on success, -1 on error.
+int VoECodecImpl::GetRecCodec(int channel, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetRecCodec(channel=%d, codec=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetRecCodec() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    CodecInst acmCodec;
+    if (channelPtr->GetRecCodec(acmCodec) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CANNOT_GET_REC_CODEC, kTraceError,
+                                       "GetRecCodec() failed to get received "
+                                       "codec");
+        return -1;
+    }
+    // Map ACM SILK packet sizes back to the external convention.
+    ACMToExternalCodecRepresentation(codec, acmCodec);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetRecCodec() => plname=%s, pacsize=%d, plfreq=%d, "
+                 "channels=%d, rate=%d", codec.plname, codec.pacsize,
+                 codec.plfreq, codec.channels, codec.rate);
+    return 0;
+}
+
+// Sets the AMR encoder packing format for |channel|. Only available when
+// built with WEBRTC_CODEC_GSMAMR; otherwise sets VE_FUNC_NOT_SUPPORTED.
+int VoECodecImpl::SetAMREncFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetAMREncFormat(channel=%d, mode=%d)", channel, mode);
+#ifdef WEBRTC_CODEC_GSMAMR
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetAMREncFormat() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->SetAMREncFormat(mode);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetAMREncFormat() AMR codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Sets the AMR decoder packing format for |channel|. Only available when
+// built with WEBRTC_CODEC_GSMAMR; otherwise sets VE_FUNC_NOT_SUPPORTED.
+int VoECodecImpl::SetAMRDecFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetAMRDecFormat(channel=%i, mode=%i)", channel, mode);
+#ifdef WEBRTC_CODEC_GSMAMR
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetAMRDecFormat() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->SetAMRDecFormat(mode);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetAMRDecFormat() AMR codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Sets the AMR-WB encoder packing format for |channel|. Unsupported on
+// Android/iPhone builds and when WEBRTC_CODEC_GSMAMRWB is not defined.
+int VoECodecImpl::SetAMRWbEncFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetAMRWbEncFormat(channel=%d, mode=%d)", channel, mode);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetAMRWbEncFormat() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->SetAMRWbEncFormat(mode);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetAMRWbEncFormat() AMR-wb codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Sets the AMR-WB decoder packing format for |channel|. Unsupported on
+// Android/iPhone builds and when WEBRTC_CODEC_GSMAMRWB is not defined.
+int VoECodecImpl::SetAMRWbDecFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetAMRWbDecFormat(channel=%i, mode=%i)", channel, mode);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetAMRWbDecFormat() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->SetAMRWbDecFormat(mode);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetAMRWbDecFormat() AMR-wb codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Registers the payload type used to receive |codec| on |channel|.
+// Returns 0 on success, -1 on failure (last error is set).
+// Fix vs. original: the channel-lookup error message wrongly named
+// GetRecPayloadType(); it now names SetRecPayloadType().
+int VoECodecImpl::SetRecPayloadType(int channel, const CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetRecPayloadType(channel=%d, codec)", channel);
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "codec: plname=%s, plfreq=%d, pltype=%d, channels=%u, "
+               "pacsize=%d, rate=%d", codec.plname, codec.plfreq, codec.pltype,
+               codec.channels, codec.pacsize, codec.rate);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetRecPayloadType() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->SetRecPayloadType(codec);
+}
+
+// Retrieves the registered receive payload type for |codec| on |channel|.
+// Returns 0 on success, -1 on failure (last error is set).
+int VoECodecImpl::GetRecPayloadType(int channel, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetRecPayloadType(channel=%d, codec)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetRecPayloadType() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->GetRecPayloadType(codec);
+}
+
+// Sets the payload type used to send comfort noise on |channel|.
+// |type| must be in the RTP dynamic range (96-127) and |frequency| must be
+// 16 or 32 kHz (CN/8000 has a fixed payload type). Fails while sending.
+// Returns 0 on success, -1 on failure (last error is set).
+// Fix vs. original: error message typo "unable so set" -> "unable to set".
+int VoECodecImpl::SetSendCNPayloadType(int channel, int type,
+                                       PayloadFrequencies frequency)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetSendCNPayloadType(channel=%d, type=%d, frequency=%d)",
+                 channel, type, frequency);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (type < 96 || type > 127)
+    {
+        // Only allow dynamic range: 96 to 127
+        _engineStatistics.SetLastError(VE_INVALID_PLTYPE, kTraceError,
+                                       "SetSendCNPayloadType() invalid payload "
+                                       "type");
+        return -1;
+    }
+    if ((frequency != kFreq16000Hz) && (frequency != kFreq32000Hz))
+    {
+        // It is not possible to modify the payload type for CN/8000.
+        // We only allow modification of the CN payload type for CN/16000
+        // and CN/32000.
+        _engineStatistics.SetLastError(VE_INVALID_PLFREQ, kTraceError,
+                                       "SetSendCNPayloadType() invalid payload"
+                                       " frequency");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetSendCNPayloadType() failed to "
+                                       "locate channel");
+        return -1;
+    }
+    if (channelPtr->Sending())
+    {
+        _engineStatistics.SetLastError(VE_SENDING, kTraceError,
+                                       "SetSendCNPayloadType unable to set "
+                                       "payload type while sending");
+        return -1;
+    }
+    return channelPtr->SetSendCNPayloadType(type, frequency);
+}
+
+// Sets the initial iSAC target rate for |channel|. Unsupported on
+// Android/iPhone builds and when WEBRTC_CODEC_ISAC is not defined.
+int VoECodecImpl::SetISACInitTargetRate(int channel, int rateBps,
+                                        bool useFixedFrameSize)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetISACInitTargetRate(channel=%d, rateBps=%d, "
+                 "useFixedFrameSize=%d)", channel, rateBps, useFixedFrameSize);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_ISAC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetISACInitTargetRate() failed to "
+                                       "locate channel");
+        return -1;
+    }
+    return channelPtr->SetISACInitTargetRate(rateBps, useFixedFrameSize);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetISACInitTargetRate() iSAC codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Sets the maximum iSAC rate for |channel|. Unsupported on Android/iPhone
+// builds and when WEBRTC_CODEC_ISAC is not defined.
+int VoECodecImpl::SetISACMaxRate(int channel, int rateBps)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetISACMaxRate(channel=%d, rateBps=%d)", channel, rateBps);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_ISAC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetISACMaxRate() failed to locate "
+                                       "channel");
+        return -1;
+    }
+    return channelPtr->SetISACMaxRate(rateBps);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetISACMaxRate() iSAC codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Sets the maximum iSAC payload size for |channel|. Unsupported on
+// Android/iPhone builds and when WEBRTC_CODEC_ISAC is not defined.
+// Fix vs. original: removed an unreachable trailing "return 0;" after the
+// #endif — both preprocessor branches already return.
+int VoECodecImpl::SetISACMaxPayloadSize(int channel, int sizeBytes)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetISACMaxPayloadSize(channel=%d, sizeBytes=%d)", channel,
+                 sizeBytes);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_ISAC
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetISACMaxPayloadSize() failed to "
+                                       "locate channel");
+        return -1;
+    }
+    return channelPtr->SetISACMaxPayloadSize(sizeBytes);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetISACMaxPayloadSize() iSAC codec is not "
+                                   "supported");
+    return -1;
+#endif
+}
+
+// Enables/disables VAD on |channel|, mapping the public VadModes enum to
+// the ACM's ACMVADMode. |disableDTX| suppresses DTX even when VAD is on.
+// Returns the channel's result; -1 on lookup/init failure.
+int VoECodecImpl::SetVADStatus(int channel, bool enable, VadModes mode,
+                               bool disableDTX)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetVADStatus(channel=%i, enable=%i, mode=%i, disableDTX=%i)",
+                 channel, enable, mode, disableDTX);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetVADStatus failed to locate channel");
+        return -1;
+    }
+
+    // Public VadModes -> ACM VAD aggressiveness levels.
+    ACMVADMode vadMode(VADNormal);
+    switch (mode)
+    {
+        case kVadConventional:
+            vadMode = VADNormal;
+            break;
+        case kVadAggressiveLow:
+            vadMode = VADLowBitrate;
+            break;
+        case kVadAggressiveMid:
+            vadMode = VADAggr;
+            break;
+        case kVadAggressiveHigh:
+            vadMode = VADVeryAggr;
+            break;
+    }
+    return channelPtr->SetVADStatus(enable, vadMode, disableDTX);
+}
+
+// Queries the VAD/DTX state of |channel|, mapping the ACM's ACMVADMode
+// back to the public VadModes enum. Returns 0 on success, -1 on error.
+int VoECodecImpl::GetVADStatus(int channel, bool& enabled, VadModes& mode,
+                               bool& disabledDTX)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetVADStatus(channel=%i)", channel);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetVADStatus failed to locate channel");
+        return -1;
+    }
+
+    ACMVADMode vadMode;
+    int ret = channelPtr->GetVADStatus(enabled, vadMode, disabledDTX);
+
+    if (ret != 0)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_OPERATION, kTraceError,
+                                       "GetVADStatus failed to get VAD mode");
+        return -1;
+    }
+    // ACM VAD aggressiveness levels -> public VadModes.
+    switch (vadMode)
+    {
+        case VADNormal:
+            mode = kVadConventional;
+            break;
+        case VADLowBitrate:
+            mode = kVadAggressiveLow;
+            break;
+        case VADAggr:
+            mode = kVadAggressiveMid;
+            break;
+        case VADVeryAggr:
+            mode = kVadAggressiveHigh;
+            break;
+    }
+
+    return 0;
+}
+
+// Copies |fromInst| to |toInst|, remapping SILK packet sizes from the
+// ACM's internal convention to the external one. Only SILK at 12 kHz and
+// 24 kHz is affected: e.g. ACM pacsize 320/640/960 @ 12 kHz becomes
+// external 240/480/720. Inverse of ExternalToACMCodecRepresentation().
+void VoECodecImpl::ACMToExternalCodecRepresentation(CodecInst& toInst,
+                                                    const CodecInst& fromInst)
+{
+    toInst = fromInst;
+    if (STR_CASE_CMP(fromInst.plname,"SILK") == 0)
+    {
+        if (fromInst.plfreq == 12000)
+        {
+            if (fromInst.pacsize == 320)
+            {
+                toInst.pacsize = 240;
+            }
+            else if (fromInst.pacsize == 640)
+            {
+                toInst.pacsize = 480;
+            }
+            else if (fromInst.pacsize == 960)
+            {
+                toInst.pacsize = 720;
+            }
+        }
+        else if (fromInst.plfreq == 24000)
+        {
+            if (fromInst.pacsize == 640)
+            {
+                toInst.pacsize = 480;
+            }
+            else if (fromInst.pacsize == 1280)
+            {
+                toInst.pacsize = 960;
+            }
+            else if (fromInst.pacsize == 1920)
+            {
+                toInst.pacsize = 1440;
+            }
+        }
+    }
+}
+
+// Copies |fromInst| to |toInst|, remapping SILK packet sizes from the
+// external convention to the ACM's internal one. Only SILK at 12 kHz and
+// 24 kHz is affected: e.g. external pacsize 240/480/720 @ 12 kHz becomes
+// ACM 320/640/960. Inverse of ACMToExternalCodecRepresentation().
+void VoECodecImpl::ExternalToACMCodecRepresentation(CodecInst& toInst,
+                                                    const CodecInst& fromInst)
+{
+    toInst = fromInst;
+    if (STR_CASE_CMP(fromInst.plname,"SILK") == 0)
+    {
+        if (fromInst.plfreq == 12000)
+        {
+            if (fromInst.pacsize == 240)
+            {
+                toInst.pacsize = 320;
+            }
+            else if (fromInst.pacsize == 480)
+            {
+                toInst.pacsize = 640;
+            }
+            else if (fromInst.pacsize == 720)
+            {
+                toInst.pacsize = 960;
+            }
+        }
+        else if (fromInst.plfreq == 24000)
+        {
+            if (fromInst.pacsize == 480)
+            {
+                toInst.pacsize = 640;
+            }
+            else if (fromInst.pacsize == 960)
+            {
+                toInst.pacsize = 1280;
+            }
+            else if (fromInst.pacsize == 1440)
+            {
+                toInst.pacsize = 1920;
+            }
+        }
+    }
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_CODEC_API
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_codec_impl.h b/trunk/src/voice_engine/main/source/voe_codec_impl.h
new file mode 100644
index 0000000..fde1d41
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_codec_impl.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CODEC_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_CODEC_IMPL_H
+
+#include "voe_codec.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc
+{
+
+// Implementation of the public VoECodec sub-API. Mixes in the engine-wide
+// shared state (voe::SharedData) and per-interface reference counting
+// (voe::RefCount). Instances are created/destroyed by the engine, hence
+// the protected ctor/dtor.
+class VoECodecImpl: public virtual voe::SharedData,
+                    public VoECodec,
+                    public voe::RefCount
+{
+public:
+    // Decrements the interface reference count; see RefCount.
+    virtual int Release();
+
+    // Number of codecs supported by the ACM.
+    virtual int NumOfCodecs();
+
+    // Fetches codec |index| in external representation.
+    virtual int GetCodec(int index, CodecInst& codec);
+
+    // Send-codec configuration per channel.
+    virtual int SetSendCodec(int channel, const CodecInst& codec);
+
+    virtual int GetSendCodec(int channel, CodecInst& codec);
+
+    // Codec of the last received audio on the channel.
+    virtual int GetRecCodec(int channel, CodecInst& codec);
+
+    // AMR / AMR-WB packing formats (build-option dependent).
+    virtual int SetAMREncFormat(int channel,
+                                AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetAMRDecFormat(int channel,
+                                AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetAMRWbEncFormat(int channel,
+                                  AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetAMRWbDecFormat(int channel,
+                                  AmrMode mode = kRfc3267BwEfficient);
+
+    // Comfort-noise payload type (dynamic range 96-127; 16/32 kHz only).
+    virtual int SetSendCNPayloadType(
+        int channel, int type,
+        PayloadFrequencies frequency = kFreq16000Hz);
+
+    // Receive payload-type registration.
+    virtual int SetRecPayloadType(int channel,
+                                  const CodecInst& codec);
+
+    virtual int GetRecPayloadType(int channel, CodecInst& codec);
+
+    // iSAC tuning (build-option dependent).
+    virtual int SetISACInitTargetRate(int channel,
+                                      int rateBps,
+                                      bool useFixedFrameSize = false);
+
+    virtual int SetISACMaxRate(int channel, int rateBps);
+
+    virtual int SetISACMaxPayloadSize(int channel, int sizeBytes);
+
+    // VAD/DTX control and inspection.
+    virtual int SetVADStatus(int channel,
+                             bool enable,
+                             VadModes mode = kVadConventional,
+                             bool disableDTX = false);
+
+    virtual int GetVADStatus(int channel,
+                             bool& enabled,
+                             VadModes& mode,
+                             bool& disabledDTX);
+
+protected:
+    VoECodecImpl();
+    virtual ~VoECodecImpl();
+
+private:
+    // Translate SILK packet sizes between the ACM's internal convention
+    // and the external API's (see the .cc for the exact mapping).
+    void ACMToExternalCodecRepresentation(CodecInst& toInst,
+                                          const CodecInst& fromInst);
+
+    void ExternalToACMCodecRepresentation(CodecInst& toInst,
+                                          const CodecInst& fromInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_CODEC_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_dtmf_impl.cc b/trunk/src/voice_engine/main/source/voe_dtmf_impl.cc
new file mode 100644
index 0000000..67f4c46
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_dtmf_impl.cc
@@ -0,0 +1,473 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_dtmf_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEDtmf* VoEDtmf::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_API
+    return NULL;
+#else
+    // Hand out the DTMF sub-API for this engine and bump its
+    // interface reference count.
+    if (voiceEngine == NULL)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* engineImpl =
+        reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    VoEDtmfImpl* dtmfInterface = engineImpl;
+    (*dtmfInterface)++;
+    return dtmfInterface;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+
+// Ctor. Local DTMF feedback defaults to enabled, with direct (immediate)
+// feedback disabled; see SetDtmfFeedbackStatus() for the toggles.
+VoEDtmfImpl::VoEDtmfImpl() :
+    _dtmfFeedback(true),
+    _dtmfDirectFeedback(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1 ),
+                 "VoEDtmfImpl::VoEDtmfImpl() - ctor");
+}
+
+// Dtor. Only emits a memory trace; no resources are owned by this class.
+VoEDtmfImpl::~VoEDtmfImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEDtmfImpl::~VoEDtmfImpl() - dtor");
+}
+
+int VoEDtmfImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEDtmf::Release()");
+    (*this)--;
+    const int refCount = GetCount();
+    if (refCount >= 0)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                     "VoEDtmf reference counter = %d", refCount);
+        return (refCount);
+    }
+    // Released more times than acquired: reset the reference counter to
+    // zero so the VoiceEngine may be deleted, and report the error.
+    Reset();
+    _engineStatistics.SetLastError(
+        VE_INTERFACE_NOT_FOUND, kTraceWarning);
+    return (-1);
+}
+
+// Sends a telephone event on |channel|, either out-of-band (RTP telephone-
+// event payload) or inband (audio tones replacing the microphone signal).
+// Optionally plays a local feedback tone, depending on the feedback flags
+// set via SetDtmfFeedbackStatus(). Returns 0 on success, -1 on failure.
+int VoEDtmfImpl::SendTelephoneEvent(int channel,
+                                    int eventCode,
+                                    bool outOfBand,
+                                    int lengthMs,
+                                    int attenuationDb)
+{
+    // Fix: the concatenated format string was missing a space after
+    // "outOfBand=%d," which produced "outOfBand=1,length=160" in traces.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SendTelephoneEvent(channel=%d, eventCode=%d, outOfBand=%d, "
+                 "length=%d, attenuationDb=%d)",
+                 channel, eventCode, (int)outOfBand, lengthMs, attenuationDb);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SendTelephoneEvent() failed to locate channel");
+        return -1;
+    }
+    if (!channelPtr->Sending())
+    {
+        _engineStatistics.SetLastError(
+            VE_NOT_SENDING, kTraceError,
+            "SendTelephoneEvent() sending is not active");
+        return -1;
+    }
+
+    // Sanity check. Out-of-band events may use the full telephone-event
+    // code range; inband tones are restricted to the DTMF range.
+    const int maxEventCode = outOfBand ?
+        static_cast<int>(kMaxTelephoneEventCode) :
+        static_cast<int>(kMaxDtmfEventCode);
+    const bool testFailed = ((eventCode < 0) ||
+        (eventCode > maxEventCode) ||
+        (lengthMs < kMinTelephoneEventDuration) ||
+        (lengthMs > kMaxTelephoneEventDuration) ||
+        (attenuationDb < kMinTelephoneEventAttenuation) ||
+        (attenuationDb > kMaxTelephoneEventAttenuation));
+    if (testFailed)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SendTelephoneEvent() invalid parameter(s)");
+        return -1;
+    }
+
+    const bool isDtmf =
+        (eventCode >= 0) && (eventCode <= kMaxDtmfEventCode);
+    const bool playDtmfToneDirect =
+        isDtmf && (_dtmfFeedback && _dtmfDirectFeedback);
+
+    if (playDtmfToneDirect)
+    {
+        // Mute the microphone signal while playing back the tone directly.
+        // This is to reduce the risk of introducing echo from the added output.
+        _transmitMixerPtr->UpdateMuteMicrophoneTime(lengthMs);
+
+        // Play out local feedback tone directly (same approach for both inband
+        // and outband).
+        // Reduce the length of the tone by 80ms to reduce the risk of echo.
+        // For non-direct feedback, outband and inband cases are handled
+        // differently.
+        _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs-80, attenuationDb);
+    }
+
+    if (outOfBand)
+    {
+        // The RTP/RTCP module will always deliver OnPlayTelephoneEvent when
+        // an event is transmitted. It is up to the VoE to utilize it or not.
+        // This flag ensures that feedback/playout is enabled; however, the
+        // channel object must still parse out the Dtmf events (0-15) from
+        // all possible events (0-255).
+        const bool playDtmfEvent = (_dtmfFeedback && !_dtmfDirectFeedback);
+
+        return channelPtr->SendTelephoneEventOutband(eventCode,
+                                                     lengthMs,
+                                                     attenuationDb,
+                                                     playDtmfEvent);
+    }
+    else
+    {
+        // For Dtmf tones, we want to ensure that inband tones are played out
+        // in sync with the transmitted audio. This flag is utilized by the
+        // channel object to determine if the queued Dtmf event shall also
+        // be fed to the output mixer in the same step as input audio is
+        // replaced by inband Dtmf tones.
+        const bool playDtmfEvent =
+            (isDtmf && _dtmfFeedback && !_dtmfDirectFeedback);
+
+        return channelPtr->SendTelephoneEventInband(eventCode,
+                                                    lengthMs,
+                                                    attenuationDb,
+                                                    playDtmfEvent);
+    }
+}
+
+// Forwards the dynamic RTP payload type for telephone events to the channel.
+int VoEDtmfImpl::SetSendTelephoneEventPayloadType(int channel,
+                                                  unsigned char type)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetSendTelephoneEventPayloadType(channel=%d, type=%u)",
+                 channel, type);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSendTelephoneEventPayloadType() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->SetSendTelephoneEventPayloadType(type);
+}
+
+// Retrieves the channel's telephone-event payload type into |type|.
+int VoEDtmfImpl::GetSendTelephoneEventPayloadType(int channel,
+                                                  unsigned char& type)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSendTelephoneEventPayloadType(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendTelephoneEventPayloadType() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->GetSendTelephoneEventPayloadType(type);
+}
+
+// Plays a single local DTMF tone through the output mixer.
+// Requires an initialized engine and active playout.
+int VoEDtmfImpl::PlayDtmfTone(int eventCode,
+                              int lengthMs,
+                              int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "PlayDtmfTone(eventCode=%d, lengthMs=%d, attenuationDb=%d)",
+                 eventCode, lengthMs, attenuationDb);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!_audioDevicePtr->Playing())
+    {
+        _engineStatistics.SetLastError(
+            VE_NOT_PLAYING, kTraceError,
+            "PlayDtmfTone() no channel is playing out");
+        return -1;
+    }
+    // Validate event code, duration and attenuation before starting the tone.
+    const bool validEvent = (eventCode >= kMinDtmfEventCode) &&
+                            (eventCode <= kMaxDtmfEventCode);
+    const bool validLength = (lengthMs >= kMinTelephoneEventDuration) &&
+                             (lengthMs <= kMaxTelephoneEventDuration);
+    const bool validAttenuation =
+        (attenuationDb >= kMinTelephoneEventAttenuation) &&
+        (attenuationDb <= kMaxTelephoneEventAttenuation);
+    if (!validEvent || !validLength || !validAttenuation)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+        "PlayDtmfTone() invalid tone parameter(s)");
+        return -1;
+    }
+    return _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs, attenuationDb);
+}
+
+// Starts a continuous local DTMF tone; stopped via StopPlayingDtmfTone().
+int VoEDtmfImpl::StartPlayingDtmfTone(int eventCode,
+                                      int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartPlayingDtmfTone(eventCode=%d, attenuationDb=%d)",
+                 eventCode, attenuationDb);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!_audioDevicePtr->Playing())
+    {
+        _engineStatistics.SetLastError(
+            VE_NOT_PLAYING, kTraceError,
+            "StartPlayingDtmfTone() no channel is playing out");
+        return -1;
+    }
+    // No duration check here; the tone runs until explicitly stopped.
+    const bool validEvent = (eventCode >= kMinDtmfEventCode) &&
+                            (eventCode <= kMaxDtmfEventCode);
+    const bool validAttenuation =
+        (attenuationDb >= kMinTelephoneEventAttenuation) &&
+        (attenuationDb <= kMaxTelephoneEventAttenuation);
+    if (!validEvent || !validAttenuation)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartPlayingDtmfTone() invalid tone parameter(s)");
+        return -1;
+    }
+    return _outputMixerPtr->StartPlayingDtmfTone(eventCode, attenuationDb);
+}
+
+// Stops the continuous tone started by StartPlayingDtmfTone().
+int VoEDtmfImpl::StopPlayingDtmfTone()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopPlayingDtmfTone()");
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // The output mixer owns the tone generator; forward the stop request.
+    return _outputMixerPtr->StopPlayingDtmfTone();
+}
+
+// Registers |observer| for telephone-event detection on |channel| using
+// |detectionMethod|. Only available when WEBRTC_DTMF_DETECTION is compiled in.
+int VoEDtmfImpl::RegisterTelephoneEventDetection(
+    int channel,
+    TelephoneEventDetectionMethods detectionMethod,
+    VoETelephoneEventObserver& observer)
+{
+    // Fix: concatenation previously produced "detectionMethod=%d,observer="
+    // with no separating space.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RegisterTelephoneEventDetection(channel=%d, detectionMethod=%d,"
+                 " observer=0x%x)", channel, detectionMethod, &observer);
+#ifdef WEBRTC_DTMF_DETECTION
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterTelephoneEventDetection() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterTelephoneEventDetection(detectionMethod,
+                                                       observer);
+#else
+    // Fix: the error text previously named an unrelated function
+    // ("SetTelephoneEventDetectionStatus()").
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "RegisterTelephoneEventDetection() Dtmf detection is not supported");
+    return -1;
+#endif
+}
+
+// Removes any telephone-event detection observer from |channel|.
+int VoEDtmfImpl::DeRegisterTelephoneEventDetection(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+            "DeRegisterTelephoneEventDetection(channel=%d)", channel);
+#ifdef WEBRTC_DTMF_DETECTION
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        // Fix: the error text contained a stray space
+        // ("…EventDe tection()"); the mis-indented return is also corrected.
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterTelephoneEventDetection() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterTelephoneEventDetection();
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "DeRegisterTelephoneEventDetection() Dtmf detection is not supported");
+    return -1;
+#endif
+}
+
+
+// Queries whether telephone-event detection is active on |channel| and,
+// if so, which detection method is in use.
+int VoEDtmfImpl::GetTelephoneEventDetectionStatus(
+    int channel,
+    bool& enabled,
+    TelephoneEventDetectionMethods& detectionMethod)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetTelephoneEventDetectionStatus(channel=%d)", channel);
+#ifdef WEBRTC_DTMF_DETECTION
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetTelephoneEventDetectionStatus() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->GetTelephoneEventDetectionStatus(enabled,
+                                                           detectionMethod);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetTelephoneEventDetectionStatus() Dtmf detection is not supported");
+    return -1;
+#endif
+}
+
+// Enables/disables local DTMF feedback tones and selects whether they are
+// played directly (immediately) or via the regular feedback path. Guarded by
+// the API lock since the flags are read from SendTelephoneEvent().
+int VoEDtmfImpl::SetDtmfFeedbackStatus(bool enable, bool directFeedback)
+{
+    // Fix: trace string previously misspelled "directFeeback".
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetDtmfFeedbackStatus(enable=%d, directFeedback=%d)",
+                 (int)enable, (int)directFeedback);
+
+    CriticalSectionScoped sc(*_apiCritPtr);
+
+    _dtmfFeedback = enable;
+    _dtmfDirectFeedback = directFeedback;
+
+    return 0;
+}
+
+// Reads back the feedback flags set by SetDtmfFeedbackStatus().
+int VoEDtmfImpl::GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetDtmfFeedbackStatus()");
+
+    // Take the API lock so we read a consistent pair of flags.
+    CriticalSectionScoped lock(*_apiCritPtr);
+    enabled = _dtmfFeedback;
+    directFeedback = _dtmfDirectFeedback;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetDtmfFeedbackStatus() => enabled=%d, directFeedback=%d",
+                 enabled, directFeedback);
+    return 0;
+}
+
+// Enables/disables playout of received DTMF tones on |channel|.
+// Not supported on iPhone builds.
+int VoEDtmfImpl::SetDtmfPlayoutStatus(int channel, bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetDtmfPlayoutStatus(channel=%d, enable=%d)",
+                 channel, enable);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetDtmfPlayoutStatus() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->SetDtmfPlayoutStatus(enable);
+}
+
+// Queries whether received DTMF tones are played out on |channel|.
+int VoEDtmfImpl::GetDtmfPlayoutStatus(int channel, bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetDtmfPlayoutStatus(channel=%d, enabled=?)", channel);
+    IPHONE_NOT_SUPPORTED();
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetDtmfPlayoutStatus() failed to locate channel");
+        return -1;
+    }
+    enabled = targetChannel->DtmfPlayoutStatus();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetDtmfPlayoutStatus() => enabled=%d", enabled);
+    return 0;
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_dtmf_impl.h b/trunk/src/voice_engine/main/source/voe_dtmf_impl.h
new file mode 100644
index 0000000..5b53969
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_dtmf_impl.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_DTMF_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_DTMF_IMPL_H
+
+#include "voe_dtmf.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc
+{
+
+// Implementation of the public VoEDtmf sub-API. Engine-wide state is shared
+// via voe::SharedData; interface reference counting via voe::RefCount.
+class VoEDtmfImpl : public virtual voe::SharedData,
+                    public VoEDtmf,
+                    public voe::RefCount
+{
+public:
+    // Decrements the interface reference count; returns the remaining count
+    // or -1 if the interface was over-released.
+    virtual int Release();
+
+    // Sends a telephone event (out-of-band RTP event or inband tone).
+    virtual int SendTelephoneEvent(
+        int channel,
+        int eventCode,
+        bool outOfBand = true,
+        int lengthMs = 160,
+        int attenuationDb = 10);
+
+    // Dynamic RTP payload type used for outgoing telephone events.
+    virtual int SetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char type);
+
+    virtual int GetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char& type);
+
+    // Controls local feedback tones played when sending DTMF.
+    virtual int SetDtmfFeedbackStatus(bool enable,
+        bool directFeedback = false);
+
+    virtual int GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback);
+
+    // Local tone playout through the output mixer.
+    virtual int PlayDtmfTone(int eventCode,
+                             int lengthMs = 200,
+                             int attenuationDb = 10);
+
+    virtual int StartPlayingDtmfTone(int eventCode,
+                                     int attenuationDb = 10);
+
+    virtual int StopPlayingDtmfTone();
+
+    // Telephone-event detection (only when WEBRTC_DTMF_DETECTION is defined).
+    virtual int RegisterTelephoneEventDetection(
+        int channel,
+        TelephoneEventDetectionMethods detectionMethod,
+        VoETelephoneEventObserver& observer);
+
+    virtual int DeRegisterTelephoneEventDetection(int channel);
+
+    virtual int GetTelephoneEventDetectionStatus(
+        int channel,
+        bool& enabled,
+        TelephoneEventDetectionMethods& detectionMethod);
+
+    // Playout of received DTMF tones on a channel.
+    virtual int SetDtmfPlayoutStatus(int channel, bool enable);
+
+    virtual int GetDtmfPlayoutStatus(int channel, bool& enabled);
+
+protected:
+    // Created/destroyed by VoiceEngineImpl only.
+    VoEDtmfImpl();
+    virtual ~VoEDtmfImpl();
+
+private:
+    // Local feedback flags read by SendTelephoneEvent() under the API lock.
+    bool _dtmfFeedback;
+    bool _dtmfDirectFeedback;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_DTMF_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_encryption_impl.cc b/trunk/src/voice_engine/main/source/voe_encryption_impl.cc
new file mode 100644
index 0000000..5ba944b
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_encryption_impl.cc
@@ -0,0 +1,275 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_encryption_impl.h"
+
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEEncryption* VoEEncryption::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+    return NULL;
+#else
+    // Hand out the encryption sub-API and bump its reference count.
+    if (voiceEngine == NULL)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* engineImpl =
+        reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    VoEEncryptionImpl* encryptionInterface = engineImpl;
+    (*encryptionInterface)++;
+    return encryptionInterface;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+
+// Ctor. Only emits a memory trace; this class holds no state of its own.
+VoEEncryptionImpl::VoEEncryptionImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEEncryptionImpl::VoEEncryptionImpl() - ctor");
+}
+
+// Dtor. Only emits a memory trace; no resources are owned by this class.
+VoEEncryptionImpl::~VoEEncryptionImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEEncryptionImpl::~VoEEncryptionImpl() - dtor");
+}
+
+int VoEEncryptionImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEEncryption::Release()");
+    (*this)--;
+    const int refCount = GetCount();
+    if (refCount >= 0)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                     "VoEEncryption reference counter = %d", refCount);
+        return (refCount);
+    }
+    // Over-released: reset the reference counter to zero so the
+    // VoiceEngine may be deleted, and report the error.
+    Reset();
+    _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                   kTraceWarning);
+    return (-1);
+}
+
+// Enables SRTP protection of outgoing packets on |channel|; forwarded to
+// the channel object. Only available when WEBRTC_SRTP is compiled in.
+int VoEEncryptionImpl::EnableSRTPSend(
+    int channel,
+    CipherTypes cipherType,
+    int cipherKeyLength,
+    AuthenticationTypes authType,
+    int authKeyLength,
+    int authTagLength,
+    SecurityLevels level,
+    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+    bool useForRTCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "EnableSRTPSend(channel=%i, cipherType=%i, cipherKeyLength=%i,"
+                 " authType=%i, authKeyLength=%i, authTagLength=%i, level=%i, "
+                 "key=?, useForRTCP=%d)",
+                 channel, cipherType, cipherKeyLength, authType,
+                 authKeyLength, authTagLength, level, useForRTCP);
+#ifdef WEBRTC_SRTP
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "EnableSRTPSend() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->EnableSRTPSend(cipherType,
+                                         cipherKeyLength,
+                                         authType,
+                                         authKeyLength,
+                                         authTagLength,
+                                         level,
+                                         key,
+                                         useForRTCP);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "EnableSRTPSend() SRTP is not supported");
+    return -1;
+#endif
+}
+
+// Disables SRTP protection of outgoing packets on |channel|.
+int VoEEncryptionImpl::DisableSRTPSend(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "DisableSRTPSend(channel=%i)",channel);
+#ifdef WEBRTC_SRTP
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DisableSRTPSend() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->DisableSRTPSend();
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "DisableSRTPSend() SRTP is not supported");
+    return -1;
+#endif
+}
+
+// Enables SRTP protection of incoming packets on |channel|; forwarded to
+// the channel object. Only available when WEBRTC_SRTP is compiled in.
+// Fix: the parameter line and the argument list were indented with raw tab
+// characters, breaking alignment in this otherwise space-indented file.
+int VoEEncryptionImpl::EnableSRTPReceive(
+    int channel,
+    CipherTypes cipherType,
+    int cipherKeyLength,
+    AuthenticationTypes authType,
+    int authKeyLength,
+    int authTagLength,
+    SecurityLevels level,
+    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+    bool useForRTCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "EnableSRTPReceive(channel=%i, cipherType=%i, "
+                 "cipherKeyLength=%i, authType=%i, authKeyLength=%i, "
+                 "authTagLength=%i, level=%i, key=?, useForRTCP=%d)",
+                 channel, cipherType, cipherKeyLength, authType,
+                 authKeyLength, authTagLength, level, useForRTCP);
+#ifdef WEBRTC_SRTP
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "EnableSRTPReceive() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->EnableSRTPReceive(cipherType,
+                                         cipherKeyLength,
+                                         authType,
+                                         authKeyLength,
+                                         authTagLength,
+                                         level,
+                                         key,
+                                         useForRTCP);
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "EnableSRTPReceive() SRTP is not supported");
+    return -1;
+#endif
+}
+
+// Disables SRTP protection of incoming packets on |channel|.
+int VoEEncryptionImpl::DisableSRTPReceive(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "DisableSRTPReceive(channel=%i)", channel);
+#ifdef WEBRTC_SRTP
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DisableSRTPReceive() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->DisableSRTPReceive();
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "DisableSRTPReceive() SRTP is not supported");
+    return -1;
+#endif
+}
+
+// Installs an application-provided Encryption implementation on |channel|.
+int VoEEncryptionImpl::RegisterExternalEncryption(int channel,
+                                                  Encryption& encryption)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RegisterExternalEncryption(channel=%d, encryption=0x%x)",
+                 channel, &encryption);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterExternalEncryption() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->RegisterExternalEncryption(encryption);
+}
+
+// Removes any application-provided Encryption implementation from |channel|.
+int VoEEncryptionImpl::DeRegisterExternalEncryption(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "DeRegisterExternalEncryption(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* const targetChannel = scopedChannel.ChannelPtr();
+    if (NULL == targetChannel)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterExternalEncryption() failed to locate channel");
+        return -1;
+    }
+    return targetChannel->DeRegisterExternalEncryption();
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+
+// EOF
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_encryption_impl.h b/trunk/src/voice_engine/main/source/voe_encryption_impl.h
new file mode 100644
index 0000000..050dd88
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_encryption_impl.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
+
+#include "voe_encryption.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
+// Implementation of the public VoEEncryption sub-API. Engine-wide state is
+// shared via voe::SharedData; interface reference counting via voe::RefCount.
+class VoEEncryptionImpl : public virtual voe::SharedData,
+                          public VoEEncryption,
+                          public voe::RefCount
+{
+public:
+
+    // Decrements the interface reference count; returns the remaining count
+    // or -1 if the interface was over-released.
+    virtual int Release();
+
+    // SRTP (only functional when WEBRTC_SRTP is compiled in).
+    // Enables SRTP protection of outgoing packets on the given channel.
+    virtual int EnableSRTPSend(
+        int channel,
+        CipherTypes cipherType,
+        int cipherKeyLength,
+        AuthenticationTypes authType,
+        int authKeyLength,
+        int authTagLength,
+        SecurityLevels level,
+        const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+        bool useForRTCP = false);
+
+    virtual int DisableSRTPSend(int channel);
+
+    // Enables SRTP protection of incoming packets on the given channel.
+    virtual int EnableSRTPReceive(
+        int channel,
+        CipherTypes cipherType,
+        int cipherKeyLength,
+        AuthenticationTypes authType,
+        int authKeyLength,
+        int authTagLength,
+        SecurityLevels level,
+        const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+        bool useForRTCP = false);
+
+    virtual int DisableSRTPReceive(int channel);
+
+    // External encryption: install/remove an application-provided
+    // Encryption implementation on a channel.
+    virtual int RegisterExternalEncryption(
+        int channel,
+        Encryption& encryption);
+
+    virtual int DeRegisterExternalEncryption(int channel);
+
+protected:
+    // Created/destroyed by VoiceEngineImpl only.
+    VoEEncryptionImpl();
+    virtual ~VoEEncryptionImpl();
+};
+
+}   // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_external_media_impl.cc b/trunk/src/voice_engine/main/source/voe_external_media_impl.cc
new file mode 100644
index 0000000..3fc3239
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_external_media_impl.cc
@@ -0,0 +1,397 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_external_media_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voice_engine_impl.h"
+#include "voe_errors.h"
+
+namespace webrtc {
+
+VoEExternalMedia* VoEExternalMedia::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEExternalMediaImpl* d = s;
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+
// Constructs the sub-API; no playout delay has been reported yet.
VoEExternalMediaImpl::VoEExternalMediaImpl()
    : playout_delay_ms_(0)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
                 "VoEExternalMediaImpl() - ctor");
}

// Destructor only traces; shared state is owned by voe::SharedData.
VoEExternalMediaImpl::~VoEExternalMediaImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
                 "~VoEExternalMediaImpl() - dtor");
}
+
+int VoEExternalMediaImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEExternalMedia::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                       kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEExternalMedia reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoEExternalMediaImpl::RegisterExternalMediaProcessing(
+    int channel,
+    ProcessingTypes type,
+    VoEMediaProcess& processObject)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RegisterExternalMediaProcessing(channel=%d, type=%d, "
+                 "processObject=0x%x)", channel, type, &processObject);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    switch (type)
+    {
+        case kPlaybackPerChannel:
+        case kRecordingPerChannel:
+        {
+            voe::ScopedChannel sc(_channelManager, channel);
+            voe::Channel* channelPtr = sc.ChannelPtr();
+            if (channelPtr == NULL)
+            {
+                _engineStatistics.SetLastError(
+                    VE_CHANNEL_NOT_VALID, kTraceError,
+                    "RegisterExternalMediaProcessing() "
+                    "failed to locate channel");
+                return -1;
+            }
+            return channelPtr->RegisterExternalMediaProcessing(type,
+                                                               processObject);
+        }
+        case kPlaybackAllChannelsMixed:
+        {
+            return _outputMixerPtr->RegisterExternalMediaProcessing(
+                processObject);
+        }
+        case kRecordingAllChannelsMixed:
+        {
+            return _transmitMixerPtr->RegisterExternalMediaProcessing(
+                processObject);
+        }
+    }
+    return -1;
+}
+
+int VoEExternalMediaImpl::DeRegisterExternalMediaProcessing(
+    int channel,
+    ProcessingTypes type)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "DeRegisterExternalMediaProcessing(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    switch (type)
+    {
+        case kPlaybackPerChannel:
+        case kRecordingPerChannel:
+        {
+            voe::ScopedChannel sc(_channelManager, channel);
+            voe::Channel* channelPtr = sc.ChannelPtr();
+            if (channelPtr == NULL)
+            {
+                _engineStatistics.SetLastError(
+                    VE_CHANNEL_NOT_VALID, kTraceError,
+                    "RegisterExternalMediaProcessing() "
+                    "failed to locate channel");
+                return -1;
+            }
+            return channelPtr->DeRegisterExternalMediaProcessing(type);
+        }
+        case kPlaybackAllChannelsMixed:
+        {
+            return _outputMixerPtr->DeRegisterExternalMediaProcessing();
+        }
+        case kRecordingAllChannelsMixed:
+        {
+            return _transmitMixerPtr->DeRegisterExternalMediaProcessing();
+        }
+    }
+    return -1;
+}
+
+int VoEExternalMediaImpl::SetExternalRecordingStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetExternalRecordingStatus(enable=%d)", enable);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (_audioDevicePtr->Recording())
+    {
+        _engineStatistics.SetLastError(
+            VE_ALREADY_SENDING,
+            kTraceError,
+            "SetExternalRecordingStatus() cannot set state while sending");
+        return -1;
+    }
+    _externalRecording = enable;
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED,
+        kTraceError,
+        "SetExternalRecordingStatus() external recording is not supported");
+    return -1;
+#endif
+}
+
+int VoEExternalMediaImpl::ExternalRecordingInsertData(
+        const WebRtc_Word16 speechData10ms[],
+        int lengthSamples,
+        int samplingFreqHz,
+        int current_delay_ms)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "ExternalRecordingInsertData(speechData10ms=0x%x,"
+                 " lengthSamples=%u, samplingFreqHz=%d, current_delay_ms=%d)",
+                 &speechData10ms[0], lengthSamples, samplingFreqHz,
+              current_delay_ms);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!_externalRecording)
+    {
+       _engineStatistics.SetLastError(
+           VE_INVALID_OPERATION,
+           kTraceError,
+           "ExternalRecordingInsertData() external recording is not enabled");
+        return -1;
+    }
+    if (NumOfSendingChannels() == 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_ALREADY_SENDING,
+            kTraceError,
+            "SetExternalRecordingStatus() no channel is sending");
+        return -1;
+    }
+    if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
+        (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
+    {
+         _engineStatistics.SetLastError(
+             VE_INVALID_ARGUMENT,
+             kTraceError,
+             "SetExternalRecordingStatus() invalid sample rate");
+        return -1;
+    }
+    if ((0 == lengthSamples) ||
+        ((lengthSamples % (samplingFreqHz / 100)) != 0))
+    {
+         _engineStatistics.SetLastError(
+             VE_INVALID_ARGUMENT,
+             kTraceError,
+             "SetExternalRecordingStatus() invalid buffer size");
+        return -1;
+    }
+    if (current_delay_ms < 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT,
+            kTraceError,
+            "SetExternalRecordingStatus() invalid delay)");
+        return -1;
+    }
+
+    WebRtc_UWord16 blockSize = samplingFreqHz / 100;
+    WebRtc_UWord32 nBlocks = lengthSamples / blockSize;
+    WebRtc_Word16 totalDelayMS = 0;
+    WebRtc_UWord16 playoutDelayMS = 0;
+
+    for (WebRtc_UWord32 i = 0; i < nBlocks; i++)
+    {
+        if (!_externalPlayout)
+        {
+            // Use real playout delay if external playout is not enabled.
+            if (_audioDevicePtr->PlayoutDelay(&playoutDelayMS) != 0) {
+              _engineStatistics.SetLastError(
+                  VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+                  "PlayoutDelay() unable to get the playout delay");
+            }
+            totalDelayMS = current_delay_ms + playoutDelayMS;
+        }
+        else
+        {
+            // Use stored delay value given the last call
+            // to ExternalPlayoutGetData.
+            totalDelayMS = current_delay_ms + playout_delay_ms_;
+            // Compensate for block sizes larger than 10ms
+            totalDelayMS -= (WebRtc_Word16)(i*10);
+            if (totalDelayMS < 0)
+                totalDelayMS = 0;
+        }
+        _transmitMixerPtr->PrepareDemux(
+            (const WebRtc_Word8*)(&speechData10ms[i*blockSize]),
+            blockSize,
+            1,
+            samplingFreqHz,
+            totalDelayMS,
+            0,
+            0);
+
+        _transmitMixerPtr->DemuxAndMix();
+        _transmitMixerPtr->EncodeAndSend();
+    }
+    return 0;
+#else
+       _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED,
+        kTraceError,
+        "ExternalRecordingInsertData() external recording is not supported");
+    return -1;
+#endif
+}
+
+int VoEExternalMediaImpl::SetExternalPlayoutStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetExternalPlayoutStatus(enable=%d)", enable);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (_audioDevicePtr->Playing())
+    {
+        _engineStatistics.SetLastError(
+            VE_ALREADY_SENDING,
+            kTraceError,
+            "SetExternalPlayoutStatus() cannot set state while playing");
+        return -1;
+    }
+    _externalPlayout = enable;
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED,
+        kTraceError,
+        "SetExternalPlayoutStatus() external playout is not supported");
+    return -1;
+#endif
+}
+
+int VoEExternalMediaImpl::ExternalPlayoutGetData(
+    WebRtc_Word16 speechData10ms[],
+    int samplingFreqHz,
+    int current_delay_ms,
+    int& lengthSamples)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "ExternalPlayoutGetData(speechData10ms=0x%x, samplingFreqHz=%d"
+                 ",  current_delay_ms=%d)", &speechData10ms[0], samplingFreqHz,
+                 current_delay_ms);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!_externalPlayout)
+    {
+       _engineStatistics.SetLastError(
+           VE_INVALID_OPERATION,
+           kTraceError,
+           "ExternalPlayoutGetData() external playout is not enabled");
+        return -1;
+    }
+    if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
+        (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT,
+            kTraceError,
+            "ExternalPlayoutGetData() invalid sample rate");
+        return -1;
+    }
+    if (current_delay_ms < 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT,
+            kTraceError,
+            "ExternalPlayoutGetData() invalid delay)");
+        return -1;
+    }
+
+    AudioFrame audioFrame;
+
+    // Retrieve mixed output at the specified rate
+    _outputMixerPtr->MixActiveChannels();
+    _outputMixerPtr->DoOperationsOnCombinedSignal();
+    _outputMixerPtr->GetMixedAudio(samplingFreqHz, 1, audioFrame);
+
+    // Deliver audio (PCM) samples to the external sink
+    memcpy(speechData10ms,
+           audioFrame._payloadData,
+           sizeof(WebRtc_Word16)*(audioFrame._payloadDataLengthInSamples));
+    lengthSamples = audioFrame._payloadDataLengthInSamples;
+
+    // Store current playout delay (to be used by ExternalRecordingInsertData).
+    playout_delay_ms_ = current_delay_ms;
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+       VE_FUNC_NOT_SUPPORTED,
+       kTraceError,
+       "ExternalPlayoutGetData() external playout is not supported");
+    return -1;
+#endif
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_external_media_impl.h b/trunk/src/voice_engine/main/source/voe_external_media_impl.h
new file mode 100644
index 0000000..fa1ff8a
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_external_media_impl.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
+
+#include "voe_external_media.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
// Implementation of the VoEExternalMedia sub-API.  voe::SharedData
// supplies engine-wide state (mixers, channel manager, statistics);
// voe::RefCount implements the interface reference counting.
class VoEExternalMediaImpl : public virtual voe::SharedData,
                             public VoEExternalMedia,
                             public voe::RefCount
{
public:
    // Releases one reference; returns the remaining count or -1 on error.
    virtual int Release();

    // Attaches |processObject| as a media processing callback for |type|
    // (per-channel or all-channels-mixed, playback or recording side).
    virtual int RegisterExternalMediaProcessing(
        int channel,
        ProcessingTypes type,
        VoEMediaProcess& processObject);

    // Detaches the callback previously registered for |type|.
    virtual int DeRegisterExternalMediaProcessing(
        int channel,
        ProcessingTypes type);

    // Enables/disables application-driven recording; only allowed while
    // the audio device is not capturing.
    virtual int SetExternalRecordingStatus(bool enable);

    // Enables/disables application-driven playout; only allowed while
    // the audio device is not playing.
    virtual int SetExternalPlayoutStatus(bool enable);

    // Inserts externally recorded 10 ms PCM blocks into the send path.
    virtual int ExternalRecordingInsertData(
        const WebRtc_Word16 speechData10ms[],
        int lengthSamples,
        int samplingFreqHz,
        int current_delay_ms);

    // Pulls 10 ms of mixed playout PCM for external rendering; the number
    // of returned samples is stored in |lengthSamples|.
    virtual int ExternalPlayoutGetData(WebRtc_Word16 speechData10ms[],
                                       int samplingFreqHz,
                                       int current_delay_ms,
                                       int& lengthSamples);

protected:
    // Construction/destruction is managed by VoiceEngineImpl.
    VoEExternalMediaImpl();
    virtual ~VoEExternalMediaImpl();

private:
    // Playout delay (ms) passed to the last ExternalPlayoutGetData()
    // call; reused by ExternalRecordingInsertData() for delay estimation.
    int playout_delay_ms_;
};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_file_impl.cc b/trunk/src/voice_engine/main/source/voe_file_impl.cc
new file mode 100644
index 0000000..cd3193f
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_file_impl.cc
@@ -0,0 +1,1423 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_file_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "media_file.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEFile* VoEFile::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_FILE_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s =
+        reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEFileImpl* d = s;
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+
// Constructor only traces; shared state is owned by voe::SharedData.
VoEFileImpl::VoEFileImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
                 "VoEFileImpl::VoEFileImpl() - ctor");
}

// Destructor only traces; shared state is owned by voe::SharedData.
VoEFileImpl::~VoEFileImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
                 "VoEFileImpl::~VoEFileImpl() - dtor");
}
+
+int VoEFileImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEFile::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                       kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoEFile reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoEFileImpl::StartPlayingFileLocally(
+    int channel,
+    const char fileNameUTF8[1024],
+    bool loop, FileFormats format,
+    float volumeScaling,
+    int startPointMs,
+    int stopPointMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartPlayingFileLocally(channel=%d, fileNameUTF8[]=%s, "
+                 "loop=%d, format=%d, volumeScaling=%5.3f, startPointMs=%d,"
+                 " stopPointMs=%d)",
+                 channel, fileNameUTF8, loop, format, volumeScaling,
+                 startPointMs, stopPointMs);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->StartPlayingFileLocally(fileNameUTF8,
+                                               loop,
+                                               format,
+                                               startPointMs,
+                                               volumeScaling,
+                                               stopPointMs,
+                                               NULL);
+}
+
+int VoEFileImpl::StartPlayingFileLocally(int channel,
+                                         InStream* stream,
+                                         FileFormats format,
+                                         float volumeScaling,
+                                         int startPointMs,
+                                         int stopPointMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartPlayingFileLocally(channel=%d, stream, format=%d, "
+                 "volumeScaling=%5.3f, startPointMs=%d, stopPointMs=%d)",
+                 channel, format, volumeScaling, startPointMs, stopPointMs);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->StartPlayingFileLocally(stream,
+                                               format,
+                                               startPointMs,
+                                               volumeScaling,
+                                               stopPointMs,
+                                               NULL);
+}
+
+int VoEFileImpl::StopPlayingFileLocally(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopPlayingFileLocally()");
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StopPlayingFileLocally();
+}
+
+int VoEFileImpl::IsPlayingFileLocally(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "IsPlayingFileLocally(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->IsPlayingFileLocally();
+}
+
+int VoEFileImpl::ScaleLocalFilePlayout(int channel, float scale)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ScaleLocalFilePlayout(channel=%d, scale=%5.3f)",
+                 channel, scale);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->ScaleLocalFilePlayout(scale);
+}
+
+int VoEFileImpl::StartPlayingFileAsMicrophone(int channel,
+                                              const char fileNameUTF8[1024],
+                                              bool loop,
+                                              bool mixWithMicrophone,
+                                              FileFormats format,
+                                              float volumeScaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartPlayingFileAsMicrophone(channel=%d, fileNameUTF8=%s, "
+                 "loop=%d, mixWithMicrophone=%d, format=%d, "
+                 "volumeScaling=%5.3f)",
+                 channel, fileNameUTF8, loop, mixWithMicrophone, format,
+                 volumeScaling);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    const WebRtc_UWord32 startPointMs(0);
+    const WebRtc_UWord32 stopPointMs(0);
+
+    if (channel == -1)
+    {
+        int res = _transmitMixerPtr->StartPlayingFileAsMicrophone(
+            fileNameUTF8,
+            loop,
+            format,
+            startPointMs,
+            volumeScaling,
+            stopPointMs,
+            NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "StartPlayingFileAsMicrophone() failed to start"
+                         " playing file");
+            return(-1);
+        }
+        else
+        {
+            _transmitMixerPtr->SetMixWithMicStatus(mixWithMicrophone);
+            return(0);
+        }
+    }
+    else
+    {
+        // Add file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+
+        int res = channelPtr->StartPlayingFileAsMicrophone(fileNameUTF8,
+                                                           loop,
+                                                           format,
+                                                           startPointMs,
+                                                           volumeScaling,
+                                                           stopPointMs,
+                                                           NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "StartPlayingFileAsMicrophone() failed to start "
+                         "playing file");
+            return -1;
+        }
+        else
+        {
+            channelPtr->SetMixWithMicStatus(mixWithMicrophone);
+            return 0;
+        }
+    }
+}
+
+int VoEFileImpl::StartPlayingFileAsMicrophone(int channel,
+                                              InStream* stream,
+                                              bool mixWithMicrophone,
+                                              FileFormats format,
+                                              float volumeScaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartPlayingFileAsMicrophone(channel=%d, stream,"
+                 " mixWithMicrophone=%d, format=%d, volumeScaling=%5.3f)",
+                 channel, mixWithMicrophone, format, volumeScaling);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    const WebRtc_UWord32 startPointMs(0);
+    const WebRtc_UWord32 stopPointMs(0);
+
+    if (channel == -1)
+    {
+        int res = _transmitMixerPtr->StartPlayingFileAsMicrophone(
+            stream,
+            format,
+            startPointMs,
+            volumeScaling,
+            stopPointMs,
+            NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "StartPlayingFileAsMicrophone() failed to start"
+                         " playing stream");
+            return(-1);
+        }
+        else
+        {
+            _transmitMixerPtr->SetMixWithMicStatus(mixWithMicrophone);
+            return(0);
+        }
+    }
+    else
+    {
+        // Add file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(
+                VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+
+        int res = channelPtr->StartPlayingFileAsMicrophone(
+            stream, format, startPointMs, volumeScaling, stopPointMs, NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "StartPlayingFileAsMicrophone() failed to start"
+                         " playing stream");
+            return -1;
+        }
+        else
+        {
+            channelPtr->SetMixWithMicStatus(mixWithMicrophone);
+            return 0;
+        }
+    }
+}
+
+int VoEFileImpl::StopPlayingFileAsMicrophone(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopPlayingFileAsMicrophone(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        // Stop adding file before demultiplexing <=> affects all channels
+        return _transmitMixerPtr->StopPlayingFileAsMicrophone();
+    }
+    else
+    {
+        // Stop adding file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(
+                VE_CHANNEL_NOT_VALID, kTraceError,
+                "StopPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->StopPlayingFileAsMicrophone();
+    }
+}
+
+int VoEFileImpl::IsPlayingFileAsMicrophone(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "IsPlayingFileAsMicrophone(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _transmitMixerPtr->IsPlayingFileAsMicrophone();
+    }
+    else
+    {
+        // Stop adding file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(
+                VE_CHANNEL_NOT_VALID, kTraceError,
+                "IsPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->IsPlayingFileAsMicrophone();
+    }
+}
+
+int VoEFileImpl::ScaleFileAsMicrophonePlayout(int channel, float scale)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ScaleFileAsMicrophonePlayout(channel=%d, scale=%5.3f)",
+                 channel, scale);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _transmitMixerPtr->ScaleFileAsMicrophonePlayout(scale);
+    }
+    else
+    {
+        // Stop adding file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(
+                VE_CHANNEL_NOT_VALID, kTraceError,
+                "IsPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->ScaleFileAsMicrophonePlayout(scale);
+    }
+}
+
+int VoEFileImpl::StartRecordingPlayout(
+    int channel, const char* fileNameUTF8, CodecInst* compression,
+    int maxSizeBytes)
+{
+    // Starts recording the playout signal to a file. channel == -1 records
+    // the mixed output of all channels; otherwise only the given channel.
+    // Returns 0 on success, -1 on failure.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartRecordingPlayout(channel=%d, fileNameUTF8=%s, "
+                 "compression, maxSizeBytes=%d)",
+                 channel, fileNameUTF8, maxSizeBytes);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        // Fixed: propagate the output mixer's result instead of always
+        // returning 0 (the stream overload already propagates it).
+        return _outputMixerPtr->StartRecordingPlayout(fileNameUTF8,
+                                                      compression);
+    }
+    else
+    {
+        // Add file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _engineStatistics.SetLastError(
+                VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartRecordingPlayout() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->StartRecordingPlayout(fileNameUTF8, compression);
+    }
+}
+
+int VoEFileImpl::StartRecordingPlayout(
+    int channel, OutStream* stream, CodecInst* compression)
+{
+    // Starts recording the playout signal to a caller-supplied stream.
+    // channel == -1 records the mixed output of all channels.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartRecordingPlayout(channel=%d, stream, compression)",
+                 channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        // Record the mix of all channels.
+        return _outputMixerPtr->StartRecordingPlayout(stream, compression);
+    }
+    // Record one specific channel after demultiplexing.
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartRecordingPlayout() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StartRecordingPlayout(stream, compression);
+}
+
+int VoEFileImpl::StopRecordingPlayout(int channel)
+{
+    // Stops an ongoing playout recording. channel == -1 stops the recording
+    // of the mixed output; otherwise only the given channel's recording.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopRecordingPlayout(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _outputMixerPtr->StopRecordingPlayout();
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopRecordingPlayout() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StopRecordingPlayout();
+}
+
+int VoEFileImpl::StartRecordingMicrophone(
+    const char* fileNameUTF8, CodecInst* compression, int maxSizeBytes)
+{
+    // Starts recording the microphone signal to a file, starting the audio
+    // device if it is not already capturing (unless recording is external).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartRecordingMicrophone(fileNameUTF8=%s, compression, "
+                 "maxSizeBytes=%d)", fileNameUTF8, maxSizeBytes);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (_transmitMixerPtr->StartRecordingMicrophone(fileNameUTF8, compression))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingMicrophone() failed to start recording");
+        return -1;
+    }
+    if (_audioDevicePtr->Recording())
+    {
+        // Device already capturing; nothing more to do.
+        return 0;
+    }
+    if (_externalRecording)
+    {
+        // Capture is driven externally; do not touch the audio device.
+        return 0;
+    }
+    if (_audioDevicePtr->InitRecording() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingMicrophone() failed to initialize"
+                     " recording");
+        return -1;
+    }
+    if (_audioDevicePtr->StartRecording() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+            "StartRecordingMicrophone() failed to start recording");
+        return -1;
+    }
+    return 0;
+}
+
+int VoEFileImpl::StartRecordingMicrophone(
+    OutStream* stream, CodecInst* compression)
+{
+    // Starts recording the microphone signal to a caller-supplied stream,
+    // starting the audio device if needed (unless recording is external).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartRecordingMicrophone(stream, compression)");
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (_transmitMixerPtr->StartRecordingMicrophone(stream, compression) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingMicrophone() failed to start recording");
+        return -1;
+    }
+    if (_audioDevicePtr->Recording())
+    {
+        // Device already capturing; nothing more to do.
+        return 0;
+    }
+    if (_externalRecording)
+    {
+        // Capture is driven externally; do not touch the audio device.
+        return 0;
+    }
+    if (_audioDevicePtr->InitRecording() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingMicrophone() failed to initialize "
+                     "recording");
+        return -1;
+    }
+    if (_audioDevicePtr->StartRecording() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingMicrophone() failed to start"
+                     " recording");
+        return -1;
+    }
+    return 0;
+}
+
+int VoEFileImpl::StopRecordingMicrophone()
+{
+    // Stops recording the microphone signal; also stops the audio device
+    // when no channel is sending anymore.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopRecordingMicrophone()");
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NumOfSendingChannels() == 0)
+    {
+        // No channel is transmitting: the device capture is no longer needed.
+        if (_audioDevicePtr->Recording() &&
+            (_audioDevicePtr->StopRecording() != 0))
+        {
+            _engineStatistics.SetLastError(
+                VE_CANNOT_STOP_RECORDING, kTraceError,
+                "StopRecordingMicrophone() failed to stop recording");
+            return -1;
+        }
+    }
+    return _transmitMixerPtr->StopRecordingMicrophone();
+}
+
+int VoEFileImpl::ConvertPCMToWAV(const char* fileNameInUTF8,
+                                 const char* fileNameOutUTF8)
+{
+    // Converts a 16 kHz linear PCM file to a WAV file by replaying the input
+    // through a FilePlayer and re-recording it with a FileRecorder.
+    // Returns 0 on success, -1 if either helper object cannot be started.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertPCMToWAV(fileNameInUTF8=%s, fileNameOutUTF8=%s)",
+                 fileNameInUTF8, fileNameOutUTF8);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1,
+        kFileFormatPcm16kHzFile));
+
+    int res=playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0, NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatWavFile));
+
+    // Target codec: mono L16 @ 16 kHz (256 kbit/s), 10 ms (160-sample) frames.
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the file, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-file was reached.
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency, AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToWAV failed during conversion "
+                         "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-file write error can be lost; confirm
+            // this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToWAV failed during converstion "
+                         "(write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertPCMToWAV(InStream* streamIn, OutStream* streamOut)
+{
+    // Stream variant of ConvertPCMToWAV: reads 16 kHz linear PCM from
+    // streamIn and writes WAV to streamOut. Returns 0 on success, -1 on
+    // failure (NULL stream, or player/recorder could not be started).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertPCMToWAV(streamIn, streamOut)");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+            "invalid stream handles");
+        return (-1);
+    }
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
+        kFileFormatPcm16kHzFile));
+    int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(-1,
+        kFileFormatWavFile));
+    // Target codec: mono L16 @ 16 kHz (256 kbit/s), 10 ms (160-sample) frames.
+    CodecInst codecInst;
+    strncpy(codecInst.plname, "L16", 32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+    res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the input, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-input was reached.
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength, frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToWAV failed during conversion "
+                         "(create audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-stream write error can be lost;
+            // confirm this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToWAV failed during converstion "
+                         "(write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertWAVToPCM(const char* fileNameInUTF8,
+                                 const char* fileNameOutUTF8)
+{
+    // Converts a WAV file to a 16 kHz linear PCM file by replaying the input
+    // through a FilePlayer and re-recording it with a FileRecorder.
+    // Returns 0 on success, -1 if either helper object cannot be started.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertWAVToPCM(fileNameInUTF8=%s, fileNameOutUTF8=%s)",
+                 fileNameInUTF8, fileNameOutUTF8);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
+                                                        kFileFormatWavFile));
+    int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    // Target codec: mono L16 @ 16 kHz (256 kbit/s), 10 ms (160-sample) frames.
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the file, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-file was reached.
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                   (WebRtc_UWord16)decLength,
+                                   frequency, AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertWAVToPCM failed during conversion "
+                         "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-file write error can be lost; confirm
+            // this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertWAVToPCM failed during converstion "
+                         "(write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertWAVToPCM(InStream* streamIn, OutStream* streamOut)
+{
+    // Stream variant of ConvertWAVToPCM: reads WAV from streamIn and writes
+    // 16 kHz linear PCM to streamOut. Returns 0 on success, -1 on failure
+    // (NULL stream, or player/recorder could not be started).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertWAVToPCM(streamIn, streamOut)");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "invalid stream handles");
+        return (-1);
+    }
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
+                                                        kFileFormatWavFile));
+    int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    // Target codec: mono L16 @ 16 kHz (256 kbit/s), 10 ms (160-sample) frames.
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the input, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-input was reached.
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength, frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertWAVToPCM failed during conversion "
+                         "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-stream write error can be lost;
+            // confirm this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertWAVToPCM failed during converstion"
+                         " (write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertPCMToCompressed(const char* fileNameInUTF8,
+                                        const char* fileNameOutUTF8,
+                                        CodecInst* compression)
+{
+    // Converts a 16 kHz linear PCM file to a compressed file using the
+    // caller-supplied codec settings. Returns 0 on success, -1 if either
+    // helper object cannot be started.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertPCMToCompressed(fileNameInUTF8=%s, fileNameOutUTF8=%s"
+                 ",  compression)", fileNameInUTF8, fileNameOutUTF8);
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "  compression: plname=%s, plfreq=%d, pacsize=%d",
+                 compression->plname, compression->plfreq,
+                 compression->pacsize);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1,
+        kFileFormatPcm16kHzFile));
+    int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0, NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create player object");
+        // Clean up and shutdown the file player
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1,
+        kFileFormatCompressedFile));
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8, *compression,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the file, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-file was reached.
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                  frequency, AudioFrame::kNormalSpeech,
+                                  AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToCompressed failed during conversion "
+                         "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-file write error can be lost; confirm
+            // this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToCompressed failed during converstion "
+                         "(write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertPCMToCompressed(InStream* streamIn,
+                                        OutStream* streamOut,
+                                        CodecInst* compression)
+{
+    // Stream variant of ConvertPCMToCompressed: reads 16 kHz linear PCM from
+    // streamIn and writes compressed audio (per *compression) to streamOut.
+    // Returns 0 on success, -1 on failure (NULL stream, or player/recorder
+    // could not be started).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertPCMToCompressed(streamIn, streamOut, compression)");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "invalid stream handles");
+        return (-1);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "  compression: plname=%s, plfreq=%d, pacsize=%d",
+                 compression->plname, compression->plfreq,
+                 compression->pacsize);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1, kFileFormatPcm16kHzFile));
+
+    int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatCompressedFile));
+    res = recObj.StartRecordingAudioFile(*streamOut,*compression,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the input, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-input was reached.
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency, AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToCompressed failed during conversion"
+                         " (audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-stream write error can be lost;
+            // confirm this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertPCMToCompressed failed during converstion "
+                         "(write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertCompressedToPCM(const char* fileNameInUTF8,
+                                        const char* fileNameOutUTF8)
+{
+    // Converts a compressed audio file to a 16 kHz linear PCM file by
+    // replaying the input through a FilePlayer and re-recording it with a
+    // FileRecorder. Returns 0 on success, -1 if either helper object cannot
+    // be started.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertCompressedToPCM(fileNameInUTF8=%s,"
+                 " fileNameOutUTF8=%s)",
+                 fileNameInUTF8, fileNameOutUTF8);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1, kFileFormatCompressedFile));
+
+    int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    // Target codec: mono L16 @ 16 kHz (256 kbit/s), 10 ms (160-sample) frames.
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the file, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-file was reached.
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertCompressedToPCM failed during conversion "
+                         "(create audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-file write error can be lost; confirm
+            // this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertCompressedToPCM failed during converstion "
+                         "(write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertCompressedToPCM(InStream* streamIn,
+                                        OutStream* streamOut)
+{
+    // Stream variant of ConvertCompressedToPCM: reads compressed audio from
+    // streamIn and writes 16 kHz linear PCM to streamOut. Returns 0 on
+    // success, -1 on failure (NULL stream, or player/recorder could not be
+    // started).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "ConvertCompressedToPCM(file, file);");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+            "invalid stream handles");
+        return (-1);
+    }
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1, kFileFormatCompressedFile));
+    int res;
+
+    res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    // Target codec: mono L16 @ 16 kHz (256 kbit/s), 10 ms (160-sample) frames.
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create recorder object");
+        // Tear down both helper objects before bailing out.
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run through the input, pulling and re-recording 10 ms at a time.
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    WebRtc_UWord32 decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // A short read means end-of-input was reached.
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertCompressedToPCM failed during conversion"
+                         " (audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            // NOTE(review): no break here — a later successful iteration
+            // overwrites res, so a mid-stream write error can be lost;
+            // confirm this is intended.
+            WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                         "ConvertCompressedToPCM failed during converstion"
+                         " (write frame)");
+        }
+    }
+
+    // Unconditional cleanup of both helper objects.
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+
+int VoEFileImpl::GetFileDuration(const char* fileNameUTF8,
+                                 int& durationMs,
+                                 FileFormats format)
+{
+    // Measures the duration (in milliseconds) of the given audio file.
+    // Returns 0 on success and -1 on failure (last error code is set).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetFileDuration(fileNameUTF8=%s, format=%d)",
+                 fileNameUTF8, format);
+
+    // Create a dummy file module for this
+    MediaFile * fileModule=MediaFile::CreateMediaFile(-1);
+
+    // Temp container of the right format
+    WebRtc_UWord32 duration;
+    int res=fileModule->FileDurationMs(fileNameUTF8,duration,format);
+    if (res)
+    {
+        _engineStatistics.SetLastError(VE_BAD_FILE, kTraceError,
+            "GetFileDuration() failed measure file duration");
+        // Fixed: release the media file module on the error path too
+        // (previously leaked).
+        MediaFile::DestroyMediaFile(fileModule);
+        return -1;
+    }
+    durationMs = duration;
+    MediaFile::DestroyMediaFile(fileModule);
+    fileModule = NULL;
+
+    return(res);
+}
+
+int VoEFileImpl::GetPlaybackPosition(int channel, int& positionMs)
+{
+    // Retrieves the current local-file playout position (ms) for the given
+    // channel. Returns 0 on success, -1 on failure.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetPlaybackPosition(channel=%d)", channel);
+
+    // Added for consistency: every other API in this file rejects calls
+    // before the engine is initialized.
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPlaybackPosition() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetLocalPlayoutPosition(positionMs);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_FILE_API
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_file_impl.h b/trunk/src/voice_engine/main/source/voe_file_impl.h
new file mode 100644
index 0000000..2d93876
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_file_impl.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_FILE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_FILE_IMPL_H
+
+#include "voe_file.h"
+#include "shared_data.h"
+#include "ref_count.h"
+
+namespace webrtc {
+
+// Implementation of the VoEFile sub-API: file playout, file-as-microphone,
+// recording to file, format conversion and file queries. Shares engine
+// state via voe::SharedData and is reference counted via voe::RefCount.
+class VoEFileImpl : public virtual voe::SharedData,
+                    public VoEFile, public voe::RefCount
+{
+public:
+    // Decrements the sub-API reference count; see voe::RefCount.
+    virtual int Release();
+
+    // Playout file locally
+
+    virtual int StartPlayingFileLocally(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0,
+        int stopPointMs = 0);
+
+    virtual int StartPlayingFileLocally(
+        int channel,
+        InStream* stream,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0, int stopPointMs = 0);
+
+    virtual int StopPlayingFileLocally(int channel);
+
+    virtual int IsPlayingFileLocally(int channel);
+
+    virtual int ScaleLocalFilePlayout(int channel, float scale);
+
+    // Use file as microphone input
+
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false ,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0);
+
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        InStream* stream,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0);
+
+    virtual int StopPlayingFileAsMicrophone(int channel);
+
+    virtual int IsPlayingFileAsMicrophone(int channel);
+
+    virtual int ScaleFileAsMicrophonePlayout(int channel, float scale);
+
+    // Record speaker signal to file
+
+    virtual int StartRecordingPlayout(int channel,
+                                      const char* fileNameUTF8,
+                                      CodecInst* compression = NULL,
+                                      int maxSizeBytes = -1);
+
+    virtual int StartRecordingPlayout(int channel,
+                                      OutStream* stream,
+                                      CodecInst* compression = NULL);
+
+    virtual int StopRecordingPlayout(int channel);
+
+    // Record microphone signal to file
+
+    virtual int StartRecordingMicrophone(const char* fileNameUTF8,
+                                         CodecInst* compression = NULL,
+                                         int maxSizeBytes = -1);
+
+    virtual int StartRecordingMicrophone(OutStream* stream,
+                                         CodecInst* compression = NULL);
+
+    virtual int StopRecordingMicrophone();
+
+    // Conversion between different file formats
+
+    virtual int ConvertPCMToWAV(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8);
+
+    virtual int ConvertPCMToWAV(InStream* streamIn,
+                                OutStream* streamOut);
+
+    virtual int ConvertWAVToPCM(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8);
+
+    virtual int ConvertWAVToPCM(InStream* streamIn,
+                                OutStream* streamOut);
+
+    virtual int ConvertPCMToCompressed(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8,
+                                       CodecInst* compression);
+
+    virtual int ConvertPCMToCompressed(InStream* streamIn,
+                                       OutStream* streamOut,
+                                       CodecInst* compression);
+
+    virtual int ConvertCompressedToPCM(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8);
+
+    virtual int ConvertCompressedToPCM(InStream* streamIn,
+                                       OutStream* streamOut);
+
+    // Misc file functions
+
+    virtual int GetFileDuration(
+        const char* fileNameUTF8,
+        int& durationMs,
+        FileFormats format = kFileFormatPcm16kHzFile);
+
+    virtual int GetPlaybackPosition(int channel, int& positionMs);
+
+protected:
+    // Construction/destruction is restricted to the owning engine.
+    VoEFileImpl();
+    virtual ~VoEFileImpl();
+
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_FILE_IMPL_H
+
diff --git a/trunk/src/voice_engine/main/source/voe_hardware_impl.cc b/trunk/src/voice_engine/main/source/voe_hardware_impl.cc
new file mode 100644
index 0000000..7e676c0
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_hardware_impl.cc
@@ -0,0 +1,850 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_hardware_impl.h"
+
+#include <cassert>
+
+#include "cpu_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+// Returns the hardware sub-API for the given engine, or NULL when the
+// sub-API is compiled out or the engine pointer is NULL.
+VoEHardware* VoEHardware::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_HARDWARE_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    // VoiceEngineImpl derives from every sub-API implementation, so the
+    // opaque engine pointer can be narrowed to VoEHardwareImpl.
+    VoiceEngineImpl* s =
+            reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEHardwareImpl* d = s;
+    // Take a sub-API reference (voe::RefCount); the caller must balance
+    // this with a matching Release().
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+
+// Constructor: creates the CPU measurement wrapper (may be NULL on
+// platforms without support) and primes it so later samples are deltas.
+VoEHardwareImpl::VoEHardwareImpl() :
+    _cpu(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEHardwareImpl() - ctor");
+
+    _cpu = CpuWrapper::CreateCpu();
+    if (_cpu != NULL)
+    {
+        // First call initializes the usage baseline.
+        _cpu->CpuUsage();
+    }
+}
+
+// Destructor: releases the CPU wrapper if one was created.
+VoEHardwareImpl::~VoEHardwareImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "~VoEHardwareImpl() - dtor");
+
+    // delete on a NULL pointer is a no-op, so no guard is needed.
+    delete _cpu;
+    _cpu = NULL;
+}
+
+// Drops one sub-API reference taken in GetInterface(). Returns the
+// remaining reference count, or -1 if Release() was called more times
+// than the interface was acquired.
+int VoEHardwareImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEHardwareImpl::Release()");
+    // Decrement the voe::RefCount counter.
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        // Over-released: restore the counter to a sane state and report.
+        Reset();
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                       kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoEHardwareImpl reference counter = %d", refCount);
+    return (refCount);
+}
+
+// Selects which platform audio layer the ADM should use. Must be called
+// before the engine is initialized; the choice takes effect in Init().
+int VoEHardwareImpl::SetAudioDeviceLayer(AudioLayers audioLayer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetAudioDeviceLayer(audioLayer=%d)", audioLayer);
+
+    // The layer cannot be changed once VoE has been initialized.
+    if (_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_ALREADY_INITED, kTraceError);
+        return -1;
+    }
+
+    // Translate the public enum into the ADM's audio-layer enum.
+    AudioDeviceModule::AudioLayer
+        admLayer(AudioDeviceModule::kPlatformDefaultAudio);
+    switch (audioLayer)
+    {
+        case kAudioWindowsCore:
+            admLayer = AudioDeviceModule::kWindowsCoreAudio;
+            break;
+        case kAudioWindowsWave:
+            admLayer = AudioDeviceModule::kWindowsWaveAudio;
+            break;
+        case kAudioLinuxAlsa:
+            admLayer = AudioDeviceModule::kLinuxAlsaAudio;
+            break;
+        case kAudioLinuxPulse:
+            admLayer = AudioDeviceModule::kLinuxPulseAudio;
+            break;
+        case kAudioPlatformDefault:
+            // kPlatformDefaultAudio is already the initial value.
+            break;
+    }
+
+    // Remember the wanted layer for Init().
+    _audioDeviceLayer = admLayer;
+
+    return 0;
+}
+
+// Reports the audio layer in use: the ADM's active layer when the ADM
+// exists, otherwise VoE's stored setting. Returns 0 on success, -1 on
+// failure (including an unrecognized layer value).
+int VoEHardwareImpl::GetAudioDeviceLayer(AudioLayers& audioLayer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+               "GetAudioDeviceLayer(devices=?)");
+
+    // Can always be called regardless of VoE state
+
+    AudioDeviceModule::AudioLayer
+        activeLayer(AudioDeviceModule::kPlatformDefaultAudio);
+
+    if (_audioDevicePtr)
+    {
+        // Get active audio layer from ADM
+        if (_audioDevicePtr->ActiveAudioLayer(&activeLayer) != 0)
+        {
+            _engineStatistics.SetLastError(VE_UNDEFINED_SC_ERR, kTraceError,
+                                           "  Audio Device error");
+            return -1;
+        }
+    }
+    else
+    {
+        // Return VoE's internal layer setting
+        activeLayer = _audioDeviceLayer;
+    }
+
+    // Map to AudioLayers
+    switch (activeLayer)
+    {
+        case AudioDeviceModule::kPlatformDefaultAudio:
+            audioLayer = kAudioPlatformDefault;
+            break;
+        case AudioDeviceModule::kWindowsCoreAudio:
+            audioLayer = kAudioWindowsCore;
+            break;
+        case AudioDeviceModule::kWindowsWaveAudio:
+            audioLayer = kAudioWindowsWave;
+            break;
+        case AudioDeviceModule::kLinuxAlsaAudio:
+            audioLayer = kAudioLinuxAlsa;
+            break;
+        case AudioDeviceModule::kLinuxPulseAudio:
+            audioLayer = kAudioLinuxPulse;
+            break;
+        default:
+            _engineStatistics.SetLastError(VE_UNDEFINED_SC_ERR, kTraceError,
+                                           "  unknown audio layer");
+            // Fail the call; previously this fell through and returned 0
+            // while leaving the output parameter unmodified.
+            return -1;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "  Output: audioLayer=%d", audioLayer);
+
+    return 0;
+}
+// Returns the number of capture devices reported by the ADM.
+int VoEHardwareImpl::GetNumOfRecordingDevices(int& devices)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetNumOfRecordingDevices(devices=?)");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (_engineStatistics.Initialized())
+    {
+        // Forward the ADM's device count to the caller.
+        devices = static_cast<int> (_audioDevicePtr->RecordingDevices());
+
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "  Output: devices=%d", devices);
+        return 0;
+    }
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+}
+
+// Returns the number of playout devices reported by the ADM.
+int VoEHardwareImpl::GetNumOfPlayoutDevices(int& devices)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetNumOfPlayoutDevices(devices=?)");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (_engineStatistics.Initialized())
+    {
+        // Forward the ADM's device count to the caller.
+        devices = static_cast<int> (_audioDevicePtr->PlayoutDevices());
+
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "  Output: devices=%d", devices);
+        return 0;
+    }
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+}
+
+// Fetches the name (and optionally GUID) of the capture device at the
+// given index. strNameUTF8 must be a 128-byte buffer; strGuidUTF8 may be
+// NULL. Returns 0 on success, -1 on failure.
+int VoEHardwareImpl::GetRecordingDeviceName(int index,
+                                            char strNameUTF8[128],
+                                            char strGuidUTF8[128])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetRecordingDeviceName(index=%d)", index);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (strNameUTF8 == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetRecordingDeviceName() invalid argument");
+        return -1;
+    }
+
+    // Note that strGuidUTF8 is allowed to be NULL
+
+    // Init len variable to length of supplied vectors
+    const WebRtc_UWord16 strLen = 128;
+
+    // Check if length has been changed in module
+    assert(strLen == kAdmMaxDeviceNameSize);
+    assert(strLen == kAdmMaxGuidSize);
+
+    WebRtc_Word8 name[strLen];
+    WebRtc_Word8 guid[strLen];
+
+    // Get names from module
+    if (_audioDevicePtr->RecordingDeviceName(index, name, guid) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_CANNOT_RETRIEVE_DEVICE_NAME, kTraceError,
+            "GetRecordingDeviceName() failed to get device name");
+        return -1;
+    }
+
+    // Copy to vectors supplied by user. strncpy() does not terminate the
+    // destination when the source fills all strLen bytes, so force a
+    // terminator to guarantee a valid C string.
+    strncpy(strNameUTF8, name, strLen);
+    strNameUTF8[strLen - 1] = '\0';
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "  Output: strNameUTF8=%s", strNameUTF8);
+
+    if (strGuidUTF8 != NULL)
+    {
+        strncpy(strGuidUTF8, guid, strLen);
+        strGuidUTF8[strLen - 1] = '\0';
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "  Output: strGuidUTF8=%s", strGuidUTF8);
+    }
+
+    return 0;
+}
+
+// Fetches the name (and optionally GUID) of the playout device at the
+// given index. strNameUTF8 must be a 128-byte buffer; strGuidUTF8 may be
+// NULL. Returns 0 on success, -1 on failure.
+int VoEHardwareImpl::GetPlayoutDeviceName(int index,
+                                          char strNameUTF8[128],
+                                          char strGuidUTF8[128])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetPlayoutDeviceName(index=%d)", index);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (strNameUTF8 == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetPlayoutDeviceName() invalid argument");
+        return -1;
+    }
+
+    // Note that strGuidUTF8 is allowed to be NULL
+
+    // Init len variable to length of supplied vectors
+    const WebRtc_UWord16 strLen = 128;
+
+    // Check if length has been changed in module
+    assert(strLen == kAdmMaxDeviceNameSize);
+    assert(strLen == kAdmMaxGuidSize);
+
+    WebRtc_Word8 name[strLen];
+    WebRtc_Word8 guid[strLen];
+
+    // Get names from module
+    if (_audioDevicePtr->PlayoutDeviceName(index, name, guid) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_CANNOT_RETRIEVE_DEVICE_NAME, kTraceError,
+            "GetPlayoutDeviceName() failed to get device name");
+        return -1;
+    }
+
+    // Copy to vectors supplied by user. strncpy() does not terminate the
+    // destination when the source fills all strLen bytes, so force a
+    // terminator to guarantee a valid C string.
+    strncpy(strNameUTF8, name, strLen);
+    strNameUTF8[strLen - 1] = '\0';
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "  Output: strNameUTF8=%s", strNameUTF8);
+
+    if (strGuidUTF8 != NULL)
+    {
+        strncpy(strGuidUTF8, guid, strLen);
+        strGuidUTF8[strLen - 1] = '\0';
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "  Output: strGuidUTF8=%s", strGuidUTF8);
+    }
+
+    return 0;
+}
+
+// Selects the capture device. index -1 = default communication device,
+// -2 = default device, otherwise a zero-based ADM index. If recording is
+// active it is stopped, the device is switched, and recording is restarted
+// (unless external recording is used). Returns 0 on success, -1 on error.
+int VoEHardwareImpl::SetRecordingDevice(int index,
+                                        StereoChannel recordingChannel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetRecordingDevice(index=%d, recordingChannel=%d)",
+                 index, (int) recordingChannel);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool isRecording(false);
+
+    // Store state about activated recording to be able to restore it after the
+    // recording device has been modified.
+    if (_audioDevicePtr->Recording())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "SetRecordingDevice() device is modified while recording"
+                     " is active...");
+        isRecording = true;
+        if (_audioDevicePtr->StopRecording() == -1)
+        {
+            _engineStatistics.SetLastError(
+                VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+                "SetRecordingDevice() unable to stop recording");
+            return -1;
+        }
+    }
+
+    // We let the module do the index sanity
+
+    // Set recording channel
+    AudioDeviceModule::ChannelType recCh =
+        AudioDeviceModule::kChannelBoth;
+    switch (recordingChannel)
+    {
+        case kStereoLeft:
+            recCh = AudioDeviceModule::kChannelLeft;
+            break;
+        case kStereoRight:
+            recCh = AudioDeviceModule::kChannelRight;
+            break;
+        case kStereoBoth:
+            // default setting kChannelBoth (<=> mono)
+            break;
+    }
+
+    // A failed channel selection is reported but does not abort the call.
+    if (_audioDevicePtr->SetRecordingChannel(recCh) != 0) {
+      _engineStatistics.SetLastError(
+          VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+          "SetRecordingChannel() unable to set the recording channel");
+    }
+
+    // Map indices to unsigned since underlying functions need that
+    WebRtc_UWord16 indexU = static_cast<WebRtc_UWord16> (index);
+
+    WebRtc_Word32 res(0);
+
+    // Negative indices select the platform's default devices.
+    if (index == -1)
+    {
+        res = _audioDevicePtr->SetRecordingDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice);
+    }
+    else if (index == -2)
+    {
+        res = _audioDevicePtr->SetRecordingDevice(
+            AudioDeviceModule::kDefaultDevice);
+    }
+    else
+    {
+        res = _audioDevicePtr->SetRecordingDevice(indexU);
+    }
+
+    if (res != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "SetRecordingDevice() unable to set the recording device");
+        return -1;
+    }
+
+    // Init microphone, so user can do volume settings etc
+    if (_audioDevicePtr->InitMicrophone() == -1)
+    {
+        _engineStatistics.SetLastError(
+            VE_CANNOT_ACCESS_MIC_VOL, kTraceWarning,
+            "SetRecordingDevice() cannot access microphone");
+    }
+
+    // Set number of channels
+    bool available(false);
+    if (_audioDevicePtr->StereoRecordingIsAvailable(&available) != 0) {
+      _engineStatistics.SetLastError(
+          VE_SOUNDCARD_ERROR, kTraceWarning,
+          "StereoRecordingIsAvailable() failed to query stereo recording");
+    }
+
+    if (_audioDevicePtr->SetStereoRecording(available ? true : false) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_SOUNDCARD_ERROR, kTraceWarning,
+            "SetRecordingDevice() failed to set mono recording mode");
+    }
+
+    // Restore recording if it was enabled already when calling this function.
+    if (isRecording)
+    {
+        if (!_externalRecording)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "SetRecordingDevice() recording is now being "
+                         "restored...");
+            if (_audioDevicePtr->InitRecording() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                             VoEId(_instanceId, -1),
+                             "SetRecordingDevice() failed to initialize "
+                             "recording");
+                return -1;
+            }
+            if (_audioDevicePtr->StartRecording() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                             VoEId(_instanceId, -1),
+                             "SetRecordingDevice() failed to start recording");
+                return -1;
+            }
+        }
+    }
+
+    return 0;
+}
+
+// Selects the playout device. index -1 = default communication device,
+// -2 = default device, otherwise a zero-based ADM index. If playout is
+// active it is stopped, the device is switched, and playout is restarted
+// (unless external playout is used). Returns 0 on success, -1 on error.
+int VoEHardwareImpl::SetPlayoutDevice(int index)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetPlayoutDevice(index=%d)", index);
+    CriticalSectionScoped cs(*_apiCritPtr);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool isPlaying(false);
+
+    // Store state about activated playout to be able to restore it after the
+    // playout device has been modified.
+    if (_audioDevicePtr->Playing())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "SetPlayoutDevice() device is modified while playout is "
+                     "active...");
+        isPlaying = true;
+        if (_audioDevicePtr->StopPlayout() == -1)
+        {
+            _engineStatistics.SetLastError(
+                VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+                "SetPlayoutDevice() unable to stop playout");
+            return -1;
+        }
+    }
+
+    // We let the module do the index sanity
+
+    // Map indices to unsigned since underlying functions need that
+    WebRtc_UWord16 indexU = static_cast<WebRtc_UWord16> (index);
+
+    WebRtc_Word32 res(0);
+
+    // Negative indices select the platform's default devices.
+    if (index == -1)
+    {
+        res = _audioDevicePtr->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice);
+    }
+    else if (index == -2)
+    {
+        res = _audioDevicePtr->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultDevice);
+    }
+    else
+    {
+        res = _audioDevicePtr->SetPlayoutDevice(indexU);
+    }
+
+    if (res != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_SOUNDCARD_ERROR, kTraceError,
+            "SetPlayoutDevice() unable to set the playout device");
+        return -1;
+    }
+
+    // Init speaker, so user can do volume settings etc
+    if (_audioDevicePtr->InitSpeaker() == -1)
+    {
+        _engineStatistics.SetLastError(
+            VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceWarning,
+            "SetPlayoutDevice() cannot access speaker");
+    }
+
+    // Set number of channels. Check the query result, mirroring
+    // SetRecordingDevice(); a failed query previously went unnoticed.
+    bool available(false);
+    if (_audioDevicePtr->StereoPlayoutIsAvailable(&available) != 0) {
+      _engineStatistics.SetLastError(
+          VE_SOUNDCARD_ERROR, kTraceWarning,
+          "StereoPlayoutIsAvailable() failed to query stereo playout");
+    }
+    if (_audioDevicePtr->SetStereoPlayout(available ? true : false) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_SOUNDCARD_ERROR, kTraceWarning,
+            "SetPlayoutDevice() failed to set stereo playout mode");
+    }
+
+    // Restore playout if it was enabled already when calling this function.
+    if (isPlaying)
+    {
+        if (!_externalPlayout)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "SetPlayoutDevice() playout is now being restored...");
+            if (_audioDevicePtr->InitPlayout() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                             VoEId(_instanceId, -1),
+                             "SetPlayoutDevice() failed to initialize playout");
+                return -1;
+            }
+            if (_audioDevicePtr->StartPlayout() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                             VoEId(_instanceId, -1),
+                             "SetPlayoutDevice() failed to start playout");
+                return -1;
+            }
+        }
+    }
+
+    return 0;
+}
+
+// Queries whether a recording device is currently available for use.
+int VoEHardwareImpl::GetRecordingDeviceStatus(bool& isAvailable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetRecordingDeviceStatus()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // We let the module do isRecording sanity
+
+    // Ask the ADM whether a capture device can currently be used.
+    bool recAvailable(false);
+    if (_audioDevicePtr->RecordingIsAvailable(&recAvailable) != 0)
+    {
+        _engineStatistics.SetLastError(VE_UNDEFINED_SC_REC_ERR, kTraceError,
+                                       "  Audio Device error");
+        return -1;
+    }
+
+    isAvailable = recAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "  Output: isAvailable = %d)", (int) isAvailable);
+
+    return 0;
+}
+
+// Queries whether a playout device is currently available for use.
+int VoEHardwareImpl::GetPlayoutDeviceStatus(bool& isAvailable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetPlayoutDeviceStatus()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // We let the module do isPlaying sanity
+
+    // Ask the ADM whether a playout device can currently be used.
+    bool playAvailable(false);
+    if (_audioDevicePtr->PlayoutIsAvailable(&playAvailable) != 0)
+    {
+        _engineStatistics.SetLastError(VE_PLAY_UNDEFINED_SC_ERR,
+                                       kTraceError, "  Audio Device error");
+        return -1;
+    }
+
+    isAvailable = playAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "  Output: isAvailable = %d)", (int) isAvailable);
+
+    return 0;
+}
+
+// Resets the sound device. Only implemented for MAC_IPHONE builds; on
+// every other platform the call fails with VE_FUNC_NOT_SUPPORTED.
+int VoEHardwareImpl::ResetAudioDevice()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "ResetAudioDevice()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+#if defined(MAC_IPHONE)
+    if (_audioDevicePtr->ResetAudioDevice() < 0)
+    {
+        _engineStatistics.SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+                                       "  Failed to reset sound device");
+        return -1;
+    }
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "  no support for resetting sound device");
+    return -1;
+#endif
+
+    // Only reachable on MAC_IPHONE builds (the #else branch returns -1).
+    return 0;
+}
+
+// Generic audio-device control hook. Not supported on any platform; the
+// call always fails with VE_FUNC_NOT_SUPPORTED once the engine is inited.
+int VoEHardwareImpl::AudioDeviceControl(unsigned int par1, unsigned int par2,
+                                        unsigned int par3)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "AudioDeviceControl(%i, %i, %i)", par1, par2, par3);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Error text corrected: the original message was copy-pasted from
+    // ResetAudioDevice() and referred to "resetting sound device".
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "  no support for audio device control");
+    return -1;
+}
+
+// Routes audio to/from the loudspeaker. Only implemented on Android;
+// other platforms fail with VE_FUNC_NOT_SUPPORTED.
+int VoEHardwareImpl::SetLoudspeakerStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetLoudspeakerStatus(enable=%i)", (int) enable);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+#if defined(WEBRTC_ANDROID)
+    // Delegate to the ADM, which talks to the Android audio system.
+    if (_audioDevicePtr->SetLoudspeakerStatus(enable) < 0)
+    {
+        _engineStatistics.SetLastError(VE_IGNORED_FUNCTION, kTraceError,
+                                       "  Failed to set loudspeaker status");
+        return -1;
+    }
+
+    return 0;
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "  no support for setting loudspeaker"
+                                   " status");
+    return -1;
+#endif
+}
+
+// Reads back the loudspeaker routing state. Only implemented on Android;
+// other platforms fail with VE_FUNC_NOT_SUPPORTED.
+int VoEHardwareImpl::GetLoudspeakerStatus(bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetLoudspeakerStatus()");
+    IPHONE_NOT_SUPPORTED();
+
+#if defined(WEBRTC_ANDROID)
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_audioDevicePtr->GetLoudspeakerStatus(&enabled) < 0)
+    {
+        _engineStatistics.SetLastError(VE_IGNORED_FUNCTION, kTraceError,
+                                       "  Failed to get loudspeaker status");
+        return -1;
+    }
+
+    return 0;
+#else
+    // Error text corrected: the original message said "setting" although
+    // this is the getter.
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "  no support for getting loudspeaker "
+                                   "status");
+    return -1;
+#endif
+}
+
+// Returns the CPU load (percent) as measured by the audio device module.
+int VoEHardwareImpl::GetCPULoad(int& loadPercent)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetCPULoad()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Get CPU load from ADM
+    WebRtc_UWord16 admLoad(0);
+    if (_audioDevicePtr->CPULoad(&admLoad) != 0)
+    {
+        _engineStatistics.SetLastError(VE_CPU_INFO_ERROR, kTraceError,
+                                       "  error getting system CPU load");
+        return -1;
+    }
+
+    loadPercent = static_cast<int> (admLoad);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "  Output: loadPercent = %d", loadPercent);
+
+    return 0;
+}
+
+// Returns the system-wide CPU load (percent) via the CpuWrapper, when
+// the platform supports it.
+int VoEHardwareImpl::GetSystemCPULoad(int& loadPercent)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSystemCPULoad(loadPercent=?)");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // A NULL wrapper means the measurement is not implemented here.
+    if (_cpu == NULL)
+    {
+        _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                       "  no support for getting system CPU "
+                                       "load");
+        return -1;
+    }
+
+    // Sample the usage; a negative value signals a measurement error.
+    const WebRtc_Word32 usage = _cpu->CpuUsage();
+    if (usage < 0)
+    {
+        _engineStatistics.SetLastError(VE_CPU_INFO_ERROR, kTraceError,
+                                       "  error getting system CPU load");
+        return -1;
+    }
+
+    loadPercent = static_cast<int> (usage);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "  Output: loadPercent = %d", loadPercent);
+
+    return 0;
+}
+
+// Enables or disables the audio device's built-in echo canceller.
+int VoEHardwareImpl::EnableBuiltInAEC(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+        "%s", __FUNCTION__);
+    if (_engineStatistics.Initialized())
+    {
+        // The ADM owns the hardware AEC state; forward directly.
+        return _audioDevicePtr->EnableBuiltInAEC(enable);
+    }
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+}
+
+// Reports whether the device's built-in echo canceller is active.
+// Returns false (and sets VE_NOT_INITED) when the engine is not inited.
+bool VoEHardwareImpl::BuiltInAECIsEnabled() const
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+        "%s", __FUNCTION__);
+    if (_engineStatistics.Initialized())
+    {
+        // The ADM owns the hardware AEC state; forward directly.
+        return _audioDevicePtr->BuiltInAECIsEnabled();
+    }
+    _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+    return false;
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_HARDWARE_API
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_hardware_impl.h b/trunk/src/voice_engine/main/source/voe_hardware_impl.h
new file mode 100644
index 0000000..af934ba
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_hardware_impl.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_HARDWARE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_HARDWARE_IMPL_H
+
+#include "voe_hardware.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc
+{
+class CpuWrapper;
+
+class VoEHardwareImpl: public virtual voe::SharedData,
+                       public VoEHardware,
+                       public voe::RefCount
+{
+public:
+    virtual int Release();
+
+    virtual int GetNumOfRecordingDevices(int& devices);
+
+    virtual int GetNumOfPlayoutDevices(int& devices);
+
+    virtual int GetRecordingDeviceName(int index,
+                                       char strNameUTF8[128],
+                                       char strGuidUTF8[128]);
+
+    virtual int GetPlayoutDeviceName(int index,
+                                     char strNameUTF8[128],
+                                     char strGuidUTF8[128]);
+
+    virtual int GetRecordingDeviceStatus(bool& isAvailable);
+
+    virtual int GetPlayoutDeviceStatus(bool& isAvailable);
+
+    virtual int SetRecordingDevice(
+        int index,
+        StereoChannel recordingChannel = kStereoBoth);
+
+    virtual int SetPlayoutDevice(int index);
+
+    virtual int SetAudioDeviceLayer(AudioLayers audioLayer);
+
+    virtual int GetAudioDeviceLayer(AudioLayers& audioLayer);
+
+    virtual int GetCPULoad(int& loadPercent);
+
+    virtual int GetSystemCPULoad(int& loadPercent);
+
+    virtual int ResetAudioDevice();
+
+    virtual int AudioDeviceControl(unsigned int par1,
+                                   unsigned int par2,
+                                   unsigned int par3);
+
+    virtual int SetLoudspeakerStatus(bool enable);
+
+    virtual int GetLoudspeakerStatus(bool& enabled);
+
+    virtual int EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+protected:
+    VoEHardwareImpl();
+    virtual ~VoEHardwareImpl();
+
+private:
+    CpuWrapper*  _cpu;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_HARDWARE_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_neteq_stats_impl.cc b/trunk/src/voice_engine/main/source/voe_neteq_stats_impl.cc
new file mode 100644
index 0000000..fd87a97
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_neteq_stats_impl.cc
@@ -0,0 +1,100 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_neteq_stats_impl.h"
+
+#include "audio_coding_module.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+
+namespace webrtc {
+
+VoENetEqStats* VoENetEqStats::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s =
+        reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoENetEqStatsImpl* d = s;
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+
+VoENetEqStatsImpl::VoENetEqStatsImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoENetEqStatsImpl::VoENetEqStatsImpl() - ctor");
+}
+
+VoENetEqStatsImpl::~VoENetEqStatsImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoENetEqStatsImpl::~VoENetEqStatsImpl() - dtor");
+}
+
+int VoENetEqStatsImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoENetEqStats::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();  // reset reference counter to zero => OK to delete VE
+        _engineStatistics.SetLastError(
+            VE_INTERFACE_NOT_FOUND, kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoENetEqStats reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoENetEqStatsImpl::GetNetworkStatistics(int channel,
+                                            NetworkStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetNetworkStatistics(channel=%d, stats=?)", channel);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetNetworkStatistics() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->GetNetworkStatistics(stats);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+
+}   // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_neteq_stats_impl.h b/trunk/src/voice_engine/main/source/voe_neteq_stats_impl.h
new file mode 100644
index 0000000..47dd2f6
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_neteq_stats_impl.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_IMPL_H
+
+#include "voe_neteq_stats.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoENetEqStatsImpl : public virtual voe::SharedData,
+                          public VoENetEqStats,
+                          public voe::RefCount
+{
+public:
+    virtual int Release();
+
+    virtual int GetNetworkStatistics(int channel,
+                                     NetworkStatistics& stats);
+
+protected:
+    VoENetEqStatsImpl();
+    virtual ~VoENetEqStatsImpl();
+};
+
+}  // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_network_impl.cc b/trunk/src/voice_engine/main/source/voe_network_impl.cc
new file mode 100644
index 0000000..0bebbf5
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_network_impl.cc
@@ -0,0 +1,947 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_network_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+VoENetwork* VoENetwork::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_NETWORK_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s =
+            reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoENetworkImpl* d = s;
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+
+VoENetworkImpl::VoENetworkImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoENetworkImpl() - ctor");
+}
+
+VoENetworkImpl::~VoENetworkImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "~VoENetworkImpl() - dtor");
+}
+
+int VoENetworkImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoENetworkImpl::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();
+        _engineStatistics.SetLastError(VE_INTERFACE_NOT_FOUND,
+                                       kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "VoENetworkImpl reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoENetworkImpl::RegisterExternalTransport(int channel,
+                                              Transport& transport)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetExternalTransport(channel=%d, transport=0x%x)",
+                 channel, &transport);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetExternalTransport() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterExternalTransport(transport);
+}
+
+int VoENetworkImpl::DeRegisterExternalTransport(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "DeRegisterExternalTransport(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterExternalTransport() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterExternalTransport();
+}
+
+int VoENetworkImpl::ReceivedRTPPacket(int channel,
+                                      const void* data,
+                                      unsigned int length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "ReceivedRTPPacket(channel=%d, length=%u)", channel, length);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((length < 12) || (length > 807))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_PACKET, kTraceError,
+            "ReceivedRTPPacket() invalid packet length");
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "ReceivedRTPPacket() invalid data vector");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "ReceivedRTPPacket() failed to locate channel");
+        return -1;
+    }
+
+    if (!channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ReceivedRTPPacket() external transport is not enabled");
+        return -1;
+    }
+    return channelPtr->ReceivedRTPPacket((const WebRtc_Word8*) data, length);
+}
+
+int VoENetworkImpl::ReceivedRTCPPacket(int channel, const void* data,
+                                       unsigned int length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "ReceivedRTCPPacket(channel=%d, length=%u)", channel, length);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (length < 4)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_PACKET, kTraceError,
+            "ReceivedRTCPPacket() invalid packet length");
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "ReceivedRTCPPacket() invalid data vector");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "ReceivedRTCPPacket() failed to locate channel");
+        return -1;
+    }
+    if (!channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ReceivedRTCPPacket() external transport is not enabled");
+        return -1;
+    }
+    return channelPtr->ReceivedRTCPPacket((const WebRtc_Word8*) data, length);
+}
+
+int VoENetworkImpl::GetSourceInfo(int channel,
+                                  int& rtpPort,
+                                  int& rtcpPort,
+                                  char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSourceInfo(channel=%d, rtpPort=?, rtcpPort=?, ipAddr[]=?)",
+                 channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == ipAddr)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetSourceInfo() invalid IP-address buffer");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSourceInfo() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSourceInfo() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSourceInfo(rtpPort, rtcpPort, ipAddr);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSourceInfo() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetLocalIP(char ipAddr[64], bool ipv6)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetLocalIP(ipAddr[]=?, ipv6=%d)", ipv6);
+    IPHONE_NOT_SUPPORTED();
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == ipAddr)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetLocalIP() invalid IP-address buffer");
+        return -1;
+    }
+
+    // Create a temporary socket module to ensure that this method can be
+    // called also when no channels are created.
+    WebRtc_UWord8 numSockThreads(1);
+    UdpTransport* socketPtr =
+        UdpTransport::Create(
+            -1,
+            numSockThreads);
+    if (NULL == socketPtr)
+    {
+        _engineStatistics.SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetLocalIP() failed to create socket module");
+        return -1;
+    }
+
+    char localIPAddr[64];
+
+    if (ipv6)
+    {
+        char localIP[16];
+        if (socketPtr->LocalHostAddressIPV6(localIP) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_INVALID_IP_ADDRESS, kTraceError,
+                "GetLocalIP() failed to retrieve local IP - 1");
+            UdpTransport::Destroy(socketPtr);
+            return -1;
+        }
+        // Convert 128-bit address to character string (a:b:c:d:e:f:g:h)
+        sprintf(localIPAddr,
+                "%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x"
+                "%.2x:%.2x%.2x",
+                localIP[0], localIP[1], localIP[2], localIP[3], localIP[4],
+                localIP[5], localIP[6], localIP[7], localIP[8], localIP[9],
+                localIP[10], localIP[11], localIP[12], localIP[13],
+                localIP[14], localIP[15]);
+    }
+    else
+    {
+        WebRtc_UWord32 localIP(0);
+        // Read local IP (as 32-bit address) from the socket module
+        if (socketPtr->LocalHostAddress(localIP) != 0)
+        {
+            _engineStatistics.SetLastError(
+                VE_INVALID_IP_ADDRESS, kTraceError,
+                "GetLocalIP() failed to retrieve local IP - 2");
+            UdpTransport::Destroy(socketPtr);
+            return -1;
+        }
+        // Convert 32-bit address to character string (x.y.z.w)
+        sprintf(localIPAddr, "%d.%d.%d.%d", (int) ((localIP >> 24) & 0x0ff),
+                (int) ((localIP >> 16) & 0x0ff),
+                (int) ((localIP >> 8) & 0x0ff),
+                (int) (localIP & 0x0ff));
+    }
+
+    strcpy(ipAddr, localIPAddr);
+
+    UdpTransport::Destroy(socketPtr);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetLocalIP() => ipAddr=%s", ipAddr);
+    return 0;
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetLocalIP() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::EnableIPv6(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "EnableIPv6(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "EnableIPv6() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "EnableIPv6() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->EnableIPv6();
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "EnableIPv6() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+bool VoENetworkImpl::IPv6IsEnabled(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+               "IPv6IsEnabled(channel=%d)", channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return false;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "IPv6IsEnabled() failed to locate channel");
+        return false;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "IPv6IsEnabled() external transport is enabled");
+        return false;
+    }
+    return channelPtr->IPv6IsEnabled();
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "IPv6IsEnabled() VoE is built for external transport");
+    return false;
+#endif
+}
+
+int VoENetworkImpl::SetSourceFilter(int channel,
+                                    int rtpPort,
+                                    int rtcpPort,
+                                    const char ipAddr[64])
+{
+    (ipAddr == NULL) ? WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                                    VoEId(_instanceId, -1),
+                                    "SetSourceFilter(channel=%d, rtpPort=%d,"
+                                    " rtcpPort=%d)",
+                                    channel, rtpPort, rtcpPort)
+                     : WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                                    VoEId(_instanceId, -1),
+                                    "SetSourceFilter(channel=%d, rtpPort=%d,"
+                                    " rtcpPort=%d, ipAddr=%s)",
+                                    channel, rtpPort, rtcpPort, ipAddr);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((rtpPort < 0) || (rtpPort > 65535))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSourceFilter() invalid RTP port");
+        return -1;
+    }
+    if ((rtcpPort < 0) || (rtcpPort > 65535))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSourceFilter() invalid RTCP port");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSourceFilter() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSourceFilter() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->SetSourceFilter(rtpPort, rtcpPort, ipAddr);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSourceFilter() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetSourceFilter(int channel,
+                                    int& rtpPort,
+                                    int& rtcpPort,
+                                    char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSourceFilter(channel=%d, rtpPort=?, rtcpPort=?, "
+                 "ipAddr[]=?)",
+                 channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == ipAddr)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetSourceFilter() invalid IP-address buffer");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSourceFilter() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSourceFilter() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSourceFilter(rtpPort, rtcpPort, ipAddr);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSourceFilter() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::SetSendTOS(int channel,
+                               int DSCP,
+                               int priority,
+                               bool useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetSendTOS(channel=%d, DSCP=%d, useSetSockopt=%d)",
+                 channel, DSCP, useSetSockopt);
+
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_MAC)
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "SetSendTOS() is not supported on this platform");
+    return -1;
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((DSCP < 0) || (DSCP > 63))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendTOS() Invalid DSCP value");
+        return -1;
+    }
+#if defined(_WIN32) || defined(WEBRTC_LINUX)
+    if ((priority < -1) || (priority > 7))
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendTOS() Invalid priority value");
+        return -1;
+    }
+#else
+    if (-1 != priority)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SetSendTOS() priority not supported");
+        return -1;
+    }
+#endif
+#if defined(_WIN32)
+    if ((priority >= 0) && useSetSockopt)
+    {
+        // On Windows, priority and useSetSockopt cannot be combined
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTOS() priority and useSetSockopt conflict");
+        return -1;
+    }
+#endif
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "SetSendTOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSendTOS() external transport is enabled");
+        return -1;
+    }
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    useSetSockopt = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "   force useSetSockopt=true since there is no alternative"
+                 " implementation");
+#endif
+
+    return channelPtr->SetSendTOS(DSCP, priority, useSetSockopt);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSendTOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetSendTOS(int channel,
+                               int& DSCP,
+                               int& priority,
+                               bool& useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSendTOS(channel=%d)", channel);
+
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_MAC)
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "GetSendTOS() is not supported on this platform");
+    return -1;
+#endif
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                       "GetSendTOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSendTOS() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSendTOS(DSCP, priority, useSetSockopt);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSendTOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::SetSendGQoS(int channel,
+                                bool enable,
+                                int serviceType,
+                                int overrideDSCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetSendGQOS(channel=%d, enable=%d, serviceType=%d,"
+                 " overrideDSCP=%d)",
+                 channel, (int) enable, serviceType, overrideDSCP);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#if !defined(_WIN32)
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "SetSendGQOS() is not supported on this platform");
+    return -1;
+#elif !defined(WEBRTC_EXTERNAL_TRANSPORT)
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                      "SetSendGQOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSendGQOS() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->SetSendGQoS(enable, serviceType, overrideDSCP);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSendGQOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetSendGQoS(int channel,
+                                bool& enabled,
+                                int& serviceType,
+                                int& overrideDSCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetSendGQOS(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+#if !defined(_WIN32)
+    _engineStatistics.SetLastError(
+        VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "GetSendGQOS() is not supported on this platform");
+    return -1;
+#elif !defined(WEBRTC_EXTERNAL_TRANSPORT)
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                                      "GetSendGQOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _engineStatistics.SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSendGQOS() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSendGQoS(enabled, serviceType, overrideDSCP);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSendGQOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::SetPacketTimeoutNotification(int channel,
+                                                 bool enable,
+                                                 int timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetPacketTimeoutNotification(channel=%d, enable=%d, "
+                 "timeoutSeconds=%d)",
+                 channel, (int) enable, timeoutSeconds);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (enable &&
+        ((timeoutSeconds < kVoiceEngineMinPacketTimeoutSec) ||
+        (timeoutSeconds > kVoiceEngineMaxPacketTimeoutSec)))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetPacketTimeoutNotification() invalid timeout size");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetPacketTimeoutNotification() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetPacketTimeoutNotification(enable, timeoutSeconds);
+}
+
+int VoENetworkImpl::GetPacketTimeoutNotification(int channel,
+                                                 bool& enabled,
+                                                 int& timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetPacketTimeoutNotification(channel=%d, enabled=?,"
+                 " timeoutSeconds=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPacketTimeoutNotification() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetPacketTimeoutNotification(enabled, timeoutSeconds);
+}
+
+int VoENetworkImpl::RegisterDeadOrAliveObserver(int channel,
+                                                VoEConnectionObserver&
+                                                observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "RegisterDeadOrAliveObserver(channel=%d, observer=0x%x)",
+                 channel, &observer);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterDeadOrAliveObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterDeadOrAliveObserver(observer);
+}
+
+int VoENetworkImpl::DeRegisterDeadOrAliveObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "DeRegisterDeadOrAliveObserver(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterDeadOrAliveObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterDeadOrAliveObserver();
+}
+
+int VoENetworkImpl::SetPeriodicDeadOrAliveStatus(int channel, bool enable,
+                                                 int sampleTimeSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SetPeriodicDeadOrAliveStatus(channel=%d, enable=%d,"
+                 " sampleTimeSeconds=%d)",
+                 channel, enable, sampleTimeSeconds);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (enable &&
+        ((sampleTimeSeconds < kVoiceEngineMinSampleTimeSec) ||
+        (sampleTimeSeconds > kVoiceEngineMaxSampleTimeSec)))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetPeriodicDeadOrAliveStatus() invalid sample time");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetPeriodicDeadOrAliveStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetPeriodicDeadOrAliveStatus(enable, sampleTimeSeconds);
+}
+
+int VoENetworkImpl::GetPeriodicDeadOrAliveStatus(int channel,
+                                                 bool& enabled,
+                                                 int& sampleTimeSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "GetPeriodicDeadOrAliveStatus(channel=%d, enabled=?,"
+                 " sampleTimeSeconds=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPeriodicDeadOrAliveStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetPeriodicDeadOrAliveStatus(enabled,
+                                                    sampleTimeSeconds);
+}
+
+int VoENetworkImpl::SendUDPPacket(int channel,
+                                  const void* data,
+                                  unsigned int length,
+                                  int& transmittedBytes,
+                                  bool useRtcpSocket)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+                 "SendUDPPacket(channel=%d, data=0x%x, length=%u, useRTCP=%d)",
+                 channel, data, length, useRtcpSocket);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                       "SendUDPPacket() invalid data buffer");
+        return -1;
+    }
+    if (0 == length)
+    {
+        _engineStatistics.SetLastError(VE_INVALID_PACKET, kTraceError,
+                                       "SendUDPPacket() invalid packet size");
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SendUDPPacket() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SendUDPPacket(data,
+                                     length,
+                                     transmittedBytes,
+                                     useRtcpSocket);
+#else
+    _engineStatistics.SetLastError(
+        VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SendUDPPacket() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_NETWORK_API
+
+} // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_network_impl.h b/trunk/src/voice_engine/main/source/voe_network_impl.h
new file mode 100644
index 0000000..cc300ac
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_network_impl.h
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETWORK_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETWORK_IMPL_H
+
+#include "voe_network.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+
+namespace webrtc
+{
+
+class VoENetworkImpl: public virtual voe::SharedData,
+                      public VoENetwork,
+                      public voe::RefCount
+{
+public:
+    virtual int Release();
+
+    virtual int RegisterExternalTransport(int channel, Transport& transport);
+
+    virtual int DeRegisterExternalTransport(int channel);
+
+    virtual int ReceivedRTPPacket(int channel,
+                                  const void* data,
+                                  unsigned int length);
+
+    virtual int ReceivedRTCPPacket(int channel,
+                                   const void* data,
+                                   unsigned int length);
+
+    virtual int GetSourceInfo(int channel,
+                              int& rtpPort,
+                              int& rtcpPort,
+                              char ipAddr[64]);
+
+    virtual int GetLocalIP(char ipAddr[64], bool ipv6 = false);
+
+    virtual int EnableIPv6(int channel);
+
+    virtual bool IPv6IsEnabled(int channel);
+
+    virtual int SetSourceFilter(int channel,
+                                int rtpPort,
+                                int rtcpPort,
+                                const char ipAddr[64] = 0);
+
+    virtual int GetSourceFilter(int channel,
+                                int& rtpPort,
+                                int& rtcpPort,
+                                char ipAddr[64]);
+
+    virtual int SetSendTOS(int channel,
+                           int DSCP,
+                           int priority = -1,
+                           bool useSetSockopt = false);
+
+    virtual int GetSendTOS(int channel,
+                           int& DSCP,
+                           int& priority,
+                           bool& useSetSockopt);
+
+    virtual int SetSendGQoS(int channel,
+                            bool enable,
+                            int serviceType,
+                            int overrideDSCP);
+
+    virtual int GetSendGQoS(int channel,
+                            bool& enabled,
+                            int& serviceType,
+                            int& overrideDSCP);
+
+    virtual int SetPacketTimeoutNotification(int channel,
+                                             bool enable,
+                                             int timeoutSeconds = 2);
+
+    virtual int GetPacketTimeoutNotification(int channel,
+                                             bool& enabled,
+                                             int& timeoutSeconds);
+
+    virtual int RegisterDeadOrAliveObserver(int channel,
+                                            VoEConnectionObserver& observer);
+
+    virtual int DeRegisterDeadOrAliveObserver(int channel);
+
+    virtual int SetPeriodicDeadOrAliveStatus(int channel,
+                                             bool enable,
+                                             int sampleTimeSeconds = 2);
+
+    virtual int GetPeriodicDeadOrAliveStatus(int channel,
+                                             bool& enabled,
+                                             int& sampleTimeSeconds);
+
+    virtual int SendUDPPacket(int channel,
+                              const void* data,
+                              unsigned int length,
+                              int& transmittedBytes,
+                              bool useRtcpSocket = false);
+
+protected:
+    VoENetworkImpl();
+    virtual ~VoENetworkImpl();
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_NETWORK_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_rtp_rtcp_impl.cc b/trunk/src/voice_engine/main/source/voe_rtp_rtcp_impl.cc
new file mode 100644
index 0000000..cbc4d0d
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_rtp_rtcp_impl.cc
@@ -0,0 +1,730 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_rtp_rtcp_impl.h"
+#include "trace.h"
+#include "file_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "voice_engine_impl.h"
+#include "voe_errors.h"
+
+#include "channel.h"
+#include "transmit_mixer.h"
+
+namespace webrtc {
+
+VoERTP_RTCP* VoERTP_RTCP::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoERTP_RTCPImpl* d = s;
+    (*d)++;
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+
+VoERTP_RTCPImpl::VoERTP_RTCPImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoERTP_RTCPImpl::VoERTP_RTCPImpl() - ctor");
+}
+
+VoERTP_RTCPImpl::~VoERTP_RTCPImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoERTP_RTCPImpl::~VoERTP_RTCPImpl() - dtor");
+}
+
+int VoERTP_RTCPImpl::Release()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoERTP_RTCP::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();  // reset reference counter to zero => OK to delete VE
+        _engineStatistics.SetLastError(
+            VE_INTERFACE_NOT_FOUND, kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "VoERTP_RTCP reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoERTP_RTCPImpl::RegisterRTPObserver(int channel, VoERTPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RegisterRTPObserver(channel=%d observer=0x%x)",
+                 channel, &observer);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterRTPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterRTPObserver(observer);
+}
+
+int VoERTP_RTCPImpl::DeRegisterRTPObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "DeRegisterRTPObserver(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterRTPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterRTPObserver();
+}
+
+int VoERTP_RTCPImpl::RegisterRTCPObserver(int channel, VoERTCPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RegisterRTCPObserver(channel=%d observer=0x%x)",
+                 channel, &observer);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterRTCPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterRTCPObserver(observer);
+}
+
+int VoERTP_RTCPImpl::DeRegisterRTCPObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "DeRegisterRTCPObserver(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterRTCPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterRTCPObserver();
+}
+
+int VoERTP_RTCPImpl::SetLocalSSRC(int channel, unsigned int ssrc)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetLocalSSRC(channel=%d, %lu)", channel, ssrc);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetLocalSSRC() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetLocalSSRC(ssrc);
+}
+
+int VoERTP_RTCPImpl::GetLocalSSRC(int channel, unsigned int& ssrc)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetLocalSSRC(channel=%d, ssrc=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetLocalSSRC() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetLocalSSRC(ssrc);
+}
+
+int VoERTP_RTCPImpl::GetRemoteSSRC(int channel, unsigned int& ssrc)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRemoteSSRC(channel=%d, ssrc=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteSSRC() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteSSRC(ssrc);
+}
+
+int VoERTP_RTCPImpl::GetRemoteCSRCs(int channel, unsigned int arrCSRC[15])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRemoteCSRCs(channel=%d, arrCSRC=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteCSRCs() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteCSRCs(arrCSRC);
+}
+
+
+int VoERTP_RTCPImpl::SetRTPAudioLevelIndicationStatus(int channel,
+                                                      bool enable,
+                                                      unsigned char ID)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRTPAudioLevelIndicationStatus(channel=%d, enable=%d,"
+                 " ID=%u)", channel, enable, ID);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (ID < kVoiceEngineMinRtpExtensionId ||
+        ID > kVoiceEngineMaxRtpExtensionId)
+    {
+        // [RFC5285] The 4-bit ID is the local identifier of this element in
+        // the range 1-14 inclusive.
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetRTPAudioLevelIndicationStatus() invalid ID parameter");
+        return -1;
+    }
+
+    // Set state and ID for the specified channel.
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTPAudioLevelIndicationStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRTPAudioLevelIndicationStatus(enable, ID);
+}
+
+int VoERTP_RTCPImpl::GetRTPAudioLevelIndicationStatus(int channel,
+                                                      bool& enabled,
+                                                      unsigned char& ID)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRTPAudioLevelIndicationStatus(channel=%d, enable=?, ID=?)",
+                 channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTPAudioLevelIndicationStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTPAudioLevelIndicationStatus(enabled, ID);
+}
+
+int VoERTP_RTCPImpl::SetRTCPStatus(int channel, bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRTCPStatus(channel=%d, enable=%d)", channel, enable);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTCPStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRTCPStatus(enable);
+}
+
+int VoERTP_RTCPImpl::GetRTCPStatus(int channel, bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRTCPStatus(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTCPStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTCPStatus(enabled);
+}
+
+int VoERTP_RTCPImpl::SetRTCP_CNAME(int channel, const char cName[256])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRTCP_CNAME(channel=%d, cName=%s)", channel, cName);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRTCP_CNAME(cName);
+}
+
+int VoERTP_RTCPImpl::GetRTCP_CNAME(int channel, char cName[256])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRTCP_CNAME(channel=%d, cName=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTCP_CNAME(cName);
+}
+
+int VoERTP_RTCPImpl::GetRemoteRTCP_CNAME(int channel, char cName[256])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRemoteRTCP_CNAME(channel=%d, cName=?)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteRTCP_CNAME(cName);
+}
+
+int VoERTP_RTCPImpl::GetRemoteRTCPData(
+    int channel,
+    unsigned int& NTPHigh, // from sender info in SR
+    unsigned int& NTPLow, // from sender info in SR
+    unsigned int& timestamp, // from sender info in SR
+    unsigned int& playoutTimestamp, // derived locally
+    unsigned int* jitter, // from report block 1 in SR/RR
+    unsigned short* fractionLost) // from report block 1 in SR/RR
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRemoteRTCPData(channel=%d,...)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteRTCPData() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteRTCPData(NTPHigh,
+                                         NTPLow,
+                                         timestamp,
+                                         playoutTimestamp,
+                                         jitter,
+                                         fractionLost);
+}
+
+int VoERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
+    int channel,
+    const unsigned char subType,
+    unsigned int name,
+    const char* data,
+    unsigned short dataLengthInBytes)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1), 
+                 "SendApplicationDefinedRTCPPacket(channel=%d, subType=%u,"
+                 "name=%u, data=?, dataLengthInBytes=%u)",
+                 channel, subType, name, dataLengthInBytes);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SendApplicationDefinedRTCPPacket() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SendApplicationDefinedRTCPPacket(subType,
+                                                        name,
+                                                        data,
+                                                        dataLengthInBytes);
+}
+
+int VoERTP_RTCPImpl::GetRTPStatistics(int channel,
+                                      unsigned int& averageJitterMs,
+                                      unsigned int& maxJitterMs,
+                                      unsigned int& discardedPackets)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRTPStatistics(channel=%d,....)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTPStatistics() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTPStatistics(averageJitterMs,
+                                        maxJitterMs,
+                                        discardedPackets);
+}
+
+int VoERTP_RTCPImpl::GetRTCPStatistics(int channel, CallStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRTCPStatistics(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTCPStatistics() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTPStatistics(stats); // CallStatistics overload
+}
+
+int VoERTP_RTCPImpl::SetFECStatus(int channel, bool enable, int redPayloadtype)
+{
+    // Enables or disables RED-based forward error correction on |channel|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetFECStatus(channel=%d, enable=%d, redPayloadtype=%d)",
+                 channel, enable, redPayloadtype);
+#ifdef WEBRTC_CODEC_RED
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetFECStatus() failed to locate channel");
+        return -1;
+    }
+    return target->SetFECStatus(enable, redPayloadtype);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "SetFECStatus() RED is not supported");
+    return -1;
+#endif
+}
+
+int VoERTP_RTCPImpl::GetFECStatus(int channel,
+                                  bool& enabled,
+                                  int& redPayloadtype)
+{
+    // Reports whether RED forward error correction is active on |channel|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetFECStatus(channel=%d, enabled=?, redPayloadtype=?)",
+                 channel);
+#ifdef WEBRTC_CODEC_RED
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetFECStatus() failed to locate channel");
+        return -1;
+    }
+    return target->GetFECStatus(enabled, redPayloadtype);
+#else
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+                                   "GetFECStatus() RED is not supported");
+    return -1;
+#endif
+}
+
+int VoERTP_RTCPImpl::SetRTPKeepaliveStatus(int channel,
+                                           bool enable,
+                                           unsigned char unknownPayloadType,
+                                           int deltaTransmitTimeSeconds)
+{
+    // Enables/disables periodic RTP keepalive packets (used to refresh NAT
+    // bindings) on |channel|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "SetRTPKeepaliveStatus(channel=%d, enable=%d,"
+                 " unknownPayloadType=%u, deltaTransmitTimeSeconds=%d)",
+                 channel, enable, unknownPayloadType, deltaTransmitTimeSeconds);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTPKeepaliveStatus() failed to locate channel");
+        return -1;
+    }
+    return target->SetRTPKeepaliveStatus(
+        enable, unknownPayloadType, deltaTransmitTimeSeconds);
+}
+
+int VoERTP_RTCPImpl::GetRTPKeepaliveStatus(int channel,
+                                           bool& enabled,
+                                           unsigned char& unknownPayloadType,
+                                           int& deltaTransmitTimeSeconds)
+{
+    // Reads back the RTP keepalive configuration for |channel|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetRTPKeepaliveStatus(channel=%d)", channel);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTPKeepaliveStatus() failed to locate channel");
+        return -1;
+    }
+    return target->GetRTPKeepaliveStatus(
+        enabled, unknownPayloadType, deltaTransmitTimeSeconds);
+}
+
+int VoERTP_RTCPImpl::StartRTPDump(int channel,
+                                  const char fileNameUTF8[1024],
+                                  RTPDirections direction)
+{
+    // Begins dumping RTP packets for |channel| (direction selected by
+    // |direction|) into an rtpplay-compatible file.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StartRTPDump(channel=%d, fileNameUTF8=%s, direction=%d)",
+                 channel, fileNameUTF8, direction);
+    // The public API fixes the file-name buffer at 1024 bytes; keep it in
+    // sync with the file wrapper's limit.
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartRTPDump() failed to locate channel");
+        return -1;
+    }
+    return target->StartRTPDump(fileNameUTF8, direction);
+}
+
+int VoERTP_RTCPImpl::StopRTPDump(int channel, RTPDirections direction)
+{
+    // Stops an RTP packet dump previously started with StartRTPDump().
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "StopRTPDump(channel=%d, direction=%d)", channel, direction);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_channelManager, channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopRTPDump() failed to locate channel");
+        return -1;
+    }
+    return target->StopRTPDump(direction);
+}
+
+int VoERTP_RTCPImpl::RTPDumpIsActive(int channel, RTPDirections direction)
+{
+    // Queries whether an RTP dump is currently running for |channel| in
+    // the given |direction|. Forwards the channel's return value.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "RTPDumpIsActive(channel=%d, direction=%d)",
+                 channel, direction);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        // Bug fix: error text previously named "StopRTPDump()".
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "RTPDumpIsActive() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RTPDumpIsActive(direction);
+}
+
+int VoERTP_RTCPImpl::InsertExtraRTPPacket(int channel,
+                                          unsigned char payloadType,
+                                          bool markerBit,
+                                          const char* payloadData,
+                                          unsigned short payloadSize)
+{
+    // Injects (and transmits) an extra RTP packet into the active RTP
+    // audio stream of |channel|. Payload validation is delegated to the
+    // channel implementation.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+                 "InsertExtraRTPPacket(channel=%d, payloadType=%u,"
+                 " markerBit=%u, payloadSize=%u)",
+                 channel, payloadType, markerBit, payloadSize);
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        // Bug fix: error text previously named "StopRTPDump()".
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "InsertExtraRTPPacket() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->InsertExtraRTPPacket(payloadType,
+                                            markerBit,
+                                            payloadData,
+                                            payloadSize);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_rtp_rtcp_impl.h b/trunk/src/voice_engine/main/source/voe_rtp_rtcp_impl.h
new file mode 100644
index 0000000..3cdf162
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_rtp_rtcp_impl.h
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_IMPL_H
+
+#include "voe_rtp_rtcp.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
+// Implementation of the VoERTP_RTCP sub-API. Each method resolves the
+// target channel through the shared channel manager and forwards the call;
+// reference counting is inherited from voe::RefCount.
+class VoERTP_RTCPImpl : public virtual voe::SharedData,
+                        public VoERTP_RTCP,
+                        public voe::RefCount
+{
+public:
+
+    // Decrements the interface reference count; returns the remaining
+    // count, or -1 on error.
+    virtual int Release();
+    // Registration of observers for RTP and RTCP callbacks
+    virtual int RegisterRTPObserver(int channel, VoERTPObserver& observer);
+
+    virtual int DeRegisterRTPObserver(int channel);
+
+    virtual int RegisterRTCPObserver(int channel, VoERTCPObserver& observer);
+
+    virtual int DeRegisterRTCPObserver(int channel);
+
+    // RTCP
+    virtual int SetRTCPStatus(int channel, bool enable);
+
+    virtual int GetRTCPStatus(int channel, bool& enabled);
+
+    virtual int SetRTCP_CNAME(int channel, const char cName[256]);
+
+    virtual int GetRTCP_CNAME(int channel, char cName[256]);
+
+    virtual int GetRemoteRTCP_CNAME(int channel, char cName[256]);
+
+    virtual int GetRemoteRTCPData(int channel,
+                                  unsigned int& NTPHigh,
+                                  unsigned int& NTPLow,
+                                  unsigned int& timestamp,
+                                  unsigned int& playoutTimestamp,
+                                  unsigned int* jitter = NULL,
+                                  unsigned short* fractionLost = NULL);
+
+    virtual int SendApplicationDefinedRTCPPacket(
+        int channel,
+        const unsigned char subType,
+        unsigned int name,
+        const char* data,
+        unsigned short dataLengthInBytes);
+
+    // SSRC
+    virtual int SetLocalSSRC(int channel, unsigned int ssrc);
+
+    virtual int GetLocalSSRC(int channel, unsigned int& ssrc);
+
+    virtual int GetRemoteSSRC(int channel, unsigned int& ssrc);
+
+    // RTP Header Extension for Client-to-Mixer Audio Level Indication
+    virtual int SetRTPAudioLevelIndicationStatus(int channel,
+                                                 bool enable,
+                                                 unsigned char ID);
+
+    virtual int GetRTPAudioLevelIndicationStatus(int channel,
+                                                 bool& enabled,
+                                                 unsigned char& ID);
+
+    // CSRC (contributing sources; fixed-size array of up to 15 entries)
+    virtual int GetRemoteCSRCs(int channel, unsigned int arrCSRC[15]);
+
+    // Statistics
+    virtual int GetRTPStatistics(int channel,
+                                 unsigned int& averageJitterMs,
+                                 unsigned int& maxJitterMs,
+                                 unsigned int& discardedPackets);
+
+    virtual int GetRTCPStatistics(int channel, CallStatistics& stats);
+
+    // RTP keepalive mechanism (maintains NAT mappings associated to RTP flows)
+    virtual int SetRTPKeepaliveStatus(int channel,
+                                      bool enable,
+                                      unsigned char unknownPayloadType,
+                                      int deltaTransmitTimeSeconds = 15);
+
+    virtual int GetRTPKeepaliveStatus(int channel,
+                                      bool& enabled,
+                                      unsigned char& unknownPayloadType,
+                                      int& deltaTransmitTimeSeconds);
+
+    // FEC (requires WEBRTC_CODEC_RED at compile time)
+    virtual int SetFECStatus(int channel,
+                             bool enable,
+                             int redPayloadtype = -1);
+
+    virtual int GetFECStatus(int channel, bool& enabled, int& redPayloadtype);
+
+    // Store RTP and RTCP packets and dump to file (compatible with rtpplay)
+    virtual int StartRTPDump(int channel,
+                             const char fileNameUTF8[1024],
+                             RTPDirections direction = kRtpIncoming);
+
+    virtual int StopRTPDump(int channel,
+                            RTPDirections direction = kRtpIncoming);
+
+    virtual int RTPDumpIsActive(int channel,
+                                RTPDirections direction = kRtpIncoming);
+
+    // Insert (and transmits) extra RTP packet into active RTP audio stream
+    virtual int InsertExtraRTPPacket(int channel,
+                                     unsigned char payloadType,
+                                     bool markerBit,
+                                     const char* payloadData,
+                                     unsigned short payloadSize);
+
+protected:
+    // Construction/destruction restricted to the owning engine.
+    VoERTP_RTCPImpl();
+    virtual ~VoERTP_RTCPImpl();
+};
+
+}  // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_IMPL_H
+
diff --git a/trunk/src/voice_engine/main/source/voe_video_sync_impl.cc b/trunk/src/voice_engine/main/source/voe_video_sync_impl.cc
new file mode 100644
index 0000000..8ee6296
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_video_sync_impl.cc
@@ -0,0 +1,248 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_video_sync_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+// Returns the VoEVideoSync sub-API of |voiceEngine| and increments its
+// reference count; returns NULL if the API is compiled out or the engine
+// pointer is NULL. Caller must balance each call with Release().
+VoEVideoSync* VoEVideoSync::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s =
+        reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEVideoSyncImpl* d = s;
+    (*d)++;  // bump the interface reference count
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+
+// Constructor: stores the shared-data pointer used by every API call and
+// traces construction.
+VoEVideoSyncImpl::VoEVideoSyncImpl(voe::SharedData* data) : _data(data)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "VoEVideoSyncImpl::VoEVideoSyncImpl() - ctor");
+}
+
+// Destructor: traces destruction only; _data is not owned by this class.
+VoEVideoSyncImpl::~VoEVideoSyncImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "VoEVideoSyncImpl::~VoEVideoSyncImpl() - dtor");
+}
+
+int VoEVideoSyncImpl::Release()
+{
+    // Decrements this interface's reference count. Returns the remaining
+    // count, or -1 (with VE_INTERFACE_NOT_FOUND) if Release() was called
+    // more times than GetInterface().
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "VoEVideoSync::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();  // reset reference counter to zero => OK to delete VE
+        _data->statistics().SetLastError(VE_INTERFACE_NOT_FOUND,
+                                         kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "VoEVideoSync reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoEVideoSyncImpl::GetPlayoutTimestamp(int channel, unsigned int& timestamp)
+{
+    // Fetches the current playout RTP timestamp of |channel| (used for
+    // audio/video synchronization).
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "GetPlayoutTimestamp(channel=%d, timestamp=?)", channel);
+    ANDROID_NOT_SUPPORTED(_data->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_data->channel_manager(), channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _data->statistics().SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPlayoutTimestamp() failed to locate channel");
+        return -1;
+    }
+    return target->GetPlayoutTimestamp(timestamp);
+}
+
+int VoEVideoSyncImpl::SetInitTimestamp(int channel,
+                                       unsigned int timestamp)
+{
+    // Sets the initial RTP timestamp for outgoing packets on |channel|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "SetInitTimestamp(channel=%d, timestamp=%lu)",
+                 channel, timestamp);
+    ANDROID_NOT_SUPPORTED(_data->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_data->channel_manager(), channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _data->statistics().SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetInitTimestamp() failed to locate channel");
+        return -1;
+    }
+    return target->SetInitTimestamp(timestamp);
+}
+
+int VoEVideoSyncImpl::SetInitSequenceNumber(int channel,
+                                            short sequenceNumber)
+{
+    // Sets the initial RTP sequence number for outgoing packets on
+    // |channel|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "SetInitSequenceNumber(channel=%d, sequenceNumber=%hd)",
+                 channel, sequenceNumber);
+    ANDROID_NOT_SUPPORTED(_data->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_data->channel_manager(), channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _data->statistics().SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetInitSequenceNumber() failed to locate channel");
+        return -1;
+    }
+    return target->SetInitSequenceNumber(sequenceNumber);
+}
+
+int VoEVideoSyncImpl::SetMinimumPlayoutDelay(int channel,int delayMs)
+{
+    // Sets a lower bound (in milliseconds) on the playout delay of
+    // |channel|, letting video sync trade latency for lip-sync accuracy.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "SetMinimumPlayoutDelay(channel=%d, delayMs=%d)",
+                 channel, delayMs);
+    ANDROID_NOT_SUPPORTED(_data->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_data->channel_manager(), channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _data->statistics().SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetMinimumPlayoutDelay() failed to locate channel");
+        return -1;
+    }
+    return target->SetMinimumPlayoutDelay(delayMs);
+}
+
+int VoEVideoSyncImpl::GetDelayEstimate(int channel, int& delayMs)
+{
+    // Retrieves the channel's current playout-delay estimate in
+    // milliseconds.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "GetDelayEstimate(channel=%d, delayMs=?)", channel);
+    ANDROID_NOT_SUPPORTED(_data->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // Resolve the target channel; the scoped helper handles locking.
+    voe::ScopedChannel scoped(_data->channel_manager(), channel);
+    voe::Channel* const target = scoped.ChannelPtr();
+    if (NULL == target)
+    {
+        _data->statistics().SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetDelayEstimate() failed to locate channel");
+        return -1;
+    }
+    return target->GetDelayEstimate(delayMs);
+}
+
+int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs)
+{
+    // Reads the audio device's playout buffer size (in ms) into
+    // |bufferMs|. Global query — not per channel.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+               "GetPlayoutBufferSize(bufferMs=?)");
+    ANDROID_NOT_SUPPORTED(_data->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // |type| is an in/out argument of PlayoutBuffer(); its returned value
+    // is not used here — only the size matters.
+    AudioDeviceModule::BufferType type
+        (AudioDeviceModule::kFixedBufferSize);
+    WebRtc_UWord16 sizeMS(0);
+    if (_data->audio_device()->PlayoutBuffer(&type, &sizeMS) != 0)
+    {
+        _data->statistics().SetLastError(
+            VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "GetPlayoutBufferSize() failed to read buffer size");
+        return -1;
+    }
+    bufferMs = sizeMS;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "GetPlayoutBufferSize() => bufferMs=%d", bufferMs);
+    return 0;
+}
+
+int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule)
+{
+    // Exposes the RTP/RTCP module owned by |channel| through
+    // |rtpRtcpModule|. Returns 0 on success, -1 on failure.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_data->instance_id(),-1),
+                 "GetRtpRtcp(channel=%i)", channel);
+
+    if (!_data->statistics().Initialized())
+    {
+        _data->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_data->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        // Bug fix: error text previously named "GetPlayoutTimestamp()".
+        _data->statistics().SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRtpRtcp() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRtpRtcp(rtpRtcpModule);
+}
+
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_video_sync_impl.h b/trunk/src/voice_engine/main/source/voe_video_sync_impl.h
new file mode 100644
index 0000000..02c85d1
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_video_sync_impl.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_IMPL_H
+
+#include "voe_video_sync.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
+// Implementation of the VoEVideoSync sub-API. Holds a non-owning pointer
+// to the engine's shared data and forwards each call to the target
+// channel or the audio device module.
+class VoEVideoSyncImpl : public VoEVideoSync,
+                         public voe::RefCount
+{
+public:
+    // Decrements the interface reference count; returns the remaining
+    // count, or -1 on error.
+    virtual int Release();
+
+    virtual int GetPlayoutBufferSize(int& bufferMs);
+
+    virtual int SetMinimumPlayoutDelay(int channel, int delayMs);
+
+    virtual int GetDelayEstimate(int channel, int& delayMs);
+
+    virtual int SetInitTimestamp(int channel, unsigned int timestamp);
+
+    virtual int SetInitSequenceNumber(int channel, short sequenceNumber);
+
+    virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp);
+
+    virtual int GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule);
+
+protected:
+    // Construction/destruction restricted to the owning engine.
+    VoEVideoSyncImpl(voe::SharedData* data);
+    virtual ~VoEVideoSyncImpl();
+
+private:
+    // Shared engine state (channels, statistics, audio device); not owned.
+    voe::SharedData* _data;
+};
+
+}   // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_IMPL_H
diff --git a/trunk/src/voice_engine/main/source/voe_volume_control_impl.cc b/trunk/src/voice_engine/main/source/voe_volume_control_impl.cc
new file mode 100644
index 0000000..dd3d54d
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_volume_control_impl.cc
@@ -0,0 +1,677 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_volume_control_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+// Returns the VoEVolumeControl sub-API of |voiceEngine| and increments
+// its reference count; returns NULL if the API is compiled out or the
+// engine pointer is NULL. Caller must balance each call with Release().
+VoEVolumeControl* VoEVolumeControl::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s =
+        reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEVolumeControlImpl* d = s;
+    (*d)++;  // bump the interface reference count
+    return (d);
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+
+// Constructor: traces construction; state lives in the shared base
+// classes.
+VoEVolumeControlImpl::VoEVolumeControlImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+               "VoEVolumeControlImpl::VoEVolumeControlImpl() - ctor");
+}
+
+// Destructor: traces destruction only; nothing to release here.
+VoEVolumeControlImpl::~VoEVolumeControlImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+               "VoEVolumeControlImpl::~VoEVolumeControlImpl() - dtor");
+}
+
+int VoEVolumeControlImpl::Release()
+{
+    // Decrements this interface's reference count. Returns the remaining
+    // count, or -1 (with VE_INTERFACE_NOT_FOUND) if Release() was called
+    // more times than GetInterface().
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "VoEVolumeControl::Release()");
+    (*this)--;
+    int refCount = GetCount();
+    if (refCount < 0)
+    {
+        Reset();  // reset reference counter to zero => OK to delete VE
+        _engineStatistics.SetLastError(
+            VE_INTERFACE_NOT_FOUND, kTraceWarning);
+        return (-1);
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "VoEVolumeControl reference counter = %d", refCount);
+    return (refCount);
+}
+
+int VoEVolumeControlImpl::SetSpeakerVolume(unsigned int volume)
+{
+    // Sets the speaker volume. |volume| is in the public API range
+    // [0, kMaxVolumeLevel] and is rescaled to the device's native range
+    // before being applied via the audio device module.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetSpeakerVolume(volume=%u)", volume);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (volume > kMaxVolumeLevel)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetSpeakerVolume() invalid argument");
+        return -1;
+    }
+
+    WebRtc_UWord32 maxVol(0);
+    WebRtc_UWord32 spkrVol(0);
+
+    // scale: [0,kMaxVolumeLevel] -> [0,MaxSpeakerVolume]
+    // NOTE(review): VE_MIC_VOL_ERROR is reported below even though this is
+    // the speaker path — confirm whether a speaker-specific code exists.
+    if (_audioDevicePtr->MaxSpeakerVolume(&maxVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_MIC_VOL_ERROR, kTraceError,
+            "SetSpeakerVolume() failed to get max volume");
+        return -1;
+    }
+    // Round the value and avoid floating computation.
+    spkrVol = (WebRtc_UWord32)((volume * maxVol +
+        (int)(kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
+
+    // set the actual volume using the audio mixer
+    if (_audioDevicePtr->SetSpeakerVolume(spkrVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_MIC_VOL_ERROR, kTraceError,
+            "SetSpeakerVolume() failed to set speaker volume");
+        return -1;
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSpeakerVolume(unsigned int& volume)
+{
+    // Reads the speaker volume from the audio device and rescales it from
+    // the device's native range to the public [0, kMaxVolumeLevel] range.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeakerVolume()");
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    WebRtc_UWord32 spkrVol(0);
+    WebRtc_UWord32 maxVol(0);
+
+    // NOTE(review): VE_GET_MIC_VOL_ERROR is reported below even though
+    // this is the speaker path — confirm intended error code.
+    if (_audioDevicePtr->SpeakerVolume(&spkrVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetSpeakerVolume() unable to get speaker volume");
+        return -1;
+    }
+
+    // scale: [0, MaxSpeakerVolume] -> [0, kMaxVolumeLevel]
+    if (_audioDevicePtr->MaxSpeakerVolume(&maxVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetSpeakerVolume() unable to get max speaker volume");
+        return -1;
+    }
+    // Round the value and avoid floating computation.
+    volume = (WebRtc_UWord32) ((spkrVol * kMaxVolumeLevel +
+        (int)(maxVol / 2)) / (maxVol));
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeakerVolume() => volume=%d", volume);
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetSystemOutputMute(bool enable)
+{
+    // Mutes/unmutes the system speaker via the audio device module.
+    // Bug fix: the API trace previously logged "GetSystemOutputMute".
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetSystemOutputMute(enable=%d)", enable);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_audioDevicePtr->SetSpeakerMute(enable) != 0)
+    {
+        // NOTE(review): VE_GET_MIC_VOL_ERROR kept for backward
+        // compatibility although this is a speaker-mute failure.
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SetSystemOutputMute() unable to set speaker mute");
+        return -1;
+    }
+
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSystemOutputMute(bool& enabled)
+{
+    // Reads the current system speaker mute state into |enabled|.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSystemOutputMute(enabled=?)");
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // NOTE(review): VE_GET_MIC_VOL_ERROR is reported for a speaker-mute
+    // query failure — confirm intended error code.
+    if (_audioDevicePtr->SpeakerMute(&enabled) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SpeakerMute() unable to get speaker mute state");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSystemOutputMute() => %d", enabled);
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetMicVolume(unsigned int volume)
+{
+    // Sets the microphone volume. |volume| is in the public API range
+    // [0, kMaxVolumeLevel] and is rescaled to the device's native range.
+    // Includes a special case for pulse-audio setups where the device
+    // volume can already exceed its nominal maximum.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetMicVolume(volume=%u)", volume);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (volume > kMaxVolumeLevel)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetMicVolume() invalid argument");
+        return -1;
+    }
+
+    WebRtc_UWord32 maxVol(0);
+    WebRtc_UWord32 micVol(0);
+
+    // scale: [0, kMaxVolumeLevel] -> [0,MaxMicrophoneVolume]
+    if (_audioDevicePtr->MaxMicrophoneVolume(&maxVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_MIC_VOL_ERROR, kTraceError,
+            "SetMicVolume() failed to get max volume");
+        return -1;
+    }
+
+    if (volume == kMaxVolumeLevel) {
+      // On Linux running pulse, users are able to set the volume above 100%
+      // through the volume control panel, where the +100% range is digital
+      // scaling. WebRTC does not support setting the volume above 100%, and
+      // simply ignores changing the volume if the user tries to set it to
+      // |kMaxVolumeLevel| while the current volume is higher than |maxVol|.
+      if (_audioDevicePtr->MicrophoneVolume(&micVol) != 0) {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SetMicVolume() unable to get microphone volume");
+        return -1;
+      }
+      // Already at or above the nominal max: leave the device untouched.
+      if (micVol >= maxVol)
+        return 0;
+    }
+
+    // Round the value and avoid floating point computation.
+    micVol = (WebRtc_UWord32) ((volume * maxVol +
+        (int)(kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
+
+    // set the actual volume using the audio mixer
+    if (_audioDevicePtr->SetMicrophoneVolume(micVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_MIC_VOL_ERROR, kTraceError,
+            "SetMicVolume() failed to set mic volume");
+        return -1;
+    }
+    return 0;
+}
+
+// Retrieves the current microphone (capture) volume, scaled from the audio
+// device's native range [0, MaxMicrophoneVolume] down to the VoE range
+// [0, kMaxVolumeLevel].
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::GetMicVolume(unsigned int& volume)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetMicVolume()");
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    WebRtc_UWord32 micVol(0);
+    WebRtc_UWord32 maxVol(0);
+
+    if (_audioDevicePtr->MicrophoneVolume(&micVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetMicVolume() unable to get microphone volume");
+        return -1;
+    }
+
+    // scale: [0, MaxMicrophoneVolume] -> [0, kMaxVolumeLevel]
+    if (_audioDevicePtr->MaxMicrophoneVolume(&maxVol) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetMicVolume() unable to get max microphone volume");
+        return -1;
+    }
+    // Note: if maxVol were 0, "micVol < maxVol" is false, so the division
+    // below can never execute with a zero divisor.
+    if (micVol < maxVol) {
+      // Round the value and avoid floating point calculation.
+      volume = (WebRtc_UWord32) ((micVol * kMaxVolumeLevel +
+          (int)(maxVol / 2)) / (maxVol));
+    } else {
+      // Truncate the value to the kMaxVolumeLevel.
+      // (Can happen on Linux/pulse where the device volume may exceed the
+      // reported max; see the mirror-image comment in SetMicVolume above.)
+      volume = kMaxVolumeLevel;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetMicVolume() => volume=%d", volume);
+    return 0;
+}
+
+// Mutes or unmutes the microphone input signal.
+// channel == -1 mutes before demultiplexing, affecting all channels;
+// otherwise only the given channel is affected.
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::SetInputMute(int channel, bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetInputMute(channel=%d, enable=%d)", channel, enable);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        // Mute before demultiplexing <=> affects all channels
+        return _transmitMixerPtr->SetMute(enable);
+    }
+    // Mute after demultiplexing <=> affects one channel only
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetInputMute() failed to locate channel");
+        return -1;
+    }
+    // Both paths return directly; the previous trailing "return 0;" was
+    // unreachable and has been removed.
+    return channelPtr->SetMute(enable);
+}
+
+// Retrieves the mute state previously set via SetInputMute().
+// channel == -1 reads the pre-demultiplexing (all-channel) state from the
+// transmit mixer; otherwise the per-channel state.
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::GetInputMute(int channel, bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetInputMute(channel=%d)", channel);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        enabled = _transmitMixerPtr->Mute();
+    }
+    else
+    {
+        voe::ScopedChannel sc(_channelManager, channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            // Fixed: the message previously named "SetInputMute()".
+            _engineStatistics.SetLastError(
+                VE_CHANNEL_NOT_VALID, kTraceError,
+                "GetInputMute() failed to locate channel");
+            return -1;
+        }
+        enabled = channelPtr->Mute();
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetInputMute() => enabled = %d", (int)enabled);
+    return 0;
+}
+
+// Mutes or unmutes the microphone at the audio device (system) level.
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::SetSystemInputMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetSystemInputMute(enabled=%d)", enable);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_audioDevicePtr->SetMicrophoneMute(enable) != 0)
+    {
+        // NOTE(review): VE_GET_MIC_VOL_ERROR is reused for a *set* failure;
+        // kept as-is so callers checking this error code keep working.
+        // The message now names the API actually called.
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SetMicrophoneMute() unable to set microphone mute state");
+        return -1;
+    }
+
+    return 0;
+}
+
+// Reads back the microphone mute state from the audio device layer.
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::GetSystemInputMute(bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSystemInputMute(enabled=?)");
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_audioDevicePtr->MicrophoneMute(&enabled) != 0)
+    {
+        _engineStatistics.SetLastError(
+            VE_GET_MIC_VOL_ERROR, kTraceError,
+            "MicrophoneMute() unable to get microphone mute state");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSystemInputMute() => %d", enabled);
+    // Fixed: the final return was tab-indented, inconsistent with the
+    // 4-space style used throughout this file.
+    return 0;
+}
+
+// Returns the instantaneous speech level of the transmitted (microphone)
+// signal, as reported by TransmitMixer::AudioLevel().
+int VoEVolumeControlImpl::GetSpeechInputLevel(unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeechInputLevel()");
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    const WebRtc_Word8 micLevel = _transmitMixerPtr->AudioLevel();
+    level = static_cast<unsigned int>(micLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeechInputLevel() => %d", level);
+    return 0;
+}
+
+// Retrieves the instantaneous speech level of the playout signal.
+// channel == -1 queries the signal after output mixing (all channels);
+// otherwise the level of the given channel only.
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::GetSpeechOutputLevel(int channel,
+                                               unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeechOutputLevel(channel=%d, level=?)", channel);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _outputMixerPtr->GetSpeechOutputLevel((WebRtc_UWord32&)level);
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSpeechOutputLevel() failed to locate channel");
+        return -1;
+    }
+    // Fixed: propagate the channel's result instead of discarding it and
+    // unconditionally returning 0 (now consistent with the -1 path).
+    return channelPtr->GetSpeechOutputLevel((WebRtc_UWord32&)level);
+}
+
+// Returns the instantaneous speech level of the transmitted (microphone)
+// signal on the wider scale reported by
+// TransmitMixer::AudioLevelFullRange().
+int VoEVolumeControlImpl::GetSpeechInputLevelFullRange(unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeechInputLevelFullRange(level=?)");
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    const WebRtc_Word16 rawLevel = _transmitMixerPtr->AudioLevelFullRange();
+    level = static_cast<unsigned int>(rawLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeechInputLevelFullRange() => %d", level);
+    return 0;
+}
+
+// Retrieves the playout speech level on the full range reported by the
+// mixer/channel (wide-range variant of GetSpeechOutputLevel()).
+// channel == -1 queries the signal after output mixing (all channels);
+// otherwise the level of the given channel only.
+// Returns 0 on success, -1 on failure (last error set on _engineStatistics).
+int VoEVolumeControlImpl::GetSpeechOutputLevelFullRange(int channel,
+                                                        unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetSpeechOutputLevelFullRange(channel=%d, level=?)", channel);
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _outputMixerPtr->GetSpeechOutputLevelFullRange(
+            (WebRtc_UWord32&)level);
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSpeechOutputLevelFullRange() failed to locate channel");
+        return -1;
+    }
+    // Fixed: propagate the channel's result instead of discarding it and
+    // unconditionally returning 0 (now consistent with the -1 path).
+    return channelPtr->GetSpeechOutputLevelFullRange((WebRtc_UWord32&)level);
+}
+
+// Applies a per-channel playout volume scale factor in
+// [kMinOutputVolumeScaling, kMaxOutputVolumeScaling].
+int VoEVolumeControlImpl::SetChannelOutputVolumeScaling(int channel,
+                                                        float scaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetChannelOutputVolumeScaling(channel=%d, scaling=%3.2f)",
+               channel, scaling);
+    IPHONE_NOT_SUPPORTED();
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    const bool outOfRange = (scaling < kMinOutputVolumeScaling) ||
+                            (scaling > kMaxOutputVolumeScaling);
+    if (outOfRange)
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetChannelOutputVolumeScaling() invalid parameter");
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* target = scopedChannel.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetChannelOutputVolumeScaling() failed to locate channel");
+        return -1;
+    }
+    return target->SetChannelOutputVolumeScaling(scaling);
+}
+
+// Reads back the per-channel playout volume scale factor.
+int VoEVolumeControlImpl::GetChannelOutputVolumeScaling(int channel,
+                                                        float& scaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetChannelOutputVolumeScaling(channel=%d, scaling=?)", channel);
+    IPHONE_NOT_SUPPORTED();
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel scopedChannel(_channelManager, channel);
+    voe::Channel* target = scopedChannel.ChannelPtr();
+    if (target == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetChannelOutputVolumeScaling() failed to locate channel");
+        return -1;
+    }
+    return target->GetChannelOutputVolumeScaling(scaling);
+}
+
+// Sets left/right playout panning, each factor in
+// [kMinOutputVolumePanning, kMaxOutputVolumePanning].
+// channel == -1 sets the master balance (after output mixing); otherwise
+// only the given channel (before output mixing) is affected.
+// Requires stereo playout support. Returns 0 on success, -1 on failure.
+int VoEVolumeControlImpl::SetOutputVolumePan(int channel,
+                                             float left,
+                                             float right)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)",
+               channel, left, right);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // If the availability query itself fails, |available| stays false and
+    // VE_FUNC_NO_STEREO is reported below.
+    bool available(false);
+    _audioDevicePtr->StereoPlayoutIsAvailable(&available);
+    if (!available)
+    {
+        _engineStatistics.SetLastError(
+            VE_FUNC_NO_STEREO, kTraceError,
+            "SetOutputVolumePan() stereo playout not supported");
+        return -1;
+    }
+    if ((left < kMinOutputVolumePanning)  ||
+        (left > kMaxOutputVolumePanning)  ||
+        (right < kMinOutputVolumePanning) ||
+        (right > kMaxOutputVolumePanning))
+    {
+        _engineStatistics.SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetOutputVolumePan() invalid parameter");
+        return -1;
+    }
+
+    if (channel == -1)
+    {
+        // Master balance (affects the signal after output mixing)
+        return _outputMixerPtr->SetOutputVolumePan(left, right);
+    }
+    // Per-channel balance (affects the signal before output mixing)
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetOutputVolumePan() failed to locate channel");
+        return -1;
+    }
+    // Both paths return directly; the previous trailing "return 0;" was
+    // unreachable and has been removed. Also fixed comment typo "affectes".
+    return channelPtr->SetOutputVolumePan(left, right);
+}
+
+// Reads back the left/right playout panning factors.
+// channel == -1 returns the master balance; otherwise the per-channel one.
+// Requires stereo playout support. Returns 0 on success, -1 on failure.
+int VoEVolumeControlImpl::GetOutputVolumePan(int channel,
+                                             float& left,
+                                             float& right)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
+               "GetOutputVolumePan(channel=%d, left=?, right=?)", channel);
+    ANDROID_NOT_SUPPORTED(_engineStatistics);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_engineStatistics.Initialized())
+    {
+        _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // If the availability query itself fails, |available| stays false and
+    // VE_FUNC_NO_STEREO is reported below.
+    bool available(false);
+    _audioDevicePtr->StereoPlayoutIsAvailable(&available);
+    if (!available)
+    {
+        _engineStatistics.SetLastError(
+            VE_FUNC_NO_STEREO, kTraceError,
+            "GetOutputVolumePan() stereo playout not supported");
+        return -1;
+    }
+
+    if (channel == -1)
+    {
+        return _outputMixerPtr->GetOutputVolumePan(left, right);
+    }
+    voe::ScopedChannel sc(_channelManager, channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _engineStatistics.SetLastError(
+            VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetOutputVolumePan() failed to locate channel");
+        return -1;
+    }
+    // Both paths return directly; the previous trailing "return 0;" was
+    // unreachable and has been removed.
+    return channelPtr->GetOutputVolumePan(left, right);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+
+}  // namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voe_volume_control_impl.h b/trunk/src/voice_engine/main/source/voe_volume_control_impl.h
new file mode 100644
index 0000000..18f4a1b
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voe_volume_control_impl.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_IMPL_H
+
+#include "voe_volume_control.h"
+
+#include "ref_count.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
+// Implements the VoEVolumeControl sub-API. voe::SharedData is inherited
+// virtually so all sub-API implementations share a single copy of the
+// engine-wide state; voe::RefCount supplies the counting behind Release().
+class VoEVolumeControlImpl : public virtual voe::SharedData,
+                             public VoEVolumeControl,
+                             public voe::RefCount
+{
+public:
+    // Releases this sub-API interface (reference counted via voe::RefCount).
+    virtual int Release();
+
+    // Speaker (playout) volume.
+    virtual int SetSpeakerVolume(unsigned int volume);
+
+    virtual int GetSpeakerVolume(unsigned int& volume);
+
+    // System-wide playout mute at the audio device level.
+    virtual int SetSystemOutputMute(bool enable);
+
+    virtual int GetSystemOutputMute(bool& enabled);
+
+    // Microphone (capture) volume.
+    virtual int SetMicVolume(unsigned int volume);
+
+    virtual int GetMicVolume(unsigned int& volume);
+
+    // Input mute; channel == -1 affects all channels (transmit mixer).
+    virtual int SetInputMute(int channel, bool enable);
+
+    virtual int GetInputMute(int channel, bool& enabled);
+
+    // System-wide microphone mute at the audio device level.
+    virtual int SetSystemInputMute(bool enable);
+
+    virtual int GetSystemInputMute(bool& enabled);
+
+    // Instantaneous speech levels (narrow and full-range variants).
+    virtual int GetSpeechInputLevel(unsigned int& level);
+
+    virtual int GetSpeechOutputLevel(int channel, unsigned int& level);
+
+    virtual int GetSpeechInputLevelFullRange(unsigned int& level);
+
+    virtual int GetSpeechOutputLevelFullRange(int channel,
+                                              unsigned int& level);
+
+    // Per-channel playout scaling and stereo panning.
+    virtual int SetChannelOutputVolumeScaling(int channel, float scaling);
+
+    virtual int GetChannelOutputVolumeScaling(int channel, float& scaling);
+
+    virtual int SetOutputVolumePan(int channel, float left, float right);
+
+    virtual int GetOutputVolumePan(int channel, float& left, float& right);
+
+
+protected:
+    // Protected: instances are created/destroyed only through a subclass
+    // (presumably the engine implementation -- confirm against callers).
+    VoEVolumeControlImpl();
+    virtual ~VoEVolumeControlImpl();
+};
+
+}   // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_IMPL_H
+
diff --git a/trunk/src/voice_engine/main/source/voice_engine_core.gypi b/trunk/src/voice_engine/main/source/voice_engine_core.gypi
new file mode 100644
index 0000000..135d78b
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voice_engine_core.gypi
@@ -0,0 +1,158 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'voice_engine_core',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/modules/modules.gyp:audio_coding_module',
+        '<(webrtc_root)/modules/modules.gyp:audio_conference_mixer',
+        '<(webrtc_root)/modules/modules.gyp:audio_device',
+        '<(webrtc_root)/modules/modules.gyp:audio_processing',
+        '<(webrtc_root)/modules/modules.gyp:media_file',
+        '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+        '<(webrtc_root)/modules/modules.gyp:udp_transport',
+        '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../../..',
+        '../interface',
+        '<(webrtc_root)/modules/audio_device/main/source',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../../..',
+          '../interface',
+        ],
+      },
+      'sources': [
+        '../../../common_types.h',
+        '../../../engine_configurations.h',
+        '../../../typedefs.h',
+        '../interface/voe_audio_processing.h',
+        '../interface/voe_base.h',
+        '../interface/voe_call_report.h',
+        '../interface/voe_codec.h',
+        '../interface/voe_dtmf.h',
+        '../interface/voe_encryption.h',
+        '../interface/voe_errors.h',
+        '../interface/voe_external_media.h',
+        '../interface/voe_file.h',
+        '../interface/voe_hardware.h',
+        '../interface/voe_neteq_stats.h',
+        '../interface/voe_network.h',
+        '../interface/voe_rtp_rtcp.h',
+        '../interface/voe_video_sync.h',
+        '../interface/voe_volume_control.h',
+        'audio_frame_operations.cc',
+        'audio_frame_operations.h',
+        'channel.cc',
+        'channel.h',
+        'channel_manager.cc',
+        'channel_manager.h',
+        'channel_manager_base.cc',
+        'channel_manager_base.h',
+        'dtmf_inband.cc',
+        'dtmf_inband.h',
+        'dtmf_inband_queue.cc',
+        'dtmf_inband_queue.h',
+        'level_indicator.cc',
+        'level_indicator.h',
+        'monitor_module.cc',
+        'monitor_module.h',
+        'output_mixer.cc',
+        'output_mixer.h',
+        'ref_count.cc',
+        'ref_count.h',
+        'shared_data.cc',
+        'shared_data.h',
+        'statistics.cc',
+        'statistics.h',
+        'transmit_mixer.cc',
+        'transmit_mixer.h',
+        'utility.cc',
+        'utility.h',
+        'voe_audio_processing_impl.cc',
+        'voe_audio_processing_impl.h',
+        'voe_base_impl.cc',
+        'voe_base_impl.h',
+        'voe_call_report_impl.cc',
+        'voe_call_report_impl.h',
+        'voe_codec_impl.cc',
+        'voe_codec_impl.h',
+        'voe_dtmf_impl.cc',
+        'voe_dtmf_impl.h',
+        'voe_encryption_impl.cc',
+        'voe_encryption_impl.h',
+        'voe_external_media_impl.cc',
+        'voe_external_media_impl.h',
+        'voe_file_impl.cc',
+        'voe_file_impl.h',
+        'voe_hardware_impl.cc',
+        'voe_hardware_impl.h',
+        'voe_neteq_stats_impl.cc',
+        'voe_neteq_stats_impl.h',
+        'voe_network_impl.cc',
+        'voe_network_impl.h',
+        'voe_rtp_rtcp_impl.cc',
+        'voe_rtp_rtcp_impl.h',
+        'voe_video_sync_impl.cc',
+        'voe_video_sync_impl.h',
+        'voe_volume_control_impl.cc',
+        'voe_volume_control_impl.h',
+        'voice_engine_defines.h',
+        'voice_engine_impl.cc',
+        'voice_engine_impl.h',
+      ],
+    },
+  ],
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          'target_name': 'voice_engine_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'voice_engine_core',
+            '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+            '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+            '<(webrtc_root)/modules/modules.gyp:audio_coding_module',
+            '<(webrtc_root)/modules/modules.gyp:audio_conference_mixer',
+            '<(webrtc_root)/modules/modules.gyp:audio_device',
+            '<(webrtc_root)/modules/modules.gyp:audio_processing',
+            '<(webrtc_root)/modules/modules.gyp:media_file',
+            '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+            '<(webrtc_root)/modules/modules.gyp:udp_transport',
+            '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/../test/test.gyp:test_support_main',
+            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+          ],
+          'include_dirs': [
+            '../../..',
+            '../interface',
+          ],
+          'sources': [
+            'channel_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/voice_engine/main/source/voice_engine_defines.h b/trunk/src/voice_engine/main/source/voice_engine_defines.h
new file mode 100644
index 0000000..c652e6a
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voice_engine_defines.h
@@ -0,0 +1,597 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  This file contains common constants for VoiceEngine, as well as
+ *  platform specific settings and include files.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
+#define WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
+
+#include "engine_configurations.h"
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+// Engine-wide constant limits used by the VoE sub-APIs for argument
+// validation.
+namespace webrtc
+{
+
+// VolumeControl
+enum { kMinVolumeLevel = 0 };
+enum { kMaxVolumeLevel = 255 };
+// Min scale factor for per-channel volume scaling
+const float kMinOutputVolumeScaling = 0.0f;
+// Max scale factor for per-channel volume scaling
+const float kMaxOutputVolumeScaling = 10.0f;
+// Min scale factor for output volume panning
+const float kMinOutputVolumePanning = 0.0f;
+// Max scale factor for output volume panning
+const float kMaxOutputVolumePanning = 1.0f;
+
+// DTMF
+enum { kMinDtmfEventCode = 0 };                 // DTMF digit "0"
+enum { kMaxDtmfEventCode = 15 };                // DTMF digit "D"
+enum { kMinTelephoneEventCode = 0 };            // RFC4733 (Section 2.3.1)
+enum { kMaxTelephoneEventCode = 255 };          // RFC4733 (Section 2.3.1)
+// Presumably milliseconds, matching kMinTelephoneEventSeparationMs below
+// -- TODO confirm.
+enum { kMinTelephoneEventDuration = 100 };
+enum { kMaxTelephoneEventDuration = 60000 };    // Actual limit is 2^16
+enum { kMinTelephoneEventAttenuation = 0 };     // 0 dBm0
+enum { kMaxTelephoneEventAttenuation = 36 };    // -36 dBm0
+enum { kMinTelephoneEventSeparationMs = 100 };  // Min delta time between two
+                                                // telephone events
+
+enum { EcAec = 0 };                             // AEC mode
+enum { EcAecm = 1 };                            // AECM mode
+
+enum { kVoiceEngineMaxIpPacketSizeBytes = 1500 };       // assumes Ethernet
+
+enum { kVoiceEngineMaxModuleVersionSize = 960 };
+
+// Base
+enum { kVoiceEngineVersionMaxMessageSize = 1024 };
+
+// Encryption
+// SRTP uses 30 bytes key length
+enum { kVoiceEngineMaxSrtpKeyLength = 30 };
+// SRTP minimum key/tag length for encryption level
+enum { kVoiceEngineMinSrtpEncryptLength = 16 };
+// SRTP maximum key/tag length for encryption level
+enum { kVoiceEngineMaxSrtpEncryptLength = 256 };
+// SRTP maximum key/tag length for authentication level,
+// HMAC SHA1 authentication type
+enum { kVoiceEngineMaxSrtpAuthSha1Length = 20 };
+// SRTP maximum tag length for authentication level,
+// null authentication type
+enum { kVoiceEngineMaxSrtpTagAuthNullLength = 12 };
+// SRTP maximum key length for authentication level,
+// null authentication type
+enum { kVoiceEngineMaxSrtpKeyAuthNullLength = 256 };
+
+// Audio processing
+enum { kVoiceEngineAudioProcessingDeviceSampleRateHz = 48000 };
+
+// Codec
+// Min init target rate for iSAC-wb
+enum { kVoiceEngineMinIsacInitTargetRateBpsWb = 10000 };
+// Max init target rate for iSAC-wb
+enum { kVoiceEngineMaxIsacInitTargetRateBpsWb = 32000 };
+// Min init target rate for iSAC-swb
+enum { kVoiceEngineMinIsacInitTargetRateBpsSwb = 10000 };
+// Max init target rate for iSAC-swb
+enum { kVoiceEngineMaxIsacInitTargetRateBpsSwb = 56000 };
+// Lowest max rate for iSAC-wb
+enum { kVoiceEngineMinIsacMaxRateBpsWb = 32000 };
+// Highest max rate for iSAC-wb
+enum { kVoiceEngineMaxIsacMaxRateBpsWb = 53400 };
+// Lowest max rate for iSAC-swb
+enum { kVoiceEngineMinIsacMaxRateBpsSwb = 32000 };
+// Highest max rate for iSAC-swb
+enum { kVoiceEngineMaxIsacMaxRateBpsSwb = 107000 };
+// Lowest max payload size for iSAC-wb
+enum { kVoiceEngineMinIsacMaxPayloadSizeBytesWb = 120 };
+// Highest max payload size for iSAC-wb
+enum { kVoiceEngineMaxIsacMaxPayloadSizeBytesWb = 400 };
+// Lowest max payload size for iSAC-swb
+enum { kVoiceEngineMinIsacMaxPayloadSizeBytesSwb = 120 };
+// Highest max payload size for iSAC-swb
+enum { kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb = 600 };
+
+// VideoSync
+// Lowest minimum playout delay
+enum { kVoiceEngineMinMinPlayoutDelayMs = 0 };
+// Highest minimum playout delay
+enum { kVoiceEngineMaxMinPlayoutDelayMs = 1000 };
+
+// Network
+// Min packet-timeout time for received RTP packets
+enum { kVoiceEngineMinPacketTimeoutSec = 1 };
+// Max packet-timeout time for received RTP packets
+enum { kVoiceEngineMaxPacketTimeoutSec = 150 };
+// Min sample time for dead-or-alive detection
+enum { kVoiceEngineMinSampleTimeSec = 1 };
+// Max sample time for dead-or-alive detection
+enum { kVoiceEngineMaxSampleTimeSec = 150 };
+
+// RTP/RTCP
+// Min 4-bit ID for RTP extension (see section 4.2 in RFC 5285)
+enum { kVoiceEngineMinRtpExtensionId = 1 };
+// Max 4-bit ID for RTP extension
+enum { kVoiceEngineMaxRtpExtensionId = 14 };
+
+} // namespace webrtc
+
+// TODO(andrew): we shouldn't be using the precompiler for this.
+// Use enums or bools as appropriate.
+#define WEBRTC_AUDIO_PROCESSING_OFF false
+
+#define WEBRTC_VOICE_ENGINE_HP_DEFAULT_STATE true
+    // AudioProcessing HP is ON
+#define WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing NS off
+#define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE true
+    // AudioProcessing AGC on
+#define WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing EC off
+#define WEBRTC_VOICE_ENGINE_VAD_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing off
+#define WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing RX AGC off
+#define WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing RX NS off
+#define WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing RX High Pass Filter off
+
+#define WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE  NoiseSuppression::kModerate
+    // AudioProcessing NS moderate suppression
+#define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE GainControl::kAdaptiveAnalog
+    // AudioProcessing AGC analog digital combined
+#define WEBRTC_VOICE_ENGINE_EC_DEFAULT_MODE  EcAec
+    // AudioProcessing EC AEC
+#define WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE GainControl::kAdaptiveDigital
+    // AudioProcessing AGC mode
+#define WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE  NoiseSuppression::kModerate
+    // AudioProcessing RX NS mode
+
+// Macros
+// Comparison of two strings without regard to case
+#define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+// Compares characters of two strings without regard to case
+#define STR_NCASE_CMP(x,y,n) ::_strnicmp(x,y,n)
+
+// ----------------------------------------------------------------------------
+//  Build information macros
+// ----------------------------------------------------------------------------
+
+#if defined(_DEBUG)
+#define BUILDMODE "d"
+#elif defined(DEBUG)
+#define BUILDMODE "d"
+#elif defined(NDEBUG)
+#define BUILDMODE "r"
+#else
+#define BUILDMODE "?"
+#endif
+
+#define BUILDTIME __TIME__
+#define BUILDDATE __DATE__
+
+// Example: "Oct 10 2002 12:05:30 r"
+#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+// ----------------------------------------------------------------------------
+//  Macros
+// ----------------------------------------------------------------------------
+
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+  #include <windows.h>
+  #include <stdio.h>
+  #define DEBUG_PRINT(...)      \
+  {                             \
+    char msg[256];              \
+    sprintf(msg, __VA_ARGS__);  \
+    OutputDebugStringA(msg);    \
+  }
+#else
+  // special fix for visual 2003
+  #define DEBUG_PRINT(exp)      ((void)0)
+#endif  // defined(_DEBUG) && defined(_WIN32)
+
+#define CHECK_CHANNEL(channel)  if (CheckChannel(channel) == -1) return -1;
+
+// ----------------------------------------------------------------------------
+//  Default Trace filter
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VOICE_ENGINE_DEFAULT_TRACE_FILTER \
+    kTraceStateInfo | kTraceWarning | kTraceError | kTraceCritical | \
+    kTraceApiCall
+
+// ----------------------------------------------------------------------------
+//  Inline functions
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+
+// Builds a trace identifier from a VoiceEngine instance ID and a channel
+// ID: instance in the high 16 bits, channel in the low 16 bits. A channel
+// ID of -1 (no specific channel) is mapped to the dummy value 99.
+inline int VoEId(const int veId, const int chId)
+{
+    if (chId == -1)
+    {
+        const int dummyChannel(99);
+        return (int) ((veId << 16) + dummyChannel);
+    }
+    return (int) ((veId << 16) + chId);
+}
+
+// Same bit packing as VoEId() but without the -1 special case.
+inline int VoEModuleId(const int veId, const int chId)
+{
+    return (int) ((veId << 16) + chId);
+}
+
+// Convert module ID to internal VoE channel ID
+// (inverse of the low 16 bits of VoEModuleId()).
+inline int VoEChannelId(const int moduleId)
+{
+    return (int) (moduleId & 0xffff);
+}
+
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Platform settings
+// ----------------------------------------------------------------------------
+
+// *** WINDOWS ***
+
+#if defined(_WIN32)
+
+  #pragma comment( lib, "winmm.lib" )
+
+  #ifndef WEBRTC_EXTERNAL_TRANSPORT
+    #pragma comment( lib, "ws2_32.lib" )
+  #endif
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+// Max number of supported channels
+enum { kVoiceEngineMaxNumOfChannels = 32 };
+// Max number of channels which can be played out simultaneously
+enum { kVoiceEngineMaxNumOfActiveChannels = 16 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  #include <windows.h>
+  #include <mmsystem.h> // timeGetTime
+
+  #define GET_TIME_IN_MS() ::timeGetTime()
+  #define SLEEP(x) ::Sleep(x)
+  // Comparison of two strings without regard to case
+  #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+  // Compares characters of two strings without regard to case
+  #define STR_NCASE_CMP(x,y,n) ::_strnicmp(x,y,n)
+
+// Default device for Windows PC
+  #define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE \
+    AudioDeviceModule::kDefaultCommunicationDevice
+
+#endif  // defined(_WIN32)
+
+// *** LINUX ***
+
+#ifdef WEBRTC_LINUX
+
+#include <pthread.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+#ifndef QNX
+  #include <linux/net.h>
+#ifndef ANDROID
+  #include <sys/soundcard.h>
+#endif // ANDROID
+#endif // QNX
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sched.h>
+#include <time.h>
+#include <sys/time.h>
+
+#define DWORD unsigned long int
+#define WINAPI
+#define LPVOID void *
+#define FALSE 0
+#define TRUE 1
+#define UINT unsigned int
+#define UCHAR unsigned char
+#define TCHAR char
+#ifdef QNX
+#define _stricmp stricmp
+#else
+#define _stricmp strcasecmp
+#endif
+#define GetLastError() errno
+#define WSAGetLastError() errno
+#define LPCTSTR const char*
+#define LPCSTR const char*
+#define wsprintf sprintf
+#define TEXT(a) a
+#define _ftprintf fprintf
+#define _tcslen strlen
+#define FAR
+#define __cdecl
+#define LPSOCKADDR struct sockaddr *
+
+namespace
+{
+    // Win32-style Sleep() shim for Linux: suspends the calling thread
+    // for x milliseconds using nanosleep().
+    void Sleep(unsigned long x)
+    {
+        timespec t;
+        t.tv_sec = x/1000;                      // whole seconds
+        t.tv_nsec = (x-(x/1000)*1000)*1000000;  // remaining ms as nanoseconds
+        nanosleep(&t,NULL);
+    }
+
+    // Win32-style timeGetTime() shim: current wall-clock time in
+    // milliseconds derived from gettimeofday(). NOTE(review): the ms
+    // value is computed in an unsigned long, so it can overflow/wrap
+    // differently across 32- and 64-bit platforms — callers should only
+    // use it for relative time deltas.
+    DWORD timeGetTime()
+    {
+        struct timeval tv;
+        struct timezone tz;
+        unsigned long val;
+
+        gettimeofday(&tv, &tz);
+        val= tv.tv_sec*1000+ tv.tv_usec/1000;
+        return(val);
+    }
+}
+
+#define SLEEP(x) ::Sleep(x)
+#define GET_TIME_IN_MS timeGetTime
+
+// Default device for Linux and Android
+#define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE 0
+
+#ifdef ANDROID
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 2 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 2 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  // Always excluded for Android builds
+  #undef WEBRTC_CODEC_ISAC
+  #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  #undef WEBRTC_CONFERENCING
+  #undef WEBRTC_TYPING_DETECTION
+
+  // Default audio processing states
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+
+  // Default audio processing modes
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE
+  #undef  WEBRTC_VOICE_ENGINE_EC_DEFAULT_MODE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE  \
+      NoiseSuppression::kModerate
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE \
+      GainControl::kAdaptiveDigital
+  #define WEBRTC_VOICE_ENGINE_EC_DEFAULT_MODE  EcAecm
+
+  #define ANDROID_NOT_SUPPORTED(stat)                         \
+      stat.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,   \
+                        "API call not supported");            \
+      return -1;
+
+#else // LINUX PC
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 32 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 16 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  #define ANDROID_NOT_SUPPORTED(stat)
+
+#endif // ANDROID - LINUX PC
+
+#else
+#define ANDROID_NOT_SUPPORTED(stat)
+#endif  // #ifdef WEBRTC_LINUX
+
+// *** WEBRTC_MAC ***
+// including iPhone
+
+#ifdef WEBRTC_MAC
+
+#include <pthread.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sched.h>
+#include <sys/time.h>
+#include <time.h>
+#include <AudioUnit/AudioUnit.h>
+#if !defined(MAC_IPHONE) && !defined(MAC_IPHONE_SIM)
+  #include <CoreServices/CoreServices.h>
+  #include <CoreAudio/CoreAudio.h>
+  #include <AudioToolbox/DefaultAudioOutput.h>
+  #include <AudioToolbox/AudioConverter.h>
+  #include <CoreAudio/HostTime.h>
+#endif
+
+#define DWORD unsigned long int
+#define WINAPI
+#define LPVOID void *
+#define FALSE 0
+#define TRUE 1
+#define SOCKADDR_IN struct sockaddr_in
+#define UINT unsigned int
+#define UCHAR unsigned char
+#define TCHAR char
+#define _stricmp strcasecmp
+#define GetLastError() errno
+#define WSAGetLastError() errno
+#define LPCTSTR const char*
+#define wsprintf sprintf
+#define TEXT(a) a
+#define _ftprintf fprintf
+#define _tcslen strlen
+#define FAR
+#define __cdecl
+#define LPSOCKADDR struct sockaddr *
+#define LPCSTR const char*
+#define ULONG unsigned long
+
+namespace
+{
+    // Win32-style Sleep() shim for Mac/iPhone: suspends the calling
+    // thread for x milliseconds using nanosleep().
+    void Sleep(unsigned long x)
+    {
+        timespec t;
+        t.tv_sec = x/1000;                      // whole seconds
+        t.tv_nsec = (x-(x/1000)*1000)*1000000;  // remaining ms as nanoseconds
+        nanosleep(&t,NULL);
+    }
+
+    // Millisecond wall-clock time from gettimeofday(); named differently
+    // from the Linux shim's timeGetTime(), presumably to avoid clashing
+    // with a system symbol on this platform. Used via GET_TIME_IN_MS.
+    DWORD WebRtcTimeGetTime()
+    {
+        struct timeval tv;
+        struct timezone tz;
+        unsigned long val;
+
+        gettimeofday(&tv, &tz);
+        val= tv.tv_sec*1000+ tv.tv_usec/1000;
+        return(val);
+    }
+}
+
+#define SLEEP(x) ::Sleep(x)
+#define GET_TIME_IN_MS WebRtcTimeGetTime
+
+// Default device for Mac and iPhone
+#define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE 0
+
+// iPhone specific
+#if defined(MAC_IPHONE) || defined(MAC_IPHONE_SIM)
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 2 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 2 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  // Always excluded for iPhone builds
+  #undef WEBRTC_CODEC_ISAC
+  #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE
+  #undef  WEBRTC_VOICE_ENGINE_EC_DEFAULT_MODE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE \
+      NoiseSuppression::kModerate
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE \
+      GainControl::kAdaptiveDigital
+  #define WEBRTC_VOICE_ENGINE_EC_DEFAULT_MODE EcAecm
+
+  #define IPHONE_NOT_SUPPORTED() \
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, \
+                                   "API call not supported"); \
+    return -1;
+
+#else // Non-iPhone
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 32 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 16 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  #define IPHONE_NOT_SUPPORTED()
+#endif
+
+#else
+#define IPHONE_NOT_SUPPORTED()
+#endif  // #ifdef WEBRTC_MAC
+
+
+
+#endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
diff --git a/trunk/src/voice_engine/main/source/voice_engine_impl.cc b/trunk/src/voice_engine/main/source/voice_engine_impl.cc
new file mode 100644
index 0000000..07af08e
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voice_engine_impl.cc
@@ -0,0 +1,317 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine_impl.h"
+#include "trace.h"
+
+#ifdef WEBRTC_ANDROID
+extern "C"
+{
+extern WebRtc_Word32 SetAndroidAudioDeviceObjects(
+    void* javaVM, void* env, void* context);
+} // extern "C"
+#endif
+
+namespace webrtc
+{
+
+// Counter used to ensure that a correct ID can be supplied in all static
+// trace methods. It is not the nicest solution, especially since we
+// already have a counter in VoEBaseImpl; in other words, there is room
+// for improvement here.
+static WebRtc_Word32 gVoiceEngineInstanceCounter = 0;
+
+extern "C"
+{
+// Exported with C linkage so it can be located by name via
+// GetProcAddress/dlsym (see VoiceEngine::Create below).
+WEBRTC_DLLEXPORT VoiceEngine* GetVoiceEngine();
+
+// Allocates a new VoiceEngineImpl and returns it as an opaque
+// VoiceEngine*, bumping the global instance counter used for trace IDs.
+VoiceEngine* GetVoiceEngine()
+{
+    VoiceEngineImpl* self = new VoiceEngineImpl();
+    VoiceEngine* ve = reinterpret_cast<VoiceEngine*> (self);
+    if (ve != NULL)
+    {
+        gVoiceEngineInstanceCounter++;
+    }
+    return ve;
+}
+} // extern "C"
+
+// Factory for VoiceEngine instances. On Windows it first tries to load
+// "VoiceEngineTestingDynamic.dll" and, if that DLL exports a
+// GetVoiceEngine symbol, delegates creation to it — this allows a test
+// build to substitute its own engine implementation at runtime. In all
+// other cases the locally compiled GetVoiceEngine() is used.
+VoiceEngine* VoiceEngine::Create()
+{
+#if (defined _WIN32)
+    // NOTE(review): the module handle is never released with
+    // FreeLibrary; the DLL stays loaded for the process lifetime.
+    HMODULE hmod_ = LoadLibrary(TEXT("VoiceEngineTestingDynamic.dll"));
+
+    if (hmod_)
+    {
+        typedef VoiceEngine* (*PfnGetVoiceEngine)(void);
+        PfnGetVoiceEngine pfn = (PfnGetVoiceEngine)GetProcAddress(
+                hmod_,"GetVoiceEngine");
+        if (pfn)
+        {
+            VoiceEngine* self = pfn();
+            return (self);
+        }
+    }
+#endif
+
+    return GetVoiceEngine();
+}
+
+// Replaces the global trace level filter. Returns the result of
+// Trace::SetLevelFilter (0 on success, -1 on failure).
+int VoiceEngine::SetTraceFilter(const unsigned int filter)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                 VoEId(gVoiceEngineInstanceCounter, -1),
+                 "SetTraceFilter(filter=0x%x)", filter);
+
+    // Remember old filter
+    WebRtc_UWord32 oldFilter = 0;
+    Trace::LevelFilter(oldFilter);
+
+    // Set new filter
+    WebRtc_Word32 ret = Trace::SetLevelFilter(filter);
+
+    // If the previous filter was kTraceNone, the API-call trace above
+    // was dropped; repeat it now that the new filter may allow it.
+    if (kTraceNone == oldFilter)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice, -1,
+                     "SetTraceFilter(filter=0x%x)", filter);
+    }
+
+    return (ret);
+}
+
+// Redirects trace output to the given (UTF-8) file. The trace line is
+// emitted after the redirect so the call itself is logged to the new
+// file. Returns the result of Trace::SetTraceFile.
+int VoiceEngine::SetTraceFile(const char* fileNameUTF8,
+                              const bool addFileCounter)
+{
+    int ret = Trace::SetTraceFile(fileNameUTF8, addFileCounter);
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                 VoEId(gVoiceEngineInstanceCounter, -1),
+                 "SetTraceFile(fileNameUTF8=%s, addFileCounter=%d)",
+                 fileNameUTF8, addFileCounter);
+    return (ret);
+}
+
+// Installs a callback that receives trace messages instead of / in
+// addition to file output. Returns the result of
+// Trace::SetTraceCallback.
+int VoiceEngine::SetTraceCallback(TraceCallback* callback)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                 VoEId(gVoiceEngineInstanceCounter, -1),
+                 "SetTraceCallback(callback=0x%x)", callback);
+    return (Trace::SetTraceCallback(callback));
+}
+
+// Destroys a VoiceEngine instance obtained via Create() and NULLs the
+// caller's pointer. Unless ignoreRefCounters is true, deletion is
+// refused (returns false) while any compiled-in sub-API interface still
+// holds a non-zero reference count, since deleting the engine then
+// would leave dangling interface references.
+bool VoiceEngine::Delete(VoiceEngine*& voiceEngine, bool ignoreRefCounters)
+{
+    if (voiceEngine == NULL)
+    {
+        return false;
+    }
+
+    // Create() handed out the VoiceEngineImpl disguised as a
+    // VoiceEngine*, so cast it back to reach the sub-API bases.
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*> (voiceEngine);
+    VoEBaseImpl* base = s;
+
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, -1,
+                 "VoiceEngine::Delete(voiceEngine=0x%p, ignoreRefCounters=%d)",
+                 voiceEngine, ignoreRefCounters);
+
+    if (!ignoreRefCounters)
+    {
+        // Check every compiled-in sub-API in turn; bail out on the
+        // first one whose reference counter is still non-zero.
+        if (base->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEBase reference counter is %d => memory will not "
+                             "be released properly!", base->GetCount());
+            return false;
+        }
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+        VoECodecImpl* codec = s;
+        if (codec->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoECodec reference counter is %d => memory will not "
+                             "be released properly!", codec->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+        VoEDtmfImpl* dtmf = s;
+        if (dtmf->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEDtmf reference counter is %d =>"
+                             "memory will not be released properly!",
+                         dtmf->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+        VoEEncryptionImpl* encrypt = s;
+        if (encrypt->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEEncryption reference counter is %d => "
+                             "memory will not be released properly!",
+                         encrypt->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+        VoEExternalMediaImpl* extmedia = s;
+        if (extmedia->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEExternalMedia reference counter is %d => "
+                             "memory will not be released properly!",
+                         extmedia->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+        VoECallReportImpl* report = s;
+        if (report->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoECallReport reference counter is %d => memory "
+                             "will not be released properly!",
+                         report->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+        VoEFileImpl* file = s;
+        if (file->GetCount() != 0)
+        {
+            WEBRTC_TRACE(
+                         kTraceCritical,
+                         kTraceVoice,
+                         -1,
+                         "VoEFile reference counter is %d => memory will not "
+                         "be released properly!",
+                         file->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+        VoEHardwareImpl* hware = s;
+        if (hware->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEHardware reference counter is %d => memory will "
+                         "not be released properly!", hware->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+        VoENetEqStatsImpl* neteqst = s;
+        if (neteqst->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoENetEqStats reference counter is %d => "
+                             "memory will not be released properly!",
+                         neteqst->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+        VoENetworkImpl* netw = s;
+        if (netw->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoENetworkImpl reference counter is %d => memory "
+                         "will not be released properly!", netw->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+        VoERTP_RTCPImpl* rtcp = s;
+        if (rtcp->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoERTP_RTCP reference counter is %d =>"
+                             "memory will not be released properly!",
+                         rtcp->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+        VoEVideoSyncImpl* vsync = s;
+        if (vsync->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEVideoSync reference counter is %d => "
+                             "memory will not be released properly!",
+                         vsync->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+        VoEVolumeControlImpl* volume = s;
+        if (volume->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEVolumeControl reference counter is %d =>"
+                             "memory will not be released properly!",
+                         volume->GetCount());
+            return false;
+        }
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+        VoEAudioProcessingImpl* apm = s;
+        if (apm->GetCount() != 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceVoice, -1,
+                         "VoEAudioProcessing reference counter is %d => "
+                             "memory will not be released properly!",
+                         apm->GetCount());
+            return false;
+        }
+#endif
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, -1,
+                     "all reference counters are zero => deleting the "
+                     "VoiceEngine instance...");
+
+    } // if (!ignoreRefCounters)
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, -1,
+                     "reference counters are ignored => deleting the "
+                     "VoiceEngine instance...");
+    }
+
+    // All checks passed (or were skipped); release the implementation
+    // and clear the caller's pointer so it cannot be reused.
+    delete s;
+    voiceEngine = NULL;
+
+    return true;
+}
+
+// Hands the Java VM, JNI environment, and application context to the
+// Android audio device layer. Returns 0 on success; returns -1 on
+// non-Android builds (and on OpenSL ES builds, which do not need the
+// Java objects).
+int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context)
+{
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
+    // modules/audio_device/main/source/android/audio_device_android_jni.cc
+    // contains the actual implementation.
+    return SetAndroidAudioDeviceObjects(javaVM, env, context);
+#else
+    return -1;
+#endif
+}
+
+} //namespace webrtc
diff --git a/trunk/src/voice_engine/main/source/voice_engine_impl.h b/trunk/src/voice_engine/main/source/voice_engine_impl.h
new file mode 100644
index 0000000..708ac5d
--- /dev/null
+++ b/trunk/src/voice_engine/main/source/voice_engine_impl.h
@@ -0,0 +1,118 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
+
+#include "engine_configurations.h"
+#include "voe_base_impl.h"
+
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+#include "voe_audio_processing_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#include "voe_call_report_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+#include "voe_codec_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+#include "voe_dtmf_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#include "voe_encryption_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#include "voe_external_media_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+#include "voe_file_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+#include "voe_hardware_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#include "voe_neteq_stats_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+#include "voe_network_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+#include "voe_rtp_rtcp_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#include "voe_video_sync_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#include "voe_volume_control_impl.h"
+#endif
+
+namespace webrtc
+{
+
+// Concrete VoiceEngine implementation: aggregates every enabled sub-API
+// implementation through multiple inheritance. Which interfaces are
+// mixed in is selected at compile time by the WEBRTC_VOICE_ENGINE_*_API
+// defines (see engine_configurations.h); VoEBaseImpl is always present
+// and provides the shared reference counting used by VoiceEngine::Delete.
+class VoiceEngineImpl :
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+                            public VoEAudioProcessingImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+                            public VoECallReportImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+                            public VoECodecImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+                            public VoEDtmfImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+                            public VoEEncryptionImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+                            public VoEExternalMediaImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+                            public VoEFileImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+                            public VoEHardwareImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+                            public VoENetEqStatsImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+                            public VoENetworkImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+                            public VoERTP_RTCPImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+                            public VoEVideoSyncImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+                            public VoEVolumeControlImpl,
+#endif
+                            public VoEBaseImpl
+{
+public:
+    // VoEVideoSyncImpl is the only base needing a back-pointer to the
+    // aggregate; all other bases are default-constructed.
+    VoiceEngineImpl() : 
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+      VoEVideoSyncImpl(this),
+#endif
+      VoEBaseImpl()  // Included in initializer list to satisfy condition when
+                     // none of the WEBRTC_VOICE_XXX defines are set.
+    {
+    }
+    virtual ~VoiceEngineImpl()
+    {
+    }
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
diff --git a/trunk/src/voice_engine/main/test/android/android_test/.classpath b/trunk/src/voice_engine/main/test/android/android_test/.classpath
new file mode 100644
index 0000000..86a15c9
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/trunk/src/voice_engine/main/test/android/android_test/Android.mk b/trunk/src/voice_engine/main/test/android/android_test/Android.mk
new file mode 100644
index 0000000..a3f5ce6
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/Android.mk
@@ -0,0 +1,25 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+# Directory containing this makefile.
+LOCAL_PATH:= $(call my-dir)
+
+# Reset all module-scoped LOCAL_* variables before defining this module.
+include $(CLEAR_VARS)
+
+# Test package: built only when tests are requested, not in normal builds.
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+    src/org/webrtc/voiceengine/test/AndroidTest.java
+
+LOCAL_PACKAGE_NAME := webrtc-voice-demo
+# Sign the APK with the platform certificate.
+LOCAL_CERTIFICATE := platform
+
+# Native library bundled into the APK (built by the jni/Android.mk below
+# this directory).
+LOCAL_JNI_SHARED_LIBRARIES := libwebrtc-voice-demo-jni
+
+include $(BUILD_PACKAGE)
+
+# Recurse into subdirectory makefiles (e.g. jni/).
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/trunk/src/voice_engine/main/test/android/android_test/AndroidManifest.xml b/trunk/src/voice_engine/main/test/android/android_test/AndroidManifest.xml
new file mode 100644
index 0000000..a614f8d
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/AndroidManifest.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -->
+<!--                                                                     -->
+<!-- Use of this source code is governed by a BSD-style license          -->
+<!-- that can be found in the LICENSE file in the root of the source     -->
+<!-- tree. An additional intellectual property rights grant can be found -->
+<!-- in the file PATENTS.  All contributing project authors may          -->
+<!-- be found in the AUTHORS file in the root of the source tree.        -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  android:versionCode="1"
+	  android:versionName="1.0" package="org.webrtc.voiceengine.test">
+  <application android:icon="@drawable/icon"
+	       android:label="@string/app_name"
+	       android:debuggable="true">
+    <activity android:name=".AndroidTest"
+              android:label="@string/app_name"
+              android:screenOrientation="portrait">
+      <intent-filter>
+        <action android:name="android.intent.action.MAIN" />
+        <category android:name="android.intent.category.LAUNCHER" />
+      </intent-filter>
+    </activity>
+
+  </application>
+  <uses-sdk android:minSdkVersion="3" />
+
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+  <uses-permission android:name="android.permission.RECORD_AUDIO" />
+  <uses-permission android:name="android.permission.INTERNET" />
+
+</manifest> 
diff --git a/trunk/src/voice_engine/main/test/android/android_test/default.properties b/trunk/src/voice_engine/main/test/android/android_test/default.properties
new file mode 100644
index 0000000..6ed608e
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target, OpenSL ES requires API level 9 

+target=android-9

diff --git a/trunk/src/voice_engine/main/test/android/android_test/gen/org/webrtc/voiceengine/test/R.java b/trunk/src/voice_engine/main/test/android/android_test/gen/org/webrtc/voiceengine/test/R.java
new file mode 100644
index 0000000..ec8f5b4
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/gen/org/webrtc/voiceengine/test/R.java
@@ -0,0 +1,30 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.voiceengine.test;

+

+public final class R {

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int icon=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f050002;

+        public static final int Button02=0x7f050005;

+        public static final int EditText01=0x7f050001;

+        public static final int Spinner01=0x7f050003;

+        public static final int Spinner02=0x7f050004;

+        public static final int TextView01=0x7f050000;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int app_name=0x7f040000;

+    }

+}

diff --git a/trunk/src/voice_engine/main/test/android/android_test/jni/Android.mk b/trunk/src/voice_engine/main/test/android/android_test/jni/Android.mk
new file mode 100644
index 0000000..9f9f761
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/jni/Android.mk
@@ -0,0 +1,152 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+ifdef NDK_ROOT
+
+MY_WEBRTC_ROOT_PATH := $(call my-dir)
+
+MY_WEBRTC_SRC_PATH := ../../../../../../..
+
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/common_audio/resampler/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/common_audio/signal_processing/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/common_audio/vad/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/neteq/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/cng/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/g711/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/g722/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/pcm16b/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/ilbc/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/iSAC/fix/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/codecs/iSAC/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_coding/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_conference_mixer/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_device/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_processing/aec/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_processing/aecm/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_processing/agc/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_processing/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_processing/ns/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/audio_processing/utility/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/media_file/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/rtp_rtcp/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/udp_transport/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/modules/utility/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/system_wrappers/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/voice_engine/main/source/Android.mk
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc_audio_preprocessing
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+    libwebrtc_spl \
+    libwebrtc_resampler \
+    libwebrtc_apm \
+    libwebrtc_apm_utility \
+    libwebrtc_vad \
+    libwebrtc_ns \
+    libwebrtc_agc \
+    libwebrtc_aec \
+    libwebrtc_aecm \
+    libwebrtc_system_wrappers \
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libstlport_shared
+
+LOCAL_LDLIBS := \
+    -lgcc \
+    -llog
+
+LOCAL_PRELINK_MODULE := false
+
+include $(BUILD_SHARED_LIBRARY)
+
+###
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc-voice-jni
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+    libwebrtc_system_wrappers \
+    libwebrtc_audio_device \
+    libwebrtc_pcm16b \
+    libwebrtc_cng \
+    libwebrtc_audio_coding \
+    libwebrtc_rtp_rtcp \
+    libwebrtc_media_file \
+    libwebrtc_udp_transport \
+    libwebrtc_utility \
+    libwebrtc_neteq \
+    libwebrtc_audio_conference_mixer \
+    libwebrtc_isac \
+    libwebrtc_ilbc \
+    libwebrtc_isacfix \
+    libwebrtc_g722 \
+    libwebrtc_g711 \
+    libwebrtc_voe_core
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libstlport_shared \
+    libwebrtc_audio_preprocessing
+
+LOCAL_LDLIBS := \
+    -lgcc \
+    -llog \
+    -lOpenSLES
+
+LOCAL_PRELINK_MODULE := false
+
+include $(BUILD_SHARED_LIBRARY)
+
+###
+
+include $(MY_WEBRTC_ROOT_PATH)/$(MY_WEBRTC_SRC_PATH)/src/voice_engine/main/test/cmd_test/Android.mk
+
+else
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_MODULE := libwebrtc-voice-demo-jni
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := android_test.cc
+LOCAL_CFLAGS := \
+    '-DWEBRTC_TARGET_PC' \
+    '-DWEBRTC_ANDROID'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../../../auto_test \
+    $(LOCAL_PATH)/../../../../interface \
+    $(LOCAL_PATH)/../../../../../.. \
+    $(LOCAL_PATH)/../../../../../../system_wrappers/interface
+
+LOCAL_PRELINK_MODULE := false
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils \
+    libstlport \
+    libandroid \
+    libwebrtc \
+    libGLESv2
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/trunk/src/voice_engine/main/test/android/android_test/jni/Application.mk b/trunk/src/voice_engine/main/test/android/android_test/jni/Application.mk
new file mode 100644
index 0000000..03c35ac
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/jni/Application.mk
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Build ARMv5TE, ARMv7-A and x86 machine code.
+APP_ABI := armeabi armeabi-v7a x86
+APP_STL := stlport_shared
diff --git a/trunk/src/voice_engine/main/test/android/android_test/jni/android_test.cc b/trunk/src/voice_engine/main/test/android/android_test/jni/android_test.cc
new file mode 100644
index 0000000..8c5fdff
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/jni/android_test.cc
@@ -0,0 +1,1554 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <android/log.h>
+
+#include "org_webrtc_voiceengine_test_AndroidTest.h"
+
+#include "thread_wrapper.h"
+
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_file.h"
+#include "voe_network.h"
+#include "voe_audio_processing.h"
+#include "voe_volume_control.h"
+#include "voe_hardware.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_encryption.h"
+
+#include "voe_test_interface.h"
+
+//#define USE_SRTP
+//#define INIT_FROM_THREAD
+//#define START_CALL_FROM_THREAD
+
+#define WEBRTC_LOG_TAG "*WEBRTCN*" // As in WEBRTC Native...
+#define VALIDATE_BASE_POINTER \
+    if (!veData1.base) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Base pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_CODEC_POINTER \
+    if (!veData1.codec) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Codec pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_FILE_POINTER \
+    if (!veData1.file) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "File pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_NETWORK_POINTER \
+    if (!veData1.netw) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Network pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_APM_POINTER \
+    if (!veData1.apm) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Apm pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_VOLUME_POINTER \
+    if (!veData1.volume) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Volume pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_HARDWARE_POINTER \
+    if (!veData1.hardware) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Hardware pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_RTP_RTCP_POINTER \
+    if (!veData1.rtp_rtcp) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "RTP / RTCP pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_ENCRYPT_POINTER \
+    if (!veData1.encrypt) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Encrypt pointer doesn't exist"); \
+        return -1; \
+    }
+
+// Register functions in JNI_OnLoad()
+// How do we ensure that VoE is deleted? JNI_OnUnload?
+// What happens if class is unloaded? When loaded again, NativeInit will be
+// called again. Keep what we have?
+// Should we do something in JNI_OnUnload?
+// General design: create a class or keep global struct with "C" functions?
+// Otherwise make sure symbols are as unique as possible.
+
+// TestType enumerator
+enum TestType
+{
+  Invalid = -1,
+  Standard = 0,
+  Extended = 1,
+  Stress   = 2,
+  Unit     = 3,
+  CPU      = 4
+};
+
+// ExtendedSelection enumerator
+enum ExtendedSelection
+{
+   XSEL_Invalid = -1,
+   XSEL_None = 0,
+   XSEL_All,
+   XSEL_Base,
+   XSEL_CallReport,
+   XSEL_Codec,
+   XSEL_DTMF,
+   XSEL_Encryption,
+   XSEL_ExternalMedia,
+   XSEL_File,
+   XSEL_Hardware,
+   XSEL_NetEqStats,
+   XSEL_Network,
+   XSEL_PTT,
+   XSEL_RTP_RTCP,
+   XSEL_VideoSync,
+   XSEL_VideoSyncExtended,
+   XSEL_VolumeControl,
+   XSEL_VQE,
+   XSEL_APM,
+   XSEL_VQMon
+};
+
+using namespace webrtc;
+
+class my_transportation;
+
+// VoiceEngine data struct
+typedef struct
+{
+    // VoiceEngine
+    VoiceEngine* ve;
+    // Sub-APIs
+    VoEBase* base;
+    VoECodec* codec;
+    VoEFile* file;
+    VoENetwork* netw;
+    VoEAudioProcessing* apm;
+    VoEVolumeControl* volume;
+    VoEHardware* hardware;
+    VoERTP_RTCP* rtp_rtcp;
+    VoEEncryption* encrypt;
+    // Other
+    my_transportation* extTrans;
+    JavaVM* jvm;
+} VoiceEngineData;
+
+// my_transportation is used when useExtTrans is enabled
+class my_transportation : public Transport
+{
+ public:
+  my_transportation(VoENetwork * network) :
+      netw(network) {
+  }
+
+  int SendPacket(int channel,const void *data,int len);
+  int SendRTCPPacket(int channel, const void *data, int len);
+ private:
+  VoENetwork * netw;
+};
+
+int my_transportation::SendPacket(int channel,const void *data,int len)
+{
+  netw->ReceivedRTPPacket(channel, data, len);
+  return len;
+}
+
+int my_transportation::SendRTCPPacket(int channel, const void *data, int len)
+{
+  netw->ReceivedRTCPPacket(channel, data, len);
+  return len;
+}
+
+//Global variables visible in this file
+static VoiceEngineData veData1;
+static VoiceEngineData veData2;
+
+// "Local" functions (i.e. not Java accessible)
+static bool GetSubApis(VoiceEngineData &veData);
+static bool ReleaseSubApis(VoiceEngineData &veData);
+
+class ThreadTest
+{
+public:
+    ThreadTest();
+    ~ThreadTest();
+    int RunTest();
+    int CloseTest();
+private:
+    static bool Run(void* ptr);
+    bool Process();
+private:
+    ThreadWrapper* _thread;
+};
+
+ThreadTest::~ThreadTest()
+{
+    if (_thread)
+    {
+        _thread->SetNotAlive();
+        if (_thread->Stop())
+        {
+            delete _thread;
+            _thread = NULL;
+        }
+    }
+}
+
+ThreadTest::ThreadTest() :
+    _thread(NULL)
+{
+    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+                                          "ThreadTest thread");
+}
+
+bool ThreadTest::Run(void* ptr)
+{
+    return static_cast<ThreadTest*> (ptr)->Process();
+}
+
+bool ThreadTest::Process()
+{
+    // Attach this thread to JVM
+    /*JNIEnv* env = NULL;
+     jint res = veData1.jvm->AttachCurrentThread(&env, NULL);
+     char msg[32];
+     sprintf(msg, "res=%d, env=%d", res, env);
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, msg);*/
+
+#ifdef INIT_FROM_THREAD
+    VALIDATE_BASE_POINTER;
+    veData1.base->Init();
+#endif
+
+#ifdef START_CALL_FROM_THREAD
+    // receiving instance
+    veData2.ve = VoiceEngine::Create();
+    GetSubApis(veData2);
+    veData2.base->Init();
+    veData2.base->CreateChannel();
+    if(veData2.base->SetLocalReceiver(0, 1234) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "set local receiver 2 failed");
+    }
+    veData2.hardware->SetLoudspeakerStatus(false);
+    veData2.volume->SetSpeakerVolume(204);
+    veData2.base->StartReceive(0);
+    if(veData2.base->StartPlayout(0) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "start playout failed");
+    }
+
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+            "receiving instance started from thread");
+
+    // sending instance
+    veData1.ve = VoiceEngine::Create();
+    GetSubApis(veData1);
+    veData1.base->Init();
+    if(veData1.base->CreateChannel() < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "create channel failed");
+    }
+    if(veData1.base->SetLocalReceiver(0, 1256) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "set local receiver failed");
+    }
+    if(veData1.base->SetSendDestination(0, 1234, "127.0.0.1") < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "set send destination failed");
+    }
+    if(veData1.base->StartSend(0) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "start send failed");
+    }
+
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+            "sending instance started from thread");
+#endif
+
+    _thread->SetNotAlive();
+    _thread->Stop();
+
+    //res = veData1.jvm->DetachCurrentThread();
+
+    return true;
+}
+
+int ThreadTest::RunTest()
+{
+    if (_thread)
+    {
+        unsigned int id;
+        _thread->Start(id);
+    }
+    return 0;
+}
+
+int ThreadTest::CloseTest()
+{
+    VALIDATE_BASE_POINTER
+
+    veData1.base->DeleteChannel(0);
+    veData2.base->DeleteChannel(0);
+    veData1.base->Terminate();
+    veData2.base->Terminate();
+
+    // Release sub-APIs
+    ReleaseSubApis(veData1);
+    ReleaseSubApis(veData2);
+
+    // Delete
+    VoiceEngine::Delete(veData1.ve);
+    VoiceEngine::Delete(veData2.ve);
+    veData1.ve = NULL;
+    veData2.ve = NULL;
+
+    return 0;
+}
+
+ThreadTest threadTest;
+
+//////////////////////////////////////////////////////////////////
+// General functions
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// JNI_OnLoad
+//
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
+{
+    if (!vm)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "JNI_OnLoad did not receive a valid VM pointer");
+        return -1;
+    }
+
+    // Get JNI
+    JNIEnv* env;
+    if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                             JNI_VERSION_1_4))
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "JNI_OnLoad could not get JNI env");
+        return -1;
+    }
+
+    // Get class to register the native functions with
+    // jclass regClass = env->FindClass("webrtc/android/AndroidTest");
+    // if (!regClass) {
+    // return -1; // Exception thrown
+    // }
+
+    // Register native functions
+    // JNINativeMethod methods[1];
+    // methods[0].name = NULL;
+    // methods[0].signature = NULL;
+    // methods[0].fnPtr = NULL;
+    // if (JNI_OK != env->RegisterNatives(regClass, methods, 1))
+    // {
+    // return -1;
+    // }
+
+    // Init VoiceEngine data
+    memset(&veData1, 0, sizeof(veData1));
+    memset(&veData2, 0, sizeof(veData2));
+
+    // Store the JVM
+    veData1.jvm = vm;
+    veData2.jvm = vm;
+
+    return JNI_VERSION_1_4;
+}
+
+/////////////////////////////////////////////
+// Native initialization
+//
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_NativeInit(
+        JNIEnv * env,
+        jclass)
+{
+    // Look up and cache any interesting class, field and method IDs for
+    // any used java class here
+
+    return true;
+}
+
+/////////////////////////////////////////////
+// Run auto standard test
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_RunAutoTest(
+        JNIEnv *env,
+        jobject context,
+        jint testType,
+        jint extendedSel)
+{
+    TestType tType(Invalid);
+
+    switch (testType)
+    {
+        case 0:
+            return 0;
+        case 1:
+            tType = Standard;
+            break;
+        case 2:
+            tType = Extended;
+            break;
+        case 3:
+            tType = Stress;
+            break;
+        case 4:
+            tType = Unit;
+            break;
+        default:
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "RunAutoTest - Invalid TestType");
+            return -1;
+    }
+
+    ExtendedSelection xsel(XSEL_Invalid);
+
+    switch (extendedSel)
+    {
+        case 0:
+            xsel = XSEL_None;
+            break;
+        case 1:
+            xsel = XSEL_All;
+            break;
+        case 2:
+            xsel = XSEL_Base;
+            break;
+        case 3:
+            xsel = XSEL_CallReport;
+            break;
+        case 4:
+            xsel = XSEL_Codec;
+            break;
+        case 5:
+            xsel = XSEL_DTMF;
+            break;
+        case 6:
+            xsel = XSEL_Encryption;
+            break;
+        case 7:
+            xsel = XSEL_ExternalMedia;
+            break;
+        case 8:
+            xsel = XSEL_File;
+            break;
+        case 9:
+            xsel = XSEL_Hardware;
+            break;
+        case 10:
+            xsel = XSEL_NetEqStats;
+            break;
+        case 11:
+            xsel = XSEL_Network;
+            break;
+        case 12:
+            xsel = XSEL_PTT;
+            break;
+        case 13:
+            xsel = XSEL_RTP_RTCP;
+            break;
+        case 14:
+            xsel = XSEL_VideoSync;
+            break;
+        case 15:
+            xsel = XSEL_VideoSyncExtended;
+            break;
+        case 16:
+            xsel = XSEL_VolumeControl;
+            break;
+        case 17:
+            xsel = XSEL_APM;
+            break;
+        case 18:
+            xsel = XSEL_VQMon;
+            break;
+        default:
+            xsel = XSEL_Invalid;
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "RunAutoTest - Invalid extendedType");
+            return -1;
+    }
+
+    // Set instance independent Java objects
+    VoiceEngine::SetAndroidObjects(veData1.jvm, env, context);
+
+    // Call voe test interface function
+    // TODO(leozwang) add autotest setAndroidObjects(veData1.jvm, context);
+    // jint retVal = runAutoTest(tType, xsel);
+
+    // Clear instance independent Java objects
+    VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
+
+    return 0;
+}
+
+//////////////////////////////////////////////////////////////////
+// VoiceEngine API wrapper functions
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// Create VoiceEngine instance
+//
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Create(
+        JNIEnv *env,
+        jobject context)
+{
+    // Check if already created
+    if (veData1.ve)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "VoE already created");
+        return false;
+    }
+
+    // Set instance independent Java objects
+    VoiceEngine::SetAndroidObjects(veData1.jvm, env, context);
+
+#ifdef START_CALL_FROM_THREAD
+    threadTest.RunTest();
+#else
+    // Create
+    veData1.ve = VoiceEngine::Create();
+    if (!veData1.ve)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Create VoE failed");
+        return false;
+    }
+
+    // Get sub-APIs
+    if (!GetSubApis(veData1))
+    {
+        // If not OK, release all sub-APIs and delete VoE
+        ReleaseSubApis(veData1);
+        if (!VoiceEngine::Delete(veData1.ve))
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Delete VoE failed");
+        }
+        return false;
+    }
+#endif
+
+    return true;
+}
+
+/////////////////////////////////////////////
+// Delete VoiceEngine instance
+//
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Delete(
+        JNIEnv *,
+        jobject)
+{
+#ifdef START_CALL_FROM_THREAD
+    threadTest.CloseTest();
+#else
+    // Check if exists
+    if (!veData1.ve)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "VoE does not exist");
+        return false;
+    }
+
+    // Release sub-APIs
+    ReleaseSubApis(veData1);
+
+    // Delete
+    if (!VoiceEngine::Delete(veData1.ve))
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Delete VoE failed");
+        return false;
+    }
+
+    veData1.ve = NULL;
+#endif
+
+    // Clear instance independent Java objects
+    VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
+
+    return true;
+}
+
+/////////////////////////////////////////////
+// [Base] Initialize VoiceEngine
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Init(
+        JNIEnv *,
+        jobject,
+        jboolean enableTrace,
+        jboolean useExtTrans)
+{
+    VALIDATE_BASE_POINTER;
+
+    if (enableTrace)
+    {
+        if (0 != VoiceEngine::SetTraceFile("/sdcard/trace.txt"))
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Could not enable trace");
+        }
+        if (0 != VoiceEngine::SetTraceFilter(kTraceAll))
+        {
+            __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
+                                "Could not set trace filter");
+        }
+    }
+
+    if (useExtTrans)
+    {
+        VALIDATE_NETWORK_POINTER;
+        veData1.extTrans = new my_transportation(veData1.netw);
+    }
+
+    int retVal = 0;
+#ifdef INIT_FROM_THREAD
+    threadTest.RunTest();
+    usleep(200000);
+#else
+    retVal = veData1.base->Init();
+#endif
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Terminate VoiceEngine
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Terminate(
+        JNIEnv *,
+        jobject)
+{
+    VALIDATE_BASE_POINTER;
+
+    jint retVal = veData1.base->Terminate();
+
+    delete veData1.extTrans;
+    veData1.extTrans = NULL;
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Create channel
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_CreateChannel(
+        JNIEnv *,
+        jobject)
+{
+    VALIDATE_BASE_POINTER;
+    jint channel = veData1.base->CreateChannel();
+
+    if (veData1.extTrans)
+    {
+        VALIDATE_NETWORK_POINTER;
+        __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "Enabling external transport on channel %d",
+                            channel);
+        if (veData1.netw->RegisterExternalTransport(channel, *veData1.extTrans)
+                < 0)
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Could not set external transport");
+            return -1;
+        }
+    }
+
+    return channel;
+}
+
+/////////////////////////////////////////////
+// [Base] Delete channel
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_DeleteChannel(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_BASE_POINTER;
+    return veData1.base->DeleteChannel(channel);
+}
+
+/////////////////////////////////////////////
+// [Base] SetLocalReceiver
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetLocalReceiver(
+        JNIEnv *,
+        jobject,
+        jint channel,
+        jint port)
+{
+    VALIDATE_BASE_POINTER;
+    return veData1.base->SetLocalReceiver(channel, port);
+}
+
+/////////////////////////////////////////////
+// [Base] SetSendDestination
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetSendDestination(
+        JNIEnv *env,
+        jobject,
+        jint channel,
+        jint port,
+        jstring ipaddr)
+{
+    VALIDATE_BASE_POINTER;
+
+    const char* ipaddrNative = env->GetStringUTFChars(ipaddr, NULL);
+    if (!ipaddrNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    jint retVal = veData1.base->SetSendDestination(channel, port, ipaddrNative);
+
+    env->ReleaseStringUTFChars(ipaddr, ipaddrNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] StartListen
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartListen(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    bool useForRTCP = false;
+    if (veData1.encrypt->EnableSRTPReceive(
+                    channel,CIPHER_AES_128_COUNTER_MODE,30,AUTH_HMAC_SHA1,
+                    16,4, ENCRYPTION_AND_AUTHENTICATION,
+                    (unsigned char*)nikkey, useForRTCP) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to enable SRTP receive");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    int retVal = veData1.base->StartReceive(channel);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Start playout
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayout(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_BASE_POINTER;
+    int retVal = veData1.base->StartPlayout(channel);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Start send
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartSend(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    /*    int dscp(0), serviceType(-1), overrideDscp(0), res(0);
+     bool gqosEnabled(false), useSetSockOpt(false);
+
+     if (veData1.netw->SetSendTOS(channel, 13, useSetSockOpt) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to set TOS");
+     return -1;
+     }
+
+     res = veData1.netw->GetSendTOS(channel, dscp, useSetSockOpt);
+     if (res != 0 || dscp != 13 || useSetSockOpt != true)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to get TOS");
+     return -1;
+     } */
+
+    /* if (veData1.rtp_rtcp->SetFECStatus(channel, 1) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to enable FEC");
+     return -1;
+     } */
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    bool useForRTCP = false;
+    if (veData1.encrypt->EnableSRTPSend(
+                    channel,CIPHER_AES_128_COUNTER_MODE,30,AUTH_HMAC_SHA1,
+                    16,4, ENCRYPTION_AND_AUTHENTICATION,
+                    (unsigned char*)nikkey, useForRTCP) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to enable SRTP send");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    int retVal = veData1.base->StartSend(channel);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Stop listen
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopListen(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    if (veData1.encrypt->DisableSRTPReceive(channel) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to disable SRTP receive");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    return veData1.base->StopReceive(channel);
+}
+
+/////////////////////////////////////////////
+// [Base] Stop playout
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayout(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_BASE_POINTER;
+    return veData1.base->StopPlayout(channel);
+}
+
+/////////////////////////////////////////////
+// [Base] Stop send
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopSend(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    /* if (veData1.rtp_rtcp->SetFECStatus(channel, 0) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to disable FEC");
+     return -1;
+     } */
+
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    if (veData1.encrypt->DisableSRTPSend(channel) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to disable SRTP send");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    return veData1.base->StopSend(channel);
+}
+
+/////////////////////////////////////////////
+// [codec] Number of codecs
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_NumOfCodecs(
+        JNIEnv *,
+        jobject)
+{
+    VALIDATE_CODEC_POINTER;
+    return veData1.codec->NumOfCodecs();
+}
+
+/////////////////////////////////////////////
+// [codec] Set send codec
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetSendCodec(
+        JNIEnv *,
+        jobject,
+        jint channel,
+        jint index)
+{
+    VALIDATE_CODEC_POINTER;
+
+    CodecInst codec;
+
+    if (veData1.codec->GetCodec(index, codec) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Failed to get codec");
+        return -1;
+    }
+
+    return veData1.codec->SetSendCodec(channel, codec);
+}
+
+/////////////////////////////////////////////
+// [codec] Set VAD status
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetVADStatus(
+        JNIEnv *,
+        jobject,
+        jint channel,
+        jboolean enable,
+        jint mode)
+{
+    // Translate the Java-side mode index to a VadModes value and apply it
+    // to |channel|.
+    VALIDATE_CODEC_POINTER;
+
+    VadModes vadMode;
+    if (mode == 0)
+    {
+        vadMode = kVadConventional;
+    }
+    else if (mode == 1)
+    {
+        vadMode = kVadAggressiveLow;
+    }
+    else if (mode == 2)
+    {
+        vadMode = kVadAggressiveMid;
+    }
+    else if (mode == 3)
+    {
+        vadMode = kVadAggressiveHigh;
+    }
+    else
+    {
+        // Out-of-range indices map to a deliberately invalid enum value so
+        // the engine rejects the request.
+        vadMode = (VadModes) 17;
+    }
+
+    return veData1.codec->SetVADStatus(channel, enable, vadMode);
+}
+
+/////////////////////////////////////////////
+// [apm] SetNSStatus
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetNSStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable,
+        jint mode)
+{
+    // Translate the Java-side mode index to an NsModes value and apply it.
+    VALIDATE_APM_POINTER;
+
+    static const NsModes kNsModeTable[] =
+    {
+        kNsUnchanged,            // 0
+        kNsDefault,              // 1
+        kNsConference,           // 2
+        kNsLowSuppression,       // 3
+        kNsModerateSuppression,  // 4
+        kNsHighSuppression,      // 5
+        kNsVeryHighSuppression   // 6
+    };
+
+    NsModes nsMode;
+    if (mode >= 0 && mode <= 6)
+    {
+        nsMode = kNsModeTable[mode];
+    }
+    else
+    {
+        // Deliberately invalid enum value so the engine rejects the request.
+        nsMode = (NsModes) 17;
+    }
+
+    return veData1.apm->SetNsStatus(enable, nsMode);
+}
+
+/////////////////////////////////////////////
+// [apm] SetAGCStatus
+//
+// Maps the Java-side AGC mode index (0..4) onto an AgcModes value and
+// enables/disables automatic gain control through the audio-processing
+// sub-API. Unknown indices are forced to an invalid enum value so the
+// engine reports an error. Returns 0 on success, -1 otherwise.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetAGCStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable,
+        jint mode)
+{
+    VALIDATE_APM_POINTER;
+
+    AgcModes AGCmode = kAgcDefault;
+
+    switch (mode)
+    {
+        case 0:
+            AGCmode = kAgcUnchanged;
+            break;
+        case 1:
+            break; // already set
+        case 2:
+            AGCmode = kAgcAdaptiveAnalog;
+            break;
+        case 3:
+            AGCmode = kAgcAdaptiveDigital;
+            break;
+        case 4:
+            AGCmode = kAgcFixedDigital;
+            break;
+        default:
+            AGCmode = (AgcModes) 17; // force error
+            break;
+    }
+
+    // Dead code kept from development: custom AGC configuration experiment.
+    /* AgcConfig agcConfig;
+     agcConfig.targetLeveldBOv = 3;
+     agcConfig.digitalCompressionGaindB = 50;
+     agcConfig.limiterEnable = 0;
+
+     if (veData1.apm->SetAGCConfig(agcConfig) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to set AGC config");
+     return -1;
+     } */
+
+    return veData1.apm->SetAgcStatus(enable, AGCmode);
+}
+
+/////////////////////////////////////////////
+// [apm] SetECStatus
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetECStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable,
+        jint mode)
+{
+    // Translate the Java-side mode index to an EcModes value and apply it.
+    // Indices 0 and 1 both select the engine default.
+    VALIDATE_APM_POINTER;
+
+    EcModes ecMode;
+    if (mode == 0 || mode == 1)
+    {
+        ecMode = kEcDefault;
+    }
+    else if (mode == 2)
+    {
+        ecMode = kEcConference;
+    }
+    else if (mode == 3)
+    {
+        ecMode = kEcAec;
+    }
+    else if (mode == 4)
+    {
+        ecMode = kEcAecm;
+    }
+    else
+    {
+        // Deliberately invalid enum value so the engine rejects the request.
+        ecMode = (EcModes) 17;
+    }
+
+    return veData1.apm->SetEcStatus(enable, ecMode);
+}
+
+/////////////////////////////////////////////
+// [File] Start play file locally
+//
+// Starts local playout of |fileName| on |channel|, optionally looping.
+// Returns 0 on success, -1 on failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileLocally(
+        JNIEnv * env,
+        jobject,
+        jint channel,
+        jstring fileName,
+        jboolean loop)
+{
+    VALIDATE_FILE_POINTER;
+
+    const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+    if (!fileNameNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    jint retVal = veData1.file->StartPlayingFileLocally(channel,
+                                                        fileNameNative, loop);
+
+    // Release the UTF buffer regardless of the engine call's outcome to
+    // avoid leaking the JNI string copy.
+    env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [File] Stop play file locally
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileLocally(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    // Stop any local file playout previously started on this channel.
+    VALIDATE_FILE_POINTER;
+    const jint result = veData1.file->StopPlayingFileLocally(channel);
+    return result;
+}
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartRecordingPlayout
+ * Signature: (ILjava/lang/String;Z)I
+ */
+// Records the playout of |channel| to |fileName|. The trailing jboolean
+// (the Z in the Java signature) is intentionally unused. Returns 0 on
+// success, -1 on failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartRecordingPlayout(
+        JNIEnv * env,
+        jobject,
+        jint channel,
+        jstring fileName,
+        jboolean)
+{
+    VALIDATE_FILE_POINTER;
+
+    const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+    if (!fileNameNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    // NULL instead of the original magic literal 0: presumably the third
+    // argument is a codec-instance pointer and NULL selects the default
+    // recording format -- confirm against VoEFile::StartRecordingPlayout.
+    jint retVal = veData1.file->StartRecordingPlayout(channel, fileNameNative,
+                                                      NULL);
+
+    // Always release the UTF buffer obtained above to avoid a JNI leak.
+    env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [File] Stop Recording Playout
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StopRecordingPlayout(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    // Stop any playout recording previously started on this channel.
+    VALIDATE_FILE_POINTER;
+    const jint result = veData1.file->StopRecordingPlayout(channel);
+    return result;
+}
+
+/////////////////////////////////////////////
+// [File] Start playing file as microphone
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileAsMicrophone(
+        JNIEnv *env,
+        jobject,
+        jint channel,
+        jstring fileName,
+        jboolean loop)
+{
+    // Feed the given file into |channel| as if it were microphone input,
+    // optionally looping. Returns 0 on success, -1 on failure.
+    VALIDATE_FILE_POINTER;
+
+    const char* nativeFileName = env->GetStringUTFChars(fileName, NULL);
+    if (nativeFileName == NULL)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    const jint status = veData1.file->StartPlayingFileAsMicrophone(
+            channel, nativeFileName, loop);
+
+    // Release the UTF buffer regardless of the engine call's outcome.
+    env->ReleaseStringUTFChars(fileName, nativeFileName);
+
+    return status;
+}
+
+/////////////////////////////////////////////
+// [File] Stop playing file as microphone
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileAsMicrophone(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    // Stop feeding file data as microphone input on this channel.
+    VALIDATE_FILE_POINTER;
+    const jint result = veData1.file->StopPlayingFileAsMicrophone(channel);
+    return result;
+}
+
+/////////////////////////////////////////////
+// [Volume] Set speaker volume
+//
+// Sets the speaker volume to |level| and verifies the setting by reading
+// it back. Returns 0 on success, -1 on any failure or read-back mismatch.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetSpeakerVolume(
+        JNIEnv *,
+        jobject,
+        jint level)
+{
+    VALIDATE_VOLUME_POINTER;
+    if (veData1.volume->SetSpeakerVolume(level) != 0)
+    {
+        return -1;
+    }
+
+    unsigned int storedVolume = 0;
+    if (veData1.volume->GetSpeakerVolume(storedVolume) != 0)
+    {
+        return -1;
+    }
+
+    // Compare as signed: the original (storedVolume != level) implicitly
+    // converted |level| to unsigned, so a negative |level| would compare
+    // against a huge unsigned value instead of failing cleanly.
+    if (static_cast<jint>(storedVolume) != level)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+/////////////////////////////////////////////
+// [Hardware] Set loudspeaker status
+//
+// Routes audio to/from the loudspeaker according to |enable| via the
+// hardware sub-API. Returns 0 on success, -1 on failure.
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetLoudspeakerStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable)
+{
+    VALIDATE_HARDWARE_POINTER;
+    if (veData1.hardware->SetLoudspeakerStatus(enable) != 0)
+    {
+        return -1;
+    }
+
+    // Dead code kept from development: FEC toggling experiment.
+    /*VALIDATE_RTP_RTCP_POINTER;
+
+     if (veData1.rtp_rtcp->SetFECStatus(0, enable, -1) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Could not set FEC");
+     return -1;
+     }
+     else if(enable)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Could enable FEC");
+     }
+     else
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Could disable FEC");
+     }*/
+
+    return 0;
+}
+
+//////////////////////////////////////////////////////////////////
+// "Local" functions (i.e. not Java accessible)
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// Get all sub-APIs
+//
+// Acquires every VoiceEngine sub-API interface into |veData|. On a failure
+// it logs, clears the success flag, and keeps going, so |veData| ends up
+// holding whichever subset of interfaces could be obtained. Returns true
+// only if all interfaces were acquired.
+bool GetSubApis(VoiceEngineData &veData)
+{
+    bool getOK = true;
+
+    // Base
+    veData.base = VoEBase::GetInterface(veData.ve);
+    if (!veData.base)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get base sub-API failed");
+        getOK = false;
+    }
+
+    // Codec
+    veData.codec = VoECodec::GetInterface(veData.ve);
+    if (!veData.codec)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get codec sub-API failed");
+        getOK = false;
+    }
+
+    // File
+    veData.file = VoEFile::GetInterface(veData.ve);
+    if (!veData.file)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get file sub-API failed");
+        getOK = false;
+    }
+
+    // Network
+    veData.netw = VoENetwork::GetInterface(veData.ve);
+    if (!veData.netw)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get network sub-API failed");
+        getOK = false;
+    }
+
+    // AudioProcessing module
+    veData.apm = VoEAudioProcessing::GetInterface(veData.ve);
+    if (!veData.apm)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get apm sub-API failed");
+        getOK = false;
+    }
+
+    // Volume
+    veData.volume = VoEVolumeControl::GetInterface(veData.ve);
+    if (!veData.volume)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get volume sub-API failed");
+        getOK = false;
+    }
+
+    // Hardware
+    veData.hardware = VoEHardware::GetInterface(veData.ve);
+    if (!veData.hardware)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get hardware sub-API failed");
+        getOK = false;
+    }
+
+    // RTP / RTCP
+    veData.rtp_rtcp = VoERTP_RTCP::GetInterface(veData.ve);
+    if (!veData.rtp_rtcp)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get rtp_rtcp sub-API failed");
+        getOK = false;
+    }
+
+    // Encrypt
+    veData.encrypt = VoEEncryption::GetInterface(veData.ve);
+    if (!veData.encrypt)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get encrypt sub-API failed");
+        getOK = false;
+    }
+
+    return getOK;
+}
+
+/////////////////////////////////////////////
+// Release all sub-APIs
+//
+// Releases every sub-API interface held in |veData|, NULLing each pointer
+// whose Release() succeeds. On a failed Release() the pointer is left set
+// and the failure is logged. Returns true only if all held interfaces were
+// released successfully.
+bool ReleaseSubApis(VoiceEngineData &veData)
+{
+    bool releaseOK = true;
+
+    // Base
+    if (veData.base)
+    {
+        if (0 != veData.base->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release base sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.base = NULL;
+        }
+    }
+
+    // Codec
+    if (veData.codec)
+    {
+        if (0 != veData.codec->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release codec sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.codec = NULL;
+        }
+    }
+
+    // File
+    if (veData.file)
+    {
+        if (0 != veData.file->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release file sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.file = NULL;
+        }
+    }
+
+    // Network
+    if (veData.netw)
+    {
+        if (0 != veData.netw->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release network sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.netw = NULL;
+        }
+    }
+
+    // apm
+    if (veData.apm)
+    {
+        if (0 != veData.apm->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release apm sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.apm = NULL;
+        }
+    }
+
+    // Volume
+    if (veData.volume)
+    {
+        if (0 != veData.volume->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release volume sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.volume = NULL;
+        }
+    }
+
+    // Hardware
+    if (veData.hardware)
+    {
+        if (0 != veData.hardware->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release hardware sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.hardware = NULL;
+        }
+    }
+
+    // RTP RTCP
+    if (veData.rtp_rtcp)
+    {
+        if (0 != veData.rtp_rtcp->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release rtp_rtcp sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.rtp_rtcp = NULL;
+        }
+    }
+
+    // Encrypt
+    if (veData.encrypt)
+    {
+        if (0 != veData.encrypt->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release encrypt sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.encrypt = NULL;
+        }
+    }
+
+    return releaseOK;
+}
diff --git a/trunk/src/voice_engine/main/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h b/trunk/src/voice_engine/main/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h
new file mode 100644
index 0000000..60fe839
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h
@@ -0,0 +1,253 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_voiceengine_test_AndroidTest */
+
+#ifndef _Included_org_webrtc_voiceengine_test_AndroidTest
+#define _Included_org_webrtc_voiceengine_test_AndroidTest
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    NativeInit
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_NativeInit
+  (JNIEnv *, jclass);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    RunAutoTest
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_RunAutoTest
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Create
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Create
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Delete
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Delete
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Init
+ * Signature: (IIIZZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Init
+  (JNIEnv *, jobject, jboolean, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Terminate
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    CreateChannel
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_CreateChannel
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    DeleteChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_DeleteChannel
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetLocalReceiver
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetSendDestination
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetSendDestination
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartListen
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopListen
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartPlayingFileLocally
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileLocally
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopPlayingFileLocally
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileLocally
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartRecordingPlayout
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartRecordingPlayout
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopRecordingPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopRecordingPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartPlayingFileAsMicrophone
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileAsMicrophone
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopPlayingFileAsMicrophone
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileAsMicrophone
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    NumOfCodecs
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_NumOfCodecs
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetSendCodec
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetSendCodec
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetVADStatus
+ * Signature: (IZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetVADStatus
+  (JNIEnv *, jobject, jint, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetNSStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetNSStatus
+  (JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetAGCStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetAGCStatus
+  (JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetECStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetECStatus
+  (JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetSpeakerVolume
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetSpeakerVolume
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetLoudspeakerStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetLoudspeakerStatus
+  (JNIEnv *, jobject, jboolean);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/trunk/src/voice_engine/main/test/android/android_test/res/drawable/icon.png b/trunk/src/voice_engine/main/test/android/android_test/res/drawable/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/res/drawable/icon.png
Binary files differ
diff --git a/trunk/src/voice_engine/main/test/android/android_test/res/layout/main.xml b/trunk/src/voice_engine/main/test/android/android_test/res/layout/main.xml
new file mode 100644
index 0000000..4165a07
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/res/layout/main.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -->
+<!--                                                                     -->
+<!-- Use of this source code is governed by a BSD-style license          -->
+<!-- that can be found in the LICENSE file in the root of the source     -->
+<!-- tree. An additional intellectual property rights grant can be found -->
+<!-- in the file PATENTS.  All contributing project authors may          -->
+<!-- be found in the AUTHORS file in the root of the source tree.        -->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+
+  <TextView android:text="@+id/TextView01"
+	    android:id="@+id/TextView01"
+	    android:layout_width="wrap_content"
+	    android:layout_height="wrap_content">
+  </TextView>
+  <EditText android:text="@+id/EditText01"
+	    android:id="@+id/EditText01"
+	    android:layout_width="wrap_content"
+	    android:layout_height="wrap_content">
+  </EditText>
+  <Button android:text="@+id/Button01"
+	  android:id="@+id/Button01"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Spinner android:id="@+id/Spinner01"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content">
+  </Spinner>
+  <Spinner android:id="@+id/Spinner02"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content">
+  </Spinner>
+  <Button android:text="@+id/Button02"
+	  android:id="@+id/Button02"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+</LinearLayout>
diff --git a/trunk/src/voice_engine/main/test/android/android_test/res/values/strings.xml b/trunk/src/voice_engine/main/test/android/android_test/res/values/strings.xml
new file mode 100644
index 0000000..29ec4ee
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/res/values/strings.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -->
+<!--                                                                     -->
+<!-- Use of this source code is governed by a BSD-style license          -->
+<!-- that can be found in the LICENSE file in the root of the source     -->
+<!-- tree. An additional intellectual property rights grant can be found -->
+<!-- in the file PATENTS.  All contributing project authors may          -->
+<!-- be found in the AUTHORS file in the root of the source tree.        -->
+
+<resources>
+    
+    <string name="app_name">WebRtc VoE</string>
+</resources>
diff --git a/trunk/src/voice_engine/main/test/android/android_test/src/org/webrtc/voiceengine/test/AndroidTest.java b/trunk/src/voice_engine/main/test/android/android_test/src/org/webrtc/voiceengine/test/AndroidTest.java
new file mode 100644
index 0000000..71b22b0
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/android/android_test/src/org/webrtc/voiceengine/test/AndroidTest.java
@@ -0,0 +1,1190 @@
+/*

+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.

+ *

+ * Use of this source code is governed by a BSD-style license that can be found

+ * in the LICENSE file in the root of the source tree. An additional

+ * intellectual property rights grant can be found in the file PATENTS. All

+ * contributing project authors may be found in the AUTHORS file in the root of

+ * the source tree.

+ */

+

+/*

+ * VoiceEngine Android test application. It starts either auto test or acts like

+ * a GUI test.

+ */

+

+package org.webrtc.voiceengine.test;

+

+import java.io.File;

+import java.io.FileInputStream;

+import java.io.FileNotFoundException;

+import java.io.FileOutputStream;

+import java.io.FileReader;

+import java.io.IOException;

+

+import android.app.Activity;

+import android.content.Context;

+import android.media.AudioFormat;

+import android.media.AudioManager;

+import android.media.AudioRecord;

+import android.media.AudioTrack;

+import android.media.MediaRecorder;

+import android.os.Bundle;

+import android.util.Log;

+import android.view.View;

+import android.widget.AdapterView;

+import android.widget.ArrayAdapter;

+import android.widget.Button;

+import android.widget.EditText;

+import android.widget.Spinner;

+import android.widget.TextView;

+

+public class AndroidTest extends Activity {

+    private byte[] _playBuffer = null;

+    private short[] _circBuffer = new short[8000]; // can hold 50 frames

+

+    private int _recIndex = 0;

+    private int _playIndex = 0;

+    // private int _streamVolume = 4;

+    private int _maxVolume = 0; // Android max level (commonly 5)

+    // VoE level (0-255), corresponds to level 4 out of 5

+    private int _volumeLevel = 204;

+

+    private Thread _playThread;

+    private Thread _recThread;

+    private Thread _autotestThread;

+

+    private static AudioTrack _at;

+    private static AudioRecord _ar;

+

+    private File _fr = null;

+    private FileInputStream _in = null;

+

+    private boolean _isRunningPlay = false;

+    private boolean _isRunningRec = false;

+    private boolean _settingSet = true;

+    private boolean _isCallActive = false;

+    private boolean _runAutotest = false; // ENABLE AUTOTEST HERE!

+

+    private int _channel = -1;

+    private int _codecIndex = 0;

+    private int _ecIndex = 0;

+    private int _nsIndex = 0;

+    private int _agcIndex = 0;

+    private int _vadIndex = 0;

+    private int _audioIndex = 3;

+    private int _settingMenu = 0;

+    private int _receivePort = 1234;

+    private int _destinationPort = 1234;

+    private String _destinationIP = "127.0.0.1";

+

+    // "Build" settings

+    private final boolean _playFromFile = false;

+    // Set to true to send data to native code and back

+    private final boolean _runThroughNativeLayer = true;

+    private final boolean enableSend = true;

+    private final boolean enableReceive = true;

+    private final boolean useNativeThread = false;

+

+    /** Called when the activity is first created. */

+    public void onCreate(Bundle savedInstanceState) {

+        super.onCreate(savedInstanceState);

+        setContentView(R.layout.main);

+

+        TextView tv = (TextView) findViewById(R.id.TextView01);

+        tv.setText("");

+

+        final EditText ed = (EditText) findViewById(R.id.EditText01);

+        ed.setWidth(200);

+        ed.setText(_destinationIP);

+

+        final Button buttonStart = (Button) findViewById(R.id.Button01);

+        buttonStart.setWidth(200);

+        if (_runAutotest) {

+            buttonStart.setText("Run test");

+        } else {

+            buttonStart.setText("Start Call");

+        }

+        // button.layout(50, 50, 100, 40);

+        buttonStart.setOnClickListener(new View.OnClickListener() {

+            public void onClick(View v) {

+

+                if (_runAutotest) {

+                    startAutoTest();

+                } else {

+                    if (_isCallActive) {

+

+                        if (stopCall() != -1) {

+                            _isCallActive = false;

+                            buttonStart.setText("Start Call");

+                        }

+                    } else {

+

+                        _destinationIP = ed.getText().toString();

+                        if (startCall() != -1) {

+                            _isCallActive = true;

+                            buttonStart.setText("Stop Call");

+                        }

+                    }

+                }

+

+                // displayTextFromFile();

+                // recordAudioToFile();

+                // if(!_playFromFile)

+                // {

+                // recAudioInThread();

+                // }

+                // playAudioInThread();

+            }

+        });

+

+        final Button buttonStop = (Button) findViewById(R.id.Button02);

+        buttonStop.setWidth(200);

+        buttonStop.setText("Close app");

+        buttonStop.setOnClickListener(new View.OnClickListener() {

+            public void onClick(View v) {

+

+                if (!_runAutotest) {

+                    ShutdownVoE();

+                }

+

+                // This call terminates and should close the activity

+                finish();

+

+                // playAudioFromFile();

+                // if(!_playFromFile)

+                // {

+                // stopRecAudio();

+                // }

+                // stopPlayAudio();

+            }

+        });

+

+

+        String ap1[] = {"EC off", "AECM"};

+        final ArrayAdapter<String> adapterAp1 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap1);

+        String ap2[] =

+                        {"NS off", "NS low", "NS moderate", "NS high",

+                                        "NS very high"};

+        final ArrayAdapter<String> adapterAp2 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap2);

+        String ap3[] = {"AGC off", "AGC adaptive", "AGC fixed"};

+        final ArrayAdapter<String> adapterAp3 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap3);

+        String ap4[] =

+                        {"VAD off", "VAD conventional", "VAD high rate",

+                                        "VAD mid rate", "VAD low rate"};

+        final ArrayAdapter<String> adapterAp4 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap4);

+        String codecs[] = {"iSAC", "PCMU", "PCMA", "iLBC"};

+        final ArrayAdapter<String> adapterCodecs = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        codecs);

+

+        final Spinner spinnerSettings1 = (Spinner) findViewById(R.id.Spinner01);

+        final Spinner spinnerSettings2 = (Spinner) findViewById(R.id.Spinner02);

+        spinnerSettings1.setMinimumWidth(200);

+        String settings[] =

+                        {"Codec", "Echo Control", "Noise Suppression",

+                         "Automatic Gain Control",

+                         "Voice Activity Detection"};

+        ArrayAdapter<String> adapterSettings1 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        settings);

+        spinnerSettings1.setAdapter(adapterSettings1);

+        spinnerSettings1.setOnItemSelectedListener(

+                        new AdapterView.OnItemSelectedListener() {

+            public void onItemSelected(AdapterView adapterView, View view,

+                            int position, long id) {

+

+                _settingMenu = position;

+                _settingSet = false;

+                if (position == 0) {

+                    spinnerSettings2.setAdapter(adapterCodecs);

+                    spinnerSettings2.setSelection(_codecIndex);

+                }

+                if (position == 1) {

+                    spinnerSettings2.setAdapter(adapterAp1);

+                    spinnerSettings2.setSelection(_ecIndex);

+                }

+                if (position == 2) {

+                    spinnerSettings2.setAdapter(adapterAp2);

+                    spinnerSettings2.setSelection(_nsIndex);

+                }

+                if (position == 3) {

+                    spinnerSettings2.setAdapter(adapterAp3);

+                    spinnerSettings2.setSelection(_agcIndex);

+                }

+                if (position == 4) {

+                    spinnerSettings2.setAdapter(adapterAp4);

+                    spinnerSettings2.setSelection(_vadIndex);

+                }

+            }

+

+            public void onNothingSelected(AdapterView adapterView) {

+                WebrtcLog("No setting1 selected");

+            }

+        });

+

+        spinnerSettings2.setMinimumWidth(200);

+        ArrayAdapter<String> adapterSettings2 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        codecs);

+        spinnerSettings2.setAdapter(adapterSettings2);

+        spinnerSettings2.setOnItemSelectedListener(

+                        new AdapterView.OnItemSelectedListener() {

+            public void onItemSelected(AdapterView adapterView, View view,

+                            int position, long id) {

+

+                // avoid unintentional setting

+                if (_settingSet == false) {

+                    _settingSet = true;

+                    return;

+                }

+

+                // Change volume

+                if (_settingMenu == 0) {

+                    WebrtcLog("Selected audio " + position);

+                    setAudioProperties(position);

+                    spinnerSettings2.setSelection(_audioIndex);

+                }

+

+                // Change codec

+                if (_settingMenu == 1) {

+                    _codecIndex = position;

+                    WebrtcLog("Selected codec " + position);

+                    if (0 != SetSendCodec(_channel, _codecIndex)) {

+                        WebrtcLog("VoE set send codec failed");

+                    }

+                }

+

+                // Change EC

+                if (_settingMenu == 2) {

+                    boolean enable = true;

+                    int ECmode = 5; // AECM

+                    int AESmode = 0;

+

+                    _ecIndex = position;

+                    WebrtcLog("Selected EC " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                    }

+                    if (position > 1) {

+                        ECmode = 4; // AES

+                        AESmode = position - 1;

+                    }

+

+                    if (0 != SetECStatus(enable, ECmode)) {

+                        WebrtcLog("VoE set EC status failed");

+                    }

+                }

+

+                // Change NS

+                if (_settingMenu == 3) {

+                    boolean enable = true;

+

+                    _nsIndex = position;

+                    WebrtcLog("Selected NS " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                    }

+                    if (0 != SetNSStatus(enable, position + 2)) {

+                        WebrtcLog("VoE set NS status failed");

+                    }

+                }

+

+                // Change AGC

+                if (_settingMenu == 4) {

+                    boolean enable = true;

+

+                    _agcIndex = position;

+                    WebrtcLog("Selected AGC " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                        position = 1; // default

+                    }

+                    if (0 != SetAGCStatus(enable, position + 2)) {

+                        WebrtcLog("VoE set AGC status failed");

+                    }

+                }

+

+                // Change VAD

+                if (_settingMenu == 5) {

+                    boolean enable = true;

+

+                    _vadIndex = position;

+                    WebrtcLog("Selected VAD " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                        position++;

+                    }

+                    if (0 != SetVADStatus(_channel, enable, position - 1)) {

+                        WebrtcLog("VoE set VAD status failed");

+                    }

+                }

+            }

+

+            public void onNothingSelected(AdapterView adapterView) {

+            }

+        });

+

+        // Setup VoiceEngine

+        if (!_runAutotest && !useNativeThread) SetupVoE();

+

+        // Suggest to use the voice call audio stream for hardware volume

+        // controls

+        setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);

+

+        // Get max Android volume and adjust default volume to map exactly to an

+        // Android level

+        AudioManager am =

+                        (AudioManager) getSystemService(Context.AUDIO_SERVICE);

+        _maxVolume = am.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);

+        if (_maxVolume <= 0) {

+            WebrtcLog("Could not get max volume!");

+        } else {

+            int androidVolumeLevel = (_volumeLevel * _maxVolume) / 255;

+            _volumeLevel = (androidVolumeLevel * 255) / _maxVolume;

+        }

+

+        WebrtcLog("Started Webrtc Android Test");

+    }

+

+    // Will be called when the activity is shut down.

+    // NOTE: Activity may be killed without this function being called,

+    // but then we should not need to clean up.

+    protected void onDestroy() {

+        super.onDestroy();

+        // VoE shutdown is intentionally left to the "Close app" button

+        // handler, which calls ShutdownVoE() before finish().

+        // ShutdownVoE();

+    }

+

+    // Creates and initializes the VoiceEngine and a single channel.

+    // Failures are only logged; the resulting channel id is kept in _channel.

+    private void SetupVoE() {

+        // Create VoiceEngine

+        Create(); // Error logging is done in native API wrapper

+

+        // Initialize

+        if (0 != Init(false, false)) {

+            WebrtcLog("VoE init failed");

+        }

+

+        // Create channel

+        _channel = CreateChannel();

+        // NOTE(review): this treats any nonzero channel id as failure; it

+        // only holds if the first channel id is 0 -- verify against the

+        // native CreateChannel() return convention.

+        if (0 != _channel) {

+            WebrtcLog("VoE create channel failed");

+        }

+

+    }

+

+    // Tears down the VoE state created by SetupVoE(): deletes the channel,

+    // terminates the engine, and deletes the VoiceEngine instance.

+    // Failures are logged but teardown continues regardless.

+    private void ShutdownVoE() {

+        // Delete channel

+        if (0 != DeleteChannel(_channel)) {

+            WebrtcLog("VoE delete channel failed");

+        }

+

+        // Terminate

+        if (0 != Terminate()) {

+            WebrtcLog("VoE terminate failed");

+        }

+

+        // Delete VoiceEngine

+        Delete(); // Error logging is done in native API wrapper

+    }

+

+    // Starts a call: sets up the local receiver/playout when enableReceive

+    // is set and the send destination/codec/sender when enableSend is set.

+    // Returns 0 on success, -1 on the first fatal failure. In native-thread

+    // mode it only creates the engine and returns.

+    int startCall() {

+

+        if (useNativeThread == true) {

+

+            Create();

+            return 0;

+        }

+

+        if (enableReceive == true) {

+            // Set local receiver

+            // NOTE(review): unlike the calls below, a failure here is logged

+            // but does not abort with -1 -- confirm this is intentional.

+            if (0 != SetLocalReceiver(_channel, _receivePort)) {

+                WebrtcLog("VoE set local receiver failed");

+            }

+

+            if (0 != StartListen(_channel)) {

+                WebrtcLog("VoE start listen failed");

+                return -1;

+            }

+

+            // Route audio to earpiece

+            if (0 != SetLoudspeakerStatus(false)) {

+                WebrtcLog("VoE set louspeaker status failed");

+                return -1;

+            }

+

+            /*

+             * WebrtcLog("VoE start record now"); if (0 !=

+             * StartRecordingPlayout(_channel, "/sdcard/singleUserDemoOut.pcm",

+             * false)) { WebrtcLog("VoE Recording Playout failed"); }

+             * WebrtcLog("VoE start Recording Playout end");

+             */

+            // Start playout

+            if (0 != StartPlayout(_channel)) {

+                WebrtcLog("VoE start playout failed");

+                return -1;

+            }

+

+            // Start playout file

+            // if (0 != StartPlayingFileLocally(_channel,

+            // "/sdcard/singleUserDemo.pcm", true)) {

+            // WebrtcLog("VoE start playout file failed");

+            // return -1;

+            // }

+        }

+

+        if (enableSend == true) {

+            if (0 != SetSendDestination(_channel, _destinationPort,

+                            _destinationIP)) {

+                WebrtcLog("VoE set send  destination failed");

+                return -1;

+            }

+

+            if (0 != SetSendCodec(_channel, _codecIndex)) {

+                WebrtcLog("VoE set send codec failed");

+                return -1;

+            }

+

+            /*

+             * if (0 != StartPlayingFileAsMicrophone(_channel,

+             * "/sdcard/singleUserDemo.pcm", true)) {

+             * WebrtcLog("VoE start playing file as microphone failed"); }

+             */

+            if (0 != StartSend(_channel)) {

+                WebrtcLog("VoE start send failed");

+                return -1;

+            }

+

+            // if (0 != StartPlayingFileAsMicrophone(_channel,

+            // "/sdcard/singleUserDemo.pcm", true)) {

+            // WebrtcLog("VoE start playing file as microphone failed");

+            // return -1;

+            // }

+        }

+

+        return 0;

+    }

+

+    // Stops a running call: halts sending when enableSend is set, then

+    // listening/playout when enableReceive is set, and routes audio back to

+    // the loudspeaker. Returns 0 on success, -1 on the first failure.

+    // In native-thread mode it only deletes the engine and returns.

+    int stopCall() {

+

+        if (useNativeThread == true) {

+

+            Delete();

+            return 0;

+        }

+

+        if (enableSend == true) {

+            // Stop playing file as microphone

+            /*

+             * if (0 != StopPlayingFileAsMicrophone(_channel)) {

+             * WebrtcLog("VoE stop playing file as microphone failed"); return

+             * -1; }

+             */

+            // Stop send

+            if (0 != StopSend(_channel)) {

+                WebrtcLog("VoE stop send failed");

+                return -1;

+            }

+        }

+

+        if (enableReceive == true) {

+            // if (0 != StopRecordingPlayout(_channel)) {

+            // WebrtcLog("VoE stop Recording Playout failed");

+            // }

+            // WebrtcLog("VoE stop Recording Playout ended");

+

+            // Stop listen

+            if (0 != StopListen(_channel)) {

+                WebrtcLog("VoE stop listen failed");

+                return -1;

+            }

+

+            // Stop playout file

+            // if (0 != StopPlayingFileLocally(_channel)) {

+            // WebrtcLog("VoE stop playout file failed");

+            // return -1;

+            // }

+

+            // Stop playout

+            if (0 != StopPlayout(_channel)) {

+                WebrtcLog("VoE stop playout failed");

+                return -1;

+            }

+

+            // Route audio to loudspeaker

+            if (0 != SetLoudspeakerStatus(true)) {

+                WebrtcLog("VoE set louspeaker status failed");

+                return -1;

+            }

+        }

+

+        return 0;

+    }

+

+    // Launches the native auto test on a background thread. Returns 0.

+    int startAutoTest() {

+

+        Thread worker = new Thread(_autotestProc);

+        _autotestThread = worker;

+        worker.start();

+

+        return 0;

+    }

+

+    // Body of the auto-test thread started by startAutoTest(); simply

+    // forwards to the native test runner.

+    private Runnable _autotestProc = new Runnable() {

+        public void run() {

+            // TODO(xians): choose test from GUI

+            // 1 = standard, not used

+            // 2 = extended, 2 = base (original comment; presumably meant

+            // "1 = extended, 2 = base" -- verify against RunAutoTest)

+            RunAutoTest(1, 2);

+        }

+    };

+

+    // Applies one of four audio adjustments chosen from the settings menu:

+    // 0 = volume up, 1 = volume down, 2 = route to loudspeaker,

+    // 3 = route to earpiece. Other values are ignored. Always returns 0.

+    int setAudioProperties(int val) {

+

+        switch (val) {

+            case 0: {

+                // Map the internal 0-255 level onto Android volume steps,

+                // go one step up, then map back so the level stays aligned

+                // with a whole Android step.

+                int androidLevel = (_volumeLevel * _maxVolume) / 255;

+                if (androidLevel < _maxVolume) {

+                    _volumeLevel = ((androidLevel + 1) * 255) / _maxVolume;

+                    if (0 != SetSpeakerVolume(_volumeLevel)) {

+                        WebrtcLog("VoE set speaker volume failed");

+                    }

+                }

+                break;

+            }

+            case 1: {

+                // Same mapping as above, one step down.

+                int androidLevel = (_volumeLevel * _maxVolume) / 255;

+                if (androidLevel > 0) {

+                    _volumeLevel = ((androidLevel - 1) * 255) / _maxVolume;

+                    if (0 != SetSpeakerVolume(_volumeLevel)) {

+                        WebrtcLog("VoE set speaker volume failed");

+                    }

+                }

+                break;

+            }

+            case 2:

+                // route audio to back speaker

+                if (0 != SetLoudspeakerStatus(true)) {

+                    WebrtcLog("VoE set loudspeaker status failed");

+                }

+                _audioIndex = 2;

+                break;

+            case 3:

+                // route audio to earpiece

+                if (0 != SetLoudspeakerStatus(false)) {

+                    WebrtcLog("VoE set loudspeaker status failed");

+                }

+                _audioIndex = 3;

+                break;

+        }

+

+        return 0;

+    }

+

+    // Reads up to 64 characters from /sdcard/test.txt and shows them in

+    // TextView01; shows "File not found!" if the file is missing.

+    // Always returns 0 (matching the original contract).

+    int displayTextFromFile() {

+

+        TextView tv = (TextView) findViewById(R.id.TextView01);

+        char[] fileBuffer = new char[64];

+

+        FileReader fr = null;

+        try {

+            fr = new FileReader("/sdcard/test.txt");

+        } catch (FileNotFoundException e) {

+            e.printStackTrace();

+            tv.setText("File not found!");

+            // Bug fix: the original fell through here and dereferenced the

+            // null reader below, crashing with a NullPointerException.

+            return 0;

+        }

+

+        try {

+            fr.read(fileBuffer);

+        } catch (IOException e) {

+            e.printStackTrace();

+        } finally {

+            // Bug fix: the reader was never closed (resource leak).

+            try {

+                fr.close();

+            } catch (IOException e) {

+                e.printStackTrace();

+            }

+        }

+

+        tv.setText(new String(fileBuffer));

+

+        return 0;

+    }

+

+    // Records ~11 s (550 reads of 320 bytes) of 16 kHz mono PCM from the

+    // microphone to /sdcard/record.pcm. Returns 0 on completion, -1 if the

+    // output file cannot be opened. Errors during recording are logged.

+    int recordAudioToFile() {

+        byte[] recBuffer = new byte[320];

+

+        int recBufSize =

+                        AudioRecord.getMinBufferSize(16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+        AudioRecord rec =

+                        new AudioRecord(MediaRecorder.AudioSource.MIC, 16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT,

+                                        recBufSize);

+

+        FileOutputStream out = null;

+        try {

+            out = new FileOutputStream(new File("/sdcard/record.pcm"));

+        } catch (FileNotFoundException e1) {

+            e1.printStackTrace();

+            // Bug fix: the original kept going with a null stream and

+            // crashed with a NullPointerException in the write loop.

+            rec.release();

+            return -1;

+        }

+

+        try {

+            // start recording

+            try {

+                rec.startRecording();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            for (int i = 0; i < 550; i++) {

+                int rdBytes = rec.read(recBuffer, 0, 320);

+                try {

+                    if (rdBytes > 0) {

+                        // Bug fix: persist only the bytes actually read;

+                        // the original always wrote the full buffer.

+                        out.write(recBuffer, 0, rdBytes);

+                    }

+                } catch (IOException e) {

+                    e.printStackTrace();

+                }

+            }

+

+            // stop recording

+            try {

+                rec.stop();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+        } finally {

+            // Bug fix: release the recorder and close the file; the

+            // original leaked both.

+            rec.release();

+            try {

+                out.close();

+            } catch (IOException e) {

+                e.printStackTrace();

+            }

+        }

+

+        return 0;

+    }

+

+    // Plays back /sdcard/record.pcm (16 kHz mono PCM) through the

+    // voice-call stream with 500 writes of 320 bytes each. Returns 0 on

+    // completion, -1 if the input file cannot be opened.

+    int playAudioFromFile() {

+

+        final byte[] playBuffer = new byte[320];

+

+        final int playBufSize =

+                        AudioTrack.getMinBufferSize(16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+        final AudioTrack play =

+                        new AudioTrack(AudioManager.STREAM_VOICE_CALL, 16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT,

+                                        playBufSize, AudioTrack.MODE_STREAM);

+

+        // Playback-position callbacks: only the byte counter is maintained

+        // here; the actual writes happen in the loop below.

+        play.setPlaybackPositionUpdateListener(

+                        new AudioTrack.OnPlaybackPositionUpdateListener() {

+

+            int count = 0;

+

+            public void onPeriodicNotification(AudioTrack track) {

+                count += 320;

+            }

+

+            public void onMarkerReached(AudioTrack track) {

+

+            }

+        });

+

+        FileInputStream in = null;

+        try {

+            in = new FileInputStream(new File("/sdcard/record.pcm"));

+        } catch (FileNotFoundException e1) {

+            e1.printStackTrace();

+            // Bug fix: the original continued with a null stream and

+            // crashed with a NullPointerException in the read loop.

+            play.release();

+            return -1;

+        }

+

+        try {

+            // start playout

+            try {

+                play.play();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // play with multiple writes

+            for (int i = 0; i < 500; i++) {

+                try {

+                    in.read(playBuffer);

+                } catch (IOException e) {

+                    e.printStackTrace();

+                }

+

+                // note, there is a short version of write as well!

+                play.write(playBuffer, 0, 320);

+

+                Log.d("testWrite", "wrote");

+            }

+

+            // stop playout

+            try {

+                play.stop();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+        } finally {

+            // Bug fix: close the input file and release the track; the

+            // original leaked both.

+            try {

+                in.close();

+            } catch (IOException e) {

+                e.printStackTrace();

+            }

+            play.release();

+        }

+

+        return 0;

+    }

+

+    // Prepares an AudioTrack for 16 kHz mono playout and starts _playProc on

+    // a dedicated thread. Source is either a file (/sdcard/singleUserDemo.pcm

+    // when _playFromFile) or the shared circular buffer. Returns 0; a second

+    // call while playout is running is a no-op.

+    int playAudioInThread() {

+

+        if (_isRunningPlay) {

+            return 0;

+        }

+

+        // File fr = null;

+        // final byte[] playBuffer = new byte[320];

+        if (_playFromFile) {

+            _playBuffer = new byte[320];

+        } else {

+            // reset index

+            _playIndex = 0;

+        }

+        // within

+        // onPeriodicNotification

+

+        // Log some info (static)

+        WebrtcLog("Creating AudioTrack object");

+        final int minPlayBufSize =

+                        AudioTrack.getMinBufferSize(16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+        WebrtcLog("Min play buf size = " + minPlayBufSize);

+        WebrtcLog("Min volume = " + AudioTrack.getMinVolume());

+        WebrtcLog("Max volume = " + AudioTrack.getMaxVolume());

+        WebrtcLog("Native sample rate = "

+                        + AudioTrack.getNativeOutputSampleRate(

+                                        AudioManager.STREAM_VOICE_CALL));

+

+        final int playBufSize = minPlayBufSize; // 3200; // 100 ms buffer

+        // byte[] playBuffer = new byte [playBufSize];

+        try {

+            _at = new AudioTrack(

+                            AudioManager.STREAM_VOICE_CALL,

+                            16000,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            playBufSize, AudioTrack.MODE_STREAM);

+        } catch (Exception e) {

+            WebrtcLog(e.getMessage());

+        }

+

+        // NOTE(review): if the AudioTrack constructor threw above, _at is

+        // still null and the logging below will NPE -- confirm intended.

+        // Log some info (non-static)

+        WebrtcLog("Notification marker pos = "

+                        + _at.getNotificationMarkerPosition());

+        WebrtcLog("Play head pos = " + _at.getPlaybackHeadPosition());

+        WebrtcLog("Pos notification dt = "

+                        + _at.getPositionNotificationPeriod());

+        WebrtcLog("Playback rate = " + _at.getPlaybackRate());

+        WebrtcLog("Sample rate = " + _at.getSampleRate());

+

+        // implementation of the playpos callback functions

+        // _at.setPlaybackPositionUpdateListener(

+        // new AudioTrack.OnPlaybackPositionUpdateListener() {

+        //

+        // int count = 3200;

+        //

+        // public void onPeriodicNotification(AudioTrack track) {

+        // // int wrBytes = play.write(playBuffer, count, 320);

+        // count += 320;

+        // }

+        //

+        // public void onMarkerReached(AudioTrack track) {

+        // }

+        // });

+

+        // set the notification period = 160 samples

+        // int ret = _at.setPositionNotificationPeriod(160);

+

+        if (_playFromFile) {

+            _fr = new File("/sdcard/singleUserDemo.pcm");

+            try {

+                _in = new FileInputStream(_fr);

+            } catch (FileNotFoundException e1) {

+                e1.printStackTrace();

+            }

+        }

+

+        // try {

+        // in.read(playBuffer);

+        // } catch (IOException e) {

+        // e.printStackTrace();

+        // }

+

+        _isRunningPlay = true;

+

+        // buffer = new byte[3200];

+        _playThread = new Thread(_playProc);

+        // ar.startRecording();

+        // bytesRead = 3200;

+        // recording = true;

+        _playThread.start();

+

+        return 0;

+    }

+

+    // Signals the playout thread (_playProc) to stop by clearing its run

+    // flag. Always returns 0.

+    int stopPlayAudio() {

+

+        if (_isRunningPlay) {

+            _isRunningPlay = false;

+        }

+

+        return 0;

+    }

+

+    // Playout thread body: at urgent-audio priority, writes up to 3000

+    // chunks to _at -- 320 bytes per iteration from _playBuffer/_in when

+    // _playFromFile, otherwise 160 shorts per iteration from the 50-slot

+    // circular buffer -- then stops, flushes, and releases the track.

+    private Runnable _playProc = new Runnable() {

+        public void run() {

+

+            // set high thread priority

+            android.os.Process.setThreadPriority(

+                            android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+

+            // play all at once

+            // int wrBytes = play.write(playBuffer, 0, 320000);

+

+            // fill the buffer

+            // play.write(playBuffer, 0, 3200);

+

+            // play.flush();

+

+            // start playout

+            try {

+                _at.play();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // play with multiple writes; _isRunningPlay is the stop signal

+            // set by stopPlayAudio()

+            int i = 0;

+            for (; i < 3000 && _isRunningPlay; i++) {

+

+                if (_playFromFile) {

+                    try {

+                        _in.read(_playBuffer);

+                    } catch (IOException e) {

+                        e.printStackTrace();

+                    }

+

+                    int wrBytes = _at.write(_playBuffer, 0 /* i * 320 */, 320);

+                } else {

+                    int wrSamples =

+                                    _at.write(_circBuffer, _playIndex * 160,

+                                                    160);

+

+                    // WebrtcLog("Played 10 ms from buffer, _playIndex = " +

+                    // _playIndex);

+                    // WebrtcLog("Diff = " + (_recIndex - _playIndex));

+

+                    // wrap the 50-slot circular read index

+                    if (_playIndex == 49) {

+                        _playIndex = 0;

+                    } else {

+                        _playIndex += 1;

+                    }

+                }

+

+                // WebrtcLog("Wrote 10 ms to buffer, head = "

+                // + _at.getPlaybackHeadPosition());

+            }

+

+            // stop playout

+            try {

+                _at.stop();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // returns the number of samples that has been written

+            WebrtcLog("Test stopped, i = " + i + ", head = "

+                            + _at.getPlaybackHeadPosition());

+            int headPos = _at.getPlaybackHeadPosition();

+

+            // flush the buffers

+            _at.flush();

+

+            // release the object

+            _at.release();

+            _at = null;

+

+            // try {

+            // Thread.sleep() must be within a try - catch block

+            // Thread.sleep(3000);

+            // }catch (Exception e){

+            // System.out.println(e.getMessage());

+            // }

+

+            _isRunningPlay = false;

+

+        }

+    };

+

+    // Prepares an AudioRecord for 16 kHz mono capture and starts _recProc on

+    // a dedicated thread. Returns 0; a second call while recording is

+    // running is a no-op.

+    int recAudioInThread() {

+

+        if (_isRunningRec) {

+            return 0;

+        }

+

+        // within

+        // onPeriodicNotification

+

+        // reset index; starting at 20 keeps the record index ahead of the

+        // playout index into the shared circular buffer

+        _recIndex = 20;

+

+        // Log some info (static)

+        WebrtcLog("Creating AudioRecord object");

+        final int minRecBufSize = AudioRecord.getMinBufferSize(16000,

+                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                        AudioFormat.ENCODING_PCM_16BIT);

+        WebrtcLog("Min rec buf size = " + minRecBufSize);

+        // WebrtcLog("Min volume = " + AudioTrack.getMinVolume());

+        // WebrtcLog("Max volume = " + AudioTrack.getMaxVolume());

+        // WebrtcLog("Native sample rate = "

+        // + AudioRecord

+        // .getNativeInputSampleRate(AudioManager.STREAM_VOICE_CALL));

+

+        final int recBufSize = minRecBufSize; // 3200; // 100 ms buffer

+        try {

+            _ar = new AudioRecord(

+                            MediaRecorder.AudioSource.MIC,

+                            16000,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            recBufSize);

+        } catch (Exception e) {

+            WebrtcLog(e.getMessage());

+        }

+

+        // NOTE(review): if the AudioRecord constructor threw above, _ar is

+        // still null and the logging below will NPE -- confirm intended.

+        // Log some info (non-static)

+        WebrtcLog("Notification marker pos = "

+                        + _ar.getNotificationMarkerPosition());

+        // WebrtcLog("Play head pos = " + _ar.getRecordHeadPosition());

+        WebrtcLog("Pos notification dt rec= "

+                        + _ar.getPositionNotificationPeriod());

+        // WebrtcLog("Playback rate = " + _ar.getRecordRate());

+        // WebrtcLog("Playback rate = " + _ar.getPlaybackRate());

+        WebrtcLog("Sample rate = " + _ar.getSampleRate());

+        // WebrtcLog("Playback rate = " + _ar.getPlaybackRate());

+        // WebrtcLog("Playback rate = " + _ar.getPlaybackRate());

+

+        _isRunningRec = true;

+

+        _recThread = new Thread(_recProc);

+

+        _recThread.start();

+

+        return 0;

+    }

+

+    // Signals the recording thread (_recProc) to stop by clearing its run

+    // flag. Always returns 0.

+    int stopRecAudio() {

+

+        if (_isRunningRec) {

+            _isRunningRec = false;

+        }

+

+        return 0;

+    }

+

+    // Recording thread body: at urgent-audio priority, reads up to 3000

+    // 160-sample chunks from _ar -- into a scratch buffer when

+    // _runThroughNativeLayer, otherwise into the 50-slot circular buffer --

+    // then stops and releases the recorder.

+    private Runnable _recProc = new Runnable() {

+        public void run() {

+

+            // set high thread priority

+            android.os.Process.setThreadPriority(

+                            android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+

+            // start recording

+            try {

+                _ar.startRecording();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // keep recording to circular buffer

+            // for a while; _isRunningRec is the stop signal set by

+            // stopRecAudio()

+            int i = 0;

+            int rdSamples = 0;

+            short[] tempBuffer = new short[160]; // Only used for native case

+

+            for (; i < 3000 && _isRunningRec; i++) {

+                if (_runThroughNativeLayer) {

+                    rdSamples = _ar.read(tempBuffer, 0, 160);

+                    // audioLoop(tempBuffer, 160); // Insert into native layer

+                } else {

+                    rdSamples = _ar.read(_circBuffer, _recIndex * 160, 160);

+

+                    // WebrtcLog("Recorded 10 ms to buffer, _recIndex = " +

+                    // _recIndex);

+                    // WebrtcLog("rdSamples = " + rdSamples);

+

+                    // wrap the 50-slot circular write index

+                    if (_recIndex == 49) {

+                        _recIndex = 0;

+                    } else {

+                        _recIndex += 1;

+                    }

+                }

+            }

+

+            // stop recording

+            try {

+                _ar.stop();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // release the object

+            _ar.release();

+            _ar = null;

+

+            // try {

+            // Thread.sleep() must be within a try - catch block

+            // Thread.sleep(3000);

+            // }catch (Exception e){

+            // System.out.println(e.getMessage());

+            // }

+

+            _isRunningRec = false;

+

+            // returns the number of samples that has been written

+            // WebrtcLog("Test stopped, i = " + i + ", head = "

+            // + _at.getPlaybackHeadPosition());

+            // int headPos = _at.getPlaybackHeadPosition();

+        }

+    };

+

+    private void WebrtcLog(String msg) {

+        Log.d("*Webrtc*", msg);

+    }

+

+    // //////////////// Native function prototypes ////////////////////

+

+    private native static boolean NativeInit();

+

+    private native int RunAutoTest(int testType, int extendedSel);

+

+    private native boolean Create();

+

+    private native boolean Delete();

+

+    private native int Init(boolean enableTrace, boolean useExtTrans);

+

+    private native int Terminate();

+

+    private native int CreateChannel();

+

+    private native int DeleteChannel(int channel);

+

+    private native int SetLocalReceiver(int channel, int port);

+

+    private native int SetSendDestination(int channel, int port,

+                    String ipaddr);

+

+    private native int StartListen(int channel);

+

+    private native int StartPlayout(int channel);

+

+    private native int StartSend(int channel);

+

+    private native int StopListen(int channel);

+

+    private native int StopPlayout(int channel);

+

+    private native int StopSend(int channel);

+

+    private native int StartPlayingFileLocally(int channel, String fileName,

+                    boolean loop);

+

+    private native int StopPlayingFileLocally(int channel);

+

+    private native int StartRecordingPlayout(int channel, String fileName,

+                    boolean loop);

+

+    private native int StopRecordingPlayout(int channel);

+

+    private native int StartPlayingFileAsMicrophone(int channel,

+                    String fileName, boolean loop);

+

+    private native int StopPlayingFileAsMicrophone(int channel);

+

+    private native int NumOfCodecs();

+

+    private native int SetSendCodec(int channel, int index);

+

+    private native int SetVADStatus(int channel, boolean enable, int mode);

+

+    private native int SetNSStatus(boolean enable, int mode);

+

+    private native int SetAGCStatus(boolean enable, int mode);

+

+    private native int SetECStatus(boolean enable, int mode);

+

+    private native int SetSpeakerVolume(int volume);

+

+    private native int SetLoudspeakerStatus(boolean enable);

+

+    /*

+     * this is used to load the 'webrtc-voice-demo-jni'

+     * library on application startup.

+     * The library has already been unpacked into

+     * /data/data/webrtc.android.AndroidTest/lib/libwebrtc-voice-demo-jni.so

+     * at installation time by the package manager.

+     */

+    static {

+        Log.d("*Webrtc*", "Loading webrtc-voice-demo-jni...");

+        System.loadLibrary("webrtc-voice-demo-jni");

+

+        Log.d("*Webrtc*", "Calling native init...");

+        if (!NativeInit()) {

+            Log.e("*Webrtc*", "Native init failed");

+            throw new RuntimeException("Native init failed");

+        } else {

+            Log.d("*Webrtc*", "Native init successful");

+        }

+    }

+}

diff --git a/trunk/src/voice_engine/main/test/auto_test/Android.mk b/trunk/src/voice_engine/main/test/auto_test/Android.mk
new file mode 100644
index 0000000..cfb9d67
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/Android.mk
@@ -0,0 +1,56 @@
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH:= $(call my-dir)
+
+# voice engine test app
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES:= \
+    automated_mode.cc \
+    voe_cpu_test.cc \
+    voe_standard_test.cc \
+    voe_stress_test.cc \
+    voe_unit_test.cc \
+    voe_extended_test.cc \
+    voe_standard_integration_test.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    '-DWEBRTC_TARGET_PC' \
+    '-DWEBRTC_LINUX' \
+    '-DWEBRTC_ANDROID'
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../modules/audio_device/main/interface \
+    $(LOCAL_PATH)/../../../../modules/interface \
+    $(LOCAL_PATH)/../../../../system_wrappers/interface \
+    $(LOCAL_PATH)/../../../../../test \
+    external/gtest/include \
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils \
+    libstlport \
+    libwebrtc
+
+LOCAL_MODULE:= webrtc_voe_autotest
+
+ifdef NDK_ROOT
+include $(BUILD_EXECUTABLE)
+else
+include external/stlport/libstlport.mk
+include $(BUILD_NATIVE_TEST)
+endif
+
diff --git a/trunk/src/voice_engine/main/test/auto_test/automated_mode.cc b/trunk/src/voice_engine/main/test/auto_test/automated_mode.cc
new file mode 100644
index 0000000..13fa257
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/automated_mode.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+int RunInAutomatedMode(int argc, char** argv) {
+  testing::InitGoogleTest(&argc, argv);
+
+  return RUN_ALL_TESTS();
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/automated_mode.h b/trunk/src/voice_engine/main/test/auto_test/automated_mode.h
new file mode 100644
index 0000000..cd7ab9e
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/automated_mode.h
@@ -0,0 +1,16 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_AUTOMATED_MODE_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_AUTOMATED_MODE_H_
+
+int RunInAutomatedMode(int argc, char** argv);
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_AUTOMATED_MODE_H_
diff --git a/trunk/src/voice_engine/main/test/auto_test/fixtures/after_initialization_fixture.cc b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_initialization_fixture.cc
new file mode 100644
index 0000000..f0e665b
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_initialization_fixture.cc
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+class TestErrorObserver : public webrtc::VoiceEngineObserver {
+ public:
+  TestErrorObserver() {}
+  virtual ~TestErrorObserver() {}
+  void CallbackOnError(const int channel, const int error_code) {
+    ADD_FAILURE() << "Unexpected error on channel " << channel <<
+        ": error code " << error_code;
+  }
+};
+
+AfterInitializationFixture::AfterInitializationFixture()
+    : error_observer_(new TestErrorObserver()) {
+  EXPECT_EQ(0, voe_base_->Init());
+
+#if defined(WEBRTC_ANDROID)
+  EXPECT_EQ(0, voe_hardware_->SetLoudspeakerStatus(false));
+#endif
+
+  EXPECT_EQ(0, voe_base_->RegisterVoiceEngineObserver(*error_observer_));
+}
+
+AfterInitializationFixture::~AfterInitializationFixture() {
+  EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/fixtures/after_initialization_fixture.h b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_initialization_fixture.h
new file mode 100644
index 0000000..bbdd64d
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_initialization_fixture.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_AFTER_INIT_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_AFTER_INIT_H_
+
+#include "before_initialization_fixture.h"
+#include "scoped_ptr.h"
+
+class TestErrorObserver;
+
+// This fixture initializes the voice engine in addition to the work
+// done by the before-initialization fixture. It also registers an error
+// observer which will fail tests on error callbacks. This fixture is
+// useful to tests that want to run before we have started any form of
+// streaming through the voice engine.
+class AfterInitializationFixture : public BeforeInitializationFixture {
+ public:
+  AfterInitializationFixture();
+  virtual ~AfterInitializationFixture();
+ protected:
+  webrtc::scoped_ptr<TestErrorObserver> error_observer_;
+};
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_AFTER_INIT_H_
diff --git a/trunk/src/voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.cc b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.cc
new file mode 100644
index 0000000..e1a4820
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.cc
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+#include <cstring>
+
+static const char* kLoopbackIp = "127.0.0.1";
+
+AfterStreamingFixture::AfterStreamingFixture()
+    : channel_(voe_base_->CreateChannel()) {
+  EXPECT_GE(channel_, 0);
+
+  fake_microphone_input_file_ = resource_manager_.long_audio_file_path();
+  EXPECT_FALSE(fake_microphone_input_file_.empty());
+
+  SetUpLocalPlayback();
+  StartPlaying();
+}
+
+AfterStreamingFixture::~AfterStreamingFixture() {
+  voe_file_->StopPlayingFileAsMicrophone(channel_);
+  voe_base_->StopSend(channel_);
+  voe_base_->StopPlayout(channel_);
+  voe_base_->StopReceive(channel_);
+
+  voe_base_->DeleteChannel(channel_);
+}
+
+void AfterStreamingFixture::SwitchToManualMicrophone() {
+  EXPECT_EQ(0, voe_file_->StopPlayingFileAsMicrophone(channel_));
+
+  TEST_LOG("You need to speak manually into the microphone for this test.\n");
+  TEST_LOG("Please start speaking now.\n");
+  Sleep(2000);
+}
+
+void AfterStreamingFixture::RestartFakeMicrophone() {
+  EXPECT_EQ(0, voe_file_->StartPlayingFileAsMicrophone(
+        channel_, fake_microphone_input_file_.c_str(), true, true));
+}
+
+void AfterStreamingFixture::SetUpLocalPlayback() {
+  EXPECT_EQ(0, voe_base_->SetSendDestination(channel_, 8000, kLoopbackIp));
+  EXPECT_EQ(0, voe_base_->SetLocalReceiver(0, 8000));
+
+  webrtc::CodecInst codec;
+  codec.channels = 1;
+  codec.pacsize = 160;
+  codec.plfreq = 8000;
+  codec.pltype = 0;
+  codec.rate = 64000;
+  strcpy(codec.plname, "PCMU");
+
+  voe_codec_->SetSendCodec(channel_, codec);
+}
+
+void AfterStreamingFixture::StartPlaying() {
+  EXPECT_EQ(0, voe_base_->StartReceive(channel_));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  RestartFakeMicrophone();
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.h b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.h
new file mode 100644
index 0000000..c589a4c
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_AFTER_STREAMING_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_AFTER_STREAMING_H_
+
+#include "after_initialization_fixture.h"
+#include "resource_manager.h"
+
+// This fixture will, in addition to the work done by its superclasses,
+// create a channel and start playing a file through the fake microphone
+// to simulate microphone input. The purpose is to make it convenient
+// to write tests that require microphone input.
+class AfterStreamingFixture : public AfterInitializationFixture {
+ public:
+  AfterStreamingFixture();
+  virtual ~AfterStreamingFixture();
+
+ protected:
+  int             channel_;
+  ResourceManager resource_manager_;
+  std::string     fake_microphone_input_file_;
+
+  // Shuts off the fake microphone for this test.
+  void SwitchToManualMicrophone();
+
+  // Restarts the fake microphone if it's been shut off earlier.
+  void RestartFakeMicrophone();
+
+ private:
+  void SetUpLocalPlayback();
+  void StartPlaying();
+};
+
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_AFTER_STREAMING_H_
diff --git a/trunk/src/voice_engine/main/test/auto_test/fixtures/before_initialization_fixture.cc b/trunk/src/voice_engine/main/test/auto_test/fixtures/before_initialization_fixture.cc
new file mode 100644
index 0000000..408141b
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/fixtures/before_initialization_fixture.cc
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "before_initialization_fixture.h"
+
+#include "voice_engine_defines.h"
+
+BeforeInitializationFixture::BeforeInitializationFixture()
+    : voice_engine_(webrtc::VoiceEngine::Create()) {
+  EXPECT_TRUE(voice_engine_ != NULL);
+
+  voe_base_ = webrtc::VoEBase::GetInterface(voice_engine_);
+  voe_codec_ = webrtc::VoECodec::GetInterface(voice_engine_);
+  voe_volume_control_ = webrtc::VoEVolumeControl::GetInterface(voice_engine_);
+  voe_dtmf_ = webrtc::VoEDtmf::GetInterface(voice_engine_);
+  voe_rtp_rtcp_ = webrtc::VoERTP_RTCP::GetInterface(voice_engine_);
+  voe_apm_ = webrtc::VoEAudioProcessing::GetInterface(voice_engine_);
+  voe_network_ = webrtc::VoENetwork::GetInterface(voice_engine_);
+  voe_file_ = webrtc::VoEFile::GetInterface(voice_engine_);
+  voe_vsync_ = webrtc::VoEVideoSync::GetInterface(voice_engine_);
+  voe_encrypt_ = webrtc::VoEEncryption::GetInterface(voice_engine_);
+  voe_hardware_ = webrtc::VoEHardware::GetInterface(voice_engine_);
+  voe_xmedia_ = webrtc::VoEExternalMedia::GetInterface(voice_engine_);
+  voe_call_report_ = webrtc::VoECallReport::GetInterface(voice_engine_);
+  voe_neteq_stats_ = webrtc::VoENetEqStats::GetInterface(voice_engine_);
+}
+
+BeforeInitializationFixture::~BeforeInitializationFixture() {
+  EXPECT_EQ(0, voe_base_->Release());
+  EXPECT_EQ(0, voe_codec_->Release());
+  EXPECT_EQ(0, voe_volume_control_->Release());
+  EXPECT_EQ(0, voe_dtmf_->Release());
+  EXPECT_EQ(0, voe_rtp_rtcp_->Release());
+  EXPECT_EQ(0, voe_apm_->Release());
+  EXPECT_EQ(0, voe_network_->Release());
+  EXPECT_EQ(0, voe_file_->Release());
+  EXPECT_EQ(0, voe_vsync_->Release());
+  EXPECT_EQ(0, voe_encrypt_->Release());
+  EXPECT_EQ(0, voe_hardware_->Release());
+  EXPECT_EQ(0, voe_xmedia_->Release());
+  EXPECT_EQ(0, voe_call_report_->Release());
+  EXPECT_EQ(0, voe_neteq_stats_->Release());
+
+  EXPECT_TRUE(webrtc::VoiceEngine::Delete(voice_engine_));
+}
+
+void BeforeInitializationFixture::Sleep(long milliseconds) {
+  // Implementation note: This method is used to reduce usage of the macro and
+  // avoid ugly errors in Eclipse (its parser can't deal with the sleep macro).
+  SLEEP(milliseconds);
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/fixtures/before_initialization_fixture.h b/trunk/src/voice_engine/main/test/auto_test/fixtures/before_initialization_fixture.h
new file mode 100644
index 0000000..ef1636b
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/fixtures/before_initialization_fixture.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_H_
+
+#include <assert.h>
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "voe_audio_processing.h"
+#include "voe_base.h"
+#include "voe_call_report.h"
+#include "voe_codec.h"
+#include "voe_dtmf.h"
+#include "voe_encryption.h"
+#include "voe_errors.h"
+#include "voe_external_media.h"
+#include "voe_file.h"
+#include "voe_hardware.h"
+#include "voe_neteq_stats.h"
+#include "voe_network.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_test_defines.h"
+#include "voe_video_sync.h"
+#include "voe_volume_control.h"
+
+// TODO(qhogpat): Remove these undefs once the clashing macros are gone.
+#undef TEST
+#undef ASSERT_TRUE
+#undef ASSERT_FALSE
+#include "gtest/gtest.h"
+#include "gmock/gmock.h"
+
+// This convenient fixture sets up all voice engine interfaces automatically for
+// use by testing subclasses. It allocates each interface and releases it once
+// which means that if a test allocates additional interfaces from the voice
+// engine and forgets to release it, this test will fail in the destructor.
+// It will not call any init methods.
+//
+// Implementation note:
+// The interface fetching is done in the constructor and not SetUp() since
+// this relieves our subclasses from calling SetUp in the superclass if they
+// choose to override SetUp() themselves. This is fine as googletest will
+// construct new test objects for each method.
+class BeforeInitializationFixture : public testing::Test {
+ public:
+  BeforeInitializationFixture();
+  virtual ~BeforeInitializationFixture();
+
+ protected:
+  // Use this sleep function to sleep in test (avoid sleep macro).
+  void Sleep(long milliseconds);
+
+  webrtc::VoiceEngine*        voice_engine_;
+  webrtc::VoEBase*            voe_base_;
+  webrtc::VoECodec*           voe_codec_;
+  webrtc::VoEVolumeControl*   voe_volume_control_;
+  webrtc::VoEDtmf*            voe_dtmf_;
+  webrtc::VoERTP_RTCP*        voe_rtp_rtcp_;
+  webrtc::VoEAudioProcessing* voe_apm_;
+  webrtc::VoENetwork*         voe_network_;
+  webrtc::VoEFile*            voe_file_;
+  webrtc::VoEVideoSync*       voe_vsync_;
+  webrtc::VoEEncryption*      voe_encrypt_;
+  webrtc::VoEHardware*        voe_hardware_;
+  webrtc::VoEExternalMedia*   voe_xmedia_;
+  webrtc::VoECallReport*      voe_call_report_;
+  webrtc::VoENetEqStats*      voe_neteq_stats_;
+};
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_H_
diff --git a/trunk/src/voice_engine/main/test/auto_test/resource_manager.cc b/trunk/src/voice_engine/main/test/auto_test/resource_manager.cc
new file mode 100644
index 0000000..65669ed
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/resource_manager.cc
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "resource_manager.h"
+
+#include "testsupport/fileutils.h"
+
+ResourceManager::ResourceManager() {
+  std::string filename = "audio_long16.pcm";
+#if defined(WEBRTC_ANDROID)
+  long_audio_file_path_ = "/sdcard/" + filename;
+#else
+  std::string resource_path = webrtc::test::ProjectRootPath();
+  if (resource_path == webrtc::test::kCannotFindProjectRootDir) {
+    long_audio_file_path_ = "";
+  } else {
+    long_audio_file_path_ =
+        resource_path + "test/data/voice_engine/" + filename;
+  }
+#endif
+}
+
diff --git a/trunk/src/voice_engine/main/test/auto_test/resource_manager.h b/trunk/src/voice_engine/main/test/auto_test/resource_manager.h
new file mode 100644
index 0000000..1bb91cf
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/resource_manager.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_RESOURCE_MANAGER_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_RESOURCE_MANAGER_H_
+
+#include <string>
+
+class ResourceManager {
+ public:
+  ResourceManager();
+
+  // Returns the full path to a long audio file.
+  // Returns the empty string on failure.
+  const std::string& long_audio_file_path() const {
+    return long_audio_file_path_;
+  }
+
+ private:
+  std::string long_audio_file_path_;
+};
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_RESOURCE_MANAGER_H_
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/audio_processing_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/audio_processing_test.cc
new file mode 100644
index 0000000..bfe7f49
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/audio_processing_test.cc
@@ -0,0 +1,364 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voe_standard_test.h"
+
+class AudioProcessingTest : public AfterStreamingFixture {
+ protected:
+  // Note: Be careful with this one, it is used in the
+  // Android / iPhone part too.
+  void TryEnablingAgcWithMode(webrtc::AgcModes agc_mode_to_set) {
+    EXPECT_EQ(0, voe_apm_->SetAgcStatus(true, agc_mode_to_set));
+
+    bool agc_enabled = false;
+    webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
+
+    EXPECT_EQ(0, voe_apm_->GetAgcStatus(agc_enabled, agc_mode));
+    EXPECT_TRUE(agc_enabled);
+    EXPECT_EQ(agc_mode_to_set, agc_mode);
+  }
+
+  void TryEnablingRxAgcWithMode(webrtc::AgcModes agc_mode_to_set) {
+    EXPECT_EQ(0, voe_apm_->SetRxAgcStatus(channel_, true, agc_mode_to_set));
+
+    bool rx_agc_enabled = false;
+    webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
+
+    EXPECT_EQ(0, voe_apm_->GetRxAgcStatus(channel_, rx_agc_enabled, agc_mode));
+    EXPECT_TRUE(rx_agc_enabled);
+    EXPECT_EQ(agc_mode_to_set, agc_mode);
+  }
+
+  // EC modes can map to other EC modes, so we have a separate parameter
+  // for what we expect the EC mode to be set to.
+  void TryEnablingEcWithMode(webrtc::EcModes ec_mode_to_set,
+                             webrtc::EcModes expected_mode) {
+    EXPECT_EQ(0, voe_apm_->SetEcStatus(true, ec_mode_to_set));
+
+    bool ec_enabled = true;
+    webrtc::EcModes ec_mode = webrtc::kEcDefault;
+
+    EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+
+    EXPECT_EQ(expected_mode, ec_mode);
+  }
+
+  // Here, the CNG mode will be expected to be on or off depending on the mode.
+  void TryEnablingAecmWithMode(webrtc::AecmModes aecm_mode_to_set,
+                               bool cng_enabled_to_set) {
+    EXPECT_EQ(0, voe_apm_->SetAecmMode(aecm_mode_to_set, cng_enabled_to_set));
+
+    bool cng_enabled = false;
+    webrtc::AecmModes aecm_mode = webrtc::kAecmEarpiece;
+
+    voe_apm_->GetAecmMode(aecm_mode, cng_enabled);
+
+    EXPECT_EQ(cng_enabled_to_set, cng_enabled);
+    EXPECT_EQ(aecm_mode_to_set, aecm_mode);
+  }
+
+  void TryEnablingNsWithMode(webrtc::NsModes ns_mode_to_set,
+                             webrtc::NsModes expected_ns_mode) {
+    EXPECT_EQ(0, voe_apm_->SetNsStatus(true, ns_mode_to_set));
+
+    bool ns_status = true;
+    webrtc::NsModes ns_mode = webrtc::kNsDefault;
+    EXPECT_EQ(0, voe_apm_->GetNsStatus(ns_status, ns_mode));
+
+    EXPECT_TRUE(ns_status);
+    EXPECT_EQ(expected_ns_mode, ns_mode);
+  }
+
+  void TryEnablingRxNsWithMode(webrtc::NsModes ns_mode_to_set,
+                               webrtc::NsModes expected_ns_mode) {
+    EXPECT_EQ(0, voe_apm_->SetRxNsStatus(channel_, true, ns_mode_to_set));
+
+    bool ns_status = true;
+    webrtc::NsModes ns_mode = webrtc::kNsDefault;
+    EXPECT_EQ(0, voe_apm_->GetRxNsStatus(channel_, ns_status, ns_mode));
+
+    EXPECT_TRUE(ns_status);
+    EXPECT_EQ(expected_ns_mode, ns_mode);
+  }
+
+  void TryDetectingSilence() {
+    // Here, speech is running. Shut down speech.
+    EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
+    EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, true));
+    EXPECT_EQ(0, voe_file_->StopPlayingFileAsMicrophone(channel_));
+
+    // We should detect the silence after a short time.
+    Sleep(50);
+    for (int i = 0; i < 25; i++) {
+      EXPECT_EQ(0, voe_apm_->VoiceActivityIndicator(channel_));
+      Sleep(10);
+    }
+  }
+
+  void TryDetectingSpeechAfterSilence() {
+    // Re-enable speech.
+    RestartFakeMicrophone();
+    EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, false));
+    EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, false));
+
+    // We should detect the speech after a short time.
+    for (int i = 0; i < 50; i++) {
+      if (voe_apm_->VoiceActivityIndicator(channel_) == 1) {
+        return;
+      }
+      Sleep(10);
+    }
+
+    ADD_FAILURE() << "Failed to detect speech within 500 ms.";
+  }
+};
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+
+TEST_F(AudioProcessingTest, AgcIsOnByDefault) {
+  bool agc_enabled = false;
+  webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
+
+  EXPECT_EQ(0, voe_apm_->GetAgcStatus(agc_enabled, agc_mode));
+  EXPECT_TRUE(agc_enabled);
+  EXPECT_EQ(webrtc::kAgcAdaptiveAnalog, agc_mode);
+}
+
+TEST_F(AudioProcessingTest, CanEnableAgcWithAllModes) {
+  TryEnablingAgcWithMode(webrtc::kAgcAdaptiveDigital);
+  TryEnablingAgcWithMode(webrtc::kAgcAdaptiveAnalog);
+  TryEnablingAgcWithMode(webrtc::kAgcFixedDigital);
+}
+
+TEST_F(AudioProcessingTest, EcIsDisabledAndAecIsDefaultEcMode) {
+  bool ec_enabled = true;
+  webrtc::EcModes ec_mode = webrtc::kEcDefault;
+
+  EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+  EXPECT_FALSE(ec_enabled);
+  EXPECT_EQ(webrtc::kEcAec, ec_mode);
+}
+
+TEST_F(AudioProcessingTest, EnablingEcAecShouldEnableEcAec) {
+  TryEnablingEcWithMode(webrtc::kEcAec, webrtc::kEcAec);
+}
+
+TEST_F(AudioProcessingTest, EnablingEcConferenceShouldEnableEcAec) {
+  TryEnablingEcWithMode(webrtc::kEcConference, webrtc::kEcAec);
+}
+
+TEST_F(AudioProcessingTest, EcMetricsAreOffByDefault) {
+  bool enabled = true;
+  EXPECT_EQ(0, voe_apm_->GetEcMetricsStatus(enabled));
+  EXPECT_FALSE(enabled);
+}
+
+TEST_F(AudioProcessingTest, ManualTestEcMetrics) {
+  SwitchToManualMicrophone();
+
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+
+  // Must enable AEC to get valid echo metrics.
+  EXPECT_EQ(0, voe_apm_->SetEcStatus(true, webrtc::kEcAec));
+
+  TEST_LOG("Speak into microphone and check metrics for 10 seconds...\n");
+  int erl, erle, rerl, a_nlp;
+  int delay_median = 0;
+  int delay_std = 0;
+
+  for (int i = 0; i < 5; i++) {
+    Sleep(2000);
+    EXPECT_EQ(0, voe_apm_->GetEchoMetrics(erl, erle, rerl, a_nlp));
+    EXPECT_EQ(0, voe_apm_->GetEcDelayMetrics(delay_median, delay_std));
+    TEST_LOG("    Echo  : ERL=%5d, ERLE=%5d, RERL=%5d, A_NLP=%5d [dB], "
+        " delay median=%3d, delay std=%3d [ms]\n", erl, erle, rerl, a_nlp,
+        delay_median, delay_std);
+  }
+
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(false));
+}
+
+// TODO(phoglund): Reenable below test when it's no longer flaky.
+TEST_F(AudioProcessingTest, DISABLED_TestVoiceActivityDetectionWithObserver) {
+  voetest::RxCallback rx_callback;
+  EXPECT_EQ(0, voe_apm_->RegisterRxVadObserver(channel_, rx_callback));
+
+  // The extra sleeps are to allow decisions some time to propagate to the
+  // observer.
+  TryDetectingSilence();
+  Sleep(100);
+
+  EXPECT_EQ(0, rx_callback._vadDecision);
+
+  TryDetectingSpeechAfterSilence();
+  Sleep(100);
+
+  EXPECT_EQ(1, rx_callback._vadDecision);
+
+  EXPECT_EQ(0, voe_apm_->DeRegisterRxVadObserver(channel_));
+}
+
+#endif  // !MAC_IPHONE && !WEBRTC_ANDROID
+
+TEST_F(AudioProcessingTest, EnablingEcAecmShouldEnableEcAecm) {
+  // This one apparently applies to Android and iPhone as well.
+  TryEnablingEcWithMode(webrtc::kEcAecm, webrtc::kEcAecm);
+}
+
+TEST_F(AudioProcessingTest, EcAecmModeIsEnabledAndSpeakerphoneByDefault) {
+  bool cng_enabled = false;
+  webrtc::AecmModes aecm_mode = webrtc::kAecmEarpiece;
+
+  voe_apm_->GetAecmMode(aecm_mode, cng_enabled);
+
+  EXPECT_TRUE(cng_enabled);
+  EXPECT_EQ(webrtc::kAecmSpeakerphone, aecm_mode);
+}
+
+TEST_F(AudioProcessingTest, CanSetAecmMode) {
+  EXPECT_EQ(0, voe_apm_->SetEcStatus(true, webrtc::kEcAecm));
+
+  // Try some AECM mode - CNG enabled combinations.
+  TryEnablingAecmWithMode(webrtc::kAecmEarpiece, true);
+  TryEnablingAecmWithMode(webrtc::kAecmEarpiece, false);
+  TryEnablingAecmWithMode(webrtc::kAecmLoudEarpiece, true);
+  TryEnablingAecmWithMode(webrtc::kAecmLoudSpeakerphone, false);
+  TryEnablingAecmWithMode(webrtc::kAecmQuietEarpieceOrHeadset, true);
+  TryEnablingAecmWithMode(webrtc::kAecmSpeakerphone, false);
+}
+
+TEST_F(AudioProcessingTest, RxAgcShouldBeOffByDefault) {
+  bool rx_agc_enabled = true;
+  webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
+
+  EXPECT_EQ(0, voe_apm_->GetRxAgcStatus(channel_, rx_agc_enabled, agc_mode));
+  EXPECT_FALSE(rx_agc_enabled);
+  EXPECT_EQ(webrtc::kAgcAdaptiveDigital, agc_mode);
+}
+
+TEST_F(AudioProcessingTest, CanTurnOnDigitalRxAcg) {
+  TryEnablingRxAgcWithMode(webrtc::kAgcAdaptiveDigital);
+  TryEnablingRxAgcWithMode(webrtc::kAgcFixedDigital);
+}
+
+TEST_F(AudioProcessingTest, CannotTurnOnAdaptiveAnalogRxAgc) {
+  EXPECT_EQ(-1, voe_apm_->SetRxAgcStatus(
+      channel_, true, webrtc::kAgcAdaptiveAnalog));
+}
+
+TEST_F(AudioProcessingTest, NsIsOffWithModerateSuppressionByDefault) {
+  bool ns_status = true;
+  webrtc::NsModes ns_mode = webrtc::kNsDefault;
+  EXPECT_EQ(0, voe_apm_->GetNsStatus(ns_status, ns_mode));
+
+  EXPECT_FALSE(ns_status);
+  EXPECT_EQ(webrtc::kNsModerateSuppression, ns_mode);
+}
+
+TEST_F(AudioProcessingTest, CanSetNsMode) {
+  // Concrete suppression values map to themselves.
+  TryEnablingNsWithMode(webrtc::kNsHighSuppression,
+                        webrtc::kNsHighSuppression);
+  TryEnablingNsWithMode(webrtc::kNsLowSuppression,
+                        webrtc::kNsLowSuppression);
+  TryEnablingNsWithMode(webrtc::kNsModerateSuppression,
+                        webrtc::kNsModerateSuppression);
+  TryEnablingNsWithMode(webrtc::kNsVeryHighSuppression,
+                        webrtc::kNsVeryHighSuppression);
+
+  // Conference and Default map to concrete values.
+  TryEnablingNsWithMode(webrtc::kNsConference,
+                        webrtc::kNsHighSuppression);
+  TryEnablingNsWithMode(webrtc::kNsDefault,
+                        webrtc::kNsModerateSuppression);
+}
+
+TEST_F(AudioProcessingTest, RxNsIsOffWithModerateSuppressionByDefault) {
+  bool ns_status = true;
+  webrtc::NsModes ns_mode = webrtc::kNsDefault;
+  EXPECT_EQ(0, voe_apm_->GetRxNsStatus(channel_, ns_status, ns_mode));
+
+  EXPECT_FALSE(ns_status);
+  EXPECT_EQ(webrtc::kNsModerateSuppression, ns_mode);
+}
+
+TEST_F(AudioProcessingTest, CanSetRxNsMode) {
+  EXPECT_EQ(0, voe_apm_->SetRxNsStatus(channel_, true));
+
+  // See comments on the regular NS test above.
+  TryEnablingRxNsWithMode(webrtc::kNsHighSuppression,
+                          webrtc::kNsHighSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsLowSuppression,
+                          webrtc::kNsLowSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsModerateSuppression,
+                          webrtc::kNsModerateSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsVeryHighSuppression,
+                          webrtc::kNsVeryHighSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsConference,
+                          webrtc::kNsHighSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsDefault,
+                          webrtc::kNsModerateSuppression);
+}
+
+TEST_F(AudioProcessingTest, VadIsDisabledByDefault) {
+  bool vad_enabled;
+  bool disabled_dtx;
+  webrtc::VadModes vad_mode;
+
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, disabled_dtx));
+
+  EXPECT_FALSE(vad_enabled);
+}
+
+TEST_F(AudioProcessingTest, VoiceActivityIndicatorReturns1WithSpeechOn) {
+  // This sleep is necessary since the voice detection algorithm needs some
+  // time to detect the speech from the fake microphone.
+  Sleep(500);
+  EXPECT_EQ(1, voe_apm_->VoiceActivityIndicator(channel_));
+}
+
+#if defined(MAC_IPHONE) || defined(WEBRTC_ANDROID)
+
+TEST_F(AudioProcessingTest, AgcIsOffByDefaultAndDigital) {
+  bool agc_enabled = true;
+  webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
+
+  EXPECT_EQ(0, voe_apm_->GetAgcStatus(agc_enabled, agc_mode));
+  EXPECT_FALSE(agc_enabled);
+  EXPECT_EQ(webrtc::kAgcAdaptiveDigital, agc_mode);
+}
+
+TEST_F(AudioProcessingTest, CanEnableAgcInAdaptiveDigitalMode) {
+  TryEnablingAgcWithMode(webrtc::kAgcAdaptiveDigital);
+}
+
+TEST_F(AudioProcessingTest, AgcIsPossibleExceptInAdaptiveAnalogMode) {
+  EXPECT_EQ(-1, voe_apm_->SetAgcStatus(true, webrtc::kAgcAdaptiveAnalog));
+  EXPECT_EQ(0, voe_apm_->SetAgcStatus(true, webrtc::kAgcFixedDigital));
+  EXPECT_EQ(0, voe_apm_->SetAgcStatus(true, webrtc::kAgcAdaptiveDigital));
+}
+
+TEST_F(AudioProcessingTest, EcIsDisabledAndAecmIsDefaultEcMode) {
+  bool ec_enabled = true;
+  webrtc::EcModes ec_mode = webrtc::kEcDefault;
+
+  EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+  EXPECT_FALSE(ec_enabled);
+  EXPECT_EQ(webrtc::kEcAecm, ec_mode);
+}
+
+TEST_F(AudioProcessingTest, TestVoiceActivityDetection) {
+  TryDetectingSilence();
+  TryDetectingSpeechAfterSilence();
+}
+
+#endif  // MAC_IPHONE || WEBRTC_ANDROID
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/call_report_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/call_report_test.cc
new file mode 100644
index 0000000..c96b14d
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/call_report_test.cc
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "testsupport/fileutils.h"
+
+class CallReportTest : public AfterStreamingFixture {
+};
+
+TEST_F(CallReportTest, ResetCallReportStatisticsFailsForBadInput) {
+  EXPECT_EQ(-1, voe_call_report_->ResetCallReportStatistics(-2));
+  EXPECT_EQ(-1, voe_call_report_->ResetCallReportStatistics(1));
+}
+
+TEST_F(CallReportTest, ResetCallReportStatisticsSucceedsWithCorrectInput) {
+  EXPECT_EQ(0, voe_call_report_->ResetCallReportStatistics(channel_));
+  EXPECT_EQ(0, voe_call_report_->ResetCallReportStatistics(-1));
+}
+
+TEST_F(CallReportTest, EchoMetricSummarySucceeds) {
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+  Sleep(1000);
+
+  webrtc::EchoStatistics echo_statistics;
+  EXPECT_EQ(0, voe_call_report_->GetEchoMetricSummary(echo_statistics));
+}
+
+TEST_F(CallReportTest, GetRoundTripTimeSummaryReturnsAllMinusOnesIfRtcpIsOff) {
+  voe_rtp_rtcp_->SetRTCPStatus(channel_, false);
+
+  webrtc::StatVal delays;
+  EXPECT_EQ(0, voe_call_report_->GetRoundTripTimeSummary(channel_, delays));
+  EXPECT_EQ(-1, delays.average);
+  EXPECT_EQ(-1, delays.min);
+  EXPECT_EQ(-1, delays.max);
+}
+
+TEST_F(CallReportTest, GetRoundTripTimesReturnsValuesIfRtcpIsOn) {
+  voe_rtp_rtcp_->SetRTCPStatus(channel_, true);
+  Sleep(1000);
+
+  webrtc::StatVal delays;
+  EXPECT_EQ(0, voe_call_report_->GetRoundTripTimeSummary(channel_, delays));
+  EXPECT_NE(-1, delays.average);
+  EXPECT_NE(-1, delays.min);
+  EXPECT_NE(-1, delays.max);
+}
+
+TEST_F(CallReportTest, DeadOrAliveSummaryFailsIfDeadOrAliveTrackingNotActive) {
+  int count_the_dead;
+  int count_the_living;
+  EXPECT_EQ(-1, voe_call_report_->GetDeadOrAliveSummary(channel_,
+                                                        count_the_dead,
+                                                        count_the_living));
+}
+
+TEST_F(CallReportTest,
+       DeadOrAliveSummarySucceedsIfDeadOrAliveTrackingIsActive) {
+  EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
+  Sleep(1200);
+
+  int count_the_dead;
+  int count_the_living;
+  EXPECT_EQ(0, voe_call_report_->GetDeadOrAliveSummary(channel_,
+                                                       count_the_dead,
+                                                       count_the_living));
+
+  EXPECT_GE(count_the_dead, 0);
+  EXPECT_GE(count_the_living, 0);
+}
+
+TEST_F(CallReportTest, WriteReportToFileFailsOnBadInput) {
+  EXPECT_EQ(-1, voe_call_report_->WriteReportToFile(NULL));
+}
+
+TEST_F(CallReportTest, WriteReportToFileSucceedsWithCorrectFilename) {
+  std::string output_path = webrtc::test::OutputPath();
+  std::string report_filename = output_path + "call_report.txt";
+
+  EXPECT_EQ(0, voe_call_report_->WriteReportToFile(report_filename.c_str()));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/codec_before_streaming_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/codec_before_streaming_test.cc
new file mode 100644
index 0000000..6d902ef
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/codec_before_streaming_test.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+class CodecBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  void SetUp() {
+    memset(&codec_instance_, 0, sizeof(codec_instance_));
+    codec_instance_.channels = 1;
+    codec_instance_.plfreq = 16000;
+    codec_instance_.pacsize = 480;
+
+    channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(channel_);
+  }
+
+  webrtc::CodecInst codec_instance_;
+  int channel_;
+};
+
+// TODO(phoglund): add test which verifies default pltypes for various codecs.
+
+TEST_F(CodecBeforeStreamingTest, GetRecPayloadTypeFailsForInvalidCodecName) {
+  strcpy(codec_instance_.plname, "SomeInvalidCodecName");
+
+  // Should fail since the codec name is invalid.
+  EXPECT_NE(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+}
+
+TEST_F(CodecBeforeStreamingTest, GetRecPayloadTypeRecognizesISAC) {
+  strcpy(codec_instance_.plname, "iSAC");
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+  strcpy(codec_instance_.plname, "ISAC");
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+}
+
+TEST_F(CodecBeforeStreamingTest, SetRecPayloadTypeCanChangeISACPayloadType) {
+  strcpy(codec_instance_.plname, "ISAC");
+
+  codec_instance_.pltype = 123;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(123, codec_instance_.pltype);
+
+  codec_instance_.pltype = 104;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+
+  EXPECT_EQ(104, codec_instance_.pltype);
+}
+
+TEST_F(CodecBeforeStreamingTest, SetRecPayloadTypeCanChangeILBCPayloadType) {
+  strcpy(codec_instance_.plname, "iLBC");
+  codec_instance_.plfreq = 8000;
+  codec_instance_.pacsize = 240;
+  codec_instance_.rate = 13300;
+
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+  int original_pltype = codec_instance_.pltype;
+  codec_instance_.pltype = 123;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+
+  EXPECT_EQ(123, codec_instance_.pltype);
+
+  codec_instance_.pltype = original_pltype;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+
+  EXPECT_EQ(original_pltype, codec_instance_.pltype);
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/codec_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/codec_test.cc
new file mode 100644
index 0000000..dc5558b
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/codec_test.cc
@@ -0,0 +1,231 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voe_test_defines.h"
+#include "voice_engine_defines.h"
+
+class CodecTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    memset(&codec_instance_, 0, sizeof(codec_instance_));
+  }
+
+  void SetArbitrarySendCodec() {
+    // Just grab the first codec.
+    EXPECT_EQ(0, voe_codec_->GetCodec(0, codec_instance_));
+    EXPECT_EQ(0, voe_codec_->SetSendCodec(channel_, codec_instance_));
+  }
+
+  webrtc::CodecInst codec_instance_;
+};
+
+static void SetRateIfILBC(webrtc::CodecInst* codec_instance, int packet_size) {
+  if (!_stricmp(codec_instance->plname, "ilbc")) {
+    if (packet_size == 160 || packet_size == 320) {
+      codec_instance->rate = 15200;
+    } else {
+      codec_instance->rate = 13300;
+    }
+  }
+}
+
+static bool IsNotViableSendCodec(const char* codec_name) {
+  return !_stricmp(codec_name, "CN") ||
+         !_stricmp(codec_name, "telephone-event") ||
+         !_stricmp(codec_name, "red");
+}
+
+TEST_F(CodecTest, PcmuIsDefaultCodecAndHasTheRightValues) {
+  EXPECT_EQ(0, voe_codec_->GetSendCodec(channel_, codec_instance_));
+  EXPECT_EQ(1, codec_instance_.channels);
+  EXPECT_EQ(160, codec_instance_.pacsize);
+  EXPECT_EQ(8000, codec_instance_.plfreq);
+  EXPECT_EQ(0, codec_instance_.pltype);
+  EXPECT_EQ(64000, codec_instance_.rate);
+  EXPECT_STRCASEEQ("PCMU", codec_instance_.plname);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionIsOffByDefault) {
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  voe_codec_->GetVADStatus(channel_, vad_enabled, vad_mode, dtx_disabled);
+
+  EXPECT_FALSE(vad_enabled);
+  EXPECT_TRUE(dtx_disabled);
+  EXPECT_EQ(webrtc::kVadConventional, vad_mode);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionCanBeEnabled) {
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
+
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  voe_codec_->GetVADStatus(channel_, vad_enabled, vad_mode, dtx_disabled);
+
+  EXPECT_TRUE(vad_enabled);
+  EXPECT_EQ(webrtc::kVadConventional, vad_mode);
+  EXPECT_FALSE(dtx_disabled);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionTypeSettingsCanBeChanged) {
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadAggressiveLow, false));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadAggressiveLow);
+  EXPECT_FALSE(dtx_disabled);
+
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadAggressiveMid, false));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadAggressiveMid);
+  EXPECT_FALSE(dtx_disabled);
+
+  // The fourth argument is the DTX disable flag.
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadAggressiveHigh, true));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadAggressiveHigh);
+  EXPECT_TRUE(dtx_disabled);
+
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadConventional, true));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadConventional);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionCanBeTurnedOff) {
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
+
+  // VAD is always on when DTX is on, so we need to turn off DTX too.
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, false, webrtc::kVadConventional, true));
+
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  voe_codec_->GetVADStatus(channel_, vad_enabled, vad_mode, dtx_disabled);
+
+  EXPECT_FALSE(vad_enabled);
+  EXPECT_TRUE(dtx_disabled);
+  EXPECT_EQ(webrtc::kVadConventional, vad_mode);
+}
+
+// Tests requiring manual verification (although they do have some value
+// without the manual verification):
+TEST_F(CodecTest, ManualExtendedISACApisBehaveAsExpected) {
+   strcpy(codec_instance_.plname, "isac");
+   codec_instance_.pltype = 103;
+   codec_instance_.plfreq = 16000;
+   codec_instance_.channels = 1;
+   // -1 here means "adaptive rate".
+   codec_instance_.rate = -1;
+   codec_instance_.pacsize = 480;
+
+   EXPECT_EQ(0, voe_codec_->SetSendCodec(channel_, codec_instance_));
+
+   EXPECT_NE(0, voe_codec_->SetISACInitTargetRate(channel_, 5000)) <<
+       "iSAC should reject rate 5000.";
+   EXPECT_NE(0, voe_codec_->SetISACInitTargetRate(channel_, 33000)) <<
+       "iSAC should reject rate 33000.";
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 32000));
+
+   TEST_LOG("Ensure that the sound is good (iSAC, target = 32kbps)...\n");
+   Sleep(3000);
+
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 10000));
+   TEST_LOG("Ensure that the sound is good (iSAC, target = 10kbps)...\n");
+   Sleep(3000);
+
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 10000, true));
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 10000, false));
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 0));
+   TEST_LOG("Ensure that the sound is good (iSAC, target = default)...\n");
+   Sleep(3000);
+
+   TEST_LOG("  Testing SetISACMaxPayloadSize:\n");
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+   EXPECT_NE(0, voe_codec_->SetISACMaxPayloadSize(channel_, 50));
+   EXPECT_NE(0, voe_codec_->SetISACMaxPayloadSize(channel_, 650));
+   EXPECT_EQ(0, voe_codec_->SetISACMaxPayloadSize(channel_, 120));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+   TEST_LOG("Ensure that the sound is good (iSAC, "
+            "max payload size = 120 bytes)...\n");
+   Sleep(3000);
+
+   TEST_LOG("  Testing SetISACMaxRate:\n");
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+   EXPECT_EQ(0, voe_codec_->SetISACMaxPayloadSize(channel_, 400));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+   EXPECT_NE(0, voe_codec_->SetISACMaxRate(channel_, 31900));
+   EXPECT_NE(0, voe_codec_->SetISACMaxRate(channel_, 53500));
+   EXPECT_EQ(0, voe_codec_->SetISACMaxRate(channel_, 32000));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+   TEST_LOG("Ensure that the sound is good (iSAC, max rate = 32 kbps)...\n");
+   Sleep(3000);
+
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+
+   // Restore "no limitation". No, no limit, we reach for the sky.
+   EXPECT_EQ(0, voe_codec_->SetISACMaxRate(channel_, 53400));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+}
+
+TEST_F(CodecTest, ManualVerifySendCodecsForAllPacketSizes) {
+  for (int i = 0; i < voe_codec_->NumOfCodecs(); ++i) {
+    voe_codec_->GetCodec(i, codec_instance_);
+    if (IsNotViableSendCodec(codec_instance_.plname)) {
+      TEST_LOG("Skipping %s.\n", codec_instance_.plname);
+      continue;
+    }
+    EXPECT_NE(-1, codec_instance_.pltype) <<
+        "The codec database should suggest a payload type.";
+
+    // Test with default packet size:
+    TEST_LOG("%s (pt=%d): default packet size(%d), accepts sizes ",
+             codec_instance_.plname, codec_instance_.pltype,
+             codec_instance_.pacsize);
+    voe_codec_->SetSendCodec(channel_, codec_instance_);
+    Sleep(CODEC_TEST_TIME);
+
+    // Now test other reasonable packet sizes:
+    bool at_least_one_succeeded = false;
+    for (int packet_size = 80; packet_size < 1000; packet_size += 80) {
+      SetRateIfILBC(&codec_instance_, packet_size);
+      codec_instance_.pacsize = packet_size;
+
+      if (voe_codec_->SetSendCodec(channel_, codec_instance_) != -1) {
+        // Note that it's fine for SetSendCodec to fail - what packet sizes
+        // it accepts depends on the codec. It should accept one at minimum.
+        TEST_LOG("%d ", packet_size);
+        TEST_LOG_FLUSH;
+        at_least_one_succeeded = true;
+        Sleep(CODEC_TEST_TIME);
+      }
+    }
+    TEST_LOG("\n");
+    EXPECT_TRUE(at_least_one_succeeded);
+  }
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/dtmf_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/dtmf_test.cc
new file mode 100644
index 0000000..6aa4625
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/dtmf_test.cc
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voice_engine_defines.h"
+
+class DtmfTest : public AfterStreamingFixture {
+ protected:
+  void RunSixteenDtmfEvents(bool out_of_band) {
+    TEST_LOG("Sending telephone events:\n");
+    EXPECT_EQ(0, voe_dtmf_->SetDtmfFeedbackStatus(false));
+
+    for (int i = 0; i < 16; i++) {
+      TEST_LOG("%d ", i);
+      TEST_LOG_FLUSH;
+      EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(
+          channel_, i, out_of_band, 160, 10));
+      Sleep(500);
+    }
+    TEST_LOG("\n");
+  }
+};
+
+TEST_F(DtmfTest, DtmfFeedbackIsEnabledByDefaultButNotDirectFeedback) {
+  bool dtmf_feedback = false;
+  bool dtmf_direct_feedback = false;
+
+  EXPECT_EQ(0, voe_dtmf_->GetDtmfFeedbackStatus(dtmf_feedback,
+                                                dtmf_direct_feedback));
+
+  EXPECT_TRUE(dtmf_feedback);
+  EXPECT_FALSE(dtmf_direct_feedback);
+}
+
+TEST_F(DtmfTest, ManualSuccessfullySendsInBandTelephoneEvents) {
+  RunSixteenDtmfEvents(false);
+}
+
+TEST_F(DtmfTest, ManualSuccessfullySendsOutOfBandTelephoneEvents) {
+  RunSixteenDtmfEvents(true);
+}
+
+TEST_F(DtmfTest, TestTwoNonDtmfEvents) {
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 32, true));
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 110, true));
+}
+
+#ifndef MAC_IPHONE
+TEST_F(DtmfTest, ManualCanDisableDtmfPlayoutExceptOnIphone) {
+  TEST_LOG("Disabling DTMF playout (no tone should be heard) \n");
+  EXPECT_EQ(0, voe_dtmf_->SetDtmfPlayoutStatus(channel_, false));
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 0, true));
+  Sleep(500);
+
+  TEST_LOG("Enabling DTMF playout (tone should be heard) \n");
+  EXPECT_EQ(0, voe_dtmf_->SetDtmfPlayoutStatus(channel_, true));
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 0, true));
+  Sleep(500);
+}
+#endif
+
+// This test modifies the DTMF payload type from the default 106 to 88
+// and then runs through 16 DTMF out-of-band events.
+TEST_F(DtmfTest, ManualCanChangeDtmfPayloadType) {
+  webrtc::CodecInst codec_instance;
+
+  TEST_LOG("Changing DTMF payload type.\n");
+
+  // Start by modifying the receiving side.
+  for (int i = 0; i < voe_codec_->NumOfCodecs(); i++) {
+    EXPECT_EQ(0, voe_codec_->GetCodec(i, codec_instance));
+    if (!_stricmp("telephone-event", codec_instance.plname)) {
+      codec_instance.pltype = 88;  // Use 88 instead of default 106.
+      EXPECT_EQ(0, voe_base_->StopSend(channel_));
+      EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+      EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+      EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance));
+      EXPECT_EQ(0, voe_base_->StartReceive(channel_));
+      EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+      EXPECT_EQ(0, voe_base_->StartSend(channel_));
+      break;
+    }
+  }
+
+  Sleep(500);
+
+  // Next, we must modify the sending side as well.
+  EXPECT_EQ(0, voe_dtmf_->SetSendTelephoneEventPayloadType(
+      channel_, codec_instance.pltype));
+
+  RunSixteenDtmfEvents(true);
+
+  EXPECT_EQ(0, voe_dtmf_->SetDtmfFeedbackStatus(true, false));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/file_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/file_test.cc
new file mode 100644
index 0000000..63b1600
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/file_test.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "testsupport/fileutils.h"
+
+class FileTest : public AfterStreamingFixture {
+ protected:
+  // Creates the string åäö.pcm.
+  std::string CreateTrickyFilenameInUtf8() {
+    char filename[16] = { (char)0xc3, (char)0xa5,
+                          (char)0xc3, (char)0xa4,
+                          (char)0xc3, (char)0xb6,
+                          static_cast<char>(0) };
+    return std::string(filename) + ".pcm";
+  }
+};
+
+TEST_F(FileTest, ManualRecordToFileForThreeSecondsAndPlayback) {
+  SwitchToManualMicrophone();
+
+  std::string recording_filename =
+      webrtc::test::OutputPath() + CreateTrickyFilenameInUtf8();
+
+  TEST_LOG("Recording to %s for 3 seconds.\n", recording_filename.c_str());
+  EXPECT_EQ(0, voe_file_->StartRecordingMicrophone(recording_filename.c_str()));
+  Sleep(3000);
+  EXPECT_EQ(0, voe_file_->StopRecordingMicrophone());
+
+  TEST_LOG("Playing back %s.\n", recording_filename.c_str());
+  EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(
+      channel_, recording_filename.c_str()));
+
+  // Play the file to the user and ensure the is-playing-locally
+  // and scaling methods also work. The clip is 3 seconds long.
+  Sleep(250);
+  EXPECT_EQ(1, voe_file_->IsPlayingFileLocally(channel_));
+  Sleep(1500);
+  TEST_LOG("Decreasing level by 50%%.\n");
+  EXPECT_EQ(0, voe_file_->ScaleLocalFilePlayout(channel_, 0.5f));
+  Sleep(1500);
+  EXPECT_EQ(0, voe_file_->IsPlayingFileLocally(channel_));
+}
+
+TEST_F(FileTest, ManualRecordPlayoutToWavFileForThreeSecondsAndPlayback) {
+  webrtc::CodecInst send_codec;
+  voe_codec_->GetSendCodec(channel_, send_codec);
+
+  std::string recording_filename =
+      webrtc::test::OutputPath() + "playout.wav";
+
+  TEST_LOG("Recording playout to %s.\n", recording_filename.c_str());
+  EXPECT_EQ(0, voe_file_->StartRecordingPlayout(
+      channel_, recording_filename.c_str(), &send_codec));
+  Sleep(3000);
+  EXPECT_EQ(0, voe_file_->StopRecordingPlayout(channel_));
+
+  TEST_LOG("Playing back the recording in looping mode.\n");
+  EXPECT_EQ(0, voe_file_->StartPlayingFileAsMicrophone(
+      channel_, recording_filename.c_str(), true, false,
+      webrtc::kFileFormatWavFile));
+
+  Sleep(2000);
+  EXPECT_EQ(1, voe_file_->IsPlayingFileAsMicrophone(channel_));
+  Sleep(2000);
+  // We should still be playing since we're looping.
+  EXPECT_EQ(1, voe_file_->IsPlayingFileAsMicrophone(channel_));
+
+  // Try scaling as well.
+  TEST_LOG("Decreasing level by 50%%.\n");
+  EXPECT_EQ(0, voe_file_->ScaleFileAsMicrophonePlayout(channel_, 0.5f));
+  Sleep(1000);
+
+  EXPECT_EQ(0, voe_file_->StopPlayingFileAsMicrophone(channel_));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/hardware_before_initializing_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/hardware_before_initializing_test.cc
new file mode 100644
index 0000000..540614e
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/hardware_before_initializing_test.cc
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"
+#include "before_initialization_fixture.h"
+
+using namespace webrtc;
+
+class HardwareBeforeInitializingTest : public BeforeInitializationFixture {
+};
+
+TEST_F(HardwareBeforeInitializingTest,
+       SetAudioDeviceLayerAcceptsPlatformDefaultBeforeInitializing) {
+  AudioLayers wanted_layer = kAudioPlatformDefault;
+  AudioLayers given_layer;
+  EXPECT_EQ(0, voe_hardware_->SetAudioDeviceLayer(wanted_layer));
+  EXPECT_EQ(0, voe_hardware_->GetAudioDeviceLayer(given_layer));
+  EXPECT_EQ(wanted_layer, given_layer) <<
+      "These should be the same before initializing.";
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/hardware_before_streaming_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/hardware_before_streaming_test.cc
new file mode 100644
index 0000000..6e56347
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/hardware_before_streaming_test.cc
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstring>
+
+#include "after_initialization_fixture.h"
+
+using namespace webrtc;
+
+static const char* kNoDevicesErrorMessage =
+    "Either you have no recording / playout device "
+    "on your system, or the method failed.";
+
+class HardwareBeforeStreamingTest : public AfterInitializationFixture {
+};
+
+// Tests that apply to both mobile and desktop:
+
+TEST_F(HardwareBeforeStreamingTest,
+       SetAudioDeviceLayerFailsSinceTheVoiceEngineHasBeenInitialized) {
+  EXPECT_NE(0, voe_hardware_->SetAudioDeviceLayer(kAudioPlatformDefault));
+  EXPECT_EQ(VE_ALREADY_INITED, voe_base_->LastError());
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       GetCPULoadSucceedsOnWindowsButNotOtherPlatforms) {
+  int load_percent;
+#if defined(_WIN32)
+  EXPECT_EQ(0, voe_hardware_->GetCPULoad(load_percent));
+#else
+  EXPECT_NE(0, voe_hardware_->GetCPULoad(load_percent)) <<
+      "Should fail on non-Windows platforms.";
+#endif
+}
+
+// Tests that only apply to mobile:
+
+#ifdef MAC_IPHONE
+TEST_F(HardwareBeforeStreamingTest, ResetsAudioDeviceOnIphone) {
+  EXPECT_EQ(0, voe_hardware_->ResetAudioDevice());
+}
+#endif
+
+// Tests that only apply to desktop:
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+
+TEST_F(HardwareBeforeStreamingTest, GetSystemCpuLoadSucceeds) {
+#ifdef _WIN32
+  // This method needs some warm-up time on Windows. We sleep a good amount
+  // of time instead of retrying to make the test simpler.
+  Sleep(2000);
+#endif
+
+  int load_percent;
+  EXPECT_EQ(0, voe_hardware_->GetSystemCPULoad(load_percent));
+}
+
+TEST_F(HardwareBeforeStreamingTest, GetPlayoutDeviceStatusReturnsTrue) {
+  bool play_available = false;
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceStatus(play_available));
+  ASSERT_TRUE(play_available) <<
+      "Ensures that the method works and that hardware is in the right state.";
+}
+
+TEST_F(HardwareBeforeStreamingTest, GetRecordingDeviceStatusReturnsTrue) {
+  bool recording_available = false;
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceStatus(recording_available));
+  EXPECT_TRUE(recording_available) <<
+      "Ensures that the method works and that hardware is in the right state.";
+}
+
+// Win, Mac and Linux sound device tests.
+TEST_F(HardwareBeforeStreamingTest,
+       GetRecordingDeviceNameRetrievesDeviceNames) {
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+
+#ifdef _WIN32
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+      -1, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+  device_name[0] = '\0';
+
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+      -1, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+
+#else
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+      0, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+  device_name[0] = '\0';
+
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+      0, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+#endif  // _WIN32
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       AllEnumeratedRecordingDevicesCanBeSetAsRecordingDevice) {
+  // Check recording side.
+  // Extended Win32 enumeration tests: unique GUID outputs on Vista and up:
+  // Win XP and below : device_name is copied to guid_name.
+  // Win Vista and up : device_name is the friendly name and GUID is a unique
+  //                    identifier.
+  // Other            : guid_name is left unchanged.
+  int num_of_recording_devices = 0;
+  EXPECT_EQ(0, voe_hardware_->GetNumOfRecordingDevices(
+      num_of_recording_devices));
+  EXPECT_GT(num_of_recording_devices, 0) << kNoDevicesErrorMessage;
+
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+
+  for (int i = 0; i < num_of_recording_devices; i++) {
+    EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+        i, device_name, guid_name));
+    EXPECT_GT(strlen(device_name), 0u) <<
+        "There should be no empty device names "
+        "among the ones the system gives us.";
+    EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(i));
+  }
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       AllEnumeratedPlayoutDevicesCanBeSetAsPlayoutDevice) {
+  // Check playout side (see recording side test for more info on GUIDs).
+  int num_of_playout_devices = 0;
+  EXPECT_EQ(0, voe_hardware_->GetNumOfPlayoutDevices(
+      num_of_playout_devices));
+  EXPECT_GT(num_of_playout_devices, 0) << kNoDevicesErrorMessage;
+
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+
+  for (int i = 0; i < num_of_playout_devices; ++i) {
+    EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+        i, device_name, guid_name));
+    EXPECT_GT(strlen(device_name), 0u) <<
+        "There should be no empty device names "
+        "among the ones the system gives us.";
+    EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(i));
+  }
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       SetDeviceWithMagicalArgumentsSetsDefaultSoundDevices) {
+#ifdef _WIN32
+  // -1 means "default device" on Windows.
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(-1));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(-1));
+#else
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(0));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(0));
+#endif
+}
+
+#endif  // !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/hardware_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/hardware_test.cc
new file mode 100644
index 0000000..41145e1
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/hardware_test.cc
@@ -0,0 +1,168 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "audio_device.h"
+#include "voe_test_defines.h"
+
+class HardwareTest : public AfterStreamingFixture {
+};
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+TEST_F(HardwareTest, AbleToQueryForDevices) {
+  int num_recording_devices = 0;
+  int num_playout_devices = 0;
+  EXPECT_EQ(0, voe_hardware_->GetNumOfRecordingDevices(num_recording_devices));
+  EXPECT_EQ(0, voe_hardware_->GetNumOfPlayoutDevices(num_playout_devices));
+
+  ASSERT_GT(num_recording_devices, 0) <<
+      "There seem to be no recording devices on your system, "
+      "and this test really doesn't make sense then.";
+  ASSERT_GT(num_playout_devices, 0) <<
+      "There seem to be no playout devices on your system, "
+      "and this test really doesn't make sense then.";
+
+  // Recording devices are handled a bit differently on Windows - we can
+  // just tell it to set the 'default' communication device there.
+#ifdef _WIN32
+  // Should also work while already recording.
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(
+      webrtc::AudioDeviceModule::kDefaultCommunicationDevice));
+  // Should also work while already playing.
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(
+      webrtc::AudioDeviceModule::kDefaultCommunicationDevice));
+#else
+  // For other platforms, just use the first device encountered.
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(0));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(0));
+#endif
+
+  // It's hard to know what names this will return (it's system-dependent),
+  // so just check that it's possible to do it.
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+      0, device_name, guid_name));
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+      0, device_name, guid_name));
+}
+#endif
+
+#ifdef _WIN32
+TEST_F(HardwareTest, GetCpuLoadWorksOnWindows) {
+  int load = -1;
+  EXPECT_EQ(0, voe_hardware_->GetCPULoad(load));
+  EXPECT_GE(load, 0);  // CPU load must be non-negative (args were swapped).
+  TEST_LOG("Voice engine CPU load = %d%%\n", load);
+}
+#else
+TEST_F(HardwareTest, GetCpuLoadReturnsErrorOnNonWindowsPlatform) {
+  int load = -1;
+  EXPECT_EQ(-1, voe_hardware_->GetCPULoad(load));
+}
+#endif
+
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+TEST_F(HardwareTest, GetSystemCpuLoadWorksExceptOnMacAndAndroid) {
+#ifdef _WIN32
+  // This method needs some warm-up time on Windows. We sleep a good amount
+  // of time instead of retrying to make the test simpler.
+  Sleep(2000);
+#endif
+  int load = -1;
+  EXPECT_EQ(0, voe_hardware_->GetSystemCPULoad(load));
+  EXPECT_GE(load, 0);
+  TEST_LOG("System CPU load = %d%%\n", load);
+}
+#endif
+
+TEST_F(HardwareTest, BuiltInWasapiAECWorksForAudioWindowsCoreAudioLayer) {
+#ifdef MAC_IPHONE
+  // Ensure the sound device is reset on iPhone.
+  EXPECT_EQ(0, voe_hardware_->ResetAudioDevice());
+  Sleep(2000);
+#endif
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+
+  webrtc::AudioLayers given_layer;
+  EXPECT_EQ(0, voe_hardware_->GetAudioDeviceLayer(given_layer));
+  if (given_layer != webrtc::kAudioWindowsCore) {
+    // Not Windows Audio Core - then it shouldn't work.
+    EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(true));
+    EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(false));
+    return;
+  }
+
+  TEST_LOG("Testing AEC for Audio Windows Core.\n");
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  // Can't be set after StartSend().
+  EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(true));
+  EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(false));
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_hardware_->EnableBuiltInAEC(true));
+
+  // Can't be called before StartPlayout().
+  EXPECT_EQ(-1, voe_base_->StartSend(channel_));
+
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  TEST_LOG("Processing capture data with built-in AEC...\n");
+  Sleep(2000);
+
+  TEST_LOG("Looping through capture devices...\n");
+  int num_devs = 0;
+  char dev_name[128] = { 0 };
+  char guid_name[128] = { 0 };
+  EXPECT_EQ(0, voe_hardware_->GetNumOfRecordingDevices(num_devs));
+  for (int dev_index = 0; dev_index < num_devs; ++dev_index) {
+    EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(dev_index,
+                                                       dev_name,
+                                                       guid_name));
+    TEST_LOG("%d: %s\n", dev_index, dev_name);
+    EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(dev_index));
+    Sleep(2000);
+  }
+
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(-1));
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(-1));
+
+  TEST_LOG("Looping through render devices, restarting for each "
+      "device...\n");
+  EXPECT_EQ(0, voe_hardware_->GetNumOfPlayoutDevices(num_devs));
+  for (int dev_index = 0; dev_index < num_devs; ++dev_index) {
+    EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(dev_index,
+                                                     dev_name,
+                                                     guid_name));
+    TEST_LOG("%d: %s\n", dev_index, dev_name);
+    EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(dev_index));
+    Sleep(2000);
+  }
+
+  TEST_LOG("Using default devices...\n");
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(-1));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(-1));
+  Sleep(2000);
+
+  // Possible, but not recommended before StopSend().
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  Sleep(2000);  // To verify that there is no garbage audio.
+
+  TEST_LOG("Disabling built-in AEC.\n");
+  EXPECT_EQ(0, voe_hardware_->EnableBuiltInAEC(false));
+
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/manual_hold_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/manual_hold_test.cc
new file mode 100644
index 0000000..68f28b4
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/manual_hold_test.cc
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+// Note: This class includes sleeps and requires manual verification.
+class ManualHoldTest : public AfterStreamingFixture {
+};
+
+TEST_F(ManualHoldTest, SetOnHoldStatusBlockAudio) {
+  TEST_LOG("Channel not on hold => should hear audio.\n");
+  Sleep(2000);
+  TEST_LOG("Put channel on hold => should *not* hear audio.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(channel_, true));
+  Sleep(2000);
+  TEST_LOG("Remove on hold => should hear audio again.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(channel_, false));
+  Sleep(2000);
+  TEST_LOG("Put sending on hold => should *not* hear audio.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(channel_, true, webrtc::kHoldSendOnly));
+  Sleep(2000);
+}
+
+TEST_F(ManualHoldTest, SetOnHoldStatusBlocksLocalFileAudio) {
+  TEST_LOG("Start playing a file locally => "
+      "you should now hear this file being played out.\n");
+  voe_file_->StopPlayingFileAsMicrophone(channel_);
+  EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(
+      channel_, resource_manager_.long_audio_file_path().c_str(), true));
+  Sleep(2000);
+
+  TEST_LOG("Put playing on hold => should *not* hear audio.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(
+      channel_, true, webrtc::kHoldPlayOnly));
+  Sleep(2000);
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/neteq_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/neteq_test.cc
new file mode 100644
index 0000000..8184535
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/neteq_test.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+class NetEQTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    additional_channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(additional_channel_);
+  }
+
+  int additional_channel_;
+};
+
+TEST_F(NetEQTest, GetNetEQPlayoutModeReturnsDefaultModeByDefault) {
+  webrtc::NetEqModes mode;
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqDefault, mode);
+}
+
+TEST_F(NetEQTest, SetNetEQPlayoutModeActuallySetsTheModeForTheChannel) {
+  webrtc::NetEqModes mode;
+  // Set for the first channel but leave the second.
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(channel_, webrtc::kNetEqFax));
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqFax, mode);
+
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(additional_channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqDefault, mode);
+
+  // Set the second channel, leave the first.
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(
+      additional_channel_, webrtc::kNetEqStreaming));
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(additional_channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqStreaming, mode);
+
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqFax, mode);
+}
+
+TEST_F(NetEQTest, GetNetEQBgnModeReturnsBgnOnByDefault) {
+  webrtc::NetEqBgnModes bgn_mode;
+  EXPECT_EQ(0, voe_base_->GetNetEQBGNMode(channel_, bgn_mode));
+  EXPECT_EQ(webrtc::kBgnOn, bgn_mode);
+}
+
+TEST_F(NetEQTest, SetNetEQBgnModeActuallySetsTheBgnMode) {
+  webrtc::NetEqBgnModes bgn_mode;
+  EXPECT_EQ(0, voe_base_->SetNetEQBGNMode(channel_, webrtc::kBgnOff));
+  EXPECT_EQ(0, voe_base_->GetNetEQBGNMode(channel_, bgn_mode));
+  EXPECT_EQ(webrtc::kBgnOff, bgn_mode);
+
+  EXPECT_EQ(0, voe_base_->SetNetEQBGNMode(channel_, webrtc::kBgnFade));
+  EXPECT_EQ(0, voe_base_->GetNetEQBGNMode(channel_, bgn_mode));
+  EXPECT_EQ(webrtc::kBgnFade, bgn_mode);
+}
+
+TEST_F(NetEQTest, ManualSetEQPlayoutModeStillProducesOkAudio) {
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(channel_, webrtc::kNetEqDefault));
+  TEST_LOG("NetEQ default playout mode enabled => should hear OK audio.\n");
+  Sleep(2000);
+
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(
+      channel_, webrtc::kNetEqStreaming));
+  TEST_LOG("NetEQ streaming playout mode enabled => should hear OK audio.\n");
+  Sleep(2000);
+
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(channel_, webrtc::kNetEqFax));
+  TEST_LOG("NetEQ fax playout mode enabled => should hear OK audio.\n");
+  Sleep(2000);
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/network_before_streaming_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/network_before_streaming_test.cc
new file mode 100644
index 0000000..7a41e80
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/network_before_streaming_test.cc
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+class NetworkBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  void SetUp() {
+    channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(channel_);
+  }
+
+  int channel_;
+};
+
+TEST_F(NetworkBeforeStreamingTest,
+    GetSourceInfoReturnsEmptyValuesForUnconfiguredChannel) {
+  char src_ip[32] = "0.0.0.0";
+  int src_rtp_port = 1234;
+  int src_rtcp_port = 1235;
+
+  EXPECT_EQ(0, voe_network_->GetSourceInfo(
+      channel_, src_rtp_port, src_rtcp_port, src_ip));
+  EXPECT_EQ(0, src_rtp_port);
+  EXPECT_EQ(0, src_rtcp_port);
+  EXPECT_STRCASEEQ("", src_ip);
+}
+
+TEST_F(NetworkBeforeStreamingTest,
+    GetSourceFilterReturnsEmptyValuesForUnconfiguredChannel) {
+  int filter_port = -1;
+  int filter_port_rtcp = -1;
+  char filter_ip[32] = "0.0.0.0";
+
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_port, filter_port_rtcp, filter_ip));
+
+  EXPECT_EQ(0, filter_port);
+  EXPECT_EQ(0, filter_port_rtcp);
+  EXPECT_STRCASEEQ("", filter_ip);
+}
+
+TEST_F(NetworkBeforeStreamingTest, SetSourceFilterSucceeds) {
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(channel_, 0));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/network_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/network_test.cc
new file mode 100644
index 0000000..335fba6
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/network_test.cc
@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "mock/mock_voe_observer.h"
+#include "mock/mock_voe_connection_observer.h"
+#include "voe_test_interface.h"
+
+static const int kDefaultRtpPort = 8000;
+static const int kDefaultRtcpPort = 8001;
+
+class NetworkTest : public AfterStreamingFixture {
+};
+
+using ::testing::Between;
+
+TEST_F(NetworkTest, GetSourceInfoReturnsPortsAndIpAfterReceivingPackets) {
+  // Give some time to send speech packets.
+  Sleep(200);
+
+  int rtp_port = 0;
+  int rtcp_port = 0;
+  char source_ip[32] = "127.0.0.1";
+
+  EXPECT_EQ(0, voe_network_->GetSourceInfo(channel_, rtp_port, rtcp_port,
+      source_ip));
+
+  EXPECT_EQ(kDefaultRtpPort, rtp_port);
+  EXPECT_EQ(kDefaultRtcpPort, rtcp_port);
+}
+
+TEST_F(NetworkTest, NoFilterIsEnabledByDefault) {
+  int filter_rtp_port = -1;
+  int filter_rtcp_port = -1;
+  char filter_ip[64] = { 0 };
+
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_rtp_port, filter_rtcp_port, filter_ip));
+
+  EXPECT_EQ(0, filter_rtp_port);
+  EXPECT_EQ(0, filter_rtcp_port);
+  EXPECT_STREQ("", filter_ip);
+}
+
+TEST_F(NetworkTest, ManualCanFilterRtpPort) {
+  TEST_LOG("No filter, should hear audio.\n");
+  Sleep(1000);
+
+  int port_to_block = kDefaultRtpPort + 10;
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(channel_, port_to_block));
+
+  // Changes should take effect immediately.
+  int filter_rtp_port = -1;
+  int filter_rtcp_port = -1;
+  char filter_ip[64] = { 0 };
+
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_rtp_port, filter_rtcp_port, filter_ip));
+
+  EXPECT_EQ(port_to_block, filter_rtp_port);
+
+  TEST_LOG("Now filtering port %d, should not hear audio.\n", port_to_block);
+  Sleep(1000);
+
+  TEST_LOG("Removing filter, should hear audio.\n");
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(channel_, 0));
+  Sleep(1000);
+}
+
+TEST_F(NetworkTest, ManualCanFilterIp) {
+  TEST_LOG("You should hear audio.\n");
+  Sleep(1000);
+
+  int rtcp_port_to_block = kDefaultRtcpPort + 10;
+  TEST_LOG("Filtering IP 10.10.10.10, should not hear audio.\n");
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(
+      channel_, 0, rtcp_port_to_block, "10.10.10.10"));
+
+  int filter_rtp_port = -1;
+  int filter_rtcp_port = -1;
+  char filter_ip[64] = { 0 };
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_rtp_port, filter_rtcp_port, filter_ip));
+
+  EXPECT_EQ(0, filter_rtp_port);
+  EXPECT_EQ(rtcp_port_to_block, filter_rtcp_port);
+  EXPECT_STREQ("10.10.10.10", filter_ip);
+}
+
+TEST_F(NetworkTest,
+    CallsObserverOnTimeoutAndRestartWhenPacketTimeoutNotificationIsEnabled) {
+  // First, get rid of the default, asserting observer and install our observer.
+  EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
+  webrtc::MockVoEObserver mock_observer;
+  EXPECT_EQ(0, voe_base_->RegisterVoiceEngineObserver(mock_observer));
+
+  // Define expectations.
+  int expected_error = VE_RECEIVE_PACKET_TIMEOUT;
+  EXPECT_CALL(mock_observer, CallbackOnError(channel_, expected_error))
+      .Times(1);
+  expected_error = VE_PACKET_RECEIPT_RESTARTED;
+  EXPECT_CALL(mock_observer, CallbackOnError(channel_, expected_error))
+      .Times(1);
+
+  // Get some speech going.
+  Sleep(500);
+
+  // Enable packet timeout.
+  EXPECT_EQ(0, voe_network_->SetPacketTimeoutNotification(channel_, true, 1));
+
+  // Trigger a timeout.
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  Sleep(1500);
+
+  // Trigger a restart event.
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  Sleep(500);
+}
+
+TEST_F(NetworkTest, DoesNotCallDeRegisteredObserver) {
+  // De-register the default observer. This test will fail if the observer gets
+  // called for any reason, so if this de-register doesn't work the test will
+  // fail.
+  EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
+
+  // Get some speech going.
+  Sleep(500);
+
+  // Enable packet timeout.
+  EXPECT_EQ(0, voe_network_->SetPacketTimeoutNotification(channel_, true, 1));
+
+  // Trigger a timeout.
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  Sleep(1500);
+}
+
+TEST_F(NetworkTest, DeadOrAliveObserverSeesAliveMessagesIfEnabled) {
+  webrtc::MockVoeConnectionObserver mock_observer;
+  EXPECT_EQ(0, voe_network_->RegisterDeadOrAliveObserver(
+      channel_, mock_observer));
+
+  // We should be called about 4 times in four seconds, but 3 is OK too.
+  EXPECT_CALL(mock_observer, OnPeriodicDeadOrAlive(channel_, true))
+      .Times(Between(3, 4));
+
+  EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
+  Sleep(4000);
+
+  EXPECT_EQ(0, voe_network_->DeRegisterDeadOrAliveObserver(channel_));
+}
+
+TEST_F(NetworkTest, DeadOrAliveObserverSeesDeadMessagesIfEnabled) {
+  // "When do you see them?" - "All the time!"
+  webrtc::MockVoeConnectionObserver mock_observer;
+  EXPECT_EQ(0, voe_network_->RegisterDeadOrAliveObserver(
+      channel_, mock_observer));
+
+  Sleep(500);
+
+  // We should be called about 4 times in four seconds, but 3 is OK too.
+  EXPECT_CALL(mock_observer, OnPeriodicDeadOrAlive(channel_, false))
+      .Times(Between(3, 4));
+
+  EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, false));
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  Sleep(4000);
+
+  EXPECT_EQ(0, voe_network_->DeRegisterDeadOrAliveObserver(channel_));
+}
+
+TEST_F(NetworkTest, CanSwitchToExternalTransport) {
+  EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+  EXPECT_EQ(0, voe_base_->DeleteChannel(channel_));
+  channel_ = voe_base_->CreateChannel();
+
+  voetest::FakeExternalTransport external_transport(voe_network_);
+  EXPECT_EQ(0, voe_network_->RegisterExternalTransport(
+      channel_, external_transport));
+
+  EXPECT_EQ(0, voe_base_->StartReceive(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+
+  Sleep(1000);
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+
+  EXPECT_EQ(0, voe_network_->DeRegisterExternalTransport(channel_));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
new file mode 100644
index 0000000..afd0820
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+using namespace webrtc;
+using namespace testing;
+
+class RtpRtcpBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  void SetUp();
+  void TearDown();
+
+  int channel_;
+};
+
+void RtpRtcpBeforeStreamingTest::SetUp() {
+  EXPECT_THAT(channel_ = voe_base_->CreateChannel(), Not(Lt(0)));
+}
+
+void RtpRtcpBeforeStreamingTest::TearDown() {
+  EXPECT_EQ(0, voe_base_->DeleteChannel(channel_));
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest,
+       GetRtcpStatusReturnsTrueByDefaultAndObeysSetRtcpStatus) {
+  bool on = false;
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTCPStatus(channel_, on));
+  EXPECT_TRUE(on);
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, false));
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTCPStatus(channel_, on));
+  EXPECT_FALSE(on);
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, true));
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTCPStatus(channel_, on));
+  EXPECT_TRUE(on);
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest, RtpKeepAliveStatusIsOffByDefault) {
+  unsigned char payload_type;
+  int delta_seconds;
+  bool on;
+
+  // Should be off by default.
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
+      channel_, on, payload_type, delta_seconds));
+  EXPECT_FALSE(on);
+  EXPECT_EQ(255, payload_type);
+  EXPECT_EQ(0, delta_seconds);
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest, SetRtpKeepAliveDealsWithInvalidParameters) {
+  unsigned char payload_type;
+  int delta_seconds;
+  bool on;
+
+  // Verify invalid input parameters.
+  EXPECT_NE(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
+      -1, on, payload_type, delta_seconds)) <<
+      "Should fail for invalid channel -1.";
+  EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      -1, true, 0, 15)) <<
+      "Should fail for invalid channel -1.";
+  EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      channel_, true, -1, 15)) <<
+      "Should fail for invalid payload -1.";
+  EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      channel_, true, 0, 61)) <<
+      "The delta time must be [1, 60] seconds.";
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
+      channel_, on, payload_type, delta_seconds));
+  EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      channel_, true, 0));
+
+  // Should still be off, default 0 used by PCMU.
+  EXPECT_FALSE(on);
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest,
+       GetRtpKeepaliveStatusObeysSetRtpKeepaliveStatus) {
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCP_CNAME(channel_, "SomeName"));
+
+  // Try valid settings.
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      channel_, true, 1));
+
+  unsigned char payload_type;
+  int delta_seconds;
+  bool on;
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
+      channel_, on, payload_type, delta_seconds));
+  EXPECT_TRUE(on);
+  EXPECT_EQ(1, payload_type);
+  EXPECT_EQ(15, delta_seconds) << "15 seconds delta is default.";
+
+  // Set the keep-alive payload to 60, which the codecs can't use.
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      channel_, true, 60, 3));
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
+      channel_, on, payload_type, delta_seconds));
+  EXPECT_TRUE(on);
+  EXPECT_EQ(60, payload_type);
+  EXPECT_EQ(3, delta_seconds);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
+      channel_, false, 60));
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest, GetLocalSsrcObeysSetLocalSsrc) {
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, 1234));
+  unsigned int result = 0;
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetLocalSSRC(channel_, result));
+  EXPECT_EQ(1234u, result);
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc
new file mode 100644
index 0000000..42c1b4e
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voe_standard_test.h"
+#include "testsupport/fileutils.h"
+
+static const char* const RTCP_CNAME = "Whatever";
+
+class RtpRtcpTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    // We need a second channel for this test, so set it up.
+    second_channel_ = voe_base_->CreateChannel();
+    EXPECT_GE(second_channel_, 0);
+
+    EXPECT_EQ(0, voe_base_->SetSendDestination(
+        second_channel_, 8002, "127.0.0.1"));
+    EXPECT_EQ(0, voe_base_->SetLocalReceiver(
+        second_channel_, 8002));
+    EXPECT_EQ(0, voe_base_->StartReceive(second_channel_));
+    EXPECT_EQ(0, voe_base_->StartPlayout(second_channel_));
+    EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(second_channel_, 5678));
+    EXPECT_EQ(0, voe_base_->StartSend(second_channel_));
+
+    // We'll set up the RTCP CNAME and SSRC to something arbitrary here.
+    voe_rtp_rtcp_->SetRTCP_CNAME(channel_, RTCP_CNAME);
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(second_channel_);
+  }
+
+  int second_channel_;
+};
+
+TEST_F(RtpRtcpTest, RemoteRtcpCnameHasPropagatedToRemoteSide) {
+  // We need to sleep a bit here for the name to propagate. For instance,
+  // 200 milliseconds is not enough, so we'll go with one second here.
+  Sleep(1000);
+
+  char char_buffer[256];
+  voe_rtp_rtcp_->GetRemoteRTCP_CNAME(channel_, char_buffer);
+  EXPECT_STREQ(RTCP_CNAME, char_buffer);
+}
+
+TEST_F(RtpRtcpTest, SSRCPropagatesCorrectly) {
+  unsigned int local_ssrc = 1234;
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, local_ssrc));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  Sleep(1000);
+
+  unsigned int ssrc;
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetLocalSSRC(channel_, ssrc));
+  EXPECT_EQ(local_ssrc, ssrc);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRemoteSSRC(channel_, ssrc));
+  EXPECT_EQ(local_ssrc, ssrc);
+}
+
+TEST_F(RtpRtcpTest, RtcpApplicationDefinedPacketsCanBeSentAndReceived) {
+  voetest::RtcpAppHandler rtcp_app_handler;
+  EXPECT_EQ(0, voe_rtp_rtcp_->RegisterRTCPObserver(
+      channel_, rtcp_app_handler));
+
+  // Send data aligned to 32 bytes.
+  const char* data = "application-dependent data------";
+  unsigned short data_length = strlen(data);
+  unsigned int data_name = 0x41424344;  // 'ABCD' in ASCII
+  unsigned char data_subtype = 1;
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->SendApplicationDefinedRTCPPacket(
+      channel_, data_subtype, data_name, data, data_length));
+
+  // Ensure the RTP-RTCP process gets scheduled.
+  Sleep(1000);
+
+  // Ensure we received the data in the callback.
+  EXPECT_EQ(data_length, rtcp_app_handler.length_in_bytes_);
+  EXPECT_EQ(0, memcmp(data, rtcp_app_handler.data_, data_length));
+  EXPECT_EQ(data_name, rtcp_app_handler.name_);
+  EXPECT_EQ(data_subtype, rtcp_app_handler.sub_type_);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->DeRegisterRTCPObserver(channel_));
+}
+
+TEST_F(RtpRtcpTest, DisabledRtcpObserverDoesNotReceiveData) {
+  voetest::RtcpAppHandler rtcp_app_handler;
+  EXPECT_EQ(0, voe_rtp_rtcp_->RegisterRTCPObserver(
+      channel_, rtcp_app_handler));
+
+  // Put observer in a known state before de-registering.
+  rtcp_app_handler.Reset();
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->DeRegisterRTCPObserver(channel_));
+
+  const char* data = "whatever";
+  EXPECT_EQ(0, voe_rtp_rtcp_->SendApplicationDefinedRTCPPacket(
+      channel_, 1, 0x41424344, data, strlen(data)));
+
+  // Ensure the RTP-RTCP process gets scheduled.
+  Sleep(1000);
+
+  // Ensure we received no data.
+  EXPECT_EQ(0u, rtcp_app_handler.name_);
+  EXPECT_EQ(0u, rtcp_app_handler.sub_type_);
+}
+
+TEST_F(RtpRtcpTest, InsertExtraRTPPacketDealsWithInvalidArguments) {
+  const char payload_data[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
+
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+      -1, 0, false, payload_data, 8)) <<
+          "Should reject: invalid channel.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+      channel_, -1, false, payload_data, 8)) <<
+          "Should reject: invalid payload type.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+      channel_, 128, false, payload_data, 8)) <<
+          "Should reject: invalid payload type.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, 99, false, NULL, 8)) <<
+            "Should reject: bad pointer.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, 99, false, payload_data, 1500 - 28 + 1)) <<
+            "Should reject: invalid size.";
+}
+
+TEST_F(RtpRtcpTest, CanTransmitExtraRtpPacketsWithoutError) {
+  const char payload_data[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
+
+  for (int i = 0; i < 128; ++i) {
+    // Try both with and without the marker bit set
+    EXPECT_EQ(0, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, i, false, payload_data, 8));
+    EXPECT_EQ(0, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, i, true, payload_data, 8));
+  }
+}
+
+TEST_F(RtpRtcpTest, CanCreateRtpDumpFilesWithoutError) {
+  // Create two RTP dump files (3 seconds long). You can verify these after
+  // the test using rtpplay or NetEqRTPplay if you like.
+  std::string output_path = webrtc::test::OutputPath();
+  std::string incoming_filename = output_path + "dump_in_3sec.rtp";
+  std::string outgoing_filename = output_path + "dump_out_3sec.rtp";
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->StartRTPDump(
+      channel_, incoming_filename.c_str(), webrtc::kRtpIncoming));
+  EXPECT_EQ(0, voe_rtp_rtcp_->StartRTPDump(
+      channel_, outgoing_filename.c_str(), webrtc::kRtpOutgoing));
+
+  Sleep(3000);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->StopRTPDump(channel_, webrtc::kRtpIncoming));
+  EXPECT_EQ(0, voe_rtp_rtcp_->StopRTPDump(channel_, webrtc::kRtpOutgoing));
+}
+
+TEST_F(RtpRtcpTest, ObserverGetsNotifiedOnSsrcChange) {
+  voetest::TestRtpObserver rtcp_observer;
+  EXPECT_EQ(0, voe_rtp_rtcp_->RegisterRTPObserver(
+      channel_, rtcp_observer));
+  rtcp_observer.Reset();
+
+  unsigned int new_ssrc = 7777;
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, new_ssrc));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  Sleep(500);
+
+  // Verify we got the new SSRC.
+  EXPECT_EQ(new_ssrc, rtcp_observer.ssrc_[0]);
+
+  // Now try another SSRC.
+  unsigned int newer_ssrc = 1717;
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, newer_ssrc));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  Sleep(500);
+
+  EXPECT_EQ(newer_ssrc, rtcp_observer.ssrc_[0]);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->DeRegisterRTPObserver(channel_));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/voe_base_misc_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/voe_base_misc_test.cc
new file mode 100644
index 0000000..0388025
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/voe_base_misc_test.cc
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "before_initialization_fixture.h"
+
+class VoeBaseMiscTest : public BeforeInitializationFixture {
+};
+
+using namespace testing;
+
+TEST_F(VoeBaseMiscTest, MaxNumChannelsIs32) {
+  EXPECT_EQ(32, voe_base_->MaxNumOfChannels());
+}
+
+TEST_F(VoeBaseMiscTest, GetVersionPrintsSomeUsefulInformation) {
+  char char_buffer[1024];
+  EXPECT_EQ(0, voe_base_->GetVersion(char_buffer));
+  EXPECT_THAT(char_buffer, ContainsRegex("VoiceEngine"));
+}
diff --git a/trunk/src/voice_engine/main/test/auto_test/standard/volume_test.cc b/trunk/src/voice_engine/main/test/auto_test/standard/volume_test.cc
new file mode 100644
index 0000000..44887b7
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/standard/volume_test.cc
@@ -0,0 +1,238 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+class VolumeTest : public AfterStreamingFixture {
+};
+
+TEST_F(VolumeTest, DefaultSpeakerVolumeIsAtMost255) {
+  unsigned int volume = 1000;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_LE(volume, 255u);
+}
+
+TEST_F(VolumeTest, ManualSetVolumeWorks) {
+  unsigned int original_volume = 0;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(original_volume));
+  Sleep(1000);
+
+  TEST_LOG("Setting speaker volume to 0 out of 255.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(0));
+  Sleep(1000);
+
+  TEST_LOG("Setting speaker volume to 100 out of 255.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(100));
+  Sleep(1000);
+
+  // Set the volume to 255 very briefly so we don't blast the poor user
+  // listening to this. This is just to test the call succeeds.
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(255));
+
+  TEST_LOG("Setting speaker volume to the original %d out of 255.\n",
+      original_volume);
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(original_volume));
+  Sleep(1000);
+}
+
+#if !defined(MAC_IPHONE)
+
+// NOTE(phoglund): This test is flaky because of how the OS works, and is hence
+// disabled by default.
+TEST_F(VolumeTest, DISABLED_DefaultMicrophoneVolumeIsAtMost255) {
+  unsigned int volume = 1000;
+  EXPECT_EQ(0, voe_volume_control_->GetMicVolume(volume));
+  EXPECT_LE(volume, 255u);
+}
+
+TEST_F(VolumeTest, ManualRequiresMicrophoneCanSetMicrophoneVolumeWithAcgOff) {
+  SwitchToManualMicrophone();
+  EXPECT_EQ(0, voe_apm_->SetAgcStatus(false));
+
+  unsigned int original_volume = 0;
+  EXPECT_EQ(0, voe_volume_control_->GetMicVolume(original_volume));
+
+  TEST_LOG("Setting microphone volume to 0.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetMicVolume(channel_));
+  Sleep(1000);
+  TEST_LOG("Setting microphone volume to 255.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetMicVolume(255));
+  Sleep(1000);
+  TEST_LOG("Setting microphone volume back to saved value.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetMicVolume(original_volume));
+  Sleep(1000);
+}
+
+TEST_F(VolumeTest, ChannelScalingIsOneByDefault) {
+  float scaling = -1.0f;
+
+  EXPECT_EQ(0, voe_volume_control_->GetChannelOutputVolumeScaling(
+      channel_, scaling));
+  EXPECT_FLOAT_EQ(1.0f, scaling);
+}
+
+TEST_F(VolumeTest, ManualCanSetChannelScaling) {
+  EXPECT_EQ(0, voe_volume_control_->SetChannelOutputVolumeScaling(
+      channel_, 0.1f));
+
+  float scaling = 1.0f;
+  EXPECT_EQ(0, voe_volume_control_->GetChannelOutputVolumeScaling(
+      channel_, scaling));
+
+  EXPECT_FLOAT_EQ(0.1f, scaling);
+
+  TEST_LOG("Channel scaling set to 0.1: audio should be barely audible.\n");
+  Sleep(2000);
+}
+
+#endif  // !MAC_IPHONE
+
+#if !defined(WEBRTC_ANDROID) && !defined(MAC_IPHONE)
+
+TEST_F(VolumeTest, InputMutingIsNotEnabledByDefault) {
+  bool is_muted = true;
+  EXPECT_EQ(0, voe_volume_control_->GetInputMute(channel_, is_muted));
+  EXPECT_FALSE(is_muted);
+}
+
+TEST_F(VolumeTest, ManualInputMutingMutesMicrophone) {
+  SwitchToManualMicrophone();
+
+  // Enable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, true));
+  bool is_muted = false;
+  EXPECT_EQ(0, voe_volume_control_->GetInputMute(channel_, is_muted));
+  EXPECT_TRUE(is_muted);
+
+  TEST_LOG("Muted: talk into microphone and verify you can't hear yourself.\n");
+  Sleep(2000);
+
+  // Test that we can disable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, false));
+  EXPECT_EQ(0, voe_volume_control_->GetInputMute(channel_, is_muted));
+  EXPECT_FALSE(is_muted);
+
+  TEST_LOG("Unmuted: talk into microphone and verify you can hear yourself.\n");
+  Sleep(2000);
+}
+
+// NOTE(phoglund): This test is flaky because of how the OS works, and is hence
+// disabled by default.
+TEST_F(VolumeTest, DISABLED_SystemInputMutingIsNotEnabledByDefault) {
+  bool is_muted = true;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+}
+
+TEST_F(VolumeTest, ManualSystemInputMutingMutesMicrophone) {
+  SwitchToManualMicrophone();
+
+  // Enable system input muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemInputMute(true));
+  bool is_muted = false;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
+  EXPECT_TRUE(is_muted);
+
+  TEST_LOG("Muted: talk into microphone and verify you can't hear yourself.\n");
+  Sleep(2000);
+
+  // Test that we can disable system input muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemInputMute(false));
+  EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+
+  TEST_LOG("Unmuted: talk into microphone and verify you can hear yourself.\n");
+  Sleep(2000);
+}
+
+TEST_F(VolumeTest, SystemOutputMutingIsNotEnabledByDefault) {
+  bool is_muted = true;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemOutputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+}
+
+TEST_F(VolumeTest, ManualSystemOutputMutingMutesOutput) {
+  // Enable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemOutputMute(true));
+  bool is_muted = false;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemOutputMute(is_muted));
+  EXPECT_TRUE(is_muted);
+
+  TEST_LOG("Muted: you should hear no audio.\n");
+  Sleep(2000);
+
+  // Test that we can disable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemOutputMute(false));
+  EXPECT_EQ(0, voe_volume_control_->GetSystemOutputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+
+  TEST_LOG("Unmuted: you should hear audio.\n");
+  Sleep(2000);
+}
+
+TEST_F(VolumeTest, ManualTestInputAndOutputLevels) {
+  SwitchToManualMicrophone();
+
+  TEST_LOG("Speak and verify that the following levels look right:\n");
+  for (int i = 0; i < 5; i++) {
+    Sleep(1000);
+    unsigned int input_level = 0;
+    unsigned int output_level = 0;
+    unsigned int input_level_full_range = 0;
+    unsigned int output_level_full_range = 0;
+
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechInputLevel(
+        input_level));
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechOutputLevel(
+        channel_, output_level));
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechInputLevelFullRange(
+        input_level_full_range));
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechOutputLevelFullRange(
+        channel_, output_level_full_range));
+
+    TEST_LOG("    warped levels (0-9)    : in=%5d, out=%5d\n",
+        input_level, output_level);
+    TEST_LOG("    linear levels (0-32768): in=%5d, out=%5d\n",
+        input_level_full_range, output_level_full_range);
+  }
+}
+
+TEST_F(VolumeTest, ChannelsAreNotPannedByDefault) {
+  float left = -1.0;
+  float right = -1.0;
+
+  EXPECT_EQ(0, voe_volume_control_->GetOutputVolumePan(channel_, left, right));
+  EXPECT_FLOAT_EQ(1.0, left);
+  EXPECT_FLOAT_EQ(1.0, right);
+}
+
+TEST_F(VolumeTest, ManualTestChannelPanning) {
+  TEST_LOG("Panning left.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetOutputVolumePan(channel_, 0.8f, 0.1f));
+  Sleep(1000);
+
+  TEST_LOG("Back to center.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetOutputVolumePan(channel_, 1.0f, 1.0f));
+  Sleep(1000);
+
+  TEST_LOG("Panning right.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetOutputVolumePan(channel_, 0.1f, 0.8f));
+  Sleep(1000);
+
+  // To finish, verify that the getter works.
+  float left = 0.0f;
+  float right = 0.0f;
+
+  EXPECT_EQ(0, voe_volume_control_->GetOutputVolumePan(channel_, left, right));
+  EXPECT_FLOAT_EQ(0.1f, left);
+  EXPECT_FLOAT_EQ(0.8f, right);
+}
+
+#endif  // !WEBRTC_ANDROID && !MAC_IPHONE
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_cpu_test.cc b/trunk/src/voice_engine/main/test/auto_test/voe_cpu_test.cc
new file mode 100644
index 0000000..051bce9
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_cpu_test.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <cassert>
+#if defined(_WIN32)
+#include <conio.h>
+#endif
+
+#include "voe_cpu_test.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#define CHECK(expr)                                             \
+    if (expr)                                                   \
+    {                                                           \
+        printf("Error at line: %i, %s \n", __LINE__, #expr);    \
+        printf("Error code: %i \n", base->LastError());  \
+        PAUSE                                               \
+        return -1;                                              \
+    }
+
+extern char* GetFilename(char* filename);
+extern const char* GetFilename(const char* filename);
+extern int GetResource(char* resource, char* dest, int destLen);
+extern char* GetResource(char* resource);
+extern const char* GetResource(const char* resource);
+
+VoECpuTest::VoECpuTest(VoETestManager& mgr)
+    : _mgr(mgr) {
+
+}
+
+int VoECpuTest::DoTest() {
+  printf("------------------------------------------------\n");
+  printf(" CPU Reference Test\n");
+  printf("------------------------------------------------\n");
+
+  VoEBase* base = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEAudioProcessing* apm = _mgr.APMPtr();
+
+  int channel(-1);
+  CodecInst isac;
+
+  isac.pltype = 104;
+  strcpy(isac.plname, "ISAC");
+  isac.pacsize = 960;
+  isac.plfreq = 32000;
+  isac.channels = 1;
+  isac.rate = -1;
+
+  CHECK(base->Init());
+  channel = base->CreateChannel();
+
+  CHECK(base->SetLocalReceiver(channel, 5566));
+  CHECK(base->SetSendDestination(channel, 5566, "127.0.0.1"));
+  CHECK(codec->SetRecPayloadType(channel, isac));
+  CHECK(codec->SetSendCodec(channel, isac));
+
+  CHECK(base->StartReceive(channel));
+  CHECK(base->StartPlayout(channel));
+  CHECK(base->StartSend(channel));
+  CHECK(file->StartPlayingFileAsMicrophone(channel, _mgr.AudioFilename(),
+          true, true));
+
+  CHECK(codec->SetVADStatus(channel, true));
+  CHECK(apm->SetAgcStatus(true, kAgcAdaptiveAnalog));
+  CHECK(apm->SetNsStatus(true, kNsModerateSuppression));
+  CHECK(apm->SetEcStatus(true, kEcAec));
+
+  TEST_LOG("\nMeasure CPU and memory while running a full-duplex"
+    " iSAC-swb call.\n\n");
+
+  PAUSE
+
+  CHECK(base->StopSend(channel));
+  CHECK(base->StopPlayout(channel));
+  CHECK(base->StopReceive(channel));
+
+  base->DeleteChannel(channel);
+  CHECK(base->Terminate());
+
+  return 0;
+}
+
+} //  namespace voetest
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_cpu_test.h b/trunk/src/voice_engine/main/test/auto_test/voe_cpu_test.h
new file mode 100644
index 0000000..f883075
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_cpu_test.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CPU_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_CPU_TEST_H
+
+#include "voe_standard_test.h"
+
+namespace voetest {
+
+class VoETestManager;
+
+class VoECpuTest {
+ public:
+  VoECpuTest(VoETestManager& mgr);
+  ~VoECpuTest() {}
+  int DoTest();
+ private:
+  VoETestManager& _mgr;
+};
+
+} // namespace voetest
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_CPU_TEST_H
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_extended_test.cc b/trunk/src/voice_engine/main/test/auto_test/voe_extended_test.cc
new file mode 100644
index 0000000..8b4ad45
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_extended_test.cc
@@ -0,0 +1,8421 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include <vector>
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "voe_extended_test.h"
+#include "../../source/voice_engine_defines.h"  // defines build macros
+#include "system_wrappers/interface/ref_count.h"
+
+#if defined(_WIN32)
+#include <conio.h>
+#include <winsock2.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <netdb.h>
+#endif
+
+using namespace webrtc;
+
+namespace voetest {
+
+// Set this flag to ensure that test packets are transmitted to
+// RemoteIP::RemotePort during tests of SetSendToS and SetSendGQos. Requires
+// receiver at the remote side and Wireshark with a proper ip.src filter.
+#define _SEND_TO_REMOTE_IP_
+
+#ifdef _SEND_TO_REMOTE_IP_
+const int RemotePort = 12345; // transmit to this UDP port
+const char* RemoteIP = "192.168.200.1"; // transmit to this IP address
+#endif
+
+#ifdef MAC_IPHONE
+#define SLEEP_IF_IPHONE(x) SLEEP(x)
+#else
+#define SLEEP_IF_IPHONE(x)
+#endif
+
+#ifdef WEBRTC_ANDROID
+// Global pointers
+extern void* globalJavaVM;
+extern void* globalContext;
+#endif
+
+extern char* GetFilename(char* filename);
+extern const char* GetFilename(const char* filename);
+extern int GetResource(char* resource, char* dest, int destLen);
+extern char* GetResource(char* resource);
+extern const char* GetResource(const char* resource);
+
+// ----------------------------------------------------------------------------
+// External AudioDeviceModule implementation
+// ----------------------------------------------------------------------------
+
+// static
+AudioDeviceModuleImpl* AudioDeviceModuleImpl::Create() {
+  AudioDeviceModuleImpl* xADM = new AudioDeviceModuleImpl();
+  if (xADM)
+    xADM->AddRef();
+  return xADM;
+}
+
+// static
+bool AudioDeviceModuleImpl::Destroy(AudioDeviceModuleImpl* adm) {
+  if (!adm)
+    return false;
+  int32_t count = adm->Release();
+  if (count != 0) {
+    return false;
+  } else {
+    delete adm;
+    return true;
+  }
+}
+
+AudioDeviceModuleImpl::AudioDeviceModuleImpl() :
+  _ref_count(0) {
+}
+
+AudioDeviceModuleImpl::~AudioDeviceModuleImpl() {
+}
+
+int32_t AudioDeviceModuleImpl::AddRef() {
+  return ++_ref_count;
+}
+
+int32_t AudioDeviceModuleImpl::Release() {
+  // Avoid self destruction in this mock implementation.
+  // Ensures that we can always check the reference counter while alive.
+  return --_ref_count;
+}
+
+// ----------------------------------------------------------------------------
+//  External transport (Transport) implementations:
+// ----------------------------------------------------------------------------
+
+ExtendedTestTransport::ExtendedTestTransport(VoENetwork* ptr) :
+  myNetw(ptr), _thread(NULL), _lock(NULL), _event(NULL), _length(0), _channel(0) {
+  const char* threadName = "voe_extended_test_external_thread";
+  _lock = CriticalSectionWrapper::CreateCriticalSection();
+  _event = EventWrapper::Create();
+  _thread = ThreadWrapper::CreateThread(Run, this, kHighPriority, threadName);
+  if (_thread) {
+    unsigned int id;
+    _thread->Start(id);
+  }
+}
+
+ExtendedTestTransport::~ExtendedTestTransport() {
+  if (_thread) {
+    _thread->SetNotAlive();
+    _event->Set();
+    if (_thread->Stop()) {
+      delete _thread;
+      _thread = NULL;
+      delete _event;
+      _event = NULL;
+      delete _lock;
+      _lock = NULL;
+    }
+  }
+}
+
+bool ExtendedTestTransport::Run(void* ptr) {
+  return static_cast<ExtendedTestTransport*> (ptr)->Process();
+}
+
+bool ExtendedTestTransport::Process() {
+  switch (_event->Wait(500)) {
+    case kEventSignaled:
+      _lock->Enter();
+      myNetw->ReceivedRTPPacket(_channel, _packetBuffer, _length);
+      _lock->Leave();
+      return true;
+    case kEventTimeout:
+      return true;
+    case kEventError:
+      break;
+  }
+  return true;
+}
+
+int ExtendedTestTransport::SendPacket(int channel, const void *data, int len) {
+  _lock->Enter();
+  if (len < 1612) {
+    memcpy(_packetBuffer, (const unsigned char*) data, len);
+    _length = len;
+    _channel = channel;
+  }
+  _lock->Leave();
+  _event->Set(); // triggers ReceivedRTPPacket() from worker thread
+  return len;
+}
+
+int ExtendedTestTransport::SendRTCPPacket(int channel, const void *data, int len) {
+  myNetw->ReceivedRTCPPacket(channel, data, len);
+  return len;
+}
+
+XTransport::XTransport(VoENetwork* netw, VoEFile* file) :
+  _netw(netw), _file(file) {
+}
+
+int XTransport::SendPacket(int channel, const void *data, int len) {
+  // loopback
+  // _netw->ReceivedRTPPacket(channel, data, len);
+
+  return 0;
+}
+
+int XTransport::SendRTCPPacket(int, const void *, int) {
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoERTPObserver
+// ----------------------------------------------------------------------------
+
+XRTPObserver::XRTPObserver() :
+  _SSRC(0) {
+}
+
+XRTPObserver::~XRTPObserver() {
+}
+
+void XRTPObserver::OnIncomingCSRCChanged(const int /*channel*/, const unsigned int /*CSRC*/,
+                                         const bool /*added*/) {
+}
+
+void XRTPObserver::OnIncomingSSRCChanged(const int /*channel*/, const unsigned int SSRC) {
+  // char msg[128];
+  // sprintf(msg, "OnIncomingSSRCChanged(channel=%d, SSRC=%lu)\n",
+  //        channel, SSRC);
+  // TEST_LOG(msg);
+
+  _SSRC = SSRC; // skip channel dependency for simplicity
+
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::PrepareTest(const char* str) const {
+  TEST_LOG("\n\n================================================\n");
+  TEST_LOG("\tExtended *%s* Test\n", str);
+  TEST_LOG("================================================\n\n");
+
+  return 0;
+}
+
+int VoEExtendedTest::TestPassed(const char* str) const {
+  TEST_LOG("\n\n------------------------------------------------\n");
+  TEST_LOG("\tExtended *%s* test passed!\n", str);
+  TEST_LOG("------------------------------------------------\n\n");
+
+  return 0;
+}
+
+void VoEExtendedTest::OnPeriodicDeadOrAlive(const int /*channel*/, const bool alive) {
+  _alive = alive;
+  if (alive) {
+    TEST_LOG("=> ALIVE ");
+  } else {
+    TEST_LOG("=> DEAD ");
+  }
+  fflush(NULL);
+}
+
+void VoEExtendedTest::CallbackOnError(const int errCode, int) {
+  _errCode = errCode;
+  TEST_LOG("\n************************\n");
+  TEST_LOG(" RUNTIME ERROR: %d \n", errCode);
+  TEST_LOG("************************\n");
+}
+
+VoEExtendedTest::VoEExtendedTest(VoETestManager& mgr) :
+  _mgr(mgr) {
+  for (int i = 0; i < 32; i++) {
+    _listening[i] = false;
+    _playing[i] = false;
+    _sending[i] = false;
+  }
+}
+
+VoEExtendedTest::~VoEExtendedTest() {
+}
+
+void VoEExtendedTest::StartMedia(int channel, int rtpPort, bool listen,
+                                 bool playout, bool send) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+
+  _listening[channel] = false;
+  _playing[channel] = false;
+  _sending[channel] = false;
+
+  voe_base_->SetLocalReceiver(channel, rtpPort);
+  voe_base_->SetSendDestination(channel, rtpPort, "127.0.0.1");
+  if (listen) {
+    _listening[channel] = true;
+    voe_base_->StartReceive(channel);
+  }
+  if (playout) {
+    _playing[channel] = true;
+    voe_base_->StartPlayout(channel);
+  }
+  if (send) {
+    _sending[channel] = true;
+    voe_base_->StartSend(channel);
+  }
+}
+
+void VoEExtendedTest::StopMedia(int channel) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+
+  if (_listening[channel]) {
+    _listening[channel] = false;
+    voe_base_->StopReceive(channel);
+  }
+  if (_playing[channel]) {
+    _playing[channel] = false;
+    voe_base_->StopPlayout(channel);
+  }
+  if (_sending[channel]) {
+    _sending[channel] = false;
+    voe_base_->StopSend(channel);
+  }
+}
+
+void VoEExtendedTest::Play(int channel, unsigned int timeMillisec, bool addFileAsMicrophone,
+                           bool addTimeMarker) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+
+  voe_base_->StartPlayout(channel);
+  TEST_LOG("[playing]");
+  fflush(NULL);
+  if (addFileAsMicrophone) {
+    file->StartPlayingFileAsMicrophone(channel, _mgr.AudioFilename(), true, true);
+    TEST_LOG("[file as mic]");
+    fflush(NULL);
+  }
+  if (addTimeMarker) {
+    float dtSec = (float) ((float) timeMillisec / 1000.0);
+    TEST_LOG("[dT=%.1f]", dtSec);
+    fflush(NULL); // print sleep time in seconds
+  }
+  SLEEP(timeMillisec);
+  voe_base_->StopPlayout(channel);
+  file->StopPlayingFileAsMicrophone(channel);
+}
+
+void VoEExtendedTest::Sleep(unsigned int timeMillisec, bool addMarker) {
+  if (addMarker) {
+    float dtSec = (float) ((float) timeMillisec / 1000.0);
+    TEST_LOG("[dT=%.1f]", dtSec); // print sleep time in seconds
+  }
+  ::Sleep(timeMillisec);
+}
+
+int VoEExtendedTest::TestBase() {
+#ifndef _WIN32
+  // Sleep a bit instead when pause not supported
+#undef PAUSE
+#define PAUSE SLEEP(2000);
+#endif
+
+  PrepareTest("Base");
+
+  // TODO(qhogpat): make this an actual instance variable. I think the
+  // macro black magic will make more sense then. This is named like an
+  // instance variable since it is required in order to appease the
+  // gods of darkness.
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoENetwork* netw = _mgr.NetworkPtr();
+#ifdef _TEST_RTP_RTCP_
+  VoERTP_RTCP* rtp = _mgr.RTP_RTCPPtr();
+#endif
+
+  //////////////////////////
+  // SetTraceFileName
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST(SetTraceFileName - SetDebugTraceFileName); ANL();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(NULL)); MARK();
+  // don't use these files
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(""
+              "VoEBase_trace_dont_use.txt"))); MARK();
+  // use these instead
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(""
+              "VoEBase_trace.txt"))); MARK();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStream |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo)); MARK();
+
+  ANL(); AOK(); ANL(); ANL();
+#endif
+
+  ///////////////////////////////////////
+  // RegisterVoiceEngineObserver
+  // DeRegisterVoiceEngineObserver
+  TEST(SetObserver);
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->RegisterVoiceEngineObserver(*this));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->DeRegisterVoiceEngineObserver());
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  /////////////////////
+  // GetVersion
+  TEST(GetVersion);
+  ANL();
+
+  char version[1024];
+  // audio device module and AudioProcessing fail to GetVersion when they
+  // are not initialized
+  TEST_MUSTPASS(voe_base_->GetVersion(version));
+  MARK();
+  TEST_LOG("\n-----\n%s\n-----\n", version);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  ///////////////
+  // Init
+  TEST(Init);
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK();
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK();
+  // ensure that no new memory is allocated at the second call (check
+  // trace file)
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK();
+  TEST_MUSTPASS(voe_base_->Terminate());
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // verify AEC recording
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK(); // verify output dat-files
+  TEST_MUSTPASS(voe_base_->Terminate());
+#endif
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  ////////////////////
+  // Terminate
+  TEST(Terminate);
+  ANL();
+  TEST_MUSTPASS(voe_base_->Terminate());
+  MARK(); // should be ignored
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->Terminate());
+  MARK(); // should terminate
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> Init(AudioDeviceModule)
+  //
+  // Note that our mock implementation of the ADM also mocks the
+  // reference counting part. This approach enables us to keep track
+  // of the internal reference counter without checking return values
+  // from the ADM and we also avoid the built-in self destruction.
+  //
+  // TODO(henrika): this test does not verify that external ADM methods
+  // are called by the VoiceEngine once registered. We could extend
+  // the mock implementation and add counters for each ADM API to ensure
+  // that they are called in the correct sequence and the correct number
+  // of times.
+  TEST_LOG("\nTesting: Init in combination with an external ADM\n");
+
+  // Create the ADM and call AddRef within the factory method.
+  AudioDeviceModuleImpl* xADM = AudioDeviceModuleImpl::Create();
+  ASSERT_FALSE(xADM == NULL);
+  ASSERT_TRUE(xADM->ReferenceCounter() == 1);
+
+  // Verify default usage case for external ADM.
+  TEST_MUSTPASS(voe_base_->Init(xADM));MARK();
+  ASSERT_TRUE(xADM->ReferenceCounter() == 2);
+  TEST_MUSTPASS(voe_base_->Terminate());
+  ASSERT_TRUE(xADM->ReferenceCounter() == 1);
+
+  // Our reference-count implementation does not self destruct.
+  // We do it manually here instead by calling Release followed by delete.
+  ASSERT_TRUE(AudioDeviceModuleImpl::Destroy(xADM));
+  ANL();
+  AOK();ANL();
+
+  // >> end of Init(AudioDeviceModule)
+  // ------------------------------------------------------------------------
+
+  ///////////////////////////
+  // MaxNumOfChannels
+  TEST(MaxNumOfChannels);
+  ANL();
+  TEST_MUSTPASS(voe_base_->MaxNumOfChannels() < 0);
+  MARK();
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  ////////////////////////
+  // CreateChannel
+  // DeleteChannel
+
+  int i;
+  int channel;
+  int nChannels(voe_base_->MaxNumOfChannels());
+
+  TEST(CreateChannel);
+  ANL();
+  TEST(DeleteChannel);
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  channel = voe_base_->CreateChannel();
+  MARK();
+  TEST_MUSTPASS(channel != 0);
+  channel = voe_base_->CreateChannel();
+  MARK();
+  TEST_MUSTPASS(channel != 1);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  MARK();
+  TEST_MUSTPASS(voe_base_->DeleteChannel(1));
+  MARK();
+
+  // create and delete one channel many times
+  for (i = 0; i < 10; i++) {
+    channel = voe_base_->CreateChannel();
+    MARK();
+    TEST_MUSTPASS(channel != 0); // should be 0 each time
+    TEST_MUSTPASS(voe_base_->DeleteChannel(channel));
+    MARK();
+  }
+  // create max number of channels
+  for (i = 0; i < nChannels; i++) {
+    channel = voe_base_->CreateChannel();
+    MARK();
+    TEST_MUSTPASS(channel != i);
+  }
+  channel = voe_base_->CreateChannel();
+  MARK(); // should fail since no more channels can now be created
+  TEST_MUSTPASS(channel != -1);
+
+  int aChannel = (((nChannels - 17) > 0) ? (nChannels - 17) : 0);
+  TEST_MUSTPASS(voe_base_->DeleteChannel(aChannel));
+  MARK();
+  channel = voe_base_->CreateChannel();
+  MARK(); // should reuse channel
+  TEST_MUSTPASS(channel != aChannel);
+
+  // delete all created channels
+  for (i = 0; i < nChannels; i++) {
+    TEST_MUSTPASS(voe_base_->DeleteChannel(i));
+    MARK();
+  }
+
+  // try to delete a non-existing channel
+  TEST_MUSTPASS(-1 != voe_base_->DeleteChannel(aChannel));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> SetLocalReceiver
+  //
+  // State: VE not initialized, no existing channels
+  TEST_MUSTPASS(voe_base_->Init());
+
+  int ch;
+
+  TEST(SetLocalReceiver);
+  ANL();
+
+  // no channel created yet => should fail
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(0, 100));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+#ifdef MAC_IPHONE
+  printf("\nNOTE: Local IP must be set in source code (line %d) \n",
+      __LINE__ + 1);
+  char* localIp = "127.0.0.1";
+#else
+  char localIp[64] = { 0 };
+  TEST_MUSTPASS(netw->GetLocalIP(localIp));
+  MARK();
+  // NOTE: This API is supported on Win, Mac and Linux and may fail or not
+  // return local IP for other platforms.
+#endif
+
+  // trivial invalid function calls
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch+1, 12345));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch, -1));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR);
+
+  // check conflict with ongoing receiving
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  TEST_ERROR(VE_ALREADY_LISTENING);
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // check conflict with ongoing transmission
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  TEST_ERROR(VE_ALREADY_SENDING);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+
+  // valid function calls
+  // Need to sleep between, otherwise it may fail for unknown reason
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, localIp));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, NULL,
+          "230.1.2.3"));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, localIp,
+          "230.1.2.3"));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, 5555, NULL));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+
+  // STATE: no media, but sockets exist and are bound to 12345 and 12346
+  // respectively
+
+  // Add some dynamic tests as well:
+
+  // ensure that last setting is used (cancels old settings)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 44444));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 54321));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 54321, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  Play(ch, 1000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetLocalReceiver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetLocalReceiver
+  //
+  // State: VE initialized, no existing channels
+  TEST(GetLocalReceiver);
+  ANL();
+
+  int port;
+  char ipaddr[64];
+  int RTCPport;
+
+  ch = voe_base_->CreateChannel();
+
+  // verify non-configured (blank) local receiver
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 0);
+  TEST_MUSTPASS(RTCPport != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // check some trivial set/get combinations
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345))
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 12345);
+  TEST_MUSTPASS(RTCPport != 12346);
+  TEST_MUSTPASS(strcmp(ipaddr, "0.0.0.0") != 0); // now binded to "any" IP
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, 55555))
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 12345);
+  TEST_MUSTPASS(RTCPport != 55555);
+  TEST_MUSTPASS(strcmp(ipaddr, "0.0.0.0") != 0);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, localIp))
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 12345);
+  TEST_MUSTPASS(RTCPport != 12346);
+  TEST_MUSTPASS(strcmp(ipaddr, localIp) != 0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetLocalReceiver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSendDestination
+  //
+  // State: VE initialized, no existing channels
+  TEST(SetSendDestination);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  // trivial fail tests
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 65536, "127.0.0.1"));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR); // invalid RTP port
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", 65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR); // invalid source port
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", kVoEDefault,
+          65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR); // invalid RTCP port
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 12345, "127.0.0.300"));
+  MARK();
+  TEST_ERROR(VE_INVALID_IP_ADDRESS); // invalid IP address
+
+  // sockets must be created first to support multi-cast (not required
+  // otherwise)
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 55555, "230.0.0.1"));
+  MARK();
+  TEST_ERROR(VE_SOCKET_ERROR);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555)); // create sockets
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 55555, "230.0.0.1"));
+  MARK(); // should work now
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  // STATE: one channel created, no sockets exist
+
+  // valid function calls
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1"));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", 44444));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", kVoEDefault,
+          55555));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", 44444,
+          55555));
+  MARK();
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  // create receive sockets first and then an extra pair of send sockets
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 44444));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 44444, "127.0.0.1", 11111));
+  MARK(); // binds to 11111
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetSendDestination
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetSendDestination
+  //
+  // State: VE initialized, no existing channels
+  TEST(GetSendDestination);
+  ANL();
+
+  int sourcePort;
+
+  ch = voe_base_->CreateChannel();
+
+  // verify non-configured (blank) local receiver
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, ipaddr, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 0);
+  TEST_MUSTPASS(sourcePort != 0);
+  TEST_MUSTPASS(RTCPport != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // check some trivial set/get combinations
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 44444, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, ipaddr, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 44444);
+  TEST_MUSTPASS(sourcePort != 0); // should be 0 since a local receiver
+  // has not been defined yet
+  TEST_MUSTPASS(RTCPport != 44445);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555));
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, ipaddr, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 44444);
+  TEST_MUSTPASS(sourcePort != 55555); // should be equal to local port
+  TEST_MUSTPASS(RTCPport != 44445);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 44444, "127.0.0.1"));
+  // NULL as IP-address input should work as well
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, NULL, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 44444);
+  TEST_MUSTPASS(sourcePort != 0);
+  TEST_MUSTPASS(RTCPport != 44445);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetLocalReceiver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> StartReceive
+  // >> StopReceive
+  //
+  // State: VE initialized, no existing channels
+  TEST(StartReceive);
+  ANL();
+  TEST(StopReceive);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->StartReceive(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->StopReceive(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  // sockets must be created first
+  TEST_MUSTPASS(!voe_base_->StartReceive(0));
+  MARK();
+  TEST_ERROR(VE_SOCKETS_NOT_INITED);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  MARK(); // should work this time
+
+  // enable again (should work)
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  MARK();
+
+  // Stop/Start (should work)
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  MARK();
+
+  // Verify in loopback
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 55555, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  Play(ch, 1000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  MARK();
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  // Ensure that it is OK to add delay between SetLocalReceiver and StartReceive
+  TEST_LOG("\nspeak after 2 seconds and ensure that no delay is added:\n");
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555));
+
+  Sleep(2000, true); // adding emulated delay here
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 55555, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  Play(ch, 2000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+  ANL();
+
+  // Multi-channel tests
+
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 11111+2*i));
+    TEST_MUSTPASS(voe_base_->StartReceive(ch));
+    MARK();
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    TEST_MUSTPASS(voe_base_->StopReceive(i));
+    MARK();
+    voe_base_->DeleteChannel(i);
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 11111+2*i));
+    TEST_MUSTPASS(voe_base_->StartReceive(ch));
+    MARK();
+    TEST_MUSTPASS(voe_base_->StopReceive(ch));
+    MARK();
+    voe_base_->DeleteChannel(ch);
+  }
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of StartReceive/StopReceive
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> StartPlayout
+  // >> StopPlayout
+  //
+  // State: VE initialized, no existing channels
+  TEST(StartPlayout);
+  ANL();
+  TEST(StopPlayout);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->StartPlayout(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->StopPlayout(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  MARK();
+
+  voe_base_->DeleteChannel(ch);
+
+  // Multi-channel tests
+  const int MaxNumberOfPlayingChannels(kVoiceEngineMaxNumOfActiveChannels);
+
+  for (i = 0; i < MaxNumberOfPlayingChannels; i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+    MARK();
+  }
+  for (i = 0; i < MaxNumberOfPlayingChannels; i++) {
+    TEST_MUSTPASS(voe_base_->StopPlayout(i));
+    MARK();
+    voe_base_->DeleteChannel(i);
+  }
+  for (i = 0; i < MaxNumberOfPlayingChannels; i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+    MARK();
+    TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+    MARK();
+    voe_base_->DeleteChannel(ch);
+  }
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of StartPlayout/StopPlayout
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> StartSend
+  // >> StopSend
+  //
+  // State: VE initialized, no existing channels
+  TEST(StartSend);
+  ANL();
+  TEST(StopSend);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->StartSend(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->StopSend(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  // call without initialized destination
+  TEST_MUSTPASS(!voe_base_->StartSend(ch));
+  MARK();
+  TEST_ERROR(VE_DESTINATION_NOT_INITED);
+
+  // initialize destination and try again (should work even without existing
+  // sockets)
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  MARK();
+  SLEEP(100);
+
+  // STATE: sockets should now have been created automatically at the first
+  // transmitted packet, and should be bound to 33333 and "0.0.0.0"
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  MARK();
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+
+  // try loopback with unique send sockets (closed when channel is deleted or
+  // new source is set)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 33333));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", 44444));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  Play(ch, 2000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+  ANL();
+
+  // Multi-channel tests
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 33333 + 2*i));
+    TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333 + 2*i, "127.0.0.1"));
+    TEST_MUSTPASS(voe_base_->StartSend(ch));
+    MARK();
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    TEST_MUSTPASS(voe_base_->StopSend(i));
+    MARK();
+    voe_base_->DeleteChannel(i);
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 45633 + 2*i));
+    TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 45633 + 2*i, "127.0.0.1"));
+    TEST_MUSTPASS(voe_base_->StartSend(ch));
+    MARK();
+    TEST_MUSTPASS(voe_base_->StopSend(ch));
+    MARK();
+    voe_base_->DeleteChannel(ch);
+  }
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of StartSend/StopSend
+  // ------------------------------------------------------------------------
+
+  //////////////////////////////
+  // SetNetEQPlayoutMode
+  // GetNetEQPlayoutMode
+  TEST(SetNetEQPlayoutMode);
+  ANL();
+  TEST(GetNetEQPlayoutMode);
+  ANL();
+
+  NetEqModes mode;
+
+  ch = voe_base_->CreateChannel();
+
+  // invalid function calls (should fail)
+  TEST_MUSTPASS(!voe_base_->GetNetEQPlayoutMode(ch+1, mode));
+  MARK();
+  TEST_MUSTPASS(!voe_base_->SetNetEQPlayoutMode(ch+1, kNetEqDefault));
+  MARK();
+
+  // verify default mode (should be kNetEqDefault)
+  TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(ch, mode));
+  MARK();
+  TEST_MUSTPASS(mode != kNetEqDefault);
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqStreaming));
+  MARK();
+  voe_base_->DeleteChannel(ch);
+
+  // ensure that default mode is set as soon as new channel is created
+  ch = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(ch, mode));
+  MARK();
+  TEST_MUSTPASS(mode != kNetEqDefault);
+  voe_base_->DeleteChannel(ch);
+
+  // verify Set/Get for all supported modes and max number of channels
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+
+    // verify Set/Get for all supported modes
+    TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(i, kNetEqDefault));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(i, mode));
+    MARK();
+    TEST_MUSTPASS(mode != kNetEqDefault);
+    TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(i, kNetEqStreaming));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(i, mode));
+    MARK();
+    TEST_MUSTPASS(mode != kNetEqStreaming);
+    TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(i, kNetEqFax));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(i, mode));
+    MARK();
+    TEST_MUSTPASS(mode != kNetEqFax);
+    SLEEP(50);
+  }
+
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    voe_base_->DeleteChannel(i);
+  }
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  //////////////////////////////
+  // SetNetEQBGNMode
+  // GetNetEQBGNMode
+  TEST(SetNetEQBGNMode);
+  ANL();
+  TEST(GetNetEQBGNMode);
+  ANL();
+
+  NetEqBgnModes bgnMode;
+
+  ch = voe_base_->CreateChannel();
+
+  // invalid function calls (should fail)
+  TEST_MUSTPASS(!voe_base_->GetNetEQBGNMode(ch+1, bgnMode));
+  MARK();
+  TEST_MUSTPASS(!voe_base_->SetNetEQBGNMode(ch+1, kBgnOn));
+  MARK();
+
+  // verify default mode (should be kBgnOn)
+  TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(ch, bgnMode));
+  MARK();
+  TEST_MUSTPASS(bgnMode != kBgnOn);
+  voe_base_->DeleteChannel(ch);
+
+  // ensure that default mode is set as soon as new channel is created
+  ch = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(ch, bgnMode));
+  MARK();
+  TEST_MUSTPASS(bgnMode != kBgnOn);
+  voe_base_->DeleteChannel(ch);
+
+  // verify Set/Get for all supported modes and max number of channels
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+
+    // verify Set/Get for all supported modes
+    TEST_MUSTPASS(voe_base_->SetNetEQBGNMode(i, kBgnOn));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(i, bgnMode));
+    MARK();
+    TEST_MUSTPASS(bgnMode != kBgnOn);
+    TEST_MUSTPASS(voe_base_->SetNetEQBGNMode(i, kBgnFade));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(i, bgnMode));
+    MARK();
+    TEST_MUSTPASS(bgnMode != kBgnFade);
+    TEST_MUSTPASS(voe_base_->SetNetEQBGNMode(i, kBgnOff));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(i, bgnMode));
+    MARK();
+    TEST_MUSTPASS(bgnMode != kBgnOff);
+    SLEEP(50);
+  }
+
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    voe_base_->DeleteChannel(i);
+  }
+
+  // Verify real-time performance for all playout modes in full duplex
+
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqDefault));
+  MARK();
+  TEST_LOG("\nenjoy full duplex using kNetEqDefault playout mode...\n");
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqStreaming));
+  MARK();
+  TEST_LOG("\nenjoy full duplex using kNetEqStreaming playout mode...\n");
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqFax));
+  MARK();
+  TEST_LOG("\nenjoy full duplex using kNetEqFax playout mode...\n");
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  /////////////////////
+  // Full duplex tests
+
+  ch = voe_base_->CreateChannel(); // We must delete this channel first to be able
+  // to reuse port 12345
+
+  // start with default case, also test non-default RTCP port
+#ifdef _TEST_RTP_RTCP_
+  TEST_MUSTPASS(rtp->SetRTCP_CNAME(ch, "Johnny"));
+#endif
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, 12349));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", kVoEDefault,
+          12349));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("full duplex is now activated (1)\n");
+  TEST_LOG("waiting for RTCP packet...\n");
+
+  SLEEP(7000); // Make sure we get RTCP packet
+  PAUSE;
+
+  // Verify that we got RTCP packet from correct source port
+#ifdef _TEST_RTP_RTCP_
+  char tmpStr[64] = { 0 };
+  TEST_MUSTPASS(rtp->GetRemoteRTCP_CNAME(ch, tmpStr));
+  TEST_MUSTPASS(_stricmp("Johnny", tmpStr));
+#endif
+  int rtpPort(0), rtcpPort(0);
+  char ipAddr[64] = { 0 };
+  TEST_MUSTPASS(netw->GetSourceInfo(ch, rtpPort, rtcpPort, ipAddr));
+  TEST_MUSTPASS(12349 != rtcpPort);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // Call StartSend before StartReceive
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (2)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // Try again using same ports
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (3)\n");
+  TEST_LOG("waiting for RTCP packet...\n");
+
+  SLEEP(7000); // Make sure we get RTCP packet
+  PAUSE
+
+  // Verify correct RTCP source port
+  TEST_MUSTPASS(netw->GetSourceInfo(ch, rtpPort, rtcpPort, ipAddr));
+  TEST_MUSTPASS(12345+1 != rtcpPort);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+
+  // Try with extra send socket
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 22222));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 22222, "127.0.0.1", 11111));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (4)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // repeat default case starting with a fresh channel
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (5)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // restart call again
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+
+  TEST_LOG("\nfull duplex is now activated (6)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // force sending from new socket
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", 12350,
+          12359));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_LOG("\nfull duplex is now activated (7)\n");
+
+  PAUSE
+
+  // Test getting send settings
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, rtpPort, ipAddr, sourcePort,
+          rtcpPort));
+  TEST_MUSTPASS(12345 != rtpPort);
+  TEST_MUSTPASS(_stricmp("127.0.0.1", ipAddr));
+  TEST_MUSTPASS(12350 != sourcePort);
+  TEST_MUSTPASS(12359 != rtcpPort);
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // new channel and new port
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 33221));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33221, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+
+  TEST_LOG("\nfull duplex is now activated (8)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+
+#ifndef MAC_IPHONE
+  // bind to local IP and try again
+  strcpy(localIp, "127.0.0.1");
+#else
+  localIp = "127.0.0.1";
+#endif
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 33221, 12349, localIp));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33221, localIp));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+
+  TEST_LOG("\nfull duplex is now activated (9)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, rtpPort, rtcpPort, ipAddr));
+  TEST_MUSTPASS(33221 != rtpPort);
+  TEST_MUSTPASS(_stricmp(localIp, ipAddr));
+  TEST_MUSTPASS(12349 != rtcpPort);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  //////////////////////
+  // Trace filter tests
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST(SetTraceFilter); ANL();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(""
+              "VoEBase_trace_filter.txt"))); MARK();
+  SLEEP(100);
+
+  // Test a few different filters, verify in trace file
+  // Each SetTraceFilter calls should be seen once, no more, no less
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceNone)); MARK();
+  SLEEP(300);
+  // API call and info should NOT be seen in log
+  TEST_MUSTPASS(voe_base_->SetOnHoldStatus(0, true)); MARK();
+  // API call and error should NOT be seen in log
+  TEST_MUSTPASS(!voe_base_->SetOnHoldStatus(999, true)); MARK();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceApiCall |
+          kTraceCritical |
+          kTraceError |
+          kTraceWarning)); MARK();
+  SLEEP(300);
+  // API call should and info should NOT be seen in log
+  TEST_MUSTPASS(voe_base_->SetOnHoldStatus(0, false)); MARK();
+  // API call and error should be seen in log
+  TEST_MUSTPASS(!voe_base_->SetOnHoldStatus(999, true)); MARK();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceApiCall | kTraceInfo));
+  MARK();
+  SLEEP(300);
+  // API call and info should be seen in log
+  TEST_MUSTPASS(voe_base_->SetOnHoldStatus(0, true)); MARK();
+  // API call should and error should NOT be seen in log
+  TEST_MUSTPASS(!voe_base_->SetOnHoldStatus(999, true)); MARK();
+
+  // Back to default
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceAll)); MARK();
+  SLEEP(300);
+
+  AOK(); ANL();
+#endif
+
+  // ------------------------------------------------------------------------
+  // >> Multiple instance testing
+  //
+  // We should support 8 instances simultaneously
+  // and at least one should be able to have a call running
+
+  // One instance is already created
+  VoiceEngine* instVE[7];
+  VoEBase* baseVE[7];
+  for (int instNum = 0; instNum < 7; instNum++) {
+    instVE[instNum] = VoiceEngine::Create();
+    baseVE[instNum] = VoEBase::GetInterface(instVE[instNum]);
+    TEST_MUSTPASS(baseVE[instNum]->Init());
+    TEST_MUSTPASS(baseVE[instNum]->CreateChannel());
+  }
+
+  TEST_LOG("Created 7 more instances of VE, make sure audio is ok...\n\n");
+  PAUSE
+
+  for (int instNum = 0; instNum < 7; instNum++) {
+    TEST_MUSTPASS(baseVE[instNum]->DeleteChannel(0));
+    TEST_MUSTPASS(baseVE[instNum]->Terminate());
+    TEST_MUSTPASS(baseVE[instNum]->Release());
+    VoiceEngine::Delete(instVE[instNum]);
+  }
+
+  AOK();
+  ANL();
+
+  //////////////
+  // Close down
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  voe_base_->DeleteChannel(0);
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestCallReport
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestCallReport() {
+  // Exercises the VoECallReport sub-API over a single-channel 127.0.0.1
+  // loopback call: statistics reset, echo-metric summary, dead-or-alive
+  // summary, and writing the call report to a file (including a UTF-8
+  // file name). Returns 0 on success, -1 if the CallReport interface is
+  // not available in this build; any failed TEST_MUSTPASS aborts the run.
+
+  // Get required sub-API pointers from the shared sub-API manager.
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoECallReport* report = _mgr.CallReportPtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoEAudioProcessing* apm = _mgr.APMPtr();
+  VoENetwork* netw = _mgr.NetworkPtr();
+
+  PrepareTest("CallReport");
+
+  // Check if this interface is supported; bail out early when the build
+  // does not include VoECallReport.
+  if (!report) {
+    TEST_LOG("VoECallReport is not supported!");
+    return -1;
+  }
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename("VoECallReport_trace.txt")));
+  // Enable all trace levels used by this test (a duplicated
+  // kTraceStateInfo flag was removed from this mask; ORing it twice
+  // had no effect).
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  // Set up the loopback call on channel 0 and play a file as microphone
+  // input so there is live audio for the report statistics to observe.
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  ///////////////////////////
+  // Actual test starts here
+  TEST(ResetCallReportStatistics);
+  ANL();
+  TEST_MUSTPASS(!report->ResetCallReportStatistics(-2));
+  MARK(); // not OK: invalid channel id
+  TEST_MUSTPASS(!report->ResetCallReportStatistics(1));
+  MARK(); // not OK: channel 1 was never created
+  TEST_MUSTPASS(report->ResetCallReportStatistics(0));
+  MARK(); // OK: existing channel
+  TEST_MUSTPASS(report->ResetCallReportStatistics(-1));
+  MARK(); // OK (-1 presumably addresses all channels — verify against API)
+  AOK();
+  ANL();
+
+  bool enabled = false;
+  EchoStatistics echo;
+  TEST(GetEchoMetricSummary);
+  ANL();
+  // Echo metrics are expected to be off by default; enable them before
+  // fetching a summary.
+  TEST_MUSTPASS(apm->GetEcMetricsStatus(enabled));
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(true));
+  TEST_MUSTPASS(report->GetEchoMetricSummary(echo)); // all outputs will be
+  // -100 in loopback (skip further tests)
+  AOK();
+  ANL();
+
+  // TODO(xians): investigate the cause of test failure before enabling.
+  /*
+   StatVal delays;
+   TEST(GetRoundTripTimeSummary);
+   ANL();
+   // All values should be >=0 since RTCP is now on
+   TEST_MUSTPASS(report->GetRoundTripTimeSummary(0, delays));
+   MARK();
+   TEST_MUSTPASS(delays.min == -1);
+   TEST_MUSTPASS(delays.max == -1);
+   TEST_MUSTPASS(delays.average == -1);
+   rtp_rtcp->SetRTCPStatus(0, false);
+   // All values should be -1 since RTCP is off
+   TEST_MUSTPASS(report->GetRoundTripTimeSummary(0, delays));
+   MARK();
+   TEST_MUSTPASS(delays.min != -1);
+   TEST_MUSTPASS(delays.max != -1);
+   TEST_MUSTPASS(delays.average != -1);
+   rtp_rtcp->SetRTCPStatus(0, true);
+   AOK();
+   ANL();
+   */
+
+  int nDead = 0;
+  int nAlive = 0;
+  TEST(GetDeadOrAliveSummary);
+  ANL();
+  // All results should be -1 since dead-or-alive is not active
+  TEST_MUSTPASS(report->GetDeadOrAliveSummary(0, nDead, nAlive) != -1);
+  MARK();
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 1));
+  SLEEP(2000); // give the periodic detector time to produce results
+  // All results should be >= 0 since dead-or-alive is active
+  TEST_MUSTPASS(report->GetDeadOrAliveSummary(0, nDead, nAlive));
+  MARK();
+  TEST_MUSTPASS(nDead == -1);
+  TEST_MUSTPASS(nAlive == -1);  // semicolon was missing here
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  AOK();
+  ANL();
+
+  TEST(WriteReportToFile);
+  ANL();
+
+  // Greek and Coptic (see http://www.utf8-chartable.de/unicode-utf8-table.pl)
+  // Build "κλνξ.txt" byte-by-byte as raw UTF-8 to verify that non-ASCII
+  // file names are accepted.
+  char fileNameUTF8[64];
+
+  fileNameUTF8[0] = (char) 0xce;
+  fileNameUTF8[1] = (char) 0xba;
+  fileNameUTF8[2] = (char) 0xce;
+  fileNameUTF8[3] = (char) 0xbb;
+  fileNameUTF8[4] = (char) 0xce;
+  fileNameUTF8[5] = (char) 0xbd;
+  fileNameUTF8[6] = (char) 0xce;
+  fileNameUTF8[7] = (char) 0xbe;
+  fileNameUTF8[8] = '.';
+  fileNameUTF8[9] = 't';
+  fileNameUTF8[10] = 'x';
+  fileNameUTF8[11] = 't';
+  fileNameUTF8[12] = 0;
+
+  TEST_MUSTPASS(!report->WriteReportToFile(NULL));
+  MARK(); // a NULL file name must be rejected
+  TEST_MUSTPASS(report->WriteReportToFile("call_report.txt"));
+  MARK();
+  TEST_MUSTPASS(report->WriteReportToFile(fileNameUTF8));
+  MARK(); // should work with UTF-8 as well (κλνξ.txt)
+  AOK();
+  ANL();
+
+  // Tear down the loopback call and release the engine.
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestCodec
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestCodec() {
+  PrepareTest("Codec");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEFile* file = _mgr.FilePtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename("VoECodec_trace.txt")));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  ExtendedTestTransport* ptrTransport(NULL);
+  ptrTransport = new ExtendedTestTransport(netw);
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+#else
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+#endif
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  int i;
+  int err;
+
+  CodecInst cinst;
+
+  /////////////////////////
+  // GetNumOfCodecs
+
+  int nCodecs;
+
+  TEST(GetNumOfCodecs);
+  ANL();
+  // validate #codecs
+  nCodecs = codec->NumOfCodecs();
+  MARK();
+  TEST_MUSTPASS(nCodecs < 0);
+  AOK();
+  ANL();
+
+  ///////////////////
+  // GetCodec
+  TEST(GetCodec);
+  ANL();
+  // scan all supported codecs
+  nCodecs = codec->NumOfCodecs();
+  for (int index = 0; index < nCodecs; index++) {
+    TEST_MUSTPASS(codec->GetCodec(index, cinst));
+    TEST_LOG("[%2d] %16s: fs=%6d, pt=%4d, rate=%7d, ch=%2d, size=%5d", index, cinst.plname,
+             cinst.plfreq, cinst.pltype, cinst.rate, cinst.channels, cinst.pacsize);
+    if (cinst.pltype == -1) {
+      TEST_LOG(" <= NOTE pt=-1\n");
+    } else {
+      ANL();
+    }
+  }
+
+  // ensure that an invalid index parameter is detected
+  TEST_MUSTPASS(-1 != codec->GetCodec(-1, cinst));
+  nCodecs = codec->NumOfCodecs();
+  TEST_MUSTPASS(-1 != codec->GetCodec(nCodecs, cinst));
+  MARK();
+  // ensure that error code is VE_INVALID_LISTNR
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_LISTNR);
+  AOK();
+  ANL();
+
+  ///////////////////////
+  // GetSendCodec
+  TEST(GetSendCodec);
+  ANL();
+
+  CodecInst defaultCodec;
+
+  // check the channel parameter
+  int nMaxChannels(voe_base_->MaxNumOfChannels());
+  TEST_MUSTPASS(-1 != codec->GetSendCodec(nMaxChannels-1, cinst));
+  MARK(); // not created
+  TEST_MUSTPASS(-1 != codec->GetSendCodec(nMaxChannels, cinst));
+  MARK(); // out of range
+  TEST_MUSTPASS(-1 != codec->GetSendCodec(-1, cinst));
+  MARK(); // out of range
+  TEST_MUSTPASS(codec->GetSendCodec(0, cinst));
+  MARK(); // OK
+
+  nCodecs = codec->NumOfCodecs();
+  for (int index = 0; index < nCodecs; index++) {
+    TEST_MUSTPASS(codec->GetCodec(index, defaultCodec));
+    if (codec->SetSendCodec(0, defaultCodec) == 0) {
+      TEST_MUSTPASS(codec->GetSendCodec(0, cinst));
+      MARK();
+      //TEST_LOG("[%2d] %s: fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+      // index, cinst.plname, cinst.plfreq, cinst.pltype, cinst.rate,
+      // cinst.channels, cinst.pacsize);
+      TEST_MUSTPASS(cinst.pacsize != defaultCodec.pacsize);
+      TEST_MUSTPASS(cinst.plfreq != defaultCodec.plfreq);
+      TEST_MUSTPASS(cinst.pltype != defaultCodec.pltype);
+      TEST_MUSTPASS(cinst.rate != defaultCodec.rate);
+      TEST_MUSTPASS(cinst.channels != defaultCodec.channels);
+    }
+  }
+
+  ANL();
+  AOK();
+  ANL();
+
+  ///////////////////////
+  // SetSendCodec
+  TEST(SetSendCodec);
+  ANL();
+
+  // --- Scan all supported codecs and set default parameters
+
+  nCodecs = codec->NumOfCodecs();
+  for (int index = 0; index < nCodecs; index++) {
+    // Get default (ACM) settings
+    TEST_MUSTPASS(codec->GetCodec(index, cinst));
+    defaultCodec = cinst;
+    TEST_LOG("[%2d] %s (default): fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+             index, cinst.plname, cinst.plfreq, cinst.pltype, cinst.rate,
+             cinst.channels, cinst.pacsize);
+
+    // Verify invalid codec names
+    if (!_stricmp("CN", cinst.plname) || !_stricmp("telephone-event",
+                                                   cinst.plname)
+        || !_stricmp("red", cinst.plname)) {
+      // default settings for invalid payload names (should give
+      // VE_INVALID_PLNAME)
+      TEST_MUSTPASS(!codec->SetSendCodec(0, cinst));
+      err = voe_base_->LastError();
+      TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      continue;
+    }
+
+    // If we build the ACM with more codecs than we have payload types,
+    // some codecs will be given -1 as default payload type. This is a fix
+    // to ensure that we can complete these tests also for this case.
+    if (cinst.pltype == -1) {
+      cinst.pltype = 97;
+    }
+
+    // --- Default settings
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // --- Packet size
+    TEST_LOG("\npacsize : ");
+
+    for (int pacsize = 80; pacsize < 1440; pacsize += 80) {
+      cinst.pacsize = pacsize;
+      if (-1 != codec->SetSendCodec(0, cinst)) {
+        // log valid packet size
+        TEST_LOG("%d ", pacsize);
+      } else {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+    }
+    cinst.pacsize = defaultCodec.pacsize;
+
+    // --- Audio channels (1/mono or 2/stereo)
+    TEST_LOG("\nchannels: ");
+    for (int channels = 1; channels < 4; channels++) {
+      cinst.channels = channels;
+      if (-1 != codec->SetSendCodec(0, cinst)) {
+        // valid channels (only 1 should be OK)
+        TEST_LOG("%d ", channels);
+      } else {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+    }
+    cinst.channels = defaultCodec.channels;
+
+    // --- Payload frequency
+    TEST_LOG("\nplfreq  : ");
+    cinst.plfreq = defaultCodec.plfreq;
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+    TEST_LOG("%d ", cinst.plfreq);
+
+    // --- Payload name
+
+    strcpy(cinst.plname, "INVALID");
+    TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+    {
+      // ensure that error code is VE_INVALID_PLNAME
+      err = voe_base_->LastError();
+      TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+    }
+
+    // restore default plname
+    strcpy(cinst.plname, defaultCodec.plname);
+
+    // --- Payload type (dynamic range is 96-127)
+    TEST_LOG("\npltype  : ");
+    // All PT should be OK, test a few different
+    cinst.pltype = defaultCodec.pltype;
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+    TEST_LOG("%d ", cinst.pltype);
+    cinst.pltype = defaultCodec.pltype + 1;
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+    TEST_LOG("%d ", cinst.pltype);
+    const int valid_pltypes[4] = { 0, 96, 117, 127 };
+    for (i = 0; i < static_cast<int> (sizeof(valid_pltypes) / sizeof(int)); i++) {
+      cinst.pltype = valid_pltypes[i];
+      TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+      TEST_LOG("%d ", cinst.pltype);
+    }
+    // Restore default
+    cinst.pltype = defaultCodec.pltype;
+
+    // --- Codec rate
+    TEST_LOG("\nrate    : ");
+    if (_stricmp("isac", cinst.plname) == 0) {
+      // ISAC
+      if (cinst.plfreq == 16000) {
+        int valid_rates[3] = { -1, 10000, 32000 };
+        // failed in RegisterPayload when rate is 32000
+        for (i = 0; i < static_cast<int> (sizeof(valid_rates) / sizeof(int)); i++) {
+          cinst.rate = valid_rates[i];
+          TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+          TEST_LOG("%d ", cinst.rate);
+        }
+        cinst.rate = 0; // invalid
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+        {
+          // ensure that error code is VE_CANNOT_SET_SEND_CODEC
+          err = voe_base_->LastError();
+          TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+        }
+        ANL();
+      } else //ISACSWB
+      {
+        // rate changing fails in RegisterPayload
+        int valid_rates[8] = { -1, 10000, 25000, 32000, 35000, 45000, 50000, 52000 };
+        for (i = 0; i < static_cast<int> (sizeof(valid_rates) / sizeof(int)); i++) {
+          cinst.rate = valid_rates[i];
+          TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+          TEST_LOG("%d ", cinst.rate);
+        }
+        int invalid_rates[3] = { 0, 5000, 57000 }; // invalid
+        for (i = 0; i < static_cast<int> (sizeof(invalid_rates) / sizeof(int)); i++) {
+          cinst.rate = invalid_rates[i];
+          TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+          {
+            // ensure that error code is VE_CANNOT_SET_SEND_CODEC
+            err = voe_base_->LastError();
+            TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+          }
+        }
+        ANL();
+      }
+    } else if (_stricmp("amr", cinst.plname) == 0) {
+      int valid_rates[8] = { 4750, 5150, 5900, 6700, 7400, 7950, 10200, 12200 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      ANL();
+    } else if (_stricmp("g7291", cinst.plname) == 0) {
+      int valid_rates[12] = { 8000, 12000, 14000, 16000, 18000, 20000, 22000,
+                              24000, 26000, 28000, 30000, 32000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      ANL();
+    } else if (_stricmp("amr-wb", cinst.plname) == 0) {
+      int valid_rates[9] = { 7000, 9000, 12000, 14000, 16000, 18000, 20000,
+                             23000, 24000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      TEST_LOG(" <=> ");
+      ANL();
+    } else if (_stricmp("speex", cinst.plname) == 0) {
+      // Valid speex rates are > 2000, testing some of them here
+      int valid_rates[9] = { 2001, 4000, 7000, 11000, 15000, 20000, 25000,
+          33000, 46000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      cinst.rate = 2000; // invalid
+      TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+      {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+      ANL();
+    } else if (_stricmp("silk", cinst.plname) == 0) {
+      // Valid Silk rates are 6000 - 40000, listing some of them here
+      int valid_rates[7] = { 6000, 10000, 15000, 20000, 25000, 32000, 40000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      cinst.rate = 5999; // invalid
+      TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+      {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+      cinst.rate = 40001; // invalid
+      TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+      {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+      ANL();
+    } else {
+      // Use default rate for all other codecs.
+      cinst.rate = defaultCodec.rate;
+      TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+      TEST_LOG("%d ", cinst.rate);
+      cinst.rate = defaultCodec.rate + 17;
+      TEST_MUSTPASS(!codec->SetSendCodec(0, cinst));
+      err = voe_base_->LastError();
+      TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      ANL();
+    }
+    cinst.rate = defaultCodec.rate;
+
+    // run some extra tests for L16
+    if (_stricmp("l16", cinst.plname) == 0) {
+      if (8000 == cinst.plfreq) {
+        // valid pacsizes: 80, 160, 240, 320
+        cinst.pacsize = 480; // only supported in combination with 16kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+        cinst.pacsize = 640; // only supported in combination with 16kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      } else {
+        // valid pacsizes: 160, 320, 480, 640
+        cinst.pacsize = 80; // only supported in combination with 8kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+        cinst.pacsize = 240; // only supported in combination with 8kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+    }
+    ANL();
+  } // for (int index = 0; index < nCodecs; index++)
+
+  // restore PCMU
+  const CodecInst tmp = { 0, "PCMU", 8000, 160, 1, 64000 };
+  TEST_MUSTPASS(codec->SetSendCodec(0, tmp));
+
+  ANL();
+  AOK();
+  ANL();
+
+  ///////
+  // VAD
+
+  const int VADSleep = 0;
+
+  bool disabledDTX;
+  VadModes mode;
+  bool enabled;
+
+  // verify default settings (should be OFF, kVadConventional and DTX enabled)
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(mode != kVadConventional);
+  TEST_MUSTPASS(disabledDTX != true);
+
+  // enable default VAD settings
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(mode != kVadConventional);
+  TEST_MUSTPASS(disabledDTX != false);
+  SLEEP(VADSleep);
+
+  // set kVadConventional mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadConventional));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadConventional);
+  SLEEP(VADSleep);
+
+  // set kVadAggressiveLow mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadAggressiveLow));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadAggressiveLow);
+  SLEEP(VADSleep);
+
+  // set kVadAggressiveMid mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadAggressiveMid));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadAggressiveMid);
+  SLEEP(VADSleep);
+
+  // set kVadAggressiveMid mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadAggressiveHigh));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadAggressiveHigh);
+  SLEEP(VADSleep);
+
+  // turn DTX OFF (audio should not be affected by VAD decisions)
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadConventional, true));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(disabledDTX != true);
+  SLEEP(VADSleep);
+
+  // try to enable DTX again (should fail since VAD is disabled)
+  TEST_MUSTPASS(codec->SetVADStatus(0, false, kVadConventional, false));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(disabledDTX == false);
+  SLEEP(VADSleep);
+
+  // disable VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, false));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != false);
+  SLEEP(VADSleep);
+
+  // restore default VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+  TEST_MUSTPASS(codec->SetVADStatus(0, false));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(mode != kVadConventional);
+  TEST_MUSTPASS(disabledDTX != true);
+  SLEEP(VADSleep);
+
+  AOK();
+  ANL();
+  ANL();
+
+  //////////////////////
+  // GetRecCodec
+  TEST(GetRecCodec);
+  ANL();
+
+  // stop all streaming first
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // start loopback streaming (PCMU is default)
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0,8000,"127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0,8000));
+#endif
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(100); // ensure that at least one packets is received
+
+  // scan all supported and valid codecs
+  CodecInst newCodec;
+  for (i = 0; i < codec->NumOfCodecs(); i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, newCodec));
+    // test all valid send codecs
+    if (!_stricmp("red", newCodec.plname) || !_stricmp("cn", newCodec.plname)
+        || !_stricmp("telephone-event", newCodec.plname)) {
+      continue; // Ignore these
+    }
+    if (-1 != codec->SetSendCodec(0, newCodec)) {
+      SLEEP(150);
+      // verify correct detection
+      TEST_MUSTPASS(codec->GetRecCodec(0, cinst));
+      TEST_LOG("%s %s ", newCodec.plname, cinst.plname);
+      TEST_MUSTPASS(_stricmp(newCodec.plname, cinst.plname) != 0);
+      TEST_MUSTPASS(cinst.pltype != newCodec.pltype);
+      TEST_MUSTPASS(cinst.plfreq != newCodec.plfreq);
+    }
+  }
+
+  // stop streaming
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+#ifdef WEBRTC_CODEC_GSMAMR
+  //////////////////////////
+  // SetAMREncFormat
+
+  // Fresh channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST(SetAMREncFormat); ANL();
+
+  //set another codec which is not AMR
+  TEST_MUSTPASS(codec->GetCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should fail
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0, kRfc3267BwEfficient));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0, kRfc3267OctetAligned));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0, kRfc3267FileStorage));
+  MARK();
+
+  //set AMR as encoder
+  strcpy(cinst.plname,"AMR");
+  cinst.channels=1; cinst.plfreq=8000; cinst.rate=12200; cinst.pltype=112;
+  cinst.pacsize=160;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should pass
+  TEST_MUSTPASS(codec->SetAMREncFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+
+  //////////////////////////
+  // SetAMRDecFormat
+
+  TEST(SetAMRDecFormat); ANL();
+
+  // It should not be possible to set AMR dec format before valid AMR decoder
+  // is registered
+  TEST_MUSTPASS(!codec->SetAMRDecFormat(0)); MARK();
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_AUDIO_CODING_MODULE_ERROR);
+
+  // Ensure that ACM::RegisterReceiveCodec(AMR) is called
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+
+  // All these tests should now pass
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRDecFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+#endif // #ifdef WEBRTC_CODEC_GSMAMR
+#ifdef WEBRTC_CODEC_GSMAMRWB
+  //////////////////////////
+  // SetAMRWbEncFormat
+
+  // Fresh channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST(SetAMRWbEncFormat); ANL();
+
+  //set another codec which is not AMR-wb
+  TEST_MUSTPASS(codec->GetCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should fail
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0, kRfc3267BwEfficient));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0, kRfc3267OctetAligned));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0, kRfc3267FileStorage));
+  MARK();
+
+  //set AMR-wb as encoder
+  strcpy(cinst.plname,"AMR-WB");
+  cinst.channels=1; cinst.plfreq=16000; cinst.rate=20000;
+  cinst.pltype=112; cinst.pacsize=320;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should pass
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+
+  //////////////////////////
+  // SetAMRDecFormat
+
+  TEST(SetAMRWbDecFormat); ANL();
+
+  // It should not be possible to set AMR dec format before valid AMR decoder
+  // is registered
+  TEST_MUSTPASS(!codec->SetAMRWbDecFormat(0)); MARK();
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_AUDIO_CODING_MODULE_ERROR);
+
+  // Ensure that ACM::RegisterReceiveCodec(AMR) is called
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+
+  // All these tests should now pass
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbDecFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+#endif // #ifdef WEBRTC_CODEC_GSMAMRWB
+  ///////////////////////////////
+  // SetSendCNPayloadType
+  TEST(SetSendCNPayloadType);
+  ANL();
+
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(-1, 0));
+  MARK(); // invalid channel
+
+  // Invalid payload range (only the dynamic range [96,127] is allowed)
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, 0));
+  MARK(); // invalid PT
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, 95));
+  MARK(); // invalid PT
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, 128));
+  MARK(); // invalid PT
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, -1));
+  MARK(); // invalid PT
+
+  // Not possible to change PT for 8000
+  TEST_MUSTPASS(!codec->SetSendCNPayloadType(0, 96, kFreq8000Hz));
+  MARK();
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_PLFREQ);
+
+  // Try some dynamic for 16000 and 32000 as well
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 96, kFreq16000Hz));
+  MARK();
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 96, kFreq32000Hz));
+  MARK(); // same should work
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 127, kFreq16000Hz));
+  MARK();
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 127, kFreq32000Hz));
+  MARK();
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 100, kFreq32000Hz));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  /////////////////////////////
+  // SetRecPayloadType
+  TEST(SetRecPayloadType);
+  ANL();
+
+  // scan all supported and valid codecs without changing payloads
+  nCodecs = codec->NumOfCodecs();
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, newCodec));
+    // If no default payload type is defined, we use 127
+    if (-1 == newCodec.pltype) {
+      newCodec.pltype = 127;
+    }
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    MARK(); // use default
+    newCodec.pltype = 99;
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    MARK(); // use same PT on all
+    newCodec.pltype = -1;
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    MARK(); // deregister all PTs
+  }
+
+  ANL();
+  AOK();
+  ANL();
+
+  /////////////////////////////
+  // GetRecPayloadType
+  TEST(GetRecPayloadType);
+  ANL();
+
+  CodecInst extraCodec;
+  for (i = 0; i < nCodecs; i++) {
+    // Set defaults
+    TEST_MUSTPASS(codec->GetCodec(i, newCodec));
+    // If no default payload type is defined, we use 127
+    if (-1 == newCodec.pltype) {
+      newCodec.pltype = 127;
+    }
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    //TEST_LOG("[%2d] %s (SetRec): fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+    //  i, newCodec.plname, newCodec.plfreq, newCodec.pltype, newCodec.rate,
+    // newCodec.channels, newCodec.pacsize);
+    extraCodec.pltype = -1; // don't know this yet
+    extraCodec.plfreq = newCodec.plfreq;
+    extraCodec.rate = newCodec.rate;
+    extraCodec.channels = newCodec.channels;
+    strcpy(extraCodec.plname, newCodec.plname);
+    // Verify that setting is OK
+    TEST_MUSTPASS(codec->GetRecPayloadType(0, extraCodec));
+    //TEST_LOG("[%2d] %s (GetRec): fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+    //  i, extraCodec.plname, extraCodec.plfreq, extraCodec.pltype,
+    // extraCodec.rate, extraCodec.channels, extraCodec.pacsize);
+    TEST_MUSTPASS(newCodec.pltype != extraCodec.pltype);
+    TEST_MUSTPASS(newCodec.plfreq != extraCodec.plfreq);
+    TEST_MUSTPASS(newCodec.channels != extraCodec.channels);
+  }
+
+  AOK();
+  ANL();
+
+  ////////////////////////////////////////////////////
+  // SetRecPayloadType - remove receive codecs
+  TEST(SetRecPayloadType - removing receive codecs);
+  ANL();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+#endif
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  if (file) {
+    TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0,
+            _mgr.AudioFilename(),
+            true,
+            true));
+  }
+
+  // Scan all supported and valid codecs and remove from receiving db, then
+  // restore
+  nCodecs = codec->NumOfCodecs();
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("red", cinst.plname) || !_stricmp("cn", cinst.plname)
+        || !_stricmp("telephone-event", cinst.plname)) {
+      continue; // Ignore these
+    }
+    TEST_LOG("Testing codec: %s", cinst.plname);
+    fflush(NULL);
+
+    if (-1 == cinst.pltype) {
+      // If no default payload type is defined, we use 127,
+      // codec is not registered for receiving
+      cinst.pltype = 127;
+    } else {
+      // Remove codec
+      memcpy(&extraCodec, &cinst, sizeof(CodecInst));
+      extraCodec.pltype = -1;
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, extraCodec));
+    }
+
+    // Set send codec
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // Verify no audio
+    TEST_MUSTPASS(voe_base_->StartReceive(0));
+    TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    TEST_LOG("  silence");
+    fflush(NULL);
+    SLEEP(800);
+    TEST_MUSTPASS(voe_base_->StopPlayout(0));
+    TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+    // Restore codec
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+
+    // Verify audio
+    TEST_MUSTPASS(voe_base_->StartReceive(0));
+    TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    TEST_LOG("  audio");
+    fflush(NULL);
+    SLEEP(800);
+    TEST_MUSTPASS(voe_base_->StopPlayout(0));
+    TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+    if (127 == cinst.pltype) {
+      // If no default payload type is defined, i.e. we have set pt to
+      //127 above,
+      // make sure we remove codec from receiving
+      cinst.pltype = -1;
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+    }
+
+    ANL();
+  }
+
+  // Remove certain codecs
+  TEST_LOG("Removing receive codecs:");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("ipcmwb", cinst.plname) || !_stricmp("pcmu", cinst.plname)
+        || !_stricmp("eg711a", cinst.plname)) {
+      TEST_LOG(" %s", cinst.plname);
+      memcpy(&extraCodec, &cinst, sizeof(CodecInst));
+      extraCodec.pltype = -1;
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, extraCodec));
+    }
+  }
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // Test sending all codecs - verify audio/no audio depending on codec
+  TEST_LOG("Looping through send codecs \n");
+  TEST_LOG("Verify that removed codecs are not audible and the other are \n");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("red", cinst.plname) || !_stricmp("cn", cinst.plname)
+        || !_stricmp("telephone-event", cinst.plname)) {
+      continue; // Ignore these
+    }
+    TEST_LOG("Testing codec: %s \n", cinst.plname);
+
+    // If no default payload type is defined, we use 127 and set receive
+    // payload type
+    if (-1 == cinst.pltype) {
+      cinst.pltype = 127;
+      TEST_MUSTPASS(voe_base_->StopPlayout(0));
+      TEST_MUSTPASS(voe_base_->StopReceive(0));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+      TEST_MUSTPASS(voe_base_->StartReceive(0));
+      TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    }
+
+    // Set send codec
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // Verify audio/no audio
+    SLEEP(800);
+  }
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // Restore codecs
+  TEST_LOG("Restoring receive codecs:");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("ipcmwb", cinst.plname) || !_stricmp("pcmu", cinst.plname)
+        || !_stricmp("eg711a", cinst.plname)) {
+      TEST_LOG(" %s", cinst.plname);
+      memcpy(&extraCodec, &cinst, sizeof(CodecInst));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+    }
+  }
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // Test sending all codecs - verify audio
+  TEST_LOG("Looping through send codecs \n");
+  TEST_LOG("Verify that all codecs are audible \n");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("red", cinst.plname) || !_stricmp("cn", cinst.plname)
+        || !_stricmp("telephone-event", cinst.plname)) {
+      continue; // Ignore these
+    }
+    TEST_LOG("Testing codec: %s \n", cinst.plname);
+
+    // If no default payload type is defined, we use 127 and set receive
+    // payload type
+    if (-1 == cinst.pltype) {
+      cinst.pltype = 127;
+      TEST_MUSTPASS(voe_base_->StopPlayout(0));
+      TEST_MUSTPASS(voe_base_->StopReceive(0));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+      TEST_MUSTPASS(voe_base_->StartReceive(0));
+      TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    }
+
+    // Set send codec
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // Verify audio/no audio
+    SLEEP(800);
+  }
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // Fresh channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+#if defined(WEBRTC_CODEC_ISAC)
+
+  /////////////////////////////////////
+  // SetISACInitTargetRate - wb
+  TEST(SetISACInitTargetRate);
+  ANL();
+
+  // set PCMU as sending codec
+  cinst.channels = 1;
+  cinst.pacsize = 160;
+  cinst.plfreq = 8000;
+  strcpy(cinst.plname, "PCMU");
+  cinst.pltype = 0;
+  cinst.rate = 64000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 10000));
+  MARK(); // should fail since iSAC is not active
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CODEC_ERROR);
+
+  // set iSAC as sending codec (16kHz)
+  cinst.channels = 1;
+  cinst.plfreq = 16000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // adaptive rate
+  cinst.pacsize = 480; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(1, 10000));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 500));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 33000));
+  MARK(); // invalid target rates (too large)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 10000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 0));
+  MARK(); // 0 is a valid rate
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000));
+  MARK(); // try max as well
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000, true));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000, false));
+  MARK();
+
+  cinst.pacsize = 960; // 60ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000, false));
+  MARK();
+
+  cinst.rate = 20000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 32000));
+  MARK(); // only works in adaptive mode
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_AUDIO_CODING_MODULE_ERROR);
+
+  cinst.rate = -1;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000));
+  MARK(); // back to adaptive mode
+
+  ANL();
+  AOK();
+  ANL();
+
+  /////////////////////////////////////
+  // SetISACInitTargetRate - swb
+  TEST(ISACSWB SetISACInitTargetRate);
+  ANL();
+
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 32000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 104;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(1, 10000));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, -1));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, -1));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 500));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 57000));
+  MARK(); // invalid target rates (valid range is [10000, 56000])
+
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 10000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 0));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 56000));
+  MARK(); // try max as well
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 56000, true));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 56000, false));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  ////////////////////////////////
+  // SetISACMaxRate
+  TEST(SetISACMaxRate);
+  ANL();
+
+  // set PCMU as sending codec
+  cinst.channels = 1;
+  cinst.pacsize = 160;
+  cinst.plfreq = 8000;
+  strcpy(cinst.plname, "PCMU");
+  cinst.pltype = 0;
+  cinst.rate = 64000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 48000));
+  MARK(); // should fail since iSAC is not active
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CODEC_ERROR);
+
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 16000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // adaptive rate
+  cinst.pacsize = 480; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(1, 48000));
+  MARK(); // invalid channel
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 31900));
+  MARK(); // invalid target rates (too small)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 53500));
+  MARK(); // invalid target rates (too large)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 32000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 48000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 53400));
+  MARK(); // try max as well (default)
+
+  cinst.pacsize = 960; // 60ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 48000));
+  MARK();
+
+  cinst.rate = 20000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK(); // also works in non-adaptive mode
+
+  ANL();
+  AOK();
+  ANL();
+
+  TEST(ISACSWB SetISACMaxRate);
+  ANL();
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 32000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 104;
+  cinst.rate = 45000; // instantaneous mode
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(1, 48000));
+  MARK(); // invalid channel
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 31900));
+  MARK(); // invalid target rates (too small)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 107500));
+  MARK(); // invalid target rates (too large)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 32000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 55000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 80000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 107000));
+  MARK(); // try max as well (default)
+
+
+  cinst.rate = -1; // adaptive mode
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(1, 48000));
+  MARK(); // invalid channel
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 31900));
+  MARK(); // invalid target rates (too small)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 107500));
+  MARK(); // invalid target rates (too large)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 32000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 55000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 80000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 107000));
+  MARK(); // try max as well (default)
+
+  ANL();
+  AOK();
+  ANL();
+
+  ////////////////////////////////
+  // SetISACMaxPayloadSize
+  TEST(SetISACMaxPayloadSize);
+  ANL();
+
+  // set PCMU as sending codec
+  cinst.channels = 1;
+  cinst.pacsize = 160;
+  cinst.plfreq = 8000;
+  strcpy(cinst.plname, "PCMU");
+  cinst.pltype = 0;
+  cinst.rate = 64000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 120));
+  MARK(); // should fail since iSAC is not active
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CODEC_ERROR);
+
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 16000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // adaptive rate
+  cinst.pacsize = 480; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(1, 120));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 100));
+  MARK(); // invalid size (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 410));
+  MARK(); // invalid size (too large)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 200));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 120));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 400));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  TEST(ISACSWB SetISACMaxPayloadSize);
+  ANL();
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 32000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 104;
+  cinst.rate = 45000; // default rate
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(1, 100));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 100));
+  MARK(); // invalid size (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 610));
+  MARK(); // invalid size (too large)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 200));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 120));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 600));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  // set iSAC as sending codec
+  // set iSAC-wb as sending codec
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+#else
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8001, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8001));
+#endif
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          0, GetFilename("audio_long16.pcm"), true , true));
+  cinst.channels = 1;
+  TEST_LOG("Testing codec: Switch between iSAC-wb and iSAC-swb \n");
+  TEST_LOG("Testing codec: iSAC wideband \n");
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 480; // 30ms
+  cinst.plfreq = 16000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_LOG("             : iSAC superwideband \n");
+  cinst.pltype = 104;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 960; // 30ms
+  cinst.plfreq = 32000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_LOG("             : iSAC wideband \n");
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 480; // 30ms
+  cinst.plfreq = 16000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_LOG("             : iSAC superwideband \n");
+  cinst.pltype = 104;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 960; // 30ms
+  cinst.plfreq = 32000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#else
+  TEST_LOG("Skipping extended iSAC API tests - "
+      "WEBRTC_CODEC_ISAC not defined\n");
+#endif // #if defined(WEBRTC_CODEC_ISAC)
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  delete ptrTransport;
+#endif
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestDtmf
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestDtmf() {
+  PrepareTest("Dtmf");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEDtmf* dtmf = _mgr.DtmfPtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+
+  //#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename("VoEDtmf_trace.txt")));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+  //#endif
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  // SetDtmfFeedbackStatus
+  TEST(SetDtmfFeedbackStatus & GetDtmfFeedbackStatus);
+  ANL();
+  bool dtmfFeedback = false, dtmfDirectFeedback = true;
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(!dtmfFeedback);
+  TEST_MUSTPASS(dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false, false));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(dtmfFeedback);
+  TEST_MUSTPASS(dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false, true));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(dtmfFeedback);
+  TEST_MUSTPASS(!dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(true, false));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(!dtmfFeedback);
+  TEST_MUSTPASS(dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(true, true));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(!dtmfFeedback);
+  TEST_MUSTPASS(!dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false, false));
+
+  AOK();
+  ANL();
+
+  // SendDtmf
+  TEST(SendDtmf);
+  ANL();
+
+  // Fail tests
+  // Event
+  // the eventcode is changed to unsigned char, so -1 will be interpreted as
+  // 255, 256->0
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, -1, false, 160, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 16, false, 160, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  // Length
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 99, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 60001, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 20, true, -1, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  // Volume
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 160, -1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 160, 37));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  // Without sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true));
+  MARK();
+  TEST_MUSTPASS(VE_NOT_SENDING != voe_base_->LastError());
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  // Testing Dtmf out-of-band: event, length and volume
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 16, true));
+  MARK();
+  SLEEP(500); // Flash, not audible
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 100, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 160, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 160, 36));
+  MARK();
+  SLEEP(500);
+
+  // Testing Dtmf inband: event, length and volume
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 15, false));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 100, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 400, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 160, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 160, 36));
+  MARK();
+  SLEEP(500);
+
+  // Testing other events out-of-band: event and length
+  // These are not audible
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 17, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 32, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 78, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 255, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  // the minimum length is 100 ms for the telephone event
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 32, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 32, true, 1000, 10));
+  MARK();
+  SLEEP(1200);
+
+  AOK();
+  ANL();
+
+  // PlayDtmfTone
+  TEST(PlayDtmfTone);
+  ANL();
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(-1, 200, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(16, 200, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(0, 9, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(0, 200, -1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(0, 200, 37));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0));
+  MARK();
+  SLEEP(500);
+  // the minimum length of the DTMF tone is 100 ms
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 100, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 2000, 10));
+  MARK();
+  SLEEP(2300);
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 200, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 200, 36));
+  MARK();
+  SLEEP(500);
+
+  AOK();
+  ANL();
+
+  // SetTelephoneEventDetection
+  TEST(SetTelephoneEventDetection);
+  ANL();
+  AOK();
+  ANL();
+
+  // Testing sending Dtmf under VAD/CN
+  TEST(SendDtmf - with VAD enabled);
+  ANL();
+  // Mute mic
+  TEST_MUSTPASS(volume->SetInputMute(0, true));
+  MARK();
+  // Enable VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+  MARK();
+  // Send Dtmf
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  // Switch codec
+  CodecInst ci;
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  ci.channels = 1;
+  ci.pacsize = 480;
+  ci.plfreq = 16000;
+  strcpy(ci.plname, "ISAC");
+  ci.pltype = 103;
+  ci.rate = -1;
+#else
+  ci.pltype = 119;
+  strcpy(ci.plname, "isaclc");
+  ci.plfreq = 16000;
+  ci.pacsize = 320;
+  ci.channels = 1;
+  ci.rate = 40000;
+#endif
+  TEST_MUSTPASS(codec->SetSendCodec(0, ci));
+  MARK();
+  // Send Dtmf
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  SLEEP(4000);
+  // Disable VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, false));
+  MARK();
+  // Unmute
+  TEST_MUSTPASS(volume->SetInputMute(0, false));
+  MARK();
+
+  AOK();
+  ANL();
+
+  // SetSendTelephoneEventPayloadType
+  TEST(SetSendTelephoneEventPayloadType);
+  ANL();
+  TEST_MUSTPASS(!dtmf->SetSendTelephoneEventPayloadType(0, 128));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+
+  TEST_MUSTPASS(dtmf->SetSendTelephoneEventPayloadType(0, 96));
+  MARK();
+  TEST_MUSTPASS(dtmf->SetSendTelephoneEventPayloadType(0, 127));
+  MARK();
+  TEST_MUSTPASS(dtmf->SetSendTelephoneEventPayloadType(0, 106));
+  MARK(); // restore default
+
+  AOK();
+  ANL();
+
+#ifdef WEBRTC_DTMF_DETECTION
+  TEST(RegisterTelephoneEventDetection - several channels); ANL();
+
+  ci.channels = 1;
+  ci.pacsize = 160;
+  ci.plfreq = 8000;
+  ci.pltype = 0;
+  ci.rate = 64000;
+  strcpy(ci.plname, "PCMU");
+  TEST_MUSTPASS(codec->SetSendCodec(0, ci));
+
+  int ch2 = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch2, 8002, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch2, 8002));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch2));
+  TEST_MUSTPASS(codec->SetSendCodec(ch2, ci));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch2));
+  TEST_MUSTPASS(voe_base_->StartSend(ch2));
+  MARK();
+
+  DtmfCallback *d = new DtmfCallback();
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false));
+
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // In-band
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(0, kInBand, *d));
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(ch2, kInBand, *d));
+  TEST_LOG("\nSending in-band telephone events:");
+  for(int i = 0; i < 16; i++)
+  {
+    TEST_LOG("\n  %d ", i); fflush(NULL);
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, i, false, 160, 10));
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(ch2, i, false, 160, 10));
+    SLEEP(500);
+  }
+  TEST_LOG("\nDetected %d events \n", d->counter);
+  TEST_MUSTPASS(d->counter != 32);
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(0));
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(ch2));
+
+  // Out-of-band
+  d->counter = 0;
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(0, kOutOfBand, *d));
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(ch2, kOutOfBand, *d));
+  TEST_LOG("\nSending out-band telephone events:");
+  for(int i = 0; i < 16; i++)
+  {
+    TEST_LOG("\n  %d ", i); fflush(NULL);
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, i, true, 160, 10));
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(ch2, i, true, 160, 10));
+    SLEEP(500);
+  }
+  TEST_LOG("\nDetected %d events \n", d->counter);
+  TEST_MUSTPASS(d->counter != 32);
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(0));
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(ch2));
+  delete d;
+
+  AOK(); ANL();
+#endif
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(true, false));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestEncryption
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestEncryption() {
+  PrepareTest("Encryption");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoEEncryption* encrypt = _mgr.EncryptionPtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+          GetFilename("VoEEncryption_trace.txt")));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+    ///////////////////////////
+  // Actual test starts here
+
+  unsigned char key1[30] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6,
+      7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
+
+#ifdef WEBRTC_SRTP
+  unsigned char key2[30]; // Different than key1 in first position
+  memcpy(key2, key1, 30);
+  key2[0] = 99;
+  unsigned char key3[30]; // Different than key1 in last position
+  memcpy(key3, key1, 30);
+  key3[29] = 99;
+  unsigned char key4[29]; // Same as key1 but shorter
+  memcpy(key4, key1, 29);
+
+  TEST(SRTP - Fail tests); ANL();
+
+  // Send
+  // Incorrect parameters when not all protection is enabled
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kNoProtection, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryption key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  // Incorrect cipher key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 15,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 257,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 15, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 257, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 21, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 257, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth tag length
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 21,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 20, 13,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  // key NULL pointer
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, NULL));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  // Same for receive
+  // Incorrect parameters when not all protection is enabled
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kNoProtection, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryption key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  // Incorrect cipher key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 15,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 257,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 15,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 257,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode,
+          30, kAuthHmacSha1, 21, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // it crashed the application
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 257, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth tag length
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 21,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // it crashed the application
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 20, 13,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // key NULL pointer
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          NULL));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  ANL();
+
+  TEST(SRTP - Should hear audio at all time); ANL();
+
+  // Authentication only
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // No protection
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthNull, 0, 0,
+          kNoProtection, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthNull, 0, 0,
+          kNoProtection, key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+
+  // Encryption only
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+
+  // Authentication only
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // Switching between keys
+  TEST(SRTP - Different keys - should hear audio at all time); ANL();
+
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key2));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key2));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key2));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key2));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // Testing different keys that should be silent
+  TEST(SRTP - Should be silent or garbage); ANL();
+
+  // key1 and key2
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key2));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key2));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key2));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key2));
+  MARK(); SLEEP(2000);
+
+  // key1 and key3
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key3));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key3));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key3));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key3));
+  MARK(); SLEEP(2000);
+
+  // key1 and key4
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key4));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key4));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key4));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key4));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // Back to normal
+  TEST(SRTP - Back to normal - should hear audio); ANL();
+
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // SRTCP tests
+  TEST(SRTCP - Ignore voice or not); ANL();
+  VoERTP_RTCP* rtp_rtcp = _mgr.RTP_RTCPPtr();
+  char tmpStr[32];
+
+  // First test that RTCP packet is received and OK without encryption
+
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik1"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik1", tmpStr));
+
+  // Enable SRTP and SRTCP send and receive
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1, true));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1, true));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik2"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr));
+
+  // Disable SRTP and SRTCP send
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik3"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Enable SRTP send, but disable SRTCP send
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik4"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Enable SRTP and SRTCP send, disable SRTP and SRTCP receive
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1, true));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik5"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Enable SRTP receive, but disable SRTCP receive
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik6"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Disable all
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik7"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik7", tmpStr));
+  ANL();
+
+#else
+  TEST(SRTP disabled - Fail tests);
+  ANL();
+
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  ANL();
+#endif
+  AOK();
+
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestExternalMedia
+// ----------------------------------------------------------------------------
+
// Exercises the VoEExternalMedia API on a single loopback channel.
//
// With WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT defined: verifies that external
// playout/recording cannot be toggled while the channel is active, that
// ExternalPlayoutGetData / ExternalRecordingInsertData accept valid sample
// counts and frequencies, and that invalid arguments are rejected with
// VE_INVALID_ARGUMENT. Without the define: verifies every call fails with
// VE_FUNC_NOT_SUPPORTED.
//
// Returns 0 on success, -1 if the VoEExternalMedia interface is unavailable;
// TEST_MUSTPASS aborts the test on the first failing expression.
int VoEExtendedTest::TestExternalMedia() {
  PrepareTest("VoEExternalMedia");

  VoEBase* voe_base_ = _mgr.BasePtr();
  VoEExternalMedia* xmedia = _mgr.ExternalMediaPtr();

  // check if this interface is supported
  if (!xmedia) {
    TEST_LOG("VoEExternalMedia is not supported!");
    return -1;
  }

#ifdef _USE_EXTENDED_TRACE_
  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
          GetFilename("VoEExternalMedia_trace.txt")));
  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(
          kTraceStateInfo | kTraceStateInfo | kTraceWarning |
          kTraceError | kTraceCritical | kTraceApiCall |
          kTraceMemory | kTraceInfo));
#endif
  // Set up a local loopback call so send/playout are active when the
  // "must fail while sending/playing" checks below run.
  TEST_MUSTPASS(voe_base_->Init());
  TEST_MUSTPASS(voe_base_->CreateChannel());
  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
  TEST_MUSTPASS(voe_base_->StartReceive(0));
  TEST_MUSTPASS(voe_base_->StartPlayout(0));
  TEST_MUSTPASS(voe_base_->StartSend(0));

  // Scratch sample buffer shared by the playout-get and recording-insert
  // calls; zeroed so inserted "audio" is silence.
  int getLen = 0;
  WebRtc_Word16 vector[32000];
  memset(vector, 0, 32000 * sizeof(short));

#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT

  // ExternalPlayoutGetData
  TEST(ExternalPlayoutGetData);
  ANL();

  // Toggling external playout while playout is running must fail.
  TEST_MUSTPASS(!xmedia->SetExternalPlayoutStatus(true));
  TEST_MUSTPASS(VE_ALREADY_SENDING != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 16000, 100, getLen));
  TEST_MUSTPASS(VE_INVALID_OPERATION != voe_base_->LastError());

  // Stop playout, enable external playout, restart.
  TEST_MUSTPASS(voe_base_->StopPlayout(0));
  TEST_MUSTPASS(xmedia->SetExternalPlayoutStatus(true));
  TEST_MUSTPASS(voe_base_->StartPlayout(0));

  // Valid requests: expect one 10 ms frame (freq / 100 samples) back.
  TEST_MUSTPASS(xmedia->ExternalPlayoutGetData(vector, 48000, 0, getLen));
  TEST_MUSTPASS(480 != getLen);
  SLEEP(10);
  TEST_MUSTPASS(xmedia->ExternalPlayoutGetData(vector, 16000, 3000, getLen));
  TEST_MUSTPASS(160 != getLen);
  SLEEP(10);

  // Invalid frequency and negative lag must be rejected.
  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 8000, 100, getLen));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 16000, -1, getLen));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());

  // Restore normal (device) playout.
  TEST_MUSTPASS(voe_base_->StopPlayout(0));
  TEST_MUSTPASS(xmedia->SetExternalPlayoutStatus(false));
  TEST_MUSTPASS(voe_base_->StartPlayout(0));

  // SetExternalRecording
  TEST(SetExternalRecording);
  ANL();

  // Toggling external recording while sending must fail.
  TEST_MUSTPASS(!xmedia->SetExternalRecordingStatus(true));
  TEST_MUSTPASS(VE_ALREADY_SENDING != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 160, 16000, 20));
  TEST_MUSTPASS(VE_INVALID_OPERATION != voe_base_->LastError());

  // Stop sending, enable external recording, restart.
  TEST_MUSTPASS(voe_base_->StopSend(0));
  TEST_MUSTPASS(xmedia->SetExternalRecordingStatus(true));
  TEST_MUSTPASS(voe_base_->StartSend(0));

  // Valid inserts: whole multiples of a 10 ms frame at the given frequency.
  TEST_MUSTPASS(xmedia->ExternalRecordingInsertData(vector, 480, 48000, 0));
  SLEEP(10);
  TEST_MUSTPASS(xmedia->ExternalRecordingInsertData(vector, 640, 16000, 0));
  SLEEP(40);

  // Invalid lag, frequency, and sample counts must be rejected.
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 160, 16000, -1));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 80, 8000, 20));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 0, 16000, 20));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 80, 16000, 20));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 500, 16000, 20));
  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());

  // Restore normal (device) recording.
  TEST_MUSTPASS(voe_base_->StopSend(0));
  TEST_MUSTPASS(xmedia->SetExternalRecordingStatus(false));
  TEST_MUSTPASS(voe_base_->StartSend(0));

#else // #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
  // Feature compiled out: every call must fail with VE_FUNC_NOT_SUPPORTED.
  TEST_MUSTPASS(!xmedia->SetExternalPlayoutStatus(true));
  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 16000, 100, getLen));
  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->SetExternalRecordingStatus(true));
  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 160, 16000, 20));
  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());

#endif // #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
  // Tear down the call and the engine.
  TEST_MUSTPASS(voe_base_->StopSend(0));
  TEST_MUSTPASS(voe_base_->StopPlayout(0));
  TEST_MUSTPASS(voe_base_->StopReceive(0));
  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
  TEST_MUSTPASS(voe_base_->Terminate());

  ANL();
  AOK();
  return 0;
}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestFile
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestFile() {
+  PrepareTest("File");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+          GetFilename("VoEFile_trace.txt"))); MARK();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  const int dT(100);
+
+  TEST(StartPlayingFileLocally);
+  ANL();
+  TEST(StopPlayingFileLocally);
+  ANL();
+
+  voe_base_->StopPlayout(0);
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm")));MARK();
+  voe_base_->StartPlayout(0);
+  MARK(); // file should be mixed in and played out
+  SLEEP(dT);
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm")));
+  MARK(); // should fail (must stop first)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_ALREADY_PLAYING);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm")));
+  MARK(); // should work again (restarts file)
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm"), false, kFileFormatPcm16kHzFile));
+  MARK();
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long8.pcm"), false, kFileFormatPcm8kHzFile));
+  MARK();
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.wav"), false, kFileFormatPcm8kHzFile));
+  MARK();
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long8mulaw.wav"), false,
+          kFileFormatPcm8kHzFile));
+  MARK();
+  SLEEP(dT);
+
+  // add compressed tests here...
+
+  // TEST_MUSTPASS(file->StopPlayingFileLocally(0)); MARK();
+  // TEST_MUSTPASS(file->StartPlayingFileLocally(
+  //   0, GetResource("audio_short16.pcm"), true,
+  //   kFileFormatPcm16kHzFile)); MARK(); // loop
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_short16.pcm"), false,
+          kFileFormatPcm16kHzFile, 1.0, 0, 2000));
+  MARK(); // play segment
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, GetResource("audio_short16.pcm"), false,
+          kFileFormatPcm16kHzFile, 1.0, 2000, 1000));
+  MARK(); // invalid segment
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, GetResource("audio_short16.pcm"), false,
+          kFileFormatPcm16kHzFile, 1.0, 21000, 30000));
+  MARK(); // start > file size
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, GetResource("audio_short16.pcm"), false,
+          kFileFormatPcm16kHzFile, 1.0, 100, 100));
+  MARK(); // invalid segment
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm")));
+  MARK(); // should work again (restarts file)
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(0, (InStream*)NULL));
+  MARK(); // just do it
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+
+  AOK();
+  ANL();
+
+  TEST(IsPlayingFileLocally);
+  ANL();
+
+  TEST_MUSTPASS(0 != file->IsPlayingFileLocally(0));
+  MARK(); // inactive
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm")));
+  MARK();
+  TEST_MUSTPASS(1 != file->IsPlayingFileLocally(0));
+  MARK(); // active
+  AOK();
+  ANL();
+
+  TEST(ScaleLocalFilePlayout);
+  ANL();
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 1.0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 0.0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 0.5));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 0.25));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  AOK();
+  ANL();
+
+  // Replace microphone with file and play out on remote side
+  // All channels, per channel
+  // Different mixing frequencies
+  TEST(StartPlayingFileAsMicrophone);
+  ANL();
+  TEST(IsPlayingFileAsMicrophone);
+  ANL();
+  TEST(ScaleFileAsMicrophonePlayout);
+  ANL();
+  CodecInst tempCodec;
+  for (int ch = -1; ch < 1; ++ch) // Channel -1 and 0
+  {
+    TEST_LOG("Testing channel = %d \n", ch);
+    for (int fs = 1; fs < 4; ++fs) // nb, wb and swb codecs
+    {
+      switch (fs) {
+        case 1: // nb
+          TEST_LOG("Testing with nb codec \n");
+          tempCodec.channels = 1;
+          tempCodec.pacsize = 160;
+          tempCodec.plfreq = 8000;
+          strcpy(tempCodec.plname, "PCMU");
+          tempCodec.pltype = 0;
+          tempCodec.rate = 64000;
+          break;
+        case 2: // wb
+#ifdef WEBRTC_CODEC_ISAC
+          TEST_LOG("Testing with wb codec \n");
+          tempCodec.channels = 1;
+          tempCodec.pacsize = 480;
+          tempCodec.plfreq = 16000;
+          strcpy(tempCodec.plname, "ISAC");
+          tempCodec.pltype = 103;
+          tempCodec.rate = 32000;
+          break;
+#else
+          TEST_LOG("NOT testing with wb codec - "
+              "WEBRTC_CODEC_ISAC not defined \n");
+          continue;
+#endif
+        case 3: // swb
+#ifdef WEBRTC_CODEC_PCM16
+          TEST_LOG("Testing with swb codec \n");
+          tempCodec.channels = 1;
+          tempCodec.pacsize = 640;
+          tempCodec.plfreq = 32000;
+          strcpy(tempCodec.plname, "L16");
+          tempCodec.pltype = 125;
+          tempCodec.rate = 512000;
+          break;
+#else
+          TEST_LOG("NOT testing with swb codec -"
+              " WEBRTC_CODEC_PCM16 not defined \n");
+          continue;
+#endif
+      }
+      TEST_MUSTPASS(voe_base_->StopSend(0));
+      TEST_MUSTPASS(voe_base_->StopPlayout(0));
+      TEST_MUSTPASS(voe_base_->StopReceive(0));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, tempCodec));
+      TEST_MUSTPASS(voe_base_->StartReceive(0));
+      TEST_MUSTPASS(voe_base_->StartPlayout(0));
+      TEST_MUSTPASS(voe_base_->StartSend(0));
+      TEST_MUSTPASS(codec->SetSendCodec(0, tempCodec));
+
+      TEST_LOG("File 1 in 16 kHz no mix, 2 in 16 kHz mix,"
+        " 3 in 8 kHz no mix, 4 in 8 kHz mix \n");
+
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, GetResource("audio_long16.pcm")));
+      MARK(); // don't mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, GetResource("audio_long16.wav"), false, true,
+              kFileFormatWavFile));
+      MARK(); // mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, GetResource("audio_long8.pcm"), false, false,
+              kFileFormatPcm8kHzFile));
+      MARK(); // don't mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, GetResource("audio_long8.pcm"), false, true,
+              kFileFormatPcm8kHzFile));
+      MARK(); // mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(!file->StartPlayingFileAsMicrophone(
+              ch, (InStream*)NULL));
+      MARK(); // force error
+      AOK();
+      ANL();
+
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, GetResource("audio_long16.pcm")));
+      TEST_MUSTPASS(1 != file->IsPlayingFileAsMicrophone(ch));
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      TEST_MUSTPASS(0 != file->IsPlayingFileAsMicrophone(ch));
+      AOK();
+      ANL();
+
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, GetResource("audio_long16.pcm")));
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 1.0));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 0.5));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 0.25));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 0.0));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      AOK();
+      ANL();
+    }
+  }
+
+  // Record speaker signal to file
+
+  CodecInst fcomp = { 0, "L16", 8000, 80, 1, 128000 };
+
+  TEST(StartRecordingPlayout);
+  ANL();
+  TEST(StopRecordingPlayout);
+  ANL();
+
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          GetFilename("rec_play16.pcm")));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0, GetFilename("rec_play8.wav"),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+    fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0, GetFilename("rec_play16.wav"),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.pltype = 0;
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "PCMU");
+  fcomp.rate = 64000;
+  fcomp.pacsize = 160;
+  fcomp.channels = 1;
+
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          GetFilename("rec_play_pcmu.wav"),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.pltype = 8;
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "PCMA");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          GetFilename("rec_play_pcma.wav"),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.pltype = 97;
+  fcomp.pacsize = 240;
+  fcomp.rate = 13300;
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "ILBC");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          GetFilename("rec_play.ilbc"),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  TEST_MUSTPASS(file->StartRecordingPlayout(
+          -1, GetFilename("rec_play16_mixed.pcm")));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(-1));
+  MARK();
+
+  // TEST_MUSTPASS(file->StopPlayingFileLocally(0)); // Why should this work?
+  TEST_LOG("\nplaying out...\n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetFilename("rec_play.ilbc"), false, kFileFormatCompressedFile));
+  MARK();
+  SLEEP(2000);
+
+  AOK();
+  ANL();
+
+  // Record microphone signal to file
+  TEST(StartRecordingMicrophone);
+  ANL();
+  TEST(StopRecordingMicrophone);
+  ANL();
+
+  TEST_MUSTPASS(file->StartRecordingMicrophone(GetFilename("rec_mic16.pcm")));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+
+  voe_base_->StopSend(0);
+  TEST_MUSTPASS(file->StartRecordingMicrophone(GetFilename("rec_mic16.pcm")));
+  MARK(); // record without sending as well
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  voe_base_->StartSend(0); // restore sending
+
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          GetFilename("rec_play8.wav"), &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+
+  fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          GetFilename("rec_play16.wav"), &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+
+  // FT#1810, the following test is to make sure StartRecordingCall will
+  // record both mic and file
+  TEST_LOG("StartRecordingCall, record both mic and file in specific"
+    " channels \n");
+  TEST_LOG("Create maxnumofchannels \n");
+  for (int i = 1; i < voe_base_->MaxNumOfChannels(); i++) {
+    int ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(ch == -1);
+    TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  }
+
+  TEST_MUSTPASS(voe_base_->SetSendDestination(1, 12356, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(1, 12356));
+  TEST_MUSTPASS(voe_base_->StartReceive(1));
+  TEST_MUSTPASS(voe_base_->StopPlayout(1));
+  TEST_MUSTPASS(voe_base_->StartSend(1));
+  TEST_MUSTPASS(voe_base_->StartPlayout(1));
+
+  TEST_LOG("ALways playing audio_long16.pcm for "
+    "channel 0 in background \n");
+  fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_LOG("Recording microphone to L16, please speak \n");
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          0, GetResource("audio_long16.pcm"), true , true));
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          GetFilename("rec_play_ch.wav"), &fcomp));
+  MARK();
+  SLEEP(3000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_LOG("Playing recording file, you should only hear what you said \n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetFilename("rec_play_ch.wav"), false, kFileFormatWavFile));
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  TEST_LOG("Recording microphone 0 to L16, please speak \n");
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          -1, GetResource("audio_long16.pcm"), true , true));
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          GetFilename("rec_play_ch_0.wav"), &fcomp));
+  MARK();
+  SLEEP(3000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(-1));
+  TEST_LOG("Playing recording file, you should hear what you said and"
+    " audio_long16.pcm \n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetFilename("rec_play_ch_0.wav"), false, kFileFormatWavFile));
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  TEST_LOG("Recording microphone to ilbc, please speak \n");
+  strcpy(fcomp.plname, "ilbc");
+  fcomp.plfreq = 8000;
+  fcomp.pacsize = 160;
+  fcomp.rate = 15200;
+  fcomp.channels = 1;
+  fcomp.pltype = 97;
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          0, GetResource("audio_long16.pcm"), true , true));
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          GetFilename("rec_play_ch_0.ilbc"), &fcomp));
+  MARK();
+  SLEEP(3000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_LOG("Playing recording file, you should only hear what you said \n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetFilename("rec_play_ch_0.ilbc"), false,
+          kFileFormatCompressedFile));
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  for (int i = 1; i < voe_base_->MaxNumOfChannels(); i++) {
+    TEST_MUSTPASS(voe_base_->DeleteChannel(i));
+  }
+
+  AOK();
+  ANL();
+
+  // Record mixed (speaker + microphone) signal to file
+
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+  TEST(StartRecordingSpeakerStereo);
+  ANL();
+  TEST(StopRecordingSpeakerStereo);
+  ANL();
+
+  VoEHardware* hardware = _mgr.HardwarePtr();
+  TEST_MUSTPASS(NULL == hardware);
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetRecordingDevice(-1));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1));
+#else
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+#endif
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetRecordingDevice(-1));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1));
+#else
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+#endif
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  AOK();
+  ANL();
+#else
+  TEST_LOG("Skipping stereo record tests -"
+      " MAC_IPHONE or WEBRTC_ANDROID is defined \n");
+#endif // #if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+  // Conversion between different file formats
+
+#if defined(MAC_IPHONE) || defined(WEBRTC_ANDROID)
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#endif
+
+  TEST(ConvertPCMToWAV);
+  ANL();
+
+  TEST_MUSTPASS(file->ConvertPCMToWAV(
+          GetResource("audio_long16.pcm"),
+          GetFilename("singleUserDemoConv.wav")));
+  MARK();
+  TEST_MUSTPASS(!file->ConvertPCMToWAV((InStream*)NULL,
+          (OutStream*)NULL));MARK(); // invalid stream handles
+  AOK();
+  ANL();
+
+  TEST(ConvertWAVToPCM);
+  ANL();
+
+  TEST_MUSTPASS(file->ConvertWAVToPCM(
+          GetResource("audio_long16.wav"),
+          GetFilename("singleUserDemoConv.pcm")));
+  MARK();
+  TEST_MUSTPASS(!file->ConvertWAVToPCM((InStream*)NULL, (OutStream*)NULL));
+  MARK(); // invalid stream handles
+  AOK();
+  ANL();
+
+  TEST(ConvertPCMToCompressed);
+  ANL();
+
+  fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(!file->ConvertPCMToCompressed(
+          GetResource("audio_long16.pcm"),
+          GetFilename("singleUserDemoConv16_dummy.wav"), &fcomp));
+  MARK(); // should not be supported
+
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "ilbc");
+  fcomp.pacsize = 160;
+  fcomp.rate = 15200;
+  fcomp.pltype = 97;
+  fcomp.channels = 1;
+  TEST_MUSTPASS(file->ConvertPCMToCompressed(
+          GetResource("audio_long16.pcm"),
+          GetFilename("singleUserDemoConv.ilbc"), &fcomp));MARK();
+  AOK();ANL();
+
+  TEST(ConvertCompressedToPCM);
+  ANL();
+
+  TEST_MUSTPASS(file->ConvertCompressedToPCM(
+          GetFilename("singleUserDemoConv.ilbc"),
+          GetFilename("singleUserDemoConv_ilbc.pcm")));MARK();
+  TEST_MUSTPASS(!file->ConvertCompressedToPCM(
+          GetResource("audio_long16.pcm"),
+          GetFilename("singleUserDemoConv_dummy.pcm")));MARK();
+  AOK();ANL();
+
+#if defined(MAC_IPHONE) || defined(WEBRTC_ANDROID)
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+#endif
+
+  // Misc file functions
+  TEST(GetFileDuration);
+  ANL();
+
+  int dur;
+
+  TEST_MUSTPASS(file->GetFileDuration(
+          GetResource("audio_long16.pcm"), dur));
+  TEST_MUSTPASS(file->GetFileDuration(
+          GetResource("audio_long8.pcm"), dur, kFileFormatPcm8kHzFile));
+  TEST_MUSTPASS(file->GetFileDuration(
+          GetResource("audio_long16.pcm"), dur, kFileFormatPcm16kHzFile));
+  TEST_MUSTPASS(file->GetFileDuration(
+          GetResource("audio_long16.wav"), dur, kFileFormatPcm8kHzFile));
+  TEST_MUSTPASS(file->GetFileDuration(
+          GetFilename("singleUserDemoConv.ilbc"), dur,
+          kFileFormatCompressedFile));
+
+  AOK();
+  ANL();
+
+  TEST(GetPlaybackPosition);
+  ANL();
+
+  int pos;
+
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, GetResource("audio_long16.pcm")));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->GetPlaybackPosition(0, pos));
+  MARK(); // position should be ~1000
+  SLEEP(1000);
+  TEST_MUSTPASS(file->GetPlaybackPosition(0, pos));
+  MARK(); // position should be ~2000
+  // SLEEP(70*1000);
+  // file is no longer playing
+  // TEST_MUSTPASS(file->GetPlaybackPosition(0, pos)); MARK();
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  AOK();
+  ANL();
+
+  // These tests are related to defect 5136
+  // They play .wav files with different sample freq for 5s
+  char localFiles[7][50] = { "audio_tiny8.wav", "audio_tiny11.wav",
+      "audio_tiny16.wav", "audio_tiny22.wav", "audio_tiny32.wav",
+      "audio_tiny44.wav", "audio_tiny48.wav" };
+  char freq[7][5] = { "8", "11", "16", "22", "32", "44.1", "48" };
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  for (int i = 0; i < 7; i++) {
+    TEST_LOG("Playing file %s, in %s KHz \n", localFiles[i], freq[i]);
+    TEST_MUSTPASS(file->StartPlayingFileLocally(
+            0, GetResource(localFiles[i]),false, kFileFormatWavFile, 1));
+    SLEEP(4500); // The file should not end
+    TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  }
+
+  // TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0)); // Should not work
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  AOK();
+  ANL();
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestMixing
+// ----------------------------------------------------------------------------
+
+// Creates and mixes |num_remote_channels| which play a file "as microphone"
+// with |num_local_channels| which play a file "locally", using a constant
+// amplitude of |input_value|.
+//
+// The mixed output is verified to always fall between |max_output_value| and
+// |min_output_value|, after a startup phase.
+int VoEExtendedTest::RunMixingTest(int num_remote_channels,
+                                   int num_local_channels,
+                                   int16_t input_value,
+                                   int16_t max_output_value,
+                                   int16_t min_output_value) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEAudioProcessing* apm = _mgr.APMPtr();
+
+  // Use L16 at 16kHz to minimize distortion (file recording is 16kHz
+  // and resampling will cause large distortions).
+  CodecInst codec_inst;
+  strcpy(codec_inst.plname, "L16");
+  codec_inst.channels = 1;
+  codec_inst.rate = 256000;
+  codec_inst.plfreq = 16000;
+  codec_inst.pltype = 105;
+  codec_inst.pacsize = 160;
+
+  apm->SetNsStatus(false);
+  apm->SetAgcStatus(false);
+  apm->SetEcStatus(false);
+
+  const char file_to_generate_name[] = "dc_file.pcm";
+  const char* input_filename = file_to_generate_name;
+  FILE* file_to_generate = fopen(file_to_generate_name, "wb");
+  ASSERT_TRUE(file_to_generate != NULL);
+  for (int i = 0; i < 160 * 100 * 5; i++) {
+    fwrite(&input_value, sizeof(input_value), 1, file_to_generate);
+  }
+  fclose(file_to_generate);
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  std::vector<int> local_channels(num_local_channels);
+  for (int i = 0; i < num_local_channels; ++i) {
+    local_channels[i] = voe_base_->CreateChannel();
+    ASSERT_TRUE(local_channels[i] != -1);
+    TEST_MUSTPASS(voe_base_->StartPlayout(local_channels[i]));
+    TEST_MUSTPASS(file->StartPlayingFileLocally(local_channels[i],
+                                                input_filename,
+                                                true));
+  }
+
+  std::vector<int> remote_channels(num_remote_channels);
+  for (int i = 0; i < num_remote_channels; ++i) {
+    remote_channels[i] = voe_base_->CreateChannel();
+    ASSERT_TRUE(remote_channels[i] != -1);
+    TEST_MUSTPASS(codec->SetRecPayloadType(remote_channels[i], codec_inst));
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(remote_channels[i],
+                                              1234 + 2 * i));
+    TEST_MUSTPASS(voe_base_->SetSendDestination(remote_channels[i],
+                                                1234 + 2 * i,
+                                                "127.0.0.1"));
+    TEST_MUSTPASS(voe_base_->StartReceive(remote_channels[i]));
+    TEST_MUSTPASS(voe_base_->StartPlayout(remote_channels[i]));
+    TEST_MUSTPASS(codec->SetSendCodec(remote_channels[i], codec_inst));
+    TEST_MUSTPASS(voe_base_->StartSend(remote_channels[i]));
+    TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(remote_channels[i],
+                                                     input_filename,
+                                                     true));
+  }
+
+  const char mix_result[] = "mix_result.pcm";
+  TEST_MUSTPASS(file->StartRecordingPlayout(-1/*record meeting*/,
+          mix_result));
+  TEST_LOG("Playing %d remote channels.\n", num_remote_channels);
+  TEST_LOG("Playing %d local channels.\n", num_local_channels);
+  SLEEP(5000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(-1));
+  TEST_LOG("Stopping\n");
+
+  for (int i = 0; i < num_local_channels; ++i) {
+    TEST_MUSTPASS(voe_base_->StopPlayout(local_channels[i]));
+    TEST_MUSTPASS(voe_base_->DeleteChannel(local_channels[i]));
+  }
+
+  for (int i = 0; i < num_remote_channels; ++i) {
+    TEST_MUSTPASS(voe_base_->StopSend(remote_channels[i]));
+    TEST_MUSTPASS(voe_base_->StopPlayout(remote_channels[i]));
+    TEST_MUSTPASS(voe_base_->StopReceive(remote_channels[i]));
+    TEST_MUSTPASS(voe_base_->DeleteChannel(remote_channels[i]));
+  }
+
+  FILE* verification_file = fopen(mix_result, "rb");
+  ASSERT_TRUE(verification_file != NULL);
+  int16_t mix_value = 0;
+  // Skip the first 100 ms to avoid initialization and ramping-in effects.
+  ASSERT_TRUE(fseek(verification_file, sizeof(int16_t) * 1600, SEEK_SET) == 0);
+  while (fread(&mix_value, sizeof(mix_value), 1, verification_file)) {
+    ASSERT_TRUE(mix_value <= max_output_value)
+    ASSERT_TRUE(mix_value >= min_output_value);
+  }
+  fclose(verification_file);
+
+  return 0;
+}
+
+// TODO(andrew): move or copy these to the mixer module test when possible.
+int VoEExtendedTest::TestMixing() {
+  // These tests assume a maxmium of three mixed participants. We allow a
+  // +/- 10% range around the expected output level to accout for distortion
+  // from coding and processing in the loopback chain.
+
+  // Create four channels and make sure that only three are mixed.
+  TEST_LOG("Test max-three-participant mixing.\n");
+  int16_t input_value = 1000;
+  int16_t expected_output = input_value * 3;
+  if (RunMixingTest(4, 0, input_value, 1.1 * expected_output,
+                    0.9 * expected_output) != 0) {
+    return -1;
+  }
+
+  // Ensure the mixing saturation protection is working. We can do this because
+  // the mixing limiter is given some headroom, so the expected output is less
+  // than full scale.
+  TEST_LOG("Test mixing saturation protection.\n");
+  input_value = 20000;
+  expected_output = 29204; // = -1 dBFS, the limiter headroom.
+  // If this isn't satisfied, we're not testing anything.
+  assert(input_value * 3 > 32767);
+  assert(1.1 * expected_output < 32767);
+  if (RunMixingTest(3, 0, input_value, 1.1 * expected_output,
+                    0.9 * expected_output) != 0) {
+    return -1;
+  }
+
+  // Ensure the mixing saturation protection is not applied when only using a
+  // single channel.
+  TEST_LOG("Test saturation protection has no effect on one channel.\n");
+  input_value = 32767;
+  expected_output = 32767;
+  // If this isn't satisfied, we're not testing anything.
+  assert(0.95 * expected_output > 29204); // = -1 dBFS, the limiter headroom.
+  if (RunMixingTest(1, 0, input_value, expected_output,
+                    0.95 * expected_output) != 0) {
+    return -1;
+  }
+
+  TEST_LOG("Test combinations of 'anonymous' participants and regular "
+           "participants.\n");
+  input_value = 1000;
+  expected_output = input_value * 2;
+  if (RunMixingTest(1, 1, input_value, 1.1 * expected_output,
+                    0.9 * expected_output) != 0) {
+
+    return -1;
+  }
+
+  expected_output = input_value * 4;
+  if (RunMixingTest(3, 1, input_value, 1.1 * expected_output,
+                    0.9 * expected_output) != 0) {
+
+    return -1;
+  }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestHardware
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestHardware() {
+  PrepareTest("Hardware");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEHardware* hardware = _mgr.HardwarePtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(
+              "VoEHardware_trace.txt")));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  // Set/GetAudioDeviceLayer
+  TEST(Set/GetAudioDeviceLayer);
+  ANL();
+  AudioLayers wantedLayer = kAudioPlatformDefault;
+  AudioLayers givenLayer;
+
+#if defined(_WIN32)
+  wantedLayer = kAudioWindowsCore;
+  hardware->SetAudioDeviceLayer(wantedLayer);
+  TEST_LOG("If you run on XP or below, CoreAudio "
+      "should not be able to set.\n");
+  TEST_LOG("If you run on Vista or above, CoreAudio "
+      "should be able to set.\n");
+  TEST_LOG("Verify that this is the case.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioWindowsCore)
+  {
+    TEST_LOG("CoreAudio was set\n");
+  }
+  else
+  {
+    TEST_LOG("CoreAudio was *not* set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  wantedLayer = kAudioWindowsWave;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_LOG("Wave audio should always be able to set.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioWindowsWave)
+  {
+    TEST_LOG("Wave audio was set\n");
+  }
+  else
+  {
+    TEST_LOG("Wave audio was not set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+  // end _WIN32
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+  wantedLayer = kAudioLinuxPulse;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_LOG("If you run on Linux with no/unsupported PA version, PulseAudio "
+      "7should not be able to set.\n");
+  TEST_LOG("If you run on Linux with supported PA version running, PulseAudio"
+      " should be able to set.\n");
+  TEST_LOG("Verify that this is the case.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioLinuxPulse)
+  {
+    TEST_LOG("\nPulseAudio was set\n");
+  }
+  else
+  {
+    TEST_LOG("\nPulseAudio was not set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  wantedLayer = kAudioLinuxAlsa;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_LOG("ALSA audio should always be able to set.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioLinuxAlsa)
+  {
+    TEST_LOG("\nALSA audio was set\n");
+  }
+  else
+  {
+    TEST_LOG("\nALSA audio was not set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+#endif // defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+  // Invalid arguments
+  wantedLayer = (AudioLayers) 17;
+  TEST_MUSTPASS(-1 != hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  // Basic usage
+  wantedLayer = kAudioPlatformDefault;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  TEST_MUSTPASS(givenLayer != wantedLayer);
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  wantedLayer = kAudioPlatformDefault;
+  TEST_MUSTPASS(-1 != hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_MUSTPASS(VE_ALREADY_INITED != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  MARK();
+  switch (givenLayer) {
+    case kAudioPlatformDefault:
+      // already set above
+      break;
+    case kAudioWindowsCore:
+      TEST_LOG("\nRunning kAudioWindowsCore\n");
+      break;
+    case kAudioWindowsWave:
+      TEST_LOG("\nRunning kAudioWindowsWave\n");
+      break;
+    case kAudioLinuxAlsa:
+      TEST_LOG("\nRunning kAudioLinuxAlsa\n");
+      break;
+    case kAudioLinuxPulse:
+      TEST_LOG("\nRunning kAudioLinuxPulse\n");
+      break;
+    default:
+      TEST_LOG("\nERROR: Running unknown audio layer!!\n");
+      return -1;
+  }
+  ANL();
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+  // GetRecording/PlayoutDeviceStatus
+  TEST(Getrecording/PlayoutDeviceStatus);
+  ANL();
+  bool isRecAvailable = false;
+  bool isPlayAvailable = false;
+  TEST_MUSTPASS(hardware->GetRecordingDeviceStatus(isRecAvailable));
+  TEST_MUSTPASS(!isRecAvailable);
+  MARK();
+  TEST_MUSTPASS(hardware->GetPlayoutDeviceStatus(isPlayAvailable));
+  TEST_MUSTPASS(!isPlayAvailable);
+  MARK();
+
+  ANL();
+
+  int nRec = 0, nPlay = 0;
+  char devName[128];
+  char guidName[128];
+  int idx;
+
+  TEST_MUSTPASS(hardware->GetNumOfPlayoutDevices(nPlay));
+
+  // GetPlayoutDeviceName
+  TEST(GetPlayoutDeviceName);
+  ANL();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(nPlay, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(-2, devName, guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(nPlay+1, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(0, NULL, guidName));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(hardware->GetPlayoutDeviceName(0, devName, NULL));
+
+  // default tests
+  for (idx = 0; idx < nPlay; idx++) {
+    TEST_MUSTPASS(hardware->GetPlayoutDeviceName(idx, devName, guidName));
+    MARK();
+    TEST_MUSTPASS(hardware->SetPlayoutDevice(idx));
+  }
+
+  ANL();
+
+  TEST_MUSTPASS(hardware->GetNumOfRecordingDevices(nRec));
+
+  // GetRecordingDeviceName
+  TEST(GetRecordingDeviceName);
+  ANL();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(nRec, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(-2, devName, guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(nRec+1, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(0, NULL, guidName));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(hardware->GetRecordingDeviceName(0, devName, NULL));
+
+  // default tests
+  for (idx = 0; idx < nRec; idx++) {
+    TEST_MUSTPASS(hardware->GetRecordingDeviceName(idx, devName, guidName));
+    MARK();
+    TEST_MUSTPASS(hardware->SetRecordingDevice(idx));
+  }
+  ANL();
+
+  // SetRecordingDevice
+  TEST(SetRecordingDevice);
+  ANL();
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  MARK();
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0, kStereoLeft));
+  MARK();
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0, kStereoRight));
+  MARK();
+  ANL();
+
+  // SetPlayoutDevice
+  TEST(SetPlayoutDevice);
+  ANL();
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1)); MARK();
+#else
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+  MARK();
+#endif
+  ANL();
+#endif // #if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+#if defined(MAC_IPHONE)
+  TEST(ResetSoundDevice); ANL();
+
+  for (int p=0; p<=60; p+=20)
+  {
+    TEST_LOG("Resetting sound device several times with pause %d ms\n", p);
+    for (int l=0; l<50; ++l)
+    {
+      TEST_MUSTPASS(hardware->ResetAudioDevice()); MARK();
+      SLEEP(p);
+    }
+    ANL();
+  }
+
+  TEST_LOG("Start streaming - verify the audio after each batch of resets \n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0,8000));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(2000);
+
+  SLEEP(2000);
+  for (int p=0; p<=60; p+=20)
+  {
+    TEST_LOG("Resetting sound device several time with pause %d ms\n", p);
+    for (int l=0; l<20; ++l)
+    {
+      TEST_MUSTPASS(hardware->ResetAudioDevice()); MARK();
+      SLEEP(p);
+    }
+    ANL();
+    SLEEP(2000);
+  }
+
+  TEST_LOG("Stop streaming \n");
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+#endif // defined(MAC_IPHONE)
+#ifdef MAC_IPHONE
+  TEST_LOG("\nNOTE: Always run hardware tests also without extended tests "
+      "enabled,\nsince the extended tests are pre-streaming tests only.\n");
+#endif
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  ANL();
+  AOK();
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestNetEqStats
+// ----------------------------------------------------------------------------
+
+// Stub: there are currently no extended tests for the NetEQ statistics
+// API; the "!EMPTY!" marker in the banner makes that explicit in the log.
+int VoEExtendedTest::TestNetEqStats() {
+  PrepareTest("NetEqStats (!EMPTY!)");  // print the standard sub-test banner
+
+  // AOK()/ANL() appear to be the suite's "test passed" / newline logging
+  // macros (defined elsewhere) -- emitted so the empty test still reports
+  // success like the other Test*() methods.
+  AOK();
+  ANL();
+
+  return 0;  // 0 == success, matching the other Test*() methods in this file
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestNetwork
+//
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestNetwork() {
+  PrepareTest("Network");
+
+#ifdef WEBRTC_ANDROID
+  int sleepTime = 200;
+  int sleepTime2 = 250;
+#elif defined(MAC_IPHONE) // MAC_IPHONE needs more delay for getSourceInfo()
+  int sleepTime = 150;
+  int sleepTime2 = 200;
+#else
+  int sleepTime = 100;
+  int sleepTime2 = 200;
+#endif
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoENetwork* netw = _mgr.NetworkPtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(
+              "VoENetwork_trace.txt")));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  // ------------------------------------------------------------------------
+  // >> GetLocalIP
+  //
+  // State: VE initialized, no existing channels
+  TEST(GetLocalIP);
+  ANL();
+
+#ifdef MAC_IPHONE
+  // Should fail
+  TEST_MUSTPASS(!netw->GetLocalIP(NULL, 0)); MARK();
+  TEST_ERROR(VE_FUNC_NOT_SUPPORTED);
+
+  ANL();
+  printf("NOTE: Local IP must be set in source code (line %d) \n",
+      __LINE__ + 1);
+  const char* localIP = "192.168.1.4";
+
+#else
+  char localIP[64];
+
+  // invalid parameter
+  TEST_MUSTPASS(!netw->GetLocalIP(NULL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // default function calls (IPv4)
+  TEST_MUSTPASS(netw->GetLocalIP(localIP));
+  MARK();
+  TEST_LOG("[local IPv4: %s]\n", localIP);
+  TEST_MUSTPASS(netw->GetLocalIP(localIP));
+  MARK();
+
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+  // default function calls (IPv6)
+  TEST_MUSTPASS(netw->GetLocalIP(localIP, true));
+  MARK();
+  TEST_LOG("[local IPv6: %s]\n", localIP);
+  TEST_MUSTPASS(netw->GetLocalIP(localIP, true));
+  MARK();
+#endif
+
+  // one last call to ensure that the local IP can still be retrieved
+  TEST_MUSTPASS(netw->GetLocalIP(localIP));
+  MARK();
+#endif
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetLocalIP
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetSourceInfo
+  //
+  // - VE initialized
+  // - no existing channels
+  TEST(GetSourceInfo);
+  ANL();
+
+  int rtpPort(0);
+  int rtcpPort(0);
+  char ipaddr[64] = { 0 };
+  ExtendedTestTransport* ptrTransport(NULL);
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // NULL as input string
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, NULL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // call when external transport is enabled
+  ptrTransport = new ExtendedTestTransport(netw);
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  delete ptrTransport;
+
+  // call when external transport is disabled (no packet received yet)
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+  // send and receive packets with default settings for a while
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime2); // does not guarantee RTCP
+
+  // verify remote parameters (exclude RTCP)
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // ensure that valid results are maintained after StopListen
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // verify that results are maintained after new call to SetLocalReceiver
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // STATE: not listening, not sending
+  // send and receive packets with other settings for a while
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 9005));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // STATE: listening, sending
+
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9005);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // restart sending to and from local IP
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 9005, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9005);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0); // should not be "127.0.0.1"
+
+  // use non-default source port in outgoing packets
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 9005));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, "127.0.0.1", 9010));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9010);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // STATE: listening and sending using an extra local socket
+
+  // stop/start sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify that the unique source port is maintained for the extra socket
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9010);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // set new source port for outgoing packets (9010 -> 9020)
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, "127.0.0.1", 9020));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+#ifdef MAC_IPHONE
+  SLEEP(500); // Need extra pause for some reason
+#endif
+
+  // verify that the unique source port is set for the new extra socket
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9020);
+  // STATE: listening and sending using an extra local socket
+
+  // remove extra send socket and restart call again
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0)); // delete channel => destroys the
+  // extra socket
+  TEST_MUSTPASS(voe_base_->CreateChannel()); // new channel uses one socket only
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000)); // use new port as well
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify that remote info is correct
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // STATE: listening and sending using shared socket
+
+  // use non-default source port in outgoing packets to create extra send
+  // socket
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 7000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 7000, "127.0.0.1", 7010));
+  // RTP src is 7010 => RTCP src = 7011
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 7010);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // check RTCP port as well (should be 7010 + 1 = 7011)
+  Sleep(8000, true);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 7010);
+  TEST_MUSTPASS(rtcpPort != 7011);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetSourceInfo
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetExternalTransport
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  //
+  TEST(SetExternalTransport);
+  ANL();
+
+  ptrTransport = new ExtendedTestTransport(netw);
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // different valid call combinations
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK(); // must deregister first
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+
+  // STATE: external transport is disabled
+
+  // initialize sending and ensure that external transport can't be enabled
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 1234, "127.0.0.2"));
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_ERROR(VE_SEND_SOCKETS_CONFLICT);
+
+  // restart channel to ensure that "initialized sender" state is cleared
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // initialize receiving and ensure that external transport can't be enabled
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 5678));
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_ERROR(VE_RECEIVE_SOCKETS_CONFLICT);
+
+  // restart channel to ensure that "initialized receiver" state is cleared
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // enable external transport and verify that "emulated loopback" works
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartSend(0)); // should only start recording
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK(); // should fail
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  Play(0, 2000, true, true); // play file as mic and verify loopback audio
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+
+  // STATE: external transport is disabled
+#if defined(WEBRTC_ANDROID) || defined(MAC_IPHONE)
+  int testError = VE_FUNC_NOT_SUPPORTED;
+#else
+  int testError = VE_EXTERNAL_TRANSPORT_ENABLED;
+#endif
+
+  // check all APIs that should fail when external transport is enabled
+  int DSCP, priority, serviceType, overrideDSCP, nBytes(0);
+  bool useSetSockopt, enabled;
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(0, 12345));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!voe_base_->GetLocalReceiver(0, rtpPort, rtcpPort, ipaddr));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!voe_base_->GetSendDestination(0, rtpPort, ipaddr, rtpPort,
+          rtcpPort));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->EnableIPv6(0))
+  TEST_ERROR(testError);
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0) != false)
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345, 12346));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+
+  // modified in VoE 3.4 (can also be called for external transport)
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+#if (!defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_MAC)) || \
+      defined(WEBRTC_EXTERNAL_TRANSPORT)
+  testError = VE_FUNC_NOT_SUPPORTED;
+#else
+  testError = VE_EXTERNAL_TRANSPORT_ENABLED;
+#endif
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0));
+  TEST_ERROR(testError);
+  TEST_MUSTPASS(!netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_ERROR(testError);
+#if !defined(_WIN32) || defined(WEBRTC_EXTERNAL_TRANSPORT)
+  testError = VE_FUNC_NOT_SUPPORTED;
+#else
+  testError = VE_EXTERNAL_TRANSPORT_ENABLED;
+#endif
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, false, 0));
+  TEST_ERROR(testError);
+  TEST_MUSTPASS(!netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_ERROR(testError);
+  char dummy[1] = { 'a' };
+  TEST_MUSTPASS(!netw->SendUDPPacket(0, dummy, 1, nBytes));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+
+  // always disable external transport before deleting the Transport object;
+  // will lead to crash for RTCP transmission otherwise
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  delete ptrTransport;
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetExternalTransport
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> EnableIPv6
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  // - NOTE: set _ENABLE_IPV6_TESTS_ to include these tests
+  // - http://www.microsoft.com/resources/documentation/windows/xp/all/
+  //   proddocs/en-us/sag_ip_v6_pro_rt_enable.mspx?mfr=true
+  // >> ipv6 install
+  // >> ipv6 [-v] if [IfIndex]
+  // >> ping6 ::1
+  // >> ping6 fe80::1
+
+#ifdef _ENABLE_IPV6_TESTS_
+
+  TEST(EnableIPv6); ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // call with enabled external transport
+  ptrTransport = new ExtendedTestTransport(netw);
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK();
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  delete ptrTransport;
+
+  // Test "locking" to IPv4
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0)); MARK(); // After this call we cannot
+  // enable IPv6
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK(); // Should fail
+
+  // Check that IPv6 address is invalid
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 8000, "::1")); MARK(); // fail
+
+  // New channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // valid default call
+  TEST_MUSTPASS(netw->EnableIPv6(0)); MARK();
+  TEST_MUSTPASS(netw->GetLocalIP(localIP)); MARK(); // should still read IPv4
+  TEST_LOG("[local IPv4: %s]", localIP);
+
+  // ensure that Ipv6 is enabled
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0) != true);
+
+  // check that IPv4 address is invalid
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_ERROR(VE_INVALID_IP_ADDRESS);
+
+  // verify usage of IPv6 loopback address
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  // IPv6 loopback address is 0:0:0:0:0:0:0:1
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "::1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK(); // Should fail
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true);
+  ANL();
+
+  // Restart channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST_MUSTPASS(netw->EnableIPv6(0)); MARK();
+  // ensure that Ipv6 is enabled
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0) != true);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  // IPv6 loopback address is 0:0:0:0:0:0:0:1
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "::1"));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(), true,
+      true);
+  SLEEP(500); // ensure that we receive some packets
+
+  // SetSourceFilter and GetSourceFilter
+  TEST(SetSourceFilter and GetSourceFilter for IPv6); ANL();
+  char sourceIp[64] =
+  { 0};
+  char filterIp[64] =
+  { 0};
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, sourceIp));
+  TEST_LOG("Source port: %d \n", rtpPort);
+  TEST_LOG("Source RTCP port: %d \n", rtcpPort);
+  TEST_LOG("Source IP: %s \n", sourceIp);
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_LOG("Filter port RTP: %d \n", rtpPort);
+  TEST_LOG("Filter port RTCP: %d \n", rtcpPort);
+  TEST_LOG("Filter IP: %s \n", filterIp);
+  TEST_MUSTPASS(0 != rtpPort);
+  TEST_MUSTPASS(0 != rtcpPort);
+  TEST_MUSTPASS(filterIp[0] != '\0');
+  TEST_LOG("Set filter IP to %s => should hear audio\n", sourceIp);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, sourceIp));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(0 != rtpPort);
+  TEST_MUSTPASS(0 != rtcpPort);
+  TEST_MUSTPASS(_stricmp(filterIp, sourceIp));
+  SLEEP(1500);
+  TEST_LOG("Set filter IP to ::10:10:10 => should *not* hear audio\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::10:10:10"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(_stricmp(filterIp, "::10:10:10"));
+  SLEEP(1500);
+  TEST_LOG("Disable IP filter => should hear audio again\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::0"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(_stricmp(filterIp, "::"));
+  SLEEP(1500);
+  TEST_LOG("Set filter IP to ::10:10:10 => should *not* hear audio\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::10:10:10"));
+  SLEEP(1500);
+  TEST_LOG("Disable IP filter => should hear audio again\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(filterIp[0] != '\0');
+  SLEEP(1500);
+  TEST_LOG("Set filter IP to ::10:10:10 => should *not* hear audio\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::10:10:10"));
+  SLEEP(1500);
+  TEST_LOG("Disable IP filter => should hear audio again\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(_stricmp(filterIp, "::"));
+  SLEEP(1500);
+
+  file->StopPlayingFileAsMicrophone(0);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+#endif // #ifdef _ENABLE_IPV6_TESTS_
+  // >> end of EnableIPv6
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSourceFilter
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(SetSourceFilter);
+  ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // invalid parameters
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR);
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345, 65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR);
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345, 12346, "300.300.300.300"));
+  MARK();
+  TEST_ERROR(VE_INVALID_IP_ADDRESS);
+
+  // STATE: RTP filter port is 12345, RTCP filter port is 12346
+
+  // disable all filters and ensure that media is received
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 2000);
+  TEST_MUSTPASS(rtcpPort != 2001);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0);
+
+  // clear states and restart loopback session
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0)); // clear source info state
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // set RTP filter to port 2002 and verify that source 2000 is blocked
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 2002, 0, NULL));;
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // ensure that received packets originates from 2002 and that they now pass
+  // the filter
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // RTP source is 2002
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2002, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2002, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 2002);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0);
+
+  // clear states and restart loopback session
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0)); // clear source info state
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // set IP filter to local IP and verify that default loopback stream is
+  // blocked
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, localIP));;
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // ensure that received packets originates from the local IP and that they
+  // now pass the filter
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // should pass the filter
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 2000);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0);
+
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  // STATE: no active media, IP filter is active
+
+  // disable all filters
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));;
+  MARK();
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetSourceFilter
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetSourceFilter
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(GetSourceFilter);
+  ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // invalid input parameters
+  TEST_MUSTPASS(!netw->GetSourceFilter(0, rtpPort, rtcpPort, NULL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // valid call without any filter set
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // STATE: no active media and no enabled filters
+
+  // set different filters and verify that they "bite"
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 54321, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 54321);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 15425, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtcpPort != 15425);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "192.168.199.19"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "192.168.199.19") != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "0.0.0.0"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "0.0.0.0") != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetSourceFilter
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> RegisterDeadOrAliveObserver
+  // >> DeRegisterDeadOrAliveObserver
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(RegisterDeadOrAliveObserver);
+  ANL();
+  TEST(DeRegisterDeadOrAliveObserver);
+  ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST_MUSTPASS(netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_MUSTPASS(!netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK(); // already registered
+  TEST_ERROR(VE_INVALID_OPERATION);
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK(); // OK to do it again
+  TEST_MUSTPASS(netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  // STATE: dead-or-alive observer is disabled
+
+  // >> end of RegisterDeadOrAliveObserver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetPeriodicDeadOrAliveStatus
+  // >> GetPeriodicDeadOrAliveStatus
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(0, false));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // Invalid parameters
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(0, true, 0));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(0, true, 151));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(1, true, 10));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  int sampleTime(0);
+
+  // Valid parameters
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 1));
+  MARK();
+  TEST_MUSTPASS(netw->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(sampleTime != 1);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 150));
+  MARK();
+  TEST_MUSTPASS(netw->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(sampleTime != 150);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  MARK();
+  TEST_MUSTPASS(netw->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(sampleTime != 150); // ensure last set time isn't modified
+
+  StartMedia(0, 2000, true, true, true);
+
+  // STATE: full duplex media is active
+
+  // test the dead-or-alive mechanism
+  TEST_MUSTPASS(netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_LOG("\nVerify that Alive callbacks are received (dT=2sec): ");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 2));
+  SLEEP(6000);
+  TEST_LOG("\nChange dT to 1 second: ");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 1));
+  SLEEP(6000);
+  TEST_LOG("\nDisable dead-or-alive callbacks: ");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  SLEEP(6000);
+  TEST_LOG("\nStop sending and enable callbacks again.\n");
+  TEST_LOG("Verify that Dead callbacks are received (dT=2sec): ");
+  fflush(NULL);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 2));
+  SLEEP(6000);
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_LOG("\nRestart sending.\n");
+  TEST_LOG("Verify that Alive callbacks are received again (dT=2sec): ");
+  fflush(NULL);
+  SLEEP(6000);
+  TEST_LOG("\nDisable dead-or-alive callbacks.");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK();
+
+  StopMedia(0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetPeriodicDeadOrAliveStatus
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetPacketTimeoutNotification
+  // >> GetPacketTimeoutNotification
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  // - NOTE: dynamic tests are performed in standard test
+
+  int timeOut(0);
+
+  TEST(SetPacketTimeoutNotification);
+  ANL();
+  TEST(GetPacketTimeoutNotification);
+  ANL();
+
+  // call without existing valid channel
+  TEST_MUSTPASS(!netw->SetPacketTimeoutNotification(0, false));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // invalid function calls
+  TEST_MUSTPASS(!netw->SetPacketTimeoutNotification(0, true, 0));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetPacketTimeoutNotification(0, true, 151));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // valid function calls (no active media)
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, true, 2));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(timeOut != 2);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, false));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, true, 10));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(timeOut != 10);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, true, 2));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(timeOut != 2);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, false));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetPacketTimeoutNotification
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SendUDPPacket
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+
+
+  // >> end of SendUDPPacket
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSendTOS
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(SetSendTOS);
+  ANL();
+#if defined(_WIN32) || defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+
+  // call without existing valid channel
+
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0)); MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // trivial invalid function calls
+  TEST_MUSTPASS(!netw->SetSendTOS(0, -1)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 64)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, -2)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, 8)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1)); MARK();
+  TEST_ERROR(VE_SOCKET_ERROR); // must create sockets first
+
+#ifdef _WIN32
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 3000));
+
+  // enable ToS using SetSockopt (should work without local binding)
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, -1, true)); MARK();
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 1);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != true);
+
+  // try to disable SetSockopt while ToS is enabled (should fail)
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, -1, false)); MARK();
+  TEST_ERROR(VE_TOS_INVALID); // must disable ToS before enabling SetSockopt
+
+  // disable ToS to be able to stop using SetSockopt
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, -1, true)); MARK(); // disable ToS
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 0);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != true);
+
+  // to use the "non-SetSockopt" method, local binding is required,
+  // trying without it should fail
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, -1, false)); MARK();
+  TEST_ERROR(VE_TOS_ERROR); // must bind to local IP first
+
+  // bind to local IP and try again (should work this time)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345, kVoEDefault, localIP));
+  TEST_LOG("\nThis test needs to be run as administrator\n");
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, -1, false)); MARK();
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 1);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != false);
+
+  // STATE: bound to local IP, local port is 12345 and DSCP is 1 (not using
+  // SetSockopt)
+
+  // verify loopback audio with the current settings
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Send to remote destination and verify the DSCP using Wireshark.
+  // Use filter ip.src == "RemoteIP".
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP at the "
+      "remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each DSCP below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+  TEST_LOG("  DSCP is set to 0x%02x\n", 1);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 2));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x\n", DSCP);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 63));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x\n", DSCP);
+  SLEEP(100);
+
+  // stop and resume sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x\n", DSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0));
+#endif // _SEND_TO_REMOTE_IP_
+  // Windows priority tests (priority cannot be set using setsockopt on Win)
+  TEST_LOG("Testing priority\n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0, 3, true)); // Should fail
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, 3, false));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, 3, false));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+#endif // _WIN32
+  // STATE: no media, disabled ToS, no defined receiver
+
+  // Repeat tests above but using setsockopt() this time.
+  // Binding to local IP should not be required.
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345, kVoEDefault));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 10, -1, true)); MARK();
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 10);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != true);
+
+  // STATE: *not* bound to local IP, local port is 12345 and DSCP is 10
+  // (using SetSockopt)
+
+  // verify loopback audio with the current settings
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Send to remote destination and verify the DSCP using Wireshark.
+  // Use filter ip.src == "RemoteIP".
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP at the"
+      " remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each DSCP below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 20, -1, true)); // use setsockopt()
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 61, -1, true)); // use setsockopt()
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+
+  // stop and resume sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, -1, true));
+#endif // _SEND_TO_REMOTE_IP_
+#if defined(WEBRTC_LINUX)
+  // Linux priority tests (using setsockopt)
+  TEST_LOG("Testing priority\n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, 3, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, 3, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+#endif // #if defined(WEBRTC_LINUX)
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX)
+  // Fail tests for platforms other than Windows and Linux
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0, 3, false)); // Should fail
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+#endif // #if !defined(_WIN32) && !defined(WEBRTC_LINUX)
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL(); AOK(); ANL(); ANL();
+
+  // END #if defined(_WIN32) || defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+#else
+  TEST_LOG("Skipping ToS tests -  _WIN32, LINUX, MAC is not defined or "
+    "WEBRTC_ANDROID is defined");
+#endif
+
+  // >> end of SetSendTOS
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSendGQoS (Windows only)
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  //
+  // From qos.h:
+  //
+  //  #define SERVICETYPE_NOTRAFFIC               0x00000000
+  //  #define SERVICETYPE_BESTEFFORT              0x00000001 (*)
+  //  #define SERVICETYPE_CONTROLLEDLOAD          0x00000002 (*)
+  //  #define SERVICETYPE_GUARANTEED              0x00000003 (*)
+  //  #define SERVICETYPE_NETWORK_UNAVAILABLE     0x00000004
+  //  #define SERVICETYPE_GENERAL_INFORMATION     0x00000005
+  //  #define SERVICETYPE_NOCHANGE                0x00000006
+  //  #define SERVICETYPE_NONCONFORMING           0x00000009
+  //  #define SERVICETYPE_NETWORK_CONTROL         0x0000000A
+  //  #define SERVICETYPE_QUALITATIVE             0x0000000D (*)
+  //
+  //  #define SERVICE_BESTEFFORT                  0x80010000
+  //  #define SERVICE_CONTROLLEDLOAD              0x80020000
+  //  #define SERVICE_GUARANTEED                  0x80040000
+  //  #define SERVICE_QUALITATIVE                 0x80200000
+  //
+  //  (*) supported in WEBRTC VoE
+  TEST(SetSendGQoS);
+  ANL();
+#ifdef _WIN32
+
+  // call without existing valid channel
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, false, 0)); MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // supported service type but no sockets
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_ERROR(VE_SOCKETS_NOT_INITED);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+
+  // supported service type but sender is not initialized
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_ERROR(VE_DESTINATION_NOT_INITED);
+
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+
+  // invalid service types
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NOTRAFFIC)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NETWORK_UNAVAILABLE));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_GENERAL_INFORMATION));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NOCHANGE)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NONCONFORMING));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NETWORK_CONTROL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_BESTEFFORT)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_CONTROLLEDLOAD)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_GUARANTEED)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_QUALITATIVE)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // Is ToS enabled here?
+
+  // Settings which don't require binding to local IP
+
+  // set SERVICETYPE_BESTEFFORT
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_BESTEFFORT);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // set SERVICETYPE_CONTROLLEDLOAD
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_CONTROLLEDLOAD));
+  MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_CONTROLLEDLOAD);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // set SERVICETYPE_GUARANTEED
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_GUARANTEED)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_GUARANTEED);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // set SERVICETYPE_QUALITATIVE
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_QUALITATIVE)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_QUALITATIVE);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // disable GQoS
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_QUALITATIVE);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // STATE: disabled GQoS, sockets exist, sending side is initialized, no media
+
+  // Loopback tests using the four different GQoS settings
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  ANL();
+  TEST_LOG("[SERVICETYPE_BESTEFFORT]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_CONTROLLEDLOAD)); MARK();
+  ANL();
+  TEST_LOG("[SERVICETYPE_CONTROLLEDLOAD]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_GUARANTEED)); MARK();
+  ANL();
+  TEST_LOG("[SERVICETYPE_GUARANTEED]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_QUALITATIVE)); MARK();
+  ANL();
+  TEST_LOG("[SERVICETYPE_QUALITATIVE]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Send to remote destination and verify the DSCP mapping using Wireshark.
+  // Use filter ip.src == "RemoteIP".
+
+  // Modify the send destination on the fly
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP mapping at"
+      " the remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each GQoS setting below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT (0x%02x), should "
+      "be mapped to DSCP = 0x00\n", serviceType);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_CONTROLLEDLOAD));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_CONTROLLEDLOAD (0x%02x), "
+      "should be mapped to DSCP = 0x18\n", serviceType);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_LOG("  QoS is disabled, should give DSCP = 0x%02x\n", 0);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_GUARANTEED));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_GUARANTEED (0x%02x), should "
+      "be mapped to DSCP = 0x28\n", serviceType);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_QUALITATIVE));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_QUALITATIVE (0x%02x), should"
+      " be mapped to DSCP = 0x00\n", serviceType);
+  SLEEP(100);
+#endif // _SEND_TO_REMOTE_IP_
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  // STATE: sockets exists, sending side is initialized, no media
+
+  // Repeat test above but this time using overrideDSCP.
+
+  // Some initial loopback tests.
+  // NOTE - override DSCP requires binding to local IP.
+
+  // should not work since QoS is enabled
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 3));
+  MARK();
+  TEST_ERROR(VE_TOS_GQOS_CONFLICT);
+
+  // disable QoS and try to override again (should fail again since local
+  // binding is not done yet)
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 3));
+  MARK();
+  TEST_ERROR(VE_GQOS_ERROR);
+
+  // make proper settings and try again (should work this time)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 3));
+  MARK();
+
+  // Now, let's try some loopback tests using override DSCP
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  ANL();
+  TEST_LOG("[overrideDSCP=3]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 17));
+  MARK();
+  ANL();
+  TEST_LOG("[overrideDSCP=17]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  // And finally, send to remote side as well to verify that the new mapping
+  // works as it should.
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Modify the send destination on the fly
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP mapping at"
+      " the remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each GQoS setting below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 18));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 62));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 32));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 1));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_LOG("  QoS is disabled, should give DSCP = 0x%02x\n", 0);
+  SLEEP(100);
+#endif // _SEND_TO_REMOTE_IP_
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL(); AOK(); ANL(); ANL();
+
+#else
+  TEST_LOG("Skipping GQoS tests - _WIN32 is not defined");
+#endif  // #ifdef _WIN32
+  // >> end of SetSendGQoS
+  // ------------------------------------------------------------------------
+
+    if (file) {
+    file->StopPlayingFileAsMicrophone(0);
+  }
+  voe_base_->StopSend(0);
+  voe_base_->StopPlayout(0);
+  voe_base_->StopReceive(0);
+  voe_base_->DeleteChannel(0);
+  voe_base_->Terminate();
+
+  ANL();
+  AOK();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestRTP_RTCP
+// ----------------------------------------------------------------------------
+
+// Intercepts outgoing RTP packets to validate the audio-level header extension.
+class RTPAudioTransport: public Transport {
+public:
+
+  RTPAudioTransport() :
+    mute_(false) { // Start in unmuted mode (normal VAD/level expected).
+  }
+
+  virtual ~RTPAudioTransport() {
+  }
+
+  void set_mute(bool mute) { // Toggle the muted-input expectation below.
+    mute_ = mute;
+  }
+  bool mute() const {
+    return mute_;
+  }
+
+  // TODO(andrew): use proper error checks here rather than asserts.
+  virtual int SendPacket(int channel, const void* data, int length) {
+    const uint8_t* packet = static_cast<const uint8_t*> (data);
+
+    // First octet: the X (extension) bit must be set.
+    assert(packet[0] & 0x10);
+    int index = 12; // Fixed 12-byte RTP header assumed (no CSRC entries).
+    // One-byte-header extension magic cookie 0xBEDE (RFC 5285).
+    assert(packet[index++] == 0xBE);
+    assert(packet[index++] == 0xDE);
+    // Extension length field: exactly one 32-bit word of extension data.
+    assert(packet[index++] == 0x00);
+    assert(packet[index++] == 0x01);
+
+    // Element ID in the high nibble; presumably 1 is the ID registered by
+    assert(((packet[index] & 0xf0) >> 4) == 1);
+    // this test — verify against caller. Low nibble: length 0 = 1 data byte.
+    assert((packet[index++] & 0x0f) == 0);
+
+    int vad = packet[index] >> 7; // Top bit: voice-activity flag.
+    int level = packet[index] & 0x7f; // Remaining 7 bits: audio level.
+    if (channel == 0) {
+      printf("%d    -%d\n", vad, level);
+    } else if (channel == 1) {
+      printf("             %d    -%d\n", vad, level);
+    } else {
+      assert(false); // Only channels 0 and 1 are used by this test.
+    }
+
+    if (mute_) {
+      assert(vad == 0);
+      assert(level == 127); // Muted input must report the minimum level.
+    } else {
+      assert(vad == 0 || vad == 1);
+      assert(level >= 0 && level <= 127);
+    }
+
+    return 0;
+  }
+
+  virtual int SendRTCPPacket(int /*channel*/, const void* /*data*/,
+                             int /*length*/) {
+    return 0; // RTCP packets are not validated by this test transport.
+  }
+
+private:
+  bool mute_; // When true, SendPacket expects muted-level packets.
+};
+
+int VoEExtendedTest::TestRTP_RTCP() {
+  PrepareTest("RTP_RTCP");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoERTP_RTCP* rtp_rtcp = _mgr.RTP_RTCPPtr();
+  VoENetwork* network = _mgr.NetworkPtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+  VoECodec* codec = _mgr.CodecPtr();
+
+  XRTPObserver rtpObserver;
+
+#ifdef WEBRTC_ANDROID
+  int sleepTime = 200;
+#else
+  int sleepTime = 100;
+#endif
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(
+              "VoERTP_RTCP_trace.txt")));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  // ------------------------------------------------------------------------
+  // >> Set/GetRTPAudioLevelIndicationStatus
+  TEST(SetRTPAudioLevelIndicationStatus);
+  ANL();
+  TEST(GetRTPAudioLevelIndicationStatus);
+
+  // test invalid input parameters
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true, 0));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true, 15));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, false, 15));
+  MARK();
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(1, true, 5));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  // test complete valid input range [1,14]
+  bool audioLevelEnabled(false);
+  unsigned char ID(0);
+  for (int id = 1; id < 15; id++) {
+    TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true, id));
+    MARK();
+    TEST_MUSTPASS(rtp_rtcp->GetRTPAudioLevelIndicationStatus(
+            0, audioLevelEnabled, ID));
+    MARK();
+    TEST_MUSTPASS(audioLevelEnabled != true);
+    TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, false, id));
+    MARK();
+    TEST_MUSTPASS(rtp_rtcp->GetRTPAudioLevelIndicationStatus(
+            0, audioLevelEnabled, ID));
+    MARK();
+    TEST_MUSTPASS(audioLevelEnabled != false);
+    TEST_MUSTPASS(ID != id);
+  }
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  RTPAudioTransport rtpAudioTransport;
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(network->RegisterExternalTransport(0, rtpAudioTransport));
+  TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true));
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+
+  printf("\n\nReceving muted packets (expect VAD = 0, Level = -127)...\n");
+  printf("VAD  Level [dbFS]\n");
+  SLEEP(2000);
+  rtpAudioTransport.set_mute(true);
+  TEST_MUSTPASS(volume->SetInputMute(0, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  rtpAudioTransport.set_mute(false);
+  TEST_MUSTPASS(volume->SetInputMute(0, false));
+
+  printf("\nReceiving packets from mic (should respond to mic level)...\n");
+  printf("VAD  Level [dbFS]\n");
+  SLEEP(2000);
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  printf("\nReceiving packets from file (expect mostly VAD = 1)...\n");
+  printf("VAD  Level [dbFS]\n");
+  SLEEP(2000);
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  printf("\nMuted and mic on independent channels...\n");
+  printf("Muted        Mic\n");
+  SLEEP(2000);
+  ASSERT_TRUE(1 == voe_base_->CreateChannel());
+  TEST_MUSTPASS(network->RegisterExternalTransport(1, rtpAudioTransport));
+  TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(1, true));
+  TEST_MUSTPASS(codec->SetVADStatus(1, true));
+  TEST_MUSTPASS(volume->SetInputMute(0, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(1));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopSend(1));
+
+  TEST_MUSTPASS(network->DeRegisterExternalTransport(0));
+  TEST_MUSTPASS(network->DeRegisterExternalTransport(1));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(1));
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  MARK();
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> InsertExtraRTPPacket
+
+  int i(0);
+
+  TEST(SetLocalSSRC);
+  TEST_MUSTPASS(!rtp_rtcp->SetLocalSSRC(0, 5678));
+  MARK();
+  TEST_MUSTPASS(VE_ALREADY_SENDING != voe_base_->LastError());
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(rtp_rtcp->SetLocalSSRC(0, 5678)); // force send SSRC to 5678
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+  ANL();
+
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  // ------------------------------------------------------------------------
+  // >> InsertExtraRTPPacket
+  TEST(InsertExtraRTPPacket);
+  ANL();
+
+  const char payloadData[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
+
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(-1, 0, false,
+          payloadData, 8));
+  MARK(); // invalid channel
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, -1, false,
+          payloadData, 8));
+  MARK(); // invalid payload type
+  TEST_ERROR(VE_INVALID_PLTYPE);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 128, false,
+          payloadData, 8));
+  MARK(); // invalid payload type
+  TEST_ERROR(VE_INVALID_PLTYPE);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 99, false,
+          NULL, 8));
+    MARK(); // invalid pointer
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 99, false,
+          payloadData, 1500-28+1));
+  MARK(); // invalid size
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 99, false,
+          payloadData, 8));
+  MARK(); // not sending
+  TEST_ERROR(VE_NOT_SENDING);
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  SLEEP(1000);
+  for (int p = 0; p < 128; p++) {
+    TEST_MUSTPASS(rtp_rtcp->InsertExtraRTPPacket(0, p, false,
+            payloadData, 8));
+    MARK();
+    TEST_MUSTPASS(rtp_rtcp->InsertExtraRTPPacket(0, p, true,
+            payloadData, 8));
+    MARK();
+  }
+
+  // Ensure we have sent all extra packets before we move forward to avoid
+  //incorrect error code
+  SLEEP(1000);
+
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> RTP dump APIs
+  TEST(Start/StopRtpDump);
+  ANL();
+  TEST(Start/RTPDumpIsActive);
+
+  TEST_MUSTPASS(-1 != rtp_rtcp->RTPDumpIsActive(-1, kRtpIncoming));
+  MARK(); // invalid channel
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(false != rtp_rtcp->RTPDumpIsActive(0, kRtpIncoming));
+  MARK(); // should be off by default
+  TEST_MUSTPASS(false != rtp_rtcp->RTPDumpIsActive(0, kRtpOutgoing));
+  MARK(); // should be off by default
+
+  TEST_MUSTPASS(-1 != rtp_rtcp->StartRTPDump(-1, NULL));
+  MARK(); // invalid channel
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(-1 != rtp_rtcp->StartRTPDump(0, NULL));
+  MARK(); // invalid file name
+  TEST_ERROR(VE_BAD_FILE);
+
+  // Create two RTP dump files:
+
+  //  - dump_in_1sec.rtp <=> ~1 sec recording of input side
+  //  - dump_in_2sec.rtp <=> ~2 sec recording of output side
+  //
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpIncoming));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpOutgoing));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StartRTPDump(0, GetFilename("dump_in_1sec.rtp"),
+          kRtpIncoming));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StartRTPDump(0, GetFilename("dump_out_2sec.rtp"),
+          kRtpOutgoing));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpIncoming));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpOutgoing));
+  MARK();
+
+  // Start/Stop tests:
+  //
+  // - only one file (called dump_in_200ms.rtp) should exist after this test
+  //
+  for (i = 0; i < 10; i++) {
+    TEST_MUSTPASS(rtp_rtcp->StartRTPDump(0,
+            GetFilename("dump_in_200ms.rtp")));
+    MARK();
+    SLEEP(200);
+    TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0));
+    MARK();
+  }
+
+  // >> end of RTP dump APIs
+  // ------------------------------------------------------------------------
+  ANL();
+
+  TEST(GetRTCPStatus);
+  bool enabled;
+  TEST_MUSTPASS(!rtp_rtcp->GetRTCPStatus(-1, enabled));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatus(0, enabled));
+  MARK(); // should be on by default
+  TEST_MUSTPASS(enabled != true);
+  ANL();
+
+  TEST(SetRTCPStatus);
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, false));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatus(0, enabled));
+  TEST_MUSTPASS(enabled != false);
+  MARK();
+  SLEEP(2000);
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, true));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatus(0, enabled));
+  TEST_MUSTPASS(enabled != true);
+  MARK();
+  SLEEP(6000); // Make sure we get an RTCP packet
+  ANL();
+
+  TEST(CNAME);
+  TEST_MUSTPASS(!rtp_rtcp->SetRTCP_CNAME(0, NULL));
+  MARK();
+  TEST_MUSTPASS(VE_RTP_RTCP_MODULE_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!rtp_rtcp->GetRemoteRTCP_CNAME(0, NULL));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  ANL();
+
+  TEST(GetRemoteSSRC);
+  unsigned int ssrc(0);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteSSRC(0, ssrc));
+  MARK();
+  TEST_MUSTPASS(ssrc != 5678);
+  ANL();
+
+  TEST(GetRemoteCSRC); // only trivial tests added
+  unsigned int csrcs[2];
+  int n(0);
+  TEST_MUSTPASS(!rtp_rtcp->GetRemoteCSRCs(1, csrcs));
+  MARK();
+  n = rtp_rtcp->GetRemoteCSRCs(0, csrcs);
+  MARK();
+  TEST_MUSTPASS(n != 0); // should be empty
+  ANL();
+
+  TEST(SetRTPObserver);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(rtp_rtcp->RegisterRTPObserver(0, rtpObserver));
+  TEST_MUSTPASS(rtp_rtcp->DeRegisterRTPObserver(0));
+  TEST_MUSTPASS(rtp_rtcp->RegisterRTPObserver(0, rtpObserver));
+  TEST_MUSTPASS(rtp_rtcp->SetLocalSSRC(0, 7777)); // force send SSRC to 7777
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  // verify that the new SSRC has been detected by the observer
+  TEST_MUSTPASS(rtpObserver._SSRC != 7777);
+  TEST_MUSTPASS(rtp_rtcp->DeRegisterRTPObserver(0));
+  ANL();
+
+  TEST(GetRTPKeepaliveStatus);
+  unsigned char pt;
+  int dT;
+  TEST_MUSTPASS(!rtp_rtcp->GetRTPKeepaliveStatus(-1, enabled, pt, dT));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
+  MARK(); // should be off by default
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(pt != 255);
+  TEST_MUSTPASS(dT != 0);
+  ANL();
+
+  TEST(SetRTPKeepaliveStatus);
+  // stop send before changing the settings
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // verify invalid input parameters
+  TEST_MUSTPASS(!rtp_rtcp->SetRTPKeepaliveStatus(-1, true, 0, 15));
+  MARK();
+  TEST_MUSTPASS(!rtp_rtcp->SetRTPKeepaliveStatus(0, true, -1, 15));
+  MARK();
+  TEST_MUSTPASS(!rtp_rtcp->SetRTPKeepaliveStatus(0, true, 0, 61));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
+  MARK(); // should still be off
+  TEST_MUSTPASS(enabled != false);
+  // try valid settings
+  TEST_MUSTPASS(rtp_rtcp->SetRTPKeepaliveStatus(0, true, 117));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
+  MARK(); // should be on now
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(pt != 117);
+  TEST_MUSTPASS(dT != 15);
+  // change from PT 99 to 121, as 99 is occupied
+  TEST_MUSTPASS(rtp_rtcp->SetRTPKeepaliveStatus(0, true, 121, 3));
+  MARK(); // on, PT=99, dT=3
+  TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(pt != 121);
+  TEST_MUSTPASS(dT != 3);
+  ANL();
+
+  // Make fresh restart (ensures that SSRC is randomized)
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  SLEEP(100);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  SLEEP(8000);
+
+  TEST(GetRemoteRTCPData);
+  // Statistics based on received RTCP reports (i.e. statistics on the remote
+  // side sent to us).
+  unsigned int NTPHigh(0), NTPLow(0), timestamp(0), playoutTimestamp(0),
+      jitter(0);
+  unsigned short fractionLost(0);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh, NTPLow,
+          timestamp, playoutTimestamp));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n  "
+    "  playoutTimestamp = %u \n    jitter = %u \n    fractionLost = %hu \n",
+    NTPHigh, NTPLow, timestamp, playoutTimestamp, jitter, fractionLost);
+
+  unsigned int NTPHigh2(0), NTPLow2(0), timestamp2(0);
+  unsigned int playoutTimestamp2(0), jitter2(0);
+  unsigned short fractionLost2(0);
+
+  TEST_LOG("take a new sample and ensure that the playout timestamp is "
+    "maintained");
+  SLEEP(100);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh2, NTPLow2, timestamp2,
+          playoutTimestamp2, &jitter2,
+          &fractionLost2));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n  "
+    "  playoutTimestamp = %u \n    jitter = %u \n    fractionLost = %hu \n",
+    NTPHigh2, NTPLow2, timestamp2, playoutTimestamp2, jitter2, fractionLost2);
+  TEST_MUSTPASS(playoutTimestamp != playoutTimestamp2);
+
+  TEST_LOG("wait for 8 seconds and ensure that the RTCP statistics is"
+    " updated...");
+  SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh2, NTPLow2,
+          timestamp2, playoutTimestamp2,
+          &jitter2, &fractionLost2));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n  "
+    "  playoutTimestamp = %u \n    jitter = %u \n    fractionLost = %hu \n",
+    NTPHigh2, NTPLow2, timestamp2, playoutTimestamp2, jitter2, fractionLost2);
+  TEST_MUSTPASS((NTPHigh == NTPHigh2) && (NTPLow == NTPLow2));
+  TEST_MUSTPASS(timestamp == timestamp2);
+  TEST_MUSTPASS(playoutTimestamp == playoutTimestamp2);
+
+#ifdef WEBRTC_CODEC_RED
+  //The following test is related to defect 4985 and 4986
+  TEST_LOG("Turn FEC and VAD on and wait for 4 seconds and ensure that "
+    "the jitter is still small...");
+  CodecInst cinst;
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  cinst.pltype = 104;
+  strcpy(cinst.plname, "isac");
+  cinst.plfreq = 32000;
+  cinst.pacsize = 960;
+  cinst.channels = 1;
+  cinst.rate = 45000;
+#else
+  cinst.pltype = 119;
+  strcpy(cinst.plname, "isaclc");
+  cinst.plfreq = 16000;
+  cinst.pacsize = 320;
+  cinst.channels = 1;
+  cinst.rate = 40000;
+#endif
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, true, -1));
+  MARK();
+  TEST_MUSTPASS(codec->SetVADStatus(0,true));
+  SLEEP(4000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh2, NTPLow2, timestamp2,
+          playoutTimestamp2, &jitter2,
+          &fractionLost2));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n "
+    "   playoutTimestamp = %u \n    jitter = %u \n   fractionLost = %hu \n",
+    NTPHigh2, NTPLow2, timestamp2, playoutTimestamp2, jitter2, fractionLost2);
+  TEST_MUSTPASS(jitter2 > 1000)
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, false));
+  MARK();
+  //4985 and 4986 end
+#endif // #ifdef WEBRTC_CODEC_RED
+  TEST(GetRTPStatistics);
+  ANL();
+  // Statistics summarized on local side based on received RTP packets.
+  CallStatistics stats;
+  // Call GetRTPStatistics over a longer period than 7.5 seconds
+  // (=dT RTCP transmissions).
+  unsigned int averageJitterMs, maxJitterMs, discardedPackets;
+  SLEEP(1000);
+  for (i = 0; i < 8; i++) {
+    TEST_MUSTPASS(rtp_rtcp->GetRTPStatistics(0, averageJitterMs,
+            maxJitterMs,
+            discardedPackets));
+    TEST_LOG("    %i) averageJitterMs = %u \n    maxJitterMs = %u \n  "
+      "  discardedPackets = %u \n", i, averageJitterMs, maxJitterMs,
+      discardedPackets);
+    SLEEP(1000);
+  }
+
+  TEST(RTCPStatistics #1);
+  ANL();
+  unsigned int packetsSent(0);
+  unsigned int packetsReceived(0);
+  for (i = 0; i < 8; i++)
+  {
+    TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+    TEST_LOG("    %i) fractionLost = %hu \n    cumulativeLost = %u \n  "
+        "  extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+        i, stats.fractionLost, stats.cumulativeLost,
+        stats.extendedMax, stats.jitterSamples, stats.rttMs);
+    TEST_LOG( "    bytesSent = %d \n    packetsSent = %d \n   "
+        " bytesReceived = %d \n    packetsReceived = %d \n",
+        stats.bytesSent, stats.packetsSent, stats.bytesReceived,
+        stats.packetsReceived);
+    if (i > 0)
+    {
+      TEST_LOG("    diff sent packets    : %u (~50)\n",
+               stats.packetsSent - packetsSent);
+      TEST_LOG("    diff received packets: %u (~50)\n",
+               stats.packetsReceived - packetsReceived);
+    }
+    packetsSent = stats.packetsSent;
+    packetsReceived = stats.packetsReceived;
+    SLEEP(1000);
+  }
+
+  TEST(RTCPStatistics #2);
+  ANL();
+  TEST_LOG("restart sending and ensure that the statistics is reset");
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(50); // ensures approx. two received packets
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+  TEST_LOG("\n    fractionLost = %hu \n    cumulativeLost = %u \n  "
+      "  extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+      stats.fractionLost, stats.cumulativeLost,
+      stats.extendedMax, stats.jitterSamples, stats.rttMs);
+  TEST_LOG( "    bytesSent = %d \n    packetsSent = %d \n   "
+      " bytesReceived = %d \n    packetsReceived = %d \n",
+      stats.bytesSent, stats.packetsSent, stats.bytesReceived,
+      stats.packetsReceived);
+
+  TEST(RTCPStatistics #3);
+  ANL();
+  TEST_LOG("disable RTCP and verify that statistics is not corrupt");
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, false));
+  SLEEP(250);
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+  TEST_LOG("\n    fractionLost = %hu \n    cumulativeLost = %u \n   "
+      " extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+      stats.fractionLost, stats.cumulativeLost,
+      stats.extendedMax, stats.jitterSamples, stats.rttMs);
+  TEST_LOG("    bytesSent = %d \n    packetsSent = %d \n    "
+      "bytesReceived = %d \n    packetsReceived = %d \n",
+      stats.bytesSent, stats.packetsSent,
+      stats.bytesReceived, stats.packetsReceived);
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, true));
+
+  TEST(RTCPStatistics #4);
+  ANL();
+  TEST_LOG("restart receiving and check RX statistics");
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  SLEEP(50); // ensures approx. two received packets
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+  TEST_LOG("\n    fractionLost = %hu \n    cumulativeLost = %u \n   "
+      " extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+      stats.fractionLost, stats.cumulativeLost,
+      stats.extendedMax, stats.jitterSamples,
+      stats.rttMs);
+  TEST_LOG("    bytesSent = %d \n    packetsSent = %d \n   "
+      " bytesReceived = %d \n    packetsReceived = %d \n",
+      stats.bytesSent, stats.packetsSent,
+      stats.bytesReceived, stats.packetsReceived);
+
+  TEST(SendApplicationDefinedRTCPPacket);
+  // just do some fail tests here
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // should fail since sending is off
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, false));
+  // should fail since RTCP is off
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, true));
+  TEST_MUSTPASS(rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  // invalid data length
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabc", 31));
+  MARK();
+  // invalid data vector
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(0, 0, 0, NULL, 0));
+  MARK();
+  ANL();
+
+#ifdef WEBRTC_CODEC_RED
+  TEST(SetFECStatus);
+  ANL();
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  cinst.pltype = 126;
+  strcpy(cinst.plname, "red");
+  cinst.plfreq = 8000;
+  cinst.pacsize = 0;
+  cinst.channels = 1;
+  cinst.rate = 0;
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  cinst.pltype = 104;
+  strcpy(cinst.plname, "isac");
+  cinst.plfreq = 32000;
+  cinst.pacsize = 960;
+  cinst.channels = 1;
+  cinst.rate = 45000;
+#else
+  cinst.pltype = 119;
+  strcpy(cinst.plname, "isaclc");
+  cinst.plfreq = 16000;
+  cinst.pacsize = 320;
+  cinst.channels = 1;
+  cinst.rate = 40000;
+#endif
+  // We have to re-register the audio codec payload type as stopReceive will
+  // clean the database
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_LOG("Start playing a file as microphone again \n");
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+                                                   true, true));
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, true, 126));
+  MARK();
+  TEST_LOG("Should sound OK with FEC enabled\n");
+  SLEEP(4000);
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, false));
+  MARK();
+#endif // #ifdef WEBRTC_CODEC_RED
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  ANL();
+  AOK();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestVideoSync
+// ----------------------------------------------------------------------------
+
+// Exercises the VoEVideoSync sub-API on a single loopback channel
+// (send/receive on 127.0.0.1:12345): SetInitTimestamp,
+// SetInitSequenceNumber, GetPlayoutTimestamp and SetMinimumPlayoutDelay.
+// Returns 0 on success, -1 if the VoEVideoSync interface is unavailable.
+int VoEExtendedTest::TestVideoSync()
+{
+  PrepareTest("VideoSync");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEVideoSync* vsync = _mgr.VideoSyncPtr();
+
+  // check if this interface is supported (the sub-API may not be compiled in)
+  if (!vsync)
+  {
+    TEST_LOG("VoEVideoSync is not supported!");
+    return -1;
+  }
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(
+      "VoEVideoSync_trace.txt")));
+  // NOTE(review): kTraceStateInfo is listed twice in this filter mask;
+  // harmless (bitwise OR), but probably a copy/paste slip.
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+                                            kTraceStateInfo |
+                                            kTraceWarning |
+                                            kTraceError |
+                                            kTraceCritical |
+                                            kTraceApiCall |
+                                            kTraceMemory |
+                                            kTraceInfo));
+#endif
+
+  // Bring up a fully active loopback channel (receive + playout + send).
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  TEST(SetInitTimestamp);
+  ANL();
+  // Must fail while the channel is sending...
+  TEST_MUSTPASS(!vsync->SetInitTimestamp(0, 12345));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  MARK();
+  SLEEP(1000);
+  // ...and succeed once sending has been stopped.
+  TEST_MUSTPASS(vsync->SetInitTimestamp(0, 12345));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+  SLEEP(1000);
+  AOK();
+  ANL();
+
+  TEST(SetInitSequenceNumber);
+  ANL();
+  // Same pattern: rejected while sending, accepted while stopped.
+  TEST_MUSTPASS(!vsync->SetInitSequenceNumber(0, 123));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->SetInitSequenceNumber(0, 123));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+  SLEEP(1000);
+  AOK();
+  ANL();
+
+  unsigned int timeStamp;
+  TEST(GetPlayoutTimestamp);
+  ANL();
+  // Sample the playout timestamp three times, one second apart, and log the
+  // values (no assertion on the values themselves beyond the call succeeding).
+  TEST_MUSTPASS(vsync->GetPlayoutTimestamp(0, timeStamp));
+  TEST_LOG("GetPlayoutTimestamp: %u", timeStamp);
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->GetPlayoutTimestamp(0, timeStamp));
+  TEST_LOG(" %u", timeStamp);
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->GetPlayoutTimestamp(0, timeStamp));
+  TEST_LOG(" %u\n", timeStamp);
+  AOK();
+  ANL();
+
+  TEST(SetMinimumPlayoutDelay);
+  ANL();
+  // Both -1 and 5000 are outside the accepted delay range and must be
+  // rejected with VE_INVALID_ARGUMENT (valid range presumably 0..<5000 ms —
+  // confirm against the VoEVideoSync header).
+  TEST_MUSTPASS(!vsync->SetMinimumPlayoutDelay(0, -1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!vsync->SetMinimumPlayoutDelay(0, 5000));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  // Tear down the channel and the engine.
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  AOK();
+  ANL();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestVolumeControl
+// ----------------------------------------------------------------------------
+
+// Exercises the VoEVolumeControl sub-API on a loopback channel. Only
+// invalid-argument paths are tested here: out-of-range speaker/mic volume,
+// out-of-range channel output scaling, and out-of-range pan values.
+// Returns 0 on success.
+int VoEExtendedTest::TestVolumeControl()
+{
+  PrepareTest("TestVolumeControl");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+#ifdef _TEST_FILE_
+  VoEFile* file = _mgr.FilePtr();
+#endif
+#ifdef _TEST_HARDWARE_
+  VoEHardware* hardware = _mgr.HardwarePtr();
+#endif
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+      GetFilename("VoEVolumeControl_trace.txt")));
+  // NOTE(review): kTraceStateInfo appears twice in this mask; harmless
+  // (bitwise OR) but likely a copy/paste slip.
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+                                            kTraceStateInfo |
+                                            kTraceWarning |
+                                            kTraceError |
+                                            kTraceCritical |
+                                            kTraceApiCall |
+                                            kTraceMemory |
+                                            kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+#if (defined _TEST_HARDWARE_ && (!defined(MAC_IPHONE) && \
+    !defined(WEBRTC_ANDROID)))
+#if defined(_WIN32)
+  // -1 presumably selects the default audio device on Windows; index 0 is
+  // used elsewhere — confirm against the VoEHardware header.
+  TEST_MUSTPASS(hardware->SetRecordingDevice(-1));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1));
+#else
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+#endif
+#endif
+  // Bring up a fully active loopback channel (receive + playout + send).
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+#ifdef _TEST_FILE_
+  // Use a file as the microphone so there is real audio on the channel.
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+                                                   true, true));
+#endif
+
+  ////////////////////////////
+  // Actual test starts here
+
+#if !defined(MAC_IPHONE)
+  TEST(SetSpeakerVolume);
+  ANL();
+  // 256 is just above the maximum accepted volume (presumably 0..255);
+  // must be rejected with VE_INVALID_ARGUMENT.
+  TEST_MUSTPASS(-1 != volume->SetSpeakerVolume(256));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if !defined(MAC_IPHONE)
+
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  TEST(SetMicVolume); ANL();
+  // Same out-of-range check for the microphone volume.
+  TEST_MUSTPASS(-1 != volume->SetMicVolume(256)); MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+
+#if !defined(MAC_IPHONE)
+  TEST(SetChannelOutputVolumeScaling);
+  ANL();
+  // Scaling factors just below 0.0 and just above 10.0 must both be rejected.
+  TEST_MUSTPASS(-1 != volume->SetChannelOutputVolumeScaling(0, (float)-0.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetChannelOutputVolumeScaling(0, (float)10.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if !defined(MAC_IPHONE)
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  TEST(SetOutputVolumePan);
+  ANL();
+  // Channel -1: pan applied to all channels. Each left/right value outside
+  // [0.0, 1.0] must be rejected with VE_INVALID_ARGUMENT.
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)-0.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)1.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)1.0,
+                                                 (float)-0.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)1.0,
+                                                 (float)1.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+
+  TEST(SetChannelOutputVolumePan);
+  ANL();
+  // Same four out-of-range combinations, now targeting a specific channel (0)
+  // through the same SetOutputVolumePan entry point.
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)-0.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)1.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)1.0,
+                                                 (float)-0.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)1.0,
+                                                 (float)1.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+#ifdef _TEST_FILE_
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+#endif
+  // Tear down the channel and the engine.
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  AOK();
+  ANL();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestAPM
+// ----------------------------------------------------------------------------
+
+// Exercises the VoEAudioProcessing sub-API end to end: echo cancellation
+// (EC/AECM modes), automatic gain control (AGC status + AgcConfig), noise
+// suppression (NS), EC metrics/delay metrics, the far-end (rx) AGC/NS
+// counterparts on channel 0, and debug recording. Returns 0 on success;
+// every TEST_MUSTPASS aborts the test on failure.
+// NOTE: the assertions are order-dependent — later checks rely on state set
+// by earlier Set* calls — so the call sequence must not be reordered.
+int VoEExtendedTest::TestAPM() {
+  PrepareTest("AudioProcessing");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEAudioProcessing* apm = _mgr.APMPtr();
+
+  //#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename("apm_trace.txt")));
+  // NOTE(review): kTraceStateInfo appears twice in this filter mask; the
+  // second occurrence is redundant (another flag such as kTraceDebug may
+  // have been intended) -- confirm.
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+                                            kTraceStateInfo |
+                                            kTraceWarning |
+                                            kTraceError |
+                                            kTraceCritical |
+                                            kTraceApiCall |
+                                            kTraceMemory |
+                                            kTraceInfo));
+  //#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  ///////////////////////////
+  // Actual test starts here
+
+  int i;
+  bool enabled;
+
+  //////
+  // EC
+
+  const int ECSleep = 0;
+  const int ECIterations = 10;
+
+  EcModes ECmode(kEcAec);
+  AecmModes AECMmode(kAecmSpeakerphone);
+  bool enabledCNG(false);
+
+  // Platform-dependent default EC mode: mobile platforms use AECM.
+#if (defined(MAC_IPHONE) || defined(WEBRTC_ANDROID))
+  const EcModes ECmodeDefault(kEcAecm);
+#else
+  const EcModes ECmodeDefault(kEcAec);
+#endif
+
+  // verify default settings (should be OFF and mode as above)
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(ECmode != ECmodeDefault);
+
+  // set EC defaults
+  TEST_MUSTPASS(apm->SetEcStatus(false, kEcDefault));
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(ECmode != ECmodeDefault);
+  SLEEP(ECSleep);
+
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // set kEcAec mode
+  TEST_MUSTPASS(apm->SetEcStatus(true, kEcAec));
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(ECmode != kEcAec);
+  SLEEP(ECSleep);
+
+  // set kEcConference mode (reported mode stays kEcAec per the check below)
+  TEST_MUSTPASS(apm->SetEcStatus(true, kEcConference));
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(ECmode != kEcAec);
+  SLEEP(ECSleep);
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // get default AECM mode, should be kAecmSpeakerphone by default
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_MUSTPASS(AECMmode != kAecmSpeakerphone);
+  TEST_MUSTPASS(enabledCNG != true);
+  TEST_MUSTPASS(apm->SetAecmMode(kAecmQuietEarpieceOrHeadset, false));
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_LOG("AECM: mode=%d, CNG: mode=%d\n", AECMmode,
+           enabledCNG);
+  TEST_MUSTPASS(AECMmode != kAecmQuietEarpieceOrHeadset);
+  TEST_MUSTPASS(enabledCNG != false);
+
+  // set kEcAecm mode
+  TEST_MUSTPASS(apm->SetEcStatus(true, kEcAecm));
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(ECmode != kEcAecm);
+  SLEEP(ECSleep);
+
+  // AECM mode, get and set: cycle through every mode with CNG on/off
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_MUSTPASS(AECMmode != kAecmQuietEarpieceOrHeadset);
+  TEST_MUSTPASS(enabledCNG != false);
+  TEST_MUSTPASS(apm->SetAecmMode(kAecmEarpiece, true));
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_LOG("AECM: mode=%d, CNG: mode=%d\n", AECMmode,
+           enabledCNG);
+  TEST_MUSTPASS(AECMmode != kAecmEarpiece);
+  TEST_MUSTPASS(enabledCNG != true);
+  TEST_MUSTPASS(apm->SetAecmMode(kAecmEarpiece, false));
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_LOG("AECM: mode=%d, CNG: mode=%d\n", AECMmode,
+           enabledCNG);
+  TEST_MUSTPASS(AECMmode != kAecmEarpiece);
+  TEST_MUSTPASS(enabledCNG != false);
+  TEST_MUSTPASS(apm->SetAecmMode(kAecmLoudEarpiece, true));
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_LOG("AECM: mode=%d, CNG: mode=%d\n", AECMmode,
+           enabledCNG);
+  TEST_MUSTPASS(AECMmode != kAecmLoudEarpiece);
+  TEST_MUSTPASS(enabledCNG != true);
+  TEST_MUSTPASS(apm->SetAecmMode(kAecmSpeakerphone, false));
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_LOG("AECM: mode=%d, CNG: mode=%d\n", AECMmode,
+           enabledCNG);
+  TEST_MUSTPASS(AECMmode != kAecmSpeakerphone);
+  TEST_MUSTPASS(enabledCNG != false);
+  TEST_MUSTPASS(apm->SetAecmMode(kAecmLoudSpeakerphone, true));
+  TEST_MUSTPASS(apm->GetAecmMode(AECMmode, enabledCNG));
+  TEST_LOG("AECM: mode=%d, CNG: mode=%d\n", AECMmode,
+           enabledCNG);
+  TEST_MUSTPASS(AECMmode != kAecmLoudSpeakerphone);
+  TEST_MUSTPASS(enabledCNG != true);
+
+  // verify that all modes are maintained when EC is disabled
+  TEST_MUSTPASS(apm->SetEcStatus(false));
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(ECmode != kEcAecm);
+  SLEEP(ECSleep);
+
+  // restore defaults (kEcUnchanged must keep the default mode)
+  TEST_MUSTPASS(apm->SetEcStatus(true, kEcDefault));
+  TEST_MUSTPASS(apm->SetEcStatus(false, kEcUnchanged));
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(ECmode != ECmodeDefault);
+  SLEEP(ECSleep);
+
+  // enable/disable many times in a row
+  for (i = 0; i < ECIterations; i++) {
+    TEST_MUSTPASS(apm->SetEcStatus(true));
+    TEST_MUSTPASS(apm->SetEcStatus(false));
+  }
+  TEST_MUSTPASS(apm->GetEcStatus(enabled, ECmode));
+  TEST_LOG("EC: enabled=%d, ECmode=%d\n", enabled, ECmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(ECmode != ECmodeDefault);
+  SLEEP(ECSleep);
+
+  ///////
+  // AGC
+
+  const int AGCSleep = 0;
+  const int AGCIterations = 10;
+
+  AgcModes AGCmode(kAgcAdaptiveAnalog);
+
+  // Platform-dependent AGC defaults: mobile platforms default to adaptive
+  // digital AGC and have it disabled by default.
+#if (defined(MAC_IPHONE) || defined(WEBRTC_ANDROID))
+  bool enabledDefault = false;
+  AgcModes AGCmodeDefault(kAgcAdaptiveDigital);
+#else
+  bool enabledDefault = true;
+  AgcModes AGCmodeDefault(kAgcAdaptiveAnalog);
+#endif
+
+  // verify default settings (should be as above)
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(enabled != enabledDefault);
+  TEST_MUSTPASS(AGCmode != AGCmodeDefault);
+
+  // set default AGC mode
+  TEST_MUSTPASS(apm->SetAgcStatus(false, kAgcDefault));
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(AGCmode != AGCmodeDefault);
+  SLEEP(AGCSleep);
+
+  // set kAgcFixedDigital mode
+  TEST_MUSTPASS(apm->SetAgcStatus(true, kAgcFixedDigital));
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  // NOTE(review): 'enabled' is logged but not asserted here -- confirm
+  // whether the omission is intentional.
+  TEST_MUSTPASS(AGCmode != kAgcFixedDigital);
+  SLEEP(AGCSleep);
+
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // set kAgcAdaptiveAnalog mode
+  TEST_MUSTPASS(apm->SetAgcStatus(true, kAgcAdaptiveAnalog));
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(AGCmode != kAgcAdaptiveAnalog);
+  SLEEP(AGCSleep);
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // set kAgcAdaptiveDigital mode
+  TEST_MUSTPASS(apm->SetAgcStatus(true, kAgcAdaptiveDigital));
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(AGCmode != kAgcAdaptiveDigital);
+  SLEEP(AGCSleep);
+
+  // verify that mode is maintained when AGC is disabled
+  TEST_MUSTPASS(apm->SetAgcStatus(false));
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(AGCmode != kAgcAdaptiveDigital);
+  SLEEP(AGCSleep);
+
+  // restore default AGC
+  TEST_MUSTPASS(apm->SetAgcStatus(enabledDefault, kAgcDefault));
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(enabled != enabledDefault);
+  TEST_MUSTPASS(AGCmode != AGCmodeDefault);
+  SLEEP(AGCSleep);
+
+  // enable/disable many times in a row
+  for (i = 0; i < AGCIterations; i++)
+  {
+    TEST_MUSTPASS(apm->SetAgcStatus(true));
+    TEST_MUSTPASS(apm->SetAgcStatus(false));
+  }
+  TEST_MUSTPASS(apm->GetAgcStatus(enabled, AGCmode));
+  TEST_LOG("AGC: enabled=%d, AGCmode=%d\n", enabled, AGCmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(AGCmode != AGCmodeDefault);
+
+  // --- Set/GetAgcConfig ---
+
+  // Valid ranges and defaults for AgcConfig:
+  // targetLeveldBOv         : [0, 31] (default 3)
+  // digitalCompressionGaindB: [0, 90] (default 9)
+  // limiterEnable           : 0: Off, 1: On (default)
+
+  AgcConfig agcConfig;
+  AgcConfig agcConfigDefault;
+
+  const unsigned short targetLeveldBOvDefault = 3;
+  const unsigned short digitalCompressionGaindBDefault = 9;
+  const bool limiterEnableDefault = true;
+
+  const unsigned short targetLeveldBOvMax = 31;
+  const unsigned short digitalCompressionGaindBMax = 90;
+
+  // verify default configuration
+  TEST_MUSTPASS(apm->GetAgcConfig(agcConfigDefault));
+  TEST_LOG("AGC: targetLeveldBOv=%d, digitalCompressionGaindB=%d, "
+      "limiterEnable=%d\n",
+      agcConfigDefault.targetLeveldBOv,
+      agcConfigDefault.digitalCompressionGaindB,
+      agcConfigDefault.limiterEnable);
+  TEST_MUSTPASS(agcConfigDefault.targetLeveldBOv != targetLeveldBOvDefault);
+  TEST_MUSTPASS(agcConfigDefault.digitalCompressionGaindB !=
+      digitalCompressionGaindBDefault);
+  TEST_MUSTPASS(agcConfigDefault.limiterEnable != limiterEnableDefault);
+
+  // verify that invalid (out-of-range) parameters are detected
+  agcConfig = agcConfigDefault;
+  agcConfig.targetLeveldBOv = targetLeveldBOvMax + 1;
+  TEST_MUSTPASS(!apm->SetAgcConfig(agcConfig));  // must fail
+  int err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_APM_ERROR);
+  agcConfig = agcConfigDefault;
+  agcConfig.digitalCompressionGaindB
+  = digitalCompressionGaindBMax + 1;
+  TEST_MUSTPASS(!apm->SetAgcConfig(agcConfig));  // must fail
+
+  AgcConfig agcConfigSet;
+  agcConfigSet.digitalCompressionGaindB = 17;
+  agcConfigSet.targetLeveldBOv = 11;
+  agcConfigSet.limiterEnable = false;
+
+  // try some set/get operations using valid settings
+  TEST_MUSTPASS(apm->SetAgcConfig(agcConfigDefault));
+  TEST_MUSTPASS(apm->GetAgcConfig(agcConfig));
+  TEST_MUSTPASS(agcConfig.targetLeveldBOv != targetLeveldBOvDefault);
+  TEST_MUSTPASS(agcConfig.digitalCompressionGaindB !=
+      digitalCompressionGaindBDefault);
+  TEST_MUSTPASS(agcConfig.limiterEnable != limiterEnableDefault);
+
+  TEST_MUSTPASS(apm->SetAgcConfig(agcConfigSet));
+  TEST_MUSTPASS(apm->GetAgcConfig(agcConfig));
+  TEST_MUSTPASS(agcConfig.targetLeveldBOv != agcConfigSet.targetLeveldBOv);
+  TEST_MUSTPASS(agcConfig.digitalCompressionGaindB !=
+      agcConfigSet.digitalCompressionGaindB);
+  TEST_MUSTPASS(agcConfig.limiterEnable != agcConfigSet.limiterEnable);
+
+  // restore default AGC config
+  TEST_MUSTPASS(apm->SetAgcConfig(agcConfigDefault));
+  SLEEP(AGCSleep);
+
+  //////
+  // NS
+
+  const int NSSleep = 0;
+  const int NSIterations = 10;
+
+  NsModes NSmode(kNsHighSuppression);
+  NsModes NSmodeDefault(kNsModerateSuppression);
+
+  // verify default settings (should be OFF and mode as above)
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(NSmode != NSmodeDefault);
+
+  // enable default NS settings
+  // must set a value first time!
+  TEST_MUSTPASS(apm->SetNsStatus(false, kNsDefault));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(NSmode != NSmodeDefault);
+  SLEEP(NSSleep);
+
+  // set kNsLowSuppression mode
+  TEST_MUSTPASS(apm->SetNsStatus(true, kNsLowSuppression));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(NSmode != kNsLowSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsModerateSuppression mode
+  TEST_MUSTPASS(apm->SetNsStatus(true, kNsModerateSuppression));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(NSmode != kNsModerateSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsHighSuppression mode
+  TEST_MUSTPASS(apm->SetNsStatus(true, kNsHighSuppression));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(NSmode != kNsHighSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsVeryHighSuppression mode
+  TEST_MUSTPASS(apm->SetNsStatus(true, kNsVeryHighSuppression));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(NSmode != kNsVeryHighSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsConference mode (reported back as kNsHighSuppression)
+  TEST_MUSTPASS(apm->SetNsStatus(true, kNsConference));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(NSmode != kNsHighSuppression);
+  SLEEP(NSSleep);
+
+  // verify that mode is maintained when NS is disabled
+  TEST_MUSTPASS(apm->SetNsStatus(false));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(NSmode != kNsHighSuppression);
+  SLEEP(NSSleep);
+
+  // restore default NS
+  TEST_MUSTPASS(apm->SetNsStatus(true, kNsDefault));
+  TEST_MUSTPASS(apm->SetNsStatus(false));
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(NSmode != NSmodeDefault);
+  SLEEP(NSSleep);
+
+  // enable/disable many times in a row
+  for (i = 0; i < NSIterations; i++) {
+    TEST_MUSTPASS(apm->SetNsStatus(true));
+    TEST_MUSTPASS(apm->SetNsStatus(false));
+  }
+  TEST_MUSTPASS(apm->GetNsStatus(enabled, NSmode));
+  TEST_LOG("NS: enabled=%d, NSmode=%d\n", enabled, NSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(NSmode != NSmodeDefault);
+  SLEEP(NSSleep);
+
+  //////////////////////////////////
+  // Ec Metrics
+
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  TEST(GetEcMetricsStatus);
+  ANL();
+  TEST(SetEcMetricsStatus);
+  ANL();
+  TEST_MUSTPASS(apm->GetEcMetricsStatus(enabled));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+  MARK(); // should be OFF by default
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(true));
+  MARK();
+  TEST_MUSTPASS(apm->GetEcMetricsStatus(enabled));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  MARK();
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(false));
+  MARK();
+  TEST_MUSTPASS(apm->GetEcMetricsStatus(enabled));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+  MARK();
+  AOK();
+  ANL();
+
+  TEST(GetEchoMetrics);
+  ANL();
+
+  int ERL, ERLE, RERL, A_NLP;
+  TEST_MUSTPASS(-1 != apm->GetEchoMetrics(ERL, ERLE, RERL, A_NLP));
+  MARK(); // Should fail since not activated.
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_APM_ERROR);
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(true));
+  TEST_MUSTPASS(-1 != apm->GetEchoMetrics(ERL, ERLE, RERL, A_NLP));
+  MARK(); // Should fail since AEC is off.
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_APM_ERROR);
+  TEST_MUSTPASS(apm->SetEcStatus(true));
+  TEST_MUSTPASS(apm->GetEchoMetrics(ERL, ERLE, RERL, A_NLP));
+  MARK(); // Should work now.
+  TEST_LOG("\nEcho: ERL=%d, ERLE=%d, RERL=%d, A_NLP=%d [dB]\n",
+           ERL, ERLE, RERL, A_NLP);
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(false));
+  TEST_MUSTPASS(apm->SetEcStatus(false));
+  AOK();
+  ANL();
+
+  TEST(GetEcDelayMetrics);
+  ANL();
+
+  int delay_median = 0;
+  int delay_std = 0;
+  TEST_MUSTPASS(-1 != apm->GetEcDelayMetrics(delay_median, delay_std));
+  MARK(); // Should fail since not activated.
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_APM_ERROR);
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(true));
+  TEST_MUSTPASS(-1 != apm->GetEcDelayMetrics(delay_median, delay_std));
+  MARK(); // Should fail since AEC is off.
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_APM_ERROR);
+  TEST_MUSTPASS(apm->SetEcStatus(true));
+  TEST_MUSTPASS(apm->GetEcDelayMetrics(delay_median, delay_std));
+  MARK(); // Should work now.
+  TEST_LOG("\nEC Delay: median=%d, std=%d [ms]\n", delay_median, delay_std);
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(false));
+  TEST_MUSTPASS(apm->SetEcStatus(false));
+  AOK();
+  ANL();
+
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // far-end AudioProcessing (rx path, channel 0)
+  ///////
+  // AGC
+
+  AgcModes rxAGCmode(kAgcAdaptiveDigital);
+  AgcModes rxAGCmodeDefault(kAgcAdaptiveDigital);
+  bool rxEnabledDefault = false;
+
+  // verify default settings (should be as above)
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(enabled != rxEnabledDefault);
+  TEST_MUSTPASS(rxAGCmode != rxAGCmodeDefault);
+
+  // set default AGC mode
+  TEST_MUSTPASS(apm->SetRxAgcStatus(0, false, kAgcDefault));
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxAGCmode != rxAGCmodeDefault);
+  SLEEP(AGCSleep);
+
+  // set kAgcAdaptiveAnalog mode, should fail (analog AGC is tx-only)
+  TEST_MUSTPASS(!apm->SetRxAgcStatus(0, true, kAgcAdaptiveAnalog));
+
+  // set kAgcFixedDigital mode
+  TEST_MUSTPASS(apm->SetRxAgcStatus(0, true, kAgcFixedDigital));
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(rxAGCmode != kAgcFixedDigital);
+  SLEEP(AGCSleep);
+
+  // set kAgcAdaptiveDigital mode
+  TEST_MUSTPASS(apm->SetRxAgcStatus(0, true, kAgcAdaptiveDigital));
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(rxAGCmode != kAgcAdaptiveDigital);
+  SLEEP(AGCSleep);
+
+  // verify that mode is maintained when AGC is disabled
+  TEST_MUSTPASS(apm->SetRxAgcStatus(0, false));
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxAGCmode != kAgcAdaptiveDigital);
+  SLEEP(AGCSleep);
+
+  // restore default AGC
+  // NOTE(review): uses enabledDefault (the tx-side default) rather than
+  // rxEnabledDefault -- on desktop these differ (true vs false); confirm
+  // which default is intended here.
+  TEST_MUSTPASS(apm->SetRxAgcStatus(0, enabledDefault, kAgcDefault));
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(enabled != enabledDefault);
+  TEST_MUSTPASS(rxAGCmode != rxAGCmodeDefault);
+  SLEEP(AGCSleep);
+
+  // enable/disable many times in a row
+  for (i = 0; i < AGCIterations; i++) {
+    TEST_MUSTPASS(apm->SetRxAgcStatus(0, true));
+    TEST_MUSTPASS(apm->SetRxAgcStatus(0, false));
+  }
+  TEST_MUSTPASS(apm->GetRxAgcStatus(0, enabled, rxAGCmode));
+  TEST_LOG("rxAGC: enabled=%d, AGCmode=%d\n", enabled,
+           rxAGCmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxAGCmode != rxAGCmodeDefault);
+
+  // --- Set/GetRxAgcConfig ---
+
+  // Valid ranges and defaults (same as the tx-side AgcConfig):
+  // targetLeveldBOv         : [0, 31] (default 3)
+  // digitalCompressionGaindB: [0, 90] (default 9)
+  // limiterEnable           : 0: Off, 1: On (default)
+
+  AgcConfig rxAGCConfig;
+  AgcConfig rxAGCConfigDefault;
+
+  const unsigned short rxTargetLeveldBOvDefault = 3;
+  const unsigned short rxDigitalCompressionGaindBDefault = 9;
+  const bool rxLimiterEnableDefault = true;
+
+  const unsigned short rxTargetLeveldBOvMax = 31;
+  const unsigned short rxDigitalCompressionGaindBMax = 90;
+
+  // verify default configuration
+  TEST_MUSTPASS(apm->GetRxAgcConfig(0, rxAGCConfigDefault));
+  TEST_LOG(
+      "rxAGC: targetLeveldBOv=%u, digitalCompressionGaindB=%u, "
+      "limiterEnable=%d\n",
+      rxAGCConfigDefault.targetLeveldBOv,
+      rxAGCConfigDefault.digitalCompressionGaindB,
+      rxAGCConfigDefault.limiterEnable);
+  TEST_MUSTPASS(rxAGCConfigDefault.targetLeveldBOv !=
+      rxTargetLeveldBOvDefault);
+  TEST_MUSTPASS(rxAGCConfigDefault.digitalCompressionGaindB !=
+      rxDigitalCompressionGaindBDefault);
+  TEST_MUSTPASS(rxAGCConfigDefault.limiterEnable != rxLimiterEnableDefault);
+
+  // verify that invalid (out-of-range) parameters are detected
+  rxAGCConfig = rxAGCConfigDefault;
+  rxAGCConfig.targetLeveldBOv = rxTargetLeveldBOvMax + 1;
+  TEST_MUSTPASS(!apm->SetRxAgcConfig(0, rxAGCConfig));  // must fail
+  int rxErr = voe_base_->LastError();
+  TEST_MUSTPASS(rxErr != VE_APM_ERROR);
+  rxAGCConfig = rxAGCConfigDefault;
+  rxAGCConfig.digitalCompressionGaindB
+  = rxDigitalCompressionGaindBMax + 1;
+  TEST_MUSTPASS(!apm->SetRxAgcConfig(0, rxAGCConfig));  // must fail
+
+  AgcConfig rxAGCConfigSet;
+  rxAGCConfigSet.digitalCompressionGaindB = 17;
+  rxAGCConfigSet.targetLeveldBOv = 11;
+  rxAGCConfigSet.limiterEnable = false;
+
+  // try some set/get operations using valid settings
+  TEST_MUSTPASS(apm->SetRxAgcConfig(0, rxAGCConfigDefault));
+  TEST_MUSTPASS(apm->GetRxAgcConfig(0, rxAGCConfig));
+  TEST_MUSTPASS(rxAGCConfig.targetLeveldBOv != rxTargetLeveldBOvDefault);
+  TEST_MUSTPASS(rxAGCConfig.digitalCompressionGaindB !=
+      rxDigitalCompressionGaindBDefault);
+  TEST_MUSTPASS(rxAGCConfig.limiterEnable != rxLimiterEnableDefault);
+
+  TEST_MUSTPASS(apm->SetRxAgcConfig(0, rxAGCConfigSet));
+  TEST_MUSTPASS(apm->GetRxAgcConfig(0, rxAGCConfig));
+  TEST_MUSTPASS(rxAGCConfig.targetLeveldBOv !=
+      rxAGCConfigSet.targetLeveldBOv);
+  TEST_MUSTPASS(rxAGCConfig.digitalCompressionGaindB !=
+      rxAGCConfigSet.digitalCompressionGaindB);
+  TEST_MUSTPASS(rxAGCConfig.limiterEnable != rxAGCConfigSet.limiterEnable);
+
+  // restore default AGC config
+  TEST_MUSTPASS(apm->SetRxAgcConfig(0, rxAGCConfigDefault));
+  SLEEP(AGCSleep);
+
+  //////
+  // NS
+
+  NsModes rxNSmode(kNsHighSuppression);
+  NsModes rxNSmodeDefault(kNsModerateSuppression);
+
+  // verify default settings (should be OFF and mode as above)
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxNSmode != rxNSmodeDefault);
+
+  // enable default NS settings
+  // must set a value first time!
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, false, kNsDefault));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxNSmode != rxNSmodeDefault);
+  SLEEP(NSSleep);
+
+  // set kNsLowSuppression mode
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, true, kNsLowSuppression));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(rxNSmode != kNsLowSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsModerateSuppression mode
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, true, kNsModerateSuppression));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(rxNSmode != kNsModerateSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsHighSuppression mode
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, true, kNsHighSuppression));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(rxNSmode != kNsHighSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsVeryHighSuppression mode
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, true, kNsVeryHighSuppression));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(rxNSmode != kNsVeryHighSuppression);
+  SLEEP(NSSleep);
+
+  // set kNsConference mode (reported back as kNsHighSuppression)
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, true, kNsConference));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(rxNSmode != kNsHighSuppression);
+  SLEEP(NSSleep);
+
+  // verify that mode is maintained when NS is disabled
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, false));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxNSmode != kNsHighSuppression);
+  SLEEP(NSSleep);
+
+  // restore default NS
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, true, kNsDefault));
+  TEST_MUSTPASS(apm->SetRxNsStatus(0, false));
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(enabled != false);
+  // NOTE(review): compares against NSmodeDefault (tx) instead of
+  // rxNSmodeDefault; both hold kNsModerateSuppression so behavior is
+  // unchanged, but rxNSmodeDefault reads as the intended constant.
+  TEST_MUSTPASS(rxNSmode != rxNSmodeDefault);
+  SLEEP(NSSleep);
+
+  // enable/disable many times in a row
+  for (i = 0; i < NSIterations; i++)
+  {
+    TEST_MUSTPASS(apm->SetRxNsStatus(0, true));
+    TEST_MUSTPASS(apm->SetRxNsStatus(0, false));
+  }
+  TEST_MUSTPASS(apm->GetRxNsStatus(0, enabled, rxNSmode));
+  TEST_LOG("rxNS: enabled=%d, NSmode=%d\n", enabled, rxNSmode);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(rxNSmode != rxNSmodeDefault);
+  SLEEP(NSSleep);
+
+  /////////////////////////////
+  // StartDebugRecording
+  ////////////////////////////
+  // StopDebugRecording
+  TEST_LOG("StartDebugRecording");
+  TEST_MUSTPASS(apm->StartDebugRecording(GetFilename("apm_debug.txt")));
+  SLEEP(1000);
+  TEST_LOG("StopDebugRecording");
+  TEST_MUSTPASS(apm->StopDebugRecording());
+
+  // tear down: release the channel created above and shut the engine down
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+} //  namespace voetest
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_extended_test.h b/trunk/src/voice_engine/main/test/auto_test/voe_extended_test.h
new file mode 100644
index 0000000..9a10037
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_extended_test.h
@@ -0,0 +1,463 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTENDED_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_EXTENDED_TEST_H
+
+#include "voe_standard_test.h"
+#include "audio_device.h"
+
+namespace voetest {
+
+class VoETestManager;
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceModule
+//
+//  Implementation of the ADM to be used as external ADM in VoiceEngine.
+//  This implementation is only a mock class, i.e., it does not provide
+//  any real audio support.
+// ----------------------------------------------------------------------------
+
+class AudioDeviceModuleImpl : public AudioDeviceModule {
+ public:
+  // Factory methods
+  static AudioDeviceModuleImpl* Create();
+  static bool Destroy(AudioDeviceModuleImpl* adm);
+
+  // Helper methods which allows us to get some handy information about
+  // this mock implementation.
+  int32_t ReferenceCounter() const {
+    return _ref_count;
+  }
+
+  // RefCountedModule implementation (mocks default implementation)
+  virtual int32_t AddRef();
+  virtual int32_t Release();
+
+  // Module implementation
+  // Stub convention throughout this mock: methods return 0 where the mock
+  // pretends a working (no-op) device, and -1 where the capability is
+  // reported as unsupported.
+  virtual int32_t Version(char* version,
+                          uint32_t& remaining_buffer_in_bytes,
+                          uint32_t& position) const {
+    return 0;
+  }
+  virtual int32_t ChangeUniqueId(const int32_t id) {
+    return 0;
+  }
+  // -1 = never asks to be processed by the module process thread.
+  virtual int32_t TimeUntilNextProcess() {
+    return -1;
+  }
+  virtual int32_t Process() {
+    return 0;
+  }
+
+  // AudioDeviceModule implementation
+  virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const {
+    return 0;
+  }
+
+  virtual ErrorCode LastError() const {
+    return static_cast<ErrorCode> (0);
+  }
+  virtual int32_t RegisterEventObserver(AudioDeviceObserver* eventCallback) {
+    return 0;
+  }
+
+  virtual int32_t RegisterAudioCallback(AudioTransport* audioCallback) {
+    return 0;
+  }
+
+  virtual int32_t Init() {
+    return 0;
+  }
+  virtual int32_t Terminate() {
+    return 0;
+  }
+  virtual bool Initialized() const {
+    return true;
+  }
+
+  // Device enumeration: the mock reports no devices.
+  virtual int16_t PlayoutDevices() {
+    return -1;
+  }
+  virtual int16_t RecordingDevices() {
+    return -1;
+  }
+  virtual int32_t PlayoutDeviceName(uint16_t index,
+                                    char name[kAdmMaxDeviceNameSize],
+                                    char guid[kAdmMaxGuidSize]) {
+    return -1;
+  }
+  virtual int32_t RecordingDeviceName(uint16_t index,
+                                      char name[kAdmMaxDeviceNameSize],
+                                      char guid[kAdmMaxGuidSize]) {
+    return -1;
+  }
+
+  virtual int32_t SetPlayoutDevice(uint16_t index) {
+    return 0;
+  }
+  virtual int32_t SetPlayoutDevice(WindowsDeviceType device) {
+    return 0;
+  }
+  virtual int32_t SetRecordingDevice(uint16_t index) {
+    return 0;
+  }
+  virtual int32_t SetRecordingDevice(WindowsDeviceType device) {
+    return 0;
+  }
+
+  // Playout/recording lifecycle: always claims to be available,
+  // initialized and running, without touching any real hardware.
+  virtual int32_t PlayoutIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitPlayout() {
+    return 0;
+  }
+  virtual bool PlayoutIsInitialized() const {
+    return true;
+  }
+  virtual int32_t RecordingIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitRecording() {
+    return 0;
+  }
+  virtual bool RecordingIsInitialized() const {
+    return true;
+  }
+
+  virtual int32_t StartPlayout() {
+    return 0;
+  }
+  virtual int32_t StopPlayout() {
+    return 0;
+  }
+  virtual bool Playing() const {
+    return true;
+  }
+  virtual int32_t StartRecording() {
+    return 0;
+  }
+  virtual int32_t StopRecording() {
+    return 0;
+  }
+  virtual bool Recording() const {
+    return true;
+  }
+
+  // Built-in (hardware) AGC is reported as unsupported/off.
+  virtual int32_t SetAGC(bool enable) {
+    return -1;
+  }
+  virtual bool AGC() const {
+    return false;
+  }
+
+  virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
+                                   uint16_t volumeRight) {
+    return -1;
+  }
+  virtual int32_t WaveOutVolume(uint16_t* volumeLeft,
+                                uint16_t* volumeRight) const {
+    return -1;
+  }
+
+  virtual int32_t SpeakerIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitSpeaker() {
+    return 0;
+  }
+  virtual bool SpeakerIsInitialized() const {
+    return true;
+  }
+  virtual int32_t MicrophoneIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitMicrophone() {
+    return 0;
+  }
+  virtual bool MicrophoneIsInitialized() const {
+    return true;
+  }
+
+  // Volume, mute, boost, stereo and buffer controls are all reported as
+  // unsupported (-1) since there is no real audio path behind this mock.
+  virtual int32_t SpeakerVolumeIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetSpeakerVolume(uint32_t volume) {
+    return -1;
+  }
+  virtual int32_t SpeakerVolume(uint32_t* volume) const {
+    return -1;
+  }
+  virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const {
+    return -1;
+  }
+  virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const {
+    return -1;
+  }
+  virtual int32_t SpeakerVolumeStepSize(uint16_t* stepSize) const {
+    return -1;
+  }
+
+  virtual int32_t MicrophoneVolumeIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetMicrophoneVolume(uint32_t volume) {
+    return -1;
+  }
+  virtual int32_t MicrophoneVolume(uint32_t* volume) const {
+    return -1;
+  }
+  virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const {
+    return -1;
+  }
+  virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const {
+    return -1;
+  }
+  virtual int32_t MicrophoneVolumeStepSize(uint16_t* stepSize) const {
+    return -1;
+  }
+
+  virtual int32_t SpeakerMuteIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetSpeakerMute(bool enable) {
+    return -1;
+  }
+  virtual int32_t SpeakerMute(bool* enabled) const {
+    return -1;
+  }
+
+  virtual int32_t MicrophoneMuteIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetMicrophoneMute(bool enable) {
+    return -1;
+  }
+  virtual int32_t MicrophoneMute(bool* enabled) const {
+    return -1;
+  }
+
+  virtual int32_t MicrophoneBoostIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetMicrophoneBoost(bool enable) {
+    return -1;
+  }
+  virtual int32_t MicrophoneBoost(bool* enabled) const {
+    return -1;
+  }
+
+  virtual int32_t StereoPlayoutIsAvailable(bool* available) const {
+    return -1;
+  }
+  virtual int32_t SetStereoPlayout(bool enable) {
+    return -1;
+  }
+  virtual int32_t StereoPlayout(bool* enabled) const {
+    return -1;
+  }
+  virtual int32_t StereoRecordingIsAvailable(bool* available) const {
+    return -1;
+  }
+  virtual int32_t SetStereoRecording(bool enable) {
+    return -1;
+  }
+  virtual int32_t StereoRecording(bool* enabled) const {
+    return -1;
+  }
+  virtual int32_t SetRecordingChannel(const ChannelType channel) {
+    return -1;
+  }
+  virtual int32_t RecordingChannel(ChannelType* channel) const {
+    return -1;
+  }
+
+  virtual int32_t SetPlayoutBuffer(const BufferType type, uint16_t sizeMS = 0) {
+    return -1;
+  }
+  virtual int32_t PlayoutBuffer(BufferType* type, uint16_t* sizeMS) const {
+    return -1;
+  }
+  virtual int32_t PlayoutDelay(uint16_t* delayMS) const {
+    return -1;
+  }
+  virtual int32_t RecordingDelay(uint16_t* delayMS) const {
+    return -1;
+  }
+
+  virtual int32_t CPULoad(uint16_t* load) const {
+    return -1;
+  }
+
+  // Raw file recording and sample-rate overrides: unsupported by the mock.
+  virtual int32_t StartRawOutputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) {
+    return -1;
+  }
+  virtual int32_t StopRawOutputFileRecording() {
+    return -1;
+  }
+  virtual int32_t StartRawInputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) {
+    return -1;
+  }
+  virtual int32_t StopRawInputFileRecording() {
+    return -1;
+  }
+
+  virtual int32_t SetRecordingSampleRate(const uint32_t samplesPerSec) {
+    return -1;
+  }
+  virtual int32_t RecordingSampleRate(uint32_t* samplesPerSec) const {
+    return -1;
+  }
+  virtual int32_t SetPlayoutSampleRate(const uint32_t samplesPerSec) {
+    return -1;
+  }
+  virtual int32_t PlayoutSampleRate(uint32_t* samplesPerSec) const {
+    return -1;
+  }
+
+  virtual int32_t ResetAudioDevice() {
+    return -1;
+  }
+  virtual int32_t SetLoudspeakerStatus(bool enable) {
+    return -1;
+  }
+  virtual int32_t GetLoudspeakerStatus(bool* enabled) const {
+    return -1;
+  }
+
+ protected:
+  // Construction is restricted to the Create()/Destroy() factory pair.
+  AudioDeviceModuleImpl();
+  ~AudioDeviceModuleImpl();
+
+ private:
+  // Reference count maintained by AddRef()/Release().
+  // NOTE(review): volatile does not make increments atomic -- presumably
+  // the AddRef/Release definitions use atomic ops; confirm if this mock is
+  // ever shared across threads.
+  volatile int32_t _ref_count;
+};
+
+// ----------------------------------------------------------------------------
+//	Transport
+// ----------------------------------------------------------------------------
+
+class ExtendedTestTransport : public Transport {
+ public:
+  ExtendedTestTransport(VoENetwork* ptr);
+  ~ExtendedTestTransport();
+  VoENetwork* myNetw;
+
+ protected:
+  virtual int SendPacket(int channel, const void *data, int len);
+  virtual int SendRTCPPacket(int channel, const void *data, int len);
+
+ private:
+  static bool Run(void* ptr);
+  bool Process();
+
+ private:
+  ThreadWrapper* _thread;
+  CriticalSectionWrapper* _lock;
+  EventWrapper* _event;
+
+ private:
+  unsigned char _packetBuffer[1612];
+  int _length;
+  int _channel;
+};
+
+class XTransport : public Transport {
+ public:
+  XTransport(VoENetwork* netw, VoEFile* file);
+  VoENetwork* _netw;
+  VoEFile* _file;
+
+ public:
+  virtual int SendPacket(int channel, const void *data, int len);
+  virtual int SendRTCPPacket(int channel, const void *data, int len);
+};
+
+class XRTPObserver : public VoERTPObserver {
+ public:
+  XRTPObserver();
+  ~XRTPObserver();
+  virtual void OnIncomingCSRCChanged(const int channel,
+                                     const unsigned int CSRC,
+                                     const bool added);
+  virtual void OnIncomingSSRCChanged(const int channel,
+                                     const unsigned int SSRC);
+ public:
+  unsigned int _SSRC;
+};
+
+// ----------------------------------------------------------------------------
+//	VoEExtendedTest
+// ----------------------------------------------------------------------------
+
+class VoEExtendedTest : public VoiceEngineObserver,
+                        public VoEConnectionObserver {
+ public:
+  VoEExtendedTest(VoETestManager& mgr);
+  ~VoEExtendedTest();
+  int PrepareTest(const char* str) const;
+  int TestPassed(const char* str) const;
+  int TestBase();
+  int TestCallReport();
+  int TestCodec();
+  int TestDtmf();
+  int TestEncryption();
+  int TestExternalMedia();
+  int TestFile();
+  int TestMixing();
+  int TestHardware();
+  int TestNetEqStats();
+  int TestNetwork();
+  int TestRTP_RTCP();
+  int TestVideoSync();
+  int TestVolumeControl();
+  int TestAPM();
+ public:
+  int ErrorCode() const {
+    return _errCode;
+  }
+  void ClearErrorCode() {
+    _errCode = 0;
+  }
+ protected:
+  // from VoiceEngineObserver
+  void CallbackOnError(const int errCode, const int channel);
+  void CallbackOnTrace(const TraceLevel level, const char* message, const int length);
+ protected:
+  // from VoEConnectionObserver
+  void OnPeriodicDeadOrAlive(const int channel, const bool alive);
+ private:
+  void Play(int channel, unsigned int timeMillisec, bool addFileAsMicrophone = false,
+            bool addTimeMarker = false);
+  void Sleep(unsigned int timeMillisec, bool addMarker = false);
+  void StartMedia(int channel, int rtpPort, bool listen, bool playout, bool send);
+  void StopMedia(int channel);
+  int RunMixingTest(int num_remote_channels, int num_local_channels,
+                    int16_t input_value, int16_t max_output_value,
+                    int16_t min_output_value);
+ private:
+  VoETestManager& _mgr;
+ private:
+  int _errCode;
+  bool _alive;
+  bool _listening[32];
+  bool _playing[32];
+  bool _sending[32];
+};
+
+} //  namespace voetest
+#endif // WEBRTC_VOICE_ENGINE_VOE_EXTENDED_TEST_H
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_standard_test.cc b/trunk/src/voice_engine/main/test/auto_test/voe_standard_test.cc
new file mode 100644
index 0000000..a6af7fa
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_standard_test.cc
@@ -0,0 +1,1851 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+#include "engine_configurations.h"
+#if defined(_WIN32)
+#include <conio.h>     // exists only on windows
+#include <tchar.h>
+#endif
+
+#include "voe_standard_test.h"
+
+#if defined (_ENABLE_VISUAL_LEAK_DETECTOR_) && defined(_DEBUG) && \
+    defined(_WIN32) && !defined(_INSTRUMENTATION_TESTING_)
+#include "vld.h"
+#endif
+
+#ifdef MAC_IPHONE
+#include "../../source/voice_engine_defines.h"  // defines build macros
+#else
+#include "../../source/voice_engine_defines.h"  // defines build macros
+#endif
+
+#include "automated_mode.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+
+#ifdef _TEST_NETEQ_STATS_
+#include "../../interface/voe_neteq_stats.h" // Not available in delivery folder
+#endif
+
+#include "voe_extended_test.h"
+#include "voe_stress_test.h"
+#include "voe_unit_test.h"
+#include "voe_cpu_test.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#ifdef MAC_IPHONE
+// Defined in iPhone specific test file
+int GetDocumentsDir(char* buf, int bufLen);
+char* GetFilename(char* filename);
+const char* GetFilename(const char* filename);
+int GetResource(char* resource, char* dest, int destLen);
+char* GetResource(char* resource);
+const char* GetResource(const char* resource);
+// End of the MAC_IPHONE-specific declarations above.
+#elif defined(WEBRTC_ANDROID)
+char filenameStr[2][256];
+int currentStr = 0;
+
+char* GetFilename(char* filename) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/sdcard/%s", filename);
+  return filenameStr[currentStr];
+}
+
+const char* GetFilename(const char* filename) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/sdcard/%s", filename);
+  return filenameStr[currentStr];
+}
+
+int GetResource(char* resource, char* dest, int destLen) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/sdcard/%s", resource);
+  strncpy(dest, filenameStr[currentStr], destLen-1);
+  return 0;
+}
+
+char* GetResource(char* resource) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/sdcard/%s", resource);
+  return filenameStr[currentStr];
+}
+
+const char* GetResource(const char* resource) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/sdcard/%s", resource);
+  return filenameStr[currentStr];
+}
+
+#else
+char filenameStr[2][256];
+int currentStr = 0;
+
+char* GetFilename(char* filename) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/tmp/%s", filename);
+  return filenameStr[currentStr];
+}
+const char* GetFilename(const char* filename) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/tmp/%s", filename);
+  return filenameStr[currentStr];
+}
+int GetResource(char* resource, char* dest, int destLen) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/tmp/%s", resource);
+  strncpy(dest, filenameStr[currentStr], destLen - 1);
+  return 0;
+}
+char* GetResource(char* resource) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/tmp/%s", resource);
+  return filenameStr[currentStr];
+}
+const char* GetResource(const char* resource) {
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/tmp/%s", resource);
+  return filenameStr[currentStr];
+}
+#endif
+
+#if !defined(MAC_IPHONE)
+const char* summaryFilename = "/tmp/VoiceEngineSummary.txt";
+#endif
+// For iPhone the summary filename is created in createSummary
+
+int dummy = 0;  // Dummy used in different functions to avoid warnings
+
+TestRtpObserver::TestRtpObserver() {
+  Reset();
+}
+
+TestRtpObserver::~TestRtpObserver() {
+}
+
+void TestRtpObserver::Reset() {
+  for (int i = 0; i < 2; i++) {
+    ssrc_[i] = 0;
+    csrc_[i][0] = 0;
+    csrc_[i][1] = 0;
+    added_[i][0] = false;
+    added_[i][1] = false;
+    size_[i] = 0;
+  }
+}
+
+void TestRtpObserver::OnIncomingCSRCChanged(const int channel,
+                                            const unsigned int CSRC,
+                                            const bool added) {
+  char msg[128];
+  sprintf(msg, "=> OnIncomingCSRCChanged(channel=%d, CSRC=%u, added=%d)\n",
+          channel, CSRC, added);
+  TEST_LOG("%s", msg);
+
+  if (channel > 1)
+    return;  // Not enough memory.
+
+  csrc_[channel][size_[channel]] = CSRC;
+  added_[channel][size_[channel]] = added;
+
+  size_[channel]++;
+  if (size_[channel] == 2)
+    size_[channel] = 0;
+}
+
+void TestRtpObserver::OnIncomingSSRCChanged(const int channel,
+                                            const unsigned int SSRC) {
+  char msg[128];
+  sprintf(msg, "\n=> OnIncomingSSRCChanged(channel=%d, SSRC=%u)\n", channel,
+          SSRC);
+  TEST_LOG("%s", msg);
+
+  ssrc_[channel] = SSRC;
+}
+
+void MyDeadOrAlive::OnPeriodicDeadOrAlive(const int /*channel*/,
+                                          const bool alive) {
+  if (alive) {
+    TEST_LOG("ALIVE\n");
+  } else {
+    TEST_LOG("DEAD\n");
+  }
+  fflush(NULL);
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+void MyMedia::Process(const int channel,
+                      const ProcessingTypes type,
+                      WebRtc_Word16 audio_10ms[],
+                      const int length,
+                      const int samplingFreqHz,
+                      const bool stereo) {
+  for (int i = 0; i < length; i++) {
+    if (!stereo) {
+      audio_10ms[i] = (WebRtc_Word16) (audio_10ms[i] *
+          sin(2.0 * 3.14 * f * 400.0 / samplingFreqHz));
+    } else {
+      // interleaved stereo
+      audio_10ms[2 * i] = (WebRtc_Word16) (audio_10ms[2 * i] *
+          sin(2.0 * 3.14 * f * 400.0 / samplingFreqHz));
+      audio_10ms[2 * i + 1] = (WebRtc_Word16) (audio_10ms[2 * i + 1] *
+          sin(2.0 * 3.14 * f * 400.0 / samplingFreqHz));
+    }
+    f++;
+  }
+}
+#endif
+
+MyMedia mobj;
+
+FakeExternalTransport::FakeExternalTransport(VoENetwork* ptr)
+    : my_network_(ptr),
+      thread_(NULL),
+      lock_(NULL),
+      event_(NULL),
+      length_(0),
+      channel_(0),
+      delay_is_enabled_(0),
+      delay_time_in_ms_(0) {
+  const char* threadName = "external_thread";
+  lock_ = CriticalSectionWrapper::CreateCriticalSection();
+  event_ = EventWrapper::Create();
+  thread_ = ThreadWrapper::CreateThread(Run, this, kHighPriority, threadName);
+  if (thread_) {
+    unsigned int id;
+    thread_->Start(id);
+  }
+}
+
+FakeExternalTransport::~FakeExternalTransport() {
+  if (thread_) {
+    thread_->SetNotAlive();
+    event_->Set();
+    if (thread_->Stop()) {
+      delete thread_;
+      thread_ = NULL;
+      delete event_;
+      event_ = NULL;
+      delete lock_;
+      lock_ = NULL;
+    }
+  }
+}
+
+bool FakeExternalTransport::Run(void* ptr) {
+  return static_cast<FakeExternalTransport*> (ptr)->Process();
+}
+
+bool FakeExternalTransport::Process() {
+  switch (event_->Wait(500)) {
+    case kEventSignaled:
+      lock_->Enter();
+      my_network_->ReceivedRTPPacket(channel_, packet_buffer_, length_);
+      lock_->Leave();
+      return true;
+    case kEventTimeout:
+      return true;
+    case kEventError:
+      break;
+  }
+  return true;
+}
+
+int FakeExternalTransport::SendPacket(int channel, const void *data, int len) {
+  lock_->Enter();
+  if (len < 1612) {
+    memcpy(packet_buffer_, (const unsigned char*) data, len);
+    length_ = len;
+    channel_ = channel;
+  }
+  lock_->Leave();
+  event_->Set(); // triggers ReceivedRTPPacket() from worker thread
+  return len;
+}
+
+int FakeExternalTransport::SendRTCPPacket(int channel, const void *data, int len) {
+  if (delay_is_enabled_) {
+    Sleep(delay_time_in_ms_);
+  }
+  my_network_->ReceivedRTCPPacket(channel, data, len);
+  return len;
+}
+
+void FakeExternalTransport::SetDelayStatus(bool enable, unsigned int delayInMs) {
+  delay_is_enabled_ = enable;
+  delay_time_in_ms_ = delayInMs;
+}
+
+ErrorObserver::ErrorObserver() {
+  code = -1;
+}
+void ErrorObserver::CallbackOnError(const int channel, const int errCode) {
+  code = errCode;
+#ifndef _INSTRUMENTATION_TESTING_
+  TEST_LOG("\n************************\n");
+  TEST_LOG(" RUNTIME ERROR: %d \n", errCode);
+  TEST_LOG("************************\n");
+#endif
+}
+
+void MyTraceCallback::Print(const TraceLevel level,
+                            const char *traceString,
+                            const int length) {
+  if (traceString) {
+    char* tmp = new char[length];
+    memcpy(tmp, traceString, length);
+    TEST_LOG("%s", tmp);
+    TEST_LOG("\n");
+    delete[] tmp;
+  }
+}
+
+void RtcpAppHandler::OnApplicationDataReceived(
+    const int /*channel*/, const unsigned char sub_type,
+    const unsigned int name, const unsigned char* data,
+    const unsigned short length_in_bytes) {
+  length_in_bytes_ = length_in_bytes;
+  memcpy(data_, &data[0], length_in_bytes);
+  sub_type_ = sub_type;
+  name_ = name;
+}
+
+void RtcpAppHandler::Reset() {
+  length_in_bytes_ = 0;
+  memset(data_, 0, sizeof(data_));
+  sub_type_ = 0;
+  name_ = 0;
+}
+
+void my_encryption::encrypt(int, unsigned char * in_data,
+                            unsigned char * out_data,
+                            int bytes_in,
+                            int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in + 2; // length is increased by 2
+}
+
+void my_encryption::decrypt(int, unsigned char * in_data,
+                            unsigned char * out_data,
+                            int bytes_in,
+                            int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in - 2; // length is decreased by 2
+}
+
+void my_encryption::encrypt_rtcp(int,
+                                 unsigned char * in_data,
+                                 unsigned char * out_data,
+                                 int bytes_in,
+                                 int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in + 2;
+}
+
+void my_encryption::decrypt_rtcp(int, unsigned char * in_data,
+                                 unsigned char * out_data,
+                                 int bytes_in,
+                                 int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in + 2;
+}
+
+void SubAPIManager::DisplayStatus() const {
+  TEST_LOG("Supported sub APIs:\n\n");
+  if (_base)
+    TEST_LOG("  Base\n");
+  if (_callReport)
+    TEST_LOG("  CallReport\n");
+  if (_codec)
+    TEST_LOG("  Codec\n");
+  if (_dtmf)
+    TEST_LOG("  Dtmf\n");
+  if (_encryption)
+    TEST_LOG("  Encryption\n");
+  if (_externalMedia)
+    TEST_LOG("  ExternalMedia\n");
+  if (_file)
+    TEST_LOG("  File\n");
+  if (_hardware)
+    TEST_LOG("  Hardware\n");
+  if (_netEqStats)
+    TEST_LOG("  NetEqStats\n");
+  if (_network)
+    TEST_LOG("  Network\n");
+  if (_rtp_rtcp)
+    TEST_LOG("  RTP_RTCP\n");
+  if (_videoSync)
+    TEST_LOG("  VideoSync\n");
+  if (_volumeControl)
+    TEST_LOG("  VolumeControl\n");
+  if (_apm)
+    TEST_LOG("  AudioProcessing\n");
+  ANL();
+  TEST_LOG("Excluded sub APIs:\n\n");
+  if (!_base)
+    TEST_LOG("  Base\n");
+  if (!_callReport)
+    TEST_LOG("  CallReport\n");
+  if (!_codec)
+    TEST_LOG("  Codec\n");
+  if (!_dtmf)
+    TEST_LOG("  Dtmf\n");
+  if (!_encryption)
+    TEST_LOG("  Encryption\n");
+  if (!_externalMedia)
+    TEST_LOG("  ExternalMedia\n");
+  if (!_file)
+    TEST_LOG("  File\n");
+  if (!_hardware)
+    TEST_LOG("  Hardware\n");
+  if (!_netEqStats)
+    TEST_LOG("  NetEqStats\n");
+  if (!_network)
+    TEST_LOG("  Network\n");
+  if (!_rtp_rtcp)
+    TEST_LOG("  RTP_RTCP\n");
+  if (!_videoSync)
+    TEST_LOG("  VideoSync\n");
+  if (!_volumeControl)
+    TEST_LOG("  VolumeControl\n");
+  if (!_apm)
+    TEST_LOG("  AudioProcessing\n");
+  ANL();
+}
+
+bool SubAPIManager::GetExtendedMenuSelection(ExtendedSelection& sel) {
+  printf("------------------------------------------------\n");
+  printf("Select extended test\n\n");
+  printf(" (0)  None\n");
+  printf("- - - - - - - - - - - - - - - - - - - - - - - - \n");
+  printf(" (1)  Base");
+  if (_base)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (2)  CallReport");
+  if (_callReport)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (3)  Codec");
+  if (_codec)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (4)  Dtmf");
+  if (_dtmf)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (5)  Encryption");
+  if (_encryption)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (6)  VoEExternalMedia");
+  if (_externalMedia)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (7)  File");
+  if (_file)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (8)  Mixing");
+  if (_file)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (9)  Hardware");
+  if (_hardware)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (10) NetEqStats");
+  if (_netEqStats)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (11) Network");
+  if (_network)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (12) RTP_RTCP");
+  if (_rtp_rtcp)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (13) VideoSync");
+  if (_videoSync)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (14) VolumeControl");
+  if (_volumeControl)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (15) AudioProcessing");
+  if (_apm)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf("\n: ");
+
+  ExtendedSelection xsel(XSEL_Invalid);
+  int selection(0);
+  dummy = scanf("%d", &selection);
+
+  switch (selection) {
+    case 0:
+      xsel = XSEL_None;
+      break;
+    case 1:
+      if (_base)
+        xsel = XSEL_Base;
+      break;
+    case 2:
+      if (_callReport)
+        xsel = XSEL_CallReport;
+      break;
+    case 3:
+      if (_codec)
+        xsel = XSEL_Codec;
+      break;
+    case 4:
+      if (_dtmf)
+        xsel = XSEL_DTMF;
+      break;
+    case 5:
+      if (_encryption)
+        xsel = XSEL_Encryption;
+      break;
+    case 6:
+      if (_externalMedia)
+        xsel = XSEL_ExternalMedia;
+      break;
+    case 7:
+      if (_file)
+        xsel = XSEL_File;
+      break;
+    case 8:
+      if (_file)
+        xsel = XSEL_Mixing;
+      break;
+    case 9:
+      if (_hardware)
+        xsel = XSEL_Hardware;
+      break;
+    case 10:
+      if (_netEqStats)
+        xsel = XSEL_NetEqStats;
+      break;
+    case 11:
+      if (_network)
+        xsel = XSEL_Network;
+      break;
+    case 12:
+      if (_rtp_rtcp)
+        xsel = XSEL_RTP_RTCP;
+      break;
+    case 13:
+      if (_videoSync)
+        xsel = XSEL_VideoSync;
+      break;
+    case 14:
+      if (_volumeControl)
+        xsel = XSEL_VolumeControl;
+      break;
+    case 15:
+      if (_apm)
+        xsel = XSEL_AudioProcessing;
+      break;
+    default:
+      xsel = XSEL_Invalid;
+      break;
+  }
+  if (xsel == XSEL_Invalid)
+    printf("Invalid selection!\n");
+
+  sel = xsel;
+  _xsel = xsel;
+
+  return (xsel != XSEL_Invalid);
+}
+
+VoETestManager::VoETestManager()
+    : initialized_(false),
+      voice_engine_(NULL),
+      voe_base_(0),
+      voe_call_report_(0),
+      voe_codec_(0),
+      voe_dtmf_(0),
+      voe_encrypt_(0),
+      voe_xmedia_(0),
+      voe_file_(0),
+      voe_hardware_(0),
+      voe_network_(0),
+#ifdef _TEST_NETEQ_STATS_
+      voe_neteq_stats_(NULL),
+#endif
+      voe_rtp_rtcp_(0),
+      voe_vsync_(0),
+      voe_volume_control_(0),
+      voe_apm_(0)
+{
+}
+
+VoETestManager::~VoETestManager() {
+}
+
+bool VoETestManager::Init() {
+  if (initialized_)
+    return true;
+
+  if (VoiceEngine::SetTraceFile(NULL) != -1) {
+    // should not be possible to call a Trace method before the VoE is
+    // created
+    TEST_LOG("\nError at line: %i (VoiceEngine::SetTraceFile()"
+      "should fail)!\n", __LINE__);
+    return false;
+  }
+
+  voice_engine_ = VoiceEngine::Create();
+  if (!voice_engine_) {
+    TEST_LOG("Failed to create VoiceEngine\n");
+    return false;
+  }
+
+  return true;
+}
+
+void VoETestManager::GetInterfaces() {
+  if (voice_engine_) {
+    voe_base_ = VoEBase::GetInterface(voice_engine_);
+    voe_codec_ = VoECodec::GetInterface(voice_engine_);
+    voe_volume_control_ = VoEVolumeControl::GetInterface(voice_engine_);
+    voe_dtmf_ = VoEDtmf::GetInterface(voice_engine_);
+    voe_rtp_rtcp_ = VoERTP_RTCP::GetInterface(voice_engine_);
+    voe_apm_ = VoEAudioProcessing::GetInterface(voice_engine_);
+    voe_network_ = VoENetwork::GetInterface(voice_engine_);
+    voe_file_ = VoEFile::GetInterface(voice_engine_);
+#ifdef _TEST_VIDEO_SYNC_
+    voe_vsync_ = VoEVideoSync::GetInterface(voice_engine_);
+#endif
+    voe_encrypt_ = VoEEncryption::GetInterface(voice_engine_);
+    voe_hardware_ = VoEHardware::GetInterface(voice_engine_);
+    // Set the audio layer to use in all tests
+    if (voe_hardware_) {
+      int res = voe_hardware_->SetAudioDeviceLayer(TESTED_AUDIO_LAYER);
+      if (res < 0) {
+        printf("\nERROR: failed to set audio layer to use in "
+          "testing\n");
+      } else {
+        printf("\nAudio layer %d will be used in testing\n",
+               TESTED_AUDIO_LAYER);
+      }
+    }
+#ifdef _TEST_XMEDIA_
+    voe_xmedia_ = VoEExternalMedia::GetInterface(voice_engine_);
+#endif
+#ifdef _TEST_CALL_REPORT_
+    voe_call_report_ = VoECallReport::GetInterface(voice_engine_);
+#endif
+#ifdef _TEST_NETEQ_STATS_
+    voe_neteq_stats_ = VoENetEqStats::GetInterface(voice_engine_);
+#endif
+  }
+}
+
+int VoETestManager::ReleaseInterfaces() {
+  int err(0), remInt(1), j(0);
+  bool releaseOK(true);
+
+  if (voe_base_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_base_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d base interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    // try to release one additional time (should fail)
+    TEST_MUSTPASS(-1 != voe_base_->Release());
+    err = voe_base_->LastError();
+    // it is considered safe to delete even if Release has been called
+    // too many times
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_codec_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_codec_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d codec interfaces"
+        " (should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_codec_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_volume_control_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_volume_control_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d volume interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_volume_control_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_dtmf_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_dtmf_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d dtmf interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_dtmf_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_rtp_rtcp_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_rtp_rtcp_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d rtp/rtcp interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_rtp_rtcp_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_apm_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_apm_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d apm interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_apm_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_network_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_network_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d network interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_network_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_file_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_file_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d file interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_file_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+#ifdef _TEST_VIDEO_SYNC_
+  if (voe_vsync_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_vsync_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d video sync interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_vsync_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+#endif
+  if (voe_encrypt_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_encrypt_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d encryption interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_encrypt_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+  if (voe_hardware_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_hardware_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d hardware interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_hardware_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+#ifdef _TEST_XMEDIA_
+  if (voe_xmedia_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_xmedia_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d external media interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_xmedia_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+#endif
+#ifdef _TEST_CALL_REPORT_
+  if (voe_call_report_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_call_report_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d call report interfaces"
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_call_report_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+#endif
+#ifdef _TEST_NETEQ_STATS_
+  if (voe_neteq_stats_) {
+    for (remInt = 1, j = 0; remInt > 0; j++)
+      TEST_MUSTPASS(-1 == (remInt = voe_neteq_stats_->Release()));
+    if (j > 1) {
+      TEST_LOG("\n\n*** Error: released %d neteq stat interfaces "
+        "(should only be 1) \n", j);
+      releaseOK = false;
+    }
+    TEST_MUSTPASS(-1 != voe_neteq_stats_->Release());
+    err = voe_base_->LastError();
+    TEST_MUSTPASS(err != VE_INTERFACE_NOT_FOUND);
+  }
+#endif
+  if (false == VoiceEngine::Delete(voice_engine_)) {
+    TEST_LOG("\n\nVoiceEngine::Delete() failed. \n");
+    releaseOK = false;
+  }
+
+  if (VoiceEngine::SetTraceFile(NULL) != -1) {
+    TEST_LOG("\nError at line: %i (VoiceEngine::SetTraceFile()"
+      "should fail)!\n", __LINE__);
+  }
+
+  return (releaseOK == true) ? 0 : -1;
+}
+
+int VoETestManager::SetUp(ErrorObserver* error_observer) {
+  char char_buffer[1024];
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+#if defined(WEBRTC_ANDROID)
+  TEST_MUSTPASS(voe_hardware_->SetLoudspeakerStatus(false));
+#endif
+
+  TEST_MUSTPASS(voe_base_->RegisterVoiceEngineObserver(*error_observer));
+
+  TEST_LOG("Get version \n");
+  TEST_MUSTPASS(voe_base_->GetVersion(char_buffer));
+  TEST_LOG("--------------------\n%s\n--------------------\n", char_buffer);
+
+  TEST_LOG("Create channel \n");
+  int nChannels = voe_base_->MaxNumOfChannels();
+  TEST_MUSTPASS(!(nChannels > 0));
+  TEST_LOG("Max number of channels = %d \n", nChannels);
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  return 0;
+}
+
+// Configures the send/receive path for channel 0. Under
+// WEBRTC_EXTERNAL_TRANSPORT the fake transport |channel0_transport| is
+// registered instead of sockets; otherwise the channel is set to send PCMU to
+// 127.0.0.1:8000, and the stored send/receive addresses are read back and
+// verified. Returns 0 on success.
+int VoETestManager::TestStartStreaming(FakeExternalTransport& channel0_transport) {
+  TEST_LOG("\n\n+++ Starting streaming +++\n\n");
+
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_LOG("Enabling external transport \n");
+  TEST_MUSTPASS(voe_network_->RegisterExternalTransport(0, channel0_transport));
+#else
+  TEST_LOG("Setting send and receive parameters \n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  // No IP specified => "0.0.0.0" will be stored.
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0,8000));
+
+  // PCMU (G.711 mu-law): mono, 160-sample packets at 8 kHz (20 ms),
+  // payload type 0, 64 kbps.
+  CodecInst Jing_inst;
+  Jing_inst.channels = 1;
+  Jing_inst.pacsize = 160;
+  Jing_inst.plfreq = 8000;
+  Jing_inst.pltype = 0;
+  Jing_inst.rate = 64000;
+  strcpy(Jing_inst.plname, "PCMU");
+  TEST_MUSTPASS(voe_codec_->SetSendCodec(0, Jing_inst));
+
+  // Pre-fill ip_address with a bogus value so the checks below prove that
+  // GetSendDestination() actually overwrites the buffer.
+  int port = -1;
+  int src_port = -1;
+  int rtcp_port = -1;
+  char ip_address[64] = { 0 };
+  strcpy(ip_address, "10.10.10.10");
+  TEST_MUSTPASS(voe_base_->GetSendDestination(0, port, ip_address, src_port,
+                                         rtcp_port));
+  TEST_MUSTPASS(8000 != port);
+  TEST_MUSTPASS(8000 != src_port);
+  // The RTCP port is expected to be RTP port + 1.
+  TEST_MUSTPASS(8001 != rtcp_port);
+  TEST_MUSTPASS(_stricmp(ip_address, "127.0.0.1"));
+
+  port = -1;
+  rtcp_port = -1;
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(0, port, rtcp_port, ip_address));
+  TEST_MUSTPASS(8000 != port);
+  TEST_MUSTPASS(8001 != rtcp_port);
+  TEST_MUSTPASS(_stricmp(ip_address, "0.0.0.0"));
+#endif
+  return 0;
+}
+
+// Starts receive, playout and send on channel 0, runs 2 s of live full-duplex
+// audio, then (when the file API is available) switches the microphone input
+// to playing AudioFilename() so later tests do not need live speech.
+// Returns 0.
+int VoETestManager::TestStartPlaying() {
+  TEST_LOG("Start listening, playout and sending \n");
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  // Run in full duplex.
+  TEST_LOG("You should now hear yourself, running default codec (PCMU)\n");
+  SLEEP(2000);
+
+  if (voe_file_) {
+    TEST_LOG("Start playing a file as microphone, so you don't need to"
+      " speak all the time\n");
+    // Boolean args are presumably loop/mix flags — confirm against VoEFile.
+    TEST_MUSTPASS(voe_file_->StartPlayingFileAsMicrophone(0,
+            AudioFilename(),
+            true,
+            true));
+    SLEEP(1000);
+  }
+  return 0;
+}
+
+// Runs the "standard" interactive test pass: base setup and live streaming,
+// followed by optional sections (video sync, encryption, external media,
+// NetEQ statistics) each gated by a _TEST_* define, and finally stops
+// streaming and tears the engine down. Returns 0 on success, -1 if setup or
+// streaming fails.
+int VoETestManager::DoStandardTest() {
+  // Ensure we have all input files:
+  TEST_MUSTPASS(!strcmp("", AudioFilename()));
+
+  TEST_LOG("\n\n+++ Base tests +++\n\n");
+
+  ErrorObserver error_observer;
+  if (SetUp(&error_observer) != 0) return -1;
+
+  // NOTE(review): unlike the surrounding calls, this return value is not
+  // checked with TEST_MUSTPASS.
+  voe_network_->SetSourceFilter(0, 0);
+
+  FakeExternalTransport channel0_transport(voe_network_);
+  if (TestStartStreaming(channel0_transport) != 0) return -1;
+  if (TestStartPlaying() != 0) return -1;
+
+  //////////////
+  // Video Sync
+
+#ifdef _TEST_VIDEO_SYNC_
+  TEST_LOG("\n\n+++ Video sync tests +++\n\n");
+
+  unsigned int val;
+  TEST_MUSTPASS(voe_vsync_->GetPlayoutTimestamp(0, val));
+  TEST_LOG("Playout timestamp = %lu\n", (long unsigned int) val);
+
+  TEST_LOG("Init timestamp and sequence number manually\n");
+  // Setting these while sending must fail; stop sending, set them, restart.
+  TEST_MUSTPASS(!voe_vsync_->SetInitTimestamp(0, 12345));
+  TEST_MUSTPASS(!voe_vsync_->SetInitSequenceNumber(0, 123));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_vsync_->SetInitTimestamp(0, 12345));
+  TEST_MUSTPASS(voe_vsync_->SetInitSequenceNumber(0, 123));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  if (voe_file_) {
+    TEST_LOG("Start playing a file as microphone again \n");
+    TEST_MUSTPASS(voe_file_->StartPlayingFileAsMicrophone(0,
+            AudioFilename(),
+            true,
+            true));
+  }
+  SLEEP(3000);
+
+  TEST_LOG("Check delay estimates during 15 seconds, verify that "
+    "they stabilize during this time\n");
+  int valInt = -1;
+  for (int i = 0; i < 15; i++) {
+    TEST_MUSTPASS(voe_vsync_->GetDelayEstimate(0, valInt));
+    TEST_LOG("Delay estimate = %d ms\n", valInt);
+#if defined(MAC_IPHONE)
+    TEST_MUSTPASS(valInt <= 30);
+#else
+    TEST_MUSTPASS(valInt <= 45); // 45=20+25 => can't be this low
+#endif
+    SLEEP(1000);
+  }
+
+  TEST_LOG("Setting NetEQ min delay to 500 milliseconds and repeat "
+    "the test above\n");
+  TEST_MUSTPASS(voe_vsync_->SetMinimumPlayoutDelay(0, 500));
+  for (int i = 0; i < 15; i++) {
+    TEST_MUSTPASS(voe_vsync_->GetDelayEstimate(0, valInt));
+    TEST_LOG("Delay estimate = %d ms\n", valInt);
+    TEST_MUSTPASS(valInt <= 45);
+    SLEEP(1000);
+  }
+
+  TEST_LOG("Setting NetEQ min delay to 0 milliseconds and repeat"
+    " the test above\n");
+  TEST_MUSTPASS(voe_vsync_->SetMinimumPlayoutDelay(0, 0));
+  for (int i = 0; i < 15; i++) {
+    TEST_MUSTPASS(voe_vsync_->GetDelayEstimate(0, valInt));
+    TEST_LOG("Delay estimate = %d ms\n", valInt);
+    TEST_MUSTPASS(valInt <= 45);
+    SLEEP(1000);
+  }
+
+  // NOTE(review): precedence makes this "Win32, or (Linux and not Android)";
+  // probably the intent, but the grouping of parentheses is suspicious.
+#if (defined (_WIN32) || (defined(WEBRTC_LINUX)) && !defined(WEBRTC_ANDROID))
+  valInt = -1;
+  TEST_MUSTPASS(voe_vsync_->GetPlayoutBufferSize(valInt));
+  TEST_LOG("Soundcard buffer size = %d ms\n", valInt);
+#endif
+#else
+  TEST_LOG("\n\n+++ Video sync tests NOT ENABLED +++\n");
+#endif  // #ifdef _TEST_VIDEO_SYNC_
+  //////////////
+  // Encryption
+
+#ifdef _TEST_ENCRYPT_
+  TEST_LOG("\n\n+++ Encryption tests +++\n\n");
+
+#ifdef WEBRTC_SRTP
+  TEST_LOG("SRTP tests:\n");
+
+  // 30-byte SRTP master key (repeating 1..9,0 pattern), used for both
+  // the send and receive direction below.
+  unsigned char encrKey[30] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0,
+    1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
+
+  TEST_LOG("Enable SRTP encryption and decryption, you should still hear"
+      " the voice\n");
+  TEST_MUSTPASS(voe_encrypt_->EnableSRTPSend(0,
+          kCipherAes128CounterMode,
+          30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, encrKey));
+  TEST_MUSTPASS(voe_encrypt_->EnableSRTPReceive(0,
+          kCipherAes128CounterMode,
+          30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, encrKey));
+  SLEEP(2000);
+
+  TEST_LOG("Disabling decryption, you should hear nothing or garbage\n");
+  TEST_MUSTPASS(voe_encrypt_->DisableSRTPReceive(0));
+  SLEEP(2000);
+
+  TEST_LOG("Enable decryption again, you should hear the voice again\n");
+  TEST_MUSTPASS(voe_encrypt_->EnableSRTPReceive(0,
+          kCipherAes128CounterMode,
+          30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, encrKey));
+  SLEEP(2000);
+
+  TEST_LOG("Disabling encryption and enabling decryption, you should"
+      " hear nothing\n");
+  TEST_MUSTPASS(voe_encrypt_->DisableSRTPSend(0));
+  SLEEP(2000);
+
+  TEST_LOG("Back to normal\n");
+  // both SRTP sides are now inactive
+  TEST_MUSTPASS(voe_encrypt_->DisableSRTPReceive(0));
+  SLEEP(2000);
+
+  // Same as above but with the trailing flag enabling SRTCP as well.
+  TEST_LOG("Enable SRTP and SRTCP encryption and decryption,"
+      " you should still hear the voice\n");
+  TEST_MUSTPASS(voe_encrypt_->EnableSRTPSend(0,
+          kCipherAes128CounterMode,
+          30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, encrKey, true));
+  TEST_MUSTPASS(voe_encrypt_->EnableSRTPReceive(0,
+          kCipherAes128CounterMode,
+          30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, encrKey, true));
+  SLEEP(2000);
+
+  TEST_LOG("Back to normal\n");
+  TEST_MUSTPASS(voe_encrypt_->DisableSRTPSend(0));
+  // both SRTP sides are now inactive
+  TEST_MUSTPASS(voe_encrypt_->DisableSRTPReceive(0));
+  SLEEP(2000);
+
+#else
+  TEST_LOG("Skipping SRTP tests - WEBRTC_SRTP not defined \n");
+#endif // #ifdef WEBRTC_SRTP
+  TEST_LOG("\nExternal encryption tests:\n");
+  my_encryption * encObj = new my_encryption;
+  TEST_MUSTPASS(voe_encrypt_->RegisterExternalEncryption(0, *encObj));
+  TEST_LOG("Encryption enabled but you should still hear the voice\n");
+  SLEEP(2000);
+  TEST_LOG("Removing encryption object and deleting it\n");
+  TEST_MUSTPASS(voe_encrypt_->DeRegisterExternalEncryption(0));
+  delete encObj;
+  SLEEP(2000);
+#else
+  TEST_LOG("\n\n+++ Encryption tests NOT ENABLED +++\n");
+#endif // #ifdef _TEST_ENCRYPT_
+  //////////////////
+  // External media
+
+#ifdef _TEST_XMEDIA_
+  TEST_LOG("\n\n+++ External media tests +++\n\n");
+
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  TEST_LOG("Stop playing file as microphone \n");
+  TEST_LOG("==> Talk into the microphone \n");
+  TEST_MUSTPASS(voe_file_->StopPlayingFileAsMicrophone(0));
+
+  TEST_LOG("Enabling external playout\n");
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_xmedia_->SetExternalPlayoutStatus(true));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  // 200 iterations x 160 samples = 32000 samples (2 s at 16 kHz).
+  TEST_LOG("Writing 2 secs of play data to vector\n");
+  int getLen;
+  WebRtc_Word16 speechData[32000];
+  for (int i = 0; i < 200; i++) {
+    TEST_MUSTPASS(voe_xmedia_->ExternalPlayoutGetData(speechData+i*160,
+            16000,
+            100,
+            getLen));
+    TEST_MUSTPASS(160 != getLen);
+    SLEEP(10);
+  }
+
+  TEST_LOG("Disabling external playout\n");
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_xmedia_->SetExternalPlayoutStatus(false));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  TEST_LOG("Enabling external recording\n");
+  TEST_MUSTPASS(voe_xmedia_->SetExternalRecordingStatus(true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  // Feed back the captured samples as the recording source.
+  TEST_LOG("Inserting record data from vector\n");
+  for (int i = 0; i < 200; i++) {
+    TEST_MUSTPASS(voe_xmedia_->ExternalRecordingInsertData(speechData+i*160,
+            160,
+            16000,
+            20));
+    SLEEP(10);
+  }
+
+  TEST_LOG("Disabling external recording\n");
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_xmedia_->SetExternalRecordingStatus(false));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  TEST_LOG("==> Start playing a file as microphone again \n");
+  TEST_MUSTPASS(voe_file_->StartPlayingFileAsMicrophone(0, AudioFilename(),
+          true, true));
+#else
+  TEST_LOG("Skipping external rec and playout tests - \
+             WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT not defined \n");
+#endif // WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  TEST_LOG("Enabling playout external media processing => "
+    "played audio should now be affected \n");
+  TEST_MUSTPASS(voe_xmedia_->RegisterExternalMediaProcessing(
+          -1, kPlaybackAllChannelsMixed, mobj));
+  SLEEP(2000);
+  TEST_LOG("Back to normal again \n");
+  TEST_MUSTPASS(voe_xmedia_->DeRegisterExternalMediaProcessing(
+          -1, kPlaybackAllChannelsMixed));
+  SLEEP(2000);
+  // Note that we must do per channel here because PlayFileAsMicrophone
+  // is only done on ch 0.
+  TEST_LOG("Enabling recording external media processing => "
+    "played audio should now be affected \n");
+  TEST_MUSTPASS(voe_xmedia_->RegisterExternalMediaProcessing(
+          0, kRecordingPerChannel, mobj));
+  SLEEP(2000);
+  TEST_LOG("Back to normal again \n");
+  TEST_MUSTPASS(voe_xmedia_->DeRegisterExternalMediaProcessing(
+          0, kRecordingPerChannel));
+  SLEEP(2000);
+  TEST_LOG("Enabling recording external media processing => "
+    "speak and make sure that voice is affected \n");
+  TEST_MUSTPASS(voe_xmedia_->RegisterExternalMediaProcessing(
+          -1, kRecordingAllChannelsMixed, mobj));
+  SLEEP(2000);
+  TEST_LOG("Back to normal again \n");
+  TEST_MUSTPASS(voe_xmedia_->DeRegisterExternalMediaProcessing(
+          -1, kRecordingAllChannelsMixed));
+  SLEEP(2000);
+#else
+  TEST_LOG("\n\n+++ External media tests NOT ENABLED +++\n");
+#endif // #ifdef _TEST_XMEDIA_
+  /////////////////////
+  // NetEQ statistics
+
+#ifdef _TEST_NETEQ_STATS_
+  TEST_LOG("\n\n+++ NetEQ statistics tests +++\n\n");
+
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+  // Dump the full NetworkStatistics struct for channel 0; values are only
+  // printed, not asserted against.
+  NetworkStatistics nStats;
+  TEST_MUSTPASS(voe_neteq_stats_->GetNetworkStatistics(0, nStats));
+  TEST_LOG("\nNetwork statistics: \n");
+  TEST_LOG("    currentAccelerateRate     = %hu \n",
+           nStats.currentAccelerateRate);
+  TEST_LOG("    currentBufferSize         = %hu \n",
+           nStats.currentBufferSize);
+  TEST_LOG("    currentDiscardRate        = %hu \n",
+           nStats.currentDiscardRate);
+  TEST_LOG("    currentExpandRate         = %hu \n",
+           nStats.currentExpandRate);
+  TEST_LOG("    currentPacketLossRate     = %hu \n",
+           nStats.currentPacketLossRate);
+  TEST_LOG("    currentPreemptiveRate     = %hu \n",
+           nStats.currentPreemptiveRate);
+  TEST_LOG("    preferredBufferSize       = %hu \n",
+           nStats.preferredBufferSize);
+  TEST_LOG("    jitterPeaksFound          = %i \n",
+           nStats.jitterPeaksFound);
+  TEST_LOG("    clockDriftPPM             = %i \n",
+           nStats.clockDriftPPM);
+  TEST_LOG("    meanWaitingTimeMs         = %i \n",
+           nStats.meanWaitingTimeMs);
+  TEST_LOG("    medianWaitingTimeMs       = %i \n",
+           nStats.medianWaitingTimeMs);
+  TEST_LOG("    minWaitingTimeMs          = %i \n",
+           nStats.minWaitingTimeMs);
+  TEST_LOG("    maxWaitingTimeMs          = %i \n",
+           nStats.maxWaitingTimeMs);
+#else
+  TEST_LOG("Skipping NetEQ statistics tests - "
+      "WEBRTC_VOICE_ENGINE_NETEQ_STATS_API not defined \n");
+#endif // #ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#else
+  TEST_LOG("\n\n+++ NetEQ statistics tests NOT ENABLED +++\n");
+#endif // #ifdef _TEST_NETEQ_STATS_
+  //////////////////
+  // Stop streaming
+  TEST_LOG("\n\n+++ Stop streaming +++\n\n");
+
+  TEST_LOG("Stop playout, sending and listening \n");
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // Exit:
+  TEST_LOG("Delete channel and terminate VE \n");
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// Dispatches one auto-test run: creates the VoiceEngine and sub-APIs, runs
+// the suite selected by |testType| (Standard/Extended/Stress/Unit/CPU) — for
+// Extended, looping over |extendedSel| until XSEL_None — then releases all
+// interfaces and logs an overall pass/fail verdict.
+int runAutoTest(TestType testType, ExtendedSelection extendedSel) {
+  SubAPIManager apiMgr;
+  apiMgr.DisplayStatus();
+
+  ////////////////////////////////////
+  // Create VoiceEngine and sub API:s
+
+  voetest::VoETestManager tm;
+  if (!tm.Init()) {
+    return -1;
+  }
+  tm.GetInterfaces();
+
+  //////////////////////
+  // Run standard tests
+
+  int mainRet(-1);
+  if (testType == Standard) {
+    mainRet = tm.DoStandardTest();
+
+    ////////////////////////////////
+    // Create configuration summary
+    TEST_LOG("\n\n+++ Creating configuration summary file +++\n");
+    createSummary(tm.VoiceEnginePtr());
+  } else if (testType == Extended) {
+    VoEExtendedTest xtend(tm);
+
+    // Run every selected extended sub-suite; any failure (-1) stops the loop.
+    mainRet = 0;
+    while (extendedSel != XSEL_None) {
+      if (extendedSel == XSEL_Base || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestBase()) == -1)
+          break;
+        xtend.TestPassed("Base");
+      }
+      if (extendedSel == XSEL_CallReport || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestCallReport()) == -1)
+          break;
+        xtend.TestPassed("CallReport");
+      }
+      if (extendedSel == XSEL_Codec || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestCodec()) == -1)
+          break;
+        xtend.TestPassed("Codec");
+      }
+      if (extendedSel == XSEL_DTMF || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestDtmf()) == -1)
+          break;
+        xtend.TestPassed("Dtmf");
+      }
+      if (extendedSel == XSEL_Encryption || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestEncryption()) == -1)
+          break;
+        xtend.TestPassed("Encryption");
+      }
+      if (extendedSel == XSEL_ExternalMedia || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestExternalMedia()) == -1)
+          break;
+        xtend.TestPassed("ExternalMedia");
+      }
+      if (extendedSel == XSEL_File || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestFile()) == -1)
+          break;
+        xtend.TestPassed("File");
+      }
+      if (extendedSel == XSEL_Mixing || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestMixing()) == -1)
+          break;
+        xtend.TestPassed("Mixing");
+      }
+      if (extendedSel == XSEL_Hardware || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestHardware()) == -1)
+          break;
+        xtend.TestPassed("Hardware");
+      }
+      if (extendedSel == XSEL_NetEqStats || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestNetEqStats()) == -1)
+          break;
+        xtend.TestPassed("NetEqStats");
+      }
+      if (extendedSel == XSEL_Network || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestNetwork()) == -1)
+          break;
+        xtend.TestPassed("Network");
+      }
+      if (extendedSel == XSEL_RTP_RTCP || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestRTP_RTCP()) == -1)
+          break;
+        xtend.TestPassed("RTP_RTCP");
+      }
+      if (extendedSel == XSEL_VideoSync || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestVideoSync()) == -1)
+          break;
+        xtend.TestPassed("VideoSync");
+      }
+      if (extendedSel == XSEL_VolumeControl || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestVolumeControl()) == -1)
+          break;
+        xtend.TestPassed("VolumeControl");
+      }
+      if (extendedSel == XSEL_AudioProcessing || extendedSel == XSEL_All) {
+        if ((mainRet = xtend.TestAPM()) == -1)
+          break;
+        xtend.TestPassed("AudioProcessing");
+      }
+      // Let the user pick another sub-suite (or XSEL_None to stop).
+      apiMgr.GetExtendedMenuSelection(extendedSel);
+    } // while (extendedSel != XSEL_None)
+  } else if (testType == Stress) {
+    VoEStressTest stressTest(tm);
+    mainRet = stressTest.DoTest();
+  } else if (testType == Unit) {
+    VoEUnitTest unitTest(tm);
+    mainRet = unitTest.DoTest();
+  } else if (testType == CPU) {
+    VoECpuTest cpuTest(tm);
+    mainRet = cpuTest.DoTest();
+  } else {
+    // Should never end up here
+    TEST_LOG("INVALID SELECTION \n");
+  }
+
+  //////////////////
+  // Release/Delete
+
+  int releaseOK = tm.ReleaseInterfaces();
+
+  if ((0 == mainRet) && (releaseOK != -1)) {
+    TEST_LOG("\n\n*** All tests passed *** \n\n");
+  } else {
+    TEST_LOG("\n\n*** Test failed! *** \n");
+  }
+
+  // NOTE(review): mainRet/releaseOK are only logged — this always returns 0,
+  // so callers (and the process exit code) cannot detect a failed run.
+  return 0;
+}
+
+// Writes a configuration summary (platform, engine version, build date,
+// compiled-in codecs, feature flags, and available sub-APIs) to summary.txt.
+// |ve| is only used to obtain a VoEBase interface, which is released before
+// returning.
+// NOTE(review): summaryFilename is declared only under MAC_IPHONE but used
+// unconditionally below — this cannot compile on other platforms as written.
+void createSummary(VoiceEngine* ve) {
+  int len;
+  char str[256];
+
+#ifdef MAC_IPHONE
+  char summaryFilename[256];
+  GetDocumentsDir(summaryFilename, 256);
+  strcat(summaryFilename, "/summary.txt");
+#endif
+
+  VoEBase* voe_base_ = VoEBase::GetInterface(ve);
+  // NOTE(review): fopen() result is not checked; fprintf() on a NULL stream
+  // would crash.
+  FILE* stream = fopen(summaryFilename, "wt");
+
+  // NOTE(review): "WebRTc" is a typo in the emitted header text.
+  sprintf(str, "WebRTc VoiceEngine ");
+#if defined(_WIN32)
+  strcat(str, "Win");
+#elif defined(WEBRTC_LINUX) && defined(WEBRTC_TARGET_PC) && \
+     !defined(WEBRTC_ANDROID)
+  strcat(str, "Linux");
+#elif defined(WEBRTC_MAC) && !defined(MAC_IPHONE)
+  strcat(str, "Mac");
+#elif defined(WEBRTC_ANDROID)
+  strcat(str, "Android");
+#elif defined(MAC_IPHONE)
+  strcat(str, "iPhone");
+#endif
+  // Add for other platforms as needed
+
+  // Underline the title with '=' characters matching its length.
+  fprintf(stream, "%s\n", str);
+  len = (int) strlen(str);
+  for (int i = 0; i < len; i++) {
+    fprintf(stream, "=");
+  }
+  fprintf(stream, "\n\n");
+
+  char version[1024];
+  char veVersion[24];
+  voe_base_->GetVersion(version);
+  // find first NL <=> end of VoiceEngine version string
+  int pos = (int) strcspn(version, "\n");
+  // NOTE(review): pos is not clamped to sizeof(veVersion) - 1; a first line
+  // of 24 or more characters overflows veVersion.
+  strncpy(veVersion, version, pos);
+  veVersion[pos] = '\0';
+  sprintf(str, "Version:                    %s\n", veVersion);
+  fprintf(stream, "%s\n", str);
+
+  sprintf(str, "Build date & time:          %s\n", BUILDDATE " " BUILDTIME);
+  fprintf(stream, "%s\n", str);
+
+  // List codecs: G.711 variants always, the rest per compile-time defines.
+  strcpy(str, "G.711 A-law");
+  fprintf(stream, "\nSupported codecs:           %s\n", str);
+  strcpy(str, "                            G.711 mu-law");
+  fprintf(stream, "%s\n", str);
+#ifdef WEBRTC_CODEC_EG711
+  strcpy(str, "                            Enhanced G.711 A-law");
+  fprintf(stream, "%s\n", str);
+  strcpy(str, "                            Enhanced G.711 mu-law");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_IPCMWB
+  strcpy(str, "                            iPCM-wb");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+  strcpy(str, "                            iLBC");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+  strcpy(str, "                            iSAC");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_ISACLC
+  strcpy(str, "                            iSAC-LC");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G722
+  strcpy(str, "                            G.722");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  strcpy(str, "                            G.722.1");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  strcpy(str, "                            G.722.1C");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G723
+  strcpy(str, "                            G.723");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G726
+  strcpy(str, "                            G.726");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G729
+  strcpy(str, "                            G.729");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  strcpy(str, "                            G.729.1");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  strcpy(str, "                            GSM-FR");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_GSMAMR
+  strcpy(str, "                            AMR");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+  strcpy(str, "                            AMR-WB");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_GSMEFR
+  strcpy(str, "                            GSM-EFR");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  strcpy(str, "                            Speex");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_SILK
+  strcpy(str, "                            Silk");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  strcpy(str, "                            L16");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef NETEQFIX_VOXWARE_SC3
+  strcpy(str, "                            Voxware SC3");
+  fprintf(stream, "%s\n", str);
+#endif
+  // Always included
+  strcpy(str, "                            AVT (RFC2833)");
+  fprintf(stream, "%s\n", str);
+#ifdef WEBRTC_CODEC_RED
+  strcpy(str, "                            RED (forward error correction)");
+  fprintf(stream, "%s\n", str);
+#endif
+
+  // Feature flags (Yes/No per compile-time define).
+  fprintf(stream, "\nEcho Control:               ");
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  fprintf(stream, "Yes\n");
+#else
+  fprintf(stream, "No\n");
+#endif
+
+  fprintf(stream, "\nAutomatic Gain Control:     ");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  fprintf(stream, "Yes\n");
+#else
+  fprintf(stream, "No\n");
+#endif
+
+  fprintf(stream, "\nNoise Reduction:            ");
+#ifdef WEBRTC_VOICE_ENGINE_NR
+  fprintf(stream, "Yes\n");
+#else
+  fprintf(stream, "No\n");
+#endif
+
+  fprintf(stream, "\nSRTP:                       ");
+#ifdef WEBRTC_SRTP
+  fprintf(stream, "Yes\n");
+#else
+  fprintf(stream, "No\n");
+#endif
+
+  fprintf(stream, "\nExternal transport only:    ");
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  fprintf(stream, "Yes\n");
+#else
+  fprintf(stream, "No\n");
+#endif
+
+  fprintf(stream, "\nTelephone event detection:  ");
+#ifdef WEBRTC_DTMF_DETECTION
+  fprintf(stream, "Yes\n");
+#else
+  fprintf(stream, "No\n");
+#endif
+
+  // Sub-APIs: VoEBase always, the rest per compile-time defines.
+  strcpy(str, "VoEBase");
+  fprintf(stream, "\nSupported sub-APIs:         %s\n", str);
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+  strcpy(str, "                            VoECodec");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+  strcpy(str, "                            VoEDtmf");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+  strcpy(str, "                            VoEFile");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+  strcpy(str, "                            VoEHardware");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+  strcpy(str, "                            VoENetwork");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+  strcpy(str, "                            VoERTP_RTCP");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+  strcpy(str, "                            VoEVolumeControl");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+  strcpy(str, "                            VoEAudioProcessing");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+  strcpy(str, "                            VoeExternalMedia");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+  strcpy(str, "                            VoENetEqStats");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+  strcpy(str, "                            VoEEncryption");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+  strcpy(str, "                            VoECallReport");
+  fprintf(stream, "%s\n", str);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+  strcpy(str, "                            VoEVideoSync");
+  fprintf(stream, "%s\n", str);
+#endif
+
+  fclose(stream);
+  // Balance the GetInterface() reference taken above.
+  voe_base_->Release();
+}
+
+/*********Knowledge Base******************/
+
+// An example of creating a thread and calling VoiceEngine APIs from that
+// thread, using ThreadWrapper — a generic thread class for all platforms.
+#ifdef THEADTEST // NOTE(review): guard looks like a typo of THREADTEST, so
+                 // this example is never compiled.
+// Definition of Thread Class: example of running a VoiceEngine API call
+// (StartSend) on a dedicated ThreadWrapper thread.
+class ThreadTest
+{
+public:
+  // Starts the worker thread immediately (see constructor below).
+  ThreadTest(
+      VoEBase* voe_base_);
+  ~ThreadTest()
+  {
+    delete _myThread;
+  }
+  void Stop();
+private:
+  // Static entry point required by ThreadWrapper; forwards to the member.
+  static bool StartSend(
+      void* obj);
+  bool StartSend();
+
+  ThreadWrapper* _myThread;  // owned; deleted in the destructor
+  VoEBase* _base;            // not owned
+
+  bool _stopped;
+};
+
+// Main function from where StartSend is invoked as a separate thread.
+// Creates the worker thread and starts it immediately.
+ThreadTest::ThreadTest(
+    VoEBase* voe_base_)
+:
+_stopped(false),
+_base(voe_base_)
+{
+  //Thread Creation
+  _myThread = ThreadWrapper::CreateThread(StartSend, this, kLowPriority);
+  unsigned int id = 0;
+  //Starting the thread
+  _myThread->Start(id);
+}
+
+// Static thread entry point: casts |obj| back to ThreadTest and forwards to
+// the member function (works around the static-callback requirement).
+bool
+ThreadTest::StartSend(
+    void* obj)
+{
+  return ((ThreadTest*)obj)->StartSend();
+}
+
+// Worker body: starts sending on channel 0 once, then lets the thread die.
+bool
+ThreadTest::StartSend()
+{
+  _myThread->SetNotAlive(); //Ensures this function is called only once.
+  _base->StartSend(0);
+  return true;
+}
+
+// NOTE(review): only sets the flag; nothing in this example ever reads
+// _stopped.
+void ThreadTest::Stop()
+{
+  _stopped = true;
+}
+
+//  Use the following to invoke ThreadTest from the main function.
+//  ThreadTest* threadtest = new ThreadTest(voe_base_);
+#endif
+
+// An example to create a thread and call VE API's call from that thread.
+// Specific to Windows Platform
+#ifdef THREAD_TEST_WINDOWS
+//Thread Declaration.  Need to be added in the class controlling/dictating
+// the main code.
+/**
+ private:
+ static unsigned int WINAPI StartSend(void* obj);
+ unsigned int WINAPI StartSend();
+ **/
+
+// Thread definition: static _beginthreadex entry point, forwards |obj| to
+// the member function.
+unsigned int WINAPI mainTest::StartSend(void *obj)
+{
+  return ((mainTest*)obj)->StartSend();
+}
+// Worker body: starts sending on channel 0 from the spawned thread.
+unsigned int WINAPI mainTest::StartSend()
+{
+  // Start sending on channel 0; return value intentionally ignored here.
+  voe_base_->StartSend(0);
+
+  //  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_LOG("hi hi hi");
+  return 0;
+}
+
+//Thread invoking.  From the main code
+/*****
+ unsigned int threadID=0;
+ if ((HANDLE)_beginthreadex(NULL,
+ 0,
+ StartSend,
+ (void*)this,
+ 0,
+ &threadID) == NULL)
+ return false;
+ ****/
+
+#endif
+
+} // namespace voetest
+
+// Interactive entry point: prints a menu, reads the test-type selection from
+// stdin (and, for extended tests, the sub-suite selection), then dispatches
+// to runAutoTest(). For Standard it first runs the gtest-based automated
+// suite. Returns 0 on quit/invalid selection, otherwise runAutoTest()'s
+// result.
+int RunInManualMode(int argc, char** argv) {
+  using namespace voetest;
+
+  SubAPIManager apiMgr;
+  apiMgr.DisplayStatus();
+
+  printf("----------------------------\n");
+  printf("Select type of test\n\n");
+  printf(" (0)  Quit\n");
+  printf(" (1)  Standard test\n");
+  printf(" (2)  Extended test(s)...\n");
+  printf(" (3)  Stress test(s)...\n");
+  printf(" (4)  Unit test(s)...\n");
+  printf(" (5)  CPU & memory reference test [Windows]...\n");
+  printf("\n: ");
+
+  int selection(0);
+
+  // scanf result is stored in |dummy| only to silence unused-result
+  // warnings; on input failure |selection| stays 0 and we quit below.
+  dummy = scanf("%d", &selection);
+
+  ExtendedSelection extendedSel(XSEL_Invalid);
+
+  enum TestType testType(Invalid);
+
+  switch (selection) {
+    case 0:
+      return 0;
+    case 1:
+      testType = Standard;
+      break;
+    case 2:
+      testType = Extended;
+      // Re-prompt until a valid extended selection is made.
+      while (!apiMgr.GetExtendedMenuSelection(extendedSel))
+        continue;
+      break;
+    case 3:
+      testType = Stress;
+      break;
+    case 4:
+      testType = Unit;
+      break;
+    case 5:
+      testType = CPU;
+      break;
+    default:
+      TEST_LOG("Invalid selection!\n");
+      return 0;
+  }
+
+  if (testType == Standard) {
+    TEST_LOG("\n\n+++ Running gtest-rewritten standard tests first +++\n\n");
+
+    // Run the automated tests too in standard mode since we are gradually
+    // rewriting the standard test to be automated. Running this will give
+    // the standard suite the same completeness.
+    RunInAutomatedMode(argc, argv);
+  }
+
+  // Function that can be called from other entry functions.
+  return runAutoTest(testType, extendedSel);
+}
+
+// ----------------------------------------------------------------------------
+//                                       main
+// ----------------------------------------------------------------------------
+
+#if !defined(MAC_IPHONE)
+// Process entry point (not built for iPhone): "--automated" as the first
+// argument selects the non-interactive gtest runner, otherwise the manual
+// menu-driven mode is used.
+int main(int argc, char** argv) {
+  if (argc > 1 && std::string(argv[1]) == "--automated") {
+    // This function is defined in automated_mode.cc to avoid macro clashes
+    // with googletest (for instance the ASSERT_TRUE macro).
+    return RunInAutomatedMode(argc, argv);
+  }
+
+  return RunInManualMode(argc, argv);
+}
+#endif //#if !defined(MAC_IPHONE)
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_standard_test.h b/trunk/src/voice_engine/main/test/auto_test/voe_standard_test.h
new file mode 100644
index 0000000..ae0fb24
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_standard_test.h
@@ -0,0 +1,370 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_STANDARD_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_STANDARD_TEST_H
+
+#include <stdio.h>
+#include <string>
+
+#include "resource_manager.h"
+#include "voe_audio_processing.h"
+#include "voe_base.h"
+#include "voe_dtmf.h"
+#include "voe_errors.h"
+#include "voe_file.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_test_defines.h"
+#include "voe_test_interface.h"
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#include "voe_call_report.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+#include "voe_codec.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#include "voe_encryption.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#include "voe_external_media.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+#include "voe_hardware.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+#include "voe_network.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#include "voe_video_sync.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#include "voe_volume_control.h"
+#endif
+
+#ifdef _TEST_NETEQ_STATS_
+namespace webrtc {
+class CriticalSectionWrapper;
+class ThreadWrapper;
+class VoENetEqStats;
+}
+#endif
+
+#if defined(WEBRTC_ANDROID)
+extern char mobileLogMsg[640];
+#endif
+
+namespace voetest {
+
+void createSummary(VoiceEngine* ve);
+void prepareDelivery();
+
+class TestRtpObserver : public VoERTPObserver {
+ public:
+  TestRtpObserver();
+  virtual ~TestRtpObserver();
+  virtual void OnIncomingCSRCChanged(const int channel,
+                                     const unsigned int CSRC,
+                                     const bool added);
+  virtual void OnIncomingSSRCChanged(const int channel,
+                                     const unsigned int SSRC);
+  void Reset();
+ public:
+  unsigned int ssrc_[2];
+  unsigned int csrc_[2][2];  // Stores 2 CSRCs for each channel.
+  bool added_[2][2];
+  int size_[2];
+};
+
+class MyTraceCallback : public TraceCallback {
+ public:
+  void Print(const TraceLevel level, const char *traceString, const int length);
+};
+
+class MyDeadOrAlive : public VoEConnectionObserver {
+ public:
+  void OnPeriodicDeadOrAlive(const int channel, const bool alive);
+};
+
+class ErrorObserver : public VoiceEngineObserver {
+ public:
+  ErrorObserver();
+  void CallbackOnError(const int channel, const int errCode);
+ public:
+  int code;
+};
+
+class RtcpAppHandler : public VoERTCPObserver {
+ public:
+  void OnApplicationDataReceived(const int channel,
+                                 const unsigned char sub_type,
+                                 const unsigned int name,
+                                 const unsigned char* data,
+                                 const unsigned short length_in_bytes);
+  void Reset();
+  ~RtcpAppHandler() {}
+  unsigned short length_in_bytes_;
+  unsigned char data_[256];
+  unsigned char sub_type_;
+  unsigned int name_;
+};
+
+class DtmfCallback : public VoETelephoneEventObserver {
+ public:
+  int counter;
+  DtmfCallback() {
+    counter = 0;
+  }
+  virtual void OnReceivedTelephoneEventInband(int channel,
+                                              int eventCode,
+                                              bool endOfEvent) {
+    char msg[128];
+    if (endOfEvent)
+      sprintf(msg, "(event=%d, [END])", eventCode);
+    else
+      sprintf(msg, "(event=%d, [START])", eventCode);
+    TEST_LOG("%s", msg);
+    if (!endOfEvent)
+      counter++; // count start of events only
+    fflush(NULL);
+  }
+
+  virtual void OnReceivedTelephoneEventOutOfBand(int channel,
+                                                 int eventCode,
+                                                 bool endOfEvent) {
+    char msg[128];
+    if (endOfEvent)
+      sprintf(msg, "(event=%d, [END])", eventCode);
+    else
+      sprintf(msg, "(event=%d, [START])", eventCode);
+    TEST_LOG("%s", msg);
+    if (!endOfEvent)
+      counter++; // count start of events only
+    fflush(NULL);
+  }
+};
+
+class my_encryption : public Encryption {
+  void encrypt(int channel_no, unsigned char * in_data,
+               unsigned char * out_data, int bytes_in, int * bytes_out);
+  void decrypt(int channel_no, unsigned char * in_data,
+               unsigned char * out_data, int bytes_in, int * bytes_out);
+  void encrypt_rtcp(int channel_no, unsigned char * in_data,
+                    unsigned char * out_data, int bytes_in, int * bytes_out);
+  void decrypt_rtcp(int channel_no, unsigned char * in_data,
+                    unsigned char * out_data, int bytes_in, int * bytes_out);
+};
+
+class RxCallback : public VoERxVadCallback {
+ public:
+  RxCallback() :
+    _vadDecision(-1) {
+  }
+
+  virtual void OnRxVad(int, int vadDecision) {
+    char msg[128];
+    sprintf(msg, "RX VAD detected decision %d \n", vadDecision);
+    TEST_LOG("%s", msg);
+    _vadDecision = vadDecision;
+  }
+
+  int _vadDecision;
+};
+
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+class MyMedia : public VoEMediaProcess {
+ public:
+  virtual void Process(const int channel,
+                       const ProcessingTypes type,
+                       WebRtc_Word16 audio_10ms[],
+                       const int length,
+                       const int samplingFreqHz,
+                       const bool stereo);
+ private:
+  int f;
+};
+#endif
+
+class SubAPIManager {
+ public:
+  SubAPIManager()
+    : _base(true),
+      _callReport(false),
+      _codec(false),
+      _dtmf(false),
+      _encryption(false),
+      _externalMedia(false),
+      _file(false),
+      _hardware(false),
+      _netEqStats(false),
+      _network(false),
+      _rtp_rtcp(false),
+      _videoSync(false),
+      _volumeControl(false),
+      _apm(false),
+      _xsel(XSEL_Invalid) {
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+      _callReport = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+      _codec = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+      _dtmf = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+      _encryption = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+      _externalMedia = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+      _file = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+      _hardware = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+      _netEqStats = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+      _network = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+      _rtp_rtcp = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+      _videoSync = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+      _volumeControl = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+      _apm = true;
+#endif
+  }
+
+  void DisplayStatus() const;
+  bool GetExtendedMenuSelection(ExtendedSelection& sel);
+
+ private:
+  bool _base, _callReport, _codec, _dtmf, _encryption;
+  bool _externalMedia, _file, _hardware;
+  bool _netEqStats, _network, _rtp_rtcp, _videoSync, _volumeControl, _apm;
+  ExtendedSelection _xsel;
+};
+
+class VoETestManager {
+ public:
+  VoETestManager();
+  ~VoETestManager();
+
+  // Must be called after construction.
+  bool Init();
+
+  void GetInterfaces();
+  int ReleaseInterfaces();
+  int DoStandardTest();
+
+  const char* AudioFilename() const {
+    const std::string& result = resource_manager_.long_audio_file_path();
+    if (result.length() == 0) {
+      TEST_LOG("ERROR: Failed to open input file!");
+    }
+    return result.c_str();
+  }
+
+  VoiceEngine* VoiceEnginePtr() const {
+    return voice_engine_;
+  }
+  VoEBase* BasePtr() const {
+    return voe_base_;
+  }
+  VoECodec* CodecPtr() const {
+    return voe_codec_;
+  }
+  VoEVolumeControl* VolumeControlPtr() const {
+    return voe_volume_control_;
+  }
+  VoEDtmf* DtmfPtr() const {
+    return voe_dtmf_;
+  }
+  VoERTP_RTCP* RTP_RTCPPtr() const {
+    return voe_rtp_rtcp_;
+  }
+  VoEAudioProcessing* APMPtr() const {
+    return voe_apm_;
+  }
+
+  VoENetwork* NetworkPtr() const {
+    return voe_network_;
+  }
+
+  VoEFile* FilePtr() const {
+    return voe_file_;
+  }
+
+  VoEHardware* HardwarePtr() const {
+    return voe_hardware_;
+  }
+
+  VoEVideoSync* VideoSyncPtr() const {
+    return voe_vsync_;
+  }
+
+  VoEEncryption* EncryptionPtr() const {
+    return voe_encrypt_;
+  }
+
+  VoEExternalMedia* ExternalMediaPtr() const {
+    return voe_xmedia_;
+  }
+
+  VoECallReport* CallReportPtr() const {
+    return voe_call_report_;
+  }
+
+#ifdef _TEST_NETEQ_STATS_
+  VoENetEqStats* NetEqStatsPtr() const {
+    return voe_neteq_stats_;
+  }
+
+#endif
+
+ private:
+  int SetUp(ErrorObserver* error_observer);
+  int TestStartStreaming(FakeExternalTransport& channel0_transport);
+  int TestStartPlaying();
+
+  bool                   initialized_;
+
+  VoiceEngine*           voice_engine_;
+  VoEBase*               voe_base_;
+  VoECallReport*         voe_call_report_;
+  VoECodec*              voe_codec_;
+  VoEDtmf*               voe_dtmf_;
+  VoEEncryption*         voe_encrypt_;
+  VoEExternalMedia*      voe_xmedia_;
+  VoEFile*               voe_file_;
+  VoEHardware*           voe_hardware_;
+  VoENetwork*            voe_network_;
+#ifdef _TEST_NETEQ_STATS_
+  VoENetEqStats*         voe_neteq_stats_;
+#endif
+  VoERTP_RTCP*           voe_rtp_rtcp_;
+  VoEVideoSync*          voe_vsync_;
+  VoEVolumeControl*      voe_volume_control_;
+  VoEAudioProcessing*    voe_apm_;
+
+  ResourceManager        resource_manager_;
+};
+
+} // namespace voetest
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_STANDARD_TEST_H
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_stress_test.cc b/trunk/src/voice_engine/main/test/auto_test/voe_stress_test.cc
new file mode 100644
index 0000000..b7098ec
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_stress_test.cc
@@ -0,0 +1,415 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//       Some ideas for improvement:
+//       Break out common init and maybe terminate to separate function(s).
+//       How much trace should we have enabled?
+//       API error counter, to print info and return -1 if any error.
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <cassert>
+#if defined(_WIN32)
+#include <conio.h>
+#endif
+
+#include "voe_stress_test.h"
+#include "voe_standard_test.h"
+
+#include "../../source/voice_engine_defines.h"  // defines build macros
+#include "thread_wrapper.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#define VALIDATE_STRESS(expr)                                   \
+    if (expr)                                                   \
+    {                                                           \
+        printf("Error at line: %i, %s \n", __LINE__, #expr);    \
+        printf("Error code: %i \n", base->LastError());  \
+    }
+
+#ifdef _WIN32
+// Pause if supported
+#define PAUSE_OR_SLEEP(x) PAUSE;
+#else
+// Sleep a bit instead if pause not supported
+#define PAUSE_OR_SLEEP(x) SLEEP(x);
+#endif
+
+extern char* GetFilename(char* filename);
+extern const char* GetFilename(const char* filename);
+extern int GetResource(char* resource, char* dest, int destLen);
+extern char* GetResource(char* resource);
+extern const char* GetResource(const char* resource);
+
+const char* VoEStressTest::_key = "====YUtFWRAAAAADBtIHgAAAAAEAAAAcAAAAAQBHU0ds"
+  "b2JhbCBJUCBTb3VuZAAC\nAAAAIwAAAExpY2Vuc2VkIHRvIE5vcnRlbCBOZXR3cm9rcwAAAAA"
+  "xAAAAZxZ7/u0M\niFYyTwSwko5Uutf7mh8S0O4rYZYTFidbzQeuGonuL17F/2oD/2pfDp3jL4"
+  "Rf3z/A\nnlJsEJgEtASkDNFuwLILjGY0pzjjAYQp3pCl6z6k2MtE06AirdjGLYCjENpq/opX"
+  "\nOrs3sIuwdYK5va/aFcsjBDmlsGCUM48RDYG9s23bIHYafXUC4ofOaubbZPWiPTmL\nEVJ8WH"
+  "4F9pgNjALc14oJXfON7r/3\n=EsLx";
+
+int VoEStressTest::DoTest() {
+  int test(-1);
+  while (test != 0) {
+    test = MenuSelection();
+    switch (test) {
+      case 0:
+        // Quit stress test
+        break;
+      case 1:
+        // All tests
+        StartStopTest();
+        CreateDeleteChannelsTest();
+        MultipleThreadsTest();
+        break;
+      case 2:
+        StartStopTest();
+        break;
+      case 3:
+        CreateDeleteChannelsTest();
+        break;
+      case 4:
+        MultipleThreadsTest();
+        break;
+      default:
+        // Should not be possible
+        printf("Invalid selection! (Test code error)\n");
+        assert(false);
+    } // switch
+  } // while
+
+  return 0;
+}
+
+int VoEStressTest::MenuSelection() {
+  printf("------------------------------------------------\n");
+  printf("Select stress test\n\n");
+  printf(" (0)  Quit\n");
+  printf(" (1)  All\n");
+  printf("- - - - - - - - - - - - - - - - - - - - - - - - \n");
+  printf(" (2)  Start/stop\n");
+  printf(" (3)  Create/delete channels\n");
+  printf(" (4)  Multiple threads\n");
+
+  const int maxMenuSelection = 4;
+  int selection(-1);
+
+  while ((selection < 0) || (selection > maxMenuSelection)) {
+    printf("\n: ");
+    int retval = scanf("%d", &selection);
+    if ((retval != 1) || (selection < 0) || (selection > maxMenuSelection)) {
+      printf("Invalid selection!\n");
+    }
+  }
+
+  return selection;
+}
+
+int VoEStressTest::StartStopTest() {
+  printf("------------------------------------------------\n");
+  printf("Running start/stop test\n");
+  printf("------------------------------------------------\n");
+
+  printf("\nNOTE: this thest will fail after a while if Core audio is used\n");
+  printf("because MS returns AUDCLNT_E_CPUUSAGE_EXCEEDED (VoE Error 10013).\n");
+
+  // Get sub-API pointers
+  VoEBase* base = _mgr.BasePtr();
+
+  // Set trace
+  //     VALIDATE_STRESS(base->SetTraceFileName(
+  //         GetFilename("VoEStressTest_StartStop_trace.txt")));
+  //     VALIDATE_STRESS(base->SetDebugTraceFileName(
+  //         GetFilename("VoEStressTest_StartStop_trace_debug.txt")));
+  //     VALIDATE_STRESS(base->SetTraceFilter(kTraceStateInfo |
+  //         kTraceWarning | kTraceError |
+  //         kTraceCritical | kTraceApiCall |
+  //         kTraceMemory | kTraceInfo));
+  VALIDATE_STRESS(base->Init());
+  VALIDATE_STRESS(base->CreateChannel());
+
+  ///////////// Start test /////////////
+
+  int numberOfLoops(2000);
+  int loopSleep(200);
+  int i(0);
+  int markInterval(20);
+
+  printf("Running %d loops with %d ms sleep. Mark every %d loop. \n",
+         numberOfLoops, loopSleep, markInterval);
+  printf("Test will take approximately %d minutes. \n",
+         numberOfLoops * loopSleep / 1000 / 60 + 1);
+
+  for (i = 0; i < numberOfLoops; ++i) {
+    VALIDATE_STRESS(base->SetLocalReceiver(0, 4800));
+    VALIDATE_STRESS(base->SetSendDestination(0, 4800, "127.0.0.1"));
+    VALIDATE_STRESS(base->StartReceive(0));
+    VALIDATE_STRESS(base->StartPlayout(0));
+    VALIDATE_STRESS(base->StartSend(0));
+    if (!(i % markInterval))
+      MARK();
+    SLEEP(loopSleep);
+    VALIDATE_STRESS(base->StopSend(0));
+    VALIDATE_STRESS(base->StopPlayout(0));
+    VALIDATE_STRESS(base->StopReceive(0));
+  }
+  ANL();
+
+  VALIDATE_STRESS(base->SetLocalReceiver(0, 4800));
+  VALIDATE_STRESS(base->SetSendDestination(0, 4800, "127.0.0.1"));
+  VALIDATE_STRESS(base->StartReceive(0));
+  VALIDATE_STRESS(base->StartPlayout(0));
+  VALIDATE_STRESS(base->StartSend(0));
+  printf("Verify that audio is good. \n");
+  PAUSE_OR_SLEEP(20000);
+  VALIDATE_STRESS(base->StopSend(0));
+  VALIDATE_STRESS(base->StopPlayout(0));
+  VALIDATE_STRESS(base->StopReceive(0));
+
+  ///////////// End test /////////////
+
+
+  // Terminate
+  VALIDATE_STRESS(base->DeleteChannel(0));
+  VALIDATE_STRESS(base->Terminate());
+
+  printf("Test finished \n");
+
+  return 0;
+}
+
+int VoEStressTest::CreateDeleteChannelsTest() {
+  printf("------------------------------------------------\n");
+  printf("Running create/delete channels test\n");
+  printf("------------------------------------------------\n");
+
+  // Get sub-API pointers
+  VoEBase* base = _mgr.BasePtr();
+
+  // Set trace
+  //     VALIDATE_STRESS(base->SetTraceFileName(
+  //          GetFilename("VoEStressTest_CreateChannels_trace.txt")));
+  //     VALIDATE_STRESS(base->SetDebugTraceFileName(
+  //          GetFilename("VoEStressTest_CreateChannels_trace_debug.txt")));
+  //     VALIDATE_STRESS(base->SetTraceFilter(kTraceStateInfo |
+  //         kTraceWarning | kTraceError |
+  //         kTraceCritical | kTraceApiCall |
+  //         kTraceMemory | kTraceInfo));
+  VALIDATE_STRESS(base->Init());
+
+  ///////////// Start test /////////////
+
+  int numberOfLoops(10000);
+  int loopSleep(10);
+  int i(0);
+  int markInterval(200);
+
+  printf("Running %d loops with %d ms sleep. Mark every %d loop. \n",
+         numberOfLoops, loopSleep, markInterval);
+  printf("Test will take approximately %d minutes. \n",
+         numberOfLoops * loopSleep / 1000 / 60 + 1);
+
+  //       Some possible extensions include:
+  //       Different sleep times (fixed or random) or zero.
+  //       Start call on all or some channels.
+  //       Two parts: first have a slight overweight to creating channels,
+  //       then to deleting. (To ensure we hit max channels and go to zero.)
+  //       Make sure audio is OK after test has finished.
+
+  // Set up, start with maxChannels/2 channels
+  const int maxChannels = base->MaxNumOfChannels();
+  VALIDATE_STRESS(maxChannels < 1); // Should always have at least one channel
+  bool* channelState = new bool[maxChannels];
+  memset(channelState, 0, maxChannels * sizeof(bool));
+  int channel(0);
+  int noOfActiveChannels(0);
+  for (i = 0; i < (maxChannels / 2); ++i) {
+    channel = base->CreateChannel();
+    VALIDATE_STRESS(channel < 0);
+    if (channel >= 0) {
+      channelState[channel] = true;
+      ++noOfActiveChannels;
+    }
+  }
+  srand((unsigned int) time(NULL));
+  bool action(false);
+  double rnd(0.0);
+  int res(0);
+
+  // Create/delete channels, randomly choosing create or delete on each loop
+  for (i = 0; i < numberOfLoops; ++i) {
+    // Randomize action (create or delete channel)
+    action = rand() <= (RAND_MAX / 2);
+    if (action) {
+      if (noOfActiveChannels < maxChannels) {
+        // Create new channel
+        channel = base->CreateChannel();
+        VALIDATE_STRESS(channel < 0);
+        if (channel >= 0) {
+          channelState[channel] = true;
+          ++noOfActiveChannels;
+        }
+      }
+    } else {
+      if (noOfActiveChannels > 0) {
+        // Delete random channel that's created [0, maxChannels - 1]
+        do {
+          rnd = static_cast<double> (rand());
+          channel = static_cast<int> (rnd /
+                                      (static_cast<double> (RAND_MAX) + 1.0f) *
+                                      maxChannels);
+        } while (!channelState[channel]); // Must find a created channel
+
+        res = base->DeleteChannel(channel);
+        VALIDATE_STRESS(0 != res);
+        if (0 == res) {
+          channelState[channel] = false;
+          --noOfActiveChannels;
+        }
+      }
+    }
+
+    if (!(i % markInterval))
+      MARK();
+    SLEEP(loopSleep);
+  }
+  ANL();
+
+  delete[] channelState;
+
+  ///////////// End test /////////////
+
+
+  // Terminate
+  VALIDATE_STRESS(base->Terminate()); // Deletes all channels
+
+  printf("Test finished \n");
+
+  return 0;
+}
+
+int VoEStressTest::MultipleThreadsTest() {
+  printf("------------------------------------------------\n");
+  printf("Running multiple threads test\n");
+  printf("------------------------------------------------\n");
+
+  // Get sub-API pointers
+  VoEBase* base = _mgr.BasePtr();
+
+  // Set trace
+  //     VALIDATE_STRESS(base->SetTraceFileName(
+  //        GetFilename("VoEStressTest_MultipleThreads_trace.txt")));
+  //     VALIDATE_STRESS(base->SetDebugTraceFileName(
+  //        GetFilename("VoEStressTest_MultipleThreads_trace_debug.txt")));
+  //     VALIDATE_STRESS(base->SetTraceFilter(kTraceStateInfo |
+  //        kTraceWarning | kTraceError |
+  //        kTraceCritical | kTraceApiCall |
+  //        kTraceMemory | kTraceInfo));
+
+  // Init
+  VALIDATE_STRESS(base->Init());
+  VALIDATE_STRESS(base->CreateChannel());
+
+  ///////////// Start test /////////////
+
+  int numberOfLoops(10000);
+  int loopSleep(0);
+  int i(0);
+  int markInterval(1000);
+
+  printf("Running %d loops with %d ms sleep. Mark every %d loop. \n",
+         numberOfLoops, loopSleep, markInterval);
+  printf("Test will take approximately %d minutes. \n",
+         numberOfLoops * loopSleep / 1000 / 60 + 1);
+
+  srand((unsigned int) time(NULL));
+  int rnd(0);
+
+  // Start extra thread
+  const char* threadName = "StressTest Extra API Thread";
+  _ptrExtraApiThread = ThreadWrapper::CreateThread(RunExtraApi, this,
+                                                   kNormalPriority, threadName);
+  unsigned int id(0);
+  VALIDATE_STRESS(!_ptrExtraApiThread->Start(id));
+
+  //       Some possible extensions include:
+  //       Add more API calls to randomize
+  //       More threads
+  //       Different sleep times (fixed or random).
+  //       Make sure audio is OK after test has finished.
+
+  // Call random API functions here and in extra thread, ignore any error
+  for (i = 0; i < numberOfLoops; ++i) {
+    // This part should be equal to the marked part in the extra thread
+    // --- BEGIN ---
+    rnd = rand();
+    if (rnd < (RAND_MAX / 2)) {
+      // Start playout
+      base->StartPlayout(0);
+    } else {
+      // Stop playout
+      base->StopPlayout(0);
+    }
+    // --- END ---
+
+    if (!(i % markInterval))
+      MARK();
+    SLEEP(loopSleep);
+  }
+  ANL();
+
+  // Stop extra thread
+  VALIDATE_STRESS(!_ptrExtraApiThread->Stop());
+  delete _ptrExtraApiThread;
+
+  ///////////// End test /////////////
+
+  // Terminate
+  VALIDATE_STRESS(base->Terminate()); // Deletes all channels
+
+  printf("Test finished \n");
+
+  return 0;
+}
+
+// Thread functions
+
+bool VoEStressTest::RunExtraApi(void* ptr) {
+  return static_cast<VoEStressTest*> (ptr)->ProcessExtraApi();
+}
+
+bool VoEStressTest::ProcessExtraApi() {
+  // Prepare
+  VoEBase* base = _mgr.BasePtr();
+  int rnd(0);
+
+  // Call random API function, ignore any error
+
+  // This part should be equal to the marked part in the main thread
+  // --- BEGIN ---
+  rnd = rand();
+  if (rnd < (RAND_MAX / 2)) {
+    // Start playout
+    base->StartPlayout(0);
+  } else {
+    // Stop playout
+    base->StopPlayout(0);
+  }
+  // --- END ---
+
+  return true;
+}
+
+} //  namespace voetest
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_stress_test.h b/trunk/src/voice_engine/main/test/auto_test/voe_stress_test.h
new file mode 100644
index 0000000..b3a418c
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_stress_test.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
+
+namespace webrtc {
+class ThreadWrapper;
+}
+
+namespace voetest {
+// TODO(andrew): using directives are not permitted.
+using namespace webrtc;
+
+class VoETestManager;
+
+class VoEStressTest {
+ public:
+  VoEStressTest(VoETestManager& mgr) :
+    _mgr(mgr), _ptrExtraApiThread(NULL) {
+  }
+  ~VoEStressTest() {
+  }
+  int DoTest();
+
+ private:
+  int MenuSelection();
+  int StartStopTest();
+  int CreateDeleteChannelsTest();
+  int MultipleThreadsTest();
+
+  static bool RunExtraApi(void* ptr);
+  bool ProcessExtraApi();
+
+  VoETestManager& _mgr;
+  static const char* _key;
+
+  ThreadWrapper* _ptrExtraApiThread;
+};
+
+} //  namespace voetest
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_test_defines.h b/trunk/src/voice_engine/main/test/auto_test/voe_test_defines.h
new file mode 100644
index 0000000..c9c103a
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_test_defines.h
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_TEST_DEFINES_H
+#define WEBRTC_VOICE_ENGINE_VOE_TEST_DEFINES_H
+
+// Read WEBRTC_VOICE_ENGINE_XXX_API compiler flags
+#include "engine_configurations.h"
+
+#ifdef WEBRTC_ANDROID
+#include <android/log.h>
+#define ANDROID_LOG_TAG "VoiceEngine Auto Test"
+#define TEST_LOG(...) \
+    __android_log_print(ANDROID_LOG_DEBUG, ANDROID_LOG_TAG, __VA_ARGS__)
+#define TEST_LOG_ERROR(...) \
+    __android_log_print(ANDROID_LOG_ERROR, ANDROID_LOG_TAG, __VA_ARGS__)
+#else
+#define TEST_LOG printf
+#define TEST_LOG_ERROR printf
+#define TEST_LOG_FLUSH fflush(NULL)
+#endif
+
+// Select the tests to execute, list order below is same as they will be
+// executed. Note that all settings below will be overridden by sub-API
+// settings in engine_configurations.h.
+#define _TEST_BASE_
+#define _TEST_RTP_RTCP_
+#define _TEST_HARDWARE_
+#define _TEST_CODEC_
+#define _TEST_DTMF_
+#define _TEST_VOLUME_
+#define _TEST_AUDIO_PROCESSING_
+#define _TEST_FILE_
+#define _TEST_NETWORK_
+#define _TEST_CALL_REPORT_
+#define _TEST_VIDEO_SYNC_
+#define _TEST_ENCRYPT_
+#define _TEST_NETEQ_STATS_
+#define _TEST_XMEDIA_
+
+#define TESTED_AUDIO_LAYER kAudioPlatformDefault
+//#define TESTED_AUDIO_LAYER kAudioLinuxPulse
+
+// #define _ENABLE_VISUAL_LEAK_DETECTOR_ // Enables VLD to find memory leaks
+// #define _ENABLE_IPV6_TESTS_      // Enables IPv6 tests in network xtest
+// #define _USE_EXTENDED_TRACE_     // Adds unique trace files for extended test
+// #define _MEMORY_TEST_
+
+// Enable this when running instrumentation of some kind to exclude tests
+// that will not pass due to slowed down execution.
+// #define _INSTRUMENTATION_TESTING_
+
+// Exclude (override) API tests given preprocessor settings in
+// engine_configurations.h
+#ifndef WEBRTC_VOICE_ENGINE_CODEC_API
+#undef _TEST_CODEC_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#undef _TEST_VOLUME_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_API
+#undef _TEST_DTMF_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+#undef _TEST_RTP_RTCP_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+#undef _TEST_AUDIO_PROCESSING_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_FILE_API
+#undef _TEST_FILE_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#undef _TEST_VIDEO_SYNC_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#undef _TEST_ENCRYPT_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_HARDWARE_API
+#undef _TEST_HARDWARE_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#undef _TEST_XMEDIA_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_NETWORK_API
+#undef _TEST_NETWORK_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#undef _TEST_NETEQ_STATS_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#undef _TEST_CALL_REPORT_
+#endif
+
+// Some parts can cause problems while running Insure
+#ifdef __INSURE__
+#define _INSTRUMENTATION_TESTING_
+#undef WEBRTC_SRTP
+#endif
+
+// Time in ms to test each packet size for each codec
+#define CODEC_TEST_TIME 400
+
+#define MARK() TEST_LOG("."); fflush(NULL);             // Add test marker
+#define ANL() TEST_LOG("\n")                            // Add New Line
+#define AOK() TEST_LOG("[Test is OK]"); fflush(NULL);   // Add OK
+#if defined(_WIN32)
+#define PAUSE                                      \
+    {                                               \
+        TEST_LOG("Press any key to continue...");   \
+        _getch();                                   \
+        TEST_LOG("\n");                             \
+    }
+#else
+#define PAUSE                                          \
+    {                                                   \
+        TEST_LOG("Continuing (pause not supported)\n"); \
+    }
+#endif
+
+#define TEST(s)                         \
+    {                                   \
+        TEST_LOG("Testing: %s", #s);    \
+    }                                   \
+
+#ifdef _INSTRUMENTATION_TESTING_
+// Don't stop execution if error occurs
+#define TEST_MUSTPASS(expr)                                               \
+    {                                                                     \
+        if ((expr))                                                       \
+        {                                                                 \
+            TEST_LOG_ERROR("Error at line:%i, %s \n",__LINE__, #expr);    \
+            TEST_LOG_ERROR("Error code: %i\n",voe_base_->LastError());    \
+        }                                                                 \
+    }
+#define TEST_ERROR(code)                                                \
+    {                                                                   \
+        int err = voe_base_->LastError();                               \
+        if (err != code)                                                \
+        {                                                               \
+            TEST_LOG_ERROR("Invalid error code (%d, should be %d) at line %d\n",
+                           code, err, __LINE__);
+}
+}
+#else
+#define ASSERT_TRUE(expr) TEST_MUSTPASS(!(expr))
+#define ASSERT_FALSE(expr) TEST_MUSTPASS(expr)
+#define TEST_MUSTFAIL(expr) TEST_MUSTPASS(!((expr) == -1))
+#define TEST_MUSTPASS(expr)                                              \
+    {                                                                    \
+        if ((expr))                                                      \
+        {                                                                \
+            TEST_LOG_ERROR("\nError at line:%i, %s \n",__LINE__, #expr); \
+            TEST_LOG_ERROR("Error code: %i\n", voe_base_->LastError());  \
+            PAUSE                                                        \
+            return -1;                                                   \
+        }                                                                \
+    }
+#define TEST_ERROR(code) \
+    {																                                         \
+      int err = voe_base_->LastError();                                      \
+      if (err != code)                                                       \
+      {                                                                      \
+        TEST_LOG_ERROR("Invalid error code (%d, should be %d) at line %d\n", \
+                       err, code, __LINE__);                                 \
+        PAUSE                                                                \
+        return -1;                                                           \
+      }															                                         \
+    }
+#endif  // #ifdef _INSTRUMENTATION_TESTING_
+#define EXCLUDE()                                                   \
+    {                                                               \
+        TEST_LOG("\n>>> Excluding test at line: %i <<<\n\n",__LINE__);  \
+    }
+
+#define INCOMPLETE()                                                \
+    {                                                               \
+        TEST_LOG("\n>>> Incomplete test at line: %i <<<\n\n",__LINE__);  \
+    }
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_TEST_DEFINES_H
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_test_interface.h b/trunk/src/voice_engine/main/test/auto_test/voe_test_interface.h
new file mode 100644
index 0000000..87b37a4
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_test_interface.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Interface for starting test
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_TEST_INTERFACE_H
+#define WEBRTC_VOICE_ENGINE_VOE_TEST_INTERFACE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class VoENetwork;
+}
+
+namespace voetest {
+// TODO(andrew): using directives are not permitted.
+using namespace webrtc;
+
+// TestType enumerator
+enum TestType {
+  Invalid = -1, Standard = 0, Extended = 1, Stress = 2, Unit = 3, CPU = 4
+};
+
+// ExtendedSelection enumerator
+enum ExtendedSelection {
+  XSEL_Invalid = -1,
+  XSEL_None = 0,
+  XSEL_All,
+  XSEL_Base,
+  XSEL_CallReport,
+  XSEL_Codec,
+  XSEL_DTMF,
+  XSEL_Encryption,
+  XSEL_ExternalMedia,
+  XSEL_File,
+  XSEL_Mixing,
+  XSEL_Hardware,
+  XSEL_NetEqStats,
+  XSEL_Network,
+  XSEL_RTP_RTCP,
+  XSEL_VideoSync,
+  XSEL_VolumeControl,
+  XSEL_AudioProcessing,
+};
+
+// ----------------------------------------------------------------------------
+//  External transport (Transport) — fake transport used by the auto tests
+// ----------------------------------------------------------------------------
+
+class FakeExternalTransport : public Transport {
+ public:
+  FakeExternalTransport(VoENetwork* ptr);
+  virtual ~FakeExternalTransport();
+  VoENetwork* my_network_;
+  int SendPacket(int channel, const void *data, int len);
+  int SendRTCPPacket(int channel, const void *data, int len);
+  void SetDelayStatus(bool enabled, unsigned int delayInMs = 100);
+ private:
+  static bool Run(void* ptr);
+  bool Process();
+ private:
+  ThreadWrapper*          thread_;
+  CriticalSectionWrapper* lock_;
+  EventWrapper*           event_;
+ private:
+  unsigned char           packet_buffer_[1612];
+  int                     length_;
+  int                     channel_;
+  bool                    delay_is_enabled_;
+  int                     delay_time_in_ms_;
+};
+
+// Main test function
+int runAutoTest(TestType testType, ExtendedSelection extendedSel);
+
+} //  namespace voetest
+#endif // WEBRTC_VOICE_ENGINE_VOE_TEST_INTERFACE_H
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_unit_test.cc b/trunk/src/voice_engine/main/test/auto_test/voe_unit_test.cc
new file mode 100644
index 0000000..1aa9e10
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_unit_test.cc
@@ -0,0 +1,1083 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <cassert>
+#if defined(_WIN32)
+#include <conio.h>
+#endif
+
+#include "voe_unit_test.h"
+
+#include "../../source/voice_engine_defines.h"
+#include "thread_wrapper.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#define CHECK(expr)                                             \
+    if (expr)                                                   \
+    {                                                           \
+        printf("Error at line: %i, %s \n", __LINE__, #expr);    \
+        printf("Error code: %i \n", base->LastError());  \
+		PAUSE												    \
+        return -1;                                              \
+    }
+
+extern char* GetFilename(char* filename);
+extern const char* GetFilename(const char* filename);
+extern int GetResource(char* resource, char* dest, int destLen);
+extern char* GetResource(char* resource);
+extern const char* GetResource(const char* resource);
+
+const char* VoEUnitTest::_key = "====YUtFWRAAAAADBtIHgAAAAAEAAAAcAAAAAQBHU0dsb2"
+  "JhbCBJUCBTb3VuZAAC\nAAAAIwAAAExpY2Vuc2VkIHRvIE5vcnRlbCBOZXR3cm9rcwAAAAAxA"
+  "AAAZxZ7/u0M\niFYyTwSwko5Uutf7mh8S0O4rYZYTFidbzQeuGonuL17F/2oD/2pfDp3jL4Rf"
+  "3z/A\nnlJsEJgEtASkDNFuwLILjGY0pzjjAYQp3pCl6z6k2MtE06AirdjGLYCjENpq/opX\nO"
+  "rs3sIuwdYK5va/aFcsjBDmlsGCUM48RDYG9s23bIHYafXUC4ofOaubbZPWiPTmL\nEVJ8WH4F"
+  "9pgNjALc14oJXfON7r/3\n=EsLx";
+
+// ----------------------------------------------------------------------------
+//                       >>>  R E A D M E  F I R S T <<<
+// ----------------------------------------------------------------------------
+
+// 1) The user must ensure that the following codecs are included in VoE:
+//
+// - L16
+// - G.729
+// - G.722.1C
+
+// 2) It is also possible to modify the simulation time for each individual test
+//
+const int dTBetweenEachTest = 4000;
+
+// ----------------------------------------------------------------------------
+//  Encrypt: pure pass-through by default; can emulate a stereo RTP stream
+// ----------------------------------------------------------------------------
+
+void VoEUnitTest::encrypt(int channel_no, unsigned char * in_data,
+                          unsigned char * out_data, int bytes_in,
+                          int * bytes_out) {
+  int i;
+
+  if (!_extOnOff) {
+    // no stereo emulation <=> pure bypass (packet is copied unchanged)
+    for (i = 0; i < bytes_in; i++)
+      out_data[i] = in_data[i];
+    *bytes_out = bytes_in;
+  } else if (_extOnOff && (_extBitsPerSample == 16)) {
+    // stereo emulation (sample based, 2 bytes per sample)
+
+    const int nBytesPayload = bytes_in - 12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    short* ptrIn = (short*) &in_data[12];
+    short* ptrOut = (short*) &out_data[12];
+
+    // duplicate each 16-bit sample into a left/right pair
+    for (i = 0; i < nBytesPayload / 2; i++) {
+      // produce two output samples for each input sample
+      *ptrOut++ = *ptrIn; // left sample
+      *ptrOut++ = *ptrIn; // right sample
+      ptrIn++;
+    }
+
+    *bytes_out = 12 + 2 * nBytesPayload;
+  } else if (_extOnOff && (_extBitsPerSample == 8)) {
+    // stereo emulation (sample based, 1 byte per sample)
+
+    const int nBytesPayload = bytes_in - 12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    unsigned char* ptrIn = (unsigned char*) &in_data[12];
+    unsigned char* ptrOut = (unsigned char*) &out_data[12];
+
+    // duplicate each 8-bit sample into a left/right pair
+    for (i = 0; i < nBytesPayload; i++) {
+      // produce two output samples for each input sample
+      *ptrOut++ = *ptrIn; // left sample
+      *ptrOut++ = *ptrIn; // right sample
+      ptrIn++;
+    }
+
+    *bytes_out = 12 + 2 * nBytesPayload;
+  } else if (_extOnOff && (_extBitsPerSample == -1)) {
+    // stereo emulation (frame based: the payload is sent twice, back to back)
+
+    const int nBytesPayload = bytes_in - 12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    unsigned char* ptrIn = (unsigned char*) &in_data[12];
+    unsigned char* ptrOut = (unsigned char*) &out_data[12];
+
+    // left channel
+    for (i = 0; i < nBytesPayload; i++) {
+      *ptrOut++ = *ptrIn++;
+    }
+
+    ptrIn = (unsigned char*) &in_data[12];
+
+    // right channel
+    for (i = 0; i < nBytesPayload; i++) {
+      *ptrOut++ = *ptrIn++;
+    }
+
+    *bytes_out = 12 + 2 * nBytesPayload;
+  }
+}
+
+void VoEUnitTest::decrypt(int channel_no, unsigned char * in_data,
+                          unsigned char * out_data, int bytes_in,
+                          int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = in_data[i];
+  *bytes_out = bytes_in;
+}
+
+void VoEUnitTest::encrypt_rtcp(int channel_no, unsigned char * in_data,
+                               unsigned char * out_data, int bytes_in,
+                               int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = in_data[i];
+  *bytes_out = bytes_in;
+}
+
+void VoEUnitTest::decrypt_rtcp(int channel_no, unsigned char * in_data,
+                               unsigned char * out_data, int bytes_in,
+                               int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = in_data[i];
+  *bytes_out = bytes_in;
+}
+
+void VoEUnitTest::SetStereoExternalEncryption(int channel, bool onOff,
+                                              int bitsPerSample) {
+  _extOnOff = onOff;
+  _extChannel = channel;
+  _extBitsPerSample = bitsPerSample;
+}
+
+// VoEVEMediaProcess
+MyMedia mpobj;
+
+// ----------------------------------------------------------------------------
+//                               VoEUnitTest
+// ----------------------------------------------------------------------------
+
+VoEUnitTest::VoEUnitTest(VoETestManager& mgr) :
+  _mgr(mgr), _extOnOff(false), _extBitsPerSample(-1) {
+  for (int i = 0; i < 32; i++) {
+    _listening[i] = false;
+    _playing[i] = false;
+    _sending[i] = false;
+  }
+}
+
+// ----------------------------------------------------------------------------
+//  DoTest — menu loop: run the selected unit test until quit (0) or error (-1)
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::DoTest() {
+  int test(-1);
+  int ret(0);
+  while ((test != 0) && (ret != -1)) {
+    test = MenuSelection();
+    switch (test) {
+      case 0:
+        // 0 => leave the unit-test menu loop
+        break;
+      case 1:
+        ret = MixerTest();
+        break;
+      case 2:
+        ret = MixerTest();
+        break;
+      default:
+        // Should not be possible
+        printf("Invalid selection! (Test code error)\n");
+        assert(false);
+    } // switch
+  } // while
+
+  return ret;
+}
+
+// ----------------------------------------------------------------------------
+//  MenuSelection — print the menu and read a valid choice [0..2] from stdin
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::MenuSelection() {
+  printf("------------------------------------------------\n");
+  printf("Select unit test\n\n");
+  printf(" (0)  Quit\n");
+  printf(" (1)  All\n");
+  printf("- - - - - - - - - - - - - - - - - - - - - - - - \n");
+  printf(" (2)  Mixer\n");
+
+  const int maxMenuSelection = 2;
+  int selection(-1);
+
+  while ((selection < 0) || (selection > maxMenuSelection)) {
+    printf("\n: ");
+    int retval = scanf("%d", &selection); // NOTE(review): a non-numeric token is never consumed, so this loop can spin forever — confirm
+    if ((retval != 1) || (selection < 0) || (selection > maxMenuSelection)) {
+      printf("Invalid selection!\n");
+    }
+  }
+
+  return selection;
+}
+
+// ----------------------------------------------------------------------------
+//  StartMedia — start RX/playout/TX and optional file sources on a channel
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::StartMedia(int channel, int rtpPort, bool listen, bool playout,
+                            bool send, bool fileAsMic, bool localFile) {
+  VoEBase* base = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+
+  _listening[channel] = false;
+  _playing[channel] = false;
+  _sending[channel] = false;
+
+  CHECK(base->SetLocalReceiver(channel, rtpPort));
+  CHECK(base->SetSendDestination(channel, rtpPort, "127.0.0.1"));
+  if (listen) {
+    _listening[channel] = true;
+    CHECK(base->StartReceive(channel));
+  }
+  if (playout) {
+    _playing[channel] = true;
+    CHECK(base->StartPlayout(channel));
+  }
+  if (send) {
+    _sending[channel] = true;
+    CHECK(base->StartSend(channel));
+  }
+  if (fileAsMic) {
+    // play a file as the microphone signal, mixed with the real mic, so
+    // that SWB (super-wideband) paths can be tested as well
+    const bool mixWithMic(true);
+    CHECK(file->StartPlayingFileAsMicrophone(channel, _mgr.AudioFilename(),
+            true, mixWithMic));
+  }
+  if (localFile) {
+    CHECK(file->StartPlayingFileLocally(channel,
+            GetResource("audio_short16.pcm"),
+            false,
+            kFileFormatPcm16kHzFile));
+  }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopMedia — undo StartMedia: stop file playback, receive, playout and send
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::StopMedia(int channel) {
+  VoEBase* base = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+
+  if (file->IsPlayingFileAsMicrophone(channel)) {
+    CHECK(file->StopPlayingFileAsMicrophone(channel));
+  }
+  if (file->IsPlayingFileLocally(channel)) {
+    CHECK(file->StopPlayingFileLocally(channel));
+  }
+  if (_listening[channel]) {
+    _listening[channel] = false;
+    CHECK(base->StopReceive(channel));
+  }
+  if (_playing[channel]) {
+    _playing[channel] = false;
+    CHECK(base->StopPlayout(channel));
+  }
+  if (_sending[channel]) {
+    _sending[channel] = false;
+    CHECK(base->StopSend(channel));
+  }
+
+  return 0;
+}
+
+void VoEUnitTest::Sleep(unsigned int timeMillisec, bool addMarker) {
+  if (addMarker) {
+    float dtSec = (float) ((float) timeMillisec / 1000.0);
+    printf("[dT=%.1f]", dtSec);
+    fflush(NULL);
+  }
+  ::Sleep(timeMillisec);
+}
+
+void VoEUnitTest::Wait() {
+#if defined(_WIN32)
+  printf("\npress any key..."); fflush(NULL);
+  _getch();
+#endif
+}
+
+void VoEUnitTest::Test(const char* msg) {
+  printf("%s", msg);
+  fflush(NULL);
+  printf("\n");
+  fflush(NULL);
+}
+
+int VoEUnitTest::MixerTest() {
+  // Set up test parameters first
+  //
+  const int testTime(dTBetweenEachTest);
+
+  printf("\n\n================================================\n");
+  printf(" Mixer Unit Test\n");
+  printf("================================================\n\n");
+
+  // Get sub-API pointers
+  //
+  VoEBase* base = _mgr.BasePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+  VoEEncryption* encrypt = _mgr.EncryptionPtr();
+  VoEDtmf* dtmf = _mgr.DtmfPtr();
+  VoEExternalMedia* xmedia = _mgr.ExternalMediaPtr();
+
+  // Set trace
+  //
+  VoiceEngine::SetTraceFile(GetFilename("UnitTest_Mixer_trace.txt"));
+  VoiceEngine::SetTraceFilter(kTraceStateInfo | kTraceWarning | kTraceError |
+                              kTraceCritical | kTraceApiCall | kTraceMemory |
+                              kTraceInfo);
+
+  // Init
+  //
+  CHECK(base->Init());
+
+  // 8 kHz
+  //    CodecInst l16_8 = { 123, "L16", 8000, 160, 1, 128000 };
+  CodecInst pcmu_8 = { 0, "pcmu", 8000, 160, 1, 64000 };
+  //    CodecInst g729_8 = { 18, "g729", 8000, 160, 1, 8000 };
+
+  // 16 kHz
+  CodecInst ipcmwb_16 = { 97, "ipcmwb", 16000, 320, 1, 80000 };
+  CodecInst l16_16 = { 124, "L16", 16000, 320, 1, 256000 };
+
+  // 32 kHz
+  CodecInst l16_32 = { 125, "L16", 32000, 320, 1, 512000 };
+  CodecInst g722_1c_32 = { 126, "G7221", 32000, 640, 1, 32000 };// 20ms@32kHz
+
+  // ------------------------
+  // Verify mixing frequency
+  // ------------------------
+
+  base->CreateChannel();
+
+  Test(">> Verify correct mixing frequency:\n");
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending file at 16kHz <=> mixing at 16kHz...");
+  CHECK(codec->SetSendCodec(0, ipcmwb_16));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(codec->SetSendCodec(0, pcmu_8));
+  Sleep(testTime);
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing at 16kHz...");
+  CHECK(file->StartPlayingFileLocally(0, GetResource("audio_long16.pcm"),
+          false, kFileFormatPcm16kHzFile));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+
+  base->CreateChannel();
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(codec->SetSendCodec(0, pcmu_8));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+
+  Test("(ch 1) Playing 16kHz file locally <=> mixing at 32kHz...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+
+  CHECK(StopMedia(1));
+  CHECK(StopMedia(0));
+
+  base->DeleteChannel(1);
+  base->DeleteChannel(0);
+  ANL();
+
+  // -------------------------
+  // Verify stereo mode mixing
+  // -------------------------
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  // SetOutputVolumePan
+  //
+  // Ensure that all cases sound OK and that the mixer changes state between
+  // mono and stereo as it should. A debugger is required to trace the state
+  // transitions.
+
+  Test(">> Verify correct mixing in stereo using SetOutputVolumePan():\n");
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  Sleep(testTime);
+  Test("Panning volume to the left <=> mixing in stereo @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 0.0));
+  Sleep(testTime);
+  Test("Panning volume to the right <=> mixing in stereo @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 0.0, 1.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+  Test("Panning volume to the left <=> mixing in stereo @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 0.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Stopped playing file <=> mixing in mono @ 16kHz...");
+  CHECK(StopMedia(1));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+  Test("Panning volume to the right <=> mixing in stereo @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 0.0, 1.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  ANL();
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+
+  // SetChannelOutputVolumePan
+  //
+  // Ensure that all cases sound OK and that the mixer changes state between
+  // mono and stereo as it should. A debugger is required to trace the state
+  // transitions.
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  Test(">> Verify correct mixing in stereo using"
+    " SetChannelOutputVolumePan():\n");
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  Sleep(testTime);
+  Test("(ch 0) Panning channel volume to the left <=> mixing in stereo @ "
+    "16kHz...");
+  CHECK(volume->SetOutputVolumePan(0, 1.0, 0.0));
+  Sleep(testTime);
+  Test("(ch 0) Panning channel volume to the right <=> mixing in stereo"
+    " @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(0, 0.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 0) Back to center volume again <=> mixing in mono @"
+    " 16kHz...");
+  CHECK(volume->SetOutputVolumePan(0, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+  Test("(ch 1) Panning channel volume to the left <=> mixing in stereo "
+    "@ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(1, 1.0, 0.0));
+  Sleep(testTime);
+  Test("(ch 1) Back to center volume again <=> mixing in mono @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(1, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Stopped playing file <=> mixing in mono @ 16kHz...");
+  CHECK(StopMedia(1));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  ANL();
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+
+  // Emulate stereo-encoding using Encryption
+  //
+  // Modify the transmitted RTP stream by using external encryption.
+  // Supports frame based and sample based "stereo-encoding schemes".
+
+  base->CreateChannel();
+
+  Test(">> Verify correct mixing in stereo using emulated stereo input:\n");
+
+  // enable external encryption
+  CHECK(encrypt->RegisterExternalEncryption(0, *this));
+  Test("(ch 0) External Encryption is now enabled:");
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing in mono @ 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  // switch to 16kHz (L16) sending codec
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Test("(ch 0) Sending file at 16kHz (L16) <=> mixing in mono @ 16kHz...");
+  Sleep(testTime);
+
+  // register L16 as 2-channel codec on receiving side =>
+  // should sound bad since RTP module splits all received packets in half
+  // (sample based)
+  CHECK(base->StopPlayout(0));
+  CHECK(base->StopReceive(0));
+  l16_16.channels = 2;
+  CHECK(codec->SetRecPayloadType(0, l16_16));
+  CHECK(base->StartReceive(0));
+  CHECK(base->StartPlayout(0));
+  Test("(ch 0) 16kHz L16 is now registered as 2-channel codec on RX side => "
+    "should sound bad...");
+  Sleep(testTime);
+
+  // emulate sample-based stereo encoding
+  Test("(ch 0) Emulate sample-based stereo encoding on sending side => "
+    "should sound OK...");
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+  Test("(ch 0) Stop emulating sample-based stereo encoding on sending side =>"
+    " should sound bad...");
+  SetStereoExternalEncryption(0, false, 16);
+  Sleep(testTime);
+  Test("(ch 0) Emulate sample-based stereo encoding on sending side => "
+    "should sound OK...");
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+
+  // switch to 32kHz (L16) sending codec and disable stereo encoding
+  CHECK(codec->SetSendCodec(0, l16_32));
+  SetStereoExternalEncryption(0, false, 16);
+  Test("(ch 0) Sending file and speech at 32kHz (L16) <=> mixing in mono @ "
+    "32kHz...");
+  Sleep(testTime);
+
+  // register L16 32kHz as 2-channel codec on receiving side
+  CHECK(base->StopPlayout(0));
+  CHECK(base->StopReceive(0));
+  l16_32.channels = 2;
+  CHECK(codec->SetRecPayloadType(0, l16_32));
+  CHECK(base->StartReceive(0));
+  CHECK(base->StartPlayout(0));
+  Test("(ch 0) 32kHz L16 is now registered as 2-channel codec on RX side =>"
+    " should sound bad...");
+  Sleep(testTime);
+
+  // emulate sample-based stereo encoding
+  Test("(ch 0) Emulate sample-based stereo encoding on sending side =>"
+    " should sound OK...");
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+
+  StopMedia(0);
+  l16_32.channels = 1;
+
+  // disable external encryption
+  CHECK(encrypt->DeRegisterExternalEncryption(0));
+  ANL();
+
+  base->DeleteChannel(0);
+
+  // ------------------
+  // Verify put-on-hold
+  // ------------------
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  Test(">> Verify put-on-hold functionality:\n");
+
+  Test("(ch 0) Sending at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  Sleep(testTime);
+
+  Test("(ch 0) Sending at 16kHz...");
+  l16_16.channels = 1;
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  Sleep(testTime);
+
+  Test("(ch 0) Perform minor panning to the left to force mixing in"
+    " stereo...");
+  CHECK(volume->SetOutputVolumePan(0, (float)1.0, (float)0.7));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  Sleep(testTime);
+
+  Test("(ch 0) Back to center volume again...");
+  CHECK(volume->SetOutputVolumePan(0, 1.0, 1.0));
+  Sleep(testTime);
+
+  Test("(ch 1) Add 16kHz local file to the mixer...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(1, true, kHoldPlayOnly));
+  Test("(ch 1) Playout is now on hold => should be silent...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  CHECK(base->SetOnHoldStatus(1, false, kHoldPlayOnly));
+  Test("(ch 1) Playout is now enabled again...");
+  Sleep(testTime);
+  StopMedia(1);
+  Test("(ch 1) Stopped playing file...");
+  Sleep(testTime);
+  StopMedia(0);
+  ANL();
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+
+  // -----------------------------------
+  // Verify recording of playout to file
+  // -----------------------------------
+
+  // StartRecordingPlayout
+  //
+  // Verify that the correct set of signals is recorded in the mixer.
+  // Record each channel and all channels (-1) to ensure that post and pre
+  // mixing recording works.
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  Test(">> Verify file-recording functionality:\n");
+
+  Test("(ch 0) Sending at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, GetFilename("RecordedPlayout16kHz.pcm"), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Test("(ch 0) Sending at 16kHz (L16)...");
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, GetFilename("RecordedPlayout16kHz.pcm"), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Test("(ch 0) Sending at 32kHz (L16)...");
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, GetFilename("RecordedPlayout16kHz.pcm"), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  Test("(ch 0) Sending at 16kHz without file as mic but file added on the"
+    " playout side instead...");
+  CHECK(StopMedia(0));
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, GetFilename("RecordedPlayout16kHz.pcm"), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+  CHECK(file->StopPlayingFileLocally(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+
+  CHECK(StopMedia(0));
+  CHECK(StopMedia(1));
+
+  Test("(ch 0) Sending at 16kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, false, false));
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Test("(ch 1) Adding playout file...");
+  CHECK(StartMedia(1, 33333, false, true, false, false, true));
+  Sleep(testTime);
+
+  Test("(ch -1) Speak while recording all channels to add mixer input on "
+    "channel 0...");
+  CHECK(file->StartRecordingPlayout(
+          -1, GetFilename("RecordedPlayout16kHz.pcm"), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(-1));
+  CHECK(file->StopPlayingFileLocally(1));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  CHECK(StopMedia(0));
+  CHECK(StopMedia(1));
+  ANL();
+
+  // StartRecordingPlayoutStereo
+
+  Test(">> Verify recording of playout in stereo:\n");
+
+  Test("(ch 0) Sending at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_16));
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  Test("Modified master balance (L=10%, R=100%) to force stereo mixing...");
+  CHECK(volume->SetOutputVolumePan(-1, (float)0.1, (float)1.0));
+  Sleep(testTime);
+
+  /*
+   Test("Recording of left and right channel playout to two 16kHz PCM "
+   "files...");
+   file->StartRecordingPlayoutStereo(
+   GetFilename("RecordedPlayout_Left_16kHz.pcm"),
+   GetFilename("RecordedPlayout_Right_16kHz.pcm"), StereoBoth);
+   Sleep(testTime);
+   Test("Back to center volume again...");
+   CHECK(volume->SetOutputVolumePan(-1, (float)1.0, (float)1.0));
+   */
+
+  Test("(ch 0) Playing out the recorded file for the left channel (10%)...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout_Left_16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+
+  Test("(ch 0) Playing out the recorded file for the right channel (100%) =>"
+    " should sound louder than the left channel...");
+  CHECK(file->StartPlayingFileLocally(
+          0, GetFilename("RecordedPlayout_Right_16kHz.pcm")));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+  ANL();
+
+  // ---------------------------
+  // Verify inserted Dtmf tones
+  // ---------------------------
+
+  Test(">> Verify Dtmf feedback functionality:\n");
+
+  base->CreateChannel();
+
+  for (int i = 0; i < 2; i++) {
+    if (i == 0)
+      Test("Dtmf direct feedback is now enabled...");
+    else
+      Test("Dtmf direct feedback is now disabled...");
+
+    CHECK(dtmf->SetDtmfFeedbackStatus(true, (i==0)));
+
+    Test("(ch 0) Sending at 32kHz using G.722.1C...");
+    CHECK(codec->SetRecPayloadType(0, g722_1c_32));
+    CHECK(codec->SetSendCodec(0, g722_1c_32));
+    CHECK(StartMedia(0, 12345, true, true, true, false, false));
+    Sleep(500);
+
+    Test("(ch 0) Sending outband Dtmf events => ensure that they are added"
+      " to the mixer...");
+    // ensure that receiver will not play out outband Dtmf
+    CHECK(dtmf->SetSendTelephoneEventPayloadType(0, 118));
+    CHECK(dtmf->SendTelephoneEvent(0, 9, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 1, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 5, true, 390));
+    Sleep(500);
+    Sleep(testTime - 1500);
+
+    Test("(ch 0) Changing codec to 8kHz PCMU...");
+    CHECK(codec->SetSendCodec(0, pcmu_8));
+    Sleep(500);
+
+    Test("(ch 0) Sending outband Dtmf events => ensure that they are added"
+      " to the mixer...");
+    CHECK(dtmf->SendTelephoneEvent(0, 9, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 1, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 5, true, 390));
+    Sleep(500);
+    Sleep(testTime - 1500);
+
+    Test("(ch 0) Changing codec to 16kHz L16...");
+    CHECK(codec->SetSendCodec(0, l16_16));
+    Sleep(500);
+
+    Test("(ch 0) Sending outband Dtmf events => ensure that they are added"
+      " to the mixer...");
+    CHECK(dtmf->SendTelephoneEvent(0, 9, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 1, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 5, true, 390));
+    Sleep(500);
+    Sleep(testTime - 1500);
+
+    StopMedia(0);
+    ANL();
+  }
+
+  base->DeleteChannel(0);
+
+  // ---------------------------
+  // Verify external processing
+  // --------------------------
+
+  base->CreateChannel();
+
+  Test(">> Verify external media processing:\n");
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  Sleep(testTime);
+  Test("Enabling playout external media processing => played audio should "
+    "now be affected");
+  CHECK(xmedia->RegisterExternalMediaProcessing(
+          0, kPlaybackAllChannelsMixed, mpobj));
+  Sleep(testTime);
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+  printf("Back to normal again\n");
+  CHECK(xmedia->DeRegisterExternalMediaProcessing(0,
+          kPlaybackAllChannelsMixed));
+  Sleep(testTime);
+  printf("Enabling playout external media processing on ch 0 => "
+    "played audio should now be affected\n");
+  CHECK(xmedia->RegisterExternalMediaProcessing(0, kPlaybackPerChannel,
+          mpobj));
+  Sleep(testTime);
+  Test("Panning volume to the right <=> mixing in stereo @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 0.0, 1.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  printf("Back to normal again\n");
+  CHECK(xmedia->DeRegisterExternalMediaProcessing(0, kPlaybackPerChannel));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  ANL();
+
+  base->DeleteChannel(0);
+
+  // --------------------------------------------------
+  // Extended tests of emulated stereo encoding schemes
+  // --------------------------------------------------
+
+  CodecInst PCMU;
+  CodecInst G729;
+  CodecInst L16_8;
+  CodecInst L16_16;
+  CodecInst L16_32;
+
+  base->CreateChannel();
+
+  Test(">> Verify emulated stereo encoding for differenct codecs:\n");
+
+  // enable external encryption
+  CHECK(encrypt->RegisterExternalEncryption(0, *this));
+  Test("(ch 0) External Encryption is now enabled:");
+
+  // register all codecs on the receiving side
+  strcpy(PCMU.plname, "PCMU");
+  PCMU.channels = 2;
+  PCMU.pacsize = 160;
+  PCMU.plfreq = 8000;
+  PCMU.pltype = 125;
+  PCMU.rate = 64000;
+  CHECK(codec->SetRecPayloadType(0, PCMU));
+
+  strcpy(G729.plname, "G729");
+  G729.channels = 2;
+  G729.pacsize = 160;
+  G729.plfreq = 8000;
+  G729.pltype = 18;
+  G729.rate = 8000;
+  CHECK(codec->SetRecPayloadType(0, G729));
+
+  strcpy(L16_8.plname, "L16");
+  L16_8.channels = 2;
+  L16_8.pacsize = 160;
+  L16_8.plfreq = 8000;
+  L16_8.pltype = 120;
+  L16_8.rate = 128000;
+  CHECK(codec->SetRecPayloadType(0, L16_8));
+
+  strcpy(L16_16.plname, "L16");
+  L16_16.channels = 2;
+  L16_16.pacsize = 320;
+  L16_16.plfreq = 16000;
+  L16_16.pltype = 121;
+  L16_16.rate = 256000;
+  CHECK(codec->SetRecPayloadType(0, L16_16));
+
+  // NOTE - we cannot send larger than 1500 bytes per RTP packet
+  strcpy(L16_32.plname, "L16");
+  L16_32.channels = 2;
+  L16_32.pacsize = 320;
+  L16_32.plfreq = 32000;
+  L16_32.pltype = 122;
+  L16_32.rate = 512000;
+  CHECK(codec->SetRecPayloadType(0, L16_32));
+
+  // sample-based, 8-bits per sample
+
+  Test("(ch 0) Sending using G.711 (sample based, 8 bits/sample)...");
+  PCMU.channels = 1;
+  CHECK(codec->SetSendCodec(0, PCMU));
+  SetStereoExternalEncryption(0, true, 8);
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  // sample-based, 16-bits per sample
+
+  Test("(ch 0) Sending using L16 8kHz (sample based, 16 bits/sample)...");
+  L16_8.channels = 1;
+  CHECK(codec->SetSendCodec(0, L16_8));
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+
+  Test("(ch 0) Sending using L16 16kHz (sample based, 16 bits/sample)...");
+  L16_16.channels = 1;
+  CHECK(codec->SetSendCodec(0, L16_16));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending using L16 32kHz (sample based, 16 bits/sample)...");
+  L16_32.channels = 1;
+  CHECK(codec->SetSendCodec(0, L16_32));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending using G.729 (frame based)...");
+  G729.channels = 1;
+  CHECK(codec->SetSendCodec(0, G729));
+  Sleep(testTime);
+
+  StopMedia(0);
+
+  // disable external encryption
+  CHECK(encrypt->DeRegisterExternalEncryption(0));
+
+  base->DeleteChannel(0);
+
+  // ------------------------------------------------------------------------
+  CHECK(base->Terminate());
+
+  printf("\n\n------------------------------------------------\n");
+  printf(" Test passed!\n");
+  printf("------------------------------------------------\n\n");
+
+  return 0;
+}
+
+} // namespace voetest
diff --git a/trunk/src/voice_engine/main/test/auto_test/voe_unit_test.h b/trunk/src/voice_engine/main/test/auto_test/voe_unit_test.h
new file mode 100644
index 0000000..ce53241
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/auto_test/voe_unit_test.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_UNIT_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_UNIT_TEST_H
+
+#include "voe_standard_test.h"
+
+namespace voetest {
+
+class VoETestManager;
+
+// Interactive VoiceEngine unit test (mixer focus). Derives from Encryption
+// so the test object itself can be registered as the engine's external
+// encryption/decryption provider.
+class VoEUnitTest : public Encryption {
+ public:
+  VoEUnitTest(VoETestManager& mgr);
+  ~VoEUnitTest() {}
+  // Entry point for the test run.
+  int DoTest();
+
+ protected:
+  // Encryption interface: external encrypt/decrypt hooks invoked by the
+  // engine for RTP and RTCP payloads.
+  void encrypt(int channel_no, unsigned char * in_data,
+               unsigned char * out_data, int bytes_in, int * bytes_out);
+  void decrypt(int channel_no, unsigned char * in_data,
+               unsigned char * out_data, int bytes_in, int * bytes_out);
+  void encrypt_rtcp(int channel_no, unsigned char * in_data,
+                    unsigned char * out_data, int bytes_in, int * bytes_out);
+  void decrypt_rtcp(int channel_no, unsigned char * in_data,
+                    unsigned char * out_data, int bytes_in, int * bytes_out);
+
+ private:
+  int MenuSelection();
+  int MixerTest();
+  // addMarker defaults to false; presumably prints a progress marker while
+  // waiting — TODO confirm against the implementation.
+  void Sleep(unsigned int timeMillisec, bool addMarker = false);
+  void Wait();
+  // Starts media on |channel|: listen/playout/send selectable individually;
+  // fileAsMic/localFile select file playback as microphone input and/or
+  // local playout.
+  int StartMedia(int channel,
+                 int rtpPort,
+                 bool listen,
+                 bool playout,
+                 bool send,
+                 bool fileAsMic,
+                 bool localFile);
+  int StopMedia(int channel);
+  // Prints a test-step banner.
+  void Test(const char* msg);
+  void SetStereoExternalEncryption(int channel, bool onOff, int bitsPerSample);
+
+ private:
+  VoETestManager& _mgr;      // Owning test manager; held by reference.
+  static const char* _key;   // Key material used by the encryption hooks.
+
+ private:
+  // Per-channel media state, indexed by channel id (up to 32 channels).
+  bool _listening[32];
+  bool _playing[32];
+  bool _sending[32];
+
+ private:
+  // State for the external (stereo) encryption mode set via
+  // SetStereoExternalEncryption().
+  bool _extOnOff;
+  int _extBitsPerSample;
+  int _extChannel;
+};
+
+} //  namespace voetest
+#endif // WEBRTC_VOICE_ENGINE_VOE_UNIT_TEST_H
diff --git a/trunk/src/voice_engine/main/test/cmd_test/Android.mk b/trunk/src/voice_engine/main/test/cmd_test/Android.mk
new file mode 100644
index 0000000..f1a4f1a
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/cmd_test/Android.mk
@@ -0,0 +1,60 @@
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH:= $(call my-dir)
+
+# Command-line voice engine test app (voe_cmd_test).
+
+include $(CLEAR_VARS)
+
+# Shared WebRTC Android build settings.
+include $(LOCAL_PATH)/../../../../../android-webrtc.mk
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES:= \
+    voe_cmd_test.cc
+
+# Flags passed to both C and C++ files.
+LOCAL_CFLAGS := \
+    '-DWEBRTC_TARGET_PC' \
+    '-DWEBRTC_ANDROID' \
+    '-DDEBUG'
+
+# Engine interface headers plus gtest and Android framework includes.
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../../interface \
+    $(LOCAL_PATH)/../../../.. \
+    $(LOCAL_PATH)/../../../../.. \
+    external/gtest/include \
+    frameworks/base/include
+
+LOCAL_SHARED_LIBRARIES := \
+    libutils \
+    libmedia \
+    libcamera_client \
+    libgui \
+    libhardware \
+    libandroid_runtime \
+    libbinder
+
+#libwilhelm.so libDunDef-Android.so libbinder.so libsystem_server.so 
+
+LOCAL_MODULE:= webrtc_voe_cmd
+
+# NDK builds link the shared STLport plus the WebRTC JNI/audio-processing
+# shared libraries and produce a plain executable; platform builds link
+# static STLport and libwebrtc and use the native-test build rule.
+ifdef NDK_ROOT
+LOCAL_SHARED_LIBRARIES += \
+    libstlport_shared \
+    libwebrtc-voice-jni \
+    libwebrtc_audio_preprocessing
+include $(BUILD_EXECUTABLE)
+else
+LOCAL_SHARED_LIBRARIES += \
+    libstlport \
+    libwebrtc
+include external/stlport/libstlport.mk
+include $(BUILD_NATIVE_TEST)
+endif
diff --git a/trunk/src/voice_engine/main/test/cmd_test/voe_cmd_test.cc b/trunk/src/voice_engine/main/test/cmd_test/voe_cmd_test.cc
new file mode 100644
index 0000000..99e88d2
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/cmd_test/voe_cmd_test.cc
@@ -0,0 +1,922 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "test/testsupport/fileutils.h"
+
+#include "voe_errors.h"
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_volume_control.h"
+#include "voe_dtmf.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_audio_processing.h"
+#include "voe_file.h"
+#include "voe_video_sync.h"
+#include "voe_encryption.h"
+#include "voe_hardware.h"
+#include "voe_external_media.h"
+#include "voe_network.h"
+#include "voe_neteq_stats.h"
+#include "engine_configurations.h"
+
+// Enable this flag to run the test with hard-coded IP/port/codec settings
+// and start it automatically on a key press; useful for repeated test
+// runs.
+//#define DEBUG
+
+// #define EXTERNAL_TRANSPORT
+
+using namespace webrtc;
+
+// Post-call check used throughout this test: when |res| (set by the
+// preceding API call) is non-zero, logs the call position and the engine's
+// last error, then advances the position counter |cnt|.
+// NOTE(review): the macro is not wrapped in do { ... } while (0) and it
+// expands to an if-statement followed by "cnt++;" — it must only be used
+// as a full standalone statement, never as the body of an unbraced
+// if/else, or the cnt++ would escape the conditional.
+#define VALIDATE                                                        \
+  if (res != 0)                                                         \
+  {                                                                     \
+    printf("*** Error at position %i / line %i \n", cnt, __LINE__);     \
+    printf("*** Error code = %i \n", base1->LastError());               \
+  }                                                                     \
+  cnt++;
+
+// Global VoiceEngine instance and its sub-API interfaces. All are acquired
+// in main() via GetInterface() and released there (Release() each, then
+// VoiceEngine::Delete()).
+VoiceEngine* m_voe = NULL;
+VoEBase* base1 = NULL;
+VoECodec* codec = NULL;
+VoEVolumeControl* volume = NULL;
+VoEDtmf* dtmf = NULL;
+VoERTP_RTCP* rtp_rtcp = NULL;
+VoEAudioProcessing* apm = NULL;
+VoENetwork* netw = NULL;
+VoEFile* file = NULL;
+VoEVideoSync* vsync = NULL;
+VoEEncryption* encr = NULL;
+VoEHardware* hardware = NULL;
+VoEExternalMedia* xmedia = NULL;
+VoENetEqStats* neteqst = NULL;
+
+// Interactive call loop; defined below main(). |out_path| is where
+// recordings are written.
+void RunTest(std::string out_path);
+
+#ifdef EXTERNAL_TRANSPORT
+
+// Loopback transport used when external transport is enabled: instead of
+// sending packets over a socket, outgoing RTP/RTCP is handed straight back
+// to the engine's receive path for the same channel.
+class my_transportation : public Transport
+{
+  int SendPacket(int channel,const void *data,int len);
+  int SendRTCPPacket(int channel, const void *data, int len);
+};
+
+// Feed the outgoing RTP packet back into the channel's receiver.
+int my_transportation::SendPacket(int channel,const void *data,int len)
+{
+  netw->ReceivedRTPPacket(channel, data, len);
+  return 0;
+}
+
+// Feed the outgoing RTCP packet back into the channel's receiver.
+int my_transportation::SendRTCPPacket(int channel, const void *data, int len)
+{
+  netw->ReceivedRTCPPacket(channel, data, len);
+  return 0;
+}
+
+my_transportation my_transport;
+#endif
+
+// Receives asynchronous error/warning callbacks from VoiceEngine and prints
+// them to the console; registered in main() via
+// RegisterVoiceEngineObserver().
+class MyObserver : public VoiceEngineObserver {
+ public:
+   virtual void CallbackOnError(const int channel, const int err_code);
+};
+
+void MyObserver::CallbackOnError(const int channel, const int err_code) {
+  // Add printf for other error codes here
+  if (err_code == VE_TYPING_NOISE_WARNING) {
+    printf("  TYPING NOISE DETECTED \n");
+  } else if (err_code == VE_RECEIVE_PACKET_TIMEOUT) {
+    printf("  RECEIVE PACKET TIMEOUT \n");
+  } else if (err_code == VE_PACKET_RECEIPT_RESTARTED) {
+    printf("  PACKET RECEIPT RESTARTED \n");
+  } else if (err_code == VE_RUNTIME_PLAY_WARNING) {
+    printf("  RUNTIME PLAY WARNING \n");
+  } else if (err_code == VE_RUNTIME_REC_WARNING) {
+    printf("  RUNTIME RECORD WARNING \n");
+  } else if (err_code == VE_SATURATION_WARNING) {
+    printf("  SATURATION WARNING \n");
+  } else if (err_code == VE_RUNTIME_PLAY_ERROR) {
+    printf("  RUNTIME PLAY ERROR \n");
+  } else if (err_code == VE_RUNTIME_REC_ERROR) {
+    printf("  RUNTIME RECORD ERROR \n");
+  } else if (err_code == VE_REC_DEVICE_REMOVED) {
+    printf("  RECORD DEVICE REMOVED \n");
+  }
+}
+
+// Test driver: creates a VoiceEngine, obtains every sub-API interface,
+// enables file tracing, runs the interactive call loop (RunTest), then
+// tears everything down — each interface must be Release()d before
+// VoiceEngine::Delete().
+int main() {
+  int res = 0;
+  int cnt = 0;
+
+  printf("Test started \n");
+
+  // Acquire the engine and all sub-API interfaces; each GetInterface()
+  // is balanced by a Release() at the bottom of main().
+  m_voe = VoiceEngine::Create();
+  base1 = VoEBase::GetInterface(m_voe);
+  codec = VoECodec::GetInterface(m_voe);
+  apm = VoEAudioProcessing::GetInterface(m_voe);
+  volume = VoEVolumeControl::GetInterface(m_voe);
+  dtmf = VoEDtmf::GetInterface(m_voe);
+  rtp_rtcp = VoERTP_RTCP::GetInterface(m_voe);
+  netw = VoENetwork::GetInterface(m_voe);
+  file = VoEFile::GetInterface(m_voe);
+  vsync = VoEVideoSync::GetInterface(m_voe);
+  encr = VoEEncryption::GetInterface(m_voe);
+  hardware = VoEHardware::GetInterface(m_voe);
+  xmedia = VoEExternalMedia::GetInterface(m_voe);
+  neteqst = VoENetEqStats::GetInterface(m_voe);
+
+  MyObserver my_observer;
+
+  // On Android, write outputs to the sdcard; elsewhere use the test
+  // support output directory.
+#if defined(WEBRTC_ANDROID)
+  const std::string out_path = "/sdcard/";
+#else
+  const std::string out_path = webrtc::test::OutputPath();
+#endif
+  const std::string trace_filename = out_path + "webrtc_trace.txt";
+
+  printf("Set trace filenames (enable trace)\n");
+  VoiceEngine::SetTraceFilter(kTraceAll);
+  res = VoiceEngine::SetTraceFile(trace_filename.c_str());
+  VALIDATE;
+
+  // NULL clears any previously registered trace callback.
+  res = VoiceEngine::SetTraceCallback(NULL);
+  VALIDATE;
+
+  printf("Init\n");
+  res = base1->Init();
+  if (res != 0) {
+    // Cannot continue without an initialized engine; flush and bail out.
+    printf("\nError calling Init: %d\n", base1->LastError());
+    fflush(NULL);
+    exit(1);
+  }
+
+  res = base1->RegisterVoiceEngineObserver(my_observer);
+  VALIDATE;
+
+  // NOTE(review): VALIDATE already increments cnt, so these explicit cnt++
+  // lines double-count the position — confirm whether that is intended.
+  cnt++;
+  printf("Version\n");
+  char tmp[1024];
+  res = base1->GetVersion(tmp);
+  VALIDATE;
+  cnt++;
+  printf("%s\n", tmp);
+
+  // Interactive call loop; blocks until the user ends the test.
+  RunTest(out_path);
+
+  printf("Terminate \n");
+
+  base1->DeRegisterVoiceEngineObserver();
+
+  res = base1->Terminate();
+  VALIDATE;
+
+  // Release every acquired interface before deleting the engine.
+  if (base1)
+    base1->Release();
+
+  if (codec)
+    codec->Release();
+
+  if (volume)
+    volume->Release();
+
+  if (dtmf)
+    dtmf->Release();
+
+  if (rtp_rtcp)
+    rtp_rtcp->Release();
+
+  if (apm)
+    apm->Release();
+
+  if (netw)
+    netw->Release();
+
+  if (file)
+    file->Release();
+
+  if (vsync)
+    vsync->Release();
+
+  if (encr)
+    encr->Release();
+
+  if (hardware)
+    hardware->Release();
+
+  if (xmedia)
+    xmedia->Release();
+
+  if (neteqst)
+    neteqst->Release();
+
+  VoiceEngine::Delete(m_voe);
+
+  return 0;
+}
+
+void RunTest(std::string out_path) {
+  int chan, cnt, res;
+  CodecInst cinst;
+  cnt = 0;
+  int i;
+  int codecinput;
+  bool AEC = false;
+  bool AGC = true;
+  bool AGC1 = false;
+  bool VAD = false;
+  bool NS = false;
+  bool NS1 = false;
+  bool typing_detection = false;
+  bool muted = false;
+  bool on_hold = false;
+
+#if defined(WEBRTC_ANDROID)
+  std::string resource_path = "/sdcard/";
+#else
+  std::string resource_path = webrtc::test::ProjectRootPath();
+  if (resource_path == webrtc::test::kCannotFindProjectRootDir) {
+    printf("*** Unable to get project root directory. "
+           "File playing may fail. ***\n");
+    // Fall back to the current directory.
+    resource_path = "./";
+  } else {
+    resource_path += "test/data/voice_engine/";
+  }
+#endif
+  const std::string audio_filename = resource_path + "audio_long16.pcm";
+
+  const std::string play_filename = out_path + "recorded_playout.pcm";
+  const std::string mic_filename = out_path + "recorded_mic.pcm";
+
+  chan = base1->CreateChannel();
+  if (chan < 0) {
+    printf("Error at position %i\n", cnt);
+    printf("************ Error code = %i\n", base1->LastError());
+    fflush(NULL);
+  }
+  cnt++;
+
+  int j = 0;
+#ifdef EXTERNAL_TRANSPORT
+  my_transportation ch0transport;
+  printf("Enabling external transport \n");
+  netw->RegisterExternalTransport(0, ch0transport);
+#else
+  char ip[64];
+#ifdef DEBUG
+  strcpy(ip, "127.0.0.1");
+#else
+  char localip[64];
+  netw->GetLocalIP(localip);
+  printf("local IP:%s\n", localip);
+
+  printf("1. 127.0.0.1 \n");
+  printf("2. Specify IP \n");
+  ASSERT_EQ(1, scanf("%i", &i));
+
+  if (1 == i)
+    strcpy(ip, "127.0.0.1");
+  else {
+    printf("Specify remote IP: ");
+    ASSERT_EQ(1, scanf("%s", ip));
+  }
+#endif
+
+  int colons(0);
+  while (ip[j] != '\0' && j < 64 && !(colons = (ip[j++] == ':')))
+    ;
+  if (colons) {
+    printf("Enabling IPv6\n");
+    res = netw->EnableIPv6(0);
+    VALIDATE;
+  }
+
+  int rPort;
+#ifdef DEBUG
+  rPort=8500;
+#else
+  printf("Specify remote port (1=1234): ");
+  ASSERT_EQ(1, scanf("%i", &rPort));
+  if (1 == rPort)
+    rPort = 1234;
+  printf("Set Send port \n");
+#endif
+
+  printf("Set Send IP \n");
+  res = base1->SetSendDestination(chan, rPort, ip);
+  VALIDATE;
+
+  int lPort;
+#ifdef DEBUG
+  lPort=8500;
+#else
+  printf("Specify local port (1=1234): ");
+  ASSERT_EQ(1, scanf("%i", &lPort));
+  if (1 == lPort)
+    lPort = 1234;
+  printf("Set Rec Port \n");
+#endif
+  res = base1->SetLocalReceiver(chan, lPort);
+  VALIDATE;
+#endif
+
+  printf("\n");
+  for (i = 0; i < codec->NumOfCodecs(); i++) {
+    res = codec->GetCodec(i, cinst);
+    VALIDATE;
+    if (strncmp(cinst.plname, "ISAC", 4) == 0 && cinst.plfreq == 32000) {
+      printf("%i. ISAC-swb pltype:%i plfreqi:%i\n", i, cinst.pltype,
+             cinst.plfreq);
+    }
+    else {
+      printf("%i. %s pltype:%i plfreq:%i\n", i, cinst.plname,
+             cinst.pltype, cinst.plfreq);
+    }
+  }
+#ifdef DEBUG
+  codecinput=0;
+#else
+  printf("Select send codec: ");
+  ASSERT_EQ(1, scanf("%i", &codecinput));
+#endif
+  codec->GetCodec(codecinput, cinst);
+
+  printf("Set primary codec\n");
+  res = codec->SetSendCodec(chan, cinst);
+  VALIDATE;
+
+  const int kMaxNumChannels = 8;
+  int channel_index = 0;
+  std::vector<int> channels(kMaxNumChannels);
+  for (i = 0; i < kMaxNumChannels; ++i) {
+    channels[i] = base1->CreateChannel();
+    int port = rPort + (i + 1) * 2;
+    res = base1->SetSendDestination(channels[i], port, ip);
+    VALIDATE;
+    res = base1->SetLocalReceiver(channels[i], port);
+    VALIDATE;
+    res = codec->SetSendCodec(channels[i], cinst);
+    VALIDATE;
+  }
+
+  // Call loop
+  bool newcall = true;
+  while (newcall) {
+
+#ifdef WEBRTC_LINUX
+    int rd(-1), pd(-1);
+    res = hardware->GetNumOfRecordingDevices(rd);
+    VALIDATE;
+    res = hardware->GetNumOfPlayoutDevices(pd);
+    VALIDATE;
+
+    char dn[128] = { 0 };
+    char guid[128] = { 0 };
+    printf("\nPlayout devices (%d): \n", pd);
+    for (j=0; j<pd; ++j) {
+      res = hardware->GetPlayoutDeviceName(j, dn, guid);
+      VALIDATE;
+      printf("  %d: %s \n", j, dn);
+    }
+
+    printf("Recording devices (%d): \n", rd);
+    for (j=0; j<rd; ++j) {
+      res = hardware->GetRecordingDeviceName(j, dn, guid);
+      VALIDATE;
+      printf("  %d: %s \n", j, dn);
+    }
+
+    printf("Select playout device: ");
+    ASSERT_EQ(1, scanf("%d", &pd));
+    res = hardware->SetPlayoutDevice(pd);
+    VALIDATE;
+    printf("Select recording device: ");
+    ASSERT_EQ(1, scanf("%d", &rd));
+    printf("Setting sound devices \n");
+    res = hardware->SetRecordingDevice(rd);
+    VALIDATE;
+
+#endif // WEBRTC_LINUX
+    res = codec->SetVADStatus(0, VAD);
+    VALIDATE;
+
+    res = apm->SetAgcStatus(AGC);
+    VALIDATE;
+
+    res = apm->SetEcStatus(AEC);
+    VALIDATE;
+
+    res = apm->SetNsStatus(NS);
+    VALIDATE;
+
+#ifdef DEBUG
+    i = 1;
+#else
+    printf("\n1. Send, listen and playout \n");
+    printf("2. Send only \n");
+    printf("3. Listen and playout only \n");
+    printf("Select transfer mode: ");
+    ASSERT_EQ(1, scanf("%i", &i));
+#endif
+    const bool send = !(3 == i);
+    const bool receive = !(2 == i);
+
+    if (receive) {
+#ifndef EXTERNAL_TRANSPORT
+      printf("Start Listen \n");
+      res = base1->StartReceive(chan);
+      VALIDATE;
+#endif
+
+      printf("Start Playout \n");
+      res = base1->StartPlayout(chan);
+      VALIDATE;
+    }
+
+    if (send) {
+      printf("Start Send \n");
+      res = base1->StartSend(chan);
+      VALIDATE;
+    }
+
+    printf("Getting mic volume \n");
+    unsigned int vol = 999;
+    res = volume->GetMicVolume(vol);
+    VALIDATE;
+    if ((vol > 255) || (vol < 1)) {
+      printf("\n****ERROR in GetMicVolume");
+    }
+
+    int forever = 1;
+    while (forever) {
+      printf("\nActions\n");
+
+      printf("Codec Changes\n");
+      for (i = 0; i < codec->NumOfCodecs(); i++) {
+        res = codec->GetCodec(i, cinst);
+        VALIDATE;
+        if (strncmp(cinst.plname, "ISAC", 4) == 0 && cinst.plfreq
+            == 32000) {
+          printf("\t%i. ISAC-swb pltype:%i plfreq:%i\n", i,
+                 cinst.pltype, cinst.plfreq);
+        }
+        else {
+          printf("\t%i. %s pltype:%i plfreq:%i\n", i, cinst.plname,
+                 cinst.pltype, cinst.plfreq);
+        }
+      }
+      printf("Other\n");
+      const int noCodecs = i - 1;
+      printf("\t%i. Toggle VAD\n", i);
+      i++;
+      printf("\t%i. Toggle AGC\n", i);
+      i++;
+      printf("\t%i. Toggle NS\n", i);
+      i++;
+      printf("\t%i. Toggle EC\n", i);
+      i++;
+      printf("\t%i. Select AEC\n", i);
+      i++;
+      printf("\t%i. Select AECM\n", i);
+      i++;
+      printf("\t%i. Get speaker volume\n", i);
+      i++;
+      printf("\t%i. Set speaker volume\n", i);
+      i++;
+      printf("\t%i. Get microphone volume\n", i);
+      i++;
+      printf("\t%i. Set microphone volume\n", i);
+      i++;
+      printf("\t%i. Play local file (audio_long16.pcm) \n", i);
+      i++;
+      printf("\t%i. Change playout device \n", i);
+      i++;
+      printf("\t%i. Change recording device \n", i);
+      i++;
+      printf("\t%i. Toggle receive-side AGC \n", i);
+      i++;
+      printf("\t%i. Toggle receive-side NS \n", i);
+      i++;
+      printf("\t%i. AGC status \n", i);
+      i++;
+      printf("\t%i. Toggle microphone mute \n", i);
+      i++;
+      printf("\t%i. Toggle on hold status \n", i);
+      i++;
+      printf("\t%i. Get last error code \n", i);
+      i++;
+      printf("\t%i. Toggle typing detection (for Mac/Windows only) \n", i);
+      i++;
+      printf("\t%i. Record a PCM file \n", i);
+      i++;
+      printf("\t%i. Play a previously recorded PCM file locally \n", i);
+      i++;
+      printf("\t%i. Play a previously recorded PCM file as microphone \n", i);
+      i++;
+      printf("\t%i. Add an additional file-playing channel \n", i);
+      i++;
+      printf("\t%i. Remove a file-playing channel \n", i);
+      i++;
+
+      printf("Select action or %i to stop the call: ", i);
+      ASSERT_EQ(1, scanf("%i", &codecinput));
+
+      if (codecinput < codec->NumOfCodecs()) {
+        res = codec->GetCodec(codecinput, cinst);
+        VALIDATE;
+
+        printf("Set primary codec\n");
+        res = codec->SetSendCodec(chan, cinst);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 1)) {
+        VAD = !VAD;
+        res = codec->SetVADStatus(0, VAD);
+        VALIDATE;
+        if (VAD)
+          printf("\n VAD is now on! \n");
+        else
+          printf("\n VAD is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 2)) {
+        AGC = !AGC;
+        res = apm->SetAgcStatus(AGC);
+        VALIDATE;
+        if (AGC)
+          printf("\n AGC is now on! \n");
+        else
+          printf("\n AGC is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 3)) {
+        NS = !NS;
+        res = apm->SetNsStatus(NS);
+        VALIDATE;
+        if (NS)
+          printf("\n NS is now on! \n");
+        else
+          printf("\n NS is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 4)) {
+        AEC = !AEC;
+        res = apm->SetEcStatus(AEC, kEcUnchanged);
+        VALIDATE;
+        if (AEC)
+          printf("\n Echo control is now on! \n");
+        else
+          printf("\n Echo control is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 5)) {
+        res = apm->SetEcStatus(AEC, kEcAec);
+        VALIDATE;
+        printf("\n AEC selected! \n");
+        if (AEC)
+          printf(" (Echo control is on)\n");
+        else
+          printf(" (Echo control is off)\n");
+      }
+      else if (codecinput == (noCodecs + 6)) {
+        res = apm->SetEcStatus(AEC, kEcAecm);
+        VALIDATE;
+        printf("\n AECM selected! \n");
+        if (AEC)
+          printf(" (Echo control is on)\n");
+        else
+          printf(" (Echo control is off)\n");
+      }
+      else if (codecinput == (noCodecs + 7)) {
+        unsigned vol(0);
+        res = volume->GetSpeakerVolume(vol);
+        VALIDATE;
+        printf("\n Speaker Volume is %d \n", vol);
+      }
+      else if (codecinput == (noCodecs + 8)) {
+        printf("Level: ");
+        ASSERT_EQ(1, scanf("%i", &i));
+        res = volume->SetSpeakerVolume(i);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 9)) {
+        unsigned vol(0);
+        res = volume->GetMicVolume(vol);
+        VALIDATE;
+        printf("\n Microphone Volume is %d \n", vol);
+      }
+      else if (codecinput == (noCodecs + 10)) {
+        printf("Level: ");
+        ASSERT_EQ(1, scanf("%i", &i));
+        res = volume->SetMicVolume(i);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 11)) {
+        res = file->StartPlayingFileLocally(0, audio_filename.c_str());
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 12)) {
+        // change the playout device with current call
+        int num_pd(-1);
+        res = hardware->GetNumOfPlayoutDevices(num_pd);
+        VALIDATE;
+
+        char dn[128] = { 0 };
+        char guid[128] = { 0 };
+
+        printf("\nPlayout devices (%d): \n", num_pd);
+        for (j = 0; j < num_pd; ++j) {
+          res = hardware->GetPlayoutDeviceName(j, dn, guid);
+          VALIDATE;
+          printf("  %d: %s \n", j, dn);
+        }
+        printf("Select playout device: ");
+        ASSERT_EQ(1, scanf("%d", &num_pd));
+        // Will use plughw for hardware devices
+        res = hardware->SetPlayoutDevice(num_pd);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 13)) {
+        // change the recording device with current call
+        int num_rd(-1);
+
+        res = hardware->GetNumOfRecordingDevices(num_rd);
+        VALIDATE;
+
+        char dn[128] = { 0 };
+        char guid[128] = { 0 };
+
+        printf("Recording devices (%d): \n", num_rd);
+        for (j = 0; j < num_rd; ++j) {
+          res = hardware->GetRecordingDeviceName(j, dn, guid);
+          VALIDATE;
+          printf("  %d: %s \n", j, dn);
+        }
+
+        printf("Select recording device: ");
+        ASSERT_EQ(1, scanf("%d", &num_rd));
+        printf("Setting sound devices \n");
+        // Will use plughw for hardware devices
+        res = hardware->SetRecordingDevice(num_rd);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 14)) {
+        // Remote AGC
+        AGC1 = !AGC1;
+        res = apm->SetRxAgcStatus(chan, AGC1);
+        VALIDATE;
+        if (AGC1)
+          printf("\n Receive-side AGC is now on! \n");
+        else
+          printf("\n Receive-side AGC is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 15)) {
+        // Remote NS
+        NS1 = !NS1;
+        res = apm->SetRxNsStatus(chan, NS);
+        VALIDATE;
+        if (NS1)
+          printf("\n Receive-side NS is now on! \n");
+        else
+          printf("\n Receive-side NS is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 16)) {
+        AgcModes agcmode;
+        bool enable;
+        res = apm->GetAgcStatus(enable, agcmode);
+        VALIDATE
+            printf("\n AGC enable is %d, mode is %d \n", enable, agcmode);
+      }
+      else if (codecinput == (noCodecs + 17)) {
+        // Toggle Mute on Microphone
+        res = volume->GetInputMute(chan, muted);
+        VALIDATE;
+        muted = !muted;
+        res = volume->SetInputMute(chan, muted);
+        VALIDATE;
+        if (muted)
+          printf("\n Microphone is now on mute! \n");
+        else
+          printf("\n Microphone is no longer on mute! \n");
+
+      }
+      else if (codecinput == (noCodecs + 18)) {
+        // Toggle the call on hold
+        OnHoldModes mode;
+        res = base1->GetOnHoldStatus(chan, on_hold, mode);
+        VALIDATE;
+        on_hold = !on_hold;
+        mode = kHoldSendAndPlay;
+        res = base1->SetOnHoldStatus(chan, on_hold, mode);
+        VALIDATE;
+        if (on_hold)
+          printf("\n Call now on hold! \n");
+        else
+          printf("\n Call now not on hold! \n");
+      }
+
+      else if (codecinput == (noCodecs + 19)) {
+        // Get the last error code and print to screen
+        int err_code = 0;
+        err_code = base1->LastError();
+        if (err_code != -1)
+          printf("\n The last error code was %i.\n", err_code);
+      }
+      else if (codecinput == (noCodecs + 20)) {
+        typing_detection= !typing_detection;
+        res = apm->SetTypingDetectionStatus(typing_detection);
+        VALIDATE;
+        if (typing_detection)
+          printf("\n Typing detection is now on!\n");
+        else
+          printf("\n Typing detection is now off!\n");
+      }
+      else if (codecinput == (noCodecs + 21)) {
+        int stop_record = 1;
+        int file_source = 1;
+        printf("\n Select source of recorded file. ");
+        printf("\n 1. Record from microphone to file ");
+        printf("\n 2. Record from playout to file ");
+        printf("\n Enter your selection: \n");
+        ASSERT_EQ(1, scanf("%i", &file_source));
+        if (file_source == 1) {
+          printf("\n Start recording microphone as %s \n",
+                 mic_filename.c_str());
+          res = file->StartRecordingMicrophone(mic_filename.c_str());
+          VALIDATE;
+        }
+        else {
+          printf("\n Start recording playout as %s \n", play_filename.c_str());
+          res = file->StartRecordingPlayout(chan, play_filename.c_str());
+          VALIDATE;
+        }
+        while (stop_record != 0) {
+          printf("\n Type 0 to stop recording file \n");
+          ASSERT_EQ(1, scanf("%i", &stop_record));
+        }
+        if (file_source == 1) {
+          res = file->StopRecordingMicrophone();
+          VALIDATE;
+        }
+        else {
+          res = file->StopRecordingPlayout(chan);
+          VALIDATE;
+        }
+        printf("\n File finished recording \n");
+      }
+      else if (codecinput == (noCodecs + 22)) {
+        int file_type = 1;
+        int stop_play = 1;
+        printf("\n Select a file to play locally in a loop.");
+        printf("\n 1. Play %s", mic_filename.c_str());
+        printf("\n 2. Play %s", play_filename.c_str());
+        printf("\n Enter your selection\n");
+        ASSERT_EQ(1, scanf("%i", &file_type));
+        if (file_type == 1)  {
+          printf("\n Start playing %s locally in a loop\n",
+                 mic_filename.c_str());
+          res = file->StartPlayingFileLocally(chan, mic_filename.c_str(), true);
+          VALIDATE;
+        }
+        else {
+          printf("\n Start playing %s locally in a loop\n",
+                 play_filename.c_str());
+          res = file->StartPlayingFileLocally(chan, play_filename.c_str(),
+                                              true);
+          VALIDATE;
+        }
+        while (stop_play != 0) {
+          printf("\n Type 0 to stop playing file\n");
+          ASSERT_EQ(1, scanf("%i", &stop_play));
+        }
+        res = file->StopPlayingFileLocally(chan);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 23)) {
+        int file_type = 1;
+        int stop_play = 1;
+        printf("\n Select a file to play as microphone in a loop.");
+        printf("\n 1. Play %s", mic_filename.c_str());
+        printf("\n 2. Play %s", play_filename.c_str());
+        printf("\n Enter your selection\n");
+        ASSERT_EQ(1, scanf("%i", &file_type));
+        if (file_type == 1)  {
+          printf("\n Start playing %s as mic in a loop\n",
+                 mic_filename.c_str());
+          res = file->StartPlayingFileAsMicrophone(chan, mic_filename.c_str(),
+                                                   true);
+          VALIDATE;
+        }
+        else {
+          printf("\n Start playing %s as mic in a loop\n",
+                 play_filename.c_str());
+          res = file->StartPlayingFileAsMicrophone(chan, play_filename.c_str(),
+                                                   true);
+          VALIDATE;
+        }
+        while (stop_play != 0) {
+          printf("\n Type 0 to stop playing file\n");
+          ASSERT_EQ(1, scanf("%i", &stop_play));
+        }
+        res = file->StopPlayingFileAsMicrophone(chan);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 24)) {
+        if (channel_index < kMaxNumChannels) {
+          res = base1->StartReceive(channels[channel_index]);
+          VALIDATE;
+          res = base1->StartPlayout(channels[channel_index]);
+          VALIDATE;
+          res = base1->StartSend(channels[channel_index]);
+          VALIDATE;
+          res = file->StartPlayingFileAsMicrophone(channels[channel_index],
+                                                   audio_filename.c_str(),
+                                                   true,
+                                                   false);
+          VALIDATE;
+          channel_index++;
+          printf("Using %d additional channels\n", channel_index);
+        } else {
+          printf("Max number of channels reached\n");
+        }
+      }
+      else if (codecinput == (noCodecs + 25)) {
+        if (channel_index > 0) {
+          channel_index--;
+          res = file->StopPlayingFileAsMicrophone(channels[channel_index]);
+          VALIDATE;
+          res = base1->StopSend(channels[channel_index]);
+          VALIDATE;
+          res = base1->StopPlayout(channels[channel_index]);
+          VALIDATE;
+          res = base1->StopReceive(channels[channel_index]);
+          VALIDATE;
+          printf("Using %d additional channels\n", channel_index);
+        } else {
+          printf("All additional channels stopped\n");
+        }
+      }
+      else
+        break;
+    }
+
+    if (send) {
+      printf("Stop Send \n");
+      res = base1->StopSend(chan);
+      VALIDATE;
+    }
+
+    if (receive) {
+      printf("Stop Playout \n");
+      res = base1->StopPlayout(chan);
+      VALIDATE;
+
+#ifndef EXTERNAL_TRANSPORT
+      printf("Stop Listen \n");
+      res = base1->StopReceive(chan);
+      VALIDATE;
+#endif
+    }
+
+    while (channel_index > 0) {
+      --channel_index;
+      res = file->StopPlayingFileAsMicrophone(channels[channel_index]);
+      VALIDATE;
+      res = base1->StopSend(channels[channel_index]);
+      VALIDATE;
+      res = base1->StopPlayout(channels[channel_index]);
+      VALIDATE;
+      res = base1->StopReceive(channels[channel_index]);
+      VALIDATE;
+    }
+
+    printf("\n1. New call \n");
+    printf("2. Quit \n");
+    printf("Select action: ");
+    ASSERT_EQ(1, scanf("%i", &i));
+    newcall = (1 == i);
+    // Call loop
+  }
+
+  printf("Delete channels \n");
+  res = base1->DeleteChannel(chan);
+  VALIDATE;
+
+  for (i = 0; i < kMaxNumChannels; ++i) {
+    channels[i] = base1->DeleteChannel(channels[i]);
+    VALIDATE;
+  }
+}
diff --git a/trunk/src/voice_engine/main/test/voice_engine_tests.gypi b/trunk/src/voice_engine/main/test/voice_engine_tests.gypi
new file mode 100644
index 0000000..ad78912
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/voice_engine_tests.gypi
@@ -0,0 +1,140 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # Auto test - command line test for all platforms
+    {
+      'target_name': 'voe_auto_test',
+      'type': 'executable',
+      'dependencies': [
+        'voice_engine_core',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/../test/test.gyp:test_support',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/../testing/gmock.gyp:gmock',
+      ],
+      'include_dirs': [
+        'auto_test',
+        'auto_test/fixtures',
+        '<(webrtc_root)/modules/interface',
+        # TODO(phoglund): We only depend on voice_engine_defines.h here -
+        # move that file to interface and then remove this dependency.
+        '<(webrtc_root)/voice_engine/main/source',
+        '<(webrtc_root)/modules/audio_device/main/interface',
+      ],
+      'sources': [
+        'auto_test/automated_mode.cc',
+        'auto_test/fixtures/after_initialization_fixture.cc',
+        'auto_test/fixtures/after_initialization_fixture.h',
+        'auto_test/fixtures/after_streaming_fixture.cc',
+        'auto_test/fixtures/after_streaming_fixture.h',
+        'auto_test/fixtures/before_initialization_fixture.cc',
+        'auto_test/fixtures/before_initialization_fixture.h',
+        'auto_test/standard/audio_processing_test.cc',
+        'auto_test/standard/call_report_test.cc',
+        'auto_test/standard/codec_before_streaming_test.cc',
+        'auto_test/standard/codec_test.cc',
+        'auto_test/standard/dtmf_test.cc',
+        'auto_test/standard/file_test.cc',
+        'auto_test/standard/hardware_before_initializing_test.cc',
+        'auto_test/standard/hardware_before_streaming_test.cc',
+        'auto_test/standard/hardware_test.cc',
+        'auto_test/standard/manual_hold_test.cc',
+        'auto_test/standard/neteq_test.cc',
+        'auto_test/standard/network_before_streaming_test.cc',
+        'auto_test/standard/network_test.cc',
+        'auto_test/standard/rtp_rtcp_before_streaming_test.cc',
+        'auto_test/standard/rtp_rtcp_test.cc',
+        'auto_test/standard/voe_base_misc_test.cc',
+        'auto_test/standard/volume_test.cc',
+        'auto_test/resource_manager.cc',
+        'auto_test/voe_cpu_test.cc',
+        'auto_test/voe_cpu_test.h',
+        'auto_test/voe_extended_test.cc',
+        'auto_test/voe_extended_test.h',
+        'auto_test/voe_standard_test.cc',
+        'auto_test/voe_standard_test.h',
+        'auto_test/voe_stress_test.cc',
+        'auto_test/voe_stress_test.h',
+        'auto_test/voe_test_defines.h',
+        'auto_test/voe_test_interface.h',
+        'auto_test/voe_unit_test.cc',
+        'auto_test/voe_unit_test.h',
+      ],
+    },
+    {
+      # command line test that should work on linux/mac/win
+      'target_name': 'voe_cmd_test',
+      'type': 'executable',
+      'dependencies': [
+        '<(webrtc_root)/../test/test.gyp:test_support',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        'voice_engine_core',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'sources': [
+        'cmd_test/voe_cmd_test.cc',
+      ],
+    },
+  ],
+  'conditions': [
+    ['OS=="win"', {
+      'targets': [
+        # WinTest - GUI test for Windows
+        {
+          'target_name': 'voe_ui_win_test',
+          'type': 'executable',
+          'dependencies': [
+            'voice_engine_core',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'include_dirs': [
+            'win_test',
+          ],
+          'sources': [
+            'win_test/Resource.h',
+            'win_test/WinTest.cc',
+            'win_test/WinTest.h',
+            'win_test/WinTest.rc',
+            'win_test/WinTestDlg.cc',
+            'win_test/WinTestDlg.h',
+            'win_test/res/WinTest.ico',
+            'win_test/res/WinTest.rc2',
+            'win_test/stdafx.cc',
+            'win_test/stdafx.h',
+          ],
+          'configurations': {
+            'Common_Base': {
+              'msvs_configuration_attributes': {
+                'conditions': [
+                  ['component=="shared_library"', {
+                    'UseOfMFC': '2',  # Shared DLL
+                  },{
+                    'UseOfMFC': '1',  # Static
+                  }],
+                ],
+              },
+            },
+          },
+          'msvs_settings': {
+            'VCLinkerTool': {
+              'SubSystem': '2',   # Windows
+            },
+          },
+        },
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/trunk/src/voice_engine/main/test/win_test/Resource.h b/trunk/src/voice_engine/main/test/win_test/Resource.h
new file mode 100644
index 0000000..5ae9c5f
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/Resource.h
@@ -0,0 +1,241 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by WinTest.rc
+//
+#define IDM_ABOUTBOX                    0x0010
+#define IDD_ABOUTBOX                    100
+#define IDS_ABOUTBOX                    101
+#define IDD_WINTEST_DIALOG              102
+#define IDR_MAINFRAME                   128
+#define IDD_DTMF_DIALOG                 129
+#define IDC_BUTTON_CREATE_1             1000
+#define IDC_BUTTON_DELETE_1             1001
+#define IDC_EDIT_1                      1002
+#define IDC_BUTTON_CREATE_2             1003
+#define IDC_BUTTON_DELETE_2             1004
+#define IDC_EDIT_2                      1005
+#define IDC_EDIT_MESSAGE                1006
+#define IDC_BUTTON_START_LISTEN_1       1007
+#define IDC_COMBO_IP_1                  1008
+#define IDC_EDIT_TX_PORT_1              1009
+#define IDC_EDIT_RX_PORT_1              1010
+#define IDC_COMBO_CODEC_1               1011
+#define IDC_BUTTON_STOP_LISTEN_1        1012
+#define IDC_STATIC_LISTEN               1013
+#define IDC_BUTTON_START_PLAYOUT_1      1014
+#define IDC_BUTTON_STOP_PLAYOUT_1       1015
+#define IDC_STATIC_PLAY                 1016
+#define IDC_BUTTON_START_SEND_1         1017
+#define IDC_BUTTON_STOP_SEND_1          1018
+#define IDC_STATIC_SEND                 1019
+#define IDC_COMBO_IP_2                  1020
+#define IDC_STATIC_IP                   1021
+#define IDC_STATIC_PORTS                1022
+#define IDC_STATIC_CODEC                1023
+#define IDC_STATIC_CHANNEL              1024
+#define IDC_STATIC_ID                   1025
+#define IDC_EDIT_TX_PORT_2              1026
+#define IDC_EDIT_RX_PORT_2              1027
+#define IDC_COMBO_CODEC_2               1028
+#define IDC_BUTTON_START_LISTEN_2       1029
+#define IDC_BUTTON_STOP_LISTEN_2        1030
+#define IDC_BUTTON_START_PLAYOUT_2      1031
+#define IDC_BUTTON_STOP_PLAYOUT_2       1032
+#define IDC_BUTTON_START_SEND_2         1033
+#define IDC_BUTTON_STOP_SEND_2          1034
+#define IDC_BUTTON_START_SEND_3         1035
+#define IDC_BUTTON_TEST_1_1             1035
+#define IDC_BUTTON_TEST_1               1035
+#define IDC_EDIT_RESULT                 1036
+#define IDC_EDIT_N_FAILS                1037
+#define IDC_STATIC_ERROR                1038
+#define IDC_EDIT_LAST_ERROR             1039
+#define IDC_STATIC_LAST_ERROR           1040
+#define IDC_STATIC_PLAY_FILE            1041
+#define IDC_STATIC_EXTERNAL             1042
+#define IDC_CHECK_EXT_TRANS_1           1043
+#define IDC_CHECK2                      1044
+#define IDC_CHECK_PLAY_FILE_IN_1        1044
+#define IDC_CHECK_PLAY_FILE_OUT_1       1045
+#define IDC_CHECK_PLAY_FILE_IN_2        1046
+#define IDC_CHECK_PLAY_FILE_OUT_2       1047
+#define IDC_CHECK_EXT_TRANS_2           1048
+#define IDC_STATIC_ALL_CHANNELS         1049
+#define IDC_CHECK_PLAY_FILE_IN          1050
+#define IDC_CHECK_PLAY_FILE_OUT         1051
+#define IDC_CHECK_EXT_MEDIA_IN_1        1051
+#define IDC_COMBO_REC_DEVICE            1052
+#define IDC_STATIC_REC_DEVICE           1053
+#define IDC_COMBO_PLAY_DEVICE2          1054
+#define IDC_COMBO_PLAY_DEVICE           1054
+#define IDC_STATIC_PLAY_DEVICE          1055
+#define IDC_CHECK_EXT_MEDIA_PLAY_1      1056
+#define IDC_CHECK_EXT_MEDIA_OUT_1       1056
+#define IDC_STATIC_PLAY_FILE2           1057
+#define IDC_SLIDER_INPUT_VOLUME         1058
+#define IDC_STATIC_MIC_VOLUME           1059
+#define IDC_SLIDER_OUTPUT_VOLUME        1060
+#define IDC_STATIC_SPK_VOLUME2          1061
+#define IDC_STATIC_SPK_VOLUME           1061
+#define IDC_CHECK_PLAY_FILE_IN2         1062
+#define IDC_CHECK_AGC                   1062
+#define IDC_STATIC_MIC_VOLUME2          1063
+#define IDC_STATIC_AUDIO_LEVEL_IN       1063
+#define IDC_PROGRESS_AUDIO_LEVEL_IN     1064
+#define IDC_CHECK_AGC2                  1065
+#define IDC_CHECK_NS                    1065
+#define IDC_BUTTON_1                    1065
+#define IDC_CHECK_VAD                   1066
+#define IDC_CHECK_EXT_MEDIA_IN_2        1066
+#define IDC_BUTTON_2                    1066
+#define IDC_CHECK_VAD2                  1067
+#define IDC_CHECK_EC                    1067
+#define IDC_BUTTON_3                    1067
+#define IDC_CHECK_VAD_1                 1068
+#define IDC_BUTTON_4                    1068
+#define IDC_CHECK_VAD_2                 1069
+#define IDC_CHECK_EXT_MEDIA_OUT_2       1069
+#define IDC_BUTTON_5                    1069
+#define IDC_CHECK_VAD_3                 1070
+#define IDC_BUTTON_6                    1070
+#define IDC_CHECK_MUTE_IN               1071
+#define IDC_BUTTON_7                    1071
+#define IDC_CHECK_MUTE_IN_1             1072
+#define IDC_BUTTON_8                    1072
+#define IDC_CHECK_MUTE_IN_2             1073
+#define IDC_BUTTON_9                    1073
+#define IDC_CHECK_SRTP_TX_1             1074
+#define IDC_BUTTON_10                   1074
+#define IDC_CHECK_SRTP_RX_1             1075
+#define IDC_BUTTON_11                   1075
+#define IDC_STATIC_PLAY_FILE3           1076
+#define IDC_STATIC_SRTP                 1076
+#define IDC_BUTTON_12                   1076
+#define IDC_CHECK_SRTP_TX_2             1077
+#define IDC_BUTTON_13                   1077
+#define IDC_CHECK_SRTP_RX_2             1078
+#define IDC_BUTTON_14                   1078
+#define IDC_CHECK_EXT_ENCRYPTION_1      1079
+#define IDC_BUTTON_15                   1079
+#define IDC_STATIC_PLAY_FILE4           1080
+#define IDC_BUTTON_16                   1080
+#define IDC_CHECK_EXT_ENCRYPTION_2      1081
+#define IDC_BUTTON_17                   1081
+#define IDC_BUTTON_DTMF_1               1082
+#define IDC_BUTTON_18                   1082
+#define IDC_EDIT_DTMF_EVENT             1083
+#define IDC_CHECK_REC_                  1083
+#define IDC_CHECK_REC_MIC               1083
+#define IDC_STATIC_DTMF_EVENT           1084
+#define IDC_BUTTON_DTMF_2               1084
+#define IDC_STATIC_GROUP_DTMF           1085
+#define IDC_CHECK_CONFERENCE_1          1085
+#define IDC_BUTTON_19                   1086
+#define IDC_CHECK_CONFERENCE_2          1086
+#define IDC_BUTTON_20                   1087
+#define IDC_CHECK_ON_HOLD_1             1087
+#define IDC_BUTTON_21                   1088
+#define IDC_CHECK_ON_HOLD_2             1088
+#define IDC_BUTTON_22                   1089
+#define IDC_CHECK_DTMF_PLAYOUT_RX       1089
+#define IDC_CHECK_EXT_MEDIA_IN          1089
+#define IDC_STATIC_PLAYOUT_RX           1090
+#define IDC_EDIT_GET_OUTPUT             1090
+#define IDC_CHECK_DTMF_PLAY_TONE        1091
+#define IDC_STATIC_LAST_ERROR2          1091
+#define IDC_STATIC_GET                  1091
+#define IDC_STATIC_PLAY_TONE            1092
+#define IDC_CHECK_EXT_MEDIA_OUT         1092
+#define IDC_CHECK_START_STOP_MODE       1093
+#define IDC_BUTTON_SET_TX_TELEPHONE_PT  1093
+#define IDC_PROGRESS_AUDIO_LEVEL_IN2    1093
+#define IDC_PROGRESS_AUDIO_LEVEL_OUT    1093
+#define IDC_EDIT_EVENT_LENGTH           1094
+#define IDC_EDIT_RX_PORT_3              1094
+#define IDC_EDIT_DELAY_ESTIMATE_1       1094
+#define IDC_STATIC_EVENT_LENGTH         1095
+#define IDC_EDIT_PLAYOUT_BUFFER_SIZE    1095
+#define IDC_STATIC_START_STOP_MODE      1096
+#define IDC_EDIT_EVENT_RX_PT            1096
+#define IDC_CHECK_DELAY_ESTIMATE_1      1096
+#define IDC_EDIT_EVENT_ATTENUATION      1097
+#define IDC_CHECK_AGC_1                 1097
+#define IDC_CHECK_EVENT_INBAND          1098
+#define IDC_CHECK_NS_1                  1098
+#define IDC_STATIC_EVENT_ATTENUATION    1099
+#define IDC_STATIC_SRTP2                1099
+#define IDC_STATIC_RX_VQE               1099
+#define IDC_EDIT_EVENT_TX_PT            1100
+#define IDC_CHECK_REC_MIC2              1100
+#define IDC_CHECK_REC_CALL              1100
+#define IDC_CHECK_DTMF_FEEDBACK         1101
+#define IDC_CHECK_REC_CALL2             1101
+#define IDC_CHECK_TYPING_DETECTION      1101
+#define IDC_CHECK_START_STOP_MODE2      1102
+#define IDC_CHECK_DIRECT_FEEDBACK       1102
+#define IDC_CHECK_FEC                   1102
+#define IDC_BUTTON_SET_RX_TELEPHONE_PT_TYPE 1103
+#define IDC_BUTTON_SET_RX_TELEPHONE_PT  1103
+#define IDC_BUTTON_CLEAR_ERROR_CALLBACK 1103
+#define IDC_EDIT_EVENT_CODE             1104
+#define IDC_STATIC_DIRECT_FEEDBACK      1105
+#define IDC_RADIO_SINGLE                1106
+#define IDC_RADIO_MULTI                 1107
+#define IDC_RADIO_START_STOP            1108
+#define IDC_STATIC_MODE                 1109
+#define IDC_STATIC_EVENT_RX_PT          1110
+#define IDC_STATIC_EVENT_TX_PT          1111
+#define IDC_STATIC_PT                   1112
+#define IDC_BUTTON_SEND_TELEPHONE_EVENT 1113
+#define IDC_STATIC_EVENT_CODE           1114
+#define IDC_CHECK_EVENT_DETECTION       1115
+#define IDC_CHECK_DETECT_INBAND         1116
+#define IDC_CHECK_DETECT_OUT_OF_BAND    1117
+#define IDC_STATIC_INBAND_DETECTION     1118
+#define IDC_STATIC_OUT_OF_BAND_DETECTION 1119
+#define IDC_STATIC_EVENT_DETECTION      1120
+#define IDC_STATIC_TELEPHONE_EVENTS     1121
+#define IDC_EDIT_EVENT_CODE2            1122
+#define IDC_EDIT_ON_EVENT               1122
+#define IDC_EDIT_ON_EVENT_OUT_OF_BAND   1122
+#define IDC_STATIC_ON_EVENT             1123
+#define IDC_EDIT_ON_EVENT_INBAND        1123
+#define IDC_STATIC_EVEN                 1124
+#define IDC_STATIC_LINE                 1125
+#define IDC_LIST_CODEC_1                1128
+#define IDC_EDIT2                       1129
+#define IDC_EDIT_CODEC_1                1129
+#define IDC_STATIC_PANNING              1131
+#define IDC_SLIDER_PAN_LEFT             1132
+#define IDC_SLIDER_PAN_RIGHT            1133
+#define IDC_STATIC_LEFT                 1134
+#define IDC_STATIC_LEFT2                1135
+#define IDC_STATIC_RIGHT                1135
+#define IDC_BUTTON_VERSION              1136
+#define IDC_STATIC_PLAYOUT_BUFFER       1137
+#define IDC_CHECK_RXVAD                 1138
+#define IDC_EDIT1                       1139
+#define IDC_EDIT_RXVAD                  1139
+#define IDC_STATIC_RX_PORT              1140
+#define IDC_STATIC_RX_PORT2             1141
+#define IDC_EDIT3                       1142
+#define IDC_EDIT_AUDIO_LAYER            1142
+#define IDC_EDIT_AUDIO_LAYER2           1143
+#define IDC_EDIT_CPU_LOAD               1143
+#define IDC_STATIC_ERROR_CALLBACK       1144
+#define IDC_EDIT_ERROR_CALLBACK         1145
+#define IDC_EDIT_RX_CODEC_1             1146
+#define IDC_STATIC_BYTES_SENT_TEXT      1147
+#define IDC_EDIT_RTCP_STAT              1147
+#define IDC_EDIT_RTCP_STAT_1            1147
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        130
+#define _APS_NEXT_COMMAND_VALUE         32771
+#define _APS_NEXT_CONTROL_VALUE         1148
+#define _APS_NEXT_SYMED_VALUE           101
+#endif
+#endif
diff --git a/trunk/src/voice_engine/main/test/win_test/WinTest.aps b/trunk/src/voice_engine/main/test/win_test/WinTest.aps
new file mode 100644
index 0000000..499db5f
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/WinTest.aps
Binary files differ
diff --git a/trunk/src/voice_engine/main/test/win_test/WinTest.cc b/trunk/src/voice_engine/main/test/win_test/WinTest.cc
new file mode 100644
index 0000000..e0e0248
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/WinTest.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "stdafx.h"
+#include "WinTest.h"
+#include "WinTestDlg.h"
+
+#ifdef _DEBUG
+#define new DEBUG_NEW
+#endif
+
+
+// CWinTestApp
+
+BEGIN_MESSAGE_MAP(CWinTestApp, CWinApp)
+	ON_COMMAND(ID_HELP, &CWinApp::OnHelp)
+END_MESSAGE_MAP()
+
+
+// CWinTestApp construction
+
+CWinTestApp::CWinTestApp()
+{
+}
+
+
+// The one and only CWinTestApp object
+
+CWinTestApp theApp;
+
+
+// CWinTestApp initialization
+
+BOOL CWinTestApp::InitInstance()
+{
+	// InitCommonControlsEx() is required on Windows XP if an application
+	// manifest specifies use of ComCtl32.dll version 6 or later to enable
+	// visual styles.  Otherwise, any window creation will fail.
+	INITCOMMONCONTROLSEX InitCtrls;
+	InitCtrls.dwSize = sizeof(InitCtrls);
+	// Set this to include all the common control classes you want to use
+	// in your application.
+	InitCtrls.dwICC = ICC_WIN95_CLASSES;
+	InitCommonControlsEx(&InitCtrls);
+
+	CWinApp::InitInstance();
+
+	// Standard initialization
+	// If you are not using these features and wish to reduce the size
+	// of your final executable, you should remove from the following
+	// the specific initialization routines you do not need
+	// Change the registry key under which our settings are stored
+	SetRegistryKey(_T("Local AppWizard-Generated Applications"));
+
+	CWinTestDlg dlg;
+	m_pMainWnd = &dlg;
+	INT_PTR nResponse = dlg.DoModal();
+	if (nResponse == IDOK)
+	{
+	}
+	else if (nResponse == IDCANCEL)
+	{
+	}
+
+	// Since the dialog has been closed, return FALSE so that we exit the
+	//  application, rather than start the application's message pump.
+	return FALSE;
+}
diff --git a/trunk/src/voice_engine/main/test/win_test/WinTest.h b/trunk/src/voice_engine/main/test/win_test/WinTest.h
new file mode 100644
index 0000000..d012ce6
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/WinTest.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+
+#ifndef __AFXWIN_H__
+	#error "include 'stdafx.h' before including this file for PCH"
+#endif
+
+#include "resource.h"		// main symbols
+
+
+// CWinTestApp:
+// See WinTest.cpp for the implementation of this class
+//
+
+class CWinTestApp : public CWinApp
+{
+public:
+	CWinTestApp();
+
+// Overrides
+	public:
+	virtual BOOL InitInstance();
+
+// Implementation
+
+	DECLARE_MESSAGE_MAP()
+};
+
+extern CWinTestApp theApp;
diff --git a/trunk/src/voice_engine/main/test/win_test/WinTest.rc b/trunk/src/voice_engine/main/test/win_test/WinTest.rc
new file mode 100644
index 0000000..dfe503f
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/WinTest.rc
@@ -0,0 +1,394 @@
+// Microsoft Visual C++ generated resource script.

+//

+#include "resource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// Swedish resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+#ifdef _WIN32

+LANGUAGE LANG_SWEDISH, SUBLANG_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE 

+BEGIN

+    "resource.h\0"

+END

+

+2 TEXTINCLUDE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE 

+BEGIN

+    "#define _AFX_NO_SPLITTER_RESOURCES\r\n"

+    "#define _AFX_NO_OLE_RESOURCES\r\n"

+    "#define _AFX_NO_TRACKER_RESOURCES\r\n"

+    "#define _AFX_NO_PROPERTY_RESOURCES\r\n"

+    "\r\n"

+    "#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)\r\n"

+    "LANGUAGE 29, 1\r\n"

+    "#pragma code_page(1252)\r\n"

+    "#include ""res\\WinTest.rc2""  // non-Microsoft Visual C++ edited resources\r\n"

+    "#include ""afxres.rc""     // Standard components\r\n"

+    "#endif\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Icon

+//

+

+// Icon with lowest ID value placed first to ensure application icon

+// remains consistent on all systems.

+IDR_MAINFRAME           ICON                    "res\\WinTest.ico"

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_ABOUTBOX DIALOGEX 0, 0, 235, 55

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "About WinTest"

+FONT 8, "MS Shell Dlg", 0, 0, 0x1

+BEGIN

+    ICON            IDR_MAINFRAME,IDC_STATIC,11,17,20,20

+    LTEXT           "WinTest Version 1.0",IDC_STATIC,40,10,119,8,SS_NOPREFIX

+    LTEXT           "Copyright (C) 2010",IDC_STATIC,40,25,119,8

+    DEFPUSHBUTTON   "OK",IDOK,178,7,50,16,WS_GROUP

+END

+

+IDD_WINTEST_DIALOG DIALOGEX 0, 0, 796, 278

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_MINIMIZEBOX | WS_POPUP | WS_VISIBLE | WS_CAPTION | WS_SYSMENU

+EXSTYLE WS_EX_APPWINDOW

+CAPTION "WinTest"

+FONT 8, "MS Shell Dlg", 0, 0, 0x1

+BEGIN

+    PUSHBUTTON      "Create",IDC_BUTTON_CREATE_1,28,24,32,14

+    PUSHBUTTON      "Delete",IDC_BUTTON_DELETE_1,28,40,32,14

+    EDITTEXT        IDC_EDIT_1,6,32,18,14,ES_AUTOHSCROLL | ES_READONLY

+    PUSHBUTTON      "Create",IDC_BUTTON_CREATE_2,28,72,32,14

+    PUSHBUTTON      "Delete",IDC_BUTTON_DELETE_2,28,88,32,14

+    EDITTEXT        IDC_EDIT_2,6,82,18,14,ES_AUTOHSCROLL | ES_READONLY

+    EDITTEXT        IDC_EDIT_MESSAGE,28,244,764,12,ES_AUTOHSCROLL

+    COMBOBOX        IDC_COMBO_IP_1,64,24,76,30,CBS_DROPDOWN | CBS_SORT | WS_VSCROLL | WS_TABSTOP

+    EDITTEXT        IDC_EDIT_TX_PORT_1,144,24,28,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_RX_PORT_1,144,40,28,14,ES_AUTOHSCROLL

+    COMBOBOX        IDC_COMBO_CODEC_1,176,24,76,156,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    PUSHBUTTON      "Start",IDC_BUTTON_START_LISTEN_1,256,24,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_LISTEN_1,256,40,32,14

+    LTEXT           "Receive",IDC_STATIC_LISTEN,262,8,26,8

+    PUSHBUTTON      "Start",IDC_BUTTON_START_PLAYOUT_1,292,24,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_PLAYOUT_1,292,40,32,14

+    LTEXT           "Playout",IDC_STATIC_PLAY,295,8,25,8

+    PUSHBUTTON      "Start",IDC_BUTTON_START_SEND_1,328,24,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_SEND_1,328,40,32,14

+    LTEXT           "Send",IDC_STATIC_SEND,335,8,17,8

+    COMBOBOX        IDC_COMBO_IP_2,64,72,76,30,CBS_DROPDOWN | CBS_SORT | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Destination IP address",IDC_STATIC_IP,64,8,73,8

+    LTEXT           "Ports",IDC_STATIC_PORTS,145,8,18,8

+    LTEXT           "Codec",IDC_STATIC_CODEC,177,8,21,8

+    LTEXT           "Channel",IDC_STATIC_CHANNEL,30,8,27,8

+    LTEXT           "ID",IDC_STATIC_ID,12,8,8,8

+    EDITTEXT        IDC_EDIT_TX_PORT_2,144,72,28,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_RX_PORT_2,144,88,28,14,ES_AUTOHSCROLL

+    COMBOBOX        IDC_COMBO_CODEC_2,176,72,76,156,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    PUSHBUTTON      "Start",IDC_BUTTON_START_LISTEN_2,256,72,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_LISTEN_2,256,88,32,14

+    PUSHBUTTON      "Start",IDC_BUTTON_START_PLAYOUT_2,292,72,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_PLAYOUT_2,292,88,32,14

+    PUSHBUTTON      "Start",IDC_BUTTON_START_SEND_2,328,72,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_SEND_2,328,88,32,14

+    PUSHBUTTON      "TEST 1",IDC_BUTTON_TEST_1,756,224,36,14

+    LTEXT           "API",IDC_STATIC,4,247,12,8

+    EDITTEXT        IDC_EDIT_RESULT,28,260,96,12,ES_AUTOHSCROLL

+    LTEXT           "Result",IDC_STATIC,3,263,21,8

+    EDITTEXT        IDC_EDIT_N_FAILS,156,260,30,12,ES_AUTOHSCROLL

+    LTEXT           "#Fails",IDC_STATIC_ERROR,132,263,20,8

+    EDITTEXT        IDC_EDIT_LAST_ERROR,228,260,36,12,ES_AUTOHSCROLL

+    LTEXT           "Last Error",IDC_STATIC_LAST_ERROR,192,262,32,8

+    LTEXT           "Ext. Trans.",IDC_STATIC_EXTERNAL,361,8,37,8

+    CONTROL         "",IDC_CHECK_EXT_TRANS_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,371,33,16,10

+    CONTROL         "In",IDC_CHECK_PLAY_FILE_IN_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,24,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "Play File",IDC_STATIC_PLAY_FILE,401,8,27,8

+    CONTROL         "Out",IDC_CHECK_PLAY_FILE_OUT_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,40,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "In",IDC_CHECK_PLAY_FILE_IN_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Out",IDC_CHECK_PLAY_FILE_OUT_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_EXT_TRANS_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,371,82,16,10

+    GROUPBOX        "",IDC_STATIC_ALL_CHANNELS,6,107,662,113

+    CONTROL         "PlayFileAsMic",IDC_CHECK_PLAY_FILE_IN,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,122,60,14,WS_EX_DLGMODALFRAME

+    COMBOBOX        IDC_COMBO_REC_DEVICE,12,132,184,80,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Recording device",IDC_STATIC_REC_DEVICE,12,120,56,8

+    COMBOBOX        IDC_COMBO_PLAY_DEVICE,12,180,184,80,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Playout device",IDC_STATIC_PLAY_DEVICE,12,167,56,8

+    CONTROL         "In",IDC_CHECK_EXT_MEDIA_IN_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Out",IDC_CHECK_EXT_MEDIA_OUT_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,40,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "Ext. Media",IDC_STATIC_PLAY_FILE2,437,8,35,8

+    CONTROL         "",IDC_SLIDER_INPUT_VOLUME,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,196,130,72,15

+    LTEXT           "Microphone Volume",IDC_STATIC_MIC_VOLUME,202,120,62,8

+    CONTROL         "",IDC_SLIDER_OUTPUT_VOLUME,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,196,179,72,15

+    LTEXT           "Speaker Volume",IDC_STATIC_SPK_VOLUME,202,167,52,8

+    CONTROL         "AGC",IDC_CHECK_AGC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,316,122,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_PROGRESS_AUDIO_LEVEL_IN,"msctls_progress32",WS_BORDER,268,135,42,6

+    LTEXT           "Audio Level",IDC_STATIC_AUDIO_LEVEL_IN,271,120,38,8,NOT WS_GROUP

+    CONTROL         "NS",IDC_CHECK_NS,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,316,142,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "EC",IDC_CHECK_EC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,356,122,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "VAD",IDC_CHECK_VAD_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "In",IDC_CHECK_EXT_MEDIA_IN_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Out",IDC_CHECK_EXT_MEDIA_OUT_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "VAD",IDC_CHECK_VAD_3,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Mute",IDC_CHECK_MUTE_IN,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,356,142,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Mute",IDC_CHECK_MUTE_IN_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,40,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Mute",IDC_CHECK_MUTE_IN_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "TX",IDC_CHECK_SRTP_TX_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "RX",IDC_CHECK_SRTP_RX_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,40,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "SRTP",IDC_STATIC_SRTP,525,8,18,8

+    CONTROL         "TX",IDC_CHECK_SRTP_TX_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "RX",IDC_CHECK_SRTP_RX_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_EXT_ENCRYPTION_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,564,33,16,10

+    LTEXT           "Encrypt",IDC_STATIC_PLAY_FILE4,556,8,26,8

+    CONTROL         "",IDC_CHECK_EXT_ENCRYPTION_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,564,82,16,10

+    PUSHBUTTON      "DTMF>>",IDC_BUTTON_DTMF_1,584,24,36,14

+    CONTROL         "RecMicToFile",IDC_CHECK_REC_MIC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,142,60,14,WS_EX_DLGMODALFRAME

+    PUSHBUTTON      "DTMF>>",IDC_BUTTON_DTMF_2,584,72,36,14

+    CONTROL         "Conf",IDC_CHECK_CONFERENCE_1,"Button",BS_AUTOCHECKBOX | NOT WS_VISIBLE | WS_TABSTOP,584,40,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Conf",IDC_CHECK_CONFERENCE_2,"Button",BS_AUTOCHECKBOX | NOT WS_VISIBLE | WS_TABSTOP,584,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Hold",IDC_CHECK_ON_HOLD_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,708,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Hold",IDC_CHECK_ON_HOLD_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,708,72,36,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_GET_OUTPUT,292,260,500,12,ES_AUTOHSCROLL

+    LTEXT           "Get",IDC_STATIC_GET,276,262,12,8

+    CONTROL         "Ext. Media",IDC_CHECK_EXT_MEDIA_IN,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,460,122,52,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Ext. Media",IDC_CHECK_EXT_MEDIA_OUT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,460,180,52,14,WS_EX_DLGMODALFRAME

+    LISTBOX         IDC_LIST_CODEC_1,208,40,44,28,LBS_NOINTEGRALHEIGHT | NOT WS_BORDER | WS_VSCROLL | WS_TABSTOP,WS_EX_CLIENTEDGE

+    EDITTEXT        IDC_EDIT_CODEC_1,176,40,28,14,ES_AUTOHSCROLL

+    CONTROL         "",IDC_PROGRESS_AUDIO_LEVEL_OUT,"msctls_progress32",WS_BORDER,268,184,42,6

+    LTEXT           "Panning",IDC_STATIC_PANNING,328,167,26,8

+    CONTROL         "",IDC_SLIDER_PAN_LEFT,"msctls_trackbar32",TBS_VERT | TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,328,175,12,28

+    CONTROL         "",IDC_SLIDER_PAN_RIGHT,"msctls_trackbar32",TBS_VERT | TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,344,175,12,28

+    LTEXT           "L",IDC_STATIC_LEFT,332,200,8,8

+    LTEXT           "R",IDC_STATIC_RIGHT,347,201,8,8

+    PUSHBUTTON      "Version",IDC_BUTTON_VERSION,624,200,36,14

+    EDITTEXT        IDC_EDIT_PLAYOUT_BUFFER_SIZE,363,181,28,12,ES_CENTER | ES_AUTOHSCROLL | ES_READONLY | NOT WS_TABSTOP

+    LTEXT           "Buffer Size",IDC_STATIC_PLAYOUT_BUFFER,361,167,36,8

+    CONTROL         "Delay",IDC_CHECK_DELAY_ESTIMATE_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,624,24,36,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_DELAY_ESTIMATE_1,631,40,24,14,ES_CENTER | ES_AUTOHSCROLL | ES_READONLY | NOT WS_TABSTOP

+    CONTROL         "RxVAD",IDC_CHECK_RXVAD,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,664,24,40,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_RXVAD,671,40,24,14,ES_CENTER | ES_AUTOHSCROLL | ES_READONLY

+    CONTROL         "AGC",IDC_CHECK_AGC_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,748,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "NS",IDC_CHECK_NS_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,748,40,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "RX VQE",IDC_STATIC_RX_VQE,753,8,25,8

+    CONTROL         "RecordCall",IDC_CHECK_REC_CALL,"Button",BS_AUTOCHECKBOX | NOT WS_VISIBLE | WS_TABSTOP,517,156,52,14,WS_EX_DLGMODALFRAME

+    LTEXT           "RX",IDC_STATIC_RX_PORT,133,42,10,8

+    LTEXT           "RX",IDC_STATIC_RX_PORT2,133,91,10,8

+    CONTROL         "TypingDetect",IDC_CHECK_TYPING_DETECTION,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,572,156,60,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_AUDIO_LAYER,28,224,116,14,ES_AUTOHSCROLL | ES_READONLY

+    EDITTEXT        IDC_EDIT_CPU_LOAD,152,224,116,14,ES_AUTOHSCROLL | ES_READONLY

+    CONTROL         "FEC",IDC_CHECK_FEC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,176,55,28,14,WS_EX_DLGMODALFRAME

+    LTEXT           "=> Callbacks",IDC_STATIC_ERROR_CALLBACK,283,226,43,8

+    EDITTEXT        IDC_EDIT_ERROR_CALLBACK,328,224,312,14,ES_AUTOHSCROLL

+    PUSHBUTTON      "Clear",IDC_BUTTON_CLEAR_ERROR_CALLBACK,644,224,24,14

+    EDITTEXT        IDC_EDIT_RX_CODEC_1,256,56,216,12,ES_AUTOHSCROLL | ES_READONLY

+    EDITTEXT        IDC_EDIT_RTCP_STAT_1,476,56,316,12,ES_AUTOHSCROLL | ES_READONLY

+END

+

+IDD_DTMF_DIALOG DIALOGEX 0, 0, 316, 212

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Telehone Events"

+FONT 8, "MS Shell Dlg", 400, 0, 0x1

+BEGIN

+    DEFPUSHBUTTON   "OK",IDOK,260,192,50,14

+    PUSHBUTTON      "1",IDC_BUTTON_1,16,20,16,14

+    PUSHBUTTON      "2",IDC_BUTTON_2,36,20,16,14

+    PUSHBUTTON      "3",IDC_BUTTON_3,56,20,16,14

+    PUSHBUTTON      "4",IDC_BUTTON_4,16,36,16,14

+    PUSHBUTTON      "5",IDC_BUTTON_5,36,36,16,14

+    PUSHBUTTON      "6",IDC_BUTTON_6,56,36,16,14

+    PUSHBUTTON      "7",IDC_BUTTON_7,16,52,16,14

+    PUSHBUTTON      "8",IDC_BUTTON_8,36,52,16,14

+    PUSHBUTTON      "9",IDC_BUTTON_9,56,52,16,14

+    PUSHBUTTON      "*",IDC_BUTTON_10,16,68,16,14

+    PUSHBUTTON      "0",IDC_BUTTON_11,36,68,16,14

+    PUSHBUTTON      "#",IDC_BUTTON_12,56,68,16,14

+    PUSHBUTTON      "A",IDC_BUTTON_13,76,20,16,14

+    PUSHBUTTON      "B",IDC_BUTTON_14,76,36,16,14

+    PUSHBUTTON      "C",IDC_BUTTON_15,76,52,16,14

+    PUSHBUTTON      "D",IDC_BUTTON_16,76,68,16,14

+    EDITTEXT        IDC_EDIT_DTMF_EVENT,56,90,16,12,ES_AUTOHSCROLL | ES_READONLY

+    LTEXT           "Event code",IDC_STATIC_DTMF_EVENT,17,91,37,8

+    PUSHBUTTON      "1",IDC_BUTTON_17,16,20,16,14

+    PUSHBUTTON      "2",IDC_BUTTON_18,36,20,16,14

+    PUSHBUTTON      "3",IDC_BUTTON_19,56,20,16,14

+    PUSHBUTTON      "4",IDC_BUTTON_20,16,36,16,14

+    PUSHBUTTON      "A",IDC_BUTTON_21,76,20,16,14

+    GROUPBOX        "DTMF Events",IDC_STATIC_GROUP_DTMF,4,4,188,132

+    CONTROL         "",IDC_CHECK_DTMF_PLAYOUT_RX,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,160,21,12,14

+    LTEXT           "Play out-band RX",IDC_STATIC_PLAYOUT_RX,101,24,56,8

+    CONTROL         "",IDC_CHECK_DTMF_PLAY_TONE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,160,39,12,14

+    LTEXT           "Play tone locally",IDC_STATIC_PLAY_TONE,101,41,52,8

+    EDITTEXT        IDC_EDIT_EVENT_LENGTH,44,163,28,14,ES_AUTOHSCROLL

+    LTEXT           "Duration",IDC_STATIC_EVENT_LENGTH,12,165,28,8

+    EDITTEXT        IDC_EDIT_EVENT_ATTENUATION,44,183,28,14,ES_AUTOHSCROLL

+    LTEXT           "Volume",IDC_STATIC_EVENT_ATTENUATION,12,186,24,8

+    CONTROL         "Inband",IDC_CHECK_EVENT_INBAND,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,84,163,40,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Feedback",IDC_CHECK_DTMF_FEEDBACK,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,16,112,48,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_DIRECT_FEEDBACK,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,96,112,12,14

+    LTEXT           "Direct",IDC_STATIC_DIRECT_FEEDBACK,72,115,20,8

+    CONTROL         "Single",IDC_RADIO_SINGLE,"Button",BS_AUTORADIOBUTTON | WS_GROUP,112,68,35,10

+    CONTROL         "Sequence",IDC_RADIO_MULTI,"Button",BS_AUTORADIOBUTTON,112,80,47,10

+    CONTROL         "Start/Stop",IDC_RADIO_START_STOP,"Button",BS_AUTORADIOBUTTON,112,92,49,10

+    GROUPBOX        "Mode",IDC_STATIC_MODE,100,56,68,52

+    EDITTEXT        IDC_EDIT_EVENT_RX_PT,220,20,24,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_EVENT_TX_PT,220,41,24,14,ES_AUTOHSCROLL

+    LTEXT           "RX",IDC_STATIC_EVENT_RX_PT,208,22,10,8

+    LTEXT           "TX",IDC_STATIC_EVENT_TX_PT,208,42,9,8

+    PUSHBUTTON      "Set",IDC_BUTTON_SET_TX_TELEPHONE_PT,248,41,24,14

+    PUSHBUTTON      "Set",IDC_BUTTON_SET_RX_TELEPHONE_PT,248,20,24,14

+    GROUPBOX        "Payload Type",IDC_STATIC_PT,200,4,80,56

+    EDITTEXT        IDC_EDIT_EVENT_CODE,128,163,28,14,ES_AUTOHSCROLL

+    LTEXT           "Event code",IDC_STATIC_EVENT_CODE,125,152,37,8

+    PUSHBUTTON      "Send",IDC_BUTTON_SEND_TELEPHONE_EVENT,160,163,24,14

+    CONTROL         "On/Off",IDC_CHECK_EVENT_DETECTION,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,208,80,40,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_DETECT_INBAND,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,208,100,12,14

+    CONTROL         "",IDC_CHECK_DETECT_OUT_OF_BAND,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,208,116,12,14

+    LTEXT           "Inband",IDC_STATIC_INBAND_DETECTION,220,103,24,8

+    LTEXT           "Outband",IDC_STATIC_OUT_OF_BAND_DETECTION,220,120,29,8

+    GROUPBOX        "Event Detection",IDC_STATIC_EVENT_DETECTION,200,68,108,68

+    GROUPBOX        "Telephone Events",IDC_STATIC_TELEPHONE_EVENTS,4,140,188,64

+    EDITTEXT        IDC_EDIT_ON_EVENT_OUT_OF_BAND,252,117,48,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_ON_EVENT_INBAND,252,101,48,14,ES_AUTOHSCROLL

+    LTEXT           "=> Detections",IDC_STATIC_EVEN,253,90,48,8

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Version

+//

+

+VS_VERSION_INFO VERSIONINFO

+ FILEVERSION 1,0,0,0

+ PRODUCTVERSION 1,0,0,0

+ FILEFLAGSMASK 0x3fL

+#ifdef _DEBUG

+ FILEFLAGS 0x1L

+#else

+ FILEFLAGS 0x0L

+#endif

+ FILEOS 0x4L

+ FILETYPE 0x1L

+ FILESUBTYPE 0x0L

+BEGIN

+    BLOCK "StringFileInfo"

+    BEGIN

+        BLOCK "040904e4"

+        BEGIN

+            VALUE "FileDescription", "WebRTC VoiceEngine Test"

+            VALUE "FileVersion", "1.0.0.0"

+            VALUE "InternalName", "WinTest.exe"

+            VALUE "LegalCopyright", "Copyright (c) 2011 The WebRTC project authors. All Rights Reserved."

+            VALUE "OriginalFilename", "WinTest.exe"

+            VALUE "ProductName", "WebRTC VoiceEngine"

+            VALUE "ProductVersion", "1.0.0.0"

+        END

+    END

+    BLOCK "VarFileInfo"

+    BEGIN

+        VALUE "Translation", 0x409, 1252

+    END

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_ABOUTBOX, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 228

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 48

+    END

+

+    IDD_WINTEST_DIALOG, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 789

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 271

+    END

+

+    IDD_DTMF_DIALOG, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 309

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 205

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// String Table

+//

+

+STRINGTABLE 

+BEGIN

+    IDS_ABOUTBOX            "&About WinTest..."

+END

+

+#endif    // Swedish resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+#define _AFX_NO_SPLITTER_RESOURCES

+#define _AFX_NO_OLE_RESOURCES

+#define _AFX_NO_TRACKER_RESOURCES

+#define _AFX_NO_PROPERTY_RESOURCES

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+LANGUAGE 29, 1

+#pragma code_page(1252)

+#include "res\WinTest.rc2"  // non-Microsoft Visual C++ edited resources

+#include "afxres.rc"     // Standard components

+#endif

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/trunk/src/voice_engine/main/test/win_test/WinTestDlg.cc b/trunk/src/voice_engine/main/test/win_test/WinTestDlg.cc
new file mode 100644
index 0000000..de54802
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/WinTestDlg.cc
@@ -0,0 +1,3573 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include "stdafx.h"
+#include "WinTest.h"
+#include "WinTestDlg.h"
+
+#ifdef _DEBUG
+#define new DEBUG_NEW
+#endif
+
+using namespace webrtc;
+
+unsigned char key[30] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
+
+// Hack to convert char to TCHAR, using two buffers to be able to
+// call twice in the same statement
+TCHAR convertTemp1[256] = {0};
+TCHAR convertTemp2[256] = {0};
+bool convertBufferSwitch(false);
+TCHAR* CharToTchar(const char* str, int len)
+{
+#ifdef _UNICODE
+  TCHAR* temp = convertBufferSwitch ? convertTemp1 : convertTemp2;
+  convertBufferSwitch = !convertBufferSwitch;
+  memset(temp, 0, sizeof(convertTemp1));
+  MultiByteToWideChar(CP_UTF8, 0, str, len, temp, 256);
+  return temp;
+#else
+  return str;
+#endif
+}
+
+// Hack to convert TCHAR to char
+char convertTemp3[256] = {0};
+char* TcharToChar(TCHAR* str, int len)
+{
+#ifdef _UNICODE
+  memset(convertTemp3, 0, sizeof(convertTemp3));
+  WideCharToMultiByte(CP_UTF8, 0, str, len, convertTemp3, 256, 0, 0);
+  return convertTemp3;
+#else
+  return str;
+#endif
+}
+
+// ----------------------------------------------------------------------------
+//    VoEConnectionObserver
+// ----------------------------------------------------------------------------
+
+class ConnectionObserver : public  VoEConnectionObserver
+{
+public:
+    ConnectionObserver();
+    virtual void OnPeriodicDeadOrAlive(const int channel, const bool alive);
+};
+
+ConnectionObserver::ConnectionObserver()
+{
+}
+
+void ConnectionObserver::OnPeriodicDeadOrAlive(const int channel, const bool alive)
+{
+    CString str;
+    str.Format(_T("OnPeriodicDeadOrAlive(channel=%d) => alive=%d"), channel, alive);
+    OutputDebugString(str);
+}
+
+// ----------------------------------------------------------------------------
+//    VoiceEngineObserver
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::CallbackOnError(const int channel, const int errCode)
+{
+    _nErrorCallbacks++;
+
+    CString str;
+    str.Format(_T("[#%d] CallbackOnError(channel=%d) => errCode = %d"), _nErrorCallbacks, channel, errCode);
+    if (errCode == VE_RECEIVE_PACKET_TIMEOUT)
+    {
+        str += _T(" <=> VE_RECEIVE_PACKET_TIMEOUT");
+    }
+    else if (errCode == VE_PACKET_RECEIPT_RESTARTED)
+    {
+        str += _T(" <=> VE_PACKET_RECEIPT_RESTARTED");
+    }
+    else if (errCode == VE_RUNTIME_PLAY_WARNING)
+    {
+        str += _T(" <=> VE_RUNTIME_PLAY_WARNING");
+    }
+    else if (errCode == VE_RUNTIME_REC_WARNING)
+    {
+        str += _T(" <=> VE_RUNTIME_REC_WARNING");
+    }
+    else if (errCode == VE_RUNTIME_PLAY_ERROR)
+    {
+        str += _T(" <=> VE_RUNTIME_PLAY_ERROR");
+    }
+    else if (errCode == VE_RUNTIME_REC_ERROR)
+    {
+        str += _T(" <=> VE_RUNTIME_REC_ERROR");
+    }
+    else if (errCode == VE_SATURATION_WARNING)
+    {
+        str += _T(" <=> VE_SATURATION_WARNING");
+    }
+    else if (errCode == VE_TYPING_NOISE_WARNING)
+    {
+        str += _T(" <=> VE_TYPING_NOISE_WARNING");
+    }
+    else if (errCode == VE_REC_DEVICE_REMOVED)
+    {
+        str += _T(" <=> VE_REC_DEVICE_REMOVED");
+    }
+    // AfxMessageBox((LPCTSTR)str, MB_OK);
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, (LPCTSTR)str);
+}
+
+// ----------------------------------------------------------------------------
+//    VoERTPObserver
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::OnIncomingCSRCChanged(const int channel, const unsigned int CSRC, const bool added)
+{
+    CString str;
+    str.Format(_T("OnIncomingCSRCChanged(channel=%d) => CSRC=%u, added=%d"), channel, CSRC, added);
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, (LPCTSTR)str);
+}
+
+void CWinTestDlg::OnIncomingSSRCChanged(const int channel, const unsigned int SSRC)
+{
+    CString str;
+    str.Format(_T("OnIncomingSSRCChanged(channel=%d) => SSRC=%u"), channel, SSRC);
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, (LPCTSTR)str);
+}
+
+// ----------------------------------------------------------------------------
+//    Transport
+// ----------------------------------------------------------------------------
+
+class MyTransport : public Transport
+{
+public:
+    MyTransport(VoENetwork* veNetwork);
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+private:
+    VoENetwork* _veNetworkPtr;
+};
+
+MyTransport::MyTransport(VoENetwork* veNetwork) :
+    _veNetworkPtr(veNetwork)
+{
+}
+
+int
+MyTransport::SendPacket(int channel, const void *data, int len)
+{
+    _veNetworkPtr->ReceivedRTPPacket(channel, data, len);
+    return len;
+}
+
+int
+MyTransport::SendRTCPPacket(int channel, const void *data, int len)
+{
+    _veNetworkPtr->ReceivedRTCPPacket(channel, data, len);
+    return len;
+}
+
+// ----------------------------------------------------------------------------
+//    VoEMediaProcess
+// ----------------------------------------------------------------------------
+
+class MediaProcessImpl : public VoEMediaProcess
+{
+public:
+    MediaProcessImpl();
+    virtual void Process(const int channel,
+                         const ProcessingTypes type,
+                         WebRtc_Word16 audio_10ms[],
+                         const int length,
+                         const int samplingFreqHz,
+                         const bool stereo);
+};
+
+MediaProcessImpl::MediaProcessImpl()
+{
+}
+
+void MediaProcessImpl::Process(const int channel,
+                               const ProcessingTypes type,
+                               WebRtc_Word16 audio_10ms[],
+                               const int length,
+                               const int samplingFreqHz,
+                               const bool stereo)
+{
+    int x = rand() % 100;
+
+    for (int i = 0; i < length; i++)
+    {
+        if (channel == -1)
+        {
+            if (type == kPlaybackAllChannelsMixed)
+            {
+                // playout: scale up
+                if (!stereo)
+                {
+                    audio_10ms[i] = (audio_10ms[i] << 2);
+                }
+                else
+                {
+                    audio_10ms[2*i] = (audio_10ms[2*i] << 2);
+                    audio_10ms[2*i+1] = (audio_10ms[2*i+1] << 2);
+                }
+            }
+            else
+            {
+                // recording: emulate packet loss by "dropping" 10% of the packets
+                if (x >= 0 && x < 10)
+                {
+                    if (!stereo)
+                    {
+                        audio_10ms[i] = 0;
+                    }
+                    else
+                    {
+                        audio_10ms[2*i] = 0;
+                        audio_10ms[2*i+1] = 0;
+                    }
+                }
+            }
+        }
+        else
+        {
+            if (type == kPlaybackPerChannel)
+            {
+                // playout: mute
+                if (!stereo)
+                {
+                    audio_10ms[i] = 0;
+                }
+                else
+                {
+                    audio_10ms[2*i] = 0;
+                    audio_10ms[2*i+1] = 0;
+                }
+            }
+            else
+            {
+                // recording: emulate packet loss by "dropping" 50% of the packets
+                if (x >= 0 && x < 50)
+                {
+                    if (!stereo)
+                    {
+                        audio_10ms[i] = 0;
+                    }
+                    else
+                    {
+                        audio_10ms[2*i] = 0;
+                        audio_10ms[2*i+1] = 0;
+                    }
+                }
+            }
+        }
+    }
+}
+
+// ----------------------------------------------------------------------------
+//    Encryptionen
+// ----------------------------------------------------------------------------
+
+class MyEncryption : public Encryption
+{
+public:
+    void encrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+    void decrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+    void encrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+    void decrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+};
+
+void MyEncryption::encrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    // --- Stereo emulation (sample based, 2 bytes per sample)
+
+    const int nBytesPayload = bytes_in-12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    short* ptrIn = (short*) &in_data[12];
+    short* ptrOut = (short*) &out_data[12];
+
+    // network byte order
+    for (int i = 0; i < nBytesPayload/2; i++)
+    {
+        // produce two output samples for each input sample
+        *ptrOut++ = *ptrIn; // left sample
+        *ptrOut++ = *ptrIn; // right sample
+        ptrIn++;
+    }
+
+    *bytes_out = 12 + 2*nBytesPayload;
+
+    /*
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] =~ in_data[i];
+    *bytes_out = bytes_in;
+    */
+}
+
+void MyEncryption::decrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    // Do nothing (<=> memcpy)
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] = in_data[i];
+    *bytes_out = bytes_in;
+}
+
+void MyEncryption::encrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] =~ in_data[i];
+    *bytes_out = bytes_in;
+}
+
+void MyEncryption::decrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] =~ in_data[i];
+    *bytes_out = bytes_in;
+}
+
+// ----------------------------------------------------------------------------
+//    TelephoneEventObserver
+// ----------------------------------------------------------------------------
+
+class TelephoneEventObserver: public VoETelephoneEventObserver
+{
+public:
+    TelephoneEventObserver(CWnd* editControlOut, CWnd* editControlIn);
+    virtual void OnReceivedTelephoneEventInband(int channel, int eventCode,
+                                                bool endOfEvent);
+    virtual void OnReceivedTelephoneEventOutOfBand(int channel, int eventCode,
+                                                   bool endOfEvent);
+private:
+    CWnd* _editControlOutPtr;
+    CWnd* _editControlInPtr;
+};
+
+TelephoneEventObserver::TelephoneEventObserver(CWnd* editControlOut, CWnd* editControlIn) :
+    _editControlOutPtr(editControlOut),
+    _editControlInPtr(editControlIn)
+{
+}
+
+void TelephoneEventObserver::OnReceivedTelephoneEventInband(int channel,
+                                                            int eventCode,
+                                                            bool endOfEvent)
+{
+    CString msg;
+    if (endOfEvent)
+    {
+        msg.AppendFormat(_T("%d [END]"), eventCode);
+        _editControlInPtr->SetWindowText((LPCTSTR)msg);
+    }
+    else
+    {
+        msg.AppendFormat(_T("%d [START]"), eventCode);
+        _editControlInPtr->SetWindowText((LPCTSTR)msg);
+    }
+}
+
+void TelephoneEventObserver::OnReceivedTelephoneEventOutOfBand(int channel,
+                                                               int eventCode,
+                                                               bool endOfEvent)
+{
+    CString msg;
+    if (endOfEvent)
+    {
+        msg.AppendFormat(_T("%d [END]"), eventCode);
+        _editControlOutPtr->SetWindowText((LPCTSTR)msg);
+    }
+    else
+    {
+        msg.AppendFormat(_T("%d [START]"), eventCode);
+        _editControlOutPtr->SetWindowText((LPCTSTR)msg);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//    RxVadCallback
+// ----------------------------------------------------------------------------
+
+class RxCallback : public VoERxVadCallback
+{
+public:
+    RxCallback() : _vadDecision(-1) {};
+
+    virtual void OnRxVad(int , int vadDecision)
+    {
+        _vadDecision = vadDecision;
+    }
+
+    int _vadDecision;
+};
+
+// ----------------------------------------------------------------------------
+//                                 CAboutDlg dialog
+// ----------------------------------------------------------------------------
+
+class CAboutDlg : public CDialog
+{
+public:
+    CAboutDlg();
+
+// Dialog Data
+    enum { IDD = IDD_ABOUTBOX };
+
+    protected:
+    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+
+// Implementation
+protected:
+    DECLARE_MESSAGE_MAP()
+};
+
+CAboutDlg::CAboutDlg() : CDialog(CAboutDlg::IDD)
+{
+}
+
+void CAboutDlg::DoDataExchange(CDataExchange* pDX)
+{
+    CDialog::DoDataExchange(pDX);
+}
+
+BEGIN_MESSAGE_MAP(CAboutDlg, CDialog)
+END_MESSAGE_MAP()
+
+// ----------------------------------------------------------------------------
+//                               CTelephonyEvent dialog
+// ----------------------------------------------------------------------------
+
+class CTelephonyEvent : public CDialog
+{
+    DECLARE_DYNAMIC(CTelephonyEvent)
+
+public:
+    CTelephonyEvent(VoiceEngine* voiceEngine, int channel, CDialog* pParentDialog, CWnd* pParent = NULL);   // standard constructor
+    virtual ~CTelephonyEvent();
+
+// Dialog Data
+    enum { IDD = IDD_DTMF_DIALOG };
+
+protected:
+    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+    virtual BOOL OnInitDialog();
+
+    DECLARE_MESSAGE_MAP()
+public:
+    afx_msg void OnBnClickedButton1();
+    afx_msg void OnBnClickedButton2();
+    afx_msg void OnBnClickedButton3();
+    afx_msg void OnBnClickedButton4();
+    afx_msg void OnBnClickedButton5();
+    afx_msg void OnBnClickedButton6();
+    afx_msg void OnBnClickedButton7();
+    afx_msg void OnBnClickedButton8();
+    afx_msg void OnBnClickedButton9();
+    afx_msg void OnBnClickedButton10();
+    afx_msg void OnBnClickedButton11();
+    afx_msg void OnBnClickedButton12();
+    afx_msg void OnBnClickedButtonA();
+    afx_msg void OnBnClickedButtonB();
+    afx_msg void OnBnClickedButtonC();
+    afx_msg void OnBnClickedButtonD();
+    afx_msg void OnBnClickedCheckDtmfPlayoutRx();
+    afx_msg void OnBnClickedCheckDtmfPlayTone();
+    afx_msg void OnBnClickedCheckStartStopMode();
+    afx_msg void OnBnClickedCheckEventInband();
+    afx_msg void OnBnClickedCheckDtmfFeedback();
+    afx_msg void OnBnClickedCheckDirectFeedback();
+    afx_msg void OnBnClickedRadioSingle();
+    afx_msg void OnBnClickedRadioMulti();
+    afx_msg void OnBnClickedRadioStartStop();
+    afx_msg void OnBnClickedButtonSetRxTelephonePt();
+    afx_msg void OnBnClickedButtonSetTxTelephonePt();
+    afx_msg void OnBnClickedButtonSendTelephoneEvent();
+    afx_msg void OnBnClickedCheckDetectInband();
+    afx_msg void OnBnClickedCheckDetectOutOfBand();
+    afx_msg void OnBnClickedCheckEventDetection();
+
+private:
+    void SendTelephoneEvent(unsigned char eventCode);
+
+private:
+    VoiceEngine*                _vePtr;
+    VoEBase*                    _veBasePtr;
+    VoEDtmf*                    _veDTMFPtr;
+    VoECodec*                   _veCodecPtr;
+    int                         _channel;
+    CString                     _strMsg;
+    CDialog*                    _parentDialogPtr;
+    TelephoneEventObserver*     _telephoneEventObserverPtr;
+    bool                        _PlayDtmfToneLocally;
+    bool                        _modeStartStop;
+    bool                        _modeSingle;
+    bool                        _modeSequence;
+    bool                        _playingDTMFTone;
+    bool                        _outOfBandEventDetection;
+    bool                        _inbandEventDetection;
+};
+
+IMPLEMENT_DYNAMIC(CTelephonyEvent, CDialog)
+
+CTelephonyEvent::CTelephonyEvent(VoiceEngine* voiceEngine,
+                                 int channel,
+                                 CDialog* pParentDialog,
+                                 CWnd* pParent /*=NULL*/)
+    : _vePtr(voiceEngine),
+      _channel(channel),
+      _PlayDtmfToneLocally(false),
+      _modeStartStop(false),
+      _modeSingle(true),
+      _modeSequence(false),
+      _playingDTMFTone(false),
+      _outOfBandEventDetection(true),
+      _inbandEventDetection(false),
+      _parentDialogPtr(pParentDialog),
+      _telephoneEventObserverPtr(NULL),
+      CDialog(CTelephonyEvent::IDD, pParent)
+{
+    _veBasePtr = VoEBase::GetInterface(_vePtr);
+    _veDTMFPtr = VoEDtmf::GetInterface(_vePtr);
+    _veCodecPtr = VoECodec::GetInterface(_vePtr);
+}
+
+CTelephonyEvent::~CTelephonyEvent()
+{
+    _veDTMFPtr->Release();
+    _veCodecPtr->Release();
+    _veBasePtr->Release();
+
+    if (_telephoneEventObserverPtr)
+    {
+        _veDTMFPtr->DeRegisterTelephoneEventDetection(_channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+    }
+}
+
// Standard MFC dialog data exchange; this dialog has no DDX-mapped members,
// so it only forwards to the base class.
void CTelephonyEvent::DoDataExchange(CDataExchange* pDX)
{
    CDialog::DoDataExchange(pDX);
}
+
+
// Routes button/checkbox/radio clicks to their handlers. Keypad buttons 1-16
// map to DTMF digits 1-9, *, 0, # and A-D (see the handlers below).
// NOTE(review): OnBnClickedCheckStartStopMode and OnBnClickedCheckDetectInband
// are declared in the class but the former has no ON_BN_CLICKED entry here —
// confirm whether IDC_CHECK_START_STOP_MODE is intentionally unmapped.
BEGIN_MESSAGE_MAP(CTelephonyEvent, CDialog)
    ON_BN_CLICKED(IDC_BUTTON_1, &CTelephonyEvent::OnBnClickedButton1)
    ON_BN_CLICKED(IDC_BUTTON_2, &CTelephonyEvent::OnBnClickedButton2)
    ON_BN_CLICKED(IDC_BUTTON_3, &CTelephonyEvent::OnBnClickedButton3)
    ON_BN_CLICKED(IDC_BUTTON_4, &CTelephonyEvent::OnBnClickedButton4)
    ON_BN_CLICKED(IDC_BUTTON_5, &CTelephonyEvent::OnBnClickedButton5)
    ON_BN_CLICKED(IDC_BUTTON_6, &CTelephonyEvent::OnBnClickedButton6)
    ON_BN_CLICKED(IDC_BUTTON_7, &CTelephonyEvent::OnBnClickedButton7)
    ON_BN_CLICKED(IDC_BUTTON_8, &CTelephonyEvent::OnBnClickedButton8)
    ON_BN_CLICKED(IDC_BUTTON_9, &CTelephonyEvent::OnBnClickedButton9)
    ON_BN_CLICKED(IDC_BUTTON_10, &CTelephonyEvent::OnBnClickedButton10)
    ON_BN_CLICKED(IDC_BUTTON_11, &CTelephonyEvent::OnBnClickedButton11)
    ON_BN_CLICKED(IDC_BUTTON_12, &CTelephonyEvent::OnBnClickedButton12)
    ON_BN_CLICKED(IDC_BUTTON_13, &CTelephonyEvent::OnBnClickedButtonA)
    ON_BN_CLICKED(IDC_BUTTON_14, &CTelephonyEvent::OnBnClickedButtonB)
    ON_BN_CLICKED(IDC_BUTTON_15, &CTelephonyEvent::OnBnClickedButtonC)
    ON_BN_CLICKED(IDC_BUTTON_16, &CTelephonyEvent::OnBnClickedButtonD)
    ON_BN_CLICKED(IDC_CHECK_DTMF_PLAYOUT_RX, &CTelephonyEvent::OnBnClickedCheckDtmfPlayoutRx)
    ON_BN_CLICKED(IDC_CHECK_DTMF_PLAY_TONE, &CTelephonyEvent::OnBnClickedCheckDtmfPlayTone)
    ON_BN_CLICKED(IDC_CHECK_EVENT_INBAND, &CTelephonyEvent::OnBnClickedCheckEventInband)
    ON_BN_CLICKED(IDC_CHECK_DTMF_FEEDBACK, &CTelephonyEvent::OnBnClickedCheckDtmfFeedback)
    ON_BN_CLICKED(IDC_CHECK_DIRECT_FEEDBACK, &CTelephonyEvent::OnBnClickedCheckDirectFeedback)
    ON_BN_CLICKED(IDC_RADIO_SINGLE, &CTelephonyEvent::OnBnClickedRadioSingle)
    ON_BN_CLICKED(IDC_RADIO_MULTI, &CTelephonyEvent::OnBnClickedRadioMulti)
    ON_BN_CLICKED(IDC_RADIO_START_STOP, &CTelephonyEvent::OnBnClickedRadioStartStop)
    ON_BN_CLICKED(IDC_BUTTON_SET_RX_TELEPHONE_PT, &CTelephonyEvent::OnBnClickedButtonSetRxTelephonePt)
    ON_BN_CLICKED(IDC_BUTTON_SET_TX_TELEPHONE_PT, &CTelephonyEvent::OnBnClickedButtonSetTxTelephonePt)
    ON_BN_CLICKED(IDC_BUTTON_SEND_TELEPHONE_EVENT, &CTelephonyEvent::OnBnClickedButtonSendTelephoneEvent)
    ON_BN_CLICKED(IDC_CHECK_DETECT_INBAND, &CTelephonyEvent::OnBnClickedCheckDetectInband)
    ON_BN_CLICKED(IDC_CHECK_DETECT_OUT_OF_BAND, &CTelephonyEvent::OnBnClickedCheckDetectOutOfBand)
    ON_BN_CLICKED(IDC_CHECK_EVENT_DETECTION, &CTelephonyEvent::OnBnClickedCheckEventDetection)
END_MESSAGE_MAP()
+
+
+// CTelephonyEvent message handlers
+
// Populates the dialog from the engine's current DTMF state: playout and
// feedback checkboxes, default event length/attenuation, detection support,
// telephone-event payload types, and the active send-mode radio button.
BOOL CTelephonyEvent::OnInitDialog()
{
    CDialog::OnInitDialog();

    // Show the channel number in the window title.
    CString str;
    GetWindowText(str);
    str.AppendFormat(_T(" [channel = %d]"), _channel);
    SetWindowText(str);

    // Update dialog with latest playout state
    bool enabled(false);
    _veDTMFPtr->GetDtmfPlayoutStatus(_channel, enabled);
    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_PLAYOUT_RX);
    button->SetCheck(enabled ? BST_CHECKED : BST_UNCHECKED);

    // Update dialog with latest feedback state ('enabled' is reused here)
    bool directFeedback(false);
    _veDTMFPtr->GetDtmfFeedbackStatus(enabled, directFeedback);
    button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_FEEDBACK);
    button->SetCheck(enabled ? BST_CHECKED : BST_UNCHECKED);
    button = (CButton*)GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK);
    button->SetCheck(directFeedback ? BST_CHECKED : BST_UNCHECKED);

    // Default event length is 160 ms
    SetDlgItemInt(IDC_EDIT_EVENT_LENGTH, 160);

    // Default event attenuation is 10 (<-> -10dBm0)
    SetDlgItemInt(IDC_EDIT_EVENT_ATTENUATION, 10);

    // Current event-detection status
    TelephoneEventDetectionMethods detectionMethod(kOutOfBand);
    if (_veDTMFPtr->GetTelephoneEventDetectionStatus(_channel, enabled, detectionMethod) == 0)
    {
        // DTMF detection is supported
        if (enabled)
        {
            button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_DETECTION);
            button->SetCheck(BST_CHECKED);
        }
        if (detectionMethod == kOutOfBand || detectionMethod == kInAndOutOfBand)
        {
            button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_OUT_OF_BAND);
            button->SetCheck(BST_CHECKED);
        }
        if (detectionMethod == kInBand || detectionMethod == kInAndOutOfBand)
        {
            button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_INBAND);
            button->SetCheck(BST_CHECKED);
        }
    }
    else
    {
        // DTMF detection is not supported => grey out all detection controls
        GetDlgItem(IDC_CHECK_EVENT_DETECTION)->EnableWindow(FALSE);
        GetDlgItem(IDC_CHECK_DETECT_OUT_OF_BAND)->EnableWindow(FALSE);
        GetDlgItem(IDC_CHECK_DETECT_INBAND)->EnableWindow(FALSE);
        GetDlgItem(IDC_EDIT_ON_EVENT_INBAND)->EnableWindow(FALSE);
        GetDlgItem(IDC_EDIT_ON_EVENT_OUT_OF_BAND)->EnableWindow(FALSE);
    }

    // Telephone-event PTs: TX from the DTMF API, RX via the codec database
    unsigned char pt(0);
    _veDTMFPtr->GetSendTelephoneEventPayloadType(_channel, pt);
    SetDlgItemInt(IDC_EDIT_EVENT_TX_PT, pt);

    CodecInst codec;
    strcpy_s(codec.plname, 32, "telephone-event"); codec.channels = 1; codec.plfreq = 8000;
    _veCodecPtr->GetRecPayloadType(_channel, codec);
    SetDlgItemInt(IDC_EDIT_EVENT_RX_PT, codec.pltype);

    // Reflect the current send mode in the radio buttons.
    if (_modeSingle)
    {
        ((CButton*)GetDlgItem(IDC_RADIO_SINGLE))->SetCheck(BST_CHECKED);
    }
    else if (_modeStartStop)
    {
        ((CButton*)GetDlgItem(IDC_RADIO_START_STOP))->SetCheck(BST_CHECKED);
    }
    else if (_modeSequence)
    {
        ((CButton*)GetDlgItem(IDC_RADIO_MULTI))->SetCheck(BST_CHECKED);
    }

    return TRUE;  // return TRUE  unless you set the focus to a control
}
+void CTelephonyEvent::SendTelephoneEvent(unsigned char eventCode)
+{
+    BOOL ret;
+    int lengthMs(0);
+    int attenuationDb(0);
+    bool outBand(false);
+    int res(0);
+
+    // tone length
+    if (!_modeStartStop)
+    {
+        lengthMs = GetDlgItemInt(IDC_EDIT_EVENT_LENGTH, &ret);
+        if (ret == FALSE)
+        {
+            // use default length if edit field is empty
+            lengthMs = 160;
+        }
+    }
+
+    // attenuation
+    attenuationDb = GetDlgItemInt(IDC_EDIT_EVENT_ATTENUATION, &ret);
+    if (ret == FALSE)
+    {
+        // use default length if edit field is empty
+        attenuationDb = 10;
+    }
+
+    // out-band or in-band
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_INBAND);
+    int check = button->GetCheck();
+    outBand = (check == BST_UNCHECKED);
+
+    if (eventCode < 16)
+        SetDlgItemInt(IDC_EDIT_DTMF_EVENT, eventCode);
+
+    if (_PlayDtmfToneLocally)
+    {
+        // --- PlayDtmfTone
+
+        if (_modeSingle)
+        {
+            TEST2(_veDTMFPtr->PlayDtmfTone(eventCode, lengthMs, attenuationDb) == 0,
+                _T("PlayDtmfTone(eventCode=%u, lengthMs=%d, attenuationDb=%d)"), eventCode, lengthMs, attenuationDb);
+        }
+        else if (_modeStartStop)
+        {
+            if (!_playingDTMFTone)
+            {
+                TEST2((res = _veDTMFPtr->StartPlayingDtmfTone(eventCode, attenuationDb)) == 0,
+                    _T("StartPlayingDtmfTone(eventCode=%u, attenuationDb=%d)"), eventCode, attenuationDb);
+            }
+            else
+            {
+                TEST2((res = _veDTMFPtr->StopPlayingDtmfTone()) == 0,
+                    _T("StopPlayingDTMFTone()"));
+            }
+            if (res == 0)
+                _playingDTMFTone = !_playingDTMFTone;
+        }
+        else if (_modeSequence)
+        {
+            int nTones(1);
+            int sleepMs(0);
+            int lenMult(1);
+            if (eventCode == 1)
+            {
+                nTones = 2;
+                sleepMs = lengthMs;
+                lenMult = 1;
+            }
+            else if (eventCode == 2)
+            {
+                nTones = 2;
+                sleepMs = lengthMs/2;
+                lenMult = 2;
+            }
+            else if (eventCode == 3)
+            {
+                nTones = 3;
+                sleepMs = 0;
+                lenMult = 1;
+            }
+            for (int i = 0; i < nTones; i++)
+            {
+                TEST2(_veDTMFPtr->PlayDtmfTone(eventCode, lengthMs, attenuationDb) == 0,
+                    _T("PlayDtmfTone(eventCode=%u, outBand=%d, lengthMs=%d, attenuationDb=%d)"), eventCode, lengthMs, attenuationDb);
+                Sleep(sleepMs);
+                lengthMs = lenMult*lengthMs;
+                eventCode++;
+            }
+        }
+    }
+    else
+    {
+        // --- SendTelephoneEvent
+
+        if (_modeSingle)
+        {
+            TEST2(_veDTMFPtr->SendTelephoneEvent(_channel, eventCode, outBand, lengthMs, attenuationDb) == 0,
+                _T("SendTelephoneEvent(channel=%d, eventCode=%u, outBand=%d, lengthMs=%d, attenuationDb=%d)"), _channel, eventCode, outBand, lengthMs, attenuationDb);
+        }
+        else if (_modeStartStop)
+        {
+            TEST2(false, _T("*** NOT IMPLEMENTED ***"));
+        }
+        else if (_modeSequence)
+        {
+            int nTones(1);
+            int sleepMs(0);
+            int lenMult(1);
+            if (eventCode == 1)
+            {
+                nTones = 2;
+                sleepMs = lengthMs;
+                lenMult = 1;
+            }
+            else if (eventCode == 2)
+            {
+                eventCode = 1;
+                nTones = 2;
+                sleepMs = lengthMs/2;
+                lenMult = 2;
+            }
+            else if (eventCode == 3)
+            {
+                eventCode = 1;
+                nTones = 3;
+                sleepMs = 0;
+                lenMult = 1;
+            }
+            for (int i = 0; i < nTones; i++)
+            {
+                TEST2(_veDTMFPtr->SendTelephoneEvent(_channel, eventCode, outBand, lengthMs, attenuationDb) == 0,
+                    _T("SendTelephoneEvent(channel=%d, eventCode=%u, outBand=%d, lengthMs=%d, attenuationDb=%d)"), _channel, eventCode, outBand, lengthMs, attenuationDb);
+                Sleep(sleepMs);
+                lengthMs = lenMult*lengthMs;
+                eventCode++;
+            }
+        }
+    }
+}
+
+void CTelephonyEvent::OnBnClickedButtonSendTelephoneEvent()
+{
+    BOOL ret;
+    unsigned char eventCode(0);
+
+    eventCode = (unsigned char)GetDlgItemInt(IDC_EDIT_EVENT_CODE, &ret);
+    if (ret == FALSE)
+    {
+        return;
+    }
+    SendTelephoneEvent(eventCode);
+}
+
// Keypad button handlers: each forwards a fixed DTMF event code to
// SendTelephoneEvent(). Codes 1-9 are the digits, 10 = '*', 0 = '0',
// 11 = '#', 12-15 = A-D.

void CTelephonyEvent::OnBnClickedButton1()
{
    SendTelephoneEvent(1);
}

void CTelephonyEvent::OnBnClickedButton2()
{
    SendTelephoneEvent(2);
}

void CTelephonyEvent::OnBnClickedButton3()
{
    SendTelephoneEvent(3);
}

void CTelephonyEvent::OnBnClickedButton4()
{
    SendTelephoneEvent(4);
}

void CTelephonyEvent::OnBnClickedButton5()
{
    SendTelephoneEvent(5);
}

void CTelephonyEvent::OnBnClickedButton6()
{
    SendTelephoneEvent(6);
}

void CTelephonyEvent::OnBnClickedButton7()
{
    SendTelephoneEvent(7);
}

void CTelephonyEvent::OnBnClickedButton8()
{
    SendTelephoneEvent(8);
}

void CTelephonyEvent::OnBnClickedButton9()
{
    SendTelephoneEvent(9);
}

void CTelephonyEvent::OnBnClickedButton10()
{
    // *
    SendTelephoneEvent(10);
}

void CTelephonyEvent::OnBnClickedButton11()
{
    // 0
    SendTelephoneEvent(0);
}

void CTelephonyEvent::OnBnClickedButton12()
{
    // #
    SendTelephoneEvent(11);
}

void CTelephonyEvent::OnBnClickedButtonA()
{
    SendTelephoneEvent(12);
}

void CTelephonyEvent::OnBnClickedButtonB()
{
    SendTelephoneEvent(13);
}

void CTelephonyEvent::OnBnClickedButtonC()
{
    SendTelephoneEvent(14);
}

void CTelephonyEvent::OnBnClickedButtonD()
{
    SendTelephoneEvent(15);
}
+
+void CTelephonyEvent::OnBnClickedCheckDtmfPlayoutRx()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_PLAYOUT_RX);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST2(_veDTMFPtr->SetDtmfPlayoutStatus(_channel, enable) == 0, _T("SetDtmfPlayoutStatus(channel=%d, enable=%d)"), _channel, enable);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDtmfPlayTone()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_PLAY_TONE);
+    int check = button->GetCheck();
+    _PlayDtmfToneLocally = (check == BST_CHECKED);
+}
+
// Radio-button handlers: select exactly one of the three send modes used by
// SendTelephoneEvent() (single event, scripted sequence, or start/stop tone).

void CTelephonyEvent::OnBnClickedRadioSingle()
{
    _modeStartStop = false;
    _modeSingle = true;
    _modeSequence = false;
}

void CTelephonyEvent::OnBnClickedRadioMulti()
{
    _modeStartStop = false;
    _modeSingle = false;
    _modeSequence = true;
}

void CTelephonyEvent::OnBnClickedRadioStartStop()
{
    _modeStartStop = true;
    _modeSingle = false;
    _modeSequence = false;
}
+
+void CTelephonyEvent::OnBnClickedCheckEventInband()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_INBAND);
+    int check = button->GetCheck();
+    GetDlgItem(IDC_EDIT_EVENT_CODE)->EnableWindow(check?FALSE:TRUE);
+    GetDlgItem(IDC_BUTTON_SEND_TELEPHONE_EVENT)->EnableWindow(check?FALSE:TRUE);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDtmfFeedback()
+{
+    CButton* button(NULL);
+
+    // Retrieve feedback state
+    button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_FEEDBACK);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    // Retrieve direct-feedback setting
+    button = (CButton*)GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK);
+    check = button->GetCheck();
+    const bool directFeedback = (check == BST_CHECKED);
+
+    // GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK)->EnableWindow(enable ? TRUE : FALSE);
+
+    TEST2(_veDTMFPtr->SetDtmfFeedbackStatus(enable, directFeedback) == 0,
+        _T("SetDtmfFeedbackStatus(enable=%d, directFeedback=%d)"), enable, directFeedback);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDirectFeedback()
+{
+    CButton* button(NULL);
+
+    // Retrieve feedback state
+    button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_FEEDBACK);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    // Retrieve new direct-feedback setting
+    button = (CButton*)GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK);
+    check = button->GetCheck();
+    const bool directFeedback = (check == BST_CHECKED);
+
+    TEST2(_veDTMFPtr->SetDtmfFeedbackStatus(enable, directFeedback) == 0,
+        _T("SetDtmfFeedbackStatus(enable=%d, directFeedback=%d)"), enable, directFeedback);
+}
+
+void CTelephonyEvent::OnBnClickedButtonSetRxTelephonePt()
+{
+    BOOL ret;
+    int pt = GetDlgItemInt(IDC_EDIT_EVENT_RX_PT, &ret);
+    if (ret == FALSE)
+        return;
+    CodecInst codec;
+    strcpy_s(codec.plname, 32, "telephone-event");
+    codec.pltype = pt; codec.channels = 1; codec.plfreq = 8000;
+    TEST2(_veCodecPtr->SetRecPayloadType(_channel, codec) == 0,
+        _T("SetSendTelephoneEventPayloadType(channel=%d, codec.pltype=%u)"), _channel, codec.pltype);
+}
+
+void CTelephonyEvent::OnBnClickedButtonSetTxTelephonePt()
+{
+    BOOL ret;
+    int pt = GetDlgItemInt(IDC_EDIT_EVENT_TX_PT, &ret);
+    if (ret == FALSE)
+        return;
+    TEST2(_veDTMFPtr->SetSendTelephoneEventPayloadType(_channel, pt) == 0,
+        _T("SetSendTelephoneEventPayloadType(channel=%d, type=%u)"), _channel, pt);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDetectInband()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_INBAND);
+    int check = button->GetCheck();
+    _inbandEventDetection = (check == BST_CHECKED);
+
+    bool enabled(false);
+    TelephoneEventDetectionMethods detectionMethod;
+    _veDTMFPtr->GetTelephoneEventDetectionStatus(_channel, enabled, detectionMethod);
+    if (enabled)
+    {
+        // deregister
+        _veDTMFPtr->DeRegisterTelephoneEventDetection(_channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+        SetDlgItemText(IDC_EDIT_ON_EVENT_INBAND,_T(""));
+        SetDlgItemText(IDC_EDIT_ON_EVENT_OUT_OF_BAND,_T(""));
+    }
+    OnBnClickedCheckEventDetection();
+}
+
+void CTelephonyEvent::OnBnClickedCheckDetectOutOfBand()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_OUT_OF_BAND);
+    int check = button->GetCheck();
+    _outOfBandEventDetection = (check == BST_CHECKED);
+
+    bool enabled(false);
+    TelephoneEventDetectionMethods detectionMethod;
+    _veDTMFPtr->GetTelephoneEventDetectionStatus(_channel, enabled, detectionMethod);
+    if (enabled)
+    {
+        // deregister
+        _veDTMFPtr->DeRegisterTelephoneEventDetection(_channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+        SetDlgItemText(IDC_EDIT_ON_EVENT_INBAND,_T(""));
+        SetDlgItemText(IDC_EDIT_ON_EVENT_OUT_OF_BAND,_T(""));
+    }
+    OnBnClickedCheckEventDetection();
+}
+
+void CTelephonyEvent::OnBnClickedCheckEventDetection()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_DETECTION);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (enable)
+    {
+        TelephoneEventDetectionMethods method(kInBand);
+        if (_inbandEventDetection && !_outOfBandEventDetection)
+            method = kInBand;
+        else if (!_inbandEventDetection && _outOfBandEventDetection)
+            method = kOutOfBand;
+        else if (_inbandEventDetection && _outOfBandEventDetection)
+            method = kInAndOutOfBand;
+
+        CWnd* wndOut = GetDlgItem(IDC_EDIT_ON_EVENT_OUT_OF_BAND);
+        CWnd* wndIn = GetDlgItem(IDC_EDIT_ON_EVENT_INBAND);
+        _telephoneEventObserverPtr = new TelephoneEventObserver(wndOut, wndIn);
+
+        TEST2(_veDTMFPtr->RegisterTelephoneEventDetection(_channel, method, *_telephoneEventObserverPtr) == 0,
+            _T("RegisterTelephoneEventDetection(channel=%d, detectionMethod=%d)"), _channel, method);
+    }
+    else
+    {
+        TEST2(_veDTMFPtr->DeRegisterTelephoneEventDetection(_channel) == 0,
+            _T("DeRegisterTelephoneEventDetection(channel=%d)"), _channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+        SetDlgItemText(IDC_EDIT_ON_EVENT_INBAND,_T(""));
+        SetDlgItemText(IDC_EDIT_ON_EVENT_OUT_OF_BAND,_T(""));
+    }
+}
+
+// ============================================================================
+//                                 CWinTestDlg dialog
+// ============================================================================
+
+CWinTestDlg::CWinTestDlg(CWnd* pParent /*=NULL*/)
+    : CDialog(CWinTestDlg::IDD, pParent),
+    _failCount(0),
+    _vePtr(NULL),
+    _veBasePtr(NULL),
+    _veCodecPtr(NULL),
+    _veNetworkPtr(NULL),
+    _veFilePtr(NULL),
+    _veHardwarePtr(NULL),
+    _veExternalMediaPtr(NULL),
+    _veApmPtr(NULL),
+    _veEncryptionPtr(NULL),
+    _veRtpRtcpPtr(NULL),
+    _transportPtr(NULL),
+    _encryptionPtr(NULL),
+    _externalMediaPtr(NULL),
+    _externalTransport(false),
+    _externalTransportBuild(false),
+    _checkPlayFileIn(0),
+    _checkPlayFileIn1(0),
+    _checkPlayFileIn2(0),
+    _checkPlayFileOut1(0),
+    _checkPlayFileOut2(0),
+    _checkAGC(0),
+    _checkAGC1(0),
+    _checkNS(0),
+    _checkNS1(0),
+    _checkEC(0),
+    _checkVAD1(0),
+    _checkVAD2(0),
+    _checkSrtpTx1(0),
+    _checkSrtpTx2(0),
+    _checkSrtpRx1(0),
+    _checkSrtpRx2(0),
+    _checkConference1(0),
+    _checkConference2(0),
+    _checkOnHold1(0),
+    _checkOnHold2(0),
+    _strComboIp1(_T("")),
+    _strComboIp2(_T("")),
+    _delayEstimate1(false),
+    _delayEstimate2(false),
+    _rxVad(false),
+    _nErrorCallbacks(0),
+    _timerTicks(0)
+{
+    m_hIcon = AfxGetApp()->LoadIcon(IDR_MAINFRAME);
+
+    _vePtr = VoiceEngine::Create();
+
+    VoiceEngine::SetTraceFilter(kTraceNone);
+    // VoiceEngine::SetTraceFilter(kTraceAll);
+    // VoiceEngine::SetTraceFilter(kTraceStream | kTraceStateInfo | kTraceWarning | kTraceError | kTraceCritical | kTraceApiCall | kTraceModuleCall | kTraceMemory | kTraceDebug | kTraceInfo);
+    // VoiceEngine::SetTraceFilter(kTraceStateInfo | kTraceWarning | kTraceError | kTraceCritical | kTraceApiCall | kTraceModuleCall | kTraceMemory | kTraceInfo);
+
+    VoiceEngine::SetTraceFile("ve_win_test.txt");
+    VoiceEngine::SetTraceCallback(NULL);
+
+    if (_vePtr)
+    {
+        _veExternalMediaPtr = VoEExternalMedia::GetInterface(_vePtr);
+        _veVolumeControlPtr = VoEVolumeControl::GetInterface(_vePtr);
+        _veEncryptionPtr = VoEEncryption::GetInterface(_vePtr);
+        _veVideoSyncPtr = VoEVideoSync::GetInterface(_vePtr);
+        _veNetworkPtr = VoENetwork::GetInterface(_vePtr);
+        _veFilePtr = VoEFile::GetInterface(_vePtr);
+        _veApmPtr = VoEAudioProcessing::GetInterface(_vePtr);
+
+        _veBasePtr = VoEBase::GetInterface(_vePtr);
+        _veCodecPtr = VoECodec::GetInterface(_vePtr);
+        _veHardwarePtr = VoEHardware::GetInterface(_vePtr);
+        _veRtpRtcpPtr = VoERTP_RTCP::GetInterface(_vePtr);
+        _transportPtr = new MyTransport(_veNetworkPtr);
+        _encryptionPtr = new MyEncryption();
+        _externalMediaPtr = new MediaProcessImpl();
+        _connectionObserverPtr = new ConnectionObserver();
+        _rxVadObserverPtr = new RxCallback();
+    }
+
+    _veBasePtr->RegisterVoiceEngineObserver(*this);
+}
+
// Tears down in strict order: helper/observer objects first, then sub-API
// interfaces, Terminate() before the final Release() of the base interface,
// and finally VoiceEngine::Delete(). Reordering these calls is unsafe.
CWinTestDlg::~CWinTestDlg()
{
    // Locally allocated helpers.
    if (_connectionObserverPtr) delete _connectionObserverPtr;
    if (_externalMediaPtr) delete _externalMediaPtr;
    if (_transportPtr) delete _transportPtr;
    if (_encryptionPtr) delete _encryptionPtr;
    if (_rxVadObserverPtr) delete _rxVadObserverPtr;

    // Sub-API interfaces acquired in the constructor.
    if (_veExternalMediaPtr) _veExternalMediaPtr->Release();
    if (_veEncryptionPtr) _veEncryptionPtr->Release();
    if (_veVideoSyncPtr) _veVideoSyncPtr->Release();
    if (_veVolumeControlPtr) _veVolumeControlPtr->Release();

    // Shut the engine down before dropping the base reference.
    if (_veBasePtr) _veBasePtr->Terminate();
    if (_veBasePtr) _veBasePtr->Release();

    if (_veCodecPtr) _veCodecPtr->Release();
    if (_veNetworkPtr) _veNetworkPtr->Release();
    if (_veFilePtr) _veFilePtr->Release();
    if (_veHardwarePtr) _veHardwarePtr->Release();
    if (_veApmPtr) _veApmPtr->Release();
    if (_veRtpRtcpPtr) _veRtpRtcpPtr->Release();
    if (_vePtr)
    {
        // Delete() fails if any interface reference is still outstanding.
        bool ret = VoiceEngine::Delete(_vePtr);
        ASSERT(ret == true);
    }
    VoiceEngine::SetTraceFilter(kTraceNone);
}
+
// MFC dialog data exchange: binds the two destination-IP combo boxes to
// their CString members.
void CWinTestDlg::DoDataExchange(CDataExchange* pDX)
{
    CDialog::DoDataExchange(pDX);
    DDX_CBString(pDX, IDC_COMBO_IP_1, _strComboIp1);
    DDX_CBString(pDX, IDC_COMBO_IP_2, _strComboIp2);
}
+
// Message map for the main test dialog: system/paint/timer messages plus one
// entry per channel-control, file, device, APM, SRTP and DTMF widget.
BEGIN_MESSAGE_MAP(CWinTestDlg, CDialog)
    ON_WM_SYSCOMMAND()
    ON_WM_PAINT()
    ON_WM_QUERYDRAGICON()
    ON_WM_TIMER()
    //}}AFX_MSG_MAP
    ON_BN_CLICKED(IDC_BUTTON_CREATE_1, &CWinTestDlg::OnBnClickedButtonCreate1)
    ON_BN_CLICKED(IDC_BUTTON_DELETE_1, &CWinTestDlg::OnBnClickedButtonDelete1)
    ON_BN_CLICKED(IDC_BUTTON_CREATE_2, &CWinTestDlg::OnBnClickedButtonCreate2)
    ON_BN_CLICKED(IDC_BUTTON_DELETE_2, &CWinTestDlg::OnBnClickedButtonDelete2)
    ON_CBN_SELCHANGE(IDC_COMBO_CODEC_1, &CWinTestDlg::OnCbnSelchangeComboCodec1)
    ON_BN_CLICKED(IDC_BUTTON_START_LISTEN_1, &CWinTestDlg::OnBnClickedButtonStartListen1)
    ON_BN_CLICKED(IDC_BUTTON_STOP_LISTEN_1, &CWinTestDlg::OnBnClickedButtonStopListen1)
    ON_BN_CLICKED(IDC_BUTTON_START_PLAYOUT_1, &CWinTestDlg::OnBnClickedButtonStartPlayout1)
    ON_BN_CLICKED(IDC_BUTTON_STOP_PLAYOUT_1, &CWinTestDlg::OnBnClickedButtonStopPlayout1)
    ON_BN_CLICKED(IDC_BUTTON_START_SEND_1, &CWinTestDlg::OnBnClickedButtonStartSend1)
    ON_BN_CLICKED(IDC_BUTTON_STOP_SEND_1, &CWinTestDlg::OnBnClickedButtonStopSend1)
    ON_CBN_SELCHANGE(IDC_COMBO_IP_2, &CWinTestDlg::OnCbnSelchangeComboIp2)
    ON_CBN_SELCHANGE(IDC_COMBO_IP_1, &CWinTestDlg::OnCbnSelchangeComboIp1)
    ON_CBN_SELCHANGE(IDC_COMBO_CODEC_2, &CWinTestDlg::OnCbnSelchangeComboCodec2)
    ON_BN_CLICKED(IDC_BUTTON_START_LISTEN_2, &CWinTestDlg::OnBnClickedButtonStartListen2)
    ON_BN_CLICKED(IDC_BUTTON_STOP_LISTEN_2, &CWinTestDlg::OnBnClickedButtonStopListen2)
    ON_BN_CLICKED(IDC_BUTTON_START_PLAYOUT_2, &CWinTestDlg::OnBnClickedButtonStartPlayout2)
    ON_BN_CLICKED(IDC_BUTTON_STOP_PLAYOUT_2, &CWinTestDlg::OnBnClickedButtonStopPlayout2)
    ON_BN_CLICKED(IDC_BUTTON_START_SEND_2, &CWinTestDlg::OnBnClickedButtonStartSend2)
    ON_BN_CLICKED(IDC_BUTTON_STOP_SEND_2, &CWinTestDlg::OnBnClickedButtonStopSend2)
    ON_BN_CLICKED(IDC_CHECK_EXT_TRANS_1, &CWinTestDlg::OnBnClickedCheckExtTrans1)
    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_IN_1, &CWinTestDlg::OnBnClickedCheckPlayFileIn1)
    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_OUT_1, &CWinTestDlg::OnBnClickedCheckPlayFileOut1)
    ON_BN_CLICKED(IDC_CHECK_EXT_TRANS_2, &CWinTestDlg::OnBnClickedCheckExtTrans2)
    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_IN_2, &CWinTestDlg::OnBnClickedCheckPlayFileIn2)
    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_OUT_2, &CWinTestDlg::OnBnClickedCheckPlayFileOut2)
    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_IN, &CWinTestDlg::OnBnClickedCheckPlayFileIn)
    ON_CBN_SELCHANGE(IDC_COMBO_REC_DEVICE, &CWinTestDlg::OnCbnSelchangeComboRecDevice)
    ON_CBN_SELCHANGE(IDC_COMBO_PLAY_DEVICE, &CWinTestDlg::OnCbnSelchangeComboPlayDevice)
    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_IN_1, &CWinTestDlg::OnBnClickedCheckExtMediaIn1)
    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_OUT_1, &CWinTestDlg::OnBnClickedCheckExtMediaOut1)
    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_INPUT_VOLUME, &CWinTestDlg::OnNMReleasedcaptureSliderInputVolume)
    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_OUTPUT_VOLUME, &CWinTestDlg::OnNMReleasedcaptureSliderOutputVolume)
    ON_BN_CLICKED(IDC_CHECK_AGC, &CWinTestDlg::OnBnClickedCheckAgc)
    ON_BN_CLICKED(IDC_CHECK_NS, &CWinTestDlg::OnBnClickedCheckNs)
    ON_BN_CLICKED(IDC_CHECK_EC, &CWinTestDlg::OnBnClickedCheckEc)
    ON_BN_CLICKED(IDC_CHECK_VAD_1, &CWinTestDlg::OnBnClickedCheckVad1)
    ON_BN_CLICKED(IDC_CHECK_VAD_3, &CWinTestDlg::OnBnClickedCheckVad2)
    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_IN_2, &CWinTestDlg::OnBnClickedCheckExtMediaIn2)
    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_OUT_2, &CWinTestDlg::OnBnClickedCheckExtMediaOut2)
    ON_BN_CLICKED(IDC_CHECK_MUTE_IN, &CWinTestDlg::OnBnClickedCheckMuteIn)
    ON_BN_CLICKED(IDC_CHECK_MUTE_IN_1, &CWinTestDlg::OnBnClickedCheckMuteIn1)
    ON_BN_CLICKED(IDC_CHECK_MUTE_IN_2, &CWinTestDlg::OnBnClickedCheckMuteIn2)
    ON_BN_CLICKED(IDC_CHECK_SRTP_TX_1, &CWinTestDlg::OnBnClickedCheckSrtpTx1)
    ON_BN_CLICKED(IDC_CHECK_SRTP_RX_1, &CWinTestDlg::OnBnClickedCheckSrtpRx1)
    ON_BN_CLICKED(IDC_CHECK_SRTP_TX_2, &CWinTestDlg::OnBnClickedCheckSrtpTx2)
    ON_BN_CLICKED(IDC_CHECK_SRTP_RX_2, &CWinTestDlg::OnBnClickedCheckSrtpRx2)
    ON_BN_CLICKED(IDC_CHECK_EXT_ENCRYPTION_1, &CWinTestDlg::OnBnClickedCheckExtEncryption1)
    ON_BN_CLICKED(IDC_CHECK_EXT_ENCRYPTION_2, &CWinTestDlg::OnBnClickedCheckExtEncryption2)
    ON_BN_CLICKED(IDC_BUTTON_DTMF_1, &CWinTestDlg::OnBnClickedButtonDtmf1)
    ON_BN_CLICKED(IDC_CHECK_REC_MIC, &CWinTestDlg::OnBnClickedCheckRecMic)
    ON_BN_CLICKED(IDC_BUTTON_DTMF_2, &CWinTestDlg::OnBnClickedButtonDtmf2)
    ON_BN_CLICKED(IDC_BUTTON_TEST_1, &CWinTestDlg::OnBnClickedButtonTest1)
    ON_BN_CLICKED(IDC_CHECK_CONFERENCE_1, &CWinTestDlg::OnBnClickedCheckConference1)
    ON_BN_CLICKED(IDC_CHECK_CONFERENCE_2, &CWinTestDlg::OnBnClickedCheckConference2)
    ON_BN_CLICKED(IDC_CHECK_ON_HOLD_1, &CWinTestDlg::OnBnClickedCheckOnHold1)
    ON_BN_CLICKED(IDC_CHECK_ON_HOLD_2, &CWinTestDlg::OnBnClickedCheckOnHold2)
    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_IN, &CWinTestDlg::OnBnClickedCheckExtMediaIn)
    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_OUT, &CWinTestDlg::OnBnClickedCheckExtMediaOut)
    ON_LBN_SELCHANGE(IDC_LIST_CODEC_1, &CWinTestDlg::OnLbnSelchangeListCodec1)
    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_PAN_LEFT, &CWinTestDlg::OnNMReleasedcaptureSliderPanLeft)
    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_PAN_RIGHT, &CWinTestDlg::OnNMReleasedcaptureSliderPanRight)
    ON_BN_CLICKED(IDC_BUTTON_VERSION, &CWinTestDlg::OnBnClickedButtonVersion)
    ON_BN_CLICKED(IDC_CHECK_DELAY_ESTIMATE_1, &CWinTestDlg::OnBnClickedCheckDelayEstimate1)
    ON_BN_CLICKED(IDC_CHECK_RXVAD, &CWinTestDlg::OnBnClickedCheckRxvad)
    ON_BN_CLICKED(IDC_CHECK_AGC_1, &CWinTestDlg::OnBnClickedCheckAgc1)
    ON_BN_CLICKED(IDC_CHECK_NS_1, &CWinTestDlg::OnBnClickedCheckNs1)
    ON_BN_CLICKED(IDC_CHECK_REC_CALL, &CWinTestDlg::OnBnClickedCheckRecCall)
    ON_BN_CLICKED(IDC_CHECK_TYPING_DETECTION, &CWinTestDlg::OnBnClickedCheckTypingDetection)
    ON_BN_CLICKED(IDC_CHECK_FEC, &CWinTestDlg::OnBnClickedCheckFEC)
    ON_BN_CLICKED(IDC_BUTTON_CLEAR_ERROR_CALLBACK, &CWinTestDlg::OnBnClickedButtonClearErrorCallback)
END_MESSAGE_MAP()
+
+BOOL CWinTestDlg::UpdateTest(bool failed, const CString& strMsg)
+{
+    if (failed)
+    {
+        SetDlgItemText(IDC_EDIT_MESSAGE, strMsg);
+        _strErr.Format(_T("FAILED (error=%d)"), _veBasePtr->LastError());
+        SetDlgItemText(IDC_EDIT_RESULT, _strErr);
+        _failCount++;
+        SetDlgItemInt(IDC_EDIT_N_FAILS, _failCount);
+        SetDlgItemInt(IDC_EDIT_LAST_ERROR, _veBasePtr->LastError());
+    }
+    else
+    {
+        SetDlgItemText(IDC_EDIT_MESSAGE, strMsg);
+        SetDlgItemText(IDC_EDIT_RESULT, _T("OK"));
+    }
+    return TRUE;
+}
+
+
+// CWinTestDlg message handlers
+
+// One-time dialog setup: system menu/icons, voice-engine Init(), an
+// external-transport probe, and the initial state of every control on
+// the dialog (sliders, APM check boxes, both channel sections, devices).
+BOOL CWinTestDlg::OnInitDialog()
+{
+    CDialog::OnInitDialog();
+
+    // Add "About..." menu item to system menu.
+
+    // IDM_ABOUTBOX must be in the system command range.
+    ASSERT((IDM_ABOUTBOX & 0xFFF0) == IDM_ABOUTBOX);
+    ASSERT(IDM_ABOUTBOX < 0xF000);
+
+    CMenu* pSysMenu = GetSystemMenu(FALSE);
+    if (pSysMenu != NULL)
+    {
+        CString strAboutMenu;
+        strAboutMenu.LoadString(IDS_ABOUTBOX);
+        if (!strAboutMenu.IsEmpty())
+        {
+            pSysMenu->AppendMenu(MF_SEPARATOR);
+            pSysMenu->AppendMenu(MF_STRING, IDM_ABOUTBOX, strAboutMenu);
+        }
+    }
+
+    // Set the icon for this dialog.  The framework does this automatically
+    //  when the application's main window is not a dialog
+    SetIcon(m_hIcon, TRUE);            // Set big icon
+    SetIcon(m_hIcon, FALSE);        // Set small icon
+
+    // char version[1024];
+    // _veBasePtr->GetVersion(version);
+    // AfxMessageBox(version, MB_OK);
+
+    // Initialize the voice engine; a failure is reported but the dialog
+    // still opens.
+    if (_veBasePtr->Init() != 0)
+    {
+         AfxMessageBox(_T("Init() failed "), MB_OKCANCEL);
+    }
+
+    // Probe for an external-transport build: on such builds
+    // SetSendDestination() fails with VE_EXTERNAL_TRANSPORT_ENABLED.
+    // The temporary channel is deleted right after the probe.
+    int ch = _veBasePtr->CreateChannel();
+    if (_veBasePtr->SetSendDestination(ch, 1234, "127.0.0.1") == -1)
+    {
+        if (_veBasePtr->LastError() == VE_EXTERNAL_TRANSPORT_ENABLED)
+        {
+            _strMsg.Format(_T("*** External transport build ***"));
+            SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg);
+            _externalTransportBuild = true;
+        }
+    }
+    _veBasePtr->DeleteChannel(ch);
+
+    // --- Add (preferred) local IPv4 address in title
+
+    if (_veNetworkPtr)
+    {
+        char localIP[64];
+        _veNetworkPtr->GetLocalIP(localIP);
+        CString str;
+        GetWindowText(str);
+        str.AppendFormat(_T("  [Local IPv4 address: %s]"), CharToTchar(localIP, 64));
+        SetWindowText(str);
+    }
+
+    // --- Volume sliders
+
+    // NOTE(review): mic/speaker volumes are assumed to span [0,255] to
+    // match the slider ranges set here — confirm against the VoE docs.
+    if (_veVolumeControlPtr)
+    {
+        unsigned int volume(0);
+        CSliderCtrl* slider(NULL);
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_INPUT_VOLUME);
+        slider->SetRangeMin(0);
+        slider->SetRangeMax(255);
+        _veVolumeControlPtr->GetMicVolume(volume);
+        slider->SetPos(volume);
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_OUTPUT_VOLUME);
+        slider->SetRangeMin(0);
+        slider->SetRangeMax(255);
+        _veVolumeControlPtr->GetSpeakerVolume(volume);
+        slider->SetPos(volume);
+    }
+
+    // --- Panning sliders
+
+    // Pan values come back as floats in [0.0, 1.0]; they are scaled to the
+    // slider's 0..10 range and inverted (slider top = full pan).
+    if (_veVolumeControlPtr)
+    {
+        float lVol(0.0);
+        float rVol(0.0);
+        int leftVol, rightVol;
+        unsigned int volumePan(0);
+        CSliderCtrl* slider(NULL);
+
+        _veVolumeControlPtr->GetOutputVolumePan(-1, lVol, rVol);
+
+        leftVol = (int)(lVol*10.0f);    // [0,10]
+        rightVol = (int)(rVol*10.0f);    // [0,10]
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_LEFT);
+        slider->SetRange(0,10);
+        slider->SetPos(10-leftVol);        // pos 0 <=> max pan 1.0 (top of slider)
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_RIGHT);
+        slider->SetRange(0,10);
+        slider->SetPos(10-rightVol);
+    }
+
+    // --- APM settings
+
+    // For each audio-processing feature (AGC/NS/EC): reflect the current
+    // state in its check box, or disable the check box if the query fails
+    // (feature not supported in this build).
+    bool enable(false);
+    CButton* button(NULL);
+
+    AgcModes agcMode(kAgcDefault);
+    if (_veApmPtr->GetAgcStatus(enable, agcMode) == 0)
+    {
+        button = (CButton*)GetDlgItem(IDC_CHECK_AGC);
+        enable ? button->SetCheck(BST_CHECKED) : button->SetCheck(BST_UNCHECKED);
+    }
+    else
+    {
+        // AGC is not supported
+        GetDlgItem(IDC_CHECK_AGC)->EnableWindow(FALSE);
+    }
+
+    NsModes nsMode(kNsDefault);
+    if (_veApmPtr->GetNsStatus(enable, nsMode) == 0)
+    {
+        button = (CButton*)GetDlgItem(IDC_CHECK_NS);
+        enable ? button->SetCheck(BST_CHECKED) : button->SetCheck(BST_UNCHECKED);
+    }
+    else
+    {
+        // NS is not supported
+        GetDlgItem(IDC_CHECK_NS)->EnableWindow(FALSE);
+    }
+
+    EcModes ecMode(kEcDefault);
+    if (_veApmPtr->GetEcStatus(enable, ecMode) == 0)
+    {
+        button = (CButton*)GetDlgItem(IDC_CHECK_EC);
+        enable ? button->SetCheck(BST_CHECKED) : button->SetCheck(BST_UNCHECKED);
+    }
+    else
+    {
+        // EC is not supported
+        GetDlgItem(IDC_CHECK_EC)->EnableWindow(FALSE);
+    }
+
+    // --- First channel section
+
+    // All channel-1 controls start disabled; they are enabled when the
+    // user creates channel 1 (OnBnClickedButtonCreate1).
+    GetDlgItem(IDC_COMBO_IP_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_TX_PORT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_RX_PORT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_COMBO_CODEC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_LIST_CODEC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_CODEC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DELETE_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_TRANS_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_VAD_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_MUTE_IN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_TX_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_RX_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DTMF_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_CONFERENCE_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_ON_HOLD_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_RXVAD)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_AGC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_NS_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_FEC)->EnableWindow(FALSE);
+
+    CComboBox* comboIP(NULL);
+    comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_1);
+    comboIP->AddString(_T("127.0.0.1"));
+    comboIP->SetCurSel(0);
+
+    SetDlgItemInt(IDC_EDIT_TX_PORT_1, 1111);
+    SetDlgItemInt(IDC_EDIT_RX_PORT_1, 1111);
+
+    // --- Add supported codecs to the codec combo box
+
+    // CNNB/CNWB (comfort-noise payloads) are filtered out; G.722.1 also
+    // shows its rate. The edit box shows the payload type of codec 0.
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_1);
+    comboCodec->ResetContent();
+
+    int numCodecs = _veCodecPtr->NumOfCodecs();
+    for (int idx = 0; idx < numCodecs; idx++)
+    {
+        CodecInst codec;
+        _veCodecPtr->GetCodec(idx, codec);
+        if ((_stricmp(codec.plname, "CNNB") != 0) &&
+            (_stricmp(codec.plname, "CNWB") != 0))
+        {
+            CString strCodec;
+            if (_stricmp(codec.plname, "G7221") == 0)
+                strCodec.Format(_T("%s (%d/%d/%d)"), CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq/1000, codec.rate/1000);
+            else
+                strCodec.Format(_T("%s (%d/%d)"), CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq/1000);
+            comboCodec->AddString(strCodec);
+        }
+        if (idx == 0)
+        {
+            SetDlgItemInt(IDC_EDIT_CODEC_1, codec.pltype);
+        }
+    }
+    comboCodec->SetCurSel(0);
+
+    // List of editable codec fields (see OnLbnSelchangeListCodec1).
+    CListBox* list = (CListBox*)GetDlgItem(IDC_LIST_CODEC_1);
+    list->AddString(_T("pltype"));
+    list->AddString(_T("plfreq"));
+    list->AddString(_T("pacsize"));
+    list->AddString(_T("channels"));
+    list->AddString(_T("rate"));
+    list->SetCurSel(0);
+
+    // --- Add available audio devices to the combo boxes
+
+    CComboBox* comboRecDevice(NULL);
+    CComboBox* comboPlayDevice(NULL);
+    comboRecDevice = (CComboBox*)GetDlgItem(IDC_COMBO_REC_DEVICE);
+    comboPlayDevice = (CComboBox*)GetDlgItem(IDC_COMBO_PLAY_DEVICE);
+    comboRecDevice->ResetContent();
+    comboPlayDevice->ResetContent();
+
+    if (_veHardwarePtr)
+    {
+        int numPlayout(0);
+        int numRecording(0);
+        char nameStr[128];
+        char guidStr[128];
+        CString strDevice;
+        AudioLayers audioLayer;
+
+        _veHardwarePtr->GetAudioDeviceLayer(audioLayer);
+        if (kAudioWindowsWave == audioLayer)
+        {
+            strDevice.FormatMessage(_T("Audio Layer: Windows Wave API"));
+        }
+        else if (kAudioWindowsCore == audioLayer)
+        {
+            strDevice.FormatMessage(_T("Audio Layer: Windows Core API"));
+        }
+        else
+        {
+            strDevice.FormatMessage(_T("Audio Layer: ** UNKNOWN **"));
+        }
+        SetDlgItemText(IDC_EDIT_AUDIO_LAYER, (LPCTSTR)strDevice);
+
+        _veHardwarePtr->GetNumOfRecordingDevices(numRecording);
+
+        for (int idx = 0; idx < numRecording; idx++)
+        {
+            _veHardwarePtr->GetRecordingDeviceName(idx, nameStr, guidStr);
+      strDevice.Format(_T("%s"), CharToTchar(nameStr, 128));
+            comboRecDevice->AddString(strDevice);
+        }
+        // Select default (communication) device in the combo box
+        // (device index -1 queries the default device's name).
+        _veHardwarePtr->GetRecordingDeviceName(-1, nameStr, guidStr);
+    CString tmp = CString(nameStr);
+        int nIndex = comboRecDevice->SelectString(-1, tmp);
+        ASSERT(nIndex != CB_ERR);
+
+        _veHardwarePtr->GetNumOfPlayoutDevices(numPlayout);
+
+        for (int idx = 0; idx < numPlayout; idx++)
+        {
+            _veHardwarePtr->GetPlayoutDeviceName(idx, nameStr, guidStr);
+      strDevice.Format(_T("%s"), CharToTchar(nameStr, 128));
+            comboPlayDevice->AddString(strDevice);
+        }
+        // Select default (communication) device in the combo box
+        _veHardwarePtr->GetPlayoutDeviceName(-1, nameStr, guidStr);
+        nIndex = comboPlayDevice->SelectString(-1, CString(nameStr));
+        ASSERT(nIndex != CB_ERR);
+    }
+
+    // --- Second channel section
+
+    // Channel-2 controls likewise start disabled until channel 2 is
+    // created (OnBnClickedButtonCreate2).
+    GetDlgItem(IDC_COMBO_IP_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_TX_PORT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_RX_PORT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_COMBO_CODEC_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DELETE_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_TRANS_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_VAD_3)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_MUTE_IN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_TX_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_RX_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DTMF_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_CONFERENCE_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_ON_HOLD_2)->EnableWindow(FALSE);
+
+    comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_2);
+    comboIP->AddString(_T("127.0.0.1"));
+    comboIP->SetCurSel(0);
+
+    SetDlgItemInt(IDC_EDIT_TX_PORT_2, 2222);
+    SetDlgItemInt(IDC_EDIT_RX_PORT_2, 2222);
+
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_2);
+    comboCodec->ResetContent();
+
+    // Channel 2 lists all codecs (no CN filtering, unlike channel 1).
+    if (_veCodecPtr)
+    {
+        numCodecs = _veCodecPtr->NumOfCodecs();
+        for (int idx = 0; idx < numCodecs; idx++)
+        {
+            CodecInst codec;
+            _veCodecPtr->GetCodec(idx, codec);
+            CString strCodec;
+            strCodec.Format(_T("%s (%d/%d)"), CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq/1000);
+            comboCodec->AddString(strCodec);
+        }
+        comboCodec->SetCurSel(0);
+    }
+
+    // --- Start windows timer
+
+    // 1-second periodic timer (id 0); presumably drives periodic UI
+    // refresh in the timer handler — confirm in OnTimer.
+    SetTimer(0, 1000, NULL);
+
+    return TRUE;  // return TRUE  unless you set the focus to a control
+}
+
+// Handles WM_SYSCOMMAND: shows the About box for the custom system-menu
+// entry; on SC_CLOSE, deletes any still-existing channels before letting
+// the default handler close the dialog.
+void CWinTestDlg::OnSysCommand(UINT nID, LPARAM lParam)
+{
+    if ((nID & 0xFFF0) == IDM_ABOUTBOX)
+    {
+        CAboutDlg aboutDlg;
+        aboutDlg.DoModal();
+        return;
+    }
+
+    if (nID == SC_CLOSE)
+    {
+        // Tear down both channels (if their edit boxes hold valid ids).
+        BOOL valid;
+        int ch = GetDlgItemInt(IDC_EDIT_1, &valid);
+        if (valid == TRUE)
+        {
+            _veBasePtr->DeleteChannel(ch);
+        }
+        ch = GetDlgItemInt(IDC_EDIT_2, &valid);
+        if (valid == TRUE)
+        {
+            _veBasePtr->DeleteChannel(ch);
+        }
+    }
+
+    CDialog::OnSysCommand(nID, lParam);
+}
+
+// If you add a minimize button to your dialog, you will need the code below
+//  to draw the icon.  For MFC applications using the document/view model,
+//  this is automatically done for you by the framework.
+
+// Paints the application icon centered in the client area when the
+// dialog is minimized; otherwise defers to the default dialog painting.
+void CWinTestDlg::OnPaint()
+{
+    if (!IsIconic())
+    {
+        CDialog::OnPaint();
+        return;
+    }
+
+    CPaintDC dc(this); // device context for painting
+
+    SendMessage(WM_ICONERASEBKGND, reinterpret_cast<WPARAM>(dc.GetSafeHdc()), 0);
+
+    // Center icon in client rectangle
+    const int iconWidth = GetSystemMetrics(SM_CXICON);
+    const int iconHeight = GetSystemMetrics(SM_CYICON);
+    CRect clientRect;
+    GetClientRect(&clientRect);
+    const int xPos = (clientRect.Width() - iconWidth + 1) / 2;
+    const int yPos = (clientRect.Height() - iconHeight + 1) / 2;
+
+    // Draw the icon
+    dc.DrawIcon(xPos, yPos, m_hIcon);
+}
+
+// The system calls this function to obtain the cursor to display while the user drags
+//  the minimized window.
+// Returns the cursor shown while the user drags the minimized window:
+// the dialog's application icon.
+HCURSOR CWinTestDlg::OnQueryDragIcon()
+{
+    HCURSOR dragCursor = static_cast<HCURSOR>(m_hIcon);
+    return dragCursor;
+}
+
+
+// Creates voice-engine channel #1, registers this dialog as its RTP
+// observer, enables all channel-1 controls, and installs the default
+// send codec (codec index 0).
+// Fix: removed the unused locals 'enabled' and 'includeCSRCs'.
+void CWinTestDlg::OnBnClickedButtonCreate1()
+{
+    int channel(0);
+    TEST((channel = _veBasePtr->CreateChannel()) >= 0, _T("CreateChannel(channel=%d)"), channel);
+    if (channel >= 0)
+    {
+        _veRtpRtcpPtr->RegisterRTPObserver(channel, *this);
+
+        // Publish the channel id and flip the channel-1 UI to "created".
+        SetDlgItemInt(IDC_EDIT_1, channel);
+        GetDlgItem(IDC_BUTTON_CREATE_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_DELETE_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_IP_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_TX_PORT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_RX_PORT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_CODEC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_LIST_CODEC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_CODEC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_TRANS_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_VAD_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_MUTE_IN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_TX_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_RX_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_DTMF_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_ON_HOLD_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_RXVAD)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_AGC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_NS_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_FEC)->EnableWindow(TRUE);
+
+        // Always set send codec to default codec <=> index 0.
+        CodecInst codec;
+        _veCodecPtr->GetCodec(0, codec);
+        _veCodecPtr->SetSendCodec(channel, codec);
+    }
+}
+
+// Creates voice-engine channel #2, registers this dialog as its RTP
+// observer, enables all channel-2 controls, and installs the default
+// send codec (codec index 0).
+// Fix: removed the unused locals 'enabled' and 'includeCSRCs'.
+void CWinTestDlg::OnBnClickedButtonCreate2()
+{
+    int channel(0);
+    TEST((channel = _veBasePtr->CreateChannel()) >=0 , _T("CreateChannel(%d)"), channel);
+    if (channel >= 0)
+    {
+        _veRtpRtcpPtr->RegisterRTPObserver(channel, *this);
+
+        // Publish the channel id and flip the channel-2 UI to "created".
+        SetDlgItemInt(IDC_EDIT_2, channel);
+        GetDlgItem(IDC_BUTTON_CREATE_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_DELETE_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_IP_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_TX_PORT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_RX_PORT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_CODEC_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_TRANS_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_VAD_3)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_MUTE_IN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_TX_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_RX_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_DTMF_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_CONFERENCE_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_ON_HOLD_2)->EnableWindow(TRUE);
+
+        // Always set send codec to default codec <=> index 0.
+        CodecInst codec;
+        _veCodecPtr->GetCodec(0, codec);
+        _veCodecPtr->SetSendCodec(channel, codec);
+    }
+}
+
+// Deletes voice-engine channel #1 and returns every channel-1 control to
+// its "no channel" state (disabled and unchecked). The long runs of
+// copy-pasted EnableWindow/SetCheck calls are replaced with ID tables
+// iterated in the original order.
+void CWinTestDlg::OnBnClickedButtonDelete1()
+{
+    BOOL ret;
+    int channel = GetDlgItemInt(IDC_EDIT_1, &ret);
+    if (ret != TRUE)
+        return;
+
+    _delayEstimate1 = false;
+    _rxVad = false;
+    _veRtpRtcpPtr->DeRegisterRTPObserver(channel);
+    TEST(_veBasePtr->DeleteChannel(channel) == 0, _T("DeleteChannel(channel=%d)"), channel);
+    SetDlgItemText(IDC_EDIT_1, _T(""));
+    GetDlgItem(IDC_BUTTON_CREATE_1)->EnableWindow(TRUE);
+
+    // Disable all channel-1 controls.
+    static const int controlsToDisable[] =
+    {
+        IDC_BUTTON_DELETE_1, IDC_COMBO_IP_1, IDC_EDIT_TX_PORT_1,
+        IDC_EDIT_RX_PORT_1, IDC_COMBO_CODEC_1, IDC_LIST_CODEC_1,
+        IDC_EDIT_CODEC_1, IDC_BUTTON_START_LISTEN_1,
+        IDC_BUTTON_START_PLAYOUT_1, IDC_BUTTON_START_SEND_1,
+        IDC_BUTTON_STOP_LISTEN_1, IDC_BUTTON_STOP_PLAYOUT_1,
+        IDC_BUTTON_STOP_SEND_1, IDC_BUTTON_DTMF_1, IDC_CHECK_EXT_TRANS_1,
+        IDC_CHECK_PLAY_FILE_IN_1, IDC_CHECK_PLAY_FILE_OUT_1,
+        IDC_CHECK_EXT_MEDIA_IN_1, IDC_CHECK_EXT_MEDIA_OUT_1,
+        IDC_CHECK_VAD_1, IDC_CHECK_MUTE_IN_1, IDC_CHECK_SRTP_TX_1,
+        IDC_CHECK_SRTP_RX_1, IDC_CHECK_EXT_ENCRYPTION_1,
+        IDC_CHECK_CONFERENCE_1, IDC_CHECK_ON_HOLD_1,
+        IDC_CHECK_DELAY_ESTIMATE_1, IDC_CHECK_AGC_1, IDC_CHECK_NS_1,
+        IDC_CHECK_RXVAD, IDC_CHECK_FEC
+    };
+    for (size_t i = 0; i < sizeof(controlsToDisable)/sizeof(controlsToDisable[0]); ++i)
+    {
+        GetDlgItem(controlsToDisable[i])->EnableWindow(FALSE);
+    }
+
+    SetDlgItemText(IDC_EDIT_RXVAD, _T(""));
+    GetDlgItem(IDC_EDIT_RXVAD)->EnableWindow(FALSE);
+
+    // Clear every channel-1 check box.
+    static const int checksToClear[] =
+    {
+        IDC_CHECK_EXT_TRANS_1, IDC_CHECK_PLAY_FILE_IN_1,
+        IDC_CHECK_PLAY_FILE_OUT_1, IDC_CHECK_EXT_MEDIA_IN_1,
+        IDC_CHECK_EXT_MEDIA_OUT_1, IDC_CHECK_VAD_1, IDC_CHECK_MUTE_IN_1,
+        IDC_CHECK_SRTP_TX_1, IDC_CHECK_SRTP_RX_1,
+        IDC_CHECK_EXT_ENCRYPTION_1, IDC_CHECK_CONFERENCE_1,
+        IDC_CHECK_ON_HOLD_1, IDC_CHECK_DELAY_ESTIMATE_1, IDC_CHECK_AGC_1,
+        IDC_CHECK_NS_1, IDC_CHECK_RXVAD, IDC_CHECK_FEC
+    };
+    for (size_t i = 0; i < sizeof(checksToClear)/sizeof(checksToClear[0]); ++i)
+    {
+        ((CButton*)GetDlgItem(checksToClear[i]))->SetCheck(BST_UNCHECKED);
+    }
+}
+
+// Deletes voice-engine channel #2 and returns every channel-2 control to
+// its "no channel" state (disabled and unchecked). The long runs of
+// copy-pasted EnableWindow/SetCheck calls are replaced with ID tables
+// iterated in the original order.
+void CWinTestDlg::OnBnClickedButtonDelete2()
+{
+    BOOL ret;
+    int channel = GetDlgItemInt(IDC_EDIT_2, &ret);
+    if (ret != TRUE)
+        return;
+
+    _delayEstimate2 = false;
+    _veRtpRtcpPtr->DeRegisterRTPObserver(channel);
+    TEST(_veBasePtr->DeleteChannel(channel) == 0, _T("DeleteChannel(%d)"), channel);
+    SetDlgItemText(IDC_EDIT_2, _T(""));
+    GetDlgItem(IDC_BUTTON_CREATE_2)->EnableWindow(TRUE);
+
+    // Disable all channel-2 controls.
+    static const int controlsToDisable[] =
+    {
+        IDC_BUTTON_DELETE_2, IDC_COMBO_IP_2, IDC_EDIT_TX_PORT_2,
+        IDC_EDIT_RX_PORT_2, IDC_COMBO_CODEC_2,
+        IDC_BUTTON_START_LISTEN_2, IDC_BUTTON_START_PLAYOUT_2,
+        IDC_BUTTON_START_SEND_2, IDC_BUTTON_STOP_LISTEN_2,
+        IDC_BUTTON_STOP_PLAYOUT_2, IDC_BUTTON_STOP_SEND_2,
+        IDC_CHECK_EXT_TRANS_2, IDC_CHECK_PLAY_FILE_IN_2,
+        IDC_CHECK_PLAY_FILE_OUT_2, IDC_CHECK_EXT_MEDIA_IN_2,
+        IDC_CHECK_EXT_MEDIA_OUT_2, IDC_CHECK_MUTE_IN_2, IDC_CHECK_VAD_3,
+        IDC_CHECK_SRTP_TX_2, IDC_CHECK_SRTP_RX_2,
+        IDC_CHECK_EXT_ENCRYPTION_2, IDC_CHECK_CONFERENCE_2,
+        IDC_BUTTON_DTMF_2, IDC_CHECK_ON_HOLD_2
+    };
+    for (size_t i = 0; i < sizeof(controlsToDisable)/sizeof(controlsToDisable[0]); ++i)
+    {
+        GetDlgItem(controlsToDisable[i])->EnableWindow(FALSE);
+    }
+
+    // Clear every channel-2 check box.
+    static const int checksToClear[] =
+    {
+        IDC_CHECK_EXT_TRANS_2, IDC_CHECK_PLAY_FILE_IN_2,
+        IDC_CHECK_PLAY_FILE_OUT_2, IDC_CHECK_EXT_MEDIA_IN_2,
+        IDC_CHECK_EXT_MEDIA_OUT_2, IDC_CHECK_VAD_3, IDC_CHECK_MUTE_IN_2,
+        IDC_CHECK_SRTP_TX_2, IDC_CHECK_SRTP_RX_2,
+        IDC_CHECK_EXT_ENCRYPTION_2, IDC_CHECK_CONFERENCE_2,
+        IDC_CHECK_ON_HOLD_2
+    };
+    for (size_t i = 0; i < sizeof(checksToClear)/sizeof(checksToClear[0]); ++i)
+    {
+        ((CButton*)GetDlgItem(checksToClear[i]))->SetCheck(BST_UNCHECKED);
+    }
+}
+
+// Pushes the IP in combo box 1 (plus the TX port) to the voice engine as
+// channel 1's send destination.
+// Fix: the original called GetBuffer() three times with a single
+// ReleaseBuffer(); use the CString overload of GetLBText and take the
+// raw buffer exactly once.
+// NOTE(review): reads list item 0 rather than GetCurSel(); harmless
+// today because the combo only ever contains "127.0.0.1".
+void CWinTestDlg::OnCbnSelchangeComboIp1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    int port = GetDlgItemInt(IDC_EDIT_TX_PORT_1);
+    CComboBox* comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_1);
+    CString str;
+    comboIP->GetLBText(0, str);
+    LPTSTR ip = str.GetBuffer(str.GetLength());
+    TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(ip, -1)) == 0,
+        _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, ip);
+    str.ReleaseBuffer();
+}
+
+// Pushes the IP in combo box 2 (plus the TX port) to the voice engine as
+// channel 2's send destination.
+// Fix: the original called GetBuffer() three times with a single
+// ReleaseBuffer(); use the CString overload of GetLBText and take the
+// raw buffer exactly once.
+// NOTE(review): reads list item 0 rather than GetCurSel(); harmless
+// today because the combo only ever contains "127.0.0.1".
+void CWinTestDlg::OnCbnSelchangeComboIp2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    int port = GetDlgItemInt(IDC_EDIT_TX_PORT_2);
+    CComboBox* comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_2);
+    CString str;
+    comboIP->GetLBText(0, str);
+    LPTSTR ip = str.GetBuffer(str.GetLength());
+    TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(ip, -1)) == 0,
+        _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, ip);
+    str.ReleaseBuffer();
+}
+
+// Applies the codec currently selected in combo box 1 as channel 1's
+// send codec; iSAC is put in adaptive-rate mode. Also resets the codec
+// parameter list selection and shows the payload type in the edit box.
+void CWinTestDlg::OnCbnSelchangeComboCodec1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+
+    CComboBox* codecCombo = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_1);
+    int selection = codecCombo->GetCurSel();
+
+    CodecInst codec;
+    _veCodecPtr->GetCodec(selection, codec);
+    if (strncmp(codec.plname, "ISAC", 4) == 0)
+    {
+        // Set iSAC to adaptive mode by default.
+        codec.rate = -1;
+    }
+    TEST(_veCodecPtr->SetSendCodec(channel, codec) == 0,
+        _T("SetSendCodec(channel=%d, plname=%s, pltype=%d, plfreq=%d, rate=%d, pacsize=%d, channels=%d)"),
+        channel, CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq, codec.rate, codec.pacsize, codec.channels);
+
+    CListBox* paramList = (CListBox*)GetDlgItem(IDC_LIST_CODEC_1);
+    paramList->SetCurSel(0);
+    SetDlgItemInt(IDC_EDIT_CODEC_1, codec.pltype);
+}
+
+// Edits one field of channel 1's current send codec. The field name is
+// taken from the parameter list box, the new value from the codec edit
+// box; the patched codec is then re-applied.
+void CWinTestDlg::OnLbnSelchangeListCodec1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+
+    CListBox* paramList = (CListBox*)GetDlgItem(IDC_LIST_CODEC_1);
+    int selection = paramList->GetCurSel();
+    if (selection < 0)
+        return;
+
+    CString fieldName;
+    paramList->GetText(selection, fieldName);
+
+    // Start from the codec currently in use and patch a single field.
+    CodecInst codec;
+    _veCodecPtr->GetSendCodec(channel, codec);
+
+    int newValue = GetDlgItemInt(IDC_EDIT_CODEC_1);
+    if (fieldName == _T("pltype"))
+        codec.pltype = newValue;
+    else if (fieldName == _T("plfreq"))
+        codec.plfreq = newValue;
+    else if (fieldName == _T("pacsize"))
+        codec.pacsize = newValue;
+    else if (fieldName == _T("channels"))
+        codec.channels = newValue;
+    else if (fieldName == _T("rate"))
+        codec.rate = newValue;
+
+    TEST(_veCodecPtr->SetSendCodec(channel, codec) == 0,
+        _T("SetSendCodec(channel=%d, plname=%s, pltype=%d, plfreq=%d, rate=%d, pacsize=%d, channels=%d)"),
+        channel, CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq, codec.rate, codec.pacsize, codec.channels);
+}
+
+// Applies the codec currently selected in combo box 2 as channel 2's
+// send codec.
+void CWinTestDlg::OnCbnSelchangeComboCodec2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+
+    CComboBox* codecCombo = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_2);
+    int selection = codecCombo->GetCurSel();
+
+    CodecInst codec;
+    _veCodecPtr->GetCodec(selection, codec);
+    TEST(_veCodecPtr->SetSendCodec(channel, codec) == 0,
+        _T("SetSendCodec(channel=%d, plname=%s, pltype=%d, plfreq=%d, rate=%d, pacsize=%d, channels=%d)"),
+        channel, CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq, codec.rate, codec.pacsize, codec.channels);
+}
+
+// Binds channel 1 to its local RX port and starts receiving; on success
+// swaps the Start/Stop Listen button states.
+void CWinTestDlg::OnBnClickedButtonStartListen1()
+{
+    int resReceiver(0);
+    int resReceive(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    int rxPort = GetDlgItemInt(IDC_EDIT_RX_PORT_1);
+    TEST((resReceiver = _veBasePtr->SetLocalReceiver(channel, rxPort)) == 0, _T("SetLocalReceiver(channel=%d, port=%d)"), channel, rxPort);
+    TEST((resReceive = _veBasePtr->StartReceive(channel)) == 0, _T("StartReceive(channel=%d)"), channel);
+    if ((resReceiver != 0) || (resReceive != 0))
+        return;
+    GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(TRUE);
+}
+
+// Binds channel 2 to its local RX port and starts receiving; on success
+// swaps the Start/Stop Listen button states.
+void CWinTestDlg::OnBnClickedButtonStartListen2()
+{
+    int resReceiver(0);
+    int resReceive(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    int rxPort = GetDlgItemInt(IDC_EDIT_RX_PORT_2);
+    TEST((resReceiver = _veBasePtr->SetLocalReceiver(channel, rxPort)) == 0, _T("SetLocalReceiver(channel=%d, port=%d)"), channel, rxPort);
+    TEST((resReceive = _veBasePtr->StartReceive(channel)) == 0, _T("StartReceive(channel=%d)"), channel);
+    if ((resReceiver != 0) || (resReceive != 0))
+        return;
+    GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(TRUE);
+}
+
+// Stops receiving on channel 1; on success swaps the Start/Stop Listen
+// button states back.
+void CWinTestDlg::OnBnClickedButtonStopListen1()
+{
+    int res(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((res = _veBasePtr->StopReceive(channel)) == 0, _T("StopListen(channel=%d)"), channel);
+    if (res != 0)
+        return;
+    GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(TRUE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(FALSE);
+}
+
+// Stops receiving on channel 2; on success swaps the Start/Stop Listen
+// button states back.
+void CWinTestDlg::OnBnClickedButtonStopListen2()
+{
+    int res(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((res = _veBasePtr->StopReceive(channel)) == 0, _T("StopListen(channel=%d)"), channel);
+    if (res != 0)
+        return;
+    GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(TRUE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(FALSE);
+}
+
+// Starts playout on channel 1; on success swaps the Start/Stop Playout
+// button states.
+void CWinTestDlg::OnBnClickedButtonStartPlayout1()
+{
+    int res(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((res = _veBasePtr->StartPlayout(channel)) == 0, _T("StartPlayout(channel=%d)"), channel);
+    if (res != 0)
+        return;
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(TRUE);
+}
+
+// Starts playout on channel 2; on success swaps the Start/Stop Playout
+// button states.
+void CWinTestDlg::OnBnClickedButtonStartPlayout2()
+{
+    int res(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((res = _veBasePtr->StartPlayout(channel)) == 0, _T("StartPlayout(channel=%d)"), channel);
+    if (res != 0)
+        return;
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(TRUE);
+}
+
+// Stops playout on channel 1; on success swaps the Start/Stop Playout
+// button states back.
+void CWinTestDlg::OnBnClickedButtonStopPlayout1()
+{
+    int res(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((res = _veBasePtr->StopPlayout(channel)) == 0, _T("StopPlayout(channel=%d)"), channel);
+    if (res != 0)
+        return;
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(TRUE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(FALSE);
+}
+
+// Stops playout on channel 2; on success swaps the Start/Stop Playout
+// button states back.
+// Fix: the TEST format string contains %d but no argument was supplied
+// (varargs undefined behavior); pass 'channel' like the channel-1 handler.
+void CWinTestDlg::OnBnClickedButtonStopPlayout2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((ret = _veBasePtr->StopPlayout(channel)) == 0, _T("StopPlayout(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(FALSE);
+    }
+}
+
+// Starts sending on channel 1. Unless an external transport is in use,
+// the send destination is first refreshed from the IP combo (_strComboIp1)
+// and the TX port edit box.
+// Fix: ReleaseBuffer() was called on an unrelated local CString while the
+// buffer acquired from _strComboIp1 was never released; also removed that
+// unused local.
+void CWinTestDlg::OnBnClickedButtonStartSend1()
+{
+    UpdateData(TRUE);  // update IP address
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    if (!_externalTransport)
+    {
+        int port = GetDlgItemInt(IDC_EDIT_TX_PORT_1);
+        LPTSTR ip = _strComboIp1.GetBuffer(7);
+        TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(ip, -1)) == 0,
+            _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, ip);
+        _strComboIp1.ReleaseBuffer();
+    }
+
+	//_veVideoSyncPtr->SetInitTimestamp(0,0);
+    // OnCbnSelchangeComboCodec1();
+
+    TEST((ret = _veBasePtr->StartSend(channel)) == 0, _T("StartSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStartSend2()
+{
+    UpdateData(TRUE);  // update IP address
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    if (!_externalTransport)
+    {
+        CString str;
+        int port = GetDlgItemInt(IDC_EDIT_TX_PORT_2);
+        TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(_strComboIp2.GetBuffer(7), -1)) == 0,
+            _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, _strComboIp2.GetBuffer(7));
+        str.ReleaseBuffer();
+    }
+
+    // OnCbnSelchangeComboCodec2();
+
+    TEST((ret = _veBasePtr->StartSend(channel)) == 0, _T("StartSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopSend1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((ret = _veBasePtr->StopSend(channel)) == 0, _T("StopSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopSend2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((ret = _veBasePtr->StopSend(channel)) == 0, _T("StopSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtTrans1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_TRANS_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veNetworkPtr->RegisterExternalTransport(channel, *_transportPtr)) == 0,
+            _T("RegisterExternalTransport(channel=%d, transport=0x%x)"), channel, _transportPtr);
+    }
+    else
+    {
+        TEST((ret = _veNetworkPtr->DeRegisterExternalTransport(channel)) == 0,
+            _T("DeRegisterExternalTransport(channel=%d)"), channel);
+    }
+    if (ret == 0)
+    {
+        _externalTransport = enable;
+    }
+    else
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtTrans2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_TRANS_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veNetworkPtr->RegisterExternalTransport(channel, *_transportPtr)) == 0,
+            _T("RegisterExternalTransport(channel=%d, transport=0x%x)"), channel, _transportPtr);
+    }
+    else
+    {
+        TEST((ret = _veNetworkPtr->DeRegisterExternalTransport(channel)) == 0,
+            _T("DeRegisterExternalTransport(channel=%d)"), channel);
+    }
+    if (ret == 0)
+    {
+        _externalTransport = enable;
+    }
+    else
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileIn1()
+{
+    // File path is relative to the location of 'voice_engine.gyp'.
+    const char micFile[] = "../test/data/voice_engine/audio_short16.pcm";
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        bool mix;
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float scale(1.0);
+
+        (_checkPlayFileIn1 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileAsMicrophone(channel, micFile, loop, mix, format, scale) == 0),
+            _T("StartPlayingFileAsMicrophone(channel=%d, file=%s, loop=%d, mix=%d, format=%d, scale=%2.1f)"),
+            channel, CharToTchar(micFile, -1), loop, mix, format, scale);
+        _checkPlayFileIn1++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileAsMicrophone(channel) == 0),
+            _T("StopPlayingFileAsMicrophone(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileIn2()
+{
+    // File path is relative to the location of 'voice_engine.gyp'.
+    const char micFile[] = "../test/data/voice_engine/audio_long16.pcm";
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        bool mix;
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float scale(1.0);
+
+        (_checkPlayFileIn2 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileAsMicrophone(channel, micFile, loop, mix, format, scale) == 0),
+            _T("StartPlayingFileAsMicrophone(channel=%d, file=%s, loop=%d, mix=%d, format=%d, scale=%2.1f)"),
+            channel, CharToTchar(micFile, -1), loop, mix, format, scale);
+        _checkPlayFileIn2++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileAsMicrophone(channel) == 0),
+            _T("StopPlayingFileAsMicrophone(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileOut1()
+{
+    const FileFormats formats[8]  = {{kFileFormatPcm16kHzFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile}};
+    // File path is relative to the location of 'voice_engine.gyp'.
+    const char spkrFiles[8][64] = {{"../../test/data/voice_engine/audio_short16.pcm"},
+                                   {"../../test/data/voice_engine/audio_tiny8.wav"},
+                                   {"../../test/data/voice_engine/audio_tiny11.wav"},
+                                   {"../../test/data/voice_engine/audio_tiny16.wav"},
+                                   {"../../test/data/voice_engine/audio_tiny22.wav"},
+                                   {"../../test/data/voice_engine/audio_tiny32.wav"},
+                                   {"../../test/data/voice_engine/audio_tiny44.wav"},
+                                   {"../../test/data/voice_engine/audio_tiny48.wav"}};
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        const bool loop(true);
+        const float volumeScaling(1.0);
+        const int startPointMs(0);
+        const int stopPointMs(0);
+        const FileFormats format = formats[_checkPlayFileOut1 % 8];
+        const char* spkrFile = spkrFiles[_checkPlayFileOut1 % 8];
+
+        CString str;
+        if (_checkPlayFileOut1 % 8 == 0)
+        {
+            str = _T("kFileFormatPcm16kHzFile");
+        }
+        else
+        {
+            str = _T("kFileFormatWavFile");
+        }
+        // (_checkPlayFileOut1 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileLocally(channel, spkrFile, loop, format, volumeScaling, startPointMs,stopPointMs) == 0),
+            _T("StartPlayingFileLocally(channel=%d, file=%s, loop=%d, format=%s, scale=%2.1f, start=%d, stop=%d)"),
+            channel, CharToTchar(spkrFile, -1), loop, str, volumeScaling, startPointMs, stopPointMs);
+        _checkPlayFileOut1++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileLocally(channel) == 0),
+            _T("StopPlayingFileLocally(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileOut2()
+{
+    // File path is relative to the location of 'voice_engine.gyp'.
+    const char spkrFile[] = "../test/data/voice_engine/audio_long16.pcm";
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float volumeScaling(1.0);
+        const int startPointMs(0);
+        const int stopPointMs(0);
+
+        // (_checkPlayFileOut2 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileLocally(channel, spkrFile, loop, format, volumeScaling, startPointMs,stopPointMs) == 0),
+            _T("StartPlayingFileLocally(channel=%d, file=%s, loop=%d, format=%d, scale=%2.1f, start=%d, stop=%d)"),
+            channel, CharToTchar(spkrFile, -1), loop, format, volumeScaling, startPointMs, stopPointMs);
+        // _checkPlayFileIn2++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileLocally(channel) == 0),
+            _T("StopPlayingFileLocally(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaIn1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kRecordingPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kRecordingPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaIn2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kRecordingPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kRecordingPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaOut1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kPlaybackPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kPlaybackPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaOut2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kPlaybackPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kPlaybackPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckVad1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_VAD_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        VadModes mode(kVadConventional);
+        if (_checkVAD1 % 4 == 0)
+        {
+            mode = kVadConventional;
+            str = _T("kVadConventional");
+        }
+        else if (_checkVAD1 % 4 == 1)
+        {
+            mode = kVadAggressiveLow;
+            str = _T("kVadAggressiveLow");
+        }
+        else if (_checkVAD1 % 4 == 2)
+        {
+            mode = kVadAggressiveMid;
+            str = _T("kVadAggressiveMid");
+        }
+        else if (_checkVAD1 % 4 == 3)
+        {
+            mode = kVadAggressiveHigh;
+            str = _T("kVadAggressiveHigh");
+        }
+        const bool disableDTX(false);
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, true, mode, disableDTX) == 0),
+            _T("SetVADStatus(channel=%d, enable=%d, mode=%s, disableDTX=%d)"), channel, enable, str, disableDTX);
+        _checkVAD1++;
+    }
+    else
+    {
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, false)) == 0, _T("SetVADStatus(channel=%d, enable=%d)"), channel, false);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckVad2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_VAD_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        VadModes mode(kVadConventional);
+        if (_checkVAD2 % 4 == 0)
+        {
+            mode = kVadConventional;
+            str = _T("kVadConventional");
+        }
+        else if (_checkVAD2 % 4 == 1)
+        {
+            mode = kVadAggressiveLow;
+            str = _T("kVadAggressiveLow");
+        }
+        else if (_checkVAD2 % 4 == 2)
+        {
+            mode = kVadAggressiveMid;
+            str = _T("kVadAggressiveMid");
+        }
+        else if (_checkVAD2 % 4 == 3)
+        {
+            mode = kVadAggressiveHigh;
+            str = _T("kVadAggressiveHigh");
+        }
+        const bool disableDTX(false);
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, true, mode, disableDTX)) == 0,
+            _T("SetVADStatus(channel=%d, enable=%d, mode=%s, disableDTX=%d)"), channel, enable, str, disableDTX);
+        _checkVAD2++;
+    }
+    else
+    {
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, false) == 0), _T("SetVADStatus(channel=%d, enable=%d)"), channel, false);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckMuteIn1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonMute = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN_1);
+    int check = buttonMute->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veVolumeControlPtr->SetInputMute(channel, enable) == 0,
+        _T("SetInputMute(channel=%d, enable=%d)"), channel, enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckMuteIn2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* buttonMute = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN_2);
+    int check = buttonMute->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veVolumeControlPtr->SetInputMute(channel, enable) == 0,
+        _T("SetInputMute(channel=%d, enable=%d)"), channel, enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpTx1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_TX_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP = false;
+    if (enable)
+    {
+        (_checkSrtpTx1++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPSend(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPSend(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPSend(channel) == 0), _T("DisableSRTPSend(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpTx2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_TX_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP = false;
+    if (enable)
+    {
+        (_checkSrtpTx2++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPSend(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPSend(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPSend(channel) == 0), _T("DisableSRTPSend(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpRx1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_RX_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP(false);
+    if (enable)
+    {
+        (_checkSrtpRx1++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPReceive(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPReceive(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPReceive(channel) == 0), _T("DisableSRTPReceive(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpRx2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_RX_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP(false);
+    if (enable)
+    {
+        (_checkSrtpRx2++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPReceive(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPReceive(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPReceive(channel)) == 0, _T("DisableSRTPReceive(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtEncryption1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veEncryptionPtr->RegisterExternalEncryption(channel, *_encryptionPtr)) == 0,
+            _T("RegisterExternalEncryption(channel=%d, encryption=0x%x)"), channel, _encryptionPtr);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DeRegisterExternalEncryption(channel)) == 0,
+            _T("DeRegisterExternalEncryption(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtEncryption2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veEncryptionPtr->RegisterExternalEncryption(channel, *_encryptionPtr)) == 0,
+            _T("RegisterExternalEncryption(channel=%d, encryption=0x%x)"), channel, _encryptionPtr);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DeRegisterExternalEncryption(channel)) == 0,
+            _T("DeRegisterExternalEncryption(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
void CWinTestDlg::OnBnClickedButtonDtmf1()
{
    // Open the modal DTMF/telephony-event dialog for the channel entered
    // in edit box 1.
    int channel = GetDlgItemInt(IDC_EDIT_1);
    CTelephonyEvent dlgTelephoneEvent(_vePtr, channel, this);
    dlgTelephoneEvent.DoModal();
}
+
void CWinTestDlg::OnBnClickedButtonDtmf2()
{
    // Open the modal DTMF/telephony-event dialog for the channel entered
    // in edit box 2.
    int channel = GetDlgItemInt(IDC_EDIT_2);
    CTelephonyEvent dlgTelephoneEvent(_vePtr, channel, this);
    dlgTelephoneEvent.DoModal();
}
+
void CWinTestDlg::OnBnClickedCheckConference1()
{
    // Placeholder handler for the channel-1 conference check box.
    // Not supported yet
}
+
void CWinTestDlg::OnBnClickedCheckConference2()
{
    // Placeholder handler for the channel-2 conference check box.
   // Not supported yet
}
+
void CWinTestDlg::OnBnClickedCheckOnHold1()
{
    // Channel-1 "on hold" check box.
    // SHIFT+click: only *query* the current on-hold state via
    // GetOnHoldStatus, revert the check box, and print the result.
    // Plain click: enable hold (cycling SendAndPlay -> SendOnly ->
    // PlayOnly on consecutive activations) or disable hold.
    SHORT shiftKeyIsPressed = ::GetAsyncKeyState(VK_SHIFT);

    CString str;
    int channel = GetDlgItemInt(IDC_EDIT_1);
    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_ON_HOLD_1);
    int check = button->GetCheck();

    if (shiftKeyIsPressed)
    {
        bool enabled(false);
        OnHoldModes mode(kHoldSendAndPlay);
        TEST(_veBasePtr->GetOnHoldStatus(channel, enabled, mode) == 0,
            _T("GetOnHoldStatus(channel=%d, enabled=?, mode=?)"), channel);
        // The click already toggled the box; flip it back since this was
        // only a status query, not a state change.
        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);

        switch (mode)
        {
        case kHoldSendAndPlay:
            str = _T("kHoldSendAndPlay");
            break;
        case kHoldSendOnly:
            str = _T("kHoldSendOnly");
            break;
        case kHoldPlayOnly:
            str = _T("kHoldPlayOnly");
            break;
        default:
            break;
        }
        PRINT_GET_RESULT(_T("enabled=%d, mode=%s"), enabled, str);
        return;
    }

    int ret(0);
    const bool enable = (check == BST_CHECKED);
    if (enable)
    {
        // Pick the hold mode for this activation from the cycling counter.
        OnHoldModes mode(kHoldSendAndPlay);
        if (_checkOnHold1 % 3 == 0)
        {
            mode = kHoldSendAndPlay;
            str = _T("kHoldSendAndPlay");
        }
        else if (_checkOnHold1 % 3 == 1)
        {
            mode = kHoldSendOnly;
            str = _T("kHoldSendOnly");
        }
        else if (_checkOnHold1 % 3 == 2)
        {
            mode = kHoldPlayOnly;
            str = _T("kHoldPlayOnly");
        }
        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable, mode)) == 0,
            _T("SetOnHoldStatus(channel=%d, enable=%d, mode=%s)"), channel, enable, str);
        _checkOnHold1++;
    }
    else
    {
        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable)) == 0,
            _T("SetOnHoldStatus(channel=%d, enable=%d)"), channel, enable);
    }
}
+
void CWinTestDlg::OnBnClickedCheckOnHold2()
{
    // Channel-2 "on hold" check box: enable hold (cycling SendAndPlay ->
    // SendOnly -> PlayOnly on consecutive activations) or disable hold.
    // NOTE(review): this handler uses the channel-1 counter _checkOnHold1
    // throughout — likely a copy-paste leftover; a _checkOnHold2 member is
    // not visible from here, so confirm against the class declaration
    // before changing it.
    int ret(0);
    int channel = GetDlgItemInt(IDC_EDIT_2);
    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_ON_HOLD_2);
    int check = button->GetCheck();
    const bool enable = (check == BST_CHECKED);
    if (enable)
    {
        CString str;
        OnHoldModes mode(kHoldSendAndPlay);
        if (_checkOnHold1 % 3 == 0)
        {
            mode = kHoldSendAndPlay;
            str = _T("kHoldSendAndPlay");
        }
        else if (_checkOnHold1 % 3 == 1)
        {
            mode = kHoldSendOnly;
            str = _T("kHoldSendOnly");
        }
        else if (_checkOnHold1 % 3 == 2)
        {
            mode = kHoldPlayOnly;
            str = _T("kHoldPlayOnly");
        }
        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable, mode)) == 0,
            _T("SetOnHoldStatus(channel=%d, enable=%d, mode=%s)"), channel, enable, str);
        _checkOnHold1++;
    }
    else
    {
        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable)) == 0,
            _T("SetOnHoldStatus(channel=%d, enable=%d)"), channel, enable);
    }
}
+
+void CWinTestDlg::OnBnClickedCheckDelayEstimate1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (enable)
+    {
+        _delayEstimate1 = true;
+        SetDlgItemInt(IDC_EDIT_DELAY_ESTIMATE_1, 0);
+    }
+    else
+    {
+        _delayEstimate1 = false;
+        SetDlgItemText(IDC_EDIT_DELAY_ESTIMATE_1, _T(""));
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckRxvad()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_RXVAD);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (enable)
+    {
+        _rxVad = true;
+        _veApmPtr->RegisterRxVadObserver(channel, *_rxVadObserverPtr);
+        SetDlgItemInt(IDC_EDIT_RXVAD, 0);
+    }
+    else
+    {
+        _rxVad = false;
+        _veApmPtr->DeRegisterRxVadObserver(channel);
+        SetDlgItemText(IDC_EDIT_RXVAD, _T(""));
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckAgc1()
+{
+    SHORT shiftKeyIsPressed = ::GetAsyncKeyState(VK_SHIFT);
+
+    CString str;
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_AGC_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (shiftKeyIsPressed)
+    {
+        bool enabled(false);
+        AgcModes mode(kAgcAdaptiveDigital);
+        TEST(_veApmPtr->GetRxAgcStatus(channel, enabled, mode) == 0,
+            _T("GetRxAgcStatus(channel=%d, enabled=?, mode=?)"), channel);
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+
+        switch (mode)
+        {
+        case kAgcAdaptiveAnalog:
+            str = _T("kAgcAdaptiveAnalog");
+            break;
+        case kAgcAdaptiveDigital:
+            str = _T("kAgcAdaptiveDigital");
+            break;
+        case kAgcFixedDigital:
+            str = _T("kAgcFixedDigital");
+            break;
+        default:
+            break;
+        }
+        PRINT_GET_RESULT(_T("enabled=%d, mode=%s"), enabled, str);
+        return;
+    }
+
+    if (enable)
+    {
+        CString str;
+        AgcModes mode(kAgcDefault);
+        if (_checkAGC1 % 3 == 0)
+        {
+            mode = kAgcDefault;
+            str = _T("kAgcDefault");
+        }
+        else if (_checkAGC1 % 3 == 1)
+        {
+            mode = kAgcAdaptiveDigital;
+            str = _T("kAgcAdaptiveDigital");
+        }
+        else if (_checkAGC1 % 3 == 2)
+        {
+            mode = kAgcFixedDigital;
+            str = _T("kAgcFixedDigital");
+        }
+        TEST(_veApmPtr->SetRxAgcStatus(channel, true, mode) == 0, _T("SetRxAgcStatus(channel=%d, enable=%d, %s)"), channel, enable, str);
+        _checkAGC1++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetRxAgcStatus(channel, false, kAgcUnchanged) == 0, _T("SetRxAgcStatus(channel=%d, enable=%d)"), channel, enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckNs1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonNS = (CButton*)GetDlgItem(IDC_CHECK_NS_1);
+    int check = buttonNS->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        NsModes mode(kNsDefault);
+        if (_checkNS1 % 6 == 0)
+        {
+            mode = kNsDefault;
+            str = _T("kNsDefault");
+        }
+        else if (_checkNS1 % 6 == 1)
+        {
+            mode = kNsConference;
+            str = _T("kNsConference");
+        }
+        else if (_checkNS1 % 6 == 2)
+        {
+            mode = kNsLowSuppression;
+            str = _T("kNsLowSuppression");
+        }
+        else if (_checkNS1 % 6 == 3)
+        {
+            mode = kNsModerateSuppression;
+            str = _T("kNsModerateSuppression");
+        }
+        else if (_checkNS1 % 6 == 4)
+        {
+            mode = kNsHighSuppression;
+            str = _T("kNsHighSuppression");
+        }
+        else if (_checkNS1 % 6 == 5)
+        {
+            mode = kNsVeryHighSuppression;
+            str = _T("kNsVeryHighSuppression");
+        }
+        TEST(_veApmPtr->SetRxNsStatus(channel, true, mode) == 0, _T("SetRxNsStatus(channel=%d, enable=%d, %s)"), channel, enable, str);
+        _checkNS1++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetRxNsStatus(channel, false, kNsUnchanged) == 0, _T("SetRxNsStatus(channel=%d, enable=%d)"), enable, channel);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//                         Channel-independent Operations
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::OnBnClickedCheckPlayFileIn()
+{
+    // File path is relative to the location of 'voice_engine.gyp'.
+    const char micFile[] = "../../test/data/voice_engine/audio_short16.pcm";
+    // const char micFile[] = "../../test/data/voice_engine/audio_long16noise.pcm";
+
+    int ret(0);
+    int channel(-1);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        bool mix;
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float scale(1.0);
+
+        (_checkPlayFileIn %2 == 0) ? mix = true : mix = false;
+        TEST(_veFilePtr->StartPlayingFileAsMicrophone(channel, micFile, loop, mix, format, scale) == 0,
+            _T("StartPlayingFileAsMicrophone(channel=%d, file=%s, loop=%d, mix=%d, format=%d, scale=%2.1f)"),
+            channel, CharToTchar(micFile, -1), loop, mix, format, scale);
+        _checkPlayFileIn++;
+    }
+    else
+    {
+        TEST(_veFilePtr->StopPlayingFileAsMicrophone(channel) == 0,
+            _T("StopPlayingFileAsMicrophone(channel=%d)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckRecMic()
+{
+    const char micFile[] = "/tmp/rec_mic_mono_16kHz.pcm";
+
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_REC_MIC);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veFilePtr->StartRecordingMicrophone(micFile, NULL) == 0, _T("StartRecordingMicrophone(file=%s)"), CharToTchar(micFile, -1));
+    }
+    else
+    {
+        TEST(_veFilePtr->StopRecordingMicrophone() == 0, _T("StopRecordingMicrophone()"));
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckAgc()
+{
+    CButton* buttonAGC = (CButton*)GetDlgItem(IDC_CHECK_AGC);
+    int check = buttonAGC->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        AgcModes mode(kAgcDefault);
+        if (_checkAGC % 4 == 0)
+        {
+            mode = kAgcDefault;
+            str = _T("kAgcDefault");
+        }
+        else if (_checkAGC % 4 == 1)
+        {
+            mode = kAgcAdaptiveAnalog;
+            str = _T("kAgcAdaptiveAnalog");
+        }
+        else if (_checkAGC % 4 == 2)
+        {
+            mode = kAgcAdaptiveDigital;
+            str = _T("kAgcAdaptiveDigital");
+        }
+        else if (_checkAGC % 4 == 3)
+        {
+            mode = kAgcFixedDigital;
+            str = _T("kAgcFixedDigital");
+        }
+        TEST(_veApmPtr->SetAgcStatus(true, mode) == 0, _T("SetAgcStatus(enable=%d, %s)"), enable, str);
+        _checkAGC++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetAgcStatus(false, kAgcUnchanged) == 0, _T("SetAgcStatus(enable=%d)"), enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckNs()
+{
+    CButton* buttonNS = (CButton*)GetDlgItem(IDC_CHECK_NS);
+    int check = buttonNS->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        NsModes mode(kNsDefault);
+        if (_checkNS % 6 == 0)
+        {
+            mode = kNsDefault;
+            str = _T("kNsDefault");
+        }
+        else if (_checkNS % 6 == 1)
+        {
+            mode = kNsConference;
+            str = _T("kNsConference");
+        }
+        else if (_checkNS % 6 == 2)
+        {
+            mode = kNsLowSuppression;
+            str = _T("kNsLowSuppression");
+        }
+        else if (_checkNS % 6 == 3)
+        {
+            mode = kNsModerateSuppression;
+            str = _T("kNsModerateSuppression");
+        }
+        else if (_checkNS % 6 == 4)
+        {
+            mode = kNsHighSuppression;
+            str = _T("kNsHighSuppression");
+        }
+        else if (_checkNS % 6 == 5)
+        {
+            mode = kNsVeryHighSuppression;
+            str = _T("kNsVeryHighSuppression");
+        }
+        TEST(_veApmPtr->SetNsStatus(true, mode) == 0, _T("SetNsStatus(enable=%d, %s)"), enable, str);
+        _checkNS++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetNsStatus(false, kNsUnchanged) == 0, _T("SetNsStatus(enable=%d)"), enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckEc()
+{
+    CButton* buttonEC = (CButton*)GetDlgItem(IDC_CHECK_EC);
+    int check = buttonEC->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        EcModes mode(kEcDefault);
+        if (_checkEC % 4 == 0)
+        {
+            mode = kEcDefault;
+            str = _T("kEcDefault");
+        }
+        else if (_checkEC % 4 == 1)
+        {
+            mode = kEcConference;
+            str = _T("kEcConference");
+        }
+        else if (_checkEC % 4 == 2)
+        {
+            mode = kEcAec;
+            str = _T("kEcAec");
+        }
+        else if (_checkEC % 4 == 3)
+        {
+            mode = kEcAecm;
+            str = _T("kEcAecm");
+        }
+        TEST(_veApmPtr->SetEcStatus(true, mode) == 0, _T("SetEcStatus(enable=%d, %s)"), enable, str);
+        _checkEC++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetEcStatus(false, kEcUnchanged) == 0, _T("SetEcStatus(enable=%d)"), enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckMuteIn()
+{
+    CButton* buttonMute = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN);
+    int check = buttonMute->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    const int channel(-1);
+    TEST(_veVolumeControlPtr->SetInputMute(channel, enable) == 0,
+        _T("SetInputMute(channel=%d, enable=%d)"), channel, enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaIn()
+{
+    const int channel(-1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kRecordingAllChannelsMixed, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kRecordingAllChannelsMixed, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kRecordingAllChannelsMixed) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kRecordingAllChannelsMixed)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaOut()
+{
+    const int channel(-1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kPlaybackAllChannelsMixed, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kPlaybackAllChannelsMixed, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kPlaybackAllChannelsMixed) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kPlaybackAllChannelsMixed)"), channel);
+    }
+}
+
+void CWinTestDlg::OnCbnSelchangeComboRecDevice()
+{
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_REC_DEVICE);
+    int index = comboCodec->GetCurSel();
+    TEST(_veHardwarePtr->SetRecordingDevice(index) == 0,
+        _T("SetRecordingDevice(index=%d)"), index);
+}
+
+void CWinTestDlg::OnCbnSelchangeComboPlayDevice()
+{
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_PLAY_DEVICE);
+    int index = comboCodec->GetCurSel();
+    TEST(_veHardwarePtr->SetPlayoutDevice(index) == 0,
+        _T("SetPlayoutDevice(index=%d)"), index);
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderInputVolume(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_INPUT_VOLUME);
+    slider->SetRangeMin(0);
+    slider->SetRangeMax(255);
+    int pos = slider->GetPos();
+
+    TEST(_veVolumeControlPtr->SetMicVolume(pos) == 0, _T("SetMicVolume(volume=%d)"), pos);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderOutputVolume(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_OUTPUT_VOLUME);
+    slider->SetRangeMin(0);
+    slider->SetRangeMax(255);
+    int pos = slider->GetPos();
+
+    TEST(_veVolumeControlPtr->SetSpeakerVolume(pos) == 0, _T("SetSpeakerVolume(volume=%d)"), pos);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderPanLeft(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_LEFT);
+    slider->SetRange(0,10);
+    int pos = 10 - slider->GetPos();    // 0 <=> lower end, 10 <=> upper end
+
+    float left(0.0);
+    float right(0.0);
+    const int channel(-1);
+
+    // Only left channel will be modified
+    _veVolumeControlPtr->GetOutputVolumePan(channel, left, right);
+
+    left = (float)((float)pos/10.0f);
+
+    TEST(_veVolumeControlPtr->SetOutputVolumePan(channel, left, right) == 0,
+        _T("SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)"), channel, left, right);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderPanRight(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_RIGHT);
+    slider->SetRange(0,10);
+    int pos = 10 - slider->GetPos();    // 0 <=> lower end, 10 <=> upper end
+
+    float left(0.0);
+    float right(0.0);
+    const int channel(-1);
+
+    // Only right channel will be modified
+    _veVolumeControlPtr->GetOutputVolumePan(channel, left, right);
+
+    right = (float)((float)pos/10.0f);
+
+    TEST(_veVolumeControlPtr->SetOutputVolumePan(channel, left, right) == 0,
+        _T("SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)"), channel, left, right);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnBnClickedButtonVersion()
+{
+    if (_veBasePtr)
+    {
+        char version[1024];
+        if (_veBasePtr->GetVersion(version) == 0)
+        {
+            AfxMessageBox(CString(version), MB_OK);
+        }
+        else
+        {
+            AfxMessageBox(_T("FAILED!"), MB_OK);
+        }
+    }
+}
+
// Call recording is not implemented in this test dialog; the checkbox is
// intentionally a no-op.
void CWinTestDlg::OnBnClickedCheckRecCall()
{
    // Not supported
}
+
+void CWinTestDlg::OnBnClickedCheckTypingDetection()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_TYPING_DETECTION);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veApmPtr->SetTypingDetectionStatus(enable) == 0, _T("SetTypingDetectionStatus(enable=%d)"), enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckFEC()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_FEC);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veRtpRtcpPtr->SetFECStatus(channel, enable) == 0, _T("SetFECStatus(enable=%d)"), enable);
+}
+
+// ----------------------------------------------------------------------------
+//                                   Message Handlers
+// ----------------------------------------------------------------------------
+
// Periodic UI refresh handler, re-armed every second via SetTimer() at the
// end of the function. Polls the voice engine for volumes, audio levels,
// playout buffer size, delay estimate, RX VAD decision, CPU load, the RX
// codec and RTCP statistics, and pushes the values into the corresponding
// dialog controls. Engine interface pointers are null-checked before use.
void CWinTestDlg::OnTimer(UINT_PTR nIDEvent)
{
    CString str;

    unsigned int svol(0);
    unsigned int mvol(0);

    _timerTicks++;

    // Get speaker and microphone volumes
    _veVolumeControlPtr->GetSpeakerVolume(svol);
    _veVolumeControlPtr->GetMicVolume(mvol);

    // Update speaker volume slider
    CSliderCtrl* sliderSpkr = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_OUTPUT_VOLUME);
    sliderSpkr->SetRangeMin(0);
    sliderSpkr->SetRangeMax(255);
    sliderSpkr->SetPos(svol);

    // Update microphone volume slider
    CSliderCtrl* sliderMic = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_INPUT_VOLUME);
    sliderMic->SetRangeMin(0);
    sliderMic->SetRangeMax(255);
    sliderMic->SetPos(mvol);

    unsigned int micLevel;
    unsigned int combinedOutputLevel;

    // Get audio levels (output level is for all channels mixed, -1)
    _veVolumeControlPtr->GetSpeechInputLevel(micLevel);
    _veVolumeControlPtr->GetSpeechOutputLevel(-1, combinedOutputLevel);

    // Update audio level controls
    CProgressCtrl* progressMic = (CProgressCtrl*)GetDlgItem(IDC_PROGRESS_AUDIO_LEVEL_IN);
    progressMic->SetRange(0,9);
    progressMic->SetStep(1);
    progressMic->SetPos(micLevel);
    CProgressCtrl* progressOut = (CProgressCtrl*)GetDlgItem(IDC_PROGRESS_AUDIO_LEVEL_OUT);
    progressOut->SetRange(0,9);
    progressOut->SetStep(1);
    progressOut->SetPos(combinedOutputLevel);

    // Update playout delay (buffer size)
    if (_veVideoSyncPtr)
    {
        int bufferMs(0);
        _veVideoSyncPtr->GetPlayoutBufferSize(bufferMs);
        SetDlgItemInt(IDC_EDIT_PLAYOUT_BUFFER_SIZE, bufferMs);
    }

    // Update the delay estimate when the feature is enabled in the UI
    if (_delayEstimate1 && _veVideoSyncPtr)
    {
        const int channel = GetDlgItemInt(IDC_EDIT_1);
        int delayMs(0);
        _veVideoSyncPtr->GetDelayEstimate(channel, delayMs);
        SetDlgItemInt(IDC_EDIT_DELAY_ESTIMATE_1, delayMs);
    }

    // Show the latest RX VAD decision reported by the observer callback
    if (_rxVad && _veApmPtr && _rxVadObserverPtr)
    {
        SetDlgItemInt(IDC_EDIT_RXVAD, _rxVadObserverPtr->_vadDecision);
    }

    // Display system and VoiceEngine CPU load
    if (_veHardwarePtr)
    {
        int load1, load2;
        _veHardwarePtr->GetSystemCPULoad(load1);
        _veHardwarePtr->GetCPULoad(load2);
        str.Format(_T("CPU load (system/VoE): %d/%d [%%]"), load1, load2);
        SetDlgItemText(IDC_EDIT_CPU_LOAD, (LPCTSTR)str);
    }

    // ret is TRUE only when the channel edit box holds a valid number
    BOOL ret;
    int channel = GetDlgItemInt(IDC_EDIT_1, &ret);

    if (_veCodecPtr)
    {
        if (ret == TRUE)
        {
            CodecInst codec;
            if (_veCodecPtr->GetRecCodec(channel, codec) == 0)
            {
        str.Format(_T("RX codec: %s, freq=%d, pt=%d, rate=%d, size=%d"), CharToTchar(codec.plname, 32), codec.plfreq, codec.pltype, codec.rate, codec.pacsize);
                SetDlgItemText(IDC_EDIT_RX_CODEC_1, (LPCTSTR)str);
            }
        }
    }

    if (_veRtpRtcpPtr)
    {
        if (ret == TRUE)
        {
            CallStatistics stats;
            if (_veRtpRtcpPtr->GetRTCPStatistics(channel, stats) == 0)
            {
                str.Format(_T("RTCP | RTP: cum=%u, ext=%d, frac=%u, jitter=%u | TX=%d, RX=%d, RTT=%d"),
                    stats.cumulativeLost, stats.extendedMax, stats.fractionLost, stats.jitterSamples, stats.packetsSent, stats.packetsReceived, stats.rttMs);
                SetDlgItemText(IDC_EDIT_RTCP_STAT_1, (LPCTSTR)str);
            }
        }
    }

    // Re-arm the timer: fire again in one second
    SetTimer(0, 1000, NULL);
    CDialog::OnTimer(nIDEvent);
}
+
+void CWinTestDlg::OnBnClickedButtonClearErrorCallback()
+{
+    _nErrorCallbacks = 0;
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, _T(""));
+}
+
+// ----------------------------------------------------------------------------
+//                                       TEST
+// ----------------------------------------------------------------------------
+
// Placeholder hook for ad-hoc manual experiments; intentionally empty.
void CWinTestDlg::OnBnClickedButtonTest1()
{
    // add tests here...
}
+
diff --git a/trunk/src/voice_engine/main/test/win_test/WinTestDlg.h b/trunk/src/voice_engine/main/test/win_test/WinTestDlg.h
new file mode 100644
index 0000000..6452775
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/WinTestDlg.h
@@ -0,0 +1,278 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
#pragma once

// Test helper macros. Results are written directly into dialog controls.
// NOTE(review): several macro bodies below end their last line with a
// trailing '\', so the blank line that follows is still part of the macro;
// keep those blank separator lines intact when editing.
//
// VC8 (VS2005, _MSC_VER 1400) added variadic macros; older compilers get
// single-format-argument fallbacks in the #else branch.
#if (_MSC_VER >= 1400)
// Formats a "getter" result and shows it in the GET-output edit box.
#define PRINT_GET_RESULT(...) \
    { \
        _strMsg.Format(__VA_ARGS__); \
        SetDlgItemText(IDC_EDIT_GET_OUTPUT, _strMsg); \
    } \

// Evaluates expression x; shows the formatted call description plus either
// "OK" or the VoE last-error code, and maintains the failure counter.
#define TEST(x, ...) \
    if (!(x)) \
    { \
        _strMsg.Format(__VA_ARGS__); \
        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
        _strErr.Format(_T("FAILED (error=%d)"), _veBasePtr->LastError()); \
        SetDlgItemText(IDC_EDIT_RESULT, _strErr); \
        _failCount++; \
        SetDlgItemInt(IDC_EDIT_N_FAILS, _failCount); \
        SetDlgItemInt(IDC_EDIT_LAST_ERROR, _veBasePtr->LastError()); \
    } \
    else \
    { \
        _strMsg.Format(__VA_ARGS__); \
        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
        SetDlgItemText(IDC_EDIT_RESULT, _T("OK")); \
    } \

// Variant for sub-dialogs: forwards the pass/fail result to the parent
// CWinTestDlg via UpdateTest() instead of touching local controls.
#define TEST2(x, ...) \
    if (!(x)) \
    { \
        _strMsg.Format(__VA_ARGS__); \
        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(true, _strMsg); \
    } \
    else \
    { \
        _strMsg.Format(__VA_ARGS__); \
        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(false, _strMsg); \
    }
#else
// Pre-VS2005 fallback: no variadic macros, so only a single pre-formatted
// expression 'exp' can be passed.
#define TEST(x, exp) \
    if (!(x)) \
    { \
        _strMsg.Format(exp); \
        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
        _strErr.Format("FAILED (error=%d)", _veBasePtr->LastError()); \
        SetDlgItemText(IDC_EDIT_RESULT, _strErr); \
        _failCount++; \
        SetDlgItemInt(IDC_EDIT_N_FAILS, _failCount); \
        SetDlgItemInt(IDC_EDIT_LAST_ERROR, _veBasePtr->LastError()); \
    } \
    else \
    { \
        _strMsg.Format(exp); \
        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
        SetDlgItemText(IDC_EDIT_RESULT, _T("OK")); \
    } \

// Pre-VS2005 fallback of TEST2 (see above).
#define TEST2(x, exp) \
    if (!(x)) \
    { \
        _strMsg.Format(exp); \
        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(true, _strMsg); \
    } \
    else \
    { \
        _strMsg.Format(exp); \
        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(false, _strMsg); \
    }
#endif
+
+#include "voe_base.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_codec.h"
+#include "voe_dtmf.h"
+#include "voe_encryption.h"
+#include "voe_external_media.h"
+#include "voe_file.h"
+#include "voe_hardware.h"
+#include "voe_network.h"
+#include "voe_video_sync.h"
+#include "voe_volume_control.h"
+
+#include "voe_audio_processing.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_errors.h"
+
+class MediaProcessImpl;
+class ConnectionObserver;
+class MyEncryption;
+class RxCallback;
+class MyTransport;
+
+using namespace webrtc;
+
+#define MAX_NUM_OF_CHANNELS    10
+
+// CWinTestDlg dialog
+class CWinTestDlg : public CDialog,
+                    public VoiceEngineObserver,
+                    public VoERTPObserver
+{
+// Construction
+public:
+    CWinTestDlg(CWnd* pParent = NULL);    // standard constructor
+    virtual ~CWinTestDlg();
+
+// Dialog Data
+    enum { IDD = IDD_WINTEST_DIALOG };
+
+    BOOL UpdateTest(bool failed, const CString& strMsg);
+
+protected:
+    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+
+protected:  // VoiceEngineObserver
+    virtual void CallbackOnError(const int channel, const int errCode);
+
+protected:    // VoERTPObserver
+    virtual void OnIncomingCSRCChanged(
+        const int channel, const unsigned int CSRC, const bool added);
+    virtual void OnIncomingSSRCChanged(
+        const int channel, const unsigned int SSRC);
+
+// Implementation
+protected:
+    HICON m_hIcon;
+
+    // Generated message map functions
+    virtual BOOL OnInitDialog();
+    afx_msg void OnSysCommand(UINT nID, LPARAM lParam);
+    afx_msg void OnPaint();
+    afx_msg HCURSOR OnQueryDragIcon();
+    DECLARE_MESSAGE_MAP()
+public:
+    afx_msg void OnBnClickedButtonCreate1();
+    afx_msg void OnBnClickedButtonDelete1();
+
+private:
+    VoiceEngine*    _vePtr;
+
+    VoECodec*               _veCodecPtr;
+    VoEExternalMedia*       _veExternalMediaPtr;
+    VoEVolumeControl*       _veVolumeControlPtr;
+    VoEEncryption*          _veEncryptionPtr;
+    VoEHardware*            _veHardwarePtr;
+    VoEVideoSync*           _veVideoSyncPtr;
+    VoENetwork*             _veNetworkPtr;
+    VoEFile*                _veFilePtr;
+    VoEAudioProcessing*     _veApmPtr;
+    VoEBase*                _veBasePtr;
+    VoERTP_RTCP*            _veRtpRtcpPtr;
+
+    MyTransport*            _transportPtr;
+    MediaProcessImpl*       _externalMediaPtr;
+    ConnectionObserver*     _connectionObserverPtr;
+    MyEncryption*           _encryptionPtr;
+    RxCallback*             _rxVadObserverPtr;
+
+private:
+    int                     _failCount;
+    CString                 _strMsg;
+    CString                 _strErr;
+    bool                    _externalTransport;
+    bool                    _externalTransportBuild;
+    int                     _checkPlayFileIn;
+    int                     _checkPlayFileIn1;
+    int                     _checkPlayFileIn2;
+    int                     _checkPlayFileOut1;
+    int                     _checkPlayFileOut2;
+    int                     _checkAGC;
+    int                     _checkAGC1;
+    int                     _checkNS;
+    int                     _checkNS1;
+    int                     _checkEC;
+    int                     _checkVAD1;
+    int                     _checkVAD2;
+    int                     _checkSrtpTx1;
+    int                     _checkSrtpTx2;
+    int                     _checkSrtpRx1;
+    int                     _checkSrtpRx2;
+    int                     _checkConference1;
+    int                     _checkConference2;
+    int                     _checkOnHold1;
+    int                     _checkOnHold2;
+    bool                    _delayEstimate1;
+    bool                    _delayEstimate2;
+    bool                    _rxVad;
+    int                     _nErrorCallbacks;
+    int                     _timerTicks;
+
+public:
+    afx_msg void OnBnClickedButtonCreate2();
+    afx_msg void OnBnClickedButtonDelete2();
+    afx_msg void OnCbnSelchangeComboCodec1();
+    afx_msg void OnBnClickedButtonStartListen1();
+    afx_msg void OnBnClickedButtonStopListen1();
+    afx_msg void OnBnClickedButtonStartPlayout1();
+    afx_msg void OnBnClickedButtonStopPlayout1();
+    afx_msg void OnBnClickedButtonStartSend1();
+    afx_msg void OnBnClickedButtonStopSend1();
+    afx_msg void OnCbnSelchangeComboIp2();
+    afx_msg void OnCbnSelchangeComboIp1();
+    afx_msg void OnCbnSelchangeComboCodec2();
+    afx_msg void OnBnClickedButtonStartListen2();
+    afx_msg void OnBnClickedButtonStopListen2();
+    afx_msg void OnBnClickedButtonStartPlayout2();
+    afx_msg void OnBnClickedButtonStopPlayout2();
+    afx_msg void OnBnClickedButtonStartSend2();
+    afx_msg void OnBnClickedButtonStopSend2();
+    afx_msg void OnBnClickedButtonTest11();
+    afx_msg void OnBnClickedCheckExtTrans1();
+    afx_msg void OnBnClickedCheckPlayFileIn1();
+    afx_msg void OnBnClickedCheckPlayFileOut1();
+    afx_msg void OnBnClickedCheckExtTrans2();
+    afx_msg void OnBnClickedCheckPlayFileIn2();
+    afx_msg void OnBnClickedCheckPlayFileOut2();
+    afx_msg void OnBnClickedCheckPlayFileIn();
+    afx_msg void OnBnClickedCheckPlayFileOut();
+    afx_msg void OnCbnSelchangeComboRecDevice();
+    afx_msg void OnCbnSelchangeComboPlayDevice();
+    afx_msg void OnBnClickedCheckExtMediaIn1();
+    afx_msg void OnBnClickedCheckExtMediaOut1();
+    afx_msg void OnNMReleasedcaptureSliderInputVolume(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnNMReleasedcaptureSliderOutputVolume(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnTimer(UINT_PTR nIDEvent);
+    afx_msg void OnBnClickedCheckAgc();
+    CString _strComboIp1;
+    CString _strComboIp2;
+    afx_msg void OnBnClickedCheckNs();
+    afx_msg void OnBnClickedCheckEc();
+    afx_msg void OnBnClickedCheckVad1();
+    afx_msg void OnBnClickedCheckVad2();
+    afx_msg void OnBnClickedCheckExtMediaIn2();
+    afx_msg void OnBnClickedCheckExtMediaOut2();
+    afx_msg void OnBnClickedCheckMuteIn();
+    afx_msg void OnBnClickedCheckMuteIn1();
+    afx_msg void OnBnClickedCheckMuteIn2();
+    afx_msg void OnBnClickedCheckSrtpTx1();
+    afx_msg void OnBnClickedCheckSrtpRx1();
+    afx_msg void OnBnClickedCheckSrtpTx2();
+    afx_msg void OnBnClickedCheckSrtpRx2();
+    afx_msg void OnBnClickedCheckExtEncryption1();
+    afx_msg void OnBnClickedCheckExtEncryption2();
+    afx_msg void OnBnClickedButtonDtmf1();
+    afx_msg void OnBnClickedCheckRecMic();
+    afx_msg void OnBnClickedButtonDtmf2();
+    afx_msg void OnBnClickedButtonTest1();
+    afx_msg void OnBnClickedCheckConference1();
+    afx_msg void OnBnClickedCheckConference2();
+    afx_msg void OnBnClickedCheckOnHold1();
+    afx_msg void OnBnClickedCheckOnHold2();
+    afx_msg void OnBnClickedCheckExtMediaIn();
+    afx_msg void OnBnClickedCheckExtMediaOut();
+    afx_msg void OnLbnSelchangeListCodec1();
+    afx_msg void OnNMReleasedcaptureSliderPanLeft(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnNMReleasedcaptureSliderPanRight(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnBnClickedButtonVersion();
+    afx_msg void OnBnClickedCheckDelayEstimate1();
+    afx_msg void OnBnClickedCheckRxvad();
+    afx_msg void OnBnClickedCheckAgc1();
+    afx_msg void OnBnClickedCheckNs1();
+    afx_msg void OnBnClickedCheckRecCall();
+    afx_msg void OnBnClickedCheckTypingDetection();
+    afx_msg void OnBnClickedCheckFEC();
+    afx_msg void OnBnClickedButtonClearErrorCallback();
+    afx_msg void OnBnClickedCheckBwe1();
+};
+#pragma once
diff --git a/trunk/src/voice_engine/main/test/win_test/res/WinTest.ico b/trunk/src/voice_engine/main/test/win_test/res/WinTest.ico
new file mode 100644
index 0000000..8a84ca3
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/res/WinTest.ico
Binary files differ
diff --git a/trunk/src/voice_engine/main/test/win_test/res/WinTest.rc2 b/trunk/src/voice_engine/main/test/win_test/res/WinTest.rc2
new file mode 100644
index 0000000..044bf7e
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/res/WinTest.rc2
@@ -0,0 +1,13 @@
+//

+// WinTest.RC2 - resources Microsoft Visual C++ does not edit directly

+//

+

+#ifdef APSTUDIO_INVOKED

+#error this file is not editable by Microsoft Visual C++

+#endif //APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Add manually edited resources here...

+

+/////////////////////////////////////////////////////////////////////////////

diff --git a/trunk/src/voice_engine/main/test/win_test/stdafx.cc b/trunk/src/voice_engine/main/test/win_test/stdafx.cc
new file mode 100644
index 0000000..6cdb906
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/stdafx.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.cpp : source file that includes just the standard includes
+// WinTest.pch will be the pre-compiled header
+// stdafx.obj will contain the pre-compiled type information
+
+#include "stdafx.h"
+
+
diff --git a/trunk/src/voice_engine/main/test/win_test/stdafx.h b/trunk/src/voice_engine/main/test/win_test/stdafx.h
new file mode 100644
index 0000000..b4d875c
--- /dev/null
+++ b/trunk/src/voice_engine/main/test/win_test/stdafx.h
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.h : include file for standard system include files,
+// or project specific include files that are used frequently,
+// but are changed infrequently
+
+#pragma once
+
+#ifndef _SECURE_ATL
+#define _SECURE_ATL 1
+#endif
+
+#ifndef VC_EXTRALEAN
+#define VC_EXTRALEAN		// Exclude rarely-used stuff from Windows headers
+#endif
+
+// Modify the following defines if you have to target a platform prior to the ones specified below.
+// Refer to MSDN for the latest info on corresponding values for different platforms.
+#ifndef WINVER				// Allow use of features specific to Windows XP or later.
+#define WINVER 0x0501		// Change this to the appropriate value to target other versions of Windows.
+#endif
+
+#ifndef _WIN32_WINNT		// Allow use of features specific to Windows XP or later.                   
+#define _WIN32_WINNT 0x0501	// Change this to the appropriate value to target other versions of Windows.
+#endif						
+
+#ifndef _WIN32_WINDOWS		// Allow use of features specific to Windows 98 or later.
+#define _WIN32_WINDOWS 0x0410 // Change this to the appropriate value to target Windows Me or later.
+#endif
+
+#ifndef _WIN32_IE			// Allow use of features specific to IE 6.0 or later.
+#define _WIN32_IE 0x0600	// Change this to the appropriate value to target other versions of IE.
+#endif
+
+#define _ATL_CSTRING_EXPLICIT_CONSTRUCTORS	// some CString constructors will be explicit
+
+// turns off MFC's hiding of some common and often safely ignored warning messages
+#define _AFX_ALL_WARNINGS
+
+#include <afxwin.h>         // MFC core and standard components
+#include <afxext.h>         // MFC extensions
+
+
+
+
+
+#ifndef _AFX_NO_OLE_SUPPORT
+#include <afxdtctl.h>		// MFC support for Internet Explorer 4 Common Controls
+#endif
+#ifndef _AFX_NO_AFXCMN_SUPPORT
+#include <afxcmn.h>			// MFC support for Windows Common Controls
+#endif // _AFX_NO_AFXCMN_SUPPORT
+
+
+
+
+
+
+
+
+
+#ifdef _UNICODE
+#if defined _M_IX86
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='x86' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#elif defined _M_IA64
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='ia64' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#elif defined _M_X64
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='amd64' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#else
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#endif
+#endif
+
+
diff --git a/trunk/src/voice_engine/voice_engine.gyp b/trunk/src/voice_engine/voice_engine.gyp
new file mode 100644
index 0000000..f45a269
--- /dev/null
+++ b/trunk/src/voice_engine/voice_engine.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    'main/source/voice_engine_core.gypi',
+  ],
+
+  # Test targets, excluded when building with Chromium.
+  'conditions': [
+    ['build_with_chromium==0', {
+      'includes': [
+        'main/test/voice_engine_tests.gypi',
+      ],
+    }],
+  ],
+}
diff --git a/trunk/test/OWNERS b/trunk/test/OWNERS
new file mode 100644
index 0000000..fec9caa
--- /dev/null
+++ b/trunk/test/OWNERS
@@ -0,0 +1,4 @@
+phoglund@webrtc.org

+kjellander@webrtc.org

+ivinnichenko@webrtc.org

+

diff --git a/trunk/test/data/audio_coding/F00.BIT20 b/trunk/test/data/audio_coding/F00.BIT20
new file mode 100644
index 0000000..a117e78
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F00.BIT30 b/trunk/test/data/audio_coding/F00.BIT30
new file mode 100644
index 0000000..be66b72
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F00.INP b/trunk/test/data/audio_coding/F00.INP
new file mode 100644
index 0000000..d7470b2
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F00.OUT20 b/trunk/test/data/audio_coding/F00.OUT20
new file mode 100644
index 0000000..416bcdd
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F00.OUT30 b/trunk/test/data/audio_coding/F00.OUT30
new file mode 100644
index 0000000..e6df3d8
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F00_tlm10.OUT20 b/trunk/test/data/audio_coding/F00_tlm10.OUT20
new file mode 100644
index 0000000..85e4d2d
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00_tlm10.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F00_tlm10.OUT30 b/trunk/test/data/audio_coding/F00_tlm10.OUT30
new file mode 100644
index 0000000..2aa3a22
--- /dev/null
+++ b/trunk/test/data/audio_coding/F00_tlm10.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01.BIT20 b/trunk/test/data/audio_coding/F01.BIT20
new file mode 100644
index 0000000..64e2c13
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01.BIT30 b/trunk/test/data/audio_coding/F01.BIT30
new file mode 100644
index 0000000..22785d6
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01.INP b/trunk/test/data/audio_coding/F01.INP
new file mode 100644
index 0000000..6524842
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01.OUT20 b/trunk/test/data/audio_coding/F01.OUT20
new file mode 100644
index 0000000..8ccdfce
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01.OUT30 b/trunk/test/data/audio_coding/F01.OUT30
new file mode 100644
index 0000000..488e636
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01_tlm10.OUT20 b/trunk/test/data/audio_coding/F01_tlm10.OUT20
new file mode 100644
index 0000000..31de738
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01_tlm10.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F01_tlm10.OUT30 b/trunk/test/data/audio_coding/F01_tlm10.OUT30
new file mode 100644
index 0000000..42d5ee7
--- /dev/null
+++ b/trunk/test/data/audio_coding/F01_tlm10.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02.BIT20 b/trunk/test/data/audio_coding/F02.BIT20
new file mode 100644
index 0000000..ffbd691
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02.BIT30 b/trunk/test/data/audio_coding/F02.BIT30
new file mode 100644
index 0000000..9c77fdf
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02.INP b/trunk/test/data/audio_coding/F02.INP
new file mode 100644
index 0000000..3c10a16
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02.OUT20 b/trunk/test/data/audio_coding/F02.OUT20
new file mode 100644
index 0000000..f5a2199
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02.OUT30 b/trunk/test/data/audio_coding/F02.OUT30
new file mode 100644
index 0000000..9b03e72
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02_tlm10.OUT20 b/trunk/test/data/audio_coding/F02_tlm10.OUT20
new file mode 100644
index 0000000..4ac9221
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02_tlm10.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F02_tlm10.OUT30 b/trunk/test/data/audio_coding/F02_tlm10.OUT30
new file mode 100644
index 0000000..5420cfe
--- /dev/null
+++ b/trunk/test/data/audio_coding/F02_tlm10.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F03.BIT20 b/trunk/test/data/audio_coding/F03.BIT20
new file mode 100644
index 0000000..a4dfc04
--- /dev/null
+++ b/trunk/test/data/audio_coding/F03.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F03.BIT30 b/trunk/test/data/audio_coding/F03.BIT30
new file mode 100644
index 0000000..ec41baf
--- /dev/null
+++ b/trunk/test/data/audio_coding/F03.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F03.INP b/trunk/test/data/audio_coding/F03.INP
new file mode 100644
index 0000000..503fefb
--- /dev/null
+++ b/trunk/test/data/audio_coding/F03.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F03.OUT20 b/trunk/test/data/audio_coding/F03.OUT20
new file mode 100644
index 0000000..04bf300
--- /dev/null
+++ b/trunk/test/data/audio_coding/F03.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F03.OUT30 b/trunk/test/data/audio_coding/F03.OUT30
new file mode 100644
index 0000000..399834c
--- /dev/null
+++ b/trunk/test/data/audio_coding/F03.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F04.BIT20 b/trunk/test/data/audio_coding/F04.BIT20
new file mode 100644
index 0000000..e4da6bd
--- /dev/null
+++ b/trunk/test/data/audio_coding/F04.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F04.BIT30 b/trunk/test/data/audio_coding/F04.BIT30
new file mode 100644
index 0000000..1c01f37
--- /dev/null
+++ b/trunk/test/data/audio_coding/F04.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F04.INP b/trunk/test/data/audio_coding/F04.INP
new file mode 100644
index 0000000..401f3fe
--- /dev/null
+++ b/trunk/test/data/audio_coding/F04.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F04.OUT20 b/trunk/test/data/audio_coding/F04.OUT20
new file mode 100644
index 0000000..9350001
--- /dev/null
+++ b/trunk/test/data/audio_coding/F04.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F04.OUT30 b/trunk/test/data/audio_coding/F04.OUT30
new file mode 100644
index 0000000..5e202bc
--- /dev/null
+++ b/trunk/test/data/audio_coding/F04.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F05.BIT20 b/trunk/test/data/audio_coding/F05.BIT20
new file mode 100644
index 0000000..64bc069
--- /dev/null
+++ b/trunk/test/data/audio_coding/F05.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F05.BIT30 b/trunk/test/data/audio_coding/F05.BIT30
new file mode 100644
index 0000000..64a2ffd
--- /dev/null
+++ b/trunk/test/data/audio_coding/F05.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F05.INP b/trunk/test/data/audio_coding/F05.INP
new file mode 100644
index 0000000..3e59981
--- /dev/null
+++ b/trunk/test/data/audio_coding/F05.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F05.OUT20 b/trunk/test/data/audio_coding/F05.OUT20
new file mode 100644
index 0000000..2421ea4
--- /dev/null
+++ b/trunk/test/data/audio_coding/F05.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F05.OUT30 b/trunk/test/data/audio_coding/F05.OUT30
new file mode 100644
index 0000000..a4a1874
--- /dev/null
+++ b/trunk/test/data/audio_coding/F05.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F06.BIT20 b/trunk/test/data/audio_coding/F06.BIT20
new file mode 100644
index 0000000..08cc5d4
--- /dev/null
+++ b/trunk/test/data/audio_coding/F06.BIT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F06.BIT30 b/trunk/test/data/audio_coding/F06.BIT30
new file mode 100644
index 0000000..d45be54
--- /dev/null
+++ b/trunk/test/data/audio_coding/F06.BIT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/F06.INP b/trunk/test/data/audio_coding/F06.INP
new file mode 100644
index 0000000..429019b
--- /dev/null
+++ b/trunk/test/data/audio_coding/F06.INP
Binary files differ
diff --git a/trunk/test/data/audio_coding/F06.OUT20 b/trunk/test/data/audio_coding/F06.OUT20
new file mode 100644
index 0000000..4abc5d1
--- /dev/null
+++ b/trunk/test/data/audio_coding/F06.OUT20
Binary files differ
diff --git a/trunk/test/data/audio_coding/F06.OUT30 b/trunk/test/data/audio_coding/F06.OUT30
new file mode 100644
index 0000000..b55b96d
--- /dev/null
+++ b/trunk/test/data/audio_coding/F06.OUT30
Binary files differ
diff --git a/trunk/test/data/audio_coding/READ.ME b/trunk/test/data/audio_coding/READ.ME
new file mode 100644
index 0000000..e42b9b3
--- /dev/null
+++ b/trunk/test/data/audio_coding/READ.ME
@@ -0,0 +1,18 @@
+Test files for Audio Coding Module

+

+testfile32kHz.pcm - mono speech file sampled at 32 kHz

+teststereo32kHz.pcm - stereo speech file sampled at 32 kHz

+

+Test and reference vectors to verify correct execution of PacketCable 

+iLBC Fixed Point Reference Code

+

+Version 1.0.6

+Format: all .INP and .OUT files contain 16 bit sampled data using the 

+Intel (PC) format. The .BIT files are stored in the appropriate byte 

+sequence (big-endian format).

+

+*.INP  - input files

+*.BIT20 - bit stream files 20 ms mode

+*.OUT20 - output files 20 ms mode (on a channel without packet loss)

+*.BIT30 - bit stream files 30 ms mode

+*.OUT30 - output files 30 ms mode (on a channel without packet loss)

diff --git a/trunk/test/data/audio_coding/clean.chn b/trunk/test/data/audio_coding/clean.chn
new file mode 100644
index 0000000..1dc0e07
--- /dev/null
+++ b/trunk/test/data/audio_coding/clean.chn
Binary files differ
diff --git a/trunk/test/data/audio_coding/testfile32kHz.pcm b/trunk/test/data/audio_coding/testfile32kHz.pcm
new file mode 100644
index 0000000..cd4bd2b
--- /dev/null
+++ b/trunk/test/data/audio_coding/testfile32kHz.pcm
Binary files differ
diff --git a/trunk/test/data/audio_coding/teststereo32kHz.pcm b/trunk/test/data/audio_coding/teststereo32kHz.pcm
new file mode 100644
index 0000000..9b84b62
--- /dev/null
+++ b/trunk/test/data/audio_coding/teststereo32kHz.pcm
Binary files differ
diff --git a/trunk/test/data/audio_coding/tlm10.chn b/trunk/test/data/audio_coding/tlm10.chn
new file mode 100644
index 0000000..33cc734
--- /dev/null
+++ b/trunk/test/data/audio_coding/tlm10.chn
Binary files differ
diff --git a/trunk/test/data/audio_coding/universal.rtp b/trunk/test/data/audio_coding/universal.rtp
new file mode 100644
index 0000000..fd2038f
--- /dev/null
+++ b/trunk/test/data/audio_coding/universal.rtp
Binary files differ
diff --git a/trunk/test/data/audio_coding/universal_ref.pcm b/trunk/test/data/audio_coding/universal_ref.pcm
new file mode 100644
index 0000000..411193a
--- /dev/null
+++ b/trunk/test/data/audio_coding/universal_ref.pcm
Binary files differ
diff --git a/trunk/test/data/audio_device/audio_short16.pcm b/trunk/test/data/audio_device/audio_short16.pcm
new file mode 100644
index 0000000..15a0f18
--- /dev/null
+++ b/trunk/test/data/audio_device/audio_short16.pcm
Binary files differ
diff --git a/trunk/test/data/audio_device/audio_short44.pcm b/trunk/test/data/audio_device/audio_short44.pcm
new file mode 100644
index 0000000..011cdce
--- /dev/null
+++ b/trunk/test/data/audio_device/audio_short44.pcm
Binary files differ
diff --git a/trunk/test/data/audio_device/audio_short48.pcm b/trunk/test/data/audio_device/audio_short48.pcm
new file mode 100644
index 0000000..06fd826
--- /dev/null
+++ b/trunk/test/data/audio_device/audio_short48.pcm
Binary files differ
diff --git a/trunk/test/data/audio_device/audio_short8.pcm b/trunk/test/data/audio_device/audio_short8.pcm
new file mode 100644
index 0000000..43afd5a
--- /dev/null
+++ b/trunk/test/data/audio_device/audio_short8.pcm
Binary files differ
diff --git a/trunk/test/data/audio_processing/android/output_data_fixed.pb b/trunk/test/data/audio_processing/android/output_data_fixed.pb
new file mode 100644
index 0000000..2f45fd3
--- /dev/null
+++ b/trunk/test/data/audio_processing/android/output_data_fixed.pb
Binary files differ
diff --git a/trunk/test/data/audio_processing/android/output_data_float.pb b/trunk/test/data/audio_processing/android/output_data_float.pb
new file mode 100644
index 0000000..1bf18c2
--- /dev/null
+++ b/trunk/test/data/audio_processing/android/output_data_float.pb
Binary files differ
diff --git a/trunk/test/data/audio_processing/output_data_fixed.pb b/trunk/test/data/audio_processing/output_data_fixed.pb
new file mode 100644
index 0000000..2ca82e3
--- /dev/null
+++ b/trunk/test/data/audio_processing/output_data_fixed.pb
Binary files differ
diff --git a/trunk/test/data/audio_processing/output_data_float.pb b/trunk/test/data/audio_processing/output_data_float.pb
new file mode 100644
index 0000000..d9a51bb
--- /dev/null
+++ b/trunk/test/data/audio_processing/output_data_float.pb
Binary files differ
diff --git a/trunk/test/data/common_video/jpeg/webrtc_logo.jpg b/trunk/test/data/common_video/jpeg/webrtc_logo.jpg
new file mode 100644
index 0000000..ddb6192
--- /dev/null
+++ b/trunk/test/data/common_video/jpeg/webrtc_logo.jpg
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/H263Foreman_CIF_Iframe.bin b/trunk/test/data/rtp_rtcp/H263Foreman_CIF_Iframe.bin
new file mode 100644
index 0000000..00e3f80
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/H263Foreman_CIF_Iframe.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/H263Foreman_CIF_Pframe.bin b/trunk/test/data/rtp_rtcp/H263Foreman_CIF_Pframe.bin
new file mode 100644
index 0000000..57f94c3
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/H263Foreman_CIF_Pframe.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/H263_CIF_IFRAME.bin b/trunk/test/data/rtp_rtcp/H263_CIF_IFRAME.bin
new file mode 100644
index 0000000..00e3f80
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/H263_CIF_IFRAME.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/H263_CIF_PFRAME.bin b/trunk/test/data/rtp_rtcp/H263_CIF_PFRAME.bin
new file mode 100644
index 0000000..248f3a1
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/H263_CIF_PFRAME.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/H263_QCIF_IFRAME.bin b/trunk/test/data/rtp_rtcp/H263_QCIF_IFRAME.bin
new file mode 100644
index 0000000..0fa144c
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/H263_QCIF_IFRAME.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR0.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR0.bin
new file mode 100644
index 0000000..19df13c
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR0.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR1.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR1.bin
new file mode 100644
index 0000000..b7b7c94
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR1.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR2.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR2.bin
new file mode 100644
index 0000000..257835c
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR2.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR3.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR3.bin
new file mode 100644
index 0000000..4a8e375
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR3.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4.bin
new file mode 100644
index 0000000..28cd99c
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4_1.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4_1.bin
new file mode 100644
index 0000000..5080b88
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4_1.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4_2.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4_2.bin
new file mode 100644
index 0000000..2c2f288
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR4_2.bin
Binary files differ
diff --git a/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR5.bin b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR5.bin
new file mode 100644
index 0000000..da7235a
--- /dev/null
+++ b/trunk/test/data/rtp_rtcp/RTCPPacketTMMBR5.bin
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_long16.pcm b/trunk/test/data/voice_engine/audio_long16.pcm
new file mode 100644
index 0000000..853e0df
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_long16.pcm
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_long16.wav b/trunk/test/data/voice_engine/audio_long16.wav
new file mode 100644
index 0000000..ebe91c4
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_long16.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_long16big_endian.pcm b/trunk/test/data/voice_engine/audio_long16big_endian.pcm
new file mode 100644
index 0000000..563e4e9
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_long16big_endian.pcm
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_long16noise.pcm b/trunk/test/data/voice_engine/audio_long16noise.pcm
new file mode 100644
index 0000000..a7be537
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_long16noise.pcm
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_long8.pcm b/trunk/test/data/voice_engine/audio_long8.pcm
new file mode 100644
index 0000000..85d17e5
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_long8.pcm
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_long8mulaw.wav b/trunk/test/data/voice_engine/audio_long8mulaw.wav
new file mode 100644
index 0000000..2d3d8b3
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_long8mulaw.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_short16.pcm b/trunk/test/data/voice_engine/audio_short16.pcm
new file mode 100644
index 0000000..15a0f18
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_short16.pcm
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny11.wav b/trunk/test/data/voice_engine/audio_tiny11.wav
new file mode 100644
index 0000000..6db80d5
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny11.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny16.wav b/trunk/test/data/voice_engine/audio_tiny16.wav
new file mode 100644
index 0000000..baab0ac
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny16.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny22.wav b/trunk/test/data/voice_engine/audio_tiny22.wav
new file mode 100644
index 0000000..b421867
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny22.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny32.wav b/trunk/test/data/voice_engine/audio_tiny32.wav
new file mode 100644
index 0000000..773ac23
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny32.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny44.wav b/trunk/test/data/voice_engine/audio_tiny44.wav
new file mode 100644
index 0000000..c9faa45
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny44.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny48.wav b/trunk/test/data/voice_engine/audio_tiny48.wav
new file mode 100644
index 0000000..8ebf11a
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny48.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/audio_tiny8.wav b/trunk/test/data/voice_engine/audio_tiny8.wav
new file mode 100644
index 0000000..d71c65e
--- /dev/null
+++ b/trunk/test/data/voice_engine/audio_tiny8.wav
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb.rtp
new file mode 100644
index 0000000..02abbc2
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb_jitter.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb_jitter.rtp
new file mode 100644
index 0000000..4ed110b
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb_jitter.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/README.txt b/trunk/test/data/voice_engine/stereo_rtp_files/README.txt
new file mode 100644
index 0000000..976ac56
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/README.txt
@@ -0,0 +1,4 @@
+Use RTP Play tool with command 'rtpplay.exe -v -T -f <path>\<file.rtp> 127.0.0.1/1236' 
+Example: rtpplay.exe -v -T -f hrtf_g722_1C_48.rtp 127.0.0.1/1236.  
+This sends the stereo rtp file to port 1236.  
+You can hear the voice getting panned from left, right and center.   
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48.rtp
new file mode 100644
index 0000000..b96d59b
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48_jitterT2.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48_jitterT2.rtp
new file mode 100644
index 0000000..527a50a
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48_jitterT2.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/rtpplay.exe b/trunk/test/data/voice_engine/stereo_rtp_files/rtpplay.exe
new file mode 100755
index 0000000..6f938c8
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/rtpplay.exe
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_g729.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_g729.rtp
new file mode 100644
index 0000000..3c36e30
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_g729.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_g729_jitter.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_g729_jitter.rtp
new file mode 100644
index 0000000..913226c
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_g729_jitter.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcm16wb.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcm16wb.rtp
new file mode 100644
index 0000000..729b565
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcm16wb.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcm16wb_jitter.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcm16wb_jitter.rtp
new file mode 100644
index 0000000..efa2800
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcm16wb_jitter.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu.rtp
new file mode 100644
index 0000000..bb2d93c
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_jitter.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_jitter.rtp
new file mode 100644
index 0000000..fb79378
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_jitter.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad.rtp
new file mode 100644
index 0000000..eebcf34
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad_jitter.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad_jitter.rtp
new file mode 100644
index 0000000..5c368b4
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad_jitter.rtp
Binary files differ
diff --git a/trunk/test/data/voice_engine/stereo_rtp_files/toggling_stereo_g729_pt18_pt125.rtp b/trunk/test/data/voice_engine/stereo_rtp_files/toggling_stereo_g729_pt18_pt125.rtp
new file mode 100644
index 0000000..1f713f6
--- /dev/null
+++ b/trunk/test/data/voice_engine/stereo_rtp_files/toggling_stereo_g729_pt18_pt125.rtp
Binary files differ
diff --git a/trunk/test/functional_test/README b/trunk/test/functional_test/README
new file mode 100644
index 0000000..093f759
--- /dev/null
+++ b/trunk/test/functional_test/README
@@ -0,0 +1,46 @@
+This test client is a simple functional test for a WebRTC enabled browser. It
+has only been tested with Chrome, and is most likely only working with Chrome at
+the moment. The following instructions are in part Chrome specific.
+
+The following is necessary to run the test:
+- A WebRTC enabled Chrome binary. (Available in dev or canary channel, 18.0.1008
+or newer.)
+- A peerconnection_server binary (make peerconnection_server).
+
+It can be used in two scenarios:
+1. Single client calling itself with the server test page in loopback mode as a
+fake client.
+2. Call between two clients.
+
+To start the test for scenario (1):
+1. Start peerconnection_server.
+2. Start the WebRTC Chrome build:
+$ <path_to_chrome_binary>/chrome --enable-media-stream
+The --enable-media-stream flag is required for the time being.
+3. Open the server test page, ensure loopback is enabled, choose a name (for
+example "loopback") and connect to the server.
+For version 18.0.1008 to 18.0.1020, use:
+http://libjingle.googlecode.com/svn-history/r103/trunk/talk/examples/peerconnection/server/server_test.html
+For version 18.0.1021 and later, use:
+http://libjingle.googlecode.com/svn/trunk/talk/examples/peerconnection/server/server_test.html
+4. Open the test page, connect to the server, select the loopback peer, click
+call.
+
+To start the test for scenario (2):
+1. Start peerconnection_server.
+2. Start the WebRTC Chrome build, see scenario (1).
+3. Open the test page, connect to the server.
+4. Open a new tab, open the test page, connect to the server.
+     OR
+   On another machine, repeat 2 and 3.
+5. Select the other peer, click call.
+
+Note: The web page must normally be on a web server to be able to access the
+camera for security reasons.
+See http://blog.chromium.org/2008/12/security-in-depth-local-web-pages.html
+for more details on this topic. This can be overridden with the flag
+--allow-file-access-from-files, in which case running it over the file://
+URI scheme works.
+
+Note: It's possible to specify the server and name in the url:
+.../webrtc.html?server=my_server&name=my_name
diff --git a/trunk/test/functional_test/webrtc_test.html b/trunk/test/functional_test/webrtc_test.html
new file mode 100644
index 0000000..300884c
--- /dev/null
+++ b/trunk/test/functional_test/webrtc_test.html
@@ -0,0 +1,602 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+
+<!--
+Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+Use of this source code is governed by a BSD-style license
+that can be found in the LICENSE file in the root of the source
+tree. An additional intellectual property rights grant can be found
+in the file PATENTS. All contributing project authors may
+be found in the AUTHORS file in the root of the source tree.
+-->
+
+<html>
+
+<head>
+<title>WebRTC Test</title>
+
+<style type="text/css">
+body, input, button, select, table {
+  font-family:"Lucida Grande", "Lucida Sans", Verdana, Arial, sans-serif;
+  font-size: 13px;
+}
+body, input:enabled, button:enabled, select:enabled, table {
+  color: rgb(51, 51, 51);
+}
+h1 {font-size: 40px;}
+</style>
+
+<script type="text/javascript">
+
+// TODO: Catch more exceptions
+
+var server;
+var myId = -1;
+var myName;
+var remoteId = -1;
+var remoteName;
+var request = null;
+var hangingGet = null;
+var pc = null;
+var localStream = null;
+var disconnecting = false;
+var callState = 0; // 0 - Not started, 1 - Call ongoing
+
+
+// General
+
+function setElementValuesFromURL() {
+  window.location.href.replace(/[?&]+([^=&]+)=([^&]*)/gi,
+    function(m, key, value) {
+      document.getElementById(key).value = unescape(value);
+    });
+}
+
+function trace(txt) {
+  var elem = document.getElementById("debug");
+  elem.innerHTML += txt + "<br>";
+}
+
+function trace_warning(txt) {
+  var wtxt = "<b>" + txt + "</b>";
+  trace(wtxt);
+}
+
+function trace_exception(e, txt) {
+  var etxt = "<b>" + txt + "</b> (" + e.name + " / " + e.message + ")";
+  trace(etxt);
+}
+
+function setCallState(state) {
+  trace("Changing call state: " + callState + " -> " + state);
+  callState = state;
+}
+
+function checkPeerConnection() {
+  if (!pc) {
+    trace_warning("No PeerConnection object exists");
+    return 0;
+  }
+  return 1;
+}
+
+
+// Local stream generation
+
+function gotStream(s) {
+  var url = webkitURL.createObjectURL(s);
+  document.getElementById("localView").src = url;
+  trace("User has granted access to local media. url = " + url);
+  localStream = s;
+}
+
+function gotStreamFailed(error) {
+  alert("Failed to get access to local media. Error code was " + error.code +
+    ".");
+  trace_warning("Failed to get access to local media. Error code was " +
+    error.code);
+}
+
+function getUserMedia() {
+  try {
+    navigator.webkitGetUserMedia("video,audio", gotStream, gotStreamFailed);
+    trace("Requested access to local media");
+  } catch (e) {
+    trace_exception(e, "getUserMedia error");
+  }
+}
+
+
+// Peer list and remote peer handling
+
+function peerExists(id) {
+  try {
+    var peerList = document.getElementById("peers");
+    for (var i = 0; i < peerList.length; i++) {
+      if (parseInt(peerList.options[i].value) == id)
+        return true;
+    }
+  } catch (e) {
+    trace_exception(e, "Error searching for peer");
+  }
+  return false;
+}
+
+function addPeer(id, pname) {
+  try {
+    var peerList = document.getElementById("peers");
+    var option = document.createElement("option");
+    option.text = pname;
+    option.value = id;
+    peerList.add(option, null);
+  } catch (e) {
+    trace_exception(e, "Error adding peer");
+  }
+}
+
+function removePeer(id) {
+  try {
+    var peerList = document.getElementById("peers");
+    for (var i = 0; i < peerList.length; i++) {
+      if (parseInt(peerList.options[i].value) == id) {
+        peerList.remove(i);
+        break;
+      }
+    }
+  } catch (e) {
+    trace_exception(e, "Error removing peer");
+  }
+}
+
+function clearPeerList() {
+  var peerList = document.getElementById("peers");
+  while (peerList.length > 0)
+    peerList.remove(0);
+}
+
+function setSelectedPeer(id) {
+  try {
+    var peerList = document.getElementById("peers");
+    for (var i = 0; i < peerList.length; i++) {
+      if (parseInt(peerList.options[i].value) == id) {
+        peerList.options[i].selected = true;
+        return true;
+      }
+    }
+  } catch (e) {
+    trace_exception(e, "Error setting selected peer");
+  }
+  return false;
+}
+
+function getPeerName(id) {
+  try {
+    var peerList = document.getElementById("peers");
+    for (var i = 0; i < peerList.length; i++) {
+      if (parseInt(peerList.options[i].value) == id) {
+        return peerList.options[i].text;
+      }
+    }
+  } catch (e) {
+    trace_exception(e, "Error finding peer name");
+    return;
+  }
+  return;
+}
+
+function storeRemoteInfo() {
+  try {
+    var peerList = document.getElementById("peers");
+    if (peerList.selectedIndex < 0) {
+      alert("Please select a peer.");
+      return false;
+    } else
+      remoteId = parseInt(peerList.options[peerList.selectedIndex].value);
+      remoteName = peerList.options[peerList.selectedIndex].text;
+  } catch (e) {
+    trace_exception(e, "Error storing remote peer info");
+    return false;
+  }
+  return true;
+}
+
+
+// Call control
+
+function createPeerConnection() {
+  if (pc) {
+    trace_warning("PeerConnection object already exists");
+  }
+  trace("Creating PeerConnection object");
+  try {
+    pc = new webkitPeerConnection("STUN stun.l.google.com:19302",
+      onSignalingMessage);
+  pc.onaddstream = onAddStream;
+  pc.onremovestream = onRemoveStream;
+  } catch (e) {
+    trace_exception(e, "Create PeerConnection error");
+  }
+}
+
+function doCall() {
+  if (!storeRemoteInfo())
+    return;
+  document.getElementById("call").disabled = true;
+  document.getElementById("peers").disabled = true;
+  createPeerConnection();
+  trace("Adding stream");
+  pc.addStream(localStream);
+  document.getElementById("hangup").disabled = false;
+  setCallState(1);
+}
+
+function hangUp() {
+  document.getElementById("hangup").disabled = true;
+  trace("Sending BYE to " + remoteName + " (ID " + remoteId + ")");
+  sendToPeer(remoteId, "BYE");
+  closeCall();
+}
+
+function closeCall() {
+  trace("Stopping showing remote stream");
+  document.getElementById("remoteView").src = "dummy";
+  if (pc) {
+    trace("Stopping call [pc.close()]");
+    pc.close();
+    pc = null;
+  } else
+    trace("No pc object to close");
+  remoteId = -1;
+  document.getElementById("call").disabled = false;
+  document.getElementById("peers").disabled = false;
+  setCallState(0);
+}
+
+
+// PeerConnection callbacks
+
+function onAddStream(e) {
+  var stream = e.stream;
+  var url = webkitURL.createObjectURL(stream);
+  document.getElementById("remoteView").src = url;
+  trace("Started showing remote stream. url = " + url);
+}
+
+function onRemoveStream(e) {
+  // Currently if we get this callback, call has ended.
+  document.getElementById("remoteView").src = "";
+  trace("Stopped showing remote stream");
+}
+
+function onSignalingMessage(msg) {
+  trace("Sending message to " + remoteName + " (ID " + remoteId + "):\n" + msg);
+  sendToPeer(remoteId, msg);
+}
+
+// TODO: Add callbacks onconnecting, onopen and onstatechange.
+
+
+// Server interaction
+
+function handleServerNotification(data) {
+  trace("Server notification: " + data);
+  var parsed = data.split(",");
+  if (parseInt(parsed[2]) == 1) { // New peer
+    var peerId = parseInt(parsed[1]);
+    if (!peerExists(peerId)) {
+      var peerList = document.getElementById("peers");
+      if (peerList.length == 1 && peerList.options[0].value == -1)
+        clearPeerList();
+      addPeer(peerId, parsed[0]);
+      document.getElementById("peers").disabled = false;
+      document.getElementById("call").disabled = false;
+    }
+  } else if (parseInt(parsed[2]) == 0) { // Removed peer
+    removePeer(parseInt(parsed[1]));
+    if (document.getElementById("peers").length == 0) {
+      document.getElementById("peers").disabled = true;
+      addPeer(-1, "No other peer connected");
+    }
+  }
+}
+
+function handlePeerMessage(peer_id, msg) {
+  var peerName = getPeerName(peer_id);
+  if (peerName == undefined) {
+    trace_warning("Received message from unknown peer (ID " + peer_id +
+      "), ignoring message:");
+    trace(msg);
+    return;
+  }
+  trace("Received message from " + peerName + " (ID " + peer_id + "):\n" + msg);
+  // Assuming we receive the message from the peer we want to communicate with.
+  // TODO: Only accept messages from the peer we communicate with if call is
+  // ongoing.
+  if (msg.search("BYE") == 0) {
+    // Other side has hung up.
+    document.getElementById("hangup").disabled = true;
+    closeCall()
+  } else {
+    if (!pc) {
+      // Other side is calling us, startup
+      if (!setSelectedPeer(peer_id)) {
+        trace_warning("Received message from unknown peer, ignoring");
+        return;
+      }
+      if (!storeRemoteInfo())
+        return;
+      document.getElementById("call").disabled = true;
+      document.getElementById("peers").disabled = true;
+      createPeerConnection();
+      try {
+        pc.processSignalingMessage(msg);
+      } catch (e) {
+        trace_exception(e, "Process signaling message error");
+      }
+      trace("Adding stream");
+      pc.addStream(localStream);
+      document.getElementById("hangup").disabled = false;
+    } else {
+      try {
+        pc.processSignalingMessage(msg);
+      } catch (e) {
+        trace_exception(e, "Process signaling message error");
+      }
+    }
+  }
+}
+
+function getIntHeader(r, name) {
+  var val = r.getResponseHeader(name);
+  trace("header value: " + val);
+  return val != null && val.length ? parseInt(val) : -1;
+}
+
+function hangingGetCallback() {
+  try {
+    if (hangingGet.readyState != 4 || disconnecting)
+      return;
+    if (hangingGet.status != 200) {
+      trace_warning("server error, status: " + hangingGet.status + ", text: " +
+        hangingGet.statusText);
+      disconnect();
+    } else {
+      var peer_id = getIntHeader(hangingGet, "Pragma");
+      if (peer_id == myId) {
+        handleServerNotification(hangingGet.responseText);
+      } else {
+        handlePeerMessage(peer_id, hangingGet.responseText);
+      }
+    }
+
+    if (hangingGet) {
+      hangingGet.abort();
+      hangingGet = null;
+    }
+
+    if (myId != -1)
+      window.setTimeout(startHangingGet, 0);
+  } catch (e) {
+    trace_exception(e, "Hanging get error");
+  }
+}
+
+function onHangingGetTimeout() {
+  trace("hanging get timeout. issuing again");
+  hangingGet.abort();
+  hangingGet = null;
+  if (myId != -1)
+    window.setTimeout(startHangingGet, 0);
+}
+
+function startHangingGet() {
+  try {
+    hangingGet = new XMLHttpRequest();
+    hangingGet.onreadystatechange = hangingGetCallback;
+    hangingGet.ontimeout = onHangingGetTimeout;
+    hangingGet.open("GET", server + "/wait?peer_id=" + myId, true);
+    hangingGet.send();  
+  } catch (e) {
+    trace_exception(e, "Start hanging get error");
+  }
+}
+
+function sendToPeer(peer_id, data) {
+  if (myId == -1) {
+    alert("Not connected.");
+    return;
+  }
+  if (peer_id == myId) {
+    alert("Can't send a message to oneself.");
+    return;
+  }
+  var r = new XMLHttpRequest();
+  r.open("POST", server + "/message?peer_id=" + myId + "&to=" + peer_id, false);
+  r.setRequestHeader("Content-Type", "text/plain");
+  r.send(data);
+  r = null;
+}
+
+function signInCallback() {
+  try {
+    if (request.readyState == 4) {
+      if (request.status == 200) {
+        var peers = request.responseText.split("\n");
+        myId = parseInt(peers[0].split(",")[1]);
+        trace("My id: " + myId);
+        clearPeerList();
+        var added = 0;
+        for (var i = 1; i < peers.length; ++i) {
+          if (peers[i].length > 0) {
+            trace("Peer " + i + ": " + peers[i]);
+            var parsed = peers[i].split(",");
+            addPeer(parseInt(parsed[1]), parsed[0]);
+            ++added;
+          }
+        }
+        if (added == 0)
+          addPeer(-1, "No other peer connected");
+        else {
+          document.getElementById("peers").disabled = false;
+          document.getElementById("call").disabled = false;
+        }
+        startHangingGet();
+        request = null;
+        document.getElementById("connect").disabled = true;
+        document.getElementById("disconnect").disabled = false;
+      }
+    }
+  } catch (e) {
+    trace_exception(e, "Sign in error");
+    document.getElementById("connect").disabled = false;
+  }
+}
+
+function signIn() {
+  try {
+    request = new XMLHttpRequest();
+    request.onreadystatechange = signInCallback;
+    request.open("GET", server + "/sign_in?" + myName, true);
+    request.send();
+  } catch (e) {
+    trace_exception(e, "Start sign in error");
+    document.getElementById("connect").disabled = false;
+  }
+}
+
+function connect() {
+  myName = document.getElementById("name").value.toLowerCase();
+  server = document.getElementById("server").value.toLowerCase();
+  if (myName.length == 0) {
+    alert("I need a name please.");
+    document.getElementById("local").focus();
+  } else {
+    // TODO: Disable connect button here, but we need a timeout and check if we
+    // have connected, if so enable it again.
+    signIn();
+  }
+}
+
+function disconnect() {
+  if (callState == 1)
+    hangUp();
+
+  disconnecting = true;
+  
+  if (request) {
+    request.abort();
+    request = null;
+  }
+
+  if (hangingGet) {
+    hangingGet.abort();
+    hangingGet = null;
+  }
+
+  if (myId != -1) {
+    request = new XMLHttpRequest();
+    request.open("GET", server + "/sign_out?peer_id=" + myId, false);
+    request.send();
+    request = null;
+    myId = -1;
+  }
+
+  clearPeerList();
+  addPeer(-1, "Not connected");
+  document.getElementById("connect").disabled = false;
+  document.getElementById("disconnect").disabled = true;
+  document.getElementById("peers").disabled = true;
+  document.getElementById("call").disabled = true;
+
+  disconnecting = false;
+}
+
+
+// Window event handling
+
+window.onload = function() {
+  if (navigator.webkitGetUserMedia) {
+    document.getElementById('testApp').hidden = false;
+    setElementValuesFromURL();
+    getUserMedia();
+  } else {
+    document.getElementById('errorText').hidden = false;
+  }
+}
+
+window.onbeforeunload = disconnect;
+
+</script>
+</head>
+
+<body>
+<h1>WebRTC</h1>
+<section id="errorText" hidden="true">
+Could not detect WebRTC support.<p>
+You must have WebRTC enabled Chrome browser to use this test page. The browser
+must be started with the --enable-media-stream command line flag. For more
+information, please see
+<a href="https://sites.google.com/site/webrtc/blog/webrtcnowavailableinthechromedevchannel">
+this blog post</a>.
+</section>
+
+<section id="testApp" hidden="true">
+<table border="0">
+<tr>
+ <td>Local Preview</td>
+ <td>Remote Video</td>
+</tr>
+<tr>
+ <td>
+  <video width="320" height="240" id="localView" autoplay="autoplay"></video>
+ </td>
+ <td>
+  <video width="640" height="480" id="remoteView" autoplay="autoplay"></video>
+ </td>
+</tr>
+</table>
+
+<table border="0">
+<tr>
+ <td valign="top">
+  <table border="0" cellpadding="0" cellspacing="0">
+  <tr>
+   <td>Server:</td>
+   <td>
+    <input type="text" id="server" size="30" value="http://localhost:8888"/>
+   </td>
+  </tr>
+  <tr>
+   <td>Name:</td><td><input type="text" id="name" size="30" value="name"/></td>
+  </tr>
+  </table>
+ </td>
+ <td valign="top">
+  <button id="connect" onclick="connect();">Connect</button><br>
+  <button id="disconnect" onclick="disconnect();" disabled="true">Disconnect
+  </button>
+ </td>
+ <td>&nbsp;&nbsp;&nbsp;</td>
+ <td valign="top">
+  Connected peers:<br>
+  <select id="peers" size="5" disabled="true">
+   <option value="-1">Not connected</option>
+  </select>
+  </td>
+ <td valign="top">
+  <!--input type="text" id="peer_id" size="3" value="1"/><br-->
+  <button id="call" onclick="doCall();" disabled="true">Call</button><br>
+  <button id="hangup" onclick="hangUp();" disabled="true">Hang up</button><br>
+ </td>
+</tr>
+</table>
+
+<button onclick="document.getElementById('debug').innerHTML='';">Clear log
+</button>
+<pre id="debug"></pre>
+</section>
+
+</body>
+
+</html>
+
diff --git a/trunk/test/metrics.gyp b/trunk/test/metrics.gyp
new file mode 100644
index 0000000..70483f9
--- /dev/null
+++ b/trunk/test/metrics.gyp
@@ -0,0 +1,46 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../src/build/common.gypi',
+  ],
+  'targets': [
+    {
+      # The metrics code must be kept in its own GYP file in order to
+      # avoid a circular dependency error due to the dependency on libyuv.
+      # If the code would be put in test.gyp a circular dependency error during
+      # GYP generation would occur, because the libyuv.gypi unittest target
+      # depends on test_support_main. See issue #160 for more info.
+      'target_name': 'metrics',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+      ],
+      'include_dirs': [
+        '.',
+      ],
+      'sources': [
+        'testsupport/metrics/video_metrics.h',
+        'testsupport/metrics/video_metrics.cc',
+      ],
+    },
+    {
+      'target_name': 'metrics_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'metrics',
+        '<(webrtc_root)/../test/test.gyp:test_support_main',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+      ],
+      'sources': [
+        'testsupport/metrics/video_metrics_unittest.cc',
+      ],
+    },
+  ],
+}
diff --git a/trunk/test/run_all_unittests.cc b/trunk/test/run_all_unittests.cc
new file mode 100644
index 0000000..0cdf0cd
--- /dev/null
+++ b/trunk/test/run_all_unittests.cc
@@ -0,0 +1,16 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/test_suite.h"
+
+int main(int argc, char** argv) {
+  webrtc::test::TestSuite test_suite(argc, argv);
+  return test_suite.Run();
+}
diff --git a/trunk/test/sanity_check/README b/trunk/test/sanity_check/README
new file mode 100644
index 0000000..7b14aeb
--- /dev/null
+++ b/trunk/test/sanity_check/README
@@ -0,0 +1,13 @@
+This test will pop up a browser with two tabs, and will run video from
+one tab to the other tab, across the local network interface.
+
+In order to run this test, you need to have:
+
+- a WebRTC Chrome binary in
+  $PROJECT_ROOT/../../chromium/src/out/Debug/chrome
+- an installed lighttpd in your $PATH
+- a built peerconnection_server (make peerconnection_server)
+
+You need to run the script from the test/sanity_check directory.
+
+The first time you run it, you will be asked to choose a search engine :-(
diff --git a/trunk/test/sanity_check/lighttpd.conf b/trunk/test/sanity_check/lighttpd.conf
new file mode 100644
index 0000000..4dae92d
--- /dev/null
+++ b/trunk/test/sanity_check/lighttpd.conf
@@ -0,0 +1,5 @@
+server.document-root = "./www/html/"
+server.port = 3000
+mimetype.assign = ( 
+  ".html" => "text/html"
+)
diff --git a/trunk/test/sanity_check/run_sanity_check b/trunk/test/sanity_check/run_sanity_check
new file mode 100644
index 0000000..cf38e98
--- /dev/null
+++ b/trunk/test/sanity_check/run_sanity_check
@@ -0,0 +1,72 @@
+#!/bin/bash
+#
+# Run a test with the WebRTC Chromium build.
+# Should work on any machine with a camera.
+#
+# Method:
+# - Start server
+# - Start 2 browser tabs
+# - Browser tab 1 captures camera
+# - Both browsers sign in
+# - Browser 1 calls browser 2
+# - Browser 2 displays camera feed from browser 1
+#
+
+# Feel free to tweak this locally if your chrome build is somewhere else.
+# The default assumes that it is in a folder called chromium two levels
+# up from the project root ('trunk').
+
+set -e
+
+if [ ! -x run_sanity_check ]; then
+  echo "Error: This script must run from its own directory"
+  exit 1
+fi
+
+URLBASE=127.0.0.1:3000
+CALLER=$URLBASE/call_client.html
+CALLEE=$URLBASE/call_responder.html
+FLAGS="--enable-media-stream --enable-p2papi"
+PROJECT_ROOT=../..
+CHROME_BINARY=$PROJECT_ROOT/../../chromium/src/out/Debug/chrome
+
+if which lighttpd; then
+  LOCAL_WEB_SERVER_BINARY=$(which lighttpd)
+else
+  echo "Error: You must install lighttpd first (sudo apt-get install lighttpd)"
+  exit 1
+fi
+
+SERVER_BINARY=${PROJECT_ROOT}/out/Debug/peerconnection_server
+if [ ! -e "$SERVER_BINARY" ]; then
+  echo "Error: You must build peerconnection_server first."
+  exit 1
+fi
+
+CHROME_BINARY=$PROJECT_ROOT/../../chromium/src/out/Debug/chrome
+if [ ! -e "$CHROME_BINARY" ]; then
+  echo "Error: You must build chrome (could not open $CHROME_BINARY)."
+  exit 1
+fi
+
+# Launch the web server and make it serve the local www/html directory
+${LOCAL_WEB_SERVER_BINARY} -D -f lighttpd.conf &
+LOCAL_WEB_SERVER=$!
+
+${SERVER_BINARY} &
+SERVER=$!
+echo "Started server as $SERVER"
+
+# We can make 2 browsers
+#${CHROME_BINARY} $CALLER $FLAGS --user-data-dir=temp/user1 &
+#USER1=$!
+#echo "Started first user as $USER1"
+#${CHROME_BINARY} $CALLEE $FLAGS --user-data-dir=temp/user2 
+
+# But it also works with separate tabs in one browser.
+${CHROME_BINARY} $CALLER $CALLEE $FLAGS --user-data-dir=temp/user1
+
+echo "Test finished, cleaning up"
+
+kill $SERVER || echo "No server"
+kill $LOCAL_WEB_SERVER || echo "No local web server"
diff --git a/trunk/test/sanity_check/www/html/call_client.html b/trunk/test/sanity_check/www/html/call_client.html
new file mode 100644
index 0000000..789bc9f
--- /dev/null
+++ b/trunk/test/sanity_check/www/html/call_client.html
@@ -0,0 +1,394 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+
+<html>
+
+<head>
+<title>WebRTC Autocaller</title>
+<script type="text/javascript">
+
+var local_name;
+var server;
+var my_id = -1;
+var other_peers = {};
+var request = null;
+var hanging_get = null;
+var pc = null;
+var local_stream = null;
+var call_state = 0;
+// 0 - Not started
+// 1 - Call ongoing
+// 2 - We have initiated closing the call
+// 3 - Other side has initiated closing the call
+
+// General
+function toggleExtraButtons() {
+  document.getElementById("createPcBtn").hidden = 
+    !document.getElementById("createPcBtn").hidden;
+  document.getElementById("addStreamBtn").hidden = 
+    !document.getElementById("addStreamBtn").hidden;
+}
+
+function trace(txt) {
+  var elem = document.getElementById("debug");
+  elem.innerHTML += txt + "<br>";
+}
+
+function trace_warning(txt) {
+  var wtxt = "<b>" + txt + "</b>";
+  trace(wtxt);
+}
+
+function setCallState(state) {
+  trace("Changing call state: " + call_state + " -> " + state);
+  call_state = state;
+}
+
+function checkPeerConnection() {
+  if (!pc) {
+    trace_warning("No PeerConnection object exists.");
+    return 0;
+  }
+  return 1;
+}
+
+// Local stream generation
+function gotStream(s) {
+  var url = webkitURL.createObjectURL(s);
+  document.getElementById("localView").src = url;
+  trace("User has granted access to webcam. url = " + url);
+  local_stream = s;
+}
+
+function gotStreamFailed(error) {
+  alert("Failed to get access to webcam. Error code was " + error.code);
+  trace("Failed to get access to webcam. Error code was " + error.code);
+}
+
+// Call control
+function createPeerConnection() {
+  if (pc) {
+    trace_warning("PeerConnection object already exists.");
+  }
+  trace("Creating PeerConnection object.");
+  // CRASH TEST
+  pc = new webkitPeerConnection("STUN 1.2.3.4:43", onSignalingMessage); 
+  pc.onaddstream = onAddStream;
+  pc.onremovestream = onRemoveStream;
+}
+
+function startCall() {
+  document.getElementById("startcall").disabled = true;
+  createPeerConnection();
+  addStream();
+}
+
+function addStream() {
+  trace("Starting call. [pc.addStream(local_stream)]");
+  pc.addStream(local_stream);
+  document.getElementById("stopcall").disabled = false;
+}
+
+function stopCall() {
+  document.getElementById("stopcall").disabled = true;
+  trace("Stopping call [pc.close()]");
+  setCallState(2);
+  pc.close();
+}
+
+function test1() {
+  addStream();
+}
+
+function callHasStarted() {
+  setCallState(1);
+}
+
+// PeerConnection callbacks
+function onAddStream(e) {
+  var stream = pc.remoteStreams[0];
+  var url = webkitURL.createObjectURL(stream);
+  document.getElementById("remoteView").src = url;
+  trace("Started showing remote stream. url = " + url);
+  setTimeout(callHasStarted, 5000);  // TODO(hta): workaround, to be removed
+}
+
+function onRemoveStream(e) {
+  // Currently if we get this callback, call has ended.
+  document.getElementById("remoteView").src = "";
+  trace("Stopped showing remote stream.");
+  if (call_state == 2) {
+    // Finalize close call. We assume here that this callback comes
+    // after onSignalingMessage.
+    pc = null;
+    setCallState(0);
+    document.getElementById("startcall").disabled = false;
+  }
+}
+
+function onSignalingMessage(msg) {
+  var peer_id = parseInt(document.getElementById("peer_id").value);
+  trace("Sending message to " + other_peers[peer_id] + 
+        " (ID " + peer_id + "):\n" + msg);
+  sendToPeer(peer_id, msg);
+}
+
+// Server interaction
+
+function handleServerNotification(data) {
+  trace("Server notification: " + data);
+  notePeerAndMaybeCall(data);
+}
+
+function handlePeerMessage(peer_id, msg) {
+  trace("Received message from " + other_peers[peer_id] + 
+        " (ID " + peer_id + "):\n" + msg);
+  // Assuming we have created a PeerConnection and that we receive the
+  // message from the peer we want to communicate with
+  document.getElementById("peer_id").value = 
+    peer_id;   // Ensure we reply to the same peer
+  
+  if (pc) {
+    try {
+      pc.processSignalingMessage(msg);
+    } catch (e) {
+      trace_warning("Signaling message error: " + e.description);
+    }
+    if (call_state == 1)
+      setCallState(3);
+  } else {
+    createPeerConnection();
+    pc.processSignalingMessage(msg);
+  }
+}
+
+function getIntHeader(r, name) {
+  var val = r.getResponseHeader(name);
+  trace("header value: " + val);
+  return val != null && val.length ? parseInt(val) : -1;
+}
+
+function hangingGetCallback() {
+  try {
+    if (hanging_get.readyState != 4)
+      return;
+    if (hanging_get.status != 200) {
+      trace("server error, status: " + hanging_get.status + ", text: " + 
+            hanging_get.statusText);
+      disconnect();
+    } else {
+      var peer_id = getIntHeader(hanging_get, "Pragma");
+      if (peer_id == my_id) {
+        handleServerNotification(hanging_get.responseText);
+      } else {
+        handlePeerMessage(peer_id, hanging_get.responseText);
+      }
+    }
+
+    if (hanging_get) {
+      hanging_get.abort();
+      hanging_get = null;
+    }
+
+    if (my_id != -1)
+      window.setTimeout(startHangingGet, 0);
+  } catch (e) {
+    trace("Hanging get error: " + e.description);
+  }
+}
+
+function onHangingGetTimeout() {
+  trace("hanging get timeout. issuing again.");
+  hanging_get.abort();
+  hanging_get = null;
+  if (my_id != -1)
+    window.setTimeout(startHangingGet, 0);
+}
+
+function startHangingGet() {
+  try {
+    hanging_get = new XMLHttpRequest();
+    hanging_get.onreadystatechange = hangingGetCallback;
+    hanging_get.ontimeout = onHangingGetTimeout;
+    hanging_get.open("GET", server + "/wait?peer_id=" + my_id, true);
+    hanging_get.send();  
+  } catch (e) {
+    trace("error" + e.description);
+  }
+}
+
+function sendToPeer(peer_id, data) {
+  if (my_id == -1) {
+    alert("Not connected");
+    return;
+  }
+  if (peer_id == my_id) {
+    alert("Can't send a message to oneself :)");
+    return;
+  }
+  var r = new XMLHttpRequest();
+  r.open("POST", server + "/message?peer_id=" + my_id + "&to=" + 
+         peer_id, false);
+  r.setRequestHeader("Content-Type", "text/plain");
+  r.send(data);
+  r = null;
+}
+
+function notePeerAndMaybeCall(peerInfo) {
+  var parsed = peerInfo.split(",");
+  if (parseInt(parsed[2]) != 0) {
+    trace("Peer came on: " + peerInfo);
+    other_peers[parseInt(parsed[1])] = parsed[0];
+    // If we're not already in a call, call it.
+    if (!document.getElementById("startcall").disabled) {
+      document.getElementById("peer_id").value = parsed[1];
+      startCall();
+    }
+  } else {
+    trace("Peer went away: " + peerInfo);
+    other_peers[parseInt(parsed[1])] = null;
+  }
+}
+
+function signInCallback() {
+  try {
+    if (request.readyState == 4) {
+      if (request.status == 200) {
+        var peers = request.responseText.split("\n");
+        my_id = parseInt(peers[0].split(",")[1]);
+        trace("My id: " + my_id);
+        for (var i = 1; i < peers.length; ++i) {
+          if (peers[i].length > 0) {
+            trace("Peer " + i + ": " + peers[i]);
+            notePeerAndMaybeCall(peers[i]);
+          }
+        }
+        startHangingGet();
+        request = null;
+      }
+    }
+  } catch (e) {
+    trace("error: " + e.description);
+  }
+}
+
+function signIn() {
+  try {
+    request = new XMLHttpRequest();
+    request.onreadystatechange = signInCallback;
+    request.open("GET", server + "/sign_in?" + local_name, true);
+    request.send();
+  } catch (e) {
+    trace("error: " + e.description);
+  }
+}
+
+function connect() {
+  local_name = document.getElementById("local").value.toLowerCase();
+  server = document.getElementById("server").value.toLowerCase();
+  if (local_name.length == 0) {
+    alert("I need a name please.");
+    document.getElementById("local").focus();
+  } else {
+    document.getElementById("connect").disabled = true;
+    document.getElementById("disconnect").disabled = false;
+    trace("Connecting to server");
+    signIn();
+  }
+}
+
+function disconnect() {
+  if (request) {
+    request.abort();
+    request = null;
+  }
+  
+  if (hanging_get) {
+    hanging_get.abort();
+    hanging_get = null;
+  }
+
+  if (my_id != -1) {
+    request = new XMLHttpRequest();
+    request.open("GET", server + "/sign_out?peer_id=" + my_id, false);
+    request.send();
+    request = null;
+    my_id = -1;
+  }
+
+  document.getElementById("connect").disabled = false;
+  document.getElementById("disconnect").disabled = true;
+}
+
+function toggleLog()
+{
+  document.getElementById('debug').hidden =! 
+    document.getElementById('debug').hidden;
+}
+
+window.onbeforeunload = disconnect;
+
+// TODO: Add audio here when available. Note: now it's hard coded, 
+// audio will also be started.
+navigator.webkitGetUserMedia("video", gotStream, gotStreamFailed);
+
+</script>
+</head>
+
+<body>
+<h1>WebRTC autocaller</h1>
+This page should show your local camera, and connect to a remote browser.
+<table border="0">
+
+<tr>
+<th align="left">Local Preview</th>
+<th align="left">Remote Video</th>
+</tr>
+<tr>
+<td><video width="320" height="240" id="localView" autoplay="autoplay">
+    </video></td>
+<td><video width="640" height="480" id="remoteView" autoplay="autoplay">
+    </video></td>
+</tr>
+<tr>
+<td valign="top">
+<form>
+Target peer ID: <input type="text" id="peer_id" size="3" value="1"/><br>
+<button id="startcall" onclick="startCall();">(4) Start call</button><br>
+<button id="stopcall" onclick="stopCall();" disabled="true">(5) Stop call
+</button><br>
+<!--<input type="button" value="Test 1" onclick="test1()"/><br>-->
+</form>
+</td>
+<td valign="top">
+<button onclick="toggleExtraButtons();">Toggle extra buttons</button><br>
+<button id="createPcBtn" onclick="createPeerConnection();" hidden="true">
+  Create peer connection</button><br>
+<button id="addStreamBtn" onclick="addStream();" hidden="true">
+  Add stream</button><br>
+</td>
+</tr>
+
+<tr><td colspan="2">
+(1) Server: <input type="text" id="server" value="http://localhost:8888" />
+<br>
+(2) Your name: <input type="text" id="local" value="caller"/>
+<button id="connect" onclick="connect();" disabled="true">(3) Connect</button>
+<button id="disconnect" onclick="disconnect();" disabled="true">(6) Disconnect
+</button>
+<br>
+&nbsp;<br>
+<button onclick="toggleLog()">Toggle view log</button>
+<button onclick="document.getElementById('debug').innerHTML='';">
+  Clear log</button>
+<pre id="debug">
+</pre>
+</td></tr>
+
+</table>
+</body>
+<script>
+// On init, we connect to the server.
+connect();
+</script>
+
+</html>
diff --git a/trunk/test/sanity_check/www/html/call_responder.html b/trunk/test/sanity_check/www/html/call_responder.html
new file mode 100644
index 0000000..4d7f3b4
--- /dev/null
+++ b/trunk/test/sanity_check/www/html/call_responder.html
@@ -0,0 +1,373 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+
+<html>
+
+<head>
+<title>WebRTC Autoresponder</title>
+This page should show a stream coming from a different browser.
+<script type="text/javascript">
+
+var local_name;
+var server;
+var my_id = -1;
+var other_peers = {};
+var request = null;
+var hanging_get = null;
+var pc = null;
+var local_stream = null;
+var call_state = 0;
+// 0 - Not started
+// 1 - Call ongoing
+// 2 - We have initiated closing the call
+// 3 - Other side has initiated closing the call
+
+// General
+// Toggles visibility of the two debug-only buttons (create PC / add stream).
+function toggleExtraButtons() {
+  document.getElementById("createPcBtn").hidden = 
+    !document.getElementById("createPcBtn").hidden;
+  document.getElementById("addStreamBtn").hidden = 
+    !document.getElementById("addStreamBtn").hidden;
+}
+
+// Appends a line of text to the on-page debug log element.
+function trace(txt) {
+  var elem = document.getElementById("debug");
+  elem.innerHTML += txt + "<br>";
+}
+
+// Like trace(), but renders the message in bold to flag a warning.
+function trace_warning(txt) {
+  var wtxt = "<b>" + txt + "</b>";
+  trace(wtxt);
+}
+
+// Records a transition of the call state machine (state values are
+// documented where call_state is declared at the top of this script).
+function setCallState(state) {
+  trace("Changing call state: " + call_state + " -> " + state);
+  call_state = state;
+}
+
+// Returns 1 if the global PeerConnection exists, otherwise warns and
+// returns 0.
+function checkPeerConnection() {
+  if (!pc) {
+    trace_warning("No PeerConnection object exists.");
+    return 0;
+  }
+  return 1;
+}
+
+// Local stream generation
+// getUserMedia success callback: shows the local camera stream in the
+// "localView" video element and caches it for later pc.addStream() calls.
+function gotStream(s) {
+  var url = webkitURL.createObjectURL(s);
+  document.getElementById("localView").src = url;
+  trace("User has granted access to webcam. url = " + url);
+  local_stream = s;
+}
+
+// getUserMedia failure callback (user denied access or device error).
+function gotStreamFailed(error) {
+  alert("Failed to get access to webcam. Error code was " + error.code);
+  trace("Failed to get access to webcam. Error code was " + error.code);
+}
+
+// Call control
+// Creates the global PeerConnection, using a Google STUN server for NAT
+// traversal; locally generated signaling goes to onSignalingMessage.
+// NOTE(review): if a PeerConnection already exists this only warns and then
+// overwrites it anyway — presumably an early return was intended; confirm.
+function createPeerConnection() {
+  if (pc) {
+    trace_warning("PeerConnection object already exists.");
+  }
+  trace("Creating PeerConnection object.");
+  pc = new webkitPeerConnection("STUN stun.l.google.com:19302", 
+                                onSignalingMessage);
+  pc.onaddstream = onAddStream;
+  pc.onremovestream = onRemoveStream;
+}
+
+// Starts an outgoing call: creates the connection and adds the local stream.
+function startCall() {
+  document.getElementById("startcall").disabled = true;
+  createPeerConnection();
+  addStream();
+}
+
+// Adds the cached local camera stream to the PeerConnection, which kicks
+// off signaling via onSignalingMessage.
+function addStream()
+{
+  trace("Starting call. [pc.addStream(local_stream)]");
+  pc.addStream(local_stream);
+  document.getElementById("stopcall").disabled = false;
+}
+
+// Tears down the call; state 2 = "we initiated the close".
+function stopCall() {
+  document.getElementById("stopcall").disabled = true;
+  trace("Stopping call [pc.close()]");
+  setCallState(2);
+  pc.close();
+}
+
+// Manual test hook (wired to a commented-out button in the page body).
+function test1() {
+  addStream();
+}
+
+// Marks the call as ongoing (state 1).
+function callHasStarted() {
+  setCallState(1);
+}
+
+// PeerConnection callbacks
+// Fired when the remote side's stream arrives; renders it in "remoteView".
+function onAddStream(e) {
+  var stream = pc.remoteStreams[0];
+  var url = webkitURL.createObjectURL(stream);
+  document.getElementById("remoteView").src = url;
+  trace("Started showing remote stream. url = " + url);
+  setTimeout(callHasStarted, 5000); // TODO: Temp workaround, to be removed
+}
+
+// Fired when the remote stream goes away; with this API revision that
+// means the call has ended.
+function onRemoveStream(e) {
+  // Currently if we get this callback, call has ended.
+  document.getElementById("remoteView").src = "";
+  trace("Stopped showing remote stream.");
+  if (call_state == 2) {
+    // Finalize close call. We assume here that this
+    // callback comes after onSignalingMessage.
+    pc = null;
+    setCallState(0);
+    document.getElementById("startcall").disabled = false;
+  }
+}
+
+// Relays locally generated signaling messages to the peer chosen in the
+// "peer_id" input field, via the signaling server.
+function onSignalingMessage(msg) {
+  var peer_id = parseInt(document.getElementById("peer_id").value);
+  trace("Sending message to " + other_peers[peer_id] + 
+        " (ID " + peer_id + "):\n" + msg);
+  sendToPeer(peer_id, msg);
+}
+
+// Server interaction
+
+// Handles a notification from the signaling server: a comma-separated
+// "name,id,connected" triple; a peer is recorded when connected != 0.
+function handleServerNotification(data) {
+  trace("Server notification: " + data);
+  var parsed = data.split(",");
+  if (parseInt(parsed[2]) != 0)
+    other_peers[parseInt(parsed[1])] = parsed[0];
+}
+
+// Handles a signaling message forwarded from another peer. Creates the
+// PeerConnection lazily on the first incoming message (the responder case).
+function handlePeerMessage(peer_id, msg) {
+  trace("Received message from " + other_peers[peer_id] + 
+        " (ID " + peer_id + "):\n" + msg);
+  // Assuming we have created a PeerConnection and that we receive the message 
+  // from the peer we want to communicate with
+  document.getElementById("peer_id").value = peer_id;
+  if (pc) {
+    try {
+      pc.processSignalingMessage(msg);
+    } catch (e) {
+      trace_warning("Signaling message error: " + e.description);
+    }
+    // NOTE(review): any message received during an ongoing call moves us to
+    // state 3 ("other side closing") — confirm that is intended.
+    if (call_state == 1)
+      setCallState(3);
+  } else {
+    createPeerConnection();
+    pc.processSignalingMessage(msg);
+  }
+}
+
+// Reads an integer-valued HTTP response header; returns -1 if absent.
+function getIntHeader(r, name) {
+  var val = r.getResponseHeader(name);
+  trace("header value: " + val);
+  return val != null && val.length ? parseInt(val) : -1;
+}
+
+// Completion callback for the long-poll ("hanging GET") against the
+// server's /wait endpoint. A Pragma header equal to our own id marks a
+// server notification; any other id marks a forwarded peer message.
+function hangingGetCallback() {
+  try {
+    if (hanging_get.readyState != 4)
+      return;
+    if (hanging_get.status != 200) {
+      trace("server error, status: " + hanging_get.status + 
+            ", text: " + hanging_get.statusText);
+      disconnect();
+    } else {
+      var peer_id = getIntHeader(hanging_get, "Pragma");
+      if (peer_id == my_id) {
+        handleServerNotification(hanging_get.responseText);
+      } else {
+        handlePeerMessage(peer_id, hanging_get.responseText);
+      }
+    }
+
+    // disconnect() above may already have cleared hanging_get, so re-check.
+    if (hanging_get) {
+      hanging_get.abort();
+      hanging_get = null;
+    }
+
+    // Re-issue the long poll as long as we are still signed in.
+    if (my_id != -1)
+      window.setTimeout(startHangingGet, 0);
+  } catch (e) {
+    trace("Hanging get error: " + e.description);
+  }
+}
+
+// Restarts the long poll after an XHR timeout.
+function onHangingGetTimeout() {
+  trace("hanging get timeout. issuing again.");
+  hanging_get.abort();
+  hanging_get = null;
+  if (my_id != -1)
+    window.setTimeout(startHangingGet, 0);
+}
+
+// Issues the long-poll GET that the server holds open until it has a
+// message for us.
+// NOTE(review): no xhr.timeout value is assigned, so ontimeout will not
+// fire with default settings — confirm whether a timeout was intended.
+function startHangingGet() {
+  try {
+    hanging_get = new XMLHttpRequest();
+    hanging_get.onreadystatechange = hangingGetCallback;
+    hanging_get.ontimeout = onHangingGetTimeout;
+    hanging_get.open("GET", server + "/wait?peer_id=" + my_id, true);
+    hanging_get.send();  
+  } catch (e) {
+    trace("error" + e.description);
+  }
+}
+
+// POSTs a signaling message to another peer via the server. Note: the
+// request is issued synchronously (third open() argument is false).
+function sendToPeer(peer_id, data) {
+  if (my_id == -1) {
+    alert("Not connected");
+    return;
+  }
+  if (peer_id == my_id) {
+    alert("Can't send a message to oneself :)");
+    return;
+  }
+  var r = new XMLHttpRequest();
+  r.open("POST", server + "/message?peer_id=" + my_id + "&to=" + peer_id, 
+         false);
+  r.setRequestHeader("Content-Type", "text/plain");
+  r.send(data);
+  r = null;
+}
+
+// Completion callback for sign-in: parses our assigned id (first line of
+// the response) and the list of already-connected peers, then starts the
+// long poll.
+function signInCallback() {
+  try {
+    if (request.readyState == 4) {
+      if (request.status == 200) {
+        var peers = request.responseText.split("\n");
+        my_id = parseInt(peers[0].split(",")[1]);
+        trace("My id: " + my_id);
+        for (var i = 1; i < peers.length; ++i) {
+          if (peers[i].length > 0) {
+            trace("Peer " + i + ": " + peers[i]);
+            var parsed = peers[i].split(",");
+            other_peers[parseInt(parsed[1])] = parsed[0];
+          }
+        }
+        startHangingGet();
+        request = null;
+      }
+    }
+  } catch (e) {
+    trace("error: " + e.description);
+  }
+}
+
+// Registers our name with the signaling server.
+function signIn() {
+  try {
+    request = new XMLHttpRequest();
+    request.onreadystatechange = signInCallback;
+    request.open("GET", server + "/sign_in?" + local_name, true);
+    request.send();
+  } catch (e) {
+    trace("error: " + e.description);
+  }
+}
+
+// Reads the server URL and local name from the form and signs in.
+function connect() {
+  local_name = document.getElementById("local").value.toLowerCase();
+  server = document.getElementById("server").value.toLowerCase();
+  if (local_name.length == 0) {
+    alert("I need a name please.");
+    document.getElementById("local").focus();
+  } else {
+    document.getElementById("connect").disabled = true;
+    document.getElementById("disconnect").disabled = false;
+    signIn();
+  }
+}
+
+// Aborts outstanding requests and signs out. The sign-out request is
+// synchronous so this also works from onbeforeunload.
+function disconnect() {
+  if (request) {
+    request.abort();
+    request = null;
+  }
+  
+  if (hanging_get) {
+    hanging_get.abort();
+    hanging_get = null;
+  }
+
+  if (my_id != -1) {
+    request = new XMLHttpRequest();
+    request.open("GET", server + "/sign_out?peer_id=" + my_id, false);
+    request.send();
+    request = null;
+    my_id = -1;
+  }
+
+  document.getElementById("connect").disabled = false;
+  document.getElementById("disconnect").disabled = true;
+}
+
+// Shows/hides the on-page debug log.
+function toggleLog()
+{
+  document.getElementById('debug').hidden =! 
+    document.getElementById('debug').hidden;
+}
+
+// Make sure we sign out from the server when the page is closed.
+window.onbeforeunload = disconnect;
+
+</script>
+</head>
+
+<body>
+<table border="0">
+
+<tr>
+<th align="left">Local Preview</th>
+<th align="left">Remote Video</th>
+</tr>
+<tr>
+<td><video width="320" height="240" id="localView" autoplay="autoplay">
+  </video></td>
+<td><video width="640" height="480" id="remoteView" autoplay="autoplay">
+  </video></td>
+</tr>
+<tr>
+<td valign="top">
+<form>
+Target peer ID: <input type="text" id="peer_id" size="3" value="1"/><br>
+<button id="startcall" onclick="startCall();">(4) Start call</button><br>
+<button id="stopcall" onclick="stopCall();" disabled="true">(5) Stop call
+  </button><br>
+<!--<input type="button" value="Test 1" onclick="test1()"/><br>-->
+</form>
+</td>
+<td valign="top">
+<button onclick="toggleExtraButtons();">Toggle extra buttons</button><br>
+<button id="createPcBtn" onclick="createPeerConnection();" hidden="true">
+  Create peer connection</button><br>
+<button id="addStreamBtn" onclick="addStream();" hidden="true">Add stream
+  </button><br>
+</td>
+</tr>
+
+<tr><td colspan="2">
+(1) Server: <input type="text" id="server" value="http://localhost:8888" />
+<br>
+(2) Your name: <input type="text" id="local" value="responder"/>
+<button id="connect" onclick="connect();" disabled="true">(3) Connect</button>
+<button id="disconnect" onclick="disconnect();" disabled="true">(6) Disconnect
+  </button>
+<br>
+&nbsp;<br>
+<button onclick="toggleLog()">Toggle view log</button>
+<button onclick="document.getElementById('debug').innerHTML='';">Clear log
+  </button>
+<pre id="debug">
+</pre>
+</td></tr>
+
+</table>
+</body>
+<script>
+// On init, we connect to the server.
+connect();
+</script>
+</html>
diff --git a/trunk/test/test.gyp b/trunk/test/test.gyp
new file mode 100644
index 0000000..b428bf3
--- /dev/null
+++ b/trunk/test/test.gyp
@@ -0,0 +1,79 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# TODO(andrew): consider moving test_support to src/base/test.
+# Defines the reusable gtest/gmock test support targets for WebRTC tests.
+{
+  'includes': [
+    '../src/build/common.gypi',
+  ],
+  'targets': [
+    {
+      # Static library with file, frame and packet helper code for tests.
+      'target_name': 'test_support',
+      'type': 'static_library',
+      'include_dirs': [
+        '.',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '.', # Some includes are hierarchical
+        ],
+      },
+      'dependencies': [
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/../testing/gmock.gyp:gmock',
+      ],
+      'all_dependent_settings': {
+        'include_dirs': [
+          '.',
+        ],
+      },
+      'sources': [
+        'test_suite.cc',
+        'test_suite.h',
+        'testsupport/fileutils.h',
+        'testsupport/fileutils.cc',
+        'testsupport/frame_reader.h',
+        'testsupport/frame_reader.cc',
+        'testsupport/frame_writer.h',
+        'testsupport/frame_writer.cc',
+        'testsupport/gtest_prod_util.h',
+        'testsupport/packet_reader.h',
+        'testsupport/packet_reader.cc',
+        'testsupport/mock/mock_frame_reader.h',
+        'testsupport/mock/mock_frame_writer.h',
+      ],
+    },
+    {
+      # Depend on this target when you want to have test_support but also the
+      # main method needed for gtest to execute!
+      'target_name': 'test_support_main',
+      'type': 'static_library',
+      'dependencies': [
+        'test_support',
+      ],
+      'sources': [
+        'run_all_unittests.cc',
+      ],
+    },
+    {
+      # Unit tests for the test support code itself.
+      'target_name': 'test_support_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'test_support_main',
+        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+      ],
+      'sources': [
+        'testsupport/unittest_utils.h',
+        'testsupport/fileutils_unittest.cc',
+        'testsupport/frame_reader_unittest.cc',
+        'testsupport/frame_writer_unittest.cc',
+        'testsupport/packet_reader_unittest.cc',
+      ],
+    },
+  ],
+}
diff --git a/trunk/test/test_suite.cc b/trunk/test/test_suite.cc
new file mode 100644
index 0000000..ac3f3a2
--- /dev/null
+++ b/trunk/test/test_suite.cc
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/test_suite.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+namespace webrtc {
+namespace test {
+// Initializes gmock (and, through it, gtest) from the command line.
+TestSuite::TestSuite(int argc, char** argv) {
+  testing::InitGoogleMock(&argc, argv);  // Runs InitGoogleTest() internally.
+}
+
+TestSuite::~TestSuite() {
+}
+
+// Runs all registered gtest tests, bracketed by the Initialize()/Shutdown()
+// hooks. Returns the gtest exit code (0 on success).
+int TestSuite::Run() {
+  Initialize();
+  int result = RUN_ALL_TESTS();
+  Shutdown();
+  return result;
+}
+
+// Per-suite setup hook; intentionally empty for now.
+void TestSuite::Initialize() {
+  // TODO(andrew): initialize singletons here (e.g. Trace).
+}
+
+// Per-suite teardown hook; intentionally empty for now.
+void TestSuite::Shutdown() {
+}
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/test_suite.h b/trunk/test/test_suite.h
new file mode 100644
index 0000000..f500daa
--- /dev/null
+++ b/trunk/test/test_suite.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TEST_SUITE_H_
+#define TEST_TEST_SUITE_H_
+
+// Derived from Chromium's src/base/test/test_suite.h.
+
+// Defines a basic test suite framework for running gtest based tests.  You can
+// instantiate this class in your main function and call its Run method to run
+// any gtest based tests that are linked into your executable.
+
+#include "src/system_wrappers/interface/constructor_magic.h"
+
+namespace webrtc {
+namespace test {
+// Basic test suite: instantiate in main() and call Run() to execute all
+// gtest-based tests linked into the executable.
+class TestSuite {
+ public:
+  // Initializes gtest/gmock with the program's command-line arguments.
+  TestSuite(int argc, char** argv);
+  virtual ~TestSuite();
+
+  // Runs all tests; returns the gtest result code (0 on success).
+  int Run();
+
+ protected:
+  // Override these for custom initialization and shutdown handling.  Use these
+  // instead of putting complex code in your constructor/destructor.
+  virtual void Initialize();
+  virtual void Shutdown();
+
+  DISALLOW_COPY_AND_ASSIGN(TestSuite);
+};
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // TEST_TEST_SUITE_H_
diff --git a/trunk/test/testsupport/fileutils.cc b/trunk/test/testsupport/fileutils.cc
new file mode 100644
index 0000000..1e6bbca
--- /dev/null
+++ b/trunk/test/testsupport/fileutils.cc
@@ -0,0 +1,167 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/fileutils.h"
+
+#ifdef WIN32
+#include <direct.h>
+#define GET_CURRENT_DIR _getcwd
+#else
+#include <unistd.h>
+#define GET_CURRENT_DIR getcwd
+#endif
+
+#include <sys/stat.h>  // To check for directory existence.
+#ifndef S_ISDIR  // Not defined in stat.h on Windows.
+#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
+#endif
+
+#include <cstdio>
+
+#include "typedefs.h"  // For architecture defines
+
+namespace webrtc {
+namespace test {
+
+#ifdef WIN32
+static const char* kPathDelimiter = "\\";
+#else
+static const char* kPathDelimiter = "/";
+#endif
+// The file we're looking for to identify the project root dir.
+static const char* kProjectRootFileName = "DEPS";
+static const char* kOutputDirName = "out";
+static const char* kFallbackPath = "./";
+static const char* kResourcesDirName = "resources";
+const char* kCannotFindProjectRootDir = "ERROR_CANNOT_FIND_PROJECT_ROOT_DIR";
+
+// Finds the project root by walking up from the working directory until a
+// directory containing kProjectRootFileName ("DEPS") is found. Returns the
+// root WITH a trailing path delimiter, or kCannotFindProjectRootDir.
+std::string ProjectRootPath() {
+  std::string working_dir = WorkingDir();
+  if (working_dir == kFallbackPath) {
+    return kCannotFindProjectRootDir;
+  }
+  // Check for our file that verifies the root dir.
+  std::string current_path(working_dir);
+  FILE* file = NULL;
+  // Use the unsigned size type and npos directly instead of narrowing the
+  // find_last_of() result into an int (which relied on npos wrapping to -1).
+  std::string::size_type path_delimiter_index =
+      current_path.find_last_of(kPathDelimiter);
+  while (path_delimiter_index != std::string::npos) {
+    std::string root_filename = current_path + kPathDelimiter +
+        kProjectRootFileName;
+    file = fopen(root_filename.c_str(), "r");
+    if (file != NULL) {
+      fclose(file);
+      return current_path + kPathDelimiter;
+    }
+    // Move up one directory in the directory tree.
+    current_path = current_path.substr(0, path_delimiter_index);
+    path_delimiter_index = current_path.find_last_of(kPathDelimiter);
+  }
+  // Reached the root directory.
+  fprintf(stderr, "Cannot find project root directory!\n");
+  return kCannotFindProjectRootDir;
+}
+
+// Returns <project root>/out/ with a trailing delimiter, creating the
+// directory if needed; falls back to "./" on any failure.
+std::string OutputPath() {
+  std::string path = ProjectRootPath();
+  if (path == kCannotFindProjectRootDir) {
+    return kFallbackPath;
+  }
+  path += kOutputDirName;
+  if (!CreateDirectory(path)) {
+    return kFallbackPath;
+  }
+  return path + kPathDelimiter;
+}
+
+// Returns the current working directory, or "./" if it cannot be retrieved.
+std::string WorkingDir() {
+  char path_buffer[FILENAME_MAX];
+  if (!GET_CURRENT_DIR(path_buffer, sizeof(path_buffer))) {
+    fprintf(stderr, "Cannot get current directory!\n");
+    return kFallbackPath;
+  } else {
+    return std::string(path_buffer);
+  }
+}
+
+// Creates directory_name unless it already exists. Returns false if a
+// non-directory file with that name exists or if mkdir fails.
+bool CreateDirectory(std::string directory_name) {
+  struct stat path_info = {0};
+  // Check if the path exists already:
+  if (stat(directory_name.c_str(), &path_info) == 0) {
+    if (!S_ISDIR(path_info.st_mode)) {
+      fprintf(stderr, "Path %s exists but is not a directory! Remove this "
+              "file and re-run to create the directory.\n",
+              directory_name.c_str());
+      return false;
+    }
+  } else {
+#ifdef WIN32
+    return _mkdir(directory_name.c_str()) == 0;
+#else
+    // 0777 before umask; the process umask decides the actual permissions.
+    return mkdir(directory_name.c_str(),  S_IRWXU | S_IRWXG | S_IRWXO) == 0;
+#endif
+  }
+  return true;
+}
+
+// Returns true if file_name exists (stat succeeds); it may be a directory.
+bool FileExists(std::string file_name) {
+  struct stat file_info = {0};
+  return stat(file_name.c_str(), &file_info) == 0;
+}
+
+// Returns the most specific existing resource file path, trying in order:
+// name_<platform>_<arch>.ext, name_<platform>.ext, name_<arch>.ext, name.ext.
+std::string ResourcePath(std::string name, std::string extension) {
+  // Default to "win"; overridden below for Linux/Mac builds.
+  std::string platform = "win";
+#ifdef WEBRTC_LINUX
+  platform = "linux";
+#endif  // WEBRTC_LINUX
+#ifdef WEBRTC_MAC
+  platform = "mac";
+#endif  // WEBRTC_MAC
+
+#ifdef WEBRTC_ARCH_64_BITS
+  std::string architecture = "64";
+#else
+  std::string architecture = "32";
+#endif  // WEBRTC_ARCH_64_BITS
+
+  std::string resources_path = ProjectRootPath() + kResourcesDirName +
+      kPathDelimiter;
+  std::string resource_file = resources_path + name + "_" + platform + "_" +
+      architecture + "." + extension;
+  if (FileExists(resource_file)) {
+    return resource_file;
+  }
+  // Try without architecture.
+  resource_file = resources_path + name + "_" + platform + "." + extension;
+  if (FileExists(resource_file)) {
+    return resource_file;
+  }
+  // Try without platform.
+  resource_file = resources_path + name + "_" + architecture + "." + extension;
+  if (FileExists(resource_file)) {
+    return resource_file;
+  }
+  // Fall back on name without architecture or platform.
+  return resources_path + name + "." + extension;
+}
+
+// Returns the size of filename in bytes, or 0 if the file is empty, cannot
+// be opened, or its size cannot be determined.
+size_t GetFileSize(std::string filename) {
+  FILE* f = fopen(filename.c_str(), "rb");
+  size_t size = 0;
+  if (f != NULL) {
+    if (fseek(f, 0, SEEK_END) == 0) {
+      long offset = ftell(f);
+      // ftell() returns -1L on failure; the old code let that wrap around to
+      // a huge size_t value instead of reporting 0 as documented.
+      if (offset >= 0) {
+        size = static_cast<size_t>(offset);
+      }
+    }
+    fclose(f);
+  }
+  return size;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/fileutils.h b/trunk/test/testsupport/fileutils.h
new file mode 100644
index 0000000..c89ac29
--- /dev/null
+++ b/trunk/test/testsupport/fileutils.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+
+// File utilities for testing purposes.
+//
+// The ProjectRootPath() method is a convenient way of getting an absolute
+// path to the project source tree root directory. Using this, it is easy to
+// refer to test resource files in a portable way.
+//
+// Notice that even if Windows platforms use backslash as path delimiter, it is
+// also supported to use slash, so there's no need for #ifdef checks in test
+// code for setting up the paths to the resource files.
+//
+// Example use:
+// Assume we have the following code being used in a test source file:
+// const std::string kInputFile = webrtc::test::ProjectRootPath() +
+//     "test/data/voice_engine/audio_long16.wav";
+// // Use the kInputFile for the tests...
+//
+// Then here's some example outputs for different platforms:
+// Linux:
+// * Source tree located in /home/user/webrtc/trunk
+// * Test project located in /home/user/webrtc/trunk/src/testproject
+// * Test binary compiled as:
+//   /home/user/webrtc/trunk/out/Debug/testproject_unittests
+// Then ProjectRootPath() will return /home/user/webrtc/trunk/ no matter if
+// the test binary is executed from standing in either of:
+// /home/user/webrtc/trunk
+// or
+// /home/user/webrtc/trunk/out/Debug
+// (or any other directory below the trunk for that matter).
+//
+// Windows:
+// * Source tree located in C:\Users\user\webrtc\trunk
+// * Test project located in C:\Users\user\webrtc\trunk\src\testproject
+// * Test binary compiled as:
+//   C:\Users\user\webrtc\trunk\src\testproject\Debug\testproject_unittests.exe
+// Then ProjectRootPath() will return C:\Users\user\webrtc\trunk\ when the
+// test binary is executed from inside Visual Studio.
+// It will also return the same path if the test is executed from a command
+// prompt standing in C:\Users\user\webrtc\trunk\src\testproject\Debug
+//
+// Mac:
+// * Source tree located in /Users/user/webrtc/trunk
+// * Test project located in /Users/user/webrtc/trunk/src/testproject
+// * Test binary compiled as:
+//   /Users/user/webrtc/trunk/xcodebuild/Debug/testproject_unittests
+// Then ProjectRootPath() will return /Users/user/webrtc/trunk/ no matter if
+// the test binary is executed from standing in either of:
+// /Users/user/webrtc/trunk
+// or
+// /Users/user/webrtc/trunk/out/Debug
+// (or any other directory below the trunk for that matter).
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
+#define WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
+
+#include <string>
+
+namespace webrtc {
+namespace test {
+
+// This is the "directory" returned if the ProjectPath() function fails
+// to find the project root.
+extern const char* kCannotFindProjectRootDir;
+
+// Finds the root dir of the project, to be able to set correct paths to
+// resource files used by tests.
+// The implementation is simple: it just looks for the file defined by
+// kProjectRootFileName, starting in the current directory (the working
+// directory) and then steps upward until it is found (or it is at the root of
+// the file system).
+// If the current working directory is above the project root dir, it will not
+// be found.
+//
+// If symbolic links occur in the path they will be resolved and the actual
+// directory will be returned.
+//
+// Returns the absolute path to the project root dir (usually the trunk dir)
+// WITH a trailing path delimiter.
+// If the project root is not found, the string specified by
+// kCannotFindProjectRootDir is returned.
+std::string ProjectRootPath();
+
+// Creates and returns the absolute path to the output directory where log files
+// and other test artifacts should be put. The output directory is always a
+// directory named "out" at the top-level of the project, i.e. a subfolder to
+// the path returned by ProjectRootPath().
+//
+// Details described for ProjectRootPath() apply here too.
+//
+// Returns the path WITH a trailing path delimiter. If the project root is not
+// found, the current working directory ("./") is returned as a fallback.
+std::string OutputPath();
+
+// Returns a path to a resource file for the currently executing platform.
+// Adapts to what filenames are currently present in the
+// [project-root]/resources/ dir.
+// Returns an absolute path according to this priority list (the directory
+// part of the path is left out for readability):
+// 1. [name]_[platform]_[architecture].[extension]
+// 2. [name]_[platform].[extension]
+// 3. [name]_[architecture].[extension]
+// 4. [name].[extension]
+// Where
+// * platform is either of "win", "mac" or "linux".
+// * architecture is either of "32" or "64".
+//
+// Arguments:
+//    name - Name of the resource file. If a plain filename (no directory path)
+//           is supplied, the file is assumed to be located in resources/
+//           If a directory path is prepended to the filename, a subdirectory
+//           hierarchy reflecting that path is assumed to be present.
+//    extension - File extension, without the dot, i.e. "bmp" or "yuv".
+std::string ResourcePath(std::string name, std::string extension);
+
+// Gets the current working directory for the executing program.
+// Returns "./" if for some reason it is not possible to find the working
+// directory.
+std::string WorkingDir();
+
+// Creates a directory if it not already exists.
+// Returns true if successful. Will print an error message to stderr and return
+// false if a file with the same name already exists.
+bool CreateDirectory(std::string directory_name);
+
+// File size of the supplied file in bytes. Will return 0 if the file is
+// empty or if the file does not exist or is not readable.
+size_t GetFileSize(std::string filename);
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
diff --git a/trunk/test/testsupport/fileutils_unittest.cc b/trunk/test/testsupport/fileutils_unittest.cc
new file mode 100644
index 0000000..1b76b3c
--- /dev/null
+++ b/trunk/test/testsupport/fileutils_unittest.cc
@@ -0,0 +1,192 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/fileutils.h"
+
+#include <cstdio>
+#include <list>
+#include <string>
+
+#include "gtest/gtest.h"
+
+#ifdef WIN32
+#define chdir _chdir
+static const char* kPathDelimiter = "\\";
+#else
+static const char* kPathDelimiter = "/";
+#endif
+
+static const std::string kDummyDir = "file_utils_unittest_dummy_dir";
+static const std::string kResourcesDir = "resources";
+static const std::string kTestName = "fileutils_unittest";
+static const std::string kExtension = "tmp";
+
+typedef std::list<std::string> FileList;
+
+namespace webrtc {
+
+// Test fixture to restore the working directory between each test, since some
+// of them change it with chdir during execution (not restored by the
+// gtest framework).
+// Fixture that creates the resources/ dir plus one dummy resource file per
+// platform/architecture combination that ResourcePath() can look up, and
+// restores the working directory around every test.
+class FileUtilsTest : public testing::Test {
+ protected:
+  FileUtilsTest() {
+  }
+  virtual ~FileUtilsTest() {}
+  // Runs before the first test
+  static void SetUpTestCase() {
+    original_working_dir_ = webrtc::test::WorkingDir();
+    std::string resources_path = original_working_dir_ + kPathDelimiter +
+        kResourcesDir + kPathDelimiter;
+    webrtc::test::CreateDirectory(resources_path);
+
+    files_.push_back(resources_path + kTestName + "." + kExtension);
+    files_.push_back(resources_path + kTestName + "_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_64." + kExtension);
+    files_.push_back(resources_path + kTestName + "_linux." + kExtension);
+    files_.push_back(resources_path + kTestName + "_mac." + kExtension);
+    files_.push_back(resources_path + kTestName + "_win." + kExtension);
+    files_.push_back(resources_path + kTestName + "_linux_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_mac_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_win_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_linux_64." + kExtension);
+    files_.push_back(resources_path + kTestName + "_mac_64." + kExtension);
+    files_.push_back(resources_path + kTestName + "_win_64." + kExtension);
+
+    // Now that the resources dir exists, write some empty test files into it.
+    for (FileList::iterator file_it = files_.begin();
+        file_it != files_.end(); ++file_it) {
+      FILE* file = fopen(file_it->c_str(), "wb");
+      ASSERT_TRUE(file != NULL) << "Failed to write file: " << file_it->c_str();
+      ASSERT_GT(fprintf(file, "%s",  "Dummy data"), 0);
+      fclose(file);
+    }
+    // Create a dummy subdir that can be chdir'ed into for testing purposes.
+    empty_dummy_dir_ = original_working_dir_ + kPathDelimiter + kDummyDir;
+    webrtc::test::CreateDirectory(empty_dummy_dir_);
+  }
+  static void TearDownTestCase() {
+    // Clean up all resource files written
+    for (FileList::iterator file_it = files_.begin();
+            file_it != files_.end(); ++file_it) {
+      remove(file_it->c_str());
+    }
+    std::remove(empty_dummy_dir_.c_str());
+  }
+  // Restore the working dir before and after each test, since some tests
+  // chdir during execution (not restored by the gtest framework).
+  void SetUp() {
+    ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
+  }
+  void TearDown() {
+    ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
+  }
+ protected:
+  static FileList files_;
+  static std::string empty_dummy_dir_;
+ private:
+  static std::string original_working_dir_;
+};
+
+FileList FileUtilsTest::files_;
+std::string FileUtilsTest::original_working_dir_ = "";
+std::string FileUtilsTest::empty_dummy_dir_ = "";
+
+// Tests that the project root path is returned for the default working
+// directory that is automatically set when the test executable is launched.
+// The test is not fully testing the implementation, since we cannot be sure
+// of where the executable was launched from.
+// The test will fail if the top level directory is not named "trunk".
+TEST_F(FileUtilsTest, ProjectRootPathFromUnchangedWorkingDir) {
+  std::string path = webrtc::test::ProjectRootPath();
+  std::string expected_end = "trunk";
+  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
+  // The returned path must end with "<delim>trunk<delim>".
+  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
+}
+
+// Similar to the above test, but for the output dir
+TEST_F(FileUtilsTest, OutputPathFromUnchangedWorkingDir) {
+  std::string path = webrtc::test::OutputPath();
+  std::string expected_end = "out";
+  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
+  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
+}
+
+// Tests setting the current working directory to a directory three levels
+// deeper from the current one. Then testing that the project path returned
+// is still the same, when the function under test is called again.
+TEST_F(FileUtilsTest, ProjectRootPathFromDeeperWorkingDir) {
+  std::string path = webrtc::test::ProjectRootPath();
+  std::string original_working_dir = path;  // This is the correct project root
+  // Change to a subdirectory path.
+  ASSERT_EQ(0, chdir(empty_dummy_dir_.c_str()));
+  ASSERT_EQ(original_working_dir, webrtc::test::ProjectRootPath());
+}
+
+// Similar to the above test, but for the output dir
+TEST_F(FileUtilsTest, OutputPathFromDeeperWorkingDir) {
+  std::string path = webrtc::test::OutputPath();
+  std::string original_working_dir = path;
+  ASSERT_EQ(0, chdir(empty_dummy_dir_.c_str()));
+  ASSERT_EQ(original_working_dir, webrtc::test::OutputPath());
+}
+
+// Tests with current working directory set to a directory higher up in the
+// directory tree than the project root dir. This case shall return a specified
+// error string as a directory (which will be an invalid path).
+TEST_F(FileUtilsTest, ProjectRootPathFromRootWorkingDir) {
+  // Change current working dir to the root of the current file system
+  // (this will always be "above" our project root dir).
+  ASSERT_EQ(0, chdir(kPathDelimiter));
+  ASSERT_EQ(webrtc::test::kCannotFindProjectRootDir,
+            webrtc::test::ProjectRootPath());
+}
+
+// Similar to the above test, but for the output dir
+TEST_F(FileUtilsTest, OutputPathFromRootWorkingDir) {
+  ASSERT_EQ(0, chdir(kPathDelimiter));
+  ASSERT_EQ("./", webrtc::test::OutputPath());
+}
+
+// Only tests that the code executes
+TEST_F(FileUtilsTest, CreateDirectory) {
+  std::string directory = "fileutils-unittest-empty-dir";
+  // Make sure it's removed if a previous test has failed:
+  std::remove(directory.c_str());
+  ASSERT_TRUE(webrtc::test::CreateDirectory(directory));
+  std::remove(directory.c_str());
+}
+
+TEST_F(FileUtilsTest, WorkingDirReturnsValue) {
+  // Hard to cover all platforms. Just test that it returns something without
+  // crashing:
+  std::string working_dir = webrtc::test::WorkingDir();
+  ASSERT_GT(working_dir.length(), 0u);
+}
+
+// Due to multiple platforms, it is hard to make a complete test for
+// ResourcePath. Manual testing has been performed by removing files and
+// verified the result confirms with the specified documentation for the
+// function.
+TEST_F(FileUtilsTest, ResourcePathReturnsValue) {
+  std::string resource = webrtc::test::ResourcePath(kTestName, kExtension);
+  // NOTE(review): std::string::find returns npos (which is > 0) when the
+  // substring is absent, so these assertions also pass if the name or
+  // extension is missing from the result — confirm intent.
+  ASSERT_GT(resource.find(kTestName), 0u);
+  ASSERT_GT(resource.find(kExtension), 0u);
+  // NOTE(review): the two lines below duplicate OutputPathFromRootWorkingDir
+  // and look like a copy-paste leftover here — confirm whether they belong.
+  ASSERT_EQ(0, chdir(kPathDelimiter));
+  ASSERT_EQ("./", webrtc::test::OutputPath());
+}
+
+// An existing, non-empty file must report a positive size.
+TEST_F(FileUtilsTest, GetFileSizeExistingFile) {
+  ASSERT_GT(webrtc::test::GetFileSize(files_.front()), 0u);
+}
+
+// A missing file must report size 0.
+TEST_F(FileUtilsTest, GetFileSizeNonExistingFile) {
+  ASSERT_EQ(0u, webrtc::test::GetFileSize("non-existing-file.tmp"));
+}
+
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/frame_reader.cc b/trunk/test/testsupport/frame_reader.cc
new file mode 100644
index 0000000..b05ea58
--- /dev/null
+++ b/trunk/test/testsupport/frame_reader.cc
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/frame_reader.h"
+
+#include <cassert>
+
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+namespace test {
+
+// Stores the input file name and frame length; the file itself is opened in
+// Init().
+FrameReaderImpl::FrameReaderImpl(std::string input_filename,
+                                 int frame_length_in_bytes)
+    : input_filename_(input_filename),
+      frame_length_in_bytes_(frame_length_in_bytes),
+      number_of_frames_(0),  // Computed in Init(); was left uninitialized,
+                             // so NumberOfFrames() before Init() read garbage.
+      input_file_(NULL) {
+}
+
+// Ensures the input file is closed even if the caller forgot to call Close().
+FrameReaderImpl::~FrameReaderImpl() {
+  Close();
+}
+
+// Validates the configured frame length, opens the input file and computes
+// the total number of whole frames it contains. Returns false (after printing
+// to stderr) on an invalid frame length, an unopenable file, or an empty file.
+bool FrameReaderImpl::Init() {
+  if (frame_length_in_bytes_ <= 0) {
+    fprintf(stderr, "Frame length must be >0, was %d\n",
+            frame_length_in_bytes_);
+    return false;
+  }
+  input_file_ = fopen(input_filename_.c_str(), "rb");
+  if (input_file_ == NULL) {
+    fprintf(stderr, "Couldn't open input file for reading: %s\n",
+            input_filename_.c_str());
+    return false;
+  }
+  // Calculate total number of frames.
+  size_t source_file_size = GetFileSize(input_filename_);
+  // size_t is unsigned, so "<= 0u" could only ever mean "== 0"; say so.
+  if (source_file_size == 0u) {
+    fprintf(stderr, "Found empty file: %s\n", input_filename_.c_str());
+    return false;
+  }
+  number_of_frames_ = source_file_size / frame_length_in_bytes_;
+  return true;
+}
+
+// Closes the input file if open; safe to call multiple times because the
+// handle is reset to NULL.
+void FrameReaderImpl::Close() {
+  if (input_file_ != NULL) {
+    fclose(input_file_);
+    input_file_ = NULL;
+  }
+}
+
+// Reads one frame of frame_length_in_bytes_ from the input file into
+// source_buffer. Returns false when not initialized, on a stream error, or
+// once end-of-file has been reached.
+bool FrameReaderImpl::ReadFrame(WebRtc_UWord8* source_buffer) {
+  assert(source_buffer);
+  if (input_file_ == NULL) {
+    fprintf(stderr, "FrameReader is not initialized (input file is NULL)\n");
+    return false;
+  }
+  size_t nbr_read = fread(source_buffer, 1, frame_length_in_bytes_,
+                          input_file_);
+  // A short read is only treated as an error if the stream error flag is set;
+  // a short read caused by end-of-file is handled by the feof check below.
+  if (nbr_read != static_cast<unsigned int>(frame_length_in_bytes_) &&
+      ferror(input_file_)) {
+    fprintf(stderr, "Error reading from input file: %s\n",
+            input_filename_.c_str());
+    return false;
+  }
+  if (feof(input_file_) != 0) {
+    return false;  // No more frames to process.
+  }
+  return true;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/frame_reader.h b/trunk/test/testsupport/frame_reader.h
new file mode 100644
index 0000000..56d8fc4
--- /dev/null
+++ b/trunk/test/testsupport/frame_reader.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_FRAME_READER_H_
+#define WEBRTC_TEST_TESTSUPPORT_FRAME_READER_H_
+
+#include <cstdio>
+#include <string>
+
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Handles reading of frames from video files.
+// Abstract interface; see FrameReaderImpl below for the file-based
+// implementation and MockFrameReader for the gmock version.
+class FrameReader {
+ public:
+  virtual ~FrameReader() {}
+
+  // Initializes the frame reader, i.e. opens the input file.
+  // This must be called before reading of frames has started.
+  // Returns false if an error has occurred, in addition to printing to stderr.
+  virtual bool Init() = 0;
+
+  // Reads a frame into the supplied buffer, which must contain enough space
+  // for the frame size.
+  // Returns true if there are more frames to read, false if we've already
+  // read the last frame (in the previous call).
+  virtual bool ReadFrame(WebRtc_UWord8* source_buffer) = 0;
+
+  // Closes the input file if open. Essentially makes this class impossible
+  // to use anymore. Will also be invoked by the destructor.
+  virtual void Close() = 0;
+
+  // Frame length in bytes of a single frame image.
+  virtual int FrameLength() = 0;
+  // Total number of frames in the input video source.
+  virtual int NumberOfFrames() = 0;
+};
+
+// File-based implementation of FrameReader; see frame_reader.cc.
+class FrameReaderImpl : public FrameReader {
+ public:
+  // Creates a file handler. The input file is assumed to exist and be readable.
+  // Parameters:
+  //   input_filename          The file to read from.
+  //   frame_length_in_bytes   The size of each frame.
+  //                           For YUV this is 3 * width * height / 2
+  FrameReaderImpl(std::string input_filename, int frame_length_in_bytes);
+  virtual ~FrameReaderImpl();
+  bool Init();
+  bool ReadFrame(WebRtc_UWord8* source_buffer);
+  void Close();
+  int FrameLength() { return frame_length_in_bytes_; }
+  int NumberOfFrames() { return number_of_frames_; }
+
+ private:
+  std::string input_filename_;     // Path of the file to read.
+  int frame_length_in_bytes_;      // Size of a single frame.
+  int number_of_frames_;           // Computed by Init() from the file size.
+  FILE* input_file_;               // NULL until Init(), and after Close().
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_FRAME_READER_H_
diff --git a/trunk/test/testsupport/frame_reader_unittest.cc b/trunk/test/testsupport/frame_reader_unittest.cc
new file mode 100644
index 0000000..f1da5ce
--- /dev/null
+++ b/trunk/test/testsupport/frame_reader_unittest.cc
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/frame_reader.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+namespace test {
+
+// Name and contents of the dummy input file created for each test.
+const std::string kInputFilename = "temp_inputfile.tmp";
+const std::string kInputFileContents = "baz";
+// Setting the kFrameLength value to a value much larger than the
+// file to test causes the ReadFrame test to fail on Windows.
+const int kFrameLength = 1000;
+
+// Fixture that creates a small dummy input file and an initialized
+// FrameReaderImpl reading from it.
+class FrameReaderTest: public testing::Test {
+ protected:
+  FrameReaderTest() {}
+  virtual ~FrameReaderTest() {}
+  void SetUp() {
+    // Cleanup any previous dummy input file.
+    std::remove(kInputFilename.c_str());
+
+    // Create a dummy input file.
+    // NOTE(review): the fopen result is not checked; SetUp would crash on a
+    // NULL handle if the file cannot be created.
+    FILE* dummy = fopen(kInputFilename.c_str(), "wb");
+    fprintf(dummy, "%s", kInputFileContents.c_str());
+    fclose(dummy);
+
+    frame_reader_ = new FrameReaderImpl(kInputFilename, kFrameLength);
+    ASSERT_TRUE(frame_reader_->Init());
+  }
+  void TearDown() {
+    delete frame_reader_;
+    // Cleanup the dummy input file.
+    std::remove(kInputFilename.c_str());
+  }
+  FrameReader* frame_reader_;  // Owned; deleted in TearDown().
+};
+
+TEST_F(FrameReaderTest, InitSuccess) {
+  FrameReaderImpl frame_reader(kInputFilename, kFrameLength);
+  ASSERT_TRUE(frame_reader.Init());
+  ASSERT_EQ(kFrameLength, frame_reader.FrameLength());
+  // The dummy file (3 bytes) is smaller than one frame, so zero whole frames.
+  ASSERT_EQ(0, frame_reader.NumberOfFrames());
+}
+
+TEST_F(FrameReaderTest, ReadFrame) {
+  WebRtc_UWord8 buffer[3];
+  bool result = frame_reader_->ReadFrame(buffer);
+  ASSERT_FALSE(result);  // No more frames to read (EOF hit on a short read).
+  // The partially read data must still have been copied into the buffer.
+  ASSERT_EQ(kInputFileContents[0], buffer[0]);
+  ASSERT_EQ(kInputFileContents[1], buffer[1]);
+  ASSERT_EQ(kInputFileContents[2], buffer[2]);
+}
+
+// ReadFrame must fail gracefully when Init() has not been called.
+TEST_F(FrameReaderTest, ReadFrameUninitialized) {
+  WebRtc_UWord8 buffer[3];
+  FrameReaderImpl file_reader(kInputFilename, kFrameLength);
+  ASSERT_FALSE(file_reader.ReadFrame(buffer));
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/frame_writer.cc b/trunk/test/testsupport/frame_writer.cc
new file mode 100644
index 0000000..5f32539
--- /dev/null
+++ b/trunk/test/testsupport/frame_writer.cc
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/frame_writer.h"
+
+#include <cassert>
+
+namespace webrtc {
+namespace test {
+
+// Stores the output file name and frame length; the file itself is opened in
+// Init().
+FrameWriterImpl::FrameWriterImpl(std::string output_filename,
+                                 int frame_length_in_bytes)
+    : output_filename_(output_filename),
+      frame_length_in_bytes_(frame_length_in_bytes),
+      output_file_(NULL) {
+}
+
+// Ensures the output file is closed even if the caller forgot to call Close().
+FrameWriterImpl::~FrameWriterImpl() {
+  Close();
+}
+
+// Validates the configured frame length and opens (truncates) the output
+// file. Returns false (after printing to stderr) on an invalid frame length
+// or an unopenable file.
+bool FrameWriterImpl::Init() {
+  if (frame_length_in_bytes_ <= 0) {
+    fprintf(stderr, "Frame length must be >0, was %d\n",
+            frame_length_in_bytes_);
+    return false;
+  }
+  output_file_ = fopen(output_filename_.c_str(), "wb");
+  if (output_file_ == NULL) {
+    fprintf(stderr, "Couldn't open output file for writing: %s\n",
+            output_filename_.c_str());
+    return false;
+  }
+  return true;
+}
+
+// Closes the output file if open; safe to call multiple times because the
+// handle is reset to NULL.
+void FrameWriterImpl::Close() {
+  if (output_file_ != NULL) {
+    fclose(output_file_);
+    output_file_ = NULL;
+  }
+}
+
+// Writes one frame of frame_length_in_bytes_ from frame_buffer to the output
+// file. Returns false when not initialized or when fewer bytes than a full
+// frame could be written.
+bool FrameWriterImpl::WriteFrame(WebRtc_UWord8* frame_buffer) {
+  assert(frame_buffer);
+  if (output_file_ == NULL) {
+    fprintf(stderr, "FrameWriter is not initialized (output file is NULL)\n");
+    return false;
+  }
+  // fwrite returns size_t; keep that type instead of narrowing to int, and
+  // compare with an explicit cast (mirrors FrameReaderImpl::ReadFrame).
+  size_t bytes_written = fwrite(frame_buffer, 1, frame_length_in_bytes_,
+                                output_file_);
+  if (bytes_written != static_cast<size_t>(frame_length_in_bytes_)) {
+    fprintf(stderr, "Failed to write %d bytes to file %s\n",
+            frame_length_in_bytes_, output_filename_.c_str());
+    return false;
+  }
+  return true;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/frame_writer.h b/trunk/test/testsupport/frame_writer.h
new file mode 100644
index 0000000..abc5d35
--- /dev/null
+++ b/trunk/test/testsupport/frame_writer.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
+#define WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
+
+#include <cstdio>
+#include <string>
+
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Handles writing of video files.
+// Abstract interface; see FrameWriterImpl below for the file-based
+// implementation and MockFrameWriter for the gmock version.
+class FrameWriter {
+ public:
+  virtual ~FrameWriter() {}
+
+  // Initializes the file handler, i.e. opens the input and output files etc.
+  // This must be called before reading or writing frames has started.
+  // Returns false if an error has occurred, in addition to printing to stderr.
+  virtual bool Init() = 0;
+
+  // Writes a frame of the configured frame length to the output file.
+  // Returns true if the write was successful, false otherwise.
+  virtual bool WriteFrame(WebRtc_UWord8* frame_buffer) = 0;
+
+  // Closes the output file if open. Essentially makes this class impossible
+  // to use anymore. Will also be invoked by the destructor.
+  virtual void Close() = 0;
+
+  // Frame length in bytes of a single frame image.
+  virtual int FrameLength() = 0;
+};
+
+// File-based implementation of FrameWriter; see frame_writer.cc.
+class FrameWriterImpl : public FrameWriter {
+ public:
+  // Creates a file handler. The input file is assumed to exist and be readable
+  // and the output file must be writable.
+  // Parameters:
+  //   output_filename         The file to write. Will be overwritten if already
+  //                           existing.
+  //   frame_length_in_bytes   The size of each frame.
+  //                           For YUV: 3*width*height/2
+  FrameWriterImpl(std::string output_filename, int frame_length_in_bytes);
+  virtual ~FrameWriterImpl();
+  bool Init();
+  bool WriteFrame(WebRtc_UWord8* frame_buffer);
+  void Close();
+  int FrameLength() { return frame_length_in_bytes_; }
+
+ private:
+  // Removed the unused (and never initialized) number_of_frames_ member that
+  // no code in frame_writer.cc referenced.
+  std::string output_filename_;  // Path of the file to write.
+  int frame_length_in_bytes_;    // Size of a single frame.
+  FILE* output_file_;            // NULL until Init(), and after Close().
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
diff --git a/trunk/test/testsupport/frame_writer_unittest.cc b/trunk/test/testsupport/frame_writer_unittest.cc
new file mode 100644
index 0000000..d25d1d2
--- /dev/null
+++ b/trunk/test/testsupport/frame_writer_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/frame_writer.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+namespace test {
+
+// Name of the temporary output file and the frame size used by all tests.
+const std::string kOutputFilename = "temp_outputfile.tmp";
+const int kFrameLength = 1000;
+
+// Fixture that provides an initialized FrameWriterImpl writing to a
+// temporary file, removed again in TearDown().
+class FrameWriterTest: public testing::Test {
+ protected:
+  FrameWriterTest() {}
+  virtual ~FrameWriterTest() {}
+  void SetUp() {
+    // Cleanup any previous output file.
+    std::remove(kOutputFilename.c_str());
+    frame_writer_ = new FrameWriterImpl(kOutputFilename, kFrameLength);
+    ASSERT_TRUE(frame_writer_->Init());
+  }
+  void TearDown() {
+    delete frame_writer_;
+    // Cleanup the temporary file.
+    std::remove(kOutputFilename.c_str());
+  }
+  FrameWriter* frame_writer_;  // Owned; deleted in TearDown().
+};
+
+// Init on a fresh writer must succeed and report the configured frame length.
+TEST_F(FrameWriterTest, InitSuccess) {
+  FrameWriterImpl frame_writer(kOutputFilename, kFrameLength);
+  ASSERT_TRUE(frame_writer.Init());
+  ASSERT_EQ(kFrameLength, frame_writer.FrameLength());
+}
+
+TEST_F(FrameWriterTest, WriteFrame) {
+  WebRtc_UWord8 buffer[kFrameLength];
+  memset(buffer, 9, kFrameLength);  // Write lots of 9s to the buffer
+  bool result = frame_writer_->WriteFrame(buffer);
+  ASSERT_TRUE(result);  // success
+  // Close the file and verify the size equals exactly one frame.
+  frame_writer_->Close();
+  ASSERT_EQ(kFrameLength,
+            static_cast<int>(GetFileSize(kOutputFilename)));
+}
+
+// WriteFrame must fail gracefully when Init() has not been called.
+TEST_F(FrameWriterTest, WriteFrameUninitialized) {
+  WebRtc_UWord8 buffer[3];
+  FrameWriterImpl frame_writer(kOutputFilename, kFrameLength);
+  ASSERT_FALSE(frame_writer.WriteFrame(buffer));
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/gtest_prod_util.h b/trunk/test/testsupport/gtest_prod_util.h
new file mode 100644
index 0000000..7d123a8
--- /dev/null
+++ b/trunk/test/testsupport/gtest_prod_util.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_GTEST_PROD_UTIL_H_
+#define WEBRTC_TEST_TESTSUPPORT_GTEST_PROD_UTIL_H_
+#pragma once
+
+#include "gtest/gtest_prod.h"
+
+// This file is a plain copy of Chromium's base/gtest_prod_util.h.
+//
+// This is a wrapper for gtest's FRIEND_TEST macro that friends
+// test with all possible prefixes. This is very helpful when changing the test
+// prefix, because the friend declarations don't need to be updated.
+//
+// Example usage:
+//
+// class MyClass {
+//  private:
+//   void MyMethod();
+//   FRIEND_TEST_ALL_PREFIXES(MyClassTest, MyMethod);
+// };
+// The extra prefixes cover the DISABLED_, FLAKY_ and FAILS_ test naming
+// conventions in addition to the plain test name.
+#define FRIEND_TEST_ALL_PREFIXES(test_case_name, test_name) \
+  FRIEND_TEST(test_case_name, test_name); \
+  FRIEND_TEST(test_case_name, DISABLED_##test_name); \
+  FRIEND_TEST(test_case_name, FLAKY_##test_name); \
+  FRIEND_TEST(test_case_name, FAILS_##test_name)
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_GTEST_PROD_UTIL_H_
diff --git a/trunk/test/testsupport/metrics/video_metrics.cc b/trunk/test/testsupport/metrics/video_metrics.cc
new file mode 100644
index 0000000..438f31b
--- /dev/null
+++ b/trunk/test/testsupport/metrics/video_metrics.cc
@@ -0,0 +1,185 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/metrics/video_metrics.h"
+
+#include <algorithm> // min_element, max_element
+#include <cassert>
+#include <cstdio>
+
+#include "common_video/libyuv/include/libyuv.h"
+
+namespace webrtc {
+namespace test {
+
+// Strict-weak-ordering predicate for std::min_element/std::max_element:
+// orders FrameResult entries by their metric value.
+static bool LessForFrameResultValue(const FrameResult& s1,
+                                    const FrameResult& s2) {
+  return s1.value < s2.value;
+}
+
+enum VideoMetricsType { kPSNR, kSSIM, kBoth };
+
+// Calculates metrics for a frame and adds statistics to the result for it.
+// video_metrics_type must be kPSNR or kSSIM here; kBoth is handled by the
+// caller issuing two separate calls (the default branch asserts).
+void CalculateFrame(VideoMetricsType video_metrics_type,
+                    uint8_t* ref,
+                    uint8_t* test,
+                    int width,
+                    int height,
+                    int frame_number,
+                    QualityMetricsResult* result) {
+  FrameResult frame_result = {0, 0};
+  frame_result.frame_number = frame_number;
+  switch (video_metrics_type) {
+    case kPSNR:
+      frame_result.value = I420PSNR(ref, test, width, height);
+      break;
+    case kSSIM:
+      frame_result.value = I420SSIM(ref, test, width, height);
+      break;
+    default:
+      assert(false);
+  }
+  result->frames.push_back(frame_result);
+}
+
+// Calculates average, min and max values for the supplied struct, if non-NULL.
+// With no frames, returns early and leaves the constructor's sentinel values
+// (see QualityMetricsResult) untouched.
+void CalculateStats(QualityMetricsResult* result) {
+  if (result == NULL || result->frames.size() == 0) {
+    return;
+  }
+  // Calculate average
+  std::vector<FrameResult>::iterator iter;
+  double metrics_values_sum = 0.0;
+  for (iter = result->frames.begin(); iter != result->frames.end(); ++iter) {
+    metrics_values_sum += iter->value;
+  }
+  result->average = metrics_values_sum / result->frames.size();
+
+  // Calculate min/max statistics
+  iter = std::min_element(result->frames.begin(), result->frames.end(),
+                     LessForFrameResultValue);
+  result->min = iter->value;
+  result->min_frame_number = iter->frame_number;
+  iter = std::max_element(result->frames.begin(), result->frames.end(),
+                     LessForFrameResultValue);
+  result->max = iter->value;
+  result->max_frame_number = iter->frame_number;
+}
+
+// Single method that handles all combinations of video metrics calculation, to
+// minimize code duplication. Either psnr_result or ssim_result may be NULL,
+// depending on which VideoMetricsType is targeted.
+// Returns 0 on success, -1/-2 if the reference/test file cannot be opened,
+// -3 if no complete frame could be read from either file.
+int CalculateMetrics(VideoMetricsType video_metrics_type,
+                     const char* ref_filename,
+                     const char* test_filename,
+                     int width,
+                     int height,
+                     QualityMetricsResult* psnr_result,
+                     QualityMetricsResult* ssim_result) {
+  assert(ref_filename != NULL);
+  assert(test_filename != NULL);
+  assert(width > 0);
+  assert(height > 0);
+
+  FILE* ref_fp = fopen(ref_filename, "rb");
+  if (ref_fp == NULL) {
+    // cannot open reference file
+    fprintf(stderr, "Cannot open file %s\n", ref_filename);
+    return -1;
+  }
+  FILE* test_fp = fopen(test_filename, "rb");
+  if (test_fp == NULL) {
+    // cannot open test file
+    fprintf(stderr, "Cannot open file %s\n", test_filename);
+    fclose(ref_fp);
+    return -2;
+  }
+  int frame_number = 0;
+
+  // Allocating size for one I420 frame.
+  const int frame_length = 3 * width * height >> 1;
+  uint8_t* ref = new uint8_t[frame_length];
+  uint8_t* test = new uint8_t[frame_length];
+
+  // Process frame pairs until either file runs short of a full frame; a
+  // trailing partial frame in either file is ignored.
+  int ref_bytes = fread(ref, 1, frame_length, ref_fp);
+  int test_bytes = fread(test, 1, frame_length, test_fp);
+  while (ref_bytes == frame_length && test_bytes == frame_length) {
+    switch (video_metrics_type) {
+      case kPSNR:
+        CalculateFrame(kPSNR, ref, test, width, height, frame_number,
+                       psnr_result);
+        break;
+      case kSSIM:
+        CalculateFrame(kSSIM, ref, test, width, height, frame_number,
+                       ssim_result);
+        break;
+      case kBoth:
+        CalculateFrame(kPSNR, ref, test, width, height, frame_number,
+                       psnr_result);
+        CalculateFrame(kSSIM, ref, test, width, height, frame_number,
+                       ssim_result);
+        break;
+    }
+    frame_number++;
+    ref_bytes = fread(ref, 1, frame_length, ref_fp);
+    test_bytes = fread(test, 1, frame_length, test_fp);
+  }
+  int return_code = 0;
+  if (frame_number == 0) {
+    fprintf(stderr, "Tried to measure video metrics from empty files "
+            "(reference file: %s  test file: %s)\n", ref_filename,
+            test_filename);
+    return_code = -3;
+  } else {
+    // CalculateStats tolerates NULL, so both can be called unconditionally.
+    CalculateStats(psnr_result);
+    CalculateStats(ssim_result);
+  }
+  delete [] ref;
+  delete [] test;
+  fclose(ref_fp);
+  fclose(test_fp);
+  return return_code;
+}
+
+// Public entry point: computes both PSNR and SSIM in a single pass over the
+// files. See video_metrics.h for the full contract and return codes.
+int I420MetricsFromFiles(const char* ref_filename,
+                         const char* test_filename,
+                         int width,
+                         int height,
+                         QualityMetricsResult* psnr_result,
+                         QualityMetricsResult* ssim_result) {
+  assert(psnr_result != NULL);
+  assert(ssim_result != NULL);
+  return CalculateMetrics(kBoth, ref_filename, test_filename, width, height,
+                          psnr_result, ssim_result);
+}
+
+// Public entry point: computes PSNR only. See video_metrics.h for the
+// contract and return codes.
+int I420PSNRFromFiles(const char* ref_filename,
+                      const char* test_filename,
+                      int width,
+                      int height,
+                      QualityMetricsResult* result) {
+  assert(result != NULL);
+  return CalculateMetrics(kPSNR, ref_filename, test_filename, width, height,
+                          result, NULL);
+}
+
+// Public entry point: computes SSIM only. See video_metrics.h for the
+// contract and return codes.
+int I420SSIMFromFiles(const char* ref_filename,
+                      const char* test_filename,
+                      int width,
+                      int height,
+                      QualityMetricsResult* result) {
+  assert(result != NULL);
+  return CalculateMetrics(kSSIM, ref_filename, test_filename, width, height,
+                          NULL, result);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/metrics/video_metrics.h b/trunk/test/testsupport/metrics/video_metrics.h
new file mode 100644
index 0000000..df11a49
--- /dev/null
+++ b/trunk/test/testsupport/metrics/video_metrics.h
@@ -0,0 +1,112 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TESTSUPPORT_METRICS_VIDEO_METRICS_H_
+#define WEBRTC_TESTSUPPORT_METRICS_VIDEO_METRICS_H_
+
+#include <limits>
+#include <vector>
+
+namespace webrtc {
+namespace test {
+
+// Contains video quality metrics result for a single frame.
+struct FrameResult {
+  int frame_number;  // 0-based index of the frame this value belongs to.
+  double value;      // The PSNR or SSIM value for this frame.
+};
+
+// Result from a PSNR/SSIM calculation operation.
+// The frames in this data structure are 0-indexed.
+struct QualityMetricsResult {
+  QualityMetricsResult() :
+    average(0.0),
+    // Sentinels chosen so any real measurement replaces them: min starts at
+    // the largest double, max at the most negative finite double. Note that
+    // std::numeric_limits<double>::min() is the smallest *positive* double,
+    // so it would have been an incorrect initial value for max.
+    min(std::numeric_limits<double>::max()),
+    max(-std::numeric_limits<double>::max()),
+    min_frame_number(-1),
+    max_frame_number(-1)
+  {}
+  double average;        // Mean metric value over all frames.
+  double min;            // Smallest per-frame value.
+  double max;            // Largest per-frame value.
+  int min_frame_number;  // Frame index of min; -1 when no frames processed.
+  int max_frame_number;  // Frame index of max; -1 when no frames processed.
+  std::vector<FrameResult> frames;  // Per-frame results in frame order.
+};
+
+// Calculates PSNR and SSIM values for the reference and test video files
+// (must be in I420 format). All calculated values are filled into the
+// QualityMetricsResult stucts.
+// PSNR values have the unit decibel (dB) where a high value means the test file
+// is similar to the reference file. The higher value, the more similar.
+// For more info about PSNR, see http://en.wikipedia.org/wiki/PSNR
+// SSIM values range between -1.0 and 1.0, where 1.0 means the files are
+// identical. For more info about SSIM, see http://en.wikipedia.org/wiki/SSIM
+// This function only compares video frames up to the point when the shortest
+// video ends.
+// Return value:
+//  0 if successful, negative on errors:
+// -1 if the source file cannot be opened
+// -2 if the test file cannot be opened
+// -3 if any of the files are empty
+// -4 if any arguments are invalid.
+int I420MetricsFromFiles(const char* ref_filename,
+                         const char* test_filename,
+                         int width,
+                         int height,
+                         QualityMetricsResult* psnr_result,
+                         QualityMetricsResult* ssim_result);
+
+// Calculates PSNR values for the reference and test video files (must be in
+// I420 format). All calculated values are filled into the QualityMetricsResult
+// struct.
+// PSNR values have the unit decibel (dB) where a high value means the test file
+// is similar to the reference file. The higher value, the more similar.
+// This function only compares video frames up to the point when the shortest
+// video ends.
+// For more info about PSNR, see http://en.wikipedia.org/wiki/PSNR
+//
+// Return value:
+//  0 if successful, negative on errors:
+// -1 if the source file cannot be opened
+// -2 if the test file cannot be opened
+// -3 if any of the files are empty
+// -4 if any arguments are invalid.
+int I420PSNRFromFiles(const char* ref_filename,
+                      const char* test_filename,
+                      int width,
+                      int height,
+                      QualityMetricsResult* result);
+
+// Calculates SSIM values for the reference and test video files (must be in
+// I420 format). All calculated values are filled into the QualityMetricsResult
+// struct.
+// SSIM values range between -1.0 and 1.0, where 1.0 means the files are
+// identical.
+// This function only compares video frames up to the point when the shortest
+// video ends.
+// For more info about SSIM, see http://en.wikipedia.org/wiki/SSIM
+//
+// Return value:
+//  0 if successful, negative on errors:
+// -1 if the source file cannot be opened
+// -2 if the test file cannot be opened
+// -3 if any of the files are empty
+// -4 if any arguments are invalid.
+int I420SSIMFromFiles(const char* ref_filename,
+                      const char* test_filename,
+                      int width,
+                      int height,
+                      QualityMetricsResult* result);
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif // WEBRTC_TESTSUPPORT_METRICS_VIDEO_METRICS_H_
diff --git a/trunk/test/testsupport/metrics/video_metrics_unittest.cc b/trunk/test/testsupport/metrics/video_metrics_unittest.cc
new file mode 100644
index 0000000..e77dbff
--- /dev/null
+++ b/trunk/test/testsupport/metrics/video_metrics_unittest.cc
@@ -0,0 +1,139 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/metrics/video_metrics.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+// Temporary/nonexistent file names and video dimensions used by all tests.
+static const char* kEmptyFileName = "video_metrics_unittest_empty_file.tmp";
+static const char* kNonExistingFileName = "video_metrics_unittest_non_existing";
+static const int kWidth = 352;
+static const int kHeight = 288;
+
+// Expected return codes from the I420*FromFiles functions.
+static const int kMissingReferenceFileReturnCode = -1;
+static const int kMissingTestFileReturnCode = -2;
+static const int kEmptyFileReturnCode = -3;
+// Values expected when comparing a file against itself. PSNR of 48 dB is
+// presumably the capped value returned by I420PSNR for identical input —
+// confirm against the I420PSNR implementation.
+static const double kPsnrPerfectResult =  48.0;
+static const double kSsimPerfectResult = 1.0;
+
+// Fixture that resolves the reference video resource and creates an empty
+// temporary file for the empty-file tests.
+class VideoMetricsTest: public testing::Test {
+ protected:
+  VideoMetricsTest() {
+    video_file_ = webrtc::test::ResourcePath("foreman_cif_short", "yuv");
+  }
+  virtual ~VideoMetricsTest() {}
+  void SetUp() {
+    // Create an empty file:
+    FILE* dummy = fopen(kEmptyFileName, "wb");
+    fclose(dummy);
+  }
+  void TearDown() {
+    std::remove(kEmptyFileName);
+  }
+  webrtc::test::QualityMetricsResult psnr_result_;
+  webrtc::test::QualityMetricsResult ssim_result_;
+  std::string video_file_;  // Path to the reference video resource.
+};
+
+// Tests that it is possible to run with the same reference as test file
+TEST_F(VideoMetricsTest, ReturnsPerfectResultForIdenticalFilesPSNR) {
+  EXPECT_EQ(0, I420PSNRFromFiles(video_file_.c_str(), video_file_.c_str(),
+                                 kWidth, kHeight, &psnr_result_));
+  // Identical files must produce the defined perfect PSNR average (48.0).
+  EXPECT_EQ(kPsnrPerfectResult, psnr_result_.average);
+}
+
+TEST_F(VideoMetricsTest, ReturnsPerfectResultForIdenticalFilesSSIM) {
+  EXPECT_EQ(0, I420SSIMFromFiles(video_file_.c_str(), video_file_.c_str(),
+                                 kWidth, kHeight, &ssim_result_));
+  // Identical files must produce the perfect SSIM average (1.0).
+  EXPECT_EQ(kSsimPerfectResult, ssim_result_.average);
+}
+
+// Both metrics computed in one call must match the single-metric results.
+TEST_F(VideoMetricsTest, ReturnsPerfectResultForIdenticalFilesBothMetrics) {
+  EXPECT_EQ(0, I420MetricsFromFiles(video_file_.c_str(), video_file_.c_str(),
+                                    kWidth, kHeight, &psnr_result_,
+                                    &ssim_result_));
+  EXPECT_EQ(kPsnrPerfectResult, psnr_result_.average);
+  EXPECT_EQ(kSsimPerfectResult, ssim_result_.average);
+}
+
+// Tests that the right return code is given when the reference file is missing.
+TEST_F(VideoMetricsTest, MissingReferenceFilePSNR) {
+  // Use the PSNR result struct; the original passed &ssim_result_, which
+  // worked (the result is unused on failure) but was inconsistent with the
+  // metric under test.
+  EXPECT_EQ(kMissingReferenceFileReturnCode,
+            I420PSNRFromFiles(kNonExistingFileName, video_file_.c_str(),
+                              kWidth, kHeight, &psnr_result_));
+}
+
+// A missing reference file must yield kMissingReferenceFileReturnCode (-1).
+TEST_F(VideoMetricsTest, MissingReferenceFileSSIM) {
+  EXPECT_EQ(kMissingReferenceFileReturnCode,
+            I420SSIMFromFiles(kNonExistingFileName, video_file_.c_str(),
+                              kWidth, kHeight, &ssim_result_));
+}
+
+// Same check for the combined-metrics entry point.
+TEST_F(VideoMetricsTest, MissingReferenceFileBothMetrics) {
+  EXPECT_EQ(kMissingReferenceFileReturnCode,
+            I420MetricsFromFiles(kNonExistingFileName, video_file_.c_str(),
+                                 kWidth, kHeight,
+                                 &psnr_result_, &ssim_result_));
+}
+
+// Tests that the right return code is given when the test file is missing.
+TEST_F(VideoMetricsTest, MissingTestFilePSNR) {
+  // Use the PSNR result struct; the original passed &ssim_result_, which
+  // worked (the result is unused on failure) but was inconsistent with the
+  // metric under test.
+  EXPECT_EQ(kMissingTestFileReturnCode,
+            I420PSNRFromFiles(video_file_.c_str(), kNonExistingFileName,
+                              kWidth, kHeight, &psnr_result_));
+}
+
+// A missing test file must yield kMissingTestFileReturnCode (-2).
+TEST_F(VideoMetricsTest, MissingTestFileSSIM) {
+  EXPECT_EQ(kMissingTestFileReturnCode,
+            I420SSIMFromFiles(video_file_.c_str(), kNonExistingFileName,
+                              kWidth, kHeight, &ssim_result_));
+}
+
+// Same check for the combined-metrics entry point.
+TEST_F(VideoMetricsTest, MissingTestFileBothMetrics) {
+  EXPECT_EQ(kMissingTestFileReturnCode,
+            I420MetricsFromFiles(video_file_.c_str(), kNonExistingFileName,
+                                 kWidth, kHeight,
+                                 &psnr_result_, &ssim_result_));
+}
+
+// Tests that the method can be executed with empty files.
+TEST_F(VideoMetricsTest, EmptyFilesPSNR) {
+  // Use the PSNR result struct; the original passed &ssim_result_, which
+  // worked (the result is unused on failure) but was inconsistent with the
+  // metric under test.
+  EXPECT_EQ(kEmptyFileReturnCode,
+            I420PSNRFromFiles(kEmptyFileName, video_file_.c_str(),
+                              kWidth, kHeight, &psnr_result_));
+  EXPECT_EQ(kEmptyFileReturnCode,
+            I420PSNRFromFiles(video_file_.c_str(), kEmptyFileName,
+                              kWidth, kHeight, &psnr_result_));
+}
+
+// An empty file on either side must yield kEmptyFileReturnCode (-3).
+TEST_F(VideoMetricsTest, EmptyFilesSSIM) {
+  EXPECT_EQ(kEmptyFileReturnCode,
+            I420SSIMFromFiles(kEmptyFileName, video_file_.c_str(),
+                              kWidth, kHeight, &ssim_result_));
+  EXPECT_EQ(kEmptyFileReturnCode,
+            I420SSIMFromFiles(video_file_.c_str(), kEmptyFileName,
+                              kWidth, kHeight, &ssim_result_));
+}
+
+// An empty file on either side must yield kEmptyFileReturnCode (-3) from the
+// combined-metrics entry point. (The second call's continuation lines were
+// over-indented; normalized to match the first call.)
+TEST_F(VideoMetricsTest, EmptyFilesBothMetrics) {
+  EXPECT_EQ(kEmptyFileReturnCode,
+            I420MetricsFromFiles(kEmptyFileName, video_file_.c_str(),
+                                 kWidth, kHeight,
+                                 &psnr_result_, &ssim_result_));
+  EXPECT_EQ(kEmptyFileReturnCode,
+            I420MetricsFromFiles(video_file_.c_str(), kEmptyFileName,
+                                 kWidth, kHeight,
+                                 &psnr_result_, &ssim_result_));
+}
+
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/mock/mock_frame_reader.h b/trunk/test/testsupport/mock/mock_frame_reader.h
new file mode 100644
index 0000000..ecfc13c
--- /dev/null
+++ b/trunk/test/testsupport/mock/mock_frame_reader.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_MOCK_MOCK_FRAME_READER_H_
+#define WEBRTC_TEST_TESTSUPPORT_MOCK_MOCK_FRAME_READER_H_
+
+#include "testsupport/frame_reader.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+namespace test {
+
+class MockFrameReader : public FrameReader {
+ public:
+  MOCK_METHOD0(Init, bool());
+  MOCK_METHOD1(ReadFrame, bool(WebRtc_UWord8* source_buffer));
+  MOCK_METHOD0(Close, void());
+  MOCK_METHOD0(FrameLength, int());
+  MOCK_METHOD0(NumberOfFrames, int());
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_MOCK_MOCK_FRAME_READER_H_
diff --git a/trunk/test/testsupport/mock/mock_frame_writer.h b/trunk/test/testsupport/mock/mock_frame_writer.h
new file mode 100644
index 0000000..ba79184
--- /dev/null
+++ b/trunk/test/testsupport/mock/mock_frame_writer.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_MOCK_MOCK_FRAME_WRITER_H_
+#define WEBRTC_TEST_TESTSUPPORT_MOCK_MOCK_FRAME_WRITER_H_
+
+#include "testsupport/frame_writer.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+namespace test {
+
+class MockFrameWriter : public FrameWriter {
+ public:
+  MOCK_METHOD0(Init, bool());
+  MOCK_METHOD1(WriteFrame, bool(WebRtc_UWord8* frame_buffer));
+  MOCK_METHOD0(Close, void());
+  MOCK_METHOD0(FrameLength, int());
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_MOCK_MOCK_FRAME_WRITER_H_
diff --git a/trunk/test/testsupport/packet_reader.cc b/trunk/test/testsupport/packet_reader.cc
new file mode 100644
index 0000000..e8859d1
--- /dev/null
+++ b/trunk/test/testsupport/packet_reader.cc
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/packet_reader.h"
+
+#include <cassert>
+#include <cstdio>
+
+namespace webrtc {
+namespace test {
+
+PacketReader::PacketReader()
+    : initialized_(false) {}
+
+PacketReader::~PacketReader() {}
+
+void PacketReader::InitializeReading(WebRtc_UWord8* data,
+                                     int data_length_in_bytes,
+                                     int packet_size_in_bytes) {
+  assert(data);
+  assert(data_length_in_bytes >= 0);
+  assert(packet_size_in_bytes > 0);
+  data_ = data;
+  data_length_ = data_length_in_bytes;
+  packet_size_ = packet_size_in_bytes;
+  currentIndex_ = 0;
+  initialized_ = true;
+}
+
+int PacketReader::NextPacket(WebRtc_UWord8** packet_pointer) {
+  if (!initialized_) {
+    fprintf(stderr, "Attempting to use uninitialized PacketReader!\n");
+    return -1;
+  }
+  *packet_pointer = data_ + currentIndex_;
+  // Check if we're about to read the last packet:
+  if (data_length_ - currentIndex_ <= packet_size_) {
+    int size = data_length_ - currentIndex_;
+    currentIndex_ = data_length_;
+    assert(size >= 0);
+    return size;
+  }
+  currentIndex_ += packet_size_;
+  assert(packet_size_ >= 0);
+  return packet_size_;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/packet_reader.h b/trunk/test/testsupport/packet_reader.h
new file mode 100644
index 0000000..4cb0bb1
--- /dev/null
+++ b/trunk/test/testsupport/packet_reader.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_PACKET_READER_H_
+#define WEBRTC_TEST_TESTSUPPORT_PACKET_READER_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Reads chunks of data to simulate network packets from a byte array.
+class PacketReader {
+ public:
+  PacketReader();
+  virtual ~PacketReader();
+
+  // Initializes a new reading operation. Must be done before invoking the
+  // NextPacket method.
+  // * data_length_in_bytes is the length of the data byte array. Must be >= 0.
+  //   A length of 0 will result in no packets being read.
+  // * packet_size_in_bytes is the number of bytes to read in each NextPacket
+  //   method call. Must be > 0
+  virtual void InitializeReading(WebRtc_UWord8* data, int data_length_in_bytes,
+                                 int packet_size_in_bytes);
+
+  // Moves the supplied pointer to the beginning of the next packet.
+  // Returns:
+  // *  The size of the packet ready to read (lower than the packet size for
+  //    the last packet)
+  // *  0 if there are no more packets to read
+  // * -1 if InitializeReading has not been called (also prints to stderr).
+  virtual int NextPacket(WebRtc_UWord8** packet_pointer);
+
+ private:
+  WebRtc_UWord8* data_;
+  int data_length_;
+  int packet_size_;
+  int currentIndex_;
+  bool initialized_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_PACKET_READER_H_
diff --git a/trunk/test/testsupport/packet_reader_unittest.cc b/trunk/test/testsupport/packet_reader_unittest.cc
new file mode 100644
index 0000000..6719e4c
--- /dev/null
+++ b/trunk/test/testsupport/packet_reader_unittest.cc
@@ -0,0 +1,123 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/packet_reader.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/unittest_utils.h"
+
+namespace webrtc {
+namespace test {
+
+class PacketReaderTest: public PacketRelatedTest {
+ protected:
+  PacketReaderTest() {}
+  virtual ~PacketReaderTest() {}
+  void SetUp() {
+    reader_ = new PacketReader();
+  }
+  void TearDown() {
+    delete reader_;
+  }
+  void VerifyPacketData(int expected_length,
+                        int actual_length,
+                        WebRtc_UWord8* original_data_pointer,
+                        WebRtc_UWord8* new_data_pointer) {
+    EXPECT_EQ(expected_length, actual_length);
+    EXPECT_EQ(*original_data_pointer, *new_data_pointer);
+    EXPECT_EQ(0, memcmp(original_data_pointer, new_data_pointer,
+                        actual_length));
+  }
+  PacketReader* reader_;
+};
+
+// Test lack of initialization
+TEST_F(PacketReaderTest, Uninitialized) {
+  WebRtc_UWord8* data_pointer = NULL;
+  EXPECT_EQ(-1, reader_->NextPacket(&data_pointer));
+  EXPECT_EQ(NULL, data_pointer);
+}
+
+TEST_F(PacketReaderTest, InitializeZeroLengthArgument) {
+  reader_->InitializeReading(packet_data_, 0, kPacketSizeInBytes);
+  ASSERT_EQ(0, reader_->NextPacket(&packet_data_pointer_));
+}
+
+// Test with something smaller than one packet
+TEST_F(PacketReaderTest, NormalSmallData) {
+  const int kDataLengthInBytes = 1499;
+  WebRtc_UWord8 data[kDataLengthInBytes];
+  WebRtc_UWord8* data_pointer = data;
+  memset(data, 1, kDataLengthInBytes);
+
+  reader_->InitializeReading(data, kDataLengthInBytes, kPacketSizeInBytes);
+  int length_to_read = reader_->NextPacket(&data_pointer);
+  VerifyPacketData(kDataLengthInBytes, length_to_read, data, data_pointer);
+  EXPECT_EQ(0, data_pointer - data);  // pointer hasn't moved
+
+  // Reading another one shall result in 0 bytes:
+  length_to_read = reader_->NextPacket(&data_pointer);
+  EXPECT_EQ(0, length_to_read);
+  EXPECT_EQ(kDataLengthInBytes, data_pointer - data);
+}
+
+// Test with data length that exactly matches one packet
+TEST_F(PacketReaderTest, NormalOnePacketData) {
+  WebRtc_UWord8 data[kPacketSizeInBytes];
+  WebRtc_UWord8* data_pointer = data;
+  memset(data, 1, kPacketSizeInBytes);
+
+  reader_->InitializeReading(data, kPacketSizeInBytes, kPacketSizeInBytes);
+  int length_to_read = reader_->NextPacket(&data_pointer);
+  VerifyPacketData(kPacketSizeInBytes, length_to_read, data, data_pointer);
+  EXPECT_EQ(0, data_pointer - data);  // pointer hasn't moved
+
+  // Reading another one shall result in 0 bytes:
+  length_to_read = reader_->NextPacket(&data_pointer);
+  EXPECT_EQ(0, length_to_read);
+  EXPECT_EQ(kPacketSizeInBytes, data_pointer - data);
+}
+
+// Test with data length that will result in 3 packets
+TEST_F(PacketReaderTest, NormalLargeData) {
+  reader_->InitializeReading(packet_data_, kPacketDataLength,
+                             kPacketSizeInBytes);
+
+  int length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  VerifyPacketData(kPacketSizeInBytes, length_to_read,
+                   packet1_, packet_data_pointer_);
+
+  length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  VerifyPacketData(kPacketSizeInBytes, length_to_read,
+                   packet2_, packet_data_pointer_);
+
+  length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  VerifyPacketData(1u, length_to_read,
+                   packet3_, packet_data_pointer_);
+
+  // Reading another one shall result in 0 bytes:
+  length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  EXPECT_EQ(0, length_to_read);
+  EXPECT_EQ(kPacketDataLength, packet_data_pointer_ - packet_data_);
+}
+
+// Test with empty data.
+TEST_F(PacketReaderTest, EmptyData) {
+  const int kDataLengthInBytes = 0;
+  WebRtc_UWord8* data = new WebRtc_UWord8[kDataLengthInBytes];
+  reader_->InitializeReading(data, kDataLengthInBytes, kPacketSizeInBytes);
+  EXPECT_EQ(kDataLengthInBytes, reader_->NextPacket(&data));
+  // Do it again to make sure nothing changes
+  EXPECT_EQ(kDataLengthInBytes, reader_->NextPacket(&data));
+  delete[] data;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/trunk/test/testsupport/unittest_utils.h b/trunk/test/testsupport/unittest_utils.h
new file mode 100644
index 0000000..30464de
--- /dev/null
+++ b/trunk/test/testsupport/unittest_utils.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
+#define WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
+
+namespace webrtc {
+namespace test {
+
+const int kPacketSizeInBytes = 1500;
+const int kPacketDataLength = kPacketSizeInBytes * 2 + 1;
+const int kPacketDataNumberOfPackets = 3;
+
+// A base test fixture for packet related tests. Contains
+// two fully prepared packets with 1s, 2s in their data and a third packet with
+// a single 3 in it (size=1).
+// A packet data structure is also available, that contains these three packets
+// in order.
+class PacketRelatedTest: public testing::Test {
+ protected:
+  // Three packet byte arrays with data used for verification:
+  WebRtc_UWord8 packet1_[kPacketSizeInBytes];
+  WebRtc_UWord8 packet2_[kPacketSizeInBytes];
+  WebRtc_UWord8 packet3_[1];
+  // Construct a data structure containing these packets
+  WebRtc_UWord8 packet_data_[kPacketDataLength];
+  WebRtc_UWord8* packet_data_pointer_;
+
+  PacketRelatedTest() {
+    packet_data_pointer_ = packet_data_;
+
+    memset(packet1_, 1, kPacketSizeInBytes);
+    memset(packet2_, 2, kPacketSizeInBytes);
+    memset(packet3_, 3, 1);
+    // Fill the packet_data:
+    memcpy(packet_data_pointer_, packet1_, kPacketSizeInBytes);
+    memcpy(packet_data_pointer_ + kPacketSizeInBytes, packet2_,
+           kPacketSizeInBytes);
+    memcpy(packet_data_pointer_ + kPacketSizeInBytes * 2, packet3_, 1);
+  }
+  virtual ~PacketRelatedTest() {}
+  void SetUp() {}
+  void TearDown() {}
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
diff --git a/trunk/third_party/google-gflags/LICENSE b/trunk/third_party/google-gflags/LICENSE
new file mode 100644
index 0000000..d15b0c2
--- /dev/null
+++ b/trunk/third_party/google-gflags/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2006, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/trunk/third_party/google-gflags/README.webrtc b/trunk/third_party/google-gflags/README.webrtc
new file mode 100644
index 0000000..9a993ce
--- /dev/null
+++ b/trunk/third_party/google-gflags/README.webrtc
@@ -0,0 +1,13 @@
+URL: http://code.google.com/p/google-gflags/
+Version: 1.5
+License: New BSD
+License File: LICENSE
+
+Description:
+The gflags package contains a library that implements commandline
+flags processing. As such it's a replacement for getopt(). It has
+increased flexibility, including built-in support for C++ types like
+string, and the ability to define flags in the source file in which
+they're used.
+
+Local Modifications: None
diff --git a/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags.h b/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags_completions.h b/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/private/config.h b/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/linux/ia32/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/trunk/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags.h b/trunk/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not been updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags_completions.h b/trunk/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/linux/x64/include/private/config.h b/trunk/third_party/google-gflags/gen/arch/linux/x64/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/linux/x64/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags.h b/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not been updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags_completions.h b/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/private/config.h b/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/mac/ia32/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/trunk/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags.h b/trunk/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not been updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags_completions.h b/trunk/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/mac/x64/include/private/config.h b/trunk/third_party/google-gflags/gen/arch/mac/x64/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/mac/x64/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/trunk/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags.h b/trunk/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags.h
new file mode 100644
index 0000000..fdafe2a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags.h
@@ -0,0 +1,601 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 0
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 0
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#if defined(_WIN32)
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG   __declspec(dllexport)
+# endif
+#else
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG
+# endif
+#endif
+
+namespace google {
+
+#if 0      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 0   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 1     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct GFLAGS_DLL_DECL CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern GFLAGS_DLL_DECL void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern GFLAGS_DLL_DECL void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern GFLAGS_DLL_DECL void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern GFLAGS_DLL_DECL std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern GFLAGS_DLL_DECL const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern GFLAGS_DLL_DECL const char* GetArgv();               // all of argv as a string
+extern GFLAGS_DLL_DECL const char* GetArgv0();              // only argv0
+extern GFLAGS_DLL_DECL uint32 GetArgvSum();                 // simple checksum of argv
+extern GFLAGS_DLL_DECL const char* ProgramInvocationName(); // argv0, or "UNKNOWN" if not set
+extern GFLAGS_DLL_DECL const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* ProgramUsage();          // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern GFLAGS_DLL_DECL CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum GFLAGS_DLL_DECL FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not been updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern GFLAGS_DLL_DECL std::string SetCommandLineOption(const char* name, const char* value);
+extern GFLAGS_DLL_DECL std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class GFLAGS_DLL_DECL FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} ;
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern GFLAGS_DLL_DECL std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern GFLAGS_DLL_DECL bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern GFLAGS_DLL_DECL bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern GFLAGS_DLL_DECL bool SaveCommandFlags();  // actually defined in google.cc !
+extern GFLAGS_DLL_DECL bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern GFLAGS_DLL_DECL bool BoolFromEnv(const char *varname, bool defval);
+extern GFLAGS_DLL_DECL int32 Int32FromEnv(const char *varname, int32 defval);
+extern GFLAGS_DLL_DECL int64 Int64FromEnv(const char *varname, int64 defval);
+extern GFLAGS_DLL_DECL uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern GFLAGS_DLL_DECL double DoubleFromEnv(const char *varname, double defval);
+extern GFLAGS_DLL_DECL const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern GFLAGS_DLL_DECL void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern GFLAGS_DLL_DECL void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.
+// Only flags registered since the last parse will be recognized.
+// Any flag value must be provided as part of the argument using "=",
+// not as a separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+extern GFLAGS_DLL_DECL uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern GFLAGS_DLL_DECL void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class GFLAGS_DLL_DECL FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    /* We always want to export defined variables, dll or no */ \
+    GFLAGS_DLL_DEFINE_FLAG type FLAGS_##name = FLAGS_nono##name; \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(                   \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    /* We always want to import declared variables, dll or no */ \
+    extern GFLAGS_DLL_DECLARE_FLAG type FLAGS_##name; \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> GFLAGS_DLL_DECL double IsBoolFlag(const From& from);
+GFLAGS_DLL_DECL bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern GFLAGS_DLL_DECLARE_FLAG ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    GFLAGS_DLL_DEFINE_FLAG clstring& FLAGS_##name = *FLAGS_no##name;        \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags_completions.h b/trunk/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..e97de5b
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags_completions.h
@@ -0,0 +1,130 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#ifndef GFLAGS_DLL_DECL
+# ifdef _WIN32
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# else
+#   define GFLAGS_DLL_DECL
+# endif
+#endif
+
+namespace google {
+
+GFLAGS_DLL_DECL void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/win/ia32/include/private/config.h b/trunk/third_party/google-gflags/gen/arch/win/ia32/include/private/config.h
new file mode 100644
index 0000000..dcca757
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/win/ia32/include/private/config.h
@@ -0,0 +1,139 @@
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Sometimes we accidentally #include this config.h instead of the one
+   in .. -- this is particularly true for msys/mingw, which uses the
+   unix config.h but also runs code in the windows directory.
+   */
+#ifdef __MINGW32__
+#include "../config.h"
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#endif
+
+#ifndef GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#ifndef GFLAGS_DLL_DECL
+# define GFLAGS_IS_A_DLL  1   /* not set if you're statically linking */
+# define GFLAGS_DLL_DECL  __declspec(dllexport)
+# define GFLAGS_DLL_DECL_FOR_UNITTESTS  __declspec(dllimport)
+#endif
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE  ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#undef HAVE_FNMATCH_H
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES  1
+
+/* Define if you have POSIX threads libraries and header files. */
+#undef HAVE_PTHREAD
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV  1
+
+/* Define to 1 if you have the `setenv' function. */
+#undef HAVE_SETENV
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL  1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ  1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* define if your compiler has __attribute__ */
+#undef HAVE___ATTRIBUTE__
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+#undef PTHREAD_CREATE_JOINABLE
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS  1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE  std
+
+/* Version number of package */
+#undef VERSION
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_  }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_  namespace google {
+
+// ---------------------------------------------------------------------
+// Extra stuff not found in config.h.in
+
+// This must be defined before the windows.h is included.  It's needed
+// for mutex.h, to give access to the TryLock method.
+#ifndef _WIN32_WINNT
+# define _WIN32_WINNT 0x0400
+#endif
+
+// TODO(csilvers): include windows/port.h in every relevant source file instead?
+#include "windows/port.h"
+
+#endif  /* GOOGLE_GFLAGS_WINDOWS_CONFIG_H_ */
diff --git a/trunk/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags.h b/trunk/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags.h
new file mode 100644
index 0000000..fdafe2a
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags.h
@@ -0,0 +1,601 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 0
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 0
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#if defined(_WIN32)
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG   __declspec(dllexport)
+# endif
+#else
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG
+# endif
+#endif
+
+namespace google {
+
+#if 0      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 0   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 1     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct GFLAGS_DLL_DECL CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern GFLAGS_DLL_DECL void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern GFLAGS_DLL_DECL void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern GFLAGS_DLL_DECL void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern GFLAGS_DLL_DECL std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern GFLAGS_DLL_DECL const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern GFLAGS_DLL_DECL const char* GetArgv();               // all of argv as a string
+extern GFLAGS_DLL_DECL const char* GetArgv0();              // only argv0
+extern GFLAGS_DLL_DECL uint32 GetArgvSum();                 // simple checksum of argv
+extern GFLAGS_DLL_DECL const char* ProgramInvocationName(); // argv0, or "UNKNOWN" if not set
+extern GFLAGS_DLL_DECL const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* ProgramUsage();          // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern GFLAGS_DLL_DECL CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum GFLAGS_DLL_DECL FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern GFLAGS_DLL_DECL std::string SetCommandLineOption(const char* name, const char* value);
+extern GFLAGS_DLL_DECL std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class GFLAGS_DLL_DECL FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} ;
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern GFLAGS_DLL_DECL std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern GFLAGS_DLL_DECL bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern GFLAGS_DLL_DECL bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern GFLAGS_DLL_DECL bool SaveCommandFlags();  // actually defined in google.cc !
+extern GFLAGS_DLL_DECL bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern GFLAGS_DLL_DECL bool BoolFromEnv(const char *varname, bool defval);
+extern GFLAGS_DLL_DECL int32 Int32FromEnv(const char *varname, int32 defval);
+extern GFLAGS_DLL_DECL int64 Int64FromEnv(const char *varname, int64 defval);
+extern GFLAGS_DLL_DECL uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern GFLAGS_DLL_DECL double DoubleFromEnv(const char *varname, double defval);
+extern GFLAGS_DLL_DECL const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern GFLAGS_DLL_DECL void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern GFLAGS_DLL_DECL void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.
+// Only flags registered since the last parse will be recognized.
+// Any flag value must be provided as part of the argument using "=",
+// not as a separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+extern GFLAGS_DLL_DECL uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern GFLAGS_DLL_DECL void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class GFLAGS_DLL_DECL FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    /* We always want to export defined variables, dll or no */ \
+    GFLAGS_DLL_DEFINE_FLAG type FLAGS_##name = FLAGS_nono##name; \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(                   \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    /* We always want to import declared variables, dll or no */ \
+    extern GFLAGS_DLL_DECLARE_FLAG type FLAGS_##name; \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> GFLAGS_DLL_DECL double IsBoolFlag(const From& from);
+GFLAGS_DLL_DECL bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern GFLAGS_DLL_DECLARE_FLAG ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    GFLAGS_DLL_DEFINE_FLAG clstring& FLAGS_##name = *FLAGS_no##name;        \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags_completions.h b/trunk/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..e97de5b
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags_completions.h
@@ -0,0 +1,130 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#ifndef GFLAGS_DLL_DECL
+# ifdef _WIN32
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# else
+#   define GFLAGS_DLL_DECL
+# endif
+#endif
+
+namespace google {
+
+GFLAGS_DLL_DECL void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/trunk/third_party/google-gflags/gen/arch/win/x64/include/private/config.h b/trunk/third_party/google-gflags/gen/arch/win/x64/include/private/config.h
new file mode 100644
index 0000000..dcca757
--- /dev/null
+++ b/trunk/third_party/google-gflags/gen/arch/win/x64/include/private/config.h
@@ -0,0 +1,139 @@
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Sometimes we accidentally #include this config.h instead of the one
+   in .. -- this is particularly true for msys/mingw, which uses the
+   unix config.h but also runs code in the windows directory.
+   */
+#ifdef __MINGW32__
+#include "../config.h"
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#endif
+
+#ifndef GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#ifndef GFLAGS_DLL_DECL
+# define GFLAGS_IS_A_DLL  1   /* not set if you're statically linking */
+# define GFLAGS_DLL_DECL  __declspec(dllexport)
+# define GFLAGS_DLL_DECL_FOR_UNITTESTS  __declspec(dllimport)
+#endif
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE  ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#undef HAVE_FNMATCH_H
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES  1
+
+/* Define if you have POSIX threads libraries and header files. */
+#undef HAVE_PTHREAD
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV  1
+
+/* Define to 1 if you have the `setenv' function. */
+#undef HAVE_SETENV
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL  1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ  1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* define if your compiler has __attribute__ */
+#undef HAVE___ATTRIBUTE__
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+#undef PTHREAD_CREATE_JOINABLE
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS  1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE  std
+
+/* Version number of package */
+#undef VERSION
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_  }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_  namespace google {
+
+// ---------------------------------------------------------------------
+// Extra stuff not found in config.h.in
+
+// This must be defined before the windows.h is included.  It's needed
+// for mutex.h, to give access to the TryLock method.
+#ifndef _WIN32_WINNT
+# define _WIN32_WINNT 0x0400
+#endif
+
+// TODO(csilvers): include windows/port.h in every relevant source file instead?
+#include "windows/port.h"
+
+#endif  /* GOOGLE_GFLAGS_WINDOWS_CONFIG_H_ */
diff --git a/trunk/third_party/google-gflags/google-gflags.gyp b/trunk/third_party/google-gflags/google-gflags.gyp
new file mode 100644
index 0000000..bd7a3ed
--- /dev/null
+++ b/trunk/third_party/google-gflags/google-gflags.gyp
@@ -0,0 +1,62 @@
+# Copyright 2011 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+  'variables': {
+    'gflags_root': '<(DEPTH)/third_party/google-gflags',
+    'gflags_gen_arch_root': '<(gflags_root)/gen/arch/<(OS)/<(target_arch)',
+  },
+  'targets': [
+    {
+      'target_name': 'google-gflags',
+      'type': '<(library)',
+      'include_dirs': [
+        '<(gflags_gen_arch_root)/include/private',  # For config.h
+        '<(gflags_gen_arch_root)/include',  # For configured files.
+        '<(gflags_root)/src',  # For everything else.
+      ],
+      'defines': [
+        # These macros exist so flags and symbols are properly
+        # exported when building DLLs. Since we don't build DLLs, we
+        # need to disable them.
+        'GFLAGS_DLL_DECL=',
+        'GFLAGS_DLL_DECLARE_FLAG=',
+        'GFLAGS_DLL_DEFINE_FLAG=',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(gflags_gen_arch_root)/include',  # For configured files.
+          '<(gflags_root)/src',  # For everything else.
+        ],
+        'defines': [
+          'GFLAGS_DLL_DECL=',
+          'GFLAGS_DLL_DECLARE_FLAG=',
+          'GFLAGS_DLL_DEFINE_FLAG=',
+        ],
+      },
+      'sources': [
+        'src/gflags.cc',
+        'src/gflags_completions.cc',
+        'src/gflags_reporting.cc',
+      ],
+      'conditions': [
+        ['OS == "win"', {
+          'sources': [
+            'src/windows/port.cc',
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/trunk/third_party_mods/ace/LICENSE b/trunk/third_party_mods/ace/LICENSE
new file mode 100644
index 0000000..9204394
--- /dev/null
+++ b/trunk/third_party_mods/ace/LICENSE
@@ -0,0 +1,66 @@
+Copyright and Licensing Information for ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), 
+and CoSMIC(TM)
+
+ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), and CoSMIC(TM) (henceforth referred to 
+as "DOC software") are copyrighted by Douglas C. Schmidt and his research 
+group at Washington University, University of California, Irvine, and 
+Vanderbilt University, Copyright (c) 1993-2009, all rights reserved. Since DOC 
+software is open-source, freely available software, you are free to use, 
+modify, copy, and distribute--perpetually and irrevocably--the DOC software 
+source code and object code produced from the source, as well as copy and 
+distribute modified versions of this software. You must, however, include this 
+copyright statement along with any code built using DOC software that you 
+release. No copyright statement needs to be provided if you just ship binary 
+executables of your software products.
+You can use DOC software in commercial and/or binary software releases and are 
+under no obligation to redistribute any of your source code that is built 
+using DOC software. Note, however, that you may not misappropriate the DOC 
+software code, such as copyrighting it yourself or claiming authorship of the 
+DOC software code, in a way that will prevent DOC software from being 
+distributed freely using an open-source development model. You needn't inform 
+anyone that you're using DOC software in your software, though we encourage 
+you to let us know so we can promote your project in the DOC software success 
+stories.
+
+The ACE, TAO, CIAO, DAnCE, and CoSMIC web sites are maintained by the DOC 
+Group at the Institute for Software Integrated Systems (ISIS) and the Center 
+for Distributed Object Computing of Washington University, St. Louis for the 
+development of open-source software as part of the open-source software 
+community. Submissions are provided by the submitter ``as is'' with no 
+warranties whatsoever, including any warranty of merchantability, 
+noninfringement of third party intellectual property, or fitness for any 
+particular purpose. In no event shall the submitter be liable for any direct, 
+indirect, special, exemplary, punitive, or consequential damages, including 
+without limitation, lost profits, even if advised of the possibility of such 
+damages. Likewise, DOC software is provided as is with no warranties of any 
+kind, including the warranties of design, merchantability, and fitness for a 
+particular purpose, noninfringement, or arising from a course of dealing, 
+usage or trade practice. Washington University, UC Irvine, Vanderbilt 
+University, their employees, and students shall have no liability with respect 
+to the infringement of copyrights, trade secrets or any patents by DOC 
+software or any part thereof. Moreover, in no event will Washington 
+University, UC Irvine, or Vanderbilt University, their employees, or students 
+be liable for any lost revenue or profits or other special, indirect and 
+consequential damages.
+
+DOC software is provided with no support and without any obligation on the 
+part of Washington University, UC Irvine, Vanderbilt University, their 
+employees, or students to assist in its use, correction, modification, or 
+enhancement. A number of companies around the world provide commercial support 
+for DOC software, however. DOC software is Y2K-compliant, as long as the 
+underlying OS platform is Y2K-compliant. Likewise, DOC software is compliant 
+with the new US daylight savings rule passed by Congress as "The Energy Policy 
+Act of 2005," which established new daylight savings times (DST) rules for the 
+United States that expand DST as of March 2007. Since DOC software obtains 
+time/date and calendaring information from operating systems users will not be 
+affected by the new DST rules as long as they upgrade their operating systems 
+accordingly.
+
+The names ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), CoSMIC(TM), Washington 
+University, UC Irvine, and Vanderbilt University, may not be used to endorse 
+or promote products or services derived from this source without express 
+written permission from Washington University, UC Irvine, or Vanderbilt 
+University. This license grants no permission to call products or services 
+derived from this source ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), or CoSMIC(TM), 
+nor does it grant permission for the name Washington University, UC Irvine, or 
+Vanderbilt University to appear in their names.
\ No newline at end of file
diff --git a/trunk/third_party_mods/chromium/LICENSE b/trunk/third_party_mods/chromium/LICENSE
new file mode 100644
index 0000000..8dc3504
--- /dev/null
+++ b/trunk/third_party_mods/chromium/LICENSE
@@ -0,0 +1,27 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//    * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//    * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//    * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/trunk/third_party_mods/mslpl/LICENSE b/trunk/third_party_mods/mslpl/LICENSE
new file mode 100644
index 0000000..9a3d932
--- /dev/null
+++ b/trunk/third_party_mods/mslpl/LICENSE
@@ -0,0 +1,64 @@
+This license governs use of code marked as “sample” or “example” available on 
+this web site without a license agreement, as provided under the section above 
+titled “NOTICE SPECIFIC TO SOFTWARE AVAILABLE ON THIS WEB SITE.” If you use 
+such code (the “software”), you accept this license. If you do not accept the 
+license, do not use the software.
+
+1. Definitions
+
+The terms “reproduce,” “reproduction,” “derivative works,” and “distribution” 
+have the same meaning here as under U.S. copyright law.
+
+A “contribution” is the original software, or any additions or changes to the 
+software.
+
+A “contributor” is any person that distributes its contribution under this 
+license.
+
+“Licensed patents” are a contributor’s patent claims that read directly on its 
+contribution.
+
+2. Grant of Rights
+
+(A) Copyright Grant - Subject to the terms of this license, including the 
+license conditions and limitations in section 3, each contributor grants you a 
+non-exclusive, worldwide, royalty-free copyright license to reproduce its 
+contribution, prepare derivative works of its contribution, and distribute its 
+contribution or any derivative works that you create.
+
+(B) Patent Grant - Subject to the terms of this license, including the license 
+conditions and limitations in section 3, each contributor grants you a 
+non-exclusive, worldwide, royalty-free license under its licensed patents to 
+make, have made, use, sell, offer for sale, import, and/or otherwise dispose 
+of its contribution in the software or derivative works of the contribution in 
+the software.
+
+3. Conditions and Limitations
+
+(A) No Trademark License- This license does not grant you rights to use any 
+contributors’ name, logo, or trademarks.
+
+(B) If you bring a patent claim against any contributor over patents that you 
+claim are infringed by the software, your patent license from such contributor 
+to the software ends automatically.
+
+(C) If you distribute any portion of the software, you must retain all 
+copyright, patent, trademark, and attribution notices that are present in the 
+software.
+
+(D) If you distribute any portion of the software in source code form, you may 
+do so only under this license by including a complete copy of this license 
+with your distribution. If you distribute any portion of the software in 
+compiled or object code form, you may only do so under a license that complies 
+with this license.
+
+(E) The software is licensed “as-is.” You bear the risk of using it. The 
+contributors give no express warranties, guarantees or conditions. You may 
+have additional consumer rights under your local laws which this license 
+cannot change. To the extent permitted under your local laws, the contributors 
+exclude the implied warranties of merchantability, fitness for a particular 
+purpose and non-infringement.
+
+(F) Platform Limitation - The licenses granted in sections 2(A) and 2(B) 
+extend only to the software or derivative works that you create that run on a 
+Microsoft Windows operating system product.
diff --git a/trunk/third_party_mods/sqrt_floor/LICENSE b/trunk/third_party_mods/sqrt_floor/LICENSE
new file mode 100644
index 0000000..e24dfe7
--- /dev/null
+++ b/trunk/third_party_mods/sqrt_floor/LICENSE
@@ -0,0 +1,26 @@
+The following email record is related to source files spl_sqrt_floor.c
+and spl_sqrt_floor.s in trunk/src/common_audio/signal_processing/.
+
+
+From: Wilco Dijkstra <Wilco.Dijkstra@ntlworld.com>
+Date: Fri, Jun 24, 2011 at 3:20 AM
+Subject: Re: sqrt routine
+To: Kevin Ma <kma@google.com>
+Hi Kevin,
+Thanks for asking. Those routines are public domain (originally posted to 
+comp.sys.arm a long time ago), so you can use them freely for any purpose.
+Cheers,
+Wilco
+
+----- Original Message -----
+From: "Kevin Ma" <kma@google.com>
+To: <Wilco.Dijkstra@ntlworld.com>
+Sent: Thursday, June 23, 2011 11:44 PM
+Subject: Fwd: sqrt routine
+Hi Wilco,
+I saw your sqrt routine from several web sites, including
+http://www.finesse.demon.co.uk/steven/sqrt.html.
+Just wonder if there's any copyright information with your Successive
+approximation routines, or if I can freely use it for any purpose.
+Thanks.
+Kevin
diff --git a/trunk/tools/continuous_build/OWNERS b/trunk/tools/continuous_build/OWNERS
new file mode 100644
index 0000000..60e166c
--- /dev/null
+++ b/trunk/tools/continuous_build/OWNERS
@@ -0,0 +1,3 @@
+ivinnichenko@webrtc.org
+phoglund@webrtc.org
+kjellander@webrtc.org
diff --git a/trunk/tools/continuous_build/clean_old_files.py b/trunk/tools/continuous_build/clean_old_files.py
new file mode 100755
index 0000000..e87526e
--- /dev/null
+++ b/trunk/tools/continuous_build/clean_old_files.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = "ivinnichenko@webrtc.org (Illya Vinnichenko)"
+
+"""This script will prune sufficiently old files and empty directories.
+
+   The algorithm is to look into the provided directory and delete any files
+   that are older than x days, recursively. Then all empty directories will be
+   deleted (we can't look at timestamps there since the act of deleting a file
+   will refresh the directory's timestamp).
+
+   Note: This script has only been tested on Linux.
+"""
+
+from optparse import OptionParser
+import os
+import sys
+import time
+
+# The path is considered whitelisted if any of these entries appear
+# at some point in the path
+WHITELIST = ["buildbot.tac", "master.cfg", "public_html", "changes.pck",
+             "webrtc_buildbot"]
+
+
+def is_whitelisted(path):
+  """Check if file is whitelisted.
+
+    Args:
+      path: file path.
+  """
+  for entry in WHITELIST:
+    if entry in path:
+      return True
+  return False
+
+
+def delete_directory(directory):
+  try:
+    os.rmdir(directory)
+    return True
+  except OSError as exception:
+    # The directory probably contains newer files.
+    print "Could not remove directory %s: reason %s." % (directory, exception)
+  return False
+
+
+def delete_file(file):
+  try:
+    os.remove(file)
+  except OSError as exception:
+    print "Unexpectedly failed to remove file %s: reason %s." % (file,
+                                                                 exception)
+
+
+def log_removal(file_or_directory, time_stamp, verbose):
+  if verbose:
+    str_stamp = time.strftime("%a, %d %b %Y %H:%M:%S +0000",
+                              time.gmtime(time_stamp))
+    print "Removing [%s], stamped on %s" % (file_or_directory, str_stamp)
+
+
+def remove_old_files_and_directories(path, num_days, verbose, skip_dirs):
+  """Removes all files under path that are older than num_days days.
+     The algorithm also tries to delete all directories, except for those that
+     contain files that are sufficiently new.
+
+     Implementation note: it doesn't make sense to look at timestamps for
+     directories since their timestamps are updated when a file is deleted.
+
+    Args:
+      path: The starting point.
+      num_days: days limit for removal.
+      verbose: print every cmd?
+  """
+  current_time = time.time()
+  limit = 60 * 60 * 24 * num_days
+
+  # Walk bottom-up so directories are deleted in the right order.
+  for root, directories, files in os.walk(path, topdown=False):
+    for file in files:
+      current_file = os.path.join(root, file)
+      time_stamp = os.stat(current_file).st_mtime
+
+      if is_whitelisted(current_file):
+        continue
+
+      if (current_time - time_stamp) > limit:
+        delete_file(current_file)
+        log_removal(current_file, time_stamp, verbose)
+
+    if not skip_dirs:
+      for directory in directories:
+        current_directory = os.path.join(root, directory)
+        time_stamp = os.stat(current_directory).st_mtime
+        if delete_directory(current_directory):
+          log_removal(current_directory, time_stamp, verbose)
+
+
+def main():
+  usage = "usage: %prog -p <base path> -n <number of days> [-q] [-d]"
+  parser = OptionParser(usage)
+  parser.add_option("-p", "--path", dest="cleanup_path", help="base directory")
+  parser.add_option("-n", "--num_days", dest="num_days", help="number of days")
+  parser.add_option("-q", "--quiet",
+                    action="store_false", dest="verbose", default=True,
+                    help="don't print status messages to stdout")
+  parser.add_option("-d", "--delete-dirs-too",
+                    action="store_false", dest="skip_dirs", default=True,
+                    help="also delete sufficiently old, empty directories")
+
+  options, args = parser.parse_args()
+  if not options.cleanup_path:
+    print "You must specify base directory"
+    sys.exit(2)
+  if not options.num_days:
+    print "You must specify number of days old"
+    sys.exit(2)
+
+  if options.verbose:
+    print "Cleaning up everything in %s older than %s days" % (
+        options.cleanup_path, options.num_days)
+  remove_old_files_and_directories(options.cleanup_path, int(options.num_days),
+                                   options.verbose, options.skip_dirs)
+
+if __name__ == "__main__":
+  main()
diff --git a/trunk/tools/continuous_build/clean_third_party_gcda.sh b/trunk/tools/continuous_build/clean_third_party_gcda.sh
new file mode 100755
index 0000000..38547d8
--- /dev/null
+++ b/trunk/tools/continuous_build/clean_third_party_gcda.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+#
+
+# This script removes all .gcda files from third_party in order to work around
+# a bug in LCOV (this should also increase the bot speed).
+find . -name "*.gcda" -path "*/third_party/*" | xargs rm -f
diff --git a/trunk/tools/continuous_build/generate_coverage_html.sh b/trunk/tools/continuous_build/generate_coverage_html.sh
new file mode 100755
index 0000000..6b4c175
--- /dev/null
+++ b/trunk/tools/continuous_build/generate_coverage_html.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+#
+
+# Generates an LCOV coverage report and makes the results readable to all.
+
+genhtml $1 --output-directory $2
+if [ "$?" -ne "0" ]; then
+  exit 1
+fi
+
+chmod -R 777 $2
diff --git a/trunk/tools/continuous_build/master.cfg b/trunk/tools/continuous_build/master.cfg
new file mode 100755
index 0000000..aaca7ff
--- /dev/null
+++ b/trunk/tools/continuous_build/master.cfg
@@ -0,0 +1,414 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'ivinnichenko@webrtc.org (Illya Vinnichenko)'
+
+# This is a buildmaster config file for WebRTC project. It must be installed as
+# 'master.cfg' in your buildmaster's base directory (although the filename
+# can be changed with the --basedir option to 'mktap buildbot master').
+
+# It has one job: define a dictionary named BuildmasterConfig. This
+# dictionary has a variety of keys to control different aspects of the
+# buildmaster. They are documented in docs/config.xhtml .
+
+# This is the dictionary that the buildmaster pays attention to. We also use
+# a shorter alias to save typing.
+c = BuildmasterConfig = {}
+
+####### BUILDSLAVES
+
+# the 'slaves' list defines the set of allowable buildslaves. Each element is
+# a BuildSlave object, which is created with bot-name, bot-password.  These
+# correspond to values given to the buildslave's mktap invocation.
+
+from buildbot.buildslave import BuildSlave
+
+c['slaves'] = [BuildSlave('android', 'pass', max_builds=1),
+               BuildSlave('chromeos', 'pass', max_builds=1),
+               BuildSlave('linux-chrome', 'pass', max_builds=1),
+               BuildSlave('linux-clang', 'pass', max_builds=1),
+               BuildSlave('linux-slave-1', 'pass', max_builds=1),
+               BuildSlave('linux-slave-2', 'pass', max_builds=1),
+               BuildSlave('linux-slave-4', 'pass', max_builds=1),
+               BuildSlave('linux-slave-5', 'pass', max_builds=1),
+               BuildSlave('linux-slave-gcc-4.6', 'pass', max_builds=1),
+               BuildSlave('linux-valgrind', 'pass', max_builds=1),
+               BuildSlave('linux_video', 'pass', max_builds=1),
+               BuildSlave('mac-slave-3', 'pass', max_builds=1),
+               BuildSlave('mac-slave-2', 'pass', max_builds=1),
+               BuildSlave('win-slave-1', 'pass', max_builds=1),
+               BuildSlave('win-slave-2', 'pass', max_builds=1)]
+
+# 'slavePortnum' defines the TCP port to listen on. This must match the value
+# configured into the buildslaves (with their --master option)
+
+c['slavePortnum'] = 9989
+
+####### CHANGESOURCES
+
+# the 'change_source' setting tells the buildmaster how it should find out
+# about source code changes. Any class which implements IChangeSource can be
+# put here: there are several in buildbot/changes/*.py to choose from.
+
+from buildbot.changes.pb import PBChangeSource
+from buildbot.changes.svnpoller import SVNPoller
+
+source_code_svn_url = 'http://webrtc.googlecode.com/svn/trunk'
+svn_poller = SVNPoller(svnurl=source_code_svn_url, pollinterval=5*60,
+                       histmax=10, svnbin='/usr/bin/svn')
+c['change_source'] = svn_poller
+
+####### SCHEDULERS
+
+## configure the Schedulers
+
+from buildbot.scheduler import Scheduler
+webrtc_scheduler = Scheduler(name='all', branch=None, treeStableTimer=5*60,
+                             builderNames=['Win32Debug',
+                                           'Win32Release',
+                                           'MacOS32DBG',
+                                           'MacOS32Release',
+                                           'Linux32DBG',
+                                           'Linux32Release',
+                                           'Linux64DBG',
+                                           'Linux64Release',
+                                           'LinuxClang',
+                                           'LinuxValgrind',
+                                           'Linux64DBG-GCC4.6',
+                                           'LinuxVideoTest',
+                                           'Android',
+                                           'ChromeOS'
+                                           ])
+chrome_scheduler = Scheduler(name='chrome', branch=None, treeStableTimer=60*60,
+                             builderNames=['Chrome'])
+
+c['schedulers'] = [webrtc_scheduler, chrome_scheduler]
+
+####### TESTS
+# Tests to run on Virtual machine bots and our Mac hardware.
+# Defines the supported tests followed by a tuple defining if the tests are
+# enabled on Linux, Mac and/or Windows (in that order; defined in utils.py).
+
+from webrtc_buildbot import utils
+
+NORMAL_TESTS = {
+    # Test name                        Linux  Mac   Windows
+    'audio_coding_module_test':        (True, True, True),
+    'audio_coding_unittests':          (True, True, True),
+    'audio_conference_mixer_unittests':(True, True, True),
+    'audio_device_test_api':           (True, False, False), # Issue 257
+    'audioproc_unittest':              (True, True, True),
+    'cng_unittests':                   (True, True, True),
+    'g711_unittests':                  (True, True, True),
+    'g722_unittests':                  (True, True, True),
+    'libyuv_unittests':                (True, True, True),
+    'jpeg_unittests':                  (True, True, True),
+    'media_file_unittests':            (True, True, True),
+    'metrics_unittests':               (True, True, True),
+    'neteq_unittests':                 (True, True, True),
+    'pcm16b_unittests':                (True, True, True),
+    'resampler_unittests':             (True, True, True),
+    'rtp_rtcp_unittests':              (True, True, True),
+    'signal_processing_unittests':     (True, True, True),
+    'system_wrappers_unittests':       (True, True, True),
+    'test_bwe':                        (True, True, True),
+    'test_fec':                        (True, True, True),
+    'test_support_unittests':          (True, True, True),
+    'udp_transport_unittests':         (True, True, True),
+    'vad_unittests':                   (True, True, True),
+    'video_codecs_test_framework_integrationtests': (True, True, True),
+    'video_codecs_test_framework_unittests':        (True, True, True),
+    'video_coding_unittests':          (True, True, True),
+    'video_engine_core_unittests':     (True, True, True),
+    'video_processing_unittests':      (True, True, True),
+    'voice_engine_unittests':          (True, True, True),
+    'vp8_unittests':                   (True, False, True), # Issue 273.
+    'webrtc_utility_unittests':        (True, True, False),
+}
+
+PHYSICAL_MACHINE_TESTS = {
+    # Test name                        Linux  Mac   Windows
+    'video_render_module_test':        (True, False, False),
+    'vie_auto_test':                   (True, False, False),
+    'voe_auto_test':                   (True, False, False),
+}
+
+VALGRIND_DISABLED_TESTS = [
+    'audio_coding_module_test', # Issue 270
+    'test_fec',                 # Too slow for Valgrind
+]
+
+linux_normal_tests = utils.GetEnabledTests(NORMAL_TESTS, 'Linux')
+mac_normal_tests = utils.GetEnabledTests(NORMAL_TESTS, 'Mac')
+windows_normal_tests = utils.GetEnabledTests(NORMAL_TESTS, 'Windows')
+
+linux_physical_machine_tests = utils.GetEnabledTests(PHYSICAL_MACHINE_TESTS,
+                                                     'Linux')
+mac_physical_machine_tests = utils.GetEnabledTests(PHYSICAL_MACHINE_TESTS,
+                                                   'Mac')
+windows_physical_machine_tests = utils.GetEnabledTests(PHYSICAL_MACHINE_TESTS,
+                                                       'Windows')
+
+####### FACTORIES
+# Factories defines how the build is run and can be used in multiple instances
+# on multiple machines, depending on how many builders are defined.
+
+from buildbot.process import factory
+from buildbot.steps import shell
+
+############# Linux factories #######################################
+linux_factory_64_dbg = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_factory_64_dbg'))
+linux_factory_64_dbg.EnableBuild()
+linux_factory_64_dbg.EnableTests(linux_normal_tests)
+
+linux_factory_32_release = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_factory_32_release'))
+linux_factory_32_release.EnableBuild(release=True, build32=True)
+linux_factory_32_release.EnableTests(linux_normal_tests)
+
+linux_factory_64_release = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_factory_64_release'))
+linux_factory_64_release.EnableBuild(release=True)
+linux_factory_64_release.EnableTests(linux_normal_tests)
+
+linux_factory_32_dbg = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_factory_32_dbg'))
+linux_factory_32_dbg.EnableCoverage(
+    coverage_url='http://webrtc-cb-linux-slave-4.cbf.corp.google.com/coverage/')
+linux_factory_32_dbg.EnableBuild(build32=True)
+linux_factory_32_dbg.EnableTests(linux_normal_tests)
+
+linux_factory_video = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_factory_video'))
+linux_factory_video.EnableCoverage(
+    coverage_url='http://webrtc-build-bot-se.lul/coverage/')
+linux_factory_video.EnableBuild()
+linux_factory_video.EnableTests(linux_physical_machine_tests)
+
+chromeos_factory = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('chromeos_factory'))
+chromeos_factory.EnableBuild(chrome_os=True)
+chromeos_factory.EnableTests(linux_normal_tests)
+
+linux_chrome_factory = utils.WebRTCChromeFactory(
+    utils.BuildStatusOracle('linux_chrome'))
+linux_chrome_factory.EnableBuild()
+
+linux_clang = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_clang'))
+linux_clang.EnableBuild(clang=True)
+linux_clang.EnableTests(linux_normal_tests)
+
+linux_valgrind = utils.WebRTCLinuxFactory(
+    utils.BuildStatusOracle('linux_valgrind'), valgrind_enabled=True)
+linux_valgrind.EnableBuild(release=True)
+# Filter out disabled Valgrind tests:
+valgrind_tests = filter(lambda test: test not in VALGRIND_DISABLED_TESTS,
+                        linux_normal_tests)
+linux_valgrind.EnableTests(valgrind_tests)
+
+android_factory = utils.WebRTCAndroidFactory(
+    utils.BuildStatusOracle('android_factory'))
+android_factory.EnableBuild(product='toro')
+
+############## Mac factories #######################################
+mac_factory_32_dbg = utils.WebRTCMacFactory(
+    utils.BuildStatusOracle('mac_factory_32_dbg'))
+mac_factory_32_dbg.EnableBuild(build_type='both')
+mac_factory_32_dbg.EnableTests(mac_normal_tests)
+
+mac_factory_32_release = utils.WebRTCMacFactory(
+    utils.BuildStatusOracle('mac_factory_32_release'))
+mac_factory_32_release.EnableBuild(build_type='both', release=True)
+mac_factory_32_release.EnableTests(mac_normal_tests)
+
+############# Windows factories #######################################
+win_factory_32_Debug = utils.WebRTCWinFactory(
+    utils.BuildStatusOracle('win_factory_32_debug'))
+win_factory_32_Debug.EnableBuild(configuration='Debug')
+win_factory_32_Debug.EnableTests(windows_normal_tests)
+
+win_factory_32_Release = utils.WebRTCWinFactory(
+    utils.BuildStatusOracle('win_factory_32_release'))
+win_factory_32_Release.EnableBuild(configuration='Release')
+win_factory_32_Release.EnableTests(windows_normal_tests)
+
+####### BUILDERS
+
+# the 'builders' list defines the Builders. Each one is configured with a
+# dictionary, using the following keys:
+#  name (required): the name used to describe this builder
+#  slavename (required): which slave to use (must appear in c['bots'])
+#  builddir (required): which subdirectory to run the builder in
+#  factory (required): a BuildFactory to define how the build is run
+#  periodicBuildTime (optional): if set, force a build every N seconds
+
+linux_builder_64_debug = {
+      'name': 'Linux64DBG',
+      'slavename': 'linux-slave-1',
+      'builddir': 'linux-slave-1',
+      'factory': linux_factory_64_dbg,
+      }
+linux_builder_32_release = {
+      'name': 'Linux32Release',
+      'slavename': 'linux-slave-2',
+      'builddir': 'linux-slave-2',
+      'factory': linux_factory_32_release,
+      }
+linux_builder_64_release = {
+      'name': 'Linux64Release',
+      'slavename': 'linux-slave-5',
+      'builddir': 'linux-slave-5',
+      'factory': linux_factory_64_release,
+      }
+linux_builder_32_debug = {
+      'name': 'Linux32DBG',
+      'slavename': 'linux-slave-4',
+      'builddir': 'linux-slave-4',
+      'factory': linux_factory_32_dbg,
+      }
+mac_builder_32_debug = {
+      'name': 'MacOS32DBG',
+      'slavename': 'mac-slave-3',
+      'builddir': 'mac-slave-3',
+      'factory': mac_factory_32_dbg,
+      }
+mac_builder_32_release = {
+      'name': 'MacOS32Release',
+      'slavename': 'mac-slave-2',
+      'builddir': 'mac-slave-2',
+      'factory': mac_factory_32_release,
+      }
+chromeos_builder = {
+      'name': 'ChromeOS',
+      'slavename': 'chromeos',
+      'builddir': 'chromeos',
+      'factory': chromeos_factory,
+      }
+win_builder_32_debug = {
+      'name': 'Win32Debug',
+      'slavename': 'win-slave-1',
+      'builddir': 'win-32-dbg',
+      'factory': win_factory_32_Debug,
+      }
+win_builder_32_release = {
+      'name': 'Win32Release',
+      'slavename': 'win-slave-2',
+      'builddir': 'win-32-release',
+      'factory': win_factory_32_Release,
+      }
+linux_builder_video = {
+      'name': 'LinuxVideoTest',
+      'slavename': 'linux_video',
+      'builddir': 'video',
+      'factory': linux_factory_video,
+      }
+linux_builder_chrome = {
+      'name': "Chrome",
+      'slavename': "linux-chrome",
+      'builddir': "linux-chrome",
+      'factory': linux_chrome_factory,
+      }
+linux_builder_clang = {
+      'name': 'LinuxClang',
+      'slavename': 'linux-clang',
+      'builddir': 'linux-clang',
+      'factory': linux_clang,
+      }
+linux_builder_valgrind = {
+      'name': 'LinuxValgrind',
+      'slavename': 'linux-valgrind',
+      'builddir': 'linux-valgrind',
+      'factory': linux_valgrind,
+      }
+android_builder_1 = {
+      'name': 'Android',
+      'slavename': 'android',
+      'builddir': 'android',
+      'factory': android_factory,
+      }
+linux_builder_gcc_4_6 = {
+      'name': 'Linux64DBG-GCC4.6',
+      'slavename': 'linux-slave-gcc-4.6',
+      'builddir': 'linux-slave-gcc-4.6',
+      'factory': linux_factory_64_dbg,
+      }
+c['builders'] = [
+      win_builder_32_debug,
+      win_builder_32_release,
+      mac_builder_32_debug,
+      mac_builder_32_release,
+      linux_builder_32_debug,
+      linux_builder_32_release,
+      linux_builder_64_release,
+      linux_builder_64_debug,
+      linux_builder_clang,
+      linux_builder_valgrind,
+      linux_builder_gcc_4_6,
+      linux_builder_video,
+      android_builder_1,
+      chromeos_builder,
+      linux_builder_chrome,
+      ]
+
+####### STATUS TARGETS
+
+# 'status' is a list of Status Targets. The results of each build will be
+# pushed to these targets. buildbot/status/*.py has a variety to choose from,
+# including web pages, email senders, and IRC bots.
+
+from buildbot.status import html
+from buildbot.status import mail
+
+web_page = html.WebStatus(http_port=8010, allowForce=True)
+email_notification = mail.MailNotifier(
+    fromaddr='webrtc-cb-watchlist@google.com',
+    extraRecipients=['webrtc-cb-watchlist@google.com'],
+    sendToInterestedUsers=True,
+    mode='failing')
+c['status'] = [web_page, email_notification]
+
+####### DEBUGGING OPTIONS
+
+# if you set 'debugPassword', then you can connect to the buildmaster with
+# the diagnostic tool in contrib/debugclient.py . From this tool, you can
+# manually force builds and inject changes, which may be useful for testing
+# your buildmaster without actually committing changes to your repository (or
+# before you have a functioning 'sources' set up). The debug tool uses the
+# same port number as the slaves do: 'slavePortnum'.
+
+#c['debugPassword'] = 'debugpassword'
+
+# if you set 'manhole', you can ssh into the buildmaster and get an
+# interactive python shell, which may be useful for debugging buildbot
+# internals. It is probably only useful for buildbot developers. You can also
+# use an authorized_keys file, or plain telnet.
+#from buildbot import manhole
+#c['manhole'] = manhole.PasswordManhole('tcp:9999:interface=127.0.0.1',
+#                                       'admin', 'password')
+
+
+####### PROJECT IDENTITY
+
+# the 'projectName' string will be used to describe the project that this
+# buildbot is working on. For example, it is used as the title of the
+# waterfall HTML page. The 'projectURL' string will be used to provide a link
+# from buildbot HTML pages to your project's home page.
+
+c['projectName'] = 'WebRTC'
+c['projectURL'] = 'http://www.webrtc.org'
+
+# the 'buildbotURL' string should point to the location where the buildbot's
+# internal web server (usually the html.Waterfall page) is visible. This
+# typically uses the port number set in the Waterfall 'status' entry, but
+# with an externally-visible host name which the buildbot cannot figure out
+# without some help.
+
+c['buildbotURL'] = 'http://webrtc-cb-linux-master.cbf.corp.google.com:8010/'
diff --git a/trunk/tools/continuous_build/public_html/README b/trunk/tools/continuous_build/public_html/README
new file mode 100644
index 0000000..5f59005
--- /dev/null
+++ b/trunk/tools/continuous_build/public_html/README
@@ -0,0 +1,18 @@
+Modify the look of the buildbot master's waterfall page
+-------------------------------------------------------
+
+To deploy these changes, follow these instructions:
+
+* Copy the favicon.ico file to master/public_html/ on the buildbot master
+  machine.
+
+* Edit the master/public_html/index.html and add the following to the <head>
+  element:
+  <link rel="shortcut icon" href="/favicon.ico" type="image/png" />
+  <link rel="icon" href="/favicon.ico" type="image/png" />
+
+* Edit the master/public_html/buildbot.css and change the section with
+  .LastBuild, .Activity {
+	  width: 230px;
+
+  to be 160px instead. Save the file and exit.
diff --git a/trunk/tools/continuous_build/public_html/favicon.ico b/trunk/tools/continuous_build/public_html/favicon.ico
new file mode 100644
index 0000000..324a6ca
--- /dev/null
+++ b/trunk/tools/continuous_build/public_html/favicon.ico
Binary files differ
diff --git a/trunk/tools/continuous_build/webrtc_buildbot/__init__.py b/trunk/tools/continuous_build/webrtc_buildbot/__init__.py
new file mode 100755
index 0000000..869bd84
--- /dev/null
+++ b/trunk/tools/continuous_build/webrtc_buildbot/__init__.py
@@ -0,0 +1,2 @@
+# -*- python -*-
+# ex: set syntax=python:
diff --git a/trunk/tools/continuous_build/webrtc_buildbot/utils.py b/trunk/tools/continuous_build/webrtc_buildbot/utils.py
new file mode 100755
index 0000000..271096c
--- /dev/null
+++ b/trunk/tools/continuous_build/webrtc_buildbot/utils.py
@@ -0,0 +1,789 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'ivinnichenko@webrtc.org (Illya Vinnichenko)'
+
+import buildbot
+import os
+import sys
+import urlparse
+from buildbot.process import factory
+from buildbot.process import properties
+from buildbot.process.properties import WithProperties
+from buildbot.steps.shell import ShellCommand
+
# Defines the order of the booleans of the supported platforms in the test
# dictionaries in master.cfg.
SUPPORTED_PLATFORMS = ('Linux', 'Mac', 'Windows')

# Subversion location the bots check out and gclient-configure.
SVN_LOCATION = 'http://webrtc.googlecode.com/svn/trunk'
# Command prefix prepended to test commands on Valgrind-enabled bots.
VALGRIND_CMD = ['tools/valgrind-webrtc/webrtc_tests.sh', '-t', 'cmdline']

# Where coverage HTML is written; assumed to live under the slave's web
# server document root so reports are reachable by URL -- TODO confirm.
DEFAULT_COVERAGE_DIR = '/var/www/coverage/'
# Directory (on the master) where BuildStatusOracle keeps its marker files.
DEFAULT_MASTER_WORK_DIR = '.'
# How many times a failed 'gclient sync' is retried before giving up.
GCLIENT_RETRIES = 3

# Copied from trunk/tools/build/scripts/master/factory/chromium_factory.py
# but converted to a list since we set defines instead of using an environment
# variable.
#
# On valgrind bots, override the optimizer settings so we don't inline too
# much and make the stacks harder to figure out. Use the same settings
# on all buildbot masters to make it easier to move bots.
MEMORY_TOOLS_GYP_DEFINES = [
    # GCC flags
    'mac_debug_optimization=1 ',
    'mac_release_optimization=1 ',
    'release_optimize=1 ',
    'no_gc_sections=1 ',
    'debug_extra_cflags="-g -fno-inline -fno-omit-frame-pointer '
    '-fno-builtin -fno-optimize-sibling-calls" ',
    'release_extra_cflags="-g -fno-inline -fno-omit-frame-pointer '
    '-fno-builtin -fno-optimize-sibling-calls" ',
    # MSVS flags
    'win_debug_RuntimeChecks=0 ',
    'win_debug_disable_iterator_debugging=1 ',
    'win_debug_Optimization=1 ',
    'win_debug_InlineFunctionExpansion=0 ',
    'win_release_InlineFunctionExpansion=0 ',
    'win_release_OmitFramePointers=0 ',

    'linux_use_tcmalloc=1 ',
    'release_valgrind_build=1 ',
    'werror= ',
]
+
+
class WebRTCFactory(factory.BuildFactory):
  """Abstract superclass for all build factories.

     A build factory defines a sequence of steps to take in a build process.
     This class provides some helper methods and some abstract methods that
     can be overridden to create customized build sequences.
  """

  def __init__(self, build_status_oracle):
    """Creates the abstract factory.

       Args:
         build_status_oracle: An instance of BuildStatusOracle which is used to
             keep track of our build state.
    """
    factory.BuildFactory.__init__(self)

    self.build_status_oracle = build_status_oracle
    self.properties = properties.Properties()
    # Extra GYP parameters subclasses accumulate (e.g. -Dcoverage=1).
    self.gyp_params = []
    self.release = False

  def EnableBuild(self):
    """Adds steps for building WebRTC [must be overridden].

       Implementations of this method must add clean and build steps so that
       when all steps have been run, we have an up-to-date, complete and correct
       build of WebRTC for the platform. It is up to the method how to do this.
    """
    pass

  def EnableTests(self, tests):
    """Adds test run steps for all tests in the list.

       This method must be run after enabling the build.

       Args:
         tests: list of test to be run.
    """
    for test in tests:
      self.EnableTest(test)

  def AddCommonStep(self, cmd, descriptor='', workdir='build',
                    number_of_retries=0, halt_build_on_failure=True,
                    warn_on_failure=False):
    """Adds a step which will run as a shell command on the slave.

       NOTE: you are recommended to use this method to add new shell commands
       instead of the base-class addStep method, since steps added here will
       work with the smart-clean system (e.g. only do a full rebuild if the
       previous build failed). Steps handled outside this method will not lead
       to a full rebuild on the next build if they fail.

       Args:
         cmd: The command to run. This command follows the contract for
             ShellCommand, so see that documentation for more details.
         descriptor: A string, or a list of strings, describing what the step
             does. The descriptor gets printed in the waterfall display.
         workdir: The working directory to run the command in.
         number_of_retries: Number of times to retry the command, if it fails.
         halt_build_on_failure: Stops the build dead in its tracks if this step
             fails. Use for critical steps. This option does not make sense with
             warn_on_failure.
         warn_on_failure: If true, this step isn't that important and will not
             cause a failed build on failure.
    """
    flunk_on_failure = not warn_on_failure

    # isinstance instead of 'type(...) is str' so str subclasses work too.
    if isinstance(descriptor, str):
      descriptor = [descriptor]
    # Add spaces to wrap long test names to make waterfall output more compact.
    wrapped_text = self._WrapLongLines(descriptor)

    self.addStep(MonitoredRetryingShellCommand(
        build_status_oracle=self.build_status_oracle,
        number_of_retries=number_of_retries,
        command=cmd,
        workdir=workdir,
        description=wrapped_text + ['running...'],
        descriptionDone=wrapped_text,
        warnOnFailure=warn_on_failure,
        flunkOnFailure=flunk_on_failure,
        haltOnFailure=halt_build_on_failure,
        name='_'.join(descriptor)))

  def AddSmartCleanStep(self):
    """Adds a smart clean step.

       Smart clean only cleans the whole repository if the build status oracle
       thinks the last build failed. Otherwise it cleans just the build output.
    """
    self.addStep(SmartClean(self.build_status_oracle))

  def AddCommonTestRunStep(self, test, descriptor='', cmd=None,
                           workdir='build/trunk'):
    """Adds a step for running a single test [must be overridden].

       In general, failing tests should not halt the build and allow other tests
       to execute. A failing test should fail, or 'flunk', the build though.

       Implementations of this method must add new steps through AddCommonStep
       and not by calling addStep.

       Args:
         test: The test binary name. The step will attempt to execute this
             binary in the binary output folder, except if the cmd argument is
             defined (in that case, we will run cmd instead and just use the
             test name in the descriptor).
         descriptor: This should either be a string or a list of strings. The
             descriptor or descriptors are appended to the test name and
             displayed in the waterfall.
         cmd: If necessary, you can specify this argument to override the
             default behavior, which is to just run the binary specified in
             test without arguments.
         workdir: The base working directory to run the command in. This
             directory will map to the WebRTC project root, e.g. the trunk
             directory. This method will make sure that the test binary is run
             in the correct output directory for the platform.
    """
    pass

  def EnableTest(self, test):
    """Makes a test run in the build sequence. May be overridden.

       Override to handle special cases for specific platforms, for instance if
       a particular test binary requires command line arguments.

       Args:
           test: The test name to enable.
    """
    self.AddCommonTestRunStep(test)

  def AddGclientSyncStep(self, force_sync):
    """Helper method for invoking gclient sync. Will retry if the operation
       fails.

       Args:
           force_sync: If the sync should be forced, i.e. update even for
           unchanged modules (known to be required for Windows sometimes).
    """
    cmd = ['gclient', 'sync']
    if force_sync:
      cmd.append('--force')
    self.AddCommonStep(cmd, descriptor='Sync',
                       number_of_retries=GCLIENT_RETRIES)

  def AddCommonGYPStep(self, gyp_file, gyp_params=None, descriptor='gyp'):
    """Helper method for invoking GYP on WebRTC.

       GYP will generate makefiles or its equivalent in a platform-specific
       manner. A failed GYP step will halt the build.

       Implementations of this method must add new steps through AddCommonStep
       and not by calling addStep.

       Args:
         gyp_file: The root GYP file to use.
         gyp_params: Custom GYP parameters (same semantics as the GYP_PARAMS
             environment variable). Defaults to no extra parameters. A None
             default is used rather than a mutable [] default so one list
             instance isn't silently shared between calls.
         descriptor: The descriptor to use for the step.
    """
    cmd = ['./build/gyp_chromium', '--depth=.', gyp_file]
    cmd += (gyp_params or []) + self.gyp_params
    self.AddCommonStep(cmd=cmd, workdir='build/trunk', descriptor=descriptor)

  def _WrapLongLines(self, string_list, max_line_length=25, wrap_character='_'):
    """ Creates a list with wrapped strings for lines that are too long.

       This is done by inserting spaces to long lines with the wrap character
       in. It's a simple way to make long test targets wrap nicer in the
       waterfall display.

       This method should only be used for lists that are displayed in the web
       interface!

       Args:
           string_list: List of strings where each string represents one line.
           max_line_length: Number of characters a line may have to avoid
             getting wrapped.
           wrap_character: The character we're looking for when inserting a
             space if a string is larger than max_line_length. If no such
             character is found, no space will be inserted.
        Returns:
            A new list of the same length as the input list, but with strings
            that may contain extra spaces in them, if longer than the max
            length.
    """
    result = []
    for line in string_list:
      if len(line) > max_line_length:
        # Insert the space before the LAST wrap character occurrence.
        index = line.rfind(wrap_character)
        if index != -1:
          line = line[:index] + ' ' + line[index:]
      result.append(line)
    return result
+
+
class BuildStatusOracle:
  """Keeps track of a particular build's state.

     The oracle uses marker files in the default master work directory to
     remember whether a build has failed. It only remembers the most recent
     build until told to forget it.
  """

  def __init__(self, builder_name):
    """Creates the oracle.

       Args:
         builder_name: The name of the associated builder. The name becomes
             part of the marker filename on disk, so it should be unique.
    """
    self.builder_name = builder_name
    self.master_work_dir = DEFAULT_MASTER_WORK_DIR

  def LastBuildFailed(self):
    """Returns True if a failure marker exists for the last build."""
    return os.path.exists(self._GetFailureBuildPath())

  def ForgetLastBuild(self):
    """Removes the failure marker, if one is present."""
    marker = self._GetFailureBuildPath()
    if os.path.exists(marker):
      os.remove(marker)

  def SetLastBuildAsFailed(self):
    """Records a failure by touching an empty marker file."""
    open(self._GetFailureBuildPath(), 'w').close()

  def _GetFailureBuildPath(self):
    """Computes the marker file path: <work dir>/<builder name>.failed."""
    return os.path.join(self.master_work_dir, '%s.failed' % self.builder_name)
+
+
class MonitoredRetryingShellCommand(ShellCommand):
  """Wraps a shell command and notifies the oracle if the command fails.

  If the command fails, there's an option to retry it a number of times.
  Default behavior is to not retry."""

  def __init__(self, build_status_oracle, number_of_retries=0, **kwargs):
    """Args:
         build_status_oracle: Told about the failure once all retries have
             been exhausted.
         number_of_retries: How many times to re-run the command on failure
             before giving up and recording the failure.
         kwargs: Forwarded unchanged to ShellCommand (command, workdir, ...).
    """
    ShellCommand.__init__(self, **kwargs)

    # addFactoryArguments lets buildbot re-create this step with our extra
    # constructor arguments intact.
    self.addFactoryArguments(build_status_oracle=build_status_oracle,
                             number_of_retries=number_of_retries)
    self.build_status_oracle = build_status_oracle
    self.number_of_retries = number_of_retries

  def finished(self, results):
    # NOTE(review): assumes 'import buildbot' makes buildbot.status.builder
    # reachable -- confirm against the buildbot version in use.
    if (results == buildbot.status.builder.FAILURE or
        results == buildbot.status.builder.EXCEPTION):
      if self.number_of_retries > 0:
        # Retry in place: restart the command and defer the finished()
        # bookkeeping until retries are exhausted or the command succeeds.
        self.number_of_retries -= 1
        self.start()
        return
      else:
        # Out of retries: record the failure so the next build can react
        # (e.g. SmartClean will nuke the whole checkout).
        self.build_status_oracle.SetLastBuildAsFailed()
    ShellCommand.finished(self, results)
+
+
class SmartClean(ShellCommand):
  """Cleans the repository fully or partially depending on the build state."""
  def __init__(self, build_status_oracle, **kwargs):
    """Args:
         build_status_oracle: Consulted at step start to learn whether the
             previous build failed.
         kwargs: Forwarded unchanged to ShellCommand.
    """
    ShellCommand.__init__(self, **kwargs)

    self.addFactoryArguments(build_status_oracle=build_status_oracle)
    # A failed clean leaves the checkout in an unknown state; stop the build.
    self.haltOnFailure = True
    self.build_status_oracle = build_status_oracle

  def start(self):
    # Choose the command lazily, at step start, so the decision reflects the
    # outcome of the previous build rather than factory-construction time.
    if self.build_status_oracle.LastBuildFailed():
      self.build_status_oracle.ForgetLastBuild()
      self.description = ['Nuke Repository', '(Previous Failed)']
      self.setCommand(['rm', '-rf', 'trunk'])
    else:
      # Passed as one string (not a list) so the shell interprets the &&
      # chaining; only build output is removed, the checkout is kept.
      self.description = ['Clean']
      self.setCommand('rm -rf trunk/out && '
                      'rm -rf trunk/xcodebuild &&'
                      'rm -rf trunk/build/Debug &&'
                      'rm -rf trunk/build/Release')
    ShellCommand.start(self)
+
+
class GenerateCodeCoverage(ShellCommand):
  """This custom shell command generates coverage HTML using genhtml.

     The command will dump the HTML output into coverage_dir, in a directory
     whose name is generated from the build number and slave name. We will
     expect that the coverage directory is somewhere under the web server root
     (i.e. public html root) that corresponds to the web server URL. That is, if
     we write Foo to the coverage directory we expect that directory to be
     reachable from url/Foo.
  """

  def __init__(self, coverage_url, coverage_dir, coverage_file, **kwargs):
    """Prepares the coverage command.

       Args:
         coverage_url: The base URL for the serving web server we will use
             when we generate the link to the coverage. This will generally
             be the slave's URL (something like http://slave-hostname/).
         coverage_dir: Where to write coverage HTML.
         coverage_file: The LCOV file to generate the coverage from.
    """
    ShellCommand.__init__(self, **kwargs)
    self.addFactoryArguments(coverage_url=coverage_url,
                             coverage_dir=coverage_dir,
                             coverage_file=coverage_file)
    self.setDefaultWorkdir('build/trunk')
    self.coverage_url = coverage_url
    self.coverage_dir = coverage_dir
    self.coverage_file = coverage_file
    self.description = ['Coverage Report']
    # Coverage generation problems should warn, not fail the build.
    self.warnOnFailure = True
    self.flunkOnFailure = False
    # %(buildername)s / %(buildnumber)s are expanded by WithProperties when
    # the step actually runs, not here.
    output_dir = os.path.join(coverage_dir,
                              '%(buildername)s_%(buildnumber)s')
    self.setCommand(['./tools/continuous_build/generate_coverage_html.sh',
                     coverage_file, WithProperties(output_dir)])

  def createSummary(self, log):
    # Adds a waterfall link to the report, mirroring the
    # <buildername>_<buildnumber> naming used for the output directory above.
    coverage_url = urlparse.urljoin(self.coverage_url,
                                    '%s_%s' % (self.getProperty('buildername'),
                                               self.getProperty('buildnumber')))
    self.addURL('click here', coverage_url)

  def start(self):
    # Plain passthrough; kept explicit (no extra behavior is added here).
    ShellCommand.start(self)
+
+
class WebRTCAndroidFactory(WebRTCFactory):
  """Sets up the Android build."""

  def __init__(self, build_status_oracle):
    WebRTCFactory.__init__(self, build_status_oracle)

  def EnableBuild(self, product='toro'):
    """Adds steps that clean old output, check out WebRTC into the Android
       tree and build it with the Android build system.

       Args:
         product: The Android product to build (e.g. 'toro').
    """
    # Wipe the checkout plus any previously built WebRTC objects for this
    # product before building.
    obj_rm_prefix = 'rm -rf out/target/product/%s/obj/' % product
    cleanup_commands = [
        'rm -rf external/webrtc',
        obj_rm_prefix + 'STATIC_LIBRARIES/libwebrtc_*',
        obj_rm_prefix + 'SHARE_LIBRARIES/libwebrtc_*',
        obj_rm_prefix + 'EXECUTABLES/webrtc_*'
        ]
    self.AddCommonStep(' ; '.join(cleanup_commands), descriptor='cleanup',
                       workdir='build/trunk')

    checkout_command = 'svn checkout %s external/webrtc' % SVN_LOCATION
    self.AddCommonStep(checkout_command, descriptor='svn (checkout)',
                       workdir='build/trunk')

    build_command = ('source build/envsetup.sh && lunch full_%s-eng '
                     '&& mmm external/webrtc showcommands' % product)
    self.AddCommonStep(build_command, descriptor='build',
                       workdir='build/trunk')
+
+
class WebRTCChromeFactory(WebRTCFactory):
  """Sets up the Chrome OS build."""

  def __init__(self, build_status_oracle):
    WebRTCFactory.__init__(self, build_status_oracle)

  def EnableBuild(self):
    """Adds steps to wipe, sync and build the chrome target."""
    self.AddCommonStep(['rm', '-rf', 'src'], descriptor='Cleanup')
    self.AddGclientSyncStep(force_sync=True)
    self.AddCommonMakeStep('chrome')

  def AddCommonMakeStep(self, target, make_extra=None):
    """Runs make for the given target inside the Chrome src directory.

       Args:
         target: Make target to build.
         make_extra: Optional extra argument appended to the make command.
    """
    command = ['make', target, '-j100']
    if make_extra is not None:
      command.append(make_extra)
    self.AddCommonStep(cmd=command, descriptor=['make ' + target],
                       warn_on_failure=True, workdir='build/src')
+
+
class WebRTCLinuxFactory(WebRTCFactory):
  """Sets up the Linux build.

     This factory is quite configurable and can run a variety of builds.
  """

  def __init__(self, build_status_oracle, valgrind_enabled=False):
    """Args:
         build_status_oracle: See WebRTCFactory.
         valgrind_enabled: If True, tests run under the Valgrind wrapper and
             the build uses memory-tool-friendly GYP defines.
    """
    WebRTCFactory.__init__(self, build_status_oracle)

    self.build_enabled = False
    self.coverage_enabled = False
    self.valgrind_enabled = valgrind_enabled

  def EnableCoverage(self, coverage_url, coverage_dir=DEFAULT_COVERAGE_DIR):
    """Enables coverage measurements using LCOV/GCOV.

       This method must be called before enabling build.

       Args:
         coverage_url: See the GenerateCodeCoverage command's contract for
             this argument.
         coverage_dir: See the GenerateCodeCoverage command's contract for
             this argument.
    """
    assert self.build_enabled is False

    self.coverage_enabled = True
    self.coverage_url = coverage_url
    self.coverage_dir = coverage_dir

  def EnableBuild(self, release=False, build32=False, chrome_os=False,
                  clang=False):
    """Adds steps to clean, sync, run GYP and build WebRTC on Linux.

       Args:
         release: Build the Release configuration instead of Debug.
         build32: Force a 32-bit (ia32) build.
         chrome_os: Set the chromeos=1 GYP define.
         clang: Build with clang instead of the default compiler.
    """
    if build32:
      self.gyp_params.append('-Dtarget_arch=ia32')

    self.build_enabled = True
    self.release = release

    self.AddSmartCleanStep()

    # Valgrind bots need special GYP defines to enable memory profiling
    # friendly compilation. They already have a custom .gclient configuration
    # file created so they don't need one being generated like the other bots.
    if self.valgrind_enabled:
      for gyp_define in MEMORY_TOOLS_GYP_DEFINES:
        self.gyp_params.append('-D' + gyp_define)
    else:
      self.AddCommonStep(['gclient', 'config', SVN_LOCATION],
                         descriptor='gclient_config')
    self.AddGclientSyncStep(force_sync=False)

    if chrome_os:
      self.gyp_params.append('-Dchromeos=1')

    if clang:
      self.gyp_params.append('-Dclang=1')

    if self.coverage_enabled:
      self.gyp_params.append('-Dcoverage=1')
    self.AddCommonGYPStep('webrtc.gyp', descriptor='CommonGYP')

    if clang:
      # The clang binaries must be fetched/updated before building with them.
      self.AddCommonStep(['trunk/tools/clang/scripts/update.sh'],
                          descriptor='Update_Clang')

    if self.release:
      self.AddCommonMakeStep('all', make_extra='BUILDTYPE=Release')
    else:
      self.AddCommonMakeStep('all')

  def AddCommonTestRunStep(self, test, extra_text=None, cmd=None,
                           workdir='build/trunk'):
    """Runs a test binary from out/<Debug|Release>, optionally under Valgrind.

       Args:
         test: Test binary name (used for the default command and descriptor).
         extra_text: Optional extra descriptor text for the waterfall.
         cmd: Optional command override.
         workdir: Working directory for the test.
    """
    descriptor = [test, extra_text] if extra_text else [test]
    if cmd is None:
      test_folder = 'Release' if self.release else 'Debug'
      cmd = ['out/%s/%s' % (test_folder, test)]
    if self.valgrind_enabled:
      cmd = VALGRIND_CMD + cmd
    # Failing tests flunk but don't halt the build, so later tests still run.
    self.AddCommonStep(cmd, descriptor=descriptor, workdir=workdir,
                       halt_build_on_failure=False)

  def AddXvfbTestRunStep(self, test_name, test_binary, test_arguments=''):
    """ Adds a test to be run inside a XVFB window manager."""
    cmd = ('xvfb-run '
           '--server-args="-screen 0 800x600x24 -extension Composite" '
           '%s %s' % (test_binary, test_arguments))
    self.AddCommonTestRunStep(test=test_name, cmd=cmd)

  def AddCommonMakeStep(self, target, extra_text=None, make_extra=None):
    """Adds a make step for the target with an optional extra argument."""
    descriptor = ['make ' + target, extra_text] if extra_text else ['make ' +
                                                                    target]
    cmd = ['make', target, '-j100']
    if make_extra:
      cmd.append(make_extra)
    self.AddCommonStep(cmd=cmd, descriptor=descriptor, workdir='build/trunk')

  def AddStepsToEstablishCoverageBaseline(self):
    """Captures zero-coverage LCOV baseline data before the tests run.

       The baseline is captured, narrowed to */src/* and then stripped of
       system/test/mock files, producing webrtc_base_filtered_final.info which
       AddStepsToComputeCoverage later merges with the test-run data.
    """
    self.AddCommonStep(['lcov', '--directory', '.', '--capture', '-b',
                        '.', '--initial',
                        '--output-file', 'webrtc_base.info'],
                       workdir='build/trunk',
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       descriptor='LCOV (Baseline Capture)')
    self.AddCommonStep(['lcov', '--extract', 'webrtc_base.info', '*/src/*',
                        '--output', 'filtered.info'],
                       workdir='build/trunk',
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       descriptor='LCOV (Baseline Extract)')
    self.AddCommonStep(['lcov', '--remove', 'filtered.info', '*/usr/include/*',
                        '/third*', '/testing/*', '*/test/*', '*_unittest.*',
                        '*/mock/*', '--output',
                        'webrtc_base_filtered_final.info'],
                       workdir='build/trunk',
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       descriptor='LCOV (Baseline Filter)')

  def AddStepsToComputeCoverage(self):
    """Enable coverage data."""

    # Delete all third-party .gcda files to save time and work around a bug
    # in lcov which tends to hang when capturing on libjpgturbo.
    self.AddCommonStep(['./tools/continuous_build/clean_third_party_gcda.sh'],
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       workdir='build/trunk',
                       descriptor='LCOV (Delete 3rd party)')
    self.AddCommonStep(['lcov', '--directory', '.', '--capture', '-b',
                        '.', '--output-file', 'webrtc.info'],
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       workdir='build/trunk',
                       descriptor='LCOV (Capture)')
    self.AddCommonStep(['lcov', '--extract', 'webrtc.info', '*/src/*',
                        '--output', 'test.info'],
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       workdir='build/trunk',
                       descriptor='LCOV (Extract)')
    self.AddCommonStep(['lcov', '--remove', 'test.info', '*/usr/include/*',
                        '/third*', '/testing/*', '*/test/*', '*_unittest.*',
                        '*/mock/*', '--output',
                        'final.info'],
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       workdir='build/trunk',
                       descriptor='LCOV (Filter)')
    # Merge the pre-test baseline with the post-test data so files that were
    # never executed still show up (with zero coverage) in the report.
    self.AddCommonStep(['lcov', '-a', 'webrtc_base_filtered_final.info', '-a',
                        'final.info', '-o', 'final.info'],
                       warn_on_failure=True,
                       halt_build_on_failure=False,
                       workdir='build/trunk',
                       descriptor='LCOV (Merge)')
    self.addStep(GenerateCodeCoverage(coverage_url=self.coverage_url,
                                      coverage_dir=self.coverage_dir,
                                      coverage_file='final.info'))

  def EnableTests(self, tests):
    """Adds the test steps, bracketed by coverage steps when enabled."""
    if self.coverage_enabled:
      self.AddStepsToEstablishCoverageBaseline()

    WebRTCFactory.EnableTests(self, tests)

    if self.coverage_enabled:
      self.AddStepsToComputeCoverage()

  def EnableTest(self, test):
    """Adds a step for running a test on Linux.

       In general, this method will interpret the name as the name of a binary
       in the default build output directory, except for a few special cases
       which require custom command lines.

       Args:
         test: the test name as a string.
    """
    if test == 'audioproc_unittest':
      # Run the normal build first, then rebuild and rerun with fixed-point
      # arithmetic enabled.
      self.AddCommonTestRunStep(test)
      self.AddCommonGYPStep('webrtc.gyp', gyp_params=['-Dprefer_fixed_point=1'],
                            descriptor='GYP fixed point')
      self.AddCommonMakeStep(test, extra_text='(fixed point)')
      self.AddCommonTestRunStep(test, extra_text='(fixed point)')
    elif test == 'vie_auto_test':
      # TODO(phoglund): Enable the full stack test once it is completed and
      # nonflaky.
      binary = 'out/Debug/vie_auto_test'
      args = (
          '--automated --gtest_filter="'
          '-ViEVideoVerificationTest.RunsFullStackWithoutErrors" '
          '--capture_test_ensure_resolution_alignment_in_capture_device=false')
      self.AddXvfbTestRunStep(test_name=test, test_binary=binary,
                              test_arguments=args)
    elif test == 'video_render_module_test':
      self.AddXvfbTestRunStep(test_name=test,
                              test_binary='out/Debug/video_render_module_test')
    elif test == 'voe_auto_test':
      cmd = 'out/Debug/voe_auto_test --automated'
      self.AddCommonTestRunStep(test=test, cmd=cmd)
    else:
      self.AddCommonTestRunStep(test)
+
+
class WebRTCMacFactory(WebRTCFactory):
  """Sets up the Mac build, both for make and xcode."""

  def __init__(self, build_status_oracle):
    WebRTCFactory.__init__(self, build_status_oracle)
    self.build_type = 'both'
    self.allowed_build_types = ['both', 'xcode', 'make']

  def EnableBuild(self, build_type='both', release=False):
    """Adds steps to clean, sync and build WebRTC on Mac.

       Args:
         build_type: One of 'make', 'xcode' or 'both'.
         release: If True, build the Release configuration.
    """
    self.release = release

    if build_type not in self.allowed_build_types:
      # Exit nonzero on a bad configuration; the original exited with 0,
      # which made this fatal error indistinguishable from success.
      print('*** INCORRECT BUILD TYPE (%s)!!! ***' % build_type)
      sys.exit(1)
    else:
      self.build_type = build_type
    self.AddSmartCleanStep()
    self.AddCommonStep(['gclient', 'config', SVN_LOCATION],
                       descriptor='gclient_config')
    self.AddGclientSyncStep(force_sync=True)

    if self.build_type == 'make' or self.build_type == 'both':
      self.AddCommonGYPStep('webrtc.gyp', gyp_params=['-f', 'make'],
                            descriptor='EnableMake')
    # AddCommonMakeStep guards on build_type itself, so calling it
    # unconditionally here is safe for the pure-xcode configuration.
    self.AddCommonMakeStep('all')

  def AddCommonTestRunStep(self, test, extra_text=None, cmd=None,
                           workdir='build/trunk'):
    """Runs a test for the enabled build type(s); failures don't halt.

       Args:
         test: Test binary name.
         extra_text: Optional extra descriptor text.
         cmd: Optional command override.
         workdir: Working directory for the test.
    """
    descriptor = [test, extra_text] if extra_text else [test]
    if cmd is None:
      # NOTE(review): for build_type 'both' this picks 'out', yet the step
      # below is labeled '(xcode)' -- confirm xcode output lands in out/ in
      # that configuration.
      out_path = 'xcodebuild' if self.build_type == 'xcode' else 'out'
      test_folder = 'Release' if self.release else 'Debug'
      cmd = ['%s/%s/%s' % (out_path, test_folder, test)]

    if self.build_type == 'xcode' or self.build_type == 'both':
      self.AddCommonStep(cmd, descriptor=descriptor + ['(xcode)'],
                         halt_build_on_failure=False, workdir=workdir)
    # Execute test only for 'make' build type.
    # If 'both' is enabled we'll only execute the 'xcode' built ones.
    if self.build_type == 'make':
      self.AddCommonStep(cmd, descriptor=descriptor + ['(make)'],
                         halt_build_on_failure=False, workdir=workdir)

  def AddCommonMakeStep(self, target, extra_text=None, make_extra=None):
    """Adds make and/or xcodebuild steps depending on the build type.

       Args:
         target: Target to build ('All' is used for xcodebuild).
         extra_text: Optional extra descriptor text.
         make_extra: Optional extra argument for the make command.
    """
    descriptor = [target, extra_text] if extra_text else [target]
    if self.build_type == 'make' or self.build_type == 'both':
      cmd = ['make', target, '-j100']
      if make_extra is not None:
        cmd.append(make_extra)
      if self.release:
        cmd.append('BUILDTYPE=Release')
      self.AddCommonStep(cmd, descriptor=descriptor + ['(make)'],
                         workdir='build/trunk')
    if self.build_type == 'xcode' or self.build_type == 'both':
      configuration = 'Release' if self.release else 'Debug'
      cmd = ['xcodebuild', '-project', 'webrtc.xcodeproj', '-configuration',
             configuration, '-target', 'All']
      self.AddCommonStep(cmd, descriptor=descriptor + ['(xcode)'],
                         workdir='build/trunk')
+
class WebRTCWinFactory(WebRTCFactory):
  """Sets up the Windows build.

     Allows building with Debug, Release or both in sequence.
  """

  def __init__(self, build_status_oracle):
    WebRTCFactory.__init__(self, build_status_oracle)
    self.configuration = 'Debug'
    self.platform = 'x64'
    self.allowed_platforms = ['x64', 'Win32']
    self.allowed_configurations = ['Debug', 'Release', 'both']

  def EnableBuild(self, platform='Win32', configuration='Debug'):
    """Adds steps to clean, sync and build WebRTC with msbuild.

       Args:
         platform: 'Win32' or 'x64'.
         configuration: 'Debug', 'Release' or 'both'.
    """
    if platform not in self.allowed_platforms:
      # Exit nonzero on a bad configuration; the original exited with 0,
      # which made this fatal error indistinguishable from success.
      print('*** INCORRECT PLATFORM (%s)!!! ***' % platform)
      sys.exit(1)
    else:
      self.platform = platform
    if configuration not in self.allowed_configurations:
      print('*** INCORRECT CONFIGURATION (%s)!!! ***' % configuration)
      sys.exit(1)
    else:
      self.configuration = configuration

    self.AddSmartCleanStep()
    self.AddCommonStep(['gclient', 'config', SVN_LOCATION],
                       descriptor='gclient_config')
    self.AddGclientSyncStep(force_sync=True)

    if self.configuration == 'Debug' or self.configuration == 'both':
      cmd = ['msbuild', 'webrtc.sln', '/t:Clean',
             '/p:Configuration=Debug;Platform=%s' % (self.platform)]
      self.AddCommonStep(cmd, descriptor='Build(Clean)', workdir='build/trunk')
      cmd = ['msbuild', 'webrtc.sln',
             '/p:Configuration=Debug;Platform=%s' % (self.platform)]
      self.AddCommonStep(cmd, descriptor='Build(Debug)', workdir='build/trunk')
    if self.configuration == 'Release' or self.configuration == 'both':
      cmd = ['msbuild', 'webrtc.sln', '/t:Clean',
             '/p:Configuration=Release;Platform=%s' % (self.platform)]
      self.AddCommonStep(cmd, descriptor='Build(Clean)', workdir='build/trunk')
      cmd = ['msbuild', 'webrtc.sln',
             '/p:Configuration=Release;Platform=%s' % (self.platform)]
      self.AddCommonStep(cmd, descriptor='Build(Release)',
                         workdir='build/trunk')

  def AddCommonTestRunStep(self, test, cmd=None, workdir='build/trunk'):
    """Runs the test binary for each enabled configuration.

       Args:
         test: Test binary name (without the .exe suffix).
         cmd: Optional command override, used as-is for every configuration.
         workdir: Working directory for the test run.
    """
    descriptor = [test]
    if self.configuration == 'Debug' or self.configuration == 'both':
      # Use a per-branch command variable. The original assigned to the
      # shared 'cmd' parameter here, so with configuration 'both' the
      # Release branch below saw cmd != None and re-ran the Debug binary
      # instead of the Release one.
      debug_cmd = cmd if cmd is not None else ['build\\Debug\\%s.exe' % test]
      self.AddCommonStep(debug_cmd, descriptor=descriptor,
                         halt_build_on_failure=False, workdir=workdir)
    if self.configuration == 'Release' or self.configuration == 'both':
      release_cmd = (cmd if cmd is not None
                     else ['build\\Release\\%s.exe' % test])
      self.AddCommonStep(release_cmd, descriptor=descriptor,
                         halt_build_on_failure=False, workdir=workdir)
+
+# Utility functions
+
+
class UnsupportedPlatformError(Exception):
  """Raised by GetEnabledTests for platforms outside SUPPORTED_PLATFORMS."""
  pass
+
+
def GetEnabledTests(test_dict, platform):
  """Returns a list of enabled test names for the provided platform.

     Args:
       test_dict: Dictionary mapping test names to tuples representing if the
           test shall be enabled on each platform. Each tuple contains one
           boolean for each platform. The platforms are in the order specified
           by SUPPORTED_PLATFORMS.
       platform: The platform we're looking to get the tests for.

     Returns:
       A list of test names, sorted alphabetically.

     Raises:
       UnsupportedPlatformError: if the platform supplied is not supported.
  """
  if platform not in SUPPORTED_PLATFORMS:
    raise UnsupportedPlatformError('*** UNSUPPORTED PLATFORM (%s)!!! ***' %
                                   platform)
  # The platform's position in SUPPORTED_PLATFORMS selects which boolean in
  # each tuple applies.
  column = SUPPORTED_PLATFORMS.index(platform)
  enabled_tests = [name for name, enabled in test_dict.iteritems()
                   if enabled[column]]
  enabled_tests.sort()
  return enabled_tests
diff --git a/trunk/tools/continuous_build/webrtc_buildbot/utils_test.py b/trunk/tools/continuous_build/webrtc_buildbot/utils_test.py
new file mode 100644
index 0000000..7da58f6
--- /dev/null
+++ b/trunk/tools/continuous_build/webrtc_buildbot/utils_test.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Unit tests for helper functions in utils.py."""
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+import unittest
+
+from webrtc_buildbot import utils
+
+
+class Test(unittest.TestCase):
+  """Unit tests for utils.GetEnabledTests and WebRTCFactory._WrapLongLines."""
+
+  def testGetEnabledTests(self):
+    # The tuples list enablement per platform in SUPPORTED_PLATFORMS order.
+    tests = {
+        # Name     Linux Mac   Windows
+        'test_1': (True, True, False),
+        'test_2': (True, False, False),
+    }
+    result = utils.GetEnabledTests(tests, 'Linux')
+    self.assertEqual(2, len(result))
+    # The returned list shall be sorted alphabetically.
+    self.assertEqual('test_1', result[0])
+    self.assertEqual('test_2', result[1])
+
+    result = utils.GetEnabledTests(tests, 'Mac')
+    self.assertEqual(1, len(result))
+    self.assertEqual('test_1', result[0])
+
+    result = utils.GetEnabledTests(tests, 'Windows')
+    self.assertEqual(0, len(result))
+
+    # Unknown platform names shall raise UnsupportedPlatformError.
+    self.assertRaises(utils.UnsupportedPlatformError,
+                      utils.GetEnabledTests, tests, 'BeOS')
+
+  def testEmptyListExitQuietly(self):
+    # An empty input list shall be returned unchanged without errors.
+    factory = utils.WebRTCFactory()
+    self.assertEqual([], factory._WrapLongLines([]))
+
+  def testShortLinesShallNotWrap(self):
+    factory = utils.WebRTCFactory()
+    self.assertEqual(['a'], factory._WrapLongLines(['a']))
+
+    # 25-character strings are at the limit and shall not be wrapped.
+    string_25_len = '12345678901234567890123_5'
+    result = factory._WrapLongLines([string_25_len, string_25_len])
+    self.assertEqual(string_25_len, result[0])
+    self.assertEqual(string_25_len, result[1])
+
+  def testLinesWithMoreThan25CharactersWithNoWrapCharacterIsUnchanged(self):
+    # Without a wrap character ('_') there is nowhere to break the line.
+    factory = utils.WebRTCFactory()
+    string_26_len = '12345678901234567890123456'
+    result = factory._WrapLongLines([string_26_len, string_26_len])
+    self.assertEqual(string_26_len, result[0])
+    self.assertEqual(string_26_len, result[1])
+
+  def testLinesWithMoreThan25CharactersShallWrapOnWrapCharacter(self):
+    factory = utils.WebRTCFactory()
+    string_26_len = '123456789012345678901234_6'
+    test_list = [string_26_len, string_26_len]
+    result = factory._WrapLongLines(test_list)
+    # A space is expected to be inserted before the wrap character '_'.
+    expected_result = '123456789012345678901234 _6'
+    self.assertEqual(expected_result, result[0])
+    self.assertEqual(expected_result, result[1])
+    # Verify the original test_list was not modified too.
+    self.assertEqual(string_26_len, test_list[0])
+    self.assertEqual(string_26_len, test_list[1])
+
+
+# Running this file directly executes all tests defined in this module.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/trunk/tools/coverity/OWNERS b/trunk/tools/coverity/OWNERS
new file mode 100644
index 0000000..b44992b
--- /dev/null
+++ b/trunk/tools/coverity/OWNERS
@@ -0,0 +1,3 @@
+kjellander@webrtc.org
+phoglund@webrtc.org
+
diff --git a/trunk/tools/coverity/coverity.py b/trunk/tools/coverity/coverity.py
new file mode 100755
index 0000000..84b95bc
--- /dev/null
+++ b/trunk/tools/coverity/coverity.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""
+Runs Coverity Static Analysis on a build of WebRTC.
+
+This script is a modified copy of Chromium's tools/coverity/coverity.py
+Changes made:
+ * Replaced deprecated switches for cov-commit-defects command:
+   * Using --host instead of --remote
+   * Using --stream instead of --product
+   * Removed --cxx (now default enabled)
+ * Changed cleaning of output path, since WebRTC's out dir is located directly
+   in trunk/
+ * Updated some default constants.
+
+The script runs on all WebRTC supported platforms.
+
+On Windows, this script should be run in a Visual Studio Command Prompt, so
+that the INCLUDE, LIB, and PATH environment variables are set properly for
+Visual Studio.
+
+Usage examples:
+  coverity.py
+  coverity.py --dry-run
+  coverity.py --target=debug
+  %comspec% /c ""C:\Program Files\Microsoft Visual Studio 8\VC\vcvarsall.bat"
+      x86 && C:\Python24\python.exe C:\coverity.py"
+
+For a full list of options, pass the '--help' switch.
+
+See http://support.microsoft.com/kb/308569 for running this script as a
+Scheduled Task on Windows XP.
+"""
+
+import optparse
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import time
+
+# These constants provide default values, but are exposed as command-line
+# flags. See the --help for more info. Note that for historical reasons
+# (the script started out as Windows-only and has legacy usages which pre-date
+# these switches), the constants are all tuned for Windows.
+# Usage of this script on Linux pretty much requires explicit
+# --source-dir, --coverity-bin-dir, --coverity-intermediate-dir, and
+# --coverity-target command line flags.
+
+WEBRTC_SOURCE_DIR = 'C:\\webrtc.latest'
+
+# Relative to WEBRTC_SOURCE_DIR. Only applies to Windows platform.
+WEBRTC_SOLUTION_FILE = 'webrtc.sln'
+
+# Relative to WEBRTC_SOURCE_DIR. Only applies to Windows platform.
+WEBRTC_SOLUTION_DIR = 'build'
+
+COVERITY_BIN_DIR = 'C:\\coverity-integrity-center\\static-analysis\\bin'
+
+COVERITY_INTERMEDIATE_DIR = 'C:\\coverity\\cvbuild\\cr_int'
+
+COVERITY_ANALYZE_OPTIONS = ('--security --concurrency '
+                            '--enable ATOMICITY '
+                            '--enable MISSING_LOCK '
+                            '--enable DELETE_VOID '
+                            '--checker-option PASS_BY_VALUE:size_threshold:16 '
+                            '--checker-option '
+                            'USE_AFTER_FREE:allow_simple_use:false '
+                            '--enable-constraint-fpp '
+                            '--enable-callgraph-metrics')
+
+# Might need to be changed to FQDN
+COVERITY_REMOTE = 'localhost'
+
+COVERITY_PORT = '8080'
+
+COVERITY_STREAM = 'trunk'
+
+COVERITY_TARGET = 'Windows'
+
+COVERITY_USER = 'coverityanalyzer'
+# looking for a PASSWORD constant? Look at --coverity-password-file instead.
+
+# Relative to WEBRTC_SOURCE_DIR.  Contains the pid of this script.
+LOCK_FILE = 'coverity.lock'
+
+
+def _ReadPassword(pwfilename):
+  """Reads the coverity password in from a file where it was stashed"""
+  pwfile = open(pwfilename, 'r')
+  password = pwfile.readline()
+  pwfile.close()
+  return password.rstrip()
+
+
+def _RunCommand(cmd, dry_run, shell=False, echo_cmd=True):
+  """Runs the command if dry_run is false, otherwise just prints the command.
+
+  Args:
+    cmd: The command to execute (string or list, as subprocess.call accepts).
+    dry_run: When true, only print the command; never execute it.
+    shell: Passed through to subprocess.call.
+    echo_cmd: When false, the command line is not printed before running
+        (used below to avoid leaking the Coverity password to the console).
+
+  Returns:
+    The command's exit status, or 0 when dry_run is set.
+  """
+  if echo_cmd:
+    print cmd
+  if not dry_run:
+    return subprocess.call(cmd, shell=shell)
+  else:
+    return 0
+
+
+def _ReleaseLock(lock_file, lock_filename):
+  """Removes the lockfile. Function-ized so we can bail from anywhere.
+
+  Args:
+    lock_file: Open file descriptor for the lock file (from os.open).
+    lock_filename: Path of the lock file to delete.
+  """
+  os.close(lock_file)
+  os.remove(lock_filename)
+
+
+def run_coverity(options, args):
+  """Runs all the selected tests for the given build type and target."""
+  # Create the lock file to prevent another instance of this script from
+  # running.
+  lock_filename = os.path.join(options.source_dir, LOCK_FILE)
+  try:
+    lock_file = os.open(lock_filename,
+                        os.O_CREAT | os.O_EXCL | os.O_TRUNC | os.O_RDWR)
+  except OSError, err:
+    print 'Failed to open lock file:\n  ' + str(err)
+    return 1
+
+  # Write the pid of this script (the python.exe process) to the lock file.
+  os.write(lock_file, str(os.getpid()))
+
+  options.target = options.target.title()
+
+  start_time = time.time()
+
+  print 'Change directory to ' + options.source_dir
+  os.chdir(options.source_dir)
+
+  # The coverity-password filename may have been a relative path.
+  # If so, assume it's relative to the source directory, which means
+  # the time to read the password is after we do the chdir().
+  coverity_password = _ReadPassword(options.coverity_password_file)
+
+  cmd = 'gclient sync'
+  gclient_exit = _RunCommand(cmd, options.dry_run, shell=True)
+  if gclient_exit != 0:
+    print 'gclient aborted with status %s' % gclient_exit
+    _ReleaseLock(lock_file, lock_filename)
+    return 1
+
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  # Do a clean build.  Remove the build output directory first.
+  if sys.platform.startswith('linux'):
+    rm_path = os.path.join(options.source_dir,'out',options.target)
+  elif sys.platform == 'win32':
+    rm_path = os.path.join(options.source_dir,options.solution_dir,
+                           options.target)
+  elif sys.platform == 'darwin':
+    rm_path = os.path.join(options.source_dir,'xcodebuild')
+  else:
+    print 'Platform "%s" unrecognized, aborting' % sys.platform
+    _ReleaseLock(lock_file, lock_filename)
+    return 1
+
+  if options.dry_run:
+    print 'shutil.rmtree(%s)' % repr(rm_path)
+  else:
+    shutil.rmtree(rm_path,True)
+
+  if options.preserve_intermediate_dir:
+      print 'Preserving intermediate directory.'
+  else:
+    if options.dry_run:
+      print 'shutil.rmtree(%s)' % repr(options.coverity_intermediate_dir)
+      print 'os.mkdir(%s)' % repr(options.coverity_intermediate_dir)
+    else:
+      shutil.rmtree(options.coverity_intermediate_dir,True)
+      os.mkdir(options.coverity_intermediate_dir)
+
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  use_shell_during_make = False
+  if sys.platform.startswith('linux'):
+    use_shell_during_make = True
+    _RunCommand('pwd', options.dry_run, shell=True)
+    cmd = '%s/cov-build --dir %s make BUILDTYPE=%s All' % (
+      options.coverity_bin_dir, options.coverity_intermediate_dir,
+      options.target)
+  elif sys.platform == 'win32':
+    cmd = ('%s\\cov-build.exe --dir %s devenv.com %s\\%s /build %s '
+           '/project webrtc.vcproj') % (
+      options.coverity_bin_dir, options.coverity_intermediate_dir,
+      options.source_dir, options.solution_file, options.target)
+  elif sys.platform == 'darwin':
+    use_shell_during_make = True
+    _RunCommand('pwd', options.dry_run, shell=True)
+    cmd = ('%s/cov-build --dir %s xcodebuild -project webrtc.xcodeproj '
+           '-configuration %s -target All') % (
+      options.coverity_bin_dir, options.coverity_intermediate_dir,
+      options.target)
+
+
+  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make)
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  cov_analyze_exe = os.path.join(options.coverity_bin_dir,'cov-analyze')
+  cmd = '%s --dir %s %s' % (cov_analyze_exe,
+                            options.coverity_intermediate_dir,
+                            options.coverity_analyze_options)
+  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make)
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  cov_commit_exe = os.path.join(options.coverity_bin_dir,'cov-commit-defects')
+
+  # On Linux we have started using a Target with a space in it, so we want
+  # to quote it. On the other hand, Windows quoting doesn't work quite the
+  # same way. To be conservative, I'd like to avoid quoting an argument
+  # that doesn't need quoting and which we haven't historically been quoting
+  # on that platform. So, only quote the target if we have to.
+  coverity_target = options.coverity_target
+  if sys.platform != 'win32':
+    coverity_target = '"%s"' % coverity_target
+
+  cmd = ('%s --dir %s --host %s --port %s '
+         '--stream %s '
+         '--target %s '
+         '--user %s '
+         '--password %s') % (cov_commit_exe,
+                             options.coverity_intermediate_dir,
+                             options.coverity_dbhost,
+                             options.coverity_port,
+                             options.coverity_stream,
+                             coverity_target,
+                             options.coverity_user,
+                             coverity_password)
+  # Avoid echoing the Commit command because it has a password in it
+  print 'Commiting defects to Coverity Integrity Manager server...'
+  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make, echo_cmd=False)
+
+  print 'Completed! Total time: %ds' % (time.time() - start_time)
+
+  _ReleaseLock(lock_file, lock_filename)
+
+  return 0
+
+
+def main():
+  """Parses command-line options and runs the Coverity analysis.
+
+  Returns:
+    The exit status of run_coverity() (0 on success, 1 on failure).
+  """
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('', '--dry-run', action='store_true', default=False,
+                           help='print but don\'t run the commands')
+
+  option_parser.add_option('', '--target', default='Release',
+                           help='build target (Debug or Release)')
+
+  option_parser.add_option('', '--source-dir', dest='source_dir',
+                           help='full path to directory ABOVE "src"',
+                           default=WEBRTC_SOURCE_DIR)
+
+  option_parser.add_option('', '--solution-file', dest='solution_file',
+                           help='filename of solution file to build (Win only)',
+                           default=WEBRTC_SOLUTION_FILE)
+
+  option_parser.add_option('', '--solution-dir', dest='solution_dir',
+                           help='build directory for the solution (Win only)',
+                           default=WEBRTC_SOLUTION_DIR)
+
+  option_parser.add_option('', '--coverity-bin-dir', dest='coverity_bin_dir',
+                           default=COVERITY_BIN_DIR)
+
+  option_parser.add_option('', '--coverity-intermediate-dir',
+                           dest='coverity_intermediate_dir',
+                           default=COVERITY_INTERMEDIATE_DIR)
+
+  option_parser.add_option('', '--coverity-analyze-options',
+                           dest='coverity_analyze_options',
+                           help=('all cov-analyze options, e.g. "%s"'
+                                 % COVERITY_ANALYZE_OPTIONS),
+                           default=COVERITY_ANALYZE_OPTIONS)
+
+  option_parser.add_option('', '--coverity-db-host',
+                           dest='coverity_dbhost',
+                           help=('coverity defect db server hostname, e.g. %s'
+                                 % COVERITY_REMOTE),
+                           default=COVERITY_REMOTE)
+
+  option_parser.add_option('', '--coverity-db-port', dest='coverity_port',
+                           help=('port # of coverity web/db server, e.g. %s'
+                                 % COVERITY_PORT),
+                           default=COVERITY_PORT)
+
+  option_parser.add_option('', '--coverity-stream', dest='coverity_stream',
+                           help=('Name of stream reported to Coverity, e.g. %s'
+                                 % COVERITY_STREAM),
+                           default=COVERITY_STREAM)
+
+  option_parser.add_option('', '--coverity-target', dest='coverity_target',
+                           help='Platform Target reported to coverity',
+                           default=COVERITY_TARGET)
+
+  option_parser.add_option('', '--coverity-user', dest='coverity_user',
+                           help='Username used to log into coverity',
+                           default=COVERITY_USER)
+
+  option_parser.add_option('', '--coverity-password-file',
+                           dest='coverity_password_file',
+                           help='file containing the coverity password',
+                           default='coverity-password')
+
+  helpmsg = ('By default, the intermediate dir is emptied before analysis. '
+             'This switch disables that behavior.')
+  option_parser.add_option('', '--preserve-intermediate-dir',
+                           action='store_true', help=helpmsg,
+                           default=False)
+
+  options, args = option_parser.parse_args()
+  return run_coverity(options, args)
+
+
+if '__main__' == __name__:
+  sys.exit(main())
diff --git a/trunk/tools/create_supplement_gypi.py b/trunk/tools/create_supplement_gypi.py
new file mode 100644
index 0000000..f5a08e6
--- /dev/null
+++ b/trunk/tools/create_supplement_gypi.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+# Template for the generated supplement gypi file. The %s placeholder is
+# filled in with the path of the generating script (argv[0]) by main().
+supplement_gypi = """#!/usr/bin/env python
+# This file is generated by %s.  Not for check-in.
+# Please see the WebRTC DEPS file for details.
+{
+  'variables': {
+    'build_with_chromium': 0,
+    'inside_chromium_build': 0,
+  }
+}
+"""
+
+def main(argv):
+  open(argv[1], 'w').write(supplement_gypi % argv[0])
+
+# Entry point: pass sys.argv so the output path comes from the command line.
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/trunk/tools/matlab/maxUnwrap.m b/trunk/tools/matlab/maxUnwrap.m
new file mode 100644
index 0000000..276c952
--- /dev/null
+++ b/trunk/tools/matlab/maxUnwrap.m
@@ -0,0 +1,25 @@
+function sequence = maxUnwrap(sequence, max)
+%
+% sequence = maxUnwrap(sequence, max)
+% Unwraps when a wrap around is detected.
+%
+% Arguments
+%
+% sequence: The vector to unwrap.
+% max: The maximum value that the sequence can take,
+%      and after which it will wrap over to 0.
+%
+% Return value
+%
+% sequence: The unwrapped vector.
+%
+
+% Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+%
+% Use of this source code is governed by a BSD-style license
+% that can be found in the LICENSE file in the root of the source
+% tree. An additional intellectual property rights grant can be found
+% in the file PATENTS.  All contributing project authors may
+% be found in the AUTHORS file in the root of the source tree.
+
+% Scale the sequence into phase (radians), let unwrap() remove the 2*pi
+% jumps caused by wrap-arounds, scale back, and round so the result stays
+% integer-valued.
+sequence = round((unwrap(2 * pi * sequence / max) * max) / (2 * pi));
diff --git a/trunk/tools/matlab/parseLog.m b/trunk/tools/matlab/parseLog.m
new file mode 100644
index 0000000..5d4c3f7
--- /dev/null
+++ b/trunk/tools/matlab/parseLog.m
@@ -0,0 +1,54 @@
+function parsed = parseLog(filename)
+%
+% parsed = parseLog(filename)
+% Parses a DataLog text file, with the filename specified in the string
+% filename, into a struct with each column name as a field, and with the
+% column data stored as a vector in that field.
+%
+% Arguments
+%
+% filename: A string with the name of the file to parse.
+%
+% Return value
+%
+% parsed: A struct containing each column parsed from the input file
+%         as a field and with the column data stored as a vector in that
+%         field.
+%
+
+% Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+%
+% Use of this source code is governed by a BSD-style license
+% that can be found in the LICENSE file in the root of the source
+% tree. An additional intellectual property rights grant can be found
+% in the file PATENTS.  All contributing project authors may
+% be found in the AUTHORS file in the root of the source tree.
+
+% Read comma-separated values with one header line; the numeric values end
+% up in table.data and the header text in table.textdata.
+table = importdata(filename, ',', 1);
+if ~isstruct(table)
+  error('Malformed file, possibly empty or lacking data entries')
+end
+
+% If the whole header came back as a single string, split it on commas.
+colheaders = table.textdata;
+if length(colheaders) == 1
+  colheaders = regexp(table.textdata{1}, ',', 'split');
+end
+
+parsed = struct;
+i = 1;
+while i <= length(colheaders)
+  % Checking for a multi-value column.
+  % A header of the form 'name[N]' marks a column spanning N data columns.
+  m = regexp(colheaders{i}, '([\w\t]+)\[(\d+)\]', 'tokens');
+  if ~isempty(m)
+    % Parse a multi-value column
+    n = str2double(m{1}{2}) - 1;
+    % Spaces are not valid in struct field names; map them to underscores.
+    parsed.(strrep(m{1}{1}, ' ', '_')) = table.data(:, i:i+n);
+    i = i + n + 1;
+  elseif ~isempty(colheaders{i})
+    % Parse a single-value column
+    parsed.(strrep(colheaders{i}, ' ', '_')) = table.data(:, i);
+    i = i + 1;
+  else
+    error('Empty column');
+  end
+end
diff --git a/trunk/tools/python_charts/OWNERS b/trunk/tools/python_charts/OWNERS
new file mode 100644
index 0000000..0428a4a
--- /dev/null
+++ b/trunk/tools/python_charts/OWNERS
@@ -0,0 +1 @@
+kjellander@webrtc.org
diff --git a/trunk/tools/python_charts/README b/trunk/tools/python_charts/README
new file mode 100644
index 0000000..483c402
--- /dev/null
+++ b/trunk/tools/python_charts/README
@@ -0,0 +1,41 @@
+This file describes how to setup Eclipse and then the Python Charts project
+
+Setup Eclipse
+-------------
+These instructions were tested on Linux, but are very similar for Windows and
+Mac.
+1. Ensure you have Python 2.x installed
+2. Download and install Google App Engine SDK for Python from 
+   http://code.google.com/appengine/downloads.html
+3. Note which location you put App Engine in, as this will be needed later on.
+4. Download Eclipse from http://www.eclipse.org. Any distribution will probably
+   do, but if you're going to do mainly web development, you might pick Eclipse
+   IDE for JavaScript Web Developers
+5. Install the PyDev plugin using the Eclipse update site mentioned at 
+   http://pydev.org/download.html
+6. Install the Google Plugin for Eclipse: http://code.google.com/eclipse/
+
+Setup the project
+-----------------
+Generic instructions are available at
+http://code.google.com/appengine/docs/python/gettingstarted/ but the following
+should be enough:
+1. Launch Eclipse and create a workspace
+2. Create a new PyDev Project
+3. In the PyDev Project wizard, uncheck the "Use Default" checkbox for Project
+   contents and browse to your tools/python_charts directory.
+4. Enter a project name. We'll assume PythonCharts in the examples below.
+5. In the radio button of the lower part of the window, select
+   "Add project directory to the PYTHONPATH"
+6. Click Finish
+7. Select the Run > Run Configuration… menu item
+8. Create a new "Python Run" configuration
+9. Select your Python Charts project as project
+10. As Main Module, enter the path to your dev_appserver.py, which is a part
+    of your App Engine installation,
+    e.g. /usr/local/google_appengine/dev_appserver.py
+11. At the Arguments tab, enter the location of your project root.
+    Using Eclipse variables if your project name is PythonCharts:
+    ${workspace_loc:PythonCharts}
+12. Launch the development app server by clicking the Run button.
+13. Launch a browser and go to http://localhost:8080
diff --git a/trunk/tools/python_charts/app.yaml b/trunk/tools/python_charts/app.yaml
new file mode 100644
index 0000000..ace1b51
--- /dev/null
+++ b/trunk/tools/python_charts/app.yaml
@@ -0,0 +1,9 @@
+application: webrtc-python-charts
+version: 1
+runtime: python
+api_version: 1
+
+handlers:
+
+# App Engine url patterns are regular expressions; '/.*' matches every
+# request path ('/*' would only match repeated slashes, not sub-paths).
+- url: /.*
+  script: webrtc/main.py
\ No newline at end of file
diff --git a/trunk/tools/python_charts/data/vp8_hw.py b/trunk/tools/python_charts/data/vp8_hw.py
new file mode 100644
index 0000000..b8c6cc0
--- /dev/null
+++ b/trunk/tools/python_charts/data/vp8_hw.py
@@ -0,0 +1,49 @@
+# Sample output from the video_quality_measurment program, included only for
+# reference. Generate your own by running with the --python flag and then change
+# the filenames in main.py
+test_configuration = [{'name': 'name',                      'value': 'VP8 hardware test'},
+{'name': 'description',               'value': ''},
+{'name': 'test_number',               'value': '0'},
+{'name': 'input_filename',            'value': 'foreman_cif.yuv'},
+{'name': 'output_filename',           'value': 'foreman_cif_out.yuv'},
+{'name': 'output_dir',                'value': '.'},
+{'name': 'packet_size_in_bytes',      'value': '1500'},
+{'name': 'max_payload_size_in_bytes', 'value': '1440'},
+{'name': 'packet_loss_mode',          'value': 'Uniform'},
+{'name': 'packet_loss_probability',   'value': '0.000000'},
+{'name': 'packet_loss_burst_length',  'value': '1'},
+{'name': 'exclude_frame_types',       'value': 'ExcludeOnlyFirstKeyFrame'},
+{'name': 'frame_length_in_bytes',     'value': '152064'},
+{'name': 'use_single_core',           'value': 'False'},
+{'name': 'keyframe_interval;',        'value': '0'},
+{'name': 'video_codec_type',          'value': 'VP8'},
+{'name': 'width',                     'value': '352'},
+{'name': 'height',                    'value': '288'},
+{'name': 'bit_rate_in_kbps',          'value': '500'},
+]
+frame_data_types = {'frame_number': ('number', 'Frame number'),
+'encoding_successful': ('boolean', 'Encoding successful?'),
+'decoding_successful': ('boolean', 'Decoding successful?'),
+'encode_time': ('number', 'Encode time (us)'),
+'decode_time': ('number', 'Decode time (us)'),
+'encode_return_code': ('number', 'Encode return code'),
+'decode_return_code': ('number', 'Decode return code'),
+'bit_rate': ('number', 'Bit rate (kbps)'),
+'encoded_frame_length': ('number', 'Encoded frame length (bytes)'),
+'frame_type': ('string', 'Frame type'),
+'packets_dropped': ('number', 'Packets dropped'),
+'total_packets': ('number', 'Total packets'),
+'ssim': ('number', 'SSIM'),
+'psnr': ('number', 'PSNR (dB)'),
+}
+frame_data = [{'frame_number': 0, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 94676, 'decode_time': 37942, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 1098, 'encoded_frame_length': 4579, 'frame_type': 'Other', 'packets_dropped': 0, 'total_packets': 4, 'ssim': 0.910364, 'psnr': 35.067258},
+{'frame_number': 1, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 244007, 'decode_time': 39421, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 306, 'encoded_frame_length': 1277, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.911859, 'psnr': 35.115193},
+{'frame_number': 2, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 240508, 'decode_time': 38918, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 330, 'encoded_frame_length': 1379, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.913597, 'psnr': 35.181604},
+{'frame_number': 3, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 243449, 'decode_time': 39664, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 298, 'encoded_frame_length': 1242, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.912378, 'psnr': 35.164710},
+{'frame_number': 4, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 248024, 'decode_time': 39115, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 332, 'encoded_frame_length': 1385, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.911471, 'psnr': 35.109488},
+{'frame_number': 5, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 246910, 'decode_time': 39146, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 416, 'encoded_frame_length': 1734, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.915231, 'psnr': 35.392300},
+{'frame_number': 6, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 242953, 'decode_time': 38827, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 279, 'encoded_frame_length': 1165, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.916130, 'psnr': 35.452889},
+{'frame_number': 7, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 247343, 'decode_time': 41429, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 393, 'encoded_frame_length': 1639, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.919356, 'psnr': 35.647128},
+{'frame_number': 8, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 249529, 'decode_time': 40329, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 487, 'encoded_frame_length': 2033, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.924705, 'psnr': 36.179837},
+{'frame_number': 9, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 249408, 'decode_time': 41716, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 583, 'encoded_frame_length': 2433, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.928433, 'psnr': 36.589875},
+]
diff --git a/trunk/tools/python_charts/data/vp8_sw.py b/trunk/tools/python_charts/data/vp8_sw.py
new file mode 100644
index 0000000..0f29137
--- /dev/null
+++ b/trunk/tools/python_charts/data/vp8_sw.py
@@ -0,0 +1,49 @@
+# Sample output from the video_quality_measurment program, included only for
+# reference. Generate your own by running with the --python flag and then change
+# the filenames in main.py
+test_configuration = [{'name': 'name',                      'value': 'VP8 software test'},
+{'name': 'description',               'value': ''},
+{'name': 'test_number',               'value': '0'},
+{'name': 'input_filename',            'value': 'foreman_cif.yuv'},
+{'name': 'output_filename',           'value': 'foreman_cif_out.yuv'},
+{'name': 'output_dir',                'value': '.'},
+{'name': 'packet_size_in_bytes',      'value': '1500'},
+{'name': 'max_payload_size_in_bytes', 'value': '1440'},
+{'name': 'packet_loss_mode',          'value': 'Uniform'},
+{'name': 'packet_loss_probability',   'value': '0.000000'},
+{'name': 'packet_loss_burst_length',  'value': '1'},
+{'name': 'exclude_frame_types',       'value': 'ExcludeOnlyFirstKeyFrame'},
+{'name': 'frame_length_in_bytes',     'value': '152064'},
+{'name': 'use_single_core',           'value': 'False'},
+{'name': 'keyframe_interval;',        'value': '0'},
+{'name': 'video_codec_type',          'value': 'VP8'},
+{'name': 'width',                     'value': '352'},
+{'name': 'height',                    'value': '288'},
+{'name': 'bit_rate_in_kbps',          'value': '500'},
+]
+frame_data_types = {'frame_number': ('number', 'Frame number'),
+'encoding_successful': ('boolean', 'Encoding successful?'),
+'decoding_successful': ('boolean', 'Decoding successful?'),
+'encode_time': ('number', 'Encode time (us)'),
+'decode_time': ('number', 'Decode time (us)'),
+'encode_return_code': ('number', 'Encode return code'),
+'decode_return_code': ('number', 'Decode return code'),
+'bit_rate': ('number', 'Bit rate (kbps)'),
+'encoded_frame_length': ('number', 'Encoded frame length (bytes)'),
+'frame_type': ('string', 'Frame type'),
+'packets_dropped': ('number', 'Packets dropped'),
+'total_packets': ('number', 'Total packets'),
+'ssim': ('number', 'SSIM'),
+'psnr': ('number', 'PSNR (dB)'),
+}
+frame_data = [{'frame_number': 0, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 12427, 'decode_time': 4403, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 2270, 'encoded_frame_length': 9459, 'frame_type': 'Other', 'packets_dropped': 0, 'total_packets': 7, 'ssim': 0.947050, 'psnr': 38.332820},
+{'frame_number': 1, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 3292, 'decode_time': 821, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 88, 'encoded_frame_length': 368, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.927272, 'psnr': 35.883510},
+{'frame_number': 2, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4295, 'decode_time': 902, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 130, 'encoded_frame_length': 544, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.920539, 'psnr': 35.457107},
+{'frame_number': 3, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 3880, 'decode_time': 767, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 171, 'encoded_frame_length': 714, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.917434, 'psnr': 35.389298},
+{'frame_number': 4, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4471, 'decode_time': 909, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 248, 'encoded_frame_length': 1035, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.918892, 'psnr': 35.570229},
+{'frame_number': 5, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4447, 'decode_time': 976, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 269, 'encoded_frame_length': 1123, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.920609, 'psnr': 35.769663},
+{'frame_number': 6, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4432, 'decode_time': 891, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 271, 'encoded_frame_length': 1132, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.922672, 'psnr': 35.913519},
+{'frame_number': 7, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 5026, 'decode_time': 1068, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 366, 'encoded_frame_length': 1529, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.925505, 'psnr': 36.246713},
+{'frame_number': 8, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4877, 'decode_time': 1051, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 369, 'encoded_frame_length': 1538, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.926122, 'psnr': 36.305984},
+{'frame_number': 9, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4712, 'decode_time': 1087, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 406, 'encoded_frame_length': 1692, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.927183, 'psnr': 36.379735},
+]
diff --git a/trunk/tools/python_charts/gviz_api.py b/trunk/tools/python_charts/gviz_api.py
new file mode 120000
index 0000000..c9dca90
--- /dev/null
+++ b/trunk/tools/python_charts/gviz_api.py
@@ -0,0 +1 @@
+../../third_party/google-visualization-python/gviz_api.py
\ No newline at end of file
diff --git a/trunk/tools/python_charts/templates/chart_page_template.html b/trunk/tools/python_charts/templates/chart_page_template.html
new file mode 100644
index 0000000..1cb3951
--- /dev/null
+++ b/trunk/tools/python_charts/templates/chart_page_template.html
@@ -0,0 +1,90 @@
+<html>
+  <!--
+  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+
+  Template file to be used to generate Charts for Video Quality Metrics.
+   -->
+  <head>
+    <link href="http://code.google.com/css/codesite.pack.04102009.css"
+          rel="stylesheet" type="text/css" />
+  </head>
+  <script src="https://www.google.com/jsapi" type="text/javascript"></script>
+  <script>
+    google.load('visualization', '1', {packages:['table', 'corechart']});
+
+    google.setOnLoadCallback(drawTable);
+    function drawTable() {
+      /* Build data tables and views */
+      var configurations_data_table =
+        new google.visualization.DataTable(%(json_configurations)s);
+      var ssim_data_table =
+        new google.visualization.DataTable(%(json_ssim_data)s);
+      var psnr_data_table =
+        new google.visualization.DataTable(%(json_psnr_data)s);
+      var packet_loss_data_table =
+        new google.visualization.DataTable(%(json_packet_loss_data)s);
+      var bit_rate_data_table =
+        new google.visualization.DataTable(%(json_bit_rate_data)s);
+
+      /* Display tables and charts */
+      var configurations_table = new google.visualization.Table(
+        document.getElementById('table_div_configurations'));
+      configurations_table.draw(configurations_data_table, {
+        height: 200
+      });
+
+      var ssim_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_ssim'));
+      ssim_chart.draw(ssim_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink'],
+        vAxis: {title: 'SSIM'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+
+      var psnr_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_psnr'));
+      psnr_chart.draw(psnr_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink'],
+        vAxis: {title: 'PSNR (dB)'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+
+      var packet_loss_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_packet_loss'));
+      packet_loss_chart.draw(packet_loss_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink'],
+        vAxis: {title: 'Packets dropped'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+
+      var bit_rate_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_bit_rate'));
+      bit_rate_chart.draw(bit_rate_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink', 'green'],
+        vAxis: {title: 'Bit rate'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+    }
+  </script>
+  <body>
+    <h3>Test Configurations:</h3>
+    <div id="table_div_configurations"></div>
+    <h3>Messages:</h3>
+    <pre>%(messages)s</pre>
+    <h3>Metrics measured per frame:</h3>
+    <div id="table_div_ssim"></div>
+    <div id="table_div_psnr"></div>
+    <div id="table_div_packet_loss"></div>
+    <div id="table_div_bit_rate"></div>
+  </body>
+</html>
diff --git a/trunk/tools/python_charts/webrtc/__init__.py b/trunk/tools/python_charts/webrtc/__init__.py
new file mode 100644
index 0000000..c1caaa2
--- /dev/null
+++ b/trunk/tools/python_charts/webrtc/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
\ No newline at end of file
diff --git a/trunk/tools/python_charts/webrtc/data_helper.py b/trunk/tools/python_charts/webrtc/data_helper.py
new file mode 100644
index 0000000..fce949f
--- /dev/null
+++ b/trunk/tools/python_charts/webrtc/data_helper.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+class DataHelper(object):
+  """
+  Helper class for managing table data.
+  This class does not verify the consistency of the data tables sent into it.
+  """
+
+  def __init__(self, data_list, table_description, names_list, messages):
+    """ Initializes the DataHelper with data.
+    
+    Args:
+      data_list: List of one or more data lists in the format that the 
+        Google Visualization Python API expects (list of dictionaries, one
+        per row of data). See the gviz_api.DataTable documentation for more 
+        info.
+      table_description: dictionary describing the data types of all
+        columns in the data lists, as defined in the gviz_api.DataTable
+        documentation.
+      names_list: List of strings of what we're going to name the data
+        columns after. Usually different runs of data collection. 
+      messages: List of strings we might append error messages to.
+    """
+    self.data_list = data_list
+    self.table_description = table_description
+    self.names_list = names_list
+    self.messages = messages
+    self.number_of_datasets = len(data_list)
+    self.number_of_frames = len(data_list[0])
+    
+  def CreateData(self, field_name, start_frame=0, end_frame=0):
+    """ Creates a data structure for a specified data field.
+    
+    Creates a data structure (data type description dictionary and a list 
+    of data dictionaries) to be used with the Google Visualization Python 
+    API. The frame_number column is always present and one column per data
+    set is added and its field name is suffixed by _N where N is the number 
+    of the data set (0, 1, 2...)
+    
+    Args:
+      field_name: String name of the field, must be present in the data
+        structure this DataHelper was created with.
+      start_frame: Frame number to start at (zero indexed). Default: 0.
+      end_frame: Frame number to be the last frame. If zero all frames 
+        will be included. Default: 0.
+        
+    Returns:
+      A tuple containing:
+      - a dictionary describing the columns in the data result_data_table below.
+        This description uses the name for each data set specified by 
+        names_list.  
+        
+        Example with two data sets named 'Foreman' and 'Crew':
+        {
+         'frame_number': ('number', 'Frame number'),
+         'ssim_0': ('number', 'Foreman'),
+         'ssim_1': ('number', 'Crew'),
+         }
+      - a list containing dictionaries (one per row) with the frame_number
+        column and one column of the specified field_name column per data 
+        set. 
+        
+        Example with two data sets named 'Foreman' and 'Crew':
+        [
+         {'frame_number': 0, 'ssim_0': 0.98, 'ssim_1': 0.77 },
+         {'frame_number': 1, 'ssim_0': 0.81, 'ssim_1': 0.53 },
+        ]
+    """
+    
+    # Build dictionary that describes the data types
+    result_table_description = {'frame_number': ('string', 'Frame number')} 
+    for dataset_index in range(self.number_of_datasets):
+      column_name = '%s_%s' % (field_name, dataset_index)
+      column_type = self.table_description[field_name][0]
+      column_description = self.names_list[dataset_index]
+      result_table_description[column_name] = (column_type, column_description)
+
+    # Build data table of all the data        
+    result_data_table = []
+    # We're going to have one dictionary per row. 
+    # Create that and copy frame_number values from the first data set
+    for source_row in self.data_list[0]:
+      row_dict = { 'frame_number': source_row['frame_number'] }
+      result_data_table.append(row_dict)
+    
+    # Pick target field data points from the all data tables
+    if end_frame == 0:  # Default to all frames
+      end_frame = self.number_of_frames
+      
+    for dataset_index in range(self.number_of_datasets):
+      for row_number in range(start_frame, end_frame):
+        column_name = '%s_%s' % (field_name, dataset_index)
+        # Stop if any of the data sets are missing the frame
+        try:
+          result_data_table[row_number][column_name] = \
+          self.data_list[dataset_index][row_number][field_name]
+        except IndexError:
+          self.messages.append("Couldn't find frame data for row %d "
+          "for %s" % (row_number, self.names_list[dataset_index])) 
+          break
+    return result_table_description, result_data_table
+
+  def GetOrdering(self, table_description):
+    """ Creates a list of column names, ordered alphabetically except for the
+      frame_number column which always will be the first column.
+     
+      Args:
+        table_description: A dictionary of column definitions as defined by the
+          gviz_api.DataTable documentation.
+      Returns:
+        A list of column names, where frame_number is the first and the
+        remaining columns are sorted alphabetically.
+    """
+    # The JSON data representation generated from gviz_api.DataTable.ToJSon()
+    # must have frame_number as its first column in order for the chart to 
+    # use it as its X-axis value series.
+    # gviz_api.DataTable orders the columns by name by default, which will 
+    # be incorrect if we have column names that are sorted before frame_number
+    # in our data table.
+    columns_ordering = ['frame_number']
+    # add all other columns:
+    for column in sorted(table_description.keys()):
+      if column != 'frame_number':
+        columns_ordering.append(column)
+    return columns_ordering
+  
+  def CreateConfigurationTable(self, configurations):
+    """ Combines multiple test data configurations for display.
+
+    Args:
+      configurations: List of one or more configurations. Each configuration
+      is required to be a list of dictionaries with two keys: 'name' and
+      'value'.
+      Example of a single configuration:
+      [
+        {'name': 'name', 'value': 'VP8 software'},
+        {'name': 'test_number', 'value': '0'},
+        {'name': 'input_filename', 'value': 'foreman_cif.yuv'},
+      ]
+    Returns:
+      A tuple containing:
+      - a dictionary describing the columns in the configuration table to be
+        displayed. All columns will have string as data type.
+        Example:
+        {
+          'name': 'string',
+          'test_number': 'string',
+          'input_filename': 'string',
+         }
+      - a list containing dictionaries (one per configuration) with the
+        configuration column names mapped to the value for each test run:
+
+        Example matching the columns above:
+        [
+         {'name': 'VP8 software',
+          'test_number': '12',
+          'input_filename': 'foreman_cif.yuv' },
+         {'name': 'VP8 hardware',
+          'test_number': '5',
+          'input_filename': 'foreman_cif.yuv' },
+        ]
+    """
+    result_description = {}
+    result_data = []
+
+    for configuration in configurations:
+      data = {}
+      result_data.append(data)
+      for dict in configuration:
+        name = dict['name']
+        value = dict['value']
+        result_description[name] = 'string'
+        data[name] = value
+    return result_description, result_data
diff --git a/trunk/tools/python_charts/webrtc/data_helper_test.py b/trunk/tools/python_charts/webrtc/data_helper_test.py
new file mode 100644
index 0000000..6282f7b
--- /dev/null
+++ b/trunk/tools/python_charts/webrtc/data_helper_test.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+import unittest
+import webrtc.data_helper
+
+class Test(unittest.TestCase):
+
+  def setUp(self):
+    # Simulate frame data from two different test runs, with 2 frames each.
+    self.frame_data_0 = [{'frame_number': 0, 'ssim': 0.5, 'psnr': 30.5}, 
+                         {'frame_number': 1, 'ssim': 0.55, 'psnr': 30.55}]
+    self.frame_data_1 = [{'frame_number': 0, 'ssim': 0.6, 'psnr': 30.6},
+                         {'frame_number': 0, 'ssim': 0.66, 'psnr': 30.66}]
+    self.all_data = [ self.frame_data_0, self.frame_data_1 ]
+    
+    # Test with frame_number column in a non-first position since we need to
+    # support reordering that to be able to use the gviz_api as we want.
+    self.type_description = {
+                             'ssim': ('number', 'SSIM'),
+                             'frame_number': ('number', 'Frame number'),
+                             'psnr': ('number', 'PSRN'),
+    }
+    self.names = ["Test 0", "Test 1"]
+    self.configurations = [
+     [{'name': 'name', 'value': 'Test 0'},
+      {'name': 'test_number', 'value': '13'},
+      {'name': 'input_filename', 'value': 'foreman_cif.yuv'},
+     ],
+     [{'name': 'name', 'value': 'Test 1'},
+      {'name': 'test_number', 'value': '5'},
+      {'name': 'input_filename', 'value': 'foreman_cif.yuv'},
+     ],
+    ]
+
+  def testCreateData(self):
+    messages = []
+    helper = webrtc.data_helper.DataHelper(self.all_data, self.type_description,
+                                           self.names, messages)
+    description, data_table = helper.CreateData('ssim')
+    self.assertEqual(3, len(description))
+    self.assertTrue('frame_number' in description)
+    self.assertTrue('ssim_0' in description)
+    self.assertTrue('number' in description['ssim_0'][0])
+    self.assertTrue('Test 0' in description['ssim_0'][1])
+    self.assertTrue('ssim_1' in description)
+    self.assertTrue('number' in description['ssim_1'][0])
+    self.assertTrue('Test 1' in description['ssim_1'][1])
+
+    self.assertEqual(0, len(messages))
+
+    self.assertEquals(2, len(data_table))
+    row = data_table[0]
+    self.assertEquals(0, row['frame_number'])
+    self.assertEquals(0.5, row['ssim_0'])
+    self.assertEquals(0.6, row['ssim_1'])
+    row = data_table[1]
+    self.assertEquals(1, row['frame_number'])
+    self.assertEquals(0.55, row['ssim_0'])
+    self.assertEquals(0.66, row['ssim_1'])
+    
+    description, data_table = helper.CreateData('psnr') 
+    self.assertEqual(3, len(description))
+    self.assertTrue('frame_number' in description)
+    self.assertTrue('psnr_0' in description)
+    self.assertTrue('psnr_1' in description) 
+    self.assertEqual(0, len(messages)) 
+
+    self.assertEquals(2, len(data_table))
+    row = data_table[0]
+    self.assertEquals(0, row['frame_number'])
+    self.assertEquals(30.5, row['psnr_0'])
+    self.assertEquals(30.6, row['psnr_1'])
+    row = data_table[1]
+    self.assertEquals(1, row['frame_number'])
+    self.assertEquals(30.55, row['psnr_0'])
+    self.assertEquals(30.66, row['psnr_1'])
+  
+  def testGetOrdering(self):
+    """ Tests that the ordering help method returns a list with frame_number 
+       first and the rest sorted alphabetically """
+    messages = []
+    helper = webrtc.data_helper.DataHelper(self.all_data, self.type_description,
+                                           self.names, messages)
+    description, data_table = helper.CreateData('ssim')
+    columns = helper.GetOrdering(description)
+    self.assertEqual(3, len(columns))
+    self.assertEqual(0, len(messages))
+    self.assertEqual('frame_number', columns[0])
+    self.assertEqual('ssim_0', columns[1])
+    self.assertEqual('ssim_1', columns[2])
+    
+  def testCreateConfigurationTable(self):
+    messages = []
+    helper = webrtc.data_helper.DataHelper(self.all_data, self.type_description,
+                                           self.names, messages)
+    description, data = helper.CreateConfigurationTable(self.configurations)
+    self.assertEqual(3, len(description))  # 3 columns
+    self.assertEqual(2, len(data))  # 2 data sets
+    self.assertTrue(description.has_key('name'))
+    self.assertTrue(description.has_key('test_number'))
+    self.assertTrue(description.has_key('input_filename'))
+    self.assertEquals('Test 0', data[0]['name'])
+    self.assertEquals('Test 1', data[1]['name'])
+    
+if __name__ == "__main__":
+  unittest.main()
diff --git a/trunk/tools/python_charts/webrtc/main.py b/trunk/tools/python_charts/webrtc/main.py
new file mode 100644
index 0000000..82d8831
--- /dev/null
+++ b/trunk/tools/python_charts/webrtc/main.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+import os
+import gviz_api
+import webrtc.data_helper
+
+def main():
+  """
+  This Python script displays a web page with test data created with the
+  video_quality_measurement program, which is a tool in WebRTC.
+  
+  The script requires two external files and one Python library:
+  - A HTML template file with layout and references to the json variables 
+    defined in this script
+  - A data file in Python format, containing the following:
+    - test_configuration - a dictionary of test configuration names and values.
+    - frame_data_types - a dictionary that maps the different metrics to their 
+      data types.
+    - frame_data - a list of dictionaries where each dictionary maps a metric to 
+      its value.
+  - The gviz_api.py of the Google Visualization Python API, available at
+    http://code.google.com/p/google-visualization-python/
+  
+  The HTML file is shipped with the script, while the data file must be 
+  generated by running video_quality_measurement with the --python flag
+  specified.
+  """
+  print 'Content-type: text/html\n' # the newline is required!
+
+  page_template_filename = '../templates/chart_page_template.html'
+  # The data files must be located in the project tree for app engine being
+  # able to access them.
+  data_filenames = [ '../data/vp8_sw.py', '../data/vp8_hw.py' ]
+  # Will contain info/error messages to be displayed on the resulting page.
+  messages = []
+  # Load the page HTML template.
+  try:
+    f = open(page_template_filename)
+    page_template = f.read()
+    f.close()
+  except IOError as e:
+    ShowErrorPage('Cannot open page template file: %s<br>Details: %s' % 
+                  (page_template_filename, e))
+    return
+  
+  # Read data from external Python script files. First check that they exist.
+  for filename in data_filenames:
+    if not os.path.exists(filename):
+      messages.append('Cannot open data file: %s' % filename)
+      data_filenames.remove(filename)
+  
+  # Read data from all existing input files.
+  data_list = []
+  test_configurations = []
+  names = []
+  
+  for filename in data_filenames:
+    read_vars = {} # empty dictionary to load the data into.
+    execfile(filename, read_vars, read_vars)
+    
+    test_configuration = read_vars['test_configuration']
+    table_description = read_vars['frame_data_types']
+    table_data = read_vars['frame_data']
+    
+    # Verify the data in the file loaded properly.
+    if not table_description or not table_data:
+      messages.append('Invalid input file: %s. Missing description list or '
+                      'data dictionary variables.' % filename)
+      continue
+    
+    # Frame numbers appear as number type in the data, but Chart API requires
+    # values of the X-axis to be of string type.
+    # Change the frame_number column data type: 
+    table_description['frame_number'] = ('string', 'Frame number')
+    # Convert all the values to string types: 
+    for row in table_data:
+      row['frame_number'] = str(row['frame_number'])
+    
+    # Store the unique data from this file in the high level lists.
+    test_configurations.append(test_configuration)
+    data_list.append(table_data)
+    # Name of the test run must be present.
+    test_name = FindConfiguration(test_configuration, 'name')
+    if not test_name:
+      messages.append('Invalid input file: %s. Missing configuration key ' 
+                      '"name"', filename)
+      continue
+    names.append(test_name)
+    
+  # Create data helper and build data tables for each graph.
+  helper = webrtc.data_helper.DataHelper(data_list, table_description, 
+                                         names, messages)
+    
+  # Loading it into gviz_api.DataTable objects and create JSON strings.
+  description, data = helper.CreateConfigurationTable(test_configurations)
+  configurations = gviz_api.DataTable(description, data)
+  json_configurations = configurations.ToJSon()
+  
+  description, data = helper.CreateData('ssim')
+  ssim = gviz_api.DataTable(description, data)
+  json_ssim_data = ssim.ToJSon(helper.GetOrdering(description))
+  
+  description, data = helper.CreateData('psnr')
+  psnr = gviz_api.DataTable(description, data)
+  json_psnr_data = psnr.ToJSon(helper.GetOrdering(description))
+  
+  description, data = helper.CreateData('packets_dropped')
+  packet_loss = gviz_api.DataTable(description, data)
+  json_packet_loss_data = packet_loss.ToJSon(helper.GetOrdering(description))  
+  
+  description, data = helper.CreateData('bit_rate')
+  # Add a column of data points for the desired bit rate to be plotted.
+  # (uses test configuration from the last data set, assuming it is the same 
+  # for all of them)
+  desired_bit_rate = FindConfiguration(test_configuration, 'bit_rate_in_kbps')
+  if not desired_bit_rate:
+    ShowErrorPage('Cannot configuration field named "bit_rate_in_kbps"')
+    return
+  desired_bit_rate = int(desired_bit_rate)
+  # Add new column data type description.
+  description['desired_bit_rate'] = ('number', 'Desired bit rate (kbps)')
+  for row in data:
+    row['desired_bit_rate'] = desired_bit_rate
+  bit_rate = gviz_api.DataTable(description, data)
+  json_bit_rate_data = bit_rate.ToJSon(helper.GetOrdering(description))
+
+  # Format the messages list with newlines.
+  messages = '\n'.join(messages)
+  
+  # Put the variables as JSon strings into the template.
+  print page_template % vars()
+
+def FindConfiguration(configuration, name):
+  """ Finds a configuration value using its name.
+      Returns the first configuration with a matching name. Returns None if no
+      matching configuration is found. """
+  return_value = None
+  for row in configuration:
+    if row['name'] == name:
+      return_value = row['value']
+      break
+  return return_value
+
+def ShowErrorPage(error_message):
+  print '<html><body>%s</body></html>' % error_message
+  
+if __name__ == '__main__':
+  main()
diff --git a/trunk/tools/quality_tracking/OWNERS b/trunk/tools/quality_tracking/OWNERS
new file mode 100644
index 0000000..323e8e7
--- /dev/null
+++ b/trunk/tools/quality_tracking/OWNERS
@@ -0,0 +1,2 @@
+phoglund@webrtc.org
+kjellander@webrtc.org
diff --git a/trunk/tools/quality_tracking/README b/trunk/tools/quality_tracking/README
new file mode 100644
index 0000000..faf3e7a
--- /dev/null
+++ b/trunk/tools/quality_tracking/README
@@ -0,0 +1,31 @@
+This file describes the coverage tracking script and the coverage dashboard.
+
+ABSTRACT:
+The intention of this small tracking system is to track code coverage data
+over time. Since code coverage is continuously recomputed on the build bots,
+the track_coverage.py script is intended to run on the build bot as a cron job
+and extract the data from there. The dashboard doesn't care how often this
+script runs, but running each hour should be more than enough.
+
+The track_coverage.py script uses OAuth to authenticate itself. In order to do
+this, it needs two files: consumer.secret and access.token. The consumer secret
+is known within the organization and is stored in a plain file on the bot 
+running the scripts (we don't want to check in this secret in the code in the
+public repository). The consumer secret is a plain file with a single line
+containing the secret string.
+
+The access.token file is generated by request_oauth_permission.py. It does this
+by going through the three-legged OAuth authorization process. An administrator
+of the dashboard must approve the request from the script. Once that is done,
+access.token will be written and track_coverage.py will be able to report
+results.
+
+HOW TO RUN LOCALLY:
+Follow the following instructions:
+http://code.google.com/appengine/docs/python/gettingstartedpython27/devenvironment.html
+The dashboard can be started on 127.0.0.1:8080 using the dev_appserver.py script 
+as described in the above URL (and in the following 'hello world' page).
+
+HOW TO DEPLOY:
+Follow the following instructions:
+http://code.google.com/appengine/docs/python/gettingstartedpython27/uploading.html
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/constants.py b/trunk/tools/quality_tracking/constants.py
new file mode 100644
index 0000000..c6aac74
--- /dev/null
+++ b/trunk/tools/quality_tracking/constants.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Contains tweakable constants for quality dashboard utility scripts."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+# This identifies our application using the information we got when we
+# registered the application on Google appengine.
+DASHBOARD_SERVER = 'webrtc-dashboard.appspot.com'
+DASHBOARD_SERVER_HTTP = 'http://' + DASHBOARD_SERVER
+CONSUMER_KEY = DASHBOARD_SERVER
+CONSUMER_SECRET_FILE = 'consumer.secret'
+ACCESS_TOKEN_FILE = 'access.token'
+
+# OAuth URL:s.
+REQUEST_TOKEN_URL = DASHBOARD_SERVER_HTTP + '/_ah/OAuthGetRequestToken'
+AUTHORIZE_TOKEN_URL = DASHBOARD_SERVER_HTTP + '/_ah/OAuthAuthorizeToken'
+ACCESS_TOKEN_URL = DASHBOARD_SERVER_HTTP + '/_ah/OAuthGetAccessToken'
+
+# The build master URL.
+BUILD_MASTER_SERVER = 'webrtc-cb-linux-master.cbf.corp.google.com:8010'
+BUILD_MASTER_TRANSPOSED_GRID_URL = '/tgrid'
+
+# Build bot constants.
+BUILD_BOT_COVERAGE_WWW_DIRECTORY = '/var/www/'
+
+# Dashboard data input URLs.
+ADD_COVERAGE_DATA_URL = DASHBOARD_SERVER_HTTP + '/add_coverage_data'
+ADD_BUILD_STATUS_DATA_URL = DASHBOARD_SERVER_HTTP + '/add_build_status_data'
diff --git a/trunk/tools/quality_tracking/dashboard/add_build_status_data.py b/trunk/tools/quality_tracking/dashboard/add_build_status_data.py
new file mode 100644
index 0000000..106d178
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/add_build_status_data.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements a handler for adding build status data."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import datetime
+import logging
+
+from google.appengine.ext import db
+
+import oauth_post_request_handler
+
+VALID_STATUSES = ['OK', 'failed', 'building', 'warnings']
+
+
+class OrphanedBuildStatusesExistException(Exception):
+  pass
+
+
+class BuildStatusRoot(db.Model):
+  """Exists solely to be the root parent for all build status data and to keep
+     track of when the last update was made.
+
+     Since all build status data will refer to this as their parent,
+     we can run transactions on the build status data as a whole.
+  """
+  last_updated_at = db.DateTimeProperty()
+
+
+class BuildStatusData(db.Model):
+  """This represents one build status report from the build bot."""
+  bot_name = db.StringProperty(required=True)
+  revision = db.IntegerProperty(required=True)
+  build_number = db.IntegerProperty(required=True)
+  status = db.StringProperty(required=True)
+
+
+def _ensure_build_status_root_exists():
+  root = db.GqlQuery('SELECT * FROM BuildStatusRoot').get()
+  if not root:
+    # Create a new root, but ensure we don't have any orphaned build statuses
+    # (in that case, we would not have a single entity group as we desire).
+    orphans = db.GqlQuery('SELECT * FROM BuildStatusData').get()
+    if orphans:
+      raise OrphanedBuildStatusesExistException('Parent is gone and there are '
+                                                'orphaned build statuses in '
+                                                'the database!')
+    root = BuildStatusRoot()
+    root.put()
+
+  return root
+
+
+def _filter_oauth_parameters(post_keys):
+  return filter(lambda post_key: not post_key.startswith('oauth_'),
+                post_keys)
+
+
+def _parse_status(build_number_and_status):
+  parsed_status = build_number_and_status.split('--')
+  if len(parsed_status) != 2:
+    raise ValueError('Malformed status string %s.' % build_number_and_status)
+
+  parsed_build_number = int(parsed_status[0])
+  status = parsed_status[1]
+
+  if status not in VALID_STATUSES:
+    raise ValueError('Invalid status in %s.' % build_number_and_status)
+
+  return (parsed_build_number, status)
+
+
+def _parse_name(revision_and_bot_name):
+  parsed_name = revision_and_bot_name.split('--')
+  if len(parsed_name) != 2:
+    raise ValueError('Malformed name string %s.' % revision_and_bot_name)
+
+  revision = parsed_name[0]
+  bot_name = parsed_name[1]
+  if '\n' in bot_name:
+    raise ValueError('Bot name %s can not contain newlines.' % bot_name)
+
+  return (int(revision), bot_name)
+
+
+def _delete_all_with_revision(revision, build_status_root):
+  query_result = db.GqlQuery('SELECT * FROM BuildStatusData '
+                             'WHERE revision = :1 AND ANCESTOR IS :2',
+                             revision, build_status_root)
+  for entry in query_result:
+    entry.delete()
+
+
+class AddBuildStatusData(oauth_post_request_handler.OAuthPostRequestHandler):
+  """Used to report build status data.
+
+     Build status data is reported as a POST request. The POST request, aside
+     from the required oauth_* parameters should contain name-value entries that
+     abide by the following rules:
+
+     1) The name should be on the form <revision>--<bot name>, for instance
+        1568--Win32Release.
+     2) The value should be on the form <build number>--<status>, for instance
+        553--OK, 554--building. The status is permitted to be failed, OK or
+        building.
+
+    Data is keyed by revision. This handler will delete all data from a revision
+    if data with that revision is present in the current update, since we
+    assume that more recent data is always better data. We also assume that
+    an update always has complete information on a revision (e.g. the status
+    for all the bots are reported in each update).
+
+    In particular the revision arrangement solves the problem when the latest
+    revision reports 'building' for a bot. Had we not deleted the old revision
+    we would first store a 'building' status for that bot and revision, and
+    later store a 'OK' or 'failed' status for that bot and revision. This is
+    undesirable since we don't want multiple statuses for one bot-revision
+    combination. Now we will effectively update the bot's status instead.
+  """
+
+  def _parse_and_store_data(self):
+    build_status_root = _ensure_build_status_root_exists()
+    build_status_data = _filter_oauth_parameters(self.request.arguments())
+
+    db.run_in_transaction(self._parse_and_store_data_in_transaction,
+                          build_status_root, build_status_data)
+
+  def _parse_and_store_data_in_transaction(self, build_status_root,
+                                           build_status_data):
+
+    encountered_revisions = set()
+    for revision_and_bot_name in build_status_data:
+      build_number_and_status = self.request.get(revision_and_bot_name)
+
+      try:
+        (build_number, status) = _parse_status(build_number_and_status)
+        (revision, bot_name) = _parse_name(revision_and_bot_name)
+      except ValueError as error:
+        logger.warn('Invalid parameter in request: %s.' % error)
+        self.response.set_status(400)
+        return
+
+      if revision not in encountered_revisions:
+        # There's new data on this revision in this update, so clear all status
+        # entries with that revision. Only do this once when we first encounter
+        # the revision.
+        _delete_all_with_revision(revision, build_status_root)
+        encountered_revisions.add(revision)
+
+      # Finally, write the item.
+      item = BuildStatusData(parent=build_status_root,
+                             bot_name=bot_name,
+                             revision=revision,
+                             build_number=build_number,
+                             status=status)
+      item.put()
+
+    request_posix_timestamp = float(self.request.get('oauth_timestamp'))
+    request_datetime = datetime.datetime.fromtimestamp(request_posix_timestamp)
+    build_status_root.last_updated_at = request_datetime
+    build_status_root.put()
+
diff --git a/trunk/tools/quality_tracking/dashboard/add_coverage_data.py b/trunk/tools/quality_tracking/dashboard/add_coverage_data.py
new file mode 100644
index 0000000..1c9d9da
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/add_coverage_data.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements a handler for adding coverage data."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import datetime
+import logging
+
+from google.appengine.ext import db
+
+import oauth_post_request_handler
+
+class CoverageData(db.Model):
+  """This represents one coverage report from the build bot."""
+  # When the coverage observation was made (parsed from a POSIX timestamp
+  # in the 'date' POST parameter).
+  date = db.DateTimeProperty(required=True)
+  # Percentages in the interval 0.0-100.0 (validated by _parse_percentage).
+  line_coverage = db.FloatProperty(required=True)
+  function_coverage = db.FloatProperty(required=True)
+
+
+def _parse_percentage(string_value):
+  percentage = float(string_value)
+  if percentage < 0.0 or percentage > 100.0:
+    raise ValueError('%s is not a valid percentage.' % string_value)
+  return percentage
+
+
+class AddCoverageData(oauth_post_request_handler.OAuthPostRequestHandler):
+  """Used to report coverage data.
+
+     Coverage data is reported as a POST request and should contain, aside from
+     the regular oauth_* parameters, these values:
+
+     date: The POSIX timestamp for when the coverage observation was made.
+     line_coverage: A float percentage in the interval 0-100.0.
+     function_coverage: A float percentage in the interval 0-100.0.
+  """
+
+  def _parse_and_store_data(self):
+    try:
+      posix_time = int(self.request.get('date'))
+      parsed_date = datetime.datetime.fromtimestamp(posix_time)
+
+      line_coverage_string = self.request.get('line_coverage')
+      line_coverage = _parse_percentage(line_coverage_string)
+      function_coverage_string = self.request.get('function_coverage')
+      function_coverage = _parse_percentage(function_coverage_string)
+
+    except ValueError as error:
+      logger.warn('Invalid parameter in request: %s.' % error)
+      self.response.set_status(400)
+      return
+
+    item = CoverageData(date=parsed_date,
+                        line_coverage=line_coverage,
+                        function_coverage=function_coverage)
+    item.put()
+
diff --git a/trunk/tools/quality_tracking/dashboard/app.yaml b/trunk/tools/quality_tracking/dashboard/app.yaml
new file mode 100644
index 0000000..d06a813
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/app.yaml
@@ -0,0 +1,24 @@
+application: webrtc-dashboard
+version: 1
+runtime: python27
+api_version: 1
+threadsafe: false
+
+handlers:
+# Serve stylesheets statically.
+- url: /stylesheets
+  static_dir: stylesheets
+# This magic file is here to prove to the Google Account Domain Management
+# that we own this domain. It needs to stay there so the domain management
+# doesn't get suspicious.
+- url: /google403c95edcde16425.html
+  static_files: static/google403c95edcde16425.html
+  upload: static/google403c95edcde16425.html
+
+# Note: tests should be disabled in production.
+# - url: /test.*
+#  script: gaeunit.py
+
+# Redirect all other requests to our dynamic handlers.
+- url: /.*
+  script: dashboard.app
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/dashboard/dashboard.py b/trunk/tools/quality_tracking/dashboard/dashboard.py
new file mode 100644
index 0000000..5cc5a61
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/dashboard.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements the quality tracker dashboard and reporting facilities."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.ext.webapp import template
+import webapp2
+
+import add_build_status_data
+import add_coverage_data
+import load_build_status
+import load_coverage
+
+
+class ShowDashboard(webapp2.RequestHandler):
+  """Shows the dashboard page.
+
+     The page is shown by grabbing data we have stored previously
+     in the App Engine database using the AddCoverageData handler.
+  """
+
+  def get(self):
+    """Renders the dashboard, or a plain error page if no data exists yet."""
+    build_status_loader = load_build_status.BuildStatusLoader()
+    build_status_data = build_status_loader.load_build_status_data()
+    last_updated_at = build_status_loader.load_last_modified_at()
+    if last_updated_at is None:
+      # load_last_modified_at returns None only when the datastore is
+      # completely empty, i.e. nothing has been reported yet.
+      self._show_error_page("No data has yet been uploaded to the dashboard.")
+      return
+
+    last_updated_at = last_updated_at.strftime("%Y-%m-%d %H:%M")
+    lkgr = build_status_loader.compute_lkgr()
+
+    coverage_loader = load_coverage.CoverageDataLoader()
+    coverage_json_data = coverage_loader.load_coverage_json_data()
+
+    # vars() hands all the locals computed above (build_status_data,
+    # last_updated_at, lkgr, coverage_json_data, ...) to the template.
+    page_template_filename = 'templates/dashboard_template.html'
+    self.response.write(template.render(page_template_filename, vars()))
+
+  def _show_error_page(self, error_message):
+    # Minimal fallback page; no template involved.
+    self.response.write('<html><body>%s</body></html>' % error_message)
+
+
+# URL routing table for the WSGI application (referenced as dashboard.app
+# from app.yaml). debug=True enables webapp2's debug output on errors;
+# consider disabling it in production.
+app = webapp2.WSGIApplication([('/', ShowDashboard),
+                               ('/add_coverage_data',
+                                add_coverage_data.AddCoverageData),
+                               ('/add_build_status_data',
+                                add_build_status_data.AddBuildStatusData)],
+                              debug=True)
diff --git a/trunk/tools/quality_tracking/dashboard/gaeunit.py b/trunk/tools/quality_tracking/dashboard/gaeunit.py
new file mode 120000
index 0000000..a93f6bd
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/gaeunit.py
@@ -0,0 +1 @@
+../../../third_party/gaeunit/gaeunit.py
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/dashboard/gviz_api.py b/trunk/tools/quality_tracking/dashboard/gviz_api.py
new file mode 120000
index 0000000..f3a22fc
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/gviz_api.py
@@ -0,0 +1 @@
+../../../third_party/google-visualization-python/gviz_api.py
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/dashboard/load_build_status.py b/trunk/tools/quality_tracking/dashboard/load_build_status.py
new file mode 100644
index 0000000..9a0eebb
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/load_build_status.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Loads build status data for the dashboard."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.ext import db
+
+
+def _status_not_ok(status):
+  # 'OK' and 'warnings' both count as passing; anything else (e.g. 'failed')
+  # does not.
+  return status not in ('OK', 'warnings')
+
+
+def _all_ok(statuses):
+  return filter(_status_not_ok, statuses) == []
+
+
+def _get_first_entry(iterable):
+  """Returns the first item of the iterable, or None if there is none.
+
+     NOTE(review): the falsiness guard assumes the argument supports truth
+     testing (e.g. a GqlQuery object or a list) -- confirm for all callers.
+  """
+  if not iterable:
+    return None
+  for item in iterable:
+    return item
+
+
+class BuildStatusLoader:
+  """ Loads various build status data from the database."""
+
+  def load_build_status_data(self):
+    """Returns the latest conclusive build status for each bot.
+
+       The statuses OK, failed and warnings are considered to be conclusive.
+
+       The two most recent revisions are considered. The set of bots returned
+       will therefore be the bots that were reported the two most recent
+       revisions. This script will therefore adapt automatically to any changes
+       in the set of available bots.
+
+       Returns:
+           A list of BuildStatusData entities with one entity per bot.
+    """
+
+    build_status_entries = db.GqlQuery('SELECT * '
+                                       'FROM BuildStatusData '
+                                       'ORDER BY revision DESC ')
+
+    bots_to_latest_conclusive_entry = dict()
+    for entry in build_status_entries:
+      if entry.status == 'building':
+        # The 'building' status it not conclusive, so discard this entry and
+        # pick up the entry for this bot on the next revision instead. That
+        # entry is guaranteed to have a status != 'building' since a bot cannot
+        # be building two revisions simultaneously.
+        continue
+      # Note: dict.has_key is Python 2 only (this app targets the python27
+      # runtime, see app.yaml).
+      if bots_to_latest_conclusive_entry.has_key(entry.bot_name):
+        # We've already determined this bot's status.
+        continue
+
+      bots_to_latest_conclusive_entry[entry.bot_name] = entry
+
+    return bots_to_latest_conclusive_entry.values()
+
+  def load_last_modified_at(self):
+    """Returns when the dashboard data was last updated, or None if empty.
+
+       The timestamp is the one written by the add_build_status_data handler
+       from the reporting request's oauth_timestamp.
+    """
+    build_status_root = db.GqlQuery('SELECT * '
+                                    'FROM BuildStatusRoot').get()
+    if not build_status_root:
+      # Operating on completely empty database
+      return None
+
+    return build_status_root.last_updated_at
+
+  def compute_lkgr(self):
+    """ Finds the most recent revision for which all bots are green.
+
+        Returns:
+            The last known good revision (as an integer) or None if there
+            is no green revision in the database.
+
+        Implementation note: The data store fetches stuff as we go, so we won't
+        read in the whole status table unless the LKGR is right at the end or
+        we don't have a LKGR.
+    """
+    build_status_entries = db.GqlQuery('SELECT * '
+                                       'FROM BuildStatusData '
+                                       'ORDER BY revision DESC ')
+
+    first_entry = _get_first_entry(build_status_entries)
+    if first_entry is None:
+      # No entries => no LKGR
+      return None
+
+    # Walk entries newest-first, accumulating all statuses for one revision
+    # before judging it; the first fully-green revision wins.
+    current_lkgr = first_entry.revision
+    statuses_for_current_lkgr = [first_entry.status]
+
+    for entry in build_status_entries:
+      if current_lkgr == entry.revision:
+        statuses_for_current_lkgr.append(entry.status)
+      else:
+        # Starting on new revision, check previous revision.
+        if _all_ok(statuses_for_current_lkgr):
+          # All bots are green; LKGR found.
+          return current_lkgr
+        else:
+          # Not all bots are green, so start over on the next revision.
+          current_lkgr = entry.revision
+          statuses_for_current_lkgr = [entry.status]
+
+    if _all_ok(statuses_for_current_lkgr):
+      # There was only one revision and it was OK.
+      return current_lkgr
+
+    # There is no all-green revision in the database.
+    return None
diff --git a/trunk/tools/quality_tracking/dashboard/load_coverage.py b/trunk/tools/quality_tracking/dashboard/load_coverage.py
new file mode 100644
index 0000000..eafed3b
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/load_coverage.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Loads coverage data from the database."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.ext import db
+import gviz_api
+
+
+class CoverageDataLoader:
+  """ Loads coverage data from the database."""
+
+  def load_coverage_json_data(self):
+    coverage_entries = db.GqlQuery('SELECT * '
+                                   'FROM CoverageData '
+                                   'ORDER BY date ASC')
+    data = []
+    for coverage_entry in coverage_entries:
+      data.append({'date': coverage_entry.date,
+                   'line_coverage': coverage_entry.line_coverage,
+                   'function_coverage': coverage_entry.function_coverage,
+                  })
+
+    description = {
+        'date': ('datetime', 'Date'),
+        'line_coverage': ('number', 'Line Coverage'),
+        'function_coverage': ('number', 'Function Coverage')
+    }
+    coverage_data = gviz_api.DataTable(description, data)
+    return coverage_data.ToJSon(order_by='date')
diff --git a/trunk/tools/quality_tracking/dashboard/oauth_post_request_handler.py b/trunk/tools/quality_tracking/dashboard/oauth_post_request_handler.py
new file mode 100644
index 0000000..416e1b7
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/oauth_post_request_handler.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Provides a OAuth request handler base class."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.api import oauth
+import logging
+import webapp2
+
+
+class UserNotAuthenticatedException(Exception):
+  """Gets thrown if a user is not permitted to store data."""
+  pass
+
+
+class OAuthPostRequestHandler(webapp2.RequestHandler):
+  """Works like a normal request handler but adds OAuth authentication.
+
+     This handler will expect a proper OAuth request over POST. This abstract
+     class deals with the authentication but leaves user-defined data handling
+     to its subclasses. Subclasses should not implement the post() method but
+     the _parse_and_store_data() method. Otherwise they may act like regular
+     request handlers. Subclasses should NOT override the get() method.
+
+     The handler will accept an OAuth request if it is correctly formed and
+     the consumer is acting on behalf of an administrator for the dashboard.
+  """
+
+  def post(self):
+    """Authenticates the caller, then delegates to _parse_and_store_data.
+
+       Responds with HTTP 403 if authentication fails.
+    """
+    try:
+      self._authenticate_user()
+    except UserNotAuthenticatedException as exception:
+      logging.warn('Failed to authenticate: %s.' % exception)
+      self.response.set_status(403)
+      return
+
+    # Do the actual work.
+    self._parse_and_store_data()
+
+  def _parse_and_store_data(self):
+    """Reads data from POST request and responds accordingly."""
+    raise NotImplementedError('You must override this method!')
+
+  def _authenticate_user(self):
+    """Raises UserNotAuthenticatedException unless the request is a valid
+       OAuth request made on behalf of an application administrator.
+    """
+    try:
+      if oauth.is_current_user_admin():
+        # The user on whose behalf we are acting is indeed an administrator
+        # of this application, so we're good to go.
+        logging.info('Authenticated on behalf of user %s.' %
+                     oauth.get_current_user())
+        return
+      else:
+        raise UserNotAuthenticatedException('We are acting on behalf of '
+                                            'user %s, but that user is not '
+                                            'an administrator.' %
+                                            oauth.get_current_user())
+    except oauth.OAuthRequestError as exception:
+      raise UserNotAuthenticatedException('Invalid OAuth request: %s' %
+                                          exception.__class__.__name__)
diff --git a/trunk/tools/quality_tracking/dashboard/static/google403c95edcde16425.html b/trunk/tools/quality_tracking/dashboard/static/google403c95edcde16425.html
new file mode 100644
index 0000000..95c7e2d
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/static/google403c95edcde16425.html
@@ -0,0 +1 @@
+google-site-verification: google403c95edcde16425.html
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/dashboard/stylesheets/stylesheet.css b/trunk/tools/quality_tracking/dashboard/stylesheets/stylesheet.css
new file mode 100644
index 0000000..c6eb679
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/stylesheets/stylesheet.css
@@ -0,0 +1,45 @@
+/********************************************************************
+*
+*  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+*
+*  Use of this source code is governed by a BSD-style license
+*  that can be found in the LICENSE file in the root of the source
+*  tree. An additional intellectual property rights grant can be found
+*  in the file PATENTS.  All contributing project authors may
+*  be found in the AUTHORS file in the root of the source tree.
+*
+*********************************************************************/
+
+.status_OK {
+  color: #FFFFFF;
+  background-color: #8fdf5f;
+}
+
+.status_failed {
+  color: #FFFFFF;
+  background-color: #e98080;
+}
+
+.status_building {
+  color: #666666;
+  background-color: #fffc6c;
+}
+
+.status_warnings {
+  color: #000000;
+  background-color: #FFC343;
+}
+
+.last_known_good_revision {
+  font-size: 800%;
+}
+
+.status_cell {
+  width: 100px;
+  text-align: center;
+}
+
+body {
+  margin-left: 35px;
+  margin-top: 25px;
+}
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/dashboard/templates/dashboard_template.html b/trunk/tools/quality_tracking/dashboard/templates/dashboard_template.html
new file mode 100644
index 0000000..ab019d5
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/templates/dashboard_template.html
@@ -0,0 +1,86 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
+    "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+  <!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+
+  Template file to be used to generate the WebRTC dashboard.
+  -->
+  <head>
+    <title>WebRTC Coverage Dashboard</title>
+    <meta http-equiv="refresh" content="60">
+    <link href="http://code.google.com/css/codesite.pack.04102009.css"
+          rel="stylesheet" type="text/css">
+    <link href="stylesheets/stylesheet.css"
+          rel="stylesheet" type="text/css">
+
+    <script src="https://www.google.com/jsapi" type="text/javascript"></script>
+    <script type="text/javascript">
+      google.load('visualization', '1', {packages:['table', 'corechart']});
+
+      google.setOnLoadCallback(drawTable);
+      function drawTable() {
+        /* Build data tables and views */
+        {% comment %}
+          Disable Django auto-escaping here since that will mess up our
+          coverage table JSON data otherwise.
+        {% endcomment %}
+        {% autoescape off %}
+        var coverage_data_table =
+          new google.visualization.DataTable({{ coverage_json_data }});
+        {% endautoescape %}
+
+        /* Display tables and charts */
+        var coverage_chart = new google.visualization.LineChart(
+          document.getElementById('table_div_coverage'));
+        coverage_chart.draw(coverage_data_table, {
+          colors: ['blue', 'red'],
+          vAxis: {title: 'Coverage'},
+          hAxis: {title: 'Date'},
+          width: 1200, height: 300,
+        });
+      }
+    </script>
+  </head>
+  <body>
+
+    <h1>WebRTC Quality Dashboard</h1>
+    <h2>Current Build Status</h2>
+    <div>(as of {{ last_updated_at }} UTC)</div>
+    <table>
+      <tr>
+      {% for entry in build_status_data %}
+        <th class="status_cell">{{ entry.bot_name }}</th>
+      {% endfor %}
+      </tr>
+      <tr>
+      {% for entry in build_status_data %}
+        <td title="Last built revision {{ entry.revision }}"
+            class="status_cell status_{{entry.status}}">
+          {{entry.status}}
+        </td>
+      {% endfor %}
+      </tr>
+    </table>
+    <p></p>
+
+    <h2>Last Known Good Revision (LKGR)</h2>
+    <div class="last_known_good_revision">
+      {% if lkgr  %}
+        <a href="http://code.google.com/p/webrtc/source/detail?r={{ lkgr }}">
+          {{ lkgr }}</a>
+      {% else %}
+        ????
+      {% endif %}
+    </div>
+
+    <h2>Code Coverage History</h2>
+    <div id="table_div_coverage"></div>
+  </body>
+</html>
diff --git a/trunk/tools/quality_tracking/dashboard/test/load_build_status_test.py b/trunk/tools/quality_tracking/dashboard/test/load_build_status_test.py
new file mode 100755
index 0000000..56c9379
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard/test/load_build_status_test.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import unittest
+from google.appengine.ext import db
+from google.appengine.ext import testbed
+
+from add_build_status_data import BuildStatusData
+import load_build_status
+
+class LoadBuildStatusTest(unittest.TestCase):
+  """Tests BuildStatusLoader against an in-memory datastore testbed."""
+
+  def setUp(self):
+    # First, create an instance of the Testbed class.
+    self.testbed = testbed.Testbed()
+    # Then activate the testbed, which prepares the service stubs for use.
+    self.testbed.activate()
+    # Next, declare which service stubs you want to use.
+    self.testbed.init_datastore_v3_stub()
+
+  def test_returns_latest_nonbuilding_entries_when_loading_build_status(self):
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=17,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot3", revision=17,
+                    build_number=344, status="failed").put()
+    BuildStatusData(bot_name="Bot1", revision=18,
+                    build_number=499, status="building").put()
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="failed").put()
+    BuildStatusData(bot_name="Bot3", revision=18,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    result = loader.load_build_status_data()
+
+    self.assertEqual(3, len(result))
+
+    # We make no guarantees on order, but we can use the fact that the testbed
+    # is deterministic to evaluate that the correct bots were selected like so:
+    self.assertEqual("Bot1", result[0].bot_name)
+    self.assertEqual(17, result[0].revision)
+    self.assertEqual("OK", result[0].status)
+
+    self.assertEqual("Bot3", result[1].bot_name)
+    self.assertEqual(18, result[1].revision)
+    self.assertEqual("OK", result[1].status)
+
+    self.assertEqual("Bot2", result[2].bot_name)
+    self.assertEqual(18, result[2].revision)
+    self.assertEqual("failed", result[2].status)
+
+  def test_returns_lkgr_for_single_green_revision(self):
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=17,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot3", revision=17,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(17, loader.compute_lkgr())
+
+  def test_returns_correct_lkgr_with_most_recent_revision_failed(self):
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=17,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot3", revision=17,
+                    build_number=344, status="OK").put()
+    BuildStatusData(bot_name="Bot1", revision=18,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="failed").put()
+    BuildStatusData(bot_name="Bot3", revision=18,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(17, loader.compute_lkgr())
+
+  def test_returns_none_if_no_revisions(self):
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(None, loader.compute_lkgr())
+
+  def test_returns_none_if_no_green_revisions(self):
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="failed").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(None, loader.compute_lkgr())
+
+  def test_skips_partially_building_revisions(self):
+    # Revision 18 is still building on Bot1, so the LKGR must fall back to 17.
+    BuildStatusData(bot_name="Bot1", revision=18,
+                    build_number=499, status="building").put()
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(17, loader.compute_lkgr())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/trunk/tools/quality_tracking/dashboard_connection.py b/trunk/tools/quality_tracking/dashboard_connection.py
new file mode 100644
index 0000000..9a6e30f
--- /dev/null
+++ b/trunk/tools/quality_tracking/dashboard_connection.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Contains utilities for communicating with the dashboard."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import httplib
+import shelve
+import urlparse
+import oauth.oauth as oauth
+
+import constants
+
+
+class FailedToReadRequiredInputFile(Exception):
+  """Raised when a required shelve file (access token / secret) is missing."""
+  pass
+
+
+class FailedToReportToDashboard(Exception):
+  """Raised when the dashboard rejects or fails a data-reporting request."""
+  pass
+
+
+class DashboardConnection:
+  """Helper class for pushing data to the dashboard.
+
+     This class deals with most of the details for accessing protected
+     resources (i.e. data-writing operations) on the dashboard. Such
+     operations are authenticated using OAuth. This class requires a consumer
+     secret and an access token.
+
+     The access token and consumer secrets are stored as files on disk in the
+     working directory of the scripts. Both files are created by the
+     request_oauth_permission script.
+  """
+
+  def __init__(self, consumer_key):
+    self.consumer_key_ = consumer_key
+
+  def read_required_files(self, consumer_secret_file, access_token_file):
+    """Reads required data for making OAuth requests.
+
+       Args:
+           consumer_secret_file: A shelve file with an entry consumer_secret
+               containing the consumer secret in string form.
+           access_token_file: A shelve file with an entry access_token
+               containing the access token in string form.
+    """
+    self.access_token_string_ = self._read_access_token(access_token_file)
+    self.consumer_secret_ = self._read_consumer_secret(consumer_secret_file)
+
+  def send_post_request(self, url, parameters):
+    """Sends an OAuth request for a protected resource in the dashboard.
+
+       Use this when you want to report new data to the dashboard. You must have
+       called the read_required_files method prior to calling this method, since
+       that method will read in the consumer secret and access token we need to
+       make the OAuth request. These concepts are described in the class
+       description.
+
+       The server is expected to respond with HTTP status 200 and a completely
+       empty response if the call succeeded; otherwise the server may put
+       diagnostic information in the response.
+
+       Args:
+           url: An absolute url within the dashboard domain, for example
+               http://webrtc-dashboard.appspot.com/add_coverage_data.
+           parameters: A dict which maps from POST parameter names to values.
+
+       Raises:
+           FailedToReportToDashboard: If the dashboard didn't respond
+               with HTTP 200 to our request or if the response is non-empty.
+    """
+    consumer = oauth.OAuthConsumer(self.consumer_key_, self.consumer_secret_)
+    access_token = oauth.OAuthToken.from_string(self.access_token_string_)
+
+    oauth_request = oauth.OAuthRequest.from_consumer_and_token(
+                        consumer,
+                        token=access_token,
+                        http_method='POST',
+                        http_url=url,
+                        parameters=parameters)
+
+    # Sign the request so the server can verify it came from this consumer
+    # acting with this access token.
+    signature_method_hmac_sha1 = oauth.OAuthSignatureMethod_HMAC_SHA1()
+    oauth_request.sign_request(signature_method_hmac_sha1, consumer,
+                               access_token)
+
+    connection = httplib.HTTPConnection(constants.DASHBOARD_SERVER)
+
+    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+    connection.request('POST', url, body=oauth_request.to_postdata(),
+                       headers=headers)
+
+    response = connection.getresponse()
+    connection.close()
+
+    if response.status != 200:
+      message = ('Failed to report to %s: got response %d (%s)' %
+                 (url, response.status, response.reason))
+      raise FailedToReportToDashboard(message)
+
+    # The response content should be empty on success, so check that:
+    response_content = response.read()
+    if response_content:
+      message = ('Dashboard reported the following error: %s.' %
+                 response_content)
+      raise FailedToReportToDashboard(message)
+
+  def _read_access_token(self, filename):
+    # Reads the access token string written by request_oauth_permission.py.
+    return self._read_shelve(filename, 'access_token')
+
+  def _read_consumer_secret(self, filename):
+    # Reads the consumer secret string written by request_oauth_permission.py.
+    return self._read_shelve(filename, 'consumer_secret')
+
+  def _read_shelve(self, filename, key):
+    """Returns the value stored under 'key' in the shelve file 'filename'.
+
+       Raises FailedToReadRequiredInputFile if the key is absent.
+    """
+    input_file = shelve.open(filename)
+
+    # Note: has_key is Python 2 only.
+    if not input_file.has_key(key):
+      raise FailedToReadRequiredInputFile('Missing correct %s file in current '
+                                          'directory. You may have to run '
+                                          'request_oauth_permission.py.' %
+                                          filename)
+
+    result = input_file[key]
+    input_file.close()
+
+    return result
diff --git a/trunk/tools/quality_tracking/oauth2 b/trunk/tools/quality_tracking/oauth2
new file mode 120000
index 0000000..a4d1dde
--- /dev/null
+++ b/trunk/tools/quality_tracking/oauth2
@@ -0,0 +1 @@
+../../third_party/oauth2/oauth2/
\ No newline at end of file
diff --git a/trunk/tools/quality_tracking/request_oauth_permission.py b/trunk/tools/quality_tracking/request_oauth_permission.py
new file mode 100755
index 0000000..fb97738
--- /dev/null
+++ b/trunk/tools/quality_tracking/request_oauth_permission.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script requests an access token from the appengine running the dashboard.
+
+   The script is intended to be run manually whenever we wish to change which
+   dashboard administrator we act on behalf of when running the
+   track_coverage.py script. For example, this will be useful if the current
+   dashboard administrator leaves the project. This script can also be used to
+   launch a new dashboard if that is desired.
+
+   This script should be run on the build bot which runs the track_coverage.py
+   script. This script will present a link during its execution, which the new
+   administrator should follow and then click approve on the web page that
+   appears. The new administrator should have admin rights on the coverage
+   dashboard, otherwise the track_* scripts will not work.
+
+   If successful, this script will write the access token to a file access.token
+   in the current directory, which later can be read by the track_* scripts.
+   The token is stored in string form (as reported by the web server) using the
+   shelve module. The consumer secret passed in as an argument to this script
+   will also similarly be stored in a file consumer.secret. The shelve keys
+   will be 'access_token' and 'consumer_secret', respectively.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import shelve
+import sys
+import urlparse
+import oauth2 as oauth
+
+import constants
+
+
+class FailedToRequestPermissionException(Exception):
+  pass
+
+
+def _ensure_token_response_is_200(response, queried_url, token_type):
+  """Raises FailedToRequestPermissionException unless the response is 200."""
+  if response.status != 200:
+    raise FailedToRequestPermissionException('Failed to request %s from %s: '
+                                             'received status %d, reason %s.' %
+                                             (token_type,
+                                              queried_url,
+                                              response.status,
+                                              response.reason))
+
+
+def _request_unauthorized_token(consumer, request_token_url):
+  """Requests the initial token from the dashboard service.
+
+     Given that the response from the server is correct, we will return a
+     dictionary containing oauth_token and oauth_token_secret mapped to the
+     token and secret value, respectively.
+
+     Raises:
+         FailedToRequestPermissionException: if the server cannot be reached
+             or does not respond with status 200.
+  """
+  client = oauth.Client(consumer)
+
+  try:
+    response, content = client.request(request_token_url, 'POST')
+  except AttributeError as error:
+    # This catch handler is here since we'll get very confusing messages
+    # if the target server is down for some reason.
+    raise FailedToRequestPermissionException('Failed to request token: '
+                                             'the dashboard is likely down.',
+                                             error)
+
+  _ensure_token_response_is_200(response, request_token_url,
+                                'unauthorized token')
+
+  # The response body is URL-encoded (key=value&key=value); unpack to a dict.
+  return dict(urlparse.parse_qsl(content))
+
+
+def _ask_user_to_authorize_us(unauthorized_token):
+  """This function will block until the user enters y + newline."""
+  print 'Go to the following link in your browser:'
+  print '%s?oauth_token=%s' % (constants.AUTHORIZE_TOKEN_URL,
+                               unauthorized_token['oauth_token'])
+
+  # Poll the user until they confirm; only 'y' (any case) ends the loop.
+  accepted = 'n'
+  while accepted.lower() != 'y':
+    accepted = raw_input('Have you authorized me yet? (y/n) ')
+
+
+def _request_access_token(consumer, unauthorized_token):
+  """Exchanges an authorized request token for an access token.
+
+     Returns: the access token in raw string form, exactly as reported by
+         the server (see the module docstring).
+  """
+  token = oauth.Token(unauthorized_token['oauth_token'],
+                      unauthorized_token['oauth_token_secret'])
+  client = oauth.Client(consumer, token)
+  response, content = client.request(constants.ACCESS_TOKEN_URL, 'POST')
+
+  _ensure_token_response_is_200(response, constants.ACCESS_TOKEN_URL,
+                                'access token')
+
+  return content
+
+
+def _write_access_token_to_file(access_token, filename):
+  """Stores the access token in a shelve file under key 'access_token'."""
+  output = shelve.open(filename)
+  output['access_token'] = access_token
+  output.close()
+
+  print 'Wrote the access token to the file %s.' % filename
+
+
+def _write_consumer_secret_to_file(consumer_secret, filename):
+  """Stores the consumer secret in a shelve file under key 'consumer_secret'."""
+  output = shelve.open(filename)
+  output['consumer_secret'] = consumer_secret
+  output.close()
+
+  print 'Wrote the consumer secret to the file %s.' % filename
+
+
+def _main():
+  """Runs the three-legged OAuth dance and stores the resulting credentials."""
+  if len(sys.argv) != 2:
+    print ('Usage: %s <consumer secret>.\n\nThe consumer secret is an OAuth '
+           'concept and is obtained from the Google Accounts domain dashboard.'
+           % sys.argv[0])
+    return
+
+  consumer_secret = sys.argv[1]
+  consumer = oauth.Consumer(constants.CONSUMER_KEY, consumer_secret)
+
+  # Step 1: obtain an unauthorized request token from the dashboard.
+  unauthorized_token = _request_unauthorized_token(consumer,
+                                                   constants.REQUEST_TOKEN_URL)
+
+  # Step 2: have the new dashboard administrator approve the token.
+  _ask_user_to_authorize_us(unauthorized_token)
+
+  # Step 3: trade the authorized token for an access token and persist it,
+  # along with the consumer secret, for the track_* scripts to read.
+  access_token_string = _request_access_token(consumer, unauthorized_token)
+
+  _write_access_token_to_file(access_token_string, constants.ACCESS_TOKEN_FILE)
+  _write_consumer_secret_to_file(consumer_secret,
+                                 constants.CONSUMER_SECRET_FILE)
+
+if __name__ == '__main__':
+  _main()
diff --git a/trunk/tools/quality_tracking/tgrid_parser.py b/trunk/tools/quality_tracking/tgrid_parser.py
new file mode 100644
index 0000000..2b674d5
--- /dev/null
+++ b/trunk/tools/quality_tracking/tgrid_parser.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Contains functions for parsing the build master's transposed grid page."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import re
+
+
+class FailedToParseBuildStatus(Exception):
+  pass
+
+
+def _parse_builds(revision, html):
+  """Parses the bot list, which is a sequence of <td></td> lines.
+
+     Example input:
+     <td class="build success"><a href="builders/Android/builds/119">OK</a></td>
+     The first regular expression group captures Android, second 119, third OK.
+
+     Returns: a dict mapping '<revision>--<bot name>' to
+         '<build number>--<status>'.
+  """
+  result = {}
+
+  # DOTALL lets the non-greedy .*? span newlines inside multi-line cells.
+  for match in re.finditer('<td.*?>.*?<a href="builders/(.+?)/builds/(\d+)">'
+                           '(OK|failed|building|warnings)</a>.*?</td>',
+                           html, re.DOTALL):
+    revision_and_bot_name = revision + "--" + match.group(1)
+    build_number_and_status = match.group(2) + "--" + match.group(3)
+
+    result[revision_and_bot_name] = build_number_and_status
+
+  return result
+
+
+def parse_tgrid_page(html):
+  """Parses the build master's tgrid page.
+
+     Example input:
+     <tr>
+       <td valign="bottom" class="sourcestamp">1568</td>
+       LIST OF BOTS
+     </tr>
+     The first regular expression group captures 1568, second group captures
+     everything in LIST OF BOTS. The list of bots is then passed into a
+     separate function for parsing.
+
+     Args:
+         html: The raw HTML from the tgrid page.
+
+     Returns: A dictionary with <svn revision>--<bot name> mapped to
+         <bot build number>--<status>, where status is either OK, failed,
+         building or warnings.
+
+     Raises:
+         FailedToParseBuildStatus: if no build statuses could be found.
+  """
+  result = {}
+
+  # Each <tr> holds one revision; its bot cells are parsed by _parse_builds.
+  for match in re.finditer('<td.*?class="sourcestamp">(\d+)</td>(.*?)</tr>',
+                           html, re.DOTALL):
+    revision = match.group(1)
+    builds_for_revision_html = match.group(2)
+    result.update(_parse_builds(revision, builds_for_revision_html))
+
+  if not result:
+    raise FailedToParseBuildStatus('Could not find any build statuses in %s.' %
+                                   html)
+
+  return result
diff --git a/trunk/tools/quality_tracking/tgrid_parser_test.py b/trunk/tools/quality_tracking/tgrid_parser_test.py
new file mode 100755
index 0000000..8b15c90
--- /dev/null
+++ b/trunk/tools/quality_tracking/tgrid_parser_test.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Unit tests for the tgrid parser (tgrid_parser.py)."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import unittest
+
+import tgrid_parser
+
+
+SAMPLE_FILE = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+<html
+ xmlns="http://www.w3.org/1999/xhtml"
+ lang="en"
+ xml:lang="en">
+<head>
+ <title>Buildbot</title>
+ <link href="buildbot.css" rel="stylesheet" type="text/css" />
+</head>
+
+<body vlink="#800080">
+<table class="Grid" border="0" cellspacing="0">
+<tr>
+<td valign="bottom" class="sourcestamp">1570</td>
+<td class="build warnings"><a href="builders/Chrome/builds/109">warnings</a>
+<br />
+make chrome</td>
+<td class="build success">
+  <a href="builders/Android/builds/121">OK</a></td>
+<td class="build success">
+  <a href="builders/ChromeOS/builds/578">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux32bitDBG/builds/564">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux32bitRelease/builds/684">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux64bitDBG/builds/680">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux64bitDBG-GCC4.6/builds/5">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux64bitRelease/builds/570">OK</a></td>
+<td class="build success">
+  <a href="builders/LinuxCLANG/builds/259">OK</a></td>
+<td class="build success">
+  <a href="builders/LinuxVideoTest/builds/345">OK</a></td>
+<td class="build success">
+  <a href="builders/MacOS/builds/670">OK</a></td>
+<td class="build success">
+  <a href="builders/Win32Debug/builds/432">OK</a></td>
+<td class="build success">
+  <a href="builders/Win32Release/builds/440">OK</a></td>
+</tr>
+<tr>
+<td valign="bottom" class="sourcestamp">1571</td>
+<td class="build warnings"><a href="builders/Chrome/builds/109">warnings</a>
+<br />
+make chrome</td>
+<td class="build success">
+  <a href="builders/Android/builds/122">OK</a></td>
+<td class="build success">
+  <a href="builders/ChromeOS/builds/579">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux32bitDBG/builds/565">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux32bitRelease/builds/685">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux64bitDBG/builds/681">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux64bitDBG-GCC4.6/builds/6">OK</a></td>
+<td class="build success">
+  <a href="builders/Linux64bitRelease/builds/571">OK</a></td>
+<td class="build success">
+  <a href="builders/LinuxCLANG/builds/260">OK</a></td>
+<td class="build failure">
+  <a href="builders/LinuxVideoTest/builds/346">failed</a><br />
+voe_auto_test</td>
+<td class="build success">
+  <a href="builders/MacOS/builds/671">OK</a></td>
+<td class="build running">
+  <a href="builders/Win32Debug/builds/441">building</a></td>
+<td class="build success">
+  <a href="builders/Win32Release/builds/441">OK</a></td>
+</tr>
+</table>
+</body>
+</html>
+"""
+
+MINIMAL_OK = """
+<tr>
+<td valign="bottom" class="sourcestamp">1570</td>
+<td class="build success">
+<a href="builders/Android/builds/121">OK</a></td>
+</tr>
+"""
+
+MINIMAL_FAIL = """
+<tr>
+<td valign="bottom" class="sourcestamp">1573</td>
+<td class="build failure">
+<a href="builders/LinuxVideoTest/builds/347">failed</a><br />
+voe_auto_test</td>
+</tr>
+"""
+
+MINIMAL_BUILDING = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576</td>
+<td class="build running">
+<a href="builders/Win32Debug/builds/434">building</a></td>
+voe_auto_test</td>
+</tr>
+"""
+
+MINIMAL_WARNED = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576</td>
+<td class="build warnings">
+<a href="builders/Chrome/builds/109">warnings</a><br />
+make chrome</td>
+</tr>
+"""
+
+class TGridParserTest(unittest.TestCase):
+  """Exercises tgrid_parser against minimal and full sample tgrid pages."""
+
+  def test_parser_throws_exception_on_empty_html(self):
+    self.assertRaises(tgrid_parser.FailedToParseBuildStatus,
+                      tgrid_parser.parse_tgrid_page, '');
+
+  def test_parser_finds_successful_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_OK)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1570--Android', first_mapping[0])
+    self.assertEqual('121--OK', first_mapping[1])
+
+  def test_parser_finds_failed_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_FAIL)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1573--LinuxVideoTest', first_mapping[0])
+    self.assertEqual('347--failed', first_mapping[1])
+
+  def test_parser_finds_building_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_BUILDING)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--Win32Debug', first_mapping[0])
+    self.assertEqual('434--building', first_mapping[1])
+
+  def test_parser_finds_warned_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_WARNED)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--Chrome', first_mapping[0])
+    self.assertEqual('109--warnings', first_mapping[1])
+
+  def test_parser_finds_all_bots_and_revisions(self):
+    result = tgrid_parser.parse_tgrid_page(SAMPLE_FILE)
+
+    # 2 * 13 = 26 bots in sample
+    self.assertEqual(26, len(result))
+
+    # Make some samples
+    self.assertTrue(result.has_key('1570--ChromeOS'))
+    self.assertEquals('578--OK', result['1570--ChromeOS'])
+
+    self.assertTrue(result.has_key('1570--Chrome'))
+    self.assertEquals('109--warnings', result['1570--Chrome'])
+
+    self.assertTrue(result.has_key('1570--LinuxCLANG'))
+    self.assertEquals('259--OK', result['1570--LinuxCLANG'])
+
+    self.assertTrue(result.has_key('1570--Win32Release'))
+    self.assertEquals('440--OK', result['1570--Win32Release'])
+
+    self.assertTrue(result.has_key('1571--ChromeOS'))
+    self.assertEquals('579--OK', result['1571--ChromeOS'])
+
+    self.assertTrue(result.has_key('1571--LinuxVideoTest'))
+    self.assertEquals('346--failed', result['1571--LinuxVideoTest'])
+
+    self.assertTrue(result.has_key('1571--Win32Debug'))
+    self.assertEquals('441--building', result['1571--Win32Debug'])
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/trunk/tools/quality_tracking/track_build_status.py b/trunk/tools/quality_tracking/track_build_status.py
new file mode 100755
index 0000000..1ddf140
--- /dev/null
+++ b/trunk/tools/quality_tracking/track_build_status.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script checks the current build status on the master and submits
+   it to the dashboard. It is adapted to build bot version 0.7.12.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+
+import httplib
+
+import constants
+import dashboard_connection
+import tgrid_parser
+
+
+class FailedToGetStatusFromMaster(Exception):
+  pass
+
+
+def _download_and_parse_build_status():
+  """Fetches the master's transposed grid page and parses it.
+
+     Returns: the dict produced by tgrid_parser.parse_tgrid_page.
+
+     Raises:
+         FailedToGetStatusFromMaster: if the master does not respond with 200.
+  """
+  connection = httplib.HTTPConnection(constants.BUILD_MASTER_SERVER)
+  connection.request('GET', constants.BUILD_MASTER_TRANSPOSED_GRID_URL)
+  response = connection.getresponse()
+
+  if response.status != 200:
+    raise FailedToGetStatusFromMaster(('Failed to get build status from master:'
+                                       ' got status %d, reason %s.' %
+                                       (response.status, response.reason)))
+
+  full_response = response.read()
+  connection.close()
+
+  return tgrid_parser.parse_tgrid_page(full_response)
+
+
+def _main():
+  """Downloads the current build status and posts it to the dashboard."""
+  dashboard = dashboard_connection.DashboardConnection(constants.CONSUMER_KEY)
+  dashboard.read_required_files(constants.CONSUMER_SECRET_FILE,
+                                constants.ACCESS_TOKEN_FILE)
+
+  bot_to_status_mapping = _download_and_parse_build_status()
+
+  dashboard.send_post_request(constants.ADD_BUILD_STATUS_DATA_URL,
+                              bot_to_status_mapping)
+
+
+if __name__ == '__main__':
+  _main()
diff --git a/trunk/tools/quality_tracking/track_coverage.py b/trunk/tools/quality_tracking/track_coverage.py
new file mode 100755
index 0000000..a46ddfc
--- /dev/null
+++ b/trunk/tools/quality_tracking/track_coverage.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script grabs and reports coverage information.
+
+   It grabs coverage information from the latest Linux 32-bit build and
+   pushes it to the coverage tracker, enabling us to track code coverage
+   over time. This script is intended to run on the 32-bit Linux slave.
+
+   This script requires an access.token file in the current directory, as
+   generated by the request_oauth_permission.py script. It also expects a file
+   consumer.secret with a single line containing the consumer secret. The
+   consumer secret is an OAuth concept and is received when one registers the
+   application with the App Engine running the dashboard.
+
+   The script assumes that all coverage data is stored under
+   /home/<build bot user>/www.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import os
+import re
+import time
+
+import constants
+import dashboard_connection
+
+
+class FailedToParseCoverageHtml(Exception):
+  pass
+
+
+class CouldNotFindCoverageDirectory(Exception):
+  pass
+
+
+def _find_latest_32bit_debug_build(www_directory_contents, coverage_www_dir):
+  """Finds the latest 32-bit coverage directory in the directory listing.
+
+     Coverage directories have the form Linux32bitDBG_<number>. There may be
+     other directories in the list though, for instance for other build
+     configurations.
+  """
+  # NOTE(review): the regexp below matches 'Linux32DBG_' while the docstring
+  # (and the bot name in the tgrid sample data) says 'Linux32bitDBG' --
+  # confirm which naming the coverage www directories actually use.
+  # NOTE(review): the sort is lexicographic, so 'latest' is only correct
+  # while the build numbers have the same number of digits -- verify.
+
+  # This sort ensures we will encounter the directory with the highest number
+  # first.
+  www_directory_contents.sort(reverse=True)
+
+  for entry in www_directory_contents:
+    match = re.match('Linux32DBG_\d+', entry)
+    if match is not None:
+      return entry
+
+  raise CouldNotFindCoverageDirectory('Error: Found no 32-bit '
+                                      'debug build in directory %s.' %
+                                      coverage_www_dir)
+
+
+def _grab_coverage_percentage(label, index_html_contents):
+  """Extracts coverage from a LCOV coverage report.
+
+     Grabs coverage by assuming that the label in the coverage HTML report
+     is close to the actual number and that the number is followed by a space
+     and a percentage sign.
+
+     Returns: the coverage value as a float.
+
+     Raises:
+         FailedToParseCoverageHtml: if the label is missing or the matched
+             value is not parseable as a float.
+  """
+  match = re.search('<td[^>]*>' + label + '</td>.*?(\d+\.\d) %',
+                    index_html_contents, re.DOTALL)
+  if match is None:
+    raise FailedToParseCoverageHtml('Missing coverage at label "%s".' % label)
+
+  try:
+    return float(match.group(1))
+  except ValueError:
+    raise FailedToParseCoverageHtml('%s is not a float.' % match.group(1))
+
+
+def _report_coverage_to_dashboard(dashboard, now, line_coverage,
+                                  function_coverage):
+  """POSTs the coverage figures, stamped with time now, to the dashboard."""
+  parameters = {'date': '%d' % now,
+                'line_coverage': '%f' % line_coverage,
+                'function_coverage': '%f' % function_coverage
+               }
+
+  dashboard.send_post_request(constants.ADD_COVERAGE_DATA_URL, parameters)
+
+
+def _main():
+  dashboard = dashboard_connection.DashboardConnection(constants.CONSUMER_KEY)
+  dashboard.read_required_files(constants.CONSUMER_SECRET_FILE,
+                                constants.ACCESS_TOKEN_FILE)
+
+  www_dir_contents = os.listdir(BUILD_BOT_COVERAGE_WWW_DIRECTORY)
+  latest_build_directory = _find_latest_32bit_debug_build(www_dir_contents,
+                                                          coverage_www_dir)
+
+  index_html_path = os.path.join(coverage_www_dir, latest_build_directory,
+                                 'index.html')
+  index_html_file = open(index_html_path)
+  whole_file = index_html_file.read()
+
+  line_coverage = _grab_coverage_percentage('Lines:', whole_file)
+  function_coverage = _grab_coverage_percentage('Functions:', whole_file)
+  now = int(time.time())
+
+  _report_coverage_to_dashboard(dashboard, now, line_coverage,
+                                function_coverage)
+
+
+if __name__ == '__main__':
+  _main()
+
diff --git a/trunk/tools/refactoring/addfileheader.py b/trunk/tools/refactoring/addfileheader.py
new file mode 100644
index 0000000..01c8a8b
--- /dev/null
+++ b/trunk/tools/refactoring/addfileheader.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import sys
+
+extensions = ['.h','.cc','.c','.cpp']
+
+ignore_these = ['my_ignore_header.h']
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: directory [--commit]'
+    quit()
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+commit = False
+if(len(sys.argv) == 3):
+    commit = True
+
+files_to_fix = []
+for extension in extensions:
+    files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
+                                                       extension))
+
+# Just steal the header from the template
+def fileheaderasstring():
+    """Returns the license header template file's contents as one string."""
+    template_file_name = 'license_template.txt'
+    if (not filemanagement.fileexist(template_file_name)):
+        print 'File ' + template_file_name + ' not found!'
+        quit()
+    template_file = open(template_file_name,'r')
+    return_string = ''
+    for line in template_file:
+        return_string += line
+    return return_string
+
+# Just steal the header from the template
+def fileheaderasarray():
+    """Returns the license header template as a list of lines."""
+    template_file_name = 'license_template.txt'
+    if (not filemanagement.fileexist(template_file_name)):
+        print 'File ' + template_file_name + ' not found!'
+        quit()
+    template_file = open(template_file_name,'r')
+    return_value = []
+    for line in template_file:
+        return_value.append(line)
+    return return_value
+
+
+def findheader(path, file_name):
+    """Returns True if path/file_name contains the license header template.
+
+    Compares a sliding window of lines, the size of the template, against
+    the template until a match is found or the file ends.
+    """
+    full_file_name = path + file_name
+    if (not filemanagement.fileexist(full_file_name)):
+        print 'File ' + file_name + ' not found!'
+        print 'Unexpected error!'
+        quit()
+    file_handle = open(full_file_name)
+    template_file_content = fileheaderasarray()
+    compare_content = []
+    # load the same number of lines from file as the fileheader
+    for index in range(len(template_file_content)):
+        line = file_handle.readline()
+        if (line == ''):
+            return False
+        compare_content.append(line)
+
+    # Slide the window one line at a time: compare, then drop the first
+    # line and append the next one from the file.
+    while (True):
+        found = True
+        for index in range(len(template_file_content)):
+            line1 = template_file_content[index]
+            line2 = compare_content[index]
+            if(line1 != line2):
+                found = False
+                break
+        if (found):
+            return True
+        compare_content = compare_content[1:len(compare_content)]
+        line = file_handle.readline()
+        if (line == ''):
+            return False
+        compare_content.append(line)
+    return False
+
+# Used to store temporary result before flushing to real file when finished
+def temporaryfilename(old_file_name):
+    """Returns the name used for the temporary scratch copy of a file."""
+    return old_file_name + '.deleteme'
+
+def updatefile(path, old_file_name):
+    """Prepends the license header to path/old_file_name.
+
+    Writes header + original contents (minus leading blank lines) to a
+    temporary file, copies that over the original and deletes it.
+    """
+    full_old_file_name = path + old_file_name
+    if (not filemanagement.fileexist(full_old_file_name)):
+        print 'File ' + full_old_file_name + ' is not found.'
+        print 'Should not happen! Ever!'
+        quit()
+
+    full_temporary_file_name = path + temporaryfilename(old_file_name)
+
+    # Make sure that the files are closed by putting them out of scope
+    old_file = open(full_old_file_name,'r')
+    temporary_file = open(full_temporary_file_name,'w')
+
+    temporary_file.writelines(fileheaderasstring())
+    # Drop blank lines at the top of the original so the header is followed
+    # directly by content.
+    remove_whitespaces = True
+    for line in old_file:
+        if (remove_whitespaces and (len(line.split()) == 0)):
+            continue
+        else:
+            remove_whitespaces = False
+        temporary_file.writelines(line)
+    old_file.close()
+    temporary_file.close()
+
+    # filemanagement.copyfile takes (to_file, from_file): this copies the
+    # temporary file over the original, then removes the temporary.
+    filemanagement.copyfile(full_old_file_name,full_temporary_file_name)
+    filemanagement.deletefile(full_temporary_file_name)
+
+
+failed_files = []
+skipped_files = []
+# Walk every candidate file: skip ignored names, leave files that already
+# mention a copyright (recording whether they match our header exactly),
+# and update the rest when --commit is given.
+for index in range(len(files_to_fix)):
+    if(commit):
+        # Progress indicator (percent done) while committing.
+        print (100*index)/len(files_to_fix)
+    path_dir = files_to_fix[index][0]
+    filename = files_to_fix[index][1]
+    is_ignore = False
+    for ignore_names in ignore_these:
+        if(filename == ignore_names):
+            is_ignore = True
+            break
+    if(is_ignore):
+        continue
+
+# Let the word copyright be our sanity, i.e. make sure there is only one
+# copy right occurance or report that there will be no change
+    if(filemanagement.findstringinfile(path_dir,filename,'Copyright') or
+        filemanagement.findstringinfile(path_dir,filename,'copyright') or
+        filemanagement.findstringinfile(path_dir,filename,'COPYRIGHT')):
+        if(findheader(path_dir,filename)):
+            skipped_files.append(path_dir + filename)
+        else:
+            failed_files.append(path_dir + filename)
+        continue
+
+    if (not commit):
+        print 'File ' + path_dir + filename + ' will be updated'
+        continue
+    updatefile(path_dir,filename)
+
+# Summarize what was (or would be) done.
+tense = 'will be'
+if (commit):
+    tense = 'has been'
+if (len(skipped_files) > 0):
+    print str(len(skipped_files)) + ' file(s) ' + tense + ' skipped since they already have the correct header'
+
+if (len(failed_files) > 0):
+    print 'Following files seem to have an invalid file header:'
+for line in failed_files:
+    print line
diff --git a/trunk/tools/refactoring/filemanagement.py b/trunk/tools/refactoring/filemanagement.py
new file mode 100644
index 0000000..4ff64ce
--- /dev/null
+++ b/trunk/tools/refactoring/filemanagement.py
@@ -0,0 +1,72 @@
+import fnmatch
+import os
+import stringmanipulation
+
+def fileexist( file_name ):
+    """Returns True if file_name is an existing regular file."""
+    return os.path.isfile(file_name)
+
+def pathexist( path ):
+    """Returns True if path exists (file or directory)."""
+    return os.path.exists(path)
+
+def fixpath( path ):
+    """Returns path with a trailing '/' appended if it lacks one."""
+    return_value = path
+    if( return_value[len(return_value) - 1] != '/'):
+        return_value = return_value + '/'
+    return return_value
+
+def listallfilesinfolder( path, extension ):
+    """Recursively lists files under path ending with extension.
+
+    Returns a list of [directory, filename] pairs, where directory always
+    ends with '/'.
+    """
+    matches = []
+    signature = '*' + extension
+    for root, dirnames, filenames in os.walk(path):
+        for filename in fnmatch.filter(filenames, signature):
+            matches.append([fixpath(root), filename])
+    return matches
+
+def copyfile(to_file, from_file):
+    """Copies from_file onto to_file via 'cp -f' (note the argument order)."""
+    if(not fileexist(from_file)):
+        return
+    command = 'cp -f ' + from_file + ' ' + to_file
+    os.system(command)
+    #print command
+
+def deletefile(file_to_delete):
+    """Removes file_to_delete if it exists (shells out to rm)."""
+    if(not fileexist(file_to_delete)):
+        return
+    os.system('rm ' + file_to_delete)
+
+# very ugly but works, so keep for now
+def findstringinfile(path,file_name,search_string):
+    """Returns True if search_string occurs in path/file_name (via grep)."""
+    command = 'grep \'' + search_string + '\' ' + path + file_name + ' > deleteme.txt'
+    return_value = os.system(command)
+#    print command
+    # grep exits 0 only when at least one line matched.
+    return (return_value == 0)
+
+def replacestringinfolder( path, old_string, new_string, extension ):
+    """sed-replaces old_string with new_string in files with the given
+    extension directly inside path (non-recursive)."""
+    if(not stringmanipulation.isextension(extension)):
+        print 'failed to search and replace'
+        return
+    if(len(old_string) == 0):
+        print 'failed to search and replace'
+        return
+    find_command = 'ls '+ path + '/*' + extension
+    sed_command = 'sed -i \'s/' + old_string + '/' + new_string +\
+                     '/g\' *' + extension
+    command_string = find_command + ' | xargs ' + sed_command + ' 2> deleteme.txt'
+    os.system(command_string)
+    #print command_string
+
+#find ./ -name "*.h" -type f  | xargs -P 0 sed -i 's/process_thread_wrapper.h/process_thread.h/g' *.h deleteme.txt
+def replacestringinallsubfolders( old_string, new_string, extension):
+    """sed-replaces old_string with new_string in files with the given
+    extension in the current directory and all subdirectories."""
+    if(not stringmanipulation.isextension(extension)):
+        print 'failed to search and replace'
+        return
+    if(len(old_string) == 0):
+        print 'failed to search and replace'
+        return
+
+    find_command = 'find ./ -name \"*' + extension + '\" -type f'
+    sed_command = 'sed -i \'s/' + old_string + '/' + new_string +\
+                     '/g\' *' + extension
+    command_string = find_command + ' | xargs -P 0 ' + sed_command + ' 2> deleteme.txt'
+    os.system(command_string)
+    #print command_string
diff --git a/trunk/tools/refactoring/fixincludeguards.py b/trunk/tools/refactoring/fixincludeguards.py
new file mode 100644
index 0000000..0b56355
--- /dev/null
+++ b/trunk/tools/refactoring/fixincludeguards.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import sys
+
+extensions = ['.h']
+
+ignore_these = ['my_ignore_header.h']
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: directory [--commit]'
+    quit()
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+commit = False
+if(len(sys.argv) == 3):
+    commit = True
+
+for extension in extensions:
+    files_to_fix = filemanagement.listallfilesinfolder(directory,\
+                                                       extension)
+
+def buildincludeguardname(path,filename):
+    full_file_name = 'WEBRTC_' + path + filename
+    full_file_name = full_file_name.upper()
+    full_file_name = stringmanipulation.replaceoccurances(full_file_name, '/', '_')
+    full_file_name = stringmanipulation.replaceoccurances(full_file_name, '\\', '_')
+    full_file_name = stringmanipulation.replaceoccurances(full_file_name, '.', '_')
+    full_file_name += '_'
+    return full_file_name
+
+def buildnewincludeguardset(path,filename):
+    include_guard_name = buildincludeguardname(path,filename)
+    if(include_guard_name == ''):
+        return []
+    return_value = []
+    return_value.append('#ifndef ' + include_guard_name)
+    return_value.append('#define ' + include_guard_name)
+    return_value.append(include_guard_name)
+    return return_value
+
+def printincludeguardset(include_guard_set):
+    print 'First line: ' + include_guard_set[0]
+    print 'Second line: ' + include_guard_set[1]
+    print 'Last line: ' + include_guard_set[2]
+    return
+
+include_guard_begin_identifier = ['#ifndef', '#if !defined']
+include_guard_second_identifier = ['#define']
+def findincludeguardidentifier(line):
+    for begin_identifier in include_guard_begin_identifier:
+        line = stringmanipulation.removealloccurances(line,begin_identifier)
+    for second_identifier in include_guard_begin_identifier:
+        line = stringmanipulation.removealloccurances(line,second_identifier)
+    removed_prefix = [True,'']
+    line = stringmanipulation.whitespacestoonespace(line)
+    while(removed_prefix[0]):
+        removed_prefix = stringmanipulation.removeprefix(line,' ')
+        line = removed_prefix[1]
+    line = stringmanipulation.removealloccurances(line,'(')
+    if(line == ''):
+        return ''
+    word_pos = stringmanipulation.getword(line,0)
+    return_value = line[0:word_pos[1]]
+    return_value = return_value.rstrip('\r\n')
+    return return_value
+
+def findoldincludeguardset(path,filename):
+    # Scan path+filename for its existing include guard and return
+    # [#ifndef line, #define line, guard name].  Returns [] when no complete
+    # guard pair is found.
+    return_value = []
+    full_file_name = path + filename
+    file_pointer = open(full_file_name,'r')
+    include_guard_name = ''
+    for line in file_pointer:
+        if (include_guard_name == ''):
+            # Still looking for the '#ifndef' / '#if !defined' line.
+            for compare_string in include_guard_begin_identifier:
+                if (stringmanipulation.issubstring(compare_string, line) != -1):
+                    include_guard_name = findincludeguardidentifier(line)
+                    if (include_guard_name == ''):
+                        break
+                    line = line.rstrip('\r\n')
+                    return_value.append(line)
+                    break
+        else:
+            # Candidate found: the matching '#define' of the same macro must
+            # follow, otherwise the search starts over from scratch.
+            for compare_string in include_guard_second_identifier:
+                if (stringmanipulation.issubstring(compare_string, line) != -1):
+                    if (stringmanipulation.issubstring(include_guard_name, line) != -1):
+                        line = line.rstrip('\r\n')
+                        return_value.append(line)
+                        return_value.append(include_guard_name)
+                        return return_value
+            # NOTE(review): any non-matching line resets the state, which
+            # assumes '#define' directly follows '#ifndef' — confirm that
+            # holds for all headers being fixed.
+            include_guard_name = ''
+            return_value = []
+    return []
+
+failed_files = []
+# Main pass: for every header compute the old and the new guard set and,
+# when committing, perform the textual replacement in the file's folder.
+for index in range(len(files_to_fix)):
+    if(commit):
+        # Progress indicator (integer percent).
+        print (100*index)/len(files_to_fix)
+    path_dir = files_to_fix[index][0]
+    filename = files_to_fix[index][1]
+    is_ignore = False
+    for ignore_names in ignore_these:
+        if(filename == ignore_names):
+            is_ignore = True
+            break
+    if(is_ignore):
+        continue
+    old_include_guard_set = findoldincludeguardset(path_dir,filename)
+    if (len(old_include_guard_set) != 3) :
+        failed_files.append('unable to figure out the include guards for ' + filename)
+        continue
+
+    new_include_guard_set = buildnewincludeguardset(path_dir,filename)
+    if (len(new_include_guard_set) != 3) :
+        failed_files.append('unable to figure out new the include guards for ' + filename)
+        continue
+
+    if(not commit):
+        # Dry run: only report what would change.
+        print 'old guard: ' + old_include_guard_set[2]
+        print 'new guard: ' + new_include_guard_set[2]
+        continue
+
+    # Replace the '#ifndef' and '#define' lines first.
+    for index in range(2):
+        # enough to only replace for file. However, no function for that
+        for extension in extensions:
+            filemanagement.replacestringinfolder(path_dir,old_include_guard_set[index],new_include_guard_set[index],extension)
+    # special case for last to avoid complications
+    # NOTE(review): the leading-space and '\/\/' variants restrict matching
+    # of the bare guard name (e.g. trailing '#endif // GUARD' comments) —
+    # confirm these two patterns cover every remaining occurrence.
+    for extension in extensions:
+        filemanagement.replacestringinfolder(path_dir,' ' + old_include_guard_set[2],' ' + new_include_guard_set[2],extension)
+        filemanagement.replacestringinfolder(path_dir,'\\/\\/' + old_include_guard_set[2],'\\/\\/ ' + new_include_guard_set[2],extension)
+
+
+if(len(failed_files) > 0):
+    print 'Following failures should be investigated manually:'
+for line in failed_files:
+    print line
diff --git a/trunk/tools/refactoring/fixnames.py b/trunk/tools/refactoring/fixnames.py
new file mode 100644
index 0000000..15381e3
--- /dev/null
+++ b/trunk/tools/refactoring/fixnames.py
@@ -0,0 +1,387 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import p4commands
+import sys
+
+name_space_to_ignore = 'GIPS::'
+#only allow one prefix to be removed since allowing multiple will complicate
+# things
+prefix_to_filter = 'gips'
+#words_to_filter = ['Module']
+# it might be dangerous to remove GIPS but keep it default
+words_to_filter = ['Module','GIPS']
+
+# This script finds all the words that should be replaced in an h-file. Once
+# all words that should be replaced are found it does a global search and
+# replace.
+
+extensions_to_edit = ['.cpp','.cc','.h']
+
+#line = '    ~hiGIPSCriticalSectionScoped()'
+#print line
+#position = stringmanipulation.getword(line,11)
+#old_word = line[position[0]:position[0]+position[1]]
+#result = stringmanipulation.removealloccurances(old_word,'gips')
+#new_word = result
+#print old_word
+#print position[0]
+#print position[0]+position[1]
+#print new_word
+#quit()
+
+# Ignore whole line if any item in this table is a substring of the line
+do_not_replace_line_table = []
+do_not_replace_line_table.append('namespace GIPS')
+
+# [old_string,new_string]
+# List of things to remove that are static:
+manual_replace_table = []
+#manual_replace_table.append(['using namespace GIPS;',''])
+#manual_replace_table.append(['CreateGipsEvent','CreateEvent'])
+#manual_replace_table.append(['CreateGIPSTrace','CreateTrace'])
+#manual_replace_table.append(['ReturnGIPSTrace','ReturnTrace'])
+#manual_replace_table.append(['CreateGIPSFile','CreateFile'])
+replace_table = manual_replace_table
+#replace_table.append(['GIPS::','webrtc::'])
+# List of things to not remove that are static, i.e. exceptions:
+# don't replace any of the GIPS_Words since that will affect all files
+# do that in a separate script!
+do_not_replace_table = []
+do_not_replace_table.append('GIPS_CipherTypes')
+do_not_replace_table.append('GIPS_AuthenticationTypes')
+do_not_replace_table.append('GIPS_SecurityLevels')
+do_not_replace_table.append('GIPS_encryption')
+do_not_replace_table.append('~GIPS_encryption')
+do_not_replace_table.append('GIPS_transport')
+do_not_replace_table.append('~GIPS_transport')
+do_not_replace_table.append('GIPSTraceCallback')
+do_not_replace_table.append('~GIPSTraceCallback')
+do_not_replace_table.append('GIPS_RTP_CSRC_SIZE')
+do_not_replace_table.append('GIPS_RTPDirections')
+do_not_replace_table.append('GIPS_RTP_INCOMING')
+do_not_replace_table.append('GIPS_RTP_OUTGOING')
+do_not_replace_table.append('GIPSFrameType')
+do_not_replace_table.append('GIPS_FRAME_EMPTY')
+do_not_replace_table.append('GIPS_AUDIO_FRAME_SPEECH')
+do_not_replace_table.append('GIPS_AUDIO_FRAME_CN')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_KEY')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_DELTA')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_GOLDEN')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_DELTA_KEY')
+do_not_replace_table.append('GIPS_PacketType')
+do_not_replace_table.append('GIPS_PACKET_TYPE_RTP')
+do_not_replace_table.append('GIPS_PACKET_TYPE_KEEP_ALIVE')
+do_not_replace_table.append('GIPS_AudioLayers')
+do_not_replace_table.append('GIPS_AUDIO_PLATFORM_DEFAULT')
+do_not_replace_table.append('GIPS_AUDIO_WINDOWS_WAVE')
+do_not_replace_table.append('GIPS_AUDIO_WINDOWS_CORE')
+do_not_replace_table.append('GIPS_AUDIO_LINUX_ALSA')
+do_not_replace_table.append('GIPS_AUDIO_LINUX_PULSE')
+do_not_replace_table.append('GIPS_AUDIO_FORMAT')
+do_not_replace_table.append('GIPS_PCM_16_16KHZ')
+do_not_replace_table.append('GIPS_PCM_16_8KHZ')
+do_not_replace_table.append('GIPS_G729')
+do_not_replace_table.append('GIPSAMRmode')
+do_not_replace_table.append('GIPS_RFC3267_BWEFFICIENT')
+do_not_replace_table.append('GIPS_RFC3267_OCTETALIGNED')
+do_not_replace_table.append('GIPS_RFC3267_FILESTORAGE')
+do_not_replace_table.append('GIPS_NCModes')
+do_not_replace_table.append('GIPS_NC_OFF')
+do_not_replace_table.append('GIPS_NC_MILD')
+do_not_replace_table.append('GIPS_NC_MODERATE')
+do_not_replace_table.append('GIPS_NC_AGGRESSIVE')
+do_not_replace_table.append('GIPS_NC_VERY_AGGRESSIVE')
+do_not_replace_table.append('GIPS_AGCModes')
+do_not_replace_table.append('GIPS_AGC_OFF')
+do_not_replace_table.append('GIPS_AGC_ANALOG')
+do_not_replace_table.append('GIPS_AGC_DIGITAL')
+do_not_replace_table.append('GIPS_AGC_STANDALONE_DIG')
+do_not_replace_table.append('GIPS_ECModes')
+do_not_replace_table.append('GIPS_EC_UNCHANGED')
+do_not_replace_table.append('GIPS_EC_DEFAULT')
+do_not_replace_table.append('GIPS_EC_CONFERENCE')
+do_not_replace_table.append('GIPS_EC_AEC')
+do_not_replace_table.append('GIPS_EC_AES')
+do_not_replace_table.append('GIPS_EC_AECM')
+do_not_replace_table.append('GIPS_EC_NEC_IAD')
+do_not_replace_table.append('GIPS_AESModes')
+do_not_replace_table.append('GIPS_AES_DEFAULT')
+do_not_replace_table.append('GIPS_AES_NORMAL')
+do_not_replace_table.append('GIPS_AES_HIGH')
+do_not_replace_table.append('GIPS_AES_ATTENUATE')
+do_not_replace_table.append('GIPS_AES_NORMAL_SOFT_TRANS')
+do_not_replace_table.append('GIPS_AES_HIGH_SOFT_TRANS')
+do_not_replace_table.append('GIPS_AES_ATTENUATE_SOFT_TRANS')
+do_not_replace_table.append('GIPS_AECMModes')
+do_not_replace_table.append('GIPS_AECM_QUIET_EARPIECE_OR_HEADSET')
+do_not_replace_table.append('GIPS_AECM_EARPIECE')
+do_not_replace_table.append('GIPS_AECM_LOUD_EARPIECE')
+do_not_replace_table.append('GIPS_AECM_SPEAKERPHONE')
+do_not_replace_table.append('GIPS_AECM_LOUD_SPEAKERPHONE')
+do_not_replace_table.append('AECM_LOUD_SPEAKERPHONE')
+do_not_replace_table.append('GIPS_VAD_CONVENTIONAL')
+do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_LOW')
+do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_MID')
+do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_HIGH')
+do_not_replace_table.append('GIPS_NetEQModes')
+do_not_replace_table.append('GIPS_NETEQ_DEFAULT')
+do_not_replace_table.append('GIPS_NETEQ_STREAMING')
+do_not_replace_table.append('GIPS_NETEQ_FAX')
+do_not_replace_table.append('GIPS_NetEQBGNModes')
+do_not_replace_table.append('GIPS_BGN_ON')
+do_not_replace_table.append('GIPS_BGN_FADE')
+do_not_replace_table.append('GIPS_BGN_OFF')
+do_not_replace_table.append('GIPS_OnHoldModes')
+do_not_replace_table.append('GIPS_HOLD_SEND_AND_PLAY')
+do_not_replace_table.append('GIPS_HOLD_SEND_ONLY')
+do_not_replace_table.append('GIPS_HOLD_PLAY_ONLY')
+do_not_replace_table.append('GIPS_PayloadFrequencies')
+do_not_replace_table.append('GIPS_FREQ_8000_HZ')
+do_not_replace_table.append('GIPS_FREQ_16000_HZ')
+do_not_replace_table.append('GIPS_FREQ_32000_HZ')
+do_not_replace_table.append('GIPS_TelephoneEventDetectionMethods')
+do_not_replace_table.append('GIPS_IN_BAND')
+do_not_replace_table.append('GIPS_OUT_OF_BAND')
+do_not_replace_table.append('GIPS_IN_AND_OUT_OF_BAND')
+do_not_replace_table.append('GIPS_ProcessingTypes')
+do_not_replace_table.append('GIPS_PLAYBACK_PER_CHANNEL')
+do_not_replace_table.append('GIPS_PLAYBACK_ALL_CHANNELS_MIXED')
+do_not_replace_table.append('GIPS_RECORDING_PER_CHANNEL')
+do_not_replace_table.append('GIPS_RECORDING_ALL_CHANNELS_MIXED')
+do_not_replace_table.append('GIPS_StereoChannel')
+do_not_replace_table.append('GIPS_StereoLeft')
+do_not_replace_table.append('GIPS_StereoRight')
+do_not_replace_table.append('GIPS_StereoBoth')
+do_not_replace_table.append('GIPS_stat_val')
+do_not_replace_table.append('GIPS_P56_statistics')
+do_not_replace_table.append('GIPS_echo_statistics')
+do_not_replace_table.append('GIPS_NetworkStatistics')
+do_not_replace_table.append('GIPS_JitterStatistics')
+do_not_replace_table.append('GIPSVideoRawType')
+do_not_replace_table.append('GIPS_VIDEO_I420')
+do_not_replace_table.append('GIPS_VIDEO_YV12')
+do_not_replace_table.append('GIPS_VIDEO_YUY2')
+do_not_replace_table.append('GIPS_VIDEO_UYVY')
+do_not_replace_table.append('GIPS_VIDEO_IYUV')
+do_not_replace_table.append('GIPS_VIDEO_ARGB')
+do_not_replace_table.append('GIPS_VIDEO_RGB24')
+do_not_replace_table.append('GIPS_VIDEO_RGB565')
+do_not_replace_table.append('GIPS_VIDEO_ARGB4444')
+do_not_replace_table.append('GIPS_VIDEO_ARGB1555')
+do_not_replace_table.append('GIPS_VIDEO_MJPG')
+do_not_replace_table.append('GIPS_VIDEO_NV12')
+do_not_replace_table.append('GIPS_VIDEO_NV21')
+do_not_replace_table.append('GIPS_VIDEO_Unknown')
+do_not_replace_table.append('GIPSVideoLayouts')
+do_not_replace_table.append('GIPS_LAYOUT_NONE')
+do_not_replace_table.append('GIPS_LAYOUT_DEFAULT')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED1')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED2')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED3')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED4')
+do_not_replace_table.append('GIPS_LAYOUT_FULL')
+do_not_replace_table.append('KGIPSConfigParameterSize')
+do_not_replace_table.append('KGIPSPayloadNameSize')
+do_not_replace_table.append('GIPSVideoCodecH263')
+do_not_replace_table.append('GIPSVideoH264Packetization')
+do_not_replace_table.append('GIPS_H264_SingleMode')
+do_not_replace_table.append('GIPS_H264_NonInterleavedMode')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodec_Complexity_Normal')
+do_not_replace_table.append('GIPSVideoCodec_Comlexity_High')
+do_not_replace_table.append('GIPSVideoCodec_Comlexity_Higher')
+do_not_replace_table.append('GIPSVideoCodec_Comlexity_Max')
+do_not_replace_table.append('GIPSVideoCodecH264')
+do_not_replace_table.append('GIPSVideoH264Packetization')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodecProfile')
+do_not_replace_table.append('KGIPSConfigParameterSize')
+do_not_replace_table.append('KGIPSMaxSVCLayers')
+do_not_replace_table.append('GIPSVideoH264LayerTypes')
+do_not_replace_table.append('GIPS_H264SVC_Base')
+do_not_replace_table.append('GIPS_H264SVC_Extend_2X2')
+do_not_replace_table.append('GIPS_H264SVC_Extend_1X1')
+do_not_replace_table.append('GIPS_H264SVC_Extend_MGS')
+do_not_replace_table.append('GIPS_H264SVC_Extend_1_5')
+do_not_replace_table.append('GIPS_H264SVC_Extend_Custom')
+do_not_replace_table.append('GIPSVideoH264LayersProperties')
+do_not_replace_table.append('GIPSVideoH264LayerTypes')
+do_not_replace_table.append('GIPSVideoH264Layers')
+do_not_replace_table.append('GIPSVideoH264LayersProperties')
+do_not_replace_table.append('GIPSVideoCodecH264SVC')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodecProfile')
+do_not_replace_table.append('GIPSVideoH264Layers')
+do_not_replace_table.append('GIPSVideoCodecVP8')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodecMPEG')
+do_not_replace_table.append('GIPSVideoCodecGeneric')
+do_not_replace_table.append('GIPSVideoCodecType')
+do_not_replace_table.append('GIPSVideoCodec_H263')
+do_not_replace_table.append('GIPSVideoCodec_H264')
+do_not_replace_table.append('GIPSVideoCodec_H264SVC')
+do_not_replace_table.append('GIPSVideoCodec_VP8')
+do_not_replace_table.append('GIPSVideoCodec_MPEG4')
+do_not_replace_table.append('GIPSVideoCodec_I420')
+do_not_replace_table.append('GIPSVideoCodec_RED')
+do_not_replace_table.append('GIPSVideoCodec_ULPFEC')
+do_not_replace_table.append('GIPSVideoCodec_Unknown')
+do_not_replace_table.append('GIPSVideoCodecUnion')
+do_not_replace_table.append('GIPSVideoCodecH263')
+do_not_replace_table.append('GIPSVideoCodecH264')
+do_not_replace_table.append('GIPSVideoCodecH264SVC')
+do_not_replace_table.append('GIPSVideoCodecVP8')
+do_not_replace_table.append('GIPSVideoCodecMPEG4')
+do_not_replace_table.append('GIPSVideoCodecGeneric')
+do_not_replace_table.append('GIPSVideoCodec')
+do_not_replace_table.append('GIPSVideoCodecType')
+do_not_replace_table.append('GIPSVideoCodecUnion')
+do_not_replace_table.append('GIPSAudioFrame')
+do_not_replace_table.append('GIPS_CodecInst')
+do_not_replace_table.append('GIPS_FileFormats')
+do_not_replace_table.append('GIPSTickTime')
+do_not_replace_table.append('GIPS_Word64')
+do_not_replace_table.append('GIPS_UWord64')
+do_not_replace_table.append('GIPS_Word32')
+do_not_replace_table.append('GIPS_UWord32')
+do_not_replace_table.append('GIPS_Word16')
+do_not_replace_table.append('GIPS_UWord16')
+do_not_replace_table.append('GIPS_Word8')
+do_not_replace_table.append('GIPS_UWord8')
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+# --commit present => actually perform the replacement (plus p4 operations).
+commit = (len(sys.argv) == 3)
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+# APIs are all in h-files
+extension = '.h'
+
+# All h-files
+files_to_modify = filemanagement.listallfilesinfolder(directory,\
+                                                      extension)
+
+def isinmanualremovetable( compare_word ):
+    for old_word, new_word in manual_replace_table:
+        if(old_word == compare_word):
+            return True
+    return False
+
+# Begin
+# This function looks at each line and decides which words should be replaced
+# that is this is the only part of the script that you will ever want to change!
+def findstringstoreplace(line):
+    # Returns a list of [old_word, new_word] pairs discovered on this line,
+    # or [] when the line must not be touched at all.
+    original_line = line
+# Dont replace compiler directives
+    if(line[0] == '#'):
+        return []
+# Dont allow global removal of namespace gips since it is very intrusive
+    for sub_string_compare in do_not_replace_line_table:
+        index = stringmanipulation.issubstring(line,sub_string_compare)
+        if(index != -1):
+            return []
+
+    return_value = []
+
+    line = stringmanipulation.removeccomment(line)
+    line = stringmanipulation.whitespacestoonespace(line)
+    if(len(line) == 0):
+        return []
+    if(line[0] == '*'):
+        # Skip block-comment body lines.
+        return []
+    index = stringmanipulation.issubstring(line,prefix_to_filter)
+    while index >= 0:
+        dont_store_hit = False
+        word_position = stringmanipulation.getword(line, index)
+        start_of_word = word_position[0]
+        size_of_word = word_position[1]
+        end_of_word = start_of_word + size_of_word
+        old_word = line[start_of_word:end_of_word]
+        if(isinmanualremovetable(old_word)):
+            # Already covered by the static manual_replace_table.
+            dont_store_hit = True
+        if((end_of_word + 2 < len(line)) and\
+           name_space_to_ignore == line[start_of_word:end_of_word+2]):
+            # The slice equals 'GIPS::', i.e. this is the namespace itself.
+            dont_store_hit = True
+
+        # Build the new name: drop the 'gips' prefix and filtered words,
+        # fix abbreviations, then strip any leftover underscores.
+        result = stringmanipulation.removeprefix(old_word,prefix_to_filter)
+        new_word = result[1]
+        for word_to_filter in words_to_filter:
+            new_word = stringmanipulation.removealloccurances(new_word,word_to_filter)
+        result = stringmanipulation.removeprefix(new_word,'_')
+        new_word = result[1]
+        new_word = stringmanipulation.fixabbreviations(new_word)
+        new_word = stringmanipulation.removealloccurances(new_word,'_')
+        if(not dont_store_hit):
+            return_value.append([old_word,new_word])
+# remove the word we found from the string so we dont find it again
+        line = line[0:start_of_word] + line[end_of_word:len(line)]
+        # NOTE(review): the re-scan searches for the literal 'GIPS' while the
+        # initial scan used prefix_to_filter ('gips') — confirm whether this
+        # asymmetry is intentional.
+        index = stringmanipulation.issubstring(line,'GIPS')
+
+    return return_value
+# End
+# End
+
+# loop through all files
+# Build the global replace_table by scanning every API header line by line.
+for path, file_name in files_to_modify:
+#    if(file_name != 'GIPSTickUtil.h'):
+#        continue
+    full_file_name = path + file_name
+    file_pointer = open(full_file_name,'r')
+#    print file_name
+#loop through all lines
+    for line in file_pointer:
+#        print line
+        local_replace_string = findstringstoreplace(line)
+        #print local_replace_string
+        if(len(local_replace_string) != 0):
+            replace_table.extend(local_replace_string)
+
+
+# we have built our replace table now
+# Dedupe, order by size (presumably longest-first so substrings are handled
+# after their superstrings — confirm), and drop the static exclusions.
+replace_table = stringmanipulation.removeduplicates( replace_table )
+replace_table = stringmanipulation.ordertablesizefirst( replace_table )
+replace_table = stringmanipulation.complement(replace_table,\
+                                              do_not_replace_table)
+
+def replaceoriginal( path,my_table ):
+    size_of_table = len(my_table)
+    for index in range(len(my_table)):
+        old_name = my_table[index][0]
+        new_name = my_table[index][1]
+        filemanagement.replacestringinfolder(path, old_name, new_name,\
+                                             ".h")
+        print (100*index) / (size_of_table*2)
+
+def replaceall( my_table, extension_list ):
+    size_of_table = len(my_table)
+    for index in range(len(my_table)):
+        old_name = my_table[index][0]
+        new_name = my_table[index][1]
+        new_name = new_name
+        for extension in extensions_to_edit:
+            filemanagement.replacestringinallsubfolders(old_name, new_name,
+                                                        extension)
+        print 100*(size_of_table + index) / (size_of_table*2)
+
+
+if(commit):
+    print 'commiting'
+    # Drop identity renames, check out the depot, then replace in the API
+    # headers first and in all source files second.
+    replace_table = stringmanipulation.removenochange(replace_table)
+    p4commands.checkoutallfiles()
+    replaceoriginal(directory,replace_table)
+    replaceall(replace_table,extensions_to_edit)
+    p4commands.revertunchangedfiles()
+else:
+    # Dry run: just list the planned renames.
+    for old_name, new_name in replace_table:
+        print 'Going to replace [' + old_name + '] with [' + new_name + ']'
diff --git a/trunk/tools/refactoring/integratefiles.py b/trunk/tools/refactoring/integratefiles.py
new file mode 100644
index 0000000..c5cc892
--- /dev/null
+++ b/trunk/tools/refactoring/integratefiles.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+# Renames files from one extension/naming scheme to another via Perforce
+# integrate and patches every reference to the renamed files in the tree.
+
+import stringmanipulation
+import filemanagement
+import p4commands
+import sys
+
+extensions = ['.h', '.cpp', '.cc', '.gyp']
+
+ignore_these = ['list_no_stl.h','map_no_stl.h','constructor_magic.h']
+
+# Files whose new name is fixed here rather than derived from the old name.
+exceptions = [
+['GIPSRWLock.h','rw_lock.h'],
+['GIPSCriticalsection.h','critical_section.h'],
+]
+
+if((len(sys.argv) != 4) and (len(sys.argv) != 5)):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+old_extension = sys.argv[2]
+if(not stringmanipulation.isextension(old_extension)):
+    print old_extension + ' is not a valid extension'
+    quit()
+
+new_extension = sys.argv[3]
+if(not stringmanipulation.isextension(new_extension)):
+    print new_extension + ' is not a valid extension'
+    quit()
+
+if((len(sys.argv) == 5) and (sys.argv[4] != '--commit')):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+commit = False
+if(len(sys.argv) == 5):
+    commit = True
+
+files_to_integrate = filemanagement.listallfilesinfolder(directory,\
+                                                         old_extension)
+
+if(commit):
+    p4commands.checkoutallfiles()
+# Main pass: p4-integrate each file to its new name and patch all
+# references to it throughout the tree.
+for index in range(len(files_to_integrate)):
+    if(commit):
+        # Progress indicator (integer percent).
+        print (100*index)/len(files_to_integrate)
+    path_dir = files_to_integrate[index][0]
+    filename = files_to_integrate[index][1]
+    is_ignore = False
+    for ignore_names in ignore_these:
+        if(filename == ignore_names):
+            is_ignore = True
+            break
+    if(is_ignore):
+        continue
+
+    new_file_name = ''
+    is_exception = False
+    for exception_name,exception_name_new in exceptions:
+        if(filename == exception_name):
+            is_exception = True
+            new_file_name = exception_name_new
+            break
+
+    if(not is_exception):
+        # Derive the new name: strip 'gips' prefixes and 'module', swap the
+        # extension, expand abbreviations, lower_case_with_underscore.
+        new_file_name = filename
+
+        new_file_name = stringmanipulation.removeallprefix(new_file_name,\
+                                                       'gips')
+        new_file_name = stringmanipulation.removealloccurances(new_file_name,\
+                                                       'module')
+        new_file_name = stringmanipulation.changeextension(new_file_name,\
+                                           old_extension,\
+                                           new_extension)
+        new_file_name = stringmanipulation.fixabbreviations( new_file_name )
+        new_file_name = stringmanipulation.lowercasewithunderscore(new_file_name)
+    if(not commit):
+        print 'File ' + filename + ' will be replaced with ' + new_file_name
+        continue
+    full_new_file_name = path_dir + new_file_name
+    full_old_file_name = path_dir + filename
+    if(full_new_file_name != full_old_file_name):
+        p4commands.integratefile(full_old_file_name,full_new_file_name)
+    else:
+        print 'skipping ' + new_file_name + ' due to no change'
+    for extension in extensions:
+        print 'replacing ' + filename
+        # .gyp files get the unquoted form; source files are matched with
+        # surrounding quotes to avoid partial/unrelated matches.
+        if (extension == ".gyp"):
+            filemanagement.replacestringinallsubfolders(
+                filename,new_file_name,extension)
+        else:
+            filemanagement.replacestringinallsubfolders(
+                '\"' + filename + '\"', '\"' + new_file_name + '\"', extension)
+if(commit):
+    p4commands.revertunchangedfiles()
diff --git a/trunk/tools/refactoring/p4commands.py b/trunk/tools/refactoring/p4commands.py
new file mode 100644
index 0000000..71ac31b
--- /dev/null
+++ b/trunk/tools/refactoring/p4commands.py
@@ -0,0 +1,31 @@
+import os
+import filemanagement
+
+# checks out entire p4 repository
+def checkoutallfiles():
+    # 'p4 edit' opens every depot file for edit so sed can modify them.
+    os.system('p4 edit //depotGoogle/...')
+    return
+
+# reverts all unchanged files, this is completely innoculus
+def revertunchangedfiles():
+    # 'p4 revert -a' reverts only files that are open but unmodified.
+    os.system('p4 revert -a //depotGoogle/...')
+    return
+
+def integratefile( old_name, new_name):
+    # Rename old_name to new_name in Perforce: branch via 'p4 integrate'
+    # then mark the old path for delete.  No-op when the names are equal or
+    # the source file does not exist.  Command output goes to scratch files.
+    if(old_name == new_name):
+        return
+    if(not filemanagement.fileexist(old_name)):
+        return
+    integrate_command = 'p4 integrate -o -f ' +\
+                        old_name +\
+                        ' ' +\
+                        new_name +\
+                        ' > p4summary.txt 2> error.txt'
+    os.system(integrate_command)
+    #print integrate_command
+    delete_command = 'p4 delete -c default ' +\
+                     old_name +\
+                     ' > p4summary.txt 2> error.txt'
+    os.system(delete_command)
+    #print delete_command
+    return
diff --git a/trunk/tools/refactoring/removetrace.py b/trunk/tools/refactoring/removetrace.py
new file mode 100644
index 0000000..43c622d
--- /dev/null
+++ b/trunk/tools/refactoring/removetrace.py
@@ -0,0 +1,161 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# NOTE: This is a hack which disobeys a number of conventions and best
+# practices. It's here just to be easily shared. If it's to remain in the
+# repository it should be refactored.
+
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import sys
+
+trace_remove_key_word = 'kTraceModuleCall'
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+# --commit present => actually rewrite the files.
+commit = (len(sys.argv) == 3)
+
+directory = sys.argv[1];
+# Line numbers (per file) of trace statements slated for removal; reset and
+# refilled for every file processed in the main loop below.
+occurances = []
+
+trace_identifier = 'WEBRTC_TRACE('
+extensions = ['.h','.cc','.c','.cpp']
+files_to_fix = []
+for extension in extensions:
+    files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
+                                                       extension))
+
+# This function identifies the begining of a trace statement
+def istracebegining(line):
+    return stringmanipulation.issubstring(line, trace_identifier) != -1
+
+def endofstatement(line):
+    return stringmanipulation.issubstring(line, ';') != -1
+
+def removekeywordfound(line):
+    return stringmanipulation.issubstring(line, trace_remove_key_word) != -1
+
+# Used to store temporary result before flushing to real file when finished
+def temporaryfilename():
+    # Scratch file name, created next to the file being rewritten.
+    return 'deleteme.txt'
+
+
+def find_occurances(path, file_name):
+    # Append to the global 'occurances' list the starting line number of
+    # every WEBRTC_TRACE statement that mentions trace_remove_key_word.
+    full_filename = path + file_name
+    file_handle = open(full_filename,'r')
+    line_is_trace = False
+    last_trace_line = -1
+    for line_nr, line in enumerate(file_handle):
+        if(istracebegining(line)):
+            line_is_trace = True;
+            last_trace_line = line_nr
+
+        if(line_is_trace):
+            if(removekeywordfound(line)):
+                # Record where the statement started, not where the keyword
+                # was seen (statements may span several lines).
+                occurances.append(last_trace_line)
+
+        if(endofstatement(line)):
+            line_is_trace = False;
+
+def remove_occurances(path, file_name):
+    full_file_name = path + file_name
+    if (not filemanagement.fileexist(full_file_name)):
+        print 'File ' + full_file_name + ' is not found.'
+        print 'Should not happen! Ever!'
+        quit()
+
+    full_temporary_file_name = path + temporaryfilename()
+    temporary_file = open(full_temporary_file_name,'w')
+    original_file = open(full_file_name,'r')
+    next_occurance_id = 0;
+    removing_statement = False
+    if(len(occurances) == next_occurance_id):
+        return
+    next_occurance = occurances[next_occurance_id]
+    next_occurance_id += 1
+    for line_nr, line in enumerate(original_file):
+        if(line_nr == next_occurance):
+            removing_statement = True
+            if(len(occurances) == next_occurance_id):
+                next_occurance_id = -1
+            else:
+                next_occurance = occurances[next_occurance_id]
+                next_occurance_id += 1
+
+        if (not removing_statement):
+            temporary_file.writelines(line)
+
+        if(endofstatement(line)):
+            removing_statement = False;
+
+    temporary_file.close()
+    original_file.close()
+    filemanagement.copyfile(full_file_name,full_temporary_file_name)
+    filemanagement.deletefile(full_temporary_file_name)
+
+def nextoccurance():
+    if (len(occurances) == 0):
+        return -1
+    return_value = occurances[0]
+    occurances = occurances[1:len(occurances)]
+    return return_value
+
+def would_be_removed_occurances(path, file_name):
+    # Dry-run twin of remove_occurances: prints the line numbers a commit
+    # run would delete, without touching the file.
+    full_file_name = path + file_name
+    if (not filemanagement.fileexist(full_file_name)):
+        print 'File ' + full_file_name + ' is not found.'
+        print 'Should not happen! Ever!'
+        quit()
+
+    original_file = open(full_file_name,'r')
+    removing_statement = False
+    next_occurance_id = 0;
+    if(len(occurances) == next_occurance_id):
+        return
+    next_occurance = occurances[next_occurance_id]
+    next_occurance_id += 1
+    for line_nr, line in enumerate(original_file):
+        if(line_nr == next_occurance):
+            removing_statement = True
+            # NOTE(review): returning here skips printing the lines of the
+            # LAST matched statement — confirm whether that is intended.
+            if(len(occurances) == next_occurance_id):
+                return
+            next_occurance = occurances[next_occurance_id]
+            next_occurance_id += 1
+
+        if (removing_statement):
+            print line_nr
+
+        if(endofstatement(line)):
+            removing_statement = False;
+            # NOTE(review): next_occurance is never set to -1 inside this
+            # function, so this break looks unreachable — confirm.
+            if(next_occurance == -1):
+                break
+    original_file.close()
+
+# Walk all candidate files: collect the kTraceModuleCall statements and
+# either report them (dry run) or strip them from the file (--commit).
+for index in range(len(files_to_fix)):
+    if(commit):
+        # Progress indicator (integer percent).
+        print (100*index)/len(files_to_fix)
+
+    path_dir = files_to_fix[index][0]
+    filename = files_to_fix[index][1]
+
+    #print path_dir + filename
+    occurances = []
+    find_occurances(path_dir, filename)
+
+    if(not commit):
+        would_be_removed_occurances(path_dir, filename)
+        continue
+    remove_occurances(path_dir, filename)
diff --git a/trunk/tools/refactoring/stringmanipulation.py b/trunk/tools/refactoring/stringmanipulation.py
new file mode 100644
index 0000000..0d9e0ff
--- /dev/null
+++ b/trunk/tools/refactoring/stringmanipulation.py
@@ -0,0 +1,303 @@
+import string
+
+# returns tuple [success, updated_string] where the updated string has
+# one less (the first) occurrence of match_string
+def removefirstoccurance( remove_string, match_string ):
+    lowercase_string = remove_string.lower()
+    lowercase_match_string = match_string.lower()
+    lowest_index = lowercase_string.find(lowercase_match_string)
+    if(lowest_index == -1):
+        return [False,remove_string]
+    past_match_index = lowest_index + len(lowercase_match_string)
+    highest_index = len(remove_string)
+    remove_string = remove_string[0:lowest_index] + remove_string[past_match_index: highest_index]
+    return [True,remove_string]
+
+# returns a string with all occurrences of match_string removed
+def removealloccurances( remove_string, match_string ):
+    return_value = [True, remove_string]
+    while(return_value[0]):
+        return_value = removefirstoccurance(return_value[1],match_string)
+    return return_value[1]
+
+# removes an occurrence of match_string only if it's first in the string
+# returns tuple [success, new_string]
+def removeprefix( remove_string, match_string ):
+    lowercase_string = remove_string.lower()
+    lowercase_match_string = match_string.lower()
+    lowest_index = lowercase_string.find(lowercase_match_string)
+    if(lowest_index == -1):
+        return [False,remove_string]
+    if(lowest_index != 0):
+        return [False,remove_string]
+    past_match_index = lowest_index + len(lowercase_match_string)
+    highest_index = len(remove_string)
+    remove_string = remove_string[0:lowest_index] + remove_string[past_match_index: highest_index]
+#    print lowest_index
+#    print past_match_index
+    return [True,remove_string]
+
+# removes multiple occurrences of match_string as long as they are first in
+# the string
+def removeallprefix( remove_string, match_string ):
+    return_value = [True, remove_string]
+    while(return_value[0]):
+        return_value = removeprefix(return_value[1],match_string)
+    return return_value[1]
+
+# returns true if extensionstring is a correct extension
+def isextension( extensionstring ):
+    if(len(extensionstring) < 2):
+        return False
+    if(extensionstring[0] != '.'):
+        return False
+    if(extensionstring[1:len(extensionstring)-1].find('.') != -1):
+        return False
+    return True
+
+# returns the index of the start of the last occurrence of match_string
+def findlastoccurance( original_string, match_string ):
+    search_index = original_string.find(match_string)
+    found_index = search_index
+    last_index = len(original_string) - 1
+    while((search_index != -1) and (search_index < last_index)):
+        search_index = original_string[search_index+1:last_index].find(match_string)
+        if(search_index != -1):
+            found_index = search_index
+    return found_index
+
+# changes extension from original_extension to new_extension
+def changeextension( original_string, original_extension, new_extension):
+    if(not isextension(original_extension)):
+        return original_string
+    if(not isextension(new_extension)):
+        return original_string
+    index = findlastoccurance(original_string, original_extension)
+    if(index == -1):
+        return original_string
+    return_value = original_string[0:index] + new_extension
+    return return_value
+
+# wanted to do this with str.find, however it didn't seem to work so do it manually
+# returns the index of the first capital letter
+def findfirstcapitalletter( original_string ):
+    for index in range(len(original_string)):
+        if(original_string[index].lower() != original_string[index]):
+            return index
+    return -1
+
+
+# replaces capital letters with underscore and lower case letter (except the
+# very first letter)
+def lowercasewithunderscore( original_string ):
+# ignore the first letter since there should be no underscore in front of it
+    if(len(original_string) < 2):
+        return original_string
+    return_value = original_string[1:len(original_string)]
+    index = findfirstcapitalletter(return_value)
+    while(index != -1):
+        return_value = return_value[0:index] + \
+                       '_' + \
+                       return_value[index].lower() + \
+                       return_value[index+1:len(return_value)]
+        index = findfirstcapitalletter(return_value)
+    return_value = original_string[0].lower() + return_value
+    return return_value
+
+# my_table is a list of string pairs; removes duplicate pairs
+def removeduplicates( my_table ):
+    new_table = []
+    for old_string1, new_string1 in my_table:
+        found = 0
+        for old_string2, new_string2 in new_table:
+            if(old_string1 == old_string2):
+                found += 1
+            if(new_string1 == new_string2):
+                if(new_string1 == ''):
+                    found += found
+                else:
+                    found += 1
+            if(found == 1):
+                print 'missmatching set, terminating program'
+                print old_string1
+                print new_string1
+                print old_string2
+                print new_string2
+                quit()
+            if(found == 2):
+                break
+        if(found == 0):
+            new_table.append([old_string1,new_string1])
+    return new_table
+
+def removenochange( my_table ):
+    new_table = []
+    for old_string, new_string in my_table:
+        if(old_string != new_string):
+            new_table.append([old_string,new_string])
+    return new_table
+
+# order the table by size of the string (can be used to replace bigger strings
+# first, which is useful since smaller strings can be inside the bigger string)
+# E.g. GIPS is a substring of GIPSVE; if we remove GIPS first, GIPSVE will never
+# be removed. N is small so no need for fancy sort algorithm. Use selection sort
+def ordertablesizefirst( my_table ):
+    for current_index in range(len(my_table)):
+        biggest_string = 0
+        biggest_string_index = -1
+        for search_index in range(len(my_table)):
+            if(search_index < current_index):
+                continue
+            length_of_string = len(my_table[search_index][0])
+            if(length_of_string > biggest_string):
+                biggest_string = length_of_string
+                biggest_string_index = search_index
+        if(biggest_string_index == -1):
+            print 'sorting algorithm failed, program exit'
+            quit()
+        old_value = my_table[current_index]
+        my_table[current_index] = my_table[biggest_string_index]
+        my_table[biggest_string_index] = old_value
+    return my_table
+
+# returns the match index if string 1 or 2 is a substring of the other (-1 if
+# neither is), assuming neither has whitespaces
+def issubstring( string1, string2 ):
+    if(len(string1) == 0):
+        return -1
+    if(len(string2) == 0):
+        return -1
+    large_string = string1
+    small_string = string2
+    if(len(string1) < len(string2)):
+        large_string = string2
+        small_string = string1
+
+    for index in range(len(large_string)):
+        large_sub_string = large_string[index:index+len(small_string)].lower()
+        if(large_sub_string ==\
+           small_string.lower()):
+              return index
+    return -1
+
+#not_part_of_word_table = [' ','(',')','{','}',':','\t','*','&','/','[',']','.',',','\n']
+#def ispartofword( char ):
+#    for item in not_part_of_word_table:
+#        if(char == item):
+#            return False
+#    return True
+
+# must be numerical, '_' or an alphabetic character
+def ispartofword( char ):
+    if(char.isalpha()):
+        return True
+    if(char.isalnum()):
+        return True
+    if(char == '_'):
+        return True
+    return False
+
+# returns the index of the first letter in the word that the current_index
+# is pointing to and the size of the word
+def getword( line, current_index):
+    if(current_index < 0):
+        return []
+    line = line.rstrip()
+    if(len(line) <= current_index):
+        return []
+    if(line[current_index] == ' '):
+        return []
+    start_pos = current_index
+    while start_pos >= 0:
+        if(not ispartofword(line[start_pos])):
+            start_pos += 1
+            break
+        start_pos -= 1
+    if(start_pos == -1):
+        start_pos = 0
+    end_pos = current_index
+    while end_pos < len(line):
+        if(not ispartofword(line[end_pos])):
+            break
+        end_pos += 1
+    return [start_pos,end_pos - start_pos]
+
+# my_table is a list of tuples [string1, string2]; complement_to_table is just
+# a list of strings to compare to string1
+def complement( my_table, complement_to_table ):
+    new_table = []
+    for index in range(len(my_table)):
+        found = False;
+        for compare_string in complement_to_table:
+            if(my_table[index][0].lower() == compare_string.lower()):
+                found = True
+        if(not found):
+            new_table.append(my_table[index])
+    return new_table
+
+def removestringfromhead( line, remove_string):
+    for index in range(len(line)):
+        if(line[index:index+len(remove_string)] != remove_string):
+            return line[index:index+len(line)]
+    return ''
+
+def removeccomment( line ):
+    comment_string = '//'
+    for index in range(len(line)):
+        if(line[index:index+len(comment_string)] == comment_string):
+            return line[0:index]
+    return line
+
+def whitespacestoonespace( line ):
+    return ' '.join(line.split())
+
+def fixabbreviations( original_string ):
+    previouswascapital = (original_string[0].upper() == original_string[0])
+    new_string = ''
+    for index in range(len(original_string)):
+        if(index == 0):
+            new_string += original_string[index]
+            continue
+        if(original_string[index] == '_'):
+            new_string += original_string[index]
+            previouswascapital = False
+            continue
+        if(original_string[index].isdigit()):
+            new_string += original_string[index]
+            previouswascapital = False
+            continue
+        currentiscapital = (original_string[index].upper() == original_string[index])
+        letter_to_add = original_string[index]
+        if(previouswascapital and currentiscapital):
+            letter_to_add = letter_to_add.lower()
+        if(previouswascapital and (not currentiscapital)):
+            old_letter = new_string[len(new_string)-1]
+            new_string = new_string[0:len(new_string)-1]
+            new_string += old_letter.upper()
+        previouswascapital = currentiscapital
+        new_string += letter_to_add
+    return new_string
+
+def replaceoccurances(old_string, replace_string, replace_with_string):
+    if (len(replace_string) == 0):
+        return old_string
+    if (len(old_string) < len(replace_string)):
+        return old_string
+    # Simple implementation, could probably be done smarter
+    new_string = ''
+    for index in range(len(old_string)):
+        #print new_string
+        if(len(replace_string) > (len(old_string) - index)):
+            new_string += old_string[index:index + len(old_string)]
+            break
+        match = (len(replace_string) > 0)
+        for replace_index in range(len(replace_string)):
+            if (replace_string[replace_index] != old_string[index + replace_index]):
+                match = False
+                break
+        if (match):
+            new_string += replace_with_string
+            index =+ len(replace_string)
+        else:
+            new_string += old_string[index]
+    return new_string
diff --git a/trunk/tools/refactoring/trim.py b/trunk/tools/refactoring/trim.py
new file mode 100644
index 0000000..5539f5f
--- /dev/null
+++ b/trunk/tools/refactoring/trim.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+import sys
+import fileinput
+
+# Defaults
+TABSIZE = 4
+
+usage = """
+Replaces all TAB characters with %(TABSIZE)d space characters.
+In addition, all trailing space characters are removed.
+usage: trim file ...
+file ... : files are changed in place without taking any backup.
+""" % vars()
+
+def main():
+
+    if len(sys.argv) == 1:
+        sys.stderr.write(usage)
+        sys.exit(2)
+
+    # Iterate over the lines of all files listed in sys.argv[1:]
+    for line in fileinput.input(sys.argv[1:], inplace=True):
+        line = line.replace('\t',' '*TABSIZE);    # replace TABs
+        line = line.rstrip(None)  # remove trailing whitespaces
+        print line                # modify the file
+
+if __name__ == '__main__':
+    main()
diff --git a/trunk/tools/refactoring/trimall.py b/trunk/tools/refactoring/trimall.py
new file mode 100644
index 0000000..7a1c458
--- /dev/null
+++ b/trunk/tools/refactoring/trimall.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+import sys
+import fileinput
+import filemanagement
+import p4commands
+
+# Defaults
+TABSIZE = 4
+
+extensions = ['.h','.cc','.c','.cpp']
+
+ignore_these = ['my_ignore_header.h']
+
+usage = """
+Replaces all TAB characters with %(TABSIZE)d space characters.
+In addition, all trailing space characters are removed.
+usage: trim directory
+""" % vars()
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    sys.stderr.write(usage)
+    sys.exit(2)
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    sys.stderr.write(usage)
+    sys.exit(2)
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    sys.stderr.write(usage)
+    sys.exit(2)
+
+commit = False
+if(len(sys.argv) == 3):
+    commit = True
+
+files_to_fix = []
+for extension in extensions:
+    files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
+                                                       extension))
+
+def main():
+    if (commit):
+        p4commands.checkoutallfiles()
+    for path,file_name in files_to_fix:
+        full_file_name = path + file_name
+        if (not commit):
+            print full_file_name + ' will be edited'
+            continue
+        for line in fileinput.input(full_file_name, inplace=True):
+            line = line.replace('\t',' '*TABSIZE);    # replace TABs
+            line = line.rstrip(None)  # remove trailing whitespaces
+            print line                # modify the file
+    if (commit):
+        p4commands.revertunchangedfiles()
+
+if __name__ == '__main__':
+    main()
diff --git a/trunk/tools/resources/update.py b/trunk/tools/resources/update.py
new file mode 100755
index 0000000..a07e2c1
--- /dev/null
+++ b/trunk/tools/resources/update.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+"""Downloads WebRTC resources files from a remote host."""
+
+from optparse import OptionParser
+from urlparse import urljoin
+import os
+import shutil
+import sys
+import tarfile
+import tempfile
+import urllib2
+
+
+def main():
+  """
+  Downloads WebRTC resources files from a remote host.
+
+  This script will download WebRTC resource files used for testing, like audio
+  and video files. It will check the current version in the DEPS file and
+  compare it with the one downloaded (kept in a text file in the download dir).
+  If the DEPS version is different than the one downloaded, the correct version
+  will be downloaded.
+  """
+  # Constants.
+  deps_key = 'webrtc_resources_revision'
+  remote_url_base = 'http://commondatastorage.googleapis.com/webrtc-resources/'
+  version_filename = 'webrtc-resources-version'
+  filename_prefix = 'webrtc-resources-'
+  extension = '.tgz'
+
+  # Variables used by the script.
+  project_root_dir = os.path.normpath(sys.path[0] + '/../../')
+  deps_file = os.path.join(project_root_dir, 'DEPS')
+  downloads_dir = os.path.join(project_root_dir, 'resources')
+  current_version_file = os.path.join(downloads_dir, version_filename)
+
+  # Ensure the downloads dir is created.
+  if not os.path.isdir(downloads_dir):
+    os.mkdir(downloads_dir)
+
+  # Define and parse arguments.
+  parser = OptionParser()
+  parser.add_option('-f', '--force', action='store_true', dest='force',
+                    help='forces download and removes all existing resources.')
+  (options, unused_args) = parser.parse_args()
+
+  # Check if we have an existing version already downloaded.
+  current_version = 0
+  if os.path.isfile(current_version_file):
+    f = open(current_version_file)
+    current_version = int(f.read())
+    f.close()
+    print 'Found downloaded resources: version: %s' % current_version
+
+  # Check the DEPS file for the latest version number.
+  deps_vars = EvalDepsFile(deps_file)['vars']
+  latest_version = int(deps_vars[deps_key])
+  print 'Version in DEPS file: %d' % latest_version
+
+  # Download archive if forced or DEPS version is different than our current.
+  if latest_version != current_version or options.force:
+    temp_dir = tempfile.mkdtemp(prefix='webrtc-resources-')
+    archive_name = '%s%s%s' % (filename_prefix, latest_version, extension)
+    remote_archive_url = urljoin(remote_url_base, archive_name)
+    # Download into the temporary directory with display of progress, inspired
+    # by the Stack Overflow post at http://goo.gl/JIrbo
+    temp_file = os.path.join(temp_dir, archive_name)
+    print 'Downloading: %s' % remote_archive_url
+    u = urllib2.urlopen(remote_archive_url)
+    f = open(temp_file, 'wb')
+    meta = u.info()
+    file_size = int(meta.getheaders('Content-Length')[0])
+    print 'Progress: %s bytes: %s' % (archive_name, file_size)
+
+    file_size_dl = 0
+    block_size = 65536
+    while True:
+      file_buffer = u.read(block_size)
+      if not file_buffer:
+        break
+      file_size_dl += len(file_buffer)
+      f.write(file_buffer)
+      status = r'%10d  [%3.2f%%]' % (file_size_dl,
+                                     file_size_dl * 100. / file_size)
+      status += chr(8) * (len(status) + 1)
+      print status,
+    print
+    f.close()
+
+    # Clean up the existing resources dir.
+    print 'Removing old resources in %s' % downloads_dir
+    shutil.rmtree(downloads_dir)
+    os.mkdir(downloads_dir)
+
+    # Write the downloaded version to a text file in the resources dir to avoid
+    # re-download of the same version in the future.
+    new_version_file = os.path.join(downloads_dir, version_filename)
+    f = open(new_version_file, 'w')
+    f.write('%d' % latest_version)
+    f.close()
+
+    # Extract the archive.
+    archive = tarfile.open(temp_file, 'r:gz')
+    archive.extractall(downloads_dir)
+    archive.close()
+    print 'Extracted resource files into %s' % downloads_dir
+    # Clean up the temp dir.
+    shutil.rmtree(temp_dir)
+  else:
+    print 'Already have correct version: %s' % current_version
+
+
+def EvalDepsFile(path):
+  scope = {'Var': lambda name: scope['vars'][name],
+           'File': lambda name: name,
+           'From': lambda deps, definition: deps}
+  execfile(path, {}, scope)
+  return scope
+
+if __name__ == '__main__':
+  main()
diff --git a/trunk/tools/valgrind-webrtc/memcheck/suppressions.txt b/trunk/tools/valgrind-webrtc/memcheck/suppressions.txt
new file mode 100644
index 0000000..1123017
--- /dev/null
+++ b/trunk/tools/valgrind-webrtc/memcheck/suppressions.txt
@@ -0,0 +1,30 @@
+# There are four kinds of suppressions in this file.
+# 1. third party stuff we have no control over
+#
+# 2. intentional unit test errors, or stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing
+#
+# 3. Suppressions for real WebRTC bugs that are not yet fixed.
+# These should all be in WebRTC's bug tracking system.
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#
+# 4. issues that happen only on Google workstations.
+#-----------------------------------------------------------------------
+
+
+
+# 1. third party stuff we have no control over
+
+#-----------------------------------------------------------------------
+# 2. intentional unit test errors, or stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing
+
+#-----------------------------------------------------------------------
+# 3. Suppressions for real WebRTC bugs that are not yet fixed.
+# These should all be in WebRTC's bug tracking system.
+
+
+#-----------------------------------------------------------------------
+# 4. These only occur on our Google workstations
+
diff --git a/trunk/tools/valgrind-webrtc/memcheck/suppressions_mac.txt b/trunk/tools/valgrind-webrtc/memcheck/suppressions_mac.txt
new file mode 100644
index 0000000..a387a64
--- /dev/null
+++ b/trunk/tools/valgrind-webrtc/memcheck/suppressions_mac.txt
@@ -0,0 +1,323 @@
+# There are three kinds of suppressions in this file:
+# 1. Third party stuff we have no control over.
+#
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+#
+# 3. Suppressions for real WebRTC bugs that are not yet fixed.
+# These should all be in WebRTC's bug tracking system.
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#-----------------------------------------------------------------------
+
+# 1. Third party stuff we have no control over.
+{
+   FIXME mac kevent libevent probably needs valgrind hooks
+   Memcheck:Param
+   kevent(changelist)
+   fun:kevent
+   fun:event_base_new
+}
+{
+   # CoreAudio leak.  See http://crbug.com/9351
+   bug_9351
+   Memcheck:Leak
+   ...
+   fun:_ZN12HALCADClient19AddPropertyListenerEmPK26AudioObjectPropertyAddressPFlmmS2_PvES3_
+   fun:_ZN16HALDefaultDevice22InstallServerListenersEv
+   fun:_ZN16HALDefaultDevice10InitializeEv
+   fun:_ZN9HALSystem16CheckOutInstanceEv
+}
+{
+   # Mac test_shell_tests.  See http://crbug.com/11134
+   # Doesn't happen on bots, but happens like crazy on the smo
+   # test machine 'caliban'.  Don't delete just because it
+   # doesn't happen on the bots.
+   bug_11134
+   Memcheck:Uninitialized
+   fun:vCMMVectorConvert8BitRGBToRGB
+   fun:_ZNK15CMMConvRGBToRGB7ConvertER8CMM8BitsP14CMMRuntimeInfomm
+}
+{
+   # Mac system library bug?  See http://crbug.com/11327
+   bug_11327
+   Memcheck:Uninitialized
+   fun:_ZN19AudioConverterChain5ResetEv
+   fun:AudioConverterReset
+   obj:/System/Library/Components/CoreAudio.component/Contents/MacOS/CoreAudio
+}
+{
+   # Mac system library bug?  See http://crbug.com/11327
+   bug_11327b
+   Memcheck:Uninitialized
+   fun:AUNetSendEntry
+   fun:AUNetSendEntry
+   obj:/System/Library/Components/CoreAudio.component/Contents/MacOS/CoreAudio
+}
+{
+   # Filed with Apple as rdar://6915060; see http://crbug.com/11270
+   bug_11270
+   Memcheck:Leak
+   fun:calloc
+   fun:CMSSetLabCLUT
+}
+{
+   # Mac leak in CMOpenOrNewAccess in unit_tests PlatformCanvas_SkLayer_Test,
+   # ToolbarControllerTest_FocusLocation_Test. See http://crbug.com/11333.
+   bug_11333
+   Memcheck:Leak
+   fun:malloc
+   fun:stdSmartNewPtr
+   fun:stdSmartNewHandle
+   fun:IOCreateAndOpen
+   fun:ScratchInit
+   fun:CMOpenOrNewAccess
+}
+{
+   # suddenly very common as of 6 aug 2009
+   bug_11333b
+   Memcheck:Leak
+   fun:malloc
+   fun:stdSmartNewPtr
+   fun:stdSmartNewHandle
+   fun:IOCreateAndOpen
+   fun:ScratchInit
+   fun:CMNewAccessFromAnother
+}
+{
+   # Tiny one-time leak, widely seen by valgrind users; everyone suppresses this.
+   # See related discussion at http://gcc.gnu.org/bugzilla/show_bug.cgi?id=39366
+   plugin_bundle_global_leak
+   Memcheck:Leak
+   fun:malloc
+   fun:__cxa_get_globals
+   fun:__cxa_allocate_exception
+   fun:_ZN4dyld4loadEPKcRKNS_11LoadContextE
+   fun:dlopen
+   fun:dlopen
+   fun:_CFBundleDlfcnCheckLoaded
+}
+{
+   bug_18215
+   Memcheck:Uninitialized
+   fun:_DPSNextEvent
+   fun:-[NSApplication nextEventMatchingMask:untilDate:inMode:dequeue:]
+   fun:-[NSApplication run]
+}
+{
+   bug_18223
+   Memcheck:Uninitialized
+   fun:_ZNK8Security12UnixPlusPlus17StaticForkMonitorclEv
+   fun:_ZN12ocspdGlobals10serverPortEv
+}
+{
+   # Filed with Apple as rdar://7255382
+   bug_20459a
+   Memcheck:Leak
+   ...
+   fun:_CFRuntimeCreateInstance
+   fun:CFRunLoopSourceCreate
+   fun:CFMachPortCreateRunLoopSource
+   fun:_ZN8Security12MachPlusPlus10CFAutoPort6enableEv
+   fun:_ZN8Security14SecurityServer14ThreadNotifierC2Ev
+}
+{
+   # Also filed with Apple as rdar://7255382
+   bug_20459b
+   Memcheck:Leak
+   fun:malloc_zone_malloc
+   fun:_CFRuntimeCreateInstance
+   fun:__CFArrayInit
+   fun:CFArrayCreateMutableCopy
+   fun:_ZN8Security12KeychainCore5Trust8evaluateEv
+}
+# See description of bug_20653a/b in suppressions.txt.
+{
+   bug_20653a_mac
+   Memcheck:Param
+   write(buf)
+   fun:write$UNIX2003
+   fun:pager_write_pagelist
+}
+{
+   bug_20653b_mac
+   Memcheck:Param
+   write(buf)
+   fun:write$UNIX2003
+   ...
+   fun:pager_write
+}
+
+# See http://www.openradar.me/8287193
+{
+   Invalid redzone accesses in DKeyHas8Words
+   Memcheck:Unaddressable
+   fun:DKeyHas8Words
+}
+
+# See https://bugs.kde.org/show_bug.cgi?id=188572
+# This suppression is missing in Valgrind on Mac 10.6
+# TODO(glider): remove it once it arrives in the trunk.
+{
+   Unavoidable leak in setenv()
+   Memcheck:Leak
+   fun:malloc_zone_malloc
+   fun:__setenv
+   fun:setenv$UNIX2003
+}
+{
+   # Reported to Apple as rdar://6915429
+   bug_12525
+   Memcheck:Leak
+   ...
+   fun:-[CIContextImpl render:toBitmap:rowBytes:bounds:format:colorSpace:]
+}
+{
+   bug_69436
+   Memcheck:Leak
+   ...
+   fun:-[CIKernel initWithCString:noCopy:]
+   ...
+   fun:-[NSPopUpButtonCell _drawIndicatorWithFrame:inView:]
+}
+{
+   # Capturer on Mac uses OpenGL driver, which triggers several warnings.
+   # The check has to be quite generic, as different hardware graphics cards
+   # will cause different sets of warnings.
+   bug_75037
+   Memcheck:Uninitialized
+   ...
+   fun:_ZN8remoting*CapturerMac*
+}
+{
+   # See also http://openradar.appspot.com/radar?id=1235407
+   bug_77063
+   Memcheck:Free
+   fun:_ZdlPv
+   fun:_ZN15THFSPlusCatalogD2Ev
+   fun:_ZN5TNode10SetCatalogEP15THFSPlusCatalog
+   fun:_ZN15TMountPointList9AddVolumeEsb
+   fun:_ZN15TMountPointList4FindEsPN5TNode12StPopulatingE
+   fun:_ZN15TMountPointList20SupportsInvisibleBitEsPN5TNode12StPopulatingEb
+   fun:_ZNK21THFSPlusPropertyStore4OpenEbb
+   fun:_ZNK21THFSPlusPropertyStore13GetPropertiesEb
+   fun:_ZN16TFSCopyOperation22GetSourcePropertyStoreERK11THFSPlusRef
+   fun:_ZN16TFSCopyOperation13DoMoveToTrashERK11THFSPlusRef
+   fun:_ZN16TFSCopyOperation3RunEv
+   fun:_FSOperation
+   fun:_FSOperateOnObjectSync
+   fun:FSMoveObjectToTrashSync
+   fun:_Z9TrashFuncRK8FilePath
+}
+{
+   # See also http://openradar.appspot.com/radar?id=1169404
+   bug_79533a
+   Memcheck:Uninitialized
+   ...
+   fun:_Z*19cssm_DataAbortQuery17cssm_dl_db_handlel
+   fun:CSSM_DL_DataAbortQuery
+   fun:_ZN11SSDLSession14DataAbortQueryEll
+   fun:_Z*19cssm_DataAbortQuery17cssm_dl_db_handlel
+   fun:CSSM_DL_DataAbortQuery
+   fun:tpDbFindIssuerCrl
+   fun:tpVerifyCertGroupWithCrls
+}
+{
+   # See also http://openradar.appspot.com/radar?id=1169404
+   bug_79533b
+   Memcheck:Uninitialized
+   ...
+   fun:_Z*19cssm_DataAbortQuery17cssm_dl_db_handlel
+   fun:CSSM_DL_DataAbortQuery
+   fun:_ZN11SSDLSession14DataAbortQueryEll
+   fun:_Z*19cssm_DataAbortQuery17cssm_dl_db_handlel
+   fun:CSSM_DL_DataAbortQuery
+   fun:tpDbFindIssuerCrl
+   fun:tpVerifyCertGroupWithCrls
+}
+{
+   bug_85213_a
+   Memcheck:Leak
+   ...
+   fun:_CFBundleCopyDirectoryContentsAtPath
+}
+{
+   bug_85213_b
+   Memcheck:Leak
+   ...
+   fun:_CFBundleCopyInfoDictionaryInDirectoryWithVersion
+}
+{
+   bug_85213_c
+   Memcheck:Leak
+   ...
+   fun:_CFBundleURLLooksLikeBundleVersion
+}
+{
+   bug_85213_d
+   Memcheck:Leak
+   ...
+   fun:_CFBundleCreate
+   fun:_ZN6webkit5npapi9PluginLib17ReadWebPluginInfoERK8FilePathPNS0_13WebPluginInfoE
+}
+{
+   bug_85213_e
+   Memcheck:Leak
+   ...
+   fun:CFBundlePreflightExecutable
+   fun:_ZN6webkit5npapi9PluginLib17ReadWebPluginInfoERK8FilePathPNS0_13WebPluginInfoE
+}
+{
+   bug_85213_f
+   Memcheck:Leak
+   ...
+   fun:CFBundleGetPackageInfo
+   fun:_ZN6webkit5npapi9PluginLib17ReadWebPluginInfoERK8FilePathPNS0_13WebPluginInfoE
+}
+{
+   bug_86927
+   Memcheck:Leak
+   fun:malloc
+   fun:CGSMapShmem
+   fun:CGSResolveShmemReference
+   fun:CGSScoreboard
+   fun:initCGDisplayState
+   fun:initCGDisplayMappings
+   fun:cgsInit
+   fun:pthread_once
+   fun:CGSInitialize
+   fun:CGSServerOperationState
+   fun:+[NSThemeFrame initialize]
+   fun:_class_initialize
+}
+{
+   # QTKit leak. See http://crbug.com/100772 and rdar://10319535.
+   bug_100772
+   Memcheck:Leak
+   fun:calloc
+   fun:QTMLCreateMutex
+   fun:WarholCreateGlobals
+   fun:INIT_QuickTimeLibInternal
+   fun:pthread_once
+   fun:INIT_QuickTimeLib
+   fun:EnterMovies_priv
+   fun:EnterMovies
+   fun:TundraUnitInputFromTSFileEntry
+   fun:TundraUnitVDIGInputEntry
+   fun:TundraUnitCreateFromDescription
+   fun:+[QTCaptureVDIGDevice _refreshDevices]
+   fun:+[QTCaptureVDIGDevice devicesWithIOType:]
+   fun:+[QTCaptureDevice devicesWithIOType:]
+   fun:+[QTCaptureDevice inputDevices]
+   fun:+[QTCaptureDevice inputDevicesWithMediaType:]
+   fun:+[VideoCaptureDeviceQTKit deviceNames]
+   fun:_ZN5media18VideoCaptureDevice14GetDeviceNamesEPSt4listINS0_4NameESaIS2_EE
+   fun:_ZN5media21VideoCaptureDeviceMac4InitEv
+   fun:_ZN5media18VideoCaptureDevice6CreateERKNS0_4NameE
+   fun:_ZN5media45VideoCaptureDeviceTest_OpenInvalidDevice_Test8TestBodyEv
+}
+
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+
+# 3. Suppressions for real WebRTC bugs that are not yet fixed.
diff --git a/trunk/tools/valgrind-webrtc/webrtc_tests.py b/trunk/tools/valgrind-webrtc/webrtc_tests.py
new file mode 100755
index 0000000..b532a1f
--- /dev/null
+++ b/trunk/tools/valgrind-webrtc/webrtc_tests.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+''' Runs various WebRTC tests through valgrind_test.py.
+
+This script inherits the chrome_tests.py in Chrome, replacing its tests with
+our own in WebRTC instead.
+'''
+
+import optparse
+import sys
+
+import logging_utils
+
+import chrome_tests
+
+class WebRTCTests(chrome_tests.ChromeTests):
+  # WebRTC tests; one method per test, mirroring the Chrome test methods
+  # in the parent class.
+  def TestSignalProcessing(self):
+    return self.SimpleTest("signal_processing", "signal_processing_unittests")
+
+  def TestResampler(self):
+    return self.SimpleTest("resampler", "resampler_unittests")
+
+  def TestVAD(self):
+    return self.SimpleTest("vad", "vad_unittests")
+
+  def TestCNG(self):
+    return self.SimpleTest("cng", "cng_unittests")
+
+  def TestG711(self):
+    return self.SimpleTest("g711", "g711_unittests")
+
+  def TestG722(self):
+    return self.SimpleTest("g722", "g722_unittests")
+
+  def TestPCM16B(self):
+    return self.SimpleTest("pcm16b", "pcm16b_unittests")
+
+  def TestNetEQ(self):
+    return self.SimpleTest("neteq", "neteq_unittests")
+
+  def TestAudioConferenceMixer(self):
+    return self.SimpleTest("audio_conference_mixer", "audio_conference_mixer_unittests")
+
+  def TestMediaFile(self):
+    return self.SimpleTest("media_file", "media_file_unittests")
+
+  def TestRTPRTCP(self):
+    return self.SimpleTest("rtp_rtcp", "rtp_rtcp_unittests")
+
+  def TestBWE(self):
+    return self.SimpleTest("test_bwe", "test_bwe")
+
+  def TestUDPTransport(self):
+    return self.SimpleTest("udp_transport", "udp_transport_unittests")
+
+  def TestWebRTCUtility(self):
+    return self.SimpleTest("webrtc_utility", "webrtc_utility_unittests")
+
+  def TestVP8(self):
+    return self.SimpleTest("vp8", "vp8_unittests")
+
+  def TestVideoCoding(self):
+    return self.SimpleTest("video_coding", "video_coding_unittests")
+
+  def TestVideoProcessing(self):
+    return self.SimpleTest("video_processing", "video_processing_unittests")
+
+  def TestSystemWrappers(self):
+    return self.SimpleTest("system_wrappers", "system_wrappers_unittests")
+
+  def TestTestSupport(self):
+    return self.SimpleTest("test_support", "test_support_unittests")
+
+def _main(_):
+  parser = optparse.OptionParser("usage: %prog -b <dir> -t <test> "
+                                 "[-t <test> ...]")
+  parser.disable_interspersed_args()
+  parser.add_option("-b", "--build_dir",
+                    help="the location of the compiler output")
+  parser.add_option("-t", "--test", action="append", default=[],
+                    help="which test to run, supports test:gtest_filter format "
+                         "as well.")
+  parser.add_option("", "--baseline", action="store_true", default=False,
+                    help="generate baseline data instead of validating")
+  parser.add_option("", "--gtest_filter",
+                    help="additional arguments to --gtest_filter")
+  parser.add_option("", "--gtest_repeat",
+                    help="argument for --gtest_repeat")
+  parser.add_option("-v", "--verbose", action="store_true", default=False,
+                    help="verbose output - enable debug log messages")
+  parser.add_option("", "--tool", dest="valgrind_tool", default="memcheck",
+                    help="specify a valgrind tool to run the tests under")
+  parser.add_option("", "--tool_flags", dest="valgrind_tool_flags", default="",
+                    help="specify custom flags for the selected valgrind tool")
+  parser.add_option("", "--keep_logs", action="store_true", default=False,
+                    help="store memory tool logs in the <tool>.logs directory "
+                         "instead of /tmp.\nThis can be useful for tool "
+                         "developers/maintainers.\nPlease note that the <tool>"
+                         ".logs directory will be clobbered on tool startup.")
+  options, args = parser.parse_args()
+
+  if options.verbose:
+    logging_utils.config_root(logging.DEBUG)
+  else:
+    logging_utils.config_root()
+
+  if not options.test:
+    parser.error("--test not specified")
+
+  if len(options.test) != 1 and options.gtest_filter:
+    parser.error("--gtest_filter and multiple tests don't make sense together")
+
+  for t in options.test:
+    tests = WebRTCTests(options, args, t)
+    ret = tests.Run()
+    if ret: return ret
+  return 0
+
+if __name__ == "__main__":
+  # Overwrite the ChromeTests test dictionary with our WebRTC tests.
+  # The cmdline option allows the user to pass any executable as parameter to
+  # the test script, which is useful when developing new tests that are not yet
+  # present in this script.
+  chrome_tests.ChromeTests._test_list = {
+    "cmdline": chrome_tests.ChromeTests.RunCmdLine,
+    "signal_processing": WebRTCTests.TestSignalProcessing,
+    "resampler": WebRTCTests.TestResampler,
+    "vad": WebRTCTests.TestVAD,
+    "cng": WebRTCTests.TestCNG,
+    "g711": WebRTCTests.TestG711,
+    "g722": WebRTCTests.TestG722,
+    "pcm16b": WebRTCTests.TestPCM16B,
+    "neteq": WebRTCTests.TestNetEQ,
+    "audio_conference_mixer": WebRTCTests.TestAudioConferenceMixer,
+    "media_file": WebRTCTests.TestMediaFile,
+    "rtp_rtcp": WebRTCTests.TestRTPRTCP,
+    "test_bwe": WebRTCTests.TestBWE,
+    "udp_transport": WebRTCTests.TestUDPTransport,
+    "webrtc_utility": WebRTCTests.TestWebRTCUtility,
+    "vp8": WebRTCTests.TestVP8,
+    "video_coding": WebRTCTests.TestVideoCoding,
+    "video_processing": WebRTCTests.TestVideoProcessing,
+    "system_wrappers": WebRTCTests.TestSystemWrappers,
+    "test_support": WebRTCTests.TestTestSupport,
+  }
+  ret = _main(sys.argv)
+  sys.exit(ret)  
\ No newline at end of file
diff --git a/trunk/tools/valgrind-webrtc/webrtc_tests.sh b/trunk/tools/valgrind-webrtc/webrtc_tests.sh
new file mode 100755
index 0000000..e33b78c
--- /dev/null
+++ b/trunk/tools/valgrind-webrtc/webrtc_tests.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Set up some paths and redirect the arguments to webrtc_tests.py
+
+# This script is a copy of the chrome_tests.sh wrapper script with the following
+# changes:
+# - The locate_valgrind.sh of Chromium's Valgrind scripts dir is used to locate
+#   the Valgrind framework install.
+# - webrtc_tests.py is invoked instead of chrome_tests.py.
+# - Chromium's Valgrind scripts directory is added to the PYTHONPATH to make
+#   it possible to execute the Python scripts properly.
+
+export THISDIR=`dirname $0`
+ARGV_COPY="$@"
+
+# We need to set CHROME_VALGRIND iff using Memcheck or TSan-Valgrind:
+#   tools/valgrind/chrome_tests.sh --tool memcheck
+# or
+#   tools/valgrind/chrome_tests.sh --tool=memcheck
+# (same for "--tool=tsan")
+tool="memcheck"  # Default to memcheck.
+while (( "$#" ))
+do
+  if [[ "$1" == "--tool" ]]
+  then
+    tool="$2"
+    shift
+  elif [[ "$1" =~ --tool=(.*) ]]
+  then
+    tool="${BASH_REMATCH[1]}"
+  fi
+  shift
+done
+
+NEEDS_VALGRIND=0
+NEEDS_DRMEMORY=0
+
+case "$tool" in
+  "memcheck")
+    NEEDS_VALGRIND=1
+    ;;
+  "tsan" | "tsan_rv")
+    NEEDS_VALGRIND=1
+    ;;
+  "drmemory" | "drmemory_light" | "drmemory_full")
+    NEEDS_DRMEMORY=1
+    ;;
+esac
+
+# For WebRTC, we'll use the locate_valgrind.sh script in Chromium's Valgrind
+# scripts dir to locate the Valgrind framework install.
+CHROME_VALGRIND_SCRIPTS=$THISDIR/../valgrind
+
+if [ "$NEEDS_VALGRIND" == "1" ]
+then
+  CHROME_VALGRIND=`sh $CHROME_VALGRIND_SCRIPTS/locate_valgrind.sh`
+  if [ "$CHROME_VALGRIND" = "" ]
+  then
+    # locate_valgrind.sh failed
+    exit 1
+  fi
+  echo "Using valgrind binaries from ${CHROME_VALGRIND}"
+
+  PATH="${CHROME_VALGRIND}/bin:$PATH"
+  # We need to set these variables to override default lib paths hard-coded into
+  # Valgrind binary.
+  export VALGRIND_LIB="$CHROME_VALGRIND/lib/valgrind"
+  export VALGRIND_LIB_INNER="$CHROME_VALGRIND/lib/valgrind"
+fi
+
+if [ "$NEEDS_DRMEMORY" == "1" ]
+then
+  if [ -z "$DRMEMORY_COMMAND" ]
+  then
+    DRMEMORY_PATH="$THISDIR/../../third_party/drmemory"
+    DRMEMORY_SFX="$DRMEMORY_PATH/drmemory-windows-sfx.exe"
+    if [ ! -f "$DRMEMORY_SFX" ]
+    then
+      echo "Can't find Dr. Memory executables."
+      echo "See http://www.chromium.org/developers/how-tos/using-valgrind/dr-memory"
+      echo "for the instructions on how to get them."
+      exit 1
+    fi
+
+    chmod +x "$DRMEMORY_SFX"  # Cygwin won't run it without +x.
+    "$DRMEMORY_SFX" -o"$DRMEMORY_PATH/unpacked" -y
+    export DRMEMORY_COMMAND="$DRMEMORY_PATH/unpacked/bin/drmemory.exe"
+  fi
+fi
+
+# Add Chrome's Valgrind scripts dir to the PYTHONPATH since it contains
+# the scripts that are needed for this script to run.
+PYTHONPATH=$THISDIR/../python/google:$CHROME_VALGRIND_SCRIPTS python \
+           "$THISDIR/webrtc_tests.py" $ARGV_COPY
diff --git a/trunk/webrtc.gyp b/trunk/webrtc.gyp
new file mode 100644
index 0000000..e18399b
--- /dev/null
+++ b/trunk/webrtc.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ 'src/build/common.gypi', ],
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        'src/common_audio/common_audio.gyp:*',
+        'src/common_video/common_video.gyp:*',
+        'src/modules/modules.gyp:*',
+        'src/system_wrappers/source/system_wrappers.gyp:*',
+        'src/video_engine/video_engine.gyp:*',
+        'src/voice_engine/voice_engine.gyp:*',
+        'test/metrics.gyp:*',
+        'test/test.gyp:*',
+      ],
+    },
+  ],
+  'conditions': [
+  ],  # conditions
+}